diff --git a/whisper/whisper_base_finetuned/README.md b/whisper/whisper_base_finetuned/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..4dc951a672dbff8701d7e29f5a9a7a3fee9b604c
--- /dev/null
+++ b/whisper/whisper_base_finetuned/README.md
@@ -0,0 +1,89 @@
+---
+tags:
+- generated_from_trainer
+datasets:
+- audiofolder
+metrics:
+- wer
+model-index:
+- name: whisper_base_finetuned
+  results:
+  - task:
+      name: Automatic Speech Recognition
+      type: automatic-speech-recognition
+    dataset:
+      name: audiofolder
+      type: audiofolder
+      config: default
+      split: validation
+      args: default
+    metrics:
+    - name: Wer
+      type: wer
+      value: 0.3192600084831479
+---
+
+
+
+[Visualize in Weights & Biases](https://wandb.ai/querying/huggingface/runs/1wtpwccg)
+# whisper_base_finetuned
+
+This model is a fine-tuned version of Whisper base on the audiofolder dataset.
+It achieves the following results on the evaluation set:
+- Loss: 0.3375
+- Wer: 0.3193
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 1e-05
+- train_batch_size: 16
+- eval_batch_size: 8
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 4
+- total_train_batch_size: 64
+- total_eval_batch_size: 32
+- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+- lr_scheduler_type: linear
+- lr_scheduler_warmup_steps: 500
+- num_epochs: 10
+- mixed_precision_training: Native AMP
+
+### Training results
+
+| Training Loss | Epoch | Step | Validation Loss | Wer    |
+|:-------------:|:-----:|:----:|:---------------:|:------:|
+| 0.4111        | 1.0   | 973  | 0.4590          | 0.4551 |
+| 0.4068        | 2.0   | 1946 | 0.3847          | 0.4812 |
+| 0.3617        | 3.0   | 2919 | 0.3585          | 0.4326 |
+| 0.3144        | 4.0   | 3892 | 0.3436          | 0.3594 |
+| 0.272         | 5.0   | 4865 | 0.3425          | 0.3639 |
+| 0.2246        | 6.0   | 5838 | 0.3371          | 0.3341 |
+| 0.1541        | 7.0   | 6811 | 0.3404          | 0.3377 |
+| 0.1387        | 8.0   | 7784 | 0.3370          | 0.3196 |
+| 0.1554        | 9.0   | 8757 | 0.3387          | 0.3113 |
+| 0.1692        | 10.0  | 9730 | 0.3375          | 0.3193 |
+
+
+### Framework versions
+
+- Transformers 4.42.0.dev0
+- Pytorch 2.2.1
+- Datasets 2.19.0
+- Tokenizers 0.19.1
diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/config.json b/whisper/whisper_base_finetuned/checkpoint-1946/config.json new file mode 100644 index 0000000000000000000000000000000000000000..91728b7bc6c3a43bb11e0d161949a286ca009408 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/config.json @@ -0,0 +1,52 @@ +{ + "_name_or_path": "whisper_base_finetuned", + "activation_dropout": 0.0, + "activation_function": "gelu", + "apply_spec_augment": true, + "architectures": [ + "WhisperForConditionalGeneration" + ], + "attention_dropout": 0.0, + "begin_suppress_tokens": [ + 220, + 50257 + ], + "bos_token_id": 50257, + "classifier_proj_size": 256, + "d_model": 512, + "decoder_attention_heads": 8, + "decoder_ffn_dim": 2048, + "decoder_layerdrop": 0.0, + "decoder_layers": 6, + "decoder_start_token_id": 50258, + "dropout": 0.0, + "encoder_attention_heads": 8, + "encoder_ffn_dim": 2048, + "encoder_layerdrop": 0.0, + "encoder_layers": 6, + "eos_token_id": 50257, + "forced_decoder_ids": null, + "init_std": 0.02, + "is_encoder_decoder": true, + "mask_feature_length": 10, + "mask_feature_min_masks": 0, + "mask_feature_prob": 0.05, + "mask_time_length": 10, + "mask_time_min_masks": 2, + "mask_time_prob": 0.05, +
"max_length": 448, + "max_source_positions": 1500, + "max_target_positions": 448, + "median_filter_width": 7, + "model_type": "whisper", + "num_hidden_layers": 6, + "num_mel_bins": 80, + "pad_token_id": 50257, + "scale_embedding": false, + "suppress_tokens": [], + "torch_dtype": "float32", + "transformers_version": "4.42.0.dev0", + "use_cache": true, + "use_weighted_layer_sum": false, + "vocab_size": 51865 +} diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/generation_config.json b/whisper/whisper_base_finetuned/checkpoint-1946/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ce877d310342bb057324d0dfcf6f83dc6055c1a --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/generation_config.json @@ -0,0 +1,256 @@ +{ + "alignment_heads": [ + [ + 3, + 1 + ], + [ + 4, + 2 + ], + [ + 4, + 3 + ], + [ + 4, + 7 + ], + [ + 5, + 1 + ], + [ + 5, + 2 + ], + [ + 5, + 4 + ], + [ + 5, + 6 + ] + ], + "begin_suppress_tokens": [ + 220, + 50257 + ], + "bos_token_id": 50257, + "decoder_start_token_id": 50258, + "eos_token_id": 50257, + "forced_decoder_ids": [ + [ + 1, + null + ], + [ + 2, + 50359 + ] + ], + "is_multilingual": true, + "lang_to_id": { + "<|af|>": 50327, + "<|am|>": 50334, + "<|ar|>": 50272, + "<|as|>": 50350, + "<|az|>": 50304, + "<|ba|>": 50355, + "<|be|>": 50330, + "<|bg|>": 50292, + "<|bn|>": 50302, + "<|bo|>": 50347, + "<|br|>": 50309, + "<|bs|>": 50315, + "<|ca|>": 50270, + "<|cs|>": 50283, + "<|cy|>": 50297, + "<|da|>": 50285, + "<|de|>": 50261, + "<|el|>": 50281, + "<|en|>": 50259, + "<|es|>": 50262, + "<|et|>": 50307, + "<|eu|>": 50310, + "<|fa|>": 50300, + "<|fi|>": 50277, + "<|fo|>": 50338, + "<|fr|>": 50265, + "<|gl|>": 50319, + "<|gu|>": 50333, + "<|haw|>": 50352, + "<|ha|>": 50354, + "<|he|>": 50279, + "<|hi|>": 50276, + "<|hr|>": 50291, + "<|ht|>": 50339, + "<|hu|>": 50286, + "<|hy|>": 50312, + "<|id|>": 50275, + "<|is|>": 50311, + "<|it|>": 50274, + "<|ja|>": 50266, + "<|jw|>": 50356, + "<|ka|>": 50329, + "<|kk|>": 50316, + "<|km|>": 50323, + "<|kn|>": 50306, + "<|ko|>": 50264, + "<|la|>": 50294, + "<|lb|>": 50345, + "<|ln|>": 50353, + "<|lo|>": 50336, + "<|lt|>": 50293, + "<|lv|>": 50301, + "<|mg|>": 50349, + "<|mi|>": 50295, + "<|mk|>": 50308, + "<|ml|>": 50296, + "<|mn|>": 50314, + "<|mr|>": 50320, + "<|ms|>": 50282, + "<|mt|>": 50343, + "<|my|>": 50346, + "<|ne|>": 50313, + "<|nl|>": 50271, + "<|nn|>": 50342, + "<|no|>": 50288, + "<|oc|>": 50328, + "<|pa|>": 50321, + "<|pl|>": 50269, + "<|ps|>": 50340, + "<|pt|>": 50267, + "<|ro|>": 50284, + "<|ru|>": 50263, + "<|sa|>": 50344, + "<|sd|>": 50332, + "<|si|>": 50322, + "<|sk|>": 50298, + "<|sl|>": 50305, + "<|sn|>": 50324, + "<|so|>": 50326, + "<|sq|>": 50317, + "<|sr|>": 50303, + "<|su|>": 50357, + "<|sv|>": 50273, + "<|sw|>": 50318, + "<|ta|>": 50287, + "<|te|>": 50299, + "<|tg|>": 50331, + "<|th|>": 50289, + "<|tk|>": 50341, + "<|tl|>": 50348, + "<|tr|>": 50268, + "<|tt|>": 50351, + "<|uk|>": 50280, + "<|ur|>": 50290, + "<|uz|>": 50337, + "<|vi|>": 50278, + "<|yi|>": 50335, + "<|yo|>": 50325, + "<|zh|>": 50260 + }, + "max_initial_timestamp_index": 50, + "max_length": 448, + "no_timestamps_token_id": 50363, + "pad_token_id": 50257, + "prev_sot_token_id": 50361, + "return_timestamps": false, + "suppress_tokens": [ + 1, + 2, + 7, + 8, + 9, + 10, + 14, + 25, + 26, + 27, + 28, + 29, + 31, + 58, + 59, + 60, + 61, + 62, + 63, + 90, + 91, + 92, + 93, + 359, + 503, + 522, + 542, + 873, + 893, + 902, + 918, + 922, + 931, + 1350, + 1853, + 1982, + 2460, + 2627, + 3246, + 
3253, + 3268, + 3536, + 3846, + 3961, + 4183, + 4667, + 6585, + 6647, + 7273, + 9061, + 9383, + 10428, + 10929, + 11938, + 12033, + 12331, + 12562, + 13793, + 14157, + 14635, + 15265, + 15618, + 16553, + 16604, + 18362, + 18956, + 20075, + 21675, + 22520, + 26130, + 26161, + 26435, + 28279, + 29464, + 31650, + 32302, + 32470, + 36865, + 42863, + 47425, + 49870, + 50254, + 50258, + 50358, + 50359, + 50360, + 50361, + 50362 + ], + "task_to_id": { + "transcribe": 50359, + "translate": 50358 + }, + "transformers_version": "4.42.0.dev0" +} diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/model.safetensors b/whisper/whisper_base_finetuned/checkpoint-1946/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d3ffdfb85687050faeb83213beaa74fff65de794 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dd026348fe37281549fd7c6a808f086ca0fd97d58b2cacc1f727f1b30983206 +size 290403936 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/optimizer.pt b/whisper/whisper_base_finetuned/checkpoint-1946/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..7e40da14e9976a0d6a0f63ba7bcad08369a4e853 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:553f15ba654f04fd871be21212d6df91de0ae8724da43efc4f15ac36ba17d344 +size 574811514 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/preprocessor_config.json b/whisper/whisper_base_finetuned/checkpoint-1946/preprocessor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..91876762a536a746d268353c5cba57286e76b058 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/preprocessor_config.json @@ -0,0 +1,14 @@ +{ + "chunk_length": 30, + "feature_extractor_type": "WhisperFeatureExtractor", + "feature_size": 80, + "hop_length": 160, + "n_fft": 400, + "n_samples": 480000, + "nb_max_frames": 3000, + "padding_side": "right", + "padding_value": 0.0, + "processor_class": "WhisperProcessor", + "return_attention_mask": false, + "sampling_rate": 16000 +} diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_0.pth b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_0.pth new file mode 100644 index 0000000000000000000000000000000000000000..e85eea7a525d710ba9857dc2024abefc4fd5f3ce --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_0.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a110c9d357e287a75aaf1239a21b93307ef274903bc341868b29e95bf582f6a +size 14960 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_1.pth b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_1.pth new file mode 100644 index 0000000000000000000000000000000000000000..7f065db1d42192691d649a6f3b410e4182bf0929 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_1.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dc7a73450f09e1dfbf3d92b7766dd920803a16d72378745ade594e61b71ef99 +size 14960 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_2.pth b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_2.pth new file mode 100644 index 0000000000000000000000000000000000000000..d572e386b045dba8ffedeefabdd36097b5947544 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_2.pth @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:0850a53101d32aee75186072819d788c275b8018ee81aca2b2e264027b3f706e +size 14960 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_3.pth b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_3.pth new file mode 100644 index 0000000000000000000000000000000000000000..6e9a0fa9bb800b68146b141b223bf27cfe150008 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/rng_state_3.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0f219c1ad064e6a471d6dd3b4c8e03edef564bd702d0376fe0265ca34089229 +size 14960 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/scheduler.pt b/whisper/whisper_base_finetuned/checkpoint-1946/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..ef56ee237ccd93ea2d7530bbf9795010a21e3961 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9047ca18e71ad6d17bbd68121c15bff7b1e23d09f7900127f0bf720216251fbf +size 1064 diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/trainer_state.json b/whisper/whisper_base_finetuned/checkpoint-1946/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..001aaa593dfbbb51b2fd57b294ba01b0f5046ab5 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/trainer_state.json @@ -0,0 +1,1409 @@ +{ + "best_metric": 0.48120656465137523, + "best_model_checkpoint": "./whisper_base_finetuned/checkpoint-1946", + "epoch": 2.0, + "eval_steps": 500, + "global_step": 1946, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.010277492291880781, + "grad_norm": 78.41651916503906, + "learning_rate": 1.0000000000000001e-07, + "loss": 3.3505, + "step": 10 + }, + { + "epoch": 0.020554984583761562, + "grad_norm": 74.72834777832031, + "learning_rate": 3.0000000000000004e-07, + "loss": 3.2167, + "step": 20 + }, + { + "epoch": 0.030832476875642344, + "grad_norm": 48.560585021972656, + "learning_rate": 5.000000000000001e-07, + "loss": 2.9513, + "step": 30 + }, + { + "epoch": 0.041109969167523124, + "grad_norm": 30.085025787353516, + "learning_rate": 7.000000000000001e-07, + "loss": 2.6321, + "step": 40 + }, + { + "epoch": 0.051387461459403906, + "grad_norm": 21.896045684814453, + "learning_rate": 9.000000000000001e-07, + "loss": 2.376, + "step": 50 + }, + { + "epoch": 0.06166495375128469, + "grad_norm": 16.388851165771484, + "learning_rate": 1.1e-06, + "loss": 2.1473, + "step": 60 + }, + { + "epoch": 0.07194244604316546, + "grad_norm": 15.245757102966309, + "learning_rate": 1.3e-06, + "loss": 1.9096, + "step": 70 + }, + { + "epoch": 0.08221993833504625, + "grad_norm": 12.178542137145996, + "learning_rate": 1.5e-06, + "loss": 1.7493, + "step": 80 + }, + { + "epoch": 0.09249743062692703, + "grad_norm": 9.62636947631836, + "learning_rate": 1.7000000000000002e-06, + "loss": 1.5233, + "step": 90 + }, + { + "epoch": 0.10277492291880781, + "grad_norm": 9.04529094696045, + "learning_rate": 1.9000000000000002e-06, + "loss": 1.3753, + "step": 100 + }, + { + "epoch": 0.1130524152106886, + "grad_norm": 8.049132347106934, + "learning_rate": 2.1000000000000002e-06, + "loss": 1.2293, + "step": 110 + }, + { + "epoch": 0.12332990750256938, + "grad_norm": 7.2756147384643555, + "learning_rate": 2.3000000000000004e-06, + "loss": 1.1131, + "step": 120 + }, + { + "epoch": 0.13360739979445016, + "grad_norm": 
7.049572944641113, + "learning_rate": 2.5e-06, + "loss": 1.1889, + "step": 130 + }, + { + "epoch": 0.14388489208633093, + "grad_norm": 6.429234981536865, + "learning_rate": 2.7000000000000004e-06, + "loss": 1.0655, + "step": 140 + }, + { + "epoch": 0.15416238437821173, + "grad_norm": 6.281942844390869, + "learning_rate": 2.9e-06, + "loss": 0.9696, + "step": 150 + }, + { + "epoch": 0.1644398766700925, + "grad_norm": 6.72721004486084, + "learning_rate": 3.1000000000000004e-06, + "loss": 1.0683, + "step": 160 + }, + { + "epoch": 0.1747173689619733, + "grad_norm": 5.773904800415039, + "learning_rate": 3.3000000000000006e-06, + "loss": 1.0132, + "step": 170 + }, + { + "epoch": 0.18499486125385406, + "grad_norm": 6.2021870613098145, + "learning_rate": 3.5e-06, + "loss": 0.9325, + "step": 180 + }, + { + "epoch": 0.19527235354573483, + "grad_norm": 6.268314838409424, + "learning_rate": 3.7e-06, + "loss": 0.8658, + "step": 190 + }, + { + "epoch": 0.20554984583761562, + "grad_norm": 6.014781951904297, + "learning_rate": 3.900000000000001e-06, + "loss": 0.9349, + "step": 200 + }, + { + "epoch": 0.2158273381294964, + "grad_norm": 7.0086188316345215, + "learning_rate": 4.1e-06, + "loss": 0.9347, + "step": 210 + }, + { + "epoch": 0.2261048304213772, + "grad_norm": 6.318301200866699, + "learning_rate": 4.3e-06, + "loss": 0.8023, + "step": 220 + }, + { + "epoch": 0.23638232271325796, + "grad_norm": 5.627261638641357, + "learning_rate": 4.5e-06, + "loss": 0.871, + "step": 230 + }, + { + "epoch": 0.24665981500513876, + "grad_norm": 5.793784141540527, + "learning_rate": 4.7e-06, + "loss": 0.8654, + "step": 240 + }, + { + "epoch": 0.2569373072970195, + "grad_norm": 5.449838638305664, + "learning_rate": 4.9000000000000005e-06, + "loss": 0.7499, + "step": 250 + }, + { + "epoch": 0.2672147995889003, + "grad_norm": 4.993557453155518, + "learning_rate": 5.1e-06, + "loss": 0.7281, + "step": 260 + }, + { + "epoch": 0.2774922918807811, + "grad_norm": 6.478201866149902, + "learning_rate": 5.300000000000001e-06, + "loss": 0.7543, + "step": 270 + }, + { + "epoch": 0.28776978417266186, + "grad_norm": 5.355103492736816, + "learning_rate": 5.500000000000001e-06, + "loss": 0.7545, + "step": 280 + }, + { + "epoch": 0.29804727646454265, + "grad_norm": 4.926327228546143, + "learning_rate": 5.7e-06, + "loss": 0.7397, + "step": 290 + }, + { + "epoch": 0.30832476875642345, + "grad_norm": 5.97158145904541, + "learning_rate": 5.9e-06, + "loss": 0.7262, + "step": 300 + }, + { + "epoch": 0.3186022610483042, + "grad_norm": 5.3029584884643555, + "learning_rate": 6.1e-06, + "loss": 0.7085, + "step": 310 + }, + { + "epoch": 0.328879753340185, + "grad_norm": 4.928351879119873, + "learning_rate": 6.300000000000001e-06, + "loss": 0.6726, + "step": 320 + }, + { + "epoch": 0.3391572456320658, + "grad_norm": 5.714938640594482, + "learning_rate": 6.5000000000000004e-06, + "loss": 0.6985, + "step": 330 + }, + { + "epoch": 0.3494347379239466, + "grad_norm": 4.394782543182373, + "learning_rate": 6.700000000000001e-06, + "loss": 0.6555, + "step": 340 + }, + { + "epoch": 0.3597122302158273, + "grad_norm": 4.944245338439941, + "learning_rate": 6.9e-06, + "loss": 0.6629, + "step": 350 + }, + { + "epoch": 0.3699897225077081, + "grad_norm": 5.048007965087891, + "learning_rate": 7.100000000000001e-06, + "loss": 0.65, + "step": 360 + }, + { + "epoch": 0.3802672147995889, + "grad_norm": 5.267819404602051, + "learning_rate": 7.3e-06, + "loss": 0.5775, + "step": 370 + }, + { + "epoch": 0.39054470709146966, + "grad_norm": 5.552557945251465, + "learning_rate": 
7.500000000000001e-06, + "loss": 0.683, + "step": 380 + }, + { + "epoch": 0.40082219938335045, + "grad_norm": 4.37172269821167, + "learning_rate": 7.7e-06, + "loss": 0.5931, + "step": 390 + }, + { + "epoch": 0.41109969167523125, + "grad_norm": 5.033542156219482, + "learning_rate": 7.9e-06, + "loss": 0.6662, + "step": 400 + }, + { + "epoch": 0.42137718396711205, + "grad_norm": 4.7163190841674805, + "learning_rate": 8.1e-06, + "loss": 0.7605, + "step": 410 + }, + { + "epoch": 0.4316546762589928, + "grad_norm": 5.401676177978516, + "learning_rate": 8.3e-06, + "loss": 0.5939, + "step": 420 + }, + { + "epoch": 0.4419321685508736, + "grad_norm": 5.293227672576904, + "learning_rate": 8.5e-06, + "loss": 0.6061, + "step": 430 + }, + { + "epoch": 0.4522096608427544, + "grad_norm": 5.0345940589904785, + "learning_rate": 8.700000000000001e-06, + "loss": 0.5386, + "step": 440 + }, + { + "epoch": 0.4624871531346352, + "grad_norm": 4.590668678283691, + "learning_rate": 8.900000000000001e-06, + "loss": 0.5455, + "step": 450 + }, + { + "epoch": 0.4727646454265159, + "grad_norm": 5.621304512023926, + "learning_rate": 9.100000000000001e-06, + "loss": 0.514, + "step": 460 + }, + { + "epoch": 0.4830421377183967, + "grad_norm": 5.061606407165527, + "learning_rate": 9.3e-06, + "loss": 0.5945, + "step": 470 + }, + { + "epoch": 0.4933196300102775, + "grad_norm": 4.499748229980469, + "learning_rate": 9.5e-06, + "loss": 0.5241, + "step": 480 + }, + { + "epoch": 0.5035971223021583, + "grad_norm": 4.244873523712158, + "learning_rate": 9.7e-06, + "loss": 0.5179, + "step": 490 + }, + { + "epoch": 0.513874614594039, + "grad_norm": 5.057434558868408, + "learning_rate": 9.9e-06, + "loss": 0.5744, + "step": 500 + }, + { + "epoch": 0.5241521068859198, + "grad_norm": 5.502700328826904, + "learning_rate": 9.994582881906827e-06, + "loss": 0.5752, + "step": 510 + }, + { + "epoch": 0.5344295991778006, + "grad_norm": 4.976474285125732, + "learning_rate": 9.983748645720479e-06, + "loss": 0.6109, + "step": 520 + }, + { + "epoch": 0.5447070914696814, + "grad_norm": 4.148839473724365, + "learning_rate": 9.972914409534129e-06, + "loss": 0.5643, + "step": 530 + }, + { + "epoch": 0.5549845837615622, + "grad_norm": 3.930744171142578, + "learning_rate": 9.96208017334778e-06, + "loss": 0.4671, + "step": 540 + }, + { + "epoch": 0.5652620760534429, + "grad_norm": 5.741941928863525, + "learning_rate": 9.95124593716143e-06, + "loss": 0.5608, + "step": 550 + }, + { + "epoch": 0.5755395683453237, + "grad_norm": 4.145395755767822, + "learning_rate": 9.940411700975083e-06, + "loss": 0.4698, + "step": 560 + }, + { + "epoch": 0.5858170606372045, + "grad_norm": 4.2532830238342285, + "learning_rate": 9.929577464788733e-06, + "loss": 0.4084, + "step": 570 + }, + { + "epoch": 0.5960945529290853, + "grad_norm": 4.680564880371094, + "learning_rate": 9.918743228602385e-06, + "loss": 0.6091, + "step": 580 + }, + { + "epoch": 0.6063720452209661, + "grad_norm": 3.8156168460845947, + "learning_rate": 9.907908992416035e-06, + "loss": 0.5567, + "step": 590 + }, + { + "epoch": 0.6166495375128469, + "grad_norm": 4.722325801849365, + "learning_rate": 9.897074756229687e-06, + "loss": 0.5543, + "step": 600 + }, + { + "epoch": 0.6269270298047277, + "grad_norm": 5.177743911743164, + "learning_rate": 9.886240520043338e-06, + "loss": 0.477, + "step": 610 + }, + { + "epoch": 0.6372045220966084, + "grad_norm": 4.9859209060668945, + "learning_rate": 9.875406283856989e-06, + "loss": 0.4592, + "step": 620 + }, + { + "epoch": 0.6474820143884892, + "grad_norm": 
4.872037887573242, + "learning_rate": 9.86457204767064e-06, + "loss": 0.5632, + "step": 630 + }, + { + "epoch": 0.65775950668037, + "grad_norm": 4.967211723327637, + "learning_rate": 9.85373781148429e-06, + "loss": 0.5553, + "step": 640 + }, + { + "epoch": 0.6680369989722508, + "grad_norm": 4.748555660247803, + "learning_rate": 9.842903575297942e-06, + "loss": 0.5228, + "step": 650 + }, + { + "epoch": 0.6783144912641316, + "grad_norm": 4.945960521697998, + "learning_rate": 9.832069339111592e-06, + "loss": 0.5018, + "step": 660 + }, + { + "epoch": 0.6885919835560124, + "grad_norm": 3.7931437492370605, + "learning_rate": 9.821235102925244e-06, + "loss": 0.5347, + "step": 670 + }, + { + "epoch": 0.6988694758478932, + "grad_norm": 4.149494171142578, + "learning_rate": 9.810400866738896e-06, + "loss": 0.413, + "step": 680 + }, + { + "epoch": 0.7091469681397738, + "grad_norm": 4.979891300201416, + "learning_rate": 9.799566630552548e-06, + "loss": 0.4836, + "step": 690 + }, + { + "epoch": 0.7194244604316546, + "grad_norm": 5.043586730957031, + "learning_rate": 9.788732394366198e-06, + "loss": 0.5286, + "step": 700 + }, + { + "epoch": 0.7297019527235354, + "grad_norm": 4.017364978790283, + "learning_rate": 9.77789815817985e-06, + "loss": 0.3785, + "step": 710 + }, + { + "epoch": 0.7399794450154162, + "grad_norm": 4.4453959465026855, + "learning_rate": 9.7670639219935e-06, + "loss": 0.52, + "step": 720 + }, + { + "epoch": 0.750256937307297, + "grad_norm": 4.624840259552002, + "learning_rate": 9.756229685807152e-06, + "loss": 0.5339, + "step": 730 + }, + { + "epoch": 0.7605344295991778, + "grad_norm": 4.6119771003723145, + "learning_rate": 9.745395449620802e-06, + "loss": 0.4857, + "step": 740 + }, + { + "epoch": 0.7708119218910586, + "grad_norm": 4.147925853729248, + "learning_rate": 9.734561213434454e-06, + "loss": 0.4363, + "step": 750 + }, + { + "epoch": 0.7810894141829393, + "grad_norm": 5.529519557952881, + "learning_rate": 9.723726977248104e-06, + "loss": 0.5206, + "step": 760 + }, + { + "epoch": 0.7913669064748201, + "grad_norm": 3.9015376567840576, + "learning_rate": 9.712892741061756e-06, + "loss": 0.4836, + "step": 770 + }, + { + "epoch": 0.8016443987667009, + "grad_norm": 4.5102057456970215, + "learning_rate": 9.702058504875406e-06, + "loss": 0.4437, + "step": 780 + }, + { + "epoch": 0.8119218910585817, + "grad_norm": 5.272336006164551, + "learning_rate": 9.691224268689058e-06, + "loss": 0.4402, + "step": 790 + }, + { + "epoch": 0.8221993833504625, + "grad_norm": 4.404648303985596, + "learning_rate": 9.68039003250271e-06, + "loss": 0.4443, + "step": 800 + }, + { + "epoch": 0.8324768756423433, + "grad_norm": 4.636880397796631, + "learning_rate": 9.66955579631636e-06, + "loss": 0.4943, + "step": 810 + }, + { + "epoch": 0.8427543679342241, + "grad_norm": 4.826484203338623, + "learning_rate": 9.658721560130012e-06, + "loss": 0.5385, + "step": 820 + }, + { + "epoch": 0.8530318602261048, + "grad_norm": 4.46310567855835, + "learning_rate": 9.647887323943664e-06, + "loss": 0.413, + "step": 830 + }, + { + "epoch": 0.8633093525179856, + "grad_norm": 4.603589057922363, + "learning_rate": 9.637053087757314e-06, + "loss": 0.4801, + "step": 840 + }, + { + "epoch": 0.8735868448098664, + "grad_norm": 3.7884819507598877, + "learning_rate": 9.626218851570966e-06, + "loss": 0.4294, + "step": 850 + }, + { + "epoch": 0.8838643371017472, + "grad_norm": 3.0480997562408447, + "learning_rate": 9.615384615384616e-06, + "loss": 0.4737, + "step": 860 + }, + { + "epoch": 0.894141829393628, + "grad_norm": 
4.840622901916504, + "learning_rate": 9.604550379198268e-06, + "loss": 0.4806, + "step": 870 + }, + { + "epoch": 0.9044193216855088, + "grad_norm": 4.64235782623291, + "learning_rate": 9.59371614301192e-06, + "loss": 0.5359, + "step": 880 + }, + { + "epoch": 0.9146968139773896, + "grad_norm": 4.615347385406494, + "learning_rate": 9.58288190682557e-06, + "loss": 0.4526, + "step": 890 + }, + { + "epoch": 0.9249743062692704, + "grad_norm": 4.345542907714844, + "learning_rate": 9.572047670639221e-06, + "loss": 0.5112, + "step": 900 + }, + { + "epoch": 0.935251798561151, + "grad_norm": 3.5318965911865234, + "learning_rate": 9.561213434452872e-06, + "loss": 0.455, + "step": 910 + }, + { + "epoch": 0.9455292908530318, + "grad_norm": 4.852155685424805, + "learning_rate": 9.550379198266523e-06, + "loss": 0.5012, + "step": 920 + }, + { + "epoch": 0.9558067831449126, + "grad_norm": 4.666072368621826, + "learning_rate": 9.539544962080174e-06, + "loss": 0.4781, + "step": 930 + }, + { + "epoch": 0.9660842754367934, + "grad_norm": 4.7242865562438965, + "learning_rate": 9.528710725893825e-06, + "loss": 0.5102, + "step": 940 + }, + { + "epoch": 0.9763617677286742, + "grad_norm": 3.9831533432006836, + "learning_rate": 9.517876489707475e-06, + "loss": 0.416, + "step": 950 + }, + { + "epoch": 0.986639260020555, + "grad_norm": 4.294024467468262, + "learning_rate": 9.507042253521127e-06, + "loss": 0.4254, + "step": 960 + }, + { + "epoch": 0.9969167523124358, + "grad_norm": 4.132877826690674, + "learning_rate": 9.496208017334777e-06, + "loss": 0.4111, + "step": 970 + }, + { + "epoch": 1.0, + "eval_loss": 0.4590415954589844, + "eval_runtime": 1119.5991, + "eval_samples_per_second": 5.241, + "eval_steps_per_second": 0.164, + "eval_wer": 0.45510457111161867, + "step": 973 + }, + { + "epoch": 1.0071942446043165, + "grad_norm": 3.725668430328369, + "learning_rate": 9.485373781148431e-06, + "loss": 0.3556, + "step": 980 + }, + { + "epoch": 1.0174717368961974, + "grad_norm": 4.162373065948486, + "learning_rate": 9.474539544962081e-06, + "loss": 0.3531, + "step": 990 + }, + { + "epoch": 1.027749229188078, + "grad_norm": 3.697767734527588, + "learning_rate": 9.463705308775733e-06, + "loss": 0.3627, + "step": 1000 + }, + { + "epoch": 1.0380267214799588, + "grad_norm": 4.862727642059326, + "learning_rate": 9.452871072589383e-06, + "loss": 0.4393, + "step": 1010 + }, + { + "epoch": 1.0483042137718397, + "grad_norm": 4.021687030792236, + "learning_rate": 9.442036836403035e-06, + "loss": 0.4544, + "step": 1020 + }, + { + "epoch": 1.0585817060637204, + "grad_norm": 3.82734751701355, + "learning_rate": 9.431202600216685e-06, + "loss": 0.3937, + "step": 1030 + }, + { + "epoch": 1.0688591983556013, + "grad_norm": 3.5762205123901367, + "learning_rate": 9.420368364030337e-06, + "loss": 0.4722, + "step": 1040 + }, + { + "epoch": 1.079136690647482, + "grad_norm": 4.2156147956848145, + "learning_rate": 9.409534127843987e-06, + "loss": 0.3398, + "step": 1050 + }, + { + "epoch": 1.0894141829393629, + "grad_norm": 4.575065612792969, + "learning_rate": 9.398699891657639e-06, + "loss": 0.3457, + "step": 1060 + }, + { + "epoch": 1.0996916752312436, + "grad_norm": 3.8678557872772217, + "learning_rate": 9.387865655471289e-06, + "loss": 0.3178, + "step": 1070 + }, + { + "epoch": 1.1099691675231242, + "grad_norm": 4.01522970199585, + "learning_rate": 9.377031419284941e-06, + "loss": 0.4911, + "step": 1080 + }, + { + "epoch": 1.1202466598150052, + "grad_norm": 4.648536205291748, + "learning_rate": 9.366197183098593e-06, + "loss": 0.4531, + 
"step": 1090 + }, + { + "epoch": 1.1305241521068858, + "grad_norm": 4.106440544128418, + "learning_rate": 9.355362946912243e-06, + "loss": 0.3942, + "step": 1100 + }, + { + "epoch": 1.1408016443987667, + "grad_norm": 4.35145378112793, + "learning_rate": 9.344528710725895e-06, + "loss": 0.4071, + "step": 1110 + }, + { + "epoch": 1.1510791366906474, + "grad_norm": 4.132904529571533, + "learning_rate": 9.333694474539545e-06, + "loss": 0.4148, + "step": 1120 + }, + { + "epoch": 1.1613566289825283, + "grad_norm": 3.6607720851898193, + "learning_rate": 9.322860238353197e-06, + "loss": 0.298, + "step": 1130 + }, + { + "epoch": 1.171634121274409, + "grad_norm": 3.625558376312256, + "learning_rate": 9.312026002166849e-06, + "loss": 0.4097, + "step": 1140 + }, + { + "epoch": 1.1819116135662897, + "grad_norm": 4.5726494789123535, + "learning_rate": 9.301191765980499e-06, + "loss": 0.3953, + "step": 1150 + }, + { + "epoch": 1.1921891058581706, + "grad_norm": 4.445627212524414, + "learning_rate": 9.29035752979415e-06, + "loss": 0.3835, + "step": 1160 + }, + { + "epoch": 1.2024665981500513, + "grad_norm": 4.57354211807251, + "learning_rate": 9.279523293607802e-06, + "loss": 0.3832, + "step": 1170 + }, + { + "epoch": 1.2127440904419322, + "grad_norm": 4.104644775390625, + "learning_rate": 9.268689057421453e-06, + "loss": 0.416, + "step": 1180 + }, + { + "epoch": 1.223021582733813, + "grad_norm": 4.074865818023682, + "learning_rate": 9.257854821235104e-06, + "loss": 0.3608, + "step": 1190 + }, + { + "epoch": 1.2332990750256938, + "grad_norm": 3.4953205585479736, + "learning_rate": 9.247020585048755e-06, + "loss": 0.3351, + "step": 1200 + }, + { + "epoch": 1.2435765673175745, + "grad_norm": 4.539699077606201, + "learning_rate": 9.236186348862406e-06, + "loss": 0.4058, + "step": 1210 + }, + { + "epoch": 1.2538540596094552, + "grad_norm": 4.369785308837891, + "learning_rate": 9.225352112676057e-06, + "loss": 0.3412, + "step": 1220 + }, + { + "epoch": 1.264131551901336, + "grad_norm": 3.9678955078125, + "learning_rate": 9.214517876489708e-06, + "loss": 0.3832, + "step": 1230 + }, + { + "epoch": 1.274409044193217, + "grad_norm": 4.361431121826172, + "learning_rate": 9.203683640303359e-06, + "loss": 0.4099, + "step": 1240 + }, + { + "epoch": 1.2846865364850977, + "grad_norm": 4.076035022735596, + "learning_rate": 9.19284940411701e-06, + "loss": 0.4206, + "step": 1250 + }, + { + "epoch": 1.2949640287769784, + "grad_norm": 4.084390640258789, + "learning_rate": 9.18201516793066e-06, + "loss": 0.4271, + "step": 1260 + }, + { + "epoch": 1.3052415210688593, + "grad_norm": 3.262382745742798, + "learning_rate": 9.171180931744312e-06, + "loss": 0.3687, + "step": 1270 + }, + { + "epoch": 1.31551901336074, + "grad_norm": 4.841338634490967, + "learning_rate": 9.160346695557964e-06, + "loss": 0.3659, + "step": 1280 + }, + { + "epoch": 1.3257965056526206, + "grad_norm": 4.267407417297363, + "learning_rate": 9.149512459371616e-06, + "loss": 0.3784, + "step": 1290 + }, + { + "epoch": 1.3360739979445015, + "grad_norm": 3.525167465209961, + "learning_rate": 9.138678223185266e-06, + "loss": 0.3593, + "step": 1300 + }, + { + "epoch": 1.3463514902363825, + "grad_norm": 4.050453186035156, + "learning_rate": 9.127843986998918e-06, + "loss": 0.3737, + "step": 1310 + }, + { + "epoch": 1.3566289825282631, + "grad_norm": 3.8488476276397705, + "learning_rate": 9.117009750812568e-06, + "loss": 0.417, + "step": 1320 + }, + { + "epoch": 1.3669064748201438, + "grad_norm": 4.5935540199279785, + "learning_rate": 9.10617551462622e-06, + "loss": 
0.4056, + "step": 1330 + }, + { + "epoch": 1.3771839671120247, + "grad_norm": 4.8325581550598145, + "learning_rate": 9.09534127843987e-06, + "loss": 0.4208, + "step": 1340 + }, + { + "epoch": 1.3874614594039054, + "grad_norm": 4.0957818031311035, + "learning_rate": 9.084507042253522e-06, + "loss": 0.408, + "step": 1350 + }, + { + "epoch": 1.397738951695786, + "grad_norm": 5.2503767013549805, + "learning_rate": 9.073672806067174e-06, + "loss": 0.4164, + "step": 1360 + }, + { + "epoch": 1.408016443987667, + "grad_norm": 3.5155766010284424, + "learning_rate": 9.062838569880824e-06, + "loss": 0.3548, + "step": 1370 + }, + { + "epoch": 1.418293936279548, + "grad_norm": 3.994086503982544, + "learning_rate": 9.052004333694476e-06, + "loss": 0.4025, + "step": 1380 + }, + { + "epoch": 1.4285714285714286, + "grad_norm": 5.403133392333984, + "learning_rate": 9.041170097508126e-06, + "loss": 0.4171, + "step": 1390 + }, + { + "epoch": 1.4388489208633093, + "grad_norm": 3.069812774658203, + "learning_rate": 9.030335861321778e-06, + "loss": 0.3431, + "step": 1400 + }, + { + "epoch": 1.4491264131551902, + "grad_norm": 3.9196174144744873, + "learning_rate": 9.019501625135428e-06, + "loss": 0.4507, + "step": 1410 + }, + { + "epoch": 1.4594039054470709, + "grad_norm": 3.6087749004364014, + "learning_rate": 9.00866738894908e-06, + "loss": 0.336, + "step": 1420 + }, + { + "epoch": 1.4696813977389516, + "grad_norm": 4.544300079345703, + "learning_rate": 8.99783315276273e-06, + "loss": 0.4188, + "step": 1430 + }, + { + "epoch": 1.4799588900308325, + "grad_norm": 3.7331314086914062, + "learning_rate": 8.986998916576382e-06, + "loss": 0.4092, + "step": 1440 + }, + { + "epoch": 1.4902363823227134, + "grad_norm": 4.780219078063965, + "learning_rate": 8.976164680390034e-06, + "loss": 0.3587, + "step": 1450 + }, + { + "epoch": 1.500513874614594, + "grad_norm": 4.3289690017700195, + "learning_rate": 8.965330444203685e-06, + "loss": 0.3504, + "step": 1460 + }, + { + "epoch": 1.5107913669064748, + "grad_norm": 4.810067653656006, + "learning_rate": 8.954496208017336e-06, + "loss": 0.3261, + "step": 1470 + }, + { + "epoch": 1.5210688591983557, + "grad_norm": 4.626963138580322, + "learning_rate": 8.943661971830987e-06, + "loss": 0.41, + "step": 1480 + }, + { + "epoch": 1.5313463514902363, + "grad_norm": 4.636073589324951, + "learning_rate": 8.932827735644638e-06, + "loss": 0.3462, + "step": 1490 + }, + { + "epoch": 1.541623843782117, + "grad_norm": 4.311732769012451, + "learning_rate": 8.92199349945829e-06, + "loss": 0.3888, + "step": 1500 + }, + { + "epoch": 1.551901336073998, + "grad_norm": 4.3718461990356445, + "learning_rate": 8.91115926327194e-06, + "loss": 0.3813, + "step": 1510 + }, + { + "epoch": 1.5621788283658788, + "grad_norm": 3.1939361095428467, + "learning_rate": 8.900325027085591e-06, + "loss": 0.4208, + "step": 1520 + }, + { + "epoch": 1.5724563206577595, + "grad_norm": 4.095200538635254, + "learning_rate": 8.889490790899242e-06, + "loss": 0.3845, + "step": 1530 + }, + { + "epoch": 1.5827338129496402, + "grad_norm": 3.5258431434631348, + "learning_rate": 8.878656554712893e-06, + "loss": 0.3284, + "step": 1540 + }, + { + "epoch": 1.5930113052415211, + "grad_norm": 3.1825735569000244, + "learning_rate": 8.867822318526545e-06, + "loss": 0.3385, + "step": 1550 + }, + { + "epoch": 1.6032887975334018, + "grad_norm": 3.5939745903015137, + "learning_rate": 8.856988082340195e-06, + "loss": 0.4101, + "step": 1560 + }, + { + "epoch": 1.6135662898252825, + "grad_norm": 4.911982536315918, + "learning_rate": 
8.846153846153847e-06, + "loss": 0.4328, + "step": 1570 + }, + { + "epoch": 1.6238437821171634, + "grad_norm": 3.6301517486572266, + "learning_rate": 8.835319609967497e-06, + "loss": 0.3399, + "step": 1580 + }, + { + "epoch": 1.6341212744090443, + "grad_norm": 3.6853671073913574, + "learning_rate": 8.82448537378115e-06, + "loss": 0.3271, + "step": 1590 + }, + { + "epoch": 1.644398766700925, + "grad_norm": 3.029378652572632, + "learning_rate": 8.8136511375948e-06, + "loss": 0.3638, + "step": 1600 + }, + { + "epoch": 1.6546762589928057, + "grad_norm": 4.740921497344971, + "learning_rate": 8.802816901408451e-06, + "loss": 0.2964, + "step": 1610 + }, + { + "epoch": 1.6649537512846866, + "grad_norm": 4.348399639129639, + "learning_rate": 8.791982665222103e-06, + "loss": 0.364, + "step": 1620 + }, + { + "epoch": 1.6752312435765673, + "grad_norm": 4.521662712097168, + "learning_rate": 8.781148429035755e-06, + "loss": 0.3946, + "step": 1630 + }, + { + "epoch": 1.685508735868448, + "grad_norm": 4.327390670776367, + "learning_rate": 8.770314192849405e-06, + "loss": 0.3325, + "step": 1640 + }, + { + "epoch": 1.6957862281603289, + "grad_norm": 4.260695934295654, + "learning_rate": 8.759479956663057e-06, + "loss": 0.4112, + "step": 1650 + }, + { + "epoch": 1.7060637204522098, + "grad_norm": 3.723114490509033, + "learning_rate": 8.748645720476707e-06, + "loss": 0.3777, + "step": 1660 + }, + { + "epoch": 1.7163412127440905, + "grad_norm": 3.6276798248291016, + "learning_rate": 8.737811484290359e-06, + "loss": 0.4307, + "step": 1670 + }, + { + "epoch": 1.7266187050359711, + "grad_norm": 4.2474446296691895, + "learning_rate": 8.726977248104009e-06, + "loss": 0.3451, + "step": 1680 + }, + { + "epoch": 1.736896197327852, + "grad_norm": 4.7757368087768555, + "learning_rate": 8.71614301191766e-06, + "loss": 0.3528, + "step": 1690 + }, + { + "epoch": 1.7471736896197327, + "grad_norm": 3.768132209777832, + "learning_rate": 8.705308775731311e-06, + "loss": 0.4086, + "step": 1700 + }, + { + "epoch": 1.7574511819116134, + "grad_norm": 4.793600559234619, + "learning_rate": 8.694474539544963e-06, + "loss": 0.414, + "step": 1710 + }, + { + "epoch": 1.7677286742034943, + "grad_norm": 4.651284217834473, + "learning_rate": 8.683640303358613e-06, + "loss": 0.3424, + "step": 1720 + }, + { + "epoch": 1.7780061664953752, + "grad_norm": 3.656557083129883, + "learning_rate": 8.672806067172265e-06, + "loss": 0.2813, + "step": 1730 + }, + { + "epoch": 1.788283658787256, + "grad_norm": 3.836421012878418, + "learning_rate": 8.661971830985915e-06, + "loss": 0.3834, + "step": 1740 + }, + { + "epoch": 1.7985611510791366, + "grad_norm": 4.500270843505859, + "learning_rate": 8.651137594799567e-06, + "loss": 0.337, + "step": 1750 + }, + { + "epoch": 1.8088386433710175, + "grad_norm": 3.3500618934631348, + "learning_rate": 8.640303358613219e-06, + "loss": 0.393, + "step": 1760 + }, + { + "epoch": 1.8191161356628982, + "grad_norm": 3.52258563041687, + "learning_rate": 8.62946912242687e-06, + "loss": 0.397, + "step": 1770 + }, + { + "epoch": 1.829393627954779, + "grad_norm": 4.57402229309082, + "learning_rate": 8.61863488624052e-06, + "loss": 0.4048, + "step": 1780 + }, + { + "epoch": 1.8396711202466598, + "grad_norm": 3.952526092529297, + "learning_rate": 8.607800650054172e-06, + "loss": 0.316, + "step": 1790 + }, + { + "epoch": 1.8499486125385407, + "grad_norm": 4.35211706161499, + "learning_rate": 8.596966413867823e-06, + "loss": 0.4373, + "step": 1800 + }, + { + "epoch": 1.8602261048304214, + "grad_norm": 3.061844825744629, + 
"learning_rate": 8.586132177681474e-06, + "loss": 0.2665, + "step": 1810 + }, + { + "epoch": 1.870503597122302, + "grad_norm": 3.6002986431121826, + "learning_rate": 8.575297941495125e-06, + "loss": 0.3807, + "step": 1820 + }, + { + "epoch": 1.880781089414183, + "grad_norm": 4.012722492218018, + "learning_rate": 8.564463705308776e-06, + "loss": 0.376, + "step": 1830 + }, + { + "epoch": 1.8910585817060637, + "grad_norm": 3.516463041305542, + "learning_rate": 8.553629469122428e-06, + "loss": 0.3497, + "step": 1840 + }, + { + "epoch": 1.9013360739979444, + "grad_norm": 4.711485385894775, + "learning_rate": 8.542795232936078e-06, + "loss": 0.2968, + "step": 1850 + }, + { + "epoch": 1.9116135662898253, + "grad_norm": 3.4084625244140625, + "learning_rate": 8.53196099674973e-06, + "loss": 0.3475, + "step": 1860 + }, + { + "epoch": 1.9218910585817062, + "grad_norm": 3.9419453144073486, + "learning_rate": 8.52112676056338e-06, + "loss": 0.342, + "step": 1870 + }, + { + "epoch": 1.9321685508735869, + "grad_norm": 3.4985804557800293, + "learning_rate": 8.510292524377032e-06, + "loss": 0.3805, + "step": 1880 + }, + { + "epoch": 1.9424460431654675, + "grad_norm": 4.257175445556641, + "learning_rate": 8.499458288190682e-06, + "loss": 0.3727, + "step": 1890 + }, + { + "epoch": 1.9527235354573484, + "grad_norm": 3.3339366912841797, + "learning_rate": 8.488624052004334e-06, + "loss": 0.3684, + "step": 1900 + }, + { + "epoch": 1.9630010277492291, + "grad_norm": 4.362495422363281, + "learning_rate": 8.477789815817984e-06, + "loss": 0.3216, + "step": 1910 + }, + { + "epoch": 1.9732785200411098, + "grad_norm": 4.4011549949646, + "learning_rate": 8.466955579631638e-06, + "loss": 0.3437, + "step": 1920 + }, + { + "epoch": 1.9835560123329907, + "grad_norm": 3.513015031814575, + "learning_rate": 8.456121343445288e-06, + "loss": 0.3621, + "step": 1930 + }, + { + "epoch": 1.9938335046248716, + "grad_norm": 3.66763973236084, + "learning_rate": 8.44528710725894e-06, + "loss": 0.4068, + "step": 1940 + }, + { + "epoch": 2.0, + "eval_loss": 0.38473397493362427, + "eval_runtime": 2272.3326, + "eval_samples_per_second": 2.582, + "eval_steps_per_second": 0.081, + "eval_wer": 0.48120656465137523, + "step": 1946 + } + ], + "logging_steps": 10, + "max_steps": 9730, + "num_input_tokens_seen": 0, + "num_train_epochs": 10, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 8.077923711992201e+18, + "train_batch_size": 16, + "trial_name": null, + "trial_params": null +} diff --git a/whisper/whisper_base_finetuned/checkpoint-1946/training_args.bin b/whisper/whisper_base_finetuned/checkpoint-1946/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..cec0038665d32391824dfe472a35578679380744 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-1946/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9e2cc97af116b33d30c72667d46ddd426569f5f483ad8392e19d95860dfcc43 +size 5240 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/config.json b/whisper/whisper_base_finetuned/checkpoint-9730/config.json new file mode 100644 index 0000000000000000000000000000000000000000..91728b7bc6c3a43bb11e0d161949a286ca009408 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/config.json @@ -0,0 +1,52 @@ +{ + "_name_or_path": 
"whisper_base_finetuned", + "activation_dropout": 0.0, + "activation_function": "gelu", + "apply_spec_augment": true, + "architectures": [ + "WhisperForConditionalGeneration" + ], + "attention_dropout": 0.0, + "begin_suppress_tokens": [ + 220, + 50257 + ], + "bos_token_id": 50257, + "classifier_proj_size": 256, + "d_model": 512, + "decoder_attention_heads": 8, + "decoder_ffn_dim": 2048, + "decoder_layerdrop": 0.0, + "decoder_layers": 6, + "decoder_start_token_id": 50258, + "dropout": 0.0, + "encoder_attention_heads": 8, + "encoder_ffn_dim": 2048, + "encoder_layerdrop": 0.0, + "encoder_layers": 6, + "eos_token_id": 50257, + "forced_decoder_ids": null, + "init_std": 0.02, + "is_encoder_decoder": true, + "mask_feature_length": 10, + "mask_feature_min_masks": 0, + "mask_feature_prob": 0.05, + "mask_time_length": 10, + "mask_time_min_masks": 2, + "mask_time_prob": 0.05, + "max_length": 448, + "max_source_positions": 1500, + "max_target_positions": 448, + "median_filter_width": 7, + "model_type": "whisper", + "num_hidden_layers": 6, + "num_mel_bins": 80, + "pad_token_id": 50257, + "scale_embedding": false, + "suppress_tokens": [], + "torch_dtype": "float32", + "transformers_version": "4.42.0.dev0", + "use_cache": true, + "use_weighted_layer_sum": false, + "vocab_size": 51865 +} diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/generation_config.json b/whisper/whisper_base_finetuned/checkpoint-9730/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ce877d310342bb057324d0dfcf6f83dc6055c1a --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/generation_config.json @@ -0,0 +1,256 @@ +{ + "alignment_heads": [ + [ + 3, + 1 + ], + [ + 4, + 2 + ], + [ + 4, + 3 + ], + [ + 4, + 7 + ], + [ + 5, + 1 + ], + [ + 5, + 2 + ], + [ + 5, + 4 + ], + [ + 5, + 6 + ] + ], + "begin_suppress_tokens": [ + 220, + 50257 + ], + "bos_token_id": 50257, + "decoder_start_token_id": 50258, + "eos_token_id": 50257, + "forced_decoder_ids": [ + [ + 1, + null + ], + [ + 2, + 50359 + ] + ], + "is_multilingual": true, + "lang_to_id": { + "<|af|>": 50327, + "<|am|>": 50334, + "<|ar|>": 50272, + "<|as|>": 50350, + "<|az|>": 50304, + "<|ba|>": 50355, + "<|be|>": 50330, + "<|bg|>": 50292, + "<|bn|>": 50302, + "<|bo|>": 50347, + "<|br|>": 50309, + "<|bs|>": 50315, + "<|ca|>": 50270, + "<|cs|>": 50283, + "<|cy|>": 50297, + "<|da|>": 50285, + "<|de|>": 50261, + "<|el|>": 50281, + "<|en|>": 50259, + "<|es|>": 50262, + "<|et|>": 50307, + "<|eu|>": 50310, + "<|fa|>": 50300, + "<|fi|>": 50277, + "<|fo|>": 50338, + "<|fr|>": 50265, + "<|gl|>": 50319, + "<|gu|>": 50333, + "<|haw|>": 50352, + "<|ha|>": 50354, + "<|he|>": 50279, + "<|hi|>": 50276, + "<|hr|>": 50291, + "<|ht|>": 50339, + "<|hu|>": 50286, + "<|hy|>": 50312, + "<|id|>": 50275, + "<|is|>": 50311, + "<|it|>": 50274, + "<|ja|>": 50266, + "<|jw|>": 50356, + "<|ka|>": 50329, + "<|kk|>": 50316, + "<|km|>": 50323, + "<|kn|>": 50306, + "<|ko|>": 50264, + "<|la|>": 50294, + "<|lb|>": 50345, + "<|ln|>": 50353, + "<|lo|>": 50336, + "<|lt|>": 50293, + "<|lv|>": 50301, + "<|mg|>": 50349, + "<|mi|>": 50295, + "<|mk|>": 50308, + "<|ml|>": 50296, + "<|mn|>": 50314, + "<|mr|>": 50320, + "<|ms|>": 50282, + "<|mt|>": 50343, + "<|my|>": 50346, + "<|ne|>": 50313, + "<|nl|>": 50271, + "<|nn|>": 50342, + "<|no|>": 50288, + "<|oc|>": 50328, + "<|pa|>": 50321, + "<|pl|>": 50269, + "<|ps|>": 50340, + "<|pt|>": 50267, + "<|ro|>": 50284, + "<|ru|>": 50263, + "<|sa|>": 50344, + "<|sd|>": 50332, + "<|si|>": 50322, + "<|sk|>": 50298, + "<|sl|>": 
50305, + "<|sn|>": 50324, + "<|so|>": 50326, + "<|sq|>": 50317, + "<|sr|>": 50303, + "<|su|>": 50357, + "<|sv|>": 50273, + "<|sw|>": 50318, + "<|ta|>": 50287, + "<|te|>": 50299, + "<|tg|>": 50331, + "<|th|>": 50289, + "<|tk|>": 50341, + "<|tl|>": 50348, + "<|tr|>": 50268, + "<|tt|>": 50351, + "<|uk|>": 50280, + "<|ur|>": 50290, + "<|uz|>": 50337, + "<|vi|>": 50278, + "<|yi|>": 50335, + "<|yo|>": 50325, + "<|zh|>": 50260 + }, + "max_initial_timestamp_index": 50, + "max_length": 448, + "no_timestamps_token_id": 50363, + "pad_token_id": 50257, + "prev_sot_token_id": 50361, + "return_timestamps": false, + "suppress_tokens": [ + 1, + 2, + 7, + 8, + 9, + 10, + 14, + 25, + 26, + 27, + 28, + 29, + 31, + 58, + 59, + 60, + 61, + 62, + 63, + 90, + 91, + 92, + 93, + 359, + 503, + 522, + 542, + 873, + 893, + 902, + 918, + 922, + 931, + 1350, + 1853, + 1982, + 2460, + 2627, + 3246, + 3253, + 3268, + 3536, + 3846, + 3961, + 4183, + 4667, + 6585, + 6647, + 7273, + 9061, + 9383, + 10428, + 10929, + 11938, + 12033, + 12331, + 12562, + 13793, + 14157, + 14635, + 15265, + 15618, + 16553, + 16604, + 18362, + 18956, + 20075, + 21675, + 22520, + 26130, + 26161, + 26435, + 28279, + 29464, + 31650, + 32302, + 32470, + 36865, + 42863, + 47425, + 49870, + 50254, + 50258, + 50358, + 50359, + 50360, + 50361, + 50362 + ], + "task_to_id": { + "transcribe": 50359, + "translate": 50358 + }, + "transformers_version": "4.42.0.dev0" +} diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/model.safetensors b/whisper/whisper_base_finetuned/checkpoint-9730/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..950c821051deed0c0adfe241789e4a25a23001a1 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03e9cfe5ee86f9e21d408022d52d567bd7efc032907a8471aa8f9a7fac4898cf +size 290403936 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/optimizer.pt b/whisper/whisper_base_finetuned/checkpoint-9730/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..d2585e54cfeabe6badb9c5f4ea2b19eff5dcb213 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7cda38c6ad5c6721aad9c9aec0e44b58367283c048ea387c021c671ae92f88b +size 574811514 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/preprocessor_config.json b/whisper/whisper_base_finetuned/checkpoint-9730/preprocessor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..91876762a536a746d268353c5cba57286e76b058 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/preprocessor_config.json @@ -0,0 +1,14 @@ +{ + "chunk_length": 30, + "feature_extractor_type": "WhisperFeatureExtractor", + "feature_size": 80, + "hop_length": 160, + "n_fft": 400, + "n_samples": 480000, + "nb_max_frames": 3000, + "padding_side": "right", + "padding_value": 0.0, + "processor_class": "WhisperProcessor", + "return_attention_mask": false, + "sampling_rate": 16000 +} diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_0.pth b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_0.pth new file mode 100644 index 0000000000000000000000000000000000000000..7f727848c2bfedf310a49780c1de3e284e69e2b8 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_0.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:0b7fbfa4e910ce544e97f6dbeb7b208f4a49b65f2effd74d57de1af0d71c00d4 +size 15024 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_1.pth b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_1.pth new file mode 100644 index 0000000000000000000000000000000000000000..6027ff8500f65a8d37ad317adb8aeca8e304ea3f --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_1.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b0142621a5bb1159d191b072ebd4a5652c1a11c8ae996f962a0fb69a7de1e8bd +size 15024 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_2.pth b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_2.pth new file mode 100644 index 0000000000000000000000000000000000000000..43a2d90fcc0c54516c5e05d24e91782a8d0f1914 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_2.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e628ef532a484d3fd015c141ac910a0e1bf2534dc77ccd7bf11f0599d3a390d5 +size 15024 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_3.pth b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_3.pth new file mode 100644 index 0000000000000000000000000000000000000000..83c50ee23648caf8ba55ea2dda4623a892e65e47 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/rng_state_3.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:85c8d0adbc97f743992cc26dc8369f3b254ab17f01ecb27067c8bc9a87bec039 +size 15024 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/scheduler.pt b/whisper/whisper_base_finetuned/checkpoint-9730/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..ca2c327186c42d9a462a1db5e0dd14700b98b336 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb1923bd551b2f286111cc5d97750c031553a4df582d098f6ee5d7f8fef85f9e +size 1064 diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/trainer_state.json b/whisper/whisper_base_finetuned/checkpoint-9730/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..50908bb1c1ad94a729e394266e832d7422bf6380 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/trainer_state.json @@ -0,0 +1,6934 @@ +{ + "best_metric": 0.48120656465137523, + "best_model_checkpoint": "./whisper_base_finetuned/checkpoint-1946", + "epoch": 10.0, + "eval_steps": 500, + "global_step": 9730, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.010277492291880781, + "grad_norm": 78.41651916503906, + "learning_rate": 1.0000000000000001e-07, + "loss": 3.3505, + "step": 10 + }, + { + "epoch": 0.020554984583761562, + "grad_norm": 74.72834777832031, + "learning_rate": 3.0000000000000004e-07, + "loss": 3.2167, + "step": 20 + }, + { + "epoch": 0.030832476875642344, + "grad_norm": 48.560585021972656, + "learning_rate": 5.000000000000001e-07, + "loss": 2.9513, + "step": 30 + }, + { + "epoch": 0.041109969167523124, + "grad_norm": 30.085025787353516, + "learning_rate": 7.000000000000001e-07, + "loss": 2.6321, + "step": 40 + }, + { + "epoch": 0.051387461459403906, + "grad_norm": 21.896045684814453, + "learning_rate": 9.000000000000001e-07, + "loss": 2.376, + "step": 50 + }, + { + "epoch": 0.06166495375128469, + "grad_norm": 16.388851165771484, + "learning_rate": 1.1e-06, + "loss": 2.1473, + "step": 60 + }, + { + 
"epoch": 0.07194244604316546, + "grad_norm": 15.245757102966309, + "learning_rate": 1.3e-06, + "loss": 1.9096, + "step": 70 + }, + { + "epoch": 0.08221993833504625, + "grad_norm": 12.178542137145996, + "learning_rate": 1.5e-06, + "loss": 1.7493, + "step": 80 + }, + { + "epoch": 0.09249743062692703, + "grad_norm": 9.62636947631836, + "learning_rate": 1.7000000000000002e-06, + "loss": 1.5233, + "step": 90 + }, + { + "epoch": 0.10277492291880781, + "grad_norm": 9.04529094696045, + "learning_rate": 1.9000000000000002e-06, + "loss": 1.3753, + "step": 100 + }, + { + "epoch": 0.1130524152106886, + "grad_norm": 8.049132347106934, + "learning_rate": 2.1000000000000002e-06, + "loss": 1.2293, + "step": 110 + }, + { + "epoch": 0.12332990750256938, + "grad_norm": 7.2756147384643555, + "learning_rate": 2.3000000000000004e-06, + "loss": 1.1131, + "step": 120 + }, + { + "epoch": 0.13360739979445016, + "grad_norm": 7.049572944641113, + "learning_rate": 2.5e-06, + "loss": 1.1889, + "step": 130 + }, + { + "epoch": 0.14388489208633093, + "grad_norm": 6.429234981536865, + "learning_rate": 2.7000000000000004e-06, + "loss": 1.0655, + "step": 140 + }, + { + "epoch": 0.15416238437821173, + "grad_norm": 6.281942844390869, + "learning_rate": 2.9e-06, + "loss": 0.9696, + "step": 150 + }, + { + "epoch": 0.1644398766700925, + "grad_norm": 6.72721004486084, + "learning_rate": 3.1000000000000004e-06, + "loss": 1.0683, + "step": 160 + }, + { + "epoch": 0.1747173689619733, + "grad_norm": 5.773904800415039, + "learning_rate": 3.3000000000000006e-06, + "loss": 1.0132, + "step": 170 + }, + { + "epoch": 0.18499486125385406, + "grad_norm": 6.2021870613098145, + "learning_rate": 3.5e-06, + "loss": 0.9325, + "step": 180 + }, + { + "epoch": 0.19527235354573483, + "grad_norm": 6.268314838409424, + "learning_rate": 3.7e-06, + "loss": 0.8658, + "step": 190 + }, + { + "epoch": 0.20554984583761562, + "grad_norm": 6.014781951904297, + "learning_rate": 3.900000000000001e-06, + "loss": 0.9349, + "step": 200 + }, + { + "epoch": 0.2158273381294964, + "grad_norm": 7.0086188316345215, + "learning_rate": 4.1e-06, + "loss": 0.9347, + "step": 210 + }, + { + "epoch": 0.2261048304213772, + "grad_norm": 6.318301200866699, + "learning_rate": 4.3e-06, + "loss": 0.8023, + "step": 220 + }, + { + "epoch": 0.23638232271325796, + "grad_norm": 5.627261638641357, + "learning_rate": 4.5e-06, + "loss": 0.871, + "step": 230 + }, + { + "epoch": 0.24665981500513876, + "grad_norm": 5.793784141540527, + "learning_rate": 4.7e-06, + "loss": 0.8654, + "step": 240 + }, + { + "epoch": 0.2569373072970195, + "grad_norm": 5.449838638305664, + "learning_rate": 4.9000000000000005e-06, + "loss": 0.7499, + "step": 250 + }, + { + "epoch": 0.2672147995889003, + "grad_norm": 4.993557453155518, + "learning_rate": 5.1e-06, + "loss": 0.7281, + "step": 260 + }, + { + "epoch": 0.2774922918807811, + "grad_norm": 6.478201866149902, + "learning_rate": 5.300000000000001e-06, + "loss": 0.7543, + "step": 270 + }, + { + "epoch": 0.28776978417266186, + "grad_norm": 5.355103492736816, + "learning_rate": 5.500000000000001e-06, + "loss": 0.7545, + "step": 280 + }, + { + "epoch": 0.29804727646454265, + "grad_norm": 4.926327228546143, + "learning_rate": 5.7e-06, + "loss": 0.7397, + "step": 290 + }, + { + "epoch": 0.30832476875642345, + "grad_norm": 5.97158145904541, + "learning_rate": 5.9e-06, + "loss": 0.7262, + "step": 300 + }, + { + "epoch": 0.3186022610483042, + "grad_norm": 5.3029584884643555, + "learning_rate": 6.1e-06, + "loss": 0.7085, + "step": 310 + }, + { + "epoch": 0.328879753340185, 
+ "grad_norm": 4.928351879119873, + "learning_rate": 6.300000000000001e-06, + "loss": 0.6726, + "step": 320 + }, + { + "epoch": 0.3391572456320658, + "grad_norm": 5.714938640594482, + "learning_rate": 6.5000000000000004e-06, + "loss": 0.6985, + "step": 330 + }, + { + "epoch": 0.3494347379239466, + "grad_norm": 4.394782543182373, + "learning_rate": 6.700000000000001e-06, + "loss": 0.6555, + "step": 340 + }, + { + "epoch": 0.3597122302158273, + "grad_norm": 4.944245338439941, + "learning_rate": 6.9e-06, + "loss": 0.6629, + "step": 350 + }, + { + "epoch": 0.3699897225077081, + "grad_norm": 5.048007965087891, + "learning_rate": 7.100000000000001e-06, + "loss": 0.65, + "step": 360 + }, + { + "epoch": 0.3802672147995889, + "grad_norm": 5.267819404602051, + "learning_rate": 7.3e-06, + "loss": 0.5775, + "step": 370 + }, + { + "epoch": 0.39054470709146966, + "grad_norm": 5.552557945251465, + "learning_rate": 7.500000000000001e-06, + "loss": 0.683, + "step": 380 + }, + { + "epoch": 0.40082219938335045, + "grad_norm": 4.37172269821167, + "learning_rate": 7.7e-06, + "loss": 0.5931, + "step": 390 + }, + { + "epoch": 0.41109969167523125, + "grad_norm": 5.033542156219482, + "learning_rate": 7.9e-06, + "loss": 0.6662, + "step": 400 + }, + { + "epoch": 0.42137718396711205, + "grad_norm": 4.7163190841674805, + "learning_rate": 8.1e-06, + "loss": 0.7605, + "step": 410 + }, + { + "epoch": 0.4316546762589928, + "grad_norm": 5.401676177978516, + "learning_rate": 8.3e-06, + "loss": 0.5939, + "step": 420 + }, + { + "epoch": 0.4419321685508736, + "grad_norm": 5.293227672576904, + "learning_rate": 8.5e-06, + "loss": 0.6061, + "step": 430 + }, + { + "epoch": 0.4522096608427544, + "grad_norm": 5.0345940589904785, + "learning_rate": 8.700000000000001e-06, + "loss": 0.5386, + "step": 440 + }, + { + "epoch": 0.4624871531346352, + "grad_norm": 4.590668678283691, + "learning_rate": 8.900000000000001e-06, + "loss": 0.5455, + "step": 450 + }, + { + "epoch": 0.4727646454265159, + "grad_norm": 5.621304512023926, + "learning_rate": 9.100000000000001e-06, + "loss": 0.514, + "step": 460 + }, + { + "epoch": 0.4830421377183967, + "grad_norm": 5.061606407165527, + "learning_rate": 9.3e-06, + "loss": 0.5945, + "step": 470 + }, + { + "epoch": 0.4933196300102775, + "grad_norm": 4.499748229980469, + "learning_rate": 9.5e-06, + "loss": 0.5241, + "step": 480 + }, + { + "epoch": 0.5035971223021583, + "grad_norm": 4.244873523712158, + "learning_rate": 9.7e-06, + "loss": 0.5179, + "step": 490 + }, + { + "epoch": 0.513874614594039, + "grad_norm": 5.057434558868408, + "learning_rate": 9.9e-06, + "loss": 0.5744, + "step": 500 + }, + { + "epoch": 0.5241521068859198, + "grad_norm": 5.502700328826904, + "learning_rate": 9.994582881906827e-06, + "loss": 0.5752, + "step": 510 + }, + { + "epoch": 0.5344295991778006, + "grad_norm": 4.976474285125732, + "learning_rate": 9.983748645720479e-06, + "loss": 0.6109, + "step": 520 + }, + { + "epoch": 0.5447070914696814, + "grad_norm": 4.148839473724365, + "learning_rate": 9.972914409534129e-06, + "loss": 0.5643, + "step": 530 + }, + { + "epoch": 0.5549845837615622, + "grad_norm": 3.930744171142578, + "learning_rate": 9.96208017334778e-06, + "loss": 0.4671, + "step": 540 + }, + { + "epoch": 0.5652620760534429, + "grad_norm": 5.741941928863525, + "learning_rate": 9.95124593716143e-06, + "loss": 0.5608, + "step": 550 + }, + { + "epoch": 0.5755395683453237, + "grad_norm": 4.145395755767822, + "learning_rate": 9.940411700975083e-06, + "loss": 0.4698, + "step": 560 + }, + { + "epoch": 0.5858170606372045, + 
"grad_norm": 4.2532830238342285, + "learning_rate": 9.929577464788733e-06, + "loss": 0.4084, + "step": 570 + }, + { + "epoch": 0.5960945529290853, + "grad_norm": 4.680564880371094, + "learning_rate": 9.918743228602385e-06, + "loss": 0.6091, + "step": 580 + }, + { + "epoch": 0.6063720452209661, + "grad_norm": 3.8156168460845947, + "learning_rate": 9.907908992416035e-06, + "loss": 0.5567, + "step": 590 + }, + { + "epoch": 0.6166495375128469, + "grad_norm": 4.722325801849365, + "learning_rate": 9.897074756229687e-06, + "loss": 0.5543, + "step": 600 + }, + { + "epoch": 0.6269270298047277, + "grad_norm": 5.177743911743164, + "learning_rate": 9.886240520043338e-06, + "loss": 0.477, + "step": 610 + }, + { + "epoch": 0.6372045220966084, + "grad_norm": 4.9859209060668945, + "learning_rate": 9.875406283856989e-06, + "loss": 0.4592, + "step": 620 + }, + { + "epoch": 0.6474820143884892, + "grad_norm": 4.872037887573242, + "learning_rate": 9.86457204767064e-06, + "loss": 0.5632, + "step": 630 + }, + { + "epoch": 0.65775950668037, + "grad_norm": 4.967211723327637, + "learning_rate": 9.85373781148429e-06, + "loss": 0.5553, + "step": 640 + }, + { + "epoch": 0.6680369989722508, + "grad_norm": 4.748555660247803, + "learning_rate": 9.842903575297942e-06, + "loss": 0.5228, + "step": 650 + }, + { + "epoch": 0.6783144912641316, + "grad_norm": 4.945960521697998, + "learning_rate": 9.832069339111592e-06, + "loss": 0.5018, + "step": 660 + }, + { + "epoch": 0.6885919835560124, + "grad_norm": 3.7931437492370605, + "learning_rate": 9.821235102925244e-06, + "loss": 0.5347, + "step": 670 + }, + { + "epoch": 0.6988694758478932, + "grad_norm": 4.149494171142578, + "learning_rate": 9.810400866738896e-06, + "loss": 0.413, + "step": 680 + }, + { + "epoch": 0.7091469681397738, + "grad_norm": 4.979891300201416, + "learning_rate": 9.799566630552548e-06, + "loss": 0.4836, + "step": 690 + }, + { + "epoch": 0.7194244604316546, + "grad_norm": 5.043586730957031, + "learning_rate": 9.788732394366198e-06, + "loss": 0.5286, + "step": 700 + }, + { + "epoch": 0.7297019527235354, + "grad_norm": 4.017364978790283, + "learning_rate": 9.77789815817985e-06, + "loss": 0.3785, + "step": 710 + }, + { + "epoch": 0.7399794450154162, + "grad_norm": 4.4453959465026855, + "learning_rate": 9.7670639219935e-06, + "loss": 0.52, + "step": 720 + }, + { + "epoch": 0.750256937307297, + "grad_norm": 4.624840259552002, + "learning_rate": 9.756229685807152e-06, + "loss": 0.5339, + "step": 730 + }, + { + "epoch": 0.7605344295991778, + "grad_norm": 4.6119771003723145, + "learning_rate": 9.745395449620802e-06, + "loss": 0.4857, + "step": 740 + }, + { + "epoch": 0.7708119218910586, + "grad_norm": 4.147925853729248, + "learning_rate": 9.734561213434454e-06, + "loss": 0.4363, + "step": 750 + }, + { + "epoch": 0.7810894141829393, + "grad_norm": 5.529519557952881, + "learning_rate": 9.723726977248104e-06, + "loss": 0.5206, + "step": 760 + }, + { + "epoch": 0.7913669064748201, + "grad_norm": 3.9015376567840576, + "learning_rate": 9.712892741061756e-06, + "loss": 0.4836, + "step": 770 + }, + { + "epoch": 0.8016443987667009, + "grad_norm": 4.5102057456970215, + "learning_rate": 9.702058504875406e-06, + "loss": 0.4437, + "step": 780 + }, + { + "epoch": 0.8119218910585817, + "grad_norm": 5.272336006164551, + "learning_rate": 9.691224268689058e-06, + "loss": 0.4402, + "step": 790 + }, + { + "epoch": 0.8221993833504625, + "grad_norm": 4.404648303985596, + "learning_rate": 9.68039003250271e-06, + "loss": 0.4443, + "step": 800 + }, + { + "epoch": 0.8324768756423433, + 
"grad_norm": 4.636880397796631, + "learning_rate": 9.66955579631636e-06, + "loss": 0.4943, + "step": 810 + }, + { + "epoch": 0.8427543679342241, + "grad_norm": 4.826484203338623, + "learning_rate": 9.658721560130012e-06, + "loss": 0.5385, + "step": 820 + }, + { + "epoch": 0.8530318602261048, + "grad_norm": 4.46310567855835, + "learning_rate": 9.647887323943664e-06, + "loss": 0.413, + "step": 830 + }, + { + "epoch": 0.8633093525179856, + "grad_norm": 4.603589057922363, + "learning_rate": 9.637053087757314e-06, + "loss": 0.4801, + "step": 840 + }, + { + "epoch": 0.8735868448098664, + "grad_norm": 3.7884819507598877, + "learning_rate": 9.626218851570966e-06, + "loss": 0.4294, + "step": 850 + }, + { + "epoch": 0.8838643371017472, + "grad_norm": 3.0480997562408447, + "learning_rate": 9.615384615384616e-06, + "loss": 0.4737, + "step": 860 + }, + { + "epoch": 0.894141829393628, + "grad_norm": 4.840622901916504, + "learning_rate": 9.604550379198268e-06, + "loss": 0.4806, + "step": 870 + }, + { + "epoch": 0.9044193216855088, + "grad_norm": 4.64235782623291, + "learning_rate": 9.59371614301192e-06, + "loss": 0.5359, + "step": 880 + }, + { + "epoch": 0.9146968139773896, + "grad_norm": 4.615347385406494, + "learning_rate": 9.58288190682557e-06, + "loss": 0.4526, + "step": 890 + }, + { + "epoch": 0.9249743062692704, + "grad_norm": 4.345542907714844, + "learning_rate": 9.572047670639221e-06, + "loss": 0.5112, + "step": 900 + }, + { + "epoch": 0.935251798561151, + "grad_norm": 3.5318965911865234, + "learning_rate": 9.561213434452872e-06, + "loss": 0.455, + "step": 910 + }, + { + "epoch": 0.9455292908530318, + "grad_norm": 4.852155685424805, + "learning_rate": 9.550379198266523e-06, + "loss": 0.5012, + "step": 920 + }, + { + "epoch": 0.9558067831449126, + "grad_norm": 4.666072368621826, + "learning_rate": 9.539544962080174e-06, + "loss": 0.4781, + "step": 930 + }, + { + "epoch": 0.9660842754367934, + "grad_norm": 4.7242865562438965, + "learning_rate": 9.528710725893825e-06, + "loss": 0.5102, + "step": 940 + }, + { + "epoch": 0.9763617677286742, + "grad_norm": 3.9831533432006836, + "learning_rate": 9.517876489707475e-06, + "loss": 0.416, + "step": 950 + }, + { + "epoch": 0.986639260020555, + "grad_norm": 4.294024467468262, + "learning_rate": 9.507042253521127e-06, + "loss": 0.4254, + "step": 960 + }, + { + "epoch": 0.9969167523124358, + "grad_norm": 4.132877826690674, + "learning_rate": 9.496208017334777e-06, + "loss": 0.4111, + "step": 970 + }, + { + "epoch": 1.0, + "eval_loss": 0.4590415954589844, + "eval_runtime": 1119.5991, + "eval_samples_per_second": 5.241, + "eval_steps_per_second": 0.164, + "eval_wer": 0.45510457111161867, + "step": 973 + }, + { + "epoch": 1.0071942446043165, + "grad_norm": 3.725668430328369, + "learning_rate": 9.485373781148431e-06, + "loss": 0.3556, + "step": 980 + }, + { + "epoch": 1.0174717368961974, + "grad_norm": 4.162373065948486, + "learning_rate": 9.474539544962081e-06, + "loss": 0.3531, + "step": 990 + }, + { + "epoch": 1.027749229188078, + "grad_norm": 3.697767734527588, + "learning_rate": 9.463705308775733e-06, + "loss": 0.3627, + "step": 1000 + }, + { + "epoch": 1.0380267214799588, + "grad_norm": 4.862727642059326, + "learning_rate": 9.452871072589383e-06, + "loss": 0.4393, + "step": 1010 + }, + { + "epoch": 1.0483042137718397, + "grad_norm": 4.021687030792236, + "learning_rate": 9.442036836403035e-06, + "loss": 0.4544, + "step": 1020 + }, + { + "epoch": 1.0585817060637204, + "grad_norm": 3.82734751701355, + "learning_rate": 9.431202600216685e-06, + "loss": 0.3937, + 
"step": 1030 + }, + { + "epoch": 1.0688591983556013, + "grad_norm": 3.5762205123901367, + "learning_rate": 9.420368364030337e-06, + "loss": 0.4722, + "step": 1040 + }, + { + "epoch": 1.079136690647482, + "grad_norm": 4.2156147956848145, + "learning_rate": 9.409534127843987e-06, + "loss": 0.3398, + "step": 1050 + }, + { + "epoch": 1.0894141829393629, + "grad_norm": 4.575065612792969, + "learning_rate": 9.398699891657639e-06, + "loss": 0.3457, + "step": 1060 + }, + { + "epoch": 1.0996916752312436, + "grad_norm": 3.8678557872772217, + "learning_rate": 9.387865655471289e-06, + "loss": 0.3178, + "step": 1070 + }, + { + "epoch": 1.1099691675231242, + "grad_norm": 4.01522970199585, + "learning_rate": 9.377031419284941e-06, + "loss": 0.4911, + "step": 1080 + }, + { + "epoch": 1.1202466598150052, + "grad_norm": 4.648536205291748, + "learning_rate": 9.366197183098593e-06, + "loss": 0.4531, + "step": 1090 + }, + { + "epoch": 1.1305241521068858, + "grad_norm": 4.106440544128418, + "learning_rate": 9.355362946912243e-06, + "loss": 0.3942, + "step": 1100 + }, + { + "epoch": 1.1408016443987667, + "grad_norm": 4.35145378112793, + "learning_rate": 9.344528710725895e-06, + "loss": 0.4071, + "step": 1110 + }, + { + "epoch": 1.1510791366906474, + "grad_norm": 4.132904529571533, + "learning_rate": 9.333694474539545e-06, + "loss": 0.4148, + "step": 1120 + }, + { + "epoch": 1.1613566289825283, + "grad_norm": 3.6607720851898193, + "learning_rate": 9.322860238353197e-06, + "loss": 0.298, + "step": 1130 + }, + { + "epoch": 1.171634121274409, + "grad_norm": 3.625558376312256, + "learning_rate": 9.312026002166849e-06, + "loss": 0.4097, + "step": 1140 + }, + { + "epoch": 1.1819116135662897, + "grad_norm": 4.5726494789123535, + "learning_rate": 9.301191765980499e-06, + "loss": 0.3953, + "step": 1150 + }, + { + "epoch": 1.1921891058581706, + "grad_norm": 4.445627212524414, + "learning_rate": 9.29035752979415e-06, + "loss": 0.3835, + "step": 1160 + }, + { + "epoch": 1.2024665981500513, + "grad_norm": 4.57354211807251, + "learning_rate": 9.279523293607802e-06, + "loss": 0.3832, + "step": 1170 + }, + { + "epoch": 1.2127440904419322, + "grad_norm": 4.104644775390625, + "learning_rate": 9.268689057421453e-06, + "loss": 0.416, + "step": 1180 + }, + { + "epoch": 1.223021582733813, + "grad_norm": 4.074865818023682, + "learning_rate": 9.257854821235104e-06, + "loss": 0.3608, + "step": 1190 + }, + { + "epoch": 1.2332990750256938, + "grad_norm": 3.4953205585479736, + "learning_rate": 9.247020585048755e-06, + "loss": 0.3351, + "step": 1200 + }, + { + "epoch": 1.2435765673175745, + "grad_norm": 4.539699077606201, + "learning_rate": 9.236186348862406e-06, + "loss": 0.4058, + "step": 1210 + }, + { + "epoch": 1.2538540596094552, + "grad_norm": 4.369785308837891, + "learning_rate": 9.225352112676057e-06, + "loss": 0.3412, + "step": 1220 + }, + { + "epoch": 1.264131551901336, + "grad_norm": 3.9678955078125, + "learning_rate": 9.214517876489708e-06, + "loss": 0.3832, + "step": 1230 + }, + { + "epoch": 1.274409044193217, + "grad_norm": 4.361431121826172, + "learning_rate": 9.203683640303359e-06, + "loss": 0.4099, + "step": 1240 + }, + { + "epoch": 1.2846865364850977, + "grad_norm": 4.076035022735596, + "learning_rate": 9.19284940411701e-06, + "loss": 0.4206, + "step": 1250 + }, + { + "epoch": 1.2949640287769784, + "grad_norm": 4.084390640258789, + "learning_rate": 9.18201516793066e-06, + "loss": 0.4271, + "step": 1260 + }, + { + "epoch": 1.3052415210688593, + "grad_norm": 3.262382745742798, + "learning_rate": 9.171180931744312e-06, + 
"loss": 0.3687, + "step": 1270 + }, + { + "epoch": 1.31551901336074, + "grad_norm": 4.841338634490967, + "learning_rate": 9.160346695557964e-06, + "loss": 0.3659, + "step": 1280 + }, + { + "epoch": 1.3257965056526206, + "grad_norm": 4.267407417297363, + "learning_rate": 9.149512459371616e-06, + "loss": 0.3784, + "step": 1290 + }, + { + "epoch": 1.3360739979445015, + "grad_norm": 3.525167465209961, + "learning_rate": 9.138678223185266e-06, + "loss": 0.3593, + "step": 1300 + }, + { + "epoch": 1.3463514902363825, + "grad_norm": 4.050453186035156, + "learning_rate": 9.127843986998918e-06, + "loss": 0.3737, + "step": 1310 + }, + { + "epoch": 1.3566289825282631, + "grad_norm": 3.8488476276397705, + "learning_rate": 9.117009750812568e-06, + "loss": 0.417, + "step": 1320 + }, + { + "epoch": 1.3669064748201438, + "grad_norm": 4.5935540199279785, + "learning_rate": 9.10617551462622e-06, + "loss": 0.4056, + "step": 1330 + }, + { + "epoch": 1.3771839671120247, + "grad_norm": 4.8325581550598145, + "learning_rate": 9.09534127843987e-06, + "loss": 0.4208, + "step": 1340 + }, + { + "epoch": 1.3874614594039054, + "grad_norm": 4.0957818031311035, + "learning_rate": 9.084507042253522e-06, + "loss": 0.408, + "step": 1350 + }, + { + "epoch": 1.397738951695786, + "grad_norm": 5.2503767013549805, + "learning_rate": 9.073672806067174e-06, + "loss": 0.4164, + "step": 1360 + }, + { + "epoch": 1.408016443987667, + "grad_norm": 3.5155766010284424, + "learning_rate": 9.062838569880824e-06, + "loss": 0.3548, + "step": 1370 + }, + { + "epoch": 1.418293936279548, + "grad_norm": 3.994086503982544, + "learning_rate": 9.052004333694476e-06, + "loss": 0.4025, + "step": 1380 + }, + { + "epoch": 1.4285714285714286, + "grad_norm": 5.403133392333984, + "learning_rate": 9.041170097508126e-06, + "loss": 0.4171, + "step": 1390 + }, + { + "epoch": 1.4388489208633093, + "grad_norm": 3.069812774658203, + "learning_rate": 9.030335861321778e-06, + "loss": 0.3431, + "step": 1400 + }, + { + "epoch": 1.4491264131551902, + "grad_norm": 3.9196174144744873, + "learning_rate": 9.019501625135428e-06, + "loss": 0.4507, + "step": 1410 + }, + { + "epoch": 1.4594039054470709, + "grad_norm": 3.6087749004364014, + "learning_rate": 9.00866738894908e-06, + "loss": 0.336, + "step": 1420 + }, + { + "epoch": 1.4696813977389516, + "grad_norm": 4.544300079345703, + "learning_rate": 8.99783315276273e-06, + "loss": 0.4188, + "step": 1430 + }, + { + "epoch": 1.4799588900308325, + "grad_norm": 3.7331314086914062, + "learning_rate": 8.986998916576382e-06, + "loss": 0.4092, + "step": 1440 + }, + { + "epoch": 1.4902363823227134, + "grad_norm": 4.780219078063965, + "learning_rate": 8.976164680390034e-06, + "loss": 0.3587, + "step": 1450 + }, + { + "epoch": 1.500513874614594, + "grad_norm": 4.3289690017700195, + "learning_rate": 8.965330444203685e-06, + "loss": 0.3504, + "step": 1460 + }, + { + "epoch": 1.5107913669064748, + "grad_norm": 4.810067653656006, + "learning_rate": 8.954496208017336e-06, + "loss": 0.3261, + "step": 1470 + }, + { + "epoch": 1.5210688591983557, + "grad_norm": 4.626963138580322, + "learning_rate": 8.943661971830987e-06, + "loss": 0.41, + "step": 1480 + }, + { + "epoch": 1.5313463514902363, + "grad_norm": 4.636073589324951, + "learning_rate": 8.932827735644638e-06, + "loss": 0.3462, + "step": 1490 + }, + { + "epoch": 1.541623843782117, + "grad_norm": 4.311732769012451, + "learning_rate": 8.92199349945829e-06, + "loss": 0.3888, + "step": 1500 + }, + { + "epoch": 1.551901336073998, + "grad_norm": 4.3718461990356445, + "learning_rate": 
8.91115926327194e-06, + "loss": 0.3813, + "step": 1510 + }, + { + "epoch": 1.5621788283658788, + "grad_norm": 3.1939361095428467, + "learning_rate": 8.900325027085591e-06, + "loss": 0.4208, + "step": 1520 + }, + { + "epoch": 1.5724563206577595, + "grad_norm": 4.095200538635254, + "learning_rate": 8.889490790899242e-06, + "loss": 0.3845, + "step": 1530 + }, + { + "epoch": 1.5827338129496402, + "grad_norm": 3.5258431434631348, + "learning_rate": 8.878656554712893e-06, + "loss": 0.3284, + "step": 1540 + }, + { + "epoch": 1.5930113052415211, + "grad_norm": 3.1825735569000244, + "learning_rate": 8.867822318526545e-06, + "loss": 0.3385, + "step": 1550 + }, + { + "epoch": 1.6032887975334018, + "grad_norm": 3.5939745903015137, + "learning_rate": 8.856988082340195e-06, + "loss": 0.4101, + "step": 1560 + }, + { + "epoch": 1.6135662898252825, + "grad_norm": 4.911982536315918, + "learning_rate": 8.846153846153847e-06, + "loss": 0.4328, + "step": 1570 + }, + { + "epoch": 1.6238437821171634, + "grad_norm": 3.6301517486572266, + "learning_rate": 8.835319609967497e-06, + "loss": 0.3399, + "step": 1580 + }, + { + "epoch": 1.6341212744090443, + "grad_norm": 3.6853671073913574, + "learning_rate": 8.82448537378115e-06, + "loss": 0.3271, + "step": 1590 + }, + { + "epoch": 1.644398766700925, + "grad_norm": 3.029378652572632, + "learning_rate": 8.8136511375948e-06, + "loss": 0.3638, + "step": 1600 + }, + { + "epoch": 1.6546762589928057, + "grad_norm": 4.740921497344971, + "learning_rate": 8.802816901408451e-06, + "loss": 0.2964, + "step": 1610 + }, + { + "epoch": 1.6649537512846866, + "grad_norm": 4.348399639129639, + "learning_rate": 8.791982665222103e-06, + "loss": 0.364, + "step": 1620 + }, + { + "epoch": 1.6752312435765673, + "grad_norm": 4.521662712097168, + "learning_rate": 8.781148429035755e-06, + "loss": 0.3946, + "step": 1630 + }, + { + "epoch": 1.685508735868448, + "grad_norm": 4.327390670776367, + "learning_rate": 8.770314192849405e-06, + "loss": 0.3325, + "step": 1640 + }, + { + "epoch": 1.6957862281603289, + "grad_norm": 4.260695934295654, + "learning_rate": 8.759479956663057e-06, + "loss": 0.4112, + "step": 1650 + }, + { + "epoch": 1.7060637204522098, + "grad_norm": 3.723114490509033, + "learning_rate": 8.748645720476707e-06, + "loss": 0.3777, + "step": 1660 + }, + { + "epoch": 1.7163412127440905, + "grad_norm": 3.6276798248291016, + "learning_rate": 8.737811484290359e-06, + "loss": 0.4307, + "step": 1670 + }, + { + "epoch": 1.7266187050359711, + "grad_norm": 4.2474446296691895, + "learning_rate": 8.726977248104009e-06, + "loss": 0.3451, + "step": 1680 + }, + { + "epoch": 1.736896197327852, + "grad_norm": 4.7757368087768555, + "learning_rate": 8.71614301191766e-06, + "loss": 0.3528, + "step": 1690 + }, + { + "epoch": 1.7471736896197327, + "grad_norm": 3.768132209777832, + "learning_rate": 8.705308775731311e-06, + "loss": 0.4086, + "step": 1700 + }, + { + "epoch": 1.7574511819116134, + "grad_norm": 4.793600559234619, + "learning_rate": 8.694474539544963e-06, + "loss": 0.414, + "step": 1710 + }, + { + "epoch": 1.7677286742034943, + "grad_norm": 4.651284217834473, + "learning_rate": 8.683640303358613e-06, + "loss": 0.3424, + "step": 1720 + }, + { + "epoch": 1.7780061664953752, + "grad_norm": 3.656557083129883, + "learning_rate": 8.672806067172265e-06, + "loss": 0.2813, + "step": 1730 + }, + { + "epoch": 1.788283658787256, + "grad_norm": 3.836421012878418, + "learning_rate": 8.661971830985915e-06, + "loss": 0.3834, + "step": 1740 + }, + { + "epoch": 1.7985611510791366, + "grad_norm": 4.500270843505859, 
+ "learning_rate": 8.651137594799567e-06, + "loss": 0.337, + "step": 1750 + }, + { + "epoch": 1.8088386433710175, + "grad_norm": 3.3500618934631348, + "learning_rate": 8.640303358613219e-06, + "loss": 0.393, + "step": 1760 + }, + { + "epoch": 1.8191161356628982, + "grad_norm": 3.52258563041687, + "learning_rate": 8.62946912242687e-06, + "loss": 0.397, + "step": 1770 + }, + { + "epoch": 1.829393627954779, + "grad_norm": 4.57402229309082, + "learning_rate": 8.61863488624052e-06, + "loss": 0.4048, + "step": 1780 + }, + { + "epoch": 1.8396711202466598, + "grad_norm": 3.952526092529297, + "learning_rate": 8.607800650054172e-06, + "loss": 0.316, + "step": 1790 + }, + { + "epoch": 1.8499486125385407, + "grad_norm": 4.35211706161499, + "learning_rate": 8.596966413867823e-06, + "loss": 0.4373, + "step": 1800 + }, + { + "epoch": 1.8602261048304214, + "grad_norm": 3.061844825744629, + "learning_rate": 8.586132177681474e-06, + "loss": 0.2665, + "step": 1810 + }, + { + "epoch": 1.870503597122302, + "grad_norm": 3.6002986431121826, + "learning_rate": 8.575297941495125e-06, + "loss": 0.3807, + "step": 1820 + }, + { + "epoch": 1.880781089414183, + "grad_norm": 4.012722492218018, + "learning_rate": 8.564463705308776e-06, + "loss": 0.376, + "step": 1830 + }, + { + "epoch": 1.8910585817060637, + "grad_norm": 3.516463041305542, + "learning_rate": 8.553629469122428e-06, + "loss": 0.3497, + "step": 1840 + }, + { + "epoch": 1.9013360739979444, + "grad_norm": 4.711485385894775, + "learning_rate": 8.542795232936078e-06, + "loss": 0.2968, + "step": 1850 + }, + { + "epoch": 1.9116135662898253, + "grad_norm": 3.4084625244140625, + "learning_rate": 8.53196099674973e-06, + "loss": 0.3475, + "step": 1860 + }, + { + "epoch": 1.9218910585817062, + "grad_norm": 3.9419453144073486, + "learning_rate": 8.52112676056338e-06, + "loss": 0.342, + "step": 1870 + }, + { + "epoch": 1.9321685508735869, + "grad_norm": 3.4985804557800293, + "learning_rate": 8.510292524377032e-06, + "loss": 0.3805, + "step": 1880 + }, + { + "epoch": 1.9424460431654675, + "grad_norm": 4.257175445556641, + "learning_rate": 8.499458288190682e-06, + "loss": 0.3727, + "step": 1890 + }, + { + "epoch": 1.9527235354573484, + "grad_norm": 3.3339366912841797, + "learning_rate": 8.488624052004334e-06, + "loss": 0.3684, + "step": 1900 + }, + { + "epoch": 1.9630010277492291, + "grad_norm": 4.362495422363281, + "learning_rate": 8.477789815817984e-06, + "loss": 0.3216, + "step": 1910 + }, + { + "epoch": 1.9732785200411098, + "grad_norm": 4.4011549949646, + "learning_rate": 8.466955579631638e-06, + "loss": 0.3437, + "step": 1920 + }, + { + "epoch": 1.9835560123329907, + "grad_norm": 3.513015031814575, + "learning_rate": 8.456121343445288e-06, + "loss": 0.3621, + "step": 1930 + }, + { + "epoch": 1.9938335046248716, + "grad_norm": 3.66763973236084, + "learning_rate": 8.44528710725894e-06, + "loss": 0.4068, + "step": 1940 + }, + { + "epoch": 2.0, + "eval_loss": 0.38473397493362427, + "eval_runtime": 2272.3326, + "eval_samples_per_second": 2.582, + "eval_steps_per_second": 0.081, + "eval_wer": 0.48120656465137523, + "step": 1946 + }, + { + "epoch": 2.004110996916752, + "grad_norm": 3.577178716659546, + "learning_rate": 8.43445287107259e-06, + "loss": 0.3205, + "step": 1950 + }, + { + "epoch": 2.014388489208633, + "grad_norm": 3.6927402019500732, + "learning_rate": 8.423618634886242e-06, + "loss": 0.3097, + "step": 1960 + }, + { + "epoch": 2.024665981500514, + "grad_norm": 3.644838333129883, + "learning_rate": 8.412784398699892e-06, + "loss": 0.3386, + "step": 1970 + }, + { 
+ "epoch": 2.034943473792395, + "grad_norm": 3.595942497253418, + "learning_rate": 8.401950162513544e-06, + "loss": 0.2982, + "step": 1980 + }, + { + "epoch": 2.0452209660842753, + "grad_norm": 4.100467205047607, + "learning_rate": 8.391115926327194e-06, + "loss": 0.2846, + "step": 1990 + }, + { + "epoch": 2.055498458376156, + "grad_norm": 3.059053421020508, + "learning_rate": 8.380281690140846e-06, + "loss": 0.2793, + "step": 2000 + }, + { + "epoch": 2.065775950668037, + "grad_norm": 3.1486964225769043, + "learning_rate": 8.369447453954496e-06, + "loss": 0.3014, + "step": 2010 + }, + { + "epoch": 2.0760534429599176, + "grad_norm": 3.5525431632995605, + "learning_rate": 8.358613217768148e-06, + "loss": 0.3615, + "step": 2020 + }, + { + "epoch": 2.0863309352517985, + "grad_norm": 3.877607583999634, + "learning_rate": 8.3477789815818e-06, + "loss": 0.3924, + "step": 2030 + }, + { + "epoch": 2.0966084275436794, + "grad_norm": 3.1528847217559814, + "learning_rate": 8.33694474539545e-06, + "loss": 0.3216, + "step": 2040 + }, + { + "epoch": 2.1068859198355603, + "grad_norm": 2.3761839866638184, + "learning_rate": 8.326110509209102e-06, + "loss": 0.2952, + "step": 2050 + }, + { + "epoch": 2.1171634121274407, + "grad_norm": 2.5675883293151855, + "learning_rate": 8.315276273022752e-06, + "loss": 0.3921, + "step": 2060 + }, + { + "epoch": 2.1274409044193217, + "grad_norm": 3.158750057220459, + "learning_rate": 8.304442036836404e-06, + "loss": 0.2995, + "step": 2070 + }, + { + "epoch": 2.1377183967112026, + "grad_norm": 4.566508769989014, + "learning_rate": 8.293607800650055e-06, + "loss": 0.3218, + "step": 2080 + }, + { + "epoch": 2.1479958890030835, + "grad_norm": 3.650635242462158, + "learning_rate": 8.282773564463706e-06, + "loss": 0.3943, + "step": 2090 + }, + { + "epoch": 2.158273381294964, + "grad_norm": 3.3389713764190674, + "learning_rate": 8.271939328277357e-06, + "loss": 0.2847, + "step": 2100 + }, + { + "epoch": 2.168550873586845, + "grad_norm": 4.3921685218811035, + "learning_rate": 8.26110509209101e-06, + "loss": 0.3476, + "step": 2110 + }, + { + "epoch": 2.1788283658787257, + "grad_norm": 4.2259087562561035, + "learning_rate": 8.25027085590466e-06, + "loss": 0.305, + "step": 2120 + }, + { + "epoch": 2.189105858170606, + "grad_norm": 3.849501609802246, + "learning_rate": 8.239436619718311e-06, + "loss": 0.2916, + "step": 2130 + }, + { + "epoch": 2.199383350462487, + "grad_norm": 4.750916957855225, + "learning_rate": 8.228602383531961e-06, + "loss": 0.3295, + "step": 2140 + }, + { + "epoch": 2.209660842754368, + "grad_norm": 2.9767019748687744, + "learning_rate": 8.217768147345613e-06, + "loss": 0.3359, + "step": 2150 + }, + { + "epoch": 2.2199383350462485, + "grad_norm": 3.6668782234191895, + "learning_rate": 8.206933911159263e-06, + "loss": 0.2647, + "step": 2160 + }, + { + "epoch": 2.2302158273381294, + "grad_norm": 4.5765790939331055, + "learning_rate": 8.196099674972915e-06, + "loss": 0.332, + "step": 2170 + }, + { + "epoch": 2.2404933196300103, + "grad_norm": 3.7282257080078125, + "learning_rate": 8.185265438786565e-06, + "loss": 0.2857, + "step": 2180 + }, + { + "epoch": 2.250770811921891, + "grad_norm": 4.175727367401123, + "learning_rate": 8.174431202600217e-06, + "loss": 0.3395, + "step": 2190 + }, + { + "epoch": 2.2610483042137717, + "grad_norm": 4.007727146148682, + "learning_rate": 8.163596966413867e-06, + "loss": 0.3229, + "step": 2200 + }, + { + "epoch": 2.2713257965056526, + "grad_norm": 3.212737560272217, + "learning_rate": 8.15276273022752e-06, + "loss": 0.2824, + 
"step": 2210 + }, + { + "epoch": 2.2816032887975335, + "grad_norm": 2.9416749477386475, + "learning_rate": 8.141928494041171e-06, + "loss": 0.2931, + "step": 2220 + }, + { + "epoch": 2.2918807810894144, + "grad_norm": 3.415862560272217, + "learning_rate": 8.131094257854823e-06, + "loss": 0.2788, + "step": 2230 + }, + { + "epoch": 2.302158273381295, + "grad_norm": 3.2938742637634277, + "learning_rate": 8.120260021668473e-06, + "loss": 0.3279, + "step": 2240 + }, + { + "epoch": 2.3124357656731758, + "grad_norm": 3.2525196075439453, + "learning_rate": 8.109425785482125e-06, + "loss": 0.2773, + "step": 2250 + }, + { + "epoch": 2.3227132579650567, + "grad_norm": 3.1913652420043945, + "learning_rate": 8.098591549295775e-06, + "loss": 0.2507, + "step": 2260 + }, + { + "epoch": 2.332990750256937, + "grad_norm": 4.119409561157227, + "learning_rate": 8.087757313109427e-06, + "loss": 0.321, + "step": 2270 + }, + { + "epoch": 2.343268242548818, + "grad_norm": 2.8287854194641113, + "learning_rate": 8.076923076923077e-06, + "loss": 0.3036, + "step": 2280 + }, + { + "epoch": 2.353545734840699, + "grad_norm": 3.1350390911102295, + "learning_rate": 8.066088840736729e-06, + "loss": 0.2861, + "step": 2290 + }, + { + "epoch": 2.3638232271325794, + "grad_norm": 4.139387607574463, + "learning_rate": 8.05525460455038e-06, + "loss": 0.3996, + "step": 2300 + }, + { + "epoch": 2.3741007194244603, + "grad_norm": 4.706663131713867, + "learning_rate": 8.04442036836403e-06, + "loss": 0.3329, + "step": 2310 + }, + { + "epoch": 2.3843782117163412, + "grad_norm": 3.5738885402679443, + "learning_rate": 8.033586132177683e-06, + "loss": 0.2948, + "step": 2320 + }, + { + "epoch": 2.394655704008222, + "grad_norm": 4.643505573272705, + "learning_rate": 8.022751895991333e-06, + "loss": 0.3465, + "step": 2330 + }, + { + "epoch": 2.4049331963001026, + "grad_norm": 4.715351104736328, + "learning_rate": 8.011917659804985e-06, + "loss": 0.4091, + "step": 2340 + }, + { + "epoch": 2.4152106885919835, + "grad_norm": 4.204322814941406, + "learning_rate": 8.001083423618635e-06, + "loss": 0.2641, + "step": 2350 + }, + { + "epoch": 2.4254881808838644, + "grad_norm": 4.201789379119873, + "learning_rate": 7.990249187432287e-06, + "loss": 0.2787, + "step": 2360 + }, + { + "epoch": 2.4357656731757453, + "grad_norm": 2.986111879348755, + "learning_rate": 7.979414951245937e-06, + "loss": 0.2681, + "step": 2370 + }, + { + "epoch": 2.446043165467626, + "grad_norm": 3.5498292446136475, + "learning_rate": 7.968580715059589e-06, + "loss": 0.3293, + "step": 2380 + }, + { + "epoch": 2.4563206577595067, + "grad_norm": 3.319282293319702, + "learning_rate": 7.95774647887324e-06, + "loss": 0.3647, + "step": 2390 + }, + { + "epoch": 2.4665981500513876, + "grad_norm": 3.654517889022827, + "learning_rate": 7.946912242686892e-06, + "loss": 0.3494, + "step": 2400 + }, + { + "epoch": 2.476875642343268, + "grad_norm": 3.8484079837799072, + "learning_rate": 7.936078006500542e-06, + "loss": 0.2913, + "step": 2410 + }, + { + "epoch": 2.487153134635149, + "grad_norm": 3.6659047603607178, + "learning_rate": 7.925243770314194e-06, + "loss": 0.2696, + "step": 2420 + }, + { + "epoch": 2.49743062692703, + "grad_norm": 3.6242451667785645, + "learning_rate": 7.914409534127844e-06, + "loss": 0.2779, + "step": 2430 + }, + { + "epoch": 2.5077081192189103, + "grad_norm": 4.905155658721924, + "learning_rate": 7.903575297941496e-06, + "loss": 0.3085, + "step": 2440 + }, + { + "epoch": 2.5179856115107913, + "grad_norm": 3.8230979442596436, + "learning_rate": 7.892741061755146e-06, 
+ "loss": 0.3179, + "step": 2450 + }, + { + "epoch": 2.528263103802672, + "grad_norm": 3.2261550426483154, + "learning_rate": 7.881906825568798e-06, + "loss": 0.2982, + "step": 2460 + }, + { + "epoch": 2.538540596094553, + "grad_norm": 3.4974489212036133, + "learning_rate": 7.871072589382448e-06, + "loss": 0.2456, + "step": 2470 + }, + { + "epoch": 2.548818088386434, + "grad_norm": 2.6326630115509033, + "learning_rate": 7.8602383531961e-06, + "loss": 0.2948, + "step": 2480 + }, + { + "epoch": 2.5590955806783144, + "grad_norm": 3.988820791244507, + "learning_rate": 7.849404117009752e-06, + "loss": 0.3942, + "step": 2490 + }, + { + "epoch": 2.5693730729701953, + "grad_norm": 4.203096389770508, + "learning_rate": 7.838569880823402e-06, + "loss": 0.3251, + "step": 2500 + }, + { + "epoch": 2.5796505652620763, + "grad_norm": 4.1997199058532715, + "learning_rate": 7.827735644637054e-06, + "loss": 0.2908, + "step": 2510 + }, + { + "epoch": 2.5899280575539567, + "grad_norm": 3.816044330596924, + "learning_rate": 7.816901408450704e-06, + "loss": 0.2962, + "step": 2520 + }, + { + "epoch": 2.6002055498458376, + "grad_norm": 3.998377561569214, + "learning_rate": 7.806067172264356e-06, + "loss": 0.4137, + "step": 2530 + }, + { + "epoch": 2.6104830421377185, + "grad_norm": 3.7878313064575195, + "learning_rate": 7.795232936078008e-06, + "loss": 0.2679, + "step": 2540 + }, + { + "epoch": 2.620760534429599, + "grad_norm": 4.914570331573486, + "learning_rate": 7.784398699891658e-06, + "loss": 0.2976, + "step": 2550 + }, + { + "epoch": 2.63103802672148, + "grad_norm": 4.354416370391846, + "learning_rate": 7.77356446370531e-06, + "loss": 0.3577, + "step": 2560 + }, + { + "epoch": 2.641315519013361, + "grad_norm": 3.3747782707214355, + "learning_rate": 7.762730227518962e-06, + "loss": 0.2868, + "step": 2570 + }, + { + "epoch": 2.6515930113052413, + "grad_norm": 3.854323148727417, + "learning_rate": 7.751895991332612e-06, + "loss": 0.3262, + "step": 2580 + }, + { + "epoch": 2.661870503597122, + "grad_norm": 3.3421154022216797, + "learning_rate": 7.741061755146264e-06, + "loss": 0.2576, + "step": 2590 + }, + { + "epoch": 2.672147995889003, + "grad_norm": 3.1543657779693604, + "learning_rate": 7.730227518959914e-06, + "loss": 0.2698, + "step": 2600 + }, + { + "epoch": 2.682425488180884, + "grad_norm": 3.4310245513916016, + "learning_rate": 7.719393282773566e-06, + "loss": 0.338, + "step": 2610 + }, + { + "epoch": 2.692702980472765, + "grad_norm": 4.017239093780518, + "learning_rate": 7.708559046587216e-06, + "loss": 0.236, + "step": 2620 + }, + { + "epoch": 2.7029804727646454, + "grad_norm": 4.115433692932129, + "learning_rate": 7.697724810400868e-06, + "loss": 0.317, + "step": 2630 + }, + { + "epoch": 2.7132579650565263, + "grad_norm": 3.788522243499756, + "learning_rate": 7.686890574214518e-06, + "loss": 0.357, + "step": 2640 + }, + { + "epoch": 2.723535457348407, + "grad_norm": 4.260583400726318, + "learning_rate": 7.67605633802817e-06, + "loss": 0.2541, + "step": 2650 + }, + { + "epoch": 2.7338129496402876, + "grad_norm": 4.43798828125, + "learning_rate": 7.66522210184182e-06, + "loss": 0.3086, + "step": 2660 + }, + { + "epoch": 2.7440904419321686, + "grad_norm": 3.0351390838623047, + "learning_rate": 7.654387865655472e-06, + "loss": 0.2927, + "step": 2670 + }, + { + "epoch": 2.7543679342240495, + "grad_norm": 3.693898916244507, + "learning_rate": 7.643553629469122e-06, + "loss": 0.3282, + "step": 2680 + }, + { + "epoch": 2.76464542651593, + "grad_norm": 3.6460795402526855, + "learning_rate": 
7.632719393282774e-06, + "loss": 0.2946, + "step": 2690 + }, + { + "epoch": 2.774922918807811, + "grad_norm": 3.9163734912872314, + "learning_rate": 7.6218851570964255e-06, + "loss": 0.2634, + "step": 2700 + }, + { + "epoch": 2.7852004110996917, + "grad_norm": 2.770110607147217, + "learning_rate": 7.611050920910077e-06, + "loss": 0.2571, + "step": 2710 + }, + { + "epoch": 2.795477903391572, + "grad_norm": 3.0240771770477295, + "learning_rate": 7.600216684723728e-06, + "loss": 0.2387, + "step": 2720 + }, + { + "epoch": 2.805755395683453, + "grad_norm": 4.414377689361572, + "learning_rate": 7.589382448537379e-06, + "loss": 0.2457, + "step": 2730 + }, + { + "epoch": 2.816032887975334, + "grad_norm": 3.280635118484497, + "learning_rate": 7.57854821235103e-06, + "loss": 0.3292, + "step": 2740 + }, + { + "epoch": 2.826310380267215, + "grad_norm": 3.114398717880249, + "learning_rate": 7.567713976164681e-06, + "loss": 0.3323, + "step": 2750 + }, + { + "epoch": 2.836587872559096, + "grad_norm": 3.8051183223724365, + "learning_rate": 7.556879739978332e-06, + "loss": 0.3752, + "step": 2760 + }, + { + "epoch": 2.8468653648509763, + "grad_norm": 4.505266189575195, + "learning_rate": 7.546045503791983e-06, + "loss": 0.3081, + "step": 2770 + }, + { + "epoch": 2.857142857142857, + "grad_norm": 3.9669296741485596, + "learning_rate": 7.535211267605634e-06, + "loss": 0.2627, + "step": 2780 + }, + { + "epoch": 2.867420349434738, + "grad_norm": 3.6369543075561523, + "learning_rate": 7.524377031419285e-06, + "loss": 0.311, + "step": 2790 + }, + { + "epoch": 2.8776978417266186, + "grad_norm": 4.124249458312988, + "learning_rate": 7.513542795232936e-06, + "loss": 0.3401, + "step": 2800 + }, + { + "epoch": 2.8879753340184995, + "grad_norm": 3.646584987640381, + "learning_rate": 7.502708559046587e-06, + "loss": 0.3055, + "step": 2810 + }, + { + "epoch": 2.8982528263103804, + "grad_norm": 3.3459272384643555, + "learning_rate": 7.491874322860238e-06, + "loss": 0.2818, + "step": 2820 + }, + { + "epoch": 2.908530318602261, + "grad_norm": 5.0626630783081055, + "learning_rate": 7.48104008667389e-06, + "loss": 0.3489, + "step": 2830 + }, + { + "epoch": 2.9188078108941418, + "grad_norm": 2.9129016399383545, + "learning_rate": 7.470205850487541e-06, + "loss": 0.2889, + "step": 2840 + }, + { + "epoch": 2.9290853031860227, + "grad_norm": 4.322904109954834, + "learning_rate": 7.459371614301192e-06, + "loss": 0.3367, + "step": 2850 + }, + { + "epoch": 2.939362795477903, + "grad_norm": 3.556774139404297, + "learning_rate": 7.448537378114844e-06, + "loss": 0.3137, + "step": 2860 + }, + { + "epoch": 2.949640287769784, + "grad_norm": 2.786842107772827, + "learning_rate": 7.437703141928495e-06, + "loss": 0.3469, + "step": 2870 + }, + { + "epoch": 2.959917780061665, + "grad_norm": 3.740945339202881, + "learning_rate": 7.426868905742146e-06, + "loss": 0.2975, + "step": 2880 + }, + { + "epoch": 2.970195272353546, + "grad_norm": 3.770343780517578, + "learning_rate": 7.416034669555797e-06, + "loss": 0.3193, + "step": 2890 + }, + { + "epoch": 2.9804727646454268, + "grad_norm": 2.6630139350891113, + "learning_rate": 7.405200433369448e-06, + "loss": 0.2735, + "step": 2900 + }, + { + "epoch": 2.9907502569373072, + "grad_norm": 3.7929859161376953, + "learning_rate": 7.3943661971831e-06, + "loss": 0.3617, + "step": 2910 + }, + { + "epoch": 3.0, + "eval_loss": 0.35847166180610657, + "eval_runtime": 2239.9414, + "eval_samples_per_second": 2.62, + "eval_steps_per_second": 0.082, + "eval_wer": 0.43257528793761624, + "step": 2919 + }, + { + 
"epoch": 3.001027749229188, + "grad_norm": 4.5707197189331055, + "learning_rate": 7.383531960996751e-06, + "loss": 0.265, + "step": 2920 + }, + { + "epoch": 3.011305241521069, + "grad_norm": 3.3364627361297607, + "learning_rate": 7.372697724810402e-06, + "loss": 0.3148, + "step": 2930 + }, + { + "epoch": 3.0215827338129495, + "grad_norm": 3.253480911254883, + "learning_rate": 7.361863488624053e-06, + "loss": 0.2638, + "step": 2940 + }, + { + "epoch": 3.0318602261048304, + "grad_norm": 3.9313085079193115, + "learning_rate": 7.351029252437704e-06, + "loss": 0.2463, + "step": 2950 + }, + { + "epoch": 3.0421377183967113, + "grad_norm": 3.0780415534973145, + "learning_rate": 7.340195016251355e-06, + "loss": 0.2363, + "step": 2960 + }, + { + "epoch": 3.052415210688592, + "grad_norm": 3.63417649269104, + "learning_rate": 7.329360780065006e-06, + "loss": 0.2946, + "step": 2970 + }, + { + "epoch": 3.0626927029804727, + "grad_norm": 2.868053436279297, + "learning_rate": 7.318526543878657e-06, + "loss": 0.2659, + "step": 2980 + }, + { + "epoch": 3.0729701952723536, + "grad_norm": 3.3707587718963623, + "learning_rate": 7.307692307692308e-06, + "loss": 0.2843, + "step": 2990 + }, + { + "epoch": 3.0832476875642345, + "grad_norm": 4.61034631729126, + "learning_rate": 7.296858071505959e-06, + "loss": 0.3422, + "step": 3000 + }, + { + "epoch": 3.093525179856115, + "grad_norm": 3.640913963317871, + "learning_rate": 7.286023835319611e-06, + "loss": 0.2587, + "step": 3010 + }, + { + "epoch": 3.103802672147996, + "grad_norm": 3.330796957015991, + "learning_rate": 7.275189599133262e-06, + "loss": 0.2165, + "step": 3020 + }, + { + "epoch": 3.114080164439877, + "grad_norm": 4.815010070800781, + "learning_rate": 7.264355362946913e-06, + "loss": 0.2619, + "step": 3030 + }, + { + "epoch": 3.1243576567317572, + "grad_norm": 4.485762596130371, + "learning_rate": 7.253521126760564e-06, + "loss": 0.2913, + "step": 3040 + }, + { + "epoch": 3.134635149023638, + "grad_norm": 3.712749719619751, + "learning_rate": 7.242686890574215e-06, + "loss": 0.3216, + "step": 3050 + }, + { + "epoch": 3.144912641315519, + "grad_norm": 3.5483086109161377, + "learning_rate": 7.231852654387866e-06, + "loss": 0.2683, + "step": 3060 + }, + { + "epoch": 3.1551901336074, + "grad_norm": 4.34190034866333, + "learning_rate": 7.221018418201517e-06, + "loss": 0.2582, + "step": 3070 + }, + { + "epoch": 3.1654676258992804, + "grad_norm": 3.329151153564453, + "learning_rate": 7.210184182015168e-06, + "loss": 0.3271, + "step": 3080 + }, + { + "epoch": 3.1757451181911613, + "grad_norm": 3.318019151687622, + "learning_rate": 7.199349945828819e-06, + "loss": 0.2994, + "step": 3090 + }, + { + "epoch": 3.1860226104830422, + "grad_norm": 3.2192223072052, + "learning_rate": 7.188515709642471e-06, + "loss": 0.2656, + "step": 3100 + }, + { + "epoch": 3.1963001027749227, + "grad_norm": 3.161679267883301, + "learning_rate": 7.177681473456122e-06, + "loss": 0.2975, + "step": 3110 + }, + { + "epoch": 3.2065775950668036, + "grad_norm": 2.8480417728424072, + "learning_rate": 7.166847237269773e-06, + "loss": 0.3112, + "step": 3120 + }, + { + "epoch": 3.2168550873586845, + "grad_norm": 3.912022829055786, + "learning_rate": 7.156013001083424e-06, + "loss": 0.2526, + "step": 3130 + }, + { + "epoch": 3.2271325796505654, + "grad_norm": 3.466198682785034, + "learning_rate": 7.145178764897075e-06, + "loss": 0.2659, + "step": 3140 + }, + { + "epoch": 3.237410071942446, + "grad_norm": 4.21922492980957, + "learning_rate": 7.134344528710726e-06, + "loss": 0.2879, + "step": 3150 + 
}, + { + "epoch": 3.247687564234327, + "grad_norm": 3.762397527694702, + "learning_rate": 7.123510292524377e-06, + "loss": 0.2626, + "step": 3160 + }, + { + "epoch": 3.2579650565262077, + "grad_norm": 3.112666606903076, + "learning_rate": 7.112676056338029e-06, + "loss": 0.2293, + "step": 3170 + }, + { + "epoch": 3.2682425488180886, + "grad_norm": 4.5521650314331055, + "learning_rate": 7.101841820151681e-06, + "loss": 0.2594, + "step": 3180 + }, + { + "epoch": 3.278520041109969, + "grad_norm": 3.388822555541992, + "learning_rate": 7.091007583965332e-06, + "loss": 0.2543, + "step": 3190 + }, + { + "epoch": 3.28879753340185, + "grad_norm": 2.899162530899048, + "learning_rate": 7.080173347778983e-06, + "loss": 0.258, + "step": 3200 + }, + { + "epoch": 3.299075025693731, + "grad_norm": 2.9126293659210205, + "learning_rate": 7.069339111592634e-06, + "loss": 0.2437, + "step": 3210 + }, + { + "epoch": 3.3093525179856114, + "grad_norm": 4.10038423538208, + "learning_rate": 7.058504875406285e-06, + "loss": 0.2757, + "step": 3220 + }, + { + "epoch": 3.3196300102774923, + "grad_norm": 2.4944069385528564, + "learning_rate": 7.047670639219936e-06, + "loss": 0.2908, + "step": 3230 + }, + { + "epoch": 3.329907502569373, + "grad_norm": 3.2674710750579834, + "learning_rate": 7.036836403033587e-06, + "loss": 0.269, + "step": 3240 + }, + { + "epoch": 3.3401849948612536, + "grad_norm": 3.861447334289551, + "learning_rate": 7.026002166847238e-06, + "loss": 0.3253, + "step": 3250 + }, + { + "epoch": 3.3504624871531345, + "grad_norm": 4.241086483001709, + "learning_rate": 7.015167930660889e-06, + "loss": 0.3116, + "step": 3260 + }, + { + "epoch": 3.3607399794450155, + "grad_norm": 3.33585262298584, + "learning_rate": 7.00433369447454e-06, + "loss": 0.2506, + "step": 3270 + }, + { + "epoch": 3.3710174717368964, + "grad_norm": 4.410299777984619, + "learning_rate": 6.993499458288191e-06, + "loss": 0.2116, + "step": 3280 + }, + { + "epoch": 3.381294964028777, + "grad_norm": 2.706984281539917, + "learning_rate": 6.982665222101842e-06, + "loss": 0.2611, + "step": 3290 + }, + { + "epoch": 3.3915724563206577, + "grad_norm": 3.3027334213256836, + "learning_rate": 6.9718309859154935e-06, + "loss": 0.2962, + "step": 3300 + }, + { + "epoch": 3.4018499486125386, + "grad_norm": 4.3007354736328125, + "learning_rate": 6.9609967497291445e-06, + "loss": 0.245, + "step": 3310 + }, + { + "epoch": 3.4121274409044196, + "grad_norm": 4.280706405639648, + "learning_rate": 6.9501625135427955e-06, + "loss": 0.3149, + "step": 3320 + }, + { + "epoch": 3.4224049331963, + "grad_norm": 3.1681008338928223, + "learning_rate": 6.939328277356447e-06, + "loss": 0.2401, + "step": 3330 + }, + { + "epoch": 3.432682425488181, + "grad_norm": 4.223625183105469, + "learning_rate": 6.928494041170098e-06, + "loss": 0.2456, + "step": 3340 + }, + { + "epoch": 3.442959917780062, + "grad_norm": 3.4283297061920166, + "learning_rate": 6.917659804983749e-06, + "loss": 0.2488, + "step": 3350 + }, + { + "epoch": 3.4532374100719423, + "grad_norm": 4.3228631019592285, + "learning_rate": 6.9068255687974e-06, + "loss": 0.2837, + "step": 3360 + }, + { + "epoch": 3.463514902363823, + "grad_norm": 2.614888906478882, + "learning_rate": 6.895991332611051e-06, + "loss": 0.2818, + "step": 3370 + }, + { + "epoch": 3.473792394655704, + "grad_norm": 4.487213611602783, + "learning_rate": 6.885157096424703e-06, + "loss": 0.236, + "step": 3380 + }, + { + "epoch": 3.4840698869475846, + "grad_norm": 2.5529470443725586, + "learning_rate": 6.874322860238354e-06, + "loss": 0.3254, + 
"step": 3390 + }, + { + "epoch": 3.4943473792394655, + "grad_norm": 3.5503623485565186, + "learning_rate": 6.863488624052005e-06, + "loss": 0.1991, + "step": 3400 + }, + { + "epoch": 3.5046248715313464, + "grad_norm": 3.0319414138793945, + "learning_rate": 6.852654387865656e-06, + "loss": 0.2483, + "step": 3410 + }, + { + "epoch": 3.5149023638232273, + "grad_norm": 3.8595309257507324, + "learning_rate": 6.841820151679307e-06, + "loss": 0.2865, + "step": 3420 + }, + { + "epoch": 3.5251798561151078, + "grad_norm": 4.65828800201416, + "learning_rate": 6.830985915492958e-06, + "loss": 0.2457, + "step": 3430 + }, + { + "epoch": 3.5354573484069887, + "grad_norm": 4.1883015632629395, + "learning_rate": 6.820151679306609e-06, + "loss": 0.2911, + "step": 3440 + }, + { + "epoch": 3.5457348406988696, + "grad_norm": 2.8787269592285156, + "learning_rate": 6.80931744312026e-06, + "loss": 0.23, + "step": 3450 + }, + { + "epoch": 3.5560123329907505, + "grad_norm": 3.850490093231201, + "learning_rate": 6.798483206933911e-06, + "loss": 0.2203, + "step": 3460 + }, + { + "epoch": 3.566289825282631, + "grad_norm": 4.513101577758789, + "learning_rate": 6.787648970747562e-06, + "loss": 0.2693, + "step": 3470 + }, + { + "epoch": 3.576567317574512, + "grad_norm": 3.458218574523926, + "learning_rate": 6.776814734561215e-06, + "loss": 0.2462, + "step": 3480 + }, + { + "epoch": 3.5868448098663928, + "grad_norm": 2.8902974128723145, + "learning_rate": 6.765980498374866e-06, + "loss": 0.2759, + "step": 3490 + }, + { + "epoch": 3.597122302158273, + "grad_norm": 4.30615234375, + "learning_rate": 6.755146262188517e-06, + "loss": 0.3263, + "step": 3500 + }, + { + "epoch": 3.607399794450154, + "grad_norm": 2.434847116470337, + "learning_rate": 6.744312026002168e-06, + "loss": 0.2939, + "step": 3510 + }, + { + "epoch": 3.617677286742035, + "grad_norm": 3.484827756881714, + "learning_rate": 6.733477789815819e-06, + "loss": 0.2583, + "step": 3520 + }, + { + "epoch": 3.6279547790339155, + "grad_norm": 3.158450126647949, + "learning_rate": 6.72264355362947e-06, + "loss": 0.2121, + "step": 3530 + }, + { + "epoch": 3.6382322713257964, + "grad_norm": 3.7957651615142822, + "learning_rate": 6.711809317443121e-06, + "loss": 0.2668, + "step": 3540 + }, + { + "epoch": 3.6485097636176773, + "grad_norm": 3.237265110015869, + "learning_rate": 6.700975081256772e-06, + "loss": 0.217, + "step": 3550 + }, + { + "epoch": 3.6587872559095582, + "grad_norm": 3.7385904788970947, + "learning_rate": 6.690140845070423e-06, + "loss": 0.2668, + "step": 3560 + }, + { + "epoch": 3.6690647482014387, + "grad_norm": 3.3100502490997314, + "learning_rate": 6.6793066088840745e-06, + "loss": 0.279, + "step": 3570 + }, + { + "epoch": 3.6793422404933196, + "grad_norm": 2.7477927207946777, + "learning_rate": 6.6684723726977255e-06, + "loss": 0.3294, + "step": 3580 + }, + { + "epoch": 3.6896197327852005, + "grad_norm": 3.710700750350952, + "learning_rate": 6.6576381365113765e-06, + "loss": 0.3125, + "step": 3590 + }, + { + "epoch": 3.6998972250770814, + "grad_norm": 2.787705898284912, + "learning_rate": 6.6468039003250275e-06, + "loss": 0.2507, + "step": 3600 + }, + { + "epoch": 3.710174717368962, + "grad_norm": 2.693133592605591, + "learning_rate": 6.6359696641386785e-06, + "loss": 0.2254, + "step": 3610 + }, + { + "epoch": 3.720452209660843, + "grad_norm": 3.121232509613037, + "learning_rate": 6.6251354279523295e-06, + "loss": 0.2866, + "step": 3620 + }, + { + "epoch": 3.7307297019527237, + "grad_norm": 7.0685224533081055, + "learning_rate": 
6.6143011917659805e-06, + "loss": 0.2865, + "step": 3630 + }, + { + "epoch": 3.741007194244604, + "grad_norm": 3.528265953063965, + "learning_rate": 6.603466955579632e-06, + "loss": 0.2963, + "step": 3640 + }, + { + "epoch": 3.751284686536485, + "grad_norm": 2.5636558532714844, + "learning_rate": 6.592632719393284e-06, + "loss": 0.2784, + "step": 3650 + }, + { + "epoch": 3.761562178828366, + "grad_norm": 3.7930173873901367, + "learning_rate": 6.581798483206935e-06, + "loss": 0.2101, + "step": 3660 + }, + { + "epoch": 3.7718396711202464, + "grad_norm": 2.674428939819336, + "learning_rate": 6.570964247020586e-06, + "loss": 0.2337, + "step": 3670 + }, + { + "epoch": 3.7821171634121273, + "grad_norm": 3.629955768585205, + "learning_rate": 6.560130010834237e-06, + "loss": 0.2849, + "step": 3680 + }, + { + "epoch": 3.7923946557040082, + "grad_norm": 2.831402063369751, + "learning_rate": 6.549295774647888e-06, + "loss": 0.248, + "step": 3690 + }, + { + "epoch": 3.802672147995889, + "grad_norm": 3.889512300491333, + "learning_rate": 6.538461538461539e-06, + "loss": 0.367, + "step": 3700 + }, + { + "epoch": 3.81294964028777, + "grad_norm": 3.9846222400665283, + "learning_rate": 6.52762730227519e-06, + "loss": 0.3112, + "step": 3710 + }, + { + "epoch": 3.8232271325796505, + "grad_norm": 3.839672327041626, + "learning_rate": 6.516793066088841e-06, + "loss": 0.3242, + "step": 3720 + }, + { + "epoch": 3.8335046248715314, + "grad_norm": 3.0426747798919678, + "learning_rate": 6.505958829902492e-06, + "loss": 0.2678, + "step": 3730 + }, + { + "epoch": 3.8437821171634123, + "grad_norm": 3.6387476921081543, + "learning_rate": 6.495124593716143e-06, + "loss": 0.2387, + "step": 3740 + }, + { + "epoch": 3.854059609455293, + "grad_norm": 4.004968166351318, + "learning_rate": 6.484290357529794e-06, + "loss": 0.2768, + "step": 3750 + }, + { + "epoch": 3.8643371017471737, + "grad_norm": 3.176748752593994, + "learning_rate": 6.473456121343445e-06, + "loss": 0.2588, + "step": 3760 + }, + { + "epoch": 3.8746145940390546, + "grad_norm": 4.246847152709961, + "learning_rate": 6.462621885157097e-06, + "loss": 0.3123, + "step": 3770 + }, + { + "epoch": 3.884892086330935, + "grad_norm": 2.7734780311584473, + "learning_rate": 6.451787648970748e-06, + "loss": 0.2182, + "step": 3780 + }, + { + "epoch": 3.895169578622816, + "grad_norm": 2.851536512374878, + "learning_rate": 6.440953412784399e-06, + "loss": 0.28, + "step": 3790 + }, + { + "epoch": 3.905447070914697, + "grad_norm": 4.055552005767822, + "learning_rate": 6.430119176598051e-06, + "loss": 0.2843, + "step": 3800 + }, + { + "epoch": 3.9157245632065774, + "grad_norm": 3.3340227603912354, + "learning_rate": 6.419284940411702e-06, + "loss": 0.1989, + "step": 3810 + }, + { + "epoch": 3.9260020554984583, + "grad_norm": 3.8631956577301025, + "learning_rate": 6.408450704225353e-06, + "loss": 0.3183, + "step": 3820 + }, + { + "epoch": 3.936279547790339, + "grad_norm": 3.597165107727051, + "learning_rate": 6.397616468039004e-06, + "loss": 0.2707, + "step": 3830 + }, + { + "epoch": 3.94655704008222, + "grad_norm": 4.144725799560547, + "learning_rate": 6.386782231852655e-06, + "loss": 0.2473, + "step": 3840 + }, + { + "epoch": 3.956834532374101, + "grad_norm": 3.035017967224121, + "learning_rate": 6.3759479956663066e-06, + "loss": 0.2145, + "step": 3850 + }, + { + "epoch": 3.9671120246659815, + "grad_norm": 4.453218936920166, + "learning_rate": 6.3651137594799575e-06, + "loss": 0.2232, + "step": 3860 + }, + { + "epoch": 3.9773895169578624, + "grad_norm": 3.9047586917877197, + 
"learning_rate": 6.3542795232936085e-06, + "loss": 0.2968, + "step": 3870 + }, + { + "epoch": 3.9876670092497433, + "grad_norm": 3.47819447517395, + "learning_rate": 6.3434452871072595e-06, + "loss": 0.2044, + "step": 3880 + }, + { + "epoch": 3.9979445015416237, + "grad_norm": 2.6448206901550293, + "learning_rate": 6.3326110509209105e-06, + "loss": 0.3144, + "step": 3890 + }, + { + "epoch": 4.0, + "eval_loss": 0.34360429644584656, + "eval_runtime": 1328.4973, + "eval_samples_per_second": 4.417, + "eval_steps_per_second": 0.139, + "eval_wer": 0.35935919605859895, + "step": 3892 + }, + { + "epoch": 4.008221993833504, + "grad_norm": 2.355408191680908, + "learning_rate": 6.3217768147345615e-06, + "loss": 0.2327, + "step": 3900 + }, + { + "epoch": 4.0184994861253855, + "grad_norm": 5.050731658935547, + "learning_rate": 6.3109425785482125e-06, + "loss": 0.2291, + "step": 3910 + }, + { + "epoch": 4.028776978417266, + "grad_norm": 3.9454739093780518, + "learning_rate": 6.3001083423618635e-06, + "loss": 0.3174, + "step": 3920 + }, + { + "epoch": 4.039054470709147, + "grad_norm": 3.784989356994629, + "learning_rate": 6.2892741061755145e-06, + "loss": 0.2269, + "step": 3930 + }, + { + "epoch": 4.049331963001028, + "grad_norm": 2.7388339042663574, + "learning_rate": 6.2784398699891655e-06, + "loss": 0.2213, + "step": 3940 + }, + { + "epoch": 4.059609455292908, + "grad_norm": 3.768240451812744, + "learning_rate": 6.267605633802818e-06, + "loss": 0.2409, + "step": 3950 + }, + { + "epoch": 4.06988694758479, + "grad_norm": 4.0260233879089355, + "learning_rate": 6.256771397616469e-06, + "loss": 0.1907, + "step": 3960 + }, + { + "epoch": 4.08016443987667, + "grad_norm": 3.6307528018951416, + "learning_rate": 6.24593716143012e-06, + "loss": 0.2457, + "step": 3970 + }, + { + "epoch": 4.090441932168551, + "grad_norm": 3.1473422050476074, + "learning_rate": 6.235102925243771e-06, + "loss": 0.2211, + "step": 3980 + }, + { + "epoch": 4.100719424460432, + "grad_norm": 3.6961750984191895, + "learning_rate": 6.224268689057422e-06, + "loss": 0.2347, + "step": 3990 + }, + { + "epoch": 4.110996916752312, + "grad_norm": 2.0914347171783447, + "learning_rate": 6.213434452871073e-06, + "loss": 0.289, + "step": 4000 + }, + { + "epoch": 4.121274409044193, + "grad_norm": 3.504941463470459, + "learning_rate": 6.202600216684724e-06, + "loss": 0.2985, + "step": 4010 + }, + { + "epoch": 4.131551901336074, + "grad_norm": 4.29639196395874, + "learning_rate": 6.191765980498375e-06, + "loss": 0.2286, + "step": 4020 + }, + { + "epoch": 4.141829393627955, + "grad_norm": 2.81628155708313, + "learning_rate": 6.180931744312026e-06, + "loss": 0.2581, + "step": 4030 + }, + { + "epoch": 4.152106885919835, + "grad_norm": 3.66825008392334, + "learning_rate": 6.170097508125678e-06, + "loss": 0.1727, + "step": 4040 + }, + { + "epoch": 4.1623843782117165, + "grad_norm": 4.361361026763916, + "learning_rate": 6.159263271939329e-06, + "loss": 0.2076, + "step": 4050 + }, + { + "epoch": 4.172661870503597, + "grad_norm": 3.2681326866149902, + "learning_rate": 6.14842903575298e-06, + "loss": 0.2044, + "step": 4060 + }, + { + "epoch": 4.182939362795478, + "grad_norm": 3.082566499710083, + "learning_rate": 6.137594799566631e-06, + "loss": 0.2175, + "step": 4070 + }, + { + "epoch": 4.193216855087359, + "grad_norm": 4.153407573699951, + "learning_rate": 6.126760563380282e-06, + "loss": 0.2607, + "step": 4080 + }, + { + "epoch": 4.203494347379239, + "grad_norm": 3.0195565223693848, + "learning_rate": 6.115926327193933e-06, + "loss": 0.2601, + "step": 4090 + 
}, + { + "epoch": 4.213771839671121, + "grad_norm": 3.5535833835601807, + "learning_rate": 6.105092091007584e-06, + "loss": 0.2432, + "step": 4100 + }, + { + "epoch": 4.224049331963001, + "grad_norm": 3.4361705780029297, + "learning_rate": 6.094257854821236e-06, + "loss": 0.1582, + "step": 4110 + }, + { + "epoch": 4.2343268242548815, + "grad_norm": 4.60875940322876, + "learning_rate": 6.083423618634888e-06, + "loss": 0.2713, + "step": 4120 + }, + { + "epoch": 4.244604316546763, + "grad_norm": 2.7858967781066895, + "learning_rate": 6.072589382448539e-06, + "loss": 0.2475, + "step": 4130 + }, + { + "epoch": 4.254881808838643, + "grad_norm": 3.1303088665008545, + "learning_rate": 6.06175514626219e-06, + "loss": 0.2682, + "step": 4140 + }, + { + "epoch": 4.265159301130524, + "grad_norm": 4.026236057281494, + "learning_rate": 6.0509209100758406e-06, + "loss": 0.2533, + "step": 4150 + }, + { + "epoch": 4.275436793422405, + "grad_norm": 2.858705520629883, + "learning_rate": 6.0400866738894916e-06, + "loss": 0.255, + "step": 4160 + }, + { + "epoch": 4.285714285714286, + "grad_norm": 2.965857744216919, + "learning_rate": 6.0292524377031426e-06, + "loss": 0.2476, + "step": 4170 + }, + { + "epoch": 4.295991778006167, + "grad_norm": 4.407792568206787, + "learning_rate": 6.0184182015167935e-06, + "loss": 0.2316, + "step": 4180 + }, + { + "epoch": 4.306269270298047, + "grad_norm": 2.92669939994812, + "learning_rate": 6.0075839653304445e-06, + "loss": 0.2162, + "step": 4190 + }, + { + "epoch": 4.316546762589928, + "grad_norm": 3.5979628562927246, + "learning_rate": 5.9967497291440955e-06, + "loss": 0.3606, + "step": 4200 + }, + { + "epoch": 4.326824254881809, + "grad_norm": 2.726447343826294, + "learning_rate": 5.9859154929577465e-06, + "loss": 0.205, + "step": 4210 + }, + { + "epoch": 4.33710174717369, + "grad_norm": 3.386230945587158, + "learning_rate": 5.9750812567713975e-06, + "loss": 0.2342, + "step": 4220 + }, + { + "epoch": 4.34737923946557, + "grad_norm": 3.2639143466949463, + "learning_rate": 5.9642470205850485e-06, + "loss": 0.2644, + "step": 4230 + }, + { + "epoch": 4.3576567317574515, + "grad_norm": 2.778188467025757, + "learning_rate": 5.9534127843987e-06, + "loss": 0.2256, + "step": 4240 + }, + { + "epoch": 4.367934224049332, + "grad_norm": 4.341221809387207, + "learning_rate": 5.942578548212351e-06, + "loss": 0.2249, + "step": 4250 + }, + { + "epoch": 4.378211716341212, + "grad_norm": 4.01497745513916, + "learning_rate": 5.931744312026003e-06, + "loss": 0.2355, + "step": 4260 + }, + { + "epoch": 4.388489208633094, + "grad_norm": 2.9536073207855225, + "learning_rate": 5.920910075839654e-06, + "loss": 0.2209, + "step": 4270 + }, + { + "epoch": 4.398766700924974, + "grad_norm": 2.984705686569214, + "learning_rate": 5.910075839653305e-06, + "loss": 0.2678, + "step": 4280 + }, + { + "epoch": 4.409044193216855, + "grad_norm": 2.538471221923828, + "learning_rate": 5.899241603466956e-06, + "loss": 0.234, + "step": 4290 + }, + { + "epoch": 4.419321685508736, + "grad_norm": 2.6294751167297363, + "learning_rate": 5.888407367280607e-06, + "loss": 0.2502, + "step": 4300 + }, + { + "epoch": 4.4295991778006165, + "grad_norm": 3.243605613708496, + "learning_rate": 5.877573131094258e-06, + "loss": 0.2293, + "step": 4310 + }, + { + "epoch": 4.439876670092497, + "grad_norm": 4.492715835571289, + "learning_rate": 5.86673889490791e-06, + "loss": 0.2296, + "step": 4320 + }, + { + "epoch": 4.450154162384378, + "grad_norm": 3.0587844848632812, + "learning_rate": 5.855904658721561e-06, + "loss": 0.219, + "step": 
4330 + }, + { + "epoch": 4.460431654676259, + "grad_norm": 3.431396722793579, + "learning_rate": 5.845070422535212e-06, + "loss": 0.2164, + "step": 4340 + }, + { + "epoch": 4.47070914696814, + "grad_norm": 3.3825416564941406, + "learning_rate": 5.834236186348863e-06, + "loss": 0.2235, + "step": 4350 + }, + { + "epoch": 4.480986639260021, + "grad_norm": 3.5739195346832275, + "learning_rate": 5.823401950162514e-06, + "loss": 0.3161, + "step": 4360 + }, + { + "epoch": 4.491264131551901, + "grad_norm": 3.242276430130005, + "learning_rate": 5.812567713976165e-06, + "loss": 0.2277, + "step": 4370 + }, + { + "epoch": 4.501541623843782, + "grad_norm": 3.149808406829834, + "learning_rate": 5.801733477789816e-06, + "loss": 0.3057, + "step": 4380 + }, + { + "epoch": 4.511819116135663, + "grad_norm": 3.0861527919769287, + "learning_rate": 5.790899241603467e-06, + "loss": 0.2673, + "step": 4390 + }, + { + "epoch": 4.522096608427543, + "grad_norm": 2.8522121906280518, + "learning_rate": 5.780065005417118e-06, + "loss": 0.2213, + "step": 4400 + }, + { + "epoch": 4.532374100719425, + "grad_norm": 3.0801758766174316, + "learning_rate": 5.769230769230769e-06, + "loss": 0.2283, + "step": 4410 + }, + { + "epoch": 4.542651593011305, + "grad_norm": 4.235361576080322, + "learning_rate": 5.758396533044422e-06, + "loss": 0.2484, + "step": 4420 + }, + { + "epoch": 4.552929085303186, + "grad_norm": 3.881052017211914, + "learning_rate": 5.747562296858073e-06, + "loss": 0.2577, + "step": 4430 + }, + { + "epoch": 4.563206577595067, + "grad_norm": 4.833401203155518, + "learning_rate": 5.736728060671724e-06, + "loss": 0.2614, + "step": 4440 + }, + { + "epoch": 4.5734840698869474, + "grad_norm": 3.7018377780914307, + "learning_rate": 5.725893824485375e-06, + "loss": 0.2539, + "step": 4450 + }, + { + "epoch": 4.583761562178829, + "grad_norm": 3.454493522644043, + "learning_rate": 5.715059588299026e-06, + "loss": 0.2393, + "step": 4460 + }, + { + "epoch": 4.594039054470709, + "grad_norm": 3.1760177612304688, + "learning_rate": 5.7042253521126766e-06, + "loss": 0.2756, + "step": 4470 + }, + { + "epoch": 4.60431654676259, + "grad_norm": 3.2941226959228516, + "learning_rate": 5.6933911159263276e-06, + "loss": 0.221, + "step": 4480 + }, + { + "epoch": 4.614594039054471, + "grad_norm": 2.6715846061706543, + "learning_rate": 5.6825568797399786e-06, + "loss": 0.1891, + "step": 4490 + }, + { + "epoch": 4.6248715313463515, + "grad_norm": 4.533013343811035, + "learning_rate": 5.6717226435536295e-06, + "loss": 0.2153, + "step": 4500 + }, + { + "epoch": 4.635149023638232, + "grad_norm": 3.7271065711975098, + "learning_rate": 5.6608884073672805e-06, + "loss": 0.1851, + "step": 4510 + }, + { + "epoch": 4.645426515930113, + "grad_norm": 3.6285195350646973, + "learning_rate": 5.650054171180932e-06, + "loss": 0.2545, + "step": 4520 + }, + { + "epoch": 4.655704008221994, + "grad_norm": 2.942715883255005, + "learning_rate": 5.639219934994583e-06, + "loss": 0.2742, + "step": 4530 + }, + { + "epoch": 4.665981500513874, + "grad_norm": 3.835853099822998, + "learning_rate": 5.628385698808234e-06, + "loss": 0.26, + "step": 4540 + }, + { + "epoch": 4.676258992805756, + "grad_norm": 2.985145092010498, + "learning_rate": 5.617551462621885e-06, + "loss": 0.1806, + "step": 4550 + }, + { + "epoch": 4.686536485097636, + "grad_norm": 2.787224531173706, + "learning_rate": 5.606717226435536e-06, + "loss": 0.2763, + "step": 4560 + }, + { + "epoch": 4.6968139773895174, + "grad_norm": 3.8157551288604736, + "learning_rate": 5.595882990249187e-06, + "loss": 
0.2892, + "step": 4570 + }, + { + "epoch": 4.707091469681398, + "grad_norm": 3.440402030944824, + "learning_rate": 5.585048754062839e-06, + "loss": 0.2788, + "step": 4580 + }, + { + "epoch": 4.717368961973278, + "grad_norm": 4.048103332519531, + "learning_rate": 5.57421451787649e-06, + "loss": 0.265, + "step": 4590 + }, + { + "epoch": 4.727646454265159, + "grad_norm": 2.923424482345581, + "learning_rate": 5.563380281690142e-06, + "loss": 0.2165, + "step": 4600 + }, + { + "epoch": 4.73792394655704, + "grad_norm": 2.8023345470428467, + "learning_rate": 5.552546045503793e-06, + "loss": 0.1895, + "step": 4610 + }, + { + "epoch": 4.748201438848921, + "grad_norm": 2.900959014892578, + "learning_rate": 5.541711809317444e-06, + "loss": 0.195, + "step": 4620 + }, + { + "epoch": 4.758478931140802, + "grad_norm": 3.2633719444274902, + "learning_rate": 5.530877573131095e-06, + "loss": 0.2658, + "step": 4630 + }, + { + "epoch": 4.7687564234326825, + "grad_norm": 3.7170844078063965, + "learning_rate": 5.520043336944746e-06, + "loss": 0.2247, + "step": 4640 + }, + { + "epoch": 4.779033915724563, + "grad_norm": 3.317171573638916, + "learning_rate": 5.509209100758397e-06, + "loss": 0.2497, + "step": 4650 + }, + { + "epoch": 4.789311408016444, + "grad_norm": 3.8781418800354004, + "learning_rate": 5.498374864572048e-06, + "loss": 0.218, + "step": 4660 + }, + { + "epoch": 4.799588900308325, + "grad_norm": 3.596952438354492, + "learning_rate": 5.487540628385699e-06, + "loss": 0.2871, + "step": 4670 + }, + { + "epoch": 4.809866392600205, + "grad_norm": 3.8521931171417236, + "learning_rate": 5.47670639219935e-06, + "loss": 0.2784, + "step": 4680 + }, + { + "epoch": 4.820143884892087, + "grad_norm": 3.562053680419922, + "learning_rate": 5.465872156013001e-06, + "loss": 0.2502, + "step": 4690 + }, + { + "epoch": 4.830421377183967, + "grad_norm": 3.1473138332366943, + "learning_rate": 5.455037919826652e-06, + "loss": 0.1935, + "step": 4700 + }, + { + "epoch": 4.8406988694758475, + "grad_norm": 3.755488157272339, + "learning_rate": 5.444203683640304e-06, + "loss": 0.2786, + "step": 4710 + }, + { + "epoch": 4.850976361767729, + "grad_norm": 2.7507431507110596, + "learning_rate": 5.433369447453955e-06, + "loss": 0.1997, + "step": 4720 + }, + { + "epoch": 4.861253854059609, + "grad_norm": 2.5082316398620605, + "learning_rate": 5.422535211267607e-06, + "loss": 0.1983, + "step": 4730 + }, + { + "epoch": 4.871531346351491, + "grad_norm": 4.426095485687256, + "learning_rate": 5.411700975081258e-06, + "loss": 0.2835, + "step": 4740 + }, + { + "epoch": 4.881808838643371, + "grad_norm": 3.8168785572052, + "learning_rate": 5.400866738894909e-06, + "loss": 0.197, + "step": 4750 + }, + { + "epoch": 4.892086330935252, + "grad_norm": 3.388019561767578, + "learning_rate": 5.39003250270856e-06, + "loss": 0.2651, + "step": 4760 + }, + { + "epoch": 4.902363823227133, + "grad_norm": 4.148802757263184, + "learning_rate": 5.379198266522211e-06, + "loss": 0.267, + "step": 4770 + }, + { + "epoch": 4.912641315519013, + "grad_norm": 4.763253211975098, + "learning_rate": 5.368364030335862e-06, + "loss": 0.2307, + "step": 4780 + }, + { + "epoch": 4.922918807810894, + "grad_norm": 5.207771301269531, + "learning_rate": 5.357529794149513e-06, + "loss": 0.2559, + "step": 4790 + }, + { + "epoch": 4.933196300102775, + "grad_norm": 2.9947853088378906, + "learning_rate": 5.346695557963164e-06, + "loss": 0.1951, + "step": 4800 + }, + { + "epoch": 4.943473792394656, + "grad_norm": 3.326383113861084, + "learning_rate": 5.335861321776815e-06, + "loss": 
0.2096, + "step": 4810 + }, + { + "epoch": 4.953751284686536, + "grad_norm": 4.471996307373047, + "learning_rate": 5.325027085590466e-06, + "loss": 0.2297, + "step": 4820 + }, + { + "epoch": 4.9640287769784175, + "grad_norm": 2.7022922039031982, + "learning_rate": 5.314192849404117e-06, + "loss": 0.1895, + "step": 4830 + }, + { + "epoch": 4.974306269270298, + "grad_norm": 2.3356800079345703, + "learning_rate": 5.303358613217768e-06, + "loss": 0.1827, + "step": 4840 + }, + { + "epoch": 4.984583761562179, + "grad_norm": 3.5485782623291016, + "learning_rate": 5.292524377031419e-06, + "loss": 0.2314, + "step": 4850 + }, + { + "epoch": 4.99486125385406, + "grad_norm": 4.482807159423828, + "learning_rate": 5.28169014084507e-06, + "loss": 0.272, + "step": 4860 + }, + { + "epoch": 5.0, + "eval_loss": 0.3424507975578308, + "eval_runtime": 2155.9454, + "eval_samples_per_second": 2.722, + "eval_steps_per_second": 0.085, + "eval_wer": 0.3638944174361317, + "step": 4865 + }, + { + "epoch": 5.00513874614594, + "grad_norm": 3.5453717708587646, + "learning_rate": 5.270855904658721e-06, + "loss": 0.1935, + "step": 4870 + }, + { + "epoch": 5.015416238437822, + "grad_norm": 2.4929885864257812, + "learning_rate": 5.260021668472372e-06, + "loss": 0.2384, + "step": 4880 + }, + { + "epoch": 5.025693730729702, + "grad_norm": 4.065361499786377, + "learning_rate": 5.249187432286025e-06, + "loss": 0.1792, + "step": 4890 + }, + { + "epoch": 5.0359712230215825, + "grad_norm": 2.583575487136841, + "learning_rate": 5.238353196099676e-06, + "loss": 0.2077, + "step": 4900 + }, + { + "epoch": 5.046248715313464, + "grad_norm": 2.885948896408081, + "learning_rate": 5.227518959913327e-06, + "loss": 0.1988, + "step": 4910 + }, + { + "epoch": 5.056526207605344, + "grad_norm": 3.0783309936523438, + "learning_rate": 5.216684723726978e-06, + "loss": 0.1974, + "step": 4920 + }, + { + "epoch": 5.066803699897225, + "grad_norm": 2.903958559036255, + "learning_rate": 5.205850487540629e-06, + "loss": 0.2528, + "step": 4930 + }, + { + "epoch": 5.077081192189106, + "grad_norm": 4.175934791564941, + "learning_rate": 5.19501625135428e-06, + "loss": 0.2178, + "step": 4940 + }, + { + "epoch": 5.087358684480987, + "grad_norm": 2.6728291511535645, + "learning_rate": 5.184182015167931e-06, + "loss": 0.2193, + "step": 4950 + }, + { + "epoch": 5.097636176772867, + "grad_norm": 2.6697630882263184, + "learning_rate": 5.173347778981582e-06, + "loss": 0.1994, + "step": 4960 + }, + { + "epoch": 5.107913669064748, + "grad_norm": 3.4050991535186768, + "learning_rate": 5.162513542795233e-06, + "loss": 0.2599, + "step": 4970 + }, + { + "epoch": 5.118191161356629, + "grad_norm": 4.359245300292969, + "learning_rate": 5.151679306608884e-06, + "loss": 0.1863, + "step": 4980 + }, + { + "epoch": 5.128468653648509, + "grad_norm": 4.7175726890563965, + "learning_rate": 5.140845070422536e-06, + "loss": 0.2285, + "step": 4990 + }, + { + "epoch": 5.138746145940391, + "grad_norm": 3.808244228363037, + "learning_rate": 5.130010834236187e-06, + "loss": 0.2287, + "step": 5000 + }, + { + "epoch": 5.149023638232271, + "grad_norm": 2.721421957015991, + "learning_rate": 5.119176598049838e-06, + "loss": 0.1837, + "step": 5010 + }, + { + "epoch": 5.1593011305241525, + "grad_norm": 3.716888427734375, + "learning_rate": 5.108342361863489e-06, + "loss": 0.1911, + "step": 5020 + }, + { + "epoch": 5.169578622816033, + "grad_norm": 2.224301338195801, + "learning_rate": 5.09750812567714e-06, + "loss": 0.2105, + "step": 5030 + }, + { + "epoch": 5.179856115107913, + "grad_norm": 
2.7661211490631104, + "learning_rate": 5.086673889490791e-06, + "loss": 0.1896, + "step": 5040 + }, + { + "epoch": 5.190133607399795, + "grad_norm": 2.1640748977661133, + "learning_rate": 5.075839653304443e-06, + "loss": 0.2515, + "step": 5050 + }, + { + "epoch": 5.200411099691675, + "grad_norm": 2.9602606296539307, + "learning_rate": 5.065005417118094e-06, + "loss": 0.2077, + "step": 5060 + }, + { + "epoch": 5.210688591983556, + "grad_norm": 3.2131927013397217, + "learning_rate": 5.0541711809317454e-06, + "loss": 0.1956, + "step": 5070 + }, + { + "epoch": 5.220966084275437, + "grad_norm": 4.2276105880737305, + "learning_rate": 5.0433369447453964e-06, + "loss": 0.2595, + "step": 5080 + }, + { + "epoch": 5.2312435765673175, + "grad_norm": 2.062800645828247, + "learning_rate": 5.0325027085590474e-06, + "loss": 0.2245, + "step": 5090 + }, + { + "epoch": 5.241521068859198, + "grad_norm": 3.1266229152679443, + "learning_rate": 5.0216684723726984e-06, + "loss": 0.222, + "step": 5100 + }, + { + "epoch": 5.251798561151079, + "grad_norm": 2.811793327331543, + "learning_rate": 5.010834236186349e-06, + "loss": 0.2297, + "step": 5110 + }, + { + "epoch": 5.26207605344296, + "grad_norm": 3.1398720741271973, + "learning_rate": 5e-06, + "loss": 0.1736, + "step": 5120 + }, + { + "epoch": 5.272353545734841, + "grad_norm": 3.829897880554199, + "learning_rate": 4.989165763813651e-06, + "loss": 0.2096, + "step": 5130 + }, + { + "epoch": 5.282631038026722, + "grad_norm": 3.5916318893432617, + "learning_rate": 4.978331527627302e-06, + "loss": 0.2428, + "step": 5140 + }, + { + "epoch": 5.292908530318602, + "grad_norm": 2.3700063228607178, + "learning_rate": 4.967497291440953e-06, + "loss": 0.1932, + "step": 5150 + }, + { + "epoch": 5.303186022610483, + "grad_norm": 3.2748498916625977, + "learning_rate": 4.956663055254605e-06, + "loss": 0.2216, + "step": 5160 + }, + { + "epoch": 5.313463514902364, + "grad_norm": 2.50570011138916, + "learning_rate": 4.945828819068256e-06, + "loss": 0.219, + "step": 5170 + }, + { + "epoch": 5.323741007194244, + "grad_norm": 3.0017030239105225, + "learning_rate": 4.934994582881907e-06, + "loss": 0.231, + "step": 5180 + }, + { + "epoch": 5.334018499486126, + "grad_norm": 2.34260630607605, + "learning_rate": 4.924160346695558e-06, + "loss": 0.2528, + "step": 5190 + }, + { + "epoch": 5.344295991778006, + "grad_norm": 5.0798444747924805, + "learning_rate": 4.913326110509209e-06, + "loss": 0.2046, + "step": 5200 + }, + { + "epoch": 5.354573484069887, + "grad_norm": 3.9050886631011963, + "learning_rate": 4.90249187432286e-06, + "loss": 0.2026, + "step": 5210 + }, + { + "epoch": 5.364850976361768, + "grad_norm": 4.162816524505615, + "learning_rate": 4.891657638136512e-06, + "loss": 0.2601, + "step": 5220 + }, + { + "epoch": 5.3751284686536485, + "grad_norm": 3.383274555206299, + "learning_rate": 4.880823401950163e-06, + "loss": 0.1653, + "step": 5230 + }, + { + "epoch": 5.385405960945529, + "grad_norm": 2.9354922771453857, + "learning_rate": 4.869989165763814e-06, + "loss": 0.2238, + "step": 5240 + }, + { + "epoch": 5.39568345323741, + "grad_norm": 2.3812334537506104, + "learning_rate": 4.859154929577465e-06, + "loss": 0.2059, + "step": 5250 + }, + { + "epoch": 5.405960945529291, + "grad_norm": 2.6520116329193115, + "learning_rate": 4.848320693391117e-06, + "loss": 0.2624, + "step": 5260 + }, + { + "epoch": 5.416238437821171, + "grad_norm": 3.0220510959625244, + "learning_rate": 4.837486457204768e-06, + "loss": 0.2858, + "step": 5270 + }, + { + "epoch": 5.4265159301130526, + "grad_norm": 
3.373061180114746, + "learning_rate": 4.826652221018419e-06, + "loss": 0.2445, + "step": 5280 + }, + { + "epoch": 5.436793422404933, + "grad_norm": 3.253922462463379, + "learning_rate": 4.81581798483207e-06, + "loss": 0.2465, + "step": 5290 + }, + { + "epoch": 5.447070914696814, + "grad_norm": 3.8040966987609863, + "learning_rate": 4.804983748645721e-06, + "loss": 0.2157, + "step": 5300 + }, + { + "epoch": 5.457348406988695, + "grad_norm": 2.812648057937622, + "learning_rate": 4.794149512459372e-06, + "loss": 0.2039, + "step": 5310 + }, + { + "epoch": 5.467625899280575, + "grad_norm": 3.9564015865325928, + "learning_rate": 4.783315276273024e-06, + "loss": 0.1993, + "step": 5320 + }, + { + "epoch": 5.477903391572457, + "grad_norm": 2.991647481918335, + "learning_rate": 4.772481040086675e-06, + "loss": 0.1606, + "step": 5330 + }, + { + "epoch": 5.488180883864337, + "grad_norm": 3.1116158962249756, + "learning_rate": 4.761646803900326e-06, + "loss": 0.2625, + "step": 5340 + }, + { + "epoch": 5.498458376156218, + "grad_norm": 4.179168224334717, + "learning_rate": 4.750812567713977e-06, + "loss": 0.2525, + "step": 5350 + }, + { + "epoch": 5.508735868448099, + "grad_norm": 3.158571481704712, + "learning_rate": 4.739978331527628e-06, + "loss": 0.2093, + "step": 5360 + }, + { + "epoch": 5.519013360739979, + "grad_norm": 3.2934162616729736, + "learning_rate": 4.729144095341279e-06, + "loss": 0.2581, + "step": 5370 + }, + { + "epoch": 5.52929085303186, + "grad_norm": 2.104811668395996, + "learning_rate": 4.71830985915493e-06, + "loss": 0.1754, + "step": 5380 + }, + { + "epoch": 5.539568345323741, + "grad_norm": 2.4097349643707275, + "learning_rate": 4.707475622968581e-06, + "loss": 0.1439, + "step": 5390 + }, + { + "epoch": 5.549845837615622, + "grad_norm": 3.515024423599243, + "learning_rate": 4.6966413867822324e-06, + "loss": 0.1735, + "step": 5400 + }, + { + "epoch": 5.560123329907503, + "grad_norm": 3.5324950218200684, + "learning_rate": 4.6858071505958834e-06, + "loss": 0.2659, + "step": 5410 + }, + { + "epoch": 5.5704008221993835, + "grad_norm": 2.9916086196899414, + "learning_rate": 4.674972914409534e-06, + "loss": 0.2829, + "step": 5420 + }, + { + "epoch": 5.580678314491264, + "grad_norm": 3.8028905391693115, + "learning_rate": 4.664138678223185e-06, + "loss": 0.2463, + "step": 5430 + }, + { + "epoch": 5.590955806783145, + "grad_norm": 3.8579933643341064, + "learning_rate": 4.653304442036836e-06, + "loss": 0.2034, + "step": 5440 + }, + { + "epoch": 5.601233299075026, + "grad_norm": 2.935047149658203, + "learning_rate": 4.642470205850487e-06, + "loss": 0.1735, + "step": 5450 + }, + { + "epoch": 5.611510791366906, + "grad_norm": 3.832740068435669, + "learning_rate": 4.631635969664139e-06, + "loss": 0.2052, + "step": 5460 + }, + { + "epoch": 5.621788283658788, + "grad_norm": 3.1037473678588867, + "learning_rate": 4.62080173347779e-06, + "loss": 0.1715, + "step": 5470 + }, + { + "epoch": 5.632065775950668, + "grad_norm": 3.4924092292785645, + "learning_rate": 4.609967497291441e-06, + "loss": 0.1661, + "step": 5480 + }, + { + "epoch": 5.6423432682425485, + "grad_norm": 3.0313289165496826, + "learning_rate": 4.599133261105092e-06, + "loss": 0.2318, + "step": 5490 + }, + { + "epoch": 5.65262076053443, + "grad_norm": 2.711442708969116, + "learning_rate": 4.588299024918744e-06, + "loss": 0.203, + "step": 5500 + }, + { + "epoch": 5.66289825282631, + "grad_norm": 3.3626229763031006, + "learning_rate": 4.577464788732395e-06, + "loss": 0.1957, + "step": 5510 + }, + { + "epoch": 5.673175745118191, + 
"grad_norm": 5.538825988769531, + "learning_rate": 4.566630552546046e-06, + "loss": 0.2218, + "step": 5520 + }, + { + "epoch": 5.683453237410072, + "grad_norm": 4.4782280921936035, + "learning_rate": 4.555796316359697e-06, + "loss": 0.2317, + "step": 5530 + }, + { + "epoch": 5.693730729701953, + "grad_norm": 3.762585401535034, + "learning_rate": 4.544962080173348e-06, + "loss": 0.2034, + "step": 5540 + }, + { + "epoch": 5.704008221993833, + "grad_norm": 2.648967981338501, + "learning_rate": 4.534127843986999e-06, + "loss": 0.2342, + "step": 5550 + }, + { + "epoch": 5.714285714285714, + "grad_norm": 3.2710864543914795, + "learning_rate": 4.523293607800651e-06, + "loss": 0.2271, + "step": 5560 + }, + { + "epoch": 5.724563206577595, + "grad_norm": 3.4262428283691406, + "learning_rate": 4.512459371614302e-06, + "loss": 0.2354, + "step": 5570 + }, + { + "epoch": 5.734840698869476, + "grad_norm": 4.622767925262451, + "learning_rate": 4.501625135427953e-06, + "loss": 0.2362, + "step": 5580 + }, + { + "epoch": 5.745118191161357, + "grad_norm": 2.523181915283203, + "learning_rate": 4.490790899241604e-06, + "loss": 0.277, + "step": 5590 + }, + { + "epoch": 5.755395683453237, + "grad_norm": 3.465257167816162, + "learning_rate": 4.479956663055255e-06, + "loss": 0.1887, + "step": 5600 + }, + { + "epoch": 5.7656731757451185, + "grad_norm": 3.522796869277954, + "learning_rate": 4.469122426868906e-06, + "loss": 0.2124, + "step": 5610 + }, + { + "epoch": 5.775950668036999, + "grad_norm": 2.6344645023345947, + "learning_rate": 4.458288190682557e-06, + "loss": 0.1447, + "step": 5620 + }, + { + "epoch": 5.786228160328879, + "grad_norm": 4.947503566741943, + "learning_rate": 4.447453954496209e-06, + "loss": 0.2845, + "step": 5630 + }, + { + "epoch": 5.796505652620761, + "grad_norm": 2.601114511489868, + "learning_rate": 4.43661971830986e-06, + "loss": 0.225, + "step": 5640 + }, + { + "epoch": 5.806783144912641, + "grad_norm": 2.1763648986816406, + "learning_rate": 4.425785482123511e-06, + "loss": 0.2572, + "step": 5650 + }, + { + "epoch": 5.817060637204522, + "grad_norm": 3.327956199645996, + "learning_rate": 4.414951245937162e-06, + "loss": 0.252, + "step": 5660 + }, + { + "epoch": 5.827338129496403, + "grad_norm": 3.0444459915161133, + "learning_rate": 4.404117009750813e-06, + "loss": 0.202, + "step": 5670 + }, + { + "epoch": 5.8376156217882835, + "grad_norm": 3.5194921493530273, + "learning_rate": 4.393282773564464e-06, + "loss": 0.1841, + "step": 5680 + }, + { + "epoch": 5.847893114080165, + "grad_norm": 6.3386759757995605, + "learning_rate": 4.3824485373781155e-06, + "loss": 0.2575, + "step": 5690 + }, + { + "epoch": 5.858170606372045, + "grad_norm": 2.679220676422119, + "learning_rate": 4.3716143011917665e-06, + "loss": 0.2159, + "step": 5700 + }, + { + "epoch": 5.868448098663926, + "grad_norm": 3.0586998462677, + "learning_rate": 4.3607800650054174e-06, + "loss": 0.2096, + "step": 5710 + }, + { + "epoch": 5.878725590955807, + "grad_norm": 3.4423577785491943, + "learning_rate": 4.3499458288190684e-06, + "loss": 0.2255, + "step": 5720 + }, + { + "epoch": 5.889003083247688, + "grad_norm": 2.7511608600616455, + "learning_rate": 4.33911159263272e-06, + "loss": 0.1964, + "step": 5730 + }, + { + "epoch": 5.899280575539568, + "grad_norm": 2.233632802963257, + "learning_rate": 4.328277356446371e-06, + "loss": 0.197, + "step": 5740 + }, + { + "epoch": 5.909558067831449, + "grad_norm": 2.6019561290740967, + "learning_rate": 4.317443120260022e-06, + "loss": 0.167, + "step": 5750 + }, + { + "epoch": 
5.91983556012333, + "grad_norm": 2.9982266426086426, + "learning_rate": 4.306608884073673e-06, + "loss": 0.2546, + "step": 5760 + }, + { + "epoch": 5.93011305241521, + "grad_norm": 2.8179750442504883, + "learning_rate": 4.295774647887324e-06, + "loss": 0.2555, + "step": 5770 + }, + { + "epoch": 5.940390544707092, + "grad_norm": 3.8446030616760254, + "learning_rate": 4.284940411700975e-06, + "loss": 0.2172, + "step": 5780 + }, + { + "epoch": 5.950668036998972, + "grad_norm": 3.377340078353882, + "learning_rate": 4.274106175514627e-06, + "loss": 0.2168, + "step": 5790 + }, + { + "epoch": 5.9609455292908535, + "grad_norm": 4.0742411613464355, + "learning_rate": 4.263271939328278e-06, + "loss": 0.2135, + "step": 5800 + }, + { + "epoch": 5.971223021582734, + "grad_norm": 2.8021926879882812, + "learning_rate": 4.252437703141929e-06, + "loss": 0.2091, + "step": 5810 + }, + { + "epoch": 5.9815005138746145, + "grad_norm": 2.8668556213378906, + "learning_rate": 4.24160346695558e-06, + "loss": 0.1975, + "step": 5820 + }, + { + "epoch": 5.991778006166495, + "grad_norm": 3.0243079662323, + "learning_rate": 4.230769230769231e-06, + "loss": 0.2246, + "step": 5830 + }, + { + "epoch": 6.0, + "eval_loss": 0.3370836079120636, + "eval_runtime": 1607.0593, + "eval_samples_per_second": 3.651, + "eval_steps_per_second": 0.114, + "eval_wer": 0.3341055173088845, + "step": 5838 + }, + { + "epoch": 6.002055498458376, + "grad_norm": 4.44910192489624, + "learning_rate": 4.219934994582882e-06, + "loss": 0.2229, + "step": 5840 + }, + { + "epoch": 6.012332990750257, + "grad_norm": 4.300351142883301, + "learning_rate": 4.209100758396533e-06, + "loss": 0.2699, + "step": 5850 + }, + { + "epoch": 6.022610483042138, + "grad_norm": 2.6255762577056885, + "learning_rate": 4.198266522210184e-06, + "loss": 0.2489, + "step": 5860 + }, + { + "epoch": 6.0328879753340185, + "grad_norm": 3.224179267883301, + "learning_rate": 4.187432286023836e-06, + "loss": 0.2066, + "step": 5870 + }, + { + "epoch": 6.043165467625899, + "grad_norm": 2.1570770740509033, + "learning_rate": 4.176598049837487e-06, + "loss": 0.1952, + "step": 5880 + }, + { + "epoch": 6.05344295991778, + "grad_norm": 2.8564834594726562, + "learning_rate": 4.165763813651138e-06, + "loss": 0.1647, + "step": 5890 + }, + { + "epoch": 6.063720452209661, + "grad_norm": 3.121005058288574, + "learning_rate": 4.154929577464789e-06, + "loss": 0.1846, + "step": 5900 + }, + { + "epoch": 6.073997944501541, + "grad_norm": 2.7559916973114014, + "learning_rate": 4.14409534127844e-06, + "loss": 0.2056, + "step": 5910 + }, + { + "epoch": 6.084275436793423, + "grad_norm": 2.2764577865600586, + "learning_rate": 4.133261105092091e-06, + "loss": 0.1827, + "step": 5920 + }, + { + "epoch": 6.094552929085303, + "grad_norm": 3.273794412612915, + "learning_rate": 4.122426868905743e-06, + "loss": 0.2004, + "step": 5930 + }, + { + "epoch": 6.104830421377184, + "grad_norm": 3.497180461883545, + "learning_rate": 4.111592632719394e-06, + "loss": 0.1765, + "step": 5940 + }, + { + "epoch": 6.115107913669065, + "grad_norm": 2.506742238998413, + "learning_rate": 4.100758396533045e-06, + "loss": 0.1928, + "step": 5950 + }, + { + "epoch": 6.125385405960945, + "grad_norm": 4.537923812866211, + "learning_rate": 4.089924160346696e-06, + "loss": 0.2638, + "step": 5960 + }, + { + "epoch": 6.135662898252827, + "grad_norm": 4.140702247619629, + "learning_rate": 4.0790899241603475e-06, + "loss": 0.2831, + "step": 5970 + }, + { + "epoch": 6.145940390544707, + "grad_norm": 3.754379987716675, + "learning_rate": 
4.0682556879739985e-06, + "loss": 0.2594, + "step": 5980 + }, + { + "epoch": 6.156217882836588, + "grad_norm": 4.139322757720947, + "learning_rate": 4.0574214517876495e-06, + "loss": 0.154, + "step": 5990 + }, + { + "epoch": 6.166495375128469, + "grad_norm": 2.700397491455078, + "learning_rate": 4.0465872156013005e-06, + "loss": 0.1733, + "step": 6000 + }, + { + "epoch": 6.1767728674203495, + "grad_norm": 4.563075542449951, + "learning_rate": 4.0357529794149515e-06, + "loss": 0.1818, + "step": 6010 + }, + { + "epoch": 6.18705035971223, + "grad_norm": 3.9990346431732178, + "learning_rate": 4.024918743228603e-06, + "loss": 0.1735, + "step": 6020 + }, + { + "epoch": 6.197327852004111, + "grad_norm": 3.266754627227783, + "learning_rate": 4.014084507042254e-06, + "loss": 0.2065, + "step": 6030 + }, + { + "epoch": 6.207605344295992, + "grad_norm": 2.936103105545044, + "learning_rate": 4.003250270855905e-06, + "loss": 0.2146, + "step": 6040 + }, + { + "epoch": 6.217882836587872, + "grad_norm": 3.2443127632141113, + "learning_rate": 3.992416034669556e-06, + "loss": 0.2163, + "step": 6050 + }, + { + "epoch": 6.228160328879754, + "grad_norm": 1.8829902410507202, + "learning_rate": 3.981581798483207e-06, + "loss": 0.2112, + "step": 6060 + }, + { + "epoch": 6.238437821171634, + "grad_norm": 3.7794463634490967, + "learning_rate": 3.970747562296858e-06, + "loss": 0.2099, + "step": 6070 + }, + { + "epoch": 6.2487153134635145, + "grad_norm": 3.4124205112457275, + "learning_rate": 3.959913326110509e-06, + "loss": 0.2275, + "step": 6080 + }, + { + "epoch": 6.258992805755396, + "grad_norm": 3.0793240070343018, + "learning_rate": 3.94907908992416e-06, + "loss": 0.2107, + "step": 6090 + }, + { + "epoch": 6.269270298047276, + "grad_norm": 2.9140002727508545, + "learning_rate": 3.938244853737812e-06, + "loss": 0.2207, + "step": 6100 + }, + { + "epoch": 6.279547790339157, + "grad_norm": 4.15077543258667, + "learning_rate": 3.927410617551463e-06, + "loss": 0.2423, + "step": 6110 + }, + { + "epoch": 6.289825282631038, + "grad_norm": 3.4205381870269775, + "learning_rate": 3.916576381365114e-06, + "loss": 0.2209, + "step": 6120 + }, + { + "epoch": 6.300102774922919, + "grad_norm": 3.495804786682129, + "learning_rate": 3.905742145178765e-06, + "loss": 0.2076, + "step": 6130 + }, + { + "epoch": 6.3103802672148, + "grad_norm": 2.691032886505127, + "learning_rate": 3.894907908992416e-06, + "loss": 0.1965, + "step": 6140 + }, + { + "epoch": 6.32065775950668, + "grad_norm": 3.958749771118164, + "learning_rate": 3.884073672806067e-06, + "loss": 0.2056, + "step": 6150 + }, + { + "epoch": 6.330935251798561, + "grad_norm": 2.556640386581421, + "learning_rate": 3.873239436619718e-06, + "loss": 0.215, + "step": 6160 + }, + { + "epoch": 6.341212744090442, + "grad_norm": 2.6547491550445557, + "learning_rate": 3.86240520043337e-06, + "loss": 0.1615, + "step": 6170 + }, + { + "epoch": 6.351490236382323, + "grad_norm": 2.9845190048217773, + "learning_rate": 3.851570964247021e-06, + "loss": 0.2045, + "step": 6180 + }, + { + "epoch": 6.361767728674203, + "grad_norm": 3.978686571121216, + "learning_rate": 3.840736728060672e-06, + "loss": 0.2107, + "step": 6190 + }, + { + "epoch": 6.3720452209660845, + "grad_norm": 2.142049551010132, + "learning_rate": 3.829902491874323e-06, + "loss": 0.2276, + "step": 6200 + }, + { + "epoch": 6.382322713257965, + "grad_norm": 2.729975938796997, + "learning_rate": 3.819068255687975e-06, + "loss": 0.2222, + "step": 6210 + }, + { + "epoch": 6.392600205549845, + "grad_norm": 3.538694381713867, + 
"learning_rate": 3.8082340195016253e-06, + "loss": 0.208, + "step": 6220 + }, + { + "epoch": 6.402877697841727, + "grad_norm": 2.490054130554199, + "learning_rate": 3.7973997833152767e-06, + "loss": 0.2149, + "step": 6230 + }, + { + "epoch": 6.413155190133607, + "grad_norm": 3.753293514251709, + "learning_rate": 3.7865655471289277e-06, + "loss": 0.1673, + "step": 6240 + }, + { + "epoch": 6.423432682425489, + "grad_norm": 3.622450828552246, + "learning_rate": 3.7757313109425787e-06, + "loss": 0.2291, + "step": 6250 + }, + { + "epoch": 6.433710174717369, + "grad_norm": 4.3948187828063965, + "learning_rate": 3.76489707475623e-06, + "loss": 0.2168, + "step": 6260 + }, + { + "epoch": 6.4439876670092495, + "grad_norm": 3.6386053562164307, + "learning_rate": 3.7540628385698815e-06, + "loss": 0.2269, + "step": 6270 + }, + { + "epoch": 6.454265159301131, + "grad_norm": 2.8120999336242676, + "learning_rate": 3.7432286023835325e-06, + "loss": 0.2137, + "step": 6280 + }, + { + "epoch": 6.464542651593011, + "grad_norm": 4.64008092880249, + "learning_rate": 3.7323943661971835e-06, + "loss": 0.2356, + "step": 6290 + }, + { + "epoch": 6.474820143884892, + "grad_norm": 3.2172248363494873, + "learning_rate": 3.7215601300108345e-06, + "loss": 0.2019, + "step": 6300 + }, + { + "epoch": 6.485097636176773, + "grad_norm": 3.1428282260894775, + "learning_rate": 3.7107258938244855e-06, + "loss": 0.1808, + "step": 6310 + }, + { + "epoch": 6.495375128468654, + "grad_norm": 4.095731735229492, + "learning_rate": 3.6998916576381365e-06, + "loss": 0.2001, + "step": 6320 + }, + { + "epoch": 6.505652620760534, + "grad_norm": 2.5641703605651855, + "learning_rate": 3.689057421451788e-06, + "loss": 0.2002, + "step": 6330 + }, + { + "epoch": 6.515930113052415, + "grad_norm": 2.615081787109375, + "learning_rate": 3.6782231852654393e-06, + "loss": 0.1668, + "step": 6340 + }, + { + "epoch": 6.526207605344296, + "grad_norm": 3.6635892391204834, + "learning_rate": 3.6673889490790903e-06, + "loss": 0.2371, + "step": 6350 + }, + { + "epoch": 6.536485097636177, + "grad_norm": 4.5991950035095215, + "learning_rate": 3.6565547128927413e-06, + "loss": 0.1814, + "step": 6360 + }, + { + "epoch": 6.546762589928058, + "grad_norm": 3.3164796829223633, + "learning_rate": 3.6457204767063927e-06, + "loss": 0.1804, + "step": 6370 + }, + { + "epoch": 6.557040082219938, + "grad_norm": 2.7094385623931885, + "learning_rate": 3.6348862405200437e-06, + "loss": 0.2325, + "step": 6380 + }, + { + "epoch": 6.567317574511819, + "grad_norm": 3.2538888454437256, + "learning_rate": 3.6240520043336947e-06, + "loss": 0.1925, + "step": 6390 + }, + { + "epoch": 6.5775950668037, + "grad_norm": 4.265585422515869, + "learning_rate": 3.6132177681473457e-06, + "loss": 0.2064, + "step": 6400 + }, + { + "epoch": 6.5878725590955804, + "grad_norm": 3.4931657314300537, + "learning_rate": 3.6023835319609967e-06, + "loss": 0.2144, + "step": 6410 + }, + { + "epoch": 6.598150051387462, + "grad_norm": 3.760213851928711, + "learning_rate": 3.5915492957746485e-06, + "loss": 0.1294, + "step": 6420 + }, + { + "epoch": 6.608427543679342, + "grad_norm": 2.018725872039795, + "learning_rate": 3.5807150595882995e-06, + "loss": 0.2067, + "step": 6430 + }, + { + "epoch": 6.618705035971223, + "grad_norm": 3.8554389476776123, + "learning_rate": 3.5698808234019505e-06, + "loss": 0.1834, + "step": 6440 + }, + { + "epoch": 6.628982528263104, + "grad_norm": 2.937488555908203, + "learning_rate": 3.5590465872156015e-06, + "loss": 0.193, + "step": 6450 + }, + { + "epoch": 6.6392600205549845, + 
"grad_norm": 3.228877067565918, + "learning_rate": 3.5482123510292525e-06, + "loss": 0.2365, + "step": 6460 + }, + { + "epoch": 6.649537512846865, + "grad_norm": 2.6487700939178467, + "learning_rate": 3.537378114842904e-06, + "loss": 0.2074, + "step": 6470 + }, + { + "epoch": 6.659815005138746, + "grad_norm": 1.9501376152038574, + "learning_rate": 3.526543878656555e-06, + "loss": 0.1831, + "step": 6480 + }, + { + "epoch": 6.670092497430627, + "grad_norm": 3.428683280944824, + "learning_rate": 3.5157096424702063e-06, + "loss": 0.1707, + "step": 6490 + }, + { + "epoch": 6.680369989722507, + "grad_norm": 4.162604808807373, + "learning_rate": 3.5048754062838573e-06, + "loss": 0.1814, + "step": 6500 + }, + { + "epoch": 6.690647482014389, + "grad_norm": 2.8636157512664795, + "learning_rate": 3.4940411700975087e-06, + "loss": 0.2169, + "step": 6510 + }, + { + "epoch": 6.700924974306269, + "grad_norm": 3.628262519836426, + "learning_rate": 3.4832069339111597e-06, + "loss": 0.2447, + "step": 6520 + }, + { + "epoch": 6.7112024665981505, + "grad_norm": 3.1727042198181152, + "learning_rate": 3.4723726977248107e-06, + "loss": 0.2149, + "step": 6530 + }, + { + "epoch": 6.721479958890031, + "grad_norm": 4.583132743835449, + "learning_rate": 3.4615384615384617e-06, + "loss": 0.1968, + "step": 6540 + }, + { + "epoch": 6.731757451181911, + "grad_norm": 2.8516876697540283, + "learning_rate": 3.4507042253521127e-06, + "loss": 0.1502, + "step": 6550 + }, + { + "epoch": 6.742034943473793, + "grad_norm": 3.4025397300720215, + "learning_rate": 3.4398699891657637e-06, + "loss": 0.2249, + "step": 6560 + }, + { + "epoch": 6.752312435765673, + "grad_norm": 2.9527840614318848, + "learning_rate": 3.4290357529794155e-06, + "loss": 0.1757, + "step": 6570 + }, + { + "epoch": 6.762589928057554, + "grad_norm": 3.4826791286468506, + "learning_rate": 3.4182015167930665e-06, + "loss": 0.1957, + "step": 6580 + }, + { + "epoch": 6.772867420349435, + "grad_norm": 2.293030023574829, + "learning_rate": 3.4073672806067175e-06, + "loss": 0.2031, + "step": 6590 + }, + { + "epoch": 6.7831449126413155, + "grad_norm": 2.7427804470062256, + "learning_rate": 3.3965330444203685e-06, + "loss": 0.1944, + "step": 6600 + }, + { + "epoch": 6.793422404933196, + "grad_norm": 2.6534154415130615, + "learning_rate": 3.38569880823402e-06, + "loss": 0.2157, + "step": 6610 + }, + { + "epoch": 6.803699897225077, + "grad_norm": 3.0126819610595703, + "learning_rate": 3.374864572047671e-06, + "loss": 0.204, + "step": 6620 + }, + { + "epoch": 6.813977389516958, + "grad_norm": 2.957345724105835, + "learning_rate": 3.364030335861322e-06, + "loss": 0.1765, + "step": 6630 + }, + { + "epoch": 6.824254881808839, + "grad_norm": 3.7311325073242188, + "learning_rate": 3.353196099674973e-06, + "loss": 0.2501, + "step": 6640 + }, + { + "epoch": 6.83453237410072, + "grad_norm": 3.535660743713379, + "learning_rate": 3.3423618634886247e-06, + "loss": 0.2223, + "step": 6650 + }, + { + "epoch": 6.8448098663926, + "grad_norm": 2.5555760860443115, + "learning_rate": 3.3315276273022757e-06, + "loss": 0.1979, + "step": 6660 + }, + { + "epoch": 6.8550873586844805, + "grad_norm": 3.8657376766204834, + "learning_rate": 3.3206933911159267e-06, + "loss": 0.1537, + "step": 6670 + }, + { + "epoch": 6.865364850976362, + "grad_norm": 3.7345712184906006, + "learning_rate": 3.3098591549295777e-06, + "loss": 0.1761, + "step": 6680 + }, + { + "epoch": 6.875642343268242, + "grad_norm": 3.04667329788208, + "learning_rate": 3.2990249187432287e-06, + "loss": 0.2655, + "step": 6690 + }, + { + 
"epoch": 6.885919835560124, + "grad_norm": 2.715324640274048, + "learning_rate": 3.2881906825568797e-06, + "loss": 0.2037, + "step": 6700 + }, + { + "epoch": 6.896197327852004, + "grad_norm": 3.4638121128082275, + "learning_rate": 3.277356446370531e-06, + "loss": 0.2189, + "step": 6710 + }, + { + "epoch": 6.906474820143885, + "grad_norm": 1.9942344427108765, + "learning_rate": 3.266522210184182e-06, + "loss": 0.2246, + "step": 6720 + }, + { + "epoch": 6.916752312435766, + "grad_norm": 4.648674011230469, + "learning_rate": 3.2556879739978335e-06, + "loss": 0.1811, + "step": 6730 + }, + { + "epoch": 6.927029804727646, + "grad_norm": 3.068770170211792, + "learning_rate": 3.2448537378114845e-06, + "loss": 0.1767, + "step": 6740 + }, + { + "epoch": 6.937307297019527, + "grad_norm": 4.408273696899414, + "learning_rate": 3.234019501625136e-06, + "loss": 0.1555, + "step": 6750 + }, + { + "epoch": 6.947584789311408, + "grad_norm": 2.8085215091705322, + "learning_rate": 3.223185265438787e-06, + "loss": 0.1882, + "step": 6760 + }, + { + "epoch": 6.957862281603289, + "grad_norm": 2.6439192295074463, + "learning_rate": 3.212351029252438e-06, + "loss": 0.188, + "step": 6770 + }, + { + "epoch": 6.968139773895169, + "grad_norm": 3.1821489334106445, + "learning_rate": 3.201516793066089e-06, + "loss": 0.1666, + "step": 6780 + }, + { + "epoch": 6.9784172661870505, + "grad_norm": 4.910435199737549, + "learning_rate": 3.19068255687974e-06, + "loss": 0.2278, + "step": 6790 + }, + { + "epoch": 6.988694758478931, + "grad_norm": 3.4465863704681396, + "learning_rate": 3.1798483206933913e-06, + "loss": 0.2256, + "step": 6800 + }, + { + "epoch": 6.998972250770812, + "grad_norm": 4.111727237701416, + "learning_rate": 3.1690140845070427e-06, + "loss": 0.1541, + "step": 6810 + }, + { + "epoch": 7.0, + "eval_loss": 0.3404325246810913, + "eval_runtime": 1214.8326, + "eval_samples_per_second": 4.83, + "eval_steps_per_second": 0.151, + "eval_wer": 0.3377271689125257, + "step": 6811 + }, + { + "epoch": 7.009249743062693, + "grad_norm": 2.5531973838806152, + "learning_rate": 3.1581798483206937e-06, + "loss": 0.1542, + "step": 6820 + }, + { + "epoch": 7.019527235354573, + "grad_norm": 2.626990556716919, + "learning_rate": 3.1473456121343447e-06, + "loss": 0.2472, + "step": 6830 + }, + { + "epoch": 7.029804727646455, + "grad_norm": 3.149609327316284, + "learning_rate": 3.136511375947996e-06, + "loss": 0.1635, + "step": 6840 + }, + { + "epoch": 7.040082219938335, + "grad_norm": 2.6103062629699707, + "learning_rate": 3.125677139761647e-06, + "loss": 0.1614, + "step": 6850 + }, + { + "epoch": 7.0503597122302155, + "grad_norm": 3.6957461833953857, + "learning_rate": 3.114842903575298e-06, + "loss": 0.2344, + "step": 6860 + }, + { + "epoch": 7.060637204522097, + "grad_norm": 3.363518238067627, + "learning_rate": 3.104008667388949e-06, + "loss": 0.1342, + "step": 6870 + }, + { + "epoch": 7.070914696813977, + "grad_norm": 4.158355236053467, + "learning_rate": 3.093174431202601e-06, + "loss": 0.2523, + "step": 6880 + }, + { + "epoch": 7.081192189105858, + "grad_norm": 2.6004796028137207, + "learning_rate": 3.082340195016252e-06, + "loss": 0.1161, + "step": 6890 + }, + { + "epoch": 7.091469681397739, + "grad_norm": 2.7090814113616943, + "learning_rate": 3.071505958829903e-06, + "loss": 0.1893, + "step": 6900 + }, + { + "epoch": 7.10174717368962, + "grad_norm": 3.5262081623077393, + "learning_rate": 3.060671722643554e-06, + "loss": 0.2165, + "step": 6910 + }, + { + "epoch": 7.112024665981501, + "grad_norm": 4.6180267333984375, + 
"learning_rate": 3.049837486457205e-06, + "loss": 0.1831, + "step": 6920 + }, + { + "epoch": 7.122302158273381, + "grad_norm": 3.9975624084472656, + "learning_rate": 3.039003250270856e-06, + "loss": 0.175, + "step": 6930 + }, + { + "epoch": 7.132579650565262, + "grad_norm": 4.310418605804443, + "learning_rate": 3.0281690140845073e-06, + "loss": 0.2098, + "step": 6940 + }, + { + "epoch": 7.142857142857143, + "grad_norm": 2.2856907844543457, + "learning_rate": 3.0173347778981583e-06, + "loss": 0.189, + "step": 6950 + }, + { + "epoch": 7.153134635149024, + "grad_norm": 3.758925199508667, + "learning_rate": 3.0065005417118097e-06, + "loss": 0.2086, + "step": 6960 + }, + { + "epoch": 7.163412127440904, + "grad_norm": 4.2040252685546875, + "learning_rate": 2.9956663055254607e-06, + "loss": 0.2719, + "step": 6970 + }, + { + "epoch": 7.1736896197327855, + "grad_norm": 3.452918767929077, + "learning_rate": 2.984832069339112e-06, + "loss": 0.1471, + "step": 6980 + }, + { + "epoch": 7.183967112024666, + "grad_norm": 3.493724822998047, + "learning_rate": 2.973997833152763e-06, + "loss": 0.1716, + "step": 6990 + }, + { + "epoch": 7.194244604316546, + "grad_norm": 2.078178882598877, + "learning_rate": 2.963163596966414e-06, + "loss": 0.2764, + "step": 7000 + }, + { + "epoch": 7.204522096608428, + "grad_norm": 3.1027865409851074, + "learning_rate": 2.952329360780065e-06, + "loss": 0.22, + "step": 7010 + }, + { + "epoch": 7.214799588900308, + "grad_norm": 3.700126886367798, + "learning_rate": 2.941495124593716e-06, + "loss": 0.1683, + "step": 7020 + }, + { + "epoch": 7.225077081192189, + "grad_norm": 3.756478786468506, + "learning_rate": 2.930660888407367e-06, + "loss": 0.2066, + "step": 7030 + }, + { + "epoch": 7.23535457348407, + "grad_norm": 3.128451108932495, + "learning_rate": 2.919826652221019e-06, + "loss": 0.1784, + "step": 7040 + }, + { + "epoch": 7.2456320657759505, + "grad_norm": 3.994805097579956, + "learning_rate": 2.90899241603467e-06, + "loss": 0.157, + "step": 7050 + }, + { + "epoch": 7.255909558067831, + "grad_norm": 4.610233783721924, + "learning_rate": 2.898158179848321e-06, + "loss": 0.2441, + "step": 7060 + }, + { + "epoch": 7.266187050359712, + "grad_norm": 2.6469390392303467, + "learning_rate": 2.887323943661972e-06, + "loss": 0.1671, + "step": 7070 + }, + { + "epoch": 7.276464542651593, + "grad_norm": 2.4186360836029053, + "learning_rate": 2.8764897074756233e-06, + "loss": 0.1811, + "step": 7080 + }, + { + "epoch": 7.286742034943474, + "grad_norm": 1.9925665855407715, + "learning_rate": 2.8656554712892743e-06, + "loss": 0.222, + "step": 7090 + }, + { + "epoch": 7.297019527235355, + "grad_norm": 3.5605309009552, + "learning_rate": 2.8548212351029253e-06, + "loss": 0.197, + "step": 7100 + }, + { + "epoch": 7.307297019527235, + "grad_norm": 2.999796152114868, + "learning_rate": 2.8439869989165763e-06, + "loss": 0.2111, + "step": 7110 + }, + { + "epoch": 7.3175745118191164, + "grad_norm": 4.410515308380127, + "learning_rate": 2.833152762730228e-06, + "loss": 0.1883, + "step": 7120 + }, + { + "epoch": 7.327852004110997, + "grad_norm": 2.7855818271636963, + "learning_rate": 2.822318526543879e-06, + "loss": 0.2016, + "step": 7130 + }, + { + "epoch": 7.338129496402877, + "grad_norm": 3.494032859802246, + "learning_rate": 2.81148429035753e-06, + "loss": 0.2146, + "step": 7140 + }, + { + "epoch": 7.348406988694759, + "grad_norm": 2.8652873039245605, + "learning_rate": 2.800650054171181e-06, + "loss": 0.1437, + "step": 7150 + }, + { + "epoch": 7.358684480986639, + "grad_norm": 
2.307541847229004, + "learning_rate": 2.789815817984832e-06, + "loss": 0.1561, + "step": 7160 + }, + { + "epoch": 7.36896197327852, + "grad_norm": 2.4155330657958984, + "learning_rate": 2.778981581798483e-06, + "loss": 0.1579, + "step": 7170 + }, + { + "epoch": 7.379239465570401, + "grad_norm": 2.9460861682891846, + "learning_rate": 2.7681473456121345e-06, + "loss": 0.1708, + "step": 7180 + }, + { + "epoch": 7.3895169578622815, + "grad_norm": 3.845841407775879, + "learning_rate": 2.7573131094257855e-06, + "loss": 0.2023, + "step": 7190 + }, + { + "epoch": 7.399794450154163, + "grad_norm": 3.194304943084717, + "learning_rate": 2.746478873239437e-06, + "loss": 0.187, + "step": 7200 + }, + { + "epoch": 7.410071942446043, + "grad_norm": 3.090686559677124, + "learning_rate": 2.735644637053088e-06, + "loss": 0.2484, + "step": 7210 + }, + { + "epoch": 7.420349434737924, + "grad_norm": 2.5463943481445312, + "learning_rate": 2.7248104008667394e-06, + "loss": 0.1819, + "step": 7220 + }, + { + "epoch": 7.430626927029805, + "grad_norm": 3.6043484210968018, + "learning_rate": 2.7139761646803903e-06, + "loss": 0.178, + "step": 7230 + }, + { + "epoch": 7.440904419321686, + "grad_norm": 2.053682804107666, + "learning_rate": 2.7031419284940413e-06, + "loss": 0.1712, + "step": 7240 + }, + { + "epoch": 7.451181911613566, + "grad_norm": 3.977004051208496, + "learning_rate": 2.6923076923076923e-06, + "loss": 0.1217, + "step": 7250 + }, + { + "epoch": 7.461459403905447, + "grad_norm": 3.555269479751587, + "learning_rate": 2.6814734561213433e-06, + "loss": 0.1494, + "step": 7260 + }, + { + "epoch": 7.471736896197328, + "grad_norm": 3.3504462242126465, + "learning_rate": 2.6706392199349947e-06, + "loss": 0.2055, + "step": 7270 + }, + { + "epoch": 7.482014388489208, + "grad_norm": 3.7585649490356445, + "learning_rate": 2.659804983748646e-06, + "loss": 0.2169, + "step": 7280 + }, + { + "epoch": 7.49229188078109, + "grad_norm": 3.8398501873016357, + "learning_rate": 2.648970747562297e-06, + "loss": 0.2372, + "step": 7290 + }, + { + "epoch": 7.50256937307297, + "grad_norm": 3.8778791427612305, + "learning_rate": 2.638136511375948e-06, + "loss": 0.2498, + "step": 7300 + }, + { + "epoch": 7.5128468653648515, + "grad_norm": 4.305149555206299, + "learning_rate": 2.6273022751895996e-06, + "loss": 0.2394, + "step": 7310 + }, + { + "epoch": 7.523124357656732, + "grad_norm": 2.9688165187835693, + "learning_rate": 2.6164680390032506e-06, + "loss": 0.2236, + "step": 7320 + }, + { + "epoch": 7.533401849948612, + "grad_norm": 3.433166265487671, + "learning_rate": 2.6056338028169015e-06, + "loss": 0.252, + "step": 7330 + }, + { + "epoch": 7.543679342240493, + "grad_norm": 6.375361919403076, + "learning_rate": 2.5947995666305525e-06, + "loss": 0.1974, + "step": 7340 + }, + { + "epoch": 7.553956834532374, + "grad_norm": 3.1338272094726562, + "learning_rate": 2.5839653304442044e-06, + "loss": 0.1581, + "step": 7350 + }, + { + "epoch": 7.564234326824255, + "grad_norm": 3.2185239791870117, + "learning_rate": 2.5731310942578554e-06, + "loss": 0.2223, + "step": 7360 + }, + { + "epoch": 7.574511819116136, + "grad_norm": 3.0594232082366943, + "learning_rate": 2.5622968580715064e-06, + "loss": 0.1841, + "step": 7370 + }, + { + "epoch": 7.5847893114080165, + "grad_norm": 3.161983013153076, + "learning_rate": 2.5514626218851574e-06, + "loss": 0.168, + "step": 7380 + }, + { + "epoch": 7.595066803699897, + "grad_norm": 3.0678579807281494, + "learning_rate": 2.5406283856988083e-06, + "loss": 0.1824, + "step": 7390 + }, + { + "epoch": 
7.605344295991778, + "grad_norm": 2.438215494155884, + "learning_rate": 2.5297941495124593e-06, + "loss": 0.1585, + "step": 7400 + }, + { + "epoch": 7.615621788283659, + "grad_norm": 3.349719285964966, + "learning_rate": 2.5189599133261108e-06, + "loss": 0.1897, + "step": 7410 + }, + { + "epoch": 7.625899280575539, + "grad_norm": 2.981184720993042, + "learning_rate": 2.5081256771397617e-06, + "loss": 0.1652, + "step": 7420 + }, + { + "epoch": 7.636176772867421, + "grad_norm": 3.5360848903656006, + "learning_rate": 2.497291440953413e-06, + "loss": 0.1877, + "step": 7430 + }, + { + "epoch": 7.646454265159301, + "grad_norm": 3.8278608322143555, + "learning_rate": 2.486457204767064e-06, + "loss": 0.2073, + "step": 7440 + }, + { + "epoch": 7.6567317574511815, + "grad_norm": 3.7580535411834717, + "learning_rate": 2.4756229685807156e-06, + "loss": 0.2043, + "step": 7450 + }, + { + "epoch": 7.667009249743063, + "grad_norm": 2.7531728744506836, + "learning_rate": 2.4647887323943666e-06, + "loss": 0.2109, + "step": 7460 + }, + { + "epoch": 7.677286742034943, + "grad_norm": 3.5091471672058105, + "learning_rate": 2.4539544962080176e-06, + "loss": 0.2381, + "step": 7470 + }, + { + "epoch": 7.687564234326825, + "grad_norm": 3.000122547149658, + "learning_rate": 2.4431202600216686e-06, + "loss": 0.2226, + "step": 7480 + }, + { + "epoch": 7.697841726618705, + "grad_norm": 2.8081085681915283, + "learning_rate": 2.43228602383532e-06, + "loss": 0.1817, + "step": 7490 + }, + { + "epoch": 7.708119218910586, + "grad_norm": 3.1888771057128906, + "learning_rate": 2.421451787648971e-06, + "loss": 0.1591, + "step": 7500 + }, + { + "epoch": 7.718396711202467, + "grad_norm": 2.8959813117980957, + "learning_rate": 2.410617551462622e-06, + "loss": 0.1577, + "step": 7510 + }, + { + "epoch": 7.728674203494347, + "grad_norm": 3.66943359375, + "learning_rate": 2.399783315276273e-06, + "loss": 0.2263, + "step": 7520 + }, + { + "epoch": 7.738951695786228, + "grad_norm": 2.4040560722351074, + "learning_rate": 2.3889490790899244e-06, + "loss": 0.1604, + "step": 7530 + }, + { + "epoch": 7.749229188078109, + "grad_norm": 3.37458872795105, + "learning_rate": 2.3781148429035754e-06, + "loss": 0.1309, + "step": 7540 + }, + { + "epoch": 7.75950668036999, + "grad_norm": 3.064039707183838, + "learning_rate": 2.3672806067172268e-06, + "loss": 0.1594, + "step": 7550 + }, + { + "epoch": 7.76978417266187, + "grad_norm": 3.4622888565063477, + "learning_rate": 2.3564463705308778e-06, + "loss": 0.1117, + "step": 7560 + }, + { + "epoch": 7.7800616649537515, + "grad_norm": 4.166457176208496, + "learning_rate": 2.345612134344529e-06, + "loss": 0.1856, + "step": 7570 + }, + { + "epoch": 7.790339157245632, + "grad_norm": 4.1613054275512695, + "learning_rate": 2.33477789815818e-06, + "loss": 0.1904, + "step": 7580 + }, + { + "epoch": 7.800616649537513, + "grad_norm": 2.1978330612182617, + "learning_rate": 2.323943661971831e-06, + "loss": 0.1733, + "step": 7590 + }, + { + "epoch": 7.810894141829394, + "grad_norm": 3.197047472000122, + "learning_rate": 2.313109425785482e-06, + "loss": 0.2034, + "step": 7600 + }, + { + "epoch": 7.821171634121274, + "grad_norm": 3.35565447807312, + "learning_rate": 2.3022751895991336e-06, + "loss": 0.2322, + "step": 7610 + }, + { + "epoch": 7.831449126413155, + "grad_norm": 2.9416370391845703, + "learning_rate": 2.2914409534127846e-06, + "loss": 0.1569, + "step": 7620 + }, + { + "epoch": 7.841726618705036, + "grad_norm": 3.303370952606201, + "learning_rate": 2.2806067172264356e-06, + "loss": 0.1698, + "step": 7630 + 
}, + { + "epoch": 7.8520041109969165, + "grad_norm": 3.4625742435455322, + "learning_rate": 2.2697724810400866e-06, + "loss": 0.1802, + "step": 7640 + }, + { + "epoch": 7.862281603288798, + "grad_norm": 2.0214016437530518, + "learning_rate": 2.258938244853738e-06, + "loss": 0.1632, + "step": 7650 + }, + { + "epoch": 7.872559095580678, + "grad_norm": 4.070474624633789, + "learning_rate": 2.248104008667389e-06, + "loss": 0.211, + "step": 7660 + }, + { + "epoch": 7.882836587872559, + "grad_norm": 4.477987766265869, + "learning_rate": 2.2372697724810404e-06, + "loss": 0.2202, + "step": 7670 + }, + { + "epoch": 7.89311408016444, + "grad_norm": 3.7259104251861572, + "learning_rate": 2.2264355362946914e-06, + "loss": 0.2256, + "step": 7680 + }, + { + "epoch": 7.903391572456321, + "grad_norm": 2.9181573390960693, + "learning_rate": 2.2156013001083428e-06, + "loss": 0.1601, + "step": 7690 + }, + { + "epoch": 7.913669064748201, + "grad_norm": 5.0646138191223145, + "learning_rate": 2.2047670639219938e-06, + "loss": 0.19, + "step": 7700 + }, + { + "epoch": 7.923946557040082, + "grad_norm": 2.449718952178955, + "learning_rate": 2.1939328277356448e-06, + "loss": 0.1684, + "step": 7710 + }, + { + "epoch": 7.934224049331963, + "grad_norm": 2.8502633571624756, + "learning_rate": 2.1830985915492958e-06, + "loss": 0.1802, + "step": 7720 + }, + { + "epoch": 7.944501541623843, + "grad_norm": 3.0859627723693848, + "learning_rate": 2.172264355362947e-06, + "loss": 0.2066, + "step": 7730 + }, + { + "epoch": 7.954779033915725, + "grad_norm": 3.228625774383545, + "learning_rate": 2.161430119176598e-06, + "loss": 0.1639, + "step": 7740 + }, + { + "epoch": 7.965056526207605, + "grad_norm": 4.516101360321045, + "learning_rate": 2.150595882990249e-06, + "loss": 0.2496, + "step": 7750 + }, + { + "epoch": 7.9753340184994865, + "grad_norm": 3.3014371395111084, + "learning_rate": 2.1397616468039006e-06, + "loss": 0.1935, + "step": 7760 + }, + { + "epoch": 7.985611510791367, + "grad_norm": 3.219586133956909, + "learning_rate": 2.1289274106175516e-06, + "loss": 0.1628, + "step": 7770 + }, + { + "epoch": 7.9958890030832475, + "grad_norm": 2.924616813659668, + "learning_rate": 2.1180931744312026e-06, + "loss": 0.1387, + "step": 7780 + }, + { + "epoch": 8.0, + "eval_loss": 0.337012380361557, + "eval_runtime": 1074.5571, + "eval_samples_per_second": 5.461, + "eval_steps_per_second": 0.171, + "eval_wer": 0.31961891089431954, + "step": 7784 + }, + { + "epoch": 8.006166495375128, + "grad_norm": 3.2360055446624756, + "learning_rate": 2.107258938244854e-06, + "loss": 0.1754, + "step": 7790 + }, + { + "epoch": 8.016443987667008, + "grad_norm": 3.3808040618896484, + "learning_rate": 2.096424702058505e-06, + "loss": 0.1478, + "step": 7800 + }, + { + "epoch": 8.02672147995889, + "grad_norm": 3.0137150287628174, + "learning_rate": 2.0855904658721564e-06, + "loss": 0.16, + "step": 7810 + }, + { + "epoch": 8.036998972250771, + "grad_norm": 3.083184003829956, + "learning_rate": 2.0747562296858074e-06, + "loss": 0.2265, + "step": 7820 + }, + { + "epoch": 8.047276464542652, + "grad_norm": 3.101036310195923, + "learning_rate": 2.0639219934994584e-06, + "loss": 0.2517, + "step": 7830 + }, + { + "epoch": 8.057553956834532, + "grad_norm": 2.7504403591156006, + "learning_rate": 2.05308775731311e-06, + "loss": 0.147, + "step": 7840 + }, + { + "epoch": 8.067831449126412, + "grad_norm": 2.818974733352661, + "learning_rate": 2.0422535211267608e-06, + "loss": 0.1647, + "step": 7850 + }, + { + "epoch": 8.078108941418295, + "grad_norm": 
2.7966461181640625, + "learning_rate": 2.0314192849404118e-06, + "loss": 0.1488, + "step": 7860 + }, + { + "epoch": 8.088386433710175, + "grad_norm": 2.811796188354492, + "learning_rate": 2.0205850487540628e-06, + "loss": 0.1302, + "step": 7870 + }, + { + "epoch": 8.098663926002056, + "grad_norm": 3.827772617340088, + "learning_rate": 2.009750812567714e-06, + "loss": 0.2351, + "step": 7880 + }, + { + "epoch": 8.108941418293936, + "grad_norm": 2.867687463760376, + "learning_rate": 1.998916576381365e-06, + "loss": 0.1956, + "step": 7890 + }, + { + "epoch": 8.119218910585817, + "grad_norm": 4.618106365203857, + "learning_rate": 1.9880823401950166e-06, + "loss": 0.24, + "step": 7900 + }, + { + "epoch": 8.129496402877697, + "grad_norm": 4.213103771209717, + "learning_rate": 1.9772481040086676e-06, + "loss": 0.1727, + "step": 7910 + }, + { + "epoch": 8.13977389516958, + "grad_norm": 2.7897140979766846, + "learning_rate": 1.966413867822319e-06, + "loss": 0.1638, + "step": 7920 + }, + { + "epoch": 8.15005138746146, + "grad_norm": 4.621983528137207, + "learning_rate": 1.95557963163597e-06, + "loss": 0.2088, + "step": 7930 + }, + { + "epoch": 8.16032887975334, + "grad_norm": 4.140936374664307, + "learning_rate": 1.944745395449621e-06, + "loss": 0.1728, + "step": 7940 + }, + { + "epoch": 8.17060637204522, + "grad_norm": 4.0776872634887695, + "learning_rate": 1.933911159263272e-06, + "loss": 0.1286, + "step": 7950 + }, + { + "epoch": 8.180883864337101, + "grad_norm": 2.705868721008301, + "learning_rate": 1.9230769230769234e-06, + "loss": 0.1931, + "step": 7960 + }, + { + "epoch": 8.191161356628983, + "grad_norm": 1.8853975534439087, + "learning_rate": 1.9122426868905744e-06, + "loss": 0.1576, + "step": 7970 + }, + { + "epoch": 8.201438848920864, + "grad_norm": 2.564020872116089, + "learning_rate": 1.9014084507042254e-06, + "loss": 0.1561, + "step": 7980 + }, + { + "epoch": 8.211716341212744, + "grad_norm": 2.386718988418579, + "learning_rate": 1.8905742145178766e-06, + "loss": 0.1759, + "step": 7990 + }, + { + "epoch": 8.221993833504625, + "grad_norm": 2.7982938289642334, + "learning_rate": 1.8797399783315278e-06, + "loss": 0.1265, + "step": 8000 + }, + { + "epoch": 8.232271325796505, + "grad_norm": 3.071965456008911, + "learning_rate": 1.868905742145179e-06, + "loss": 0.1538, + "step": 8010 + }, + { + "epoch": 8.242548818088386, + "grad_norm": 3.2080135345458984, + "learning_rate": 1.85807150595883e-06, + "loss": 0.1783, + "step": 8020 + }, + { + "epoch": 8.252826310380268, + "grad_norm": 3.6915481090545654, + "learning_rate": 1.847237269772481e-06, + "loss": 0.1344, + "step": 8030 + }, + { + "epoch": 8.263103802672148, + "grad_norm": 2.8961260318756104, + "learning_rate": 1.8364030335861324e-06, + "loss": 0.2134, + "step": 8040 + }, + { + "epoch": 8.273381294964029, + "grad_norm": 2.1078479290008545, + "learning_rate": 1.8255687973997834e-06, + "loss": 0.1981, + "step": 8050 + }, + { + "epoch": 8.28365878725591, + "grad_norm": 3.1685969829559326, + "learning_rate": 1.8147345612134346e-06, + "loss": 0.2123, + "step": 8060 + }, + { + "epoch": 8.29393627954779, + "grad_norm": 2.517881155014038, + "learning_rate": 1.8039003250270856e-06, + "loss": 0.1796, + "step": 8070 + }, + { + "epoch": 8.30421377183967, + "grad_norm": 3.021322250366211, + "learning_rate": 1.793066088840737e-06, + "loss": 0.1674, + "step": 8080 + }, + { + "epoch": 8.314491264131552, + "grad_norm": 2.5098516941070557, + "learning_rate": 1.782231852654388e-06, + "loss": 0.1511, + "step": 8090 + }, + { + "epoch": 8.324768756423433, + 
"grad_norm": 1.9316967725753784, + "learning_rate": 1.7713976164680392e-06, + "loss": 0.1702, + "step": 8100 + }, + { + "epoch": 8.335046248715313, + "grad_norm": 4.33780574798584, + "learning_rate": 1.7605633802816902e-06, + "loss": 0.2348, + "step": 8110 + }, + { + "epoch": 8.345323741007194, + "grad_norm": 2.2334718704223633, + "learning_rate": 1.7497291440953416e-06, + "loss": 0.2045, + "step": 8120 + }, + { + "epoch": 8.355601233299074, + "grad_norm": 4.0230817794799805, + "learning_rate": 1.7388949079089926e-06, + "loss": 0.1708, + "step": 8130 + }, + { + "epoch": 8.365878725590957, + "grad_norm": 4.178952693939209, + "learning_rate": 1.7280606717226436e-06, + "loss": 0.2161, + "step": 8140 + }, + { + "epoch": 8.376156217882837, + "grad_norm": 4.08049201965332, + "learning_rate": 1.7172264355362948e-06, + "loss": 0.1663, + "step": 8150 + }, + { + "epoch": 8.386433710174718, + "grad_norm": 2.8087127208709717, + "learning_rate": 1.706392199349946e-06, + "loss": 0.1395, + "step": 8160 + }, + { + "epoch": 8.396711202466598, + "grad_norm": 2.3792827129364014, + "learning_rate": 1.6955579631635972e-06, + "loss": 0.1782, + "step": 8170 + }, + { + "epoch": 8.406988694758478, + "grad_norm": 2.47105073928833, + "learning_rate": 1.6847237269772482e-06, + "loss": 0.1244, + "step": 8180 + }, + { + "epoch": 8.417266187050359, + "grad_norm": 2.740131139755249, + "learning_rate": 1.6738894907908992e-06, + "loss": 0.1589, + "step": 8190 + }, + { + "epoch": 8.427543679342241, + "grad_norm": 3.5961389541625977, + "learning_rate": 1.6630552546045506e-06, + "loss": 0.2159, + "step": 8200 + }, + { + "epoch": 8.437821171634122, + "grad_norm": 2.2895331382751465, + "learning_rate": 1.6522210184182016e-06, + "loss": 0.2396, + "step": 8210 + }, + { + "epoch": 8.448098663926002, + "grad_norm": 2.7489428520202637, + "learning_rate": 1.6413867822318528e-06, + "loss": 0.1571, + "step": 8220 + }, + { + "epoch": 8.458376156217883, + "grad_norm": 2.2983639240264893, + "learning_rate": 1.630552546045504e-06, + "loss": 0.1638, + "step": 8230 + }, + { + "epoch": 8.468653648509763, + "grad_norm": 4.014866352081299, + "learning_rate": 1.6197183098591552e-06, + "loss": 0.1661, + "step": 8240 + }, + { + "epoch": 8.478931140801645, + "grad_norm": 3.341618299484253, + "learning_rate": 1.6088840736728062e-06, + "loss": 0.1608, + "step": 8250 + }, + { + "epoch": 8.489208633093526, + "grad_norm": 2.8683981895446777, + "learning_rate": 1.5980498374864572e-06, + "loss": 0.1725, + "step": 8260 + }, + { + "epoch": 8.499486125385406, + "grad_norm": 2.0408174991607666, + "learning_rate": 1.5872156013001086e-06, + "loss": 0.1465, + "step": 8270 + }, + { + "epoch": 8.509763617677287, + "grad_norm": 3.999898910522461, + "learning_rate": 1.5763813651137596e-06, + "loss": 0.2527, + "step": 8280 + }, + { + "epoch": 8.520041109969167, + "grad_norm": 1.6335963010787964, + "learning_rate": 1.5655471289274108e-06, + "loss": 0.2212, + "step": 8290 + }, + { + "epoch": 8.530318602261048, + "grad_norm": 3.973618268966675, + "learning_rate": 1.5547128927410618e-06, + "loss": 0.2156, + "step": 8300 + }, + { + "epoch": 8.54059609455293, + "grad_norm": 3.429445743560791, + "learning_rate": 1.5438786565547132e-06, + "loss": 0.1736, + "step": 8310 + }, + { + "epoch": 8.55087358684481, + "grad_norm": 3.0188443660736084, + "learning_rate": 1.5330444203683642e-06, + "loss": 0.2454, + "step": 8320 + }, + { + "epoch": 8.56115107913669, + "grad_norm": 3.69152569770813, + "learning_rate": 1.5222101841820152e-06, + "loss": 0.1734, + "step": 8330 + }, + { + 
"epoch": 8.571428571428571, + "grad_norm": 2.118117570877075, + "learning_rate": 1.5113759479956664e-06, + "loss": 0.1258, + "step": 8340 + }, + { + "epoch": 8.581706063720452, + "grad_norm": 3.0305957794189453, + "learning_rate": 1.5005417118093176e-06, + "loss": 0.1582, + "step": 8350 + }, + { + "epoch": 8.591983556012334, + "grad_norm": 3.0334131717681885, + "learning_rate": 1.4897074756229688e-06, + "loss": 0.2074, + "step": 8360 + }, + { + "epoch": 8.602261048304214, + "grad_norm": 4.964859962463379, + "learning_rate": 1.4788732394366198e-06, + "loss": 0.13, + "step": 8370 + }, + { + "epoch": 8.612538540596095, + "grad_norm": 2.461299180984497, + "learning_rate": 1.4680390032502708e-06, + "loss": 0.1735, + "step": 8380 + }, + { + "epoch": 8.622816032887975, + "grad_norm": 3.70139217376709, + "learning_rate": 1.4572047670639222e-06, + "loss": 0.204, + "step": 8390 + }, + { + "epoch": 8.633093525179856, + "grad_norm": 3.2094924449920654, + "learning_rate": 1.4463705308775732e-06, + "loss": 0.1603, + "step": 8400 + }, + { + "epoch": 8.643371017471736, + "grad_norm": 2.1742327213287354, + "learning_rate": 1.4355362946912244e-06, + "loss": 0.1881, + "step": 8410 + }, + { + "epoch": 8.653648509763618, + "grad_norm": 4.349330425262451, + "learning_rate": 1.4247020585048754e-06, + "loss": 0.2027, + "step": 8420 + }, + { + "epoch": 8.663926002055499, + "grad_norm": 2.7261245250701904, + "learning_rate": 1.4138678223185268e-06, + "loss": 0.1838, + "step": 8430 + }, + { + "epoch": 8.67420349434738, + "grad_norm": 3.8358700275421143, + "learning_rate": 1.4030335861321778e-06, + "loss": 0.164, + "step": 8440 + }, + { + "epoch": 8.68448098663926, + "grad_norm": 3.345456600189209, + "learning_rate": 1.3921993499458288e-06, + "loss": 0.2412, + "step": 8450 + }, + { + "epoch": 8.69475847893114, + "grad_norm": 4.2143235206604, + "learning_rate": 1.38136511375948e-06, + "loss": 0.1478, + "step": 8460 + }, + { + "epoch": 8.70503597122302, + "grad_norm": 4.512454032897949, + "learning_rate": 1.3705308775731312e-06, + "loss": 0.2299, + "step": 8470 + }, + { + "epoch": 8.715313463514903, + "grad_norm": 4.468479156494141, + "learning_rate": 1.3596966413867824e-06, + "loss": 0.2131, + "step": 8480 + }, + { + "epoch": 8.725590955806783, + "grad_norm": 2.5895509719848633, + "learning_rate": 1.3488624052004334e-06, + "loss": 0.1332, + "step": 8490 + }, + { + "epoch": 8.735868448098664, + "grad_norm": 3.56890606880188, + "learning_rate": 1.3380281690140844e-06, + "loss": 0.2086, + "step": 8500 + }, + { + "epoch": 8.746145940390544, + "grad_norm": 2.2950034141540527, + "learning_rate": 1.3271939328277358e-06, + "loss": 0.1424, + "step": 8510 + }, + { + "epoch": 8.756423432682425, + "grad_norm": 3.012187957763672, + "learning_rate": 1.3163596966413868e-06, + "loss": 0.1308, + "step": 8520 + }, + { + "epoch": 8.766700924974307, + "grad_norm": 3.362175464630127, + "learning_rate": 1.305525460455038e-06, + "loss": 0.1948, + "step": 8530 + }, + { + "epoch": 8.776978417266188, + "grad_norm": 2.3637940883636475, + "learning_rate": 1.294691224268689e-06, + "loss": 0.1984, + "step": 8540 + }, + { + "epoch": 8.787255909558068, + "grad_norm": 2.958972215652466, + "learning_rate": 1.2838569880823404e-06, + "loss": 0.2032, + "step": 8550 + }, + { + "epoch": 8.797533401849948, + "grad_norm": 3.2505674362182617, + "learning_rate": 1.2730227518959914e-06, + "loss": 0.205, + "step": 8560 + }, + { + "epoch": 8.807810894141829, + "grad_norm": 2.259211301803589, + "learning_rate": 1.2621885157096424e-06, + "loss": 0.1454, + "step": 
8570 + }, + { + "epoch": 8.81808838643371, + "grad_norm": 2.959885358810425, + "learning_rate": 1.2513542795232936e-06, + "loss": 0.1514, + "step": 8580 + }, + { + "epoch": 8.828365878725592, + "grad_norm": 3.410799980163574, + "learning_rate": 1.2405200433369448e-06, + "loss": 0.1741, + "step": 8590 + }, + { + "epoch": 8.838643371017472, + "grad_norm": 2.8411593437194824, + "learning_rate": 1.229685807150596e-06, + "loss": 0.1531, + "step": 8600 + }, + { + "epoch": 8.848920863309353, + "grad_norm": 4.301185607910156, + "learning_rate": 1.218851570964247e-06, + "loss": 0.167, + "step": 8610 + }, + { + "epoch": 8.859198355601233, + "grad_norm": 3.080378770828247, + "learning_rate": 1.2080173347778982e-06, + "loss": 0.1387, + "step": 8620 + }, + { + "epoch": 8.869475847893113, + "grad_norm": 2.687458038330078, + "learning_rate": 1.1971830985915492e-06, + "loss": 0.1533, + "step": 8630 + }, + { + "epoch": 8.879753340184994, + "grad_norm": 1.749852180480957, + "learning_rate": 1.1863488624052004e-06, + "loss": 0.1726, + "step": 8640 + }, + { + "epoch": 8.890030832476876, + "grad_norm": 3.142988920211792, + "learning_rate": 1.1755146262188516e-06, + "loss": 0.1886, + "step": 8650 + }, + { + "epoch": 8.900308324768757, + "grad_norm": 3.1398189067840576, + "learning_rate": 1.1646803900325028e-06, + "loss": 0.1674, + "step": 8660 + }, + { + "epoch": 8.910585817060637, + "grad_norm": 4.18948221206665, + "learning_rate": 1.153846153846154e-06, + "loss": 0.1637, + "step": 8670 + }, + { + "epoch": 8.920863309352518, + "grad_norm": 2.723585844039917, + "learning_rate": 1.143011917659805e-06, + "loss": 0.1851, + "step": 8680 + }, + { + "epoch": 8.931140801644398, + "grad_norm": 2.2024054527282715, + "learning_rate": 1.1321776814734562e-06, + "loss": 0.2011, + "step": 8690 + }, + { + "epoch": 8.94141829393628, + "grad_norm": 3.5099658966064453, + "learning_rate": 1.1224268689057424e-06, + "loss": 0.2067, + "step": 8700 + }, + { + "epoch": 8.95169578622816, + "grad_norm": 3.7603750228881836, + "learning_rate": 1.1115926327193934e-06, + "loss": 0.2005, + "step": 8710 + }, + { + "epoch": 8.961973278520041, + "grad_norm": 2.5853493213653564, + "learning_rate": 1.1007583965330446e-06, + "loss": 0.1571, + "step": 8720 + }, + { + "epoch": 8.972250770811922, + "grad_norm": 3.06177020072937, + "learning_rate": 1.0899241603466956e-06, + "loss": 0.1363, + "step": 8730 + }, + { + "epoch": 8.982528263103802, + "grad_norm": 2.5474419593811035, + "learning_rate": 1.0790899241603468e-06, + "loss": 0.1746, + "step": 8740 + }, + { + "epoch": 8.992805755395683, + "grad_norm": 3.2577226161956787, + "learning_rate": 1.068255687973998e-06, + "loss": 0.1554, + "step": 8750 + }, + { + "epoch": 9.0, + "eval_loss": 0.338668555021286, + "eval_runtime": 1131.2123, + "eval_samples_per_second": 5.187, + "eval_steps_per_second": 0.163, + "eval_wer": 0.3113152141994845, + "step": 8757 + }, + { + "epoch": 9.003083247687565, + "grad_norm": 3.3654706478118896, + "learning_rate": 1.0574214517876492e-06, + "loss": 0.1761, + "step": 8760 + }, + { + "epoch": 9.013360739979445, + "grad_norm": 3.2227976322174072, + "learning_rate": 1.0465872156013002e-06, + "loss": 0.152, + "step": 8770 + }, + { + "epoch": 9.023638232271326, + "grad_norm": 3.463777542114258, + "learning_rate": 1.0357529794149514e-06, + "loss": 0.1545, + "step": 8780 + }, + { + "epoch": 9.033915724563206, + "grad_norm": 2.3301193714141846, + "learning_rate": 1.0249187432286024e-06, + "loss": 0.1805, + "step": 8790 + }, + { + "epoch": 9.044193216855087, + "grad_norm": 
3.6022043228149414, + "learning_rate": 1.0140845070422536e-06, + "loss": 0.1652, + "step": 8800 + }, + { + "epoch": 9.054470709146969, + "grad_norm": 1.9217338562011719, + "learning_rate": 1.0032502708559048e-06, + "loss": 0.1265, + "step": 8810 + }, + { + "epoch": 9.06474820143885, + "grad_norm": 2.3764290809631348, + "learning_rate": 9.92416034669556e-07, + "loss": 0.152, + "step": 8820 + }, + { + "epoch": 9.07502569373073, + "grad_norm": 3.19063663482666, + "learning_rate": 9.81581798483207e-07, + "loss": 0.2095, + "step": 8830 + }, + { + "epoch": 9.08530318602261, + "grad_norm": 3.6087141036987305, + "learning_rate": 9.707475622968582e-07, + "loss": 0.1757, + "step": 8840 + }, + { + "epoch": 9.09558067831449, + "grad_norm": 3.343388080596924, + "learning_rate": 9.599133261105092e-07, + "loss": 0.1481, + "step": 8850 + }, + { + "epoch": 9.105858170606371, + "grad_norm": 3.509066581726074, + "learning_rate": 9.490790899241605e-07, + "loss": 0.1844, + "step": 8860 + }, + { + "epoch": 9.116135662898253, + "grad_norm": 5.043178081512451, + "learning_rate": 9.382448537378115e-07, + "loss": 0.1882, + "step": 8870 + }, + { + "epoch": 9.126413155190134, + "grad_norm": 2.8442821502685547, + "learning_rate": 9.274106175514627e-07, + "loss": 0.1949, + "step": 8880 + }, + { + "epoch": 9.136690647482014, + "grad_norm": 4.197086334228516, + "learning_rate": 9.165763813651138e-07, + "loss": 0.1897, + "step": 8890 + }, + { + "epoch": 9.146968139773895, + "grad_norm": 4.18369722366333, + "learning_rate": 9.05742145178765e-07, + "loss": 0.1915, + "step": 8900 + }, + { + "epoch": 9.157245632065775, + "grad_norm": 5.1201395988464355, + "learning_rate": 8.949079089924161e-07, + "loss": 0.1678, + "step": 8910 + }, + { + "epoch": 9.167523124357658, + "grad_norm": 3.6054434776306152, + "learning_rate": 8.840736728060673e-07, + "loss": 0.1345, + "step": 8920 + }, + { + "epoch": 9.177800616649538, + "grad_norm": 1.9621152877807617, + "learning_rate": 8.732394366197183e-07, + "loss": 0.1695, + "step": 8930 + }, + { + "epoch": 9.188078108941419, + "grad_norm": 2.4010167121887207, + "learning_rate": 8.624052004333695e-07, + "loss": 0.198, + "step": 8940 + }, + { + "epoch": 9.198355601233299, + "grad_norm": 4.3703837394714355, + "learning_rate": 8.515709642470206e-07, + "loss": 0.1694, + "step": 8950 + }, + { + "epoch": 9.20863309352518, + "grad_norm": 3.70489239692688, + "learning_rate": 8.407367280606718e-07, + "loss": 0.1332, + "step": 8960 + }, + { + "epoch": 9.21891058581706, + "grad_norm": 2.4999241828918457, + "learning_rate": 8.299024918743229e-07, + "loss": 0.1845, + "step": 8970 + }, + { + "epoch": 9.229188078108942, + "grad_norm": 3.0255820751190186, + "learning_rate": 8.190682556879741e-07, + "loss": 0.1724, + "step": 8980 + }, + { + "epoch": 9.239465570400823, + "grad_norm": 4.293249130249023, + "learning_rate": 8.082340195016251e-07, + "loss": 0.1723, + "step": 8990 + }, + { + "epoch": 9.249743062692703, + "grad_norm": 3.077747106552124, + "learning_rate": 7.973997833152763e-07, + "loss": 0.13, + "step": 9000 + }, + { + "epoch": 9.260020554984584, + "grad_norm": 3.6788992881774902, + "learning_rate": 7.865655471289274e-07, + "loss": 0.1805, + "step": 9010 + }, + { + "epoch": 9.270298047276464, + "grad_norm": 2.088778257369995, + "learning_rate": 7.757313109425786e-07, + "loss": 0.2132, + "step": 9020 + }, + { + "epoch": 9.280575539568344, + "grad_norm": 3.2747962474823, + "learning_rate": 7.648970747562297e-07, + "loss": 0.2321, + "step": 9030 + }, + { + "epoch": 9.290853031860227, + "grad_norm": 
2.9871788024902344, + "learning_rate": 7.540628385698809e-07, + "loss": 0.2326, + "step": 9040 + }, + { + "epoch": 9.301130524152107, + "grad_norm": 3.4682159423828125, + "learning_rate": 7.432286023835321e-07, + "loss": 0.1606, + "step": 9050 + }, + { + "epoch": 9.311408016443988, + "grad_norm": 2.0525877475738525, + "learning_rate": 7.323943661971832e-07, + "loss": 0.1504, + "step": 9060 + }, + { + "epoch": 9.321685508735868, + "grad_norm": 2.734421491622925, + "learning_rate": 7.215601300108344e-07, + "loss": 0.2069, + "step": 9070 + }, + { + "epoch": 9.331963001027749, + "grad_norm": 2.3396363258361816, + "learning_rate": 7.107258938244854e-07, + "loss": 0.1294, + "step": 9080 + }, + { + "epoch": 9.34224049331963, + "grad_norm": 3.383275032043457, + "learning_rate": 6.998916576381366e-07, + "loss": 0.1732, + "step": 9090 + }, + { + "epoch": 9.352517985611511, + "grad_norm": 3.2529306411743164, + "learning_rate": 6.890574214517877e-07, + "loss": 0.2062, + "step": 9100 + }, + { + "epoch": 9.362795477903392, + "grad_norm": 2.869129180908203, + "learning_rate": 6.782231852654389e-07, + "loss": 0.1562, + "step": 9110 + }, + { + "epoch": 9.373072970195272, + "grad_norm": 2.590285301208496, + "learning_rate": 6.6738894907909e-07, + "loss": 0.1701, + "step": 9120 + }, + { + "epoch": 9.383350462487153, + "grad_norm": 2.4754040241241455, + "learning_rate": 6.565547128927412e-07, + "loss": 0.1178, + "step": 9130 + }, + { + "epoch": 9.393627954779033, + "grad_norm": 3.87373948097229, + "learning_rate": 6.457204767063922e-07, + "loss": 0.1609, + "step": 9140 + }, + { + "epoch": 9.403905447070915, + "grad_norm": 2.705742359161377, + "learning_rate": 6.348862405200434e-07, + "loss": 0.1613, + "step": 9150 + }, + { + "epoch": 9.414182939362796, + "grad_norm": 2.920018434524536, + "learning_rate": 6.240520043336945e-07, + "loss": 0.1814, + "step": 9160 + }, + { + "epoch": 9.424460431654676, + "grad_norm": 4.477851390838623, + "learning_rate": 6.132177681473456e-07, + "loss": 0.202, + "step": 9170 + }, + { + "epoch": 9.434737923946557, + "grad_norm": 2.75160551071167, + "learning_rate": 6.023835319609968e-07, + "loss": 0.1518, + "step": 9180 + }, + { + "epoch": 9.445015416238437, + "grad_norm": 3.1380374431610107, + "learning_rate": 5.915492957746479e-07, + "loss": 0.18, + "step": 9190 + }, + { + "epoch": 9.45529290853032, + "grad_norm": 3.3420140743255615, + "learning_rate": 5.80715059588299e-07, + "loss": 0.1787, + "step": 9200 + }, + { + "epoch": 9.4655704008222, + "grad_norm": 2.556490182876587, + "learning_rate": 5.698808234019502e-07, + "loss": 0.191, + "step": 9210 + }, + { + "epoch": 9.47584789311408, + "grad_norm": 3.437117576599121, + "learning_rate": 5.590465872156013e-07, + "loss": 0.1654, + "step": 9220 + }, + { + "epoch": 9.48612538540596, + "grad_norm": 3.4692296981811523, + "learning_rate": 5.482123510292524e-07, + "loss": 0.2181, + "step": 9230 + }, + { + "epoch": 9.496402877697841, + "grad_norm": 3.6604814529418945, + "learning_rate": 5.373781148429036e-07, + "loss": 0.1442, + "step": 9240 + }, + { + "epoch": 9.506680369989722, + "grad_norm": 3.1510462760925293, + "learning_rate": 5.265438786565547e-07, + "loss": 0.1292, + "step": 9250 + }, + { + "epoch": 9.516957862281604, + "grad_norm": 3.3884165287017822, + "learning_rate": 5.157096424702059e-07, + "loss": 0.1995, + "step": 9260 + }, + { + "epoch": 9.527235354573484, + "grad_norm": 4.259904384613037, + "learning_rate": 5.04875406283857e-07, + "loss": 0.1732, + "step": 9270 + }, + { + "epoch": 9.537512846865365, + "grad_norm": 
2.8383660316467285, + "learning_rate": 4.940411700975082e-07, + "loss": 0.1649, + "step": 9280 + }, + { + "epoch": 9.547790339157245, + "grad_norm": 3.905196189880371, + "learning_rate": 4.832069339111593e-07, + "loss": 0.142, + "step": 9290 + }, + { + "epoch": 9.558067831449126, + "grad_norm": 4.23677396774292, + "learning_rate": 4.7237269772481046e-07, + "loss": 0.1343, + "step": 9300 + }, + { + "epoch": 9.568345323741006, + "grad_norm": 3.761404275894165, + "learning_rate": 4.615384615384616e-07, + "loss": 0.1493, + "step": 9310 + }, + { + "epoch": 9.578622816032889, + "grad_norm": 3.2816426753997803, + "learning_rate": 4.507042253521127e-07, + "loss": 0.1869, + "step": 9320 + }, + { + "epoch": 9.588900308324769, + "grad_norm": 2.690589666366577, + "learning_rate": 4.3986998916576387e-07, + "loss": 0.2415, + "step": 9330 + }, + { + "epoch": 9.59917780061665, + "grad_norm": 3.552229642868042, + "learning_rate": 4.29035752979415e-07, + "loss": 0.1602, + "step": 9340 + }, + { + "epoch": 9.60945529290853, + "grad_norm": 2.21655535697937, + "learning_rate": 4.1820151679306617e-07, + "loss": 0.1942, + "step": 9350 + }, + { + "epoch": 9.61973278520041, + "grad_norm": 2.400130271911621, + "learning_rate": 4.0736728060671727e-07, + "loss": 0.1857, + "step": 9360 + }, + { + "epoch": 9.630010277492293, + "grad_norm": 3.0297770500183105, + "learning_rate": 3.965330444203684e-07, + "loss": 0.162, + "step": 9370 + }, + { + "epoch": 9.640287769784173, + "grad_norm": 2.344736099243164, + "learning_rate": 3.8569880823401957e-07, + "loss": 0.1301, + "step": 9380 + }, + { + "epoch": 9.650565262076054, + "grad_norm": 2.695235013961792, + "learning_rate": 3.7486457204767067e-07, + "loss": 0.1601, + "step": 9390 + }, + { + "epoch": 9.660842754367934, + "grad_norm": 4.2191243171691895, + "learning_rate": 3.640303358613218e-07, + "loss": 0.2298, + "step": 9400 + }, + { + "epoch": 9.671120246659815, + "grad_norm": 2.826716184616089, + "learning_rate": 3.5319609967497297e-07, + "loss": 0.2066, + "step": 9410 + }, + { + "epoch": 9.681397738951695, + "grad_norm": 2.636258363723755, + "learning_rate": 3.4236186348862407e-07, + "loss": 0.2013, + "step": 9420 + }, + { + "epoch": 9.691675231243577, + "grad_norm": 4.043715953826904, + "learning_rate": 3.315276273022752e-07, + "loss": 0.1886, + "step": 9430 + }, + { + "epoch": 9.701952723535458, + "grad_norm": 3.714195728302002, + "learning_rate": 3.2069339111592637e-07, + "loss": 0.209, + "step": 9440 + }, + { + "epoch": 9.712230215827338, + "grad_norm": 2.3214926719665527, + "learning_rate": 3.0985915492957747e-07, + "loss": 0.1636, + "step": 9450 + }, + { + "epoch": 9.722507708119219, + "grad_norm": 3.8343677520751953, + "learning_rate": 2.990249187432286e-07, + "loss": 0.2022, + "step": 9460 + }, + { + "epoch": 9.732785200411099, + "grad_norm": 3.7999889850616455, + "learning_rate": 2.881906825568798e-07, + "loss": 0.1934, + "step": 9470 + }, + { + "epoch": 9.743062692702981, + "grad_norm": 3.770751953125, + "learning_rate": 2.773564463705309e-07, + "loss": 0.1713, + "step": 9480 + }, + { + "epoch": 9.753340184994862, + "grad_norm": 4.560354709625244, + "learning_rate": 2.66522210184182e-07, + "loss": 0.1804, + "step": 9490 + }, + { + "epoch": 9.763617677286742, + "grad_norm": 2.4577202796936035, + "learning_rate": 2.556879739978332e-07, + "loss": 0.1585, + "step": 9500 + }, + { + "epoch": 9.773895169578623, + "grad_norm": 3.556216239929199, + "learning_rate": 2.4485373781148433e-07, + "loss": 0.1846, + "step": 9510 + }, + { + "epoch": 9.784172661870503, + 
"grad_norm": 3.2222604751586914, + "learning_rate": 2.3401950162513545e-07, + "loss": 0.1611, + "step": 9520 + }, + { + "epoch": 9.794450154162384, + "grad_norm": 2.583037853240967, + "learning_rate": 2.2318526543878658e-07, + "loss": 0.1712, + "step": 9530 + }, + { + "epoch": 9.804727646454266, + "grad_norm": 2.7843735218048096, + "learning_rate": 2.123510292524377e-07, + "loss": 0.1325, + "step": 9540 + }, + { + "epoch": 9.815005138746146, + "grad_norm": 2.146963357925415, + "learning_rate": 2.0151679306608885e-07, + "loss": 0.1508, + "step": 9550 + }, + { + "epoch": 9.825282631038027, + "grad_norm": 3.2385213375091553, + "learning_rate": 1.9068255687973998e-07, + "loss": 0.1541, + "step": 9560 + }, + { + "epoch": 9.835560123329907, + "grad_norm": 3.864046573638916, + "learning_rate": 1.798483206933911e-07, + "loss": 0.1544, + "step": 9570 + }, + { + "epoch": 9.845837615621788, + "grad_norm": 3.785207748413086, + "learning_rate": 1.6901408450704225e-07, + "loss": 0.1882, + "step": 9580 + }, + { + "epoch": 9.85611510791367, + "grad_norm": 3.159970760345459, + "learning_rate": 1.5817984832069343e-07, + "loss": 0.1798, + "step": 9590 + }, + { + "epoch": 9.86639260020555, + "grad_norm": 3.8631491661071777, + "learning_rate": 1.4734561213434453e-07, + "loss": 0.1657, + "step": 9600 + }, + { + "epoch": 9.87667009249743, + "grad_norm": 2.512830972671509, + "learning_rate": 1.3651137594799568e-07, + "loss": 0.1679, + "step": 9610 + }, + { + "epoch": 9.886947584789311, + "grad_norm": 3.2564988136291504, + "learning_rate": 1.256771397616468e-07, + "loss": 0.2286, + "step": 9620 + }, + { + "epoch": 9.897225077081192, + "grad_norm": 2.4131650924682617, + "learning_rate": 1.1484290357529795e-07, + "loss": 0.164, + "step": 9630 + }, + { + "epoch": 9.907502569373072, + "grad_norm": 4.661231517791748, + "learning_rate": 1.040086673889491e-07, + "loss": 0.1114, + "step": 9640 + }, + { + "epoch": 9.917780061664955, + "grad_norm": 3.26019024848938, + "learning_rate": 9.317443120260024e-08, + "loss": 0.1821, + "step": 9650 + }, + { + "epoch": 9.928057553956835, + "grad_norm": 3.3677456378936768, + "learning_rate": 8.234019501625136e-08, + "loss": 0.1456, + "step": 9660 + }, + { + "epoch": 9.938335046248715, + "grad_norm": 3.299299716949463, + "learning_rate": 7.15059588299025e-08, + "loss": 0.191, + "step": 9670 + }, + { + "epoch": 9.948612538540596, + "grad_norm": 2.5458364486694336, + "learning_rate": 6.067172264355364e-08, + "loss": 0.2054, + "step": 9680 + }, + { + "epoch": 9.958890030832476, + "grad_norm": 4.219174385070801, + "learning_rate": 4.983748645720477e-08, + "loss": 0.206, + "step": 9690 + }, + { + "epoch": 9.969167523124357, + "grad_norm": 1.5195386409759521, + "learning_rate": 3.9003250270855906e-08, + "loss": 0.1463, + "step": 9700 + }, + { + "epoch": 9.979445015416239, + "grad_norm": 3.900343418121338, + "learning_rate": 2.8169014084507045e-08, + "loss": 0.1477, + "step": 9710 + }, + { + "epoch": 9.98972250770812, + "grad_norm": 4.024109363555908, + "learning_rate": 1.7334777898158183e-08, + "loss": 0.2275, + "step": 9720 + }, + { + "epoch": 10.0, + "grad_norm": 2.2625277042388916, + "learning_rate": 6.5005417118093186e-09, + "loss": 0.1692, + "step": 9730 + }, + { + "epoch": 10.0, + "eval_loss": 0.33747920393943787, + "eval_runtime": 1209.0372, + "eval_samples_per_second": 4.853, + "eval_steps_per_second": 0.152, + "eval_wer": 0.3192600084831479, + "step": 9730 + } + ], + "logging_steps": 10, + "max_steps": 9730, + "num_input_tokens_seen": 0, + "num_train_epochs": 10, + "save_steps": 
500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 4.03896185545923e+19, + "train_batch_size": 16, + "trial_name": null, + "trial_params": null +} diff --git a/whisper/whisper_base_finetuned/checkpoint-9730/training_args.bin b/whisper/whisper_base_finetuned/checkpoint-9730/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..cec0038665d32391824dfe472a35578679380744 --- /dev/null +++ b/whisper/whisper_base_finetuned/checkpoint-9730/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9e2cc97af116b33d30c72667d46ddd426569f5f483ad8392e19d95860dfcc43 +size 5240 diff --git a/whisper/whisper_base_finetuned/config.json b/whisper/whisper_base_finetuned/config.json new file mode 100644 index 0000000000000000000000000000000000000000..91728b7bc6c3a43bb11e0d161949a286ca009408 --- /dev/null +++ b/whisper/whisper_base_finetuned/config.json @@ -0,0 +1,52 @@ +{ + "_name_or_path": "whisper_base_finetuned", + "activation_dropout": 0.0, + "activation_function": "gelu", + "apply_spec_augment": true, + "architectures": [ + "WhisperForConditionalGeneration" + ], + "attention_dropout": 0.0, + "begin_suppress_tokens": [ + 220, + 50257 + ], + "bos_token_id": 50257, + "classifier_proj_size": 256, + "d_model": 512, + "decoder_attention_heads": 8, + "decoder_ffn_dim": 2048, + "decoder_layerdrop": 0.0, + "decoder_layers": 6, + "decoder_start_token_id": 50258, + "dropout": 0.0, + "encoder_attention_heads": 8, + "encoder_ffn_dim": 2048, + "encoder_layerdrop": 0.0, + "encoder_layers": 6, + "eos_token_id": 50257, + "forced_decoder_ids": null, + "init_std": 0.02, + "is_encoder_decoder": true, + "mask_feature_length": 10, + "mask_feature_min_masks": 0, + "mask_feature_prob": 0.05, + "mask_time_length": 10, + "mask_time_min_masks": 2, + "mask_time_prob": 0.05, + "max_length": 448, + "max_source_positions": 1500, + "max_target_positions": 448, + "median_filter_width": 7, + "model_type": "whisper", + "num_hidden_layers": 6, + "num_mel_bins": 80, + "pad_token_id": 50257, + "scale_embedding": false, + "suppress_tokens": [], + "torch_dtype": "float32", + "transformers_version": "4.42.0.dev0", + "use_cache": true, + "use_weighted_layer_sum": false, + "vocab_size": 51865 +} diff --git a/whisper/whisper_base_finetuned/generation_config.json b/whisper/whisper_base_finetuned/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ce877d310342bb057324d0dfcf6f83dc6055c1a --- /dev/null +++ b/whisper/whisper_base_finetuned/generation_config.json @@ -0,0 +1,256 @@ +{ + "alignment_heads": [ + [ + 3, + 1 + ], + [ + 4, + 2 + ], + [ + 4, + 3 + ], + [ + 4, + 7 + ], + [ + 5, + 1 + ], + [ + 5, + 2 + ], + [ + 5, + 4 + ], + [ + 5, + 6 + ] + ], + "begin_suppress_tokens": [ + 220, + 50257 + ], + "bos_token_id": 50257, + "decoder_start_token_id": 50258, + "eos_token_id": 50257, + "forced_decoder_ids": [ + [ + 1, + null + ], + [ + 2, + 50359 + ] + ], + "is_multilingual": true, + "lang_to_id": { + "<|af|>": 50327, + "<|am|>": 50334, + "<|ar|>": 50272, + "<|as|>": 50350, + "<|az|>": 50304, + "<|ba|>": 50355, + "<|be|>": 50330, + "<|bg|>": 50292, + "<|bn|>": 50302, + "<|bo|>": 50347, + "<|br|>": 50309, + "<|bs|>": 50315, + "<|ca|>": 50270, + "<|cs|>": 50283, + "<|cy|>": 50297, + "<|da|>": 50285, + "<|de|>": 50261, + "<|el|>": 50281, + "<|en|>": 50259, + 
"<|es|>": 50262, + "<|et|>": 50307, + "<|eu|>": 50310, + "<|fa|>": 50300, + "<|fi|>": 50277, + "<|fo|>": 50338, + "<|fr|>": 50265, + "<|gl|>": 50319, + "<|gu|>": 50333, + "<|haw|>": 50352, + "<|ha|>": 50354, + "<|he|>": 50279, + "<|hi|>": 50276, + "<|hr|>": 50291, + "<|ht|>": 50339, + "<|hu|>": 50286, + "<|hy|>": 50312, + "<|id|>": 50275, + "<|is|>": 50311, + "<|it|>": 50274, + "<|ja|>": 50266, + "<|jw|>": 50356, + "<|ka|>": 50329, + "<|kk|>": 50316, + "<|km|>": 50323, + "<|kn|>": 50306, + "<|ko|>": 50264, + "<|la|>": 50294, + "<|lb|>": 50345, + "<|ln|>": 50353, + "<|lo|>": 50336, + "<|lt|>": 50293, + "<|lv|>": 50301, + "<|mg|>": 50349, + "<|mi|>": 50295, + "<|mk|>": 50308, + "<|ml|>": 50296, + "<|mn|>": 50314, + "<|mr|>": 50320, + "<|ms|>": 50282, + "<|mt|>": 50343, + "<|my|>": 50346, + "<|ne|>": 50313, + "<|nl|>": 50271, + "<|nn|>": 50342, + "<|no|>": 50288, + "<|oc|>": 50328, + "<|pa|>": 50321, + "<|pl|>": 50269, + "<|ps|>": 50340, + "<|pt|>": 50267, + "<|ro|>": 50284, + "<|ru|>": 50263, + "<|sa|>": 50344, + "<|sd|>": 50332, + "<|si|>": 50322, + "<|sk|>": 50298, + "<|sl|>": 50305, + "<|sn|>": 50324, + "<|so|>": 50326, + "<|sq|>": 50317, + "<|sr|>": 50303, + "<|su|>": 50357, + "<|sv|>": 50273, + "<|sw|>": 50318, + "<|ta|>": 50287, + "<|te|>": 50299, + "<|tg|>": 50331, + "<|th|>": 50289, + "<|tk|>": 50341, + "<|tl|>": 50348, + "<|tr|>": 50268, + "<|tt|>": 50351, + "<|uk|>": 50280, + "<|ur|>": 50290, + "<|uz|>": 50337, + "<|vi|>": 50278, + "<|yi|>": 50335, + "<|yo|>": 50325, + "<|zh|>": 50260 + }, + "max_initial_timestamp_index": 50, + "max_length": 448, + "no_timestamps_token_id": 50363, + "pad_token_id": 50257, + "prev_sot_token_id": 50361, + "return_timestamps": false, + "suppress_tokens": [ + 1, + 2, + 7, + 8, + 9, + 10, + 14, + 25, + 26, + 27, + 28, + 29, + 31, + 58, + 59, + 60, + 61, + 62, + 63, + 90, + 91, + 92, + 93, + 359, + 503, + 522, + 542, + 873, + 893, + 902, + 918, + 922, + 931, + 1350, + 1853, + 1982, + 2460, + 2627, + 3246, + 3253, + 3268, + 3536, + 3846, + 3961, + 4183, + 4667, + 6585, + 6647, + 7273, + 9061, + 9383, + 10428, + 10929, + 11938, + 12033, + 12331, + 12562, + 13793, + 14157, + 14635, + 15265, + 15618, + 16553, + 16604, + 18362, + 18956, + 20075, + 21675, + 22520, + 26130, + 26161, + 26435, + 28279, + 29464, + 31650, + 32302, + 32470, + 36865, + 42863, + 47425, + 49870, + 50254, + 50258, + 50358, + 50359, + 50360, + 50361, + 50362 + ], + "task_to_id": { + "transcribe": 50359, + "translate": 50358 + }, + "transformers_version": "4.42.0.dev0" +} diff --git a/whisper/whisper_base_finetuned/model.safetensors b/whisper/whisper_base_finetuned/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d3ffdfb85687050faeb83213beaa74fff65de794 --- /dev/null +++ b/whisper/whisper_base_finetuned/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dd026348fe37281549fd7c6a808f086ca0fd97d58b2cacc1f727f1b30983206 +size 290403936 diff --git a/whisper/whisper_base_finetuned/preprocessor_config.json b/whisper/whisper_base_finetuned/preprocessor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..91876762a536a746d268353c5cba57286e76b058 --- /dev/null +++ b/whisper/whisper_base_finetuned/preprocessor_config.json @@ -0,0 +1,14 @@ +{ + "chunk_length": 30, + "feature_extractor_type": "WhisperFeatureExtractor", + "feature_size": 80, + "hop_length": 160, + "n_fft": 400, + "n_samples": 480000, + "nb_max_frames": 3000, + "padding_side": "right", + "padding_value": 0.0, + "processor_class": 
"WhisperProcessor", + "return_attention_mask": false, + "sampling_rate": 16000 +} diff --git a/whisper/whisper_base_finetuned/training_args.bin b/whisper/whisper_base_finetuned/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..cec0038665d32391824dfe472a35578679380744 --- /dev/null +++ b/whisper/whisper_base_finetuned/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9e2cc97af116b33d30c72667d46ddd426569f5f483ad8392e19d95860dfcc43 +size 5240 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/best-train-loss.pt b/zipformer/finetuned/ctc/causal/exp_finetune/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..07dbca08854d656fb0251367591c640c51ff603d --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abeb187cfd750a533d745951ed4f05300ab887a94d12510446173afc9ad64ab9 +size 1062964430 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/best-valid-loss.pt b/zipformer/finetuned/ctc/causal/exp_finetune/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..d77e0a4e3753c76a3ee54894f5ad9ef6813c4700 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92908b67b3cd59091848f2ebd72c1e4aad91ff3c0774f27aeada8668ff589fa4 +size 1062964494 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-1.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..40fffa46139aabf4dfe8e5377d3f832affc3824e --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d927f5b0338a62d415c36f3705bde3f3276c88369f904575e83945d138cfef3e +size 1062960853 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-10.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..04cea4713cc514c906aa4da064d3d001232ee9e0 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-10.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07c28ba01255527170c8d297aafe10678dbf051a263f2eb8ca06442c701f9ef3 +size 1062964046 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-11.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..a669461baa8fbb9bb73aa160bab4c7c0f630a5e0 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-11.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:254bb0451bb8282fbf44b55ae9225a2b933a125f6677aa57a8c1a80426fc3299 +size 1062964110 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-12.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..a8da0d373a5fa8e05aebf0143328bd85b4667ca1 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51af1f57dbd87eddb1ebeba3b31b845063e4c042fd899f5884dd0a41ff95d5b4 +size 1062964110 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-13.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-13.pt new file mode 100644 index 
0000000000000000000000000000000000000000..b44a655a2c0085324463d42258c1007544c58653 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c0afecf8875b63321a7c020df9565414884617e54de884959c413f4013673df +size 1062964174 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-14.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..2267b9d8427ec547006b09f4e04f42a3160224f4 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e59080fab9bbe2b6e7e7c99d7feab0c5f034097ea7c2c6f025c765430295ee57 +size 1062964238 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-15.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-15.pt new file mode 100644 index 0000000000000000000000000000000000000000..31ba5cb894a2a702bc6223735dc55aeb6c9d730e --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-15.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4655cf2317de278a498cf45e8656f994bf7b48cd131ed70150f1657bd7c38281 +size 1062964238 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-16.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..fc41edd0897203fdf5d360077494730fb1df8a95 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:425e29b71db27e9b165b0b6b87e65e7e92ff430f1af45dd5244f5d8c70a278ec +size 1062964302 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-17.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..36cf2a104cffe71b3a62adbd1b27281a26a8e237 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7635c71701052a4652e68f2e104cf144e2bba02256354e9b9683464cb8888997 +size 1062964366 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-18.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..3cbe966dcd6a8634d080102cfc9f6971097a7889 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ef656e75a76c9a7823846c9482d96836da5661219f38363748bcabd08cb9049 +size 1062964430 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-19.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..07dbca08854d656fb0251367591c640c51ff603d --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abeb187cfd750a533d745951ed4f05300ab887a94d12510446173afc9ad64ab9 +size 1062964430 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-2.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..2cc7b54c01a2216413d463c4930f218b61ff8d6a --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:9aac5c412ebfa61e6173739f6adc63283fb6268f87042a68c2579d7aaec4160d +size 1062960981 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-20.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..d77e0a4e3753c76a3ee54894f5ad9ef6813c4700 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92908b67b3cd59091848f2ebd72c1e4aad91ff3c0774f27aeada8668ff589fa4 +size 1062964494 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-3.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..4227883ea394fbc49f937943cf8b3d221e1e597d --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-3.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4958378241bdca4e0b09d75953d8c36bf70b1d1f9d7309a857426e8078ef7e75 +size 1062961045 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-4.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-4.pt new file mode 100644 index 0000000000000000000000000000000000000000..f5bb9205102e76ea2426c85b4c92fb5d759f807d --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65dc89fda1a1e82c2cf0314218e04f134ed92834f63b80599c9ebdd365170781 +size 1062961045 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-5.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..1f9f8e1ebef2a7916e6547f87bfd11ec007e89dd --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1467ef51278ca01f91eef1d5d4d57d360d202cba68a1b5c35bfc478e9bf15273 +size 1062961109 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-6.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..805478a06c0859a31be6c633a5341a339f32839e --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2dd8b95f78b2539a4664a439bb2a8cf944fd06851485385105cdf3fd80785ff +size 1062961173 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-7.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..962089be4133ab8e3f82b53fd2dc3a7fa4776d5e --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e9b1237f3dbe638e1975b925a8b7d98bf31503d57a8e2adc9c4df5d28c90b0f0 +size 1062961237 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-8.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-8.pt new file mode 100644 index 0000000000000000000000000000000000000000..9e0e1cd54231311a4d77433c015251713b120650 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc920a87e4efc362adfbad874c73c1f90bef5d1340669d1b58a9030b7bbc7e95 +size 1062961237 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/epoch-9.pt b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-9.pt new file mode 100644 index 
0000000000000000000000000000000000000000..ca687b4d4b4b6fa2c4b5b6f53f9fa294381c7882 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bd9fe5609b8a747cd7c1e92dbaf30d3d5b9dff5c3124266e0079e01df5657bc +size 1062961301 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-0 new file mode 100644 index 0000000000000000000000000000000000000000..cbbedfe201c43b9b2841e8a0c2e936a29f544e38 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-0 @@ -0,0 +1,9 @@ +2024-08-29 03:37:48,106 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-29 03:37:48,411 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-29 03:37:48,411 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-29 03:37:48,765 INFO [dysarthria_finetune.py:1219] (0/4) (33748090880, 34072559616) +2024-08-29 03:37:48,771 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-29 03:37:49,594 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2655.int.cedar.computecanada.ca', 'IP address': '172.16.146.92'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 03:37:49,594 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-29 03:37:50,277 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66367431 +2024-08-29 03:37:50,816 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-1 new file mode 100644 index 0000000000000000000000000000000000000000..883755739d168a9375bd4af105e36153c701cf1a --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-1 @@ -0,0 +1,7 @@ +2024-08-29 03:37:48,477 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-29 03:37:50,829 INFO [dysarthria_finetune.py:1214] (1/4) (33427226624, 34072559616) +2024-08-29 03:37:50,829 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-29 03:37:51,205 INFO [dysarthria_finetune.py:1219] (1/4) (33427226624, 34072559616) +2024-08-29 03:37:51,205 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-29 03:37:51,208 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2655.int.cedar.computecanada.ca', 'IP address': '172.16.146.92'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': 
True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 03:37:51,209 INFO [dysarthria_finetune.py:1243] (1/4) About to create model diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-2 new file mode 100644 index 0000000000000000000000000000000000000000..f863a9af401ff1ed95184a0a3ac78a1e2f3a9ea5 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-2 @@ -0,0 +1 @@ +2024-08-29 03:37:48,474 INFO [dysarthria_finetune.py:1212] (2/4) Training started diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-3 new file mode 100644 index 0000000000000000000000000000000000000000..55e082fe8f24fd2a6375fc8935f7bb10e4f2abe9 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-03-37-48-3 @@ -0,0 +1,3 @@ +2024-08-29 03:37:48,476 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-29 03:37:55,030 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-29 03:37:55,030 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-0 new file mode 100644 index 0000000000000000000000000000000000000000..2e35d0583622f5a47755e0d015f4b7c5ae7df549 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-0 @@ -0,0 +1,9 @@ +2024-08-29 11:41:33,292 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-29 11:41:33,521 INFO [dysarthria_finetune.py:1214] (0/4) (33725022208, 34072559616) +2024-08-29 11:41:33,521 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-29 11:41:33,990 INFO [dysarthria_finetune.py:1219] (0/4) (33427226624, 34072559616) +2024-08-29 11:41:33,994 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-29 11:41:36,507 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 
'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 11:41:36,507 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-29 11:41:37,205 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66367431 +2024-08-29 11:41:37,767 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-1 new file mode 100644 index 0000000000000000000000000000000000000000..97d11ee5609cb0985a9c7befa5bab0510bb5d49e --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-1 @@ -0,0 +1 @@ +2024-08-29 11:41:33,540 INFO 
[dysarthria_finetune.py:1212] (1/4) Training started diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-2 new file mode 100644 index 0000000000000000000000000000000000000000..b3b694744f34db8f50e3b8bfa4e820227d998971 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-2 @@ -0,0 +1,9 @@ +2024-08-29 11:41:33,436 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-29 11:41:40,957 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-29 11:41:40,957 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-29 11:41:41,322 INFO [dysarthria_finetune.py:1219] (2/4) (33748090880, 34072559616) +2024-08-29 11:41:41,324 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-29 11:41:41,326 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': 
False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-29 11:41:41,327 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-29 11:41:42,016 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431
+2024-08-29 11:41:42,016 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-11-41-33-3
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-0
new file mode 100644
index 0000000000000000000000000000000000000000..534bcb2d26134ab69efb738ac67fa381db477111
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-0
@@ -0,0 +1,4 @@
+2024-08-29 20:11:55,398 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-29 20:11:55,756 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616)
+2024-08-29 20:11:55,757 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-29 20:11:56,549 INFO [dysarthria_finetune.py:1219] (0/4) (33106362368, 34072559616)
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-1
new file mode 100644
index 0000000000000000000000000000000000000000..353da3c1bb048f1346a7f6a8105b0aa33e7c7c0f
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-1
@@ -0,0 +1,9 @@
+2024-08-29 20:11:55,762 INFO [dysarthria_finetune.py:1212] (1/4) Training started
+2024-08-29 20:11:55,804 INFO [dysarthria_finetune.py:1214] (1/4) (33735507968, 34072559616)
+2024-08-29 20:11:55,804 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after
+2024-08-29 20:11:56,545 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616)
+2024-08-29 20:11:58,602 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1
+2024-08-29 20:11:58,660 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-29 20:11:58,660 INFO [dysarthria_finetune.py:1243] (1/4) About to create model
+2024-08-29 20:11:59,356 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66367431
+2024-08-29 20:11:59,357 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-2
new file mode 100644
index 0000000000000000000000000000000000000000..06238258cbb09f462f1f3e280a7e187bdd5711bc
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-2
@@ -0,0 +1,9 @@
+2024-08-29 20:11:55,746 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-29 20:11:58,602 INFO [dysarthria_finetune.py:1214] (2/4) (32783400960, 34072559616)
+2024-08-29 20:11:58,602 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-29 20:11:58,988 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616)
+2024-08-29 20:11:58,989 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-29 20:11:58,993 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-29 20:11:58,993 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-29 20:11:59,702 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431
+2024-08-29 20:11:59,702 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-3
new file mode 100644
index 0000000000000000000000000000000000000000..5bdc9714ffd5197179650093eae344bd33b71f56
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-29-20-11-55-3
@@ -0,0 +1,9 @@
+2024-08-29 20:11:55,761 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-29 20:11:55,804 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616)
+2024-08-29 20:11:55,804 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-29 20:11:56,543 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616)
+2024-08-29 20:11:58,602 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-29 20:11:58,660 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-29 20:11:58,661 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-29 20:11:59,362 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66367431
+2024-08-29 20:11:59,362 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-0
new file mode 100644
index 0000000000000000000000000000000000000000..77cd5f5ca9a6f6269455e87446e46e51c4eb1722
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-0
@@ -0,0 +1 @@
+2024-08-30 17:23:52,592 INFO [dysarthria_finetune.py:1212] (0/4) Training started
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-1
new file mode 100644
index 0000000000000000000000000000000000000000..3135f29ace324a73c67f0972e6462c5e050383d6
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-1
@@ -0,0 +1,9 @@
+2024-08-30 17:23:52,733 INFO [dysarthria_finetune.py:1212] (1/4) Training started
+2024-08-30 17:23:52,765 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616)
+2024-08-30 17:23:52,766 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after
+2024-08-30 17:23:53,415 INFO [dysarthria_finetune.py:1219] (1/4) (33414643712, 34072559616)
+2024-08-30 17:23:53,415 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1
+2024-08-30 17:23:53,662 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 17:23:53,662 INFO [dysarthria_finetune.py:1243] (1/4) About to create model
+2024-08-30 17:23:54,364 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66367431
+2024-08-30 17:23:54,364 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-2
new file mode 100644
index 0000000000000000000000000000000000000000..81b1316b7761eb9a1ed9e33ba0309090ddbb7bad
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-2
@@ -0,0 +1,9 @@
+2024-08-30 17:23:52,703 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-30 17:23:53,136 INFO [dysarthria_finetune.py:1214] (2/4) (33414643712, 34072559616)
+2024-08-30 17:23:53,136 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-30 17:23:53,608 INFO [dysarthria_finetune.py:1219] (2/4) (33106362368, 34072559616)
+2024-08-30 17:23:53,609 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-30 17:23:53,661 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 17:23:53,661 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-30 17:23:54,373 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431
+2024-08-30 17:23:54,373 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-3
new file mode 100644
index 0000000000000000000000000000000000000000..28c09f84ee5056bb158935bb63852af99ff70955
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-17-23-52-3
@@ -0,0 +1,9 @@
+2024-08-30 17:23:52,718 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-30 17:23:52,720 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616)
+2024-08-30 17:23:52,720 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-30 17:23:53,398 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616)
+2024-08-30 17:23:53,399 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-30 17:23:53,661 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 17:23:53,661 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-30 17:23:54,364 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66367431
+2024-08-30 17:23:54,364 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-33-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-33-0
new file mode 100644
index 0000000000000000000000000000000000000000..671001a017b546d97a6aa9f7b77823a3d206f99e
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-33-0
@@ -0,0 +1,9 @@
+2024-08-30 21:11:33,959 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-30 21:11:34,230 INFO [dysarthria_finetune.py:1214] (0/4) (33735507968, 34072559616)
+2024-08-30 21:11:34,231 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-30 21:11:34,950 INFO [dysarthria_finetune.py:1219] (0/4) (33106362368, 34072559616)
+2024-08-30 21:11:34,958 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0
+2024-08-30 21:11:34,960 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 21:11:34,961 INFO [dysarthria_finetune.py:1243] (0/4) About to create model
+2024-08-30 21:11:35,755 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66367431
+2024-08-30 21:11:36,317 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-1
new file mode 100644
index 0000000000000000000000000000000000000000..4d8112b475661e227df6f5e1378ae6f058e78472
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-1
@@ -0,0 +1,9 @@
+2024-08-30 21:11:34,198 INFO [dysarthria_finetune.py:1212] (1/4) Training started
+2024-08-30 21:11:34,230 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616)
+2024-08-30 21:11:34,231 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after
+2024-08-30 21:11:34,961 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616)
+2024-08-30 21:11:34,962 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1
+2024-08-30 21:11:34,964 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 21:11:34,965 INFO [dysarthria_finetune.py:1243] (1/4) About to create model
+2024-08-30 21:11:35,679 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66367431
+2024-08-30 21:11:35,679 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-2
new file mode 100644
index 0000000000000000000000000000000000000000..f2caa03243c75553b575e823459c4b80cc1ed045
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-2
@@ -0,0 +1,9 @@
+2024-08-30 21:11:34,197 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-30 21:11:35,546 INFO [dysarthria_finetune.py:1214] (2/4) (32783400960, 34072559616)
+2024-08-30 21:11:35,546 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-30 21:11:35,930 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616)
+2024-08-30 21:11:35,931 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-30 21:11:35,933 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 21:11:35,933 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-30 21:11:36,632 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431
+2024-08-30 21:11:36,632 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-3
new file mode 100644
index 0000000000000000000000000000000000000000..22624786ca583564f188b4d0af7dccf0e9062a90
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-21-11-34-3
@@ -0,0 +1,9 @@
+2024-08-30 21:11:34,184 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-30 21:11:34,185 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616)
+2024-08-30 21:11:34,185 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-30 21:11:34,945 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616)
+2024-08-30 21:11:34,946 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-30 21:11:34,948 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 21:11:34,948 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-30 21:11:35,649 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66367431
+2024-08-30 21:11:35,649 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-0
new file mode 100644
index 0000000000000000000000000000000000000000..af96a4145393ae81e5e169dab9ce3d8fcd4f1d93
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-0
@@ -0,0 +1 @@
+2024-08-30 22:48:54,417 INFO [dysarthria_finetune.py:1212] (0/4) Training started
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-1
new file mode 100644
index 0000000000000000000000000000000000000000..5a5127ac50027fd70414bb87cea4d9a32ef3fb04
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-1
@@ -0,0 +1,3 @@
+2024-08-30 22:48:54,486 INFO [dysarthria_finetune.py:1212] (1/4) Training started
+2024-08-30 22:49:06,853 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616)
+2024-08-30 22:49:06,853 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-2
new file mode 100644
index 0000000000000000000000000000000000000000..6912371f16a858ad9cae74639067d72890ce1920
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-2
@@ -0,0 +1,9 @@
+2024-08-30 22:48:54,490 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-30 22:48:54,531 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616)
+2024-08-30 22:48:54,531 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-30 22:48:55,055 INFO [dysarthria_finetune.py:1219] (2/4) (33427226624, 34072559616)
+2024-08-30 22:48:55,055 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-30 22:48:55,099 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2539.int.cedar.computecanada.ca', 'IP address': '172.16.145.232'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 22:48:55,100 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-30 22:48:55,797 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431
+2024-08-30 22:48:55,797 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-3
new file mode 100644
index 0000000000000000000000000000000000000000..9541a6908eddf653acb65bceef27dd6e85f0fca5
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-30-22-48-54-3
@@ -0,0 +1,9 @@
+2024-08-30 22:48:54,484 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-30 22:48:54,485 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616)
+2024-08-30 22:48:54,485 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-30 22:48:55,049 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616)
+2024-08-30 22:48:55,050 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-30 22:48:55,100 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2539.int.cedar.computecanada.ca', 'IP address': '172.16.145.232'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-30 22:48:55,100 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-30 22:48:55,797 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66367431
+2024-08-30 22:48:55,797 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-0
new file mode 100644
index 0000000000000000000000000000000000000000..8894177ac7e3525aef1f3ea4945b364d4fe5d4f2
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-0
@@ -0,0 +1,11 @@
+2024-08-31 18:50:37,365 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-31 18:50:37,681 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616)
+2024-08-31 18:50:37,681 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-31 18:50:38,683 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616)
+2024-08-31 18:50:41,220 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0
+2024-08-31 18:50:41,223 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 18:50:41,224 INFO [dysarthria_finetune.py:1243] (0/4) About to create model
+2024-08-31 18:50:41,941 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66367431
+2024-08-31 18:50:42,495 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
+2024-08-31 18:50:44,403 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP
+2024-08-31 18:50:45,805 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-1
new file mode 100644
index 0000000000000000000000000000000000000000..4363bcf8b0d9610d8ac88b39dfaae5b8888c573f
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-1
@@ -0,0 +1,11 @@
+2024-08-31 18:50:37,683 INFO [dysarthria_finetune.py:1212] (1/4) Training started
+2024-08-31 18:50:37,726 INFO [dysarthria_finetune.py:1214] (1/4) (33735507968, 34072559616)
+2024-08-31 18:50:37,726 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after
+2024-08-31 18:50:38,674 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616)
+2024-08-31 18:50:38,675 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1
+2024-08-31 18:50:38,754 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 18:50:38,754 INFO [dysarthria_finetune.py:1243] (1/4) About to create model
+2024-08-31 18:50:39,457 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66367431
+2024-08-31 18:50:39,457 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
+2024-08-31 18:50:40,690 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP
+2024-08-31 18:50:45,796 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-2
new file mode 100644
index 0000000000000000000000000000000000000000..2dcf96b42efd0b0d62ee043441c70fc7f7c35957
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-2
@@ -0,0 +1,11 @@
+2024-08-31 18:50:37,845 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-31 18:50:37,847 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616)
+2024-08-31 18:50:37,847 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-31 18:50:38,686 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616)
+2024-08-31 18:50:38,686 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-31 18:50:38,754 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 18:50:38,754 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-31 18:50:39,432 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431
+2024-08-31 18:50:39,432 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
+2024-08-31 18:50:40,631 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP
+2024-08-31 18:50:45,784 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-3
new file mode 100644
index 0000000000000000000000000000000000000000..b8d0019c4d5966502e71323afe8a8ac20ca98335
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-18-50-37-3
@@ -0,0 +1,11 @@
+2024-08-31 18:50:37,695 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-31 18:50:37,726 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616)
+2024-08-31 18:50:37,726 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-31 18:50:38,683 INFO [dysarthria_finetune.py:1219] (3/4) (32783400960, 34072559616)
+2024-08-31 18:50:38,683 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-31 18:50:38,754 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 18:50:38,754 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-31 18:50:39,454 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66367431
+2024-08-31 18:50:39,454 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt
+2024-08-31 18:50:40,688 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP
+2024-08-31 18:50:45,792 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-08-59-0 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-08-59-0
new file mode 100644
index 0000000000000000000000000000000000000000..12892eedc9376dc9f8f90739168e64c62fee2a6e
--- /dev/null
+++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-08-59-0
@@ -0,0 +1,551 @@
+2024-08-31 22:08:59,831 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-31 22:09:00,050 INFO [dysarthria_finetune.py:1214] (0/4) (33735507968, 34072559616)
+2024-08-31 22:09:00,050 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-31 22:09:00,558 INFO [dysarthria_finetune.py:1219] (0/4) (33427226624, 34072559616)
+2024-08-31 22:09:00,564 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0
+2024-08-31 22:09:01,025 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2653.int.cedar.computecanada.ca', 'IP address': '172.16.146.90'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 
'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:09:01,025 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-31 22:09:16,970 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66367431 +2024-08-31 22:10:07,061 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt +2024-08-31 22:10:51,211 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-31 22:11:01,688 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-31 22:11:01,783 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-31 22:11:17,376 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-31 22:11:18,292 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-31 22:11:18,293 INFO [dysarthria_asr_datamodule.py:501] (0/4) About to get dev cuts +2024-08-31 22:11:18,428 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-31 22:11:18,749 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-31 22:11:18,749 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:12:59,094 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.27 vs. limit=5.0 +2024-08-31 22:12:59,567 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=15.76 vs. limit=7.5 +2024-08-31 22:13:03,030 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12626MB +2024-08-31 22:13:04,411 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=18.93 vs. limit=7.5 +2024-08-31 22:13:04,880 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12626MB +2024-08-31 22:14:12,900 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12626MB +2024-08-31 22:14:14,957 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12626MB +2024-08-31 22:19:48,583 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=25.16 vs. limit=7.5 +2024-08-31 22:19:51,916 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12626MB +2024-08-31 22:19:54,344 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12626MB +2024-08-31 22:20:43,746 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3566, simple_loss=0.292, pruned_loss=0.165, ctc_loss=0.2404, over 18513.00 frames. ], tot_loss[loss=0.3566, simple_loss=0.292, pruned_loss=0.165, ctc_loss=0.2404, over 18513.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:43,747 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-31 22:46:04,596 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3942, simple_loss=0.3187, pruned_loss=0.1927, ctc_loss=0.281, over 1073944.00 frames. +2024-08-31 22:46:04,729 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-08-31 23:01:30,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-31 23:06:11,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.685e+02 9.975e+02 1.051e+03 1.091e+03 1.133e+03, threshold=4.203e+03, percent-clipped=0.0 +2024-08-31 23:13:47,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100053.33333333333, ans=0.125 +2024-08-31 23:24:16,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=25.24 vs. limit=15.0 +2024-08-31 23:25:12,997 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.960e+02 9.836e+02 1.043e+03 1.067e+03 1.144e+03, threshold=4.173e+03, percent-clipped=0.0 +2024-08-31 23:31:26,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=32.76 vs. limit=15.0 +2024-08-31 23:43:25,261 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=19.62 vs. limit=15.0 +2024-08-31 23:50:05,829 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.677e+02 8.648e+02 9.697e+02 1.051e+03 1.144e+03, threshold=3.879e+03, percent-clipped=0.0 +2024-08-31 23:53:03,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=100213.33333333333, ans=0.09899494936611666 +2024-08-31 23:53:03,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=18.59 vs. limit=15.0 +2024-08-31 23:55:34,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-31 23:55:38,224 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.4293, simple_loss=0.3426, pruned_loss=0.209, ctc_loss=0.3269, over 18890.00 frames. ], tot_loss[loss=0.4257, simple_loss=0.3414, pruned_loss=0.2117, ctc_loss=0.3147, over 828692.51 frames. 
], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 23:59:17,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-09-01 00:09:28,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100373.33333333333, ans=0.125 +2024-09-01 00:14:15,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-09-01 00:16:22,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=100480.0, ans=0.07 +2024-09-01 00:18:32,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.599e+02 6.914e+02 7.776e+02 9.170e+02 1.144e+03, threshold=1.555e+03, percent-clipped=0.0 +2024-09-01 00:18:32,492 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.3986, simple_loss=0.3194, pruned_loss=0.198, ctc_loss=0.2924, over 19293.00 frames. ], tot_loss[loss=0.406, simple_loss=0.326, pruned_loss=0.1985, ctc_loss=0.2989, over 1474004.25 frames. ], batch size: 144, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:20:55,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=21.73 vs. limit=15.0 +2024-09-01 00:21:57,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=19.91 vs. limit=15.0 +2024-09-01 00:27:33,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=100640.0, ans=0.2 +2024-09-01 00:28:35,602 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-1.pt +2024-09-01 00:30:00,886 INFO [dysarthria_finetune.py:1435] (0/4) (1324023808, 34072559616) +2024-09-01 00:30:00,886 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 00:30:00,913 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 00:30:12,104 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 0, loss[loss=0.3438, simple_loss=0.2817, pruned_loss=0.1436, ctc_loss=0.2423, over 18874.00 frames. ], tot_loss[loss=0.3438, simple_loss=0.2817, pruned_loss=0.1436, ctc_loss=0.2423, over 18874.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:30:12,105 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 00:34:27,528 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 2, validation: loss=0.3547, simple_loss=0.2901, pruned_loss=0.1627, ctc_loss=0.2412, over 1073944.00 frames. +2024-09-01 00:34:27,529 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 00:39:15,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=100736.0, ans=0.0 +2024-09-01 00:39:16,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.17 vs. 
limit=15.0 +2024-09-01 00:39:57,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=100736.0, ans=0.125 +2024-09-01 00:40:02,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=100736.0, ans=0.125 +2024-09-01 00:41:34,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.90 vs. limit=15.0 +2024-09-01 00:41:38,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-09-01 00:41:38,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:45:14,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=100842.66666666667, ans=0.035 +2024-09-01 00:48:16,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100949.33333333333, ans=0.125 +2024-09-01 00:48:18,401 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 50, loss[loss=0.3832, simple_loss=0.3059, pruned_loss=0.1888, ctc_loss=0.2848, over 18964.00 frames. ], tot_loss[loss=0.3931, simple_loss=0.3168, pruned_loss=0.1856, ctc_loss=0.2849, over 826819.73 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 00:49:20,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-09-01 00:49:32,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.01 vs. limit=10.0 +2024-09-01 00:49:32,223 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.82 vs. limit=15.0 +2024-09-01 00:50:03,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=101002.66666666667, ans=0.0 +2024-09-01 00:52:13,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101002.66666666667, ans=0.1 +2024-09-01 00:53:05,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.81 vs. limit=15.0 +2024-09-01 00:53:05,343 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 4.624e+02 4.997e+02 5.383e+02 6.686e+02, threshold=9.995e+02, percent-clipped=0.0 +2024-09-01 00:54:01,503 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 00:54:20,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=101109.33333333333, ans=0.2 +2024-09-01 00:56:30,068 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 100, loss[loss=0.4099, simple_loss=0.3316, pruned_loss=0.189, ctc_loss=0.2944, over 19229.00 frames. ], tot_loss[loss=0.3792, simple_loss=0.3066, pruned_loss=0.1765, ctc_loss=0.2723, over 1473154.80 frames. 
], batch size: 144, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 00:57:11,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.46 vs. limit=22.5 +2024-09-01 01:00:20,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=101269.33333333333, ans=0.0 +2024-09-01 01:00:29,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101269.33333333333, ans=0.125 +2024-09-01 01:01:16,814 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-2.pt +2024-09-01 01:01:27,525 INFO [dysarthria_finetune.py:1435] (0/4) (1368064000, 34072559616) +2024-09-01 01:01:27,526 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:01:27,552 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 01:01:35,831 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 0, loss[loss=0.3716, simple_loss=0.3001, pruned_loss=0.1732, ctc_loss=0.2671, over 18603.00 frames. ], tot_loss[loss=0.3716, simple_loss=0.3001, pruned_loss=0.1732, ctc_loss=0.2671, over 18603.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:01:35,832 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:01:59,922 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 3, validation: loss=0.3274, simple_loss=0.2708, pruned_loss=0.1428, ctc_loss=0.2163, over 1073944.00 frames. +2024-09-01 01:01:59,923 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 01:03:02,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.85 vs. limit=5.0 +2024-09-01 01:03:42,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=101477.33333333333, ans=0.07 +2024-09-01 01:04:29,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.68 vs. limit=15.0 +2024-09-01 01:04:33,379 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 3.454e+02 3.711e+02 3.996e+02 5.509e+02, threshold=7.422e+02, percent-clipped=0.0 +2024-09-01 01:04:56,821 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 50, loss[loss=0.3628, simple_loss=0.2979, pruned_loss=0.1588, ctc_loss=0.2498, over 18964.00 frames. ], tot_loss[loss=0.3617, simple_loss=0.2938, pruned_loss=0.1631, ctc_loss=0.2578, over 827741.27 frames. 
], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:05:07,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101637.33333333333, ans=0.1 +2024-09-01 01:05:40,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=101690.66666666667, ans=0.125 +2024-09-01 01:06:07,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101744.0, ans=0.1 +2024-09-01 01:06:43,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:06:49,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:07:00,076 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 100, loss[loss=0.3478, simple_loss=0.2835, pruned_loss=0.1557, ctc_loss=0.245, over 19231.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.2873, pruned_loss=0.1579, ctc_loss=0.2503, over 1473938.15 frames. ], batch size: 144, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:07:57,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=18.54 vs. limit=15.0 +2024-09-01 01:08:18,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.13 vs. limit=6.0 +2024-09-01 01:08:21,702 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-3.pt +2024-09-01 01:08:26,528 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 01:08:26,529 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:08:26,555 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 01:08:35,077 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 0, loss[loss=0.3452, simple_loss=0.2786, pruned_loss=0.1602, ctc_loss=0.2485, over 18523.00 frames. ], tot_loss[loss=0.3452, simple_loss=0.2786, pruned_loss=0.1602, ctc_loss=0.2485, over 18523.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:08:35,078 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:08:58,403 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 4, validation: loss=0.308, simple_loss=0.2573, pruned_loss=0.1299, ctc_loss=0.2, over 1073944.00 frames. 
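Each batch and validation entry in these logs prints four loss fields (`loss`, `simple_loss`, `pruned_loss`, `ctc_loss`), and the config recorded above sets `'simple_loss_scale': 0.5` and `'ctc_loss_scale': 0.2` with both `'use_transducer'` and `'use_ctc'` enabled. The sketch below shows how such a combined pruned-transducer-plus-CTC objective is conventionally assembled; the helper name is illustrative, and the recipe's warm-up scaling and per-frame normalisation are omitted, so the printed `loss` values need not match this sum exactly.

```python
import torch

def combine_losses(simple_loss: torch.Tensor,
                   pruned_loss: torch.Tensor,
                   ctc_loss: torch.Tensor,
                   simple_loss_scale: float = 0.5,  # 'simple_loss_scale' in the config above
                   ctc_loss_scale: float = 0.2      # 'ctc_loss_scale' in the config above
                   ) -> torch.Tensor:
    # Weighted sum of the three components printed in each log entry.
    # The actual recipe also ramps the transducer loss weights during the
    # warm-up steps and averages per frame; both are omitted in this sketch.
    return simple_loss_scale * simple_loss + pruned_loss + ctc_loss_scale * ctc_loss
```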
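The bare tuples that bracket each "Empty cache: before and after" line, e.g. (33748090880, 34072559616), match the (free_bytes, total_bytes) pair returned by `torch.cuda.mem_get_info()` on a 32 GB device, and the "Maximum memory allocated" lines correspond to `torch.cuda.max_memory_allocated()`. A minimal sketch of that bookkeeping, assuming this is indeed how the script produces those lines:

```python
import torch

def log_cuda_memory(device: torch.device) -> None:
    # (free_bytes, total_bytes) -- same shape as the bare tuples in these
    # logs (assumption: the script calls torch.cuda.mem_get_info()).
    print(torch.cuda.mem_get_info(device))
    torch.cuda.empty_cache()          # 'Empty cache: before and after'
    print(torch.cuda.mem_get_info(device))
    # 'Maximum memory allocated so far is ...MB'
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated(device) // 2**20}MB")
```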
+2024-09-01 01:08:58,403 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 01:09:33,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 2.869e+02 3.070e+02 3.452e+02 5.291e+02, threshold=6.140e+02, percent-clipped=0.0 +2024-09-01 01:09:57,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=102160.0, ans=0.2 +2024-09-01 01:10:06,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=102213.33333333333, ans=0.025 +2024-09-01 01:10:47,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=102266.66666666667, ans=0.125 +2024-09-01 01:10:51,030 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 50, loss[loss=0.3704, simple_loss=0.3044, pruned_loss=0.1625, ctc_loss=0.2554, over 18961.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.2794, pruned_loss=0.15, ctc_loss=0.2453, over 828586.64 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:11:03,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102320.0, ans=0.1 +2024-09-01 01:11:07,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.22 vs. limit=15.0 +2024-09-01 01:11:25,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=102373.33333333333, ans=0.125 +2024-09-01 01:11:29,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.48 vs. limit=15.0 +2024-09-01 01:11:31,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=102373.33333333333, ans=0.0 +2024-09-01 01:11:38,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.71 vs. limit=10.0 +2024-09-01 01:11:46,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102426.66666666667, ans=0.125 +2024-09-01 01:12:16,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=102533.33333333333, ans=0.2 +2024-09-01 01:12:25,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=102533.33333333333, ans=0.125 +2024-09-01 01:12:38,908 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 100, loss[loss=0.3573, simple_loss=0.2881, pruned_loss=0.157, ctc_loss=0.2709, over 19286.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.2734, pruned_loss=0.1459, ctc_loss=0.2392, over 1474147.24 frames. ], batch size: 144, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:12:55,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=102586.66666666667, ans=0.125 +2024-09-01 01:12:56,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.73 vs. 
limit=15.0 +2024-09-01 01:13:12,348 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.110e+02 2.669e+02 2.871e+02 3.122e+02 4.671e+02, threshold=5.742e+02, percent-clipped=0.0 +2024-09-01 01:13:39,282 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:13:40,148 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-4.pt +2024-09-01 01:13:44,640 INFO [dysarthria_finetune.py:1435] (0/4) (1368064000, 34072559616) +2024-09-01 01:13:44,640 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:13:44,668 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 01:13:53,058 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 0, loss[loss=0.282, simple_loss=0.231, pruned_loss=0.1172, ctc_loss=0.2094, over 18549.00 frames. ], tot_loss[loss=0.282, simple_loss=0.231, pruned_loss=0.1172, ctc_loss=0.2094, over 18549.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:13:53,059 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:14:16,495 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 5, validation: loss=0.2909, simple_loss=0.2453, pruned_loss=0.1191, ctc_loss=0.1881, over 1073944.00 frames. +2024-09-01 01:14:16,496 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 01:15:18,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=102741.33333333333, ans=0.025 +2024-09-01 01:15:54,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.36 vs. limit=15.0 +2024-09-01 01:16:52,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102848.0, ans=0.0 +2024-09-01 01:17:05,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102848.0, ans=0.1 +2024-09-01 01:17:33,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=102901.33333333333, ans=0.0 +2024-09-01 01:19:42,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.86 vs. limit=10.0 +2024-09-01 01:20:50,672 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 50, loss[loss=0.3179, simple_loss=0.2635, pruned_loss=0.135, ctc_loss=0.2188, over 19008.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.2635, pruned_loss=0.1343, ctc_loss=0.2292, over 828355.03 frames. 
], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:22:35,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103061.33333333333, ans=0.125 +2024-09-01 01:22:44,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=103061.33333333333, ans=0.0 +2024-09-01 01:25:33,874 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.253e+02 2.485e+02 2.709e+02 2.997e+02 4.733e+02, threshold=5.419e+02, percent-clipped=0.0 +2024-09-01 01:25:46,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.72 vs. limit=15.0 +2024-09-01 01:25:49,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.57 vs. limit=22.5 +2024-09-01 01:25:52,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.02 vs. limit=22.5 +2024-09-01 01:26:12,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=103221.33333333333, ans=0.025 +2024-09-01 01:26:19,492 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.36 vs. limit=6.0 +2024-09-01 01:26:23,127 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 100, loss[loss=0.3115, simple_loss=0.2548, pruned_loss=0.1334, ctc_loss=0.2282, over 19287.00 frames. ], tot_loss[loss=0.316, simple_loss=0.2604, pruned_loss=0.1331, ctc_loss=0.2261, over 1473652.43 frames. ], batch size: 144, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:26:48,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=103274.66666666667, ans=0.2 +2024-09-01 01:27:24,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103328.0, ans=0.125 +2024-09-01 01:27:28,280 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.96 vs. limit=15.0 +2024-09-01 01:27:48,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103381.33333333333, ans=0.1 +2024-09-01 01:27:55,386 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-5.pt +2024-09-01 01:28:06,319 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 01:28:06,319 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:28:06,346 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 01:28:14,505 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 0, loss[loss=0.2938, simple_loss=0.2489, pruned_loss=0.113, ctc_loss=0.2044, over 18610.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.2489, pruned_loss=0.113, ctc_loss=0.2044, over 18610.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:28:14,506 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:28:37,895 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 6, validation: loss=0.2789, simple_loss=0.2369, pruned_loss=0.1122, ctc_loss=0.1819, over 1073944.00 frames. +2024-09-01 01:28:37,896 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 01:28:57,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103424.0, ans=0.125 +2024-09-01 01:30:32,255 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 50, loss[loss=0.2775, simple_loss=0.2359, pruned_loss=0.1085, ctc_loss=0.1889, over 19047.00 frames. ], tot_loss[loss=0.3118, simple_loss=0.2575, pruned_loss=0.1305, ctc_loss=0.2251, over 829577.21 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:31:04,059 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:31:04,785 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.055e+02 2.419e+02 2.583e+02 2.819e+02 4.094e+02, threshold=5.165e+02, percent-clipped=0.0 +2024-09-01 01:31:38,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-09-01 01:32:43,095 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 100, loss[loss=0.2663, simple_loss=0.2264, pruned_loss=0.1063, ctc_loss=0.1789, over 19232.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.2479, pruned_loss=0.1225, ctc_loss=0.212, over 1476247.28 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:44,575 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-6.pt +2024-09-01 01:33:49,546 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 01:33:49,546 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:33:49,572 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 01:33:57,915 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 0, loss[loss=0.2773, simple_loss=0.2403, pruned_loss=0.107, ctc_loss=0.1771, over 18570.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.2403, pruned_loss=0.107, ctc_loss=0.1771, over 18570.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:57,916 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:34:21,896 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 7, validation: loss=0.2604, simple_loss=0.2251, pruned_loss=0.1007, ctc_loss=0.1681, over 1073944.00 frames. +2024-09-01 01:34:21,897 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 01:34:25,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104106.66666666667, ans=0.1 +2024-09-01 01:34:52,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=104160.0, ans=0.0 +2024-09-01 01:35:05,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.97 vs. 
limit=15.0 +2024-09-01 01:35:34,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=104213.33333333333, ans=0.0 +2024-09-01 01:35:40,238 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.049e+02 2.272e+02 2.384e+02 2.601e+02 4.291e+02, threshold=4.768e+02, percent-clipped=0.0 +2024-09-01 01:37:02,200 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 50, loss[loss=0.3143, simple_loss=0.2592, pruned_loss=0.1296, ctc_loss=0.2374, over 18968.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.2432, pruned_loss=0.1174, ctc_loss=0.2108, over 827907.61 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:38:09,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104426.66666666667, ans=0.125 +2024-09-01 01:39:04,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.70 vs. limit=15.0 +2024-09-01 01:39:36,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=104533.33333333333, ans=15.0 +2024-09-01 01:39:53,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.59 vs. limit=22.5 +2024-09-01 01:40:47,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=104586.66666666667, ans=0.1 +2024-09-01 01:40:47,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104586.66666666667, ans=0.1 +2024-09-01 01:40:52,485 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 100, loss[loss=0.286, simple_loss=0.2391, pruned_loss=0.1153, ctc_loss=0.2109, over 19302.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.241, pruned_loss=0.1167, ctc_loss=0.2105, over 1473040.93 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:41:30,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=104640.0, ans=0.125 +2024-09-01 01:41:50,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=104693.33333333333, ans=0.035 +2024-09-01 01:42:07,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=12.0 +2024-09-01 01:42:59,045 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-7.pt +2024-09-01 01:43:04,118 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 01:43:04,119 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:43:04,145 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 01:43:13,102 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 0, loss[loss=0.2877, simple_loss=0.2454, pruned_loss=0.1148, ctc_loss=0.1975, over 18485.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.2454, pruned_loss=0.1148, ctc_loss=0.1975, over 18485.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:43:13,103 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:44:04,944 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 8, validation: loss=0.2572, simple_loss=0.2228, pruned_loss=0.09973, ctc_loss=0.1708, over 1073944.00 frames. +2024-09-01 01:44:04,945 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 01:44:08,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=15.0 +2024-09-01 01:44:16,260 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:44:25,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.924e+02 2.205e+02 2.324e+02 2.533e+02 3.850e+02, threshold=4.647e+02, percent-clipped=0.0 +2024-09-01 01:51:43,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=104842.66666666667, ans=0.0 +2024-09-01 01:53:51,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=104949.33333333333, ans=6.0 +2024-09-01 01:54:21,882 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.58 vs. limit=12.0 +2024-09-01 01:54:34,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=104949.33333333333, ans=0.05 +2024-09-01 01:54:53,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.77 vs. limit=15.0 +2024-09-01 01:56:19,986 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 50, loss[loss=0.2756, simple_loss=0.2331, pruned_loss=0.1082, ctc_loss=0.204, over 18938.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.2336, pruned_loss=0.109, ctc_loss=0.2025, over 828565.55 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:05,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.54 vs. limit=15.0 +2024-09-01 01:58:16,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=105109.33333333333, ans=0.0 +2024-09-01 01:59:06,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=105162.66666666667, ans=0.0 +2024-09-01 01:59:07,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.95 vs. limit=15.0 +2024-09-01 01:59:39,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=105216.0, ans=0.025 +2024-09-01 01:59:59,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=105216.0, ans=0.125 +2024-09-01 02:00:29,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.53 vs. 
limit=15.0 +2024-09-01 02:00:56,047 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 100, loss[loss=0.2612, simple_loss=0.2296, pruned_loss=0.09556, ctc_loss=0.1819, over 19222.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.2331, pruned_loss=0.109, ctc_loss=0.202, over 1474444.14 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:01:07,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.165e+02 2.362e+02 2.610e+02 3.254e+02, threshold=4.723e+02, percent-clipped=0.0 +2024-09-01 02:01:24,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=105322.66666666667, ans=0.0 +2024-09-01 02:01:34,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=105322.66666666667, ans=0.09899494936611666 +2024-09-01 02:01:57,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105376.0, ans=0.125 +2024-09-01 02:02:13,171 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=3.204e-02 +2024-09-01 02:02:56,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.18 vs. limit=22.5 +2024-09-01 02:02:57,167 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-8.pt +2024-09-01 02:03:01,685 INFO [dysarthria_finetune.py:1435] (0/4) (1365966848, 34072559616) +2024-09-01 02:03:01,685 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:03:01,712 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:03:10,292 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 0, loss[loss=0.2911, simple_loss=0.2426, pruned_loss=0.1212, ctc_loss=0.2131, over 18596.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.2426, pruned_loss=0.1212, ctc_loss=0.2131, over 18596.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:10,293 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:03:33,810 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 9, validation: loss=0.2431, simple_loss=0.2147, pruned_loss=0.0913, ctc_loss=0.1608, over 1073944.00 frames. +2024-09-01 02:03:33,811 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:04:28,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=105584.0, ans=0.2 +2024-09-01 02:04:49,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:05:08,864 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.40 vs. limit=15.0 +2024-09-01 02:05:31,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=105690.66666666667, ans=0.2 +2024-09-01 02:05:38,136 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 50, loss[loss=0.284, simple_loss=0.2405, pruned_loss=0.1115, ctc_loss=0.2153, over 19065.00 frames. 
], tot_loss[loss=0.2708, simple_loss=0.2311, pruned_loss=0.1053, ctc_loss=0.2006, over 828972.56 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:48,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.06 vs. limit=22.5 +2024-09-01 02:05:58,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=105744.0, ans=0.125 +2024-09-01 02:05:58,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105744.0, ans=0.125 +2024-09-01 02:06:37,638 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.168e+02 2.346e+02 2.556e+02 3.441e+02, threshold=4.692e+02, percent-clipped=0.0 +2024-09-01 02:06:49,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105904.0, ans=0.125 +2024-09-01 02:07:58,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=105957.33333333333, ans=15.0 +2024-09-01 02:08:13,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105957.33333333333, ans=0.125 +2024-09-01 02:08:17,420 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 100, loss[loss=0.2064, simple_loss=0.1894, pruned_loss=0.06727, ctc_loss=0.1475, over 19269.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.2268, pruned_loss=0.1013, ctc_loss=0.1939, over 1474236.32 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:48,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=106064.0, ans=0.2 +2024-09-01 02:08:50,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106064.0, ans=0.125 +2024-09-01 02:08:52,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=106064.0, ans=0.2 +2024-09-01 02:09:05,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=106117.33333333333, ans=0.0 +2024-09-01 02:09:08,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.68 vs. limit=22.5 +2024-09-01 02:09:14,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-09-01 02:09:19,435 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-9.pt +2024-09-01 02:09:26,254 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 02:09:26,254 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:09:26,282 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:09:34,441 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 0, loss[loss=0.2289, simple_loss=0.2026, pruned_loss=0.08397, ctc_loss=0.1637, over 18682.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2026, pruned_loss=0.08397, ctc_loss=0.1637, over 18682.00 frames. 
], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:09:34,441 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:09:58,831 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 10, validation: loss=0.2363, simple_loss=0.211, pruned_loss=0.08786, ctc_loss=0.1591, over 1073944.00 frames. +2024-09-01 02:09:58,832 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:10:04,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=106165.33333333333, ans=0.0 +2024-09-01 02:10:17,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=106165.33333333333, ans=0.1 +2024-09-01 02:10:35,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.07 vs. limit=15.0 +2024-09-01 02:10:41,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=106272.0, ans=0.125 +2024-09-01 02:11:00,989 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:11:11,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:11:22,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.27 vs. limit=6.0 +2024-09-01 02:11:36,258 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.880e+02 2.111e+02 2.256e+02 2.412e+02 3.661e+02, threshold=4.511e+02, percent-clipped=0.0 +2024-09-01 02:11:39,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106378.66666666667, ans=0.125 +2024-09-01 02:11:39,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106378.66666666667, ans=0.125 +2024-09-01 02:11:47,312 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 50, loss[loss=0.305, simple_loss=0.255, pruned_loss=0.1214, ctc_loss=0.2443, over 19012.00 frames. ], tot_loss[loss=0.262, simple_loss=0.2252, pruned_loss=0.101, ctc_loss=0.1974, over 829104.52 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:11:50,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:11:50,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:12:18,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=106485.33333333333, ans=0.125 +2024-09-01 02:12:36,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=106538.66666666667, ans=0.025 +2024-09-01 02:12:41,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:13:21,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0 +2024-09-01 02:13:23,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.94 vs. limit=15.0 +2024-09-01 02:13:32,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106645.33333333333, ans=0.125 +2024-09-01 02:13:35,237 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 100, loss[loss=0.2427, simple_loss=0.216, pruned_loss=0.08691, ctc_loss=0.1835, over 19226.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.2232, pruned_loss=0.09907, ctc_loss=0.1934, over 1474931.95 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:13:51,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106698.66666666667, ans=0.125 +2024-09-01 02:14:15,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106752.0, ans=0.125 +2024-09-01 02:14:17,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106805.33333333333, ans=0.1 +2024-09-01 02:14:23,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.85 vs. limit=15.0 +2024-09-01 02:14:27,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=106805.33333333333, ans=0.025 +2024-09-01 02:14:34,899 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-10.pt +2024-09-01 02:14:39,264 INFO [dysarthria_finetune.py:1435] (0/4) (1368064000, 34072559616) +2024-09-01 02:14:39,264 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:14:39,291 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:14:48,321 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 0, loss[loss=0.2698, simple_loss=0.2322, pruned_loss=0.1077, ctc_loss=0.1957, over 18505.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.2322, pruned_loss=0.1077, ctc_loss=0.1957, over 18505.00 frames. 
], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:14:48,322 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:15:11,808 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 11, validation: loss=0.2335, simple_loss=0.2098, pruned_loss=0.0867, ctc_loss=0.1618, over 1073944.00 frames. +2024-09-01 02:15:11,809 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:15:22,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106853.33333333333, ans=0.1 +2024-09-01 02:16:01,165 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.106e+02 2.175e+02 2.350e+02 3.456e+02, threshold=4.351e+02, percent-clipped=0.0 +2024-09-01 02:16:03,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106906.66666666667, ans=0.1 +2024-09-01 02:16:17,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=10.45 vs. limit=15.0 +2024-09-01 02:16:36,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=107013.33333333333, ans=0.0 +2024-09-01 02:16:54,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=107013.33333333333, ans=0.025 +2024-09-01 02:21:55,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107066.66666666667, ans=0.0 +2024-09-01 02:21:58,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=107066.66666666667, ans=0.025 +2024-09-01 02:22:24,190 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 50, loss[loss=0.2693, simple_loss=0.2289, pruned_loss=0.1065, ctc_loss=0.211, over 19023.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.2218, pruned_loss=0.09766, ctc_loss=0.1965, over 827570.26 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:22:31,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=107120.0, ans=0.125 +2024-09-01 02:22:31,947 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.89 vs. limit=22.5 +2024-09-01 02:23:34,131 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:24:12,454 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:24:19,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.92 vs. limit=15.0 +2024-09-01 02:24:26,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107280.0, ans=0.1 +2024-09-01 02:25:09,439 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 100, loss[loss=0.2193, simple_loss=0.1998, pruned_loss=0.07434, ctc_loss=0.1746, over 19237.00 frames. 
], tot_loss[loss=0.2498, simple_loss=0.2183, pruned_loss=0.0944, ctc_loss=0.1915, over 1473115.37 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 16.0 +2024-09-01 02:25:36,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107440.0, ans=0.1 +2024-09-01 02:25:41,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.31 vs. limit=6.0 +2024-09-01 02:25:52,647 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.027e+02 2.133e+02 2.278e+02 3.178e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-09-01 02:25:54,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.56 vs. limit=15.0 +2024-09-01 02:26:15,389 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-11.pt +2024-09-01 02:26:28,581 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 02:26:28,582 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:26:28,608 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:26:37,015 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 0, loss[loss=0.2513, simple_loss=0.2169, pruned_loss=0.1003, ctc_loss=0.1861, over 18585.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.2169, pruned_loss=0.1003, ctc_loss=0.1861, over 18585.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:37,015 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:27:00,632 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 12, validation: loss=0.2234, simple_loss=0.2042, pruned_loss=0.08189, ctc_loss=0.1554, over 1073944.00 frames. +2024-09-01 02:27:00,632 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:27:19,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=107541.33333333333, ans=0.2 +2024-09-01 02:27:24,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=15.0 +2024-09-01 02:27:38,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=107594.66666666667, ans=10.0 +2024-09-01 02:27:43,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=107594.66666666667, ans=0.0 +2024-09-01 02:28:37,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107754.66666666667, ans=0.1 +2024-09-01 02:28:37,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0 +2024-09-01 02:28:39,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=107754.66666666667, ans=0.0 +2024-09-01 02:28:53,634 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 50, loss[loss=0.2094, simple_loss=0.1964, pruned_loss=0.06983, ctc_loss=0.1595, over 18986.00 frames. 
], tot_loss[loss=0.2357, simple_loss=0.2107, pruned_loss=0.08652, ctc_loss=0.1809, over 829307.75 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:28:55,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=12.0 +2024-09-01 02:29:31,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107861.33333333333, ans=0.1 +2024-09-01 02:29:38,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107914.66666666667, ans=0.125 +2024-09-01 02:29:38,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0 +2024-09-01 02:29:56,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.81 vs. limit=15.0 +2024-09-01 02:30:06,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107968.0, ans=0.125 +2024-09-01 02:30:20,075 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.056e+02 2.167e+02 2.338e+02 2.987e+02, threshold=4.333e+02, percent-clipped=0.0 +2024-09-01 02:30:23,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108021.33333333333, ans=0.125 +2024-09-01 02:34:06,108 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 100, loss[loss=0.1869, simple_loss=0.1783, pruned_loss=0.06038, ctc_loss=0.1445, over 19194.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.211, pruned_loss=0.08909, ctc_loss=0.1836, over 1473409.16 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:34:14,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=10.70 vs. limit=12.0 +2024-09-01 02:35:00,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.59 vs. limit=10.0 +2024-09-01 02:35:45,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.98 vs. limit=6.0 +2024-09-01 02:36:42,074 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-12.pt +2024-09-01 02:36:46,406 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 02:36:46,406 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:36:46,433 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:36:54,943 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 0, loss[loss=0.2847, simple_loss=0.2388, pruned_loss=0.1179, ctc_loss=0.2214, over 18643.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.2388, pruned_loss=0.1179, ctc_loss=0.2214, over 18643.00 frames. 
], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:54,944 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:37:18,556 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 13, validation: loss=0.2186, simple_loss=0.2014, pruned_loss=0.08061, ctc_loss=0.1543, over 1073944.00 frames. +2024-09-01 02:37:18,557 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:37:37,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=108229.33333333333, ans=0.2 +2024-09-01 02:37:45,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.23 vs. limit=15.0 +2024-09-01 02:37:54,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 02:37:57,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.47 vs. limit=12.0 +2024-09-01 02:38:02,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108336.0, ans=0.1 +2024-09-01 02:38:19,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=108336.0, ans=0.035 +2024-09-01 02:39:08,430 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 50, loss[loss=0.19, simple_loss=0.1872, pruned_loss=0.05898, ctc_loss=0.1459, over 19011.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2092, pruned_loss=0.08686, ctc_loss=0.1819, over 829773.70 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:14,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=108496.0, ans=0.0 +2024-09-01 02:39:18,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=108496.0, ans=0.2 +2024-09-01 02:39:21,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.22 vs. limit=22.5 +2024-09-01 02:39:30,401 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.005e+02 2.143e+02 2.348e+02 3.224e+02, threshold=4.286e+02, percent-clipped=0.0 +2024-09-01 02:40:02,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=108602.66666666667, ans=0.2 +2024-09-01 02:40:17,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:40:28,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.71 vs. limit=22.5 +2024-09-01 02:40:34,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-09-01 02:40:56,423 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 100, loss[loss=0.1837, simple_loss=0.1699, pruned_loss=0.06266, ctc_loss=0.1548, over 19225.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2081, pruned_loss=0.08664, ctc_loss=0.1802, over 1474982.28 frames. 
], batch size: 144, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:40:59,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=108762.66666666667, ans=0.2 +2024-09-01 02:41:10,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 02:41:30,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:41:41,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0 +2024-09-01 02:41:47,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=108869.33333333333, ans=0.025 +2024-09-01 02:41:56,706 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-13.pt +2024-09-01 02:42:01,259 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 02:42:01,260 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:42:01,288 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:42:09,606 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 0, loss[loss=0.2404, simple_loss=0.2186, pruned_loss=0.09083, ctc_loss=0.1789, over 18695.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.2186, pruned_loss=0.09083, ctc_loss=0.1789, over 18695.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:42:09,607 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:42:31,154 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([4.3587, 4.3646, 4.6946, 4.5945, 4.5833, 4.5968, 4.6705, 4.4017], + device='cuda:0') +2024-09-01 02:42:33,559 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 14, validation: loss=0.209, simple_loss=0.1966, pruned_loss=0.0763, ctc_loss=0.148, over 1073944.00 frames. +2024-09-01 02:42:33,560 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:42:41,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 02:42:48,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108917.33333333333, ans=0.1 +2024-09-01 02:42:57,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=108970.66666666667, ans=0.2 +2024-09-01 02:43:35,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=109024.0, ans=0.0 +2024-09-01 02:43:42,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.90 vs. 
limit=15.0 +2024-09-01 02:43:42,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.996e+02 2.096e+02 2.326e+02 2.912e+02, threshold=4.192e+02, percent-clipped=0.0 +2024-09-01 02:44:24,652 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 50, loss[loss=0.1948, simple_loss=0.1946, pruned_loss=0.06246, ctc_loss=0.147, over 18964.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2036, pruned_loss=0.08346, ctc_loss=0.1768, over 828263.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:44:28,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:45:22,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109290.66666666667, ans=0.125 +2024-09-01 02:45:35,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.29 vs. limit=15.0 +2024-09-01 02:46:09,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=15.0 +2024-09-01 02:46:12,178 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 100, loss[loss=0.196, simple_loss=0.1811, pruned_loss=0.07169, ctc_loss=0.1541, over 19207.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2029, pruned_loss=0.08251, ctc_loss=0.1739, over 1474261.28 frames. ], batch size: 144, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:46:36,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109504.0, ans=0.125 +2024-09-01 02:47:12,361 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-14.pt +2024-09-01 02:47:17,068 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 02:47:17,068 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:47:17,095 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:47:25,787 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 0, loss[loss=0.276, simple_loss=0.2437, pruned_loss=0.1108, ctc_loss=0.2061, over 18509.00 frames. ], tot_loss[loss=0.276, simple_loss=0.2437, pruned_loss=0.1108, ctc_loss=0.2061, over 18509.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:47:25,787 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:48:03,604 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 15, validation: loss=0.2059, simple_loss=0.1951, pruned_loss=0.07588, ctc_loss=0.1481, over 1073944.00 frames. 
+2024-09-01 02:48:03,605 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:48:13,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=109605.33333333333, ans=0.125 +2024-09-01 02:48:20,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.965e+02 2.102e+02 2.301e+02 3.159e+02, threshold=4.205e+02, percent-clipped=0.0 +2024-09-01 02:48:27,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=109605.33333333333, ans=0.95 +2024-09-01 02:50:38,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109765.33333333333, ans=0.125 +2024-09-01 02:51:41,397 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 50, loss[loss=0.2064, simple_loss=0.1964, pruned_loss=0.07236, ctc_loss=0.1668, over 19011.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2025, pruned_loss=0.08352, ctc_loss=0.1791, over 827942.50 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:53:10,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.78 vs. limit=15.0 +2024-09-01 02:53:57,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.77 vs. limit=22.5 +2024-09-01 02:54:10,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110032.0, ans=0.1 +2024-09-01 02:54:19,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=110032.0, ans=0.025 +2024-09-01 02:55:02,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110085.33333333333, ans=0.125 +2024-09-01 02:55:28,635 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 100, loss[loss=0.1792, simple_loss=0.1763, pruned_loss=0.0591, ctc_loss=0.1513, over 19251.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2002, pruned_loss=0.08205, ctc_loss=0.1755, over 1473903.80 frames. ], batch size: 144, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:55:44,087 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 1.961e+02 2.099e+02 2.266e+02 2.969e+02, threshold=4.197e+02, percent-clipped=0.0 +2024-09-01 02:57:18,309 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-15.pt +2024-09-01 02:57:24,862 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 02:57:24,862 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:57:24,890 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 02:57:33,288 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 0, loss[loss=0.2495, simple_loss=0.2262, pruned_loss=0.09837, ctc_loss=0.1861, over 18729.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.2262, pruned_loss=0.09837, ctc_loss=0.1861, over 18729.00 frames. 
], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:57:33,289 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:58:10,349 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 16, validation: loss=0.2065, simple_loss=0.1951, pruned_loss=0.07751, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 02:58:10,349 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 02:58:32,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=110293.33333333333, ans=0.0 +2024-09-01 02:58:38,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=110346.66666666667, ans=0.0 +2024-09-01 02:58:49,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=110346.66666666667, ans=0.125 +2024-09-01 02:59:01,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=110346.66666666667, ans=0.025 +2024-09-01 02:59:21,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=110400.0, ans=0.025 +2024-09-01 03:00:21,805 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 50, loss[loss=0.2085, simple_loss=0.1941, pruned_loss=0.07676, ctc_loss=0.1723, over 18988.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.1995, pruned_loss=0.08073, ctc_loss=0.1727, over 828175.61 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:00:45,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110560.0, ans=0.125 +2024-09-01 03:01:02,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=110613.33333333333, ans=0.125 +2024-09-01 03:01:12,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=110613.33333333333, ans=0.0 +2024-09-01 03:01:25,489 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.686e+02 1.971e+02 2.093e+02 2.277e+02 2.936e+02, threshold=4.187e+02, percent-clipped=0.0 +2024-09-01 03:02:04,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=110773.33333333333, ans=0.125 +2024-09-01 03:02:22,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-09-01 03:02:24,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-09-01 03:02:27,247 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 100, loss[loss=0.2253, simple_loss=0.1982, pruned_loss=0.08526, ctc_loss=0.2047, over 19270.00 frames. ], tot_loss[loss=0.213, simple_loss=0.1975, pruned_loss=0.07974, ctc_loss=0.1709, over 1473314.28 frames. 
], batch size: 144, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:02:37,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:03:29,007 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-16.pt +2024-09-01 03:03:32,845 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 03:03:32,846 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:03:32,873 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 03:03:41,377 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 0, loss[loss=0.2455, simple_loss=0.2106, pruned_loss=0.1009, ctc_loss=0.1965, over 18739.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.2106, pruned_loss=0.1009, ctc_loss=0.1965, over 18739.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:03:41,377 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:03:43,427 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.7321, 2.9711, 2.8178, 3.0906], device='cuda:0') +2024-09-01 03:03:45,988 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.6832, 1.6698, 1.6951, 1.7560, 1.8006, 1.7465, 1.7382, 1.7596], + device='cuda:0') +2024-09-01 03:04:05,400 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 17, validation: loss=0.1943, simple_loss=0.1886, pruned_loss=0.07183, ctc_loss=0.1409, over 1073944.00 frames. +2024-09-01 03:04:05,401 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 03:04:40,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-09-01 03:04:42,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 03:05:09,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=111088.0, ans=0.0 +2024-09-01 03:05:42,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=111194.66666666667, ans=0.0 +2024-09-01 03:05:45,147 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 1.958e+02 2.075e+02 2.282e+02 2.777e+02, threshold=4.150e+02, percent-clipped=0.0 +2024-09-01 03:05:56,265 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 50, loss[loss=0.2139, simple_loss=0.2033, pruned_loss=0.07789, ctc_loss=0.1716, over 19028.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.1948, pruned_loss=0.07861, ctc_loss=0.1709, over 827378.67 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:06:10,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.08 vs. 
limit=15.0 +2024-09-01 03:07:29,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=111461.33333333333, ans=0.125 +2024-09-01 03:07:40,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.88 vs. limit=15.0 +2024-09-01 03:07:45,179 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 100, loss[loss=0.1577, simple_loss=0.1609, pruned_loss=0.05103, ctc_loss=0.1309, over 19218.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.1959, pruned_loss=0.07927, ctc_loss=0.1712, over 1473529.96 frames. ], batch size: 144, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:07:50,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.26 vs. limit=6.0 +2024-09-01 03:07:56,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111514.66666666667, ans=0.1 +2024-09-01 03:08:44,481 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-17.pt +2024-09-01 03:09:07,772 INFO [dysarthria_finetune.py:1435] (0/4) (1368064000, 34072559616) +2024-09-01 03:09:07,772 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:09:07,799 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 03:09:15,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 03:09:16,219 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 0, loss[loss=0.2407, simple_loss=0.216, pruned_loss=0.095, ctc_loss=0.1886, over 18538.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.216, pruned_loss=0.095, ctc_loss=0.1886, over 18538.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:09:16,220 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:09:39,625 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 18, validation: loss=0.1961, simple_loss=0.1886, pruned_loss=0.07291, ctc_loss=0.1441, over 1073944.00 frames. +2024-09-01 03:09:39,625 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 03:09:43,354 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.651e-01 +2024-09-01 03:09:45,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111669.33333333333, ans=0.125 +2024-09-01 03:09:45,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111669.33333333333, ans=0.125 +2024-09-01 03:10:14,931 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.703e+02 1.913e+02 2.060e+02 2.285e+02 3.151e+02, threshold=4.120e+02, percent-clipped=0.0 +2024-09-01 03:10:32,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.78 vs. limit=22.5 +2024-09-01 03:10:55,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.55 vs. 
limit=15.0 +2024-09-01 03:10:57,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=111829.33333333333, ans=0.125 +2024-09-01 03:11:28,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111936.0, ans=0.125 +2024-09-01 03:11:29,067 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 50, loss[loss=0.1717, simple_loss=0.1789, pruned_loss=0.05455, ctc_loss=0.1384, over 18998.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.1957, pruned_loss=0.07887, ctc_loss=0.1723, over 828205.61 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:11:36,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=111936.0, ans=0.125 +2024-09-01 03:11:45,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.42 vs. limit=22.5 +2024-09-01 03:12:31,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=112042.66666666667, ans=0.2 +2024-09-01 03:12:44,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=112096.0, ans=0.2 +2024-09-01 03:13:33,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.00 vs. limit=22.5 +2024-09-01 03:13:44,692 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 100, loss[loss=0.1647, simple_loss=0.1685, pruned_loss=0.05338, ctc_loss=0.1351, over 19294.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.1936, pruned_loss=0.077, ctc_loss=0.1672, over 1473690.24 frames. ], batch size: 144, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:13:56,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112202.66666666667, ans=0.125 +2024-09-01 03:14:14,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=112256.0, ans=0.0 +2024-09-01 03:14:19,257 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 1.898e+02 2.020e+02 2.262e+02 2.800e+02, threshold=4.040e+02, percent-clipped=0.0 +2024-09-01 03:14:37,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=112309.33333333333, ans=0.125 +2024-09-01 03:14:38,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.19 vs. limit=22.5 +2024-09-01 03:14:43,282 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-18.pt +2024-09-01 03:14:46,652 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 03:14:46,652 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:14:46,680 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 03:14:55,813 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 0, loss[loss=0.2336, simple_loss=0.2161, pruned_loss=0.09103, ctc_loss=0.1725, over 18598.00 frames. 
], tot_loss[loss=0.2336, simple_loss=0.2161, pruned_loss=0.09103, ctc_loss=0.1725, over 18598.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:14:55,814 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:15:42,617 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 19, validation: loss=0.1928, simple_loss=0.1862, pruned_loss=0.07146, ctc_loss=0.1413, over 1073944.00 frames. +2024-09-01 03:15:42,618 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 03:16:05,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112352.0, ans=0.125 +2024-09-01 03:16:28,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.64 vs. limit=15.0 +2024-09-01 03:16:43,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=112405.33333333333, ans=0.0 +2024-09-01 03:16:47,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=112405.33333333333, ans=0.5 +2024-09-01 03:17:43,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 03:17:51,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=112458.66666666667, ans=0.2 +2024-09-01 03:18:20,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=112512.0, ans=0.0 +2024-09-01 03:19:34,380 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 50, loss[loss=0.1994, simple_loss=0.1861, pruned_loss=0.07271, ctc_loss=0.1683, over 19038.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.1873, pruned_loss=0.07333, ctc_loss=0.1613, over 827203.46 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:20:28,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=112672.0, ans=0.025 +2024-09-01 03:21:16,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112725.33333333333, ans=0.125 +2024-09-01 03:21:58,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=112725.33333333333, ans=0.04949747468305833 +2024-09-01 03:22:34,904 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 1.922e+02 2.090e+02 2.243e+02 2.725e+02, threshold=4.180e+02, percent-clipped=0.0 +2024-09-01 03:22:47,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112832.0, ans=0.0 +2024-09-01 03:23:34,280 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 100, loss[loss=0.1909, simple_loss=0.1792, pruned_loss=0.06871, ctc_loss=0.1631, over 19274.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.1869, pruned_loss=0.07385, ctc_loss=0.1614, over 1472434.33 frames. 
], batch size: 144, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:24:30,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112938.66666666667, ans=0.125 +2024-09-01 03:24:30,719 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=12.47 vs. limit=12.0 +2024-09-01 03:24:44,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=112992.0, ans=0.05 +2024-09-01 03:24:59,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=112992.0, ans=0.035 +2024-09-01 03:25:10,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0 +2024-09-01 03:25:13,449 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-19.pt +2024-09-01 03:25:17,836 INFO [dysarthria_finetune.py:1435] (0/4) (1328218112, 34072559616) +2024-09-01 03:25:17,836 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:25:17,864 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 03:25:27,036 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 0, loss[loss=0.1914, simple_loss=0.1827, pruned_loss=0.07141, ctc_loss=0.1432, over 18599.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.1827, pruned_loss=0.07141, ctc_loss=0.1432, over 18599.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:25:27,037 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:26:10,761 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 20, validation: loss=0.19, simple_loss=0.1838, pruned_loss=0.07041, ctc_loss=0.1385, over 1073944.00 frames. +2024-09-01 03:26:10,762 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26728MB +2024-09-01 03:26:26,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.19 vs. limit=15.0 +2024-09-01 03:26:39,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=113040.0, ans=0.0 +2024-09-01 03:27:12,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=113093.33333333333, ans=0.2 +2024-09-01 03:27:59,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=113146.66666666667, ans=0.125 +2024-09-01 03:28:34,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=113200.0, ans=0.0 +2024-09-01 03:29:04,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113253.33333333333, ans=0.2 +2024-09-01 03:29:26,707 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 50, loss[loss=0.223, simple_loss=0.202, pruned_loss=0.08467, ctc_loss=0.1864, over 18985.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.1895, pruned_loss=0.07666, ctc_loss=0.1662, over 828130.18 frames. 
], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:30:05,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 1.917e+02 2.046e+02 2.200e+02 2.791e+02, threshold=4.093e+02, percent-clipped=0.0 +2024-09-01 03:30:47,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=113360.0, ans=0.2 +2024-09-01 03:31:57,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=113466.66666666667, ans=0.1 +2024-09-01 03:32:17,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.19 vs. limit=10.0 +2024-09-01 03:32:43,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113520.0, ans=0.0 +2024-09-01 03:33:06,750 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 100, loss[loss=0.1667, simple_loss=0.1658, pruned_loss=0.05756, ctc_loss=0.1309, over 19321.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.1871, pruned_loss=0.0747, ctc_loss=0.1609, over 1472900.97 frames. ], batch size: 144, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:34:16,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=113626.66666666667, ans=0.025 +2024-09-01 03:35:10,299 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune/epoch-20.pt +2024-09-01 03:35:13,227 INFO [dysarthria_finetune.py:1435] (0/4) (1368064000, 34072559616) +2024-09-01 03:35:13,227 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:35:13,254 INFO [dysarthria_finetune.py:1440] (0/4) (29818028032, 34072559616) +2024-09-01 03:35:13,254 INFO [dysarthria_finetune.py:1442] (0/4) Done! 
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-1 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-1 new file mode 100644 index 0000000000000000000000000000000000000000..a9f4d26becd84f5feab92e48f5e8795e21f05187 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-1 @@ -0,0 +1,544 @@ +2024-08-31 22:09:00,008 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-31 22:09:15,261 INFO [dysarthria_finetune.py:1214] (1/4) (32783400960, 34072559616) +2024-08-31 22:09:15,261 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-31 22:09:15,629 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-31 22:09:15,630 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-31 22:09:15,633 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2653.int.cedar.computecanada.ca', 'IP address': '172.16.146.90'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 
'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:09:15,633 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-31 22:09:16,872 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66367431 +2024-08-31 22:09:16,872 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt +2024-08-31 22:10:50,764 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-31 22:11:01,688 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-31 22:11:01,783 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-31 22:11:17,376 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-31 22:11:18,296 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-31 22:11:18,297 INFO [dysarthria_asr_datamodule.py:501] (1/4) About to get dev cuts +2024-08-31 22:11:18,428 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-31 22:11:18,755 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-31 22:11:18,755 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:12:59,093 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.41 vs. limit=5.0 +2024-08-31 22:12:59,566 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=12.58 vs. limit=7.5 +2024-08-31 22:13:03,035 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12709MB +2024-08-31 22:13:04,365 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=18.27 vs. limit=7.5 +2024-08-31 22:13:04,879 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12709MB +2024-08-31 22:14:12,900 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12709MB +2024-08-31 22:14:14,953 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12709MB +2024-08-31 22:19:42,273 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=25.56 vs. limit=7.5 +2024-08-31 22:19:51,909 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12709MB +2024-08-31 22:19:54,347 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12709MB +2024-08-31 22:20:43,750 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.4218, simple_loss=0.336, pruned_loss=0.2127, ctc_loss=0.3225, over 18549.00 frames. ], tot_loss[loss=0.4218, simple_loss=0.336, pruned_loss=0.2127, ctc_loss=0.3225, over 18549.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:43,751 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-31 22:46:04,587 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3942, simple_loss=0.3187, pruned_loss=0.1927, ctc_loss=0.281, over 1073944.00 frames. +2024-08-31 22:46:09,467 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13137MB +2024-08-31 23:01:27,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-31 23:06:11,558 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.685e+02 9.975e+02 1.051e+03 1.091e+03 1.133e+03, threshold=4.203e+03, percent-clipped=0.0 +2024-08-31 23:25:13,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.960e+02 9.836e+02 1.043e+03 1.067e+03 1.144e+03, threshold=4.173e+03, percent-clipped=0.0 +2024-08-31 23:45:54,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.65 vs. limit=15.0 +2024-08-31 23:47:54,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=100160.0, ans=0.025 +2024-08-31 23:49:19,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100160.0, ans=0.125 +2024-08-31 23:49:50,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100160.0, ans=0.125 +2024-08-31 23:50:05,833 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.677e+02 8.648e+02 9.697e+02 1.051e+03 1.144e+03, threshold=3.879e+03, percent-clipped=0.0 +2024-08-31 23:55:15,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=100213.33333333333, ans=0.1 +2024-08-31 23:55:38,251 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.4441, simple_loss=0.3529, pruned_loss=0.2241, ctc_loss=0.3407, over 19042.00 frames. ], tot_loss[loss=0.433, simple_loss=0.346, pruned_loss=0.2183, ctc_loss=0.3241, over 827432.33 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 23:56:31,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100266.66666666667, ans=0.1 +2024-08-31 23:59:19,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=100266.66666666667, ans=0.0 +2024-09-01 00:01:26,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.44 vs. limit=15.0 +2024-09-01 00:02:57,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100320.0, ans=0.125 +2024-09-01 00:07:56,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=100373.33333333333, ans=0.0 +2024-09-01 00:14:36,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.44 vs. 
limit=22.5 +2024-09-01 00:16:22,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.35 vs. limit=15.0 +2024-09-01 00:18:32,452 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.599e+02 6.914e+02 7.776e+02 9.170e+02 1.144e+03, threshold=1.555e+03, percent-clipped=0.0 +2024-09-01 00:18:32,506 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.441, simple_loss=0.349, pruned_loss=0.2281, ctc_loss=0.3385, over 19093.00 frames. ], tot_loss[loss=0.4152, simple_loss=0.3328, pruned_loss=0.2049, ctc_loss=0.3073, over 1470684.91 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:22:05,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.56 vs. limit=15.0 +2024-09-01 00:22:28,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100586.66666666667, ans=0.0 +2024-09-01 00:28:33,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=100640.0, ans=0.125 +2024-09-01 00:28:35,595 INFO [dysarthria_finetune.py:1435] (1/4) (14068416512, 34072559616) +2024-09-01 00:28:35,596 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 00:28:35,632 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 00:30:12,106 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 0, loss[loss=0.3566, simple_loss=0.2869, pruned_loss=0.158, ctc_loss=0.27, over 18746.00 frames. ], tot_loss[loss=0.3566, simple_loss=0.2869, pruned_loss=0.158, ctc_loss=0.27, over 18746.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:30:12,106 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 00:34:27,524 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 2, validation: loss=0.3547, simple_loss=0.2901, pruned_loss=0.1627, ctc_loss=0.2412, over 1073944.00 frames. +2024-09-01 00:34:27,525 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13656MB +2024-09-01 00:37:41,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.07 vs. limit=15.0 +2024-09-01 00:40:34,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=18.56 vs. limit=15.0 +2024-09-01 00:45:12,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=22.90 vs. limit=22.5 +2024-09-01 00:45:14,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100842.66666666667, ans=0.125 +2024-09-01 00:45:55,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100896.0, ans=0.125 +2024-09-01 00:46:00,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=29.89 vs. 
limit=22.5 +2024-09-01 00:47:24,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=100896.0, ans=0.125 +2024-09-01 00:48:18,417 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 50, loss[loss=0.4438, simple_loss=0.3531, pruned_loss=0.2176, ctc_loss=0.3362, over 19071.00 frames. ], tot_loss[loss=0.3919, simple_loss=0.3156, pruned_loss=0.1853, ctc_loss=0.285, over 827854.65 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 00:52:13,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.79 vs. limit=10.0 +2024-09-01 00:53:05,342 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 4.624e+02 4.997e+02 5.383e+02 6.686e+02, threshold=9.995e+02, percent-clipped=0.0 +2024-09-01 00:53:41,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=101056.0, ans=0.0 +2024-09-01 00:53:44,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=22.33 vs. limit=22.5 +2024-09-01 00:54:20,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.51 vs. limit=15.0 +2024-09-01 00:55:22,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.13 vs. limit=22.5 +2024-09-01 00:56:06,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101162.66666666667, ans=0.125 +2024-09-01 00:56:30,062 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 100, loss[loss=0.3696, simple_loss=0.3008, pruned_loss=0.1697, ctc_loss=0.2581, over 19090.00 frames. ], tot_loss[loss=0.3808, simple_loss=0.3075, pruned_loss=0.1777, ctc_loss=0.2748, over 1472213.55 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 00:57:14,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=101216.0, ans=15.0 +2024-09-01 01:00:20,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=101269.33333333333, ans=0.05 +2024-09-01 01:00:32,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=101322.66666666667, ans=0.2 +2024-09-01 01:01:10,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.76 vs. limit=22.5 +2024-09-01 01:01:16,816 INFO [dysarthria_finetune.py:1435] (1/4) (793444352, 34072559616) +2024-09-01 01:01:16,817 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:01:16,894 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 01:01:35,834 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 0, loss[loss=0.3973, simple_loss=0.3195, pruned_loss=0.1906, ctc_loss=0.2856, over 18511.00 frames. ], tot_loss[loss=0.3973, simple_loss=0.3195, pruned_loss=0.1906, ctc_loss=0.2856, over 18511.00 frames. 
], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:01:35,835 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:01:59,924 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 3, validation: loss=0.3274, simple_loss=0.2708, pruned_loss=0.1428, ctc_loss=0.2163, over 1073944.00 frames. +2024-09-01 01:01:59,925 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13713MB +2024-09-01 01:02:07,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=101370.66666666667, ans=0.2 +2024-09-01 01:03:09,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101424.0, ans=0.1 +2024-09-01 01:03:18,406 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.41 vs. limit=15.0 +2024-09-01 01:03:46,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=22.31 vs. limit=15.0 +2024-09-01 01:04:33,383 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 3.454e+02 3.711e+02 3.996e+02 5.509e+02, threshold=7.422e+02, percent-clipped=0.0 +2024-09-01 01:04:53,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.14 vs. limit=22.5 +2024-09-01 01:04:56,823 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 50, loss[loss=0.3554, simple_loss=0.2909, pruned_loss=0.1567, ctc_loss=0.2475, over 19005.00 frames. ], tot_loss[loss=0.3623, simple_loss=0.2945, pruned_loss=0.1628, ctc_loss=0.2579, over 828905.42 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:06:12,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=101797.33333333333, ans=0.0 +2024-09-01 01:06:42,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.22 vs. limit=15.0 +2024-09-01 01:06:46,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=101850.66666666667, ans=0.0 +2024-09-01 01:06:46,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:07:00,095 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 100, loss[loss=0.3174, simple_loss=0.2632, pruned_loss=0.1347, ctc_loss=0.2131, over 19133.00 frames. ], tot_loss[loss=0.3504, simple_loss=0.2856, pruned_loss=0.156, ctc_loss=0.2478, over 1474266.40 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:07:06,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=101904.0, ans=0.125 +2024-09-01 01:07:13,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.84 vs. 
limit=15.0 +2024-09-01 01:07:35,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=101904.0, ans=0.2 +2024-09-01 01:07:40,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101957.33333333333, ans=0.1 +2024-09-01 01:07:45,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.46 vs. limit=22.5 +2024-09-01 01:08:21,704 INFO [dysarthria_finetune.py:1435] (1/4) (1508573184, 34072559616) +2024-09-01 01:08:21,705 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:08:21,776 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 01:08:35,076 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 0, loss[loss=0.313, simple_loss=0.2583, pruned_loss=0.1351, ctc_loss=0.2132, over 18466.00 frames. ], tot_loss[loss=0.313, simple_loss=0.2583, pruned_loss=0.1351, ctc_loss=0.2132, over 18466.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:08:35,077 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:08:58,413 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 4, validation: loss=0.308, simple_loss=0.2573, pruned_loss=0.1299, ctc_loss=0.2, over 1073944.00 frames. +2024-09-01 01:08:58,414 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13713MB +2024-09-01 01:09:16,932 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=20.81 vs. limit=15.0 +2024-09-01 01:09:27,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=102106.66666666667, ans=0.07 +2024-09-01 01:09:33,166 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 2.869e+02 3.070e+02 3.452e+02 5.291e+02, threshold=6.140e+02, percent-clipped=0.0 +2024-09-01 01:10:51,070 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 50, loss[loss=0.3214, simple_loss=0.2639, pruned_loss=0.1394, ctc_loss=0.2247, over 18961.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.2752, pruned_loss=0.1446, ctc_loss=0.2375, over 827373.05 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:11:07,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=102320.0, ans=0.125 +2024-09-01 01:11:48,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102426.66666666667, ans=0.125 +2024-09-01 01:12:12,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=102480.0, ans=0.0 +2024-09-01 01:12:29,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102533.33333333333, ans=0.1 +2024-09-01 01:12:38,912 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 100, loss[loss=0.3201, simple_loss=0.2669, pruned_loss=0.1323, ctc_loss=0.217, over 19038.00 frames. ], tot_loss[loss=0.3311, simple_loss=0.2713, pruned_loss=0.1429, ctc_loss=0.2354, over 1472261.06 frames. 
], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:13:12,352 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.110e+02 2.669e+02 2.871e+02 3.122e+02 4.671e+02, threshold=5.742e+02, percent-clipped=0.0 +2024-09-01 01:13:40,161 INFO [dysarthria_finetune.py:1435] (1/4) (799735808, 34072559616) +2024-09-01 01:13:40,162 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:13:40,221 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 01:13:53,068 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 0, loss[loss=0.3073, simple_loss=0.2544, pruned_loss=0.1296, ctc_loss=0.2131, over 18670.00 frames. ], tot_loss[loss=0.3073, simple_loss=0.2544, pruned_loss=0.1296, ctc_loss=0.2131, over 18670.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:13:53,069 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:14:16,501 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 5, validation: loss=0.2909, simple_loss=0.2453, pruned_loss=0.1191, ctc_loss=0.1881, over 1073944.00 frames. +2024-09-01 01:14:16,501 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13713MB +2024-09-01 01:14:35,836 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.00 vs. limit=15.0 +2024-09-01 01:15:01,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=102741.33333333333, ans=0.0 +2024-09-01 01:15:18,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=102741.33333333333, ans=0.025 +2024-09-01 01:15:54,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.74 vs. limit=22.5 +2024-09-01 01:16:49,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=102848.0, ans=0.125 +2024-09-01 01:18:31,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102901.33333333333, ans=0.125 +2024-09-01 01:20:35,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.21 vs. limit=22.5 +2024-09-01 01:20:50,672 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 50, loss[loss=0.3274, simple_loss=0.2667, pruned_loss=0.1438, ctc_loss=0.2369, over 18968.00 frames. ], tot_loss[loss=0.3183, simple_loss=0.2627, pruned_loss=0.1338, ctc_loss=0.2259, over 828630.89 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:21:20,932 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.82 vs. 
limit=15.0 +2024-09-01 01:24:03,880 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:24:22,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:24:29,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:25:25,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=103168.0, ans=10.0 +2024-09-01 01:25:33,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.253e+02 2.485e+02 2.709e+02 2.997e+02 4.733e+02, threshold=5.419e+02, percent-clipped=0.0 +2024-09-01 01:26:02,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103221.33333333333, ans=0.125 +2024-09-01 01:26:19,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-09-01 01:26:23,125 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 100, loss[loss=0.3026, simple_loss=0.2513, pruned_loss=0.1244, ctc_loss=0.2155, over 19157.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.2586, pruned_loss=0.1315, ctc_loss=0.2222, over 1473409.40 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:26:40,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:26:52,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:27:36,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=103381.33333333333, ans=0.0 +2024-09-01 01:27:48,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=103381.33333333333, ans=0.125 +2024-09-01 01:27:54,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=103381.33333333333, ans=0.025 +2024-09-01 01:27:55,391 INFO [dysarthria_finetune.py:1435] (1/4) (468385792, 34072559616) +2024-09-01 01:27:55,392 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:27:55,477 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 01:28:14,515 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 0, loss[loss=0.3173, simple_loss=0.2612, pruned_loss=0.1317, ctc_loss=0.2336, over 18435.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.2612, pruned_loss=0.1317, ctc_loss=0.2336, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:28:14,516 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:28:37,903 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 6, validation: loss=0.2789, simple_loss=0.2369, pruned_loss=0.1122, ctc_loss=0.1819, over 1073944.00 frames. 
+2024-09-01 01:28:37,904 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13962MB +2024-09-01 01:28:58,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=103424.0, ans=0.125 +2024-09-01 01:29:21,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103477.33333333333, ans=0.1 +2024-09-01 01:29:21,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103477.33333333333, ans=0.125 +2024-09-01 01:29:32,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103530.66666666667, ans=0.1 +2024-09-01 01:29:58,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.87 vs. limit=15.0 +2024-09-01 01:30:02,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=103584.0, ans=0.0 +2024-09-01 01:30:11,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.90 vs. limit=15.0 +2024-09-01 01:30:29,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.14 vs. limit=15.0 +2024-09-01 01:30:32,266 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 50, loss[loss=0.2858, simple_loss=0.2375, pruned_loss=0.1173, ctc_loss=0.2057, over 19041.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.2489, pruned_loss=0.1209, ctc_loss=0.2115, over 827399.35 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:30:37,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 01:30:37,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 01:31:04,792 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.055e+02 2.419e+02 2.583e+02 2.819e+02 4.094e+02, threshold=5.165e+02, percent-clipped=0.0 +2024-09-01 01:31:06,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=103690.66666666667, ans=0.0 +2024-09-01 01:31:29,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103744.0, ans=0.125 +2024-09-01 01:31:40,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-09-01 01:31:45,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.55 vs. limit=15.0 +2024-09-01 01:31:51,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=103797.33333333333, ans=0.125 +2024-09-01 01:32:43,094 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 100, loss[loss=0.2708, simple_loss=0.2248, pruned_loss=0.1102, ctc_loss=0.1992, over 19066.00 frames. 
], tot_loss[loss=0.2947, simple_loss=0.2461, pruned_loss=0.1199, ctc_loss=0.2093, over 1471849.14 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:15,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 01:33:44,582 INFO [dysarthria_finetune.py:1435] (1/4) (1112211456, 34072559616) +2024-09-01 01:33:44,582 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:33:44,671 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 01:33:57,916 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 0, loss[loss=0.3301, simple_loss=0.2633, pruned_loss=0.1475, ctc_loss=0.2582, over 18532.00 frames. ], tot_loss[loss=0.3301, simple_loss=0.2633, pruned_loss=0.1475, ctc_loss=0.2582, over 18532.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:57,916 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:34:21,902 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 7, validation: loss=0.2604, simple_loss=0.2251, pruned_loss=0.1007, ctc_loss=0.1681, over 1073944.00 frames. +2024-09-01 01:34:21,902 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13962MB +2024-09-01 01:35:11,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104213.33333333333, ans=0.0 +2024-09-01 01:35:39,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.94 vs. limit=15.0 +2024-09-01 01:35:40,243 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.049e+02 2.272e+02 2.384e+02 2.601e+02 4.291e+02, threshold=4.768e+02, percent-clipped=0.0 +2024-09-01 01:36:31,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=104320.0, ans=0.125 +2024-09-01 01:36:31,718 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=12.0 +2024-09-01 01:36:51,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=19.61 vs. limit=15.0 +2024-09-01 01:37:01,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.16 vs. limit=15.0 +2024-09-01 01:37:02,202 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 50, loss[loss=0.291, simple_loss=0.241, pruned_loss=0.1187, ctc_loss=0.2188, over 19096.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.2425, pruned_loss=0.1165, ctc_loss=0.2101, over 827950.42 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:37:13,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.98 vs. limit=15.0 +2024-09-01 01:39:36,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=104533.33333333333, ans=0.125 +2024-09-01 01:40:52,492 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 100, loss[loss=0.2884, simple_loss=0.2429, pruned_loss=0.1151, ctc_loss=0.209, over 19105.00 frames. 
], tot_loss[loss=0.2826, simple_loss=0.238, pruned_loss=0.1125, ctc_loss=0.2036, over 1472811.51 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:41:42,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.40 vs. limit=15.0 +2024-09-01 01:42:01,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.93 vs. limit=15.0 +2024-09-01 01:42:16,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=104746.66666666667, ans=0.0 +2024-09-01 01:42:17,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=15.0 +2024-09-01 01:42:59,066 INFO [dysarthria_finetune.py:1435] (1/4) (14456389632, 34072559616) +2024-09-01 01:42:59,067 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:42:59,100 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 01:43:13,125 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 0, loss[loss=0.2446, simple_loss=0.2135, pruned_loss=0.09198, ctc_loss=0.1633, over 18679.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.2135, pruned_loss=0.09198, ctc_loss=0.1633, over 18679.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:43:13,125 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:44:04,948 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 8, validation: loss=0.2572, simple_loss=0.2228, pruned_loss=0.09973, ctc_loss=0.1708, over 1073944.00 frames. +2024-09-01 01:44:04,949 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13962MB +2024-09-01 01:44:09,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.93 vs. limit=12.0 +2024-09-01 01:44:25,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.924e+02 2.205e+02 2.324e+02 2.533e+02 3.850e+02, threshold=4.647e+02, percent-clipped=0.0 +2024-09-01 01:44:59,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.36 vs. limit=22.5 +2024-09-01 01:51:46,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104842.66666666667, ans=0.125 +2024-09-01 01:54:05,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-09-01 01:54:30,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=104949.33333333333, ans=0.05 +2024-09-01 01:55:52,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.69 vs. limit=15.0 +2024-09-01 01:56:19,988 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 50, loss[loss=0.255, simple_loss=0.2204, pruned_loss=0.09907, ctc_loss=0.173, over 19009.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.2345, pruned_loss=0.1097, ctc_loss=0.202, over 829068.39 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:59:30,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.64 vs. limit=15.0 +2024-09-01 01:59:33,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.84 vs. limit=22.5 +2024-09-01 01:59:49,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.73 vs. limit=22.5 +2024-09-01 02:00:03,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.93 vs. limit=6.0 +2024-09-01 02:00:08,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.22 vs. limit=12.0 +2024-09-01 02:00:24,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105269.33333333333, ans=0.125 +2024-09-01 02:00:56,068 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 100, loss[loss=0.2567, simple_loss=0.223, pruned_loss=0.09536, ctc_loss=0.185, over 19109.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.2327, pruned_loss=0.1081, ctc_loss=0.1993, over 1473116.98 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:00:58,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=15.0 +2024-09-01 02:01:07,952 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.165e+02 2.362e+02 2.610e+02 3.254e+02, threshold=4.723e+02, percent-clipped=0.0 +2024-09-01 02:01:20,673 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.21 vs. limit=22.5 +2024-09-01 02:02:31,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=105429.33333333333, ans=0.0 +2024-09-01 02:02:57,168 INFO [dysarthria_finetune.py:1435] (1/4) (954925056, 34072559616) +2024-09-01 02:02:57,169 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:02:57,236 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:03:10,308 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 0, loss[loss=0.2887, simple_loss=0.2399, pruned_loss=0.1205, ctc_loss=0.2133, over 18520.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.2399, pruned_loss=0.1205, ctc_loss=0.2133, over 18520.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:10,309 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:03:33,813 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 9, validation: loss=0.2431, simple_loss=0.2147, pruned_loss=0.0913, ctc_loss=0.1608, over 1073944.00 frames. 
+2024-09-01 02:03:33,813 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:03:43,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105477.33333333333, ans=0.125 +2024-09-01 02:03:52,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=105477.33333333333, ans=0.125 +2024-09-01 02:03:57,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.40 vs. limit=15.0 +2024-09-01 02:04:03,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=105530.66666666667, ans=0.2 +2024-09-01 02:04:12,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.50 vs. limit=22.5 +2024-09-01 02:04:26,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105584.0, ans=0.1 +2024-09-01 02:05:38,157 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 50, loss[loss=0.3039, simple_loss=0.2554, pruned_loss=0.1225, ctc_loss=0.2281, over 19008.00 frames. ], tot_loss[loss=0.268, simple_loss=0.2283, pruned_loss=0.1046, ctc_loss=0.1989, over 827563.28 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:39,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=105744.0, ans=0.025 +2024-09-01 02:05:42,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.93 vs. limit=15.0 +2024-09-01 02:05:52,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=105744.0, ans=0.125 +2024-09-01 02:06:32,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0 +2024-09-01 02:06:37,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.168e+02 2.346e+02 2.556e+02 3.441e+02, threshold=4.692e+02, percent-clipped=0.0 +2024-09-01 02:07:33,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=105904.0, ans=0.125 +2024-09-01 02:07:35,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=105904.0, ans=0.2 +2024-09-01 02:08:04,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=105957.33333333333, ans=0.125 +2024-09-01 02:08:10,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105957.33333333333, ans=0.125 +2024-09-01 02:08:16,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106010.66666666667, ans=0.0 +2024-09-01 02:08:17,445 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 100, loss[loss=0.2692, simple_loss=0.2325, pruned_loss=0.1023, ctc_loss=0.2006, over 19113.00 frames. 
], tot_loss[loss=0.2648, simple_loss=0.227, pruned_loss=0.1024, ctc_loss=0.1953, over 1473118.75 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:41,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=106064.0, ans=0.2 +2024-09-01 02:09:01,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.10 vs. limit=15.0 +2024-09-01 02:09:19,435 INFO [dysarthria_finetune.py:1435] (1/4) (1303052288, 34072559616) +2024-09-01 02:09:19,436 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:09:19,509 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:09:34,441 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 0, loss[loss=0.2564, simple_loss=0.226, pruned_loss=0.09643, ctc_loss=0.1795, over 18522.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.226, pruned_loss=0.09643, ctc_loss=0.1795, over 18522.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:09:34,441 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:09:58,839 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 10, validation: loss=0.2363, simple_loss=0.211, pruned_loss=0.08786, ctc_loss=0.1591, over 1073944.00 frames. +2024-09-01 02:09:58,839 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:10:06,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=106165.33333333333, ans=0.0 +2024-09-01 02:10:24,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:10:24,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:10:33,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-09-01 02:10:39,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:10:46,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=106272.0, ans=0.0 +2024-09-01 02:11:12,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.30 vs. limit=15.0 +2024-09-01 02:11:36,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.880e+02 2.111e+02 2.256e+02 2.412e+02 3.661e+02, threshold=4.511e+02, percent-clipped=0.0 +2024-09-01 02:11:47,330 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 50, loss[loss=0.2232, simple_loss=0.2074, pruned_loss=0.07342, ctc_loss=0.1559, over 18973.00 frames. ], tot_loss[loss=0.258, simple_loss=0.223, pruned_loss=0.09831, ctc_loss=0.1937, over 826863.11 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:12:21,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=20.16 vs. 
limit=15.0 +2024-09-01 02:12:32,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:12:36,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=106538.66666666667, ans=0.2 +2024-09-01 02:12:38,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=106538.66666666667, ans=0.2 +2024-09-01 02:13:04,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=106592.0, ans=0.04949747468305833 +2024-09-01 02:13:35,236 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 100, loss[loss=0.2546, simple_loss=0.2246, pruned_loss=0.09207, ctc_loss=0.1965, over 19188.00 frames. ], tot_loss[loss=0.253, simple_loss=0.22, pruned_loss=0.09552, ctc_loss=0.1901, over 1472464.39 frames. ], batch size: 134, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:13:38,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=106698.66666666667, ans=0.5 +2024-09-01 02:13:45,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106698.66666666667, ans=0.1 +2024-09-01 02:13:53,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-09-01 02:14:34,903 INFO [dysarthria_finetune.py:1435] (1/4) (14122942464, 34072559616) +2024-09-01 02:14:34,904 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:14:34,957 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:14:48,355 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 0, loss[loss=0.2626, simple_loss=0.2202, pruned_loss=0.106, ctc_loss=0.2062, over 18704.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.2202, pruned_loss=0.106, ctc_loss=0.2062, over 18704.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:14:48,356 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:15:11,815 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 11, validation: loss=0.2335, simple_loss=0.2098, pruned_loss=0.0867, ctc_loss=0.1618, over 1073944.00 frames. +2024-09-01 02:15:11,815 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:15:37,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=106853.33333333333, ans=0.0 +2024-09-01 02:15:43,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.80 vs. 
limit=15.0 +2024-09-01 02:15:46,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106906.66666666667, ans=0.125 +2024-09-01 02:16:01,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.106e+02 2.175e+02 2.350e+02 3.456e+02, threshold=4.351e+02, percent-clipped=0.0 +2024-09-01 02:16:28,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106960.0, ans=0.125 +2024-09-01 02:16:31,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=106960.0, ans=0.2 +2024-09-01 02:16:47,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.55 vs. limit=22.5 +2024-09-01 02:21:27,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107013.33333333333, ans=0.1 +2024-09-01 02:21:37,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=107066.66666666667, ans=0.2 +2024-09-01 02:21:58,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=107066.66666666667, ans=0.2 +2024-09-01 02:22:24,203 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 50, loss[loss=0.2509, simple_loss=0.2201, pruned_loss=0.09399, ctc_loss=0.192, over 18947.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.2183, pruned_loss=0.09389, ctc_loss=0.189, over 828704.78 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:22:28,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107120.0, ans=0.125 +2024-09-01 02:22:33,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107120.0, ans=0.1 +2024-09-01 02:23:31,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107173.33333333333, ans=0.125 +2024-09-01 02:24:10,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.71 vs. limit=15.0 +2024-09-01 02:24:34,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.23 vs. limit=6.0 +2024-09-01 02:24:41,420 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:24:41,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=107280.0, ans=0.0 +2024-09-01 02:24:53,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=107333.33333333333, ans=0.125 +2024-09-01 02:25:09,440 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 100, loss[loss=0.2481, simple_loss=0.214, pruned_loss=0.0948, ctc_loss=0.1981, over 19147.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.2164, pruned_loss=0.09269, ctc_loss=0.1875, over 1473582.76 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 16.0 +2024-09-01 02:25:16,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=107386.66666666667, ans=0.125 +2024-09-01 02:25:17,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=12.0 +2024-09-01 02:25:22,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107386.66666666667, ans=0.125 +2024-09-01 02:25:34,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107440.0, ans=0.1 +2024-09-01 02:25:52,642 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.027e+02 2.133e+02 2.278e+02 3.178e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-09-01 02:26:15,389 INFO [dysarthria_finetune.py:1435] (1/4) (594214912, 34072559616) +2024-09-01 02:26:15,390 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:26:15,476 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:26:37,015 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 0, loss[loss=0.2963, simple_loss=0.2408, pruned_loss=0.1238, ctc_loss=0.2475, over 18735.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.2408, pruned_loss=0.1238, ctc_loss=0.2475, over 18735.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:37,016 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:27:00,638 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 12, validation: loss=0.2234, simple_loss=0.2042, pruned_loss=0.08189, ctc_loss=0.1554, over 1073944.00 frames. +2024-09-01 02:27:00,638 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:27:24,924 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.67 vs. limit=15.0 +2024-09-01 02:27:29,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.09 vs. limit=12.0 +2024-09-01 02:27:50,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.88 vs. limit=15.0 +2024-09-01 02:28:04,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.45 vs. limit=15.0 +2024-09-01 02:28:30,529 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:28:42,045 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.26 vs. limit=12.0 +2024-09-01 02:28:53,640 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 50, loss[loss=0.1935, simple_loss=0.1812, pruned_loss=0.06457, ctc_loss=0.1483, over 18974.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2142, pruned_loss=0.0907, ctc_loss=0.1879, over 827168.58 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:29:15,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107861.33333333333, ans=0.1 +2024-09-01 02:29:22,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125 +2024-09-01 02:30:00,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=107968.0, ans=0.0 +2024-09-01 02:30:17,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-09-01 02:30:19,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.16 vs. limit=22.5 +2024-09-01 02:30:20,079 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.056e+02 2.167e+02 2.338e+02 2.987e+02, threshold=4.333e+02, percent-clipped=0.0 +2024-09-01 02:30:34,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.06 vs. limit=6.0 +2024-09-01 02:34:06,103 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 100, loss[loss=0.2109, simple_loss=0.1987, pruned_loss=0.07313, ctc_loss=0.1513, over 19114.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2133, pruned_loss=0.09117, ctc_loss=0.1866, over 1473649.48 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:35:08,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=108074.66666666667, ans=0.0 +2024-09-01 02:35:45,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=108128.0, ans=0.2 +2024-09-01 02:35:56,698 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.84 vs. limit=22.5 +2024-09-01 02:36:36,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=31.70 vs. limit=22.5 +2024-09-01 02:36:42,074 INFO [dysarthria_finetune.py:1435] (1/4) (14311686144, 34072559616) +2024-09-01 02:36:42,074 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:36:42,127 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:36:54,946 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 0, loss[loss=0.2932, simple_loss=0.2442, pruned_loss=0.1225, ctc_loss=0.2299, over 18361.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.2442, pruned_loss=0.1225, ctc_loss=0.2299, over 18361.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:54,946 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:37:18,562 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 13, validation: loss=0.2186, simple_loss=0.2014, pruned_loss=0.08061, ctc_loss=0.1543, over 1073944.00 frames. 
+2024-09-01 02:37:18,563 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:37:37,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=108229.33333333333, ans=0.025 +2024-09-01 02:37:46,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=108282.66666666667, ans=0.0 +2024-09-01 02:37:47,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.90 vs. limit=15.0 +2024-09-01 02:37:55,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 02:38:01,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=108336.0, ans=0.0 +2024-09-01 02:38:04,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=108336.0, ans=0.0 +2024-09-01 02:38:12,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0 +2024-09-01 02:38:28,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.02 vs. limit=22.5 +2024-09-01 02:38:43,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108389.33333333333, ans=0.0 +2024-09-01 02:38:51,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=108442.66666666667, ans=0.125 +2024-09-01 02:38:56,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=15.0 +2024-09-01 02:39:08,446 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 50, loss[loss=0.191, simple_loss=0.193, pruned_loss=0.05823, ctc_loss=0.1366, over 19011.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.2069, pruned_loss=0.08682, ctc_loss=0.1828, over 828396.70 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:14,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108496.0, ans=0.125 +2024-09-01 02:39:16,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=108496.0, ans=0.2 +2024-09-01 02:39:30,398 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.005e+02 2.143e+02 2.348e+02 3.224e+02, threshold=4.286e+02, percent-clipped=0.0 +2024-09-01 02:39:58,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108602.66666666667, ans=0.1 +2024-09-01 02:40:23,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108656.0, ans=0.1 +2024-09-01 02:40:32,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=108656.0, ans=0.0 +2024-09-01 02:40:38,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-09-01 02:40:56,445 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 100, loss[loss=0.2167, simple_loss=0.1985, pruned_loss=0.07983, ctc_loss=0.1637, over 19217.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2059, pruned_loss=0.0858, ctc_loss=0.1799, over 1472353.64 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:40:57,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=108762.66666666667, ans=0.0 +2024-09-01 02:41:06,795 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.51 vs. limit=15.0 +2024-09-01 02:41:08,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=108762.66666666667, ans=0.05 +2024-09-01 02:41:19,490 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=1.132e-02 +2024-09-01 02:41:47,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.70 vs. limit=22.5 +2024-09-01 02:41:49,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=108869.33333333333, ans=0.07 +2024-09-01 02:41:56,720 INFO [dysarthria_finetune.py:1435] (1/4) (14064222208, 34072559616) +2024-09-01 02:41:56,721 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:41:56,771 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:42:04,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 02:42:09,611 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 0, loss[loss=0.2634, simple_loss=0.2238, pruned_loss=0.1056, ctc_loss=0.2156, over 18619.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.2238, pruned_loss=0.1056, ctc_loss=0.2156, over 18619.00 frames. 
], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:42:09,611 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:42:14,833 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.2601, 1.2442, 1.5896, 0.6401, 1.6574, 1.7630, 1.6676, 1.6827], + device='cuda:1') +2024-09-01 02:42:33,565 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 14, validation: loss=0.209, simple_loss=0.1966, pruned_loss=0.0763, ctc_loss=0.148, over 1073944.00 frames. +2024-09-01 02:42:33,566 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:43:06,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.73 vs. limit=12.0 +2024-09-01 02:43:37,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=109024.0, ans=0.0 +2024-09-01 02:43:42,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.996e+02 2.096e+02 2.326e+02 2.912e+02, threshold=4.192e+02, percent-clipped=0.0 +2024-09-01 02:43:52,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=109077.33333333333, ans=0.125 +2024-09-01 02:44:24,649 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 50, loss[loss=0.249, simple_loss=0.2262, pruned_loss=0.09241, ctc_loss=0.1968, over 19004.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.2053, pruned_loss=0.08527, ctc_loss=0.1797, over 826629.84 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:44:41,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-09-01 02:44:43,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:44:50,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.15 vs. limit=10.0 +2024-09-01 02:45:01,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109237.33333333333, ans=0.1 +2024-09-01 02:45:16,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.70 vs. limit=15.0 +2024-09-01 02:46:12,202 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 100, loss[loss=0.2042, simple_loss=0.1958, pruned_loss=0.07109, ctc_loss=0.1569, over 19114.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2041, pruned_loss=0.08424, ctc_loss=0.1779, over 1472155.55 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:46:26,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.75 vs. 
limit=6.0 +2024-09-01 02:46:38,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109504.0, ans=0.1 +2024-09-01 02:46:52,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=109504.0, ans=0.0 +2024-09-01 02:47:12,383 INFO [dysarthria_finetune.py:1435] (1/4) (14116651008, 34072559616) +2024-09-01 02:47:12,384 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:47:12,422 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:47:25,807 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 0, loss[loss=0.2453, simple_loss=0.2093, pruned_loss=0.09896, ctc_loss=0.2011, over 18480.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.2093, pruned_loss=0.09896, ctc_loss=0.2011, over 18480.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:47:25,808 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:48:03,598 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 15, validation: loss=0.2059, simple_loss=0.1951, pruned_loss=0.07588, ctc_loss=0.1481, over 1073944.00 frames. +2024-09-01 02:48:03,598 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:48:14,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=109605.33333333333, ans=0.125 +2024-09-01 02:48:20,624 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.965e+02 2.102e+02 2.301e+02 3.159e+02, threshold=4.205e+02, percent-clipped=0.0 +2024-09-01 02:48:46,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=109658.66666666667, ans=0.125 +2024-09-01 02:49:18,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109658.66666666667, ans=0.1 +2024-09-01 02:49:40,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109712.0, ans=0.125 +2024-09-01 02:50:50,537 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:51:22,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109818.66666666667, ans=0.0 +2024-09-01 02:51:41,387 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 50, loss[loss=0.2511, simple_loss=0.2222, pruned_loss=0.09594, ctc_loss=0.2113, over 19020.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2037, pruned_loss=0.08513, ctc_loss=0.1815, over 827766.05 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:52:39,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=109925.33333333333, ans=0.07 +2024-09-01 02:53:06,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109925.33333333333, ans=0.125 +2024-09-01 02:55:23,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.70 vs. 
limit=22.5 +2024-09-01 02:55:28,630 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 100, loss[loss=0.1817, simple_loss=0.1722, pruned_loss=0.06281, ctc_loss=0.1566, over 19074.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2029, pruned_loss=0.08357, ctc_loss=0.1771, over 1471681.17 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:55:44,094 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 1.961e+02 2.099e+02 2.266e+02 2.969e+02, threshold=4.197e+02, percent-clipped=0.0 +2024-09-01 02:56:20,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.09 vs. limit=15.0 +2024-09-01 02:57:03,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.27 vs. limit=22.5 +2024-09-01 02:57:12,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.79 vs. limit=15.0 +2024-09-01 02:57:18,308 INFO [dysarthria_finetune.py:1435] (1/4) (2664103936, 34072559616) +2024-09-01 02:57:18,309 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:57:18,384 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 02:57:33,285 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 0, loss[loss=0.2472, simple_loss=0.2223, pruned_loss=0.09475, ctc_loss=0.2018, over 18847.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.2223, pruned_loss=0.09475, ctc_loss=0.2018, over 18847.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:57:33,285 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:58:10,357 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 16, validation: loss=0.2065, simple_loss=0.1951, pruned_loss=0.07751, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 02:58:10,357 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 02:58:51,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=110346.66666666667, ans=0.0 +2024-09-01 02:59:29,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.09 vs. limit=22.5 +2024-09-01 02:59:49,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=110453.33333333333, ans=0.09899494936611666 +2024-09-01 02:59:54,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=110506.66666666667, ans=0.0 +2024-09-01 02:59:58,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=110506.66666666667, ans=0.2 +2024-09-01 03:00:01,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=110506.66666666667, ans=0.0 +2024-09-01 03:00:02,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=22.5 +2024-09-01 03:00:21,810 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 50, loss[loss=0.1869, simple_loss=0.1886, pruned_loss=0.06437, ctc_loss=0.1393, over 19018.00 frames. 
], tot_loss[loss=0.2179, simple_loss=0.1999, pruned_loss=0.08214, ctc_loss=0.1765, over 827868.27 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:00:43,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110560.0, ans=0.1 +2024-09-01 03:00:49,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=110613.33333333333, ans=0.125 +2024-09-01 03:01:09,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.78 vs. limit=10.0 +2024-09-01 03:01:10,112 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.92 vs. limit=15.0 +2024-09-01 03:01:25,487 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.686e+02 1.971e+02 2.093e+02 2.277e+02 2.936e+02, threshold=4.187e+02, percent-clipped=0.0 +2024-09-01 03:02:27,243 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 100, loss[loss=0.1943, simple_loss=0.1857, pruned_loss=0.06962, ctc_loss=0.1594, over 19118.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.1988, pruned_loss=0.08047, ctc_loss=0.1725, over 1473208.83 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:02:28,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.14 vs. limit=22.5 +2024-09-01 03:03:01,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.95 vs. limit=22.5 +2024-09-01 03:03:29,025 INFO [dysarthria_finetune.py:1435] (1/4) (168493056, 34072559616) +2024-09-01 03:03:29,026 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:03:29,085 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 03:03:41,379 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 0, loss[loss=0.2688, simple_loss=0.2431, pruned_loss=0.1072, ctc_loss=0.2005, over 18527.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.2431, pruned_loss=0.1072, ctc_loss=0.2005, over 18527.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:03:41,380 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:03:45,302 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.6355, 2.0034, 1.7446, 0.6951, 1.7418, 1.8900, 1.8514, 1.3211], + device='cuda:1') +2024-09-01 03:04:05,405 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 17, validation: loss=0.1943, simple_loss=0.1886, pruned_loss=0.07183, ctc_loss=0.1409, over 1073944.00 frames. 
+2024-09-01 03:04:05,406 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 03:04:24,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110981.33333333333, ans=0.1 +2024-09-01 03:04:38,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 03:04:40,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111034.66666666667, ans=0.1 +2024-09-01 03:04:53,881 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:05:02,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111088.0, ans=0.125 +2024-09-01 03:05:09,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=111088.0, ans=0.0 +2024-09-01 03:05:20,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=7.75 vs. limit=12.0 +2024-09-01 03:05:45,148 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 1.958e+02 2.075e+02 2.282e+02 2.777e+02, threshold=4.150e+02, percent-clipped=0.0 +2024-09-01 03:05:48,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=111194.66666666667, ans=0.2 +2024-09-01 03:05:56,283 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 50, loss[loss=0.1922, simple_loss=0.19, pruned_loss=0.06448, ctc_loss=0.1634, over 19037.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.1963, pruned_loss=0.07886, ctc_loss=0.1696, over 827680.99 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:06:20,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.24 vs. limit=22.5 +2024-09-01 03:06:30,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=111301.33333333333, ans=0.125 +2024-09-01 03:06:33,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=111301.33333333333, ans=0.95 +2024-09-01 03:06:44,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=111354.66666666667, ans=0.2 +2024-09-01 03:07:10,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=111408.0, ans=0.0 +2024-09-01 03:07:14,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=111408.0, ans=0.125 +2024-09-01 03:07:18,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=111408.0, ans=0.125 +2024-09-01 03:07:45,186 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 100, loss[loss=0.1672, simple_loss=0.171, pruned_loss=0.05633, ctc_loss=0.127, over 19067.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.1937, pruned_loss=0.07742, ctc_loss=0.1677, over 1472664.14 frames. 
], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:07:54,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=111514.66666666667, ans=0.95 +2024-09-01 03:08:23,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.57 vs. limit=22.5 +2024-09-01 03:08:44,483 INFO [dysarthria_finetune.py:1435] (1/4) (1298857984, 34072559616) +2024-09-01 03:08:44,484 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:08:44,548 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 03:08:52,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 03:09:16,217 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 0, loss[loss=0.2577, simple_loss=0.2193, pruned_loss=0.104, ctc_loss=0.2202, over 18622.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.2193, pruned_loss=0.104, ctc_loss=0.2202, over 18622.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:09:16,217 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:09:39,632 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 18, validation: loss=0.1961, simple_loss=0.1886, pruned_loss=0.07291, ctc_loss=0.1441, over 1073944.00 frames. +2024-09-01 03:09:39,632 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 03:10:14,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.703e+02 1.913e+02 2.060e+02 2.285e+02 3.151e+02, threshold=4.120e+02, percent-clipped=0.0 +2024-09-01 03:10:21,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.29 vs. limit=10.0 +2024-09-01 03:10:23,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=111776.0, ans=0.025 +2024-09-01 03:10:59,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=111829.33333333333, ans=0.125 +2024-09-01 03:11:00,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=12.0 +2024-09-01 03:11:29,090 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 50, loss[loss=0.2202, simple_loss=0.2014, pruned_loss=0.08342, ctc_loss=0.1803, over 19026.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.1936, pruned_loss=0.07855, ctc_loss=0.1693, over 826500.31 frames. 
], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:11:45,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=111936.0, ans=0.2 +2024-09-01 03:12:33,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112096.0, ans=0.1 +2024-09-01 03:12:42,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112096.0, ans=0.0 +2024-09-01 03:12:44,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=112096.0, ans=0.125 +2024-09-01 03:13:05,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112149.33333333333, ans=0.1 +2024-09-01 03:13:44,716 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 100, loss[loss=0.2002, simple_loss=0.1888, pruned_loss=0.07276, ctc_loss=0.1651, over 19036.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.1919, pruned_loss=0.07755, ctc_loss=0.1664, over 1471672.61 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:14:03,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.75 vs. limit=15.0 +2024-09-01 03:14:19,258 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 1.898e+02 2.020e+02 2.262e+02 2.800e+02, threshold=4.040e+02, percent-clipped=0.0 +2024-09-01 03:14:22,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=112256.0, ans=0.125 +2024-09-01 03:14:43,277 INFO [dysarthria_finetune.py:1435] (1/4) (428539904, 34072559616) +2024-09-01 03:14:43,279 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:14:43,351 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 03:14:55,814 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 0, loss[loss=0.2309, simple_loss=0.2158, pruned_loss=0.08766, ctc_loss=0.177, over 18691.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2158, pruned_loss=0.08766, ctc_loss=0.177, over 18691.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:14:55,814 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:15:42,622 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 19, validation: loss=0.1928, simple_loss=0.1862, pruned_loss=0.07146, ctc_loss=0.1413, over 1073944.00 frames. +2024-09-01 03:15:42,623 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 03:16:00,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.51 vs. limit=15.0 +2024-09-01 03:16:31,651 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:16:43,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=13.94 vs. 
limit=15.0 +2024-09-01 03:17:02,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112405.33333333333, ans=0.0 +2024-09-01 03:18:36,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=112512.0, ans=0.125 +2024-09-01 03:18:42,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=112565.33333333333, ans=0.09899494936611666 +2024-09-01 03:19:33,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112618.66666666667, ans=0.125 +2024-09-01 03:19:34,391 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 50, loss[loss=0.2181, simple_loss=0.2028, pruned_loss=0.07977, ctc_loss=0.1846, over 18976.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.1919, pruned_loss=0.07771, ctc_loss=0.1678, over 828010.25 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:22:34,913 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 1.922e+02 2.090e+02 2.243e+02 2.725e+02, threshold=4.180e+02, percent-clipped=0.0 +2024-09-01 03:22:47,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.87 vs. limit=22.5 +2024-09-01 03:23:34,303 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 100, loss[loss=0.1669, simple_loss=0.1637, pruned_loss=0.05815, ctc_loss=0.1346, over 19118.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.1916, pruned_loss=0.07696, ctc_loss=0.1665, over 1474453.83 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:23:44,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 03:24:33,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=112938.66666666667, ans=0.125 +2024-09-01 03:24:41,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.62 vs. limit=15.0 +2024-09-01 03:25:02,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.22 vs. limit=15.0 +2024-09-01 03:25:13,452 INFO [dysarthria_finetune.py:1435] (1/4) (12938051584, 34072559616) +2024-09-01 03:25:13,454 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:25:13,497 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 03:25:27,039 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 0, loss[loss=0.2548, simple_loss=0.2276, pruned_loss=0.1014, ctc_loss=0.1979, over 18758.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.2276, pruned_loss=0.1014, ctc_loss=0.1979, over 18758.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:25:27,039 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:26:10,759 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 20, validation: loss=0.19, simple_loss=0.1838, pruned_loss=0.07041, ctc_loss=0.1385, over 1073944.00 frames. 
+2024-09-01 03:26:10,760 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14024MB +2024-09-01 03:26:12,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=15.0 +2024-09-01 03:26:23,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=113040.0, ans=0.125 +2024-09-01 03:27:22,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=113093.33333333333, ans=0.2 +2024-09-01 03:27:59,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113146.66666666667, ans=0.1 +2024-09-01 03:28:35,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=113200.0, ans=0.025 +2024-09-01 03:29:06,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113253.33333333333, ans=0.1 +2024-09-01 03:29:26,740 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 50, loss[loss=0.198, simple_loss=0.1865, pruned_loss=0.07357, ctc_loss=0.1557, over 19069.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.1889, pruned_loss=0.077, ctc_loss=0.1662, over 828644.17 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:29:32,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.88 vs. limit=22.5 +2024-09-01 03:30:05,879 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 1.917e+02 2.046e+02 2.200e+02 2.791e+02, threshold=4.093e+02, percent-clipped=0.0 +2024-09-01 03:30:44,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.75 vs. limit=22.5 +2024-09-01 03:31:15,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113413.33333333333, ans=0.125 +2024-09-01 03:31:23,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113413.33333333333, ans=0.125 +2024-09-01 03:32:29,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=15.0 +2024-09-01 03:33:06,774 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 100, loss[loss=0.1532, simple_loss=0.149, pruned_loss=0.05368, ctc_loss=0.1252, over 19104.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.1865, pruned_loss=0.07485, ctc_loss=0.1617, over 1473557.06 frames. 
], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:33:46,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=113573.33333333333, ans=0.125 +2024-09-01 03:34:10,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-09-01 03:35:10,295 INFO [dysarthria_finetune.py:1435] (1/4) (376111104, 34072559616) +2024-09-01 03:35:10,296 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:35:10,372 INFO [dysarthria_finetune.py:1440] (1/4) (29688004608, 34072559616) +2024-09-01 03:35:10,373 INFO [dysarthria_finetune.py:1442] (1/4) Done! diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-2 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-2 new file mode 100644 index 0000000000000000000000000000000000000000..944c6a92f6ad640c9474b75e517774daab084e93 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-2 @@ -0,0 +1,551 @@ +2024-08-31 22:09:00,004 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-31 22:09:00,960 INFO [dysarthria_finetune.py:1214] (2/4) (33106362368, 34072559616) +2024-08-31 22:09:00,960 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-31 22:09:01,332 INFO [dysarthria_finetune.py:1219] (2/4) (33106362368, 34072559616) +2024-08-31 22:09:01,332 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-31 22:09:01,335 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2653.int.cedar.computecanada.ca', 'IP address': '172.16.146.90'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': 
'12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:09:01,335 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-31 22:09:16,893 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66367431 +2024-08-31 22:09:16,893 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt +2024-08-31 22:10:50,739 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-31 22:11:01,693 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-31 22:11:01,783 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-31 22:11:17,376 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-31 22:11:18,285 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-31 22:11:18,291 INFO [dysarthria_asr_datamodule.py:501] (2/4) About to get dev cuts +2024-08-31 22:11:18,428 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-31 22:11:18,753 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-31 22:11:18,754 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:12:59,095 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.41 vs. limit=5.0 +2024-08-31 22:12:59,567 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=15.16 vs. limit=7.5 +2024-08-31 22:13:03,031 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12713MB +2024-08-31 22:13:04,361 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=18.51 vs. 
limit=7.5 +2024-08-31 22:13:04,877 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12713MB +2024-08-31 22:14:12,892 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12713MB +2024-08-31 22:14:14,953 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12713MB +2024-08-31 22:19:47,416 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=27.15 vs. limit=7.5 +2024-08-31 22:19:51,922 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12713MB +2024-08-31 22:19:54,352 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12713MB +2024-08-31 22:20:43,766 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.4194, simple_loss=0.3368, pruned_loss=0.2044, ctc_loss=0.3107, over 18533.00 frames. ], tot_loss[loss=0.4194, simple_loss=0.3368, pruned_loss=0.2044, ctc_loss=0.3107, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:43,766 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-31 22:46:04,595 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3942, simple_loss=0.3187, pruned_loss=0.1927, ctc_loss=0.281, over 1073944.00 frames. +2024-08-31 22:46:14,297 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19801MB +2024-08-31 23:06:10,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=16.18 vs. limit=15.0 +2024-08-31 23:06:10,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.51 vs. limit=22.5 +2024-08-31 23:06:11,554 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.685e+02 9.975e+02 1.051e+03 1.091e+03 1.133e+03, threshold=4.203e+03, percent-clipped=0.0 +2024-08-31 23:23:02,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=100053.33333333333, ans=0.125 +2024-08-31 23:25:12,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.960e+02 9.836e+02 1.043e+03 1.067e+03 1.144e+03, threshold=4.173e+03, percent-clipped=0.0 +2024-08-31 23:45:44,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=8.30 vs. limit=6.0 +2024-08-31 23:49:49,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.16 vs. limit=6.0 +2024-08-31 23:50:05,828 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.677e+02 8.648e+02 9.697e+02 1.051e+03 1.144e+03, threshold=3.879e+03, percent-clipped=0.0 +2024-08-31 23:55:38,225 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.4319, simple_loss=0.3435, pruned_loss=0.2185, ctc_loss=0.3295, over 19018.00 frames. ], tot_loss[loss=0.4291, simple_loss=0.3434, pruned_loss=0.2152, ctc_loss=0.3194, over 827419.58 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-09-01 00:03:12,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.75 vs. 
limit=6.0 +2024-09-01 00:03:33,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=100320.0, ans=0.125 +2024-09-01 00:08:31,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=100373.33333333333, ans=0.025 +2024-09-01 00:13:07,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.73 vs. limit=15.0 +2024-09-01 00:18:32,448 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.599e+02 6.914e+02 7.776e+02 9.170e+02 1.144e+03, threshold=1.555e+03, percent-clipped=0.0 +2024-09-01 00:18:32,485 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3764, simple_loss=0.3057, pruned_loss=0.1764, ctc_loss=0.2637, over 19117.00 frames. ], tot_loss[loss=0.4128, simple_loss=0.331, pruned_loss=0.2035, ctc_loss=0.3049, over 1475925.13 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:23:33,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=100586.66666666667, ans=0.025 +2024-09-01 00:27:43,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.88 vs. limit=15.0 +2024-09-01 00:27:58,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=31.79 vs. limit=22.5 +2024-09-01 00:28:35,635 INFO [dysarthria_finetune.py:1435] (2/4) (10100604928, 34072559616) +2024-09-01 00:28:35,636 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 00:28:35,657 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 00:30:12,128 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 0, loss[loss=0.3746, simple_loss=0.3028, pruned_loss=0.1772, ctc_loss=0.2681, over 18502.00 frames. ], tot_loss[loss=0.3746, simple_loss=0.3028, pruned_loss=0.1772, ctc_loss=0.2681, over 18502.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:30:12,129 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 00:34:27,519 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 2, validation: loss=0.3547, simple_loss=0.2901, pruned_loss=0.1627, ctc_loss=0.2412, over 1073944.00 frames. +2024-09-01 00:34:27,520 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 00:41:34,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=100789.33333333333, ans=0.125 +2024-09-01 00:41:38,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=100789.33333333333, ans=0.05 +2024-09-01 00:43:08,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:43:14,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.01 vs. 
limit=15.0 +2024-09-01 00:45:12,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=100842.66666666667, ans=0.09899494936611666 +2024-09-01 00:48:18,393 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 50, loss[loss=0.3747, simple_loss=0.304, pruned_loss=0.1698, ctc_loss=0.2683, over 18952.00 frames. ], tot_loss[loss=0.3931, simple_loss=0.3162, pruned_loss=0.1878, ctc_loss=0.286, over 829638.79 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 00:48:23,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=100949.33333333333, ans=0.125 +2024-09-01 00:50:02,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101002.66666666667, ans=0.1 +2024-09-01 00:51:31,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=101002.66666666667, ans=22.5 +2024-09-01 00:53:05,353 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 4.624e+02 4.997e+02 5.383e+02 6.686e+02, threshold=9.995e+02, percent-clipped=0.0 +2024-09-01 00:53:41,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101056.0, ans=0.1 +2024-09-01 00:54:47,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=101109.33333333333, ans=0.125 +2024-09-01 00:55:04,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=101109.33333333333, ans=0.125 +2024-09-01 00:56:26,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101216.0, ans=0.1 +2024-09-01 00:56:30,069 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 100, loss[loss=0.3088, simple_loss=0.2566, pruned_loss=0.1307, ctc_loss=0.2019, over 19108.00 frames. ], tot_loss[loss=0.3788, simple_loss=0.3059, pruned_loss=0.1776, ctc_loss=0.2728, over 1476292.15 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 00:56:56,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101216.0, ans=0.1 +2024-09-01 00:57:14,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101216.0, ans=0.125 +2024-09-01 00:59:53,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=15.0 +2024-09-01 01:00:09,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=101269.33333333333, ans=0.2 +2024-09-01 01:00:28,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=101269.33333333333, ans=0.2 +2024-09-01 01:01:04,886 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.72 vs. 
limit=15.0 +2024-09-01 01:01:15,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=101322.66666666667, ans=0.025 +2024-09-01 01:01:16,824 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 01:01:16,824 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:01:16,866 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 01:01:24,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-09-01 01:01:35,837 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 0, loss[loss=0.3886, simple_loss=0.3111, pruned_loss=0.1873, ctc_loss=0.2854, over 18600.00 frames. ], tot_loss[loss=0.3886, simple_loss=0.3111, pruned_loss=0.1873, ctc_loss=0.2854, over 18600.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:01:35,837 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:01:59,928 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 3, validation: loss=0.3274, simple_loss=0.2708, pruned_loss=0.1428, ctc_loss=0.2163, over 1073944.00 frames. +2024-09-01 01:01:59,928 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 01:03:09,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=101424.0, ans=0.2 +2024-09-01 01:03:53,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=101477.33333333333, ans=0.09899494936611666 +2024-09-01 01:03:59,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.13 vs. limit=22.5 +2024-09-01 01:04:06,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.42 vs. limit=22.5 +2024-09-01 01:04:33,381 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 3.454e+02 3.711e+02 3.996e+02 5.509e+02, threshold=7.422e+02, percent-clipped=0.0 +2024-09-01 01:04:56,820 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 50, loss[loss=0.3581, simple_loss=0.2895, pruned_loss=0.1633, ctc_loss=0.2597, over 19168.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.2967, pruned_loss=0.1653, ctc_loss=0.26, over 828229.52 frames. ], batch size: 103, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:05:02,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=101637.33333333333, ans=0.125 +2024-09-01 01:05:38,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101690.66666666667, ans=0.125 +2024-09-01 01:05:38,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=19.91 vs. limit=15.0 +2024-09-01 01:05:41,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.19 vs. 
limit=22.5 +2024-09-01 01:06:07,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101744.0, ans=0.1 +2024-09-01 01:06:09,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=101744.0, ans=0.125 +2024-09-01 01:06:19,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-09-01 01:06:21,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=101797.33333333333, ans=0.125 +2024-09-01 01:06:44,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=20.87 vs. limit=15.0 +2024-09-01 01:06:46,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=101850.66666666667, ans=15.0 +2024-09-01 01:07:00,078 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 100, loss[loss=0.3048, simple_loss=0.2513, pruned_loss=0.1295, ctc_loss=0.2104, over 19024.00 frames. ], tot_loss[loss=0.3547, simple_loss=0.2887, pruned_loss=0.1592, ctc_loss=0.2512, over 1476045.82 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:08:05,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.50 vs. limit=15.0 +2024-09-01 01:08:07,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102010.66666666667, ans=0.125 +2024-09-01 01:08:21,715 INFO [dysarthria_finetune.py:1435] (2/4) (10138353664, 34072559616) +2024-09-01 01:08:21,715 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:08:21,747 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 01:08:35,097 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 0, loss[loss=0.36, simple_loss=0.292, pruned_loss=0.1595, ctc_loss=0.2627, over 18618.00 frames. ], tot_loss[loss=0.36, simple_loss=0.292, pruned_loss=0.1595, ctc_loss=0.2627, over 18618.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:08:35,097 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:08:58,405 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 4, validation: loss=0.308, simple_loss=0.2573, pruned_loss=0.1299, ctc_loss=0.2, over 1073944.00 frames. +2024-09-01 01:08:58,406 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 01:09:19,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.80 vs. 
limit=15.0 +2024-09-01 01:09:33,167 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 2.869e+02 3.070e+02 3.452e+02 5.291e+02, threshold=6.140e+02, percent-clipped=0.0 +2024-09-01 01:09:38,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=102106.66666666667, ans=0.125 +2024-09-01 01:09:48,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:09:55,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=102160.0, ans=0.0 +2024-09-01 01:10:19,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=102213.33333333333, ans=0.2 +2024-09-01 01:10:32,581 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:10:47,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102266.66666666667, ans=0.125 +2024-09-01 01:10:51,037 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 50, loss[loss=0.3395, simple_loss=0.276, pruned_loss=0.149, ctc_loss=0.2475, over 18961.00 frames. ], tot_loss[loss=0.3414, simple_loss=0.2789, pruned_loss=0.1484, ctc_loss=0.2439, over 828488.26 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:10:59,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.85 vs. limit=15.0 +2024-09-01 01:11:05,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=102320.0, ans=0.0 +2024-09-01 01:11:12,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.95 vs. limit=15.0 +2024-09-01 01:11:19,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0 +2024-09-01 01:11:27,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.72 vs. limit=22.5 +2024-09-01 01:11:53,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=102426.66666666667, ans=0.125 +2024-09-01 01:12:02,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.40 vs. limit=10.0 +2024-09-01 01:12:08,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. limit=6.0 +2024-09-01 01:12:38,912 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 100, loss[loss=0.3042, simple_loss=0.2512, pruned_loss=0.1326, ctc_loss=0.2065, over 19090.00 frames. ], tot_loss[loss=0.3367, simple_loss=0.2753, pruned_loss=0.1466, ctc_loss=0.2401, over 1476821.49 frames. 
], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:12:55,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=102586.66666666667, ans=0.0 +2024-09-01 01:13:01,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=19.80 vs. limit=15.0 +2024-09-01 01:13:04,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=102640.0, ans=0.125 +2024-09-01 01:13:09,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=17.88 vs. limit=15.0 +2024-09-01 01:13:12,349 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.110e+02 2.669e+02 2.871e+02 3.122e+02 4.671e+02, threshold=5.742e+02, percent-clipped=0.0 +2024-09-01 01:13:40,143 INFO [dysarthria_finetune.py:1435] (2/4) (10169810944, 34072559616) +2024-09-01 01:13:40,144 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:13:40,189 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 01:13:53,058 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 0, loss[loss=0.3214, simple_loss=0.2649, pruned_loss=0.136, ctc_loss=0.227, over 18551.00 frames. ], tot_loss[loss=0.3214, simple_loss=0.2649, pruned_loss=0.136, ctc_loss=0.227, over 18551.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:13:53,058 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:14:16,500 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 5, validation: loss=0.2909, simple_loss=0.2453, pruned_loss=0.1191, ctc_loss=0.1881, over 1073944.00 frames. +2024-09-01 01:14:16,501 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 01:15:29,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 01:15:51,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0 +2024-09-01 01:15:55,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=102794.66666666667, ans=0.1 +2024-09-01 01:15:55,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.96 vs. 
limit=15.0 +2024-09-01 01:16:06,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=102794.66666666667, ans=0.0 +2024-09-01 01:16:58,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102848.0, ans=0.0 +2024-09-01 01:17:08,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=102848.0, ans=0.125 +2024-09-01 01:17:19,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102848.0, ans=0.125 +2024-09-01 01:18:35,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102901.33333333333, ans=0.125 +2024-09-01 01:20:50,700 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 50, loss[loss=0.2865, simple_loss=0.2385, pruned_loss=0.1176, ctc_loss=0.1999, over 19027.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.2679, pruned_loss=0.1381, ctc_loss=0.2331, over 828775.72 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:21:28,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-09-01 01:21:54,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.88 vs. limit=6.0 +2024-09-01 01:22:28,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=103061.33333333333, ans=0.2 +2024-09-01 01:24:14,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=103114.66666666667, ans=0.0 +2024-09-01 01:24:46,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=22.75 vs. limit=15.0 +2024-09-01 01:25:33,880 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.253e+02 2.485e+02 2.709e+02 2.997e+02 4.733e+02, threshold=5.419e+02, percent-clipped=0.0 +2024-09-01 01:26:15,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-09-01 01:26:21,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:26:23,136 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 100, loss[loss=0.3173, simple_loss=0.2618, pruned_loss=0.1357, ctc_loss=0.2234, over 19114.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.2599, pruned_loss=0.1329, ctc_loss=0.2248, over 1478197.42 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:26:40,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.14 vs. limit=10.0 +2024-09-01 01:26:43,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103274.66666666667, ans=0.1 +2024-09-01 01:27:05,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.37 vs. 
limit=12.0 +2024-09-01 01:27:34,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103381.33333333333, ans=0.1 +2024-09-01 01:27:55,386 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 01:27:55,387 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:27:55,421 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 01:28:14,511 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 0, loss[loss=0.2893, simple_loss=0.2454, pruned_loss=0.1135, ctc_loss=0.1957, over 18783.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.2454, pruned_loss=0.1135, ctc_loss=0.1957, over 18783.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:28:14,512 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:28:37,895 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 6, validation: loss=0.2789, simple_loss=0.2369, pruned_loss=0.1122, ctc_loss=0.1819, over 1073944.00 frames. +2024-09-01 01:28:37,895 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 01:29:32,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.80 vs. limit=15.0 +2024-09-01 01:29:51,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=103584.0, ans=0.025 +2024-09-01 01:29:53,844 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.09 vs. limit=15.0 +2024-09-01 01:30:17,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=103637.33333333333, ans=0.025 +2024-09-01 01:30:32,251 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 50, loss[loss=0.2931, simple_loss=0.2474, pruned_loss=0.1183, ctc_loss=0.1988, over 19006.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.2486, pruned_loss=0.1228, ctc_loss=0.2163, over 828020.78 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:30:35,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.83 vs. limit=15.0 +2024-09-01 01:31:04,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.055e+02 2.419e+02 2.583e+02 2.819e+02 4.094e+02, threshold=5.165e+02, percent-clipped=0.0 +2024-09-01 01:31:29,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0 +2024-09-01 01:31:29,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.98 vs. 
limit=6.0 +2024-09-01 01:31:38,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103797.33333333333, ans=0.125 +2024-09-01 01:31:38,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-09-01 01:31:49,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-09-01 01:32:43,096 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 100, loss[loss=0.3281, simple_loss=0.2726, pruned_loss=0.1348, ctc_loss=0.2379, over 19060.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.248, pruned_loss=0.1225, ctc_loss=0.2137, over 1475525.13 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:15,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 01:33:44,580 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 01:33:44,581 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:33:44,617 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 01:33:57,935 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 0, loss[loss=0.2927, simple_loss=0.2482, pruned_loss=0.1179, ctc_loss=0.1984, over 18435.00 frames. ], tot_loss[loss=0.2927, simple_loss=0.2482, pruned_loss=0.1179, ctc_loss=0.1984, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:57,935 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:34:21,903 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 7, validation: loss=0.2604, simple_loss=0.2251, pruned_loss=0.1007, ctc_loss=0.1681, over 1073944.00 frames. +2024-09-01 01:34:21,904 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 01:34:50,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=104160.0, ans=0.125 +2024-09-01 01:35:40,238 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.049e+02 2.272e+02 2.384e+02 2.601e+02 4.291e+02, threshold=4.768e+02, percent-clipped=0.0 +2024-09-01 01:37:02,208 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 50, loss[loss=0.2726, simple_loss=0.2309, pruned_loss=0.1049, ctc_loss=0.1991, over 18970.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.2474, pruned_loss=0.121, ctc_loss=0.2161, over 828175.40 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:37:57,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104426.66666666667, ans=0.125 +2024-09-01 01:39:43,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.25 vs. limit=15.0 +2024-09-01 01:40:04,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.92 vs. limit=22.5 +2024-09-01 01:40:52,487 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 100, loss[loss=0.2466, simple_loss=0.2186, pruned_loss=0.08723, ctc_loss=0.1633, over 19065.00 frames. 
], tot_loss[loss=0.2878, simple_loss=0.2411, pruned_loss=0.1162, ctc_loss=0.2083, over 1476190.89 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:41:03,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=104640.0, ans=0.125 +2024-09-01 01:41:07,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.48 vs. limit=6.0 +2024-09-01 01:41:50,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=104693.33333333333, ans=0.125 +2024-09-01 01:42:54,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104746.66666666667, ans=0.0 +2024-09-01 01:42:59,051 INFO [dysarthria_finetune.py:1435] (2/4) (10176102400, 34072559616) +2024-09-01 01:42:59,052 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:42:59,100 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 01:43:13,095 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 0, loss[loss=0.2607, simple_loss=0.2234, pruned_loss=0.09992, ctc_loss=0.1855, over 18635.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.2234, pruned_loss=0.09992, ctc_loss=0.1855, over 18635.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:43:13,095 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:44:04,949 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 8, validation: loss=0.2572, simple_loss=0.2228, pruned_loss=0.09973, ctc_loss=0.1708, over 1073944.00 frames. +2024-09-01 01:44:04,950 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 01:44:10,073 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.73 vs. limit=12.0 +2024-09-01 01:44:25,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.924e+02 2.205e+02 2.324e+02 2.533e+02 3.850e+02, threshold=4.647e+02, percent-clipped=0.0 +2024-09-01 01:44:41,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-09-01 01:44:58,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.56 vs. limit=15.0 +2024-09-01 01:52:39,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.12 vs. 
limit=15.0 +2024-09-01 01:52:53,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104896.0, ans=0.0 +2024-09-01 01:54:00,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-09-01 01:54:40,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104949.33333333333, ans=0.1 +2024-09-01 01:55:49,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=105002.66666666667, ans=0.2 +2024-09-01 01:56:19,985 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 50, loss[loss=0.2533, simple_loss=0.2276, pruned_loss=0.08824, ctc_loss=0.1662, over 19000.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.2386, pruned_loss=0.1145, ctc_loss=0.2132, over 827531.12 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:56:45,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=105056.0, ans=0.0 +2024-09-01 01:56:45,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=105056.0, ans=0.07 +2024-09-01 02:00:20,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105269.33333333333, ans=0.125 +2024-09-01 02:00:31,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=105269.33333333333, ans=0.125 +2024-09-01 02:00:56,053 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 100, loss[loss=0.2517, simple_loss=0.2194, pruned_loss=0.09369, ctc_loss=0.1783, over 19093.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.2327, pruned_loss=0.1092, ctc_loss=0.2035, over 1475468.79 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:01:07,948 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.165e+02 2.362e+02 2.610e+02 3.254e+02, threshold=4.723e+02, percent-clipped=0.0 +2024-09-01 02:01:29,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105322.66666666667, ans=0.125 +2024-09-01 02:02:23,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=105376.0, ans=0.2 +2024-09-01 02:02:36,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105429.33333333333, ans=0.1 +2024-09-01 02:02:56,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.63 vs. limit=22.5 +2024-09-01 02:02:57,170 INFO [dysarthria_finetune.py:1435] (2/4) (10142547968, 34072559616) +2024-09-01 02:02:57,171 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:02:57,207 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:03:10,289 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 0, loss[loss=0.2894, simple_loss=0.243, pruned_loss=0.1182, ctc_loss=0.212, over 18461.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.243, pruned_loss=0.1182, ctc_loss=0.212, over 18461.00 frames. 
], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:10,290 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:03:33,813 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 9, validation: loss=0.2431, simple_loss=0.2147, pruned_loss=0.0913, ctc_loss=0.1608, over 1073944.00 frames. +2024-09-01 02:03:33,813 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:03:38,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.30 vs. limit=15.0 +2024-09-01 02:03:52,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=105477.33333333333, ans=0.2 +2024-09-01 02:04:08,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=105530.66666666667, ans=0.125 +2024-09-01 02:04:52,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:05:27,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105690.66666666667, ans=0.125 +2024-09-01 02:05:38,142 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 50, loss[loss=0.2838, simple_loss=0.2448, pruned_loss=0.1078, ctc_loss=0.2093, over 18943.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.2266, pruned_loss=0.1032, ctc_loss=0.1967, over 826909.81 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:52,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=105744.0, ans=0.125 +2024-09-01 02:06:23,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=105797.33333333333, ans=0.125 +2024-09-01 02:06:37,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.168e+02 2.346e+02 2.556e+02 3.441e+02, threshold=4.692e+02, percent-clipped=0.0 +2024-09-01 02:07:40,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=105904.0, ans=0.025 +2024-09-01 02:07:53,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.29 vs. limit=15.0 +2024-09-01 02:07:53,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.17 vs. limit=10.0 +2024-09-01 02:08:10,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105957.33333333333, ans=0.125 +2024-09-01 02:08:17,428 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 100, loss[loss=0.2426, simple_loss=0.2191, pruned_loss=0.08446, ctc_loss=0.1699, over 19136.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.2261, pruned_loss=0.1022, ctc_loss=0.1967, over 1474643.82 frames. 
], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:27,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=106010.66666666667, ans=0.05 +2024-09-01 02:08:30,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:08:30,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:08:43,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=106064.0, ans=0.025 +2024-09-01 02:09:19,445 INFO [dysarthria_finetune.py:1435] (2/4) (10142547968, 34072559616) +2024-09-01 02:09:19,446 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:09:19,481 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:09:34,479 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 0, loss[loss=0.3038, simple_loss=0.2551, pruned_loss=0.1237, ctc_loss=0.2282, over 18505.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.2551, pruned_loss=0.1237, ctc_loss=0.2282, over 18505.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:09:34,480 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:09:54,959 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.8844, 4.8770, 3.6462, 2.8434], device='cuda:2') +2024-09-01 02:09:58,839 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 10, validation: loss=0.2363, simple_loss=0.211, pruned_loss=0.08786, ctc_loss=0.1591, over 1073944.00 frames. +2024-09-01 02:09:58,839 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:10:06,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106165.33333333333, ans=0.1 +2024-09-01 02:10:29,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0 +2024-09-01 02:10:35,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.93 vs. limit=22.5 +2024-09-01 02:10:37,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.34 vs. 
limit=15.0 +2024-09-01 02:10:43,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=106272.0, ans=0.125 +2024-09-01 02:10:58,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=106272.0, ans=0.0 +2024-09-01 02:11:09,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=106325.33333333333, ans=0.0 +2024-09-01 02:11:09,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:11:28,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106378.66666666667, ans=0.125 +2024-09-01 02:11:36,257 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.880e+02 2.111e+02 2.256e+02 2.412e+02 3.661e+02, threshold=4.511e+02, percent-clipped=0.0 +2024-09-01 02:11:47,317 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 50, loss[loss=0.2391, simple_loss=0.2155, pruned_loss=0.08232, ctc_loss=0.1786, over 19019.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.2229, pruned_loss=0.09915, ctc_loss=0.1955, over 827816.98 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:11:59,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=106432.0, ans=15.0 +2024-09-01 02:12:16,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=106485.33333333333, ans=0.0 +2024-09-01 02:12:18,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=106485.33333333333, ans=0.025 +2024-09-01 02:12:23,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.88 vs. limit=15.0 +2024-09-01 02:12:41,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:12:43,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=106538.66666666667, ans=0.95 +2024-09-01 02:13:00,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=11.05 vs. limit=12.0 +2024-09-01 02:13:23,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=106645.33333333333, ans=0.0 +2024-09-01 02:13:35,242 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 100, loss[loss=0.2263, simple_loss=0.1993, pruned_loss=0.08021, ctc_loss=0.1811, over 19070.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.2173, pruned_loss=0.09356, ctc_loss=0.1867, over 1475821.74 frames. 
], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:13:45,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=106698.66666666667, ans=0.07 +2024-09-01 02:13:53,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-09-01 02:13:58,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.83 vs. limit=15.0 +2024-09-01 02:14:31,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106805.33333333333, ans=0.0 +2024-09-01 02:14:34,903 INFO [dysarthria_finetune.py:1435] (2/4) (10169810944, 34072559616) +2024-09-01 02:14:34,904 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:14:34,956 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:14:48,322 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 0, loss[loss=0.3055, simple_loss=0.2484, pruned_loss=0.1299, ctc_loss=0.2437, over 18525.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.2484, pruned_loss=0.1299, ctc_loss=0.2437, over 18525.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:14:48,322 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:15:11,809 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 11, validation: loss=0.2335, simple_loss=0.2098, pruned_loss=0.0867, ctc_loss=0.1618, over 1073944.00 frames. +2024-09-01 02:15:11,810 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:16:01,162 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.106e+02 2.175e+02 2.350e+02 3.456e+02, threshold=4.351e+02, percent-clipped=0.0 +2024-09-01 02:16:11,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106960.0, ans=0.125 +2024-09-01 02:16:13,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=106960.0, ans=0.125 +2024-09-01 02:16:23,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106960.0, ans=0.125 +2024-09-01 02:16:49,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107013.33333333333, ans=0.1 +2024-09-01 02:22:24,184 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 50, loss[loss=0.2462, simple_loss=0.2151, pruned_loss=0.09435, ctc_loss=0.1839, over 19068.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.2172, pruned_loss=0.09387, ctc_loss=0.1891, over 827285.47 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:22:28,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107120.0, ans=0.125 +2024-09-01 02:22:31,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.87 vs. 
limit=15.0 +2024-09-01 02:22:39,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=107120.0, ans=0.2 +2024-09-01 02:23:18,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=107173.33333333333, ans=0.125 +2024-09-01 02:23:34,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107173.33333333333, ans=0.125 +2024-09-01 02:24:38,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.73 vs. limit=22.5 +2024-09-01 02:24:55,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107333.33333333333, ans=0.1 +2024-09-01 02:25:09,456 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 100, loss[loss=0.2397, simple_loss=0.2067, pruned_loss=0.09236, ctc_loss=0.1889, over 19059.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.2155, pruned_loss=0.09244, ctc_loss=0.1864, over 1474809.38 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 16.0 +2024-09-01 02:25:51,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:25:52,649 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.027e+02 2.133e+02 2.278e+02 3.178e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-09-01 02:26:15,406 INFO [dysarthria_finetune.py:1435] (2/4) (10138353664, 34072559616) +2024-09-01 02:26:15,407 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:26:15,462 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:26:37,053 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 0, loss[loss=0.2582, simple_loss=0.2227, pruned_loss=0.0996, ctc_loss=0.2043, over 18505.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.2227, pruned_loss=0.0996, ctc_loss=0.2043, over 18505.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:37,053 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:27:00,641 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 12, validation: loss=0.2234, simple_loss=0.2042, pruned_loss=0.08189, ctc_loss=0.1554, over 1073944.00 frames. +2024-09-01 02:27:00,642 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:27:06,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.14 vs. limit=15.0 +2024-09-01 02:27:24,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107594.66666666667, ans=0.125 +2024-09-01 02:27:24,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.09 vs. limit=22.5 +2024-09-01 02:27:53,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.94 vs. 
limit=15.0 +2024-09-01 02:28:15,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=107701.33333333333, ans=0.125 +2024-09-01 02:28:28,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107701.33333333333, ans=0.125 +2024-09-01 02:28:37,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107754.66666666667, ans=0.125 +2024-09-01 02:28:41,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=107754.66666666667, ans=0.2 +2024-09-01 02:28:43,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=107754.66666666667, ans=0.95 +2024-09-01 02:28:53,638 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 50, loss[loss=0.2293, simple_loss=0.2119, pruned_loss=0.0785, ctc_loss=0.177, over 18979.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2125, pruned_loss=0.09069, ctc_loss=0.1886, over 828348.40 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:28:55,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107808.0, ans=0.1 +2024-09-01 02:28:57,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.30 vs. limit=15.0 +2024-09-01 02:29:27,246 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.99 vs. limit=15.0 +2024-09-01 02:29:38,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-09-01 02:29:38,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-09-01 02:30:00,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.72 vs. limit=22.5 +2024-09-01 02:30:17,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=107968.0, ans=15.0 +2024-09-01 02:30:20,080 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.056e+02 2.167e+02 2.338e+02 2.987e+02, threshold=4.333e+02, percent-clipped=0.0 +2024-09-01 02:34:06,134 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 100, loss[loss=0.2075, simple_loss=0.1981, pruned_loss=0.06937, ctc_loss=0.1512, over 19089.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2128, pruned_loss=0.0902, ctc_loss=0.187, over 1475248.85 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:35:08,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108074.66666666667, ans=0.125 +2024-09-01 02:36:26,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=108181.33333333333, ans=0.5 +2024-09-01 02:36:42,077 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 02:36:42,078 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:36:42,128 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:36:54,936 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 0, loss[loss=0.2667, simple_loss=0.2317, pruned_loss=0.1072, ctc_loss=0.1963, over 18540.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.2317, pruned_loss=0.1072, ctc_loss=0.1963, over 18540.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:54,936 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:37:18,563 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 13, validation: loss=0.2186, simple_loss=0.2014, pruned_loss=0.08061, ctc_loss=0.1543, over 1073944.00 frames. +2024-09-01 02:37:18,563 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:37:37,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=108229.33333333333, ans=0.125 +2024-09-01 02:37:59,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 02:38:01,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=108336.0, ans=0.0 +2024-09-01 02:38:18,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=108336.0, ans=0.1 +2024-09-01 02:38:25,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108389.33333333333, ans=0.1 +2024-09-01 02:38:28,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-09-01 02:38:45,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=108442.66666666667, ans=0.025 +2024-09-01 02:39:03,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=108442.66666666667, ans=0.125 +2024-09-01 02:39:08,426 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 50, loss[loss=0.2401, simple_loss=0.2208, pruned_loss=0.08813, ctc_loss=0.1762, over 18984.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2086, pruned_loss=0.08491, ctc_loss=0.1811, over 829065.08 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:16,489 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:39:18,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=108496.0, ans=0.2 +2024-09-01 02:39:29,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=108549.33333333333, ans=0.025 +2024-09-01 02:39:30,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.005e+02 2.143e+02 2.348e+02 3.224e+02, threshold=4.286e+02, percent-clipped=0.0 +2024-09-01 02:40:23,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:40:40,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 02:40:56,424 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 100, loss[loss=0.2213, simple_loss=0.2013, pruned_loss=0.08022, ctc_loss=0.1769, over 19116.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2083, pruned_loss=0.0856, ctc_loss=0.1797, over 1477011.25 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:40:57,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=108762.66666666667, ans=15.0 +2024-09-01 02:41:26,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108816.0, ans=0.1 +2024-09-01 02:41:41,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108869.33333333333, ans=0.1 +2024-09-01 02:41:49,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=108869.33333333333, ans=0.125 +2024-09-01 02:41:56,705 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 02:41:56,705 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:41:56,770 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:42:04,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 02:42:09,602 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 0, loss[loss=0.2447, simple_loss=0.2132, pruned_loss=0.0963, ctc_loss=0.1926, over 18523.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.2132, pruned_loss=0.0963, ctc_loss=0.1926, over 18523.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:42:09,603 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:42:17,646 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2284, 3.3698, 3.6828, 2.8419, 3.8979, 3.9161, 3.8591, 3.8486], + device='cuda:2') +2024-09-01 02:42:33,562 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 14, validation: loss=0.209, simple_loss=0.1966, pruned_loss=0.0763, ctc_loss=0.148, over 1073944.00 frames. 
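[Editor's note on the memory diagnostics above: the paired tuples bracketing each "Empty cache: before and after" message, e.g. (10140450816, 34072559616) before and (28979167232, 34072559616) after, match the (free_bytes, total_bytes) contract of torch.cuda.mem_get_info(), and the "Maximum memory allocated so far is 19803MB" lines match torch.cuda.max_memory_allocated(). A minimal sketch of such logging follows; the function name is illustrative and the grouping of the max-allocated line with the cache flush is for compactness, not taken from dysarthria_finetune.py.]

```python
# Hedged sketch, assuming the memory tuples come from torch.cuda.mem_get_info().
import logging
import torch

def empty_cache_with_logging(device: int = 0) -> None:
    # (free_bytes, total_bytes), e.g. (10140450816, 34072559616)
    logging.info(torch.cuda.mem_get_info(device))
    logging.info("Empty cache: before and after")
    torch.cuda.empty_cache()  # return cached, unused blocks to the driver
    # free memory rises after the flush; total stays constant
    logging.info(torch.cuda.mem_get_info(device))
    logging.info(
        "Maximum memory allocated so far is %dMB",
        torch.cuda.max_memory_allocated(device) // 2**20,
    )
```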
+2024-09-01 02:42:33,562 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:42:51,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=108917.33333333333, ans=0.07 +2024-09-01 02:43:15,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.44 vs. limit=15.0 +2024-09-01 02:43:22,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=109024.0, ans=0.125 +2024-09-01 02:43:32,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=109024.0, ans=0.125 +2024-09-01 02:43:42,871 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.996e+02 2.096e+02 2.326e+02 2.912e+02, threshold=4.192e+02, percent-clipped=0.0 +2024-09-01 02:43:52,722 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:43:52,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=109077.33333333333, ans=0.0 +2024-09-01 02:44:17,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=109130.66666666667, ans=0.0 +2024-09-01 02:44:24,655 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 50, loss[loss=0.239, simple_loss=0.2087, pruned_loss=0.09073, ctc_loss=0.2038, over 18999.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2068, pruned_loss=0.08665, ctc_loss=0.1851, over 827850.18 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:44:46,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.12 vs. limit=15.0 +2024-09-01 02:45:06,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.38 vs. limit=15.0 +2024-09-01 02:45:09,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109290.66666666667, ans=0.2 +2024-09-01 02:45:11,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=109290.66666666667, ans=0.125 +2024-09-01 02:45:22,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=109290.66666666667, ans=0.0 +2024-09-01 02:45:23,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.60 vs. limit=22.5 +2024-09-01 02:46:12,178 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 100, loss[loss=0.2044, simple_loss=0.1957, pruned_loss=0.07142, ctc_loss=0.1567, over 19059.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2072, pruned_loss=0.08708, ctc_loss=0.1831, over 1475617.37 frames. 
], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:46:28,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=109450.66666666667, ans=0.125 +2024-09-01 02:46:52,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=109504.0, ans=0.2 +2024-09-01 02:46:54,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=109557.33333333333, ans=0.0 +2024-09-01 02:47:12,358 INFO [dysarthria_finetune.py:1435] (2/4) (10081730560, 34072559616) +2024-09-01 02:47:12,359 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:47:12,420 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:47:25,785 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 0, loss[loss=0.2861, simple_loss=0.247, pruned_loss=0.1161, ctc_loss=0.2234, over 18678.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.247, pruned_loss=0.1161, ctc_loss=0.2234, over 18678.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:47:25,786 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:48:03,605 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 15, validation: loss=0.2059, simple_loss=0.1951, pruned_loss=0.07588, ctc_loss=0.1481, over 1073944.00 frames. +2024-09-01 02:48:03,605 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:48:10,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-09-01 02:48:14,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-09-01 02:48:20,626 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.965e+02 2.102e+02 2.301e+02 3.159e+02, threshold=4.205e+02, percent-clipped=0.0 +2024-09-01 02:48:25,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.02 vs. limit=22.5 +2024-09-01 02:48:27,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-09-01 02:48:40,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.52 vs. limit=12.0 +2024-09-01 02:50:24,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109765.33333333333, ans=0.0 +2024-09-01 02:50:39,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109765.33333333333, ans=0.0 +2024-09-01 02:50:53,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109818.66666666667, ans=0.125 +2024-09-01 02:51:41,401 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 50, loss[loss=0.2163, simple_loss=0.2089, pruned_loss=0.07619, ctc_loss=0.1653, over 18994.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2041, pruned_loss=0.08491, ctc_loss=0.1802, over 827605.34 frames. 
], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:52:40,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=109925.33333333333, ans=0.2 +2024-09-01 02:53:16,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.21 vs. limit=15.0 +2024-09-01 02:54:03,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109978.66666666667, ans=0.125 +2024-09-01 02:54:31,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=110032.0, ans=0.125 +2024-09-01 02:55:28,639 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 100, loss[loss=0.2082, simple_loss=0.1991, pruned_loss=0.07444, ctc_loss=0.1631, over 19062.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2034, pruned_loss=0.0844, ctc_loss=0.1786, over 1475114.53 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:55:32,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=110138.66666666667, ans=0.2 +2024-09-01 02:55:44,090 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 1.961e+02 2.099e+02 2.266e+02 2.969e+02, threshold=4.197e+02, percent-clipped=0.0 +2024-09-01 02:57:18,316 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 02:57:18,317 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:57:18,359 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 02:57:33,304 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 0, loss[loss=0.257, simple_loss=0.2223, pruned_loss=0.1039, ctc_loss=0.2068, over 18504.00 frames. ], tot_loss[loss=0.257, simple_loss=0.2223, pruned_loss=0.1039, ctc_loss=0.2068, over 18504.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:57:33,304 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:58:10,357 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 16, validation: loss=0.2065, simple_loss=0.1951, pruned_loss=0.07751, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 02:58:10,358 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 02:58:32,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=110293.33333333333, ans=0.0 +2024-09-01 02:58:35,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.84 vs. limit=10.0 +2024-09-01 02:58:39,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=110346.66666666667, ans=0.2 +2024-09-01 02:58:49,898 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:59:54,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=110506.66666666667, ans=0.125 +2024-09-01 03:00:21,826 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 50, loss[loss=0.1955, simple_loss=0.1932, pruned_loss=0.06912, ctc_loss=0.1474, over 19044.00 frames. 
], tot_loss[loss=0.2133, simple_loss=0.1968, pruned_loss=0.07968, ctc_loss=0.1734, over 828171.03 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:00:45,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110560.0, ans=0.125 +2024-09-01 03:01:14,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=110666.66666666667, ans=0.025 +2024-09-01 03:01:25,494 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.686e+02 1.971e+02 2.093e+02 2.277e+02 2.936e+02, threshold=4.187e+02, percent-clipped=0.0 +2024-09-01 03:02:01,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=110720.0, ans=0.2 +2024-09-01 03:02:27,261 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 100, loss[loss=0.1912, simple_loss=0.1843, pruned_loss=0.06797, ctc_loss=0.1552, over 19090.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.1967, pruned_loss=0.07934, ctc_loss=0.1707, over 1476933.27 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:02:37,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:02:54,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.16 vs. limit=22.5 +2024-09-01 03:03:06,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.42 vs. limit=12.0 +2024-09-01 03:03:29,007 INFO [dysarthria_finetune.py:1435] (2/4) (10138353664, 34072559616) +2024-09-01 03:03:29,008 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:03:29,055 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 03:03:41,380 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 0, loss[loss=0.2406, simple_loss=0.2169, pruned_loss=0.09479, ctc_loss=0.1866, over 18336.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2169, pruned_loss=0.09479, ctc_loss=0.1866, over 18336.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:03:41,381 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:03:42,840 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.7163, 2.9691, 2.6776, 3.0356], device='cuda:2') +2024-09-01 03:03:45,327 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.6989, 1.5512, 1.5774, 1.7242, 1.8196, 1.7138, 1.7730, 1.7477], + device='cuda:2') +2024-09-01 03:04:05,401 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 17, validation: loss=0.1943, simple_loss=0.1886, pruned_loss=0.07183, ctc_loss=0.1409, over 1073944.00 frames. 
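[Editor's note on the recurring optim.py:487 warnings: each reports a five-point summary (min, 25%, median, 75%, max) of recent gradient norms, and in every instance in this log the threshold equals clipping_scale times the logged median (e.g. 2.0 × 2.096e+02 = 4.192e+02). The sketch below reconstructs that diagnostic under those observations; the buffer of recent norms and the function name are assumptions, not the verified icefall implementation.]

```python
# Hedged sketch of the grad-norm diagnostic; `norms` is assumed to hold
# the gradient norms of the most recent batches.
import logging
import torch

def log_grad_norm_stats(norms: torch.Tensor, clipping_scale: float = 2.0) -> float:
    # Five-point summary (min, 25%, median, 75%, max) of recent grad norms.
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    # The logged thresholds are consistently clipping_scale * median.
    threshold = clipping_scale * q[2].item()
    percent_clipped = 100.0 * (norms > threshold).float().mean().item()
    logging.warning(
        "Clipping_scale=%s, grad-norm quartiles %s, threshold=%.3e, percent-clipped=%.1f",
        clipping_scale,
        " ".join(f"{v:.3e}" for v in q.tolist()),
        threshold,
        percent_clipped,
    )
    return threshold
```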
+2024-09-01 03:04:05,402 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 03:04:20,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110981.33333333333, ans=0.125 +2024-09-01 03:04:32,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111034.66666666667, ans=0.1 +2024-09-01 03:04:38,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 03:05:09,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111088.0, ans=0.125 +2024-09-01 03:05:33,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=111194.66666666667, ans=0.2 +2024-09-01 03:05:45,143 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 1.958e+02 2.075e+02 2.282e+02 2.777e+02, threshold=4.150e+02, percent-clipped=0.0 +2024-09-01 03:05:46,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=111194.66666666667, ans=0.125 +2024-09-01 03:05:56,267 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 50, loss[loss=0.1852, simple_loss=0.1923, pruned_loss=0.06054, ctc_loss=0.1424, over 19057.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.1965, pruned_loss=0.07954, ctc_loss=0.1714, over 827125.84 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:07:14,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=111408.0, ans=0.0 +2024-09-01 03:07:14,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.06 vs. limit=15.0 +2024-09-01 03:07:33,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=111461.33333333333, ans=0.125 +2024-09-01 03:07:45,180 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 100, loss[loss=0.1887, simple_loss=0.1797, pruned_loss=0.06918, ctc_loss=0.1485, over 19126.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.1953, pruned_loss=0.07861, ctc_loss=0.1691, over 1475165.47 frames. 
], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:08:03,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111514.66666666667, ans=0.125 +2024-09-01 03:08:24,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111568.0, ans=0.1 +2024-09-01 03:08:29,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=111621.33333333333, ans=0.0 +2024-09-01 03:08:44,474 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 03:08:44,475 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:08:44,517 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 03:08:52,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 03:09:16,224 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 0, loss[loss=0.2402, simple_loss=0.2173, pruned_loss=0.09277, ctc_loss=0.1937, over 18559.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2173, pruned_loss=0.09277, ctc_loss=0.1937, over 18559.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:09:16,225 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:09:39,624 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 18, validation: loss=0.1961, simple_loss=0.1886, pruned_loss=0.07291, ctc_loss=0.1441, over 1073944.00 frames. +2024-09-01 03:09:39,625 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 03:09:45,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111669.33333333333, ans=0.0 +2024-09-01 03:10:12,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.17 vs. limit=15.0 +2024-09-01 03:10:14,927 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.703e+02 1.913e+02 2.060e+02 2.285e+02 3.151e+02, threshold=4.120e+02, percent-clipped=0.0 +2024-09-01 03:10:33,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111776.0, ans=0.125 +2024-09-01 03:10:37,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=111776.0, ans=0.0 +2024-09-01 03:11:03,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=111829.33333333333, ans=0.0 +2024-09-01 03:11:10,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.30 vs. limit=22.5 +2024-09-01 03:11:29,066 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 50, loss[loss=0.2482, simple_loss=0.2214, pruned_loss=0.09671, ctc_loss=0.2041, over 18975.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.1969, pruned_loss=0.08079, ctc_loss=0.1745, over 827610.12 frames. 
], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:11:38,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=111936.0, ans=0.0 +2024-09-01 03:12:07,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-09-01 03:12:59,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. limit=15.0 +2024-09-01 03:13:10,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=112149.33333333333, ans=0.125 +2024-09-01 03:13:35,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=112202.66666666667, ans=0.0 +2024-09-01 03:13:44,688 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 100, loss[loss=0.1786, simple_loss=0.171, pruned_loss=0.06428, ctc_loss=0.1442, over 19135.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.1936, pruned_loss=0.07824, ctc_loss=0.1695, over 1477220.69 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:13:56,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=112202.66666666667, ans=0.125 +2024-09-01 03:14:19,253 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 1.898e+02 2.020e+02 2.262e+02 2.800e+02, threshold=4.040e+02, percent-clipped=0.0 +2024-09-01 03:14:37,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=112309.33333333333, ans=0.125 +2024-09-01 03:14:38,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.63 vs. limit=15.0 +2024-09-01 03:14:43,308 INFO [dysarthria_finetune.py:1435] (2/4) (10142547968, 34072559616) +2024-09-01 03:14:43,308 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:14:43,349 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 03:14:55,811 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 0, loss[loss=0.222, simple_loss=0.2012, pruned_loss=0.08618, ctc_loss=0.176, over 18438.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2012, pruned_loss=0.08618, ctc_loss=0.176, over 18438.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:14:55,811 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:15:42,620 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 19, validation: loss=0.1928, simple_loss=0.1862, pruned_loss=0.07146, ctc_loss=0.1413, over 1073944.00 frames. +2024-09-01 03:15:42,621 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 03:16:00,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.31 vs. 
limit=15.0 +2024-09-01 03:16:42,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112405.33333333333, ans=0.125 +2024-09-01 03:17:02,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112405.33333333333, ans=0.1 +2024-09-01 03:17:57,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=15.0 +2024-09-01 03:18:56,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 03:19:02,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=112565.33333333333, ans=0.2 +2024-09-01 03:19:10,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 03:19:34,387 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 50, loss[loss=0.2048, simple_loss=0.1981, pruned_loss=0.07234, ctc_loss=0.167, over 19013.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.192, pruned_loss=0.07794, ctc_loss=0.1689, over 827262.88 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:20:07,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112618.66666666667, ans=0.0 +2024-09-01 03:20:34,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=112672.0, ans=0.04949747468305833 +2024-09-01 03:22:03,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=112778.66666666667, ans=0.025 +2024-09-01 03:22:34,902 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 1.922e+02 2.090e+02 2.243e+02 2.725e+02, threshold=4.180e+02, percent-clipped=0.0 +2024-09-01 03:22:36,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=112778.66666666667, ans=0.125 +2024-09-01 03:23:27,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=112832.0, ans=0.5 +2024-09-01 03:23:34,279 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 100, loss[loss=0.1568, simple_loss=0.1539, pruned_loss=0.05365, ctc_loss=0.1308, over 19169.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.1917, pruned_loss=0.07787, ctc_loss=0.167, over 1475351.90 frames. 
], batch size: 134, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:23:46,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=112885.33333333333, ans=0.2 +2024-09-01 03:24:40,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112938.66666666667, ans=0.1 +2024-09-01 03:25:05,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=112992.0, ans=0.025 +2024-09-01 03:25:13,468 INFO [dysarthria_finetune.py:1435] (2/4) (10169810944, 34072559616) +2024-09-01 03:25:13,469 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:25:13,503 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 03:25:27,057 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 0, loss[loss=0.2318, simple_loss=0.2127, pruned_loss=0.08921, ctc_loss=0.1813, over 18527.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.2127, pruned_loss=0.08921, ctc_loss=0.1813, over 18527.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:25:27,057 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:26:10,755 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 20, validation: loss=0.19, simple_loss=0.1838, pruned_loss=0.07041, ctc_loss=0.1385, over 1073944.00 frames. +2024-09-01 03:26:10,756 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19803MB +2024-09-01 03:26:27,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=15.0 +2024-09-01 03:26:40,555 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.89 vs. limit=5.0 +2024-09-01 03:27:14,702 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.60 vs. limit=22.5 +2024-09-01 03:27:22,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113093.33333333333, ans=0.1 +2024-09-01 03:27:59,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113146.66666666667, ans=0.125 +2024-09-01 03:28:13,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=113146.66666666667, ans=0.09899494936611666 +2024-09-01 03:28:33,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=113200.0, ans=0.025 +2024-09-01 03:29:11,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113253.33333333333, ans=0.1 +2024-09-01 03:29:26,704 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 50, loss[loss=0.203, simple_loss=0.192, pruned_loss=0.07429, ctc_loss=0.1635, over 18968.00 frames. ], tot_loss[loss=0.204, simple_loss=0.1893, pruned_loss=0.07603, ctc_loss=0.1667, over 828106.18 frames. 
], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:30:05,882 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 1.917e+02 2.046e+02 2.200e+02 2.791e+02, threshold=4.093e+02, percent-clipped=0.0 +2024-09-01 03:30:48,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113360.0, ans=0.125 +2024-09-01 03:31:59,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=113466.66666666667, ans=0.0 +2024-09-01 03:33:06,746 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 100, loss[loss=0.1599, simple_loss=0.1626, pruned_loss=0.0538, ctc_loss=0.1242, over 19074.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.1877, pruned_loss=0.07473, ctc_loss=0.1634, over 1476081.83 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:33:09,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=113573.33333333333, ans=0.2 +2024-09-01 03:33:39,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=113573.33333333333, ans=0.5 +2024-09-01 03:34:13,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 03:34:13,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 03:34:43,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=113680.0, ans=0.125 +2024-09-01 03:35:10,305 INFO [dysarthria_finetune.py:1435] (2/4) (10140450816, 34072559616) +2024-09-01 03:35:10,306 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:35:10,365 INFO [dysarthria_finetune.py:1440] (2/4) (28979167232, 34072559616) +2024-09-01 03:35:10,365 INFO [dysarthria_finetune.py:1442] (2/4) Done! 
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-3 b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-3 new file mode 100644 index 0000000000000000000000000000000000000000..1eacf8b0759ec625dfa7c90a9b028a42b101dd26 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/log/log-train-2024-08-31-22-09-00-3 @@ -0,0 +1,560 @@ +2024-08-31 22:09:00,005 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-31 22:09:00,006 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-31 22:09:00,006 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-31 22:09:00,558 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616) +2024-08-31 22:09:00,559 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-31 22:09:01,025 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2653.int.cedar.computecanada.ca', 'IP address': '172.16.146.90'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 
'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:09:01,026 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-31 22:09:16,886 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66367431 +2024-08-31 22:09:16,886 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt +2024-08-31 22:10:50,729 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-31 22:11:01,688 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-31 22:11:01,783 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-31 22:11:02,460 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-31 22:11:02,461 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-31 22:11:17,376 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-31 22:11:18,285 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-31 22:11:18,291 INFO [dysarthria_asr_datamodule.py:501] (3/4) About to get dev cuts +2024-08-31 22:11:18,428 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-31 22:11:18,751 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-31 22:11:18,751 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:12:59,095 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.63 vs. limit=5.0 +2024-08-31 22:12:59,567 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=13.51 vs. limit=7.5 +2024-08-31 22:13:03,031 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12706MB +2024-08-31 22:13:04,356 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=17.15 vs. limit=7.5 +2024-08-31 22:13:04,883 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12706MB +2024-08-31 22:14:12,892 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12706MB +2024-08-31 22:14:14,958 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12706MB +2024-08-31 22:19:50,669 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=23.91 vs. limit=7.5 +2024-08-31 22:19:51,915 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12706MB +2024-08-31 22:19:54,343 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12706MB +2024-08-31 22:20:43,748 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3536, simple_loss=0.2859, pruned_loss=0.1622, ctc_loss=0.2575, over 18634.00 frames. ], tot_loss[loss=0.3536, simple_loss=0.2859, pruned_loss=0.1622, ctc_loss=0.2575, over 18634.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:43,749 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-31 22:46:04,597 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3942, simple_loss=0.3187, pruned_loss=0.1927, ctc_loss=0.281, over 1073944.00 frames. +2024-08-31 22:46:04,729 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14296MB +2024-08-31 23:01:25,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-31 23:06:11,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.685e+02 9.975e+02 1.051e+03 1.091e+03 1.133e+03, threshold=4.203e+03, percent-clipped=0.0 +2024-08-31 23:12:07,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.80 vs. limit=22.5 +2024-08-31 23:24:50,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100106.66666666667, ans=0.125 +2024-08-31 23:25:12,998 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.960e+02 9.836e+02 1.043e+03 1.067e+03 1.144e+03, threshold=4.173e+03, percent-clipped=0.0 +2024-08-31 23:43:54,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.61 vs. limit=15.0 +2024-08-31 23:43:54,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=38.43 vs. limit=22.5 +2024-08-31 23:48:48,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100160.0, ans=0.125 +2024-08-31 23:50:05,824 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.677e+02 8.648e+02 9.697e+02 1.051e+03 1.144e+03, threshold=3.879e+03, percent-clipped=0.0 +2024-08-31 23:50:46,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=100213.33333333333, ans=10.0 +2024-08-31 23:52:46,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=100213.33333333333, ans=0.2 +2024-08-31 23:55:34,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=100266.66666666667, ans=15.0 +2024-08-31 23:55:38,233 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.4536, simple_loss=0.3606, pruned_loss=0.2303, ctc_loss=0.3464, over 19001.00 frames. ], tot_loss[loss=0.4274, simple_loss=0.3426, pruned_loss=0.2127, ctc_loss=0.3165, over 828973.50 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 23:56:18,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=20.32 vs. limit=15.0 +2024-08-31 23:59:46,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=100320.0, ans=0.0 +2024-09-01 00:01:23,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=18.41 vs. 
limit=15.0 +2024-09-01 00:06:50,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=16.09 vs. limit=15.0 +2024-09-01 00:07:16,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-09-01 00:08:29,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100373.33333333333, ans=0.1 +2024-09-01 00:09:24,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=100373.33333333333, ans=0.125 +2024-09-01 00:14:30,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-09-01 00:14:31,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=31.14 vs. limit=15.0 +2024-09-01 00:16:00,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=8.77 vs. limit=12.0 +2024-09-01 00:18:32,448 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.599e+02 6.914e+02 7.776e+02 9.170e+02 1.144e+03, threshold=1.555e+03, percent-clipped=0.0 +2024-09-01 00:18:32,486 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.3936, simple_loss=0.3164, pruned_loss=0.1907, ctc_loss=0.2875, over 19146.00 frames. ], tot_loss[loss=0.4119, simple_loss=0.3302, pruned_loss=0.2029, ctc_loss=0.305, over 1476162.18 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:22:28,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=35.40 vs. limit=15.0 +2024-09-01 00:23:02,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100586.66666666667, ans=0.0 +2024-09-01 00:25:35,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=100586.66666666667, ans=0.02 +2024-09-01 00:27:44,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=100640.0, ans=15.0 +2024-09-01 00:28:35,600 INFO [dysarthria_finetune.py:1435] (3/4) (13353287680, 34072559616) +2024-09-01 00:28:35,601 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 00:28:35,633 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 00:30:12,102 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 0, loss[loss=0.3937, simple_loss=0.3173, pruned_loss=0.182, ctc_loss=0.2893, over 18501.00 frames. ], tot_loss[loss=0.3937, simple_loss=0.3173, pruned_loss=0.182, ctc_loss=0.2893, over 18501.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:30:12,103 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 00:34:27,528 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 2, validation: loss=0.3547, simple_loss=0.2901, pruned_loss=0.1627, ctc_loss=0.2412, over 1073944.00 frames. 
+2024-09-01 00:34:27,529 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 00:40:40,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:41:39,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:41:39,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=100789.33333333333, ans=0.125 +2024-09-01 00:43:46,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0 +2024-09-01 00:44:20,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.67 vs. limit=6.0 +2024-09-01 00:45:11,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=100842.66666666667, ans=0.07 +2024-09-01 00:45:59,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=100896.0, ans=0.04949747468305833 +2024-09-01 00:48:18,400 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 50, loss[loss=0.4155, simple_loss=0.3323, pruned_loss=0.1958, ctc_loss=0.3136, over 18956.00 frames. ], tot_loss[loss=0.3971, simple_loss=0.319, pruned_loss=0.1902, ctc_loss=0.2911, over 828460.00 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 00:49:22,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-09-01 00:50:01,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=101002.66666666667, ans=0.0 +2024-09-01 00:50:04,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101002.66666666667, ans=0.125 +2024-09-01 00:53:05,352 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.883e+02 4.624e+02 4.997e+02 5.383e+02 6.686e+02, threshold=9.995e+02, percent-clipped=0.0 +2024-09-01 00:54:15,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-09-01 00:54:21,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-09-01 00:55:06,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=101162.66666666667, ans=0.0 +2024-09-01 00:56:13,740 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=13.36 vs. limit=12.0 +2024-09-01 00:56:30,067 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 100, loss[loss=0.343, simple_loss=0.2774, pruned_loss=0.158, ctc_loss=0.2471, over 19077.00 frames. ], tot_loss[loss=0.3817, simple_loss=0.308, pruned_loss=0.1791, ctc_loss=0.276, over 1476919.42 frames. 
], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 00:59:23,684 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:00:16,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101269.33333333333, ans=0.125 +2024-09-01 01:01:16,818 INFO [dysarthria_finetune.py:1435] (3/4) (13298761728, 34072559616) +2024-09-01 01:01:16,818 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:01:16,861 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 01:01:35,833 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 0, loss[loss=0.3399, simple_loss=0.2754, pruned_loss=0.1477, ctc_loss=0.2517, over 18579.00 frames. ], tot_loss[loss=0.3399, simple_loss=0.2754, pruned_loss=0.1477, ctc_loss=0.2517, over 18579.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:01:35,833 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:01:59,926 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 3, validation: loss=0.3274, simple_loss=0.2708, pruned_loss=0.1428, ctc_loss=0.2163, over 1073944.00 frames. +2024-09-01 01:01:59,926 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 01:02:02,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.98 vs. limit=15.0 +2024-09-01 01:03:09,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=101424.0, ans=0.125 +2024-09-01 01:03:48,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101477.33333333333, ans=0.125 +2024-09-01 01:04:30,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=101584.0, ans=0.125 +2024-09-01 01:04:33,377 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 3.454e+02 3.711e+02 3.996e+02 5.509e+02, threshold=7.422e+02, percent-clipped=0.0 +2024-09-01 01:04:56,814 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 50, loss[loss=0.3518, simple_loss=0.2929, pruned_loss=0.148, ctc_loss=0.2311, over 19113.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.2954, pruned_loss=0.1633, ctc_loss=0.2594, over 827781.85 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:05:07,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101637.33333333333, ans=0.1 +2024-09-01 01:05:18,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.04 vs. limit=6.0 +2024-09-01 01:05:41,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=16.88 vs. 
limit=15.0 +2024-09-01 01:05:43,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101690.66666666667, ans=0.125 +2024-09-01 01:06:07,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=101744.0, ans=0.2 +2024-09-01 01:06:26,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.20 vs. limit=12.0 +2024-09-01 01:06:30,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=101797.33333333333, ans=0.2 +2024-09-01 01:06:32,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101797.33333333333, ans=0.125 +2024-09-01 01:06:39,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.13 vs. limit=15.0 +2024-09-01 01:06:43,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:06:47,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.14 vs. limit=15.0 +2024-09-01 01:06:59,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.77 vs. limit=22.5 +2024-09-01 01:07:00,076 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 100, loss[loss=0.3301, simple_loss=0.2706, pruned_loss=0.1429, ctc_loss=0.2322, over 19145.00 frames. ], tot_loss[loss=0.3545, simple_loss=0.2882, pruned_loss=0.1589, ctc_loss=0.2525, over 1476240.06 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:07:06,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101904.0, ans=0.1 +2024-09-01 01:07:28,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=101904.0, ans=0.2 +2024-09-01 01:07:35,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=101904.0, ans=0.025 +2024-09-01 01:08:21,702 INFO [dysarthria_finetune.py:1435] (3/4) (13321830400, 34072559616) +2024-09-01 01:08:21,702 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:08:21,745 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 01:08:35,075 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 0, loss[loss=0.4029, simple_loss=0.3191, pruned_loss=0.1877, ctc_loss=0.3163, over 18645.00 frames. ], tot_loss[loss=0.4029, simple_loss=0.3191, pruned_loss=0.1877, ctc_loss=0.3163, over 18645.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:08:35,075 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:08:58,406 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 4, validation: loss=0.308, simple_loss=0.2573, pruned_loss=0.1299, ctc_loss=0.2, over 1073944.00 frames. 
+2024-09-01 01:08:58,406 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 01:09:19,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.65 vs. limit=15.0 +2024-09-01 01:09:30,348 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.03 vs. limit=22.5 +2024-09-01 01:09:33,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 2.869e+02 3.070e+02 3.452e+02 5.291e+02, threshold=6.140e+02, percent-clipped=0.0 +2024-09-01 01:09:38,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=102106.66666666667, ans=0.2 +2024-09-01 01:09:55,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=102160.0, ans=0.0 +2024-09-01 01:09:57,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:09:59,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:10:01,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:10:24,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=102213.33333333333, ans=0.0 +2024-09-01 01:10:51,038 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 50, loss[loss=0.3285, simple_loss=0.2705, pruned_loss=0.1422, ctc_loss=0.2266, over 18993.00 frames. ], tot_loss[loss=0.3431, simple_loss=0.2801, pruned_loss=0.1499, ctc_loss=0.2455, over 827748.42 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:11:18,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102373.33333333333, ans=0.125 +2024-09-01 01:11:27,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=102373.33333333333, ans=0.025 +2024-09-01 01:11:57,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=102480.0, ans=0.125 +2024-09-01 01:12:38,908 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 100, loss[loss=0.3329, simple_loss=0.2716, pruned_loss=0.1491, ctc_loss=0.2342, over 19161.00 frames. ], tot_loss[loss=0.3318, simple_loss=0.2721, pruned_loss=0.1437, ctc_loss=0.2345, over 1475350.41 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:13:12,356 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.110e+02 2.669e+02 2.871e+02 3.122e+02 4.671e+02, threshold=5.742e+02, percent-clipped=0.0 +2024-09-01 01:13:40,158 INFO [dysarthria_finetune.py:1435] (3/4) (13313441792, 34072559616) +2024-09-01 01:13:40,159 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:13:40,191 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 01:13:53,060 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 0, loss[loss=0.3017, simple_loss=0.2507, pruned_loss=0.1278, ctc_loss=0.2041, over 18566.00 frames. 
], tot_loss[loss=0.3017, simple_loss=0.2507, pruned_loss=0.1278, ctc_loss=0.2041, over 18566.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:13:53,061 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:14:16,499 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 5, validation: loss=0.2909, simple_loss=0.2453, pruned_loss=0.1191, ctc_loss=0.1881, over 1073944.00 frames. +2024-09-01 01:14:16,500 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 01:15:16,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.70 vs. limit=15.0 +2024-09-01 01:15:32,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=102794.66666666667, ans=0.015 +2024-09-01 01:16:01,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.26 vs. limit=15.0 +2024-09-01 01:16:49,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=102848.0, ans=0.0 +2024-09-01 01:20:50,680 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 50, loss[loss=0.3449, simple_loss=0.282, pruned_loss=0.152, ctc_loss=0.2447, over 18976.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.2629, pruned_loss=0.1341, ctc_loss=0.2275, over 827749.28 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:22:21,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.48 vs. limit=22.5 +2024-09-01 01:24:18,284 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.00 vs. limit=22.5 +2024-09-01 01:24:37,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=103114.66666666667, ans=0.0 +2024-09-01 01:25:19,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=15.0 +2024-09-01 01:25:29,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=103168.0, ans=0.0 +2024-09-01 01:25:33,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.253e+02 2.485e+02 2.709e+02 2.997e+02 4.733e+02, threshold=5.419e+02, percent-clipped=0.0 +2024-09-01 01:26:19,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-09-01 01:26:22,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:26:23,132 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 100, loss[loss=0.3073, simple_loss=0.2558, pruned_loss=0.1258, ctc_loss=0.217, over 19091.00 frames. ], tot_loss[loss=0.315, simple_loss=0.2597, pruned_loss=0.1327, ctc_loss=0.2248, over 1475913.16 frames. 
], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:26:36,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:26:40,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-09-01 01:27:10,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.75 vs. limit=22.5 +2024-09-01 01:27:54,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.75 vs. limit=22.5 +2024-09-01 01:27:55,382 INFO [dysarthria_finetune.py:1435] (3/4) (1227554816, 34072559616) +2024-09-01 01:27:55,382 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:27:55,473 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 01:28:14,510 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 0, loss[loss=0.281, simple_loss=0.2376, pruned_loss=0.113, ctc_loss=0.1882, over 18684.00 frames. ], tot_loss[loss=0.281, simple_loss=0.2376, pruned_loss=0.113, ctc_loss=0.1882, over 18684.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:28:14,510 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:28:37,896 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 6, validation: loss=0.2789, simple_loss=0.2369, pruned_loss=0.1122, ctc_loss=0.1819, over 1073944.00 frames. +2024-09-01 01:28:37,897 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 01:29:51,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=103584.0, ans=0.025 +2024-09-01 01:30:04,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.49 vs. limit=15.0 +2024-09-01 01:30:17,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103637.33333333333, ans=0.125 +2024-09-01 01:30:32,248 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 50, loss[loss=0.3048, simple_loss=0.2522, pruned_loss=0.1284, ctc_loss=0.2173, over 19058.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.2532, pruned_loss=0.1264, ctc_loss=0.2201, over 828493.81 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:30:44,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=103690.66666666667, ans=0.0 +2024-09-01 01:31:04,785 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.055e+02 2.419e+02 2.583e+02 2.819e+02 4.094e+02, threshold=5.165e+02, percent-clipped=0.0 +2024-09-01 01:31:34,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=103744.0, ans=10.0 +2024-09-01 01:31:38,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103797.33333333333, ans=0.125 +2024-09-01 01:31:43,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.81 vs. limit=22.5 +2024-09-01 01:32:27,122 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:32:37,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=103904.0, ans=0.125 +2024-09-01 01:32:43,095 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 100, loss[loss=0.2937, simple_loss=0.2405, pruned_loss=0.1251, ctc_loss=0.2179, over 19113.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.2513, pruned_loss=0.1251, ctc_loss=0.2181, over 1475249.23 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:32:55,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103957.33333333333, ans=0.125 +2024-09-01 01:32:55,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=103957.33333333333, ans=0.5 +2024-09-01 01:33:24,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104010.66666666667, ans=0.1 +2024-09-01 01:33:39,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.08 vs. limit=15.0 +2024-09-01 01:33:44,580 INFO [dysarthria_finetune.py:1435] (3/4) (717946880, 34072559616) +2024-09-01 01:33:44,581 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:33:44,671 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 01:33:57,912 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 0, loss[loss=0.3232, simple_loss=0.2643, pruned_loss=0.1388, ctc_loss=0.2391, over 18595.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.2643, pruned_loss=0.1388, ctc_loss=0.2391, over 18595.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:33:57,912 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:34:21,900 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 7, validation: loss=0.2604, simple_loss=0.2251, pruned_loss=0.1007, ctc_loss=0.1681, over 1073944.00 frames. 
+2024-09-01 01:34:21,901 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 01:34:52,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=104160.0, ans=0.0 +2024-09-01 01:35:40,245 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.049e+02 2.272e+02 2.384e+02 2.601e+02 4.291e+02, threshold=4.768e+02, percent-clipped=0.0 +2024-09-01 01:36:07,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=104266.66666666667, ans=0.2 +2024-09-01 01:36:15,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104320.0, ans=0.125 +2024-09-01 01:37:02,201 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 50, loss[loss=0.2985, simple_loss=0.2498, pruned_loss=0.122, ctc_loss=0.2136, over 18963.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.2456, pruned_loss=0.1184, ctc_loss=0.2131, over 827887.87 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:37:35,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=104373.33333333333, ans=0.07 +2024-09-01 01:38:41,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.31 vs. limit=15.0 +2024-09-01 01:39:56,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104533.33333333333, ans=0.1 +2024-09-01 01:39:59,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=104533.33333333333, ans=0.0 +2024-09-01 01:40:47,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104586.66666666667, ans=0.125 +2024-09-01 01:40:52,490 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 100, loss[loss=0.2774, simple_loss=0.2327, pruned_loss=0.1126, ctc_loss=0.1999, over 19124.00 frames. ], tot_loss[loss=0.287, simple_loss=0.2407, pruned_loss=0.1155, ctc_loss=0.2078, over 1475075.17 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 16.0 +2024-09-01 01:41:26,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.95 vs. limit=10.0 +2024-09-01 01:41:48,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=104693.33333333333, ans=0.2 +2024-09-01 01:42:54,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.18 vs. limit=15.0 +2024-09-01 01:42:59,046 INFO [dysarthria_finetune.py:1435] (3/4) (13351190528, 34072559616) +2024-09-01 01:42:59,047 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:42:59,096 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 01:43:13,098 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 0, loss[loss=0.2737, simple_loss=0.2319, pruned_loss=0.1106, ctc_loss=0.1908, over 18547.00 frames. ], tot_loss[loss=0.2737, simple_loss=0.2319, pruned_loss=0.1106, ctc_loss=0.1908, over 18547.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:43:13,099 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:44:04,944 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 8, validation: loss=0.2572, simple_loss=0.2228, pruned_loss=0.09973, ctc_loss=0.1708, over 1073944.00 frames. +2024-09-01 01:44:04,944 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 01:44:09,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.79 vs. limit=12.0 +2024-09-01 01:44:25,620 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.924e+02 2.205e+02 2.324e+02 2.533e+02 3.850e+02, threshold=4.647e+02, percent-clipped=0.0 +2024-09-01 01:53:56,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=104949.33333333333, ans=0.025 +2024-09-01 01:54:31,867 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:54:39,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=104949.33333333333, ans=0.125 +2024-09-01 01:55:10,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=105002.66666666667, ans=0.025 +2024-09-01 01:56:16,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=105056.0, ans=0.125 +2024-09-01 01:56:19,987 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 50, loss[loss=0.2953, simple_loss=0.2463, pruned_loss=0.1227, ctc_loss=0.2139, over 18964.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.237, pruned_loss=0.1118, ctc_loss=0.2062, over 828441.23 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:13,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=105056.0, ans=0.125 +2024-09-01 01:57:46,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105109.33333333333, ans=0.125 +2024-09-01 01:58:24,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105109.33333333333, ans=0.0 +2024-09-01 01:59:29,723 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.62 vs. limit=22.5 +2024-09-01 02:00:56,046 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 100, loss[loss=0.2942, simple_loss=0.248, pruned_loss=0.1172, ctc_loss=0.2189, over 19119.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.2346, pruned_loss=0.1105, ctc_loss=0.2037, over 1475727.62 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:01:07,947 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.165e+02 2.362e+02 2.610e+02 3.254e+02, threshold=4.723e+02, percent-clipped=0.0 +2024-09-01 02:02:50,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.73 vs. 
limit=15.0 +2024-09-01 02:02:57,163 INFO [dysarthria_finetune.py:1435] (3/4) (14437515264, 34072559616) +2024-09-01 02:02:57,164 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:02:57,205 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:03:10,302 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 0, loss[loss=0.2955, simple_loss=0.25, pruned_loss=0.1182, ctc_loss=0.2164, over 18777.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.25, pruned_loss=0.1182, ctc_loss=0.2164, over 18777.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:10,302 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:03:33,812 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 9, validation: loss=0.2431, simple_loss=0.2147, pruned_loss=0.0913, ctc_loss=0.1608, over 1073944.00 frames. +2024-09-01 02:03:33,813 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:04:14,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=105530.66666666667, ans=0.125 +2024-09-01 02:04:16,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=105530.66666666667, ans=0.035 +2024-09-01 02:04:21,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=105530.66666666667, ans=0.0 +2024-09-01 02:04:25,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105584.0, ans=0.125 +2024-09-01 02:04:28,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=105584.0, ans=0.0 +2024-09-01 02:04:42,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=105584.0, ans=0.0 +2024-09-01 02:04:50,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.46 vs. limit=15.0 +2024-09-01 02:04:52,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:05:04,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=105637.33333333333, ans=0.1 +2024-09-01 02:05:23,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-09-01 02:05:27,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=105690.66666666667, ans=0.0 +2024-09-01 02:05:29,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.45 vs. limit=15.0 +2024-09-01 02:05:38,136 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 50, loss[loss=0.2586, simple_loss=0.2286, pruned_loss=0.09443, ctc_loss=0.1816, over 18965.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.2283, pruned_loss=0.1035, ctc_loss=0.1986, over 827503.70 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:55,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=105744.0, ans=0.125 +2024-09-01 02:06:34,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=105850.66666666667, ans=0.0 +2024-09-01 02:06:34,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:06:37,632 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.168e+02 2.346e+02 2.556e+02 3.441e+02, threshold=4.692e+02, percent-clipped=0.0 +2024-09-01 02:06:47,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=105850.66666666667, ans=0.0 +2024-09-01 02:08:17,425 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 100, loss[loss=0.2393, simple_loss=0.2098, pruned_loss=0.0883, ctc_loss=0.1748, over 19159.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.2252, pruned_loss=0.1023, ctc_loss=0.1953, over 1475225.92 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:32,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=106010.66666666667, ans=0.025 +2024-09-01 02:08:32,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:08:50,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=106064.0, ans=0.5 +2024-09-01 02:09:18,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=106117.33333333333, ans=0.2 +2024-09-01 02:09:19,436 INFO [dysarthria_finetune.py:1435] (3/4) (13315538944, 34072559616) +2024-09-01 02:09:19,436 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:09:19,480 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:09:34,443 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 0, loss[loss=0.2592, simple_loss=0.2218, pruned_loss=0.1007, ctc_loss=0.1952, over 18587.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.2218, pruned_loss=0.1007, ctc_loss=0.1952, over 18587.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:09:34,443 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:09:49,238 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.7520, 4.0905, 2.7166, 1.9140], device='cuda:3') +2024-09-01 02:09:58,833 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 10, validation: loss=0.2363, simple_loss=0.211, pruned_loss=0.08786, ctc_loss=0.1591, over 1073944.00 frames. 
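The recurring `optim.py` WARNING lines report five grad-norm quantiles (min / 25% / median / 75% / max) plus the clipping threshold; throughout this run the threshold tracks roughly `Clipping_scale` (2.0) times the reported median (e.g. 2 × 2.324e+02 ≈ 4.647e+02), and `percent-clipped=0.0` means no batch exceeded it. A minimal sketch of that relationship, assuming a sliding window of recent norms — a hypothetical helper, not icefall's actual `optim.py`:

```python
import torch

def clip_by_recent_median(parameters, norm_history, clipping_scale=2.0, window=128):
    """Hedged sketch: clip the global grad norm at clipping_scale x the median
    of recently observed grad norms, the relationship the 'grad-norm quartiles
    ... threshold=...' WARNING lines above appear to report."""
    grads = [p.grad.detach() for p in parameters if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.norm() for g in grads]))
    norm_history.append(total_norm.item())
    recent = torch.tensor(norm_history[-window:])        # assumed window size
    quartiles = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()     # 2.0 x median, as in the logs
    if total_norm.item() > threshold:                    # would count toward percent-clipped
        for g in grads:                                  # rescale grads in place
            g.mul_(threshold / (total_norm.item() + 1e-6))
    return quartiles.tolist(), threshold
```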
+2024-09-01 02:09:58,833 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:10:06,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106165.33333333333, ans=0.1 +2024-09-01 02:10:17,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=106165.33333333333, ans=0.0 +2024-09-01 02:10:39,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:10:41,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=106272.0, ans=0.2 +2024-09-01 02:10:43,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=106272.0, ans=0.125 +2024-09-01 02:10:46,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=106272.0, ans=0.2 +2024-09-01 02:11:05,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.56 vs. limit=12.0 +2024-09-01 02:11:07,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=106325.33333333333, ans=0.0 +2024-09-01 02:11:29,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.02 vs. limit=10.0 +2024-09-01 02:11:35,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.98 vs. limit=15.0 +2024-09-01 02:11:36,265 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.880e+02 2.111e+02 2.256e+02 2.412e+02 3.661e+02, threshold=4.511e+02, percent-clipped=0.0 +2024-09-01 02:11:47,308 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 50, loss[loss=0.2803, simple_loss=0.2436, pruned_loss=0.1078, ctc_loss=0.2045, over 19101.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.2222, pruned_loss=0.09829, ctc_loss=0.1942, over 827631.91 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:12:02,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.92 vs. limit=15.0 +2024-09-01 02:12:21,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=15.0 +2024-09-01 02:12:25,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=14.17 vs. limit=15.0 +2024-09-01 02:12:38,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:12:40,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:12:41,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=18.95 vs. 
limit=15.0 +2024-09-01 02:13:08,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.42 vs. limit=6.0 +2024-09-01 02:13:32,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=106645.33333333333, ans=0.05 +2024-09-01 02:13:35,243 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 100, loss[loss=0.259, simple_loss=0.2205, pruned_loss=0.1012, ctc_loss=0.2024, over 19051.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.2216, pruned_loss=0.09689, ctc_loss=0.1918, over 1475773.03 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:14:15,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=106752.0, ans=0.125 +2024-09-01 02:14:30,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.46 vs. limit=10.0 +2024-09-01 02:14:34,897 INFO [dysarthria_finetune.py:1435] (3/4) (13326024704, 34072559616) +2024-09-01 02:14:34,898 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:14:34,954 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:14:48,322 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 0, loss[loss=0.286, simple_loss=0.2392, pruned_loss=0.1148, ctc_loss=0.229, over 18604.00 frames. ], tot_loss[loss=0.286, simple_loss=0.2392, pruned_loss=0.1148, ctc_loss=0.229, over 18604.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:14:48,323 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:15:11,810 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 11, validation: loss=0.2335, simple_loss=0.2098, pruned_loss=0.0867, ctc_loss=0.1618, over 1073944.00 frames. +2024-09-01 02:15:11,811 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:15:57,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=106906.66666666667, ans=0.07 +2024-09-01 02:16:00,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106906.66666666667, ans=0.1 +2024-09-01 02:16:01,157 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.106e+02 2.175e+02 2.350e+02 3.456e+02, threshold=4.351e+02, percent-clipped=0.0 +2024-09-01 02:16:19,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.65 vs. limit=22.5 +2024-09-01 02:16:49,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107013.33333333333, ans=0.125 +2024-09-01 02:22:24,187 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 50, loss[loss=0.2323, simple_loss=0.2107, pruned_loss=0.07996, ctc_loss=0.1795, over 19110.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2124, pruned_loss=0.08917, ctc_loss=0.1816, over 828132.31 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:22:29,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=107120.0, ans=0.0 +2024-09-01 02:22:57,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=107173.33333333333, ans=0.07 +2024-09-01 02:23:33,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=107173.33333333333, ans=0.025 +2024-09-01 02:23:33,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107173.33333333333, ans=0.1 +2024-09-01 02:23:38,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.97 vs. limit=6.0 +2024-09-01 02:24:56,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.69 vs. limit=15.0 +2024-09-01 02:25:09,436 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 100, loss[loss=0.2157, simple_loss=0.1995, pruned_loss=0.07149, ctc_loss=0.1681, over 19127.00 frames. ], tot_loss[loss=0.245, simple_loss=0.2152, pruned_loss=0.09198, ctc_loss=0.1859, over 1475363.18 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 16.0 +2024-09-01 02:25:20,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=107386.66666666667, ans=0.2 +2024-09-01 02:25:36,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:25:52,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.68 vs. limit=10.0 +2024-09-01 02:25:52,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.027e+02 2.133e+02 2.278e+02 3.178e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-09-01 02:26:15,381 INFO [dysarthria_finetune.py:1435] (3/4) (38469632, 34072559616) +2024-09-01 02:26:15,382 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:26:15,462 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:26:37,018 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 0, loss[loss=0.258, simple_loss=0.2231, pruned_loss=0.1014, ctc_loss=0.1955, over 18650.00 frames. ], tot_loss[loss=0.258, simple_loss=0.2231, pruned_loss=0.1014, ctc_loss=0.1955, over 18650.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:37,019 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:27:00,633 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 12, validation: loss=0.2234, simple_loss=0.2042, pruned_loss=0.08189, ctc_loss=0.1554, over 1073944.00 frames. +2024-09-01 02:27:00,633 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:27:06,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.36 vs. 
limit=15.0 +2024-09-01 02:27:09,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.02 vs. limit=22.5 +2024-09-01 02:27:32,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.33 vs. limit=15.0 +2024-09-01 02:28:26,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=107701.33333333333, ans=0.0 +2024-09-01 02:28:53,637 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 50, loss[loss=0.2197, simple_loss=0.2091, pruned_loss=0.07319, ctc_loss=0.1581, over 19037.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2135, pruned_loss=0.09011, ctc_loss=0.1859, over 828666.57 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:28:57,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0 +2024-09-01 02:28:59,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=107808.0, ans=0.125 +2024-09-01 02:29:04,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=107808.0, ans=10.0 +2024-09-01 02:29:15,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125 +2024-09-01 02:29:18,112 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.08 vs. limit=22.5 +2024-09-01 02:29:22,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=12.0 +2024-09-01 02:29:42,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107914.66666666667, ans=0.125 +2024-09-01 02:30:04,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=107968.0, ans=6.0 +2024-09-01 02:30:20,075 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.056e+02 2.167e+02 2.338e+02 2.987e+02, threshold=4.333e+02, percent-clipped=0.0 +2024-09-01 02:30:23,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.64 vs. limit=22.5 +2024-09-01 02:30:43,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=108074.66666666667, ans=0.0 +2024-09-01 02:34:06,101 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 100, loss[loss=0.1993, simple_loss=0.1849, pruned_loss=0.06802, ctc_loss=0.1561, over 19142.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.212, pruned_loss=0.08878, ctc_loss=0.1828, over 1477170.44 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:36:24,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108181.33333333333, ans=0.125 +2024-09-01 02:36:28,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=15.0 +2024-09-01 02:36:42,080 INFO [dysarthria_finetune.py:1435] (3/4) (13317636096, 34072559616) +2024-09-01 02:36:42,080 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:36:42,127 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:36:54,939 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 0, loss[loss=0.2353, simple_loss=0.2058, pruned_loss=0.09363, ctc_loss=0.1727, over 18629.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2058, pruned_loss=0.09363, ctc_loss=0.1727, over 18629.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:54,940 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:37:18,565 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 13, validation: loss=0.2186, simple_loss=0.2014, pruned_loss=0.08061, ctc_loss=0.1543, over 1073944.00 frames. +2024-09-01 02:37:18,566 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:37:46,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.50 vs. limit=15.0 +2024-09-01 02:37:55,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=108282.66666666667, ans=0.0 +2024-09-01 02:38:01,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=108336.0, ans=0.025 +2024-09-01 02:38:43,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108389.33333333333, ans=0.1 +2024-09-01 02:38:54,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=108442.66666666667, ans=0.2 +2024-09-01 02:39:08,428 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 50, loss[loss=0.2346, simple_loss=0.2087, pruned_loss=0.08958, ctc_loss=0.179, over 19050.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2099, pruned_loss=0.08861, ctc_loss=0.1834, over 828311.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:18,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=108496.0, ans=0.2 +2024-09-01 02:39:29,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=108549.33333333333, ans=0.025 +2024-09-01 02:39:30,395 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.005e+02 2.143e+02 2.348e+02 3.224e+02, threshold=4.286e+02, percent-clipped=0.0 +2024-09-01 02:39:53,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=108602.66666666667, ans=0.0 +2024-09-01 02:40:34,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. 
limit=6.0 +2024-09-01 02:40:40,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 02:40:42,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 02:40:56,423 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 100, loss[loss=0.2441, simple_loss=0.2142, pruned_loss=0.09483, ctc_loss=0.1914, over 19095.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2096, pruned_loss=0.08778, ctc_loss=0.1817, over 1474662.24 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:40:57,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108762.66666666667, ans=0.1 +2024-09-01 02:41:10,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108762.66666666667, ans=0.0 +2024-09-01 02:41:12,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108762.66666666667, ans=0.1 +2024-09-01 02:41:15,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 02:41:22,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0 +2024-09-01 02:41:26,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:41:30,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=108816.0, ans=0.0 +2024-09-01 02:41:36,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=108816.0, ans=0.2 +2024-09-01 02:41:43,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=108869.33333333333, ans=0.125 +2024-09-01 02:41:49,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=108869.33333333333, ans=0.5 +2024-09-01 02:41:56,702 INFO [dysarthria_finetune.py:1435] (3/4) (13313441792, 34072559616) +2024-09-01 02:41:56,703 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:41:56,770 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:42:09,605 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 0, loss[loss=0.2681, simple_loss=0.2332, pruned_loss=0.1092, ctc_loss=0.1965, over 18650.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.2332, pruned_loss=0.1092, ctc_loss=0.1965, over 18650.00 frames. 
], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:42:09,606 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:42:27,805 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.3442, 1.2449, 1.9848, 1.7180, 1.7528, 1.8117, 2.0398, 1.3363], + device='cuda:3') +2024-09-01 02:42:33,566 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 14, validation: loss=0.209, simple_loss=0.1966, pruned_loss=0.0763, ctc_loss=0.148, over 1073944.00 frames. +2024-09-01 02:42:33,567 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:42:41,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 02:42:51,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=108917.33333333333, ans=0.05 +2024-09-01 02:43:10,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=108970.66666666667, ans=0.0 +2024-09-01 02:43:19,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109024.0, ans=0.125 +2024-09-01 02:43:22,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109024.0, ans=0.1 +2024-09-01 02:43:32,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=109024.0, ans=0.125 +2024-09-01 02:43:42,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.996e+02 2.096e+02 2.326e+02 2.912e+02, threshold=4.192e+02, percent-clipped=0.0 +2024-09-01 02:43:46,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=14.72 vs. limit=15.0 +2024-09-01 02:44:06,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.82 vs. limit=22.5 +2024-09-01 02:44:10,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.38 vs. limit=15.0 +2024-09-01 02:44:24,656 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 50, loss[loss=0.1907, simple_loss=0.1905, pruned_loss=0.05882, ctc_loss=0.1541, over 19012.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2081, pruned_loss=0.08681, ctc_loss=0.1832, over 829335.16 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:44:43,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:45:09,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=109290.66666666667, ans=0.2 +2024-09-01 02:45:11,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=109290.66666666667, ans=0.025 +2024-09-01 02:45:11,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109290.66666666667, ans=0.1 +2024-09-01 02:45:18,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.30 vs. limit=10.0 +2024-09-01 02:45:22,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=109290.66666666667, ans=0.125 +2024-09-01 02:46:06,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=109397.33333333333, ans=0.1 +2024-09-01 02:46:12,183 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 100, loss[loss=0.1904, simple_loss=0.1892, pruned_loss=0.06296, ctc_loss=0.1426, over 19114.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2053, pruned_loss=0.08485, ctc_loss=0.1787, over 1476363.01 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:46:45,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=109504.0, ans=0.125 +2024-09-01 02:46:56,405 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:47:12,364 INFO [dysarthria_finetune.py:1435] (3/4) (13317636096, 34072559616) +2024-09-01 02:47:12,365 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:47:12,421 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:47:25,787 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 0, loss[loss=0.2133, simple_loss=0.1969, pruned_loss=0.08074, ctc_loss=0.1578, over 18716.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.1969, pruned_loss=0.08074, ctc_loss=0.1578, over 18716.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:47:25,788 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:48:03,607 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 15, validation: loss=0.2059, simple_loss=0.1951, pruned_loss=0.07588, ctc_loss=0.1481, over 1073944.00 frames. 
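Each `Validation on speech` line above reports a full pass over the same 1,073,944 frames, so these lines trace the learning curve directly: validation loss falls from 0.2572 at epoch 8 to 0.2059 by epoch 15. A small hedged parser for logs in exactly this shape — the regex assumes only the line format visible here:

```python
import re

# Hypothetical helper for extracting the validation curve from logs
# in this format; field names mirror the log lines above.
VAL_RE = re.compile(
    r"Validation on speech: Epoch (\d+), validation: loss=([\d.]+), "
    r"simple_loss=([\d.]+), pruned_loss=([\d.]+), ctc_loss=([\d.]+)"
)

def validation_curve(log_text):
    """Return [(epoch, loss, simple_loss, pruned_loss, ctc_loss), ...]."""
    return [
        (int(m.group(1)),) + tuple(float(m.group(i)) for i in range(2, 6))
        for m in VAL_RE.finditer(log_text)
    ]

# e.g. validation_curve(open("finetune.log").read()) on this run yields
# the monotone-ish descent from 0.2572 (epoch 8) toward 0.2059 (epoch 15).
```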
+2024-09-01 02:48:03,608 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:48:05,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-09-01 02:48:20,625 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 1.965e+02 2.102e+02 2.301e+02 3.159e+02, threshold=4.205e+02, percent-clipped=0.0 +2024-09-01 02:48:45,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109658.66666666667, ans=0.125 +2024-09-01 02:51:15,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=109818.66666666667, ans=0.125 +2024-09-01 02:51:41,396 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 50, loss[loss=0.1936, simple_loss=0.1821, pruned_loss=0.06909, ctc_loss=0.1566, over 19179.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2016, pruned_loss=0.0824, ctc_loss=0.1765, over 827713.24 frames. ], batch size: 103, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:51:51,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=15.0 +2024-09-01 02:53:50,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.53 vs. limit=6.0 +2024-09-01 02:54:24,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=110032.0, ans=0.125 +2024-09-01 02:54:35,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=110032.0, ans=0.0 +2024-09-01 02:55:28,639 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 100, loss[loss=0.1899, simple_loss=0.1831, pruned_loss=0.06653, ctc_loss=0.1513, over 19073.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2005, pruned_loss=0.08164, ctc_loss=0.1744, over 1475236.97 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:55:41,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=110138.66666666667, ans=0.2 +2024-09-01 02:55:44,088 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 1.961e+02 2.099e+02 2.266e+02 2.969e+02, threshold=4.197e+02, percent-clipped=0.0 +2024-09-01 02:56:07,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.10 vs. limit=6.0 +2024-09-01 02:56:59,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.63 vs. 
limit=12.0 +2024-09-01 02:57:16,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=110245.33333333333, ans=0.025 +2024-09-01 02:57:18,310 INFO [dysarthria_finetune.py:1435] (3/4) (13317636096, 34072559616) +2024-09-01 02:57:18,311 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:57:18,356 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 02:57:33,285 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 0, loss[loss=0.2518, simple_loss=0.2202, pruned_loss=0.1011, ctc_loss=0.1999, over 18560.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.2202, pruned_loss=0.1011, ctc_loss=0.1999, over 18560.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:57:33,286 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:58:10,352 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 16, validation: loss=0.2065, simple_loss=0.1951, pruned_loss=0.07751, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 02:58:10,353 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 02:58:27,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110293.33333333333, ans=0.0 +2024-09-01 02:58:49,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=110346.66666666667, ans=0.2 +2024-09-01 02:59:29,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=110400.0, ans=0.025 +2024-09-01 02:59:40,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.11 vs. limit=15.0 +2024-09-01 02:59:44,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=110453.33333333333, ans=0.025 +2024-09-01 02:59:49,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=110453.33333333333, ans=0.07 +2024-09-01 02:59:52,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.85 vs. limit=15.0 +2024-09-01 02:59:54,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=110506.66666666667, ans=0.0 +2024-09-01 03:00:21,807 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 50, loss[loss=0.2227, simple_loss=0.2109, pruned_loss=0.0807, ctc_loss=0.1812, over 19044.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2011, pruned_loss=0.08314, ctc_loss=0.1775, over 827661.95 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:01:16,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=110666.66666666667, ans=0.025 +2024-09-01 03:01:25,494 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.686e+02 1.971e+02 2.093e+02 2.277e+02 2.936e+02, threshold=4.187e+02, percent-clipped=0.0 +2024-09-01 03:01:40,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.75 vs. 
limit=10.0 +2024-09-01 03:01:47,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=9.65 vs. limit=15.0 +2024-09-01 03:02:04,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-09-01 03:02:24,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110773.33333333333, ans=0.125 +2024-09-01 03:02:24,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.24 vs. limit=15.0 +2024-09-01 03:02:27,247 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 100, loss[loss=0.199, simple_loss=0.1997, pruned_loss=0.06824, ctc_loss=0.1546, over 19120.00 frames. ], tot_loss[loss=0.216, simple_loss=0.1993, pruned_loss=0.08135, ctc_loss=0.1734, over 1474935.70 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:02:28,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110826.66666666667, ans=0.1 +2024-09-01 03:02:41,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110826.66666666667, ans=0.1 +2024-09-01 03:02:41,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.83 vs. limit=10.0 +2024-09-01 03:03:29,006 INFO [dysarthria_finetune.py:1435] (3/4) (13321830400, 34072559616) +2024-09-01 03:03:29,007 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:03:29,049 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 03:03:41,384 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 0, loss[loss=0.2884, simple_loss=0.2418, pruned_loss=0.1234, ctc_loss=0.2209, over 18583.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.2418, pruned_loss=0.1234, ctc_loss=0.2209, over 18583.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:03:41,385 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:03:45,284 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.5985, 3.1697, 2.4082, 1.6290], device='cuda:3') +2024-09-01 03:03:48,131 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([7.1225, 6.3742, 6.0682, 5.8848], device='cuda:3') +2024-09-01 03:04:05,406 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 17, validation: loss=0.1943, simple_loss=0.1886, pruned_loss=0.07183, ctc_loss=0.1409, over 1073944.00 frames. 
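The paired numbers logged around `Empty cache: before and after` (e.g. `(13321830400, 34072559616)` followed by `(28926738432, 34072559616)`) are consistent with `(free_bytes, total_bytes)` on a ~32 GB device, sampled before and after releasing PyTorch's allocator cache. A sketch of that pattern, under that assumed interpretation:

```python
import torch

def log_free_memory(tag, device="cuda:3"):
    """Print (free_bytes, total_bytes) for the device, mirroring the
    '(13321830400, 34072559616)' pairs in the log (assumed meaning)."""
    free, total = torch.cuda.mem_get_info(torch.device(device))
    print(f"{tag}: ({free}, {total})")

log_free_memory("before")
torch.cuda.empty_cache()   # return cached allocator blocks to the driver
log_free_memory("after")   # free memory jumps, as in the log lines above
```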
+2024-09-01 03:04:05,407 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 03:04:06,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110981.33333333333, ans=0.1 +2024-09-01 03:04:22,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=110981.33333333333, ans=0.0 +2024-09-01 03:05:14,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.45 vs. limit=12.0 +2024-09-01 03:05:45,143 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 1.958e+02 2.075e+02 2.282e+02 2.777e+02, threshold=4.150e+02, percent-clipped=0.0 +2024-09-01 03:05:46,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111194.66666666667, ans=0.1 +2024-09-01 03:05:51,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=111194.66666666667, ans=0.0 +2024-09-01 03:05:56,269 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 50, loss[loss=0.2272, simple_loss=0.2052, pruned_loss=0.08853, ctc_loss=0.1803, over 18982.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.1959, pruned_loss=0.0787, ctc_loss=0.1702, over 827806.80 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:06:06,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111248.0, ans=0.1 +2024-09-01 03:06:08,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-09-01 03:06:13,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=15.0 +2024-09-01 03:06:15,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111248.0, ans=0.125 +2024-09-01 03:06:33,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111301.33333333333, ans=0.125 +2024-09-01 03:07:10,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111408.0, ans=0.1 +2024-09-01 03:07:12,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=111408.0, ans=0.125 +2024-09-01 03:07:14,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111408.0, ans=0.0 +2024-09-01 03:07:19,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.22 vs. limit=15.0 +2024-09-01 03:07:33,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=111461.33333333333, ans=0.025 +2024-09-01 03:07:45,186 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 100, loss[loss=0.1815, simple_loss=0.1828, pruned_loss=0.06018, ctc_loss=0.1497, over 19078.00 frames. 
], tot_loss[loss=0.2084, simple_loss=0.1947, pruned_loss=0.0776, ctc_loss=0.1672, over 1476033.73 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 03:08:18,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111568.0, ans=0.0 +2024-09-01 03:08:18,378 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:08:44,490 INFO [dysarthria_finetune.py:1435] (3/4) (13344899072, 34072559616) +2024-09-01 03:08:44,490 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:08:44,520 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 03:08:52,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 03:09:16,218 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 0, loss[loss=0.2415, simple_loss=0.2135, pruned_loss=0.09544, ctc_loss=0.1967, over 18613.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2135, pruned_loss=0.09544, ctc_loss=0.1967, over 18613.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:09:16,218 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:09:39,625 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 18, validation: loss=0.1961, simple_loss=0.1886, pruned_loss=0.07291, ctc_loss=0.1441, over 1073944.00 frames. +2024-09-01 03:09:39,626 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 03:09:45,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111669.33333333333, ans=0.0 +2024-09-01 03:10:14,933 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.703e+02 1.913e+02 2.060e+02 2.285e+02 3.151e+02, threshold=4.120e+02, percent-clipped=0.0 +2024-09-01 03:10:20,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111722.66666666667, ans=0.0 +2024-09-01 03:10:59,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=111829.33333333333, ans=0.2 +2024-09-01 03:11:29,063 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 50, loss[loss=0.2061, simple_loss=0.1941, pruned_loss=0.07372, ctc_loss=0.1768, over 19004.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.19, pruned_loss=0.07554, ctc_loss=0.1647, over 828768.32 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:11:48,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.55 vs. 
limit=15.0 +2024-09-01 03:11:58,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=111989.33333333333, ans=0.95 +2024-09-01 03:13:08,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=112149.33333333333, ans=0.125 +2024-09-01 03:13:31,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=112149.33333333333, ans=0.0 +2024-09-01 03:13:44,696 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 100, loss[loss=0.1749, simple_loss=0.1696, pruned_loss=0.06203, ctc_loss=0.1404, over 19084.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.1895, pruned_loss=0.07561, ctc_loss=0.1646, over 1476677.05 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:13:50,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=112202.66666666667, ans=0.0 +2024-09-01 03:14:19,249 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 1.898e+02 2.020e+02 2.262e+02 2.800e+02, threshold=4.040e+02, percent-clipped=0.0 +2024-09-01 03:14:27,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=112309.33333333333, ans=0.025 +2024-09-01 03:14:37,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112309.33333333333, ans=0.1 +2024-09-01 03:14:43,277 INFO [dysarthria_finetune.py:1435] (3/4) (12157911040, 34072559616) +2024-09-01 03:14:43,278 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:14:43,304 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 03:14:55,811 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 0, loss[loss=0.2695, simple_loss=0.2295, pruned_loss=0.1123, ctc_loss=0.2121, over 18562.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.2295, pruned_loss=0.1123, ctc_loss=0.2121, over 18562.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:14:55,812 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:15:42,621 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 19, validation: loss=0.1928, simple_loss=0.1862, pruned_loss=0.07146, ctc_loss=0.1413, over 1073944.00 frames. +2024-09-01 03:15:42,622 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 03:16:01,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.58 vs. limit=15.0 +2024-09-01 03:17:03,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=112405.33333333333, ans=0.125 +2024-09-01 03:17:03,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=14.64 vs. 
limit=15.0 +2024-09-01 03:17:08,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=112405.33333333333, ans=10.0 +2024-09-01 03:17:19,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=14.93 vs. limit=15.0 +2024-09-01 03:18:01,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-09-01 03:18:18,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112512.0, ans=0.0 +2024-09-01 03:18:24,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112512.0, ans=0.0 +2024-09-01 03:19:02,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-09-01 03:19:34,383 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 50, loss[loss=0.1746, simple_loss=0.1673, pruned_loss=0.06108, ctc_loss=0.1497, over 19015.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.1894, pruned_loss=0.07542, ctc_loss=0.165, over 829365.51 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:20:06,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112618.66666666667, ans=0.0 +2024-09-01 03:20:21,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=112672.0, ans=0.0 +2024-09-01 03:20:32,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=112672.0, ans=0.125 +2024-09-01 03:21:50,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112725.33333333333, ans=0.1 +2024-09-01 03:22:34,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 1.922e+02 2.090e+02 2.243e+02 2.725e+02, threshold=4.180e+02, percent-clipped=0.0 +2024-09-01 03:23:02,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112832.0, ans=0.0 +2024-09-01 03:23:34,284 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 100, loss[loss=0.1821, simple_loss=0.1724, pruned_loss=0.06553, ctc_loss=0.1519, over 19083.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.1903, pruned_loss=0.0761, ctc_loss=0.1645, over 1476389.98 frames. 
], batch size: 133, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:24:01,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112885.33333333333, ans=0.125 +2024-09-01 03:24:54,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=112992.0, ans=0.125 +2024-09-01 03:24:54,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112992.0, ans=0.1 +2024-09-01 03:25:13,449 INFO [dysarthria_finetune.py:1435] (3/4) (95092736, 34072559616) +2024-09-01 03:25:13,450 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:25:13,528 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 03:25:27,033 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 0, loss[loss=0.2494, simple_loss=0.2148, pruned_loss=0.102, ctc_loss=0.1999, over 18436.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.2148, pruned_loss=0.102, ctc_loss=0.1999, over 18436.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:25:27,033 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:26:10,756 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 20, validation: loss=0.19, simple_loss=0.1838, pruned_loss=0.07041, ctc_loss=0.1385, over 1073944.00 frames. +2024-09-01 03:26:10,757 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14327MB +2024-09-01 03:26:13,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=15.0 +2024-09-01 03:27:26,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.96 vs. limit=22.5 +2024-09-01 03:28:24,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113200.0, ans=0.1 +2024-09-01 03:28:30,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=113200.0, ans=0.0 +2024-09-01 03:29:26,712 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 50, loss[loss=0.1842, simple_loss=0.1833, pruned_loss=0.06356, ctc_loss=0.1451, over 18942.00 frames. ], tot_loss[loss=0.202, simple_loss=0.1878, pruned_loss=0.0754, ctc_loss=0.1638, over 827999.75 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:30:05,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 1.917e+02 2.046e+02 2.200e+02 2.791e+02, threshold=4.093e+02, percent-clipped=0.0 +2024-09-01 03:30:39,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.74 vs. 
limit=15.0 +2024-09-01 03:31:20,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113413.33333333333, ans=0.125 +2024-09-01 03:31:28,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=113413.33333333333, ans=0.2 +2024-09-01 03:31:57,633 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.571e-03 +2024-09-01 03:33:06,755 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 100, loss[loss=0.1494, simple_loss=0.1504, pruned_loss=0.04917, ctc_loss=0.1252, over 19171.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.1865, pruned_loss=0.0743, ctc_loss=0.1611, over 1475487.52 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:33:50,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=113626.66666666667, ans=0.125 +2024-09-01 03:34:32,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 03:34:32,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=113626.66666666667, ans=0.0 +2024-09-01 03:34:36,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.99 vs. limit=15.0 +2024-09-01 03:34:56,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.93 vs. limit=15.0 +2024-09-01 03:35:10,301 INFO [dysarthria_finetune.py:1435] (3/4) (13292470272, 34072559616) +2024-09-01 03:35:10,302 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:35:10,367 INFO [dysarthria_finetune.py:1440] (3/4) (28926738432, 34072559616) +2024-09-01 03:35:10,367 INFO [dysarthria_finetune.py:1442] (3/4) Done! 
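Everything after the final `Done!` line is repository payload rather than log output: TensorBoard event files and per-epoch checkpoints stored as Git LFS pointers (`version`/`oid`/`size` triples), each checkpoint roughly 1.05 GB. After resolving a pointer with `git lfs pull`, loading one might look like the sketch below; the `"model"` key layout is an assumption about the checkpoint dict, not something the diff confirms:

```python
import torch

# Hypothetical usage sketch: epoch-*.pt below are Git LFS pointers until
# fetched; once resolved, each is assumed to be a checkpoint dict whose
# "model" entry holds the state_dict (falling back to a bare state_dict).
ckpt = torch.load(
    "zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-20.pt",
    map_location="cpu",
)
state_dict = ckpt.get("model", ckpt)
print(sum(v.numel() for v in state_dict.values() if hasattr(v, "numel")))
```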
diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724927868.cdr2655.int.cedar.computecanada.ca.5363.0 b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724927868.cdr2655.int.cedar.computecanada.ca.5363.0 new file mode 100644 index 0000000000000000000000000000000000000000..c097c527475f89cf95cb7a6cf966385edaecd6fe --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724927868.cdr2655.int.cedar.computecanada.ca.5363.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5fe84fca2e197b41eb13f237e284f2d5085db33c780ece42f606319f41c7486 +size 88 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724956893.cdr2558.int.cedar.computecanada.ca.4947.0 b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724956893.cdr2558.int.cedar.computecanada.ca.4947.0 new file mode 100644 index 0000000000000000000000000000000000000000..bc46c0d057bc950fc2b659b73d1ad6b76e52e0e7 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724956893.cdr2558.int.cedar.computecanada.ca.4947.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:676eac1dc5de20b5ff933c687fe80ab817bafe411db5fb341d04b8217c826d14 +size 88 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725077494.cdr2647.int.cedar.computecanada.ca.9011.0 b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725077494.cdr2647.int.cedar.computecanada.ca.9011.0 new file mode 100644 index 0000000000000000000000000000000000000000..3795359141aa24d0aa086c6985f044a41ca1562c --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725077494.cdr2647.int.cedar.computecanada.ca.9011.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a718b7150cc52b776e5f804ae62b41cfd8c566dbf9e14176188b11bce7343e72 +size 88 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725155438.cdr2535.int.cedar.computecanada.ca.964213.0 b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725155438.cdr2535.int.cedar.computecanada.ca.964213.0 new file mode 100644 index 0000000000000000000000000000000000000000..f4202b7e9755330774c1aa1723a8827c61ef2d06 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725155438.cdr2535.int.cedar.computecanada.ca.964213.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b899685ad2b429a7e59a6d3c4dae34bfa331c704a8dd948cf778ff4087026522 +size 88 diff --git a/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725167340.cdr2653.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725167340.cdr2653.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..15d02f125722a2d8cfb3909a8a0c896a04a60946 --- /dev/null +++ b/zipformer/finetuned/ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725167340.cdr2653.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a123bfe2910af523afb2c9b84d456185eef77c2685e8c038094c49d74ae0886 +size 41711 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/best-train-loss.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/best-train-loss.pt new file 
mode 100644 index 0000000000000000000000000000000000000000..0177502cd371b3469ae2c12d59dcba09600d6cd8 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b7d7b69128a29cd54032dbe2295fad7a0b21a80a76c9a62afefee023e091049 +size 1053873294 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/best-valid-loss.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..0177502cd371b3469ae2c12d59dcba09600d6cd8 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b7d7b69128a29cd54032dbe2295fad7a0b21a80a76c9a62afefee023e091049 +size 1053873294 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-1.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..d7fcb15cc12a4cc0361b090f485b4a592246c81a --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:34acbe2c712fbb8bc34842394a734a02020b9b4c67e6991c2af5732aadd9659a +size 1053870013 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-10.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..b0e2f7b0e14a855868e1b83b8089959d20f8dbb0 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-10.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce0231004222fac39cea9d10effba8d343dab92391554f4cdd7fbf8f1cbddfee +size 1053872846 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-11.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..c112a1a1bb35e7f724e183bd44d42c74d553aaf5 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-11.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44e33dec6d61d25f8373c34f321f82146f2614edc1747effd1ffaf9c6c7e6066 +size 1053872846 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-12.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..43d0a9600922402067acd59a92ebcfe98a4599a0 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be18917bdaf5b2e70eee21750e4869203bec58963b2e03d9a44b8003e1161664 +size 1053872910 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-13.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-13.pt new file mode 100644 index 0000000000000000000000000000000000000000..083471f7566585f171e4e3a1947f0c98d0204b06 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10dd1991a8636dca053e72899402eaf8e2916ec9628b416644cc813c737710b2 +size 1053872974 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-14.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..54c0fc37f043506a2d5a27731dde8b871d1d63e7 --- /dev/null +++ 
b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc414f73f1dc6275731140be8e9cf16a4e5903c59d8997ccacf56e713d952720 +size 1053873038 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-15.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-15.pt new file mode 100644 index 0000000000000000000000000000000000000000..84646b52c72cc90101f64c6f13eabfef22ca8055 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-15.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af49a1e204647024034be1b133409fbe678d8c44854259a83fa8f8cf17df5f79 +size 1053873038 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-16.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..da07a366f562e57397925ad5b504af5f97c71a88 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db9479d208000bf6d7bbc1f9bef59e5876e601da9e3cea133af95c019519aaee +size 1053873102 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-17.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..d39058d26f4f6ca4bfeab5360599bd27bdbc824b --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8475f6a8aca8f70c999a9ab322cd79c6eb84b8a81ee29bfa786377952a37695a +size 1053873166 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-18.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..8ea44cb96cce755b7d887c07b65584c19ad76463 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8dbfa68c582b6a11463e0c31c8adb1bc3a88f5c27e3f72955459dd08ba1d261c +size 1053873166 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-19.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..0e5f144e4c3ec577e832cd7f67a8f2f5b78675ca --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59ceffa28268612ec62c94f48d9c3c72793ac115878ffb42c264156fe6a787ec +size 1053873230 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-2.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..14b773b347eba8bfc244434e7bbf15cc887fa0d9 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c134b1ee04fd87ec48624142022f1a7df672bf1b09fc3b9864215dcca944949e +size 1053870141 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-20.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..0177502cd371b3469ae2c12d59dcba09600d6cd8 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b7d7b69128a29cd54032dbe2295fad7a0b21a80a76c9a62afefee023e091049 +size 
1053873294 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-3.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..72ce70db1abb5d80700d7db40cebceb183358835 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-3.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:32caab33b6111fc798d9442e1d6509c370101e9e2f23745f6ae45328bfdf76a3 +size 1053870205 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-4.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-4.pt new file mode 100644 index 0000000000000000000000000000000000000000..08a00b4a467243f102c416ded6fc0d4a856ffbdb --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc4e8461ee21bf819d478d7fe467a62d9c09c386ecc907f2cfa50551e3a497cd +size 1053870205 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-5.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..09f60562327a4275b250f1ee485c7eb5551417d4 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6099ebba17037a9b4efb7c84dc2e6093319fbaf7127d482c07fe17752f964823 +size 1053870269 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-6.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..43a830988d4a00b42d0e77c69295fd3551ebdc56 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd47b496cd4017c9c93f73672307d3dff16c22c48d9a4b6b7870646ce4b5b1b6 +size 1053870333 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-7.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..7d146c96d434e8f81be03c9ed98924a886d37bb7 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:678691531440493c61429ba770daca9b99b882c6f97389ffeca828e8c644aaa5 +size 1053870397 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-8.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-8.pt new file mode 100644 index 0000000000000000000000000000000000000000..5ab65448ce9af0ab97c1edec62b55999540e23f4 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fdb949b9b780343945bbddd43a28016b5b30810c7b28c6f7bb267f1a56862cdb +size 1053870397 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-9.pt b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-9.pt new file mode 100644 index 0000000000000000000000000000000000000000..0e12f1499eaaae605e27676a2ba01088f682f01b --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c91f8829402f0d6e4fa2058ef22ce7a956edd6cc4aecb4f5feaa13bb8306bb7 +size 1053870461 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-15-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-15-0 new file mode 100644 index 
0000000000000000000000000000000000000000..c441bdd8368635dfa39084e3eb7362d41468bf66 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-15-0 @@ -0,0 +1,4 @@ +2024-08-25 10:42:15,884 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-25 10:42:16,124 INFO [dysarthria_finetune.py:1214] (0/4) (33735507968, 34072559616) +2024-08-25 10:42:16,124 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-25 10:42:16,599 INFO [dysarthria_finetune.py:1219] (0/4) (33427226624, 34072559616) diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-1 new file mode 100644 index 0000000000000000000000000000000000000000..fb800f71a9810d75eae94e6d9605b35edb94e93e --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-1 @@ -0,0 +1,9 @@ +2024-08-25 10:42:16,068 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-25 10:42:26,109 INFO [dysarthria_finetune.py:1214] (1/4) (32783400960, 34072559616) +2024-08-25 10:42:26,109 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-25 10:42:26,520 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-25 10:42:26,521 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-25 10:42:26,524 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 
'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-25 10:42:26,524 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-25 10:42:27,200 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-25 10:42:27,462 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-3 new file mode 100644 index 0000000000000000000000000000000000000000..3777d15df51cc2deb6a650fe66c0657cfbcfabfd --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-25-10-42-16-3 @@ -0,0 +1,9 @@ +2024-08-25 10:42:16,055 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-25 10:42:16,055 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-25 10:42:16,056 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-25 10:42:16,579 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616) +2024-08-25 10:42:16,580 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-25 10:42:24,705 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': 
PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-25 10:42:24,705 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-25 10:42:25,375 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-25 10:42:25,375 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-0 new file mode 100644 index 0000000000000000000000000000000000000000..991e34ec8a6ea1b26c9c8439c98d85959fda917f --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-0 @@ -0,0 +1,4 @@ +2024-08-27 06:19:57,547 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-27 06:20:01,938 INFO [dysarthria_finetune.py:1214] (0/4) (33414643712, 34072559616) +2024-08-27 06:20:01,939 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-27 06:20:02,467 INFO [dysarthria_finetune.py:1219] (0/4) (33106362368, 34072559616) diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-1 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-2 new file mode 100644 index 0000000000000000000000000000000000000000..68e084f8cb12fe693cc6dbbcdeda6dbe7f5c1bf4 --- /dev/null +++ 
b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-2 @@ -0,0 +1,9 @@ +2024-08-27 06:19:57,489 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-27 06:20:01,008 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-27 06:20:01,008 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-27 06:20:01,365 INFO [dysarthria_finetune.py:1219] (2/4) (33748090880, 34072559616) +2024-08-27 06:20:01,366 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-27 06:20:05,207 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2652.int.cedar.computecanada.ca', 'IP address': '172.16.146.89'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-27 06:20:05,208 INFO [dysarthria_finetune.py:1243] (2/4) About 
to create model +2024-08-27 06:20:05,885 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511 +2024-08-27 06:20:05,885 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-3 new file mode 100644 index 0000000000000000000000000000000000000000..f881b4f7d89d1ef93a39af9e0bcfcc498367e2e0 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-27-06-19-53-3 @@ -0,0 +1,9 @@ +2024-08-27 06:19:57,690 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-27 06:20:01,895 INFO [dysarthria_finetune.py:1214] (3/4) (33427226624, 34072559616) +2024-08-27 06:20:01,896 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-27 06:20:02,467 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616) +2024-08-27 06:20:02,562 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-27 06:20:05,763 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2652.int.cedar.computecanada.ca', 'IP address': '172.16.146.89'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-27 06:20:05,763 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-27 06:20:06,437 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-27 06:20:06,437 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-0 new file mode 100644 index 0000000000000000000000000000000000000000..7c01a52d80fa3be6ecb2643db495733184ffbcbb --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-0 @@ -0,0 +1,11 @@ +2024-08-29 08:12:43,667 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-29 08:12:43,907 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-29 08:12:43,907 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-29 08:12:44,905 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-29 08:12:54,340 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 
'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-29 08:12:55,427 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511 +2024-08-29 08:12:55,979 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 08:12:57,637 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-29 08:12:58,871 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-1 new file mode 100644 index 0000000000000000000000000000000000000000..ab7b6a2422a9933fb6dffb6767e7cd4d7cea1dae --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-1 @@ -0,0 +1,11 @@ +2024-08-29 08:12:43,926 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-29 08:12:43,951 INFO [dysarthria_finetune.py:1214] (1/4) (33735507968, 34072559616) +2024-08-29 08:12:43,951 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-29 08:12:44,907 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-29 08:12:54,336 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': 
'/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-29 08:12:55,415 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-29 08:12:55,415 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 08:12:56,638 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-29 08:13:00,540 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-2 new file mode 100644 index 0000000000000000000000000000000000000000..3c385148017a62f4989f6473772f422c469d48aa --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-2 @@ -0,0 +1,11 @@ +2024-08-29 08:12:43,914 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-29 08:12:43,951 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-29 08:12:43,951 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-29 08:12:44,898 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616) +2024-08-29 08:12:54,335 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 
'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-29 08:12:55,416 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511 +2024-08-29 08:12:55,416 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 08:12:56,648 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-29 08:13:00,542 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-3 new file mode 
100644 index 0000000000000000000000000000000000000000..9c4f1f879ae664093bafb0710aa4a48d5d7427b9 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-08-12-43-3 @@ -0,0 +1,11 @@ +2024-08-29 08:12:43,984 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-29 08:12:43,996 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-29 08:12:43,996 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-29 08:12:44,905 INFO [dysarthria_finetune.py:1219] (3/4) (32783400960, 34072559616) +2024-08-29 08:12:54,336 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-29 08:12:54,721 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 
'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 08:12:54,722 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-29 08:12:55,504 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-29 08:12:55,504 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 08:12:56,699 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-29 08:13:00,540 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-0 new file mode 100644 index 0000000000000000000000000000000000000000..4cdbf1ba07d9a1d6dfcbd1b3bc75cea3951d1e3c --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-0 @@ -0,0 +1,11 @@ +2024-08-29 15:49:51,145 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-29 15:49:51,348 INFO [dysarthria_finetune.py:1214] (0/4) (33735507968, 34072559616) +2024-08-29 15:49:51,349 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-29 15:49:51,857 INFO [dysarthria_finetune.py:1219] (0/4) (33427226624, 34072559616) +2024-08-29 15:49:51,863 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-29 15:49:52,254 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 
512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 15:49:52,254 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-29 15:49:52,924 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511 +2024-08-29 15:49:53,465 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 15:49:55,209 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-29 15:51:26,934 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-1 new file mode 100644 index 0000000000000000000000000000000000000000..f73ebd5523e5cc97d12d73fc6d548f0d32f59768 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-1 @@ -0,0 +1,11 @@ +2024-08-29 15:49:51,308 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-29 15:49:52,108 INFO [dysarthria_finetune.py:1214] (1/4) (33106362368, 34072559616) +2024-08-29 15:49:52,109 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-29 15:49:52,476 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616) +2024-08-29 15:49:52,476 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-29 15:49:52,479 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 15:49:52,479 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-29 15:49:53,158 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-29 15:49:53,158 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 15:49:54,426 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-29 15:51:27,097 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-2 new file mode 100644 index 0000000000000000000000000000000000000000..abc2cad0b9babdbbec4529fead26301224817d88 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-2 @@ -0,0 +1,11 @@ +2024-08-29 15:49:51,303 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-29 15:49:51,304 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-29 15:49:51,304 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-29 15:49:51,867 INFO [dysarthria_finetune.py:1219] (2/4) (33427226624, 34072559616) +2024-08-29 15:49:51,868 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-29 15:49:52,253 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 
'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 15:49:52,254 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-29 15:49:52,949 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511 +2024-08-29 15:49:52,949 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 15:49:54,160 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-29 15:51:27,033 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-3 new file mode 100644 index 0000000000000000000000000000000000000000..78831247904b95aed37044a7d651f9467fc4e786 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-29-15-49-51-3 @@ -0,0 +1,11 @@ +2024-08-29 15:49:51,296 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-29 15:51:23,354 INFO [dysarthria_finetune.py:1214] (3/4) (32783400960, 34072559616) 
+2024-08-29 15:51:23,354 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-29 15:51:23,731 INFO [dysarthria_finetune.py:1219] (3/4) (32783400960, 34072559616) +2024-08-29 15:51:23,732 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-29 15:51:23,735 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 15:51:23,735 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-29 15:51:24,426 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-29 15:51:24,426 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from 
/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 15:51:25,558 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-29 15:51:26,946 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-15-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-15-0 new file mode 100644 index 0000000000000000000000000000000000000000..1d6b3e282bf6e6df314d63b6369ae31e3fb6e79b --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-15-0 @@ -0,0 +1,11 @@ +2024-08-30 12:40:15,977 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-30 12:40:16,169 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-30 12:40:16,169 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-30 12:40:16,959 INFO [dysarthria_finetune.py:1219] (0/4) (33106362368, 34072559616) +2024-08-30 12:40:22,401 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-30 12:40:22,404 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2547.int.cedar.computecanada.ca', 'IP address': '172.16.145.240'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 
'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 12:40:22,404 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-30 12:40:50,753 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511 +2024-08-30 12:40:51,300 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 12:49:26,185 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-30 12:49:32,120 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-1 new file mode 100644 index 0000000000000000000000000000000000000000..31cbd4e8532566bb6d6e890fe5388a89abeba1af --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-1 @@ -0,0 +1,11 @@ +2024-08-30 12:40:16,177 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-30 12:40:18,742 INFO [dysarthria_finetune.py:1214] (1/4) (32783400960, 34072559616) +2024-08-30 12:40:18,742 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-30 12:40:19,123 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-30 12:40:19,123 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-30 12:40:19,126 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2547.int.cedar.computecanada.ca', 'IP address': '172.16.145.240'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 
'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 12:40:19,126 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-30 12:40:50,762 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-30 12:40:50,762 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 12:49:25,910 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-30 12:49:32,120 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-2 new file mode 100644 index 0000000000000000000000000000000000000000..187f880708616ae334368dc3da58e5d25bd38bf9 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-2 @@ -0,0 +1,11 @@ +2024-08-30 12:40:16,171 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-30 12:40:16,214 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-30 12:40:16,214 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-30 12:40:16,955 INFO [dysarthria_finetune.py:1219] (2/4) (33106362368, 34072559616) +2024-08-30 12:40:16,955 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-30 12:40:17,100 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': 
'/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2547.int.cedar.computecanada.ca', 'IP address': '172.16.145.240'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 12:40:17,100 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-30 12:40:50,764 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511 +2024-08-30 12:40:50,764 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 12:49:25,941 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-30 12:49:32,124 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-3 new file mode 100644 index 0000000000000000000000000000000000000000..5b02a552652f726798143bec6a7136915b5cc7f6 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-12-40-16-3 @@ -0,0 +1,11 @@ +2024-08-30 12:40:16,169 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-30 12:40:16,214 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-30 12:40:16,214 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-30 12:40:16,960 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616) +2024-08-30 12:40:16,960 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-30 12:40:17,099 INFO 
[dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2547.int.cedar.computecanada.ca', 'IP address': '172.16.145.240'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 12:40:17,099 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-30 12:40:50,732 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-30 12:40:50,733 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 12:49:25,933 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-30 12:49:32,120 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-0 
b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-0 new file mode 100644 index 0000000000000000000000000000000000000000..e8e52b502a95eab2e2c29c7ae4cfdf691a2d8299 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-0 @@ -0,0 +1,11 @@ +2024-08-30 21:13:40,655 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-30 21:13:40,845 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-30 21:13:40,845 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-30 21:13:41,615 INFO [dysarthria_finetune.py:1219] (0/4) (33106362368, 34072559616) +2024-08-30 21:13:41,620 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-30 21:13:41,625 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 
'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 21:13:41,626 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-30 21:13:42,734 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511 +2024-08-30 21:13:43,274 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 21:15:04,102 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-30 21:15:06,552 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-1 new file mode 100644 index 0000000000000000000000000000000000000000..4d4c84d62bde40af8d98181372ecbd2243bc6acc --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-1 @@ -0,0 +1,11 @@ +2024-08-30 21:13:40,853 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-30 21:13:40,889 INFO [dysarthria_finetune.py:1214] (1/4) (33735507968, 34072559616) +2024-08-30 21:13:40,889 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-30 21:13:41,617 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616) +2024-08-30 21:13:41,618 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-30 21:13:41,626 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 
'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 21:13:41,626 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-30 21:13:42,738 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-30 21:13:42,738 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 21:15:03,629 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-30 21:15:06,556 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-2 new file mode 100644 index 0000000000000000000000000000000000000000..073f91970d9bd86e86c3fda43ee9d384afbf4916 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-2 @@ -0,0 +1,11 @@ +2024-08-30 21:13:40,846 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-30 21:14:32,778 INFO [dysarthria_finetune.py:1214] (2/4) (32783400960, 34072559616) +2024-08-30 21:14:32,778 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-30 21:14:33,157 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616) +2024-08-30 21:14:33,158 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-30 21:14:33,160 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': 
PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 21:14:33,160 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-30 21:14:33,856 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511 +2024-08-30 21:14:33,856 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 21:15:03,720 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-30 21:15:06,562 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-3 new file mode 100644 index 0000000000000000000000000000000000000000..20ae2e489ee1b47d483af0bd327e012398e780b5 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-13-40-3 @@ -0,0 +1,11 @@ +2024-08-30 21:13:40,904 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-30 21:13:40,934 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-30 21:13:40,934 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-30 21:13:41,625 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616) +2024-08-30 21:13:41,626 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-30 21:13:41,628 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': 
True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 21:13:41,629 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-30 21:13:42,728 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-30 21:13:42,728 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 21:15:03,576 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-30 21:15:06,551 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-55-13 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-55-13 new file mode 100644 index 0000000000000000000000000000000000000000..9234e73866a032d3e1609af100d71f6fd835bd24 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-30-21-55-13 @@ -0,0 +1,22 @@ +2024-08-30 21:55:13,527 INFO [dysarthria_finetune.py:1212] Training 
started +2024-08-30 21:55:14,228 INFO [dysarthria_finetune.py:1214] (33748090880, 34072559616) +2024-08-30 21:55:14,228 INFO [dysarthria_finetune.py:1215] Empty cache: before and after +2024-08-30 21:55:14,228 INFO [dysarthria_finetune.py:1219] (33748090880, 34072559616) +2024-08-30 21:55:14,235 INFO [dysarthria_finetune.py:1229] Device: cuda:0 +2024-08-30 21:55:15,120 INFO [dysarthria_finetune.py:1241] {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2608.int.cedar.computecanada.ca', 'IP address': '172.16.146.45'}, 'world_size': 1, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 21:55:15,121 INFO [dysarthria_finetune.py:1243] About to create model +2024-08-30 21:55:17,865 INFO [dysarthria_finetune.py:1247] Number of model parameters: 65805511 +2024-08-30 21:55:18,402 INFO [dysarthria_finetune.py:769] Loading checkpoint from 
/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-30 22:09:46,377 INFO [dysarthria_asr_datamodule.py:494] About to get train cuts +2024-08-30 22:13:07,937 INFO [dysarthria_finetune.py:1319] CutSet(len=62255) [underlying data type: ] +2024-08-30 22:13:08,780 INFO [dysarthria_asr_datamodule.py:239] Disable MUSAN +2024-08-30 22:13:08,780 INFO [dysarthria_asr_datamodule.py:257] Enable SpecAugment +2024-08-30 22:13:08,781 INFO [dysarthria_asr_datamodule.py:258] Time warp factor: 80 +2024-08-30 22:13:08,781 INFO [dysarthria_asr_datamodule.py:268] Num frame mask: 10 +2024-08-30 22:13:08,781 INFO [dysarthria_asr_datamodule.py:281] About to create train dataset +2024-08-30 22:13:10,568 INFO [dysarthria_asr_datamodule.py:308] Using DynamicBucketingSampler. +2024-08-30 22:13:11,483 INFO [dysarthria_asr_datamodule.py:325] About to create train dataloader +2024-08-30 22:13:11,490 INFO [dysarthria_asr_datamodule.py:500] About to get dev cuts +2024-08-30 22:13:11,687 INFO [dysarthria_asr_datamodule.py:356] About to create dev dataset +2024-08-30 22:13:12,243 INFO [dysarthria_asr_datamodule.py:373] About to create dev dataloader +2024-08-30 22:13:12,244 INFO [dysarthria_finetune.py:1490] Sanity check -- see if any of the batches in epoch 1 would cause OOM. diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-09-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-09-0 new file mode 100644 index 0000000000000000000000000000000000000000..e1876288e6ead2aefebd9a759d80ac10a4b76c22 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-09-0 @@ -0,0 +1,26 @@ +2024-08-31 00:09:09,992 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-31 00:09:11,091 INFO [dysarthria_finetune.py:1214] (0/4) (32783400960, 34072559616) +2024-08-31 00:09:11,091 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-31 00:09:11,467 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-31 00:09:11,505 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 
100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-31 00:21:11,743 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511 +2024-08-31 00:21:12,288 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-31 00:22:26,316 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-31 00:22:31,012 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-31 00:22:31,252 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-31 00:22:33,063 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-31 00:22:33,953 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-31 00:22:33,953 INFO [dysarthria_asr_datamodule.py:501] (0/4) About to get dev cuts +2024-08-31 00:22:34,027 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-31 00:22:34,393 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-31 00:22:34,393 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 02:23:24,740 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=18.74 vs. limit=7.5 +2024-08-31 02:23:26,354 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.21 vs. 
limit=7.5 +2024-08-31 02:23:27,402 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-1 new file mode 100644 index 0000000000000000000000000000000000000000..85d3018d2606fd45c1e07bc20b2a848317b85a5d --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-1 @@ -0,0 +1,26 @@ +2024-08-31 00:09:10,287 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-31 00:09:10,299 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616) +2024-08-31 00:09:10,300 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-31 00:09:11,153 INFO [dysarthria_finetune.py:1219] (1/4) (33091682304, 34072559616) +2024-08-31 00:09:11,154 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 
'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-31 00:21:11,837 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-31 00:21:11,837 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-31 00:22:26,314 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-31 00:22:31,066 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-31 00:22:31,253 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-31 00:22:32,988 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-31 00:22:33,876 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-31 00:22:33,881 INFO [dysarthria_asr_datamodule.py:501] (1/4) About to get dev cuts +2024-08-31 00:22:34,027 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-31 00:22:34,395 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-31 00:22:34,396 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 02:23:24,734 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=18.54 vs. limit=7.5 +2024-08-31 02:23:26,353 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=17.58 vs. 
limit=7.5
+2024-08-31 02:23:27,404 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-2
new file mode 100644
index 0000000000000000000000000000000000000000..5c3542fb5b797ab2eb25c687da9b86abcca74a87
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-2
@@ -0,0 +1,26 @@
+2024-08-31 00:09:10,260 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-31 00:09:10,299 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616)
+2024-08-31 00:09:10,300 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-31 00:09:11,135 INFO [dysarthria_finetune.py:1219] (2/4) (33106362368, 34072559616)
+2024-08-31 00:09:11,136 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-31 00:21:11,784 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511
+2024-08-31 00:21:11,784 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 00:22:26,343 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP
+2024-08-31 00:22:31,015 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts
+2024-08-31 00:22:31,252 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset
+2024-08-31 00:22:32,988 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler.
+2024-08-31 00:22:33,901 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader
+2024-08-31 00:22:33,902 INFO [dysarthria_asr_datamodule.py:501] (2/4) About to get dev cuts
+2024-08-31 00:22:34,027 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset
+2024-08-31 00:22:34,399 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader
+2024-08-31 00:22:34,403 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 02:23:24,735 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=17.72 vs. limit=7.5
+2024-08-31 02:23:26,354 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.83 vs. limit=7.5
+2024-08-31 02:23:27,400 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-3
new file mode 100644
index 0000000000000000000000000000000000000000..60f9e8767c850103ae4a46b8ef9c2b3426eba179
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-00-09-10-3
@@ -0,0 +1,29 @@
+2024-08-31 00:09:10,255 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-31 00:09:10,256 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616)
+2024-08-31 00:09:10,256 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-31 00:09:11,092 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616)
+2024-08-31 00:09:11,092 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-31 00:21:09,697 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 00:21:09,718 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-31 00:21:11,782 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511
+2024-08-31 00:21:11,782 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 00:22:26,336 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP
+2024-08-31 00:22:31,011 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts
+2024-08-31 00:22:31,252 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10
+2024-08-31 00:22:31,541 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset
+2024-08-31 00:22:32,988 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler.
+2024-08-31 00:22:33,876 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader
+2024-08-31 00:22:33,881 INFO [dysarthria_asr_datamodule.py:501] (3/4) About to get dev cuts
+2024-08-31 00:22:34,027 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset
+2024-08-31 00:22:34,395 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader
+2024-08-31 00:22:34,395 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 02:23:24,734 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=16.32 vs. limit=7.5
+2024-08-31 02:23:26,354 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.06 vs. limit=7.5
+2024-08-31 02:23:27,398 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB
+2024-08-31 02:59:28,695 INFO [dysarthria_finetune.py:1468] (3/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/batch-bdd640fb-0667-1ad1-1c80-317fa3b1799d.pt
+2024-08-31 03:12:30,862 INFO [dysarthria_finetune.py:1474] (3/4) features shape: torch.Size([26, 2997, 80])
+2024-08-31 03:12:30,864 INFO [dysarthria_finetune.py:1478] (3/4) num tokens: 2978
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-08-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-08-0
new file mode 100644
index 0000000000000000000000000000000000000000..c358989d4887c08a29ef326463750f9ea1d91148
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-08-0
@@ -0,0 +1,34 @@
+2024-08-31 13:20:08,819 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-31 13:20:17,703 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616)
+2024-08-31 13:20:17,703 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-31 13:20:18,056 INFO [dysarthria_finetune.py:1219] (0/4) (33748090880, 34072559616)
+2024-08-31 13:20:18,064 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0
+2024-08-31 13:20:18,101 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2552.int.cedar.computecanada.ca', 'IP address': '172.16.145.245'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 13:20:18,102 INFO [dysarthria_finetune.py:1243] (0/4) About to create model
+2024-08-31 13:20:24,135 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511
+2024-08-31 13:20:25,956 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 13:35:28,411 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP
+2024-08-31 13:35:40,931 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts
+2024-08-31 13:35:41,025 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10
+2024-08-31 13:35:41,431 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset
+2024-08-31 13:35:44,713 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler.
+2024-08-31 13:35:45,618 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader
+2024-08-31 13:35:45,619 INFO [dysarthria_asr_datamodule.py:501] (0/4) About to get dev cuts
+2024-08-31 13:35:45,646 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset
+2024-08-31 13:35:46,005 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader
+2024-08-31 13:35:46,006 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 13:39:46,774 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=18.74 vs. limit=7.5
+2024-08-31 13:39:47,156 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.21 vs. limit=7.5
+2024-08-31 13:39:48,869 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 13:39:50,790 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 14:32:39,038 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 14:34:09,039 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 14:44:25,352 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=288, metric=7.02 vs. limit=5.0
+2024-08-31 14:44:26,023 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 14:44:28,189 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 14:46:35,797 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3292, simple_loss=0.2739, pruned_loss=0.1428, ctc_loss=0.2051, over 18513.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.2739, pruned_loss=0.1428, ctc_loss=0.2051, over 18513.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-31 14:46:35,797 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-1
new file mode 100644
index 0000000000000000000000000000000000000000..ffbc6539780fdc04bb246bb28b227ccbecd32744
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-1
@@ -0,0 +1,34 @@
+2024-08-31 13:20:09,080 INFO [dysarthria_finetune.py:1212] (1/4) Training started
+2024-08-31 13:20:30,461 INFO [dysarthria_finetune.py:1214] (1/4) (33427226624, 34072559616)
+2024-08-31 13:20:30,461 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after
+2024-08-31 13:20:30,827 INFO [dysarthria_finetune.py:1219] (1/4) (33427226624, 34072559616)
+2024-08-31 13:20:30,828 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1
+2024-08-31 13:20:30,831 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2552.int.cedar.computecanada.ca', 'IP address': '172.16.145.245'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 13:20:30,831 INFO [dysarthria_finetune.py:1243] (1/4) About to create model
+2024-08-31 13:20:31,521 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511
+2024-08-31 13:20:31,521 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 13:35:28,176 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP
+2024-08-31 13:35:40,937 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts
+2024-08-31 13:35:41,025 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10
+2024-08-31 13:35:41,431 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset
+2024-08-31 13:35:44,629 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler.
+2024-08-31 13:35:45,566 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader
+2024-08-31 13:35:45,569 INFO [dysarthria_asr_datamodule.py:501] (1/4) About to get dev cuts
+2024-08-31 13:35:45,647 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset
+2024-08-31 13:35:46,006 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader
+2024-08-31 13:35:46,007 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 13:39:46,776 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=18.54 vs. limit=7.5
+2024-08-31 13:39:47,155 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=17.58 vs. limit=7.5
+2024-08-31 13:39:48,861 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
+2024-08-31 13:39:50,793 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
+2024-08-31 14:32:39,026 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
+2024-08-31 14:34:09,032 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
+2024-08-31 14:44:26,024 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
+2024-08-31 14:44:28,198 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB
+2024-08-31 14:46:35,798 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.3929, simple_loss=0.3185, pruned_loss=0.1863, ctc_loss=0.2785, over 18549.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.3185, pruned_loss=0.1863, ctc_loss=0.2785, over 18549.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-31 14:46:35,798 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech
+2024-08-31 15:51:58,632 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3058, simple_loss=0.2552, pruned_loss=0.1294, ctc_loss=0.1884, over 276520.00 frames.
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-2
new file mode 100644
index 0000000000000000000000000000000000000000..61112c443750a1227aee69f85da1e73abbce4a6f
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-2
@@ -0,0 +1,35 @@
+2024-08-31 13:20:09,083 INFO [dysarthria_finetune.py:1212] (2/4) Training started
+2024-08-31 13:20:48,982 INFO [dysarthria_finetune.py:1214] (2/4) (32783400960, 34072559616)
+2024-08-31 13:20:48,982 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after
+2024-08-31 13:20:49,354 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616)
+2024-08-31 13:20:49,354 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2
+2024-08-31 13:20:49,357 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2552.int.cedar.computecanada.ca', 'IP address': '172.16.145.245'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 13:20:49,358 INFO [dysarthria_finetune.py:1243] (2/4) About to create model
+2024-08-31 13:20:50,038 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511
+2024-08-31 13:20:50,039 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 13:35:28,190 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP
+2024-08-31 13:35:40,931 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts
+2024-08-31 13:35:41,025 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10
+2024-08-31 13:35:41,431 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset
+2024-08-31 13:35:44,629 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler.
+2024-08-31 13:35:45,576 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader
+2024-08-31 13:35:45,577 INFO [dysarthria_asr_datamodule.py:501] (2/4) About to get dev cuts
+2024-08-31 13:35:45,646 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset
+2024-08-31 13:35:46,007 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader
+2024-08-31 13:35:46,008 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 13:39:46,776 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=17.72 vs. limit=7.5
+2024-08-31 13:39:47,156 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.83 vs. limit=7.5
+2024-08-31 13:39:48,857 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
+2024-08-31 13:39:50,794 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
+2024-08-31 14:32:39,031 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
+2024-08-31 14:34:09,033 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
+2024-08-31 14:44:26,026 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
+2024-08-31 14:44:28,195 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB
+2024-08-31 14:46:35,796 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.4003, simple_loss=0.3243, pruned_loss=0.1928, ctc_loss=0.2836, over 18533.00 frames. ], tot_loss[loss=0.4003, simple_loss=0.3243, pruned_loss=0.1928, ctc_loss=0.2836, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-31 14:46:35,797 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech
+2024-08-31 15:52:10,156 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3058, simple_loss=0.2552, pruned_loss=0.1294, ctc_loss=0.1884, over 276520.00 frames.
+2024-08-31 15:52:11,609 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19755MB
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-3
new file mode 100644
index 0000000000000000000000000000000000000000..4d0deda8ef3e7f0f4b9bf82f3c04303ea07ff8a4
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-13-20-09-3
@@ -0,0 +1,35 @@
+2024-08-31 13:20:09,100 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-31 13:20:39,984 INFO [dysarthria_finetune.py:1214] (3/4) (33106362368, 34072559616)
+2024-08-31 13:20:39,985 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-31 13:20:40,357 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616)
+2024-08-31 13:20:40,357 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-31 13:20:40,360 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2552.int.cedar.computecanada.ca', 'IP address': '172.16.145.245'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 13:20:40,360 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-31 13:20:41,062 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511
+2024-08-31 13:20:41,063 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 13:35:28,175 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP
+2024-08-31 13:35:40,931 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts
+2024-08-31 13:35:41,025 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment
+2024-08-31 13:35:41,430 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80
+2024-08-31 13:35:41,431 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10
+2024-08-31 13:35:41,431 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset
+2024-08-31 13:35:44,629 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler.
+2024-08-31 13:35:45,562 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader
+2024-08-31 13:35:45,569 INFO [dysarthria_asr_datamodule.py:501] (3/4) About to get dev cuts
+2024-08-31 13:35:45,646 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset
+2024-08-31 13:35:46,008 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader
+2024-08-31 13:35:46,008 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 13:39:46,781 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=16.32 vs. limit=7.5
+2024-08-31 13:39:47,156 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.06 vs. limit=7.5
+2024-08-31 13:39:48,862 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11825MB
+2024-08-31 13:39:50,790 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11825MB
+2024-08-31 14:32:39,026 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11825MB
+2024-08-31 14:34:09,037 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11825MB
+2024-08-31 14:44:26,024 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11825MB
+2024-08-31 14:44:28,198 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11825MB
+2024-08-31 14:46:35,800 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3339, simple_loss=0.2734, pruned_loss=0.1449, ctc_loss=0.2303, over 18634.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.2734, pruned_loss=0.1449, ctc_loss=0.2303, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-31 14:46:35,800 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech
+2024-08-31 15:52:10,160 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3058, simple_loss=0.2552, pruned_loss=0.1294, ctc_loss=0.1884, over 276520.00 frames.
+2024-08-31 15:52:33,807 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14286MB
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-0
new file mode 100644
index 0000000000000000000000000000000000000000..46414bf69525278fa041192fe10d6859db276261
--- /dev/null
+++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-0
@@ -0,0 +1,559 @@
+2024-08-31 22:13:17,694 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-31 22:13:19,468 INFO [dysarthria_finetune.py:1214] (0/4) (32783400960, 34072559616)
+2024-08-31 22:13:19,468 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-31 22:13:19,851 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616)
+2024-08-31 22:13:19,868 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0
+2024-08-31 22:13:19,872 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-31 22:13:19,872 INFO [dysarthria_finetune.py:1243] (0/4) About to create model
+2024-08-31 22:13:21,156 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65805511
+2024-08-31 22:13:21,693 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt
+2024-08-31 22:13:33,264 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP
+2024-08-31 22:14:37,196 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts
+2024-08-31 22:14:37,263 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ]
+2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN
+2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment
+2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80
+2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10
+2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset
+2024-08-31 22:14:55,449 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler.
+2024-08-31 22:14:56,378 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader
+2024-08-31 22:14:56,378 INFO [dysarthria_asr_datamodule.py:501] (0/4) About to get dev cuts
+2024-08-31 22:14:56,477 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset
+2024-08-31 22:14:57,473 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader
+2024-08-31 22:14:57,473 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-31 22:16:23,767 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=18.74 vs. limit=7.5
+2024-08-31 22:16:31,018 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.21 vs. limit=7.5
+2024-08-31 22:16:34,163 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 22:16:36,234 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 22:17:53,349 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 22:17:55,324 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 22:19:45,161 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=288, metric=7.02 vs. limit=5.0
+2024-08-31 22:19:46,278 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 22:19:48,456 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11759MB
+2024-08-31 22:20:26,656 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3292, simple_loss=0.2739, pruned_loss=0.1428, ctc_loss=0.2051, over 18513.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.2739, pruned_loss=0.1428, ctc_loss=0.2051, over 18513.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-31 22:20:26,657 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-31 22:32:57,022 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.373, simple_loss=0.3046, pruned_loss=0.1755, ctc_loss=0.2544, over 1073944.00 frames.
+2024-08-31 22:32:57,064 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB
+2024-08-31 22:36:03,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=100000.0, ans=0.0
+2024-08-31 22:47:02,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100000.0, ans=0.125
+2024-08-31 22:51:26,990 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.923e+02 1.157e+03 1.203e+03 1.280e+03 1.380e+03, threshold=4.812e+03, percent-clipped=0.0
+2024-08-31 22:57:43,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=35.69 vs. limit=15.0
+2024-08-31 23:02:59,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0
+2024-08-31 23:03:06,383 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.113e+02 1.083e+03 1.198e+03 1.280e+03 1.431e+03, threshold=4.794e+03, percent-clipped=0.0
+2024-08-31 23:21:24,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=100160.0, ans=0.125
+2024-08-31 23:29:32,322 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.285e+02 9.052e+02 1.061e+03 1.198e+03 1.431e+03, threshold=4.243e+03, percent-clipped=0.0
+2024-08-31 23:49:33,573 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.4115, simple_loss=0.332, pruned_loss=0.1923, ctc_loss=0.3005, over 18890.00 frames. ], tot_loss[loss=0.4074, simple_loss=0.3296, pruned_loss=0.1961, ctc_loss=0.2899, over 828692.51 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0
+2024-08-31 23:56:27,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.62 vs. limit=22.5
+2024-09-01 00:02:21,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=100320.0, ans=0.125
+2024-09-01 00:07:03,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100373.33333333333, ans=0.1
+2024-09-01 00:07:03,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100373.33333333333, ans=0.1
+2024-09-01 00:18:03,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=20.00 vs. limit=15.0
+2024-09-01 00:18:31,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+02 6.817e+02 8.321e+02 1.009e+03 1.431e+03, threshold=1.664e+03, percent-clipped=0.0
+2024-09-01 00:18:31,064 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.3787, simple_loss=0.3065, pruned_loss=0.1801, ctc_loss=0.2691, over 19293.00 frames. ], tot_loss[loss=0.388, simple_loss=0.3145, pruned_loss=0.1836, ctc_loss=0.2747, over 1474004.25 frames. ], batch size: 144, lr: 6.01e-05, grad_scale: 4.0
+2024-09-01 00:19:11,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.24 vs. limit=6.0
+2024-09-01 00:28:35,452 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-1.pt
+2024-09-01 00:28:50,184 INFO [dysarthria_finetune.py:1435] (0/4) (1179320320, 34072559616)
+2024-09-01 00:28:50,184 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 00:28:50,216 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616)
+2024-09-01 00:29:13,525 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 0, loss[loss=0.3305, simple_loss=0.2745, pruned_loss=0.1334, ctc_loss=0.219, over 18874.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.2745, pruned_loss=0.1334, ctc_loss=0.219, over 18874.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0
+2024-09-01 00:29:13,526 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 00:34:07,571 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 2, validation: loss=0.3353, simple_loss=0.2773, pruned_loss=0.1482, ctc_loss=0.2175, over 1073944.00 frames.
+2024-09-01 00:34:07,571 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB
+2024-09-01 00:49:20,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=100736.0, ans=10.0
+2024-09-01 00:55:06,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0
+2024-09-01 00:56:09,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=40.10 vs. limit=22.5
+2024-09-01 00:56:46,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100896.0, ans=0.0
+2024-09-01 00:57:00,575 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 00:59:42,891 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 50, loss[loss=0.3734, simple_loss=0.2993, pruned_loss=0.1828, ctc_loss=0.2721, over 18964.00 frames. ], tot_loss[loss=0.3749, simple_loss=0.3056, pruned_loss=0.1713, ctc_loss=0.2604, over 826819.73 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0
+2024-09-01 01:02:49,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=100949.33333333333, ans=0.0
+2024-09-01 01:07:12,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=101002.66666666667, ans=0.07
+2024-09-01 01:08:04,419 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.183e+02 4.403e+02 5.126e+02 5.917e+02 6.888e+02, threshold=1.025e+03, percent-clipped=0.0
+2024-09-01 01:09:08,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101056.0, ans=0.1
+2024-09-01 01:09:23,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=101056.0, ans=0.0
+2024-09-01 01:10:02,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=101109.33333333333, ans=0.125
+2024-09-01 01:10:13,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.60 vs. limit=15.0
+2024-09-01 01:10:32,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=32.10 vs. limit=22.5
+2024-09-01 01:10:48,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=101109.33333333333, ans=0.125
+2024-09-01 01:14:19,324 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 100, loss[loss=0.4031, simple_loss=0.3274, pruned_loss=0.1838, ctc_loss=0.2858, over 19229.00 frames. ], tot_loss[loss=0.3604, simple_loss=0.295, pruned_loss=0.1616, ctc_loss=0.2476, over 1473154.80 frames. ], batch size: 144, lr: 7.29e-05, grad_scale: 8.0
+2024-09-01 01:14:23,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=23.98 vs. limit=15.0
+2024-09-01 01:14:58,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=101216.0, ans=0.125
+2024-09-01 01:18:19,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.80 vs. limit=15.0
+2024-09-01 01:18:25,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=101269.33333333333, ans=0.125
+2024-09-01 01:18:42,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=101322.66666666667, ans=0.0
+2024-09-01 01:20:51,911 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-2.pt
+2024-09-01 01:21:12,424 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616)
+2024-09-01 01:21:12,424 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 01:21:12,451 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616)
+2024-09-01 01:21:20,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0
+2024-09-01 01:21:21,460 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 0, loss[loss=0.3464, simple_loss=0.2848, pruned_loss=0.1562, ctc_loss=0.2308, over 18603.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.2848, pruned_loss=0.1562, ctc_loss=0.2308, over 18603.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0
+2024-09-01 01:21:21,461 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 01:21:44,693 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 3, validation: loss=0.309, simple_loss=0.2588, pruned_loss=0.13, ctc_loss=0.1938, over 1073944.00 frames.
+2024-09-01 01:21:44,694 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB
+2024-09-01 01:22:28,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=101370.66666666667, ans=0.04949747468305833
+2024-09-01 01:22:58,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.44 vs. limit=15.0
+2024-09-01 01:23:07,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=101424.0, ans=0.07
+2024-09-01 01:23:21,591 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 01:23:46,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101477.33333333333, ans=0.125
+2024-09-01 01:24:27,009 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.351e+02 3.834e+02 4.204e+02 5.264e+02, threshold=7.667e+02, percent-clipped=0.0
+2024-09-01 01:24:47,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=101637.33333333333, ans=0.125
+2024-09-01 01:24:49,033 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 50, loss[loss=0.3486, simple_loss=0.2882, pruned_loss=0.1492, ctc_loss=0.235, over 18964.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.2836, pruned_loss=0.1512, ctc_loss=0.2379, over 827741.27 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 16.0
+2024-09-01 01:25:16,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=17.86 vs. limit=15.0
+2024-09-01 01:26:25,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=101797.33333333333, ans=0.5
+2024-09-01 01:26:29,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=101797.33333333333, ans=0.2
+2024-09-01 01:26:34,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.78 vs. limit=22.5
+2024-09-01 01:27:00,716 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 100, loss[loss=0.3133, simple_loss=0.2615, pruned_loss=0.1308, ctc_loss=0.2054, over 19231.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.2765, pruned_loss=0.1452, ctc_loss=0.229, over 1473938.15 frames. ], batch size: 144, lr: 8.58e-05, grad_scale: 16.0
+2024-09-01 01:27:28,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=18.45 vs. limit=15.0
+2024-09-01 01:27:32,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=101957.33333333333, ans=0.0
+2024-09-01 01:27:49,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=17.92 vs. limit=15.0
+2024-09-01 01:29:32,085 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-3.pt
+2024-09-01 01:29:36,842 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616)
+2024-09-01 01:29:36,842 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 01:29:36,889 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616)
+2024-09-01 01:29:45,316 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 0, loss[loss=0.3173, simple_loss=0.2603, pruned_loss=0.1423, ctc_loss=0.2156, over 18523.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.2603, pruned_loss=0.1423, ctc_loss=0.2156, over 18523.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0
+2024-09-01 01:29:45,317 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 01:30:08,489 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 4, validation: loss=0.2887, simple_loss=0.2447, pruned_loss=0.1169, ctc_loss=0.1781, over 1073944.00 frames.
+2024-09-01 01:30:08,490 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB
+2024-09-01 01:30:42,262 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.378e+02 2.838e+02 3.147e+02 3.460e+02 5.318e+02, threshold=6.294e+02, percent-clipped=0.0
+2024-09-01 01:30:43,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=102106.66666666667, ans=0.125
+2024-09-01 01:30:54,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=18.43 vs. limit=15.0
+2024-09-01 01:31:02,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=102160.0, ans=0.0
+2024-09-01 01:31:27,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=102213.33333333333, ans=0.025
+2024-09-01 01:32:01,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=102320.0, ans=0.0
+2024-09-01 01:32:02,189 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 50, loss[loss=0.3468, simple_loss=0.2905, pruned_loss=0.1474, ctc_loss=0.2216, over 18961.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.2647, pruned_loss=0.1338, ctc_loss=0.2182, over 828586.64 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0
+2024-09-01 01:32:05,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.53 vs. limit=15.0
+2024-09-01 01:32:25,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0
+2024-09-01 01:32:42,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102426.66666666667, ans=0.125
+2024-09-01 01:32:50,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=102426.66666666667, ans=0.2
+2024-09-01 01:32:56,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=102426.66666666667, ans=0.125
+2024-09-01 01:32:58,806 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.50 vs. limit=15.0
+2024-09-01 01:35:45,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102480.0, ans=0.1
+2024-09-01 01:35:53,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=102480.0, ans=0.0
+2024-09-01 01:37:29,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=14.54 vs. limit=15.0
+2024-09-01 01:37:33,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102533.33333333333, ans=0.0
+2024-09-01 01:37:37,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102533.33333333333, ans=0.1
+2024-09-01 01:37:40,023 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 100, loss[loss=0.3266, simple_loss=0.2695, pruned_loss=0.1364, ctc_loss=0.2321, over 19286.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.26, pruned_loss=0.1307, ctc_loss=0.2136, over 1474147.24 frames.
], batch size: 144, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:38:05,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=102640.0, ans=0.0 +2024-09-01 01:38:09,925 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 2.526e+02 2.751e+02 3.040e+02 4.636e+02, threshold=5.501e+02, percent-clipped=0.0 +2024-09-01 01:38:30,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102693.33333333333, ans=0.1 +2024-09-01 01:38:32,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=102693.33333333333, ans=0.2 +2024-09-01 01:38:34,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=102693.33333333333, ans=0.5 +2024-09-01 01:38:35,841 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-4.pt +2024-09-01 01:38:40,667 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 01:38:40,667 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:38:40,695 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 01:38:49,733 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 0, loss[loss=0.27, simple_loss=0.224, pruned_loss=0.1086, ctc_loss=0.1944, over 18549.00 frames. ], tot_loss[loss=0.27, simple_loss=0.224, pruned_loss=0.1086, ctc_loss=0.1944, over 18549.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:38:49,734 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:39:30,974 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 5, validation: loss=0.2717, simple_loss=0.233, pruned_loss=0.1066, ctc_loss=0.1665, over 1073944.00 frames. +2024-09-01 01:39:30,974 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 01:40:54,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=102741.33333333333, ans=0.2 +2024-09-01 01:41:16,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 01:41:24,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0 +2024-09-01 01:41:31,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=15.0 +2024-09-01 01:41:33,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 01:41:34,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=15.83 vs. 
limit=15.0 +2024-09-01 01:42:07,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=102848.0, ans=0.025 +2024-09-01 01:42:16,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=102848.0, ans=0.025 +2024-09-01 01:43:55,506 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 50, loss[loss=0.2897, simple_loss=0.2467, pruned_loss=0.1121, ctc_loss=0.1907, over 19008.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.2525, pruned_loss=0.122, ctc_loss=0.2082, over 828355.03 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:44:25,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.54 vs. limit=10.0 +2024-09-01 01:44:53,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103061.33333333333, ans=0.125 +2024-09-01 01:44:59,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=103061.33333333333, ans=0.2 +2024-09-01 01:45:02,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.19 vs. limit=15.0 +2024-09-01 01:45:15,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:45:36,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:46:04,692 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 2.382e+02 2.524e+02 2.770e+02 4.371e+02, threshold=5.047e+02, percent-clipped=0.0 +2024-09-01 01:46:25,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-09-01 01:46:25,463 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.27 vs. limit=15.0 +2024-09-01 01:46:33,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.22 vs. limit=15.0 +2024-09-01 01:46:38,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103221.33333333333, ans=0.1 +2024-09-01 01:46:52,893 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 100, loss[loss=0.2814, simple_loss=0.2368, pruned_loss=0.1128, ctc_loss=0.1924, over 19287.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.2481, pruned_loss=0.1192, ctc_loss=0.2027, over 1473652.43 frames. ], batch size: 144, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:46:59,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=103274.66666666667, ans=0.0 +2024-09-01 01:46:59,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.52 vs. 
limit=15.0 +2024-09-01 01:47:08,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103274.66666666667, ans=0.1 +2024-09-01 01:47:42,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.85 vs. limit=15.0 +2024-09-01 01:48:11,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=103381.33333333333, ans=0.025 +2024-09-01 01:48:20,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=103381.33333333333, ans=0.125 +2024-09-01 01:48:26,052 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-5.pt +2024-09-01 01:48:33,615 INFO [dysarthria_finetune.py:1435] (0/4) (1124794368, 34072559616) +2024-09-01 01:48:33,615 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:48:33,644 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 01:48:41,999 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 0, loss[loss=0.2961, simple_loss=0.2496, pruned_loss=0.1166, ctc_loss=0.2059, over 18610.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.2496, pruned_loss=0.1166, ctc_loss=0.2059, over 18610.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:48:42,000 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:49:05,140 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 6, validation: loss=0.2578, simple_loss=0.2238, pruned_loss=0.09861, ctc_loss=0.1582, over 1073944.00 frames. +2024-09-01 01:49:05,141 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 01:49:19,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.18 vs. limit=15.0 +2024-09-01 01:49:45,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=103477.33333333333, ans=0.125 +2024-09-01 01:49:45,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=103477.33333333333, ans=0.125 +2024-09-01 01:50:04,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.91 vs. limit=15.0 +2024-09-01 01:50:06,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=103530.66666666667, ans=0.0 +2024-09-01 01:50:44,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.04 vs. limit=15.0 +2024-09-01 01:50:46,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=103637.33333333333, ans=0.0 +2024-09-01 01:50:52,014 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 50, loss[loss=0.2805, simple_loss=0.2386, pruned_loss=0.1096, ctc_loss=0.1906, over 19047.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.2459, pruned_loss=0.1176, ctc_loss=0.2034, over 829577.21 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:50:57,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=103690.66666666667, ans=0.09899494936611666 +2024-09-01 01:50:57,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 01:51:08,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 2.277e+02 2.375e+02 2.614e+02 3.891e+02, threshold=4.750e+02, percent-clipped=0.0 +2024-09-01 01:51:46,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=103797.33333333333, ans=0.025 +2024-09-01 01:52:13,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=103904.0, ans=0.0 +2024-09-01 01:52:17,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-09-01 01:52:34,156 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 100, loss[loss=0.2687, simple_loss=0.2287, pruned_loss=0.1047, ctc_loss=0.1851, over 19232.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.2375, pruned_loss=0.1104, ctc_loss=0.1921, over 1476247.28 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:52:53,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=103957.33333333333, ans=0.125 +2024-09-01 01:52:55,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=103957.33333333333, ans=0.125 +2024-09-01 01:53:14,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.19 vs. limit=15.0 +2024-09-01 01:53:34,207 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-6.pt +2024-09-01 01:53:38,637 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616) +2024-09-01 01:53:38,637 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:53:38,664 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 01:53:47,074 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 0, loss[loss=0.2723, simple_loss=0.2373, pruned_loss=0.1046, ctc_loss=0.17, over 18570.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.2373, pruned_loss=0.1046, ctc_loss=0.17, over 18570.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:53:47,075 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:54:10,657 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 7, validation: loss=0.2464, simple_loss=0.2165, pruned_loss=0.09214, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 01:54:10,658 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 01:55:12,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.149e+02 2.268e+02 2.457e+02 3.821e+02, threshold=4.535e+02, percent-clipped=0.0 +2024-09-01 01:55:53,896 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 50, loss[loss=0.2764, simple_loss=0.2408, pruned_loss=0.09974, ctc_loss=0.191, over 18968.00 frames. 
], tot_loss[loss=0.2739, simple_loss=0.2336, pruned_loss=0.1058, ctc_loss=0.1916, over 827907.61 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:56:26,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=104426.66666666667, ans=0.125 +2024-09-01 01:57:12,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104586.66666666667, ans=0.125 +2024-09-01 01:57:30,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104640.0, ans=0.125 +2024-09-01 01:57:30,837 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 100, loss[loss=0.2587, simple_loss=0.2229, pruned_loss=0.09694, ctc_loss=0.1833, over 19302.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.2294, pruned_loss=0.1027, ctc_loss=0.1868, over 1473040.93 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:36,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104640.0, ans=0.125 +2024-09-01 01:58:00,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=104693.33333333333, ans=0.125 +2024-09-01 01:58:12,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=104746.66666666667, ans=0.0 +2024-09-01 01:58:22,457 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-7.pt +2024-09-01 01:58:26,695 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616) +2024-09-01 01:58:26,696 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 01:58:26,722 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 01:58:35,282 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 0, loss[loss=0.2824, simple_loss=0.2416, pruned_loss=0.1115, ctc_loss=0.1942, over 18485.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.2416, pruned_loss=0.1115, ctc_loss=0.1942, over 18485.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:58:35,283 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 01:58:58,370 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 8, validation: loss=0.236, simple_loss=0.2103, pruned_loss=0.08624, ctc_loss=0.1474, over 1073944.00 frames. +2024-09-01 01:58:58,370 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 01:59:02,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.080e+02 2.182e+02 2.331e+02 3.634e+02, threshold=4.365e+02, percent-clipped=0.0 +2024-09-01 01:59:33,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=15.88 vs. limit=15.0 +2024-09-01 02:00:22,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=105002.66666666667, ans=0.2 +2024-09-01 02:00:31,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=10.98 vs. 
limit=12.0 +2024-09-01 02:00:36,429 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 50, loss[loss=0.2596, simple_loss=0.2259, pruned_loss=0.09894, ctc_loss=0.1757, over 18938.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.2225, pruned_loss=0.0957, ctc_loss=0.1816, over 828565.55 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:02:06,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105269.33333333333, ans=0.125 +2024-09-01 02:02:13,394 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 100, loss[loss=0.258, simple_loss=0.2276, pruned_loss=0.09404, ctc_loss=0.1778, over 19222.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.2215, pruned_loss=0.09523, ctc_loss=0.179, over 1474444.14 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:02:17,359 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.064e+02 2.191e+02 2.358e+02 3.385e+02, threshold=4.381e+02, percent-clipped=0.0 +2024-09-01 02:02:32,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105376.0, ans=0.125 +2024-09-01 02:02:34,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105376.0, ans=0.1 +2024-09-01 02:02:36,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105376.0, ans=0.125 +2024-09-01 02:02:38,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.62 vs. limit=15.0 +2024-09-01 02:02:41,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=105376.0, ans=0.2 +2024-09-01 02:02:43,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105376.0, ans=0.125 +2024-09-01 02:02:59,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=105429.33333333333, ans=0.0 +2024-09-01 02:03:07,393 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-8.pt +2024-09-01 02:03:11,779 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 02:03:11,780 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:03:11,807 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:03:20,910 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 0, loss[loss=0.2555, simple_loss=0.2233, pruned_loss=0.09714, ctc_loss=0.1749, over 18596.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.2233, pruned_loss=0.09714, ctc_loss=0.1749, over 18596.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:20,911 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:03:44,100 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 9, validation: loss=0.2267, simple_loss=0.2052, pruned_loss=0.08107, ctc_loss=0.1434, over 1073944.00 frames. 
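
The `scaling.py:214` entries above record `ScheduledFloat` values: per-module scalar hyperparameters (skip rates, balancer probabilities, `scale_min` floors) that icefall's zipformer anneals as a piecewise-linear function of the global batch count. As a minimal editorial stand-in — the helper name and breakpoints below are illustrative assumptions, not values taken from this run or from icefall's `scaling.py` — the interpolation looks roughly like this:

```python
def scheduled_float(batch_count: float, points: list[tuple[float, float]]) -> float:
    """Linearly interpolate a scalar hyperparameter (e.g. a skip rate or
    balancer probability) as a function of the global batch count.

    Editorial sketch; icefall's real ScheduledFloat lives in
    egs/librispeech/ASR/zipformer/scaling.py and differs in detail."""
    points = sorted(points)
    if batch_count <= points[0][0]:
        return points[0][1]
    if batch_count >= points[-1][0]:
        return points[-1][1]
    for (x0, y0), (x1, y1) in zip(points, points[1:]):
        if x0 <= batch_count <= x1:
            frac = (batch_count - x0) / (x1 - x0)
            return y0 + frac * (y1 - y0)
    raise AssertionError("unreachable for sorted points")

# An assumed skip-rate schedule decaying 0.2 -> 0.05 -> 0.0 over training:
# by batch_count ~101k it is long since flat at 0.0.
print(scheduled_float(101322.67, [(0.0, 0.2), (4000.0, 0.05), (16000.0, 0.0)]))  # -> 0.0
```

A decay of this shape would explain why nearly every `*_skip_rate` entry above reads `ans=0.0` at batch counts past 100k: the regularizers have been annealed away late in fine-tuning.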
+2024-09-01 02:03:44,101 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:03:55,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.00 vs. limit=15.0 +2024-09-01 02:04:17,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=105530.66666666667, ans=0.05 +2024-09-01 02:04:38,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-09-01 02:05:27,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=105690.66666666667, ans=0.0 +2024-09-01 02:05:30,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=105690.66666666667, ans=0.2 +2024-09-01 02:05:35,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.99 vs. limit=15.0 +2024-09-01 02:05:38,303 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 50, loss[loss=0.2616, simple_loss=0.2292, pruned_loss=0.09597, ctc_loss=0.19, over 19065.00 frames. ], tot_loss[loss=0.248, simple_loss=0.2187, pruned_loss=0.09054, ctc_loss=0.1749, over 828972.56 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:44,130 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:06:08,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=105797.33333333333, ans=0.0 +2024-09-01 02:06:22,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105797.33333333333, ans=0.125 +2024-09-01 02:06:29,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:06:31,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=105850.66666666667, ans=0.2 +2024-09-01 02:06:31,756 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.876e+02 2.077e+02 2.184e+02 2.316e+02 3.584e+02, threshold=4.367e+02, percent-clipped=0.0 +2024-09-01 02:07:12,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=105957.33333333333, ans=0.0 +2024-09-01 02:07:32,678 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 100, loss[loss=0.1912, simple_loss=0.1796, pruned_loss=0.06002, ctc_loss=0.129, over 19269.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.216, pruned_loss=0.08866, ctc_loss=0.1723, over 1474236.32 frames. 
], batch size: 144, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:07:35,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:07:54,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:08:02,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=106064.0, ans=0.0 +2024-09-01 02:08:34,528 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-9.pt +2024-09-01 02:08:39,100 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 02:08:39,100 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:08:39,127 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:08:48,127 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 0, loss[loss=0.2179, simple_loss=0.197, pruned_loss=0.07655, ctc_loss=0.1518, over 18682.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.197, pruned_loss=0.07655, ctc_loss=0.1518, over 18682.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:48,128 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:09:26,928 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 10, validation: loss=0.2182, simple_loss=0.2007, pruned_loss=0.07671, ctc_loss=0.1399, over 1073944.00 frames. +2024-09-01 02:09:26,929 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:09:40,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106165.33333333333, ans=0.125 +2024-09-01 02:10:02,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.09 vs. limit=22.5 +2024-09-01 02:10:10,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=106218.66666666667, ans=0.2 +2024-09-01 02:10:11,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:10:39,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=106272.0, ans=0.125 +2024-09-01 02:11:16,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:11:54,668 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.023e+02 2.117e+02 2.323e+02 3.505e+02, threshold=4.234e+02, percent-clipped=0.0 +2024-09-01 02:12:00,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=106378.66666666667, ans=0.025 +2024-09-01 02:12:16,359 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 50, loss[loss=0.2755, simple_loss=0.2387, pruned_loss=0.1048, ctc_loss=0.2069, over 19012.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2142, pruned_loss=0.0884, ctc_loss=0.1757, over 829104.52 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:12:37,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.90 vs. limit=22.5 +2024-09-01 02:12:55,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.89 vs. limit=15.0 +2024-09-01 02:13:32,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:13:48,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.81 vs. limit=12.0 +2024-09-01 02:14:12,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=106592.0, ans=0.5 +2024-09-01 02:15:12,129 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 100, loss[loss=0.2289, simple_loss=0.208, pruned_loss=0.08011, ctc_loss=0.1646, over 19226.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2129, pruned_loss=0.08675, ctc_loss=0.1726, over 1474931.95 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:15:33,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=106698.66666666667, ans=0.2 +2024-09-01 02:15:39,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-09-01 02:16:08,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=106752.0, ans=0.125 +2024-09-01 02:16:21,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=106805.33333333333, ans=0.025 +2024-09-01 02:16:29,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=106805.33333333333, ans=0.125 +2024-09-01 02:16:36,655 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-10.pt +2024-09-01 02:16:40,877 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 02:16:40,878 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:16:40,905 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:16:49,151 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 0, loss[loss=0.2417, simple_loss=0.2184, pruned_loss=0.0897, ctc_loss=0.1631, over 18505.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2184, pruned_loss=0.0897, ctc_loss=0.1631, over 18505.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:16:49,152 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:17:12,832 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 11, validation: loss=0.211, simple_loss=0.1968, pruned_loss=0.07375, ctc_loss=0.137, over 1073944.00 frames. 
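
The `optim.py:487` warnings summarize the recent distribution of gradient norms as five quantiles (min, 25%, median, 75%, max) plus a clipping threshold. In every entry above the threshold matches `Clipping_scale` (2.0) times the logged median — e.g. 2.0 × 2.184e+02 ≈ 4.367e+02 — so a plausible stand-in for the report (inferred from the numbers, not copied from icefall's optimizer) is:

```python
import torch

def clipping_report(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    """Summarize recent gradient norms the way the optim.py warnings above do.

    Editorial sketch: the quantile levels and the threshold rule are inferred
    from the logged values, not taken from icefall's code."""
    q = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]                     # 2.0 x median
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return q, threshold, percent_clipped

norms = 180.0 + 60.0 * torch.rand(256)                    # fake recent grad norms
quartiles, threshold, pct = clipping_report(norms)
print(quartiles.tolist(), float(threshold), float(pct))   # cf. "percent-clipped=0.0"
```

With the norms tightly clustered below twice their median, nothing exceeds the threshold, which matches the constant `percent-clipped=0.0` in this run.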
+2024-09-01 02:17:12,833 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:17:43,503 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 1.989e+02 2.082e+02 2.188e+02 3.029e+02, threshold=4.165e+02, percent-clipped=0.0 +2024-09-01 02:17:56,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=106960.0, ans=0.0 +2024-09-01 02:18:38,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 02:18:40,624 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.46 vs. limit=10.0 +2024-09-01 02:18:46,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.38 vs. limit=15.0 +2024-09-01 02:18:53,621 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 50, loss[loss=0.222, simple_loss=0.203, pruned_loss=0.07789, ctc_loss=0.1607, over 19023.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2098, pruned_loss=0.08428, ctc_loss=0.1722, over 827570.26 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:18:57,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=107120.0, ans=0.0 +2024-09-01 02:19:15,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=107173.33333333333, ans=0.0 +2024-09-01 02:19:31,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=107173.33333333333, ans=0.0 +2024-09-01 02:20:09,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.08 vs. limit=22.5 +2024-09-01 02:20:35,688 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 100, loss[loss=0.2039, simple_loss=0.1895, pruned_loss=0.06603, ctc_loss=0.1611, over 19237.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2076, pruned_loss=0.08272, ctc_loss=0.1697, over 1473115.37 frames. 
], batch size: 144, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:20:43,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=107386.66666666667, ans=0.0 +2024-09-01 02:20:47,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107386.66666666667, ans=0.125 +2024-09-01 02:21:05,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.795e+02 1.934e+02 2.032e+02 2.152e+02 3.346e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-09-01 02:21:08,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:21:30,083 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-11.pt +2024-09-01 02:21:36,882 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 02:21:36,882 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:21:36,911 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:21:45,317 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 0, loss[loss=0.2209, simple_loss=0.1993, pruned_loss=0.08268, ctc_loss=0.1561, over 18585.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.1993, pruned_loss=0.08268, ctc_loss=0.1561, over 18585.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:21:45,318 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:22:12,224 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 12, validation: loss=0.2042, simple_loss=0.1932, pruned_loss=0.07127, ctc_loss=0.1341, over 1073944.00 frames. +2024-09-01 02:22:12,225 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:22:23,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=29.75 vs. limit=22.5 +2024-09-01 02:22:35,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107541.33333333333, ans=0.125 +2024-09-01 02:22:42,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.24 vs. limit=15.0 +2024-09-01 02:23:00,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=107594.66666666667, ans=0.2 +2024-09-01 02:23:26,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=107648.0, ans=0.125 +2024-09-01 02:24:27,766 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 50, loss[loss=0.209, simple_loss=0.1967, pruned_loss=0.06904, ctc_loss=0.1599, over 18986.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2002, pruned_loss=0.07591, ctc_loss=0.1597, over 829307.75 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:25:31,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107914.66666666667, ans=0.125 +2024-09-01 02:25:39,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107914.66666666667, ans=0.125 +2024-09-01 02:25:48,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-09-01 02:25:55,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.770e+02 1.958e+02 2.051e+02 2.245e+02 3.047e+02, threshold=4.102e+02, percent-clipped=0.0 +2024-09-01 02:25:59,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107968.0, ans=0.1 +2024-09-01 02:26:05,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=108021.33333333333, ans=0.0 +2024-09-01 02:26:30,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108074.66666666667, ans=0.125 +2024-09-01 02:26:30,856 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 100, loss[loss=0.2047, simple_loss=0.1873, pruned_loss=0.07253, ctc_loss=0.1584, over 19194.00 frames. ], tot_loss[loss=0.217, simple_loss=0.1997, pruned_loss=0.07696, ctc_loss=0.1602, over 1473409.16 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:50,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=108074.66666666667, ans=0.0 +2024-09-01 02:26:57,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.07 vs. limit=15.0 +2024-09-01 02:27:31,272 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-12.pt +2024-09-01 02:27:35,879 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616) +2024-09-01 02:27:35,879 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:27:35,906 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:27:44,307 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 0, loss[loss=0.2553, simple_loss=0.2229, pruned_loss=0.1014, ctc_loss=0.1895, over 18643.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.2229, pruned_loss=0.1014, ctc_loss=0.1895, over 18643.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:27:44,308 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:27:51,667 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([7.0065, 6.7918, 6.6442, 6.7461], device='cuda:0') +2024-09-01 02:28:07,302 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 13, validation: loss=0.1981, simple_loss=0.19, pruned_loss=0.06934, ctc_loss=0.1316, over 1073944.00 frames. 
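
Around each checkpoint the script logs a pair of byte counts before and after clearing the CUDA cache: `(1126891520, 34072559616)` is roughly 1 GiB free of a 32 GiB device, and `(29576855552, 34072559616)` roughly 27.5 GiB free afterwards. That pattern is consistent with — though the diff does not confirm — the `(free, total)` tuple returned by `torch.cuda.mem_get_info()` bracketing a `torch.cuda.empty_cache()` call at each epoch boundary:

```python
import torch

def log_free_memory(tag: str) -> None:
    # torch.cuda.mem_get_info() returns (free_bytes, total_bytes) for the device.
    free, total = torch.cuda.mem_get_info()
    print(tag, (free, total))

if torch.cuda.is_available():
    log_free_memory("before")   # e.g. (1126891520, 34072559616): allocator cache nearly full
    torch.cuda.empty_cache()    # return cached allocator blocks to the driver
    log_free_memory("after")    # e.g. (29576855552, 34072559616): ~27.5 GiB free again
```

Emptying the cache does not free live tensors; it only hands the allocator's unused cached blocks back to the driver, which is why the "Maximum memory allocated" figure (26683MB) stays constant across epochs while the free-memory reading jumps.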
+2024-09-01 02:28:07,303 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:28:33,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 02:28:45,350 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:28:57,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108336.0, ans=0.0 +2024-09-01 02:29:01,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=108336.0, ans=0.09899494936611666 +2024-09-01 02:29:47,493 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 50, loss[loss=0.2022, simple_loss=0.1926, pruned_loss=0.06639, ctc_loss=0.1605, over 19011.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.1995, pruned_loss=0.07699, ctc_loss=0.162, over 829773.70 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:29:48,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=108496.0, ans=0.09899494936611666 +2024-09-01 02:29:59,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=108496.0, ans=0.125 +2024-09-01 02:30:01,816 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 1.921e+02 2.017e+02 2.151e+02 2.785e+02, threshold=4.034e+02, percent-clipped=0.0 +2024-09-01 02:30:07,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.38 vs. limit=22.5 +2024-09-01 02:30:24,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=108549.33333333333, ans=0.025 +2024-09-01 02:30:34,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108602.66666666667, ans=0.125 +2024-09-01 02:30:51,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108656.0, ans=0.0 +2024-09-01 02:31:01,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.83 vs. limit=15.0 +2024-09-01 02:31:03,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=108656.0, ans=0.0 +2024-09-01 02:31:09,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 02:31:25,778 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 100, loss[loss=0.1709, simple_loss=0.1634, pruned_loss=0.05655, ctc_loss=0.1361, over 19225.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.1975, pruned_loss=0.07526, ctc_loss=0.1582, over 1474982.28 frames. 
], batch size: 144, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:31:26,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108762.66666666667, ans=0.1 +2024-09-01 02:31:34,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=108762.66666666667, ans=0.05 +2024-09-01 02:31:40,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 02:31:48,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:31:53,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:32:01,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:32:19,307 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-13.pt +2024-09-01 02:32:50,905 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 02:32:50,905 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:32:50,932 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:33:00,707 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 0, loss[loss=0.2195, simple_loss=0.21, pruned_loss=0.07698, ctc_loss=0.1585, over 18695.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.21, pruned_loss=0.07698, ctc_loss=0.1585, over 18695.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:33:00,707 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:33:44,936 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 14, validation: loss=0.1924, simple_loss=0.1871, pruned_loss=0.06768, ctc_loss=0.1293, over 1073944.00 frames. +2024-09-01 02:33:44,937 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:34:45,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.06 vs. limit=6.0 +2024-09-01 02:34:50,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109024.0, ans=0.125 +2024-09-01 02:35:04,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=109024.0, ans=0.0 +2024-09-01 02:35:09,747 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.754e+02 1.893e+02 1.977e+02 2.192e+02 2.916e+02, threshold=3.954e+02, percent-clipped=0.0 +2024-09-01 02:35:46,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.97 vs. limit=15.0 +2024-09-01 02:35:57,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. 
limit=6.0 +2024-09-01 02:36:00,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=109130.66666666667, ans=0.0 +2024-09-01 02:36:12,035 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 50, loss[loss=0.1972, simple_loss=0.1935, pruned_loss=0.06447, ctc_loss=0.1529, over 18964.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.1941, pruned_loss=0.07394, ctc_loss=0.1576, over 828263.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:32,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:36:46,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:37:18,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109237.33333333333, ans=0.1 +2024-09-01 02:37:22,008 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:37:51,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109344.0, ans=0.125 +2024-09-01 02:38:02,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=109344.0, ans=22.5 +2024-09-01 02:38:12,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.05 vs. limit=10.0 +2024-09-01 02:38:41,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=109450.66666666667, ans=0.0 +2024-09-01 02:38:42,947 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 100, loss[loss=0.1876, simple_loss=0.1769, pruned_loss=0.06694, ctc_loss=0.1447, over 19207.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.1935, pruned_loss=0.07294, ctc_loss=0.1541, over 1474261.28 frames. 
], batch size: 144, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:21,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109504.0, ans=0.125 +2024-09-01 02:39:29,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=109557.33333333333, ans=0.125 +2024-09-01 02:39:29,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=109557.33333333333, ans=0.05 +2024-09-01 02:39:50,920 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 1.903e+02 1.972e+02 2.079e+02 2.713e+02, threshold=3.943e+02, percent-clipped=0.0 +2024-09-01 02:39:50,964 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-14.pt +2024-09-01 02:39:55,362 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 02:39:55,363 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:39:55,390 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:40:03,794 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 0, loss[loss=0.2503, simple_loss=0.2338, pruned_loss=0.09422, ctc_loss=0.1799, over 18509.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.2338, pruned_loss=0.09422, ctc_loss=0.1799, over 18509.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:40:03,795 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:40:34,881 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 15, validation: loss=0.1871, simple_loss=0.1844, pruned_loss=0.06629, ctc_loss=0.1271, over 1073944.00 frames. +2024-09-01 02:40:34,881 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:40:40,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=109605.33333333333, ans=0.125 +2024-09-01 02:40:51,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=109605.33333333333, ans=10.0 +2024-09-01 02:40:55,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=109605.33333333333, ans=0.125 +2024-09-01 02:41:56,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=109712.0, ans=0.0 +2024-09-01 02:42:09,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=109765.33333333333, ans=0.125 +2024-09-01 02:42:17,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=109765.33333333333, ans=0.0 +2024-09-01 02:42:31,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=109818.66666666667, ans=0.0 +2024-09-01 02:42:48,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=109818.66666666667, ans=0.2 +2024-09-01 02:43:06,231 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 50, loss[loss=0.1895, simple_loss=0.1866, pruned_loss=0.06509, ctc_loss=0.143, over 19011.00 frames. 
], tot_loss[loss=0.1987, simple_loss=0.1898, pruned_loss=0.07055, ctc_loss=0.1526, over 827942.50 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:43:21,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109872.0, ans=0.1 +2024-09-01 02:44:02,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=109925.33333333333, ans=0.125 +2024-09-01 02:44:02,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=109925.33333333333, ans=0.125 +2024-09-01 02:45:16,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110085.33333333333, ans=0.1 +2024-09-01 02:45:26,968 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.707e+02 1.886e+02 2.042e+02 2.162e+02 2.644e+02, threshold=4.084e+02, percent-clipped=0.0 +2024-09-01 02:45:29,588 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 100, loss[loss=0.1667, simple_loss=0.1683, pruned_loss=0.0525, ctc_loss=0.1414, over 19251.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.1885, pruned_loss=0.07, ctc_loss=0.1499, over 1473903.80 frames. ], batch size: 144, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:46:37,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=110245.33333333333, ans=0.125 +2024-09-01 02:46:46,673 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-15.pt +2024-09-01 02:46:52,333 INFO [dysarthria_finetune.py:1435] (0/4) (1124794368, 34072559616) +2024-09-01 02:46:52,333 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:46:52,362 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:47:00,713 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 0, loss[loss=0.2209, simple_loss=0.2124, pruned_loss=0.08209, ctc_loss=0.1577, over 18729.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2124, pruned_loss=0.08209, ctc_loss=0.1577, over 18729.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:47:00,714 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:47:23,694 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 16, validation: loss=0.182, simple_loss=0.1819, pruned_loss=0.06496, ctc_loss=0.1251, over 1073944.00 frames. +2024-09-01 02:47:23,694 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:47:37,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.32 vs. limit=15.0 +2024-09-01 02:47:46,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=17.85 vs. 
limit=15.0 +2024-09-01 02:48:19,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110400.0, ans=0.125 +2024-09-01 02:48:43,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=110506.66666666667, ans=0.025 +2024-09-01 02:49:04,112 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 50, loss[loss=0.1632, simple_loss=0.1731, pruned_loss=0.05215, ctc_loss=0.1206, over 18988.00 frames. ], tot_loss[loss=0.196, simple_loss=0.1891, pruned_loss=0.07039, ctc_loss=0.1519, over 828175.61 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:49:05,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.21 vs. limit=22.5 +2024-09-01 02:49:43,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=110666.66666666667, ans=0.025 +2024-09-01 02:49:43,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-09-01 02:49:44,011 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 1.879e+02 1.996e+02 2.191e+02 2.692e+02, threshold=3.992e+02, percent-clipped=0.0 +2024-09-01 02:49:45,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110666.66666666667, ans=0.1 +2024-09-01 02:50:29,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=110773.33333333333, ans=0.2 +2024-09-01 02:50:31,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.12 vs. limit=15.0 +2024-09-01 02:50:39,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=110773.33333333333, ans=0.125 +2024-09-01 02:50:42,118 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 100, loss[loss=0.1836, simple_loss=0.1767, pruned_loss=0.06443, ctc_loss=0.154, over 19270.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.1868, pruned_loss=0.06936, ctc_loss=0.1489, over 1473314.28 frames. ], batch size: 144, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:14,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110880.0, ans=0.1 +2024-09-01 02:51:35,763 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-16.pt +2024-09-01 02:51:40,126 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616) +2024-09-01 02:51:40,126 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 02:51:40,156 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 02:51:50,596 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 0, loss[loss=0.207, simple_loss=0.1938, pruned_loss=0.07901, ctc_loss=0.1552, over 18739.00 frames. ], tot_loss[loss=0.207, simple_loss=0.1938, pruned_loss=0.07901, ctc_loss=0.1552, over 18739.00 frames. 
], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:50,597 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 02:51:59,247 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.2399, 5.1477, 5.1967, 5.1140], device='cuda:0') +2024-09-01 02:52:13,655 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 17, validation: loss=0.1784, simple_loss=0.1796, pruned_loss=0.06394, ctc_loss=0.1232, over 1073944.00 frames. +2024-09-01 02:52:13,655 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 02:52:31,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=110981.33333333333, ans=0.0 +2024-09-01 02:52:38,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=111034.66666666667, ans=0.05 +2024-09-01 02:52:52,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=111034.66666666667, ans=0.2 +2024-09-01 02:52:52,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 02:53:21,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=111088.0, ans=0.0 +2024-09-01 02:53:59,382 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 1.870e+02 1.982e+02 2.091e+02 2.808e+02, threshold=3.964e+02, percent-clipped=0.0 +2024-09-01 02:54:03,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0 +2024-09-01 02:54:15,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=111194.66666666667, ans=0.2 +2024-09-01 02:54:24,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=111194.66666666667, ans=0.2 +2024-09-01 02:54:35,608 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 50, loss[loss=0.1962, simple_loss=0.1953, pruned_loss=0.06817, ctc_loss=0.152, over 19028.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.1837, pruned_loss=0.06735, ctc_loss=0.1462, over 827378.67 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:54:38,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111248.0, ans=0.1 +2024-09-01 02:56:18,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=111354.66666666667, ans=0.0 +2024-09-01 02:57:05,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.91 vs. limit=15.0 +2024-09-01 02:57:16,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111408.0, ans=0.1 +2024-09-01 02:57:24,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.35 vs. 
limit=12.0 +2024-09-01 02:58:16,892 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 100, loss[loss=0.1594, simple_loss=0.1594, pruned_loss=0.05394, ctc_loss=0.129, over 19218.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.1848, pruned_loss=0.06842, ctc_loss=0.1472, over 1473529.96 frames. ], batch size: 144, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:58:35,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.97 vs. limit=15.0 +2024-09-01 02:58:59,921 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:59:04,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.94 vs. limit=15.0 +2024-09-01 03:00:07,747 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-17.pt +2024-09-01 03:00:12,514 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616) +2024-09-01 03:00:12,514 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:00:12,540 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 03:00:21,385 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 0, loss[loss=0.204, simple_loss=0.1988, pruned_loss=0.07543, ctc_loss=0.1456, over 18538.00 frames. ], tot_loss[loss=0.204, simple_loss=0.1988, pruned_loss=0.07543, ctc_loss=0.1456, over 18538.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:00:21,386 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:01:08,459 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 18, validation: loss=0.1758, simple_loss=0.1773, pruned_loss=0.06291, ctc_loss=0.1213, over 1073944.00 frames. +2024-09-01 03:01:08,459 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 03:01:40,353 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.79 vs. limit=6.0 +2024-09-01 03:02:04,459 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 1.863e+02 1.965e+02 2.122e+02 2.833e+02, threshold=3.929e+02, percent-clipped=0.0 +2024-09-01 03:02:06,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.37 vs. limit=10.0 +2024-09-01 03:02:11,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=111722.66666666667, ans=0.125 +2024-09-01 03:02:11,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.68 vs. limit=15.0 +2024-09-01 03:04:43,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.38 vs. limit=15.0 +2024-09-01 03:04:48,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. 
limit=15.0 +2024-09-01 03:05:51,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=111882.66666666667, ans=0.0 +2024-09-01 03:05:55,549 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 50, loss[loss=0.1689, simple_loss=0.1749, pruned_loss=0.05463, ctc_loss=0.1339, over 18998.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.1827, pruned_loss=0.06641, ctc_loss=0.1443, over 828205.61 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:06:04,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111936.0, ans=0.125 +2024-09-01 03:07:21,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111989.33333333333, ans=0.125 +2024-09-01 03:08:50,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=112042.66666666667, ans=0.2 +2024-09-01 03:09:05,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=112096.0, ans=0.2 +2024-09-01 03:09:40,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=112149.33333333333, ans=0.125 +2024-09-01 03:11:05,718 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 100, loss[loss=0.1452, simple_loss=0.1576, pruned_loss=0.04325, ctc_loss=0.1158, over 19294.00 frames. ], tot_loss[loss=0.184, simple_loss=0.1813, pruned_loss=0.06508, ctc_loss=0.1413, over 1473690.24 frames. ], batch size: 144, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:11:50,913 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.87 vs. limit=15.0 +2024-09-01 03:11:51,851 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.826e+02 1.931e+02 2.035e+02 3.279e+02, threshold=3.861e+02, percent-clipped=0.0 +2024-09-01 03:12:03,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112256.0, ans=0.0 +2024-09-01 03:12:22,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=112309.33333333333, ans=0.2 +2024-09-01 03:12:26,089 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-18.pt +2024-09-01 03:12:30,500 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 03:12:30,500 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:12:30,529 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 03:12:38,680 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 0, loss[loss=0.2037, simple_loss=0.2001, pruned_loss=0.07511, ctc_loss=0.1428, over 18598.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2001, pruned_loss=0.07511, ctc_loss=0.1428, over 18598.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:12:38,681 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:13:02,312 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 19, validation: loss=0.1735, simple_loss=0.1751, pruned_loss=0.06201, ctc_loss=0.1194, over 1073944.00 frames. 
+2024-09-01 03:13:02,313 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 03:13:13,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.30 vs. limit=15.0 +2024-09-01 03:13:25,224 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:13:57,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=112458.66666666667, ans=0.0 +2024-09-01 03:13:59,466 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:14:34,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112565.33333333333, ans=0.1 +2024-09-01 03:14:35,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 03:14:46,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.86 vs. limit=22.5 +2024-09-01 03:14:48,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=112618.66666666667, ans=0.0 +2024-09-01 03:14:48,750 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 50, loss[loss=0.1746, simple_loss=0.1724, pruned_loss=0.06082, ctc_loss=0.1376, over 19038.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.1771, pruned_loss=0.06374, ctc_loss=0.1402, over 827203.46 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:15:06,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112618.66666666667, ans=0.1 +2024-09-01 03:15:22,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-09-01 03:15:28,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.53 vs. limit=15.0 +2024-09-01 03:16:00,811 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 1.859e+02 1.957e+02 2.051e+02 3.574e+02, threshold=3.914e+02, percent-clipped=0.0 +2024-09-01 03:16:12,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.whiten.whitening_limit, batch_count=112832.0, ans=12.0 +2024-09-01 03:16:28,720 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 100, loss[loss=0.1598, simple_loss=0.1632, pruned_loss=0.05302, ctc_loss=0.1259, over 19274.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.1768, pruned_loss=0.0643, ctc_loss=0.1406, over 1472434.33 frames. ], batch size: 144, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:16:30,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.43 vs. 
limit=22.5 +2024-09-01 03:17:01,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112938.66666666667, ans=0.0 +2024-09-01 03:17:22,977 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-19.pt +2024-09-01 03:17:28,914 INFO [dysarthria_finetune.py:1435] (0/4) (1126891520, 34072559616) +2024-09-01 03:17:28,914 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:17:28,942 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 03:17:37,119 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 0, loss[loss=0.1694, simple_loss=0.1732, pruned_loss=0.05904, ctc_loss=0.1189, over 18599.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.1732, pruned_loss=0.05904, ctc_loss=0.1189, over 18599.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:17:37,120 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 03:18:00,782 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 20, validation: loss=0.1713, simple_loss=0.1732, pruned_loss=0.06117, ctc_loss=0.1175, over 1073944.00 frames. +2024-09-01 03:18:00,783 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26683MB +2024-09-01 03:19:07,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113200.0, ans=0.1 +2024-09-01 03:19:09,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=113200.0, ans=0.025 +2024-09-01 03:19:11,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=113200.0, ans=0.125 +2024-09-01 03:19:32,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=113253.33333333333, ans=0.2 +2024-09-01 03:19:39,006 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 50, loss[loss=0.1865, simple_loss=0.1815, pruned_loss=0.06666, ctc_loss=0.1454, over 18985.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.1783, pruned_loss=0.06582, ctc_loss=0.1418, over 828130.18 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:19:42,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.82 vs. limit=15.0 +2024-09-01 03:19:50,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.25 vs. limit=6.0 +2024-09-01 03:19:52,480 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 1.810e+02 1.894e+02 2.049e+02 3.111e+02, threshold=3.788e+02, percent-clipped=0.0 +2024-09-01 03:20:47,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=113466.66666666667, ans=0.2 +2024-09-01 03:20:56,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.04 vs. 
limit=15.0 +2024-09-01 03:20:58,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=113520.0, ans=0.125 +2024-09-01 03:21:15,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-09-01 03:21:15,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.17 vs. limit=22.5 +2024-09-01 03:21:15,875 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 100, loss[loss=0.1754, simple_loss=0.1686, pruned_loss=0.0633, ctc_loss=0.1389, over 19321.00 frames. ], tot_loss[loss=0.18, simple_loss=0.1764, pruned_loss=0.06423, ctc_loss=0.1377, over 1472900.97 frames. ], batch size: 144, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:21:22,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-09-01 03:21:28,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113573.33333333333, ans=0.125 +2024-09-01 03:21:41,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-09-01 03:21:50,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 03:22:08,714 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune/epoch-20.pt +2024-09-01 03:22:13,061 INFO [dysarthria_finetune.py:1435] (0/4) (1128988672, 34072559616) +2024-09-01 03:22:13,061 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 03:22:13,088 INFO [dysarthria_finetune.py:1440] (0/4) (29576855552, 34072559616) +2024-09-01 03:22:13,089 INFO [dysarthria_finetune.py:1442] (0/4) Done! 
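The paired byte counts logged at `dysarthria_finetune.py:1435`/`1440` around each `Empty cache: before and after` message (about 1.1e9 bytes free before, 2.96e10 free after, of 3.41e10 total) match the `(free_bytes, total_bytes)` tuple returned by `torch.cuda.mem_get_info()`. A hedged reconstruction of that epoch-boundary bookkeeping, assuming that is indeed the call being logged:

```python
# Reconstruction of the epoch-boundary memory bookkeeping, assuming the
# logged pairs are torch.cuda.mem_get_info() -> (free_bytes, total_bytes).
import logging
import torch

logging.basicConfig(level=logging.INFO)

def log_device_memory() -> None:
    free, total = torch.cuda.mem_get_info()  # bytes on the current device
    logging.info("(%d, %d)  ~%.1f GiB free", free, total, free / 2**30)

if torch.cuda.is_available():
    log_device_memory()            # typically little free memory here ...
    logging.info("Empty cache: before and after")
    torch.cuda.empty_cache()       # return cached allocator blocks to the driver
    log_device_memory()            # ... and much more afterwards
```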
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-1 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-1 new file mode 100644 index 0000000000000000000000000000000000000000..baaebd12d9c1c96c0b1a04d245a8b648b78c9576 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-1 @@ -0,0 +1,543 @@ +2024-08-31 22:13:17,922 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-31 22:13:17,923 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616) +2024-08-31 22:13:17,923 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-31 22:13:18,699 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616) +2024-08-31 22:13:18,699 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-31 22:13:18,725 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 
'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:13:18,725 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-31 22:13:21,118 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65805511 +2024-08-31 22:13:23,245 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-31 22:13:33,286 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-31 22:14:37,197 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-31 22:14:37,263 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-31 22:14:55,323 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-31 22:14:56,287 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-31 22:14:56,288 INFO [dysarthria_asr_datamodule.py:501] (1/4) About to get dev cuts +2024-08-31 22:14:56,477 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-31 22:14:57,473 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-31 22:14:57,473 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:16:23,768 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=18.54 vs. limit=7.5 +2024-08-31 22:16:31,021 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=17.58 vs. limit=7.5 +2024-08-31 22:16:34,170 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:16:36,238 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:17:53,351 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:17:55,319 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:19:46,278 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:19:48,452 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:20:26,655 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.3929, simple_loss=0.3185, pruned_loss=0.1863, ctc_loss=0.2785, over 18549.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.3185, pruned_loss=0.1863, ctc_loss=0.2785, over 18549.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:26,655 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-31 22:32:57,020 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.373, simple_loss=0.3046, pruned_loss=0.1755, ctc_loss=0.2544, over 1073944.00 frames. 
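This run was launched with `'use_fp16': True`, and the `grad_scale` field in the loss records climbs from 2.0 here through 32.0 in later epochs: the signature of dynamic loss scaling, which grows the scale while optimizer steps keep succeeding and backs it off on overflow. A generic PyTorch sketch of that mechanism (illustrative only; the model, data, and growth interval are placeholders, not the fine-tuning recipe):

```python
# Generic mixed-precision loop illustrating the growing grad_scale values;
# model and data are placeholders, and growth_interval is shortened so the
# doubling is visible within a short demo run.
import torch
from torch.cuda.amp import GradScaler, autocast

model = torch.nn.Linear(80, 500).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
scaler = GradScaler(init_scale=2.0, growth_interval=100)  # log starts at 2.0

for step in range(500):
    features = torch.randn(16, 80, device="cuda")
    optimizer.zero_grad(set_to_none=True)
    with autocast():                  # forward pass in float16
        loss = model(features).square().mean()
    scaler.scale(loss).backward()     # scale up to avoid fp16 gradient underflow
    scaler.step(optimizer)            # unscales first; skips the step on inf/nan
    scaler.update()                   # doubles the scale after enough good steps
    if step % 100 == 0:
        print(step, scaler.get_scale())   # 2.0 -> 4.0 -> 8.0 -> ...
```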
+2024-08-31 22:32:57,063 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-31 22:34:49,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=21.81 vs. limit=22.5 +2024-08-31 22:36:04,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=12.0 +2024-08-31 22:48:03,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-31 22:51:26,991 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.923e+02 1.157e+03 1.203e+03 1.280e+03 1.380e+03, threshold=4.812e+03, percent-clipped=0.0 +2024-08-31 22:55:46,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=36.70 vs. limit=15.0 +2024-08-31 23:01:51,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=25.04 vs. limit=15.0 +2024-08-31 23:03:06,378 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.113e+02 1.083e+03 1.198e+03 1.280e+03 1.431e+03, threshold=4.794e+03, percent-clipped=0.0 +2024-08-31 23:16:33,798 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=34.78 vs. limit=22.5 +2024-08-31 23:20:08,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.95 vs. limit=15.0 +2024-08-31 23:29:32,325 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.285e+02 9.052e+02 1.061e+03 1.198e+03 1.431e+03, threshold=4.243e+03, percent-clipped=0.0 +2024-08-31 23:46:25,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-31 23:47:34,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.72 vs. limit=15.0 +2024-08-31 23:47:34,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=20.39 vs. limit=15.0 +2024-08-31 23:49:30,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=100266.66666666667, ans=0.05 +2024-08-31 23:49:33,571 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.4062, simple_loss=0.327, pruned_loss=0.191, ctc_loss=0.2993, over 19042.00 frames. ], tot_loss[loss=0.4112, simple_loss=0.3319, pruned_loss=0.1999, ctc_loss=0.2955, over 827432.33 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 23:52:55,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.86 vs. 
limit=6.0 +2024-08-31 23:53:24,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-09-01 00:08:29,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=100373.33333333333, ans=0.125 +2024-09-01 00:09:15,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=100373.33333333333, ans=0.2 +2024-09-01 00:11:15,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.54 vs. limit=6.0 +2024-09-01 00:16:02,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.94 vs. limit=15.0 +2024-09-01 00:18:30,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. limit=6.0 +2024-09-01 00:18:31,037 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+02 6.817e+02 8.321e+02 1.009e+03 1.431e+03, threshold=1.664e+03, percent-clipped=0.0 +2024-09-01 00:18:31,086 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.3915, simple_loss=0.3186, pruned_loss=0.1807, ctc_loss=0.2733, over 19093.00 frames. ], tot_loss[loss=0.3948, simple_loss=0.3199, pruned_loss=0.1879, ctc_loss=0.2794, over 1470684.91 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:19:42,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=22.77 vs. limit=15.0 +2024-09-01 00:27:43,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100640.0, ans=0.1 +2024-09-01 00:27:57,066 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.96 vs. limit=22.5 +2024-09-01 00:28:35,455 INFO [dysarthria_finetune.py:1435] (1/4) (13953073152, 34072559616) +2024-09-01 00:28:35,455 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 00:28:35,511 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 00:29:13,534 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 0, loss[loss=0.3314, simple_loss=0.2697, pruned_loss=0.1393, ctc_loss=0.2423, over 18746.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.2697, pruned_loss=0.1393, ctc_loss=0.2423, over 18746.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:29:13,534 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 00:34:07,572 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 2, validation: loss=0.3353, simple_loss=0.2773, pruned_loss=0.1482, ctc_loss=0.2175, over 1073944.00 frames. +2024-09-01 00:34:07,573 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 00:35:35,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.59 vs. 
limit=15.0 +2024-09-01 00:51:25,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=100789.33333333333, ans=0.125 +2024-09-01 00:52:52,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=12.44 vs. limit=12.0 +2024-09-01 00:55:13,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100842.66666666667, ans=0.125 +2024-09-01 00:57:01,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=100896.0, ans=0.09899494936611666 +2024-09-01 00:59:42,909 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 50, loss[loss=0.4006, simple_loss=0.3255, pruned_loss=0.1851, ctc_loss=0.2811, over 19071.00 frames. ], tot_loss[loss=0.3719, simple_loss=0.3032, pruned_loss=0.1698, ctc_loss=0.2585, over 827854.65 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 01:00:10,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100949.33333333333, ans=0.1 +2024-09-01 01:03:11,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100949.33333333333, ans=0.1 +2024-09-01 01:08:04,423 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.183e+02 4.403e+02 5.126e+02 5.917e+02 6.888e+02, threshold=1.025e+03, percent-clipped=0.0 +2024-09-01 01:08:14,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=17.19 vs. limit=15.0 +2024-09-01 01:09:14,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=101056.0, ans=0.2 +2024-09-01 01:10:03,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-09-01 01:10:54,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=101109.33333333333, ans=0.025 +2024-09-01 01:13:05,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101162.66666666667, ans=0.125 +2024-09-01 01:14:19,328 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 100, loss[loss=0.3548, simple_loss=0.2915, pruned_loss=0.1566, ctc_loss=0.2409, over 19090.00 frames. ], tot_loss[loss=0.362, simple_loss=0.2959, pruned_loss=0.1629, ctc_loss=0.2501, over 1472213.55 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 01:15:26,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101216.0, ans=0.1 +2024-09-01 01:18:34,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=101322.66666666667, ans=0.125 +2024-09-01 01:18:48,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101322.66666666667, ans=0.125 +2024-09-01 01:19:37,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.92 vs. 
limit=15.0 +2024-09-01 01:20:51,904 INFO [dysarthria_finetune.py:1435] (1/4) (1095434240, 34072559616) +2024-09-01 01:20:51,905 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:20:51,973 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 01:20:59,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-09-01 01:21:21,466 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 0, loss[loss=0.3737, simple_loss=0.3051, pruned_loss=0.1704, ctc_loss=0.2575, over 18511.00 frames. ], tot_loss[loss=0.3737, simple_loss=0.3051, pruned_loss=0.1704, ctc_loss=0.2575, over 18511.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:21:21,466 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:21:44,699 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 3, validation: loss=0.309, simple_loss=0.2588, pruned_loss=0.13, ctc_loss=0.1938, over 1073944.00 frames. +2024-09-01 01:21:44,700 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 01:22:18,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=101370.66666666667, ans=0.125 +2024-09-01 01:22:28,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=101370.66666666667, ans=0.2 +2024-09-01 01:22:38,673 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:22:55,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=101424.0, ans=0.0 +2024-09-01 01:23:20,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=101477.33333333333, ans=0.0 +2024-09-01 01:23:38,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101477.33333333333, ans=0.125 +2024-09-01 01:23:38,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101477.33333333333, ans=0.1 +2024-09-01 01:23:47,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=101477.33333333333, ans=0.0 +2024-09-01 01:24:26,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=101584.0, ans=0.125 +2024-09-01 01:24:27,010 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.351e+02 3.834e+02 4.204e+02 5.264e+02, threshold=7.667e+02, percent-clipped=0.0 +2024-09-01 01:24:44,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=101584.0, ans=0.2 +2024-09-01 01:24:49,028 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 50, loss[loss=0.343, simple_loss=0.2836, pruned_loss=0.1457, ctc_loss=0.2326, over 19005.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.2851, pruned_loss=0.1519, ctc_loss=0.2398, over 828905.42 frames. 
], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:24:50,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=101637.33333333333, ans=0.2 +2024-09-01 01:24:53,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101637.33333333333, ans=0.0 +2024-09-01 01:25:42,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.50 vs. limit=15.0 +2024-09-01 01:26:05,555 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-09-01 01:26:50,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=101850.66666666667, ans=0.0 +2024-09-01 01:27:00,710 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 100, loss[loss=0.2981, simple_loss=0.2518, pruned_loss=0.1191, ctc_loss=0.1887, over 19133.00 frames. ], tot_loss[loss=0.3348, simple_loss=0.2759, pruned_loss=0.1444, ctc_loss=0.2287, over 1474266.40 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:29:20,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=102010.66666666667, ans=0.125 +2024-09-01 01:29:32,083 INFO [dysarthria_finetune.py:1435] (1/4) (1168834560, 34072559616) +2024-09-01 01:29:32,083 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:29:32,147 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 01:29:45,319 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 0, loss[loss=0.2882, simple_loss=0.2425, pruned_loss=0.1176, ctc_loss=0.1844, over 18466.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.2425, pruned_loss=0.1176, ctc_loss=0.1844, over 18466.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:29:45,319 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:30:08,497 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 4, validation: loss=0.2887, simple_loss=0.2447, pruned_loss=0.1169, ctc_loss=0.1781, over 1073944.00 frames. 
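A consistency check on the `optim.py:487` warnings: the five `grad-norm quartiles` read as (min, 25%, median, 75%, max) of a recent gradient-norm history, and in every record the reported `threshold` is exactly `Clipping_scale` (2.0) times the median, e.g. 3.147e+02 * 2 = 6.294e+02 above. The arithmetic, reconstructed from the logged numbers (this is not icefall's optimizer code):

```python
# Reconstruction of the quantities in the Clipping_scale warnings: quartiles
# of a window of recent gradient norms, and threshold = clipping_scale * median.
import torch

def clipping_summary(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    probs = torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])
    quartiles = torch.quantile(grad_norms, probs)      # min, 25%, median, 75%, max
    threshold = clipping_scale * quartiles[2].item()   # 2 x median, as logged
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean().item()
    return quartiles.tolist(), threshold, percent_clipped

# Stand-in history; with well-behaved norms nothing exceeds 2 x median,
# hence the persistent "percent-clipped=0.0" in the log.
norms = 250.0 + 100.0 * torch.rand(128)
print(clipping_summary(norms))
```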
+2024-09-01 01:30:08,498 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 01:30:42,266 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.378e+02 2.838e+02 3.147e+02 3.460e+02 5.318e+02, threshold=6.294e+02, percent-clipped=0.0 +2024-09-01 01:30:49,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102106.66666666667, ans=0.125 +2024-09-01 01:31:02,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=102160.0, ans=0.0 +2024-09-01 01:31:08,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:31:27,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102213.33333333333, ans=0.125 +2024-09-01 01:31:40,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.82 vs. limit=22.5 +2024-09-01 01:32:01,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102320.0, ans=0.125 +2024-09-01 01:32:02,221 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 50, loss[loss=0.2984, simple_loss=0.2506, pruned_loss=0.1259, ctc_loss=0.1891, over 18961.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.2639, pruned_loss=0.1315, ctc_loss=0.2153, over 827373.05 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:32:11,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=14.39 vs. limit=12.0 +2024-09-01 01:32:19,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=102320.0, ans=0.0 +2024-09-01 01:32:53,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0 +2024-09-01 01:32:54,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=102426.66666666667, ans=0.125 +2024-09-01 01:32:56,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=102426.66666666667, ans=0.0 +2024-09-01 01:33:22,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102480.0, ans=0.0 +2024-09-01 01:35:51,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102480.0, ans=0.125 +2024-09-01 01:35:51,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.08 vs. limit=15.0 +2024-09-01 01:37:33,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=102533.33333333333, ans=0.025 +2024-09-01 01:37:40,028 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 100, loss[loss=0.3159, simple_loss=0.2645, pruned_loss=0.1296, ctc_loss=0.2111, over 19038.00 frames. 
], tot_loss[loss=0.3123, simple_loss=0.2598, pruned_loss=0.1296, ctc_loss=0.2125, over 1472261.06 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:37:59,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.78 vs. limit=22.5 +2024-09-01 01:38:03,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=102640.0, ans=0.025 +2024-09-01 01:38:09,924 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 2.526e+02 2.751e+02 3.040e+02 4.636e+02, threshold=5.501e+02, percent-clipped=0.0 +2024-09-01 01:38:25,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.79 vs. limit=15.0 +2024-09-01 01:38:35,851 INFO [dysarthria_finetune.py:1435] (1/4) (2173370368, 34072559616) +2024-09-01 01:38:35,851 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:38:35,919 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 01:38:49,739 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 0, loss[loss=0.2775, simple_loss=0.2361, pruned_loss=0.1081, ctc_loss=0.1796, over 18670.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.2361, pruned_loss=0.1081, ctc_loss=0.1796, over 18670.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:38:49,739 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:39:30,980 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 5, validation: loss=0.2717, simple_loss=0.233, pruned_loss=0.1066, ctc_loss=0.1665, over 1073944.00 frames. +2024-09-01 01:39:30,981 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 01:40:54,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102741.33333333333, ans=0.125 +2024-09-01 01:41:17,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=102794.66666666667, ans=0.025 +2024-09-01 01:42:51,067 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.16 vs. limit=15.0 +2024-09-01 01:43:09,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=102901.33333333333, ans=0.0 +2024-09-01 01:43:55,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.02 vs. limit=22.5 +2024-09-01 01:43:55,509 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 50, loss[loss=0.2851, simple_loss=0.241, pruned_loss=0.116, ctc_loss=0.185, over 18968.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.2505, pruned_loss=0.1195, ctc_loss=0.2029, over 828630.89 frames. 
], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:44:04,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103008.0, ans=0.0 +2024-09-01 01:44:45,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103061.33333333333, ans=0.1 +2024-09-01 01:44:49,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103061.33333333333, ans=0.1 +2024-09-01 01:45:12,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:45:56,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103168.0, ans=0.125 +2024-09-01 01:46:04,699 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 2.382e+02 2.524e+02 2.770e+02 4.371e+02, threshold=5.047e+02, percent-clipped=0.0 +2024-09-01 01:46:25,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=24.09 vs. limit=15.0 +2024-09-01 01:46:32,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-09-01 01:46:52,901 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 100, loss[loss=0.2655, simple_loss=0.2286, pruned_loss=0.1001, ctc_loss=0.172, over 19157.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.2463, pruned_loss=0.1174, ctc_loss=0.1992, over 1473409.40 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:47:10,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:48:04,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.31 vs. limit=6.0 +2024-09-01 01:48:26,057 INFO [dysarthria_finetune.py:1435] (1/4) (434831360, 34072559616) +2024-09-01 01:48:26,057 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:48:26,120 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 01:48:42,001 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 0, loss[loss=0.3156, simple_loss=0.2616, pruned_loss=0.129, ctc_loss=0.2293, over 18435.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.2616, pruned_loss=0.129, ctc_loss=0.2293, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:48:42,002 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:49:05,146 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 6, validation: loss=0.2578, simple_loss=0.2238, pruned_loss=0.09861, ctc_loss=0.1582, over 1073944.00 frames. 
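The ubiquitous `scaling.py:214` lines report a `ScheduledFloat` value (`ans`) for a named module hyper-parameter (dropout probabilities, skip rates, balancer limits) at the current `batch_count`; that is, these knobs follow a schedule over training rather than staying fixed. A from-scratch sketch of one plausible schedule shape, piecewise-linear in the batch count (an illustration under that assumption, not icefall's `ScheduledFloat` class):

```python
# Piecewise-linear schedule over batch_count, sketching the kind of value
# the ScheduledFloat log lines report; breakpoints here are made up.
import bisect

class PiecewiseLinearSchedule:
    def __init__(self, *points: tuple[float, float]):
        self.xs = [p[0] for p in points]   # batch counts, ascending
        self.ys = [p[1] for p in points]   # values at those batch counts

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# e.g. a dropout that anneals from 0.3 at batch 0 to 0.1 after 20k batches
dropout = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout(113500.0))  # past the last breakpoint -> stays at 0.1
```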
+2024-09-01 01:49:05,146 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 01:50:06,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103530.66666666667, ans=0.1 +2024-09-01 01:50:08,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103584.0, ans=0.1 +2024-09-01 01:50:26,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103584.0, ans=0.125 +2024-09-01 01:50:44,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=103637.33333333333, ans=0.04949747468305833 +2024-09-01 01:50:52,021 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 50, loss[loss=0.2631, simple_loss=0.2241, pruned_loss=0.1011, ctc_loss=0.1811, over 19041.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.2408, pruned_loss=0.1121, ctc_loss=0.1968, over 827399.35 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:51:08,004 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 2.277e+02 2.375e+02 2.614e+02 3.891e+02, threshold=4.750e+02, percent-clipped=0.0 +2024-09-01 01:51:15,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=103744.0, ans=0.025 +2024-09-01 01:51:30,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103744.0, ans=0.1 +2024-09-01 01:52:10,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=103850.66666666667, ans=0.025 +2024-09-01 01:52:34,157 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 100, loss[loss=0.248, simple_loss=0.2116, pruned_loss=0.09546, ctc_loss=0.1716, over 19066.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.2364, pruned_loss=0.1088, ctc_loss=0.1905, over 1471849.14 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:53:08,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 01:53:08,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 01:53:17,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104010.66666666667, ans=0.1 +2024-09-01 01:53:21,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0 +2024-09-01 01:53:34,203 INFO [dysarthria_finetune.py:1435] (1/4) (2053832704, 34072559616) +2024-09-01 01:53:34,204 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:53:34,286 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 01:53:47,074 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 0, loss[loss=0.3082, simple_loss=0.2525, pruned_loss=0.132, ctc_loss=0.2269, over 18532.00 frames. ], tot_loss[loss=0.3082, simple_loss=0.2525, pruned_loss=0.132, ctc_loss=0.2269, over 18532.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:53:47,074 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:54:10,657 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 7, validation: loss=0.2464, simple_loss=0.2165, pruned_loss=0.09214, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 01:54:10,658 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 01:55:12,009 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.149e+02 2.268e+02 2.457e+02 3.821e+02, threshold=4.535e+02, percent-clipped=0.0 +2024-09-01 01:55:29,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.83 vs. limit=15.0 +2024-09-01 01:55:33,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=104320.0, ans=0.125 +2024-09-01 01:55:53,893 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 50, loss[loss=0.265, simple_loss=0.2272, pruned_loss=0.1011, ctc_loss=0.185, over 19096.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.2309, pruned_loss=0.1032, ctc_loss=0.188, over 827950.42 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:56:10,785 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:56:30,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.51 vs. limit=15.0 +2024-09-01 01:56:42,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.31 vs. limit=15.0 +2024-09-01 01:56:44,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.22 vs. limit=15.0 +2024-09-01 01:57:06,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=104533.33333333333, ans=0.125 +2024-09-01 01:57:08,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=104533.33333333333, ans=0.125 +2024-09-01 01:57:30,842 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 100, loss[loss=0.2526, simple_loss=0.2212, pruned_loss=0.09285, ctc_loss=0.1694, over 19105.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.2277, pruned_loss=0.1005, ctc_loss=0.1834, over 1472811.51 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:43,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=104640.0, ans=0.2 +2024-09-01 01:57:51,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=12.0 +2024-09-01 01:58:22,477 INFO [dysarthria_finetune.py:1435] (1/4) (12868845568, 34072559616) +2024-09-01 01:58:22,477 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 01:58:22,508 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 01:58:35,287 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 0, loss[loss=0.2397, simple_loss=0.2111, pruned_loss=0.08677, ctc_loss=0.1616, over 18679.00 frames. 
], tot_loss[loss=0.2397, simple_loss=0.2111, pruned_loss=0.08677, ctc_loss=0.1616, over 18679.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:58:35,288 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 01:58:58,374 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 8, validation: loss=0.236, simple_loss=0.2103, pruned_loss=0.08624, ctc_loss=0.1474, over 1073944.00 frames. +2024-09-01 01:58:58,374 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 01:59:02,417 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.080e+02 2.182e+02 2.331e+02 3.634e+02, threshold=4.365e+02, percent-clipped=0.0 +2024-09-01 01:59:23,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=104842.66666666667, ans=0.125 +2024-09-01 02:00:36,450 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 50, loss[loss=0.2318, simple_loss=0.2062, pruned_loss=0.08328, ctc_loss=0.1528, over 19009.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.2247, pruned_loss=0.09848, ctc_loss=0.1839, over 829068.39 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:01:05,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=12.0 +2024-09-01 02:01:20,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.23 vs. limit=12.0 +2024-09-01 02:01:26,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=105162.66666666667, ans=0.0 +2024-09-01 02:02:01,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105269.33333333333, ans=0.09899494936611666 +2024-09-01 02:02:09,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.55 vs. limit=10.0 +2024-09-01 02:02:13,404 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 100, loss[loss=0.2534, simple_loss=0.2206, pruned_loss=0.09383, ctc_loss=0.182, over 19109.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.2224, pruned_loss=0.09558, ctc_loss=0.1793, over 1473116.98 frames. 
], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:02:17,363 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.064e+02 2.191e+02 2.358e+02 3.385e+02, threshold=4.381e+02, percent-clipped=0.0 +2024-09-01 02:02:29,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105322.66666666667, ans=0.125 +2024-09-01 02:02:41,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105376.0, ans=0.0 +2024-09-01 02:02:57,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=105429.33333333333, ans=0.0 +2024-09-01 02:03:07,409 INFO [dysarthria_finetune.py:1435] (1/4) (774569984, 34072559616) +2024-09-01 02:03:07,410 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:03:07,476 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:03:20,944 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 0, loss[loss=0.2506, simple_loss=0.2184, pruned_loss=0.09615, ctc_loss=0.1711, over 18520.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.2184, pruned_loss=0.09615, ctc_loss=0.1711, over 18520.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:20,944 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:03:44,108 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 9, validation: loss=0.2267, simple_loss=0.2052, pruned_loss=0.08107, ctc_loss=0.1434, over 1073944.00 frames. +2024-09-01 02:03:44,109 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 02:03:52,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.99 vs. limit=22.5 +2024-09-01 02:04:05,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.09 vs. limit=6.0 +2024-09-01 02:04:46,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.74 vs. limit=15.0 +2024-09-01 02:04:48,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.77 vs. limit=6.0 +2024-09-01 02:04:50,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.61 vs. 
limit=10.0 +2024-09-01 02:04:52,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:05:06,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=105637.33333333333, ans=0.05 +2024-09-01 02:05:12,658 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:05:29,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=105690.66666666667, ans=0.2 +2024-09-01 02:05:38,307 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 50, loss[loss=0.2754, simple_loss=0.2433, pruned_loss=0.1011, ctc_loss=0.1925, over 19008.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.2167, pruned_loss=0.09097, ctc_loss=0.1767, over 827563.28 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:54,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=105744.0, ans=0.025 +2024-09-01 02:06:08,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=105797.33333333333, ans=0.07 +2024-09-01 02:06:22,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105797.33333333333, ans=0.1 +2024-09-01 02:06:31,765 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.876e+02 2.077e+02 2.184e+02 2.316e+02 3.584e+02, threshold=4.367e+02, percent-clipped=0.0 +2024-09-01 02:07:32,671 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 100, loss[loss=0.246, simple_loss=0.2213, pruned_loss=0.08692, ctc_loss=0.1715, over 19113.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.2162, pruned_loss=0.08993, ctc_loss=0.1746, over 1473118.75 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:07:47,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:07:54,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:08:09,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.39 vs. limit=15.0 +2024-09-01 02:08:34,530 INFO [dysarthria_finetune.py:1435] (1/4) (2156593152, 34072559616) +2024-09-01 02:08:34,611 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:08:34,680 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:08:44,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.05 vs. limit=10.0 +2024-09-01 02:08:48,132 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 0, loss[loss=0.2489, simple_loss=0.2221, pruned_loss=0.09378, ctc_loss=0.1645, over 18522.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.2221, pruned_loss=0.09378, ctc_loss=0.1645, over 18522.00 frames. 
], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:48,132 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:09:26,929 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 10, validation: loss=0.2182, simple_loss=0.2007, pruned_loss=0.07671, ctc_loss=0.1399, over 1073944.00 frames. +2024-09-01 02:09:26,929 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 02:09:55,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.33 vs. limit=22.5 +2024-09-01 02:10:02,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.78 vs. limit=22.5 +2024-09-01 02:11:12,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:11:29,302 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:11:54,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.023e+02 2.117e+02 2.323e+02 3.505e+02, threshold=4.234e+02, percent-clipped=0.0 +2024-09-01 02:12:15,828 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:12:15,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.21 vs. limit=22.5 +2024-09-01 02:12:16,364 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 50, loss[loss=0.2192, simple_loss=0.2044, pruned_loss=0.07052, ctc_loss=0.1557, over 18973.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2136, pruned_loss=0.08742, ctc_loss=0.1744, over 826863.11 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:12:24,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:13:00,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=106485.33333333333, ans=0.025 +2024-09-01 02:13:07,894 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:13:32,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:14:12,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=106592.0, ans=0.05 +2024-09-01 02:14:19,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=106592.0, ans=0.025 +2024-09-01 02:14:19,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.81 vs. limit=22.5 +2024-09-01 02:15:12,144 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 100, loss[loss=0.2319, simple_loss=0.2132, pruned_loss=0.07686, ctc_loss=0.1723, over 19188.00 frames. ], tot_loss[loss=0.235, simple_loss=0.2103, pruned_loss=0.08446, ctc_loss=0.1695, over 1472464.39 frames. 
], batch size: 134, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:15:39,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-09-01 02:16:23,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106805.33333333333, ans=0.125 +2024-09-01 02:16:29,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=106805.33333333333, ans=0.125 +2024-09-01 02:16:36,663 INFO [dysarthria_finetune.py:1435] (1/4) (14664007680, 34072559616) +2024-09-01 02:16:36,664 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:16:36,698 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:16:49,174 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 0, loss[loss=0.2333, simple_loss=0.2045, pruned_loss=0.08879, ctc_loss=0.1714, over 18704.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2045, pruned_loss=0.08879, ctc_loss=0.1714, over 18704.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:16:49,174 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:17:12,841 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 11, validation: loss=0.211, simple_loss=0.1968, pruned_loss=0.07375, ctc_loss=0.137, over 1073944.00 frames. +2024-09-01 02:17:12,841 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 02:17:43,504 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 1.989e+02 2.082e+02 2.188e+02 3.029e+02, threshold=4.165e+02, percent-clipped=0.0 +2024-09-01 02:17:44,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106906.66666666667, ans=0.125 +2024-09-01 02:17:54,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=106960.0, ans=0.025 +2024-09-01 02:17:56,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106960.0, ans=0.1 +2024-09-01 02:18:14,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=107013.33333333333, ans=0.2 +2024-09-01 02:18:36,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 02:18:38,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=12.0 +2024-09-01 02:18:40,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=107066.66666666667, ans=0.0 +2024-09-01 02:18:46,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 02:18:53,624 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 50, loss[loss=0.2338, simple_loss=0.2109, pruned_loss=0.08345, ctc_loss=0.174, over 18947.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2077, pruned_loss=0.08247, ctc_loss=0.1691, over 828704.78 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:18:56,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=107120.0, ans=0.125 +2024-09-01 02:19:01,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107120.0, ans=0.1 +2024-09-01 02:19:13,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107173.33333333333, ans=0.125 +2024-09-01 02:20:13,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=107280.0, ans=0.05 +2024-09-01 02:20:17,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.05 vs. limit=22.5 +2024-09-01 02:20:21,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.38 vs. limit=22.5 +2024-09-01 02:20:35,707 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 100, loss[loss=0.2198, simple_loss=0.1993, pruned_loss=0.07881, ctc_loss=0.1626, over 19147.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.2059, pruned_loss=0.08112, ctc_loss=0.1659, over 1473582.76 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:20:56,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:21:05,348 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.795e+02 1.934e+02 2.032e+02 2.152e+02 3.346e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-09-01 02:21:16,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.51 vs. limit=15.0 +2024-09-01 02:21:30,099 INFO [dysarthria_finetune.py:1435] (1/4) (1246429184, 34072559616) +2024-09-01 02:21:30,100 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:21:30,165 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:21:45,316 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 0, loss[loss=0.2606, simple_loss=0.224, pruned_loss=0.101, ctc_loss=0.2074, over 18735.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.224, pruned_loss=0.101, ctc_loss=0.2074, over 18735.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:21:45,317 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:22:12,229 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 12, validation: loss=0.2042, simple_loss=0.1932, pruned_loss=0.07127, ctc_loss=0.1341, over 1073944.00 frames. 
+2024-09-01 02:22:12,230 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13101MB +2024-09-01 02:22:35,407 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:22:35,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107541.33333333333, ans=0.125 +2024-09-01 02:22:58,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107594.66666666667, ans=0.125 +2024-09-01 02:23:00,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107594.66666666667, ans=0.1 +2024-09-01 02:23:27,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=107648.0, ans=0.125 +2024-09-01 02:23:33,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107701.33333333333, ans=0.1 +2024-09-01 02:24:02,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107754.66666666667, ans=0.125 +2024-09-01 02:24:12,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107754.66666666667, ans=0.125 +2024-09-01 02:24:27,768 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 50, loss[loss=0.1979, simple_loss=0.1832, pruned_loss=0.06733, ctc_loss=0.1532, over 18974.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.204, pruned_loss=0.07999, ctc_loss=0.1667, over 827168.58 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:24:55,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125 +2024-09-01 02:24:57,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=107861.33333333333, ans=0.0 +2024-09-01 02:25:24,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=107914.66666666667, ans=0.5 +2024-09-01 02:25:47,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-09-01 02:25:55,569 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.770e+02 1.958e+02 2.051e+02 2.245e+02 3.047e+02, threshold=4.102e+02, percent-clipped=0.0 +2024-09-01 02:26:29,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=108074.66666666667, ans=0.04949747468305833 +2024-09-01 02:26:30,863 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 100, loss[loss=0.1878, simple_loss=0.1852, pruned_loss=0.06015, ctc_loss=0.1288, over 19114.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.202, pruned_loss=0.07903, ctc_loss=0.1632, over 1473649.48 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:52,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=108128.0, ans=0.0 +2024-09-01 02:26:52,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108128.0, ans=0.125 +2024-09-01 02:26:53,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=108128.0, ans=0.95 +2024-09-01 02:27:01,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.55 vs. limit=6.0 +2024-09-01 02:27:06,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=12.0 +2024-09-01 02:27:31,273 INFO [dysarthria_finetune.py:1435] (1/4) (2171273216, 34072559616) +2024-09-01 02:27:31,273 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:27:31,343 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:27:44,302 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 0, loss[loss=0.2617, simple_loss=0.2277, pruned_loss=0.1046, ctc_loss=0.1939, over 18361.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.2277, pruned_loss=0.1046, ctc_loss=0.1939, over 18361.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:27:44,303 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:27:51,689 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([7.8112, 7.5258, 7.6056, 7.5671], device='cuda:1') +2024-09-01 02:28:07,304 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 13, validation: loss=0.1981, simple_loss=0.19, pruned_loss=0.06934, ctc_loss=0.1316, over 1073944.00 frames. +2024-09-01 02:28:07,305 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 02:28:21,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.87 vs. limit=15.0 +2024-09-01 02:28:33,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 02:28:41,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.08 vs. limit=15.0 +2024-09-01 02:28:45,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=108282.66666666667, ans=0.04949747468305833 +2024-09-01 02:29:01,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108336.0, ans=0.1 +2024-09-01 02:29:24,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108389.33333333333, ans=0.1 +2024-09-01 02:29:47,490 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 50, loss[loss=0.1821, simple_loss=0.1872, pruned_loss=0.05393, ctc_loss=0.127, over 19011.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.1961, pruned_loss=0.07581, ctc_loss=0.1608, over 828396.70 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:29:48,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=108496.0, ans=0.125 +2024-09-01 02:30:01,818 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 1.921e+02 2.017e+02 2.151e+02 2.785e+02, threshold=4.034e+02, percent-clipped=0.0 +2024-09-01 02:30:40,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=108602.66666666667, ans=0.125 +2024-09-01 02:30:44,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108602.66666666667, ans=0.125 +2024-09-01 02:30:46,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:30:46,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:30:55,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:31:01,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.91 vs. limit=15.0 +2024-09-01 02:31:07,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 02:31:07,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.55 vs. limit=15.0 +2024-09-01 02:31:24,904 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:31:25,776 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 100, loss[loss=0.1943, simple_loss=0.1869, pruned_loss=0.06741, ctc_loss=0.1383, over 19217.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.1958, pruned_loss=0.07534, ctc_loss=0.159, over 1472353.64 frames. 
], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:31:34,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 02:31:34,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=108762.66666666667, ans=0.2 +2024-09-01 02:31:48,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=108816.0, ans=0.025 +2024-09-01 02:31:52,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=108816.0, ans=0.2 +2024-09-01 02:31:53,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=108816.0, ans=0.0 +2024-09-01 02:31:57,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:32:07,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=108869.33333333333, ans=0.2 +2024-09-01 02:32:19,306 INFO [dysarthria_finetune.py:1435] (1/4) (11878989824, 34072559616) +2024-09-01 02:32:19,307 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:32:19,325 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:33:00,704 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 0, loss[loss=0.247, simple_loss=0.2149, pruned_loss=0.09772, ctc_loss=0.1934, over 18619.00 frames. ], tot_loss[loss=0.247, simple_loss=0.2149, pruned_loss=0.09772, ctc_loss=0.1934, over 18619.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:33:00,705 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:33:44,927 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 14, validation: loss=0.1924, simple_loss=0.1871, pruned_loss=0.06768, ctc_loss=0.1293, over 1073944.00 frames. +2024-09-01 02:33:44,927 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 02:34:07,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108917.33333333333, ans=0.1 +2024-09-01 02:34:20,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108970.66666666667, ans=0.125 +2024-09-01 02:34:28,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.41 vs. limit=22.5 +2024-09-01 02:35:04,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109024.0, ans=0.1 +2024-09-01 02:35:09,749 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.754e+02 1.893e+02 1.977e+02 2.192e+02 2.916e+02, threshold=3.954e+02, percent-clipped=0.0 +2024-09-01 02:35:30,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=109077.33333333333, ans=0.125 +2024-09-01 02:35:39,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=28.19 vs. 
limit=22.5 +2024-09-01 02:36:00,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=109130.66666666667, ans=0.125 +2024-09-01 02:36:11,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:36:12,041 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 50, loss[loss=0.2304, simple_loss=0.2158, pruned_loss=0.08233, ctc_loss=0.1772, over 19004.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.1941, pruned_loss=0.07349, ctc_loss=0.157, over 826629.84 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:13,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109184.0, ans=0.1 +2024-09-01 02:37:17,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109237.33333333333, ans=0.1 +2024-09-01 02:38:19,766 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.93 vs. limit=22.5 +2024-09-01 02:38:42,949 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 100, loss[loss=0.1921, simple_loss=0.1883, pruned_loss=0.06618, ctc_loss=0.1392, over 19114.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.1935, pruned_loss=0.07345, ctc_loss=0.1556, over 1472155.55 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:21,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109504.0, ans=0.0 +2024-09-01 02:39:29,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=109557.33333333333, ans=0.05 +2024-09-01 02:39:50,922 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 1.903e+02 1.972e+02 2.079e+02 2.713e+02, threshold=3.943e+02, percent-clipped=0.0 +2024-09-01 02:39:50,971 INFO [dysarthria_finetune.py:1435] (1/4) (14632550400, 34072559616) +2024-09-01 02:39:50,972 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:39:51,011 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:40:03,818 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 0, loss[loss=0.2244, simple_loss=0.1982, pruned_loss=0.088, ctc_loss=0.1765, over 18480.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.1982, pruned_loss=0.088, ctc_loss=0.1765, over 18480.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:40:03,818 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:40:34,886 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 15, validation: loss=0.1871, simple_loss=0.1844, pruned_loss=0.06629, ctc_loss=0.1271, over 1073944.00 frames. 
+2024-09-01 02:40:34,887 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 02:40:37,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109605.33333333333, ans=0.125 +2024-09-01 02:40:51,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=109605.33333333333, ans=10.0 +2024-09-01 02:40:51,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.37 vs. limit=15.0 +2024-09-01 02:41:26,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=109658.66666666667, ans=0.125 +2024-09-01 02:41:26,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=12.0 +2024-09-01 02:41:56,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=109712.0, ans=0.0 +2024-09-01 02:43:06,229 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 50, loss[loss=0.2169, simple_loss=0.2047, pruned_loss=0.07852, ctc_loss=0.1683, over 19020.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.1915, pruned_loss=0.07276, ctc_loss=0.1557, over 827766.05 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:44:42,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=110032.0, ans=0.2 +2024-09-01 02:44:46,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=110032.0, ans=0.125 +2024-09-01 02:45:16,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.73 vs. limit=22.5 +2024-09-01 02:45:26,968 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.707e+02 1.886e+02 2.042e+02 2.162e+02 2.644e+02, threshold=4.084e+02, percent-clipped=0.0 +2024-09-01 02:45:29,583 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 100, loss[loss=0.1688, simple_loss=0.1642, pruned_loss=0.05712, ctc_loss=0.1404, over 19074.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.1913, pruned_loss=0.07179, ctc_loss=0.1523, over 1471681.17 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:45:34,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=110138.66666666667, ans=0.0 +2024-09-01 02:46:46,684 INFO [dysarthria_finetune.py:1435] (1/4) (1296760832, 34072559616) +2024-09-01 02:46:46,685 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:46:46,746 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:47:00,733 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 0, loss[loss=0.2129, simple_loss=0.2038, pruned_loss=0.07732, ctc_loss=0.1628, over 18847.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2038, pruned_loss=0.07732, ctc_loss=0.1628, over 18847.00 frames. 
], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:47:00,733 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:47:23,696 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 16, validation: loss=0.182, simple_loss=0.1819, pruned_loss=0.06496, ctc_loss=0.1251, over 1073944.00 frames. +2024-09-01 02:47:23,697 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 02:47:35,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110293.33333333333, ans=0.125 +2024-09-01 02:47:39,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=110293.33333333333, ans=0.0 +2024-09-01 02:47:41,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.59 vs. limit=15.0 +2024-09-01 02:47:55,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110346.66666666667, ans=0.1 +2024-09-01 02:48:45,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110506.66666666667, ans=0.125 +2024-09-01 02:48:59,310 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:49:04,117 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 50, loss[loss=0.1839, simple_loss=0.186, pruned_loss=0.06362, ctc_loss=0.1348, over 19018.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.1877, pruned_loss=0.06917, ctc_loss=0.1501, over 827868.27 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:49:23,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=110613.33333333333, ans=0.0 +2024-09-01 02:49:44,008 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 1.879e+02 1.996e+02 2.191e+02 2.692e+02, threshold=3.992e+02, percent-clipped=0.0 +2024-09-01 02:49:45,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=110666.66666666667, ans=0.125 +2024-09-01 02:50:42,131 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 100, loss[loss=0.185, simple_loss=0.1788, pruned_loss=0.06614, ctc_loss=0.147, over 19118.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.1886, pruned_loss=0.07002, ctc_loss=0.15, over 1473208.83 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:35,752 INFO [dysarthria_finetune.py:1435] (1/4) (348848128, 34072559616) +2024-09-01 02:51:35,753 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:51:35,836 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 02:51:50,595 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 0, loss[loss=0.2335, simple_loss=0.2254, pruned_loss=0.08817, ctc_loss=0.1633, over 18527.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2254, pruned_loss=0.08817, ctc_loss=0.1633, over 18527.00 frames. 
], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:50,595 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:51:54,529 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.4605, 3.1179, 3.0393, 3.0576], device='cuda:1') +2024-09-01 02:52:13,658 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 17, validation: loss=0.1784, simple_loss=0.1796, pruned_loss=0.06394, ctc_loss=0.1232, over 1073944.00 frames. +2024-09-01 02:52:13,658 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 02:52:50,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 02:52:52,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-09-01 02:52:54,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-09-01 02:53:16,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.88 vs. limit=15.0 +2024-09-01 02:53:59,384 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 1.870e+02 1.982e+02 2.091e+02 2.808e+02, threshold=3.964e+02, percent-clipped=0.0 +2024-09-01 02:54:24,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111194.66666666667, ans=0.125 +2024-09-01 02:54:35,607 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 50, loss[loss=0.1864, simple_loss=0.1836, pruned_loss=0.06437, ctc_loss=0.151, over 19037.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.1864, pruned_loss=0.06869, ctc_loss=0.1481, over 827680.99 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:55:16,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=111248.0, ans=0.05 +2024-09-01 02:56:18,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111354.66666666667, ans=0.1 +2024-09-01 02:56:22,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.71 vs. limit=15.0 +2024-09-01 02:56:44,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.75 vs. limit=22.5 +2024-09-01 02:56:55,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0 +2024-09-01 02:56:55,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.46 vs. 
limit=15.0 +2024-09-01 02:57:16,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111408.0, ans=0.125 +2024-09-01 02:57:16,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111408.0, ans=0.1 +2024-09-01 02:57:27,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.94 vs. limit=15.0 +2024-09-01 02:57:34,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.63 vs. limit=15.0 +2024-09-01 02:58:16,886 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 100, loss[loss=0.1508, simple_loss=0.1603, pruned_loss=0.04824, ctc_loss=0.1122, over 19067.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.1827, pruned_loss=0.06649, ctc_loss=0.1442, over 1472664.14 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:59:27,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=111568.0, ans=0.015 +2024-09-01 03:00:07,752 INFO [dysarthria_finetune.py:1435] (1/4) (847970304, 34072559616) +2024-09-01 03:00:07,753 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:00:07,831 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 03:00:21,397 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 0, loss[loss=0.2181, simple_loss=0.1998, pruned_loss=0.08303, ctc_loss=0.1757, over 18622.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.1998, pruned_loss=0.08303, ctc_loss=0.1757, over 18622.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:00:21,397 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:01:08,463 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 18, validation: loss=0.1758, simple_loss=0.1773, pruned_loss=0.06291, ctc_loss=0.1213, over 1073944.00 frames. +2024-09-01 03:01:08,464 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 03:01:21,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111669.33333333333, ans=0.07 +2024-09-01 03:02:04,462 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 1.863e+02 1.965e+02 2.122e+02 2.833e+02, threshold=3.929e+02, percent-clipped=0.0 +2024-09-01 03:04:29,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111776.0, ans=0.125 +2024-09-01 03:05:06,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=111829.33333333333, ans=0.035 +2024-09-01 03:05:55,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.06 vs. limit=15.0 +2024-09-01 03:05:55,550 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 50, loss[loss=0.1765, simple_loss=0.1796, pruned_loss=0.06107, ctc_loss=0.1283, over 19026.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.1826, pruned_loss=0.06837, ctc_loss=0.1464, over 826500.31 frames. 
], batch size: 102, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:07:09,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=111936.0, ans=0.2 +2024-09-01 03:07:41,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=111989.33333333333, ans=0.125 +2024-09-01 03:08:08,293 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:08:53,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112042.66666666667, ans=0.125 +2024-09-01 03:09:58,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=112149.33333333333, ans=10.0 +2024-09-01 03:11:05,717 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 100, loss[loss=0.2001, simple_loss=0.1876, pruned_loss=0.07348, ctc_loss=0.1644, over 19036.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.1812, pruned_loss=0.06738, ctc_loss=0.1441, over 1471672.61 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:11:51,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.826e+02 1.931e+02 2.035e+02 3.279e+02, threshold=3.861e+02, percent-clipped=0.0 +2024-09-01 03:12:06,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112256.0, ans=0.1 +2024-09-01 03:12:08,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=112309.33333333333, ans=0.125 +2024-09-01 03:12:26,102 INFO [dysarthria_finetune.py:1435] (1/4) (669712384, 34072559616) +2024-09-01 03:12:26,103 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:12:26,165 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 03:12:38,700 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 0, loss[loss=0.2027, simple_loss=0.2018, pruned_loss=0.07314, ctc_loss=0.1432, over 18691.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2018, pruned_loss=0.07314, ctc_loss=0.1432, over 18691.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:12:38,700 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:13:02,318 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 19, validation: loss=0.1735, simple_loss=0.1751, pruned_loss=0.06201, ctc_loss=0.1194, over 1073944.00 frames. 
+2024-09-01 03:13:02,319 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 03:13:59,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 03:14:03,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 03:14:20,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=112512.0, ans=0.125 +2024-09-01 03:14:24,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112512.0, ans=0.125 +2024-09-01 03:14:34,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 03:14:44,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.47 vs. limit=10.0 +2024-09-01 03:14:48,752 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 50, loss[loss=0.1885, simple_loss=0.1874, pruned_loss=0.06613, ctc_loss=0.1432, over 18976.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.1805, pruned_loss=0.0668, ctc_loss=0.1443, over 828010.25 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:15:08,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112672.0, ans=0.125 +2024-09-01 03:15:10,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=112672.0, ans=0.2 +2024-09-01 03:15:14,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=112672.0, ans=0.0 +2024-09-01 03:15:16,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=112672.0, ans=0.0 +2024-09-01 03:15:16,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.21 vs. 
limit=22.5 +2024-09-01 03:15:40,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=112725.33333333333, ans=0.125 +2024-09-01 03:15:44,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112725.33333333333, ans=0.0 +2024-09-01 03:15:48,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=112778.66666666667, ans=0.125 +2024-09-01 03:15:58,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=112778.66666666667, ans=0.125 +2024-09-01 03:16:00,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 1.859e+02 1.957e+02 2.051e+02 3.574e+02, threshold=3.914e+02, percent-clipped=0.0 +2024-09-01 03:16:06,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112778.66666666667, ans=0.1 +2024-09-01 03:16:28,743 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 100, loss[loss=0.1562, simple_loss=0.157, pruned_loss=0.05291, ctc_loss=0.1238, over 19118.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.1795, pruned_loss=0.06506, ctc_loss=0.1402, over 1474453.83 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:16:35,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112885.33333333333, ans=0.1 +2024-09-01 03:16:37,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 03:16:45,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=112885.33333333333, ans=0.2 +2024-09-01 03:17:04,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=112938.66666666667, ans=0.0 +2024-09-01 03:17:18,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112992.0, ans=0.125 +2024-09-01 03:17:20,795 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.06 vs. limit=15.0 +2024-09-01 03:17:22,980 INFO [dysarthria_finetune.py:1435] (1/4) (116064256, 34072559616) +2024-09-01 03:17:22,980 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:17:23,052 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 03:17:37,120 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 0, loss[loss=0.2349, simple_loss=0.2173, pruned_loss=0.09138, ctc_loss=0.1742, over 18758.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2173, pruned_loss=0.09138, ctc_loss=0.1742, over 18758.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:17:37,120 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:18:00,790 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 20, validation: loss=0.1713, simple_loss=0.1732, pruned_loss=0.06117, ctc_loss=0.1175, over 1073944.00 frames. 
+2024-09-01 03:18:00,790 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 19046MB +2024-09-01 03:18:18,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=113040.0, ans=0.125 +2024-09-01 03:18:20,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=113093.33333333333, ans=0.0 +2024-09-01 03:18:34,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.20 vs. limit=15.0 +2024-09-01 03:18:49,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113146.66666666667, ans=0.1 +2024-09-01 03:19:03,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=113200.0, ans=0.04949747468305833 +2024-09-01 03:19:07,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113200.0, ans=0.125 +2024-09-01 03:19:34,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113253.33333333333, ans=0.1 +2024-09-01 03:19:34,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0 +2024-09-01 03:19:39,001 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 50, loss[loss=0.1893, simple_loss=0.1804, pruned_loss=0.06867, ctc_loss=0.1518, over 19069.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.1771, pruned_loss=0.06551, ctc_loss=0.1411, over 828644.17 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:19:52,481 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 1.810e+02 1.894e+02 2.049e+02 3.111e+02, threshold=3.788e+02, percent-clipped=0.0 +2024-09-01 03:19:57,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113360.0, ans=0.0 +2024-09-01 03:20:17,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113413.33333333333, ans=0.125 +2024-09-01 03:20:32,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113413.33333333333, ans=0.125 +2024-09-01 03:20:59,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113520.0, ans=0.125 +2024-09-01 03:21:09,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=113520.0, ans=0.125 +2024-09-01 03:21:15,880 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 100, loss[loss=0.1508, simple_loss=0.1471, pruned_loss=0.05311, ctc_loss=0.1208, over 19104.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.1751, pruned_loss=0.06389, ctc_loss=0.1372, over 1473557.06 frames. 
], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:21:17,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=113573.33333333333, ans=0.07 +2024-09-01 03:21:49,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.47 vs. limit=12.0 +2024-09-01 03:22:06,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.56 vs. limit=22.5 +2024-09-01 03:22:08,743 INFO [dysarthria_finetune.py:1435] (1/4) (931856384, 34072559616) +2024-09-01 03:22:08,743 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:22:08,783 INFO [dysarthria_finetune.py:1440] (1/4) (29920788480, 34072559616) +2024-09-01 03:22:08,783 INFO [dysarthria_finetune.py:1442] (1/4) Done! diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-2 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-2 new file mode 100644 index 0000000000000000000000000000000000000000..432c923e39ce90bc8c00e8c8b8ddeacdb11a32fb --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-2 @@ -0,0 +1,529 @@ +2024-08-31 22:13:17,928 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-31 22:13:17,967 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-31 22:13:17,968 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-31 22:13:18,721 INFO [dysarthria_finetune.py:1219] (2/4) (33106362368, 34072559616) +2024-08-31 22:13:18,721 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-31 22:13:18,725 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 
'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:13:18,725 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-31 22:13:21,221 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65805511 +2024-08-31 22:13:21,924 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-31 22:13:33,287 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-31 22:14:37,196 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-31 22:14:37,263 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-31 22:14:55,323 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-31 22:14:56,266 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-31 22:14:56,272 INFO [dysarthria_asr_datamodule.py:501] (2/4) About to get dev cuts +2024-08-31 22:14:56,477 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-31 22:14:57,473 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-31 22:14:57,473 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:16:23,769 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=17.72 vs. limit=7.5 +2024-08-31 22:16:31,019 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.83 vs. 
limit=7.5 +2024-08-31 22:16:34,164 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB +2024-08-31 22:16:36,228 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB +2024-08-31 22:17:53,346 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB +2024-08-31 22:17:55,317 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB +2024-08-31 22:19:46,278 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB +2024-08-31 22:19:48,456 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11838MB +2024-08-31 22:20:26,655 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.4003, simple_loss=0.3243, pruned_loss=0.1928, ctc_loss=0.2836, over 18533.00 frames. ], tot_loss[loss=0.4003, simple_loss=0.3243, pruned_loss=0.1928, ctc_loss=0.2836, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:26,655 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-31 22:32:57,021 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.373, simple_loss=0.3046, pruned_loss=0.1755, ctc_loss=0.2544, over 1073944.00 frames. +2024-08-31 22:32:57,064 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19759MB +2024-08-31 22:34:50,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1 +2024-08-31 22:36:08,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=100000.0, ans=0.125 +2024-08-31 22:46:56,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-31 22:49:21,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.10 vs. limit=15.0 +2024-08-31 22:51:12,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0 +2024-08-31 22:51:26,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.923e+02 1.157e+03 1.203e+03 1.280e+03 1.380e+03, threshold=4.812e+03, percent-clipped=0.0 +2024-08-31 23:03:06,380 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.113e+02 1.083e+03 1.198e+03 1.280e+03 1.431e+03, threshold=4.794e+03, percent-clipped=0.0 +2024-08-31 23:06:50,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.66 vs. limit=6.0 +2024-08-31 23:21:30,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100160.0, ans=0.0 +2024-08-31 23:21:30,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. 
limit=6.0 +2024-08-31 23:29:30,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-31 23:29:32,326 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.285e+02 9.052e+02 1.061e+03 1.198e+03 1.431e+03, threshold=4.243e+03, percent-clipped=0.0 +2024-08-31 23:49:33,570 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.4246, simple_loss=0.3403, pruned_loss=0.2104, ctc_loss=0.3139, over 19018.00 frames. ], tot_loss[loss=0.4107, simple_loss=0.3319, pruned_loss=0.1996, ctc_loss=0.2934, over 827419.58 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 23:52:53,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=100266.66666666667, ans=0.2 +2024-08-31 23:54:11,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=100266.66666666667, ans=0.2 +2024-08-31 23:56:26,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=31.02 vs. limit=22.5 +2024-09-01 00:02:23,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=100320.0, ans=0.09899494936611666 +2024-09-01 00:02:24,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=100320.0, ans=15.0 +2024-09-01 00:05:55,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=21.55 vs. limit=15.0 +2024-09-01 00:07:03,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=100373.33333333333, ans=0.04949747468305833 +2024-09-01 00:15:34,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=23.52 vs. limit=15.0 +2024-09-01 00:18:31,035 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+02 6.817e+02 8.321e+02 1.009e+03 1.431e+03, threshold=1.664e+03, percent-clipped=0.0 +2024-09-01 00:18:31,069 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3746, simple_loss=0.3051, pruned_loss=0.1784, ctc_loss=0.2558, over 19117.00 frames. ], tot_loss[loss=0.3952, simple_loss=0.32, pruned_loss=0.1891, ctc_loss=0.2805, over 1475925.13 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:22:55,039 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0 +2024-09-01 00:28:35,454 INFO [dysarthria_finetune.py:1435] (2/4) (3714777088, 34072559616) +2024-09-01 00:28:35,454 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 00:28:35,511 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 00:29:13,526 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 0, loss[loss=0.3335, simple_loss=0.2754, pruned_loss=0.1462, ctc_loss=0.2192, over 18502.00 frames. ], tot_loss[loss=0.3335, simple_loss=0.2754, pruned_loss=0.1462, ctc_loss=0.2192, over 18502.00 frames. 
], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:29:13,527 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 00:34:07,562 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 2, validation: loss=0.3353, simple_loss=0.2773, pruned_loss=0.1482, ctc_loss=0.2175, over 1073944.00 frames. +2024-09-01 00:34:07,563 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 00:50:27,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.57 vs. limit=15.0 +2024-09-01 00:50:35,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:51:26,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:53:03,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=100789.33333333333, ans=0.125 +2024-09-01 00:53:03,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.02 vs. limit=12.0 +2024-09-01 00:53:15,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=44.13 vs. limit=22.5 +2024-09-01 00:55:14,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=100842.66666666667, ans=0.07 +2024-09-01 00:57:09,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=100896.0, ans=0.125 +2024-09-01 00:59:42,888 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 50, loss[loss=0.3746, simple_loss=0.3053, pruned_loss=0.1691, ctc_loss=0.2621, over 18952.00 frames. ], tot_loss[loss=0.3734, simple_loss=0.3039, pruned_loss=0.1721, ctc_loss=0.2606, over 829638.79 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 01:00:18,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-09-01 01:04:18,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101002.66666666667, ans=0.1 +2024-09-01 01:08:04,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.183e+02 4.403e+02 5.126e+02 5.917e+02 6.888e+02, threshold=1.025e+03, percent-clipped=0.0 +2024-09-01 01:10:07,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=101109.33333333333, ans=0.2 +2024-09-01 01:10:54,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=101109.33333333333, ans=0.2 +2024-09-01 01:11:03,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=101162.66666666667, ans=0.0 +2024-09-01 01:11:13,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.52 vs. 
limit=15.0 +2024-09-01 01:14:13,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=101216.0, ans=0.0 +2024-09-01 01:14:19,319 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 100, loss[loss=0.3146, simple_loss=0.2641, pruned_loss=0.1333, ctc_loss=0.1927, over 19108.00 frames. ], tot_loss[loss=0.3602, simple_loss=0.2943, pruned_loss=0.163, ctc_loss=0.2483, over 1476292.15 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 01:19:35,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=101322.66666666667, ans=0.0 +2024-09-01 01:20:51,904 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 01:20:51,905 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:20:51,946 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 01:21:21,469 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 0, loss[loss=0.3605, simple_loss=0.2929, pruned_loss=0.1647, ctc_loss=0.2546, over 18600.00 frames. ], tot_loss[loss=0.3605, simple_loss=0.2929, pruned_loss=0.1647, ctc_loss=0.2546, over 18600.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:21:21,470 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:21:44,700 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 3, validation: loss=0.309, simple_loss=0.2588, pruned_loss=0.13, ctc_loss=0.1938, over 1073944.00 frames. +2024-09-01 01:21:44,701 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 01:22:17,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=101370.66666666667, ans=0.2 +2024-09-01 01:22:17,647 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.27 vs. limit=15.0 +2024-09-01 01:22:56,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101424.0, ans=0.1 +2024-09-01 01:23:07,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=101424.0, ans=0.125 +2024-09-01 01:23:29,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0 +2024-09-01 01:23:32,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=101477.33333333333, ans=0.0 +2024-09-01 01:24:06,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.73 vs. 
limit=22.5 +2024-09-01 01:24:27,005 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.351e+02 3.834e+02 4.204e+02 5.264e+02, threshold=7.667e+02, percent-clipped=0.0 +2024-09-01 01:24:45,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101584.0, ans=0.125 +2024-09-01 01:24:47,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101637.33333333333, ans=0.1 +2024-09-01 01:24:49,026 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 50, loss[loss=0.3317, simple_loss=0.2721, pruned_loss=0.1444, ctc_loss=0.2304, over 19168.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.2855, pruned_loss=0.1526, ctc_loss=0.2393, over 828229.52 frames. ], batch size: 103, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:24:50,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=101637.33333333333, ans=0.2 +2024-09-01 01:26:25,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=101797.33333333333, ans=0.5 +2024-09-01 01:26:34,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101797.33333333333, ans=0.0 +2024-09-01 01:26:45,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=19.66 vs. limit=15.0 +2024-09-01 01:26:46,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:27:00,717 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 100, loss[loss=0.2821, simple_loss=0.2376, pruned_loss=0.1111, ctc_loss=0.1838, over 19024.00 frames. ], tot_loss[loss=0.3379, simple_loss=0.2782, pruned_loss=0.1466, ctc_loss=0.231, over 1476045.82 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:27:30,545 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=18.88 vs. limit=15.0 +2024-09-01 01:27:35,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=101957.33333333333, ans=0.125 +2024-09-01 01:29:32,085 INFO [dysarthria_finetune.py:1435] (2/4) (10729750528, 34072559616) +2024-09-01 01:29:32,086 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:29:32,121 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 01:29:45,318 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 0, loss[loss=0.317, simple_loss=0.2654, pruned_loss=0.1272, ctc_loss=0.2122, over 18618.00 frames. ], tot_loss[loss=0.317, simple_loss=0.2654, pruned_loss=0.1272, ctc_loss=0.2122, over 18618.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:29:45,318 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:30:08,496 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 4, validation: loss=0.2887, simple_loss=0.2447, pruned_loss=0.1169, ctc_loss=0.1781, over 1073944.00 frames. 
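The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=...` lines summarize the recent distribution of gradient norms as five values (min, 25%, median, 75%, max); in every such line in these logs the threshold equals `Clipping_scale` times the median, e.g. the next warning below has threshold 6.294e+02 = 2.0 × 3.147e+02. A short sketch of clipping against a median-based threshold — inferred from these logged numbers, not copied from icefall's optim.py:

```python
import torch

def median_clip_threshold(grad_norm_history, clipping_scale=2.0):
    """Return clipping_scale * median of recent gradient norms, plus the
    (min, 25%, 50%, 75%, max) summary that the WARNING lines print."""
    norms = torch.tensor(grad_norm_history)
    quartiles = [torch.quantile(norms, q).item() for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
    return clipping_scale * quartiles[2], quartiles

def clip_step(model, grad_norm_history, clipping_scale=2.0):
    threshold, _ = median_clip_threshold(grad_norm_history, clipping_scale)
    total_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=threshold)
    return total_norm.item() > threshold  # True when this step was clipped
```

With that reading, `percent-clipped=0.0` throughout this section simply means no step's gradient norm exceeded twice the running median.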
+2024-09-01 01:30:08,496 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 01:30:42,263 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.378e+02 2.838e+02 3.147e+02 3.460e+02 5.318e+02, threshold=6.294e+02, percent-clipped=0.0 +2024-09-01 01:30:43,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=9.30 vs. limit=12.0 +2024-09-01 01:30:49,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102106.66666666667, ans=0.125 +2024-09-01 01:30:56,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:31:03,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=102160.0, ans=0.0 +2024-09-01 01:31:12,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=32.34 vs. limit=15.0 +2024-09-01 01:31:18,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.64 vs. limit=22.5 +2024-09-01 01:32:02,190 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 50, loss[loss=0.31, simple_loss=0.2574, pruned_loss=0.1262, ctc_loss=0.216, over 18961.00 frames. ], tot_loss[loss=0.32, simple_loss=0.2655, pruned_loss=0.1335, ctc_loss=0.2187, over 828488.26 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:32:09,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=102320.0, ans=0.125 +2024-09-01 01:32:19,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=102320.0, ans=0.125 +2024-09-01 01:32:25,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=36.92 vs. limit=22.5 +2024-09-01 01:32:33,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=12.0 +2024-09-01 01:32:35,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.57 vs. limit=15.0 +2024-09-01 01:32:40,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=102426.66666666667, ans=0.125 +2024-09-01 01:32:56,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=102426.66666666667, ans=0.0 +2024-09-01 01:35:50,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102480.0, ans=0.0 +2024-09-01 01:35:57,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102533.33333333333, ans=0.0 +2024-09-01 01:36:37,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.94 vs. 
limit=6.0 +2024-09-01 01:37:40,020 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 100, loss[loss=0.292, simple_loss=0.2442, pruned_loss=0.1212, ctc_loss=0.1942, over 19090.00 frames. ], tot_loss[loss=0.3158, simple_loss=0.2624, pruned_loss=0.1317, ctc_loss=0.215, over 1476821.49 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:38:09,931 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 2.526e+02 2.751e+02 3.040e+02 4.636e+02, threshold=5.501e+02, percent-clipped=0.0 +2024-09-01 01:38:19,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0 +2024-09-01 01:38:30,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-09-01 01:38:35,842 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 01:38:35,842 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:38:35,891 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 01:38:49,736 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 0, loss[loss=0.299, simple_loss=0.2515, pruned_loss=0.1206, ctc_loss=0.1996, over 18551.00 frames. ], tot_loss[loss=0.299, simple_loss=0.2515, pruned_loss=0.1206, ctc_loss=0.1996, over 18551.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:38:49,736 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:39:30,977 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 5, validation: loss=0.2717, simple_loss=0.233, pruned_loss=0.1066, ctc_loss=0.1665, over 1073944.00 frames. +2024-09-01 01:39:30,978 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 01:40:54,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=102741.33333333333, ans=0.2 +2024-09-01 01:41:06,686 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:41:15,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=102794.66666666667, ans=0.0 +2024-09-01 01:41:23,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.45 vs. limit=15.0 +2024-09-01 01:42:50,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.43 vs. limit=15.0 +2024-09-01 01:43:07,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=102901.33333333333, ans=0.0 +2024-09-01 01:43:55,504 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 50, loss[loss=0.2797, simple_loss=0.236, pruned_loss=0.1101, ctc_loss=0.19, over 19027.00 frames. ], tot_loss[loss=0.305, simple_loss=0.2551, pruned_loss=0.1237, ctc_loss=0.2097, over 828775.72 frames. 
], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:44:01,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=103008.0, ans=0.0 +2024-09-01 01:44:21,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.61 vs. limit=12.0 +2024-09-01 01:44:36,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.70 vs. limit=15.0 +2024-09-01 01:45:12,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:45:29,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:46:04,690 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 2.382e+02 2.524e+02 2.770e+02 4.371e+02, threshold=5.047e+02, percent-clipped=0.0 +2024-09-01 01:46:35,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.90 vs. limit=22.5 +2024-09-01 01:46:51,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:46:52,897 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 100, loss[loss=0.2983, simple_loss=0.2483, pruned_loss=0.1244, ctc_loss=0.2067, over 19114.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.248, pruned_loss=0.1196, ctc_loss=0.202, over 1478197.42 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:47:03,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=103274.66666666667, ans=0.0 +2024-09-01 01:48:01,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=103328.0, ans=0.2 +2024-09-01 01:48:07,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.59 vs. limit=15.0 +2024-09-01 01:48:10,622 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:48:25,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103381.33333333333, ans=0.1 +2024-09-01 01:48:26,051 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 01:48:26,052 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:48:26,090 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 01:48:41,997 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 0, loss[loss=0.283, simple_loss=0.2417, pruned_loss=0.1094, ctc_loss=0.1881, over 18783.00 frames. ], tot_loss[loss=0.283, simple_loss=0.2417, pruned_loss=0.1094, ctc_loss=0.1881, over 18783.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:48:41,998 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:49:05,149 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 6, validation: loss=0.2578, simple_loss=0.2238, pruned_loss=0.09861, ctc_loss=0.1582, over 1073944.00 frames. +2024-09-01 01:49:05,149 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 01:49:45,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=103477.33333333333, ans=0.125 +2024-09-01 01:50:06,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103530.66666666667, ans=0.1 +2024-09-01 01:50:14,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103584.0, ans=0.1 +2024-09-01 01:50:24,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=103584.0, ans=0.0 +2024-09-01 01:50:43,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=103637.33333333333, ans=0.0 +2024-09-01 01:50:52,023 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 50, loss[loss=0.287, simple_loss=0.2442, pruned_loss=0.1152, ctc_loss=0.1884, over 19006.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.2381, pruned_loss=0.1113, ctc_loss=0.196, over 828020.78 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:50:55,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 01:50:55,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 01:51:03,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=15.0 +2024-09-01 01:51:07,999 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 2.277e+02 2.375e+02 2.614e+02 3.891e+02, threshold=4.750e+02, percent-clipped=0.0 +2024-09-01 01:51:13,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=103744.0, ans=0.09899494936611666 +2024-09-01 01:51:34,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=103797.33333333333, ans=0.125 +2024-09-01 01:52:09,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=103850.66666666667, ans=0.0 +2024-09-01 01:52:34,153 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 100, loss[loss=0.2988, simple_loss=0.2538, pruned_loss=0.1164, ctc_loss=0.208, over 19060.00 frames. ], tot_loss[loss=0.28, simple_loss=0.2372, pruned_loss=0.1103, ctc_loss=0.1926, over 1475525.13 frames. 
], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:52:49,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=103957.33333333333, ans=22.5 +2024-09-01 01:53:03,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.84 vs. limit=10.0 +2024-09-01 01:53:06,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 01:53:07,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=104010.66666666667, ans=15.0 +2024-09-01 01:53:25,785 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.09 vs. limit=15.0 +2024-09-01 01:53:34,207 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 01:53:34,208 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:53:34,257 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 01:53:47,071 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 0, loss[loss=0.2767, simple_loss=0.2399, pruned_loss=0.1067, ctc_loss=0.1772, over 18435.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.2399, pruned_loss=0.1067, ctc_loss=0.1772, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:53:47,072 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:54:10,665 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 7, validation: loss=0.2464, simple_loss=0.2165, pruned_loss=0.09214, ctc_loss=0.1523, over 1073944.00 frames. +2024-09-01 01:54:10,666 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 01:54:30,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.87 vs. limit=22.5 +2024-09-01 01:55:12,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.149e+02 2.268e+02 2.457e+02 3.821e+02, threshold=4.535e+02, percent-clipped=0.0 +2024-09-01 01:55:33,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=104320.0, ans=0.125 +2024-09-01 01:55:53,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.70 vs. limit=22.5 +2024-09-01 01:55:53,889 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 50, loss[loss=0.2528, simple_loss=0.2215, pruned_loss=0.08959, ctc_loss=0.1735, over 18970.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.2352, pruned_loss=0.1067, ctc_loss=0.1922, over 828175.40 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:07,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104533.33333333333, ans=0.1 +2024-09-01 01:57:10,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=104586.66666666667, ans=0.2 +2024-09-01 01:57:30,837 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 100, loss[loss=0.2526, simple_loss=0.2209, pruned_loss=0.09424, ctc_loss=0.1673, over 19065.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.2307, pruned_loss=0.1038, ctc_loss=0.1878, over 1476190.89 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:59,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104693.33333333333, ans=0.125 +2024-09-01 01:58:12,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-09-01 01:58:22,454 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 01:58:22,454 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 01:58:22,506 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 01:58:35,293 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 0, loss[loss=0.24, simple_loss=0.2117, pruned_loss=0.08632, ctc_loss=0.1619, over 18635.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2117, pruned_loss=0.08632, ctc_loss=0.1619, over 18635.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:58:35,294 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 01:58:58,374 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 8, validation: loss=0.236, simple_loss=0.2103, pruned_loss=0.08624, ctc_loss=0.1474, over 1073944.00 frames. +2024-09-01 01:58:58,375 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 01:59:02,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.080e+02 2.182e+02 2.331e+02 3.634e+02, threshold=4.365e+02, percent-clipped=0.0 +2024-09-01 01:59:15,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=15.0 +2024-09-01 02:00:10,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=104949.33333333333, ans=0.2 +2024-09-01 02:00:22,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105002.66666666667, ans=0.125 +2024-09-01 02:00:24,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.59 vs. limit=10.0 +2024-09-01 02:00:36,423 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 50, loss[loss=0.2406, simple_loss=0.2204, pruned_loss=0.0796, ctc_loss=0.153, over 19000.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.2251, pruned_loss=0.09806, ctc_loss=0.1853, over 827531.12 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:01:03,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=105109.33333333333, ans=0.025 +2024-09-01 02:01:07,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105109.33333333333, ans=0.1 +2024-09-01 02:01:27,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=105162.66666666667, ans=15.0 +2024-09-01 02:01:40,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=9.90 vs. limit=12.0 +2024-09-01 02:02:13,400 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 100, loss[loss=0.2446, simple_loss=0.2157, pruned_loss=0.08849, ctc_loss=0.1706, over 19093.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.2202, pruned_loss=0.09397, ctc_loss=0.1786, over 1475468.79 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:02:17,363 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.064e+02 2.191e+02 2.358e+02 3.385e+02, threshold=4.381e+02, percent-clipped=0.0 +2024-09-01 02:02:30,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=105322.66666666667, ans=0.05 +2024-09-01 02:03:07,391 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 02:03:07,392 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:03:07,422 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:03:20,910 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 0, loss[loss=0.2596, simple_loss=0.2274, pruned_loss=0.09849, ctc_loss=0.1763, over 18461.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.2274, pruned_loss=0.09849, ctc_loss=0.1763, over 18461.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:20,910 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:03:44,108 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 9, validation: loss=0.2267, simple_loss=0.2052, pruned_loss=0.08107, ctc_loss=0.1434, over 1073944.00 frames. +2024-09-01 02:03:44,109 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:03:55,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.21 vs. limit=15.0 +2024-09-01 02:04:35,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105584.0, ans=0.125 +2024-09-01 02:05:08,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=105637.33333333333, ans=0.05 +2024-09-01 02:05:38,302 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 50, loss[loss=0.2611, simple_loss=0.2336, pruned_loss=0.0895, ctc_loss=0.1905, over 18943.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.2168, pruned_loss=0.09126, ctc_loss=0.1778, over 826909.81 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:54,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105744.0, ans=0.09899494936611666 +2024-09-01 02:06:03,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105797.33333333333, ans=0.125 +2024-09-01 02:06:24,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:06:31,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.876e+02 2.077e+02 2.184e+02 2.316e+02 3.584e+02, threshold=4.367e+02, percent-clipped=0.0 +2024-09-01 02:06:54,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=105904.0, ans=0.2 +2024-09-01 02:07:29,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=105957.33333333333, ans=0.0 +2024-09-01 02:07:31,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=106010.66666666667, ans=0.0 +2024-09-01 02:07:32,676 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 100, loss[loss=0.2387, simple_loss=0.2168, pruned_loss=0.08077, ctc_loss=0.1699, over 19136.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.2154, pruned_loss=0.08912, ctc_loss=0.1746, over 1474643.82 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:07:34,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:07:36,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.47 vs. limit=15.0 +2024-09-01 02:08:34,533 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 02:08:34,611 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:08:34,655 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:08:48,126 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 0, loss[loss=0.2754, simple_loss=0.24, pruned_loss=0.1071, ctc_loss=0.1914, over 18505.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.24, pruned_loss=0.1071, ctc_loss=0.1914, over 18505.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:48,127 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:09:26,937 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 10, validation: loss=0.2182, simple_loss=0.2007, pruned_loss=0.07671, ctc_loss=0.1399, over 1073944.00 frames. +2024-09-01 02:09:26,938 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:09:40,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=106165.33333333333, ans=0.2 +2024-09-01 02:09:48,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.69 vs. 
limit=15.0 +2024-09-01 02:10:32,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:11:13,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=106325.33333333333, ans=0.0 +2024-09-01 02:11:50,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.83 vs. limit=15.0 +2024-09-01 02:11:54,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.023e+02 2.117e+02 2.323e+02 3.505e+02, threshold=4.234e+02, percent-clipped=0.0 +2024-09-01 02:12:16,362 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 50, loss[loss=0.242, simple_loss=0.2174, pruned_loss=0.08316, ctc_loss=0.1835, over 19019.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2127, pruned_loss=0.08738, ctc_loss=0.1743, over 827816.98 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:12:19,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:12:19,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=106432.0, ans=0.2 +2024-09-01 02:12:24,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:12:33,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106432.0, ans=0.125 +2024-09-01 02:12:59,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.47 vs. limit=22.5 +2024-09-01 02:14:00,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106592.0, ans=0.1 +2024-09-01 02:15:12,126 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 100, loss[loss=0.2091, simple_loss=0.1888, pruned_loss=0.0722, ctc_loss=0.1578, over 19070.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.208, pruned_loss=0.08243, ctc_loss=0.1673, over 1475821.74 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:15:33,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-09-01 02:15:39,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=106698.66666666667, ans=0.2 +2024-09-01 02:16:11,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=106752.0, ans=0.0 +2024-09-01 02:16:30,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.92 vs. 
limit=22.5 +2024-09-01 02:16:33,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=106805.33333333333, ans=0.2 +2024-09-01 02:16:36,658 INFO [dysarthria_finetune.py:1435] (2/4) (10702487552, 34072559616) +2024-09-01 02:16:36,658 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:16:36,697 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:16:49,149 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 0, loss[loss=0.2627, simple_loss=0.2275, pruned_loss=0.1052, ctc_loss=0.1849, over 18525.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.2275, pruned_loss=0.1052, ctc_loss=0.1849, over 18525.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:16:49,150 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:17:12,839 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 11, validation: loss=0.211, simple_loss=0.1968, pruned_loss=0.07375, ctc_loss=0.137, over 1073944.00 frames. +2024-09-01 02:17:12,840 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:17:42,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=106906.66666666667, ans=0.0 +2024-09-01 02:17:43,501 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 1.989e+02 2.082e+02 2.188e+02 3.029e+02, threshold=4.165e+02, percent-clipped=0.0 +2024-09-01 02:17:54,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=106960.0, ans=0.0 +2024-09-01 02:17:59,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.29 vs. limit=15.0 +2024-09-01 02:18:18,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.28 vs. limit=15.0 +2024-09-01 02:18:42,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 02:18:46,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 02:18:53,621 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 50, loss[loss=0.2359, simple_loss=0.2111, pruned_loss=0.08585, ctc_loss=0.1757, over 19068.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2062, pruned_loss=0.08189, ctc_loss=0.168, over 827285.47 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:18:57,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107120.0, ans=0.1 +2024-09-01 02:18:57,327 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:19:15,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=107173.33333333333, ans=0.0 +2024-09-01 02:19:35,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107226.66666666667, ans=0.1 +2024-09-01 02:20:35,686 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 100, loss[loss=0.2191, simple_loss=0.1975, pruned_loss=0.07976, ctc_loss=0.1617, over 19059.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2044, pruned_loss=0.08028, ctc_loss=0.1641, over 1474809.38 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:20:49,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107386.66666666667, ans=0.125 +2024-09-01 02:20:51,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.93 vs. limit=15.0 +2024-09-01 02:20:58,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:21:05,341 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.795e+02 1.934e+02 2.032e+02 2.152e+02 3.346e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-09-01 02:21:12,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:21:30,082 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 02:21:30,084 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:21:30,119 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:21:45,323 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 0, loss[loss=0.2246, simple_loss=0.2054, pruned_loss=0.0805, ctc_loss=0.1625, over 18505.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2054, pruned_loss=0.0805, ctc_loss=0.1625, over 18505.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:21:45,324 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:22:12,230 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 12, validation: loss=0.2042, simple_loss=0.1932, pruned_loss=0.07127, ctc_loss=0.1341, over 1073944.00 frames. +2024-09-01 02:22:12,230 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:22:22,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.94 vs. 
limit=6.0 +2024-09-01 02:22:39,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=107594.66666666667, ans=0.125 +2024-09-01 02:23:24,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=107648.0, ans=0.0 +2024-09-01 02:23:26,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=107648.0, ans=0.09899494936611666 +2024-09-01 02:23:33,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=107701.33333333333, ans=0.125 +2024-09-01 02:23:38,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.51 vs. limit=15.0 +2024-09-01 02:23:59,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=107754.66666666667, ans=0.125 +2024-09-01 02:24:21,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107754.66666666667, ans=0.1 +2024-09-01 02:24:27,762 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 50, loss[loss=0.2083, simple_loss=0.2007, pruned_loss=0.06736, ctc_loss=0.15, over 18979.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2005, pruned_loss=0.07786, ctc_loss=0.1632, over 828348.40 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:25:55,561 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.770e+02 1.958e+02 2.051e+02 2.245e+02 3.047e+02, threshold=4.102e+02, percent-clipped=0.0 +2024-09-01 02:26:01,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107968.0, ans=0.125 +2024-09-01 02:26:07,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=108021.33333333333, ans=0.2 +2024-09-01 02:26:30,862 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 100, loss[loss=0.1846, simple_loss=0.1856, pruned_loss=0.05625, ctc_loss=0.1266, over 19089.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2006, pruned_loss=0.07702, ctc_loss=0.1615, over 1475248.85 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:32,384 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.53 vs. limit=22.5 +2024-09-01 02:26:54,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=108128.0, ans=0.0 +2024-09-01 02:26:54,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=108128.0, ans=0.125 +2024-09-01 02:27:31,273 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 02:27:31,273 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:27:31,316 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:27:44,308 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 0, loss[loss=0.2366, simple_loss=0.2158, pruned_loss=0.089, ctc_loss=0.1669, over 18540.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2158, pruned_loss=0.089, ctc_loss=0.1669, over 18540.00 frames. 
], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:27:44,308 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:27:51,981 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([7.1314, 6.7609, 6.6446, 6.7421], device='cuda:2') +2024-09-01 02:28:07,302 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 13, validation: loss=0.1981, simple_loss=0.19, pruned_loss=0.06934, ctc_loss=0.1316, over 1073944.00 frames. +2024-09-01 02:28:07,303 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:28:13,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=108229.33333333333, ans=0.125 +2024-09-01 02:28:33,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=108282.66666666667, ans=0.0 +2024-09-01 02:28:57,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=108336.0, ans=0.2 +2024-09-01 02:29:01,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=108336.0, ans=0.125 +2024-09-01 02:29:11,563 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.86 vs. limit=8.0 +2024-09-01 02:29:25,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.32 vs. limit=15.0 +2024-09-01 02:29:47,490 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 50, loss[loss=0.2234, simple_loss=0.2105, pruned_loss=0.07913, ctc_loss=0.1604, over 18984.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.1985, pruned_loss=0.07439, ctc_loss=0.1587, over 829065.08 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:29:52,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=108496.0, ans=0.0 +2024-09-01 02:29:59,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108496.0, ans=0.125 +2024-09-01 02:30:01,821 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 1.921e+02 2.017e+02 2.151e+02 2.785e+02, threshold=4.034e+02, percent-clipped=0.0 +2024-09-01 02:30:38,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108602.66666666667, ans=0.125 +2024-09-01 02:30:47,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=108656.0, ans=10.0 +2024-09-01 02:30:49,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:31:11,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-09-01 02:31:25,776 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 100, loss[loss=0.2028, simple_loss=0.1903, pruned_loss=0.07005, ctc_loss=0.1594, over 19116.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.198, pruned_loss=0.07471, ctc_loss=0.158, over 1477011.25 frames. 
], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:31:38,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 02:31:44,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108816.0, ans=0.125 +2024-09-01 02:31:46,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.97 vs. limit=22.5 +2024-09-01 02:31:53,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=108816.0, ans=0.025 +2024-09-01 02:31:57,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=108816.0, ans=0.2 +2024-09-01 02:31:59,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=108816.0, ans=0.09899494936611666 +2024-09-01 02:32:12,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108869.33333333333, ans=0.1 +2024-09-01 02:32:16,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108869.33333333333, ans=0.1 +2024-09-01 02:32:19,312 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 02:32:27,538 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:32:27,576 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:33:00,702 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 0, loss[loss=0.2161, simple_loss=0.1988, pruned_loss=0.07934, ctc_loss=0.1641, over 18523.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.1988, pruned_loss=0.07934, ctc_loss=0.1641, over 18523.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:33:00,702 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:33:44,931 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 14, validation: loss=0.1924, simple_loss=0.1871, pruned_loss=0.06768, ctc_loss=0.1293, over 1073944.00 frames. +2024-09-01 02:33:44,931 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:34:07,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 02:34:38,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.82 vs. limit=15.0 +2024-09-01 02:35:09,744 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.754e+02 1.893e+02 1.977e+02 2.192e+02 2.916e+02, threshold=3.954e+02, percent-clipped=0.0 +2024-09-01 02:35:45,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=109130.66666666667, ans=0.0 +2024-09-01 02:35:53,450 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:36:12,038 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 50, loss[loss=0.1972, simple_loss=0.191, pruned_loss=0.06519, ctc_loss=0.1571, over 18999.00 frames. 
], tot_loss[loss=0.206, simple_loss=0.1938, pruned_loss=0.07297, ctc_loss=0.1572, over 827850.18 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:46,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:37:16,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=109237.33333333333, ans=0.2 +2024-09-01 02:38:17,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109344.0, ans=0.125 +2024-09-01 02:38:39,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=109397.33333333333, ans=0.0 +2024-09-01 02:38:42,949 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 100, loss[loss=0.1956, simple_loss=0.1924, pruned_loss=0.06485, ctc_loss=0.1513, over 19059.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.1944, pruned_loss=0.07358, ctc_loss=0.1557, over 1475617.37 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:39:10,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109504.0, ans=0.0 +2024-09-01 02:39:27,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=109504.0, ans=0.0 +2024-09-01 02:39:50,923 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 1.903e+02 1.972e+02 2.079e+02 2.713e+02, threshold=3.943e+02, percent-clipped=0.0 +2024-09-01 02:39:50,971 INFO [dysarthria_finetune.py:1435] (2/4) (10702487552, 34072559616) +2024-09-01 02:39:50,972 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:39:51,011 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:40:03,803 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 0, loss[loss=0.273, simple_loss=0.2393, pruned_loss=0.1097, ctc_loss=0.2079, over 18678.00 frames. ], tot_loss[loss=0.273, simple_loss=0.2393, pruned_loss=0.1097, ctc_loss=0.2079, over 18678.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:40:03,804 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:40:34,881 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 15, validation: loss=0.1871, simple_loss=0.1844, pruned_loss=0.06629, ctc_loss=0.1271, over 1073944.00 frames. +2024-09-01 02:40:34,882 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:40:36,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-09-01 02:40:40,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-09-01 02:42:07,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=109765.33333333333, ans=0.0 +2024-09-01 02:42:18,416 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:43:06,231 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 50, loss[loss=0.1988, simple_loss=0.2007, pruned_loss=0.06618, ctc_loss=0.1465, over 18994.00 frames. 
], tot_loss[loss=0.2024, simple_loss=0.192, pruned_loss=0.07302, ctc_loss=0.1541, over 827605.34 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:44:02,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=109925.33333333333, ans=0.2 +2024-09-01 02:44:10,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.16 vs. limit=15.0 +2024-09-01 02:44:28,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=109978.66666666667, ans=0.0 +2024-09-01 02:44:42,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=110032.0, ans=0.125 +2024-09-01 02:45:26,975 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.707e+02 1.886e+02 2.042e+02 2.162e+02 2.644e+02, threshold=4.084e+02, percent-clipped=0.0 +2024-09-01 02:45:29,587 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 100, loss[loss=0.2103, simple_loss=0.1993, pruned_loss=0.07644, ctc_loss=0.1632, over 19062.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.1909, pruned_loss=0.07155, ctc_loss=0.1507, over 1475114.53 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:45:33,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=110138.66666666667, ans=0.2 +2024-09-01 02:46:46,674 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 02:46:46,674 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:46:46,717 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:47:00,714 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 0, loss[loss=0.2247, simple_loss=0.2055, pruned_loss=0.08675, ctc_loss=0.1717, over 18504.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2055, pruned_loss=0.08675, ctc_loss=0.1717, over 18504.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:47:00,714 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:47:23,703 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 16, validation: loss=0.182, simple_loss=0.1819, pruned_loss=0.06496, ctc_loss=0.1251, over 1073944.00 frames. +2024-09-01 02:47:23,704 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:47:31,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.66 vs. limit=15.0 +2024-09-01 02:47:39,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110293.33333333333, ans=0.1 +2024-09-01 02:48:19,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110400.0, ans=0.0 +2024-09-01 02:48:22,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.95 vs. 
limit=15.0 +2024-09-01 02:48:35,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110453.33333333333, ans=0.1 +2024-09-01 02:49:04,110 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 50, loss[loss=0.207, simple_loss=0.1973, pruned_loss=0.07561, ctc_loss=0.1623, over 19044.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.1857, pruned_loss=0.06909, ctc_loss=0.1503, over 828171.03 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:49:44,008 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 1.879e+02 1.996e+02 2.191e+02 2.692e+02, threshold=3.992e+02, percent-clipped=0.0 +2024-09-01 02:49:45,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=110666.66666666667, ans=0.0 +2024-09-01 02:50:05,001 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.00 vs. limit=22.5 +2024-09-01 02:50:25,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=110773.33333333333, ans=0.125 +2024-09-01 02:50:29,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-09-01 02:50:42,119 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 100, loss[loss=0.148, simple_loss=0.1613, pruned_loss=0.04574, ctc_loss=0.108, over 19090.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.1854, pruned_loss=0.06829, ctc_loss=0.1472, over 1476933.27 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:35,759 INFO [dysarthria_finetune.py:1435] (2/4) (10729750528, 34072559616) +2024-09-01 02:51:35,759 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:51:35,813 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 02:51:50,595 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 0, loss[loss=0.1922, simple_loss=0.1926, pruned_loss=0.06908, ctc_loss=0.1343, over 18336.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.1926, pruned_loss=0.06908, ctc_loss=0.1343, over 18336.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:50,595 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:51:55,656 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5937, 4.0910, 4.4951, 4.2329], device='cuda:2') +2024-09-01 02:52:13,657 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 17, validation: loss=0.1784, simple_loss=0.1796, pruned_loss=0.06394, ctc_loss=0.1232, over 1073944.00 frames. 
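The recurring `tot_loss[... over N frames]` entries in these records suggest a running loss average in which each batch contributes in proportion to its frame count. Below is a minimal sketch of that bookkeeping, assuming frame-weighted averaging; the class name and exact mechanics are illustrative stand-ins, not icefall's actual metrics tracker.

```python
# Hypothetical sketch of frame-weighted loss aggregation, as suggested by the
# "tot_loss[... over N frames]" records above; not the actual icefall code.

class FrameWeightedLoss:
    """Running average of per-batch losses, weighted by frame count."""

    def __init__(self) -> None:
        self.loss_sum = 0.0  # sum over batches of loss * num_frames
        self.frames = 0.0    # total frames seen so far

    def update(self, loss: float, num_frames: float) -> None:
        self.loss_sum += loss * num_frames
        self.frames += num_frames

    @property
    def average(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)


tracker = FrameWeightedLoss()
tracker.update(loss=0.1785, num_frames=19057.0)  # one batch
tracker.update(loss=0.1909, num_frames=18900.0)  # another batch
print(f"tot_loss={tracker.average:.4f} over {tracker.frames:.2f} frames")
```

Weighting by frames rather than by batch keeps short utterances from dominating the reported average, which is why batches of very different sizes (65 vs. 133 above) can be pooled into one `tot_loss`.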
+2024-09-01 02:52:13,657 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 02:52:38,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=111034.66666666667, ans=0.025 +2024-09-01 02:52:52,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=111034.66666666667, ans=0.5 +2024-09-01 02:52:54,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-09-01 02:53:56,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=111194.66666666667, ans=0.125 +2024-09-01 02:53:59,380 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 1.870e+02 1.982e+02 2.091e+02 2.808e+02, threshold=3.964e+02, percent-clipped=0.0 +2024-09-01 02:54:35,614 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 50, loss[loss=0.1758, simple_loss=0.1878, pruned_loss=0.05534, ctc_loss=0.1329, over 19057.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.1853, pruned_loss=0.06877, ctc_loss=0.1476, over 827125.84 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:54:58,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111248.0, ans=0.125 +2024-09-01 02:55:15,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=111248.0, ans=0.0 +2024-09-01 02:55:26,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.38 vs. limit=22.5 +2024-09-01 02:55:56,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.42 vs. limit=15.0 +2024-09-01 02:56:13,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=111354.66666666667, ans=0.0 +2024-09-01 02:58:16,890 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 100, loss[loss=0.1727, simple_loss=0.1708, pruned_loss=0.06052, ctc_loss=0.1337, over 19126.00 frames. ], tot_loss[loss=0.188, simple_loss=0.1837, pruned_loss=0.06731, ctc_loss=0.1442, over 1475165.47 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:59:24,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=111568.0, ans=0.2 +2024-09-01 03:00:07,747 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 03:00:07,747 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:00:07,779 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 03:00:21,392 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 0, loss[loss=0.207, simple_loss=0.2003, pruned_loss=0.07552, ctc_loss=0.1567, over 18559.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2003, pruned_loss=0.07552, ctc_loss=0.1567, over 18559.00 frames. 
], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:00:21,393 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:01:08,467 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 18, validation: loss=0.1758, simple_loss=0.1773, pruned_loss=0.06291, ctc_loss=0.1213, over 1073944.00 frames. +2024-09-01 03:01:08,468 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 03:01:20,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111669.33333333333, ans=0.07 +2024-09-01 03:02:04,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 1.863e+02 1.965e+02 2.122e+02 2.833e+02, threshold=3.929e+02, percent-clipped=0.0 +2024-09-01 03:02:31,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111776.0, ans=0.125 +2024-09-01 03:04:29,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=111776.0, ans=0.125 +2024-09-01 03:04:54,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=111829.33333333333, ans=0.0 +2024-09-01 03:05:04,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111829.33333333333, ans=0.125 +2024-09-01 03:05:55,541 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 50, loss[loss=0.2046, simple_loss=0.1999, pruned_loss=0.07307, ctc_loss=0.1577, over 18975.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.1831, pruned_loss=0.06615, ctc_loss=0.1438, over 827610.12 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:06:11,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=111936.0, ans=0.125 +2024-09-01 03:06:16,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.71 vs. limit=22.5 +2024-09-01 03:07:44,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=111989.33333333333, ans=0.5 +2024-09-01 03:08:53,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=112042.66666666667, ans=0.5 +2024-09-01 03:09:40,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=112149.33333333333, ans=0.125 +2024-09-01 03:09:43,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112149.33333333333, ans=0.1 +2024-09-01 03:10:04,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=112202.66666666667, ans=0.035 +2024-09-01 03:11:02,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=112202.66666666667, ans=0.0 +2024-09-01 03:11:05,713 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 100, loss[loss=0.1682, simple_loss=0.1659, pruned_loss=0.05914, ctc_loss=0.1308, over 19135.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.1807, pruned_loss=0.06495, ctc_loss=0.1408, over 1477220.69 frames. 
], batch size: 133, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:11:51,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.826e+02 1.931e+02 2.035e+02 3.279e+02, threshold=3.861e+02, percent-clipped=0.0 +2024-09-01 03:11:59,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=8.0 +2024-09-01 03:12:26,078 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 03:12:26,079 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:12:26,110 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 03:12:34,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.42 vs. limit=15.0 +2024-09-01 03:12:38,678 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 0, loss[loss=0.1939, simple_loss=0.1869, pruned_loss=0.0716, ctc_loss=0.1444, over 18438.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.1869, pruned_loss=0.0716, ctc_loss=0.1444, over 18438.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:12:38,678 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:13:02,321 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 19, validation: loss=0.1735, simple_loss=0.1751, pruned_loss=0.06201, ctc_loss=0.1194, over 1073944.00 frames. +2024-09-01 03:13:02,321 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 03:13:19,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112352.0, ans=0.125 +2024-09-01 03:13:29,377 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:13:31,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112405.33333333333, ans=0.0 +2024-09-01 03:13:52,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.77 vs. limit=10.0 +2024-09-01 03:13:57,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 03:14:18,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=112512.0, ans=0.0 +2024-09-01 03:14:22,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=112512.0, ans=0.125 +2024-09-01 03:14:33,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-09-01 03:14:36,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=112565.33333333333, ans=0.025 +2024-09-01 03:14:48,752 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 50, loss[loss=0.1785, simple_loss=0.1834, pruned_loss=0.05929, ctc_loss=0.1374, over 19013.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.181, pruned_loss=0.06746, ctc_loss=0.1465, over 827262.88 frames. 
], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:14:56,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0 +2024-09-01 03:15:00,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.37 vs. limit=10.0 +2024-09-01 03:15:10,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=10.70 vs. limit=12.0 +2024-09-01 03:15:12,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=112672.0, ans=0.125 +2024-09-01 03:15:14,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112672.0, ans=0.0 +2024-09-01 03:15:24,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-09-01 03:15:42,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112725.33333333333, ans=0.125 +2024-09-01 03:15:46,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5 +2024-09-01 03:15:46,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=13.36 vs. limit=12.0 +2024-09-01 03:15:48,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=112778.66666666667, ans=0.2 +2024-09-01 03:15:52,329 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:15:56,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=112778.66666666667, ans=0.2 +2024-09-01 03:16:00,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.65 vs. limit=10.0 +2024-09-01 03:16:00,811 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 1.859e+02 1.957e+02 2.051e+02 3.574e+02, threshold=3.914e+02, percent-clipped=0.0 +2024-09-01 03:16:13,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112832.0, ans=0.1 +2024-09-01 03:16:28,716 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 100, loss[loss=0.1433, simple_loss=0.1459, pruned_loss=0.04748, ctc_loss=0.1146, over 19169.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.1807, pruned_loss=0.06717, ctc_loss=0.1437, over 1475351.90 frames. 
], batch size: 134, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:16:31,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=112885.33333333333, ans=0.125 +2024-09-01 03:16:47,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112938.66666666667, ans=0.1 +2024-09-01 03:17:16,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=112992.0, ans=0.2 +2024-09-01 03:17:22,980 INFO [dysarthria_finetune.py:1435] (2/4) (10731847680, 34072559616) +2024-09-01 03:17:22,981 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:17:23,024 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 03:17:37,125 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 0, loss[loss=0.2007, simple_loss=0.1982, pruned_loss=0.07284, ctc_loss=0.144, over 18527.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.1982, pruned_loss=0.07284, ctc_loss=0.144, over 18527.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:17:37,125 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:18:00,787 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 20, validation: loss=0.1713, simple_loss=0.1732, pruned_loss=0.06117, ctc_loss=0.1175, over 1073944.00 frames. +2024-09-01 03:18:00,788 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19762MB +2024-09-01 03:18:10,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=113040.0, ans=0.125 +2024-09-01 03:18:44,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=25.37 vs. limit=15.0 +2024-09-01 03:18:57,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.17 vs. limit=15.0 +2024-09-01 03:19:07,181 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:19:07,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.93 vs. limit=15.0 +2024-09-01 03:19:32,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113253.33333333333, ans=0.1 +2024-09-01 03:19:32,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.15 vs. limit=15.0 +2024-09-01 03:19:39,005 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 50, loss[loss=0.1959, simple_loss=0.1899, pruned_loss=0.06938, ctc_loss=0.1579, over 18968.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.1767, pruned_loss=0.06392, ctc_loss=0.1395, over 828106.18 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:19:42,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.65 vs. 
limit=15.0 +2024-09-01 03:19:52,484 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 1.810e+02 1.894e+02 2.049e+02 3.111e+02, threshold=3.788e+02, percent-clipped=0.0 +2024-09-01 03:19:55,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113306.66666666667, ans=0.125 +2024-09-01 03:21:01,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=113520.0, ans=0.09899494936611666 +2024-09-01 03:21:15,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.47 vs. limit=22.5 +2024-09-01 03:21:15,878 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 100, loss[loss=0.1521, simple_loss=0.1573, pruned_loss=0.04963, ctc_loss=0.1189, over 19074.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.1758, pruned_loss=0.06325, ctc_loss=0.1377, over 1476081.83 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:21:28,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-09-01 03:21:35,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=113626.66666666667, ans=0.025 +2024-09-01 03:21:43,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=113626.66666666667, ans=0.025 +2024-09-01 03:21:50,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 03:22:02,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.56 vs. limit=22.5 +2024-09-01 03:22:08,709 INFO [dysarthria_finetune.py:1435] (2/4) (10733944832, 34072559616) +2024-09-01 03:22:08,710 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:22:08,730 INFO [dysarthria_finetune.py:1440] (2/4) (29526523904, 34072559616) +2024-09-01 03:22:08,730 INFO [dysarthria_finetune.py:1442] (2/4) Done! 
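The `(free_bytes, total_bytes)` tuples bracketing each "Empty cache: before and after" record match what `torch.cuda.mem_get_info()` returns before and after releasing the caching allocator's unused blocks. A sketch of that logging pattern using standard PyTorch calls; the `log_cuda_memory` wrapper is a stand-in, not the training script's own function.

```python
import logging

import torch

logging.basicConfig(level=logging.INFO)


def log_cuda_memory(device: int = 0) -> None:
    # (free_bytes, total_bytes), as in the "(..., 34072559616)" records above
    logging.info("%s", torch.cuda.mem_get_info(device))
    logging.info("Empty cache: before and after")
    torch.cuda.empty_cache()  # hand cached but unused blocks back to the driver
    logging.info("%s", torch.cuda.mem_get_info(device))


if torch.cuda.is_available():
    log_cuda_memory()
```

Note that `empty_cache()` only returns memory PyTorch has cached but is not using; live tensors are unaffected, which is why the totals above stay fixed while the free figure jumps between epochs.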
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-3 b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-3 new file mode 100644 index 0000000000000000000000000000000000000000..7a3412c47a45bca124d8223196fd9f1c07ceb8f2 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/log/log-train-2024-08-31-22-13-17-3 @@ -0,0 +1,546 @@ +2024-08-31 22:13:17,927 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-31 22:13:17,967 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-31 22:13:17,968 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-31 22:13:18,721 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616) +2024-08-31 22:13:18,721 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-31 22:13:18,725 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 
'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 22:13:18,725 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-31 22:13:21,126 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65805511 +2024-08-31 22:14:30,940 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-31 22:14:32,110 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-31 22:14:37,200 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-31 22:14:37,263 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-31 22:14:37,991 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-31 22:14:37,992 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-31 22:14:55,323 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-31 22:14:56,267 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-31 22:14:56,272 INFO [dysarthria_asr_datamodule.py:501] (3/4) About to get dev cuts +2024-08-31 22:14:56,477 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-31 22:14:57,476 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-31 22:14:57,476 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 22:16:23,767 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=16.32 vs. limit=7.5 +2024-08-31 22:16:31,024 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=21.06 vs. limit=7.5 +2024-08-31 22:16:34,157 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:16:36,229 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:17:53,348 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:17:55,325 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:19:46,278 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:19:48,462 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11828MB +2024-08-31 22:20:26,675 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3339, simple_loss=0.2734, pruned_loss=0.1449, ctc_loss=0.2303, over 18634.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.2734, pruned_loss=0.1449, ctc_loss=0.2303, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 22:20:26,675 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-31 22:32:57,030 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.373, simple_loss=0.3046, pruned_loss=0.1755, ctc_loss=0.2544, over 1073944.00 frames. 
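The datamodule records above trace lhotse's duration-bucketed sampling setup. A minimal sketch using the hyperparameters from the config dump (`max_duration=800`, `num_buckets=20`, `shuffle=True`, `drop_last=True`, `num_workers=0`); the manifest path is a placeholder, and the recipe's own dataset class may wrap or subclass the stock lhotse one shown here.

```python
import torch
from lhotse import CutSet
from lhotse.dataset import DynamicBucketingSampler, K2SpeechRecognitionDataset

# Placeholder manifest; the run above loads cuts from its own manifest dir.
cuts = CutSet.from_file("data/manifests/train_cuts.jsonl.gz")

sampler = DynamicBucketingSampler(
    cuts,
    max_duration=800.0,  # seconds of audio per batch ('max_duration': 800)
    num_buckets=20,      # 'num_buckets': 20
    shuffle=True,
    drop_last=True,
)

dataset = K2SpeechRecognitionDataset(return_cuts=True)

# Lhotse samplers emit whole batches of cuts, so batch_size must be None.
train_dl = torch.utils.data.DataLoader(
    dataset,
    sampler=sampler,
    batch_size=None,
    num_workers=0,  # 'num_workers': 0 in the config dump
)
```

Bucketing by duration keeps utterances of similar length in the same batch, which reduces padding waste and explains the varying batch sizes (65, 102, 133, ...) reported in the loss records.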
+2024-08-31 22:32:57,064 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14282MB +2024-08-31 22:34:42,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.50 vs. limit=10.0 +2024-08-31 22:48:08,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-31 22:48:40,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=29.16 vs. limit=22.5 +2024-08-31 22:51:26,991 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.923e+02 1.157e+03 1.203e+03 1.280e+03 1.380e+03, threshold=4.812e+03, percent-clipped=0.0 +2024-08-31 23:03:06,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.113e+02 1.083e+03 1.198e+03 1.280e+03 1.431e+03, threshold=4.794e+03, percent-clipped=0.0 +2024-08-31 23:06:52,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.65 vs. limit=15.0 +2024-08-31 23:29:32,327 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.285e+02 9.052e+02 1.061e+03 1.198e+03 1.431e+03, threshold=4.243e+03, percent-clipped=0.0 +2024-08-31 23:31:39,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=100213.33333333333, ans=0.09899494936611666 +2024-08-31 23:49:33,585 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.4371, simple_loss=0.3501, pruned_loss=0.2139, ctc_loss=0.3259, over 19001.00 frames. ], tot_loss[loss=0.4103, simple_loss=0.3317, pruned_loss=0.1979, ctc_loss=0.2931, over 828973.50 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 23:53:22,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=100266.66666666667, ans=0.125 +2024-09-01 00:02:21,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100320.0, ans=0.125 +2024-09-01 00:05:08,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=15.0 +2024-09-01 00:11:34,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.27 vs. limit=15.0 +2024-09-01 00:18:31,026 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+02 6.817e+02 8.321e+02 1.009e+03 1.431e+03, threshold=1.664e+03, percent-clipped=0.0 +2024-09-01 00:18:31,060 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.3806, simple_loss=0.3074, pruned_loss=0.1808, ctc_loss=0.2732, over 19146.00 frames. ], tot_loss[loss=0.3942, simple_loss=0.3192, pruned_loss=0.1878, ctc_loss=0.2804, over 1476162.18 frames. 
], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-09-01 00:28:33,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100640.0, ans=0.125 +2024-09-01 00:28:35,485 INFO [dysarthria_finetune.py:1435] (3/4) (13281984512, 34072559616) +2024-09-01 00:28:35,486 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 00:28:35,512 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 00:29:13,550 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 0, loss[loss=0.3599, simple_loss=0.2961, pruned_loss=0.1555, ctc_loss=0.2437, over 18501.00 frames. ], tot_loss[loss=0.3599, simple_loss=0.2961, pruned_loss=0.1555, ctc_loss=0.2437, over 18501.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-09-01 00:29:13,551 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 00:34:07,569 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 2, validation: loss=0.3353, simple_loss=0.2773, pruned_loss=0.1482, ctc_loss=0.2175, over 1073944.00 frames. +2024-09-01 00:34:07,570 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 00:35:35,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.09 vs. limit=15.0 +2024-09-01 00:50:34,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-09-01 00:51:25,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=100789.33333333333, ans=0.05 +2024-09-01 00:51:25,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=100789.33333333333, ans=0.125 +2024-09-01 00:55:05,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0 +2024-09-01 00:56:09,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0 +2024-09-01 00:56:43,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=100896.0, ans=0.2 +2024-09-01 00:57:03,205 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 00:58:34,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.40 vs. limit=15.0 +2024-09-01 00:59:42,896 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 50, loss[loss=0.3911, simple_loss=0.3179, pruned_loss=0.1808, ctc_loss=0.2741, over 18956.00 frames. ], tot_loss[loss=0.3739, simple_loss=0.3043, pruned_loss=0.1721, ctc_loss=0.2609, over 828460.00 frames. 
], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-09-01 01:02:49,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-09-01 01:08:04,415 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.183e+02 4.403e+02 5.126e+02 5.917e+02 6.888e+02, threshold=1.025e+03, percent-clipped=0.0 +2024-09-01 01:09:21,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101056.0, ans=0.125 +2024-09-01 01:10:44,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-09-01 01:10:53,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-09-01 01:14:19,318 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 100, loss[loss=0.3113, simple_loss=0.258, pruned_loss=0.131, ctc_loss=0.2073, over 19077.00 frames. ], tot_loss[loss=0.361, simple_loss=0.2949, pruned_loss=0.1631, ctc_loss=0.2494, over 1476919.42 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-09-01 01:14:24,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=101216.0, ans=0.025 +2024-09-01 01:15:08,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101216.0, ans=0.1 +2024-09-01 01:15:23,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=18.27 vs. limit=15.0 +2024-09-01 01:17:26,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-09-01 01:18:50,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=101322.66666666667, ans=0.0 +2024-09-01 01:19:12,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=101322.66666666667, ans=0.0 +2024-09-01 01:20:51,921 INFO [dysarthria_finetune.py:1435] (3/4) (13305053184, 34072559616) +2024-09-01 01:20:51,922 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:20:51,954 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 01:21:02,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-09-01 01:21:21,463 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 0, loss[loss=0.3055, simple_loss=0.2542, pruned_loss=0.1262, ctc_loss=0.2024, over 18579.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.2542, pruned_loss=0.1262, ctc_loss=0.2024, over 18579.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-09-01 01:21:21,463 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:21:44,700 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 3, validation: loss=0.309, simple_loss=0.2588, pruned_loss=0.13, ctc_loss=0.1938, over 1073944.00 frames. 
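The dense `ScheduledFloat: name=..., batch_count=..., ans=...` records report module hyperparameters (skip rates, dropout probabilities, balancer targets) whose values vary with the global batch count. The toy piecewise-linear stand-in below conveys the idea only; it is not icefall's actual `ScheduledFloat` from scaling.py, whose implementation is more involved.

```python
# Toy stand-in for a value scheduled over batch_count; illustrative only.

class PiecewiseLinearFloat:
    """A float interpolated linearly between (batch_count, value) breakpoints,
    clamped to the endpoint values outside the scheduled range."""

    def __init__(self, *points: tuple) -> None:
        self.points = sorted(points)  # (batch_count, value) pairs

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
        raise AssertionError("unreachable")


# e.g. a skip rate decaying from 0.1 at batch 0 to 0.0 by batch 20000:
skip_rate = PiecewiseLinearFloat((0.0, 0.1), (20000.0, 0.0))
print(skip_rate(101056.0))  # -> 0.0, as in the ans=0.0 skip-rate records above
```

Since this fine-tuning run starts around batch_count 100000, most scheduled rates have already decayed to their final values, which is consistent with the many `ans=0.0` entries in the log.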
+2024-09-01 01:21:44,700 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 01:23:14,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.97 vs. limit=15.0 +2024-09-01 01:23:34,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.23 vs. limit=6.0 +2024-09-01 01:23:47,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101477.33333333333, ans=0.125 +2024-09-01 01:24:27,008 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.351e+02 3.834e+02 4.204e+02 5.264e+02, threshold=7.667e+02, percent-clipped=0.0 +2024-09-01 01:24:48,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=101637.33333333333, ans=0.125 +2024-09-01 01:24:48,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=101637.33333333333, ans=0.5 +2024-09-01 01:24:49,051 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 50, loss[loss=0.3404, simple_loss=0.2856, pruned_loss=0.1373, ctc_loss=0.2202, over 19113.00 frames. ], tot_loss[loss=0.345, simple_loss=0.2839, pruned_loss=0.1496, ctc_loss=0.2359, over 827781.85 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-09-01 01:24:50,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=101637.33333333333, ans=0.2 +2024-09-01 01:24:51,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.30 vs. limit=15.0 +2024-09-01 01:25:13,295 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.61 vs. limit=6.0 +2024-09-01 01:25:39,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101690.66666666667, ans=0.0 +2024-09-01 01:26:11,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=15.0 +2024-09-01 01:26:33,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=101797.33333333333, ans=0.125 +2024-09-01 01:26:44,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:26:48,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 01:27:00,716 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 100, loss[loss=0.306, simple_loss=0.2566, pruned_loss=0.1242, ctc_loss=0.1998, over 19145.00 frames. ], tot_loss[loss=0.3332, simple_loss=0.2751, pruned_loss=0.1431, ctc_loss=0.226, over 1476240.06 frames. 
], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-09-01 01:28:06,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=102010.66666666667, ans=0.125 +2024-09-01 01:29:32,077 INFO [dysarthria_finetune.py:1435] (3/4) (14445903872, 34072559616) +2024-09-01 01:29:32,078 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:29:32,119 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 01:29:45,321 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 0, loss[loss=0.3649, simple_loss=0.2971, pruned_loss=0.1592, ctc_loss=0.2645, over 18645.00 frames. ], tot_loss[loss=0.3649, simple_loss=0.2971, pruned_loss=0.1592, ctc_loss=0.2645, over 18645.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-09-01 01:29:45,321 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:30:08,494 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 4, validation: loss=0.2887, simple_loss=0.2447, pruned_loss=0.1169, ctc_loss=0.1781, over 1073944.00 frames. +2024-09-01 01:30:08,495 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 01:30:42,268 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.378e+02 2.838e+02 3.147e+02 3.460e+02 5.318e+02, threshold=6.294e+02, percent-clipped=0.0 +2024-09-01 01:30:47,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102106.66666666667, ans=0.125 +2024-09-01 01:31:06,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=102160.0, ans=0.5 +2024-09-01 01:31:19,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102213.33333333333, ans=0.0 +2024-09-01 01:32:02,185 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 50, loss[loss=0.3091, simple_loss=0.2589, pruned_loss=0.1289, ctc_loss=0.2013, over 18993.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.2707, pruned_loss=0.1388, ctc_loss=0.2273, over 827748.42 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-09-01 01:32:15,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.55 vs. 
limit=15.0 +2024-09-01 01:32:50,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=102426.66666666667, ans=0.2 +2024-09-01 01:32:55,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=102426.66666666667, ans=22.5 +2024-09-01 01:32:56,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=102426.66666666667, ans=0.0 +2024-09-01 01:33:00,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=102480.0, ans=0.0 +2024-09-01 01:33:22,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102480.0, ans=0.0 +2024-09-01 01:35:45,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=102480.0, ans=0.5 +2024-09-01 01:35:51,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=102480.0, ans=0.125 +2024-09-01 01:37:40,025 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 100, loss[loss=0.3241, simple_loss=0.2658, pruned_loss=0.141, ctc_loss=0.2283, over 19161.00 frames. ], tot_loss[loss=0.317, simple_loss=0.2629, pruned_loss=0.1328, ctc_loss=0.2172, over 1475350.41 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-09-01 01:37:59,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.24 vs. limit=15.0 +2024-09-01 01:38:09,931 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 2.526e+02 2.751e+02 3.040e+02 4.636e+02, threshold=5.501e+02, percent-clipped=0.0 +2024-09-01 01:38:21,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.64 vs. limit=22.5 +2024-09-01 01:38:32,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-09-01 01:38:35,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=102693.33333333333, ans=0.125 +2024-09-01 01:38:35,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=102693.33333333333, ans=15.0 +2024-09-01 01:38:35,835 INFO [dysarthria_finetune.py:1435] (3/4) (545980416, 34072559616) +2024-09-01 01:38:35,835 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:38:35,904 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 01:38:49,735 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 0, loss[loss=0.2823, simple_loss=0.2383, pruned_loss=0.1151, ctc_loss=0.1824, over 18566.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.2383, pruned_loss=0.1151, ctc_loss=0.1824, over 18566.00 frames. 
], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:38:49,736 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:39:30,980 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 5, validation: loss=0.2717, simple_loss=0.233, pruned_loss=0.1066, ctc_loss=0.1665, over 1073944.00 frames. +2024-09-01 01:39:30,980 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 01:41:16,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102794.66666666667, ans=0.0 +2024-09-01 01:41:29,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 01:43:25,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=13.47 vs. limit=15.0 +2024-09-01 01:43:55,517 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 50, loss[loss=0.3143, simple_loss=0.2635, pruned_loss=0.1299, ctc_loss=0.2099, over 18976.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.251, pruned_loss=0.1211, ctc_loss=0.2051, over 827749.28 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:44:01,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=15.0 +2024-09-01 01:44:04,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=103008.0, ans=0.0 +2024-09-01 01:44:50,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103061.33333333333, ans=0.125 +2024-09-01 01:45:15,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:45:35,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 01:46:04,697 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 2.382e+02 2.524e+02 2.770e+02 4.371e+02, threshold=5.047e+02, percent-clipped=0.0 +2024-09-01 01:46:13,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=103168.0, ans=0.2 +2024-09-01 01:46:25,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-09-01 01:46:52,899 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 100, loss[loss=0.2783, simple_loss=0.2383, pruned_loss=0.1066, ctc_loss=0.1825, over 19091.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.2477, pruned_loss=0.1192, ctc_loss=0.2019, over 1475913.16 frames. 
], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-09-01 01:47:10,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=103274.66666666667, ans=0.125 +2024-09-01 01:47:12,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=103274.66666666667, ans=0.0 +2024-09-01 01:48:10,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.05 vs. limit=6.0 +2024-09-01 01:48:20,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103381.33333333333, ans=0.0 +2024-09-01 01:48:26,073 INFO [dysarthria_finetune.py:1435] (3/4) (14450098176, 34072559616) +2024-09-01 01:48:26,074 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:48:26,115 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 01:48:41,998 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 0, loss[loss=0.2784, simple_loss=0.235, pruned_loss=0.1134, ctc_loss=0.1853, over 18684.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.235, pruned_loss=0.1134, ctc_loss=0.1853, over 18684.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:48:41,999 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:49:05,144 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 6, validation: loss=0.2578, simple_loss=0.2238, pruned_loss=0.09861, ctc_loss=0.1582, over 1073944.00 frames. +2024-09-01 01:49:05,145 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 01:49:20,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.60 vs. limit=15.0 +2024-09-01 01:49:23,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=103424.0, ans=22.5 +2024-09-01 01:49:38,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.17 vs. limit=15.0 +2024-09-01 01:49:45,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=103477.33333333333, ans=0.125 +2024-09-01 01:49:46,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=28.45 vs. limit=22.5 +2024-09-01 01:49:56,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103530.66666666667, ans=0.0 +2024-09-01 01:50:08,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103584.0, ans=0.1 +2024-09-01 01:50:14,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=103584.0, ans=0.025 +2024-09-01 01:50:52,019 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 50, loss[loss=0.2816, simple_loss=0.2394, pruned_loss=0.1123, ctc_loss=0.1872, over 19058.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.241, pruned_loss=0.113, ctc_loss=0.1976, over 828493.81 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:50:57,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=103690.66666666667, ans=0.0 +2024-09-01 01:50:57,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 01:51:08,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 2.277e+02 2.375e+02 2.614e+02 3.891e+02, threshold=4.750e+02, percent-clipped=0.0 +2024-09-01 01:51:26,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103744.0, ans=0.125 +2024-09-01 01:51:32,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=103797.33333333333, ans=0.2 +2024-09-01 01:51:59,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0 +2024-09-01 01:52:34,172 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 100, loss[loss=0.262, simple_loss=0.223, pruned_loss=0.1007, ctc_loss=0.1837, over 19113.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.2395, pruned_loss=0.1117, ctc_loss=0.195, over 1475249.23 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:52:53,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=103957.33333333333, ans=0.0 +2024-09-01 01:52:53,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103957.33333333333, ans=0.125 +2024-09-01 01:53:10,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104010.66666666667, ans=0.1 +2024-09-01 01:53:18,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=104064.0, ans=0.125 +2024-09-01 01:53:20,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104064.0, ans=0.125 +2024-09-01 01:53:34,223 INFO [dysarthria_finetune.py:1435] (3/4) (411762688, 34072559616) +2024-09-01 01:53:34,224 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:53:34,291 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 01:53:47,072 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 0, loss[loss=0.3217, simple_loss=0.2649, pruned_loss=0.136, ctc_loss=0.2354, over 18595.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.2649, pruned_loss=0.136, ctc_loss=0.2354, over 18595.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:53:47,072 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:54:10,658 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 7, validation: loss=0.2464, simple_loss=0.2165, pruned_loss=0.09214, ctc_loss=0.1523, over 1073944.00 frames. 
+2024-09-01 01:54:10,658 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 01:55:12,004 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.149e+02 2.268e+02 2.457e+02 3.821e+02, threshold=4.535e+02, percent-clipped=0.0 +2024-09-01 01:55:53,894 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 50, loss[loss=0.2874, simple_loss=0.2453, pruned_loss=0.1128, ctc_loss=0.1976, over 18963.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.2344, pruned_loss=0.1057, ctc_loss=0.1905, over 827887.87 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:56:05,032 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:56:12,819 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:56:18,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=104426.66666666667, ans=0.0 +2024-09-01 01:56:59,225 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:57:08,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=104533.33333333333, ans=0.125 +2024-09-01 01:57:11,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=104586.66666666667, ans=0.125 +2024-09-01 01:57:18,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.29 vs. limit=15.0 +2024-09-01 01:57:30,855 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 100, loss[loss=0.2523, simple_loss=0.22, pruned_loss=0.0936, ctc_loss=0.1706, over 19124.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.2293, pruned_loss=0.1022, ctc_loss=0.1851, over 1475075.17 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:57:32,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.99 vs. limit=12.0 +2024-09-01 01:57:36,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104640.0, ans=0.125 +2024-09-01 01:57:36,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.89 vs. limit=15.0 +2024-09-01 01:57:45,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.88 vs. 
limit=15.0 +2024-09-01 01:57:55,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=104693.33333333333, ans=15.0 +2024-09-01 01:58:12,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=104746.66666666667, ans=0.0 +2024-09-01 01:58:21,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104746.66666666667, ans=0.1 +2024-09-01 01:58:22,459 INFO [dysarthria_finetune.py:1435] (3/4) (13317636096, 34072559616) +2024-09-01 01:58:22,460 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 01:58:22,507 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 01:58:35,306 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 0, loss[loss=0.257, simple_loss=0.2233, pruned_loss=0.09832, ctc_loss=0.1715, over 18547.00 frames. ], tot_loss[loss=0.257, simple_loss=0.2233, pruned_loss=0.09832, ctc_loss=0.1715, over 18547.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 01:58:35,307 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 01:58:58,371 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 8, validation: loss=0.236, simple_loss=0.2103, pruned_loss=0.08624, ctc_loss=0.1474, over 1073944.00 frames. +2024-09-01 01:58:58,371 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 01:59:02,414 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.080e+02 2.182e+02 2.331e+02 3.634e+02, threshold=4.365e+02, percent-clipped=0.0 +2024-09-01 01:59:58,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=104949.33333333333, ans=0.125 +2024-09-01 02:00:10,654 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:00:25,808 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:00:34,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.35 vs. limit=15.0 +2024-09-01 02:00:36,425 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 50, loss[loss=0.2715, simple_loss=0.2348, pruned_loss=0.103, ctc_loss=0.1903, over 18964.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.2253, pruned_loss=0.09813, ctc_loss=0.1831, over 828441.23 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:01:40,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=105216.0, ans=0.0 +2024-09-01 02:02:02,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=105269.33333333333, ans=0.125 +2024-09-01 02:02:13,396 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 100, loss[loss=0.2455, simple_loss=0.2194, pruned_loss=0.08673, ctc_loss=0.1662, over 19119.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.2226, pruned_loss=0.09605, ctc_loss=0.1797, over 1475727.62 frames. 
], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-09-01 02:02:17,358 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.064e+02 2.191e+02 2.358e+02 3.385e+02, threshold=4.381e+02, percent-clipped=0.0 +2024-09-01 02:02:41,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105376.0, ans=0.1 +2024-09-01 02:02:41,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=105376.0, ans=0.2 +2024-09-01 02:02:57,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105429.33333333333, ans=0.125 +2024-09-01 02:03:07,396 INFO [dysarthria_finetune.py:1435] (3/4) (428539904, 34072559616) +2024-09-01 02:03:07,397 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:03:07,474 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:03:20,916 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 0, loss[loss=0.2566, simple_loss=0.2288, pruned_loss=0.09494, ctc_loss=0.1661, over 18777.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.2288, pruned_loss=0.09494, ctc_loss=0.1661, over 18777.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:03:20,916 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:03:44,108 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 9, validation: loss=0.2267, simple_loss=0.2052, pruned_loss=0.08107, ctc_loss=0.1434, over 1073944.00 frames. +2024-09-01 02:03:44,109 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 02:04:33,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=105584.0, ans=0.0 +2024-09-01 02:05:01,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.78 vs. limit=15.0 +2024-09-01 02:05:08,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=105637.33333333333, ans=0.1 +2024-09-01 02:05:14,824 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:05:31,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.00 vs. limit=22.5 +2024-09-01 02:05:38,307 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 50, loss[loss=0.2413, simple_loss=0.2176, pruned_loss=0.08417, ctc_loss=0.1654, over 18965.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.2186, pruned_loss=0.09307, ctc_loss=0.1797, over 827503.70 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:05:52,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=105744.0, ans=0.0 +2024-09-01 02:06:11,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.54 vs. 
limit=15.0 +2024-09-01 02:06:24,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105850.66666666667, ans=0.1 +2024-09-01 02:06:31,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:06:31,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.876e+02 2.077e+02 2.184e+02 2.316e+02 3.584e+02, threshold=4.367e+02, percent-clipped=0.0 +2024-09-01 02:06:46,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:07:32,678 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 100, loss[loss=0.2162, simple_loss=0.198, pruned_loss=0.0729, ctc_loss=0.1484, over 19159.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.2153, pruned_loss=0.09052, ctc_loss=0.175, over 1475225.92 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:07:35,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106010.66666666667, ans=0.0 +2024-09-01 02:07:45,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.58 vs. limit=22.5 +2024-09-01 02:08:04,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=106064.0, ans=0.0 +2024-09-01 02:08:33,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=106117.33333333333, ans=0.05 +2024-09-01 02:08:34,530 INFO [dysarthria_finetune.py:1435] (3/4) (13296664576, 34072559616) +2024-09-01 02:08:34,611 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:08:34,655 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:08:48,148 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 0, loss[loss=0.2323, simple_loss=0.2072, pruned_loss=0.08459, ctc_loss=0.1627, over 18587.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.2072, pruned_loss=0.08459, ctc_loss=0.1627, over 18587.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:08:48,149 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:09:26,929 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 10, validation: loss=0.2182, simple_loss=0.2007, pruned_loss=0.07671, ctc_loss=0.1399, over 1073944.00 frames. +2024-09-01 02:09:26,929 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 02:09:40,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106165.33333333333, ans=0.1 +2024-09-01 02:10:57,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. 
limit=6.0 +2024-09-01 02:11:54,661 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.023e+02 2.117e+02 2.323e+02 3.505e+02, threshold=4.234e+02, percent-clipped=0.0 +2024-09-01 02:12:15,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:12:15,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:12:16,363 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 50, loss[loss=0.2345, simple_loss=0.2211, pruned_loss=0.07679, ctc_loss=0.1539, over 19101.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2118, pruned_loss=0.08627, ctc_loss=0.1741, over 827631.91 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:12:32,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:13:07,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=106485.33333333333, ans=0.09899494936611666 +2024-09-01 02:13:33,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=106538.66666666667, ans=0.2 +2024-09-01 02:13:33,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=15.0 +2024-09-01 02:14:04,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.41 vs. limit=22.5 +2024-09-01 02:14:14,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=106592.0, ans=0.0 +2024-09-01 02:14:19,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106592.0, ans=0.125 +2024-09-01 02:15:12,133 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 100, loss[loss=0.2145, simple_loss=0.1977, pruned_loss=0.07197, ctc_loss=0.1551, over 19051.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2113, pruned_loss=0.08504, ctc_loss=0.1712, over 1475773.03 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-09-01 02:15:32,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=106698.66666666667, ans=0.5 +2024-09-01 02:15:56,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.82 vs. limit=15.0 +2024-09-01 02:16:29,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=106805.33333333333, ans=0.0 +2024-09-01 02:16:31,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=106805.33333333333, ans=0.2 +2024-09-01 02:16:32,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.69 vs. 
limit=12.0 +2024-09-01 02:16:36,645 INFO [dysarthria_finetune.py:1435] (3/4) (13290373120, 34072559616) +2024-09-01 02:16:36,645 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:16:36,696 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:16:49,155 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 0, loss[loss=0.2535, simple_loss=0.2222, pruned_loss=0.09401, ctc_loss=0.1949, over 18604.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.2222, pruned_loss=0.09401, ctc_loss=0.1949, over 18604.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:16:49,155 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:17:12,841 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 11, validation: loss=0.211, simple_loss=0.1968, pruned_loss=0.07375, ctc_loss=0.137, over 1073944.00 frames. +2024-09-01 02:17:12,841 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 02:17:43,500 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 1.989e+02 2.082e+02 2.188e+02 3.029e+02, threshold=4.165e+02, percent-clipped=0.0 +2024-09-01 02:17:44,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=106906.66666666667, ans=0.0 +2024-09-01 02:17:56,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=106960.0, ans=10.0 +2024-09-01 02:18:04,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=106960.0, ans=0.125 +2024-09-01 02:18:06,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=106960.0, ans=0.125 +2024-09-01 02:18:44,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=107066.66666666667, ans=0.0 +2024-09-01 02:18:53,615 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 50, loss[loss=0.2405, simple_loss=0.2142, pruned_loss=0.08627, ctc_loss=0.1867, over 19110.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2014, pruned_loss=0.0772, ctc_loss=0.1612, over 828132.31 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:18:55,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107120.0, ans=0.125 +2024-09-01 02:19:29,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=107173.33333333333, ans=0.025 +2024-09-01 02:20:13,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107280.0, ans=0.1 +2024-09-01 02:20:35,684 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 100, loss[loss=0.2002, simple_loss=0.1919, pruned_loss=0.0629, ctc_loss=0.1462, over 19127.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2039, pruned_loss=0.07926, ctc_loss=0.1635, over 1475363.18 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:20:43,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=107386.66666666667, ans=0.04949747468305833 +2024-09-01 02:20:43,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=10.30 vs. limit=12.0 +2024-09-01 02:20:47,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=107386.66666666667, ans=0.0 +2024-09-01 02:21:05,346 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.795e+02 1.934e+02 2.032e+02 2.152e+02 3.346e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-09-01 02:21:08,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107440.0, ans=0.125 +2024-09-01 02:21:25,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.94 vs. limit=15.0 +2024-09-01 02:21:30,080 INFO [dysarthria_finetune.py:1435] (3/4) (688586752, 34072559616) +2024-09-01 02:21:30,081 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:21:30,160 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:21:45,324 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 0, loss[loss=0.2243, simple_loss=0.2054, pruned_loss=0.08135, ctc_loss=0.158, over 18650.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2054, pruned_loss=0.08135, ctc_loss=0.158, over 18650.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:21:45,324 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:22:12,234 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 12, validation: loss=0.2042, simple_loss=0.1932, pruned_loss=0.07127, ctc_loss=0.1341, over 1073944.00 frames. +2024-09-01 02:22:12,234 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14285MB +2024-09-01 02:22:35,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=107541.33333333333, ans=0.0 +2024-09-01 02:22:58,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107594.66666666667, ans=0.0 +2024-09-01 02:23:03,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=107594.66666666667, ans=0.0 +2024-09-01 02:23:24,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=107648.0, ans=0.125 +2024-09-01 02:24:02,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=107754.66666666667, ans=0.0 +2024-09-01 02:24:27,774 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 50, loss[loss=0.2162, simple_loss=0.2053, pruned_loss=0.0726, ctc_loss=0.1549, over 19037.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2026, pruned_loss=0.07876, ctc_loss=0.1634, over 828666.57 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:24:53,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107861.33333333333, ans=0.1 +2024-09-01 02:25:24,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-09-01 02:25:31,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-09-01 02:25:37,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107914.66666666667, ans=0.125 +2024-09-01 02:25:48,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107968.0, ans=0.1 +2024-09-01 02:25:55,568 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.770e+02 1.958e+02 2.051e+02 2.245e+02 3.047e+02, threshold=4.102e+02, percent-clipped=0.0 +2024-09-01 02:25:58,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=107968.0, ans=0.0 +2024-09-01 02:26:05,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=108021.33333333333, ans=0.0 +2024-09-01 02:26:30,879 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 100, loss[loss=0.1882, simple_loss=0.1771, pruned_loss=0.06143, ctc_loss=0.1507, over 19142.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2012, pruned_loss=0.077, ctc_loss=0.1599, over 1477170.44 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-09-01 02:26:53,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=108128.0, ans=0.2 +2024-09-01 02:27:31,271 INFO [dysarthria_finetune.py:1435] (3/4) (14458486784, 34072559616) +2024-09-01 02:27:31,272 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:27:31,315 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:27:44,329 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 0, loss[loss=0.2076, simple_loss=0.1909, pruned_loss=0.07786, ctc_loss=0.1427, over 18629.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.1909, pruned_loss=0.07786, ctc_loss=0.1427, over 18629.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:27:44,329 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:27:51,180 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([7.8226, 7.8059, 7.8470, 7.7607], device='cuda:3') +2024-09-01 02:28:07,310 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 13, validation: loss=0.1981, simple_loss=0.19, pruned_loss=0.06934, ctc_loss=0.1316, over 1073944.00 frames. 
+2024-09-01 02:28:07,310 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 02:28:25,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108229.33333333333, ans=0.0 +2024-09-01 02:28:33,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 02:28:47,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=108336.0, ans=0.95 +2024-09-01 02:28:57,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108336.0, ans=0.125 +2024-09-01 02:29:01,445 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:29:26,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=108442.66666666667, ans=0.2 +2024-09-01 02:29:47,492 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 50, loss[loss=0.2123, simple_loss=0.1976, pruned_loss=0.07518, ctc_loss=0.16, over 19050.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.199, pruned_loss=0.077, ctc_loss=0.1615, over 828311.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:29:57,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=108496.0, ans=0.025 +2024-09-01 02:30:01,822 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 1.921e+02 2.017e+02 2.151e+02 2.785e+02, threshold=4.034e+02, percent-clipped=0.0 +2024-09-01 02:30:13,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108549.33333333333, ans=0.1 +2024-09-01 02:30:15,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0 +2024-09-01 02:30:28,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=108602.66666666667, ans=0.0 +2024-09-01 02:30:34,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=108602.66666666667, ans=0.125 +2024-09-01 02:30:44,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=108602.66666666667, ans=0.1 +2024-09-01 02:30:49,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108656.0, ans=0.1 +2024-09-01 02:30:49,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-09-01 02:31:13,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 02:31:25,779 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 100, loss[loss=0.2117, simple_loss=0.1989, pruned_loss=0.07476, ctc_loss=0.1586, over 19095.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.1982, pruned_loss=0.07565, ctc_loss=0.158, over 1474662.24 frames. 
], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:31:28,824 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:31:40,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=108762.66666666667, ans=0.025 +2024-09-01 02:31:59,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=108816.0, ans=0.2 +2024-09-01 02:32:12,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=108869.33333333333, ans=0.2 +2024-09-01 02:32:14,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=108869.33333333333, ans=0.025 +2024-09-01 02:32:19,307 INFO [dysarthria_finetune.py:1435] (3/4) (10194976768, 34072559616) +2024-09-01 02:32:27,537 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:32:27,576 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:33:00,722 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 0, loss[loss=0.2422, simple_loss=0.2202, pruned_loss=0.09329, ctc_loss=0.1727, over 18650.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2202, pruned_loss=0.09329, ctc_loss=0.1727, over 18650.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:33:00,722 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:33:40,869 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.1891, 2.1878, 2.8789, 3.6593], device='cuda:3') +2024-09-01 02:33:44,931 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 14, validation: loss=0.1924, simple_loss=0.1871, pruned_loss=0.06768, ctc_loss=0.1293, over 1073944.00 frames. +2024-09-01 02:33:44,932 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 02:34:19,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=108970.66666666667, ans=0.125 +2024-09-01 02:35:04,341 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:35:09,744 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.754e+02 1.893e+02 1.977e+02 2.192e+02 2.916e+02, threshold=3.954e+02, percent-clipped=0.0 +2024-09-01 02:35:46,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109130.66666666667, ans=0.125 +2024-09-01 02:36:00,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=109130.66666666667, ans=0.0 +2024-09-01 02:36:12,048 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 50, loss[loss=0.1756, simple_loss=0.1826, pruned_loss=0.05069, ctc_loss=0.1366, over 19012.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.197, pruned_loss=0.07558, ctc_loss=0.1602, over 829335.16 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:36:45,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=109184.0, ans=0.125 +2024-09-01 02:37:10,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.49 vs. limit=15.0 +2024-09-01 02:37:17,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109237.33333333333, ans=0.1 +2024-09-01 02:37:32,857 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:38:17,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=109344.0, ans=0.025 +2024-09-01 02:38:42,958 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 100, loss[loss=0.1924, simple_loss=0.1896, pruned_loss=0.06381, ctc_loss=0.1479, over 19114.00 frames. ], tot_loss[loss=0.206, simple_loss=0.1942, pruned_loss=0.07354, ctc_loss=0.1556, over 1476363.01 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-09-01 02:38:55,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.21 vs. limit=15.0 +2024-09-01 02:39:00,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109450.66666666667, ans=0.0 +2024-09-01 02:39:11,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=109504.0, ans=0.2 +2024-09-01 02:39:19,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=109504.0, ans=0.125 +2024-09-01 02:39:26,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109504.0, ans=0.125 +2024-09-01 02:39:27,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=109504.0, ans=0.125 +2024-09-01 02:39:27,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.19 vs. limit=15.0 +2024-09-01 02:39:47,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.49 vs. limit=22.5 +2024-09-01 02:39:50,915 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 1.903e+02 1.972e+02 2.079e+02 2.713e+02, threshold=3.943e+02, percent-clipped=0.0 +2024-09-01 02:39:50,959 INFO [dysarthria_finetune.py:1435] (3/4) (13302956032, 34072559616) +2024-09-01 02:39:50,959 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:39:51,008 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:40:03,799 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 0, loss[loss=0.1968, simple_loss=0.1884, pruned_loss=0.07154, ctc_loss=0.1406, over 18716.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.1884, pruned_loss=0.07154, ctc_loss=0.1406, over 18716.00 frames. 
], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:40:03,799 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:40:34,885 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 15, validation: loss=0.1871, simple_loss=0.1844, pruned_loss=0.06629, ctc_loss=0.1271, over 1073944.00 frames. +2024-09-01 02:40:34,886 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 02:40:36,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-09-01 02:40:40,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-09-01 02:40:51,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109605.33333333333, ans=0.125 +2024-09-01 02:41:54,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=109712.0, ans=0.0 +2024-09-01 02:42:06,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109765.33333333333, ans=0.125 +2024-09-01 02:42:36,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=109818.66666666667, ans=0.125 +2024-09-01 02:43:06,252 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 50, loss[loss=0.167, simple_loss=0.1685, pruned_loss=0.05464, ctc_loss=0.1276, over 19179.00 frames. ], tot_loss[loss=0.198, simple_loss=0.1898, pruned_loss=0.07017, ctc_loss=0.1513, over 827713.24 frames. ], batch size: 103, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:44:00,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109925.33333333333, ans=0.125 +2024-09-01 02:44:24,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=15.0 +2024-09-01 02:44:42,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110032.0, ans=0.125 +2024-09-01 02:44:56,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.83 vs. limit=6.0 +2024-09-01 02:45:26,974 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.707e+02 1.886e+02 2.042e+02 2.162e+02 2.644e+02, threshold=4.084e+02, percent-clipped=0.0 +2024-09-01 02:45:29,599 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 100, loss[loss=0.182, simple_loss=0.1765, pruned_loss=0.06338, ctc_loss=0.1442, over 19073.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.1893, pruned_loss=0.07005, ctc_loss=0.1509, over 1475236.97 frames. 
], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-09-01 02:46:23,849 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:46:46,674 INFO [dysarthria_finetune.py:1435] (3/4) (13296664576, 34072559616) +2024-09-01 02:46:46,675 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:46:46,717 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:47:00,713 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 0, loss[loss=0.2143, simple_loss=0.2025, pruned_loss=0.08034, ctc_loss=0.1586, over 18560.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2025, pruned_loss=0.08034, ctc_loss=0.1586, over 18560.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:47:00,713 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:47:23,703 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 16, validation: loss=0.182, simple_loss=0.1819, pruned_loss=0.06496, ctc_loss=0.1251, over 1073944.00 frames. +2024-09-01 02:47:23,704 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 02:47:35,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=110293.33333333333, ans=0.125 +2024-09-01 02:47:46,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=18.39 vs. limit=15.0 +2024-09-01 02:47:57,763 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.17 vs. limit=15.0 +2024-09-01 02:48:35,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=110453.33333333333, ans=0.125 +2024-09-01 02:49:04,116 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 50, loss[loss=0.1771, simple_loss=0.1879, pruned_loss=0.05662, ctc_loss=0.1308, over 19044.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.1892, pruned_loss=0.07087, ctc_loss=0.1514, over 827661.95 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:49:21,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=110560.0, ans=0.2 +2024-09-01 02:49:33,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.68 vs. 
limit=15.0 +2024-09-01 02:49:43,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=110666.66666666667, ans=0.125 +2024-09-01 02:49:43,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110666.66666666667, ans=0.0 +2024-09-01 02:49:44,017 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 1.879e+02 1.996e+02 2.191e+02 2.692e+02, threshold=3.992e+02, percent-clipped=0.0 +2024-09-01 02:50:27,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=110773.33333333333, ans=0.04949747468305833 +2024-09-01 02:50:37,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=110773.33333333333, ans=0.125 +2024-09-01 02:50:42,114 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 100, loss[loss=0.1919, simple_loss=0.1923, pruned_loss=0.06697, ctc_loss=0.1436, over 19120.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.1877, pruned_loss=0.06982, ctc_loss=0.1482, over 1474935.70 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:06,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0 +2024-09-01 02:51:33,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=110933.33333333333, ans=0.025 +2024-09-01 02:51:35,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=110933.33333333333, ans=0.0 +2024-09-01 02:51:35,751 INFO [dysarthria_finetune.py:1435] (3/4) (235601920, 34072559616) +2024-09-01 02:51:35,751 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:51:35,835 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 02:51:50,606 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 0, loss[loss=0.25, simple_loss=0.2231, pruned_loss=0.1017, ctc_loss=0.1841, over 18583.00 frames. ], tot_loss[loss=0.25, simple_loss=0.2231, pruned_loss=0.1017, ctc_loss=0.1841, over 18583.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:51:50,607 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:51:54,665 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.2776, 2.0626, 2.8034, 3.5089], device='cuda:3') +2024-09-01 02:52:13,661 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 17, validation: loss=0.1784, simple_loss=0.1796, pruned_loss=0.06394, ctc_loss=0.1232, over 1073944.00 frames. 
+2024-09-01 02:52:13,662 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 02:52:38,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=111034.66666666667, ans=0.025 +2024-09-01 02:52:52,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 02:52:52,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=111034.66666666667, ans=0.5 +2024-09-01 02:53:21,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=111088.0, ans=0.125 +2024-09-01 02:53:27,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=111088.0, ans=0.0 +2024-09-01 02:53:46,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.48 vs. limit=15.0 +2024-09-01 02:53:59,378 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 1.870e+02 1.982e+02 2.091e+02 2.808e+02, threshold=3.964e+02, percent-clipped=0.0 +2024-09-01 02:54:19,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=111194.66666666667, ans=0.2 +2024-09-01 02:54:35,620 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 50, loss[loss=0.1912, simple_loss=0.1886, pruned_loss=0.06888, ctc_loss=0.1401, over 18982.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.1849, pruned_loss=0.06812, ctc_loss=0.1466, over 827806.80 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:55:15,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=111248.0, ans=0.07 +2024-09-01 02:55:25,168 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:55:46,464 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.59 vs. limit=15.0 +2024-09-01 02:55:52,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111301.33333333333, ans=0.07 +2024-09-01 02:56:22,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.62 vs. limit=15.0 +2024-09-01 02:57:32,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.41 vs. limit=6.0 +2024-09-01 02:58:16,894 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 100, loss[loss=0.1836, simple_loss=0.1811, pruned_loss=0.06227, ctc_loss=0.1539, over 19078.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.1835, pruned_loss=0.06674, ctc_loss=0.1438, over 1476033.73 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-09-01 02:59:13,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=7.27 vs. 
limit=12.0 +2024-09-01 02:59:18,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. limit=6.0 +2024-09-01 03:00:07,763 INFO [dysarthria_finetune.py:1435] (3/4) (627769344, 34072559616) +2024-09-01 03:00:07,763 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:00:07,832 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 03:00:21,387 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 0, loss[loss=0.2033, simple_loss=0.1934, pruned_loss=0.0746, ctc_loss=0.1601, over 18613.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.1934, pruned_loss=0.0746, ctc_loss=0.1601, over 18613.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-09-01 03:00:21,387 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:01:08,462 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 18, validation: loss=0.1758, simple_loss=0.1773, pruned_loss=0.06291, ctc_loss=0.1213, over 1073944.00 frames. +2024-09-01 03:01:08,463 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 03:01:20,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=111669.33333333333, ans=0.125 +2024-09-01 03:01:20,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111669.33333333333, ans=0.125 +2024-09-01 03:02:04,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 1.863e+02 1.965e+02 2.122e+02 2.833e+02, threshold=3.929e+02, percent-clipped=0.0 +2024-09-01 03:02:31,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=111776.0, ans=0.2 +2024-09-01 03:04:37,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0 +2024-09-01 03:05:54,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=111936.0, ans=0.125 +2024-09-01 03:05:55,554 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 50, loss[loss=0.1719, simple_loss=0.1772, pruned_loss=0.05631, ctc_loss=0.1349, over 19004.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.1786, pruned_loss=0.064, ctc_loss=0.1409, over 828768.32 frames. 
], batch size: 102, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:06:04,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=111936.0, ans=0.2 +2024-09-01 03:07:05,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111936.0, ans=0.125 +2024-09-01 03:07:21,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=111989.33333333333, ans=0.0 +2024-09-01 03:07:44,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111989.33333333333, ans=0.125 +2024-09-01 03:08:50,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=112042.66666666667, ans=0.125 +2024-09-01 03:09:23,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=112149.33333333333, ans=10.0 +2024-09-01 03:09:44,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=112149.33333333333, ans=10.0 +2024-09-01 03:09:56,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0 +2024-09-01 03:10:04,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=112202.66666666667, ans=0.0 +2024-09-01 03:11:05,728 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 100, loss[loss=0.157, simple_loss=0.1568, pruned_loss=0.05465, ctc_loss=0.1196, over 19084.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.1785, pruned_loss=0.06487, ctc_loss=0.1409, over 1476677.05 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 16.0 +2024-09-01 03:11:34,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.78 vs. limit=22.5 +2024-09-01 03:11:50,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0 +2024-09-01 03:11:51,861 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.826e+02 1.931e+02 2.035e+02 3.279e+02, threshold=3.861e+02, percent-clipped=0.0 +2024-09-01 03:12:04,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112256.0, ans=0.0 +2024-09-01 03:12:08,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.33 vs. limit=15.0 +2024-09-01 03:12:26,079 INFO [dysarthria_finetune.py:1435] (3/4) (164298752, 34072559616) +2024-09-01 03:12:26,080 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:12:26,160 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 03:12:38,680 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 0, loss[loss=0.2327, simple_loss=0.2108, pruned_loss=0.09255, ctc_loss=0.1741, over 18562.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2108, pruned_loss=0.09255, ctc_loss=0.1741, over 18562.00 frames. 
], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:12:38,680 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:13:02,315 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 19, validation: loss=0.1735, simple_loss=0.1751, pruned_loss=0.06201, ctc_loss=0.1194, over 1073944.00 frames. +2024-09-01 03:13:02,315 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 03:13:19,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112352.0, ans=0.125 +2024-09-01 03:13:31,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112405.33333333333, ans=0.0 +2024-09-01 03:14:18,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112512.0, ans=0.1 +2024-09-01 03:14:22,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=112512.0, ans=0.2 +2024-09-01 03:14:33,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 03:14:37,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=112565.33333333333, ans=0.025 +2024-09-01 03:14:37,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 03:14:48,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0 +2024-09-01 03:14:48,747 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 50, loss[loss=0.1669, simple_loss=0.1656, pruned_loss=0.05631, ctc_loss=0.139, over 19015.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.1797, pruned_loss=0.06587, ctc_loss=0.1435, over 829365.51 frames. 
], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:15:08,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=112672.0, ans=0.125 +2024-09-01 03:15:08,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=112672.0, ans=0.125 +2024-09-01 03:15:12,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=112672.0, ans=0.0 +2024-09-01 03:15:14,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112672.0, ans=0.0 +2024-09-01 03:15:24,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-09-01 03:15:48,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=112778.66666666667, ans=0.125 +2024-09-01 03:16:00,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112778.66666666667, ans=0.1 +2024-09-01 03:16:00,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=112778.66666666667, ans=0.05 +2024-09-01 03:16:00,813 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 1.859e+02 1.957e+02 2.051e+02 3.574e+02, threshold=3.914e+02, percent-clipped=0.0 +2024-09-01 03:16:15,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112832.0, ans=0.1 +2024-09-01 03:16:28,718 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 100, loss[loss=0.1609, simple_loss=0.1599, pruned_loss=0.05447, ctc_loss=0.1323, over 19083.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.1794, pruned_loss=0.06534, ctc_loss=0.1406, over 1476389.98 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 32.0 +2024-09-01 03:16:33,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 03:16:35,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112885.33333333333, ans=0.125 +2024-09-01 03:16:43,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 03:17:04,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=112938.66666666667, ans=0.125 +2024-09-01 03:17:05,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.45 vs. 
limit=15.0 +2024-09-01 03:17:18,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=112992.0, ans=22.5 +2024-09-01 03:17:22,996 INFO [dysarthria_finetune.py:1435] (3/4) (13292470272, 34072559616) +2024-09-01 03:17:22,997 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:17:23,027 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 03:17:37,149 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 0, loss[loss=0.2298, simple_loss=0.206, pruned_loss=0.09203, ctc_loss=0.1737, over 18436.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.206, pruned_loss=0.09203, ctc_loss=0.1737, over 18436.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:17:37,150 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:18:00,790 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 20, validation: loss=0.1713, simple_loss=0.1732, pruned_loss=0.06117, ctc_loss=0.1175, over 1073944.00 frames. +2024-09-01 03:18:00,790 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 20821MB +2024-09-01 03:18:10,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=113040.0, ans=0.125 +2024-09-01 03:18:18,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=113040.0, ans=0.025 +2024-09-01 03:18:49,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=113146.66666666667, ans=0.2 +2024-09-01 03:18:57,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=113146.66666666667, ans=0.5 +2024-09-01 03:19:07,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=113200.0, ans=0.0 +2024-09-01 03:19:09,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=113200.0, ans=0.035 +2024-09-01 03:19:39,005 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 50, loss[loss=0.1828, simple_loss=0.1836, pruned_loss=0.06274, ctc_loss=0.1412, over 18942.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.1764, pruned_loss=0.06407, ctc_loss=0.1379, over 827999.75 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:19:52,487 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 1.810e+02 1.894e+02 2.049e+02 3.111e+02, threshold=3.788e+02, percent-clipped=0.0 +2024-09-01 03:20:23,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=11.57 vs. limit=12.0 +2024-09-01 03:20:40,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.28 vs. 
limit=15.0 +2024-09-01 03:20:51,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113466.66666666667, ans=0.1 +2024-09-01 03:21:09,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=113520.0, ans=0.125 +2024-09-01 03:21:15,874 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 100, loss[loss=0.1365, simple_loss=0.1436, pruned_loss=0.04304, ctc_loss=0.1084, over 19171.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.1753, pruned_loss=0.06338, ctc_loss=0.1362, over 1475487.52 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-09-01 03:21:24,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.33 vs. limit=15.0 +2024-09-01 03:21:32,224 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:21:45,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-09-01 03:21:47,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=113626.66666666667, ans=0.025 +2024-09-01 03:21:56,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=113680.0, ans=0.125 +2024-09-01 03:22:08,728 INFO [dysarthria_finetune.py:1435] (3/4) (14441709568, 34072559616) +2024-09-01 03:22:08,729 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:22:08,752 INFO [dysarthria_finetune.py:1440] (3/4) (30023548928, 34072559616) +2024-09-01 03:22:08,752 INFO [dysarthria_finetune.py:1442] (3/4) Done! 
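The rank-3 log above runs through epoch 20 and ends with "Done!"; similar logs for the other ranks and configurations appear further below. Each records per-batch training loss, per-epoch validation loss, gradient-norm clipping quartiles, and CUDA memory usage. Because these files are long and repetitive, a small parser helps pull the loss curves out of them. The following is a minimal sketch, not part of the icefall codebase: the regular expression is derived only from the log lines visible in this dump, and the input path is a placeholder for any of the `log-train-*` files added here.

```python
import re
from pathlib import Path

# Matches batch-summary lines such as:
#   ... INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 50,
#   loss[...], tot_loss[loss=0.1964, ...], batch size: 102, lr: 9.94e-05, grad_scale: 32.0
BATCH_RE = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
    r"loss\[.*?\], tot_loss\[loss=(?P<tot_loss>[0-9.]+),.*?\], "
    r"batch size: \d+, lr: (?P<lr>[0-9.e-]+)"
)

def parse_log(path):
    """Yield (epoch, batch, tot_loss, lr) from one rank's training log."""
    for line in Path(path).read_text().splitlines():
        m = BATCH_RE.search(line)
        if m:
            yield (int(m["epoch"]), int(m["batch"]),
                   float(m["tot_loss"]), float(m["lr"]))

if __name__ == "__main__":
    # Placeholder path; point this at one of the log-train-* files in this diff.
    for epoch, batch, tot_loss, lr in parse_log("exp_finetune/log/log-train-0"):
        print(f"epoch={epoch} batch={batch} tot_loss={tot_loss:.4f} lr={lr:g}")
```

Feeding the extracted tuples into any plotting tool reproduces the downward trend already visible in the raw lines (tot_loss roughly 0.21 at epoch 14 falling to about 0.18 by epoch 20 on this rank); the tensorboard event files added below carry the same curves in binary form.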
diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724607736.cdr2649.int.cedar.computecanada.ca.964583.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724607736.cdr2649.int.cedar.computecanada.ca.964583.0 new file mode 100644 index 0000000000000000000000000000000000000000..6643547f93411f6eed3ab08c017c2f2344079e07 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724607736.cdr2649.int.cedar.computecanada.ca.964583.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf1c9f21b4b7ac22e576a7faab605fb5a738a9d3485331668fd5d8b4559cd484 +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724764802.cdr2652.int.cedar.computecanada.ca.875662.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724764802.cdr2652.int.cedar.computecanada.ca.875662.0 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724944374.cdr2538.int.cedar.computecanada.ca.643204.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724944374.cdr2538.int.cedar.computecanada.ca.643204.0 new file mode 100644 index 0000000000000000000000000000000000000000..8832d7d6f39ce5479a5030f7844cf8f6899c9d76 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724944374.cdr2538.int.cedar.computecanada.ca.643204.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b463c51e3579b50cea73f34be8a33b133c5a77e5dc7f8c6dfd8279c9e5aeacd6 +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724971791.cdr2563.int.cedar.computecanada.ca.643204.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724971791.cdr2563.int.cedar.computecanada.ca.643204.0 new file mode 100644 index 0000000000000000000000000000000000000000..40ca887e6780470c9c0c42deae5310b3509a066f --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1724971791.cdr2563.int.cedar.computecanada.ca.643204.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:06bd2f735f45c44148a25231373bcdfad98cd8b83c28460cc3249a499b011c5c +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725046816.cdr2547.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725046816.cdr2547.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..a64a74b588330e0f3b6d353985bdea2b40a5f883 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725046816.cdr2547.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fa645d3d4a34b3ddb50310e7b5debb054d71d1e718ad9247df940b5e7760d7b +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725077621.cdr2549.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725077621.cdr2549.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..9071201c0b955122247b83a631d9fce800e6f35a 
--- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725077621.cdr2549.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03d1fc80071133e3229cbe1e41928f813babf238a3c4d22b212b27f183c063da +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725080114.cdr2608.int.cedar.computecanada.ca.2191949.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725080114.cdr2608.int.cedar.computecanada.ca.2191949.0 new file mode 100644 index 0000000000000000000000000000000000000000..0ea19f87ece56aa537a6bb9e58592714eff54a16 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725080114.cdr2608.int.cedar.computecanada.ca.2191949.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2804ae12525233bdf0a7ebde73f18d367304cfa25b3b6280c2627459389ad24 +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725088151.cdr2654.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725088151.cdr2654.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..984e31baebf37309ac3cf3b6eeb0e7aa610a4a42 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725088151.cdr2654.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:282d36dd02b36719e394d5230a6476e3894e7d685f30d76a73d56ab290641cd5 +size 88 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725135618.cdr2552.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725135618.cdr2552.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..fe0cb02e2a20a659995f42aa33cf970e8811acee --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725135618.cdr2552.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68a06904a6431e75ace39f0cf2d6279ebd9b8bd74e1f7db3c925039c880cc713 +size 714 diff --git a/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725167599.cdr2549.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725167599.cdr2549.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..0973f42138cf3e8030d6dc61cf43d98b1afa6cf3 --- /dev/null +++ b/zipformer/finetuned/ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1725167599.cdr2549.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:284a6506c4c4fc05501d607de67fbd8ea8382660345d8ee4e52f891c6d4b9ae6 +size 41711 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/best-train-loss.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..ffa4bb6c37319990ba2f91b71fea9edd3c9eb939 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6939f0b14acb06ac04e9495d8309f13547d51492ee28dff423a3b34ebca4a63a +size 1058858830 
diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/best-valid-loss.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..ffa4bb6c37319990ba2f91b71fea9edd3c9eb939 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6939f0b14acb06ac04e9495d8309f13547d51492ee28dff423a3b34ebca4a63a +size 1058858830 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-1.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..f5216827d83b3d76045254a263cccb716553b75e --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9ee575e714a6ad8f8b7e62b3ac0fd2d04874c7df51293a477d62f0b503a6c33 +size 1058855257 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-10.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..76c5ba656b6d02345b6a5fd87e0ef1792ef8f397 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-10.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:762f579b13a9bb27e684d2e0d602a2d7e76e56016bc27101ac01e0142057c403 +size 1058858382 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-11.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..ca1a9613fe19e0deabfe6bf1d8fb2f601f66c52e --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-11.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78f654453c9f79819582d54e116438eaf86de93766f83edaaf3e36f85d6aa5e6 +size 1058858446 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-12.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..e583e3a7355d153cdfb6944d7361d27d008e0630 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b0c53f1d3de28c9b3ddcabc6095ab398a65e0981fb3611be25df1d48089b74e2 +size 1058858446 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-13.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-13.pt new file mode 100644 index 0000000000000000000000000000000000000000..db226ff77456b79847ee66e8931642e0a2d531aa --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2bf8b5e3160d1a817e2377133621d50302d46775b8ec49e0860f8ca87f2d919a +size 1058858510 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-14.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..6da548d113e0a3d883623af278d730db44297132 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a230fe1082ce1f6bd23825822a2627e633245cbfb28b3ec5c4c58debfce0ab6 +size 1058858574 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-15.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-15.pt new file mode 100644 index 
0000000000000000000000000000000000000000..e8a98c9de43bb498a26d4c21eec25b4bedc7a444 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-15.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:261f4b766a8f546beff0f27620484c8caa92c532a020af14d5a9ca56ab5a2854 +size 1058858638 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-16.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..fd58ccfda7b717eb1a5239d25ad3eb4e3f375a78 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bab97df0e02ae023b9e4f330239a298fc35cfe6609709e8ea480bf727f98298c +size 1058858638 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-17.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..5e81f0d6e38488820d464c4545cbc862a5aaf431 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15cb14a99cd13a336ecae106653d535d2a18f83fd5d08d3f987bd8fccf50b4e8 +size 1058858702 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-18.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..f02ef7f6c827991aa7eb554bc4cdda02d6070eff --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3b8552178f9dfa56dd642b1f5c880207970e6386e9cd12b28403fb6079553f8 +size 1058858766 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-19.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..8c09eb22addab9b9e904f35659d3a369d62d7828 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b4e08058c6417de4cc2a258e46217113b15d488b15615826128d1115e0c1385 +size 1058858766 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-2.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..f862aa33d9279f56dd9b3bf1edb0db5a1d27c6ca --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1f7d608d7c7b9a6c7e80edcbb07c304368e95dbb4a10d9f1fd617528c9c7764 +size 1058855321 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-20.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..ffa4bb6c37319990ba2f91b71fea9edd3c9eb939 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6939f0b14acb06ac04e9495d8309f13547d51492ee28dff423a3b34ebca4a63a +size 1058858830 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-3.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..4d3dd60c233dedb343f1923c71f8443d0ac4ccaf --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-3.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:051b813ec720021c4f5a674d7858147d9323ae1f97addf97c5f667c81fec05ef +size 1058855449 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-4.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-4.pt new file mode 100644 index 0000000000000000000000000000000000000000..c8aed26a7df280c71791e387715258a67b03768c --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:41beb58939343f2562cbb949102c0ffe508a2ace8041c98041ec306afe4b961f +size 1058855513 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-5.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..3f8e7610a733beae1883cf4e8c2fb404cc5da97b --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:482578baea01f8869eeef49a3c00e22d8a207e0ea8157122e078249c7d1553d4 +size 1058855513 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-6.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..862dad5682b326a4a9039c2009434fd1dff59bfc --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0754ac745c5cc155f74af3190a6fef50daee30045eebdec241ed9d8be9787059 +size 1058855577 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-7.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..70a6f70890bada8d552a54a71b50773afe14211e --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ebaebb8cb52deabcf8a0dc5cdff24482607ea58a9e9c2428a9df7f92bfc70f5e +size 1058855641 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-8.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-8.pt new file mode 100644 index 0000000000000000000000000000000000000000..f18a6a8b7a5cf3d406c2ca5ef5c81768f6c8cc24 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9b7285e4963e76f8337ca5b494877cbefb1d6100312a206fb19cab53f733bb4b +size 1058855705 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-9.pt b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-9.pt new file mode 100644 index 0000000000000000000000000000000000000000..78c24289c47a4a008efcd502bc3405f59f95a27c --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d9d3bef1f15b8c04562b4f67218fc3d5173ccf83de845d3a3ff53d137537626 +size 1058855705 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-0 new file mode 100644 index 0000000000000000000000000000000000000000..8274b45c10f78c36f7715e7c8c132747021824c3 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-0 @@ -0,0 +1,38 @@ +2024-08-29 02:02:55,177 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-29 02:02:55,415 INFO [dysarthria_finetune.py:1214] (0/4) 
(33748090880, 34072559616) +2024-08-29 02:02:55,415 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-29 02:02:56,206 INFO [dysarthria_finetune.py:1219] (0/4) (33106362368, 34072559616) +2024-08-29 02:02:56,211 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-29 02:02:56,313 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2545.int.cedar.computecanada.ca', 'IP address': '172.16.145.238'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 02:02:56,313 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-29 02:03:22,041 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66110931 +2024-08-29 02:03:22,599 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from 
/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 02:03:49,472 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-29 02:05:03,022 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-29 02:05:03,303 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-29 02:05:03,648 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-29 02:05:05,175 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-29 02:05:06,111 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-29 02:05:06,117 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts +2024-08-29 02:05:06,259 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-29 02:05:06,633 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-29 02:05:06,633 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:08:02,279 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.90 vs. limit=5.0 +2024-08-29 02:08:02,600 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=10.49 vs. limit=7.5 +2024-08-29 02:08:11,678 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 02:08:13,090 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.39 vs. limit=7.5 +2024-08-29 02:08:13,758 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 02:17:32,002 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 02:17:34,329 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 02:29:44,993 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. limit=3.0 +2024-08-29 02:29:50,600 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 02:30:10,801 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 02:33:10,176 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 02:33:10,177 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-29 04:58:19,211 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 04:58:19,549 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-29 05:17:50,818 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.68 vs. 
limit=15.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-1 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-1 new file mode 100644 index 0000000000000000000000000000000000000000..b76afbd5848e3e8865e4f551c2379f91560a8476 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-1 @@ -0,0 +1,39 @@ +2024-08-29 02:02:55,410 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-29 02:04:21,190 INFO [dysarthria_finetune.py:1214] (1/4) (32783400960, 34072559616) +2024-08-29 02:04:21,190 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-29 02:04:21,590 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-29 02:04:21,590 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-29 02:04:21,593 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2545.int.cedar.computecanada.ca', 'IP address': '172.16.145.238'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 02:04:21,593 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-29 02:04:22,288 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66110931 +2024-08-29 02:04:22,926 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 02:04:24,057 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-29 02:05:03,019 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-29 02:05:03,303 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-29 02:05:03,648 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-29 02:05:05,175 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-29 02:05:06,112 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-29 02:05:06,117 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-29 02:05:06,259 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-29 02:05:06,633 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-29 02:05:06,633 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:08:02,279 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.15 vs. limit=5.0 +2024-08-29 02:08:02,597 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=8.80 vs. limit=7.5 +2024-08-29 02:08:11,678 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-29 02:08:12,984 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=15.02 vs. limit=7.5 +2024-08-29 02:08:13,759 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-29 02:17:31,998 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-29 02:17:34,336 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-29 02:29:13,735 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. limit=3.0 +2024-08-29 02:29:50,598 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-29 02:30:10,792 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-29 02:33:10,200 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. ], tot_loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 02:33:10,200 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-29 04:58:19,216 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 04:58:20,509 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-29 05:08:46,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.75 vs. limit=15.0 +2024-08-29 05:18:40,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-2 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-2 new file mode 100644 index 0000000000000000000000000000000000000000..7d2e67fd3fb95cc7ec17bff8524508113017bb6d --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-2 @@ -0,0 +1,41 @@ +2024-08-29 02:02:55,411 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-29 02:02:55,415 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-29 02:02:55,415 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-29 02:02:56,223 INFO [dysarthria_finetune.py:1219] (2/4) (33106362368, 34072559616) +2024-08-29 02:02:56,223 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-29 02:02:56,313 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2545.int.cedar.computecanada.ca', 'IP address': '172.16.145.238'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 
'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 02:02:56,313 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-29 02:03:22,026 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66110931 +2024-08-29 02:03:22,026 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 02:03:49,473 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-29 02:05:03,014 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-29 02:05:03,302 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-29 02:05:03,648 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-29 02:05:03,648 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-29 02:05:03,648 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-29 02:05:05,175 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-29 02:05:06,111 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-29 02:05:06,938 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-29 02:05:06,939 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-29 02:05:07,259 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-29 02:05:07,259 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:08:02,281 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.96 vs. limit=5.0 +2024-08-29 02:08:02,601 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.49 vs. limit=7.5 +2024-08-29 02:08:11,680 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 02:08:13,047 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.53 vs. 
limit=7.5 +2024-08-29 02:08:13,755 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 02:17:31,999 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 02:17:34,338 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 02:28:46,234 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.99 vs. limit=3.0 +2024-08-29 02:29:50,598 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 02:30:10,797 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 02:33:10,176 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], tot_loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 02:33:10,177 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-29 04:58:19,206 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 04:58:19,738 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19764MB +2024-08-29 05:37:40,941 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/bad-model-2.pt +2024-08-29 05:37:42,975 INFO [dysarthria_finetune.py:1468] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/batch-bdd640fb-0667-1ad1-1c80-317fa3b1799d.pt +2024-08-29 05:38:18,044 INFO [dysarthria_finetune.py:1474] (2/4) features shape: torch.Size([31, 2509, 80]) +2024-08-29 05:38:18,046 INFO [dysarthria_finetune.py:1478] (2/4) num tokens: 2568 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-3 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-3 new file mode 100644 index 0000000000000000000000000000000000000000..b0818a7b6869040279f8ddb30f8d171d6c75151d --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-02-02-55-3 @@ -0,0 +1,37 @@ +2024-08-29 02:02:55,422 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-29 02:02:55,461 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-29 02:02:55,461 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-29 02:02:56,207 INFO [dysarthria_finetune.py:1219] (3/4) (33106362368, 34072559616) +2024-08-29 02:02:56,208 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-29 02:02:56,313 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': 
None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2545.int.cedar.computecanada.ca', 'IP address': '172.16.145.238'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 02:02:56,314 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-29 02:03:22,059 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66110931 +2024-08-29 02:03:22,060 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 02:03:49,471 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-29 02:05:03,013 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-29 02:05:03,302 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-29 02:05:03,647 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-29 02:05:05,175 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. 
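The data module logged here builds its train loader around lhotse's DynamicBucketingSampler with the options recorded in the config dump above (max_duration=800, num_buckets=20, shuffle=True, drop_last=True, num_workers=0, input_strategy='PrecomputedFeatures'). A minimal sketch of that wiring, assuming standard lhotse APIs; `train_cuts` and the manifest path are illustrative placeholders, not names taken from dysarthria_asr_datamodule.py:

```python
# Minimal sketch of the dataloader wiring behind the log lines above,
# assuming standard lhotse APIs; the manifest path and `train_cuts` are
# illustrative placeholders, not names from dysarthria_asr_datamodule.py.
import torch
from lhotse import CutSet
from lhotse.dataset import DynamicBucketingSampler, K2SpeechRecognitionDataset
from lhotse.dataset.input_strategies import PrecomputedFeatures

train_cuts = CutSet.from_file("manifests/cuts_train.jsonl.gz")  # placeholder

# Sampler options mirror the logged config: max_duration=800 seconds of audio
# per batch, 20 duration buckets, shuffled, dropping the last partial batch.
sampler = DynamicBucketingSampler(
    train_cuts,
    max_duration=800,
    num_buckets=20,
    shuffle=True,
    drop_last=True,
)
dataset = K2SpeechRecognitionDataset(
    return_cuts=True,
    input_strategy=PrecomputedFeatures(),
)

# lhotse samplers emit fully formed batches, so batch_size stays None;
# num_workers=0 matches the logged setting.
train_dl = torch.utils.data.DataLoader(
    dataset,
    sampler=sampler,
    batch_size=None,
    num_workers=0,
)
```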
+2024-08-29 02:05:06,117 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-29 02:05:06,962 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts +2024-08-29 02:05:06,963 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-29 02:05:07,279 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-29 02:05:07,279 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:08:02,289 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.07 vs. limit=5.0 +2024-08-29 02:08:02,609 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.46 vs. limit=7.5 +2024-08-29 02:08:11,685 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 02:08:13,183 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.37 vs. limit=7.5 +2024-08-29 02:08:13,756 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 02:17:32,005 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 02:17:34,335 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 02:27:28,508 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.87 vs. limit=3.0 +2024-08-29 02:29:50,603 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 02:30:10,797 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 02:33:10,177 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 02:33:10,178 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-29 04:58:19,215 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. 
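When a batch derails training, these runs dump the offending state to disk, as in the rank-2 log above (bad-model-2.pt and batch-bdd640fb-0667-1ad1-1c80-317fa3b1799d.pt, with features shape torch.Size([31, 2509, 80]) and 2568 tokens). A minimal sketch for inspecting such artifacts offline, assuming the batch file holds the collated feature dict from the dataloader and the model file an icefall-style checkpoint dict with the state_dict under "model"; the exact keys live in dysarthria_finetune.py and may differ:

```python
# Minimal sketch for offline inspection of the dumped batch and checkpoint;
# assumes the key layout described above, which may differ from what
# dysarthria_finetune.py actually writes.
import torch

# Load the dumped batch on CPU; the log prints its feature tensor as
# torch.Size([31, 2509, 80]), i.e. (batch, frames, mel bins).
batch = torch.load(
    "batch-bdd640fb-0667-1ad1-1c80-317fa3b1799d.pt", map_location="cpu"
)
for key, value in batch.items():
    print(key, getattr(value, "shape", value))

# Load the companion checkpoint; assumption: the state_dict sits under "model".
ckpt = torch.load("bad-model-2.pt", map_location="cpu")
print(sorted(ckpt.keys()))
```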
+2024-08-29 04:58:19,723 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-0 new file mode 100644 index 0000000000000000000000000000000000000000..bfcd2b8d7b749c782a73a16015694bf08be4f188 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-0 @@ -0,0 +1,84 @@ +2024-08-29 10:53:05,329 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-29 10:54:08,831 INFO [dysarthria_finetune.py:1214] (0/4) (32783400960, 34072559616) +2024-08-29 10:54:08,832 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-29 10:54:09,214 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-29 10:54:09,220 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-29 10:54:09,223 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2500.int.cedar.computecanada.ca', 'IP address': '172.16.145.194'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 
'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 10:54:09,224 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-29 10:54:09,916 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66110931 +2024-08-29 10:54:10,456 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 10:56:59,533 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-29 10:57:05,797 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-29 10:57:06,462 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 10:57:07,140 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-29 10:57:10,482 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-29 10:57:11,440 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-29 10:57:11,441 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts +2024-08-29 10:57:11,672 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-29 10:57:12,059 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-29 10:57:12,060 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:02:23,370 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.90 vs. limit=5.0 +2024-08-29 11:02:23,804 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=10.49 vs. limit=7.5 +2024-08-29 11:02:25,089 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 11:02:26,511 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.39 vs. limit=7.5 +2024-08-29 11:02:34,143 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 11:10:11,722 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 11:10:14,039 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 11:32:20,120 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. limit=3.0 +2024-08-29 11:32:32,333 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 11:32:34,529 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-29 11:37:09,584 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 11:37:09,585 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-29 12:10:26,833 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 12:11:03,378 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-29 12:17:21,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=15.0 +2024-08-29 12:23:07,996 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.970e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-29 12:29:12,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.22 vs. limit=15.0 +2024-08-29 12:36:03,988 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.160e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-29 12:40:09,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=31.60 vs. limit=22.5 +2024-08-29 12:50:17,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=6.0 +2024-08-29 12:57:57,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.191e+02 7.816e+02 8.684e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-29 13:14:58,208 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.4119, simple_loss=0.3886, pruned_loss=0.2299, over 18890.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.3707, pruned_loss=0.2207, over 828692.51 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-29 13:32:59,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=30.13 vs. limit=15.0 +2024-08-29 13:52:25,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100373.33333333333, ans=0.1 +2024-08-29 13:59:24,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=100426.66666666667, ans=0.0 +2024-08-29 14:09:35,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=100480.0, ans=0.2 +2024-08-29 14:16:08,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-08-29 14:20:31,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100480.0, ans=0.1 +2024-08-29 14:22:13,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.79 vs. limit=15.0 +2024-08-29 14:22:13,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.57 vs. 
limit=15.0 +2024-08-29 14:23:38,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.935e+02 7.716e+02 8.607e+02 1.055e+03, threshold=1.543e+03, percent-clipped=0.0 +2024-08-29 14:23:38,382 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.3861, simple_loss=0.3668, pruned_loss=0.1961, over 19293.00 frames. ], tot_loss[loss=0.3756, simple_loss=0.3548, pruned_loss=0.2066, over 1474004.25 frames. ], batch size: 144, lr: 6.01e-05, grad_scale: 4.0 +2024-08-29 14:42:49,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=100586.66666666667, ans=0.2 +2024-08-29 14:46:18,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=18.55 vs. limit=15.0 +2024-08-29 14:48:45,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100640.0, ans=0.1 +2024-08-29 14:52:44,408 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-1.pt +2024-08-29 14:52:50,249 INFO [dysarthria_finetune.py:1435] (0/4) (1470824448, 34072559616) +2024-08-29 14:52:50,249 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-29 14:52:50,276 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616) +2024-08-29 14:53:05,588 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 0, loss[loss=0.3086, simple_loss=0.2942, pruned_loss=0.1514, over 18874.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.2942, pruned_loss=0.1514, over 18874.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-29 14:53:05,588 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-29 15:16:53,502 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 2, validation: loss=0.3287, simple_loss=0.3125, pruned_loss=0.1663, over 1073944.00 frames. +2024-08-29 15:16:53,949 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-29 15:22:37,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.28 vs. 
limit=15.0 +2024-08-29 15:33:50,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=100736.0, ans=0.2 +2024-08-29 15:38:20,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-29 15:41:15,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-29 15:41:15,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-29 15:42:02,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=100789.33333333333, ans=0.025 +2024-08-29 15:42:02,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-29 15:45:25,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100789.33333333333, ans=0.1 +2024-08-29 15:50:26,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.80 vs. limit=12.0 +2024-08-29 16:09:52,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-29 16:33:57,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-08-29 16:34:48,761 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 50, loss[loss=0.3584, simple_loss=0.3373, pruned_loss=0.2001, over 18964.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3363, pruned_loss=0.1893, over 826819.73 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-29 16:59:00,169 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 18:55:58,098 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.24 vs. 
limit=15.0 +2024-08-29 19:26:12,742 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/bad-model-0.pt +2024-08-29 19:26:20,383 INFO [dysarthria_finetune.py:1468] (0/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/batch-c33f4584-b23b-c1d8-493c-d01609de8895.pt +2024-08-29 19:26:58,809 INFO [dysarthria_finetune.py:1474] (0/4) features shape: torch.Size([122, 652, 80]) +2024-08-29 19:26:58,811 INFO [dysarthria_finetune.py:1478] (0/4) num tokens: 2359 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-1 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-1 new file mode 100644 index 0000000000000000000000000000000000000000..0d623beb0d5556777892ca03dc9d760ea660ca52 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-1 @@ -0,0 +1,71 @@ +2024-08-29 10:53:05,605 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-29 10:53:27,400 INFO [dysarthria_finetune.py:1214] (1/4) (33106362368, 34072559616) +2024-08-29 10:53:27,400 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-29 10:53:27,779 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616) +2024-08-29 10:53:27,779 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-29 10:53:27,782 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2500.int.cedar.computecanada.ca', 'IP address': '172.16.145.194'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 
512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 10:53:27,782 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-29 10:53:28,463 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66110931 +2024-08-29 10:53:28,463 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 10:56:59,563 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-29 10:57:05,790 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-29 10:58:07,498 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 10:58:07,657 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-29 10:58:07,657 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-29 10:58:07,657 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-29 10:58:07,657 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-29 10:58:07,658 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-29 10:58:07,739 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-29 10:58:08,682 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-29 10:58:08,683 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-29 10:58:08,684 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-29 10:58:09,004 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-29 10:58:09,005 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:02:23,374 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.15 vs. limit=5.0 +2024-08-29 11:02:23,804 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=8.80 vs. limit=7.5 +2024-08-29 11:02:25,093 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-29 11:02:26,396 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=15.02 vs. limit=7.5 +2024-08-29 11:02:34,145 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-29 11:10:11,728 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-29 11:10:14,045 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-29 11:32:23,892 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. 
limit=3.0 +2024-08-29 11:32:32,344 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-29 11:32:34,534 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-29 11:37:09,587 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. ], tot_loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 11:37:09,588 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-29 12:10:26,831 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 12:10:51,379 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-29 12:12:38,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.75 vs. limit=15.0 +2024-08-29 12:17:22,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-08-29 12:23:07,995 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.970e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-29 12:32:06,836 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=8.34 vs. limit=12.0 +2024-08-29 12:36:03,996 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.160e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-29 12:47:28,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=100160.0, ans=0.0 +2024-08-29 12:48:51,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100160.0, ans=0.125 +2024-08-29 12:57:56,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=100213.33333333333, ans=0.0 +2024-08-29 12:57:57,263 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.191e+02 7.816e+02 8.684e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-29 12:58:19,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.05 vs. limit=15.0 +2024-08-29 13:14:56,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-29 13:14:58,208 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.4065, simple_loss=0.3849, pruned_loss=0.2161, over 19042.00 frames. ], tot_loss[loss=0.3907, simple_loss=0.3686, pruned_loss=0.2194, over 827432.33 frames. 
], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-29 13:25:27,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=100266.66666666667, ans=0.025 +2024-08-29 13:52:22,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100373.33333333333, ans=0.125 +2024-08-29 13:55:56,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.24 vs. limit=15.0 +2024-08-29 14:23:38,351 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.935e+02 7.716e+02 8.607e+02 1.055e+03, threshold=1.543e+03, percent-clipped=0.0 +2024-08-29 14:23:38,389 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.3902, simple_loss=0.3697, pruned_loss=0.2049, over 19093.00 frames. ], tot_loss[loss=0.3768, simple_loss=0.3559, pruned_loss=0.2076, over 1470684.91 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-29 14:41:42,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=100586.66666666667, ans=0.2 +2024-08-29 14:44:05,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100586.66666666667, ans=0.125 +2024-08-29 14:52:44,380 INFO [dysarthria_finetune.py:1435] (1/4) (4260036608, 34072559616) +2024-08-29 14:52:44,381 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-29 14:52:44,438 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-08-29 14:53:05,596 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 0, loss[loss=0.3255, simple_loss=0.3086, pruned_loss=0.1697, over 18746.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3086, pruned_loss=0.1697, over 18746.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-29 14:53:05,597 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-29 15:16:53,492 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 2, validation: loss=0.3287, simple_loss=0.3125, pruned_loss=0.1663, over 1073944.00 frames. +2024-08-29 15:17:43,739 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13360MB +2024-08-29 15:45:44,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.53 vs. limit=15.0 +2024-08-29 16:08:14,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.42 vs. limit=22.5 +2024-08-29 16:08:39,668 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:08:51,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=100896.0, ans=0.2 +2024-08-29 16:34:48,768 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 50, loss[loss=0.4057, simple_loss=0.382, pruned_loss=0.2256, over 19071.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3329, pruned_loss=0.1866, over 827854.65 frames. 
], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-29 16:49:18,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-08-29 17:55:00,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.85 vs. limit=15.0 +2024-08-29 18:41:08,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.11 vs. limit=12.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-2 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-2 new file mode 100644 index 0000000000000000000000000000000000000000..6c7b58d89b6187df04e9d7410db4b72a6d82af24 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-2 @@ -0,0 +1,75 @@ +2024-08-29 10:53:05,604 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-29 10:53:05,605 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-29 10:53:05,605 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-29 10:53:06,155 INFO [dysarthria_finetune.py:1219] (2/4) (33427226624, 34072559616) +2024-08-29 10:53:06,155 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-29 10:53:07,982 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2500.int.cedar.computecanada.ca', 'IP address': '172.16.145.194'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 
'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 10:53:07,983 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-29 10:53:10,064 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66110931 +2024-08-29 10:54:16,144 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 10:56:59,533 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-29 10:57:05,799 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-29 10:57:06,461 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-29 10:57:10,481 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-29 10:57:11,431 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-29 10:57:11,432 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-29 10:57:11,672 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-29 10:57:12,058 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-29 10:57:12,059 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:02:23,366 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.96 vs. limit=5.0 +2024-08-29 11:02:23,805 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.49 vs. limit=7.5 +2024-08-29 11:02:25,089 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 11:02:26,427 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.53 vs. limit=7.5 +2024-08-29 11:02:34,140 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 11:10:11,722 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 11:10:14,044 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 11:30:33,237 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.99 vs. 
limit=3.0 +2024-08-29 11:32:32,344 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 11:32:34,527 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-29 11:37:09,582 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], tot_loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 11:37:09,583 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-29 12:10:26,825 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 12:10:26,826 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19764MB +2024-08-29 12:20:52,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-29 12:21:51,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=38.73 vs. limit=15.0 +2024-08-29 12:22:57,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100053.33333333333, ans=0.0 +2024-08-29 12:23:07,996 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.970e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-29 12:36:03,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.96 vs. limit=15.0 +2024-08-29 12:36:03,990 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.160e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-29 12:49:19,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.29 vs. limit=15.0 +2024-08-29 12:50:18,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.00 vs. limit=15.0 +2024-08-29 12:57:57,260 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.191e+02 7.816e+02 8.684e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-29 13:00:17,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.01 vs. limit=15.0 +2024-08-29 13:13:18,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.24 vs. limit=22.5 +2024-08-29 13:14:58,206 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.4013, simple_loss=0.3798, pruned_loss=0.2136, over 19018.00 frames. ], tot_loss[loss=0.3889, simple_loss=0.367, pruned_loss=0.2174, over 827419.58 frames. 
], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-29 13:15:23,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=100266.66666666667, ans=0.04949747468305833 +2024-08-29 13:17:25,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-29 14:14:24,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-08-29 14:23:38,348 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.935e+02 7.716e+02 8.607e+02 1.055e+03, threshold=1.543e+03, percent-clipped=0.0 +2024-08-29 14:23:38,387 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3574, simple_loss=0.3383, pruned_loss=0.1901, over 19117.00 frames. ], tot_loss[loss=0.3756, simple_loss=0.3548, pruned_loss=0.2063, over 1475925.13 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-29 14:42:22,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=100586.66666666667, ans=0.1 +2024-08-29 14:51:39,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100640.0, ans=0.125 +2024-08-29 14:51:53,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=100640.0, ans=0.0 +2024-08-29 14:52:44,378 INFO [dysarthria_finetune.py:1435] (2/4) (10291445760, 34072559616) +2024-08-29 14:52:44,379 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-29 14:52:44,419 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-08-29 14:53:05,587 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 0, loss[loss=0.3342, simple_loss=0.3157, pruned_loss=0.1813, over 18502.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3157, pruned_loss=0.1813, over 18502.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-29 14:53:05,587 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-29 15:16:53,495 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 2, validation: loss=0.3287, simple_loss=0.3125, pruned_loss=0.1663, over 1073944.00 frames. +2024-08-29 15:16:53,495 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-08-29 15:30:17,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.74 vs. limit=15.0 +2024-08-29 15:33:50,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=100736.0, ans=0.2 +2024-08-29 15:38:22,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-29 15:42:00,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100789.33333333333, ans=0.1 +2024-08-29 15:45:43,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=37.32 vs. 
limit=22.5 +2024-08-29 15:57:23,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=100842.66666666667, ans=0.125 +2024-08-29 16:03:43,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=19.37 vs. limit=15.0 +2024-08-29 16:12:45,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=13.29 vs. limit=12.0 +2024-08-29 16:34:48,761 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 50, loss[loss=0.3586, simple_loss=0.3384, pruned_loss=0.1958, over 18952.00 frames. ], tot_loss[loss=0.3548, simple_loss=0.3354, pruned_loss=0.1905, over 829638.79 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-29 16:54:35,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100949.33333333333, ans=0.125 +2024-08-29 18:29:01,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101002.66666666667, ans=0.1 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-3 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-3 new file mode 100644 index 0000000000000000000000000000000000000000..616ac1d0d471297c86b49083741d0598f27eb3f0 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-29-10-53-05-3 @@ -0,0 +1,72 @@ +2024-08-29 10:53:05,640 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-29 10:53:05,651 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-29 10:53:05,651 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-29 10:53:06,159 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616) +2024-08-29 10:53:06,159 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-29 10:53:07,983 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2500.int.cedar.computecanada.ca', 'IP address': '172.16.145.194'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 
'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-29 10:53:07,983 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-29 10:53:10,084 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66110931 +2024-08-29 10:54:43,568 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-29 10:56:59,512 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-29 10:57:05,792 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-29 10:57:06,461 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-29 10:57:07,140 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-29 10:57:07,140 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-29 10:57:07,141 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-29 10:57:10,482 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-29 10:57:11,425 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-29 10:57:11,431 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts +2024-08-29 10:57:11,672 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-29 10:57:12,056 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-29 10:57:12,057 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:02:23,367 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.07 vs. limit=5.0 +2024-08-29 11:02:23,805 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.46 vs. 
limit=7.5 +2024-08-29 11:02:25,088 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 11:02:26,397 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.37 vs. limit=7.5 +2024-08-29 11:02:34,139 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 11:10:11,722 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 11:10:14,049 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 11:31:33,889 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.87 vs. limit=3.0 +2024-08-29 11:32:32,344 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 11:32:34,529 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12651MB +2024-08-29 11:37:09,584 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-29 11:37:09,584 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-29 12:10:26,833 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-29 12:10:26,834 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-08-29 12:23:07,999 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.970e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-29 12:34:01,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=100053.33333333333, ans=0.2 +2024-08-29 12:36:03,984 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.160e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-29 12:43:16,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.01 vs. 
limit=15.0 +2024-08-29 12:47:57,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=100160.0, ans=0.2 +2024-08-29 12:48:52,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100160.0, ans=0.125 +2024-08-29 12:49:20,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=100160.0, ans=0.125 +2024-08-29 12:57:57,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.191e+02 7.816e+02 8.684e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-29 12:58:19,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-29 13:14:57,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-29 13:14:58,206 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.4536, simple_loss=0.4286, pruned_loss=0.248, over 19001.00 frames. ], tot_loss[loss=0.3945, simple_loss=0.3721, pruned_loss=0.2223, over 828973.50 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-29 13:15:27,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-29 13:26:40,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=100266.66666666667, ans=0.025 +2024-08-29 14:08:15,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.25 vs. limit=6.0 +2024-08-29 14:09:34,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=100480.0, ans=0.0 +2024-08-29 14:16:08,616 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=17.63 vs. limit=15.0 +2024-08-29 14:23:38,344 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.935e+02 7.716e+02 8.607e+02 1.055e+03, threshold=1.543e+03, percent-clipped=0.0 +2024-08-29 14:23:38,382 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.3629, simple_loss=0.3427, pruned_loss=0.1984, over 19146.00 frames. ], tot_loss[loss=0.3806, simple_loss=0.3592, pruned_loss=0.2113, over 1476162.18 frames. 
], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-29 14:24:07,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=100533.33333333333, ans=0.95 +2024-08-29 14:32:16,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=100533.33333333333, ans=0.1 +2024-08-29 14:52:44,373 INFO [dysarthria_finetune.py:1435] (3/4) (13187612672, 34072559616) +2024-08-29 14:52:44,374 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-29 14:52:44,420 INFO [dysarthria_finetune.py:1440] (3/4) (29811736576, 34072559616) +2024-08-29 14:53:05,584 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 0, loss[loss=0.353, simple_loss=0.3342, pruned_loss=0.1871, over 18501.00 frames. ], tot_loss[loss=0.353, simple_loss=0.3342, pruned_loss=0.1871, over 18501.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-29 14:53:05,585 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-29 15:16:53,501 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 2, validation: loss=0.3287, simple_loss=0.3125, pruned_loss=0.1663, over 1073944.00 frames. +2024-08-29 15:16:53,502 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14322MB +2024-08-29 15:25:17,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.54 vs. limit=15.0 +2024-08-29 16:09:55,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-29 16:30:55,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=100949.33333333333, ans=0.125 +2024-08-29 16:34:48,762 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 50, loss[loss=0.3756, simple_loss=0.3534, pruned_loss=0.2106, over 18956.00 frames. ], tot_loss[loss=0.3543, simple_loss=0.335, pruned_loss=0.1903, over 828460.00 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-29 17:32:43,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.08 vs. limit=10.0 +2024-08-29 17:38:07,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=101002.66666666667, ans=0.09899494936611666 +2024-08-29 18:02:02,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0 +2024-08-29 18:55:51,343 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=8.02 vs. 
limit=12.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-0 new file mode 100644 index 0000000000000000000000000000000000000000..b9885eb5725bc8c88aeaab2875dfd09dcc4180fb --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-0 @@ -0,0 +1,79 @@ +2024-08-30 13:13:09,521 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-30 13:15:42,213 INFO [dysarthria_finetune.py:1214] (0/4) (32783400960, 34072559616) +2024-08-30 13:15:42,214 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-30 13:15:42,591 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-30 13:15:42,596 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-30 13:15:42,599 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 13:15:42,599 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-30 13:15:43,269 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66110931 +2024-08-30 13:15:43,814 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 13:17:10,957 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-30 13:17:18,119 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-30 13:18:24,886 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-30 13:18:28,518 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-30 13:18:42,137 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-30 13:19:40,938 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts +2024-08-30 13:20:07,514 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-30 13:20:35,460 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-30 13:20:35,633 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 13:21:50,039 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.90 vs. limit=5.0 +2024-08-30 13:21:50,381 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=10.49 vs. limit=7.5 +2024-08-30 13:21:52,106 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 13:21:53,491 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.39 vs. limit=7.5 +2024-08-30 13:21:53,954 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 13:25:17,407 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 13:25:19,694 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 13:30:44,199 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. limit=3.0 +2024-08-30 13:30:59,590 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 13:31:01,821 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 13:32:06,010 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-30 13:32:06,010 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-30 13:58:08,878 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-30 13:58:35,643 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-30 14:00:53,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=15.0 +2024-08-30 14:38:36,222 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-30 14:54:22,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.22 vs. limit=15.0 +2024-08-30 15:20:58,865 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-30 15:25:11,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=31.60 vs. limit=22.5 +2024-08-30 16:18:31,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=6.0 +2024-08-30 16:41:34,164 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.238e+02 7.870e+02 8.666e+02 9.467e+02 1.055e+03, threshold=3.466e+03, percent-clipped=0.0 +2024-08-30 17:29:25,327 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.4119, simple_loss=0.3886, pruned_loss=0.2299, over 18890.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.3707, pruned_loss=0.2207, over 828692.51 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-30 18:46:52,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=29.98 vs. limit=15.0 +2024-08-30 19:11:00,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100373.33333333333, ans=0.1 +2024-08-30 19:34:38,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=100426.66666666667, ans=0.0 +2024-08-30 19:48:06,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=100480.0, ans=0.2 +2024-08-30 19:50:32,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-08-30 19:55:14,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100480.0, ans=0.1 +2024-08-30 19:56:56,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.62 vs. limit=15.0 +2024-08-30 19:56:56,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.11 vs. 
limit=15.0 +2024-08-30 19:57:50,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 6.912e+02 7.699e+02 8.540e+02 1.055e+03, threshold=1.540e+03, percent-clipped=0.0 +2024-08-30 19:57:50,830 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.3858, simple_loss=0.3667, pruned_loss=0.195, over 19293.00 frames. ], tot_loss[loss=0.3756, simple_loss=0.3548, pruned_loss=0.2065, over 1474004.25 frames. ], batch size: 144, lr: 6.01e-05, grad_scale: 4.0 +2024-08-30 20:23:06,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=100586.66666666667, ans=0.2 +2024-08-30 20:40:34,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=18.52 vs. limit=15.0 +2024-08-30 20:45:20,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100640.0, ans=0.1 +2024-08-30 20:52:05,739 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-1.pt +2024-08-30 20:52:12,862 INFO [dysarthria_finetune.py:1435] (0/4) (1470824448, 34072559616) +2024-08-30 20:52:12,862 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-30 20:52:12,892 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616) +2024-08-30 20:53:26,607 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 0, loss[loss=0.3087, simple_loss=0.2943, pruned_loss=0.1512, over 18874.00 frames. ], tot_loss[loss=0.3087, simple_loss=0.2943, pruned_loss=0.1512, over 18874.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-30 20:54:00,490 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-30 21:18:09,287 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 2, validation: loss=0.3282, simple_loss=0.3121, pruned_loss=0.1657, over 1073944.00 frames. +2024-08-30 21:19:02,934 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-30 21:29:16,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.28 vs. 
limit=15.0 +2024-08-30 21:47:04,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=100736.0, ans=0.2 +2024-08-30 21:51:22,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-30 22:00:23,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-30 22:00:23,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-30 22:10:06,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=100789.33333333333, ans=0.025 +2024-08-30 22:10:06,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-30 22:17:30,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100789.33333333333, ans=0.1 +2024-08-30 22:29:09,391 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.74 vs. limit=12.0 +2024-08-30 23:20:41,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-30 23:31:41,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-08-30 23:31:43,019 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 50, loss[loss=0.359, simple_loss=0.3379, pruned_loss=0.2005, over 18964.00 frames. ], tot_loss[loss=0.3556, simple_loss=0.3364, pruned_loss=0.1894, over 826819.73 frames. 
], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-30 23:43:12,193 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-1 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-1 new file mode 100644 index 0000000000000000000000000000000000000000..de3c87ba4ca7c1571018e7881bf2542c5da87cad --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-1 @@ -0,0 +1,82 @@ +2024-08-30 13:13:09,783 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-30 13:14:09,497 INFO [dysarthria_finetune.py:1214] (1/4) (33106362368, 34072559616) +2024-08-30 13:14:09,497 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-30 13:14:09,864 INFO [dysarthria_finetune.py:1219] (1/4) (33106362368, 34072559616) +2024-08-30 13:14:09,864 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-30 13:14:09,867 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': 
PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 13:14:09,867 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-30 13:14:10,537 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66110931 +2024-08-30 13:14:10,537 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 13:17:10,445 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-30 13:17:18,119 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-30 13:18:24,886 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-30 13:18:25,731 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-30 13:18:25,731 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-30 13:18:25,731 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-30 13:18:28,518 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-30 13:18:42,141 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-30 13:19:40,938 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-30 13:20:07,513 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-30 13:20:35,461 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-30 13:20:35,633 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 13:21:50,023 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.15 vs. limit=5.0 +2024-08-30 13:21:50,380 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=8.80 vs. limit=7.5 +2024-08-30 13:21:52,106 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 13:21:53,429 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=15.02 vs. limit=7.5 +2024-08-30 13:21:53,958 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 13:25:17,403 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 13:25:19,701 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 13:30:58,758 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. 
limit=3.0 +2024-08-30 13:30:59,585 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 13:31:01,823 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 13:32:05,994 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. ], tot_loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-30 13:32:05,995 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-30 13:58:08,867 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-30 13:58:09,774 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-30 13:59:17,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.75 vs. limit=15.0 +2024-08-30 14:00:54,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-08-30 14:38:36,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-30 15:06:06,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=8.34 vs. limit=12.0 +2024-08-30 15:20:58,857 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-30 15:55:09,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0 +2024-08-30 16:33:11,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100160.0, ans=0.125 +2024-08-30 16:41:34,171 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.238e+02 7.870e+02 8.666e+02 9.467e+02 1.055e+03, threshold=3.466e+03, percent-clipped=0.0 +2024-08-30 17:07:43,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.16 vs. limit=15.0 +2024-08-30 17:28:29,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=100266.66666666667, ans=0.0 +2024-08-30 17:29:25,318 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.401, simple_loss=0.3803, pruned_loss=0.2082, over 19042.00 frames. ], tot_loss[loss=0.3933, simple_loss=0.3709, pruned_loss=0.2224, over 827432.33 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-30 18:25:42,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-30 18:25:47,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.18 vs. 
limit=15.0 +2024-08-30 18:33:31,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=26.78 vs. limit=15.0 +2024-08-30 19:00:47,014 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=7.18 vs. limit=6.0 +2024-08-30 19:46:54,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=100480.0, ans=0.125 +2024-08-30 19:47:12,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-08-30 19:54:23,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100480.0, ans=0.125 +2024-08-30 19:57:50,786 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 6.912e+02 7.699e+02 8.540e+02 1.055e+03, threshold=1.540e+03, percent-clipped=0.0 +2024-08-30 19:57:50,825 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.3791, simple_loss=0.3607, pruned_loss=0.1891, over 19093.00 frames. ], tot_loss[loss=0.3772, simple_loss=0.3563, pruned_loss=0.2074, over 1470684.91 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-30 20:20:34,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=38.65 vs. limit=22.5 +2024-08-30 20:41:47,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100586.66666666667, ans=0.1 +2024-08-30 20:52:05,701 INFO [dysarthria_finetune.py:1435] (1/4) (4245356544, 34072559616) +2024-08-30 20:52:05,805 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-30 20:52:05,867 INFO [dysarthria_finetune.py:1440] (1/4) (29490872320, 34072559616) +2024-08-30 20:53:26,609 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 0, loss[loss=0.3255, simple_loss=0.3086, pruned_loss=0.1698, over 18746.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3086, pruned_loss=0.1698, over 18746.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-30 20:53:26,609 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-30 21:18:09,290 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 2, validation: loss=0.3282, simple_loss=0.3121, pruned_loss=0.1657, over 1073944.00 frames. +2024-08-30 21:18:09,895 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13448MB +2024-08-30 21:39:09,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.96 vs. 
limit=15.0 +2024-08-30 21:46:45,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=100736.0, ans=0.125 +2024-08-30 21:49:58,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100736.0, ans=0.1 +2024-08-30 22:01:40,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-30 22:45:23,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=15.0 +2024-08-30 23:07:14,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100896.0, ans=0.125 +2024-08-30 23:07:18,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=24.13 vs. limit=22.5 +2024-08-30 23:28:09,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.91 vs. limit=15.0 +2024-08-30 23:31:43,016 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 50, loss[loss=0.4026, simple_loss=0.3807, pruned_loss=0.2158, over 19071.00 frames. ], tot_loss[loss=0.3529, simple_loss=0.334, pruned_loss=0.1871, over 827854.65 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-30 23:32:43,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100949.33333333333, ans=0.125 +2024-08-30 23:34:12,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-08-30 23:37:14,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.98 vs. 
limit=15.0 +2024-08-30 23:59:48,263 INFO [checkpoint.py:75] (1/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/bad-model-1.pt +2024-08-31 00:00:05,102 INFO [dysarthria_finetune.py:1468] (1/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/batch-c33f4584-b23b-c1d8-493c-d01609de8895.pt +2024-08-31 00:01:41,345 INFO [dysarthria_finetune.py:1474] (1/4) features shape: torch.Size([154, 516, 80]) +2024-08-31 00:01:41,348 INFO [dysarthria_finetune.py:1478] (1/4) num tokens: 2318 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-2 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-2 new file mode 100644 index 0000000000000000000000000000000000000000..786a94e02651ebb270e41b0d0bbae051b5f8819f --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-2 @@ -0,0 +1,76 @@ +2024-08-30 13:13:09,783 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-30 13:13:09,785 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-30 13:13:09,785 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-30 13:13:10,354 INFO [dysarthria_finetune.py:1219] (2/4) (33427226624, 34072559616) +2024-08-30 13:13:10,354 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-30 13:13:12,308 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 
512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 13:13:12,308 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-30 13:13:13,679 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66110931 +2024-08-30 13:13:13,679 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 13:17:10,420 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-30 13:17:18,118 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-30 13:18:24,886 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-30 13:18:25,731 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-30 13:18:28,518 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-30 13:18:42,127 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-30 13:19:40,938 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-30 13:20:07,513 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-30 13:20:35,459 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-30 13:20:35,633 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 13:21:50,022 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.96 vs. limit=5.0 +2024-08-30 13:21:50,381 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.49 vs. limit=7.5 +2024-08-30 13:21:52,106 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 13:21:53,429 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.53 vs. limit=7.5 +2024-08-30 13:21:53,957 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 13:25:17,404 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 13:25:19,692 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 13:30:44,740 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.99 vs. 
limit=3.0 +2024-08-30 13:30:59,593 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 13:31:01,822 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 13:32:05,988 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], tot_loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-30 13:32:05,989 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-30 13:58:08,871 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-30 13:58:08,872 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19764MB +2024-08-30 14:13:09,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-30 14:14:58,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=38.73 vs. limit=15.0 +2024-08-30 14:38:33,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100053.33333333333, ans=0.0 +2024-08-30 14:38:36,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-30 15:20:56,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.96 vs. limit=15.0 +2024-08-30 15:20:58,858 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-30 16:08:23,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.29 vs. limit=15.0 +2024-08-30 16:18:39,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.00 vs. limit=15.0 +2024-08-30 16:41:34,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.238e+02 7.870e+02 8.666e+02 9.467e+02 1.055e+03, threshold=3.466e+03, percent-clipped=0.0 +2024-08-30 16:59:41,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.99 vs. limit=15.0 +2024-08-30 17:29:25,313 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.4073, simple_loss=0.3851, pruned_loss=0.2204, over 19018.00 frames. ], tot_loss[loss=0.389, simple_loss=0.3671, pruned_loss=0.2175, over 827419.58 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-30 18:28:02,899 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=13.78 vs. limit=12.0 +2024-08-30 18:46:59,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=49.03 vs. 
limit=22.5 +2024-08-30 18:53:57,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=100320.0, ans=0.0 +2024-08-30 18:53:57,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=100320.0, ans=0.0 +2024-08-30 19:26:03,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=17.17 vs. limit=15.0 +2024-08-30 19:51:04,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=100480.0, ans=0.0 +2024-08-30 19:57:50,787 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 6.912e+02 7.699e+02 8.540e+02 1.055e+03, threshold=1.540e+03, percent-clipped=0.0 +2024-08-30 19:57:50,826 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3714, simple_loss=0.35, pruned_loss=0.2081, over 19117.00 frames. ], tot_loss[loss=0.3755, simple_loss=0.3547, pruned_loss=0.2066, over 1475925.13 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-30 20:10:17,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100533.33333333333, ans=0.1 +2024-08-30 20:45:08,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100640.0, ans=0.125 +2024-08-30 20:47:09,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=100640.0, ans=0.025 +2024-08-30 20:49:40,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100640.0, ans=0.1 +2024-08-30 20:52:05,699 INFO [dysarthria_finetune.py:1435] (2/4) (10291445760, 34072559616) +2024-08-30 20:52:05,805 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-30 20:52:05,862 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-08-30 20:53:26,608 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 0, loss[loss=0.3258, simple_loss=0.3083, pruned_loss=0.1731, over 18502.00 frames. ], tot_loss[loss=0.3258, simple_loss=0.3083, pruned_loss=0.1731, over 18502.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-30 20:53:26,609 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-30 21:18:09,292 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 2, validation: loss=0.3282, simple_loss=0.3121, pruned_loss=0.1657, over 1073944.00 frames. +2024-08-30 21:18:09,895 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-08-30 21:39:08,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100736.0, ans=0.1 +2024-08-30 21:50:30,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.25 vs. limit=15.0 +2024-08-30 22:17:30,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100789.33333333333, ans=0.125 +2024-08-30 22:17:56,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.00 vs. 
limit=15.0 +2024-08-30 22:41:36,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100842.66666666667, ans=0.0 +2024-08-30 23:00:16,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.88 vs. limit=22.5 +2024-08-30 23:20:03,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100896.0, ans=0.125 +2024-08-30 23:31:43,022 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 50, loss[loss=0.3654, simple_loss=0.3449, pruned_loss=0.199, over 18952.00 frames. ], tot_loss[loss=0.3543, simple_loss=0.3351, pruned_loss=0.1895, over 829638.79 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-30 23:39:32,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.74 vs. limit=15.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-3 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-3 new file mode 100644 index 0000000000000000000000000000000000000000..2b3995bfd6efcd3757d066f20d773983c6dbf3e5 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-13-13-09-3 @@ -0,0 +1,75 @@ +2024-08-30 13:13:09,784 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-30 13:13:09,785 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-30 13:13:09,785 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-30 13:13:10,359 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616) +2024-08-30 13:13:10,360 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-30 13:13:12,308 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 
'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 13:13:12,309 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-30 13:13:13,656 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66110931 +2024-08-30 13:13:13,656 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 13:17:10,444 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-30 13:17:18,122 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-30 13:18:24,886 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-30 13:18:25,730 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-30 13:18:25,731 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-30 13:18:28,518 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-30 13:18:42,111 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-30 13:19:40,938 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts +2024-08-30 13:20:07,513 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-30 13:20:35,454 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-30 13:20:35,633 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 13:21:50,022 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.07 vs. limit=5.0 +2024-08-30 13:21:50,381 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.46 vs. limit=7.5 +2024-08-30 13:21:52,111 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 13:21:53,457 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.37 vs. 
limit=7.5 +2024-08-30 13:21:53,964 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 13:25:17,400 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 13:25:19,700 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 13:30:50,514 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.87 vs. limit=3.0 +2024-08-30 13:30:59,587 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 13:31:01,825 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 13:32:05,988 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-30 13:32:05,989 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-30 13:58:08,875 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-30 13:58:09,627 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14320MB +2024-08-30 14:38:36,222 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-30 15:14:24,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=100053.33333333333, ans=0.2 +2024-08-30 15:20:58,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.687e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-30 15:26:30,735 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.01 vs. limit=15.0 +2024-08-30 15:54:40,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=100160.0, ans=0.2 +2024-08-30 15:58:04,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100160.0, ans=0.125 +2024-08-30 16:08:16,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=100160.0, ans=0.2 +2024-08-30 16:39:39,967 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:41:34,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.238e+02 7.870e+02 8.666e+02 9.467e+02 1.055e+03, threshold=3.466e+03, percent-clipped=0.0 +2024-08-30 17:14:51,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100213.33333333333, ans=0.0 +2024-08-30 17:29:25,318 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.4508, simple_loss=0.4255, pruned_loss=0.2501, over 19001.00 frames. ], tot_loss[loss=0.3942, simple_loss=0.3718, pruned_loss=0.2222, over 828973.50 frames. 
], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-30 17:41:50,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=43.94 vs. limit=22.5 +2024-08-30 18:12:29,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.04 vs. limit=22.5 +2024-08-30 18:48:12,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=17.58 vs. limit=15.0 +2024-08-30 18:51:50,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100320.0, ans=0.125 +2024-08-30 19:23:10,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=100373.33333333333, ans=0.125 +2024-08-30 19:34:03,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.31 vs. limit=15.0 +2024-08-30 19:35:03,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=100426.66666666667, ans=0.025 +2024-08-30 19:48:08,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=47.84 vs. limit=22.5 +2024-08-30 19:57:50,790 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+02 6.912e+02 7.699e+02 8.540e+02 1.055e+03, threshold=1.540e+03, percent-clipped=0.0 +2024-08-30 19:57:50,828 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.358, simple_loss=0.3396, pruned_loss=0.1854, over 19146.00 frames. ], tot_loss[loss=0.3783, simple_loss=0.3572, pruned_loss=0.2086, over 1476162.18 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-30 20:20:28,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=100586.66666666667, ans=0.0 +2024-08-30 20:52:05,697 INFO [dysarthria_finetune.py:1435] (3/4) (13370064896, 34072559616) +2024-08-30 20:52:05,805 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-30 20:52:05,864 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-08-30 20:53:26,625 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 0, loss[loss=0.3531, simple_loss=0.3343, pruned_loss=0.1872, over 18501.00 frames. ], tot_loss[loss=0.3531, simple_loss=0.3343, pruned_loss=0.1872, over 18501.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-30 20:53:26,626 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-30 21:18:09,293 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 2, validation: loss=0.3282, simple_loss=0.3121, pruned_loss=0.1657, over 1073944.00 frames. 
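The `loss[...]` and `tot_loss[...]` entries in these logs report per-frame losses: `loss` is the current batch, while `tot_loss` is a decayed, frame-weighted running average over recent batches. Below is a minimal sketch of that accumulation, assuming the `tot_loss * (1 - 1/reset_interval) + loss_info` pattern used in icefall training loops (`'reset_interval': 200` appears in the logged params); the class name is illustrative, not taken from dysarthria_finetune.py.

```python
class RunningLoss:
    """Decayed frame-weighted average behind the logged tot_loss."""

    def __init__(self, reset_interval: int = 200):  # value from the logged params
        self.decay = 1.0 - 1.0 / reset_interval
        self.loss_sum = 0.0  # decayed sum of (per-frame loss * frames)
        self.frames = 0.0    # decayed sum of frames

    def update(self, batch_loss: float, batch_frames: float) -> float:
        # batch_loss is already normalized per frame, as in the log lines
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames
        return self.loss_sum / self.frames  # the value printed as tot_loss
```

At batch 0 of an epoch the two figures coincide, since only one batch has been seen, which matches the "Epoch 2, batch 0" lines above.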
+2024-08-30 21:18:09,895 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-08-30 21:51:22,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=100789.33333333333, ans=0.125 +2024-08-30 22:01:03,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-30 22:17:56,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.34 vs. limit=22.5 +2024-08-30 22:41:35,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100842.66666666667, ans=0.125 +2024-08-30 22:56:01,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=100896.0, ans=0.125 +2024-08-30 23:20:48,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100896.0, ans=0.125 +2024-08-30 23:20:49,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-30 23:31:43,028 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 50, loss[loss=0.3498, simple_loss=0.333, pruned_loss=0.176, over 18956.00 frames. ], tot_loss[loss=0.3546, simple_loss=0.3354, pruned_loss=0.1896, over 828460.00 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-30 23:37:14,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. 
limit=15.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-0 new file mode 100644 index 0000000000000000000000000000000000000000..e96ff57f22a9a63682d391246514cb39faf688b9 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-0 @@ -0,0 +1,31 @@ +2024-08-30 20:41:22,568 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-30 20:41:22,905 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-30 20:41:22,905 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-30 20:41:23,898 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-30 20:41:26,110 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-30 20:43:36,544 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 20:43:36,544 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-30 20:43:37,243 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66110931 +2024-08-30 20:43:37,791 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 20:44:19,305 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-30 20:44:25,336 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-30 20:44:30,345 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-30 20:44:37,518 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-30 20:45:04,799 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-30 20:45:04,800 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts +2024-08-30 20:45:04,942 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-30 20:45:05,361 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-30 20:45:05,361 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:46:10,595 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.90 vs. limit=5.0 +2024-08-30 20:46:10,917 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=10.49 vs. limit=7.5 +2024-08-30 20:46:12,726 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 20:46:14,117 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.39 vs. limit=7.5 +2024-08-30 20:46:14,580 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 20:51:59,472 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 20:52:02,100 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-30 21:01:55,509 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. 
limit=3.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-1 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-1 new file mode 100644 index 0000000000000000000000000000000000000000..36a86041de5ef1d251a077b8a745cf200092ae3c --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-1 @@ -0,0 +1,30 @@ +2024-08-30 20:41:22,903 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-30 20:41:22,904 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616) +2024-08-30 20:41:22,904 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-30 20:41:23,898 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-30 20:41:23,899 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-30 20:41:24,835 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 20:41:24,835 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-30 20:41:26,828 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66110931 +2024-08-30 20:43:36,541 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 20:44:19,304 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-30 20:44:25,334 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-30 20:44:30,345 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-30 20:44:37,518 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-30 20:45:04,805 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-30 20:45:04,807 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-30 20:45:04,942 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-30 20:45:05,355 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-30 20:45:05,356 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:46:10,596 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.15 vs. limit=5.0 +2024-08-30 20:46:10,917 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=8.80 vs. limit=7.5 +2024-08-30 20:46:12,726 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 20:46:14,087 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=15.02 vs. 
limit=7.5 +2024-08-30 20:46:14,578 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 20:51:59,477 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB +2024-08-30 20:52:02,100 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12657MB diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-2 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-2 new file mode 100644 index 0000000000000000000000000000000000000000..5c0037bcea1e568e2775b8a2c55c17ff9f75279a --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-2 @@ -0,0 +1,30 @@ +2024-08-30 20:41:22,909 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-30 20:41:22,949 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-30 20:41:22,949 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-30 20:41:23,918 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616) +2024-08-30 20:41:23,918 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-30 20:41:24,834 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 20:41:24,835 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-30 20:41:26,847 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66110931 +2024-08-30 20:43:36,541 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 20:44:19,313 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-30 20:44:25,337 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-30 20:44:30,345 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-30 20:44:37,518 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-30 20:45:04,794 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-30 20:45:04,798 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-30 20:45:04,942 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-30 20:45:05,355 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-30 20:45:05,355 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:46:10,596 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.96 vs. limit=5.0 +2024-08-30 20:46:10,918 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.49 vs. limit=7.5 +2024-08-30 20:46:12,731 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 20:46:14,069 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.53 vs. 
limit=7.5 +2024-08-30 20:46:14,578 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 20:51:59,475 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-30 20:52:02,095 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-3 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-3 new file mode 100644 index 0000000000000000000000000000000000000000..bf29db92debd79f2aaca2006070a854fb9d5e3cf --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-30-20-41-22-3 @@ -0,0 +1,31 @@ +2024-08-30 20:41:22,908 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-30 20:41:22,949 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-30 20:41:22,949 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-30 20:41:23,917 INFO [dysarthria_finetune.py:1219] (3/4) (32783400960, 34072559616) +2024-08-30 20:41:23,917 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-30 20:41:24,834 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-30 20:41:24,835 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-30 20:41:26,826 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66110931 +2024-08-30 20:43:35,413 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-30 20:44:19,309 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-30 20:44:25,343 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-30 20:44:30,345 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-30 20:44:35,319 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-30 20:44:35,320 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-30 20:44:37,518 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-30 20:45:04,790 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-30 20:45:04,799 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts +2024-08-30 20:45:04,942 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-30 20:45:05,353 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-30 20:45:05,353 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:46:10,602 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.07 vs. limit=5.0 +2024-08-30 20:46:10,918 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.46 vs. limit=7.5 +2024-08-30 20:46:12,724 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 20:46:14,053 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.37 vs. limit=7.5 +2024-08-30 20:46:14,577 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 20:51:59,470 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 20:52:02,094 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-30 21:00:00,981 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.87 vs. 
limit=3.0 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-0 new file mode 100644 index 0000000000000000000000000000000000000000..498915f6530f7372a7b3cd09ba910a23fcada922 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-0 @@ -0,0 +1,545 @@ +2024-08-31 13:16:10,659 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-31 13:16:10,940 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-31 13:16:10,941 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-31 13:16:11,940 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-31 13:16:11,946 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-31 13:16:14,925 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 66110931 +2024-08-31 13:16:16,057 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-31 13:18:23,840 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-31 13:20:29,536 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-31 13:20:29,666 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-31 13:20:31,921 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-31 13:20:32,863 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-31 13:20:32,866 INFO [dysarthria_asr_datamodule.py:501] (0/4) About to get dev cuts +2024-08-31 13:20:33,113 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-31 13:20:33,461 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-31 13:20:33,461 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:44:09,215 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.90 vs. limit=5.0 +2024-08-31 13:44:10,210 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=10.49 vs. limit=7.5 +2024-08-31 13:44:14,874 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-31 13:45:00,457 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.39 vs. limit=7.5 +2024-08-31 13:45:00,956 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-31 13:47:50,046 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-31 13:47:52,428 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-31 13:50:17,751 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. limit=3.0 +2024-08-31 13:50:20,330 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-31 13:50:22,585 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 12578MB +2024-08-31 13:51:23,114 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3071, pruned_loss=0.1717, over 18513.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 13:51:23,115 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-31 14:29:03,496 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-31 14:29:03,497 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-31 14:41:38,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=15.0 +2024-08-31 15:24:16,876 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-31 15:41:04,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.22 vs. limit=15.0 +2024-08-31 15:52:42,133 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.685e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-31 15:57:12,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=31.60 vs. limit=22.5 +2024-08-31 16:29:06,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=6.0 +2024-08-31 16:32:13,896 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.192e+02 7.846e+02 8.685e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-31 17:02:30,134 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.4119, simple_loss=0.3886, pruned_loss=0.2299, over 18890.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.3707, pruned_loss=0.2207, over 828692.51 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 17:22:55,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=30.11 vs. limit=15.0 +2024-08-31 17:37:25,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100373.33333333333, ans=0.1 +2024-08-31 18:12:52,329 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.982e+02 7.682e+02 8.607e+02 1.055e+03, threshold=1.536e+03, percent-clipped=0.0 +2024-08-31 18:12:52,368 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.4038, simple_loss=0.3814, pruned_loss=0.2206, over 19293.00 frames. ], tot_loss[loss=0.3752, simple_loss=0.3544, pruned_loss=0.2062, over 1474004.25 frames. ], batch size: 144, lr: 6.01e-05, grad_scale: 4.0 +2024-08-31 18:36:35,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=100586.66666666667, ans=0.0 +2024-08-31 18:40:57,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.82 vs. 
limit=6.0 +2024-08-31 18:42:23,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=100640.0, ans=0.2 +2024-08-31 18:43:29,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=10.02 vs. limit=15.0 +2024-08-31 18:44:19,310 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-1.pt +2024-08-31 18:44:43,509 INFO [dysarthria_finetune.py:1435] (0/4) (1470824448, 34072559616) +2024-08-31 18:44:43,510 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-31 18:44:43,539 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616) +2024-08-31 18:46:01,836 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 0, loss[loss=0.3207, simple_loss=0.3046, pruned_loss=0.1641, over 18874.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3046, pruned_loss=0.1641, over 18874.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-31 18:46:01,836 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-31 19:10:08,822 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 2, validation: loss=0.3307, simple_loss=0.3141, pruned_loss=0.1687, over 1073944.00 frames. +2024-08-31 19:10:08,823 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-08-31 19:22:26,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=100682.66666666667, ans=0.025 +2024-08-31 19:46:09,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=100736.0, ans=0.125 +2024-08-31 19:57:17,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100789.33333333333, ans=0.125 +2024-08-31 20:05:00,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-31 20:21:09,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100896.0, ans=0.1 +2024-08-31 20:29:28,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=100896.0, ans=0.125 +2024-08-31 20:31:44,806 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 50, loss[loss=0.3587, simple_loss=0.3374, pruned_loss=0.2017, over 18964.00 frames. ], tot_loss[loss=0.3547, simple_loss=0.3357, pruned_loss=0.1885, over 826819.73 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-31 20:34:07,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=100949.33333333333, ans=0.125 +2024-08-31 20:40:53,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.40 vs. 
limit=15.0 +2024-08-31 20:43:40,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=101002.66666666667, ans=0.125 +2024-08-31 21:03:09,444 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.995e+02 5.661e+02 6.268e+02 7.321e+02, threshold=1.132e+03, percent-clipped=0.0 +2024-08-31 21:09:37,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101056.0, ans=0.125 +2024-08-31 21:15:13,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0 +2024-08-31 21:20:55,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=101109.33333333333, ans=0.125 +2024-08-31 21:42:13,849 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 100, loss[loss=0.3782, simple_loss=0.3604, pruned_loss=0.1893, over 19229.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3236, pruned_loss=0.1779, over 1473154.80 frames. ], batch size: 144, lr: 7.29e-05, grad_scale: 4.0 +2024-08-31 21:47:36,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=101216.0, ans=6.0 +2024-08-31 21:48:42,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101216.0, ans=0.0 +2024-08-31 22:10:06,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101269.33333333333, ans=0.1 +2024-08-31 22:21:43,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=18.46 vs. limit=15.0 +2024-08-31 22:21:43,991 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-2.pt +2024-08-31 22:21:48,544 INFO [dysarthria_finetune.py:1435] (0/4) (1412104192, 34072559616) +2024-08-31 22:21:48,544 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-31 22:21:48,574 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616) +2024-08-31 22:22:39,012 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 0, loss[loss=0.3229, simple_loss=0.3053, pruned_loss=0.1723, over 18603.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3053, pruned_loss=0.1723, over 18603.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 2.0 +2024-08-31 22:22:39,013 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-31 22:31:34,594 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 3, validation: loss=0.2979, simple_loss=0.2853, pruned_loss=0.1432, over 1073944.00 frames. 
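The number pairs such as `(1412104192, 34072559616)` printed around each "Empty cache: before and after" line are free/total device memory in bytes, bracketing a CUDA cache flush at the epoch boundary; the "Maximum memory allocated" lines report the allocator's peak. A minimal sketch of those calls using standard `torch.cuda` APIs (the helper name is illustrative):

```python
import torch

def log_epoch_boundary_memory() -> None:
    # (free_bytes, total_bytes), e.g. (1412104192, 34072559616) on a ~32 GB card
    print(torch.cuda.mem_get_info())
    torch.cuda.empty_cache()          # release cached blocks back to the driver
    print(torch.cuda.mem_get_info())  # free memory is much higher afterwards
    peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    print(f"Maximum memory allocated so far is {peak_mb}MB")
```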
+2024-08-31 22:31:34,954 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-08-31 22:50:38,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=101424.0, ans=0.0
+2024-08-31 23:06:08,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101477.33333333333, ans=0.125
+2024-08-31 23:08:20,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101477.33333333333, ans=0.0
+2024-08-31 23:19:22,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101530.66666666667, ans=0.1
+2024-08-31 23:30:40,082 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.802e+02 3.787e+02 4.308e+02 4.929e+02 6.122e+02, threshold=8.616e+02, percent-clipped=0.0
+2024-08-31 23:32:40,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=101637.33333333333, ans=0.015
+2024-08-31 23:32:40,875 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 23:32:42,312 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 50, loss[loss=0.3352, simple_loss=0.3187, pruned_loss=0.1717, over 18964.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3113, pruned_loss=0.1684, over 827741.27 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 1.0
+2024-08-31 23:54:22,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101690.66666666667, ans=0.125
+2024-08-31 23:55:08,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=101690.66666666667, ans=0.125
+2024-09-01 00:02:13,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=101744.0, ans=0.0
+2024-09-01 00:04:29,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.90 vs. limit=15.0
+2024-09-01 00:13:14,908 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 00:17:08,369 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 100, loss[loss=0.3094, simple_loss=0.2961, pruned_loss=0.1523, over 19231.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3033, pruned_loss=0.1621, over 1473938.15 frames. ], batch size: 144, lr: 8.58e-05, grad_scale: 1.0
+2024-09-01 00:22:57,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.64 vs. limit=10.0
+2024-09-01 00:25:34,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=102010.66666666667, ans=0.07
+2024-09-01 00:26:08,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=102010.66666666667, ans=0.0
+2024-09-01 00:26:09,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=102010.66666666667, ans=0.09899494936611666
+2024-09-01 00:27:26,415 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-3.pt
+2024-09-01 00:27:30,507 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 00:27:30,507 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 00:27:30,537 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 00:27:42,730 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 0, loss[loss=0.3028, simple_loss=0.2854, pruned_loss=0.1641, over 18523.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.2854, pruned_loss=0.1641, over 18523.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 2.0
+2024-09-01 00:27:42,731 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 00:46:27,440 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 4, validation: loss=0.279, simple_loss=0.2687, pruned_loss=0.1325, over 1073944.00 frames.
+2024-09-01 00:46:27,441 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 01:16:52,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.221e+02 3.659e+02 4.077e+02 5.349e+02, threshold=7.318e+02, percent-clipped=0.0
+2024-09-01 01:19:06,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=102160.0, ans=0.0
+2024-09-01 01:29:12,906 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 01:39:48,723 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 50, loss[loss=0.3261, simple_loss=0.3126, pruned_loss=0.1607, over 18961.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.2866, pruned_loss=0.1482, over 828586.64 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 0.5
+2024-09-01 01:43:16,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=102320.0, ans=0.2
+2024-09-01 01:44:42,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=102373.33333333333, ans=0.1
+2024-09-01 01:47:41,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=102373.33333333333, ans=0.05
+2024-09-01 01:48:30,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=102426.66666666667, ans=0.0
+2024-09-01 01:53:53,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=102480.0, ans=0.025
+2024-09-01 01:54:46,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=102480.0, ans=0.1
+2024-09-01 01:55:59,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=15.0
+2024-09-01 01:58:53,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.62 vs. limit=6.0
+2024-09-01 02:01:07,722 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 100, loss[loss=0.3101, simple_loss=0.2959, pruned_loss=0.1573, over 19286.00 frames. ], tot_loss[loss=0.2947, simple_loss=0.2822, pruned_loss=0.1462, over 1474147.24 frames. ], batch size: 144, lr: 9.86e-05, grad_scale: 1.0
+2024-09-01 02:09:39,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 2.859e+02 3.213e+02 3.589e+02 4.738e+02, threshold=6.426e+02, percent-clipped=0.0
+2024-09-01 02:10:13,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102693.33333333333, ans=0.1
+2024-09-01 02:10:16,937 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-4.pt
+2024-09-01 02:10:26,429 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 02:10:26,429 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 02:10:26,462 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 02:10:37,117 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 0, loss[loss=0.2472, simple_loss=0.2384, pruned_loss=0.119, over 18549.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.2384, pruned_loss=0.119, over 18549.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 1.0
+2024-09-01 02:10:37,118 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 02:15:37,614 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 5, validation: loss=0.2588, simple_loss=0.2515, pruned_loss=0.1195, over 1073944.00 frames.
+2024-09-01 02:15:37,615 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 02:17:13,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=102741.33333333333, ans=0.025
+2024-09-01 02:18:42,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=102794.66666666667, ans=0.125
+2024-09-01 02:23:25,257 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 50, loss[loss=0.2825, simple_loss=0.2735, pruned_loss=0.1344, over 19008.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.2701, pruned_loss=0.1361, over 828355.03 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 0.25
+2024-09-01 02:24:35,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=103061.33333333333, ans=0.025
+2024-09-01 02:25:48,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=103114.66666666667, ans=0.2
+2024-09-01 02:26:53,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.73 vs. limit=15.0
+2024-09-01 02:26:55,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103168.0, ans=0.125
+2024-09-01 02:27:11,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103168.0, ans=0.1
+2024-09-01 02:27:48,818 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 2.619e+02 2.908e+02 3.410e+02 5.061e+02, threshold=5.817e+02, percent-clipped=0.0
+2024-09-01 02:28:05,165 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 100, loss[loss=0.258, simple_loss=0.2514, pruned_loss=0.1201, over 19287.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.2659, pruned_loss=0.1335, over 1473652.43 frames. ], batch size: 144, lr: 1.00e-04, grad_scale: 0.5
+2024-09-01 02:28:34,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.42 vs. limit=15.0
+2024-09-01 02:29:16,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103328.0, ans=0.1
+2024-09-01 02:29:56,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=103381.33333333333, ans=0.025
+2024-09-01 02:30:03,404 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-5.pt
+2024-09-01 02:30:10,482 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 02:30:10,482 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 02:30:10,512 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 02:30:19,013 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 0, loss[loss=0.2578, simple_loss=0.2521, pruned_loss=0.1188, over 18610.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.2521, pruned_loss=0.1188, over 18610.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 1.0
+2024-09-01 02:30:19,014 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 02:30:42,393 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 6, validation: loss=0.247, simple_loss=0.2415, pruned_loss=0.1137, over 1073944.00 frames.
+2024-09-01 02:30:51,758 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 02:33:06,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103584.0, ans=0.125
+2024-09-01 02:33:15,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=103584.0, ans=0.125
+2024-09-01 02:33:49,358 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 50, loss[loss=0.2701, simple_loss=0.2628, pruned_loss=0.1283, over 19047.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.2606, pruned_loss=0.1292, over 829577.21 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 0.5
+2024-09-01 02:34:09,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=103690.66666666667, ans=0.0
+2024-09-01 02:34:09,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=103690.66666666667, ans=0.0
+2024-09-01 02:34:12,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=20.23 vs. limit=15.0
+2024-09-01 02:34:20,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103744.0, ans=0.125
+2024-09-01 02:34:34,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=10.53 vs. limit=15.0
+2024-09-01 02:34:35,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.02 vs. limit=6.0
+2024-09-01 02:34:39,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=103797.33333333333, ans=0.0
+2024-09-01 02:34:40,107 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.072e+02 2.401e+02 2.633e+02 2.975e+02 4.049e+02, threshold=5.266e+02, percent-clipped=0.0
+2024-09-01 02:34:44,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103797.33333333333, ans=0.1
+2024-09-01 02:34:52,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103797.33333333333, ans=0.125
+2024-09-01 02:35:51,952 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 02:35:55,609 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 100, loss[loss=0.2561, simple_loss=0.2488, pruned_loss=0.1233, over 19232.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.2528, pruned_loss=0.1237, over 1476247.28 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 1.0
+2024-09-01 02:36:48,673 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 02:36:58,487 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-6.pt
+2024-09-01 02:37:28,669 INFO [dysarthria_finetune.py:1435] (0/4) (1454047232, 34072559616)
+2024-09-01 02:37:28,669 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 02:37:28,698 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 02:37:37,090 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 0, loss[loss=0.2413, simple_loss=0.2389, pruned_loss=0.1085, over 18570.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2389, pruned_loss=0.1085, over 18570.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 2.0
+2024-09-01 02:37:37,090 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 02:38:00,927 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 7, validation: loss=0.2303, simple_loss=0.2284, pruned_loss=0.1027, over 1073944.00 frames.
+2024-09-01 02:38:00,928 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 02:38:02,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104106.66666666667, ans=0.1
+2024-09-01 02:38:03,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104106.66666666667, ans=0.1
+2024-09-01 02:38:12,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.83 vs. limit=6.0
+2024-09-01 02:38:48,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104213.33333333333, ans=0.125
+2024-09-01 02:39:39,126 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.248e+02 2.388e+02 2.643e+02 3.863e+02, threshold=4.776e+02, percent-clipped=0.0
+2024-09-01 02:39:40,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=104320.0, ans=0.125
+2024-09-01 02:39:54,677 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 50, loss[loss=0.2572, simple_loss=0.255, pruned_loss=0.1164, over 18968.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.246, pruned_loss=0.1185, over 827907.61 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 1.0
+2024-09-01 02:40:19,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=104426.66666666667, ans=0.125
+2024-09-01 02:41:17,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=104533.33333333333, ans=0.035
+2024-09-01 02:41:32,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.30 vs. limit=10.0
+2024-09-01 02:41:41,952 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 100, loss[loss=0.2236, simple_loss=0.2256, pruned_loss=0.09572, over 19302.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.2418, pruned_loss=0.1155, over 1473040.93 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 2.0
+2024-09-01 02:42:08,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=104693.33333333333, ans=0.125
+2024-09-01 02:42:39,964 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-7.pt
+2024-09-01 02:42:44,190 INFO [dysarthria_finetune.py:1435] (0/4) (1412104192, 34072559616)
+2024-09-01 02:42:44,190 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 02:42:44,219 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 02:42:52,904 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 0, loss[loss=0.2588, simple_loss=0.2521, pruned_loss=0.1256, over 18485.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.2521, pruned_loss=0.1256, over 18485.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 4.0
+2024-09-01 02:42:52,904 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 02:43:16,304 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 8, validation: loss=0.2224, simple_loss=0.2225, pruned_loss=0.09892, over 1073944.00 frames.
+2024-09-01 02:43:16,305 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 02:43:18,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.58 vs. limit=12.0
+2024-09-01 02:43:28,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104789.33333333333, ans=0.125
+2024-09-01 02:43:29,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.05 vs. limit=22.5
+2024-09-01 02:43:51,414 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.041e+02 2.195e+02 2.485e+02 3.530e+02, threshold=4.390e+02, percent-clipped=0.0
+2024-09-01 02:44:21,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=104949.33333333333, ans=0.0
+2024-09-01 02:44:37,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=104949.33333333333, ans=0.09899494936611666
+2024-09-01 02:44:47,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=105002.66666666667, ans=0.025
+2024-09-01 02:45:06,407 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 50, loss[loss=0.2511, simple_loss=0.2451, pruned_loss=0.1218, over 18938.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2369, pruned_loss=0.1124, over 828565.55 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 2.0
+2024-09-01 02:46:41,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=105109.33333333333, ans=0.0
+2024-09-01 02:47:14,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.17 vs. limit=22.5
+2024-09-01 02:47:40,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=12.0
+2024-09-01 02:47:48,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=10.0
+2024-09-01 02:47:50,408 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 02:47:55,798 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 100, loss[loss=0.2388, simple_loss=0.2411, pruned_loss=0.1055, over 19222.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.2356, pruned_loss=0.1113, over 1474444.14 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 2.0
+2024-09-01 02:48:07,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=105322.66666666667, ans=0.0
+2024-09-01 02:48:24,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105376.0, ans=0.1
+2024-09-01 02:48:34,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 1.930e+02 2.062e+02 2.246e+02 3.148e+02, threshold=4.124e+02, percent-clipped=0.0
+2024-09-01 02:48:37,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=105429.33333333333, ans=0.125
+2024-09-01 02:48:42,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=105429.33333333333, ans=0.125
+2024-09-01 02:48:55,520 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-8.pt
+2024-09-01 02:49:00,083 INFO [dysarthria_finetune.py:1435] (0/4) (1412104192, 34072559616)
+2024-09-01 02:49:00,084 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 02:49:00,113 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 02:49:09,692 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 0, loss[loss=0.2515, simple_loss=0.2454, pruned_loss=0.1229, over 18596.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.2454, pruned_loss=0.1229, over 18596.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 2.0
+2024-09-01 02:49:09,693 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 02:49:40,375 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 9, validation: loss=0.211, simple_loss=0.2147, pruned_loss=0.09159, over 1073944.00 frames.
+2024-09-01 02:49:40,376 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 02:49:47,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.16 vs. limit=22.5
+2024-09-01 02:49:53,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=105477.33333333333, ans=0.0
+2024-09-01 02:50:05,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=105477.33333333333, ans=0.2
+2024-09-01 02:50:12,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=105477.33333333333, ans=10.0
+2024-09-01 02:50:21,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.18 vs. limit=15.0
+2024-09-01 02:50:36,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.34 vs. limit=15.0
+2024-09-01 02:51:30,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.79 vs. limit=10.0
+2024-09-01 02:52:12,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=105690.66666666667, ans=0.0
+2024-09-01 02:52:18,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=105690.66666666667, ans=0.2
+2024-09-01 02:52:30,683 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 50, loss[loss=0.2332, simple_loss=0.2358, pruned_loss=0.1042, over 19065.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2305, pruned_loss=0.1044, over 828972.56 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0
+2024-09-01 02:52:35,445 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 02:53:01,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=105744.0, ans=0.0
+2024-09-01 02:53:01,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=105744.0, ans=0.025
+2024-09-01 02:53:08,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=105797.33333333333, ans=0.2
+2024-09-01 02:53:13,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0
+2024-09-01 02:53:37,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=105850.66666666667, ans=0.2
+2024-09-01 02:53:46,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=105850.66666666667, ans=0.07
+2024-09-01 02:54:05,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=105850.66666666667, ans=0.0
+2024-09-01 02:54:08,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=105904.0, ans=0.0
+2024-09-01 02:54:17,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105904.0, ans=0.125
+2024-09-01 02:54:35,247 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 1.850e+02 1.979e+02 2.143e+02 2.885e+02, threshold=3.959e+02, percent-clipped=0.0
+2024-09-01 02:54:39,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105957.33333333333, ans=0.1
+2024-09-01 02:54:55,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=105957.33333333333, ans=0.125
+2024-09-01 02:54:58,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105957.33333333333, ans=0.125
+2024-09-01 02:55:01,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=105957.33333333333, ans=0.125
+2024-09-01 02:55:06,103 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 100, loss[loss=0.184, simple_loss=0.1937, pruned_loss=0.07395, over 19269.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.2285, pruned_loss=0.1031, over 1474236.32 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 4.0
+2024-09-01 02:55:15,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=106010.66666666667, ans=0.0
+2024-09-01 02:55:42,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=106064.0, ans=0.2
+2024-09-01 02:56:27,104 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-9.pt
+2024-09-01 02:56:31,510 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 02:56:31,511 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 02:56:31,543 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 02:56:40,078 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 0, loss[loss=0.1934, simple_loss=0.1995, pruned_loss=0.0832, over 18682.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.1995, pruned_loss=0.0832, over 18682.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 4.0
+2024-09-01 02:56:40,079 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 02:56:52,578 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.5255, 1.8610, 4.1780, 3.9660], device='cuda:0')
+2024-09-01 02:57:03,504 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 10, validation: loss=0.2075, simple_loss=0.2129, pruned_loss=0.09054, over 1073944.00 frames.
+2024-09-01 02:57:03,505 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 02:57:22,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=106165.33333333333, ans=0.125
+2024-09-01 02:57:38,114 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 02:57:40,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.38 vs. limit=22.5
+2024-09-01 02:58:14,399 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 02:58:14,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=106325.33333333333, ans=0.2
+2024-09-01 02:58:20,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=106325.33333333333, ans=0.125
+2024-09-01 02:58:36,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106378.66666666667, ans=0.1
+2024-09-01 02:58:55,063 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 50, loss[loss=0.2409, simple_loss=0.2443, pruned_loss=0.1094, over 19012.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2261, pruned_loss=0.102, over 829104.52 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0
+2024-09-01 02:58:56,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=106432.0, ans=0.07
+2024-09-01 02:58:56,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106432.0, ans=0.125
+2024-09-01 02:59:02,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=106432.0, ans=0.2
+2024-09-01 02:59:07,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1
+2024-09-01 02:59:13,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=15.0
+2024-09-01 02:59:20,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.769e+02 1.897e+02 2.105e+02 2.891e+02, threshold=3.793e+02, percent-clipped=0.0
+2024-09-01 02:59:28,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=106485.33333333333, ans=0.2
+2024-09-01 02:59:45,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106538.66666666667, ans=0.125
+2024-09-01 03:00:26,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=106645.33333333333, ans=0.07
+2024-09-01 03:00:29,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.69 vs. limit=15.0
+2024-09-01 03:00:42,966 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 100, loss[loss=0.1987, simple_loss=0.2117, pruned_loss=0.0803, over 19226.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.223, pruned_loss=0.09873, over 1474931.95 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 4.0
+2024-09-01 03:00:46,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106698.66666666667, ans=0.125
+2024-09-01 03:00:57,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=106698.66666666667, ans=0.2
+2024-09-01 03:01:27,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=106752.0, ans=0.0
+2024-09-01 03:01:46,995 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-10.pt
+2024-09-01 03:07:52,130 INFO [dysarthria_finetune.py:1435] (0/4) (1416298496, 34072559616)
+2024-09-01 03:07:52,130 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:07:52,159 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:08:00,728 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 0, loss[loss=0.2256, simple_loss=0.23, pruned_loss=0.1025, over 18505.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.23, pruned_loss=0.1025, over 18505.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0
+2024-09-01 03:08:00,729 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:08:32,542 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 11, validation: loss=0.2002, simple_loss=0.2088, pruned_loss=0.08618, over 1073944.00 frames.
+2024-09-01 03:08:32,542 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:09:44,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=106906.66666666667, ans=0.2
+2024-09-01 03:09:56,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.21 vs. limit=15.0
+2024-09-01 03:10:00,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106960.0, ans=0.125
+2024-09-01 03:10:19,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=106960.0, ans=0.0
+2024-09-01 03:10:28,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=107013.33333333333, ans=0.0
+2024-09-01 03:10:32,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.50 vs. limit=22.5
+2024-09-01 03:10:42,593 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 1.721e+02 1.824e+02 2.016e+02 2.682e+02, threshold=3.648e+02, percent-clipped=0.0
+2024-09-01 03:12:03,941 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 50, loss[loss=0.2143, simple_loss=0.2209, pruned_loss=0.09575, over 19023.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2221, pruned_loss=0.09671, over 827570.26 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 4.0
+2024-09-01 03:12:48,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=107173.33333333333, ans=0.0
+2024-09-01 03:13:56,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=107226.66666666667, ans=0.2
+2024-09-01 03:15:13,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107333.33333333333, ans=0.125
+2024-09-01 03:15:56,720 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 100, loss[loss=0.2003, simple_loss=0.2132, pruned_loss=0.08428, over 19237.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2204, pruned_loss=0.09574, over 1473115.37 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 4.0
+2024-09-01 03:16:20,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.35 vs. limit=10.0
+2024-09-01 03:17:12,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107440.0, ans=0.125
+2024-09-01 03:17:56,546 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-11.pt
+2024-09-01 03:18:00,890 INFO [dysarthria_finetune.py:1435] (0/4) (1412104192, 34072559616)
+2024-09-01 03:18:00,890 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:18:00,920 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:18:09,586 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 0, loss[loss=0.2107, simple_loss=0.2148, pruned_loss=0.09758, over 18585.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2148, pruned_loss=0.09758, over 18585.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0
+2024-09-01 03:18:09,587 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:18:33,048 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 12, validation: loss=0.1929, simple_loss=0.2049, pruned_loss=0.0821, over 1073944.00 frames.
+2024-09-01 03:18:33,049 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:18:41,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=15.0
+2024-09-01 03:18:54,487 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.518e+02 1.683e+02 1.764e+02 1.920e+02 2.754e+02, threshold=3.529e+02, percent-clipped=0.0
+2024-09-01 03:20:46,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=107754.66666666667, ans=0.025
+2024-09-01 03:20:49,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.09 vs. limit=12.0
+2024-09-01 03:20:56,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107754.66666666667, ans=0.125
+2024-09-01 03:21:03,703 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 50, loss[loss=0.197, simple_loss=0.2118, pruned_loss=0.08272, over 18986.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2128, pruned_loss=0.08763, over 829307.75 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 4.0
+2024-09-01 03:21:26,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.18 vs. limit=15.0
+2024-09-01 03:22:34,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=107914.66666666667, ans=0.0
+2024-09-01 03:23:14,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=107968.0, ans=0.2
+2024-09-01 03:24:01,752 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 100, loss[loss=0.1852, simple_loss=0.1986, pruned_loss=0.07908, over 19194.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2124, pruned_loss=0.08917, over 1473409.16 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 4.0
+2024-09-01 03:24:21,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.650e+02 1.753e+02 1.928e+02 2.697e+02, threshold=3.507e+02, percent-clipped=0.0
+2024-09-01 03:24:23,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108074.66666666667, ans=0.1
+2024-09-01 03:24:48,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.06 vs. limit=15.0
+2024-09-01 03:24:50,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108181.33333333333, ans=0.125
+2024-09-01 03:25:11,683 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-12.pt
+2024-09-01 03:25:16,171 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 03:25:16,171 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:25:16,201 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:25:24,769 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 0, loss[loss=0.2429, simple_loss=0.2404, pruned_loss=0.1199, over 18643.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2404, pruned_loss=0.1199, over 18643.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0
+2024-09-01 03:25:24,770 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:25:48,259 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 13, validation: loss=0.1886, simple_loss=0.2026, pruned_loss=0.08078, over 1073944.00 frames.
+2024-09-01 03:25:48,259 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:26:07,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=108229.33333333333, ans=0.125
+2024-09-01 03:26:17,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108282.66666666667, ans=0.1
+2024-09-01 03:27:10,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=108389.33333333333, ans=0.0
+2024-09-01 03:27:12,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=108389.33333333333, ans=0.025
+2024-09-01 03:27:30,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=108442.66666666667, ans=0.125
+2024-09-01 03:27:54,668 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 50, loss[loss=0.1725, simple_loss=0.1958, pruned_loss=0.06699, over 19011.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2112, pruned_loss=0.08759, over 829773.70 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 1.0
+2024-09-01 03:27:58,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0
+2024-09-01 03:28:05,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108496.0, ans=0.1
+2024-09-01 03:28:42,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=108602.66666666667, ans=0.0
+2024-09-01 03:29:07,380 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.617e+02 1.723e+02 2.007e+02 2.594e+02, threshold=3.446e+02, percent-clipped=0.0
+2024-09-01 03:29:08,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=108656.0, ans=0.125
+2024-09-01 03:29:25,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=108709.33333333333, ans=0.0
+2024-09-01 03:29:32,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=108709.33333333333, ans=0.125
+2024-09-01 03:29:45,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108762.66666666667, ans=0.125
+2024-09-01 03:29:45,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=108762.66666666667, ans=0.0
+2024-09-01 03:29:46,147 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 100, loss[loss=0.1379, simple_loss=0.1628, pruned_loss=0.05001, over 19225.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.21, pruned_loss=0.08675, over 1474982.28 frames. ], batch size: 144, lr: 9.96e-05, grad_scale: 2.0
+2024-09-01 03:30:00,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.34 vs. limit=10.0
+2024-09-01 03:30:13,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.44 vs. limit=15.0
+2024-09-01 03:30:41,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=108869.33333333333, ans=0.025
+2024-09-01 03:30:46,117 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-13.pt
+2024-09-01 03:30:50,601 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 03:30:50,601 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:30:50,631 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:30:58,993 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 0, loss[loss=0.2071, simple_loss=0.2241, pruned_loss=0.09005, over 18695.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2241, pruned_loss=0.09005, over 18695.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0
+2024-09-01 03:30:58,994 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:31:07,320 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.4236, 5.1880, 4.8787, 4.2392], device='cuda:0')
+2024-09-01 03:31:23,131 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.6024, 1.4805, 1.0069, 1.4129, 1.6895, 1.5457, 1.5948, 1.6163],
+       device='cuda:0')
+2024-09-01 03:31:23,178 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 14, validation: loss=0.1833, simple_loss=0.2, pruned_loss=0.07856, over 1073944.00 frames.
+2024-09-01 03:31:23,178 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:31:26,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=108917.33333333333, ans=0.125
+2024-09-01 03:31:40,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108917.33333333333, ans=0.1
+2024-09-01 03:31:49,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.04 vs. limit=10.0
+2024-09-01 03:32:00,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108970.66666666667, ans=0.1
+2024-09-01 03:32:08,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=109024.0, ans=0.2
+2024-09-01 03:32:11,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=109024.0, ans=0.125
+2024-09-01 03:32:21,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=109024.0, ans=0.125
+2024-09-01 03:33:13,481 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 50, loss[loss=0.1743, simple_loss=0.2043, pruned_loss=0.06634, over 18964.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2072, pruned_loss=0.08496, over 828263.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 4.0
+2024-09-01 03:33:19,742 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.619e+02 1.722e+02 1.984e+02 2.668e+02, threshold=3.445e+02, percent-clipped=0.0
+2024-09-01 03:33:31,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=109184.0, ans=0.125
+2024-09-01 03:33:31,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109184.0, ans=0.0
+2024-09-01 03:33:57,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=109290.66666666667, ans=0.0
+2024-09-01 03:34:17,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0
+2024-09-01 03:35:00,699 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 100, loss[loss=0.1676, simple_loss=0.1841, pruned_loss=0.07243, over 19207.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2073, pruned_loss=0.08486, over 1474261.28 frames. ], batch size: 144, lr: 9.96e-05, grad_scale: 8.0
+2024-09-01 03:35:14,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=109450.66666666667, ans=0.0
+2024-09-01 03:35:41,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=109557.33333333333, ans=0.0
+2024-09-01 03:36:00,182 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-14.pt
+2024-09-01 03:36:05,441 INFO [dysarthria_finetune.py:1435] (0/4) (1454047232, 34072559616)
+2024-09-01 03:36:05,441 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:36:05,470 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:36:14,232 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 0, loss[loss=0.2357, simple_loss=0.2492, pruned_loss=0.1084, over 18509.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2492, pruned_loss=0.1084, over 18509.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 4.0
+2024-09-01 03:36:14,233 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:36:45,416 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 15, validation: loss=0.1765, simple_loss=0.1963, pruned_loss=0.07531, over 1073944.00 frames.
+2024-09-01 03:36:45,417 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:36:47,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=109605.33333333333, ans=0.0
+2024-09-01 03:37:11,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109605.33333333333, ans=0.1
+2024-09-01 03:37:18,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=12.0
+2024-09-01 03:37:18,353 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=22.26 vs. limit=15.0
+2024-09-01 03:37:31,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.49 vs. limit=15.0
+2024-09-01 03:38:03,356 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.579e+02 1.672e+02 1.908e+02 2.431e+02, threshold=3.343e+02, percent-clipped=0.0
+2024-09-01 03:39:13,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109872.0, ans=0.1
+2024-09-01 03:39:14,758 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 50, loss[loss=0.1856, simple_loss=0.2042, pruned_loss=0.08139, over 19011.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2009, pruned_loss=0.07931, over 827942.50 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 4.0
+2024-09-01 03:40:36,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.32 vs. limit=10.0
+2024-09-01 03:40:44,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.30 vs. limit=6.0
+2024-09-01 03:40:49,913 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.21 vs. limit=15.0
+2024-09-01 03:41:54,222 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 100, loss[loss=0.1683, simple_loss=0.1907, pruned_loss=0.07146, over 19251.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2008, pruned_loss=0.07977, over 1473903.80 frames. ], batch size: 144, lr: 9.95e-05, grad_scale: 8.0
+2024-09-01 03:42:24,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.35 vs. limit=15.0
+2024-09-01 03:43:12,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110245.33333333333, ans=0.1
+2024-09-01 03:43:21,363 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.548e+02 1.650e+02 1.862e+02 2.617e+02, threshold=3.300e+02, percent-clipped=0.0
+2024-09-01 03:43:24,585 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-15.pt
+2024-09-01 03:43:30,754 INFO [dysarthria_finetune.py:1435] (0/4) (1412104192, 34072559616)
+2024-09-01 03:43:30,754 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:43:30,783 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:43:40,069 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 0, loss[loss=0.212, simple_loss=0.2271, pruned_loss=0.09751, over 18729.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2271, pruned_loss=0.09751, over 18729.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0
+2024-09-01 03:43:40,069 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:44:25,988 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 16, validation: loss=0.1763, simple_loss=0.1967, pruned_loss=0.07691, over 1073944.00 frames.
+2024-09-01 03:44:25,989 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:45:04,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=110293.33333333333, ans=0.0
+2024-09-01 03:45:54,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
+2024-09-01 03:46:03,009 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 03:46:25,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=110400.0, ans=0.0
+2024-09-01 03:48:00,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=110506.66666666667, ans=0.1
+2024-09-01 03:48:37,719 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 50, loss[loss=0.1597, simple_loss=0.1896, pruned_loss=0.06457, over 18988.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2023, pruned_loss=0.08062, over 828175.61 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0
+2024-09-01 03:49:07,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110560.0, ans=0.1
+2024-09-01 03:51:34,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110720.0, ans=0.1
+2024-09-01 03:51:52,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=110773.33333333333, ans=0.025
+2024-09-01 03:52:02,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=110773.33333333333, ans=0.125
+2024-09-01 03:52:26,721 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.555e+02 1.657e+02 1.896e+02 2.445e+02, threshold=3.314e+02, percent-clipped=0.0
+2024-09-01 03:52:31,263 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 100, loss[loss=0.1603, simple_loss=0.1863, pruned_loss=0.06715, over 19270.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2004, pruned_loss=0.07963, over 1473314.28 frames. ], batch size: 144, lr: 9.94e-05, grad_scale: 8.0
+2024-09-01 03:54:05,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.95 vs. limit=6.0
+2024-09-01 03:54:10,906 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-16.pt
+2024-09-01 03:54:30,050 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 03:54:30,050 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 03:54:30,080 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 03:54:38,976 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 0, loss[loss=0.2101, simple_loss=0.2144, pruned_loss=0.1029, over 18739.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2144, pruned_loss=0.1029, over 18739.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0
+2024-09-01 03:54:38,977 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 03:54:45,295 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.3006, 1.6254, 3.2721, 3.1045], device='cuda:0')
+2024-09-01 03:54:58,430 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.9866, 6.0336, 6.2285, 6.1565], device='cuda:0')
+2024-09-01 03:55:19,872 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 17, validation: loss=0.1671, simple_loss=0.1912, pruned_loss=0.07151, over 1073944.00 frames.
+2024-09-01 03:55:19,873 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 03:55:22,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110981.33333333333, ans=0.1
+2024-09-01 03:56:16,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=111034.66666666667, ans=0.125
+2024-09-01 03:56:27,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111034.66666666667, ans=0.1
+2024-09-01 03:56:56,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=111088.0, ans=0.125
+2024-09-01 03:57:45,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111141.33333333333, ans=0.125
+2024-09-01 03:58:57,661 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 50, loss[loss=0.1722, simple_loss=0.2039, pruned_loss=0.0702, over 19028.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.1963, pruned_loss=0.07632, over 827378.67 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0
+2024-09-01 03:59:47,869 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-09-01 04:01:02,159 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.555e+02 1.659e+02 1.888e+02 2.626e+02, threshold=3.319e+02, percent-clipped=0.0
+2024-09-01 04:01:09,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111354.66666666667, ans=0.125
+2024-09-01 04:01:45,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=111408.0, ans=0.2
+2024-09-01 04:01:58,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=111408.0, ans=0.125
+2024-09-01 04:02:21,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111461.33333333333, ans=0.125
+2024-09-01 04:02:48,360 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 100, loss[loss=0.1444, simple_loss=0.1741, pruned_loss=0.05738, over 19218.00 frames. ], tot_loss[loss=0.178, simple_loss=0.199, pruned_loss=0.07852, over 1473529.96 frames. ], batch size: 144, lr: 9.94e-05, grad_scale: 8.0
+2024-09-01 04:02:59,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=111514.66666666667, ans=0.1
+2024-09-01 04:03:17,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111514.66666666667, ans=0.125
+2024-09-01 04:04:03,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111568.0, ans=0.125
+2024-09-01 04:04:09,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=111621.33333333333, ans=0.125
+2024-09-01 04:05:21,573 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-17.pt
+2024-09-01 04:05:29,262 INFO [dysarthria_finetune.py:1435] (0/4) (1454047232, 34072559616)
+2024-09-01 04:05:29,262 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 04:05:29,291 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 04:05:38,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2
+2024-09-01 04:05:38,881 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 0, loss[loss=0.2063, simple_loss=0.2202, pruned_loss=0.09617, over 18538.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2202, pruned_loss=0.09617, over 18538.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 8.0
+2024-09-01 04:05:38,882 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 04:06:14,846 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 18, validation: loss=0.1676, simple_loss=0.191, pruned_loss=0.07213, over 1073944.00 frames.
+2024-09-01 04:06:14,847 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB
+2024-09-01 04:08:04,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111776.0, ans=0.125
+2024-09-01 04:08:44,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.19 vs. limit=22.5
+2024-09-01 04:09:08,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=15.0
+2024-09-01 04:09:19,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111829.33333333333, ans=0.1
+2024-09-01 04:09:42,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.516e+02 1.635e+02 1.895e+02 3.024e+02, threshold=3.269e+02, percent-clipped=0.0
+2024-09-01 04:09:43,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=111882.66666666667, ans=0.125
+2024-09-01 04:10:05,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=111882.66666666667, ans=0.0
+2024-09-01 04:10:10,252 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 50, loss[loss=0.1676, simple_loss=0.1984, pruned_loss=0.0684, over 18998.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.1981, pruned_loss=0.0775, over 828205.61 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 8.0
+2024-09-01 04:10:57,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=111989.33333333333, ans=0.125
+2024-09-01 04:12:38,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=112042.66666666667, ans=0.0
+2024-09-01 04:12:52,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112042.66666666667, ans=0.0
+2024-09-01 04:13:26,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=112096.0, ans=0.2
+2024-09-01 04:13:30,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=112096.0, ans=0.125
+2024-09-01 04:14:13,993 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 100, loss[loss=0.1437, simple_loss=0.1802, pruned_loss=0.05357, over 19294.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.1965, pruned_loss=0.07568, over 1473690.24 frames. ], batch size: 144, lr: 9.93e-05, grad_scale: 8.0
+2024-09-01 04:14:27,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112202.66666666667, ans=0.1
+2024-09-01 04:14:52,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112256.0, ans=0.0
+2024-09-01 04:15:53,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=112309.33333333333, ans=0.2
+2024-09-01 04:15:56,121 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-18.pt
+2024-09-01 04:16:07,172 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616)
+2024-09-01 04:16:07,172 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-09-01 04:16:07,203 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616)
+2024-09-01 04:16:15,956 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 0, loss[loss=0.1948, simple_loss=0.2148, pruned_loss=0.08744, over 18598.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2148, pruned_loss=0.08744, over 18598.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 8.0
+2024-09-01 04:16:15,957 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-09-01 04:16:39,398 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 19, validation: loss=0.1638, simple_loss=0.1883, pruned_loss=0.06968, over 1073944.00 frames.
+2024-09-01 04:16:39,398 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-09-01 04:16:45,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=112352.0, ans=0.0 +2024-09-01 04:16:50,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112352.0, ans=0.125 +2024-09-01 04:16:57,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=112352.0, ans=0.09899494936611666 +2024-09-01 04:16:59,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=112352.0, ans=0.0 +2024-09-01 04:17:15,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=112405.33333333333, ans=0.025 +2024-09-01 04:17:22,801 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.531e+02 1.615e+02 1.818e+02 2.373e+02, threshold=3.231e+02, percent-clipped=0.0 +2024-09-01 04:17:38,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 04:17:38,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 04:18:14,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-09-01 04:18:37,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-09-01 04:19:10,160 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 50, loss[loss=0.1625, simple_loss=0.1897, pruned_loss=0.06767, over 19038.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.1935, pruned_loss=0.075, over 827203.46 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 4.0 +2024-09-01 04:19:16,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.25 vs. limit=15.0 +2024-09-01 04:19:32,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112672.0, ans=0.125 +2024-09-01 04:20:27,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0 +2024-09-01 04:20:38,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112778.66666666667, ans=0.1 +2024-09-01 04:21:10,130 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 100, loss[loss=0.1603, simple_loss=0.1872, pruned_loss=0.06673, over 19274.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.1912, pruned_loss=0.07308, over 1472434.33 frames. 
], batch size: 144, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:21:22,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 04:21:45,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.33 vs. limit=15.0 +2024-09-01 04:21:47,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112938.66666666667, ans=0.1 +2024-09-01 04:21:54,611 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.501e+02 1.584e+02 1.820e+02 2.268e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-09-01 04:22:11,533 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-19.pt +2024-09-01 04:22:18,153 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616) +2024-09-01 04:22:18,154 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 04:22:18,185 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616) +2024-09-01 04:22:26,680 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 0, loss[loss=0.1587, simple_loss=0.1814, pruned_loss=0.06804, over 18599.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.1814, pruned_loss=0.06804, over 18599.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:22:26,680 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-09-01 04:22:50,260 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 20, validation: loss=0.1638, simple_loss=0.1875, pruned_loss=0.07, over 1073944.00 frames. +2024-09-01 04:22:50,261 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26725MB +2024-09-01 04:22:51,932 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=15.0 +2024-09-01 04:23:00,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=113040.0, ans=0.125 +2024-09-01 04:23:09,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=113040.0, ans=0.0 +2024-09-01 04:23:20,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=113093.33333333333, ans=0.025 +2024-09-01 04:23:23,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.19 vs. limit=6.0 +2024-09-01 04:23:55,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=113146.66666666667, ans=0.125 +2024-09-01 04:24:18,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=113200.0, ans=0.025 +2024-09-01 04:24:43,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.62 vs. 
limit=22.5 +2024-09-01 04:24:55,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=113253.33333333333, ans=0.025 +2024-09-01 04:25:07,018 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 50, loss[loss=0.168, simple_loss=0.1934, pruned_loss=0.07124, over 18985.00 frames. ], tot_loss[loss=0.171, simple_loss=0.193, pruned_loss=0.07451, over 828130.18 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 4.0 +2024-09-01 04:26:06,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.99 vs. limit=15.0 +2024-09-01 04:26:32,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113413.33333333333, ans=0.125 +2024-09-01 04:26:35,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.13 vs. limit=15.0 +2024-09-01 04:27:05,373 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.522e+02 1.605e+02 1.869e+02 2.652e+02, threshold=3.210e+02, percent-clipped=0.0 +2024-09-01 04:27:15,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=113520.0, ans=0.2 +2024-09-01 04:27:26,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=113573.33333333333, ans=0.125 +2024-09-01 04:27:27,162 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 100, loss[loss=0.1512, simple_loss=0.1762, pruned_loss=0.06313, over 19321.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.1915, pruned_loss=0.07286, over 1472900.97 frames. ], batch size: 144, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:28:21,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=113626.66666666667, ans=0.125 +2024-09-01 04:28:24,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=113680.0, ans=0.0 +2024-09-01 04:28:26,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=113680.0, ans=0.2 +2024-09-01 04:28:38,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=113680.0, ans=0.0 +2024-09-01 04:28:43,106 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune/epoch-20.pt +2024-09-01 04:28:49,631 INFO [dysarthria_finetune.py:1435] (0/4) (1414201344, 34072559616) +2024-09-01 04:28:49,631 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-09-01 04:28:49,661 INFO [dysarthria_finetune.py:1440] (0/4) (29908205568, 34072559616) +2024-09-01 04:28:49,661 INFO [dysarthria_finetune.py:1442] (0/4) Done! 
diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-1 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-1 new file mode 100644 index 0000000000000000000000000000000000000000..bbdeada70f7077e1130313078061fc641e97b81c --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-1 @@ -0,0 +1,547 @@ +2024-08-31 13:16:10,953 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-31 13:16:10,986 INFO [dysarthria_finetune.py:1214] (1/4) (33735507968, 34072559616) +2024-08-31 13:16:10,986 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-31 13:16:11,957 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-31 13:16:11,957 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': 
True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-31 13:16:14,953 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 66110931 +2024-08-31 13:16:16,265 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-31 13:18:23,850 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-31 13:20:29,533 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-31 13:20:29,665 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 13:20:29,919 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-31 13:20:29,919 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-31 13:20:29,919 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-31 13:20:29,919 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-31 13:20:29,919 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-31 13:20:31,921 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-31 13:20:32,860 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-31 13:20:32,865 INFO [dysarthria_asr_datamodule.py:501] (1/4) About to get dev cuts +2024-08-31 13:20:33,113 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-31 13:20:33,461 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-31 13:20:33,461 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:44:09,211 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.15 vs. limit=5.0 +2024-08-31 13:44:10,208 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=8.80 vs. limit=7.5 +2024-08-31 13:44:14,874 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-31 13:45:00,384 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=15.02 vs. limit=7.5 +2024-08-31 13:45:00,949 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-31 13:47:50,045 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-31 13:47:52,427 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-31 13:50:07,939 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.92 vs. limit=3.0 +2024-08-31 13:50:20,329 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-31 13:50:22,578 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 12658MB +2024-08-31 13:51:23,105 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. ], tot_loss[loss=0.385, simple_loss=0.3627, pruned_loss=0.2224, over 18549.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 13:51:23,105 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-31 14:29:03,504 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-31 14:29:03,505 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-31 14:32:02,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.75 vs. limit=15.0 +2024-08-31 14:42:00,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-08-31 15:24:16,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-31 15:47:57,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=8.34 vs. limit=12.0 +2024-08-31 15:52:42,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.685e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-31 16:18:01,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=100160.0, ans=0.0 +2024-08-31 16:22:18,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100160.0, ans=0.125 +2024-08-31 16:32:12,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=100213.33333333333, ans=0.0 +2024-08-31 16:32:13,898 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.192e+02 7.846e+02 8.685e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-31 16:33:28,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.05 vs. limit=15.0 +2024-08-31 17:02:17,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-31 17:02:30,144 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.4065, simple_loss=0.3849, pruned_loss=0.216, over 19042.00 frames. ], tot_loss[loss=0.3907, simple_loss=0.3686, pruned_loss=0.2194, over 827432.33 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 17:15:25,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=14.92 vs. 
limit=15.0 +2024-08-31 17:24:15,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100320.0, ans=0.1 +2024-08-31 17:26:36,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=100320.0, ans=0.0 +2024-08-31 17:38:48,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100373.33333333333, ans=0.125 +2024-08-31 17:57:49,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=100426.66666666667, ans=0.2 +2024-08-31 18:02:28,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=100426.66666666667, ans=0.2 +2024-08-31 18:02:28,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.83 vs. limit=15.0 +2024-08-31 18:03:25,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=100480.0, ans=0.0 +2024-08-31 18:03:26,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=21.70 vs. limit=15.0 +2024-08-31 18:03:46,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=100480.0, ans=0.125 +2024-08-31 18:06:32,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100480.0, ans=0.0 +2024-08-31 18:08:20,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=100480.0, ans=0.125 +2024-08-31 18:12:52,331 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.982e+02 7.682e+02 8.607e+02 1.055e+03, threshold=1.536e+03, percent-clipped=0.0 +2024-08-31 18:12:52,368 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.389, simple_loss=0.3686, pruned_loss=0.2046, over 19093.00 frames. ], tot_loss[loss=0.3765, simple_loss=0.3557, pruned_loss=0.2068, over 1470684.91 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-31 18:14:55,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=8.09 vs. limit=6.0 +2024-08-31 18:40:56,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0 +2024-08-31 18:42:00,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=100640.0, ans=0.2 +2024-08-31 18:44:19,282 INFO [dysarthria_finetune.py:1435] (1/4) (4260036608, 34072559616) +2024-08-31 18:44:19,283 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-31 18:44:19,342 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-08-31 18:46:01,811 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 0, loss[loss=0.3255, simple_loss=0.3086, pruned_loss=0.1697, over 18746.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3086, pruned_loss=0.1697, over 18746.00 frames. 
], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-31 18:46:01,812 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-31 19:10:08,816 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 2, validation: loss=0.3307, simple_loss=0.3141, pruned_loss=0.1687, over 1073944.00 frames. +2024-08-31 19:10:08,816 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13360MB +2024-08-31 19:26:44,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.90 vs. limit=15.0 +2024-08-31 19:29:07,880 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.91 vs. limit=22.5 +2024-08-31 19:48:00,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=14.77 vs. limit=12.0 +2024-08-31 19:51:01,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-31 20:05:00,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=15.0 +2024-08-31 20:15:21,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=100842.66666666667, ans=0.125 +2024-08-31 20:15:22,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=100842.66666666667, ans=0.125 +2024-08-31 20:31:44,803 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 50, loss[loss=0.402, simple_loss=0.3797, pruned_loss=0.2175, over 19071.00 frames. ], tot_loss[loss=0.3532, simple_loss=0.3343, pruned_loss=0.1876, over 827854.65 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-31 20:33:01,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.96 vs. limit=22.5 +2024-08-31 20:51:20,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.69 vs. limit=15.0 +2024-08-31 20:55:24,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101002.66666666667, ans=0.1 +2024-08-31 20:58:18,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.84 vs. limit=15.0 +2024-08-31 21:01:41,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.35 vs. 
limit=6.0 +2024-08-31 21:03:09,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.995e+02 5.661e+02 6.268e+02 7.321e+02, threshold=1.132e+03, percent-clipped=0.0 +2024-08-31 21:18:50,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-31 21:18:51,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101109.33333333333, ans=0.125 +2024-08-31 21:20:54,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=101109.33333333333, ans=0.125 +2024-08-31 21:20:54,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.51 vs. limit=6.0 +2024-08-31 21:26:13,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.57 vs. limit=15.0 +2024-08-31 21:40:46,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101162.66666666667, ans=0.125 +2024-08-31 21:42:13,844 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 100, loss[loss=0.3321, simple_loss=0.3165, pruned_loss=0.1663, over 19090.00 frames. ], tot_loss[loss=0.3448, simple_loss=0.3267, pruned_loss=0.1813, over 1472213.55 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 4.0 +2024-08-31 22:21:43,987 INFO [dysarthria_finetune.py:1435] (1/4) (30081024, 34072559616) +2024-08-31 22:21:43,988 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-31 22:21:44,061 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-08-31 22:22:38,984 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 0, loss[loss=0.352, simple_loss=0.3312, pruned_loss=0.1954, over 18511.00 frames. ], tot_loss[loss=0.352, simple_loss=0.3312, pruned_loss=0.1954, over 18511.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 2.0 +2024-08-31 22:22:38,985 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-31 22:31:34,595 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 3, validation: loss=0.2979, simple_loss=0.2853, pruned_loss=0.1432, over 1073944.00 frames. +2024-08-31 22:31:34,955 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13741MB +2024-08-31 22:44:42,479 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 22:50:32,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=101424.0, ans=0.0 +2024-08-31 23:03:41,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.14 vs. limit=15.0 +2024-08-31 23:06:44,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=101477.33333333333, ans=0.0 +2024-08-31 23:17:17,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.64 vs. 
limit=15.0 +2024-08-31 23:30:40,080 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.802e+02 3.787e+02 4.308e+02 4.929e+02 6.122e+02, threshold=8.616e+02, percent-clipped=0.0 +2024-08-31 23:32:38,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101637.33333333333, ans=0.125 +2024-08-31 23:32:38,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=101637.33333333333, ans=0.07 +2024-08-31 23:32:42,302 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 50, loss[loss=0.3363, simple_loss=0.3191, pruned_loss=0.175, over 19005.00 frames. ], tot_loss[loss=0.3273, simple_loss=0.3109, pruned_loss=0.1692, over 828905.42 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 1.0 +2024-09-01 00:13:44,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=101850.66666666667, ans=0.05 +2024-09-01 00:13:44,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.59 vs. limit=12.0 +2024-09-01 00:17:08,365 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 100, loss[loss=0.2752, simple_loss=0.2658, pruned_loss=0.1272, over 19133.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3011, pruned_loss=0.1606, over 1474266.40 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 1.0 +2024-09-01 00:27:26,415 INFO [dysarthria_finetune.py:1435] (1/4) (751501312, 34072559616) +2024-09-01 00:27:26,416 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 00:27:26,488 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 00:27:42,721 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 0, loss[loss=0.2836, simple_loss=0.2705, pruned_loss=0.1431, over 18466.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.2705, pruned_loss=0.1431, over 18466.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 2.0 +2024-09-01 00:27:42,721 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 00:46:27,445 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 4, validation: loss=0.279, simple_loss=0.2687, pruned_loss=0.1325, over 1073944.00 frames. +2024-09-01 00:46:27,445 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13741MB +2024-09-01 01:00:08,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102053.33333333333, ans=0.125 +2024-09-01 01:00:08,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.43 vs. limit=15.0 +2024-09-01 01:16:25,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.50 vs. 
limit=10.0 +2024-09-01 01:16:52,147 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.221e+02 3.659e+02 4.077e+02 5.349e+02, threshold=7.318e+02, percent-clipped=0.0 +2024-09-01 01:18:56,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:21:13,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=102213.33333333333, ans=0.0 +2024-09-01 01:25:29,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.11 vs. limit=10.0 +2024-09-01 01:27:58,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=102266.66666666667, ans=0.1 +2024-09-01 01:31:59,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=102266.66666666667, ans=0.125 +2024-09-01 01:39:48,718 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 50, loss[loss=0.2835, simple_loss=0.2737, pruned_loss=0.1338, over 18961.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.2876, pruned_loss=0.1492, over 827373.05 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 0.5 +2024-09-01 01:41:25,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=102320.0, ans=0.0 +2024-09-01 01:42:52,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102320.0, ans=0.125 +2024-09-01 01:44:49,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.69 vs. limit=6.0 +2024-09-01 01:47:07,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102373.33333333333, ans=0.125 +2024-09-01 01:47:40,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=12.81 vs. limit=12.0 +2024-09-01 01:50:19,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.59 vs. limit=6.0 +2024-09-01 01:59:50,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=102533.33333333333, ans=0.025 +2024-09-01 02:01:07,716 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 100, loss[loss=0.292, simple_loss=0.2822, pruned_loss=0.1383, over 19038.00 frames. ], tot_loss[loss=0.2947, simple_loss=0.2821, pruned_loss=0.1463, over 1472261.06 frames. 
], batch size: 133, lr: 9.86e-05, grad_scale: 1.0 +2024-09-01 02:05:31,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=102640.0, ans=0.0 +2024-09-01 02:07:59,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=102640.0, ans=0.5 +2024-09-01 02:09:39,912 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 2.859e+02 3.213e+02 3.589e+02 4.738e+02, threshold=6.426e+02, percent-clipped=0.0 +2024-09-01 02:09:57,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102693.33333333333, ans=0.1 +2024-09-01 02:10:16,926 INFO [dysarthria_finetune.py:1435] (1/4) (986382336, 34072559616) +2024-09-01 02:10:16,927 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:10:16,997 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 02:10:37,119 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 0, loss[loss=0.2672, simple_loss=0.2581, pruned_loss=0.1276, over 18670.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.2581, pruned_loss=0.1276, over 18670.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 1.0 +2024-09-01 02:10:37,120 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:15:37,616 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 5, validation: loss=0.2588, simple_loss=0.2515, pruned_loss=0.1195, over 1073944.00 frames. +2024-09-01 02:15:37,616 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13741MB +2024-09-01 02:17:13,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=102741.33333333333, ans=0.025 +2024-09-01 02:18:24,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=102794.66666666667, ans=0.0 +2024-09-01 02:18:38,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.15 vs. limit=15.0 +2024-09-01 02:18:41,911 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.54 vs. limit=22.5 +2024-09-01 02:19:48,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102848.0, ans=0.125 +2024-09-01 02:20:16,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=102848.0, ans=0.025 +2024-09-01 02:20:17,157 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.34 vs. limit=6.0 +2024-09-01 02:23:25,250 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 50, loss[loss=0.2869, simple_loss=0.2761, pruned_loss=0.1406, over 18968.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.2688, pruned_loss=0.135, over 828630.89 frames. 
], batch size: 102, lr: 1.00e-04, grad_scale: 0.25 +2024-09-01 02:24:38,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=103061.33333333333, ans=0.125 +2024-09-01 02:25:34,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103061.33333333333, ans=0.125 +2024-09-01 02:25:52,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 02:26:35,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.28 vs. limit=15.0 +2024-09-01 02:27:25,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.97 vs. limit=6.0 +2024-09-01 02:27:28,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.75 vs. limit=15.0 +2024-09-01 02:27:48,820 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 2.619e+02 2.908e+02 3.410e+02 5.061e+02, threshold=5.817e+02, percent-clipped=0.0 +2024-09-01 02:27:54,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=103221.33333333333, ans=0.2 +2024-09-01 02:28:05,167 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 100, loss[loss=0.2608, simple_loss=0.2525, pruned_loss=0.1251, over 19157.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.2656, pruned_loss=0.1337, over 1473409.40 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 0.5 +2024-09-01 02:29:05,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103328.0, ans=0.1 +2024-09-01 02:29:05,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103328.0, ans=0.125 +2024-09-01 02:30:03,385 INFO [dysarthria_finetune.py:1435] (1/4) (206241792, 34072559616) +2024-09-01 02:30:03,386 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:30:03,459 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 02:30:19,017 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 0, loss[loss=0.273, simple_loss=0.2654, pruned_loss=0.1292, over 18435.00 frames. ], tot_loss[loss=0.273, simple_loss=0.2654, pruned_loss=0.1292, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:30:19,018 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:30:42,395 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 6, validation: loss=0.247, simple_loss=0.2415, pruned_loss=0.1137, over 1073944.00 frames. 
+2024-09-01 02:30:51,758 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13802MB +2024-09-01 02:32:21,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103477.33333333333, ans=0.1 +2024-09-01 02:32:55,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103530.66666666667, ans=0.125 +2024-09-01 02:33:12,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=103584.0, ans=0.2 +2024-09-01 02:33:29,944 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=20.43 vs. limit=15.0 +2024-09-01 02:33:49,355 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 50, loss[loss=0.2351, simple_loss=0.2307, pruned_loss=0.1078, over 19041.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.2549, pruned_loss=0.1245, over 827399.35 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 0.5 +2024-09-01 02:33:52,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.59 vs. limit=15.0 +2024-09-01 02:34:02,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=103690.66666666667, ans=0.2 +2024-09-01 02:34:29,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=103744.0, ans=0.125 +2024-09-01 02:34:37,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=103744.0, ans=0.0 +2024-09-01 02:34:37,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103744.0, ans=0.1 +2024-09-01 02:34:40,101 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.072e+02 2.401e+02 2.633e+02 2.975e+02 4.049e+02, threshold=5.266e+02, percent-clipped=0.0 +2024-09-01 02:34:42,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=103797.33333333333, ans=0.2 +2024-09-01 02:35:55,583 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 100, loss[loss=0.2186, simple_loss=0.2172, pruned_loss=0.09615, over 19066.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.2511, pruned_loss=0.1221, over 1471849.14 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:36:03,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103957.33333333333, ans=0.125 +2024-09-01 02:36:08,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=103957.33333333333, ans=0.5 +2024-09-01 02:36:15,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.74 vs. 
limit=15.0 +2024-09-01 02:36:58,487 INFO [dysarthria_finetune.py:1435] (1/4) (1099628544, 34072559616) +2024-09-01 02:36:58,488 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:36:58,570 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 02:37:37,099 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 0, loss[loss=0.2803, simple_loss=0.2652, pruned_loss=0.1482, over 18532.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.2652, pruned_loss=0.1482, over 18532.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:37:37,099 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:38:00,932 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 7, validation: loss=0.2303, simple_loss=0.2284, pruned_loss=0.1027, over 1073944.00 frames. +2024-09-01 02:38:00,932 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13802MB +2024-09-01 02:38:05,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.53 vs. limit=15.0 +2024-09-01 02:38:12,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=104106.66666666667, ans=0.0 +2024-09-01 02:38:48,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104213.33333333333, ans=0.1 +2024-09-01 02:38:50,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.93 vs. limit=22.5 +2024-09-01 02:39:00,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=104213.33333333333, ans=0.0 +2024-09-01 02:39:31,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104320.0, ans=0.125 +2024-09-01 02:39:39,116 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.248e+02 2.388e+02 2.643e+02 3.863e+02, threshold=4.776e+02, percent-clipped=0.0 +2024-09-01 02:39:40,397 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:39:54,673 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 50, loss[loss=0.2541, simple_loss=0.2467, pruned_loss=0.1236, over 19096.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.2481, pruned_loss=0.12, over 827950.42 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:40:02,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.78 vs. limit=15.0 +2024-09-01 02:40:27,014 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.27 vs. limit=8.0 +2024-09-01 02:40:30,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=12.0 +2024-09-01 02:40:39,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.20 vs. 
limit=15.0 +2024-09-01 02:40:39,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.09 vs. limit=15.0 +2024-09-01 02:41:15,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=104533.33333333333, ans=0.125 +2024-09-01 02:41:17,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=104533.33333333333, ans=0.0 +2024-09-01 02:41:20,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.78 vs. limit=6.0 +2024-09-01 02:41:41,952 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 100, loss[loss=0.2433, simple_loss=0.2411, pruned_loss=0.1111, over 19105.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.2432, pruned_loss=0.1163, over 1472811.51 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:41:43,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=104640.0, ans=0.125 +2024-09-01 02:42:06,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=104693.33333333333, ans=0.2 +2024-09-01 02:42:39,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten.whitening_limit, batch_count=104746.66666666667, ans=15.0 +2024-09-01 02:42:39,968 INFO [dysarthria_finetune.py:1435] (1/4) (13976141824, 34072559616) +2024-09-01 02:42:39,969 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:42:40,006 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 02:42:52,884 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 0, loss[loss=0.21, simple_loss=0.2125, pruned_loss=0.08962, over 18679.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2125, pruned_loss=0.08962, over 18679.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 4.0 +2024-09-01 02:42:52,884 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:43:16,312 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 8, validation: loss=0.2224, simple_loss=0.2225, pruned_loss=0.09892, over 1073944.00 frames. +2024-09-01 02:43:16,312 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13802MB +2024-09-01 02:43:18,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.78 vs. limit=12.0 +2024-09-01 02:43:35,829 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.41 vs. limit=22.5 +2024-09-01 02:43:51,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.041e+02 2.195e+02 2.485e+02 3.530e+02, threshold=4.390e+02, percent-clipped=0.0 +2024-09-01 02:43:52,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=104842.66666666667, ans=0.125 +2024-09-01 02:44:26,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.97 vs. 
limit=22.5 +2024-09-01 02:45:00,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105002.66666666667, ans=0.1 +2024-09-01 02:45:06,401 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 50, loss[loss=0.2115, simple_loss=0.2135, pruned_loss=0.09232, over 19009.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2365, pruned_loss=0.1107, over 829068.39 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:45:14,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=105056.0, ans=0.125 +2024-09-01 02:46:34,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105109.33333333333, ans=0.125 +2024-09-01 02:46:43,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105109.33333333333, ans=0.1 +2024-09-01 02:46:43,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105109.33333333333, ans=0.1 +2024-09-01 02:47:46,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.03 vs. limit=15.0 +2024-09-01 02:47:55,802 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 100, loss[loss=0.2412, simple_loss=0.2401, pruned_loss=0.1112, over 19109.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.2354, pruned_loss=0.1097, over 1473116.98 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:48:01,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=105322.66666666667, ans=0.025 +2024-09-01 02:48:34,268 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 1.930e+02 2.062e+02 2.246e+02 3.148e+02, threshold=4.124e+02, percent-clipped=0.0 +2024-09-01 02:48:55,528 INFO [dysarthria_finetune.py:1435] (1/4) (2892693504, 34072559616) +2024-09-01 02:48:55,529 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:48:55,597 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 02:49:09,691 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 0, loss[loss=0.2444, simple_loss=0.2384, pruned_loss=0.1195, over 18520.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2384, pruned_loss=0.1195, over 18520.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:49:09,691 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:49:40,376 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 9, validation: loss=0.211, simple_loss=0.2147, pruned_loss=0.09159, over 1073944.00 frames. +2024-09-01 02:49:40,377 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13802MB +2024-09-01 02:49:47,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.78 vs. limit=22.5 +2024-09-01 02:50:36,220 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.01 vs. 
limit=22.5 +2024-09-01 02:50:50,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=105584.0, ans=15.0 +2024-09-01 02:50:55,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.03 vs. limit=22.5 +2024-09-01 02:51:33,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:52:18,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=105690.66666666667, ans=0.1 +2024-09-01 02:52:30,683 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 50, loss[loss=0.2645, simple_loss=0.259, pruned_loss=0.1287, over 19008.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2272, pruned_loss=0.1031, over 827563.28 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:52:56,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=105744.0, ans=0.0 +2024-09-01 02:52:56,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=11.42 vs. limit=12.0 +2024-09-01 02:53:09,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105797.33333333333, ans=0.1 +2024-09-01 02:53:13,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.61 vs. limit=22.5 +2024-09-01 02:53:46,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:53:56,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:54:35,246 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 1.850e+02 1.979e+02 2.143e+02 2.885e+02, threshold=3.959e+02, percent-clipped=0.0 +2024-09-01 02:54:39,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105957.33333333333, ans=0.1 +2024-09-01 02:55:06,099 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 100, loss[loss=0.2255, simple_loss=0.2295, pruned_loss=0.0999, over 19113.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.2282, pruned_loss=0.1036, over 1473118.75 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 02:55:59,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.84 vs. limit=15.0 +2024-09-01 02:56:27,086 INFO [dysarthria_finetune.py:1435] (1/4) (879427584, 34072559616) +2024-09-01 02:56:27,087 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 02:56:27,156 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 02:56:40,077 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 0, loss[loss=0.2391, simple_loss=0.2381, pruned_loss=0.1126, over 18522.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2381, pruned_loss=0.1126, over 18522.00 frames. 
], batch size: 65, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 02:56:40,078 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 02:56:45,717 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.3437, 1.5189, 3.2162, 3.0289], device='cuda:1') +2024-09-01 02:57:03,512 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 10, validation: loss=0.2075, simple_loss=0.2129, pruned_loss=0.09054, over 1073944.00 frames. +2024-09-01 02:57:03,512 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13802MB +2024-09-01 02:57:11,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=106165.33333333333, ans=0.125 +2024-09-01 02:57:30,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-09-01 02:57:52,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106272.0, ans=0.1 +2024-09-01 02:57:55,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.90 vs. limit=22.5 +2024-09-01 02:58:03,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=106272.0, ans=0.025 +2024-09-01 02:58:12,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:58:14,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:58:18,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:58:54,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=106432.0, ans=0.125 +2024-09-01 02:58:54,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106432.0, ans=0.1 +2024-09-01 02:58:55,057 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 50, loss[loss=0.2182, simple_loss=0.2253, pruned_loss=0.09465, over 18973.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2246, pruned_loss=0.1008, over 826863.11 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:58:58,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=30.43 vs. 
limit=22.5 +2024-09-01 02:59:20,727 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.769e+02 1.897e+02 2.105e+02 2.891e+02, threshold=3.793e+02, percent-clipped=0.0 +2024-09-01 02:59:22,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106485.33333333333, ans=0.125 +2024-09-01 02:59:43,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=106538.66666666667, ans=0.0 +2024-09-01 02:59:44,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.31 vs. limit=15.0 +2024-09-01 03:00:09,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=106592.0, ans=0.09899494936611666 +2024-09-01 03:00:09,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.02 vs. limit=15.0 +2024-09-01 03:00:24,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.44 vs. limit=15.0 +2024-09-01 03:00:41,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=106698.66666666667, ans=0.125 +2024-09-01 03:00:42,960 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 100, loss[loss=0.2084, simple_loss=0.2207, pruned_loss=0.08569, over 19188.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2207, pruned_loss=0.09663, over 1472464.39 frames. ], batch size: 134, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 03:00:57,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106698.66666666667, ans=0.1 +2024-09-01 03:01:01,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106698.66666666667, ans=0.125 +2024-09-01 03:01:46,984 INFO [dysarthria_finetune.py:1435] (1/4) (13751746560, 34072559616) +2024-09-01 03:01:46,985 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:01:47,028 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:08:00,726 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 0, loss[loss=0.2229, simple_loss=0.2207, pruned_loss=0.1079, over 18704.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2207, pruned_loss=0.1079, over 18704.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:08:00,726 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:08:32,550 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 11, validation: loss=0.2002, simple_loss=0.2088, pruned_loss=0.08618, over 1073944.00 frames. 
+2024-09-01 03:08:32,551 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13802MB +2024-09-01 03:09:34,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106906.66666666667, ans=0.125 +2024-09-01 03:09:39,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=106906.66666666667, ans=0.2 +2024-09-01 03:10:17,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.20 vs. limit=10.0 +2024-09-01 03:10:19,684 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.065e-02 +2024-09-01 03:10:42,588 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 1.721e+02 1.824e+02 2.016e+02 2.682e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-09-01 03:10:51,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.97 vs. limit=15.0 +2024-09-01 03:11:27,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107066.66666666667, ans=0.0 +2024-09-01 03:12:03,915 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 50, loss[loss=0.2246, simple_loss=0.2287, pruned_loss=0.1031, over 18947.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2186, pruned_loss=0.09438, over 828704.78 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:12:09,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=107120.0, ans=0.07 +2024-09-01 03:12:17,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=107120.0, ans=0.125 +2024-09-01 03:13:48,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.65 vs. limit=15.0 +2024-09-01 03:13:52,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=107173.33333333333, ans=0.0 +2024-09-01 03:13:52,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107173.33333333333, ans=0.125 +2024-09-01 03:15:13,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107333.33333333333, ans=0.1 +2024-09-01 03:15:56,724 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 100, loss[loss=0.2048, simple_loss=0.2119, pruned_loss=0.09169, over 19147.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2164, pruned_loss=0.09262, over 1473582.76 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:16:04,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0 +2024-09-01 03:17:23,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=9.56 vs. 
limit=12.0 +2024-09-01 03:17:56,543 INFO [dysarthria_finetune.py:1435] (1/4) (761987072, 34072559616) +2024-09-01 03:17:56,543 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:17:56,610 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:18:09,585 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 0, loss[loss=0.2491, simple_loss=0.2421, pruned_loss=0.1257, over 18735.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.2421, pruned_loss=0.1257, over 18735.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:18:09,585 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:18:33,051 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 12, validation: loss=0.1929, simple_loss=0.2049, pruned_loss=0.0821, over 1073944.00 frames. +2024-09-01 03:18:33,051 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 03:18:54,484 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.518e+02 1.683e+02 1.764e+02 1.920e+02 2.754e+02, threshold=3.529e+02, percent-clipped=0.0 +2024-09-01 03:19:05,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=107594.66666666667, ans=0.125 +2024-09-01 03:19:13,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.42 vs. limit=15.0 +2024-09-01 03:19:20,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=107594.66666666667, ans=10.0 +2024-09-01 03:19:28,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=107594.66666666667, ans=0.0 +2024-09-01 03:20:27,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.24 vs. limit=22.5 +2024-09-01 03:20:40,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=107754.66666666667, ans=0.0 +2024-09-01 03:21:00,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.88 vs. limit=22.5 +2024-09-01 03:21:03,705 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 50, loss[loss=0.191, simple_loss=0.2034, pruned_loss=0.08188, over 18974.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2168, pruned_loss=0.09266, over 827168.58 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:21:26,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107808.0, ans=0.125 +2024-09-01 03:22:11,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=107861.33333333333, ans=0.025 +2024-09-01 03:23:19,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107968.0, ans=0.1 +2024-09-01 03:23:36,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=108021.33333333333, ans=0.05 +2024-09-01 03:24:01,733 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 100, loss[loss=0.1954, simple_loss=0.2087, pruned_loss=0.08413, over 19114.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2153, pruned_loss=0.09222, over 1473649.48 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:24:05,439 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:24:21,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108074.66666666667, ans=0.1 +2024-09-01 03:24:21,991 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.650e+02 1.753e+02 1.928e+02 2.697e+02, threshold=3.507e+02, percent-clipped=0.0 +2024-09-01 03:25:11,655 INFO [dysarthria_finetune.py:1435] (1/4) (13963558912, 34072559616) +2024-09-01 03:25:11,656 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:25:11,707 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:25:24,762 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 0, loss[loss=0.2518, simple_loss=0.2478, pruned_loss=0.1254, over 18361.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.2478, pruned_loss=0.1254, over 18361.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:25:24,763 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:25:48,266 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 13, validation: loss=0.1886, simple_loss=0.2026, pruned_loss=0.08078, over 1073944.00 frames. +2024-09-01 03:25:48,267 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 03:26:03,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=11.36 vs. limit=15.0 +2024-09-01 03:26:17,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108282.66666666667, ans=0.1 +2024-09-01 03:26:25,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 03:26:34,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=108336.0, ans=0.09899494936611666 +2024-09-01 03:26:49,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.94 vs. 
limit=15.0 +2024-09-01 03:27:30,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108442.66666666667, ans=0.1 +2024-09-01 03:27:54,665 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 50, loss[loss=0.1767, simple_loss=0.2053, pruned_loss=0.0651, over 19011.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2082, pruned_loss=0.08648, over 828396.70 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 1.0 +2024-09-01 03:27:58,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=108496.0, ans=0.125 +2024-09-01 03:28:16,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.87 vs. limit=22.5 +2024-09-01 03:28:39,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108602.66666666667, ans=0.1 +2024-09-01 03:28:59,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108602.66666666667, ans=0.1 +2024-09-01 03:29:07,377 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.617e+02 1.723e+02 2.007e+02 2.594e+02, threshold=3.446e+02, percent-clipped=0.0 +2024-09-01 03:29:43,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 03:29:46,152 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 100, loss[loss=0.1983, simple_loss=0.2085, pruned_loss=0.08984, over 19217.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2076, pruned_loss=0.08571, over 1472353.64 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 2.0 +2024-09-01 03:29:47,411 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:30:04,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 03:30:40,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=108869.33333333333, ans=0.125 +2024-09-01 03:30:46,121 INFO [dysarthria_finetune.py:1435] (1/4) (13737066496, 34072559616) +2024-09-01 03:30:46,122 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:30:46,171 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:30:58,986 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 0, loss[loss=0.2251, simple_loss=0.2279, pruned_loss=0.1084, over 18619.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2279, pruned_loss=0.1084, over 18619.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:30:58,986 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:31:18,493 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.0223, 4.3421, 4.7493, 3.7136], device='cuda:1') +2024-09-01 03:31:23,178 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 14, validation: loss=0.1833, simple_loss=0.2, pruned_loss=0.07856, over 1073944.00 frames. 
+2024-09-01 03:31:23,179 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 03:31:38,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 03:31:38,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108917.33333333333, ans=0.0 +2024-09-01 03:31:40,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 03:31:47,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=108970.66666666667, ans=0.2 +2024-09-01 03:31:58,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.21 vs. limit=22.5 +2024-09-01 03:32:08,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=109024.0, ans=0.0 +2024-09-01 03:32:24,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109024.0, ans=0.1 +2024-09-01 03:32:57,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=109130.66666666667, ans=0.125 +2024-09-01 03:33:13,475 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 50, loss[loss=0.2199, simple_loss=0.2364, pruned_loss=0.09745, over 19004.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2079, pruned_loss=0.0854, over 826629.84 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:33:19,742 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.619e+02 1.722e+02 1.984e+02 2.668e+02, threshold=3.445e+02, percent-clipped=0.0 +2024-09-01 03:33:59,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=109290.66666666667, ans=0.125 +2024-09-01 03:34:10,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=109290.66666666667, ans=0.125 +2024-09-01 03:34:41,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-09-01 03:35:00,688 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 100, loss[loss=0.1848, simple_loss=0.2069, pruned_loss=0.0775, over 19114.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2058, pruned_loss=0.08364, over 1472155.55 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 8.0 +2024-09-01 03:35:12,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=109450.66666666667, ans=0.2 +2024-09-01 03:35:14,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.28 vs. 
limit=15.0 +2024-09-01 03:35:16,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109450.66666666667, ans=0.125 +2024-09-01 03:35:29,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109504.0, ans=0.125 +2024-09-01 03:35:35,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109504.0, ans=0.1 +2024-09-01 03:35:41,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109557.33333333333, ans=0.125 +2024-09-01 03:35:43,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=109557.33333333333, ans=0.0 +2024-09-01 03:35:50,275 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.634e-03 +2024-09-01 03:36:00,185 INFO [dysarthria_finetune.py:1435] (1/4) (13764329472, 34072559616) +2024-09-01 03:36:00,186 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:36:00,233 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:36:14,210 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 0, loss[loss=0.2063, simple_loss=0.2099, pruned_loss=0.09989, over 18480.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2099, pruned_loss=0.09989, over 18480.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 4.0 +2024-09-01 03:36:14,210 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:36:45,412 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 15, validation: loss=0.1765, simple_loss=0.1963, pruned_loss=0.07531, over 1073944.00 frames. +2024-09-01 03:36:45,413 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 03:36:52,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.42 vs. limit=6.0 +2024-09-01 03:37:05,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=109605.33333333333, ans=0.04949747468305833 +2024-09-01 03:37:18,330 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=12.0 +2024-09-01 03:37:54,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.64 vs. limit=12.0 +2024-09-01 03:38:03,357 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.579e+02 1.672e+02 1.908e+02 2.431e+02, threshold=3.343e+02, percent-clipped=0.0 +2024-09-01 03:38:04,830 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:38:05,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.71 vs. limit=10.0 +2024-09-01 03:38:07,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. 
limit=6.0 +2024-09-01 03:38:52,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=109818.66666666667, ans=0.125 +2024-09-01 03:39:14,738 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 50, loss[loss=0.2065, simple_loss=0.2209, pruned_loss=0.09415, over 19020.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2044, pruned_loss=0.08297, over 827766.05 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 4.0 +2024-09-01 03:39:22,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109872.0, ans=0.1 +2024-09-01 03:39:26,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.28 vs. limit=15.0 +2024-09-01 03:41:37,698 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:41:54,226 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 100, loss[loss=0.1575, simple_loss=0.1791, pruned_loss=0.06648, over 19074.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2046, pruned_loss=0.0824, over 1471681.17 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 8.0 +2024-09-01 03:42:37,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.54 vs. limit=15.0 +2024-09-01 03:43:21,354 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.548e+02 1.650e+02 1.862e+02 2.617e+02, threshold=3.300e+02, percent-clipped=0.0 +2024-09-01 03:43:24,588 INFO [dysarthria_finetune.py:1435] (1/4) (751501312, 34072559616) +2024-09-01 03:43:24,589 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:43:24,672 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:43:40,066 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 0, loss[loss=0.2092, simple_loss=0.2236, pruned_loss=0.0965, over 18847.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2236, pruned_loss=0.0965, over 18847.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:43:40,066 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:44:25,988 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 16, validation: loss=0.1763, simple_loss=0.1967, pruned_loss=0.07691, over 1073944.00 frames. 
+2024-09-01 03:44:25,988 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 03:45:29,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=110346.66666666667, ans=0.125 +2024-09-01 03:46:19,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110400.0, ans=0.125 +2024-09-01 03:46:31,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=110400.0, ans=0.025 +2024-09-01 03:47:45,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=110453.33333333333, ans=0.125 +2024-09-01 03:47:45,932 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.033e-02 +2024-09-01 03:48:04,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=110506.66666666667, ans=0.0 +2024-09-01 03:48:37,719 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 50, loss[loss=0.195, simple_loss=0.2132, pruned_loss=0.08816, over 19018.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2022, pruned_loss=0.08131, over 827868.27 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0 +2024-09-01 03:48:48,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.42 vs. limit=15.0 +2024-09-01 03:49:14,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-09-01 03:49:45,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=110613.33333333333, ans=0.025 +2024-09-01 03:49:45,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=110613.33333333333, ans=0.125 +2024-09-01 03:51:00,306 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.55 vs. limit=15.0 +2024-09-01 03:52:02,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=110773.33333333333, ans=0.125 +2024-09-01 03:52:25,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-09-01 03:52:26,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.555e+02 1.657e+02 1.896e+02 2.445e+02, threshold=3.314e+02, percent-clipped=0.0 +2024-09-01 03:52:31,257 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 100, loss[loss=0.1459, simple_loss=0.1778, pruned_loss=0.05702, over 19118.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2022, pruned_loss=0.08043, over 1473208.83 frames. 
], batch size: 133, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:52:49,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:52:53,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=110826.66666666667, ans=0.2 +2024-09-01 03:54:05,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.16 vs. limit=15.0 +2024-09-01 03:54:07,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=110933.33333333333, ans=0.0 +2024-09-01 03:54:10,877 INFO [dysarthria_finetune.py:1435] (1/4) (1116405760, 34072559616) +2024-09-01 03:54:10,878 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 03:54:10,950 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 03:54:38,950 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 0, loss[loss=0.2221, simple_loss=0.2402, pruned_loss=0.102, over 18527.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2402, pruned_loss=0.102, over 18527.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:54:38,951 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 03:54:42,525 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.2105, 1.5260, 3.5489, 3.2683], device='cuda:1') +2024-09-01 03:54:57,218 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0398, 3.2854, 3.4219, 3.2284], device='cuda:1') +2024-09-01 03:55:19,882 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 17, validation: loss=0.1671, simple_loss=0.1912, pruned_loss=0.07151, over 1073944.00 frames. +2024-09-01 03:55:19,882 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 03:55:35,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=110981.33333333333, ans=0.125 +2024-09-01 03:58:57,666 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 50, loss[loss=0.1555, simple_loss=0.1911, pruned_loss=0.05993, over 19037.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.1997, pruned_loss=0.07926, over 827680.99 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0 +2024-09-01 04:00:12,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=111248.0, ans=0.0 +2024-09-01 04:00:12,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=111248.0, ans=0.2 +2024-09-01 04:01:02,144 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.555e+02 1.659e+02 1.888e+02 2.626e+02, threshold=3.319e+02, percent-clipped=0.0 +2024-09-01 04:01:58,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=111408.0, ans=0.125 +2024-09-01 04:02:01,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.77 vs. 
limit=15.0 +2024-09-01 04:02:48,335 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 100, loss[loss=0.1459, simple_loss=0.1774, pruned_loss=0.05723, over 19067.00 frames. ], tot_loss[loss=0.175, simple_loss=0.1964, pruned_loss=0.0768, over 1472664.14 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 04:04:10,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111621.33333333333, ans=0.1 +2024-09-01 04:05:21,543 INFO [dysarthria_finetune.py:1435] (1/4) (13766426624, 34072559616) +2024-09-01 04:05:21,544 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 04:05:21,588 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 04:05:29,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 04:05:38,878 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 0, loss[loss=0.2076, simple_loss=0.2183, pruned_loss=0.09838, over 18622.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2183, pruned_loss=0.09838, over 18622.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:05:38,879 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 04:06:14,844 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 18, validation: loss=0.1676, simple_loss=0.191, pruned_loss=0.07213, over 1073944.00 frames. +2024-09-01 04:06:14,844 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 04:06:34,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111669.33333333333, ans=0.125 +2024-09-01 04:08:04,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=111776.0, ans=0.0 +2024-09-01 04:09:16,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111829.33333333333, ans=0.125 +2024-09-01 04:09:32,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0 +2024-09-01 04:09:42,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.516e+02 1.635e+02 1.895e+02 3.024e+02, threshold=3.269e+02, percent-clipped=0.0 +2024-09-01 04:10:09,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=111936.0, ans=0.0 +2024-09-01 04:10:10,259 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 50, loss[loss=0.1811, simple_loss=0.2012, pruned_loss=0.08052, over 19026.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.1963, pruned_loss=0.07754, over 826500.31 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:10:23,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111936.0, ans=0.1 +2024-09-01 04:13:28,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-09-01 04:13:38,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.55 vs. 
limit=22.5 +2024-09-01 04:13:44,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112149.33333333333, ans=0.0 +2024-09-01 04:14:13,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112202.66666666667, ans=0.125 +2024-09-01 04:14:13,997 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 100, loss[loss=0.1574, simple_loss=0.1868, pruned_loss=0.06402, over 19036.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.1946, pruned_loss=0.07588, over 1471672.61 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:14:49,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=112256.0, ans=0.125 +2024-09-01 04:14:58,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.53 vs. limit=22.5 +2024-09-01 04:14:59,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=112256.0, ans=0.125 +2024-09-01 04:15:13,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112309.33333333333, ans=0.1 +2024-09-01 04:15:56,099 INFO [dysarthria_finetune.py:1435] (1/4) (403374080, 34072559616) +2024-09-01 04:15:56,100 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 04:15:56,172 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 04:16:15,956 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 0, loss[loss=0.1961, simple_loss=0.2182, pruned_loss=0.08699, over 18691.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2182, pruned_loss=0.08699, over 18691.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:16:15,956 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 04:16:39,406 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 19, validation: loss=0.1638, simple_loss=0.1883, pruned_loss=0.06968, over 1073944.00 frames. +2024-09-01 04:16:39,407 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 04:17:22,803 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.531e+02 1.615e+02 1.818e+02 2.373e+02, threshold=3.231e+02, percent-clipped=0.0 +2024-09-01 04:17:26,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.47 vs. limit=15.0 +2024-09-01 04:18:02,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112458.66666666667, ans=0.1 +2024-09-01 04:18:30,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=13.21 vs. 
limit=12.0 +2024-09-01 04:18:39,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=112565.33333333333, ans=0.07 +2024-09-01 04:18:43,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 04:19:10,122 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 50, loss[loss=0.1958, simple_loss=0.217, pruned_loss=0.08734, over 18976.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.1954, pruned_loss=0.07605, over 828010.25 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 4.0 +2024-09-01 04:19:25,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112618.66666666667, ans=0.1 +2024-09-01 04:19:38,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-09-01 04:19:53,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-09-01 04:19:58,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.67 vs. limit=15.0 +2024-09-01 04:20:16,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=112725.33333333333, ans=0.025 +2024-09-01 04:20:33,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.70 vs. limit=15.0 +2024-09-01 04:20:36,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.17 vs. limit=15.0 +2024-09-01 04:21:10,111 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 100, loss[loss=0.1588, simple_loss=0.182, pruned_loss=0.0678, over 19118.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.1947, pruned_loss=0.07491, over 1474453.83 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:21:15,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 04:21:17,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-09-01 04:21:24,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=112885.33333333333, ans=0.07 +2024-09-01 04:21:38,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-09-01 04:21:40,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.21 vs. 
limit=15.0 +2024-09-01 04:21:54,610 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.501e+02 1.584e+02 1.820e+02 2.268e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-09-01 04:22:00,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=112992.0, ans=0.025 +2024-09-01 04:22:06,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=112992.0, ans=0.125 +2024-09-01 04:22:11,531 INFO [dysarthria_finetune.py:1435] (1/4) (72024064, 34072559616) +2024-09-01 04:22:11,532 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 04:22:11,605 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 04:22:26,680 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 0, loss[loss=0.2134, simple_loss=0.2294, pruned_loss=0.09876, over 18758.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2294, pruned_loss=0.09876, over 18758.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:22:26,681 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-09-01 04:22:50,267 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 20, validation: loss=0.1638, simple_loss=0.1875, pruned_loss=0.07, over 1073944.00 frames. +2024-09-01 04:22:50,268 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 14301MB +2024-09-01 04:22:58,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.88 vs. limit=15.0 +2024-09-01 04:23:06,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.89 vs. limit=5.0 +2024-09-01 04:23:11,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113093.33333333333, ans=0.125 +2024-09-01 04:23:39,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.80 vs. limit=8.0 +2024-09-01 04:24:22,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=113200.0, ans=0.0 +2024-09-01 04:24:57,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=113253.33333333333, ans=0.125 +2024-09-01 04:25:07,014 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 50, loss[loss=0.1729, simple_loss=0.1966, pruned_loss=0.07457, over 19069.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.193, pruned_loss=0.07585, over 828644.17 frames. 
], batch size: 102, lr: 9.91e-05, grad_scale: 4.0 +2024-09-01 04:26:18,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=113413.33333333333, ans=0.04949747468305833 +2024-09-01 04:26:50,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113466.66666666667, ans=0.2 +2024-09-01 04:27:02,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=113520.0, ans=0.125 +2024-09-01 04:27:05,364 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.522e+02 1.605e+02 1.869e+02 2.652e+02, threshold=3.210e+02, percent-clipped=0.0 +2024-09-01 04:27:27,154 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 100, loss[loss=0.1396, simple_loss=0.1607, pruned_loss=0.05925, over 19104.00 frames. ], tot_loss[loss=0.17, simple_loss=0.1913, pruned_loss=0.07439, over 1473557.06 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:28:00,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=113626.66666666667, ans=0.0 +2024-09-01 04:28:06,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 04:28:26,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=113680.0, ans=0.0 +2024-09-01 04:28:43,103 INFO [dysarthria_finetune.py:1435] (1/4) (13755940864, 34072559616) +2024-09-01 04:28:43,103 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-09-01 04:28:43,134 INFO [dysarthria_finetune.py:1440] (1/4) (29300031488, 34072559616) +2024-09-01 04:28:43,134 INFO [dysarthria_finetune.py:1442] (1/4) Done! 
diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-2 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-2 new file mode 100644 index 0000000000000000000000000000000000000000..2ebac7c714efbce8c2788e0597f4e5edc242ecdf --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-2 @@ -0,0 +1,544 @@ +2024-08-31 13:16:10,955 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-31 13:16:10,986 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-31 13:16:10,986 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-31 13:16:11,955 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616) +2024-08-31 13:16:11,956 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': 
True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-31 13:16:14,948 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 66110931 +2024-08-31 13:17:58,666 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-31 13:18:23,852 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-31 13:20:29,532 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-31 13:20:34,906 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 13:20:35,066 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-31 13:20:35,066 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-31 13:20:35,066 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-31 13:20:35,067 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-31 13:20:35,067 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-31 13:20:35,147 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-31 13:20:36,077 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-31 13:20:36,078 INFO [dysarthria_asr_datamodule.py:501] (2/4) About to get dev cuts +2024-08-31 13:20:36,079 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-31 13:20:36,396 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-31 13:20:36,397 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:44:09,211 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=4.96 vs. limit=5.0 +2024-08-31 13:44:10,210 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.49 vs. limit=7.5 +2024-08-31 13:44:14,874 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-31 13:45:00,401 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.53 vs. limit=7.5 +2024-08-31 13:45:00,957 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-31 13:47:50,038 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-31 13:47:52,427 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-31 13:50:16,266 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.99 vs. limit=3.0 +2024-08-31 13:50:20,328 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-31 13:50:22,577 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 12660MB +2024-08-31 13:51:23,098 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. ], tot_loss[loss=0.3828, simple_loss=0.3613, pruned_loss=0.2142, over 18533.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 13:51:23,099 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-31 14:29:03,504 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-31 14:29:03,505 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19764MB +2024-08-31 15:07:09,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-31 15:19:05,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=38.72 vs. limit=15.0 +2024-08-31 15:24:12,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100053.33333333333, ans=0.0 +2024-08-31 15:24:16,876 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-31 15:52:39,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.96 vs. limit=15.0 +2024-08-31 15:52:42,142 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.685e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-31 16:23:44,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.29 vs. limit=15.0 +2024-08-31 16:29:14,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.00 vs. limit=15.0 +2024-08-31 16:32:13,906 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.192e+02 7.846e+02 8.685e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-31 16:38:41,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.02 vs. limit=15.0 +2024-08-31 17:00:11,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.24 vs. limit=22.5 +2024-08-31 17:02:30,137 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.4013, simple_loss=0.3799, pruned_loss=0.2136, over 19018.00 frames. ], tot_loss[loss=0.3889, simple_loss=0.367, pruned_loss=0.2174, over 827419.58 frames. 
], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 17:07:59,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=100266.66666666667, ans=0.04949747468305833 +2024-08-31 17:10:25,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-31 18:05:58,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-08-31 18:12:52,331 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.982e+02 7.682e+02 8.607e+02 1.055e+03, threshold=1.536e+03, percent-clipped=0.0 +2024-08-31 18:12:52,368 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3537, simple_loss=0.3356, pruned_loss=0.1829, over 19117.00 frames. ], tot_loss[loss=0.3755, simple_loss=0.3547, pruned_loss=0.2061, over 1475925.13 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-31 18:34:00,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=100586.66666666667, ans=0.1 +2024-08-31 18:36:32,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.81 vs. limit=15.0 +2024-08-31 18:42:25,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-08-31 18:44:19,277 INFO [dysarthria_finetune.py:1435] (2/4) (10291445760, 34072559616) +2024-08-31 18:44:19,278 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-31 18:44:19,335 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-08-31 18:46:01,814 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 0, loss[loss=0.3342, simple_loss=0.3158, pruned_loss=0.1813, over 18502.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3158, pruned_loss=0.1813, over 18502.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-31 18:46:01,814 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-31 19:10:08,813 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 2, validation: loss=0.3307, simple_loss=0.3141, pruned_loss=0.1687, over 1073944.00 frames. 
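The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...` lines report gradient-norm clipping statistics. In most of these warnings the printed threshold is exactly `Clipping_scale` (2.0) times the median of the reported quartiles (e.g. 2.0 × 4.308e+02 = 8.616e+02 above), and `percent-clipped=0.0` means no recent batch actually exceeded it. A rough sketch of that bookkeeping, as an illustration of the idea rather than the verbatim icefall optimizer code:

```python
import torch

# Illustrative quartile-based clipping (an assumption matching the log format,
# not icefall's actual optim.py): keep a buffer of recent total gradient
# norms, report their quartiles, and clip against clipping_scale * median.
clipping_scale = 2.0
recent_norms = torch.tensor([280.2, 378.7, 430.8, 492.9, 612.2])

quartiles = torch.quantile(recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
threshold = clipping_scale * quartiles[2]       # 2.0 * median = 8.616e+02, as logged

grad_norm = torch.tensor(950.0)
scale = torch.clamp(threshold / grad_norm, max=1.0)  # factor applied to the grads
was_clipped = grad_norm > threshold                  # feeds "percent-clipped"
print(quartiles, threshold.item(), scale.item(), was_clipped.item())
```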
+2024-08-31 19:10:08,814 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-08-31 19:45:24,956 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 19:45:32,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=100736.0, ans=0.125 +2024-08-31 19:46:04,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100736.0, ans=0.125 +2024-08-31 19:51:06,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-31 20:01:36,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=100789.33333333333, ans=0.125 +2024-08-31 20:15:05,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=100842.66666666667, ans=0.0 +2024-08-31 20:18:22,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.34 vs. limit=6.0 +2024-08-31 20:20:09,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-31 20:23:29,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100896.0, ans=0.125 +2024-08-31 20:23:30,000 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=12.0 +2024-08-31 20:31:44,822 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 50, loss[loss=0.3633, simple_loss=0.3437, pruned_loss=0.1937, over 18952.00 frames. ], tot_loss[loss=0.3583, simple_loss=0.3386, pruned_loss=0.1935, over 829638.79 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-31 20:33:22,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=15.0 +2024-08-31 20:42:33,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=101002.66666666667, ans=0.0 +2024-08-31 21:01:03,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101056.0, ans=0.125 +2024-08-31 21:03:09,438 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.995e+02 5.661e+02 6.268e+02 7.321e+02, threshold=1.132e+03, percent-clipped=0.0 +2024-08-31 21:18:49,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-31 21:20:09,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=35.40 vs. 
limit=22.5 +2024-08-31 21:20:57,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=101109.33333333333, ans=0.2 +2024-08-31 21:37:51,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=101162.66666666667, ans=0.2 +2024-08-31 21:42:13,844 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 100, loss[loss=0.2879, simple_loss=0.2751, pruned_loss=0.1406, over 19108.00 frames. ], tot_loss[loss=0.3452, simple_loss=0.3268, pruned_loss=0.1827, over 1476292.15 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 4.0 +2024-08-31 21:42:47,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101216.0, ans=0.125 +2024-08-31 21:46:33,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101216.0, ans=0.125 +2024-08-31 21:47:44,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=101216.0, ans=0.125 +2024-08-31 22:08:22,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=101269.33333333333, ans=0.2 +2024-08-31 22:10:11,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101269.33333333333, ans=0.1 +2024-08-31 22:10:43,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=14.18 vs. limit=12.0 +2024-08-31 22:17:06,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=101322.66666666667, ans=0.0 +2024-08-31 22:21:40,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=101322.66666666667, ans=0.2 +2024-08-31 22:21:43,990 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-08-31 22:21:43,991 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-31 22:21:44,034 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-08-31 22:22:35,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-08-31 22:22:38,992 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 0, loss[loss=0.3511, simple_loss=0.3323, pruned_loss=0.186, over 18600.00 frames. ], tot_loss[loss=0.3511, simple_loss=0.3323, pruned_loss=0.186, over 18600.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 2.0 +2024-08-31 22:22:38,993 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-31 22:31:34,596 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 3, validation: loss=0.2979, simple_loss=0.2853, pruned_loss=0.1432, over 1073944.00 frames. 
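The many `ScheduledFloat: name=..., batch_count=..., ans=...` lines record scalar hyperparameters (skip rates, dropout probabilities, balancer probabilities) that the zipformer recipe anneals as a function of the global batch count; `ans` is the value currently in effect for the named sub-module. A toy re-implementation of the idea, assuming a piecewise-linear schedule in `batch_count` (the real class lives in icefall's `scaling.py` and differs in detail):

```python
class ScheduledFloat:
    """Toy version of a scalar hyperparameter that is piecewise-linear in the
    global batch count -- the quantity behind the "batch_count=..., ans=..."
    log lines above. Not the exact icefall implementation."""

    def __init__(self, *points: tuple[float, float]):
        self.points = sorted(points)  # (batch_count, value) breakpoints

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# e.g. a skip rate that decays from 0.5 to 0 over the first 16k batches:
skip_rate = ScheduledFloat((0.0, 0.5), (4000.0, 0.05), (16000.0, 0.0))
print(skip_rate(101_424.0))  # -> 0.0, consistent with many "ans=0.0" entries
```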
+2024-08-31 22:31:34,955 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-08-31 22:50:30,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=101424.0, ans=0.0 +2024-08-31 22:50:30,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=101424.0, ans=0.125 +2024-08-31 22:50:30,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.53 vs. limit=15.0 +2024-08-31 22:52:38,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=101424.0, ans=0.09899494936611666 +2024-08-31 22:53:17,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.02 vs. limit=15.0 +2024-08-31 23:00:56,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=101477.33333333333, ans=0.0 +2024-08-31 23:26:27,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101584.0, ans=0.1 +2024-08-31 23:30:40,086 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.802e+02 3.787e+02 4.308e+02 4.929e+02 6.122e+02, threshold=8.616e+02, percent-clipped=0.0 +2024-08-31 23:32:42,301 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 50, loss[loss=0.3083, simple_loss=0.2955, pruned_loss=0.1491, over 19168.00 frames. ], tot_loss[loss=0.3293, simple_loss=0.313, pruned_loss=0.1694, over 828229.52 frames. ], batch size: 103, lr: 8.08e-05, grad_scale: 1.0 +2024-08-31 23:35:36,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=101637.33333333333, ans=0.025 +2024-08-31 23:41:39,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101637.33333333333, ans=0.125 +2024-08-31 23:54:21,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=101690.66666666667, ans=0.0 +2024-09-01 00:00:05,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.52 vs. limit=15.0 +2024-09-01 00:02:14,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=101744.0, ans=0.0 +2024-09-01 00:03:32,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.59 vs. 
limit=6.0 +2024-09-01 00:07:29,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101797.33333333333, ans=0.0 +2024-09-01 00:12:55,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101850.66666666667, ans=0.125 +2024-09-01 00:13:17,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101850.66666666667, ans=0.1 +2024-09-01 00:17:08,370 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 100, loss[loss=0.2584, simple_loss=0.2495, pruned_loss=0.1197, over 19024.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3047, pruned_loss=0.1636, over 1476045.82 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 1.0 +2024-09-01 00:18:28,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=101904.0, ans=0.2 +2024-09-01 00:18:28,725 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.59 vs. limit=15.0 +2024-09-01 00:18:28,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.77 vs. limit=6.0 +2024-09-01 00:21:40,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=101957.33333333333, ans=0.0 +2024-09-01 00:22:17,901 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.89 vs. limit=15.0 +2024-09-01 00:26:12,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=102010.66666666667, ans=0.125 +2024-09-01 00:27:26,437 INFO [dysarthria_finetune.py:1435] (2/4) (10310320128, 34072559616) +2024-09-01 00:27:26,438 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 00:27:26,476 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 00:27:42,756 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 0, loss[loss=0.3144, simple_loss=0.2996, pruned_loss=0.1597, over 18618.00 frames. ], tot_loss[loss=0.3144, simple_loss=0.2996, pruned_loss=0.1597, over 18618.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 2.0 +2024-09-01 00:27:42,756 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 00:46:27,436 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 4, validation: loss=0.279, simple_loss=0.2687, pruned_loss=0.1325, over 1073944.00 frames. +2024-09-01 00:46:27,437 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 01:00:56,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102053.33333333333, ans=0.125 +2024-09-01 01:00:56,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.50 vs. 
limit=15.0 +2024-09-01 01:16:34,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-09-01 01:16:52,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.221e+02 3.659e+02 4.077e+02 5.349e+02, threshold=7.318e+02, percent-clipped=0.0 +2024-09-01 01:20:07,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.53 vs. limit=6.0 +2024-09-01 01:21:05,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=12.0 +2024-09-01 01:29:34,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.51 vs. limit=15.0 +2024-09-01 01:39:48,725 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 50, loss[loss=0.3026, simple_loss=0.2892, pruned_loss=0.1517, over 18961.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.2862, pruned_loss=0.1468, over 828488.26 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 0.5 +2024-09-01 01:43:03,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=102320.0, ans=0.2 +2024-09-01 01:43:14,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=102320.0, ans=0.07 +2024-09-01 01:45:34,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=102373.33333333333, ans=0.2 +2024-09-01 01:48:00,220 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:55:59,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=102480.0, ans=0.125 +2024-09-01 01:57:53,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=102533.33333333333, ans=0.025 +2024-09-01 01:58:37,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.73 vs. limit=15.0 +2024-09-01 01:59:02,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.68 vs. limit=15.0 +2024-09-01 02:00:16,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102533.33333333333, ans=0.0 +2024-09-01 02:01:07,739 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 100, loss[loss=0.2707, simple_loss=0.2609, pruned_loss=0.1302, over 19090.00 frames. ], tot_loss[loss=0.2952, simple_loss=0.283, pruned_loss=0.1455, over 1476821.49 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 1.0 +2024-09-01 02:02:19,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.37 vs. 
limit=15.0 +2024-09-01 02:09:39,920 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 2.859e+02 3.213e+02 3.589e+02 4.738e+02, threshold=6.426e+02, percent-clipped=0.0 +2024-09-01 02:10:16,953 INFO [dysarthria_finetune.py:1435] (2/4) (10280960000, 34072559616) +2024-09-01 02:10:16,953 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:10:16,994 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 02:10:37,119 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 0, loss[loss=0.2921, simple_loss=0.2801, pruned_loss=0.1447, over 18551.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.2801, pruned_loss=0.1447, over 18551.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 1.0 +2024-09-01 02:10:37,119 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:15:37,613 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 5, validation: loss=0.2588, simple_loss=0.2515, pruned_loss=0.1195, over 1073944.00 frames. +2024-09-01 02:15:37,613 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 02:18:43,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 02:19:22,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=102848.0, ans=0.2 +2024-09-01 02:20:12,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.32 vs. limit=15.0 +2024-09-01 02:21:02,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-09-01 02:21:07,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=102901.33333333333, ans=0.125 +2024-09-01 02:21:10,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.45 vs. limit=15.0 +2024-09-01 02:23:25,250 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 50, loss[loss=0.2753, simple_loss=0.2647, pruned_loss=0.1354, over 19027.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.2748, pruned_loss=0.1388, over 828775.72 frames. 
], batch size: 102, lr: 1.00e-04, grad_scale: 0.25 +2024-09-01 02:24:38,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=103061.33333333333, ans=0.025 +2024-09-01 02:25:52,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=103114.66666666667, ans=0.025 +2024-09-01 02:26:14,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=103114.66666666667, ans=0.0 +2024-09-01 02:26:22,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=103114.66666666667, ans=0.125 +2024-09-01 02:27:01,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103168.0, ans=0.125 +2024-09-01 02:27:14,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=103168.0, ans=0.125 +2024-09-01 02:27:28,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=103221.33333333333, ans=0.015 +2024-09-01 02:27:48,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 2.619e+02 2.908e+02 3.410e+02 5.061e+02, threshold=5.817e+02, percent-clipped=0.0 +2024-09-01 02:28:03,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=13.14 vs. limit=12.0 +2024-09-01 02:28:05,197 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 100, loss[loss=0.2857, simple_loss=0.2739, pruned_loss=0.143, over 19114.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.2677, pruned_loss=0.1352, over 1478197.42 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 0.5 +2024-09-01 02:29:09,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=103328.0, ans=0.2 +2024-09-01 02:29:32,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=103381.33333333333, ans=0.0 +2024-09-01 02:30:03,384 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 02:30:03,385 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:30:03,429 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 02:30:19,018 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 0, loss[loss=0.2542, simple_loss=0.2486, pruned_loss=0.1171, over 18783.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.2486, pruned_loss=0.1171, over 18783.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:30:19,018 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:30:42,394 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 6, validation: loss=0.247, simple_loss=0.2415, pruned_loss=0.1137, over 1073944.00 frames. 
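The `Whitening: name=..., metric=X vs. limit=Y` lines compare a measured "whiteness" statistic of a layer's activations against that module's configured limit; the whitening regularizer only pushes gradients back when the statistic is out of bounds, which is why these lines surface intermittently. One plausible form of such a statistic, shown purely for illustration (an assumption, not the exact `scaling.py` code), measures the spread of the feature-covariance eigenvalues:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # Illustrative "whiteness" statistic (an assumption): mean(eig^2) over
    # mean(eig)^2 of the per-group feature covariance eigenvalues. Exactly
    # 1.0 when the covariance is proportional to I (perfectly white), and
    # larger when a few directions dominate.
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n          # (num_groups, c/g, c/g)
    eigs = torch.linalg.eigvalsh(cov)
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

feats = torch.randn(10000, 64)               # i.i.d. features -> nearly white
print(whitening_metric(feats))               # close to 1.0, well under a limit
```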
+2024-09-01 02:30:51,758 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 02:31:53,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=103424.0, ans=0.125 +2024-09-01 02:32:01,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.83 vs. limit=6.0 +2024-09-01 02:32:55,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=103530.66666666667, ans=0.125 +2024-09-01 02:33:05,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=103584.0, ans=0.0 +2024-09-01 02:33:31,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=103637.33333333333, ans=0.025 +2024-09-01 02:33:49,369 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 50, loss[loss=0.2716, simple_loss=0.2642, pruned_loss=0.129, over 19006.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.2514, pruned_loss=0.1224, over 828020.78 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 0.5 +2024-09-01 02:33:52,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-09-01 02:34:07,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103690.66666666667, ans=0.1 +2024-09-01 02:34:17,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103744.0, ans=0.1 +2024-09-01 02:34:31,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=23.35 vs. limit=22.5 +2024-09-01 02:34:40,111 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.072e+02 2.401e+02 2.633e+02 2.975e+02 4.049e+02, threshold=5.266e+02, percent-clipped=0.0 +2024-09-01 02:35:28,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=103850.66666666667, ans=0.025 +2024-09-01 02:35:55,580 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 100, loss[loss=0.2783, simple_loss=0.27, pruned_loss=0.1347, over 19060.00 frames. ], tot_loss[loss=0.259, simple_loss=0.2519, pruned_loss=0.1234, over 1475525.13 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:36:10,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=103957.33333333333, ans=0.0 +2024-09-01 02:36:48,558 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:36:58,495 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 02:36:58,495 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:36:58,549 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 02:37:37,099 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 0, loss[loss=0.2478, simple_loss=0.2452, pruned_loss=0.1115, over 18435.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.2452, pruned_loss=0.1115, over 18435.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:37:37,099 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:38:00,928 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 7, validation: loss=0.2303, simple_loss=0.2284, pruned_loss=0.1027, over 1073944.00 frames. +2024-09-01 02:38:00,928 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 02:38:30,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=104160.0, ans=0.125 +2024-09-01 02:38:48,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=104213.33333333333, ans=0.0 +2024-09-01 02:39:19,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.14 vs. limit=22.5 +2024-09-01 02:39:23,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.18 vs. limit=15.0 +2024-09-01 02:39:39,118 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.248e+02 2.388e+02 2.643e+02 3.863e+02, threshold=4.776e+02, percent-clipped=0.0 +2024-09-01 02:39:54,674 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 50, loss[loss=0.2454, simple_loss=0.2411, pruned_loss=0.1147, over 18970.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.2498, pruned_loss=0.1215, over 828175.40 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:40:24,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=104426.66666666667, ans=0.125 +2024-09-01 02:41:04,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104533.33333333333, ans=0.125 +2024-09-01 02:41:19,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=104586.66666666667, ans=0.0 +2024-09-01 02:41:38,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=104586.66666666667, ans=0.125 +2024-09-01 02:41:38,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104586.66666666667, ans=0.125 +2024-09-01 02:41:41,952 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 100, loss[loss=0.2287, simple_loss=0.2303, pruned_loss=0.09875, over 19065.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.2443, pruned_loss=0.1179, over 1476190.89 frames. 
], batch size: 133, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:41:45,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=104640.0, ans=0.125 +2024-09-01 02:42:02,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104693.33333333333, ans=0.1 +2024-09-01 02:42:06,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=104693.33333333333, ans=0.125 +2024-09-01 02:42:39,952 INFO [dysarthria_finetune.py:1435] (2/4) (10280960000, 34072559616) +2024-09-01 02:42:39,953 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:42:40,003 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 02:42:52,884 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 0, loss[loss=0.2157, simple_loss=0.2177, pruned_loss=0.09301, over 18635.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2177, pruned_loss=0.09301, over 18635.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 4.0 +2024-09-01 02:42:52,884 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:43:16,310 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 8, validation: loss=0.2224, simple_loss=0.2225, pruned_loss=0.09892, over 1073944.00 frames. +2024-09-01 02:43:16,311 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 02:43:29,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.78 vs. limit=6.0 +2024-09-01 02:43:35,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.55 vs. limit=15.0 +2024-09-01 02:43:42,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104842.66666666667, ans=0.125 +2024-09-01 02:43:51,414 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.041e+02 2.195e+02 2.485e+02 3.530e+02, threshold=4.390e+02, percent-clipped=0.0 +2024-09-01 02:43:52,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=104842.66666666667, ans=0.0 +2024-09-01 02:44:24,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=104949.33333333333, ans=0.025 +2024-09-01 02:44:37,030 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:45:06,421 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 50, loss[loss=0.24, simple_loss=0.241, pruned_loss=0.1067, over 19000.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2382, pruned_loss=0.1131, over 827531.12 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:46:34,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=105109.33333333333, ans=0.125 +2024-09-01 02:46:41,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105109.33333333333, ans=0.125 +2024-09-01 02:46:50,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105162.66666666667, ans=0.1 +2024-09-01 02:47:03,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.30 vs. limit=15.0 +2024-09-01 02:47:11,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.98 vs. limit=15.0 +2024-09-01 02:47:55,828 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 100, loss[loss=0.2265, simple_loss=0.228, pruned_loss=0.1008, over 19093.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2328, pruned_loss=0.1084, over 1475468.79 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:48:14,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=105322.66666666667, ans=0.05 +2024-09-01 02:48:27,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105376.0, ans=0.0 +2024-09-01 02:48:34,259 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 1.930e+02 2.062e+02 2.246e+02 3.148e+02, threshold=4.124e+02, percent-clipped=0.0 +2024-09-01 02:48:55,542 INFO [dysarthria_finetune.py:1435] (2/4) (10314514432, 34072559616) +2024-09-01 02:48:55,543 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:48:55,575 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 02:49:09,716 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 0, loss[loss=0.2438, simple_loss=0.2401, pruned_loss=0.1161, over 18461.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.2401, pruned_loss=0.1161, over 18461.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:49:09,716 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:49:40,375 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 9, validation: loss=0.211, simple_loss=0.2147, pruned_loss=0.09159, over 1073944.00 frames. +2024-09-01 02:49:40,375 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 02:49:42,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=13.27 vs. limit=12.0 +2024-09-01 02:49:56,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.71 vs. 
limit=6.0 +2024-09-01 02:50:12,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=105477.33333333333, ans=10.0 +2024-09-01 02:50:26,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=105530.66666666667, ans=0.125 +2024-09-01 02:50:33,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=15.0 +2024-09-01 02:50:51,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105584.0, ans=0.1 +2024-09-01 02:50:55,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105584.0, ans=0.125 +2024-09-01 02:51:27,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:51:45,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.47 vs. limit=15.0 +2024-09-01 02:51:45,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.56 vs. limit=10.0 +2024-09-01 02:52:30,678 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 50, loss[loss=0.2505, simple_loss=0.2527, pruned_loss=0.1126, over 18943.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2279, pruned_loss=0.1042, over 826909.81 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:53:34,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=105797.33333333333, ans=0.125 +2024-09-01 02:54:02,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:54:14,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=105904.0, ans=0.125 +2024-09-01 02:54:17,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=105904.0, ans=0.2 +2024-09-01 02:54:35,251 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 1.850e+02 1.979e+02 2.143e+02 2.885e+02, threshold=3.959e+02, percent-clipped=0.0 +2024-09-01 02:54:39,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=105957.33333333333, ans=0.0 +2024-09-01 02:55:01,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105957.33333333333, ans=0.125 +2024-09-01 02:55:06,133 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 100, loss[loss=0.2263, simple_loss=0.2316, pruned_loss=0.09879, over 19136.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2268, pruned_loss=0.1027, over 1474643.82 frames. 
], batch size: 133, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 02:55:25,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:55:26,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106010.66666666667, ans=0.125 +2024-09-01 02:55:48,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=106064.0, ans=0.0 +2024-09-01 02:56:27,087 INFO [dysarthria_finetune.py:1435] (2/4) (10312417280, 34072559616) +2024-09-01 02:56:27,088 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 02:56:27,131 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 02:56:40,077 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 0, loss[loss=0.2627, simple_loss=0.2586, pruned_loss=0.1271, over 18505.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.2586, pruned_loss=0.1271, over 18505.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 02:56:40,078 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 02:57:03,512 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 10, validation: loss=0.2075, simple_loss=0.2129, pruned_loss=0.09054, over 1073944.00 frames. +2024-09-01 02:57:03,512 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 02:57:11,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106165.33333333333, ans=0.125 +2024-09-01 02:57:38,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.71 vs. limit=12.0 +2024-09-01 02:57:50,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106272.0, ans=0.125 +2024-09-01 02:58:10,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.39 vs. limit=22.5 +2024-09-01 02:58:16,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:58:55,056 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 50, loss[loss=0.2382, simple_loss=0.2379, pruned_loss=0.1121, over 19019.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2238, pruned_loss=0.1007, over 827816.98 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:59:07,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=106432.0, ans=0.0 +2024-09-01 02:59:20,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.769e+02 1.897e+02 2.105e+02 2.891e+02, threshold=3.793e+02, percent-clipped=0.0 +2024-09-01 02:59:30,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=106485.33333333333, ans=0.0 +2024-09-01 02:59:48,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=106538.66666666667, ans=0.125 +2024-09-01 02:59:54,892 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.66 vs. limit=15.0 +2024-09-01 03:00:01,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.65 vs. limit=22.5 +2024-09-01 03:00:15,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=106592.0, ans=0.125 +2024-09-01 03:00:33,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=106645.33333333333, ans=0.0 +2024-09-01 03:00:42,965 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 100, loss[loss=0.1829, simple_loss=0.1932, pruned_loss=0.07573, over 19070.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2201, pruned_loss=0.09702, over 1475821.74 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 03:01:46,973 INFO [dysarthria_finetune.py:1435] (2/4) (10280960000, 34072559616) +2024-09-01 03:01:46,974 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:01:47,026 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:08:00,751 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 0, loss[loss=0.264, simple_loss=0.2522, pruned_loss=0.1368, over 18525.00 frames. ], tot_loss[loss=0.264, simple_loss=0.2522, pruned_loss=0.1368, over 18525.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:08:00,751 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:08:32,545 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 11, validation: loss=0.2002, simple_loss=0.2088, pruned_loss=0.08618, over 1073944.00 frames. 
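In the `Epoch N, batch M` lines, `loss` is the combined pruned-transducer objective, `simple_loss` comes from the linear "simple" joiner used to derive pruning bounds, and `pruned_loss` from the full joiner evaluated only inside the pruned lattice region. The trailing `tot_loss[... over X frames.]` is a running per-frame average, and the fractional frame totals (e.g. 827285.47) suggest the numerator and frame count are decayed together each batch rather than summed outright. A small sketch of that bookkeeping, assuming an exponential decay tied to the `reset_interval: 200` from the config above (the exact rule in `dysarthria_finetune.py` may differ):

```python
# Sketch of the "tot_loss[...] over N frames" bookkeeping. Assumption: the
# loss sum and the frame count decay together each batch, which would make
# the printed frame totals fractional while keeping the value a per-frame
# average. The real rule in dysarthria_finetune.py may differ in detail.
decay = 1.0 - 1.0 / 200      # tied (speculatively) to reset_interval=200
tot_frames = tot_loss_sum = 0.0

for batch_frames, per_frame_loss in [(19068, 0.2221), (18900, 0.2150), (19200, 0.2083)]:
    tot_frames = tot_frames * decay + batch_frames
    tot_loss_sum = tot_loss_sum * decay + per_frame_loss * batch_frames
    print(f"tot_loss={tot_loss_sum / tot_frames:.4f}, over {tot_frames:.2f} frames")
```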
+2024-09-01 03:08:32,545 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:09:37,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106906.66666666667, ans=0.1 +2024-09-01 03:10:00,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=106960.0, ans=0.125 +2024-09-01 03:10:26,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106960.0, ans=0.1 +2024-09-01 03:10:32,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=107013.33333333333, ans=0.0 +2024-09-01 03:10:42,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 1.721e+02 1.824e+02 2.016e+02 2.682e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-09-01 03:11:22,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 03:11:29,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=107066.66666666667, ans=0.025 +2024-09-01 03:12:03,920 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 50, loss[loss=0.2221, simple_loss=0.2243, pruned_loss=0.1036, over 19068.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2177, pruned_loss=0.09419, over 827285.47 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:12:43,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=107173.33333333333, ans=0.125 +2024-09-01 03:12:48,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.93 vs. limit=22.5 +2024-09-01 03:13:53,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107173.33333333333, ans=0.1 +2024-09-01 03:14:36,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=107226.66666666667, ans=0.125 +2024-09-01 03:15:56,744 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 100, loss[loss=0.1955, simple_loss=0.204, pruned_loss=0.08595, over 19059.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2156, pruned_loss=0.09201, over 1474809.38 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:16:46,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=107440.0, ans=0.125 +2024-09-01 03:17:08,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0 +2024-09-01 03:17:56,538 INFO [dysarthria_finetune.py:1435] (2/4) (10280960000, 34072559616) +2024-09-01 03:17:56,539 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:17:56,580 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:18:09,578 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 0, loss[loss=0.2182, simple_loss=0.2236, pruned_loss=0.09999, over 18505.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2236, pruned_loss=0.09999, over 18505.00 frames. 
], batch size: 65, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:18:09,579 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:18:33,054 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 12, validation: loss=0.1929, simple_loss=0.2049, pruned_loss=0.0821, over 1073944.00 frames. +2024-09-01 03:18:33,055 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:18:54,482 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.518e+02 1.683e+02 1.764e+02 1.920e+02 2.754e+02, threshold=3.529e+02, percent-clipped=0.0 +2024-09-01 03:18:59,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=107541.33333333333, ans=0.125 +2024-09-01 03:19:05,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=15.0 +2024-09-01 03:19:10,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.26 vs. limit=10.0 +2024-09-01 03:19:18,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.43 vs. limit=12.0 +2024-09-01 03:20:40,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107754.66666666667, ans=0.1 +2024-09-01 03:21:03,699 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 50, loss[loss=0.1801, simple_loss=0.2048, pruned_loss=0.06629, over 18979.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2128, pruned_loss=0.08997, over 828348.40 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:22:18,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.52 vs. limit=15.0 +2024-09-01 03:22:43,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=107914.66666666667, ans=0.2 +2024-09-01 03:23:19,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=107968.0, ans=22.5 +2024-09-01 03:23:36,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=108021.33333333333, ans=0.05 +2024-09-01 03:23:38,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=108021.33333333333, ans=0.0 +2024-09-01 03:23:46,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.09 vs. limit=22.5 +2024-09-01 03:24:01,730 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 100, loss[loss=0.1744, simple_loss=0.1978, pruned_loss=0.06586, over 19089.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2127, pruned_loss=0.08821, over 1475248.85 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:24:21,999 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.650e+02 1.753e+02 1.928e+02 2.697e+02, threshold=3.507e+02, percent-clipped=0.0 +2024-09-01 03:24:26,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=108128.0, ans=0.0 +2024-09-01 03:24:50,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=108181.33333333333, ans=0.125 +2024-09-01 03:25:11,657 INFO [dysarthria_finetune.py:1435] (2/4) (10280960000, 34072559616) +2024-09-01 03:25:11,658 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:25:11,708 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:25:24,783 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 0, loss[loss=0.2224, simple_loss=0.2284, pruned_loss=0.1034, over 18540.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2284, pruned_loss=0.1034, over 18540.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:25:24,783 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:25:48,262 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 13, validation: loss=0.1886, simple_loss=0.2026, pruned_loss=0.08078, over 1073944.00 frames. +2024-09-01 03:25:48,262 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:26:17,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=108282.66666666667, ans=0.125 +2024-09-01 03:26:23,174 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:27:08,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=108389.33333333333, ans=0.2 +2024-09-01 03:27:49,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=108442.66666666667, ans=0.035 +2024-09-01 03:27:54,660 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 50, loss[loss=0.2188, simple_loss=0.2283, pruned_loss=0.09965, over 18984.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2118, pruned_loss=0.08726, over 829065.08 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 1.0 +2024-09-01 03:28:37,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.26 vs. limit=15.0 +2024-09-01 03:29:02,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108656.0, ans=0.0 +2024-09-01 03:29:07,380 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.617e+02 1.723e+02 2.007e+02 2.594e+02, threshold=3.446e+02, percent-clipped=0.0 +2024-09-01 03:29:21,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=108656.0, ans=0.125 +2024-09-01 03:29:32,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.25 vs. 
limit=15.0 +2024-09-01 03:29:42,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-09-01 03:29:42,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-09-01 03:29:46,168 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 100, loss[loss=0.1808, simple_loss=0.2002, pruned_loss=0.0749, over 19116.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2129, pruned_loss=0.08897, over 1477011.25 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 2.0 +2024-09-01 03:30:00,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=108762.66666666667, ans=0.2 +2024-09-01 03:30:13,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108816.0, ans=0.0 +2024-09-01 03:30:24,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=108816.0, ans=10.0 +2024-09-01 03:30:28,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108869.33333333333, ans=0.1 +2024-09-01 03:30:46,141 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 03:30:46,142 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:30:46,175 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:30:59,024 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 0, loss[loss=0.2076, simple_loss=0.2113, pruned_loss=0.09931, over 18523.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2113, pruned_loss=0.09931, over 18523.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:30:59,025 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:31:19,198 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([3.9384, 4.1038, 3.9670, 4.0796, 4.1657, 3.8715, 3.9641, 3.6802], + device='cuda:2') +2024-09-01 03:31:23,179 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 14, validation: loss=0.1833, simple_loss=0.2, pruned_loss=0.07856, over 1073944.00 frames. +2024-09-01 03:31:23,180 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:31:51,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=108970.66666666667, ans=0.025 +2024-09-01 03:33:13,492 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 50, loss[loss=0.192, simple_loss=0.2094, pruned_loss=0.08317, over 18999.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2072, pruned_loss=0.08463, over 827850.18 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:33:16,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109184.0, ans=0.1 +2024-09-01 03:33:19,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.619e+02 1.722e+02 1.984e+02 2.668e+02, threshold=3.445e+02, percent-clipped=0.0 +2024-09-01 03:33:21,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=24.82 vs. limit=22.5 +2024-09-01 03:33:31,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=109184.0, ans=0.125 +2024-09-01 03:33:55,724 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:34:19,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.24 vs. limit=6.0 +2024-09-01 03:34:57,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.89 vs. limit=15.0 +2024-09-01 03:35:00,697 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 100, loss[loss=0.1792, simple_loss=0.2045, pruned_loss=0.07281, over 19059.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2078, pruned_loss=0.08484, over 1475617.37 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 8.0 +2024-09-01 03:35:02,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0 +2024-09-01 03:35:43,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=109557.33333333333, ans=0.125 +2024-09-01 03:35:43,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=109557.33333333333, ans=0.05 +2024-09-01 03:35:46,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.23 vs. limit=15.0 +2024-09-01 03:36:00,195 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 03:36:00,197 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:36:00,232 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:36:14,209 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 0, loss[loss=0.2441, simple_loss=0.2491, pruned_loss=0.1178, over 18678.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.2491, pruned_loss=0.1178, over 18678.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 4.0 +2024-09-01 03:36:14,210 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:36:45,406 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 15, validation: loss=0.1765, simple_loss=0.1963, pruned_loss=0.07531, over 1073944.00 frames. 
+2024-09-01 03:36:45,407 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:37:08,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-09-01 03:37:52,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=109712.0, ans=0.125 +2024-09-01 03:38:03,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.579e+02 1.672e+02 1.908e+02 2.431e+02, threshold=3.343e+02, percent-clipped=0.0 +2024-09-01 03:38:18,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=109765.33333333333, ans=0.125 +2024-09-01 03:38:26,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=109765.33333333333, ans=0.0 +2024-09-01 03:38:29,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109765.33333333333, ans=0.125 +2024-09-01 03:39:14,736 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 50, loss[loss=0.2063, simple_loss=0.2281, pruned_loss=0.08976, over 18994.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2037, pruned_loss=0.08233, over 827605.34 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 4.0 +2024-09-01 03:39:52,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.40 vs. limit=15.0 +2024-09-01 03:40:10,289 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-09-01 03:40:15,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=109925.33333333333, ans=0.015 +2024-09-01 03:40:36,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=109978.66666666667, ans=0.125 +2024-09-01 03:40:41,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=109978.66666666667, ans=0.125 +2024-09-01 03:40:49,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=110032.0, ans=15.0 +2024-09-01 03:41:13,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=110032.0, ans=0.1 +2024-09-01 03:41:20,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110032.0, ans=0.125 +2024-09-01 03:41:54,237 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 100, loss[loss=0.172, simple_loss=0.198, pruned_loss=0.07123, over 19062.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2033, pruned_loss=0.08134, over 1475114.53 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 8.0 +2024-09-01 03:42:02,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=110138.66666666667, ans=0.0 +2024-09-01 03:42:34,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.44 vs. 
limit=22.5 +2024-09-01 03:43:09,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110245.33333333333, ans=0.125 +2024-09-01 03:43:21,356 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.548e+02 1.650e+02 1.862e+02 2.617e+02, threshold=3.300e+02, percent-clipped=0.0 +2024-09-01 03:43:24,583 INFO [dysarthria_finetune.py:1435] (2/4) (10280960000, 34072559616) +2024-09-01 03:43:24,584 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:43:24,618 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:43:40,085 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 0, loss[loss=0.2076, simple_loss=0.2188, pruned_loss=0.09746, over 18504.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2188, pruned_loss=0.09746, over 18504.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:43:40,085 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:44:25,981 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 16, validation: loss=0.1763, simple_loss=0.1967, pruned_loss=0.07691, over 1073944.00 frames. +2024-09-01 03:44:25,981 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:44:37,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.03 vs. limit=5.0 +2024-09-01 03:44:45,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.61 vs. limit=22.5 +2024-09-01 03:44:56,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110293.33333333333, ans=0.125 +2024-09-01 03:45:09,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.55 vs. limit=15.0 +2024-09-01 03:45:14,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=110346.66666666667, ans=0.2 +2024-09-01 03:45:29,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=110346.66666666667, ans=0.2 +2024-09-01 03:45:36,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110346.66666666667, ans=0.125 +2024-09-01 03:46:19,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110400.0, ans=0.125 +2024-09-01 03:46:32,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110400.0, ans=0.1 +2024-09-01 03:48:06,678 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:48:09,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110506.66666666667, ans=0.0 +2024-09-01 03:48:21,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.07 vs. 
limit=15.0 +2024-09-01 03:48:37,720 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 50, loss[loss=0.1825, simple_loss=0.2061, pruned_loss=0.07919, over 19044.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.1999, pruned_loss=0.0802, over 828171.03 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0 +2024-09-01 03:49:19,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=110613.33333333333, ans=0.0 +2024-09-01 03:50:08,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.73 vs. limit=12.0 +2024-09-01 03:50:11,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=110666.66666666667, ans=0.125 +2024-09-01 03:52:26,726 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.555e+02 1.657e+02 1.896e+02 2.445e+02, threshold=3.314e+02, percent-clipped=0.0 +2024-09-01 03:52:30,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:52:31,279 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 100, loss[loss=0.1497, simple_loss=0.182, pruned_loss=0.0587, over 19090.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.1998, pruned_loss=0.07951, over 1476933.27 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:52:55,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=110826.66666666667, ans=0.025 +2024-09-01 03:53:40,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110880.0, ans=0.1 +2024-09-01 03:54:09,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110933.33333333333, ans=0.1 +2024-09-01 03:54:10,882 INFO [dysarthria_finetune.py:1435] (2/4) (10278862848, 34072559616) +2024-09-01 03:54:10,883 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 03:54:10,927 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 03:54:38,956 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 0, loss[loss=0.1794, simple_loss=0.2036, pruned_loss=0.07765, over 18336.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2036, pruned_loss=0.07765, over 18336.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:54:38,956 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 03:54:45,552 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5097, 2.9556, 2.7725, 2.6922], device='cuda:2') +2024-09-01 03:55:19,881 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 17, validation: loss=0.1671, simple_loss=0.1912, pruned_loss=0.07151, over 1073944.00 frames. 
+2024-09-01 03:55:19,882 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 03:56:27,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=111034.66666666667, ans=0.2 +2024-09-01 03:57:04,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111088.0, ans=0.0 +2024-09-01 03:58:10,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111194.66666666667, ans=0.125 +2024-09-01 03:58:57,673 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 50, loss[loss=0.159, simple_loss=0.1978, pruned_loss=0.06004, over 19057.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.1984, pruned_loss=0.07808, over 827125.84 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0 +2024-09-01 04:00:43,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111301.33333333333, ans=0.125 +2024-09-01 04:00:57,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=111354.66666666667, ans=0.5 +2024-09-01 04:01:01,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=111354.66666666667, ans=0.0 +2024-09-01 04:01:02,151 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.555e+02 1.659e+02 1.888e+02 2.626e+02, threshold=3.319e+02, percent-clipped=0.0 +2024-09-01 04:01:12,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=111354.66666666667, ans=0.125 +2024-09-01 04:01:24,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=111354.66666666667, ans=0.125 +2024-09-01 04:01:48,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111408.0, ans=0.125 +2024-09-01 04:02:07,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111461.33333333333, ans=0.1 +2024-09-01 04:02:25,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=111461.33333333333, ans=0.125 +2024-09-01 04:02:48,337 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 100, loss[loss=0.1398, simple_loss=0.1729, pruned_loss=0.05339, over 19126.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.1971, pruned_loss=0.07661, over 1475165.47 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 04:03:58,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.43 vs. 
limit=15.0 +2024-09-01 04:04:19,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111621.33333333333, ans=0.125 +2024-09-01 04:05:21,534 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 04:05:21,536 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 04:05:21,587 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 04:05:29,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 04:05:38,881 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 0, loss[loss=0.2067, simple_loss=0.2219, pruned_loss=0.09575, over 18559.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2219, pruned_loss=0.09575, over 18559.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:05:38,882 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 04:06:14,838 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 18, validation: loss=0.1676, simple_loss=0.191, pruned_loss=0.07213, over 1073944.00 frames. +2024-09-01 04:06:14,838 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 04:06:36,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111669.33333333333, ans=0.0 +2024-09-01 04:08:59,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.87 vs. limit=15.0 +2024-09-01 04:09:42,035 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.516e+02 1.635e+02 1.895e+02 3.024e+02, threshold=3.269e+02, percent-clipped=0.0 +2024-09-01 04:10:10,275 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 50, loss[loss=0.195, simple_loss=0.2189, pruned_loss=0.08561, over 18975.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.1979, pruned_loss=0.07661, over 827610.12 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:10:19,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=111936.0, ans=0.125 +2024-09-01 04:10:23,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.52 vs. limit=15.0 +2024-09-01 04:10:41,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=111936.0, ans=0.125 +2024-09-01 04:11:15,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.67 vs. limit=6.0 +2024-09-01 04:13:56,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=12.0 +2024-09-01 04:14:13,997 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 100, loss[loss=0.1737, simple_loss=0.1935, pruned_loss=0.07697, over 19135.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.1957, pruned_loss=0.07525, over 1477220.69 frames. 
], batch size: 133, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:14:24,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.87 vs. limit=22.5 +2024-09-01 04:14:49,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112256.0, ans=0.125 +2024-09-01 04:15:09,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=112256.0, ans=0.0 +2024-09-01 04:15:13,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112309.33333333333, ans=0.1 +2024-09-01 04:15:56,108 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 04:15:56,108 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 04:15:56,154 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 04:16:15,964 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 0, loss[loss=0.1853, simple_loss=0.203, pruned_loss=0.08377, over 18438.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.203, pruned_loss=0.08377, over 18438.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:16:15,965 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 04:16:39,407 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 19, validation: loss=0.1638, simple_loss=0.1883, pruned_loss=0.06968, over 1073944.00 frames. +2024-09-01 04:16:39,407 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 04:16:45,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=19.75 vs. limit=15.0 +2024-09-01 04:17:15,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112405.33333333333, ans=0.125 +2024-09-01 04:17:22,804 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.531e+02 1.615e+02 1.818e+02 2.373e+02, threshold=3.231e+02, percent-clipped=0.0 +2024-09-01 04:17:38,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-09-01 04:18:32,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 04:18:41,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-09-01 04:19:09,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=112618.66666666667, ans=0.0 +2024-09-01 04:19:10,123 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 50, loss[loss=0.1781, simple_loss=0.2083, pruned_loss=0.07392, over 19013.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.1957, pruned_loss=0.07677, over 827262.88 frames. 
], batch size: 102, lr: 9.92e-05, grad_scale: 4.0 +2024-09-01 04:19:22,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=112618.66666666667, ans=0.0 +2024-09-01 04:20:04,628 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-09-01 04:20:14,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=112725.33333333333, ans=0.125 +2024-09-01 04:20:29,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.78 vs. limit=10.0 +2024-09-01 04:20:33,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=112778.66666666667, ans=0.0 +2024-09-01 04:20:42,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=112778.66666666667, ans=0.0 +2024-09-01 04:20:53,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=112832.0, ans=0.125 +2024-09-01 04:21:07,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=19.85 vs. limit=15.0 +2024-09-01 04:21:10,111 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 100, loss[loss=0.1362, simple_loss=0.1624, pruned_loss=0.05503, over 19169.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.1959, pruned_loss=0.07689, over 1475351.90 frames. ], batch size: 134, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:21:54,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.501e+02 1.584e+02 1.820e+02 2.268e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-09-01 04:22:11,534 INFO [dysarthria_finetune.py:1435] (2/4) (10314514432, 34072559616) +2024-09-01 04:22:11,535 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 04:22:11,577 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 04:22:26,683 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 0, loss[loss=0.1906, simple_loss=0.2139, pruned_loss=0.08364, over 18527.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2139, pruned_loss=0.08364, over 18527.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:22:26,684 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-09-01 04:22:50,260 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 20, validation: loss=0.1638, simple_loss=0.1875, pruned_loss=0.07, over 1073944.00 frames. +2024-09-01 04:22:50,261 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19796MB +2024-09-01 04:24:19,600 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.96 vs. limit=22.5 +2024-09-01 04:24:35,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.84 vs. 
limit=22.5 +2024-09-01 04:24:55,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=113253.33333333333, ans=0.0 +2024-09-01 04:24:57,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=113253.33333333333, ans=0.0 +2024-09-01 04:25:07,039 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 50, loss[loss=0.1885, simple_loss=0.2103, pruned_loss=0.08336, over 18968.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.1931, pruned_loss=0.07421, over 828106.18 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 4.0 +2024-09-01 04:25:41,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113360.0, ans=0.125 +2024-09-01 04:25:57,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=113360.0, ans=0.125 +2024-09-01 04:26:00,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=113360.0, ans=0.0 +2024-09-01 04:26:55,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.00 vs. limit=15.0 +2024-09-01 04:26:59,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=113520.0, ans=0.2 +2024-09-01 04:27:00,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.45 vs. limit=6.0 +2024-09-01 04:27:05,362 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.522e+02 1.605e+02 1.869e+02 2.652e+02, threshold=3.210e+02, percent-clipped=0.0 +2024-09-01 04:27:27,155 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 100, loss[loss=0.1403, simple_loss=0.1699, pruned_loss=0.05533, over 19074.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.1921, pruned_loss=0.07331, over 1476081.83 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:27:49,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=113573.33333333333, ans=0.2 +2024-09-01 04:28:00,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113626.66666666667, ans=0.125 +2024-09-01 04:28:21,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 04:28:43,142 INFO [dysarthria_finetune.py:1435] (2/4) (10283057152, 34072559616) +2024-09-01 04:28:43,143 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-09-01 04:28:43,165 INFO [dysarthria_finetune.py:1440] (2/4) (29109190656, 34072559616) +2024-09-01 04:28:43,166 INFO [dysarthria_finetune.py:1442] (2/4) Done! 
diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-3 b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-3 new file mode 100644 index 0000000000000000000000000000000000000000..40680a4b55aa6d80db56d7e4b494707b787b5273 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/log/log-train-2024-08-31-13-16-10-3 @@ -0,0 +1,545 @@ +2024-08-31 13:16:10,943 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-31 13:16:10,986 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-31 13:16:10,986 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-31 13:16:11,947 INFO [dysarthria_finetune.py:1219] (3/4) (32783400960, 34072559616) +2024-08-31 13:16:11,947 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-31 13:16:13,232 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/4b/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': 
True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-31 13:16:13,233 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-31 13:16:14,935 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 66110931 +2024-08-31 13:16:14,936 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt +2024-08-31 13:18:23,851 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-31 13:20:29,534 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-31 13:20:29,665 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-31 13:20:29,919 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-31 13:20:29,920 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-31 13:20:31,921 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-31 13:20:32,874 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-31 13:20:32,876 INFO [dysarthria_asr_datamodule.py:501] (3/4) About to get dev cuts +2024-08-31 13:20:33,113 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-31 13:20:33,463 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-31 13:20:33,464 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:44:09,209 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=5.07 vs. limit=5.0 +2024-08-31 13:44:10,210 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.46 vs. limit=7.5 +2024-08-31 13:44:14,876 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-31 13:45:00,383 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.37 vs. limit=7.5 +2024-08-31 13:45:00,951 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-31 13:47:50,042 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-31 13:47:52,422 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-31 13:50:17,118 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.87 vs. limit=3.0 +2024-08-31 13:50:20,328 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-31 13:50:22,579 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 12655MB +2024-08-31 13:51:23,102 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3072, pruned_loss=0.1668, over 18634.00 frames. 
], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-31 13:51:23,103 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-31 14:29:03,503 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3678, simple_loss=0.3479, pruned_loss=0.1987, over 1073944.00 frames. +2024-08-31 14:29:03,504 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14320MB +2024-08-31 15:24:16,877 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.525e+02 8.969e+02 9.815e+02 1.002e+03 1.048e+03, threshold=3.926e+03, percent-clipped=0.0 +2024-08-31 15:49:37,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=100053.33333333333, ans=0.2 +2024-08-31 15:52:42,133 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.161e+02 8.685e+02 9.467e+02 1.002e+03 1.055e+03, threshold=3.787e+03, percent-clipped=0.0 +2024-08-31 16:00:43,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.01 vs. limit=15.0 +2024-08-31 16:20:00,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=100160.0, ans=0.2 +2024-08-31 16:22:14,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100160.0, ans=0.125 +2024-08-31 16:23:54,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=100160.0, ans=0.2 +2024-08-31 16:31:57,886 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:32:13,901 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.192e+02 7.846e+02 8.685e+02 9.467e+02 1.055e+03, threshold=3.474e+03, percent-clipped=0.0 +2024-08-31 16:54:29,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100213.33333333333, ans=0.0 +2024-08-31 17:02:30,134 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.4509, simple_loss=0.4255, pruned_loss=0.2502, over 19001.00 frames. ], tot_loss[loss=0.3942, simple_loss=0.3718, pruned_loss=0.2222, over 828973.50 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-31 17:08:28,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=44.12 vs. limit=22.5 +2024-08-31 17:11:49,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.06 vs. limit=22.5 +2024-08-31 17:23:51,440 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=17.48 vs. limit=15.0 +2024-08-31 17:26:14,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100320.0, ans=0.125 +2024-08-31 17:39:39,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=100373.33333333333, ans=0.125 +2024-08-31 17:57:52,208 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.41 vs. 
limit=15.0 +2024-08-31 17:58:07,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=100426.66666666667, ans=0.025 +2024-08-31 18:03:47,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=47.91 vs. limit=22.5 +2024-08-31 18:12:52,329 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.869e+02 6.982e+02 7.682e+02 8.607e+02 1.055e+03, threshold=1.536e+03, percent-clipped=0.0 +2024-08-31 18:12:52,367 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.358, simple_loss=0.3396, pruned_loss=0.1858, over 19146.00 frames. ], tot_loss[loss=0.3783, simple_loss=0.3572, pruned_loss=0.2086, over 1476162.18 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-31 18:34:03,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=100586.66666666667, ans=0.0 +2024-08-31 18:44:19,285 INFO [dysarthria_finetune.py:1435] (3/4) (13370064896, 34072559616) +2024-08-31 18:44:19,285 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-31 18:44:19,339 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-08-31 18:46:01,812 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 0, loss[loss=0.3531, simple_loss=0.3343, pruned_loss=0.1873, over 18501.00 frames. ], tot_loss[loss=0.3531, simple_loss=0.3343, pruned_loss=0.1873, over 18501.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-31 18:46:01,812 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-31 19:10:08,822 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 2, validation: loss=0.3307, simple_loss=0.3141, pruned_loss=0.1687, over 1073944.00 frames. +2024-08-31 19:10:08,823 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-08-31 19:50:38,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=100789.33333333333, ans=0.125 +2024-08-31 20:01:04,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-31 20:05:23,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.36 vs. limit=22.5 +2024-08-31 20:15:22,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100842.66666666667, ans=0.125 +2024-08-31 20:20:10,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=100896.0, ans=0.125 +2024-08-31 20:23:39,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100896.0, ans=0.125 +2024-08-31 20:23:39,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-31 20:31:44,811 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 50, loss[loss=0.3496, simple_loss=0.3328, pruned_loss=0.1758, over 18956.00 frames. ], tot_loss[loss=0.3548, simple_loss=0.3356, pruned_loss=0.1898, over 828460.00 frames. 
], batch size: 102, lr: 6.79e-05, grad_scale: 4.0 +2024-08-31 20:34:09,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.67 vs. limit=15.0 +2024-08-31 20:42:30,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.53 vs. limit=15.0 +2024-08-31 20:51:24,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101002.66666666667, ans=0.125 +2024-08-31 21:03:09,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 4.995e+02 5.661e+02 6.268e+02 7.321e+02, threshold=1.132e+03, percent-clipped=0.0 +2024-08-31 21:07:10,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=101056.0, ans=0.0 +2024-08-31 21:16:13,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101109.33333333333, ans=0.1 +2024-08-31 21:17:59,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-31 21:17:59,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-31 21:19:17,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.39 vs. limit=15.0 +2024-08-31 21:32:23,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.95 vs. limit=22.5 +2024-08-31 21:42:13,842 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 100, loss[loss=0.3303, simple_loss=0.3131, pruned_loss=0.173, over 19077.00 frames. ], tot_loss[loss=0.3422, simple_loss=0.3244, pruned_loss=0.1792, over 1476919.42 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 4.0 +2024-08-31 21:46:29,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=101216.0, ans=0.025 +2024-08-31 22:07:16,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.97 vs. limit=6.0 +2024-08-31 22:21:43,996 INFO [dysarthria_finetune.py:1435] (3/4) (13187612672, 34072559616) +2024-08-31 22:21:43,998 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-31 22:21:44,038 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-08-31 22:22:38,994 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 0, loss[loss=0.2899, simple_loss=0.2766, pruned_loss=0.1441, over 18579.00 frames. ], tot_loss[loss=0.2899, simple_loss=0.2766, pruned_loss=0.1441, over 18579.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 2.0 +2024-08-31 22:22:38,995 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-31 22:31:34,590 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 3, validation: loss=0.2979, simple_loss=0.2853, pruned_loss=0.1432, over 1073944.00 frames. 
+2024-08-31 22:31:34,954 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-08-31 23:30:40,081 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.802e+02 3.787e+02 4.308e+02 4.929e+02 6.122e+02, threshold=8.616e+02, percent-clipped=0.0 +2024-08-31 23:32:08,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.29 vs. limit=15.0 +2024-08-31 23:32:42,308 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 50, loss[loss=0.3383, simple_loss=0.3247, pruned_loss=0.1617, over 19113.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3092, pruned_loss=0.166, over 827781.85 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 1.0 +2024-08-31 23:54:25,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=28.44 vs. limit=22.5 +2024-09-01 00:04:57,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=101797.33333333333, ans=0.95 +2024-09-01 00:07:16,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0 +2024-09-01 00:13:15,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.00 vs. limit=15.0 +2024-09-01 00:17:08,367 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 100, loss[loss=0.3064, simple_loss=0.2918, pruned_loss=0.156, over 19145.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.301, pruned_loss=0.1605, over 1476240.06 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 1.0 +2024-09-01 00:18:15,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101904.0, ans=0.125 +2024-09-01 00:19:35,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101904.0, ans=0.1 +2024-09-01 00:25:35,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102010.66666666667, ans=0.1 +2024-09-01 00:25:35,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=102010.66666666667, ans=0.125 +2024-09-01 00:25:35,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102010.66666666667, ans=0.1 +2024-09-01 00:26:04,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=102010.66666666667, ans=0.125 +2024-09-01 00:27:08,600 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 00:27:26,416 INFO [dysarthria_finetune.py:1435] (3/4) (13179224064, 34072559616) +2024-09-01 00:27:26,417 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 00:27:26,456 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 00:27:42,728 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 0, loss[loss=0.3503, simple_loss=0.3295, pruned_loss=0.1922, over 18645.00 frames. 
], tot_loss[loss=0.3503, simple_loss=0.3295, pruned_loss=0.1922, over 18645.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 2.0 +2024-09-01 00:27:42,729 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 00:46:27,437 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 4, validation: loss=0.279, simple_loss=0.2687, pruned_loss=0.1325, over 1073944.00 frames. +2024-09-01 00:46:27,438 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 00:59:45,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.85 vs. limit=22.5 +2024-09-01 01:06:46,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0 +2024-09-01 01:16:41,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=102160.0, ans=0.0 +2024-09-01 01:16:52,151 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.221e+02 3.659e+02 4.077e+02 5.349e+02, threshold=7.318e+02, percent-clipped=0.0 +2024-09-01 01:25:25,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.91 vs. limit=15.0 +2024-09-01 01:27:41,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=102213.33333333333, ans=0.025 +2024-09-01 01:28:22,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=102266.66666666667, ans=0.125 +2024-09-01 01:29:39,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=28.14 vs. limit=22.5 +2024-09-01 01:37:41,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=102266.66666666667, ans=0.125 +2024-09-01 01:39:48,717 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 50, loss[loss=0.304, simple_loss=0.2895, pruned_loss=0.1553, over 18993.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.292, pruned_loss=0.1525, over 827748.42 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 0.5 +2024-09-01 01:40:37,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.57 vs. limit=15.0 +2024-09-01 01:40:37,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.27 vs. 
limit=15.0 +2024-09-01 01:44:00,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=102320.0, ans=0.0 +2024-09-01 01:46:06,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=102373.33333333333, ans=0.125 +2024-09-01 01:51:17,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102426.66666666667, ans=0.0 +2024-09-01 01:57:09,401 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 01:59:03,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=102533.33333333333, ans=0.125 +2024-09-01 01:59:47,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102533.33333333333, ans=0.1 +2024-09-01 02:01:07,724 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 100, loss[loss=0.3067, simple_loss=0.2926, pruned_loss=0.1558, over 19161.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.2832, pruned_loss=0.1462, over 1475350.41 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 1.0 +2024-09-01 02:05:19,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.89 vs. limit=6.0 +2024-09-01 02:09:39,914 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 2.859e+02 3.213e+02 3.589e+02 4.738e+02, threshold=6.426e+02, percent-clipped=0.0 +2024-09-01 02:10:07,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-09-01 02:10:07,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.06 vs. limit=6.0 +2024-09-01 02:10:16,929 INFO [dysarthria_finetune.py:1435] (3/4) (13193904128, 34072559616) +2024-09-01 02:10:16,930 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:10:16,959 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 02:10:37,117 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 0, loss[loss=0.26, simple_loss=0.2518, pruned_loss=0.1223, over 18566.00 frames. ], tot_loss[loss=0.26, simple_loss=0.2518, pruned_loss=0.1223, over 18566.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 1.0 +2024-09-01 02:10:37,117 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:15:37,620 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 5, validation: loss=0.2588, simple_loss=0.2515, pruned_loss=0.1195, over 1073944.00 frames. 
+2024-09-01 02:15:37,621 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 02:17:57,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 02:18:25,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102794.66666666667, ans=0.1 +2024-09-01 02:18:28,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.47 vs. limit=6.0 +2024-09-01 02:18:43,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=102794.66666666667, ans=0.125 +2024-09-01 02:19:22,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=102848.0, ans=0.2 +2024-09-01 02:19:46,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.58 vs. limit=15.0 +2024-09-01 02:21:03,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=15.0 +2024-09-01 02:23:25,248 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 50, loss[loss=0.3013, simple_loss=0.2886, pruned_loss=0.1509, over 18976.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.27, pruned_loss=0.1363, over 827749.28 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 0.25 +2024-09-01 02:24:04,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.05 vs. limit=22.5 +2024-09-01 02:24:44,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=103061.33333333333, ans=0.0 +2024-09-01 02:25:12,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103061.33333333333, ans=0.0 +2024-09-01 02:25:29,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=103061.33333333333, ans=0.0 +2024-09-01 02:25:42,160 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:25:54,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.49 vs. limit=15.0 +2024-09-01 02:26:14,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103114.66666666667, ans=0.1 +2024-09-01 02:27:05,610 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:27:14,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=103168.0, ans=0.125 +2024-09-01 02:27:15,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.48 vs. 
limit=15.0 +2024-09-01 02:27:48,820 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 2.619e+02 2.908e+02 3.410e+02 5.061e+02, threshold=5.817e+02, percent-clipped=0.0 +2024-09-01 02:27:54,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=103221.33333333333, ans=0.2 +2024-09-01 02:28:05,158 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 100, loss[loss=0.277, simple_loss=0.2668, pruned_loss=0.1361, over 19091.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.2673, pruned_loss=0.1351, over 1475913.16 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 0.5 +2024-09-01 02:29:03,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0 +2024-09-01 02:29:29,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=103328.0, ans=0.0 +2024-09-01 02:30:03,389 INFO [dysarthria_finetune.py:1435] (3/4) (13175029760, 34072559616) +2024-09-01 02:30:03,390 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:30:03,431 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 02:30:19,016 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 0, loss[loss=0.2498, simple_loss=0.2427, pruned_loss=0.1183, over 18684.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.2427, pruned_loss=0.1183, over 18684.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:30:19,016 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:30:42,399 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 6, validation: loss=0.247, simple_loss=0.2415, pruned_loss=0.1137, over 1073944.00 frames. +2024-09-01 02:30:51,758 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 02:32:19,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=103477.33333333333, ans=0.125 +2024-09-01 02:32:24,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.53 vs. limit=15.0 +2024-09-01 02:32:55,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103530.66666666667, ans=0.125 +2024-09-01 02:33:49,353 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 50, loss[loss=0.2401, simple_loss=0.2377, pruned_loss=0.1058, over 19058.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.2565, pruned_loss=0.1267, over 828493.81 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 0.5 +2024-09-01 02:34:10,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103690.66666666667, ans=0.1 +2024-09-01 02:34:39,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-09-01 02:34:39,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=28.48 vs. 
limit=22.5 +2024-09-01 02:34:40,098 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.072e+02 2.401e+02 2.633e+02 2.975e+02 4.049e+02, threshold=5.266e+02, percent-clipped=0.0 +2024-09-01 02:34:59,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=103797.33333333333, ans=0.125 +2024-09-01 02:35:49,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=38.71 vs. limit=22.5 +2024-09-01 02:35:51,872 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:35:55,582 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 100, loss[loss=0.2423, simple_loss=0.2367, pruned_loss=0.114, over 19113.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.2554, pruned_loss=0.1262, over 1475249.23 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:36:30,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 02:36:30,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104010.66666666667, ans=0.125 +2024-09-01 02:36:58,483 INFO [dysarthria_finetune.py:1435] (3/4) (380305408, 34072559616) +2024-09-01 02:36:58,484 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:36:58,568 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 02:37:37,096 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 0, loss[loss=0.294, simple_loss=0.2799, pruned_loss=0.1521, over 18595.00 frames. ], tot_loss[loss=0.294, simple_loss=0.2799, pruned_loss=0.1521, over 18595.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:37:37,097 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:38:00,937 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 7, validation: loss=0.2303, simple_loss=0.2284, pruned_loss=0.1027, over 1073944.00 frames. +2024-09-01 02:38:00,938 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 02:38:14,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-09-01 02:38:46,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104213.33333333333, ans=0.1 +2024-09-01 02:39:01,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.96 vs. limit=15.0 +2024-09-01 02:39:39,114 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.248e+02 2.388e+02 2.643e+02 3.863e+02, threshold=4.776e+02, percent-clipped=0.0 +2024-09-01 02:39:54,666 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 50, loss[loss=0.2562, simple_loss=0.2514, pruned_loss=0.12, over 18963.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.2491, pruned_loss=0.1215, over 827887.87 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 1.0 +2024-09-01 02:40:12,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104373.33333333333, ans=0.1 +2024-09-01 02:40:37,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=104480.0, ans=0.07 +2024-09-01 02:40:56,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.09 vs. limit=15.0 +2024-09-01 02:41:04,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=104533.33333333333, ans=0.0 +2024-09-01 02:41:19,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104586.66666666667, ans=0.125 +2024-09-01 02:41:41,952 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 100, loss[loss=0.225, simple_loss=0.224, pruned_loss=0.1012, over 19124.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.2438, pruned_loss=0.1179, over 1475075.17 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:41:47,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=104640.0, ans=0.2 +2024-09-01 02:41:47,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.22 vs. limit=22.5 +2024-09-01 02:41:55,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=104640.0, ans=10.0 +2024-09-01 02:42:00,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104640.0, ans=0.125 +2024-09-01 02:42:39,968 INFO [dysarthria_finetune.py:1435] (3/4) (13219069952, 34072559616) +2024-09-01 02:42:39,969 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:42:40,007 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 02:42:52,891 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 0, loss[loss=0.2285, simple_loss=0.2285, pruned_loss=0.1018, over 18547.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2285, pruned_loss=0.1018, over 18547.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 4.0 +2024-09-01 02:42:52,891 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:43:16,310 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 8, validation: loss=0.2224, simple_loss=0.2225, pruned_loss=0.09892, over 1073944.00 frames. +2024-09-01 02:43:16,311 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 02:43:17,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.60 vs. 
limit=15.0 +2024-09-01 02:43:19,888 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 02:43:51,418 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.041e+02 2.195e+02 2.485e+02 3.530e+02, threshold=4.390e+02, percent-clipped=0.0 +2024-09-01 02:44:24,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-09-01 02:44:28,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-09-01 02:45:06,402 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 50, loss[loss=0.2497, simple_loss=0.2442, pruned_loss=0.1204, over 18964.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2388, pruned_loss=0.1131, over 828441.23 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:45:07,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=105056.0, ans=0.125 +2024-09-01 02:46:20,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.76 vs. limit=12.0 +2024-09-01 02:46:26,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=105056.0, ans=0.125 +2024-09-01 02:46:47,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105109.33333333333, ans=0.0 +2024-09-01 02:47:09,841 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.46 vs. limit=15.0 +2024-09-01 02:47:29,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=15.0 +2024-09-01 02:47:48,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105269.33333333333, ans=0.125 +2024-09-01 02:47:55,804 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 100, loss[loss=0.2384, simple_loss=0.2394, pruned_loss=0.1071, over 19119.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.2364, pruned_loss=0.1121, over 1475727.62 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 2.0 +2024-09-01 02:48:10,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.68 vs. 
limit=15.0 +2024-09-01 02:48:16,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=105376.0, ans=0.125 +2024-09-01 02:48:29,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105376.0, ans=0.09899494936611666 +2024-09-01 02:48:34,263 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 1.930e+02 2.062e+02 2.246e+02 3.148e+02, threshold=4.124e+02, percent-clipped=0.0 +2024-09-01 02:48:55,527 INFO [dysarthria_finetune.py:1435] (3/4) (13208584192, 34072559616) +2024-09-01 02:48:55,528 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:48:55,571 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 02:49:09,690 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 0, loss[loss=0.2539, simple_loss=0.2518, pruned_loss=0.1187, over 18777.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.2518, pruned_loss=0.1187, over 18777.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:49:09,691 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:49:40,379 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 9, validation: loss=0.211, simple_loss=0.2147, pruned_loss=0.09159, over 1073944.00 frames. +2024-09-01 02:49:40,380 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 02:50:05,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=105477.33333333333, ans=0.125 +2024-09-01 02:50:51,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0 +2024-09-01 02:51:27,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=105637.33333333333, ans=0.2 +2024-09-01 02:51:33,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105637.33333333333, ans=0.125 +2024-09-01 02:51:38,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=105637.33333333333, ans=0.09899494936611666 +2024-09-01 02:52:16,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.83 vs. limit=22.5 +2024-09-01 02:52:30,678 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 50, loss[loss=0.2246, simple_loss=0.2293, pruned_loss=0.09762, over 18965.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2273, pruned_loss=0.1034, over 827503.70 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:53:44,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=105850.66666666667, ans=0.125 +2024-09-01 02:54:14,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=15.0 +2024-09-01 02:54:20,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.33 vs. 
limit=15.0 +2024-09-01 02:54:35,248 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 1.850e+02 1.979e+02 2.143e+02 2.885e+02, threshold=3.959e+02, percent-clipped=0.0 +2024-09-01 02:54:39,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105957.33333333333, ans=0.125 +2024-09-01 02:55:01,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105957.33333333333, ans=0.1 +2024-09-01 02:55:06,093 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 100, loss[loss=0.2105, simple_loss=0.2145, pruned_loss=0.093, over 19159.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.226, pruned_loss=0.1037, over 1475225.92 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 02:56:27,085 INFO [dysarthria_finetune.py:1435] (3/4) (14370406400, 34072559616) +2024-09-01 02:56:27,085 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 02:56:27,127 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 02:56:40,079 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 0, loss[loss=0.2173, simple_loss=0.2183, pruned_loss=0.1002, over 18587.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2183, pruned_loss=0.1002, over 18587.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 02:56:40,080 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 02:57:03,505 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 10, validation: loss=0.2075, simple_loss=0.2129, pruned_loss=0.09054, over 1073944.00 frames. +2024-09-01 02:57:03,506 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 02:57:25,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.73 vs. limit=22.5 +2024-09-01 02:57:38,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0 +2024-09-01 02:58:03,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.41 vs. limit=15.0 +2024-09-01 02:58:14,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=106325.33333333333, ans=0.125 +2024-09-01 02:58:18,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=106325.33333333333, ans=0.2 +2024-09-01 02:58:42,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106378.66666666667, ans=0.125 +2024-09-01 02:58:55,060 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 50, loss[loss=0.2416, simple_loss=0.2461, pruned_loss=0.1085, over 19101.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2253, pruned_loss=0.1024, over 827631.91 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 2.0 +2024-09-01 02:59:01,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.85 vs. 
limit=12.0 +2024-09-01 02:59:15,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.00 vs. limit=15.0 +2024-09-01 02:59:20,728 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.769e+02 1.897e+02 2.105e+02 2.891e+02, threshold=3.793e+02, percent-clipped=0.0 +2024-09-01 02:59:22,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=106485.33333333333, ans=0.0 +2024-09-01 02:59:46,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.69 vs. limit=15.0 +2024-09-01 03:00:07,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=106592.0, ans=0.5 +2024-09-01 03:00:24,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-09-01 03:00:40,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.09 vs. limit=15.0 +2024-09-01 03:00:42,969 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 100, loss[loss=0.209, simple_loss=0.215, pruned_loss=0.09248, over 19051.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2241, pruned_loss=0.1007, over 1475773.03 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 4.0 +2024-09-01 03:01:03,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106698.66666666667, ans=0.125 +2024-09-01 03:01:25,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106752.0, ans=0.0 +2024-09-01 03:01:40,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=106805.33333333333, ans=0.125 +2024-09-01 03:01:41,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=106805.33333333333, ans=0.0 +2024-09-01 03:01:46,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=106805.33333333333, ans=0.0 +2024-09-01 03:01:46,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=106805.33333333333, ans=0.125 +2024-09-01 03:01:46,974 INFO [dysarthria_finetune.py:1435] (3/4) (14313783296, 34072559616) +2024-09-01 03:01:46,975 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:01:47,028 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:08:00,726 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 0, loss[loss=0.2372, simple_loss=0.2374, pruned_loss=0.1122, over 18604.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.2374, pruned_loss=0.1122, over 18604.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:08:00,726 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:08:32,552 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 11, validation: loss=0.2002, simple_loss=0.2088, pruned_loss=0.08618, over 1073944.00 frames. 
+2024-09-01 03:08:32,552 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:09:36,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=106906.66666666667, ans=0.0 +2024-09-01 03:10:00,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=106960.0, ans=0.125 +2024-09-01 03:10:26,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=106960.0, ans=0.0 +2024-09-01 03:10:42,589 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 1.721e+02 1.824e+02 2.016e+02 2.682e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-09-01 03:11:52,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-09-01 03:12:03,918 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 50, loss[loss=0.2191, simple_loss=0.2263, pruned_loss=0.09753, over 19110.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2148, pruned_loss=0.09185, over 828132.31 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:12:12,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.91 vs. limit=15.0 +2024-09-01 03:12:36,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107120.0, ans=0.125 +2024-09-01 03:14:04,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.39 vs. limit=15.0 +2024-09-01 03:14:36,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=107226.66666666667, ans=0.2 +2024-09-01 03:15:07,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.11 vs. limit=15.0 +2024-09-01 03:15:13,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=107333.33333333333, ans=0.05 +2024-09-01 03:15:18,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=107333.33333333333, ans=0.04949747468305833 +2024-09-01 03:15:56,722 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 100, loss[loss=0.1842, simple_loss=0.2024, pruned_loss=0.0718, over 19127.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2163, pruned_loss=0.09267, over 1475363.18 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:16:47,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=107440.0, ans=0.0 +2024-09-01 03:17:09,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.76 vs. 
limit=10.0 +2024-09-01 03:17:56,543 INFO [dysarthria_finetune.py:1435] (3/4) (13189709824, 34072559616) +2024-09-01 03:17:56,544 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:17:56,583 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:18:09,579 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 0, loss[loss=0.2184, simple_loss=0.2228, pruned_loss=0.101, over 18650.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2228, pruned_loss=0.101, over 18650.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:18:09,579 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:18:33,054 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 12, validation: loss=0.1929, simple_loss=0.2049, pruned_loss=0.0821, over 1073944.00 frames. +2024-09-01 03:18:33,055 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:18:41,680 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.01 vs. limit=15.0 +2024-09-01 03:18:47,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107541.33333333333, ans=0.125 +2024-09-01 03:18:51,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.02 vs. limit=22.5 +2024-09-01 03:18:54,486 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.518e+02 1.683e+02 1.764e+02 1.920e+02 2.754e+02, threshold=3.529e+02, percent-clipped=0.0 +2024-09-01 03:19:20,923 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:19:21,125 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.69 vs. limit=6.0 +2024-09-01 03:19:27,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=107594.66666666667, ans=0.0 +2024-09-01 03:20:01,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=107648.0, ans=0.125 +2024-09-01 03:20:02,043 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.21 vs. limit=15.0 +2024-09-01 03:20:40,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=107754.66666666667, ans=0.2 +2024-09-01 03:20:41,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.31 vs. limit=22.5 +2024-09-01 03:21:02,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=107808.0, ans=0.0 +2024-09-01 03:21:03,694 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 50, loss[loss=0.2181, simple_loss=0.2283, pruned_loss=0.09669, over 19037.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2144, pruned_loss=0.0904, over 828666.57 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:21:07,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107808.0, ans=0.1 +2024-09-01 03:21:26,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=107808.0, ans=0.125 +2024-09-01 03:21:30,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.58 vs. limit=15.0 +2024-09-01 03:22:34,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-09-01 03:22:34,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-09-01 03:24:01,736 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 100, loss[loss=0.1706, simple_loss=0.1873, pruned_loss=0.06935, over 19142.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2134, pruned_loss=0.08897, over 1477170.44 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 4.0 +2024-09-01 03:24:21,999 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.650e+02 1.753e+02 1.928e+02 2.697e+02, threshold=3.507e+02, percent-clipped=0.0 +2024-09-01 03:24:38,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=108128.0, ans=0.0 +2024-09-01 03:24:50,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=108181.33333333333, ans=0.125 +2024-09-01 03:25:11,656 INFO [dysarthria_finetune.py:1435] (3/4) (13181321216, 34072559616) +2024-09-01 03:25:11,656 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:25:11,709 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:25:24,761 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 0, loss[loss=0.1961, simple_loss=0.203, pruned_loss=0.08995, over 18629.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.203, pruned_loss=0.08995, over 18629.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:25:24,761 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:25:48,259 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 13, validation: loss=0.1886, simple_loss=0.2026, pruned_loss=0.08078, over 1073944.00 frames. +2024-09-01 03:25:48,260 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:26:07,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108229.33333333333, ans=0.125 +2024-09-01 03:26:34,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=108336.0, ans=0.0 +2024-09-01 03:27:13,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.39 vs. limit=15.0 +2024-09-01 03:27:17,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.98 vs. 
limit=15.0 +2024-09-01 03:27:49,276 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:27:54,662 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 50, loss[loss=0.2111, simple_loss=0.2194, pruned_loss=0.09681, over 19050.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2108, pruned_loss=0.08718, over 828311.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 1.0 +2024-09-01 03:28:03,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=108496.0, ans=0.2 +2024-09-01 03:28:07,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=108496.0, ans=0.125 +2024-09-01 03:28:38,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108602.66666666667, ans=0.125 +2024-09-01 03:29:04,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108656.0, ans=0.125 +2024-09-01 03:29:07,378 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.617e+02 1.723e+02 2.007e+02 2.594e+02, threshold=3.446e+02, percent-clipped=0.0 +2024-09-01 03:29:21,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=108656.0, ans=0.125 +2024-09-01 03:29:28,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.58 vs. limit=15.0 +2024-09-01 03:29:30,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-09-01 03:29:42,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108709.33333333333, ans=0.125 +2024-09-01 03:29:46,146 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 100, loss[loss=0.1923, simple_loss=0.2091, pruned_loss=0.08237, over 19095.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2103, pruned_loss=0.08593, over 1474662.24 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 2.0 +2024-09-01 03:29:58,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.whiten.whitening_limit, batch_count=108762.66666666667, ans=12.0 +2024-09-01 03:30:00,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=108762.66666666667, ans=0.125 +2024-09-01 03:30:09,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.49 vs. limit=15.0 +2024-09-01 03:30:13,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108816.0, ans=0.125 +2024-09-01 03:30:18,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.25 vs. 
limit=15.0 +2024-09-01 03:30:19,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=108816.0, ans=0.125 +2024-09-01 03:30:19,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=108816.0, ans=0.2 +2024-09-01 03:30:26,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=108816.0, ans=0.0 +2024-09-01 03:30:36,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=108869.33333333333, ans=0.125 +2024-09-01 03:30:46,125 INFO [dysarthria_finetune.py:1435] (3/4) (13185515520, 34072559616) +2024-09-01 03:30:46,126 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:30:46,174 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:30:58,987 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 0, loss[loss=0.2284, simple_loss=0.2338, pruned_loss=0.1083, over 18650.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.2338, pruned_loss=0.1083, over 18650.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:30:58,987 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:31:17,316 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9378, 2.6222, 3.4462, 1.6604], device='cuda:3') +2024-09-01 03:31:23,186 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 14, validation: loss=0.1833, simple_loss=0.2, pruned_loss=0.07856, over 1073944.00 frames. +2024-09-01 03:31:23,187 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:31:26,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=108917.33333333333, ans=0.0 +2024-09-01 03:31:38,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108917.33333333333, ans=0.125 +2024-09-01 03:31:38,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108917.33333333333, ans=0.0 +2024-09-01 03:33:12,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=10.78 vs. limit=12.0 +2024-09-01 03:33:13,473 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 50, loss[loss=0.1708, simple_loss=0.1998, pruned_loss=0.0652, over 19012.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2075, pruned_loss=0.08365, over 829335.16 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 4.0 +2024-09-01 03:33:14,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=109184.0, ans=0.0 +2024-09-01 03:33:19,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.619e+02 1.722e+02 1.984e+02 2.668e+02, threshold=3.445e+02, percent-clipped=0.0 +2024-09-01 03:33:47,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.54 vs. 
limit=22.5 +2024-09-01 03:34:53,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=109397.33333333333, ans=0.0 +2024-09-01 03:35:00,698 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 100, loss[loss=0.1701, simple_loss=0.1978, pruned_loss=0.06686, over 19114.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2055, pruned_loss=0.08227, over 1476363.01 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 8.0 +2024-09-01 03:35:39,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=109504.0, ans=0.025 +2024-09-01 03:36:00,177 INFO [dysarthria_finetune.py:1435] (3/4) (14307491840, 34072559616) +2024-09-01 03:36:00,178 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:36:00,229 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:36:14,209 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 0, loss[loss=0.1834, simple_loss=0.1993, pruned_loss=0.0811, over 18716.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.1993, pruned_loss=0.0811, over 18716.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 4.0 +2024-09-01 03:36:14,210 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:36:45,407 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 15, validation: loss=0.1765, simple_loss=0.1963, pruned_loss=0.07531, over 1073944.00 frames. +2024-09-01 03:36:45,408 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:37:05,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=15.0 +2024-09-01 03:37:52,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=109712.0, ans=0.0 +2024-09-01 03:38:03,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.579e+02 1.672e+02 1.908e+02 2.431e+02, threshold=3.343e+02, percent-clipped=0.0 +2024-09-01 03:38:04,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109712.0, ans=0.125 +2024-09-01 03:38:16,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109765.33333333333, ans=0.125 +2024-09-01 03:39:11,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=109818.66666666667, ans=0.125 +2024-09-01 03:39:14,743 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 50, loss[loss=0.1633, simple_loss=0.1826, pruned_loss=0.06987, over 19179.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2041, pruned_loss=0.08247, over 827713.24 frames. ], batch size: 103, lr: 9.95e-05, grad_scale: 4.0 +2024-09-01 03:39:25,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=109872.0, ans=0.0 +2024-09-01 03:39:37,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.42 vs. 
limit=15.0 +2024-09-01 03:40:49,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=110032.0, ans=0.5 +2024-09-01 03:41:15,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.88 vs. limit=15.0 +2024-09-01 03:41:54,230 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 100, loss[loss=0.1555, simple_loss=0.1843, pruned_loss=0.06161, over 19073.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2039, pruned_loss=0.08199, over 1475236.97 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 8.0 +2024-09-01 03:42:13,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.75 vs. limit=15.0 +2024-09-01 03:42:50,386 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.63 vs. limit=15.0 +2024-09-01 03:43:21,351 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.548e+02 1.650e+02 1.862e+02 2.617e+02, threshold=3.300e+02, percent-clipped=0.0 +2024-09-01 03:43:24,594 INFO [dysarthria_finetune.py:1435] (3/4) (604700672, 34072559616) +2024-09-01 03:43:24,595 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:43:24,675 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:43:40,072 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 0, loss[loss=0.2067, simple_loss=0.2168, pruned_loss=0.09759, over 18560.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2168, pruned_loss=0.09759, over 18560.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:43:40,073 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:44:25,991 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 16, validation: loss=0.1763, simple_loss=0.1967, pruned_loss=0.07691, over 1073944.00 frames. +2024-09-01 03:44:25,991 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:46:02,852 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:46:15,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=110400.0, ans=0.125 +2024-09-01 03:47:46,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110453.33333333333, ans=0.125 +2024-09-01 03:47:59,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=110506.66666666667, ans=0.0 +2024-09-01 03:48:37,713 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 50, loss[loss=0.1725, simple_loss=0.2045, pruned_loss=0.06984, over 19044.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2028, pruned_loss=0.08122, over 827661.95 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0 +2024-09-01 03:49:15,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.56 vs. 
limit=15.0 +2024-09-01 03:49:50,948 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:50:42,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.39 vs. limit=15.0 +2024-09-01 03:51:55,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-09-01 03:52:21,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.93 vs. limit=22.5 +2024-09-01 03:52:26,727 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.555e+02 1.657e+02 1.896e+02 2.445e+02, threshold=3.314e+02, percent-clipped=0.0 +2024-09-01 03:52:31,255 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 100, loss[loss=0.1832, simple_loss=0.2103, pruned_loss=0.07804, over 19120.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2022, pruned_loss=0.08081, over 1474935.70 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:52:41,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:52:44,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.02 vs. limit=22.5 +2024-09-01 03:52:47,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:52:52,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110826.66666666667, ans=0.125 +2024-09-01 03:53:00,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.38 vs. limit=12.0 +2024-09-01 03:53:08,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.86 vs. limit=15.0 +2024-09-01 03:53:34,812 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-09-01 03:54:07,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=110933.33333333333, ans=0.04949747468305833 +2024-09-01 03:54:07,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.63 vs. limit=6.0 +2024-09-01 03:54:10,883 INFO [dysarthria_finetune.py:1435] (3/4) (13185515520, 34072559616) +2024-09-01 03:54:10,884 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 03:54:10,932 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 03:54:38,949 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 0, loss[loss=0.2429, simple_loss=0.2444, pruned_loss=0.1207, over 18583.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2444, pruned_loss=0.1207, over 18583.00 frames. 
], batch size: 65, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 03:54:38,950 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 03:54:59,406 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.1051, 3.8526, 3.6111, 3.7194, 3.9271, 2.8770, 3.7905, 3.8597], + device='cuda:3') +2024-09-01 03:55:19,878 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 17, validation: loss=0.1671, simple_loss=0.1912, pruned_loss=0.07151, over 1073944.00 frames. +2024-09-01 03:55:19,879 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 03:55:22,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110981.33333333333, ans=0.1 +2024-09-01 03:56:06,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111034.66666666667, ans=0.125 +2024-09-01 03:56:27,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-09-01 03:56:30,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.83 vs. limit=15.0 +2024-09-01 03:56:42,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.82 vs. limit=15.0 +2024-09-01 03:57:04,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=111088.0, ans=0.125 +2024-09-01 03:57:20,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.53 vs. limit=22.5 +2024-09-01 03:57:33,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.18 vs. limit=22.5 +2024-09-01 03:58:12,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111194.66666666667, ans=0.1 +2024-09-01 03:58:57,666 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 50, loss[loss=0.1765, simple_loss=0.1985, pruned_loss=0.0772, over 18982.00 frames. ], tot_loss[loss=0.176, simple_loss=0.1981, pruned_loss=0.0769, over 827806.80 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 4.0 +2024-09-01 04:00:54,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.39 vs. 
limit=15.0 +2024-09-01 04:00:57,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111354.66666666667, ans=0.1 +2024-09-01 04:01:02,143 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.555e+02 1.659e+02 1.888e+02 2.626e+02, threshold=3.319e+02, percent-clipped=0.0 +2024-09-01 04:02:24,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=111461.33333333333, ans=0.125 +2024-09-01 04:02:28,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111461.33333333333, ans=0.1 +2024-09-01 04:02:48,339 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 100, loss[loss=0.1706, simple_loss=0.1997, pruned_loss=0.07079, over 19078.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.1977, pruned_loss=0.07629, over 1476033.73 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 8.0 +2024-09-01 04:03:18,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111514.66666666667, ans=0.1 +2024-09-01 04:03:23,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111514.66666666667, ans=0.125 +2024-09-01 04:05:21,540 INFO [dysarthria_finetune.py:1435] (3/4) (13177126912, 34072559616) +2024-09-01 04:05:21,541 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 04:05:21,589 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 04:05:30,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=111669.33333333333, ans=0.2 +2024-09-01 04:05:38,880 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 0, loss[loss=0.2044, simple_loss=0.218, pruned_loss=0.09541, over 18613.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.218, pruned_loss=0.09541, over 18613.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:05:38,880 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 04:06:14,845 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 18, validation: loss=0.1676, simple_loss=0.191, pruned_loss=0.07213, over 1073944.00 frames. +2024-09-01 04:06:14,846 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 04:06:24,774 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=7.608e-02 +2024-09-01 04:08:29,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.02 vs. limit=15.0 +2024-09-01 04:09:42,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.516e+02 1.635e+02 1.895e+02 3.024e+02, threshold=3.269e+02, percent-clipped=0.0 +2024-09-01 04:09:57,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.02 vs. limit=22.5 +2024-09-01 04:10:10,258 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 50, loss[loss=0.1768, simple_loss=0.2004, pruned_loss=0.07658, over 19004.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.1938, pruned_loss=0.07445, over 828768.32 frames. 
], batch size: 102, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:10:19,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111936.0, ans=0.125 +2024-09-01 04:13:49,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-09-01 04:13:49,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.70 vs. limit=10.0 +2024-09-01 04:14:13,999 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 100, loss[loss=0.1394, simple_loss=0.1673, pruned_loss=0.05573, over 19084.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.1932, pruned_loss=0.07458, over 1476677.05 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 8.0 +2024-09-01 04:14:30,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112202.66666666667, ans=0.125 +2024-09-01 04:15:02,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.55 vs. limit=15.0 +2024-09-01 04:15:13,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=112309.33333333333, ans=0.125 +2024-09-01 04:15:46,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=112309.33333333333, ans=0.0 +2024-09-01 04:15:56,105 INFO [dysarthria_finetune.py:1435] (3/4) (13172932608, 34072559616) +2024-09-01 04:15:56,105 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 04:15:56,155 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 04:16:15,954 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 0, loss[loss=0.2243, simple_loss=0.2289, pruned_loss=0.1099, over 18562.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2289, pruned_loss=0.1099, over 18562.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:16:15,954 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 04:16:39,401 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 19, validation: loss=0.1638, simple_loss=0.1883, pruned_loss=0.06968, over 1073944.00 frames. +2024-09-01 04:16:39,401 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 04:16:50,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112352.0, ans=0.1 +2024-09-01 04:16:56,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=112352.0, ans=0.0 +2024-09-01 04:17:22,798 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.531e+02 1.615e+02 1.818e+02 2.373e+02, threshold=3.231e+02, percent-clipped=0.0 +2024-09-01 04:17:38,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112458.66666666667, ans=0.125 +2024-09-01 04:18:07,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. 
limit=6.0 +2024-09-01 04:18:32,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-09-01 04:18:32,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.53 vs. limit=6.0 +2024-09-01 04:18:39,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-09-01 04:19:09,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112618.66666666667, ans=0.125 +2024-09-01 04:19:10,127 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 50, loss[loss=0.1575, simple_loss=0.1829, pruned_loss=0.066, over 19015.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.193, pruned_loss=0.07382, over 829365.51 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 4.0 +2024-09-01 04:19:51,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0 +2024-09-01 04:20:00,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=112725.33333333333, ans=0.2 +2024-09-01 04:20:00,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.35 vs. limit=10.0 +2024-09-01 04:20:10,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112725.33333333333, ans=0.125 +2024-09-01 04:20:40,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=112778.66666666667, ans=0.09899494936611666 +2024-09-01 04:20:51,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=112832.0, ans=0.125 +2024-09-01 04:21:10,108 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 100, loss[loss=0.1561, simple_loss=0.1802, pruned_loss=0.06602, over 19083.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.1942, pruned_loss=0.07482, over 1476389.98 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 8.0 +2024-09-01 04:21:31,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=112938.66666666667, ans=0.2 +2024-09-01 04:21:50,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.56 vs. 
limit=12.0 +2024-09-01 04:21:54,607 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.501e+02 1.584e+02 1.820e+02 2.268e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-09-01 04:21:58,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=112992.0, ans=0.07 +2024-09-01 04:22:11,534 INFO [dysarthria_finetune.py:1435] (3/4) (13212778496, 34072559616) +2024-09-01 04:22:11,535 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 04:22:11,579 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 04:22:26,676 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 0, loss[loss=0.2041, simple_loss=0.213, pruned_loss=0.09759, over 18436.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.213, pruned_loss=0.09759, over 18436.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:22:26,677 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-09-01 04:22:50,266 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 20, validation: loss=0.1638, simple_loss=0.1875, pruned_loss=0.07, over 1073944.00 frames. +2024-09-01 04:22:50,267 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14321MB +2024-09-01 04:22:56,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=113040.0, ans=0.125 +2024-09-01 04:23:19,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.07 vs. limit=15.0 +2024-09-01 04:23:20,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=113093.33333333333, ans=0.2 +2024-09-01 04:23:20,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=113093.33333333333, ans=0.0 +2024-09-01 04:23:27,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113093.33333333333, ans=0.125 +2024-09-01 04:23:55,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=113146.66666666667, ans=0.2 +2024-09-01 04:24:57,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=113253.33333333333, ans=0.0 +2024-09-01 04:25:07,019 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 50, loss[loss=0.1742, simple_loss=0.2013, pruned_loss=0.0736, over 18942.00 frames. ], tot_loss[loss=0.171, simple_loss=0.192, pruned_loss=0.07499, over 827999.75 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 4.0 +2024-09-01 04:25:22,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=113306.66666666667, ans=0.0 +2024-09-01 04:25:57,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113360.0, ans=0.125 +2024-09-01 04:26:11,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.23 vs. 
limit=15.0 +2024-09-01 04:27:04,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=113520.0, ans=10.0 +2024-09-01 04:27:05,365 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.522e+02 1.605e+02 1.869e+02 2.652e+02, threshold=3.210e+02, percent-clipped=0.0 +2024-09-01 04:27:27,157 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 100, loss[loss=0.1332, simple_loss=0.1609, pruned_loss=0.05269, over 19171.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.1908, pruned_loss=0.07332, over 1475487.52 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 8.0 +2024-09-01 04:27:34,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-09-01 04:28:11,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113626.66666666667, ans=0.1 +2024-09-01 04:28:22,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.08 vs. limit=6.0 +2024-09-01 04:28:26,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113680.0, ans=0.1 +2024-09-01 04:28:43,107 INFO [dysarthria_finetune.py:1435] (3/4) (13204389888, 34072559616) +2024-09-01 04:28:43,108 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-09-01 04:28:43,135 INFO [dysarthria_finetune.py:1440] (3/4) (29977411584, 34072559616) +2024-09-01 04:28:43,136 INFO [dysarthria_finetune.py:1442] (3/4) Done! diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724922176.cdr2545.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724922176.cdr2545.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..b69328ea649bed37d6c19c01909ac670cf5a8183 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724922176.cdr2545.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:23a1db0cd13f9f343e91bdd0e39824b1bcd4787346a0bdd0353b0fbfc063a5e6 +size 795 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724954049.cdr2500.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724954049.cdr2500.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..5dade4a3675ef6b625311a8fbbead5f7dba998a1 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1724954049.cdr2500.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a2d585a054cf6291c3056d8d4a538cd23d0d3bfafaa3826967de2f119edbe333 +size 2913 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725048942.cdr2558.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725048942.cdr2558.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..427271e264be9568ff3710910c4ff80f07656691 --- /dev/null +++ 
b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725048942.cdr2558.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b9911ca5b48d87ab8f592f2b499cebe47052e960f21dc5b838293c6005ecfe6 +size 2913 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725075683.cdr2549.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725075683.cdr2549.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..e3c664fc38e3eae8f362198daa6063ba47e119d4 --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725075683.cdr2549.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0debd6746a60033f489a0d99f2da1883b373fc2997d27e149f7752476786dbb2 +size 88 diff --git a/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725135371.cdr2558.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725135371.cdr2558.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..dba159547ec89e0b23d61d73650efe5ecc78f0af --- /dev/null +++ b/zipformer/finetuned/non_ctc/causal/exp_finetune/tensorboard/events.out.tfevents.1725135371.cdr2558.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa9fdc485834f1a5eb2d1734f98436e8992ba3bdd9992fcab8e9f37ed98cd183 +size 33318 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/best-train-loss.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..d6dc59a4b51dab51ffb6acb6b6bd8bcc7dac0097 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a02d980bab1cf51199f8a322afec45228f04c701bbc8e53ffa6b95dff3d1af32 +size 1049767630 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/best-valid-loss.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..d6dc59a4b51dab51ffb6acb6b6bd8bcc7dac0097 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a02d980bab1cf51199f8a322afec45228f04c701bbc8e53ffa6b95dff3d1af32 +size 1049767630 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-1.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..ce9e92c8bbf92a001a04760297a9d5efdd970714 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bf6591bbaa08df8deece1c131bb0f077fc947df16a02839a3cf6f5b8adb6f77 +size 1049764225 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-10.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..eb86675d4e19beabf017a6f1d0e23dc21a0dcbce --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-10.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:3d496a20aced6969ccce0127e7724f97d99e4d2e95515d1c4c5563b62a0fc726 +size 1049767182 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-11.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..a0f7e10bb177da58af6a1c8fadd888400784fedb --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-11.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:325ebfa4ec186df868b2af7be88a6a491f110ec0a8618c66520e6cbc481936e5 +size 1049767246 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-12.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..571a58e49321353a918b34dc22fedeadc27e632c --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:465735360596882ff42ffcd52a951185d22f0edc6c870fb7bc8a02827cee4a51 +size 1049767246 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-13.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-13.pt new file mode 100644 index 0000000000000000000000000000000000000000..f8f3e76d53d378eae457daa4e9ff7c353f90c219 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78fd167d620ca2c8528072a2b63f69cacbc5d606ebd636245be9e235fba471f6 +size 1049767310 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-14.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..0a7940f6fd6eaf5300b8a77755c62da7730fff5b --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f786bf51e47eeb03b13babbf96ed152e3ac422e2d0a3fe82b4f1ec49dc6a9fc5 +size 1049767374 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-15.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-15.pt new file mode 100644 index 0000000000000000000000000000000000000000..a6aa2ab661fdb5c79902f8985369b5bf05102dd1 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-15.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:330c6ef9a279b4eaf0ec13177f983b2cec1ddc1923a3cd796f2f92f155ee2819 +size 1049767438 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-16.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..4baea69ec8804e73a5e2bca227c8296449d3db7c --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88d17ba2fd3abe76f889f33f7057b9ce8f28a8f5cf9cc506bb16e3e248e1d5e4 +size 1049767438 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-17.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..0e5234b0a15cd46352805feb24a5359cc8ca06a1 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69ddee01425a498fccb9dd89d5c12338ae3d806cae7f0e65c5a491bc12ac0f09 +size 
1049767502 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-18.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..7b97caebb0dfaddd59e4568fa7835c70e27cda43 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52654bb82ba8b355114dec260b1ad3c6556b3ded5195b0b5547d7eb22c1c0639 +size 1049767566 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-19.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..e49c66e876caf3f40b2b6479e23b708d2b459a12 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a911d925a8f5828417993c7abf5408d01e7a0600c6b2b752dad486e2e839ce0 +size 1049767630 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-2.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..ea8ddd877cf36e4acf008f1277352528bff5f28a --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1eb3e7ba818fc718138d2ac2b9979f5c5489ce23997c64437ebc38ebe8732e14 +size 1049764353 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-20.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..d6dc59a4b51dab51ffb6acb6b6bd8bcc7dac0097 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a02d980bab1cf51199f8a322afec45228f04c701bbc8e53ffa6b95dff3d1af32 +size 1049767630 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-3.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..bfa6207347f66c428c6345daa2ceb2760ea69030 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-3.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ffaf609e3f562b806178f9d2f8a4305522a5d319a5e0bd117f0cff49e3e2a609 +size 1049764417 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-4.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-4.pt new file mode 100644 index 0000000000000000000000000000000000000000..28bb995d979c23e11bf471b2c5e4c3b170347195 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1553ef709b9d00e9baa2e07d4544148e968bf5ec9a89ea66c1dc998b9d692f74 +size 1049764481 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-5.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..3aaf43a4f9466dfe7ad1042a4d130c0303243b96 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e47fd0bfc8e06cb590101e65e6e410fa9a666e4cd1c435bfb030dba5c9d7d64 +size 1049764481 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-6.pt 
b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..53b4cb0e7f25c16f2a4c9458c75d0f687f020e93 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df1a6f2d87d44717d37e02a44f2f9cf3b563d2098e8c4300fde8a0136df40719 +size 1049764545 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-7.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..61b652f6b15ab34c53c7ad857d11c2580aff1243 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b309e45915dd30d1134d8c4919cd92e66bd987528f127e1d89d1b62feb3cd0f +size 1049764609 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-8.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-8.pt new file mode 100644 index 0000000000000000000000000000000000000000..3b4ee0881e0a200d6779a0081ecfdfe50ad90c56 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b29753416e3775e314089448b443ac4dded7e5706b88965210e21809e0798be1 +size 1049764673 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-9.pt b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-9.pt new file mode 100644 index 0000000000000000000000000000000000000000..0d9aa37c3e809896cf974157c98a92866e6148fc --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa74c839e14cee9d19c78a6887d99e79e4a569dcd407602435309836a0d70210 +size 1049764673 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-0 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-0 new file mode 100644 index 0000000000000000000000000000000000000000..c0d0ea5436342c867fb1c0c8f8de1b70ef920f72 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-0 @@ -0,0 +1,66 @@ +2024-08-12 23:55:25,674 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-12 23:55:25,895 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616) +2024-08-12 23:55:25,895 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-12 23:55:26,256 INFO [dysarthria_finetune.py:1219] (0/4) (33748090880, 34072559616) +2024-08-12 23:55:27,783 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-12 23:55:28,274 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': 
'/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2650.int.cedar.computecanada.ca', 'IP address': '172.16.146.87'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-12 23:55:28,274 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-12 23:55:29,263 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65549011 +2024-08-12 23:55:29,824 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-12 23:55:39,907 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-12 23:55:48,042 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-12 23:55:48,316 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-12 23:55:52,160 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. 
+2024-08-12 23:55:53,092 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-12 23:55:53,094 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts +2024-08-12 23:55:53,095 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-12 23:55:53,414 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-12 23:55:53,414 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-12 23:57:01,486 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.46 vs. limit=7.5 +2024-08-12 23:57:02,260 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 7499MB +2024-08-12 23:57:03,576 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 7699MB +2024-08-12 23:58:07,742 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 7699MB +2024-08-12 23:58:09,122 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 7699MB +2024-08-12 23:59:32,122 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 7699MB +2024-08-12 23:59:33,640 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 7699MB +2024-08-13 00:00:14,100 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.2996, simple_loss=0.2854, pruned_loss=0.1418, over 11720.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.2854, pruned_loss=0.1418, over 11720.00 frames. ], batch size: 41, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 00:00:14,101 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-13 00:19:18,429 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. 
+2024-08-13 00:19:18,430 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 12287MB +2024-08-13 00:29:11,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-13 00:29:25,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-13 00:31:33,245 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+02 6.474e+02 7.040e+02 7.172e+02 7.430e+02, threshold=2.816e+03, percent-clipped=0.0 +2024-08-13 00:34:36,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=100033.33333333333, ans=0.07 +2024-08-13 00:41:15,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 5.541e+02 6.558e+02 7.172e+02 7.522e+02, threshold=2.623e+03, percent-clipped=0.0 +2024-08-13 01:22:57,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100100.0, ans=0.125 +2024-08-13 01:25:52,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.705e+02 5.003e+02 5.937e+02 6.682e+02 7.522e+02, threshold=2.375e+03, percent-clipped=0.0 +2024-08-13 01:32:49,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100133.33333333333, ans=0.125 +2024-08-13 01:37:51,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100133.33333333333, ans=0.125 +2024-08-13 01:38:09,238 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.3339, simple_loss=0.3132, pruned_loss=0.2013, over 11899.00 frames. ], tot_loss[loss=0.3486, simple_loss=0.3288, pruned_loss=0.1959, over 516580.24 frames. ], batch size: 64, lr: 5.51e-05, grad_scale: 2.0 +2024-08-13 01:39:07,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=26.11 vs. limit=15.0 +2024-08-13 01:39:42,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100166.66666666667, ans=0.0 +2024-08-13 01:57:38,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100233.33333333333, ans=0.125 +2024-08-13 01:57:38,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.35 vs. limit=15.0 +2024-08-13 02:08:03,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=100266.66666666667, ans=0.07 +2024-08-13 02:15:48,562 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 3.979e+02 4.793e+02 5.640e+02 7.522e+02, threshold=9.587e+02, percent-clipped=0.0 +2024-08-13 02:15:48,597 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.3407, simple_loss=0.3206, pruned_loss=0.1939, over 11886.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3189, pruned_loss=0.1886, over 916559.82 frames. 
], batch size: 96, lr: 6.01e-05, grad_scale: 4.0 +2024-08-13 02:23:24,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=25.73 vs. limit=15.0 +2024-08-13 02:24:26,377 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.88 vs. limit=15.0 +2024-08-13 02:40:27,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100466.66666666667, ans=0.1 +2024-08-13 02:43:23,332 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 150, loss[loss=0.3101, simple_loss=0.2936, pruned_loss=0.1637, over 11781.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.3143, pruned_loss=0.1838, over 1229700.22 frames. ], batch size: 69, lr: 6.51e-05, grad_scale: 4.0 +2024-08-13 02:46:56,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.28 vs. limit=15.0 +2024-08-13 02:53:02,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=100533.33333333333, ans=0.0 +2024-08-13 03:03:09,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.86 vs. limit=15.0 +2024-08-13 03:29:02,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=100633.33333333333, ans=0.0 +2024-08-13 03:30:44,924 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 2.802e+02 3.112e+02 3.482e+02 4.513e+02, threshold=6.224e+02, percent-clipped=0.0 +2024-08-13 03:30:44,958 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 200, loss[loss=0.3584, simple_loss=0.3344, pruned_loss=0.214, over 11418.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.307, pruned_loss=0.177, over 1472498.12 frames. 
], batch size: 46, lr: 7.01e-05, grad_scale: 8.0 +2024-08-13 03:34:41,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100666.66666666667, ans=0.1 +2024-08-13 03:35:46,987 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-1.pt +2024-08-13 03:41:32,251 INFO [dysarthria_finetune.py:1435] (0/4) (18644402176, 34072559616) +2024-08-13 03:41:32,252 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-13 03:41:32,266 INFO [dysarthria_finetune.py:1440] (0/4) (30036131840, 34072559616) diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-1 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-1 new file mode 100644 index 0000000000000000000000000000000000000000..99b476183fcfa8dc5c465ee7180e74639fb01379 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-1 @@ -0,0 +1,68 @@ +2024-08-12 23:55:25,903 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-12 23:55:33,367 INFO [dysarthria_finetune.py:1214] (1/4) (33106362368, 34072559616) +2024-08-12 23:55:33,367 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-12 23:55:33,989 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-12 23:55:33,989 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-12 23:55:33,992 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2650.int.cedar.computecanada.ca', 'IP address': '172.16.146.87'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': 
'192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-12 23:55:33,992 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-12 23:55:34,683 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65549011 +2024-08-12 23:55:34,684 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-12 23:55:39,161 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-12 23:55:48,035 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-12 23:55:48,316 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-12 23:55:52,031 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-12 23:55:52,998 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-12 23:55:52,999 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-12 23:55:53,007 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-12 23:55:53,414 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-12 23:55:53,415 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-12 23:57:01,486 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.48 vs. 
limit=7.5 +2024-08-12 23:57:02,259 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 7522MB +2024-08-12 23:57:03,572 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 7711MB +2024-08-12 23:58:07,747 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 7711MB +2024-08-12 23:58:09,127 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 7711MB +2024-08-12 23:59:32,123 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 7711MB +2024-08-12 23:59:33,642 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 7711MB +2024-08-13 00:00:14,086 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.3398, simple_loss=0.3215, pruned_loss=0.1834, over 11438.00 frames. ], tot_loss[loss=0.3398, simple_loss=0.3215, pruned_loss=0.1834, over 11438.00 frames. ], batch size: 40, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 00:00:14,086 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-13 00:19:18,431 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. +2024-08-13 00:19:18,432 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 10510MB +2024-08-13 00:23:11,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1 +2024-08-13 00:27:15,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=100000.0, ans=0.125 +2024-08-13 00:28:44,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.24 vs. limit=22.5 +2024-08-13 00:29:15,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-13 00:31:33,245 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+02 6.474e+02 7.040e+02 7.172e+02 7.430e+02, threshold=2.816e+03, percent-clipped=0.0 +2024-08-13 00:38:33,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.01 vs. limit=15.0 +2024-08-13 00:41:15,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 5.541e+02 6.558e+02 7.172e+02 7.522e+02, threshold=2.623e+03, percent-clipped=0.0 +2024-08-13 00:54:00,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=15.0 +2024-08-13 01:25:52,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.705e+02 5.003e+02 5.937e+02 6.682e+02 7.522e+02, threshold=2.375e+03, percent-clipped=0.0 +2024-08-13 01:38:09,241 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.3026, simple_loss=0.287, pruned_loss=0.1565, over 11921.00 frames. ], tot_loss[loss=0.3528, simple_loss=0.3326, pruned_loss=0.2009, over 515986.49 frames. 
], batch size: 64, lr: 5.51e-05, grad_scale: 2.0 +2024-08-13 01:40:15,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100166.66666666667, ans=0.125 +2024-08-13 01:51:38,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.91 vs. limit=15.0 +2024-08-13 01:59:35,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=100233.33333333333, ans=0.2 +2024-08-13 01:59:35,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100233.33333333333, ans=0.125 +2024-08-13 02:08:27,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=100266.66666666667, ans=0.025 +2024-08-13 02:15:48,563 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 3.979e+02 4.793e+02 5.640e+02 7.522e+02, threshold=9.587e+02, percent-clipped=0.0 +2024-08-13 02:15:48,612 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.359, simple_loss=0.3377, pruned_loss=0.205, over 11911.00 frames. ], tot_loss[loss=0.3413, simple_loss=0.322, pruned_loss=0.1909, over 915517.59 frames. ], batch size: 96, lr: 6.01e-05, grad_scale: 4.0 +2024-08-13 02:16:11,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100333.33333333333, ans=0.1 +2024-08-13 02:37:41,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.85 vs. limit=15.0 +2024-08-13 02:38:46,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=100433.33333333333, ans=0.2 +2024-08-13 02:39:45,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.95 vs. limit=15.0 +2024-08-13 02:43:23,335 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 150, loss[loss=0.3098, simple_loss=0.2931, pruned_loss=0.1654, over 11699.00 frames. ], tot_loss[loss=0.3335, simple_loss=0.3148, pruned_loss=0.1842, over 1227213.99 frames. ], batch size: 69, lr: 6.51e-05, grad_scale: 4.0 +2024-08-13 02:47:16,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=34.85 vs. limit=15.0 +2024-08-13 02:53:03,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.76 vs. 
limit=15.0 +2024-08-13 03:00:24,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=100566.66666666667, ans=0.0 +2024-08-13 03:03:10,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100566.66666666667, ans=0.125 +2024-08-13 03:23:37,785 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-13 03:30:44,929 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 2.802e+02 3.112e+02 3.482e+02 4.513e+02, threshold=6.224e+02, percent-clipped=0.0 +2024-08-13 03:30:44,964 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 200, loss[loss=0.2979, simple_loss=0.2832, pruned_loss=0.152, over 11607.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3076, pruned_loss=0.1775, over 1470998.27 frames. ], batch size: 46, lr: 7.01e-05, grad_scale: 8.0 +2024-08-13 03:31:26,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.58 vs. limit=15.0 +2024-08-13 03:34:39,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=100666.66666666667, ans=0.125 +2024-08-13 03:35:46,995 INFO [dysarthria_finetune.py:1435] (1/4) (18224971776, 34072559616) +2024-08-13 03:35:46,996 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-13 03:35:47,028 INFO [dysarthria_finetune.py:1440] (1/4) (30065491968, 34072559616) diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-2 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-2 new file mode 100644 index 0000000000000000000000000000000000000000..dcaad0f5f54db3a0a8e28d92783375643702e4ce --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-2 @@ -0,0 +1,76 @@ +2024-08-12 23:55:25,900 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-12 23:55:33,367 INFO [dysarthria_finetune.py:1214] (2/4) (33106362368, 34072559616) +2024-08-12 23:55:33,367 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-12 23:55:33,979 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616) +2024-08-12 23:55:33,980 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-12 23:55:33,983 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2650.int.cedar.computecanada.ca', 'IP address': '172.16.146.87'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 
'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-12 23:55:33,983 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-12 23:55:34,668 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65549011 +2024-08-12 23:55:34,668 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-12 23:55:39,209 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-12 23:55:48,035 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-12 23:55:48,316 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-12 23:55:52,031 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. 
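The datamodule messages just above ("Disable MUSAN", "Enable SpecAugment", "Using DynamicBucketingSampler.") are the standard lhotse-based setup used by icefall recipes. Below is a minimal sketch, not the recipe's own dysarthria_asr_datamodule.py, of how a DynamicBucketingSampler is typically wired with the options visible in the dumped config ('max_duration': 500, 'num_buckets': 20, 'shuffle': True, 'drop_last': True); the manifest path is hypothetical.

```python
from lhotse import load_manifest_lazy
from lhotse.dataset import DynamicBucketingSampler, K2SpeechRecognitionDataset
from torch.utils.data import DataLoader

# Hypothetical manifest path; the run above reads its manifests from
# /home/liqihan/scratch/data/speech_accessibility/manifests.
cuts = load_manifest_lazy("manifests/cuts_train.jsonl.gz")

# Buckets cuts of similar duration so each mini-batch holds at most
# max_duration seconds of audio (500 s in this run's config).
sampler = DynamicBucketingSampler(
    cuts,
    max_duration=500,
    num_buckets=20,
    shuffle=True,
    drop_last=True,
)

# lhotse samplers yield complete mini-batches of cuts, so the DataLoader
# is created with batch_size=None ('num_workers': 0 in this config).
dataset = K2SpeechRecognitionDataset()
train_dl = DataLoader(dataset, sampler=sampler, batch_size=None, num_workers=0)
```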
+2024-08-12 23:55:52,973 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-12 23:55:52,980 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-12 23:55:53,007 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-12 23:55:53,413 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-12 23:55:53,414 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-12 23:57:01,485 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.64 vs. limit=7.5 +2024-08-12 23:57:02,259 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 7517MB +2024-08-12 23:57:03,571 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 7703MB +2024-08-12 23:58:07,741 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 7703MB +2024-08-12 23:58:09,123 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 7703MB +2024-08-12 23:59:32,122 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 7703MB +2024-08-12 23:59:33,643 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 7703MB +2024-08-13 00:00:07,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.39 vs. limit=15.0 +2024-08-13 00:00:14,081 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3133, simple_loss=0.2969, pruned_loss=0.1634, over 11286.00 frames. ], tot_loss[loss=0.3133, simple_loss=0.2969, pruned_loss=0.1634, over 11286.00 frames. ], batch size: 40, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 00:00:14,082 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-13 00:19:18,432 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. +2024-08-13 00:19:18,433 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 10601MB +2024-08-13 00:23:10,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1 +2024-08-13 00:27:26,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=100000.0, ans=0.0 +2024-08-13 00:29:24,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-13 00:31:33,245 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+02 6.474e+02 7.040e+02 7.172e+02 7.430e+02, threshold=2.816e+03, percent-clipped=0.0 +2024-08-13 00:34:34,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=100033.33333333333, ans=0.2 +2024-08-13 00:39:52,000 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.06 vs. 
limit=15.0 +2024-08-13 00:41:15,029 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 5.541e+02 6.558e+02 7.172e+02 7.522e+02, threshold=2.623e+03, percent-clipped=0.0 +2024-08-13 01:24:24,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=100100.0, ans=0.0 +2024-08-13 01:25:52,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.705e+02 5.003e+02 5.937e+02 6.682e+02 7.522e+02, threshold=2.375e+03, percent-clipped=0.0 +2024-08-13 01:32:50,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=51.69 vs. limit=15.0 +2024-08-13 01:38:07,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100166.66666666667, ans=0.125 +2024-08-13 01:38:09,239 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.3475, simple_loss=0.3283, pruned_loss=0.19, over 11995.00 frames. ], tot_loss[loss=0.354, simple_loss=0.3337, pruned_loss=0.2009, over 516736.81 frames. ], batch size: 64, lr: 5.51e-05, grad_scale: 2.0 +2024-08-13 01:38:49,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100166.66666666667, ans=0.1 +2024-08-13 01:39:06,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.95 vs. limit=15.0 +2024-08-13 01:39:45,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.83 vs. limit=15.0 +2024-08-13 01:51:37,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=100200.0, ans=0.125 +2024-08-13 01:52:06,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=100200.0, ans=0.0 +2024-08-13 01:57:33,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=19.70 vs. limit=15.0 +2024-08-13 01:59:18,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=100233.33333333333, ans=0.2 +2024-08-13 02:08:25,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=100266.66666666667, ans=0.2 +2024-08-13 02:08:25,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=23.65 vs. limit=15.0 +2024-08-13 02:08:25,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=33.13 vs. limit=22.5 +2024-08-13 02:09:01,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-13 02:10:54,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=23.85 vs. 
limit=15.0 +2024-08-13 02:15:48,560 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 3.979e+02 4.793e+02 5.640e+02 7.522e+02, threshold=9.587e+02, percent-clipped=0.0 +2024-08-13 02:15:48,594 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3244, simple_loss=0.3059, pruned_loss=0.1802, over 11924.00 frames. ], tot_loss[loss=0.3395, simple_loss=0.3204, pruned_loss=0.1889, over 916658.53 frames. ], batch size: 96, lr: 6.01e-05, grad_scale: 4.0 +2024-08-13 02:17:02,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.59 vs. limit=15.0 +2024-08-13 02:31:19,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100400.0, ans=0.1 +2024-08-13 02:33:11,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100400.0, ans=0.125 +2024-08-13 02:43:23,334 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 150, loss[loss=0.352, simple_loss=0.3315, pruned_loss=0.1967, over 11776.00 frames. ], tot_loss[loss=0.3326, simple_loss=0.3141, pruned_loss=0.1828, over 1229118.95 frames. ], batch size: 69, lr: 6.51e-05, grad_scale: 4.0 +2024-08-13 03:00:25,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.91 vs. limit=15.0 +2024-08-13 03:03:09,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100566.66666666667, ans=0.125 +2024-08-13 03:05:50,295 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-13 03:13:12,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=100600.0, ans=0.0 +2024-08-13 03:30:44,923 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 2.802e+02 3.112e+02 3.482e+02 4.513e+02, threshold=6.224e+02, percent-clipped=0.0 +2024-08-13 03:30:44,957 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 200, loss[loss=0.2976, simple_loss=0.2823, pruned_loss=0.1549, over 11600.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3065, pruned_loss=0.1754, over 1472852.73 frames. 
], batch size: 46, lr: 7.01e-05, grad_scale: 8.0 +2024-08-13 03:31:01,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=100666.66666666667, ans=0.125 +2024-08-13 03:34:30,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=100666.66666666667, ans=0.2 +2024-08-13 03:35:46,990 INFO [dysarthria_finetune.py:1435] (2/4) (19120455680, 34072559616) +2024-08-13 03:35:46,990 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-13 03:35:47,028 INFO [dysarthria_finetune.py:1440] (2/4) (30036131840, 34072559616) diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-3 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-3 new file mode 100644 index 0000000000000000000000000000000000000000..bfef760ef3a224104dfa926bfb96fce374a4f8dd --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-12-23-55-25-3 @@ -0,0 +1,65 @@ +2024-08-12 23:55:25,898 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-12 23:55:31,887 INFO [dysarthria_finetune.py:1214] (3/4) (33427226624, 34072559616) +2024-08-12 23:55:31,888 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-12 23:55:32,263 INFO [dysarthria_finetune.py:1219] (3/4) (33427226624, 34072559616) +2024-08-12 23:55:32,264 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-12 23:55:32,267 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2650.int.cedar.computecanada.ca', 'IP address': '172.16.146.87'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 
'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 500, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-12 23:55:32,267 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-12 23:55:32,956 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65549011 +2024-08-12 23:55:32,956 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-12 23:55:39,186 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-12 23:55:48,038 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-12 23:55:48,316 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-12 23:55:49,218 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-12 23:55:49,219 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-12 23:55:49,219 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-12 23:55:49,219 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-12 23:55:49,219 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-12 23:55:52,031 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-12 23:55:52,973 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-12 23:55:52,980 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts +2024-08-12 23:55:53,007 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-12 23:55:53,411 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-12 23:55:53,411 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-12 23:57:01,485 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=9.13 vs. 
limit=7.5 +2024-08-12 23:57:02,259 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 7527MB +2024-08-12 23:57:03,571 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 7720MB +2024-08-12 23:58:07,741 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 7720MB +2024-08-12 23:58:09,122 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 7720MB +2024-08-12 23:59:32,122 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 7720MB +2024-08-12 23:59:33,639 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 7720MB +2024-08-13 00:00:04,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.84 vs. limit=15.0 +2024-08-13 00:00:14,080 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.3627, simple_loss=0.3426, pruned_loss=0.201, over 11386.00 frames. ], tot_loss[loss=0.3627, simple_loss=0.3426, pruned_loss=0.201, over 11386.00 frames. ], batch size: 40, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 00:00:14,080 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-13 00:19:18,431 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. +2024-08-13 00:19:18,432 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 9924MB +2024-08-13 00:29:27,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=100000.0, ans=0.0 +2024-08-13 00:31:33,245 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+02 6.474e+02 7.040e+02 7.172e+02 7.430e+02, threshold=2.816e+03, percent-clipped=0.0 +2024-08-13 00:34:40,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.38 vs. limit=22.5 +2024-08-13 00:39:47,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.97 vs. limit=15.0 +2024-08-13 00:41:15,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.070e+02 5.541e+02 6.558e+02 7.172e+02 7.522e+02, threshold=2.623e+03, percent-clipped=0.0 +2024-08-13 00:55:16,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.97 vs. limit=15.0 +2024-08-13 01:25:52,222 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.705e+02 5.003e+02 5.937e+02 6.682e+02 7.522e+02, threshold=2.375e+03, percent-clipped=0.0 +2024-08-13 01:37:51,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100133.33333333333, ans=0.125 +2024-08-13 01:38:09,238 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.3556, simple_loss=0.3339, pruned_loss=0.2114, over 11934.00 frames. ], tot_loss[loss=0.35, simple_loss=0.3301, pruned_loss=0.1974, over 516465.20 frames. ], batch size: 64, lr: 5.51e-05, grad_scale: 2.0 +2024-08-13 01:51:19,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.63 vs. 
limit=15.0 +2024-08-13 01:57:33,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100233.33333333333, ans=0.0 +2024-08-13 02:15:46,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=100333.33333333333, ans=0.025 +2024-08-13 02:15:48,560 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.801e+02 3.979e+02 4.793e+02 5.640e+02 7.522e+02, threshold=9.587e+02, percent-clipped=0.0 +2024-08-13 02:15:48,594 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.3405, simple_loss=0.3201, pruned_loss=0.1962, over 11888.00 frames. ], tot_loss[loss=0.3384, simple_loss=0.3194, pruned_loss=0.1878, over 916989.95 frames. ], batch size: 96, lr: 6.01e-05, grad_scale: 4.0 +2024-08-13 02:16:07,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100333.33333333333, ans=0.1 +2024-08-13 02:31:17,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=100400.0, ans=0.125 +2024-08-13 02:38:48,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100433.33333333333, ans=0.125 +2024-08-13 02:39:58,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=100466.66666666667, ans=0.025 +2024-08-13 02:42:00,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.71 vs. limit=22.5 +2024-08-13 02:43:23,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=12.0 +2024-08-13 02:43:23,330 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 150, loss[loss=0.2971, simple_loss=0.282, pruned_loss=0.1533, over 11775.00 frames. ], tot_loss[loss=0.3328, simple_loss=0.3143, pruned_loss=0.1825, over 1227470.66 frames. ], batch size: 69, lr: 6.51e-05, grad_scale: 4.0 +2024-08-13 02:45:30,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.87 vs. limit=22.5 +2024-08-13 03:00:46,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=100566.66666666667, ans=0.0 +2024-08-13 03:13:14,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.74 vs. limit=22.5 +2024-08-13 03:27:59,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.45 vs. limit=10.0 +2024-08-13 03:30:44,927 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 2.802e+02 3.112e+02 3.482e+02 4.513e+02, threshold=6.224e+02, percent-clipped=0.0 +2024-08-13 03:30:44,963 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 200, loss[loss=0.3044, simple_loss=0.2881, pruned_loss=0.1615, over 11055.00 frames. ], tot_loss[loss=0.3247, simple_loss=0.3069, pruned_loss=0.1758, over 1470172.09 frames. 
], batch size: 23, lr: 7.01e-05, grad_scale: 8.0 +2024-08-13 03:35:46,991 INFO [dysarthria_finetune.py:1435] (3/4) (20447952896, 34072559616) +2024-08-13 03:35:46,991 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-13 03:35:47,027 INFO [dysarthria_finetune.py:1440] (3/4) (30206001152, 34072559616) diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-50-0 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-50-0 new file mode 100644 index 0000000000000000000000000000000000000000..7c8d819824b184305b24776f453440d64e0b1824 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-50-0 @@ -0,0 +1,49 @@ +2024-08-13 20:03:50,982 INFO [dysarthria_finetune.py:1212] (0/4) Training started +2024-08-13 20:03:51,865 INFO [dysarthria_finetune.py:1214] (0/4) (32783400960, 34072559616) +2024-08-13 20:03:51,866 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after +2024-08-13 20:03:52,354 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616) +2024-08-13 20:03:52,359 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0 +2024-08-13 20:03:52,362 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-13 20:03:52,362 INFO [dysarthria_finetune.py:1243] (0/4) About to create model +2024-08-13 20:03:53,072 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65549011 +2024-08-13 20:03:53,634 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-13 20:09:02,288 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP +2024-08-13 20:09:05,915 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts +2024-08-13 20:09:06,006 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ] +2024-08-13 20:09:06,260 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN +2024-08-13 20:09:06,260 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment +2024-08-13 20:09:06,260 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80 +2024-08-13 20:09:06,260 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10 +2024-08-13 20:09:06,261 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset +2024-08-13 20:09:07,385 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler. +2024-08-13 20:09:08,328 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader +2024-08-13 20:09:08,330 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts +2024-08-13 20:09:08,452 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset +2024-08-13 20:09:08,941 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader +2024-08-13 20:09:08,942 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-13 20:09:46,138 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=24.02 vs. limit=7.5 +2024-08-13 20:09:46,398 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=18.77 vs. limit=7.5 +2024-08-13 20:09:47,191 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB +2024-08-13 20:09:48,993 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB +2024-08-13 20:12:21,529 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB +2024-08-13 20:12:23,563 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB +2024-08-13 20:17:32,565 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=8.74 vs. 
limit=5.0 +2024-08-13 20:17:33,028 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB +2024-08-13 20:17:35,209 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB +2024-08-13 20:19:00,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.28 vs. limit=12.0 +2024-08-13 20:19:23,403 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.288, simple_loss=0.2741, pruned_loss=0.1393, over 18513.00 frames. ], tot_loss[loss=0.288, simple_loss=0.2741, pruned_loss=0.1393, over 18513.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 20:19:23,404 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-13 20:50:10,155 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. +2024-08-13 20:50:10,168 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-13 20:57:02,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100000.0, ans=0.0 +2024-08-13 20:57:02,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.07 vs. limit=15.0 +2024-08-13 21:11:38,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1 +2024-08-13 21:19:40,364 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.297e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0 +2024-08-13 21:53:14,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.448e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0 +2024-08-13 22:20:37,285 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+02 7.297e+02 9.456e+02 1.050e+03 1.319e+03, threshold=3.783e+03, percent-clipped=0.0 +2024-08-13 22:22:33,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-13 22:33:06,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.09 vs. limit=15.0 +2024-08-13 22:42:42,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=100266.66666666667, ans=0.0 +2024-08-13 22:42:43,527 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.3521, simple_loss=0.3326, pruned_loss=0.1925, over 18890.00 frames. ], tot_loss[loss=0.3518, simple_loss=0.3318, pruned_loss=0.1981, over 828692.51 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-13 23:00:28,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-13 23:01:28,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.49 vs. 
limit=6.0 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-1 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-1 new file mode 100644 index 0000000000000000000000000000000000000000..d233d9fb220c0e306db2a08c64aee0225c9601c7 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-1 @@ -0,0 +1,47 @@ +2024-08-13 20:03:51,119 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-13 20:03:51,121 INFO [dysarthria_finetune.py:1214] (1/4) (33748090880, 34072559616) +2024-08-13 20:03:51,121 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-13 20:03:51,865 INFO [dysarthria_finetune.py:1219] (1/4) (33414643712, 34072559616) +2024-08-13 20:03:51,866 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-13 20:03:51,966 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-13 20:03:51,967 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-13 20:03:52,865 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65549011 +2024-08-13 20:03:52,865 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-13 20:09:01,829 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-13 20:09:05,914 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-13 20:09:06,006 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-13 20:09:07,252 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-13 20:09:08,186 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-13 20:09:08,192 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-13 20:09:08,452 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-13 20:09:08,933 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-13 20:09:08,933 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-13 20:09:46,148 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=19.93 vs. limit=7.5 +2024-08-13 20:09:46,398 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=16.77 vs. limit=7.5 +2024-08-13 20:09:47,196 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 20:09:48,988 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 20:12:21,524 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 20:12:23,562 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 20:17:33,027 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 20:17:35,210 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 20:19:23,405 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.343, simple_loss=0.3241, pruned_loss=0.1887, over 18549.00 frames. ], tot_loss[loss=0.343, simple_loss=0.3241, pruned_loss=0.1887, over 18549.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 20:19:23,405 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-13 20:50:10,151 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. 
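The repeated "Maximum memory allocated so far is ...MB" lines in the OOM sanity check report CUDA's allocation high-water mark for the process. A one-function sketch of how such a figure can be produced; the exact call and formatting inside dysarthria_finetune.py are assumptions.

```python
import torch

def peak_memory_mb(device: torch.device) -> int:
    # torch.cuda.max_memory_allocated returns the high-water mark, in
    # bytes, of memory ever allocated on this device by the process.
    return torch.cuda.max_memory_allocated(device) // (1024 * 1024)

# e.g. logging.info(f"Maximum memory allocated so far is {peak_memory_mb(dev)}MB")
```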
+2024-08-13 20:50:10,168 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13098MB +2024-08-13 20:52:19,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-13 20:56:44,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100000.0, ans=0.125 +2024-08-13 21:08:48,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=15.0 +2024-08-13 21:19:40,364 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.297e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0 +2024-08-13 21:37:57,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.79 vs. limit=15.0 +2024-08-13 21:49:24,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.96 vs. limit=10.0 +2024-08-13 21:53:14,153 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.448e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0 +2024-08-13 22:17:29,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=100160.0, ans=0.0 +2024-08-13 22:20:37,286 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+02 7.297e+02 9.456e+02 1.050e+03 1.319e+03, threshold=3.783e+03, percent-clipped=0.0 +2024-08-13 22:29:43,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-13 22:42:43,509 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.3541, simple_loss=0.3336, pruned_loss=0.2013, over 19042.00 frames. ], tot_loss[loss=0.3545, simple_loss=0.3342, pruned_loss=0.2019, over 827432.33 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-13 23:00:01,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=31.42 vs. 
limit=15.0 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-2 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-2 new file mode 100644 index 0000000000000000000000000000000000000000..fd7cf3e37c1ab881a485f101ef0d84b97c6b63c7 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-2 @@ -0,0 +1,51 @@ +2024-08-13 20:03:51,121 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-13 20:03:51,121 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-13 20:03:51,122 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-13 20:03:51,865 INFO [dysarthria_finetune.py:1219] (2/4) (33427226624, 34072559616) +2024-08-13 20:03:51,866 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-13 20:03:51,966 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-13 20:03:51,966 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-13 20:03:52,859 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65549011 +2024-08-13 20:03:52,859 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-13 20:09:01,828 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-13 20:09:05,910 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-13 20:09:06,006 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-13 20:09:07,254 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-13 20:09:08,185 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-13 20:09:08,192 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-13 20:09:08,452 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-13 20:09:08,931 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-13 20:09:08,932 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-13 20:09:46,142 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=22.66 vs. limit=7.5 +2024-08-13 20:09:46,399 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=17.80 vs. limit=7.5 +2024-08-13 20:09:47,191 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 20:09:48,990 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 20:12:21,524 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 20:12:23,562 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 20:17:33,028 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 20:17:35,210 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 20:19:23,426 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3479, simple_loss=0.3286, pruned_loss=0.1929, over 18533.00 frames. ], tot_loss[loss=0.3479, simple_loss=0.3286, pruned_loss=0.1929, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 20:19:23,427 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-13 20:50:10,155 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. 
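The bare pairs printed around "Empty cache: before and after", e.g. (33748090880, 34072559616), have the (free_bytes, total_bytes) shape that torch.cuda.mem_get_info() returns, and the ~34 GB total is consistent with a 32 GB-class GPU. A hedged sketch of that bracket; whether the recipe calls mem_get_info directly is an assumption.

```python
import torch

def empty_cache_bracket(device: torch.device) -> None:
    # (free_bytes, total_bytes) before releasing cached blocks ...
    print(torch.cuda.mem_get_info(device))
    torch.cuda.empty_cache()  # return unused cached memory to the driver
    # ... and after, which is why the second tuple in the log shows
    # more free memory than the first.
    print(torch.cuda.mem_get_info(device))
```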
+2024-08-13 20:50:10,169 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19754MB
+2024-08-13 20:52:07,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.11 vs. limit=22.5
+2024-08-13 20:52:19,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1
+2024-08-13 20:56:34,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.08 vs. limit=15.0
+2024-08-13 21:11:02,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=100000.0, ans=0.09899494936611666
+2024-08-13 21:19:40,365 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.297e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0
+2024-08-13 21:35:57,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0
+2024-08-13 21:37:57,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.66 vs. limit=15.0
+2024-08-13 21:50:07,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.42 vs. limit=15.0
+2024-08-13 21:53:14,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.448e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0
+2024-08-13 22:18:01,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.70 vs. limit=15.0
+2024-08-13 22:20:37,282 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+02 7.297e+02 9.456e+02 1.050e+03 1.319e+03, threshold=3.783e+03, percent-clipped=0.0
+2024-08-13 22:31:43,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100213.33333333333, ans=0.125
+2024-08-13 22:42:35,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100266.66666666667, ans=0.125
+2024-08-13 22:42:43,511 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.3626, simple_loss=0.3409, pruned_loss=0.2122, over 19018.00 frames. ], tot_loss[loss=0.3542, simple_loss=0.3338, pruned_loss=0.2019, over 827419.58 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0
+2024-08-13 23:01:23,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100266.66666666667, ans=0.1
+2024-08-13 23:01:23,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=24.05 vs. limit=15.0
diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-3 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-3
new file mode 100644
index 0000000000000000000000000000000000000000..1b0352c53ce1e6e2519901fda80e3c84d038aece
--- /dev/null
+++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-20-03-51-3
@@ -0,0 +1,47 @@
+2024-08-13 20:03:51,142 INFO [dysarthria_finetune.py:1212] (3/4) Training started
+2024-08-13 20:03:51,534 INFO [dysarthria_finetune.py:1214] (3/4) (33735507968, 34072559616)
+2024-08-13 20:03:51,534 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after
+2024-08-13 20:03:52,261 INFO [dysarthria_finetune.py:1219] (3/4) (33093779456, 34072559616)
+2024-08-13 20:03:52,262 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3
+2024-08-13 20:03:52,265 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-13 20:03:52,265 INFO [dysarthria_finetune.py:1243] (3/4) About to create model
+2024-08-13 20:03:52,953 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65549011
+2024-08-13 20:03:52,953 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt
+2024-08-13 20:09:01,804 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP
+2024-08-13 20:09:05,910 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts
+2024-08-13 20:09:06,006 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ]
+2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN
+2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment
+2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80
+2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10
+2024-08-13 20:09:06,258 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset
+2024-08-13 20:09:07,252 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler.
+2024-08-13 20:09:08,194 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader
+2024-08-13 20:09:08,195 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts
+2024-08-13 20:09:08,452 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset
+2024-08-13 20:09:08,932 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader
+2024-08-13 20:09:08,932 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-13 20:09:46,142 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=17.38 vs. limit=7.5
+2024-08-13 20:09:46,399 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=19.31 vs. limit=7.5
+2024-08-13 20:09:47,191 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB
+2024-08-13 20:09:48,992 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB
+2024-08-13 20:12:21,527 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB
+2024-08-13 20:12:23,566 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB
+2024-08-13 20:17:33,029 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB
+2024-08-13 20:17:35,212 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB
+2024-08-13 20:19:23,427 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.2854, simple_loss=0.2712, pruned_loss=0.1421, over 18634.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.2712, pruned_loss=0.1421, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-13 20:19:23,428 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech
+2024-08-13 20:50:10,155 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames.
+2024-08-13 20:50:10,169 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14284MB
+2024-08-13 20:52:17,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.78 vs. limit=22.5
+2024-08-13 20:52:19,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1
+2024-08-13 20:56:49,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100000.0, ans=0.125
+2024-08-13 21:08:40,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=22.5
+2024-08-13 21:19:40,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.297e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0
+2024-08-13 21:53:14,153 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.448e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0
+2024-08-13 22:20:37,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+02 7.297e+02 9.456e+02 1.050e+03 1.319e+03, threshold=3.783e+03, percent-clipped=0.0
+2024-08-13 22:29:37,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=100213.33333333333, ans=0.0
+2024-08-13 22:31:29,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=100213.33333333333, ans=0.025
+2024-08-13 22:37:35,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100213.33333333333, ans=0.1
+2024-08-13 22:42:43,511 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.3748, simple_loss=0.3525, pruned_loss=0.2175, over 19001.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.3337, pruned_loss=0.1999, over 828973.50 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0
+2024-08-13 23:01:29,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100266.66666666667, ans=0.125
diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-0 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-0
new file mode 100644
index 0000000000000000000000000000000000000000..d99eb9d3b921bcc92417c034b90e071385056a36
--- /dev/null
+++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-0
@@ -0,0 +1,565 @@
+2024-08-13 23:24:47,605 INFO [dysarthria_finetune.py:1212] (0/4) Training started
+2024-08-13 23:24:47,914 INFO [dysarthria_finetune.py:1214] (0/4) (33748090880, 34072559616)
+2024-08-13 23:24:47,914 INFO [dysarthria_finetune.py:1215] (0/4) Empty cache: before and after
+2024-08-13 23:24:48,924 INFO [dysarthria_finetune.py:1219] (0/4) (32783400960, 34072559616)
+2024-08-13 23:24:48,929 INFO [dysarthria_finetune.py:1229] (0/4) Device: cuda:0
+2024-08-13 23:24:48,990 INFO [dysarthria_finetune.py:1241] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500}
+2024-08-13 23:24:48,990 INFO [dysarthria_finetune.py:1243] (0/4) About to create model
+2024-08-13 23:24:49,990 INFO [dysarthria_finetune.py:1247] (0/4) Number of model parameters: 65549011
+2024-08-13 23:24:50,530 INFO [dysarthria_finetune.py:769] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt
+2024-08-13 23:25:00,373 INFO [dysarthria_finetune.py:1275] (0/4) Using DDP
+2024-08-13 23:25:17,950 INFO [dysarthria_asr_datamodule.py:494] (0/4) About to get train cuts
+2024-08-13 23:25:18,291 INFO [dysarthria_finetune.py:1319] (0/4) CutSet(len=62255) [underlying data type: ]
+2024-08-13 23:25:18,620 INFO [dysarthria_asr_datamodule.py:239] (0/4) Disable MUSAN
+2024-08-13 23:25:18,620 INFO [dysarthria_asr_datamodule.py:257] (0/4) Enable SpecAugment
+2024-08-13 23:25:18,621 INFO [dysarthria_asr_datamodule.py:258] (0/4) Time warp factor: 80
+2024-08-13 23:25:18,621 INFO [dysarthria_asr_datamodule.py:268] (0/4) Num frame mask: 10
+2024-08-13 23:25:18,621 INFO [dysarthria_asr_datamodule.py:281] (0/4) About to create train dataset
+2024-08-13 23:25:19,390 INFO [dysarthria_asr_datamodule.py:308] (0/4) Using DynamicBucketingSampler.
+2024-08-13 23:25:20,348 INFO [dysarthria_asr_datamodule.py:325] (0/4) About to create train dataloader
+2024-08-13 23:25:24,505 INFO [dysarthria_asr_datamodule.py:500] (0/4) About to get dev cuts
+2024-08-13 23:25:24,698 INFO [dysarthria_asr_datamodule.py:356] (0/4) About to create dev dataset
+2024-08-13 23:25:28,032 INFO [dysarthria_asr_datamodule.py:373] (0/4) About to create dev dataloader
+2024-08-13 23:25:28,032 INFO [dysarthria_finetune.py:1490] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-13 23:27:16,792 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=24.02 vs. limit=7.5
+2024-08-13 23:27:17,110 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=18.77 vs. limit=7.5
+2024-08-13 23:27:17,919 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB
+2024-08-13 23:27:19,736 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB
+2024-08-13 23:32:34,804 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB
+2024-08-13 23:32:36,803 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB
+2024-08-13 23:35:37,938 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=8.74 vs. limit=5.0
+2024-08-13 23:35:38,400 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB
+2024-08-13 23:35:41,667 INFO [dysarthria_finetune.py:1518] (0/4) Maximum memory allocated so far is 11707MB
+2024-08-13 23:36:57,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.28 vs. limit=12.0
+2024-08-13 23:36:58,730 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 0, loss[loss=0.288, simple_loss=0.2741, pruned_loss=0.1393, over 18513.00 frames. ], tot_loss[loss=0.288, simple_loss=0.2741, pruned_loss=0.1393, over 18513.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0
+2024-08-13 23:36:58,732 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-13 23:49:47,046 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames.
+2024-08-13 23:49:47,367 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-13 23:56:08,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100000.0, ans=0.0
+2024-08-13 23:56:08,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.07 vs. limit=15.0
+2024-08-14 00:02:40,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1
+2024-08-14 00:15:49,520 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.298e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0
+2024-08-14 00:23:15,347 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.450e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0
+2024-08-14 00:29:53,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 7.298e+02 9.450e+02 1.050e+03 1.319e+03, threshold=3.780e+03, percent-clipped=0.0
+2024-08-14 00:30:11,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100213.33333333333, ans=0.125
+2024-08-14 00:34:46,097 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.15 vs. limit=15.0
+2024-08-14 00:36:59,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=100266.66666666667, ans=0.0
+2024-08-14 00:37:01,628 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 50, loss[loss=0.352, simple_loss=0.3326, pruned_loss=0.1925, over 18890.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3318, pruned_loss=0.198, over 828692.51 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0
+2024-08-14 00:42:26,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=100266.66666666667, ans=0.125
+2024-08-14 00:42:53,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.49 vs. limit=6.0
+2024-08-14 00:46:02,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.91 vs. limit=10.0
+2024-08-14 00:46:59,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.53 vs. limit=15.0
+2024-08-14 01:01:58,595 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+02 5.963e+02 7.298e+02 8.800e+02 1.319e+03, threshold=1.460e+03, percent-clipped=0.0
+2024-08-14 01:01:58,631 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 1, batch 100, loss[loss=0.3331, simple_loss=0.3136, pruned_loss=0.1882, over 19293.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.316, pruned_loss=0.1854, over 1474004.25 frames. ], batch size: 144, lr: 6.01e-05, grad_scale: 4.0
+2024-08-14 01:10:23,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100640.0, ans=0.1
+2024-08-14 01:10:23,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.39 vs. limit=15.0
+2024-08-14 01:11:48,029 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-1.pt
+2024-08-14 01:12:16,343 INFO [dysarthria_finetune.py:1435] (0/4) (910884864, 34072559616)
+2024-08-14 01:12:16,343 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:12:16,370 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:12:35,400 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 0, loss[loss=0.2799, simple_loss=0.2683, pruned_loss=0.1286, over 18874.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.2683, pruned_loss=0.1286, over 18874.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0
+2024-08-14 01:12:35,401 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:16:55,991 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 2, validation: loss=0.2907, simple_loss=0.276, pruned_loss=0.149, over 1073944.00 frames.
+2024-08-14 01:16:55,992 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:19:27,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=100736.0, ans=0.025
+2024-08-14 01:20:03,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100736.0, ans=0.125
+2024-08-14 01:20:03,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.23 vs. limit=15.0
+2024-08-14 01:20:52,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=100789.33333333333, ans=0.125
+2024-08-14 01:21:45,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=29.10 vs. limit=15.0
+2024-08-14 01:21:59,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100842.66666666667, ans=0.125
+2024-08-14 01:22:26,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100896.0, ans=0.125
+2024-08-14 01:22:31,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=100896.0, ans=0.0
+2024-08-14 01:24:54,901 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 50, loss[loss=0.3168, simple_loss=0.2976, pruned_loss=0.1804, over 18964.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3039, pruned_loss=0.1711, over 826819.73 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0
+2024-08-14 01:25:01,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=100949.33333333333, ans=0.04949747468305833
+2024-08-14 01:26:04,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101002.66666666667, ans=0.1
+2024-08-14 01:26:13,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101002.66666666667, ans=0.125
+2024-08-14 01:27:29,015 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 4.347e+02 4.852e+02 5.543e+02 7.043e+02, threshold=9.703e+02, percent-clipped=0.0
+2024-08-14 01:27:52,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101056.0, ans=0.125
+2024-08-14 01:28:07,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=101109.33333333333, ans=0.125
+2024-08-14 01:28:13,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=15.0
+2024-08-14 01:28:52,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0
+2024-08-14 01:29:00,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=101216.0, ans=0.125
+2024-08-14 01:29:06,635 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 2, batch 100, loss[loss=0.3529, simple_loss=0.3322, pruned_loss=0.1957, over 19229.00 frames. ], tot_loss[loss=0.3105, simple_loss=0.2942, pruned_loss=0.163, over 1473154.80 frames. ], batch size: 144, lr: 7.29e-05, grad_scale: 8.0
+2024-08-14 01:29:19,189 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.97 vs. limit=6.0
+2024-08-14 01:29:33,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101216.0, ans=0.125
+2024-08-14 01:29:44,800 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-14 01:30:25,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101322.66666666667, ans=0.1
+2024-08-14 01:30:32,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101322.66666666667, ans=0.1
+2024-08-14 01:30:42,041 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-2.pt
+2024-08-14 01:30:46,426 INFO [dysarthria_finetune.py:1435] (0/4) (856358912, 34072559616)
+2024-08-14 01:30:46,427 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:30:46,453 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:30:54,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0
+2024-08-14 01:30:55,539 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 0, loss[loss=0.2949, simple_loss=0.2798, pruned_loss=0.1535, over 18603.00 frames. ], tot_loss[loss=0.2949, simple_loss=0.2798, pruned_loss=0.1535, over 18603.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0
+2024-08-14 01:30:55,540 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:31:18,579 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 3, validation: loss=0.2682, simple_loss=0.2564, pruned_loss=0.1309, over 1073944.00 frames.
+2024-08-14 01:31:18,579 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:31:50,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101424.0, ans=0.1
+2024-08-14 01:32:09,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=101477.33333333333, ans=0.05
+2024-08-14 01:32:14,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.10 vs. limit=22.5
+2024-08-14 01:32:25,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=101477.33333333333, ans=0.0
+2024-08-14 01:32:41,640 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0
+2024-08-14 01:32:57,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101584.0, ans=0.1
+2024-08-14 01:32:58,606 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 3.350e+02 3.692e+02 4.154e+02 5.648e+02, threshold=7.384e+02, percent-clipped=0.0
+2024-08-14 01:33:11,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101584.0, ans=0.125
+2024-08-14 01:33:11,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=101584.0, ans=0.2
+2024-08-14 01:33:14,925 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 50, loss[loss=0.3053, simple_loss=0.2895, pruned_loss=0.1595, over 18964.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.28, pruned_loss=0.1503, over 827741.27 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 16.0
+2024-08-14 01:33:16,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=101637.33333333333, ans=0.2
+2024-08-14 01:33:18,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101637.33333333333, ans=0.0
+2024-08-14 01:33:58,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101690.66666666667, ans=0.1
+2024-08-14 01:34:29,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.91 vs. limit=15.0
+2024-08-14 01:34:33,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=101797.33333333333, ans=0.5
+2024-08-14 01:34:33,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.82 vs. limit=22.5
+2024-08-14 01:34:46,263 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=15.0
+2024-08-14 01:34:59,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=101850.66666666667, ans=0.2
+2024-08-14 01:35:02,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.23 vs. limit=22.5
+2024-08-14 01:35:08,700 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 3, batch 100, loss[loss=0.2793, simple_loss=0.2658, pruned_loss=0.1428, over 19231.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.2737, pruned_loss=0.1451, over 1473938.15 frames. ], batch size: 144, lr: 8.58e-05, grad_scale: 16.0
+2024-08-14 01:35:27,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.83 vs. limit=22.5
+2024-08-14 01:35:53,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=102010.66666666667, ans=0.0
+2024-08-14 01:36:03,991 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-3.pt
+2024-08-14 01:36:11,345 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 01:36:11,346 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:36:11,389 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:36:20,154 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 0, loss[loss=0.2718, simple_loss=0.2572, pruned_loss=0.144, over 18523.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.2572, pruned_loss=0.144, over 18523.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0
+2024-08-14 01:36:20,154 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:36:43,053 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 4, validation: loss=0.2499, simple_loss=0.241, pruned_loss=0.1173, over 1073944.00 frames.
+2024-08-14 01:36:43,054 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:36:57,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0
+2024-08-14 01:37:14,066 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.440e+02 2.841e+02 3.076e+02 3.396e+02 5.357e+02, threshold=6.153e+02, percent-clipped=0.0
+2024-08-14 01:37:15,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=102106.66666666667, ans=0.1
+2024-08-14 01:37:21,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102106.66666666667, ans=0.125
+2024-08-14 01:37:34,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=102160.0, ans=6.0
+2024-08-14 01:37:37,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=102160.0, ans=0.125
+2024-08-14 01:37:49,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=102213.33333333333, ans=0.04949747468305833
+2024-08-14 01:37:57,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=102213.33333333333, ans=0.2
+2024-08-14 01:38:22,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=102320.0, ans=0.2
+2024-08-14 01:38:23,458 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 50, loss[loss=0.3128, simple_loss=0.2964, pruned_loss=0.1644, over 18961.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.2618, pruned_loss=0.1348, over 828586.64 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0
+2024-08-14 01:38:30,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=102320.0, ans=0.0
+2024-08-14 01:38:44,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=102373.33333333333, ans=0.125
+2024-08-14 01:39:17,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=102426.66666666667, ans=0.2
+2024-08-14 01:39:21,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=102480.0, ans=0.0
+2024-08-14 01:39:25,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102480.0, ans=0.0
+2024-08-14 01:39:39,295 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-14 01:39:54,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.20 vs. limit=15.0
+2024-08-14 01:40:00,898 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 4, batch 100, loss[loss=0.2781, simple_loss=0.2654, pruned_loss=0.1412, over 19286.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.2562, pruned_loss=0.1315, over 1474147.24 frames. ], batch size: 144, lr: 9.86e-05, grad_scale: 32.0
+2024-08-14 01:40:30,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 2.524e+02 2.719e+02 2.975e+02 4.617e+02, threshold=5.438e+02, percent-clipped=0.0
+2024-08-14 01:40:43,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0
+2024-08-14 01:40:50,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=102693.33333333333, ans=0.0
+2024-08-14 01:40:55,799 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-4.pt
+2024-08-14 01:41:02,216 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 01:41:02,216 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:41:02,245 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:41:11,068 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 0, loss[loss=0.2267, simple_loss=0.2196, pruned_loss=0.1067, over 18549.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2196, pruned_loss=0.1067, over 18549.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0
+2024-08-14 01:41:11,069 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:41:34,534 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 5, validation: loss=0.2343, simple_loss=0.2283, pruned_loss=0.1066, over 1073944.00 frames.
+2024-08-14 01:41:34,534 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:42:12,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102794.66666666667, ans=0.125
+2024-08-14 01:42:18,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102794.66666666667, ans=0.0
+2024-08-14 01:42:28,401 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=12.34 vs. limit=15.0
+2024-08-14 01:42:57,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.60 vs. limit=22.5
+2024-08-14 01:43:29,161 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 50, loss[loss=0.2442, simple_loss=0.2374, pruned_loss=0.1139, over 19008.00 frames. ], tot_loss[loss=0.256, simple_loss=0.2474, pruned_loss=0.1226, over 828355.03 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0
+2024-08-14 01:43:32,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=103008.0, ans=0.125
+2024-08-14 01:44:14,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=103061.33333333333, ans=0.0
+2024-08-14 01:44:14,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=103061.33333333333, ans=15.0
+2024-08-14 01:44:20,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=103061.33333333333, ans=0.0
+2024-08-14 01:44:28,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103114.66666666667, ans=0.125
+2024-08-14 01:44:43,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.73 vs. limit=6.0
+2024-08-14 01:44:54,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=103168.0, ans=0.125
+2024-08-14 01:44:59,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.063e+02 2.398e+02 2.550e+02 2.967e+02 4.732e+02, threshold=5.099e+02, percent-clipped=0.0
+2024-08-14 01:45:00,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103168.0, ans=0.1
+2024-08-14 01:45:14,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=103221.33333333333, ans=0.125
+2024-08-14 01:45:27,207 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 5, batch 100, loss[loss=0.2437, simple_loss=0.2337, pruned_loss=0.1217, over 19287.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.2424, pruned_loss=0.1193, over 1473652.43 frames. ], batch size: 144, lr: 1.00e-04, grad_scale: 32.0
+2024-08-14 01:45:32,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103274.66666666667, ans=0.1
+2024-08-14 01:45:34,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103274.66666666667, ans=0.125
+2024-08-14 01:45:39,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=10.20 vs. limit=12.0
+2024-08-14 01:46:02,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=103328.0, ans=0.2
+2024-08-14 01:46:18,154 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-5.pt
+2024-08-14 01:46:22,554 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 01:46:22,554 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:46:22,581 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:46:32,108 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 0, loss[loss=0.2526, simple_loss=0.247, pruned_loss=0.1163, over 18610.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.247, pruned_loss=0.1163, over 18610.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:46:32,109 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:46:55,695 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 6, validation: loss=0.2214, simple_loss=0.2182, pruned_loss=0.09842, over 1073944.00 frames.
+2024-08-14 01:46:55,696 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:47:15,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.57 vs. limit=15.0
+2024-08-14 01:47:47,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=103477.33333333333, ans=0.07
+2024-08-14 01:48:17,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=103584.0, ans=0.0
+2024-08-14 01:48:20,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=103584.0, ans=0.125
+2024-08-14 01:48:22,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103584.0, ans=0.1
+2024-08-14 01:48:25,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103584.0, ans=0.125
+2024-08-14 01:48:40,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103584.0, ans=0.125
+2024-08-14 01:48:48,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.49 vs. limit=6.0
+2024-08-14 01:48:55,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.02 vs. limit=12.0
+2024-08-14 01:49:09,886 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 50, loss[loss=0.2322, simple_loss=0.2277, pruned_loss=0.1068, over 19047.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.2385, pruned_loss=0.1161, over 829577.21 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:49:16,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=103690.66666666667, ans=0.0
+2024-08-14 01:49:28,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103690.66666666667, ans=0.1
+2024-08-14 01:49:32,950 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 2.293e+02 2.374e+02 2.625e+02 4.193e+02, threshold=4.747e+02, percent-clipped=0.0
+2024-08-14 01:49:35,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.22 vs. limit=15.0
+2024-08-14 01:49:42,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103744.0, ans=0.125
+2024-08-14 01:49:56,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=14.45 vs. limit=15.0
+2024-08-14 01:51:17,782 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 6, batch 100, loss[loss=0.2261, simple_loss=0.2199, pruned_loss=0.1083, over 19232.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.2305, pruned_loss=0.11, over 1476247.28 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:51:52,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.08 vs. limit=15.0
+2024-08-14 01:51:55,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104010.66666666667, ans=0.125
+2024-08-14 01:52:05,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104010.66666666667, ans=0.1
+2024-08-14 01:52:05,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=104010.66666666667, ans=6.0
+2024-08-14 01:52:24,235 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-6.pt
+2024-08-14 01:52:28,823 INFO [dysarthria_finetune.py:1435] (0/4) (856358912, 34072559616)
+2024-08-14 01:52:28,823 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:52:28,850 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:52:37,503 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 0, loss[loss=0.236, simple_loss=0.2346, pruned_loss=0.1043, over 18570.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2346, pruned_loss=0.1043, over 18570.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:52:37,504 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:53:01,318 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 7, validation: loss=0.2103, simple_loss=0.2098, pruned_loss=0.0916, over 1073944.00 frames.
+2024-08-14 01:53:01,318 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:54:01,265 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.925e+02 2.137e+02 2.271e+02 2.445e+02 3.999e+02, threshold=4.542e+02, percent-clipped=0.0
+2024-08-14 01:54:04,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-08-14 01:54:22,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=104320.0, ans=0.125
+2024-08-14 01:54:28,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0
+2024-08-14 01:54:40,882 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 50, loss[loss=0.2262, simple_loss=0.2282, pruned_loss=0.09549, over 18968.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.2243, pruned_loss=0.1043, over 827907.61 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:54:54,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=18.07 vs. limit=22.5
+2024-08-14 01:55:00,089 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-14 01:55:05,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=104426.66666666667, ans=0.0
+2024-08-14 01:55:46,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.55 vs. limit=5.0
+2024-08-14 01:55:57,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=104533.33333333333, ans=0.125
+2024-08-14 01:56:18,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=104640.0, ans=0.0
+2024-08-14 01:56:18,960 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 7, batch 100, loss[loss=0.2147, simple_loss=0.2139, pruned_loss=0.09623, over 19302.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2204, pruned_loss=0.1017, over 1473040.93 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:56:22,400 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.63 vs. limit=22.5
+2024-08-14 01:56:24,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104640.0, ans=0.125
+2024-08-14 01:56:33,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=104640.0, ans=0.125
+2024-08-14 01:57:10,868 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-7.pt
+2024-08-14 01:57:16,254 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 01:57:16,255 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 01:57:16,281 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 01:57:24,749 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 0, loss[loss=0.2382, simple_loss=0.2347, pruned_loss=0.1114, over 18485.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2347, pruned_loss=0.1114, over 18485.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 01:57:24,750 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 01:57:48,461 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 8, validation: loss=0.2004, simple_loss=0.2027, pruned_loss=0.08579, over 1073944.00 frames.
+2024-08-14 01:57:48,462 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 01:57:54,667 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.806e+02 2.054e+02 2.212e+02 2.317e+02 3.796e+02, threshold=4.423e+02, percent-clipped=0.0
+2024-08-14 01:58:08,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=10.76 vs. limit=12.0
+2024-08-14 01:58:51,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104949.33333333333, ans=0.1
+2024-08-14 01:59:44,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=105002.66666666667, ans=0.125
+2024-08-14 01:59:59,173 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 50, loss[loss=0.2117, simple_loss=0.2122, pruned_loss=0.09446, over 18938.00 frames. ], tot_loss[loss=0.213, simple_loss=0.213, pruned_loss=0.09554, over 828565.55 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 02:00:08,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.95 vs. limit=15.0
+2024-08-14 02:01:25,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105269.33333333333, ans=0.125
+2024-08-14 02:01:36,492 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 8, batch 100, loss[loss=0.2034, simple_loss=0.2076, pruned_loss=0.08666, over 19222.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2117, pruned_loss=0.09462, over 1474444.14 frames. ], batch size: 144, lr: 9.99e-05, grad_scale: 32.0
+2024-08-14 02:01:42,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.040e+02 2.200e+02 2.368e+02 3.520e+02, threshold=4.401e+02, percent-clipped=0.0
+2024-08-14 02:01:51,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105322.66666666667, ans=0.125
+2024-08-14 02:01:53,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105322.66666666667, ans=0.125
+2024-08-14 02:01:54,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=105376.0, ans=0.125
+2024-08-14 02:02:28,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105429.33333333333, ans=0.125
+2024-08-14 02:02:29,650 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-8.pt
+2024-08-14 02:02:33,930 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 02:02:33,930 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 02:02:33,956 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 02:02:42,910 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 0, loss[loss=0.2174, simple_loss=0.2178, pruned_loss=0.09882, over 18596.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2178, pruned_loss=0.09882, over 18596.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0
+2024-08-14 02:02:42,910 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 02:03:19,140 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 9, validation: loss=0.1911, simple_loss=0.1962, pruned_loss=0.08053, over 1073944.00 frames.
+2024-08-14 02:03:19,141 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 02:03:31,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.47 vs. limit=15.0
+2024-08-14 02:03:51,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=105530.66666666667, ans=0.0
+2024-08-14 02:03:57,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105530.66666666667, ans=0.0
+2024-08-14 02:04:16,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=13.34 vs. limit=12.0
+2024-08-14 02:04:19,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=105584.0, ans=0.125
+2024-08-14 02:04:19,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=12.0
+2024-08-14 02:04:40,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=105637.33333333333, ans=0.0
+2024-08-14 02:06:19,734 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 50, loss[loss=0.2134, simple_loss=0.2166, pruned_loss=0.0944, over 19065.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2102, pruned_loss=0.09185, over 828972.56 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0
+2024-08-14 02:06:34,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=105744.0, ans=0.125
+2024-08-14 02:06:48,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=105797.33333333333, ans=0.025
+2024-08-14 02:07:13,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=105850.66666666667, ans=0.2
+2024-08-14 02:07:13,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=105850.66666666667, ans=0.125
+2024-08-14 02:07:22,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5
+2024-08-14 02:07:22,960 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.818e+02 2.009e+02 2.115e+02 2.263e+02 3.410e+02, threshold=4.229e+02, percent-clipped=0.0
+2024-08-14 02:07:37,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105904.0, ans=0.125
+2024-08-14 02:08:11,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105957.33333333333, ans=0.1
+2024-08-14 02:08:25,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105957.33333333333, ans=0.1
+2024-08-14 02:08:30,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106010.66666666667, ans=0.125
+2024-08-14 02:08:32,109 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 9, batch 100, loss[loss=0.1651, simple_loss=0.1733, pruned_loss=0.06696, over 19269.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.206, pruned_loss=0.08884, over 1474236.32 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 16.0
+2024-08-14 02:08:46,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=106010.66666666667, ans=0.025
+2024-08-14 02:09:08,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0
+2024-08-14 02:09:32,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=106117.33333333333, ans=0.125
+2024-08-14 02:09:37,399 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-9.pt
+2024-08-14 02:09:44,335 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 02:09:44,336 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 02:09:44,365 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 02:09:53,509 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 0, loss[loss=0.1795, simple_loss=0.1871, pruned_loss=0.07487, over 18682.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.1871, pruned_loss=0.07487, over 18682.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0
+2024-08-14 02:09:53,510 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 02:10:16,414 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 10, validation: loss=0.1833, simple_loss=0.191, pruned_loss=0.07653, over 1073944.00 frames.
+2024-08-14 02:10:16,415 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 02:10:54,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=106218.66666666667, ans=0.2
+2024-08-14 02:11:19,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106325.33333333333, ans=0.125
+2024-08-14 02:11:47,861 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.689e+02 1.913e+02 2.021e+02 2.184e+02 3.494e+02, threshold=4.042e+02, percent-clipped=0.0
+2024-08-14 02:11:55,859 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 50, loss[loss=0.2065, simple_loss=0.2146, pruned_loss=0.08811, over 19012.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.204, pruned_loss=0.08796, over 829104.52 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0
+2024-08-14 02:11:57,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=106432.0, ans=0.025
+2024-08-14 02:12:02,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106432.0, ans=0.125
+2024-08-14 02:12:02,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=106432.0, ans=0.125
+2024-08-14 02:12:22,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=106485.33333333333, ans=0.05
+2024-08-14 02:12:22,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=106485.33333333333, ans=15.0
+2024-08-14 02:13:07,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=106592.0, ans=0.0
+2024-08-14 02:13:20,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106645.33333333333, ans=0.125
+2024-08-14 02:13:33,246 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 10, batch 100, loss[loss=0.1794, simple_loss=0.19, pruned_loss=0.0738, over 19226.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2022, pruned_loss=0.08657, over 1474931.95 frames. ], batch size: 144, lr: 9.98e-05, grad_scale: 32.0
+2024-08-14 02:14:07,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=106752.0, ans=0.05
+2024-08-14 02:14:11,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0
+2024-08-14 02:14:16,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=106805.33333333333, ans=0.125
+2024-08-14 02:14:22,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=106805.33333333333, ans=12.0
+2024-08-14 02:14:26,602 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-10.pt
+2024-08-14 02:14:30,910 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616)
+2024-08-14 02:14:30,910 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after
+2024-08-14 02:14:30,937 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616)
+2024-08-14 02:14:39,663 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 0, loss[loss=0.1989, simple_loss=0.2076, pruned_loss=0.08558, over 18505.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2076, pruned_loss=0.08558, over 18505.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0
+2024-08-14 02:14:39,664 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech
+2024-08-14 02:15:02,461 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 11, validation: loss=0.1768, simple_loss=0.1869, pruned_loss=0.07357, over 1073944.00 frames.
+2024-08-14 02:15:02,461 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB
+2024-08-14 02:15:33,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=106906.66666666667, ans=0.0
+2024-08-14 02:15:35,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=106906.66666666667, ans=0.0
+2024-08-14 02:15:35,905 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 1.865e+02 1.931e+02 2.118e+02 3.052e+02, threshold=3.863e+02, percent-clipped=0.0
+2024-08-14 02:15:58,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=106960.0, ans=0.09899494936611666
+2024-08-14 02:15:59,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106960.0, ans=0.125
+2024-08-14 02:16:28,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=107066.66666666667, ans=0.2
+2024-08-14 02:16:30,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125
+2024-08-14 02:16:32,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=107066.66666666667, ans=0.07
+2024-08-14 02:16:45,030 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 50, loss[loss=0.1831, simple_loss=0.1916, pruned_loss=0.07922, over 19023.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.1983, pruned_loss=0.08405, over 827570.26 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:16:52,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=107120.0, ans=0.125 +2024-08-14 02:17:13,652 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:17:21,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=107173.33333333333, ans=0.0 +2024-08-14 02:18:19,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=107333.33333333333, ans=0.05 +2024-08-14 02:18:21,864 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:18:50,454 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 11, batch 100, loss[loss=0.1647, simple_loss=0.1749, pruned_loss=0.06974, over 19237.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.1961, pruned_loss=0.08256, over 1473115.37 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:19:24,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-08-14 02:19:36,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=107440.0, ans=0.0 +2024-08-14 02:19:36,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.754e+02 1.842e+02 1.998e+02 3.456e+02, threshold=3.684e+02, percent-clipped=0.0 +2024-08-14 02:19:38,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.23 vs. limit=10.0 +2024-08-14 02:19:59,996 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-11.pt +2024-08-14 02:20:04,361 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 02:20:04,361 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:20:04,388 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:20:13,224 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 0, loss[loss=0.185, simple_loss=0.1932, pruned_loss=0.08157, over 18585.00 frames. ], tot_loss[loss=0.185, simple_loss=0.1932, pruned_loss=0.08157, over 18585.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:20:13,225 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:20:15,420 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.3439, 5.7683, 5.6063, 6.2187], device='cuda:0') +2024-08-14 02:20:42,008 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 12, validation: loss=0.1712, simple_loss=0.1836, pruned_loss=0.0713, over 1073944.00 frames. 
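The WARNING lines from optim.py print the (min, 25%, 50%, 75%, max) quartiles of recently observed gradient norms, and in every case the printed threshold is, up to display rounding, twice the middle quartile (Clipping_scale=2.0), e.g. 2.0 x 1.846e+02 ~ 3.691e+02 above. A minimal sketch of this kind of median-based clipping, assuming a caller-maintained buffer of recent per-batch grad norms; the names are illustrative and this is not icefall's actual optim.py code:

    import torch

    def clip_gradients(params, recent_norms, clipping_scale=2.0):
        # Quartiles (min, 25%, 50%, 75%, max) of recently seen grad norms,
        # as printed in the WARNING lines above.
        q = torch.quantile(torch.tensor(recent_norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = clipping_scale * q[2]          # 2.0 * median
        grads = [p.grad for p in params if p.grad is not None]
        total_norm = torch.norm(torch.stack([g.norm() for g in grads]))
        if total_norm > threshold:                 # would count toward percent-clipped
            for g in grads:
                g.mul_(threshold / total_norm)
        return total_norm, threshold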
+2024-08-14 02:20:42,009 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:21:18,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=107594.66666666667, ans=0.125 +2024-08-14 02:21:44,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107594.66666666667, ans=0.125 +2024-08-14 02:21:57,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=15.0 +2024-08-14 02:22:20,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=107701.33333333333, ans=0.0 +2024-08-14 02:22:35,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107701.33333333333, ans=0.1 +2024-08-14 02:23:41,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=107754.66666666667, ans=0.0 +2024-08-14 02:24:24,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=107808.0, ans=0.125 +2024-08-14 02:24:25,102 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 50, loss[loss=0.1659, simple_loss=0.1811, pruned_loss=0.06732, over 18986.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.1888, pruned_loss=0.07557, over 829307.75 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:24:36,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0 +2024-08-14 02:25:14,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=107861.33333333333, ans=0.0 +2024-08-14 02:26:41,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=107914.66666666667, ans=0.5 +2024-08-14 02:27:09,000 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 1.754e+02 1.846e+02 2.049e+02 2.889e+02, threshold=3.691e+02, percent-clipped=0.0 +2024-08-14 02:27:37,282 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 12, batch 100, loss[loss=0.1443, simple_loss=0.1574, pruned_loss=0.05951, over 19194.00 frames. ], tot_loss[loss=0.178, simple_loss=0.1885, pruned_loss=0.07713, over 1473409.16 frames. ], batch size: 144, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:27:38,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=108074.66666666667, ans=0.125 +2024-08-14 02:28:06,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=108128.0, ans=0.025 +2024-08-14 02:28:06,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108128.0, ans=0.1 +2024-08-14 02:28:45,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.38 vs. 
limit=6.0 +2024-08-14 02:28:48,209 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-12.pt +2024-08-14 02:28:52,634 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 02:28:52,635 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:28:52,664 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:29:01,626 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 0, loss[loss=0.2097, simple_loss=0.2152, pruned_loss=0.09757, over 18643.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2152, pruned_loss=0.09757, over 18643.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:29:01,627 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:29:09,015 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.9204, 6.1498, 5.9117, 6.0544], device='cuda:0') +2024-08-14 02:29:24,536 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 13, validation: loss=0.1662, simple_loss=0.1808, pruned_loss=0.06949, over 1073944.00 frames. +2024-08-14 02:29:24,536 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:29:30,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=108229.33333333333, ans=0.125 +2024-08-14 02:29:38,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=17.16 vs. limit=15.0 +2024-08-14 02:29:51,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=108282.66666666667, ans=0.125 +2024-08-14 02:30:29,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.29 vs. limit=22.5 +2024-08-14 02:30:48,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108442.66666666667, ans=0.125 +2024-08-14 02:31:06,615 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 50, loss[loss=0.1534, simple_loss=0.1716, pruned_loss=0.06137, over 19011.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.1882, pruned_loss=0.07679, over 829773.70 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:31:10,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=108496.0, ans=0.07 +2024-08-14 02:31:16,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=108496.0, ans=0.07 +2024-08-14 02:31:20,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108496.0, ans=0.125 +2024-08-14 02:31:24,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.57 vs. 
limit=15.0 +2024-08-14 02:31:27,170 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.723e+02 1.826e+02 1.962e+02 2.693e+02, threshold=3.652e+02, percent-clipped=0.0 +2024-08-14 02:31:48,565 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:32:08,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108656.0, ans=0.0 +2024-08-14 02:32:09,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108656.0, ans=0.1 +2024-08-14 02:32:09,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-08-14 02:32:17,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108656.0, ans=0.1 +2024-08-14 02:32:25,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-08-14 02:32:30,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=108709.33333333333, ans=0.125 +2024-08-14 02:32:45,027 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 13, batch 100, loss[loss=0.1293, simple_loss=0.146, pruned_loss=0.05157, over 19225.00 frames. ], tot_loss[loss=0.175, simple_loss=0.1875, pruned_loss=0.07626, over 1474982.28 frames. ], batch size: 144, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:32:46,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.55 vs. limit=10.0 +2024-08-14 02:32:55,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=108762.66666666667, ans=0.2 +2024-08-14 02:32:57,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=108762.66666666667, ans=0.05 +2024-08-14 02:33:15,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=108816.0, ans=0.0 +2024-08-14 02:33:17,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=108816.0, ans=0.0 +2024-08-14 02:33:17,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.06 vs. limit=22.5 +2024-08-14 02:33:24,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=108869.33333333333, ans=0.2 +2024-08-14 02:33:38,653 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-13.pt +2024-08-14 02:33:43,014 INFO [dysarthria_finetune.py:1435] (0/4) (856358912, 34072559616) +2024-08-14 02:33:43,014 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:33:43,042 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:33:51,753 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 0, loss[loss=0.186, simple_loss=0.2056, pruned_loss=0.07781, over 18695.00 frames. 
], tot_loss[loss=0.186, simple_loss=0.2056, pruned_loss=0.07781, over 18695.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:33:51,754 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:34:15,235 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 14, validation: loss=0.1615, simple_loss=0.1782, pruned_loss=0.06778, over 1073944.00 frames. +2024-08-14 02:34:15,235 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:34:28,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=108917.33333333333, ans=0.125 +2024-08-14 02:35:16,114 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.678e+02 1.779e+02 1.987e+02 2.879e+02, threshold=3.559e+02, percent-clipped=0.0 +2024-08-14 02:35:23,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109077.33333333333, ans=0.1 +2024-08-14 02:35:42,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109130.66666666667, ans=0.0 +2024-08-14 02:35:48,100 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:35:52,353 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.28 vs. limit=6.0 +2024-08-14 02:35:52,703 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 50, loss[loss=0.1508, simple_loss=0.1741, pruned_loss=0.05901, over 18964.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.1828, pruned_loss=0.07314, over 828263.79 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:35:53,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=109184.0, ans=0.0 +2024-08-14 02:36:07,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109184.0, ans=0.125 +2024-08-14 02:36:26,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109237.33333333333, ans=0.1 +2024-08-14 02:37:28,723 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 14, batch 100, loss[loss=0.1549, simple_loss=0.1676, pruned_loss=0.06858, over 19207.00 frames. ], tot_loss[loss=0.167, simple_loss=0.1824, pruned_loss=0.07218, over 1474261.28 frames. ], batch size: 144, lr: 9.96e-05, grad_scale: 16.0 +2024-08-14 02:37:43,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=109450.66666666667, ans=0.025 +2024-08-14 02:37:50,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109504.0, ans=0.1 +2024-08-14 02:38:02,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.61 vs. 
limit=15.0 +2024-08-14 02:38:03,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=109504.0, ans=0.125 +2024-08-14 02:38:21,631 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-14.pt +2024-08-14 02:38:26,154 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 02:38:26,154 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:38:26,182 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:38:34,953 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 0, loss[loss=0.2081, simple_loss=0.2223, pruned_loss=0.09439, over 18509.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2223, pruned_loss=0.09439, over 18509.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:38:34,954 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:38:57,684 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 15, validation: loss=0.1571, simple_loss=0.176, pruned_loss=0.06629, over 1073944.00 frames. +2024-08-14 02:38:57,684 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:38:59,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-08-14 02:39:03,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-08-14 02:39:07,270 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.490e+02 1.642e+02 1.752e+02 1.914e+02 2.610e+02, threshold=3.503e+02, percent-clipped=0.0 +2024-08-14 02:39:16,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109605.33333333333, ans=0.1 +2024-08-14 02:39:39,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.54 vs. limit=12.0 +2024-08-14 02:40:04,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.78 vs. limit=22.5 +2024-08-14 02:40:57,612 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 50, loss[loss=0.1567, simple_loss=0.1791, pruned_loss=0.06481, over 19011.00 frames. ], tot_loss[loss=0.164, simple_loss=0.18, pruned_loss=0.07181, over 827942.50 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:42:51,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110032.0, ans=0.125 +2024-08-14 02:43:05,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=110085.33333333333, ans=0.0 +2024-08-14 02:43:17,713 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 15, batch 100, loss[loss=0.1361, simple_loss=0.1549, pruned_loss=0.05733, over 19251.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.179, pruned_loss=0.07093, over 1473903.80 frames. 
], batch size: 144, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:43:19,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=110138.66666666667, ans=0.2 +2024-08-14 02:43:23,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.639e+02 1.741e+02 1.916e+02 2.571e+02, threshold=3.482e+02, percent-clipped=0.0 +2024-08-14 02:44:29,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=110245.33333333333, ans=0.125 +2024-08-14 02:44:39,964 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-15.pt +2024-08-14 02:44:45,329 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 02:44:45,329 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:44:45,355 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:44:54,086 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 0, loss[loss=0.1827, simple_loss=0.2028, pruned_loss=0.08034, over 18729.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2028, pruned_loss=0.08034, over 18729.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:44:54,087 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:45:16,882 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 16, validation: loss=0.1529, simple_loss=0.1739, pruned_loss=0.06493, over 1073944.00 frames. +2024-08-14 02:45:16,883 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:45:32,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110293.33333333333, ans=0.1 +2024-08-14 02:46:13,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=10.67 vs. limit=12.0 +2024-08-14 02:46:34,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=110506.66666666667, ans=0.025 +2024-08-14 02:47:33,730 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 50, loss[loss=0.1377, simple_loss=0.1638, pruned_loss=0.05546, over 18988.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.1787, pruned_loss=0.06963, over 828175.61 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:48:14,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=110666.66666666667, ans=0.0 +2024-08-14 02:48:18,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.72 vs. 
limit=15.0 +2024-08-14 02:48:22,829 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.614e+02 1.779e+02 1.933e+02 2.621e+02, threshold=3.558e+02, percent-clipped=0.0 +2024-08-14 02:49:22,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-08-14 02:49:33,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=110826.66666666667, ans=0.125 +2024-08-14 02:49:34,517 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 16, batch 100, loss[loss=0.1376, simple_loss=0.1575, pruned_loss=0.05883, over 19270.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.1772, pruned_loss=0.0688, over 1473314.28 frames. ], batch size: 144, lr: 9.94e-05, grad_scale: 16.0 +2024-08-14 02:50:07,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.50 vs. limit=10.0 +2024-08-14 02:50:17,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.14 vs. limit=12.0 +2024-08-14 02:50:28,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.15 vs. limit=6.0 +2024-08-14 02:50:28,874 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-16.pt +2024-08-14 02:50:39,327 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 02:50:39,327 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:50:39,354 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:50:47,961 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 0, loss[loss=0.1732, simple_loss=0.1852, pruned_loss=0.08058, over 18739.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.1852, pruned_loss=0.08058, over 18739.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:50:47,962 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:50:56,791 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.2244, 4.9887, 5.1861, 5.1181], device='cuda:0') +2024-08-14 02:51:11,069 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 17, validation: loss=0.1498, simple_loss=0.1721, pruned_loss=0.06377, over 1073944.00 frames. 
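Each ScheduledFloat line above reports the current value (ans=...) of a training constant, such as an attention_skip_rate or a balancer prob, as a function of batch_count. A plausible minimal sketch of such a batch-count-keyed piecewise-linear schedule; the (point, value) pairs below are invented for illustration, and this is not scaling.py's actual implementation:

    def scheduled_float(batch_count, points):
        # points: [(batch_count, value), ...] sorted by batch_count;
        # linear interpolation between them, clamped at both ends.
        if batch_count <= points[0][0]:
            return points[0][1]
        if batch_count >= points[-1][0]:
            return points[-1][1]
        for (x0, y0), (x1, y1) in zip(points, points[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

    # e.g. a dropout-like rate annealed from 0.3 to 0.1 early in training:
    print(scheduled_float(110666.67, [(0.0, 0.3), (20000.0, 0.1)]))  # -> 0.1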
+2024-08-14 02:51:11,070 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:51:31,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111034.66666666667, ans=0.125 +2024-08-14 02:51:40,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=111034.66666666667, ans=0.2 +2024-08-14 02:51:42,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=111034.66666666667, ans=0.2 +2024-08-14 02:51:42,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-08-14 02:52:59,544 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.599e+02 1.701e+02 1.889e+02 2.501e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-14 02:53:00,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=111194.66666666667, ans=0.2 +2024-08-14 02:53:07,421 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 50, loss[loss=0.1463, simple_loss=0.177, pruned_loss=0.0578, over 19028.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.1754, pruned_loss=0.06896, over 827378.67 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:53:37,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=111301.33333333333, ans=0.0 +2024-08-14 02:53:38,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.34 vs. limit=15.0 +2024-08-14 02:53:49,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111354.66666666667, ans=0.125 +2024-08-14 02:53:49,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.33 vs. limit=12.0 +2024-08-14 02:54:06,548 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:54:14,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=111408.0, ans=0.04949747468305833 +2024-08-14 02:54:15,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=111408.0, ans=0.125 +2024-08-14 02:54:43,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.27 vs. limit=15.0 +2024-08-14 02:55:04,515 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 17, batch 100, loss[loss=0.1196, simple_loss=0.1429, pruned_loss=0.04812, over 19218.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.1762, pruned_loss=0.06882, over 1473529.96 frames. 
], batch size: 144, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:55:45,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111621.33333333333, ans=0.125 +2024-08-14 02:56:14,222 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-17.pt +2024-08-14 02:56:19,092 INFO [dysarthria_finetune.py:1435] (0/4) (856358912, 34072559616) +2024-08-14 02:56:19,092 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 02:56:19,118 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 02:56:27,749 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 0, loss[loss=0.1748, simple_loss=0.1932, pruned_loss=0.07825, over 18538.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.1932, pruned_loss=0.07825, over 18538.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 02:56:27,750 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 02:56:58,388 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 18, validation: loss=0.1479, simple_loss=0.1705, pruned_loss=0.06271, over 1073944.00 frames. +2024-08-14 02:56:58,389 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 02:57:05,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111669.33333333333, ans=0.07 +2024-08-14 02:57:44,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=111669.33333333333, ans=0.05 +2024-08-14 02:58:03,299 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.609e+02 1.680e+02 1.858e+02 2.812e+02, threshold=3.359e+02, percent-clipped=0.0 +2024-08-14 02:58:35,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=111776.0, ans=0.125 +2024-08-14 02:59:54,697 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 50, loss[loss=0.1382, simple_loss=0.1639, pruned_loss=0.0563, over 18998.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.1739, pruned_loss=0.06641, over 828205.61 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:01:13,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=111989.33333333333, ans=0.125 +2024-08-14 03:01:57,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=112042.66666666667, ans=10.0 +2024-08-14 03:03:17,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112149.33333333333, ans=0.125 +2024-08-14 03:03:56,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112149.33333333333, ans=0.1 +2024-08-14 03:04:01,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.10 vs. limit=6.0 +2024-08-14 03:04:53,085 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 18, batch 100, loss[loss=0.1273, simple_loss=0.1522, pruned_loss=0.05118, over 19294.00 frames. 
], tot_loss[loss=0.1519, simple_loss=0.1732, pruned_loss=0.06535, over 1473690.24 frames. ], batch size: 144, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:05:52,848 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.561e+02 1.643e+02 1.812e+02 2.261e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-14 03:06:41,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=112256.0, ans=0.125 +2024-08-14 03:07:01,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0 +2024-08-14 03:07:12,805 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-18.pt +2024-08-14 03:07:17,223 INFO [dysarthria_finetune.py:1435] (0/4) (856358912, 34072559616) +2024-08-14 03:07:17,223 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 03:07:17,251 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 03:07:26,410 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 0, loss[loss=0.168, simple_loss=0.1928, pruned_loss=0.07154, over 18598.00 frames. ], tot_loss[loss=0.168, simple_loss=0.1928, pruned_loss=0.07154, over 18598.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:07:26,411 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 03:07:58,726 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 19, validation: loss=0.1464, simple_loss=0.169, pruned_loss=0.06188, over 1073944.00 frames. +2024-08-14 03:07:58,727 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 03:08:43,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112405.33333333333, ans=0.125 +2024-08-14 03:09:18,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112458.66666666667, ans=0.0 +2024-08-14 03:10:15,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=112512.0, ans=0.0 +2024-08-14 03:10:39,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-08-14 03:10:40,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=112565.33333333333, ans=0.2 +2024-08-14 03:10:40,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=112565.33333333333, ans=0.025 +2024-08-14 03:10:57,788 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 50, loss[loss=0.1412, simple_loss=0.1633, pruned_loss=0.05958, over 19038.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.169, pruned_loss=0.06306, over 827203.46 frames. 
], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:11:00,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112618.66666666667, ans=0.125 +2024-08-14 03:11:02,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=112618.66666666667, ans=0.125 +2024-08-14 03:11:55,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:11:57,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:12:19,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:12:23,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-08-14 03:12:35,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.78 vs. limit=22.5 +2024-08-14 03:12:51,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=112778.66666666667, ans=0.2 +2024-08-14 03:13:01,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=112778.66666666667, ans=0.2 +2024-08-14 03:13:05,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=112778.66666666667, ans=0.0 +2024-08-14 03:13:37,507 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.593e+02 1.694e+02 1.909e+02 3.031e+02, threshold=3.389e+02, percent-clipped=0.0 +2024-08-14 03:13:45,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112832.0, ans=0.1 +2024-08-14 03:14:02,406 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 19, batch 100, loss[loss=0.1316, simple_loss=0.1542, pruned_loss=0.05448, over 19274.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.169, pruned_loss=0.06375, over 1472434.33 frames. ], batch size: 144, lr: 9.92e-05, grad_scale: 16.0 +2024-08-14 03:14:05,521 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 03:15:24,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=112938.66666666667, ans=0.125 +2024-08-14 03:15:42,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.01 vs. 
limit=15.0 +2024-08-14 03:15:54,673 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-19.pt +2024-08-14 03:16:00,521 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 03:16:00,522 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 03:16:00,550 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 03:16:41,825 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 0, loss[loss=0.1428, simple_loss=0.1684, pruned_loss=0.05857, over 18599.00 frames. ], tot_loss[loss=0.1428, simple_loss=0.1684, pruned_loss=0.05857, over 18599.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:16:41,826 INFO [dysarthria_finetune.py:1165] (0/4) Computing validation loss on speech +2024-08-14 03:17:15,216 INFO [dysarthria_finetune.py:1174] (0/4) Validation on speech: Epoch 20, validation: loss=0.1449, simple_loss=0.1677, pruned_loss=0.0611, over 1073944.00 frames. +2024-08-14 03:17:15,217 INFO [dysarthria_finetune.py:1177] (0/4) Maximum memory allocated so far is 26678MB +2024-08-14 03:17:30,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=113040.0, ans=0.125 +2024-08-14 03:17:42,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=113040.0, ans=0.125 +2024-08-14 03:17:50,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=113093.33333333333, ans=0.2 +2024-08-14 03:19:34,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=113200.0, ans=0.09899494936611666 +2024-08-14 03:20:51,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=113253.33333333333, ans=0.2 +2024-08-14 03:21:08,396 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 50, loss[loss=0.1408, simple_loss=0.1682, pruned_loss=0.05677, over 18985.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.1695, pruned_loss=0.0642, over 828130.18 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:21:49,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=113306.66666666667, ans=0.0 +2024-08-14 03:21:59,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=9.27 vs. limit=12.0 +2024-08-14 03:22:10,070 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.567e+02 1.664e+02 1.868e+02 2.522e+02, threshold=3.327e+02, percent-clipped=0.0 +2024-08-14 03:23:21,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.31 vs. limit=6.0 +2024-08-14 03:24:28,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=113520.0, ans=0.07 +2024-08-14 03:24:58,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=113573.33333333333, ans=0.125 +2024-08-14 03:24:59,420 INFO [dysarthria_finetune.py:1141] (0/4) Epoch 20, batch 100, loss[loss=0.1312, simple_loss=0.1537, pruned_loss=0.05428, over 19321.00 frames. 
], tot_loss[loss=0.1479, simple_loss=0.1688, pruned_loss=0.06343, over 1472900.97 frames. ], batch size: 144, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:25:17,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=113573.33333333333, ans=0.125 +2024-08-14 03:25:17,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113573.33333333333, ans=0.125 +2024-08-14 03:25:23,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-08-14 03:26:28,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-08-14 03:26:45,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-08-14 03:27:20,911 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune/epoch-20.pt +2024-08-14 03:27:42,250 INFO [dysarthria_finetune.py:1435] (0/4) (858456064, 34072559616) +2024-08-14 03:27:42,250 INFO [dysarthria_finetune.py:1436] (0/4) Empty cache: before and after +2024-08-14 03:27:42,277 INFO [dysarthria_finetune.py:1440] (0/4) (29306322944, 34072559616) +2024-08-14 03:27:42,277 INFO [dysarthria_finetune.py:1442] (0/4) Done! diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-1 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-1 new file mode 100644 index 0000000000000000000000000000000000000000..ff59f956ef2c6a32fb32c90b9f3ad09028338d81 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-1 @@ -0,0 +1,533 @@ +2024-08-13 23:24:47,920 INFO [dysarthria_finetune.py:1212] (1/4) Training started +2024-08-13 23:24:47,960 INFO [dysarthria_finetune.py:1214] (1/4) (33735507968, 34072559616) +2024-08-13 23:24:47,960 INFO [dysarthria_finetune.py:1215] (1/4) Empty cache: before and after +2024-08-13 23:24:48,946 INFO [dysarthria_finetune.py:1219] (1/4) (32783400960, 34072559616) +2024-08-13 23:24:48,947 INFO [dysarthria_finetune.py:1229] (1/4) Device: cuda:1 +2024-08-13 23:24:48,990 INFO [dysarthria_finetune.py:1241] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 
'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-13 23:24:48,991 INFO [dysarthria_finetune.py:1243] (1/4) About to create model +2024-08-13 23:24:50,010 INFO [dysarthria_finetune.py:1247] (1/4) Number of model parameters: 65549011 +2024-08-13 23:24:50,011 INFO [dysarthria_finetune.py:769] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-13 23:24:59,882 INFO [dysarthria_finetune.py:1275] (1/4) Using DDP +2024-08-13 23:25:17,971 INFO [dysarthria_asr_datamodule.py:494] (1/4) About to get train cuts +2024-08-13 23:25:18,291 INFO [dysarthria_finetune.py:1319] (1/4) CutSet(len=62255) [underlying data type: ] +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:239] (1/4) Disable MUSAN +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:257] (1/4) Enable SpecAugment +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:258] (1/4) Time warp factor: 80 +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:268] (1/4) Num frame mask: 10 +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:281] (1/4) About to create train dataset +2024-08-13 23:25:19,281 INFO [dysarthria_asr_datamodule.py:308] (1/4) Using DynamicBucketingSampler. +2024-08-13 23:25:20,229 INFO [dysarthria_asr_datamodule.py:325] (1/4) About to create train dataloader +2024-08-13 23:25:20,234 INFO [dysarthria_asr_datamodule.py:500] (1/4) About to get dev cuts +2024-08-13 23:25:24,698 INFO [dysarthria_asr_datamodule.py:356] (1/4) About to create dev dataset +2024-08-13 23:25:28,026 INFO [dysarthria_asr_datamodule.py:373] (1/4) About to create dev dataloader +2024-08-13 23:25:28,026 INFO [dysarthria_finetune.py:1490] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. 
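The configuration dumped above sets 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0 and 'warm_step': 2000, and the lr values printed in the batch lines below (5.01e-05 at the very first batch, climbing toward 1e-04, then decaying slowly through the 9.9e-05 range) are consistent with an Eden-style schedule that decays in both the batch and epoch indices with a warmup from 0.5x. A rough sketch of that shape, assuming this warmup form; icefall's actual optim.py may count steps differently:

    def eden_lr(base_lr, batch, epoch,
                lr_batches=100000.0, lr_epochs=100.0, warm_step=2000):
        # Decay smoothly in both the batch index and the epoch index...
        batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
        epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
        # ...with a linear warmup from 0.5x over the first warm_step batches.
        warmup = 1.0 if batch >= warm_step else 0.5 + 0.5 * batch / warm_step
        return base_lr * batch_factor * epoch_factor * warmup

    print(eden_lr(1e-4, batch=0, epoch=1))     # ~5.0e-05, cf. the batch-0 line below
    print(eden_lr(1e-4, batch=5000, epoch=9))  # ~9.97e-05, cf. lr around epochs 9-11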
+2024-08-13 23:27:16,793 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=19.93 vs. limit=7.5 +2024-08-13 23:27:17,109 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=16.77 vs. limit=7.5 +2024-08-13 23:27:17,919 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 23:27:19,736 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 23:32:34,802 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 23:32:36,803 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 23:35:38,399 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 23:35:41,667 INFO [dysarthria_finetune.py:1518] (1/4) Maximum memory allocated so far is 11776MB +2024-08-13 23:36:58,726 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 0, loss[loss=0.343, simple_loss=0.3241, pruned_loss=0.1887, over 18549.00 frames. ], tot_loss[loss=0.343, simple_loss=0.3241, pruned_loss=0.1887, over 18549.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 23:36:58,726 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-13 23:49:47,041 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. +2024-08-13 23:49:47,367 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13098MB +2024-08-13 23:51:42,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100000.0, ans=0.125 +2024-08-13 23:53:30,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100000.0, ans=0.125 +2024-08-14 00:00:01,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=15.0 +2024-08-14 00:15:49,520 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.298e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0 +2024-08-14 00:20:36,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.79 vs. limit=15.0 +2024-08-14 00:22:16,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.96 vs. 
limit=10.0 +2024-08-14 00:23:15,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.450e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0 +2024-08-14 00:27:43,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=100160.0, ans=0.0 +2024-08-14 00:29:53,824 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 7.298e+02 9.450e+02 1.050e+03 1.319e+03, threshold=3.780e+03, percent-clipped=0.0 +2024-08-14 00:31:20,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-14 00:37:01,644 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 50, loss[loss=0.3541, simple_loss=0.3336, pruned_loss=0.2013, over 19042.00 frames. ], tot_loss[loss=0.3545, simple_loss=0.3342, pruned_loss=0.2019, over 827432.33 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-14 00:42:06,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.17 vs. limit=15.0 +2024-08-14 00:42:13,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=100266.66666666667, ans=0.2 +2024-08-14 00:43:30,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=100320.0, ans=0.0 +2024-08-14 00:47:09,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=100320.0, ans=0.0 +2024-08-14 00:57:33,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=26.97 vs. limit=15.0 +2024-08-14 00:58:13,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100480.0, ans=0.0 +2024-08-14 01:01:58,600 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+02 5.963e+02 7.298e+02 8.800e+02 1.319e+03, threshold=1.460e+03, percent-clipped=0.0 +2024-08-14 01:01:58,635 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 1, batch 100, loss[loss=0.3387, simple_loss=0.3196, pruned_loss=0.1871, over 19093.00 frames. ], tot_loss[loss=0.341, simple_loss=0.3218, pruned_loss=0.1899, over 1470684.91 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-14 01:11:48,008 INFO [dysarthria_finetune.py:1435] (1/4) (13820952576, 34072559616) +2024-08-14 01:11:48,009 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:11:48,058 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:12:35,408 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 0, loss[loss=0.2894, simple_loss=0.2745, pruned_loss=0.1501, over 18746.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.2745, pruned_loss=0.1501, over 18746.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-14 01:12:35,408 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:16:55,986 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 2, validation: loss=0.2907, simple_loss=0.276, pruned_loss=0.149, over 1073944.00 frames. 
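The `WARNING [optim.py:487] ... Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...` lines above come from icefall's ScaledAdam optimizer, which derives its clipping threshold from the recent distribution of gradient norms rather than from a fixed constant — hence the falling thresholds as training stabilizes. A simplified, self-contained sketch of that idea; the history length, and clipping on the total norm rather than per parameter group, are assumptions for illustration, not ScaledAdam's exact behavior:

```python
import collections
import torch

class QuartileGradClipper:
    """Clip gradients against a threshold derived from recent grad norms."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 1024):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=history)

    def __call__(self, parameters) -> None:
        grads = [p.grad for p in parameters if p.grad is not None]
        if not grads:
            return
        norm = torch.norm(torch.stack([g.detach().norm() for g in grads]))
        self.norms.append(float(norm))
        hist = torch.tensor(list(self.norms))
        # Threshold = clipping_scale * median of recent norms.
        threshold = self.clipping_scale * float(hist.median())
        q = torch.quantile(hist, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        print(f"grad-norm quartiles {q.tolist()}, threshold={threshold:.3e}")
        if float(norm) > threshold > 0:
            for g in grads:
                g.mul_(threshold / float(norm))
```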
+2024-08-14 01:16:55,986 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:19:23,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=100736.0, ans=0.5 +2024-08-14 01:20:02,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100736.0, ans=0.125 +2024-08-14 01:20:05,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=100736.0, ans=0.2 +2024-08-14 01:20:57,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-14 01:20:57,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=100789.33333333333, ans=0.125 +2024-08-14 01:21:13,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=100789.33333333333, ans=0.2 +2024-08-14 01:21:57,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-08-14 01:21:59,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100842.66666666667, ans=0.125 +2024-08-14 01:22:26,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=100896.0, ans=0.125 +2024-08-14 01:22:31,958 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:24:53,691 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.49 vs. limit=22.5 +2024-08-14 01:24:54,903 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 50, loss[loss=0.345, simple_loss=0.325, pruned_loss=0.1911, over 19071.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3021, pruned_loss=0.17, over 827854.65 frames. 
], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-08-14 01:25:44,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100949.33333333333, ans=0.125 +2024-08-14 01:26:11,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=101002.66666666667, ans=0.09899494936611666 +2024-08-14 01:27:07,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=101002.66666666667, ans=0.2 +2024-08-14 01:27:29,018 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 4.347e+02 4.852e+02 5.543e+02 7.043e+02, threshold=9.703e+02, percent-clipped=0.0 +2024-08-14 01:27:52,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=101056.0, ans=0.0 +2024-08-14 01:28:04,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-14 01:28:31,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101109.33333333333, ans=0.125 +2024-08-14 01:28:39,701 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:28:52,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.08 vs. limit=22.5 +2024-08-14 01:28:58,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.47 vs. limit=15.0 +2024-08-14 01:29:06,633 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 2, batch 100, loss[loss=0.3062, simple_loss=0.2903, pruned_loss=0.1601, over 19090.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.2943, pruned_loss=0.1633, over 1472213.55 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-08-14 01:29:10,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101216.0, ans=0.0 +2024-08-14 01:29:30,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=101216.0, ans=0.125 +2024-08-14 01:30:16,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.39 vs. 
limit=10.0 +2024-08-14 01:30:20,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=101322.66666666667, ans=0.125 +2024-08-14 01:30:25,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=101322.66666666667, ans=0.2 +2024-08-14 01:30:33,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=101322.66666666667, ans=0.0 +2024-08-14 01:30:42,019 INFO [dysarthria_finetune.py:1435] (1/4) (665518080, 34072559616) +2024-08-14 01:30:42,019 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:30:42,089 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:30:49,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-08-14 01:30:55,556 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 0, loss[loss=0.3208, simple_loss=0.3029, pruned_loss=0.1732, over 18511.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3029, pruned_loss=0.1732, over 18511.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-08-14 01:30:55,556 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:31:18,577 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 3, validation: loss=0.2682, simple_loss=0.2564, pruned_loss=0.1309, over 1073944.00 frames. +2024-08-14 01:31:18,578 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:31:23,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-08-14 01:32:09,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=101477.33333333333, ans=0.025 +2024-08-14 01:32:19,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=101477.33333333333, ans=0.04949747468305833 +2024-08-14 01:32:30,691 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=15.0 +2024-08-14 01:32:58,609 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 3.350e+02 3.692e+02 4.154e+02 5.648e+02, threshold=7.384e+02, percent-clipped=0.0 +2024-08-14 01:33:12,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.57 vs. limit=10.0 +2024-08-14 01:33:14,921 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 50, loss[loss=0.2928, simple_loss=0.2799, pruned_loss=0.1447, over 19005.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.2825, pruned_loss=0.1522, over 828905.42 frames. 
], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-08-14 01:33:23,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101637.33333333333, ans=0.1 +2024-08-14 01:34:00,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=101690.66666666667, ans=0.125 +2024-08-14 01:34:25,521 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=8.78 vs. limit=12.0 +2024-08-14 01:34:38,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=101797.33333333333, ans=0.5 +2024-08-14 01:34:40,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=25.39 vs. limit=15.0 +2024-08-14 01:34:46,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.06 vs. limit=15.0 +2024-08-14 01:35:01,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=101850.66666666667, ans=0.0 +2024-08-14 01:35:08,704 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 3, batch 100, loss[loss=0.26, simple_loss=0.25, pruned_loss=0.1238, over 19133.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.2733, pruned_loss=0.145, over 1474266.40 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-08-14 01:35:10,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.02 vs. limit=15.0 +2024-08-14 01:36:03,995 INFO [dysarthria_finetune.py:1435] (1/4) (803930112, 34072559616) +2024-08-14 01:36:04,626 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:36:04,696 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:36:20,153 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 0, loss[loss=0.2541, simple_loss=0.2436, pruned_loss=0.1242, over 18466.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.2436, pruned_loss=0.1242, over 18466.00 frames. ], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-08-14 01:36:20,154 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:36:43,054 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 4, validation: loss=0.2499, simple_loss=0.241, pruned_loss=0.1173, over 1073944.00 frames. 
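The paired `(…, 34072559616)` tuples printed around each `Empty cache: before and after` line are `(free_bytes, total_bytes)` for the GPU — exactly what `torch.cuda.mem_get_info()` returns — and the epoch boundary releases cached allocator blocks so the next epoch starts clean. A minimal sketch of that logging step:

```python
import torch

def empty_cache_with_logging(device: torch.device) -> None:
    print(torch.cuda.mem_get_info(device))   # (free_bytes, total_bytes) before
    torch.cuda.empty_cache()                 # release cached, unused blocks
    print(torch.cuda.mem_get_info(device))   # (free_bytes, total_bytes) after
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mb}MB")
```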
+2024-08-14 01:36:43,055 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:37:14,070 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.440e+02 2.841e+02 3.076e+02 3.396e+02 5.357e+02, threshold=6.153e+02, percent-clipped=0.0 +2024-08-14 01:37:19,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102106.66666666667, ans=0.125 +2024-08-14 01:37:23,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-08-14 01:37:33,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=102160.0, ans=0.0 +2024-08-14 01:37:36,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=102160.0, ans=0.5 +2024-08-14 01:37:39,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=102160.0, ans=0.2 +2024-08-14 01:37:57,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.24 vs. limit=6.0 +2024-08-14 01:38:18,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=102266.66666666667, ans=0.125 +2024-08-14 01:38:23,454 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 50, loss[loss=0.2644, simple_loss=0.2537, pruned_loss=0.1293, over 18961.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.2613, pruned_loss=0.1331, over 827373.05 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-08-14 01:38:36,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=21.77 vs. limit=15.0 +2024-08-14 01:38:40,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=102320.0, ans=0.125 +2024-08-14 01:39:39,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=102480.0, ans=0.0 +2024-08-14 01:39:54,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=102533.33333333333, ans=0.125 +2024-08-14 01:39:58,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=102533.33333333333, ans=10.0 +2024-08-14 01:39:58,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.12 vs. limit=15.0 +2024-08-14 01:40:00,899 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 4, batch 100, loss[loss=0.268, simple_loss=0.258, pruned_loss=0.1298, over 19038.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.2566, pruned_loss=0.1309, over 1472261.06 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-08-14 01:40:18,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=102586.66666666667, ans=0.05 +2024-08-14 01:40:20,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.02 vs. 
limit=6.0 +2024-08-14 01:40:30,558 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 2.524e+02 2.719e+02 2.975e+02 4.617e+02, threshold=5.438e+02, percent-clipped=0.0 +2024-08-14 01:40:37,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102640.0, ans=0.125 +2024-08-14 01:40:49,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=8.70 vs. limit=12.0 +2024-08-14 01:40:51,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=20.30 vs. limit=15.0 +2024-08-14 01:40:55,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-08-14 01:40:55,789 INFO [dysarthria_finetune.py:1435] (1/4) (1640693760, 34072559616) +2024-08-14 01:40:55,789 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:40:55,862 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:41:11,086 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 0, loss[loss=0.2389, simple_loss=0.2328, pruned_loss=0.1088, over 18670.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2328, pruned_loss=0.1088, over 18670.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:41:11,086 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:41:34,534 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 5, validation: loss=0.2343, simple_loss=0.2283, pruned_loss=0.1066, over 1073944.00 frames. +2024-08-14 01:41:34,535 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:41:46,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=102741.33333333333, ans=0.0 +2024-08-14 01:42:12,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=102794.66666666667, ans=0.125 +2024-08-14 01:42:39,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102848.0, ans=0.125 +2024-08-14 01:43:07,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=102901.33333333333, ans=0.2 +2024-08-14 01:43:29,161 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 50, loss[loss=0.248, simple_loss=0.2377, pruned_loss=0.1238, over 18968.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.2454, pruned_loss=0.1204, over 828630.89 frames. 
], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:44:16,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=103061.33333333333, ans=0.125 +2024-08-14 01:44:22,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=103061.33333333333, ans=0.0 +2024-08-14 01:44:30,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=103114.66666666667, ans=0.025 +2024-08-14 01:44:59,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.063e+02 2.398e+02 2.550e+02 2.967e+02 4.732e+02, threshold=5.099e+02, percent-clipped=0.0 +2024-08-14 01:45:02,679 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:45:27,206 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 5, batch 100, loss[loss=0.2347, simple_loss=0.2292, pruned_loss=0.108, over 19157.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.2408, pruned_loss=0.1176, over 1473409.40 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:45:34,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=103274.66666666667, ans=0.1 +2024-08-14 01:46:18,160 INFO [dysarthria_finetune.py:1435] (1/4) (443219968, 34072559616) +2024-08-14 01:46:18,161 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:46:18,212 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:46:32,125 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 0, loss[loss=0.2545, simple_loss=0.2475, pruned_loss=0.1201, over 18435.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.2475, pruned_loss=0.1201, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:46:32,126 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:46:55,697 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 6, validation: loss=0.2214, simple_loss=0.2182, pruned_loss=0.09842, over 1073944.00 frames. +2024-08-14 01:46:55,697 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:47:15,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.80 vs. limit=15.0 +2024-08-14 01:48:02,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=103530.66666666667, ans=0.2 +2024-08-14 01:48:27,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=103584.0, ans=0.0 +2024-08-14 01:48:55,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103637.33333333333, ans=0.125 +2024-08-14 01:49:09,903 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 50, loss[loss=0.2199, simple_loss=0.2163, pruned_loss=0.09993, over 19041.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2333, pruned_loss=0.1112, over 827399.35 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:49:30,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=103690.66666666667, ans=0.125 +2024-08-14 01:49:32,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 2.293e+02 2.374e+02 2.625e+02 4.193e+02, threshold=4.747e+02, percent-clipped=0.0 +2024-08-14 01:49:34,926 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:49:56,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=103744.0, ans=0.2 +2024-08-14 01:50:03,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-08-14 01:50:08,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=16.67 vs. limit=15.0 +2024-08-14 01:50:10,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.57 vs. limit=15.0 +2024-08-14 01:50:21,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=103797.33333333333, ans=0.0 +2024-08-14 01:50:21,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.78 vs. limit=15.0 +2024-08-14 01:51:17,784 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 6, batch 100, loss[loss=0.2029, simple_loss=0.2011, pruned_loss=0.09017, over 19066.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2289, pruned_loss=0.108, over 1471849.14 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:51:29,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=103957.33333333333, ans=0.0 +2024-08-14 01:51:54,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104010.66666666667, ans=0.125 +2024-08-14 01:51:57,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104010.66666666667, ans=0.125 +2024-08-14 01:52:24,216 INFO [dysarthria_finetune.py:1435] (1/4) (1141571584, 34072559616) +2024-08-14 01:52:24,216 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:52:24,300 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:52:37,503 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 0, loss[loss=0.2495, simple_loss=0.2397, pruned_loss=0.1253, over 18532.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.2397, pruned_loss=0.1253, over 18532.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:52:37,503 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:53:01,318 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 7, validation: loss=0.2103, simple_loss=0.2098, pruned_loss=0.0916, over 1073944.00 frames. 
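In the `Epoch N, batch M, loss[...] tot_loss[... over F frames]` lines, the first bracket reports the current batch and `tot_loss[...]` is a frame-weighted running average over the epoch so far, which is why it is reported over a frame count rather than a batch count. A sketch of that accumulation; icefall's MetricsTracker tracks several named losses at once, so the class below is illustrative, not its API:

```python
class RunningFrameAverage:
    """Frame-weighted running average of a per-frame-normalized loss."""

    def __init__(self) -> None:
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: float) -> None:
        # The per-batch loss is already frame-normalized, so weight it
        # back by its frame count before accumulating.
        self.loss_sum += loss * num_frames
        self.frames += num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tracker = RunningFrameAverage()
tracker.update(loss=0.2189, num_frames=19096.0)  # values as in the log nearby
print(f"tot_loss={tracker.value:.4f} over {tracker.frames:.2f} frames")
```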
+2024-08-14 01:53:01,319 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:53:13,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.12 vs. limit=15.0 +2024-08-14 01:53:43,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.93 vs. limit=15.0 +2024-08-14 01:54:01,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.925e+02 2.137e+02 2.271e+02 2.445e+02 3.999e+02, threshold=4.542e+02, percent-clipped=0.0 +2024-08-14 01:54:06,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=15.98 vs. limit=15.0 +2024-08-14 01:54:40,899 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 50, loss[loss=0.2189, simple_loss=0.2164, pruned_loss=0.09995, over 19096.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2229, pruned_loss=0.1029, over 827950.42 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:55:10,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104426.66666666667, ans=0.125 +2024-08-14 01:55:16,524 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:56:18,959 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 7, batch 100, loss[loss=0.2146, simple_loss=0.2143, pruned_loss=0.09547, over 19105.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2197, pruned_loss=0.1002, over 1472811.51 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:56:22,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=104640.0, ans=0.07 +2024-08-14 01:56:30,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.34 vs. limit=15.0 +2024-08-14 01:56:31,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=104640.0, ans=0.2 +2024-08-14 01:56:39,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=15.0 +2024-08-14 01:56:45,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104693.33333333333, ans=0.125 +2024-08-14 01:56:56,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=104746.66666666667, ans=0.09899494936611666 +2024-08-14 01:57:06,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0 +2024-08-14 01:57:10,846 INFO [dysarthria_finetune.py:1435] (1/4) (14915665920, 34072559616) +2024-08-14 01:57:10,846 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 01:57:10,897 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 01:57:24,750 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 0, loss[loss=0.2019, simple_loss=0.2048, pruned_loss=0.08546, over 18679.00 frames. 
], tot_loss[loss=0.2019, simple_loss=0.2048, pruned_loss=0.08546, over 18679.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:57:24,750 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 01:57:48,462 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 8, validation: loss=0.2004, simple_loss=0.2027, pruned_loss=0.08579, over 1073944.00 frames. +2024-08-14 01:57:48,462 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 01:57:54,670 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.806e+02 2.054e+02 2.212e+02 2.317e+02 3.796e+02, threshold=4.423e+02, percent-clipped=0.0 +2024-08-14 01:58:12,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=104842.66666666667, ans=15.0 +2024-08-14 01:58:18,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.13 vs. limit=15.0 +2024-08-14 01:58:46,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-08-14 01:58:50,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104949.33333333333, ans=0.1 +2024-08-14 01:59:00,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-08-14 01:59:59,177 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 50, loss[loss=0.1938, simple_loss=0.1951, pruned_loss=0.08525, over 19009.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2158, pruned_loss=0.09784, over 829068.39 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 02:00:16,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.69 vs. limit=15.0 +2024-08-14 02:01:01,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105216.0, ans=0.1 +2024-08-14 02:01:10,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.39 vs. limit=15.0 +2024-08-14 02:01:11,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-08-14 02:01:22,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=105269.33333333333, ans=0.1 +2024-08-14 02:01:36,489 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 8, batch 100, loss[loss=0.1994, simple_loss=0.2026, pruned_loss=0.08622, over 19109.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2135, pruned_loss=0.09549, over 1473116.98 frames. 
], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 02:01:42,321 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.040e+02 2.200e+02 2.368e+02 3.520e+02, threshold=4.401e+02, percent-clipped=0.0 +2024-08-14 02:01:51,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=105322.66666666667, ans=0.125 +2024-08-14 02:01:52,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=105322.66666666667, ans=0.04949747468305833 +2024-08-14 02:02:08,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.13 vs. limit=6.0 +2024-08-14 02:02:28,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105429.33333333333, ans=0.1 +2024-08-14 02:02:29,646 INFO [dysarthria_finetune.py:1435] (1/4) (2469068800, 34072559616) +2024-08-14 02:02:29,647 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:02:29,705 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:02:42,909 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 0, loss[loss=0.2088, simple_loss=0.2112, pruned_loss=0.09219, over 18520.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2112, pruned_loss=0.09219, over 18520.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:02:42,910 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:03:19,141 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 9, validation: loss=0.1911, simple_loss=0.1962, pruned_loss=0.08053, over 1073944.00 frames. +2024-08-14 02:03:19,142 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:03:45,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.00 vs. limit=22.5 +2024-08-14 02:04:40,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105637.33333333333, ans=0.125 +2024-08-14 02:05:05,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=105637.33333333333, ans=0.025 +2024-08-14 02:05:27,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=105690.66666666667, ans=0.125 +2024-08-14 02:05:32,389 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:06:19,732 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 50, loss[loss=0.2236, simple_loss=0.2257, pruned_loss=0.1003, over 19008.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2062, pruned_loss=0.09046, over 827563.28 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:06:30,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=105744.0, ans=0.09899494936611666 +2024-08-14 02:07:11,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=105850.66666666667, ans=0.0 +2024-08-14 02:07:13,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105850.66666666667, ans=0.0 +2024-08-14 02:07:22,962 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.818e+02 2.009e+02 2.115e+02 2.263e+02 3.410e+02, threshold=4.229e+02, percent-clipped=0.0 +2024-08-14 02:07:37,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=105904.0, ans=0.125 +2024-08-14 02:07:41,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=105904.0, ans=0.0 +2024-08-14 02:08:28,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105957.33333333333, ans=0.125 +2024-08-14 02:08:32,107 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 9, batch 100, loss[loss=0.1983, simple_loss=0.2047, pruned_loss=0.08455, over 19113.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2053, pruned_loss=0.08865, over 1473118.75 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 16.0 +2024-08-14 02:08:55,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106064.0, ans=0.0 +2024-08-14 02:09:37,379 INFO [dysarthria_finetune.py:1435] (1/4) (789250048, 34072559616) +2024-08-14 02:09:37,380 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:09:37,449 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:09:53,516 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 0, loss[loss=0.2102, simple_loss=0.2164, pruned_loss=0.09095, over 18522.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2164, pruned_loss=0.09095, over 18522.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:09:53,517 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:10:16,415 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 10, validation: loss=0.1833, simple_loss=0.191, pruned_loss=0.07653, over 1073944.00 frames. +2024-08-14 02:10:16,416 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:10:24,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=106165.33333333333, ans=0.125 +2024-08-14 02:10:40,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=106218.66666666667, ans=0.125 +2024-08-14 02:10:41,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=106218.66666666667, ans=6.0 +2024-08-14 02:11:14,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.02 vs. 
limit=22.5 +2024-08-14 02:11:19,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106325.33333333333, ans=0.0 +2024-08-14 02:11:43,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.26 vs. limit=22.5 +2024-08-14 02:11:47,860 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.689e+02 1.913e+02 2.021e+02 2.184e+02 3.494e+02, threshold=4.042e+02, percent-clipped=0.0 +2024-08-14 02:11:55,860 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 50, loss[loss=0.1712, simple_loss=0.1864, pruned_loss=0.06374, over 18973.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.202, pruned_loss=0.08725, over 826863.11 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:12:00,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106432.0, ans=0.125 +2024-08-14 02:12:20,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106485.33333333333, ans=0.0 +2024-08-14 02:13:19,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106645.33333333333, ans=0.125 +2024-08-14 02:13:33,251 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 10, batch 100, loss[loss=0.1914, simple_loss=0.1991, pruned_loss=0.08243, over 19188.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.1986, pruned_loss=0.08399, over 1472464.39 frames. ], batch size: 134, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:13:44,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=106698.66666666667, ans=0.125 +2024-08-14 02:14:05,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=106752.0, ans=0.0 +2024-08-14 02:14:22,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=106805.33333333333, ans=0.125 +2024-08-14 02:14:26,590 INFO [dysarthria_finetune.py:1435] (1/4) (13749649408, 34072559616) +2024-08-14 02:14:26,590 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:14:26,635 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:14:39,660 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 0, loss[loss=0.1895, simple_loss=0.1933, pruned_loss=0.08603, over 18704.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.1933, pruned_loss=0.08603, over 18704.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:14:39,660 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:15:02,461 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 11, validation: loss=0.1768, simple_loss=0.1869, pruned_loss=0.07357, over 1073944.00 frames. 
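The many `ScheduledFloat: name=..., batch_count=..., ans=...` lines record hyperparameters (skip rates, balancer probabilities, dropout) that are not constants but piecewise-linear functions of the global batch count; `ans` is the value currently in effect. A self-contained sketch of such a schedule — the breakpoints below are invented for illustration, not taken from scaling.py:

```python
class PiecewiseLinear:
    """A float hyperparameter scheduled piecewise-linearly over batch count."""

    def __init__(self, *points):
        self.points = sorted(points)  # [(batch_count, value), ...]

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

skip_rate = PiecewiseLinear((0.0, 0.2), (4000.0, 0.05), (16000.0, 0.0))
print(skip_rate(100320.0))  # past the last breakpoint -> 0.0, like ans=0.0 above
```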
+2024-08-14 02:15:02,462 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:15:35,905 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 1.865e+02 1.931e+02 2.118e+02 3.052e+02, threshold=3.863e+02, percent-clipped=0.0 +2024-08-14 02:15:37,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=106906.66666666667, ans=0.025 +2024-08-14 02:15:50,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=106960.0, ans=0.125 +2024-08-14 02:16:01,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106960.0, ans=0.125 +2024-08-14 02:16:30,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=107066.66666666667, ans=22.5 +2024-08-14 02:16:34,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107066.66666666667, ans=0.125 +2024-08-14 02:16:45,029 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 50, loss[loss=0.1769, simple_loss=0.1871, pruned_loss=0.07456, over 18947.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.1966, pruned_loss=0.0828, over 828704.78 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:16:50,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=107120.0, ans=0.2 +2024-08-14 02:17:23,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=107226.66666666667, ans=0.05 +2024-08-14 02:17:23,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=107226.66666666667, ans=0.04949747468305833 +2024-08-14 02:18:09,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.08 vs. limit=15.0 +2024-08-14 02:18:50,465 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 11, batch 100, loss[loss=0.1781, simple_loss=0.1863, pruned_loss=0.07788, over 19147.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.1942, pruned_loss=0.08063, over 1473582.76 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:19:08,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=107386.66666666667, ans=0.0 +2024-08-14 02:19:36,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.754e+02 1.842e+02 1.998e+02 3.456e+02, threshold=3.684e+02, percent-clipped=0.0 +2024-08-14 02:19:46,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.93 vs. limit=15.0 +2024-08-14 02:20:00,007 INFO [dysarthria_finetune.py:1435] (1/4) (847970304, 34072559616) +2024-08-14 02:20:00,008 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:20:00,053 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:20:13,228 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 0, loss[loss=0.2045, simple_loss=0.2089, pruned_loss=0.09434, over 18735.00 frames. 
], tot_loss[loss=0.2045, simple_loss=0.2089, pruned_loss=0.09434, over 18735.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:20:13,229 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:20:15,597 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.3578, 5.7049, 5.7704, 6.2608], device='cuda:1') +2024-08-14 02:20:42,007 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 12, validation: loss=0.1712, simple_loss=0.1836, pruned_loss=0.0713, over 1073944.00 frames. +2024-08-14 02:20:42,007 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:21:12,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=107541.33333333333, ans=0.0 +2024-08-14 02:22:17,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=107648.0, ans=0.125 +2024-08-14 02:23:15,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=9.78 vs. limit=12.0 +2024-08-14 02:23:52,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=107754.66666666667, ans=0.95 +2024-08-14 02:24:25,106 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 50, loss[loss=0.1513, simple_loss=0.166, pruned_loss=0.06077, over 18974.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.1923, pruned_loss=0.07962, over 827168.58 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:25:10,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125 +2024-08-14 02:25:14,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125 +2024-08-14 02:26:24,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=107914.66666666667, ans=0.07 +2024-08-14 02:26:34,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=107914.66666666667, ans=0.0 +2024-08-14 02:26:44,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-08-14 02:27:01,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.24 vs. limit=12.0 +2024-08-14 02:27:09,002 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 1.754e+02 1.846e+02 2.049e+02 2.889e+02, threshold=3.691e+02, percent-clipped=0.0 +2024-08-14 02:27:10,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.27 vs. limit=15.0 +2024-08-14 02:27:37,286 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 12, batch 100, loss[loss=0.1658, simple_loss=0.1814, pruned_loss=0.06789, over 19114.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.1909, pruned_loss=0.07896, over 1473649.48 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:27:40,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=108074.66666666667, ans=0.05 +2024-08-14 02:28:06,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108128.0, ans=0.1 +2024-08-14 02:28:48,223 INFO [dysarthria_finetune.py:1435] (1/4) (994770944, 34072559616) +2024-08-14 02:28:48,224 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:28:48,267 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:29:01,627 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 0, loss[loss=0.2181, simple_loss=0.2197, pruned_loss=0.1047, over 18361.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2197, pruned_loss=0.1047, over 18361.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:29:01,628 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:29:08,177 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([5.1633, 6.1968, 6.2010, 6.0783], device='cuda:1') +2024-08-14 02:29:24,534 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 13, validation: loss=0.1662, simple_loss=0.1808, pruned_loss=0.06949, over 1073944.00 frames. +2024-08-14 02:29:24,535 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:29:42,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=108229.33333333333, ans=0.0 +2024-08-14 02:30:05,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108336.0, ans=0.0 +2024-08-14 02:30:21,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=108336.0, ans=0.0 +2024-08-14 02:30:27,938 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.23 vs. limit=22.5 +2024-08-14 02:30:56,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.06 vs. limit=10.0 +2024-08-14 02:31:06,613 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 50, loss[loss=0.1459, simple_loss=0.1716, pruned_loss=0.05217, over 19011.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.1843, pruned_loss=0.0756, over 828396.70 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:31:10,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=108496.0, ans=0.2 +2024-08-14 02:31:12,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=108496.0, ans=6.0 +2024-08-14 02:31:15,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108496.0, ans=0.125 +2024-08-14 02:31:16,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=108496.0, ans=0.09899494936611666 +2024-08-14 02:31:27,172 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.723e+02 1.826e+02 1.962e+02 2.693e+02, threshold=3.652e+02, percent-clipped=0.0 +2024-08-14 02:31:56,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=108602.66666666667, ans=0.025 +2024-08-14 02:32:06,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=108656.0, ans=15.0 +2024-08-14 02:32:08,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108656.0, ans=0.125 +2024-08-14 02:32:13,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=108656.0, ans=0.0 +2024-08-14 02:32:45,046 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 13, batch 100, loss[loss=0.1582, simple_loss=0.173, pruned_loss=0.06706, over 19217.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.1845, pruned_loss=0.07492, over 1472353.64 frames. 
], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:32:46,293 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:32:57,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108762.66666666667, ans=0.0 +2024-08-14 02:32:59,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=108762.66666666667, ans=0.2 +2024-08-14 02:33:07,577 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:33:11,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108816.0, ans=0.1 +2024-08-14 02:33:15,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=108816.0, ans=0.0 +2024-08-14 02:33:22,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108869.33333333333, ans=0.125 +2024-08-14 02:33:34,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=108869.33333333333, ans=0.125 +2024-08-14 02:33:38,661 INFO [dysarthria_finetune.py:1435] (1/4) (583729152, 34072559616) +2024-08-14 02:33:38,662 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:33:38,712 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:33:51,753 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 0, loss[loss=0.1986, simple_loss=0.2028, pruned_loss=0.09453, over 18619.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2028, pruned_loss=0.09453, over 18619.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:33:51,753 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:34:15,236 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 14, validation: loss=0.1615, simple_loss=0.1782, pruned_loss=0.06778, over 1073944.00 frames. +2024-08-14 02:34:15,237 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:34:56,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=109024.0, ans=0.2 +2024-08-14 02:35:16,114 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.678e+02 1.779e+02 1.987e+02 2.879e+02, threshold=3.559e+02, percent-clipped=0.0 +2024-08-14 02:35:38,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109130.66666666667, ans=0.125 +2024-08-14 02:35:48,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109130.66666666667, ans=0.125 +2024-08-14 02:35:52,730 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 50, loss[loss=0.1723, simple_loss=0.1937, pruned_loss=0.07089, over 19004.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.183, pruned_loss=0.07312, over 826629.84 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:35:56,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.94 vs. 
limit=6.0 +2024-08-14 02:36:00,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.04 vs. limit=22.5 +2024-08-14 02:36:03,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109184.0, ans=0.1 +2024-08-14 02:36:11,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=109237.33333333333, ans=0.125 +2024-08-14 02:36:36,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=109290.66666666667, ans=0.125 +2024-08-14 02:36:53,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=109344.0, ans=0.0 +2024-08-14 02:37:10,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=109397.33333333333, ans=0.1 +2024-08-14 02:37:28,739 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 14, batch 100, loss[loss=0.1575, simple_loss=0.1762, pruned_loss=0.06615, over 19114.00 frames. ], tot_loss[loss=0.168, simple_loss=0.1825, pruned_loss=0.0733, over 1472155.55 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 16.0 +2024-08-14 02:37:29,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109450.66666666667, ans=0.125 +2024-08-14 02:37:47,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=109504.0, ans=0.025 +2024-08-14 02:37:54,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109504.0, ans=0.1 +2024-08-14 02:37:58,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=11.20 vs. limit=12.0 +2024-08-14 02:38:02,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=109504.0, ans=0.07 +2024-08-14 02:38:07,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109557.33333333333, ans=0.125 +2024-08-14 02:38:09,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=109557.33333333333, ans=0.04949747468305833 +2024-08-14 02:38:21,630 INFO [dysarthria_finetune.py:1435] (1/4) (14963900416, 34072559616) +2024-08-14 02:38:21,630 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:38:21,674 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:38:34,972 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 0, loss[loss=0.1867, simple_loss=0.1909, pruned_loss=0.08974, over 18480.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.1909, pruned_loss=0.08974, over 18480.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:38:34,972 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:38:57,685 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 15, validation: loss=0.1571, simple_loss=0.176, pruned_loss=0.06629, over 1073944.00 frames. 
+2024-08-14 02:38:57,685 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:38:59,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-08-14 02:39:02,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-08-14 02:39:07,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.490e+02 1.642e+02 1.752e+02 1.914e+02 2.610e+02, threshold=3.503e+02, percent-clipped=0.0 +2024-08-14 02:39:13,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=109605.33333333333, ans=0.125 +2024-08-14 02:39:16,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=109605.33333333333, ans=0.125 +2024-08-14 02:39:38,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=109658.66666666667, ans=0.09899494936611666 +2024-08-14 02:39:38,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.02 vs. limit=15.0 +2024-08-14 02:39:50,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109712.0, ans=0.1 +2024-08-14 02:40:26,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.11 vs. limit=8.0 +2024-08-14 02:40:31,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=109765.33333333333, ans=0.125 +2024-08-14 02:40:57,612 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 50, loss[loss=0.1749, simple_loss=0.1913, pruned_loss=0.07731, over 19020.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.1824, pruned_loss=0.07406, over 827766.05 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:42:37,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110032.0, ans=0.1 +2024-08-14 02:42:53,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=110085.33333333333, ans=0.125 +2024-08-14 02:43:07,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=110085.33333333333, ans=0.125 +2024-08-14 02:43:17,728 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 15, batch 100, loss[loss=0.145, simple_loss=0.1596, pruned_loss=0.06408, over 19074.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.182, pruned_loss=0.07244, over 1471681.17 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:43:23,630 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.639e+02 1.741e+02 1.916e+02 2.571e+02, threshold=3.482e+02, percent-clipped=0.0 +2024-08-14 02:43:34,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.34 vs. 
limit=15.0 +2024-08-14 02:44:39,950 INFO [dysarthria_finetune.py:1435] (1/4) (2863333376, 34072559616) +2024-08-14 02:44:39,950 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:44:40,034 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:44:54,093 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 0, loss[loss=0.176, simple_loss=0.1983, pruned_loss=0.07578, over 18847.00 frames. ], tot_loss[loss=0.176, simple_loss=0.1983, pruned_loss=0.07578, over 18847.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:44:54,093 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:45:16,881 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 16, validation: loss=0.1529, simple_loss=0.1739, pruned_loss=0.06493, over 1073944.00 frames. +2024-08-14 02:45:16,882 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:45:32,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=110293.33333333333, ans=0.0 +2024-08-14 02:45:50,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.86 vs. limit=22.5 +2024-08-14 02:46:26,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110453.33333333333, ans=0.125 +2024-08-14 02:46:36,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=110506.66666666667, ans=0.125 +2024-08-14 02:46:38,491 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:47:33,746 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 50, loss[loss=0.1623, simple_loss=0.1838, pruned_loss=0.07012, over 19018.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.1783, pruned_loss=0.07014, over 827868.27 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:47:42,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0 +2024-08-14 02:47:44,759 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:47:50,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110560.0, ans=0.125 +2024-08-14 02:48:12,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=110666.66666666667, ans=0.05 +2024-08-14 02:48:12,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=110666.66666666667, ans=0.0 +2024-08-14 02:48:22,827 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.614e+02 1.779e+02 1.933e+02 2.621e+02, threshold=3.558e+02, percent-clipped=0.0 +2024-08-14 02:49:16,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-08-14 02:49:34,534 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 16, batch 100, loss[loss=0.1414, simple_loss=0.164, pruned_loss=0.05939, over 19118.00 frames. 
], tot_loss[loss=0.1606, simple_loss=0.1793, pruned_loss=0.07065, over 1473208.83 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 16.0 +2024-08-14 02:49:35,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110826.66666666667, ans=0.125 +2024-08-14 02:49:36,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.49 vs. limit=22.5 +2024-08-14 02:50:22,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.88 vs. limit=15.0 +2024-08-14 02:50:28,860 INFO [dysarthria_finetune.py:1435] (1/4) (371916800, 34072559616) +2024-08-14 02:50:28,861 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:50:28,931 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:50:47,962 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 0, loss[loss=0.1927, simple_loss=0.2145, pruned_loss=0.08548, over 18527.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2145, pruned_loss=0.08548, over 18527.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:50:47,963 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:50:53,048 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.6480, 4.1175, 4.0232, 4.5265], device='cuda:1') +2024-08-14 02:51:11,068 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 17, validation: loss=0.1498, simple_loss=0.1721, pruned_loss=0.06377, over 1073944.00 frames. +2024-08-14 02:51:11,069 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13100MB +2024-08-14 02:51:27,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110981.33333333333, ans=0.125 +2024-08-14 02:51:31,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111034.66666666667, ans=0.125 +2024-08-14 02:51:44,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111034.66666666667, ans=0.125 +2024-08-14 02:52:04,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=111088.0, ans=0.125 +2024-08-14 02:52:08,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=111088.0, ans=0.025 +2024-08-14 02:52:59,543 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.599e+02 1.701e+02 1.889e+02 2.501e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-14 02:53:07,423 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 50, loss[loss=0.1487, simple_loss=0.1722, pruned_loss=0.06262, over 19037.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.1768, pruned_loss=0.06867, over 827680.99 frames. 
], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:53:10,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=111248.0, ans=0.0 +2024-08-14 02:54:15,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=111408.0, ans=0.0 +2024-08-14 02:54:49,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.66 vs. limit=22.5 +2024-08-14 02:55:04,519 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 17, batch 100, loss[loss=0.1296, simple_loss=0.1546, pruned_loss=0.05235, over 19067.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.1737, pruned_loss=0.06649, over 1472664.14 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:55:45,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=111621.33333333333, ans=0.125 +2024-08-14 02:56:14,212 INFO [dysarthria_finetune.py:1435] (1/4) (1472921600, 34072559616) +2024-08-14 02:56:14,212 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 02:56:14,282 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 02:56:27,749 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 0, loss[loss=0.1738, simple_loss=0.1889, pruned_loss=0.07937, over 18622.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.1889, pruned_loss=0.07937, over 18622.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 02:56:27,750 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 02:56:58,387 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 18, validation: loss=0.1479, simple_loss=0.1705, pruned_loss=0.06271, over 1073944.00 frames. +2024-08-14 02:56:58,388 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-14 02:57:05,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111669.33333333333, ans=0.07 +2024-08-14 02:57:59,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=111722.66666666667, ans=0.0 +2024-08-14 02:58:03,299 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.609e+02 1.680e+02 1.858e+02 2.812e+02, threshold=3.359e+02, percent-clipped=0.0 +2024-08-14 02:58:17,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111776.0, ans=0.07 +2024-08-14 02:58:31,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.69 vs. limit=5.0 +2024-08-14 02:58:52,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=111829.33333333333, ans=0.125 +2024-08-14 02:58:58,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.83 vs. 
limit=12.0 +2024-08-14 02:59:52,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=111936.0, ans=0.125 +2024-08-14 02:59:54,714 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 50, loss[loss=0.1593, simple_loss=0.1816, pruned_loss=0.06857, over 19026.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.1741, pruned_loss=0.06749, over 826500.31 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:00:00,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.43 vs. limit=15.0 +2024-08-14 03:00:06,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=111936.0, ans=0.125 +2024-08-14 03:04:04,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112149.33333333333, ans=0.1 +2024-08-14 03:04:53,114 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 18, batch 100, loss[loss=0.1446, simple_loss=0.1667, pruned_loss=0.06129, over 19036.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.1729, pruned_loss=0.06666, over 1471672.61 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:05:52,852 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.561e+02 1.643e+02 1.812e+02 2.261e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-14 03:07:12,790 INFO [dysarthria_finetune.py:1435] (1/4) (1055588352, 34072559616) +2024-08-14 03:07:12,790 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 03:07:12,861 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 03:07:26,409 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 0, loss[loss=0.1685, simple_loss=0.1947, pruned_loss=0.07114, over 18691.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.1947, pruned_loss=0.07114, over 18691.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:07:26,409 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 03:07:58,726 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 19, validation: loss=0.1464, simple_loss=0.169, pruned_loss=0.06188, over 1073944.00 frames. +2024-08-14 03:07:58,727 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-14 03:08:43,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112405.33333333333, ans=0.0 +2024-08-14 03:09:05,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.45 vs. limit=15.0 +2024-08-14 03:09:10,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.63 vs. limit=22.5 +2024-08-14 03:09:19,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=112458.66666666667, ans=0.125 +2024-08-14 03:09:59,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.02 vs. 
limit=12.0 +2024-08-14 03:10:21,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112512.0, ans=0.1 +2024-08-14 03:10:41,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-08-14 03:10:57,785 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 50, loss[loss=0.1563, simple_loss=0.1807, pruned_loss=0.06597, over 18976.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.1734, pruned_loss=0.06759, over 828010.25 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:11:02,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112618.66666666667, ans=0.1 +2024-08-14 03:11:55,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:11:57,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:12:19,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-08-14 03:12:51,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=112778.66666666667, ans=10.0 +2024-08-14 03:13:01,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=112778.66666666667, ans=0.0 +2024-08-14 03:13:37,507 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.593e+02 1.694e+02 1.909e+02 3.031e+02, threshold=3.389e+02, percent-clipped=0.0 +2024-08-14 03:13:45,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112832.0, ans=0.125 +2024-08-14 03:14:02,405 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 19, batch 100, loss[loss=0.1267, simple_loss=0.1481, pruned_loss=0.05264, over 19118.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.1722, pruned_loss=0.06526, over 1474453.83 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 16.0 +2024-08-14 03:14:07,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=112885.33333333333, ans=0.125 +2024-08-14 03:14:10,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=112885.33333333333, ans=0.2 +2024-08-14 03:14:19,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=112885.33333333333, ans=0.125 +2024-08-14 03:15:47,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.85 vs. limit=15.0 +2024-08-14 03:15:54,658 INFO [dysarthria_finetune.py:1435] (1/4) (877330432, 34072559616) +2024-08-14 03:15:54,659 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 03:15:54,731 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 03:16:41,856 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 0, loss[loss=0.1886, simple_loss=0.2068, pruned_loss=0.08519, over 18758.00 frames. 
], tot_loss[loss=0.1886, simple_loss=0.2068, pruned_loss=0.08519, over 18758.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:16:41,857 INFO [dysarthria_finetune.py:1165] (1/4) Computing validation loss on speech +2024-08-14 03:17:15,217 INFO [dysarthria_finetune.py:1174] (1/4) Validation on speech: Epoch 20, validation: loss=0.1449, simple_loss=0.1677, pruned_loss=0.0611, over 1073944.00 frames. +2024-08-14 03:17:15,218 INFO [dysarthria_finetune.py:1177] (1/4) Maximum memory allocated so far is 13133MB +2024-08-14 03:17:42,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=113040.0, ans=0.125 +2024-08-14 03:17:46,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=113093.33333333333, ans=0.125 +2024-08-14 03:19:26,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113146.66666666667, ans=0.1 +2024-08-14 03:19:36,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=113200.0, ans=0.07 +2024-08-14 03:19:39,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113200.0, ans=0.125 +2024-08-14 03:19:42,198 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 03:19:42,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-08-14 03:20:43,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=113253.33333333333, ans=0.1 +2024-08-14 03:20:55,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113253.33333333333, ans=0.125 +2024-08-14 03:21:08,395 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 50, loss[loss=0.1513, simple_loss=0.1714, pruned_loss=0.06561, over 19069.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.1707, pruned_loss=0.06624, over 828644.17 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:22:10,071 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.567e+02 1.664e+02 1.868e+02 2.522e+02, threshold=3.327e+02, percent-clipped=0.0 +2024-08-14 03:24:34,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=113520.0, ans=0.0 +2024-08-14 03:24:59,416 INFO [dysarthria_finetune.py:1141] (1/4) Epoch 20, batch 100, loss[loss=0.1247, simple_loss=0.1401, pruned_loss=0.0547, over 19104.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.169, pruned_loss=0.06475, over 1473557.06 frames. 
], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:25:15,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-08-14 03:25:32,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-08-14 03:26:31,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=113626.66666666667, ans=0.0 +2024-08-14 03:26:46,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.03 vs. limit=15.0 +2024-08-14 03:26:50,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=113626.66666666667, ans=0.025 +2024-08-14 03:26:55,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.75 vs. limit=15.0 +2024-08-14 03:27:02,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113680.0, ans=0.1 +2024-08-14 03:27:02,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=113680.0, ans=0.0 +2024-08-14 03:27:20,905 INFO [dysarthria_finetune.py:1435] (1/4) (1347092480, 34072559616) +2024-08-14 03:27:20,905 INFO [dysarthria_finetune.py:1436] (1/4) Empty cache: before and after +2024-08-14 03:27:20,968 INFO [dysarthria_finetune.py:1440] (1/4) (29283254272, 34072559616) +2024-08-14 03:27:20,968 INFO [dysarthria_finetune.py:1442] (1/4) Done! 
diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-2 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-2 new file mode 100644 index 0000000000000000000000000000000000000000..05c10f62bc4123006f5a32438e5b346310383cc5 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-2 @@ -0,0 +1,527 @@ +2024-08-13 23:24:47,931 INFO [dysarthria_finetune.py:1212] (2/4) Training started +2024-08-13 23:24:47,960 INFO [dysarthria_finetune.py:1214] (2/4) (33748090880, 34072559616) +2024-08-13 23:24:47,960 INFO [dysarthria_finetune.py:1215] (2/4) Empty cache: before and after +2024-08-13 23:24:48,945 INFO [dysarthria_finetune.py:1219] (2/4) (32783400960, 34072559616) +2024-08-13 23:24:48,946 INFO [dysarthria_finetune.py:1229] (2/4) Device: cuda:2 +2024-08-13 23:24:48,990 INFO [dysarthria_finetune.py:1241] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': 
True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-13 23:24:48,991 INFO [dysarthria_finetune.py:1243] (2/4) About to create model +2024-08-13 23:24:50,075 INFO [dysarthria_finetune.py:1247] (2/4) Number of model parameters: 65549011 +2024-08-13 23:24:50,075 INFO [dysarthria_finetune.py:769] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-13 23:24:59,919 INFO [dysarthria_finetune.py:1275] (2/4) Using DDP +2024-08-13 23:25:17,950 INFO [dysarthria_asr_datamodule.py:494] (2/4) About to get train cuts +2024-08-13 23:25:18,291 INFO [dysarthria_finetune.py:1319] (2/4) CutSet(len=62255) [underlying data type: ] +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:239] (2/4) Disable MUSAN +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:257] (2/4) Enable SpecAugment +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:258] (2/4) Time warp factor: 80 +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:268] (2/4) Num frame mask: 10 +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:281] (2/4) About to create train dataset +2024-08-13 23:25:19,282 INFO [dysarthria_asr_datamodule.py:308] (2/4) Using DynamicBucketingSampler. +2024-08-13 23:25:20,228 INFO [dysarthria_asr_datamodule.py:325] (2/4) About to create train dataloader +2024-08-13 23:25:20,234 INFO [dysarthria_asr_datamodule.py:500] (2/4) About to get dev cuts +2024-08-13 23:25:24,698 INFO [dysarthria_asr_datamodule.py:356] (2/4) About to create dev dataset +2024-08-13 23:25:28,033 INFO [dysarthria_asr_datamodule.py:373] (2/4) About to create dev dataloader +2024-08-13 23:25:28,034 INFO [dysarthria_finetune.py:1490] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-13 23:27:16,791 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=22.66 vs. limit=7.5 +2024-08-13 23:27:17,110 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=17.80 vs. limit=7.5 +2024-08-13 23:27:17,919 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 23:27:19,738 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 23:32:34,805 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 23:32:36,803 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 23:35:38,399 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 23:35:41,668 INFO [dysarthria_finetune.py:1518] (2/4) Maximum memory allocated so far is 11790MB +2024-08-13 23:36:58,732 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 0, loss[loss=0.3479, simple_loss=0.3286, pruned_loss=0.1929, over 18533.00 frames. ], tot_loss[loss=0.3479, simple_loss=0.3286, pruned_loss=0.1929, over 18533.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 23:36:58,733 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-13 23:49:47,044 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. 
+2024-08-13 23:49:47,367 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19754MB +2024-08-13 23:51:29,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.11 vs. limit=22.5 +2024-08-13 23:51:42,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1 +2024-08-13 23:53:32,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.08 vs. limit=15.0 +2024-08-14 00:02:25,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=100000.0, ans=0.09899494936611666 +2024-08-14 00:15:49,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.298e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0 +2024-08-14 00:19:46,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-08-14 00:20:37,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.66 vs. limit=15.0 +2024-08-14 00:22:20,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.42 vs. limit=15.0 +2024-08-14 00:23:15,345 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.450e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0 +2024-08-14 00:28:13,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.71 vs. limit=15.0 +2024-08-14 00:29:53,825 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 7.298e+02 9.450e+02 1.050e+03 1.319e+03, threshold=3.780e+03, percent-clipped=0.0 +2024-08-14 00:33:57,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-14 00:37:00,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100266.66666666667, ans=0.125 +2024-08-14 00:37:01,625 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 50, loss[loss=0.3626, simple_loss=0.3408, pruned_loss=0.2122, over 19018.00 frames. ], tot_loss[loss=0.3542, simple_loss=0.3338, pruned_loss=0.2019, over 827419.58 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-14 00:42:55,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100266.66666666667, ans=0.1 +2024-08-14 00:42:55,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=24.05 vs. 
limit=15.0 +2024-08-14 00:46:57,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100320.0, ans=0.125 +2024-08-14 00:47:09,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=100320.0, ans=0.125 +2024-08-14 00:49:05,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.62 vs. limit=6.0 +2024-08-14 00:52:23,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.54 vs. limit=22.5 +2024-08-14 00:57:45,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100480.0, ans=0.125 +2024-08-14 01:01:58,596 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+02 5.963e+02 7.298e+02 8.800e+02 1.319e+03, threshold=1.460e+03, percent-clipped=0.0 +2024-08-14 01:01:58,632 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 1, batch 100, loss[loss=0.3197, simple_loss=0.302, pruned_loss=0.1735, over 19117.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3212, pruned_loss=0.1903, over 1475925.13 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-14 01:03:28,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.96 vs. limit=15.0 +2024-08-14 01:11:48,006 INFO [dysarthria_finetune.py:1435] (2/4) (6835339264, 34072559616) +2024-08-14 01:11:48,007 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:11:48,057 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:12:35,403 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 0, loss[loss=0.2944, simple_loss=0.2803, pruned_loss=0.1466, over 18502.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.2803, pruned_loss=0.1466, over 18502.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-14 01:12:35,404 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:16:55,990 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 2, validation: loss=0.2907, simple_loss=0.276, pruned_loss=0.149, over 1073944.00 frames. +2024-08-14 01:16:55,991 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:20:02,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100736.0, ans=0.1 +2024-08-14 01:20:03,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.04 vs. limit=15.0 +2024-08-14 01:20:52,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=100789.33333333333, ans=15.0 +2024-08-14 01:22:32,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=100896.0, ans=0.2 +2024-08-14 01:24:54,898 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 50, loss[loss=0.3095, simple_loss=0.2935, pruned_loss=0.1612, over 18952.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3039, pruned_loss=0.1742, over 829638.79 frames. 
], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-08-14 01:27:07,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=101002.66666666667, ans=0.0 +2024-08-14 01:27:16,529 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:27:29,014 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 4.347e+02 4.852e+02 5.543e+02 7.043e+02, threshold=9.703e+02, percent-clipped=0.0 +2024-08-14 01:27:52,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101056.0, ans=0.1 +2024-08-14 01:28:04,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=101109.33333333333, ans=15.0 +2024-08-14 01:28:12,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-14 01:28:12,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101109.33333333333, ans=0.125 +2024-08-14 01:28:20,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=22.5 +2024-08-14 01:28:36,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.64 vs. limit=15.0 +2024-08-14 01:28:52,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.51 vs. limit=6.0 +2024-08-14 01:28:57,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=15.65 vs. limit=15.0 +2024-08-14 01:29:06,627 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 2, batch 100, loss[loss=0.2613, simple_loss=0.2493, pruned_loss=0.1292, over 19108.00 frames. ], tot_loss[loss=0.31, simple_loss=0.2936, pruned_loss=0.1641, over 1476292.15 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-08-14 01:29:10,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101216.0, ans=0.0 +2024-08-14 01:30:15,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101322.66666666667, ans=0.0 +2024-08-14 01:30:33,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101322.66666666667, ans=0.125 +2024-08-14 01:30:42,018 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 01:30:42,018 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:30:42,060 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:30:55,539 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 0, loss[loss=0.3069, simple_loss=0.2898, pruned_loss=0.1657, over 18600.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.2898, pruned_loss=0.1657, over 18600.00 frames. 
], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-08-14 01:30:55,539 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:31:18,578 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 3, validation: loss=0.2682, simple_loss=0.2564, pruned_loss=0.1309, over 1073944.00 frames. +2024-08-14 01:31:18,578 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:31:50,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=101424.0, ans=0.025 +2024-08-14 01:31:50,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101424.0, ans=0.125 +2024-08-14 01:32:00,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=101424.0, ans=0.2 +2024-08-14 01:32:24,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=101477.33333333333, ans=0.2 +2024-08-14 01:32:25,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=101477.33333333333, ans=0.125 +2024-08-14 01:32:32,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=101530.66666666667, ans=0.125 +2024-08-14 01:32:58,604 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 3.350e+02 3.692e+02 4.154e+02 5.648e+02, threshold=7.384e+02, percent-clipped=0.0 +2024-08-14 01:33:11,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101584.0, ans=0.1 +2024-08-14 01:33:14,922 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 50, loss[loss=0.2893, simple_loss=0.2752, pruned_loss=0.148, over 19168.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.2827, pruned_loss=0.152, over 828229.52 frames. ], batch size: 103, lr: 8.08e-05, grad_scale: 16.0 +2024-08-14 01:33:16,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101637.33333333333, ans=0.1 +2024-08-14 01:33:18,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=101637.33333333333, ans=0.125 +2024-08-14 01:35:08,698 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 3, batch 100, loss[loss=0.2503, simple_loss=0.2407, pruned_loss=0.1192, over 19024.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.2751, pruned_loss=0.1467, over 1476045.82 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-08-14 01:35:55,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=102010.66666666667, ans=0.125 +2024-08-14 01:36:03,976 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 01:36:04,625 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:36:04,670 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:36:20,152 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 0, loss[loss=0.2799, simple_loss=0.2699, pruned_loss=0.1314, over 18618.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.2699, pruned_loss=0.1314, over 18618.00 frames. 
], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-08-14 01:36:20,152 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:36:43,052 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 4, validation: loss=0.2499, simple_loss=0.241, pruned_loss=0.1173, over 1073944.00 frames. +2024-08-14 01:36:43,053 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:37:05,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.46 vs. limit=15.0 +2024-08-14 01:37:11,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.15 vs. limit=12.0 +2024-08-14 01:37:13,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=102106.66666666667, ans=0.0 +2024-08-14 01:37:14,067 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.440e+02 2.841e+02 3.076e+02 3.396e+02 5.357e+02, threshold=6.153e+02, percent-clipped=0.0 +2024-08-14 01:38:23,454 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 50, loss[loss=0.2688, simple_loss=0.2584, pruned_loss=0.1302, over 18961.00 frames. ], tot_loss[loss=0.273, simple_loss=0.2618, pruned_loss=0.1339, over 828488.26 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-08-14 01:38:30,698 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.16 vs. limit=15.0 +2024-08-14 01:38:40,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=102320.0, ans=0.125 +2024-08-14 01:38:58,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.41 vs. limit=22.5 +2024-08-14 01:39:23,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=102480.0, ans=0.125 +2024-08-14 01:39:29,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102480.0, ans=0.0 +2024-08-14 01:39:43,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=102533.33333333333, ans=0.035 +2024-08-14 01:39:52,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=102533.33333333333, ans=0.1 +2024-08-14 01:40:00,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=20.48 vs. limit=15.0 +2024-08-14 01:40:00,893 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 4, batch 100, loss[loss=0.2498, simple_loss=0.2395, pruned_loss=0.1238, over 19090.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.2584, pruned_loss=0.1321, over 1476821.49 frames. 
], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-08-14 01:40:22,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=102640.0, ans=22.5 +2024-08-14 01:40:30,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 2.524e+02 2.719e+02 2.975e+02 4.617e+02, threshold=5.438e+02, percent-clipped=0.0 +2024-08-14 01:40:34,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0 +2024-08-14 01:40:48,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-08-14 01:40:50,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-08-14 01:40:51,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.84 vs. limit=22.5 +2024-08-14 01:40:55,780 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 01:40:55,781 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:40:55,824 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:41:11,067 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 0, loss[loss=0.2592, simple_loss=0.25, pruned_loss=0.1247, over 18551.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.25, pruned_loss=0.1247, over 18551.00 frames. ], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:41:11,068 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:41:34,533 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 5, validation: loss=0.2343, simple_loss=0.2283, pruned_loss=0.1066, over 1073944.00 frames. +2024-08-14 01:41:34,534 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:42:38,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=102848.0, ans=0.0 +2024-08-14 01:43:11,226 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.62 vs. limit=15.0 +2024-08-14 01:43:29,159 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 50, loss[loss=0.2364, simple_loss=0.2296, pruned_loss=0.1108, over 19027.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.2492, pruned_loss=0.1229, over 828775.72 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:44:12,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=103061.33333333333, ans=0.05 +2024-08-14 01:44:16,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=103061.33333333333, ans=0.125 +2024-08-14 01:44:26,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.78 vs. 
limit=22.5 +2024-08-14 01:44:29,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103114.66666666667, ans=0.1 +2024-08-14 01:44:59,445 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.063e+02 2.398e+02 2.550e+02 2.967e+02 4.732e+02, threshold=5.099e+02, percent-clipped=0.0 +2024-08-14 01:45:02,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103168.0, ans=0.1 +2024-08-14 01:45:06,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=103221.33333333333, ans=0.5 +2024-08-14 01:45:14,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-08-14 01:45:18,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.64 vs. limit=10.0 +2024-08-14 01:45:23,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.27 vs. limit=15.0 +2024-08-14 01:45:27,201 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 5, batch 100, loss[loss=0.2459, simple_loss=0.2379, pruned_loss=0.1181, over 19114.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.2416, pruned_loss=0.1182, over 1478197.42 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:46:02,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=103328.0, ans=0.1 +2024-08-14 01:46:08,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.60 vs. limit=22.5 +2024-08-14 01:46:18,141 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 01:46:18,141 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:46:18,185 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:46:32,107 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 0, loss[loss=0.2436, simple_loss=0.2385, pruned_loss=0.1115, over 18783.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2385, pruned_loss=0.1115, over 18783.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:46:32,108 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:46:55,695 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 6, validation: loss=0.2214, simple_loss=0.2182, pruned_loss=0.09842, over 1073944.00 frames. +2024-08-14 01:46:55,696 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:47:15,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.55 vs. 
limit=15.0 +2024-08-14 01:47:47,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103477.33333333333, ans=0.1 +2024-08-14 01:47:47,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103477.33333333333, ans=0.1 +2024-08-14 01:47:53,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.08 vs. limit=15.0 +2024-08-14 01:47:56,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.64 vs. limit=22.5 +2024-08-14 01:48:12,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=17.78 vs. limit=15.0 +2024-08-14 01:48:17,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=103584.0, ans=0.025 +2024-08-14 01:48:23,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=103584.0, ans=0.025 +2024-08-14 01:48:25,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=103584.0, ans=0.125 +2024-08-14 01:49:09,883 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 50, loss[loss=0.2439, simple_loss=0.238, pruned_loss=0.1143, over 19006.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.2314, pruned_loss=0.1106, over 828020.78 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:49:32,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.23 vs. limit=22.5 +2024-08-14 01:49:32,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 2.293e+02 2.374e+02 2.625e+02 4.193e+02, threshold=4.747e+02, percent-clipped=0.0 +2024-08-14 01:49:56,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103744.0, ans=0.125 +2024-08-14 01:49:59,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.12 vs. limit=15.0 +2024-08-14 01:50:03,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=103797.33333333333, ans=0.5 +2024-08-14 01:50:08,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=103797.33333333333, ans=0.09899494936611666 +2024-08-14 01:50:13,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.06 vs. 
limit=15.0 +2024-08-14 01:50:21,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103797.33333333333, ans=0.1 +2024-08-14 01:50:49,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=103850.66666666667, ans=0.0 +2024-08-14 01:50:50,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.93 vs. limit=6.0 +2024-08-14 01:51:17,783 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 6, batch 100, loss[loss=0.2473, simple_loss=0.2428, pruned_loss=0.1142, over 19060.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2305, pruned_loss=0.1097, over 1475525.13 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:51:31,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=103957.33333333333, ans=0.95 +2024-08-14 01:51:54,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=104010.66666666667, ans=0.2 +2024-08-14 01:52:24,215 INFO [dysarthria_finetune.py:1435] (2/4) (10763304960, 34072559616) +2024-08-14 01:52:24,216 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:52:24,255 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:52:37,500 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 0, loss[loss=0.2387, simple_loss=0.2368, pruned_loss=0.1063, over 18435.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.2368, pruned_loss=0.1063, over 18435.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:52:37,500 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:53:01,317 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 7, validation: loss=0.2103, simple_loss=0.2098, pruned_loss=0.0916, over 1073944.00 frames. +2024-08-14 01:53:01,318 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:53:52,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=104213.33333333333, ans=0.125 +2024-08-14 01:53:52,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=12.0 +2024-08-14 01:53:54,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.59 vs. limit=15.0 +2024-08-14 01:54:01,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.925e+02 2.137e+02 2.271e+02 2.445e+02 3.999e+02, threshold=4.542e+02, percent-clipped=0.0 +2024-08-14 01:54:22,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=104320.0, ans=0.125 +2024-08-14 01:54:40,882 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 50, loss[loss=0.2052, simple_loss=0.2075, pruned_loss=0.08584, over 18970.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.227, pruned_loss=0.1064, over 828175.40 frames. 
], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:55:47,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104533.33333333333, ans=0.1 +2024-08-14 01:55:57,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.06 vs. limit=15.0 +2024-08-14 01:55:59,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=104586.66666666667, ans=0.125 +2024-08-14 01:56:02,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=104586.66666666667, ans=0.2 +2024-08-14 01:56:18,958 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 7, batch 100, loss[loss=0.2159, simple_loss=0.2161, pruned_loss=0.09522, over 19065.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2222, pruned_loss=0.1029, over 1476190.89 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:56:20,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104640.0, ans=0.125 +2024-08-14 01:56:28,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.03 vs. limit=22.5 +2024-08-14 01:57:02,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=104746.66666666667, ans=0.0 +2024-08-14 01:57:10,846 INFO [dysarthria_finetune.py:1435] (2/4) (10763304960, 34072559616) +2024-08-14 01:57:10,847 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 01:57:10,898 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 01:57:24,750 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 0, loss[loss=0.2036, simple_loss=0.2073, pruned_loss=0.08498, over 18635.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2073, pruned_loss=0.08498, over 18635.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:57:24,751 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 01:57:48,460 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 8, validation: loss=0.2004, simple_loss=0.2027, pruned_loss=0.08579, over 1073944.00 frames. +2024-08-14 01:57:48,461 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 01:57:54,666 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.806e+02 2.054e+02 2.212e+02 2.317e+02 3.796e+02, threshold=4.423e+02, percent-clipped=0.0 +2024-08-14 01:58:14,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.69 vs. 
limit=15.0 +2024-08-14 01:58:33,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=104896.0, ans=0.0 +2024-08-14 01:58:49,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=104949.33333333333, ans=0.125 +2024-08-14 01:59:02,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=104949.33333333333, ans=0.0 +2024-08-14 01:59:46,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=105002.66666666667, ans=0.125 +2024-08-14 01:59:59,169 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 50, loss[loss=0.2061, simple_loss=0.212, pruned_loss=0.08423, over 19000.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2169, pruned_loss=0.09951, over 827531.12 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 02:00:26,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105109.33333333333, ans=0.1 +2024-08-14 02:00:29,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.58 vs. limit=22.5 +2024-08-14 02:00:51,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=105162.66666666667, ans=0.2 +2024-08-14 02:01:36,491 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 8, batch 100, loss[loss=0.202, simple_loss=0.2028, pruned_loss=0.09074, over 19093.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2112, pruned_loss=0.0944, over 1475468.79 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 02:01:42,319 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.040e+02 2.200e+02 2.368e+02 3.520e+02, threshold=4.401e+02, percent-clipped=0.0 +2024-08-14 02:01:47,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105322.66666666667, ans=0.125 +2024-08-14 02:01:54,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=105376.0, ans=0.2 +2024-08-14 02:01:58,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=105376.0, ans=0.0 +2024-08-14 02:01:58,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.71 vs. limit=15.0 +2024-08-14 02:02:21,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.28 vs. limit=15.0 +2024-08-14 02:02:29,637 INFO [dysarthria_finetune.py:1435] (2/4) (10763304960, 34072559616) +2024-08-14 02:02:29,637 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:02:29,681 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:02:42,908 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 0, loss[loss=0.2207, simple_loss=0.2212, pruned_loss=0.1001, over 18461.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2212, pruned_loss=0.1001, over 18461.00 frames. 
], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:02:42,909 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:03:19,142 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 9, validation: loss=0.1911, simple_loss=0.1962, pruned_loss=0.08053, over 1073944.00 frames. +2024-08-14 02:03:19,143 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:04:18,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105584.0, ans=0.0 +2024-08-14 02:04:22,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105584.0, ans=0.1 +2024-08-14 02:04:25,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.87 vs. limit=15.0 +2024-08-14 02:04:43,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=105637.33333333333, ans=0.125 +2024-08-14 02:05:08,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=105637.33333333333, ans=0.05 +2024-08-14 02:06:19,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=7.51 vs. limit=12.0 +2024-08-14 02:06:19,732 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 50, loss[loss=0.2093, simple_loss=0.2165, pruned_loss=0.08741, over 18943.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2055, pruned_loss=0.0898, over 826909.81 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:07:11,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105850.66666666667, ans=0.1 +2024-08-14 02:07:19,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=105850.66666666667, ans=0.2 +2024-08-14 02:07:19,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105850.66666666667, ans=0.125 +2024-08-14 02:07:22,957 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.818e+02 2.009e+02 2.115e+02 2.263e+02 3.410e+02, threshold=4.229e+02, percent-clipped=0.0 +2024-08-14 02:07:25,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=12.0 +2024-08-14 02:07:33,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=105904.0, ans=0.0 +2024-08-14 02:08:32,106 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 9, batch 100, loss[loss=0.1913, simple_loss=0.1994, pruned_loss=0.07928, over 19136.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.204, pruned_loss=0.08796, over 1474643.82 frames. 
], batch size: 133, lr: 9.98e-05, grad_scale: 16.0 +2024-08-14 02:08:33,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=106010.66666666667, ans=0.2 +2024-08-14 02:08:40,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=106010.66666666667, ans=0.2 +2024-08-14 02:08:43,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=106010.66666666667, ans=0.0 +2024-08-14 02:09:28,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=106117.33333333333, ans=0.035 +2024-08-14 02:09:37,378 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 02:09:37,379 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:09:37,422 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:09:53,511 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 0, loss[loss=0.2298, simple_loss=0.2323, pruned_loss=0.1042, over 18505.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2323, pruned_loss=0.1042, over 18505.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:09:53,512 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:10:16,413 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 10, validation: loss=0.1833, simple_loss=0.191, pruned_loss=0.07653, over 1073944.00 frames. +2024-08-14 02:10:16,414 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:10:17,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.18 vs. limit=15.0 +2024-08-14 02:10:24,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=106165.33333333333, ans=0.2 +2024-08-14 02:10:54,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-08-14 02:11:19,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106325.33333333333, ans=0.0 +2024-08-14 02:11:35,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-08-14 02:11:47,860 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.689e+02 1.913e+02 2.021e+02 2.184e+02 3.494e+02, threshold=4.042e+02, percent-clipped=0.0 +2024-08-14 02:11:55,858 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 50, loss[loss=0.1915, simple_loss=0.1991, pruned_loss=0.08149, over 19019.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2011, pruned_loss=0.08652, over 827816.98 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:11:57,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=106432.0, ans=0.2 +2024-08-14 02:12:06,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=106432.0, ans=0.0 +2024-08-14 02:13:05,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=106592.0, ans=0.95 +2024-08-14 02:13:09,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=106592.0, ans=0.09899494936611666 +2024-08-14 02:13:33,245 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 10, batch 100, loss[loss=0.1694, simple_loss=0.1776, pruned_loss=0.07144, over 19070.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.1969, pruned_loss=0.08306, over 1475821.74 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:14:09,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=106752.0, ans=0.125 +2024-08-14 02:14:18,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106805.33333333333, ans=0.125 +2024-08-14 02:14:26,593 INFO [dysarthria_finetune.py:1435] (2/4) (10696196096, 34072559616) +2024-08-14 02:14:26,593 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:14:26,636 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:14:39,660 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 0, loss[loss=0.2263, simple_loss=0.2236, pruned_loss=0.1098, over 18525.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2236, pruned_loss=0.1098, over 18525.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:14:39,661 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:15:02,460 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 11, validation: loss=0.1768, simple_loss=0.1869, pruned_loss=0.07357, over 1073944.00 frames. +2024-08-14 02:15:02,460 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:15:33,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106906.66666666667, ans=0.125 +2024-08-14 02:15:35,905 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 1.865e+02 1.931e+02 2.118e+02 3.052e+02, threshold=3.863e+02, percent-clipped=0.0 +2024-08-14 02:15:58,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106960.0, ans=0.1 +2024-08-14 02:16:34,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=107066.66666666667, ans=0.025 +2024-08-14 02:16:45,031 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 50, loss[loss=0.1874, simple_loss=0.1936, pruned_loss=0.08339, over 19068.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.1953, pruned_loss=0.08196, over 827285.47 frames. 
], batch size: 102, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:16:48,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=107120.0, ans=0.125 +2024-08-14 02:17:21,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=107173.33333333333, ans=0.2 +2024-08-14 02:17:21,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=107173.33333333333, ans=0.0 +2024-08-14 02:17:51,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107226.66666666667, ans=0.125 +2024-08-14 02:18:20,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=107333.33333333333, ans=0.2 +2024-08-14 02:18:50,455 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 11, batch 100, loss[loss=0.1721, simple_loss=0.1814, pruned_loss=0.07399, over 19059.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.1937, pruned_loss=0.08056, over 1474809.38 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:19:04,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-08-14 02:19:14,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.70 vs. limit=22.5 +2024-08-14 02:19:36,871 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.754e+02 1.842e+02 1.998e+02 3.456e+02, threshold=3.684e+02, percent-clipped=0.0 +2024-08-14 02:19:41,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.52 vs. limit=15.0 +2024-08-14 02:19:59,984 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 02:19:59,985 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:20:00,025 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:20:13,223 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 0, loss[loss=0.1858, simple_loss=0.1971, pruned_loss=0.07927, over 18505.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.1971, pruned_loss=0.07927, over 18505.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:20:13,223 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:20:42,008 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 12, validation: loss=0.1712, simple_loss=0.1836, pruned_loss=0.0713, over 1073944.00 frames. 
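A note on the `ScheduledFloat` entries that dominate this log: they record hyperparameters (skip rates, balancer probabilities, dropout) being re-evaluated against the running `batch_count`. As a rough, hedged illustration only, assuming a piecewise-linear schedule keyed on `batch_count`, which is not guaranteed to match the actual `scaling.py` class:

```python
# Illustrative only: a float hyperparameter interpolated piecewise-linearly
# against the running batch_count, matching the shape of the
# "ScheduledFloat: name=..., batch_count=..., ans=..." lines in this log.
# This is an assumption about the behaviour, not the actual scaling.py code.
class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# A skip rate that decays to zero early in training would read ans=0.0
# by batch_count=102848.0, as the conv_skip_rate entries above do.
conv_skip_rate = PiecewiseLinearSchedule((0.0, 0.1), (20000.0, 0.0))
print(conv_skip_rate(102848.0))  # 0.0
```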
+2024-08-14 02:20:42,009 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:21:16,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=107594.66666666667, ans=0.125 +2024-08-14 02:22:18,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107648.0, ans=0.125 +2024-08-14 02:23:13,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107701.33333333333, ans=0.125 +2024-08-14 02:23:32,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107754.66666666667, ans=0.125 +2024-08-14 02:23:46,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=107754.66666666667, ans=0.2 +2024-08-14 02:24:25,100 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 50, loss[loss=0.174, simple_loss=0.1882, pruned_loss=0.07209, over 18979.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.1897, pruned_loss=0.07895, over 828348.40 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:26:15,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.85 vs. limit=6.0 +2024-08-14 02:26:31,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.79 vs. limit=10.0 +2024-08-14 02:26:42,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107914.66666666667, ans=0.125 +2024-08-14 02:27:04,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=107968.0, ans=0.2 +2024-08-14 02:27:08,998 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 1.754e+02 1.846e+02 2.049e+02 2.889e+02, threshold=3.691e+02, percent-clipped=0.0 +2024-08-14 02:27:13,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=108021.33333333333, ans=0.0 +2024-08-14 02:27:13,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.37 vs. limit=15.0 +2024-08-14 02:27:37,285 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 12, batch 100, loss[loss=0.1595, simple_loss=0.176, pruned_loss=0.06412, over 19089.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.1899, pruned_loss=0.07782, over 1475248.85 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:28:03,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=108128.0, ans=0.2 +2024-08-14 02:28:03,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=108128.0, ans=0.125 +2024-08-14 02:28:48,198 INFO [dysarthria_finetune.py:1435] (2/4) (10763304960, 34072559616) +2024-08-14 02:28:48,199 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:28:48,236 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:29:01,627 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 0, loss[loss=0.2041, simple_loss=0.2147, pruned_loss=0.09096, over 18540.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2147, pruned_loss=0.09096, over 18540.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:29:01,628 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:29:09,427 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.5372, 5.8372, 6.1243, 5.9362], device='cuda:2') +2024-08-14 02:29:24,534 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 13, validation: loss=0.1662, simple_loss=0.1808, pruned_loss=0.06949, over 1073944.00 frames. +2024-08-14 02:29:24,535 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:29:38,896 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=15.67 vs. limit=15.0 +2024-08-14 02:29:51,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=108282.66666666667, ans=0.0 +2024-08-14 02:30:04,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=108336.0, ans=0.0 +2024-08-14 02:30:11,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.20 vs. limit=10.0 +2024-08-14 02:30:21,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108336.0, ans=0.125 +2024-08-14 02:30:33,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.19 vs. limit=22.5 +2024-08-14 02:30:35,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.68 vs. limit=15.0 +2024-08-14 02:30:46,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108442.66666666667, ans=0.125 +2024-08-14 02:31:06,619 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 50, loss[loss=0.1903, simple_loss=0.2024, pruned_loss=0.08382, over 18984.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.1876, pruned_loss=0.07481, over 829065.08 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:31:27,171 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.723e+02 1.826e+02 1.962e+02 2.693e+02, threshold=3.652e+02, percent-clipped=0.0 +2024-08-14 02:31:46,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=9.70 vs. limit=12.0 +2024-08-14 02:31:48,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=108602.66666666667, ans=0.025 +2024-08-14 02:32:13,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.84 vs. limit=6.0 +2024-08-14 02:32:17,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=108656.0, ans=0.0 +2024-08-14 02:32:27,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=108709.33333333333, ans=0.0 +2024-08-14 02:32:32,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=108709.33333333333, ans=0.125 +2024-08-14 02:32:32,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=108709.33333333333, ans=0.2 +2024-08-14 02:32:45,025 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 13, batch 100, loss[loss=0.1596, simple_loss=0.1771, pruned_loss=0.06582, over 19116.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.1881, pruned_loss=0.07597, over 1477011.25 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:32:49,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=108762.66666666667, ans=0.2 +2024-08-14 02:33:03,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=108816.0, ans=0.04949747468305833 +2024-08-14 02:33:09,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108816.0, ans=0.125 +2024-08-14 02:33:17,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108816.0, ans=0.125 +2024-08-14 02:33:17,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=108816.0, ans=0.125 +2024-08-14 02:33:21,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=108816.0, ans=10.0 +2024-08-14 02:33:28,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.32 vs. limit=15.0 +2024-08-14 02:33:38,639 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 02:33:38,640 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:33:38,680 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:33:51,753 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 0, loss[loss=0.1893, simple_loss=0.1956, pruned_loss=0.08854, over 18523.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.1956, pruned_loss=0.08854, over 18523.00 frames. 
], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:33:51,754 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:34:15,234 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 14, validation: loss=0.1615, simple_loss=0.1782, pruned_loss=0.06778, over 1073944.00 frames. +2024-08-14 02:34:15,235 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:34:28,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108917.33333333333, ans=0.125 +2024-08-14 02:35:13,451 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:35:16,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.678e+02 1.779e+02 1.987e+02 2.879e+02, threshold=3.559e+02, percent-clipped=0.0 +2024-08-14 02:35:23,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=109077.33333333333, ans=0.125 +2024-08-14 02:35:42,040 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:35:44,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109130.66666666667, ans=0.1 +2024-08-14 02:35:52,700 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 50, loss[loss=0.1562, simple_loss=0.175, pruned_loss=0.06467, over 18999.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.1832, pruned_loss=0.07324, over 827850.18 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:36:07,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=109184.0, ans=0.125 +2024-08-14 02:37:28,721 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 14, batch 100, loss[loss=0.1649, simple_loss=0.1814, pruned_loss=0.07122, over 19059.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.1844, pruned_loss=0.07397, over 1475617.37 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 16.0 +2024-08-14 02:37:43,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=109450.66666666667, ans=0.0 +2024-08-14 02:37:50,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109504.0, ans=0.1 +2024-08-14 02:37:56,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109504.0, ans=0.125 +2024-08-14 02:38:03,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109504.0, ans=0.125 +2024-08-14 02:38:04,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=109504.0, ans=0.0 +2024-08-14 02:38:21,623 INFO [dysarthria_finetune.py:1435] (2/4) (10700390400, 34072559616) +2024-08-14 02:38:21,624 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:38:21,674 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:38:34,954 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 0, loss[loss=0.2223, simple_loss=0.2309, pruned_loss=0.1047, over 18678.00 frames. 
], tot_loss[loss=0.2223, simple_loss=0.2309, pruned_loss=0.1047, over 18678.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:38:34,955 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:38:57,683 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 15, validation: loss=0.1571, simple_loss=0.176, pruned_loss=0.06629, over 1073944.00 frames. +2024-08-14 02:38:57,683 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:39:00,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-08-14 02:39:02,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-08-14 02:39:07,269 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.490e+02 1.642e+02 1.752e+02 1.914e+02 2.610e+02, threshold=3.503e+02, percent-clipped=0.0 +2024-08-14 02:39:16,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=109605.33333333333, ans=0.125 +2024-08-14 02:39:40,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.35 vs. limit=15.0 +2024-08-14 02:39:44,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=109658.66666666667, ans=0.125 +2024-08-14 02:40:31,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=109765.33333333333, ans=0.0 +2024-08-14 02:40:39,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109818.66666666667, ans=0.125 +2024-08-14 02:40:57,611 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 50, loss[loss=0.164, simple_loss=0.1872, pruned_loss=0.06803, over 18994.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.1822, pruned_loss=0.0729, over 827605.34 frames. ], batch size: 102, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:42:00,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=109925.33333333333, ans=0.0 +2024-08-14 02:42:07,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=109925.33333333333, ans=0.0 +2024-08-14 02:42:24,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=15.0 +2024-08-14 02:42:31,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.02 vs. limit=15.0 +2024-08-14 02:42:35,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=110032.0, ans=0.125 +2024-08-14 02:42:39,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=110032.0, ans=0.0 +2024-08-14 02:43:17,711 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 15, batch 100, loss[loss=0.1574, simple_loss=0.18, pruned_loss=0.06593, over 19062.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.1815, pruned_loss=0.07178, over 1475114.53 frames. 
], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:43:23,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.639e+02 1.741e+02 1.916e+02 2.571e+02, threshold=3.482e+02, percent-clipped=0.0 +2024-08-14 02:44:29,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110245.33333333333, ans=0.125 +2024-08-14 02:44:39,951 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 02:44:39,952 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:44:39,990 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:44:54,086 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 0, loss[loss=0.1862, simple_loss=0.1953, pruned_loss=0.08789, over 18504.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.1953, pruned_loss=0.08789, over 18504.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:44:54,086 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:45:16,882 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 16, validation: loss=0.1529, simple_loss=0.1739, pruned_loss=0.06493, over 1073944.00 frames. +2024-08-14 02:45:16,882 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:45:28,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=110293.33333333333, ans=0.0 +2024-08-14 02:45:46,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=110346.66666666667, ans=0.0 +2024-08-14 02:46:26,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110453.33333333333, ans=0.125 +2024-08-14 02:46:26,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=110453.33333333333, ans=0.2 +2024-08-14 02:46:34,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110506.66666666667, ans=0.1 +2024-08-14 02:47:33,727 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 50, loss[loss=0.1601, simple_loss=0.1806, pruned_loss=0.06948, over 19044.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.1764, pruned_loss=0.06947, over 828171.03 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:48:12,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=110666.66666666667, ans=0.125 +2024-08-14 02:48:22,826 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.614e+02 1.779e+02 1.933e+02 2.621e+02, threshold=3.558e+02, percent-clipped=0.0 +2024-08-14 02:48:32,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. 
limit=6.0 +2024-08-14 02:49:16,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=110773.33333333333, ans=0.2 +2024-08-14 02:49:20,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=110773.33333333333, ans=0.125 +2024-08-14 02:49:31,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=110773.33333333333, ans=0.2 +2024-08-14 02:49:34,513 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 16, batch 100, loss[loss=0.125, simple_loss=0.1537, pruned_loss=0.04821, over 19090.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.1773, pruned_loss=0.06966, over 1476933.27 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 16.0 +2024-08-14 02:50:28,861 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 02:50:28,862 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:50:28,907 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:50:47,962 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 0, loss[loss=0.1627, simple_loss=0.1872, pruned_loss=0.06912, over 18336.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.1872, pruned_loss=0.06912, over 18336.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:50:47,962 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:50:53,043 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.6270, 4.0767, 3.9122, 4.4789], device='cuda:2') +2024-08-14 02:51:11,069 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 17, validation: loss=0.1498, simple_loss=0.1721, pruned_loss=0.06377, over 1073944.00 frames. +2024-08-14 02:51:11,070 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:51:27,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110981.33333333333, ans=0.125 +2024-08-14 02:51:40,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=111034.66666666667, ans=0.0 +2024-08-14 02:51:40,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111034.66666666667, ans=0.125 +2024-08-14 02:51:42,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=111034.66666666667, ans=0.95 +2024-08-14 02:51:44,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=111034.66666666667, ans=0.2 +2024-08-14 02:52:08,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=111088.0, ans=0.0 +2024-08-14 02:52:10,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=15.0 +2024-08-14 02:52:21,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.59 vs. 
limit=15.0 +2024-08-14 02:52:58,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111194.66666666667, ans=0.125 +2024-08-14 02:52:59,543 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.599e+02 1.701e+02 1.889e+02 2.501e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-14 02:53:07,418 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 50, loss[loss=0.1407, simple_loss=0.174, pruned_loss=0.05366, over 19057.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.1769, pruned_loss=0.06846, over 827125.84 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:53:35,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=111301.33333333333, ans=0.0 +2024-08-14 02:54:15,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=111408.0, ans=0.0 +2024-08-14 02:54:51,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-08-14 02:55:04,514 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 17, batch 100, loss[loss=0.1347, simple_loss=0.1598, pruned_loss=0.0548, over 19126.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.1754, pruned_loss=0.06756, over 1475165.47 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:55:20,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111514.66666666667, ans=0.125 +2024-08-14 02:55:21,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=15.0 +2024-08-14 02:55:34,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=111568.0, ans=0.0 +2024-08-14 02:55:36,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=15.0 +2024-08-14 02:55:54,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0 +2024-08-14 02:56:14,213 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 02:56:14,213 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 02:56:14,265 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 02:56:27,748 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 0, loss[loss=0.1608, simple_loss=0.1858, pruned_loss=0.06784, over 18559.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.1858, pruned_loss=0.06784, over 18559.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 02:56:27,748 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 02:56:58,388 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 18, validation: loss=0.1479, simple_loss=0.1705, pruned_loss=0.06271, over 1073944.00 frames. 
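The interleaved `Whitening` entries compare a per-module statistic of the activations against a limit (`metric=X vs. limit=Y`); values over the limit indicate a channel group drifting away from whitened statistics. One plausible way such a metric could be computed, offered only as a sketch, since the real `scaling.py` formula may differ:

```python
# Hedged sketch of the "Whitening: ... metric=X vs. limit=Y" checks: the
# assumption here is that the metric measures how far each channel group's
# covariance is from a multiple of the identity (max/mean eigenvalue ratio).
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    # x: (num_frames, num_channels); report the worst anisotropy over groups.
    num_frames, num_channels = x.shape
    group_size = num_channels // num_groups
    worst = 0.0
    for g in range(num_groups):
        xg = x[:, g * group_size:(g + 1) * group_size]
        xg = xg - xg.mean(dim=0, keepdim=True)
        cov = (xg.t() @ xg) / num_frames
        eigs = torch.linalg.eigvalsh(cov)
        worst = max(worst, (eigs.max() / eigs.mean().clamp(min=1e-20)).item())
    return worst

x = torch.randn(1000, 256)  # roughly white input
print(whitening_metric(x, num_groups=1))  # ~2, comfortably under a 6.0 limit
```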
+2024-08-14 02:56:58,388 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 02:57:05,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111669.33333333333, ans=0.07 +2024-08-14 02:57:07,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.60 vs. limit=10.0 +2024-08-14 02:57:43,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111669.33333333333, ans=0.125 +2024-08-14 02:58:00,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111722.66666666667, ans=0.1 +2024-08-14 02:58:03,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.609e+02 1.680e+02 1.858e+02 2.812e+02, threshold=3.359e+02, percent-clipped=0.0 +2024-08-14 02:58:59,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=111829.33333333333, ans=0.0 +2024-08-14 02:59:05,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111829.33333333333, ans=0.125 +2024-08-14 02:59:54,694 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 50, loss[loss=0.1692, simple_loss=0.1913, pruned_loss=0.07357, over 18975.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.1751, pruned_loss=0.06711, over 827610.12 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:00:02,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=111936.0, ans=0.0 +2024-08-14 03:01:19,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.27 vs. limit=15.0 +2024-08-14 03:01:54,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=112042.66666666667, ans=0.025 +2024-08-14 03:03:07,592 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 03:04:53,082 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 18, batch 100, loss[loss=0.141, simple_loss=0.1592, pruned_loss=0.06137, over 19135.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.173, pruned_loss=0.06556, over 1477220.69 frames. 
], batch size: 133, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:05:52,850 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.561e+02 1.643e+02 1.812e+02 2.261e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-14 03:06:41,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=112256.0, ans=0.015 +2024-08-14 03:06:41,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112256.0, ans=0.0 +2024-08-14 03:07:12,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112309.33333333333, ans=0.125 +2024-08-14 03:07:12,791 INFO [dysarthria_finetune.py:1435] (2/4) (10761207808, 34072559616) +2024-08-14 03:07:12,792 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 03:07:12,839 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 03:07:26,409 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 0, loss[loss=0.1619, simple_loss=0.1806, pruned_loss=0.07156, over 18438.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.1806, pruned_loss=0.07156, over 18438.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:07:26,409 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 03:07:58,727 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 19, validation: loss=0.1464, simple_loss=0.169, pruned_loss=0.06188, over 1073944.00 frames. +2024-08-14 03:07:58,728 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 03:08:08,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=112352.0, ans=0.0 +2024-08-14 03:08:15,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.88 vs. limit=15.0 +2024-08-14 03:08:43,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=112405.33333333333, ans=0.2 +2024-08-14 03:09:15,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=15.0 +2024-08-14 03:09:18,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112458.66666666667, ans=0.125 +2024-08-14 03:09:56,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.91 vs. limit=15.0 +2024-08-14 03:10:24,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112512.0, ans=0.1 +2024-08-14 03:10:40,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-08-14 03:10:57,784 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 50, loss[loss=0.1517, simple_loss=0.1768, pruned_loss=0.06333, over 19013.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.1722, pruned_loss=0.06632, over 827262.88 frames. 
], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:11:54,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:11:57,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-08-14 03:12:18,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-08-14 03:12:19,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.36 vs. limit=15.0 +2024-08-14 03:12:41,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=112725.33333333333, ans=0.2 +2024-08-14 03:12:46,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=112725.33333333333, ans=0.125 +2024-08-14 03:13:01,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=112778.66666666667, ans=0.025 +2024-08-14 03:13:37,508 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.593e+02 1.694e+02 1.909e+02 3.031e+02, threshold=3.389e+02, percent-clipped=0.0 +2024-08-14 03:14:02,404 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 19, batch 100, loss[loss=0.1167, simple_loss=0.1383, pruned_loss=0.0476, over 19169.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.1726, pruned_loss=0.06654, over 1475351.90 frames. ], batch size: 134, lr: 9.92e-05, grad_scale: 16.0 +2024-08-14 03:14:05,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.64 vs. limit=22.5 +2024-08-14 03:14:08,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=112885.33333333333, ans=0.025 +2024-08-14 03:14:10,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112885.33333333333, ans=0.125 +2024-08-14 03:14:10,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112885.33333333333, ans=0.125 +2024-08-14 03:14:11,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.53 vs. limit=15.0 +2024-08-14 03:15:24,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112938.66666666667, ans=0.125 +2024-08-14 03:15:54,661 INFO [dysarthria_finetune.py:1435] (2/4) (10759110656, 34072559616) +2024-08-14 03:15:54,662 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 03:15:54,706 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 03:16:41,826 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 0, loss[loss=0.1682, simple_loss=0.1915, pruned_loss=0.07247, over 18527.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.1915, pruned_loss=0.07247, over 18527.00 frames. 
], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:16:41,827 INFO [dysarthria_finetune.py:1165] (2/4) Computing validation loss on speech +2024-08-14 03:17:15,215 INFO [dysarthria_finetune.py:1174] (2/4) Validation on speech: Epoch 20, validation: loss=0.1449, simple_loss=0.1677, pruned_loss=0.0611, over 1073944.00 frames. +2024-08-14 03:17:15,216 INFO [dysarthria_finetune.py:1177] (2/4) Maximum memory allocated so far is 19757MB +2024-08-14 03:17:42,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=113040.0, ans=0.125 +2024-08-14 03:17:49,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113093.33333333333, ans=0.125 +2024-08-14 03:19:25,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.27 vs. limit=22.5 +2024-08-14 03:19:36,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113200.0, ans=0.1 +2024-08-14 03:19:39,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113200.0, ans=0.1 +2024-08-14 03:21:08,393 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 50, loss[loss=0.1562, simple_loss=0.1756, pruned_loss=0.06841, over 18968.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.1697, pruned_loss=0.06406, over 828106.18 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:22:10,067 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.567e+02 1.664e+02 1.868e+02 2.522e+02, threshold=3.327e+02, percent-clipped=0.0 +2024-08-14 03:22:48,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.40 vs. limit=22.5 +2024-08-14 03:24:59,418 INFO [dysarthria_finetune.py:1141] (2/4) Epoch 20, batch 100, loss[loss=0.1274, simple_loss=0.1493, pruned_loss=0.05271, over 19074.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.1688, pruned_loss=0.06321, over 1476081.83 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:25:17,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-08-14 03:25:19,980 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 03:26:46,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-08-14 03:27:20,896 INFO [dysarthria_finetune.py:1435] (2/4) (10763304960, 34072559616) +2024-08-14 03:27:20,897 INFO [dysarthria_finetune.py:1436] (2/4) Empty cache: before and after +2024-08-14 03:27:20,940 INFO [dysarthria_finetune.py:1440] (2/4) (29522329600, 34072559616) +2024-08-14 03:27:20,941 INFO [dysarthria_finetune.py:1442] (2/4) Done! 
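These training logs report progress with a consistent `tot_loss[...]` line format. For summarizing a run offline, a minimal Python sketch along the following lines pulls out the per-batch `tot_loss` trajectory; the regex mirrors the line format shown above, and the file path at the bottom is only a placeholder.

```python
import re
from pathlib import Path

# Matches the icefall-style progress lines above, e.g.:
#   ... Epoch 19, batch 50, loss[...], tot_loss[loss=0.1524,
#   simple_loss=0.1722, pruned_loss=0.06632, over 827262.88 frames. ], ...
PATTERN = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+).*?"
    r"tot_loss\[loss=(?P<loss>[\d.]+), simple_loss=(?P<simple>[\d.]+), "
    r"pruned_loss=(?P<pruned>[\d.]+)"
)

def tot_loss_trajectory(log_path):
    """Yield (epoch, batch, tot_loss) tuples from one training log."""
    for line in Path(log_path).read_text().splitlines():
        m = PATTERN.search(line)
        if m:
            yield int(m["epoch"]), int(m["batch"]), float(m["loss"])

# Placeholder path; point it at one of the log files added in this diff.
for epoch, batch, loss in tot_loss_trajectory("log-train-2024-08-13-23-24-47-3"):
    print(f"epoch {epoch:>2}  batch {batch:>4}  tot_loss {loss:.4f}")
```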
diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-3 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-3 new file mode 100644 index 0000000000000000000000000000000000000000..925cea856c2cdcccc6a4a32d252af2769dc30730 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/log/log-train-2024-08-13-23-24-47-3 @@ -0,0 +1,559 @@ +2024-08-13 23:24:47,922 INFO [dysarthria_finetune.py:1212] (3/4) Training started +2024-08-13 23:24:47,960 INFO [dysarthria_finetune.py:1214] (3/4) (33748090880, 34072559616) +2024-08-13 23:24:47,960 INFO [dysarthria_finetune.py:1215] (3/4) Empty cache: before and after +2024-08-13 23:24:48,946 INFO [dysarthria_finetune.py:1219] (3/4) (32783400960, 34072559616) +2024-08-13 23:24:48,947 INFO [dysarthria_finetune.py:1229] (3/4) Device: cuda:3 +2024-08-13 23:24:48,990 INFO [dysarthria_finetune.py:1241] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp_finetune'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.0001, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'do_finetune': True, 'use_mux': False, 'init_modules': None, 'finetune_ckpt': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt', 'full_libri': False, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/data/speech_accessibility/manifests'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 20, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': True, 'shuffle': 
True, 'drop_last': True, 'return_cuts': True, 'num_workers': 0, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2024-08-13 23:24:48,991 INFO [dysarthria_finetune.py:1243] (3/4) About to create model +2024-08-13 23:24:49,988 INFO [dysarthria_finetune.py:1247] (3/4) Number of model parameters: 65549011 +2024-08-13 23:24:49,988 INFO [dysarthria_finetune.py:769] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/libri/exp/epoch-20.pt +2024-08-13 23:24:59,905 INFO [dysarthria_finetune.py:1275] (3/4) Using DDP +2024-08-13 23:25:17,957 INFO [dysarthria_asr_datamodule.py:494] (3/4) About to get train cuts +2024-08-13 23:25:18,291 INFO [dysarthria_finetune.py:1319] (3/4) CutSet(len=62255) [underlying data type: ] +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:239] (3/4) Disable MUSAN +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:257] (3/4) Enable SpecAugment +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:258] (3/4) Time warp factor: 80 +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:268] (3/4) Num frame mask: 10 +2024-08-13 23:25:18,622 INFO [dysarthria_asr_datamodule.py:281] (3/4) About to create train dataset +2024-08-13 23:25:19,282 INFO [dysarthria_asr_datamodule.py:308] (3/4) Using DynamicBucketingSampler. +2024-08-13 23:25:20,228 INFO [dysarthria_asr_datamodule.py:325] (3/4) About to create train dataloader +2024-08-13 23:25:20,234 INFO [dysarthria_asr_datamodule.py:500] (3/4) About to get dev cuts +2024-08-13 23:25:24,698 INFO [dysarthria_asr_datamodule.py:356] (3/4) About to create dev dataset +2024-08-13 23:25:28,030 INFO [dysarthria_asr_datamodule.py:373] (3/4) About to create dev dataloader +2024-08-13 23:25:28,030 INFO [dysarthria_finetune.py:1490] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-13 23:27:16,791 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=17.38 vs. limit=7.5 +2024-08-13 23:27:17,110 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=19.31 vs. limit=7.5 +2024-08-13 23:27:17,920 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB +2024-08-13 23:27:19,742 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB +2024-08-13 23:32:34,804 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB +2024-08-13 23:32:36,808 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB +2024-08-13 23:35:38,405 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB +2024-08-13 23:35:41,667 INFO [dysarthria_finetune.py:1518] (3/4) Maximum memory allocated so far is 11778MB +2024-08-13 23:36:58,745 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 0, loss[loss=0.2854, simple_loss=0.2712, pruned_loss=0.1421, over 18634.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.2712, pruned_loss=0.1421, over 18634.00 frames. ], batch size: 65, lr: 5.01e-05, grad_scale: 2.0 +2024-08-13 23:36:58,746 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-13 23:49:47,045 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 1, validation: loss=0.3215, simple_loss=0.3039, pruned_loss=0.1764, over 1073944.00 frames. 
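The parameter dump above sets `base_lr: 0.0001`, `lr_batches: 100000.0`, and `lr_epochs: 100.0`, which are the knobs of icefall's Eden learning-rate scheduler, and the `lr:` values in the batch lines (roughly 5.0e-05 at epoch 1, batch 0, climbing toward 1.00e-04 before decaying) are consistent with Eden's warmup-then-decay rule. The sketch below assumes the stock Eden formula from icefall's `optim.py`; the warmup length is not stated in the dump, so `warmup_batches=500.0` is inferred from the logged values rather than quoted.

```python
def eden_lr(base_lr, batch, epoch, lr_batches=100_000.0, lr_epochs=100.0,
            warmup_batches=500.0):
    """Eden schedule (sketch): decays in both batch and epoch, with a
    linear warmup from 0.5x to 1.0x over the first `warmup_batches` steps."""
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    warmup = 1.0 if batch >= warmup_batches else 0.5 + 0.5 * batch / warmup_batches
    return base_lr * batch_factor * epoch_factor * warmup

# With base_lr=0.0001 from the dump, epoch 1 / batch 0 gives ~5.0e-05,
# close to the "lr: 5.01e-05" reported at "Epoch 1, batch 0" above.
print(f"{eden_lr(1e-4, batch=0, epoch=1):.2e}")
```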
+2024-08-13 23:49:47,367 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14284MB +2024-08-13 23:51:36,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.78 vs. limit=22.5 +2024-08-13 23:51:42,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100000.0, ans=0.1 +2024-08-13 23:53:41,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100000.0, ans=0.125 +2024-08-13 23:59:58,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=22.5 +2024-08-14 00:15:49,520 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.298e+02 1.050e+03 1.114e+03 1.201e+03 1.245e+03, threshold=4.457e+03, percent-clipped=0.0 +2024-08-14 00:23:15,349 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.960e+02 9.450e+02 1.050e+03 1.152e+03 1.319e+03, threshold=4.200e+03, percent-clipped=0.0 +2024-08-14 00:27:45,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.30 vs. limit=15.0 +2024-08-14 00:29:33,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=45.82 vs. limit=15.0 +2024-08-14 00:29:33,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=15.71 vs. limit=15.0 +2024-08-14 00:29:53,831 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.241e+02 7.298e+02 9.450e+02 1.050e+03 1.319e+03, threshold=3.780e+03, percent-clipped=0.0 +2024-08-14 00:31:20,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=100213.33333333333, ans=0.07 +2024-08-14 00:33:53,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=100213.33333333333, ans=0.1 +2024-08-14 00:35:42,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=100213.33333333333, ans=0.125 +2024-08-14 00:37:01,624 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 50, loss[loss=0.3685, simple_loss=0.3467, pruned_loss=0.2133, over 19001.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3333, pruned_loss=0.1995, over 828973.50 frames. ], batch size: 102, lr: 5.51e-05, grad_scale: 2.0 +2024-08-14 00:42:02,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.16 vs. limit=6.0 +2024-08-14 00:42:12,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=100266.66666666667, ans=22.5 +2024-08-14 00:47:09,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=100320.0, ans=0.2 +2024-08-14 00:49:53,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.60 vs. 
limit=6.0 +2024-08-14 00:52:25,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=100373.33333333333, ans=0.125 +2024-08-14 00:53:36,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.72 vs. limit=15.0 +2024-08-14 00:55:10,396 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 00:58:17,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.64 vs. limit=22.5 +2024-08-14 01:01:58,593 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+02 5.963e+02 7.298e+02 8.800e+02 1.319e+03, threshold=1.460e+03, percent-clipped=0.0 +2024-08-14 01:01:58,627 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 1, batch 100, loss[loss=0.3293, simple_loss=0.3105, pruned_loss=0.1828, over 19146.00 frames. ], tot_loss[loss=0.3393, simple_loss=0.3202, pruned_loss=0.1893, over 1476162.18 frames. ], batch size: 133, lr: 6.01e-05, grad_scale: 4.0 +2024-08-14 01:10:39,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=100640.0, ans=0.0 +2024-08-14 01:11:48,001 INFO [dysarthria_finetune.py:1435] (3/4) (13995016192, 34072559616) +2024-08-14 01:11:48,001 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:11:48,057 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:12:35,401 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 0, loss[loss=0.3084, simple_loss=0.2937, pruned_loss=0.153, over 18501.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.2937, pruned_loss=0.153, over 18501.00 frames. ], batch size: 65, lr: 6.29e-05, grad_scale: 8.0 +2024-08-14 01:12:35,402 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:16:55,988 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 2, validation: loss=0.2907, simple_loss=0.276, pruned_loss=0.149, over 1073944.00 frames. +2024-08-14 01:16:55,989 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:18:48,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=22.13 vs. 
limit=15.0 +2024-08-14 01:20:02,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=100736.0, ans=0.0 +2024-08-14 01:20:02,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=100736.0, ans=0.125 +2024-08-14 01:20:19,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=100789.33333333333, ans=0.025 +2024-08-14 01:20:51,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=100789.33333333333, ans=0.0 +2024-08-14 01:20:51,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100789.33333333333, ans=0.1 +2024-08-14 01:22:24,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100896.0, ans=0.0 +2024-08-14 01:22:24,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=100896.0, ans=0.125 +2024-08-14 01:22:29,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100896.0, ans=0.125 +2024-08-14 01:24:54,914 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 50, loss[loss=0.3246, simple_loss=0.3073, pruned_loss=0.1718, over 18956.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3039, pruned_loss=0.1733, over 828460.00 frames. ], batch size: 102, lr: 6.79e-05, grad_scale: 8.0 +2024-08-14 01:25:46,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=100949.33333333333, ans=0.0 +2024-08-14 01:26:11,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=101002.66666666667, ans=0.0 +2024-08-14 01:27:29,014 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.249e+02 4.347e+02 4.852e+02 5.543e+02 7.043e+02, threshold=9.703e+02, percent-clipped=0.0 +2024-08-14 01:27:48,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=101056.0, ans=0.0 +2024-08-14 01:27:53,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101056.0, ans=0.125 +2024-08-14 01:28:17,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=101109.33333333333, ans=0.0 +2024-08-14 01:28:19,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.38 vs. 
limit=6.0 +2024-08-14 01:28:31,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101109.33333333333, ans=0.125 +2024-08-14 01:28:33,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=101162.66666666667, ans=0.0 +2024-08-14 01:29:04,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101216.0, ans=0.125 +2024-08-14 01:29:06,626 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 2, batch 100, loss[loss=0.2677, simple_loss=0.2549, pruned_loss=0.1349, over 19077.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.2943, pruned_loss=0.1643, over 1476919.42 frames. ], batch size: 133, lr: 7.29e-05, grad_scale: 8.0 +2024-08-14 01:29:10,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=101216.0, ans=0.125 +2024-08-14 01:30:03,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=29.02 vs. limit=22.5 +2024-08-14 01:30:15,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101322.66666666667, ans=0.125 +2024-08-14 01:30:34,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.92 vs. limit=22.5 +2024-08-14 01:30:36,800 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:30:42,018 INFO [dysarthria_finetune.py:1435] (3/4) (13936295936, 34072559616) +2024-08-14 01:30:42,018 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:30:42,061 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:30:49,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=101370.66666666667, ans=0.0 +2024-08-14 01:30:55,538 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 0, loss[loss=0.2691, simple_loss=0.2578, pruned_loss=0.1294, over 18579.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.2578, pruned_loss=0.1294, over 18579.00 frames. ], batch size: 65, lr: 7.58e-05, grad_scale: 16.0 +2024-08-14 01:30:55,538 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:31:18,579 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 3, validation: loss=0.2682, simple_loss=0.2564, pruned_loss=0.1309, over 1073944.00 frames. +2024-08-14 01:31:18,580 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:31:23,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=101370.66666666667, ans=0.125 +2024-08-14 01:31:55,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.57 vs. 
limit=10.0 +2024-08-14 01:32:00,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=101424.0, ans=0.0 +2024-08-14 01:32:19,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=101477.33333333333, ans=0.025 +2024-08-14 01:32:58,605 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.574e+02 3.350e+02 3.692e+02 4.154e+02 5.648e+02, threshold=7.384e+02, percent-clipped=0.0 +2024-08-14 01:33:14,949 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 50, loss[loss=0.2948, simple_loss=0.283, pruned_loss=0.141, over 19113.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.2815, pruned_loss=0.1506, over 827781.85 frames. ], batch size: 102, lr: 8.08e-05, grad_scale: 16.0 +2024-08-14 01:33:18,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=101637.33333333333, ans=0.2 +2024-08-14 01:33:58,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=101690.66666666667, ans=0.2 +2024-08-14 01:34:25,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.15 vs. limit=6.0 +2024-08-14 01:34:46,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=101797.33333333333, ans=0.2 +2024-08-14 01:35:00,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.10 vs. limit=6.0 +2024-08-14 01:35:08,697 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 3, batch 100, loss[loss=0.2637, simple_loss=0.254, pruned_loss=0.124, over 19145.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.2739, pruned_loss=0.1457, over 1476240.06 frames. ], batch size: 133, lr: 8.58e-05, grad_scale: 16.0 +2024-08-14 01:35:16,165 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:35:29,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.75 vs. limit=6.0 +2024-08-14 01:35:53,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102010.66666666667, ans=0.125 +2024-08-14 01:36:03,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=8.23 vs. limit=12.0 +2024-08-14 01:36:03,974 INFO [dysarthria_finetune.py:1435] (3/4) (13969850368, 34072559616) +2024-08-14 01:36:04,625 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:36:04,673 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:36:20,153 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 0, loss[loss=0.3092, simple_loss=0.2951, pruned_loss=0.1555, over 18645.00 frames. ], tot_loss[loss=0.3092, simple_loss=0.2951, pruned_loss=0.1555, over 18645.00 frames. 
], batch size: 65, lr: 8.86e-05, grad_scale: 32.0 +2024-08-14 01:36:20,153 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:36:43,053 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 4, validation: loss=0.2499, simple_loss=0.241, pruned_loss=0.1173, over 1073944.00 frames. +2024-08-14 01:36:43,054 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:36:57,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.15 vs. limit=15.0 +2024-08-14 01:37:14,065 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.440e+02 2.841e+02 3.076e+02 3.396e+02 5.357e+02, threshold=6.153e+02, percent-clipped=0.0 +2024-08-14 01:37:21,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=102106.66666666667, ans=0.125 +2024-08-14 01:37:25,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=14.57 vs. limit=15.0 +2024-08-14 01:37:34,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=102160.0, ans=0.0 +2024-08-14 01:37:37,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=102160.0, ans=10.0 +2024-08-14 01:37:39,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-08-14 01:37:43,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=26.14 vs. limit=15.0 +2024-08-14 01:37:49,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=102213.33333333333, ans=0.0 +2024-08-14 01:38:09,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.11 vs. limit=10.0 +2024-08-14 01:38:23,456 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 50, loss[loss=0.2704, simple_loss=0.2583, pruned_loss=0.136, over 18993.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.2647, pruned_loss=0.1366, over 827748.42 frames. ], batch size: 102, lr: 9.36e-05, grad_scale: 32.0 +2024-08-14 01:38:36,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=22.38 vs. limit=15.0 +2024-08-14 01:38:40,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=102320.0, ans=0.125 +2024-08-14 01:38:46,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.37 vs. 
limit=15.0 +2024-08-14 01:38:58,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102373.33333333333, ans=0.1 +2024-08-14 01:39:04,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102426.66666666667, ans=0.1 +2024-08-14 01:39:11,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=102426.66666666667, ans=0.125 +2024-08-14 01:39:12,323 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0 +2024-08-14 01:39:19,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102426.66666666667, ans=0.1 +2024-08-14 01:39:45,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.79 vs. limit=15.0 +2024-08-14 01:39:48,685 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:39:54,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=102533.33333333333, ans=0.025 +2024-08-14 01:40:00,893 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 4, batch 100, loss[loss=0.2655, simple_loss=0.2537, pruned_loss=0.1338, over 19161.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.2576, pruned_loss=0.1315, over 1475350.41 frames. ], batch size: 133, lr: 9.86e-05, grad_scale: 32.0 +2024-08-14 01:40:04,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=15.0 +2024-08-14 01:40:18,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=102586.66666666667, ans=0.05 +2024-08-14 01:40:30,558 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 2.524e+02 2.719e+02 2.975e+02 4.617e+02, threshold=5.438e+02, percent-clipped=0.0 +2024-08-14 01:40:39,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.43 vs. limit=15.0 +2024-08-14 01:40:53,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102693.33333333333, ans=0.125 +2024-08-14 01:40:54,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-08-14 01:40:55,781 INFO [dysarthria_finetune.py:1435] (3/4) (147521536, 34072559616) +2024-08-14 01:40:55,782 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:40:55,864 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:41:11,067 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 0, loss[loss=0.2403, simple_loss=0.2335, pruned_loss=0.1112, over 18566.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2335, pruned_loss=0.1112, over 18566.00 frames. 
], batch size: 65, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:41:11,068 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:41:34,535 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 5, validation: loss=0.2343, simple_loss=0.2283, pruned_loss=0.1066, over 1073944.00 frames. +2024-08-14 01:41:34,536 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:42:12,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=102794.66666666667, ans=0.125 +2024-08-14 01:42:32,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=102848.0, ans=0.0 +2024-08-14 01:43:29,177 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 50, loss[loss=0.2675, simple_loss=0.2579, pruned_loss=0.1299, over 18976.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.2464, pruned_loss=0.1215, over 827749.28 frames. ], batch size: 102, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:43:34,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=103008.0, ans=0.0 +2024-08-14 01:44:16,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=103061.33333333333, ans=0.0 +2024-08-14 01:44:56,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=103168.0, ans=0.125 +2024-08-14 01:44:59,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.063e+02 2.398e+02 2.550e+02 2.967e+02 4.732e+02, threshold=5.099e+02, percent-clipped=0.0 +2024-08-14 01:45:02,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103168.0, ans=0.1 +2024-08-14 01:45:16,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=103221.33333333333, ans=0.0 +2024-08-14 01:45:27,200 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 5, batch 100, loss[loss=0.2397, simple_loss=0.2332, pruned_loss=0.1122, over 19091.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.2432, pruned_loss=0.1201, over 1475913.16 frames. ], batch size: 133, lr: 1.00e-04, grad_scale: 32.0 +2024-08-14 01:45:34,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103274.66666666667, ans=0.1 +2024-08-14 01:46:04,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=103381.33333333333, ans=0.0 +2024-08-14 01:46:10,054 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 01:46:18,141 INFO [dysarthria_finetune.py:1435] (3/4) (13957267456, 34072559616) +2024-08-14 01:46:18,141 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:46:18,187 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:46:32,108 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 0, loss[loss=0.239, simple_loss=0.2331, pruned_loss=0.1115, over 18684.00 frames. ], tot_loss[loss=0.239, simple_loss=0.2331, pruned_loss=0.1115, over 18684.00 frames. 
], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:46:32,108 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:46:55,695 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 6, validation: loss=0.2214, simple_loss=0.2182, pruned_loss=0.09842, over 1073944.00 frames. +2024-08-14 01:46:55,695 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:47:15,409 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-08-14 01:47:20,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=103424.0, ans=6.0 +2024-08-14 01:48:02,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=103530.66666666667, ans=0.125 +2024-08-14 01:48:15,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103530.66666666667, ans=0.1 +2024-08-14 01:48:20,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=103584.0, ans=0.2 +2024-08-14 01:48:27,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103584.0, ans=0.125 +2024-08-14 01:49:03,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=103637.33333333333, ans=0.125 +2024-08-14 01:49:09,884 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 50, loss[loss=0.2246, simple_loss=0.2213, pruned_loss=0.1012, over 19058.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.2337, pruned_loss=0.1123, over 828493.81 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:49:16,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=103690.66666666667, ans=0.0 +2024-08-14 01:49:30,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=103690.66666666667, ans=0.125 +2024-08-14 01:49:32,953 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 2.293e+02 2.374e+02 2.625e+02 4.193e+02, threshold=4.747e+02, percent-clipped=0.0 +2024-08-14 01:49:59,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=11.70 vs. limit=15.0 +2024-08-14 01:49:59,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.33 vs. limit=6.0 +2024-08-14 01:51:17,787 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 6, batch 100, loss[loss=0.2107, simple_loss=0.2083, pruned_loss=0.09466, over 19113.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.2319, pruned_loss=0.1108, over 1475249.23 frames. 
], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:51:57,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=104010.66666666667, ans=0.025 +2024-08-14 01:52:10,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104064.0, ans=0.125 +2024-08-14 01:52:24,217 INFO [dysarthria_finetune.py:1435] (3/4) (413859840, 34072559616) +2024-08-14 01:52:24,218 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:52:24,301 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:52:37,505 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 0, loss[loss=0.2662, simple_loss=0.2552, pruned_loss=0.1346, over 18595.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.2552, pruned_loss=0.1346, over 18595.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:52:37,506 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:53:01,317 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 7, validation: loss=0.2103, simple_loss=0.2098, pruned_loss=0.0916, over 1073944.00 frames. +2024-08-14 01:53:01,317 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:53:42,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104213.33333333333, ans=0.125 +2024-08-14 01:54:01,266 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.925e+02 2.137e+02 2.271e+02 2.445e+02 3.999e+02, threshold=4.542e+02, percent-clipped=0.0 +2024-08-14 01:54:40,880 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 50, loss[loss=0.2308, simple_loss=0.2286, pruned_loss=0.1046, over 18963.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2277, pruned_loss=0.107, over 827887.87 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:55:05,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104426.66666666667, ans=0.1 +2024-08-14 01:55:12,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104426.66666666667, ans=0.0 +2024-08-14 01:55:45,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=104533.33333333333, ans=0.025 +2024-08-14 01:55:59,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104586.66666666667, ans=0.125 +2024-08-14 01:56:12,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.34 vs. limit=22.5 +2024-08-14 01:56:18,976 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 7, batch 100, loss[loss=0.2045, simple_loss=0.2048, pruned_loss=0.08995, over 19124.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2218, pruned_loss=0.1026, over 1475075.17 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:56:41,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. 
limit=15.0 +2024-08-14 01:57:10,852 INFO [dysarthria_finetune.py:1435] (3/4) (12835291136, 34072559616) +2024-08-14 01:57:10,852 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 01:57:10,899 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 01:57:24,754 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 0, loss[loss=0.2156, simple_loss=0.2149, pruned_loss=0.09707, over 18547.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2149, pruned_loss=0.09707, over 18547.00 frames. ], batch size: 65, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 01:57:24,754 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 01:57:48,461 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 8, validation: loss=0.2004, simple_loss=0.2027, pruned_loss=0.08579, over 1073944.00 frames. +2024-08-14 01:57:48,462 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 01:57:54,664 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.806e+02 2.054e+02 2.212e+02 2.317e+02 3.796e+02, threshold=4.423e+02, percent-clipped=0.0 +2024-08-14 01:58:08,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=13.63 vs. limit=12.0 +2024-08-14 01:58:25,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0 +2024-08-14 01:58:47,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.97 vs. limit=15.0 +2024-08-14 01:59:10,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=105002.66666666667, ans=0.125 +2024-08-14 01:59:59,173 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 50, loss[loss=0.2288, simple_loss=0.2264, pruned_loss=0.1064, over 18964.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2165, pruned_loss=0.09738, over 828441.23 frames. ], batch size: 102, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 02:00:24,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.45 vs. limit=15.0 +2024-08-14 02:01:24,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=105269.33333333333, ans=0.0 +2024-08-14 02:01:36,505 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 8, batch 100, loss[loss=0.2167, simple_loss=0.2185, pruned_loss=0.09597, over 19119.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2137, pruned_loss=0.09557, over 1475727.62 frames. ], batch size: 133, lr: 9.99e-05, grad_scale: 32.0 +2024-08-14 02:01:42,324 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.040e+02 2.200e+02 2.368e+02 3.520e+02, threshold=4.401e+02, percent-clipped=0.0 +2024-08-14 02:01:43,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=31.83 vs. limit=22.5 +2024-08-14 02:01:49,429 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.37 vs. 
limit=10.0 +2024-08-14 02:01:53,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105322.66666666667, ans=0.1 +2024-08-14 02:02:00,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105376.0, ans=0.0 +2024-08-14 02:02:15,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=8.07 vs. limit=12.0 +2024-08-14 02:02:28,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=105429.33333333333, ans=0.0 +2024-08-14 02:02:29,636 INFO [dysarthria_finetune.py:1435] (3/4) (12810125312, 34072559616) +2024-08-14 02:02:29,636 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:02:29,684 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:02:42,914 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 0, loss[loss=0.2212, simple_loss=0.2252, pruned_loss=0.09584, over 18777.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2252, pruned_loss=0.09584, over 18777.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:02:42,914 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:03:19,142 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 9, validation: loss=0.1911, simple_loss=0.1962, pruned_loss=0.08053, over 1073944.00 frames. +2024-08-14 02:03:19,143 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:04:18,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=105584.0, ans=0.125 +2024-08-14 02:05:05,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=105637.33333333333, ans=0.025 +2024-08-14 02:05:12,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.98 vs. limit=10.0 +2024-08-14 02:06:19,736 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 50, loss[loss=0.2134, simple_loss=0.2172, pruned_loss=0.09364, over 18965.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2074, pruned_loss=0.09092, over 827503.70 frames. 
], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:06:34,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=105744.0, ans=0.025 +2024-08-14 02:07:03,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105797.33333333333, ans=0.125 +2024-08-14 02:07:14,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=105850.66666666667, ans=0.09899494936611666 +2024-08-14 02:07:22,958 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.818e+02 2.009e+02 2.115e+02 2.263e+02 3.410e+02, threshold=4.229e+02, percent-clipped=0.0 +2024-08-14 02:07:37,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=105904.0, ans=0.025 +2024-08-14 02:07:41,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105904.0, ans=0.125 +2024-08-14 02:08:31,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.22 vs. limit=22.5 +2024-08-14 02:08:32,109 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 9, batch 100, loss[loss=0.1813, simple_loss=0.1873, pruned_loss=0.07712, over 19159.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2048, pruned_loss=0.08975, over 1475225.92 frames. ], batch size: 133, lr: 9.98e-05, grad_scale: 16.0 +2024-08-14 02:08:33,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=106010.66666666667, ans=0.125 +2024-08-14 02:08:43,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=106010.66666666667, ans=0.05 +2024-08-14 02:08:53,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=106010.66666666667, ans=0.2 +2024-08-14 02:09:26,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.68 vs. limit=22.5 +2024-08-14 02:09:32,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=106117.33333333333, ans=0.0 +2024-08-14 02:09:37,395 INFO [dysarthria_finetune.py:1435] (3/4) (13986627584, 34072559616) +2024-08-14 02:09:37,395 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:09:37,436 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:09:53,509 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 0, loss[loss=0.1941, simple_loss=0.1986, pruned_loss=0.08522, over 18587.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.1986, pruned_loss=0.08522, over 18587.00 frames. ], batch size: 65, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:09:53,509 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:10:16,415 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 10, validation: loss=0.1833, simple_loss=0.191, pruned_loss=0.07653, over 1073944.00 frames. 
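The bare `(free, total)` pairs that bracket each "Empty cache: before and after" message (for example `(13986627584, 34072559616)` above) read as byte counts: 34072559616 bytes is about 31.7 GiB, i.e. a 32 GB device. Assuming those lines print `torch.cuda.mem_get_info()` — an interpretation, since the training script itself is not part of this diff — the pattern is:

```python
import torch

def report_free_total():
    """Print (free_bytes, total_bytes) for the current CUDA device,
    in the same bare-tuple form as the log lines above."""
    free, total = torch.cuda.mem_get_info()
    print((free, total))

report_free_total()       # e.g. (13986627584, 34072559616) before the cache drop
torch.cuda.empty_cache()  # the "Empty cache: before and after" step
report_free_total()       # e.g. (29576855552, 34072559616) afterwards
```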
+2024-08-14 02:10:16,416 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:10:36,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.15 vs. limit=22.5 +2024-08-14 02:10:40,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=106218.66666666667, ans=0.2 +2024-08-14 02:10:40,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106218.66666666667, ans=0.125 +2024-08-14 02:10:47,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0 +2024-08-14 02:11:17,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=106325.33333333333, ans=0.125 +2024-08-14 02:11:47,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.689e+02 1.913e+02 2.021e+02 2.184e+02 3.494e+02, threshold=4.042e+02, percent-clipped=0.0 +2024-08-14 02:11:55,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106432.0, ans=0.0 +2024-08-14 02:11:55,875 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 50, loss[loss=0.2107, simple_loss=0.2173, pruned_loss=0.09176, over 19101.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2016, pruned_loss=0.08822, over 827631.91 frames. ], batch size: 102, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:11:59,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=106432.0, ans=0.125 +2024-08-14 02:11:59,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=106432.0, ans=0.2 +2024-08-14 02:12:03,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=106432.0, ans=15.0 +2024-08-14 02:12:18,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=106485.33333333333, ans=0.025 +2024-08-14 02:12:20,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106485.33333333333, ans=0.125 +2024-08-14 02:13:33,244 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 10, batch 100, loss[loss=0.1796, simple_loss=0.1869, pruned_loss=0.07732, over 19051.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2004, pruned_loss=0.08596, over 1475773.03 frames. 
], batch size: 133, lr: 9.98e-05, grad_scale: 32.0 +2024-08-14 02:13:42,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-08-14 02:13:46,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=106698.66666666667, ans=0.0 +2024-08-14 02:14:20,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106805.33333333333, ans=0.125 +2024-08-14 02:14:20,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=106805.33333333333, ans=0.0 +2024-08-14 02:14:22,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=106805.33333333333, ans=0.2 +2024-08-14 02:14:26,593 INFO [dysarthria_finetune.py:1435] (3/4) (158007296, 34072559616) +2024-08-14 02:14:26,594 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:14:26,665 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:14:39,664 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 0, loss[loss=0.2055, simple_loss=0.2098, pruned_loss=0.09303, over 18604.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2098, pruned_loss=0.09303, over 18604.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:14:39,664 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:15:02,461 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 11, validation: loss=0.1768, simple_loss=0.1869, pruned_loss=0.07357, over 1073944.00 frames. +2024-08-14 02:15:02,462 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:15:31,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.92 vs. limit=15.0 +2024-08-14 02:15:33,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=106906.66666666667, ans=0.0 +2024-08-14 02:15:35,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 1.865e+02 1.931e+02 2.118e+02 3.052e+02, threshold=3.863e+02, percent-clipped=0.0 +2024-08-14 02:15:50,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106960.0, ans=0.125 +2024-08-14 02:15:52,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.67 vs. limit=15.0 +2024-08-14 02:15:58,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106960.0, ans=0.1 +2024-08-14 02:16:15,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=22.5 +2024-08-14 02:16:21,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=11.73 vs. 
limit=12.0 +2024-08-14 02:16:30,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107066.66666666667, ans=0.125 +2024-08-14 02:16:32,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107066.66666666667, ans=0.125 +2024-08-14 02:16:45,046 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 50, loss[loss=0.1846, simple_loss=0.1953, pruned_loss=0.07778, over 19110.00 frames. ], tot_loss[loss=0.181, simple_loss=0.1902, pruned_loss=0.07711, over 828132.31 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:16:46,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107120.0, ans=0.125 +2024-08-14 02:17:19,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=107173.33333333333, ans=0.025 +2024-08-14 02:17:19,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=107173.33333333333, ans=0.2 +2024-08-14 02:18:50,472 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 11, batch 100, loss[loss=0.1525, simple_loss=0.1703, pruned_loss=0.05702, over 19127.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.1926, pruned_loss=0.07929, over 1475363.18 frames. ], batch size: 133, lr: 9.97e-05, grad_scale: 16.0 +2024-08-14 02:19:36,870 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.754e+02 1.842e+02 1.998e+02 3.456e+02, threshold=3.684e+02, percent-clipped=0.0 +2024-08-14 02:19:51,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=15.0 +2024-08-14 02:19:59,981 INFO [dysarthria_finetune.py:1435] (3/4) (13980336128, 34072559616) +2024-08-14 02:19:59,982 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:20:00,025 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:20:13,223 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 0, loss[loss=0.1876, simple_loss=0.1997, pruned_loss=0.0795, over 18650.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.1997, pruned_loss=0.0795, over 18650.00 frames. ], batch size: 65, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:20:13,223 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:20:42,014 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 12, validation: loss=0.1712, simple_loss=0.1836, pruned_loss=0.0713, over 1073944.00 frames. 
+2024-08-14 02:20:42,014 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:21:13,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=107541.33333333333, ans=0.0 +2024-08-14 02:21:41,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107594.66666666667, ans=0.1 +2024-08-14 02:23:50,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=107754.66666666667, ans=0.95 +2024-08-14 02:24:17,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107754.66666666667, ans=0.0 +2024-08-14 02:24:25,100 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 50, loss[loss=0.1629, simple_loss=0.1835, pruned_loss=0.06146, over 19037.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.1912, pruned_loss=0.07819, over 828666.57 frames. ], batch size: 102, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:24:37,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.99 vs. limit=15.0 +2024-08-14 02:25:06,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=107861.33333333333, ans=0.0 +2024-08-14 02:26:20,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=107914.66666666667, ans=0.125 +2024-08-14 02:26:30,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107914.66666666667, ans=0.125 +2024-08-14 02:26:51,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-08-14 02:27:04,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107968.0, ans=0.1 +2024-08-14 02:27:09,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 1.754e+02 1.846e+02 2.049e+02 2.889e+02, threshold=3.691e+02, percent-clipped=0.0 +2024-08-14 02:27:13,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=108021.33333333333, ans=0.025 +2024-08-14 02:27:36,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=108074.66666666667, ans=0.0 +2024-08-14 02:27:37,278 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 12, batch 100, loss[loss=0.1505, simple_loss=0.1636, pruned_loss=0.0625, over 19142.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.1898, pruned_loss=0.07656, over 1477170.44 frames. 
], batch size: 133, lr: 9.97e-05, grad_scale: 32.0 +2024-08-14 02:28:03,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108128.0, ans=0.125 +2024-08-14 02:28:48,193 INFO [dysarthria_finetune.py:1435] (3/4) (13997113344, 34072559616) +2024-08-14 02:28:48,194 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:28:48,236 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:29:01,644 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 0, loss[loss=0.1797, simple_loss=0.1891, pruned_loss=0.08004, over 18629.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.1891, pruned_loss=0.08004, over 18629.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:29:01,644 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:29:08,193 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.5646, 5.8579, 6.1448, 5.9949], device='cuda:3') +2024-08-14 02:29:24,537 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 13, validation: loss=0.1662, simple_loss=0.1808, pruned_loss=0.06949, over 1073944.00 frames. +2024-08-14 02:29:24,538 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:29:28,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=19.60 vs. limit=15.0 +2024-08-14 02:29:30,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108229.33333333333, ans=0.125 +2024-08-14 02:29:32,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=108229.33333333333, ans=0.0 +2024-08-14 02:29:42,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108229.33333333333, ans=0.0 +2024-08-14 02:29:51,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=108282.66666666667, ans=0.125 +2024-08-14 02:30:15,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=108336.0, ans=0.0 +2024-08-14 02:30:19,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108336.0, ans=0.0 +2024-08-14 02:30:27,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=108389.33333333333, ans=0.0 +2024-08-14 02:31:06,629 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 50, loss[loss=0.1766, simple_loss=0.1889, pruned_loss=0.07699, over 19050.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.1875, pruned_loss=0.07624, over 828311.79 frames. 
], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:31:14,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=108496.0, ans=0.125 +2024-08-14 02:31:14,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=108496.0, ans=0.2 +2024-08-14 02:31:26,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.68 vs. limit=22.5 +2024-08-14 02:31:27,175 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.723e+02 1.826e+02 1.962e+02 2.693e+02, threshold=3.652e+02, percent-clipped=0.0 +2024-08-14 02:31:30,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=108549.33333333333, ans=0.0 +2024-08-14 02:32:02,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108602.66666666667, ans=0.125 +2024-08-14 02:32:06,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=108656.0, ans=0.025 +2024-08-14 02:32:11,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108656.0, ans=0.0 +2024-08-14 02:32:28,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=108709.33333333333, ans=0.2 +2024-08-14 02:32:28,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108709.33333333333, ans=0.125 +2024-08-14 02:32:45,024 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 13, batch 100, loss[loss=0.1724, simple_loss=0.1871, pruned_loss=0.07406, over 19095.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.1868, pruned_loss=0.07517, over 1474662.24 frames. 
], batch size: 133, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:32:55,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108762.66666666667, ans=0.125 +2024-08-14 02:33:01,518 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:33:05,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=108816.0, ans=0.09899494936611666 +2024-08-14 02:33:07,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=108816.0, ans=0.125 +2024-08-14 02:33:17,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=108816.0, ans=0.0 +2024-08-14 02:33:21,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108816.0, ans=0.125 +2024-08-14 02:33:26,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108869.33333333333, ans=0.0 +2024-08-14 02:33:30,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=108869.33333333333, ans=10.0 +2024-08-14 02:33:38,638 INFO [dysarthria_finetune.py:1435] (3/4) (13955170304, 34072559616) +2024-08-14 02:33:38,639 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:33:38,682 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:33:51,769 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 0, loss[loss=0.2076, simple_loss=0.2169, pruned_loss=0.09542, over 18650.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2169, pruned_loss=0.09542, over 18650.00 frames. ], batch size: 65, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:33:51,770 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:34:15,234 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 14, validation: loss=0.1615, simple_loss=0.1782, pruned_loss=0.06778, over 1073944.00 frames. +2024-08-14 02:34:15,235 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:34:28,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108917.33333333333, ans=0.1 +2024-08-14 02:34:52,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.77 vs. limit=15.0 +2024-08-14 02:35:16,113 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.678e+02 1.779e+02 1.987e+02 2.879e+02, threshold=3.559e+02, percent-clipped=0.0 +2024-08-14 02:35:44,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=109130.66666666667, ans=0.125 +2024-08-14 02:35:50,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.18 vs. limit=15.0 +2024-08-14 02:35:52,700 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 50, loss[loss=0.1424, simple_loss=0.1678, pruned_loss=0.0535, over 19012.00 frames. 
], tot_loss[loss=0.1715, simple_loss=0.186, pruned_loss=0.07465, over 829335.16 frames. ], batch size: 102, lr: 9.96e-05, grad_scale: 32.0 +2024-08-14 02:36:09,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109184.0, ans=0.2 +2024-08-14 02:36:17,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.19 vs. limit=6.0 +2024-08-14 02:36:28,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109237.33333333333, ans=0.1 +2024-08-14 02:36:30,742 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 02:36:59,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=15.0 +2024-08-14 02:37:28,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=109450.66666666667, ans=0.0 +2024-08-14 02:37:28,721 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 14, batch 100, loss[loss=0.1599, simple_loss=0.1767, pruned_loss=0.06846, over 19114.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.1837, pruned_loss=0.07297, over 1476363.01 frames. ], batch size: 133, lr: 9.96e-05, grad_scale: 16.0 +2024-08-14 02:37:45,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=109450.66666666667, ans=0.0 +2024-08-14 02:37:52,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=109504.0, ans=0.0 +2024-08-14 02:38:05,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=109557.33333333333, ans=0.125 +2024-08-14 02:38:07,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=109557.33333333333, ans=0.0 +2024-08-14 02:38:21,621 INFO [dysarthria_finetune.py:1435] (3/4) (13988724736, 34072559616) +2024-08-14 02:38:21,622 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:38:21,675 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:38:34,952 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 0, loss[loss=0.1615, simple_loss=0.1796, pruned_loss=0.06895, over 18716.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.1796, pruned_loss=0.06895, over 18716.00 frames. ], batch size: 65, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:38:34,952 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:38:57,684 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 15, validation: loss=0.1571, simple_loss=0.176, pruned_loss=0.06629, over 1073944.00 frames. 
+2024-08-14 02:38:57,685 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:39:00,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109605.33333333333, ans=0.2 +2024-08-14 02:39:03,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-08-14 02:39:07,274 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.490e+02 1.642e+02 1.752e+02 1.914e+02 2.610e+02, threshold=3.503e+02, percent-clipped=0.0 +2024-08-14 02:39:16,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=109605.33333333333, ans=0.0 +2024-08-14 02:39:50,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=109712.0, ans=0.125 +2024-08-14 02:40:07,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0 +2024-08-14 02:40:19,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=109765.33333333333, ans=0.04949747468305833 +2024-08-14 02:40:41,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109818.66666666667, ans=0.125 +2024-08-14 02:40:49,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=109818.66666666667, ans=0.07 +2024-08-14 02:40:57,638 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 50, loss[loss=0.1329, simple_loss=0.1539, pruned_loss=0.05384, over 19179.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.1805, pruned_loss=0.07142, over 827713.24 frames. ], batch size: 103, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:42:05,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=109925.33333333333, ans=0.125 +2024-08-14 02:42:24,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.43 vs. limit=15.0 +2024-08-14 02:42:43,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=110032.0, ans=0.125 +2024-08-14 02:42:55,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=110085.33333333333, ans=0.0 +2024-08-14 02:43:17,709 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 15, batch 100, loss[loss=0.1376, simple_loss=0.1601, pruned_loss=0.05611, over 19073.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.1795, pruned_loss=0.07025, over 1475236.97 frames. ], batch size: 133, lr: 9.95e-05, grad_scale: 32.0 +2024-08-14 02:43:23,632 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.639e+02 1.741e+02 1.916e+02 2.571e+02, threshold=3.482e+02, percent-clipped=0.0 +2024-08-14 02:43:24,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110138.66666666667, ans=0.125 +2024-08-14 02:43:55,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.69 vs. 
limit=22.5 +2024-08-14 02:44:39,969 INFO [dysarthria_finetune.py:1435] (3/4) (434831360, 34072559616) +2024-08-14 02:44:39,970 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:44:40,037 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:44:54,087 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 0, loss[loss=0.1765, simple_loss=0.1964, pruned_loss=0.07736, over 18560.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.1964, pruned_loss=0.07736, over 18560.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:44:54,088 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:45:16,883 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 16, validation: loss=0.1529, simple_loss=0.1739, pruned_loss=0.06493, over 1073944.00 frames. +2024-08-14 02:45:16,884 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:45:30,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.90 vs. limit=15.0 +2024-08-14 02:45:38,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.18 vs. limit=15.0 +2024-08-14 02:45:48,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=110346.66666666667, ans=0.2 +2024-08-14 02:45:56,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-08-14 02:46:29,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=110453.33333333333, ans=0.125 +2024-08-14 02:46:30,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110453.33333333333, ans=0.125 +2024-08-14 02:46:36,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=110506.66666666667, ans=0.125 +2024-08-14 02:47:33,728 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 50, loss[loss=0.1504, simple_loss=0.1786, pruned_loss=0.06072, over 19044.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.1795, pruned_loss=0.07052, over 827661.95 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:47:43,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.66 vs. limit=8.0 +2024-08-14 02:48:16,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.23 vs. 
limit=22.5 +2024-08-14 02:48:22,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.614e+02 1.779e+02 1.933e+02 2.621e+02, threshold=3.558e+02, percent-clipped=0.0 +2024-08-14 02:49:20,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110773.33333333333, ans=0.125 +2024-08-14 02:49:26,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-08-14 02:49:34,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.14 vs. limit=22.5 +2024-08-14 02:49:34,513 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 16, batch 100, loss[loss=0.1544, simple_loss=0.1834, pruned_loss=0.06267, over 19120.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.1791, pruned_loss=0.07008, over 1474935.70 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 16.0 +2024-08-14 02:49:35,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110826.66666666667, ans=0.1 +2024-08-14 02:49:39,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=110826.66666666667, ans=0.025 +2024-08-14 02:50:11,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=110880.0, ans=0.0 +2024-08-14 02:50:28,867 INFO [dysarthria_finetune.py:1435] (3/4) (13967753216, 34072559616) +2024-08-14 02:50:28,868 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:50:28,918 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:50:47,978 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 0, loss[loss=0.2102, simple_loss=0.2204, pruned_loss=0.1, over 18583.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2204, pruned_loss=0.1, over 18583.00 frames. ], batch size: 65, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:50:47,979 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:51:11,071 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 17, validation: loss=0.1498, simple_loss=0.1721, pruned_loss=0.06377, over 1073944.00 frames. +2024-08-14 02:51:11,071 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:51:27,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=12.41 vs. limit=15.0 +2024-08-14 02:51:31,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=111034.66666666667, ans=0.025 +2024-08-14 02:51:31,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=14.24 vs. limit=12.0 +2024-08-14 02:51:48,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.83 vs. 
limit=15.0 +2024-08-14 02:52:04,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=111088.0, ans=0.0 +2024-08-14 02:52:51,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111194.66666666667, ans=0.125 +2024-08-14 02:52:59,547 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.599e+02 1.701e+02 1.889e+02 2.501e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-14 02:53:02,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111194.66666666667, ans=0.125 +2024-08-14 02:53:07,418 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 50, loss[loss=0.1637, simple_loss=0.1822, pruned_loss=0.07261, over 18982.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.1761, pruned_loss=0.06846, over 827806.80 frames. ], batch size: 102, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:53:20,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=111248.0, ans=0.0 +2024-08-14 02:53:22,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.18 vs. limit=10.0 +2024-08-14 02:53:51,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=111354.66666666667, ans=10.0 +2024-08-14 02:54:41,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111408.0, ans=0.125 +2024-08-14 02:55:04,514 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 17, batch 100, loss[loss=0.1366, simple_loss=0.1629, pruned_loss=0.05516, over 19078.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.1749, pruned_loss=0.06694, over 1476033.73 frames. ], batch size: 133, lr: 9.94e-05, grad_scale: 32.0 +2024-08-14 02:55:26,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=111568.0, ans=0.0 +2024-08-14 02:55:39,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111568.0, ans=0.125 +2024-08-14 02:56:14,229 INFO [dysarthria_finetune.py:1435] (3/4) (13955170304, 34072559616) +2024-08-14 02:56:14,230 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 02:56:14,277 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 02:56:27,747 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 0, loss[loss=0.1721, simple_loss=0.1919, pruned_loss=0.07617, over 18613.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.1919, pruned_loss=0.07617, over 18613.00 frames. ], batch size: 65, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 02:56:27,748 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 02:56:58,391 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 18, validation: loss=0.1479, simple_loss=0.1705, pruned_loss=0.06271, over 1073944.00 frames. 
+2024-08-14 02:56:58,392 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 02:57:05,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=111669.33333333333, ans=0.07 +2024-08-14 02:57:09,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111669.33333333333, ans=0.0 +2024-08-14 02:58:00,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=111722.66666666667, ans=0.0 +2024-08-14 02:58:03,301 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.609e+02 1.680e+02 1.858e+02 2.812e+02, threshold=3.359e+02, percent-clipped=0.0 +2024-08-14 02:58:09,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.51 vs. limit=10.0 +2024-08-14 02:58:36,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=111776.0, ans=0.0 +2024-08-14 02:58:56,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=111829.33333333333, ans=0.0 +2024-08-14 02:59:01,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=111829.33333333333, ans=0.0 +2024-08-14 02:59:10,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111829.33333333333, ans=0.125 +2024-08-14 02:59:10,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=111829.33333333333, ans=0.125 +2024-08-14 02:59:29,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.02 vs. limit=15.0 +2024-08-14 02:59:54,694 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 50, loss[loss=0.1436, simple_loss=0.168, pruned_loss=0.0596, over 19004.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.1712, pruned_loss=0.06509, over 828768.32 frames. ], batch size: 102, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 02:59:58,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=111936.0, ans=0.2 +2024-08-14 03:00:05,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=111936.0, ans=0.125 +2024-08-14 03:01:15,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.66 vs. 
limit=15.0 +2024-08-14 03:02:00,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=112096.0, ans=0.2 +2024-08-14 03:02:16,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=112096.0, ans=10.0 +2024-08-14 03:03:28,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112149.33333333333, ans=0.125 +2024-08-14 03:04:12,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112149.33333333333, ans=0.1 +2024-08-14 03:04:53,080 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 18, batch 100, loss[loss=0.13, simple_loss=0.1511, pruned_loss=0.05441, over 19084.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.1709, pruned_loss=0.06518, over 1476677.05 frames. ], batch size: 133, lr: 9.93e-05, grad_scale: 32.0 +2024-08-14 03:05:04,103 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 03:05:52,850 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.561e+02 1.643e+02 1.812e+02 2.261e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-14 03:06:42,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=11.19 vs. limit=12.0 +2024-08-14 03:06:47,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=112309.33333333333, ans=0.0 +2024-08-14 03:06:51,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112309.33333333333, ans=0.125 +2024-08-14 03:06:55,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=15.0 +2024-08-14 03:07:12,793 INFO [dysarthria_finetune.py:1435] (3/4) (12814319616, 34072559616) +2024-08-14 03:07:12,793 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 03:07:12,855 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 03:07:26,411 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 0, loss[loss=0.1931, simple_loss=0.2064, pruned_loss=0.08986, over 18562.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2064, pruned_loss=0.08986, over 18562.00 frames. ], batch size: 65, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:07:26,412 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 03:07:58,727 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 19, validation: loss=0.1464, simple_loss=0.169, pruned_loss=0.06188, over 1073944.00 frames. 
+2024-08-14 03:07:58,728 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 03:08:25,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112352.0, ans=0.125 +2024-08-14 03:08:44,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112405.33333333333, ans=0.125 +2024-08-14 03:09:56,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=112458.66666666667, ans=0.025 +2024-08-14 03:10:23,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112512.0, ans=0.1 +2024-08-14 03:10:38,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112565.33333333333, ans=0.125 +2024-08-14 03:10:40,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=112565.33333333333, ans=0.0 +2024-08-14 03:10:40,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=112565.33333333333, ans=0.025 +2024-08-14 03:10:57,804 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 50, loss[loss=0.1358, simple_loss=0.1555, pruned_loss=0.05803, over 19015.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.1719, pruned_loss=0.06576, over 829365.51 frames. ], batch size: 102, lr: 9.92e-05, grad_scale: 32.0 +2024-08-14 03:11:17,601 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-14 03:11:55,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=112672.0, ans=0.5 +2024-08-14 03:12:07,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=112672.0, ans=0.125 +2024-08-14 03:12:10,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.22 vs. limit=10.0 +2024-08-14 03:12:19,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112672.0, ans=0.1 +2024-08-14 03:12:40,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=112725.33333333333, ans=0.0 +2024-08-14 03:12:57,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-08-14 03:13:05,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=112778.66666666667, ans=0.0 +2024-08-14 03:13:37,512 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.593e+02 1.694e+02 1.909e+02 3.031e+02, threshold=3.389e+02, percent-clipped=0.0 +2024-08-14 03:13:43,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.96 vs. 
limit=15.0 +2024-08-14 03:14:02,408 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 19, batch 100, loss[loss=0.1263, simple_loss=0.149, pruned_loss=0.05176, over 19083.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.1719, pruned_loss=0.06554, over 1476389.98 frames. ], batch size: 133, lr: 9.92e-05, grad_scale: 16.0 +2024-08-14 03:14:15,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=112885.33333333333, ans=0.125 +2024-08-14 03:14:15,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-08-14 03:14:22,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112885.33333333333, ans=0.0 +2024-08-14 03:15:07,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112938.66666666667, ans=0.125 +2024-08-14 03:15:47,736 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.64 vs. limit=10.0 +2024-08-14 03:15:54,662 INFO [dysarthria_finetune.py:1435] (3/4) (13942587392, 34072559616) +2024-08-14 03:15:54,662 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 03:15:54,714 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 03:16:41,826 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 0, loss[loss=0.1815, simple_loss=0.194, pruned_loss=0.0845, over 18436.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.194, pruned_loss=0.0845, over 18436.00 frames. ], batch size: 65, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:16:41,827 INFO [dysarthria_finetune.py:1165] (3/4) Computing validation loss on speech +2024-08-14 03:17:15,217 INFO [dysarthria_finetune.py:1174] (3/4) Validation on speech: Epoch 20, validation: loss=0.1449, simple_loss=0.1677, pruned_loss=0.0611, over 1073944.00 frames. +2024-08-14 03:17:15,218 INFO [dysarthria_finetune.py:1177] (3/4) Maximum memory allocated so far is 14287MB +2024-08-14 03:17:41,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=113040.0, ans=0.125 +2024-08-14 03:17:47,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=113093.33333333333, ans=0.2 +2024-08-14 03:19:14,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=113146.66666666667, ans=0.2 +2024-08-14 03:19:25,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113146.66666666667, ans=0.1 +2024-08-14 03:19:25,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.46 vs. 
limit=15.0 +2024-08-14 03:19:34,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=113200.0, ans=0.04949747468305833 +2024-08-14 03:19:40,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113200.0, ans=0.125 +2024-08-14 03:20:55,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=113253.33333333333, ans=0.125 +2024-08-14 03:21:08,410 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 50, loss[loss=0.1451, simple_loss=0.1708, pruned_loss=0.05966, over 18942.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.1697, pruned_loss=0.06485, over 827999.75 frames. ], batch size: 102, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:22:09,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=113360.0, ans=0.0 +2024-08-14 03:22:10,067 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.567e+02 1.664e+02 1.868e+02 2.522e+02, threshold=3.327e+02, percent-clipped=0.0 +2024-08-14 03:22:28,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.96 vs. limit=22.5 +2024-08-14 03:22:37,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=113360.0, ans=0.2 +2024-08-14 03:23:59,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5 +2024-08-14 03:24:26,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.90 vs. limit=22.5 +2024-08-14 03:24:59,418 INFO [dysarthria_finetune.py:1141] (3/4) Epoch 20, batch 100, loss[loss=0.1083, simple_loss=0.1327, pruned_loss=0.04189, over 19171.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.1684, pruned_loss=0.06338, over 1475487.52 frames. ], batch size: 133, lr: 9.91e-05, grad_scale: 32.0 +2024-08-14 03:25:22,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=113573.33333333333, ans=0.2 +2024-08-14 03:26:34,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=113626.66666666667, ans=0.2 +2024-08-14 03:27:20,901 INFO [dysarthria_finetune.py:1435] (3/4) (13946781696, 34072559616) +2024-08-14 03:27:20,902 INFO [dysarthria_finetune.py:1436] (3/4) Empty cache: before and after +2024-08-14 03:27:20,945 INFO [dysarthria_finetune.py:1440] (3/4) (29576855552, 34072559616) +2024-08-14 03:27:20,945 INFO [dysarthria_finetune.py:1442] (3/4) Done! 
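The WARNING lines from optim.py above log grad-norm quartiles (min, 25%, median, 75%, max) together with a clipping threshold, and in every entry the threshold is 2.0 times the logged median (for example 4.229e+02 ≈ 2.0 × 2.115e+02), consistent with "Clipping_scale=2.0"; "percent-clipped=0.0" means no step in that window exceeded the threshold. Below is a minimal sketch of such a median-scaled clipping rule, not the repository's actual optim.py; the window length and the names `recent_norms` and `clip_gradients` are illustrative assumptions.

```python
from collections import deque

import torch

clipping_scale = 2.0              # matches "Clipping_scale=2.0" in the log lines
recent_norms = deque(maxlen=128)  # sliding window of grad norms; length is an assumption


def clip_gradients(parameters):
    """Log grad-norm quartiles and clip against clipping_scale * median."""
    params = [p for p in parameters if p.grad is not None]
    # max_norm=inf measures the total grad norm without actually clipping.
    total_norm = torch.nn.utils.clip_grad_norm_(params, max_norm=float("inf"))
    recent_norms.append(total_norm.item())

    norms = torch.tensor(list(recent_norms))
    quartiles = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()  # threshold = scale * median

    if total_norm.item() > threshold:
        # Rescale gradients in place so their total norm equals the threshold.
        for p in params:
            p.grad.mul_(threshold / total_norm.item())
    return quartiles, threshold
```

Relatedly, the paired (free, total) byte tuples printed around each "Empty cache: before and after" line appear to be torch.cuda.mem_get_info() snapshots taken before and after torch.cuda.empty_cache(): on this ~34 GB device, releasing the cache between epochs raises free memory from roughly 14 GB to 29.6 GB.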
diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723532126.cdr2650.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723532126.cdr2650.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..43457d3d00ead825f53a1753c80f84d1d2d314ec --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723532126.cdr2650.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8540c7763e9059b44d17c8091a697d3bb2068106b265b9131478919f1388c52 +size 2713 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723604632.cdr2649.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723604632.cdr2649.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..3eebb10fa2084a1def07128d3e0466011afe12a1 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723604632.cdr2649.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d8938ce5a041661ca003bd7730a36b90bdedc0ad588df390dc0acdf54c69b9c +size 1258 diff --git a/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723616688.cdr2649.int.cedar.computecanada.ca.70.0 b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723616688.cdr2649.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..d64b436add8c713235bc3c82169b2991b8dfd374 --- /dev/null +++ b/zipformer/finetuned/non_ctc/non_causal/exp_finetune/tensorboard/events.out.tfevents.1723616688.cdr2649.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e1ee011530ded5f6a641c63bcbd842da7a170d5172b9882cdb0d62d986cb5409 +size 33318 diff --git a/zipformer/pretrained/ctc/causal/exp/best-train-loss.pt b/zipformer/pretrained/ctc/causal/exp/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..1545fe340294e2d827f41eeda5833e8ec6710e00 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67cc7bf27b506c8f8e8f15235f3e4bf6d77b4a02ece1b03df8369bcd03b531f0 +size 1062964046 diff --git a/zipformer/pretrained/ctc/causal/exp/best-valid-loss.pt b/zipformer/pretrained/ctc/causal/exp/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..1545fe340294e2d827f41eeda5833e8ec6710e00 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67cc7bf27b506c8f8e8f15235f3e4bf6d77b4a02ece1b03df8369bcd03b531f0 +size 1062964046 diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-12000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-12000.pt new file mode 100644 index 0000000000000000000000000000000000000000..5f9a4e196e4bdaca3fffdb4e63a52f3009b72c91 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-12000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:daa0d052a0cbd03f076d94a67ff518ce67a60fe5b353f9bb448558366639a3f2 +size 1062981526 diff --git 
a/zipformer/pretrained/ctc/causal/exp/checkpoint-16000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-16000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a797a812cd930aeb7dc34c400421d3aebead29ac
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-16000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:850993b35913b34ddf7c7523dbe3319aa05ebb22bc059821c75e7d51fac7dbdc
+size 1062981654
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-20000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-20000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..48d76a5610f935eacdf97f5cf82b4094f1ffc228
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-20000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ae35c57baa90792cf6df7e37311ae9511495642ea032a041027e5ed2c0f9164
+size 1062981718
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-24000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-24000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..dcf2c47843c9108da1259363f29d2bbebe510201
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-24000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:13fe0b09f4259d5396b44719b08a92119f32f865bc3d1a5c67e15fe362d39451
+size 1062981782
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-28000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-28000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b91a88a6fc501a615b87ca4acbfddad6394cbe23
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-28000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:33ae62b50c47c15408bb9ec97328ae87b942d46771fd6233b76d118a4089abd3
+size 1062981910
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-32000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-32000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..bbc546500321fea8e0138cf550a5d03f9c8659a7
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-32000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c67271df2d530a600d308014bd3b8006ef37462d3339eed7b75ec0c8a0b5c854
+size 1062981910
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-36000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-36000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..993a09180bab900808eae4f7ece2cabc9701879b
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-36000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c8a8716468f5078a6ed84b3396fea73cedcfc1674a831062e177b4f3b7b9879
+size 1062981462
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-4000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-4000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..bfe31e79de51ad830c9c6bfe25e897c7114e494e
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-4000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7282c284c0fe3f0de14775219ee0557fe4a23bddd8f1d4f614df9092fc7acf6
+size 1062979229
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-40000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-40000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..2949aadce4aaf0e2d2d0d7ddb0ab2936b5d5d647
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-40000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e28573be2b8cd387ad6c4e0555fa5cef97461dd1fee8cca3974c0f8f34371d63
+size 1062981398
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-44000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-44000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b8760cfd7c008cbebe3fdbbd3de41e0c01f925ac
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-44000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a56f79a7ad4bbf46eb01c8295d8bee29961a993fb487530f9ba1372d12deb8f8
+size 1062981398
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-48000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-48000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..6c3b135bd69c443d551ff6b99340d562e19fdd5f
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-48000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c06f614ef6af29291a42c0889e9adaab77a601439c27a0e431b85ca024b6721f
+size 1062981526
diff --git a/zipformer/pretrained/ctc/causal/exp/checkpoint-8000.pt b/zipformer/pretrained/ctc/causal/exp/checkpoint-8000.pt
new file mode 100644
index 0000000000000000000000000000000000000000..834500fd5b4ba090aed05836db55795102320148
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/checkpoint-8000.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4934bf4f850b2d9d8b54af562b3b4d6a6bf30559e9b3b5c016576e441d6fc358
+size 1062979293
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-1.pt b/zipformer/pretrained/ctc/causal/exp/epoch-1.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d29617597252f8f4aae7f77df2cf07c6098c624b
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-1.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3b5d7655b8e150e919e2dfe4f25dd230f20af2d9fc5d0e8f2fe8477eb0958bf
+size 1062961173
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-10.pt b/zipformer/pretrained/ctc/causal/exp/epoch-10.pt
new file mode 100644
index 0000000000000000000000000000000000000000..490a56adb3b797fa4d105fe631277c0fe64ad1eb
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-10.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce210f71dde53db846470f4438ca8981a5bb5862504e9bb621c2abefe16b801f
+size 1062964302
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-11.pt b/zipformer/pretrained/ctc/causal/exp/epoch-11.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c4daec9011a40a641c632902409e69cc47b99ad9
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-11.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4207336a54cf4fd8ebf28b906b2a4bf71a29cde577c8447778a1a6f1c2dff40a
+size 1062964366
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-12.pt b/zipformer/pretrained/ctc/causal/exp/epoch-12.pt
new file mode 100644
index 0000000000000000000000000000000000000000..be448be1335da4d0deca266f3b5de975fea323d6
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-12.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c7ba168264ea2e6ba9eed0e04ff7045f236834fce410d3588c049abca887fdf
+size 1062964366
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-13.pt b/zipformer/pretrained/ctc/causal/exp/epoch-13.pt
new file mode 100644
index 0000000000000000000000000000000000000000..dfa93c3f60990e8915b08e6d35b99516b89562b2
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-13.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:caf32501297129597767ab5656d3a9bba610baccbfcf42c208315f8a87d20546
+size 1062964430
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-14.pt b/zipformer/pretrained/ctc/causal/exp/epoch-14.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d7b3e01e33e6cbda330833837ae21857fb912275
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-14.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4741ab625e55a8bf10b267dbfceaea1aa44b8c5a6b590c6afa2a8802117839d8
+size 1062963918
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-15.pt b/zipformer/pretrained/ctc/causal/exp/epoch-15.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c3398606ad29edf5bd2a0a8f06dfdca842ad07b1
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-15.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab16e338dee355d8583067604d9cf505abee05c784db0b638e3d4708b8433efe
+size 1062963982
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-16.pt b/zipformer/pretrained/ctc/causal/exp/epoch-16.pt
new file mode 100644
index 0000000000000000000000000000000000000000..e3823c164b6c6b8408fe3a2f9f14112eeeebc063
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-16.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7487753fada1b15a96bbe85ef051ae77db988bb9d2c721c1ac5c52d39a982c1f
+size 1062964046
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-17.pt b/zipformer/pretrained/ctc/causal/exp/epoch-17.pt
new file mode 100644
index 0000000000000000000000000000000000000000..4ec1cec180dde3fd0154d7c96c9a0eb09e432333
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-17.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4190acd166d549f6ff45f92c5ec0e468d3453181793030d13995afb4651676f0
+size 1062963918
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-18.pt b/zipformer/pretrained/ctc/causal/exp/epoch-18.pt
new file mode 100644
index 0000000000000000000000000000000000000000..45632e2dab0b42efd7b73cc5584e303f4ac64534
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-18.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53bcafdb1bb7b03e7803cbb9e94bcfa9a5ccc028e9a1211ad7d2387e6473c6b3
+size 1062963918
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-19.pt b/zipformer/pretrained/ctc/causal/exp/epoch-19.pt
new file mode 100644
index 0000000000000000000000000000000000000000..5bac506833a5482c9d53c3de3692d24d4c6c9b37
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-19.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3965981c384b21858edd0ba79fa963b830634de4e5609e53d15d9491dbe73e8c
+size 1062963982
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-2.pt b/zipformer/pretrained/ctc/causal/exp/epoch-2.pt
new file mode 100644
index 0000000000000000000000000000000000000000..8c0d988c1e28e933ed0e5b9828a7af0a06cf9028
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-2.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:140dd0e719c2564e63254777fcb5a5e809b45c524761d99e848939b79d2a85fa
+size 1062961237
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-20.pt b/zipformer/pretrained/ctc/causal/exp/epoch-20.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1545fe340294e2d827f41eeda5833e8ec6710e00
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-20.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:67cc7bf27b506c8f8e8f15235f3e4bf6d77b4a02ece1b03df8369bcd03b531f0
+size 1062964046
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-3.pt b/zipformer/pretrained/ctc/causal/exp/epoch-3.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fdae6ee05fd6506da8b35fba443adde7d72d21e4
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-3.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2125d40afd9da39b03fee242d410af37e4b02cb26db2ad74002a7b90099f9b98
+size 1062961301
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-4.pt b/zipformer/pretrained/ctc/causal/exp/epoch-4.pt
new file mode 100644
index 0000000000000000000000000000000000000000..18699c2e1dae1e7b392606c51ea911d23c423b85
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-4.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a74e0311bf6e996d56f4c035be74c301f4294d99e7d7293b10f8f256ee1c34d0
+size 1062961301
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-5.pt b/zipformer/pretrained/ctc/causal/exp/epoch-5.pt
new file mode 100644
index 0000000000000000000000000000000000000000..3c895c5c743c519c2e756c1f5599bd2953121853
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-5.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aefc7d7f2f07711438c8e45938564ed4a6765512ba0c5f038d7d96aaad4e77cd
+size 1062961365
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-6.pt b/zipformer/pretrained/ctc/causal/exp/epoch-6.pt
new file mode 100644
index 0000000000000000000000000000000000000000..799b4c8a99f73b714a9f4dfdbe350f6ee9bfcec4
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-6.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d585e81ee4b02dd85dade0635a90c1c9557d8333d613bb4fa5de1df439340edf
+size 1062961429
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-7.pt b/zipformer/pretrained/ctc/causal/exp/epoch-7.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1c05b9dfdd85984843e1e93f0fe883cb9503009e
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-7.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:00f567efe03a64bbb2b4e6aea83d0e843eda4b82e6dc9368d055eb5c51fc8eff
+size 1062961493
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-8.pt b/zipformer/pretrained/ctc/causal/exp/epoch-8.pt
new file mode 100644
index 0000000000000000000000000000000000000000..18acc172465cacb94486351b260be7ef695cc492
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-8.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea516f04be5ccabe53945e12247a30feeb8400a27b613386a6ceed80d1ae6a16
+size 1062961493
diff --git a/zipformer/pretrained/ctc/causal/exp/epoch-9.pt b/zipformer/pretrained/ctc/causal/exp/epoch-9.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0e836f01ae06eac50707e1aa9d98d7ceab9aec82
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/epoch-9.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:80c9c071f902eeec395bbf6364e395e6f2d4ee030c7b6e7bc4755e084fab3c7c
+size 1062961557
diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-0
new file mode 100644
index 0000000000000000000000000000000000000000..5fb13f1f285c78f91e4493dc2619180f75751555
--- /dev/null
+++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-0
@@ -0,0 +1,4886 @@
+2024-08-25 02:23:27,399 INFO [train.py:1182] (0/4) Training started
+2024-08-25 02:23:46,264 INFO [train.py:1192] (0/4) Device: cuda:0
+2024-08-25 02:23:46,266 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500}
+2024-08-25 02:23:46,266 INFO [train.py:1212] (0/4) About to create model
+2024-08-25 02:23:46,944 INFO [train.py:1216] (0/4) Number
of model parameters: 66367431
+2024-08-25 02:23:47,739 INFO [train.py:1231] (0/4) Using DDP
+2024-08-25 02:23:51,127 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts
+2024-08-25 02:23:51,497 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN
+2024-08-25 02:23:51,497 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment
+2024-08-25 02:23:51,498 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80
+2024-08-25 02:23:51,498 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10
+2024-08-25 02:23:51,498 INFO [asr_datamodule.py:738] (0/4) About to create train dataset
+2024-08-25 02:23:51,498 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler.
+2024-08-25 02:23:53,043 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader
+2024-08-25 02:23:53,051 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts
+2024-08-25 02:23:53,293 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts
+2024-08-25 02:23:53,346 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset
+2024-08-25 02:23:53,656 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader
+2024-08-25 02:23:53,656 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-25 02:27:50,706 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12287MB
+2024-08-25 02:27:52,173 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12375MB
+2024-08-25 02:28:01,911 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12375MB
+2024-08-25 02:28:03,369 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12375MB
+2024-08-25 02:28:25,874 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=256, metric=42.50 vs. limit=7.5
+2024-08-25 02:28:26,151 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12375MB
+2024-08-25 02:28:27,775 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12611MB
+2024-08-25 02:29:16,119 INFO [train.py:1114] (0/4) Epoch 1, batch 0, loss[loss=8.844, simple_loss=7.212, pruned_loss=6.79, ctc_loss=4.757, over 19814.00 frames. ], tot_loss[loss=8.844, simple_loss=7.212, pruned_loss=6.79, ctc_loss=4.757, over 19814.00 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 1.0
+2024-08-25 02:29:16,120 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-25 02:29:29,442 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=8.973, simple_loss=7.311, pruned_loss=6.819, ctc_loss=4.895, over 944034.00 frames.
+2024-08-25 02:29:29,443 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12611MB
+2024-08-25 02:29:31,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.82 vs.
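Each checkpoint in the manifest above is stored as a Git LFS pointer rather than the tensor data itself: a three-line stub giving the LFS spec version, the SHA-256 of the payload, and its size in bytes (roughly 1.06 GB per file here; `git lfs pull` fetches the real checkpoints). A minimal sketch of reading such a stub, where `parse_lfs_pointer` is a hypothetical helper and not part of icefall or git-lfs:

```python
# Minimal sketch: parse a Git LFS pointer file into its fields.
# parse_lfs_pointer is a hypothetical helper, not an icefall/git-lfs API.
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")   # lines look like "oid sha256:..."
        fields[key] = value
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

info = parse_lfs_pointer("zipformer/pretrained/ctc/causal/exp/epoch-20.pt")
print(f"{info['size_bytes'] / 1e9:.2f} GB, sha256 {info['sha256'][:12]}...")
```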
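The `lr:` values in the batch summaries below (2.25e-02 at batch 0, rising to 4.49e-02 around batch 500, then slowly decaying) are consistent with icefall's Eden-style schedule driven by the `base_lr=0.045`, `lr_batches=7500`, and `lr_epochs=3.5` settings in the config dump above, combined with a linear warm-up from a factor of 0.5 over the first 500 batches. A sketch under that assumption; the warm-up form is inferred from the logged values, not copied from train.py:

```python
# Hedged reconstruction of an Eden-style LR schedule. The warm-up
# (0.5 -> 1.0 over 500 batches) and the epoch term counted from 0 are
# inferred from the logged lr values, not taken verbatim from icefall.
def eden_lr(batch: int, epoch: float, base_lr: float = 0.045,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5,
            warmup_batches: int = 500) -> float:
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    warmup = min(1.0, 0.5 + 0.5 * batch / warmup_batches)
    return base_lr * batch_factor * epoch_factor * warmup

# Matches the logged 2.25e-02 / 2.48e-02 / 2.70e-02 / 4.49e-02 up to rounding.
for b in (0, 50, 100, 500):
    print(f"batch {b}: lr = {eden_lr(b, epoch=0):.2e}")
```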
limit=7.5 +2024-08-25 02:29:40,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=0.0, ans=7.5 +2024-08-25 02:30:23,438 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+03 3.750e+03 4.817e+03 5.615e+03 6.551e+03, threshold=1.927e+04, percent-clipped=0.0 +2024-08-25 02:30:45,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=53.333333333333336, ans=0.4975 +2024-08-25 02:32:20,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=105.09 vs. limit=7.54 +2024-08-25 02:32:26,052 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.867e+02 1.019e+03 3.714e+03 5.063e+03 6.846e+03, threshold=1.486e+04, percent-clipped=0.0 +2024-08-25 02:32:40,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106.66666666666667, ans=0.29893333333333333 +2024-08-25 02:32:40,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=238.44 vs. limit=7.54 +2024-08-25 02:33:18,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=297.01 vs. limit=7.56 +2024-08-25 02:33:29,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=213.33333333333334, ans=0.04933333333333333 +2024-08-25 02:33:33,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=213.33333333333334, ans=0.192 +2024-08-25 02:33:34,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.10 vs. limit=7.66 +2024-08-25 02:33:36,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=237.78 vs. limit=7.66 +2024-08-25 02:33:36,811 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+02 7.649e+02 1.076e+03 3.731e+03 6.846e+03, threshold=4.304e+03, percent-clipped=0.0 +2024-08-25 02:33:47,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=17.69 vs. limit=4.085333333333334 +2024-08-25 02:33:48,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=213.33333333333334, ans=7.58 +2024-08-25 02:34:02,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=38.61 vs. limit=7.6 +2024-08-25 02:34:04,703 INFO [train.py:1114] (0/4) Epoch 1, batch 50, loss[loss=1.365, simple_loss=1.015, pruned_loss=1.182, ctc_loss=1.089, over 19697.00 frames. ], tot_loss[loss=3.548, simple_loss=2.93, pruned_loss=2.55, ctc_loss=1.778, over 845725.26 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 02:34:09,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=83.60 vs. 
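The optim.py warnings above report the min/25%/median/75%/max of recent gradient norms together with the clipping threshold in force; in these logs the threshold tracks roughly 4x the running median (e.g. 1.927e+04 is about 4 x 4.817e+03). A sketch of that style of adaptive clipping follows; the 4x-median rule is an observation from the logged numbers, not the exact formula used by icefall's ScaledAdam:

```python
import collections
import torch

# Sketch of median-based adaptive gradient clipping, assuming the
# threshold is 4x the running median grad norm (inferred from the log;
# icefall's ScaledAdam implements its own variant of this idea).
class MedianClipper:
    def __init__(self, history: int = 128, scale: float = 4.0):
        self.norms = collections.deque(maxlen=history)
        self.scale = scale

    def clip_(self, params) -> float:
        params = [p for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.scale * median
        if norm > threshold:                 # rescale grads above threshold
            for p in params:
                p.grad.mul_(threshold / norm)
        return norm
```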
limit=5.133333333333334 +2024-08-25 02:34:32,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.82 vs. limit=3.048 +2024-08-25 02:34:32,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=35.84 vs. limit=7.62 +2024-08-25 02:34:35,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=320.0, ans=7.74 +2024-08-25 02:35:14,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=20.72 vs. limit=5.093333333333334 +2024-08-25 02:35:25,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=26.31 vs. limit=5.093333333333334 +2024-08-25 02:35:28,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=426.6666666666667, ans=0.226 +2024-08-25 02:35:28,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=50.79 vs. limit=7.66 +2024-08-25 02:37:25,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=480.0, ans=7.68 +2024-08-25 02:37:42,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=41.10 vs. limit=7.86 +2024-08-25 02:37:48,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=130.03 vs. limit=7.68 +2024-08-25 02:37:49,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=40.94 vs. limit=7.68 +2024-08-25 02:37:51,541 INFO [train.py:1114] (0/4) Epoch 1, batch 100, loss[loss=1.353, simple_loss=0.9669, pruned_loss=1.236, ctc_loss=1.153, over 19718.00 frames. ], tot_loss[loss=2.407, simple_loss=1.911, pruned_loss=1.859, ctc_loss=1.468, over 1499439.12 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 02:37:55,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.639e+01 1.517e+02 3.832e+02 1.019e+03 9.054e+03, threshold=7.665e+02, percent-clipped=2.0 +2024-08-25 02:38:01,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=22.74 vs. limit=4.213333333333333 +2024-08-25 02:38:02,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=533.3333333333334, ans=0.43333333333333335 +2024-08-25 02:38:05,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=533.3333333333334, ans=5.333333333333333 +2024-08-25 02:38:07,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=193.72 vs. limit=5.293333333333333 +2024-08-25 02:38:10,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=14.22 vs. 
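The scaling.py "Whitening" lines compare a per-module anisotropy metric against a slowly rising limit; when the metric exceeds the limit, the module's activations are nudged toward a whitened (isotropic) covariance. One natural form for such a metric is the ratio mean(lambda^2) / mean(lambda)^2 over the eigenvalues of the feature covariance, which equals 1.0 exactly when the features are whitened. A sketch under that assumption, not the literal scaling.py code:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (frames, channels). Returns mean(eig^2) / mean(eig)^2 of the
    # feature covariance; equals 1.0 iff all eigenvalues are equal.
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    return ((eigs**2).mean() / eigs.mean() ** 2).item()

# For iid Gaussian noise this is ~ 1 + channels/frames (~1.4 here),
# far below the hundreds logged for badly conditioned modules above.
print(whitening_metric(torch.randn(1000, 384)))
```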
limit=4.234666666666667 +2024-08-25 02:38:11,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=586.6666666666666, ans=7.72 +2024-08-25 02:38:19,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=328.05 vs. limit=7.72 +2024-08-25 02:38:25,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=20.39 vs. limit=5.1466666666666665 +2024-08-25 02:38:35,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=22.09 vs. limit=7.72 +2024-08-25 02:38:39,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=44.24 vs. limit=7.74 +2024-08-25 02:38:40,439 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=37.69 vs. limit=7.74 +2024-08-25 02:38:47,923 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=40.95 vs. limit=7.98 +2024-08-25 02:38:55,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=160.72 vs. limit=5.346666666666667 +2024-08-25 02:39:07,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=105.56 vs. limit=7.76 +2024-08-25 02:39:09,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=746.6666666666666, ans=0.46499999999999997 +2024-08-25 02:39:13,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=3.112 +2024-08-25 02:39:14,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=746.6666666666666, ans=0.24253333333333332 +2024-08-25 02:39:15,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=63.04 vs. limit=8.06 +2024-08-25 02:39:16,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=191.76 vs. limit=7.78 +2024-08-25 02:39:22,873 INFO [train.py:1114] (0/4) Epoch 1, batch 150, loss[loss=1.132, simple_loss=0.783, pruned_loss=0.9897, ctc_loss=1.062, over 19717.00 frames. ], tot_loss[loss=1.939, simple_loss=1.489, pruned_loss=1.561, ctc_loss=1.343, over 2027737.98 frames. ], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 02:39:26,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=800.0, ans=0.872 +2024-08-25 02:39:26,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=30.59 vs. 
limit=8.1 +2024-08-25 02:39:30,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=800.0, ans=0.872 +2024-08-25 02:39:41,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.35 vs. limit=8.14 +2024-08-25 02:39:56,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=91.94 vs. limit=7.84 +2024-08-25 02:40:15,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=51.79 vs. limit=7.86 +2024-08-25 02:40:20,617 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=13.95 vs. limit=5.253333333333333 +2024-08-25 02:40:21,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=1013.3333333333334, ans=0.5 +2024-08-25 02:40:32,731 INFO [train.py:1114] (0/4) Epoch 1, batch 200, loss[loss=1.257, simple_loss=0.8673, pruned_loss=1.006, ctc_loss=1.207, over 18215.00 frames. ], tot_loss[loss=1.687, simple_loss=1.262, pruned_loss=1.373, ctc_loss=1.278, over 2435361.75 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0 +2024-08-25 02:40:34,775 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=13.03 vs. limit=4.426666666666667 +2024-08-25 02:40:35,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.whiten.whitening_limit, batch_count=1066.6666666666667, ans=4.426666666666667 +2024-08-25 02:40:36,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=1066.6666666666667, ans=5.533333333333333 +2024-08-25 02:40:36,940 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.587e+01 1.185e+02 1.545e+02 1.999e+02 4.229e+02, threshold=3.089e+02, percent-clipped=0.0 +2024-08-25 02:41:02,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=21.53 vs. limit=4.426666666666667 +2024-08-25 02:41:08,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=1120.0, ans=0.8608 +2024-08-25 02:41:17,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.37 vs. limit=8.34 +2024-08-25 02:41:18,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=4.448 +2024-08-25 02:41:18,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=117.54 vs. limit=5.5600000000000005 +2024-08-25 02:41:24,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.09 vs. limit=8.38 +2024-08-25 02:41:50,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=155.74 vs. 
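The periodic train.py:1114 lines are the useful training signal in this log: per-batch and running ("tot_loss") values for the combined loss and its simple/pruned/CTC components, plus batch size, learning rate, and the AMP grad scale. A small sketch for pulling the running loss and lr out of such a log into a curve; the regex is written against the line format shown here and is a hypothetical helper, not an icefall tool:

```python
import re

# Extract (epoch, batch, tot_loss, lr) from icefall-style training logs.
PATTERN = re.compile(
    r"Epoch (\d+), batch (\d+),.*?tot_loss\[loss=([\d.]+).*?"
    r"lr: ([\d.e-]+)", re.DOTALL)

def loss_curve(path: str):
    points = []
    for line in open(path, encoding="utf-8"):
        m = PATTERN.search(line)
        if m:
            epoch, batch, loss, lr = m.groups()
            points.append((int(epoch), int(batch), float(loss), float(lr)))
    return points

# e.g. loss_curve("exp/log/log-train-2024-08-25-02-23-27-0")
# -> [(1, 0, 8.844, 0.0225), (1, 50, 3.548, 0.0248), ...]
```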
limit=7.96 +2024-08-25 02:42:01,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.83 vs. limit=8.46 +2024-08-25 02:42:11,858 INFO [train.py:1114] (0/4) Epoch 1, batch 250, loss[loss=1.253, simple_loss=0.8473, pruned_loss=0.9942, ctc_loss=1.236, over 19443.00 frames. ], tot_loss[loss=1.535, simple_loss=1.123, pruned_loss=1.249, ctc_loss=1.244, over 2755446.34 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0 +2024-08-25 02:42:20,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=101.13 vs. limit=8.0 +2024-08-25 02:42:22,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=46.16 vs. limit=8.0 +2024-08-25 02:42:24,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.24 vs. limit=8.5 +2024-08-25 02:42:43,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.29 vs. limit=5.36 +2024-08-25 02:42:44,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=44.28 vs. limit=8.04 +2024-08-25 02:42:45,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.12 vs. limit=3.216 +2024-08-25 02:42:54,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.82 vs. limit=8.58 +2024-08-25 02:42:55,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=225.64 vs. limit=8.06 +2024-08-25 02:42:57,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=87.74 vs. limit=8.06 +2024-08-25 02:42:58,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=29.18 vs. limit=8.06 +2024-08-25 02:43:02,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=116.38 vs. limit=5.746666666666666 +2024-08-25 02:43:18,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=60.41 vs. limit=8.08 +2024-08-25 02:43:22,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=1600.0, ans=0.425 +2024-08-25 02:43:23,909 INFO [train.py:1114] (0/4) Epoch 1, batch 300, loss[loss=1.223, simple_loss=0.8178, pruned_loss=0.9622, ctc_loss=1.204, over 19507.00 frames. ], tot_loss[loss=1.435, simple_loss=1.029, pruned_loss=1.159, ctc_loss=1.221, over 2998983.33 frames. 
], batch size: 61, lr: 3.60e-02, grad_scale: 2.0 +2024-08-25 02:43:27,980 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.516e+01 1.281e+02 1.784e+02 2.457e+02 1.092e+03, threshold=3.568e+02, percent-clipped=12.0 +2024-08-25 02:43:36,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=1653.3333333333333, ans=0.4225 +2024-08-25 02:43:38,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=90.18 vs. limit=8.12 +2024-08-25 02:43:43,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=1653.3333333333333, ans=0.4225 +2024-08-25 02:43:46,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=1653.3333333333333, ans=0.157 +2024-08-25 02:43:58,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=1706.6666666666667, ans=0.42 +2024-08-25 02:44:12,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.44 vs. limit=8.82 +2024-08-25 02:44:13,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=4.704 +2024-08-25 02:44:17,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=216.45 vs. limit=8.16 +2024-08-25 02:44:24,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.71 vs. limit=5.453333333333333 +2024-08-25 02:44:24,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=107.41 vs. limit=8.18 +2024-08-25 02:44:31,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=1813.3333333333333, ans=0.8365333333333334 +2024-08-25 02:44:37,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=4.746666666666667 +2024-08-25 02:44:38,850 INFO [train.py:1114] (0/4) Epoch 1, batch 350, loss[loss=1.085, simple_loss=0.7166, pruned_loss=0.8414, ctc_loss=1.073, over 19768.00 frames. ], tot_loss[loss=1.367, simple_loss=0.9644, pruned_loss=1.095, ctc_loss=1.206, over 3189785.81 frames. 
], batch size: 48, lr: 3.83e-02, grad_scale: 2.0 +2024-08-25 02:44:40,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=1866.6666666666667, ans=0.14500000000000002 +2024-08-25 02:44:43,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=1866.6666666666667, ans=0.7686666666666666 +2024-08-25 02:44:45,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=1866.6666666666667, ans=0.13 +2024-08-25 02:44:48,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=1866.6666666666667, ans=0.5 +2024-08-25 02:44:55,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.44 vs. limit=8.94 +2024-08-25 02:44:58,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.25 vs. limit=5.96 +2024-08-25 02:45:03,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=1920.0, ans=6.2 +2024-08-25 02:45:23,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=74.49 vs. limit=8.26 +2024-08-25 02:45:25,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2026.6666666666667, ans=0.405 +2024-08-25 02:45:25,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=16.06 vs. limit=8.26 +2024-08-25 02:45:26,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=2026.6666666666667, ans=0.08733333333333333 +2024-08-25 02:45:26,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=2026.6666666666667, ans=0.0544 +2024-08-25 02:45:31,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=2026.6666666666667, ans=0.5 +2024-08-25 02:46:57,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.94 vs. limit=8.28 +2024-08-25 02:47:09,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.61 vs. limit=3.32 +2024-08-25 02:47:09,644 INFO [train.py:1114] (0/4) Epoch 1, batch 400, loss[loss=1.209, simple_loss=0.7953, pruned_loss=0.9198, ctc_loss=1.177, over 19500.00 frames. ], tot_loss[loss=1.318, simple_loss=0.9159, pruned_loss=1.046, ctc_loss=1.192, over 3341471.66 frames. ], batch size: 54, lr: 4.05e-02, grad_scale: 4.0 +2024-08-25 02:47:13,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.873e+01 1.501e+02 1.913e+02 2.464e+02 6.763e+02, threshold=3.826e+02, percent-clipped=7.0 +2024-08-25 02:47:21,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=106.99 vs. 
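Each logged loss is a weighted combination of the transducer terms and CTC: the config above sets simple_loss_scale=0.5 and ctc_loss_scale=0.2, and in pruned-transducer recipes the pruned term is typically ramped in over the warm-up period (warm_step=2000 here). A hedged sketch of that combination; the ramp shape is an assumption, and the exact weighting lives in train.py:

```python
# Hedged sketch of combining the logged loss terms. The simple and CTC
# scales come from the logged config (0.5 and 0.2); the linear warm-up
# ramp on the pruned loss is an assumption, not icefall's exact rule.
def combined_loss(simple_loss, pruned_loss, ctc_loss, step,
                  simple_scale=0.5, ctc_scale=0.2, warm_step=2000):
    pruned_scale = min(1.0, step / warm_step)  # assumed ramp-in of pruned term
    return (simple_scale * simple_loss
            + pruned_scale * pruned_loss
            + ctc_scale * ctc_loss)
```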
limit=8.3 +2024-08-25 02:47:28,299 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=52.39 vs. limit=8.32 +2024-08-25 02:47:32,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.23 vs. limit=9.14 +2024-08-25 02:47:32,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=9.14 +2024-08-25 02:47:36,756 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=70.76 vs. limit=8.32 +2024-08-25 02:47:38,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=9.14 +2024-08-25 02:47:42,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=36.92 vs. limit=8.34 +2024-08-25 02:47:42,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=2240.0, ans=0.395 +2024-08-25 02:47:42,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=2240.0, ans=0.2336 +2024-08-25 02:47:47,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=65.20 vs. limit=8.34 +2024-08-25 02:47:56,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=2293.3333333333335, ans=0.8197333333333333 +2024-08-25 02:47:58,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=20.07 vs. limit=8.36 +2024-08-25 02:48:03,887 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.71 vs. limit=8.36 +2024-08-25 02:48:04,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=2293.3333333333335, ans=0.11399999999999999 +2024-08-25 02:48:05,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=9.22 +2024-08-25 02:48:13,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=52.23 vs. limit=8.38 +2024-08-25 02:48:13,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=198.25 vs. limit=8.38 +2024-08-25 02:48:14,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2346.6666666666665, ans=0.2765333333333333 +2024-08-25 02:48:21,729 INFO [train.py:1114] (0/4) Epoch 1, batch 450, loss[loss=1.191, simple_loss=0.7788, pruned_loss=0.8944, ctc_loss=1.146, over 19617.00 frames. ], tot_loss[loss=1.284, simple_loss=0.8804, pruned_loss=1.007, ctc_loss=1.18, over 3449265.00 frames. 
], batch size: 55, lr: 4.28e-02, grad_scale: 4.0 +2024-08-25 02:48:24,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2400.0, ans=0.774 +2024-08-25 02:48:24,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.64 vs. limit=9.3 +2024-08-25 02:48:25,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=8.4 +2024-08-25 02:48:38,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=15.57 vs. limit=8.42 +2024-08-25 02:48:43,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=2453.3333333333335, ans=0.108 +2024-08-25 02:48:44,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=2453.3333333333335, ans=0.8141333333333334 +2024-08-25 02:48:47,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=2506.6666666666665, ans=0.3825 +2024-08-25 02:48:49,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=39.32 vs. limit=8.44 +2024-08-25 02:48:53,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=27.10 vs. limit=8.44 +2024-08-25 02:48:53,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=2506.6666666666665, ans=0.3825 +2024-08-25 02:48:54,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=2506.6666666666665, ans=0.3825 +2024-08-25 02:49:03,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.97 vs. limit=5.64 +2024-08-25 02:49:11,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2560.0, ans=0.38 +2024-08-25 02:49:13,400 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=18.19 vs. limit=8.46 +2024-08-25 02:49:18,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.64 vs. limit=8.48 +2024-08-25 02:49:18,893 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=9.46 +2024-08-25 02:49:20,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=5.653333333333333 +2024-08-25 02:49:21,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=33.19 vs. limit=8.48 +2024-08-25 02:49:28,647 INFO [train.py:1114] (0/4) Epoch 1, batch 500, loss[loss=1.248, simple_loss=0.8246, pruned_loss=0.8885, ctc_loss=1.19, over 19666.00 frames. 
], tot_loss[loss=1.252, simple_loss=0.8503, pruned_loss=0.9674, ctc_loss=1.162, over 3545091.46 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:49:30,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=11.90 vs. limit=8.5 +2024-08-25 02:49:31,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-08-25 02:49:32,580 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.834e+02 2.411e+02 2.968e+02 6.409e+02, threshold=4.822e+02, percent-clipped=7.0 +2024-08-25 02:49:35,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.02 vs. limit=9.5 +2024-08-25 02:49:37,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2666.6666666666665, ans=0.16666666666666669 +2024-08-25 02:49:37,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.55 vs. limit=9.5 +2024-08-25 02:49:41,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2720.0, ans=0.7772 +2024-08-25 02:49:48,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=2720.0, ans=5.088 +2024-08-25 02:49:53,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.02 vs. limit=8.52 +2024-08-25 02:49:59,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.18 vs. limit=9.58 +2024-08-25 02:50:00,695 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=10.25 vs. limit=8.54 +2024-08-25 02:50:04,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.08 vs. limit=9.58 +2024-08-25 02:50:06,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.62 vs. limit=8.54 +2024-08-25 02:50:18,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2826.6666666666665, ans=0.7782666666666667 +2024-08-25 02:50:22,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.19 vs. limit=8.56 +2024-08-25 02:50:24,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.65 vs. limit=5.706666666666667 +2024-08-25 02:50:24,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=18.45 vs. limit=8.56 +2024-08-25 02:50:31,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.76 vs. 
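The grad_scale values in the batch summaries come from dynamic loss scaling under Native AMP: the scale is cut when an overflow is detected and doubled after a stretch of stable steps, which is why it walks from 1.0 down to 0.25 and then back up through 0.5, 1.0, 2.0, 4.0, and 8.0 across these batches. A generic sketch of that pattern with torch.cuda.amp (standard AMP usage, not icefall's actual training loop):

```python
import torch
from torch.cuda.amp import GradScaler, autocast

# Generic Native-AMP training step illustrating the mechanism behind the
# logged grad_scale values. Assumes model, optimizer, loss_fn, and a CUDA
# device exist; this is not the icefall train.py loop.
scaler = GradScaler()  # halves the scale on overflow, grows it when stable

def train_step(model, optimizer, batch, target, loss_fn):
    optimizer.zero_grad(set_to_none=True)
    with autocast():
        loss = loss_fn(model(batch), target)
    scaler.scale(loss).backward()   # backward on the scaled loss
    scaler.step(optimizer)          # unscales grads, skips step on inf/nan
    scaler.update()                 # adjusts the running grad_scale
    return loss.detach(), scaler.get_scale()
```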
limit=8.58 +2024-08-25 02:50:33,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=11.69 vs. limit=8.58 +2024-08-25 02:50:34,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=2880.0, ans=0.365 +2024-08-25 02:50:39,247 INFO [train.py:1114] (0/4) Epoch 1, batch 550, loss[loss=1.18, simple_loss=0.8011, pruned_loss=0.7633, ctc_loss=1.121, over 19288.00 frames. ], tot_loss[loss=1.22, simple_loss=0.8261, pruned_loss=0.9177, ctc_loss=1.138, over 3607779.82 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:50:45,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=23.75 vs. limit=8.6 +2024-08-25 02:50:47,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=18.43 vs. limit=8.6 +2024-08-25 02:50:47,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=17.64 vs. limit=8.6 +2024-08-25 02:50:54,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.39 vs. limit=5.733333333333333 +2024-08-25 02:51:00,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.75 vs. limit=8.620000000000001 +2024-08-25 02:51:02,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=2986.6666666666665, ans=0.7954666666666667 +2024-08-25 02:51:04,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.13 vs. limit=9.74 +2024-08-25 02:51:05,980 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=9.74 +2024-08-25 02:51:07,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=9.74 +2024-08-25 02:51:15,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=3040.0, ans=0.17188 +2024-08-25 02:51:15,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3040.0, ans=0.2696 +2024-08-25 02:51:20,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=3040.0, ans=0.086 +2024-08-25 02:51:23,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=8.64 +2024-08-25 02:51:25,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.35 vs. limit=8.66 +2024-08-25 02:51:31,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.65 vs. 
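The ScheduledFloat lines trace hyperparameters (dropout rates, balancer limits, bypass and skip rates) that are not constants but piecewise-linear functions of batch_count, with "ans" being the value currently in effect. A minimal sketch of that mechanism, assuming linear interpolation between (batch_count, value) breakpoints; the (0, 0.3) to (20000, 0.1) schedule below is inferred from the logged dropout_p values (ans=0.29893 at batch_count=106.67 and ans=0.2696 at batch_count=3040) rather than read out of scaling.py:

```python
# Minimal sketch of a ScheduledFloat-like value: piecewise-linear in
# batch_count, clamped at the end points (interpolation form assumed).
class ScheduledFloat:
    def __init__(self, *points):          # e.g. (0.0, 0.3), (20000.0, 0.1)
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
        return pts[-1][1]

dropout = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout.value(3040.0))  # 0.2696, matching the logged dropout_p above
```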
limit=9.82 +2024-08-25 02:51:31,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.68 vs. limit=5.773333333333333 +2024-08-25 02:51:35,077 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=2.100e+02 +2024-08-25 02:51:41,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=3146.6666666666665, ans=0.35250000000000004 +2024-08-25 02:51:44,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.56 vs. limit=8.68 +2024-08-25 02:51:48,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.51 vs. limit=9.86 +2024-08-25 02:51:54,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=3200.0, ans=0.35 +2024-08-25 02:51:55,125 INFO [train.py:1114] (0/4) Epoch 1, batch 600, loss[loss=0.9636, simple_loss=0.6644, pruned_loss=0.5763, ctc_loss=0.928, over 19369.00 frames. ], tot_loss[loss=1.173, simple_loss=0.7961, pruned_loss=0.8503, ctc_loss=1.1, over 3664209.30 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:51:59,177 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 2.677e+02 3.553e+02 4.456e+02 9.241e+02, threshold=7.106e+02, percent-clipped=18.0 +2024-08-25 02:52:09,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.25 vs. limit=5.8133333333333335 +2024-08-25 02:52:33,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.83 vs. limit=9.98 +2024-08-25 02:52:51,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=8.78 +2024-08-25 02:53:01,034 INFO [train.py:1114] (0/4) Epoch 1, batch 650, loss[loss=0.9319, simple_loss=0.6536, pruned_loss=0.5221, ctc_loss=0.8974, over 19771.00 frames. ], tot_loss[loss=1.111, simple_loss=0.758, pruned_loss=0.7741, ctc_loss=1.047, over 3714614.08 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 4.0 +2024-08-25 02:53:04,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=10.1 +2024-08-25 02:53:14,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=3520.0, ans=0.0208 +2024-08-25 02:53:19,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.80 vs. limit=5.88 +2024-08-25 02:53:24,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=3520.0, ans=0.01808 +2024-08-25 02:53:49,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3626.6666666666665, ans=0.32999999999999996 +2024-08-25 02:53:54,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.83 vs. 
limit=10.26 +2024-08-25 02:53:57,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.63 vs. limit=8.879999999999999 +2024-08-25 02:53:57,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=3680.0, ans=0.017199999999999993 +2024-08-25 02:53:59,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.45 vs. limit=8.879999999999999 +2024-08-25 02:54:00,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=3680.0, ans=0.017199999999999993 +2024-08-25 02:54:00,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.68 vs. limit=8.879999999999999 +2024-08-25 02:54:09,008 INFO [train.py:1114] (0/4) Epoch 1, batch 700, loss[loss=0.7998, simple_loss=0.5628, pruned_loss=0.4396, ctc_loss=0.7653, over 19712.00 frames. ], tot_loss[loss=1.05, simple_loss=0.7216, pruned_loss=0.7016, ctc_loss=0.9919, over 3746967.74 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:54:12,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=8.9 +2024-08-25 02:54:14,213 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.975e+02 3.878e+02 5.385e+02 1.936e+03, threshold=7.756e+02, percent-clipped=10.0 +2024-08-25 02:54:27,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.31 vs. limit=8.92 +2024-08-25 02:54:30,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=3786.6666666666665, ans=0.3225 +2024-08-25 02:54:36,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.28 vs. limit=5.96 +2024-08-25 02:54:42,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=3840.0, ans=5.96 +2024-08-25 02:54:44,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.29 vs. limit=8.94 +2024-08-25 02:55:00,353 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.64 vs. limit=10.42 +2024-08-25 02:55:01,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.21 vs. 
limit=5.973333333333334 +2024-08-25 02:55:02,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=3893.3333333333335, ans=0.3175 +2024-08-25 02:55:12,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3946.6666666666665, ans=0.26053333333333334 +2024-08-25 02:55:15,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4000.0, ans=0.3125 +2024-08-25 02:55:16,802 INFO [train.py:1114] (0/4) Epoch 1, batch 750, loss[loss=0.8004, simple_loss=0.5798, pruned_loss=0.4093, ctc_loss=0.7508, over 19497.00 frames. ], tot_loss[loss=0.9861, simple_loss=0.6839, pruned_loss=0.6323, ctc_loss=0.9314, over 3772518.90 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:55:17,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=4000.0, ans=0.3125 +2024-08-25 02:55:36,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4053.3333333333335, ans=0.31 +2024-08-25 02:56:10,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=4213.333333333333, ans=0.2632 +2024-08-25 02:56:11,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.16 vs. limit=10.66 +2024-08-25 02:56:22,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4213.333333333333, ans=0.2578666666666667 +2024-08-25 02:56:24,841 INFO [train.py:1114] (0/4) Epoch 1, batch 800, loss[loss=0.6259, simple_loss=0.4699, pruned_loss=0.3012, ctc_loss=0.5569, over 19798.00 frames. ], tot_loss[loss=0.9258, simple_loss=0.6493, pruned_loss=0.5697, ctc_loss=0.8703, over 3793423.11 frames. ], batch size: 49, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 02:56:29,875 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.884e+02 2.945e+02 3.956e+02 5.210e+02 9.107e+02, threshold=7.913e+02, percent-clipped=4.0 +2024-08-25 02:57:15,330 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=5.158e-02 +2024-08-25 02:57:18,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.67 vs. limit=10.86 +2024-08-25 02:57:22,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4480.0, ans=0.29000000000000004 +2024-08-25 02:57:30,597 INFO [train.py:1114] (0/4) Epoch 1, batch 850, loss[loss=0.7145, simple_loss=0.5405, pruned_loss=0.3382, ctc_loss=0.6286, over 19652.00 frames. ], tot_loss[loss=0.8712, simple_loss=0.6186, pruned_loss=0.5148, ctc_loss=0.8122, over 3813331.40 frames. 
], batch size: 59, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 02:57:38,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=4533.333333333333, ans=0.7413333333333334 +2024-08-25 02:57:55,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=4586.666666666667, ans=0.28500000000000003 +2024-08-25 02:57:56,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=10.94 +2024-08-25 02:57:58,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=4640.0, ans=0.04733333333333334 +2024-08-25 02:58:04,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=4640.0, ans=0.2825 +2024-08-25 02:58:17,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=4693.333333333333, ans=7.933333333333334 +2024-08-25 02:58:42,819 INFO [train.py:1114] (0/4) Epoch 1, batch 900, loss[loss=0.6303, simple_loss=0.479, pruned_loss=0.2974, ctc_loss=0.5441, over 19815.00 frames. ], tot_loss[loss=0.8267, simple_loss=0.5941, pruned_loss=0.4705, ctc_loss=0.7628, over 3817847.63 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 02:58:48,910 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.783e+02 3.682e+02 4.971e+02 1.764e+03, threshold=7.364e+02, percent-clipped=6.0 +2024-08-25 02:59:04,247 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.512e-03 +2024-08-25 02:59:05,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=4853.333333333333, ans=0.27249999999999996 +2024-08-25 02:59:05,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=4853.333333333333, ans=0.2728 +2024-08-25 02:59:33,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=4960.0, ans=0.7264 +2024-08-25 02:59:41,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=5013.333333333333, ans=0.265 +2024-08-25 02:59:50,575 INFO [train.py:1114] (0/4) Epoch 1, batch 950, loss[loss=0.5972, simple_loss=0.4646, pruned_loss=0.2652, ctc_loss=0.5148, over 19497.00 frames. ], tot_loss[loss=0.7866, simple_loss=0.5722, pruned_loss=0.4319, ctc_loss=0.7177, over 3819682.03 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:00:02,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5120.0, ans=0.26 +2024-08-25 03:00:12,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.42 vs. 
limit=7.5600000000000005 +2024-08-25 03:00:15,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=5173.333333333333, ans=0.009744927536231884 +2024-08-25 03:00:54,654 INFO [train.py:1114] (0/4) Epoch 1, batch 1000, loss[loss=0.5859, simple_loss=0.4622, pruned_loss=0.2541, ctc_loss=0.4972, over 19848.00 frames. ], tot_loss[loss=0.7513, simple_loss=0.5535, pruned_loss=0.3986, ctc_loss=0.6767, over 3816190.86 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:01:01,310 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.847e+02 3.463e+02 4.611e+02 9.717e+02, threshold=6.926e+02, percent-clipped=4.0 +2024-08-25 03:01:01,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=5333.333333333333, ans=0.044444444444444446 +2024-08-25 03:01:04,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=5333.333333333333, ans=0.25 +2024-08-25 03:01:34,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.98 vs. limit=9.54 +2024-08-25 03:01:51,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=5546.666666666667, ans=0.24 +2024-08-25 03:02:07,694 INFO [train.py:1114] (0/4) Epoch 1, batch 1050, loss[loss=0.6187, simple_loss=0.492, pruned_loss=0.2646, ctc_loss=0.5219, over 19842.00 frames. ], tot_loss[loss=0.7164, simple_loss=0.5346, pruned_loss=0.368, ctc_loss=0.6368, over 3821745.37 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:02:10,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=5600.0, ans=8.5 +2024-08-25 03:02:15,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.49 vs. limit=11.7 +2024-08-25 03:02:16,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=5600.0, ans=0.043333333333333335 +2024-08-25 03:02:18,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5653.333333333333, ans=0.0 +2024-08-25 03:02:36,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=9.64 +2024-08-25 03:03:13,747 INFO [train.py:1114] (0/4) Epoch 1, batch 1100, loss[loss=0.5333, simple_loss=0.4344, pruned_loss=0.2207, ctc_loss=0.4354, over 19601.00 frames. ], tot_loss[loss=0.6862, simple_loss=0.5186, pruned_loss=0.342, ctc_loss=0.6017, over 3828941.86 frames. 
], batch size: 52, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:03:20,120 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 2.626e+02 3.754e+02 4.559e+02 6.965e+02, threshold=7.509e+02, percent-clipped=1.0 +2024-08-25 03:03:23,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5866.666666666667, ans=0.22499999999999998 +2024-08-25 03:03:33,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=5920.0, ans=0.2888 +2024-08-25 03:03:33,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5920.0, ans=0.22249999999999998 +2024-08-25 03:03:36,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5920.0, ans=0.2408 +2024-08-25 03:03:39,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5973.333333333333, ans=0.21999999999999997 +2024-08-25 03:04:13,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=6080.0, ans=0.21500000000000002 +2024-08-25 03:04:15,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=9.78 +2024-08-25 03:04:18,531 INFO [train.py:1114] (0/4) Epoch 1, batch 1150, loss[loss=0.5629, simple_loss=0.4536, pruned_loss=0.2398, ctc_loss=0.4584, over 19591.00 frames. ], tot_loss[loss=0.6605, simple_loss=0.5051, pruned_loss=0.3202, ctc_loss=0.5719, over 3828275.11 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 8.0 +2024-08-25 03:04:24,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.86 vs. limit=9.8 +2024-08-25 03:04:28,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=6133.333333333333, ans=0.21250000000000002 +2024-08-25 03:04:29,947 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.17 vs. limit=9.8 +2024-08-25 03:04:38,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=6186.666666666667, ans=0.00952463768115942 +2024-08-25 03:05:10,918 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:05:19,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6346.666666666667, ans=0.2025 +2024-08-25 03:05:19,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=6346.666666666667, ans=0.6778666666666666 +2024-08-25 03:05:24,509 INFO [train.py:1114] (0/4) Epoch 1, batch 1200, loss[loss=0.525, simple_loss=0.4368, pruned_loss=0.2125, ctc_loss=0.4163, over 19841.00 frames. ], tot_loss[loss=0.6398, simple_loss=0.4949, pruned_loss=0.3025, ctc_loss=0.547, over 3823615.31 frames. 
], batch size: 57, lr: 4.47e-02, grad_scale: 16.0 +2024-08-25 03:05:30,709 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.839e+02 2.702e+02 3.344e+02 4.028e+02 1.038e+03, threshold=6.687e+02, percent-clipped=4.0 +2024-08-25 03:05:45,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=6453.333333333333, ans=0.07 +2024-08-25 03:05:46,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=6453.333333333333, ans=0.23546666666666666 +2024-08-25 03:05:56,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=6506.666666666667, ans=0.6722666666666667 +2024-08-25 03:06:02,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=6506.666666666667, ans=0.195 +2024-08-25 03:06:04,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.20 vs. limit=9.94 +2024-08-25 03:06:05,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=6560.0, ans=0.03933333333333334 +2024-08-25 03:06:07,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.50 vs. limit=6.64 +2024-08-25 03:06:07,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.95 vs. limit=12.42 +2024-08-25 03:06:18,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.52 vs. limit=5.322666666666667 +2024-08-25 03:06:33,195 INFO [train.py:1114] (0/4) Epoch 1, batch 1250, loss[loss=0.5577, simple_loss=0.4624, pruned_loss=0.2294, ctc_loss=0.439, over 19507.00 frames. ], tot_loss[loss=0.6188, simple_loss=0.4847, pruned_loss=0.2854, ctc_loss=0.522, over 3841835.80 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 8.0 +2024-08-25 03:06:38,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=6666.666666666667, ans=0.03888888888888889 +2024-08-25 03:06:51,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6720.0, ans=0.185 +2024-08-25 03:06:54,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=12.54 +2024-08-25 03:06:54,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=6720.0, ans=12.54 +2024-08-25 03:06:54,499 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.59 vs. 
limit=6.68 +2024-08-25 03:07:18,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=6826.666666666667, ans=0.18 +2024-08-25 03:07:19,455 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:07:32,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=6880.0, ans=0.07 +2024-08-25 03:07:53,439 INFO [train.py:1114] (0/4) Epoch 1, batch 1300, loss[loss=0.5651, simple_loss=0.4631, pruned_loss=0.2361, ctc_loss=0.4542, over 18813.00 frames. ], tot_loss[loss=0.5974, simple_loss=0.4733, pruned_loss=0.2697, ctc_loss=0.4979, over 3845162.98 frames. ], batch size: 76, lr: 4.47e-02, grad_scale: 8.0 +2024-08-25 03:07:56,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.21 vs. limit=12.7 +2024-08-25 03:08:00,983 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.595e+02 3.171e+02 4.007e+02 5.829e+02, threshold=6.342e+02, percent-clipped=0.0 +2024-08-25 03:08:01,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=6933.333333333333, ans=0.175 +2024-08-25 03:08:29,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=7040.0, ans=0.05600000000000001 +2024-08-25 03:08:38,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.42 vs. limit=12.82 +2024-08-25 03:08:47,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.79 vs. limit=6.786666666666667 +2024-08-25 03:08:50,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=7146.666666666667, ans=0.16499999999999998 +2024-08-25 03:09:00,206 INFO [train.py:1114] (0/4) Epoch 1, batch 1350, loss[loss=0.5349, simple_loss=0.4416, pruned_loss=0.2219, ctc_loss=0.4269, over 19757.00 frames. ], tot_loss[loss=0.5798, simple_loss=0.4645, pruned_loss=0.2566, ctc_loss=0.4774, over 3855296.72 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:09:11,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=7253.333333333333, ans=0.036444444444444446 +2024-08-25 03:10:19,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=7306.666666666667, ans=0.15749999999999997 +2024-08-25 03:10:35,773 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:10:44,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=7360.0, ans=0.036000000000000004 +2024-08-25 03:12:10,378 INFO [train.py:1114] (0/4) Epoch 1, batch 1400, loss[loss=0.39, simple_loss=0.3473, pruned_loss=0.145, ctc_loss=0.292, over 19686.00 frames. ], tot_loss[loss=0.5646, simple_loss=0.4571, pruned_loss=0.2456, ctc_loss=0.4597, over 3862562.08 frames. 
], batch size: 46, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:12:15,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7466.666666666667, ans=0.15000000000000002 +2024-08-25 03:12:32,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.490e+02 2.974e+02 4.034e+02 6.918e+02, threshold=5.948e+02, percent-clipped=1.0 +2024-08-25 03:13:10,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=7626.666666666667, ans=0.14250000000000002 +2024-08-25 03:13:25,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7680.0, ans=0.14 +2024-08-25 03:13:28,394 INFO [train.py:1114] (0/4) Epoch 1, batch 1450, loss[loss=0.4807, simple_loss=0.4271, pruned_loss=0.1826, ctc_loss=0.3549, over 19680.00 frames. ], tot_loss[loss=0.5521, simple_loss=0.4515, pruned_loss=0.2364, ctc_loss=0.4454, over 3860643.92 frames. ], batch size: 63, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:13:28,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=7733.333333333333, ans=0.1375 +2024-08-25 03:13:42,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=7786.666666666667, ans=0.6274666666666666 +2024-08-25 03:14:23,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=7946.666666666667, ans=0.050333333333333334 +2024-08-25 03:14:24,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=7946.666666666667, ans=0.025 +2024-08-25 03:14:24,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.83 vs. limit=10.48 +2024-08-25 03:14:30,704 INFO [train.py:1114] (0/4) Epoch 1, batch 1500, loss[loss=0.4825, simple_loss=0.4233, pruned_loss=0.1861, ctc_loss=0.3682, over 19588.00 frames. ], tot_loss[loss=0.5398, simple_loss=0.4459, pruned_loss=0.2278, ctc_loss=0.4314, over 3860583.50 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:14:38,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.576e+02 3.382e+02 4.091e+02 7.597e+02, threshold=6.763e+02, percent-clipped=6.0 +2024-08-25 03:14:42,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=8053.333333333333, ans=0.125 +2024-08-25 03:14:56,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=13.54 +2024-08-25 03:15:22,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8213.333333333334, ans=0.125 +2024-08-25 03:15:40,011 INFO [train.py:1114] (0/4) Epoch 1, batch 1550, loss[loss=0.5507, simple_loss=0.4626, pruned_loss=0.225, ctc_loss=0.4395, over 19602.00 frames. ], tot_loss[loss=0.531, simple_loss=0.4421, pruned_loss=0.2218, ctc_loss=0.4213, over 3844875.38 frames. 
], batch size: 60, lr: 4.45e-02, grad_scale: 8.0 +2024-08-25 03:16:05,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=8373.333333333334, ans=0.125 +2024-08-25 03:16:07,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=8373.333333333334, ans=0.8337333333333333 +2024-08-25 03:16:23,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=8426.666666666666, ans=0.03155555555555556 +2024-08-25 03:16:30,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=8480.0, ans=0.00902608695652174 +2024-08-25 03:16:35,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.07 vs. limit=7.12 +2024-08-25 03:16:45,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=8480.0, ans=0.025 +2024-08-25 03:16:49,352 INFO [train.py:1114] (0/4) Epoch 1, batch 1600, loss[loss=0.4763, simple_loss=0.4216, pruned_loss=0.1842, ctc_loss=0.3611, over 19842.00 frames. ], tot_loss[loss=0.5216, simple_loss=0.4378, pruned_loss=0.2159, ctc_loss=0.4107, over 3835647.09 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 16.0 +2024-08-25 03:16:59,532 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.604e+02 3.125e+02 4.170e+02 2.617e+03, threshold=6.251e+02, percent-clipped=7.0 +2024-08-25 03:16:59,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=8533.333333333334, ans=0.125 +2024-08-25 03:17:06,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=8586.666666666666, ans=0.21413333333333334 +2024-08-25 03:17:36,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=8693.333333333334, ans=0.030444444444444444 +2024-08-25 03:17:40,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=8693.333333333334, ans=0.025 +2024-08-25 03:18:56,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.78 vs. limit=14.059999999999999 +2024-08-25 03:19:05,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=10.78 +2024-08-25 03:19:09,294 INFO [train.py:1114] (0/4) Epoch 1, batch 1650, loss[loss=0.5122, simple_loss=0.4395, pruned_loss=0.2038, ctc_loss=0.4115, over 19671.00 frames. ], tot_loss[loss=0.5123, simple_loss=0.4336, pruned_loss=0.2101, ctc_loss=0.4007, over 3832259.73 frames. 
], batch size: 59, lr: 4.45e-02, grad_scale: 16.0 +2024-08-25 03:19:15,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=8800.0, ans=0.008956521739130436 +2024-08-25 03:19:23,051 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:19:26,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=8853.333333333334, ans=0.008944927536231884 +2024-08-25 03:19:41,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=8906.666666666666, ans=0.125 +2024-08-25 03:20:12,486 INFO [train.py:1114] (0/4) Epoch 1, batch 1700, loss[loss=0.468, simple_loss=0.3963, pruned_loss=0.191, ctc_loss=0.3745, over 19670.00 frames. ], tot_loss[loss=0.5031, simple_loss=0.4294, pruned_loss=0.2046, ctc_loss=0.3909, over 3846772.86 frames. ], batch size: 46, lr: 4.44e-02, grad_scale: 16.0 +2024-08-25 03:20:19,831 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.395e+02 2.888e+02 3.702e+02 8.491e+02, threshold=5.776e+02, percent-clipped=2.0 +2024-08-25 03:20:30,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=9120.0, ans=0.125 +2024-08-25 03:20:35,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=9120.0, ans=10.92 +2024-08-25 03:20:54,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 03:22:16,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 03:22:23,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=9280.0, ans=0.008852173913043479 +2024-08-25 03:22:23,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=9280.0, ans=0.5752 +2024-08-25 03:22:25,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=9280.0, ans=0.5752 +2024-08-25 03:22:25,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.04 vs. limit=14.46 +2024-08-25 03:22:29,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9280.0, ans=0.0 +2024-08-25 03:22:33,855 INFO [train.py:1114] (0/4) Epoch 1, batch 1750, loss[loss=0.3843, simple_loss=0.3631, pruned_loss=0.1421, ctc_loss=0.2698, over 19689.00 frames. ], tot_loss[loss=0.4939, simple_loss=0.4254, pruned_loss=0.1994, ctc_loss=0.3812, over 3851413.56 frames. 
], batch size: 45, lr: 4.44e-02, grad_scale: 16.0 +2024-08-25 03:22:50,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=9386.666666666666, ans=0.125 +2024-08-25 03:22:53,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=9386.666666666666, ans=0.025 +2024-08-25 03:22:54,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=9386.666666666666, ans=0.125 +2024-08-25 03:23:01,283 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:23:25,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9546.666666666666, ans=0.125 +2024-08-25 03:23:31,438 INFO [train.py:1114] (0/4) Epoch 1, batch 1800, loss[loss=0.4395, simple_loss=0.4101, pruned_loss=0.1635, ctc_loss=0.3256, over 19621.00 frames. ], tot_loss[loss=0.487, simple_loss=0.4228, pruned_loss=0.1953, ctc_loss=0.374, over 3852102.84 frames. ], batch size: 55, lr: 4.44e-02, grad_scale: 8.0 +2024-08-25 03:23:32,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.50 vs. limit=11.1 +2024-08-25 03:23:39,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.646e+02 3.473e+02 4.220e+02 8.344e+02, threshold=6.945e+02, percent-clipped=3.0 +2024-08-25 03:23:40,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=9600.0, ans=0.125 +2024-08-25 03:23:44,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=14.74 +2024-08-25 03:24:08,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=9760.0, ans=0.125 +2024-08-25 03:24:16,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=9760.0, ans=0.125 +2024-08-25 03:24:17,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.18 vs. limit=14.82 +2024-08-25 03:24:28,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=9813.333333333334, ans=0.125 +2024-08-25 03:24:35,868 INFO [train.py:1114] (0/4) Epoch 1, batch 1850, loss[loss=0.4632, simple_loss=0.4237, pruned_loss=0.1806, ctc_loss=0.3355, over 19592.00 frames. ], tot_loss[loss=0.4784, simple_loss=0.4194, pruned_loss=0.1905, ctc_loss=0.3654, over 3856242.32 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:24:57,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9920.0, ans=0.2008 +2024-08-25 03:25:20,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=10026.666666666666, ans=0.025 +2024-08-25 03:25:25,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.16 vs. 
limit=8.010666666666665 +2024-08-25 03:25:40,395 INFO [train.py:1114] (0/4) Epoch 1, batch 1900, loss[loss=0.5035, simple_loss=0.4569, pruned_loss=0.1982, ctc_loss=0.3722, over 19637.00 frames. ], tot_loss[loss=0.4741, simple_loss=0.4187, pruned_loss=0.1881, ctc_loss=0.3605, over 3862203.38 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:25:48,468 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 2.554e+02 2.990e+02 4.033e+02 8.041e+02, threshold=5.979e+02, percent-clipped=3.0 +2024-08-25 03:25:52,383 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.02 vs. limit=11.32 +2024-08-25 03:26:10,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=11.34 +2024-08-25 03:26:11,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=10240.0, ans=0.024000000000000004 +2024-08-25 03:26:16,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=10293.333333333334, ans=0.125 +2024-08-25 03:26:18,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10293.333333333334, ans=0.19706666666666667 +2024-08-25 03:26:25,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.39 vs. limit=10.146666666666668 +2024-08-25 03:26:26,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10346.666666666666, ans=0.19653333333333334 +2024-08-25 03:26:34,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=10346.666666666666, ans=0.0 +2024-08-25 03:26:38,016 INFO [train.py:1114] (0/4) Epoch 1, batch 1950, loss[loss=0.4437, simple_loss=0.4095, pruned_loss=0.172, ctc_loss=0.3288, over 19597.00 frames. ], tot_loss[loss=0.4691, simple_loss=0.4176, pruned_loss=0.1852, ctc_loss=0.3558, over 3870430.16 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:27:13,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=10560.0, ans=0.02266666666666667 +2024-08-25 03:27:16,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=10560.0, ans=0.09899494936611666 +2024-08-25 03:27:20,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=10560.0, ans=0.5304000000000001 +2024-08-25 03:27:36,518 INFO [train.py:1114] (0/4) Epoch 1, batch 2000, loss[loss=0.3685, simple_loss=0.3452, pruned_loss=0.1399, ctc_loss=0.2798, over 19630.00 frames. ], tot_loss[loss=0.4652, simple_loss=0.4167, pruned_loss=0.1832, ctc_loss=0.3521, over 3853410.72 frames. 
], batch size: 45, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:27:44,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.863e+02 2.508e+02 3.011e+02 3.695e+02 6.472e+02, threshold=6.022e+02, percent-clipped=1.0 +2024-08-25 03:27:51,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.47 vs. limit=15.54 +2024-08-25 03:27:56,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=10720.0, ans=10.36 +2024-08-25 03:28:22,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=10880.0, ans=0.025 +2024-08-25 03:28:25,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.06 vs. limit=11.58 +2024-08-25 03:28:41,985 INFO [train.py:1114] (0/4) Epoch 1, batch 2050, loss[loss=0.3956, simple_loss=0.3742, pruned_loss=0.1495, ctc_loss=0.2951, over 19705.00 frames. ], tot_loss[loss=0.4597, simple_loss=0.4142, pruned_loss=0.1808, ctc_loss=0.3466, over 3850116.75 frames. ], batch size: 47, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:29:13,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=10986.666666666666, ans=0.05 +2024-08-25 03:30:30,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11093.333333333334, ans=0.18906666666666666 +2024-08-25 03:31:02,649 INFO [train.py:1114] (0/4) Epoch 1, batch 2100, loss[loss=0.4473, simple_loss=0.4137, pruned_loss=0.175, ctc_loss=0.3274, over 19773.00 frames. ], tot_loss[loss=0.4534, simple_loss=0.4114, pruned_loss=0.1776, ctc_loss=0.3406, over 3857796.26 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:31:14,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=11200.0, ans=0.020000000000000004 +2024-08-25 03:31:19,370 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.953e+02 2.443e+02 2.901e+02 4.101e+02 7.108e+02, threshold=5.802e+02, percent-clipped=5.0 +2024-08-25 03:31:19,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=11200.0, ans=0.368 +2024-08-25 03:32:16,418 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.28 vs. limit=16.060000000000002 +2024-08-25 03:32:32,802 INFO [train.py:1114] (0/4) Epoch 1, batch 2150, loss[loss=0.3694, simple_loss=0.3604, pruned_loss=0.1361, ctc_loss=0.2658, over 19587.00 frames. ], tot_loss[loss=0.4452, simple_loss=0.4074, pruned_loss=0.1734, ctc_loss=0.333, over 3868506.34 frames. 
], batch size: 52, lr: 4.41e-02, grad_scale: 8.0 +2024-08-25 03:32:44,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=11466.666666666666, ans=0.018888888888888893 +2024-08-25 03:32:50,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=11520.0, ans=0.008365217391304348 +2024-08-25 03:32:54,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=11520.0, ans=0.008365217391304348 +2024-08-25 03:33:14,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=11.84 +2024-08-25 03:33:18,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11573.333333333334, ans=0.18426666666666666 +2024-08-25 03:33:40,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=11626.666666666666, ans=0.125 +2024-08-25 03:33:40,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=11626.666666666666, ans=0.4930666666666667 +2024-08-25 03:33:40,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=11626.666666666666, ans=0.125 +2024-08-25 03:33:57,375 INFO [train.py:1114] (0/4) Epoch 1, batch 2200, loss[loss=0.4024, simple_loss=0.3908, pruned_loss=0.1477, ctc_loss=0.2965, over 19580.00 frames. ], tot_loss[loss=0.4402, simple_loss=0.4053, pruned_loss=0.1708, ctc_loss=0.3281, over 3867344.24 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 8.0 +2024-08-25 03:33:57,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11733.333333333334, ans=0.18266666666666664 +2024-08-25 03:34:05,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11733.333333333334, ans=0.18266666666666664 +2024-08-25 03:34:07,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=11733.333333333334, ans=0.125 +2024-08-25 03:34:08,398 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.911e+02 2.628e+02 3.380e+02 4.438e+02 7.655e+02, threshold=6.760e+02, percent-clipped=12.0 +2024-08-25 03:34:16,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=11786.666666666666, ans=0.01755555555555556 +2024-08-25 03:34:17,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=11786.666666666666, ans=0.07 +2024-08-25 03:34:29,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=11840.0, ans=0.025 +2024-08-25 03:34:45,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.61 vs. 
limit=4.784 +2024-08-25 03:34:47,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=11946.666666666666, ans=0.4818666666666667 +2024-08-25 03:35:01,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.20 vs. limit=16.46 +2024-08-25 03:35:03,311 INFO [train.py:1114] (0/4) Epoch 1, batch 2250, loss[loss=0.4709, simple_loss=0.4257, pruned_loss=0.1849, ctc_loss=0.366, over 19620.00 frames. ], tot_loss[loss=0.437, simple_loss=0.4039, pruned_loss=0.1691, ctc_loss=0.3251, over 3868369.72 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:35:05,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=12000.0, ans=0.125 +2024-08-25 03:35:20,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=12053.333333333334, ans=0.125 +2024-08-25 03:35:24,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.70 vs. limit=16.54 +2024-08-25 03:35:53,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=12213.333333333334, ans=0.015777777777777773 +2024-08-25 03:36:01,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=12213.333333333334, ans=0.015777777777777773 +2024-08-25 03:36:03,067 INFO [train.py:1114] (0/4) Epoch 1, batch 2300, loss[loss=0.3946, simple_loss=0.3789, pruned_loss=0.1464, ctc_loss=0.2937, over 19478.00 frames. ], tot_loss[loss=0.4333, simple_loss=0.402, pruned_loss=0.1673, ctc_loss=0.3214, over 3862515.72 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:36:03,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.63 vs. limit=16.7 +2024-08-25 03:36:12,293 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.546e+02 3.099e+02 3.956e+02 8.242e+02, threshold=6.199e+02, percent-clipped=6.0 +2024-08-25 03:36:26,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.35 vs. limit=16.78 +2024-08-25 03:36:39,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12426.666666666666, ans=0.125 +2024-08-25 03:36:59,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=12533.333333333334, ans=0.125 +2024-08-25 03:37:00,690 INFO [train.py:1114] (0/4) Epoch 1, batch 2350, loss[loss=0.4363, simple_loss=0.4172, pruned_loss=0.1645, ctc_loss=0.316, over 19657.00 frames. ], tot_loss[loss=0.4289, simple_loss=0.3998, pruned_loss=0.1651, ctc_loss=0.3169, over 3864591.13 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:37:24,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.80 vs. 
limit=9.056000000000001 +2024-08-25 03:37:30,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=12640.0, ans=0.008121739130434782 +2024-08-25 03:37:38,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12693.333333333334, ans=0.17306666666666667 +2024-08-25 03:37:48,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=12746.666666666666, ans=0.125 +2024-08-25 03:37:53,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=12746.666666666666, ans=0.125 +2024-08-25 03:37:59,409 INFO [train.py:1114] (0/4) Epoch 1, batch 2400, loss[loss=0.4701, simple_loss=0.433, pruned_loss=0.1842, ctc_loss=0.3469, over 19375.00 frames. ], tot_loss[loss=0.4297, simple_loss=0.4016, pruned_loss=0.1652, ctc_loss=0.3165, over 3860114.29 frames. ], batch size: 67, lr: 4.39e-02, grad_scale: 16.0 +2024-08-25 03:38:00,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=12800.0, ans=0.125 +2024-08-25 03:38:08,242 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.522e+02 3.053e+02 3.990e+02 1.210e+03, threshold=6.106e+02, percent-clipped=3.0 +2024-08-25 03:38:16,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=12853.333333333334, ans=0.00807536231884058 +2024-08-25 03:38:43,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=12960.0, ans=0.125 +2024-08-25 03:39:03,886 INFO [train.py:1114] (0/4) Epoch 1, batch 2450, loss[loss=0.5474, simple_loss=0.4643, pruned_loss=0.2311, ctc_loss=0.4204, over 13585.00 frames. ], tot_loss[loss=0.4378, simple_loss=0.4064, pruned_loss=0.1694, ctc_loss=0.3242, over 3739887.67 frames. ], batch size: 140, lr: 4.39e-02, grad_scale: 16.0 +2024-08-25 03:39:09,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.01 vs. limit=17.3 +2024-08-25 03:39:13,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.64 vs. limit=12.4 +2024-08-25 03:39:16,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=13120.0, ans=0.125 +2024-08-25 03:39:19,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.80 vs. 
limit=17.34 +2024-08-25 03:39:32,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=13173.333333333334, ans=0.125 +2024-08-25 03:39:39,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=13226.666666666666, ans=0.125 +2024-08-25 03:39:48,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=13226.666666666666, ans=0.125 +2024-08-25 03:39:50,244 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-1.pt +2024-08-25 03:40:42,880 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=2.510e-03 +2024-08-25 03:40:43,722 INFO [train.py:1114] (0/4) Epoch 2, batch 0, loss[loss=0.4069, simple_loss=0.3795, pruned_loss=0.1585, ctc_loss=0.2935, over 19411.00 frames. ], tot_loss[loss=0.4069, simple_loss=0.3795, pruned_loss=0.1585, ctc_loss=0.2935, over 19411.00 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 03:40:43,722 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 03:40:55,171 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.3317, simple_loss=0.3718, pruned_loss=0.1058, ctc_loss=0.2, over 944034.00 frames. +2024-08-25 03:40:55,172 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 03:41:11,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13333.333333333334, ans=0.125 +2024-08-25 03:41:17,118 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.388e+02 2.818e+02 3.444e+02 6.577e+02, threshold=5.636e+02, percent-clipped=3.0 +2024-08-25 03:41:20,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13386.666666666666, ans=0.16613333333333333 +2024-08-25 03:41:21,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 03:41:25,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=13386.666666666666, ans=0.4314666666666667 +2024-08-25 03:41:57,951 INFO [train.py:1114] (0/4) Epoch 2, batch 50, loss[loss=0.3601, simple_loss=0.3509, pruned_loss=0.1328, ctc_loss=0.2593, over 19712.00 frames. ], tot_loss[loss=0.428, simple_loss=0.4029, pruned_loss=0.164, ctc_loss=0.3128, over 843639.75 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:42:07,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13546.666666666666, ans=0.125 +2024-08-25 03:42:13,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=13600.0, ans=0.125 +2024-08-25 03:42:30,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=13653.333333333334, ans=0.00790144927536232 +2024-08-25 03:43:26,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. 
limit=5.064 +2024-08-25 03:43:33,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=13760.0, ans=0.125 +2024-08-25 03:43:36,879 INFO [train.py:1114] (0/4) Epoch 2, batch 100, loss[loss=0.3628, simple_loss=0.3692, pruned_loss=0.1291, ctc_loss=0.2456, over 19726.00 frames. ], tot_loss[loss=0.4224, simple_loss=0.4011, pruned_loss=0.1605, ctc_loss=0.3069, over 1499110.32 frames. ], batch size: 51, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:43:44,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13813.333333333334, ans=0.16186666666666666 +2024-08-25 03:43:51,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=13866.666666666666, ans=0.125 +2024-08-25 03:43:54,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=13866.666666666666, ans=0.125 +2024-08-25 03:44:02,828 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.500e+02 2.916e+02 3.893e+02 6.295e+02, threshold=5.832e+02, percent-clipped=2.0 +2024-08-25 03:44:06,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=13920.0, ans=0.125 +2024-08-25 03:44:12,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13920.0, ans=0.1608 +2024-08-25 03:44:13,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.48 vs. limit=9.568 +2024-08-25 03:44:33,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.67 vs. limit=12.76 +2024-08-25 03:44:38,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=14026.666666666666, ans=0.025 +2024-08-25 03:44:42,785 INFO [train.py:1114] (0/4) Epoch 2, batch 150, loss[loss=0.3602, simple_loss=0.358, pruned_loss=0.1297, ctc_loss=0.2577, over 19727.00 frames. ], tot_loss[loss=0.4141, simple_loss=0.3957, pruned_loss=0.1562, ctc_loss=0.3001, over 2027714.20 frames. 
], batch size: 47, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:44:42,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=14080.0, ans=0.4072 +2024-08-25 03:45:12,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=14186.666666666666, ans=0.125 +2024-08-25 03:45:19,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=14240.0, ans=0.125 +2024-08-25 03:45:24,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=14240.0, ans=0.125 +2024-08-25 03:45:34,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=14293.333333333334, ans=0.007762318840579711 +2024-08-25 03:45:34,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=14293.333333333334, ans=0.10706666666666664 +2024-08-25 03:45:35,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.72 vs. limit=18.22 +2024-08-25 03:45:42,121 INFO [train.py:1114] (0/4) Epoch 2, batch 200, loss[loss=0.4582, simple_loss=0.4264, pruned_loss=0.1789, ctc_loss=0.3307, over 18298.00 frames. ], tot_loss[loss=0.4074, simple_loss=0.3919, pruned_loss=0.1527, ctc_loss=0.294, over 2435029.92 frames. ], batch size: 85, lr: 4.28e-02, grad_scale: 16.0 +2024-08-25 03:45:59,985 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:46:06,457 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.951e+02 2.445e+02 2.940e+02 3.728e+02 6.995e+02, threshold=5.880e+02, percent-clipped=3.0 +2024-08-25 03:46:44,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.82 vs. limit=18.42 +2024-08-25 03:46:44,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=14613.333333333334, ans=0.125 +2024-08-25 03:46:45,925 INFO [train.py:1114] (0/4) Epoch 2, batch 250, loss[loss=0.4519, simple_loss=0.418, pruned_loss=0.174, ctc_loss=0.3443, over 19414.00 frames. ], tot_loss[loss=0.4036, simple_loss=0.3895, pruned_loss=0.1509, ctc_loss=0.2896, over 2754771.79 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 16.0 +2024-08-25 03:47:04,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=14666.666666666666, ans=0.3866666666666667 +2024-08-25 03:47:08,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=14720.0, ans=0.125 +2024-08-25 03:47:11,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=14720.0, ans=0.125 +2024-08-25 03:47:26,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.73 vs. 
limit=13.04 +2024-08-25 03:47:39,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=14826.666666666666, ans=0.125 +2024-08-25 03:47:41,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.82 vs. limit=18.619999999999997 +2024-08-25 03:47:42,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14826.666666666666, ans=0.15173333333333333 +2024-08-25 03:47:50,864 INFO [train.py:1114] (0/4) Epoch 2, batch 300, loss[loss=0.4161, simple_loss=0.4035, pruned_loss=0.1558, ctc_loss=0.293, over 19540.00 frames. ], tot_loss[loss=0.4003, simple_loss=0.3878, pruned_loss=0.1491, ctc_loss=0.2863, over 2999480.96 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 16.0 +2024-08-25 03:47:51,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=14880.0, ans=0.3792 +2024-08-25 03:47:53,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=14880.0, ans=0.3792 +2024-08-25 03:48:02,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. limit=5.24 +2024-08-25 03:48:13,149 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.396e+02 2.818e+02 3.488e+02 8.647e+02, threshold=5.636e+02, percent-clipped=6.0 +2024-08-25 03:48:32,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=15040.0, ans=0.0040000000000000036 +2024-08-25 03:48:48,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=15093.333333333334, ans=0.02 +2024-08-25 03:48:50,387 INFO [train.py:1114] (0/4) Epoch 2, batch 350, loss[loss=0.3351, simple_loss=0.3468, pruned_loss=0.1165, ctc_loss=0.2264, over 19747.00 frames. ], tot_loss[loss=0.3997, simple_loss=0.388, pruned_loss=0.1487, ctc_loss=0.2848, over 3189626.37 frames. ], batch size: 48, lr: 4.27e-02, grad_scale: 16.0 +2024-08-25 03:48:59,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.63 vs. limit=13.18 +2024-08-25 03:49:02,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=15200.0, ans=0.003333333333333334 +2024-08-25 03:49:38,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-25 03:50:10,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=13.26 +2024-08-25 03:50:17,378 INFO [train.py:1114] (0/4) Epoch 2, batch 400, loss[loss=0.383, simple_loss=0.3789, pruned_loss=0.1393, ctc_loss=0.2714, over 19505.00 frames. ], tot_loss[loss=0.3958, simple_loss=0.3856, pruned_loss=0.1467, ctc_loss=0.2813, over 3341816.65 frames. 
+2024-08-25 03:50:39,707 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.407e+02 2.984e+02 3.456e+02 5.488e+02, threshold=5.968e+02, percent-clipped=0.0
+2024-08-25 03:51:19,335 INFO [train.py:1114] (0/4) Epoch 2, batch 450, loss[loss=0.3733, simple_loss=0.3832, pruned_loss=0.1303, ctc_loss=0.257, over 19618.00 frames. ], tot_loss[loss=0.3963, simple_loss=0.3857, pruned_loss=0.1472, ctc_loss=0.2811, over 3449617.60 frames. ], batch size: 55, lr: 4.26e-02, grad_scale: 32.0
+2024-08-25 03:52:21,861 INFO [train.py:1114] (0/4) Epoch 2, batch 500, loss[loss=0.3874, simple_loss=0.3946, pruned_loss=0.1375, ctc_loss=0.2627, over 19670.00 frames. ], tot_loss[loss=0.3932, simple_loss=0.3837, pruned_loss=0.1457, ctc_loss=0.2785, over 3545631.43 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 16.0
+2024-08-25 03:53:11,995 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.425e+02 3.079e+02 3.995e+02 1.154e+03, threshold=6.159e+02, percent-clipped=13.0
+2024-08-25 03:53:49,958 INFO [train.py:1114] (0/4) Epoch 2, batch 550, loss[loss=0.4051, simple_loss=0.3967, pruned_loss=0.1489, ctc_loss=0.2895, over 19306.00 frames. ], tot_loss[loss=0.3915, simple_loss=0.3831, pruned_loss=0.1447, ctc_loss=0.2763, over 3608461.94 frames. ], batch size: 71, lr: 4.25e-02, grad_scale: 16.0
+2024-08-25 03:54:51,530 INFO [train.py:1114] (0/4) Epoch 2, batch 600, loss[loss=0.4379, simple_loss=0.4132, pruned_loss=0.1698, ctc_loss=0.3074, over 19382.00 frames. ], tot_loss[loss=0.3901, simple_loss=0.3829, pruned_loss=0.1438, ctc_loss=0.2745, over 3666275.06 frames. ], batch size: 67, lr: 4.24e-02, grad_scale: 16.0
+2024-08-25 03:55:14,972 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.336e+02 2.753e+02 3.494e+02 8.105e+02, threshold=5.507e+02, percent-clipped=1.0
+2024-08-25 03:55:56,157 INFO [train.py:1114] (0/4) Epoch 2, batch 650, loss[loss=0.3786, simple_loss=0.3846, pruned_loss=0.1341, ctc_loss=0.2608, over 19784.00 frames. ], tot_loss[loss=0.3876, simple_loss=0.3812, pruned_loss=0.1426, ctc_loss=0.2719, over 3716432.68 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 16.0
+2024-08-25 03:56:56,409 INFO [train.py:1114] (0/4) Epoch 2, batch 700, loss[loss=0.385, simple_loss=0.3786, pruned_loss=0.1419, ctc_loss=0.2688, over 19706.00 frames. ], tot_loss[loss=0.3895, simple_loss=0.3823, pruned_loss=0.1436, ctc_loss=0.2736, over 3747743.13 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 16.0
+2024-08-25 03:57:23,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.791e+02 2.519e+02 2.895e+02 3.628e+02 6.087e+02, threshold=5.790e+02, percent-clipped=2.0
+2024-08-25 03:58:01,086 INFO [train.py:1114] (0/4) Epoch 2, batch 750, loss[loss=0.4175, simple_loss=0.4001, pruned_loss=0.1578, ctc_loss=0.2985, over 19521.00 frames. ], tot_loss[loss=0.3875, simple_loss=0.3813, pruned_loss=0.1425, ctc_loss=0.2715, over 3774462.36 frames. ], batch size: 54, lr: 4.23e-02, grad_scale: 16.0
+2024-08-25 04:00:16,095 INFO [train.py:1114] (0/4) Epoch 2, batch 800, loss[loss=0.3199, simple_loss=0.3308, pruned_loss=0.1114, ctc_loss=0.2158, over 19396.00 frames. ], tot_loss[loss=0.3846, simple_loss=0.3797, pruned_loss=0.141, ctc_loss=0.2688, over 3795728.73 frames. ], batch size: 48, lr: 4.22e-02, grad_scale: 32.0
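
grad_scale in each batch summary is the dynamic loss scale of mixed-precision (AMP) training, which is why it moves between 8.0, 16.0 and 32.0 over the epoch: the scaler doubles the scale after a long run of finite gradients and halves it when an overflow forces a skipped step. The standard PyTorch pattern looks like this (a generic sketch, not the actual train.py loop):

```python
import torch

model = torch.nn.Linear(80, 512).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=4.2e-2)
scaler = torch.cuda.amp.GradScaler(init_scale=16.0)  # cf. "grad_scale: 16.0"

for step in range(100):
    features = torch.randn(8, 80, device="cuda")
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():           # forward in fp16 where safe
        loss = model(features).pow(2).mean()
    scaler.scale(loss).backward()             # scale up to avoid fp16 underflow
    scaler.step(optimizer)                    # unscales; skips the step on inf/nan
    scaler.update()                           # halves on overflow, doubles later
```
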
+2024-08-25 04:00:39,334 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.611e+02 3.088e+02 3.881e+02 9.768e+02, threshold=6.176e+02, percent-clipped=6.0
+2024-08-25 04:01:15,038 INFO [train.py:1114] (0/4) Epoch 2, batch 850, loss[loss=0.4252, simple_loss=0.4128, pruned_loss=0.1595, ctc_loss=0.2964, over 19654.00 frames. ], tot_loss[loss=0.3825, simple_loss=0.3787, pruned_loss=0.1398, ctc_loss=0.2667, over 3814410.93 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 32.0
+2024-08-25 04:02:18,991 INFO [train.py:1114] (0/4) Epoch 2, batch 900, loss[loss=0.3214, simple_loss=0.3366, pruned_loss=0.1107, ctc_loss=0.2122, over 19409.00 frames. ], tot_loss[loss=0.3835, simple_loss=0.3791, pruned_loss=0.1405, ctc_loss=0.2672, over 3817568.43 frames. ], batch size: 48, lr: 4.21e-02, grad_scale: 8.0
+2024-08-25 04:03:03,823 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.814e+02 2.530e+02 3.033e+02 3.602e+02 3.379e+03, threshold=6.066e+02, percent-clipped=6.0
+2024-08-25 04:03:36,933 INFO [train.py:1114] (0/4) Epoch 2, batch 950, loss[loss=0.3467, simple_loss=0.3445, pruned_loss=0.1261, ctc_loss=0.2421, over 19504.00 frames. ], tot_loss[loss=0.3821, simple_loss=0.3783, pruned_loss=0.1398, ctc_loss=0.2656, over 3819324.98 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0
+2024-08-25 04:04:39,326 INFO [train.py:1114] (0/4) Epoch 2, batch 1000, loss[loss=0.3341, simple_loss=0.347, pruned_loss=0.1161, ctc_loss=0.2227, over 19845.00 frames. ], tot_loss[loss=0.3819, simple_loss=0.3785, pruned_loss=0.1396, ctc_loss=0.2652, over 3815946.82 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0
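
Each train.py:1114 line reports two sets of metrics: loss[...] for the current batch and tot_loss[...], a running average weighted by the number of acoustic frames seen so far, which is why tot_loss moves much more slowly than the per-batch numbers. The bookkeeping amounts to the following (an illustrative reimplementation, roughly what icefall's MetricsTracker does; the logged tot_loss also includes all earlier batches in the epoch):

```python
class FrameWeightedAverage:
    """Running frame-weighted average, like the tot_loss[...] fields."""

    def __init__(self):
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.weighted_sum += batch_loss * batch_frames
        self.frames += batch_frames
        return self.weighted_sum / self.frames

tot = FrameWeightedAverage()
tot.update(0.3467, 19504.0)         # batch 950's per-batch loss
print(tot.update(0.3341, 19845.0))  # ~0.3403: weighted toward larger batches
```
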
+2024-08-25 04:05:05,789 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.321e+02 2.743e+02 3.485e+02 6.350e+02, threshold=5.486e+02, percent-clipped=2.0
+2024-08-25 04:05:41,818 INFO [train.py:1114] (0/4) Epoch 2, batch 1050, loss[loss=0.3451, simple_loss=0.3612, pruned_loss=0.1188, ctc_loss=0.2284, over 19838.00 frames. ], tot_loss[loss=0.3803, simple_loss=0.3774, pruned_loss=0.1389, ctc_loss=0.2637, over 3823718.17 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0
+2024-08-25 04:06:44,167 INFO [train.py:1114] (0/4) Epoch 2, batch 1100, loss[loss=0.3337, simple_loss=0.3465, pruned_loss=0.1146, ctc_loss=0.2293, over 19565.00 frames. ], tot_loss[loss=0.38, simple_loss=0.377, pruned_loss=0.1388, ctc_loss=0.2632, over 3831878.62 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0
+2024-08-25 04:07:11,086 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.465e+02 2.960e+02 4.039e+02 7.406e+02, threshold=5.919e+02, percent-clipped=11.0
+2024-08-25 04:08:08,069 INFO [train.py:1114] (0/4) Epoch 2, batch 1150, loss[loss=0.3876, simple_loss=0.3776, pruned_loss=0.1434, ctc_loss=0.2767, over 19587.00 frames. ], tot_loss[loss=0.3785, simple_loss=0.3764, pruned_loss=0.138, ctc_loss=0.2615, over 3831567.54 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0
+2024-08-25 04:09:08,085 INFO [train.py:1114] (0/4) Epoch 2, batch 1200, loss[loss=0.3908, simple_loss=0.3923, pruned_loss=0.1407, ctc_loss=0.2696, over 19835.00 frames. ], tot_loss[loss=0.3791, simple_loss=0.3771, pruned_loss=0.1382, ctc_loss=0.2618, over 3827106.49 frames. ], batch size: 57, lr: 4.18e-02, grad_scale: 16.0
+2024-08-25 04:09:36,232 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 2.637e+02 3.065e+02 4.000e+02 6.600e+02, threshold=6.130e+02, percent-clipped=2.0
+2024-08-25 04:10:11,986 INFO [train.py:1114] (0/4) Epoch 2, batch 1250, loss[loss=0.3997, simple_loss=0.3953, pruned_loss=0.145, ctc_loss=0.2855, over 19537.00 frames. ], tot_loss[loss=0.3763, simple_loss=0.376, pruned_loss=0.1365, ctc_loss=0.2586, over 3844123.46 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0
+2024-08-25 04:11:15,941 INFO [train.py:1114] (0/4) Epoch 2, batch 1300, loss[loss=0.4363, simple_loss=0.4174, pruned_loss=0.1657, ctc_loss=0.3098, over 18852.00 frames. ], tot_loss[loss=0.3745, simple_loss=0.375, pruned_loss=0.1355, ctc_loss=0.2571, over 3845391.14 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0
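
The three components inside every loss[...] record match the zipformer pruned-transducer recipe's objective: simple_loss from the cheap linear joiner, pruned_loss from the full joiner evaluated only inside the pruning bounds, plus an auxiliary ctc_loss head. The logged totals are consistent with a weighted sum using 0.5 on the simple term and 0.2 on the CTC term (batch 1300 above: 0.5 · 0.4174 + 0.1657 + 0.2 · 0.3098 ≈ 0.4363); treat those two weights as inferred from the numbers rather than read from train.py:

```python
import torch

def total_loss(simple_loss: torch.Tensor,
               pruned_loss: torch.Tensor,
               ctc_loss: torch.Tensor,
               simple_scale: float = 0.5,  # inferred from the logged totals
               ctc_scale: float = 0.2) -> torch.Tensor:
    """Weighted sum of the three terms printed in each loss[...] record."""
    return simple_scale * simple_loss + pruned_loss + ctc_scale * ctc_loss

# Batch 1300 above: 0.5 * 0.4174 + 0.1657 + 0.2 * 0.3098 = 0.4364 ~ 0.4363
print(total_loss(torch.tensor(0.4174), torch.tensor(0.1657), torch.tensor(0.3098)))
```
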
+2024-08-25 04:11:41,991 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.187e+02 2.429e+02 2.931e+02 4.736e+02, threshold=4.858e+02, percent-clipped=0.0
+2024-08-25 04:12:15,265 INFO [train.py:1114] (0/4) Epoch 2, batch 1350, loss[loss=0.3335, simple_loss=0.359, pruned_loss=0.112, ctc_loss=0.2099, over 19753.00 frames. ], tot_loss[loss=0.3717, simple_loss=0.3735, pruned_loss=0.1341, ctc_loss=0.2543, over 3856086.37 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 16.0
limit=22.5 +2024-08-25 04:13:03,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=20640.0, ans=0.2 +2024-08-25 04:13:10,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=20693.333333333332, ans=0.125 +2024-08-25 04:13:10,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=20693.333333333332, ans=0.0 +2024-08-25 04:13:18,518 INFO [train.py:1114] (0/4) Epoch 2, batch 1400, loss[loss=0.2968, simple_loss=0.3085, pruned_loss=0.1043, ctc_loss=0.1911, over 19657.00 frames. ], tot_loss[loss=0.3697, simple_loss=0.3725, pruned_loss=0.133, ctc_loss=0.2522, over 3863322.05 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:13:36,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=20800.0, ans=0.0 +2024-08-25 04:14:03,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.385e+02 2.674e+02 3.744e+02 6.684e+02, threshold=5.347e+02, percent-clipped=6.0 +2024-08-25 04:14:12,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=20906.666666666668, ans=0.0 +2024-08-25 04:14:18,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=20906.666666666668, ans=0.04949747468305833 +2024-08-25 04:14:18,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20906.666666666668, ans=0.1 +2024-08-25 04:14:37,932 INFO [train.py:1114] (0/4) Epoch 2, batch 1450, loss[loss=0.3556, simple_loss=0.375, pruned_loss=0.1231, ctc_loss=0.225, over 19700.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.3727, pruned_loss=0.1329, ctc_loss=0.2515, over 3861891.40 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:15:59,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=21120.0, ans=0.2 +2024-08-25 04:16:15,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=21173.333333333332, ans=0.025 +2024-08-25 04:16:16,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=21173.333333333332, ans=0.125 +2024-08-25 04:16:23,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=21226.666666666668, ans=0.125 +2024-08-25 04:16:26,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21226.666666666668, ans=0.1 +2024-08-25 04:16:28,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21226.666666666668, ans=0.0 +2024-08-25 04:16:28,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=21226.666666666668, ans=0.125 +2024-08-25 04:16:33,105 INFO [train.py:1114] (0/4) Epoch 2, batch 1500, loss[loss=0.3554, simple_loss=0.377, pruned_loss=0.1195, ctc_loss=0.237, over 19567.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.3726, pruned_loss=0.1324, ctc_loss=0.2501, over 3860597.06 frames. 
+2024-08-25 04:16:44,294 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-4000.pt
+2024-08-25 04:17:08,008 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.509e+02 2.906e+02 4.274e+02 8.598e+02, threshold=5.813e+02, percent-clipped=13.0
+2024-08-25 04:17:42,731 INFO [train.py:1114] (0/4) Epoch 2, batch 1550, loss[loss=0.3915, simple_loss=0.3955, pruned_loss=0.1401, ctc_loss=0.268, over 19590.00 frames. ], tot_loss[loss=0.3692, simple_loss=0.3727, pruned_loss=0.1328, ctc_loss=0.2505, over 3846009.12 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0
+2024-08-25 04:18:44,965 INFO [train.py:1114] (0/4) Epoch 2, batch 1600, loss[loss=0.3852, simple_loss=0.3881, pruned_loss=0.1374, ctc_loss=0.2684, over 19845.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.3721, pruned_loss=0.1325, ctc_loss=0.2505, over 3836526.19 frames. ], batch size: 57, lr: 4.13e-02, grad_scale: 32.0
+2024-08-25 04:19:13,747 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.370e+02 2.902e+02 3.664e+02 6.938e+02, threshold=5.803e+02, percent-clipped=2.0
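
The checkpoint.py:75 lines mark the recipe's periodic batch-level saves (checkpoint-4000.pt here) alongside end-of-epoch saves such as the epoch-2.pt seen further down. For mid-epoch resumption a checkpoint has to capture more than the weights; a generic sketch of what such a save collects (not icefall's checkpoint.py):

```python
import torch

def save_checkpoint(path, model, optimizer, scheduler, scaler, epoch, batch_idx):
    """Persist everything needed to resume training mid-epoch."""
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "grad_scaler": scaler.state_dict(),  # keeps grad_scale continuity
            "epoch": epoch,
            "batch_idx": batch_idx,
        },
        path,
    )
```
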
+2024-08-25 04:19:49,387 INFO [train.py:1114] (0/4) Epoch 2, batch 1650, loss[loss=0.4057, simple_loss=0.4005, pruned_loss=0.1481, ctc_loss=0.2867, over 19663.00 frames. ], tot_loss[loss=0.3686, simple_loss=0.3717, pruned_loss=0.1326, ctc_loss=0.2505, over 3833014.04 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 32.0
+2024-08-25 04:20:07,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=12.0
+2024-08-25 04:20:48,548 INFO [train.py:1114] (0/4) Epoch 2, batch 1700, loss[loss=0.3328, simple_loss=0.3327, pruned_loss=0.121, ctc_loss=0.2273, over 19663.00 frames. ], tot_loss[loss=0.3656, simple_loss=0.3703, pruned_loss=0.1309, ctc_loss=0.2474, over 3847569.20 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0
+2024-08-25 04:21:16,630 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.264e+02 2.715e+02 3.253e+02 5.462e+02, threshold=5.430e+02, percent-clipped=0.0
+2024-08-25 04:21:48,270 INFO [train.py:1114] (0/4) Epoch 2, batch 1750, loss[loss=0.3342, simple_loss=0.3361, pruned_loss=0.1211, ctc_loss=0.2255, over 19718.00 frames. ], tot_loss[loss=0.363, simple_loss=0.3684, pruned_loss=0.1298, ctc_loss=0.245, over 3852085.59 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 32.0
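
The Whitening lines (scaling.py:1024) compare a covariance-spread statistic of a module's activations against a scheduled limit, beyond which a whitening penalty engages; the entry kept above shows a comfortable margin (4.68 vs. a limit of 12.0). One standard way to build such a metric, equal to 1.0 for perfectly white features and approaching num_channels as the variance collapses onto one direction, is sketched below (an illustrative construction, not necessarily scaling.py's exact formula):

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns d * sum(l_i^2) / (sum l_i)^2
    over covariance eigenvalues l_i: 1.0 if white, up to d if degenerate."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    d = cov.shape[0]
    return (d * (cov @ cov).trace() / cov.trace() ** 2).item()

feats = torch.randn(1000, 256)   # near-white features: metric close to 1
print(whitening_metric(feats))
feats[:, 0] *= 30.0              # one dominant direction: metric shoots up
print(whitening_metric(feats))
```
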
+2024-08-25 04:23:02,448 INFO [train.py:1114] (0/4) Epoch 2, batch 1800, loss[loss=0.3738, simple_loss=0.3766, pruned_loss=0.1348, ctc_loss=0.2536, over 19612.00 frames. ], tot_loss[loss=0.3632, simple_loss=0.3686, pruned_loss=0.1299, ctc_loss=0.2452, over 3853623.10 frames. ], batch size: 55, lr: 4.11e-02, grad_scale: 32.0
+2024-08-25 04:23:28,018 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.805e+02 2.473e+02 2.913e+02 3.585e+02 6.262e+02, threshold=5.825e+02, percent-clipped=5.0
+2024-08-25 04:23:59,520 INFO [train.py:1114] (0/4) Epoch 2, batch 1850, loss[loss=0.3558, simple_loss=0.3728, pruned_loss=0.1235, ctc_loss=0.2297, over 19567.00 frames. ], tot_loss[loss=0.3606, simple_loss=0.367, pruned_loss=0.1286, ctc_loss=0.2428, over 3856613.79 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 32.0
+2024-08-25 04:24:56,426 INFO [train.py:1114] (0/4) Epoch 2, batch 1900, loss[loss=0.3601, simple_loss=0.3764, pruned_loss=0.1237, ctc_loss=0.2414, over 19660.00 frames. ], tot_loss[loss=0.3611, simple_loss=0.3676, pruned_loss=0.1287, ctc_loss=0.243, over 3861488.72 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 32.0
+2024-08-25 04:25:21,307 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.247e+02 2.781e+02 3.399e+02 7.136e+02, threshold=5.561e+02, percent-clipped=3.0
+2024-08-25 04:25:55,301 INFO [train.py:1114] (0/4) Epoch 2, batch 1950, loss[loss=0.3441, simple_loss=0.3516, pruned_loss=0.1221, ctc_loss=0.2308, over 19592.00 frames. ], tot_loss[loss=0.3616, simple_loss=0.3684, pruned_loss=0.1288, ctc_loss=0.243, over 3870074.09 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 32.0
+2024-08-25 04:26:54,474 INFO [train.py:1114] (0/4) Epoch 2, batch 2000, loss[loss=0.3721, simple_loss=0.3574, pruned_loss=0.1402, ctc_loss=0.2662, over 19656.00 frames. ], tot_loss[loss=0.3639, simple_loss=0.3699, pruned_loss=0.13, ctc_loss=0.245, over 3855218.47 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0
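
The lr field decays smoothly within the epoch (4.29e-02 at the top of this excerpt down to 4.03e-02 by batch 2450) and then steps down to 3.83e-02 when epoch 3 begins, the signature of icefall's Eden scheduler, which discounts the base rate by both a batch-count factor and an epoch factor. A sketch of that shape (base_lr, lr_batches and lr_epochs below are placeholder values, not this run's settings):

```python
def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 4.0) -> float:
    """Eden-style decay: smooth in batch count, stepping down with epochs."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

for epoch, batch in [(2, 15000.0), (2, 25000.0), (3, 26500.0)]:
    print(epoch, batch, round(eden_lr(0.065, batch, epoch), 5))  # smooth, then a step
```
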
+2024-08-25 04:27:20,441 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.739e+02 2.625e+02 3.128e+02 3.968e+02 6.078e+02, threshold=6.255e+02, percent-clipped=2.0
+2024-08-25 04:27:51,124 INFO [train.py:1114] (0/4) Epoch 2, batch 2050, loss[loss=0.2713, simple_loss=0.3073, pruned_loss=0.0837, ctc_loss=0.17, over 19714.00 frames. ], tot_loss[loss=0.3637, simple_loss=0.3693, pruned_loss=0.13, ctc_loss=0.2454, over 3852140.17 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 04:28:47,780 INFO [train.py:1114] (0/4) Epoch 2, batch 2100, loss[loss=0.3298, simple_loss=0.3476, pruned_loss=0.1137, ctc_loss=0.212, over 19792.00 frames. ], tot_loss[loss=0.3601, simple_loss=0.3673, pruned_loss=0.1281, ctc_loss=0.242, over 3859095.84 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 04:29:14,124 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.675e+02 2.311e+02 2.619e+02 3.137e+02 5.086e+02, threshold=5.238e+02, percent-clipped=0.0
+2024-08-25 04:29:44,340 INFO [train.py:1114] (0/4) Epoch 2, batch 2150, loss[loss=0.326, simple_loss=0.3462, pruned_loss=0.1107, ctc_loss=0.211, over 19582.00 frames. ], tot_loss[loss=0.3569, simple_loss=0.3653, pruned_loss=0.1265, ctc_loss=0.2387, over 3869528.29 frames. ], batch size: 52, lr: 4.07e-02, grad_scale: 32.0
+2024-08-25 04:30:39,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=25013.333333333332, ans=0.125
+2024-08-25 04:30:40,040 INFO [train.py:1114] (0/4) Epoch 2, batch 2200, loss[loss=0.3349, simple_loss=0.3534, pruned_loss=0.1149, ctc_loss=0.2165, over 19607.00 frames. ], tot_loss[loss=0.3571, simple_loss=0.3656, pruned_loss=0.1265, ctc_loss=0.2388, over 3867725.96 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0
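
The balancer entries (min_positive, max_positive, min_abs, max_abs, and the prob value kept above) belong to the recipe's Balancer modules, which keep per-channel activation statistics inside a target range by adding a small correction to the gradient in the backward pass, applied stochastically with probability prob (here 0.125). A heavily simplified sketch of the backward-pass trick (illustrative only, not scaling.py's implementation):

```python
import torch

class SimpleBalancer(torch.autograd.Function):
    """Toy balancer: in backward, nudge each channel whose fraction of
    positive activations has left [min_pos, max_pos]."""

    @staticmethod
    def forward(ctx, x, min_pos=0.05, max_pos=0.95, scale=0.01):
        ctx.save_for_backward(x)
        ctx.min_pos, ctx.max_pos, ctx.scale = min_pos, max_pos, scale
        return x

    @staticmethod
    def backward(ctx, grad_out):
        (x,) = ctx.saved_tensors
        pos_frac = (x > 0).float().mean(dim=0)            # per-channel statistic
        push = torch.zeros_like(pos_frac)
        push[pos_frac < ctx.min_pos] = -1.0               # raise activations
        push[pos_frac > ctx.max_pos] = 1.0                # lower activations
        extra = ctx.scale * grad_out.abs().mean() * push  # tiny correction
        return grad_out + extra, None, None, None

x = torch.randn(16, 384, requires_grad=True)
SimpleBalancer.apply(x).sum().backward()  # x.grad now includes the nudge
```
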
+2024-08-25 04:31:06,350 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.398e+02 2.814e+02 3.505e+02 8.042e+02, threshold=5.628e+02, percent-clipped=3.0
+2024-08-25 04:31:37,496 INFO [train.py:1114] (0/4) Epoch 2, batch 2250, loss[loss=0.3802, simple_loss=0.3787, pruned_loss=0.1382, ctc_loss=0.263, over 19629.00 frames. ], tot_loss[loss=0.3565, simple_loss=0.3651, pruned_loss=0.1263, ctc_loss=0.2382, over 3866861.28 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 04:32:33,459 INFO [train.py:1114] (0/4) Epoch 2, batch 2300, loss[loss=0.3605, simple_loss=0.3629, pruned_loss=0.131, ctc_loss=0.2404, over 19494.00 frames. ], tot_loss[loss=0.3563, simple_loss=0.3644, pruned_loss=0.1264, ctc_loss=0.2383, over 3861320.93 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 16.0
+2024-08-25 04:33:03,049 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.317e+02 2.709e+02 3.466e+02 6.027e+02, threshold=5.417e+02, percent-clipped=4.0
+2024-08-25 04:33:32,456 INFO [train.py:1114] (0/4) Epoch 2, batch 2350, loss[loss=0.3885, simple_loss=0.3859, pruned_loss=0.1429, ctc_loss=0.2632, over 19671.00 frames. ], tot_loss[loss=0.3565, simple_loss=0.3646, pruned_loss=0.1266, ctc_loss=0.2379, over 3864359.88 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 16.0
+2024-08-25 04:34:30,672 INFO [train.py:1114] (0/4) Epoch 2, batch 2400, loss[loss=0.3307, simple_loss=0.3618, pruned_loss=0.1073, ctc_loss=0.2124, over 19337.00 frames. ], tot_loss[loss=0.3588, simple_loss=0.3669, pruned_loss=0.1274, ctc_loss=0.2397, over 3858931.03 frames. ], batch size: 67, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 04:34:57,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.789e+02 2.184e+02 2.505e+02 3.102e+02 8.045e+02, threshold=5.010e+02, percent-clipped=5.0
+2024-08-25 04:35:27,217 INFO [train.py:1114] (0/4) Epoch 2, batch 2450, loss[loss=0.4359, simple_loss=0.4027, pruned_loss=0.1718, ctc_loss=0.3139, over 13904.00 frames. ], tot_loss[loss=0.3694, simple_loss=0.3728, pruned_loss=0.1331, ctc_loss=0.2494, over 3730699.37 frames. ], batch size: 140, lr: 4.03e-02, grad_scale: 32.0
+2024-08-25 04:36:11,219 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-2.pt
+2024-08-25 04:36:55,764 INFO [train.py:1114] (0/4) Epoch 3, batch 0, loss[loss=0.3357, simple_loss=0.3463, pruned_loss=0.1184, ctc_loss=0.2208, over 19791.00 frames. ], tot_loss[loss=0.3357, simple_loss=0.3463, pruned_loss=0.1184, ctc_loss=0.2208, over 19791.00 frames. ], batch size: 49, lr: 3.83e-02, grad_scale: 32.0
+2024-08-25 04:36:55,765 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-25 04:37:08,018 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2847, simple_loss=0.3461, pruned_loss=0.08168, ctc_loss=0.1499, over 944034.00 frames. 
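
Batch 0 of each epoch triggers a validation pass (train.py:1137-1147): the frame-weighted validation metrics follow (loss 0.2847 over 944034 frames, well below the training tot_loss, partly because dropout and the scheduled regularizers are inactive under model.eval()), then the peak GPU memory. A generic sketch of such a pass; compute_loss below stands in for the recipe's actual loss computation and is an assumed helper:

```python
import torch

def validate(model, valid_loader, device):
    """Frame-weighted validation loss, as in the train.py:1146 line."""
    model.eval()
    tot, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model.compute_loss(batch)  # assumed helper
            tot += loss.item() * num_frames
            frames += num_frames
    model.train()
    print(f"validation: loss={tot / frames:.4f}, over {frames:.2f} frames.")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // 2**20}MB")
```
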
+2024-08-25 04:37:08,018 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 04:37:16,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=26554.666666666668, ans=0.04949747468305833 +2024-08-25 04:37:27,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=26608.0, ans=0.125 +2024-08-25 04:37:36,177 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:37:43,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26714.666666666668, ans=0.1 +2024-08-25 04:37:46,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=26714.666666666668, ans=0.09899494936611666 +2024-08-25 04:37:50,801 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.252e+02 2.580e+02 3.143e+02 6.401e+02, threshold=5.159e+02, percent-clipped=2.0 +2024-08-25 04:37:53,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26714.666666666668, ans=0.1 +2024-08-25 04:38:10,080 INFO [train.py:1114] (0/4) Epoch 3, batch 50, loss[loss=0.2937, simple_loss=0.3174, pruned_loss=0.09674, ctc_loss=0.1914, over 19698.00 frames. ], tot_loss[loss=0.3658, simple_loss=0.3722, pruned_loss=0.1305, ctc_loss=0.2463, over 845269.70 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 04:38:18,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.87 vs. limit=15.0 +2024-08-25 04:38:35,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=26821.333333333332, ans=0.005038840579710145 +2024-08-25 04:38:40,977 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:38:42,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=26874.666666666668, ans=0.09899494936611666 +2024-08-25 04:38:52,951 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.46 vs. limit=10.0 +2024-08-25 04:38:53,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=26928.0, ans=0.025 +2024-08-25 04:38:55,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. 
limit=6.0 +2024-08-25 04:39:12,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=26981.333333333332, ans=0.0 +2024-08-25 04:39:12,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=26981.333333333332, ans=0.0 +2024-08-25 04:39:14,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=26981.333333333332, ans=0.0 +2024-08-25 04:39:26,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.93 vs. limit=15.0 +2024-08-25 04:39:28,431 INFO [train.py:1114] (0/4) Epoch 3, batch 100, loss[loss=0.3251, simple_loss=0.3424, pruned_loss=0.1114, ctc_loss=0.2124, over 19727.00 frames. ], tot_loss[loss=0.3601, simple_loss=0.369, pruned_loss=0.1275, ctc_loss=0.2406, over 1500423.89 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 04:39:33,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27088.0, ans=0.1 +2024-08-25 04:39:52,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=27194.666666666668, ans=0.125 +2024-08-25 04:40:03,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0 +2024-08-25 04:40:11,089 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.865e+02 2.221e+02 2.583e+02 3.158e+02 4.904e+02, threshold=5.165e+02, percent-clipped=0.0 +2024-08-25 04:40:27,491 INFO [train.py:1114] (0/4) Epoch 3, batch 150, loss[loss=0.298, simple_loss=0.3216, pruned_loss=0.1009, ctc_loss=0.1818, over 19745.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3641, pruned_loss=0.1244, ctc_loss=0.2346, over 2028606.56 frames. 
], batch size: 47, lr: 3.81e-02, grad_scale: 16.0 +2024-08-25 04:40:34,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=27354.666666666668, ans=0.125 +2024-08-25 04:40:35,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27354.666666666668, ans=0.1 +2024-08-25 04:40:54,121 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:40:56,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=27461.333333333332, ans=0.125 +2024-08-25 04:41:01,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27461.333333333332, ans=0.1 +2024-08-25 04:41:07,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=27514.666666666668, ans=0.0 +2024-08-25 04:41:14,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=27514.666666666668, ans=0.125 +2024-08-25 04:41:21,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=27568.0, ans=0.125 +2024-08-25 04:41:26,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.62 vs. limit=10.0 +2024-08-25 04:41:29,357 INFO [train.py:1114] (0/4) Epoch 3, batch 200, loss[loss=0.3843, simple_loss=0.3738, pruned_loss=0.1448, ctc_loss=0.2626, over 18269.00 frames. ], tot_loss[loss=0.351, simple_loss=0.362, pruned_loss=0.1235, ctc_loss=0.2328, over 2435586.34 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 04:41:31,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=27621.333333333332, ans=0.004864927536231884 +2024-08-25 04:41:56,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=27728.0, ans=0.0 +2024-08-25 04:42:03,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27728.0, ans=0.125 +2024-08-25 04:42:14,181 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 2.192e+02 2.550e+02 3.125e+02 5.269e+02, threshold=5.099e+02, percent-clipped=1.0 +2024-08-25 04:42:28,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=27834.666666666668, ans=0.025 +2024-08-25 04:42:35,057 INFO [train.py:1114] (0/4) Epoch 3, batch 250, loss[loss=0.4041, simple_loss=0.3973, pruned_loss=0.149, ctc_loss=0.2822, over 19362.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3622, pruned_loss=0.1236, ctc_loss=0.2325, over 2754938.96 frames. 
], batch size: 67, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 04:42:49,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=27941.333333333332, ans=0.09899494936611666 +2024-08-25 04:42:56,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27941.333333333332, ans=0.125 +2024-08-25 04:43:15,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.28 vs. limit=15.0 +2024-08-25 04:43:25,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=28101.333333333332, ans=0.0 +2024-08-25 04:43:33,551 INFO [train.py:1114] (0/4) Epoch 3, batch 300, loss[loss=0.3772, simple_loss=0.3851, pruned_loss=0.1348, ctc_loss=0.2494, over 19553.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3606, pruned_loss=0.1226, ctc_loss=0.2309, over 2999555.88 frames. ], batch size: 61, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 04:43:33,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28154.666666666668, ans=0.125 +2024-08-25 04:43:34,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=28154.666666666668, ans=0.004748985507246377 +2024-08-25 04:43:38,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=28154.666666666668, ans=0.125 +2024-08-25 04:43:41,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28154.666666666668, ans=0.1 +2024-08-25 04:43:42,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=28154.666666666668, ans=0.004748985507246377 +2024-08-25 04:43:43,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=28154.666666666668, ans=0.125 +2024-08-25 04:43:48,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=28208.0, ans=0.0 +2024-08-25 04:43:51,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.47 vs. limit=22.5 +2024-08-25 04:43:52,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=28208.0, ans=10.0 +2024-08-25 04:44:10,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=28314.666666666668, ans=0.0 +2024-08-25 04:44:18,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=28314.666666666668, ans=0.125 +2024-08-25 04:44:18,931 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.242e+02 2.624e+02 3.299e+02 5.169e+02, threshold=5.248e+02, percent-clipped=1.0 +2024-08-25 04:44:24,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.98 vs. 
limit=15.0 +2024-08-25 04:44:36,137 INFO [train.py:1114] (0/4) Epoch 3, batch 350, loss[loss=0.325, simple_loss=0.3361, pruned_loss=0.1147, ctc_loss=0.2113, over 19749.00 frames. ], tot_loss[loss=0.3501, simple_loss=0.3614, pruned_loss=0.123, ctc_loss=0.2318, over 3189804.67 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 04:44:39,466 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:44:44,243 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:45:09,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. limit=6.0 +2024-08-25 04:45:59,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=28634.666666666668, ans=0.5 +2024-08-25 04:46:52,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28634.666666666668, ans=0.1 +2024-08-25 04:46:55,908 INFO [train.py:1114] (0/4) Epoch 3, batch 400, loss[loss=0.3213, simple_loss=0.3501, pruned_loss=0.1066, ctc_loss=0.198, over 19501.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3605, pruned_loss=0.122, ctc_loss=0.2302, over 3342807.24 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 32.0 +2024-08-25 04:47:29,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=28741.333333333332, ans=0.125 +2024-08-25 04:47:37,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28741.333333333332, ans=0.125 +2024-08-25 04:47:39,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=28794.666666666668, ans=0.125 +2024-08-25 04:48:22,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.232e+02 2.568e+02 3.025e+02 1.134e+03, threshold=5.136e+02, percent-clipped=4.0 +2024-08-25 04:48:36,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.27 vs. limit=12.0 +2024-08-25 04:48:45,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=28901.333333333332, ans=0.025 +2024-08-25 04:48:48,319 INFO [train.py:1114] (0/4) Epoch 3, batch 450, loss[loss=0.3477, simple_loss=0.3688, pruned_loss=0.1185, ctc_loss=0.2244, over 19610.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3595, pruned_loss=0.121, ctc_loss=0.2283, over 3452268.07 frames. 
], batch size: 55, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 04:48:55,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=28954.666666666668, ans=0.0 +2024-08-25 04:49:03,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=29008.0, ans=0.125 +2024-08-25 04:49:28,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=29114.666666666668, ans=0.09899494936611666 +2024-08-25 04:49:31,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-08-25 04:50:09,360 INFO [train.py:1114] (0/4) Epoch 3, batch 500, loss[loss=0.3209, simple_loss=0.3535, pruned_loss=0.1059, ctc_loss=0.1916, over 19636.00 frames. ], tot_loss[loss=0.3453, simple_loss=0.3585, pruned_loss=0.1206, ctc_loss=0.2275, over 3547304.73 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 04:50:54,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.47 vs. limit=15.0 +2024-08-25 04:51:09,146 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.756e+02 2.370e+02 2.734e+02 3.745e+02 5.336e+02, threshold=5.469e+02, percent-clipped=1.0 +2024-08-25 04:51:28,444 INFO [train.py:1114] (0/4) Epoch 3, batch 550, loss[loss=0.348, simple_loss=0.3649, pruned_loss=0.1193, ctc_loss=0.2316, over 19227.00 frames. ], tot_loss[loss=0.3451, simple_loss=0.3586, pruned_loss=0.1204, ctc_loss=0.2268, over 3608971.33 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 32.0 +2024-08-25 04:51:54,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=29594.666666666668, ans=0.0 +2024-08-25 04:52:03,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0 +2024-08-25 04:52:07,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=29648.0, ans=0.1 +2024-08-25 04:52:50,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29648.0, ans=0.1 +2024-08-25 04:52:56,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=29701.333333333332, ans=0.0 +2024-08-25 04:53:06,044 INFO [train.py:1114] (0/4) Epoch 3, batch 600, loss[loss=0.3857, simple_loss=0.3881, pruned_loss=0.1406, ctc_loss=0.2549, over 19344.00 frames. ], tot_loss[loss=0.3429, simple_loss=0.3575, pruned_loss=0.1192, ctc_loss=0.2246, over 3665528.88 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 32.0 +2024-08-25 04:53:06,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29754.666666666668, ans=0.1 +2024-08-25 04:53:09,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.77 vs. 
limit=15.0 +2024-08-25 04:53:21,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29808.0, ans=0.1 +2024-08-25 04:53:49,297 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.141e+02 2.536e+02 3.031e+02 6.622e+02, threshold=5.071e+02, percent-clipped=2.0 +2024-08-25 04:54:00,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.83 vs. limit=15.0 +2024-08-25 04:54:04,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29968.0, ans=0.1 +2024-08-25 04:54:06,081 INFO [train.py:1114] (0/4) Epoch 3, batch 650, loss[loss=0.3153, simple_loss=0.3485, pruned_loss=0.1026, ctc_loss=0.1921, over 19787.00 frames. ], tot_loss[loss=0.3421, simple_loss=0.3568, pruned_loss=0.1188, ctc_loss=0.2241, over 3715665.21 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 32.0 +2024-08-25 04:54:14,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=30021.333333333332, ans=0.125 +2024-08-25 04:54:20,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:54:22,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:54:37,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:55:06,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.56 vs. limit=15.0 +2024-08-25 04:55:10,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30234.666666666668, ans=0.1 +2024-08-25 04:55:16,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=30234.666666666668, ans=0.0 +2024-08-25 04:55:19,007 INFO [train.py:1114] (0/4) Epoch 3, batch 700, loss[loss=0.3472, simple_loss=0.3544, pruned_loss=0.1228, ctc_loss=0.2362, over 19730.00 frames. ], tot_loss[loss=0.3423, simple_loss=0.357, pruned_loss=0.119, ctc_loss=0.2243, over 3746817.77 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 32.0 +2024-08-25 04:55:22,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=30288.0, ans=0.125 +2024-08-25 04:55:44,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=30394.666666666668, ans=0.2 +2024-08-25 04:56:38,918 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.716e+02 2.292e+02 2.520e+02 3.192e+02 5.203e+02, threshold=5.040e+02, percent-clipped=1.0 +2024-08-25 04:56:41,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.83 vs. 
limit=15.0 +2024-08-25 04:56:44,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30501.333333333332, ans=0.1 +2024-08-25 04:56:56,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30554.666666666668, ans=0.1 +2024-08-25 04:56:57,175 INFO [train.py:1114] (0/4) Epoch 3, batch 750, loss[loss=0.3245, simple_loss=0.3482, pruned_loss=0.1088, ctc_loss=0.2078, over 19487.00 frames. ], tot_loss[loss=0.3417, simple_loss=0.3564, pruned_loss=0.1188, ctc_loss=0.2234, over 3772901.44 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 32.0 +2024-08-25 04:57:04,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=30554.666666666668, ans=0.125 +2024-08-25 04:57:07,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=30554.666666666668, ans=15.0 +2024-08-25 04:57:09,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30554.666666666668, ans=0.1 +2024-08-25 04:57:19,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30608.0, ans=0.125 +2024-08-25 04:57:37,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=30714.666666666668, ans=0.0 +2024-08-25 04:57:38,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=30714.666666666668, ans=0.004192463768115941 +2024-08-25 04:57:45,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=30714.666666666668, ans=0.0 +2024-08-25 04:57:59,168 INFO [train.py:1114] (0/4) Epoch 3, batch 800, loss[loss=0.3067, simple_loss=0.3264, pruned_loss=0.1048, ctc_loss=0.1935, over 19821.00 frames. ], tot_loss[loss=0.3408, simple_loss=0.3558, pruned_loss=0.1184, ctc_loss=0.2225, over 3794536.37 frames. ], batch size: 49, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 04:58:26,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=30928.0, ans=0.2 +2024-08-25 04:58:27,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=30928.0, ans=0.125 +2024-08-25 04:58:32,136 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:58:42,765 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 2.211e+02 2.622e+02 3.205e+02 5.257e+02, threshold=5.244e+02, percent-clipped=1.0 +2024-08-25 04:58:46,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.79 vs. limit=15.0 +2024-08-25 04:58:47,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.26 vs. 
limit=10.0 +2024-08-25 04:58:49,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=31034.666666666668, ans=0.125 +2024-08-25 04:58:51,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=31034.666666666668, ans=0.125 +2024-08-25 04:59:01,970 INFO [train.py:1114] (0/4) Epoch 3, batch 850, loss[loss=0.3465, simple_loss=0.3672, pruned_loss=0.1175, ctc_loss=0.2272, over 19628.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3559, pruned_loss=0.1182, ctc_loss=0.2222, over 3815463.35 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 04:59:23,801 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:59:54,019 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.499e-03 +2024-08-25 05:00:04,411 INFO [train.py:1114] (0/4) Epoch 3, batch 900, loss[loss=0.3238, simple_loss=0.3303, pruned_loss=0.1146, ctc_loss=0.22, over 19827.00 frames. ], tot_loss[loss=0.3427, simple_loss=0.357, pruned_loss=0.1194, ctc_loss=0.224, over 3819825.72 frames. ], batch size: 49, lr: 3.72e-02, grad_scale: 8.0 +2024-08-25 05:00:06,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=31354.666666666668, ans=0.004053333333333333 +2024-08-25 05:00:08,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=31354.666666666668, ans=0.125 +2024-08-25 05:00:09,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.86 vs. limit=15.0 +2024-08-25 05:00:17,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=31408.0, ans=0.125 +2024-08-25 05:00:19,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=31408.0, ans=0.2 +2024-08-25 05:00:54,424 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.296e+02 2.736e+02 3.525e+02 1.528e+03, threshold=5.472e+02, percent-clipped=4.0 +2024-08-25 05:01:02,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.09 vs. limit=15.0 +2024-08-25 05:01:08,295 INFO [train.py:1114] (0/4) Epoch 3, batch 950, loss[loss=0.3109, simple_loss=0.3273, pruned_loss=0.1051, ctc_loss=0.2106, over 19499.00 frames. ], tot_loss[loss=0.3423, simple_loss=0.3569, pruned_loss=0.1191, ctc_loss=0.2239, over 3821606.58 frames. 
], batch size: 49, lr: 3.71e-02, grad_scale: 8.0 +2024-08-25 05:01:14,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=31621.333333333332, ans=0.125 +2024-08-25 05:01:21,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=31674.666666666668, ans=0.125 +2024-08-25 05:01:27,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=31674.666666666668, ans=0.5 +2024-08-25 05:01:53,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31781.333333333332, ans=0.1 +2024-08-25 05:02:08,124 INFO [train.py:1114] (0/4) Epoch 3, batch 1000, loss[loss=0.2884, simple_loss=0.3227, pruned_loss=0.0923, ctc_loss=0.1738, over 19845.00 frames. ], tot_loss[loss=0.3433, simple_loss=0.3577, pruned_loss=0.1196, ctc_loss=0.2243, over 3817304.87 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 8.0 +2024-08-25 05:02:08,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31888.0, ans=0.1 +2024-08-25 05:02:37,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=31994.666666666668, ans=0.2 +2024-08-25 05:02:51,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.38 vs. limit=15.0 +2024-08-25 05:02:56,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.586e+02 2.163e+02 2.492e+02 3.027e+02 5.724e+02, threshold=4.983e+02, percent-clipped=1.0 +2024-08-25 05:02:58,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.57 vs. limit=22.5 +2024-08-25 05:03:13,729 INFO [train.py:1114] (0/4) Epoch 3, batch 1050, loss[loss=0.3936, simple_loss=0.3971, pruned_loss=0.1432, ctc_loss=0.2594, over 19840.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3569, pruned_loss=0.1193, ctc_loss=0.2238, over 3823854.97 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 8.0 +2024-08-25 05:03:14,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=32154.666666666668, ans=0.2 +2024-08-25 05:03:18,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32154.666666666668, ans=0.1 +2024-08-25 05:04:30,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=32314.666666666668, ans=0.025 +2024-08-25 05:04:30,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.30 vs. limit=12.0 +2024-08-25 05:04:54,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.82 vs. limit=6.0 +2024-08-25 05:05:04,405 INFO [train.py:1114] (0/4) Epoch 3, batch 1100, loss[loss=0.3226, simple_loss=0.344, pruned_loss=0.1103, ctc_loss=0.2016, over 19578.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3563, pruned_loss=0.1185, ctc_loss=0.2225, over 3830357.79 frames. 
], batch size: 52, lr: 3.70e-02, grad_scale: 8.0 +2024-08-25 05:05:04,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=32421.333333333332, ans=0.2 +2024-08-25 05:05:08,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=32421.333333333332, ans=0.125 +2024-08-25 05:06:00,571 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.823e+02 2.355e+02 2.517e+02 3.019e+02 4.945e+02, threshold=5.033e+02, percent-clipped=0.0 +2024-08-25 05:06:15,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=32634.666666666668, ans=0.125 +2024-08-25 05:06:23,033 INFO [train.py:1114] (0/4) Epoch 3, batch 1150, loss[loss=0.2892, simple_loss=0.3198, pruned_loss=0.09385, ctc_loss=0.1773, over 19566.00 frames. ], tot_loss[loss=0.3414, simple_loss=0.356, pruned_loss=0.1188, ctc_loss=0.2229, over 3828597.53 frames. ], batch size: 52, lr: 3.69e-02, grad_scale: 8.0 +2024-08-25 05:06:40,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=32741.333333333332, ans=0.125 +2024-08-25 05:06:41,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32741.333333333332, ans=0.125 +2024-08-25 05:07:07,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=15.0 +2024-08-25 05:07:08,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0 +2024-08-25 05:07:14,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=32848.0, ans=0.0 +2024-08-25 05:07:29,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=32901.333333333336, ans=0.125 +2024-08-25 05:07:32,070 INFO [train.py:1114] (0/4) Epoch 3, batch 1200, loss[loss=0.3156, simple_loss=0.3536, pruned_loss=0.1009, ctc_loss=0.1894, over 19832.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3571, pruned_loss=0.1192, ctc_loss=0.2236, over 3824323.82 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 16.0 +2024-08-25 05:07:34,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=32954.666666666664, ans=0.0 +2024-08-25 05:07:54,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.89 vs. limit=22.5 +2024-08-25 05:07:57,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.02 vs. 
limit=15.0 +2024-08-25 05:08:16,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=33114.666666666664, ans=0.00367072463768116 +2024-08-25 05:08:19,680 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 2.128e+02 2.359e+02 2.757e+02 6.653e+02, threshold=4.718e+02, percent-clipped=2.0 +2024-08-25 05:08:31,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.21 vs. limit=10.0 +2024-08-25 05:08:38,023 INFO [train.py:1114] (0/4) Epoch 3, batch 1250, loss[loss=0.3799, simple_loss=0.3859, pruned_loss=0.1342, ctc_loss=0.2639, over 19515.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3572, pruned_loss=0.1187, ctc_loss=0.2229, over 3843084.15 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 16.0 +2024-08-25 05:08:42,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33221.333333333336, ans=0.1 +2024-08-25 05:08:58,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=33274.666666666664, ans=0.125 +2024-08-25 05:09:03,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=33328.0, ans=0.0 +2024-08-25 05:09:18,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.66 vs. limit=15.0 +2024-08-25 05:09:19,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.37 vs. limit=15.0 +2024-08-25 05:09:42,118 INFO [train.py:1114] (0/4) Epoch 3, batch 1300, loss[loss=0.3645, simple_loss=0.3718, pruned_loss=0.1287, ctc_loss=0.2495, over 18759.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3561, pruned_loss=0.1182, ctc_loss=0.222, over 3847195.98 frames. ], batch size: 76, lr: 3.67e-02, grad_scale: 16.0 +2024-08-25 05:09:44,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.10 vs. limit=15.0 +2024-08-25 05:09:50,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=33488.0, ans=0.125 +2024-08-25 05:09:51,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=33488.0, ans=0.125 +2024-08-25 05:09:56,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33541.333333333336, ans=0.1 +2024-08-25 05:10:04,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=33594.666666666664, ans=0.125 +2024-08-25 05:10:05,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=33594.666666666664, ans=0.2 +2024-08-25 05:10:48,159 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.161e+02 2.525e+02 2.896e+02 5.464e+02, threshold=5.050e+02, percent-clipped=3.0 +2024-08-25 05:11:02,302 INFO [train.py:1114] (0/4) Epoch 3, batch 1350, loss[loss=0.328, simple_loss=0.3485, pruned_loss=0.1119, ctc_loss=0.2094, over 19761.00 frames. 
], tot_loss[loss=0.3406, simple_loss=0.356, pruned_loss=0.1183, ctc_loss=0.2216, over 3857821.88 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 16.0 +2024-08-25 05:11:07,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=33754.666666666664, ans=0.2 +2024-08-25 05:11:15,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=33754.666666666664, ans=0.125 +2024-08-25 05:11:49,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=33861.333333333336, ans=0.003508405797101449 +2024-08-25 05:11:55,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33861.333333333336, ans=0.1 +2024-08-25 05:12:05,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=33914.666666666664, ans=0.0034968115942028994 +2024-08-25 05:12:05,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=33914.666666666664, ans=0.0034968115942028994 +2024-08-25 05:12:06,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=33914.666666666664, ans=0.125 +2024-08-25 05:12:26,329 INFO [train.py:1114] (0/4) Epoch 3, batch 1400, loss[loss=0.3036, simple_loss=0.3214, pruned_loss=0.104, ctc_loss=0.1949, over 19694.00 frames. ], tot_loss[loss=0.3384, simple_loss=0.3547, pruned_loss=0.1172, ctc_loss=0.2196, over 3864952.25 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 16.0 +2024-08-25 05:12:35,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=34021.333333333336, ans=0.125 +2024-08-25 05:13:29,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=34181.333333333336, ans=0.125 +2024-08-25 05:13:31,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=34181.333333333336, ans=0.07 +2024-08-25 05:13:31,982 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.210e+02 2.531e+02 3.096e+02 9.067e+02, threshold=5.062e+02, percent-clipped=2.0 +2024-08-25 05:14:24,484 INFO [train.py:1114] (0/4) Epoch 3, batch 1450, loss[loss=0.3541, simple_loss=0.3764, pruned_loss=0.12, ctc_loss=0.2295, over 19649.00 frames. ], tot_loss[loss=0.3397, simple_loss=0.3555, pruned_loss=0.1178, ctc_loss=0.2209, over 3863352.84 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 16.0 +2024-08-25 05:14:27,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=34288.0, ans=0.125 +2024-08-25 05:14:27,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.15 vs. 
limit=22.5 +2024-08-25 05:14:46,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=34341.333333333336, ans=0.0 +2024-08-25 05:14:51,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34341.333333333336, ans=0.1 +2024-08-25 05:14:55,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=34394.666666666664, ans=0.125 +2024-08-25 05:15:09,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.89 vs. limit=15.0 +2024-08-25 05:15:30,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=34501.333333333336, ans=0.0 +2024-08-25 05:15:32,930 INFO [train.py:1114] (0/4) Epoch 3, batch 1500, loss[loss=0.3387, simple_loss=0.3693, pruned_loss=0.1107, ctc_loss=0.2164, over 19595.00 frames. ], tot_loss[loss=0.3401, simple_loss=0.356, pruned_loss=0.1179, ctc_loss=0.2211, over 3863083.24 frames. ], batch size: 57, lr: 3.65e-02, grad_scale: 16.0 +2024-08-25 05:15:53,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=34608.0, ans=0.0 +2024-08-25 05:15:59,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. limit=15.0 +2024-08-25 05:16:43,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=34714.666666666664, ans=0.125 +2024-08-25 05:16:51,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.550e+02 2.151e+02 2.498e+02 3.151e+02 6.810e+02, threshold=4.996e+02, percent-clipped=2.0 +2024-08-25 05:19:30,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.69 vs. limit=15.0 +2024-08-25 05:19:36,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34768.0, ans=0.125 +2024-08-25 05:19:40,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0 +2024-08-25 05:20:00,634 INFO [train.py:1114] (0/4) Epoch 3, batch 1550, loss[loss=0.3799, simple_loss=0.3922, pruned_loss=0.1352, ctc_loss=0.2427, over 19626.00 frames. ], tot_loss[loss=0.34, simple_loss=0.3559, pruned_loss=0.1179, ctc_loss=0.2208, over 3846218.17 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 16.0 +2024-08-25 05:21:06,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34981.333333333336, ans=0.125 +2024-08-25 05:22:04,996 INFO [train.py:1114] (0/4) Epoch 3, batch 1600, loss[loss=0.3178, simple_loss=0.3489, pruned_loss=0.1051, ctc_loss=0.1911, over 19836.00 frames. ], tot_loss[loss=0.3403, simple_loss=0.3557, pruned_loss=0.1182, ctc_loss=0.2212, over 3835008.45 frames. 
], batch size: 57, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 05:22:11,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=35088.0, ans=0.0 +2024-08-25 05:22:16,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=35141.333333333336, ans=0.95 +2024-08-25 05:22:17,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=35141.333333333336, ans=0.125 +2024-08-25 05:23:22,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=35248.0, ans=0.0 +2024-08-25 05:23:25,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=35248.0, ans=0.125 +2024-08-25 05:23:42,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=35248.0, ans=0.125 +2024-08-25 05:23:43,086 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.193e+02 2.529e+02 3.233e+02 6.645e+02, threshold=5.059e+02, percent-clipped=2.0 +2024-08-25 05:23:45,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=12.0 +2024-08-25 05:23:49,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=35301.333333333336, ans=0.2 +2024-08-25 05:24:23,000 INFO [train.py:1114] (0/4) Epoch 3, batch 1650, loss[loss=0.3575, simple_loss=0.3793, pruned_loss=0.1228, ctc_loss=0.2252, over 19661.00 frames. ], tot_loss[loss=0.3403, simple_loss=0.3556, pruned_loss=0.1182, ctc_loss=0.2214, over 3831787.34 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0 +2024-08-25 05:24:31,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35354.666666666664, ans=0.125 +2024-08-25 05:25:28,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.22 vs. limit=6.0 +2024-08-25 05:25:56,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35514.666666666664, ans=0.1 +2024-08-25 05:26:16,061 INFO [train.py:1114] (0/4) Epoch 3, batch 1700, loss[loss=0.3002, simple_loss=0.3128, pruned_loss=0.1026, ctc_loss=0.2061, over 19671.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3544, pruned_loss=0.1169, ctc_loss=0.2194, over 3846193.75 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:26:16,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=35621.333333333336, ans=0.0 +2024-08-25 05:26:31,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0 +2024-08-25 05:26:40,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. limit=15.0 +2024-08-25 05:26:48,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.69 vs. 
limit=10.0 +2024-08-25 05:26:49,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35728.0, ans=0.0 +2024-08-25 05:26:58,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0 +2024-08-25 05:26:59,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-08-25 05:27:08,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=35781.333333333336, ans=0.04949747468305833 +2024-08-25 05:27:10,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.342e+02 2.819e+02 3.429e+02 5.215e+02, threshold=5.637e+02, percent-clipped=1.0 +2024-08-25 05:27:19,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-08-25 05:27:19,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=15.0 +2024-08-25 05:27:23,548 INFO [train.py:1114] (0/4) Epoch 3, batch 1750, loss[loss=0.2892, simple_loss=0.3037, pruned_loss=0.1002, ctc_loss=0.1857, over 19628.00 frames. ], tot_loss[loss=0.3366, simple_loss=0.3535, pruned_loss=0.1163, ctc_loss=0.2179, over 3851012.17 frames. ], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:27:35,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=35941.333333333336, ans=0.125 +2024-08-25 05:27:36,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35941.333333333336, ans=0.0 +2024-08-25 05:29:19,805 INFO [train.py:1114] (0/4) Epoch 3, batch 1800, loss[loss=0.3328, simple_loss=0.3619, pruned_loss=0.1095, ctc_loss=0.2117, over 19624.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3536, pruned_loss=0.1161, ctc_loss=0.2178, over 3852942.62 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:29:45,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=36154.666666666664, ans=0.0 +2024-08-25 05:31:58,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 2.106e+02 2.466e+02 3.299e+02 1.077e+03, threshold=4.933e+02, percent-clipped=1.0 +2024-08-25 05:32:11,706 INFO [train.py:1114] (0/4) Epoch 3, batch 1850, loss[loss=0.3065, simple_loss=0.3454, pruned_loss=0.09589, ctc_loss=0.1895, over 19590.00 frames. ], tot_loss[loss=0.3343, simple_loss=0.3521, pruned_loss=0.1151, ctc_loss=0.216, over 3857160.26 frames. 
], batch size: 57, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:32:40,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=36528.0, ans=0.125 +2024-08-25 05:33:00,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=36634.666666666664, ans=0.2 +2024-08-25 05:33:11,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=36688.0, ans=0.2 +2024-08-25 05:33:12,868 INFO [train.py:1114] (0/4) Epoch 3, batch 1900, loss[loss=0.3451, simple_loss=0.3701, pruned_loss=0.116, ctc_loss=0.2204, over 19648.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3523, pruned_loss=0.1149, ctc_loss=0.2156, over 3860676.35 frames. ], batch size: 59, lr: 3.60e-02, grad_scale: 16.0 +2024-08-25 05:34:05,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.713e+02 2.260e+02 2.560e+02 3.105e+02 5.689e+02, threshold=5.120e+02, percent-clipped=2.0 +2024-08-25 05:34:06,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36901.333333333336, ans=0.1 +2024-08-25 05:34:14,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=36901.333333333336, ans=0.05 +2024-08-25 05:34:49,883 INFO [train.py:1114] (0/4) Epoch 3, batch 1950, loss[loss=0.3078, simple_loss=0.3327, pruned_loss=0.1021, ctc_loss=0.197, over 19579.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3542, pruned_loss=0.1156, ctc_loss=0.217, over 3869319.91 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 16.0 +2024-08-25 05:34:54,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=36954.666666666664, ans=0.2 +2024-08-25 05:35:54,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=36954.666666666664, ans=0.0028359420289855086 +2024-08-25 05:36:28,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=37061.333333333336, ans=0.0 +2024-08-25 05:36:34,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=37061.333333333336, ans=0.125 +2024-08-25 05:36:37,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=37061.333333333336, ans=0.125 +2024-08-25 05:36:47,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=37114.666666666664, ans=0.125 +2024-08-25 05:36:50,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=37114.666666666664, ans=10.0 +2024-08-25 05:36:51,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=37114.666666666664, ans=0.2 +2024-08-25 05:37:05,778 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:37:09,008 INFO [train.py:1114] (0/4) Epoch 3, batch 2000, loss[loss=0.2815, simple_loss=0.3095, pruned_loss=0.09152, ctc_loss=0.176, over 19645.00 frames. 
], tot_loss[loss=0.3367, simple_loss=0.3546, pruned_loss=0.1159, ctc_loss=0.2175, over 3854838.15 frames. ], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 05:37:36,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.04 vs. limit=22.5 +2024-08-25 05:38:02,421 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.243e+02 2.650e+02 3.292e+02 1.299e+03, threshold=5.300e+02, percent-clipped=6.0 +2024-08-25 05:38:08,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=37434.666666666664, ans=0.95 +2024-08-25 05:38:10,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=37434.666666666664, ans=0.0027315942028985516 +2024-08-25 05:38:13,916 INFO [train.py:1114] (0/4) Epoch 3, batch 2050, loss[loss=0.3178, simple_loss=0.3309, pruned_loss=0.1109, ctc_loss=0.2069, over 19718.00 frames. ], tot_loss[loss=0.3355, simple_loss=0.3532, pruned_loss=0.1155, ctc_loss=0.2169, over 3852097.82 frames. ], batch size: 47, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 05:38:15,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37488.0, ans=0.125 +2024-08-25 05:38:15,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=37488.0, ans=0.125 +2024-08-25 05:38:16,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37488.0, ans=0.125 +2024-08-25 05:38:30,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=37541.333333333336, ans=0.125 +2024-08-25 05:38:33,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37541.333333333336, ans=0.1 +2024-08-25 05:38:33,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37541.333333333336, ans=0.1 +2024-08-25 05:38:42,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. 
limit=15.0 +2024-08-25 05:38:44,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37594.666666666664, ans=0.125 +2024-08-25 05:38:44,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=37594.666666666664, ans=0.04949747468305833 +2024-08-25 05:38:56,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=37648.0, ans=0.95 +2024-08-25 05:39:04,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=37701.333333333336, ans=0.05 +2024-08-25 05:39:13,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=37701.333333333336, ans=0.05 +2024-08-25 05:39:40,810 INFO [train.py:1114] (0/4) Epoch 3, batch 2100, loss[loss=0.3207, simple_loss=0.3416, pruned_loss=0.1085, ctc_loss=0.2071, over 19755.00 frames. ], tot_loss[loss=0.3345, simple_loss=0.3527, pruned_loss=0.115, ctc_loss=0.2158, over 3858773.03 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 05:39:43,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37754.666666666664, ans=0.125 +2024-08-25 05:40:12,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=37808.0, ans=0.2 +2024-08-25 05:40:15,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=37808.0, ans=0.025 +2024-08-25 05:40:18,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=37861.333333333336, ans=0.125 +2024-08-25 05:40:40,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=37861.333333333336, ans=0.04949747468305833 +2024-08-25 05:40:53,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37914.666666666664, ans=0.125 +2024-08-25 05:40:58,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.072e+02 2.352e+02 2.718e+02 4.903e+02, threshold=4.703e+02, percent-clipped=0.0 +2024-08-25 05:41:02,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=37968.0, ans=0.2 +2024-08-25 05:41:10,087 INFO [train.py:1114] (0/4) Epoch 3, batch 2150, loss[loss=0.3094, simple_loss=0.3313, pruned_loss=0.104, ctc_loss=0.199, over 19588.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.3514, pruned_loss=0.1143, ctc_loss=0.2143, over 3869168.56 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 05:41:45,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=38128.0, ans=0.0 +2024-08-25 05:41:51,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.71 vs. limit=22.5 +2024-08-25 05:41:54,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.14 vs. 
limit=15.0 +2024-08-25 05:42:42,872 INFO [train.py:1114] (0/4) Epoch 3, batch 2200, loss[loss=0.363, simple_loss=0.3704, pruned_loss=0.1294, ctc_loss=0.2416, over 19591.00 frames. ], tot_loss[loss=0.3326, simple_loss=0.3511, pruned_loss=0.1142, ctc_loss=0.214, over 3867395.74 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 05:42:52,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=38288.0, ans=0.025 +2024-08-25 05:42:52,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.26 vs. limit=15.0 +2024-08-25 05:42:59,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=38341.333333333336, ans=0.125 +2024-08-25 05:43:05,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=38341.333333333336, ans=0.07 +2024-08-25 05:43:15,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=38394.666666666664, ans=0.2 +2024-08-25 05:43:34,315 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.802e+02 2.197e+02 2.629e+02 2.994e+02 6.107e+02, threshold=5.259e+02, percent-clipped=1.0 +2024-08-25 05:43:50,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=38554.666666666664, ans=0.2 +2024-08-25 05:43:51,063 INFO [train.py:1114] (0/4) Epoch 3, batch 2250, loss[loss=0.3456, simple_loss=0.3663, pruned_loss=0.1184, ctc_loss=0.2203, over 19631.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3513, pruned_loss=0.1143, ctc_loss=0.2136, over 3866860.08 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 05:44:29,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.33 vs. limit=15.0 +2024-08-25 05:44:33,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0 +2024-08-25 05:44:44,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=38661.333333333336, ans=0.2 +2024-08-25 05:44:53,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.62 vs. limit=6.0 +2024-08-25 05:45:15,056 INFO [train.py:1114] (0/4) Epoch 3, batch 2300, loss[loss=0.2673, simple_loss=0.304, pruned_loss=0.08335, ctc_loss=0.1596, over 19515.00 frames. ], tot_loss[loss=0.3326, simple_loss=0.3507, pruned_loss=0.1145, ctc_loss=0.2141, over 3860470.99 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 05:45:22,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=38821.333333333336, ans=0.5 +2024-08-25 05:45:26,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0 +2024-08-25 05:46:11,640 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. 
limit=15.0 +2024-08-25 05:46:11,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.54 vs. limit=15.0 +2024-08-25 05:46:22,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=38981.333333333336, ans=0.07 +2024-08-25 05:46:22,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=38981.333333333336, ans=12.0 +2024-08-25 05:47:15,674 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.630e+02 2.233e+02 2.542e+02 3.133e+02 7.552e+02, threshold=5.083e+02, percent-clipped=3.0 +2024-08-25 05:47:19,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39034.666666666664, ans=0.1 +2024-08-25 05:47:27,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=39088.0, ans=0.125 +2024-08-25 05:47:27,937 INFO [train.py:1114] (0/4) Epoch 3, batch 2350, loss[loss=0.3395, simple_loss=0.3531, pruned_loss=0.1193, ctc_loss=0.2183, over 19684.00 frames. ], tot_loss[loss=0.3316, simple_loss=0.3501, pruned_loss=0.1139, ctc_loss=0.213, over 3863948.62 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 05:47:30,279 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:47:30,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=39088.0, ans=0.125 +2024-08-25 05:47:31,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=39088.0, ans=0.05 +2024-08-25 05:47:36,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=39088.0, ans=15.0 +2024-08-25 05:47:40,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=39141.333333333336, ans=0.125 +2024-08-25 05:47:55,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=39194.666666666664, ans=0.09899494936611666 +2024-08-25 05:48:24,945 INFO [train.py:1114] (0/4) Epoch 3, batch 2400, loss[loss=0.3639, simple_loss=0.3774, pruned_loss=0.127, ctc_loss=0.2412, over 19526.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.353, pruned_loss=0.1155, ctc_loss=0.2157, over 3858826.04 frames. ], batch size: 67, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 05:48:26,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39354.666666666664, ans=0.125 +2024-08-25 05:48:42,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39408.0, ans=0.125 +2024-08-25 05:49:03,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. 
limit=6.0 +2024-08-25 05:49:04,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39514.666666666664, ans=0.125 +2024-08-25 05:49:09,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.31 vs. limit=15.0 +2024-08-25 05:49:10,299 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.241e+02 2.672e+02 3.161e+02 5.607e+02, threshold=5.344e+02, percent-clipped=4.0 +2024-08-25 05:49:26,460 INFO [train.py:1114] (0/4) Epoch 3, batch 2450, loss[loss=0.4103, simple_loss=0.3838, pruned_loss=0.1599, ctc_loss=0.2929, over 13134.00 frames. ], tot_loss[loss=0.3433, simple_loss=0.3577, pruned_loss=0.1197, ctc_loss=0.224, over 3733231.49 frames. ], batch size: 141, lr: 3.53e-02, grad_scale: 32.0 +2024-08-25 05:49:30,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=39621.333333333336, ans=0.2 +2024-08-25 05:49:30,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.31 vs. limit=15.0 +2024-08-25 05:49:40,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.24 vs. limit=22.5 +2024-08-25 05:49:48,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.88 vs. limit=22.5 +2024-08-25 05:49:57,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=15.0 +2024-08-25 05:50:05,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-08-25 05:50:10,757 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-3.pt +2024-08-25 05:51:05,709 INFO [train.py:1114] (0/4) Epoch 4, batch 0, loss[loss=0.3345, simple_loss=0.3462, pruned_loss=0.1166, ctc_loss=0.2243, over 19412.00 frames. ], tot_loss[loss=0.3345, simple_loss=0.3462, pruned_loss=0.1166, ctc_loss=0.2243, over 19412.00 frames. ], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-25 05:51:05,710 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 05:51:35,403 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2629, simple_loss=0.3337, pruned_loss=0.07032, ctc_loss=0.1284, over 944034.00 frames. +2024-08-25 05:51:35,404 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 05:51:48,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=39882.666666666664, ans=0.125 +2024-08-25 05:52:06,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=39936.0, ans=0.125 +2024-08-25 05:52:12,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.40 vs. 
limit=15.0 +2024-08-25 05:52:17,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39989.333333333336, ans=0.1 +2024-08-25 05:52:41,492 INFO [train.py:1114] (0/4) Epoch 4, batch 50, loss[loss=0.2894, simple_loss=0.3173, pruned_loss=0.09519, ctc_loss=0.1776, over 19690.00 frames. ], tot_loss[loss=0.3432, simple_loss=0.3575, pruned_loss=0.1194, ctc_loss=0.2252, over 844645.13 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-25 05:52:47,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.793e+02 2.147e+02 2.483e+02 2.920e+02 4.932e+02, threshold=4.967e+02, percent-clipped=0.0 +2024-08-25 05:52:56,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40096.0, ans=0.125 +2024-08-25 05:53:06,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40149.333333333336, ans=0.125 +2024-08-25 05:53:09,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.74 vs. limit=15.0 +2024-08-25 05:53:34,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40202.666666666664, ans=0.0 +2024-08-25 05:53:56,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40309.333333333336, ans=0.1 +2024-08-25 05:54:02,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=40309.333333333336, ans=0.125 +2024-08-25 05:54:03,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.02 vs. limit=10.0 +2024-08-25 05:54:08,134 INFO [train.py:1114] (0/4) Epoch 4, batch 100, loss[loss=0.3455, simple_loss=0.3609, pruned_loss=0.1196, ctc_loss=0.2277, over 19708.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.355, pruned_loss=0.1154, ctc_loss=0.2178, over 1498983.17 frames. ], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-25 05:54:31,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.78 vs. limit=6.0 +2024-08-25 05:54:35,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=40416.0, ans=0.0020834782608695653 +2024-08-25 05:55:03,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40469.333333333336, ans=0.1 +2024-08-25 05:55:40,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.30 vs. limit=12.0 +2024-08-25 05:55:55,480 INFO [train.py:1114] (0/4) Epoch 4, batch 150, loss[loss=0.292, simple_loss=0.3145, pruned_loss=0.09688, ctc_loss=0.1895, over 19733.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.35, pruned_loss=0.1119, ctc_loss=0.2117, over 2028351.17 frames. 
], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-25 05:55:56,487 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.643e+02 2.033e+02 2.286e+02 2.661e+02 4.118e+02, threshold=4.571e+02, percent-clipped=0.0 +2024-08-25 05:56:09,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=40682.666666666664, ans=0.125 +2024-08-25 05:56:13,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=40682.666666666664, ans=0.125 +2024-08-25 05:56:21,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=40736.0, ans=0.07 +2024-08-25 05:56:24,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=40736.0, ans=0.125 +2024-08-25 05:56:42,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-08-25 05:57:00,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=40842.666666666664, ans=0.125 +2024-08-25 05:57:04,756 INFO [train.py:1114] (0/4) Epoch 4, batch 200, loss[loss=0.3678, simple_loss=0.3749, pruned_loss=0.1304, ctc_loss=0.2499, over 18390.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.348, pruned_loss=0.1107, ctc_loss=0.2091, over 2436007.39 frames. ], batch size: 85, lr: 3.28e-02, grad_scale: 32.0 +2024-08-25 05:57:19,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=40896.0, ans=0.125 +2024-08-25 05:57:38,399 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:57:45,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41002.666666666664, ans=0.125 +2024-08-25 05:57:52,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.80 vs. limit=15.0 +2024-08-25 05:58:08,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=41056.0, ans=0.125 +2024-08-25 05:58:28,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=15.0 +2024-08-25 05:58:44,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41109.333333333336, ans=0.1 +2024-08-25 05:59:03,059 INFO [train.py:1114] (0/4) Epoch 4, batch 250, loss[loss=0.3759, simple_loss=0.3858, pruned_loss=0.1349, ctc_loss=0.2405, over 19399.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3478, pruned_loss=0.1105, ctc_loss=0.2081, over 2755963.59 frames. 
], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-25 05:59:04,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.652e+02 2.098e+02 2.387e+02 2.939e+02 4.251e+02, threshold=4.774e+02, percent-clipped=0.0 +2024-08-25 05:59:11,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41162.666666666664, ans=0.125 +2024-08-25 05:59:31,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.05 vs. limit=15.0 +2024-08-25 05:59:59,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=41322.666666666664, ans=0.125 +2024-08-25 06:00:14,175 INFO [train.py:1114] (0/4) Epoch 4, batch 300, loss[loss=0.37, simple_loss=0.3778, pruned_loss=0.1319, ctc_loss=0.2461, over 19505.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3474, pruned_loss=0.1103, ctc_loss=0.2077, over 3001044.60 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-25 06:00:29,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=41482.666666666664, ans=0.125 +2024-08-25 06:00:39,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=41536.0, ans=0.125 +2024-08-25 06:00:53,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41536.0, ans=0.1 +2024-08-25 06:00:55,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-25 06:01:02,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.21 vs. limit=10.0 +2024-08-25 06:01:05,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.78 vs. limit=22.5 +2024-08-25 06:01:16,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=41642.666666666664, ans=0.0018168115942028985 +2024-08-25 06:01:20,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41642.666666666664, ans=0.125 +2024-08-25 06:01:21,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=29.05 vs. limit=22.5 +2024-08-25 06:01:24,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=41642.666666666664, ans=0.07 +2024-08-25 06:01:36,629 INFO [train.py:1114] (0/4) Epoch 4, batch 350, loss[loss=0.2835, simple_loss=0.3148, pruned_loss=0.09125, ctc_loss=0.1743, over 19766.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3476, pruned_loss=0.1102, ctc_loss=0.2075, over 3191455.99 frames. 
], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-25 06:01:37,794 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 2.143e+02 2.517e+02 2.887e+02 6.595e+02, threshold=5.034e+02, percent-clipped=1.0 +2024-08-25 06:01:46,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=41696.0, ans=0.0 +2024-08-25 06:02:38,784 INFO [train.py:1114] (0/4) Epoch 4, batch 400, loss[loss=0.329, simple_loss=0.3516, pruned_loss=0.113, ctc_loss=0.2008, over 19502.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.347, pruned_loss=0.11, ctc_loss=0.2068, over 3343576.12 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-25 06:02:42,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=41962.666666666664, ans=0.125 +2024-08-25 06:03:04,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=42016.0, ans=0.0 +2024-08-25 06:03:06,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.09 vs. limit=15.0 +2024-08-25 06:03:25,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.92 vs. limit=15.0 +2024-08-25 06:03:26,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=42122.666666666664, ans=0.125 +2024-08-25 06:04:04,050 INFO [train.py:1114] (0/4) Epoch 4, batch 450, loss[loss=0.3205, simple_loss=0.3538, pruned_loss=0.1041, ctc_loss=0.1975, over 19616.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3465, pruned_loss=0.1096, ctc_loss=0.2056, over 3452205.27 frames. ], batch size: 55, lr: 3.25e-02, grad_scale: 32.0 +2024-08-25 06:04:06,526 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.107e+02 2.479e+02 2.897e+02 5.564e+02, threshold=4.958e+02, percent-clipped=2.0 +2024-08-25 06:04:26,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.63 vs. limit=15.0 +2024-08-25 06:04:34,938 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:04:34,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42336.0, ans=0.125 +2024-08-25 06:04:56,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42389.333333333336, ans=0.0 +2024-08-25 06:05:10,923 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-08-25 06:05:29,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.02 vs. limit=22.5 +2024-08-25 06:05:32,257 INFO [train.py:1114] (0/4) Epoch 4, batch 500, loss[loss=0.2587, simple_loss=0.3151, pruned_loss=0.0738, ctc_loss=0.1365, over 19707.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.345, pruned_loss=0.1088, ctc_loss=0.204, over 3547717.56 frames. 
], batch size: 63, lr: 3.25e-02, grad_scale: 32.0 +2024-08-25 06:05:33,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0 +2024-08-25 06:06:03,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42602.666666666664, ans=0.1 +2024-08-25 06:06:08,757 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-8000.pt +2024-08-25 06:06:34,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42709.333333333336, ans=0.125 +2024-08-25 06:06:41,087 INFO [train.py:1114] (0/4) Epoch 4, batch 550, loss[loss=0.3416, simple_loss=0.3581, pruned_loss=0.1181, ctc_loss=0.2225, over 19276.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3454, pruned_loss=0.109, ctc_loss=0.2042, over 3609344.55 frames. ], batch size: 71, lr: 3.24e-02, grad_scale: 16.0 +2024-08-25 06:06:44,788 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.027e+02 2.416e+02 2.881e+02 5.051e+02, threshold=4.833e+02, percent-clipped=1.0 +2024-08-25 06:06:45,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten.whitening_limit, batch_count=42762.666666666664, ans=15.0 +2024-08-25 06:06:57,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42816.0, ans=0.125 +2024-08-25 06:07:15,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=42869.333333333336, ans=0.0 +2024-08-25 06:07:20,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42922.666666666664, ans=0.125 +2024-08-25 06:07:49,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43029.333333333336, ans=0.1 +2024-08-25 06:07:50,622 INFO [train.py:1114] (0/4) Epoch 4, batch 600, loss[loss=0.3331, simple_loss=0.3518, pruned_loss=0.1154, ctc_loss=0.2091, over 19390.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.3444, pruned_loss=0.1079, ctc_loss=0.2024, over 3666017.55 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 16.0 +2024-08-25 06:07:55,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=43029.333333333336, ans=0.025 +2024-08-25 06:08:27,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=43189.333333333336, ans=0.1 +2024-08-25 06:08:29,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43189.333333333336, ans=0.1 +2024-08-25 06:08:34,775 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. limit=15.0 +2024-08-25 06:08:39,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.64 vs. 
limit=15.0 +2024-08-25 06:08:58,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=43242.666666666664, ans=0.125 +2024-08-25 06:09:00,685 INFO [train.py:1114] (0/4) Epoch 4, batch 650, loss[loss=0.287, simple_loss=0.3291, pruned_loss=0.08963, ctc_loss=0.1643, over 19764.00 frames. ], tot_loss[loss=0.3183, simple_loss=0.3425, pruned_loss=0.107, ctc_loss=0.2006, over 3716027.46 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 16.0 +2024-08-25 06:09:15,855 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.456e+02 2.140e+02 2.544e+02 3.023e+02 7.017e+02, threshold=5.088e+02, percent-clipped=9.0 +2024-08-25 06:09:32,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=43349.333333333336, ans=0.125 +2024-08-25 06:09:48,397 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0 +2024-08-25 06:09:59,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43456.0, ans=0.0 +2024-08-25 06:10:08,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=43509.333333333336, ans=0.2 +2024-08-25 06:10:13,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.whiten.whitening_limit, batch_count=43509.333333333336, ans=15.0 +2024-08-25 06:10:18,930 INFO [train.py:1114] (0/4) Epoch 4, batch 700, loss[loss=0.2821, simple_loss=0.3135, pruned_loss=0.09129, ctc_loss=0.1703, over 19712.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3431, pruned_loss=0.1069, ctc_loss=0.2005, over 3748433.07 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 16.0 +2024-08-25 06:10:25,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=43562.666666666664, ans=0.2 +2024-08-25 06:10:27,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=43562.666666666664, ans=0.125 +2024-08-25 06:10:50,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=43669.333333333336, ans=0.125 +2024-08-25 06:10:53,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.75 vs. limit=22.5 +2024-08-25 06:11:09,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=43776.0, ans=0.00135304347826087 +2024-08-25 06:11:14,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=43776.0, ans=0.00135304347826087 +2024-08-25 06:11:22,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=43829.333333333336, ans=0.125 +2024-08-25 06:11:23,852 INFO [train.py:1114] (0/4) Epoch 4, batch 750, loss[loss=0.3647, simple_loss=0.3833, pruned_loss=0.1258, ctc_loss=0.236, over 19497.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3426, pruned_loss=0.1062, ctc_loss=0.1993, over 3775360.00 frames. 
], batch size: 54, lr: 3.22e-02, grad_scale: 16.0 +2024-08-25 06:11:28,686 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.530e+02 2.141e+02 2.481e+02 2.931e+02 4.472e+02, threshold=4.962e+02, percent-clipped=0.0 +2024-08-25 06:11:36,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=43829.333333333336, ans=10.0 +2024-08-25 06:11:46,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=43882.666666666664, ans=0.125 +2024-08-25 06:11:47,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=43882.666666666664, ans=0.025 +2024-08-25 06:11:47,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-08-25 06:11:53,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=43936.0, ans=0.0 +2024-08-25 06:12:00,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=43936.0, ans=0.125 +2024-08-25 06:12:21,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.75 vs. limit=15.0 +2024-08-25 06:12:22,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=44042.666666666664, ans=0.5 +2024-08-25 06:12:24,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=44042.666666666664, ans=0.125 +2024-08-25 06:12:29,307 INFO [train.py:1114] (0/4) Epoch 4, batch 800, loss[loss=0.3216, simple_loss=0.3356, pruned_loss=0.1138, ctc_loss=0.2004, over 19439.00 frames. ], tot_loss[loss=0.3169, simple_loss=0.3424, pruned_loss=0.106, ctc_loss=0.1988, over 3795552.55 frames. ], batch size: 48, lr: 3.21e-02, grad_scale: 32.0 +2024-08-25 06:12:57,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.21 vs. limit=10.0 +2024-08-25 06:13:02,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=44202.666666666664, ans=0.1 +2024-08-25 06:13:13,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=44256.0, ans=0.0 +2024-08-25 06:13:28,041 INFO [train.py:1114] (0/4) Epoch 4, batch 850, loss[loss=0.3406, simple_loss=0.3707, pruned_loss=0.1135, ctc_loss=0.2089, over 19643.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.342, pruned_loss=0.1059, ctc_loss=0.1983, over 3815313.62 frames. 
], batch size: 59, lr: 3.21e-02, grad_scale: 32.0 +2024-08-25 06:13:29,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=44362.666666666664, ans=0.02 +2024-08-25 06:13:31,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.074e+02 2.402e+02 2.888e+02 5.555e+02, threshold=4.804e+02, percent-clipped=1.0 +2024-08-25 06:13:33,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=44362.666666666664, ans=0.09899494936611666 +2024-08-25 06:13:38,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=44416.0, ans=0.125 +2024-08-25 06:13:54,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=44469.333333333336, ans=0.125 +2024-08-25 06:14:32,268 INFO [train.py:1114] (0/4) Epoch 4, batch 900, loss[loss=0.3076, simple_loss=0.3247, pruned_loss=0.1042, ctc_loss=0.2056, over 19384.00 frames. ], tot_loss[loss=0.3184, simple_loss=0.3428, pruned_loss=0.1069, ctc_loss=0.2002, over 3818849.62 frames. ], batch size: 48, lr: 3.20e-02, grad_scale: 32.0 +2024-08-25 06:14:38,488 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:14:38,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=44629.333333333336, ans=0.0 +2024-08-25 06:14:49,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=44682.666666666664, ans=0.0011559420289855085 +2024-08-25 06:14:53,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.93 vs. limit=15.0 +2024-08-25 06:15:06,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44736.0, ans=0.1 +2024-08-25 06:15:07,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.67 vs. limit=15.0 +2024-08-25 06:15:08,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.77 vs. limit=15.0 +2024-08-25 06:15:20,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.90 vs. limit=10.0 +2024-08-25 06:15:38,567 INFO [train.py:1114] (0/4) Epoch 4, batch 950, loss[loss=0.2611, simple_loss=0.3014, pruned_loss=0.07938, ctc_loss=0.1554, over 19495.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3434, pruned_loss=0.1076, ctc_loss=0.2018, over 3820486.04 frames. 
], batch size: 49, lr: 3.20e-02, grad_scale: 32.0 +2024-08-25 06:15:42,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.101e+02 2.364e+02 2.735e+02 6.196e+02, threshold=4.728e+02, percent-clipped=2.0 +2024-08-25 06:16:06,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=45002.666666666664, ans=0.125 +2024-08-25 06:16:14,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45002.666666666664, ans=0.125 +2024-08-25 06:16:42,343 INFO [train.py:1114] (0/4) Epoch 4, batch 1000, loss[loss=0.2972, simple_loss=0.3235, pruned_loss=0.09899, ctc_loss=0.1822, over 19848.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3438, pruned_loss=0.1077, ctc_loss=0.2019, over 3816355.45 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 32.0 +2024-08-25 06:16:48,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.10 vs. limit=6.0 +2024-08-25 06:17:06,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=45216.0, ans=0.02 +2024-08-25 06:17:13,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45216.0, ans=0.1 +2024-08-25 06:17:31,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=45269.333333333336, ans=0.125 +2024-08-25 06:18:04,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.63 vs. limit=15.0 +2024-08-25 06:18:10,601 INFO [train.py:1114] (0/4) Epoch 4, batch 1050, loss[loss=0.3683, simple_loss=0.3843, pruned_loss=0.1294, ctc_loss=0.2343, over 19838.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3423, pruned_loss=0.1064, ctc_loss=0.1993, over 3823183.60 frames. ], batch size: 57, lr: 3.19e-02, grad_scale: 16.0 +2024-08-25 06:18:23,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=45429.333333333336, ans=0.025 +2024-08-25 06:18:24,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=45429.333333333336, ans=0.125 +2024-08-25 06:18:26,180 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.982e+02 2.200e+02 2.634e+02 5.388e+02, threshold=4.401e+02, percent-clipped=1.0 +2024-08-25 06:18:35,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.46 vs. 
limit=15.0 +2024-08-25 06:18:36,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=45482.666666666664, ans=0.0009820289855072464 +2024-08-25 06:18:46,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=45536.0, ans=0.125 +2024-08-25 06:19:01,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=45589.333333333336, ans=0.125 +2024-08-25 06:19:10,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=45589.333333333336, ans=0.0009588405797101435 +2024-08-25 06:19:18,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45589.333333333336, ans=0.125 +2024-08-25 06:19:26,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.04 vs. limit=22.5 +2024-08-25 06:19:36,328 INFO [train.py:1114] (0/4) Epoch 4, batch 1100, loss[loss=0.2892, simple_loss=0.3259, pruned_loss=0.09189, ctc_loss=0.1717, over 19598.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3418, pruned_loss=0.106, ctc_loss=0.1986, over 3830731.93 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-25 06:19:55,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45749.333333333336, ans=0.1 +2024-08-25 06:20:12,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45802.666666666664, ans=0.125 +2024-08-25 06:20:52,238 INFO [train.py:1114] (0/4) Epoch 4, batch 1150, loss[loss=0.3182, simple_loss=0.3348, pruned_loss=0.1096, ctc_loss=0.2056, over 19591.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.342, pruned_loss=0.1066, ctc_loss=0.1999, over 3829434.88 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-25 06:20:57,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.742e+02 2.122e+02 2.390e+02 2.706e+02 4.199e+02, threshold=4.779e+02, percent-clipped=0.0 +2024-08-25 06:21:04,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=45962.666666666664, ans=0.0008776811594202894 +2024-08-25 06:21:15,262 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=5.630e-03 +2024-08-25 06:21:28,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=46069.333333333336, ans=0.025 +2024-08-25 06:21:28,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=46069.333333333336, ans=0.1 +2024-08-25 06:21:35,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=46122.666666666664, ans=0.05 +2024-08-25 06:22:00,000 INFO [train.py:1114] (0/4) Epoch 4, batch 1200, loss[loss=0.3466, simple_loss=0.3591, pruned_loss=0.1217, ctc_loss=0.2269, over 19839.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3429, pruned_loss=0.1071, ctc_loss=0.201, over 3825559.34 frames. 
], batch size: 57, lr: 3.17e-02, grad_scale: 32.0 +2024-08-25 06:22:07,419 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=2.539e-03 +2024-08-25 06:22:14,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=46229.333333333336, ans=0.125 +2024-08-25 06:22:22,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=46282.666666666664, ans=0.125 +2024-08-25 06:22:44,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=46389.333333333336, ans=0.000784927536231883 +2024-08-25 06:23:10,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.78 vs. limit=12.0 +2024-08-25 06:23:19,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=46442.666666666664, ans=0.0 +2024-08-25 06:23:21,337 INFO [train.py:1114] (0/4) Epoch 4, batch 1250, loss[loss=0.3314, simple_loss=0.3564, pruned_loss=0.1129, ctc_loss=0.2013, over 19526.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.343, pruned_loss=0.1067, ctc_loss=0.2, over 3843759.16 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 32.0 +2024-08-25 06:23:26,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.962e+02 2.225e+02 2.468e+02 3.508e+02, threshold=4.451e+02, percent-clipped=0.0 +2024-08-25 06:23:40,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46549.333333333336, ans=0.125 +2024-08-25 06:23:48,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=46549.333333333336, ans=0.125 +2024-08-25 06:23:55,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=46549.333333333336, ans=0.2 +2024-08-25 06:24:04,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.10 vs. limit=15.0 +2024-08-25 06:24:24,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46656.0, ans=0.125 +2024-08-25 06:24:25,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=46656.0, ans=0.025 +2024-08-25 06:24:34,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=46709.333333333336, ans=0.0 +2024-08-25 06:24:37,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=46709.333333333336, ans=0.125 +2024-08-25 06:24:48,933 INFO [train.py:1114] (0/4) Epoch 4, batch 1300, loss[loss=0.366, simple_loss=0.3856, pruned_loss=0.1264, ctc_loss=0.234, over 19010.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3421, pruned_loss=0.1063, ctc_loss=0.1992, over 3847381.00 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0 +2024-08-25 06:24:52,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.44 vs. 
limit=12.0 +2024-08-25 06:25:03,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.10 vs. limit=10.0 +2024-08-25 06:25:11,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=46869.333333333336, ans=0.125 +2024-08-25 06:25:22,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.95 vs. limit=22.5 +2024-08-25 06:25:28,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=46922.666666666664, ans=0.2 +2024-08-25 06:25:52,879 INFO [train.py:1114] (0/4) Epoch 4, batch 1350, loss[loss=0.3269, simple_loss=0.3471, pruned_loss=0.1117, ctc_loss=0.208, over 19764.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.3413, pruned_loss=0.1055, ctc_loss=0.1975, over 3856211.67 frames. ], batch size: 54, lr: 3.16e-02, grad_scale: 32.0 +2024-08-25 06:25:55,407 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:26:07,748 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 2.269e+02 2.560e+02 3.229e+02 4.886e+02, threshold=5.120e+02, percent-clipped=5.0 +2024-08-25 06:26:28,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=47029.333333333336, ans=0.125 +2024-08-25 06:26:41,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.40 vs. limit=15.0 +2024-08-25 06:27:04,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=47189.333333333336, ans=0.0006110144927536226 +2024-08-25 06:27:16,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=12.0 +2024-08-25 06:27:20,738 INFO [train.py:1114] (0/4) Epoch 4, batch 1400, loss[loss=0.2757, simple_loss=0.3057, pruned_loss=0.08884, ctc_loss=0.1704, over 19662.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3411, pruned_loss=0.1053, ctc_loss=0.1977, over 3862713.70 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0 +2024-08-25 06:27:50,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.06 vs. limit=5.0 +2024-08-25 06:28:13,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=47456.0, ans=0.125 +2024-08-25 06:28:19,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=47456.0, ans=0.125 +2024-08-25 06:28:25,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-08-25 06:28:32,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=47509.333333333336, ans=0.0 +2024-08-25 06:28:39,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.21 vs. 
limit=22.5 +2024-08-25 06:28:43,704 INFO [train.py:1114] (0/4) Epoch 4, batch 1450, loss[loss=0.318, simple_loss=0.3457, pruned_loss=0.1061, ctc_loss=0.1954, over 19662.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3423, pruned_loss=0.106, ctc_loss=0.1991, over 3860652.50 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0 +2024-08-25 06:28:48,586 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.026e+02 2.327e+02 2.659e+02 4.329e+02, threshold=4.654e+02, percent-clipped=0.0 +2024-08-25 06:29:44,346 INFO [train.py:1114] (0/4) Epoch 4, batch 1500, loss[loss=0.3245, simple_loss=0.3583, pruned_loss=0.1055, ctc_loss=0.1991, over 19581.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3425, pruned_loss=0.1057, ctc_loss=0.1987, over 3859580.36 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 16.0 +2024-08-25 06:30:11,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.18 vs. limit=6.0 +2024-08-25 06:30:26,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=47989.333333333336, ans=0.0 +2024-08-25 06:30:29,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47989.333333333336, ans=0.125 +2024-08-25 06:30:38,473 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.61 vs. limit=15.0 +2024-08-25 06:31:38,007 INFO [train.py:1114] (0/4) Epoch 4, batch 1550, loss[loss=0.3502, simple_loss=0.3633, pruned_loss=0.1258, ctc_loss=0.2139, over 19593.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.3425, pruned_loss=0.106, ctc_loss=0.199, over 3846078.25 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 16.0 +2024-08-25 06:31:49,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=48096.0, ans=15.0 +2024-08-25 06:31:49,984 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 2.013e+02 2.262e+02 2.770e+02 1.090e+03, threshold=4.525e+02, percent-clipped=1.0 +2024-08-25 06:32:09,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. limit=15.0 +2024-08-25 06:32:11,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=48149.333333333336, ans=0.125 +2024-08-25 06:32:20,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.58 vs. limit=15.0 +2024-08-25 06:32:22,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=48202.666666666664, ans=0.125 +2024-08-25 06:32:31,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=48256.0, ans=0.125 +2024-08-25 06:32:39,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.07 vs. 
limit=15.0
+2024-08-25 06:32:44,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=48309.333333333336, ans=0.0
+2024-08-25 06:33:08,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=48309.333333333336, ans=0.025
+2024-08-25 06:33:09,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48309.333333333336, ans=0.1
+2024-08-25 06:33:18,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=48309.333333333336, ans=0.0
+2024-08-25 06:33:26,250 INFO [train.py:1114] (0/4) Epoch 4, batch 1600, loss[loss=0.3325, simple_loss=0.3598, pruned_loss=0.1109, ctc_loss=0.209, over 19838.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.342, pruned_loss=0.1057, ctc_loss=0.1985, over 3836386.44 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0
+2024-08-25 06:33:29,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=48362.666666666664, ans=0.2
+2024-08-25 06:33:36,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=48362.666666666664, ans=0.95
+2024-08-25 06:33:36,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=48362.666666666664, ans=0.125
+2024-08-25 06:33:50,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=48416.0, ans=0.125
+2024-08-25 06:34:05,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=48469.333333333336, ans=0.125
+2024-08-25 06:35:15,016 INFO [train.py:1114] (0/4) Epoch 4, batch 1650, loss[loss=0.3374, simple_loss=0.3616, pruned_loss=0.1122, ctc_loss=0.2221, over 19659.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3416, pruned_loss=0.1055, ctc_loss=0.198, over 3833121.08 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0
+2024-08-25 06:35:21,186 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.567e+02 2.079e+02 2.506e+02 2.996e+02 5.422e+02, threshold=5.011e+02, percent-clipped=2.0
+2024-08-25 06:36:11,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=48736.0, ans=0.02
+2024-08-25 06:36:37,433 INFO [train.py:1114] (0/4) Epoch 4, batch 1700, loss[loss=0.272, simple_loss=0.3037, pruned_loss=0.08725, ctc_loss=0.1646, over 19658.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3411, pruned_loss=0.1051, ctc_loss=0.1974, over 3847142.73 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0
+2024-08-25 06:36:42,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48896.0, ans=0.125
+2024-08-25 06:36:43,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48896.0, ans=0.125
+2024-08-25 06:37:26,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49002.666666666664, ans=0.1
+2024-08-25 06:38:07,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.10 vs. limit=22.5
+2024-08-25 06:38:12,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=49109.333333333336, ans=0.0
+2024-08-25 06:38:13,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.86 vs. limit=10.0
+2024-08-25 06:38:25,915 INFO [train.py:1114] (0/4) Epoch 4, batch 1750, loss[loss=0.2993, simple_loss=0.3182, pruned_loss=0.1016, ctc_loss=0.1931, over 19657.00 frames. ], tot_loss[loss=0.3135, simple_loss=0.3401, pruned_loss=0.1043, ctc_loss=0.196, over 3852663.18 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0
+2024-08-25 06:38:33,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.628e+02 1.987e+02 2.278e+02 2.713e+02 5.908e+02, threshold=4.555e+02, percent-clipped=1.0
+2024-08-25 06:38:41,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=49216.0, ans=0.0
+2024-08-25 06:38:50,098 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.66 vs. limit=15.0
+2024-08-25 06:39:13,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=49322.666666666664, ans=0.125
+2024-08-25 06:39:31,718 INFO [train.py:1114] (0/4) Epoch 4, batch 1800, loss[loss=0.3016, simple_loss=0.341, pruned_loss=0.09538, ctc_loss=0.1787, over 19615.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.3411, pruned_loss=0.1055, ctc_loss=0.1979, over 3852526.50 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0
+2024-08-25 06:39:45,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49482.666666666664, ans=0.1
+2024-08-25 06:40:21,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=49536.0, ans=0.00010086956521739195
+2024-08-25 06:40:26,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=49536.0, ans=0.0
+2024-08-25 06:40:54,709 INFO [train.py:1114] (0/4) Epoch 4, batch 1850, loss[loss=0.3068, simple_loss=0.344, pruned_loss=0.09848, ctc_loss=0.1817, over 19599.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.3405, pruned_loss=0.1049, ctc_loss=0.197, over 3856120.50 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0
+2024-08-25 06:41:01,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 2.149e+02 2.307e+02 2.574e+02 4.619e+02, threshold=4.614e+02, percent-clipped=1.0
+2024-08-25 06:41:03,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=49696.0, ans=0.035
+2024-08-25 06:41:20,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=49802.666666666664, ans=0.1
+2024-08-25 06:41:23,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=49802.666666666664, ans=0.0
+2024-08-25 06:41:48,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=49909.333333333336, ans=0.125
+2024-08-25 06:41:49,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49909.333333333336, ans=0.125
+2024-08-25 06:41:50,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.84 vs. limit=15.0
+2024-08-25 06:41:59,173 INFO [train.py:1114] (0/4) Epoch 4, batch 1900, loss[loss=0.2977, simple_loss=0.3454, pruned_loss=0.09086, ctc_loss=0.1706, over 19647.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3411, pruned_loss=0.105, ctc_loss=0.197, over 3860972.37 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 32.0
+2024-08-25 06:42:47,444 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:42:57,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=50069.333333333336, ans=0.125
+2024-08-25 06:43:18,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=15.0
+2024-08-25 06:43:21,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=50176.0, ans=0.07
+2024-08-25 06:43:38,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=50229.333333333336, ans=0.125
+2024-08-25 06:43:39,886 INFO [train.py:1114] (0/4) Epoch 4, batch 1950, loss[loss=0.313, simple_loss=0.3389, pruned_loss=0.1047, ctc_loss=0.1945, over 19588.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3417, pruned_loss=0.1049, ctc_loss=0.1965, over 3869830.13 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 32.0
+2024-08-25 06:43:45,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.759e+02 2.065e+02 2.259e+02 2.635e+02 4.732e+02, threshold=4.517e+02, percent-clipped=1.0
+2024-08-25 06:44:02,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=50336.0, ans=0.035
+2024-08-25 06:44:24,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=50442.666666666664, ans=0.025
+2024-08-25 06:44:29,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=50442.666666666664, ans=0.125
+2024-08-25 06:44:31,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.96 vs. limit=6.0
+2024-08-25 06:44:41,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=50496.0, ans=0.0
+2024-08-25 06:44:48,825 INFO [train.py:1114] (0/4) Epoch 4, batch 2000, loss[loss=0.2959, simple_loss=0.3113, pruned_loss=0.1029, ctc_loss=0.187, over 19654.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3424, pruned_loss=0.1056, ctc_loss=0.1975, over 3855683.96 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0
+2024-08-25 06:44:51,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.82 vs. limit=15.0
+2024-08-25 06:45:06,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0
+2024-08-25 06:45:06,286 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=6.0
+2024-08-25 06:45:09,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=50549.333333333336, ans=0.025
+2024-08-25 06:45:18,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50549.333333333336, ans=0.1
+2024-08-25 06:45:40,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.16 vs. limit=15.0
+2024-08-25 06:45:41,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.28 vs. limit=15.0
+2024-08-25 06:45:46,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.41 vs. limit=15.0
+2024-08-25 06:45:51,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.15 vs. limit=22.5
+2024-08-25 06:46:34,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=50762.666666666664, ans=0.125
+2024-08-25 06:46:35,029 INFO [train.py:1114] (0/4) Epoch 4, batch 2050, loss[loss=0.2866, simple_loss=0.3019, pruned_loss=0.09892, ctc_loss=0.1836, over 19719.00 frames. ], tot_loss[loss=0.3153, simple_loss=0.3409, pruned_loss=0.1054, ctc_loss=0.1972, over 3851342.85 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 32.0
+2024-08-25 06:46:38,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=50762.666666666664, ans=0.0
+2024-08-25 06:46:45,630 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 2.046e+02 2.338e+02 2.720e+02 4.537e+02, threshold=4.675e+02, percent-clipped=1.0
+2024-08-25 06:46:52,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.76 vs. limit=15.0
+2024-08-25 06:47:14,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.54 vs. limit=15.0
+2024-08-25 06:47:19,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=50922.666666666664, ans=0.0
+2024-08-25 06:47:24,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=50922.666666666664, ans=0.0
+2024-08-25 06:47:31,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=50976.0, ans=10.0
+2024-08-25 06:47:47,560 INFO [train.py:1114] (0/4) Epoch 4, batch 2100, loss[loss=0.314, simple_loss=0.3391, pruned_loss=0.1063, ctc_loss=0.1907, over 19770.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3397, pruned_loss=0.1042, ctc_loss=0.195, over 3857773.24 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0
+2024-08-25 06:48:22,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=51082.666666666664, ans=0.0
+2024-08-25 06:48:22,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.00 vs. limit=15.0
+2024-08-25 06:48:35,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.17 vs. limit=22.5
+2024-08-25 06:48:44,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51136.0, ans=0.1
+2024-08-25 06:49:45,534 INFO [train.py:1114] (0/4) Epoch 4, batch 2150, loss[loss=0.3037, simple_loss=0.331, pruned_loss=0.1009, ctc_loss=0.1866, over 19590.00 frames. ], tot_loss[loss=0.3112, simple_loss=0.3384, pruned_loss=0.1033, ctc_loss=0.1936, over 3868264.41 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0
+2024-08-25 06:49:54,449 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.035e+02 2.305e+02 2.639e+02 4.596e+02, threshold=4.610e+02, percent-clipped=0.0
+2024-08-25 06:50:16,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=51349.333333333336, ans=0.2
+2024-08-25 06:50:45,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=51402.666666666664, ans=0.0
+2024-08-25 06:50:50,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51456.0, ans=0.125
+2024-08-25 06:50:52,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=51456.0, ans=0.07
+2024-08-25 06:50:58,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=51456.0, ans=0.2
+2024-08-25 06:51:15,390 INFO [train.py:1114] (0/4) Epoch 4, batch 2200, loss[loss=0.2837, simple_loss=0.3281, pruned_loss=0.08671, ctc_loss=0.1644, over 19592.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3383, pruned_loss=0.1035, ctc_loss=0.1938, over 3867008.68 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0
+2024-08-25 06:51:30,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=51616.0, ans=0.015
+2024-08-25 06:52:05,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=51722.666666666664, ans=0.1
+2024-08-25 06:52:07,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51722.666666666664, ans=0.125
+2024-08-25 06:52:18,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=51776.0, ans=0.2
+2024-08-25 06:52:21,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.08 vs. limit=22.5
+2024-08-25 06:52:24,901 INFO [train.py:1114] (0/4) Epoch 4, batch 2250, loss[loss=0.3172, simple_loss=0.3483, pruned_loss=0.1024, ctc_loss=0.2028, over 19623.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3389, pruned_loss=0.1037, ctc_loss=0.1944, over 3867747.12 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0
+2024-08-25 06:52:27,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=51829.333333333336, ans=0.0
+2024-08-25 06:52:31,993 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.164e+02 2.622e+02 3.263e+02 6.940e+02, threshold=5.245e+02, percent-clipped=2.0
+2024-08-25 06:52:35,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=51829.333333333336, ans=0.2
+2024-08-25 06:52:41,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51882.666666666664, ans=0.125
+2024-08-25 06:52:48,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=51936.0, ans=0.2
+2024-08-25 06:53:04,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=51989.333333333336, ans=0.125
+2024-08-25 06:53:04,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=51989.333333333336, ans=0.07
+2024-08-25 06:53:04,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=51989.333333333336, ans=0.0
+2024-08-25 06:53:27,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=18.54 vs. limit=15.0
+2024-08-25 06:53:30,941 INFO [train.py:1114] (0/4) Epoch 4, batch 2300, loss[loss=0.2924, simple_loss=0.3252, pruned_loss=0.09512, ctc_loss=0.173, over 19504.00 frames. ], tot_loss[loss=0.3112, simple_loss=0.3378, pruned_loss=0.1035, ctc_loss=0.1942, over 3860995.81 frames. ], batch size: 49, lr: 3.06e-02, grad_scale: 32.0
+2024-08-25 06:53:55,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=52149.333333333336, ans=0.0
+2024-08-25 06:53:57,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=52202.666666666664, ans=0.025
+2024-08-25 06:54:06,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52202.666666666664, ans=0.125
+2024-08-25 06:54:06,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=52202.666666666664, ans=0.0
+2024-08-25 06:54:32,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=52309.333333333336, ans=0.05
+2024-08-25 06:54:43,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=52362.666666666664, ans=0.2
+2024-08-25 06:54:53,358 INFO [train.py:1114] (0/4) Epoch 4, batch 2350, loss[loss=0.3269, simple_loss=0.3567, pruned_loss=0.1093, ctc_loss=0.1961, over 19660.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3377, pruned_loss=0.1032, ctc_loss=0.1933, over 3864022.99 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0
+2024-08-25 06:54:58,708 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.121e+02 2.497e+02 3.048e+02 4.745e+02, threshold=4.995e+02, percent-clipped=0.0
+2024-08-25 06:55:01,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=52362.666666666664, ans=0.0
+2024-08-25 06:55:04,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=52362.666666666664, ans=0.125
+2024-08-25 06:55:20,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=52416.0, ans=0.0
+2024-08-25 06:55:21,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=52416.0, ans=0.0
+2024-08-25 06:55:38,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=52469.333333333336, ans=0.0
+2024-08-25 07:07:21,821 INFO [train.py:1114] (0/4) Epoch 4, batch 2400, loss[loss=0.3588, simple_loss=0.3738, pruned_loss=0.1235, ctc_loss=0.2417, over 19401.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3399, pruned_loss=0.1041, ctc_loss=0.1948, over 3858940.05 frames. ], batch size: 67, lr: 3.05e-02, grad_scale: 32.0
+2024-08-25 07:10:22,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.59 vs. limit=12.0
+2024-08-25 07:18:32,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=52736.0, ans=0.2
+2024-08-25 07:18:32,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=52736.0, ans=10.0
+2024-08-25 07:20:53,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=15.0
+2024-08-25 07:34:58,794 INFO [train.py:1114] (0/4) Epoch 4, batch 2450, loss[loss=0.4387, simple_loss=0.3985, pruned_loss=0.1719, ctc_loss=0.338, over 13118.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3455, pruned_loss=0.109, ctc_loss=0.2042, over 3734912.68 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0
+2024-08-25 07:36:27,110 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.096e+02 2.355e+02 2.735e+02 5.246e+02, threshold=4.710e+02, percent-clipped=1.0
+2024-08-25 07:39:43,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52949.333333333336, ans=0.125
+2024-08-25 07:42:30,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=53002.666666666664, ans=0.0
+2024-08-25 07:42:30,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.31 vs. limit=22.5
+2024-08-25 07:44:03,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=53056.0, ans=0.025
+2024-08-25 07:44:48,701 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-4.pt
+2024-08-25 07:46:30,883 INFO [train.py:1114] (0/4) Epoch 5, batch 0, loss[loss=0.2965, simple_loss=0.3197, pruned_loss=0.1001, ctc_loss=0.1823, over 19423.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3197, pruned_loss=0.1001, ctc_loss=0.1823, over 19423.00 frames. ], batch size: 48, lr: 2.83e-02, grad_scale: 32.0
+2024-08-25 07:46:30,884 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-25 07:49:02,120 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.2543, simple_loss=0.3259, pruned_loss=0.06691, ctc_loss=0.1221, over 944034.00 frames.
+2024-08-25 07:49:02,120 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB
+2024-08-25 07:51:02,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.94 vs. limit=15.0
+2024-08-25 07:54:37,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=53157.333333333336, ans=0.125
+2024-08-25 07:55:21,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.48 vs. limit=15.0
+2024-08-25 07:58:52,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=53210.666666666664, ans=0.0
+2024-08-25 07:59:15,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=53264.0, ans=10.0
+2024-08-25 07:59:20,935 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=7.676e-02
+2024-08-25 07:59:21,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.80 vs. limit=22.5
+2024-08-25 07:59:25,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53264.0, ans=0.0
+2024-08-25 08:01:56,938 INFO [train.py:1114] (0/4) Epoch 5, batch 50, loss[loss=0.2475, simple_loss=0.3007, pruned_loss=0.07098, ctc_loss=0.1308, over 19753.00 frames. ], tot_loss[loss=0.3214, simple_loss=0.3454, pruned_loss=0.1078, ctc_loss=0.2042, over 844155.08 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0
+2024-08-25 08:03:41,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.95 vs. limit=15.0
+2024-08-25 08:03:51,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 1.984e+02 2.202e+02 2.522e+02 4.045e+02, threshold=4.404e+02, percent-clipped=0.0
+2024-08-25 08:04:16,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=53424.0, ans=0.125
+2024-08-25 08:05:13,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0
+2024-08-25 08:06:45,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-25 08:07:20,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=53584.0, ans=0.0
+2024-08-25 08:07:22,865 INFO [train.py:1114] (0/4) Epoch 5, batch 100, loss[loss=0.2778, simple_loss=0.3138, pruned_loss=0.0884, ctc_loss=0.1625, over 19724.00 frames. ], tot_loss[loss=0.3169, simple_loss=0.3432, pruned_loss=0.1054, ctc_loss=0.1992, over 1498403.14 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0
+2024-08-25 08:08:21,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=53690.666666666664, ans=0.5
+2024-08-25 08:08:31,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=53744.0, ans=0.125
+2024-08-25 08:08:44,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=53744.0, ans=0.125
+2024-08-25 08:10:03,747 INFO [train.py:1114] (0/4) Epoch 5, batch 150, loss[loss=0.3018, simple_loss=0.3147, pruned_loss=0.1055, ctc_loss=0.1951, over 19741.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.3384, pruned_loss=0.102, ctc_loss=0.1926, over 2028143.16 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0
+2024-08-25 08:10:19,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=53904.0, ans=0.125
+2024-08-25 08:10:40,327 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 2.115e+02 2.389e+02 2.764e+02 4.531e+02, threshold=4.777e+02, percent-clipped=1.0
+2024-08-25 08:11:08,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.35 vs. limit=15.0
+2024-08-25 08:12:01,138 INFO [train.py:1114] (0/4) Epoch 5, batch 200, loss[loss=0.3452, simple_loss=0.3616, pruned_loss=0.1207, ctc_loss=0.2181, over 18194.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3366, pruned_loss=0.1013, ctc_loss=0.191, over 2436360.97 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0
+2024-08-25 08:12:04,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.53 vs. limit=15.0
+2024-08-25 08:13:53,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=54170.666666666664, ans=0.125
+2024-08-25 08:13:53,585 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:13:53,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=54170.666666666664, ans=0.2
+2024-08-25 08:13:54,899 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:13:57,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=54170.666666666664, ans=0.125
+2024-08-25 08:14:32,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=54224.0, ans=0.125
+2024-08-25 08:14:40,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=15.0
+2024-08-25 08:14:43,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=54277.333333333336, ans=0.0
+2024-08-25 08:14:56,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=15.0
+2024-08-25 08:15:40,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.11 vs. limit=22.5
+2024-08-25 08:15:58,970 INFO [train.py:1114] (0/4) Epoch 5, batch 250, loss[loss=0.3369, simple_loss=0.3571, pruned_loss=0.1151, ctc_loss=0.2165, over 19428.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3356, pruned_loss=0.1007, ctc_loss=0.1901, over 2757087.79 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0
+2024-08-25 08:16:00,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.57 vs. limit=22.5
+2024-08-25 08:16:47,932 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 1.969e+02 2.164e+02 2.373e+02 3.326e+02, threshold=4.328e+02, percent-clipped=0.0
+2024-08-25 08:16:59,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=54544.0, ans=0.025
+2024-08-25 08:17:30,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=54650.666666666664, ans=0.125
+2024-08-25 08:17:32,590 INFO [train.py:1114] (0/4) Epoch 5, batch 300, loss[loss=0.3326, simple_loss=0.36, pruned_loss=0.1104, ctc_loss=0.211, over 19541.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3349, pruned_loss=0.09997, ctc_loss=0.1887, over 3002184.15 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0
+2024-08-25 08:17:49,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=54757.333333333336, ans=0.125
+2024-08-25 08:17:52,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.79 vs. limit=12.0
+2024-08-25 08:18:13,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=54864.0, ans=0.09899494936611666
+2024-08-25 08:18:32,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.33 vs. limit=22.5
+2024-08-25 08:18:37,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54970.666666666664, ans=0.0
+2024-08-25 08:18:38,549 INFO [train.py:1114] (0/4) Epoch 5, batch 350, loss[loss=0.2965, simple_loss=0.3236, pruned_loss=0.09874, ctc_loss=0.1798, over 19769.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.3348, pruned_loss=0.09978, ctc_loss=0.1881, over 3191174.97 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 16.0
+2024-08-25 08:18:42,867 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:18:44,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54970.666666666664, ans=0.125
+2024-08-25 08:18:44,876 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.19 vs. limit=15.0
+2024-08-25 08:18:49,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.67 vs. limit=15.0
+2024-08-25 08:18:51,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=54970.666666666664, ans=0.125
+2024-08-25 08:18:57,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.09 vs. limit=15.0
+2024-08-25 08:18:57,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0
+2024-08-25 08:19:10,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.967e+02 2.265e+02 2.794e+02 4.039e+02, threshold=4.529e+02, percent-clipped=0.0
+2024-08-25 08:19:20,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=55077.333333333336, ans=0.2
+2024-08-25 08:19:24,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=55077.333333333336, ans=0.125
+2024-08-25 08:19:51,989 INFO [train.py:1114] (0/4) Epoch 5, batch 400, loss[loss=0.2838, simple_loss=0.3314, pruned_loss=0.08493, ctc_loss=0.1658, over 19499.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3339, pruned_loss=0.09913, ctc_loss=0.1867, over 3343119.31 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0
+2024-08-25 08:19:58,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.08 vs. limit=22.5
+2024-08-25 08:20:04,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=55290.666666666664, ans=0.125
+2024-08-25 08:20:12,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=55290.666666666664, ans=0.125
+2024-08-25 08:20:13,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=55290.666666666664, ans=0.0
+2024-08-25 08:20:25,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55344.0, ans=0.125
+2024-08-25 08:21:27,043 INFO [train.py:1114] (0/4) Epoch 5, batch 450, loss[loss=0.2916, simple_loss=0.3299, pruned_loss=0.09039, ctc_loss=0.1811, over 19599.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3337, pruned_loss=0.09897, ctc_loss=0.1865, over 3450619.45 frames. ], batch size: 55, lr: 2.79e-02, grad_scale: 32.0
+2024-08-25 08:21:27,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=55504.0, ans=0.0
+2024-08-25 08:21:28,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55504.0, ans=0.125
+2024-08-25 08:21:47,867 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.640e+02 2.008e+02 2.249e+02 2.774e+02 4.428e+02, threshold=4.498e+02, percent-clipped=0.0
+2024-08-25 08:21:51,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=55610.666666666664, ans=0.2
+2024-08-25 08:21:56,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=55610.666666666664, ans=0.2
+2024-08-25 08:22:06,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=55664.0, ans=0.04949747468305833
+2024-08-25 08:22:53,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.46 vs. limit=15.0
+2024-08-25 08:22:58,212 INFO [train.py:1114] (0/4) Epoch 5, batch 500, loss[loss=0.3046, simple_loss=0.3382, pruned_loss=0.09933, ctc_loss=0.181, over 19665.00 frames. ], tot_loss[loss=0.302, simple_loss=0.333, pruned_loss=0.09846, ctc_loss=0.1854, over 3546264.51 frames. ], batch size: 63, lr: 2.79e-02, grad_scale: 32.0
+2024-08-25 08:23:40,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=55824.0, ans=0.0
+2024-08-25 08:24:06,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=55930.666666666664, ans=0.025
+2024-08-25 08:24:11,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=55984.0, ans=0.125
+2024-08-25 08:24:21,996 INFO [train.py:1114] (0/4) Epoch 5, batch 550, loss[loss=0.3305, simple_loss=0.359, pruned_loss=0.1101, ctc_loss=0.2046, over 19337.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.3334, pruned_loss=0.09888, ctc_loss=0.1862, over 3609162.31 frames. ], batch size: 71, lr: 2.78e-02, grad_scale: 32.0
+2024-08-25 08:24:32,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.30 vs. limit=15.0
+2024-08-25 08:24:36,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=56090.666666666664, ans=0.035
+2024-08-25 08:24:37,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=56090.666666666664, ans=0.2
+2024-08-25 08:24:47,068 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.533e+02 1.991e+02 2.247e+02 2.867e+02 6.260e+02, threshold=4.494e+02, percent-clipped=1.0
+2024-08-25 08:24:55,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=56144.0, ans=0.125
+2024-08-25 08:25:37,707 INFO [train.py:1114] (0/4) Epoch 5, batch 600, loss[loss=0.342, simple_loss=0.364, pruned_loss=0.1155, ctc_loss=0.2226, over 19446.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3335, pruned_loss=0.09878, ctc_loss=0.1858, over 3667103.29 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 32.0
+2024-08-25 08:25:38,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56304.0, ans=0.125
+2024-08-25 08:25:40,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=56304.0, ans=0.125
+2024-08-25 08:26:14,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.85 vs. limit=22.5
+2024-08-25 08:26:24,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=56464.0, ans=0.05
+2024-08-25 08:26:34,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=56517.333333333336, ans=0.0
+2024-08-25 08:26:37,476 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.49 vs. limit=22.5
+2024-08-25 08:26:44,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=56570.666666666664, ans=0.2
+2024-08-25 08:26:47,456 INFO [train.py:1114] (0/4) Epoch 5, batch 650, loss[loss=0.3045, simple_loss=0.3423, pruned_loss=0.09768, ctc_loss=0.1782, over 19760.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3323, pruned_loss=0.09785, ctc_loss=0.1842, over 3717918.01 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 32.0
+2024-08-25 08:26:47,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56570.666666666664, ans=0.125
+2024-08-25 08:26:50,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=56570.666666666664, ans=0.07
+2024-08-25 08:26:55,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=56570.666666666664, ans=0.0
+2024-08-25 08:27:13,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.957e+02 2.352e+02 2.685e+02 4.359e+02, threshold=4.704e+02, percent-clipped=0.0
+2024-08-25 08:27:39,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=56730.666666666664, ans=0.2
+2024-08-25 08:28:04,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=56784.0, ans=0.0
+2024-08-25 08:28:10,109 INFO [train.py:1114] (0/4) Epoch 5, batch 700, loss[loss=0.2598, simple_loss=0.3032, pruned_loss=0.07727, ctc_loss=0.1545, over 19720.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3328, pruned_loss=0.09825, ctc_loss=0.1848, over 3749766.63 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 32.0
+2024-08-25 08:28:44,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=56890.666666666664, ans=0.0
+2024-08-25 08:28:59,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56944.0, ans=0.125
+2024-08-25 08:29:21,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=57050.666666666664, ans=0.0
+2024-08-25 08:29:41,353 INFO [train.py:1114] (0/4) Epoch 5, batch 750, loss[loss=0.3353, simple_loss=0.362, pruned_loss=0.1109, ctc_loss=0.217, over 19841.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.3324, pruned_loss=0.09806, ctc_loss=0.1848, over 3776345.09 frames. ], batch size: 55, lr: 2.77e-02, grad_scale: 32.0
+2024-08-25 08:29:43,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.65 vs. limit=15.0
+2024-08-25 08:30:18,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=57157.333333333336, ans=0.025
+2024-08-25 08:30:28,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57157.333333333336, ans=0.125
+2024-08-25 08:30:40,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 2.099e+02 2.472e+02 3.181e+02 5.803e+02, threshold=4.945e+02, percent-clipped=2.0
+2024-08-25 08:30:46,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=57157.333333333336, ans=0.2
+2024-08-25 08:30:58,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=57210.666666666664, ans=0.0
+2024-08-25 08:31:03,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57210.666666666664, ans=0.125
+2024-08-25 08:31:38,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=57317.333333333336, ans=0.0
+2024-08-25 08:32:02,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=57317.333333333336, ans=0.2
+2024-08-25 08:32:05,739 INFO [train.py:1114] (0/4) Epoch 5, batch 800, loss[loss=0.2481, simple_loss=0.2995, pruned_loss=0.07084, ctc_loss=0.1374, over 19430.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.332, pruned_loss=0.09762, ctc_loss=0.1839, over 3797007.90 frames. ], batch size: 48, lr: 2.76e-02, grad_scale: 32.0
+2024-08-25 08:32:23,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=57424.0, ans=0.2
+2024-08-25 08:32:56,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=57477.333333333336, ans=0.125
+2024-08-25 08:33:28,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57584.0, ans=0.125
+2024-08-25 08:33:28,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57584.0, ans=0.1
+2024-08-25 08:33:33,529 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:33:37,763 INFO [train.py:1114] (0/4) Epoch 5, batch 850, loss[loss=0.3167, simple_loss=0.3542, pruned_loss=0.1006, ctc_loss=0.1948, over 19660.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.331, pruned_loss=0.09688, ctc_loss=0.1822, over 3815725.18 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0
+2024-08-25 08:34:03,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=57637.333333333336, ans=0.04949747468305833
+2024-08-25 08:34:13,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0
+2024-08-25 08:34:26,550 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.963e+02 2.197e+02 2.544e+02 4.330e+02, threshold=4.395e+02, percent-clipped=0.0
+2024-08-25 08:35:09,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. limit=5.0
+2024-08-25 08:35:17,374 INFO [train.py:1114] (0/4) Epoch 5, batch 900, loss[loss=0.2455, simple_loss=0.2965, pruned_loss=0.0709, ctc_loss=0.1319, over 19403.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3314, pruned_loss=0.09728, ctc_loss=0.1828, over 3818513.59 frames. ], batch size: 48, lr: 2.75e-02, grad_scale: 32.0
+2024-08-25 08:35:22,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=18.48 vs. limit=15.0
+2024-08-25 08:35:30,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57904.0, ans=0.125
+2024-08-25 08:35:35,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57904.0, ans=0.1
+2024-08-25 08:35:40,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=57957.333333333336, ans=0.025
+2024-08-25 08:35:55,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=58010.666666666664, ans=0.0
+2024-08-25 08:36:07,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=58064.0, ans=0.125
+2024-08-25 08:36:12,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=58064.0, ans=0.125
+2024-08-25 08:36:18,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=58064.0, ans=22.5
+2024-08-25 08:36:41,307 INFO [train.py:1114] (0/4) Epoch 5, batch 950, loss[loss=0.2484, simple_loss=0.2949, pruned_loss=0.07297, ctc_loss=0.1399, over 19477.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.3319, pruned_loss=0.09784, ctc_loss=0.1836, over 3820323.62 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0
+2024-08-25 08:36:52,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=58224.0, ans=0.0
+2024-08-25 08:36:56,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=58224.0, ans=0.0
+2024-08-25 08:36:59,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=58224.0, ans=0.0
+2024-08-25 08:37:00,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=58224.0, ans=0.125
+2024-08-25 08:37:02,453 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.615e+02 2.021e+02 2.236e+02 2.607e+02 6.234e+02, threshold=4.471e+02, percent-clipped=1.0
+2024-08-25 08:37:04,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=58277.333333333336, ans=0.125
+2024-08-25 08:37:20,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.67 vs. limit=15.0
+2024-08-25 08:37:26,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=58330.666666666664, ans=0.5
+2024-08-25 08:37:49,064 INFO [train.py:1114] (0/4) Epoch 5, batch 1000, loss[loss=0.2897, simple_loss=0.3264, pruned_loss=0.09177, ctc_loss=0.1738, over 19842.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3321, pruned_loss=0.09787, ctc_loss=0.1837, over 3816207.50 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0
+2024-08-25 08:38:30,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=58490.666666666664, ans=0.2
+2024-08-25 08:38:41,189 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.06 vs. limit=22.5
+2024-08-25 08:38:51,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58544.0, ans=0.0
+2024-08-25 08:39:20,336 INFO [train.py:1114] (0/4) Epoch 5, batch 1050, loss[loss=0.2969, simple_loss=0.3361, pruned_loss=0.09342, ctc_loss=0.1769, over 19842.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3316, pruned_loss=0.09778, ctc_loss=0.1838, over 3823196.06 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0
+2024-08-25 08:39:41,242 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.929e+02 2.228e+02 2.594e+02 4.447e+02, threshold=4.456e+02, percent-clipped=0.0
+2024-08-25 08:40:30,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=58917.333333333336, ans=0.0
+2024-08-25 08:40:42,231 INFO [train.py:1114] (0/4) Epoch 5, batch 1100, loss[loss=0.2905, simple_loss=0.3206, pruned_loss=0.09472, ctc_loss=0.1772, over 19566.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3305, pruned_loss=0.09702, ctc_loss=0.1823, over 3831654.08 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0
+2024-08-25 08:41:13,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=15.0
+2024-08-25 08:41:16,258 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:41:34,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59130.666666666664, ans=0.1
+2024-08-25 08:42:01,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59184.0, ans=0.1
+2024-08-25 08:42:06,715 INFO [train.py:1114] (0/4) Epoch 5, batch 1150, loss[loss=0.2673, simple_loss=0.3054, pruned_loss=0.08354, ctc_loss=0.155, over 19606.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.331, pruned_loss=0.09739, ctc_loss=0.1827, over 3829563.53 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 32.0
+2024-08-25 08:42:19,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59237.333333333336, ans=0.1
+2024-08-25 08:42:24,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59290.666666666664, ans=0.125
+2024-08-25 08:42:26,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=59290.666666666664, ans=0.2
+2024-08-25 08:42:26,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59290.666666666664, ans=0.1
+2024-08-25 08:42:38,157 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 2.022e+02 2.244e+02 2.636e+02 4.087e+02, threshold=4.489e+02, percent-clipped=0.0
+2024-08-25 08:42:44,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=59344.0, ans=0.125
+2024-08-25 08:42:50,818 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.54 vs. limit=22.5
+2024-08-25 08:42:59,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=59344.0, ans=0.0
+2024-08-25 08:43:22,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59450.666666666664, ans=0.1
+2024-08-25 08:43:33,345 INFO [train.py:1114] (0/4) Epoch 5, batch 1200, loss[loss=0.2815, simple_loss=0.3259, pruned_loss=0.08644, ctc_loss=0.1606, over 19839.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3318, pruned_loss=0.09764, ctc_loss=0.1832, over 3824947.14 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0
+2024-08-25 08:44:01,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=59557.333333333336, ans=0.125
+2024-08-25 08:44:07,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.75 vs. limit=22.5
+2024-08-25 08:44:19,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59610.666666666664, ans=0.125
+2024-08-25 08:44:28,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=59664.0, ans=0.125
+2024-08-25 08:44:30,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=59664.0, ans=0.0
+2024-08-25 08:44:34,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59664.0, ans=0.1
+2024-08-25 08:44:40,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=59717.333333333336, ans=0.0
+2024-08-25 08:44:55,340 INFO [train.py:1114] (0/4) Epoch 5, batch 1250, loss[loss=0.3171, simple_loss=0.3508, pruned_loss=0.1045, ctc_loss=0.1858, over 19557.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3314, pruned_loss=0.09674, ctc_loss=0.1813, over 3842931.87 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0
+2024-08-25 08:45:15,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59824.0, ans=0.1
+2024-08-25 08:45:21,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 1.906e+02 2.098e+02 2.362e+02 4.005e+02, threshold=4.196e+02, percent-clipped=0.0
+2024-08-25 08:45:21,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59824.0, ans=0.1
+2024-08-25 08:45:28,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=59877.333333333336, ans=0.125
+2024-08-25 08:45:29,428 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=7.545e-01
+2024-08-25 08:45:46,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=59930.666666666664, ans=0.0
+2024-08-25 08:46:00,951 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.41 vs. limit=15.0
+2024-08-25 08:46:03,589 INFO [train.py:1114] (0/4) Epoch 5, batch 1300, loss[loss=0.3204, simple_loss=0.348, pruned_loss=0.1078, ctc_loss=0.1935, over 18839.00 frames. ], tot_loss[loss=0.2974, simple_loss=0.3304, pruned_loss=0.09621, ctc_loss=0.1802, over 3846267.59 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0
+2024-08-25 08:46:21,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=60037.333333333336, ans=0.125
+2024-08-25 08:46:34,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60090.666666666664, ans=0.1
+2024-08-25 08:46:37,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.20 vs. limit=15.0
+2024-08-25 08:47:24,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=60250.666666666664, ans=0.025
+2024-08-25 08:47:27,101 INFO [train.py:1114] (0/4) Epoch 5, batch 1350, loss[loss=0.2741, simple_loss=0.3172, pruned_loss=0.08401, ctc_loss=0.1573, over 19751.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.33, pruned_loss=0.09577, ctc_loss=0.1796, over 3857861.13 frames. ], batch size: 54, lr: 2.71e-02, grad_scale: 32.0
+2024-08-25 08:47:52,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. limit=15.0
+2024-08-25 08:47:59,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=60357.333333333336, ans=0.125
+2024-08-25 08:48:06,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.950e+02 2.204e+02 2.621e+02 4.331e+02, threshold=4.409e+02, percent-clipped=1.0
+2024-08-25 08:48:32,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=60464.0, ans=0.2
+2024-08-25 08:48:43,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=60464.0, ans=0.2
+2024-08-25 08:49:14,311 INFO [train.py:1114] (0/4) Epoch 5, batch 1400, loss[loss=0.2479, simple_loss=0.2892, pruned_loss=0.07476, ctc_loss=0.1429, over 19661.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3294, pruned_loss=0.09544, ctc_loss=0.1792, over 3864258.34 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0
+2024-08-25 08:49:14,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=60570.666666666664, ans=0.125
+2024-08-25 08:49:21,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60570.666666666664, ans=0.1
+2024-08-25 08:49:26,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=60570.666666666664, ans=0.125
+2024-08-25 08:49:27,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=60624.0, ans=0.0
+2024-08-25 08:49:33,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0
+2024-08-25 08:49:54,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=60730.666666666664, ans=0.125
+2024-08-25 08:49:54,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60730.666666666664, ans=0.0
+2024-08-25 08:49:55,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=60730.666666666664, ans=0.07
+2024-08-25 09:01:57,444 INFO [train.py:1114] (0/4) Epoch 5, batch 1450, loss[loss=0.2943, simple_loss=0.3342, pruned_loss=0.09308, ctc_loss=0.1708, over 19665.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3306, pruned_loss=0.09584, ctc_loss=0.1801, over 3862292.07 frames. ], batch size: 63, lr: 2.71e-02, grad_scale: 32.0
+2024-08-25 09:08:28,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60837.333333333336, ans=0.1
+2024-08-25 09:12:43,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=60890.666666666664, ans=0.125
+2024-08-25 09:14:29,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 1.942e+02 2.164e+02 2.480e+02 4.633e+02, threshold=4.329e+02, percent-clipped=1.0
+2024-08-25 09:36:13,493 INFO [train.py:1114] (0/4) Epoch 5, batch 1500, loss[loss=0.3, simple_loss=0.3421, pruned_loss=0.09317, ctc_loss=0.179, over 19582.00 frames. ], tot_loss[loss=0.2969, simple_loss=0.3304, pruned_loss=0.09569, ctc_loss=0.1799, over 3862535.21 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0
+2024-08-25 09:44:49,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=61157.333333333336, ans=0.09899494936611666
+2024-08-25 09:52:32,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=61210.666666666664, ans=0.125
+2024-08-25 09:52:39,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=61210.666666666664, ans=0.2
+2024-08-25 09:56:04,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61264.0, ans=0.1
+2024-08-25 10:03:52,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61317.333333333336, ans=0.1
+2024-08-25 10:06:52,325 INFO [train.py:1114] (0/4) Epoch 5, batch 1550, loss[loss=0.3197, simple_loss=0.3468, pruned_loss=0.1063, ctc_loss=0.2005, over 19603.00 frames. ], tot_loss[loss=0.298, simple_loss=0.331, pruned_loss=0.09629, ctc_loss=0.1811, over 3847063.11 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0
+2024-08-25 10:09:18,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61370.666666666664, ans=0.1
+2024-08-25 10:14:47,428 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 1.971e+02 2.260e+02 2.611e+02 5.554e+02, threshold=4.519e+02, percent-clipped=3.0
+2024-08-25 10:16:54,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=61477.333333333336, ans=0.025
+2024-08-25 10:21:24,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=61530.666666666664, ans=0.125
+2024-08-25 10:28:13,746 INFO [train.py:1114] (0/4) Epoch 5, batch 1600, loss[loss=0.3018, simple_loss=0.3393, pruned_loss=0.09604, ctc_loss=0.1804, over 19839.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3313, pruned_loss=0.09675, ctc_loss=0.1821, over 3836525.65 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0
+2024-08-25 10:28:14,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=61637.333333333336, ans=0.125
+2024-08-25 10:31:58,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=61690.666666666664, ans=0.125
+2024-08-25 10:31:58,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.63 vs. limit=10.0
+2024-08-25 10:33:18,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=61744.0, ans=0.0
+2024-08-25 10:36:29,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.22 vs. limit=12.0
+2024-08-25 10:39:04,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.62 vs. limit=10.0
+2024-08-25 10:40:17,104 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 10:40:45,714 INFO [train.py:1114] (0/4) Epoch 5, batch 1650, loss[loss=0.3018, simple_loss=0.3418, pruned_loss=0.09497, ctc_loss=0.1795, over 19640.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3318, pruned_loss=0.09727, ctc_loss=0.1831, over 3833447.05 frames. ], batch size: 59, lr: 2.69e-02, grad_scale: 32.0
+2024-08-25 10:42:09,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61957.333333333336, ans=0.125
+2024-08-25 10:42:51,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=61957.333333333336, ans=0.1
+2024-08-25 10:43:04,118 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.985e+02 2.336e+02 2.616e+02 4.728e+02, threshold=4.672e+02, percent-clipped=1.0
+2024-08-25 10:43:08,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62010.666666666664, ans=0.125
+2024-08-25 10:43:12,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=62010.666666666664, ans=0.125
+2024-08-25 10:44:08,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=62064.0, ans=0.0
+2024-08-25 10:45:13,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=62064.0, ans=0.0
+2024-08-25 10:46:25,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=62117.333333333336, ans=0.0
+2024-08-25 10:46:42,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=62170.666666666664, ans=0.0
+2024-08-25 10:46:43,734 INFO [train.py:1114] (0/4) Epoch 5, batch 1700, loss[loss=0.2456, simple_loss=0.2846, pruned_loss=0.07489, ctc_loss=0.1419, over 19665.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3304, pruned_loss=0.09622, ctc_loss=0.1811, over 3847846.32 frames.
], batch size: 46, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:48:00,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=62224.0, ans=0.125 +2024-08-25 10:48:32,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=62277.333333333336, ans=0.0 +2024-08-25 10:49:28,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=62330.666666666664, ans=0.125 +2024-08-25 10:50:15,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=62384.0, ans=0.05 +2024-08-25 10:50:54,998 INFO [train.py:1114] (0/4) Epoch 5, batch 1750, loss[loss=0.2631, simple_loss=0.293, pruned_loss=0.08487, ctc_loss=0.1586, over 19655.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3296, pruned_loss=0.09541, ctc_loss=0.1796, over 3852781.90 frames. ], batch size: 45, lr: 2.68e-02, grad_scale: 32.0 +2024-08-25 10:51:05,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=62437.333333333336, ans=0.125 +2024-08-25 10:53:52,988 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 2.010e+02 2.326e+02 2.972e+02 6.446e+02, threshold=4.653e+02, percent-clipped=3.0 +2024-08-25 10:54:44,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=62544.0, ans=0.125 +2024-08-25 10:55:12,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=62544.0, ans=0.125 +2024-08-25 10:56:56,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.14 vs. limit=15.0 +2024-08-25 10:57:11,534 INFO [train.py:1114] (0/4) Epoch 5, batch 1800, loss[loss=0.3092, simple_loss=0.3348, pruned_loss=0.1028, ctc_loss=0.1948, over 19613.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3305, pruned_loss=0.09617, ctc_loss=0.1811, over 3853809.76 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0 +2024-08-25 10:58:13,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=62810.666666666664, ans=0.0 +2024-08-25 10:58:58,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62917.333333333336, ans=0.125 +2024-08-25 10:59:01,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0 +2024-08-25 10:59:06,183 INFO [train.py:1114] (0/4) Epoch 5, batch 1850, loss[loss=0.2571, simple_loss=0.3144, pruned_loss=0.07291, ctc_loss=0.1349, over 19569.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.33, pruned_loss=0.09571, ctc_loss=0.1799, over 3856524.31 frames. 
], batch size: 57, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 10:59:12,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=62970.666666666664, ans=0.0 +2024-08-25 10:59:16,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=62970.666666666664, ans=0.125 +2024-08-25 10:59:27,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63024.0, ans=0.1 +2024-08-25 10:59:32,444 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 2.044e+02 2.314e+02 2.820e+02 4.474e+02, threshold=4.628e+02, percent-clipped=0.0 +2024-08-25 11:00:05,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63184.0, ans=0.125 +2024-08-25 11:00:20,616 INFO [train.py:1114] (0/4) Epoch 5, batch 1900, loss[loss=0.2902, simple_loss=0.3338, pruned_loss=0.08839, ctc_loss=0.1744, over 19652.00 frames. ], tot_loss[loss=0.2966, simple_loss=0.3302, pruned_loss=0.09555, ctc_loss=0.1795, over 3860781.12 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 11:00:29,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=63237.333333333336, ans=0.5 +2024-08-25 11:00:33,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63237.333333333336, ans=0.1 +2024-08-25 11:00:59,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63344.0, ans=0.1 +2024-08-25 11:01:18,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=6.0 +2024-08-25 11:01:35,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=63397.333333333336, ans=0.125 +2024-08-25 11:01:57,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=63397.333333333336, ans=0.0 +2024-08-25 11:02:34,474 INFO [train.py:1114] (0/4) Epoch 5, batch 1950, loss[loss=0.272, simple_loss=0.3178, pruned_loss=0.08182, ctc_loss=0.1566, over 19569.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.331, pruned_loss=0.09566, ctc_loss=0.1797, over 3869409.36 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 11:02:46,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=63504.0, ans=0.125 +2024-08-25 11:02:51,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=63557.333333333336, ans=0.125 +2024-08-25 11:02:56,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63557.333333333336, ans=0.1 +2024-08-25 11:03:15,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63557.333333333336, ans=0.1 +2024-08-25 11:03:16,688 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.932e+02 2.130e+02 2.461e+02 4.838e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 11:03:35,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=63610.666666666664, ans=0.0 +2024-08-25 11:04:21,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=63717.333333333336, ans=0.125 +2024-08-25 11:04:37,799 INFO [train.py:1114] (0/4) Epoch 5, batch 2000, loss[loss=0.2422, simple_loss=0.2792, pruned_loss=0.07365, ctc_loss=0.1448, over 19636.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.332, pruned_loss=0.09654, ctc_loss=0.1811, over 3853618.52 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0 +2024-08-25 11:04:41,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=63770.666666666664, ans=0.0 +2024-08-25 11:05:12,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=63877.333333333336, ans=0.125 +2024-08-25 11:05:13,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=63877.333333333336, ans=0.125 +2024-08-25 11:05:18,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=63877.333333333336, ans=0.125 +2024-08-25 11:05:29,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=63930.666666666664, ans=0.125 +2024-08-25 11:05:35,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=63930.666666666664, ans=0.2 +2024-08-25 11:05:37,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=63930.666666666664, ans=0.05 +2024-08-25 11:05:39,336 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.67 vs. 
limit=6.0 +2024-08-25 11:05:46,651 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:05:49,747 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-12000.pt +2024-08-25 11:06:03,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=63984.0, ans=0.2 +2024-08-25 11:06:04,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=63984.0, ans=0.125 +2024-08-25 11:06:08,376 INFO [train.py:1114] (0/4) Epoch 5, batch 2050, loss[loss=0.2528, simple_loss=0.2909, pruned_loss=0.07765, ctc_loss=0.1485, over 19706.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3303, pruned_loss=0.09601, ctc_loss=0.1803, over 3850977.62 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 32.0 +2024-08-25 11:06:13,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=64037.333333333336, ans=0.09899494936611666 +2024-08-25 11:06:19,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.69 vs. limit=15.0 +2024-08-25 11:06:29,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.037e+02 2.272e+02 2.892e+02 6.343e+02, threshold=4.544e+02, percent-clipped=1.0 +2024-08-25 11:06:35,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64144.0, ans=0.125 +2024-08-25 11:06:46,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=64144.0, ans=0.125 +2024-08-25 11:07:13,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64197.333333333336, ans=0.125 +2024-08-25 11:07:25,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64250.666666666664, ans=0.125 +2024-08-25 11:07:48,385 INFO [train.py:1114] (0/4) Epoch 5, batch 2100, loss[loss=0.3122, simple_loss=0.3408, pruned_loss=0.1018, ctc_loss=0.2003, over 19789.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3293, pruned_loss=0.09526, ctc_loss=0.1789, over 3858618.66 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:08:07,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.56 vs. limit=15.0 +2024-08-25 11:08:12,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.83 vs. 
limit=6.0 +2024-08-25 11:08:14,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64357.333333333336, ans=0.1 +2024-08-25 11:08:53,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=64464.0, ans=0.125 +2024-08-25 11:08:58,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64517.333333333336, ans=0.1 +2024-08-25 11:09:00,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=64517.333333333336, ans=0.0 +2024-08-25 11:09:15,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=64517.333333333336, ans=0.125 +2024-08-25 11:09:21,097 INFO [train.py:1114] (0/4) Epoch 5, batch 2150, loss[loss=0.2832, simple_loss=0.3214, pruned_loss=0.08801, ctc_loss=0.1727, over 19597.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3281, pruned_loss=0.0945, ctc_loss=0.1773, over 3869218.50 frames. ], batch size: 52, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:09:38,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=64624.0, ans=10.0 +2024-08-25 11:09:40,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=64624.0, ans=0.125 +2024-08-25 11:09:44,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 2.041e+02 2.279e+02 2.689e+02 3.624e+02, threshold=4.557e+02, percent-clipped=0.0 +2024-08-25 11:09:57,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=15.0 +2024-08-25 11:09:59,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. limit=6.0 +2024-08-25 11:10:21,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64784.0, ans=0.1 +2024-08-25 11:10:21,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.52 vs. limit=22.5 +2024-08-25 11:10:29,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.89 vs. limit=6.0 +2024-08-25 11:10:34,038 INFO [train.py:1114] (0/4) Epoch 5, batch 2200, loss[loss=0.3087, simple_loss=0.3419, pruned_loss=0.09827, ctc_loss=0.1971, over 19592.00 frames. ], tot_loss[loss=0.2934, simple_loss=0.3279, pruned_loss=0.09407, ctc_loss=0.177, over 3867821.49 frames. 
], batch size: 57, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:10:36,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64837.333333333336, ans=0.125 +2024-08-25 11:10:36,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=64837.333333333336, ans=0.07 +2024-08-25 11:10:58,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=64944.0, ans=10.0 +2024-08-25 11:10:58,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=64944.0, ans=0.2 +2024-08-25 11:11:29,245 INFO [train.py:1114] (0/4) Epoch 5, batch 2250, loss[loss=0.2711, simple_loss=0.3211, pruned_loss=0.08013, ctc_loss=0.1521, over 19606.00 frames. ], tot_loss[loss=0.2943, simple_loss=0.3285, pruned_loss=0.09454, ctc_loss=0.1777, over 3867541.43 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 32.0 +2024-08-25 11:11:29,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=65104.0, ans=0.125 +2024-08-25 11:11:43,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.95 vs. limit=15.0 +2024-08-25 11:11:51,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=65157.333333333336, ans=0.125 +2024-08-25 11:11:51,991 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.714e+02 2.180e+02 2.514e+02 3.003e+02 5.559e+02, threshold=5.029e+02, percent-clipped=2.0 +2024-08-25 11:11:56,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=65210.666666666664, ans=0.0 +2024-08-25 11:11:57,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=65210.666666666664, ans=0.125 +2024-08-25 11:12:16,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.31 vs. limit=22.5 +2024-08-25 11:12:24,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=65264.0, ans=0.0 +2024-08-25 11:12:38,218 INFO [train.py:1114] (0/4) Epoch 5, batch 2300, loss[loss=0.2787, simple_loss=0.3152, pruned_loss=0.08885, ctc_loss=0.1613, over 19504.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.327, pruned_loss=0.09421, ctc_loss=0.1769, over 3861826.33 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 32.0 +2024-08-25 11:12:46,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-08-25 11:12:55,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=65424.0, ans=0.0 +2024-08-25 11:13:11,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-08-25 11:13:28,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.97 vs. 
limit=22.5 +2024-08-25 11:13:50,076 INFO [train.py:1114] (0/4) Epoch 5, batch 2350, loss[loss=0.2735, simple_loss=0.3263, pruned_loss=0.08036, ctc_loss=0.1502, over 19684.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3277, pruned_loss=0.09461, ctc_loss=0.1775, over 3864672.00 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:14:00,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65637.33333333333, ans=0.1 +2024-08-25 11:14:31,470 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.936e+02 2.303e+02 2.820e+02 4.151e+02, threshold=4.606e+02, percent-clipped=0.0 +2024-08-25 11:14:33,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=65744.0, ans=0.0 +2024-08-25 11:14:36,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65744.0, ans=0.125 +2024-08-25 11:14:55,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=65797.33333333333, ans=0.125 +2024-08-25 11:15:08,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=65797.33333333333, ans=0.125 +2024-08-25 11:15:10,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=65797.33333333333, ans=0.125 +2024-08-25 11:15:16,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.62 vs. limit=5.0 +2024-08-25 11:15:23,124 INFO [train.py:1114] (0/4) Epoch 5, batch 2400, loss[loss=0.3112, simple_loss=0.3507, pruned_loss=0.09777, ctc_loss=0.1906, over 19429.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.33, pruned_loss=0.09536, ctc_loss=0.1791, over 3858442.69 frames. ], batch size: 67, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:15:26,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65904.0, ans=0.125 +2024-08-25 11:15:27,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.57 vs. limit=22.5 +2024-08-25 11:15:59,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=66010.66666666667, ans=0.1 +2024-08-25 11:16:19,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=66064.0, ans=0.05 +2024-08-25 11:16:21,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=66064.0, ans=0.125 +2024-08-25 11:16:32,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66064.0, ans=0.1 +2024-08-25 11:16:54,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.62 vs. limit=15.0 +2024-08-25 11:16:56,805 INFO [train.py:1114] (0/4) Epoch 5, batch 2450, loss[loss=0.3655, simple_loss=0.3663, pruned_loss=0.1322, ctc_loss=0.2509, over 13486.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.335, pruned_loss=0.0999, ctc_loss=0.1873, over 3734260.93 frames. 
], batch size: 141, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:17:21,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=66224.0, ans=0.2 +2024-08-25 11:17:43,156 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 2.021e+02 2.221e+02 2.524e+02 3.558e+02, threshold=4.443e+02, percent-clipped=0.0 +2024-08-25 11:17:56,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=66277.33333333333, ans=0.125 +2024-08-25 11:18:11,480 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-5.pt +2024-08-25 11:19:28,343 INFO [train.py:1114] (0/4) Epoch 6, batch 0, loss[loss=0.2835, simple_loss=0.3088, pruned_loss=0.09401, ctc_loss=0.1755, over 19827.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3088, pruned_loss=0.09401, ctc_loss=0.1755, over 19827.00 frames. ], batch size: 49, lr: 2.45e-02, grad_scale: 32.0 +2024-08-25 11:19:28,344 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 11:20:29,258 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.2388, simple_loss=0.3147, pruned_loss=0.05993, ctc_loss=0.1076, over 944034.00 frames. +2024-08-25 11:20:29,258 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 11:21:00,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66485.33333333333, ans=0.1 +2024-08-25 11:21:00,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66485.33333333333, ans=0.125 +2024-08-25 11:21:18,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=66592.0, ans=0.0 +2024-08-25 11:21:26,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=66592.0, ans=0.2 +2024-08-25 11:21:52,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.60 vs. limit=22.5 +2024-08-25 11:21:56,953 INFO [train.py:1114] (0/4) Epoch 6, batch 50, loss[loss=0.2588, simple_loss=0.2907, pruned_loss=0.08219, ctc_loss=0.156, over 19728.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3329, pruned_loss=0.09875, ctc_loss=0.1874, over 844928.81 frames. 
], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:22:17,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=66698.66666666667, ans=0.125 +2024-08-25 11:22:45,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=66752.0, ans=0.0 +2024-08-25 11:22:50,719 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 1.959e+02 2.174e+02 2.569e+02 5.460e+02, threshold=4.347e+02, percent-clipped=1.0 +2024-08-25 11:23:07,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=66858.66666666667, ans=0.125 +2024-08-25 11:23:11,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=66858.66666666667, ans=0.125 +2024-08-25 11:23:18,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=66912.0, ans=0.2 +2024-08-25 11:23:18,892 INFO [train.py:1114] (0/4) Epoch 6, batch 100, loss[loss=0.2625, simple_loss=0.3078, pruned_loss=0.07885, ctc_loss=0.1486, over 19711.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3327, pruned_loss=0.09637, ctc_loss=0.1824, over 1499585.12 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:23:29,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.53 vs. limit=15.0 +2024-08-25 11:23:34,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.30 vs. limit=12.0 +2024-08-25 11:23:46,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.15 vs. limit=10.0 +2024-08-25 11:24:21,816 INFO [train.py:1114] (0/4) Epoch 6, batch 150, loss[loss=0.2673, simple_loss=0.2992, pruned_loss=0.08509, ctc_loss=0.163, over 19714.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.3275, pruned_loss=0.09357, ctc_loss=0.1766, over 2027782.93 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:24:51,938 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=6.224e+00 +2024-08-25 11:24:53,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0 +2024-08-25 11:25:04,952 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.947e+02 2.172e+02 2.650e+02 4.091e+02, threshold=4.343e+02, percent-clipped=0.0 +2024-08-25 11:25:14,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67338.66666666667, ans=0.0 +2024-08-25 11:25:21,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67392.0, ans=0.125 +2024-08-25 11:25:25,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=15.0 +2024-08-25 11:25:27,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. 
limit=15.0 +2024-08-25 11:25:35,922 INFO [train.py:1114] (0/4) Epoch 6, batch 200, loss[loss=0.3444, simple_loss=0.3528, pruned_loss=0.1225, ctc_loss=0.2276, over 18434.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3243, pruned_loss=0.09134, ctc_loss=0.172, over 2435777.26 frames. ], batch size: 85, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:26:42,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=67605.33333333333, ans=0.0 +2024-08-25 11:26:52,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=67605.33333333333, ans=0.0 +2024-08-25 11:27:02,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.58 vs. limit=15.0 +2024-08-25 11:27:17,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67712.0, ans=0.1 +2024-08-25 11:27:19,502 INFO [train.py:1114] (0/4) Epoch 6, batch 250, loss[loss=0.3237, simple_loss=0.35, pruned_loss=0.1081, ctc_loss=0.2026, over 19444.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.3254, pruned_loss=0.0919, ctc_loss=0.1733, over 2756020.36 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:27:53,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.98 vs. limit=15.0 +2024-08-25 11:27:55,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=67765.33333333333, ans=0.035 +2024-08-25 11:28:32,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=67818.66666666667, ans=0.125 +2024-08-25 11:28:34,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=67872.0, ans=0.2 +2024-08-25 11:28:36,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.549e+02 1.900e+02 2.111e+02 2.483e+02 4.707e+02, threshold=4.222e+02, percent-clipped=1.0 +2024-08-25 11:29:38,945 INFO [train.py:1114] (0/4) Epoch 6, batch 300, loss[loss=0.3019, simple_loss=0.3427, pruned_loss=0.09583, ctc_loss=0.1735, over 19503.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3244, pruned_loss=0.09132, ctc_loss=0.172, over 3000636.50 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:29:46,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=67978.66666666667, ans=0.025 +2024-08-25 11:29:53,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=68032.0, ans=0.025 +2024-08-25 11:30:20,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.38 vs. 
limit=10.0 +2024-08-25 11:30:52,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=68085.33333333333, ans=0.125 +2024-08-25 11:31:07,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68138.66666666667, ans=0.0 +2024-08-25 11:31:08,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=68138.66666666667, ans=0.04949747468305833 +2024-08-25 11:31:11,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=68192.0, ans=0.125 +2024-08-25 11:31:37,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68192.0, ans=0.0 +2024-08-25 11:31:39,925 INFO [train.py:1114] (0/4) Epoch 6, batch 350, loss[loss=0.2515, simple_loss=0.2959, pruned_loss=0.07608, ctc_loss=0.1374, over 19759.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.325, pruned_loss=0.09123, ctc_loss=0.1718, over 3190927.67 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:31:47,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=68245.33333333333, ans=0.125 +2024-08-25 11:32:13,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.56 vs. limit=15.0 +2024-08-25 11:32:19,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=68298.66666666667, ans=0.125 +2024-08-25 11:32:21,315 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.06 vs. limit=22.5 +2024-08-25 11:32:35,305 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 2.039e+02 2.360e+02 2.872e+02 5.301e+02, threshold=4.720e+02, percent-clipped=2.0 +2024-08-25 11:32:48,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68458.66666666667, ans=0.1 +2024-08-25 11:32:52,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68458.66666666667, ans=0.0 +2024-08-25 11:33:02,564 INFO [train.py:1114] (0/4) Epoch 6, batch 400, loss[loss=0.2452, simple_loss=0.2957, pruned_loss=0.071, ctc_loss=0.1317, over 19521.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3239, pruned_loss=0.09058, ctc_loss=0.1705, over 3343391.92 frames. 
], batch size: 54, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:33:07,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=68512.0, ans=0.5 +2024-08-25 11:33:08,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68512.0, ans=0.0 +2024-08-25 11:33:15,730 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:33:25,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68618.66666666667, ans=0.1 +2024-08-25 11:33:58,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68672.0, ans=0.0 +2024-08-25 11:34:13,405 INFO [train.py:1114] (0/4) Epoch 6, batch 450, loss[loss=0.325, simple_loss=0.3498, pruned_loss=0.1093, ctc_loss=0.204, over 19629.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3244, pruned_loss=0.09112, ctc_loss=0.1715, over 3451979.55 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:34:13,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=68778.66666666667, ans=0.2 +2024-08-25 11:34:30,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=68832.0, ans=0.0 +2024-08-25 11:34:40,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.57 vs. limit=15.0 +2024-08-25 11:34:49,657 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.969e+02 2.191e+02 2.793e+02 4.218e+02, threshold=4.382e+02, percent-clipped=0.0 +2024-08-25 11:34:54,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=68938.66666666667, ans=0.2 +2024-08-25 11:35:05,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=68992.0, ans=0.2 +2024-08-25 11:35:10,579 INFO [train.py:1114] (0/4) Epoch 6, batch 500, loss[loss=0.2976, simple_loss=0.3426, pruned_loss=0.09282, ctc_loss=0.1676, over 19635.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3226, pruned_loss=0.08998, ctc_loss=0.1693, over 3547779.06 frames. 
], batch size: 63, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:35:20,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69045.33333333333, ans=0.125 +2024-08-25 11:35:25,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69098.66666666667, ans=0.125 +2024-08-25 11:35:27,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=69098.66666666667, ans=0.025 +2024-08-25 11:35:39,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=69152.0, ans=0.125 +2024-08-25 11:35:49,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=69205.33333333333, ans=0.125 +2024-08-25 11:35:49,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.97 vs. limit=15.0 +2024-08-25 11:35:50,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=69205.33333333333, ans=0.0 +2024-08-25 11:36:10,421 INFO [train.py:1114] (0/4) Epoch 6, batch 550, loss[loss=0.3145, simple_loss=0.344, pruned_loss=0.1045, ctc_loss=0.1904, over 19282.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3234, pruned_loss=0.09042, ctc_loss=0.1704, over 3609600.28 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:36:21,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.00 vs. limit=15.0 +2024-08-25 11:36:38,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=69418.66666666667, ans=0.07 +2024-08-25 11:36:43,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0 +2024-08-25 11:36:46,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.559e+02 2.100e+02 2.439e+02 2.966e+02 5.259e+02, threshold=4.878e+02, percent-clipped=1.0 +2024-08-25 11:37:20,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=69525.33333333333, ans=0.0 +2024-08-25 11:37:21,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69525.33333333333, ans=0.1 +2024-08-25 11:37:28,779 INFO [train.py:1114] (0/4) Epoch 6, batch 600, loss[loss=0.3136, simple_loss=0.3477, pruned_loss=0.1024, ctc_loss=0.1863, over 19348.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3238, pruned_loss=0.09083, ctc_loss=0.171, over 3666363.11 frames. 
], batch size: 67, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:37:40,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=69578.66666666667, ans=0.125 +2024-08-25 11:38:00,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=69685.33333333333, ans=0.0 +2024-08-25 11:38:19,959 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.424e-03 +2024-08-25 11:38:24,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0 +2024-08-25 11:38:32,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=69792.0, ans=0.0 +2024-08-25 11:38:40,020 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. limit=15.0 +2024-08-25 11:38:46,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=69792.0, ans=0.125 +2024-08-25 11:38:58,906 INFO [train.py:1114] (0/4) Epoch 6, batch 650, loss[loss=0.2548, simple_loss=0.3077, pruned_loss=0.07385, ctc_loss=0.1352, over 19766.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3231, pruned_loss=0.09031, ctc_loss=0.17, over 3716488.73 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:39:25,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=69898.66666666667, ans=0.025 +2024-08-25 11:39:50,478 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.931e+02 2.137e+02 2.425e+02 3.711e+02, threshold=4.274e+02, percent-clipped=0.0 +2024-08-25 11:40:16,199 INFO [train.py:1114] (0/4) Epoch 6, batch 700, loss[loss=0.241, simple_loss=0.2889, pruned_loss=0.07063, ctc_loss=0.1294, over 19724.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3235, pruned_loss=0.09054, ctc_loss=0.1702, over 3748518.01 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:40:24,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=70112.0, ans=0.0 +2024-08-25 11:40:40,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=70165.33333333333, ans=0.0 +2024-08-25 11:40:48,553 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:41:10,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=70218.66666666667, ans=0.125 +2024-08-25 11:41:57,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=70325.33333333333, ans=0.0 +2024-08-25 11:42:06,504 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.40 vs. limit=12.0 +2024-08-25 11:42:12,746 INFO [train.py:1114] (0/4) Epoch 6, batch 750, loss[loss=0.2955, simple_loss=0.3354, pruned_loss=0.09312, ctc_loss=0.1732, over 19502.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3228, pruned_loss=0.09003, ctc_loss=0.1691, over 3774484.37 frames. 
], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:42:28,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=70432.0, ans=0.125 +2024-08-25 11:42:53,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.09 vs. limit=15.0 +2024-08-25 11:42:59,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.30 vs. limit=15.0 +2024-08-25 11:43:09,525 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.653e+02 2.022e+02 2.297e+02 2.693e+02 4.652e+02, threshold=4.594e+02, percent-clipped=2.0 +2024-08-25 11:43:16,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=70538.66666666667, ans=0.125 +2024-08-25 11:43:20,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.50 vs. limit=22.5 +2024-08-25 11:43:22,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=70592.0, ans=0.2 +2024-08-25 11:43:34,918 INFO [train.py:1114] (0/4) Epoch 6, batch 800, loss[loss=0.245, simple_loss=0.2861, pruned_loss=0.07458, ctc_loss=0.1368, over 19810.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.322, pruned_loss=0.08955, ctc_loss=0.1683, over 3794875.81 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:43:44,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=70645.33333333333, ans=0.125 +2024-08-25 11:43:45,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=70645.33333333333, ans=0.2 +2024-08-25 11:44:02,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.20 vs. limit=15.0 +2024-08-25 11:44:08,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.32 vs. limit=15.0 +2024-08-25 11:44:18,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.88 vs. limit=10.0 +2024-08-25 11:44:27,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=70805.33333333333, ans=0.125 +2024-08-25 11:44:46,463 INFO [train.py:1114] (0/4) Epoch 6, batch 850, loss[loss=0.2822, simple_loss=0.329, pruned_loss=0.08536, ctc_loss=0.1616, over 19681.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3212, pruned_loss=0.08893, ctc_loss=0.167, over 3813603.32 frames. 
], batch size: 59, lr: 2.39e-02, grad_scale: 32.0
+[per-parameter scaling.py diagnostics (ScheduledFloat/Whitening/WithLoss entries) elided; train.py loss summaries, optim.py grad-norm warnings, and checkpoint.py saves retained below]
+2024-08-25 11:45:46,245 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.893e+02 2.077e+02 2.374e+02 4.075e+02, threshold=4.154e+02, percent-clipped=0.0
+2024-08-25 11:46:07,495 INFO [train.py:1114] (0/4) Epoch 6, batch 900, loss[loss=0.2528, simple_loss=0.2907, pruned_loss=0.0781, ctc_loss=0.1466, over 19439.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3217, pruned_loss=0.08964, ctc_loss=0.1682, over 3817682.59 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 16.0
+2024-08-25 11:47:21,574 INFO [train.py:1114] (0/4) Epoch 6, batch 950, loss[loss=0.2729, simple_loss=0.3121, pruned_loss=0.08289, ctc_loss=0.1699, over 19522.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3216, pruned_loss=0.08928, ctc_loss=0.1679, over 3819604.40 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 16.0
+2024-08-25 11:48:23,513 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.900e+02 2.167e+02 2.553e+02 4.088e+02, threshold=4.334e+02, percent-clipped=0.0
+2024-08-25 11:49:03,396 INFO [train.py:1114] (0/4) Epoch 6, batch 1000, loss[loss=0.2646, simple_loss=0.31, pruned_loss=0.08, ctc_loss=0.1481, over 19845.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3226, pruned_loss=0.08985, ctc_loss=0.1686, over 3815694.02 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0
+2024-08-25 11:50:57,810 INFO [train.py:1114] (0/4) Epoch 6, batch 1050, loss[loss=0.2681, simple_loss=0.3189, pruned_loss=0.07921, ctc_loss=0.1472, over 19845.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3219, pruned_loss=0.08958, ctc_loss=0.1681, over 3821802.79 frames. ], batch size: 57, lr: 2.37e-02, grad_scale: 16.0
+2024-08-25 11:52:00,139 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.944e+02 2.201e+02 2.550e+02 3.957e+02, threshold=4.403e+02, percent-clipped=0.0
+2024-08-25 11:52:48,890 INFO [train.py:1114] (0/4) Epoch 6, batch 1100, loss[loss=0.2569, simple_loss=0.3107, pruned_loss=0.0755, ctc_loss=0.1301, over 19586.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3213, pruned_loss=0.08897, ctc_loss=0.1668, over 3829564.03 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0
+2024-08-25 11:53:58,642 INFO [train.py:1114] (0/4) Epoch 6, batch 1150, loss[loss=0.261, simple_loss=0.3031, pruned_loss=0.07922, ctc_loss=0.151, over 19581.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3207, pruned_loss=0.08875, ctc_loss=0.1666, over 3828050.86 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0
+2024-08-25 11:54:43,441 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.952e+02 2.194e+02 2.505e+02 4.680e+02, threshold=4.387e+02, percent-clipped=1.0
+2024-08-25 11:55:11,879 INFO [train.py:1114] (0/4) Epoch 6, batch 1200, loss[loss=0.335, simple_loss=0.3603, pruned_loss=0.1134, ctc_loss=0.2069, over 19845.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3225, pruned_loss=0.08988, ctc_loss=0.1688, over 3823637.56 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0
+2024-08-25 11:56:55,096 INFO [train.py:1114] (0/4) Epoch 6, batch 1250, loss[loss=0.2976, simple_loss=0.3321, pruned_loss=0.09737, ctc_loss=0.1708, over 19526.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3231, pruned_loss=0.08999, ctc_loss=0.169, over 3842300.87 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0
+2024-08-25 11:58:13,321 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.073e+02 2.305e+02 2.660e+02 4.224e+02, threshold=4.609e+02, percent-clipped=0.0
+2024-08-25 11:58:46,690 INFO [train.py:1114] (0/4) Epoch 6, batch 1300, loss[loss=0.3193, simple_loss=0.3575, pruned_loss=0.1029, ctc_loss=0.1885, over 18835.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3225, pruned_loss=0.08963, ctc_loss=0.1683, over 3846607.15 frames. ], batch size: 76, lr: 2.36e-02, grad_scale: 32.0
+2024-08-25 12:00:19,976 INFO [train.py:1114] (0/4) Epoch 6, batch 1350, loss[loss=0.2802, simple_loss=0.3262, pruned_loss=0.08491, ctc_loss=0.161, over 19765.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3211, pruned_loss=0.08836, ctc_loss=0.166, over 3857129.69 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0
+2024-08-25 12:01:05,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.025e+02 2.295e+02 2.579e+02 4.133e+02, threshold=4.590e+02, percent-clipped=0.0
+2024-08-25 12:01:30,917 INFO [train.py:1114] (0/4) Epoch 6, batch 1400, loss[loss=0.2849, simple_loss=0.3086, pruned_loss=0.09603, ctc_loss=0.173, over 19670.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3211, pruned_loss=0.08856, ctc_loss=0.1661, over 3864354.35 frames. ], batch size: 46, lr: 2.35e-02, grad_scale: 32.0
+2024-08-25 12:02:52,963 INFO [train.py:1114] (0/4) Epoch 6, batch 1450, loss[loss=0.2998, simple_loss=0.3387, pruned_loss=0.09361, ctc_loss=0.1842, over 19641.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3214, pruned_loss=0.08856, ctc_loss=0.1663, over 3863037.37 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 32.0
+2024-08-25 12:03:53,271 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.572e+02 1.998e+02 2.330e+02 2.811e+02 4.670e+02, threshold=4.661e+02, percent-clipped=1.0
+2024-08-25 12:04:25,548 INFO [train.py:1114] (0/4) Epoch 6, batch 1500, loss[loss=0.3002, simple_loss=0.3334, pruned_loss=0.09718, ctc_loss=0.1817, over 19578.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3217, pruned_loss=0.08856, ctc_loss=0.1663, over 3862765.27 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 32.0
+2024-08-25 12:06:01,373 INFO [train.py:1114] (0/4) Epoch 6, batch 1550, loss[loss=0.2908, simple_loss=0.3277, pruned_loss=0.09234, ctc_loss=0.1727, over 19610.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3218, pruned_loss=0.08892, ctc_loss=0.167, over 3846999.82 frames. ], batch size: 60, lr: 2.34e-02, grad_scale: 32.0
+2024-08-25 12:06:37,901 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 2.061e+02 2.512e+02 3.027e+02 4.789e+02, threshold=5.024e+02, percent-clipped=1.0
+2024-08-25 12:07:01,770 INFO [train.py:1114] (0/4) Epoch 6, batch 1600, loss[loss=0.3033, simple_loss=0.3415, pruned_loss=0.09644, ctc_loss=0.1806, over 19841.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3222, pruned_loss=0.08944, ctc_loss=0.1679, over 3836462.91 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0
+2024-08-25 12:08:00,982 INFO [train.py:1114] (0/4) Epoch 6, batch 1650, loss[loss=0.2796, simple_loss=0.3299, pruned_loss=0.0833, ctc_loss=0.1565, over 19643.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3222, pruned_loss=0.08954, ctc_loss=0.168, over 3832786.98 frames. ], batch size: 59, lr: 2.34e-02, grad_scale: 32.0
+2024-08-25 12:08:37,753 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.893e+02 2.381e+02 2.784e+02 7.281e+02, threshold=4.762e+02, percent-clipped=1.0
+2024-08-25 12:09:00,105 INFO [train.py:1114] (0/4) Epoch 6, batch 1700, loss[loss=0.242, simple_loss=0.2806, pruned_loss=0.07463, ctc_loss=0.1356, over 19668.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3208, pruned_loss=0.0883, ctc_loss=0.1657, over 3846878.92 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 32.0
+2024-08-25 12:09:55,904 INFO [train.py:1114] (0/4) Epoch 6, batch 1750, loss[loss=0.2379, simple_loss=0.2805, pruned_loss=0.07065, ctc_loss=0.1349, over 19645.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3202, pruned_loss=0.08783, ctc_loss=0.165, over 3852018.78 frames. ], batch size: 45, lr: 2.33e-02, grad_scale: 16.0
+2024-08-25 12:10:32,752 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.890e+02 2.130e+02 2.587e+02 4.262e+02, threshold=4.260e+02, percent-clipped=0.0
+2024-08-25 12:10:50,354 INFO [train.py:1114] (0/4) Epoch 6, batch 1800, loss[loss=0.2981, simple_loss=0.3377, pruned_loss=0.09346, ctc_loss=0.1787, over 19623.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3201, pruned_loss=0.08759, ctc_loss=0.1648, over 3853824.76 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 8.0
+2024-08-25 12:11:44,879 INFO [train.py:1114] (0/4) Epoch 6, batch 1850, loss[loss=0.3182, simple_loss=0.3512, pruned_loss=0.1049, ctc_loss=0.1887, over 19579.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3202, pruned_loss=0.08796, ctc_loss=0.1651, over 3858141.23 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 8.0
+2024-08-25 12:12:22,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.994e+02 2.285e+02 2.712e+02 4.413e+02, threshold=4.569e+02, percent-clipped=2.0
+2024-08-25 12:12:43,393 INFO [train.py:1114] (0/4) Epoch 6, batch 1900, loss[loss=0.2632, simple_loss=0.3236, pruned_loss=0.07421, ctc_loss=0.1357, over 19656.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3207, pruned_loss=0.08767, ctc_loss=0.1646, over 3863486.84 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 8.0
+2024-08-25 12:13:40,519 INFO [train.py:1114] (0/4) Epoch 6, batch 1950, loss[loss=0.2712, simple_loss=0.3124, pruned_loss=0.08334, ctc_loss=0.1583, over 19571.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3214, pruned_loss=0.08742, ctc_loss=0.1641, over 3871849.41 frames. ], batch size: 52, lr: 2.32e-02, grad_scale: 8.0
+2024-08-25 12:14:18,621 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 1.890e+02 2.137e+02 2.349e+02 3.743e+02, threshold=4.275e+02, percent-clipped=0.0
+2024-08-25 12:14:36,004 INFO [train.py:1114] (0/4) Epoch 6, batch 2000, loss[loss=0.2717, simple_loss=0.2997, pruned_loss=0.08873, ctc_loss=0.1658, over 19691.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3222, pruned_loss=0.08791, ctc_loss=0.1653, over 3855417.73 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 16.0
+2024-08-25 12:15:30,077 INFO [train.py:1114] (0/4) Epoch 6, batch 2050, loss[loss=0.2439, simple_loss=0.2798, pruned_loss=0.07446, ctc_loss=0.1479, over 19725.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3202, pruned_loss=0.08734, ctc_loss=0.164, over 3852827.87 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 16.0
+2024-08-25 12:16:14,698 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.955e+02 2.380e+02 2.986e+02 1.021e+03, threshold=4.760e+02, percent-clipped=7.0
+2024-08-25 12:16:32,219 INFO [train.py:1114] (0/4) Epoch 6, batch 2100, loss[loss=0.2945, simple_loss=0.3339, pruned_loss=0.09206, ctc_loss=0.1775, over 19770.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3196, pruned_loss=0.08686, ctc_loss=0.1633, over 3859819.95 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 16.0
+2024-08-25 12:17:28,081 INFO [train.py:1114] (0/4) Epoch 6, batch 2150, loss[loss=0.2405, simple_loss=0.2979, pruned_loss=0.06641, ctc_loss=0.1258, over 19586.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.319, pruned_loss=0.08648, ctc_loss=0.1625, over 3870116.27 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 16.0
+2024-08-25 12:18:19,492 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.877e+02 2.258e+02 2.799e+02 6.726e+02, threshold=4.515e+02, percent-clipped=2.0
+2024-08-25 12:19:06,984 INFO [train.py:1114] (0/4) Epoch 6, batch 2200, loss[loss=0.301, simple_loss=0.3343, pruned_loss=0.09707, ctc_loss=0.1841, over 19602.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3193, pruned_loss=0.08687, ctc_loss=0.163, over 3868804.12 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 16.0
+2024-08-25 12:20:02,388 INFO [train.py:1114] (0/4) Epoch 6, batch 2250, loss[loss=0.2582, simple_loss=0.3092, pruned_loss=0.07551, ctc_loss=0.1404, over 19609.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3188, pruned_loss=0.08642, ctc_loss=0.1622, over 3868380.80 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 16.0
+2024-08-25 12:20:38,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 2.005e+02 2.234e+02 2.581e+02 4.325e+02, threshold=4.468e+02, percent-clipped=0.0
+2024-08-25 12:20:56,329 INFO [train.py:1114] (0/4) Epoch 6, batch 2300, loss[loss=0.264, simple_loss=0.301, pruned_loss=0.08165, ctc_loss=0.1592, over 19514.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3183, pruned_loss=0.08662, ctc_loss=0.1628, over 3861729.87 frames. ], batch size: 49, lr: 2.29e-02, grad_scale: 16.0
+2024-08-25 12:21:52,656 INFO [train.py:1114] (0/4) Epoch 6, batch 2350, loss[loss=0.2569, simple_loss=0.3115, pruned_loss=0.07224, ctc_loss=0.1444, over 19666.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3184, pruned_loss=0.08661, ctc_loss=0.1626, over 3864765.58 frames. ], batch size: 63, lr: 2.29e-02, grad_scale: 16.0
+2024-08-25 12:22:30,295 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 2.097e+02 2.553e+02 3.084e+02 6.792e+02, threshold=5.106e+02, percent-clipped=2.0
+2024-08-25 12:22:47,954 INFO [train.py:1114] (0/4) Epoch 6, batch 2400, loss[loss=0.3017, simple_loss=0.3359, pruned_loss=0.09635, ctc_loss=0.1869, over 19346.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.321, pruned_loss=0.08796, ctc_loss=0.1652, over 3859314.25 frames. ], batch size: 67, lr: 2.29e-02, grad_scale: 32.0
+2024-08-25 12:23:45,704 INFO [train.py:1114] (0/4) Epoch 6, batch 2450, loss[loss=0.3622, simple_loss=0.3604, pruned_loss=0.1332, ctc_loss=0.244, over 13494.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3262, pruned_loss=0.09226, ctc_loss=0.1733, over 3735937.20 frames. ], batch size: 140, lr: 2.29e-02, grad_scale: 32.0
+2024-08-25 12:28:01,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.548e+02 2.056e+02 2.291e+02 2.526e+02 5.572e+02, threshold=4.582e+02, percent-clipped=1.0
+2024-08-25 12:28:19,343 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-6.pt
+2024-08-25 12:29:27,601 INFO [train.py:1114] (0/4) Epoch 7, batch 0, loss[loss=0.2917, simple_loss=0.3199, pruned_loss=0.09673, ctc_loss=0.1754, over 19812.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3199, pruned_loss=0.09673, ctc_loss=0.1754, over 19812.00 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 32.0
+2024-08-25 12:29:27,602 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-25 12:29:44,290 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.2269, simple_loss=0.307, pruned_loss=0.05393, ctc_loss=0.0975, over 944034.00 frames.
+2024-08-25 12:29:44,291 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB
+2024-08-25 12:33:04,695 INFO [train.py:1114] (0/4) Epoch 7, batch 50, loss[loss=0.2463, simple_loss=0.2939, pruned_loss=0.07189, ctc_loss=0.1371, over 19723.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.325, pruned_loss=0.0917, ctc_loss=0.1725, over 844189.99 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0
+2024-08-25 12:34:17,271 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.570e+02 1.999e+02 2.246e+02 2.808e+02 5.514e+02, threshold=4.492e+02, percent-clipped=3.0
+2024-08-25 12:34:24,309 INFO [train.py:1114] (0/4) Epoch 7, batch 100, loss[loss=0.2556, simple_loss=0.3054, pruned_loss=0.07375, ctc_loss=0.1457, over 19727.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3235, pruned_loss=0.08879, ctc_loss=0.1679, over 1500395.15 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:35:23,291 INFO [train.py:1114] (0/4) Epoch 7, batch 150, loss[loss=0.2578, simple_loss=0.2881, pruned_loss=0.08287, ctc_loss=0.1544, over 19726.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3203, pruned_loss=0.08685, ctc_loss=0.1642, over 2029556.49 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:36:18,826 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.959e+02 2.217e+02 2.953e+02 5.735e+02, threshold=4.434e+02, percent-clipped=2.0
+2024-08-25 12:36:26,001 INFO [train.py:1114] (0/4) Epoch 7, batch 200, loss[loss=0.3271, simple_loss=0.3511, pruned_loss=0.1102, ctc_loss=0.207, over 18351.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3184, pruned_loss=0.08634, ctc_loss=0.163, over 2436499.74 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:37:22,886 INFO [train.py:1114] (0/4) Epoch 7, batch 250, loss[loss=0.2912, simple_loss=0.3349, pruned_loss=0.09043, ctc_loss=0.1665, over 19400.00 frames. ], tot_loss[loss=0.276, simple_loss=0.3172, pruned_loss=0.08523, ctc_loss=0.1607, over 2756000.58 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:38:16,687 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.901e+02 2.294e+02 2.833e+02 4.254e+02, threshold=4.587e+02, percent-clipped=0.0
+2024-08-25 12:38:23,341 INFO [train.py:1114] (0/4) Epoch 7, batch 300, loss[loss=0.2938, simple_loss=0.339, pruned_loss=0.08917, ctc_loss=0.1757, over 19538.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3161, pruned_loss=0.08429, ctc_loss=0.1591, over 2999935.01 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0
+2024-08-25 12:39:52,664 INFO [train.py:1114] (0/4) Epoch 7, batch 350, loss[loss=0.2385, simple_loss=0.281, pruned_loss=0.07171, ctc_loss=0.1312, over 19755.00 frames. ], tot_loss[loss=0.2744, simple_loss=0.3162, pruned_loss=0.08442, ctc_loss=0.1593, over 3189143.82 frames. ], batch size: 48, lr: 2.12e-02, grad_scale: 32.0
+2024-08-25 12:40:43,966 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.980e+02 2.268e+02 2.810e+02 5.782e+02, threshold=4.535e+02, percent-clipped=1.0
+2024-08-25 12:40:50,665 INFO [train.py:1114] (0/4) Epoch 7, batch 400, loss[loss=0.2633, simple_loss=0.3156, pruned_loss=0.07597, ctc_loss=0.1478, over 19482.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3158, pruned_loss=0.08396, ctc_loss=0.1584, over 3340491.50 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 32.0
+2024-08-25 12:41:52,336 INFO [train.py:1114] (0/4) Epoch 7, batch 450, loss[loss=0.2491, simple_loss=0.3095, pruned_loss=0.06735, ctc_loss=0.1351, over 19601.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.316, pruned_loss=0.08409, ctc_loss=0.1586, over 3447901.07 frames. ], batch size: 55, lr: 2.11e-02, grad_scale: 32.0
+2024-08-25 12:42:45,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 1.947e+02 2.448e+02 2.960e+02 4.262e+02, threshold=4.896e+02, percent-clipped=0.0
+2024-08-25 12:42:52,068 INFO [train.py:1114] (0/4) Epoch 7, batch 500, loss[loss=0.2594, simple_loss=0.316, pruned_loss=0.07345, ctc_loss=0.1397, over 19655.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3146, pruned_loss=0.08342, ctc_loss=0.1572, over 3543511.16 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 32.0
+2024-08-25 12:43:51,852 INFO [train.py:1114] (0/4) Epoch 7, batch 550, loss[loss=0.282, simple_loss=0.3231, pruned_loss=0.08812, ctc_loss=0.1614, over 19167.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.315, pruned_loss=0.08361, ctc_loss=0.1574, over 3604774.38 frames. ], batch size: 71, lr: 2.11e-02, grad_scale: 32.0
+2024-08-25 12:44:44,967 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 2.000e+02 2.364e+02 2.910e+02 5.356e+02, threshold=4.728e+02, percent-clipped=1.0
+2024-08-25 12:44:52,589 INFO [train.py:1114] (0/4) Epoch 7, batch 600, loss[loss=0.2949, simple_loss=0.338, pruned_loss=0.09179, ctc_loss=0.1705, over 19322.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.315, pruned_loss=0.083, ctc_loss=0.1565, over 3663939.57 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0
+2024-08-25 12:45:51,130 INFO [train.py:1114] (0/4) Epoch 7, batch 650, loss[loss=0.2963, simple_loss=0.3361, pruned_loss=0.09319, ctc_loss=0.1753, over 19746.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3141, pruned_loss=0.08281, ctc_loss=0.1559, over 3714209.88 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:46:47,169 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.844e+02 2.004e+02 2.285e+02 4.065e+02, threshold=4.009e+02, percent-clipped=0.0
+2024-08-25 12:46:52,907 INFO [train.py:1114] (0/4) Epoch 7, batch 700, loss[loss=0.2139, simple_loss=0.2774, pruned_loss=0.05413, ctc_loss=0.1054, over 19715.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3146, pruned_loss=0.08304, ctc_loss=0.1565, over 3746801.58 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:47:49,556 INFO [train.py:1114] (0/4) Epoch 7, batch 750, loss[loss=0.2743, simple_loss=0.3193, pruned_loss=0.08299, ctc_loss=0.1583, over 19505.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3136, pruned_loss=0.0823, ctc_loss=0.1549, over 3773207.45 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:48:45,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.565e+02 1.885e+02 2.166e+02 2.690e+02 4.534e+02, threshold=4.331e+02, percent-clipped=3.0
+2024-08-25 12:48:50,718 INFO [train.py:1114] (0/4) Epoch 7, batch 800, loss[loss=0.2407, simple_loss=0.2918, pruned_loss=0.06895, ctc_loss=0.1296, over 19801.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3138, pruned_loss=0.0824, ctc_loss=0.1549, over 3794941.03 frames. ], batch size: 49, lr: 2.10e-02, grad_scale: 32.0
+2024-08-25 12:49:51,393 INFO [train.py:1114] (0/4) Epoch 7, batch 850, loss[loss=0.2648, simple_loss=0.3236, pruned_loss=0.07508, ctc_loss=0.1396, over 19655.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.314, pruned_loss=0.0829, ctc_loss=0.1558, over 3814006.77 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0
+2024-08-25 12:50:43,498 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.470e+02 1.946e+02 2.270e+02 2.825e+02 4.143e+02, threshold=4.540e+02, percent-clipped=0.0
+2024-08-25 12:50:49,148 INFO [train.py:1114] (0/4) Epoch 7, batch 900, loss[loss=0.2454, simple_loss=0.2869, pruned_loss=0.07502, ctc_loss=0.1345, over 19440.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3144, pruned_loss=0.0834, ctc_loss=0.1565, over 3817543.60 frames. ], batch size: 48, lr: 2.09e-02, grad_scale: 32.0
+2024-08-25 12:52:05,338 INFO [train.py:1114] (0/4) Epoch 7, batch 950, loss[loss=0.258, simple_loss=0.3002, pruned_loss=0.07915, ctc_loss=0.1439, over 19510.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3146, pruned_loss=0.08351, ctc_loss=0.157, over 3819914.51 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 16.0
+2024-08-25 12:52:24,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.38 vs.
limit=22.5 +2024-08-25 12:52:34,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84832.0, ans=0.125 +2024-08-25 12:52:58,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0 +2024-08-25 12:52:59,184 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.065e+02 2.373e+02 2.949e+02 1.128e+03, threshold=4.746e+02, percent-clipped=6.0 +2024-08-25 12:53:05,274 INFO [train.py:1114] (0/4) Epoch 7, batch 1000, loss[loss=0.2316, simple_loss=0.2876, pruned_loss=0.06288, ctc_loss=0.1245, over 19858.00 frames. ], tot_loss[loss=0.2737, simple_loss=0.3157, pruned_loss=0.0842, ctc_loss=0.1582, over 3816644.91 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:53:05,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=84992.0, ans=0.05 +2024-08-25 12:53:14,635 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=15.0 +2024-08-25 12:53:15,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84992.0, ans=0.1 +2024-08-25 12:53:22,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85045.33333333333, ans=0.125 +2024-08-25 12:53:28,042 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=15.0 +2024-08-25 12:53:28,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.20 vs. limit=6.0 +2024-08-25 12:53:47,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=85152.0, ans=0.025 +2024-08-25 12:53:51,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.95 vs. limit=15.0 +2024-08-25 12:53:57,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=85205.33333333333, ans=0.0 +2024-08-25 12:54:01,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.07 vs. limit=15.0 +2024-08-25 12:54:05,085 INFO [train.py:1114] (0/4) Epoch 7, batch 1050, loss[loss=0.2693, simple_loss=0.3247, pruned_loss=0.07707, ctc_loss=0.1496, over 19864.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3151, pruned_loss=0.08383, ctc_loss=0.1573, over 3823808.53 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:54:15,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.86 vs. 
limit=15.0 +2024-08-25 12:54:16,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85312.0, ans=0.1 +2024-08-25 12:54:20,411 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-16000.pt +2024-08-25 12:54:22,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=85312.0, ans=0.125 +2024-08-25 12:54:25,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=85312.0, ans=0.5 +2024-08-25 12:54:37,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85365.33333333333, ans=0.125 +2024-08-25 12:54:42,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=85418.66666666667, ans=0.125 +2024-08-25 12:54:50,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=85418.66666666667, ans=0.0 +2024-08-25 12:54:51,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=85472.0, ans=0.125 +2024-08-25 12:54:55,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.98 vs. limit=22.5 +2024-08-25 12:54:56,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=85472.0, ans=0.07 +2024-08-25 12:55:01,663 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.423e+02 1.918e+02 2.325e+02 2.776e+02 4.591e+02, threshold=4.650e+02, percent-clipped=1.0 +2024-08-25 12:55:03,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=85472.0, ans=0.125 +2024-08-25 12:55:06,550 INFO [train.py:1114] (0/4) Epoch 7, batch 1100, loss[loss=0.2567, simple_loss=0.3039, pruned_loss=0.07575, ctc_loss=0.1448, over 19601.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3139, pruned_loss=0.08283, ctc_loss=0.1558, over 3831458.50 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:55:06,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85525.33333333333, ans=0.1 +2024-08-25 12:55:22,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=85578.66666666667, ans=0.0 +2024-08-25 12:55:22,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. 
limit=15.0 +2024-08-25 12:55:45,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=85685.33333333333, ans=0.0 +2024-08-25 12:56:01,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85738.66666666667, ans=0.1 +2024-08-25 12:56:01,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=85738.66666666667, ans=0.125 +2024-08-25 12:56:05,772 INFO [train.py:1114] (0/4) Epoch 7, batch 1150, loss[loss=0.2738, simple_loss=0.3086, pruned_loss=0.08745, ctc_loss=0.1604, over 19588.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3141, pruned_loss=0.08323, ctc_loss=0.1564, over 3830327.90 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:56:05,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85792.0, ans=0.125 +2024-08-25 12:56:08,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=85792.0, ans=0.0 +2024-08-25 12:56:24,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85845.33333333333, ans=0.125 +2024-08-25 12:56:27,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85845.33333333333, ans=0.125 +2024-08-25 12:56:35,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.40 vs. limit=22.5 +2024-08-25 12:56:44,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=85952.0, ans=0.0 +2024-08-25 12:56:51,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85952.0, ans=0.0 +2024-08-25 12:57:02,980 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.959e+02 2.167e+02 2.666e+02 4.946e+02, threshold=4.335e+02, percent-clipped=2.0 +2024-08-25 12:57:07,697 INFO [train.py:1114] (0/4) Epoch 7, batch 1200, loss[loss=0.26, simple_loss=0.3188, pruned_loss=0.07313, ctc_loss=0.1372, over 19850.00 frames. ], tot_loss[loss=0.2731, simple_loss=0.3154, pruned_loss=0.08389, ctc_loss=0.1576, over 3826209.33 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 12:57:27,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=86112.0, ans=0.0 +2024-08-25 12:57:52,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.65 vs. limit=15.0 +2024-08-25 12:58:05,929 INFO [train.py:1114] (0/4) Epoch 7, batch 1250, loss[loss=0.2737, simple_loss=0.3181, pruned_loss=0.08357, ctc_loss=0.1555, over 19523.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3152, pruned_loss=0.08324, ctc_loss=0.1563, over 3844367.75 frames. 
], batch size: 61, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 12:58:07,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86325.33333333333, ans=0.1 +2024-08-25 12:58:23,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=86378.66666666667, ans=0.2 +2024-08-25 12:58:24,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.40 vs. limit=15.0 +2024-08-25 12:58:26,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.56 vs. limit=15.0 +2024-08-25 12:58:47,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=86485.33333333333, ans=0.125 +2024-08-25 12:58:50,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.72 vs. limit=10.0 +2024-08-25 12:58:57,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=86538.66666666667, ans=0.025 +2024-08-25 12:59:02,864 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.964e+02 2.304e+02 2.729e+02 5.465e+02, threshold=4.608e+02, percent-clipped=2.0 +2024-08-25 12:59:07,509 INFO [train.py:1114] (0/4) Epoch 7, batch 1300, loss[loss=0.2903, simple_loss=0.3306, pruned_loss=0.09217, ctc_loss=0.1643, over 18932.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.314, pruned_loss=0.08255, ctc_loss=0.1551, over 3847421.45 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 12:59:12,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0 +2024-08-25 12:59:18,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=86645.33333333333, ans=0.025 +2024-08-25 12:59:34,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=86698.66666666667, ans=0.04949747468305833 +2024-08-25 12:59:47,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=86752.0, ans=0.125 +2024-08-25 12:59:50,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=86752.0, ans=15.0 +2024-08-25 12:59:53,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=86805.33333333333, ans=0.125 +2024-08-25 13:00:07,956 INFO [train.py:1114] (0/4) Epoch 7, batch 1350, loss[loss=0.2608, simple_loss=0.3113, pruned_loss=0.07692, ctc_loss=0.1411, over 19771.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3125, pruned_loss=0.08148, ctc_loss=0.1534, over 3858190.88 frames. 
], batch size: 54, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 13:00:18,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86912.0, ans=0.125 +2024-08-25 13:00:30,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=86965.33333333333, ans=0.125 +2024-08-25 13:00:46,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.86 vs. limit=15.0 +2024-08-25 13:00:47,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=87018.66666666667, ans=0.125 +2024-08-25 13:01:52,686 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:01:55,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=87072.0, ans=0.125 +2024-08-25 13:01:59,609 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.935e+02 2.309e+02 3.009e+02 4.449e+02, threshold=4.618e+02, percent-clipped=0.0 +2024-08-25 13:02:02,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=87072.0, ans=0.025 +2024-08-25 13:02:04,190 INFO [train.py:1114] (0/4) Epoch 7, batch 1400, loss[loss=0.2105, simple_loss=0.2614, pruned_loss=0.05829, ctc_loss=0.1073, over 19698.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3113, pruned_loss=0.08075, ctc_loss=0.1518, over 3866044.91 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0 +2024-08-25 13:02:08,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=87125.33333333333, ans=0.0 +2024-08-25 13:02:21,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=87178.66666666667, ans=0.0 +2024-08-25 13:02:28,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=87232.0, ans=0.125 +2024-08-25 13:02:28,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=87232.0, ans=0.125 +2024-08-25 13:02:46,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87285.33333333333, ans=0.125 +2024-08-25 13:02:57,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=87338.66666666667, ans=0.0 +2024-08-25 13:03:03,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=87338.66666666667, ans=0.0 +2024-08-25 13:03:05,423 INFO [train.py:1114] (0/4) Epoch 7, batch 1450, loss[loss=0.2903, simple_loss=0.3306, pruned_loss=0.09105, ctc_loss=0.1698, over 19693.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.313, pruned_loss=0.08176, ctc_loss=0.1539, over 3862692.13 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 16.0 +2024-08-25 13:03:34,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.02 vs. 
limit=15.0 +2024-08-25 13:04:22,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=87552.0, ans=0.125 +2024-08-25 13:04:37,116 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.77 vs. limit=15.0 +2024-08-25 13:04:46,566 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 2.015e+02 2.285e+02 2.716e+02 4.465e+02, threshold=4.569e+02, percent-clipped=0.0 +2024-08-25 13:04:50,192 INFO [train.py:1114] (0/4) Epoch 7, batch 1500, loss[loss=0.2774, simple_loss=0.3229, pruned_loss=0.08512, ctc_loss=0.1539, over 19570.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3131, pruned_loss=0.08158, ctc_loss=0.1537, over 3861815.85 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 16.0 +2024-08-25 13:04:52,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=87658.66666666667, ans=0.125 +2024-08-25 13:05:02,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=87712.0, ans=0.0 +2024-08-25 13:05:05,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=87712.0, ans=0.125 +2024-08-25 13:05:21,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.28 vs. limit=15.0 +2024-08-25 13:05:48,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87872.0, ans=0.1 +2024-08-25 13:05:57,452 INFO [train.py:1114] (0/4) Epoch 7, batch 1550, loss[loss=0.2836, simple_loss=0.3285, pruned_loss=0.08524, ctc_loss=0.1706, over 19600.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3137, pruned_loss=0.08241, ctc_loss=0.1551, over 3847484.92 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 16.0 +2024-08-25 13:06:07,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=87925.33333333333, ans=0.025 +2024-08-25 13:06:48,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=88138.66666666667, ans=0.2 +2024-08-25 13:06:55,900 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.554e+02 1.880e+02 2.225e+02 2.757e+02 4.141e+02, threshold=4.451e+02, percent-clipped=0.0 +2024-08-25 13:07:00,952 INFO [train.py:1114] (0/4) Epoch 7, batch 1600, loss[loss=0.2633, simple_loss=0.3169, pruned_loss=0.07723, ctc_loss=0.1381, over 19840.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.314, pruned_loss=0.08268, ctc_loss=0.1557, over 3836479.69 frames. 
], batch size: 57, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:07:08,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=88192.0, ans=0.125 +2024-08-25 13:07:35,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=88352.0, ans=0.0 +2024-08-25 13:07:36,930 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:07:41,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=88352.0, ans=0.125 +2024-08-25 13:07:47,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88405.33333333333, ans=0.125 +2024-08-25 13:07:58,851 INFO [train.py:1114] (0/4) Epoch 7, batch 1650, loss[loss=0.2839, simple_loss=0.3246, pruned_loss=0.08711, ctc_loss=0.1727, over 19659.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3137, pruned_loss=0.0828, ctc_loss=0.1559, over 3832847.28 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:08:11,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88512.0, ans=0.125 +2024-08-25 13:08:13,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=88512.0, ans=10.0 +2024-08-25 13:08:20,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.92 vs. limit=5.0 +2024-08-25 13:08:23,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=88565.33333333333, ans=0.125 +2024-08-25 13:08:29,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=88565.33333333333, ans=0.0 +2024-08-25 13:08:44,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=88618.66666666667, ans=15.0 +2024-08-25 13:08:54,965 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.917e+02 2.131e+02 2.729e+02 4.248e+02, threshold=4.261e+02, percent-clipped=0.0 +2024-08-25 13:08:58,388 INFO [train.py:1114] (0/4) Epoch 7, batch 1700, loss[loss=0.2408, simple_loss=0.2808, pruned_loss=0.07218, ctc_loss=0.1414, over 19668.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3131, pruned_loss=0.08185, ctc_loss=0.1542, over 3847638.34 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:09:17,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=88778.66666666667, ans=0.2 +2024-08-25 13:09:21,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.67 vs. 
limit=15.0 +2024-08-25 13:09:28,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=88832.0, ans=0.2 +2024-08-25 13:09:37,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=88885.33333333333, ans=0.125 +2024-08-25 13:09:42,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=88885.33333333333, ans=0.2 +2024-08-25 13:09:54,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.95 vs. limit=15.0 +2024-08-25 13:09:55,078 INFO [train.py:1114] (0/4) Epoch 7, batch 1750, loss[loss=0.2146, simple_loss=0.2593, pruned_loss=0.06182, ctc_loss=0.1159, over 19664.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3131, pruned_loss=0.08226, ctc_loss=0.1549, over 3851910.98 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:10:00,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=88992.0, ans=0.2 +2024-08-25 13:11:06,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89045.33333333333, ans=0.0 +2024-08-25 13:16:07,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.27 vs. limit=22.5 +2024-08-25 13:16:10,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=89045.33333333333, ans=0.2 +2024-08-25 13:17:36,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0 +2024-08-25 13:25:11,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=89205.33333333333, ans=0.125 +2024-08-25 13:29:44,214 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.972e+02 2.344e+02 2.828e+02 4.449e+02, threshold=4.688e+02, percent-clipped=1.0 +2024-08-25 13:29:47,701 INFO [train.py:1114] (0/4) Epoch 7, batch 1800, loss[loss=0.2784, simple_loss=0.3218, pruned_loss=0.08497, ctc_loss=0.1629, over 19609.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3131, pruned_loss=0.08212, ctc_loss=0.1545, over 3853707.10 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0 +2024-08-25 13:38:42,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=89418.66666666667, ans=0.125 +2024-08-25 13:40:18,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.27 vs. limit=10.0 +2024-08-25 13:40:34,852 INFO [train.py:1114] (0/4) Epoch 7, batch 1850, loss[loss=0.2608, simple_loss=0.3166, pruned_loss=0.07464, ctc_loss=0.1392, over 19566.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3122, pruned_loss=0.08148, ctc_loss=0.1532, over 3856339.26 frames. 
], batch size: 57, lr: 2.04e-02, grad_scale: 32.0 +2024-08-25 13:41:16,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=89525.33333333333, ans=0.125 +2024-08-25 13:41:51,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89578.66666666667, ans=0.125 +2024-08-25 13:41:51,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=89578.66666666667, ans=0.07 +2024-08-25 13:43:05,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=89632.0, ans=0.0 +2024-08-25 13:43:22,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89685.33333333333, ans=0.1 +2024-08-25 13:43:46,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=89738.66666666667, ans=0.2 +2024-08-25 13:43:46,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=89738.66666666667, ans=0.0 +2024-08-25 13:44:01,303 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.852e+02 2.070e+02 2.397e+02 4.608e+02, threshold=4.140e+02, percent-clipped=0.0 +2024-08-25 13:44:07,949 INFO [train.py:1114] (0/4) Epoch 7, batch 1900, loss[loss=0.2696, simple_loss=0.3181, pruned_loss=0.08036, ctc_loss=0.1509, over 19679.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.313, pruned_loss=0.08166, ctc_loss=0.1535, over 3861764.73 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0 +2024-08-25 13:44:16,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89792.0, ans=0.1 +2024-08-25 13:44:27,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.74 vs. limit=15.0 +2024-08-25 13:44:33,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=89845.33333333333, ans=0.0 +2024-08-25 13:44:33,455 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:45:00,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.95 vs. limit=15.0 +2024-08-25 13:45:36,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=90005.33333333333, ans=0.025 +2024-08-25 13:45:41,212 INFO [train.py:1114] (0/4) Epoch 7, batch 1950, loss[loss=0.2592, simple_loss=0.3131, pruned_loss=0.07461, ctc_loss=0.1399, over 19594.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3135, pruned_loss=0.08135, ctc_loss=0.153, over 3871132.26 frames. 
], batch size: 52, lr: 2.04e-02, grad_scale: 16.0 +2024-08-25 13:45:41,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=90058.66666666667, ans=0.2 +2024-08-25 13:45:55,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=90112.0, ans=0.025 +2024-08-25 13:45:56,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=90112.0, ans=0.025 +2024-08-25 13:46:42,768 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 1.896e+02 2.177e+02 2.703e+02 3.964e+02, threshold=4.354e+02, percent-clipped=0.0 +2024-08-25 13:46:45,048 INFO [train.py:1114] (0/4) Epoch 7, batch 2000, loss[loss=0.2469, simple_loss=0.2853, pruned_loss=0.07439, ctc_loss=0.1491, over 19665.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.314, pruned_loss=0.08184, ctc_loss=0.1539, over 3856701.82 frames. ], batch size: 45, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:46:46,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=90325.33333333333, ans=0.125 +2024-08-25 13:46:58,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=90378.66666666667, ans=0.0 +2024-08-25 13:46:59,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=90378.66666666667, ans=0.05 +2024-08-25 13:47:00,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-25 13:47:04,088 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:47:11,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.06 vs. limit=15.0 +2024-08-25 13:47:12,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.74 vs. limit=22.5 +2024-08-25 13:47:39,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90538.66666666667, ans=0.1 +2024-08-25 13:47:41,004 INFO [train.py:1114] (0/4) Epoch 7, batch 2050, loss[loss=0.2526, simple_loss=0.2923, pruned_loss=0.07794, ctc_loss=0.1427, over 19720.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3123, pruned_loss=0.08111, ctc_loss=0.1525, over 3852774.23 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:47:47,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90592.0, ans=0.125 +2024-08-25 13:47:49,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=90592.0, ans=0.0 +2024-08-25 13:47:49,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=90592.0, ans=0.025 +2024-08-25 13:47:57,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.78 vs. 
limit=15.0 +2024-08-25 13:48:07,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=15.0 +2024-08-25 13:48:16,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=90752.0, ans=0.0 +2024-08-25 13:48:16,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90752.0, ans=0.1 +2024-08-25 13:48:22,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=90752.0, ans=0.0 +2024-08-25 13:48:30,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-25 13:48:32,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=90805.33333333333, ans=0.2 +2024-08-25 13:48:36,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.053e+02 2.413e+02 3.017e+02 5.203e+02, threshold=4.827e+02, percent-clipped=2.0 +2024-08-25 13:48:38,583 INFO [train.py:1114] (0/4) Epoch 7, batch 2100, loss[loss=0.2702, simple_loss=0.3172, pruned_loss=0.08129, ctc_loss=0.1513, over 19765.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3116, pruned_loss=0.08034, ctc_loss=0.1514, over 3859772.11 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:48:42,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=90858.66666666667, ans=0.0 +2024-08-25 13:48:43,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.98 vs. limit=22.5 +2024-08-25 13:48:58,914 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.68 vs. limit=15.0 +2024-08-25 13:48:59,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=12.0 +2024-08-25 13:48:59,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.21 vs. limit=12.0 +2024-08-25 13:49:12,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=90965.33333333333, ans=0.125 +2024-08-25 13:49:20,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=91018.66666666667, ans=0.125 +2024-08-25 13:49:23,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=91018.66666666667, ans=0.0 +2024-08-25 13:49:24,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=91018.66666666667, ans=0.2 +2024-08-25 13:49:29,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=91018.66666666667, ans=0.125 +2024-08-25 13:49:43,250 INFO [train.py:1114] (0/4) Epoch 7, batch 2150, loss[loss=0.2382, simple_loss=0.2926, pruned_loss=0.06673, ctc_loss=0.126, over 19586.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3106, pruned_loss=0.07981, ctc_loss=0.1502, over 3870090.82 frames. 
], batch size: 52, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:49:47,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0 +2024-08-25 13:49:58,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=15.0 +2024-08-25 13:50:04,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=91232.0, ans=0.125 +2024-08-25 13:50:09,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=91232.0, ans=0.0 +2024-08-25 13:50:13,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.74 vs. limit=15.0 +2024-08-25 13:50:14,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=12.0 +2024-08-25 13:50:14,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=91232.0, ans=0.125 +2024-08-25 13:50:17,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=91285.33333333333, ans=0.125 +2024-08-25 13:50:20,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=91285.33333333333, ans=0.2 +2024-08-25 13:50:33,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=91338.66666666667, ans=0.0 +2024-08-25 13:50:36,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.440e+02 1.920e+02 2.200e+02 2.924e+02 5.090e+02, threshold=4.400e+02, percent-clipped=1.0 +2024-08-25 13:50:39,149 INFO [train.py:1114] (0/4) Epoch 7, batch 2200, loss[loss=0.2755, simple_loss=0.3197, pruned_loss=0.08384, ctc_loss=0.1592, over 19566.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3104, pruned_loss=0.07953, ctc_loss=0.1498, over 3867872.96 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0 +2024-08-25 13:51:01,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.40 vs. limit=6.0 +2024-08-25 13:51:01,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=91498.66666666667, ans=0.0 +2024-08-25 13:51:34,995 INFO [train.py:1114] (0/4) Epoch 7, batch 2250, loss[loss=0.2767, simple_loss=0.3224, pruned_loss=0.08377, ctc_loss=0.1587, over 19606.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.311, pruned_loss=0.08012, ctc_loss=0.151, over 3867908.29 frames. 
], batch size: 55, lr: 2.02e-02, grad_scale: 16.0 +2024-08-25 13:51:43,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91658.66666666667, ans=0.125 +2024-08-25 13:52:03,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=91765.33333333333, ans=0.0 +2024-08-25 13:52:26,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=91872.0, ans=0.125 +2024-08-25 13:52:28,412 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 2.146e+02 2.677e+02 3.204e+02 4.930e+02, threshold=5.354e+02, percent-clipped=3.0 +2024-08-25 13:52:29,565 INFO [train.py:1114] (0/4) Epoch 7, batch 2300, loss[loss=0.2607, simple_loss=0.2975, pruned_loss=0.08062, ctc_loss=0.1565, over 19477.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3105, pruned_loss=0.08033, ctc_loss=0.1514, over 3861624.74 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 16.0 +2024-08-25 13:52:31,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91925.33333333333, ans=0.1 +2024-08-25 13:52:39,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=91925.33333333333, ans=6.0 +2024-08-25 13:53:02,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=92085.33333333333, ans=0.1 +2024-08-25 13:53:03,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=92085.33333333333, ans=0.0 +2024-08-25 13:53:18,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=92138.66666666667, ans=0.125 +2024-08-25 13:53:23,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.70 vs. limit=15.0 +2024-08-25 13:53:25,154 INFO [train.py:1114] (0/4) Epoch 7, batch 2350, loss[loss=0.2519, simple_loss=0.3092, pruned_loss=0.07141, ctc_loss=0.1296, over 19667.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3104, pruned_loss=0.08039, ctc_loss=0.1508, over 3864539.95 frames. ], batch size: 63, lr: 2.02e-02, grad_scale: 16.0 +2024-08-25 13:53:32,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=92192.0, ans=0.02 +2024-08-25 13:54:02,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=92352.0, ans=0.2 +2024-08-25 13:54:12,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92405.33333333333, ans=0.125 +2024-08-25 13:54:18,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.474e+02 1.985e+02 2.336e+02 2.802e+02 4.974e+02, threshold=4.671e+02, percent-clipped=0.0 +2024-08-25 13:54:18,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=92458.66666666667, ans=0.125 +2024-08-25 13:54:19,292 INFO [train.py:1114] (0/4) Epoch 7, batch 2400, loss[loss=0.2661, simple_loss=0.318, pruned_loss=0.07833, ctc_loss=0.1439, over 19393.00 frames. 
], tot_loss[loss=0.2675, simple_loss=0.3123, pruned_loss=0.08096, ctc_loss=0.1517, over 3858854.77 frames. ], batch size: 67, lr: 2.01e-02, grad_scale: 32.0 +2024-08-25 13:54:32,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.89 vs. limit=15.0 +2024-08-25 13:54:45,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.43 vs. limit=22.5 +2024-08-25 13:55:46,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92565.33333333333, ans=0.0 +2024-08-25 13:55:58,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=92618.66666666667, ans=0.0 +2024-08-25 13:56:12,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=92725.33333333333, ans=0.125 +2024-08-25 13:56:13,546 INFO [train.py:1114] (0/4) Epoch 7, batch 2450, loss[loss=0.3284, simple_loss=0.3446, pruned_loss=0.114, ctc_loss=0.2103, over 14230.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3184, pruned_loss=0.08592, ctc_loss=0.1611, over 3733452.56 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 32.0 +2024-08-25 13:56:27,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.93 vs. limit=10.0 +2024-08-25 13:56:28,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=92778.66666666667, ans=10.0 +2024-08-25 13:56:29,519 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:56:55,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92832.0, ans=0.125 +2024-08-25 13:56:57,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=92885.33333333333, ans=0.125 +2024-08-25 13:57:06,705 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-7.pt +2024-08-25 13:57:54,269 INFO [train.py:1114] (0/4) Epoch 8, batch 0, loss[loss=0.2463, simple_loss=0.2936, pruned_loss=0.07354, ctc_loss=0.1299, over 19412.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.2936, pruned_loss=0.07354, ctc_loss=0.1299, over 19412.00 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 13:57:54,271 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 13:59:56,300 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.2171, simple_loss=0.2997, pruned_loss=0.04948, ctc_loss=0.08904, over 944034.00 frames. +2024-08-25 13:59:56,301 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 13:59:56,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.17 vs. 
limit=15.0 +2024-08-25 14:01:03,642 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.719e+02 2.158e+02 2.483e+02 2.902e+02 5.180e+02, threshold=4.965e+02, percent-clipped=2.0 +2024-08-25 14:01:11,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=92986.66666666667, ans=0.0 +2024-08-25 14:02:02,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=93093.33333333333, ans=0.0 +2024-08-25 14:02:12,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=93146.66666666667, ans=0.2 +2024-08-25 14:02:17,079 INFO [train.py:1114] (0/4) Epoch 8, batch 50, loss[loss=0.2746, simple_loss=0.3056, pruned_loss=0.08734, ctc_loss=0.1724, over 19734.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3179, pruned_loss=0.08434, ctc_loss=0.1606, over 844483.73 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:02:30,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.00 vs. limit=15.0 +2024-08-25 14:02:57,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.70 vs. limit=15.0 +2024-08-25 14:05:03,229 INFO [train.py:1114] (0/4) Epoch 8, batch 100, loss[loss=0.2297, simple_loss=0.2863, pruned_loss=0.0629, ctc_loss=0.1181, over 19716.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.315, pruned_loss=0.08195, ctc_loss=0.1552, over 1499106.47 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:05:09,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.53 vs. limit=15.0 +2024-08-25 14:05:14,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.910e+02 2.219e+02 2.660e+02 5.043e+02, threshold=4.439e+02, percent-clipped=1.0 +2024-08-25 14:05:22,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.27 vs. limit=15.0 +2024-08-25 14:05:24,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93520.0, ans=0.1 +2024-08-25 14:05:32,384 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:05:49,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93626.66666666667, ans=0.125 +2024-08-25 14:07:16,380 INFO [train.py:1114] (0/4) Epoch 8, batch 150, loss[loss=0.2105, simple_loss=0.2682, pruned_loss=0.05567, ctc_loss=0.1033, over 19697.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3119, pruned_loss=0.08035, ctc_loss=0.1523, over 2026666.89 frames. 
], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:07:17,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=93733.33333333333, ans=0.125 +2024-08-25 14:07:26,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=93786.66666666667, ans=0.125 +2024-08-25 14:08:13,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0 +2024-08-25 14:09:06,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=93840.0, ans=0.0 +2024-08-25 14:09:12,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.11 vs. limit=22.5 +2024-08-25 14:09:17,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.21 vs. limit=15.0 +2024-08-25 14:10:16,267 INFO [train.py:1114] (0/4) Epoch 8, batch 200, loss[loss=0.2821, simple_loss=0.327, pruned_loss=0.08547, ctc_loss=0.1659, over 18453.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3087, pruned_loss=0.07834, ctc_loss=0.1483, over 2434139.91 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:10:16,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=94000.0, ans=0.1 +2024-08-25 14:10:29,233 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.854e+02 2.093e+02 2.544e+02 5.078e+02, threshold=4.187e+02, percent-clipped=1.0 +2024-08-25 14:10:32,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=6.0 +2024-08-25 14:10:40,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=94106.66666666667, ans=0.0 +2024-08-25 14:10:45,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=94106.66666666667, ans=0.2 +2024-08-25 14:10:45,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=94106.66666666667, ans=0.2 +2024-08-25 14:10:49,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=22.5 +2024-08-25 14:11:16,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.76 vs. limit=10.0 +2024-08-25 14:11:17,845 INFO [train.py:1114] (0/4) Epoch 8, batch 250, loss[loss=0.2882, simple_loss=0.3273, pruned_loss=0.09055, ctc_loss=0.17, over 19396.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3086, pruned_loss=0.07842, ctc_loss=0.1482, over 2754429.15 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:11:28,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=94266.66666666667, ans=0.0 +2024-08-25 14:12:37,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.62 vs. 
limit=15.0 +2024-08-25 14:12:49,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.69 vs. limit=10.0 +2024-08-25 14:13:15,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94480.0, ans=0.125 +2024-08-25 14:13:21,909 INFO [train.py:1114] (0/4) Epoch 8, batch 300, loss[loss=0.2922, simple_loss=0.3346, pruned_loss=0.09057, ctc_loss=0.1716, over 19547.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3083, pruned_loss=0.07814, ctc_loss=0.1476, over 2999392.35 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:13:33,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.508e+02 1.987e+02 2.340e+02 3.022e+02 6.047e+02, threshold=4.681e+02, percent-clipped=9.0 +2024-08-25 14:13:37,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94586.66666666667, ans=0.125 +2024-08-25 14:13:40,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0 +2024-08-25 14:13:44,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94640.0, ans=0.125 +2024-08-25 14:14:20,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94640.0, ans=0.125 +2024-08-25 14:14:35,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=94693.33333333333, ans=0.125 +2024-08-25 14:14:52,113 INFO [train.py:1114] (0/4) Epoch 8, batch 350, loss[loss=0.2479, simple_loss=0.2877, pruned_loss=0.07635, ctc_loss=0.1387, over 19784.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3084, pruned_loss=0.07787, ctc_loss=0.147, over 3189369.02 frames. 
], batch size: 48, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:14:59,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=94800.0, ans=0.1 +2024-08-25 14:15:03,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=94853.33333333333, ans=0.125 +2024-08-25 14:15:52,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=94906.66666666667, ans=0.2 +2024-08-25 14:15:59,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94906.66666666667, ans=0.125 +2024-08-25 14:16:35,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=94960.0, ans=0.0 +2024-08-25 14:16:35,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=94960.0, ans=15.0 +2024-08-25 14:16:39,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=95013.33333333333, ans=0.025 +2024-08-25 14:16:44,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95013.33333333333, ans=0.125 +2024-08-25 14:16:50,836 INFO [train.py:1114] (0/4) Epoch 8, batch 400, loss[loss=0.2641, simple_loss=0.3158, pruned_loss=0.07684, ctc_loss=0.1469, over 19501.00 frames. ], tot_loss[loss=0.261, simple_loss=0.308, pruned_loss=0.07768, ctc_loss=0.1465, over 3341730.71 frames. ], batch size: 54, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:16:58,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.98 vs. limit=15.0 +2024-08-25 14:17:03,875 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.620e+02 2.019e+02 2.528e+02 3.132e+02 5.852e+02, threshold=5.056e+02, percent-clipped=7.0 +2024-08-25 14:17:11,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=95120.0, ans=0.07 +2024-08-25 14:17:35,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=15.0 +2024-08-25 14:18:38,391 INFO [train.py:1114] (0/4) Epoch 8, batch 450, loss[loss=0.2345, simple_loss=0.2966, pruned_loss=0.06307, ctc_loss=0.1155, over 19589.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3082, pruned_loss=0.07784, ctc_loss=0.1466, over 3449127.76 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:19:15,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95493.33333333333, ans=0.1 +2024-08-25 14:19:22,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=95493.33333333333, ans=0.125 +2024-08-25 14:19:23,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.00 vs. limit=15.0 +2024-08-25 14:19:39,041 INFO [train.py:1114] (0/4) Epoch 8, batch 500, loss[loss=0.2956, simple_loss=0.3363, pruned_loss=0.09344, ctc_loss=0.1699, over 19623.00 frames. 
], tot_loss[loss=0.2613, simple_loss=0.3078, pruned_loss=0.07801, ctc_loss=0.1469, over 3545194.71 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:21:37,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=95600.0, ans=0.125 +2024-08-25 14:21:42,075 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.925e+02 2.242e+02 2.655e+02 4.786e+02, threshold=4.483e+02, percent-clipped=0.0 +2024-08-25 14:21:46,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.37 vs. limit=12.0 +2024-08-25 14:21:48,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0 +2024-08-25 14:21:52,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.27 vs. limit=22.5 +2024-08-25 14:22:16,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=95760.0, ans=0.125 +2024-08-25 14:22:32,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=95813.33333333333, ans=0.025 +2024-08-25 14:22:36,090 INFO [train.py:1114] (0/4) Epoch 8, batch 550, loss[loss=0.2597, simple_loss=0.3127, pruned_loss=0.07395, ctc_loss=0.147, over 19305.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3072, pruned_loss=0.07761, ctc_loss=0.1462, over 3607776.32 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:22:39,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95866.66666666667, ans=0.1 +2024-08-25 14:22:43,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95866.66666666667, ans=0.125 +2024-08-25 14:25:33,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=96080.0, ans=0.125 +2024-08-25 14:25:43,160 INFO [train.py:1114] (0/4) Epoch 8, batch 600, loss[loss=0.2569, simple_loss=0.3155, pruned_loss=0.07205, ctc_loss=0.1354, over 19373.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3078, pruned_loss=0.07758, ctc_loss=0.1462, over 3665938.20 frames. 
], batch size: 67, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:25:54,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.517e+02 1.975e+02 2.461e+02 2.998e+02 6.685e+02, threshold=4.922e+02, percent-clipped=2.0 +2024-08-25 14:26:00,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=96186.66666666667, ans=0.0 +2024-08-25 14:26:31,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=96293.33333333333, ans=0.0 +2024-08-25 14:27:47,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96346.66666666667, ans=0.1 +2024-08-25 14:27:49,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=96346.66666666667, ans=0.0 +2024-08-25 14:29:19,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96346.66666666667, ans=0.1 +2024-08-25 14:29:21,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=96400.0, ans=0.5 +2024-08-25 14:29:23,566 INFO [train.py:1114] (0/4) Epoch 8, batch 650, loss[loss=0.2501, simple_loss=0.3006, pruned_loss=0.07265, ctc_loss=0.1359, over 19765.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3071, pruned_loss=0.07749, ctc_loss=0.1458, over 3715955.42 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:29:25,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=96400.0, ans=0.025 +2024-08-25 14:29:35,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=96453.33333333333, ans=0.125 +2024-08-25 14:29:45,577 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=2.595e+00 +2024-08-25 14:29:45,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96453.33333333333, ans=0.125 +2024-08-25 14:29:52,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.06 vs. limit=12.0 +2024-08-25 14:30:53,907 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-08-25 14:31:05,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=96613.33333333333, ans=0.125 +2024-08-25 14:31:23,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96666.66666666667, ans=0.125 +2024-08-25 14:31:24,401 INFO [train.py:1114] (0/4) Epoch 8, batch 700, loss[loss=0.2615, simple_loss=0.3072, pruned_loss=0.07793, ctc_loss=0.1499, over 19717.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3074, pruned_loss=0.07763, ctc_loss=0.1461, over 3747733.37 frames. 
], batch size: 51, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:31:36,080 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.482e+02 1.952e+02 2.228e+02 2.907e+02 4.140e+02, threshold=4.456e+02, percent-clipped=0.0 +2024-08-25 14:31:47,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.77 vs. limit=15.0 +2024-08-25 14:32:15,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-08-25 14:32:35,090 INFO [train.py:1114] (0/4) Epoch 8, batch 750, loss[loss=0.2483, simple_loss=0.307, pruned_loss=0.06917, ctc_loss=0.1282, over 19505.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3067, pruned_loss=0.07677, ctc_loss=0.1447, over 3773504.01 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:32:43,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0 +2024-08-25 14:32:49,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.21 vs. limit=22.5 +2024-08-25 14:33:00,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.80 vs. limit=22.5 +2024-08-25 14:33:10,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=97040.0, ans=0.125 +2024-08-25 14:33:15,375 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:33:16,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97040.0, ans=0.1 +2024-08-25 14:33:41,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97146.66666666667, ans=0.1 +2024-08-25 14:33:45,414 INFO [train.py:1114] (0/4) Epoch 8, batch 800, loss[loss=0.206, simple_loss=0.2654, pruned_loss=0.05211, ctc_loss=0.1057, over 19402.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3061, pruned_loss=0.07637, ctc_loss=0.1439, over 3795054.21 frames. ], batch size: 48, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:34:35,081 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.511e+02 1.855e+02 2.176e+02 2.933e+02 4.905e+02, threshold=4.353e+02, percent-clipped=3.0 +2024-08-25 14:34:40,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=97253.33333333333, ans=0.125 +2024-08-25 14:35:14,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=97413.33333333333, ans=10.0 +2024-08-25 14:35:22,274 INFO [train.py:1114] (0/4) Epoch 8, batch 850, loss[loss=0.3085, simple_loss=0.3432, pruned_loss=0.09822, ctc_loss=0.1932, over 19658.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3062, pruned_loss=0.07683, ctc_loss=0.1446, over 3814413.34 frames. 
], batch size: 59, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:35:30,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=97466.66666666667, ans=0.0 +2024-08-25 14:35:32,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=97520.0, ans=0.0 +2024-08-25 14:35:39,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97520.0, ans=0.125 +2024-08-25 14:35:39,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=97520.0, ans=0.125 +2024-08-25 14:35:39,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97520.0, ans=0.1 +2024-08-25 14:35:40,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=97520.0, ans=0.125 +2024-08-25 14:36:02,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=97626.66666666667, ans=0.0 +2024-08-25 14:36:16,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97680.0, ans=0.125 +2024-08-25 14:36:19,790 INFO [train.py:1114] (0/4) Epoch 8, batch 900, loss[loss=0.2492, simple_loss=0.292, pruned_loss=0.07527, ctc_loss=0.1395, over 19397.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.307, pruned_loss=0.07731, ctc_loss=0.1453, over 3817837.83 frames. ], batch size: 48, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:38:25,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=97733.33333333333, ans=0.125 +2024-08-25 14:38:27,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=97733.33333333333, ans=0.5 +2024-08-25 14:38:30,487 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.590e+02 1.935e+02 2.327e+02 2.780e+02 5.034e+02, threshold=4.654e+02, percent-clipped=2.0 +2024-08-25 14:38:34,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.49 vs. limit=12.0 +2024-08-25 14:39:07,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=97893.33333333333, ans=0.025 +2024-08-25 14:39:56,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.46 vs. limit=15.0 +2024-08-25 14:39:58,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97946.66666666667, ans=0.1 +2024-08-25 14:40:01,367 INFO [train.py:1114] (0/4) Epoch 8, batch 950, loss[loss=0.2638, simple_loss=0.309, pruned_loss=0.08057, ctc_loss=0.1439, over 19485.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3074, pruned_loss=0.07755, ctc_loss=0.1458, over 3818995.90 frames. 
], batch size: 49, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:40:20,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=98053.33333333333, ans=0.125 +2024-08-25 14:41:23,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98106.66666666667, ans=0.125 +2024-08-25 14:43:24,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=98213.33333333333, ans=0.125 +2024-08-25 14:43:29,291 INFO [train.py:1114] (0/4) Epoch 8, batch 1000, loss[loss=0.2315, simple_loss=0.2851, pruned_loss=0.06381, ctc_loss=0.1257, over 19854.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.308, pruned_loss=0.07794, ctc_loss=0.1464, over 3815625.27 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:43:29,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98266.66666666667, ans=0.1 +2024-08-25 14:43:47,364 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 2.014e+02 2.465e+02 3.304e+02 4.205e+02, threshold=4.930e+02, percent-clipped=0.0 +2024-08-25 14:46:06,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98320.0, ans=0.125 +2024-08-25 14:46:26,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=98426.66666666667, ans=0.125 +2024-08-25 14:46:39,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=98480.0, ans=0.05 +2024-08-25 14:46:44,412 INFO [train.py:1114] (0/4) Epoch 8, batch 1050, loss[loss=0.2549, simple_loss=0.3075, pruned_loss=0.07376, ctc_loss=0.1368, over 19837.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3067, pruned_loss=0.07734, ctc_loss=0.1453, over 3820525.10 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:46:56,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=98586.66666666667, ans=0.025 +2024-08-25 14:47:26,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.75 vs. limit=15.0 +2024-08-25 14:47:30,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=98746.66666666667, ans=0.125 +2024-08-25 14:47:44,583 INFO [train.py:1114] (0/4) Epoch 8, batch 1100, loss[loss=0.2567, simple_loss=0.3089, pruned_loss=0.07486, ctc_loss=0.1371, over 19597.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3065, pruned_loss=0.07697, ctc_loss=0.145, over 3829314.28 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:47:45,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.37 vs. 
limit=15.0 +2024-08-25 14:48:13,773 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.814e+02 2.071e+02 2.620e+02 3.682e+02, threshold=4.142e+02, percent-clipped=0.0 +2024-08-25 14:48:32,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=98853.33333333333, ans=0.04949747468305833 +2024-08-25 14:49:04,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0 +2024-08-25 14:49:55,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=99013.33333333333, ans=0.025 +2024-08-25 14:50:00,880 INFO [train.py:1114] (0/4) Epoch 8, batch 1150, loss[loss=0.2687, simple_loss=0.3098, pruned_loss=0.08282, ctc_loss=0.1549, over 19589.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3066, pruned_loss=0.07715, ctc_loss=0.1454, over 3827681.85 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:51:01,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=99066.66666666667, ans=0.125 +2024-08-25 14:51:03,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=99066.66666666667, ans=0.125 +2024-08-25 14:52:00,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.57 vs. limit=15.0 +2024-08-25 14:52:03,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=99226.66666666667, ans=15.0 +2024-08-25 14:52:51,800 INFO [train.py:1114] (0/4) Epoch 8, batch 1200, loss[loss=0.2551, simple_loss=0.3099, pruned_loss=0.07424, ctc_loss=0.1296, over 19848.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3073, pruned_loss=0.07723, ctc_loss=0.1454, over 3824393.63 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:52:55,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=99333.33333333333, ans=0.2 +2024-08-25 14:53:05,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0 +2024-08-25 14:53:06,263 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.879e+02 2.149e+02 2.634e+02 4.011e+02, threshold=4.298e+02, percent-clipped=0.0 +2024-08-25 14:53:51,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=99600.0, ans=0.025 +2024-08-25 14:53:52,347 INFO [train.py:1114] (0/4) Epoch 8, batch 1250, loss[loss=0.2977, simple_loss=0.3389, pruned_loss=0.09371, ctc_loss=0.173, over 19533.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.308, pruned_loss=0.07739, ctc_loss=0.1454, over 3842788.18 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:55:32,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.25 vs. 
limit=15.0 +2024-08-25 14:56:01,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=99813.33333333333, ans=0.025 +2024-08-25 14:56:05,572 INFO [train.py:1114] (0/4) Epoch 8, batch 1300, loss[loss=0.3116, simple_loss=0.3446, pruned_loss=0.1007, ctc_loss=0.1929, over 18859.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3072, pruned_loss=0.07694, ctc_loss=0.1446, over 3845901.22 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:56:12,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=12.0 +2024-08-25 14:56:15,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=99866.66666666667, ans=0.025 +2024-08-25 14:56:17,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.809e+02 2.147e+02 2.747e+02 4.726e+02, threshold=4.293e+02, percent-clipped=4.0 +2024-08-25 14:56:22,341 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.48 vs. limit=15.0 +2024-08-25 14:58:02,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=100026.66666666667, ans=0.07 +2024-08-25 14:58:50,536 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:58:53,770 INFO [train.py:1114] (0/4) Epoch 8, batch 1350, loss[loss=0.2561, simple_loss=0.3016, pruned_loss=0.07603, ctc_loss=0.1464, over 19769.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.306, pruned_loss=0.07598, ctc_loss=0.1427, over 3857801.95 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:58:58,235 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.42 vs. limit=15.0 +2024-08-25 14:59:06,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=100186.66666666667, ans=0.125 +2024-08-25 14:59:13,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=100186.66666666667, ans=0.09899494936611666 +2024-08-25 14:59:22,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=100240.0, ans=0.2 +2024-08-25 14:59:30,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100293.33333333333, ans=0.125 +2024-08-25 14:59:51,910 INFO [train.py:1114] (0/4) Epoch 8, batch 1400, loss[loss=0.2286, simple_loss=0.2755, pruned_loss=0.06546, ctc_loss=0.1269, over 19669.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3059, pruned_loss=0.07625, ctc_loss=0.143, over 3864552.82 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:00:03,304 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 2.018e+02 2.600e+02 3.300e+02 7.375e+02, threshold=5.199e+02, percent-clipped=11.0 +2024-08-25 15:00:03,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. 
limit=10.0 +2024-08-25 15:00:06,923 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 15:00:15,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100506.66666666667, ans=0.1 +2024-08-25 15:00:31,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=100560.0, ans=0.2 +2024-08-25 15:00:38,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=100613.33333333333, ans=0.0 +2024-08-25 15:00:56,744 INFO [train.py:1114] (0/4) Epoch 8, batch 1450, loss[loss=0.2791, simple_loss=0.3262, pruned_loss=0.08453, ctc_loss=0.1574, over 19703.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3064, pruned_loss=0.07627, ctc_loss=0.1431, over 3862989.12 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:01:03,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100666.66666666667, ans=0.0 +2024-08-25 15:01:08,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=100720.0, ans=0.125 +2024-08-25 15:01:20,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=100773.33333333333, ans=0.0 +2024-08-25 15:03:17,705 INFO [train.py:1114] (0/4) Epoch 8, batch 1500, loss[loss=0.249, simple_loss=0.3066, pruned_loss=0.06902, ctc_loss=0.1332, over 19578.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3073, pruned_loss=0.07678, ctc_loss=0.1445, over 3862350.18 frames. ], batch size: 57, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:05:15,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100933.33333333333, ans=0.125 +2024-08-25 15:05:24,436 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.972e+02 2.271e+02 2.845e+02 5.404e+02, threshold=4.542e+02, percent-clipped=1.0 +2024-08-25 15:05:26,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100986.66666666667, ans=0.125 +2024-08-25 15:05:32,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.05 vs. limit=22.5 +2024-08-25 15:07:43,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=100986.66666666667, ans=0.125 +2024-08-25 15:09:55,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=101146.66666666667, ans=0.0 +2024-08-25 15:10:02,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0 +2024-08-25 15:10:18,890 INFO [train.py:1114] (0/4) Epoch 8, batch 1550, loss[loss=0.2504, simple_loss=0.3083, pruned_loss=0.06967, ctc_loss=0.1327, over 19607.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3069, pruned_loss=0.07672, ctc_loss=0.1444, over 3846891.21 frames. 
], batch size: 60, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:10:45,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=101253.33333333333, ans=0.025 +2024-08-25 15:13:33,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=101360.0, ans=0.0 +2024-08-25 15:13:57,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.35 vs. limit=15.0 +2024-08-25 15:14:11,863 INFO [train.py:1114] (0/4) Epoch 8, batch 1600, loss[loss=0.2537, simple_loss=0.3075, pruned_loss=0.07241, ctc_loss=0.1379, over 19829.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3064, pruned_loss=0.07649, ctc_loss=0.1438, over 3836792.40 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:14:12,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101466.66666666667, ans=0.125 +2024-08-25 15:14:12,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.01 vs. limit=22.5 +2024-08-25 15:14:21,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=101466.66666666667, ans=0.125 +2024-08-25 15:14:31,967 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.915e+02 2.222e+02 2.696e+02 4.640e+02, threshold=4.444e+02, percent-clipped=1.0 +2024-08-25 15:14:42,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=101520.0, ans=0.125 +2024-08-25 15:14:53,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=101573.33333333333, ans=0.0 +2024-08-25 15:15:06,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=101626.66666666667, ans=0.2 +2024-08-25 15:15:30,465 INFO [train.py:1114] (0/4) Epoch 8, batch 1650, loss[loss=0.2362, simple_loss=0.3003, pruned_loss=0.06183, ctc_loss=0.1212, over 19644.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3066, pruned_loss=0.07681, ctc_loss=0.1443, over 3834185.61 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:15:30,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-08-25 15:15:58,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101840.0, ans=0.1 +2024-08-25 15:16:27,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=102000.0, ans=0.0 +2024-08-25 15:16:28,208 INFO [train.py:1114] (0/4) Epoch 8, batch 1700, loss[loss=0.2282, simple_loss=0.2758, pruned_loss=0.06457, ctc_loss=0.1287, over 19678.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3058, pruned_loss=0.07607, ctc_loss=0.143, over 3848314.27 frames. 
], batch size: 46, lr: 1.82e-02, grad_scale: 16.0 +2024-08-25 15:16:40,738 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.920e+02 2.237e+02 2.711e+02 4.644e+02, threshold=4.474e+02, percent-clipped=2.0 +2024-08-25 15:16:53,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=6.0 +2024-08-25 15:17:20,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102160.0, ans=0.125 +2024-08-25 15:17:28,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-08-25 15:17:39,506 INFO [train.py:1114] (0/4) Epoch 8, batch 1750, loss[loss=0.2635, simple_loss=0.2951, pruned_loss=0.08475, ctc_loss=0.156, over 19684.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3053, pruned_loss=0.07601, ctc_loss=0.1426, over 3851911.08 frames. ], batch size: 45, lr: 1.82e-02, grad_scale: 16.0 +2024-08-25 15:17:50,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=102320.0, ans=0.0 +2024-08-25 15:19:54,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=102373.33333333333, ans=0.2 +2024-08-25 15:20:05,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.96 vs. limit=15.0 +2024-08-25 15:20:09,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=102426.66666666667, ans=0.125 +2024-08-25 15:20:10,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=15.0 +2024-08-25 15:20:14,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=102480.0, ans=0.125 +2024-08-25 15:20:17,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=102480.0, ans=0.05 +2024-08-25 15:20:25,849 INFO [train.py:1114] (0/4) Epoch 8, batch 1800, loss[loss=0.2588, simple_loss=0.3152, pruned_loss=0.07384, ctc_loss=0.1371, over 19612.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3055, pruned_loss=0.07603, ctc_loss=0.1424, over 3854379.38 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:20:37,815 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.544e+02 1.874e+02 2.230e+02 2.859e+02 4.439e+02, threshold=4.460e+02, percent-clipped=0.0 +2024-08-25 15:24:49,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=102693.33333333333, ans=0.125 +2024-08-25 15:26:47,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-08-25 15:28:59,115 INFO [train.py:1114] (0/4) Epoch 8, batch 1850, loss[loss=0.2922, simple_loss=0.3338, pruned_loss=0.09085, ctc_loss=0.1724, over 19594.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3054, pruned_loss=0.07585, ctc_loss=0.1425, over 3857135.20 frames. 
], batch size: 57, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:28:59,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=102800.0, ans=0.125 +2024-08-25 15:29:02,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102800.0, ans=0.1 +2024-08-25 15:29:14,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.38 vs. limit=10.0 +2024-08-25 15:29:18,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=102853.33333333333, ans=10.0 +2024-08-25 15:29:26,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=102906.66666666667, ans=0.09899494936611666 +2024-08-25 15:29:44,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.70 vs. limit=22.5 +2024-08-25 15:32:37,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=103066.66666666667, ans=0.0 +2024-08-25 15:32:38,670 INFO [train.py:1114] (0/4) Epoch 8, batch 1900, loss[loss=0.2515, simple_loss=0.3195, pruned_loss=0.06733, ctc_loss=0.122, over 19649.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3058, pruned_loss=0.07591, ctc_loss=0.1427, over 3861782.54 frames. ], batch size: 59, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:32:38,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103066.66666666667, ans=0.125 +2024-08-25 15:32:44,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=103066.66666666667, ans=0.125 +2024-08-25 15:32:45,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103066.66666666667, ans=0.125 +2024-08-25 15:32:52,958 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.872e+02 2.139e+02 2.618e+02 5.849e+02, threshold=4.279e+02, percent-clipped=4.0 +2024-08-25 15:32:54,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=103120.0, ans=0.035 +2024-08-25 15:33:18,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=103226.66666666667, ans=0.125 +2024-08-25 15:33:37,635 INFO [train.py:1114] (0/4) Epoch 8, batch 1950, loss[loss=0.2433, simple_loss=0.2979, pruned_loss=0.0673, ctc_loss=0.1352, over 19589.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3072, pruned_loss=0.07613, ctc_loss=0.1435, over 3870478.34 frames. ], batch size: 52, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:33:50,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.80 vs. 
limit=12.0 +2024-08-25 15:33:51,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103333.33333333333, ans=0.125 +2024-08-25 15:34:08,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103440.0, ans=0.0 +2024-08-25 15:34:23,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=103493.33333333333, ans=0.125 +2024-08-25 15:34:23,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103493.33333333333, ans=0.125 +2024-08-25 15:34:26,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103493.33333333333, ans=0.125 +2024-08-25 15:34:34,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103546.66666666667, ans=0.125 +2024-08-25 15:34:42,878 INFO [train.py:1114] (0/4) Epoch 8, batch 2000, loss[loss=0.213, simple_loss=0.2638, pruned_loss=0.05975, ctc_loss=0.107, over 19643.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3082, pruned_loss=0.07708, ctc_loss=0.1449, over 3855829.59 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 32.0 +2024-08-25 15:34:52,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=103600.0, ans=0.025 +2024-08-25 15:34:55,662 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.835e+02 2.022e+02 2.450e+02 4.734e+02, threshold=4.043e+02, percent-clipped=1.0 +2024-08-25 15:35:03,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=103653.33333333333, ans=0.125 +2024-08-25 15:35:05,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=103706.66666666667, ans=0.05 +2024-08-25 15:35:38,656 INFO [train.py:1114] (0/4) Epoch 8, batch 2050, loss[loss=0.2438, simple_loss=0.2878, pruned_loss=0.07309, ctc_loss=0.1339, over 19688.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3073, pruned_loss=0.07704, ctc_loss=0.1446, over 3851400.97 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:35:43,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=103866.66666666667, ans=0.0 +2024-08-25 15:35:46,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103866.66666666667, ans=0.125 +2024-08-25 15:35:47,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=103866.66666666667, ans=0.025 +2024-08-25 15:36:16,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104026.66666666667, ans=0.125 +2024-08-25 15:36:24,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104080.0, ans=0.125 +2024-08-25 15:36:32,754 INFO [train.py:1114] (0/4) Epoch 8, batch 2100, loss[loss=0.2474, simple_loss=0.3009, pruned_loss=0.07016, ctc_loss=0.1339, over 19772.00 frames. 
], tot_loss[loss=0.2575, simple_loss=0.3058, pruned_loss=0.07594, ctc_loss=0.1429, over 3858466.81 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:36:41,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=104133.33333333333, ans=0.0 +2024-08-25 15:36:44,886 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 2.055e+02 2.348e+02 2.987e+02 4.948e+02, threshold=4.695e+02, percent-clipped=5.0 +2024-08-25 15:36:53,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104240.0, ans=0.1 +2024-08-25 15:36:59,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=104240.0, ans=0.0 +2024-08-25 15:37:25,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.41 vs. limit=22.5 +2024-08-25 15:37:27,179 INFO [train.py:1114] (0/4) Epoch 8, batch 2150, loss[loss=0.2381, simple_loss=0.2961, pruned_loss=0.0657, ctc_loss=0.1217, over 19585.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3048, pruned_loss=0.07538, ctc_loss=0.1417, over 3869125.10 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:37:42,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=104453.33333333333, ans=0.0 +2024-08-25 15:37:48,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=15.0 +2024-08-25 15:37:58,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=104506.66666666667, ans=0.04949747468305833 +2024-08-25 15:38:23,253 INFO [train.py:1114] (0/4) Epoch 8, batch 2200, loss[loss=0.2823, simple_loss=0.3251, pruned_loss=0.08663, ctc_loss=0.1659, over 19594.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3053, pruned_loss=0.07581, ctc_loss=0.1424, over 3867395.04 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:38:32,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=22.5 +2024-08-25 15:38:35,673 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.961e+02 2.280e+02 3.038e+02 5.675e+02, threshold=4.560e+02, percent-clipped=2.0 +2024-08-25 15:38:45,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104773.33333333333, ans=0.1 +2024-08-25 15:38:57,995 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 15:39:02,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104826.66666666667, ans=0.125 +2024-08-25 15:39:05,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104826.66666666667, ans=0.125 +2024-08-25 15:39:18,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. 
limit=5.0 +2024-08-25 15:39:19,044 INFO [train.py:1114] (0/4) Epoch 8, batch 2250, loss[loss=0.24, simple_loss=0.304, pruned_loss=0.06424, ctc_loss=0.1191, over 19628.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3052, pruned_loss=0.07565, ctc_loss=0.1419, over 3867405.77 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:39:33,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.86 vs. limit=22.5 +2024-08-25 15:39:44,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.97 vs. limit=22.5 +2024-08-25 15:39:45,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=105040.0, ans=0.125 +2024-08-25 15:39:52,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=105093.33333333333, ans=0.07 +2024-08-25 15:39:52,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.69 vs. limit=15.0 +2024-08-25 15:40:01,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105093.33333333333, ans=0.125 +2024-08-25 15:40:13,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=105200.0, ans=0.2 +2024-08-25 15:40:14,491 INFO [train.py:1114] (0/4) Epoch 8, batch 2300, loss[loss=0.2142, simple_loss=0.2725, pruned_loss=0.05682, ctc_loss=0.1058, over 19513.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3039, pruned_loss=0.07544, ctc_loss=0.1417, over 3860937.02 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:40:28,024 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.907e+02 2.167e+02 2.593e+02 4.976e+02, threshold=4.335e+02, percent-clipped=1.0 +2024-08-25 15:40:40,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105306.66666666667, ans=0.125 +2024-08-25 15:41:11,101 INFO [train.py:1114] (0/4) Epoch 8, batch 2350, loss[loss=0.2924, simple_loss=0.3378, pruned_loss=0.08855, ctc_loss=0.1745, over 19669.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3038, pruned_loss=0.07526, ctc_loss=0.1413, over 3863603.50 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:41:22,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=105520.0, ans=0.125 +2024-08-25 15:41:40,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105573.33333333333, ans=0.09899494936611666 +2024-08-25 15:41:40,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.75 vs. 
limit=15.0 +2024-08-25 15:41:53,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105680.0, ans=0.125 +2024-08-25 15:42:05,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=105733.33333333333, ans=0.125 +2024-08-25 15:42:06,007 INFO [train.py:1114] (0/4) Epoch 8, batch 2400, loss[loss=0.2521, simple_loss=0.3134, pruned_loss=0.06942, ctc_loss=0.1297, over 19477.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.306, pruned_loss=0.07608, ctc_loss=0.1427, over 3858382.89 frames. ], batch size: 67, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:42:07,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=105733.33333333333, ans=0.0 +2024-08-25 15:42:14,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=105733.33333333333, ans=0.125 +2024-08-25 15:42:18,060 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.983e+02 2.255e+02 2.870e+02 5.067e+02, threshold=4.510e+02, percent-clipped=2.0 +2024-08-25 15:42:20,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.85 vs. limit=22.5 +2024-08-25 15:42:36,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=105840.0, ans=0.0 +2024-08-25 15:42:40,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105893.33333333333, ans=0.125 +2024-08-25 15:43:01,715 INFO [train.py:1114] (0/4) Epoch 8, batch 2450, loss[loss=0.3471, simple_loss=0.354, pruned_loss=0.1234, ctc_loss=0.2331, over 13958.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3116, pruned_loss=0.0808, ctc_loss=0.1518, over 3736317.57 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:43:07,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.20 vs. limit=6.0 +2024-08-25 15:43:10,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=106000.0, ans=0.025 +2024-08-25 15:43:31,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=106106.66666666667, ans=0.05 +2024-08-25 15:43:43,377 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-8.pt +2024-08-25 15:44:31,297 INFO [train.py:1114] (0/4) Epoch 9, batch 0, loss[loss=0.2416, simple_loss=0.2848, pruned_loss=0.07241, ctc_loss=0.1341, over 19818.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2848, pruned_loss=0.07241, ctc_loss=0.1341, over 19818.00 frames. ], batch size: 49, lr: 1.69e-02, grad_scale: 32.0 +2024-08-25 15:44:31,298 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 15:44:49,826 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.21, simple_loss=0.2947, pruned_loss=0.04621, ctc_loss=0.08206, over 944034.00 frames. 
+2024-08-25 15:44:49,826 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB
+2024-08-25 15:45:00,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=106261.33333333333, ans=0.035
+2024-08-25 15:45:14,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106314.66666666667, ans=0.1
+2024-08-25 15:45:15,530 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.726e+02 2.154e+02 2.510e+02 2.953e+02 5.707e+02, threshold=5.019e+02, percent-clipped=2.0
+2024-08-25 15:46:11,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106314.66666666667, ans=0.125
+2024-08-25 15:46:22,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=106368.0, ans=0.0
+2024-08-25 15:46:36,868 INFO [train.py:1114] (0/4) Epoch 9, batch 50, loss[loss=0.2125, simple_loss=0.2683, pruned_loss=0.0565, ctc_loss=0.1092, over 19734.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3084, pruned_loss=0.07768, ctc_loss=0.1467, over 844043.89 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:46:40,806 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.32 vs. limit=15.0
+2024-08-25 15:46:41,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=106474.66666666667, ans=0.025
+2024-08-25 15:47:03,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=106581.33333333333, ans=0.0
+2024-08-25 15:47:11,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=106634.66666666667, ans=0.0
+2024-08-25 15:47:19,599 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-20000.pt
+2024-08-25 15:47:44,309 INFO [train.py:1114] (0/4) Epoch 9, batch 100, loss[loss=0.2363, simple_loss=0.2927, pruned_loss=0.06473, ctc_loss=0.126, over 19724.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3085, pruned_loss=0.07646, ctc_loss=0.1454, over 1497202.38 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:47:53,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106741.33333333333, ans=0.1
+2024-08-25 15:48:01,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=106794.66666666667, ans=0.0
+2024-08-25 15:48:09,486 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.842e+02 2.163e+02 2.785e+02 4.838e+02, threshold=4.326e+02, percent-clipped=0.0
+2024-08-25 15:48:09,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106848.0, ans=0.1
+2024-08-25 15:48:13,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=106848.0, ans=0.0
+2024-08-25 15:48:15,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=106848.0, ans=0.125
+2024-08-25 15:48:23,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=106901.33333333333, ans=0.125
+2024-08-25 15:48:42,151 INFO [train.py:1114] (0/4) Epoch 9, batch 150, loss[loss=0.2247, simple_loss=0.2765, pruned_loss=0.06356, ctc_loss=0.1143, over 19693.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3047, pruned_loss=0.07438, ctc_loss=0.1411, over 2027503.00 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:48:57,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=107061.33333333333, ans=0.0
+2024-08-25 15:49:06,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=107114.66666666667, ans=0.2
+2024-08-25 15:49:13,727 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:49:41,060 INFO [train.py:1114] (0/4) Epoch 9, batch 200, loss[loss=0.2864, simple_loss=0.3219, pruned_loss=0.09117, ctc_loss=0.1712, over 18274.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3025, pruned_loss=0.07309, ctc_loss=0.1382, over 2435063.29 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:49:44,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107274.66666666667, ans=0.125
+2024-08-25 15:49:47,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=107274.66666666667, ans=0.125
+2024-08-25 15:49:59,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=107328.0, ans=0.125
+2024-08-25 15:50:06,174 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.799e+02 2.039e+02 2.617e+02 5.282e+02, threshold=4.078e+02, percent-clipped=1.0
+2024-08-25 15:50:44,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107381.33333333333, ans=0.1
+2024-08-25 15:50:47,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107381.33333333333, ans=0.125
+2024-08-25 15:50:53,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=107434.66666666667, ans=0.125
+2024-08-25 15:51:05,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107488.0, ans=0.0
+2024-08-25 15:51:06,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107488.0, ans=0.125
+2024-08-25 15:51:17,113 INFO [train.py:1114] (0/4) Epoch 9, batch 250, loss[loss=0.3086, simple_loss=0.3436, pruned_loss=0.1016, ctc_loss=0.1762, over 19360.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3034, pruned_loss=0.07382, ctc_loss=0.1391, over 2754598.94 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 32.0
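Two checkpoint cadences are visible above: an end-of-epoch save (epoch-8.pt) and a periodic save every fixed number of batches (checkpoint-20000.pt). A hedged sketch of that logic follows; the function name and signature are illustrative, and the 20000-batch interval is read off the log rather than taken from train.py.

```python
# Sketch of the two checkpoint cadences seen in the log: epoch-N.pt at the
# end of each epoch, checkpoint-STEP.pt every `save_every_n` batches. The
# helper name and signature are illustrative, not icefall's actual API.
import torch

def maybe_save_checkpoint(model, optimizer, exp_dir, epoch, global_batch,
                          end_of_epoch, save_every_n=20000):
    state = {
        "model": model.state_dict(),
        "optimizer": optimizer.state_dict(),
        "epoch": epoch,
        "global_batch": global_batch,
    }
    if end_of_epoch:
        torch.save(state, f"{exp_dir}/epoch-{epoch}.pt")
    elif global_batch > 0 and global_batch % save_every_n == 0:
        torch.save(state, f"{exp_dir}/checkpoint-{global_batch}.pt")
```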
+2024-08-25 15:51:17,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=107541.33333333333, ans=10.0
+2024-08-25 15:51:20,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107541.33333333333, ans=0.125
+2024-08-25 15:51:54,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=107701.33333333333, ans=0.125
+2024-08-25 15:52:00,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=107701.33333333333, ans=0.125
+2024-08-25 15:52:01,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=107701.33333333333, ans=0.0
+2024-08-25 15:52:10,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=107754.66666666667, ans=0.0
+2024-08-25 15:52:13,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=107754.66666666667, ans=0.0
+2024-08-25 15:52:15,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=107754.66666666667, ans=0.0
+2024-08-25 15:52:15,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=107754.66666666667, ans=0.125
+2024-08-25 15:52:18,773 INFO [train.py:1114] (0/4) Epoch 9, batch 300, loss[loss=0.2682, simple_loss=0.3129, pruned_loss=0.08198, ctc_loss=0.149, over 19553.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3021, pruned_loss=0.07312, ctc_loss=0.1375, over 2999865.94 frames. ], batch size: 61, lr: 1.68e-02, grad_scale: 16.0
+2024-08-25 15:52:30,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107861.33333333333, ans=0.1
+2024-08-25 15:52:31,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=107861.33333333333, ans=0.07
+2024-08-25 15:52:43,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107914.66666666667, ans=0.1
+2024-08-25 15:52:47,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.324e+02 1.831e+02 2.248e+02 2.885e+02 5.251e+02, threshold=4.495e+02, percent-clipped=2.0
+2024-08-25 15:53:01,492 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0
+2024-08-25 15:53:02,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=107968.0, ans=0.025
+2024-08-25 15:53:02,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107968.0, ans=0.1
+2024-08-25 15:53:18,339 INFO [train.py:1114] (0/4) Epoch 9, batch 350, loss[loss=0.2412, simple_loss=0.2865, pruned_loss=0.07006, ctc_loss=0.1392, over 19747.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3027, pruned_loss=0.07329, ctc_loss=0.138, over 3189846.95 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0
+2024-08-25 15:53:19,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=108074.66666666667, ans=0.015
+2024-08-25 15:53:25,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=108074.66666666667, ans=0.0
+2024-08-25 15:53:33,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=108128.0, ans=0.125
+2024-08-25 15:53:39,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=108128.0, ans=0.0
+2024-08-25 15:53:40,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0
+2024-08-25 15:53:41,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.15 vs. limit=22.5
+2024-08-25 15:53:55,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=108234.66666666667, ans=0.2
+2024-08-25 15:54:14,912 INFO [train.py:1114] (0/4) Epoch 9, batch 400, loss[loss=0.2434, simple_loss=0.307, pruned_loss=0.06575, ctc_loss=0.1206, over 19514.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3019, pruned_loss=0.07276, ctc_loss=0.137, over 3342190.73 frames. ], batch size: 54, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:54:21,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=108341.33333333333, ans=0.125
+2024-08-25 15:54:22,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=108341.33333333333, ans=15.0
+2024-08-25 15:54:43,462 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 2.039e+02 2.514e+02 3.062e+02 4.428e+02, threshold=5.028e+02, percent-clipped=0.0
+2024-08-25 15:54:56,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108501.33333333333, ans=0.1
+2024-08-25 15:55:18,514 INFO [train.py:1114] (0/4) Epoch 9, batch 450, loss[loss=0.2205, simple_loss=0.2911, pruned_loss=0.05381, ctc_loss=0.1057, over 19624.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3019, pruned_loss=0.07277, ctc_loss=0.1368, over 3450818.45 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 15:55:18,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108608.0, ans=0.125
+2024-08-25 15:55:23,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108608.0, ans=0.125
+2024-08-25 15:55:24,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108608.0, ans=0.125
+2024-08-25 15:55:32,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.44 vs. limit=15.0
+2024-08-25 15:55:42,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108714.66666666667, ans=0.125
+2024-08-25 15:59:08,945 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:59:11,154 INFO [train.py:1114] (0/4) Epoch 9, batch 500, loss[loss=0.2524, simple_loss=0.3116, pruned_loss=0.07077, ctc_loss=0.1288, over 19701.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3011, pruned_loss=0.07253, ctc_loss=0.1364, over 3545802.40 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 15:59:13,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=108874.66666666667, ans=0.0
+2024-08-25 15:59:14,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.97 vs. limit=22.5
+2024-08-25 15:59:24,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108928.0, ans=0.125
+2024-08-25 15:59:30,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=108928.0, ans=0.0
+2024-08-25 15:59:33,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.29 vs. limit=22.5
+2024-08-25 15:59:37,503 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.839e+02 2.298e+02 3.023e+02 4.931e+02, threshold=4.596e+02, percent-clipped=0.0
+2024-08-25 15:59:58,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109088.0, ans=0.1
+2024-08-25 16:00:06,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=109088.0, ans=0.125
+2024-08-25 16:00:08,630 INFO [train.py:1114] (0/4) Epoch 9, batch 550, loss[loss=0.2638, simple_loss=0.309, pruned_loss=0.07944, ctc_loss=0.1491, over 19392.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3017, pruned_loss=0.07289, ctc_loss=0.1373, over 3607826.60 frames. ], batch size: 71, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:00:30,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=12.0
+2024-08-25 16:00:37,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.15 vs. limit=15.0
+2024-08-25 16:00:48,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=109301.33333333333, ans=0.0
+2024-08-25 16:01:00,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=109354.66666666667, ans=0.2
+2024-08-25 16:01:12,447 INFO [train.py:1114] (0/4) Epoch 9, batch 600, loss[loss=0.2718, simple_loss=0.3172, pruned_loss=0.08153, ctc_loss=0.1583, over 19429.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3022, pruned_loss=0.07288, ctc_loss=0.1373, over 3665188.56 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:01:43,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=109461.33333333333, ans=0.0
+2024-08-25 16:01:46,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=109461.33333333333, ans=0.125
+2024-08-25 16:01:48,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=109514.66666666667, ans=0.0
+2024-08-25 16:01:51,491 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.960e+02 2.208e+02 2.721e+02 5.490e+02, threshold=4.416e+02, percent-clipped=2.0
+2024-08-25 16:01:55,640 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.27 vs. limit=15.0
+2024-08-25 16:02:34,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109568.0, ans=0.1
+2024-08-25 16:02:47,609 INFO [train.py:1114] (0/4) Epoch 9, batch 650, loss[loss=0.218, simple_loss=0.2839, pruned_loss=0.05504, ctc_loss=0.1053, over 19758.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3012, pruned_loss=0.07221, ctc_loss=0.1362, over 3715812.23 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:02:53,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109674.66666666667, ans=0.125
+2024-08-25 16:03:22,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=109781.33333333333, ans=0.125
+2024-08-25 16:03:22,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=109781.33333333333, ans=0.2
+2024-08-25 16:03:31,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109834.66666666667, ans=0.125
+2024-08-25 16:03:47,859 INFO [train.py:1114] (0/4) Epoch 9, batch 700, loss[loss=0.2368, simple_loss=0.2901, pruned_loss=0.06647, ctc_loss=0.1262, over 19723.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3015, pruned_loss=0.07258, ctc_loss=0.1369, over 3748214.42 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:03:49,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=109941.33333333333, ans=0.025
+2024-08-25 16:04:02,989 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 16:04:06,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.92 vs. limit=15.0
+2024-08-25 16:04:06,862 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.16 vs. limit=22.5
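The Whitening lines compare a per-module statistic against a limit; the whitening penalty only kicks in once the metric exceeds that limit. The exact metric is defined in icefall's scaling.py; the sketch below uses a simple eigenvalue-spread proxy on the feature covariance to illustrate the idea, and is an assumption rather than the actual formula.

```python
# Hedged sketch of a whitening diagnostic in the spirit of the scaling.py
# "metric=X vs. limit=Y" lines: measure how far the feature covariance is
# from isotropic (white). This proxy is an assumption, not icefall's formula.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns max/mean covariance
    eigenvalue; 1.0 would mean perfectly white features."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)  # ascending, real for symmetric cov
    return (eigs[-1] / eigs.mean().clamp(min=1e-20)).item()

feats = torch.randn(1000, 512)
# modest for random features; grows as the features become anisotropic,
# which is when a value would cross its logged limit during training
print(whitening_metric(feats))
```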
+2024-08-25 16:04:09,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=109994.66666666667, ans=0.025
+2024-08-25 16:04:14,376 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.641e+02 1.949e+02 2.382e+02 2.859e+02 4.618e+02, threshold=4.764e+02, percent-clipped=1.0
+2024-08-25 16:04:18,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=110048.0, ans=0.09899494936611666
+2024-08-25 16:04:22,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=110101.33333333333, ans=0.0
+2024-08-25 16:04:44,755 INFO [train.py:1114] (0/4) Epoch 9, batch 750, loss[loss=0.287, simple_loss=0.3244, pruned_loss=0.09087, ctc_loss=0.1699, over 19483.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3013, pruned_loss=0.07241, ctc_loss=0.1365, over 3774614.02 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:04:53,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0
+2024-08-25 16:04:57,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=110261.33333333333, ans=0.125
+2024-08-25 16:05:20,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110314.66666666667, ans=0.125
+2024-08-25 16:05:28,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=110368.0, ans=0.125
+2024-08-25 16:05:47,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=110474.66666666667, ans=0.2
+2024-08-25 16:05:48,071 INFO [train.py:1114] (0/4) Epoch 9, batch 800, loss[loss=0.207, simple_loss=0.2575, pruned_loss=0.05675, ctc_loss=0.1076, over 19841.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3012, pruned_loss=0.07241, ctc_loss=0.136, over 3796448.51 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:06:02,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=110528.0, ans=0.125
+2024-08-25 16:06:03,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110528.0, ans=0.125
+2024-08-25 16:06:14,977 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.861e+02 2.104e+02 2.558e+02 4.618e+02, threshold=4.207e+02, percent-clipped=0.0
+2024-08-25 16:06:16,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=110581.33333333333, ans=0.0
+2024-08-25 16:06:32,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=110634.66666666667, ans=0.125
+2024-08-25 16:06:47,180 INFO [train.py:1114] (0/4) Epoch 9, batch 850, loss[loss=0.2375, simple_loss=0.2989, pruned_loss=0.06478, ctc_loss=0.1163, over 19657.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3006, pruned_loss=0.07217, ctc_loss=0.1356, over 3815637.27 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:06:49,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=15.0
+2024-08-25 16:07:02,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=110794.66666666667, ans=0.0
+2024-08-25 16:07:05,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=110794.66666666667, ans=0.0
+2024-08-25 16:07:23,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=110901.33333333333, ans=0.125
+2024-08-25 16:08:32,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110954.66666666667, ans=0.125
+2024-08-25 16:08:42,730 INFO [train.py:1114] (0/4) Epoch 9, batch 900, loss[loss=0.2456, simple_loss=0.285, pruned_loss=0.07463, ctc_loss=0.1427, over 19805.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3011, pruned_loss=0.07264, ctc_loss=0.1365, over 3820448.07 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:09:12,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.982e+02 2.328e+02 2.784e+02 5.806e+02, threshold=4.657e+02, percent-clipped=1.0
+2024-08-25 16:09:12,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111114.66666666667, ans=0.0
+2024-08-25 16:09:15,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0
+2024-08-25 16:09:17,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111114.66666666667, ans=0.125
+2024-08-25 16:09:20,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=111168.0, ans=0.125
+2024-08-25 16:09:29,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=111168.0, ans=0.2
+2024-08-25 16:09:47,306 INFO [train.py:1114] (0/4) Epoch 9, batch 950, loss[loss=0.2195, simple_loss=0.2746, pruned_loss=0.06068, ctc_loss=0.1076, over 19520.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3016, pruned_loss=0.07274, ctc_loss=0.1366, over 3822695.00 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:10:05,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111328.0, ans=0.125
+2024-08-25 16:10:05,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=111328.0, ans=0.125
+2024-08-25 16:10:08,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=111328.0, ans=0.2
+2024-08-25 16:10:10,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.16 vs. limit=15.0
+2024-08-25 16:10:36,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=111488.0, ans=0.025
+2024-08-25 16:10:45,136 INFO [train.py:1114] (0/4) Epoch 9, batch 1000, loss[loss=0.2142, simple_loss=0.2785, pruned_loss=0.05403, ctc_loss=0.1048, over 19829.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3022, pruned_loss=0.07291, ctc_loss=0.1369, over 3819280.03 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:11:02,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111594.66666666667, ans=0.125
+2024-08-25 16:11:06,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=111594.66666666667, ans=0.05
+2024-08-25 16:11:06,454 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.06 vs. limit=15.0
+2024-08-25 16:11:13,877 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.864e+02 2.156e+02 2.793e+02 4.751e+02, threshold=4.311e+02, percent-clipped=1.0
+2024-08-25 16:11:43,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=111754.66666666667, ans=0.0
+2024-08-25 16:11:45,639 INFO [train.py:1114] (0/4) Epoch 9, batch 1050, loss[loss=0.261, simple_loss=0.3121, pruned_loss=0.07658, ctc_loss=0.1419, over 19859.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3015, pruned_loss=0.07264, ctc_loss=0.1366, over 3823747.95 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:12:00,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111861.33333333333, ans=0.1
+2024-08-25 16:12:05,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=111861.33333333333, ans=0.0
+2024-08-25 16:12:51,802 INFO [train.py:1114] (0/4) Epoch 9, batch 1100, loss[loss=0.2397, simple_loss=0.2967, pruned_loss=0.06657, ctc_loss=0.1237, over 19592.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3005, pruned_loss=0.0719, ctc_loss=0.1354, over 3831698.90 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:13:04,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.04 vs. limit=15.0
+2024-08-25 16:13:09,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=22.5
+2024-08-25 16:13:19,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 1.820e+02 2.090e+02 2.645e+02 4.523e+02, threshold=4.179e+02, percent-clipped=2.0
+2024-08-25 16:13:21,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=112181.33333333333, ans=0.2
+2024-08-25 16:13:24,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=112181.33333333333, ans=0.125
+2024-08-25 16:13:46,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=112288.0, ans=0.0
+2024-08-25 16:13:50,956 INFO [train.py:1114] (0/4) Epoch 9, batch 1150, loss[loss=0.2223, simple_loss=0.2838, pruned_loss=0.05814, ctc_loss=0.1115, over 19600.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3001, pruned_loss=0.07192, ctc_loss=0.1353, over 3830700.83 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:13:51,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=112341.33333333333, ans=0.0
+2024-08-25 16:13:51,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=12.0
+2024-08-25 16:14:10,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=112394.66666666667, ans=0.0
+2024-08-25 16:14:15,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112448.0, ans=0.125
+2024-08-25 16:14:51,114 INFO [train.py:1114] (0/4) Epoch 9, batch 1200, loss[loss=0.2296, simple_loss=0.2951, pruned_loss=0.05817, ctc_loss=0.1193, over 19834.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3006, pruned_loss=0.07203, ctc_loss=0.1355, over 3825832.07 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:15:06,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112661.33333333333, ans=0.1
+2024-08-25 16:16:05,769 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.491e+02 1.875e+02 2.166e+02 2.598e+02 4.323e+02, threshold=4.331e+02, percent-clipped=2.0
+2024-08-25 16:16:24,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=112768.0, ans=0.2
+2024-08-25 16:16:28,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=112821.33333333333, ans=0.125
+2024-08-25 16:16:39,514 INFO [train.py:1114] (0/4) Epoch 9, batch 1250, loss[loss=0.3054, simple_loss=0.3372, pruned_loss=0.1, ctc_loss=0.1839, over 19548.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3009, pruned_loss=0.07188, ctc_loss=0.1353, over 3844137.32 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:16:44,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=112874.66666666667, ans=0.0
+2024-08-25 16:17:00,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=112928.0, ans=0.0
+2024-08-25 16:17:33,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.35 vs. limit=15.0
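In the optim.py WARNING lines, the five "grad-norm quartiles" values behave like the 0/25/50/75/100th percentiles of recently observed gradient norms, and the logged threshold consistently equals Clipping_scale times the median (e.g. 2.0 * 2.090e+02 = 4.179e+02 just above). A sketch of that mechanism, with illustrative bookkeeping rather than icefall's exact code:

```python
# Sketch of quartile-based gradient clipping matching the optim.py WARNING
# lines: the five logged values look like the 0/25/50/75/100th percentiles
# of recent grad norms, and threshold = Clipping_scale * median. The
# windowing and class name here are illustrative assumptions.
import torch

class QuartileClipper:
    def __init__(self, clipping_scale=2.0, window=1000):
        self.scale, self.window, self.norms = clipping_scale, window, []

    def clip_(self, parameters):
        params = [p for p in parameters if p.grad is not None]
        # overall 2-norm of the gradient across all parameters
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms = (self.norms + [norm])[-self.window:]
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()  # scale times the median
        if norm > threshold:                  # rescale grads in place
            for p in params:
                p.grad.mul_(threshold / norm)
        return q.tolist(), threshold, norm > threshold
```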
+2024-08-25 16:17:40,917 INFO [train.py:1114] (0/4) Epoch 9, batch 1300, loss[loss=0.3178, simple_loss=0.3449, pruned_loss=0.1055, ctc_loss=0.1992, over 18806.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3002, pruned_loss=0.07174, ctc_loss=0.1349, over 3847673.27 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:17:46,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=113141.33333333333, ans=0.2
+2024-08-25 16:17:47,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.48 vs. limit=22.5
+2024-08-25 16:18:00,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=113194.66666666667, ans=0.2
+2024-08-25 16:18:08,521 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 1.959e+02 2.315e+02 2.984e+02 4.812e+02, threshold=4.630e+02, percent-clipped=1.0
+2024-08-25 16:18:15,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=113301.33333333333, ans=0.125
+2024-08-25 16:18:30,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113354.66666666667, ans=0.0
+2024-08-25 16:18:32,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=113354.66666666667, ans=0.125
+2024-08-25 16:18:42,123 INFO [train.py:1114] (0/4) Epoch 9, batch 1350, loss[loss=0.2196, simple_loss=0.2822, pruned_loss=0.05646, ctc_loss=0.11, over 19737.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.2989, pruned_loss=0.07073, ctc_loss=0.133, over 3859306.16 frames. ], batch size: 54, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:18:56,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=113461.33333333333, ans=0.125
+2024-08-25 16:19:04,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113514.66666666667, ans=0.125
+2024-08-25 16:19:18,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=113568.0, ans=0.07
+2024-08-25 16:19:32,254 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. limit=6.0
+2024-08-25 16:19:40,023 INFO [train.py:1114] (0/4) Epoch 9, batch 1400, loss[loss=0.2021, simple_loss=0.2625, pruned_loss=0.05168, ctc_loss=0.09587, over 19662.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.2995, pruned_loss=0.07113, ctc_loss=0.1337, over 3866130.34 frames. ], batch size: 46, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:19:50,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=113728.0, ans=0.125
+2024-08-25 16:20:07,559 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.860e+02 2.127e+02 2.545e+02 4.134e+02, threshold=4.253e+02, percent-clipped=0.0
+2024-08-25 16:20:07,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=113781.33333333333, ans=0.125
+2024-08-25 16:20:09,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113781.33333333333, ans=0.1
+2024-08-25 16:20:12,121 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 16:20:20,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=113834.66666666667, ans=0.0
+2024-08-25 16:20:25,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.39 vs. limit=15.0
+2024-08-25 16:20:43,034 INFO [train.py:1114] (0/4) Epoch 9, batch 1450, loss[loss=0.2751, simple_loss=0.3166, pruned_loss=0.08484, ctc_loss=0.16, over 19679.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3002, pruned_loss=0.07162, ctc_loss=0.1346, over 3864108.67 frames. ], batch size: 63, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:20:43,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=113941.33333333333, ans=0.05
+2024-08-25 16:20:48,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=113941.33333333333, ans=0.09899494936611666
+2024-08-25 16:20:51,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=113941.33333333333, ans=0.125
+2024-08-25 16:20:55,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=113994.66666666667, ans=22.5
+2024-08-25 16:21:07,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114048.0, ans=0.1
+2024-08-25 16:21:08,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=114048.0, ans=0.125
+2024-08-25 16:21:27,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=114101.33333333333, ans=0.0
+2024-08-25 16:21:45,885 INFO [train.py:1114] (0/4) Epoch 9, batch 1500, loss[loss=0.26, simple_loss=0.3149, pruned_loss=0.07356, ctc_loss=0.1451, over 19600.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3008, pruned_loss=0.07191, ctc_loss=0.135, over 3863198.78 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:21:46,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=114208.0, ans=0.95
+2024-08-25 16:21:49,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=15.0
+2024-08-25 16:21:54,682 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.41 vs. limit=10.0
+2024-08-25 16:22:02,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=114261.33333333333, ans=0.125
+2024-08-25 16:22:06,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=114261.33333333333, ans=10.0
+2024-08-25 16:22:08,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=114261.33333333333, ans=0.125
+2024-08-25 16:22:15,434 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.928e+02 2.180e+02 2.740e+02 4.350e+02, threshold=4.360e+02, percent-clipped=2.0
+2024-08-25 16:22:19,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=114314.66666666667, ans=0.125
+2024-08-25 16:22:45,662 INFO [train.py:1114] (0/4) Epoch 9, batch 1550, loss[loss=0.279, simple_loss=0.326, pruned_loss=0.08539, ctc_loss=0.153, over 19597.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3015, pruned_loss=0.07253, ctc_loss=0.136, over 3847363.39 frames. ], batch size: 60, lr: 1.64e-02, grad_scale: 16.0
+2024-08-25 16:23:10,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114581.33333333333, ans=0.125
+2024-08-25 16:23:15,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=114581.33333333333, ans=0.0
+2024-08-25 16:23:17,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114581.33333333333, ans=0.125
+2024-08-25 16:23:24,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=114634.66666666667, ans=0.025
+2024-08-25 16:23:32,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=114634.66666666667, ans=0.125
+2024-08-25 16:23:46,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=114741.33333333333, ans=0.125
+2024-08-25 16:23:47,249 INFO [train.py:1114] (0/4) Epoch 9, batch 1600, loss[loss=0.2307, simple_loss=0.2928, pruned_loss=0.061, ctc_loss=0.1163, over 19838.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3013, pruned_loss=0.07233, ctc_loss=0.1359, over 3835919.64 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0
+2024-08-25 16:23:59,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114794.66666666667, ans=0.125
+2024-08-25 16:24:03,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=114794.66666666667, ans=0.125
+2024-08-25 16:24:05,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114794.66666666667, ans=0.125
+2024-08-25 16:24:16,806 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.930e+02 2.504e+02 3.084e+02 5.673e+02, threshold=5.009e+02, percent-clipped=4.0
+2024-08-25 16:24:46,353 INFO [train.py:1114] (0/4) Epoch 9, batch 1650, loss[loss=0.2497, simple_loss=0.31, pruned_loss=0.06904, ctc_loss=0.1283, over 19639.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3012, pruned_loss=0.07218, ctc_loss=0.1356, over 3833183.94 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:25:26,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=115168.0, ans=0.125
+2024-08-25 16:25:34,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=115221.33333333333, ans=0.0
+2024-08-25 16:25:41,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=115221.33333333333, ans=0.125
+2024-08-25 16:25:45,143 INFO [train.py:1114] (0/4) Epoch 9, batch 1700, loss[loss=0.1959, simple_loss=0.2599, pruned_loss=0.04677, ctc_loss=0.0958, over 19661.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3004, pruned_loss=0.07129, ctc_loss=0.1342, over 3847890.26 frames. ], batch size: 46, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:25:47,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=115274.66666666667, ans=0.07
+2024-08-25 16:25:58,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=115328.0, ans=0.0
+2024-08-25 16:26:13,051 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.773e+02 1.969e+02 2.283e+02 4.673e+02, threshold=3.938e+02, percent-clipped=0.0
+2024-08-25 16:26:26,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. limit=15.0
+2024-08-25 16:26:41,738 INFO [train.py:1114] (0/4) Epoch 9, batch 1750, loss[loss=0.245, simple_loss=0.2847, pruned_loss=0.075, ctc_loss=0.1383, over 19683.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.2998, pruned_loss=0.07119, ctc_loss=0.1339, over 3852696.95 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0
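The grad_scale value in the train.py lines is the dynamic loss scale used with mixed precision; it halves when an overflow is detected (32.0 falling to 16.0 above) and grows back after a stretch of clean batches. A sketch using PyTorch's stock scaler as a stand-in for icefall's own variant:

```python
# Sketch of the dynamic loss scaling behind the grad_scale values in the
# log, using the stock PyTorch AMP scaler as a stand-in for icefall's own
# scaler; init_scale=32.0 mirrors the logged value, not the recipe's config.
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

# Per training batch (model, optimizer, compute_loss assumed to exist):
#   with torch.cuda.amp.autocast():
#       loss = compute_loss(batch)
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)   # skipped internally if inf/nan grads are found
#   scaler.update()          # halves the scale on overflow, grows it slowly

print(scaler.get_scale())    # 32.0 with CUDA; 1.0 if scaling is disabled
```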
+2024-08-25 16:26:42,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115541.33333333333, ans=0.125
+2024-08-25 16:26:47,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=115541.33333333333, ans=0.0
+2024-08-25 16:27:46,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=115648.0, ans=0.025
+2024-08-25 16:28:08,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=115754.66666666667, ans=0.125
+2024-08-25 16:28:12,464 INFO [train.py:1114] (0/4) Epoch 9, batch 1800, loss[loss=0.2636, simple_loss=0.3177, pruned_loss=0.07554, ctc_loss=0.1459, over 19616.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3003, pruned_loss=0.07163, ctc_loss=0.1348, over 3852814.33 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:28:22,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115808.0, ans=0.125
+2024-08-25 16:28:40,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=115861.33333333333, ans=0.0
+2024-08-25 16:28:46,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=115914.66666666667, ans=0.95
+2024-08-25 16:28:49,004 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.488e+02 1.840e+02 2.097e+02 2.711e+02 4.220e+02, threshold=4.193e+02, percent-clipped=2.0
+2024-08-25 16:28:59,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115968.0, ans=0.125
+2024-08-25 16:29:10,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=115968.0, ans=0.0
+2024-08-25 16:29:25,118 INFO [train.py:1114] (0/4) Epoch 9, batch 1850, loss[loss=0.269, simple_loss=0.3232, pruned_loss=0.07808, ctc_loss=0.1467, over 19591.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3002, pruned_loss=0.07165, ctc_loss=0.1346, over 3856726.08 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:29:43,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=116128.0, ans=0.0
+2024-08-25 16:30:21,273 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 16:30:26,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=116181.33333333333, ans=0.0
+2024-08-25 16:30:33,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=116234.66666666667, ans=0.125
+2024-08-25 16:30:42,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=116234.66666666667, ans=0.025
+2024-08-25 16:30:55,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=116341.33333333333, ans=0.0
+2024-08-25 16:30:56,367 INFO [train.py:1114] (0/4) Epoch 9, batch 1900, loss[loss=0.2582, simple_loss=0.3155, pruned_loss=0.07306, ctc_loss=0.1371, over 19640.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3008, pruned_loss=0.07193, ctc_loss=0.135, over 3862410.33 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0
+2024-08-25 16:32:01,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116341.33333333333, ans=0.125
+2024-08-25 16:32:08,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.57 vs. limit=15.0
+2024-08-25 16:32:21,937 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.810e+02 2.075e+02 2.674e+02 4.757e+02, threshold=4.150e+02, percent-clipped=3.0
+2024-08-25 16:32:25,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=116448.0, ans=0.125
+2024-08-25 16:32:25,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.02 vs. limit=10.0
+2024-08-25 16:32:35,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=116501.33333333333, ans=0.0
+2024-08-25 16:32:45,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=116554.66666666667, ans=0.0
+2024-08-25 16:33:06,107 INFO [train.py:1114] (0/4) Epoch 9, batch 1950, loss[loss=0.2565, simple_loss=0.3082, pruned_loss=0.0732, ctc_loss=0.146, over 19575.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3016, pruned_loss=0.07191, ctc_loss=0.1352, over 3871997.96 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0
+2024-08-25 16:33:09,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=116608.0, ans=0.2
+2024-08-25 16:33:23,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116661.33333333333, ans=0.0
+2024-08-25 16:33:30,529 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 16:33:47,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=116768.0, ans=0.0
+2024-08-25 16:34:01,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.31 vs. limit=15.0
+2024-08-25 16:34:02,768 INFO [train.py:1114] (0/4) Epoch 9, batch 2000, loss[loss=0.1966, simple_loss=0.2506, pruned_loss=0.05241, ctc_loss=0.09474, over 19696.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3018, pruned_loss=0.07208, ctc_loss=0.1357, over 3856507.86 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:34:14,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=116928.0, ans=0.0
+2024-08-25 16:34:15,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.13 vs. limit=15.0
+2024-08-25 16:34:16,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=116928.0, ans=0.0
+2024-08-25 16:34:17,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116928.0, ans=0.1
+2024-08-25 16:34:29,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0
+2024-08-25 16:34:30,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=116981.33333333333, ans=0.125
+2024-08-25 16:34:30,982 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.555e+02 1.787e+02 2.122e+02 2.673e+02 5.196e+02, threshold=4.245e+02, percent-clipped=10.0
+2024-08-25 16:34:42,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=117034.66666666667, ans=0.125
+2024-08-25 16:34:50,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=117088.0, ans=0.09899494936611666
+2024-08-25 16:34:59,605 INFO [train.py:1114] (0/4) Epoch 9, batch 2050, loss[loss=0.2136, simple_loss=0.2706, pruned_loss=0.05625, ctc_loss=0.1104, over 19686.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3011, pruned_loss=0.07206, ctc_loss=0.1357, over 3852956.16 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:35:12,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=117194.66666666667, ans=0.0
+2024-08-25 16:35:14,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=117194.66666666667, ans=0.0
+2024-08-25 16:35:28,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=117248.0, ans=0.125
+2024-08-25 16:35:32,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=117301.33333333333, ans=0.2
+2024-08-25 16:35:34,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=117301.33333333333, ans=0.125
+2024-08-25 16:36:04,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.27 vs. limit=15.0
+2024-08-25 16:36:57,814 INFO [train.py:1114] (0/4) Epoch 9, batch 2100, loss[loss=0.2543, simple_loss=0.3047, pruned_loss=0.07488, ctc_loss=0.1351, over 19767.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3004, pruned_loss=0.07159, ctc_loss=0.135, over 3859079.64 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:37:17,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=15.0
+2024-08-25 16:37:29,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.98 vs. limit=10.0
+2024-08-25 16:37:35,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=117514.66666666667, ans=0.05
+2024-08-25 16:37:36,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117514.66666666667, ans=0.1
+2024-08-25 16:37:38,952 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.824e+02 2.012e+02 2.446e+02 4.504e+02, threshold=4.025e+02, percent-clipped=2.0
+2024-08-25 16:37:50,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=117568.0, ans=0.2
+2024-08-25 16:37:57,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=117621.33333333333, ans=0.07
+2024-08-25 16:38:06,806 INFO [train.py:1114] (0/4) Epoch 9, batch 2150, loss[loss=0.214, simple_loss=0.2694, pruned_loss=0.05747, ctc_loss=0.1091, over 19589.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.2992, pruned_loss=0.07095, ctc_loss=0.1335, over 3869879.56 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:38:12,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0
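Each train.py line pairs the current batch's loss "over N frames" with a tot_loss "over M frames" running across millions of frames; the natural reading, an assumption here rather than something stated in the log, is a frame-weighted average over recent batches. A small sketch using two loss values from just above:

```python
# Sketch of the frame-weighted averaging that plausibly produces the
# tot_loss[... over M frames] fields: each batch contributes its loss
# weighted by its frame count. This reading is an assumption from the log.
class FrameWeightedLoss:
    def __init__(self):
        self.weighted_sum = 0.0
        self.num_frames = 0.0

    def update(self, loss: float, frames: float) -> None:
        self.weighted_sum += loss * frames
        self.num_frames += frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.num_frames, 1.0)

tot = FrameWeightedLoss()
tot.update(0.2543, 19767)   # batch 2100 loss over its frames
tot.update(0.2140, 19589)   # batch 2150 loss over its frames
print(round(tot.value, 4))  # frame-weighted mean of the two batches
```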
limit=15.0 +2024-08-25 16:38:14,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=117674.66666666667, ans=0.025 +2024-08-25 16:38:21,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=117728.0, ans=0.025 +2024-08-25 16:38:21,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.22 vs. limit=22.5 +2024-08-25 16:38:22,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=12.0 +2024-08-25 16:38:28,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117781.33333333333, ans=0.1 +2024-08-25 16:38:33,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=117781.33333333333, ans=0.125 +2024-08-25 16:38:46,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=117834.66666666667, ans=0.125 +2024-08-25 16:38:57,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=117888.0, ans=0.125 +2024-08-25 16:39:02,671 INFO [train.py:1114] (0/4) Epoch 9, batch 2200, loss[loss=0.2637, simple_loss=0.312, pruned_loss=0.0766, ctc_loss=0.1557, over 19579.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.2993, pruned_loss=0.07082, ctc_loss=0.1333, over 3869284.31 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:39:04,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=117941.33333333333, ans=0.0 +2024-08-25 16:39:12,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=117941.33333333333, ans=0.125 +2024-08-25 16:39:27,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=118048.0, ans=0.025 +2024-08-25 16:39:27,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=118048.0, ans=0.0 +2024-08-25 16:39:30,923 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.840e+02 2.263e+02 2.882e+02 6.553e+02, threshold=4.526e+02, percent-clipped=9.0 +2024-08-25 16:39:38,460 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 16:39:57,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=118154.66666666667, ans=0.5 +2024-08-25 16:39:59,965 INFO [train.py:1114] (0/4) Epoch 9, batch 2250, loss[loss=0.2449, simple_loss=0.3016, pruned_loss=0.06772, ctc_loss=0.1318, over 19627.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.2996, pruned_loss=0.07097, ctc_loss=0.1337, over 3868578.45 frames. 
+2024-08-25 16:40:14,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=118261.33333333333, ans=0.0
+2024-08-25 16:40:29,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=118314.66666666667, ans=0.2
+2024-08-25 16:40:35,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=118368.0, ans=0.125
+2024-08-25 16:40:54,823 INFO [train.py:1114] (0/4) Epoch 9, batch 2300, loss[loss=0.2173, simple_loss=0.2691, pruned_loss=0.06068, ctc_loss=0.1104, over 19510.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.2984, pruned_loss=0.07082, ctc_loss=0.1334, over 3861868.07 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 16.0
+2024-08-25 16:41:02,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=118474.66666666667, ans=0.125
+2024-08-25 16:41:24,907 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 1.864e+02 2.265e+02 3.023e+02 5.230e+02, threshold=4.530e+02, percent-clipped=2.0
+2024-08-25 16:41:30,565 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.79 vs. limit=15.0
+2024-08-25 16:41:34,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=118634.66666666667, ans=10.0
+2024-08-25 16:41:40,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=118688.0, ans=0.125
+2024-08-25 16:41:44,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=118688.0, ans=0.2
+2024-08-25 16:41:51,058 INFO [train.py:1114] (0/4) Epoch 9, batch 2350, loss[loss=0.2707, simple_loss=0.3173, pruned_loss=0.07994, ctc_loss=0.1604, over 19635.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.2979, pruned_loss=0.07036, ctc_loss=0.1324, over 3864113.15 frames. ], batch size: 63, lr: 1.61e-02, grad_scale: 16.0
+2024-08-25 16:42:01,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=118741.33333333333, ans=0.025
+2024-08-25 16:42:04,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=118794.66666666667, ans=0.125
+2024-08-25 16:42:07,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=118794.66666666667, ans=0.04949747468305833
+2024-08-25 16:42:30,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=118848.0, ans=0.0
+2024-08-25 16:42:30,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=118848.0, ans=0.0
+2024-08-25 16:42:39,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=118901.33333333333, ans=0.2
+2024-08-25 16:42:44,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=118901.33333333333, ans=0.125
+2024-08-25 16:42:52,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=118954.66666666667, ans=0.0
+2024-08-25 16:42:59,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=118954.66666666667, ans=0.0
+2024-08-25 16:43:02,787 INFO [train.py:1114] (0/4) Epoch 9, batch 2400, loss[loss=0.2781, simple_loss=0.3222, pruned_loss=0.08454, ctc_loss=0.1622, over 19405.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3002, pruned_loss=0.07157, ctc_loss=0.1346, over 3857848.50 frames. ], batch size: 67, lr: 1.61e-02, grad_scale: 32.0
+2024-08-25 16:43:12,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=119008.0, ans=0.125
+2024-08-25 16:43:27,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=119114.66666666667, ans=0.125
+2024-08-25 16:43:32,518 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.930e+02 2.301e+02 2.799e+02 4.768e+02, threshold=4.601e+02, percent-clipped=1.0
+2024-08-25 16:43:33,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.59 vs. limit=15.0
+2024-08-25 16:43:33,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=119114.66666666667, ans=0.125
+2024-08-25 16:43:48,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=119221.33333333333, ans=0.0
+2024-08-25 16:43:59,351 INFO [train.py:1114] (0/4) Epoch 9, batch 2450, loss[loss=0.3547, simple_loss=0.3567, pruned_loss=0.1287, ctc_loss=0.2384, over 13679.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3049, pruned_loss=0.07561, ctc_loss=0.1423, over 3728798.97 frames. ], batch size: 140, lr: 1.61e-02, grad_scale: 32.0
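
Most of the scaling.py:214 entries print ScheduledFloat values: schedule-controlled hyperparameters (skip rates, balancer probabilities, dropout rates) that are functions of the global batch count, which is why each entry pairs a batch_count with the current ans. A simplified stand-in, assuming the piecewise-linear semantics of icefall's scaling.ScheduledFloat; the breakpoints in the docstring are illustrative, not taken from this run:

```python
class ScheduledFloat:
    """Simplified stand-in: a float that is piecewise-linear in batch_count.

    Example: ScheduledFloat((0.0, 0.07), (20000.0, 0.0)) decays from 0.07
    to 0.0 over the first 20000 batches.
    """

    def __init__(self, *points):
        # points: (batch_count, value) pairs with distinct batch_counts
        self.points = sorted(points)
        self.batch_count = 0.0  # advanced by the training loop

    def value(self) -> float:
        pts = self.points
        if self.batch_count <= pts[0][0]:
            return pts[0][1]
        if self.batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= self.batch_count <= x1:
                frac = (self.batch_count - x0) / (x1 - x0)
                return y0 + frac * (y1 - y0)
```

Logging the module name together with batch_count and the interpolated value, as these entries do, makes it possible to audit every schedule after the fact.
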
], batch size: 140, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:44:20,732 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=2.735e-03 +2024-08-25 16:44:22,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=10.0 +2024-08-25 16:44:36,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=119434.66666666667, ans=0.2 +2024-08-25 16:44:38,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=119434.66666666667, ans=0.0 +2024-08-25 16:44:40,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=119434.66666666667, ans=0.125 +2024-08-25 16:44:41,664 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-9.pt +2024-08-25 16:45:25,693 INFO [train.py:1114] (0/4) Epoch 10, batch 0, loss[loss=0.2335, simple_loss=0.2831, pruned_loss=0.06725, ctc_loss=0.1234, over 19806.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2831, pruned_loss=0.06725, ctc_loss=0.1234, over 19806.00 frames. ], batch size: 49, lr: 1.53e-02, grad_scale: 32.0 +2024-08-25 16:45:25,693 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 16:45:33,000 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.6104, 4.1222, 2.4195, 1.8686], device='cuda:0') +2024-08-25 16:46:37,097 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.2041, simple_loss=0.2903, pruned_loss=0.04356, ctc_loss=0.07708, over 944034.00 frames. +2024-08-25 16:46:37,097 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 16:46:58,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.87 vs. limit=15.0 +2024-08-25 16:47:07,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=119589.33333333333, ans=0.125 +2024-08-25 16:47:17,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=119642.66666666667, ans=0.025 +2024-08-25 16:47:31,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.01 vs. limit=12.0 +2024-08-25 16:47:46,605 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 1.955e+02 2.116e+02 2.362e+02 4.652e+02, threshold=4.231e+02, percent-clipped=1.0 +2024-08-25 16:48:18,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=119696.0, ans=0.2 +2024-08-25 16:48:20,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=119696.0, ans=0.0 +2024-08-25 16:48:28,308 INFO [train.py:1114] (0/4) Epoch 10, batch 50, loss[loss=0.2072, simple_loss=0.2628, pruned_loss=0.05458, ctc_loss=0.1061, over 19711.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3031, pruned_loss=0.07283, ctc_loss=0.1373, over 843213.08 frames. 
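
The epoch boundary above shows the once-per-epoch bookkeeping: save epoch-9.pt, compute validation loss over the full dev set (944034.00 frames), and report peak CUDA memory. Note that the per-batch loss[...] figures are measured over a single batch's frames, while tot_loss[...] is a frame-weighted running average whose "over N frames" figure climbs from about 19.8k at batch 0 toward roughly 3.86M and then levels off, consistent with a decayed running sum. A small sketch of that kind of averaging (a hypothetical helper, with an assumed decay constant that is not read from the log):

```python
class RunningLoss:
    """Frame-weighted running average with exponential decay, so that the
    effective frame total saturates, as the tot_loss figures above do."""

    def __init__(self, decay=0.995):  # decay constant assumed, not from the log
        self.decay = decay
        self.loss_sum = 0.0  # decayed sum of loss * frames
        self.frames = 0.0    # decayed sum of frames

    def update(self, loss: float, num_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def avg(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tot = RunningLoss()
tot.update(0.2335, 19806.0)  # epoch 10, batch 0 in the log above
print(f"tot_loss={tot.avg:.4f} over {tot.frames:.2f} frames")
```

Weighting by frames rather than by batches keeps utterances of different lengths from being over- or under-counted in the reported average.
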
+2024-08-25 16:49:43,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=12.0
+2024-08-25 16:50:11,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=119856.0, ans=0.1
+2024-08-25 16:51:23,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=119962.66666666667, ans=0.0
+2024-08-25 16:51:36,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=119962.66666666667, ans=0.1
+2024-08-25 16:52:34,138 INFO [train.py:1114] (0/4) Epoch 10, batch 100, loss[loss=0.2276, simple_loss=0.2849, pruned_loss=0.06177, ctc_loss=0.1171, over 19710.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3045, pruned_loss=0.07305, ctc_loss=0.1376, over 1497783.82 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 16:52:37,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120016.0, ans=0.125
+2024-08-25 16:53:12,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120069.33333333333, ans=0.125
+2024-08-25 16:53:40,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=120176.0, ans=0.05
+2024-08-25 16:53:47,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.460e+02 1.798e+02 2.253e+02 2.860e+02 4.134e+02, threshold=4.507e+02, percent-clipped=0.0
+2024-08-25 16:54:30,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0
+2024-08-25 16:54:47,463 INFO [train.py:1114] (0/4) Epoch 10, batch 150, loss[loss=0.2617, simple_loss=0.2978, pruned_loss=0.08234, ctc_loss=0.1522, over 19725.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3017, pruned_loss=0.07164, ctc_loss=0.1345, over 2026659.98 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 16:54:54,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=120282.66666666667, ans=0.025
+2024-08-25 16:55:05,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120336.0, ans=0.125
+2024-08-25 16:55:39,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=120442.66666666667, ans=0.125
+2024-08-25 16:55:43,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=120442.66666666667, ans=0.125
+2024-08-25 16:56:01,839 INFO [train.py:1114] (0/4) Epoch 10, batch 200, loss[loss=0.262, simple_loss=0.3072, pruned_loss=0.07912, ctc_loss=0.1466, over 18272.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.2994, pruned_loss=0.07085, ctc_loss=0.1329, over 2434216.39 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 16:56:02,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0
+2024-08-25 16:56:10,317 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.623e-03
+2024-08-25 16:56:11,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=120549.33333333333, ans=0.125
+2024-08-25 16:57:32,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=120656.0, ans=0.125
+2024-08-25 16:57:36,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120656.0, ans=0.1
+2024-08-25 16:58:07,761 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.824e+02 2.064e+02 2.548e+02 6.143e+02, threshold=4.128e+02, percent-clipped=2.0
+2024-08-25 16:58:32,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120816.0, ans=0.125
+2024-08-25 16:58:32,994 INFO [train.py:1114] (0/4) Epoch 10, batch 250, loss[loss=0.2775, simple_loss=0.324, pruned_loss=0.08482, ctc_loss=0.1532, over 19445.00 frames. ], tot_loss[loss=0.246, simple_loss=0.2989, pruned_loss=0.07023, ctc_loss=0.1318, over 2755282.56 frames. ], batch size: 67, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 16:58:58,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120869.33333333333, ans=0.125
+2024-08-25 16:58:59,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=120869.33333333333, ans=0.0
+2024-08-25 16:59:47,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.87 vs. limit=22.5
+2024-08-25 16:59:56,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=121029.33333333333, ans=0.2
+2024-08-25 17:00:02,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.17 vs. limit=22.5
+2024-08-25 17:00:08,739 INFO [train.py:1114] (0/4) Epoch 10, batch 300, loss[loss=0.2971, simple_loss=0.3351, pruned_loss=0.09341, ctc_loss=0.1805, over 19529.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.2977, pruned_loss=0.06965, ctc_loss=0.1309, over 3000652.65 frames. ], batch size: 61, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 17:00:21,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=121082.66666666667, ans=0.0
+2024-08-25 17:00:22,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.47 vs. limit=15.0
+2024-08-25 17:00:23,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=121082.66666666667, ans=0.125
+2024-08-25 17:00:34,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=121136.0, ans=0.125
+2024-08-25 17:00:43,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
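
The scaling.py:1024 Whitening entries compare a whitening metric of a module's output against its limit; values under the limit (such as metric=4.09 vs. limit=6.0 just above) need no correction, while values near or above it flag activations whose covariance is far from a multiple of the identity. A rough reconstruction of such a metric, illustrative rather than the exact scaling.py computation:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """How far the feature covariance C is from a multiple of the identity:
    mean(diag(C @ C)) / mean(diag(C))**2 equals 1.0 for perfectly 'white'
    features and grows as variance concentrates in a few directions.
    Illustrative reconstruction, not the exact scaling.py code."""
    x = x.reshape(-1, x.shape[-1]).double()
    cov = x.t() @ x / x.shape[0]          # (num_channels, num_channels)
    num = torch.diagonal(cov @ cov).mean()
    den = torch.diagonal(cov).mean() ** 2
    return num / den
```

Logging metric vs. limit per module, as these entries do, makes it easy to spot which attention or feed-forward blocks are drifting toward degenerate activations.
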
+2024-08-25 17:00:50,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=121189.33333333333, ans=0.125
+2024-08-25 17:01:01,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.908e+02 2.186e+02 2.769e+02 4.118e+02, threshold=4.372e+02, percent-clipped=0.0
+2024-08-25 17:02:40,284 INFO [train.py:1114] (0/4) Epoch 10, batch 350, loss[loss=0.2065, simple_loss=0.2655, pruned_loss=0.05429, ctc_loss=0.09698, over 19747.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.2977, pruned_loss=0.06953, ctc_loss=0.1309, over 3191619.59 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 17:02:49,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=121349.33333333333, ans=15.0
+2024-08-25 17:02:58,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=121402.66666666667, ans=0.2
+2024-08-25 17:03:42,414 INFO [train.py:1114] (0/4) Epoch 10, batch 400, loss[loss=0.2343, simple_loss=0.2967, pruned_loss=0.06266, ctc_loss=0.1162, over 19862.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2966, pruned_loss=0.06876, ctc_loss=0.1293, over 3342903.28 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:03:44,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121616.0, ans=0.1
+2024-08-25 17:03:47,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=121616.0, ans=0.125
+2024-08-25 17:03:48,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=121616.0, ans=0.0
+2024-08-25 17:03:54,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121669.33333333333, ans=0.125
+2024-08-25 17:03:54,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=121669.33333333333, ans=0.125
+2024-08-25 17:04:05,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=121669.33333333333, ans=0.015
+2024-08-25 17:04:33,762 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.409e+02 1.874e+02 2.151e+02 2.761e+02 4.102e+02, threshold=4.302e+02, percent-clipped=0.0
+2024-08-25 17:04:42,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121829.33333333333, ans=0.1
+2024-08-25 17:04:47,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=121829.33333333333, ans=0.0
+2024-08-25 17:04:50,479 INFO [train.py:1114] (0/4) Epoch 10, batch 450, loss[loss=0.232, simple_loss=0.2942, pruned_loss=0.06106, ctc_loss=0.1193, over 19603.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.297, pruned_loss=0.06914, ctc_loss=0.1302, over 3450466.04 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:07:13,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.66 vs. limit=15.0
+2024-08-25 17:07:15,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=121936.0, ans=0.2
+2024-08-25 17:07:43,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=12.0
+2024-08-25 17:07:48,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=121989.33333333333, ans=0.125
+2024-08-25 17:08:12,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=122042.66666666667, ans=0.2
+2024-08-25 17:09:04,069 INFO [train.py:1114] (0/4) Epoch 10, batch 500, loss[loss=0.2553, simple_loss=0.305, pruned_loss=0.07505, ctc_loss=0.1389, over 19671.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2957, pruned_loss=0.06848, ctc_loss=0.1291, over 3546117.80 frames. ], batch size: 63, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:09:09,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122149.33333333333, ans=0.0
+2024-08-25 17:09:25,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122202.66666666667, ans=0.1
+2024-08-25 17:09:29,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=122202.66666666667, ans=0.2
+2024-08-25 17:09:30,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.49 vs. limit=22.5
+2024-08-25 17:09:36,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=122202.66666666667, ans=0.125
+2024-08-25 17:09:41,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=122256.0, ans=0.2
+2024-08-25 17:10:03,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=122256.0, ans=0.125
+2024-08-25 17:10:21,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=122309.33333333333, ans=10.0
+2024-08-25 17:10:22,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=122309.33333333333, ans=0.0
+2024-08-25 17:10:36,242 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.797e+02 2.290e+02 2.870e+02 3.920e+02, threshold=4.579e+02, percent-clipped=0.0
+2024-08-25 17:10:38,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122362.66666666667, ans=0.125
+2024-08-25 17:10:51,440 INFO [train.py:1114] (0/4) Epoch 10, batch 550, loss[loss=0.2989, simple_loss=0.3358, pruned_loss=0.09475, ctc_loss=0.1812, over 19236.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2963, pruned_loss=0.06908, ctc_loss=0.1303, over 3606988.23 frames. ], batch size: 71, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:11:07,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.35 vs. limit=15.0
+2024-08-25 17:11:09,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=122416.0, ans=0.2
+2024-08-25 17:13:54,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122576.0, ans=0.0
+2024-08-25 17:14:01,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.20 vs. limit=6.0
+2024-08-25 17:14:20,767 INFO [train.py:1114] (0/4) Epoch 10, batch 600, loss[loss=0.2906, simple_loss=0.3277, pruned_loss=0.0928, ctc_loss=0.1695, over 19406.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2964, pruned_loss=0.06888, ctc_loss=0.1301, over 3665624.41 frames. ], batch size: 67, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:14:22,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122682.66666666667, ans=0.125
+2024-08-25 17:14:27,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.70 vs. limit=10.0
+2024-08-25 17:15:05,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122842.66666666667, ans=0.125
+2024-08-25 17:15:08,638 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.815e+02 2.061e+02 2.496e+02 4.365e+02, threshold=4.122e+02, percent-clipped=0.0
+2024-08-25 17:15:10,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=122896.0, ans=0.0
+2024-08-25 17:15:15,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122896.0, ans=0.1
+2024-08-25 17:15:24,797 INFO [train.py:1114] (0/4) Epoch 10, batch 650, loss[loss=0.2502, simple_loss=0.2998, pruned_loss=0.07247, ctc_loss=0.1392, over 19767.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2955, pruned_loss=0.0681, ctc_loss=0.1286, over 3716191.05 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:15:43,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.47 vs. limit=22.5
+2024-08-25 17:15:51,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0
+2024-08-25 17:16:01,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=123109.33333333333, ans=0.04949747468305833
+2024-08-25 17:16:05,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.37 vs. limit=15.0
+2024-08-25 17:16:34,664 INFO [train.py:1114] (0/4) Epoch 10, batch 700, loss[loss=0.2579, simple_loss=0.3053, pruned_loss=0.07646, ctc_loss=0.1438, over 19721.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.296, pruned_loss=0.06853, ctc_loss=0.1293, over 3747909.39 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:17:41,336 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.67 vs. limit=15.0
+2024-08-25 17:17:50,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=123269.33333333333, ans=0.125
+2024-08-25 17:18:07,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=123376.0, ans=0.2
+2024-08-25 17:18:13,482 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 1.934e+02 2.276e+02 3.026e+02 5.626e+02, threshold=4.552e+02, percent-clipped=3.0
+2024-08-25 17:18:16,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123429.33333333333, ans=0.1
+2024-08-25 17:18:17,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.86 vs. limit=15.0
+2024-08-25 17:18:22,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=123429.33333333333, ans=0.0
+2024-08-25 17:18:28,275 INFO [train.py:1114] (0/4) Epoch 10, batch 750, loss[loss=0.252, simple_loss=0.3101, pruned_loss=0.07096, ctc_loss=0.1303, over 19860.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2954, pruned_loss=0.06796, ctc_loss=0.128, over 3774351.17 frames. ], batch size: 55, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:18:32,973 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 17:18:40,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=123536.0, ans=6.0
+2024-08-25 17:18:40,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. limit=6.0
+2024-08-25 17:19:01,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.16 vs. limit=22.5
+2024-08-25 17:19:24,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=123696.0, ans=0.125
+2024-08-25 17:19:25,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.64 vs. limit=15.0
+2024-08-25 17:19:32,718 INFO [train.py:1114] (0/4) Epoch 10, batch 800, loss[loss=0.2283, simple_loss=0.2788, pruned_loss=0.06537, ctc_loss=0.1176, over 19816.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2958, pruned_loss=0.06824, ctc_loss=0.1285, over 3795624.09 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:20:04,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=123802.66666666667, ans=0.125
+2024-08-25 17:20:04,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.71 vs. limit=10.0
+2024-08-25 17:20:13,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123856.0, ans=0.1
+2024-08-25 17:20:24,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=123909.33333333333, ans=0.0
+2024-08-25 17:20:24,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=123909.33333333333, ans=0.125
+2024-08-25 17:20:26,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=123909.33333333333, ans=10.0
+2024-08-25 17:20:33,018 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.887e+02 2.136e+02 2.736e+02 3.984e+02, threshold=4.273e+02, percent-clipped=0.0
+2024-08-25 17:20:47,944 INFO [train.py:1114] (0/4) Epoch 10, batch 850, loss[loss=0.2385, simple_loss=0.2973, pruned_loss=0.06431, ctc_loss=0.1277, over 19633.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2955, pruned_loss=0.06821, ctc_loss=0.1284, over 3815807.51 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:20:59,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=124069.33333333333, ans=0.125
+2024-08-25 17:21:11,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=124122.66666666667, ans=0.125
+2024-08-25 17:21:13,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=124122.66666666667, ans=0.125
+2024-08-25 17:22:20,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=124229.33333333333, ans=0.0
+2024-08-25 17:22:28,570 INFO [train.py:1114] (0/4) Epoch 10, batch 900, loss[loss=0.2556, simple_loss=0.2873, pruned_loss=0.08098, ctc_loss=0.155, over 19398.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2955, pruned_loss=0.06868, ctc_loss=0.1292, over 3820191.06 frames. ], batch size: 48, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:22:36,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124282.66666666667, ans=0.1
+2024-08-25 17:22:36,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=124282.66666666667, ans=0.02
+2024-08-25 17:23:12,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=124442.66666666667, ans=0.0
+2024-08-25 17:23:13,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.54 vs. limit=10.0
+2024-08-25 17:23:13,941 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.854e+02 2.167e+02 2.763e+02 5.395e+02, threshold=4.333e+02, percent-clipped=2.0
+2024-08-25 17:23:30,305 INFO [train.py:1114] (0/4) Epoch 10, batch 950, loss[loss=0.2249, simple_loss=0.2835, pruned_loss=0.05984, ctc_loss=0.1166, over 19493.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2962, pruned_loss=0.06907, ctc_loss=0.13, over 3821394.20 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:23:41,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=124602.66666666667, ans=0.125
+2024-08-25 17:23:50,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.30 vs. limit=22.5
+2024-08-25 17:24:24,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124762.66666666667, ans=0.1
+2024-08-25 17:24:34,471 INFO [train.py:1114] (0/4) Epoch 10, batch 1000, loss[loss=0.2575, simple_loss=0.306, pruned_loss=0.07521, ctc_loss=0.1461, over 19839.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2965, pruned_loss=0.06926, ctc_loss=0.1301, over 3818105.96 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:24:43,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=124816.0, ans=0.0
+2024-08-25 17:24:50,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=124869.33333333333, ans=0.125
+2024-08-25 17:25:10,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=124976.0, ans=0.125
+2024-08-25 17:25:11,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=124976.0, ans=0.125
+2024-08-25 17:25:18,036 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.505e+02 1.797e+02 2.069e+02 2.553e+02 4.130e+02, threshold=4.138e+02, percent-clipped=0.0
+2024-08-25 17:25:25,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=125029.33333333333, ans=0.125
+2024-08-25 17:25:33,282 INFO [train.py:1114] (0/4) Epoch 10, batch 1050, loss[loss=0.2865, simple_loss=0.3298, pruned_loss=0.08846, ctc_loss=0.1655, over 19856.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2963, pruned_loss=0.06914, ctc_loss=0.1301, over 3822872.28 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:25:34,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=125082.66666666667, ans=0.025
+2024-08-25 17:26:00,305 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 17:26:13,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.66 vs. limit=15.0
+2024-08-25 17:26:20,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=125296.0, ans=0.125
+2024-08-25 17:26:21,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=125296.0, ans=0.1
+2024-08-25 17:26:25,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=125296.0, ans=0.125
+2024-08-25 17:26:32,056 INFO [train.py:1114] (0/4) Epoch 10, batch 1100, loss[loss=0.2563, simple_loss=0.3004, pruned_loss=0.07717, ctc_loss=0.1445, over 19586.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2956, pruned_loss=0.06839, ctc_loss=0.1287, over 3830875.15 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:26:32,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=125349.33333333333, ans=0.025
+2024-08-25 17:26:58,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=125456.0, ans=0.125
+2024-08-25 17:27:08,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=125456.0, ans=0.0
+2024-08-25 17:27:18,165 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.787e+02 2.060e+02 2.560e+02 4.808e+02, threshold=4.120e+02, percent-clipped=1.0
+2024-08-25 17:27:33,334 INFO [train.py:1114] (0/4) Epoch 10, batch 1150, loss[loss=0.2148, simple_loss=0.2764, pruned_loss=0.05445, ctc_loss=0.1104, over 19602.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2954, pruned_loss=0.06826, ctc_loss=0.1284, over 3828605.14 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:27:40,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=125616.0, ans=0.125
+2024-08-25 17:27:40,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0
+2024-08-25 17:28:10,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=125722.66666666667, ans=0.025
+2024-08-25 17:28:25,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=125776.0, ans=0.0
+2024-08-25 17:28:29,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.65 vs. limit=15.0
+2024-08-25 17:28:31,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.89 vs. limit=15.0
+2024-08-25 17:28:44,410 INFO [train.py:1114] (0/4) Epoch 10, batch 1200, loss[loss=0.2472, simple_loss=0.3056, pruned_loss=0.06717, ctc_loss=0.136, over 19844.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2958, pruned_loss=0.06827, ctc_loss=0.1286, over 3823900.47 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:28:56,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.63 vs. limit=15.0
+2024-08-25 17:29:04,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=125936.0, ans=0.2
+2024-08-25 17:29:30,100 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.823e+02 2.047e+02 2.358e+02 4.051e+02, threshold=4.094e+02, percent-clipped=0.0
+2024-08-25 17:29:32,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=126096.0, ans=0.125
+2024-08-25 17:29:33,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126096.0, ans=0.125
+2024-08-25 17:29:45,835 INFO [train.py:1114] (0/4) Epoch 10, batch 1250, loss[loss=0.2655, simple_loss=0.3108, pruned_loss=0.07998, ctc_loss=0.1506, over 19528.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2965, pruned_loss=0.06859, ctc_loss=0.1288, over 3842182.81 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:30:57,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=126362.66666666667, ans=0.125
+2024-08-25 17:30:59,775 INFO [train.py:1114] (0/4) Epoch 10, batch 1300, loss[loss=0.2759, simple_loss=0.3174, pruned_loss=0.08596, ctc_loss=0.1563, over 18915.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2957, pruned_loss=0.06803, ctc_loss=0.1279, over 3846635.47 frames. ], batch size: 76, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:31:45,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126469.33333333333, ans=0.125
+2024-08-25 17:32:13,113 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.900e+02 2.303e+02 2.970e+02 5.096e+02, threshold=4.606e+02, percent-clipped=7.0
+2024-08-25 17:32:19,441 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0
+2024-08-25 17:32:20,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.29 vs. limit=22.5
+2024-08-25 17:32:28,194 INFO [train.py:1114] (0/4) Epoch 10, batch 1350, loss[loss=0.2241, simple_loss=0.285, pruned_loss=0.05938, ctc_loss=0.111, over 19767.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2953, pruned_loss=0.0676, ctc_loss=0.1269, over 3858173.24 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:32:53,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=126789.33333333333, ans=0.09899494936611666
+2024-08-25 17:33:01,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=126842.66666666667, ans=0.07
+2024-08-25 17:33:18,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0
+2024-08-25 17:33:30,485 INFO [train.py:1114] (0/4) Epoch 10, batch 1400, loss[loss=0.2204, simple_loss=0.2691, pruned_loss=0.06153, ctc_loss=0.1218, over 19682.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.2946, pruned_loss=0.06727, ctc_loss=0.1261, over 3865225.95 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:33:38,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=15.0
+2024-08-25 17:33:44,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127002.66666666667, ans=0.1
+2024-08-25 17:33:49,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=12.0
+2024-08-25 17:33:57,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=127056.0, ans=0.125
+2024-08-25 17:34:42,450 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.856e+02 2.167e+02 2.631e+02 4.500e+02, threshold=4.335e+02, percent-clipped=0.0
+2024-08-25 17:35:02,138 INFO [train.py:1114] (0/4) Epoch 10, batch 1450, loss[loss=0.2367, simple_loss=0.3081, pruned_loss=0.06006, ctc_loss=0.113, over 19636.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2955, pruned_loss=0.06753, ctc_loss=0.1266, over 3863599.03 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:35:06,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=127216.0, ans=0.125
+2024-08-25 17:35:12,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.62 vs. limit=15.0
+2024-08-25 17:35:22,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=127269.33333333333, ans=0.0
+2024-08-25 17:35:57,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=127429.33333333333, ans=0.125
+2024-08-25 17:36:02,114 INFO [train.py:1114] (0/4) Epoch 10, batch 1500, loss[loss=0.2869, simple_loss=0.3256, pruned_loss=0.08966, ctc_loss=0.1721, over 19587.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2966, pruned_loss=0.06837, ctc_loss=0.1284, over 3862952.85 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:36:05,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0
+2024-08-25 17:36:09,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=127482.66666666667, ans=0.025
+2024-08-25 17:36:43,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=127589.33333333333, ans=0.0
+2024-08-25 17:36:50,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=127642.66666666667, ans=0.125
+2024-08-25 17:36:55,685 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.462e+02 1.877e+02 2.186e+02 2.626e+02 4.478e+02, threshold=4.372e+02, percent-clipped=1.0
+2024-08-25 17:37:23,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127749.33333333333, ans=0.125
+2024-08-25 17:37:24,243 INFO [train.py:1114] (0/4) Epoch 10, batch 1550, loss[loss=0.2225, simple_loss=0.2841, pruned_loss=0.05873, ctc_loss=0.1084, over 19610.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2964, pruned_loss=0.06857, ctc_loss=0.1288, over 3847527.24 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0
+2024-08-25 17:37:31,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=127749.33333333333, ans=0.125
+2024-08-25 17:37:40,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=127802.66666666667, ans=0.125
+2024-08-25 17:39:46,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=127962.66666666667, ans=0.0
+2024-08-25 17:39:52,372 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-24000.pt
+2024-08-25 17:41:06,795 INFO [train.py:1114] (0/4) Epoch 10, batch 1600, loss[loss=0.2342, simple_loss=0.2929, pruned_loss=0.06362, ctc_loss=0.1204, over 19839.00 frames. ], tot_loss[loss=0.243, simple_loss=0.2965, pruned_loss=0.06879, ctc_loss=0.1295, over 3836742.72 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:42:06,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128016.0, ans=0.0
+2024-08-25 17:42:35,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=128016.0, ans=0.0
+2024-08-25 17:42:42,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.79 vs. limit=22.5
+2024-08-25 17:42:44,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.82 vs. limit=22.5
+2024-08-25 17:42:45,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=128069.33333333333, ans=0.04949747468305833
+2024-08-25 17:43:02,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=128176.0, ans=0.125
+2024-08-25 17:43:14,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=128176.0, ans=0.125
+2024-08-25 17:43:24,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.528e+02 1.849e+02 2.080e+02 2.733e+02 5.175e+02, threshold=4.161e+02, percent-clipped=4.0
+2024-08-25 17:43:52,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128229.33333333333, ans=0.125
+2024-08-25 17:43:54,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128229.33333333333, ans=0.125
+2024-08-25 17:44:00,791 INFO [train.py:1114] (0/4) Epoch 10, batch 1650, loss[loss=0.2511, simple_loss=0.3065, pruned_loss=0.0702, ctc_loss=0.1381, over 19641.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2959, pruned_loss=0.06834, ctc_loss=0.1287, over 3834161.07 frames. ], batch size: 59, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:44:34,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=128336.0, ans=0.09899494936611666
+2024-08-25 17:45:16,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0
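
Besides the per-epoch epoch-9.pt seen earlier, checkpoint.py also writes batch-interval checkpoints such as checkpoint-24000.pt above. A minimal sketch of what such a checkpoint typically bundles; icefall's actual checkpoints carry more state (sampler position, grad scaler, and other bookkeeping):

```python
import torch

def save_checkpoint(filename, model, optimizer, scheduler, batch_idx_train):
    # Minimal sketch; the real checkpoint.py also stores sampler state,
    # the AMP grad scaler, and assorted training metadata.
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "batch_idx_train": batch_idx_train,
        },
        filename,
    )

# e.g. save_checkpoint("exp/checkpoint-24000.pt", model, optimizer, scheduler, 24000)
```

Keeping both per-epoch and per-interval checkpoints lets a run resume after a crash without losing most of an epoch, and supports checkpoint averaging at the end of training.
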
+2024-08-25 17:45:19,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=128496.0, ans=0.125
+2024-08-25 17:45:46,338 INFO [train.py:1114] (0/4) Epoch 10, batch 1700, loss[loss=0.2366, simple_loss=0.2794, pruned_loss=0.06986, ctc_loss=0.1354, over 19696.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2952, pruned_loss=0.06751, ctc_loss=0.1269, over 3847940.54 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:46:32,391 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.233e-01
+2024-08-25 17:46:34,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=128602.66666666667, ans=0.2
+2024-08-25 17:46:36,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128602.66666666667, ans=0.125
+2024-08-25 17:46:50,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=128656.0, ans=0.125
+2024-08-25 17:47:10,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. limit=15.0
+2024-08-25 17:47:11,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.773e+02 2.059e+02 2.527e+02 4.467e+02, threshold=4.119e+02, percent-clipped=1.0
+2024-08-25 17:48:08,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0
+2024-08-25 17:48:12,453 INFO [train.py:1114] (0/4) Epoch 10, batch 1750, loss[loss=0.238, simple_loss=0.2779, pruned_loss=0.07156, ctc_loss=0.1376, over 19649.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2947, pruned_loss=0.06745, ctc_loss=0.1271, over 3852797.68 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:48:20,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=128816.0, ans=0.125
+2024-08-25 17:48:25,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=128869.33333333333, ans=0.125
+2024-08-25 17:48:39,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.25 vs. limit=10.0
+2024-08-25 17:48:45,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=128922.66666666667, ans=0.125
+2024-08-25 17:49:11,939 INFO [train.py:1114] (0/4) Epoch 10, batch 1800, loss[loss=0.2234, simple_loss=0.2901, pruned_loss=0.05637, ctc_loss=0.1099, over 19616.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2951, pruned_loss=0.0677, ctc_loss=0.1276, over 3854022.31 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 17:50:12,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=129136.0, ans=0.95
+2024-08-25 18:00:10,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=129189.33333333333, ans=0.0
+2024-08-25 18:06:34,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129242.66666666667, ans=0.1
+2024-08-25 18:08:39,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=129242.66666666667, ans=0.0
+2024-08-25 18:11:17,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.930e+02 2.270e+02 3.115e+02 5.695e+02, threshold=4.540e+02, percent-clipped=10.0
+2024-08-25 18:13:41,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=129296.0, ans=0.125
+2024-08-25 18:17:46,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=129296.0, ans=0.125
+2024-08-25 18:19:59,315 INFO [train.py:1114] (0/4) Epoch 10, batch 1850, loss[loss=0.2533, simple_loss=0.3126, pruned_loss=0.07038, ctc_loss=0.1329, over 19577.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2948, pruned_loss=0.0675, ctc_loss=0.127, over 3858493.05 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:26:29,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129456.0, ans=0.125
+2024-08-25 18:28:59,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129509.33333333333, ans=0.125
+2024-08-25 18:29:10,893 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 18:29:51,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=129509.33333333333, ans=0.025
+2024-08-25 18:32:26,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=129562.66666666667, ans=0.2
+2024-08-25 18:32:37,431 INFO [train.py:1114] (0/4) Epoch 10, batch 1900, loss[loss=0.2149, simple_loss=0.2877, pruned_loss=0.05128, ctc_loss=0.09868, over 19653.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2959, pruned_loss=0.06815, ctc_loss=0.1279, over 3864230.46 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:33:08,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=129616.0, ans=0.125
+2024-08-25 18:35:38,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0
+2024-08-25 18:35:38,800 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.55 vs. limit=15.0
+2024-08-25 18:36:33,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129722.66666666667, ans=0.125
+2024-08-25 18:37:43,362 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.882e+02 2.156e+02 2.772e+02 4.689e+02, threshold=4.313e+02, percent-clipped=1.0
+2024-08-25 18:37:43,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=129776.0, ans=0.125
+2024-08-25 18:38:09,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=129829.33333333333, ans=0.0
+2024-08-25 18:38:51,199 INFO [train.py:1114] (0/4) Epoch 10, batch 1950, loss[loss=0.2462, simple_loss=0.2931, pruned_loss=0.07125, ctc_loss=0.1419, over 19591.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2964, pruned_loss=0.06802, ctc_loss=0.1277, over 3872510.94 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:38:51,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=129882.66666666667, ans=0.125
+2024-08-25 18:39:44,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=129882.66666666667, ans=0.0
+2024-08-25 18:40:13,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=129936.0, ans=0.125
+2024-08-25 18:40:23,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=129936.0, ans=0.125
+2024-08-25 18:42:07,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.05 vs. limit=22.5
+2024-08-25 18:43:02,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=130096.0, ans=0.025
+2024-08-25 18:44:04,313 INFO [train.py:1114] (0/4) Epoch 10, batch 2000, loss[loss=0.2399, simple_loss=0.2808, pruned_loss=0.07218, ctc_loss=0.1366, over 19679.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2968, pruned_loss=0.06816, ctc_loss=0.1281, over 3857015.78 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:44:04,438 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 18:44:10,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.93 vs. limit=22.5
+2024-08-25 18:44:40,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=130202.66666666667, ans=0.125
+2024-08-25 18:45:35,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=130256.0, ans=0.2
+2024-08-25 18:47:32,406 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.882e+02 2.262e+02 2.707e+02 4.864e+02, threshold=4.523e+02, percent-clipped=1.0
+2024-08-25 18:48:39,774 INFO [train.py:1114] (0/4) Epoch 10, batch 2050, loss[loss=0.2176, simple_loss=0.2625, pruned_loss=0.06243, ctc_loss=0.1197, over 19736.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2952, pruned_loss=0.06745, ctc_loss=0.1271, over 3851578.47 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:49:07,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.75 vs. limit=15.0
+2024-08-25 18:50:09,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=130469.33333333333, ans=0.125
+2024-08-25 18:50:09,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130469.33333333333, ans=0.1
+2024-08-25 18:51:50,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=130629.33333333333, ans=0.125
+2024-08-25 18:52:20,501 INFO [train.py:1114] (0/4) Epoch 10, batch 2100, loss[loss=0.2321, simple_loss=0.2872, pruned_loss=0.06427, ctc_loss=0.121, over 19749.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2957, pruned_loss=0.06814, ctc_loss=0.128, over 3858479.51 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 16.0
+2024-08-25 18:52:23,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=130682.66666666667, ans=0.125
+2024-08-25 18:52:25,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=130682.66666666667, ans=0.0
+2024-08-25 18:52:40,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=130736.0, ans=0.0
+2024-08-25 18:53:34,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130789.33333333333, ans=0.125
+2024-08-25 18:53:50,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0
+2024-08-25 18:53:53,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130842.66666666667, ans=0.0
+2024-08-25 18:53:58,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.839e+02 2.296e+02 2.721e+02 6.154e+02, threshold=4.593e+02, percent-clipped=3.0
+2024-08-25 18:54:37,552 INFO [train.py:1114] (0/4) Epoch 10, batch 2150, loss[loss=0.2206, simple_loss=0.2792, pruned_loss=0.05918, ctc_loss=0.109, over 19579.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2946, pruned_loss=0.06747, ctc_loss=0.1265, over 3869417.32 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:54:57,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=131002.66666666667, ans=0.125
+2024-08-25 18:55:03,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131002.66666666667, ans=0.1
+2024-08-25 18:55:37,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=131056.0, ans=0.05
+2024-08-25 18:56:07,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=15.0
+2024-08-25 18:56:19,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131162.66666666666, ans=0.1
+2024-08-25 18:56:33,016 INFO [train.py:1114] (0/4) Epoch 10, batch 2200, loss[loss=0.2342, simple_loss=0.3019, pruned_loss=0.06098, ctc_loss=0.1115, over 19596.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.2943, pruned_loss=0.06713, ctc_loss=0.1259, over 3867611.73 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:56:44,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=131216.0, ans=0.125
+2024-08-25 18:57:10,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=131269.33333333334, ans=0.2
+2024-08-25 18:57:38,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.20 vs. limit=15.0
+2024-08-25 18:57:50,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=131376.0, ans=0.2
+2024-08-25 18:57:51,462 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.773e+02 2.006e+02 2.540e+02 3.937e+02, threshold=4.013e+02, percent-clipped=0.0
+2024-08-25 18:58:07,774 INFO [train.py:1114] (0/4) Epoch 10, batch 2250, loss[loss=0.256, simple_loss=0.3059, pruned_loss=0.0758, ctc_loss=0.1362, over 19624.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.2946, pruned_loss=0.06733, ctc_loss=0.1261, over 3867698.01 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:58:14,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=131482.66666666666, ans=0.125
+2024-08-25 18:58:24,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.0
+2024-08-25 18:58:29,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131536.0, ans=0.1
+2024-08-25 18:58:33,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=131589.33333333334, ans=0.2
+2024-08-25 18:58:50,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.24 vs. limit=15.0
+2024-08-25 18:58:51,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=131642.66666666666, ans=0.09899494936611666
+2024-08-25 18:59:05,063 INFO [train.py:1114] (0/4) Epoch 10, batch 2300, loss[loss=0.2089, simple_loss=0.2714, pruned_loss=0.05281, ctc_loss=0.1019, over 19495.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2935, pruned_loss=0.06699, ctc_loss=0.1256, over 3861259.43 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:59:06,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.94 vs.
limit=15.0 +2024-08-25 18:59:22,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=131802.66666666666, ans=0.0 +2024-08-25 18:59:30,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=131856.0, ans=0.125 +2024-08-25 18:59:52,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=131909.33333333334, ans=0.0 +2024-08-25 18:59:54,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131909.33333333334, ans=0.1 +2024-08-25 19:00:00,743 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.815e+02 2.310e+02 2.961e+02 4.661e+02, threshold=4.621e+02, percent-clipped=5.0 +2024-08-25 19:00:07,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=131962.66666666666, ans=0.2 +2024-08-25 19:00:14,686 INFO [train.py:1114] (0/4) Epoch 10, batch 2350, loss[loss=0.2514, simple_loss=0.3124, pruned_loss=0.07053, ctc_loss=0.1233, over 19695.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.2938, pruned_loss=0.0672, ctc_loss=0.126, over 3864159.33 frames. ], batch size: 63, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 19:00:21,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=132016.0, ans=0.0 +2024-08-25 19:00:32,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.12 vs. limit=15.0 +2024-08-25 19:00:53,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=132176.0, ans=0.125 +2024-08-25 19:00:56,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=132176.0, ans=0.125 +2024-08-25 19:01:03,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132229.33333333334, ans=0.125 +2024-08-25 19:01:13,185 INFO [train.py:1114] (0/4) Epoch 10, batch 2400, loss[loss=0.247, simple_loss=0.2994, pruned_loss=0.07074, ctc_loss=0.1327, over 19387.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.2961, pruned_loss=0.06838, ctc_loss=0.1283, over 3858298.54 frames. ], batch size: 67, lr: 1.46e-02, grad_scale: 32.0 +2024-08-25 19:01:24,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=132336.0, ans=0.125 +2024-08-25 19:01:29,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=132336.0, ans=0.025 +2024-08-25 19:01:32,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132336.0, ans=0.125 +2024-08-25 19:01:35,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=132336.0, ans=0.0 +2024-08-25 19:01:58,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.03 vs. 
limit=12.0 +2024-08-25 19:02:10,724 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.986e+02 2.279e+02 2.618e+02 8.799e+02, threshold=4.558e+02, percent-clipped=0.0 +2024-08-25 19:02:21,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=132549.33333333334, ans=0.125 +2024-08-25 19:02:22,031 INFO [train.py:1114] (0/4) Epoch 10, batch 2450, loss[loss=0.3213, simple_loss=0.3418, pruned_loss=0.11, ctc_loss=0.2022, over 14117.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3008, pruned_loss=0.07194, ctc_loss=0.1354, over 3732667.83 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 19:03:09,889 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-10.pt +2024-08-25 19:04:28,611 INFO [train.py:1114] (0/4) Epoch 11, batch 0, loss[loss=0.2676, simple_loss=0.305, pruned_loss=0.08401, ctc_loss=0.1556, over 19429.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.305, pruned_loss=0.08401, ctc_loss=0.1556, over 19429.00 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:04:28,612 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 19:04:55,863 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.2031, simple_loss=0.2887, pruned_loss=0.04339, ctc_loss=0.0768, over 944034.00 frames. +2024-08-25 19:04:55,863 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-25 19:04:58,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.07 vs. limit=15.0 +2024-08-25 19:05:00,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=132757.33333333334, ans=0.125 +2024-08-25 19:05:26,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=132864.0, ans=0.0 +2024-08-25 19:06:02,239 INFO [train.py:1114] (0/4) Epoch 11, batch 50, loss[loss=0.2049, simple_loss=0.2636, pruned_loss=0.05351, ctc_loss=0.09771, over 19703.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3005, pruned_loss=0.07083, ctc_loss=0.1351, over 844145.34 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:06:03,359 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.465e+02 2.050e+02 2.234e+02 2.552e+02 4.359e+02, threshold=4.468e+02, percent-clipped=1.0 +2024-08-25 19:06:14,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=133024.0, ans=0.025 +2024-08-25 19:06:34,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. 
limit=6.0 +2024-08-25 19:06:38,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=133130.66666666666, ans=0.95 +2024-08-25 19:06:51,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133184.0, ans=0.1 +2024-08-25 19:07:25,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=133184.0, ans=0.0 +2024-08-25 19:07:42,547 INFO [train.py:1114] (0/4) Epoch 11, batch 100, loss[loss=0.2239, simple_loss=0.2842, pruned_loss=0.05928, ctc_loss=0.1126, over 19719.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.2998, pruned_loss=0.06887, ctc_loss=0.1306, over 1498581.10 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:07:54,845 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:07:59,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.59 vs. limit=15.0 +2024-08-25 19:08:20,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=133450.66666666666, ans=0.2 +2024-08-25 19:08:23,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.38 vs. limit=10.0 +2024-08-25 19:08:23,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=133450.66666666666, ans=0.125 +2024-08-25 19:08:26,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=133450.66666666666, ans=0.04949747468305833 +2024-08-25 19:08:45,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=133504.0, ans=0.0 +2024-08-25 19:08:59,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=12.0 +2024-08-25 19:09:10,084 INFO [train.py:1114] (0/4) Epoch 11, batch 150, loss[loss=0.2312, simple_loss=0.2738, pruned_loss=0.06807, ctc_loss=0.1311, over 19738.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.2956, pruned_loss=0.0666, ctc_loss=0.1259, over 2028235.97 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:09:12,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.747e+02 2.015e+02 2.344e+02 3.708e+02, threshold=4.031e+02, percent-clipped=0.0 +2024-08-25 19:09:59,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=133717.33333333334, ans=0.2 +2024-08-25 19:10:24,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0 +2024-08-25 19:10:34,574 INFO [train.py:1114] (0/4) Epoch 11, batch 200, loss[loss=0.2805, simple_loss=0.3263, pruned_loss=0.08527, ctc_loss=0.1606, over 18128.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2935, pruned_loss=0.06534, ctc_loss=0.1233, over 2435787.07 frames. 
], batch size: 85, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:11:34,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133930.66666666666, ans=0.125 +2024-08-25 19:11:54,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=134037.33333333334, ans=0.2 +2024-08-25 19:11:59,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0 +2024-08-25 19:12:01,076 INFO [train.py:1114] (0/4) Epoch 11, batch 250, loss[loss=0.2648, simple_loss=0.311, pruned_loss=0.07982, ctc_loss=0.1474, over 19397.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.293, pruned_loss=0.06547, ctc_loss=0.1235, over 2755511.01 frames. ], batch size: 67, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:12:02,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.716e+02 2.023e+02 2.469e+02 5.021e+02, threshold=4.046e+02, percent-clipped=3.0 +2024-08-25 19:12:14,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=134144.0, ans=0.1 +2024-08-25 19:12:18,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=134144.0, ans=0.025 +2024-08-25 19:12:19,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0 +2024-08-25 19:12:21,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=134144.0, ans=0.2 +2024-08-25 19:12:32,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=134197.33333333334, ans=0.0 +2024-08-25 19:12:58,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=134304.0, ans=0.125 +2024-08-25 19:13:03,621 INFO [train.py:1114] (0/4) Epoch 11, batch 300, loss[loss=0.2855, simple_loss=0.323, pruned_loss=0.09191, ctc_loss=0.1605, over 19497.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2921, pruned_loss=0.06496, ctc_loss=0.1226, over 2999613.45 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:13:06,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=134357.33333333334, ans=0.0 +2024-08-25 19:13:12,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=134357.33333333334, ans=0.0 +2024-08-25 19:13:39,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134464.0, ans=0.1 +2024-08-25 19:13:57,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=134570.66666666666, ans=0.125 +2024-08-25 19:14:07,053 INFO [train.py:1114] (0/4) Epoch 11, batch 350, loss[loss=0.219, simple_loss=0.2709, pruned_loss=0.06068, ctc_loss=0.1143, over 19775.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2923, pruned_loss=0.06496, ctc_loss=0.1222, over 3190014.24 frames. 
], batch size: 48, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:14:08,106 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.838e+02 2.258e+02 2.898e+02 4.827e+02, threshold=4.516e+02, percent-clipped=2.0 +2024-08-25 19:14:08,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=134624.0, ans=0.125 +2024-08-25 19:14:48,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.87 vs. limit=22.5 +2024-08-25 19:14:57,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=134730.66666666666, ans=0.125 +2024-08-25 19:15:05,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=134730.66666666666, ans=0.125 +2024-08-25 19:15:14,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134784.0, ans=0.1 +2024-08-25 19:15:49,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=134837.33333333334, ans=0.0 +2024-08-25 19:15:49,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.58 vs. limit=10.0 +2024-08-25 19:15:57,894 INFO [train.py:1114] (0/4) Epoch 11, batch 400, loss[loss=0.2365, simple_loss=0.3, pruned_loss=0.06358, ctc_loss=0.1145, over 19835.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2923, pruned_loss=0.0649, ctc_loss=0.1221, over 3342096.11 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:15:59,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.53 vs. limit=15.0 +2024-08-25 19:16:15,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=134944.0, ans=0.0 +2024-08-25 19:16:30,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=134944.0, ans=0.035 +2024-08-25 19:16:31,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=134944.0, ans=0.09899494936611666 +2024-08-25 19:16:38,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=134997.33333333334, ans=0.125 +2024-08-25 19:17:13,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=135104.0, ans=0.05 +2024-08-25 19:17:22,222 INFO [train.py:1114] (0/4) Epoch 11, batch 450, loss[loss=0.2196, simple_loss=0.2866, pruned_loss=0.05533, ctc_loss=0.1048, over 19612.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2922, pruned_loss=0.06509, ctc_loss=0.1223, over 3450879.35 frames. 
], batch size: 55, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:17:31,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.841e+02 2.102e+02 2.681e+02 4.407e+02, threshold=4.204e+02, percent-clipped=0.0 +2024-08-25 19:17:41,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=135210.66666666666, ans=0.0 +2024-08-25 19:17:49,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=135210.66666666666, ans=0.125 +2024-08-25 19:18:31,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135370.66666666666, ans=0.125 +2024-08-25 19:18:34,775 INFO [train.py:1114] (0/4) Epoch 11, batch 500, loss[loss=0.2582, simple_loss=0.3072, pruned_loss=0.07608, ctc_loss=0.1427, over 19700.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2916, pruned_loss=0.06514, ctc_loss=0.1225, over 3546311.09 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:18:54,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0 +2024-08-25 19:19:42,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135584.0, ans=0.1 +2024-08-25 19:20:13,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=135637.33333333334, ans=0.2 +2024-08-25 19:20:13,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135637.33333333334, ans=0.125 +2024-08-25 19:20:17,206 INFO [train.py:1114] (0/4) Epoch 11, batch 550, loss[loss=0.217, simple_loss=0.2835, pruned_loss=0.05417, ctc_loss=0.1053, over 19187.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.2918, pruned_loss=0.0653, ctc_loss=0.1228, over 3609092.84 frames. 
], batch size: 71, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:20:18,391 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.822e+02 2.069e+02 2.386e+02 4.149e+02, threshold=4.137e+02, percent-clipped=0.0 +2024-08-25 19:20:21,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=135690.66666666666, ans=0.0 +2024-08-25 19:20:22,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135690.66666666666, ans=0.125 +2024-08-25 19:20:42,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=135744.0, ans=0.125 +2024-08-25 19:20:49,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135797.33333333334, ans=0.125 +2024-08-25 19:21:08,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135850.66666666666, ans=0.125 +2024-08-25 19:21:22,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=135904.0, ans=0.0 +2024-08-25 19:21:24,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=135904.0, ans=0.0 +2024-08-25 19:21:30,824 INFO [train.py:1114] (0/4) Epoch 11, batch 600, loss[loss=0.2449, simple_loss=0.3064, pruned_loss=0.06652, ctc_loss=0.1261, over 19392.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2918, pruned_loss=0.06505, ctc_loss=0.1224, over 3667260.93 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:21:43,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135957.33333333334, ans=0.125 +2024-08-25 19:22:28,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=136010.66666666666, ans=0.07 +2024-08-25 19:22:34,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. limit=12.0 +2024-08-25 19:22:48,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=136064.0, ans=0.025 +2024-08-25 19:23:41,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=136170.66666666666, ans=0.125 +2024-08-25 19:23:54,558 INFO [train.py:1114] (0/4) Epoch 11, batch 650, loss[loss=0.2156, simple_loss=0.2866, pruned_loss=0.05134, ctc_loss=0.1046, over 19769.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.291, pruned_loss=0.06468, ctc_loss=0.1218, over 3717034.93 frames. 
], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:23:55,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.913e+02 2.094e+02 2.738e+02 4.984e+02, threshold=4.187e+02, percent-clipped=5.0 +2024-08-25 19:24:42,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=136330.66666666666, ans=0.2 +2024-08-25 19:24:43,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=136330.66666666666, ans=0.025 +2024-08-25 19:24:57,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=136384.0, ans=0.0 +2024-08-25 19:25:06,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136437.33333333334, ans=0.125 +2024-08-25 19:25:12,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=136437.33333333334, ans=0.2 +2024-08-25 19:25:18,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=136490.66666666666, ans=0.025 +2024-08-25 19:25:34,150 INFO [train.py:1114] (0/4) Epoch 11, batch 700, loss[loss=0.225, simple_loss=0.2877, pruned_loss=0.05883, ctc_loss=0.1117, over 19722.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2915, pruned_loss=0.06457, ctc_loss=0.1215, over 3748776.55 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:26:11,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0 +2024-08-25 19:26:32,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=136544.0, ans=0.0 +2024-08-25 19:26:43,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.75 vs. limit=10.0 +2024-08-25 19:26:52,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=136597.33333333334, ans=0.07 +2024-08-25 19:27:12,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=136650.66666666666, ans=0.2 +2024-08-25 19:27:49,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=136704.0, ans=0.125 +2024-08-25 19:28:08,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=136757.33333333334, ans=0.2 +2024-08-25 19:28:09,974 INFO [train.py:1114] (0/4) Epoch 11, batch 750, loss[loss=0.2403, simple_loss=0.295, pruned_loss=0.06881, ctc_loss=0.1201, over 19492.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2906, pruned_loss=0.0642, ctc_loss=0.1206, over 3775041.64 frames. 
], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:28:25,939 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.821e+02 2.028e+02 2.720e+02 4.524e+02, threshold=4.057e+02, percent-clipped=2.0 +2024-08-25 19:28:49,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=136810.66666666666, ans=10.0 +2024-08-25 19:29:57,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136917.33333333334, ans=0.125 +2024-08-25 19:30:12,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136970.66666666666, ans=0.1 +2024-08-25 19:32:08,680 INFO [train.py:1114] (0/4) Epoch 11, batch 800, loss[loss=0.2108, simple_loss=0.2625, pruned_loss=0.0583, ctc_loss=0.1065, over 19826.00 frames. ], tot_loss[loss=0.234, simple_loss=0.291, pruned_loss=0.06435, ctc_loss=0.121, over 3797332.69 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:33:43,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=137237.33333333334, ans=0.0 +2024-08-25 19:33:47,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=137237.33333333334, ans=0.2 +2024-08-25 19:33:49,180 INFO [train.py:1114] (0/4) Epoch 11, batch 850, loss[loss=0.255, simple_loss=0.3096, pruned_loss=0.07322, ctc_loss=0.1348, over 19636.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.291, pruned_loss=0.06445, ctc_loss=0.1213, over 3815886.78 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:33:50,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.863e+02 2.065e+02 2.415e+02 4.305e+02, threshold=4.130e+02, percent-clipped=1.0 +2024-08-25 19:33:52,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=137290.66666666666, ans=0.0 +2024-08-25 19:33:55,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=137290.66666666666, ans=0.125 +2024-08-25 19:34:09,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=137344.0, ans=0.0 +2024-08-25 19:34:20,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=137397.33333333334, ans=0.0 +2024-08-25 19:34:40,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=137450.66666666666, ans=0.125 +2024-08-25 19:34:44,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=137450.66666666666, ans=0.035 +2024-08-25 19:34:47,638 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:35:05,261 INFO [train.py:1114] (0/4) Epoch 11, batch 900, loss[loss=0.2383, simple_loss=0.2797, pruned_loss=0.07069, ctc_loss=0.139, over 19416.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2917, pruned_loss=0.06509, ctc_loss=0.1224, over 3820477.17 frames. 
], batch size: 48, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:35:23,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=137610.66666666666, ans=0.0 +2024-08-25 19:35:46,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137664.0, ans=0.0 +2024-08-25 19:36:16,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.36 vs. limit=15.0 +2024-08-25 19:36:19,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.36 vs. limit=15.0 +2024-08-25 19:36:50,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=137770.66666666666, ans=0.125 +2024-08-25 19:36:53,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=137770.66666666666, ans=0.07 +2024-08-25 19:37:18,561 INFO [train.py:1114] (0/4) Epoch 11, batch 950, loss[loss=0.2694, simple_loss=0.3036, pruned_loss=0.08605, ctc_loss=0.1579, over 19507.00 frames. ], tot_loss[loss=0.236, simple_loss=0.292, pruned_loss=0.06539, ctc_loss=0.1229, over 3820471.57 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:37:19,709 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.805e+02 2.081e+02 2.536e+02 4.211e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-25 19:37:41,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=137877.33333333334, ans=0.125 +2024-08-25 19:37:41,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=137877.33333333334, ans=0.025 +2024-08-25 19:37:42,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137930.66666666666, ans=0.1 +2024-08-25 19:38:40,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=138090.66666666666, ans=0.125 +2024-08-25 19:38:48,986 INFO [train.py:1114] (0/4) Epoch 11, batch 1000, loss[loss=0.2112, simple_loss=0.2747, pruned_loss=0.05381, ctc_loss=0.1003, over 19840.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2924, pruned_loss=0.06571, ctc_loss=0.1233, over 3815736.60 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:39:27,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=138197.33333333334, ans=0.025 +2024-08-25 19:39:40,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.82 vs. 
limit=15.0 +2024-08-25 19:39:50,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=138250.66666666666, ans=0.2 +2024-08-25 19:40:03,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=138304.0, ans=0.125 +2024-08-25 19:40:14,835 INFO [train.py:1114] (0/4) Epoch 11, batch 1050, loss[loss=0.2528, simple_loss=0.3092, pruned_loss=0.07007, ctc_loss=0.1405, over 19837.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.2918, pruned_loss=0.06554, ctc_loss=0.1231, over 3821539.10 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:40:16,861 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.874e+02 2.329e+02 2.645e+02 4.211e+02, threshold=4.658e+02, percent-clipped=2.0 +2024-08-25 19:40:38,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0 +2024-08-25 19:40:38,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.65 vs. limit=10.0 +2024-08-25 19:40:46,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138464.0, ans=0.1 +2024-08-25 19:40:50,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.91 vs. limit=10.0 +2024-08-25 19:41:02,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138517.33333333334, ans=0.1 +2024-08-25 19:41:26,135 INFO [train.py:1114] (0/4) Epoch 11, batch 1100, loss[loss=0.22, simple_loss=0.2796, pruned_loss=0.05806, ctc_loss=0.111, over 19566.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2915, pruned_loss=0.06509, ctc_loss=0.1221, over 3828733.98 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:41:26,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138624.0, ans=0.125 +2024-08-25 19:41:27,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=138624.0, ans=0.125 +2024-08-25 19:41:55,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138624.0, ans=0.1 +2024-08-25 19:42:26,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138677.33333333334, ans=0.1 +2024-08-25 19:42:40,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138677.33333333334, ans=0.1 +2024-08-25 19:43:03,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=138730.66666666666, ans=0.2 +2024-08-25 19:43:18,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=138784.0, ans=0.125 +2024-08-25 19:43:18,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.99 vs. 
limit=15.0 +2024-08-25 19:43:36,017 INFO [train.py:1114] (0/4) Epoch 11, batch 1150, loss[loss=0.2081, simple_loss=0.2754, pruned_loss=0.05111, ctc_loss=0.09626, over 19585.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2916, pruned_loss=0.06532, ctc_loss=0.1224, over 3827621.95 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:43:37,198 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.797e+02 2.039e+02 2.453e+02 4.580e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-25 19:44:29,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=139104.0, ans=0.04949747468305833 +2024-08-25 19:44:39,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139104.0, ans=0.1 +2024-08-25 19:44:41,837 INFO [train.py:1114] (0/4) Epoch 11, batch 1200, loss[loss=0.2769, simple_loss=0.3301, pruned_loss=0.08174, ctc_loss=0.1507, over 19840.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.2928, pruned_loss=0.06575, ctc_loss=0.1235, over 3823615.73 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:44:54,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=139210.66666666666, ans=0.0 +2024-08-25 19:45:15,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=139264.0, ans=0.025 +2024-08-25 19:45:37,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=139264.0, ans=0.0 +2024-08-25 19:46:04,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139370.66666666666, ans=0.1 +2024-08-25 19:46:08,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=139370.66666666666, ans=0.125 +2024-08-25 19:46:15,640 INFO [train.py:1114] (0/4) Epoch 11, batch 1250, loss[loss=0.2163, simple_loss=0.2809, pruned_loss=0.05549, ctc_loss=0.1016, over 19520.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2926, pruned_loss=0.06519, ctc_loss=0.1226, over 3842516.75 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:46:16,714 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.769e+02 1.992e+02 2.545e+02 3.633e+02, threshold=3.984e+02, percent-clipped=0.0 +2024-08-25 19:46:17,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=139424.0, ans=0.05 +2024-08-25 19:46:17,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=139424.0, ans=0.125 +2024-08-25 19:47:40,574 INFO [train.py:1114] (0/4) Epoch 11, batch 1300, loss[loss=0.274, simple_loss=0.3172, pruned_loss=0.084, ctc_loss=0.157, over 18823.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2922, pruned_loss=0.06514, ctc_loss=0.1225, over 3846394.09 frames. 
], batch size: 76, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:47:53,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=139744.0, ans=0.025 +2024-08-25 19:47:53,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=139744.0, ans=0.2 +2024-08-25 19:48:21,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=139797.33333333334, ans=0.0 +2024-08-25 19:48:21,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=139797.33333333334, ans=0.125 +2024-08-25 19:48:31,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=139850.66666666666, ans=0.04949747468305833 +2024-08-25 19:48:56,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=139904.0, ans=0.125 +2024-08-25 19:48:59,253 INFO [train.py:1114] (0/4) Epoch 11, batch 1350, loss[loss=0.2473, simple_loss=0.3023, pruned_loss=0.0704, ctc_loss=0.1287, over 19769.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2918, pruned_loss=0.06497, ctc_loss=0.1221, over 3857051.96 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:49:01,647 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.851e+02 2.124e+02 2.742e+02 4.665e+02, threshold=4.248e+02, percent-clipped=3.0 +2024-08-25 19:49:29,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=140064.0, ans=0.125 +2024-08-25 19:49:34,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140064.0, ans=0.1 +2024-08-25 19:49:51,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-25 19:50:00,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-25 19:50:07,171 INFO [train.py:1114] (0/4) Epoch 11, batch 1400, loss[loss=0.2027, simple_loss=0.2618, pruned_loss=0.05297, ctc_loss=0.09407, over 19656.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.292, pruned_loss=0.06534, ctc_loss=0.1225, over 3863704.71 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:50:08,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=140224.0, ans=0.0 +2024-08-25 19:50:08,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.96 vs. limit=15.0 +2024-08-25 19:50:40,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.23 vs. 
limit=12.0 +2024-08-25 19:50:52,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140330.66666666666, ans=0.125 +2024-08-25 19:51:04,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140330.66666666666, ans=0.1 +2024-08-25 19:51:08,517 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:51:23,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.27 vs. limit=22.5 +2024-08-25 19:51:38,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=140437.33333333334, ans=0.0 +2024-08-25 19:51:42,657 INFO [train.py:1114] (0/4) Epoch 11, batch 1450, loss[loss=0.2562, simple_loss=0.3027, pruned_loss=0.07657, ctc_loss=0.1415, over 19695.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.292, pruned_loss=0.06504, ctc_loss=0.1221, over 3861968.91 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:51:44,160 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:51:45,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.813e+02 2.052e+02 2.523e+02 4.896e+02, threshold=4.103e+02, percent-clipped=2.0 +2024-08-25 19:51:58,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-25 19:52:08,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-25 19:52:08,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-25 19:52:09,223 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.52 vs. limit=15.0 +2024-08-25 19:53:14,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140704.0, ans=0.125 +2024-08-25 19:53:19,925 INFO [train.py:1114] (0/4) Epoch 11, batch 1500, loss[loss=0.2838, simple_loss=0.3285, pruned_loss=0.08556, ctc_loss=0.1702, over 19582.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.2925, pruned_loss=0.06524, ctc_loss=0.1228, over 3860681.04 frames. 
], batch size: 57, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:53:27,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140757.33333333334, ans=0.0 +2024-08-25 19:53:41,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140757.33333333334, ans=0.1 +2024-08-25 19:53:50,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140810.66666666666, ans=0.125 +2024-08-25 19:54:52,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=140970.66666666666, ans=0.07 +2024-08-25 19:54:58,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=140970.66666666666, ans=0.0 +2024-08-25 19:55:04,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=140970.66666666666, ans=0.0 +2024-08-25 19:55:07,085 INFO [train.py:1114] (0/4) Epoch 11, batch 1550, loss[loss=0.2648, simple_loss=0.3125, pruned_loss=0.07705, ctc_loss=0.1576, over 19578.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.2931, pruned_loss=0.06593, ctc_loss=0.124, over 3845422.91 frames. ], batch size: 60, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:55:10,759 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.421e+02 1.804e+02 2.014e+02 2.422e+02 4.168e+02, threshold=4.028e+02, percent-clipped=1.0 +2024-08-25 19:55:12,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.05 vs. limit=22.5 +2024-08-25 19:55:47,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=141077.33333333334, ans=0.2 +2024-08-25 19:55:58,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=141130.66666666666, ans=0.125 +2024-08-25 19:56:13,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=141130.66666666666, ans=0.125 +2024-08-25 19:57:03,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=141184.0, ans=0.025 +2024-08-25 19:57:19,245 INFO [train.py:1114] (0/4) Epoch 11, batch 1600, loss[loss=0.2156, simple_loss=0.2928, pruned_loss=0.04997, ctc_loss=0.09643, over 19843.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.2925, pruned_loss=0.06559, ctc_loss=0.1233, over 3834287.04 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-25 19:58:03,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=141397.33333333334, ans=0.0 +2024-08-25 19:58:30,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=141504.0, ans=0.125 +2024-08-25 19:58:45,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=15.0 +2024-08-25 19:59:27,402 INFO [train.py:1114] (0/4) Epoch 11, batch 1650, loss[loss=0.2303, simple_loss=0.2951, pruned_loss=0.06074, ctc_loss=0.1103, over 19656.00 frames. 
], tot_loss[loss=0.2366, simple_loss=0.2925, pruned_loss=0.06567, ctc_loss=0.1233, over 3831344.31 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 32.0
+2024-08-25 19:59:29,886 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.768e+02 1.990e+02 2.303e+02 4.438e+02, threshold=3.979e+02, percent-clipped=2.0
+2024-08-25 19:59:49,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=141610.66666666666, ans=0.035
+2024-08-25 19:59:49,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.96 vs. limit=15.0
+2024-08-25 20:00:01,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.69 vs. limit=15.0
+2024-08-25 20:00:06,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=141610.66666666666, ans=0.0
+2024-08-25 20:00:29,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=141717.33333333334, ans=0.125
+2024-08-25 20:01:05,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=141770.66666666666, ans=0.2
+2024-08-25 20:01:17,967 INFO [train.py:1114] (0/4) Epoch 11, batch 1700, loss[loss=0.2242, simple_loss=0.2691, pruned_loss=0.0651, ctc_loss=0.1229, over 19650.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2916, pruned_loss=0.06514, ctc_loss=0.1224, over 3845513.70 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 20:01:35,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=141877.33333333334, ans=0.125
+2024-08-25 20:01:45,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=141930.66666666666, ans=0.125
+2024-08-25 20:02:16,970 INFO [train.py:1114] (0/4) Epoch 11, batch 1750, loss[loss=0.193, simple_loss=0.252, pruned_loss=0.04819, ctc_loss=0.09381, over 19641.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.291, pruned_loss=0.06468, ctc_loss=0.1216, over 3851280.81 frames. ], batch size: 45, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 20:02:20,527 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.814e+02 2.107e+02 2.366e+02 3.890e+02, threshold=4.214e+02, percent-clipped=0.0
+2024-08-25 20:02:59,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142144.0, ans=0.0
+2024-08-25 20:03:13,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=142197.33333333334, ans=15.0
+2024-08-25 20:03:27,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142250.66666666666, ans=0.1
+2024-08-25 20:03:33,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142250.66666666666, ans=0.125
+2024-08-25 20:04:24,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142304.0, ans=0.125
+2024-08-25 20:04:26,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=142357.33333333334, ans=0.125
+2024-08-25 20:04:27,016 INFO [train.py:1114] (0/4) Epoch 11, batch 1800, loss[loss=0.228, simple_loss=0.2977, pruned_loss=0.05735, ctc_loss=0.1091, over 19620.00 frames. ], tot_loss[loss=0.235, simple_loss=0.2914, pruned_loss=0.06491, ctc_loss=0.122, over 3852829.33 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 20:04:45,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.61 vs. limit=15.0
+2024-08-25 20:05:38,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=142517.33333333334, ans=0.125
+2024-08-25 20:05:47,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=142570.66666666666, ans=0.125
+2024-08-25 20:06:13,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142570.66666666666, ans=0.125
+2024-08-25 20:06:15,200 INFO [train.py:1114] (0/4) Epoch 11, batch 1850, loss[loss=0.2233, simple_loss=0.2886, pruned_loss=0.05687, ctc_loss=0.1107, over 19558.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2913, pruned_loss=0.0649, ctc_loss=0.122, over 3855883.92 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0
+2024-08-25 20:06:17,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=142624.0, ans=0.2
+2024-08-25 20:06:18,513 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.849e+02 2.256e+02 2.966e+02 5.642e+02, threshold=4.511e+02, percent-clipped=6.0
+2024-08-25 20:06:19,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=142624.0, ans=0.05
+2024-08-25 20:06:25,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=142677.33333333334, ans=0.04949747468305833
+2024-08-25 20:06:33,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=142677.33333333334, ans=0.025
+2024-08-25 20:06:47,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=142677.33333333334, ans=0.025
+2024-08-25 20:07:00,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=142730.66666666666, ans=0.0
+2024-08-25 20:07:07,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=142730.66666666666, ans=0.125
+2024-08-25 20:07:24,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=142784.0, ans=0.0
+2024-08-25 20:07:51,895 INFO [train.py:1114] (0/4) Epoch 11, batch 1900, loss[loss=0.2329, simple_loss=0.3008, pruned_loss=0.06092, ctc_loss=0.1078, over 19643.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2915, pruned_loss=0.06471, ctc_loss=0.1216, over 3860584.81 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0
+2024-08-25 20:07:56,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=142890.66666666666, ans=0.125
+2024-08-25 20:08:07,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=142944.0, ans=0.09899494936611666
+2024-08-25 20:09:49,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=142997.33333333334, ans=0.125
+2024-08-25 20:28:43,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=143050.66666666666, ans=0.125
+2024-08-25 20:42:44,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=143104.0, ans=0.2
+2024-08-25 20:55:30,006 INFO [train.py:1114] (0/4) Epoch 11, batch 1950, loss[loss=0.2085, simple_loss=0.2806, pruned_loss=0.04923, ctc_loss=0.09483, over 19582.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2926, pruned_loss=0.06477, ctc_loss=0.1216, over 3869804.74 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0
+2024-08-25 21:03:39,813 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.850e+02 2.123e+02 2.695e+02 5.282e+02, threshold=4.246e+02, percent-clipped=2.0
+2024-08-25 21:05:39,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143157.33333333334, ans=0.125
+2024-08-25 21:13:01,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=143210.66666666666, ans=0.0
+2024-08-25 21:29:56,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=143317.33333333334, ans=0.025
+2024-08-25 21:36:26,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143317.33333333334, ans=0.125
+2024-08-25 21:45:06,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=143370.66666666666, ans=0.09899494936611666
+2024-08-25 21:46:38,199 INFO [train.py:1114] (0/4) Epoch 11, batch 2000, loss[loss=0.2224, simple_loss=0.2713, pruned_loss=0.06336, ctc_loss=0.1168, over 19676.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2929, pruned_loss=0.06525, ctc_loss=0.1225, over 3855510.50 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 21:56:49,682 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=6.843e-02
+2024-08-25 22:05:28,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=143530.66666666666, ans=0.07
+2024-08-25 22:14:45,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=143637.33333333334, ans=0.2
+2024-08-25 22:18:48,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143637.33333333334, ans=0.1
+2024-08-25 22:19:42,815 INFO [train.py:1114] (0/4) Epoch 11, batch 2050, loss[loss=0.223, simple_loss=0.2797, pruned_loss=0.05985, ctc_loss=0.1165, over 19730.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.2926, pruned_loss=0.06548, ctc_loss=0.1229, over 3852352.43 frames. ], batch size: 47, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:19:45,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=143690.66666666666, ans=0.0
+2024-08-25 22:20:13,491 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.838e+02 2.216e+02 2.724e+02 4.008e+02, threshold=4.432e+02, percent-clipped=0.0
+2024-08-25 22:20:41,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143690.66666666666, ans=0.125
+2024-08-25 22:24:23,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143744.0, ans=0.1
+2024-08-25 22:24:52,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=143797.33333333334, ans=0.0
+2024-08-25 22:30:50,492 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 22:32:23,598 INFO [train.py:1114] (0/4) Epoch 11, batch 2100, loss[loss=0.2195, simple_loss=0.2789, pruned_loss=0.05835, ctc_loss=0.1085, over 19789.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.2917, pruned_loss=0.06485, ctc_loss=0.1219, over 3859782.18 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:35:16,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144010.66666666666, ans=0.125
+2024-08-25 22:35:17,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=144010.66666666666, ans=0.2
+2024-08-25 22:35:19,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=144010.66666666666, ans=0.95
+2024-08-25 22:35:47,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=144064.0, ans=0.0
+2024-08-25 22:35:47,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.92 vs. limit=22.5
+2024-08-25 22:37:32,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=144117.33333333334, ans=0.125
+2024-08-25 22:38:44,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=144170.66666666666, ans=0.0
+2024-08-25 22:39:07,558 INFO [train.py:1114] (0/4) Epoch 11, batch 2150, loss[loss=0.2493, simple_loss=0.2984, pruned_loss=0.0739, ctc_loss=0.1309, over 19591.00 frames. ], tot_loss[loss=0.235, simple_loss=0.2917, pruned_loss=0.06486, ctc_loss=0.1217, over 3869376.40 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:39:51,932 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.523e+02 1.804e+02 2.068e+02 2.942e+02 5.639e+02, threshold=4.136e+02, percent-clipped=4.0
+2024-08-25 22:41:18,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.52 vs. limit=6.0
+2024-08-25 22:41:30,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.97 vs. limit=15.0
+2024-08-25 22:42:30,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=144330.66666666666, ans=0.125
+2024-08-25 22:43:27,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144437.33333333334, ans=0.125
+2024-08-25 22:44:02,551 INFO [train.py:1114] (0/4) Epoch 11, batch 2200, loss[loss=0.2403, simple_loss=0.3045, pruned_loss=0.06425, ctc_loss=0.1193, over 19588.00 frames. ], tot_loss[loss=0.234, simple_loss=0.291, pruned_loss=0.06438, ctc_loss=0.1207, over 3869041.31 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:44:32,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144490.66666666666, ans=0.0
+2024-08-25 22:47:57,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144650.66666666666, ans=0.1
+2024-08-25 22:49:03,020 INFO [train.py:1114] (0/4) Epoch 11, batch 2250, loss[loss=0.2291, simple_loss=0.2884, pruned_loss=0.06179, ctc_loss=0.1154, over 19619.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.291, pruned_loss=0.06424, ctc_loss=0.1204, over 3868584.66 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:49:09,615 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.818e+02 2.110e+02 2.782e+02 6.628e+02, threshold=4.220e+02, percent-clipped=3.0
+2024-08-25 22:49:29,244 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 22:49:40,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=144810.66666666666, ans=0.0
+2024-08-25 22:50:03,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=144917.33333333334, ans=0.0
+2024-08-25 22:50:03,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=144917.33333333334, ans=0.125
+2024-08-25 22:50:10,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=144917.33333333334, ans=0.025
+2024-08-25 22:50:46,978 INFO [train.py:1114] (0/4) Epoch 11, batch 2300, loss[loss=0.2334, simple_loss=0.2797, pruned_loss=0.07007, ctc_loss=0.1175, over 19508.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2902, pruned_loss=0.06424, ctc_loss=0.1204, over 3861329.49 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:51:17,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=145024.0, ans=0.025
+2024-08-25 22:51:18,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145024.0, ans=0.1
+2024-08-25 22:51:31,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=145077.33333333334, ans=0.09899494936611666
+2024-08-25 22:51:32,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=145077.33333333334, ans=0.125
+2024-08-25 22:51:39,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=22.5
+2024-08-25 22:52:42,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=145237.33333333334, ans=0.1
+2024-08-25 22:52:51,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=145237.33333333334, ans=0.025
+2024-08-25 22:52:55,290 INFO [train.py:1114] (0/4) Epoch 11, batch 2350, loss[loss=0.2301, simple_loss=0.3022, pruned_loss=0.05776, ctc_loss=0.1061, over 19678.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.29, pruned_loss=0.06436, ctc_loss=0.1207, over 3863696.93 frames. ], batch size: 63, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:52:56,719 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.67 vs. limit=15.0
+2024-08-25 22:53:01,247 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 1.788e+02 2.141e+02 2.380e+02 3.835e+02, threshold=4.282e+02, percent-clipped=0.0
+2024-08-25 22:53:27,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=145344.0, ans=0.2
+2024-08-25 22:53:30,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145344.0, ans=0.1
+2024-08-25 22:53:44,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.46 vs. limit=22.5
+2024-08-25 22:54:11,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.89 vs. limit=15.0
+2024-08-25 22:54:16,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=145504.0, ans=0.125
+2024-08-25 22:54:26,026 INFO [train.py:1114] (0/4) Epoch 11, batch 2400, loss[loss=0.2543, simple_loss=0.3224, pruned_loss=0.06836, ctc_loss=0.124, over 19327.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2925, pruned_loss=0.06544, ctc_loss=0.1225, over 3858441.92 frames. ], batch size: 67, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:54:27,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=145557.33333333334, ans=0.0
+2024-08-25 22:54:28,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0
+2024-08-25 22:54:43,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=145610.66666666666, ans=0.125
+2024-08-25 22:54:47,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=145610.66666666666, ans=6.0
+2024-08-25 22:55:11,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145717.33333333334, ans=0.1
+2024-08-25 22:55:20,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=145717.33333333334, ans=0.0
+2024-08-25 22:55:44,076 INFO [train.py:1114] (0/4) Epoch 11, batch 2450, loss[loss=0.3518, simple_loss=0.3607, pruned_loss=0.1214, ctc_loss=0.2499, over 13466.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.297, pruned_loss=0.06881, ctc_loss=0.1294, over 3734133.43 frames. ], batch size: 141, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:56:00,768 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.910e+02 2.208e+02 2.594e+02 5.356e+02, threshold=4.415e+02, percent-clipped=1.0
+2024-08-25 22:56:01,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.00 vs. limit=10.0
+2024-08-25 22:56:53,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.24 vs. limit=22.5
+2024-08-25 22:56:56,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.57 vs. limit=10.0
+2024-08-25 22:57:26,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.26 vs. limit=22.5
+2024-08-25 22:57:27,679 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-11.pt
+2024-08-25 22:58:44,105 INFO [train.py:1114] (0/4) Epoch 12, batch 0, loss[loss=0.2364, simple_loss=0.2812, pruned_loss=0.06971, ctc_loss=0.1304, over 19444.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2812, pruned_loss=0.06971, ctc_loss=0.1304, over 19444.00 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 22:58:44,106 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-25 23:00:02,934 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.1972, simple_loss=0.2841, pruned_loss=0.04086, ctc_loss=0.07109, over 944034.00 frames.
+2024-08-25 23:00:02,935 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB
+2024-08-25 23:00:33,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=146138.66666666666, ans=0.125
+2024-08-25 23:01:08,423 INFO [train.py:1114] (0/4) Epoch 12, batch 50, loss[loss=0.2002, simple_loss=0.2577, pruned_loss=0.05189, ctc_loss=0.09718, over 19701.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2961, pruned_loss=0.06816, ctc_loss=0.129, over 843793.79 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:01:21,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=146352.0, ans=0.125
+2024-08-25 23:01:26,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.43 vs. limit=22.5
+2024-08-25 23:01:27,718 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.810e+02 2.073e+02 2.436e+02 4.057e+02, threshold=4.147e+02, percent-clipped=0.0
+2024-08-25 23:01:37,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=146405.33333333334, ans=0.125
+2024-08-25 23:01:50,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=146458.66666666666, ans=0.125
+2024-08-25 23:02:22,999 INFO [train.py:1114] (0/4) Epoch 12, batch 100, loss[loss=0.2136, simple_loss=0.274, pruned_loss=0.05636, ctc_loss=0.1012, over 19716.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.2957, pruned_loss=0.06632, ctc_loss=0.1253, over 1498987.01 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:02:52,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146618.66666666666, ans=0.125
+2024-08-25 23:02:55,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=146672.0, ans=0.05
+2024-08-25 23:03:08,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=146672.0, ans=0.1
+2024-08-25 23:03:10,063 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:03:18,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=146725.33333333334, ans=0.0
+2024-08-25 23:03:19,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=146725.33333333334, ans=10.0
+2024-08-25 23:03:24,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=146725.33333333334, ans=0.0
+2024-08-25 23:03:39,241 INFO [train.py:1114] (0/4) Epoch 12, batch 150, loss[loss=0.2283, simple_loss=0.2775, pruned_loss=0.06459, ctc_loss=0.1248, over 19734.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2919, pruned_loss=0.06435, ctc_loss=0.1213, over 2027803.58 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:03:39,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.29 vs. limit=15.0
+2024-08-25 23:04:09,862 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.659e+02 1.880e+02 2.314e+02 3.650e+02, threshold=3.760e+02, percent-clipped=0.0
+2024-08-25 23:04:12,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=146885.33333333334, ans=0.025
+2024-08-25 23:04:19,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=146938.66666666666, ans=0.125
+2024-08-25 23:04:30,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=146938.66666666666, ans=0.125
+2024-08-25 23:04:44,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146992.0, ans=0.125
+2024-08-25 23:04:51,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146992.0, ans=0.125
+2024-08-25 23:04:55,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=147045.33333333334, ans=0.025
+2024-08-25 23:05:07,039 INFO [train.py:1114] (0/4) Epoch 12, batch 200, loss[loss=0.2644, simple_loss=0.3115, pruned_loss=0.07933, ctc_loss=0.1464, over 18565.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2897, pruned_loss=0.06325, ctc_loss=0.1192, over 2435789.11 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:05:13,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=147098.66666666666, ans=0.125
+2024-08-25 23:06:20,735 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:06:24,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.30 vs. limit=6.0
+2024-08-25 23:06:39,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147258.66666666666, ans=0.0
+2024-08-25 23:06:40,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.64 vs. limit=15.0
+2024-08-25 23:06:51,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=147312.0, ans=0.05
+2024-08-25 23:07:01,819 INFO [train.py:1114] (0/4) Epoch 12, batch 250, loss[loss=0.2464, simple_loss=0.3045, pruned_loss=0.06939, ctc_loss=0.1237, over 19348.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.2901, pruned_loss=0.06302, ctc_loss=0.1187, over 2755129.31 frames. ], batch size: 67, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:07:22,627 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.308e+02 1.825e+02 2.154e+02 2.499e+02 3.884e+02, threshold=4.307e+02, percent-clipped=2.0
+2024-08-25 23:07:33,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=147472.0, ans=0.2
+2024-08-25 23:07:49,169 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:08:13,949 INFO [train.py:1114] (0/4) Epoch 12, batch 300, loss[loss=0.2722, simple_loss=0.3118, pruned_loss=0.08555, ctc_loss=0.1539, over 19533.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.2898, pruned_loss=0.06314, ctc_loss=0.1188, over 3000376.94 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:08:20,280 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0
+2024-08-25 23:08:33,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=147685.33333333334, ans=0.5
+2024-08-25 23:08:54,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=147792.0, ans=0.0
+2024-08-25 23:08:57,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147792.0, ans=0.125
+2024-08-25 23:08:59,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=147792.0, ans=0.0
+2024-08-25 23:09:03,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147845.33333333334, ans=0.125
+2024-08-25 23:09:17,484 INFO [train.py:1114] (0/4) Epoch 12, batch 350, loss[loss=0.2255, simple_loss=0.2728, pruned_loss=0.0658, ctc_loss=0.1165, over 19738.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.29, pruned_loss=0.06304, ctc_loss=0.1186, over 3190558.33 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:09:25,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0
+2024-08-25 23:09:36,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.749e+02 2.047e+02 2.740e+02 4.170e+02, threshold=4.094e+02, percent-clipped=0.0
+2024-08-25 23:09:37,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0
+2024-08-25 23:09:45,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=148005.33333333334, ans=0.0
+2024-08-25 23:09:56,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=148058.66666666666, ans=0.07
+2024-08-25 23:09:58,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=148058.66666666666, ans=0.0
+2024-08-25 23:10:01,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148058.66666666666, ans=0.125
+2024-08-25 23:10:12,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148058.66666666666, ans=0.1
+2024-08-25 23:10:13,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=148112.0, ans=0.09899494936611666
+2024-08-25 23:10:25,926 INFO [train.py:1114] (0/4) Epoch 12, batch 400, loss[loss=0.2552, simple_loss=0.3113, pruned_loss=0.07254, ctc_loss=0.1353, over 19522.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2889, pruned_loss=0.06231, ctc_loss=0.1172, over 3341150.43 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:10:58,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=148272.0, ans=0.2
+2024-08-25 23:11:16,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=148325.33333333334, ans=0.02
+2024-08-25 23:11:27,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148325.33333333334, ans=0.1
+2024-08-25 23:12:05,920 INFO [train.py:1114] (0/4) Epoch 12, batch 450, loss[loss=0.2561, simple_loss=0.3106, pruned_loss=0.0729, ctc_loss=0.1396, over 19609.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2893, pruned_loss=0.06245, ctc_loss=0.1175, over 3450543.39 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:12:16,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=148432.0, ans=0.0
+2024-08-25 23:12:28,367 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.830e+02 2.201e+02 2.765e+02 4.484e+02, threshold=4.403e+02, percent-clipped=1.0
+2024-08-25 23:12:56,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148645.33333333334, ans=0.1
+2024-08-25 23:13:01,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.22 vs. limit=15.0
+2024-08-25 23:13:21,160 INFO [train.py:1114] (0/4) Epoch 12, batch 500, loss[loss=0.2295, simple_loss=0.2973, pruned_loss=0.05947, ctc_loss=0.107, over 19667.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2886, pruned_loss=0.06236, ctc_loss=0.1172, over 3546574.60 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:13:21,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=148698.66666666666, ans=0.125
+2024-08-25 23:13:21,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=148698.66666666666, ans=0.0
+2024-08-25 23:13:22,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=148698.66666666666, ans=0.125
+2024-08-25 23:13:27,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=148698.66666666666, ans=0.2
+2024-08-25 23:13:31,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=148698.66666666666, ans=0.07
+2024-08-25 23:13:33,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.26 vs. limit=6.0
+2024-08-25 23:13:43,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.54 vs. limit=15.0
+2024-08-25 23:13:58,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=148805.33333333334, ans=0.0
+2024-08-25 23:14:12,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.15 vs. limit=15.0
+2024-08-25 23:14:41,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=148858.66666666666, ans=0.2
+2024-08-25 23:14:44,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=148912.0, ans=0.0
+2024-08-25 23:14:45,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0
+2024-08-25 23:14:59,250 INFO [train.py:1114] (0/4) Epoch 12, batch 550, loss[loss=0.2719, simple_loss=0.317, pruned_loss=0.08293, ctc_loss=0.1522, over 19334.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2885, pruned_loss=0.06249, ctc_loss=0.1174, over 3608185.56 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:15:42,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.692e+02 2.049e+02 2.499e+02 4.022e+02, threshold=4.098e+02, percent-clipped=0.0
+2024-08-25 23:16:01,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=149072.0, ans=0.125
+2024-08-25 23:16:46,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0
+2024-08-25 23:16:54,716 INFO [train.py:1114] (0/4) Epoch 12, batch 600, loss[loss=0.2767, simple_loss=0.3262, pruned_loss=0.08223, ctc_loss=0.1569, over 19414.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2888, pruned_loss=0.06268, ctc_loss=0.1178, over 3665590.69 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:17:22,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=149285.33333333334, ans=0.125
+2024-08-25 23:17:25,003 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-28000.pt
+2024-08-25 23:18:46,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149285.33333333334, ans=0.1
+2024-08-25 23:18:52,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0
+2024-08-25 23:19:03,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=149392.0, ans=0.0
+2024-08-25 23:19:22,625 INFO [train.py:1114] (0/4) Epoch 12, batch 650, loss[loss=0.2112, simple_loss=0.2836, pruned_loss=0.05065, ctc_loss=0.09372, over 19757.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.288, pruned_loss=0.06225, ctc_loss=0.1171, over 3715431.52 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:19:40,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=149552.0, ans=0.2
+2024-08-25 23:19:42,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=149552.0, ans=0.0
+2024-08-25 23:19:48,484 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.406e+02 1.911e+02 2.346e+02 2.911e+02 5.072e+02, threshold=4.691e+02, percent-clipped=6.0
+2024-08-25 23:19:51,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=149552.0, ans=0.025
+2024-08-25 23:20:29,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=149658.66666666666, ans=0.5
+2024-08-25 23:20:37,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=149712.0, ans=0.125
+2024-08-25 23:20:43,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149712.0, ans=0.125
+2024-08-25 23:20:46,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=149712.0, ans=0.0
+2024-08-25 23:20:49,397 INFO [train.py:1114] (0/4) Epoch 12, batch 700, loss[loss=0.2096, simple_loss=0.2759, pruned_loss=0.05194, ctc_loss=0.09859, over 19738.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2884, pruned_loss=0.06232, ctc_loss=0.1171, over 3747405.65 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:20:58,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=149765.33333333334, ans=10.0
+2024-08-25 23:21:04,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=149818.66666666666, ans=0.2
+2024-08-25 23:21:14,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=149872.0, ans=0.0
+2024-08-25 23:21:15,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=149872.0, ans=0.0
+2024-08-25 23:21:44,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=149978.66666666666, ans=0.125
+2024-08-25 23:21:51,364 INFO [train.py:1114] (0/4) Epoch 12, batch 750, loss[loss=0.2193, simple_loss=0.2852, pruned_loss=0.05536, ctc_loss=0.1071, over 19491.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2877, pruned_loss=0.06193, ctc_loss=0.1164, over 3774030.89 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:22:14,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=150085.33333333334, ans=0.05
+2024-08-25 23:22:20,742 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.992e+02 2.563e+02 3.460e+02 5.252e+02, threshold=5.125e+02, percent-clipped=3.0
+2024-08-25 23:22:25,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=150085.33333333334, ans=0.125
+2024-08-25 23:22:43,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=150138.66666666666, ans=0.1
+2024-08-25 23:23:04,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0
+2024-08-25 23:23:04,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.71 vs. limit=15.0
+2024-08-25 23:23:10,619 INFO [train.py:1114] (0/4) Epoch 12, batch 800, loss[loss=0.239, simple_loss=0.2895, pruned_loss=0.06815, ctc_loss=0.1307, over 19782.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2877, pruned_loss=0.06194, ctc_loss=0.1163, over 3796126.48 frames. ], batch size: 49, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:23:18,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150298.66666666666, ans=0.1
+2024-08-25 23:23:20,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. limit=6.0
+2024-08-25 23:23:32,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=150405.33333333334, ans=0.0
+2024-08-25 23:23:36,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=150405.33333333334, ans=0.125
+2024-08-25 23:24:03,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.77 vs. limit=22.5
+2024-08-25 23:24:07,596 INFO [train.py:1114] (0/4) Epoch 12, batch 850, loss[loss=0.2019, simple_loss=0.2765, pruned_loss=0.04583, ctc_loss=0.0894, over 19663.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.287, pruned_loss=0.06164, ctc_loss=0.1157, over 3814370.38 frames. ], batch size: 59, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:24:15,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150565.33333333334, ans=0.0
+2024-08-25 23:24:30,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.376e+02 1.732e+02 2.149e+02 2.756e+02 4.869e+02, threshold=4.297e+02, percent-clipped=0.0
+2024-08-25 23:25:39,226 INFO [train.py:1114] (0/4) Epoch 12, batch 900, loss[loss=0.2283, simple_loss=0.2775, pruned_loss=0.06514, ctc_loss=0.1222, over 19826.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2877, pruned_loss=0.06231, ctc_loss=0.1172, over 3818421.65 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:25:43,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=150832.0, ans=0.5
+2024-08-25 23:25:47,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.37 vs. limit=10.0
+2024-08-25 23:26:19,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=22.5
+2024-08-25 23:26:28,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=150938.66666666666, ans=0.125
+2024-08-25 23:26:41,953 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=15.0
+2024-08-25 23:26:49,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.68 vs. limit=15.0
+2024-08-25 23:27:07,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=151098.66666666666, ans=0.05
+2024-08-25 23:27:21,987 INFO [train.py:1114] (0/4) Epoch 12, batch 950, loss[loss=0.227, simple_loss=0.276, pruned_loss=0.0641, ctc_loss=0.1242, over 19519.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2879, pruned_loss=0.06242, ctc_loss=0.1174, over 3821068.99 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:27:32,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=151098.66666666666, ans=0.0
+2024-08-25 23:27:39,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0
+2024-08-25 23:27:39,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=151098.66666666666, ans=0.0
+2024-08-25 23:27:44,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=15.0
+2024-08-25 23:27:47,805 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.461e+02 1.727e+02 2.047e+02 2.468e+02 3.873e+02, threshold=4.093e+02, percent-clipped=0.0
+2024-08-25 23:28:00,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=151152.0, ans=0.0
+2024-08-25 23:28:03,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151205.33333333334, ans=0.0
+2024-08-25 23:28:31,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=151258.66666666666, ans=0.025
+2024-08-25 23:28:45,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=151312.0, ans=10.0
+2024-08-25 23:28:55,957 INFO [train.py:1114] (0/4) Epoch 12, batch 1000, loss[loss=0.23, simple_loss=0.2846, pruned_loss=0.06349, ctc_loss=0.1211, over 19850.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2886, pruned_loss=0.06273, ctc_loss=0.118, over 3816496.46 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:30:03,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=151418.66666666666, ans=0.05
+2024-08-25 23:30:03,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=151418.66666666666, ans=0.0
+2024-08-25 23:30:14,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=151418.66666666666, ans=0.025
+2024-08-25 23:30:15,991 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:30:30,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=151525.33333333334, ans=0.125
+2024-08-25 23:30:37,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0
+2024-08-25 23:30:38,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151525.33333333334, ans=0.0
+2024-08-25 23:30:52,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=151578.66666666666, ans=0.125
+2024-08-25 23:30:55,451 INFO [train.py:1114] (0/4) Epoch 12, batch 1050, loss[loss=0.2224, simple_loss=0.2853, pruned_loss=0.05718, ctc_loss=0.1126, over 19841.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2875, pruned_loss=0.06226, ctc_loss=0.1169, over 3823111.14 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:31:06,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151632.0, ans=0.125
+2024-08-25 23:31:14,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.748e+02 2.222e+02 2.883e+02 4.562e+02, threshold=4.445e+02, percent-clipped=3.0
+2024-08-25 23:31:45,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=151845.33333333334, ans=0.035
+2024-08-25 23:31:50,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151845.33333333334, ans=0.125
+2024-08-25 23:32:14,291 INFO [train.py:1114] (0/4) Epoch 12, batch 1100, loss[loss=0.224, simple_loss=0.2788, pruned_loss=0.0618, ctc_loss=0.1139, over 19597.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2875, pruned_loss=0.06224, ctc_loss=0.1169, over 3830721.63 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:32:14,611 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:32:23,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=151898.66666666666, ans=0.0
+2024-08-25 23:32:36,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=151952.0, ans=0.2
+2024-08-25 23:32:49,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=152005.33333333334, ans=0.125
+2024-08-25 23:32:54,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=152058.66666666666, ans=15.0
+2024-08-25 23:33:01,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=152058.66666666666, ans=0.125
+2024-08-25 23:33:01,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=152058.66666666666, ans=0.125
+2024-08-25 23:33:32,438 INFO [train.py:1114] (0/4) Epoch 12, batch 1150, loss[loss=0.2321, simple_loss=0.2879, pruned_loss=0.06385, ctc_loss=0.1215, over 19581.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.288, pruned_loss=0.06266, ctc_loss=0.1176, over 3829672.44 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:33:42,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0
+2024-08-25 23:33:43,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152165.33333333334, ans=0.1
+2024-08-25 23:34:07,251 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.763e+02 2.002e+02 2.335e+02 5.298e+02, threshold=4.005e+02, percent-clipped=1.0
+2024-08-25 23:34:09,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=15.0
+2024-08-25 23:34:19,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=152272.0, ans=0.025
+2024-08-25 23:34:28,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0
+2024-08-25 23:34:46,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=152378.66666666666, ans=0.0
+2024-08-25 23:34:59,071 INFO [train.py:1114] (0/4) Epoch 12, batch 1200, loss[loss=0.2404, simple_loss=0.3029, pruned_loss=0.06295, ctc_loss=0.1302, over 19837.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.289, pruned_loss=0.06295, ctc_loss=0.1186, over 3826395.84 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:35:14,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=152432.0, ans=0.025
+2024-08-25 23:35:14,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.44 vs. limit=10.0
+2024-08-25 23:35:23,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=152485.33333333334, ans=0.125
+2024-08-25 23:35:32,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=152538.66666666666, ans=0.025
+2024-08-25 23:35:44,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.75 vs. limit=15.0
+2024-08-25 23:35:52,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152592.0, ans=0.125
+2024-08-25 23:36:09,982 INFO [train.py:1114] (0/4) Epoch 12, batch 1250, loss[loss=0.2488, simple_loss=0.3091, pruned_loss=0.06958, ctc_loss=0.1232, over 19529.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2894, pruned_loss=0.06277, ctc_loss=0.118, over 3844268.22 frames. ], batch size: 61, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:36:23,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0
+2024-08-25 23:36:25,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.18 vs. limit=15.0
+2024-08-25 23:36:34,031 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.907e+02 2.265e+02 2.785e+02 4.753e+02, threshold=4.530e+02, percent-clipped=2.0
+2024-08-25 23:36:34,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=152752.0, ans=0.125
+2024-08-25 23:36:35,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0
+2024-08-25 23:36:42,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.83 vs. limit=10.0
+2024-08-25 23:37:18,965 INFO [train.py:1114] (0/4) Epoch 12, batch 1300, loss[loss=0.2658, simple_loss=0.3092, pruned_loss=0.08124, ctc_loss=0.1498, over 18869.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2886, pruned_loss=0.06247, ctc_loss=0.1176, over 3847329.36 frames. ], batch size: 76, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:37:31,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=152965.33333333334, ans=15.0
+2024-08-25 23:37:52,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.94 vs. limit=22.5
+2024-08-25 23:37:55,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153072.0, ans=0.125
+2024-08-25 23:38:03,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=153125.33333333334, ans=0.0
+2024-08-25 23:38:28,986 INFO [train.py:1114] (0/4) Epoch 12, batch 1350, loss[loss=0.2469, simple_loss=0.3051, pruned_loss=0.06805, ctc_loss=0.1315, over 19756.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2881, pruned_loss=0.06197, ctc_loss=0.1167, over 3858031.15 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:38:40,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0
+2024-08-25 23:38:46,294 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.300e+02 1.707e+02 2.039e+02 2.408e+02 4.402e+02, threshold=4.078e+02, percent-clipped=0.0
+2024-08-25 23:38:47,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=153285.33333333334, ans=0.2
+2024-08-25 23:39:01,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153338.66666666666, ans=0.1
+2024-08-25 23:39:15,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=153392.0, ans=0.2
+2024-08-25 23:39:33,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=153445.33333333334, ans=0.0
+2024-08-25 23:39:37,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=153445.33333333334, ans=0.07
+2024-08-25 23:39:43,079 INFO [train.py:1114] (0/4) Epoch 12, batch 1400, loss[loss=0.1836, simple_loss=0.2485, pruned_loss=0.04383, ctc_loss=0.07755, over 19661.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2876, pruned_loss=0.06169, ctc_loss=0.116, over 3864670.14 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:40:48,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=153658.66666666666, ans=0.025
+2024-08-25 23:40:52,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=153712.0, ans=0.0
+2024-08-25 23:40:57,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0
+2024-08-25 23:41:07,359 INFO [train.py:1114] (0/4) Epoch 12, batch 1450, loss[loss=0.2582, simple_loss=0.3093, pruned_loss=0.07555, ctc_loss=0.1402, over 19681.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2881, pruned_loss=0.06194, ctc_loss=0.1165, over 3862329.37 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:41:23,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.74 vs. limit=15.0
+2024-08-25 23:41:27,989 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.446e+02 1.773e+02 2.135e+02 2.639e+02 4.435e+02, threshold=4.270e+02, percent-clipped=2.0
+2024-08-25 23:42:43,082 INFO [train.py:1114] (0/4) Epoch 12, batch 1500, loss[loss=0.2134, simple_loss=0.2835, pruned_loss=0.05238, ctc_loss=0.09619, over 19579.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2883, pruned_loss=0.06187, ctc_loss=0.1163, over 3861668.37 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:43:10,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=154085.33333333334, ans=0.125
+2024-08-25 23:43:10,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.55 vs. limit=15.0
+2024-08-25 23:43:20,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=154138.66666666666, ans=0.025
+2024-08-25 23:43:55,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=154192.0, ans=0.0
+2024-08-25 23:44:09,930 INFO [train.py:1114] (0/4) Epoch 12, batch 1550, loss[loss=0.2599, simple_loss=0.3142, pruned_loss=0.07407, ctc_loss=0.1436, over 19607.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2884, pruned_loss=0.06196, ctc_loss=0.1166, over 3846894.07 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:44:10,209 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:44:16,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154298.66666666666, ans=0.1
+2024-08-25 23:44:43,865 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.860e+02 2.194e+02 2.828e+02 4.590e+02, threshold=4.388e+02, percent-clipped=1.0
+2024-08-25 23:45:05,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154405.33333333334, ans=0.1
+2024-08-25 23:46:12,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=12.0
+2024-08-25 23:46:23,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154458.66666666666, ans=0.1
+2024-08-25 23:46:27,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.74 vs. limit=22.5
+2024-08-25 23:46:37,567 INFO [train.py:1114] (0/4) Epoch 12, batch 1600, loss[loss=0.2631, simple_loss=0.3153, pruned_loss=0.07653, ctc_loss=0.1445, over 19819.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2884, pruned_loss=0.06194, ctc_loss=0.1166, over 3835404.97 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:46:39,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154565.33333333334, ans=0.125
+2024-08-25 23:46:43,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=154565.33333333334, ans=0.125
+2024-08-25 23:48:01,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.75 vs. limit=22.5
+2024-08-25 23:48:12,948 INFO [train.py:1114] (0/4) Epoch 12, batch 1650, loss[loss=0.2288, simple_loss=0.3001, pruned_loss=0.05635, ctc_loss=0.1118, over 19658.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2882, pruned_loss=0.06184, ctc_loss=0.1167, over 3832233.24 frames. ], batch size: 59, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:48:13,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=154832.0, ans=0.125
+2024-08-25 23:48:14,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=154832.0, ans=0.0
+2024-08-25 23:48:25,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.33 vs. limit=15.0
+2024-08-25 23:48:27,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=154885.33333333334, ans=0.2
+2024-08-25 23:48:32,098 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:48:32,982 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.751e+02 2.060e+02 2.481e+02 4.497e+02, threshold=4.120e+02, percent-clipped=1.0
+2024-08-25 23:48:34,477 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:48:57,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154992.0, ans=0.1
+2024-08-25 23:49:17,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=155045.33333333334, ans=0.0
+2024-08-25 23:49:18,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=155098.66666666666, ans=0.0
+2024-08-25 23:49:19,225 INFO [train.py:1114] (0/4) Epoch 12, batch 1700, loss[loss=0.2128, simple_loss=0.2591, pruned_loss=0.06105, ctc_loss=0.1112, over 19678.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2878, pruned_loss=0.06149, ctc_loss=0.1159, over 3846671.64 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:49:37,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=155152.0, ans=0.125
+2024-08-25 23:49:43,789 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:50:19,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=155258.66666666666, ans=0.09899494936611666
+2024-08-25 23:50:36,465 INFO [train.py:1114] (0/4) Epoch 12, batch 1750, loss[loss=0.1894, simple_loss=0.2513, pruned_loss=0.04628, ctc_loss=0.08734, over 19690.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.287, pruned_loss=0.06111, ctc_loss=0.1152, over 3852665.87 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:51:09,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0
+2024-08-25 23:51:09,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.88 vs.
limit=15.0 +2024-08-25 23:51:12,450 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.691e+02 1.944e+02 2.310e+02 4.068e+02, threshold=3.888e+02, percent-clipped=0.0 +2024-08-25 23:51:17,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=155418.66666666666, ans=0.125 +2024-08-25 23:51:23,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155472.0, ans=0.125 +2024-08-25 23:51:25,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=155472.0, ans=0.0 +2024-08-25 23:51:42,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=155525.33333333334, ans=0.0 +2024-08-25 23:51:48,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=155578.66666666666, ans=0.125 +2024-08-25 23:51:55,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155578.66666666666, ans=0.0 +2024-08-25 23:52:03,819 INFO [train.py:1114] (0/4) Epoch 12, batch 1800, loss[loss=0.2499, simple_loss=0.3089, pruned_loss=0.07006, ctc_loss=0.1268, over 19602.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2875, pruned_loss=0.0614, ctc_loss=0.1155, over 3853341.67 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:52:47,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.18 vs. limit=15.0 +2024-08-25 23:52:50,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=155738.66666666666, ans=0.125 +2024-08-25 23:53:17,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=155792.0, ans=0.0 +2024-08-25 23:53:18,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=155792.0, ans=0.0 +2024-08-25 23:53:18,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=155792.0, ans=0.04949747468305833 +2024-08-25 23:53:29,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155792.0, ans=0.1 +2024-08-25 23:53:37,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=155845.33333333334, ans=0.025 +2024-08-25 23:54:05,056 INFO [train.py:1114] (0/4) Epoch 12, batch 1850, loss[loss=0.2305, simple_loss=0.2916, pruned_loss=0.06141, ctc_loss=0.1163, over 19579.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.2868, pruned_loss=0.06105, ctc_loss=0.1146, over 3854769.03 frames. 
], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:54:25,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155898.66666666666, ans=0.125 +2024-08-25 23:54:44,938 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.455e+02 1.785e+02 2.050e+02 2.712e+02 4.249e+02, threshold=4.100e+02, percent-clipped=1.0 +2024-08-25 23:54:56,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=155952.0, ans=0.0 +2024-08-25 23:55:11,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156005.33333333334, ans=0.1 +2024-08-25 23:55:19,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.66 vs. limit=22.5 +2024-08-25 23:55:24,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=156058.66666666666, ans=0.04949747468305833 +2024-08-25 23:55:37,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=156112.0, ans=0.2 +2024-08-25 23:56:02,306 INFO [train.py:1114] (0/4) Epoch 12, batch 1900, loss[loss=0.2393, simple_loss=0.2983, pruned_loss=0.0649, ctc_loss=0.1261, over 19655.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.288, pruned_loss=0.06168, ctc_loss=0.1158, over 3859826.76 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:56:20,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=156165.33333333334, ans=0.0 +2024-08-25 23:56:49,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=156272.0, ans=0.2 +2024-08-25 23:57:00,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=156272.0, ans=0.0 +2024-08-25 23:57:03,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.62 vs. limit=6.0 +2024-08-25 23:57:18,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.17 vs. limit=15.0 +2024-08-25 23:57:31,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=156325.33333333334, ans=0.04949747468305833 +2024-08-25 23:57:35,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156378.66666666666, ans=0.1 +2024-08-25 23:58:05,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=156378.66666666666, ans=0.125 +2024-08-25 23:58:28,732 INFO [train.py:1114] (0/4) Epoch 12, batch 1950, loss[loss=0.2328, simple_loss=0.2879, pruned_loss=0.06529, ctc_loss=0.1179, over 19587.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2893, pruned_loss=0.0621, ctc_loss=0.1165, over 3869081.57 frames. 
], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:58:31,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=156432.0, ans=0.0 +2024-08-25 23:59:03,816 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.459e+02 1.700e+02 2.031e+02 2.417e+02 3.778e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-08-25 23:59:31,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156592.0, ans=0.125 +2024-08-25 23:59:51,579 INFO [train.py:1114] (0/4) Epoch 12, batch 2000, loss[loss=0.1799, simple_loss=0.2418, pruned_loss=0.04368, ctc_loss=0.0765, over 19697.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2896, pruned_loss=0.06233, ctc_loss=0.117, over 3855269.57 frames. ], batch size: 45, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:59:56,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=156698.66666666666, ans=0.2 +2024-08-26 00:00:00,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=156698.66666666666, ans=0.125 +2024-08-26 00:01:28,934 INFO [train.py:1114] (0/4) Epoch 12, batch 2050, loss[loss=0.2093, simple_loss=0.2629, pruned_loss=0.05739, ctc_loss=0.1025, over 19728.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2888, pruned_loss=0.06226, ctc_loss=0.1171, over 3851162.19 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:01:46,859 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.778e+02 1.977e+02 2.412e+02 4.440e+02, threshold=3.953e+02, percent-clipped=1.0 +2024-08-26 00:02:05,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=157072.0, ans=0.125 +2024-08-26 00:02:16,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=157072.0, ans=0.025 +2024-08-26 00:02:19,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=157072.0, ans=0.125 +2024-08-26 00:02:33,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=157125.33333333334, ans=0.2 +2024-08-26 00:02:47,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157178.66666666666, ans=0.125 +2024-08-26 00:02:59,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=20.05 vs. limit=15.0 +2024-08-26 00:03:00,082 INFO [train.py:1114] (0/4) Epoch 12, batch 2100, loss[loss=0.2146, simple_loss=0.2832, pruned_loss=0.05398, ctc_loss=0.09531, over 19782.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2881, pruned_loss=0.06192, ctc_loss=0.1164, over 3859765.02 frames. 
], batch size: 54, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:03:02,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=157232.0, ans=0.125 +2024-08-26 00:25:13,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157338.66666666666, ans=0.1 +2024-08-26 00:33:21,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.75 vs. limit=15.0 +2024-08-26 00:33:35,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157392.0, ans=0.1 +2024-08-26 00:56:07,943 INFO [train.py:1114] (0/4) Epoch 12, batch 2150, loss[loss=0.2025, simple_loss=0.269, pruned_loss=0.04919, ctc_loss=0.09439, over 19584.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.288, pruned_loss=0.06192, ctc_loss=0.1162, over 3870387.59 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:59:27,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.12 vs. limit=15.0 +2024-08-26 01:06:20,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=157552.0, ans=0.0 +2024-08-26 01:09:51,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157552.0, ans=0.1 +2024-08-26 01:09:53,319 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.788e+02 2.174e+02 2.705e+02 6.148e+02, threshold=4.348e+02, percent-clipped=11.0 +2024-08-26 01:11:53,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157552.0, ans=0.1 +2024-08-26 01:11:54,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=157552.0, ans=0.025 +2024-08-26 01:13:56,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.43 vs. limit=15.0 +2024-08-26 01:35:47,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0 +2024-08-26 01:37:35,684 INFO [train.py:1114] (0/4) Epoch 12, batch 2200, loss[loss=0.2218, simple_loss=0.2953, pruned_loss=0.05451, ctc_loss=0.09846, over 19582.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.2875, pruned_loss=0.06152, ctc_loss=0.1156, over 3868343.65 frames. 
], batch size: 57, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 01:38:03,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=157765.33333333334, ans=0.2 +2024-08-26 01:43:11,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157818.66666666666, ans=0.1 +2024-08-26 01:44:09,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=157818.66666666666, ans=0.125 +2024-08-26 01:47:42,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=157872.0, ans=0.0 +2024-08-26 01:49:49,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=157925.33333333334, ans=0.125 +2024-08-26 01:50:18,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=157925.33333333334, ans=0.125 +2024-08-26 01:55:39,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.08 vs. limit=15.0 +2024-08-26 01:57:29,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=158032.0, ans=0.125 +2024-08-26 01:57:30,331 INFO [train.py:1114] (0/4) Epoch 12, batch 2250, loss[loss=0.2438, simple_loss=0.3034, pruned_loss=0.06669, ctc_loss=0.1273, over 19636.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.287, pruned_loss=0.06127, ctc_loss=0.115, over 3868134.48 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:03:20,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158085.33333333334, ans=0.1 +2024-08-26 02:04:28,482 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.839e+02 2.199e+02 2.577e+02 6.358e+02, threshold=4.399e+02, percent-clipped=1.0 +2024-08-26 02:08:01,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=158192.0, ans=0.0 +2024-08-26 02:09:46,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.76 vs. limit=15.0 +2024-08-26 02:12:18,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=158245.33333333334, ans=0.025 +2024-08-26 02:13:21,300 INFO [train.py:1114] (0/4) Epoch 12, batch 2300, loss[loss=0.2004, simple_loss=0.2647, pruned_loss=0.04923, ctc_loss=0.09407, over 19490.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2865, pruned_loss=0.06143, ctc_loss=0.1153, over 3862323.12 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:13:56,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.58 vs. 
limit=15.0 +2024-08-26 02:17:35,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158405.33333333334, ans=0.1 +2024-08-26 02:17:37,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=158405.33333333334, ans=10.0 +2024-08-26 02:21:51,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=158512.0, ans=0.025 +2024-08-26 02:22:39,621 INFO [train.py:1114] (0/4) Epoch 12, batch 2350, loss[loss=0.2453, simple_loss=0.3093, pruned_loss=0.0667, ctc_loss=0.12, over 19671.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.2863, pruned_loss=0.06121, ctc_loss=0.115, over 3864821.42 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:23:11,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=158565.33333333334, ans=0.025 +2024-08-26 02:25:18,442 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.991e+02 2.536e+02 3.183e+02 5.552e+02, threshold=5.072e+02, percent-clipped=5.0 +2024-08-26 02:28:01,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=158725.33333333334, ans=0.125 +2024-08-26 02:30:26,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158778.66666666666, ans=0.1 +2024-08-26 02:30:53,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=158778.66666666666, ans=0.0 +2024-08-26 02:30:58,360 INFO [train.py:1114] (0/4) Epoch 12, batch 2400, loss[loss=0.2609, simple_loss=0.3108, pruned_loss=0.0771, ctc_loss=0.1421, over 19390.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2884, pruned_loss=0.06227, ctc_loss=0.1169, over 3859457.85 frames. ], batch size: 67, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:31:10,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=158832.0, ans=0.025 +2024-08-26 02:31:28,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=158885.33333333334, ans=0.2 +2024-08-26 02:37:41,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=15.0 +2024-08-26 02:38:16,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.35 vs. limit=10.0 +2024-08-26 02:38:22,378 INFO [train.py:1114] (0/4) Epoch 12, batch 2450, loss[loss=0.2844, simple_loss=0.318, pruned_loss=0.09148, ctc_loss=0.1695, over 13431.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2926, pruned_loss=0.0655, ctc_loss=0.1231, over 3736242.85 frames. ], batch size: 141, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:38:42,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=159098.66666666666, ans=0.2 +2024-08-26 02:39:07,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. 
limit=15.0 +2024-08-26 02:39:11,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.25 vs. limit=15.0 +2024-08-26 02:39:42,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.859e+02 2.162e+02 2.447e+02 4.124e+02, threshold=4.324e+02, percent-clipped=0.0 +2024-08-26 02:39:42,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=159152.0, ans=0.05 +2024-08-26 02:40:01,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159152.0, ans=0.125 +2024-08-26 02:40:14,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159152.0, ans=0.125 +2024-08-26 02:40:15,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=159205.33333333334, ans=0.0 +2024-08-26 02:40:40,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=159205.33333333334, ans=0.0 +2024-08-26 02:41:48,774 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-12.pt +2024-08-26 02:43:45,614 INFO [train.py:1114] (0/4) Epoch 13, batch 0, loss[loss=0.223, simple_loss=0.2715, pruned_loss=0.06214, ctc_loss=0.1254, over 19800.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2715, pruned_loss=0.06214, ctc_loss=0.1254, over 19800.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 32.0 +2024-08-26 02:43:45,616 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 02:45:27,908 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1972, simple_loss=0.2835, pruned_loss=0.04113, ctc_loss=0.07151, over 944034.00 frames. +2024-08-26 02:45:27,909 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-26 02:45:31,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159306.66666666666, ans=0.0 +2024-08-26 02:45:32,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159306.66666666666, ans=0.125 +2024-08-26 02:46:06,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.08 vs. 
limit=15.0 +2024-08-26 02:46:07,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=159360.0, ans=0.025 +2024-08-26 02:46:14,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=159413.33333333334, ans=0.2 +2024-08-26 02:46:22,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=159413.33333333334, ans=0.04949747468305833 +2024-08-26 02:47:02,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159520.0, ans=0.1 +2024-08-26 02:47:15,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=159520.0, ans=0.0 +2024-08-26 02:48:06,366 INFO [train.py:1114] (0/4) Epoch 13, batch 50, loss[loss=0.1752, simple_loss=0.2456, pruned_loss=0.03811, ctc_loss=0.07125, over 19681.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2893, pruned_loss=0.06277, ctc_loss=0.1198, over 844325.49 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:48:32,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159626.66666666666, ans=0.1 +2024-08-26 02:48:55,481 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.822e+02 2.122e+02 2.766e+02 5.339e+02, threshold=4.244e+02, percent-clipped=3.0 +2024-08-26 02:49:16,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=159786.66666666666, ans=0.0 +2024-08-26 02:49:20,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=159786.66666666666, ans=0.125 +2024-08-26 02:49:26,937 INFO [train.py:1114] (0/4) Epoch 13, batch 100, loss[loss=0.2069, simple_loss=0.2663, pruned_loss=0.05282, ctc_loss=0.1049, over 19711.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2894, pruned_loss=0.06168, ctc_loss=0.1175, over 1498893.67 frames. 
], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:49:33,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159840.0, ans=0.1 +2024-08-26 02:50:00,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=159946.66666666666, ans=0.125 +2024-08-26 02:50:00,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=159946.66666666666, ans=0.0 +2024-08-26 02:50:21,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=159946.66666666666, ans=0.125 +2024-08-26 02:50:45,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160000.0, ans=0.125 +2024-08-26 02:50:46,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=160000.0, ans=0.0 +2024-08-26 02:51:16,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 02:51:20,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=160053.33333333334, ans=0.2 +2024-08-26 02:51:23,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160053.33333333334, ans=0.1 +2024-08-26 02:51:27,248 INFO [train.py:1114] (0/4) Epoch 13, batch 150, loss[loss=0.2121, simple_loss=0.2703, pruned_loss=0.0568, ctc_loss=0.1009, over 19717.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2872, pruned_loss=0.06134, ctc_loss=0.1165, over 2027521.62 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:51:42,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=160106.66666666666, ans=0.0 +2024-08-26 02:51:54,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=160160.0, ans=0.0 +2024-08-26 02:52:05,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.56 vs. limit=12.0 +2024-08-26 02:52:48,530 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.693e+02 1.889e+02 2.276e+02 3.515e+02, threshold=3.778e+02, percent-clipped=0.0 +2024-08-26 02:53:36,276 INFO [train.py:1114] (0/4) Epoch 13, batch 200, loss[loss=0.2545, simple_loss=0.3105, pruned_loss=0.07242, ctc_loss=0.134, over 18324.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2861, pruned_loss=0.06061, ctc_loss=0.1146, over 2435886.34 frames. 
], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:53:52,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=160426.66666666666, ans=0.04949747468305833 +2024-08-26 02:54:01,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=160426.66666666666, ans=0.07 +2024-08-26 02:54:10,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=160426.66666666666, ans=0.2 +2024-08-26 02:54:32,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=160480.0, ans=0.0 +2024-08-26 02:54:36,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=160480.0, ans=0.0 +2024-08-26 02:54:49,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=160480.0, ans=0.2 +2024-08-26 02:55:15,723 INFO [train.py:1114] (0/4) Epoch 13, batch 250, loss[loss=0.2129, simple_loss=0.2799, pruned_loss=0.05318, ctc_loss=0.09903, over 19389.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2851, pruned_loss=0.05994, ctc_loss=0.1135, over 2755928.06 frames. ], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:55:22,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=160640.0, ans=0.125 +2024-08-26 02:55:30,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=160693.33333333334, ans=0.125 +2024-08-26 02:55:34,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=160693.33333333334, ans=0.125 +2024-08-26 02:55:47,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.754e+02 2.188e+02 2.577e+02 4.403e+02, threshold=4.375e+02, percent-clipped=2.0 +2024-08-26 02:55:53,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=160800.0, ans=0.2 +2024-08-26 02:55:53,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=160800.0, ans=0.05 +2024-08-26 02:56:10,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160853.33333333334, ans=0.1 +2024-08-26 02:56:15,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160853.33333333334, ans=0.1 +2024-08-26 02:56:23,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160906.66666666666, ans=0.125 +2024-08-26 02:56:43,566 INFO [train.py:1114] (0/4) Epoch 13, batch 300, loss[loss=0.2551, simple_loss=0.3096, pruned_loss=0.07454, ctc_loss=0.1288, over 19506.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2854, pruned_loss=0.06006, ctc_loss=0.1135, over 3000371.21 frames. 
], batch size: 61, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:56:50,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=160906.66666666666, ans=0.2 +2024-08-26 02:56:56,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=160906.66666666666, ans=0.125 +2024-08-26 02:57:19,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=161013.33333333334, ans=0.2 +2024-08-26 02:57:34,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.84 vs. limit=10.0 +2024-08-26 02:57:37,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-08-26 02:57:45,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=161120.0, ans=0.1 +2024-08-26 02:57:50,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.20 vs. limit=22.5 +2024-08-26 02:57:50,484 INFO [train.py:1114] (0/4) Epoch 13, batch 350, loss[loss=0.2038, simple_loss=0.257, pruned_loss=0.05432, ctc_loss=0.1049, over 19781.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2855, pruned_loss=0.05992, ctc_loss=0.1133, over 3190650.97 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:57:57,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=161173.33333333334, ans=0.125 +2024-08-26 02:57:57,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.79 vs. limit=10.0 +2024-08-26 02:58:09,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=161226.66666666666, ans=0.0 +2024-08-26 02:58:17,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161280.0, ans=0.125 +2024-08-26 02:58:25,614 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.772e+02 2.039e+02 2.354e+02 3.759e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-26 02:59:24,157 INFO [train.py:1114] (0/4) Epoch 13, batch 400, loss[loss=0.2535, simple_loss=0.3033, pruned_loss=0.07513, ctc_loss=0.1338, over 19512.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2856, pruned_loss=0.0603, ctc_loss=0.1136, over 3342951.10 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 02:59:29,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.39 vs. limit=10.0 +2024-08-26 02:59:53,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=161440.0, ans=0.2 +2024-08-26 03:00:07,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 03:00:43,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.32 vs. 
limit=15.0 +2024-08-26 03:00:44,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=161600.0, ans=0.125 +2024-08-26 03:00:46,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=161600.0, ans=0.025 +2024-08-26 03:00:52,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=161600.0, ans=0.125 +2024-08-26 03:01:10,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161653.33333333334, ans=0.1 +2024-08-26 03:01:53,835 INFO [train.py:1114] (0/4) Epoch 13, batch 450, loss[loss=0.2194, simple_loss=0.2934, pruned_loss=0.05228, ctc_loss=0.102, over 19616.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2863, pruned_loss=0.06059, ctc_loss=0.1143, over 3451082.16 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:01:54,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=161706.66666666666, ans=0.125 +2024-08-26 03:02:04,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=161706.66666666666, ans=15.0 +2024-08-26 03:02:14,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.85 vs. limit=10.0 +2024-08-26 03:02:50,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161813.33333333334, ans=0.125 +2024-08-26 03:02:57,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0 +2024-08-26 03:03:10,115 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.726e+02 2.085e+02 2.754e+02 4.301e+02, threshold=4.170e+02, percent-clipped=3.0 +2024-08-26 03:03:29,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-26 03:03:51,757 INFO [train.py:1114] (0/4) Epoch 13, batch 500, loss[loss=0.2272, simple_loss=0.2924, pruned_loss=0.05933, ctc_loss=0.1084, over 19664.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2853, pruned_loss=0.06001, ctc_loss=0.113, over 3546593.79 frames. 
], batch size: 63, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:04:16,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161973.33333333334, ans=0.125 +2024-08-26 03:04:36,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=162026.66666666666, ans=0.0 +2024-08-26 03:05:02,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=162026.66666666666, ans=0.125 +2024-08-26 03:05:04,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=162080.0, ans=0.125 +2024-08-26 03:05:06,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=162080.0, ans=0.0 +2024-08-26 03:05:20,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=162080.0, ans=0.0 +2024-08-26 03:05:27,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=162133.33333333334, ans=0.05 +2024-08-26 03:05:35,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162186.66666666666, ans=0.125 +2024-08-26 03:06:00,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=162186.66666666666, ans=0.2 +2024-08-26 03:06:03,081 INFO [train.py:1114] (0/4) Epoch 13, batch 550, loss[loss=0.245, simple_loss=0.3025, pruned_loss=0.06901, ctc_loss=0.1235, over 19314.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2854, pruned_loss=0.06031, ctc_loss=0.1136, over 3608138.28 frames. ], batch size: 71, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:06:10,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162240.0, ans=0.125 +2024-08-26 03:06:26,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162293.33333333334, ans=0.125 +2024-08-26 03:06:37,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=162346.66666666666, ans=0.0 +2024-08-26 03:06:38,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.07 vs. limit=15.0 +2024-08-26 03:06:39,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.01 vs. limit=15.0 +2024-08-26 03:06:47,057 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.318e+02 1.758e+02 1.954e+02 2.485e+02 4.688e+02, threshold=3.908e+02, percent-clipped=2.0 +2024-08-26 03:07:01,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=162400.0, ans=0.0 +2024-08-26 03:07:24,286 INFO [train.py:1114] (0/4) Epoch 13, batch 600, loss[loss=0.2683, simple_loss=0.3173, pruned_loss=0.08046, ctc_loss=0.146, over 19428.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2852, pruned_loss=0.06004, ctc_loss=0.1128, over 3665328.45 frames. 
], batch size: 67, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:07:27,587 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.91 vs. limit=15.0 +2024-08-26 03:07:48,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162560.0, ans=0.0 +2024-08-26 03:07:52,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.84 vs. limit=22.5 +2024-08-26 03:08:34,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=162666.66666666666, ans=0.125 +2024-08-26 03:09:14,963 INFO [train.py:1114] (0/4) Epoch 13, batch 650, loss[loss=0.2021, simple_loss=0.2763, pruned_loss=0.04641, ctc_loss=0.08764, over 19748.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.285, pruned_loss=0.05988, ctc_loss=0.1125, over 3715452.02 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:09:25,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=162773.33333333334, ans=0.125 +2024-08-26 03:09:26,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=162773.33333333334, ans=0.125 +2024-08-26 03:09:54,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=162880.0, ans=0.0 +2024-08-26 03:09:56,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162880.0, ans=0.0 +2024-08-26 03:10:09,866 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.755e+02 2.119e+02 2.960e+02 5.119e+02, threshold=4.237e+02, percent-clipped=6.0 +2024-08-26 03:10:16,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=162933.33333333334, ans=0.0 +2024-08-26 03:10:16,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.64 vs. limit=22.5 +2024-08-26 03:10:17,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162933.33333333334, ans=0.1 +2024-08-26 03:10:36,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162986.66666666666, ans=0.125 +2024-08-26 03:10:36,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=162986.66666666666, ans=0.0 +2024-08-26 03:10:39,747 INFO [train.py:1114] (0/4) Epoch 13, batch 700, loss[loss=0.2031, simple_loss=0.2731, pruned_loss=0.04795, ctc_loss=0.09305, over 19719.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2858, pruned_loss=0.06019, ctc_loss=0.1132, over 3748105.30 frames. 
], batch size: 51, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:11:10,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=163146.66666666666, ans=0.125 +2024-08-26 03:11:32,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=163200.0, ans=0.125 +2024-08-26 03:11:56,189 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.46 vs. limit=15.0 +2024-08-26 03:12:00,798 INFO [train.py:1114] (0/4) Epoch 13, batch 750, loss[loss=0.2366, simple_loss=0.3016, pruned_loss=0.0618, ctc_loss=0.12, over 19514.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2853, pruned_loss=0.05996, ctc_loss=0.1129, over 3775227.81 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:12:09,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=163306.66666666666, ans=0.1 +2024-08-26 03:12:20,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=163360.0, ans=0.025 +2024-08-26 03:12:36,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163413.33333333334, ans=0.1 +2024-08-26 03:12:43,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.800e+02 2.310e+02 2.882e+02 4.749e+02, threshold=4.619e+02, percent-clipped=2.0 +2024-08-26 03:12:45,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 03:12:56,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163520.0, ans=0.125 +2024-08-26 03:13:56,360 INFO [train.py:1114] (0/4) Epoch 13, batch 800, loss[loss=0.1997, simple_loss=0.263, pruned_loss=0.04875, ctc_loss=0.09744, over 19405.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2846, pruned_loss=0.05953, ctc_loss=0.1121, over 3796428.28 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:14:17,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=163626.66666666666, ans=0.0 +2024-08-26 03:14:32,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=163680.0, ans=0.125 +2024-08-26 03:14:41,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=163680.0, ans=0.07 +2024-08-26 03:14:50,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=163733.33333333334, ans=0.125 +2024-08-26 03:15:04,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163786.66666666666, ans=0.125 +2024-08-26 03:15:13,886 INFO [train.py:1114] (0/4) Epoch 13, batch 850, loss[loss=0.235, simple_loss=0.3005, pruned_loss=0.06111, ctc_loss=0.1182, over 19647.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2839, pruned_loss=0.05917, ctc_loss=0.1113, over 3815611.85 frames. 
], batch size: 59, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:15:23,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=163840.0, ans=0.125 +2024-08-26 03:15:25,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=163893.33333333334, ans=0.0 +2024-08-26 03:16:04,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=163946.66666666666, ans=0.0 +2024-08-26 03:16:11,613 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.727e+02 1.948e+02 2.271e+02 3.773e+02, threshold=3.897e+02, percent-clipped=0.0 +2024-08-26 03:16:17,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=164000.0, ans=0.0 +2024-08-26 03:16:38,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=164106.66666666666, ans=0.125 +2024-08-26 03:16:39,641 INFO [train.py:1114] (0/4) Epoch 13, batch 900, loss[loss=0.2042, simple_loss=0.2607, pruned_loss=0.05372, ctc_loss=0.1009, over 19817.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2847, pruned_loss=0.05964, ctc_loss=0.1122, over 3819191.02 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:16:58,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.30 vs. limit=22.5 +2024-08-26 03:17:11,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=164213.33333333334, ans=0.125 +2024-08-26 03:17:24,780 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:17:39,456 INFO [train.py:1114] (0/4) Epoch 13, batch 950, loss[loss=0.2366, simple_loss=0.2818, pruned_loss=0.06987, ctc_loss=0.1291, over 19491.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2853, pruned_loss=0.06016, ctc_loss=0.113, over 3821463.59 frames. 
], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:18:42,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=164480.0, ans=0.025 +2024-08-26 03:18:47,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=164480.0, ans=0.125 +2024-08-26 03:18:52,283 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.763e+02 2.081e+02 2.549e+02 5.575e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-26 03:18:56,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=164533.33333333334, ans=0.2 +2024-08-26 03:18:57,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=164533.33333333334, ans=0.125 +2024-08-26 03:19:26,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164586.66666666666, ans=0.1 +2024-08-26 03:19:29,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164640.0, ans=0.1 +2024-08-26 03:19:29,891 INFO [train.py:1114] (0/4) Epoch 13, batch 1000, loss[loss=0.1883, simple_loss=0.2548, pruned_loss=0.04514, ctc_loss=0.07869, over 19854.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2856, pruned_loss=0.06052, ctc_loss=0.1135, over 3817729.86 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:19:45,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=164693.33333333334, ans=0.125 +2024-08-26 03:19:51,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=164693.33333333334, ans=0.125 +2024-08-26 03:19:57,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=164746.66666666666, ans=0.0 +2024-08-26 03:20:26,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=164853.33333333334, ans=0.125 +2024-08-26 03:20:27,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164853.33333333334, ans=0.0 +2024-08-26 03:20:32,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=164853.33333333334, ans=0.0 +2024-08-26 03:20:35,683 INFO [train.py:1114] (0/4) Epoch 13, batch 1050, loss[loss=0.2247, simple_loss=0.2904, pruned_loss=0.05894, ctc_loss=0.1027, over 19837.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2848, pruned_loss=0.06013, ctc_loss=0.1129, over 3823736.33 frames. 
], batch size: 57, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:20:44,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164906.66666666666, ans=0.1 +2024-08-26 03:20:58,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=165013.33333333334, ans=0.07 +2024-08-26 03:20:58,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=165013.33333333334, ans=0.025 +2024-08-26 03:21:05,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=165013.33333333334, ans=0.125 +2024-08-26 03:21:08,101 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.698e+02 1.997e+02 2.318e+02 3.616e+02, threshold=3.994e+02, percent-clipped=0.0 +2024-08-26 03:21:18,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=165066.66666666666, ans=0.0 +2024-08-26 03:21:37,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.39 vs. limit=15.0 +2024-08-26 03:21:44,506 INFO [train.py:1114] (0/4) Epoch 13, batch 1100, loss[loss=0.2278, simple_loss=0.2893, pruned_loss=0.06085, ctc_loss=0.1113, over 19566.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2848, pruned_loss=0.05997, ctc_loss=0.1126, over 3830771.65 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:22:16,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.09 vs. limit=15.0 +2024-08-26 03:22:17,136 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:22:36,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=165386.66666666666, ans=0.0 +2024-08-26 03:22:53,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.52 vs. limit=15.0 +2024-08-26 03:22:57,619 INFO [train.py:1114] (0/4) Epoch 13, batch 1150, loss[loss=0.2392, simple_loss=0.2891, pruned_loss=0.06833, ctc_loss=0.1317, over 19597.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2847, pruned_loss=0.06004, ctc_loss=0.1128, over 3830498.68 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:23:15,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.18 vs. 
limit=22.5 +2024-08-26 03:23:19,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=165493.33333333334, ans=0.07 +2024-08-26 03:23:37,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=165546.66666666666, ans=0.125 +2024-08-26 03:23:38,677 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.729e+02 2.006e+02 2.456e+02 7.202e+02, threshold=4.012e+02, percent-clipped=3.0 +2024-08-26 03:23:40,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=165600.0, ans=0.125 +2024-08-26 03:24:03,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.47 vs. limit=15.0 +2024-08-26 03:24:11,590 INFO [train.py:1114] (0/4) Epoch 13, batch 1200, loss[loss=0.2083, simple_loss=0.2795, pruned_loss=0.05089, ctc_loss=0.08842, over 19841.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.286, pruned_loss=0.06054, ctc_loss=0.1139, over 3825729.86 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:24:23,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=165706.66666666666, ans=0.2 +2024-08-26 03:24:34,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=165760.0, ans=0.2 +2024-08-26 03:26:10,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=165920.0, ans=0.125 +2024-08-26 03:26:18,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.78 vs. limit=15.0 +2024-08-26 03:26:20,506 INFO [train.py:1114] (0/4) Epoch 13, batch 1250, loss[loss=0.2155, simple_loss=0.2831, pruned_loss=0.05276, ctc_loss=0.1059, over 19518.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.285, pruned_loss=0.05952, ctc_loss=0.1119, over 3843655.39 frames. ], batch size: 61, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:26:25,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165973.33333333334, ans=0.1 +2024-08-26 03:26:45,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=166026.66666666666, ans=0.125 +2024-08-26 03:27:18,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166080.0, ans=0.0 +2024-08-26 03:27:23,469 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.311e+02 1.715e+02 1.869e+02 2.285e+02 3.930e+02, threshold=3.738e+02, percent-clipped=0.0 +2024-08-26 03:27:28,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166080.0, ans=0.1 +2024-08-26 03:27:46,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.07 vs. limit=12.0 +2024-08-26 03:28:00,547 INFO [train.py:1114] (0/4) Epoch 13, batch 1300, loss[loss=0.2488, simple_loss=0.3018, pruned_loss=0.07149, ctc_loss=0.1319, over 18789.00 frames. 
], tot_loss[loss=0.2239, simple_loss=0.2844, pruned_loss=0.05943, ctc_loss=0.1116, over 3847040.83 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:28:43,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=166293.33333333334, ans=0.0 +2024-08-26 03:29:33,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=166453.33333333334, ans=0.125 +2024-08-26 03:30:19,054 INFO [train.py:1114] (0/4) Epoch 13, batch 1350, loss[loss=0.2186, simple_loss=0.2801, pruned_loss=0.05651, ctc_loss=0.1102, over 19774.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2835, pruned_loss=0.05876, ctc_loss=0.1105, over 3858652.72 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:30:34,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=166560.0, ans=0.125 +2024-08-26 03:30:54,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=166613.33333333334, ans=0.0 +2024-08-26 03:31:08,773 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.377e+02 1.736e+02 2.053e+02 2.622e+02 5.263e+02, threshold=4.106e+02, percent-clipped=6.0 +2024-08-26 03:31:10,624 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0 +2024-08-26 03:31:12,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=166666.66666666666, ans=0.0 +2024-08-26 03:31:40,386 INFO [train.py:1114] (0/4) Epoch 13, batch 1400, loss[loss=0.2035, simple_loss=0.2556, pruned_loss=0.0546, ctc_loss=0.1053, over 19692.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2834, pruned_loss=0.05886, ctc_loss=0.1108, over 3864667.05 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:31:53,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166773.33333333334, ans=0.1 +2024-08-26 03:32:37,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166880.0, ans=0.125 +2024-08-26 03:32:56,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=166933.33333333334, ans=0.125 +2024-08-26 03:33:07,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.75 vs. limit=15.0 +2024-08-26 03:33:14,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.33 vs. limit=15.0 +2024-08-26 03:33:18,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=166986.66666666666, ans=0.125 +2024-08-26 03:33:21,477 INFO [train.py:1114] (0/4) Epoch 13, batch 1450, loss[loss=0.212, simple_loss=0.2774, pruned_loss=0.05286, ctc_loss=0.1023, over 19679.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.284, pruned_loss=0.05917, ctc_loss=0.1112, over 3862526.47 frames. 
], batch size: 63, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:33:26,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167040.0, ans=0.0 +2024-08-26 03:33:59,242 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.756e+02 1.937e+02 2.380e+02 3.895e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-26 03:34:20,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=167253.33333333334, ans=0.04949747468305833 +2024-08-26 03:34:30,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167253.33333333334, ans=0.1 +2024-08-26 03:34:33,941 INFO [train.py:1114] (0/4) Epoch 13, batch 1500, loss[loss=0.2257, simple_loss=0.2935, pruned_loss=0.05792, ctc_loss=0.1051, over 19580.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2854, pruned_loss=0.06008, ctc_loss=0.1127, over 3861682.56 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:34:37,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=167306.66666666666, ans=0.125 +2024-08-26 03:34:55,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.22 vs. limit=10.0 +2024-08-26 03:35:21,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0 +2024-08-26 03:35:25,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=167520.0, ans=0.0 +2024-08-26 03:35:32,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=167520.0, ans=0.0 +2024-08-26 03:35:43,249 INFO [train.py:1114] (0/4) Epoch 13, batch 1550, loss[loss=0.2555, simple_loss=0.3075, pruned_loss=0.07421, ctc_loss=0.1375, over 19588.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2852, pruned_loss=0.06003, ctc_loss=0.1128, over 3846022.74 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:35:56,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=167626.66666666666, ans=0.2 +2024-08-26 03:35:57,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=167626.66666666666, ans=0.0 +2024-08-26 03:36:18,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0 +2024-08-26 03:36:25,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.65 vs. 
limit=12.0 +2024-08-26 03:36:35,365 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.778e+02 2.054e+02 2.767e+02 5.252e+02, threshold=4.108e+02, percent-clipped=7.0 +2024-08-26 03:36:44,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=167733.33333333334, ans=0.09899494936611666 +2024-08-26 03:37:02,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=167786.66666666666, ans=0.125 +2024-08-26 03:37:05,306 INFO [train.py:1114] (0/4) Epoch 13, batch 1600, loss[loss=0.2075, simple_loss=0.285, pruned_loss=0.04666, ctc_loss=0.09161, over 19856.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2853, pruned_loss=0.06003, ctc_loss=0.113, over 3836438.34 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:37:40,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.75 vs. limit=10.0 +2024-08-26 03:37:46,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=167946.66666666666, ans=0.125 +2024-08-26 03:38:19,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168000.0, ans=0.125 +2024-08-26 03:38:35,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168106.66666666666, ans=0.1 +2024-08-26 03:38:36,006 INFO [train.py:1114] (0/4) Epoch 13, batch 1650, loss[loss=0.2236, simple_loss=0.2877, pruned_loss=0.05634, ctc_loss=0.1171, over 19662.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2852, pruned_loss=0.06005, ctc_loss=0.1131, over 3833199.88 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:38:48,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=168106.66666666666, ans=0.125 +2024-08-26 03:39:17,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=168213.33333333334, ans=0.0 +2024-08-26 03:39:20,045 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.825e+02 2.209e+02 2.614e+02 4.167e+02, threshold=4.418e+02, percent-clipped=2.0 +2024-08-26 03:39:37,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=168320.0, ans=0.0 +2024-08-26 03:39:42,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168320.0, ans=0.0 +2024-08-26 03:40:00,095 INFO [train.py:1114] (0/4) Epoch 13, batch 1700, loss[loss=0.1931, simple_loss=0.2542, pruned_loss=0.04796, ctc_loss=0.09034, over 19674.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2845, pruned_loss=0.05933, ctc_loss=0.1117, over 3847041.83 frames. 
], batch size: 46, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:40:14,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=168373.33333333334, ans=0.125 +2024-08-26 03:40:51,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=168533.33333333334, ans=0.2 +2024-08-26 03:41:06,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168586.66666666666, ans=0.125 +2024-08-26 03:41:10,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=168640.0, ans=0.125 +2024-08-26 03:41:17,849 INFO [train.py:1114] (0/4) Epoch 13, batch 1750, loss[loss=0.1964, simple_loss=0.255, pruned_loss=0.04963, ctc_loss=0.09642, over 19627.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2837, pruned_loss=0.05898, ctc_loss=0.1111, over 3852460.98 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:42:01,100 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.702e+02 2.065e+02 2.813e+02 5.109e+02, threshold=4.129e+02, percent-clipped=2.0 +2024-08-26 03:42:15,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=168800.0, ans=0.025 +2024-08-26 03:42:29,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=168853.33333333334, ans=0.025 +2024-08-26 03:42:45,946 INFO [train.py:1114] (0/4) Epoch 13, batch 1800, loss[loss=0.2513, simple_loss=0.3125, pruned_loss=0.069, ctc_loss=0.1304, over 19597.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2841, pruned_loss=0.05913, ctc_loss=0.1112, over 3854137.29 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:43:05,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=168960.0, ans=0.0 +2024-08-26 03:43:10,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=169013.33333333334, ans=0.2 +2024-08-26 03:43:22,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169013.33333333334, ans=0.125 +2024-08-26 03:43:29,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.53 vs. limit=15.0 +2024-08-26 03:43:41,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=169120.0, ans=0.0 +2024-08-26 03:43:47,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169120.0, ans=0.0 +2024-08-26 03:43:53,523 INFO [train.py:1114] (0/4) Epoch 13, batch 1850, loss[loss=0.2421, simple_loss=0.3019, pruned_loss=0.06442, ctc_loss=0.1335, over 19579.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2838, pruned_loss=0.05924, ctc_loss=0.1113, over 3856853.35 frames. 
], batch size: 57, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:43:59,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=169173.33333333334, ans=0.0 +2024-08-26 03:44:11,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=169226.66666666666, ans=0.125 +2024-08-26 03:44:21,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=169280.0, ans=0.05 +2024-08-26 03:44:22,655 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:44:29,682 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.936e+02 2.666e+02 3.402e+02 5.252e+02, threshold=5.332e+02, percent-clipped=13.0 +2024-08-26 03:44:43,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=169386.66666666666, ans=0.125 +2024-08-26 03:44:56,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=169386.66666666666, ans=0.125 +2024-08-26 03:45:07,841 INFO [train.py:1114] (0/4) Epoch 13, batch 1900, loss[loss=0.2428, simple_loss=0.3022, pruned_loss=0.066, ctc_loss=0.1284, over 19653.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2845, pruned_loss=0.05957, ctc_loss=0.112, over 3861046.47 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:45:15,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. limit=15.0 +2024-08-26 03:45:18,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169493.33333333334, ans=0.1 +2024-08-26 03:45:23,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=169493.33333333334, ans=0.125 +2024-08-26 03:45:48,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169600.0, ans=0.1 +2024-08-26 03:45:54,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169600.0, ans=0.1 +2024-08-26 03:46:02,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=169600.0, ans=0.125 +2024-08-26 03:46:24,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169706.66666666666, ans=0.1 +2024-08-26 03:46:29,180 INFO [train.py:1114] (0/4) Epoch 13, batch 1950, loss[loss=0.2093, simple_loss=0.2687, pruned_loss=0.05452, ctc_loss=0.1021, over 19587.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.285, pruned_loss=0.0595, ctc_loss=0.1117, over 3870178.17 frames. 
], batch size: 52, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:46:44,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=169760.0, ans=0.0 +2024-08-26 03:46:55,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=169760.0, ans=0.125 +2024-08-26 03:47:25,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.65 vs. limit=15.0 +2024-08-26 03:50:26,633 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.484e+02 1.795e+02 2.018e+02 2.323e+02 3.502e+02, threshold=4.036e+02, percent-clipped=0.0 +2024-08-26 04:18:42,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=169920.0, ans=0.125 +2024-08-26 04:22:39,281 INFO [train.py:1114] (0/4) Epoch 13, batch 2000, loss[loss=0.2162, simple_loss=0.2719, pruned_loss=0.05887, ctc_loss=0.107, over 19613.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2861, pruned_loss=0.06017, ctc_loss=0.1131, over 3853235.11 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 04:22:40,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=169973.33333333334, ans=0.125 +2024-08-26 04:28:30,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.58 vs. limit=10.0 +2024-08-26 04:34:05,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-08-26 04:49:48,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=170080.0, ans=0.125 +2024-08-26 04:49:49,235 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.34 vs. limit=15.0 +2024-08-26 04:58:16,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=170133.33333333334, ans=0.125 +2024-08-26 05:04:26,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=170133.33333333334, ans=0.0 +2024-08-26 05:08:12,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170186.66666666666, ans=0.1 +2024-08-26 05:17:15,370 INFO [train.py:1114] (0/4) Epoch 13, batch 2050, loss[loss=0.2317, simple_loss=0.2845, pruned_loss=0.06497, ctc_loss=0.1221, over 19708.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2856, pruned_loss=0.06038, ctc_loss=0.1134, over 3851156.12 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:17:50,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170240.0, ans=0.125 +2024-08-26 05:17:51,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. 
limit=15.0 +2024-08-26 05:20:46,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=170240.0, ans=0.0 +2024-08-26 05:22:11,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=170293.33333333334, ans=0.125 +2024-08-26 05:22:12,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=170293.33333333334, ans=0.2 +2024-08-26 05:34:32,862 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.739e+02 2.095e+02 2.592e+02 3.598e+02, threshold=4.189e+02, percent-clipped=0.0 +2024-08-26 05:35:15,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=170400.0, ans=0.035 +2024-08-26 05:37:54,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.62 vs. limit=15.0 +2024-08-26 05:38:21,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=170400.0, ans=0.0 +2024-08-26 05:45:21,792 INFO [train.py:1114] (0/4) Epoch 13, batch 2100, loss[loss=0.1964, simple_loss=0.2652, pruned_loss=0.04663, ctc_loss=0.0859, over 19762.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2848, pruned_loss=0.05969, ctc_loss=0.1121, over 3858379.74 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:47:11,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=170506.66666666666, ans=0.2 +2024-08-26 05:50:52,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.76 vs. limit=15.0 +2024-08-26 05:51:45,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170560.0, ans=0.125 +2024-08-26 05:52:25,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=15.0 +2024-08-26 05:53:15,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=170613.33333333334, ans=0.125 +2024-08-26 05:53:29,280 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-32000.pt +2024-08-26 05:56:41,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=170720.0, ans=0.0 +2024-08-26 05:57:05,751 INFO [train.py:1114] (0/4) Epoch 13, batch 2150, loss[loss=0.2078, simple_loss=0.2746, pruned_loss=0.05189, ctc_loss=0.09315, over 19575.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.284, pruned_loss=0.05937, ctc_loss=0.1114, over 3869607.32 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:58:18,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.97 vs. 
limit=15.0 +2024-08-26 06:00:56,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170826.66666666666, ans=0.0 +2024-08-26 06:01:20,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=170880.0, ans=0.05 +2024-08-26 06:02:02,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=170880.0, ans=0.125 +2024-08-26 06:02:10,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.801e+02 2.071e+02 2.646e+02 5.963e+02, threshold=4.141e+02, percent-clipped=6.0 +2024-08-26 06:03:39,260 INFO [train.py:1114] (0/4) Epoch 13, batch 2200, loss[loss=0.2534, simple_loss=0.3168, pruned_loss=0.06919, ctc_loss=0.1293, over 19592.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2838, pruned_loss=0.05901, ctc_loss=0.1106, over 3867172.51 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 06:03:46,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171040.0, ans=0.1 +2024-08-26 06:04:25,405 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.01 vs. limit=15.0 +2024-08-26 06:05:08,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171200.0, ans=0.1 +2024-08-26 06:05:28,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=171200.0, ans=0.05 +2024-08-26 06:05:57,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=171253.33333333334, ans=0.0 +2024-08-26 06:06:26,735 INFO [train.py:1114] (0/4) Epoch 13, batch 2250, loss[loss=0.267, simple_loss=0.3206, pruned_loss=0.07899, ctc_loss=0.1388, over 19623.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2843, pruned_loss=0.05926, ctc_loss=0.1112, over 3867954.74 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 06:06:30,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.08 vs. limit=22.5 +2024-08-26 06:06:31,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171306.66666666666, ans=0.1 +2024-08-26 06:08:30,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.765e+02 2.070e+02 2.599e+02 3.761e+02, threshold=4.140e+02, percent-clipped=0.0 +2024-08-26 06:09:25,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=171466.66666666666, ans=0.04949747468305833 +2024-08-26 06:10:16,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=171520.0, ans=0.2 +2024-08-26 06:10:19,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.59 vs. limit=22.5 +2024-08-26 06:10:19,750 INFO [train.py:1114] (0/4) Epoch 13, batch 2300, loss[loss=0.1933, simple_loss=0.2575, pruned_loss=0.04745, ctc_loss=0.08533, over 19511.00 frames. 
], tot_loss[loss=0.2235, simple_loss=0.2837, pruned_loss=0.05932, ctc_loss=0.1116, over 3862602.56 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:10:27,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=171573.33333333334, ans=0.0 +2024-08-26 06:11:02,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=171680.0, ans=0.0 +2024-08-26 06:11:15,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=171680.0, ans=0.125 +2024-08-26 06:11:22,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=22.5 +2024-08-26 06:11:41,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=171786.66666666666, ans=22.5 +2024-08-26 06:11:43,315 INFO [train.py:1114] (0/4) Epoch 13, batch 2350, loss[loss=0.219, simple_loss=0.2834, pruned_loss=0.05625, ctc_loss=0.1055, over 19693.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2835, pruned_loss=0.059, ctc_loss=0.1109, over 3865403.74 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:11:47,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=171840.0, ans=0.125 +2024-08-26 06:12:04,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=171946.66666666666, ans=0.125 +2024-08-26 06:12:09,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=171946.66666666666, ans=0.125 +2024-08-26 06:12:16,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.773e+02 2.247e+02 3.255e+02 4.983e+02, threshold=4.494e+02, percent-clipped=2.0 +2024-08-26 06:12:20,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=172000.0, ans=0.0 +2024-08-26 06:12:44,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=172053.33333333334, ans=0.0 +2024-08-26 06:12:46,290 INFO [train.py:1114] (0/4) Epoch 13, batch 2400, loss[loss=0.2608, simple_loss=0.3182, pruned_loss=0.07306, ctc_loss=0.1434, over 19379.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2859, pruned_loss=0.06023, ctc_loss=0.1132, over 3859690.14 frames. ], batch size: 67, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:12:58,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172106.66666666666, ans=0.125 +2024-08-26 06:13:12,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=172160.0, ans=0.125 +2024-08-26 06:13:23,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=172160.0, ans=0.2 +2024-08-26 06:13:29,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.57 vs. 
limit=15.0 +2024-08-26 06:13:41,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=172266.66666666666, ans=0.125 +2024-08-26 06:14:03,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=12.0 +2024-08-26 06:14:08,367 INFO [train.py:1114] (0/4) Epoch 13, batch 2450, loss[loss=0.3072, simple_loss=0.3251, pruned_loss=0.1032, ctc_loss=0.2069, over 12724.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.2902, pruned_loss=0.06354, ctc_loss=0.1196, over 3732461.70 frames. ], batch size: 140, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:14:11,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.92 vs. limit=15.0 +2024-08-26 06:14:34,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172480.0, ans=0.1 +2024-08-26 06:14:35,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=172480.0, ans=0.125 +2024-08-26 06:14:43,293 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.935e+02 2.072e+02 2.350e+02 4.711e+02, threshold=4.143e+02, percent-clipped=2.0 +2024-08-26 06:14:57,421 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-26 06:16:27,490 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:16:27,490 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 06:17:56,655 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.1238, 3.1051, 3.6303, 2.8044], device='cuda:0') +2024-08-26 06:17:58,789 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1898, simple_loss=0.2778, pruned_loss=0.03769, ctc_loss=0.06578, over 944034.00 frames. +2024-08-26 06:18:12,591 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 14058MB +2024-08-26 06:18:29,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=172634.66666666666, ans=0.125 +2024-08-26 06:18:32,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=12.0 +2024-08-26 06:19:24,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=172688.0, ans=0.0 +2024-08-26 06:19:28,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.76 vs. limit=22.5 +2024-08-26 06:19:33,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=172741.33333333334, ans=0.125 +2024-08-26 06:19:53,752 INFO [train.py:1114] (0/4) Epoch 14, batch 50, loss[loss=0.2156, simple_loss=0.2724, pruned_loss=0.05824, ctc_loss=0.1059, over 19713.00 frames. 
], tot_loss[loss=0.2247, simple_loss=0.2863, pruned_loss=0.05912, ctc_loss=0.1119, over 844800.35 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:20:03,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172848.0, ans=0.1 +2024-08-26 06:20:05,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=172848.0, ans=0.125 +2024-08-26 06:20:19,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=172901.33333333334, ans=0.0 +2024-08-26 06:20:25,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.00 vs. limit=22.5 +2024-08-26 06:20:42,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172901.33333333334, ans=0.1 +2024-08-26 06:20:45,405 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.54 vs. limit=10.0 +2024-08-26 06:20:55,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=172954.66666666666, ans=0.0 +2024-08-26 06:21:17,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.738e+02 2.047e+02 2.487e+02 4.948e+02, threshold=4.095e+02, percent-clipped=4.0 +2024-08-26 06:21:19,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=173061.33333333334, ans=0.0 +2024-08-26 06:21:51,844 INFO [train.py:1114] (0/4) Epoch 14, batch 100, loss[loss=0.2338, simple_loss=0.2884, pruned_loss=0.0645, ctc_loss=0.1257, over 19723.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.2889, pruned_loss=0.06068, ctc_loss=0.1147, over 1499143.56 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:21:54,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173114.66666666666, ans=0.125 +2024-08-26 06:22:47,273 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:22:55,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=173274.66666666666, ans=0.0 +2024-08-26 06:23:30,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=173328.0, ans=0.125 +2024-08-26 06:23:38,131 INFO [train.py:1114] (0/4) Epoch 14, batch 150, loss[loss=0.1824, simple_loss=0.2476, pruned_loss=0.04345, ctc_loss=0.07581, over 19719.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2858, pruned_loss=0.05892, ctc_loss=0.1113, over 2027452.09 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:24:00,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.34 vs. 
limit=15.0 +2024-08-26 06:24:12,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=173434.66666666666, ans=0.2 +2024-08-26 06:24:31,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=173541.33333333334, ans=0.0 +2024-08-26 06:24:46,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173594.66666666666, ans=0.125 +2024-08-26 06:24:49,746 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.676e+02 1.898e+02 2.213e+02 4.155e+02, threshold=3.795e+02, percent-clipped=1.0 +2024-08-26 06:25:00,466 INFO [train.py:1114] (0/4) Epoch 14, batch 200, loss[loss=0.2518, simple_loss=0.3073, pruned_loss=0.07121, ctc_loss=0.1345, over 18353.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2834, pruned_loss=0.05849, ctc_loss=0.1104, over 2435099.22 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:25:10,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.55 vs. limit=22.5 +2024-08-26 06:25:22,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=173648.0, ans=0.0 +2024-08-26 06:25:30,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=173701.33333333334, ans=0.0 +2024-08-26 06:25:44,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.46 vs. limit=22.5 +2024-08-26 06:25:53,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=173808.0, ans=0.125 +2024-08-26 06:26:09,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=173861.33333333334, ans=0.2 +2024-08-26 06:26:13,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=173861.33333333334, ans=0.0 +2024-08-26 06:26:16,062 INFO [train.py:1114] (0/4) Epoch 14, batch 250, loss[loss=0.2341, simple_loss=0.2899, pruned_loss=0.06461, ctc_loss=0.1225, over 19394.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2836, pruned_loss=0.05869, ctc_loss=0.1109, over 2755806.45 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:26:29,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173914.66666666666, ans=0.125 +2024-08-26 06:26:58,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=174021.33333333334, ans=0.0 +2024-08-26 06:27:18,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.683e+02 2.061e+02 2.648e+02 4.927e+02, threshold=4.123e+02, percent-clipped=4.0 +2024-08-26 06:27:21,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=174128.0, ans=0.015 +2024-08-26 06:27:28,140 INFO [train.py:1114] (0/4) Epoch 14, batch 300, loss[loss=0.2278, simple_loss=0.2836, pruned_loss=0.06351, ctc_loss=0.1124, over 19521.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2827, pruned_loss=0.05813, ctc_loss=0.1098, over 3001481.83 frames. 
], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:27:28,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=174181.33333333334, ans=0.125 +2024-08-26 06:27:49,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=174234.66666666666, ans=0.125 +2024-08-26 06:27:53,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=174288.0, ans=0.125 +2024-08-26 06:28:02,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174288.0, ans=0.1 +2024-08-26 06:28:28,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=174394.66666666666, ans=0.125 +2024-08-26 06:28:34,433 INFO [train.py:1114] (0/4) Epoch 14, batch 350, loss[loss=0.1805, simple_loss=0.2447, pruned_loss=0.04208, ctc_loss=0.08036, over 19759.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2822, pruned_loss=0.05768, ctc_loss=0.1088, over 3191850.72 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:29:16,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=174554.66666666666, ans=0.0 +2024-08-26 06:29:28,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=174608.0, ans=0.05 +2024-08-26 06:29:32,482 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.657e+02 1.894e+02 2.440e+02 4.007e+02, threshold=3.787e+02, percent-clipped=0.0 +2024-08-26 06:29:42,958 INFO [train.py:1114] (0/4) Epoch 14, batch 400, loss[loss=0.2454, simple_loss=0.3014, pruned_loss=0.06946, ctc_loss=0.1262, over 19496.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2821, pruned_loss=0.05783, ctc_loss=0.109, over 3342545.93 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:29:58,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=174768.0, ans=0.0 +2024-08-26 06:30:51,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=174928.0, ans=0.0 +2024-08-26 06:30:58,872 INFO [train.py:1114] (0/4) Epoch 14, batch 450, loss[loss=0.2194, simple_loss=0.2907, pruned_loss=0.0533, ctc_loss=0.1039, over 19609.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2825, pruned_loss=0.05801, ctc_loss=0.1095, over 3450915.96 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:31:05,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.00 vs. limit=15.0 +2024-08-26 06:31:39,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.83 vs. limit=10.0 +2024-08-26 06:32:11,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=175141.33333333334, ans=0.0 +2024-08-26 06:32:14,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.63 vs. 
limit=22.5 +2024-08-26 06:32:20,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.45 vs. limit=5.0 +2024-08-26 06:32:27,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=175141.33333333334, ans=0.125 +2024-08-26 06:32:32,612 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.702e+02 1.875e+02 2.205e+02 3.904e+02, threshold=3.749e+02, percent-clipped=2.0 +2024-08-26 06:32:59,817 INFO [train.py:1114] (0/4) Epoch 14, batch 500, loss[loss=0.2444, simple_loss=0.3138, pruned_loss=0.06432, ctc_loss=0.116, over 19681.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2816, pruned_loss=0.05745, ctc_loss=0.1083, over 3546652.26 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:32:59,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175248.0, ans=0.125 +2024-08-26 06:33:25,582 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.255e-01 +2024-08-26 06:33:25,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=175248.0, ans=0.07 +2024-08-26 06:33:26,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=175301.33333333334, ans=0.125 +2024-08-26 06:34:21,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=175461.33333333334, ans=0.025 +2024-08-26 06:34:32,952 INFO [train.py:1114] (0/4) Epoch 14, batch 550, loss[loss=0.2502, simple_loss=0.3082, pruned_loss=0.06965, ctc_loss=0.1323, over 19273.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2819, pruned_loss=0.05737, ctc_loss=0.1083, over 3608532.80 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:34:36,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=175514.66666666666, ans=0.125 +2024-08-26 06:34:47,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. 
limit=6.0 +2024-08-26 06:35:00,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=175568.0, ans=0.125 +2024-08-26 06:35:00,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=175568.0, ans=0.2 +2024-08-26 06:35:24,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175674.66666666666, ans=0.0 +2024-08-26 06:35:32,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=175674.66666666666, ans=0.125 +2024-08-26 06:35:36,411 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.729e+02 1.957e+02 2.291e+02 4.042e+02, threshold=3.913e+02, percent-clipped=2.0 +2024-08-26 06:35:59,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=175728.0, ans=0.5 +2024-08-26 06:36:15,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=175728.0, ans=0.125 +2024-08-26 06:36:18,853 INFO [train.py:1114] (0/4) Epoch 14, batch 600, loss[loss=0.2363, simple_loss=0.3006, pruned_loss=0.06274, ctc_loss=0.1165, over 19370.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2827, pruned_loss=0.05769, ctc_loss=0.1091, over 3666425.54 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:36:19,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175781.33333333334, ans=0.0 +2024-08-26 06:36:24,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=175781.33333333334, ans=0.0 +2024-08-26 06:36:27,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.02 vs. limit=15.0 +2024-08-26 06:37:41,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.52 vs. limit=15.0 +2024-08-26 06:37:44,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.65 vs. limit=22.5 +2024-08-26 06:38:39,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175888.0, ans=0.125 +2024-08-26 06:38:48,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=175888.0, ans=0.0 +2024-08-26 06:38:54,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0 +2024-08-26 06:39:24,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=175994.66666666666, ans=0.125 +2024-08-26 06:39:25,933 INFO [train.py:1114] (0/4) Epoch 14, batch 650, loss[loss=0.2378, simple_loss=0.3028, pruned_loss=0.06222, ctc_loss=0.1211, over 19762.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2823, pruned_loss=0.05745, ctc_loss=0.1085, over 3716649.43 frames. 
], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:40:13,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-08-26 06:40:38,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.67 vs. limit=15.0 +2024-08-26 06:40:41,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.57 vs. limit=15.0 +2024-08-26 06:40:54,862 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.34 vs. limit=15.0 +2024-08-26 06:40:55,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=176154.66666666666, ans=0.125 +2024-08-26 06:40:58,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=176154.66666666666, ans=0.2 +2024-08-26 06:41:14,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-08-26 06:41:22,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=176208.0, ans=10.0 +2024-08-26 06:41:24,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176208.0, ans=0.125 +2024-08-26 06:41:31,385 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.772e+02 2.123e+02 2.635e+02 4.354e+02, threshold=4.247e+02, percent-clipped=3.0 +2024-08-26 06:41:33,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176261.33333333334, ans=0.1 +2024-08-26 06:41:35,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=176261.33333333334, ans=0.025 +2024-08-26 06:41:45,038 INFO [train.py:1114] (0/4) Epoch 14, batch 700, loss[loss=0.2009, simple_loss=0.2688, pruned_loss=0.04809, ctc_loss=0.09225, over 19729.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2827, pruned_loss=0.05752, ctc_loss=0.1087, over 3748596.56 frames. 
], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:41:47,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176314.66666666666, ans=0.125 +2024-08-26 06:41:48,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=176314.66666666666, ans=0.04949747468305833 +2024-08-26 06:41:51,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=176314.66666666666, ans=0.0 +2024-08-26 06:41:52,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=176314.66666666666, ans=0.025 +2024-08-26 06:42:03,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=176368.0, ans=0.125 +2024-08-26 06:42:08,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.62 vs. limit=6.0 +2024-08-26 06:42:20,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=176421.33333333334, ans=0.2 +2024-08-26 06:42:27,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=176474.66666666666, ans=0.125 +2024-08-26 06:42:45,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=176528.0, ans=0.07 +2024-08-26 06:42:51,113 INFO [train.py:1114] (0/4) Epoch 14, batch 750, loss[loss=0.2169, simple_loss=0.2821, pruned_loss=0.05548, ctc_loss=0.1016, over 19505.00 frames. ], tot_loss[loss=0.22, simple_loss=0.282, pruned_loss=0.05738, ctc_loss=0.1081, over 3775051.89 frames. 
], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:42:56,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=176581.33333333334, ans=0.125 +2024-08-26 06:43:07,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=176581.33333333334, ans=0.125 +2024-08-26 06:43:18,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=176634.66666666666, ans=0.0 +2024-08-26 06:44:13,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=176741.33333333334, ans=0.125 +2024-08-26 06:44:25,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=176794.66666666666, ans=0.125 +2024-08-26 06:44:27,361 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.803e+02 2.358e+02 3.080e+02 4.835e+02, threshold=4.715e+02, percent-clipped=7.0 +2024-08-26 06:44:29,884 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:44:32,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=176794.66666666666, ans=0.04949747468305833 +2024-08-26 06:44:39,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176794.66666666666, ans=0.1 +2024-08-26 06:44:41,993 INFO [train.py:1114] (0/4) Epoch 14, batch 800, loss[loss=0.1853, simple_loss=0.2533, pruned_loss=0.04223, ctc_loss=0.08197, over 19409.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2813, pruned_loss=0.05681, ctc_loss=0.1072, over 3795596.12 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:44:52,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=176901.33333333334, ans=0.125 +2024-08-26 06:44:55,090 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:45:37,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177061.33333333334, ans=0.125 +2024-08-26 06:45:52,035 INFO [train.py:1114] (0/4) Epoch 14, batch 850, loss[loss=0.2294, simple_loss=0.2995, pruned_loss=0.05749, ctc_loss=0.1109, over 19642.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2814, pruned_loss=0.05716, ctc_loss=0.1078, over 3814865.22 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:46:17,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=177114.66666666666, ans=0.07 +2024-08-26 06:46:47,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.55 vs. limit=15.0 +2024-08-26 06:46:49,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.87 vs. 
limit=15.0 +2024-08-26 06:47:11,725 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.690e+02 1.974e+02 2.351e+02 3.908e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 06:47:24,575 INFO [train.py:1114] (0/4) Epoch 14, batch 900, loss[loss=0.2022, simple_loss=0.263, pruned_loss=0.05119, ctc_loss=0.09751, over 19784.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2818, pruned_loss=0.05748, ctc_loss=0.1082, over 3818440.25 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:47:27,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=177381.33333333334, ans=0.0 +2024-08-26 06:47:35,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=177434.66666666666, ans=0.2 +2024-08-26 06:48:02,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.44 vs. limit=12.0 +2024-08-26 06:48:06,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.36 vs. limit=15.0 +2024-08-26 06:48:38,038 INFO [train.py:1114] (0/4) Epoch 14, batch 950, loss[loss=0.209, simple_loss=0.2698, pruned_loss=0.05439, ctc_loss=0.09887, over 19503.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2821, pruned_loss=0.05769, ctc_loss=0.1086, over 3820636.09 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:48:41,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=177648.0, ans=0.2 +2024-08-26 06:48:52,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-26 06:49:08,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=177754.66666666666, ans=0.0 +2024-08-26 06:49:15,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=177754.66666666666, ans=0.0 +2024-08-26 06:49:28,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=177808.0, ans=0.0 +2024-08-26 06:49:34,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=177861.33333333334, ans=0.0 +2024-08-26 06:49:36,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.810e+02 2.092e+02 2.519e+02 4.035e+02, threshold=4.185e+02, percent-clipped=1.0 +2024-08-26 06:49:44,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=177861.33333333334, ans=0.025 +2024-08-26 06:50:04,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=177861.33333333334, ans=0.0 +2024-08-26 06:50:06,722 INFO [train.py:1114] (0/4) Epoch 14, batch 1000, loss[loss=0.2074, simple_loss=0.2651, pruned_loss=0.05506, ctc_loss=0.09895, over 19854.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2831, pruned_loss=0.05852, ctc_loss=0.1102, over 3816509.39 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:50:17,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177968.0, ans=0.125 +2024-08-26 06:50:31,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1 +2024-08-26 06:50:46,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=178074.66666666666, ans=0.0 +2024-08-26 06:50:47,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178074.66666666666, ans=0.125 +2024-08-26 06:50:50,779 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:51:23,205 INFO [train.py:1114] (0/4) Epoch 14, batch 1050, loss[loss=0.2143, simple_loss=0.2849, pruned_loss=0.05178, ctc_loss=0.1003, over 19845.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2821, pruned_loss=0.05799, ctc_loss=0.1092, over 3823872.80 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:51:40,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.45 vs. limit=10.0 +2024-08-26 06:51:41,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178234.66666666666, ans=0.125 +2024-08-26 06:51:51,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178288.0, ans=0.125 +2024-08-26 06:52:05,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=178341.33333333334, ans=0.0 +2024-08-26 06:52:17,054 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.429e+02 1.767e+02 2.034e+02 2.568e+02 4.426e+02, threshold=4.067e+02, percent-clipped=2.0 +2024-08-26 06:52:20,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=178394.66666666666, ans=0.125 +2024-08-26 06:52:39,173 INFO [train.py:1114] (0/4) Epoch 14, batch 1100, loss[loss=0.2002, simple_loss=0.2668, pruned_loss=0.04857, ctc_loss=0.09117, over 19612.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2815, pruned_loss=0.05758, ctc_loss=0.1083, over 3831458.89 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:52:48,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=178448.0, ans=0.2 +2024-08-26 06:52:49,705 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.57 vs. limit=22.5 +2024-08-26 06:53:36,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=8.76 vs. limit=12.0 +2024-08-26 06:53:39,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.76 vs. limit=15.0 +2024-08-26 06:53:49,716 INFO [train.py:1114] (0/4) Epoch 14, batch 1150, loss[loss=0.2215, simple_loss=0.2785, pruned_loss=0.05933, ctc_loss=0.1145, over 19593.00 frames. 
], tot_loss[loss=0.2199, simple_loss=0.2813, pruned_loss=0.05756, ctc_loss=0.1084, over 3828830.16 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:54:09,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=178768.0, ans=0.125 +2024-08-26 06:54:11,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=178768.0, ans=0.2 +2024-08-26 06:54:24,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=178821.33333333334, ans=0.025 +2024-08-26 06:54:29,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.54 vs. limit=15.0 +2024-08-26 06:54:34,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=178874.66666666666, ans=0.0 +2024-08-26 06:54:47,795 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.255e+02 1.672e+02 1.916e+02 2.259e+02 4.129e+02, threshold=3.832e+02, percent-clipped=1.0 +2024-08-26 06:54:58,197 INFO [train.py:1114] (0/4) Epoch 14, batch 1200, loss[loss=0.2426, simple_loss=0.3023, pruned_loss=0.06683, ctc_loss=0.123, over 19839.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2827, pruned_loss=0.058, ctc_loss=0.1094, over 3825820.21 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:55:10,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179034.66666666666, ans=0.1 +2024-08-26 06:55:13,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=179034.66666666666, ans=0.0 +2024-08-26 06:55:20,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=179034.66666666666, ans=0.1 +2024-08-26 06:55:20,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=179034.66666666666, ans=0.0 +2024-08-26 06:55:36,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179141.33333333334, ans=0.1 +2024-08-26 06:56:27,985 INFO [train.py:1114] (0/4) Epoch 14, batch 1250, loss[loss=0.2243, simple_loss=0.2905, pruned_loss=0.0578, ctc_loss=0.1063, over 19531.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2826, pruned_loss=0.05765, ctc_loss=0.1084, over 3843334.00 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:56:32,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=179248.0, ans=0.125 +2024-08-26 06:56:44,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.94 vs. 
limit=6.0 +2024-08-26 06:56:47,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=179301.33333333334, ans=0.2 +2024-08-26 06:57:56,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=179408.0, ans=0.125 +2024-08-26 06:57:56,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=179408.0, ans=0.0 +2024-08-26 06:57:57,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=179408.0, ans=0.2 +2024-08-26 06:58:13,350 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.864e+02 2.134e+02 2.537e+02 3.723e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-08-26 06:58:14,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.78 vs. limit=15.0 +2024-08-26 06:58:31,276 INFO [train.py:1114] (0/4) Epoch 14, batch 1300, loss[loss=0.2353, simple_loss=0.2991, pruned_loss=0.0609, ctc_loss=0.1242, over 18833.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.282, pruned_loss=0.05724, ctc_loss=0.1077, over 3847330.15 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:58:36,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=179514.66666666666, ans=0.2 +2024-08-26 06:58:39,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-26 06:58:59,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=179514.66666666666, ans=0.2 +2024-08-26 06:59:00,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=179568.0, ans=0.2 +2024-08-26 06:59:10,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=179568.0, ans=0.125 +2024-08-26 06:59:20,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.86 vs. limit=15.0 +2024-08-26 06:59:50,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.45 vs. limit=6.0 +2024-08-26 07:00:13,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=179728.0, ans=0.125 +2024-08-26 07:00:24,875 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:00:32,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179728.0, ans=0.0 +2024-08-26 07:00:35,357 INFO [train.py:1114] (0/4) Epoch 14, batch 1350, loss[loss=0.2216, simple_loss=0.279, pruned_loss=0.05973, ctc_loss=0.1115, over 19770.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2818, pruned_loss=0.05726, ctc_loss=0.1077, over 3858687.41 frames. 
], batch size: 54, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:01:19,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=179834.66666666666, ans=0.025 +2024-08-26 07:02:10,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=179941.33333333334, ans=0.125 +2024-08-26 07:02:26,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.690e+02 1.870e+02 2.214e+02 3.706e+02, threshold=3.740e+02, percent-clipped=0.0 +2024-08-26 07:02:31,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.85 vs. limit=22.5 +2024-08-26 07:02:47,345 INFO [train.py:1114] (0/4) Epoch 14, batch 1400, loss[loss=0.2116, simple_loss=0.2713, pruned_loss=0.05417, ctc_loss=0.1088, over 19674.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2819, pruned_loss=0.05722, ctc_loss=0.1075, over 3864940.55 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:03:02,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=180048.0, ans=0.015 +2024-08-26 07:03:29,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=21.26 vs. limit=22.5 +2024-08-26 07:03:34,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.94 vs. limit=22.5 +2024-08-26 07:03:38,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=180154.66666666666, ans=0.125 +2024-08-26 07:04:20,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180261.33333333334, ans=0.125 +2024-08-26 07:04:21,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.14 vs. limit=6.0 +2024-08-26 07:04:25,282 INFO [train.py:1114] (0/4) Epoch 14, batch 1450, loss[loss=0.2382, simple_loss=0.2963, pruned_loss=0.06556, ctc_loss=0.1224, over 19668.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2825, pruned_loss=0.05746, ctc_loss=0.1081, over 3862868.32 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:04:46,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=180368.0, ans=0.0 +2024-08-26 07:04:46,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180368.0, ans=0.1 +2024-08-26 07:05:05,330 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:05:21,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.98 vs. limit=15.0 +2024-08-26 07:05:24,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.47 vs. 
limit=15.0 +2024-08-26 07:05:28,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=180474.66666666666, ans=0.125 +2024-08-26 07:05:40,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=180528.0, ans=0.025 +2024-08-26 07:05:41,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.716e+02 1.963e+02 2.339e+02 6.137e+02, threshold=3.925e+02, percent-clipped=1.0 +2024-08-26 07:05:57,995 INFO [train.py:1114] (0/4) Epoch 14, batch 1500, loss[loss=0.2157, simple_loss=0.2815, pruned_loss=0.055, ctc_loss=0.09994, over 19588.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2827, pruned_loss=0.05756, ctc_loss=0.1083, over 3861664.46 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:07:02,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0 +2024-08-26 07:07:07,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=180741.33333333334, ans=0.05 +2024-08-26 07:07:18,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=180794.66666666666, ans=0.125 +2024-08-26 07:07:26,408 INFO [train.py:1114] (0/4) Epoch 14, batch 1550, loss[loss=0.2374, simple_loss=0.2973, pruned_loss=0.06405, ctc_loss=0.1236, over 19602.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2822, pruned_loss=0.05747, ctc_loss=0.1083, over 3847541.67 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:07:52,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-08-26 07:07:54,873 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:07:58,460 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:08:00,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=180954.66666666666, ans=0.2 +2024-08-26 07:08:20,838 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.735e+02 1.996e+02 2.323e+02 4.332e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-26 07:08:46,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=181114.66666666666, ans=0.2 +2024-08-26 07:08:46,938 INFO [train.py:1114] (0/4) Epoch 14, batch 1600, loss[loss=0.2169, simple_loss=0.2818, pruned_loss=0.05606, ctc_loss=0.09974, over 19828.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2814, pruned_loss=0.057, ctc_loss=0.1074, over 3837473.80 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:09:17,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=181168.0, ans=0.125 +2024-08-26 07:09:21,125 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:09:26,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=181221.33333333334, ans=0.125 +2024-08-26 07:09:31,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=181221.33333333334, ans=0.2 +2024-08-26 07:09:38,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181274.66666666666, ans=0.1 +2024-08-26 07:10:18,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181328.0, ans=0.125 +2024-08-26 07:10:22,357 INFO [train.py:1114] (0/4) Epoch 14, batch 1650, loss[loss=0.2128, simple_loss=0.2888, pruned_loss=0.049, ctc_loss=0.09699, over 19654.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2814, pruned_loss=0.05712, ctc_loss=0.1076, over 3833134.13 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:10:23,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=181381.33333333334, ans=0.5 +2024-08-26 07:10:36,234 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:10:36,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181434.66666666666, ans=0.0 +2024-08-26 07:11:10,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.857e+02 2.243e+02 2.957e+02 5.258e+02, threshold=4.486e+02, percent-clipped=5.0 +2024-08-26 07:11:28,242 INFO [train.py:1114] (0/4) Epoch 14, batch 1700, loss[loss=0.1856, simple_loss=0.2466, pruned_loss=0.04521, ctc_loss=0.0856, over 19703.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2809, pruned_loss=0.05656, ctc_loss=0.1064, over 3847514.77 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:11:40,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-08-26 07:12:23,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=181914.66666666666, ans=0.025 +2024-08-26 07:12:24,392 INFO [train.py:1114] (0/4) Epoch 14, batch 1750, loss[loss=0.1641, simple_loss=0.2277, pruned_loss=0.03648, ctc_loss=0.06901, over 19617.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2805, pruned_loss=0.0563, ctc_loss=0.1061, over 3852353.23 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:12:30,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=22.5 +2024-08-26 07:12:35,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.80 vs. 
limit=15.0 +2024-08-26 07:13:14,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=182021.33333333334, ans=0.0 +2024-08-26 07:13:19,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=182021.33333333334, ans=0.125 +2024-08-26 07:13:35,902 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.769e+02 2.123e+02 2.747e+02 4.234e+02, threshold=4.245e+02, percent-clipped=0.0 +2024-08-26 07:13:38,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182128.0, ans=0.1 +2024-08-26 07:13:42,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=182128.0, ans=0.1 +2024-08-26 07:13:51,683 INFO [train.py:1114] (0/4) Epoch 14, batch 1800, loss[loss=0.2173, simple_loss=0.2756, pruned_loss=0.05814, ctc_loss=0.1068, over 19592.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2809, pruned_loss=0.05647, ctc_loss=0.1064, over 3852353.41 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:14:13,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0 +2024-08-26 07:14:25,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.21 vs. limit=15.0 +2024-08-26 07:14:33,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=182341.33333333334, ans=0.2 +2024-08-26 07:14:36,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=182341.33333333334, ans=0.125 +2024-08-26 07:14:49,558 INFO [train.py:1114] (0/4) Epoch 14, batch 1850, loss[loss=0.2094, simple_loss=0.282, pruned_loss=0.05006, ctc_loss=0.09181, over 19596.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.281, pruned_loss=0.05648, ctc_loss=0.1063, over 3854748.92 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:14:53,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.29 vs. limit=10.0 +2024-08-26 07:14:57,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182448.0, ans=0.125 +2024-08-26 07:14:58,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=182448.0, ans=0.125 +2024-08-26 07:15:01,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-08-26 07:15:14,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.02 vs. 
limit=12.0 +2024-08-26 07:15:20,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=182554.66666666666, ans=0.0 +2024-08-26 07:15:31,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=182608.0, ans=0.025 +2024-08-26 07:15:35,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.755e+02 2.000e+02 2.500e+02 5.147e+02, threshold=4.001e+02, percent-clipped=3.0 +2024-08-26 07:15:52,271 INFO [train.py:1114] (0/4) Epoch 14, batch 1900, loss[loss=0.2068, simple_loss=0.2914, pruned_loss=0.0443, ctc_loss=0.08373, over 19628.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2815, pruned_loss=0.05662, ctc_loss=0.1065, over 3861097.39 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:16:02,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.47 vs. limit=15.0 +2024-08-26 07:16:02,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=182714.66666666666, ans=0.025 +2024-08-26 07:16:12,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182768.0, ans=0.125 +2024-08-26 07:16:18,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=182821.33333333334, ans=0.015 +2024-08-26 07:16:19,071 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:16:41,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182928.0, ans=0.125 +2024-08-26 07:16:56,694 INFO [train.py:1114] (0/4) Epoch 14, batch 1950, loss[loss=0.1885, simple_loss=0.2594, pruned_loss=0.0431, ctc_loss=0.07879, over 19583.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2823, pruned_loss=0.05654, ctc_loss=0.1063, over 3870300.04 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:17:16,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.34 vs. limit=15.0 +2024-08-26 07:17:47,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183141.33333333334, ans=0.1 +2024-08-26 07:17:51,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.75 vs. limit=6.0 +2024-08-26 07:17:55,527 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.666e+02 1.941e+02 2.281e+02 4.229e+02, threshold=3.882e+02, percent-clipped=1.0 +2024-08-26 07:18:05,617 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:18:11,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=183194.66666666666, ans=0.1 +2024-08-26 07:18:14,113 INFO [train.py:1114] (0/4) Epoch 14, batch 2000, loss[loss=0.1698, simple_loss=0.2401, pruned_loss=0.03565, ctc_loss=0.07049, over 19643.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2829, pruned_loss=0.05695, ctc_loss=0.1072, over 3854479.09 frames. 
], batch size: 45, lr: 1.06e-02, grad_scale: 64.0 +2024-08-26 07:18:30,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=183301.33333333334, ans=0.2 +2024-08-26 07:18:43,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=183354.66666666666, ans=0.125 +2024-08-26 07:19:11,484 INFO [train.py:1114] (0/4) Epoch 14, batch 2050, loss[loss=0.1877, simple_loss=0.2491, pruned_loss=0.04625, ctc_loss=0.08475, over 19725.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2821, pruned_loss=0.05706, ctc_loss=0.1073, over 3850346.95 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:19:38,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=183568.0, ans=0.125 +2024-08-26 07:19:38,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=183568.0, ans=0.5 +2024-08-26 07:19:43,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-26 07:19:53,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=183674.66666666666, ans=0.125 +2024-08-26 07:20:51,599 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.705e+02 1.994e+02 2.461e+02 3.917e+02, threshold=3.988e+02, percent-clipped=1.0 +2024-08-26 07:24:48,279 INFO [train.py:1114] (0/4) Epoch 14, batch 2100, loss[loss=0.2134, simple_loss=0.2771, pruned_loss=0.05456, ctc_loss=0.1011, over 19758.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.281, pruned_loss=0.05658, ctc_loss=0.1065, over 3857367.71 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:41:49,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=183834.66666666666, ans=0.125 +2024-08-26 07:45:05,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. limit=10.0 +2024-08-26 07:50:50,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=183888.0, ans=0.125 +2024-08-26 08:13:15,360 INFO [train.py:1114] (0/4) Epoch 14, batch 2150, loss[loss=0.2287, simple_loss=0.2881, pruned_loss=0.0614, ctc_loss=0.1163, over 19570.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2811, pruned_loss=0.05695, ctc_loss=0.1068, over 3868244.08 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 08:24:49,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=12.0 +2024-08-26 08:31:59,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=184101.33333333334, ans=0.2 +2024-08-26 08:40:58,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.82 vs. 
limit=15.0 +2024-08-26 08:59:37,607 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.765e+02 2.052e+02 2.784e+02 6.261e+02, threshold=4.104e+02, percent-clipped=7.0 +2024-08-26 09:03:09,781 INFO [train.py:1114] (0/4) Epoch 14, batch 2200, loss[loss=0.2102, simple_loss=0.2827, pruned_loss=0.05042, ctc_loss=0.09214, over 19567.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.281, pruned_loss=0.05674, ctc_loss=0.1065, over 3866837.54 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 09:05:46,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184314.66666666666, ans=0.125 +2024-08-26 09:05:47,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.76 vs. limit=22.5 +2024-08-26 09:11:59,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=184368.0, ans=0.2 +2024-08-26 09:12:27,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=15.0 +2024-08-26 09:16:07,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=184421.33333333334, ans=0.125 +2024-08-26 09:17:25,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184421.33333333334, ans=0.1 +2024-08-26 09:18:58,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184474.66666666666, ans=0.1 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-1 new file mode 100644 index 0000000000000000000000000000000000000000..5cd3ef91edecdb8892b315ee1903c8529e197518 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-1 @@ -0,0 +1,4963 @@ +2024-08-25 02:23:27,598 INFO [train.py:1182] (1/4) Training started +2024-08-25 02:23:28,628 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-25 02:23:28,631 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': 
PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-25 02:23:28,631 INFO [train.py:1212] (1/4) About to create model +2024-08-25 02:23:29,330 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-25 02:23:29,457 INFO [train.py:1231] (1/4) Using DDP +2024-08-25 02:23:51,118 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-25 02:23:53,073 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-25 02:23:53,074 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-25 02:23:53,293 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-25 02:23:53,346 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-25 02:23:53,652 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-25 02:23:53,652 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-25 02:27:49,592 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=9.26 vs. 
limit=3.0 +2024-08-25 02:27:50,706 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12815MB +2024-08-25 02:27:52,179 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12945MB +2024-08-25 02:28:01,908 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12945MB +2024-08-25 02:28:03,367 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12945MB +2024-08-25 02:28:25,926 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=256, metric=46.49 vs. limit=7.5 +2024-08-25 02:28:26,159 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12945MB +2024-08-25 02:28:27,781 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12945MB +2024-08-25 02:29:15,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=7.5 +2024-08-25 02:29:16,119 INFO [train.py:1114] (1/4) Epoch 1, batch 0, loss[loss=8.848, simple_loss=7.21, pruned_loss=6.822, ctc_loss=4.776, over 19825.00 frames. ], tot_loss[loss=8.848, simple_loss=7.21, pruned_loss=6.822, ctc_loss=4.776, over 19825.00 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 1.0 +2024-08-25 02:29:16,120 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 02:29:29,444 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=8.973, simple_loss=7.311, pruned_loss=6.819, ctc_loss=4.895, over 944034.00 frames. +2024-08-25 02:29:29,445 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-25 02:29:37,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=0.0, ans=0.2 +2024-08-25 02:29:37,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=0.0, ans=0.1 +2024-08-25 02:29:40,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.31 vs. limit=7.5 +2024-08-25 02:29:47,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.30 vs. limit=7.5 +2024-08-25 02:30:12,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.35 vs. limit=7.52 +2024-08-25 02:30:23,438 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+03 3.750e+03 4.817e+03 5.615e+03 6.551e+03, threshold=1.927e+04, percent-clipped=0.0 +2024-08-25 02:30:23,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.55 vs. limit=7.52 +2024-08-25 02:30:47,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=72.34 vs. 
limit=7.52 +2024-08-25 02:32:25,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106.66666666666667, ans=0.4866666666666667 +2024-08-25 02:32:26,056 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.867e+02 1.019e+03 3.714e+03 5.063e+03 6.846e+03, threshold=1.486e+04, percent-clipped=0.0 +2024-08-25 02:32:44,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=106.66666666666667, ans=0.196 +2024-08-25 02:32:44,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=71.66 vs. limit=7.54 +2024-08-25 02:33:14,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=39.59 vs. limit=5.08 +2024-08-25 02:33:20,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=323.22 vs. limit=7.56 +2024-08-25 02:33:28,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.22 vs. limit=3.032 +2024-08-25 02:33:29,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=213.33333333333334, ans=0.49 +2024-08-25 02:33:34,442 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 02:33:36,820 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+02 7.649e+02 1.076e+03 3.731e+03 6.846e+03, threshold=4.304e+03, percent-clipped=0.0 +2024-08-25 02:33:41,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=33.01 vs. limit=7.58 +2024-08-25 02:33:47,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=213.33333333333334, ans=0.8925333333333334 +2024-08-25 02:33:59,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=109.20 vs. limit=7.66 +2024-08-25 02:34:03,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=266.6666666666667, ans=0.20400000000000001 +2024-08-25 02:34:04,704 INFO [train.py:1114] (1/4) Epoch 1, batch 50, loss[loss=1.447, simple_loss=1.077, pruned_loss=1.253, ctc_loss=1.145, over 19710.00 frames. ], tot_loss[loss=3.556, simple_loss=2.937, pruned_loss=2.558, ctc_loss=1.778, over 844643.19 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 02:34:24,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=95.63 vs. limit=7.6 +2024-08-25 02:34:25,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=40.68 vs. limit=7.62 +2024-08-25 02:34:33,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=42.81 vs. 
limit=7.62 +2024-08-25 02:34:34,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=320.0, ans=0.008 +2024-08-25 02:34:41,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=35.33 vs. limit=5.08 +2024-08-25 02:35:24,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=11.15 vs. limit=4.149333333333333 +2024-08-25 02:35:30,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=426.6666666666667, ans=0.48 +2024-08-25 02:35:32,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=426.6666666666667, ans=0.184 +2024-08-25 02:35:36,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=38.62 vs. limit=7.82 +2024-08-25 02:37:51,538 INFO [train.py:1114] (1/4) Epoch 1, batch 100, loss[loss=1.337, simple_loss=0.9588, pruned_loss=1.217, ctc_loss=1.128, over 19760.00 frames. ], tot_loss[loss=2.415, simple_loss=1.917, pruned_loss=1.866, ctc_loss=1.472, over 1498832.64 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 02:37:55,735 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.639e+01 1.517e+02 3.832e+02 1.019e+03 9.054e+03, threshold=7.665e+02, percent-clipped=2.0 +2024-08-25 02:38:07,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=586.6666666666666, ans=0.2941333333333333 +2024-08-25 02:38:08,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=586.6666666666666, ans=0.0868 +2024-08-25 02:38:17,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=586.6666666666666, ans=7.94 +2024-08-25 02:38:23,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=586.6666666666666, ans=5.366666666666666 +2024-08-25 02:38:34,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=113.18 vs. limit=7.72 +2024-08-25 02:38:34,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=44.73 vs. limit=7.94 +2024-08-25 02:38:53,873 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=38.46 vs. limit=8.02 +2024-08-25 02:38:56,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=133.57 vs. limit=7.76 +2024-08-25 02:39:01,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=693.3333333333334, ans=0.4675 +2024-08-25 02:39:02,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=71.15 vs. 
limit=5.346666666666667 +2024-08-25 02:39:02,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=208.72 vs. limit=5.346666666666667 +2024-08-25 02:39:10,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=746.6666666666666, ans=0.006986666666666667 +2024-08-25 02:39:11,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=57.76 vs. limit=7.78 +2024-08-25 02:39:12,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=746.6666666666666, ans=0.2925333333333333 +2024-08-25 02:39:12,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=131.28 vs. limit=5.373333333333333 +2024-08-25 02:39:18,428 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=38.71 vs. limit=5.1866666666666665 +2024-08-25 02:39:22,877 INFO [train.py:1114] (1/4) Epoch 1, batch 150, loss[loss=1.132, simple_loss=0.7871, pruned_loss=0.9836, ctc_loss=1.049, over 19707.00 frames. ], tot_loss[loss=1.944, simple_loss=1.493, pruned_loss=1.565, ctc_loss=1.345, over 2028052.72 frames. ], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 02:39:26,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=268.75 vs. limit=7.8 +2024-08-25 02:39:28,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=800.0, ans=0.872 +2024-08-25 02:39:30,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=12.42 vs. limit=5.2 +2024-08-25 02:39:36,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=97.72 vs. limit=7.82 +2024-08-25 02:39:46,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=91.39 vs. limit=7.82 +2024-08-25 02:39:46,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=180.44 vs. limit=4.1706666666666665 +2024-08-25 02:39:51,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.00 vs. limit=8.18 +2024-08-25 02:39:55,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=14.20 vs. limit=7.84 +2024-08-25 02:40:03,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=960.0, ans=0.455 +2024-08-25 02:40:06,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=30.67 vs. limit=5.48 +2024-08-25 02:40:08,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=327.07 vs. 
+2024-08-25 02:40:18,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=51.96 vs. limit=7.88
+2024-08-25 02:40:27,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=28.21 vs. limit=5.506666666666667
+2024-08-25 02:40:31,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=1066.6666666666667, ans=0.16
+2024-08-25 02:40:32,728 INFO [train.py:1114] (1/4) Epoch 1, batch 200, loss[loss=1.274, simple_loss=0.8773, pruned_loss=1.021, ctc_loss=1.229, over 18088.00 frames. ], tot_loss[loss=1.688, simple_loss=1.263, pruned_loss=1.375, ctc_loss=1.278, over 2435338.20 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0
+2024-08-25 02:40:33,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=9.13 vs. limit=5.266666666666667
+2024-08-25 02:40:33,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=31.47 vs. limit=7.9
+2024-08-25 02:40:34,962 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=4.426666666666667
+2024-08-25 02:40:36,941 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.587e+01 1.185e+02 1.545e+02 1.999e+02 4.229e+02, threshold=3.089e+02, percent-clipped=0.0
+2024-08-25 02:40:57,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1066.6666666666667, ans=0.28933333333333333
+2024-08-25 02:40:59,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=18.70 vs. limit=7.9
+2024-08-25 02:41:07,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=82.19 vs. limit=7.92
+2024-08-25 02:41:13,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=1120.0, ans=0.8608
+2024-08-25 02:41:15,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=13.22 vs. limit=5.28
+2024-08-25 02:41:15,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=84.98 vs. limit=7.92
+2024-08-25 02:41:17,927 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 02:41:22,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=1173.3333333333333, ans=0.445
+2024-08-25 02:41:24,100 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.90 vs. limit=8.38
+2024-08-25 02:41:31,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=28.45 vs. limit=8.38
+2024-08-25 02:41:34,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=95.54 vs. limit=7.94
+2024-08-25 02:41:45,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=59.78 vs. limit=7.96
+2024-08-25 02:41:50,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=181.32 vs. limit=7.96
+2024-08-25 02:41:57,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=1280.0, ans=0.44
+2024-08-25 02:42:06,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=113.56 vs. limit=5.64
+2024-08-25 02:42:11,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=26.09 vs. limit=8.0
+2024-08-25 02:42:11,847 INFO [train.py:1114] (1/4) Epoch 1, batch 250, loss[loss=1.243, simple_loss=0.8419, pruned_loss=0.991, ctc_loss=1.214, over 19372.00 frames. ], tot_loss[loss=1.534, simple_loss=1.122, pruned_loss=1.247, ctc_loss=1.242, over 2755780.74 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0
+2024-08-25 02:42:15,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=12.07 vs. limit=5.333333333333333
+2024-08-25 02:42:26,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=77.01 vs. limit=8.0
+2024-08-25 02:42:32,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=1386.6666666666667, ans=0.5
+2024-08-25 02:42:34,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.14 vs. limit=4.554666666666667
+2024-08-25 02:42:38,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.09 vs. limit=5.346666666666667
+2024-08-25 02:42:39,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=106.23 vs. limit=8.02
+2024-08-25 02:42:48,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=1440.0, ans=0.0676
+2024-08-25 02:42:52,287 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=5.708e+00
+2024-08-25 02:42:52,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=89.65 vs. limit=8.04
+2024-08-25 02:42:54,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=76.40 vs. limit=8.04
+2024-08-25 02:42:54,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=39.68 vs. limit=8.04
+2024-08-25 02:42:56,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=114.09 vs. limit=8.06
+2024-08-25 02:43:01,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.58 vs. limit=8.620000000000001
+2024-08-25 02:43:02,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.53 vs. limit=8.620000000000001
+2024-08-25 02:43:08,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=1546.6666666666667, ans=0.4275
+2024-08-25 02:43:10,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=1546.6666666666667, ans=0.163
+2024-08-25 02:43:14,521 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=249.09 vs. limit=8.08
+2024-08-25 02:43:17,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.10 vs. limit=5.386666666666667
+2024-08-25 02:43:18,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=32.79 vs. limit=4.618666666666667
+2024-08-25 02:43:21,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=1546.6666666666667, ans=0.06520000000000001
+2024-08-25 02:43:23,906 INFO [train.py:1114] (1/4) Epoch 1, batch 300, loss[loss=1.224, simple_loss=0.819, pruned_loss=0.9581, ctc_loss=1.207, over 19521.00 frames. ], tot_loss[loss=1.433, simple_loss=1.028, pruned_loss=1.158, ctc_loss=1.218, over 3000944.94 frames. ], batch size: 61, lr: 3.60e-02, grad_scale: 2.0
+2024-08-25 02:43:24,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=1600.0, ans=0.16
+2024-08-25 02:43:24,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=74.68 vs. limit=8.7
+2024-08-25 02:43:26,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=40.53 vs. limit=8.1
+2024-08-25 02:43:27,978 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.516e+01 1.281e+02 1.784e+02 2.457e+02 1.092e+03, threshold=3.568e+02, percent-clipped=12.0
+2024-08-25 02:43:30,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.03 vs. limit=5.4
+2024-08-25 02:43:37,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=1653.3333333333333, ans=0.157
+2024-08-25 02:43:44,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=1653.3333333333333, ans=0.29333333333333333
+2024-08-25 02:43:50,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.98 vs. limit=5.413333333333333
+2024-08-25 02:44:00,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=1706.6666666666667, ans=0.42
+2024-08-25 02:44:00,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=28.57 vs. limit=5.8533333333333335
+2024-08-25 02:44:03,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=28.73 vs. limit=8.78
+2024-08-25 02:44:03,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=8.98 vs. limit=5.426666666666667
+2024-08-25 02:44:08,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1760.0, ans=0.4175
+2024-08-25 02:44:19,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.00 vs. limit=4.704
+2024-08-25 02:44:24,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=182.27 vs. limit=5.906666666666666
+2024-08-25 02:44:27,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=8.86
+2024-08-25 02:44:29,687 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=9.306e-01
+2024-08-25 02:44:31,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.29 vs. limit=8.86
+2024-08-25 02:44:31,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=1813.3333333333333, ans=8.86
+2024-08-25 02:44:31,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=24.75 vs. limit=8.18
+2024-08-25 02:44:35,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=34.33 vs. limit=8.18
+2024-08-25 02:44:36,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.97 vs. limit=5.906666666666666
+2024-08-25 02:44:37,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=1866.6666666666667, ans=0.4125
+2024-08-25 02:44:38,844 INFO [train.py:1114] (1/4) Epoch 1, batch 350, loss[loss=1.019, simple_loss=0.6725, pruned_loss=0.7912, ctc_loss=1.006, over 19751.00 frames. ], tot_loss[loss=1.367, simple_loss=0.9642, pruned_loss=1.095, ctc_loss=1.205, over 3190480.39 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 2.0
+2024-08-25 02:44:42,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=24.80 vs. limit=5.933333333333334
+2024-08-25 02:44:44,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=1866.6666666666667, ans=8.2
+2024-08-25 02:44:46,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1866.6666666666667, ans=0.4125
+2024-08-25 02:44:49,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1866.6666666666667, ans=0.2813333333333333
+2024-08-25 02:45:01,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=1920.0, ans=0.0568
+2024-08-25 02:45:02,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.82 vs. limit=5.48
+2024-08-25 02:45:06,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=41.56 vs. limit=8.24
+2024-08-25 02:45:15,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.15 vs. limit=8.98
+2024-08-25 02:45:17,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.35 vs. limit=8.98
+2024-08-25 02:45:26,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=2026.6666666666667, ans=0.24666666666666665
+2024-08-25 02:45:28,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=75.45 vs. limit=8.26
+2024-08-25 02:45:36,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=87.72 vs. limit=8.26
+2024-08-25 02:46:55,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=40.82 vs. limit=8.28
+2024-08-25 02:47:02,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=57.15 vs. limit=8.28
+2024-08-25 02:47:08,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=35.89 vs. limit=9.1
+2024-08-25 02:47:09,641 INFO [train.py:1114] (1/4) Epoch 1, batch 400, loss[loss=1.217, simple_loss=0.8005, pruned_loss=0.9253, ctc_loss=1.186, over 19878.00 frames. ], tot_loss[loss=1.317, simple_loss=0.9151, pruned_loss=1.045, ctc_loss=1.19, over 3342068.16 frames. ], batch size: 55, lr: 4.05e-02, grad_scale: 4.0
+2024-08-25 02:47:10,151 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=75.98 vs. limit=8.3
+2024-08-25 02:47:13,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=30.55 vs. limit=8.3
+2024-08-25 02:47:13,850 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.873e+01 1.501e+02 1.913e+02 2.464e+02 6.763e+02, threshold=3.826e+02, percent-clipped=7.0
+2024-08-25 02:47:14,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=21.34 vs. limit=8.3
+2024-08-25 02:47:20,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.98 vs. limit=4.8533333333333335
+2024-08-25 02:47:21,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=96.05 vs. limit=8.3
+2024-08-25 02:47:26,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=83.05 vs. limit=8.32
+2024-08-25 02:47:32,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=91.27 vs. limit=8.32
+2024-08-25 02:47:37,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=86.41 vs. limit=8.32
+2024-08-25 02:47:42,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=2240.0, ans=5.5600000000000005
+2024-08-25 02:47:42,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=2240.0, ans=0.395
+2024-08-25 02:47:43,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.35 vs. limit=4.896
+2024-08-25 02:47:52,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=2293.3333333333335, ans=0.121
+2024-08-25 02:47:52,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=70.88 vs. limit=8.36
+2024-08-25 02:47:58,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.77 vs. limit=9.22
+2024-08-25 02:48:06,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.54 vs. limit=4.938666666666666
+2024-08-25 02:48:21,737 INFO [train.py:1114] (1/4) Epoch 1, batch 450, loss[loss=1.239, simple_loss=0.8127, pruned_loss=0.9277, ctc_loss=1.183, over 19609.00 frames. ], tot_loss[loss=1.283, simple_loss=0.88, pruned_loss=1.006, ctc_loss=1.178, over 3450267.38 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0
+2024-08-25 02:48:27,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=28.04 vs. limit=8.4
+2024-08-25 02:48:29,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.63 vs. limit=4.96
+2024-08-25 02:48:30,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=2400.0, ans=0.11
+2024-08-25 02:48:38,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.78 vs. limit=9.34
+2024-08-25 02:48:40,962 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=33.24 vs. limit=8.42
+2024-08-25 02:48:45,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=26.46 vs. limit=8.42
+2024-08-25 02:48:54,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=2506.6666666666665, ans=0.3825
+2024-08-25 02:49:00,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=32.08 vs. limit=8.44
+2024-08-25 02:49:03,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.70 vs. limit=5.024
+2024-08-25 02:49:06,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.27 vs. limit=5.64
+2024-08-25 02:49:10,663 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=19.76 vs. limit=8.46
+2024-08-25 02:49:14,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.51 vs. limit=8.48
+2024-08-25 02:49:15,930 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=48.20 vs. limit=8.48
+2024-08-25 02:49:24,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2613.3333333333335, ans=0.3775
+2024-08-25 02:49:27,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2666.6666666666665, ans=0.375
+2024-08-25 02:49:28,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.61 vs. limit=9.5
+2024-08-25 02:49:28,637 INFO [train.py:1114] (1/4) Epoch 1, batch 500, loss[loss=1.183, simple_loss=0.7805, pruned_loss=0.8439, ctc_loss=1.128, over 19664.00 frames. ], tot_loss[loss=1.25, simple_loss=0.8495, pruned_loss=0.9663, ctc_loss=1.16, over 3545445.64 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:49:29,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=34.63 vs. limit=8.5
+2024-08-25 02:49:29,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=2666.6666666666665, ans=5.066666666666666
+2024-08-25 02:49:30,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.47 vs. limit=4.533333333333333
+2024-08-25 02:49:32,572 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.834e+02 2.411e+02 2.968e+02 6.409e+02, threshold=4.822e+02, percent-clipped=7.0
+2024-08-25 02:49:39,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2666.6666666666665, ans=0.16666666666666669
+2024-08-25 02:49:53,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.40 vs. limit=8.52
+2024-08-25 02:49:57,712 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.28 vs. limit=9.58
+2024-08-25 02:49:58,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2773.3333333333335, ans=0.37
+2024-08-25 02:50:04,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.95 vs. limit=8.54
+2024-08-25 02:50:04,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.50 vs. limit=8.54
+2024-08-25 02:50:19,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=2826.6666666666665, ans=0.08233333333333334
+2024-08-25 02:50:27,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=2880.0, ans=0.365
+2024-08-25 02:50:30,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=19.04 vs. limit=8.58
+2024-08-25 02:50:35,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=2880.0, ans=0.088
+2024-08-25 02:50:39,203 INFO [train.py:1114] (1/4) Epoch 1, batch 550, loss[loss=1.1, simple_loss=0.7419, pruned_loss=0.7123, ctc_loss=1.062, over 19313.00 frames. ], tot_loss[loss=1.219, simple_loss=0.8256, pruned_loss=0.9171, ctc_loss=1.137, over 3607780.47 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:50:46,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.69 vs. limit=5.733333333333333
+2024-08-25 02:50:56,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=2933.3333333333335, ans=0.5
+2024-08-25 02:50:59,966 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=1.349e+01
+2024-08-25 02:51:13,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=5.216
+2024-08-25 02:51:19,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=3040.0, ans=0.35750000000000004
+2024-08-25 02:51:23,388 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=6.071e+01
+2024-08-25 02:51:50,151 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=20.39 vs. limit=8.68
+2024-08-25 02:51:55,122 INFO [train.py:1114] (1/4) Epoch 1, batch 600, loss[loss=1.002, simple_loss=0.6891, pruned_loss=0.5985, ctc_loss=0.9742, over 19416.00 frames. ], tot_loss[loss=1.173, simple_loss=0.7967, pruned_loss=0.8506, ctc_loss=1.1, over 3665856.03 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:51:59,171 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 2.677e+02 3.553e+02 4.456e+02 9.241e+02, threshold=7.106e+02, percent-clipped=18.0
+2024-08-25 02:52:20,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.15 vs. limit=8.72
+2024-08-25 02:52:40,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=3360.0, ans=7.1
+2024-08-25 02:52:40,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=3360.0, ans=0.02439999999999999
+2024-08-25 02:53:01,029 INFO [train.py:1114] (1/4) Epoch 1, batch 650, loss[loss=0.8533, simple_loss=0.6013, pruned_loss=0.4708, ctc_loss=0.8244, over 19771.00 frames. ], tot_loss[loss=1.112, simple_loss=0.7585, pruned_loss=0.7746, ctc_loss=1.048, over 3716267.31 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 4.0
+2024-08-25 02:53:10,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=10.1
+2024-08-25 02:53:23,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=8.82
+2024-08-25 02:53:25,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=3573.3333333333335, ans=0.04899999999999999
+2024-08-25 02:53:25,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3573.3333333333335, ans=0.26426666666666665
+2024-08-25 02:53:39,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.07 vs. limit=3.544
+2024-08-25 02:53:48,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=3626.6666666666665, ans=0.06399999999999997
+2024-08-25 02:53:55,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.15 vs. limit=5.92
+2024-08-25 02:53:58,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=3680.0, ans=0.3275
+2024-08-25 02:54:01,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3680.0, ans=0.3275
+2024-08-25 02:54:09,015 INFO [train.py:1114] (1/4) Epoch 1, batch 700, loss[loss=0.7897, simple_loss=0.5598, pruned_loss=0.4316, ctc_loss=0.7434, over 19715.00 frames. ], tot_loss[loss=1.05, simple_loss=0.7215, pruned_loss=0.7016, ctc_loss=0.9917, over 3749077.52 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:54:14,188 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.975e+02 3.878e+02 5.385e+02 1.936e+03, threshold=7.756e+02, percent-clipped=10.0
+2024-08-25 02:54:22,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.85 vs. limit=5.514666666666667
+2024-08-25 02:54:24,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.80 vs. limit=8.92
+2024-08-25 02:54:40,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.24 vs. limit=6.92
+2024-08-25 02:54:45,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=3840.0, ans=0.013600000000000001
+2024-08-25 02:54:57,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=3893.3333333333335, ans=0.3175
+2024-08-25 02:55:03,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=3946.6666666666665, ans=0.315
+2024-08-25 02:55:03,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.13 vs. limit=8.98
+2024-08-25 02:55:05,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.48 vs. limit=5.578666666666667
+2024-08-25 02:55:06,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.97 vs. limit=8.98
+2024-08-25 02:55:15,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.03 vs. limit=5.986666666666666
+2024-08-25 02:55:16,795 INFO [train.py:1114] (1/4) Epoch 1, batch 750, loss[loss=0.7512, simple_loss=0.55, pruned_loss=0.3742, ctc_loss=0.7029, over 19495.00 frames. ], tot_loss[loss=0.988, simple_loss=0.6852, pruned_loss=0.6333, ctc_loss=0.9334, over 3775728.42 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:55:18,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=4000.0, ans=0.3125
+2024-08-25 02:55:37,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=4053.3333333333335, ans=0.7581333333333333
+2024-08-25 02:56:02,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=4160.0, ans=0.305
+2024-08-25 02:56:04,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=4160.0, ans=0.2624
+2024-08-25 02:56:15,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=4213.333333333333, ans=0.3025
+2024-08-25 02:56:15,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.74 vs. limit=9.08
+2024-08-25 02:56:23,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=4266.666666666667, ans=0.025
+2024-08-25 02:56:24,844 INFO [train.py:1114] (1/4) Epoch 1, batch 800, loss[loss=0.6423, simple_loss=0.4807, pruned_loss=0.3093, ctc_loss=0.577, over 19827.00 frames. ], tot_loss[loss=0.9285, simple_loss=0.6512, pruned_loss=0.5711, ctc_loss=0.8726, over 3797367.18 frames. ], batch size: 49, lr: 4.49e-02, grad_scale: 16.0
+2024-08-25 02:56:25,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.31 vs. limit=10.7
+2024-08-25 02:56:29,872 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.884e+02 2.945e+02 3.956e+02 5.210e+02 9.107e+02, threshold=7.913e+02, percent-clipped=4.0
+2024-08-25 02:56:57,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4373.333333333333, ans=0.29500000000000004
+2024-08-25 02:57:05,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4426.666666666667, ans=0.2557333333333333
+2024-08-25 02:57:07,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=4426.666666666667, ans=0.07
+2024-08-25 02:57:15,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=4426.666666666667, ans=0.04822222222222222
+2024-08-25 02:57:30,597 INFO [train.py:1114] (1/4) Epoch 1, batch 850, loss[loss=0.6783, simple_loss=0.5149, pruned_loss=0.3159, ctc_loss=0.602, over 19671.00 frames. ], tot_loss[loss=0.8746, simple_loss=0.621, pruned_loss=0.5167, ctc_loss=0.8152, over 3815221.61 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 16.0
+2024-08-25 02:57:33,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=3.68
+2024-08-25 02:57:34,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4533.333333333333, ans=0.2875
+2024-08-25 02:57:36,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=4533.333333333333, ans=0.009884057971014493
+2024-08-25 02:57:36,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=2.81 vs. limit=3.68
+2024-08-25 02:57:40,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=4533.333333333333, ans=0.7413333333333334
+2024-08-25 02:57:53,610 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=4.622e-01
+2024-08-25 02:58:06,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4640.0, ans=0.2536
+2024-08-25 02:58:09,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.08 vs. limit=9.24
+2024-08-25 02:58:36,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.56 vs. limit=3.7119999999999997
+2024-08-25 02:58:42,813 INFO [train.py:1114] (1/4) Epoch 1, batch 900, loss[loss=0.5668, simple_loss=0.4403, pruned_loss=0.2531, ctc_loss=0.4886, over 19823.00 frames. ], tot_loss[loss=0.8287, simple_loss=0.5955, pruned_loss=0.4714, ctc_loss=0.7649, over 3819074.93 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 02:58:48,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.783e+02 3.682e+02 4.971e+02 1.764e+03, threshold=7.364e+02, percent-clipped=6.0
+2024-08-25 02:58:53,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=4800.0, ans=0.275
+2024-08-25 02:59:01,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=4853.333333333333, ans=0.7301333333333334
+2024-08-25 02:59:19,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=3.7359999999999998
+2024-08-25 02:59:20,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=4906.666666666667, ans=0.27
+2024-08-25 02:59:31,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=4960.0, ans=0.7264
+2024-08-25 02:59:50,550 INFO [train.py:1114] (1/4) Epoch 1, batch 950, loss[loss=0.579, simple_loss=0.4513, pruned_loss=0.2569, ctc_loss=0.4969, over 19494.00 frames. ], tot_loss[loss=0.7856, simple_loss=0.5719, pruned_loss=0.4309, ctc_loss=0.7168, over 3820940.71 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:00:29,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=5226.666666666667, ans=0.07
+2024-08-25 03:00:33,179 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:00:51,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5280.0, ans=0.24719999999999998
+2024-08-25 03:00:54,649 INFO [train.py:1114] (1/4) Epoch 1, batch 1000, loss[loss=0.6094, simple_loss=0.4714, pruned_loss=0.2765, ctc_loss=0.5187, over 19853.00 frames. ], tot_loss[loss=0.751, simple_loss=0.5535, pruned_loss=0.3982, ctc_loss=0.6767, over 3817984.21 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:01:01,304 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.847e+02 3.463e+02 4.611e+02 9.717e+02, threshold=6.926e+02, percent-clipped=4.0
+2024-08-25 03:01:01,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5333.333333333333, ans=0.25
+2024-08-25 03:01:16,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5386.666666666667, ans=0.0
+2024-08-25 03:01:16,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=5386.666666666667, ans=0.0
+2024-08-25 03:01:28,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.91 vs. limit=7.720000000000001
+2024-08-25 03:01:53,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=5546.666666666667, ans=0.24
+2024-08-25 03:01:55,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=5546.666666666667, ans=0.00966376811594203
+2024-08-25 03:01:57,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.69 vs. limit=6.2186666666666675
+2024-08-25 03:02:07,677 INFO [train.py:1114] (1/4) Epoch 1, batch 1050, loss[loss=0.6225, simple_loss=0.4916, pruned_loss=0.2723, ctc_loss=0.5204, over 19861.00 frames. ], tot_loss[loss=0.7153, simple_loss=0.5342, pruned_loss=0.367, ctc_loss=0.6359, over 3823633.07 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:02:17,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=5600.0, ans=0.194
+2024-08-25 03:02:39,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=5706.666666666667, ans=0.24293333333333333
+2024-08-25 03:02:46,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=5760.0, ans=0.6984
+2024-08-25 03:02:54,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=5760.0, ans=0.2864
+2024-08-25 03:02:59,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=5813.333333333333, ans=0.009605797101449275
+2024-08-25 03:03:13,747 INFO [train.py:1114] (1/4) Epoch 1, batch 1100, loss[loss=0.594, simple_loss=0.4711, pruned_loss=0.2582, ctc_loss=0.4939, over 19577.00 frames. ], tot_loss[loss=0.6842, simple_loss=0.5174, pruned_loss=0.3405, ctc_loss=0.6001, over 3830869.71 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:03:16,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=9.7
+2024-08-25 03:03:20,118 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 2.626e+02 3.754e+02 4.559e+02 6.965e+02, threshold=7.509e+02, percent-clipped=1.0
+2024-08-25 03:03:20,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=5866.666666666667, ans=0.22499999999999998
+2024-08-25 03:03:24,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5866.666666666667, ans=0.24133333333333332
+2024-08-25 03:03:24,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=6.346666666666667
+2024-08-25 03:03:27,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=11.940000000000001
+2024-08-25 03:03:34,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5920.0, ans=0.22249999999999998
+2024-08-25 03:03:34,382 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:03:38,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=5973.333333333333, ans=0.025
+2024-08-25 03:03:46,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=9.74
+2024-08-25 03:03:47,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=9.74
+2024-08-25 03:03:48,511 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:03:57,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=12.02
+2024-08-25 03:04:14,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=6080.0, ans=0.21500000000000002
+2024-08-25 03:04:18,511 INFO [train.py:1114] (1/4) Epoch 1, batch 1150, loss[loss=0.5848, simple_loss=0.4674, pruned_loss=0.2497, ctc_loss=0.4878, over 19567.00 frames. ], tot_loss[loss=0.6605, simple_loss=0.5051, pruned_loss=0.3199, ctc_loss=0.5724, over 3829809.04 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 8.0
+2024-08-25 03:04:49,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=6240.0, ans=0.09899494936611666
+2024-08-25 03:04:54,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6240.0, ans=0.20750000000000002
+2024-08-25 03:05:08,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.80 vs. limit=12.219999999999999
+2024-08-25 03:05:16,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.49 vs. limit=8.173333333333334
+2024-08-25 03:05:20,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=6346.666666666667, ans=0.2025
+2024-08-25 03:05:20,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=6346.666666666667, ans=0.2025
+2024-08-25 03:05:24,499 INFO [train.py:1114] (1/4) Epoch 1, batch 1200, loss[loss=0.5452, simple_loss=0.4531, pruned_loss=0.2213, ctc_loss=0.4323, over 19852.00 frames. ], tot_loss[loss=0.6399, simple_loss=0.495, pruned_loss=0.3023, ctc_loss=0.548, over 3825553.02 frames. ], batch size: 57, lr: 4.47e-02, grad_scale: 16.0
+2024-08-25 03:05:30,708 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.839e+02 2.702e+02 3.344e+02 4.028e+02 1.038e+03, threshold=6.687e+02, percent-clipped=4.0
+2024-08-25 03:05:34,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.77 vs. limit=8.2
+2024-08-25 03:06:12,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=6560.0, ans=0.1925
+2024-08-25 03:06:16,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.21 vs. limit=12.42
+2024-08-25 03:06:33,175 INFO [train.py:1114] (1/4) Epoch 1, batch 1250, loss[loss=0.5484, simple_loss=0.4521, pruned_loss=0.2282, ctc_loss=0.4321, over 19519.00 frames. ], tot_loss[loss=0.6188, simple_loss=0.4848, pruned_loss=0.2852, ctc_loss=0.5224, over 3843501.71 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 8.0
+2024-08-25 03:06:46,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=6720.0, ans=0.009408695652173914
+2024-08-25 03:06:51,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=6720.0, ans=0.185
+2024-08-25 03:06:56,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=6720.0, ans=0.0
+2024-08-25 03:07:29,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6880.0, ans=0.0
+2024-08-25 03:07:36,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=6933.333333333333, ans=0.175
+2024-08-25 03:07:53,431 INFO [train.py:1114] (1/4) Epoch 1, batch 1300, loss[loss=0.5643, simple_loss=0.4603, pruned_loss=0.2378, ctc_loss=0.4536, over 18874.00 frames. ], tot_loss[loss=0.5964, simple_loss=0.473, pruned_loss=0.2688, ctc_loss=0.4969, over 3846323.59 frames. ], batch size: 76, lr: 4.47e-02, grad_scale: 8.0
+2024-08-25 03:08:00,986 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.595e+02 3.171e+02 4.007e+02 5.829e+02, threshold=6.342e+02, percent-clipped=0.0
+2024-08-25 03:08:22,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.17 vs. limit=12.780000000000001
+2024-08-25 03:08:31,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=7093.333333333333, ans=0.8209333333333333
+2024-08-25 03:08:52,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=7146.666666666667, ans=0.025
+2024-08-25 03:08:59,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=7200.0, ans=0.009304347826086957
+2024-08-25 03:09:00,194 INFO [train.py:1114] (1/4) Epoch 1, batch 1350, loss[loss=0.4781, simple_loss=0.4108, pruned_loss=0.1863, ctc_loss=0.3709, over 19760.00 frames. ], tot_loss[loss=0.579, simple_loss=0.4642, pruned_loss=0.2559, ctc_loss=0.4768, over 3858477.95 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:09:05,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=7200.0, ans=0.16249999999999998
+2024-08-25 03:09:11,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.13 vs. limit=8.6
+2024-08-25 03:09:11,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=7253.333333333333, ans=9.533333333333333
+2024-08-25 03:10:09,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=7253.333333333333, ans=0.036444444444444446
+2024-08-25 03:10:22,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=7306.666666666667, ans=0.15749999999999997
+2024-08-25 03:11:52,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=7360.0, ans=0.036000000000000004
+2024-08-25 03:12:03,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.11 vs. limit=6.965333333333334
+2024-08-25 03:12:05,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=7413.333333333333, ans=0.009257971014492754
+2024-08-25 03:12:10,366 INFO [train.py:1114] (1/4) Epoch 1, batch 1400, loss[loss=0.4261, simple_loss=0.3677, pruned_loss=0.1678, ctc_loss=0.3231, over 19668.00 frames. ], tot_loss[loss=0.5637, simple_loss=0.4566, pruned_loss=0.245, ctc_loss=0.4592, over 3864290.12 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:12:32,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.490e+02 2.974e+02 4.034e+02 6.918e+02, threshold=5.948e+02, percent-clipped=1.0
+2024-08-25 03:13:09,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.36 vs. limit=10.36
+2024-08-25 03:13:26,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7680.0, ans=0.2232
+2024-08-25 03:13:28,397 INFO [train.py:1114] (1/4) Epoch 1, batch 1450, loss[loss=0.5238, simple_loss=0.4481, pruned_loss=0.2108, ctc_loss=0.3977, over 19686.00 frames. ], tot_loss[loss=0.5518, simple_loss=0.4515, pruned_loss=0.2362, ctc_loss=0.4444, over 3861926.89 frames. ], batch size: 63, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:13:29,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7733.333333333333, ans=0.1375
+2024-08-25 03:13:30,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.06 vs. limit=13.3
+2024-08-25 03:13:36,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=7733.333333333333, ans=0.1375
+2024-08-25 03:13:50,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=10.42
+2024-08-25 03:13:59,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=7840.0, ans=0.034
+2024-08-25 03:14:15,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=7893.333333333333, ans=0.13
+2024-08-25 03:14:24,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7946.666666666667, ans=0.033555555555555554
+2024-08-25 03:14:30,703 INFO [train.py:1114] (1/4) Epoch 1, batch 1500, loss[loss=0.5177, simple_loss=0.4487, pruned_loss=0.2029, ctc_loss=0.4004, over 19597.00 frames. ], tot_loss[loss=0.5407, simple_loss=0.4465, pruned_loss=0.2284, ctc_loss=0.4317, over 3862190.24 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:14:38,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.576e+02 3.382e+02 4.091e+02 7.597e+02, threshold=6.763e+02, percent-clipped=6.0
+2024-08-25 03:14:40,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=8000.0, ans=0.125
+2024-08-25 03:14:47,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=8053.333333333333, ans=0.025
+2024-08-25 03:15:12,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.09 vs. limit=13.620000000000001
+2024-08-25 03:15:16,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=10.56
+2024-08-25 03:15:19,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=8160.0, ans=0.125
+2024-08-25 03:15:24,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=8213.333333333334, ans=0.03244444444444444
+2024-08-25 03:15:40,014 INFO [train.py:1114] (1/4) Epoch 1, batch 1550, loss[loss=0.5474, simple_loss=0.4547, pruned_loss=0.2274, ctc_loss=0.4375, over 19624.00 frames. ], tot_loss[loss=0.5307, simple_loss=0.442, pruned_loss=0.2218, ctc_loss=0.4205, over 3846925.98 frames. ], batch size: 60, lr: 4.45e-02, grad_scale: 8.0
+2024-08-25 03:15:51,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=8320.0, ans=0.1668
+2024-08-25 03:15:57,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.70 vs. limit=13.74
+2024-08-25 03:16:23,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=8426.666666666666, ans=0.125
+2024-08-25 03:16:30,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8480.0, ans=0.2152
+2024-08-25 03:16:31,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.61 vs. limit=13.86
limit=13.86 +2024-08-25 03:16:36,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=8480.0, ans=0.125 +2024-08-25 03:16:49,350 INFO [train.py:1114] (1/4) Epoch 1, batch 1600, loss[loss=0.4744, simple_loss=0.4255, pruned_loss=0.1787, ctc_loss=0.3629, over 19831.00 frames. ], tot_loss[loss=0.5214, simple_loss=0.4376, pruned_loss=0.2159, ctc_loss=0.4102, over 3836866.91 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 16.0 +2024-08-25 03:16:55,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=8533.333333333334, ans=0.09899494936611666 +2024-08-25 03:16:57,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=8533.333333333334, ans=0.125 +2024-08-25 03:16:59,538 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.604e+02 3.125e+02 4.170e+02 2.617e+03, threshold=6.251e+02, percent-clipped=7.0 +2024-08-25 03:17:03,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=8586.666666666666, ans=0.125 +2024-08-25 03:17:08,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=8586.666666666666, ans=0.05 +2024-08-25 03:17:19,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8640.0, ans=0.125 +2024-08-25 03:17:24,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=8640.0, ans=0.5976 +2024-08-25 03:17:40,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=8693.333333333334, ans=0.125 +2024-08-25 03:19:09,295 INFO [train.py:1114] (1/4) Epoch 1, batch 1650, loss[loss=0.4358, simple_loss=0.3984, pruned_loss=0.1621, ctc_loss=0.3257, over 19652.00 frames. ], tot_loss[loss=0.512, simple_loss=0.4332, pruned_loss=0.21, ctc_loss=0.4003, over 3831942.97 frames. ], batch size: 59, lr: 4.45e-02, grad_scale: 16.0 +2024-08-25 03:19:10,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=8800.0, ans=0.125 +2024-08-25 03:19:17,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=8800.0, ans=0.125 +2024-08-25 03:19:19,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.65 vs. limit=10.8 +2024-08-25 03:19:52,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.14 vs. limit=5.0 +2024-08-25 03:19:57,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.49 vs. limit=14.219999999999999 +2024-08-25 03:20:12,473 INFO [train.py:1114] (1/4) Epoch 1, batch 1700, loss[loss=0.39, simple_loss=0.3524, pruned_loss=0.1486, ctc_loss=0.2942, over 19670.00 frames. ], tot_loss[loss=0.5017, simple_loss=0.4288, pruned_loss=0.2039, ctc_loss=0.3894, over 3845983.60 frames. 
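A note on the recurring `WARNING [optim.py:487]` lines: they report the quartiles of recently observed gradient norms plus the clipping threshold, and in every such line the threshold equals `Clipping_scale` times the median quartile (e.g. 2.0 × 2.990e+02 ≈ 5.979e+02 in the batch-1900 warning above). A minimal sketch of that behaviour, using a hypothetical helper class rather than icefall's actual optimizer API:

```python
from collections import deque
from statistics import median

import torch


class MedianGradClipper:
    """Hypothetical helper (not icefall's API) illustrating the optim.py
    WARNINGs: threshold = clipping_scale * median of recent grad norms,
    e.g. 2.0 * 2.990e+02 in the batch-1900 warning above."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent gradient norms

    def clip_(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        total = torch.norm(torch.stack([g.norm() for g in grads]))
        self.norms.append(float(total))
        threshold = self.clipping_scale * median(self.norms)
        if total > threshold:  # scale every gradient down to the threshold
            for g in grads:
                g.mul_(threshold / total)
        return float(total)


p = torch.nn.Parameter(torch.randn(10))
p.grad = torch.randn(10)
print(MedianGradClipper().clip_([p]))  # first call never clips (median = own norm)
```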
], batch size: 46, lr: 4.44e-02, grad_scale: 16.0 +2024-08-25 03:20:15,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=9066.666666666666, ans=0.125 +2024-08-25 03:20:19,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.395e+02 2.888e+02 3.702e+02 8.491e+02, threshold=5.776e+02, percent-clipped=2.0 +2024-08-25 03:20:21,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.94 vs. limit=9.533333333333333 +2024-08-25 03:20:29,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=9120.0, ans=0.02866666666666667 +2024-08-25 03:22:14,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 03:22:22,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=9280.0, ans=0.125 +2024-08-25 03:22:22,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=9280.0, ans=0.125 +2024-08-25 03:22:27,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=9280.0, ans=0.125 +2024-08-25 03:22:33,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=4.4 +2024-08-25 03:22:33,852 INFO [train.py:1114] (1/4) Epoch 1, batch 1750, loss[loss=0.3899, simple_loss=0.3639, pruned_loss=0.1449, ctc_loss=0.2831, over 19627.00 frames. ], tot_loss[loss=0.4934, simple_loss=0.4251, pruned_loss=0.199, ctc_loss=0.3807, over 3850913.43 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 16.0 +2024-08-25 03:22:37,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=9333.333333333334, ans=0.125 +2024-08-25 03:22:47,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=9386.666666666666, ans=0.5714666666666668 +2024-08-25 03:22:51,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9386.666666666666, ans=0.20613333333333334 +2024-08-25 03:22:52,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=9386.666666666666, ans=0.125 +2024-08-25 03:23:04,157 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.23 vs. limit=14.58 +2024-08-25 03:23:29,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9546.666666666666, ans=0.125 +2024-08-25 03:23:31,435 INFO [train.py:1114] (1/4) Epoch 1, batch 1800, loss[loss=0.4977, simple_loss=0.4344, pruned_loss=0.2018, ctc_loss=0.3762, over 19608.00 frames. ], tot_loss[loss=0.4891, simple_loss=0.4241, pruned_loss=0.1964, ctc_loss=0.3761, over 3852586.37 frames. 
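The `ScheduledFloat` lines above record hyperparameters (dropout probabilities, skip rates, balancer bounds) whose current value `ans` is a function of the global `batch_count`. A minimal sketch of piecewise-linear scheduling; the breakpoints below are hypothetical, chosen only to show the mechanism, not the recipe's actual schedules:

```python
def scheduled_float(batch_count: float,
                    schedule: list[tuple[float, float]]) -> float:
    """Piecewise-linear value at `batch_count`, given sorted
    (batch_count, value) breakpoints; e.g. [(0, 0.05), (16000, 0.0)]
    anneals a skip rate from 0.05 to 0 over the first 16k batches."""
    x0, y0 = schedule[0]
    if batch_count <= x0:
        return y0
    for x1, y1 in schedule[1:]:
        if batch_count <= x1:
            t = (batch_count - x0) / (x1 - x0)  # interpolate linearly
            return y0 + t * (y1 - y0)
        x0, y0 = x1, y1
    return y0  # past the last breakpoint: hold the final value


# Example: a skip-rate-like decay sampled mid-schedule
print(scheduled_float(7253.33, [(0.0, 0.05), (16000.0, 0.0)]))
```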
], batch size: 55, lr: 4.44e-02, grad_scale: 8.0 +2024-08-25 03:23:39,408 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.646e+02 3.473e+02 4.220e+02 8.344e+02, threshold=6.945e+02, percent-clipped=3.0 +2024-08-25 03:23:49,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9653.333333333334, ans=0.125 +2024-08-25 03:24:05,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=9706.666666666666, ans=0.5602666666666667 +2024-08-25 03:24:15,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=9760.0, ans=0.026000000000000002 +2024-08-25 03:24:16,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=11.16 +2024-08-25 03:24:23,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=9813.333333333334, ans=0.025777777777777778 +2024-08-25 03:24:35,842 INFO [train.py:1114] (1/4) Epoch 1, batch 1850, loss[loss=0.4235, simple_loss=0.3908, pruned_loss=0.1637, ctc_loss=0.3044, over 19578.00 frames. ], tot_loss[loss=0.48, simple_loss=0.4204, pruned_loss=0.1913, ctc_loss=0.367, over 3856379.05 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:24:44,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=9866.666666666666, ans=0.5546666666666666 +2024-08-25 03:24:51,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=11.2 +2024-08-25 03:25:01,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=9920.0, ans=0.125 +2024-08-25 03:25:18,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10026.666666666666, ans=0.19973333333333332 +2024-08-25 03:25:38,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=10080.0, ans=0.02466666666666667 +2024-08-25 03:25:40,387 INFO [train.py:1114] (1/4) Epoch 1, batch 1900, loss[loss=0.4883, simple_loss=0.4366, pruned_loss=0.1937, ctc_loss=0.371, over 19629.00 frames. ], tot_loss[loss=0.4752, simple_loss=0.4192, pruned_loss=0.1886, ctc_loss=0.3618, over 3860757.23 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:25:46,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10133.333333333334, ans=0.19866666666666666 +2024-08-25 03:25:48,461 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 2.554e+02 2.990e+02 4.033e+02 8.041e+02, threshold=5.979e+02, percent-clipped=3.0 +2024-08-25 03:26:08,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.96 vs. 
limit=15.18 +2024-08-25 03:26:09,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=10240.0, ans=0.125 +2024-08-25 03:26:09,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.88 vs. limit=8.096 +2024-08-25 03:26:14,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.83 vs. limit=11.36 +2024-08-25 03:26:19,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=10293.333333333334, ans=0.125 +2024-08-25 03:26:25,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=10346.666666666666, ans=0.125 +2024-08-25 03:26:38,010 INFO [train.py:1114] (1/4) Epoch 1, batch 1950, loss[loss=0.4296, simple_loss=0.3962, pruned_loss=0.1663, ctc_loss=0.32, over 19583.00 frames. ], tot_loss[loss=0.4682, simple_loss=0.4171, pruned_loss=0.1847, ctc_loss=0.355, over 3870139.29 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:26:47,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=10400.0, ans=0.125 +2024-08-25 03:27:03,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=10506.666666666666, ans=0.19493333333333335 +2024-08-25 03:27:07,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.62 vs. limit=15.379999999999999 +2024-08-25 03:27:15,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=10560.0, ans=0.008573913043478262 +2024-08-25 03:27:23,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10613.333333333334, ans=0.19386666666666666 +2024-08-25 03:27:36,523 INFO [train.py:1114] (1/4) Epoch 1, batch 2000, loss[loss=0.408, simple_loss=0.3765, pruned_loss=0.157, ctc_loss=0.3136, over 19677.00 frames. ], tot_loss[loss=0.4639, simple_loss=0.4158, pruned_loss=0.1826, ctc_loss=0.3511, over 3854520.47 frames. ], batch size: 45, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:27:42,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10666.666666666666, ans=0.0 +2024-08-25 03:27:44,891 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.863e+02 2.508e+02 3.011e+02 3.695e+02 6.472e+02, threshold=6.022e+02, percent-clipped=1.0 +2024-08-25 03:27:46,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.53 vs. 
limit=11.5 +2024-08-25 03:27:51,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=10720.0, ans=0.125 +2024-08-25 03:27:54,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=10720.0, ans=0.5248 +2024-08-25 03:28:27,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=10880.0, ans=0.008504347826086956 +2024-08-25 03:28:41,990 INFO [train.py:1114] (1/4) Epoch 1, batch 2050, loss[loss=0.3962, simple_loss=0.3767, pruned_loss=0.1497, ctc_loss=0.2908, over 19750.00 frames. ], tot_loss[loss=0.4573, simple_loss=0.4124, pruned_loss=0.1796, ctc_loss=0.3451, over 3850741.77 frames. ], batch size: 47, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:30:05,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=11093.333333333334, ans=0.5117333333333334 +2024-08-25 03:31:02,643 INFO [train.py:1114] (1/4) Epoch 1, batch 2100, loss[loss=0.4076, simple_loss=0.3939, pruned_loss=0.151, ctc_loss=0.2982, over 19772.00 frames. ], tot_loss[loss=0.4489, simple_loss=0.4085, pruned_loss=0.1752, ctc_loss=0.3372, over 3857647.84 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:31:03,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.18 vs. limit=8.48 +2024-08-25 03:31:04,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=11.7 +2024-08-25 03:31:14,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=11200.0, ans=0.020000000000000004 +2024-08-25 03:31:19,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.953e+02 2.443e+02 2.901e+02 4.101e+02 7.108e+02, threshold=5.802e+02, percent-clipped=5.0 +2024-08-25 03:31:44,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=11306.666666666666, ans=0.035 +2024-08-25 03:32:07,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=11360.0, ans=0.5024000000000001 +2024-08-25 03:32:32,797 INFO [train.py:1114] (1/4) Epoch 1, batch 2150, loss[loss=0.4253, simple_loss=0.395, pruned_loss=0.1647, ctc_loss=0.3154, over 19609.00 frames. ], tot_loss[loss=0.4419, simple_loss=0.4053, pruned_loss=0.1716, ctc_loss=0.3303, over 3868410.32 frames. 
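The `Whitening` lines compare a measured covariance statistic (`metric`) against a scheduled `limit`; the module only intervenes when the metric exceeds the limit, which is why most lines log `metric ... vs. limit` with no further action. One plausible way to quantify how far features are from white, shown purely to make those lines concrete (an assumed statistic, not necessarily the exact one used):

```python
import torch


def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """'Non-whiteness' of the channel covariance of x (frames, channels):
    returns 1.0 when the covariance is proportional to the identity and
    grows as the eigenvalue spectrum becomes more uneven."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.t() @ x) / x.shape[0]        # (channels, channels)
    eigs = torch.linalg.eigvalsh(cov)     # real eigenvalues, ascending
    return (eigs**2).mean() / eigs.mean().clamp(min=1e-20) ** 2


x = torch.randn(1000, 384)                       # roughly white features
print(float(whitening_metric(x)))                # close to 1.0
print(float(whitening_metric(x * torch.linspace(0.1, 3.0, 384))))  # larger
```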
], batch size: 52, lr: 4.41e-02, grad_scale: 8.0 +2024-08-25 03:32:53,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=11520.0, ans=0.01866666666666667 +2024-08-25 03:32:58,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11520.0, ans=0.1848 +2024-08-25 03:33:39,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11626.666666666666, ans=0.125 +2024-08-25 03:33:39,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11626.666666666666, ans=0.18373333333333333 +2024-08-25 03:33:54,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.78 vs. limit=4.752 +2024-08-25 03:33:57,383 INFO [train.py:1114] (1/4) Epoch 1, batch 2200, loss[loss=0.4627, simple_loss=0.4278, pruned_loss=0.1788, ctc_loss=0.3496, over 19555.00 frames. ], tot_loss[loss=0.4386, simple_loss=0.4041, pruned_loss=0.1699, ctc_loss=0.3274, over 3866197.78 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 8.0 +2024-08-25 03:34:01,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11733.333333333334, ans=0.18266666666666664 +2024-08-25 03:34:05,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.75 vs. limit=11.9 +2024-08-25 03:34:08,398 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.911e+02 2.628e+02 3.380e+02 4.438e+02 7.655e+02, threshold=6.760e+02, percent-clipped=12.0 +2024-08-25 03:34:10,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.28 vs. limit=11.92 +2024-08-25 03:34:13,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=11786.666666666666, ans=0.0 +2024-08-25 03:34:13,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=11.92 +2024-08-25 03:34:13,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.90 vs. limit=7.946666666666666 +2024-08-25 03:34:14,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.84 vs. limit=11.92 +2024-08-25 03:34:30,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=11840.0, ans=0.025 +2024-08-25 03:34:56,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.66 vs. limit=16.46 +2024-08-25 03:35:03,295 INFO [train.py:1114] (1/4) Epoch 1, batch 2250, loss[loss=0.3898, simple_loss=0.3856, pruned_loss=0.1401, ctc_loss=0.2842, over 19598.00 frames. ], tot_loss[loss=0.4345, simple_loss=0.4025, pruned_loss=0.1677, ctc_loss=0.3228, over 3866141.34 frames. 
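Each `train.py` loss line breaks the objective into `simple_loss`, `pruned_loss`, and `ctc_loss`; the total is a weighted combination of the three. The exact weights and their warm-up schedule are not recoverable from the log, so the scales below are placeholders, though with them the batch-2250 components above reproduce the logged `loss=0.3898` to within 1e-4 (earlier epoch-1 lines deviate, suggesting the scales are warmed up early in training):

```python
def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float,
                  simple_scale: float = 0.5, ctc_scale: float = 0.2) -> float:
    """Hypothetical weighted sum of the three components logged by
    train.py; the scales are illustrative, not the recipe's values."""
    return simple_scale * simple_loss + pruned_loss + ctc_scale * ctc_loss


# Components from the 'Epoch 1, batch 2250' line above:
print(combined_loss(0.3856, 0.1401, 0.2842))  # 0.3897, logged loss is 0.3898
```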
], batch size: 55, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:35:03,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=12000.0, ans=0.125 +2024-08-25 03:35:18,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.48 vs. limit=11.026666666666667 +2024-08-25 03:35:40,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12160.0, ans=0.125 +2024-08-25 03:35:53,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.77 vs. limit=11.106666666666667 +2024-08-25 03:35:54,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=12213.333333333334, ans=0.025 +2024-08-25 03:36:03,055 INFO [train.py:1114] (1/4) Epoch 1, batch 2300, loss[loss=0.394, simple_loss=0.378, pruned_loss=0.1475, ctc_loss=0.2871, over 19507.00 frames. ], tot_loss[loss=0.4306, simple_loss=0.4002, pruned_loss=0.1659, ctc_loss=0.3191, over 3860747.31 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:36:07,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=12266.666666666666, ans=0.125 +2024-08-25 03:36:12,308 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.546e+02 3.099e+02 3.956e+02 8.242e+02, threshold=6.199e+02, percent-clipped=6.0 +2024-08-25 03:36:46,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12426.666666666666, ans=0.17573333333333335 +2024-08-25 03:36:54,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=12480.0, ans=0.125 +2024-08-25 03:37:00,696 INFO [train.py:1114] (1/4) Epoch 1, batch 2350, loss[loss=0.4892, simple_loss=0.4365, pruned_loss=0.1945, ctc_loss=0.3824, over 19700.00 frames. ], tot_loss[loss=0.4258, simple_loss=0.3982, pruned_loss=0.1633, ctc_loss=0.3142, over 3863328.77 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:37:24,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=12640.0, ans=0.4576 +2024-08-25 03:37:35,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.26 +2024-08-25 03:37:38,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=12693.333333333334, ans=0.125 +2024-08-25 03:37:40,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=12693.333333333334, ans=0.013777777777777778 +2024-08-25 03:37:42,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.49 vs. 
limit=17.02 +2024-08-25 03:37:51,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=12746.666666666666, ans=0.125 +2024-08-25 03:37:51,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12746.666666666666, ans=0.17253333333333334 +2024-08-25 03:37:59,408 INFO [train.py:1114] (1/4) Epoch 1, batch 2400, loss[loss=0.4502, simple_loss=0.4223, pruned_loss=0.1727, ctc_loss=0.3319, over 19379.00 frames. ], tot_loss[loss=0.4273, simple_loss=0.4004, pruned_loss=0.1638, ctc_loss=0.3145, over 3858170.43 frames. ], batch size: 67, lr: 4.39e-02, grad_scale: 16.0 +2024-08-25 03:38:02,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=12800.0, ans=0.07 +2024-08-25 03:38:08,244 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.522e+02 3.053e+02 3.990e+02 1.210e+03, threshold=6.106e+02, percent-clipped=3.0 +2024-08-25 03:38:15,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.00 vs. limit=12.32 +2024-08-25 03:38:18,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=12853.333333333334, ans=0.013111111111111108 +2024-08-25 03:38:30,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-25 03:38:37,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=12960.0, ans=0.08012000000000001 +2024-08-25 03:38:38,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=12960.0, ans=0.125 +2024-08-25 03:38:41,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=12960.0, ans=0.125 +2024-08-25 03:39:03,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.05 vs. limit=17.3 +2024-08-25 03:39:03,887 INFO [train.py:1114] (1/4) Epoch 1, batch 2450, loss[loss=0.5034, simple_loss=0.4388, pruned_loss=0.2057, ctc_loss=0.3917, over 13653.00 frames. ], tot_loss[loss=0.4376, simple_loss=0.4062, pruned_loss=0.1693, ctc_loss=0.3239, over 3732983.70 frames. ], batch size: 140, lr: 4.39e-02, grad_scale: 16.0 +2024-08-25 03:39:07,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13066.666666666666, ans=0.0 +2024-08-25 03:40:43,719 INFO [train.py:1114] (1/4) Epoch 2, batch 0, loss[loss=0.4347, simple_loss=0.3982, pruned_loss=0.1704, ctc_loss=0.326, over 19390.00 frames. ], tot_loss[loss=0.4347, simple_loss=0.3982, pruned_loss=0.1704, ctc_loss=0.326, over 19390.00 frames. 
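The `tot_loss[...]` fields are a running, frame-weighted average of the per-batch `loss[...]` values: each batch contributes in proportion to its frame count, which is why at the first batch of epoch 2 (just below) `tot_loss` is identical to the batch loss. A minimal sketch of that bookkeeping (the real tracker may additionally decay old batches):

```python
class FrameWeightedLoss:
    """Running frame-weighted average matching the
    tot_loss[... over N frames] fields in the train.py lines."""

    def __init__(self) -> None:
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, frames: float) -> None:
        self.weighted_sum += loss * frames
        self.frames += frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.frames, 1.0)


tracker = FrameWeightedLoss()
tracker.update(0.4347, 19390.0)   # batch 0 of epoch 2, as logged below
print(tracker.value)              # 0.4347: equals the batch loss at batch 0
```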
], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 03:40:43,720 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 03:40:51,836 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.6884, 3.4234, 2.7515, 3.4895], device='cuda:1') +2024-08-25 03:40:53,952 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.7451, 3.1147, 3.4833, 3.2353], device='cuda:1') +2024-08-25 03:40:55,163 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.3317, simple_loss=0.3718, pruned_loss=0.1058, ctc_loss=0.2, over 944034.00 frames. +2024-08-25 03:40:55,164 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 03:41:17,122 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.388e+02 2.818e+02 3.444e+02 6.577e+02, threshold=5.636e+02, percent-clipped=3.0 +2024-08-25 03:41:19,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=13386.666666666666, ans=0.025 +2024-08-25 03:41:20,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 03:41:25,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 03:41:32,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=13440.0, ans=0.0 +2024-08-25 03:41:57,951 INFO [train.py:1114] (1/4) Epoch 2, batch 50, loss[loss=0.346, simple_loss=0.3437, pruned_loss=0.1264, ctc_loss=0.2384, over 19743.00 frames. ], tot_loss[loss=0.4242, simple_loss=0.4003, pruned_loss=0.162, ctc_loss=0.3102, over 845814.23 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:41:59,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.78 vs. limit=12.58 +2024-08-25 03:42:07,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=13546.666666666666, ans=0.4258666666666667 +2024-08-25 03:42:07,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.84 vs. limit=5.032 +2024-08-25 03:42:12,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=13600.0, ans=0.025 +2024-08-25 03:42:22,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.19 vs. limit=17.740000000000002 +2024-08-25 03:43:11,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=13706.666666666666, ans=0.125 +2024-08-25 03:43:29,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=13760.0, ans=0.00933333333333334 +2024-08-25 03:43:36,875 INFO [train.py:1114] (1/4) Epoch 2, batch 100, loss[loss=0.3597, simple_loss=0.3616, pruned_loss=0.1285, ctc_loss=0.2518, over 19718.00 frames. ], tot_loss[loss=0.421, simple_loss=0.4006, pruned_loss=0.1594, ctc_loss=0.3062, over 1499532.21 frames. 
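The `zipformer.py` diagnostics above print `attn_weights_entropy`, one value per attention head (e.g. `tensor([2.6884, 3.4234, 2.7515, 3.4895])`); lower entropy means more sharply peaked attention. A sketch of how such a per-head statistic can be computed, assuming attention rows sum to one:

```python
import torch


def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Mean entropy (nats) of attention rows, one value per head, in the
    spirit of the tensors logged by zipformer.py above. `attn` has shape
    (heads, queries, keys) with each row summing to 1."""
    ent = -(attn.clamp(min=1e-20).log() * attn).sum(dim=-1)  # (heads, queries)
    return ent.mean(dim=-1)


attn = torch.softmax(torch.randn(4, 10, 50), dim=-1)
print(attn_weights_entropy(attn))  # at most log(50) ≈ 3.91 per head
```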
], batch size: 51, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:43:37,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.96 vs. limit=5.072 +2024-08-25 03:43:39,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=13813.333333333334, ans=0.125 +2024-08-25 03:43:40,963 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:43:44,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=13813.333333333334, ans=0.025 +2024-08-25 03:43:50,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=12.68 +2024-08-25 03:43:50,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=13866.666666666666, ans=0.00888888888888889 +2024-08-25 03:44:02,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.500e+02 2.916e+02 3.893e+02 6.295e+02, threshold=5.832e+02, percent-clipped=2.0 +2024-08-25 03:44:11,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=13920.0, ans=0.4128 +2024-08-25 03:44:22,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13973.333333333334, ans=0.125 +2024-08-25 03:44:23,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=13973.333333333334, ans=0.125 +2024-08-25 03:44:39,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=18.02 +2024-08-25 03:44:42,788 INFO [train.py:1114] (1/4) Epoch 2, batch 150, loss[loss=0.3788, simple_loss=0.3614, pruned_loss=0.1415, ctc_loss=0.2831, over 19716.00 frames. ], tot_loss[loss=0.4117, simple_loss=0.3946, pruned_loss=0.155, ctc_loss=0.2973, over 2028297.15 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:44:58,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14133.333333333334, ans=0.15866666666666665 +2024-08-25 03:45:00,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=14133.333333333334, ans=0.007777777777777772 +2024-08-25 03:45:14,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=14186.666666666666, ans=0.007785507246376812 +2024-08-25 03:45:25,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=14240.0, ans=0.125 +2024-08-25 03:45:42,127 INFO [train.py:1114] (1/4) Epoch 2, batch 200, loss[loss=0.4496, simple_loss=0.4089, pruned_loss=0.1768, ctc_loss=0.3416, over 18347.00 frames. ], tot_loss[loss=0.4043, simple_loss=0.3896, pruned_loss=0.1513, ctc_loss=0.2907, over 2435894.34 frames. 
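The `lr` field decays smoothly within an epoch (4.46e-02 down to 4.39e-02 across epoch 1) and drops further at the epoch boundary (4.30e-02 at the start of epoch 2), consistent with an Eden-style schedule that discounts in both the batch and the epoch dimension. A sketch with assumed constants, shown only to illustrate the shape of the decay, not the recipe's actual parameters:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-style learning-rate schedule (constants are hypothetical):
    smooth decay in both batch and epoch, matching the slow drift of
    the lr field in the train.py lines above."""
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor


print(eden_lr(0.045, batch=2000, epoch=1))    # early in epoch 1
print(eden_lr(0.045, batch=13100, epoch=2))   # start of epoch 2: smaller
```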
], batch size: 85, lr: 4.28e-02, grad_scale: 16.0 +2024-08-25 03:45:54,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14346.666666666666, ans=0.15653333333333333 +2024-08-25 03:46:05,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=14400.0, ans=0.396 +2024-08-25 03:46:06,460 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.951e+02 2.445e+02 2.940e+02 3.728e+02 6.995e+02, threshold=5.880e+02, percent-clipped=3.0 +2024-08-25 03:46:18,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=14453.333333333334, ans=0.0 +2024-08-25 03:46:22,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=14506.666666666666, ans=0.125 +2024-08-25 03:46:45,918 INFO [train.py:1114] (1/4) Epoch 2, batch 250, loss[loss=0.448, simple_loss=0.4253, pruned_loss=0.1719, ctc_loss=0.3172, over 19410.00 frames. ], tot_loss[loss=0.4039, simple_loss=0.3894, pruned_loss=0.1513, ctc_loss=0.2894, over 2755875.80 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 16.0 +2024-08-25 03:47:10,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=14720.0, ans=0.007669565217391304 +2024-08-25 03:47:21,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=14773.333333333334, ans=0.0 +2024-08-25 03:47:40,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=14826.666666666666, ans=0.04949747468305833 +2024-08-25 03:47:41,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.85 vs. limit=13.059999999999999 +2024-08-25 03:47:45,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=14826.666666666666, ans=0.125 +2024-08-25 03:47:50,841 INFO [train.py:1114] (1/4) Epoch 2, batch 300, loss[loss=0.4209, simple_loss=0.4101, pruned_loss=0.1566, ctc_loss=0.2963, over 19532.00 frames. ], tot_loss[loss=0.4004, simple_loss=0.3875, pruned_loss=0.1494, ctc_loss=0.2859, over 3000767.60 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 16.0 +2024-08-25 03:47:53,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=14880.0, ans=0.007634782608695653 +2024-08-25 03:47:56,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=14880.0, ans=0.125 +2024-08-25 03:47:59,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=14880.0, ans=0.025 +2024-08-25 03:48:13,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.396e+02 2.818e+02 3.488e+02 8.647e+02, threshold=5.636e+02, percent-clipped=6.0 +2024-08-25 03:48:50,383 INFO [train.py:1114] (1/4) Epoch 2, batch 350, loss[loss=0.3374, simple_loss=0.3444, pruned_loss=0.119, ctc_loss=0.2309, over 19746.00 frames. ], tot_loss[loss=0.4001, simple_loss=0.3876, pruned_loss=0.1492, ctc_loss=0.2855, over 3189874.42 frames. 
], batch size: 48, lr: 4.27e-02, grad_scale: 16.0 +2024-08-25 03:49:33,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=15200.0, ans=0.42800000000000005 +2024-08-25 03:50:01,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=15306.666666666666, ans=0.125 +2024-08-25 03:50:17,350 INFO [train.py:1114] (1/4) Epoch 2, batch 400, loss[loss=0.4106, simple_loss=0.3951, pruned_loss=0.1546, ctc_loss=0.292, over 19498.00 frames. ], tot_loss[loss=0.3978, simple_loss=0.3868, pruned_loss=0.1479, ctc_loss=0.2829, over 3341436.16 frames. ], batch size: 54, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 03:50:39,710 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.407e+02 2.984e+02 3.456e+02 5.488e+02, threshold=5.968e+02, percent-clipped=0.0 +2024-08-25 03:50:43,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15520.0, ans=0.125 +2024-08-25 03:50:48,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=13.32 +2024-08-25 03:50:49,371 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=2.700e-02 +2024-08-25 03:50:50,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=15520.0, ans=19.14 +2024-08-25 03:51:03,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=15626.666666666666, ans=0.125 +2024-08-25 03:51:19,334 INFO [train.py:1114] (1/4) Epoch 2, batch 450, loss[loss=0.396, simple_loss=0.3946, pruned_loss=0.1438, ctc_loss=0.2745, over 19607.00 frames. ], tot_loss[loss=0.3967, simple_loss=0.3859, pruned_loss=0.1475, ctc_loss=0.2816, over 3450016.16 frames. ], batch size: 55, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 03:51:24,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15680.0, ans=0.125 +2024-08-25 03:51:32,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.39 vs. limit=5.359999999999999 +2024-08-25 03:51:38,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=15733.333333333334, ans=0.125 +2024-08-25 03:51:43,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=15733.333333333334, ans=0.125 +2024-08-25 03:51:49,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=13.42 +2024-08-25 03:51:54,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.82 vs. limit=19.34 +2024-08-25 03:52:05,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.66 vs. 
limit=13.440000000000001 +2024-08-25 03:52:10,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=15893.333333333334, ans=0.34373333333333334 +2024-08-25 03:52:20,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15946.666666666666, ans=0.14053333333333334 +2024-08-25 03:52:21,860 INFO [train.py:1114] (1/4) Epoch 2, batch 500, loss[loss=0.4456, simple_loss=0.4176, pruned_loss=0.1723, ctc_loss=0.3222, over 19633.00 frames. ], tot_loss[loss=0.3934, simple_loss=0.384, pruned_loss=0.1457, ctc_loss=0.2784, over 3545224.30 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 16.0 +2024-08-25 03:53:00,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.27 vs. limit=9.0 +2024-08-25 03:53:07,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=16000.0, ans=0.007391304347826087 +2024-08-25 03:53:11,993 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.425e+02 3.079e+02 3.995e+02 1.154e+03, threshold=6.159e+02, percent-clipped=13.0 +2024-08-25 03:53:12,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=16053.333333333334, ans=0.9105333333333333 +2024-08-25 03:53:19,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.33 vs. limit=19.54 +2024-08-25 03:53:19,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.68 vs. limit=19.54 +2024-08-25 03:53:24,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=16106.666666666666, ans=0.13893333333333333 +2024-08-25 03:53:49,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.87 vs. limit=13.58 +2024-08-25 03:53:49,957 INFO [train.py:1114] (1/4) Epoch 2, batch 550, loss[loss=0.3737, simple_loss=0.3772, pruned_loss=0.1325, ctc_loss=0.2632, over 19278.00 frames. ], tot_loss[loss=0.3916, simple_loss=0.3834, pruned_loss=0.1447, ctc_loss=0.2765, over 3606845.61 frames. ], batch size: 71, lr: 4.25e-02, grad_scale: 16.0 +2024-08-25 03:54:10,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.64 vs. limit=5.4399999999999995 +2024-08-25 03:54:27,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=13.64 +2024-08-25 03:54:30,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=16373.333333333334, ans=0.3269333333333333 +2024-08-25 03:54:51,528 INFO [train.py:1114] (1/4) Epoch 2, batch 600, loss[loss=0.4331, simple_loss=0.4156, pruned_loss=0.1626, ctc_loss=0.3136, over 19448.00 frames. ], tot_loss[loss=0.3894, simple_loss=0.3824, pruned_loss=0.1434, ctc_loss=0.2741, over 3664934.48 frames. 
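The `grad_scale` field is the dynamic loss scale of Native-AMP mixed-precision training: it doubles after a stretch of finite gradients and halves on overflow, which is why values of 8.0, 16.0, and 32.0 alternate in the lines above. A self-contained sketch of the standard PyTorch pattern, with a dummy model and data rather than the recipe's:

```python
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
model = torch.nn.Linear(4, 1).to(device)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.cuda.amp.GradScaler(enabled=device == "cuda")  # holds grad_scale

for step in range(3):
    x = torch.randn(8, 4, device=device)
    with torch.cuda.amp.autocast(enabled=device == "cuda"):
        loss = model(x).pow(2).mean()
    opt.zero_grad()
    scaler.scale(loss).backward()  # loss is multiplied by the current scale
    scaler.step(opt)               # grads are unscaled; step skipped on overflow
    scaler.update()                # the scale grows/shrinks dynamically
    print("grad_scale:", scaler.get_scale())
```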
], batch size: 67, lr: 4.24e-02, grad_scale: 16.0 +2024-08-25 03:54:52,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.70 vs. limit=19.86 +2024-08-25 03:55:02,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16533.333333333332, ans=0.125 +2024-08-25 03:55:02,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=13.7 +2024-08-25 03:55:04,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=16533.333333333332, ans=0.13466666666666668 +2024-08-25 03:55:11,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16533.333333333332, ans=0.125 +2024-08-25 03:55:14,977 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.336e+02 2.753e+02 3.494e+02 8.105e+02, threshold=5.507e+02, percent-clipped=1.0 +2024-08-25 03:55:30,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=19.98 +2024-08-25 03:55:56,157 INFO [train.py:1114] (1/4) Epoch 2, batch 650, loss[loss=0.3689, simple_loss=0.3702, pruned_loss=0.1322, ctc_loss=0.2578, over 19762.00 frames. ], tot_loss[loss=0.3882, simple_loss=0.3814, pruned_loss=0.1429, ctc_loss=0.273, over 3715274.43 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 16.0 +2024-08-25 03:56:27,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16853.333333333332, ans=0.13146666666666668 +2024-08-25 03:56:40,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=16906.666666666668, ans=0.125 +2024-08-25 03:56:46,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=16960.0, ans=0.025 +2024-08-25 03:56:56,407 INFO [train.py:1114] (1/4) Epoch 2, batch 700, loss[loss=0.3288, simple_loss=0.3428, pruned_loss=0.1121, ctc_loss=0.2263, over 19719.00 frames. ], tot_loss[loss=0.3872, simple_loss=0.3811, pruned_loss=0.1423, ctc_loss=0.2718, over 3747259.36 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 16.0 +2024-08-25 03:57:05,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=17013.333333333332, ans=0.125 +2024-08-25 03:57:13,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=17066.666666666668, ans=0.07 +2024-08-25 03:57:23,239 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.791e+02 2.519e+02 2.895e+02 3.628e+02 6.087e+02, threshold=5.790e+02, percent-clipped=2.0 +2024-08-25 03:58:00,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.14 vs. limit=13.98 +2024-08-25 03:58:01,082 INFO [train.py:1114] (1/4) Epoch 2, batch 750, loss[loss=0.3486, simple_loss=0.363, pruned_loss=0.1198, ctc_loss=0.2364, over 19500.00 frames. ], tot_loss[loss=0.3869, simple_loss=0.3809, pruned_loss=0.1422, ctc_loss=0.2712, over 3774238.27 frames. 
], batch size: 54, lr: 4.23e-02, grad_scale: 16.0 +2024-08-25 03:58:21,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17333.333333333332, ans=0.125 +2024-08-25 03:58:23,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.23 vs. limit=5.6 +2024-08-25 03:58:27,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=17386.666666666668, ans=0.29146666666666676 +2024-08-25 03:58:27,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.57 vs. limit=14.02 +2024-08-25 03:58:28,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=17386.666666666668, ans=0.125 +2024-08-25 04:00:08,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17493.333333333332, ans=0.125 +2024-08-25 04:00:16,090 INFO [train.py:1114] (1/4) Epoch 2, batch 800, loss[loss=0.3618, simple_loss=0.3632, pruned_loss=0.1313, ctc_loss=0.2446, over 19415.00 frames. ], tot_loss[loss=0.3853, simple_loss=0.38, pruned_loss=0.1414, ctc_loss=0.2694, over 3796416.54 frames. ], batch size: 48, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 04:00:17,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=14.08 +2024-08-25 04:00:20,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17546.666666666668, ans=0.1245333333333333 +2024-08-25 04:00:25,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=17546.666666666668, ans=0.125 +2024-08-25 04:00:30,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.13 vs. limit=20.7 +2024-08-25 04:00:39,331 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.611e+02 3.088e+02 3.881e+02 9.768e+02, threshold=6.176e+02, percent-clipped=6.0 +2024-08-25 04:00:57,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.14 vs. limit=14.14 +2024-08-25 04:01:00,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=17706.666666666668, ans=0.125 +2024-08-25 04:01:09,446 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:01:15,035 INFO [train.py:1114] (1/4) Epoch 2, batch 850, loss[loss=0.3971, simple_loss=0.3955, pruned_loss=0.1462, ctc_loss=0.2659, over 19672.00 frames. ], tot_loss[loss=0.3827, simple_loss=0.3785, pruned_loss=0.1401, ctc_loss=0.2668, over 3814423.31 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 04:01:21,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.83 vs. 
limit=20.86 +2024-08-25 04:01:22,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17813.333333333332, ans=0.12186666666666668 +2024-08-25 04:01:26,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=17866.666666666668, ans=0.2746666666666667 +2024-08-25 04:01:40,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=17920.0, ans=0.125 +2024-08-25 04:01:47,547 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:01:49,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=17920.0, ans=0.0 +2024-08-25 04:01:51,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.47 vs. limit=5.696 +2024-08-25 04:01:56,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=17973.333333333332, ans=0.07026666666666667 +2024-08-25 04:01:57,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.78 vs. limit=9.493333333333332 +2024-08-25 04:02:00,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=17973.333333333332, ans=0.00696231884057971 +2024-08-25 04:02:09,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. limit=14.26 +2024-08-25 04:02:18,993 INFO [train.py:1114] (1/4) Epoch 2, batch 900, loss[loss=0.3408, simple_loss=0.3496, pruned_loss=0.1197, ctc_loss=0.2317, over 19803.00 frames. ], tot_loss[loss=0.383, simple_loss=0.3787, pruned_loss=0.1404, ctc_loss=0.2666, over 3818215.99 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 04:03:03,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.814e+02 2.530e+02 3.033e+02 3.602e+02 3.379e+03, threshold=6.066e+02, percent-clipped=6.0 +2024-08-25 04:03:06,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=18186.666666666668, ans=0.26346666666666674 +2024-08-25 04:03:17,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18240.0, ans=0.125 +2024-08-25 04:03:36,914 INFO [train.py:1114] (1/4) Epoch 2, batch 950, loss[loss=0.3957, simple_loss=0.3641, pruned_loss=0.1554, ctc_loss=0.2913, over 19517.00 frames. ], tot_loss[loss=0.3832, simple_loss=0.3788, pruned_loss=0.1404, ctc_loss=0.2669, over 3819215.99 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 04:03:37,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.19 vs. 
limit=9.586666666666666 +2024-08-25 04:03:40,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=18346.666666666668, ans=0.0 +2024-08-25 04:04:05,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18453.333333333332, ans=0.11546666666666669 +2024-08-25 04:04:18,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=18506.666666666668, ans=0.25226666666666675 +2024-08-25 04:04:19,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18506.666666666668, ans=0.0 +2024-08-25 04:04:26,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18560.0, ans=0.0 +2024-08-25 04:04:39,324 INFO [train.py:1114] (1/4) Epoch 2, batch 1000, loss[loss=0.3534, simple_loss=0.3626, pruned_loss=0.1234, ctc_loss=0.2439, over 19834.00 frames. ], tot_loss[loss=0.3816, simple_loss=0.3785, pruned_loss=0.1393, ctc_loss=0.2649, over 3814199.32 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 04:04:53,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.57 vs. limit=9.666666666666668 +2024-08-25 04:05:03,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=18720.0, ans=0.24480000000000002 +2024-08-25 04:05:05,784 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.321e+02 2.743e+02 3.485e+02 6.350e+02, threshold=5.486e+02, percent-clipped=2.0 +2024-08-25 04:05:07,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=14.52 +2024-08-25 04:05:18,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=18773.333333333332, ans=10.0 +2024-08-25 04:05:41,810 INFO [train.py:1114] (1/4) Epoch 2, batch 1050, loss[loss=0.3724, simple_loss=0.3785, pruned_loss=0.1337, ctc_loss=0.2472, over 19836.00 frames. ], tot_loss[loss=0.3796, simple_loss=0.3771, pruned_loss=0.1384, ctc_loss=0.2628, over 3821203.38 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 04:05:45,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18880.0, ans=0.11120000000000002 +2024-08-25 04:05:50,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=18880.0, ans=0.006765217391304348 +2024-08-25 04:05:54,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18933.333333333332, ans=0.11066666666666669 +2024-08-25 04:06:06,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.72 vs. 
limit=14.620000000000001 +2024-08-25 04:06:10,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=18986.666666666668, ans=0.125 +2024-08-25 04:06:29,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=19040.0, ans=0.0 +2024-08-25 04:06:33,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=19093.333333333332, ans=0.125 +2024-08-25 04:06:44,163 INFO [train.py:1114] (1/4) Epoch 2, batch 1100, loss[loss=0.3708, simple_loss=0.3691, pruned_loss=0.1342, ctc_loss=0.2603, over 19593.00 frames. ], tot_loss[loss=0.3777, simple_loss=0.3761, pruned_loss=0.1375, ctc_loss=0.2607, over 3829216.55 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 04:06:44,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=19146.666666666668, ans=0.0 +2024-08-25 04:07:11,086 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.465e+02 2.960e+02 4.039e+02 7.406e+02, threshold=5.919e+02, percent-clipped=11.0 +2024-08-25 04:07:45,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19306.666666666668, ans=0.125 +2024-08-25 04:08:08,071 INFO [train.py:1114] (1/4) Epoch 2, batch 1150, loss[loss=0.3785, simple_loss=0.3687, pruned_loss=0.1414, ctc_loss=0.264, over 19580.00 frames. ], tot_loss[loss=0.3777, simple_loss=0.376, pruned_loss=0.1375, ctc_loss=0.2607, over 3829591.84 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 04:08:16,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=19413.333333333332, ans=0.125 +2024-08-25 04:08:35,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=19520.0, ans=0.125 +2024-08-25 04:08:53,188 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:08:58,112 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=14.86 +2024-08-25 04:09:03,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=19626.666666666668, ans=0.21306666666666674 +2024-08-25 04:09:08,084 INFO [train.py:1114] (1/4) Epoch 2, batch 1200, loss[loss=0.3709, simple_loss=0.3821, pruned_loss=0.1294, ctc_loss=0.2524, over 19837.00 frames. ], tot_loss[loss=0.3774, simple_loss=0.3761, pruned_loss=0.1372, ctc_loss=0.2605, over 3824515.07 frames. 
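The balancer entries throughout these logs (`min_positive`, `max_abs`, `prob`, ...) refer to modules that constrain simple per-channel statistics of activations, with `prob` controlling how often the constraint is applied. A sketch of the statistics such a module could monitor; the function name and semantics are assumptions for illustration, not icefall's API:

```python
import torch


def balancer_stats(x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]:
    """Per-channel statistics a Balancer-style module could constrain,
    for x of shape (frames, channels): the fraction of positive values
    (cf. min_positive/max_positive in the logs) and the mean absolute
    value (cf. min_abs/max_abs). Assumed semantics, shown for context."""
    pos_frac = (x > 0).float().mean(dim=0)
    mean_abs = x.abs().mean(dim=0)
    return pos_frac, mean_abs


x = torch.randn(1000, 8)
pos_frac, mean_abs = balancer_stats(x)
print(pos_frac)  # ≈ 0.5 for zero-mean Gaussian activations
print(mean_abs)  # ≈ 0.80, since E|N(0,1)| = sqrt(2/pi)
```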
], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 04:09:21,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=19733.333333333332, ans=0.10266666666666668 +2024-08-25 04:09:36,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 2.637e+02 3.065e+02 4.000e+02 6.600e+02, threshold=6.130e+02, percent-clipped=2.0 +2024-08-25 04:09:36,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19786.666666666668, ans=0.10213333333333333 +2024-08-25 04:09:44,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=19786.666666666668, ans=0.125 +2024-08-25 04:09:49,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.83 vs. limit=14.940000000000001 +2024-08-25 04:09:53,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.88 vs. limit=14.940000000000001 +2024-08-25 04:10:10,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=19946.666666666668, ans=0.006533333333333334 +2024-08-25 04:10:11,978 INFO [train.py:1114] (1/4) Epoch 2, batch 1250, loss[loss=0.4142, simple_loss=0.4028, pruned_loss=0.1554, ctc_loss=0.2874, over 19534.00 frames. ], tot_loss[loss=0.3745, simple_loss=0.375, pruned_loss=0.1356, ctc_loss=0.2573, over 3842435.77 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 04:10:12,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=19946.666666666668, ans=0.025 +2024-08-25 04:10:17,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19946.666666666668, ans=0.10053333333333334 +2024-08-25 04:10:19,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19946.666666666668, ans=0.10053333333333334 +2024-08-25 04:10:25,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-08-25 04:10:29,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.15 vs. 
limit=6.0 +2024-08-25 04:10:38,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=20053.333333333332, ans=10.0 +2024-08-25 04:10:46,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=20106.666666666668, ans=0.00649855072463768 +2024-08-25 04:10:52,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20106.666666666668, ans=0.1 +2024-08-25 04:10:56,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=20106.666666666668, ans=0.125 +2024-08-25 04:11:08,861 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:11:15,949 INFO [train.py:1114] (1/4) Epoch 2, batch 1300, loss[loss=0.3919, simple_loss=0.3894, pruned_loss=0.1433, ctc_loss=0.269, over 18869.00 frames. ], tot_loss[loss=0.3732, simple_loss=0.3742, pruned_loss=0.1349, ctc_loss=0.2561, over 3846646.55 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 04:11:41,991 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.187e+02 2.429e+02 2.931e+02 4.736e+02, threshold=4.858e+02, percent-clipped=0.0 +2024-08-25 04:11:47,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=20320.0, ans=0.2 +2024-08-25 04:11:53,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=20373.333333333332, ans=0.125 +2024-08-25 04:11:53,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=20373.333333333332, ans=0.0 +2024-08-25 04:11:54,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=20373.333333333332, ans=0.125 +2024-08-25 04:12:05,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=20426.666666666668, ans=0.125 +2024-08-25 04:12:15,276 INFO [train.py:1114] (1/4) Epoch 2, batch 1350, loss[loss=0.3624, simple_loss=0.3657, pruned_loss=0.1302, ctc_loss=0.247, over 19775.00 frames. ], tot_loss[loss=0.3724, simple_loss=0.3738, pruned_loss=0.1345, ctc_loss=0.2551, over 3858167.57 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:12:15,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.64 vs. limit=15.0 +2024-08-25 04:12:21,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=20480.0, ans=0.125 +2024-08-25 04:12:31,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. 
limit=15.0 +2024-08-25 04:12:34,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=20533.333333333332, ans=0.125 +2024-08-25 04:12:46,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=20586.666666666668, ans=0.125 +2024-08-25 04:12:48,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=20586.666666666668, ans=0.125 +2024-08-25 04:13:18,521 INFO [train.py:1114] (1/4) Epoch 2, batch 1400, loss[loss=0.2754, simple_loss=0.3096, pruned_loss=0.08623, ctc_loss=0.1717, over 19678.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.3723, pruned_loss=0.1329, ctc_loss=0.2525, over 3864535.33 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:13:19,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=20746.666666666668, ans=0.125 +2024-08-25 04:13:20,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20746.666666666668, ans=0.125 +2024-08-25 04:13:31,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=20800.0, ans=0.0 +2024-08-25 04:13:59,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.04 vs. limit=22.5 +2024-08-25 04:14:03,159 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.385e+02 2.674e+02 3.744e+02 6.684e+02, threshold=5.347e+02, percent-clipped=6.0 +2024-08-25 04:14:10,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.29 vs. limit=15.0 +2024-08-25 04:14:18,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.26 vs. limit=22.5 +2024-08-25 04:14:21,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=20906.666666666668, ans=0.2 +2024-08-25 04:14:26,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=20960.0, ans=0.125 +2024-08-25 04:14:37,932 INFO [train.py:1114] (1/4) Epoch 2, batch 1450, loss[loss=0.3926, simple_loss=0.3934, pruned_loss=0.1418, ctc_loss=0.2705, over 19723.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.3725, pruned_loss=0.1329, ctc_loss=0.2519, over 3862719.65 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:14:40,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=21013.333333333332, ans=0.125 +2024-08-25 04:14:54,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21066.666666666668, ans=0.125 +2024-08-25 04:14:57,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=21066.666666666668, ans=0.0 +2024-08-25 04:16:08,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. 
limit=22.5 +2024-08-25 04:16:11,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=21173.333333333332, ans=0.125 +2024-08-25 04:16:15,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21173.333333333332, ans=0.125 +2024-08-25 04:16:20,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=21226.666666666668, ans=0.2 +2024-08-25 04:16:22,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=21226.666666666668, ans=0.95 +2024-08-25 04:16:23,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=21226.666666666668, ans=0.125 +2024-08-25 04:16:32,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21280.0, ans=0.1 +2024-08-25 04:16:33,079 INFO [train.py:1114] (1/4) Epoch 2, batch 1500, loss[loss=0.3947, simple_loss=0.3886, pruned_loss=0.1446, ctc_loss=0.2793, over 19583.00 frames. ], tot_loss[loss=0.3695, simple_loss=0.3728, pruned_loss=0.1327, ctc_loss=0.2517, over 3861872.94 frames. ], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:16:41,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21280.0, ans=0.125 +2024-08-25 04:16:44,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=21333.333333333332, ans=0.125 +2024-08-25 04:17:08,004 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.509e+02 2.906e+02 4.274e+02 8.598e+02, threshold=5.813e+02, percent-clipped=13.0 +2024-08-25 04:17:10,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=21386.666666666668, ans=0.0 +2024-08-25 04:17:13,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=21386.666666666668, ans=0.025 +2024-08-25 04:17:20,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=21440.0, ans=0.0 +2024-08-25 04:17:22,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21440.0, ans=0.1 +2024-08-25 04:17:25,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=21440.0, ans=0.2 +2024-08-25 04:17:42,726 INFO [train.py:1114] (1/4) Epoch 2, batch 1550, loss[loss=0.3928, simple_loss=0.3949, pruned_loss=0.1417, ctc_loss=0.268, over 19584.00 frames. ], tot_loss[loss=0.3691, simple_loss=0.3726, pruned_loss=0.1326, ctc_loss=0.2513, over 3846711.90 frames. 
], batch size: 60, lr: 4.14e-02, grad_scale: 16.0 +2024-08-25 04:17:46,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=21546.666666666668, ans=0.5 +2024-08-25 04:17:52,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21546.666666666668, ans=0.125 +2024-08-25 04:17:58,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=21600.0, ans=0.015 +2024-08-25 04:17:59,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=21600.0, ans=0.2 +2024-08-25 04:18:21,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.92 vs. limit=15.0 +2024-08-25 04:18:28,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.51 vs. limit=22.5 +2024-08-25 04:18:44,946 INFO [train.py:1114] (1/4) Epoch 2, batch 1600, loss[loss=0.3803, simple_loss=0.3874, pruned_loss=0.1355, ctc_loss=0.2556, over 19837.00 frames. ], tot_loss[loss=0.3672, simple_loss=0.3712, pruned_loss=0.1317, ctc_loss=0.2495, over 3835638.61 frames. ], batch size: 57, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 04:19:01,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21866.666666666668, ans=0.125 +2024-08-25 04:19:05,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=21866.666666666668, ans=0.006115942028985508 +2024-08-25 04:19:13,738 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.370e+02 2.902e+02 3.664e+02 6.938e+02, threshold=5.803e+02, percent-clipped=2.0 +2024-08-25 04:19:24,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=21973.333333333332, ans=0.0060927536231884065 +2024-08-25 04:19:35,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21973.333333333332, ans=0.125 +2024-08-25 04:19:36,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=22026.666666666668, ans=0.125 +2024-08-25 04:19:49,396 INFO [train.py:1114] (1/4) Epoch 2, batch 1650, loss[loss=0.4021, simple_loss=0.3978, pruned_loss=0.1484, ctc_loss=0.2742, over 19641.00 frames. ], tot_loss[loss=0.3664, simple_loss=0.3704, pruned_loss=0.1314, ctc_loss=0.2491, over 3833478.71 frames. 
], batch size: 59, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 04:19:49,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=22080.0, ans=0.125 +2024-08-25 04:19:50,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=22080.0, ans=0.125 +2024-08-25 04:20:06,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22133.333333333332, ans=0.0 +2024-08-25 04:20:36,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=22293.333333333332, ans=0.0 +2024-08-25 04:20:43,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22293.333333333332, ans=0.125 +2024-08-25 04:20:48,531 INFO [train.py:1114] (1/4) Epoch 2, batch 1700, loss[loss=0.3303, simple_loss=0.3364, pruned_loss=0.117, ctc_loss=0.2255, over 19679.00 frames. ], tot_loss[loss=0.3651, simple_loss=0.3699, pruned_loss=0.1306, ctc_loss=0.2478, over 3848170.98 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0 +2024-08-25 04:21:16,624 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.264e+02 2.715e+02 3.253e+02 5.462e+02, threshold=5.430e+02, percent-clipped=0.0 +2024-08-25 04:21:25,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.78 vs. limit=10.0 +2024-08-25 04:21:27,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22506.666666666668, ans=0.1 +2024-08-25 04:21:48,261 INFO [train.py:1114] (1/4) Epoch 2, batch 1750, loss[loss=0.349, simple_loss=0.3446, pruned_loss=0.1291, ctc_loss=0.2383, over 19643.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.3683, pruned_loss=0.1295, ctc_loss=0.2456, over 3853458.26 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 32.0 +2024-08-25 04:21:55,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=22613.333333333332, ans=0.95 +2024-08-25 04:22:33,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.00 vs. limit=15.0 +2024-08-25 04:22:50,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=22826.666666666668, ans=0.025 +2024-08-25 04:22:56,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=22826.666666666668, ans=0.125 +2024-08-25 04:23:00,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=22826.666666666668, ans=0.0 +2024-08-25 04:23:02,441 INFO [train.py:1114] (1/4) Epoch 2, batch 1800, loss[loss=0.3592, simple_loss=0.3724, pruned_loss=0.1251, ctc_loss=0.2394, over 19617.00 frames. ], tot_loss[loss=0.3642, simple_loss=0.3692, pruned_loss=0.1303, ctc_loss=0.2466, over 3854744.74 frames. 
], batch size: 55, lr: 4.11e-02, grad_scale: 32.0 +2024-08-25 04:23:10,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=22880.0, ans=0.2 +2024-08-25 04:23:28,012 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.805e+02 2.473e+02 2.913e+02 3.585e+02 6.262e+02, threshold=5.825e+02, percent-clipped=5.0 +2024-08-25 04:23:38,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=23040.0, ans=0.025 +2024-08-25 04:23:59,522 INFO [train.py:1114] (1/4) Epoch 2, batch 1850, loss[loss=0.3396, simple_loss=0.3626, pruned_loss=0.1151, ctc_loss=0.2161, over 19579.00 frames. ], tot_loss[loss=0.3625, simple_loss=0.3686, pruned_loss=0.1293, ctc_loss=0.2447, over 3858462.73 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 32.0 +2024-08-25 04:24:12,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0 +2024-08-25 04:24:17,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=23200.0, ans=0.0 +2024-08-25 04:24:25,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0 +2024-08-25 04:24:29,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=23253.333333333332, ans=0.2 +2024-08-25 04:24:43,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=23360.0, ans=0.005791304347826087 +2024-08-25 04:24:56,424 INFO [train.py:1114] (1/4) Epoch 2, batch 1900, loss[loss=0.353, simple_loss=0.3725, pruned_loss=0.1209, ctc_loss=0.2294, over 19635.00 frames. ], tot_loss[loss=0.3616, simple_loss=0.3683, pruned_loss=0.1287, ctc_loss=0.2436, over 3861789.55 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 32.0 +2024-08-25 04:25:12,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=23466.666666666668, ans=0.125 +2024-08-25 04:25:21,309 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.247e+02 2.781e+02 3.399e+02 7.136e+02, threshold=5.561e+02, percent-clipped=3.0 +2024-08-25 04:25:21,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.36 vs. limit=6.0 +2024-08-25 04:25:48,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=23626.666666666668, ans=0.005733333333333333 +2024-08-25 04:25:53,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23626.666666666668, ans=0.125 +2024-08-25 04:25:55,287 INFO [train.py:1114] (1/4) Epoch 2, batch 1950, loss[loss=0.3779, simple_loss=0.3784, pruned_loss=0.1358, ctc_loss=0.2646, over 19600.00 frames. ], tot_loss[loss=0.3623, simple_loss=0.3692, pruned_loss=0.1289, ctc_loss=0.2439, over 3870452.80 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 32.0 +2024-08-25 04:25:56,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.05 vs. 
limit=15.0 +2024-08-25 04:26:07,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.39 vs. limit=12.0 +2024-08-25 04:26:20,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.45 vs. limit=15.0 +2024-08-25 04:26:27,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23786.666666666668, ans=0.1 +2024-08-25 04:26:28,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.81 vs. limit=15.0 +2024-08-25 04:26:30,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23840.0, ans=0.1 +2024-08-25 04:26:34,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=23840.0, ans=0.95 +2024-08-25 04:26:40,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23840.0, ans=0.1 +2024-08-25 04:26:41,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.61 vs. limit=10.0 +2024-08-25 04:26:46,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=23893.333333333332, ans=0.025 +2024-08-25 04:26:52,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=23893.333333333332, ans=0.125 +2024-08-25 04:26:53,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=15.0 +2024-08-25 04:26:54,476 INFO [train.py:1114] (1/4) Epoch 2, batch 2000, loss[loss=0.3134, simple_loss=0.3244, pruned_loss=0.1085, ctc_loss=0.2134, over 19636.00 frames. ], tot_loss[loss=0.3637, simple_loss=0.37, pruned_loss=0.1297, ctc_loss=0.245, over 3854595.39 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0 +2024-08-25 04:27:05,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=24000.0, ans=0.0 +2024-08-25 04:27:15,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24000.0, ans=0.125 +2024-08-25 04:27:20,444 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.739e+02 2.625e+02 3.128e+02 3.968e+02 6.078e+02, threshold=6.255e+02, percent-clipped=2.0 +2024-08-25 04:27:28,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24106.666666666668, ans=0.1 +2024-08-25 04:27:30,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=24106.666666666668, ans=0.2 +2024-08-25 04:27:30,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24106.666666666668, ans=0.0 +2024-08-25 04:27:32,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.40 vs. 
limit=15.0 +2024-08-25 04:27:36,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=24106.666666666668, ans=0.125 +2024-08-25 04:27:38,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=24160.0, ans=0.125 +2024-08-25 04:27:51,118 INFO [train.py:1114] (1/4) Epoch 2, batch 2050, loss[loss=0.2978, simple_loss=0.3172, pruned_loss=0.1007, ctc_loss=0.1925, over 19707.00 frames. ], tot_loss[loss=0.3617, simple_loss=0.3682, pruned_loss=0.1288, ctc_loss=0.2436, over 3851137.52 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 04:28:08,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=24266.666666666668, ans=0.125 +2024-08-25 04:28:19,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=24320.0, ans=0.125 +2024-08-25 04:28:35,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.03 vs. limit=15.0 +2024-08-25 04:28:36,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=24426.666666666668, ans=0.005559420289855072 +2024-08-25 04:28:47,790 INFO [train.py:1114] (1/4) Epoch 2, batch 2100, loss[loss=0.3552, simple_loss=0.368, pruned_loss=0.1244, ctc_loss=0.2339, over 19760.00 frames. ], tot_loss[loss=0.3598, simple_loss=0.3669, pruned_loss=0.128, ctc_loss=0.2419, over 3857910.48 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 04:28:51,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=24480.0, ans=0.125 +2024-08-25 04:29:06,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=24533.333333333332, ans=0.0 +2024-08-25 04:29:13,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24586.666666666668, ans=0.125 +2024-08-25 04:29:14,128 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.675e+02 2.311e+02 2.619e+02 3.137e+02 5.086e+02, threshold=5.238e+02, percent-clipped=0.0 +2024-08-25 04:29:18,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=24586.666666666668, ans=0.07 +2024-08-25 04:29:19,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=24586.666666666668, ans=0.2 +2024-08-25 04:29:22,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24640.0, ans=0.125 +2024-08-25 04:29:35,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=24693.333333333332, ans=0.2 +2024-08-25 04:29:37,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=24693.333333333332, ans=0.0 +2024-08-25 04:29:44,336 INFO [train.py:1114] (1/4) Epoch 2, batch 2150, loss[loss=0.3015, simple_loss=0.3338, pruned_loss=0.09751, ctc_loss=0.1856, over 19577.00 frames. ], tot_loss[loss=0.3578, simple_loss=0.3655, pruned_loss=0.127, ctc_loss=0.24, over 3868715.03 frames. 
], batch size: 52, lr: 4.07e-02, grad_scale: 32.0 +2024-08-25 04:29:46,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24746.666666666668, ans=0.1 +2024-08-25 04:29:53,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=24746.666666666668, ans=0.025 +2024-08-25 04:30:03,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=24800.0, ans=0.125 +2024-08-25 04:30:11,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=24853.333333333332, ans=0.025 +2024-08-25 04:30:27,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=24960.0, ans=0.125 +2024-08-25 04:30:33,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=24960.0, ans=0.125 +2024-08-25 04:30:33,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=24960.0, ans=0.125 +2024-08-25 04:30:35,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=24960.0, ans=0.125 +2024-08-25 04:30:40,042 INFO [train.py:1114] (1/4) Epoch 2, batch 2200, loss[loss=0.3799, simple_loss=0.392, pruned_loss=0.1339, ctc_loss=0.2499, over 19601.00 frames. ], tot_loss[loss=0.3573, simple_loss=0.3656, pruned_loss=0.1267, ctc_loss=0.239, over 3867598.72 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 04:31:06,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.398e+02 2.814e+02 3.505e+02 8.042e+02, threshold=5.628e+02, percent-clipped=3.0 +2024-08-25 04:31:23,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=25173.333333333332, ans=0.125 +2024-08-25 04:31:37,460 INFO [train.py:1114] (1/4) Epoch 2, batch 2250, loss[loss=0.3346, simple_loss=0.3636, pruned_loss=0.1106, ctc_loss=0.2109, over 19604.00 frames. ], tot_loss[loss=0.3576, simple_loss=0.366, pruned_loss=0.1268, ctc_loss=0.2391, over 3867623.39 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 04:31:46,396 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:31:47,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=25333.333333333332, ans=0.05 +2024-08-25 04:31:55,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25333.333333333332, ans=0.1 +2024-08-25 04:32:09,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=25386.666666666668, ans=0.125 +2024-08-25 04:32:14,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=25440.0, ans=0.1 +2024-08-25 04:32:18,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.53 vs. 
limit=6.0 +2024-08-25 04:32:31,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=25493.333333333332, ans=0.0 +2024-08-25 04:32:33,458 INFO [train.py:1114] (1/4) Epoch 2, batch 2300, loss[loss=0.3058, simple_loss=0.3347, pruned_loss=0.1003, ctc_loss=0.1903, over 19519.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3641, pruned_loss=0.126, ctc_loss=0.2371, over 3861882.93 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 16.0 +2024-08-25 04:32:40,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=25546.666666666668, ans=0.005315942028985506 +2024-08-25 04:32:47,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=15.0 +2024-08-25 04:33:03,051 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.317e+02 2.709e+02 3.466e+02 6.027e+02, threshold=5.417e+02, percent-clipped=4.0 +2024-08-25 04:33:04,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=25653.333333333332, ans=0.125 +2024-08-25 04:33:04,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=25653.333333333332, ans=0.125 +2024-08-25 04:33:23,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25760.0, ans=0.1 +2024-08-25 04:33:32,444 INFO [train.py:1114] (1/4) Epoch 2, batch 2350, loss[loss=0.3511, simple_loss=0.3732, pruned_loss=0.1194, ctc_loss=0.2254, over 19672.00 frames. ], tot_loss[loss=0.3564, simple_loss=0.3646, pruned_loss=0.1265, ctc_loss=0.238, over 3864853.50 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 16.0 +2024-08-25 04:33:42,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.22 vs. limit=15.0 +2024-08-25 04:33:45,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=25866.666666666668, ans=0.0 +2024-08-25 04:33:58,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=25920.0, ans=0.0 +2024-08-25 04:34:04,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=25920.0, ans=0.125 +2024-08-25 04:34:17,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.87 vs. limit=22.5 +2024-08-25 04:34:20,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.02 vs. limit=22.5 +2024-08-25 04:34:22,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26026.666666666668, ans=0.125 +2024-08-25 04:34:23,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0 +2024-08-25 04:34:30,678 INFO [train.py:1114] (1/4) Epoch 2, batch 2400, loss[loss=0.4125, simple_loss=0.4019, pruned_loss=0.1535, ctc_loss=0.2904, over 19187.00 frames. 
], tot_loss[loss=0.3585, simple_loss=0.3667, pruned_loss=0.1273, ctc_loss=0.2394, over 3859043.96 frames. ], batch size: 71, lr: 4.04e-02, grad_scale: 32.0 +2024-08-25 04:34:57,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.789e+02 2.184e+02 2.505e+02 3.102e+02 8.045e+02, threshold=5.010e+02, percent-clipped=5.0 +2024-08-25 04:35:06,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=26240.0, ans=0.2 +2024-08-25 04:35:24,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=26293.333333333332, ans=0.125 +2024-08-25 04:35:27,217 INFO [train.py:1114] (1/4) Epoch 2, batch 2450, loss[loss=0.4679, simple_loss=0.4143, pruned_loss=0.1894, ctc_loss=0.3566, over 13465.00 frames. ], tot_loss[loss=0.3687, simple_loss=0.3724, pruned_loss=0.1326, ctc_loss=0.2489, over 3732884.53 frames. ], batch size: 141, lr: 4.03e-02, grad_scale: 32.0 +2024-08-25 04:35:34,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=26346.666666666668, ans=0.125 +2024-08-25 04:35:49,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=26453.333333333332, ans=0.125 +2024-08-25 04:35:49,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.47 vs. limit=22.5 +2024-08-25 04:36:02,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.43 vs. limit=12.0 +2024-08-25 04:36:05,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26506.666666666668, ans=0.125 +2024-08-25 04:36:50,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26554.666666666668, ans=0.07 +2024-08-25 04:36:55,747 INFO [train.py:1114] (1/4) Epoch 3, batch 0, loss[loss=0.3225, simple_loss=0.3344, pruned_loss=0.1136, ctc_loss=0.2084, over 19823.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3344, pruned_loss=0.1136, ctc_loss=0.2084, over 19823.00 frames. ], batch size: 49, lr: 3.83e-02, grad_scale: 32.0 +2024-08-25 04:36:55,748 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 04:37:08,022 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2847, simple_loss=0.3461, pruned_loss=0.08168, ctc_loss=0.1499, over 944034.00 frames. +2024-08-25 04:37:08,022 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 04:37:10,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=26554.666666666668, ans=0.005096811594202898 +2024-08-25 04:37:17,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.80 vs. 
limit=15.0 +2024-08-25 04:37:27,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=26608.0, ans=0.0 +2024-08-25 04:37:45,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=26714.666666666668, ans=0.025 +2024-08-25 04:37:50,805 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.252e+02 2.580e+02 3.143e+02 6.401e+02, threshold=5.159e+02, percent-clipped=2.0 +2024-08-25 04:38:10,074 INFO [train.py:1114] (1/4) Epoch 3, batch 50, loss[loss=0.2604, simple_loss=0.2978, pruned_loss=0.0813, ctc_loss=0.1508, over 19667.00 frames. ], tot_loss[loss=0.3616, simple_loss=0.3681, pruned_loss=0.129, ctc_loss=0.2429, over 844347.37 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 04:38:17,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0 +2024-08-25 04:38:33,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=26821.333333333332, ans=0.0 +2024-08-25 04:38:47,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-08-25 04:38:51,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=26928.0, ans=0.2 +2024-08-25 04:39:12,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.77 vs. limit=15.0 +2024-08-25 04:39:19,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0 +2024-08-25 04:39:28,426 INFO [train.py:1114] (1/4) Epoch 3, batch 100, loss[loss=0.3049, simple_loss=0.3376, pruned_loss=0.09751, ctc_loss=0.1931, over 19702.00 frames. ], tot_loss[loss=0.3609, simple_loss=0.369, pruned_loss=0.128, ctc_loss=0.2417, over 1499014.27 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 04:39:32,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27088.0, ans=0.1 +2024-08-25 04:39:38,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=27088.0, ans=10.0 +2024-08-25 04:39:40,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.68 vs. limit=15.0 +2024-08-25 04:39:50,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=27194.666666666668, ans=0.025 +2024-08-25 04:40:11,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.865e+02 2.221e+02 2.583e+02 3.158e+02 4.904e+02, threshold=5.165e+02, percent-clipped=0.0 +2024-08-25 04:40:27,485 INFO [train.py:1114] (1/4) Epoch 3, batch 150, loss[loss=0.3358, simple_loss=0.3444, pruned_loss=0.1201, ctc_loss=0.2179, over 19725.00 frames. ], tot_loss[loss=0.3543, simple_loss=0.3645, pruned_loss=0.1249, ctc_loss=0.2355, over 2028246.68 frames. 
], batch size: 47, lr: 3.81e-02, grad_scale: 16.0 +2024-08-25 04:40:30,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0 +2024-08-25 04:40:32,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27354.666666666668, ans=0.125 +2024-08-25 04:40:34,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.23 vs. limit=22.5 +2024-08-25 04:41:07,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=27514.666666666668, ans=0.0 +2024-08-25 04:41:29,363 INFO [train.py:1114] (1/4) Epoch 3, batch 200, loss[loss=0.4263, simple_loss=0.4112, pruned_loss=0.1611, ctc_loss=0.2977, over 18329.00 frames. ], tot_loss[loss=0.3502, simple_loss=0.3616, pruned_loss=0.123, ctc_loss=0.2316, over 2436541.20 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 04:41:36,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27621.333333333332, ans=0.1 +2024-08-25 04:41:52,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=27674.666666666668, ans=0.125 +2024-08-25 04:42:14,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 2.192e+02 2.550e+02 3.125e+02 5.269e+02, threshold=5.099e+02, percent-clipped=1.0 +2024-08-25 04:42:34,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=27888.0, ans=10.0 +2024-08-25 04:42:35,061 INFO [train.py:1114] (1/4) Epoch 3, batch 250, loss[loss=0.3762, simple_loss=0.3882, pruned_loss=0.1335, ctc_loss=0.2431, over 19373.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3611, pruned_loss=0.1226, ctc_loss=0.2307, over 2755941.42 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 04:42:38,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=27888.0, ans=0.015 +2024-08-25 04:42:53,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=27941.333333333332, ans=0.0 +2024-08-25 04:42:55,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27941.333333333332, ans=0.125 +2024-08-25 04:42:57,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=27994.666666666668, ans=0.125 +2024-08-25 04:42:57,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. limit=15.0 +2024-08-25 04:43:02,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-08-25 04:43:03,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.40 vs. 
limit=10.0 +2024-08-25 04:43:12,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=28048.0, ans=0.125 +2024-08-25 04:43:16,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=28048.0, ans=0.025 +2024-08-25 04:43:21,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=28101.333333333332, ans=0.125 +2024-08-25 04:43:27,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.68 vs. limit=15.0 +2024-08-25 04:43:28,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0 +2024-08-25 04:43:29,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.78 vs. limit=15.0 +2024-08-25 04:43:33,534 INFO [train.py:1114] (1/4) Epoch 3, batch 300, loss[loss=0.382, simple_loss=0.376, pruned_loss=0.1417, ctc_loss=0.2612, over 19525.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3606, pruned_loss=0.1226, ctc_loss=0.2309, over 3000471.32 frames. ], batch size: 61, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 04:43:36,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28154.666666666668, ans=0.1 +2024-08-25 04:43:37,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=28154.666666666668, ans=0.004748985507246377 +2024-08-25 04:43:38,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.33 vs. limit=15.0 +2024-08-25 04:43:48,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=12.0 +2024-08-25 04:43:55,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28208.0, ans=0.125 +2024-08-25 04:44:09,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=28314.666666666668, ans=0.125 +2024-08-25 04:44:10,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=28314.666666666668, ans=0.125 +2024-08-25 04:44:18,924 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.242e+02 2.624e+02 3.299e+02 5.169e+02, threshold=5.248e+02, percent-clipped=1.0 +2024-08-25 04:44:36,146 INFO [train.py:1114] (1/4) Epoch 3, batch 350, loss[loss=0.3184, simple_loss=0.3349, pruned_loss=0.1107, ctc_loss=0.2014, over 19763.00 frames. ], tot_loss[loss=0.349, simple_loss=0.3605, pruned_loss=0.1226, ctc_loss=0.2306, over 3190566.06 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 04:44:38,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=28421.333333333332, ans=0.0 +2024-08-25 04:44:53,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.61 vs. 
limit=15.0 +2024-08-25 04:44:58,871 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.24 vs. limit=15.0 +2024-08-25 04:45:00,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=28528.0, ans=0.125 +2024-08-25 04:45:14,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=28528.0, ans=0.025 +2024-08-25 04:45:36,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=28581.333333333332, ans=0.125 +2024-08-25 04:45:48,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=28581.333333333332, ans=0.5 +2024-08-25 04:46:55,912 INFO [train.py:1114] (1/4) Epoch 3, batch 400, loss[loss=0.331, simple_loss=0.3558, pruned_loss=0.1103, ctc_loss=0.2143, over 19530.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3599, pruned_loss=0.1219, ctc_loss=0.2294, over 3342662.72 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 32.0 +2024-08-25 04:47:20,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=28741.333333333332, ans=0.2 +2024-08-25 04:47:24,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=28741.333333333332, ans=0.0 +2024-08-25 04:47:25,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=28741.333333333332, ans=0.125 +2024-08-25 04:47:28,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=28741.333333333332, ans=0.2 +2024-08-25 04:47:40,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=28794.666666666668, ans=0.025 +2024-08-25 04:48:22,794 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.232e+02 2.568e+02 3.025e+02 1.134e+03, threshold=5.136e+02, percent-clipped=4.0 +2024-08-25 04:48:48,325 INFO [train.py:1114] (1/4) Epoch 3, batch 450, loss[loss=0.3204, simple_loss=0.3515, pruned_loss=0.1054, ctc_loss=0.1964, over 19611.00 frames. ], tot_loss[loss=0.347, simple_loss=0.3596, pruned_loss=0.1215, ctc_loss=0.2285, over 3450195.22 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 04:48:51,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=28954.666666666668, ans=0.004575072463768116 +2024-08-25 04:48:53,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=28954.666666666668, ans=0.025 +2024-08-25 04:48:58,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=28954.666666666668, ans=0.0 +2024-08-25 04:49:19,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.01 vs. 
limit=15.0 +2024-08-25 04:50:03,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=29168.0, ans=0.125 +2024-08-25 04:50:05,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.88 vs. limit=15.0 +2024-08-25 04:50:09,368 INFO [train.py:1114] (1/4) Epoch 3, batch 500, loss[loss=0.3476, simple_loss=0.3702, pruned_loss=0.1163, ctc_loss=0.2308, over 19675.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3593, pruned_loss=0.1215, ctc_loss=0.2287, over 3545219.68 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 04:50:27,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29274.666666666668, ans=0.1 +2024-08-25 04:50:53,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=29328.0, ans=0.125 +2024-08-25 04:51:02,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.16 vs. limit=15.0 +2024-08-25 04:51:03,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=29381.333333333332, ans=0.125 +2024-08-25 04:51:09,140 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.756e+02 2.370e+02 2.734e+02 3.745e+02 5.336e+02, threshold=5.469e+02, percent-clipped=1.0 +2024-08-25 04:51:26,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=29434.666666666668, ans=0.125 +2024-08-25 04:51:28,439 INFO [train.py:1114] (1/4) Epoch 3, batch 550, loss[loss=0.3897, simple_loss=0.3836, pruned_loss=0.1447, ctc_loss=0.2662, over 19305.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3594, pruned_loss=0.1212, ctc_loss=0.2278, over 3607695.08 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 32.0 +2024-08-25 04:51:46,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=29541.333333333332, ans=0.125 +2024-08-25 04:51:54,285 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:52:52,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=29648.0, ans=0.07 +2024-08-25 04:52:53,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=29701.333333333332, ans=0.1 +2024-08-25 04:52:53,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=29701.333333333332, ans=0.2 +2024-08-25 04:53:06,025 INFO [train.py:1114] (1/4) Epoch 3, batch 600, loss[loss=0.3442, simple_loss=0.3629, pruned_loss=0.1181, ctc_loss=0.2229, over 19375.00 frames. ], tot_loss[loss=0.3452, simple_loss=0.3587, pruned_loss=0.1205, ctc_loss=0.227, over 3665416.30 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 32.0 +2024-08-25 04:53:06,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.09 vs. 
limit=15.0 +2024-08-25 04:53:11,451 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.11 vs. limit=10.0 +2024-08-25 04:53:24,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29808.0, ans=0.1 +2024-08-25 04:53:41,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0 +2024-08-25 04:53:41,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=29914.666666666668, ans=0.125 +2024-08-25 04:53:48,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29914.666666666668, ans=0.0 +2024-08-25 04:53:49,296 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.141e+02 2.536e+02 3.031e+02 6.622e+02, threshold=5.071e+02, percent-clipped=2.0 +2024-08-25 04:53:54,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=29968.0, ans=0.05 +2024-08-25 04:53:55,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=29968.0, ans=0.2 +2024-08-25 04:54:04,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=29968.0, ans=0.125 +2024-08-25 04:54:06,084 INFO [train.py:1114] (1/4) Epoch 3, batch 650, loss[loss=0.3396, simple_loss=0.3571, pruned_loss=0.1173, ctc_loss=0.2186, over 19774.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3574, pruned_loss=0.1197, ctc_loss=0.2256, over 3716429.58 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 32.0 +2024-08-25 04:54:06,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=30021.333333333332, ans=0.125 +2024-08-25 04:54:09,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=30021.333333333332, ans=0.125 +2024-08-25 04:54:10,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.70 vs. 
limit=10.0 +2024-08-25 04:54:13,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=30021.333333333332, ans=0.0 +2024-08-25 04:54:15,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=30021.333333333332, ans=0.125 +2024-08-25 04:54:22,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:54:37,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:54:55,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=30181.333333333332, ans=0.2 +2024-08-25 04:54:59,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=30181.333333333332, ans=0.125 +2024-08-25 04:55:18,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.48 vs. limit=22.5 +2024-08-25 04:55:19,010 INFO [train.py:1114] (1/4) Epoch 3, batch 700, loss[loss=0.3055, simple_loss=0.3349, pruned_loss=0.1016, ctc_loss=0.1821, over 19739.00 frames. ], tot_loss[loss=0.3446, simple_loss=0.3583, pruned_loss=0.1202, ctc_loss=0.2264, over 3749289.09 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 32.0 +2024-08-25 04:55:45,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30394.666666666668, ans=0.1 +2024-08-25 04:56:32,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=30448.0, ans=0.2 +2024-08-25 04:56:35,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30448.0, ans=0.1 +2024-08-25 04:56:38,911 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.716e+02 2.292e+02 2.520e+02 3.192e+02 5.203e+02, threshold=5.040e+02, percent-clipped=1.0 +2024-08-25 04:56:39,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=15.0 +2024-08-25 04:56:41,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.44 vs. limit=15.0 +2024-08-25 04:56:55,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=30501.333333333332, ans=0.125 +2024-08-25 04:56:57,177 INFO [train.py:1114] (1/4) Epoch 3, batch 750, loss[loss=0.3274, simple_loss=0.3565, pruned_loss=0.1077, ctc_loss=0.2073, over 19487.00 frames. ], tot_loss[loss=0.3429, simple_loss=0.3569, pruned_loss=0.1195, ctc_loss=0.2249, over 3775271.28 frames. 
], batch size: 54, lr: 3.74e-02, grad_scale: 32.0 +2024-08-25 04:57:08,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=30554.666666666668, ans=0.025 +2024-08-25 04:57:11,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=30608.0, ans=0.004215652173913044 +2024-08-25 04:57:16,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=30608.0, ans=0.0 +2024-08-25 04:57:32,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.37 vs. limit=15.0 +2024-08-25 04:57:57,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30768.0, ans=0.1 +2024-08-25 04:57:59,141 INFO [train.py:1114] (1/4) Epoch 3, batch 800, loss[loss=0.2969, simple_loss=0.3249, pruned_loss=0.09724, ctc_loss=0.1862, over 19807.00 frames. ], tot_loss[loss=0.3421, simple_loss=0.3564, pruned_loss=0.1191, ctc_loss=0.2242, over 3796174.33 frames. ], batch size: 49, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 04:58:16,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0 +2024-08-25 04:58:23,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30928.0, ans=0.125 +2024-08-25 04:58:25,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.62 vs. limit=15.0 +2024-08-25 04:58:35,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.74 vs. limit=15.0 +2024-08-25 04:58:38,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.90 vs. limit=15.0 +2024-08-25 04:58:42,759 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 2.211e+02 2.622e+02 3.205e+02 5.257e+02, threshold=5.244e+02, percent-clipped=1.0 +2024-08-25 04:59:01,968 INFO [train.py:1114] (1/4) Epoch 3, batch 850, loss[loss=0.3609, simple_loss=0.3706, pruned_loss=0.1293, ctc_loss=0.2315, over 19655.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.355, pruned_loss=0.1182, ctc_loss=0.2226, over 3815868.27 frames. 
], batch size: 59, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 04:59:04,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=31088.0, ans=0.2 +2024-08-25 04:59:10,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31088.0, ans=0.1 +2024-08-25 04:59:30,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=31194.666666666668, ans=0.2 +2024-08-25 04:59:43,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=31248.0, ans=0.0 +2024-08-25 04:59:59,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=31301.333333333332, ans=0.125 +2024-08-25 05:00:04,394 INFO [train.py:1114] (1/4) Epoch 3, batch 900, loss[loss=0.3022, simple_loss=0.3197, pruned_loss=0.1034, ctc_loss=0.1948, over 19422.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3562, pruned_loss=0.1194, ctc_loss=0.2245, over 3819560.69 frames. ], batch size: 48, lr: 3.72e-02, grad_scale: 8.0 +2024-08-25 05:00:19,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=31408.0, ans=0.0040417391304347836 +2024-08-25 05:00:29,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=31461.333333333332, ans=0.0 +2024-08-25 05:00:49,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=31514.666666666668, ans=0.125 +2024-08-25 05:00:54,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.296e+02 2.736e+02 3.525e+02 1.528e+03, threshold=5.472e+02, percent-clipped=4.0 +2024-08-25 05:01:08,272 INFO [train.py:1114] (1/4) Epoch 3, batch 950, loss[loss=0.3452, simple_loss=0.3483, pruned_loss=0.1237, ctc_loss=0.237, over 19491.00 frames. ], tot_loss[loss=0.3417, simple_loss=0.3559, pruned_loss=0.119, ctc_loss=0.224, over 3821438.83 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 8.0 +2024-08-25 05:01:22,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=31674.666666666668, ans=0.003983768115942029 +2024-08-25 05:01:27,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=31674.666666666668, ans=0.025 +2024-08-25 05:01:33,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=31728.0, ans=0.125 +2024-08-25 05:01:40,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=31728.0, ans=0.2 +2024-08-25 05:02:05,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.76 vs. limit=6.0 +2024-08-25 05:02:08,119 INFO [train.py:1114] (1/4) Epoch 3, batch 1000, loss[loss=0.3014, simple_loss=0.3337, pruned_loss=0.09818, ctc_loss=0.1819, over 19850.00 frames. ], tot_loss[loss=0.3432, simple_loss=0.3573, pruned_loss=0.1196, ctc_loss=0.2247, over 3817613.11 frames. 
], batch size: 52, lr: 3.71e-02, grad_scale: 8.0 +2024-08-25 05:02:19,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=31941.333333333332, ans=0.025 +2024-08-25 05:02:55,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=32048.0, ans=0.125 +2024-08-25 05:02:56,467 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.586e+02 2.163e+02 2.492e+02 3.027e+02 5.724e+02, threshold=4.983e+02, percent-clipped=1.0 +2024-08-25 05:03:13,723 INFO [train.py:1114] (1/4) Epoch 3, batch 1050, loss[loss=0.3461, simple_loss=0.357, pruned_loss=0.1217, ctc_loss=0.2291, over 19827.00 frames. ], tot_loss[loss=0.3428, simple_loss=0.3567, pruned_loss=0.1196, ctc_loss=0.2247, over 3824558.96 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 8.0 +2024-08-25 05:03:20,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=32154.666666666668, ans=0.125 +2024-08-25 05:03:21,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=32154.666666666668, ans=0.125 +2024-08-25 05:03:24,761 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:04:08,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32261.333333333332, ans=0.1 +2024-08-25 05:04:17,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=32261.333333333332, ans=0.125 +2024-08-25 05:05:04,400 INFO [train.py:1114] (1/4) Epoch 3, batch 1100, loss[loss=0.3235, simple_loss=0.3488, pruned_loss=0.1085, ctc_loss=0.2033, over 19577.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3564, pruned_loss=0.119, ctc_loss=0.2238, over 3831467.53 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 8.0 +2024-08-25 05:05:50,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=32581.333333333332, ans=0.2 +2024-08-25 05:05:51,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=32581.333333333332, ans=0.0037866666666666665 +2024-08-25 05:06:00,567 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.823e+02 2.355e+02 2.517e+02 3.019e+02 4.945e+02, threshold=5.033e+02, percent-clipped=0.0 +2024-08-25 05:06:03,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=32634.666666666668, ans=0.2 +2024-08-25 05:06:20,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=32634.666666666668, ans=0.2 +2024-08-25 05:06:23,027 INFO [train.py:1114] (1/4) Epoch 3, batch 1150, loss[loss=0.3365, simple_loss=0.3558, pruned_loss=0.1146, ctc_loss=0.22, over 19585.00 frames. ], tot_loss[loss=0.3418, simple_loss=0.3564, pruned_loss=0.1189, ctc_loss=0.2234, over 3831042.27 frames. 
], batch size: 52, lr: 3.69e-02, grad_scale: 8.0 +2024-08-25 05:06:24,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=32688.0, ans=15.0 +2024-08-25 05:06:32,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32688.0, ans=0.125 +2024-08-25 05:06:41,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=32741.333333333332, ans=0.125 +2024-08-25 05:06:46,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.95 vs. limit=15.0 +2024-08-25 05:06:50,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=32794.666666666664, ans=0.003740289855072464 +2024-08-25 05:06:53,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32794.666666666664, ans=0.0 +2024-08-25 05:07:10,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.15 vs. limit=10.0 +2024-08-25 05:07:10,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=32848.0, ans=0.125 +2024-08-25 05:07:15,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.85 vs. limit=15.0 +2024-08-25 05:07:25,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=32901.333333333336, ans=0.125 +2024-08-25 05:07:28,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=32901.333333333336, ans=22.5 +2024-08-25 05:07:32,070 INFO [train.py:1114] (1/4) Epoch 3, batch 1200, loss[loss=0.3686, simple_loss=0.3816, pruned_loss=0.1299, ctc_loss=0.2396, over 19842.00 frames. ], tot_loss[loss=0.3431, simple_loss=0.3577, pruned_loss=0.1194, ctc_loss=0.224, over 3825539.56 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 16.0 +2024-08-25 05:07:34,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32954.666666666664, ans=0.1 +2024-08-25 05:08:12,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=15.0 +2024-08-25 05:08:19,680 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 2.128e+02 2.359e+02 2.757e+02 6.653e+02, threshold=4.718e+02, percent-clipped=2.0 +2024-08-25 05:08:38,022 INFO [train.py:1114] (1/4) Epoch 3, batch 1250, loss[loss=0.3388, simple_loss=0.3672, pruned_loss=0.1129, ctc_loss=0.2114, over 19541.00 frames. ], tot_loss[loss=0.3416, simple_loss=0.3574, pruned_loss=0.1185, ctc_loss=0.2222, over 3843396.04 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 16.0 +2024-08-25 05:08:53,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. 
limit=15.0 +2024-08-25 05:08:56,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=33274.666666666664, ans=0.07 +2024-08-25 05:08:59,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=33274.666666666664, ans=0.125 +2024-08-25 05:09:00,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.07 vs. limit=10.0 +2024-08-25 05:09:01,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=33328.0, ans=0.0036243478260869558 +2024-08-25 05:09:17,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.68 vs. limit=10.0 +2024-08-25 05:09:19,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=33381.333333333336, ans=0.003612753623188405 +2024-08-25 05:09:26,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=33381.333333333336, ans=0.0 +2024-08-25 05:09:41,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=33488.0, ans=0.2 +2024-08-25 05:09:42,096 INFO [train.py:1114] (1/4) Epoch 3, batch 1300, loss[loss=0.3676, simple_loss=0.3739, pruned_loss=0.1326, ctc_loss=0.2401, over 18793.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3557, pruned_loss=0.1175, ctc_loss=0.2201, over 3847174.36 frames. ], batch size: 76, lr: 3.67e-02, grad_scale: 16.0 +2024-08-25 05:10:04,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=33594.666666666664, ans=0.0 +2024-08-25 05:10:33,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=33594.666666666664, ans=0.125 +2024-08-25 05:10:48,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.161e+02 2.525e+02 2.896e+02 5.464e+02, threshold=5.050e+02, percent-clipped=3.0 +2024-08-25 05:10:50,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.32 vs. limit=15.0 +2024-08-25 05:10:52,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33701.333333333336, ans=0.1 +2024-08-25 05:11:02,307 INFO [train.py:1114] (1/4) Epoch 3, batch 1350, loss[loss=0.3587, simple_loss=0.3752, pruned_loss=0.1244, ctc_loss=0.2337, over 19771.00 frames. ], tot_loss[loss=0.3379, simple_loss=0.3548, pruned_loss=0.1167, ctc_loss=0.2188, over 3856257.20 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 16.0 +2024-08-25 05:11:07,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=33754.666666666664, ans=0.003531594202898552 +2024-08-25 05:11:42,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.08 vs. limit=15.0 +2024-08-25 05:11:49,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.40 vs. 
limit=15.0 +2024-08-25 05:12:10,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.69 vs. limit=22.5 +2024-08-25 05:12:26,305 INFO [train.py:1114] (1/4) Epoch 3, batch 1400, loss[loss=0.2883, simple_loss=0.3166, pruned_loss=0.09457, ctc_loss=0.1771, over 19699.00 frames. ], tot_loss[loss=0.3374, simple_loss=0.3542, pruned_loss=0.1166, ctc_loss=0.2186, over 3864232.26 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 16.0 +2024-08-25 05:12:37,865 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.28 vs. limit=15.0 +2024-08-25 05:12:49,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=34074.666666666664, ans=0.003462028985507247 +2024-08-25 05:13:29,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34181.333333333336, ans=0.1 +2024-08-25 05:13:31,969 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.210e+02 2.531e+02 3.096e+02 9.067e+02, threshold=5.062e+02, percent-clipped=2.0 +2024-08-25 05:14:12,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=34234.666666666664, ans=0.025 +2024-08-25 05:14:24,482 INFO [train.py:1114] (1/4) Epoch 3, batch 1450, loss[loss=0.3425, simple_loss=0.3625, pruned_loss=0.1184, ctc_loss=0.2144, over 19654.00 frames. ], tot_loss[loss=0.3383, simple_loss=0.3549, pruned_loss=0.117, ctc_loss=0.219, over 3862401.94 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 16.0 +2024-08-25 05:14:36,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=34288.0, ans=0.125 +2024-08-25 05:14:39,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=34288.0, ans=0.025 +2024-08-25 05:14:48,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.89 vs. limit=15.0 +2024-08-25 05:14:52,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=34341.333333333336, ans=0.2 +2024-08-25 05:14:58,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=34394.666666666664, ans=0.125 +2024-08-25 05:15:04,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34394.666666666664, ans=0.125 +2024-08-25 05:15:08,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=34448.0, ans=0.125 +2024-08-25 05:15:32,915 INFO [train.py:1114] (1/4) Epoch 3, batch 1500, loss[loss=0.3389, simple_loss=0.3682, pruned_loss=0.1135, ctc_loss=0.2063, over 19575.00 frames. ], tot_loss[loss=0.3388, simple_loss=0.3556, pruned_loss=0.1172, ctc_loss=0.2193, over 3862253.01 frames. 
], batch size: 57, lr: 3.65e-02, grad_scale: 16.0 +2024-08-25 05:15:35,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=34554.666666666664, ans=0.0 +2024-08-25 05:15:36,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=34554.666666666664, ans=0.0 +2024-08-25 05:16:03,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=34661.333333333336, ans=0.125 +2024-08-25 05:16:37,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34661.333333333336, ans=0.0 +2024-08-25 05:16:38,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=34661.333333333336, ans=0.0033344927536231887 +2024-08-25 05:16:43,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=34714.666666666664, ans=0.125 +2024-08-25 05:16:51,238 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.550e+02 2.151e+02 2.498e+02 3.151e+02 6.810e+02, threshold=4.996e+02, percent-clipped=2.0 +2024-08-25 05:20:00,626 INFO [train.py:1114] (1/4) Epoch 3, batch 1550, loss[loss=0.3801, simple_loss=0.3857, pruned_loss=0.1386, ctc_loss=0.243, over 19587.00 frames. ], tot_loss[loss=0.3391, simple_loss=0.3556, pruned_loss=0.1174, ctc_loss=0.2197, over 3847577.24 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 16.0 +2024-08-25 05:20:00,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34821.333333333336, ans=0.125 +2024-08-25 05:20:22,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=34874.666666666664, ans=0.125 +2024-08-25 05:21:04,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0 +2024-08-25 05:21:09,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=34981.333333333336, ans=0.125 +2024-08-25 05:22:04,976 INFO [train.py:1114] (1/4) Epoch 3, batch 1600, loss[loss=0.3829, simple_loss=0.3918, pruned_loss=0.1348, ctc_loss=0.2609, over 19839.00 frames. ], tot_loss[loss=0.3393, simple_loss=0.3555, pruned_loss=0.1175, ctc_loss=0.2202, over 3835820.56 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 05:22:15,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.36 vs. 
limit=10.0 +2024-08-25 05:22:52,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=35194.666666666664, ans=0.125 +2024-08-25 05:23:08,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35248.0, ans=0.125 +2024-08-25 05:23:22,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35248.0, ans=0.125 +2024-08-25 05:23:43,082 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.193e+02 2.529e+02 3.233e+02 6.645e+02, threshold=5.059e+02, percent-clipped=2.0 +2024-08-25 05:24:22,997 INFO [train.py:1114] (1/4) Epoch 3, batch 1650, loss[loss=0.308, simple_loss=0.3429, pruned_loss=0.1005, ctc_loss=0.1802, over 19638.00 frames. ], tot_loss[loss=0.3379, simple_loss=0.3546, pruned_loss=0.1168, ctc_loss=0.2191, over 3831935.54 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0 +2024-08-25 05:24:31,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=35354.666666666664, ans=0.0 +2024-08-25 05:24:56,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=35408.0, ans=0.0 +2024-08-25 05:25:13,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=35408.0, ans=0.003172173913043479 +2024-08-25 05:25:57,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=35514.666666666664, ans=0.125 +2024-08-25 05:26:16,035 INFO [train.py:1114] (1/4) Epoch 3, batch 1700, loss[loss=0.3137, simple_loss=0.3283, pruned_loss=0.1088, ctc_loss=0.2042, over 19649.00 frames. ], tot_loss[loss=0.3367, simple_loss=0.3539, pruned_loss=0.1162, ctc_loss=0.2179, over 3846531.43 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:26:16,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=15.0 +2024-08-25 05:27:02,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35781.333333333336, ans=0.1 +2024-08-25 05:27:10,189 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.342e+02 2.819e+02 3.429e+02 5.215e+02, threshold=5.637e+02, percent-clipped=1.0 +2024-08-25 05:27:18,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=35834.666666666664, ans=0.0030794202898550734 +2024-08-25 05:27:23,550 INFO [train.py:1114] (1/4) Epoch 3, batch 1750, loss[loss=0.3062, simple_loss=0.3232, pruned_loss=0.1052, ctc_loss=0.1969, over 19681.00 frames. ], tot_loss[loss=0.335, simple_loss=0.3523, pruned_loss=0.1155, ctc_loss=0.2166, over 3851205.57 frames. 
], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:27:29,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=35888.0, ans=0.125 +2024-08-25 05:27:32,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=35888.0, ans=0.1 +2024-08-25 05:29:19,796 INFO [train.py:1114] (1/4) Epoch 3, batch 1800, loss[loss=0.3074, simple_loss=0.3337, pruned_loss=0.1029, ctc_loss=0.1883, over 19609.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.3526, pruned_loss=0.1155, ctc_loss=0.217, over 3853268.25 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:31:26,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=36208.0, ans=0.0 +2024-08-25 05:31:43,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36261.333333333336, ans=0.125 +2024-08-25 05:31:45,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36261.333333333336, ans=0.125 +2024-08-25 05:31:53,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.83 vs. limit=15.0 +2024-08-25 05:31:58,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 2.106e+02 2.466e+02 3.299e+02 1.077e+03, threshold=4.933e+02, percent-clipped=1.0 +2024-08-25 05:32:01,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=36368.0, ans=0.125 +2024-08-25 05:32:06,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=36368.0, ans=0.002963478260869565 +2024-08-25 05:32:11,696 INFO [train.py:1114] (1/4) Epoch 3, batch 1850, loss[loss=0.3196, simple_loss=0.3492, pruned_loss=0.1058, ctc_loss=0.1964, over 19608.00 frames. ], tot_loss[loss=0.3338, simple_loss=0.3515, pruned_loss=0.1149, ctc_loss=0.2157, over 3856041.44 frames. ], batch size: 57, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:32:12,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=36421.333333333336, ans=0.2 +2024-08-25 05:32:14,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=36421.333333333336, ans=0.125 +2024-08-25 05:32:16,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=36421.333333333336, ans=0.025 +2024-08-25 05:32:30,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.71 vs. limit=6.0 +2024-08-25 05:32:30,800 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:32:35,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.64 vs. 
limit=22.5 +2024-08-25 05:32:47,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=36528.0, ans=0.125 +2024-08-25 05:32:48,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36581.333333333336, ans=0.1 +2024-08-25 05:32:53,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36581.333333333336, ans=0.1 +2024-08-25 05:33:12,867 INFO [train.py:1114] (1/4) Epoch 3, batch 1900, loss[loss=0.3154, simple_loss=0.3534, pruned_loss=0.0995, ctc_loss=0.196, over 19659.00 frames. ], tot_loss[loss=0.3345, simple_loss=0.3522, pruned_loss=0.1151, ctc_loss=0.2162, over 3861295.52 frames. ], batch size: 59, lr: 3.60e-02, grad_scale: 16.0 +2024-08-25 05:33:37,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.59 vs. limit=10.0 +2024-08-25 05:33:38,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 05:33:43,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=36794.666666666664, ans=0.2 +2024-08-25 05:33:52,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=36794.666666666664, ans=0.0 +2024-08-25 05:33:59,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=36848.0, ans=0.2 +2024-08-25 05:34:01,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=36848.0, ans=0.1 +2024-08-25 05:34:01,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.69 vs. limit=15.0 +2024-08-25 05:34:05,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.713e+02 2.260e+02 2.560e+02 3.105e+02 5.689e+02, threshold=5.120e+02, percent-clipped=2.0 +2024-08-25 05:34:49,853 INFO [train.py:1114] (1/4) Epoch 3, batch 1950, loss[loss=0.3271, simple_loss=0.3395, pruned_loss=0.1143, ctc_loss=0.215, over 19591.00 frames. ], tot_loss[loss=0.3354, simple_loss=0.3533, pruned_loss=0.1154, ctc_loss=0.2163, over 3870141.84 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 16.0 +2024-08-25 05:36:26,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37008.0, ans=0.1 +2024-08-25 05:36:27,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37008.0, ans=0.1 +2024-08-25 05:36:32,543 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=3.872e+00 +2024-08-25 05:36:41,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.69 vs. 
limit=15.0 +2024-08-25 05:36:42,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=37114.666666666664, ans=0.0 +2024-08-25 05:36:44,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37114.666666666664, ans=0.125 +2024-08-25 05:37:09,005 INFO [train.py:1114] (1/4) Epoch 3, batch 2000, loss[loss=0.3134, simple_loss=0.3242, pruned_loss=0.1097, ctc_loss=0.2081, over 19661.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3538, pruned_loss=0.1155, ctc_loss=0.2166, over 3854345.99 frames. ], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 05:37:17,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=37221.333333333336, ans=0.95 +2024-08-25 05:37:18,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=37221.333333333336, ans=0.125 +2024-08-25 05:37:31,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.54 vs. limit=15.0 +2024-08-25 05:37:35,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=37328.0, ans=0.125 +2024-08-25 05:37:36,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=37328.0, ans=0.125 +2024-08-25 05:37:45,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=37381.333333333336, ans=0.125 +2024-08-25 05:37:55,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=37381.333333333336, ans=0.04949747468305833 +2024-08-25 05:38:02,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.243e+02 2.650e+02 3.292e+02 1.299e+03, threshold=5.300e+02, percent-clipped=6.0 +2024-08-25 05:38:13,922 INFO [train.py:1114] (1/4) Epoch 3, batch 2050, loss[loss=0.3235, simple_loss=0.3324, pruned_loss=0.1144, ctc_loss=0.2144, over 19725.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3523, pruned_loss=0.1147, ctc_loss=0.2155, over 3850800.98 frames. ], batch size: 47, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 05:38:17,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37488.0, ans=0.125 +2024-08-25 05:38:30,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37541.333333333336, ans=0.125 +2024-08-25 05:38:31,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37541.333333333336, ans=0.1 +2024-08-25 05:38:47,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=37594.666666666664, ans=0.125 +2024-08-25 05:38:59,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.47 vs. limit=15.0 +2024-08-25 05:39:40,791 INFO [train.py:1114] (1/4) Epoch 3, batch 2100, loss[loss=0.2848, simple_loss=0.3249, pruned_loss=0.08779, ctc_loss=0.1727, over 19758.00 frames. 
], tot_loss[loss=0.3333, simple_loss=0.3518, pruned_loss=0.1143, ctc_loss=0.215, over 3857825.56 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 05:39:49,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=37754.666666666664, ans=0.125 +2024-08-25 05:39:55,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=37754.666666666664, ans=0.2 +2024-08-25 05:40:05,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37808.0, ans=0.0 +2024-08-25 05:40:13,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.60 vs. limit=22.5 +2024-08-25 05:40:45,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=37861.333333333336, ans=0.0 +2024-08-25 05:40:50,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=37914.666666666664, ans=0.05 +2024-08-25 05:40:53,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.73 vs. limit=22.5 +2024-08-25 05:40:54,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=37914.666666666664, ans=0.125 +2024-08-25 05:40:58,550 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.072e+02 2.352e+02 2.718e+02 4.903e+02, threshold=4.703e+02, percent-clipped=0.0 +2024-08-25 05:41:08,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37968.0, ans=0.0 +2024-08-25 05:41:10,087 INFO [train.py:1114] (1/4) Epoch 3, batch 2150, loss[loss=0.3358, simple_loss=0.3533, pruned_loss=0.1157, ctc_loss=0.2173, over 19594.00 frames. ], tot_loss[loss=0.3317, simple_loss=0.3504, pruned_loss=0.1137, ctc_loss=0.214, over 3868883.36 frames. 
], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 05:41:28,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38074.666666666664, ans=0.125 +2024-08-25 05:41:31,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38074.666666666664, ans=0.125 +2024-08-25 05:41:44,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=38128.0, ans=0.07 +2024-08-25 05:41:54,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=38181.333333333336, ans=0.2 +2024-08-25 05:41:55,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38181.333333333336, ans=0.125 +2024-08-25 05:42:24,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=38181.333333333336, ans=0.025 +2024-08-25 05:42:25,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=38234.666666666664, ans=0.125 +2024-08-25 05:42:31,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38234.666666666664, ans=0.1 +2024-08-25 05:42:42,869 INFO [train.py:1114] (1/4) Epoch 3, batch 2200, loss[loss=0.351, simple_loss=0.364, pruned_loss=0.122, ctc_loss=0.2347, over 19580.00 frames. ], tot_loss[loss=0.3322, simple_loss=0.3509, pruned_loss=0.1139, ctc_loss=0.2141, over 3868042.63 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 05:42:43,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=38288.0, ans=0.0025460869565217398 +2024-08-25 05:42:54,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=38288.0, ans=0.0025460869565217398 +2024-08-25 05:43:02,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=38341.333333333336, ans=0.125 +2024-08-25 05:43:02,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38341.333333333336, ans=0.125 +2024-08-25 05:43:34,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.802e+02 2.197e+02 2.629e+02 2.994e+02 6.107e+02, threshold=5.259e+02, percent-clipped=1.0 +2024-08-25 05:43:51,061 INFO [train.py:1114] (1/4) Epoch 3, batch 2250, loss[loss=0.2887, simple_loss=0.3364, pruned_loss=0.08739, ctc_loss=0.1655, over 19603.00 frames. ], tot_loss[loss=0.3319, simple_loss=0.351, pruned_loss=0.1137, ctc_loss=0.2133, over 3868149.79 frames. 
], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 05:44:20,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=38608.0, ans=0.125 +2024-08-25 05:44:24,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38608.0, ans=0.125 +2024-08-25 05:44:26,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=38608.0, ans=0.2 +2024-08-25 05:44:29,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=38608.0, ans=0.125 +2024-08-25 05:44:44,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=38661.333333333336, ans=0.125 +2024-08-25 05:45:07,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.66 vs. limit=15.0 +2024-08-25 05:45:10,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38768.0, ans=0.125 +2024-08-25 05:45:15,053 INFO [train.py:1114] (1/4) Epoch 3, batch 2300, loss[loss=0.3051, simple_loss=0.3227, pruned_loss=0.1036, ctc_loss=0.2011, over 19527.00 frames. ], tot_loss[loss=0.3309, simple_loss=0.3496, pruned_loss=0.1135, ctc_loss=0.2129, over 3862623.77 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 05:45:19,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38821.333333333336, ans=0.1 +2024-08-25 05:45:22,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38821.333333333336, ans=0.125 +2024-08-25 05:46:07,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=38874.666666666664, ans=0.125 +2024-08-25 05:46:11,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=38928.0, ans=0.1 +2024-08-25 05:46:32,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.44 vs. limit=22.5 +2024-08-25 05:47:15,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.630e+02 2.233e+02 2.542e+02 3.133e+02 7.552e+02, threshold=5.083e+02, percent-clipped=3.0 +2024-08-25 05:47:16,978 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:47:17,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=39034.666666666664, ans=0.125 +2024-08-25 05:47:27,928 INFO [train.py:1114] (1/4) Epoch 3, batch 2350, loss[loss=0.3878, simple_loss=0.3906, pruned_loss=0.1407, ctc_loss=0.2589, over 19678.00 frames. ], tot_loss[loss=0.3291, simple_loss=0.3486, pruned_loss=0.1125, ctc_loss=0.2113, over 3864981.09 frames. 
], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 05:48:09,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=39248.0, ans=0.125 +2024-08-25 05:48:10,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.06 vs. limit=15.0 +2024-08-25 05:48:24,944 INFO [train.py:1114] (1/4) Epoch 3, batch 2400, loss[loss=0.3473, simple_loss=0.3691, pruned_loss=0.1186, ctc_loss=0.2211, over 19316.00 frames. ], tot_loss[loss=0.3333, simple_loss=0.3518, pruned_loss=0.1145, ctc_loss=0.2144, over 3859318.23 frames. ], batch size: 71, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 05:48:25,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.55 vs. limit=15.0 +2024-08-25 05:48:27,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=39354.666666666664, ans=0.125 +2024-08-25 05:48:35,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0 +2024-08-25 05:48:51,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=39461.333333333336, ans=0.025 +2024-08-25 05:49:10,300 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.241e+02 2.672e+02 3.161e+02 5.607e+02, threshold=5.344e+02, percent-clipped=4.0 +2024-08-25 05:49:11,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39568.0, ans=0.125 +2024-08-25 05:49:26,435 INFO [train.py:1114] (1/4) Epoch 3, batch 2450, loss[loss=0.4262, simple_loss=0.3958, pruned_loss=0.166, ctc_loss=0.3114, over 13715.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3575, pruned_loss=0.1198, ctc_loss=0.2244, over 3731134.73 frames. ], batch size: 140, lr: 3.53e-02, grad_scale: 32.0 +2024-08-25 05:49:26,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.79 vs. limit=12.0 +2024-08-25 05:50:05,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=39781.333333333336, ans=0.2 +2024-08-25 05:50:49,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.74 vs. limit=22.5 +2024-08-25 05:51:05,715 INFO [train.py:1114] (1/4) Epoch 4, batch 0, loss[loss=0.3387, simple_loss=0.3474, pruned_loss=0.1199, ctc_loss=0.2254, over 19425.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3474, pruned_loss=0.1199, ctc_loss=0.2254, over 19425.00 frames. ], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-25 05:51:05,715 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 05:51:35,402 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2629, simple_loss=0.3337, pruned_loss=0.07032, ctc_loss=0.1284, over 944034.00 frames. +2024-08-25 05:51:35,403 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 05:51:40,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.51 vs. 
limit=15.0 +2024-08-25 05:52:02,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.08 vs. limit=15.0 +2024-08-25 05:52:21,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=39989.333333333336, ans=0.025 +2024-08-25 05:52:22,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39989.333333333336, ans=0.125 +2024-08-25 05:52:29,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=40042.666666666664, ans=0.1 +2024-08-25 05:52:38,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=40042.666666666664, ans=0.2 +2024-08-25 05:52:41,492 INFO [train.py:1114] (1/4) Epoch 4, batch 50, loss[loss=0.2678, simple_loss=0.3018, pruned_loss=0.08433, ctc_loss=0.1629, over 19702.00 frames. ], tot_loss[loss=0.3396, simple_loss=0.356, pruned_loss=0.1173, ctc_loss=0.2216, over 845181.07 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-25 05:52:47,061 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.793e+02 2.147e+02 2.483e+02 2.920e+02 4.932e+02, threshold=4.967e+02, percent-clipped=0.0 +2024-08-25 05:52:56,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=40096.0, ans=0.1 +2024-08-25 05:53:07,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.05 vs. limit=15.0 +2024-08-25 05:53:18,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=40149.333333333336, ans=0.025 +2024-08-25 05:53:38,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=40202.666666666664, ans=0.0 +2024-08-25 05:53:41,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=40256.0, ans=0.125 +2024-08-25 05:53:54,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40256.0, ans=0.125 +2024-08-25 05:54:04,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=40309.333333333336, ans=0.025 +2024-08-25 05:54:08,128 INFO [train.py:1114] (1/4) Epoch 4, batch 100, loss[loss=0.3207, simple_loss=0.3454, pruned_loss=0.1066, ctc_loss=0.2069, over 19702.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.354, pruned_loss=0.1147, ctc_loss=0.2159, over 1499126.13 frames. ], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-25 05:54:14,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.09 vs. 
limit=10.0 +2024-08-25 05:54:32,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=40362.666666666664, ans=0.125 +2024-08-25 05:54:59,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=40416.0, ans=0.025 +2024-08-25 05:55:45,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=40576.0, ans=0.125 +2024-08-25 05:55:55,478 INFO [train.py:1114] (1/4) Epoch 4, batch 150, loss[loss=0.284, simple_loss=0.3066, pruned_loss=0.09457, ctc_loss=0.1804, over 19700.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3492, pruned_loss=0.112, ctc_loss=0.2107, over 2026958.37 frames. ], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-25 05:55:56,484 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.643e+02 2.033e+02 2.286e+02 2.661e+02 4.118e+02, threshold=4.571e+02, percent-clipped=0.0 +2024-08-25 05:56:07,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=40682.666666666664, ans=0.125 +2024-08-25 05:56:11,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=40682.666666666664, ans=0.0 +2024-08-25 05:56:45,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40789.333333333336, ans=0.1 +2024-08-25 05:56:58,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40842.666666666664, ans=0.1 +2024-08-25 05:57:04,741 INFO [train.py:1114] (1/4) Epoch 4, batch 200, loss[loss=0.3522, simple_loss=0.3642, pruned_loss=0.1256, ctc_loss=0.2223, over 18345.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3469, pruned_loss=0.1102, ctc_loss=0.2073, over 2434725.20 frames. ], batch size: 85, lr: 3.28e-02, grad_scale: 32.0 +2024-08-25 05:57:25,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.86 vs. limit=22.5 +2024-08-25 05:57:31,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=40949.333333333336, ans=0.02 +2024-08-25 05:57:32,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=40949.333333333336, ans=0.07 +2024-08-25 05:57:36,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0 +2024-08-25 05:57:39,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=40949.333333333336, ans=0.2 +2024-08-25 05:57:48,926 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:58:29,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=41056.0, ans=0.0019443478260869566 +2024-08-25 05:58:36,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.66 vs. 
limit=15.0 +2024-08-25 05:58:55,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.30 vs. limit=15.0 +2024-08-25 05:59:03,057 INFO [train.py:1114] (1/4) Epoch 4, batch 250, loss[loss=0.3613, simple_loss=0.3702, pruned_loss=0.1279, ctc_loss=0.2416, over 19353.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3474, pruned_loss=0.1106, ctc_loss=0.2079, over 2754787.26 frames. ], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-25 05:59:03,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41162.666666666664, ans=0.125 +2024-08-25 05:59:04,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.652e+02 2.098e+02 2.387e+02 2.939e+02 4.251e+02, threshold=4.774e+02, percent-clipped=0.0 +2024-08-25 05:59:26,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.62 vs. limit=15.0 +2024-08-25 05:59:54,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=41322.666666666664, ans=0.125 +2024-08-25 06:00:14,168 INFO [train.py:1114] (1/4) Epoch 4, batch 300, loss[loss=0.3251, simple_loss=0.3484, pruned_loss=0.1089, ctc_loss=0.21, over 19508.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3467, pruned_loss=0.11, ctc_loss=0.207, over 3000076.96 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-25 06:00:34,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41482.666666666664, ans=0.125 +2024-08-25 06:00:37,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-25 06:00:57,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=41536.0, ans=0.0 +2024-08-25 06:01:08,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=41589.333333333336, ans=0.125 +2024-08-25 06:01:21,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=41642.666666666664, ans=0.07 +2024-08-25 06:01:23,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=41642.666666666664, ans=0.125 +2024-08-25 06:01:36,618 INFO [train.py:1114] (1/4) Epoch 4, batch 350, loss[loss=0.2773, simple_loss=0.308, pruned_loss=0.08938, ctc_loss=0.1698, over 19757.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3463, pruned_loss=0.1096, ctc_loss=0.2059, over 3188636.47 frames. ], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-25 06:01:37,789 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 2.143e+02 2.517e+02 2.887e+02 6.595e+02, threshold=5.034e+02, percent-clipped=1.0 +2024-08-25 06:01:44,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=41696.0, ans=0.0 +2024-08-25 06:02:07,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.06 vs. 
limit=22.5 +2024-08-25 06:02:14,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=41856.0, ans=0.0 +2024-08-25 06:02:21,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=41856.0, ans=0.125 +2024-08-25 06:02:38,783 INFO [train.py:1114] (1/4) Epoch 4, batch 400, loss[loss=0.3326, simple_loss=0.3554, pruned_loss=0.1116, ctc_loss=0.2165, over 19504.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3461, pruned_loss=0.1091, ctc_loss=0.2052, over 3340991.85 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-25 06:02:39,001 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:02:49,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41962.666666666664, ans=0.125 +2024-08-25 06:03:24,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=42122.666666666664, ans=0.125 +2024-08-25 06:03:46,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42122.666666666664, ans=0.125 +2024-08-25 06:04:04,054 INFO [train.py:1114] (1/4) Epoch 4, batch 450, loss[loss=0.3925, simple_loss=0.3969, pruned_loss=0.1406, ctc_loss=0.2672, over 19623.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3465, pruned_loss=0.1097, ctc_loss=0.2057, over 3449441.54 frames. ], batch size: 55, lr: 3.25e-02, grad_scale: 32.0 +2024-08-25 06:04:05,483 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=6.143e-02 +2024-08-25 06:04:06,526 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.107e+02 2.479e+02 2.897e+02 5.564e+02, threshold=4.958e+02, percent-clipped=2.0 +2024-08-25 06:04:08,010 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:04:11,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42229.333333333336, ans=0.125 +2024-08-25 06:04:19,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-25 06:04:21,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0 +2024-08-25 06:04:21,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-25 06:04:57,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=42389.333333333336, ans=0.05 +2024-08-25 06:05:03,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42389.333333333336, ans=0.1 +2024-08-25 06:05:32,236 INFO [train.py:1114] (1/4) Epoch 4, batch 500, loss[loss=0.3338, simple_loss=0.3595, pruned_loss=0.1121, ctc_loss=0.2102, over 19692.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3448, pruned_loss=0.1086, ctc_loss=0.204, over 3545718.72 frames. 
], batch size: 63, lr: 3.25e-02, grad_scale: 32.0 +2024-08-25 06:05:50,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=42549.333333333336, ans=0.0 +2024-08-25 06:05:52,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.11 vs. limit=15.0 +2024-08-25 06:06:40,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=42762.666666666664, ans=0.0 +2024-08-25 06:06:41,081 INFO [train.py:1114] (1/4) Epoch 4, batch 550, loss[loss=0.3327, simple_loss=0.355, pruned_loss=0.1132, ctc_loss=0.2098, over 19214.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3451, pruned_loss=0.1089, ctc_loss=0.2043, over 3606988.10 frames. ], batch size: 71, lr: 3.24e-02, grad_scale: 16.0 +2024-08-25 06:06:44,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.027e+02 2.416e+02 2.881e+02 5.051e+02, threshold=4.833e+02, percent-clipped=1.0 +2024-08-25 06:07:01,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=42816.0, ans=12.0 +2024-08-25 06:07:18,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42869.333333333336, ans=0.125 +2024-08-25 06:07:50,594 INFO [train.py:1114] (1/4) Epoch 4, batch 600, loss[loss=0.3629, simple_loss=0.378, pruned_loss=0.127, ctc_loss=0.2346, over 19407.00 frames. ], tot_loss[loss=0.3222, simple_loss=0.345, pruned_loss=0.1088, ctc_loss=0.2042, over 3664793.90 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 16.0 +2024-08-25 06:07:54,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=43029.333333333336, ans=0.125 +2024-08-25 06:07:57,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=43029.333333333336, ans=0.2 +2024-08-25 06:07:58,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.71 vs. limit=22.5 +2024-08-25 06:08:01,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43082.666666666664, ans=0.1 +2024-08-25 06:08:06,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43082.666666666664, ans=0.125 +2024-08-25 06:08:12,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.85 vs. limit=10.0 +2024-08-25 06:08:26,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=43189.333333333336, ans=0.2 +2024-08-25 06:08:44,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.07 vs. limit=22.5 +2024-08-25 06:08:59,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.17 vs. limit=15.0 +2024-08-25 06:09:00,687 INFO [train.py:1114] (1/4) Epoch 4, batch 650, loss[loss=0.2809, simple_loss=0.3247, pruned_loss=0.08583, ctc_loss=0.1638, over 19790.00 frames. 
], tot_loss[loss=0.3207, simple_loss=0.3439, pruned_loss=0.1082, ctc_loss=0.2027, over 3715390.33 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 16.0 +2024-08-25 06:09:15,857 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.456e+02 2.140e+02 2.544e+02 3.023e+02 7.017e+02, threshold=5.088e+02, percent-clipped=9.0 +2024-08-25 06:09:18,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=43296.0, ans=0.125 +2024-08-25 06:09:37,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.85 vs. limit=10.0 +2024-08-25 06:09:50,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.25 vs. limit=15.0 +2024-08-25 06:09:51,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=43402.666666666664, ans=0.125 +2024-08-25 06:09:59,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43456.0, ans=0.125 +2024-08-25 06:10:09,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.14 vs. limit=15.0 +2024-08-25 06:10:12,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=43509.333333333336, ans=0.125 +2024-08-25 06:10:18,927 INFO [train.py:1114] (1/4) Epoch 4, batch 700, loss[loss=0.2577, simple_loss=0.3078, pruned_loss=0.07501, ctc_loss=0.1437, over 19727.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3442, pruned_loss=0.1078, ctc_loss=0.2021, over 3747440.16 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 16.0 +2024-08-25 06:10:39,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=12.0 +2024-08-25 06:10:39,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=43616.0, ans=0.025 +2024-08-25 06:10:53,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=43669.333333333336, ans=0.025 +2024-08-25 06:11:04,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=43722.666666666664, ans=0.125 +2024-08-25 06:11:04,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=43722.666666666664, ans=0.125 +2024-08-25 06:11:06,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.49 vs. limit=22.5 +2024-08-25 06:11:20,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.55 vs. limit=22.5 +2024-08-25 06:11:23,835 INFO [train.py:1114] (1/4) Epoch 4, batch 750, loss[loss=0.3383, simple_loss=0.3627, pruned_loss=0.1139, ctc_loss=0.2157, over 19483.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3432, pruned_loss=0.1072, ctc_loss=0.2011, over 3772264.34 frames. 
], batch size: 54, lr: 3.22e-02, grad_scale: 16.0 +2024-08-25 06:11:28,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.530e+02 2.141e+02 2.481e+02 2.931e+02 4.472e+02, threshold=4.962e+02, percent-clipped=0.0 +2024-08-25 06:11:49,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=43882.666666666664, ans=0.125 +2024-08-25 06:12:09,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=43989.333333333336, ans=10.0 +2024-08-25 06:12:16,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=44042.666666666664, ans=0.5 +2024-08-25 06:12:29,316 INFO [train.py:1114] (1/4) Epoch 4, batch 800, loss[loss=0.3329, simple_loss=0.3394, pruned_loss=0.1198, ctc_loss=0.2173, over 19810.00 frames. ], tot_loss[loss=0.3187, simple_loss=0.3428, pruned_loss=0.1071, ctc_loss=0.2009, over 3794731.81 frames. ], batch size: 49, lr: 3.21e-02, grad_scale: 32.0 +2024-08-25 06:12:56,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.26 vs. limit=15.0 +2024-08-25 06:13:03,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=44256.0, ans=0.125 +2024-08-25 06:13:03,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=44256.0, ans=0.125 +2024-08-25 06:13:22,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.11 vs. limit=15.0 +2024-08-25 06:13:27,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=44362.666666666664, ans=0.125 +2024-08-25 06:13:28,033 INFO [train.py:1114] (1/4) Epoch 4, batch 850, loss[loss=0.3459, simple_loss=0.3698, pruned_loss=0.1169, ctc_loss=0.2209, over 19670.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3418, pruned_loss=0.1064, ctc_loss=0.1996, over 3815241.96 frames. 
], batch size: 59, lr: 3.21e-02, grad_scale: 32.0 +2024-08-25 06:13:31,254 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.074e+02 2.402e+02 2.888e+02 5.555e+02, threshold=4.804e+02, percent-clipped=1.0 +2024-08-25 06:13:33,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=44362.666666666664, ans=0.5 +2024-08-25 06:13:39,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=44416.0, ans=0.0 +2024-08-25 06:13:41,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=44416.0, ans=0.125 +2024-08-25 06:13:47,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=44416.0, ans=0.02 +2024-08-25 06:13:53,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=44469.333333333336, ans=0.125 +2024-08-25 06:13:57,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=44469.333333333336, ans=0.2 +2024-08-25 06:14:00,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=44469.333333333336, ans=0.125 +2024-08-25 06:14:09,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.46 vs. limit=22.5 +2024-08-25 06:14:13,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=44522.666666666664, ans=0.125 +2024-08-25 06:14:16,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=44576.0, ans=0.125 +2024-08-25 06:14:21,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.89 vs. limit=15.0 +2024-08-25 06:14:30,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44576.0, ans=0.1 +2024-08-25 06:14:32,266 INFO [train.py:1114] (1/4) Epoch 4, batch 900, loss[loss=0.3033, simple_loss=0.3253, pruned_loss=0.1027, ctc_loss=0.1897, over 19799.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3429, pruned_loss=0.1073, ctc_loss=0.2012, over 3819422.89 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 32.0 +2024-08-25 06:14:36,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=44629.333333333336, ans=0.125 +2024-08-25 06:14:37,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.75 vs. limit=22.5 +2024-08-25 06:15:01,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44736.0, ans=0.125 +2024-08-25 06:15:02,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=44736.0, ans=15.0 +2024-08-25 06:15:32,385 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.08 vs. 
limit=15.0 +2024-08-25 06:15:38,553 INFO [train.py:1114] (1/4) Epoch 4, batch 950, loss[loss=0.3037, simple_loss=0.3243, pruned_loss=0.1047, ctc_loss=0.1844, over 19496.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3439, pruned_loss=0.108, ctc_loss=0.2028, over 3821051.77 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 32.0 +2024-08-25 06:15:41,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=44896.0, ans=0.0 +2024-08-25 06:15:42,142 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.101e+02 2.364e+02 2.735e+02 6.196e+02, threshold=4.728e+02, percent-clipped=2.0 +2024-08-25 06:15:44,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=44896.0, ans=0.125 +2024-08-25 06:15:59,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=44949.333333333336, ans=0.09899494936611666 +2024-08-25 06:16:30,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45109.333333333336, ans=0.125 +2024-08-25 06:16:35,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45109.333333333336, ans=0.125 +2024-08-25 06:16:42,344 INFO [train.py:1114] (1/4) Epoch 4, batch 1000, loss[loss=0.2576, simple_loss=0.3126, pruned_loss=0.073, ctc_loss=0.1419, over 19857.00 frames. ], tot_loss[loss=0.3214, simple_loss=0.3447, pruned_loss=0.1083, ctc_loss=0.2034, over 3817432.80 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 32.0 +2024-08-25 06:17:03,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=45162.666666666664, ans=0.125 +2024-08-25 06:17:55,465 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:18:10,596 INFO [train.py:1114] (1/4) Epoch 4, batch 1050, loss[loss=0.3207, simple_loss=0.3523, pruned_loss=0.1046, ctc_loss=0.1998, over 19835.00 frames. ], tot_loss[loss=0.3191, simple_loss=0.3431, pruned_loss=0.1073, ctc_loss=0.2013, over 3823387.51 frames. ], batch size: 57, lr: 3.19e-02, grad_scale: 16.0 +2024-08-25 06:18:11,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.86 vs. limit=15.0 +2024-08-25 06:18:26,176 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.982e+02 2.200e+02 2.634e+02 5.388e+02, threshold=4.401e+02, percent-clipped=1.0 +2024-08-25 06:19:03,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=45589.333333333336, ans=0.07 +2024-08-25 06:19:09,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=45589.333333333336, ans=0.0009588405797101435 +2024-08-25 06:19:36,311 INFO [train.py:1114] (1/4) Epoch 4, batch 1100, loss[loss=0.3076, simple_loss=0.3421, pruned_loss=0.09954, ctc_loss=0.1849, over 19585.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3423, pruned_loss=0.1069, ctc_loss=0.2008, over 3829412.18 frames. 
], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-25 06:19:47,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=45696.0, ans=0.025 +2024-08-25 06:19:48,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=45696.0, ans=0.125 +2024-08-25 06:19:58,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.82 vs. limit=15.0 +2024-08-25 06:20:07,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45749.333333333336, ans=0.1 +2024-08-25 06:20:19,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=45802.666666666664, ans=0.2 +2024-08-25 06:20:39,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45909.333333333336, ans=0.125 +2024-08-25 06:20:52,238 INFO [train.py:1114] (1/4) Epoch 4, batch 1150, loss[loss=0.3218, simple_loss=0.3428, pruned_loss=0.1106, ctc_loss=0.199, over 19587.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3426, pruned_loss=0.1071, ctc_loss=0.2011, over 3827714.63 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-25 06:20:57,036 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.742e+02 2.122e+02 2.390e+02 2.706e+02 4.199e+02, threshold=4.779e+02, percent-clipped=0.0 +2024-08-25 06:21:12,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46016.0, ans=0.125 +2024-08-25 06:21:12,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46016.0, ans=0.125 +2024-08-25 06:21:27,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=46069.333333333336, ans=0.125 +2024-08-25 06:21:51,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=46176.0, ans=0.0 +2024-08-25 06:21:59,986 INFO [train.py:1114] (1/4) Epoch 4, batch 1200, loss[loss=0.3435, simple_loss=0.3644, pruned_loss=0.115, ctc_loss=0.2319, over 19854.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3437, pruned_loss=0.1074, ctc_loss=0.2016, over 3824320.42 frames. ], batch size: 57, lr: 3.17e-02, grad_scale: 32.0 +2024-08-25 06:22:17,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.11 vs. 
limit=15.0 +2024-08-25 06:22:21,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=46282.666666666664, ans=0.125 +2024-08-25 06:22:22,572 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:22:30,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=46336.0, ans=0.0 +2024-08-25 06:22:33,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=46336.0, ans=0.125 +2024-08-25 06:22:53,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=46389.333333333336, ans=0.000784927536231883 +2024-08-25 06:23:10,618 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.742e-02 +2024-08-25 06:23:12,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=46442.666666666664, ans=0.0007733333333333325 +2024-08-25 06:23:19,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=46442.666666666664, ans=0.125 +2024-08-25 06:23:21,319 INFO [train.py:1114] (1/4) Epoch 4, batch 1250, loss[loss=0.3121, simple_loss=0.3505, pruned_loss=0.09988, ctc_loss=0.1849, over 19492.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3432, pruned_loss=0.1064, ctc_loss=0.1996, over 3842481.00 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 32.0 +2024-08-25 06:23:21,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=46496.0, ans=0.0 +2024-08-25 06:23:26,214 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.962e+02 2.225e+02 2.468e+02 3.508e+02, threshold=4.451e+02, percent-clipped=0.0 +2024-08-25 06:23:30,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=46496.0, ans=0.2 +2024-08-25 06:24:10,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=46602.666666666664, ans=0.2 +2024-08-25 06:24:21,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=46656.0, ans=0.125 +2024-08-25 06:24:35,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46709.333333333336, ans=0.125 +2024-08-25 06:24:35,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=46709.333333333336, ans=0.125 +2024-08-25 06:24:36,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=46709.333333333336, ans=0.2 +2024-08-25 06:24:48,929 INFO [train.py:1114] (1/4) Epoch 4, batch 1300, loss[loss=0.3811, simple_loss=0.3816, pruned_loss=0.1388, ctc_loss=0.2573, over 18872.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3426, pruned_loss=0.1064, ctc_loss=0.1999, over 3847173.41 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0 +2024-08-25 06:25:52,881 INFO [train.py:1114] (1/4) Epoch 4, batch 1350, loss[loss=0.2746, simple_loss=0.3174, pruned_loss=0.08496, ctc_loss=0.1545, over 19767.00 frames. 
], tot_loss[loss=0.3165, simple_loss=0.342, pruned_loss=0.1058, ctc_loss=0.1987, over 3856426.63 frames. ], batch size: 54, lr: 3.16e-02, grad_scale: 32.0 +2024-08-25 06:25:54,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=47029.333333333336, ans=0.0006457971014492744 +2024-08-25 06:26:07,743 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 2.269e+02 2.560e+02 3.229e+02 4.886e+02, threshold=5.120e+02, percent-clipped=5.0 +2024-08-25 06:26:31,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47082.666666666664, ans=0.1 +2024-08-25 06:26:35,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.97 vs. limit=15.0 +2024-08-25 06:26:43,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47082.666666666664, ans=0.1 +2024-08-25 06:26:45,708 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.87 vs. limit=15.0 +2024-08-25 06:26:47,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=47136.0, ans=0.0006226086956521732 +2024-08-25 06:27:14,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=47242.666666666664, ans=0.125 +2024-08-25 06:27:20,739 INFO [train.py:1114] (1/4) Epoch 4, batch 1400, loss[loss=0.2741, simple_loss=0.2977, pruned_loss=0.08987, ctc_loss=0.1766, over 19673.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3417, pruned_loss=0.1058, ctc_loss=0.1988, over 3863554.18 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0 +2024-08-25 06:27:38,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=47296.0, ans=0.125 +2024-08-25 06:27:48,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47349.333333333336, ans=0.0 +2024-08-25 06:28:01,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47402.666666666664, ans=0.125 +2024-08-25 06:28:06,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=47402.666666666664, ans=0.0 +2024-08-25 06:28:43,704 INFO [train.py:1114] (1/4) Epoch 4, batch 1450, loss[loss=0.3652, simple_loss=0.3752, pruned_loss=0.1297, ctc_loss=0.2396, over 19649.00 frames. ], tot_loss[loss=0.3169, simple_loss=0.3422, pruned_loss=0.106, ctc_loss=0.199, over 3859954.86 frames. 
], batch size: 63, lr: 3.15e-02, grad_scale: 32.0 +2024-08-25 06:28:48,583 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.026e+02 2.327e+02 2.659e+02 4.329e+02, threshold=4.654e+02, percent-clipped=0.0 +2024-08-25 06:28:48,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=47562.666666666664, ans=0.125 +2024-08-25 06:28:56,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=47616.0, ans=0.0 +2024-08-25 06:29:06,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47616.0, ans=0.125 +2024-08-25 06:29:09,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47669.333333333336, ans=0.0 +2024-08-25 06:29:10,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=47669.333333333336, ans=0.025 +2024-08-25 06:29:34,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=25.30 vs. limit=15.0 +2024-08-25 06:29:44,348 INFO [train.py:1114] (1/4) Epoch 4, batch 1500, loss[loss=0.324, simple_loss=0.3589, pruned_loss=0.1048, ctc_loss=0.1986, over 19579.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3423, pruned_loss=0.1059, ctc_loss=0.1989, over 3860060.18 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 16.0 +2024-08-25 06:29:47,996 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:30:09,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=47936.0, ans=0.0 +2024-08-25 06:30:24,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=47989.333333333336, ans=0.025 +2024-08-25 06:30:27,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=47989.333333333336, ans=0.125 +2024-08-25 06:30:37,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=48042.666666666664, ans=0.025 +2024-08-25 06:31:30,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=48042.666666666664, ans=0.2 +2024-08-25 06:31:38,007 INFO [train.py:1114] (1/4) Epoch 4, batch 1550, loss[loss=0.2917, simple_loss=0.335, pruned_loss=0.09083, ctc_loss=0.1667, over 19613.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3422, pruned_loss=0.1061, ctc_loss=0.1992, over 3844874.58 frames. 
], batch size: 60, lr: 3.14e-02, grad_scale: 16.0 +2024-08-25 06:31:49,988 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 2.013e+02 2.262e+02 2.770e+02 1.090e+03, threshold=4.525e+02, percent-clipped=1.0 +2024-08-25 06:32:11,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=48149.333333333336, ans=0.2 +2024-08-25 06:32:33,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48256.0, ans=0.1 +2024-08-25 06:32:41,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48256.0, ans=0.1 +2024-08-25 06:33:26,257 INFO [train.py:1114] (1/4) Epoch 4, batch 1600, loss[loss=0.3121, simple_loss=0.3455, pruned_loss=0.1009, ctc_loss=0.1926, over 19836.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3419, pruned_loss=0.1062, ctc_loss=0.1993, over 3834288.08 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0 +2024-08-25 06:33:29,926 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:33:54,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=48416.0, ans=0.125 +2024-08-25 06:34:00,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0 +2024-08-25 06:34:03,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=48469.333333333336, ans=0.125 +2024-08-25 06:34:04,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=48469.333333333336, ans=0.0 +2024-08-25 06:34:05,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.36 vs. limit=15.0 +2024-08-25 06:34:42,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=48522.666666666664, ans=0.125 +2024-08-25 06:34:54,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=48576.0, ans=0.125 +2024-08-25 06:35:09,916 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:35:15,008 INFO [train.py:1114] (1/4) Epoch 4, batch 1650, loss[loss=0.3516, simple_loss=0.3646, pruned_loss=0.1229, ctc_loss=0.2317, over 19651.00 frames. ], tot_loss[loss=0.3159, simple_loss=0.341, pruned_loss=0.1057, ctc_loss=0.1987, over 3829513.74 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0 +2024-08-25 06:35:16,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.74 vs. limit=15.0 +2024-08-25 06:35:21,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.567e+02 2.079e+02 2.506e+02 2.996e+02 5.422e+02, threshold=5.011e+02, percent-clipped=2.0 +2024-08-25 06:35:40,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.02 vs. 
limit=10.0 +2024-08-25 06:36:37,429 INFO [train.py:1114] (1/4) Epoch 4, batch 1700, loss[loss=0.2579, simple_loss=0.2941, pruned_loss=0.0796, ctc_loss=0.1563, over 19694.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.3409, pruned_loss=0.1055, ctc_loss=0.1984, over 3843496.50 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0 +2024-08-25 06:36:52,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.59 vs. limit=12.0 +2024-08-25 06:37:38,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=49056.0, ans=0.00020521739130434716 +2024-08-25 06:37:45,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=49056.0, ans=0.0 +2024-08-25 06:38:16,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=49109.333333333336, ans=10.0 +2024-08-25 06:38:25,884 INFO [train.py:1114] (1/4) Epoch 4, batch 1750, loss[loss=0.2528, simple_loss=0.2921, pruned_loss=0.0768, ctc_loss=0.1496, over 19641.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3397, pruned_loss=0.1044, ctc_loss=0.1965, over 3848065.76 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0 +2024-08-25 06:38:33,082 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.628e+02 1.987e+02 2.278e+02 2.713e+02 5.908e+02, threshold=4.555e+02, percent-clipped=1.0 +2024-08-25 06:38:40,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=49216.0, ans=0.125 +2024-08-25 06:38:59,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=49269.333333333336, ans=0.0 +2024-08-25 06:39:01,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=49269.333333333336, ans=0.025 +2024-08-25 06:39:02,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.55 vs. limit=22.5 +2024-08-25 06:39:06,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=49322.666666666664, ans=0.000147246376811596 +2024-08-25 06:39:06,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-08-25 06:39:10,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=49322.666666666664, ans=0.0 +2024-08-25 06:39:11,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.35 vs. limit=15.0 +2024-08-25 06:39:31,715 INFO [train.py:1114] (1/4) Epoch 4, batch 1800, loss[loss=0.3176, simple_loss=0.349, pruned_loss=0.1031, ctc_loss=0.2001, over 19620.00 frames. ], tot_loss[loss=0.3135, simple_loss=0.3397, pruned_loss=0.1044, ctc_loss=0.1964, over 3849760.61 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0 +2024-08-25 06:40:31,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.51 vs. 
limit=22.5 +2024-08-25 06:40:48,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=49642.666666666664, ans=0.125 +2024-08-25 06:40:54,710 INFO [train.py:1114] (1/4) Epoch 4, batch 1850, loss[loss=0.3193, simple_loss=0.3476, pruned_loss=0.1051, ctc_loss=0.202, over 19583.00 frames. ], tot_loss[loss=0.3144, simple_loss=0.3402, pruned_loss=0.1049, ctc_loss=0.197, over 3853486.93 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0 +2024-08-25 06:40:59,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=49696.0, ans=0.0 +2024-08-25 06:41:01,661 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 2.149e+02 2.307e+02 2.574e+02 4.619e+02, threshold=4.614e+02, percent-clipped=1.0 +2024-08-25 06:41:15,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=49749.333333333336, ans=0.2 +2024-08-25 06:41:39,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.36 vs. limit=15.0 +2024-08-25 06:41:47,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=49909.333333333336, ans=0.0 +2024-08-25 06:41:50,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49909.333333333336, ans=0.125 +2024-08-25 06:41:54,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=49909.333333333336, ans=0.0 +2024-08-25 06:41:59,170 INFO [train.py:1114] (1/4) Epoch 4, batch 1900, loss[loss=0.2898, simple_loss=0.3314, pruned_loss=0.08876, ctc_loss=0.177, over 19639.00 frames. ], tot_loss[loss=0.3148, simple_loss=0.341, pruned_loss=0.105, ctc_loss=0.197, over 3858891.13 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 32.0 +2024-08-25 06:42:32,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.80 vs. limit=22.5 +2024-08-25 06:43:14,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=50122.666666666664, ans=0.09899494936611666 +2024-08-25 06:43:37,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=50176.0, ans=0.09899494936611666 +2024-08-25 06:43:39,883 INFO [train.py:1114] (1/4) Epoch 4, batch 1950, loss[loss=0.2621, simple_loss=0.3093, pruned_loss=0.07667, ctc_loss=0.1539, over 19581.00 frames. ], tot_loss[loss=0.3148, simple_loss=0.3417, pruned_loss=0.1046, ctc_loss=0.1965, over 3868493.40 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 32.0 +2024-08-25 06:43:41,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=50229.333333333336, ans=0.09899494936611666 +2024-08-25 06:43:45,593 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.759e+02 2.065e+02 2.259e+02 2.635e+02 4.732e+02, threshold=4.517e+02, percent-clipped=1.0 +2024-08-25 06:44:00,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.46 vs. 
limit=15.0 +2024-08-25 06:44:01,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=50336.0, ans=0.025 +2024-08-25 06:44:18,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=50389.333333333336, ans=0.125 +2024-08-25 06:44:19,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.01 vs. limit=10.0 +2024-08-25 06:44:48,804 INFO [train.py:1114] (1/4) Epoch 4, batch 2000, loss[loss=0.2666, simple_loss=0.2958, pruned_loss=0.08521, ctc_loss=0.1672, over 19608.00 frames. ], tot_loss[loss=0.3159, simple_loss=0.3423, pruned_loss=0.1053, ctc_loss=0.1976, over 3853087.84 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0 +2024-08-25 06:45:44,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.52 vs. limit=15.0 +2024-08-25 06:45:54,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50602.666666666664, ans=0.125 +2024-08-25 06:46:13,596 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:46:32,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=50709.333333333336, ans=0.07 +2024-08-25 06:46:34,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=50762.666666666664, ans=0.0 +2024-08-25 06:46:35,025 INFO [train.py:1114] (1/4) Epoch 4, batch 2050, loss[loss=0.2484, simple_loss=0.2912, pruned_loss=0.07412, ctc_loss=0.1435, over 19738.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3412, pruned_loss=0.1052, ctc_loss=0.1976, over 3850142.93 frames. 
], batch size: 47, lr: 3.08e-02, grad_scale: 32.0 +2024-08-25 06:46:36,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=50762.666666666664, ans=0.2 +2024-08-25 06:46:45,622 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 2.046e+02 2.338e+02 2.720e+02 4.537e+02, threshold=4.675e+02, percent-clipped=1.0 +2024-08-25 06:46:51,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=50816.0, ans=0.125 +2024-08-25 06:47:18,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50922.666666666664, ans=0.125 +2024-08-25 06:47:23,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50922.666666666664, ans=0.1 +2024-08-25 06:47:27,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50922.666666666664, ans=0.125 +2024-08-25 06:47:32,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=50976.0, ans=0.125 +2024-08-25 06:47:37,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=50976.0, ans=0.5 +2024-08-25 06:47:43,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=50976.0, ans=0.0 +2024-08-25 06:47:47,556 INFO [train.py:1114] (1/4) Epoch 4, batch 2100, loss[loss=0.279, simple_loss=0.3226, pruned_loss=0.08636, ctc_loss=0.1565, over 19799.00 frames. ], tot_loss[loss=0.3141, simple_loss=0.3404, pruned_loss=0.1046, ctc_loss=0.1962, over 3857068.53 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0 +2024-08-25 06:48:02,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51029.333333333336, ans=0.1 +2024-08-25 06:48:38,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51082.666666666664, ans=0.125 +2024-08-25 06:48:43,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=51136.0, ans=0.125 +2024-08-25 06:48:45,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=51136.0, ans=0.125 +2024-08-25 06:49:09,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=51189.333333333336, ans=0.2 +2024-08-25 06:49:31,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=51242.666666666664, ans=0.5 +2024-08-25 06:49:32,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51242.666666666664, ans=0.1 +2024-08-25 06:49:33,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=51242.666666666664, ans=0.125 +2024-08-25 06:49:45,527 INFO [train.py:1114] (1/4) Epoch 4, batch 2150, loss[loss=0.3019, simple_loss=0.3292, pruned_loss=0.1001, ctc_loss=0.186, over 19577.00 frames. 
], tot_loss[loss=0.312, simple_loss=0.3388, pruned_loss=0.1037, ctc_loss=0.1945, over 3867589.37 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0 +2024-08-25 06:49:54,450 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.035e+02 2.305e+02 2.639e+02 4.596e+02, threshold=4.610e+02, percent-clipped=0.0 +2024-08-25 06:50:06,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=51296.0, ans=0.0 +2024-08-25 06:50:12,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=51349.333333333336, ans=0.0 +2024-08-25 06:50:20,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.69 vs. limit=10.0 +2024-08-25 06:51:15,390 INFO [train.py:1114] (1/4) Epoch 4, batch 2200, loss[loss=0.3026, simple_loss=0.3466, pruned_loss=0.09274, ctc_loss=0.1829, over 19581.00 frames. ], tot_loss[loss=0.3113, simple_loss=0.3385, pruned_loss=0.1033, ctc_loss=0.1936, over 3866382.53 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0 +2024-08-25 06:51:25,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=51616.0, ans=0.2 +2024-08-25 06:51:33,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51616.0, ans=0.1 +2024-08-25 06:51:33,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=51616.0, ans=0.0 +2024-08-25 06:51:36,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=51616.0, ans=0.125 +2024-08-25 06:51:52,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=51669.333333333336, ans=0.125 +2024-08-25 06:51:53,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=51669.333333333336, ans=0.2 +2024-08-25 06:51:53,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0 +2024-08-25 06:51:59,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.37 vs. limit=22.5 +2024-08-25 06:52:24,904 INFO [train.py:1114] (1/4) Epoch 4, batch 2250, loss[loss=0.3152, simple_loss=0.3494, pruned_loss=0.1036, ctc_loss=0.1843, over 19607.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.339, pruned_loss=0.104, ctc_loss=0.1947, over 3866626.85 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0 +2024-08-25 06:52:26,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51829.333333333336, ans=0.1 +2024-08-25 06:52:27,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.32 vs. 
limit=22.5 +2024-08-25 06:52:32,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.164e+02 2.622e+02 3.263e+02 6.940e+02, threshold=5.245e+02, percent-clipped=2.0 +2024-08-25 06:52:41,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51882.666666666664, ans=0.125 +2024-08-25 06:52:50,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=51936.0, ans=0.125 +2024-08-25 06:53:16,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.01 vs. limit=15.0 +2024-08-25 06:53:30,935 INFO [train.py:1114] (1/4) Epoch 4, batch 2300, loss[loss=0.3195, simple_loss=0.3357, pruned_loss=0.1102, ctc_loss=0.2072, over 19509.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3383, pruned_loss=0.1039, ctc_loss=0.1945, over 3861051.19 frames. ], batch size: 49, lr: 3.06e-02, grad_scale: 32.0 +2024-08-25 06:53:41,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52096.0, ans=0.125 +2024-08-25 06:53:45,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52149.333333333336, ans=0.125 +2024-08-25 06:53:45,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.56 vs. limit=6.0 +2024-08-25 06:53:50,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=52149.333333333336, ans=0.125 +2024-08-25 06:53:54,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=52149.333333333336, ans=0.025 +2024-08-25 06:54:03,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=52202.666666666664, ans=0.025 +2024-08-25 06:54:08,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.23 vs. limit=15.0 +2024-08-25 06:54:28,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52309.333333333336, ans=0.0 +2024-08-25 06:54:53,354 INFO [train.py:1114] (1/4) Epoch 4, batch 2350, loss[loss=0.3222, simple_loss=0.3487, pruned_loss=0.1089, ctc_loss=0.1951, over 19643.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3377, pruned_loss=0.1033, ctc_loss=0.1937, over 3864614.61 frames. 
], batch size: 63, lr: 3.05e-02, grad_scale: 32.0 +2024-08-25 06:54:56,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=52362.666666666664, ans=0.125 +2024-08-25 06:54:58,711 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.121e+02 2.497e+02 3.048e+02 4.745e+02, threshold=4.995e+02, percent-clipped=0.0 +2024-08-25 06:55:01,104 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.041e-02 +2024-08-25 06:55:11,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=52416.0, ans=0.125 +2024-08-25 07:03:05,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=52576.0, ans=0.0 +2024-08-25 07:06:48,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52576.0, ans=0.1 +2024-08-25 07:07:21,817 INFO [train.py:1114] (1/4) Epoch 4, batch 2400, loss[loss=0.3551, simple_loss=0.3699, pruned_loss=0.1242, ctc_loss=0.2297, over 19266.00 frames. ], tot_loss[loss=0.3133, simple_loss=0.3401, pruned_loss=0.1042, ctc_loss=0.1954, over 3858684.65 frames. ], batch size: 71, lr: 3.05e-02, grad_scale: 32.0 +2024-08-25 07:07:47,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52629.333333333336, ans=0.1 +2024-08-25 07:08:18,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52629.333333333336, ans=0.0 +2024-08-25 07:10:22,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=15.0 +2024-08-25 07:34:58,795 INFO [train.py:1114] (1/4) Epoch 4, batch 2450, loss[loss=0.4118, simple_loss=0.3863, pruned_loss=0.1578, ctc_loss=0.3041, over 13884.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3452, pruned_loss=0.1086, ctc_loss=0.2037, over 3734050.30 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0 +2024-08-25 07:36:27,111 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.096e+02 2.355e+02 2.735e+02 5.246e+02, threshold=4.710e+02, percent-clipped=1.0 +2024-08-25 07:43:07,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53002.666666666664, ans=0.1 +2024-08-25 07:43:10,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.39 vs. limit=15.0 +2024-08-25 07:44:31,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53056.0, ans=0.125 +2024-08-25 07:46:30,880 INFO [train.py:1114] (1/4) Epoch 5, batch 0, loss[loss=0.2997, simple_loss=0.3171, pruned_loss=0.1029, ctc_loss=0.1908, over 19837.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3171, pruned_loss=0.1029, ctc_loss=0.1908, over 19837.00 frames. ], batch size: 49, lr: 2.83e-02, grad_scale: 32.0 +2024-08-25 07:46:30,881 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 07:49:02,116 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.2543, simple_loss=0.3259, pruned_loss=0.06691, ctc_loss=0.1221, over 944034.00 frames. 
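Each train.py:1114 entry above logs two loss records: the bracketed loss[...] for the current batch and tot_loss[...], a running average over the epoch so far, each split into simple_loss, pruned_loss and ctc_loss components and normalized by the number of acoustic frames. As a rough illustration of how such a composite objective is typically assembled, here is a minimal Python sketch; the combine_losses helper and its three scale factors are assumptions in the style of k2/icefall pruned-transducer recipes, not code or values recovered from this log.

# Illustrative sketch only: the scales below are assumed defaults,
# not the settings used in this run.
def combine_losses(
    simple_loss: float,   # full-sum transducer loss over a trivial joiner
    pruned_loss: float,   # transducer loss restricted to a pruned lattice
    ctc_loss: float,      # auxiliary CTC loss on the encoder output
    num_frames: float,    # the "over N frames" field in the log
    simple_loss_scale: float = 0.5,   # assumed weight
    pruned_loss_scale: float = 1.0,   # assumed weight
    ctc_loss_scale: float = 0.2,      # assumed weight
) -> dict:
    """Return per-frame loss components in the shape the log prints them."""
    total = (
        simple_loss_scale * simple_loss
        + pruned_loss_scale * pruned_loss
        + ctc_loss_scale * ctc_loss
    )
    per_frame = {
        "loss": total,
        "simple_loss": simple_loss,
        "pruned_loss": pruned_loss,
        "ctc_loss": ctc_loss,
    }
    return {name: value / num_frames for name, value in per_frame.items()}

The periodic optim.py:487 WARNING entries are the matching gradient-clipping diagnostics: each reports quartiles of recently observed gradient norms, the clipping threshold derived from them, and percent-clipped, the fraction of recent batches whose gradients hit that threshold.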
+2024-08-25 07:49:02,117 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 07:50:55,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=15.0 +2024-08-25 07:52:00,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=53157.333333333336, ans=15.0 +2024-08-25 07:54:37,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=53157.333333333336, ans=0.0 +2024-08-25 07:57:02,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.71 vs. limit=15.0 +2024-08-25 07:58:12,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=53210.666666666664, ans=0.0 +2024-08-25 08:01:56,939 INFO [train.py:1114] (1/4) Epoch 5, batch 50, loss[loss=0.2747, simple_loss=0.3072, pruned_loss=0.08795, ctc_loss=0.1657, over 19700.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.3415, pruned_loss=0.1053, ctc_loss=0.1982, over 844585.17 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0 +2024-08-25 08:03:39,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.27 vs. limit=15.0 +2024-08-25 08:03:51,540 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 1.984e+02 2.202e+02 2.522e+02 4.045e+02, threshold=4.404e+02, percent-clipped=0.0 +2024-08-25 08:06:10,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=53530.666666666664, ans=0.0 +2024-08-25 08:06:48,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=10.63 vs. limit=15.0 +2024-08-25 08:07:22,860 INFO [train.py:1114] (1/4) Epoch 5, batch 100, loss[loss=0.3123, simple_loss=0.3322, pruned_loss=0.1045, ctc_loss=0.2083, over 19728.00 frames. ], tot_loss[loss=0.3155, simple_loss=0.3423, pruned_loss=0.1048, ctc_loss=0.1981, over 1499416.85 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0 +2024-08-25 08:08:16,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=53690.666666666664, ans=0.0 +2024-08-25 08:08:25,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.54 vs. limit=15.0 +2024-08-25 08:08:37,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.88 vs. 
limit=22.5 +2024-08-25 08:08:43,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=53744.0, ans=12.0 +2024-08-25 08:08:50,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=53744.0, ans=0.04949747468305833 +2024-08-25 08:09:05,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53797.333333333336, ans=0.1 +2024-08-25 08:09:45,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=53797.333333333336, ans=0.125 +2024-08-25 08:10:03,743 INFO [train.py:1114] (1/4) Epoch 5, batch 150, loss[loss=0.2441, simple_loss=0.2901, pruned_loss=0.07109, ctc_loss=0.1399, over 19717.00 frames. ], tot_loss[loss=0.3115, simple_loss=0.3393, pruned_loss=0.103, ctc_loss=0.1943, over 2027995.75 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0 +2024-08-25 08:10:08,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=53904.0, ans=0.125 +2024-08-25 08:10:27,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=53904.0, ans=0.125 +2024-08-25 08:10:37,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.17 vs. limit=15.0 +2024-08-25 08:10:40,323 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 2.115e+02 2.389e+02 2.764e+02 4.531e+02, threshold=4.777e+02, percent-clipped=1.0 +2024-08-25 08:11:10,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54010.666666666664, ans=0.1 +2024-08-25 08:11:10,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.79 vs. limit=22.5 +2024-08-25 08:11:11,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=54010.666666666664, ans=0.2 +2024-08-25 08:11:24,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=54064.0, ans=0.125 +2024-08-25 08:11:47,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=54117.333333333336, ans=0.2 +2024-08-25 08:12:01,136 INFO [train.py:1114] (1/4) Epoch 5, batch 200, loss[loss=0.3474, simple_loss=0.3674, pruned_loss=0.1172, ctc_loss=0.2323, over 18206.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3361, pruned_loss=0.1007, ctc_loss=0.1902, over 2435865.93 frames. 
], batch size: 85, lr: 2.81e-02, grad_scale: 32.0 +2024-08-25 08:13:52,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=54170.666666666664, ans=0.0 +2024-08-25 08:15:10,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=54330.666666666664, ans=0.025 +2024-08-25 08:15:57,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=54437.333333333336, ans=0.0 +2024-08-25 08:15:58,938 INFO [train.py:1114] (1/4) Epoch 5, batch 250, loss[loss=0.3264, simple_loss=0.3502, pruned_loss=0.1113, ctc_loss=0.2001, over 19452.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.3357, pruned_loss=0.1006, ctc_loss=0.1892, over 2756215.66 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0 +2024-08-25 08:16:47,930 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 1.969e+02 2.164e+02 2.373e+02 3.326e+02, threshold=4.328e+02, percent-clipped=0.0 +2024-08-25 08:16:51,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=54544.0, ans=0.125 +2024-08-25 08:17:00,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=54544.0, ans=0.2 +2024-08-25 08:17:18,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.62 vs. limit=15.0 +2024-08-25 08:17:20,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=54650.666666666664, ans=0.125 +2024-08-25 08:17:25,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=54650.666666666664, ans=0.0 +2024-08-25 08:17:25,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.27 vs. limit=22.5 +2024-08-25 08:17:32,561 INFO [train.py:1114] (1/4) Epoch 5, batch 300, loss[loss=0.307, simple_loss=0.3434, pruned_loss=0.09964, ctc_loss=0.1783, over 19520.00 frames. ], tot_loss[loss=0.304, simple_loss=0.334, pruned_loss=0.09959, ctc_loss=0.1872, over 3000581.82 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0 +2024-08-25 08:17:42,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.46 vs. limit=22.5 +2024-08-25 08:17:47,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0 +2024-08-25 08:17:56,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.89 vs. 
limit=12.0 +2024-08-25 08:18:29,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=54917.333333333336, ans=0.0 +2024-08-25 08:18:35,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=54917.333333333336, ans=0.0 +2024-08-25 08:18:36,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=54917.333333333336, ans=0.125 +2024-08-25 08:18:38,528 INFO [train.py:1114] (1/4) Epoch 5, batch 350, loss[loss=0.3063, simple_loss=0.3273, pruned_loss=0.1037, ctc_loss=0.1948, over 19774.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.335, pruned_loss=0.1001, ctc_loss=0.1882, over 3190486.70 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 16.0 +2024-08-25 08:18:54,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.52 vs. limit=10.0 +2024-08-25 08:19:02,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=55024.0, ans=0.0 +2024-08-25 08:19:10,797 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.967e+02 2.265e+02 2.794e+02 4.039e+02, threshold=4.529e+02, percent-clipped=0.0 +2024-08-25 08:19:42,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.53 vs. limit=15.0 +2024-08-25 08:19:43,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.23 vs. limit=22.5 +2024-08-25 08:19:51,984 INFO [train.py:1114] (1/4) Epoch 5, batch 400, loss[loss=0.2861, simple_loss=0.3294, pruned_loss=0.08761, ctc_loss=0.1692, over 19505.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3349, pruned_loss=0.09999, ctc_loss=0.1881, over 3341696.15 frames. 
], batch size: 54, lr: 2.80e-02, grad_scale: 32.0 +2024-08-25 08:19:59,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55237.333333333336, ans=0.125 +2024-08-25 08:20:03,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55290.666666666664, ans=0.125 +2024-08-25 08:20:15,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=55290.666666666664, ans=0.2 +2024-08-25 08:20:20,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=55344.0, ans=0.0 +2024-08-25 08:20:40,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=55397.333333333336, ans=0.2 +2024-08-25 08:20:50,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=55397.333333333336, ans=0.125 +2024-08-25 08:21:19,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=55450.666666666664, ans=0.015 +2024-08-25 08:21:19,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55450.666666666664, ans=0.0 +2024-08-25 08:21:27,045 INFO [train.py:1114] (1/4) Epoch 5, batch 450, loss[loss=0.2821, simple_loss=0.3289, pruned_loss=0.08455, ctc_loss=0.1653, over 19626.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.334, pruned_loss=0.09922, ctc_loss=0.1867, over 3450532.18 frames. ], batch size: 55, lr: 2.79e-02, grad_scale: 32.0 +2024-08-25 08:21:31,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=55504.0, ans=0.025 +2024-08-25 08:21:45,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=55557.333333333336, ans=0.125 +2024-08-25 08:21:47,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.640e+02 2.008e+02 2.249e+02 2.774e+02 4.428e+02, threshold=4.498e+02, percent-clipped=0.0 +2024-08-25 08:21:58,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=55610.666666666664, ans=0.125 +2024-08-25 08:22:03,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=55664.0, ans=0.1 +2024-08-25 08:22:04,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=55664.0, ans=0.125 +2024-08-25 08:22:26,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=55717.333333333336, ans=0.125 +2024-08-25 08:22:42,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.01 vs. limit=15.0 +2024-08-25 08:22:44,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.03 vs. limit=15.0 +2024-08-25 08:22:58,212 INFO [train.py:1114] (1/4) Epoch 5, batch 500, loss[loss=0.3221, simple_loss=0.355, pruned_loss=0.1061, ctc_loss=0.1923, over 19671.00 frames. 
], tot_loss[loss=0.3006, simple_loss=0.3318, pruned_loss=0.09794, ctc_loss=0.1839, over 3545615.28 frames. ], batch size: 63, lr: 2.79e-02, grad_scale: 32.0 +2024-08-25 08:23:24,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=55824.0, ans=0.125 +2024-08-25 08:23:26,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.18 vs. limit=22.5 +2024-08-25 08:23:30,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=55824.0, ans=0.125 +2024-08-25 08:23:40,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=55824.0, ans=0.0 +2024-08-25 08:23:57,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=55930.666666666664, ans=10.0 +2024-08-25 08:24:00,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55930.666666666664, ans=0.1 +2024-08-25 08:24:16,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.26 vs. limit=15.0 +2024-08-25 08:24:21,961 INFO [train.py:1114] (1/4) Epoch 5, batch 550, loss[loss=0.3362, simple_loss=0.3657, pruned_loss=0.1122, ctc_loss=0.2058, over 19310.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3324, pruned_loss=0.09841, ctc_loss=0.1849, over 3607774.45 frames. ], batch size: 71, lr: 2.78e-02, grad_scale: 32.0 +2024-08-25 08:24:24,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=56037.333333333336, ans=0.0 +2024-08-25 08:24:30,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56037.333333333336, ans=0.125 +2024-08-25 08:24:47,071 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.533e+02 1.991e+02 2.247e+02 2.867e+02 6.260e+02, threshold=4.494e+02, percent-clipped=1.0 +2024-08-25 08:24:58,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=56144.0, ans=0.2 +2024-08-25 08:25:00,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56197.333333333336, ans=0.125 +2024-08-25 08:25:13,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=56197.333333333336, ans=0.2 +2024-08-25 08:25:25,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56250.666666666664, ans=0.125 +2024-08-25 08:25:37,706 INFO [train.py:1114] (1/4) Epoch 5, batch 600, loss[loss=0.3366, simple_loss=0.3555, pruned_loss=0.1159, ctc_loss=0.2143, over 19452.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3326, pruned_loss=0.09811, ctc_loss=0.1845, over 3664908.07 frames. 
], batch size: 67, lr: 2.78e-02, grad_scale: 32.0 +2024-08-25 08:25:37,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=56304.0, ans=0.025 +2024-08-25 08:25:48,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=56304.0, ans=0.0 +2024-08-25 08:25:59,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=56357.333333333336, ans=0.2 +2024-08-25 08:26:20,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.18 vs. limit=15.0 +2024-08-25 08:26:33,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56517.333333333336, ans=0.125 +2024-08-25 08:26:33,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. limit=15.0 +2024-08-25 08:26:45,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=12.0 +2024-08-25 08:26:47,458 INFO [train.py:1114] (1/4) Epoch 5, batch 650, loss[loss=0.2969, simple_loss=0.328, pruned_loss=0.09665, ctc_loss=0.1814, over 19773.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3319, pruned_loss=0.09771, ctc_loss=0.1837, over 3716151.69 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:27:13,350 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.957e+02 2.352e+02 2.685e+02 4.359e+02, threshold=4.704e+02, percent-clipped=0.0 +2024-08-25 08:27:25,261 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 08:27:51,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56784.0, ans=0.1 +2024-08-25 08:28:10,104 INFO [train.py:1114] (1/4) Epoch 5, batch 700, loss[loss=0.3208, simple_loss=0.3356, pruned_loss=0.1111, ctc_loss=0.2096, over 19703.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3326, pruned_loss=0.09846, ctc_loss=0.1849, over 3748123.76 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:28:22,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=56837.333333333336, ans=0.2 +2024-08-25 08:29:06,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.36 vs. limit=12.0 +2024-08-25 08:29:14,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.68 vs. limit=15.0 +2024-08-25 08:29:25,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=15.0 +2024-08-25 08:29:36,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.12 vs. limit=22.5 +2024-08-25 08:29:41,359 INFO [train.py:1114] (1/4) Epoch 5, batch 750, loss[loss=0.3078, simple_loss=0.3377, pruned_loss=0.1015, ctc_loss=0.1872, over 19491.00 frames. 
], tot_loss[loss=0.3008, simple_loss=0.332, pruned_loss=0.09801, ctc_loss=0.1841, over 3774347.55 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:30:03,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57104.0, ans=0.125 +2024-08-25 08:30:08,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=57104.0, ans=0.125 +2024-08-25 08:30:20,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=57157.333333333336, ans=0.2 +2024-08-25 08:30:28,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57157.333333333336, ans=0.125 +2024-08-25 08:30:38,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0 +2024-08-25 08:30:40,359 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 2.099e+02 2.472e+02 3.181e+02 5.803e+02, threshold=4.945e+02, percent-clipped=2.0 +2024-08-25 08:31:06,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=57210.666666666664, ans=0.09899494936611666 +2024-08-25 08:31:07,397 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 08:32:01,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.44 vs. limit=22.5 +2024-08-25 08:32:05,738 INFO [train.py:1114] (1/4) Epoch 5, batch 800, loss[loss=0.2599, simple_loss=0.3026, pruned_loss=0.07906, ctc_loss=0.1477, over 19818.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3315, pruned_loss=0.09781, ctc_loss=0.1835, over 3795939.85 frames. ], batch size: 49, lr: 2.76e-02, grad_scale: 32.0 +2024-08-25 08:32:36,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=57424.0, ans=0.125 +2024-08-25 08:32:41,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57424.0, ans=0.1 +2024-08-25 08:33:37,745 INFO [train.py:1114] (1/4) Epoch 5, batch 850, loss[loss=0.316, simple_loss=0.3517, pruned_loss=0.1017, ctc_loss=0.1923, over 19645.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3308, pruned_loss=0.09714, ctc_loss=0.1824, over 3814806.93 frames. 
], batch size: 59, lr: 2.76e-02, grad_scale: 32.0 +2024-08-25 08:34:26,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.963e+02 2.197e+02 2.544e+02 4.330e+02, threshold=4.395e+02, percent-clipped=0.0 +2024-08-25 08:34:26,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57690.666666666664, ans=0.125 +2024-08-25 08:34:55,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=57797.333333333336, ans=0.025 +2024-08-25 08:35:08,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=57850.666666666664, ans=0.025 +2024-08-25 08:35:17,361 INFO [train.py:1114] (1/4) Epoch 5, batch 900, loss[loss=0.2814, simple_loss=0.3141, pruned_loss=0.08989, ctc_loss=0.1725, over 19813.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3316, pruned_loss=0.09782, ctc_loss=0.1838, over 3819785.67 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0 +2024-08-25 08:35:28,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=57904.0, ans=0.05 +2024-08-25 08:35:45,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.46 vs. limit=15.0 +2024-08-25 08:36:06,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=58064.0, ans=22.5 +2024-08-25 08:36:28,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58117.333333333336, ans=0.1 +2024-08-25 08:36:30,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=58117.333333333336, ans=0.125 +2024-08-25 08:36:30,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.15 vs. limit=6.0 +2024-08-25 08:36:37,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=58117.333333333336, ans=0.2 +2024-08-25 08:36:41,309 INFO [train.py:1114] (1/4) Epoch 5, batch 950, loss[loss=0.3122, simple_loss=0.3373, pruned_loss=0.1034, ctc_loss=0.2005, over 19489.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3316, pruned_loss=0.09777, ctc_loss=0.1838, over 3821498.10 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0 +2024-08-25 08:37:02,452 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.615e+02 2.021e+02 2.236e+02 2.607e+02 6.234e+02, threshold=4.471e+02, percent-clipped=1.0 +2024-08-25 08:37:14,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0 +2024-08-25 08:37:38,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=12.0 +2024-08-25 08:37:49,067 INFO [train.py:1114] (1/4) Epoch 5, batch 1000, loss[loss=0.2729, simple_loss=0.3089, pruned_loss=0.08621, ctc_loss=0.1612, over 19869.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3324, pruned_loss=0.09831, ctc_loss=0.1848, over 3818493.34 frames. 
], batch size: 52, lr: 2.74e-02, grad_scale: 32.0 +2024-08-25 08:37:50,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=58437.333333333336, ans=0.0 +2024-08-25 08:38:24,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=58490.666666666664, ans=0.2 +2024-08-25 08:38:51,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=58544.0, ans=0.0 +2024-08-25 08:39:01,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=58597.333333333336, ans=0.2 +2024-08-25 08:39:17,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=58650.666666666664, ans=0.0 +2024-08-25 08:39:18,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=58704.0, ans=0.025 +2024-08-25 08:39:20,331 INFO [train.py:1114] (1/4) Epoch 5, batch 1050, loss[loss=0.2895, simple_loss=0.3369, pruned_loss=0.08841, ctc_loss=0.1634, over 19830.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3315, pruned_loss=0.09774, ctc_loss=0.1839, over 3824036.28 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0 +2024-08-25 08:39:30,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=58757.333333333336, ans=0.125 +2024-08-25 08:39:36,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=15.0 +2024-08-25 08:39:39,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=58757.333333333336, ans=0.125 +2024-08-25 08:39:41,241 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.929e+02 2.228e+02 2.594e+02 4.447e+02, threshold=4.456e+02, percent-clipped=0.0 +2024-08-25 08:40:09,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=58864.0, ans=0.125 +2024-08-25 08:40:24,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=58864.0, ans=0.2 +2024-08-25 08:40:35,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.96 vs. limit=15.0 +2024-08-25 08:40:42,203 INFO [train.py:1114] (1/4) Epoch 5, batch 1100, loss[loss=0.2777, simple_loss=0.3122, pruned_loss=0.0876, ctc_loss=0.1702, over 19580.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3315, pruned_loss=0.09767, ctc_loss=0.1839, over 3831028.62 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0 +2024-08-25 08:40:51,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.93 vs. 
limit=6.0 +2024-08-25 08:40:54,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=58970.666666666664, ans=0.0 +2024-08-25 08:40:56,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58970.666666666664, ans=0.125 +2024-08-25 08:41:19,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59077.333333333336, ans=0.2 +2024-08-25 08:41:22,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59077.333333333336, ans=0.1 +2024-08-25 08:41:33,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=59130.666666666664, ans=0.025 +2024-08-25 08:41:54,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.38 vs. limit=15.0 +2024-08-25 08:42:06,695 INFO [train.py:1114] (1/4) Epoch 5, batch 1150, loss[loss=0.2645, simple_loss=0.3135, pruned_loss=0.07846, ctc_loss=0.1462, over 19596.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3313, pruned_loss=0.09745, ctc_loss=0.1835, over 3830538.20 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 32.0 +2024-08-25 08:42:17,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=59237.333333333336, ans=0.09899494936611666 +2024-08-25 08:42:21,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=59237.333333333336, ans=0.125 +2024-08-25 08:42:38,150 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 2.022e+02 2.244e+02 2.636e+02 4.087e+02, threshold=4.489e+02, percent-clipped=0.0 +2024-08-25 08:42:38,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=59290.666666666664, ans=0.125 +2024-08-25 08:43:17,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=59397.333333333336, ans=0.025 +2024-08-25 08:43:23,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=59450.666666666664, ans=0.125 +2024-08-25 08:43:33,331 INFO [train.py:1114] (1/4) Epoch 5, batch 1200, loss[loss=0.2956, simple_loss=0.3356, pruned_loss=0.09288, ctc_loss=0.1745, over 19832.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3325, pruned_loss=0.09822, ctc_loss=0.1847, over 3824495.26 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0 +2024-08-25 08:43:34,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=59504.0, ans=0.125 +2024-08-25 08:43:42,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=59504.0, ans=0.0 +2024-08-25 08:43:56,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.63 vs. 
limit=22.5 +2024-08-25 08:44:00,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=59557.333333333336, ans=0.0 +2024-08-25 08:44:15,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=59610.666666666664, ans=0.025 +2024-08-25 08:44:49,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=59717.333333333336, ans=0.0 +2024-08-25 08:44:50,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59717.333333333336, ans=0.1 +2024-08-25 08:44:52,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=15.0 +2024-08-25 08:44:55,319 INFO [train.py:1114] (1/4) Epoch 5, batch 1250, loss[loss=0.3266, simple_loss=0.3507, pruned_loss=0.1118, ctc_loss=0.1971, over 19512.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3325, pruned_loss=0.09765, ctc_loss=0.1836, over 3842699.00 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0 +2024-08-25 08:44:56,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59770.666666666664, ans=0.125 +2024-08-25 08:45:01,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59770.666666666664, ans=0.1 +2024-08-25 08:45:21,211 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 1.906e+02 2.098e+02 2.362e+02 4.005e+02, threshold=4.196e+02, percent-clipped=0.0 +2024-08-25 08:45:41,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=59930.666666666664, ans=0.0 +2024-08-25 08:45:46,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=59930.666666666664, ans=0.0 +2024-08-25 08:45:55,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=59984.0, ans=0.07 +2024-08-25 08:45:56,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.40 vs. limit=15.0 +2024-08-25 08:46:03,572 INFO [train.py:1114] (1/4) Epoch 5, batch 1300, loss[loss=0.331, simple_loss=0.3491, pruned_loss=0.1148, ctc_loss=0.2081, over 18800.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3308, pruned_loss=0.09662, ctc_loss=0.1817, over 3846418.41 frames. 
], batch size: 76, lr: 2.72e-02, grad_scale: 32.0 +2024-08-25 08:46:16,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=60037.333333333336, ans=0.0 +2024-08-25 08:46:22,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=60037.333333333336, ans=0.125 +2024-08-25 08:46:38,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=60090.666666666664, ans=0.125 +2024-08-25 08:46:43,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=60144.0, ans=0.125 +2024-08-25 08:46:59,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60197.333333333336, ans=0.125 +2024-08-25 08:47:27,092 INFO [train.py:1114] (1/4) Epoch 5, batch 1350, loss[loss=0.2784, simple_loss=0.3267, pruned_loss=0.08333, ctc_loss=0.1584, over 19761.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3298, pruned_loss=0.09577, ctc_loss=0.1802, over 3857363.12 frames. ], batch size: 54, lr: 2.71e-02, grad_scale: 32.0 +2024-08-25 08:47:27,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=60304.0, ans=0.125 +2024-08-25 08:47:36,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0 +2024-08-25 08:47:47,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=60357.333333333336, ans=0.1 +2024-08-25 08:47:52,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60357.333333333336, ans=0.125 +2024-08-25 08:48:06,343 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.950e+02 2.204e+02 2.621e+02 4.331e+02, threshold=4.409e+02, percent-clipped=1.0 +2024-08-25 08:48:23,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=60410.666666666664, ans=0.0 +2024-08-25 08:49:03,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=60517.333333333336, ans=0.125 +2024-08-25 08:49:14,319 INFO [train.py:1114] (1/4) Epoch 5, batch 1400, loss[loss=0.238, simple_loss=0.2796, pruned_loss=0.07164, ctc_loss=0.1327, over 19679.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3295, pruned_loss=0.09558, ctc_loss=0.1797, over 3863772.33 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0 +2024-08-25 08:49:50,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.10 vs. 
limit=12.0 +2024-08-25 08:49:57,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60730.666666666664, ans=0.125 +2024-08-25 08:55:49,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60784.0, ans=0.1 +2024-08-25 09:01:11,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=60784.0, ans=0.125 +2024-08-25 09:01:57,437 INFO [train.py:1114] (1/4) Epoch 5, batch 1450, loss[loss=0.3356, simple_loss=0.3626, pruned_loss=0.1121, ctc_loss=0.2113, over 19674.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3309, pruned_loss=0.09647, ctc_loss=0.1813, over 3862302.65 frames. ], batch size: 63, lr: 2.71e-02, grad_scale: 32.0 +2024-08-25 09:04:56,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=60837.333333333336, ans=0.2 +2024-08-25 09:05:53,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-08-25 09:08:29,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60890.666666666664, ans=0.1 +2024-08-25 09:11:44,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60890.666666666664, ans=0.125 +2024-08-25 09:14:29,264 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 1.942e+02 2.164e+02 2.480e+02 4.633e+02, threshold=4.329e+02, percent-clipped=1.0 +2024-08-25 09:36:13,470 INFO [train.py:1114] (1/4) Epoch 5, batch 1500, loss[loss=0.3141, simple_loss=0.3526, pruned_loss=0.1005, ctc_loss=0.1865, over 19569.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3319, pruned_loss=0.09729, ctc_loss=0.1827, over 3862689.19 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0 +2024-08-25 09:42:53,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61104.0, ans=0.125 +2024-08-25 09:42:54,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=61104.0, ans=0.0 +2024-08-25 09:54:06,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=61210.666666666664, ans=0.125 +2024-08-25 09:55:08,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=61264.0, ans=0.125 +2024-08-25 09:56:05,528 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:06:44,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=61370.666666666664, ans=0.125 +2024-08-25 10:06:52,303 INFO [train.py:1114] (1/4) Epoch 5, batch 1550, loss[loss=0.3129, simple_loss=0.3472, pruned_loss=0.1022, ctc_loss=0.1853, over 19639.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3317, pruned_loss=0.09724, ctc_loss=0.1828, over 3847007.99 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0 +2024-08-25 10:10:30,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.67 vs. 
limit=12.0 +2024-08-25 10:11:27,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=61424.0, ans=0.025 +2024-08-25 10:14:47,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 1.971e+02 2.260e+02 2.611e+02 5.554e+02, threshold=4.519e+02, percent-clipped=3.0 +2024-08-25 10:17:42,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.83 vs. limit=15.0 +2024-08-25 10:19:29,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61530.666666666664, ans=0.1 +2024-08-25 10:28:13,717 INFO [train.py:1114] (1/4) Epoch 5, batch 1600, loss[loss=0.3595, simple_loss=0.3729, pruned_loss=0.1269, ctc_loss=0.2309, over 19845.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.332, pruned_loss=0.09756, ctc_loss=0.1835, over 3836328.59 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:28:15,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-08-25 10:28:35,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=61637.333333333336, ans=0.0 +2024-08-25 10:31:22,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=61690.666666666664, ans=0.125 +2024-08-25 10:31:23,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.06 vs. limit=15.0 +2024-08-25 10:39:31,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0 +2024-08-25 10:39:56,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61850.666666666664, ans=0.125 +2024-08-25 10:40:45,721 INFO [train.py:1114] (1/4) Epoch 5, batch 1650, loss[loss=0.3105, simple_loss=0.3417, pruned_loss=0.1019, ctc_loss=0.1888, over 19665.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3308, pruned_loss=0.09671, ctc_loss=0.1817, over 3833090.56 frames. 
], batch size: 59, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:41:02,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=61904.0, ans=0.125 +2024-08-25 10:42:04,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=61957.333333333336, ans=0.125 +2024-08-25 10:43:04,215 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.985e+02 2.336e+02 2.616e+02 4.728e+02, threshold=4.672e+02, percent-clipped=1.0 +2024-08-25 10:43:35,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=62010.666666666664, ans=0.125 +2024-08-25 10:43:36,451 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:44:28,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=62064.0, ans=0.125 +2024-08-25 10:45:50,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62117.333333333336, ans=0.1 +2024-08-25 10:45:57,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.83 vs. limit=22.5 +2024-08-25 10:46:09,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.53 vs. limit=15.0 +2024-08-25 10:46:43,734 INFO [train.py:1114] (1/4) Epoch 5, batch 1700, loss[loss=0.2547, simple_loss=0.2837, pruned_loss=0.08221, ctc_loss=0.1532, over 19689.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3303, pruned_loss=0.09647, ctc_loss=0.1808, over 3847837.05 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:47:39,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62170.666666666664, ans=0.0 +2024-08-25 10:48:07,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=62224.0, ans=0.0 +2024-08-25 10:48:07,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=62224.0, ans=0.0 +2024-08-25 10:48:59,145 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=2.801e+00 +2024-08-25 10:50:08,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.58 vs. limit=15.0 +2024-08-25 10:50:54,978 INFO [train.py:1114] (1/4) Epoch 5, batch 1750, loss[loss=0.2693, simple_loss=0.2987, pruned_loss=0.08654, ctc_loss=0.1672, over 19702.00 frames. ], tot_loss[loss=0.2969, simple_loss=0.3298, pruned_loss=0.09604, ctc_loss=0.1801, over 3852457.68 frames. 
], batch size: 45, lr: 2.68e-02, grad_scale: 32.0 +2024-08-25 10:51:42,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=62490.666666666664, ans=0.0 +2024-08-25 10:53:52,981 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 2.010e+02 2.326e+02 2.972e+02 6.446e+02, threshold=4.653e+02, percent-clipped=3.0 +2024-08-25 10:53:53,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=62544.0, ans=0.2 +2024-08-25 10:53:53,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=62544.0, ans=0.125 +2024-08-25 10:55:56,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62597.333333333336, ans=0.1 +2024-08-25 10:56:33,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=62650.666666666664, ans=0.0 +2024-08-25 10:57:11,524 INFO [train.py:1114] (1/4) Epoch 5, batch 1800, loss[loss=0.318, simple_loss=0.3501, pruned_loss=0.1048, ctc_loss=0.1911, over 19609.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3301, pruned_loss=0.09632, ctc_loss=0.1808, over 3853524.62 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0 +2024-08-25 10:57:11,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=62704.0, ans=0.0 +2024-08-25 10:57:55,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62757.333333333336, ans=0.125 +2024-08-25 10:58:03,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.06 vs. limit=15.0 +2024-08-25 10:58:03,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.87 vs. limit=15.0 +2024-08-25 10:58:39,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=62864.0, ans=0.125 +2024-08-25 10:59:06,170 INFO [train.py:1114] (1/4) Epoch 5, batch 1850, loss[loss=0.3189, simple_loss=0.3508, pruned_loss=0.1043, ctc_loss=0.196, over 19587.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3291, pruned_loss=0.09527, ctc_loss=0.1789, over 3856995.29 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 10:59:08,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62970.666666666664, ans=0.125 +2024-08-25 10:59:23,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=63024.0, ans=0.025 +2024-08-25 10:59:32,455 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 2.044e+02 2.314e+02 2.820e+02 4.474e+02, threshold=4.628e+02, percent-clipped=0.0 +2024-08-25 10:59:33,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-08-25 10:59:33,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.83 vs. 
limit=15.0 +2024-08-25 10:59:39,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=63077.333333333336, ans=0.0 +2024-08-25 10:59:50,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63130.666666666664, ans=0.125 +2024-08-25 11:00:03,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=63184.0, ans=0.2 +2024-08-25 11:00:05,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63184.0, ans=0.1 +2024-08-25 11:00:08,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63184.0, ans=0.1 +2024-08-25 11:00:13,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.53 vs. limit=12.0 +2024-08-25 11:00:20,619 INFO [train.py:1114] (1/4) Epoch 5, batch 1900, loss[loss=0.2659, simple_loss=0.3229, pruned_loss=0.0733, ctc_loss=0.1556, over 19659.00 frames. ], tot_loss[loss=0.2952, simple_loss=0.3291, pruned_loss=0.09503, ctc_loss=0.1784, over 3862274.50 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 11:00:20,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=63237.333333333336, ans=0.125 +2024-08-25 11:00:42,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=63290.666666666664, ans=0.125 +2024-08-25 11:01:56,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=63397.333333333336, ans=0.0 +2024-08-25 11:02:04,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=63450.666666666664, ans=22.5 +2024-08-25 11:02:12,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=63450.666666666664, ans=0.0 +2024-08-25 11:02:12,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=63450.666666666664, ans=0.0 +2024-08-25 11:02:18,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63504.0, ans=0.125 +2024-08-25 11:02:34,471 INFO [train.py:1114] (1/4) Epoch 5, batch 1950, loss[loss=0.276, simple_loss=0.3185, pruned_loss=0.08386, ctc_loss=0.1643, over 19601.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3306, pruned_loss=0.09511, ctc_loss=0.1786, over 3870831.22 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 11:03:02,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=63557.333333333336, ans=0.125 +2024-08-25 11:03:16,681 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.932e+02 2.130e+02 2.461e+02 4.838e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 11:03:19,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=63610.666666666664, ans=0.0 +2024-08-25 11:03:47,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.44 vs. limit=15.0 +2024-08-25 11:03:58,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=63664.0, ans=0.0 +2024-08-25 11:04:02,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=63717.333333333336, ans=0.0 +2024-08-25 11:04:14,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=63717.333333333336, ans=0.125 +2024-08-25 11:04:18,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0 +2024-08-25 11:04:36,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=63770.666666666664, ans=0.125 +2024-08-25 11:04:37,798 INFO [train.py:1114] (1/4) Epoch 5, batch 2000, loss[loss=0.2531, simple_loss=0.2908, pruned_loss=0.07707, ctc_loss=0.153, over 19666.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.332, pruned_loss=0.09618, ctc_loss=0.1802, over 3854746.18 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0 +2024-08-25 11:04:51,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=63824.0, ans=0.2 +2024-08-25 11:04:57,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=63824.0, ans=0.125 +2024-08-25 11:04:59,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=15.0 +2024-08-25 11:05:16,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.28 vs. limit=15.0 +2024-08-25 11:05:41,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=63930.666666666664, ans=0.05 +2024-08-25 11:06:08,373 INFO [train.py:1114] (1/4) Epoch 5, batch 2050, loss[loss=0.2582, simple_loss=0.2964, pruned_loss=0.07919, ctc_loss=0.1541, over 19741.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.331, pruned_loss=0.09604, ctc_loss=0.1802, over 3850981.22 frames. 
], batch size: 47, lr: 2.66e-02, grad_scale: 32.0 +2024-08-25 11:06:15,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=64037.333333333336, ans=0.125 +2024-08-25 11:06:17,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64037.333333333336, ans=0.1 +2024-08-25 11:06:29,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.037e+02 2.272e+02 2.892e+02 6.343e+02, threshold=4.544e+02, percent-clipped=1.0 +2024-08-25 11:07:05,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.47 vs. limit=15.0 +2024-08-25 11:07:10,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=64197.333333333336, ans=0.125 +2024-08-25 11:07:12,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0 +2024-08-25 11:07:16,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=64250.666666666664, ans=0.125 +2024-08-25 11:07:27,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=64250.666666666664, ans=0.125 +2024-08-25 11:07:48,387 INFO [train.py:1114] (1/4) Epoch 5, batch 2100, loss[loss=0.3108, simple_loss=0.3371, pruned_loss=0.1035, ctc_loss=0.1937, over 19764.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3303, pruned_loss=0.09572, ctc_loss=0.1797, over 3858018.39 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:08:29,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=64410.666666666664, ans=0.0 +2024-08-25 11:08:56,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=64517.333333333336, ans=0.0 +2024-08-25 11:09:20,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64570.666666666664, ans=0.1 +2024-08-25 11:09:21,090 INFO [train.py:1114] (1/4) Epoch 5, batch 2150, loss[loss=0.2553, simple_loss=0.3013, pruned_loss=0.07592, ctc_loss=0.1437, over 19602.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3294, pruned_loss=0.09531, ctc_loss=0.1788, over 3868961.66 frames. 
], batch size: 52, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:09:43,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=64624.0, ans=0.0 +2024-08-25 11:09:44,519 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 2.041e+02 2.279e+02 2.689e+02 3.624e+02, threshold=4.557e+02, percent-clipped=0.0 +2024-08-25 11:10:00,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=64677.333333333336, ans=0.125 +2024-08-25 11:10:07,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=64730.666666666664, ans=0.125 +2024-08-25 11:10:17,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=64730.666666666664, ans=0.125 +2024-08-25 11:10:34,037 INFO [train.py:1114] (1/4) Epoch 5, batch 2200, loss[loss=0.3222, simple_loss=0.3449, pruned_loss=0.1088, ctc_loss=0.2048, over 19577.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.329, pruned_loss=0.0951, ctc_loss=0.1785, over 3867474.71 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:10:39,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=64837.333333333336, ans=0.0 +2024-08-25 11:10:44,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=64890.666666666664, ans=0.125 +2024-08-25 11:11:06,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=64997.333333333336, ans=0.125 +2024-08-25 11:11:29,236 INFO [train.py:1114] (1/4) Epoch 5, batch 2250, loss[loss=0.3015, simple_loss=0.3496, pruned_loss=0.09211, ctc_loss=0.1729, over 19617.00 frames. ], tot_loss[loss=0.2941, simple_loss=0.3285, pruned_loss=0.09442, ctc_loss=0.1772, over 3868277.11 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 32.0 +2024-08-25 11:11:42,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.68 vs. limit=15.0 +2024-08-25 11:11:51,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=65157.333333333336, ans=0.0 +2024-08-25 11:11:51,987 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.714e+02 2.180e+02 2.514e+02 3.003e+02 5.559e+02, threshold=5.029e+02, percent-clipped=2.0 +2024-08-25 11:12:38,219 INFO [train.py:1114] (1/4) Epoch 5, batch 2300, loss[loss=0.2875, simple_loss=0.3194, pruned_loss=0.09271, ctc_loss=0.1753, over 19508.00 frames. ], tot_loss[loss=0.2941, simple_loss=0.3278, pruned_loss=0.0947, ctc_loss=0.1775, over 3861687.49 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 32.0 +2024-08-25 11:12:52,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-08-25 11:12:53,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.73 vs. 
limit=15.0 +2024-08-25 11:13:22,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65530.666666666664, ans=0.1 +2024-08-25 11:13:22,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=65530.666666666664, ans=0.2 +2024-08-25 11:13:32,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=65584.0, ans=10.0 +2024-08-25 11:13:50,077 INFO [train.py:1114] (1/4) Epoch 5, batch 2350, loss[loss=0.2953, simple_loss=0.3422, pruned_loss=0.09061, ctc_loss=0.168, over 19686.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3274, pruned_loss=0.09452, ctc_loss=0.1771, over 3864002.50 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:13:50,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=65637.33333333333, ans=0.125 +2024-08-25 11:13:50,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=65637.33333333333, ans=15.0 +2024-08-25 11:13:57,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65637.33333333333, ans=0.1 +2024-08-25 11:14:00,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=65637.33333333333, ans=0.09899494936611666 +2024-08-25 11:14:30,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=65690.66666666667, ans=0.025 +2024-08-25 11:14:31,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.936e+02 2.303e+02 2.820e+02 4.151e+02, threshold=4.606e+02, percent-clipped=0.0 +2024-08-25 11:14:48,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=65744.0, ans=0.2 +2024-08-25 11:15:23,113 INFO [train.py:1114] (1/4) Epoch 5, batch 2400, loss[loss=0.3147, simple_loss=0.3418, pruned_loss=0.105, ctc_loss=0.1938, over 19270.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3306, pruned_loss=0.0962, ctc_loss=0.1801, over 3858281.04 frames. ], batch size: 71, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:15:54,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=65957.33333333333, ans=0.125 +2024-08-25 11:16:16,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=66064.0, ans=0.125 +2024-08-25 11:16:17,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=66064.0, ans=0.125 +2024-08-25 11:16:21,117 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:16:52,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.86 vs. limit=15.0 +2024-08-25 11:16:53,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.19 vs. 
limit=15.0 +2024-08-25 11:16:56,804 INFO [train.py:1114] (1/4) Epoch 5, batch 2450, loss[loss=0.3539, simple_loss=0.3547, pruned_loss=0.1294, ctc_loss=0.2356, over 13139.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3354, pruned_loss=0.1005, ctc_loss=0.1883, over 3727883.52 frames. ], batch size: 140, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:16:59,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=66170.66666666667, ans=0.0 +2024-08-25 11:17:07,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=66224.0, ans=0.125 +2024-08-25 11:17:43,156 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 2.021e+02 2.221e+02 2.524e+02 3.558e+02, threshold=4.443e+02, percent-clipped=0.0 +2024-08-25 11:17:44,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=66277.33333333333, ans=0.0 +2024-08-25 11:17:45,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=66277.33333333333, ans=0.125 +2024-08-25 11:19:28,340 INFO [train.py:1114] (1/4) Epoch 6, batch 0, loss[loss=0.2869, simple_loss=0.3232, pruned_loss=0.09127, ctc_loss=0.1703, over 19399.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3232, pruned_loss=0.09127, ctc_loss=0.1703, over 19399.00 frames. ], batch size: 48, lr: 2.45e-02, grad_scale: 32.0 +2024-08-25 11:19:28,341 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 11:20:29,253 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.2388, simple_loss=0.3147, pruned_loss=0.05993, ctc_loss=0.1076, over 944034.00 frames. +2024-08-25 11:20:29,254 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 11:20:29,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.41 vs. limit=15.0 +2024-08-25 11:20:45,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=66432.0, ans=0.0 +2024-08-25 11:21:17,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66538.66666666667, ans=0.125 +2024-08-25 11:21:21,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=66592.0, ans=0.125 +2024-08-25 11:21:23,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=66592.0, ans=0.125 +2024-08-25 11:21:56,939 INFO [train.py:1114] (1/4) Epoch 6, batch 50, loss[loss=0.2498, simple_loss=0.2922, pruned_loss=0.07478, ctc_loss=0.1444, over 19715.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.333, pruned_loss=0.09682, ctc_loss=0.183, over 844837.63 frames. 
], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:22:15,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=66698.66666666667, ans=0.125 +2024-08-25 11:22:36,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=66752.0, ans=0.0 +2024-08-25 11:22:50,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 1.959e+02 2.174e+02 2.569e+02 5.460e+02, threshold=4.347e+02, percent-clipped=1.0 +2024-08-25 11:23:00,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66805.33333333333, ans=0.125 +2024-08-25 11:23:09,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=12.0 +2024-08-25 11:23:10,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66858.66666666667, ans=0.1 +2024-08-25 11:23:16,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=66858.66666666667, ans=0.2 +2024-08-25 11:23:18,882 INFO [train.py:1114] (1/4) Epoch 6, batch 100, loss[loss=0.2911, simple_loss=0.3221, pruned_loss=0.09482, ctc_loss=0.1763, over 19702.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3325, pruned_loss=0.09586, ctc_loss=0.1809, over 1498587.70 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:24:13,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67125.33333333333, ans=0.125 +2024-08-25 11:24:15,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=67125.33333333333, ans=0.07 +2024-08-25 11:24:21,818 INFO [train.py:1114] (1/4) Epoch 6, batch 150, loss[loss=0.2595, simple_loss=0.2932, pruned_loss=0.08255, ctc_loss=0.1518, over 19679.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3286, pruned_loss=0.09365, ctc_loss=0.1762, over 2027875.39 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:25:04,954 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.947e+02 2.172e+02 2.650e+02 4.091e+02, threshold=4.343e+02, percent-clipped=0.0 +2024-08-25 11:25:21,749 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:25:34,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=67445.33333333333, ans=0.0 +2024-08-25 11:25:35,912 INFO [train.py:1114] (1/4) Epoch 6, batch 200, loss[loss=0.3294, simple_loss=0.3437, pruned_loss=0.1127, ctc_loss=0.2242, over 18471.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3264, pruned_loss=0.09263, ctc_loss=0.1744, over 2436571.14 frames. ], batch size: 86, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:25:43,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67445.33333333333, ans=0.125 +2024-08-25 11:25:45,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.36 vs. 
limit=15.0 +2024-08-25 11:26:16,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=67552.0, ans=10.0 +2024-08-25 11:26:41,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=67605.33333333333, ans=0.2 +2024-08-25 11:26:54,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=67605.33333333333, ans=0.04949747468305833 +2024-08-25 11:26:57,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=67605.33333333333, ans=0.125 +2024-08-25 11:27:19,502 INFO [train.py:1114] (1/4) Epoch 6, batch 250, loss[loss=0.3111, simple_loss=0.3406, pruned_loss=0.1019, ctc_loss=0.1947, over 19431.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3247, pruned_loss=0.09139, ctc_loss=0.1722, over 2756537.75 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:27:36,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=67712.0, ans=0.125 +2024-08-25 11:28:16,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67818.66666666667, ans=0.1 +2024-08-25 11:28:18,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67818.66666666667, ans=0.1 +2024-08-25 11:28:23,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=67818.66666666667, ans=10.0 +2024-08-25 11:28:35,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.54 vs. limit=22.5 +2024-08-25 11:28:36,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.549e+02 1.900e+02 2.111e+02 2.483e+02 4.707e+02, threshold=4.222e+02, percent-clipped=1.0 +2024-08-25 11:29:09,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=67872.0, ans=0.2 +2024-08-25 11:29:16,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.57 vs. limit=15.0 +2024-08-25 11:29:20,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=67925.33333333333, ans=0.125 +2024-08-25 11:29:38,956 INFO [train.py:1114] (1/4) Epoch 6, batch 300, loss[loss=0.2959, simple_loss=0.3315, pruned_loss=0.09571, ctc_loss=0.1721, over 19497.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3236, pruned_loss=0.0906, ctc_loss=0.1705, over 3001602.54 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:29:43,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67978.66666666667, ans=0.1 +2024-08-25 11:30:56,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.36 vs. 
limit=15.0 +2024-08-25 11:30:57,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=68138.66666666667, ans=0.125 +2024-08-25 11:31:00,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=68138.66666666667, ans=0.07 +2024-08-25 11:31:28,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=68192.0, ans=0.125 +2024-08-25 11:31:29,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=68192.0, ans=0.0 +2024-08-25 11:31:39,928 INFO [train.py:1114] (1/4) Epoch 6, batch 350, loss[loss=0.2316, simple_loss=0.2785, pruned_loss=0.06675, ctc_loss=0.1283, over 19761.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3251, pruned_loss=0.09149, ctc_loss=0.1722, over 3191085.11 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:32:35,301 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 2.039e+02 2.360e+02 2.872e+02 5.301e+02, threshold=4.720e+02, percent-clipped=2.0 +2024-08-25 11:32:47,025 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:33:02,559 INFO [train.py:1114] (1/4) Epoch 6, batch 400, loss[loss=0.2875, simple_loss=0.3306, pruned_loss=0.08845, ctc_loss=0.1687, over 19485.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3243, pruned_loss=0.09077, ctc_loss=0.171, over 3342801.21 frames. ], batch size: 54, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:33:02,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=68512.0, ans=0.0 +2024-08-25 11:33:15,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=68565.33333333333, ans=0.0 +2024-08-25 11:33:49,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.48 vs. limit=12.0 +2024-08-25 11:33:54,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68672.0, ans=0.1 +2024-08-25 11:34:10,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=68725.33333333333, ans=0.0 +2024-08-25 11:34:11,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68725.33333333333, ans=0.1 +2024-08-25 11:34:13,402 INFO [train.py:1114] (1/4) Epoch 6, batch 450, loss[loss=0.2572, simple_loss=0.3079, pruned_loss=0.07515, ctc_loss=0.1403, over 19624.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3241, pruned_loss=0.0905, ctc_loss=0.1706, over 3451744.00 frames. 
], batch size: 55, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:34:32,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=68832.0, ans=0.2 +2024-08-25 11:34:41,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=68885.33333333333, ans=0.2 +2024-08-25 11:34:49,661 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.969e+02 2.191e+02 2.793e+02 4.218e+02, threshold=4.382e+02, percent-clipped=0.0 +2024-08-25 11:34:54,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=68938.66666666667, ans=15.0 +2024-08-25 11:34:59,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68992.0, ans=0.1 +2024-08-25 11:35:09,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69045.33333333333, ans=0.125 +2024-08-25 11:35:10,578 INFO [train.py:1114] (1/4) Epoch 6, batch 500, loss[loss=0.3109, simple_loss=0.342, pruned_loss=0.1016, ctc_loss=0.1916, over 19656.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3236, pruned_loss=0.09051, ctc_loss=0.1702, over 3546592.17 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:35:23,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.44 vs. limit=22.5 +2024-08-25 11:35:26,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69098.66666666667, ans=0.125 +2024-08-25 11:36:10,426 INFO [train.py:1114] (1/4) Epoch 6, batch 550, loss[loss=0.296, simple_loss=0.3354, pruned_loss=0.09359, ctc_loss=0.1734, over 19238.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3231, pruned_loss=0.09018, ctc_loss=0.1696, over 3609361.26 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:36:46,540 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.559e+02 2.100e+02 2.439e+02 2.966e+02 5.259e+02, threshold=4.878e+02, percent-clipped=1.0 +2024-08-25 11:37:16,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=69525.33333333333, ans=0.0 +2024-08-25 11:37:18,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.00 vs. limit=15.0 +2024-08-25 11:37:28,780 INFO [train.py:1114] (1/4) Epoch 6, batch 600, loss[loss=0.3186, simple_loss=0.3479, pruned_loss=0.1059, ctc_loss=0.1936, over 19413.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3231, pruned_loss=0.08981, ctc_loss=0.1688, over 3666238.52 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:37:43,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.89 vs. limit=15.0 +2024-08-25 11:37:49,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.35 vs. 
limit=12.0 +2024-08-25 11:37:58,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=69685.33333333333, ans=0.125 +2024-08-25 11:38:00,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=69685.33333333333, ans=0.125 +2024-08-25 11:38:46,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=69792.0, ans=0.2 +2024-08-25 11:38:56,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=69792.0, ans=0.2 +2024-08-25 11:38:58,902 INFO [train.py:1114] (1/4) Epoch 6, batch 650, loss[loss=0.2774, simple_loss=0.3159, pruned_loss=0.08728, ctc_loss=0.1609, over 19769.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3215, pruned_loss=0.08898, ctc_loss=0.1674, over 3717015.54 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:39:09,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69898.66666666667, ans=0.1 +2024-08-25 11:39:50,467 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.931e+02 2.137e+02 2.425e+02 3.711e+02, threshold=4.274e+02, percent-clipped=0.0 +2024-08-25 11:40:15,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=70112.0, ans=0.2 +2024-08-25 11:40:16,194 INFO [train.py:1114] (1/4) Epoch 6, batch 700, loss[loss=0.2688, simple_loss=0.3073, pruned_loss=0.08189, ctc_loss=0.1663, over 19712.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3214, pruned_loss=0.0888, ctc_loss=0.1671, over 3749297.31 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:40:18,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.52 vs. limit=12.0 +2024-08-25 11:40:19,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=70112.0, ans=0.125 +2024-08-25 11:40:24,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.75 vs. limit=15.0 +2024-08-25 11:40:42,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=70165.33333333333, ans=0.1 +2024-08-25 11:41:04,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=70165.33333333333, ans=0.0 +2024-08-25 11:41:13,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=70218.66666666667, ans=0.0 +2024-08-25 11:41:57,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.51 vs. limit=22.5 +2024-08-25 11:42:12,744 INFO [train.py:1114] (1/4) Epoch 6, batch 750, loss[loss=0.2784, simple_loss=0.3254, pruned_loss=0.0842, ctc_loss=0.1573, over 19507.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3218, pruned_loss=0.08904, ctc_loss=0.1675, over 3775115.39 frames. 
], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:42:14,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=70378.66666666667, ans=0.125 +2024-08-25 11:42:17,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70378.66666666667, ans=0.1 +2024-08-25 11:42:32,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.60 vs. limit=12.0 +2024-08-25 11:43:09,526 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.653e+02 2.022e+02 2.297e+02 2.693e+02 4.652e+02, threshold=4.594e+02, percent-clipped=2.0 +2024-08-25 11:43:14,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-08-25 11:43:34,906 INFO [train.py:1114] (1/4) Epoch 6, batch 800, loss[loss=0.2598, simple_loss=0.2965, pruned_loss=0.07996, ctc_loss=0.1579, over 19422.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3218, pruned_loss=0.08913, ctc_loss=0.1675, over 3795598.18 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:43:45,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.40 vs. limit=6.0 +2024-08-25 11:43:48,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=70698.66666666667, ans=0.125 +2024-08-25 11:44:33,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.20 vs. limit=15.0 +2024-08-25 11:44:46,463 INFO [train.py:1114] (1/4) Epoch 6, batch 850, loss[loss=0.3207, simple_loss=0.3587, pruned_loss=0.1016, ctc_loss=0.1987, over 19650.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3212, pruned_loss=0.08851, ctc_loss=0.1663, over 3815002.64 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:45:37,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=71018.66666666667, ans=0.05 +2024-08-25 11:45:39,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71018.66666666667, ans=0.125 +2024-08-25 11:45:45,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=71072.0, ans=0.0 +2024-08-25 11:45:46,248 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.893e+02 2.077e+02 2.374e+02 4.075e+02, threshold=4.154e+02, percent-clipped=0.0 +2024-08-25 11:46:03,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.32 vs. limit=12.0 +2024-08-25 11:46:07,493 INFO [train.py:1114] (1/4) Epoch 6, batch 900, loss[loss=0.2943, simple_loss=0.3215, pruned_loss=0.09785, ctc_loss=0.1785, over 19409.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3218, pruned_loss=0.08922, ctc_loss=0.1673, over 3819354.15 frames. 
], batch size: 48, lr: 2.39e-02, grad_scale: 16.0 +2024-08-25 11:46:09,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=71178.66666666667, ans=0.125 +2024-08-25 11:46:21,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=71232.0, ans=0.04949747468305833 +2024-08-25 11:46:32,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=71232.0, ans=0.0 +2024-08-25 11:47:16,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71392.0, ans=0.1 +2024-08-25 11:47:21,583 INFO [train.py:1114] (1/4) Epoch 6, batch 950, loss[loss=0.2855, simple_loss=0.3095, pruned_loss=0.09544, ctc_loss=0.1765, over 19493.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.322, pruned_loss=0.08934, ctc_loss=0.1678, over 3821090.00 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 16.0 +2024-08-25 11:47:29,295 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:48:12,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71552.0, ans=0.0 +2024-08-25 11:48:12,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71552.0, ans=0.1 +2024-08-25 11:48:21,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=71605.33333333333, ans=0.125 +2024-08-25 11:48:23,514 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.900e+02 2.167e+02 2.553e+02 4.088e+02, threshold=4.334e+02, percent-clipped=0.0 +2024-08-25 11:48:42,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71658.66666666667, ans=0.1 +2024-08-25 11:49:03,406 INFO [train.py:1114] (1/4) Epoch 6, batch 1000, loss[loss=0.2622, simple_loss=0.3052, pruned_loss=0.07931, ctc_loss=0.1516, over 19838.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3235, pruned_loss=0.09039, ctc_loss=0.1697, over 3815904.45 frames. 
], batch size: 52, lr: 2.38e-02, grad_scale: 16.0 +2024-08-25 11:49:04,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=71712.0, ans=0.2 +2024-08-25 11:49:13,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=71712.0, ans=0.125 +2024-08-25 11:49:14,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=71712.0, ans=0.125 +2024-08-25 11:49:14,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=71712.0, ans=0.125 +2024-08-25 11:49:43,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=71818.66666666667, ans=0.125 +2024-08-25 11:49:51,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=71818.66666666667, ans=0.125 +2024-08-25 11:50:06,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=71872.0, ans=0.125 +2024-08-25 11:50:18,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=71872.0, ans=0.0 +2024-08-25 11:50:57,802 INFO [train.py:1114] (1/4) Epoch 6, batch 1050, loss[loss=0.3094, simple_loss=0.3434, pruned_loss=0.09873, ctc_loss=0.1951, over 19835.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3226, pruned_loss=0.09031, ctc_loss=0.1697, over 3822457.66 frames. ], batch size: 57, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:51:08,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=71978.66666666667, ans=0.0 +2024-08-25 11:51:09,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.46 vs. limit=22.5 +2024-08-25 11:51:12,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=72032.0, ans=0.2 +2024-08-25 11:51:14,759 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:51:24,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=72032.0, ans=0.025 +2024-08-25 11:51:32,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.10 vs. limit=15.0 +2024-08-25 11:52:00,137 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.944e+02 2.201e+02 2.550e+02 3.957e+02, threshold=4.403e+02, percent-clipped=0.0 +2024-08-25 11:52:25,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72138.66666666667, ans=0.125 +2024-08-25 11:52:28,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=12.0 +2024-08-25 11:52:48,884 INFO [train.py:1114] (1/4) Epoch 6, batch 1100, loss[loss=0.2905, simple_loss=0.3208, pruned_loss=0.09312, ctc_loss=0.1847, over 19591.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3221, pruned_loss=0.08977, ctc_loss=0.1687, over 3829725.50 frames. 
], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:52:56,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72245.33333333333, ans=0.1 +2024-08-25 11:52:56,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72245.33333333333, ans=0.125 +2024-08-25 11:52:59,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=72245.33333333333, ans=0.0 +2024-08-25 11:52:59,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.46 vs. limit=15.0 +2024-08-25 11:53:26,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=72352.0, ans=0.0 +2024-08-25 11:53:58,641 INFO [train.py:1114] (1/4) Epoch 6, batch 1150, loss[loss=0.2686, simple_loss=0.3134, pruned_loss=0.0816, ctc_loss=0.1514, over 19583.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3218, pruned_loss=0.08992, ctc_loss=0.1687, over 3827941.44 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:54:22,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=72565.33333333333, ans=0.07 +2024-08-25 11:54:35,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=72618.66666666667, ans=0.125 +2024-08-25 11:54:41,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=72672.0, ans=0.0 +2024-08-25 11:54:43,439 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.952e+02 2.194e+02 2.505e+02 4.680e+02, threshold=4.387e+02, percent-clipped=1.0 +2024-08-25 11:54:50,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=72672.0, ans=0.0 +2024-08-25 11:54:54,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=72725.33333333333, ans=0.125 +2024-08-25 11:55:00,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.55 vs. limit=5.0 +2024-08-25 11:55:04,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=72725.33333333333, ans=0.0 +2024-08-25 11:55:11,873 INFO [train.py:1114] (1/4) Epoch 6, batch 1200, loss[loss=0.3016, simple_loss=0.3494, pruned_loss=0.0913, ctc_loss=0.1778, over 19837.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3232, pruned_loss=0.09048, ctc_loss=0.17, over 3823993.67 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:56:22,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=72938.66666666667, ans=0.0 +2024-08-25 11:56:25,416 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=12.0 +2024-08-25 11:56:26,781 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.47 vs. 
limit=15.0 +2024-08-25 11:56:31,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.04 vs. limit=10.0 +2024-08-25 11:56:43,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. limit=10.0 +2024-08-25 11:56:55,076 INFO [train.py:1114] (1/4) Epoch 6, batch 1250, loss[loss=0.2973, simple_loss=0.34, pruned_loss=0.09391, ctc_loss=0.1671, over 19520.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3232, pruned_loss=0.09003, ctc_loss=0.1691, over 3841866.82 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:56:58,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=73045.33333333333, ans=0.125 +2024-08-25 11:57:05,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.57 vs. limit=15.0 +2024-08-25 11:57:15,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.77 vs. limit=15.0 +2024-08-25 11:57:28,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=73098.66666666667, ans=0.125 +2024-08-25 11:57:53,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=73152.0, ans=0.025 +2024-08-25 11:58:02,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=73205.33333333333, ans=0.2 +2024-08-25 11:58:03,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=73205.33333333333, ans=0.0 +2024-08-25 11:58:13,319 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.073e+02 2.305e+02 2.660e+02 4.224e+02, threshold=4.609e+02, percent-clipped=0.0 +2024-08-25 11:58:19,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=73205.33333333333, ans=0.2 +2024-08-25 11:58:32,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=73258.66666666667, ans=0.125 +2024-08-25 11:58:43,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=73258.66666666667, ans=0.2 +2024-08-25 11:58:46,696 INFO [train.py:1114] (1/4) Epoch 6, batch 1300, loss[loss=0.3262, simple_loss=0.3529, pruned_loss=0.1092, ctc_loss=0.2027, over 18887.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3221, pruned_loss=0.08924, ctc_loss=0.1675, over 3845848.26 frames. 
], batch size: 76, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:58:47,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=73312.0, ans=0.0 +2024-08-25 11:59:00,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=73312.0, ans=0.1 +2024-08-25 11:59:23,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73365.33333333333, ans=0.1 +2024-08-25 11:59:28,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=73365.33333333333, ans=0.025 +2024-08-25 11:59:41,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=73418.66666666667, ans=0.125 +2024-08-25 11:59:50,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=73418.66666666667, ans=0.125 +2024-08-25 11:59:52,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73472.0, ans=0.125 +2024-08-25 12:00:12,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=73525.33333333333, ans=0.0 +2024-08-25 12:00:19,977 INFO [train.py:1114] (1/4) Epoch 6, batch 1350, loss[loss=0.2887, simple_loss=0.3256, pruned_loss=0.09096, ctc_loss=0.1746, over 19757.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3211, pruned_loss=0.08866, ctc_loss=0.1665, over 3856087.46 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 12:01:05,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.025e+02 2.295e+02 2.579e+02 4.133e+02, threshold=4.590e+02, percent-clipped=0.0 +2024-08-25 12:01:05,216 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.479e-01 +2024-08-25 12:01:12,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73738.66666666667, ans=0.1 +2024-08-25 12:01:14,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=73738.66666666667, ans=0.0 +2024-08-25 12:01:30,913 INFO [train.py:1114] (1/4) Epoch 6, batch 1400, loss[loss=0.2709, simple_loss=0.3002, pruned_loss=0.08866, ctc_loss=0.1606, over 19654.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3209, pruned_loss=0.08852, ctc_loss=0.1663, over 3863972.67 frames. ], batch size: 46, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:02:05,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73898.66666666667, ans=0.1 +2024-08-25 12:02:08,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=73898.66666666667, ans=0.0 +2024-08-25 12:02:52,164 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:02:52,963 INFO [train.py:1114] (1/4) Epoch 6, batch 1450, loss[loss=0.2845, simple_loss=0.3316, pruned_loss=0.08719, ctc_loss=0.1576, over 19687.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3214, pruned_loss=0.08853, ctc_loss=0.1664, over 3862438.35 frames. 
], batch size: 63, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:02:54,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=74112.0, ans=0.035 +2024-08-25 12:02:55,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.70 vs. limit=22.5 +2024-08-25 12:03:04,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=74165.33333333333, ans=0.125 +2024-08-25 12:03:39,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=74218.66666666667, ans=0.0 +2024-08-25 12:03:48,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=74218.66666666667, ans=0.2 +2024-08-25 12:03:53,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.572e+02 1.998e+02 2.330e+02 2.811e+02 4.670e+02, threshold=4.661e+02, percent-clipped=1.0 +2024-08-25 12:04:03,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-08-25 12:04:04,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=74325.33333333333, ans=0.125 +2024-08-25 12:04:10,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.38 vs. limit=22.5 +2024-08-25 12:04:25,535 INFO [train.py:1114] (1/4) Epoch 6, batch 1500, loss[loss=0.2811, simple_loss=0.3184, pruned_loss=0.0883, ctc_loss=0.168, over 19592.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3214, pruned_loss=0.08834, ctc_loss=0.166, over 3862805.82 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:04:42,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=74432.0, ans=0.125 +2024-08-25 12:04:43,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=74432.0, ans=0.125 +2024-08-25 12:04:59,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=74485.33333333333, ans=0.0 +2024-08-25 12:04:59,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74485.33333333333, ans=0.125 +2024-08-25 12:05:51,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-08-25 12:06:01,373 INFO [train.py:1114] (1/4) Epoch 6, batch 1550, loss[loss=0.2939, simple_loss=0.3352, pruned_loss=0.09201, ctc_loss=0.1713, over 19611.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3218, pruned_loss=0.08878, ctc_loss=0.1668, over 3848623.69 frames. 
], batch size: 60, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:06:01,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=74645.33333333333, ans=0.0 +2024-08-25 12:06:24,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=74752.0, ans=0.2 +2024-08-25 12:06:37,903 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 2.061e+02 2.512e+02 3.027e+02 4.789e+02, threshold=5.024e+02, percent-clipped=1.0 +2024-08-25 12:06:45,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.39 vs. limit=15.0 +2024-08-25 12:06:46,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=74858.66666666667, ans=0.025 +2024-08-25 12:07:01,756 INFO [train.py:1114] (1/4) Epoch 6, batch 1600, loss[loss=0.2881, simple_loss=0.333, pruned_loss=0.08872, ctc_loss=0.1642, over 19862.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3205, pruned_loss=0.08814, ctc_loss=0.1656, over 3838342.69 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:07:03,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=74912.0, ans=0.0 +2024-08-25 12:07:14,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74965.33333333333, ans=0.1 +2024-08-25 12:07:17,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74965.33333333333, ans=0.1 +2024-08-25 12:07:23,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.67 vs. limit=10.0 +2024-08-25 12:07:45,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=75072.0, ans=0.125 +2024-08-25 12:07:48,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=75125.33333333333, ans=0.125 +2024-08-25 12:07:56,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=75125.33333333333, ans=0.1 +2024-08-25 12:08:00,980 INFO [train.py:1114] (1/4) Epoch 6, batch 1650, loss[loss=0.3019, simple_loss=0.3406, pruned_loss=0.09614, ctc_loss=0.1773, over 19633.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3204, pruned_loss=0.08808, ctc_loss=0.1653, over 3835386.23 frames. 
], batch size: 59, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:08:02,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=75178.66666666667, ans=0.0 +2024-08-25 12:08:11,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75232.0, ans=0.1 +2024-08-25 12:08:18,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=75232.0, ans=0.125 +2024-08-25 12:08:26,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=75285.33333333333, ans=0.0 +2024-08-25 12:08:37,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75338.66666666667, ans=0.125 +2024-08-25 12:08:37,757 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.893e+02 2.381e+02 2.784e+02 7.281e+02, threshold=4.762e+02, percent-clipped=1.0 +2024-08-25 12:08:45,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=75338.66666666667, ans=0.125 +2024-08-25 12:08:52,556 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:08:56,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=75392.0, ans=0.125 +2024-08-25 12:09:00,114 INFO [train.py:1114] (1/4) Epoch 6, batch 1700, loss[loss=0.2241, simple_loss=0.2694, pruned_loss=0.06461, ctc_loss=0.1238, over 19660.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3202, pruned_loss=0.0877, ctc_loss=0.1647, over 3848577.47 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 32.0 +2024-08-25 12:09:06,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.62 vs. limit=15.0 +2024-08-25 12:09:25,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75552.0, ans=0.125 +2024-08-25 12:09:28,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=75552.0, ans=0.125 +2024-08-25 12:09:31,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75552.0, ans=0.1 +2024-08-25 12:09:55,906 INFO [train.py:1114] (1/4) Epoch 6, batch 1750, loss[loss=0.236, simple_loss=0.2838, pruned_loss=0.06793, ctc_loss=0.1307, over 19642.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3197, pruned_loss=0.08766, ctc_loss=0.1645, over 3853401.56 frames. 
], batch size: 45, lr: 2.33e-02, grad_scale: 16.0 +2024-08-25 12:09:59,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=75712.0, ans=0.2 +2024-08-25 12:10:03,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=75712.0, ans=0.125 +2024-08-25 12:10:03,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=75712.0, ans=0.0 +2024-08-25 12:10:25,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75818.66666666667, ans=0.1 +2024-08-25 12:10:29,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=75872.0, ans=0.0 +2024-08-25 12:10:32,749 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.890e+02 2.130e+02 2.587e+02 4.262e+02, threshold=4.260e+02, percent-clipped=0.0 +2024-08-25 12:10:50,356 INFO [train.py:1114] (1/4) Epoch 6, batch 1800, loss[loss=0.2852, simple_loss=0.3298, pruned_loss=0.08791, ctc_loss=0.1621, over 19621.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3208, pruned_loss=0.08845, ctc_loss=0.1659, over 3854754.97 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 8.0 +2024-08-25 12:10:52,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=75978.66666666667, ans=0.0 +2024-08-25 12:11:22,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76138.66666666667, ans=0.1 +2024-08-25 12:11:36,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=76192.0, ans=0.125 +2024-08-25 12:11:44,875 INFO [train.py:1114] (1/4) Epoch 6, batch 1850, loss[loss=0.2582, simple_loss=0.3079, pruned_loss=0.07531, ctc_loss=0.1447, over 19601.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3203, pruned_loss=0.0882, ctc_loss=0.1653, over 3857229.03 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:11:45,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.02 vs. limit=15.0 +2024-08-25 12:11:46,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.55 vs. 
limit=15.0 +2024-08-25 12:11:49,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=76245.33333333333, ans=0.07 +2024-08-25 12:11:49,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76245.33333333333, ans=0.1 +2024-08-25 12:12:06,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=76352.0, ans=0.025 +2024-08-25 12:12:06,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=76352.0, ans=0.0 +2024-08-25 12:12:20,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=76405.33333333333, ans=0.125 +2024-08-25 12:12:22,231 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.994e+02 2.285e+02 2.712e+02 4.413e+02, threshold=4.569e+02, percent-clipped=2.0 +2024-08-25 12:12:43,370 INFO [train.py:1114] (1/4) Epoch 6, batch 1900, loss[loss=0.2715, simple_loss=0.3216, pruned_loss=0.08023, ctc_loss=0.1524, over 19651.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3212, pruned_loss=0.08857, ctc_loss=0.1661, over 3861251.94 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:12:45,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=76512.0, ans=0.2 +2024-08-25 12:12:54,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=76565.33333333333, ans=0.2 +2024-08-25 12:12:54,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=76565.33333333333, ans=0.0 +2024-08-25 12:12:54,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=76565.33333333333, ans=0.2 +2024-08-25 12:12:54,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=76565.33333333333, ans=0.125 +2024-08-25 12:13:02,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=76565.33333333333, ans=0.125 +2024-08-25 12:13:09,242 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.60 vs. limit=10.0 +2024-08-25 12:13:25,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=76672.0, ans=0.125 +2024-08-25 12:13:30,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=76725.33333333333, ans=0.0 +2024-08-25 12:13:40,517 INFO [train.py:1114] (1/4) Epoch 6, batch 1950, loss[loss=0.261, simple_loss=0.3035, pruned_loss=0.07966, ctc_loss=0.1476, over 19585.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3223, pruned_loss=0.08896, ctc_loss=0.1668, over 3870447.06 frames. 
], batch size: 52, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:13:45,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76778.66666666667, ans=0.1 +2024-08-25 12:14:05,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=76885.33333333333, ans=0.0 +2024-08-25 12:14:18,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 1.890e+02 2.137e+02 2.349e+02 3.743e+02, threshold=4.275e+02, percent-clipped=0.0 +2024-08-25 12:14:28,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=76992.0, ans=0.125 +2024-08-25 12:14:35,993 INFO [train.py:1114] (1/4) Epoch 6, batch 2000, loss[loss=0.2677, simple_loss=0.3018, pruned_loss=0.08605, ctc_loss=0.1536, over 19686.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3225, pruned_loss=0.08916, ctc_loss=0.1669, over 3854966.03 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:14:59,405 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:15:26,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77258.66666666667, ans=0.1 +2024-08-25 12:15:27,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=77258.66666666667, ans=0.125 +2024-08-25 12:15:30,080 INFO [train.py:1114] (1/4) Epoch 6, batch 2050, loss[loss=0.2082, simple_loss=0.2619, pruned_loss=0.05527, ctc_loss=0.1098, over 19698.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3207, pruned_loss=0.0882, ctc_loss=0.1652, over 3851842.29 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:15:44,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.44 vs. 
limit=15.0 +2024-08-25 12:15:49,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=77365.33333333333, ans=0.025 +2024-08-25 12:15:50,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=77365.33333333333, ans=0.0 +2024-08-25 12:15:53,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=77365.33333333333, ans=0.2 +2024-08-25 12:16:04,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77418.66666666667, ans=0.0 +2024-08-25 12:16:04,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=77418.66666666667, ans=0.125 +2024-08-25 12:16:06,100 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:16:13,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=77472.0, ans=0.125 +2024-08-25 12:16:14,693 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.955e+02 2.380e+02 2.986e+02 1.021e+03, threshold=4.760e+02, percent-clipped=7.0 +2024-08-25 12:16:20,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-08-25 12:16:25,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=77525.33333333333, ans=0.125 +2024-08-25 12:16:32,225 INFO [train.py:1114] (1/4) Epoch 6, batch 2100, loss[loss=0.3168, simple_loss=0.3425, pruned_loss=0.1053, ctc_loss=0.2016, over 19786.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3195, pruned_loss=0.08717, ctc_loss=0.1633, over 3857919.47 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:16:54,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=77685.33333333333, ans=0.0 +2024-08-25 12:16:55,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=77685.33333333333, ans=0.2 +2024-08-25 12:17:03,396 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:17:14,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77738.66666666667, ans=0.1 +2024-08-25 12:17:23,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=77792.0, ans=0.125 +2024-08-25 12:17:28,076 INFO [train.py:1114] (1/4) Epoch 6, batch 2150, loss[loss=0.257, simple_loss=0.3059, pruned_loss=0.07625, ctc_loss=0.1392, over 19583.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3185, pruned_loss=0.08653, ctc_loss=0.1619, over 3869040.43 frames. 
], batch size: 52, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:17:33,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=77845.33333333333, ans=0.2 +2024-08-25 12:17:43,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=77898.66666666667, ans=0.0 +2024-08-25 12:18:04,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=12.0 +2024-08-25 12:18:07,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=77952.0, ans=0.0 +2024-08-25 12:18:15,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78005.33333333333, ans=0.1 +2024-08-25 12:18:19,497 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.877e+02 2.258e+02 2.799e+02 6.726e+02, threshold=4.515e+02, percent-clipped=2.0 +2024-08-25 12:18:19,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=78005.33333333333, ans=0.125 +2024-08-25 12:18:25,172 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:19:06,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=78112.0, ans=0.0 +2024-08-25 12:19:06,973 INFO [train.py:1114] (1/4) Epoch 6, batch 2200, loss[loss=0.2829, simple_loss=0.3266, pruned_loss=0.08565, ctc_loss=0.1694, over 19604.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3187, pruned_loss=0.08683, ctc_loss=0.1628, over 3868027.54 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:19:17,046 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=2.522e-03 +2024-08-25 12:19:28,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=78218.66666666667, ans=0.125 +2024-08-25 12:19:45,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=78272.0, ans=0.2 +2024-08-25 12:20:02,390 INFO [train.py:1114] (1/4) Epoch 6, batch 2250, loss[loss=0.2947, simple_loss=0.3354, pruned_loss=0.09168, ctc_loss=0.1765, over 19604.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3192, pruned_loss=0.08704, ctc_loss=0.1633, over 3868605.32 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:20:16,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.49 vs. 
limit=15.0 +2024-08-25 12:20:18,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=78432.0, ans=0.2 +2024-08-25 12:20:30,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78485.33333333333, ans=0.125 +2024-08-25 12:20:38,631 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 2.005e+02 2.234e+02 2.581e+02 4.325e+02, threshold=4.468e+02, percent-clipped=0.0 +2024-08-25 12:20:52,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=78592.0, ans=0.2 +2024-08-25 12:20:56,310 INFO [train.py:1114] (1/4) Epoch 6, batch 2300, loss[loss=0.2316, simple_loss=0.2837, pruned_loss=0.06592, ctc_loss=0.1193, over 19509.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3179, pruned_loss=0.0869, ctc_loss=0.1627, over 3862070.77 frames. ], batch size: 49, lr: 2.29e-02, grad_scale: 16.0 +2024-08-25 12:21:01,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78645.33333333333, ans=0.0 +2024-08-25 12:21:14,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=78698.66666666667, ans=0.125 +2024-08-25 12:21:21,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=15.0 +2024-08-25 12:21:23,041 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:21:33,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78805.33333333333, ans=0.125 +2024-08-25 12:21:46,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=78858.66666666667, ans=0.125 +2024-08-25 12:21:52,656 INFO [train.py:1114] (1/4) Epoch 6, batch 2350, loss[loss=0.2723, simple_loss=0.3204, pruned_loss=0.08259, ctc_loss=0.1476, over 19696.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3181, pruned_loss=0.08685, ctc_loss=0.1625, over 3864413.14 frames. ], batch size: 63, lr: 2.29e-02, grad_scale: 16.0 +2024-08-25 12:22:28,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=79072.0, ans=0.04949747468305833 +2024-08-25 12:22:30,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 2.097e+02 2.553e+02 3.084e+02 6.792e+02, threshold=5.106e+02, percent-clipped=2.0 +2024-08-25 12:22:32,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.20 vs. limit=15.0 +2024-08-25 12:22:41,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=79125.33333333333, ans=0.0 +2024-08-25 12:22:43,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=79125.33333333333, ans=0.025 +2024-08-25 12:22:47,955 INFO [train.py:1114] (1/4) Epoch 6, batch 2400, loss[loss=0.3048, simple_loss=0.337, pruned_loss=0.09917, ctc_loss=0.1857, over 19418.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3209, pruned_loss=0.08816, ctc_loss=0.1647, over 3858376.52 frames. 
], batch size: 67, lr: 2.29e-02, grad_scale: 32.0 +2024-08-25 12:22:58,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.87 vs. limit=22.5 +2024-08-25 12:23:00,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.24 vs. limit=15.0 +2024-08-25 12:23:06,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.40 vs. limit=15.0 +2024-08-25 12:23:07,124 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.78 vs. limit=15.0 +2024-08-25 12:23:32,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=79338.66666666667, ans=0.125 +2024-08-25 12:23:44,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=15.0 +2024-08-25 12:23:45,698 INFO [train.py:1114] (1/4) Epoch 6, batch 2450, loss[loss=0.3661, simple_loss=0.3674, pruned_loss=0.1329, ctc_loss=0.2476, over 13027.00 frames. ], tot_loss[loss=0.29, simple_loss=0.326, pruned_loss=0.09241, ctc_loss=0.173, over 3730840.69 frames. ], batch size: 140, lr: 2.29e-02, grad_scale: 32.0 +2024-08-25 12:23:50,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.97 vs. limit=6.0 +2024-08-25 12:24:16,722 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.36 vs. limit=15.0 +2024-08-25 12:25:42,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=79498.66666666667, ans=0.125 +2024-08-25 12:27:14,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79552.0, ans=0.125 +2024-08-25 12:28:01,648 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.548e+02 2.056e+02 2.291e+02 2.526e+02 5.572e+02, threshold=4.582e+02, percent-clipped=1.0 +2024-08-25 12:29:27,604 INFO [train.py:1114] (1/4) Epoch 7, batch 0, loss[loss=0.2804, simple_loss=0.3119, pruned_loss=0.09087, ctc_loss=0.1677, over 19404.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3119, pruned_loss=0.09087, ctc_loss=0.1677, over 19404.00 frames. ], batch size: 48, lr: 2.14e-02, grad_scale: 32.0 +2024-08-25 12:29:27,605 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 12:29:44,285 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.2269, simple_loss=0.307, pruned_loss=0.05393, ctc_loss=0.0975, over 944034.00 frames. 
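A note on the `optim.py` warnings interleaved above: the five "grad-norm quartiles" are min/25%/50%/75%/max over a window of recent global gradient norms, and the reported `threshold` is consistently `Clipping_scale` times the median (at 12:28:01 just above, 2.0 × 2.291e+02 = 4.582e+02). Below is a minimal sketch of that kind of median-based clipping, assuming a sliding window of recent norms; it is an illustration inferred from the log format, not icefall's actual `optim.py`.

```python
from collections import deque

import torch


class QuartileGradClipper:
    """Clip gradients against clipping_scale * median of recent grad norms.

    Sketch only: the window size and the exact statistics kept are
    assumptions; the real optimizer also reports the fraction of recent
    batches that were clipped ("percent-clipped").
    """

    def __init__(self, clipping_scale: float = 2.0, window: int = 1024):
        self.clipping_scale = clipping_scale
        self.norm_history = deque(maxlen=window)

    def __call__(self, params) -> float:
        params = [p for p in params if p.grad is not None]
        # Global grad norm: L2 norm over the per-parameter grad norms.
        total_norm = torch.norm(torch.stack([p.grad.norm() for p in params]))
        self.norm_history.append(float(total_norm))

        norms = torch.tensor(list(self.norm_history))
        # min / 25% / 50% / 75% / max, matching the five logged quartile values.
        q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2]  # 2.0 * median, as in the log

        if total_norm > threshold:  # would count toward percent-clipped
            for p in params:
                p.grad.mul_(threshold / total_norm)
        return float(total_norm)
```

Run against the quartiles logged at 12:20:38 above (1.553e+02 2.005e+02 2.234e+02 2.581e+02 4.325e+02), this rule gives threshold = 2.0 × 2.234e+02 = 4.468e+02, matching the logged value.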
+2024-08-25 12:29:44,285 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 12:29:47,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=79658.66666666667, ans=0.125 +2024-08-25 12:31:09,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79765.33333333333, ans=0.1 +2024-08-25 12:31:41,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79818.66666666667, ans=0.1 +2024-08-25 12:31:41,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=79818.66666666667, ans=0.0 +2024-08-25 12:33:04,672 INFO [train.py:1114] (1/4) Epoch 7, batch 50, loss[loss=0.2531, simple_loss=0.2944, pruned_loss=0.07757, ctc_loss=0.1413, over 19704.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3232, pruned_loss=0.08922, ctc_loss=0.1693, over 845399.71 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0 +2024-08-25 12:33:56,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=80085.33333333333, ans=0.2 +2024-08-25 12:34:17,264 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.570e+02 1.999e+02 2.246e+02 2.808e+02 5.514e+02, threshold=4.492e+02, percent-clipped=3.0 +2024-08-25 12:34:24,293 INFO [train.py:1114] (1/4) Epoch 7, batch 100, loss[loss=0.2355, simple_loss=0.2914, pruned_loss=0.06565, ctc_loss=0.1206, over 19722.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3226, pruned_loss=0.08785, ctc_loss=0.1661, over 1499399.80 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:34:34,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=80192.0, ans=0.04949747468305833 +2024-08-25 12:35:11,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=80405.33333333333, ans=0.0 +2024-08-25 12:35:21,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=80405.33333333333, ans=0.1 +2024-08-25 12:35:21,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0 +2024-08-25 12:35:23,295 INFO [train.py:1114] (1/4) Epoch 7, batch 150, loss[loss=0.2494, simple_loss=0.2951, pruned_loss=0.07369, ctc_loss=0.1406, over 19722.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3188, pruned_loss=0.08537, ctc_loss=0.1615, over 2027591.77 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:35:30,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=80458.66666666667, ans=0.125 +2024-08-25 12:35:50,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.32 vs. 
limit=22.5 +2024-08-25 12:36:05,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=80618.66666666667, ans=0.04949747468305833 +2024-08-25 12:36:09,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.17 vs. limit=15.0 +2024-08-25 12:36:18,832 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.959e+02 2.217e+02 2.953e+02 5.735e+02, threshold=4.434e+02, percent-clipped=2.0 +2024-08-25 12:36:26,006 INFO [train.py:1114] (1/4) Epoch 7, batch 200, loss[loss=0.2939, simple_loss=0.3357, pruned_loss=0.09189, ctc_loss=0.1707, over 18201.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.3162, pruned_loss=0.08379, ctc_loss=0.1585, over 2435660.66 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:36:30,751 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:36:57,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80832.0, ans=0.1 +2024-08-25 12:37:00,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80885.33333333333, ans=0.125 +2024-08-25 12:37:02,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80885.33333333333, ans=0.1 +2024-08-25 12:37:07,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80885.33333333333, ans=0.1 +2024-08-25 12:37:22,871 INFO [train.py:1114] (1/4) Epoch 7, batch 250, loss[loss=0.3101, simple_loss=0.341, pruned_loss=0.1018, ctc_loss=0.189, over 19374.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3159, pruned_loss=0.08417, ctc_loss=0.1591, over 2757003.56 frames. 
], batch size: 67, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:37:23,218 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:37:44,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=81045.33333333333, ans=0.0 +2024-08-25 12:38:07,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=81152.0, ans=0.125 +2024-08-25 12:38:14,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=81205.33333333333, ans=0.125 +2024-08-25 12:38:16,686 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.901e+02 2.294e+02 2.833e+02 4.254e+02, threshold=4.587e+02, percent-clipped=0.0 +2024-08-25 12:38:17,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=81205.33333333333, ans=0.125 +2024-08-25 12:38:18,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=81205.33333333333, ans=0.0 +2024-08-25 12:38:22,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=81258.66666666667, ans=0.125 +2024-08-25 12:38:23,341 INFO [train.py:1114] (1/4) Epoch 7, batch 300, loss[loss=0.2854, simple_loss=0.3251, pruned_loss=0.08797, ctc_loss=0.1744, over 19528.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.315, pruned_loss=0.08336, ctc_loss=0.1578, over 3002026.24 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0 +2024-08-25 12:38:43,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=81312.0, ans=0.125 +2024-08-25 12:39:06,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=81418.66666666667, ans=0.04949747468305833 +2024-08-25 12:39:52,667 INFO [train.py:1114] (1/4) Epoch 7, batch 350, loss[loss=0.276, simple_loss=0.304, pruned_loss=0.08996, ctc_loss=0.1702, over 19747.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3168, pruned_loss=0.08488, ctc_loss=0.1605, over 3191705.25 frames. ], batch size: 48, lr: 2.12e-02, grad_scale: 32.0 +2024-08-25 12:40:21,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.32 vs. limit=22.5 +2024-08-25 12:40:35,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81685.33333333333, ans=0.1 +2024-08-25 12:40:43,962 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.980e+02 2.268e+02 2.810e+02 5.782e+02, threshold=4.535e+02, percent-clipped=1.0 +2024-08-25 12:40:50,663 INFO [train.py:1114] (1/4) Epoch 7, batch 400, loss[loss=0.2924, simple_loss=0.3373, pruned_loss=0.08992, ctc_loss=0.1689, over 19508.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3158, pruned_loss=0.08389, ctc_loss=0.1585, over 3343929.88 frames. 
], batch size: 54, lr: 2.12e-02, grad_scale: 32.0 +2024-08-25 12:40:53,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=81792.0, ans=0.2 +2024-08-25 12:41:01,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=81845.33333333333, ans=0.125 +2024-08-25 12:41:19,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=81898.66666666667, ans=0.125 +2024-08-25 12:41:21,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=81898.66666666667, ans=0.125 +2024-08-25 12:41:30,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=81952.0, ans=0.025 +2024-08-25 12:41:52,328 INFO [train.py:1114] (1/4) Epoch 7, batch 450, loss[loss=0.2488, simple_loss=0.3105, pruned_loss=0.06808, ctc_loss=0.1276, over 19623.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3157, pruned_loss=0.08397, ctc_loss=0.1585, over 3450438.20 frames. ], batch size: 55, lr: 2.11e-02, grad_scale: 32.0 +2024-08-25 12:42:28,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=82218.66666666667, ans=0.02 +2024-08-25 12:42:29,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=82218.66666666667, ans=0.05 +2024-08-25 12:42:30,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=82218.66666666667, ans=0.2 +2024-08-25 12:42:30,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82218.66666666667, ans=0.1 +2024-08-25 12:42:33,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.43 vs. limit=15.0 +2024-08-25 12:42:33,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=82218.66666666667, ans=0.07 +2024-08-25 12:42:43,142 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 1.947e+02 2.448e+02 2.960e+02 4.262e+02, threshold=4.896e+02, percent-clipped=0.0 +2024-08-25 12:42:43,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.73 vs. limit=22.5 +2024-08-25 12:42:47,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=82272.0, ans=0.125 +2024-08-25 12:42:52,045 INFO [train.py:1114] (1/4) Epoch 7, batch 500, loss[loss=0.3005, simple_loss=0.3423, pruned_loss=0.09426, ctc_loss=0.1752, over 19678.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3149, pruned_loss=0.08346, ctc_loss=0.1572, over 3547137.89 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 32.0 +2024-08-25 12:42:52,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. 
limit=15.0 +2024-08-25 12:42:55,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82325.33333333333, ans=0.1 +2024-08-25 12:42:57,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=82325.33333333333, ans=0.125 +2024-08-25 12:42:58,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82325.33333333333, ans=0.1 +2024-08-25 12:43:03,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=82325.33333333333, ans=0.0 +2024-08-25 12:43:09,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.68 vs. limit=22.5 +2024-08-25 12:43:31,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=82485.33333333333, ans=0.125 +2024-08-25 12:43:34,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=82485.33333333333, ans=0.04949747468305833 +2024-08-25 12:43:41,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=82538.66666666667, ans=0.0 +2024-08-25 12:43:51,841 INFO [train.py:1114] (1/4) Epoch 7, batch 550, loss[loss=0.3222, simple_loss=0.3509, pruned_loss=0.1065, ctc_loss=0.2011, over 19309.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3145, pruned_loss=0.08327, ctc_loss=0.1565, over 3609288.66 frames. ], batch size: 71, lr: 2.11e-02, grad_scale: 32.0 +2024-08-25 12:43:53,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=82592.0, ans=0.125 +2024-08-25 12:43:58,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=82592.0, ans=0.2 +2024-08-25 12:44:44,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 2.000e+02 2.364e+02 2.910e+02 5.356e+02, threshold=4.728e+02, percent-clipped=1.0 +2024-08-25 12:44:52,597 INFO [train.py:1114] (1/4) Epoch 7, batch 600, loss[loss=0.3204, simple_loss=0.347, pruned_loss=0.106, ctc_loss=0.2045, over 19404.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3148, pruned_loss=0.08341, ctc_loss=0.1567, over 3666929.36 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0 +2024-08-25 12:44:56,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82858.66666666667, ans=0.125 +2024-08-25 12:44:58,339 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=15.0 +2024-08-25 12:45:07,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=82912.0, ans=0.125 +2024-08-25 12:45:10,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=82912.0, ans=0.0 +2024-08-25 12:45:20,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.52 vs. 
limit=15.0 +2024-08-25 12:45:25,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=82965.33333333333, ans=0.125 +2024-08-25 12:45:41,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0 +2024-08-25 12:45:51,129 INFO [train.py:1114] (1/4) Epoch 7, batch 650, loss[loss=0.2626, simple_loss=0.3086, pruned_loss=0.07932, ctc_loss=0.1449, over 19777.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3134, pruned_loss=0.08258, ctc_loss=0.1552, over 3716872.42 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-25 12:45:59,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=83125.33333333333, ans=0.125 +2024-08-25 12:46:11,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.68 vs. limit=15.0 +2024-08-25 12:46:47,172 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.844e+02 2.004e+02 2.285e+02 4.065e+02, threshold=4.009e+02, percent-clipped=0.0 +2024-08-25 12:46:52,901 INFO [train.py:1114] (1/4) Epoch 7, batch 700, loss[loss=0.2953, simple_loss=0.3257, pruned_loss=0.0965, ctc_loss=0.1797, over 19715.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3143, pruned_loss=0.08327, ctc_loss=0.1563, over 3748590.20 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 16.0 +2024-08-25 12:47:11,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=83445.33333333333, ans=0.125 +2024-08-25 12:47:18,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.03 vs. limit=10.0 +2024-08-25 12:47:25,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=83552.0, ans=0.125 +2024-08-25 12:47:46,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=83605.33333333333, ans=0.0 +2024-08-25 12:47:46,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=12.0 +2024-08-25 12:47:47,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=83605.33333333333, ans=0.125 +2024-08-25 12:47:49,553 INFO [train.py:1114] (1/4) Epoch 7, batch 750, loss[loss=0.2642, simple_loss=0.3114, pruned_loss=0.07869, ctc_loss=0.149, over 19489.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3136, pruned_loss=0.08271, ctc_loss=0.1551, over 3773990.72 frames. 
], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-25 12:48:03,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=83712.0, ans=10.0 +2024-08-25 12:48:27,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=83818.66666666667, ans=0.125 +2024-08-25 12:48:45,007 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.565e+02 1.885e+02 2.166e+02 2.690e+02 4.534e+02, threshold=4.331e+02, percent-clipped=3.0 +2024-08-25 12:48:49,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=83925.33333333333, ans=0.125 +2024-08-25 12:48:50,695 INFO [train.py:1114] (1/4) Epoch 7, batch 800, loss[loss=0.2378, simple_loss=0.2864, pruned_loss=0.06771, ctc_loss=0.1344, over 19412.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3133, pruned_loss=0.08254, ctc_loss=0.1547, over 3794781.94 frames. ], batch size: 48, lr: 2.10e-02, grad_scale: 32.0 +2024-08-25 12:48:59,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83925.33333333333, ans=0.125 +2024-08-25 12:49:13,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=84032.0, ans=0.125 +2024-08-25 12:49:17,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=84032.0, ans=0.125 +2024-08-25 12:49:40,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=84138.66666666667, ans=0.07 +2024-08-25 12:49:51,365 INFO [train.py:1114] (1/4) Epoch 7, batch 850, loss[loss=0.2729, simple_loss=0.3229, pruned_loss=0.08135, ctc_loss=0.1503, over 19653.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3124, pruned_loss=0.08185, ctc_loss=0.1537, over 3814384.82 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0 +2024-08-25 12:49:51,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=84192.0, ans=0.125 +2024-08-25 12:49:51,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84192.0, ans=0.1 +2024-08-25 12:49:54,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=84192.0, ans=0.125 +2024-08-25 12:49:56,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=84192.0, ans=0.0 +2024-08-25 12:50:21,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=84298.66666666667, ans=0.5 +2024-08-25 12:50:31,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-25 12:50:43,490 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.470e+02 1.946e+02 2.270e+02 2.825e+02 4.143e+02, threshold=4.540e+02, percent-clipped=0.0 +2024-08-25 12:50:49,138 INFO [train.py:1114] (1/4) Epoch 7, batch 900, loss[loss=0.2349, simple_loss=0.2832, pruned_loss=0.06809, ctc_loss=0.1258, over 19808.00 frames. 
], tot_loss[loss=0.2705, simple_loss=0.3135, pruned_loss=0.08276, ctc_loss=0.1551, over 3818924.23 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 32.0 +2024-08-25 12:51:00,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.91 vs. limit=15.0 +2024-08-25 12:51:02,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=84458.66666666667, ans=0.125 +2024-08-25 12:51:05,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=84458.66666666667, ans=0.125 +2024-08-25 12:51:08,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=84458.66666666667, ans=0.125 +2024-08-25 12:51:21,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=15.0 +2024-08-25 12:51:47,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.50 vs. limit=15.0 +2024-08-25 12:51:58,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-08-25 12:52:05,341 INFO [train.py:1114] (1/4) Epoch 7, batch 950, loss[loss=0.2285, simple_loss=0.2785, pruned_loss=0.06449, ctc_loss=0.1237, over 19496.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3142, pruned_loss=0.08333, ctc_loss=0.1564, over 3820132.42 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 16.0 +2024-08-25 12:52:20,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=84778.66666666667, ans=0.125 +2024-08-25 12:52:59,188 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.065e+02 2.373e+02 2.949e+02 1.128e+03, threshold=4.746e+02, percent-clipped=6.0 +2024-08-25 12:53:00,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84938.66666666667, ans=0.0 +2024-08-25 12:53:05,274 INFO [train.py:1114] (1/4) Epoch 7, batch 1000, loss[loss=0.2663, simple_loss=0.3103, pruned_loss=0.08027, ctc_loss=0.1542, over 19875.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3156, pruned_loss=0.08391, ctc_loss=0.1578, over 3815457.84 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:53:22,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-08-25 12:53:25,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=85045.33333333333, ans=0.025 +2024-08-25 12:53:38,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.25 vs. 
limit=22.5 +2024-08-25 12:53:45,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85152.0, ans=0.125 +2024-08-25 12:53:51,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=85152.0, ans=0.05 +2024-08-25 12:53:57,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=85205.33333333333, ans=0.125 +2024-08-25 12:54:04,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=85258.66666666667, ans=15.0 +2024-08-25 12:54:05,089 INFO [train.py:1114] (1/4) Epoch 7, batch 1050, loss[loss=0.2865, simple_loss=0.3322, pruned_loss=0.08727, ctc_loss=0.1656, over 19844.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3149, pruned_loss=0.0836, ctc_loss=0.1571, over 3823090.38 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:54:15,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85312.0, ans=0.125 +2024-08-25 12:54:41,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85418.66666666667, ans=0.125 +2024-08-25 12:54:55,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85472.0, ans=0.1 +2024-08-25 12:55:01,654 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.423e+02 1.918e+02 2.325e+02 2.776e+02 4.591e+02, threshold=4.650e+02, percent-clipped=1.0 +2024-08-25 12:55:02,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=85472.0, ans=0.0 +2024-08-25 12:55:05,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85525.33333333333, ans=0.125 +2024-08-25 12:55:06,549 INFO [train.py:1114] (1/4) Epoch 7, batch 1100, loss[loss=0.2829, simple_loss=0.3202, pruned_loss=0.09011, ctc_loss=0.1636, over 19591.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3145, pruned_loss=0.08304, ctc_loss=0.1562, over 3830140.20 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:55:16,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=85525.33333333333, ans=0.2 +2024-08-25 12:55:24,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. 
limit=15.0 +2024-08-25 12:55:26,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-25 12:55:30,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=85632.0, ans=0.125 +2024-08-25 12:55:41,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=85685.33333333333, ans=0.0 +2024-08-25 12:55:52,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=85685.33333333333, ans=0.0 +2024-08-25 12:56:05,760 INFO [train.py:1114] (1/4) Epoch 7, batch 1150, loss[loss=0.2733, simple_loss=0.3126, pruned_loss=0.08446, ctc_loss=0.1629, over 19582.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3141, pruned_loss=0.08298, ctc_loss=0.1561, over 3828421.18 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-25 12:56:05,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=85792.0, ans=0.125 +2024-08-25 12:56:08,397 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:56:15,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=85792.0, ans=0.125 +2024-08-25 12:56:24,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=85845.33333333333, ans=0.025 +2024-08-25 12:56:36,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85898.66666666667, ans=0.1 +2024-08-25 12:57:02,979 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.959e+02 2.167e+02 2.666e+02 4.946e+02, threshold=4.335e+02, percent-clipped=2.0 +2024-08-25 12:57:03,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=86005.33333333333, ans=0.125 +2024-08-25 12:57:07,688 INFO [train.py:1114] (1/4) Epoch 7, batch 1200, loss[loss=0.2661, simple_loss=0.3192, pruned_loss=0.07789, ctc_loss=0.1428, over 19830.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3146, pruned_loss=0.08322, ctc_loss=0.1566, over 3823710.99 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 12:57:21,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.69 vs. limit=10.0 +2024-08-25 12:57:30,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=86165.33333333333, ans=0.025 +2024-08-25 12:57:31,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.80 vs. limit=15.0 +2024-08-25 12:57:33,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=86165.33333333333, ans=0.0 +2024-08-25 12:58:05,923 INFO [train.py:1114] (1/4) Epoch 7, batch 1250, loss[loss=0.2911, simple_loss=0.3397, pruned_loss=0.08802, ctc_loss=0.166, over 19523.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3145, pruned_loss=0.08271, ctc_loss=0.1557, over 3842663.98 frames. 
], batch size: 61, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 12:58:06,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=86325.33333333333, ans=0.035 +2024-08-25 12:58:14,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=86325.33333333333, ans=10.0 +2024-08-25 12:58:29,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=86378.66666666667, ans=0.125 +2024-08-25 12:58:34,082 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:58:44,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86485.33333333333, ans=0.125 +2024-08-25 12:58:52,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=86485.33333333333, ans=0.2 +2024-08-25 12:59:02,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=86538.66666666667, ans=0.07 +2024-08-25 12:59:02,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.964e+02 2.304e+02 2.729e+02 5.465e+02, threshold=4.608e+02, percent-clipped=2.0 +2024-08-25 12:59:06,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.31 vs. limit=6.0 +2024-08-25 12:59:07,513 INFO [train.py:1114] (1/4) Epoch 7, batch 1300, loss[loss=0.2964, simple_loss=0.3364, pruned_loss=0.0932, ctc_loss=0.1752, over 18921.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3134, pruned_loss=0.08205, ctc_loss=0.1544, over 3846659.20 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 12:59:20,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=86645.33333333333, ans=0.125 +2024-08-25 12:59:26,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=86645.33333333333, ans=0.0 +2024-08-25 12:59:38,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.56 vs. limit=15.0 +2024-08-25 13:00:02,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=86805.33333333333, ans=0.0 +2024-08-25 13:00:03,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=86805.33333333333, ans=0.125 +2024-08-25 13:00:04,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=86805.33333333333, ans=0.09899494936611666 +2024-08-25 13:00:07,958 INFO [train.py:1114] (1/4) Epoch 7, batch 1350, loss[loss=0.2597, simple_loss=0.3104, pruned_loss=0.07583, ctc_loss=0.1436, over 19786.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3126, pruned_loss=0.08147, ctc_loss=0.1529, over 3858723.75 frames. 
], batch size: 54, lr: 2.07e-02, grad_scale: 32.0 +2024-08-25 13:00:20,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=86912.0, ans=0.125 +2024-08-25 13:00:46,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.73 vs. limit=10.0 +2024-08-25 13:01:58,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=87072.0, ans=0.2 +2024-08-25 13:01:59,607 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.935e+02 2.309e+02 3.009e+02 4.449e+02, threshold=4.618e+02, percent-clipped=0.0 +2024-08-25 13:02:04,195 INFO [train.py:1114] (1/4) Epoch 7, batch 1400, loss[loss=0.1956, simple_loss=0.2526, pruned_loss=0.04965, ctc_loss=0.09842, over 19649.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3118, pruned_loss=0.08118, ctc_loss=0.1526, over 3865468.57 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0 +2024-08-25 13:02:05,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=87125.33333333333, ans=0.0 +2024-08-25 13:02:06,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=87125.33333333333, ans=0.0 +2024-08-25 13:02:25,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=87178.66666666667, ans=0.0 +2024-08-25 13:02:32,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.82 vs. limit=10.0 +2024-08-25 13:02:51,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=87285.33333333333, ans=0.07 +2024-08-25 13:03:05,400 INFO [train.py:1114] (1/4) Epoch 7, batch 1450, loss[loss=0.2777, simple_loss=0.3265, pruned_loss=0.08398, ctc_loss=0.1522, over 19663.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3127, pruned_loss=0.08156, ctc_loss=0.1534, over 3863342.27 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 16.0 +2024-08-25 13:03:18,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=87445.33333333333, ans=0.0 +2024-08-25 13:03:22,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87445.33333333333, ans=0.1 +2024-08-25 13:03:36,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=87498.66666666667, ans=0.09899494936611666 +2024-08-25 13:04:29,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=87552.0, ans=0.0 +2024-08-25 13:04:32,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=87552.0, ans=0.2 +2024-08-25 13:04:46,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 2.015e+02 2.285e+02 2.716e+02 4.465e+02, threshold=4.569e+02, percent-clipped=0.0 +2024-08-25 13:04:50,185 INFO [train.py:1114] (1/4) Epoch 7, batch 1500, loss[loss=0.2528, simple_loss=0.3041, pruned_loss=0.07368, ctc_loss=0.1355, over 19582.00 frames. 
], tot_loss[loss=0.2687, simple_loss=0.313, pruned_loss=0.0815, ctc_loss=0.1534, over 3862746.88 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 16.0 +2024-08-25 13:05:02,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=87712.0, ans=0.0 +2024-08-25 13:05:25,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=87765.33333333333, ans=0.125 +2024-08-25 13:05:30,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87765.33333333333, ans=0.0 +2024-08-25 13:05:33,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87818.66666666667, ans=0.1 +2024-08-25 13:05:39,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=87818.66666666667, ans=0.09899494936611666 +2024-08-25 13:05:49,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=87872.0, ans=0.125 +2024-08-25 13:05:51,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=87872.0, ans=0.0 +2024-08-25 13:05:57,428 INFO [train.py:1114] (1/4) Epoch 7, batch 1550, loss[loss=0.2902, simple_loss=0.3303, pruned_loss=0.09178, ctc_loss=0.1663, over 19607.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3132, pruned_loss=0.08173, ctc_loss=0.1539, over 3848878.45 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 16.0 +2024-08-25 13:06:01,328 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:06:11,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=87978.66666666667, ans=0.0 +2024-08-25 13:06:52,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=88138.66666666667, ans=0.0 +2024-08-25 13:06:55,901 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.554e+02 1.880e+02 2.225e+02 2.757e+02 4.141e+02, threshold=4.451e+02, percent-clipped=0.0 +2024-08-25 13:07:00,958 INFO [train.py:1114] (1/4) Epoch 7, batch 1600, loss[loss=0.2942, simple_loss=0.3341, pruned_loss=0.09276, ctc_loss=0.172, over 19853.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3129, pruned_loss=0.08142, ctc_loss=0.1535, over 3837900.68 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:07:21,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=88245.33333333333, ans=0.125 +2024-08-25 13:07:41,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88352.0, ans=0.1 +2024-08-25 13:07:53,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=88405.33333333333, ans=0.025 +2024-08-25 13:07:56,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=88405.33333333333, ans=0.0 +2024-08-25 13:07:58,840 INFO [train.py:1114] (1/4) Epoch 7, batch 1650, loss[loss=0.2529, simple_loss=0.3055, pruned_loss=0.07369, ctc_loss=0.1321, over 19670.00 frames. 
], tot_loss[loss=0.2689, simple_loss=0.3128, pruned_loss=0.08169, ctc_loss=0.1537, over 3834353.21 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:08:17,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88512.0, ans=0.125 +2024-08-25 13:08:24,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=15.0 +2024-08-25 13:08:32,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=88565.33333333333, ans=0.125 +2024-08-25 13:08:36,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=88618.66666666667, ans=0.125 +2024-08-25 13:08:42,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=88618.66666666667, ans=0.0 +2024-08-25 13:08:54,963 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.917e+02 2.131e+02 2.729e+02 4.248e+02, threshold=4.261e+02, percent-clipped=0.0 +2024-08-25 13:08:58,383 INFO [train.py:1114] (1/4) Epoch 7, batch 1700, loss[loss=0.234, simple_loss=0.2778, pruned_loss=0.0696, ctc_loss=0.1276, over 19698.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.312, pruned_loss=0.08079, ctc_loss=0.1518, over 3848276.44 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:09:05,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=88725.33333333333, ans=0.2 +2024-08-25 13:09:19,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.87 vs. limit=22.5 +2024-08-25 13:09:30,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=88832.0, ans=0.125 +2024-08-25 13:09:35,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88885.33333333333, ans=0.1 +2024-08-25 13:09:45,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.64 vs. limit=15.0 +2024-08-25 13:09:46,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=88938.66666666667, ans=0.125 +2024-08-25 13:09:55,053 INFO [train.py:1114] (1/4) Epoch 7, batch 1750, loss[loss=0.2239, simple_loss=0.2707, pruned_loss=0.06378, ctc_loss=0.1237, over 19625.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3118, pruned_loss=0.08103, ctc_loss=0.1525, over 3852421.63 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0 +2024-08-25 13:16:07,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=89045.33333333333, ans=0.2 +2024-08-25 13:16:07,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.24 vs. 
limit=15.0 +2024-08-25 13:17:21,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=89098.66666666667, ans=0.125 +2024-08-25 13:25:00,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89152.0, ans=0.1 +2024-08-25 13:29:44,217 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.972e+02 2.344e+02 2.828e+02 4.449e+02, threshold=4.688e+02, percent-clipped=1.0 +2024-08-25 13:29:47,698 INFO [train.py:1114] (1/4) Epoch 7, batch 1800, loss[loss=0.2834, simple_loss=0.327, pruned_loss=0.08713, ctc_loss=0.1638, over 19609.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3119, pruned_loss=0.08102, ctc_loss=0.1523, over 3853282.55 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0 +2024-08-25 13:31:06,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=89258.66666666667, ans=0.0 +2024-08-25 13:31:14,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=21.58 vs. limit=22.5 +2024-08-25 13:38:20,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.61 vs. limit=10.0 +2024-08-25 13:38:36,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89418.66666666667, ans=0.1 +2024-08-25 13:38:37,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=89418.66666666667, ans=0.025 +2024-08-25 13:38:42,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=89418.66666666667, ans=0.2 +2024-08-25 13:39:43,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=89472.0, ans=0.125 +2024-08-25 13:40:34,832 INFO [train.py:1114] (1/4) Epoch 7, batch 1850, loss[loss=0.272, simple_loss=0.324, pruned_loss=0.07912, ctc_loss=0.1546, over 19570.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3121, pruned_loss=0.08125, ctc_loss=0.1527, over 3857561.30 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 32.0 +2024-08-25 13:43:28,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.41 vs. limit=15.0 +2024-08-25 13:43:38,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=12.0 +2024-08-25 13:43:46,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89738.66666666667, ans=0.0 +2024-08-25 13:44:01,309 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.852e+02 2.070e+02 2.397e+02 4.608e+02, threshold=4.140e+02, percent-clipped=0.0 +2024-08-25 13:44:07,982 INFO [train.py:1114] (1/4) Epoch 7, batch 1900, loss[loss=0.2413, simple_loss=0.3042, pruned_loss=0.06451, ctc_loss=0.1236, over 19656.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3133, pruned_loss=0.08165, ctc_loss=0.1537, over 3862690.98 frames. 
], batch size: 59, lr: 2.04e-02, grad_scale: 32.0 +2024-08-25 13:44:18,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=89792.0, ans=0.125 +2024-08-25 13:44:24,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89792.0, ans=0.1 +2024-08-25 13:44:28,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=89845.33333333333, ans=0.2 +2024-08-25 13:44:29,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89845.33333333333, ans=0.125 +2024-08-25 13:44:29,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=89845.33333333333, ans=0.07 +2024-08-25 13:44:53,116 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:45:05,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=89952.0, ans=0.125 +2024-08-25 13:45:19,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89952.0, ans=0.0 +2024-08-25 13:45:28,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90005.33333333333, ans=0.1 +2024-08-25 13:45:41,187 INFO [train.py:1114] (1/4) Epoch 7, batch 1950, loss[loss=0.2557, simple_loss=0.2998, pruned_loss=0.07757, ctc_loss=0.1413, over 19577.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3134, pruned_loss=0.08118, ctc_loss=0.1527, over 3871580.56 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 16.0 +2024-08-25 13:45:54,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=12.0 +2024-08-25 13:45:55,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90112.0, ans=0.125 +2024-08-25 13:46:02,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. limit=15.0 +2024-08-25 13:46:42,773 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 1.896e+02 2.177e+02 2.703e+02 3.964e+02, threshold=4.354e+02, percent-clipped=0.0 +2024-08-25 13:46:45,052 INFO [train.py:1114] (1/4) Epoch 7, batch 2000, loss[loss=0.2374, simple_loss=0.2886, pruned_loss=0.0681, ctc_loss=0.1251, over 19656.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3146, pruned_loss=0.0821, ctc_loss=0.1545, over 3855939.06 frames. ], batch size: 45, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:46:50,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=90325.33333333333, ans=10.0 +2024-08-25 13:47:10,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=90432.0, ans=0.125 +2024-08-25 13:47:14,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.74 vs. 
limit=10.0 +2024-08-25 13:47:24,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=90485.33333333333, ans=0.0 +2024-08-25 13:47:41,004 INFO [train.py:1114] (1/4) Epoch 7, batch 2050, loss[loss=0.2333, simple_loss=0.2847, pruned_loss=0.06615, ctc_loss=0.1239, over 19726.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3134, pruned_loss=0.08187, ctc_loss=0.1544, over 3852162.00 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:47:56,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=90645.33333333333, ans=0.0 +2024-08-25 13:47:57,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=90645.33333333333, ans=0.1 +2024-08-25 13:48:00,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90645.33333333333, ans=0.0 +2024-08-25 13:48:13,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90698.66666666667, ans=0.125 +2024-08-25 13:48:23,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=90752.0, ans=0.125 +2024-08-25 13:48:36,339 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.053e+02 2.413e+02 3.017e+02 5.203e+02, threshold=4.827e+02, percent-clipped=2.0 +2024-08-25 13:48:38,576 INFO [train.py:1114] (1/4) Epoch 7, batch 2100, loss[loss=0.2623, simple_loss=0.306, pruned_loss=0.07879, ctc_loss=0.1526, over 19754.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3124, pruned_loss=0.08093, ctc_loss=0.1527, over 3858882.17 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:49:02,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=12.0 +2024-08-25 13:49:20,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=91018.66666666667, ans=0.0 +2024-08-25 13:49:24,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=91018.66666666667, ans=0.025 +2024-08-25 13:49:30,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=91072.0, ans=0.2 +2024-08-25 13:49:34,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=91072.0, ans=0.125 +2024-08-25 13:49:42,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=91125.33333333333, ans=0.125 +2024-08-25 13:49:43,245 INFO [train.py:1114] (1/4) Epoch 7, batch 2150, loss[loss=0.2573, simple_loss=0.2995, pruned_loss=0.07831, ctc_loss=0.1461, over 19584.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3114, pruned_loss=0.08061, ctc_loss=0.1519, over 3869609.55 frames. 
], batch size: 52, lr: 2.03e-02, grad_scale: 32.0 +2024-08-25 13:49:55,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-25 13:49:56,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-25 13:49:58,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-25 13:49:58,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-25 13:50:01,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91178.66666666667, ans=0.1 +2024-08-25 13:50:03,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-25 13:50:12,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91232.0, ans=0.125 +2024-08-25 13:50:20,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91285.33333333333, ans=0.125 +2024-08-25 13:50:33,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=91338.66666666667, ans=0.2 +2024-08-25 13:50:36,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.440e+02 1.920e+02 2.200e+02 2.924e+02 5.090e+02, threshold=4.400e+02, percent-clipped=1.0 +2024-08-25 13:50:38,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=91392.0, ans=0.0 +2024-08-25 13:50:39,128 INFO [train.py:1114] (1/4) Epoch 7, batch 2200, loss[loss=0.2893, simple_loss=0.3313, pruned_loss=0.09015, ctc_loss=0.1677, over 19580.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3116, pruned_loss=0.08061, ctc_loss=0.1518, over 3868263.55 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0 +2024-08-25 13:50:58,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91445.33333333333, ans=0.1 +2024-08-25 13:51:00,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=91498.66666666667, ans=0.0 +2024-08-25 13:51:09,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91498.66666666667, ans=0.1 +2024-08-25 13:51:16,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=91552.0, ans=0.0 +2024-08-25 13:51:22,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91552.0, ans=0.125 +2024-08-25 13:51:34,995 INFO [train.py:1114] (1/4) Epoch 7, batch 2250, loss[loss=0.2456, simple_loss=0.306, pruned_loss=0.06734, ctc_loss=0.1264, over 19603.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3119, pruned_loss=0.08085, ctc_loss=0.1521, over 3867162.72 frames. 
], batch size: 55, lr: 2.02e-02, grad_scale: 16.0 +2024-08-25 13:51:39,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.61 vs. limit=10.0 +2024-08-25 13:51:44,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=91712.0, ans=0.125 +2024-08-25 13:52:01,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0 +2024-08-25 13:52:05,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91765.33333333333, ans=0.125 +2024-08-25 13:52:14,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=91818.66666666667, ans=0.0 +2024-08-25 13:52:28,393 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 2.146e+02 2.677e+02 3.204e+02 4.930e+02, threshold=5.354e+02, percent-clipped=3.0 +2024-08-25 13:52:29,557 INFO [train.py:1114] (1/4) Epoch 7, batch 2300, loss[loss=0.2404, simple_loss=0.2894, pruned_loss=0.07054, ctc_loss=0.1257, over 19505.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3113, pruned_loss=0.08071, ctc_loss=0.1518, over 3861012.41 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 16.0 +2024-08-25 13:52:33,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=91925.33333333333, ans=0.2 +2024-08-25 13:52:37,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=91925.33333333333, ans=0.0 +2024-08-25 13:52:47,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.73 vs. limit=10.0 +2024-08-25 13:52:57,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92032.0, ans=0.1 +2024-08-25 13:53:05,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92085.33333333333, ans=0.1 +2024-08-25 13:53:18,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=92138.66666666667, ans=0.025 +2024-08-25 13:53:23,070 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 13:53:25,163 INFO [train.py:1114] (1/4) Epoch 7, batch 2350, loss[loss=0.2673, simple_loss=0.3228, pruned_loss=0.07795, ctc_loss=0.14, over 19666.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3113, pruned_loss=0.08074, ctc_loss=0.1515, over 3863615.91 frames. 
], batch size: 63, lr: 2.02e-02, grad_scale: 16.0 +2024-08-25 13:53:35,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=92245.33333333333, ans=0.0 +2024-08-25 13:53:51,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=92298.66666666667, ans=0.0 +2024-08-25 13:53:54,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=92298.66666666667, ans=0.0 +2024-08-25 13:54:10,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=92405.33333333333, ans=0.2 +2024-08-25 13:54:18,217 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.474e+02 1.985e+02 2.336e+02 2.802e+02 4.974e+02, threshold=4.671e+02, percent-clipped=0.0 +2024-08-25 13:54:19,273 INFO [train.py:1114] (1/4) Epoch 7, batch 2400, loss[loss=0.3083, simple_loss=0.3479, pruned_loss=0.0984, ctc_loss=0.1796, over 19295.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3129, pruned_loss=0.0815, ctc_loss=0.1526, over 3857314.72 frames. ], batch size: 71, lr: 2.01e-02, grad_scale: 32.0 +2024-08-25 13:54:28,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.38 vs. limit=15.0 +2024-08-25 13:54:44,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=92565.33333333333, ans=0.1 +2024-08-25 13:55:47,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92565.33333333333, ans=0.1 +2024-08-25 13:55:52,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92618.66666666667, ans=0.125 +2024-08-25 13:56:02,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=92672.0, ans=0.0 +2024-08-25 13:56:10,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=92672.0, ans=0.09899494936611666 +2024-08-25 13:56:13,545 INFO [train.py:1114] (1/4) Epoch 7, batch 2450, loss[loss=0.3927, simple_loss=0.3779, pruned_loss=0.1476, ctc_loss=0.2807, over 13294.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3181, pruned_loss=0.08608, ctc_loss=0.1616, over 3729139.51 frames. 
], batch size: 140, lr: 2.01e-02, grad_scale: 32.0 +2024-08-25 13:56:18,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=92725.33333333333, ans=0.125 +2024-08-25 13:56:19,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=92725.33333333333, ans=0.2 +2024-08-25 13:56:19,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=92725.33333333333, ans=0.0 +2024-08-25 13:56:42,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=92778.66666666667, ans=0.125 +2024-08-25 13:56:42,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92778.66666666667, ans=0.125 +2024-08-25 13:57:01,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=92885.33333333333, ans=0.0 +2024-08-25 13:57:54,269 INFO [train.py:1114] (1/4) Epoch 8, batch 0, loss[loss=0.246, simple_loss=0.2955, pruned_loss=0.07066, ctc_loss=0.1379, over 19413.00 frames. ], tot_loss[loss=0.246, simple_loss=0.2955, pruned_loss=0.07066, ctc_loss=0.1379, over 19413.00 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 13:57:54,270 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 13:59:56,290 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.2171, simple_loss=0.2997, pruned_loss=0.04948, ctc_loss=0.08904, over 944034.00 frames. +2024-08-25 13:59:56,291 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 13:59:56,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.49 vs. limit=15.0 +2024-08-25 14:00:05,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92933.33333333333, ans=0.0 +2024-08-25 14:01:03,637 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.719e+02 2.158e+02 2.483e+02 2.902e+02 5.180e+02, threshold=4.965e+02, percent-clipped=2.0 +2024-08-25 14:01:42,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-08-25 14:02:04,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=93146.66666666667, ans=0.0 +2024-08-25 14:02:06,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93146.66666666667, ans=0.1 +2024-08-25 14:02:13,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=93146.66666666667, ans=0.0 +2024-08-25 14:02:17,064 INFO [train.py:1114] (1/4) Epoch 8, batch 50, loss[loss=0.2386, simple_loss=0.2844, pruned_loss=0.06973, ctc_loss=0.1332, over 19725.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3178, pruned_loss=0.08489, ctc_loss=0.1606, over 845284.01 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:02:34,064 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.17 vs. 
limit=15.0 +2024-08-25 14:02:43,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=93306.66666666667, ans=0.125 +2024-08-25 14:03:01,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93360.0, ans=0.125 +2024-08-25 14:04:49,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93413.33333333333, ans=0.0 +2024-08-25 14:05:00,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93413.33333333333, ans=0.0 +2024-08-25 14:05:03,232 INFO [train.py:1114] (1/4) Epoch 8, batch 100, loss[loss=0.2546, simple_loss=0.3013, pruned_loss=0.07476, ctc_loss=0.1462, over 19716.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3157, pruned_loss=0.08176, ctc_loss=0.1541, over 1500494.30 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:05:14,933 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.910e+02 2.219e+02 2.660e+02 5.043e+02, threshold=4.439e+02, percent-clipped=1.0 +2024-08-25 14:05:16,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93520.0, ans=0.0 +2024-08-25 14:05:44,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=93626.66666666667, ans=0.125 +2024-08-25 14:05:48,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=93626.66666666667, ans=0.04949747468305833 +2024-08-25 14:05:55,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=93680.0, ans=0.2 +2024-08-25 14:07:16,378 INFO [train.py:1114] (1/4) Epoch 8, batch 150, loss[loss=0.2213, simple_loss=0.2755, pruned_loss=0.06106, ctc_loss=0.1126, over 19719.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3109, pruned_loss=0.0794, ctc_loss=0.1488, over 2028940.94 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:07:29,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-08-25 14:07:31,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.09 vs. 
limit=15.0 +2024-08-25 14:08:14,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=93786.66666666667, ans=0.0 +2024-08-25 14:08:14,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=93786.66666666667, ans=0.5 +2024-08-25 14:09:07,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93840.0, ans=0.1 +2024-08-25 14:09:23,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=93893.33333333333, ans=0.125 +2024-08-25 14:09:27,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=93946.66666666667, ans=0.0 +2024-08-25 14:10:14,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.31 vs. limit=15.0 +2024-08-25 14:10:16,267 INFO [train.py:1114] (1/4) Epoch 8, batch 200, loss[loss=0.2691, simple_loss=0.3119, pruned_loss=0.08168, ctc_loss=0.1574, over 18206.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3076, pruned_loss=0.07724, ctc_loss=0.1451, over 2436792.66 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:10:22,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=94000.0, ans=0.2 +2024-08-25 14:10:29,222 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.854e+02 2.093e+02 2.544e+02 5.078e+02, threshold=4.187e+02, percent-clipped=1.0 +2024-08-25 14:10:33,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94053.33333333333, ans=0.125 +2024-08-25 14:10:46,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94106.66666666667, ans=0.125 +2024-08-25 14:10:59,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=94160.0, ans=0.2 +2024-08-25 14:11:07,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=94213.33333333333, ans=0.0 +2024-08-25 14:11:17,845 INFO [train.py:1114] (1/4) Epoch 8, batch 250, loss[loss=0.2827, simple_loss=0.3221, pruned_loss=0.08986, ctc_loss=0.1586, over 19411.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3082, pruned_loss=0.078, ctc_loss=0.1465, over 2756344.82 frames. 
], batch size: 67, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:11:31,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94266.66666666667, ans=0.125 +2024-08-25 14:12:28,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94320.0, ans=0.125 +2024-08-25 14:12:51,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=94426.66666666667, ans=0.2 +2024-08-25 14:12:58,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=94426.66666666667, ans=0.0 +2024-08-25 14:13:02,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=94480.0, ans=0.0 +2024-08-25 14:13:13,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.33 vs. limit=15.0 +2024-08-25 14:13:16,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=94480.0, ans=0.0 +2024-08-25 14:13:21,907 INFO [train.py:1114] (1/4) Epoch 8, batch 300, loss[loss=0.268, simple_loss=0.3168, pruned_loss=0.07985, ctc_loss=0.149, over 19536.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3064, pruned_loss=0.07692, ctc_loss=0.1443, over 3000549.85 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:13:33,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.508e+02 1.987e+02 2.340e+02 3.022e+02 6.047e+02, threshold=4.681e+02, percent-clipped=9.0 +2024-08-25 14:13:44,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94640.0, ans=0.1 +2024-08-25 14:13:45,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=94640.0, ans=0.125 +2024-08-25 14:13:49,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=94640.0, ans=0.1 +2024-08-25 14:14:31,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=94693.33333333333, ans=0.125 +2024-08-25 14:14:52,108 INFO [train.py:1114] (1/4) Epoch 8, batch 350, loss[loss=0.2446, simple_loss=0.2863, pruned_loss=0.07317, ctc_loss=0.1411, over 19761.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3075, pruned_loss=0.07712, ctc_loss=0.1448, over 3191442.03 frames. ], batch size: 48, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:16:35,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.51 vs. limit=10.0 +2024-08-25 14:16:38,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95013.33333333333, ans=0.1 +2024-08-25 14:16:45,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95013.33333333333, ans=0.1 +2024-08-25 14:16:50,820 INFO [train.py:1114] (1/4) Epoch 8, batch 400, loss[loss=0.2767, simple_loss=0.3185, pruned_loss=0.08633, ctc_loss=0.1556, over 19491.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3066, pruned_loss=0.07653, ctc_loss=0.1438, over 3343449.44 frames. 
], batch size: 54, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:17:03,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.620e+02 2.019e+02 2.528e+02 3.132e+02 5.852e+02, threshold=5.056e+02, percent-clipped=7.0 +2024-08-25 14:17:09,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=95120.0, ans=0.125 +2024-08-25 14:17:41,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=95280.0, ans=0.2 +2024-08-25 14:17:50,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=95280.0, ans=0.125 +2024-08-25 14:18:38,389 INFO [train.py:1114] (1/4) Epoch 8, batch 450, loss[loss=0.2598, simple_loss=0.3246, pruned_loss=0.07021, ctc_loss=0.1364, over 19623.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3067, pruned_loss=0.07688, ctc_loss=0.1444, over 3451718.36 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:18:42,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95333.33333333333, ans=0.1 +2024-08-25 14:18:57,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=95386.66666666667, ans=0.125 +2024-08-25 14:18:57,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.83 vs. limit=15.0 +2024-08-25 14:19:31,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95546.66666666667, ans=0.125 +2024-08-25 14:19:39,036 INFO [train.py:1114] (1/4) Epoch 8, batch 500, loss[loss=0.2579, simple_loss=0.3156, pruned_loss=0.07288, ctc_loss=0.1362, over 19638.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3053, pruned_loss=0.07618, ctc_loss=0.1432, over 3547664.10 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:19:52,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=95600.0, ans=0.2 +2024-08-25 14:21:37,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95600.0, ans=0.1 +2024-08-25 14:21:37,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.13 vs. limit=15.0 +2024-08-25 14:21:42,069 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.925e+02 2.242e+02 2.655e+02 4.786e+02, threshold=4.483e+02, percent-clipped=0.0 +2024-08-25 14:21:58,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0 +2024-08-25 14:22:05,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=95760.0, ans=0.125 +2024-08-25 14:22:34,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=95813.33333333333, ans=0.125 +2024-08-25 14:22:36,073 INFO [train.py:1114] (1/4) Epoch 8, batch 550, loss[loss=0.2695, simple_loss=0.3166, pruned_loss=0.08083, ctc_loss=0.1519, over 19289.00 frames. 
], tot_loss[loss=0.2587, simple_loss=0.3063, pruned_loss=0.07673, ctc_loss=0.1441, over 3609336.19 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:22:45,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95866.66666666667, ans=0.1 +2024-08-25 14:24:05,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=95973.33333333333, ans=0.2 +2024-08-25 14:25:43,163 INFO [train.py:1114] (1/4) Epoch 8, batch 600, loss[loss=0.2668, simple_loss=0.3224, pruned_loss=0.07763, ctc_loss=0.1397, over 19378.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3068, pruned_loss=0.07691, ctc_loss=0.1445, over 3666215.50 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:25:54,325 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.517e+02 1.975e+02 2.461e+02 2.998e+02 6.685e+02, threshold=4.922e+02, percent-clipped=2.0 +2024-08-25 14:26:01,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=96186.66666666667, ans=0.125 +2024-08-25 14:26:24,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=96293.33333333333, ans=0.125 +2024-08-25 14:26:31,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=96293.33333333333, ans=0.2 +2024-08-25 14:27:51,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=96346.66666666667, ans=0.2 +2024-08-25 14:29:18,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96346.66666666667, ans=0.1 +2024-08-25 14:29:20,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=96346.66666666667, ans=0.025 +2024-08-25 14:29:23,560 INFO [train.py:1114] (1/4) Epoch 8, batch 650, loss[loss=0.2582, simple_loss=0.3045, pruned_loss=0.07511, ctc_loss=0.1541, over 19759.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3062, pruned_loss=0.07656, ctc_loss=0.1443, over 3716209.91 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:29:23,692 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:29:36,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=96453.33333333333, ans=0.0 +2024-08-25 14:29:42,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.25 vs. limit=10.0 +2024-08-25 14:29:46,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=96453.33333333333, ans=10.0 +2024-08-25 14:30:03,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=96560.0, ans=0.0 +2024-08-25 14:30:07,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.75 vs. 
limit=22.5 +2024-08-25 14:30:52,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=96560.0, ans=0.0 +2024-08-25 14:31:06,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96613.33333333333, ans=0.125 +2024-08-25 14:31:08,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96613.33333333333, ans=0.125 +2024-08-25 14:31:24,402 INFO [train.py:1114] (1/4) Epoch 8, batch 700, loss[loss=0.2306, simple_loss=0.2872, pruned_loss=0.06264, ctc_loss=0.1218, over 19719.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3066, pruned_loss=0.0766, ctc_loss=0.1443, over 3749112.39 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:31:25,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.97 vs. limit=15.0 +2024-08-25 14:31:36,079 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.482e+02 1.952e+02 2.228e+02 2.907e+02 4.140e+02, threshold=4.456e+02, percent-clipped=0.0 +2024-08-25 14:31:54,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-08-25 14:31:54,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96773.33333333333, ans=0.125 +2024-08-25 14:32:00,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=96773.33333333333, ans=0.125 +2024-08-25 14:32:04,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96773.33333333333, ans=0.125 +2024-08-25 14:32:27,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.18 vs. limit=15.0 +2024-08-25 14:32:35,086 INFO [train.py:1114] (1/4) Epoch 8, batch 750, loss[loss=0.2512, simple_loss=0.3118, pruned_loss=0.06927, ctc_loss=0.1299, over 19493.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3062, pruned_loss=0.07648, ctc_loss=0.1438, over 3774857.95 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:32:44,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=96933.33333333333, ans=0.0 +2024-08-25 14:32:52,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=96986.66666666667, ans=0.0 +2024-08-25 14:33:15,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=97040.0, ans=0.125 +2024-08-25 14:33:22,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97093.33333333333, ans=0.125 +2024-08-25 14:33:39,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.02 vs. limit=15.0 +2024-08-25 14:33:45,421 INFO [train.py:1114] (1/4) Epoch 8, batch 800, loss[loss=0.2205, simple_loss=0.2722, pruned_loss=0.06213, ctc_loss=0.1112, over 19804.00 frames. 
], tot_loss[loss=0.2581, simple_loss=0.3061, pruned_loss=0.07637, ctc_loss=0.1434, over 3796328.98 frames. ], batch size: 49, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:34:27,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97200.0, ans=0.125 +2024-08-25 14:34:31,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.07 vs. limit=15.0 +2024-08-25 14:34:33,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0 +2024-08-25 14:34:35,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.511e+02 1.855e+02 2.176e+02 2.933e+02 4.905e+02, threshold=4.353e+02, percent-clipped=3.0 +2024-08-25 14:35:22,239 INFO [train.py:1114] (1/4) Epoch 8, batch 850, loss[loss=0.2519, simple_loss=0.3111, pruned_loss=0.06966, ctc_loss=0.1337, over 19646.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3055, pruned_loss=0.07587, ctc_loss=0.1423, over 3815639.42 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:35:26,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97466.66666666667, ans=0.1 +2024-08-25 14:35:37,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=97520.0, ans=0.95 +2024-08-25 14:36:00,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=97626.66666666667, ans=0.2 +2024-08-25 14:36:01,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=97626.66666666667, ans=0.125 +2024-08-25 14:36:02,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=97626.66666666667, ans=0.025 +2024-08-25 14:36:13,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97680.0, ans=0.1 +2024-08-25 14:36:19,793 INFO [train.py:1114] (1/4) Epoch 8, batch 900, loss[loss=0.2583, simple_loss=0.2945, pruned_loss=0.08169, ctc_loss=0.1465, over 19428.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3063, pruned_loss=0.07658, ctc_loss=0.1435, over 3820034.18 frames. ], batch size: 48, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:38:24,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0 +2024-08-25 14:38:28,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.79 vs. limit=15.0 +2024-08-25 14:38:28,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.54 vs. 
limit=15.0 +2024-08-25 14:38:30,474 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.590e+02 1.935e+02 2.327e+02 2.780e+02 5.034e+02, threshold=4.654e+02, percent-clipped=2.0 +2024-08-25 14:38:30,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=97786.66666666667, ans=0.02 +2024-08-25 14:38:35,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=97786.66666666667, ans=0.125 +2024-08-25 14:38:39,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97786.66666666667, ans=0.1 +2024-08-25 14:38:50,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=97840.0, ans=0.0 +2024-08-25 14:39:10,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97946.66666666667, ans=0.1 +2024-08-25 14:39:56,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97946.66666666667, ans=0.125 +2024-08-25 14:40:01,371 INFO [train.py:1114] (1/4) Epoch 8, batch 950, loss[loss=0.2532, simple_loss=0.2885, pruned_loss=0.07917, ctc_loss=0.149, over 19506.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3069, pruned_loss=0.07704, ctc_loss=0.1443, over 3821119.37 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:40:17,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=98053.33333333333, ans=0.125 +2024-08-25 14:40:22,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=98053.33333333333, ans=0.025 +2024-08-25 14:41:20,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=98106.66666666667, ans=0.0 +2024-08-25 14:41:31,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=98160.0, ans=0.2 +2024-08-25 14:42:08,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=98160.0, ans=0.125 +2024-08-25 14:43:17,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=98213.33333333333, ans=0.0 +2024-08-25 14:43:17,673 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.38 vs. limit=22.5 +2024-08-25 14:43:26,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=98213.33333333333, ans=10.0 +2024-08-25 14:43:29,286 INFO [train.py:1114] (1/4) Epoch 8, batch 1000, loss[loss=0.2476, simple_loss=0.2964, pruned_loss=0.07145, ctc_loss=0.1397, over 19838.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3087, pruned_loss=0.0781, ctc_loss=0.1464, over 3817448.78 frames. 
], batch size: 52, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:43:30,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=98266.66666666667, ans=0.125 +2024-08-25 14:43:39,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=98320.0, ans=0.04949747468305833 +2024-08-25 14:43:47,360 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 2.014e+02 2.465e+02 3.304e+02 4.205e+02, threshold=4.930e+02, percent-clipped=0.0 +2024-08-25 14:43:50,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=98320.0, ans=0.0 +2024-08-25 14:43:52,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=98320.0, ans=0.125 +2024-08-25 14:46:05,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=98320.0, ans=0.125 +2024-08-25 14:46:32,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=98480.0, ans=0.05 +2024-08-25 14:46:35,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.59 vs. limit=22.5 +2024-08-25 14:46:37,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=98480.0, ans=0.125 +2024-08-25 14:46:44,411 INFO [train.py:1114] (1/4) Epoch 8, batch 1050, loss[loss=0.285, simple_loss=0.3254, pruned_loss=0.08865, ctc_loss=0.1684, over 19833.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3077, pruned_loss=0.07767, ctc_loss=0.1458, over 3823813.01 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:46:45,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=98533.33333333333, ans=0.0 +2024-08-25 14:46:49,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=98533.33333333333, ans=0.125 +2024-08-25 14:47:37,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=98746.66666666667, ans=0.0 +2024-08-25 14:47:44,591 INFO [train.py:1114] (1/4) Epoch 8, batch 1100, loss[loss=0.2333, simple_loss=0.2866, pruned_loss=0.06527, ctc_loss=0.1237, over 19576.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3069, pruned_loss=0.07688, ctc_loss=0.1444, over 3831686.92 frames. 
+2024-08-25 14:48:13,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.814e+02 2.071e+02 2.620e+02 3.682e+02, threshold=4.142e+02, percent-clipped=0.0
+2024-08-25 14:48:29,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=98853.33333333333, ans=0.125
+2024-08-25 14:48:40,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=98906.66666666667, ans=0.2
+2024-08-25 14:49:01,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98906.66666666667, ans=0.125
+2024-08-25 14:49:02,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=98906.66666666667, ans=0.125
+2024-08-25 14:49:15,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=98960.0, ans=0.2
+2024-08-25 14:49:20,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98960.0, ans=0.125
+2024-08-25 14:50:00,860 INFO [train.py:1114] (1/4) Epoch 8, batch 1150, loss[loss=0.254, simple_loss=0.2986, pruned_loss=0.07656, ctc_loss=0.141, over 19597.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3078, pruned_loss=0.07768, ctc_loss=0.146, over 3830909.21 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 32.0
+2024-08-25 14:51:09,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=99120.0, ans=0.05
+2024-08-25 14:51:26,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0
+2024-08-25 14:51:58,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=99173.33333333333, ans=0.0
+2024-08-25 14:52:04,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=99226.66666666667, ans=0.2
+2024-08-25 14:52:51,781 INFO [train.py:1114] (1/4) Epoch 8, batch 1200, loss[loss=0.2694, simple_loss=0.3202, pruned_loss=0.07986, ctc_loss=0.1474, over 19842.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3079, pruned_loss=0.07736, ctc_loss=0.1455, over 3824781.15 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0
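The many `ScheduledFloat: name=..., batch_count=..., ans=...` lines record hyperparameters (dropout probabilities, skip rates, balancer bounds) that are functions of the batch count rather than constants; `ans` is the value currently in effect. A simplified stand-in for such a schedule is sketched below, assuming plain piecewise-linear interpolation between (batch_count, value) breakpoints; the actual class in the recipe's scaling.py is more elaborate:

    class ScheduledFloat:
        # A float hyperparameter interpolated piecewise-linearly in batch_count,
        # e.g. ScheduledFloat((0.0, 0.3), (20000.0, 0.1)) decays 0.3 -> 0.1
        # over the first 20k batches and stays at 0.1 afterwards.
        def __init__(self, *points):
            self.points = sorted(points)   # (batch_count, value) breakpoints
            self.batch_count = 0.0         # updated by the training loop
        def __float__(self):
            x = self.batch_count
            (x0, y0), *rest = self.points
            if x <= x0:
                return float(y0)
            for (x1, y1) in rest:
                if x <= x1:
                    return float(y0 + (y1 - y0) * (x - x0) / (x1 - x0))
                x0, y0 = x1, y1
            return float(y0)               # past the last breakpoint

This would explain why, at batch_count near 98k, most dropout-like values above have already settled at their final plateau (ans=0.1, 0.125, and so on).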
+2024-08-25 14:53:01,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99333.33333333333, ans=0.1
+2024-08-25 14:53:06,252 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.879e+02 2.149e+02 2.634e+02 4.011e+02, threshold=4.298e+02, percent-clipped=0.0
+2024-08-25 14:53:32,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=99493.33333333333, ans=0.0
+2024-08-25 14:53:48,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=99546.66666666667, ans=0.125
+2024-08-25 14:53:50,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=99546.66666666667, ans=0.125
+2024-08-25 14:53:52,342 INFO [train.py:1114] (1/4) Epoch 8, batch 1250, loss[loss=0.3024, simple_loss=0.3363, pruned_loss=0.09866, ctc_loss=0.1778, over 19526.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3083, pruned_loss=0.07715, ctc_loss=0.1453, over 3843129.06 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0
+2024-08-25 14:54:05,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99653.33333333333, ans=0.1
+2024-08-25 14:54:06,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=99653.33333333333, ans=0.0
+2024-08-25 14:54:11,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=99653.33333333333, ans=0.125
+2024-08-25 14:54:40,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=99706.66666666667, ans=0.07
+2024-08-25 14:55:38,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=99706.66666666667, ans=0.2
+2024-08-25 14:55:45,373 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 14:55:50,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=99760.0, ans=0.125
+2024-08-25 14:55:54,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=99813.33333333333, ans=0.0
+2024-08-25 14:56:05,545 INFO [train.py:1114] (1/4) Epoch 8, batch 1300, loss[loss=0.2712, simple_loss=0.3122, pruned_loss=0.08421, ctc_loss=0.1542, over 18890.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3064, pruned_loss=0.07615, ctc_loss=0.1436, over 3846658.49 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0
+2024-08-25 14:56:08,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=99866.66666666667, ans=0.0
+2024-08-25 14:56:17,013 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.809e+02 2.147e+02 2.747e+02 4.726e+02, threshold=4.293e+02, percent-clipped=4.0
+2024-08-25 14:58:09,048 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 14:58:23,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100080.0, ans=0.1
+2024-08-25 14:58:53,773 INFO [train.py:1114] (1/4) Epoch 8, batch 1350, loss[loss=0.2668, simple_loss=0.3198, pruned_loss=0.07724, ctc_loss=0.1484, over 19792.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3066, pruned_loss=0.07639, ctc_loss=0.144, over 3858031.17 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0
+2024-08-25 14:58:58,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100133.33333333333, ans=0.125
+2024-08-25 14:59:41,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=100346.66666666667, ans=0.95
+2024-08-25 14:59:51,903 INFO [train.py:1114] (1/4) Epoch 8, batch 1400, loss[loss=0.2218, simple_loss=0.2748, pruned_loss=0.06047, ctc_loss=0.1196, over 19658.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3061, pruned_loss=0.07595, ctc_loss=0.1429, over 3865207.46 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 32.0
+2024-08-25 14:59:53,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100400.0, ans=0.125
+2024-08-25 14:59:54,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=100400.0, ans=0.0
+2024-08-25 14:59:54,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=100400.0, ans=0.2
+2024-08-25 15:00:00,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.65 vs. limit=22.5
+2024-08-25 15:00:03,300 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 2.018e+02 2.600e+02 3.300e+02 7.375e+02, threshold=5.199e+02, percent-clipped=11.0
+2024-08-25 15:00:55,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=100666.66666666667, ans=0.125
+2024-08-25 15:00:56,747 INFO [train.py:1114] (1/4) Epoch 8, batch 1450, loss[loss=0.2663, simple_loss=0.318, pruned_loss=0.07862, ctc_loss=0.1433, over 19693.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3072, pruned_loss=0.07683, ctc_loss=0.1443, over 3863108.04 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0
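The `Whitening: name=..., metric=... vs. limit=...` lines compare a whitening statistic of some module's activations against a limit; these messages are only printed when the metric approaches or exceeds the limit, at which point a penalty nudges the activations toward a whiter (more isotropic) covariance. One plausible way to compute such a metric is sketched below, assuming it is the ratio of the mean squared covariance eigenvalue to the squared mean eigenvalue, which equals 1.0 for perfectly white features; this is an illustration, not the recipe's exact formula:

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
        # x: (num_frames, num_channels). Split channels into groups and
        # measure how unevenly each group's covariance spectrum is spread:
        # mean(eig^2) / mean(eig)^2 == 1.0 iff all eigenvalues are equal.
        num_frames, num_channels = x.shape
        x = x.reshape(num_frames, num_groups, num_channels // num_groups)
        x = (x - x.mean(dim=0, keepdim=True)).transpose(0, 1)  # (G, F, C/G)
        cov = x.transpose(1, 2) @ x / num_frames               # per-group covariance
        eigs = torch.linalg.eigvalsh(cov)                      # symmetric eigensolve
        return ((eigs ** 2).mean() / (eigs.mean() ** 2)).item()

Reading the log through that lens, `metric=22.65 vs. limit=22.5` above marks a module whose attention activations have just crossed the allowed anisotropy and will be penalized back under the limit.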
+2024-08-25 15:00:56,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=100666.66666666667, ans=0.2
+2024-08-25 15:01:29,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=100773.33333333333, ans=0.125
+2024-08-25 15:01:46,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100880.0, ans=0.125
+2024-08-25 15:02:32,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=100880.0, ans=0.0
+2024-08-25 15:03:16,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=100933.33333333333, ans=0.2
+2024-08-25 15:03:17,701 INFO [train.py:1114] (1/4) Epoch 8, batch 1500, loss[loss=0.2619, simple_loss=0.315, pruned_loss=0.07554, ctc_loss=0.1445, over 19590.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.307, pruned_loss=0.07651, ctc_loss=0.1439, over 3862434.42 frames. ], batch size: 57, lr: 1.83e-02, grad_scale: 32.0
+2024-08-25 15:05:19,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=100933.33333333333, ans=0.125
+2024-08-25 15:05:24,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.972e+02 2.271e+02 2.845e+02 5.404e+02, threshold=4.542e+02, percent-clipped=1.0
+2024-08-25 15:05:34,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.40 vs. limit=15.0
+2024-08-25 15:08:04,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=101040.0, ans=0.2
+2024-08-25 15:08:38,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=101040.0, ans=0.125
+2024-08-25 15:09:15,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=101093.33333333333, ans=0.2
+2024-08-25 15:09:36,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101093.33333333333, ans=0.125
+2024-08-25 15:10:18,896 INFO [train.py:1114] (1/4) Epoch 8, batch 1550, loss[loss=0.277, simple_loss=0.3226, pruned_loss=0.08466, ctc_loss=0.1551, over 19627.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3077, pruned_loss=0.07722, ctc_loss=0.1451, over 3846907.76 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0
+2024-08-25 15:12:28,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=101253.33333333333, ans=0.025
+2024-08-25 15:13:44,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=101360.0, ans=0.125
+2024-08-25 15:14:03,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=101413.33333333333, ans=0.0
+2024-08-25 15:14:11,856 INFO [train.py:1114] (1/4) Epoch 8, batch 1600, loss[loss=0.2509, simple_loss=0.3122, pruned_loss=0.06816, ctc_loss=0.1329, over 19828.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3076, pruned_loss=0.07727, ctc_loss=0.1454, over 3836701.43 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0
+2024-08-25 15:14:12,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=101466.66666666667, ans=0.125
+2024-08-25 15:14:21,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=101466.66666666667, ans=0.1
+2024-08-25 15:14:31,972 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.915e+02 2.222e+02 2.696e+02 4.640e+02, threshold=4.444e+02, percent-clipped=1.0
+2024-08-25 15:14:59,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101573.33333333333, ans=0.125
+2024-08-25 15:15:01,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=101573.33333333333, ans=0.05
+2024-08-25 15:15:07,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=101626.66666666667, ans=0.0
+2024-08-25 15:15:22,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=101680.0, ans=0.0
+2024-08-25 15:15:30,476 INFO [train.py:1114] (1/4) Epoch 8, batch 1650, loss[loss=0.2637, simple_loss=0.3161, pruned_loss=0.07666, ctc_loss=0.1448, over 19665.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3073, pruned_loss=0.07731, ctc_loss=0.1455, over 3832380.28 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0
+2024-08-25 15:16:01,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=101840.0, ans=0.025
+2024-08-25 15:16:14,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.64 vs. limit=10.0
+2024-08-25 15:16:24,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101946.66666666667, ans=0.125
+2024-08-25 15:16:28,203 INFO [train.py:1114] (1/4) Epoch 8, batch 1700, loss[loss=0.1987, simple_loss=0.2532, pruned_loss=0.05151, ctc_loss=0.1031, over 19678.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3063, pruned_loss=0.07623, ctc_loss=0.1435, over 3846502.58 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 16.0
+2024-08-25 15:16:29,532 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:16:40,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.920e+02 2.237e+02 2.711e+02 4.644e+02, threshold=4.474e+02, percent-clipped=2.0
+2024-08-25 15:16:57,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102106.66666666667, ans=0.1
+2024-08-25 15:17:27,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=102213.33333333333, ans=0.125
+2024-08-25 15:17:29,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=102213.33333333333, ans=0.04949747468305833
+2024-08-25 15:17:39,501 INFO [train.py:1114] (1/4) Epoch 8, batch 1750, loss[loss=0.2305, simple_loss=0.27, pruned_loss=0.06913, ctc_loss=0.1317, over 19665.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3062, pruned_loss=0.07639, ctc_loss=0.1436, over 3850806.49 frames. ], batch size: 45, lr: 1.82e-02, grad_scale: 16.0
+2024-08-25 15:18:05,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=102320.0, ans=0.125
+2024-08-25 15:18:10,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0
+2024-08-25 15:19:54,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102373.33333333333, ans=0.1
+2024-08-25 15:20:22,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=102480.0, ans=0.0
+2024-08-25 15:20:25,838 INFO [train.py:1114] (1/4) Epoch 8, batch 1800, loss[loss=0.2799, simple_loss=0.3252, pruned_loss=0.08406, ctc_loss=0.1664, over 19613.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3071, pruned_loss=0.07707, ctc_loss=0.1447, over 3852697.43 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 16.0
+2024-08-25 15:20:29,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102533.33333333333, ans=0.125
+2024-08-25 15:20:37,811 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.544e+02 1.874e+02 2.230e+02 2.859e+02 4.439e+02, threshold=4.460e+02, percent-clipped=0.0
+2024-08-25 15:23:31,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=102586.66666666667, ans=0.0
+2024-08-25 15:23:44,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102640.0, ans=0.125
+2024-08-25 15:24:50,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.18 vs. limit=12.0
+2024-08-25 15:26:49,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=102693.33333333333, ans=0.0
+2024-08-25 15:28:59,117 INFO [train.py:1114] (1/4) Epoch 8, batch 1850, loss[loss=0.2837, simple_loss=0.3303, pruned_loss=0.0869, ctc_loss=0.1583, over 19586.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3066, pruned_loss=0.07665, ctc_loss=0.144, over 3856138.23 frames. ], batch size: 57, lr: 1.81e-02, grad_scale: 16.0
+2024-08-25 15:29:15,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.29 vs. limit=15.0
+2024-08-25 15:29:30,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.70 vs. limit=12.0
+2024-08-25 15:29:34,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=102960.0, ans=0.025
+2024-08-25 15:29:37,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.08 vs. limit=22.5
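Each `train.py:1114` line reports two things: `loss[...over N frames.]` for the current batch, and `tot_loss[...over N frames.]`, a running average of the same metrics weighted by the number of frames seen. A minimal sketch of that bookkeeping follows; the exponential decay is an assumption made to match the way the logged frame total hovers around 3.8M rather than growing without bound:

    class FrameWeightedAverage:
        # Running frame-weighted average, mimicking the
        # `tot_loss[loss=..., over N frames.]` entries in the log.
        def __init__(self, decay: float = 0.999):
            self.total = 0.0    # weighted loss sum
            self.frames = 0.0   # weighted frame count (the logged "over N frames")
            self.decay = decay  # older batches fade out slowly (assumed)
        def update(self, batch_loss: float, batch_frames: float) -> float:
            self.total = self.total * self.decay + batch_loss * batch_frames
            self.frames = self.frames * self.decay + batch_frames
            return self.total / self.frames  # current tot_loss

Weighting by frames rather than by batches keeps long utterances from being underrepresented, which matters here because batch sizes in the log range from 45 to 140.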
+2024-08-25 15:30:32,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=103013.33333333333, ans=0.2
+2024-08-25 15:30:33,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=103013.33333333333, ans=0.125
+2024-08-25 15:32:36,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=103013.33333333333, ans=0.125
+2024-08-25 15:32:38,667 INFO [train.py:1114] (1/4) Epoch 8, batch 1900, loss[loss=0.2615, simple_loss=0.322, pruned_loss=0.07241, ctc_loss=0.1404, over 19658.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3066, pruned_loss=0.07634, ctc_loss=0.1435, over 3860755.12 frames. ], batch size: 59, lr: 1.81e-02, grad_scale: 16.0
+2024-08-25 15:32:40,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=15.0
+2024-08-25 15:32:52,960 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.872e+02 2.139e+02 2.618e+02 5.849e+02, threshold=4.279e+02, percent-clipped=4.0
+2024-08-25 15:33:09,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=103173.33333333333, ans=0.125
+2024-08-25 15:33:18,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=103226.66666666667, ans=0.1
+2024-08-25 15:33:18,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=103226.66666666667, ans=0.0
+2024-08-25 15:33:25,838 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:33:26,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=103280.0, ans=0.0
+2024-08-25 15:33:32,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103280.0, ans=0.125
+2024-08-25 15:33:37,635 INFO [train.py:1114] (1/4) Epoch 8, batch 1950, loss[loss=0.274, simple_loss=0.3115, pruned_loss=0.08684, ctc_loss=0.1571, over 19599.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3075, pruned_loss=0.07615, ctc_loss=0.1431, over 3870260.74 frames. ], batch size: 52, lr: 1.81e-02, grad_scale: 16.0
+2024-08-25 15:33:48,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103333.33333333333, ans=0.125
+2024-08-25 15:33:53,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.88 vs. limit=15.0
+2024-08-25 15:34:23,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.89 vs. limit=15.0
+2024-08-25 15:34:39,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103546.66666666667, ans=0.0
+2024-08-25 15:34:42,890 INFO [train.py:1114] (1/4) Epoch 8, batch 2000, loss[loss=0.2047, simple_loss=0.2581, pruned_loss=0.05481, ctc_loss=0.104, over 19638.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3077, pruned_loss=0.07637, ctc_loss=0.1434, over 3855069.17 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 32.0
+2024-08-25 15:34:55,660 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.835e+02 2.022e+02 2.450e+02 4.734e+02, threshold=4.043e+02, percent-clipped=1.0
+2024-08-25 15:35:22,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=103760.0, ans=0.0
+2024-08-25 15:35:38,626 INFO [train.py:1114] (1/4) Epoch 8, batch 2050, loss[loss=0.2293, simple_loss=0.2852, pruned_loss=0.06334, ctc_loss=0.1168, over 19708.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3071, pruned_loss=0.07648, ctc_loss=0.1436, over 3850588.02 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 32.0
+2024-08-25 15:35:41,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=103866.66666666667, ans=0.0
+2024-08-25 15:35:44,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=103866.66666666667, ans=0.125
+2024-08-25 15:35:48,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103920.0, ans=0.1
+2024-08-25 15:35:53,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=103920.0, ans=0.07
+2024-08-25 15:35:58,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103920.0, ans=0.125
+2024-08-25 15:36:29,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0
+2024-08-25 15:36:32,753 INFO [train.py:1114] (1/4) Epoch 8, batch 2100, loss[loss=0.2195, simple_loss=0.2864, pruned_loss=0.05501, ctc_loss=0.1065, over 19768.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3057, pruned_loss=0.0754, ctc_loss=0.1416, over 3856941.52 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 32.0
+2024-08-25 15:36:39,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104133.33333333333, ans=0.1
+2024-08-25 15:36:44,887 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 2.055e+02 2.348e+02 2.987e+02 4.948e+02, threshold=4.695e+02, percent-clipped=5.0
+2024-08-25 15:36:50,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.70 vs. limit=15.0
+2024-08-25 15:36:57,270 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:37:03,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104240.0, ans=0.0
+2024-08-25 15:37:22,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=104346.66666666667, ans=0.125
+2024-08-25 15:37:27,172 INFO [train.py:1114] (1/4) Epoch 8, batch 2150, loss[loss=0.2449, simple_loss=0.2962, pruned_loss=0.06997, ctc_loss=0.1344, over 19573.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3045, pruned_loss=0.07472, ctc_loss=0.1406, over 3867606.77 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 32.0
+2024-08-25 15:37:28,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=12.0
+2024-08-25 15:38:18,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=104613.33333333333, ans=0.0
+2024-08-25 15:38:21,073 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:38:23,247 INFO [train.py:1114] (1/4) Epoch 8, batch 2200, loss[loss=0.2547, simple_loss=0.3103, pruned_loss=0.07287, ctc_loss=0.1332, over 19584.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3045, pruned_loss=0.0749, ctc_loss=0.141, over 3866375.89 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 32.0
+2024-08-25 15:38:26,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.63 vs. limit=15.0
+2024-08-25 15:38:35,667 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.961e+02 2.280e+02 3.038e+02 5.675e+02, threshold=4.560e+02, percent-clipped=2.0
+2024-08-25 15:39:08,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=104880.0, ans=0.0
+2024-08-25 15:39:13,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=104880.0, ans=0.09899494936611666
+2024-08-25 15:39:19,052 INFO [train.py:1114] (1/4) Epoch 8, batch 2250, loss[loss=0.2414, simple_loss=0.2983, pruned_loss=0.06669, ctc_loss=0.1279, over 19622.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.305, pruned_loss=0.07518, ctc_loss=0.1412, over 3866484.83 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 32.0
+2024-08-25 15:39:33,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=104986.66666666667, ans=0.0
+2024-08-25 15:39:36,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=104986.66666666667, ans=0.0
+2024-08-25 15:39:44,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105040.0, ans=0.125
+2024-08-25 15:40:04,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=105146.66666666667, ans=0.2
+2024-08-25 15:40:14,489 INFO [train.py:1114] (1/4) Epoch 8, batch 2300, loss[loss=0.2074, simple_loss=0.2735, pruned_loss=0.05141, ctc_loss=0.09601, over 19506.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.304, pruned_loss=0.07509, ctc_loss=0.141, over 3861007.67 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 32.0
+2024-08-25 15:40:21,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=105200.0, ans=0.025
+2024-08-25 15:40:28,025 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.907e+02 2.167e+02 2.593e+02 4.976e+02, threshold=4.335e+02, percent-clipped=1.0
+2024-08-25 15:40:55,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=105360.0, ans=0.0
+2024-08-25 15:41:03,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0
+2024-08-25 15:41:11,091 INFO [train.py:1114] (1/4) Epoch 8, batch 2350, loss[loss=0.2948, simple_loss=0.3328, pruned_loss=0.09276, ctc_loss=0.1784, over 19670.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3041, pruned_loss=0.07519, ctc_loss=0.1411, over 3864515.36 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 32.0
+2024-08-25 15:41:23,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=105520.0, ans=0.0
+2024-08-25 15:41:32,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=105573.33333333333, ans=0.125
+2024-08-25 15:41:37,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=105573.33333333333, ans=0.025
+2024-08-25 15:41:37,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105573.33333333333, ans=0.1
+2024-08-25 15:42:06,002 INFO [train.py:1114] (1/4) Epoch 8, batch 2400, loss[loss=0.246, simple_loss=0.2989, pruned_loss=0.07061, ctc_loss=0.1299, over 19309.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.306, pruned_loss=0.07575, ctc_loss=0.1422, over 3859355.61 frames. ], batch size: 71, lr: 1.79e-02, grad_scale: 32.0
+2024-08-25 15:42:17,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=105786.66666666667, ans=0.0
+2024-08-25 15:42:18,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.983e+02 2.255e+02 2.870e+02 5.067e+02, threshold=4.510e+02, percent-clipped=2.0
+2024-08-25 15:42:21,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105786.66666666667, ans=0.125
+2024-08-25 15:42:28,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.54 vs. limit=22.5
+2024-08-25 15:42:52,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105946.66666666667, ans=0.125
+2024-08-25 15:42:54,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.99 vs. limit=10.0
+2024-08-25 15:43:01,711 INFO [train.py:1114] (1/4) Epoch 8, batch 2450, loss[loss=0.3259, simple_loss=0.3425, pruned_loss=0.1111, ctc_loss=0.2176, over 13681.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3115, pruned_loss=0.08031, ctc_loss=0.1511, over 3729889.19 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 32.0
+2024-08-25 15:43:07,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=106000.0, ans=0.125
+2024-08-25 15:43:09,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=106000.0, ans=0.015
+2024-08-25 15:43:11,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=106053.33333333333, ans=0.125
+2024-08-25 15:43:28,270 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.56 vs. limit=15.0
+2024-08-25 15:43:35,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0
+2024-08-25 15:43:37,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=106160.0, ans=0.0
+2024-08-25 15:43:38,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=106160.0, ans=0.0
+2024-08-25 15:43:38,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=106160.0, ans=0.125
+2024-08-25 15:43:40,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.07 vs. limit=15.0
+2024-08-25 15:44:31,277 INFO [train.py:1114] (1/4) Epoch 9, batch 0, loss[loss=0.2654, simple_loss=0.3057, pruned_loss=0.08225, ctc_loss=0.1513, over 19801.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3057, pruned_loss=0.08225, ctc_loss=0.1513, over 19801.00 frames. ], batch size: 49, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:44:31,278 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-25 15:44:49,823 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.21, simple_loss=0.2947, pruned_loss=0.04621, ctc_loss=0.08206, over 944034.00 frames.
+2024-08-25 15:44:49,823 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB
+2024-08-25 15:44:55,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=106208.0, ans=0.125
+2024-08-25 15:45:14,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.59 vs. limit=15.0
+2024-08-25 15:45:15,527 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.726e+02 2.154e+02 2.510e+02 2.953e+02 5.707e+02, threshold=5.019e+02, percent-clipped=2.0
+2024-08-25 15:46:09,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=106314.66666666667, ans=0.025
+2024-08-25 15:46:32,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=106421.33333333333, ans=0.1
+2024-08-25 15:46:36,875 INFO [train.py:1114] (1/4) Epoch 9, batch 50, loss[loss=0.2392, simple_loss=0.2857, pruned_loss=0.0697, ctc_loss=0.1332, over 19686.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3095, pruned_loss=0.07777, ctc_loss=0.1472, over 845285.85 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
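The `Computing validation loss` / `validation: loss=...` / `Maximum memory allocated` triple at the epoch-9 boundary above shows the held-out evaluation pass run at the start of each epoch. A hedged sketch of that step follows; the `model(**batch)` interface returning a loss and a frame count is an assumption for illustration, not the recipe's actual signature:

    import torch

    def compute_validation_loss(model, valid_loader, device) -> float:
        # Frame-weighted average loss over the validation set, plus the
        # peak-memory line, mirroring the train.py:1137/1146/1147 messages.
        model.eval()
        tot_loss, tot_frames = 0.0, 0.0
        with torch.no_grad():
            for batch in valid_loader:
                loss, num_frames = model(**batch)   # assumed interface
                tot_loss += loss.item() * num_frames
                tot_frames += num_frames
        model.train()
        peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        print(f"validation: loss={tot_loss / tot_frames:.4f}; "
              f"Maximum memory allocated so far is {peak_mb}MB")
        return tot_loss / tot_frames

Note also how the learning rate steps from 1.79e-02 to 1.69e-02 at this epoch boundary, consistent with a schedule that decays per epoch as well as per batch.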
+2024-08-25 15:46:41,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=106474.66666666667, ans=0.2
+2024-08-25 15:46:43,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106474.66666666667, ans=0.1
+2024-08-25 15:46:46,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=106474.66666666667, ans=0.2
+2024-08-25 15:46:55,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=106528.0, ans=0.025
+2024-08-25 15:46:59,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=106581.33333333333, ans=0.04949747468305833
+2024-08-25 15:47:02,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0
+2024-08-25 15:47:27,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=106634.66666666667, ans=0.125
+2024-08-25 15:47:33,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=106688.0, ans=0.0
+2024-08-25 15:47:44,289 INFO [train.py:1114] (1/4) Epoch 9, batch 100, loss[loss=0.2289, simple_loss=0.2801, pruned_loss=0.06495, ctc_loss=0.1194, over 19705.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3082, pruned_loss=0.07651, ctc_loss=0.1437, over 1499557.75 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:47:44,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106741.33333333333, ans=0.125
+2024-08-25 15:47:51,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=106741.33333333333, ans=0.2
+2024-08-25 15:48:09,484 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.842e+02 2.163e+02 2.785e+02 4.838e+02, threshold=4.326e+02, percent-clipped=0.0
+2024-08-25 15:48:10,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=106848.0, ans=0.2
+2024-08-25 15:48:41,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=107008.0, ans=0.125
+2024-08-25 15:48:41,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=107008.0, ans=0.2
+2024-08-25 15:48:42,144 INFO [train.py:1114] (1/4) Epoch 9, batch 150, loss[loss=0.2183, simple_loss=0.2755, pruned_loss=0.05778, ctc_loss=0.1136, over 19740.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3053, pruned_loss=0.07502, ctc_loss=0.1411, over 2028994.73 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:48:48,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=107008.0, ans=0.125
+2024-08-25 15:48:55,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=107061.33333333333, ans=0.0
+2024-08-25 15:49:02,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=107061.33333333333, ans=0.125
+2024-08-25 15:49:03,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107061.33333333333, ans=0.1
+2024-08-25 15:49:04,672 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=5.060e-03
+2024-08-25 15:49:04,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107061.33333333333, ans=0.125
+2024-08-25 15:49:18,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=107168.0, ans=0.125
+2024-08-25 15:49:23,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.27 vs. limit=15.0
+2024-08-25 15:49:34,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=107221.33333333333, ans=0.2
+2024-08-25 15:49:41,043 INFO [train.py:1114] (1/4) Epoch 9, batch 200, loss[loss=0.2935, simple_loss=0.3268, pruned_loss=0.0939, ctc_loss=0.1809, over 18126.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3029, pruned_loss=0.07325, ctc_loss=0.1378, over 2436455.84 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:49:43,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=107274.66666666667, ans=0.0
+2024-08-25 15:49:47,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=107274.66666666667, ans=0.025
+2024-08-25 15:50:00,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107328.0, ans=0.125
+2024-08-25 15:50:03,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=15.0
+2024-08-25 15:50:06,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.799e+02 2.039e+02 2.617e+02 5.282e+02, threshold=4.078e+02, percent-clipped=1.0
+2024-08-25 15:51:02,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.11 vs. limit=15.0
+2024-08-25 15:51:11,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=107488.0, ans=0.95
+2024-08-25 15:51:17,119 INFO [train.py:1114] (1/4) Epoch 9, batch 250, loss[loss=0.2388, simple_loss=0.298, pruned_loss=0.0661, ctc_loss=0.1186, over 19416.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3031, pruned_loss=0.0735, ctc_loss=0.138, over 2756445.89 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:51:18,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107541.33333333333, ans=0.1
+2024-08-25 15:51:39,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=107594.66666666667, ans=0.0
+2024-08-25 15:51:42,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=107648.0, ans=0.125
+2024-08-25 15:51:54,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=107701.33333333333, ans=0.125
+2024-08-25 15:51:54,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=107701.33333333333, ans=0.125
+2024-08-25 15:52:15,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=107754.66666666667, ans=0.0
+2024-08-25 15:52:18,771 INFO [train.py:1114] (1/4) Epoch 9, batch 300, loss[loss=0.2977, simple_loss=0.3352, pruned_loss=0.0955, ctc_loss=0.1732, over 19512.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3032, pruned_loss=0.07355, ctc_loss=0.138, over 3000901.66 frames. ], batch size: 61, lr: 1.68e-02, grad_scale: 16.0
+2024-08-25 15:52:24,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=107808.0, ans=0.05
+2024-08-25 15:52:24,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=107808.0, ans=0.0
+2024-08-25 15:52:26,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.05 vs. limit=10.0
+2024-08-25 15:52:31,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=107861.33333333333, ans=0.0
+2024-08-25 15:52:32,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107861.33333333333, ans=0.125
+2024-08-25 15:52:38,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125
+2024-08-25 15:52:40,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=107861.33333333333, ans=0.0
+2024-08-25 15:52:47,052 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.324e+02 1.831e+02 2.248e+02 2.885e+02 5.251e+02, threshold=4.495e+02, percent-clipped=2.0
+2024-08-25 15:52:50,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0
+2024-08-25 15:52:57,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=107968.0, ans=0.0
+2024-08-25 15:53:02,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=107968.0, ans=0.5
+2024-08-25 15:53:18,334 INFO [train.py:1114] (1/4) Epoch 9, batch 350, loss[loss=0.2022, simple_loss=0.2609, pruned_loss=0.05194, ctc_loss=0.09918, over 19739.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3038, pruned_loss=0.074, ctc_loss=0.1389, over 3191307.58 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0
+2024-08-25 15:53:18,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=108074.66666666667, ans=0.0
+2024-08-25 15:53:26,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=108074.66666666667, ans=0.07
+2024-08-25 15:53:39,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108128.0, ans=0.125
+2024-08-25 15:53:54,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=108234.66666666667, ans=0.0
+2024-08-25 15:53:56,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=108234.66666666667, ans=0.2
+2024-08-25 15:53:56,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=108234.66666666667, ans=0.125
+2024-08-25 15:54:04,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=108288.0, ans=0.0
+2024-08-25 15:54:14,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.55 vs. limit=12.0
+2024-08-25 15:54:14,907 INFO [train.py:1114] (1/4) Epoch 9, batch 400, loss[loss=0.2737, simple_loss=0.3192, pruned_loss=0.08289, ctc_loss=0.156, over 19488.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3034, pruned_loss=0.07378, ctc_loss=0.1387, over 3343094.27 frames. ], batch size: 54, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:54:43,459 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 2.039e+02 2.514e+02 3.062e+02 4.428e+02, threshold=5.028e+02, percent-clipped=0.0
+2024-08-25 15:54:43,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108448.0, ans=0.0
+2024-08-25 15:55:10,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=108554.66666666667, ans=0.2
+2024-08-25 15:55:16,267 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:55:18,509 INFO [train.py:1114] (1/4) Epoch 9, batch 450, loss[loss=0.2339, simple_loss=0.2964, pruned_loss=0.06145, ctc_loss=0.1211, over 19607.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.303, pruned_loss=0.07341, ctc_loss=0.1377, over 3449872.69 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 15:55:38,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108661.33333333333, ans=0.125
+2024-08-25 15:58:58,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=108821.33333333333, ans=0.125
+2024-08-25 15:59:00,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108821.33333333333, ans=0.125
+2024-08-25 15:59:07,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=108821.33333333333, ans=0.0
+2024-08-25 15:59:11,154 INFO [train.py:1114] (1/4) Epoch 9, batch 500, loss[loss=0.2624, simple_loss=0.3172, pruned_loss=0.07408, ctc_loss=0.1488, over 19651.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3017, pruned_loss=0.07239, ctc_loss=0.136, over 3546527.31 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 15:59:14,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=108874.66666666667, ans=0.025
+2024-08-25 15:59:20,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=108874.66666666667, ans=0.0
+2024-08-25 15:59:30,022 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 15:59:37,505 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.839e+02 2.298e+02 3.023e+02 4.931e+02, threshold=4.596e+02, percent-clipped=0.0
+2024-08-25 15:59:42,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=108981.33333333333, ans=0.125
+2024-08-25 15:59:46,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109034.66666666667, ans=0.1
+2024-08-25 15:59:48,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0
+2024-08-25 16:00:08,629 INFO [train.py:1114] (1/4) Epoch 9, batch 550, loss[loss=0.2878, simple_loss=0.3341, pruned_loss=0.08796, ctc_loss=0.164, over 19302.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3019, pruned_loss=0.07262, ctc_loss=0.1364, over 3609589.52 frames. ], batch size: 71, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:00:15,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.64 vs. limit=22.5
+2024-08-25 16:00:32,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.90 vs. limit=15.0
+2024-08-25 16:00:42,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=109248.0, ans=22.5
+2024-08-25 16:00:44,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=109301.33333333333, ans=0.2
+2024-08-25 16:00:47,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0
+2024-08-25 16:01:12,449 INFO [train.py:1114] (1/4) Epoch 9, batch 600, loss[loss=0.2614, simple_loss=0.3091, pruned_loss=0.07841, ctc_loss=0.1425, over 19431.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3017, pruned_loss=0.07246, ctc_loss=0.1362, over 3666994.55 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:01:29,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.63 vs. limit=15.0
+2024-08-25 16:01:44,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.91 vs. limit=15.0
+2024-08-25 16:01:51,491 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.960e+02 2.208e+02 2.721e+02 5.490e+02, threshold=4.416e+02, percent-clipped=2.0
+2024-08-25 16:01:52,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=109514.66666666667, ans=0.0
+2024-08-25 16:02:24,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=109568.0, ans=0.125
+2024-08-25 16:02:27,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=109568.0, ans=0.025
+2024-08-25 16:02:37,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.34 vs. limit=15.0
+2024-08-25 16:02:47,616 INFO [train.py:1114] (1/4) Epoch 9, batch 650, loss[loss=0.2303, simple_loss=0.2919, pruned_loss=0.06099, ctc_loss=0.117, over 19762.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3011, pruned_loss=0.07211, ctc_loss=0.1357, over 3717055.50 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:02:48,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=109674.66666666667, ans=0.0
+2024-08-25 16:03:47,849 INFO [train.py:1114] (1/4) Epoch 9, batch 700, loss[loss=0.2317, simple_loss=0.2908, pruned_loss=0.06299, ctc_loss=0.1163, over 19718.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3019, pruned_loss=0.07255, ctc_loss=0.1365, over 3748739.61 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:03:50,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0
+2024-08-25 16:03:53,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=109941.33333333333, ans=0.125
+2024-08-25 16:04:03,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109994.66666666667, ans=0.1
+2024-08-25 16:04:10,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.12 vs. limit=22.5
+2024-08-25 16:04:14,378 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.641e+02 1.949e+02 2.382e+02 2.859e+02 4.618e+02, threshold=4.764e+02, percent-clipped=1.0
+2024-08-25 16:04:22,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110101.33333333333, ans=0.0
+2024-08-25 16:04:37,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.00 vs. limit=22.5
+2024-08-25 16:04:41,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=110154.66666666667, ans=0.125
+2024-08-25 16:04:44,748 INFO [train.py:1114] (1/4) Epoch 9, batch 750, loss[loss=0.2462, simple_loss=0.3002, pruned_loss=0.07074, ctc_loss=0.1265, over 19491.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3006, pruned_loss=0.07191, ctc_loss=0.1351, over 3774634.43 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:04:59,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110261.33333333333, ans=0.1
+2024-08-25 16:05:20,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110314.66666666667, ans=0.1
+2024-08-25 16:05:23,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.86 vs. limit=15.0
+2024-08-25 16:05:31,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=110368.0, ans=0.125
+2024-08-25 16:05:44,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=110421.33333333333, ans=0.025
+2024-08-25 16:05:47,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0
+2024-08-25 16:05:48,070 INFO [train.py:1114] (1/4) Epoch 9, batch 800, loss[loss=0.2709, simple_loss=0.2978, pruned_loss=0.08998, ctc_loss=0.1601, over 19420.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3003, pruned_loss=0.07183, ctc_loss=0.1349, over 3795830.93 frames. ], batch size: 48, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:06:00,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=110528.0, ans=0.125
+2024-08-25 16:06:11,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=110581.33333333333, ans=0.05
+2024-08-25 16:06:14,966 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.861e+02 2.104e+02 2.558e+02 4.618e+02, threshold=4.207e+02, percent-clipped=0.0
+2024-08-25 16:06:28,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110634.66666666667, ans=0.125
+2024-08-25 16:06:28,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.06 vs. limit=22.5
limit=22.5 +2024-08-25 16:06:31,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=110634.66666666667, ans=0.025 +2024-08-25 16:06:47,179 INFO [train.py:1114] (1/4) Epoch 9, batch 850, loss[loss=0.2594, simple_loss=0.3096, pruned_loss=0.07626, ctc_loss=0.1416, over 19651.00 frames. ], tot_loss[loss=0.25, simple_loss=0.301, pruned_loss=0.07228, ctc_loss=0.136, over 3815143.33 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0 +2024-08-25 16:06:50,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=110741.33333333333, ans=0.0 +2024-08-25 16:07:08,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110794.66666666667, ans=0.1 +2024-08-25 16:07:09,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110848.0, ans=0.125 +2024-08-25 16:07:21,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=110901.33333333333, ans=0.125 +2024-08-25 16:08:42,724 INFO [train.py:1114] (1/4) Epoch 9, batch 900, loss[loss=0.2403, simple_loss=0.2827, pruned_loss=0.07241, ctc_loss=0.1327, over 19821.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3011, pruned_loss=0.07271, ctc_loss=0.1366, over 3820006.16 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-25 16:09:12,335 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.982e+02 2.328e+02 2.784e+02 5.806e+02, threshold=4.657e+02, percent-clipped=1.0 +2024-08-25 16:09:47,298 INFO [train.py:1114] (1/4) Epoch 9, batch 950, loss[loss=0.2206, simple_loss=0.2814, pruned_loss=0.05783, ctc_loss=0.1105, over 19494.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3014, pruned_loss=0.07302, ctc_loss=0.1374, over 3822352.50 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-25 16:10:14,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111381.33333333333, ans=0.1 +2024-08-25 16:10:34,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=111488.0, ans=0.0 +2024-08-25 16:10:38,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111488.0, ans=0.125 +2024-08-25 16:10:43,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111488.0, ans=0.1 +2024-08-25 16:10:45,137 INFO [train.py:1114] (1/4) Epoch 9, batch 1000, loss[loss=0.2708, simple_loss=0.3142, pruned_loss=0.08277, ctc_loss=0.1546, over 19843.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3027, pruned_loss=0.07379, ctc_loss=0.1385, over 3816887.10 frames. 
], batch size: 52, lr: 1.66e-02, grad_scale: 32.0 +2024-08-25 16:11:09,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111648.0, ans=0.125 +2024-08-25 16:11:13,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.864e+02 2.156e+02 2.793e+02 4.751e+02, threshold=4.311e+02, percent-clipped=1.0 +2024-08-25 16:11:38,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111754.66666666667, ans=0.125 +2024-08-25 16:11:45,640 INFO [train.py:1114] (1/4) Epoch 9, batch 1050, loss[loss=0.2487, simple_loss=0.3096, pruned_loss=0.06659, ctc_loss=0.1368, over 19826.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3026, pruned_loss=0.07415, ctc_loss=0.1395, over 3823253.42 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0 +2024-08-25 16:11:53,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=111808.0, ans=0.0 +2024-08-25 16:12:02,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=111861.33333333333, ans=0.2 +2024-08-25 16:12:17,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=111914.66666666667, ans=0.0 +2024-08-25 16:12:23,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=111968.0, ans=0.0 +2024-08-25 16:12:23,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=111968.0, ans=0.025 +2024-08-25 16:12:26,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111968.0, ans=0.1 +2024-08-25 16:12:51,803 INFO [train.py:1114] (1/4) Epoch 9, batch 1100, loss[loss=0.2276, simple_loss=0.2856, pruned_loss=0.06084, ctc_loss=0.1197, over 19600.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3016, pruned_loss=0.07332, ctc_loss=0.1381, over 3830916.43 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 32.0 +2024-08-25 16:13:03,151 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.88 vs. limit=22.5 +2024-08-25 16:13:19,826 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 1.820e+02 2.090e+02 2.645e+02 4.523e+02, threshold=4.179e+02, percent-clipped=2.0 +2024-08-25 16:13:37,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=112234.66666666667, ans=0.2 +2024-08-25 16:13:48,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=112288.0, ans=0.025 +2024-08-25 16:13:50,954 INFO [train.py:1114] (1/4) Epoch 9, batch 1150, loss[loss=0.2117, simple_loss=0.2806, pruned_loss=0.05205, ctc_loss=0.09674, over 19561.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3014, pruned_loss=0.07318, ctc_loss=0.1379, over 3829625.20 frames. 
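
Each `INFO [scaling.py:214]` line prints the current value (`ans=...`) of a ScheduledFloat, a scalar hyperparameter whose value is a function of `batch_count`. A minimal sketch, assuming piecewise-linear interpolation between `(batch_count, value)` breakpoints, which is how icefall's ScheduledFloat behaves:

```python
import bisect

class ScheduledFloatSketch:
    """Piecewise-linear schedule over batch_count; a minimal stand-in
    for icefall's ScheduledFloat (scaling.py). Values are held constant
    outside the breakpoint range."""
    def __init__(self, *points):
        self.xs = [p[0] for p in points]
        self.ys = [p[1] for p in points]

    def __call__(self, batch_count):
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# e.g. a skip-rate decaying from 0.5 to 0.0 over the first 20k batches
# (breakpoints are illustrative): past 20k batches it stays at 0.0,
# which is why the conv_skip_rate lines above all print ans=0.0.
skip_rate = ScheduledFloatSketch((0.0, 0.5), (20000.0, 0.0))
print(skip_rate(109514.67))
```
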
], batch size: 52, lr: 1.65e-02, grad_scale: 32.0 +2024-08-25 16:13:57,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=112341.33333333333, ans=0.2 +2024-08-25 16:14:12,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=112394.66666666667, ans=0.0 +2024-08-25 16:14:19,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112448.0, ans=0.125 +2024-08-25 16:14:42,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=112554.66666666667, ans=0.125 +2024-08-25 16:14:44,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=112554.66666666667, ans=0.0 +2024-08-25 16:14:51,113 INFO [train.py:1114] (1/4) Epoch 9, batch 1200, loss[loss=0.2432, simple_loss=0.3057, pruned_loss=0.06484, ctc_loss=0.1275, over 19844.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3017, pruned_loss=0.07308, ctc_loss=0.1378, over 3825238.18 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0 +2024-08-25 16:15:53,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=112714.66666666667, ans=22.5 +2024-08-25 16:15:59,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112714.66666666667, ans=0.0 +2024-08-25 16:16:01,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112714.66666666667, ans=0.125 +2024-08-25 16:16:05,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.491e+02 1.875e+02 2.166e+02 2.598e+02 4.323e+02, threshold=4.331e+02, percent-clipped=2.0 +2024-08-25 16:16:17,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=112768.0, ans=0.125 +2024-08-25 16:16:18,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=112768.0, ans=0.125 +2024-08-25 16:16:21,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=112768.0, ans=0.09899494936611666 +2024-08-25 16:16:22,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=112768.0, ans=0.0 +2024-08-25 16:16:39,520 INFO [train.py:1114] (1/4) Epoch 9, batch 1250, loss[loss=0.2428, simple_loss=0.3024, pruned_loss=0.06553, ctc_loss=0.1302, over 19502.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3014, pruned_loss=0.07238, ctc_loss=0.1365, over 3844066.73 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0 +2024-08-25 16:16:43,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.01 vs. limit=15.0 +2024-08-25 16:17:34,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113088.0, ans=0.1 +2024-08-25 16:17:40,919 INFO [train.py:1114] (1/4) Epoch 9, batch 1300, loss[loss=0.2666, simple_loss=0.3186, pruned_loss=0.07834, ctc_loss=0.1451, over 18755.00 frames. 
], tot_loss[loss=0.2492, simple_loss=0.3003, pruned_loss=0.07192, ctc_loss=0.1356, over 3847285.77 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0 +2024-08-25 16:17:48,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113141.33333333333, ans=0.125 +2024-08-25 16:17:55,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.81 vs. limit=10.0 +2024-08-25 16:18:04,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0 +2024-08-25 16:18:05,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=12.0 +2024-08-25 16:18:08,518 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 1.959e+02 2.315e+02 2.984e+02 4.812e+02, threshold=4.630e+02, percent-clipped=1.0 +2024-08-25 16:18:13,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113248.0, ans=0.125 +2024-08-25 16:18:15,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.68 vs. limit=10.0 +2024-08-25 16:18:30,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=113354.66666666667, ans=0.2 +2024-08-25 16:18:37,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=113354.66666666667, ans=0.125 +2024-08-25 16:18:39,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=113354.66666666667, ans=0.125 +2024-08-25 16:18:42,118 INFO [train.py:1114] (1/4) Epoch 9, batch 1350, loss[loss=0.2575, simple_loss=0.3113, pruned_loss=0.07477, ctc_loss=0.1354, over 19776.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3002, pruned_loss=0.07177, ctc_loss=0.1352, over 3858500.78 frames. ], batch size: 54, lr: 1.64e-02, grad_scale: 32.0 +2024-08-25 16:18:49,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=113408.0, ans=0.0 +2024-08-25 16:19:00,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.90 vs. limit=22.5 +2024-08-25 16:19:03,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113461.33333333333, ans=0.1 +2024-08-25 16:19:15,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.37 vs. limit=15.0 +2024-08-25 16:19:21,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=113568.0, ans=0.2 +2024-08-25 16:19:28,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=113621.33333333333, ans=0.2 +2024-08-25 16:19:32,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.25 vs. 
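
The `INFO [scaling.py:1024]` Whitening lines compare a per-module statistic against a whitening limit; the module only applies its decorrelating penalty once `metric` exceeds `limit`. The exact statistic lives in icefall's scaling.py; the sketch below assumes a metric of the same general shape, the eigenvalue-spread ratio of the feature covariance, which equals 1.0 for perfectly white features and grows as the spectrum spreads out:

```python
import torch

def whitening_metric(feats: torch.Tensor) -> torch.Tensor:
    """One plausible version of the 'metric' in the Whitening log lines:
    mean(eigvals**2) / mean(eigvals)**2 of the feature covariance.
    (Assumption: the real scaling.py metric is of this general form.)"""
    x = feats.reshape(-1, feats.shape[-1])
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)      # covariance is real symmetric
    return (eigs ** 2).mean() / eigs.mean() ** 2

x = torch.randn(1000, 512) @ torch.randn(512, 512)  # deliberately correlated features
print(float(whitening_metric(x)))  # >> 1, logged as 'metric=... vs. limit=...'
```
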
limit=15.0 +2024-08-25 16:19:38,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=113674.66666666667, ans=0.2 +2024-08-25 16:19:40,005 INFO [train.py:1114] (1/4) Epoch 9, batch 1400, loss[loss=0.2267, simple_loss=0.2724, pruned_loss=0.06722, ctc_loss=0.1163, over 19683.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3, pruned_loss=0.07185, ctc_loss=0.1353, over 3865413.39 frames. ], batch size: 46, lr: 1.64e-02, grad_scale: 32.0 +2024-08-25 16:19:49,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=113674.66666666667, ans=0.0 +2024-08-25 16:20:07,559 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.860e+02 2.127e+02 2.545e+02 4.134e+02, threshold=4.253e+02, percent-clipped=0.0 +2024-08-25 16:20:09,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=113781.33333333333, ans=0.05 +2024-08-25 16:20:27,260 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 16:20:32,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=113888.0, ans=0.5 +2024-08-25 16:20:43,001 INFO [train.py:1114] (1/4) Epoch 9, batch 1450, loss[loss=0.2587, simple_loss=0.3196, pruned_loss=0.07284, ctc_loss=0.1306, over 19683.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3009, pruned_loss=0.07212, ctc_loss=0.1355, over 3862231.39 frames. ], batch size: 63, lr: 1.64e-02, grad_scale: 32.0 +2024-08-25 16:21:02,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=113994.66666666667, ans=0.125 +2024-08-25 16:21:25,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=114101.33333333333, ans=0.125 +2024-08-25 16:21:26,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=114101.33333333333, ans=0.125 +2024-08-25 16:21:45,884 INFO [train.py:1114] (1/4) Epoch 9, batch 1500, loss[loss=0.237, simple_loss=0.3039, pruned_loss=0.06191, ctc_loss=0.1157, over 19577.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3017, pruned_loss=0.0722, ctc_loss=0.1356, over 3862086.48 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 32.0 +2024-08-25 16:21:46,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=114208.0, ans=0.5 +2024-08-25 16:22:15,428 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.928e+02 2.180e+02 2.740e+02 4.350e+02, threshold=4.360e+02, percent-clipped=2.0 +2024-08-25 16:22:37,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114421.33333333333, ans=0.125 +2024-08-25 16:22:42,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=114421.33333333333, ans=0.025 +2024-08-25 16:22:45,650 INFO [train.py:1114] (1/4) Epoch 9, batch 1550, loss[loss=0.2578, simple_loss=0.3118, pruned_loss=0.07365, ctc_loss=0.1412, over 19601.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3021, pruned_loss=0.07263, ctc_loss=0.1364, over 3845598.20 frames. 
], batch size: 60, lr: 1.64e-02, grad_scale: 16.0 +2024-08-25 16:22:47,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114474.66666666667, ans=0.1 +2024-08-25 16:22:53,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=114474.66666666667, ans=0.025 +2024-08-25 16:23:03,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.65 vs. limit=22.5 +2024-08-25 16:23:06,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=114528.0, ans=0.0 +2024-08-25 16:23:08,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.38 vs. limit=15.0 +2024-08-25 16:23:21,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=114634.66666666667, ans=0.2 +2024-08-25 16:23:27,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-25 16:23:40,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=114688.0, ans=0.125 +2024-08-25 16:23:47,226 INFO [train.py:1114] (1/4) Epoch 9, batch 1600, loss[loss=0.2453, simple_loss=0.3024, pruned_loss=0.06816, ctc_loss=0.1299, over 19845.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.302, pruned_loss=0.07258, ctc_loss=0.1365, over 3835446.47 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0 +2024-08-25 16:23:57,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=114741.33333333333, ans=0.0 +2024-08-25 16:24:04,220 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.13 vs. limit=15.0 +2024-08-25 16:24:06,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=15.0 +2024-08-25 16:24:16,807 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.930e+02 2.504e+02 3.084e+02 5.673e+02, threshold=5.009e+02, percent-clipped=4.0 +2024-08-25 16:24:27,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114901.33333333333, ans=0.125 +2024-08-25 16:24:27,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.77 vs. limit=10.0 +2024-08-25 16:24:32,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-08-25 16:24:46,351 INFO [train.py:1114] (1/4) Epoch 9, batch 1650, loss[loss=0.2183, simple_loss=0.2869, pruned_loss=0.05438, ctc_loss=0.1021, over 19634.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3025, pruned_loss=0.07308, ctc_loss=0.1374, over 3831847.39 frames. 
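
The `grad_scale` field alternating between 32.0 and 16.0 in the progress lines is dynamic loss scaling from mixed-precision training: the scaler halves the scale when scaled gradients overflow and grows it back after a run of clean steps. A minimal sketch of the standard PyTorch mechanism; `model`, `criterion`, and `batch` are placeholder names, and the actual training script wires this up with more around it:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

def train_step(model, batch, optimizer, criterion):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = criterion(model(batch["inputs"]), batch["targets"])
    scaler.scale(loss).backward()   # scale loss so fp16 grads stay in range
    scaler.step(optimizer)          # unscales; skips the step if grads overflowed
    scaler.update()                 # halves the scale on overflow, regrows it later
    return loss.detach(), scaler.get_scale()   # the grad_scale value seen in the log
```
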
], batch size: 59, lr: 1.63e-02, grad_scale: 16.0 +2024-08-25 16:25:08,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=115114.66666666667, ans=0.2 +2024-08-25 16:25:20,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=115114.66666666667, ans=0.2 +2024-08-25 16:25:22,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.74 vs. limit=15.0 +2024-08-25 16:25:29,480 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 16:25:30,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115168.0, ans=0.1 +2024-08-25 16:25:39,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115221.33333333333, ans=0.125 +2024-08-25 16:25:40,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=115221.33333333333, ans=0.95 +2024-08-25 16:25:41,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=115221.33333333333, ans=0.09899494936611666 +2024-08-25 16:25:45,147 INFO [train.py:1114] (1/4) Epoch 9, batch 1700, loss[loss=0.2428, simple_loss=0.2858, pruned_loss=0.07353, ctc_loss=0.1317, over 19663.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3017, pruned_loss=0.07225, ctc_loss=0.1358, over 3846475.32 frames. ], batch size: 46, lr: 1.63e-02, grad_scale: 16.0 +2024-08-25 16:25:59,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115328.0, ans=0.1 +2024-08-25 16:26:05,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.70 vs. limit=22.5 +2024-08-25 16:26:10,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115381.33333333333, ans=0.1 +2024-08-25 16:26:11,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115381.33333333333, ans=0.125 +2024-08-25 16:26:13,053 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.773e+02 1.969e+02 2.283e+02 4.673e+02, threshold=3.938e+02, percent-clipped=0.0 +2024-08-25 16:26:25,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=115434.66666666667, ans=0.125 +2024-08-25 16:26:41,722 INFO [train.py:1114] (1/4) Epoch 9, batch 1750, loss[loss=0.19, simple_loss=0.2488, pruned_loss=0.04818, ctc_loss=0.08713, over 19655.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3008, pruned_loss=0.07202, ctc_loss=0.1353, over 3851275.88 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0 +2024-08-25 16:26:47,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.03 vs. 
limit=10.0 +2024-08-25 16:28:05,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=115754.66666666667, ans=0.0 +2024-08-25 16:28:06,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=15.0 +2024-08-25 16:28:12,437 INFO [train.py:1114] (1/4) Epoch 9, batch 1800, loss[loss=0.2444, simple_loss=0.3069, pruned_loss=0.0664, ctc_loss=0.1231, over 19616.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3013, pruned_loss=0.07239, ctc_loss=0.1359, over 3855165.41 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 16.0 +2024-08-25 16:28:23,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=115808.0, ans=0.025 +2024-08-25 16:28:46,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=115914.66666666667, ans=0.125 +2024-08-25 16:28:48,999 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.488e+02 1.840e+02 2.097e+02 2.711e+02 4.220e+02, threshold=4.193e+02, percent-clipped=2.0 +2024-08-25 16:29:16,782 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 16:29:17,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116021.33333333333, ans=0.1 +2024-08-25 16:29:25,117 INFO [train.py:1114] (1/4) Epoch 9, batch 1850, loss[loss=0.2755, simple_loss=0.3211, pruned_loss=0.08298, ctc_loss=0.16, over 19587.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3005, pruned_loss=0.07193, ctc_loss=0.135, over 3857559.76 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 16.0 +2024-08-25 16:29:29,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=116074.66666666667, ans=0.125 +2024-08-25 16:29:35,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=116128.0, ans=0.125 +2024-08-25 16:29:42,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116128.0, ans=0.125 +2024-08-25 16:30:18,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=116128.0, ans=0.125 +2024-08-25 16:30:20,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=22.5 +2024-08-25 16:30:37,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=116234.66666666667, ans=0.125 +2024-08-25 16:30:42,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=116234.66666666667, ans=0.025 +2024-08-25 16:30:52,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.51 vs. limit=10.0 +2024-08-25 16:30:56,366 INFO [train.py:1114] (1/4) Epoch 9, batch 1900, loss[loss=0.2302, simple_loss=0.2956, pruned_loss=0.05962, ctc_loss=0.114, over 19648.00 frames. 
], tot_loss[loss=0.2494, simple_loss=0.3011, pruned_loss=0.07188, ctc_loss=0.1348, over 3862589.14 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0 +2024-08-25 16:30:58,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=116341.33333333333, ans=0.2 +2024-08-25 16:30:59,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116341.33333333333, ans=0.125 +2024-08-25 16:32:00,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=116341.33333333333, ans=0.025 +2024-08-25 16:32:18,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0 +2024-08-25 16:32:21,937 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.810e+02 2.075e+02 2.674e+02 4.757e+02, threshold=4.150e+02, percent-clipped=3.0 +2024-08-25 16:32:28,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=116501.33333333333, ans=0.0 +2024-08-25 16:33:06,104 INFO [train.py:1114] (1/4) Epoch 9, batch 1950, loss[loss=0.2129, simple_loss=0.2767, pruned_loss=0.05359, ctc_loss=0.1046, over 19604.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3022, pruned_loss=0.07224, ctc_loss=0.1356, over 3871366.78 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0 +2024-08-25 16:33:15,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=116608.0, ans=10.0 +2024-08-25 16:33:19,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=116661.33333333333, ans=0.125 +2024-08-25 16:33:23,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=116661.33333333333, ans=0.125 +2024-08-25 16:33:29,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=116714.66666666667, ans=0.125 +2024-08-25 16:33:32,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.26 vs. limit=12.0 +2024-08-25 16:33:46,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=116768.0, ans=0.2 +2024-08-25 16:33:46,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116768.0, ans=0.125 +2024-08-25 16:33:58,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116821.33333333333, ans=0.1 +2024-08-25 16:34:02,732 INFO [train.py:1114] (1/4) Epoch 9, batch 2000, loss[loss=0.2129, simple_loss=0.2698, pruned_loss=0.05707, ctc_loss=0.105, over 19628.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3026, pruned_loss=0.07235, ctc_loss=0.1359, over 3855299.30 frames. 
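
The `train.py:1114` progress lines have a fixed shape, with a per-batch loss in `loss[...]` and a running average in `tot_loss[...]`; judging by the components, the recipe combines pruned-transducer `simple_loss`/`pruned_loss` terms with a `ctc_loss`. A hypothetical helper for pulling the running averages out of a log file (one entry per line) for plotting:

```python
import re

PAT = re.compile(
    r"Epoch (\d+), batch (\d+), .*?"
    r"tot_loss\[loss=([\d.]+), simple_loss=([\d.]+), pruned_loss=([\d.]+), "
    r"ctc_loss=([\d.]+), over ([\d.]+) frames\. \], batch size: \d+, lr: ([\d.e-]+)"
)

def parse_progress(path):
    """Yield (epoch, batch, tot_loss, simple, pruned, ctc, lr) tuples
    from a training log like the one above (hypothetical helper)."""
    with open(path) as f:
        for line in f:
            m = PAT.search(line)
            if m:
                e, b, loss, simple, pruned, ctc, _frames, lr = m.groups()
                yield (int(e), int(b), float(loss), float(simple),
                       float(pruned), float(ctc), float(lr))
```
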
], batch size: 45, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:34:25,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=116981.33333333333, ans=0.0 +2024-08-25 16:34:30,973 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.555e+02 1.787e+02 2.122e+02 2.673e+02 5.196e+02, threshold=4.245e+02, percent-clipped=10.0 +2024-08-25 16:34:34,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116981.33333333333, ans=0.1 +2024-08-25 16:34:39,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=117034.66666666667, ans=0.0 +2024-08-25 16:34:43,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.70 vs. limit=10.0 +2024-08-25 16:34:46,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117034.66666666667, ans=0.1 +2024-08-25 16:34:47,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117088.0, ans=0.0 +2024-08-25 16:34:47,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=117088.0, ans=0.04949747468305833 +2024-08-25 16:34:52,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=117088.0, ans=0.2 +2024-08-25 16:34:59,590 INFO [train.py:1114] (1/4) Epoch 9, batch 2050, loss[loss=0.2023, simple_loss=0.2571, pruned_loss=0.05367, ctc_loss=0.1005, over 19729.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3019, pruned_loss=0.07227, ctc_loss=0.1359, over 3851579.32 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:35:39,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=117301.33333333333, ans=0.125 +2024-08-25 16:35:43,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.11 vs. limit=22.5 +2024-08-25 16:36:57,817 INFO [train.py:1114] (1/4) Epoch 9, batch 2100, loss[loss=0.2674, simple_loss=0.3169, pruned_loss=0.07894, ctc_loss=0.1498, over 19762.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3013, pruned_loss=0.072, ctc_loss=0.1353, over 3857799.30 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:37:00,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=117408.0, ans=0.125 +2024-08-25 16:37:38,955 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.824e+02 2.012e+02 2.446e+02 4.504e+02, threshold=4.025e+02, percent-clipped=2.0 +2024-08-25 16:37:43,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=10.00 vs. limit=15.0 +2024-08-25 16:37:43,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117568.0, ans=0.125 +2024-08-25 16:37:47,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. 
limit=15.0 +2024-08-25 16:37:50,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=117568.0, ans=0.125 +2024-08-25 16:37:57,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=117621.33333333333, ans=0.125 +2024-08-25 16:38:01,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117621.33333333333, ans=0.125 +2024-08-25 16:38:02,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=117621.33333333333, ans=0.025 +2024-08-25 16:38:04,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=117621.33333333333, ans=10.0 +2024-08-25 16:38:05,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=15.0 +2024-08-25 16:38:06,800 INFO [train.py:1114] (1/4) Epoch 9, batch 2150, loss[loss=0.2221, simple_loss=0.2786, pruned_loss=0.06116, ctc_loss=0.1085, over 19567.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.2998, pruned_loss=0.07105, ctc_loss=0.1334, over 3867762.36 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:38:12,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=117674.66666666667, ans=0.125 +2024-08-25 16:38:21,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=117728.0, ans=0.125 +2024-08-25 16:38:24,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=117728.0, ans=22.5 +2024-08-25 16:38:25,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=117728.0, ans=0.2 +2024-08-25 16:38:32,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.66 vs. limit=10.0 +2024-08-25 16:39:02,677 INFO [train.py:1114] (1/4) Epoch 9, batch 2200, loss[loss=0.2625, simple_loss=0.3215, pruned_loss=0.07289, ctc_loss=0.144, over 19601.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3, pruned_loss=0.07126, ctc_loss=0.1336, over 3866602.78 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:39:10,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=117941.33333333333, ans=0.5 +2024-08-25 16:39:19,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117994.66666666667, ans=0.125 +2024-08-25 16:39:24,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. 
limit=15.0 +2024-08-25 16:39:30,922 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.840e+02 2.263e+02 2.882e+02 6.553e+02, threshold=4.526e+02, percent-clipped=9.0 +2024-08-25 16:39:30,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=118048.0, ans=0.125 +2024-08-25 16:39:35,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0 +2024-08-25 16:39:44,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118101.33333333333, ans=0.125 +2024-08-25 16:39:48,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=118154.66666666667, ans=0.125 +2024-08-25 16:39:49,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=118154.66666666667, ans=0.2 +2024-08-25 16:39:59,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.49 vs. limit=15.0 +2024-08-25 16:39:59,973 INFO [train.py:1114] (1/4) Epoch 9, batch 2250, loss[loss=0.2663, simple_loss=0.3161, pruned_loss=0.0779, ctc_loss=0.1521, over 19614.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.2998, pruned_loss=0.07104, ctc_loss=0.1334, over 3867023.90 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 16.0 +2024-08-25 16:40:00,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.77 vs. limit=22.5 +2024-08-25 16:40:12,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118261.33333333333, ans=0.125 +2024-08-25 16:40:20,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=15.0 +2024-08-25 16:40:26,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0 +2024-08-25 16:40:30,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=118314.66666666667, ans=10.0 +2024-08-25 16:40:46,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=118421.33333333333, ans=0.0 +2024-08-25 16:40:53,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=118474.66666666667, ans=0.125 +2024-08-25 16:40:54,819 INFO [train.py:1114] (1/4) Epoch 9, batch 2300, loss[loss=0.236, simple_loss=0.2865, pruned_loss=0.06772, ctc_loss=0.1255, over 19496.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.2986, pruned_loss=0.07092, ctc_loss=0.1332, over 3860668.95 frames. 
], batch size: 49, lr: 1.61e-02, grad_scale: 16.0 +2024-08-25 16:41:24,912 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 1.864e+02 2.265e+02 3.023e+02 5.230e+02, threshold=4.530e+02, percent-clipped=2.0 +2024-08-25 16:41:42,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=118688.0, ans=0.2 +2024-08-25 16:41:51,067 INFO [train.py:1114] (1/4) Epoch 9, batch 2350, loss[loss=0.2722, simple_loss=0.3179, pruned_loss=0.083, ctc_loss=0.1515, over 19669.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.2986, pruned_loss=0.07112, ctc_loss=0.1335, over 3863748.78 frames. ], batch size: 63, lr: 1.61e-02, grad_scale: 16.0 +2024-08-25 16:41:59,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118741.33333333333, ans=0.125 +2024-08-25 16:42:00,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=118741.33333333333, ans=0.125 +2024-08-25 16:42:13,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.24 vs. limit=22.5 +2024-08-25 16:42:50,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=118954.66666666667, ans=0.0 +2024-08-25 16:43:01,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=119008.0, ans=0.1 +2024-08-25 16:43:01,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=119008.0, ans=0.125 +2024-08-25 16:43:02,772 INFO [train.py:1114] (1/4) Epoch 9, batch 2400, loss[loss=0.2995, simple_loss=0.3378, pruned_loss=0.0943, ctc_loss=0.1814, over 19240.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.301, pruned_loss=0.07205, ctc_loss=0.1353, over 3857864.72 frames. ], batch size: 71, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:43:08,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.07 vs. limit=10.0 +2024-08-25 16:43:32,522 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.930e+02 2.301e+02 2.799e+02 4.768e+02, threshold=4.601e+02, percent-clipped=1.0 +2024-08-25 16:43:33,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=119114.66666666667, ans=0.025 +2024-08-25 16:43:40,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=119168.0, ans=0.2 +2024-08-25 16:43:59,357 INFO [train.py:1114] (1/4) Epoch 9, batch 2450, loss[loss=0.3035, simple_loss=0.3287, pruned_loss=0.101, ctc_loss=0.1905, over 13701.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3052, pruned_loss=0.07535, ctc_loss=0.1417, over 3731728.38 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:44:02,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=15.0 +2024-08-25 16:44:15,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. 
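
The printed learning rate decays slowly within an epoch (1.67e-02 down to 1.61e-02 across epoch 9) and drops again at each epoch boundary (epoch 10 starts at 1.53e-02 just below), the signature of a schedule with both a batch term and an epoch term. A sketch assuming icefall's Eden schedule is in use; `base_lr`, `lr_batches`, and `lr_epochs` are illustrative placeholders, not values recovered from this log:

```python
def eden_lr(base_lr, step, epoch, lr_batches=5000.0, lr_epochs=6.0):
    """Eden-style schedule (assumption: this run uses icefall's Eden).
    The lr decays smoothly with the batch index and steps down further
    at each epoch boundary, matching the per-epoch jumps in the log."""
    batch_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Illustrative only; constants are placeholders, so the printed values
# will not reproduce the exact lr in the log above.
for epoch, step in [(9, 116000), (10, 119430)]:
    print(epoch, step, eden_lr(0.045, step, epoch))
```
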
limit=6.0 +2024-08-25 16:44:24,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.30 vs. limit=22.5 +2024-08-25 16:44:27,733 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.96 vs. limit=15.0 +2024-08-25 16:45:25,699 INFO [train.py:1114] (1/4) Epoch 10, batch 0, loss[loss=0.2229, simple_loss=0.2741, pruned_loss=0.06217, ctc_loss=0.1185, over 19818.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2741, pruned_loss=0.06217, ctc_loss=0.1185, over 19818.00 frames. ], batch size: 49, lr: 1.53e-02, grad_scale: 32.0 +2024-08-25 16:45:25,699 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 16:45:33,814 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.6490, 4.1308, 2.4382, 1.9018], device='cuda:1') +2024-08-25 16:46:37,100 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.2041, simple_loss=0.2903, pruned_loss=0.04356, ctc_loss=0.07708, over 944034.00 frames. +2024-08-25 16:46:37,101 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 16:46:37,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.05 vs. limit=22.5 +2024-08-25 16:46:41,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=119482.66666666667, ans=0.0 +2024-08-25 16:47:12,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.82 vs. limit=15.0 +2024-08-25 16:47:17,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=119642.66666666667, ans=0.025 +2024-08-25 16:47:46,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 1.955e+02 2.116e+02 2.362e+02 4.652e+02, threshold=4.231e+02, percent-clipped=1.0 +2024-08-25 16:48:28,303 INFO [train.py:1114] (1/4) Epoch 10, batch 50, loss[loss=0.2105, simple_loss=0.274, pruned_loss=0.05325, ctc_loss=0.1014, over 19716.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3022, pruned_loss=0.0727, ctc_loss=0.1375, over 843671.78 frames. 
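
At the start of epoch 10 the script pauses to compute a validation loss, logs an `attn_weights_entropy` tensor per attention module (the `zipformer.py:1858` line, apparently one value per head), and reports peak GPU memory, presumably via `torch.cuda.max_memory_allocated()`. A sketch of the entropy diagnostic, assuming row-normalised attention weights of shape `(num_heads, batch, tgt_len, src_len)`:

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Mean entropy (nats) of the attention distributions, per head.
    Rows of `attn` are assumed to sum to 1 over the source dimension;
    low entropy means sharply peaked attention, high entropy means
    near-uniform attention."""
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)   # entropy of each row
    return ent.mean(dim=(1, 2))                        # average over batch, positions

attn = torch.softmax(torch.randn(4, 2, 10, 10), dim=-1)
print(attn_weights_entropy(attn))   # four per-head values, like the log's tensor([...])
```
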
], batch size: 47, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:48:57,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=119749.33333333333, ans=0.125 +2024-08-25 16:50:42,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=119909.33333333333, ans=0.025 +2024-08-25 16:50:49,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=119909.33333333333, ans=0.125 +2024-08-25 16:51:13,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=119962.66666666667, ans=0.0 +2024-08-25 16:51:24,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119962.66666666667, ans=0.125 +2024-08-25 16:51:25,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=119962.66666666667, ans=0.0 +2024-08-25 16:52:34,142 INFO [train.py:1114] (1/4) Epoch 10, batch 100, loss[loss=0.2142, simple_loss=0.2754, pruned_loss=0.05569, ctc_loss=0.1039, over 19715.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3037, pruned_loss=0.07275, ctc_loss=0.1374, over 1499199.10 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:53:09,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=120069.33333333333, ans=0.025 +2024-08-25 16:53:36,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=120176.0, ans=0.5 +2024-08-25 16:53:42,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=120176.0, ans=0.125 +2024-08-25 16:53:46,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.27 vs. limit=22.5 +2024-08-25 16:53:47,835 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.460e+02 1.798e+02 2.253e+02 2.860e+02 4.134e+02, threshold=4.507e+02, percent-clipped=0.0 +2024-08-25 16:54:30,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.43 vs. limit=15.0 +2024-08-25 16:54:33,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120229.33333333333, ans=0.125 +2024-08-25 16:54:47,470 INFO [train.py:1114] (1/4) Epoch 10, batch 150, loss[loss=0.2182, simple_loss=0.2682, pruned_loss=0.06154, ctc_loss=0.1127, over 19685.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3003, pruned_loss=0.07088, ctc_loss=0.1335, over 2028038.53 frames. 
], batch size: 47, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:54:49,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-25 16:54:56,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=120282.66666666667, ans=0.0 +2024-08-25 16:54:58,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-25 16:54:58,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=120282.66666666667, ans=0.2 +2024-08-25 16:55:23,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120389.33333333333, ans=0.125 +2024-08-25 16:55:40,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.05 vs. limit=15.0 +2024-08-25 16:55:52,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=120496.0, ans=0.125 +2024-08-25 16:56:01,811 INFO [train.py:1114] (1/4) Epoch 10, batch 200, loss[loss=0.2619, simple_loss=0.316, pruned_loss=0.07478, ctc_loss=0.1453, over 18236.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.2986, pruned_loss=0.06962, ctc_loss=0.1311, over 2435602.70 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:56:11,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=120549.33333333333, ans=0.125 +2024-08-25 16:56:20,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120602.66666666667, ans=0.0 +2024-08-25 16:57:32,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-25 16:57:54,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120656.0, ans=0.125 +2024-08-25 16:57:56,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120656.0, ans=0.1 +2024-08-25 16:57:58,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.28 vs. limit=22.5 +2024-08-25 16:57:59,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.24 vs. limit=15.0 +2024-08-25 16:58:07,754 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.824e+02 2.064e+02 2.548e+02 6.143e+02, threshold=4.128e+02, percent-clipped=2.0 +2024-08-25 16:58:30,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=120816.0, ans=0.125 +2024-08-25 16:58:32,999 INFO [train.py:1114] (1/4) Epoch 10, batch 250, loss[loss=0.278, simple_loss=0.32, pruned_loss=0.08522, ctc_loss=0.1638, over 19382.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.2987, pruned_loss=0.06987, ctc_loss=0.1314, over 2755365.69 frames. 
], batch size: 67, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 16:58:34,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120816.0, ans=0.125
+2024-08-25 16:59:04,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=120869.33333333333, ans=0.0
+2024-08-25 16:59:05,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=120869.33333333333, ans=0.125
+2024-08-25 16:59:42,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120922.66666666667, ans=0.125
+2024-08-25 17:00:08,737 INFO [train.py:1114] (1/4) Epoch 10, batch 300, loss[loss=0.2551, simple_loss=0.3098, pruned_loss=0.07359, ctc_loss=0.1329, over 19524.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2972, pruned_loss=0.06877, ctc_loss=0.1295, over 3000066.51 frames. ], batch size: 61, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 17:00:10,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0
+2024-08-25 17:00:24,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=121136.0, ans=0.125
+2024-08-25 17:00:42,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=121189.33333333333, ans=0.125
+2024-08-25 17:00:43,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=121189.33333333333, ans=0.0
+2024-08-25 17:00:52,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=121242.66666666667, ans=0.0
+2024-08-25 17:01:01,167 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.908e+02 2.186e+02 2.769e+02 4.118e+02, threshold=4.372e+02, percent-clipped=0.0
+2024-08-25 17:01:01,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=121242.66666666667, ans=0.0
+2024-08-25 17:01:10,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=121296.0, ans=0.2
+2024-08-25 17:02:40,277 INFO [train.py:1114] (1/4) Epoch 10, batch 350, loss[loss=0.237, simple_loss=0.2795, pruned_loss=0.07122, ctc_loss=0.1303, over 19801.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2975, pruned_loss=0.06914, ctc_loss=0.1299, over 3189437.63 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 32.0
+2024-08-25 17:02:51,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=121402.66666666667, ans=0.2
+2024-08-25 17:02:55,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=121402.66666666667, ans=0.2
+2024-08-25 17:03:22,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=121509.33333333333, ans=0.09899494936611666
+2024-08-25 17:03:40,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=121562.66666666667, ans=0.2
+2024-08-25 17:03:42,423 INFO [train.py:1114] (1/4) Epoch 10, batch 400, loss[loss=0.2558, simple_loss=0.3063, pruned_loss=0.07464, ctc_loss=0.1398, over 19497.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2966, pruned_loss=0.06872, ctc_loss=0.1291, over 3341910.85 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:03:42,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=121616.0, ans=0.0
+2024-08-25 17:03:44,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=121616.0, ans=0.025
+2024-08-25 17:03:49,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=121616.0, ans=0.125
+2024-08-25 17:04:00,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121669.33333333333, ans=0.0
+2024-08-25 17:04:24,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121776.0, ans=0.125
+2024-08-25 17:04:30,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=121776.0, ans=0.2
+2024-08-25 17:04:33,757 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.409e+02 1.874e+02 2.151e+02 2.761e+02 4.102e+02, threshold=4.302e+02, percent-clipped=0.0
+2024-08-25 17:04:35,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=121776.0, ans=0.0
+2024-08-25 17:04:50,467 INFO [train.py:1114] (1/4) Epoch 10, batch 450, loss[loss=0.22, simple_loss=0.2902, pruned_loss=0.05444, ctc_loss=0.1021, over 19622.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.2965, pruned_loss=0.06871, ctc_loss=0.1289, over 3450128.20 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:07:20,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=121936.0, ans=0.2
+2024-08-25 17:08:26,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=122042.66666666667, ans=0.125
+2024-08-25 17:09:02,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=122096.0, ans=0.0
+2024-08-25 17:09:04,039 INFO [train.py:1114] (1/4) Epoch 10, batch 500, loss[loss=0.2701, simple_loss=0.32, pruned_loss=0.08149, ctc_loss=0.1432, over 19674.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2965, pruned_loss=0.06884, ctc_loss=0.1292, over 3545386.11 frames. ], batch size: 63, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:09:21,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122149.33333333333, ans=0.1
+2024-08-25 17:09:33,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=122202.66666666667, ans=0.125
+2024-08-25 17:09:57,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=122256.0, ans=0.0
+2024-08-25 17:10:35,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=122309.33333333333, ans=0.5
+2024-08-25 17:10:36,234 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.797e+02 2.290e+02 2.870e+02 3.920e+02, threshold=4.579e+02, percent-clipped=0.0
+2024-08-25 17:10:37,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=122309.33333333333, ans=0.0
+2024-08-25 17:10:46,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=122362.66666666667, ans=0.125
+2024-08-25 17:10:51,436 INFO [train.py:1114] (1/4) Epoch 10, batch 550, loss[loss=0.2679, simple_loss=0.3077, pruned_loss=0.0832, ctc_loss=0.1541, over 19232.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2967, pruned_loss=0.069, ctc_loss=0.1293, over 3607483.00 frames. ], batch size: 71, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:11:02,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=122416.0, ans=0.125
+2024-08-25 17:11:55,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=122469.33333333333, ans=0.2
+2024-08-25 17:11:57,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=122469.33333333333, ans=0.2
+2024-08-25 17:13:39,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=122522.66666666667, ans=0.125
+2024-08-25 17:13:50,632 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 17:13:59,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=122576.0, ans=0.125
+2024-08-25 17:14:10,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=122629.33333333333, ans=0.125
+2024-08-25 17:14:11,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=122629.33333333333, ans=0.0
+2024-08-25 17:14:20,771 INFO [train.py:1114] (1/4) Epoch 10, batch 600, loss[loss=0.2591, simple_loss=0.3094, pruned_loss=0.07619, ctc_loss=0.141, over 19386.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2971, pruned_loss=0.06919, ctc_loss=0.1296, over 3665325.29 frames. ], batch size: 67, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:14:32,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122736.0, ans=0.1
+2024-08-25 17:14:33,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0
+2024-08-25 17:14:50,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.41 vs. limit=6.0
+2024-08-25 17:14:59,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=122842.66666666667, ans=0.125
+2024-08-25 17:15:07,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=122842.66666666667, ans=0.0
+2024-08-25 17:15:08,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.815e+02 2.061e+02 2.496e+02 4.365e+02, threshold=4.122e+02, percent-clipped=0.0
+2024-08-25 17:15:20,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0
+2024-08-25 17:15:23,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=122949.33333333333, ans=0.125
+2024-08-25 17:15:24,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.39 vs. limit=10.0
+2024-08-25 17:15:24,790 INFO [train.py:1114] (1/4) Epoch 10, batch 650, loss[loss=0.2314, simple_loss=0.2901, pruned_loss=0.06289, ctc_loss=0.1171, over 19773.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2966, pruned_loss=0.06878, ctc_loss=0.1292, over 3716018.99 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:15:25,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=122949.33333333333, ans=0.2
+2024-08-25 17:15:36,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=123002.66666666667, ans=0.125
+2024-08-25 17:15:41,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=123002.66666666667, ans=0.035
+2024-08-25 17:15:42,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0
+2024-08-25 17:15:43,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=123002.66666666667, ans=0.125
+2024-08-25 17:15:45,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123002.66666666667, ans=0.1
+2024-08-25 17:15:54,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. limit=10.0
+2024-08-25 17:15:59,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.53 vs. limit=6.0
+2024-08-25 17:16:00,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.07 vs. limit=15.0
+2024-08-25 17:16:34,659 INFO [train.py:1114] (1/4) Epoch 10, batch 700, loss[loss=0.2321, simple_loss=0.2855, pruned_loss=0.06492, ctc_loss=0.1221, over 19726.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2966, pruned_loss=0.06843, ctc_loss=0.1285, over 3747424.30 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 32.0
+2024-08-25 17:17:56,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=123322.66666666667, ans=0.125
+2024-08-25 17:18:04,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=123376.0, ans=0.125
+2024-08-25 17:18:09,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=123376.0, ans=0.2
+2024-08-25 17:18:13,483 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 1.934e+02 2.276e+02 3.026e+02 5.626e+02, threshold=4.552e+02, percent-clipped=3.0
+2024-08-25 17:18:17,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=123429.33333333333, ans=0.0
+2024-08-25 17:18:28,248 INFO [train.py:1114] (1/4) Epoch 10, batch 750, loss[loss=0.2353, simple_loss=0.2904, pruned_loss=0.06547, ctc_loss=0.1229, over 19519.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2958, pruned_loss=0.068, ctc_loss=0.1276, over 3773650.40 frames. ], batch size: 54, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:18:31,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=123482.66666666667, ans=0.125
+2024-08-25 17:18:35,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=123482.66666666667, ans=0.025
+2024-08-25 17:18:54,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=123589.33333333333, ans=0.125
+2024-08-25 17:19:09,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=123642.66666666667, ans=0.125
+2024-08-25 17:19:18,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=123642.66666666667, ans=0.125
+2024-08-25 17:19:28,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=123696.0, ans=0.0
+2024-08-25 17:19:32,713 INFO [train.py:1114] (1/4) Epoch 10, batch 800, loss[loss=0.2082, simple_loss=0.2606, pruned_loss=0.05684, ctc_loss=0.1051, over 19818.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2953, pruned_loss=0.06783, ctc_loss=0.1272, over 3795006.20 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:20:00,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=123802.66666666667, ans=0.125
+2024-08-25 17:20:07,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=123802.66666666667, ans=0.2
+2024-08-25 17:20:19,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=123856.0, ans=0.125
+2024-08-25 17:20:21,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. limit=15.0
+2024-08-25 17:20:24,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=123909.33333333333, ans=0.125
+2024-08-25 17:20:33,020 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.887e+02 2.136e+02 2.736e+02 3.984e+02, threshold=4.273e+02, percent-clipped=0.0
+2024-08-25 17:20:42,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123962.66666666667, ans=0.125
+2024-08-25 17:20:47,943 INFO [train.py:1114] (1/4) Epoch 10, batch 850, loss[loss=0.2518, simple_loss=0.3066, pruned_loss=0.07221, ctc_loss=0.1314, over 19645.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.2954, pruned_loss=0.06787, ctc_loss=0.1273, over 3814424.14 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:20:49,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=124016.0, ans=0.1
+2024-08-25 17:20:59,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=124069.33333333333, ans=0.0
+2024-08-25 17:21:04,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=124069.33333333333, ans=0.125
+2024-08-25 17:21:20,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=124122.66666666667, ans=0.0
+2024-08-25 17:22:12,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.24 vs. limit=15.0
+2024-08-25 17:22:13,184 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 17:22:26,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=124229.33333333333, ans=0.125
+2024-08-25 17:22:28,575 INFO [train.py:1114] (1/4) Epoch 10, batch 900, loss[loss=0.2091, simple_loss=0.2682, pruned_loss=0.05457, ctc_loss=0.1021, over 19420.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2954, pruned_loss=0.06815, ctc_loss=0.1277, over 3819291.77 frames. ], batch size: 48, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:22:31,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124282.66666666667, ans=0.1
+2024-08-25 17:22:51,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=124389.33333333333, ans=0.0
+2024-08-25 17:23:13,945 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.854e+02 2.167e+02 2.763e+02 5.395e+02, threshold=4.333e+02, percent-clipped=2.0
+2024-08-25 17:23:30,292 INFO [train.py:1114] (1/4) Epoch 10, batch 950, loss[loss=0.2282, simple_loss=0.28, pruned_loss=0.0645, ctc_loss=0.1188, over 19481.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2956, pruned_loss=0.06811, ctc_loss=0.128, over 3822031.47 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:23:30,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=124549.33333333333, ans=0.2
+2024-08-25 17:23:45,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=124602.66666666667, ans=0.125
+2024-08-25 17:24:09,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=124709.33333333333, ans=0.0
+2024-08-25 17:24:12,316 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0
+2024-08-25 17:24:21,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.15 vs. limit=6.0
+2024-08-25 17:24:32,280 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=5.051e-03
+2024-08-25 17:24:34,415 INFO [train.py:1114] (1/4) Epoch 10, batch 1000, loss[loss=0.2264, simple_loss=0.2794, pruned_loss=0.06252, ctc_loss=0.121, over 19854.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2964, pruned_loss=0.06878, ctc_loss=0.1294, over 3817081.22 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:24:44,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=124816.0, ans=0.0
+2024-08-25 17:25:06,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124922.66666666667, ans=0.1
+2024-08-25 17:25:11,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.28 vs. limit=15.0
+2024-08-25 17:25:18,038 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.505e+02 1.797e+02 2.069e+02 2.553e+02 4.130e+02, threshold=4.138e+02, percent-clipped=0.0
+2024-08-25 17:25:20,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125029.33333333333, ans=0.125
+2024-08-25 17:25:24,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=125029.33333333333, ans=0.125
+2024-08-25 17:25:28,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=125029.33333333333, ans=0.1
+2024-08-25 17:25:33,285 INFO [train.py:1114] (1/4) Epoch 10, batch 1050, loss[loss=0.2983, simple_loss=0.3448, pruned_loss=0.09171, ctc_loss=0.1711, over 19837.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.296, pruned_loss=0.06877, ctc_loss=0.1294, over 3823053.40 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 32.0
+2024-08-25 17:26:12,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=125242.66666666667, ans=0.125
+2024-08-25 17:26:15,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=125242.66666666667, ans=0.125
+2024-08-25 17:26:17,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=125242.66666666667, ans=0.0
+2024-08-25 17:26:24,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=125296.0, ans=0.2
+2024-08-25 17:26:32,047 INFO [train.py:1114] (1/4) Epoch 10, batch 1100, loss[loss=0.2461, simple_loss=0.2965, pruned_loss=0.07154, ctc_loss=0.1316, over 19580.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2952, pruned_loss=0.0683, ctc_loss=0.1285, over 3829407.05 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:26:45,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=125349.33333333333, ans=0.0
+2024-08-25 17:26:46,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=125402.66666666667, ans=0.2
+2024-08-25 17:26:49,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.95 vs. limit=15.0
+2024-08-25 17:27:18,171 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.787e+02 2.060e+02 2.560e+02 4.808e+02, threshold=4.120e+02, percent-clipped=1.0
+2024-08-25 17:27:18,406 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 17:27:32,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125616.0, ans=0.125
+2024-08-25 17:27:33,340 INFO [train.py:1114] (1/4) Epoch 10, batch 1150, loss[loss=0.2285, simple_loss=0.2857, pruned_loss=0.06201, ctc_loss=0.1181, over 19601.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.296, pruned_loss=0.06892, ctc_loss=0.1295, over 3829021.94 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:28:06,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.68 vs. limit=10.0
+2024-08-25 17:28:08,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=125722.66666666667, ans=0.125
+2024-08-25 17:28:21,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=125776.0, ans=0.025
+2024-08-25 17:28:25,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=125776.0, ans=0.0
+2024-08-25 17:28:27,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.34 vs. limit=15.0
+2024-08-25 17:28:44,405 INFO [train.py:1114] (1/4) Epoch 10, batch 1200, loss[loss=0.2733, simple_loss=0.322, pruned_loss=0.08155, ctc_loss=0.1539, over 19845.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2969, pruned_loss=0.06934, ctc_loss=0.1303, over 3823979.26 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:29:01,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=125936.0, ans=0.0
+2024-08-25 17:29:11,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=125989.33333333333, ans=0.125
+2024-08-25 17:29:12,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. limit=15.0
+2024-08-25 17:29:16,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=125989.33333333333, ans=0.0
+2024-08-25 17:29:22,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=126042.66666666667, ans=0.2
+2024-08-25 17:29:29,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=126042.66666666667, ans=0.125
+2024-08-25 17:29:30,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.823e+02 2.047e+02 2.358e+02 4.051e+02, threshold=4.094e+02, percent-clipped=0.0
+2024-08-25 17:29:32,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=126096.0, ans=0.125
+2024-08-25 17:29:45,829 INFO [train.py:1114] (1/4) Epoch 10, batch 1250, loss[loss=0.2401, simple_loss=0.3031, pruned_loss=0.06496, ctc_loss=0.118, over 19506.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.2968, pruned_loss=0.06858, ctc_loss=0.1291, over 3841312.74 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:30:16,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=126256.0, ans=15.0
+2024-08-25 17:30:33,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=126256.0, ans=0.2
+2024-08-25 17:30:44,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=126309.33333333333, ans=0.0
+2024-08-25 17:30:59,772 INFO [train.py:1114] (1/4) Epoch 10, batch 1300, loss[loss=0.2753, simple_loss=0.3181, pruned_loss=0.08284, ctc_loss=0.1672, over 18932.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2959, pruned_loss=0.06819, ctc_loss=0.1285, over 3844627.38 frames. ], batch size: 76, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:31:18,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126416.0, ans=0.1
+2024-08-25 17:31:35,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=126416.0, ans=0.125
+2024-08-25 17:31:36,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=126469.33333333333, ans=0.2
+2024-08-25 17:31:37,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126469.33333333333, ans=0.125
+2024-08-25 17:32:13,113 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.900e+02 2.303e+02 2.970e+02 5.096e+02, threshold=4.606e+02, percent-clipped=7.0
+2024-08-25 17:32:28,195 INFO [train.py:1114] (1/4) Epoch 10, batch 1350, loss[loss=0.2751, simple_loss=0.3231, pruned_loss=0.08164, ctc_loss=0.1597, over 19782.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2956, pruned_loss=0.06801, ctc_loss=0.1279, over 3855221.60 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:32:38,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=126736.0, ans=0.125
+2024-08-25 17:32:54,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=126789.33333333333, ans=0.0
+2024-08-25 17:32:57,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=126789.33333333333, ans=0.2
+2024-08-25 17:33:06,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.94 vs. limit=15.0
+2024-08-25 17:33:08,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.70 vs. limit=15.0
+2024-08-25 17:33:13,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.39 vs. limit=15.0
+2024-08-25 17:33:26,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.41 vs. limit=15.0
+2024-08-25 17:33:30,484 INFO [train.py:1114] (1/4) Epoch 10, batch 1400, loss[loss=0.2103, simple_loss=0.2589, pruned_loss=0.05847, ctc_loss=0.1119, over 19658.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2951, pruned_loss=0.0678, ctc_loss=0.1275, over 3861749.60 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:33:35,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=126949.33333333333, ans=0.125
+2024-08-25 17:34:03,914 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.94 vs. limit=15.0
+2024-08-25 17:34:33,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=127109.33333333333, ans=0.0
+2024-08-25 17:34:34,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127109.33333333333, ans=0.1
+2024-08-25 17:34:42,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.856e+02 2.167e+02 2.631e+02 4.500e+02, threshold=4.335e+02, percent-clipped=0.0
+2024-08-25 17:34:43,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=127109.33333333333, ans=0.2
+2024-08-25 17:34:55,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=127162.66666666667, ans=0.025
+2024-08-25 17:34:56,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0
+2024-08-25 17:35:02,134 INFO [train.py:1114] (1/4) Epoch 10, batch 1450, loss[loss=0.2321, simple_loss=0.3034, pruned_loss=0.05878, ctc_loss=0.108, over 19707.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2959, pruned_loss=0.06806, ctc_loss=0.1278, over 3860715.75 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:35:14,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=127269.33333333333, ans=0.5
+2024-08-25 17:35:20,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=127269.33333333333, ans=0.125
+2024-08-25 17:35:26,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127322.66666666667, ans=0.125
+2024-08-25 17:35:33,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.35 vs. limit=15.0
+2024-08-25 17:35:38,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127376.0, ans=0.1
+2024-08-25 17:36:02,111 INFO [train.py:1114] (1/4) Epoch 10, batch 1500, loss[loss=0.2482, simple_loss=0.3115, pruned_loss=0.06753, ctc_loss=0.1246, over 19599.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2965, pruned_loss=0.0683, ctc_loss=0.1283, over 3861030.36 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:36:07,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=127482.66666666667, ans=0.025
+2024-08-25 17:36:07,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.15 vs. limit=15.0
+2024-08-25 17:36:11,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=127482.66666666667, ans=0.0
+2024-08-25 17:36:13,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=127536.0, ans=0.025
+2024-08-25 17:36:17,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=127536.0, ans=0.0
+2024-08-25 17:36:23,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=127536.0, ans=0.125
+2024-08-25 17:36:55,677 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.462e+02 1.877e+02 2.186e+02 2.626e+02 4.478e+02, threshold=4.372e+02, percent-clipped=1.0
+2024-08-25 17:37:19,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127696.0, ans=0.1
+2024-08-25 17:37:24,240 INFO [train.py:1114] (1/4) Epoch 10, batch 1550, loss[loss=0.2678, simple_loss=0.3177, pruned_loss=0.07883, ctc_loss=0.1506, over 19591.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2966, pruned_loss=0.06844, ctc_loss=0.1289, over 3846715.28 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0
+2024-08-25 17:37:31,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=127749.33333333333, ans=0.2
+2024-08-25 17:37:36,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.01 vs. limit=22.5
+2024-08-25 17:37:40,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=127802.66666666667, ans=0.95
+2024-08-25 17:37:47,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=127856.0, ans=0.04949747468305833
+2024-08-25 17:37:55,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=127856.0, ans=0.2
+2024-08-25 17:38:02,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=127856.0, ans=0.0
+2024-08-25 17:38:54,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=127909.33333333333, ans=0.0
+2024-08-25 17:39:39,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=127909.33333333333, ans=0.2
+2024-08-25 17:39:39,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=127909.33333333333, ans=0.2
+2024-08-25 17:39:51,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.29 vs. limit=22.5
+2024-08-25 17:41:06,797 INFO [train.py:1114] (1/4) Epoch 10, batch 1600, loss[loss=0.2487, simple_loss=0.3028, pruned_loss=0.07028, ctc_loss=0.1349, over 19843.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2972, pruned_loss=0.06905, ctc_loss=0.1302, over 3837124.92 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:42:54,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=128122.66666666667, ans=0.2
+2024-08-25 17:42:55,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=128122.66666666667, ans=0.0
+2024-08-25 17:42:57,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.55 vs. limit=5.0
+2024-08-25 17:43:24,241 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.528e+02 1.849e+02 2.080e+02 2.733e+02 5.175e+02, threshold=4.161e+02, percent-clipped=4.0
+2024-08-25 17:43:25,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.68 vs. limit=15.0
+2024-08-25 17:43:52,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=128229.33333333333, ans=0.125
+2024-08-25 17:43:55,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=128229.33333333333, ans=0.0
+2024-08-25 17:44:00,797 INFO [train.py:1114] (1/4) Epoch 10, batch 1650, loss[loss=0.235, simple_loss=0.3015, pruned_loss=0.06035, ctc_loss=0.1193, over 19672.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2969, pruned_loss=0.06887, ctc_loss=0.1298, over 3833661.31 frames. ], batch size: 59, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:44:01,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=128282.66666666667, ans=0.0
+2024-08-25 17:44:13,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.29 vs. limit=12.0
+2024-08-25 17:44:37,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=128389.33333333333, ans=0.025
+2024-08-25 17:45:12,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=128496.0, ans=0.2
+2024-08-25 17:45:21,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128549.33333333333, ans=0.125
+2024-08-25 17:45:46,331 INFO [train.py:1114] (1/4) Epoch 10, batch 1700, loss[loss=0.2091, simple_loss=0.2636, pruned_loss=0.05581, ctc_loss=0.1073, over 19655.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2962, pruned_loss=0.06837, ctc_loss=0.1286, over 3847897.17 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:45:51,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=128549.33333333333, ans=0.125
+2024-08-25 17:46:39,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=128602.66666666667, ans=0.125
+2024-08-25 17:47:00,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=128656.0, ans=0.0
+2024-08-25 17:47:11,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.773e+02 2.059e+02 2.527e+02 4.467e+02, threshold=4.119e+02, percent-clipped=1.0
+2024-08-25 17:48:11,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=128816.0, ans=0.025
+2024-08-25 17:48:12,458 INFO [train.py:1114] (1/4) Epoch 10, batch 1750, loss[loss=0.2173, simple_loss=0.2619, pruned_loss=0.06203, ctc_loss=0.1216, over 19670.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.296, pruned_loss=0.06853, ctc_loss=0.129, over 3851825.52 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:48:31,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=128869.33333333333, ans=0.025
+2024-08-25 17:48:42,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=128922.66666666667, ans=0.2
+2024-08-25 17:48:48,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=128976.0, ans=0.1
+2024-08-25 17:49:05,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=129029.33333333333, ans=0.125
+2024-08-25 17:49:11,938 INFO [train.py:1114] (1/4) Epoch 10, batch 1800, loss[loss=0.2361, simple_loss=0.2962, pruned_loss=0.06375, ctc_loss=0.1214, over 19616.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2962, pruned_loss=0.06847, ctc_loss=0.1287, over 3852281.16 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 17:49:16,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.52 vs. limit=10.0
+2024-08-25 17:49:16,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=129082.66666666667, ans=0.125
+2024-08-25 17:49:19,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129082.66666666667, ans=0.125
+2024-08-25 17:49:22,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=129082.66666666667, ans=0.125
+2024-08-25 18:01:23,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129189.33333333333, ans=0.125
+2024-08-25 18:11:17,736 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.930e+02 2.270e+02 3.115e+02 5.695e+02, threshold=4.540e+02, percent-clipped=10.0
+2024-08-25 18:13:37,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=129296.0, ans=0.0
+2024-08-25 18:14:47,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=129296.0, ans=0.125
+2024-08-25 18:19:59,304 INFO [train.py:1114] (1/4) Epoch 10, batch 1850, loss[loss=0.2254, simple_loss=0.2884, pruned_loss=0.05794, ctc_loss=0.1164, over 19591.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2953, pruned_loss=0.06822, ctc_loss=0.1279, over 3854580.27 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:28:36,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=129509.33333333333, ans=0.2
+2024-08-25 18:29:00,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=129509.33333333333, ans=15.0
+2024-08-25 18:32:37,432 INFO [train.py:1114] (1/4) Epoch 10, batch 1900, loss[loss=0.2592, simple_loss=0.3179, pruned_loss=0.07307, ctc_loss=0.136, over 19661.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.296, pruned_loss=0.06838, ctc_loss=0.1284, over 3860649.34 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:34:00,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=129616.0, ans=0.0
+2024-08-25 18:35:37,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.41 vs. limit=10.0
+2024-08-25 18:36:57,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=129776.0, ans=0.025
+2024-08-25 18:36:59,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=129776.0, ans=0.0
+2024-08-25 18:37:11,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=129776.0, ans=0.2
+2024-08-25 18:37:13,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.64 vs. limit=15.0
+2024-08-25 18:37:43,363 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.882e+02 2.156e+02 2.772e+02 4.689e+02, threshold=4.313e+02, percent-clipped=1.0
+2024-08-25 18:38:47,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=129829.33333333333, ans=0.025
+2024-08-25 18:38:48,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=129882.66666666667, ans=0.125
+2024-08-25 18:38:51,207 INFO [train.py:1114] (1/4) Epoch 10, batch 1950, loss[loss=0.2547, simple_loss=0.3037, pruned_loss=0.07466, ctc_loss=0.1412, over 19601.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.297, pruned_loss=0.06849, ctc_loss=0.1285, over 3869545.62 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:39:31,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=129882.66666666667, ans=0.125
+2024-08-25 18:39:32,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129882.66666666667, ans=0.0
+2024-08-25 18:41:08,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=129989.33333333333, ans=10.0
+2024-08-25 18:41:23,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129989.33333333333, ans=0.1
+2024-08-25 18:41:31,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=129989.33333333333, ans=0.125
+2024-08-25 18:44:04,314 INFO [train.py:1114] (1/4) Epoch 10, batch 2000, loss[loss=0.2392, simple_loss=0.2839, pruned_loss=0.06995, ctc_loss=0.1362, over 19652.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2976, pruned_loss=0.06863, ctc_loss=0.1288, over 3854942.46 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:46:12,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=130256.0, ans=0.125
+2024-08-25 18:47:32,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.882e+02 2.262e+02 2.707e+02 4.864e+02, threshold=4.523e+02, percent-clipped=1.0
+2024-08-25 18:48:29,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=130362.66666666667, ans=0.125
+2024-08-25 18:48:39,777 INFO [train.py:1114] (1/4) Epoch 10, batch 2050, loss[loss=0.2379, simple_loss=0.2826, pruned_loss=0.06951, ctc_loss=0.1354, over 19689.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2968, pruned_loss=0.06852, ctc_loss=0.1287, over 3851297.82 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:48:39,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=130416.0, ans=0.0
+2024-08-25 18:50:32,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=130522.66666666667, ans=0.2
+2024-08-25 18:51:07,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0
+2024-08-25 18:52:20,496 INFO [train.py:1114] (1/4) Epoch 10, batch 2100, loss[loss=0.254, simple_loss=0.3018, pruned_loss=0.0748, ctc_loss=0.1414, over 19773.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2957, pruned_loss=0.06764, ctc_loss=0.1273, over 3857717.85 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 16.0
+2024-08-25 18:52:24,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=130682.66666666667, ans=0.125
+2024-08-25 18:52:43,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.47 vs. limit=15.0
+2024-08-25 18:53:30,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130789.33333333333, ans=0.1
+2024-08-25 18:53:31,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130789.33333333333, ans=0.1
+2024-08-25 18:53:46,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130842.66666666667, ans=0.125
+2024-08-25 18:53:51,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=130842.66666666667, ans=0.0
+2024-08-25 18:53:58,205 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.839e+02 2.296e+02 2.721e+02 6.154e+02, threshold=4.593e+02, percent-clipped=3.0
+2024-08-25 18:54:01,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=130896.0, ans=0.125
+2024-08-25 18:54:37,549 INFO [train.py:1114] (1/4) Epoch 10, batch 2150, loss[loss=0.2184, simple_loss=0.2745, pruned_loss=0.05858, ctc_loss=0.1127, over 19584.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2954, pruned_loss=0.06777, ctc_loss=0.1275, over 3868234.63 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:55:07,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=131002.66666666667, ans=0.125
+2024-08-25 18:55:39,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=131056.0, ans=0.125
+2024-08-25 18:55:52,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=131109.33333333334, ans=0.0
+2024-08-25 18:56:32,988 INFO [train.py:1114] (1/4) Epoch 10, batch 2200, loss[loss=0.2446, simple_loss=0.3048, pruned_loss=0.06774, ctc_loss=0.1224, over 19586.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2949, pruned_loss=0.06724, ctc_loss=0.1268, over 3867086.24 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:56:42,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=131216.0, ans=0.2
+2024-08-25 18:57:12,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=131269.33333333334, ans=0.125
+2024-08-25 18:57:16,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=131322.66666666666, ans=0.2
+2024-08-25 18:57:32,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.85 vs. limit=5.0
+2024-08-25 18:57:51,458 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.773e+02 2.006e+02 2.540e+02 3.937e+02, threshold=4.013e+02, percent-clipped=0.0
+2024-08-25 18:57:57,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0
+2024-08-25 18:58:00,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.61 vs. limit=10.0
+2024-08-25 18:58:07,778 INFO [train.py:1114] (1/4) Epoch 10, batch 2250, loss[loss=0.2676, simple_loss=0.3163, pruned_loss=0.07967, ctc_loss=0.1487, over 19612.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2954, pruned_loss=0.06746, ctc_loss=0.127, over 3866927.65 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:58:10,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=131482.66666666666, ans=0.0
+2024-08-25 18:58:17,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=131482.66666666666, ans=0.125
+2024-08-25 18:58:29,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=131536.0, ans=0.125
+2024-08-25 18:58:32,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=131589.33333333334, ans=0.2
+2024-08-25 18:58:32,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=131589.33333333334, ans=0.0
+2024-08-25 18:59:05,060 INFO [train.py:1114] (1/4) Epoch 10, batch 2300, loss[loss=0.2126, simple_loss=0.2733, pruned_loss=0.05439, ctc_loss=0.1077, over 19491.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2947, pruned_loss=0.06733, ctc_loss=0.1266, over 3861842.14 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:59:09,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=131749.33333333334, ans=0.07
+2024-08-25 18:59:10,662 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 18:59:27,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.62 vs. limit=22.5
+2024-08-25 18:59:55,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=131909.33333333334, ans=0.125
+2024-08-25 19:00:00,732 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.815e+02 2.310e+02 2.961e+02 4.661e+02, threshold=4.621e+02, percent-clipped=5.0
+2024-08-25 19:00:03,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=131962.66666666666, ans=0.0
+2024-08-25 19:00:14,652 INFO [train.py:1114] (1/4) Epoch 10, batch 2350, loss[loss=0.2324, simple_loss=0.2927, pruned_loss=0.06164, ctc_loss=0.122, over 19659.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2948, pruned_loss=0.06771, ctc_loss=0.1271, over 3863871.65 frames. ], batch size: 63, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 19:00:16,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=132016.0, ans=0.1
+2024-08-25 19:00:27,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=132069.33333333334, ans=0.125
+2024-08-25 19:00:35,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.95 vs. limit=10.0
+2024-08-25 19:00:37,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=132122.66666666666, ans=0.0
+2024-08-25 19:00:42,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.65 vs. limit=15.0
+2024-08-25 19:00:53,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0
+2024-08-25 19:00:54,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=132176.0, ans=0.2
+2024-08-25 19:00:59,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=132229.33333333334, ans=0.025
+2024-08-25 19:01:00,049 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 19:01:01,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132229.33333333334, ans=0.1
+2024-08-25 19:01:11,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.91 vs. limit=22.5
+2024-08-25 19:01:13,177 INFO [train.py:1114] (1/4) Epoch 10, batch 2400, loss[loss=0.2284, simple_loss=0.292, pruned_loss=0.06032, ctc_loss=0.1104, over 19163.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2961, pruned_loss=0.0679, ctc_loss=0.1274, over 3857574.56 frames. ], batch size: 71, lr: 1.46e-02, grad_scale: 32.0
+2024-08-25 19:01:15,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=132282.66666666666, ans=0.0
+2024-08-25 19:01:40,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.42 vs. limit=15.0
+2024-08-25 19:01:45,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132389.33333333334, ans=0.1
+2024-08-25 19:02:10,731 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.986e+02 2.279e+02 2.618e+02 8.799e+02, threshold=4.558e+02, percent-clipped=0.0
+2024-08-25 19:02:16,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=132496.0, ans=0.125
+2024-08-25 19:02:22,031 INFO [train.py:1114] (1/4) Epoch 10, batch 2450, loss[loss=0.3314, simple_loss=0.3434, pruned_loss=0.1161, ctc_loss=0.2181, over 13381.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.2999, pruned_loss=0.07092, ctc_loss=0.1333, over 3729699.44 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 19:02:28,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=132549.33333333334, ans=0.025
+2024-08-25 19:02:30,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=132549.33333333334, ans=0.0
+2024-08-25 19:02:48,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=132656.0, ans=0.125
+2024-08-25 19:04:28,599 INFO [train.py:1114] (1/4) Epoch 11, batch 0, loss[loss=0.2547, simple_loss=0.2951, pruned_loss=0.07833, ctc_loss=0.1444, over 19408.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.2951, pruned_loss=0.07833, ctc_loss=0.1444, over 19408.00 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:04:28,599 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-25 19:04:55,864 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.2031, simple_loss=0.2887, pruned_loss=0.04339, ctc_loss=0.0768, over 944034.00 frames.
+2024-08-25 19:04:55,865 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB
+2024-08-25 19:04:58,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.41 vs. limit=22.5
+2024-08-25 19:05:28,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132864.0, ans=0.1
+2024-08-25 19:05:29,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.38 vs. limit=22.5
+2024-08-25 19:05:42,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=132917.33333333334, ans=0.1
+2024-08-25 19:05:48,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=132917.33333333334, ans=0.07
+2024-08-25 19:06:02,240 INFO [train.py:1114] (1/4) Epoch 11, batch 50, loss[loss=0.2133, simple_loss=0.2663, pruned_loss=0.05749, ctc_loss=0.113, over 19716.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2961, pruned_loss=0.06922, ctc_loss=0.1307, over 845449.42 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:06:03,361 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.465e+02 2.050e+02 2.234e+02 2.552e+02 4.359e+02, threshold=4.468e+02, percent-clipped=1.0
+2024-08-25 19:06:09,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten.whitening_limit, batch_count=133024.0, ans=22.5
+2024-08-25 19:06:10,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133024.0, ans=0.125
+2024-08-25 19:06:14,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=133024.0, ans=0.2
+2024-08-25 19:06:15,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.08 vs. limit=22.5
+2024-08-25 19:06:20,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=133077.33333333334, ans=0.0
+2024-08-25 19:06:26,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133077.33333333334, ans=0.1
+2024-08-25 19:06:38,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133130.66666666666, ans=0.125
+2024-08-25 19:06:39,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=133184.0, ans=0.0
+2024-08-25 19:06:40,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.71 vs. limit=22.5
+2024-08-25 19:06:44,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=133184.0, ans=0.125
+2024-08-25 19:07:26,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=133237.33333333334, ans=0.025
+2024-08-25 19:07:42,552 INFO [train.py:1114] (1/4) Epoch 11, batch 100, loss[loss=0.2158, simple_loss=0.282, pruned_loss=0.05434, ctc_loss=0.1023, over 19715.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2971, pruned_loss=0.06858, ctc_loss=0.129, over 1498424.97 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:07:45,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=133290.66666666666, ans=0.125
+2024-08-25 19:08:24,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=133450.66666666666, ans=0.0
+2024-08-25 19:08:26,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=133450.66666666666, ans=0.0
+2024-08-25 19:08:42,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.48 vs. limit=15.0
+2024-08-25 19:08:44,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133450.66666666666, ans=0.1
+2024-08-25 19:08:59,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=133504.0, ans=0.125
+2024-08-25 19:09:00,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.99 vs. limit=15.0
+2024-08-25 19:09:10,085 INFO [train.py:1114] (1/4) Epoch 11, batch 150, loss[loss=0.2383, simple_loss=0.2836, pruned_loss=0.07108, ctc_loss=0.1271, over 19708.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2958, pruned_loss=0.06817, ctc_loss=0.1283, over 2026854.05 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:09:12,925 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.747e+02 2.015e+02 2.344e+02 3.708e+02, threshold=4.031e+02, percent-clipped=0.0
+2024-08-25 19:09:17,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0
+2024-08-25 19:09:27,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=133610.66666666666, ans=0.0
+2024-08-25 19:09:48,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=133717.33333333334, ans=0.2
+2024-08-25 19:09:49,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.00 vs. limit=22.5
+2024-08-25 19:10:30,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133770.66666666666, ans=0.125
+2024-08-25 19:10:34,570 INFO [train.py:1114] (1/4) Epoch 11, batch 200, loss[loss=0.2363, simple_loss=0.2972, pruned_loss=0.06337, ctc_loss=0.1214, over 18510.00 frames. ], tot_loss[loss=0.238, simple_loss=0.2931, pruned_loss=0.0664, ctc_loss=0.1251, over 2434621.63 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:10:35,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=133824.0, ans=0.0
+2024-08-25 19:11:14,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=133930.66666666666, ans=0.2
+2024-08-25 19:11:41,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=133984.0, ans=0.0
+2024-08-25 19:12:01,076 INFO [train.py:1114] (1/4) Epoch 11, batch 250, loss[loss=0.2616, simple_loss=0.3157, pruned_loss=0.07734, ctc_loss=0.1321, over 19382.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.2933, pruned_loss=0.06624, ctc_loss=0.1245, over 2754253.18 frames. 
], batch size: 67, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:12:01,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134090.66666666666, ans=0.1 +2024-08-25 19:12:02,124 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.716e+02 2.023e+02 2.469e+02 5.021e+02, threshold=4.046e+02, percent-clipped=3.0 +2024-08-25 19:12:29,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134197.33333333334, ans=0.1 +2024-08-25 19:12:30,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=134197.33333333334, ans=0.09899494936611666 +2024-08-25 19:12:33,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134197.33333333334, ans=0.0 +2024-08-25 19:12:36,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=134197.33333333334, ans=0.015 +2024-08-25 19:12:37,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=134197.33333333334, ans=0.125 +2024-08-25 19:12:37,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=134197.33333333334, ans=0.125 +2024-08-25 19:12:56,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=134304.0, ans=0.0 +2024-08-25 19:13:03,623 INFO [train.py:1114] (1/4) Epoch 11, batch 300, loss[loss=0.2452, simple_loss=0.2969, pruned_loss=0.07051, ctc_loss=0.1314, over 19510.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2925, pruned_loss=0.06553, ctc_loss=0.1232, over 3000172.55 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:13:03,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=134357.33333333334, ans=0.125 +2024-08-25 19:13:15,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=134357.33333333334, ans=0.2 +2024-08-25 19:13:53,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=134570.66666666666, ans=0.0 +2024-08-25 19:14:07,024 INFO [train.py:1114] (1/4) Epoch 11, batch 350, loss[loss=0.1997, simple_loss=0.2575, pruned_loss=0.05169, ctc_loss=0.09628, over 19724.00 frames. ], tot_loss[loss=0.237, simple_loss=0.293, pruned_loss=0.0658, ctc_loss=0.1234, over 3190539.03 frames. 
], batch size: 48, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:14:08,113 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.838e+02 2.258e+02 2.898e+02 4.827e+02, threshold=4.516e+02, percent-clipped=2.0 +2024-08-25 19:14:43,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=134677.33333333334, ans=0.0 +2024-08-25 19:14:46,697 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.491e-01 +2024-08-25 19:15:15,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=134784.0, ans=0.125 +2024-08-25 19:15:47,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134837.33333333334, ans=0.125 +2024-08-25 19:15:57,899 INFO [train.py:1114] (1/4) Epoch 11, batch 400, loss[loss=0.2351, simple_loss=0.2936, pruned_loss=0.06457, ctc_loss=0.1186, over 19513.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2927, pruned_loss=0.06543, ctc_loss=0.1232, over 3342563.84 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:16:06,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=134890.66666666666, ans=0.125 +2024-08-25 19:16:15,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134944.0, ans=0.0 +2024-08-25 19:16:17,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.18 vs. limit=12.0 +2024-08-25 19:16:36,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134997.33333333334, ans=0.1 +2024-08-25 19:16:40,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0 +2024-08-25 19:16:41,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.49 vs. limit=12.0 +2024-08-25 19:16:42,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=134997.33333333334, ans=0.2 +2024-08-25 19:17:16,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.64 vs. limit=22.5 +2024-08-25 19:17:16,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=135104.0, ans=0.0 +2024-08-25 19:17:22,197 INFO [train.py:1114] (1/4) Epoch 11, batch 450, loss[loss=0.231, simple_loss=0.2928, pruned_loss=0.06168, ctc_loss=0.1145, over 19618.00 frames. ], tot_loss[loss=0.238, simple_loss=0.2934, pruned_loss=0.06633, ctc_loss=0.1248, over 3451323.42 frames. 
], batch size: 55, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:17:31,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.841e+02 2.102e+02 2.681e+02 4.407e+02, threshold=4.204e+02, percent-clipped=0.0 +2024-08-25 19:18:04,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=135317.33333333334, ans=0.0 +2024-08-25 19:18:34,755 INFO [train.py:1114] (1/4) Epoch 11, batch 500, loss[loss=0.2569, simple_loss=0.3064, pruned_loss=0.07462, ctc_loss=0.1452, over 19714.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2924, pruned_loss=0.0658, ctc_loss=0.1238, over 3547378.58 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:18:36,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135424.0, ans=0.0 +2024-08-25 19:19:12,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=135530.66666666666, ans=0.0 +2024-08-25 19:19:20,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=135530.66666666666, ans=0.0 +2024-08-25 19:19:33,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.56 vs. limit=15.0 +2024-08-25 19:19:48,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135637.33333333334, ans=0.125 +2024-08-25 19:20:17,205 INFO [train.py:1114] (1/4) Epoch 11, batch 550, loss[loss=0.2685, simple_loss=0.3212, pruned_loss=0.07719, ctc_loss=0.1533, over 19374.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.2925, pruned_loss=0.06582, ctc_loss=0.124, over 3609558.62 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:20:18,391 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.822e+02 2.069e+02 2.386e+02 4.149e+02, threshold=4.137e+02, percent-clipped=0.0 +2024-08-25 19:20:26,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.72 vs. limit=15.0 +2024-08-25 19:20:32,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=135690.66666666666, ans=0.125 +2024-08-25 19:20:40,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=135744.0, ans=0.0 +2024-08-25 19:20:40,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=135744.0, ans=0.125 +2024-08-25 19:20:56,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=135797.33333333334, ans=0.0 +2024-08-25 19:21:09,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. 
limit=6.0 +2024-08-25 19:21:19,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=135904.0, ans=0.0 +2024-08-25 19:21:28,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=135957.33333333334, ans=0.2 +2024-08-25 19:21:30,816 INFO [train.py:1114] (1/4) Epoch 11, batch 600, loss[loss=0.2788, simple_loss=0.3282, pruned_loss=0.08448, ctc_loss=0.1508, over 19334.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.2931, pruned_loss=0.06614, ctc_loss=0.1244, over 3665906.05 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:22:25,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=136010.66666666666, ans=0.2 +2024-08-25 19:22:26,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=136010.66666666666, ans=0.2 +2024-08-25 19:22:44,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=136064.0, ans=0.0 +2024-08-25 19:22:57,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=136117.33333333334, ans=0.0 +2024-08-25 19:23:10,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=136170.66666666666, ans=0.0 +2024-08-25 19:23:54,562 INFO [train.py:1114] (1/4) Epoch 11, batch 650, loss[loss=0.222, simple_loss=0.2936, pruned_loss=0.05525, ctc_loss=0.09964, over 19775.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2921, pruned_loss=0.06553, ctc_loss=0.123, over 3715925.17 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:23:55,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.913e+02 2.094e+02 2.738e+02 4.984e+02, threshold=4.187e+02, percent-clipped=5.0 +2024-08-25 19:23:58,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.16 vs. limit=22.5 +2024-08-25 19:24:08,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.27 vs. limit=15.0 +2024-08-25 19:24:27,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=136277.33333333334, ans=0.5 +2024-08-25 19:25:04,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-08-25 19:25:34,148 INFO [train.py:1114] (1/4) Epoch 11, batch 700, loss[loss=0.221, simple_loss=0.2774, pruned_loss=0.05971, ctc_loss=0.1129, over 19718.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2924, pruned_loss=0.06556, ctc_loss=0.1232, over 3747623.78 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:26:41,911 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.06 vs. 
limit=22.5 +2024-08-25 19:27:08,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=136597.33333333334, ans=0.0 +2024-08-25 19:27:38,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=15.0 +2024-08-25 19:27:47,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136704.0, ans=0.1 +2024-08-25 19:28:07,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=136704.0, ans=0.0 +2024-08-25 19:28:09,983 INFO [train.py:1114] (1/4) Epoch 11, batch 750, loss[loss=0.2472, simple_loss=0.3048, pruned_loss=0.06943, ctc_loss=0.1267, over 19503.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2917, pruned_loss=0.06534, ctc_loss=0.1225, over 3774889.90 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:28:25,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.821e+02 2.028e+02 2.720e+02 4.524e+02, threshold=4.057e+02, percent-clipped=2.0 +2024-08-25 19:29:32,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136864.0, ans=0.125 +2024-08-25 19:30:00,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=136917.33333333334, ans=0.025 +2024-08-25 19:30:15,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0 +2024-08-25 19:30:52,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136970.66666666666, ans=0.1 +2024-08-25 19:30:53,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136970.66666666666, ans=0.125 +2024-08-25 19:30:55,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136970.66666666666, ans=0.125 +2024-08-25 19:32:06,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=136970.66666666666, ans=0.125 +2024-08-25 19:32:08,680 INFO [train.py:1114] (1/4) Epoch 11, batch 800, loss[loss=0.2227, simple_loss=0.277, pruned_loss=0.06165, ctc_loss=0.1128, over 19782.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2922, pruned_loss=0.06554, ctc_loss=0.1231, over 3797096.71 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:32:08,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=137024.0, ans=0.0 +2024-08-25 19:32:24,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=137024.0, ans=0.035 +2024-08-25 19:32:54,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=137077.33333333334, ans=0.0 +2024-08-25 19:33:02,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.51 vs. 
limit=15.0 +2024-08-25 19:33:28,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137184.0, ans=0.1 +2024-08-25 19:33:49,189 INFO [train.py:1114] (1/4) Epoch 11, batch 850, loss[loss=0.2617, simple_loss=0.3119, pruned_loss=0.07766, ctc_loss=0.1403, over 19685.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.292, pruned_loss=0.06554, ctc_loss=0.1232, over 3816208.68 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:33:50,257 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.863e+02 2.065e+02 2.415e+02 4.305e+02, threshold=4.130e+02, percent-clipped=1.0 +2024-08-25 19:34:10,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.38 vs. limit=12.0 +2024-08-25 19:34:19,932 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.69 vs. limit=15.0 +2024-08-25 19:34:21,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=137397.33333333334, ans=0.125 +2024-08-25 19:34:30,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=137397.33333333334, ans=0.0 +2024-08-25 19:34:30,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=137397.33333333334, ans=0.125 +2024-08-25 19:34:31,871 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:34:33,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=137397.33333333334, ans=0.025 +2024-08-25 19:34:39,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=137450.66666666666, ans=0.0 +2024-08-25 19:34:49,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=12.0 +2024-08-25 19:35:04,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=137557.33333333334, ans=0.125 +2024-08-25 19:35:05,238 INFO [train.py:1114] (1/4) Epoch 11, batch 900, loss[loss=0.2189, simple_loss=0.2704, pruned_loss=0.06181, ctc_loss=0.1095, over 19413.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2921, pruned_loss=0.06587, ctc_loss=0.1238, over 3819692.85 frames. 
], batch size: 48, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:35:09,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=137557.33333333334, ans=0.0 +2024-08-25 19:35:10,592 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:36:09,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137717.33333333334, ans=0.1 +2024-08-25 19:36:30,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=137770.66666666666, ans=0.1 +2024-08-25 19:37:18,561 INFO [train.py:1114] (1/4) Epoch 11, batch 950, loss[loss=0.2138, simple_loss=0.2739, pruned_loss=0.05619, ctc_loss=0.1032, over 19495.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.2926, pruned_loss=0.06586, ctc_loss=0.1238, over 3821715.22 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:37:19,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.805e+02 2.081e+02 2.536e+02 4.211e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-25 19:38:02,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=137984.0, ans=0.125 +2024-08-25 19:38:06,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=137984.0, ans=0.125 +2024-08-25 19:38:33,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138037.33333333334, ans=0.0 +2024-08-25 19:38:48,988 INFO [train.py:1114] (1/4) Epoch 11, batch 1000, loss[loss=0.2223, simple_loss=0.2878, pruned_loss=0.0568, ctc_loss=0.108, over 19861.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.2937, pruned_loss=0.06651, ctc_loss=0.1251, over 3818232.30 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:39:22,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=138144.0, ans=0.125 +2024-08-25 19:39:23,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.82 vs. limit=15.0 +2024-08-25 19:39:27,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=138197.33333333334, ans=0.0 +2024-08-25 19:40:14,844 INFO [train.py:1114] (1/4) Epoch 11, batch 1050, loss[loss=0.2418, simple_loss=0.2961, pruned_loss=0.06832, ctc_loss=0.1268, over 19836.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.2925, pruned_loss=0.06598, ctc_loss=0.1242, over 3825246.23 frames. 
], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:40:16,854 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.874e+02 2.329e+02 2.645e+02 4.211e+02, threshold=4.658e+02, percent-clipped=2.0 +2024-08-25 19:40:26,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=138410.66666666666, ans=10.0 +2024-08-25 19:40:33,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=138410.66666666666, ans=0.0 +2024-08-25 19:40:39,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=138464.0, ans=0.125 +2024-08-25 19:40:39,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=138464.0, ans=0.2 +2024-08-25 19:40:51,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=138464.0, ans=0.125 +2024-08-25 19:40:52,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.09 vs. limit=8.0 +2024-08-25 19:41:18,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=138570.66666666666, ans=0.09899494936611666 +2024-08-25 19:41:19,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.36 vs. limit=15.0 +2024-08-25 19:41:25,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=138624.0, ans=0.125 +2024-08-25 19:41:26,113 INFO [train.py:1114] (1/4) Epoch 11, batch 1100, loss[loss=0.2279, simple_loss=0.2865, pruned_loss=0.06054, ctc_loss=0.1207, over 19595.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.2919, pruned_loss=0.06523, ctc_loss=0.1228, over 3832824.15 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:42:15,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138677.33333333334, ans=0.125 +2024-08-25 19:43:00,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138730.66666666666, ans=0.125 +2024-08-25 19:43:16,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=138784.0, ans=0.2 +2024-08-25 19:43:18,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.86 vs. limit=15.0 +2024-08-25 19:43:35,986 INFO [train.py:1114] (1/4) Epoch 11, batch 1150, loss[loss=0.2074, simple_loss=0.2725, pruned_loss=0.05143, ctc_loss=0.09873, over 19607.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2916, pruned_loss=0.06515, ctc_loss=0.1226, over 3832353.99 frames. 
], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:43:37,195 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.797e+02 2.039e+02 2.453e+02 4.580e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-25 19:44:10,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=138997.33333333334, ans=0.2 +2024-08-25 19:44:39,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139104.0, ans=0.1 +2024-08-25 19:44:41,837 INFO [train.py:1114] (1/4) Epoch 11, batch 1200, loss[loss=0.2178, simple_loss=0.2868, pruned_loss=0.05383, ctc_loss=0.1029, over 19837.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.2928, pruned_loss=0.06565, ctc_loss=0.1237, over 3827484.35 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:44:49,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=139157.33333333334, ans=0.125 +2024-08-25 19:44:50,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=15.0 +2024-08-25 19:45:09,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=139210.66666666666, ans=0.125 +2024-08-25 19:45:33,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139264.0, ans=0.1 +2024-08-25 19:45:42,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=139317.33333333334, ans=0.0 +2024-08-25 19:46:00,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=139317.33333333334, ans=0.1 +2024-08-25 19:46:15,644 INFO [train.py:1114] (1/4) Epoch 11, batch 1250, loss[loss=0.237, simple_loss=0.299, pruned_loss=0.06306, ctc_loss=0.1221, over 19537.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2929, pruned_loss=0.06554, ctc_loss=0.1235, over 3845387.56 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:46:16,708 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.769e+02 1.992e+02 2.545e+02 3.633e+02, threshold=3.984e+02, percent-clipped=0.0 +2024-08-25 19:46:19,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139424.0, ans=0.1 +2024-08-25 19:46:39,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=139424.0, ans=0.025 +2024-08-25 19:46:45,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139477.33333333334, ans=0.125 +2024-08-25 19:46:57,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139530.66666666666, ans=0.1 +2024-08-25 19:47:08,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.07 vs. limit=22.5 +2024-08-25 19:47:18,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.20 vs. 
limit=10.0 +2024-08-25 19:47:21,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.49 vs. limit=15.0 +2024-08-25 19:47:22,581 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:47:33,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=139637.33333333334, ans=0.09899494936611666 +2024-08-25 19:47:33,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=139637.33333333334, ans=0.04949747468305833 +2024-08-25 19:47:40,577 INFO [train.py:1114] (1/4) Epoch 11, batch 1300, loss[loss=0.2857, simple_loss=0.3315, pruned_loss=0.08714, ctc_loss=0.1641, over 18837.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.2924, pruned_loss=0.06525, ctc_loss=0.123, over 3847789.11 frames. ], batch size: 76, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:47:43,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0 +2024-08-25 19:48:06,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=139797.33333333334, ans=0.125 +2024-08-25 19:48:59,248 INFO [train.py:1114] (1/4) Epoch 11, batch 1350, loss[loss=0.2201, simple_loss=0.2859, pruned_loss=0.05661, ctc_loss=0.1027, over 19754.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.2919, pruned_loss=0.0648, ctc_loss=0.122, over 3858244.04 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:48:59,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=139957.33333333334, ans=0.125 +2024-08-25 19:49:01,647 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.851e+02 2.124e+02 2.742e+02 4.665e+02, threshold=4.248e+02, percent-clipped=3.0 +2024-08-25 19:49:01,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=139957.33333333334, ans=0.07 +2024-08-25 19:49:10,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.73 vs. limit=15.0 +2024-08-25 19:49:17,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140010.66666666666, ans=0.125 +2024-08-25 19:49:19,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=140010.66666666666, ans=0.0 +2024-08-25 19:49:56,215 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:50:07,172 INFO [train.py:1114] (1/4) Epoch 11, batch 1400, loss[loss=0.1681, simple_loss=0.2414, pruned_loss=0.03454, ctc_loss=0.06425, over 19659.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2915, pruned_loss=0.06448, ctc_loss=0.1214, over 3865075.90 frames. 
], batch size: 46, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:50:16,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=140224.0, ans=0.0 +2024-08-25 19:50:17,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140224.0, ans=0.0 +2024-08-25 19:50:38,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.59 vs. limit=15.0 +2024-08-25 19:51:08,371 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:51:18,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=140384.0, ans=0.125 +2024-08-25 19:51:32,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140437.33333333334, ans=0.125 +2024-08-25 19:51:42,651 INFO [train.py:1114] (1/4) Epoch 11, batch 1450, loss[loss=0.2292, simple_loss=0.2898, pruned_loss=0.06212, ctc_loss=0.111, over 19704.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2929, pruned_loss=0.06549, ctc_loss=0.1232, over 3862923.73 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:51:45,007 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.813e+02 2.052e+02 2.523e+02 4.896e+02, threshold=4.103e+02, percent-clipped=2.0 +2024-08-25 19:52:09,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140544.0, ans=0.1 +2024-08-25 19:52:12,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=140544.0, ans=0.125 +2024-08-25 19:52:14,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140597.33333333334, ans=0.125 +2024-08-25 19:52:41,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140650.66666666666, ans=0.125 +2024-08-25 19:52:54,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=140704.0, ans=0.125 +2024-08-25 19:53:16,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=140704.0, ans=0.125 +2024-08-25 19:53:19,916 INFO [train.py:1114] (1/4) Epoch 11, batch 1500, loss[loss=0.2112, simple_loss=0.2836, pruned_loss=0.0503, ctc_loss=0.09548, over 19592.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.2926, pruned_loss=0.06504, ctc_loss=0.1225, over 3862884.47 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:53:25,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=140757.33333333334, ans=0.0 +2024-08-25 19:53:39,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.48 vs. 
limit=10.0 +2024-08-25 19:53:51,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=140810.66666666666, ans=0.2 +2024-08-25 19:53:58,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=140810.66666666666, ans=0.125 +2024-08-25 19:54:01,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140810.66666666666, ans=0.1 +2024-08-25 19:54:07,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.29 vs. limit=15.0 +2024-08-25 19:54:17,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140864.0, ans=0.125 +2024-08-25 19:54:28,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.38 vs. limit=6.0 +2024-08-25 19:54:31,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140917.33333333334, ans=0.125 +2024-08-25 19:54:38,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=140917.33333333334, ans=0.2 +2024-08-25 19:55:01,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=140970.66666666666, ans=0.0 +2024-08-25 19:55:07,086 INFO [train.py:1114] (1/4) Epoch 11, batch 1550, loss[loss=0.2338, simple_loss=0.2904, pruned_loss=0.06632, ctc_loss=0.1112, over 19614.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2918, pruned_loss=0.06466, ctc_loss=0.1217, over 3847926.47 frames. ], batch size: 60, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:55:10,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.30 vs. limit=15.0 +2024-08-25 19:55:10,756 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.421e+02 1.804e+02 2.014e+02 2.422e+02 4.168e+02, threshold=4.028e+02, percent-clipped=1.0 +2024-08-25 19:56:25,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=141184.0, ans=0.015 +2024-08-25 19:56:57,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141184.0, ans=0.0 +2024-08-25 19:57:16,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141237.33333333334, ans=0.125 +2024-08-25 19:57:19,239 INFO [train.py:1114] (1/4) Epoch 11, batch 1600, loss[loss=0.2712, simple_loss=0.3232, pruned_loss=0.07889, ctc_loss=0.1536, over 19840.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2917, pruned_loss=0.06468, ctc_loss=0.1219, over 3837004.12 frames. 
], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-25 19:57:49,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=141290.66666666666, ans=0.95 +2024-08-25 19:58:40,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141504.0, ans=0.1 +2024-08-25 19:59:27,400 INFO [train.py:1114] (1/4) Epoch 11, batch 1650, loss[loss=0.2221, simple_loss=0.2911, pruned_loss=0.05557, ctc_loss=0.1047, over 19656.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.291, pruned_loss=0.06447, ctc_loss=0.1214, over 3833068.39 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 32.0 +2024-08-25 19:59:29,887 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.768e+02 1.990e+02 2.303e+02 4.438e+02, threshold=3.979e+02, percent-clipped=2.0 +2024-08-25 19:59:46,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=141557.33333333334, ans=0.125 +2024-08-25 20:00:05,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=141610.66666666666, ans=0.0 +2024-08-25 20:00:23,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=141717.33333333334, ans=0.125 +2024-08-25 20:00:36,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141717.33333333334, ans=0.125 +2024-08-25 20:01:17,966 INFO [train.py:1114] (1/4) Epoch 11, batch 1700, loss[loss=0.2108, simple_loss=0.2632, pruned_loss=0.05828, ctc_loss=0.1047, over 19652.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.2912, pruned_loss=0.06449, ctc_loss=0.1213, over 3847348.79 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 20:02:14,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.20 vs. limit=12.0 +2024-08-25 20:02:16,953 INFO [train.py:1114] (1/4) Epoch 11, batch 1750, loss[loss=0.2141, simple_loss=0.2611, pruned_loss=0.06121, ctc_loss=0.1117, over 19616.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2907, pruned_loss=0.06403, ctc_loss=0.1204, over 3851715.47 frames. ], batch size: 45, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 20:02:20,529 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.814e+02 2.107e+02 2.366e+02 3.890e+02, threshold=4.214e+02, percent-clipped=0.0 +2024-08-25 20:02:46,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=142144.0, ans=0.125 +2024-08-25 20:03:19,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.12 vs. limit=15.0 +2024-08-25 20:04:08,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=142304.0, ans=0.0 +2024-08-25 20:04:27,019 INFO [train.py:1114] (1/4) Epoch 11, batch 1800, loss[loss=0.2096, simple_loss=0.2846, pruned_loss=0.04834, ctc_loss=0.09483, over 19611.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2909, pruned_loss=0.06439, ctc_loss=0.1209, over 3854080.64 frames. 
], batch size: 55, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 20:04:27,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=142357.33333333334, ans=0.2 +2024-08-25 20:04:48,871 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.63 vs. limit=22.5 +2024-08-25 20:04:59,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142410.66666666666, ans=0.1 +2024-08-25 20:05:03,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-08-25 20:05:18,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=142464.0, ans=0.2 +2024-08-25 20:06:15,201 INFO [train.py:1114] (1/4) Epoch 11, batch 1850, loss[loss=0.2437, simple_loss=0.3059, pruned_loss=0.06488, ctc_loss=0.1292, over 19581.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.2908, pruned_loss=0.0643, ctc_loss=0.1209, over 3856468.98 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-25 20:06:17,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=142624.0, ans=0.04949747468305833 +2024-08-25 20:06:18,513 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.849e+02 2.256e+02 2.966e+02 5.642e+02, threshold=4.511e+02, percent-clipped=6.0 +2024-08-25 20:06:28,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=142677.33333333334, ans=0.125 +2024-08-25 20:06:31,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142677.33333333334, ans=0.0 +2024-08-25 20:06:47,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=142677.33333333334, ans=0.125 +2024-08-25 20:06:59,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.82 vs. limit=15.0 +2024-08-25 20:07:23,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=142784.0, ans=0.0 +2024-08-25 20:07:37,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=142837.33333333334, ans=0.0 +2024-08-25 20:07:51,852 INFO [train.py:1114] (1/4) Epoch 11, batch 1900, loss[loss=0.2222, simple_loss=0.292, pruned_loss=0.05551, ctc_loss=0.1033, over 19649.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2912, pruned_loss=0.06427, ctc_loss=0.1208, over 3861115.76 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0 +2024-08-25 20:08:05,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. 
limit=15.0 +2024-08-25 20:08:12,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=142944.0, ans=0.5 +2024-08-25 20:51:12,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143104.0, ans=0.1 +2024-08-25 20:55:30,008 INFO [train.py:1114] (1/4) Epoch 11, batch 1950, loss[loss=0.2146, simple_loss=0.2711, pruned_loss=0.05771, ctc_loss=0.1069, over 19586.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2927, pruned_loss=0.06465, ctc_loss=0.1217, over 3870338.24 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-25 21:03:39,811 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.850e+02 2.123e+02 2.695e+02 5.282e+02, threshold=4.246e+02, percent-clipped=2.0 +2024-08-25 21:05:40,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143157.33333333334, ans=0.1 +2024-08-25 21:09:35,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=143157.33333333334, ans=0.0 +2024-08-25 21:27:04,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=143264.0, ans=0.125 +2024-08-25 21:32:12,829 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0 +2024-08-25 21:32:13,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=143317.33333333334, ans=0.0 +2024-08-25 21:46:38,196 INFO [train.py:1114] (1/4) Epoch 11, batch 2000, loss[loss=0.2072, simple_loss=0.2582, pruned_loss=0.05636, ctc_loss=0.1088, over 19632.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2937, pruned_loss=0.06518, ctc_loss=0.1229, over 3855225.69 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 21:50:29,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143424.0, ans=0.125 +2024-08-25 21:55:36,784 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 22:08:03,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.87 vs. limit=15.0 +2024-08-25 22:19:42,816 INFO [train.py:1114] (1/4) Epoch 11, batch 2050, loss[loss=0.2012, simple_loss=0.2613, pruned_loss=0.05097, ctc_loss=0.09813, over 19721.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2925, pruned_loss=0.06476, ctc_loss=0.1219, over 3850917.53 frames. 
], batch size: 47, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:20:13,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.838e+02 2.216e+02 2.724e+02 4.008e+02, threshold=4.432e+02, percent-clipped=0.0 +2024-08-25 22:20:42,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143690.66666666666, ans=0.125 +2024-08-25 22:28:13,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=143850.66666666666, ans=0.125 +2024-08-25 22:32:18,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=143957.33333333334, ans=0.125 +2024-08-25 22:32:23,593 INFO [train.py:1114] (1/4) Epoch 11, batch 2100, loss[loss=0.2193, simple_loss=0.2869, pruned_loss=0.05539, ctc_loss=0.1022, over 19785.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2915, pruned_loss=0.06448, ctc_loss=0.1213, over 3857044.90 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:34:54,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=144010.66666666666, ans=0.0 +2024-08-25 22:36:41,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=144064.0, ans=0.125 +2024-08-25 22:37:14,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144117.33333333334, ans=0.125 +2024-08-25 22:38:06,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=144170.66666666666, ans=0.125 +2024-08-25 22:38:36,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=144170.66666666666, ans=0.2 +2024-08-25 22:39:07,553 INFO [train.py:1114] (1/4) Epoch 11, batch 2150, loss[loss=0.2224, simple_loss=0.2758, pruned_loss=0.06149, ctc_loss=0.1151, over 19581.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2903, pruned_loss=0.06405, ctc_loss=0.1204, over 3867527.11 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:39:51,935 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.523e+02 1.804e+02 2.068e+02 2.942e+02 5.639e+02, threshold=4.136e+02, percent-clipped=4.0 +2024-08-25 22:40:48,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=144277.33333333334, ans=0.0 +2024-08-25 22:41:09,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=144277.33333333334, ans=0.025 +2024-08-25 22:41:30,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=144330.66666666666, ans=0.0 +2024-08-25 22:42:48,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=12.0 +2024-08-25 22:43:20,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=144384.0, ans=0.025 +2024-08-25 22:44:02,549 INFO [train.py:1114] (1/4) Epoch 11, batch 2200, loss[loss=0.2648, simple_loss=0.3198, pruned_loss=0.07659, ctc_loss=0.1417, over 19593.00 frames. 
], tot_loss[loss=0.2336, simple_loss=0.2903, pruned_loss=0.06431, ctc_loss=0.1209, over 3867221.50 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:44:05,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144490.66666666666, ans=0.0 +2024-08-25 22:44:27,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=144490.66666666666, ans=0.0 +2024-08-25 22:45:41,702 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=7.853e-03 +2024-08-25 22:47:26,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=144650.66666666666, ans=0.0 +2024-08-25 22:48:53,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=144704.0, ans=0.125 +2024-08-25 22:49:00,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144704.0, ans=0.1 +2024-08-25 22:49:02,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=144757.33333333334, ans=0.95 +2024-08-25 22:49:03,018 INFO [train.py:1114] (1/4) Epoch 11, batch 2250, loss[loss=0.2433, simple_loss=0.3067, pruned_loss=0.06566, ctc_loss=0.1217, over 19604.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.2906, pruned_loss=0.0644, ctc_loss=0.1211, over 3866737.60 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:49:09,610 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.818e+02 2.110e+02 2.782e+02 6.628e+02, threshold=4.220e+02, percent-clipped=3.0 +2024-08-25 22:49:19,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=144757.33333333334, ans=0.0 +2024-08-25 22:49:55,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.00 vs. limit=15.0 +2024-08-25 22:50:09,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=144917.33333333334, ans=0.125 +2024-08-25 22:50:32,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144970.66666666666, ans=0.1 +2024-08-25 22:50:36,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=144970.66666666666, ans=0.125 +2024-08-25 22:50:46,947 INFO [train.py:1114] (1/4) Epoch 11, batch 2300, loss[loss=0.2255, simple_loss=0.2826, pruned_loss=0.06112, ctc_loss=0.1155, over 19489.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.2894, pruned_loss=0.0641, ctc_loss=0.1204, over 3861277.75 frames. 
], batch size: 49, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:51:27,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=145077.33333333334, ans=0.07 +2024-08-25 22:52:02,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145130.66666666666, ans=0.1 +2024-08-25 22:52:38,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=15.0 +2024-08-25 22:52:55,272 INFO [train.py:1114] (1/4) Epoch 11, batch 2350, loss[loss=0.2606, simple_loss=0.3145, pruned_loss=0.07513, ctc_loss=0.1413, over 19640.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2894, pruned_loss=0.06397, ctc_loss=0.12, over 3863502.55 frames. ], batch size: 63, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:53:01,237 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 1.788e+02 2.141e+02 2.380e+02 3.835e+02, threshold=4.282e+02, percent-clipped=0.0 +2024-08-25 22:53:05,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=145290.66666666666, ans=0.125 +2024-08-25 22:53:05,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0 +2024-08-25 22:53:27,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=145344.0, ans=0.0 +2024-08-25 22:53:40,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145397.33333333334, ans=0.125 +2024-08-25 22:53:56,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=145450.66666666666, ans=0.0 +2024-08-25 22:54:01,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=145450.66666666666, ans=0.0 +2024-08-25 22:54:05,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=145450.66666666666, ans=0.2 +2024-08-25 22:54:16,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=145504.0, ans=0.125 +2024-08-25 22:54:26,001 INFO [train.py:1114] (1/4) Epoch 11, batch 2400, loss[loss=0.2354, simple_loss=0.2986, pruned_loss=0.06222, ctc_loss=0.1195, over 19286.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.292, pruned_loss=0.065, ctc_loss=0.1216, over 3858062.37 frames. ], batch size: 71, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:54:28,766 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.57 vs. limit=22.5 +2024-08-25 22:54:34,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=145557.33333333334, ans=0.2 +2024-08-25 22:54:40,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.27 vs. limit=6.0 +2024-08-25 22:54:55,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.33 vs. 
limit=22.5 +2024-08-25 22:54:56,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=145664.0, ans=0.025 +2024-08-25 22:54:59,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145664.0, ans=0.1 +2024-08-25 22:55:17,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145717.33333333334, ans=0.1 +2024-08-25 22:55:44,077 INFO [train.py:1114] (1/4) Epoch 11, batch 2450, loss[loss=0.3245, simple_loss=0.3395, pruned_loss=0.1126, ctc_loss=0.2106, over 13684.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2967, pruned_loss=0.06861, ctc_loss=0.1288, over 3727836.25 frames. ], batch size: 141, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:56:00,762 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.910e+02 2.208e+02 2.594e+02 5.356e+02, threshold=4.415e+02, percent-clipped=1.0 +2024-08-25 22:56:01,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145824.0, ans=0.1 +2024-08-25 22:56:35,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=145877.33333333334, ans=0.125 +2024-08-25 22:57:15,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=145984.0, ans=0.125 +2024-08-25 22:57:16,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145984.0, ans=0.125 +2024-08-25 22:58:44,108 INFO [train.py:1114] (1/4) Epoch 12, batch 0, loss[loss=0.2626, simple_loss=0.2967, pruned_loss=0.08263, ctc_loss=0.158, over 19403.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.2967, pruned_loss=0.08263, ctc_loss=0.158, over 19403.00 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 22:58:44,109 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 22:59:47,573 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.0449, 2.7830, 3.2282, 2.5431], device='cuda:1') +2024-08-25 23:00:02,929 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.1972, simple_loss=0.2841, pruned_loss=0.04086, ctc_loss=0.07109, over 944034.00 frames. +2024-08-25 23:00:02,930 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-25 23:00:39,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=146138.66666666666, ans=0.0 +2024-08-25 23:00:58,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=146245.33333333334, ans=0.125 +2024-08-25 23:01:08,426 INFO [train.py:1114] (1/4) Epoch 12, batch 50, loss[loss=0.1951, simple_loss=0.2596, pruned_loss=0.04782, ctc_loss=0.0871, over 19725.00 frames. ], tot_loss[loss=0.239, simple_loss=0.2941, pruned_loss=0.06656, ctc_loss=0.1273, over 845355.73 frames. 
], batch size: 47, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:01:16,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=146298.66666666666, ans=0.09899494936611666 +2024-08-25 23:01:19,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=146298.66666666666, ans=0.0 +2024-08-25 23:01:20,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-08-25 23:01:27,720 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.810e+02 2.073e+02 2.436e+02 4.057e+02, threshold=4.147e+02, percent-clipped=0.0 +2024-08-25 23:01:27,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=146352.0, ans=0.0 +2024-08-25 23:01:37,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=146405.33333333334, ans=0.125 +2024-08-25 23:01:38,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.58 vs. limit=22.5 +2024-08-25 23:01:45,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=146458.66666666666, ans=0.0 +2024-08-25 23:01:49,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=146458.66666666666, ans=0.1 +2024-08-25 23:01:53,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-08-25 23:01:56,886 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:02:20,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=146512.0, ans=0.2 +2024-08-25 23:02:22,998 INFO [train.py:1114] (1/4) Epoch 12, batch 100, loss[loss=0.2007, simple_loss=0.2679, pruned_loss=0.04797, ctc_loss=0.09366, over 19713.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2952, pruned_loss=0.06632, ctc_loss=0.1261, over 1498997.21 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:02:31,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=146565.33333333334, ans=10.0 +2024-08-25 23:02:34,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-08-25 23:02:35,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=146565.33333333334, ans=0.125 +2024-08-25 23:02:40,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=146565.33333333334, ans=0.125 +2024-08-25 23:03:03,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.07 vs. 
limit=15.0 +2024-08-25 23:03:05,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=15.0 +2024-08-25 23:03:39,238 INFO [train.py:1114] (1/4) Epoch 12, batch 150, loss[loss=0.2033, simple_loss=0.2577, pruned_loss=0.05344, ctc_loss=0.1051, over 19728.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2913, pruned_loss=0.06415, ctc_loss=0.1214, over 2027965.43 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:04:09,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.659e+02 1.880e+02 2.314e+02 3.650e+02, threshold=3.760e+02, percent-clipped=0.0 +2024-08-25 23:04:15,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.93 vs. limit=12.0 +2024-08-25 23:04:19,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=146938.66666666666, ans=0.1 +2024-08-25 23:05:06,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=147098.66666666666, ans=0.0 +2024-08-25 23:05:07,036 INFO [train.py:1114] (1/4) Epoch 12, batch 200, loss[loss=0.2589, simple_loss=0.3152, pruned_loss=0.07364, ctc_loss=0.1383, over 18078.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2895, pruned_loss=0.06304, ctc_loss=0.1193, over 2435354.71 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:05:59,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=147152.0, ans=0.125 +2024-08-25 23:06:14,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.15 vs. limit=12.0 +2024-08-25 23:06:18,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=147205.33333333334, ans=0.0 +2024-08-25 23:06:42,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=147258.66666666666, ans=0.025 +2024-08-25 23:06:54,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=147312.0, ans=0.125 +2024-08-25 23:07:01,821 INFO [train.py:1114] (1/4) Epoch 12, batch 250, loss[loss=0.2372, simple_loss=0.2966, pruned_loss=0.06511, ctc_loss=0.1186, over 19469.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2886, pruned_loss=0.06259, ctc_loss=0.118, over 2757226.13 frames. 
], batch size: 67, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:07:22,628 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.308e+02 1.825e+02 2.154e+02 2.499e+02 3.884e+02, threshold=4.307e+02, percent-clipped=2.0 +2024-08-25 23:07:25,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147418.66666666666, ans=0.125 +2024-08-25 23:07:26,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147418.66666666666, ans=0.1 +2024-08-25 23:07:32,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147472.0, ans=0.125 +2024-08-25 23:07:45,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=147525.33333333334, ans=0.0 +2024-08-25 23:07:52,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147525.33333333334, ans=0.125 +2024-08-25 23:08:03,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=147578.66666666666, ans=0.2 +2024-08-25 23:08:07,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten.whitening_limit, batch_count=147578.66666666666, ans=15.0 +2024-08-25 23:08:07,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.50 vs. limit=15.0 +2024-08-25 23:08:10,577 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:08:13,950 INFO [train.py:1114] (1/4) Epoch 12, batch 300, loss[loss=0.2628, simple_loss=0.3183, pruned_loss=0.0747, ctc_loss=0.1447, over 19513.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2881, pruned_loss=0.0625, ctc_loss=0.1178, over 3000839.16 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:08:32,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=147685.33333333334, ans=0.125 +2024-08-25 23:08:39,836 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.40 vs. limit=10.0 +2024-08-25 23:08:53,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=147792.0, ans=0.05 +2024-08-25 23:08:58,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=147792.0, ans=0.125 +2024-08-25 23:09:11,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=147845.33333333334, ans=0.125 +2024-08-25 23:09:11,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=147845.33333333334, ans=0.0 +2024-08-25 23:09:17,473 INFO [train.py:1114] (1/4) Epoch 12, batch 350, loss[loss=0.2265, simple_loss=0.2813, pruned_loss=0.06278, ctc_loss=0.1154, over 19757.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.2886, pruned_loss=0.06257, ctc_loss=0.118, over 3190720.75 frames. 
], batch size: 48, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:09:18,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=147898.66666666666, ans=0.125 +2024-08-25 23:09:36,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.749e+02 2.047e+02 2.740e+02 4.170e+02, threshold=4.094e+02, percent-clipped=0.0 +2024-08-25 23:09:45,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=148005.33333333334, ans=0.125 +2024-08-25 23:09:55,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.38 vs. limit=15.0 +2024-08-25 23:10:01,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-25 23:10:10,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-25 23:10:25,926 INFO [train.py:1114] (1/4) Epoch 12, batch 400, loss[loss=0.2158, simple_loss=0.2805, pruned_loss=0.05521, ctc_loss=0.1018, over 19497.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2884, pruned_loss=0.06242, ctc_loss=0.1176, over 3342696.82 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:11:02,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=148272.0, ans=0.2 +2024-08-25 23:11:18,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=148325.33333333334, ans=0.025 +2024-08-25 23:11:23,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0 +2024-08-25 23:12:02,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148378.66666666666, ans=0.125 +2024-08-25 23:12:04,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=12.0 +2024-08-25 23:12:05,894 INFO [train.py:1114] (1/4) Epoch 12, batch 450, loss[loss=0.2311, simple_loss=0.301, pruned_loss=0.05845, ctc_loss=0.1109, over 19609.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2889, pruned_loss=0.06295, ctc_loss=0.1184, over 3451399.95 frames. 
], batch size: 55, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:12:12,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=148432.0, ans=0.125 +2024-08-25 23:12:17,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=148432.0, ans=0.0 +2024-08-25 23:12:21,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=148485.33333333334, ans=0.2 +2024-08-25 23:12:28,364 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.830e+02 2.201e+02 2.765e+02 4.484e+02, threshold=4.403e+02, percent-clipped=1.0 +2024-08-25 23:12:52,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=148592.0, ans=0.025 +2024-08-25 23:13:05,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=148645.33333333334, ans=0.0 +2024-08-25 23:13:21,163 INFO [train.py:1114] (1/4) Epoch 12, batch 500, loss[loss=0.2387, simple_loss=0.2971, pruned_loss=0.06522, ctc_loss=0.1247, over 19695.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2877, pruned_loss=0.06242, ctc_loss=0.1172, over 3547732.00 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:13:54,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=148752.0, ans=15.0 +2024-08-25 23:14:28,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=148858.66666666666, ans=0.125 +2024-08-25 23:14:42,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0 +2024-08-25 23:14:50,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=148912.0, ans=0.125 +2024-08-25 23:14:54,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=148912.0, ans=6.0 +2024-08-25 23:14:54,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=148912.0, ans=22.5 +2024-08-25 23:14:56,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=148912.0, ans=10.0 +2024-08-25 23:14:59,255 INFO [train.py:1114] (1/4) Epoch 12, batch 550, loss[loss=0.2819, simple_loss=0.3281, pruned_loss=0.08656, ctc_loss=0.1565, over 19247.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2883, pruned_loss=0.06262, ctc_loss=0.1176, over 3608680.84 frames. 
], batch size: 71, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:15:13,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=148965.33333333334, ans=0.125 +2024-08-25 23:15:16,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=148965.33333333334, ans=0.125 +2024-08-25 23:15:36,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=149018.66666666666, ans=0.0 +2024-08-25 23:15:42,253 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.692e+02 2.049e+02 2.499e+02 4.022e+02, threshold=4.098e+02, percent-clipped=0.0 +2024-08-25 23:15:58,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.50 vs. limit=15.0 +2024-08-25 23:16:06,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=149072.0, ans=0.025 +2024-08-25 23:16:47,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=149178.66666666666, ans=0.0 +2024-08-25 23:16:54,724 INFO [train.py:1114] (1/4) Epoch 12, batch 600, loss[loss=0.2456, simple_loss=0.3091, pruned_loss=0.06765, ctc_loss=0.1169, over 19360.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2883, pruned_loss=0.0623, ctc_loss=0.1169, over 3665956.24 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:17:02,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.99 vs. limit=6.0 +2024-08-25 23:17:11,535 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:17:14,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.59 vs. limit=10.0 +2024-08-25 23:18:47,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149338.66666666666, ans=0.125 +2024-08-25 23:19:03,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149392.0, ans=0.1 +2024-08-25 23:19:07,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.91 vs. limit=15.0 +2024-08-25 23:19:21,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=149498.66666666666, ans=0.0 +2024-08-25 23:19:22,620 INFO [train.py:1114] (1/4) Epoch 12, batch 650, loss[loss=0.2555, simple_loss=0.3076, pruned_loss=0.07371, ctc_loss=0.1398, over 19754.00 frames. ], tot_loss[loss=0.229, simple_loss=0.2877, pruned_loss=0.06191, ctc_loss=0.1164, over 3716283.73 frames. 
], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:19:31,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149498.66666666666, ans=0.125 +2024-08-25 23:19:35,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=149498.66666666666, ans=0.125 +2024-08-25 23:19:43,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=149552.0, ans=0.0 +2024-08-25 23:19:48,490 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.406e+02 1.911e+02 2.346e+02 2.911e+02 5.072e+02, threshold=4.691e+02, percent-clipped=6.0 +2024-08-25 23:20:04,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.93 vs. limit=10.0 +2024-08-25 23:20:08,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=15.0 +2024-08-25 23:20:37,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=149712.0, ans=0.125 +2024-08-25 23:20:47,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=149712.0, ans=0.02 +2024-08-25 23:20:48,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=149765.33333333334, ans=0.025 +2024-08-25 23:20:49,404 INFO [train.py:1114] (1/4) Epoch 12, batch 700, loss[loss=0.2117, simple_loss=0.2696, pruned_loss=0.05676, ctc_loss=0.1006, over 19713.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.288, pruned_loss=0.06203, ctc_loss=0.1165, over 3746477.32 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:21:33,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149925.33333333334, ans=0.125 +2024-08-25 23:21:41,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=149978.66666666666, ans=0.0 +2024-08-25 23:21:51,364 INFO [train.py:1114] (1/4) Epoch 12, batch 750, loss[loss=0.2343, simple_loss=0.2988, pruned_loss=0.06137, ctc_loss=0.1175, over 19512.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2877, pruned_loss=0.06189, ctc_loss=0.1161, over 3773749.03 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:22:11,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=150032.0, ans=0.0 +2024-08-25 23:22:14,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=150085.33333333334, ans=0.125 +2024-08-25 23:22:17,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.77 vs. 
limit=22.5 +2024-08-25 23:22:20,749 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.992e+02 2.563e+02 3.460e+02 5.252e+02, threshold=5.125e+02, percent-clipped=3.0 +2024-08-25 23:22:35,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=150138.66666666666, ans=0.125 +2024-08-25 23:23:10,613 INFO [train.py:1114] (1/4) Epoch 12, batch 800, loss[loss=0.1824, simple_loss=0.2488, pruned_loss=0.04206, ctc_loss=0.07951, over 19417.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.2872, pruned_loss=0.06153, ctc_loss=0.1156, over 3795520.06 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:23:16,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=150298.66666666666, ans=0.125 +2024-08-25 23:23:19,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=150298.66666666666, ans=0.125 +2024-08-25 23:23:25,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150352.0, ans=0.125 +2024-08-25 23:23:36,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=150405.33333333334, ans=0.035 +2024-08-25 23:24:01,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=150512.0, ans=0.0 +2024-08-25 23:24:07,596 INFO [train.py:1114] (1/4) Epoch 12, batch 850, loss[loss=0.2241, simple_loss=0.2918, pruned_loss=0.05665, ctc_loss=0.1077, over 19644.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.2874, pruned_loss=0.06165, ctc_loss=0.1159, over 3815292.38 frames. ], batch size: 59, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:24:15,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150565.33333333334, ans=0.0 +2024-08-25 23:24:30,650 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.376e+02 1.732e+02 2.149e+02 2.756e+02 4.869e+02, threshold=4.297e+02, percent-clipped=0.0 +2024-08-25 23:24:39,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=150672.0, ans=0.025 +2024-08-25 23:24:54,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=150672.0, ans=0.05 +2024-08-25 23:25:14,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=150778.66666666666, ans=0.125 +2024-08-25 23:25:16,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.75 vs. limit=15.0 +2024-08-25 23:25:39,230 INFO [train.py:1114] (1/4) Epoch 12, batch 900, loss[loss=0.2267, simple_loss=0.2718, pruned_loss=0.06595, ctc_loss=0.1244, over 19398.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2878, pruned_loss=0.06194, ctc_loss=0.1164, over 3818318.72 frames. 
], batch size: 48, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:25:41,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=150832.0, ans=0.0 +2024-08-25 23:26:01,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=150832.0, ans=0.125 +2024-08-25 23:26:21,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=15.0 +2024-08-25 23:26:23,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=150938.66666666666, ans=0.2 +2024-08-25 23:26:28,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=150938.66666666666, ans=0.125 +2024-08-25 23:27:00,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.62 vs. limit=15.0 +2024-08-25 23:27:21,988 INFO [train.py:1114] (1/4) Epoch 12, batch 950, loss[loss=0.2084, simple_loss=0.2701, pruned_loss=0.05306, ctc_loss=0.1014, over 19481.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2884, pruned_loss=0.06228, ctc_loss=0.1171, over 3818384.29 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:27:47,798 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.461e+02 1.727e+02 2.047e+02 2.468e+02 3.873e+02, threshold=4.093e+02, percent-clipped=0.0 +2024-08-25 23:28:00,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=151152.0, ans=0.125 +2024-08-25 23:28:26,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=151205.33333333334, ans=0.125 +2024-08-25 23:28:37,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. limit=6.0 +2024-08-25 23:28:55,953 INFO [train.py:1114] (1/4) Epoch 12, batch 1000, loss[loss=0.2233, simple_loss=0.2808, pruned_loss=0.06077, ctc_loss=0.1107, over 19855.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.289, pruned_loss=0.06265, ctc_loss=0.1178, over 3815332.89 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:29:11,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151365.33333333334, ans=0.0 +2024-08-25 23:30:33,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0 +2024-08-25 23:30:41,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=151525.33333333334, ans=0.125 +2024-08-25 23:30:53,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151578.66666666666, ans=0.0 +2024-08-25 23:30:55,449 INFO [train.py:1114] (1/4) Epoch 12, batch 1050, loss[loss=0.2228, simple_loss=0.2899, pruned_loss=0.05583, ctc_loss=0.1102, over 19847.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2883, pruned_loss=0.06221, ctc_loss=0.1173, over 3821509.76 frames. 
], batch size: 57, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:31:14,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.748e+02 2.222e+02 2.883e+02 4.562e+02, threshold=4.445e+02, percent-clipped=3.0 +2024-08-25 23:31:30,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=151738.66666666666, ans=0.0 +2024-08-25 23:31:49,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=151845.33333333334, ans=0.125 +2024-08-25 23:32:14,292 INFO [train.py:1114] (1/4) Epoch 12, batch 1100, loss[loss=0.2354, simple_loss=0.2906, pruned_loss=0.066, ctc_loss=0.1208, over 19578.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.288, pruned_loss=0.06218, ctc_loss=0.1173, over 3828704.68 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:32:16,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0 +2024-08-25 23:32:18,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.43 vs. limit=10.0 +2024-08-25 23:32:26,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=151898.66666666666, ans=0.0 +2024-08-25 23:32:31,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151952.0, ans=0.1 +2024-08-25 23:32:37,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=151952.0, ans=0.125 +2024-08-25 23:32:49,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.94 vs. limit=15.0 +2024-08-25 23:33:13,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=152112.0, ans=0.0 +2024-08-25 23:33:20,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=152112.0, ans=0.125 +2024-08-25 23:33:32,434 INFO [train.py:1114] (1/4) Epoch 12, batch 1150, loss[loss=0.202, simple_loss=0.263, pruned_loss=0.05103, ctc_loss=0.09728, over 19595.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2877, pruned_loss=0.06206, ctc_loss=0.117, over 3828532.61 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:33:46,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.19 vs. 
limit=15.0 +2024-08-25 23:33:49,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=152165.33333333334, ans=0.0 +2024-08-25 23:33:52,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=152218.66666666666, ans=0.125 +2024-08-25 23:34:07,234 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.763e+02 2.002e+02 2.335e+02 5.298e+02, threshold=4.005e+02, percent-clipped=1.0 +2024-08-25 23:34:12,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152218.66666666666, ans=0.1 +2024-08-25 23:34:18,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=152272.0, ans=0.025 +2024-08-25 23:34:26,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152272.0, ans=0.125 +2024-08-25 23:34:30,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0 +2024-08-25 23:34:39,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=152325.33333333334, ans=0.2 +2024-08-25 23:34:59,041 INFO [train.py:1114] (1/4) Epoch 12, batch 1200, loss[loss=0.2439, simple_loss=0.2991, pruned_loss=0.06838, ctc_loss=0.1297, over 19828.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.289, pruned_loss=0.06288, ctc_loss=0.1185, over 3824097.13 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:35:01,478 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:35:15,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152485.33333333334, ans=0.1 +2024-08-25 23:35:52,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=152592.0, ans=0.0 +2024-08-25 23:36:09,952 INFO [train.py:1114] (1/4) Epoch 12, batch 1250, loss[loss=0.2464, simple_loss=0.301, pruned_loss=0.07083, ctc_loss=0.1256, over 19525.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2895, pruned_loss=0.06274, ctc_loss=0.118, over 3842652.98 frames. ], batch size: 61, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:36:29,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=152752.0, ans=0.0 +2024-08-25 23:36:34,022 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.907e+02 2.265e+02 2.785e+02 4.753e+02, threshold=4.530e+02, percent-clipped=2.0 +2024-08-25 23:36:59,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152858.66666666666, ans=0.1 +2024-08-25 23:37:18,926 INFO [train.py:1114] (1/4) Epoch 12, batch 1300, loss[loss=0.2439, simple_loss=0.3011, pruned_loss=0.06828, ctc_loss=0.1256, over 18795.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2879, pruned_loss=0.06178, ctc_loss=0.116, over 3846441.29 frames. 
], batch size: 76, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:37:22,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-08-25 23:37:23,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=152965.33333333334, ans=0.125 +2024-08-25 23:37:42,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=153018.66666666666, ans=0.125 +2024-08-25 23:38:07,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=153125.33333333334, ans=0.0 +2024-08-25 23:38:28,991 INFO [train.py:1114] (1/4) Epoch 12, batch 1350, loss[loss=0.2165, simple_loss=0.2782, pruned_loss=0.05683, ctc_loss=0.103, over 19780.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2873, pruned_loss=0.0615, ctc_loss=0.1155, over 3856928.48 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:38:41,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=153285.33333333334, ans=0.125 +2024-08-25 23:38:46,283 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.300e+02 1.707e+02 2.039e+02 2.408e+02 4.402e+02, threshold=4.078e+02, percent-clipped=0.0 +2024-08-25 23:38:49,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.79 vs. limit=15.0 +2024-08-25 23:39:05,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=153392.0, ans=0.025 +2024-08-25 23:39:19,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=153392.0, ans=0.2 +2024-08-25 23:39:21,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=15.0 +2024-08-25 23:39:38,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.90 vs. limit=22.5 +2024-08-25 23:39:41,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=153445.33333333334, ans=0.04949747468305833 +2024-08-25 23:39:43,074 INFO [train.py:1114] (1/4) Epoch 12, batch 1400, loss[loss=0.2275, simple_loss=0.272, pruned_loss=0.06678, ctc_loss=0.1237, over 19678.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.2873, pruned_loss=0.06148, ctc_loss=0.1154, over 3864355.01 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:39:50,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=153498.66666666666, ans=0.125 +2024-08-25 23:39:53,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=153498.66666666666, ans=0.125 +2024-08-25 23:40:17,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.69 vs. 
limit=22.5 +2024-08-25 23:40:19,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153605.33333333334, ans=0.125 +2024-08-25 23:40:46,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-25 23:41:07,351 INFO [train.py:1114] (1/4) Epoch 12, batch 1450, loss[loss=0.2338, simple_loss=0.2937, pruned_loss=0.06282, ctc_loss=0.1205, over 19673.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2882, pruned_loss=0.06197, ctc_loss=0.1164, over 3862587.11 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:41:27,996 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.446e+02 1.773e+02 2.135e+02 2.639e+02 4.435e+02, threshold=4.270e+02, percent-clipped=2.0 +2024-08-25 23:41:29,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153818.66666666666, ans=0.125 +2024-08-25 23:41:54,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=153872.0, ans=0.0 +2024-08-25 23:41:56,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.74 vs. limit=15.0 +2024-08-25 23:42:20,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=153978.66666666666, ans=0.125 +2024-08-25 23:42:22,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153978.66666666666, ans=0.125 +2024-08-25 23:42:43,046 INFO [train.py:1114] (1/4) Epoch 12, batch 1500, loss[loss=0.2262, simple_loss=0.2856, pruned_loss=0.06118, ctc_loss=0.111, over 19598.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2885, pruned_loss=0.06204, ctc_loss=0.1167, over 3861806.77 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:43:07,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=154085.33333333334, ans=0.2 +2024-08-25 23:43:15,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=154138.66666666666, ans=0.0 +2024-08-25 23:44:09,910 INFO [train.py:1114] (1/4) Epoch 12, batch 1550, loss[loss=0.2354, simple_loss=0.3011, pruned_loss=0.0624, ctc_loss=0.1123, over 19594.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2886, pruned_loss=0.06233, ctc_loss=0.1171, over 3845995.35 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:44:13,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.36 vs. limit=15.0 +2024-08-25 23:44:20,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. 
limit=6.0 +2024-08-25 23:44:22,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=154352.0, ans=0.2 +2024-08-25 23:44:43,860 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.860e+02 2.194e+02 2.828e+02 4.590e+02, threshold=4.388e+02, percent-clipped=1.0 +2024-08-25 23:45:06,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154405.33333333334, ans=0.125 +2024-08-25 23:45:09,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=154405.33333333334, ans=0.1 +2024-08-25 23:46:10,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=154405.33333333334, ans=0.125 +2024-08-25 23:46:12,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=154405.33333333334, ans=0.0 +2024-08-25 23:46:30,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=154512.0, ans=0.125 +2024-08-25 23:46:37,536 INFO [train.py:1114] (1/4) Epoch 12, batch 1600, loss[loss=0.2378, simple_loss=0.2987, pruned_loss=0.06416, ctc_loss=0.1211, over 19849.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.288, pruned_loss=0.06226, ctc_loss=0.1173, over 3835436.76 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:46:55,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=154565.33333333334, ans=0.2 +2024-08-25 23:47:19,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154618.66666666666, ans=0.125 +2024-08-25 23:47:28,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=154618.66666666666, ans=0.0 +2024-08-25 23:47:32,593 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.219e-01 +2024-08-25 23:47:46,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=154672.0, ans=0.125 +2024-08-25 23:48:12,940 INFO [train.py:1114] (1/4) Epoch 12, batch 1650, loss[loss=0.2361, simple_loss=0.2967, pruned_loss=0.06416, ctc_loss=0.118, over 19656.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2876, pruned_loss=0.06203, ctc_loss=0.1169, over 3831667.61 frames. 
], batch size: 59, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:48:28,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=154885.33333333334, ans=0.0 +2024-08-25 23:48:32,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.751e+02 2.060e+02 2.481e+02 4.497e+02, threshold=4.120e+02, percent-clipped=1.0 +2024-08-25 23:48:44,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154938.66666666666, ans=0.125 +2024-08-25 23:48:44,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=154938.66666666666, ans=0.125 +2024-08-25 23:48:55,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=154992.0, ans=0.09899494936611666 +2024-08-25 23:49:12,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=155045.33333333334, ans=0.125 +2024-08-25 23:49:19,218 INFO [train.py:1114] (1/4) Epoch 12, batch 1700, loss[loss=0.2198, simple_loss=0.2694, pruned_loss=0.06251, ctc_loss=0.1126, over 19671.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2876, pruned_loss=0.06186, ctc_loss=0.1165, over 3846379.08 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:49:26,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=155098.66666666666, ans=0.0 +2024-08-25 23:49:41,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155205.33333333334, ans=0.125 +2024-08-25 23:49:58,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.27 vs. limit=15.0 +2024-08-25 23:50:19,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=155258.66666666666, ans=0.125 +2024-08-25 23:50:36,449 INFO [train.py:1114] (1/4) Epoch 12, batch 1750, loss[loss=0.2254, simple_loss=0.2735, pruned_loss=0.06374, ctc_loss=0.1245, over 19637.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.2872, pruned_loss=0.06141, ctc_loss=0.1156, over 3851033.26 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:50:57,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=155365.33333333334, ans=0.2 +2024-08-25 23:51:01,000 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:51:07,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. 
limit=15.0 +2024-08-25 23:51:12,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.691e+02 1.944e+02 2.310e+02 4.068e+02, threshold=3.888e+02, percent-clipped=0.0 +2024-08-25 23:51:17,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=155418.66666666666, ans=0.0 +2024-08-25 23:51:19,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=155472.0, ans=0.125 +2024-08-25 23:51:55,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155578.66666666666, ans=0.125 +2024-08-25 23:52:01,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=155578.66666666666, ans=0.0 +2024-08-25 23:52:03,816 INFO [train.py:1114] (1/4) Epoch 12, batch 1800, loss[loss=0.2304, simple_loss=0.2962, pruned_loss=0.05912, ctc_loss=0.1162, over 19599.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2877, pruned_loss=0.06167, ctc_loss=0.1161, over 3853096.14 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:52:11,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155632.0, ans=0.1 +2024-08-25 23:52:29,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=155685.33333333334, ans=0.0 +2024-08-25 23:52:30,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155685.33333333334, ans=0.1 +2024-08-25 23:53:00,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=155738.66666666666, ans=0.125 +2024-08-25 23:53:21,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=155792.0, ans=0.025 +2024-08-25 23:53:25,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=155792.0, ans=0.0 +2024-08-25 23:53:26,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=155792.0, ans=10.0 +2024-08-25 23:53:33,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=155845.33333333334, ans=0.0 +2024-08-25 23:54:05,046 INFO [train.py:1114] (1/4) Epoch 12, batch 1850, loss[loss=0.2767, simple_loss=0.3313, pruned_loss=0.0809, ctc_loss=0.1507, over 19597.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.2876, pruned_loss=0.06177, ctc_loss=0.1161, over 3855362.83 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:54:28,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=155898.66666666666, ans=0.125 +2024-08-25 23:54:33,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0 +2024-08-25 23:54:44,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.53 vs. 
limit=15.0 +2024-08-25 23:54:44,938 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.455e+02 1.785e+02 2.050e+02 2.712e+02 4.249e+02, threshold=4.100e+02, percent-clipped=1.0 +2024-08-25 23:55:25,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=156058.66666666666, ans=0.2 +2024-08-25 23:55:26,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=156058.66666666666, ans=0.025 +2024-08-25 23:55:36,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.68 vs. limit=15.0 +2024-08-25 23:55:42,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156165.33333333334, ans=0.125 +2024-08-25 23:56:02,274 INFO [train.py:1114] (1/4) Epoch 12, batch 1900, loss[loss=0.2147, simple_loss=0.2903, pruned_loss=0.04982, ctc_loss=0.09899, over 19655.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2881, pruned_loss=0.0617, ctc_loss=0.1159, over 3861072.89 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:56:40,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=156218.66666666666, ans=0.125 +2024-08-25 23:57:14,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=156325.33333333334, ans=0.2 +2024-08-25 23:57:29,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=156325.33333333334, ans=0.125 +2024-08-25 23:57:34,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=156378.66666666666, ans=0.125 +2024-08-25 23:58:01,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=156378.66666666666, ans=0.025 +2024-08-25 23:58:09,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=156378.66666666666, ans=0.125 +2024-08-25 23:58:28,732 INFO [train.py:1114] (1/4) Epoch 12, batch 1950, loss[loss=0.2085, simple_loss=0.2685, pruned_loss=0.05487, ctc_loss=0.09672, over 19593.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.289, pruned_loss=0.0619, ctc_loss=0.1161, over 3869858.39 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:58:33,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.21 vs. 
limit=15.0 +2024-08-25 23:58:54,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=156432.0, ans=0.2 +2024-08-25 23:59:03,813 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.459e+02 1.700e+02 2.031e+02 2.417e+02 3.778e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-08-25 23:59:12,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=156538.66666666666, ans=0.125 +2024-08-25 23:59:23,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=156538.66666666666, ans=0.125 +2024-08-25 23:59:33,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=12.0 +2024-08-25 23:59:35,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=156592.0, ans=0.0 +2024-08-25 23:59:46,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.44 vs. limit=15.0 +2024-08-25 23:59:51,578 INFO [train.py:1114] (1/4) Epoch 12, batch 2000, loss[loss=0.2147, simple_loss=0.2646, pruned_loss=0.05986, ctc_loss=0.1127, over 19656.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2899, pruned_loss=0.06258, ctc_loss=0.1175, over 3853730.37 frames. ], batch size: 45, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:00:06,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=156752.0, ans=0.125 +2024-08-26 00:00:40,461 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 00:00:44,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=156858.66666666666, ans=0.125 +2024-08-26 00:01:27,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=156965.33333333334, ans=0.125 +2024-08-26 00:01:28,938 INFO [train.py:1114] (1/4) Epoch 12, batch 2050, loss[loss=0.2269, simple_loss=0.2749, pruned_loss=0.06446, ctc_loss=0.125, over 19725.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2885, pruned_loss=0.06249, ctc_loss=0.1174, over 3850393.15 frames. 
], batch size: 47, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:01:31,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=156965.33333333334, ans=0.125 +2024-08-26 00:01:39,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=157018.66666666666, ans=0.2 +2024-08-26 00:01:43,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=157018.66666666666, ans=0.0 +2024-08-26 00:01:46,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=157018.66666666666, ans=0.0 +2024-08-26 00:01:46,859 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.778e+02 1.977e+02 2.412e+02 4.440e+02, threshold=3.953e+02, percent-clipped=1.0 +2024-08-26 00:01:53,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=157072.0, ans=0.0 +2024-08-26 00:02:44,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.39 vs. limit=15.0 +2024-08-26 00:02:45,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=157178.66666666666, ans=0.0 +2024-08-26 00:02:45,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157178.66666666666, ans=0.0 +2024-08-26 00:03:00,087 INFO [train.py:1114] (1/4) Epoch 12, batch 2100, loss[loss=0.2091, simple_loss=0.2776, pruned_loss=0.05139, ctc_loss=0.09445, over 19764.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.288, pruned_loss=0.06198, ctc_loss=0.1166, over 3858278.38 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:18:53,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=157285.33333333334, ans=0.0 +2024-08-26 00:19:56,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=157285.33333333334, ans=0.2 +2024-08-26 00:23:40,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-08-26 00:30:17,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=157338.66666666666, ans=0.1 +2024-08-26 00:40:42,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=157392.0, ans=0.125 +2024-08-26 00:56:07,935 INFO [train.py:1114] (1/4) Epoch 12, batch 2150, loss[loss=0.2283, simple_loss=0.2879, pruned_loss=0.06042, ctc_loss=0.1195, over 19563.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2876, pruned_loss=0.06204, ctc_loss=0.1166, over 3869358.89 frames. 
], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 01:01:41,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=157498.66666666666, ans=0.025 +2024-08-26 01:03:05,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=157498.66666666666, ans=0.2 +2024-08-26 01:08:26,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=157552.0, ans=0.0 +2024-08-26 01:09:53,318 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.788e+02 2.174e+02 2.705e+02 6.148e+02, threshold=4.348e+02, percent-clipped=11.0 +2024-08-26 01:13:04,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=157552.0, ans=0.0 +2024-08-26 01:16:37,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=157605.33333333334, ans=0.2 +2024-08-26 01:16:38,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=157605.33333333334, ans=0.0 +2024-08-26 01:18:15,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=157605.33333333334, ans=0.2 +2024-08-26 01:21:15,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=157605.33333333334, ans=0.0 +2024-08-26 01:21:15,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=157605.33333333334, ans=0.0 +2024-08-26 01:34:09,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157712.0, ans=0.0 +2024-08-26 01:34:51,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=15.0 +2024-08-26 01:37:35,680 INFO [train.py:1114] (1/4) Epoch 12, batch 2200, loss[loss=0.2624, simple_loss=0.3178, pruned_loss=0.0753, ctc_loss=0.141, over 19595.00 frames. ], tot_loss[loss=0.229, simple_loss=0.2875, pruned_loss=0.062, ctc_loss=0.1166, over 3867203.55 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 01:44:47,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=157818.66666666666, ans=0.125 +2024-08-26 01:49:48,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=157872.0, ans=0.125 +2024-08-26 01:49:48,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=157872.0, ans=0.0 +2024-08-26 01:49:49,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=157925.33333333334, ans=0.0 +2024-08-26 01:57:30,326 INFO [train.py:1114] (1/4) Epoch 12, batch 2250, loss[loss=0.2416, simple_loss=0.2989, pruned_loss=0.06814, ctc_loss=0.12, over 19608.00 frames. ], tot_loss[loss=0.229, simple_loss=0.2876, pruned_loss=0.06196, ctc_loss=0.1163, over 3866797.67 frames. 
], batch size: 55, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:04:09,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.77 vs. limit=22.5 +2024-08-26 02:04:28,486 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.839e+02 2.199e+02 2.577e+02 6.358e+02, threshold=4.399e+02, percent-clipped=1.0 +2024-08-26 02:10:33,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158192.0, ans=0.125 +2024-08-26 02:13:21,298 INFO [train.py:1114] (1/4) Epoch 12, batch 2300, loss[loss=0.19, simple_loss=0.2611, pruned_loss=0.0428, ctc_loss=0.08317, over 19502.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.2869, pruned_loss=0.06205, ctc_loss=0.1165, over 3860138.16 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:15:17,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=158352.0, ans=0.125 +2024-08-26 02:16:57,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=15.0 +2024-08-26 02:19:29,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=158458.66666666666, ans=0.0 +2024-08-26 02:19:30,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=158458.66666666666, ans=0.125 +2024-08-26 02:20:36,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=158458.66666666666, ans=0.125 +2024-08-26 02:21:54,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=158512.0, ans=0.025 +2024-08-26 02:22:39,618 INFO [train.py:1114] (1/4) Epoch 12, batch 2350, loss[loss=0.2511, simple_loss=0.3073, pruned_loss=0.07012, ctc_loss=0.1367, over 19669.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2872, pruned_loss=0.06258, ctc_loss=0.1177, over 3863839.87 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:22:51,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=158565.33333333334, ans=0.125 +2024-08-26 02:23:10,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158565.33333333334, ans=0.1 +2024-08-26 02:25:18,438 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.991e+02 2.536e+02 3.183e+02 5.552e+02, threshold=5.072e+02, percent-clipped=5.0 +2024-08-26 02:26:00,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=158672.0, ans=10.0 +2024-08-26 02:26:35,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=158672.0, ans=0.0 +2024-08-26 02:27:08,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158725.33333333334, ans=0.1 +2024-08-26 02:30:58,348 INFO [train.py:1114] (1/4) Epoch 12, batch 2400, loss[loss=0.2902, simple_loss=0.3313, pruned_loss=0.09067, ctc_loss=0.1691, over 19313.00 frames. 
], tot_loss[loss=0.2317, simple_loss=0.2893, pruned_loss=0.06322, ctc_loss=0.1189, over 3858473.22 frames. ], batch size: 71, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:31:28,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158885.33333333334, ans=0.1 +2024-08-26 02:31:51,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=158885.33333333334, ans=0.2 +2024-08-26 02:35:10,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=158938.66666666666, ans=0.1 +2024-08-26 02:36:09,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=158938.66666666666, ans=0.0 +2024-08-26 02:36:18,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158992.0, ans=0.1 +2024-08-26 02:36:52,157 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:37:47,280 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:38:22,388 INFO [train.py:1114] (1/4) Epoch 12, batch 2450, loss[loss=0.3557, simple_loss=0.3573, pruned_loss=0.1296, ctc_loss=0.237, over 13343.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.2931, pruned_loss=0.066, ctc_loss=0.1244, over 3731368.21 frames. ], batch size: 140, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:39:03,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=159098.66666666666, ans=0.0 +2024-08-26 02:39:07,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=159098.66666666666, ans=0.125 +2024-08-26 02:39:10,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=159152.0, ans=0.07 +2024-08-26 02:39:42,309 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.859e+02 2.162e+02 2.447e+02 4.124e+02, threshold=4.324e+02, percent-clipped=0.0 +2024-08-26 02:43:45,617 INFO [train.py:1114] (1/4) Epoch 13, batch 0, loss[loss=0.1992, simple_loss=0.2634, pruned_loss=0.04946, ctc_loss=0.09021, over 19821.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2634, pruned_loss=0.04946, ctc_loss=0.09021, over 19821.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 32.0 +2024-08-26 02:43:45,618 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 02:45:27,904 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1972, simple_loss=0.2835, pruned_loss=0.04113, ctc_loss=0.07151, over 944034.00 frames. 
+2024-08-26 02:45:27,905 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB +2024-08-26 02:45:29,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=159306.66666666666, ans=0.025 +2024-08-26 02:45:44,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=159306.66666666666, ans=0.125 +2024-08-26 02:45:57,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159360.0, ans=0.125 +2024-08-26 02:45:58,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159360.0, ans=0.1 +2024-08-26 02:46:14,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=159413.33333333334, ans=0.04949747468305833 +2024-08-26 02:46:19,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=159413.33333333334, ans=0.125 +2024-08-26 02:46:24,854 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.236e-03 +2024-08-26 02:46:30,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159413.33333333334, ans=0.125 +2024-08-26 02:46:53,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=159466.66666666666, ans=0.2 +2024-08-26 02:46:56,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159466.66666666666, ans=0.0 +2024-08-26 02:48:02,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0 +2024-08-26 02:48:06,368 INFO [train.py:1114] (1/4) Epoch 13, batch 50, loss[loss=0.181, simple_loss=0.244, pruned_loss=0.04319, ctc_loss=0.07873, over 19716.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.2882, pruned_loss=0.06144, ctc_loss=0.1163, over 844748.83 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:48:07,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.42 vs. limit=15.0 +2024-08-26 02:48:20,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=159626.66666666666, ans=0.035 +2024-08-26 02:48:34,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159680.0, ans=0.1 +2024-08-26 02:48:34,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=159680.0, ans=0.09899494936611666 +2024-08-26 02:48:55,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.822e+02 2.122e+02 2.766e+02 5.339e+02, threshold=4.244e+02, percent-clipped=3.0 +2024-08-26 02:49:23,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. 
limit=15.0 +2024-08-26 02:49:26,905 INFO [train.py:1114] (1/4) Epoch 13, batch 100, loss[loss=0.2239, simple_loss=0.2807, pruned_loss=0.0604, ctc_loss=0.1155, over 19708.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2894, pruned_loss=0.06134, ctc_loss=0.1157, over 1499477.02 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:49:29,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=159840.0, ans=0.0 +2024-08-26 02:49:32,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=159840.0, ans=0.07 +2024-08-26 02:49:49,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=159893.33333333334, ans=0.0 +2024-08-26 02:49:58,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159893.33333333334, ans=0.125 +2024-08-26 02:50:41,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=160000.0, ans=0.125 +2024-08-26 02:50:45,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160000.0, ans=0.125 +2024-08-26 02:51:17,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 02:51:18,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=160053.33333333334, ans=0.0 +2024-08-26 02:51:27,234 INFO [train.py:1114] (1/4) Epoch 13, batch 150, loss[loss=0.1995, simple_loss=0.2576, pruned_loss=0.0522, ctc_loss=0.09226, over 19698.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2864, pruned_loss=0.06004, ctc_loss=0.1129, over 2027175.31 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:51:42,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=160106.66666666666, ans=0.035 +2024-08-26 02:52:08,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160160.0, ans=0.1 +2024-08-26 02:52:26,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160213.33333333334, ans=0.125 +2024-08-26 02:52:40,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=160213.33333333334, ans=0.125 +2024-08-26 02:52:42,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. 
limit=6.0 +2024-08-26 02:52:48,527 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.693e+02 1.889e+02 2.276e+02 3.515e+02, threshold=3.778e+02, percent-clipped=0.0 +2024-08-26 02:53:07,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160266.66666666666, ans=0.1 +2024-08-26 02:53:08,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160266.66666666666, ans=0.1 +2024-08-26 02:53:36,258 INFO [train.py:1114] (1/4) Epoch 13, batch 200, loss[loss=0.2553, simple_loss=0.3076, pruned_loss=0.07258, ctc_loss=0.1446, over 18394.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2852, pruned_loss=0.05977, ctc_loss=0.1126, over 2434816.66 frames. ], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:54:07,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=160426.66666666666, ans=0.025 +2024-08-26 02:54:25,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160480.0, ans=0.1 +2024-08-26 02:54:25,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.44 vs. limit=22.5 +2024-08-26 02:54:28,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160480.0, ans=0.125 +2024-08-26 02:55:03,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.17 vs. limit=15.0 +2024-08-26 02:55:15,701 INFO [train.py:1114] (1/4) Epoch 13, batch 250, loss[loss=0.2365, simple_loss=0.2908, pruned_loss=0.06634, ctc_loss=0.1238, over 19336.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2852, pruned_loss=0.05979, ctc_loss=0.1125, over 2754771.31 frames. 
], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:55:18,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=160640.0, ans=0.0 +2024-08-26 02:55:22,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=160640.0, ans=0.5 +2024-08-26 02:55:23,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=160640.0, ans=0.125 +2024-08-26 02:55:25,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160640.0, ans=0.1 +2024-08-26 02:55:32,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160693.33333333334, ans=0.0 +2024-08-26 02:55:38,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=160746.66666666666, ans=0.025 +2024-08-26 02:55:47,653 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.754e+02 2.188e+02 2.577e+02 4.403e+02, threshold=4.375e+02, percent-clipped=2.0 +2024-08-26 02:55:47,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160746.66666666666, ans=0.125 +2024-08-26 02:55:49,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=160800.0, ans=0.2 +2024-08-26 02:55:54,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=160800.0, ans=0.2 +2024-08-26 02:55:55,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=160800.0, ans=0.025 +2024-08-26 02:56:07,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160800.0, ans=0.1 +2024-08-26 02:56:07,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.32 vs. limit=22.5 +2024-08-26 02:56:12,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=160853.33333333334, ans=0.0 +2024-08-26 02:56:20,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=160853.33333333334, ans=0.0 +2024-08-26 02:56:24,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-08-26 02:56:43,557 INFO [train.py:1114] (1/4) Epoch 13, batch 300, loss[loss=0.2438, simple_loss=0.2962, pruned_loss=0.06964, ctc_loss=0.1301, over 19518.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2851, pruned_loss=0.0599, ctc_loss=0.1128, over 3000163.21 frames. 
], batch size: 61, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:56:46,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=160906.66666666666, ans=0.125 +2024-08-26 02:57:00,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=160960.0, ans=0.0 +2024-08-26 02:57:05,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=160960.0, ans=0.2 +2024-08-26 02:57:23,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=161013.33333333334, ans=0.025 +2024-08-26 02:57:35,547 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:57:37,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161120.0, ans=0.125 +2024-08-26 02:57:50,481 INFO [train.py:1114] (1/4) Epoch 13, batch 350, loss[loss=0.1999, simple_loss=0.259, pruned_loss=0.05202, ctc_loss=0.09175, over 19765.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2855, pruned_loss=0.06002, ctc_loss=0.1127, over 3189239.62 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:58:25,613 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.772e+02 2.039e+02 2.354e+02 3.759e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-26 02:58:54,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161386.66666666666, ans=0.1 +2024-08-26 02:59:24,150 INFO [train.py:1114] (1/4) Epoch 13, batch 400, loss[loss=0.2108, simple_loss=0.2807, pruned_loss=0.05184, ctc_loss=0.09298, over 19481.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2846, pruned_loss=0.05934, ctc_loss=0.1116, over 3341325.68 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 02:59:53,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=161440.0, ans=0.2 +2024-08-26 02:59:58,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 03:00:04,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=15.0 +2024-08-26 03:00:39,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161600.0, ans=0.125 +2024-08-26 03:00:39,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=161600.0, ans=0.07 +2024-08-26 03:00:43,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=161600.0, ans=0.0 +2024-08-26 03:01:53,834 INFO [train.py:1114] (1/4) Epoch 13, batch 450, loss[loss=0.2097, simple_loss=0.2838, pruned_loss=0.04922, ctc_loss=0.09268, over 19624.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2851, pruned_loss=0.05972, ctc_loss=0.1125, over 3450443.00 frames. 
], batch size: 55, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:01:55,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=161706.66666666666, ans=0.0 +2024-08-26 03:02:06,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=161706.66666666666, ans=0.025 +2024-08-26 03:02:43,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=161760.0, ans=0.125 +2024-08-26 03:03:10,109 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.726e+02 2.085e+02 2.754e+02 4.301e+02, threshold=4.170e+02, percent-clipped=3.0 +2024-08-26 03:03:51,753 INFO [train.py:1114] (1/4) Epoch 13, batch 500, loss[loss=0.2528, simple_loss=0.3014, pruned_loss=0.07671, ctc_loss=0.127, over 19660.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2841, pruned_loss=0.05929, ctc_loss=0.1116, over 3546503.37 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:04:15,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=161973.33333333334, ans=0.125 +2024-08-26 03:05:04,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.28 vs. limit=15.0 +2024-08-26 03:05:10,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=162080.0, ans=0.125 +2024-08-26 03:06:03,081 INFO [train.py:1114] (1/4) Epoch 13, batch 550, loss[loss=0.2419, simple_loss=0.2998, pruned_loss=0.06667, ctc_loss=0.1264, over 19314.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2838, pruned_loss=0.05928, ctc_loss=0.1116, over 3608218.83 frames. ], batch size: 71, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:06:14,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=162293.33333333334, ans=0.125 +2024-08-26 03:06:16,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162293.33333333334, ans=0.1 +2024-08-26 03:06:21,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=162293.33333333334, ans=0.2 +2024-08-26 03:06:33,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=15.0 +2024-08-26 03:06:42,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=15.0 +2024-08-26 03:06:47,057 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.318e+02 1.758e+02 1.954e+02 2.485e+02 4.688e+02, threshold=3.908e+02, percent-clipped=2.0 +2024-08-26 03:06:55,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=162400.0, ans=0.0 +2024-08-26 03:07:14,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=162453.33333333334, ans=0.125 +2024-08-26 03:07:24,268 INFO [train.py:1114] (1/4) Epoch 13, batch 600, loss[loss=0.2155, simple_loss=0.2833, pruned_loss=0.05416, ctc_loss=0.0984, over 19384.00 frames. 
], tot_loss[loss=0.2235, simple_loss=0.2841, pruned_loss=0.05916, ctc_loss=0.1113, over 3667030.45 frames. ], batch size: 67, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:07:50,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=162560.0, ans=0.0 +2024-08-26 03:07:51,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=162560.0, ans=10.0 +2024-08-26 03:08:12,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.66 vs. limit=15.0 +2024-08-26 03:08:19,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=162613.33333333334, ans=0.1 +2024-08-26 03:08:25,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162666.66666666666, ans=0.1 +2024-08-26 03:08:29,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=162666.66666666666, ans=0.2 +2024-08-26 03:08:46,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=162720.0, ans=0.025 +2024-08-26 03:09:14,959 INFO [train.py:1114] (1/4) Epoch 13, batch 650, loss[loss=0.2037, simple_loss=0.2747, pruned_loss=0.04834, ctc_loss=0.08982, over 19763.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2838, pruned_loss=0.05902, ctc_loss=0.111, over 3717515.22 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:09:25,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.46 vs. limit=15.0 +2024-08-26 03:09:43,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=162826.66666666666, ans=0.125 +2024-08-26 03:09:56,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=162880.0, ans=0.0 +2024-08-26 03:09:56,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=162880.0, ans=0.0 +2024-08-26 03:10:02,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=162880.0, ans=0.2 +2024-08-26 03:10:09,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.755e+02 2.119e+02 2.960e+02 5.119e+02, threshold=4.237e+02, percent-clipped=6.0 +2024-08-26 03:10:28,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162986.66666666666, ans=0.0 +2024-08-26 03:10:39,735 INFO [train.py:1114] (1/4) Epoch 13, batch 700, loss[loss=0.2101, simple_loss=0.2691, pruned_loss=0.05561, ctc_loss=0.09999, over 19725.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2842, pruned_loss=0.05912, ctc_loss=0.1109, over 3749066.62 frames. 
], batch size: 51, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:11:04,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=163093.33333333334, ans=0.125 +2024-08-26 03:11:33,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=163200.0, ans=0.0 +2024-08-26 03:11:52,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=163253.33333333334, ans=0.0 +2024-08-26 03:11:55,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=163253.33333333334, ans=0.0 +2024-08-26 03:12:00,804 INFO [train.py:1114] (1/4) Epoch 13, batch 750, loss[loss=0.2101, simple_loss=0.2789, pruned_loss=0.05138, ctc_loss=0.09659, over 19474.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.284, pruned_loss=0.05919, ctc_loss=0.111, over 3774963.78 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:12:05,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=163306.66666666666, ans=0.125 +2024-08-26 03:12:07,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=163306.66666666666, ans=0.07 +2024-08-26 03:12:26,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 03:12:28,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 03:12:43,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.800e+02 2.310e+02 2.882e+02 4.749e+02, threshold=4.619e+02, percent-clipped=2.0 +2024-08-26 03:12:45,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=163466.66666666666, ans=0.2 +2024-08-26 03:12:46,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 03:13:35,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=163520.0, ans=0.125 +2024-08-26 03:13:56,354 INFO [train.py:1114] (1/4) Epoch 13, batch 800, loss[loss=0.2159, simple_loss=0.2717, pruned_loss=0.05828, ctc_loss=0.109, over 19820.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2838, pruned_loss=0.05886, ctc_loss=0.1103, over 3796423.05 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:14:20,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163626.66666666666, ans=0.125 +2024-08-26 03:14:29,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163680.0, ans=0.125 +2024-08-26 03:14:32,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.98 vs. 
limit=15.0 +2024-08-26 03:14:33,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=163680.0, ans=0.125 +2024-08-26 03:14:52,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163733.33333333334, ans=0.125 +2024-08-26 03:15:01,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=163786.66666666666, ans=0.035 +2024-08-26 03:15:01,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=163786.66666666666, ans=0.2 +2024-08-26 03:15:10,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163786.66666666666, ans=0.1 +2024-08-26 03:15:12,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=163840.0, ans=0.04949747468305833 +2024-08-26 03:15:13,890 INFO [train.py:1114] (1/4) Epoch 13, batch 850, loss[loss=0.2274, simple_loss=0.2929, pruned_loss=0.05866, ctc_loss=0.1111, over 19658.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2833, pruned_loss=0.05874, ctc_loss=0.1102, over 3814761.56 frames. ], batch size: 59, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:15:16,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.32 vs. limit=22.5 +2024-08-26 03:15:33,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163893.33333333334, ans=0.125 +2024-08-26 03:16:07,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=163946.66666666666, ans=0.125 +2024-08-26 03:16:11,622 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.727e+02 1.948e+02 2.271e+02 3.773e+02, threshold=3.897e+02, percent-clipped=0.0 +2024-08-26 03:16:25,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=164053.33333333334, ans=0.0 +2024-08-26 03:16:39,635 INFO [train.py:1114] (1/4) Epoch 13, batch 900, loss[loss=0.2188, simple_loss=0.2686, pruned_loss=0.06142, ctc_loss=0.1152, over 19831.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.284, pruned_loss=0.0593, ctc_loss=0.1113, over 3819190.32 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:16:44,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=6.0 +2024-08-26 03:16:57,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=164160.0, ans=0.125 +2024-08-26 03:17:00,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=164160.0, ans=0.2 +2024-08-26 03:17:05,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.36 vs. 
limit=15.0 +2024-08-26 03:17:13,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=164213.33333333334, ans=0.125 +2024-08-26 03:17:15,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.87 vs. limit=15.0 +2024-08-26 03:17:30,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-08-26 03:17:35,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.04 vs. limit=15.0 +2024-08-26 03:17:39,435 INFO [train.py:1114] (1/4) Epoch 13, batch 950, loss[loss=0.2102, simple_loss=0.2709, pruned_loss=0.05451, ctc_loss=0.1015, over 19500.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2842, pruned_loss=0.0595, ctc_loss=0.1117, over 3820526.74 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:18:33,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=164426.66666666666, ans=0.125 +2024-08-26 03:18:41,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=164480.0, ans=0.125 +2024-08-26 03:18:41,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=164480.0, ans=0.125 +2024-08-26 03:18:52,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.763e+02 2.081e+02 2.549e+02 5.575e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-26 03:18:54,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.66 vs. limit=10.0 +2024-08-26 03:19:00,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=164533.33333333334, ans=0.125 +2024-08-26 03:19:09,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=164533.33333333334, ans=0.125 +2024-08-26 03:19:29,883 INFO [train.py:1114] (1/4) Epoch 13, batch 1000, loss[loss=0.2352, simple_loss=0.2903, pruned_loss=0.06572, ctc_loss=0.1216, over 19861.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2856, pruned_loss=0.06039, ctc_loss=0.1133, over 3816217.51 frames. 
], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:19:38,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=164640.0, ans=0.2 +2024-08-26 03:19:44,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=164693.33333333334, ans=0.0 +2024-08-26 03:19:45,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=164693.33333333334, ans=0.125 +2024-08-26 03:20:06,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=164746.66666666666, ans=0.0 +2024-08-26 03:20:15,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=164800.0, ans=0.125 +2024-08-26 03:20:31,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=164853.33333333334, ans=0.05 +2024-08-26 03:20:35,692 INFO [train.py:1114] (1/4) Epoch 13, batch 1050, loss[loss=0.2331, simple_loss=0.2911, pruned_loss=0.06369, ctc_loss=0.1193, over 19840.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2849, pruned_loss=0.06024, ctc_loss=0.1131, over 3821903.62 frames. ], batch size: 57, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:20:44,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=164906.66666666666, ans=15.0 +2024-08-26 03:20:55,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=164960.0, ans=0.125 +2024-08-26 03:21:08,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.698e+02 1.997e+02 2.318e+02 3.616e+02, threshold=3.994e+02, percent-clipped=0.0 +2024-08-26 03:21:31,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=165120.0, ans=0.025 +2024-08-26 03:21:44,513 INFO [train.py:1114] (1/4) Epoch 13, batch 1100, loss[loss=0.2319, simple_loss=0.2833, pruned_loss=0.06551, ctc_loss=0.1237, over 19579.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2852, pruned_loss=0.0601, ctc_loss=0.113, over 3829803.11 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:22:00,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=12.0 +2024-08-26 03:22:04,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=165226.66666666666, ans=10.0 +2024-08-26 03:22:19,578 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:22:22,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=165333.33333333334, ans=0.5 +2024-08-26 03:22:35,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=165333.33333333334, ans=0.0 +2024-08-26 03:22:57,610 INFO [train.py:1114] (1/4) Epoch 13, batch 1150, loss[loss=0.2307, simple_loss=0.2883, pruned_loss=0.06225, ctc_loss=0.1216, over 19582.00 frames. 
], tot_loss[loss=0.2262, simple_loss=0.2858, pruned_loss=0.06056, ctc_loss=0.1139, over 3829936.96 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:23:10,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165493.33333333334, ans=0.1 +2024-08-26 03:23:11,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=165493.33333333334, ans=0.0 +2024-08-26 03:23:19,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=165493.33333333334, ans=0.125 +2024-08-26 03:23:38,674 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.729e+02 2.006e+02 2.456e+02 7.202e+02, threshold=4.012e+02, percent-clipped=3.0 +2024-08-26 03:23:50,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=165600.0, ans=0.09899494936611666 +2024-08-26 03:23:54,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=165653.33333333334, ans=0.0 +2024-08-26 03:24:11,584 INFO [train.py:1114] (1/4) Epoch 13, batch 1200, loss[loss=0.2316, simple_loss=0.2999, pruned_loss=0.05814, ctc_loss=0.1177, over 19832.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.2869, pruned_loss=0.06092, ctc_loss=0.1147, over 3824766.47 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:24:31,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=165760.0, ans=0.2 +2024-08-26 03:25:15,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=12.0 +2024-08-26 03:25:21,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=165813.33333333334, ans=0.125 +2024-08-26 03:26:10,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.72 vs. limit=15.0 +2024-08-26 03:26:10,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=165920.0, ans=0.2 +2024-08-26 03:26:13,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.34 vs. limit=10.0 +2024-08-26 03:26:15,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=165920.0, ans=0.025 +2024-08-26 03:26:20,505 INFO [train.py:1114] (1/4) Epoch 13, batch 1250, loss[loss=0.2435, simple_loss=0.3022, pruned_loss=0.06778, ctc_loss=0.1232, over 19512.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.287, pruned_loss=0.06074, ctc_loss=0.1144, over 3842801.59 frames. 
], batch size: 61, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:26:20,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165973.33333333334, ans=0.1
+2024-08-26 03:27:02,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=166080.0, ans=0.125
+2024-08-26 03:27:22,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166080.0, ans=0.1
+2024-08-26 03:27:23,470 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.311e+02 1.715e+02 1.869e+02 2.285e+02 3.930e+02, threshold=3.738e+02, percent-clipped=0.0
+2024-08-26 03:27:35,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166133.33333333334, ans=0.0
+2024-08-26 03:28:00,522 INFO [train.py:1114] (1/4) Epoch 13, batch 1300, loss[loss=0.2354, simple_loss=0.2913, pruned_loss=0.06582, ctc_loss=0.1198, over 18906.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2857, pruned_loss=0.06002, ctc_loss=0.113, over 3847225.03 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:28:22,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166293.33333333334, ans=0.125
+2024-08-26 03:28:39,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=166293.33333333334, ans=0.125
+2024-08-26 03:28:41,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0
+2024-08-26 03:29:29,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=166400.0, ans=0.125
+2024-08-26 03:29:36,172 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 03:30:17,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=166453.33333333334, ans=0.125
+2024-08-26 03:30:19,039 INFO [train.py:1114] (1/4) Epoch 13, batch 1350, loss[loss=0.2256, simple_loss=0.288, pruned_loss=0.05968, ctc_loss=0.1094, over 19751.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.285, pruned_loss=0.05951, ctc_loss=0.1117, over 3858651.13 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:30:25,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=166506.66666666666, ans=0.025
+2024-08-26 03:31:08,775 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.377e+02 1.736e+02 2.053e+02 2.622e+02 5.263e+02, threshold=4.106e+02, percent-clipped=6.0
+2024-08-26 03:31:25,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=166720.0, ans=0.0
+2024-08-26 03:31:40,384 INFO [train.py:1114] (1/4) Epoch 13, batch 1400, loss[loss=0.1663, simple_loss=0.2377, pruned_loss=0.03389, ctc_loss=0.06789, over 19677.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2848, pruned_loss=0.0594, ctc_loss=0.1117, over 3864928.92 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:32:16,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.49 vs. limit=10.0
+2024-08-26 03:32:20,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166826.66666666666, ans=0.125
+2024-08-26 03:32:48,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.24 vs. limit=15.0
+2024-08-26 03:32:55,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.74 vs. limit=15.0
+2024-08-26 03:33:13,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=166986.66666666666, ans=0.0
+2024-08-26 03:33:14,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.85 vs. limit=10.0
+2024-08-26 03:33:21,474 INFO [train.py:1114] (1/4) Epoch 13, batch 1450, loss[loss=0.2165, simple_loss=0.2824, pruned_loss=0.05522, ctc_loss=0.1005, over 19651.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2852, pruned_loss=0.05958, ctc_loss=0.1119, over 3863219.48 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:33:31,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=167040.0, ans=0.2
+2024-08-26 03:33:47,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=167093.33333333334, ans=0.025
+2024-08-26 03:33:49,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0
+2024-08-26 03:33:59,237 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.756e+02 1.937e+02 2.380e+02 3.895e+02, threshold=3.874e+02, percent-clipped=0.0
+2024-08-26 03:34:06,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167200.0, ans=0.125
+2024-08-26 03:34:06,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=167200.0, ans=0.0
+2024-08-26 03:34:12,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=167200.0, ans=0.0
+2024-08-26 03:34:17,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167200.0, ans=0.1
+2024-08-26 03:34:23,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167253.33333333334, ans=0.125
+2024-08-26 03:34:33,941 INFO [train.py:1114] (1/4) Epoch 13, batch 1500, loss[loss=0.2373, simple_loss=0.2922, pruned_loss=0.06526, ctc_loss=0.1296, over 19591.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2854, pruned_loss=0.0596, ctc_loss=0.1116, over 3862550.12 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:35:01,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167413.33333333334, ans=0.125
+2024-08-26 03:35:03,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=167413.33333333334, ans=0.04949747468305833
+2024-08-26 03:35:34,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167520.0, ans=0.125
+2024-08-26 03:35:39,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=12.0
+2024-08-26 03:35:40,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167520.0, ans=0.1
+2024-08-26 03:35:43,246 INFO [train.py:1114] (1/4) Epoch 13, batch 1550, loss[loss=0.2521, simple_loss=0.3118, pruned_loss=0.06983, ctc_loss=0.1319, over 19627.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2859, pruned_loss=0.06012, ctc_loss=0.1127, over 3846724.73 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:35:46,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.92 vs. limit=6.0
+2024-08-26 03:35:58,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=167626.66666666666, ans=0.125
+2024-08-26 03:36:17,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=167626.66666666666, ans=0.07
+2024-08-26 03:36:35,368 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.778e+02 2.054e+02 2.767e+02 5.252e+02, threshold=4.108e+02, percent-clipped=7.0
+2024-08-26 03:37:04,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=167840.0, ans=0.0
+2024-08-26 03:37:05,302 INFO [train.py:1114] (1/4) Epoch 13, batch 1600, loss[loss=0.1914, simple_loss=0.2677, pruned_loss=0.0418, ctc_loss=0.07855, over 19840.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2852, pruned_loss=0.05982, ctc_loss=0.1125, over 3834490.98 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:37:13,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=167840.0, ans=0.125
+2024-08-26 03:37:15,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=167840.0, ans=0.0
+2024-08-26 03:37:16,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=167840.0, ans=0.125
+2024-08-26 03:37:19,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=167840.0, ans=0.025
+2024-08-26 03:37:36,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167893.33333333334, ans=0.125
+2024-08-26 03:38:35,982 INFO [train.py:1114] (1/4) Epoch 13, batch 1650, loss[loss=0.2036, simple_loss=0.282, pruned_loss=0.04452, ctc_loss=0.09014, over 19655.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2858, pruned_loss=0.06034, ctc_loss=0.1136, over 3832591.27 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 03:39:07,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=168160.0, ans=0.125
+2024-08-26 03:39:19,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.85 vs. limit=22.5
+2024-08-26 03:39:20,047 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.825e+02 2.209e+02 2.614e+02 4.167e+02, threshold=4.418e+02, percent-clipped=2.0
+2024-08-26 03:39:20,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0
+2024-08-26 03:39:26,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168266.66666666666, ans=0.1
+2024-08-26 03:39:55,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=168320.0, ans=0.125
+2024-08-26 03:40:00,081 INFO [train.py:1114] (1/4) Epoch 13, batch 1700, loss[loss=0.1967, simple_loss=0.2548, pruned_loss=0.05045, ctc_loss=0.09452, over 19702.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2848, pruned_loss=0.05947, ctc_loss=0.112, over 3847218.47 frames. ], batch size: 46, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 03:40:17,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. limit=6.0
+2024-08-26 03:40:31,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=12.0
+2024-08-26 03:40:45,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168480.0, ans=0.1
+2024-08-26 03:40:45,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.01 vs. limit=12.0
+2024-08-26 03:40:48,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=168533.33333333334, ans=0.2
+2024-08-26 03:41:05,341 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 03:41:17,852 INFO [train.py:1114] (1/4) Epoch 13, batch 1750, loss[loss=0.1969, simple_loss=0.2527, pruned_loss=0.05033, ctc_loss=0.1009, over 19684.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2842, pruned_loss=0.05904, ctc_loss=0.1114, over 3851805.56 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 03:41:33,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168693.33333333334, ans=0.125
+2024-08-26 03:41:45,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0
+2024-08-26 03:41:47,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=168693.33333333334, ans=0.2
+2024-08-26 03:41:57,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0
+2024-08-26 03:42:01,096 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.702e+02 2.065e+02 2.813e+02 5.109e+02, threshold=4.129e+02, percent-clipped=2.0
+2024-08-26 03:42:45,931 INFO [train.py:1114] (1/4) Epoch 13, batch 1800, loss[loss=0.2092, simple_loss=0.2835, pruned_loss=0.04951, ctc_loss=0.08993, over 19620.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2846, pruned_loss=0.05945, ctc_loss=0.112, over 3853381.72 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 03:43:01,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=168960.0, ans=0.0
+2024-08-26 03:43:19,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=169013.33333333334, ans=0.0
+2024-08-26 03:43:34,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=169066.66666666666, ans=0.2
+2024-08-26 03:43:44,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=169120.0, ans=0.125
+2024-08-26 03:43:53,522 INFO [train.py:1114] (1/4) Epoch 13, batch 1850, loss[loss=0.23, simple_loss=0.29, pruned_loss=0.06177, ctc_loss=0.1163, over 19571.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2838, pruned_loss=0.05901, ctc_loss=0.1111, over 3856130.43 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 03:44:01,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=169173.33333333334, ans=0.0
+2024-08-26 03:44:14,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169226.66666666666, ans=0.1
+2024-08-26 03:44:29,680 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.936e+02 2.666e+02 3.402e+02 5.252e+02, threshold=5.332e+02, percent-clipped=13.0
+2024-08-26 03:44:40,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.66 vs. limit=22.5
+2024-08-26 03:44:44,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=169386.66666666666, ans=0.0
+2024-08-26 03:44:56,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0
+2024-08-26 03:45:07,835 INFO [train.py:1114] (1/4) Epoch 13, batch 1900, loss[loss=0.2239, simple_loss=0.2885, pruned_loss=0.05724, ctc_loss=0.112, over 19628.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2842, pruned_loss=0.05915, ctc_loss=0.1113, over 3861621.93 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 03:45:15,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=169440.0, ans=0.125
+2024-08-26 03:45:26,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.48 vs. limit=22.5
+2024-08-26 03:45:42,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=169546.66666666666, ans=0.2
+2024-08-26 03:45:43,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169546.66666666666, ans=0.0
+2024-08-26 03:46:29,178 INFO [train.py:1114] (1/4) Epoch 13, batch 1950, loss[loss=0.2191, simple_loss=0.2824, pruned_loss=0.05702, ctc_loss=0.1044, over 19591.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2858, pruned_loss=0.05978, ctc_loss=0.1125, over 3870432.45 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 03:46:29,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=169706.66666666666, ans=0.125
+2024-08-26 03:46:57,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169813.33333333334, ans=0.1
+2024-08-26 03:49:49,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=169813.33333333334, ans=0.125
+2024-08-26 03:50:26,628 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.484e+02 1.795e+02 2.018e+02 2.323e+02 3.502e+02, threshold=4.036e+02, percent-clipped=0.0
+2024-08-26 04:20:39,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=169920.0, ans=0.125
+2024-08-26 04:20:52,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.09 vs. limit=15.0
+2024-08-26 04:22:39,285 INFO [train.py:1114] (1/4) Epoch 13, batch 2000, loss[loss=0.1961, simple_loss=0.2496, pruned_loss=0.0519, ctc_loss=0.09707, over 19627.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.2865, pruned_loss=0.06022, ctc_loss=0.1133, over 3856227.76 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 04:40:08,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=170026.66666666666, ans=0.125
+2024-08-26 05:17:15,369 INFO [train.py:1114] (1/4) Epoch 13, batch 2050, loss[loss=0.1872, simple_loss=0.2487, pruned_loss=0.04588, ctc_loss=0.08504, over 19719.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2855, pruned_loss=0.0599, ctc_loss=0.1127, over 3851755.38 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 64.0
+2024-08-26 05:34:32,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.739e+02 2.095e+02 2.592e+02 3.598e+02, threshold=4.189e+02, percent-clipped=0.0
+2024-08-26 05:40:56,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0
+2024-08-26 05:45:20,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=170506.66666666666, ans=0.125
+2024-08-26 05:45:21,774 INFO [train.py:1114] (1/4) Epoch 13, batch 2100, loss[loss=0.2283, simple_loss=0.2881, pruned_loss=0.06206, ctc_loss=0.1109, over 19769.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2849, pruned_loss=0.05941, ctc_loss=0.1119, over 3858448.85 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 64.0
+2024-08-26 05:50:47,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=170560.0, ans=0.0
+2024-08-26 05:50:48,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=170560.0, ans=0.025
+2024-08-26 05:51:08,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=170560.0, ans=0.125
+2024-08-26 05:55:49,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.33 vs. limit=15.0
+2024-08-26 05:57:03,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=170720.0, ans=0.0
+2024-08-26 05:57:05,752 INFO [train.py:1114] (1/4) Epoch 13, batch 2150, loss[loss=0.1958, simple_loss=0.2681, pruned_loss=0.04454, ctc_loss=0.0859, over 19608.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2843, pruned_loss=0.05918, ctc_loss=0.1115, over 3869651.90 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 64.0
+2024-08-26 06:00:38,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170826.66666666666, ans=0.125
+2024-08-26 06:02:00,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170880.0, ans=0.0
+2024-08-26 06:02:10,734 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.801e+02 2.071e+02 2.646e+02 5.963e+02, threshold=4.141e+02, percent-clipped=6.0
+2024-08-26 06:03:39,259 INFO [train.py:1114] (1/4) Epoch 13, batch 2200, loss[loss=0.2428, simple_loss=0.3005, pruned_loss=0.06773, ctc_loss=0.1242, over 19596.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2842, pruned_loss=0.059, ctc_loss=0.111, over 3868024.11 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 64.0
+2024-08-26 06:04:13,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=171093.33333333334, ans=10.0
+2024-08-26 06:04:45,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.90 vs. limit=15.0
+2024-08-26 06:04:56,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=171146.66666666666, ans=0.125
+2024-08-26 06:05:06,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=171200.0, ans=0.07
+2024-08-26 06:05:14,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0
+2024-08-26 06:05:56,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=171253.33333333334, ans=0.125
+2024-08-26 06:06:16,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=171253.33333333334, ans=0.0
+2024-08-26 06:06:26,725 INFO [train.py:1114] (1/4) Epoch 13, batch 2250, loss[loss=0.2164, simple_loss=0.2807, pruned_loss=0.0547, ctc_loss=0.1069, over 19613.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.284, pruned_loss=0.05897, ctc_loss=0.1111, over 3867521.25 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 06:06:26,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=171306.66666666666, ans=0.125
+2024-08-26 06:07:52,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=171413.33333333334, ans=0.2
+2024-08-26 06:07:52,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=171413.33333333334, ans=0.125
+2024-08-26 06:08:14,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=171413.33333333334, ans=0.125
+2024-08-26 06:08:30,393 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.765e+02 2.070e+02 2.599e+02 3.761e+02, threshold=4.140e+02, percent-clipped=0.0
+2024-08-26 06:09:23,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=15.0
+2024-08-26 06:09:29,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.82 vs. limit=15.0
+2024-08-26 06:09:34,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=171466.66666666666, ans=0.0
+2024-08-26 06:10:19,747 INFO [train.py:1114] (1/4) Epoch 13, batch 2300, loss[loss=0.2071, simple_loss=0.2694, pruned_loss=0.05231, ctc_loss=0.1006, over 19513.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2831, pruned_loss=0.05883, ctc_loss=0.111, over 3860632.66 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 06:10:25,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=171573.33333333334, ans=0.125
+2024-08-26 06:10:28,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.94 vs. limit=22.5
+2024-08-26 06:10:39,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.17 vs. limit=12.0
+2024-08-26 06:11:02,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=171680.0, ans=0.125
+2024-08-26 06:11:12,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=171680.0, ans=0.125
+2024-08-26 06:11:24,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=171733.33333333334, ans=0.0
+2024-08-26 06:11:27,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=171733.33333333334, ans=0.0
+2024-08-26 06:11:36,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=171786.66666666666, ans=0.125
+2024-08-26 06:11:42,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=171840.0, ans=0.07
+2024-08-26 06:11:43,311 INFO [train.py:1114] (1/4) Epoch 13, batch 2350, loss[loss=0.2195, simple_loss=0.2937, pruned_loss=0.05211, ctc_loss=0.1027, over 19661.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2831, pruned_loss=0.05874, ctc_loss=0.1106, over 3863505.81 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 06:11:51,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.38 vs. limit=15.0
+2024-08-26 06:12:09,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=171946.66666666666, ans=0.125
+2024-08-26 06:12:12,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=171946.66666666666, ans=0.125
+2024-08-26 06:12:16,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.773e+02 2.247e+02 3.255e+02 4.983e+02, threshold=4.494e+02, percent-clipped=2.0
+2024-08-26 06:12:18,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=172000.0, ans=0.125
+2024-08-26 06:12:29,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172053.33333333334, ans=0.1
+2024-08-26 06:12:46,290 INFO [train.py:1114] (1/4) Epoch 13, batch 2400, loss[loss=0.2308, simple_loss=0.2934, pruned_loss=0.05998, ctc_loss=0.1207, over 19235.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2853, pruned_loss=0.05975, ctc_loss=0.1122, over 3858875.86 frames. ], batch size: 71, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 06:13:28,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172213.33333333334, ans=0.125
+2024-08-26 06:13:53,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=172266.66666666666, ans=0.025
+2024-08-26 06:14:00,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=172320.0, ans=0.05
+2024-08-26 06:14:00,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=172320.0, ans=0.2
+2024-08-26 06:14:03,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172320.0, ans=0.125
+2024-08-26 06:14:05,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172320.0, ans=0.125
+2024-08-26 06:14:08,364 INFO [train.py:1114] (1/4) Epoch 13, batch 2450, loss[loss=0.3062, simple_loss=0.3326, pruned_loss=0.1018, ctc_loss=0.1907, over 13687.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2901, pruned_loss=0.06343, ctc_loss=0.1196, over 3730503.06 frames. ], batch size: 142, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 06:14:17,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=172373.33333333334, ans=0.0
+2024-08-26 06:14:33,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=172480.0, ans=0.125
+2024-08-26 06:14:43,288 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.935e+02 2.072e+02 2.350e+02 4.711e+02, threshold=4.143e+02, percent-clipped=2.0
+2024-08-26 06:14:43,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=172533.33333333334, ans=0.0
+2024-08-26 06:16:27,503 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:16:27,504 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-26 06:17:53,178 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.1437, 3.0885, 3.6307, 2.8115], device='cuda:1')
+2024-08-26 06:17:58,796 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1898, simple_loss=0.2778, pruned_loss=0.03769, ctc_loss=0.06578, over 944034.00 frames.
+2024-08-26 06:18:12,592 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 14083MB
+2024-08-26 06:18:29,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172634.66666666666, ans=0.0
+2024-08-26 06:18:39,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172634.66666666666, ans=0.0
+2024-08-26 06:18:56,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=172688.0, ans=0.125
+2024-08-26 06:19:03,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.62 vs. limit=15.0
+2024-08-26 06:19:25,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=12.0
+2024-08-26 06:19:53,748 INFO [train.py:1114] (1/4) Epoch 14, batch 50, loss[loss=0.2162, simple_loss=0.2658, pruned_loss=0.06082, ctc_loss=0.1123, over 19699.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2864, pruned_loss=0.06032, ctc_loss=0.1134, over 844835.93 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:20:18,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=172848.0, ans=0.0
+2024-08-26 06:20:43,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=172954.66666666666, ans=0.2
+2024-08-26 06:20:53,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=172954.66666666666, ans=0.125
+2024-08-26 06:21:17,216 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.738e+02 2.047e+02 2.487e+02 4.948e+02, threshold=4.095e+02, percent-clipped=4.0
+2024-08-26 06:21:48,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=173061.33333333334, ans=0.0
+2024-08-26 06:21:51,846 INFO [train.py:1114] (1/4) Epoch 14, batch 100, loss[loss=0.2132, simple_loss=0.2728, pruned_loss=0.05592, ctc_loss=0.104, over 19712.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.2875, pruned_loss=0.06052, ctc_loss=0.114, over 1499995.38 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:23:09,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=173328.0, ans=0.0
+2024-08-26 06:23:33,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=173381.33333333334, ans=0.025
+2024-08-26 06:23:38,133 INFO [train.py:1114] (1/4) Epoch 14, batch 150, loss[loss=0.2055, simple_loss=0.2629, pruned_loss=0.05363, ctc_loss=0.102, over 19717.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2839, pruned_loss=0.05839, ctc_loss=0.1102, over 2027349.01 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:23:51,259 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 06:24:03,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.90 vs. limit=22.5
+2024-08-26 06:24:07,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=173434.66666666666, ans=0.07
+2024-08-26 06:24:10,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=173434.66666666666, ans=0.125
+2024-08-26 06:24:29,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.72 vs. limit=15.0
+2024-08-26 06:24:32,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=173541.33333333334, ans=0.05
+2024-08-26 06:24:43,092 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 06:24:49,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.676e+02 1.898e+02 2.213e+02 4.155e+02, threshold=3.795e+02, percent-clipped=1.0
+2024-08-26 06:24:49,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=173594.66666666666, ans=0.125
+2024-08-26 06:24:57,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=173594.66666666666, ans=0.025
+2024-08-26 06:25:00,470 INFO [train.py:1114] (1/4) Epoch 14, batch 200, loss[loss=0.2395, simple_loss=0.2941, pruned_loss=0.06666, ctc_loss=0.129, over 18213.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2818, pruned_loss=0.05741, ctc_loss=0.1077, over 2434363.68 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:25:01,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=173648.0, ans=0.0
+2024-08-26 06:25:14,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173648.0, ans=0.125
+2024-08-26 06:25:47,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=173754.66666666666, ans=0.5
+2024-08-26 06:26:01,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=173861.33333333334, ans=0.2
+2024-08-26 06:26:12,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.59 vs. limit=15.0
+2024-08-26 06:26:16,064 INFO [train.py:1114] (1/4) Epoch 14, batch 250, loss[loss=0.2392, simple_loss=0.2973, pruned_loss=0.06622, ctc_loss=0.1215, over 19420.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2823, pruned_loss=0.05748, ctc_loss=0.1083, over 2753707.57 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:26:16,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173914.66666666666, ans=0.125
+2024-08-26 06:26:40,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=173968.0, ans=0.125
+2024-08-26 06:26:59,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174021.33333333334, ans=0.125
+2024-08-26 06:27:18,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.683e+02 2.061e+02 2.648e+02 4.927e+02, threshold=4.123e+02, percent-clipped=4.0
+2024-08-26 06:27:18,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=174128.0, ans=0.2
+2024-08-26 06:27:22,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=174128.0, ans=0.025
+2024-08-26 06:27:28,129 INFO [train.py:1114] (1/4) Epoch 14, batch 300, loss[loss=0.2515, simple_loss=0.304, pruned_loss=0.07298, ctc_loss=0.1324, over 19548.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2823, pruned_loss=0.05751, ctc_loss=0.1084, over 2999566.01 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 06:27:36,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.06 vs. limit=22.5
+2024-08-26 06:27:48,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=174234.66666666666, ans=0.125
+2024-08-26 06:27:52,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=174288.0, ans=0.125
+2024-08-26 06:28:04,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.52 vs. limit=15.0
+2024-08-26 06:28:06,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174288.0, ans=0.125
+2024-08-26 06:28:11,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174341.33333333334, ans=0.1
+2024-08-26 06:28:11,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0
+2024-08-26 06:28:13,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174341.33333333334, ans=0.125
+2024-08-26 06:28:34,436 INFO [train.py:1114] (1/4) Epoch 14, batch 350, loss[loss=0.171, simple_loss=0.2399, pruned_loss=0.03702, ctc_loss=0.07009, over 19763.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2826, pruned_loss=0.05731, ctc_loss=0.1078, over 3190345.80 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:28:43,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.09 vs. limit=15.0
+2024-08-26 06:28:53,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174501.33333333334, ans=0.1
+2024-08-26 06:29:03,927 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 06:29:32,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.657e+02 1.894e+02 2.440e+02 4.007e+02, threshold=3.787e+02, percent-clipped=0.0
+2024-08-26 06:29:42,962 INFO [train.py:1114] (1/4) Epoch 14, batch 400, loss[loss=0.2291, simple_loss=0.2913, pruned_loss=0.06002, ctc_loss=0.117, over 19500.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2823, pruned_loss=0.05734, ctc_loss=0.1079, over 3341930.78 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:29:48,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174714.66666666666, ans=0.1
+2024-08-26 06:29:56,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=10.71 vs. limit=15.0
+2024-08-26 06:30:40,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=174874.66666666666, ans=0.95
+2024-08-26 06:30:40,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=174874.66666666666, ans=0.0
+2024-08-26 06:30:58,866 INFO [train.py:1114] (1/4) Epoch 14, batch 450, loss[loss=0.2125, simple_loss=0.2825, pruned_loss=0.05198, ctc_loss=0.09665, over 19622.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2827, pruned_loss=0.05793, ctc_loss=0.1087, over 3450368.11 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:31:14,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0
+2024-08-26 06:31:19,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.66 vs. limit=12.0
+2024-08-26 06:31:51,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=15.0
+2024-08-26 06:31:59,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=175088.0, ans=0.035
+2024-08-26 06:32:32,617 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.702e+02 1.875e+02 2.205e+02 3.904e+02, threshold=3.749e+02, percent-clipped=2.0
+2024-08-26 06:32:59,821 INFO [train.py:1114] (1/4) Epoch 14, batch 500, loss[loss=0.2416, simple_loss=0.2995, pruned_loss=0.0672, ctc_loss=0.1232, over 19650.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2819, pruned_loss=0.05764, ctc_loss=0.1083, over 3545951.97 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:33:33,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=175301.33333333334, ans=0.125
+2024-08-26 06:33:34,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=175301.33333333334, ans=0.125
+2024-08-26 06:34:00,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=175354.66666666666, ans=0.125
+2024-08-26 06:34:32,917 INFO [train.py:1114] (1/4) Epoch 14, batch 550, loss[loss=0.2288, simple_loss=0.2915, pruned_loss=0.05943, ctc_loss=0.118, over 19266.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2819, pruned_loss=0.05734, ctc_loss=0.108, over 3608026.80 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:34:41,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=175514.66666666666, ans=0.0
+2024-08-26 06:34:44,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=175514.66666666666, ans=0.2
+2024-08-26 06:35:13,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=175674.66666666666, ans=0.95
+2024-08-26 06:35:20,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=175674.66666666666, ans=0.5
+2024-08-26 06:35:32,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=175674.66666666666, ans=0.025
+2024-08-26 06:35:35,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=175728.0, ans=0.0
+2024-08-26 06:35:35,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175728.0, ans=0.125
+2024-08-26 06:35:36,408 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.729e+02 1.957e+02 2.291e+02 4.042e+02, threshold=3.913e+02, percent-clipped=2.0
+2024-08-26 06:36:05,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=175728.0, ans=0.0
+2024-08-26 06:36:15,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=175728.0, ans=0.125
+2024-08-26 06:36:18,838 INFO [train.py:1114] (1/4) Epoch 14, batch 600, loss[loss=0.2777, simple_loss=0.3209, pruned_loss=0.08546, ctc_loss=0.1587, over 19321.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2823, pruned_loss=0.05755, ctc_loss=0.1083, over 3664666.00 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:37:46,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175834.66666666666, ans=0.1
+2024-08-26 06:38:31,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=175834.66666666666, ans=0.0
+2024-08-26 06:39:25,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=176048.0, ans=0.05
+2024-08-26 06:39:25,933 INFO [train.py:1114] (1/4) Epoch 14, batch 650, loss[loss=0.2152, simple_loss=0.2852, pruned_loss=0.05222, ctc_loss=0.1019, over 19752.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2821, pruned_loss=0.0576, ctc_loss=0.1083, over 3715941.04 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:40:12,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176048.0, ans=0.1
+2024-08-26 06:40:13,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176048.0, ans=0.1
+2024-08-26 06:41:31,385 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.772e+02 2.123e+02 2.635e+02 4.354e+02, threshold=4.247e+02, percent-clipped=3.0
+2024-08-26 06:41:31,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176261.33333333334, ans=0.1
+2024-08-26 06:41:33,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=176261.33333333334, ans=0.2
+2024-08-26 06:41:37,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=176261.33333333334, ans=0.0
+2024-08-26 06:41:42,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=176261.33333333334, ans=0.125
+2024-08-26 06:41:45,013 INFO [train.py:1114] (1/4) Epoch 14, batch 700, loss[loss=0.2111, simple_loss=0.2715, pruned_loss=0.05454, ctc_loss=0.104, over 19721.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2827, pruned_loss=0.05776, ctc_loss=0.1086, over 3748643.73 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:41:56,863 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 06:42:07,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=176421.33333333334, ans=0.125
+2024-08-26 06:42:31,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.16 vs. limit=15.0
+2024-08-26 06:42:33,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=176474.66666666666, ans=0.125
+2024-08-26 06:42:37,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=176474.66666666666, ans=0.2
+2024-08-26 06:42:48,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=176528.0, ans=0.07
+2024-08-26 06:42:51,115 INFO [train.py:1114] (1/4) Epoch 14, batch 750, loss[loss=0.2212, simple_loss=0.285, pruned_loss=0.05666, ctc_loss=0.1103, over 19855.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2821, pruned_loss=0.0574, ctc_loss=0.1081, over 3776206.67 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:43:01,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=176581.33333333334, ans=0.125
+2024-08-26 06:44:17,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=176741.33333333334, ans=0.2
+2024-08-26 06:44:27,361 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.803e+02 2.358e+02 3.080e+02 4.835e+02, threshold=4.715e+02, percent-clipped=7.0
+2024-08-26 06:44:41,972 INFO [train.py:1114] (1/4) Epoch 14, batch 800, loss[loss=0.1946, simple_loss=0.2561, pruned_loss=0.04803, ctc_loss=0.09247, over 19804.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2822, pruned_loss=0.05739, ctc_loss=0.1082, over 3797126.98 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:45:07,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.92 vs. limit=15.0
+2024-08-26 06:45:09,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.93 vs. limit=15.0
+2024-08-26 06:45:17,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177008.0, ans=0.125
+2024-08-26 06:45:52,041 INFO [train.py:1114] (1/4) Epoch 14, batch 850, loss[loss=0.2126, simple_loss=0.2844, pruned_loss=0.05111, ctc_loss=0.09658, over 19658.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2821, pruned_loss=0.05773, ctc_loss=0.1088, over 3815224.61 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:46:34,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=177221.33333333334, ans=0.125
+2024-08-26 06:46:36,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177221.33333333334, ans=0.1
+2024-08-26 06:46:48,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=15.0
+2024-08-26 06:47:06,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=177274.66666666666, ans=0.125
+2024-08-26 06:47:11,726 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.690e+02 1.974e+02 2.351e+02 3.908e+02, threshold=3.948e+02, percent-clipped=0.0
+2024-08-26 06:47:15,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.81 vs. limit=15.0
+2024-08-26 06:47:18,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.28 vs. limit=22.5
+2024-08-26 06:47:24,573 INFO [train.py:1114] (1/4) Epoch 14, batch 900, loss[loss=0.1926, simple_loss=0.2601, pruned_loss=0.04556, ctc_loss=0.08517, over 19433.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2822, pruned_loss=0.05783, ctc_loss=0.1088, over 3818547.13 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:47:32,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=177381.33333333334, ans=0.125
+2024-08-26 06:48:02,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=177541.33333333334, ans=0.0
+2024-08-26 06:48:10,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177541.33333333334, ans=0.1
+2024-08-26 06:48:29,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=177594.66666666666, ans=0.125
+2024-08-26 06:48:38,037 INFO [train.py:1114] (1/4) Epoch 14, batch 950, loss[loss=0.2398, simple_loss=0.2821, pruned_loss=0.07171, ctc_loss=0.1353, over 19512.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2826, pruned_loss=0.05815, ctc_loss=0.1095, over 3821519.80 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 06:48:48,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=177648.0, ans=0.0
+2024-08-26 06:48:52,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177701.33333333334, ans=0.125
+2024-08-26 06:49:02,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=177701.33333333334, ans=0.0
+2024-08-26 06:49:03,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=177701.33333333334, ans=0.0
+2024-08-26 06:49:03,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=177701.33333333334, ans=0.0
+2024-08-26 06:49:06,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1
+2024-08-26 06:49:36,179 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.810e+02 2.092e+02 2.519e+02 4.035e+02, threshold=4.185e+02, percent-clipped=1.0
+2024-08-26 06:50:06,704 INFO [train.py:1114] (1/4) Epoch 14, batch 1000, loss[loss=0.1924, simple_loss=0.2641, pruned_loss=0.04389, ctc_loss=0.0825, over 19863.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2833, pruned_loss=0.05845, ctc_loss=0.11, over 3818921.51 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:50:11,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=177914.66666666666, ans=0.125
+2024-08-26 06:50:22,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.02 vs. limit=15.0
+2024-08-26 06:51:21,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178128.0, ans=0.125
+2024-08-26 06:51:23,205 INFO [train.py:1114] (1/4) Epoch 14, batch 1050, loss[loss=0.2502, simple_loss=0.3045, pruned_loss=0.07032, ctc_loss=0.1382, over 19861.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2823, pruned_loss=0.058, ctc_loss=0.109, over 3824616.59 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:51:47,371 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 06:51:57,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178288.0, ans=0.125
+2024-08-26 06:52:11,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=178341.33333333334, ans=0.0
+2024-08-26 06:52:17,047 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.429e+02 1.767e+02 2.034e+02 2.568e+02 4.426e+02, threshold=4.067e+02, percent-clipped=2.0
+2024-08-26 06:52:39,168 INFO [train.py:1114] (1/4) Epoch 14, batch 1100, loss[loss=0.2238, simple_loss=0.2854, pruned_loss=0.05921, ctc_loss=0.1093, over 19567.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2818, pruned_loss=0.05758, ctc_loss=0.1084, over 3832045.48 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:52:43,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.09 vs. limit=12.0
+2024-08-26 06:53:02,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0
+2024-08-26 06:53:13,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0
+2024-08-26 06:53:35,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.27 vs. limit=15.0
+2024-08-26 06:53:36,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=178661.33333333334, ans=0.0
+2024-08-26 06:53:42,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=178661.33333333334, ans=0.125
+2024-08-26 06:53:49,717 INFO [train.py:1114] (1/4) Epoch 14, batch 1150, loss[loss=0.1772, simple_loss=0.2502, pruned_loss=0.03772, ctc_loss=0.07201, over 19594.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2818, pruned_loss=0.05789, ctc_loss=0.1091, over 3830601.15 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:53:56,102 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 06:54:12,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178768.0, ans=0.125
+2024-08-26 06:54:24,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=178821.33333333334, ans=0.0
+2024-08-26 06:54:25,781 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.88 vs. limit=22.5
+2024-08-26 06:54:47,790 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.255e+02 1.672e+02 1.916e+02 2.259e+02 4.129e+02, threshold=3.832e+02, percent-clipped=1.0
+2024-08-26 06:54:58,198 INFO [train.py:1114] (1/4) Epoch 14, batch 1200, loss[loss=0.2426, simple_loss=0.3037, pruned_loss=0.0653, ctc_loss=0.1273, over 19831.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2831, pruned_loss=0.05843, ctc_loss=0.1101, over 3826118.36 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:54:58,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178981.33333333334, ans=0.125
+2024-08-26 06:54:58,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=178981.33333333334, ans=0.0
+2024-08-26 06:55:08,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=179034.66666666666, ans=0.125
+2024-08-26 06:55:17,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.46 vs. limit=10.0
+2024-08-26 06:55:50,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179194.66666666666, ans=0.125
+2024-08-26 06:55:55,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=179194.66666666666, ans=0.125
+2024-08-26 06:56:27,982 INFO [train.py:1114] (1/4) Epoch 14, batch 1250, loss[loss=0.2572, simple_loss=0.3099, pruned_loss=0.07477, ctc_loss=0.1373, over 19505.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2834, pruned_loss=0.05822, ctc_loss=0.1095, over 3843744.84 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:56:41,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179248.0, ans=0.1
+2024-08-26 06:56:51,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=179301.33333333334, ans=0.125
+2024-08-26 06:57:24,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=179354.66666666666, ans=0.125
+2024-08-26 06:57:58,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=179408.0, ans=0.125
+2024-08-26 06:58:02,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=179408.0, ans=0.2
+2024-08-26 06:58:13,348 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.864e+02 2.134e+02 2.537e+02 3.723e+02, threshold=4.267e+02, percent-clipped=0.0
+2024-08-26 06:58:22,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=179461.33333333334, ans=0.0
+2024-08-26 06:58:22,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=179461.33333333334, ans=0.125
+2024-08-26 06:58:31,275 INFO [train.py:1114] (1/4) Epoch 14, batch 1300, loss[loss=0.2493, simple_loss=0.3052, pruned_loss=0.07115, ctc_loss=0.1276, over 18980.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2827, pruned_loss=0.05783, ctc_loss=0.109, over 3845894.64 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 06:58:37,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179514.66666666666, ans=0.125
+2024-08-26 06:59:07,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179568.0, ans=0.125
+2024-08-26 06:59:14,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179621.33333333334, ans=0.125
+2024-08-26 07:00:05,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179674.66666666666, ans=0.1
+2024-08-26 07:00:06,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=179674.66666666666, ans=15.0
+2024-08-26 07:00:35,359 INFO [train.py:1114] (1/4) Epoch 14, batch 1350, loss[loss=0.2309, simple_loss=0.2904, pruned_loss=0.06253, ctc_loss=0.1158, over 19772.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2821, pruned_loss=0.05729, ctc_loss=0.1077, over 3856351.84 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 07:00:41,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=179781.33333333334, ans=0.0
+2024-08-26 07:00:57,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=12.0
+2024-08-26 07:02:26,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.690e+02 1.870e+02 2.214e+02 3.706e+02, threshold=3.740e+02, percent-clipped=0.0
+2024-08-26 07:02:47,355 INFO [train.py:1114] (1/4) Epoch 14, batch 1400, loss[loss=0.1892, simple_loss=0.2495, pruned_loss=0.04718, ctc_loss=0.08634, over 19666.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2811, pruned_loss=0.0568, ctc_loss=0.1067, over 3863430.61 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 07:03:00,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=180048.0, ans=0.025
+2024-08-26 07:03:26,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180101.33333333334, ans=0.0
+2024-08-26 07:03:28,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.17 vs. limit=6.0
+2024-08-26 07:03:43,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=180154.66666666666, ans=0.125
+2024-08-26 07:04:04,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=180208.0, ans=0.125
+2024-08-26 07:04:25,280 INFO [train.py:1114] (1/4) Epoch 14, batch 1450, loss[loss=0.2155, simple_loss=0.2856, pruned_loss=0.05341, ctc_loss=0.09656, over 19663.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2821, pruned_loss=0.05743, ctc_loss=0.1078, over 3861905.87 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 07:04:47,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=180368.0, ans=0.125
+2024-08-26 07:05:05,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180368.0, ans=0.1
+2024-08-26 07:05:26,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180474.66666666666, ans=0.125
+2024-08-26 07:05:36,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180474.66666666666, ans=0.125
+2024-08-26 07:05:38,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180474.66666666666, ans=0.125
+2024-08-26 07:05:38,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.08 vs. limit=15.0
+2024-08-26 07:05:41,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.716e+02 1.963e+02 2.339e+02 6.137e+02, threshold=3.925e+02, percent-clipped=1.0
+2024-08-26 07:05:57,997 INFO [train.py:1114] (1/4) Epoch 14, batch 1500, loss[loss=0.2451, simple_loss=0.3022, pruned_loss=0.06745, ctc_loss=0.1329, over 19586.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2828, pruned_loss=0.05764, ctc_loss=0.1081, over 3861529.31 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 07:06:12,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=180634.66666666666, ans=0.0
+2024-08-26 07:06:49,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180688.0, ans=0.125
+2024-08-26 07:07:05,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.97 vs. limit=15.0
+2024-08-26 07:07:07,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=180741.33333333334, ans=0.125
+2024-08-26 07:07:17,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180794.66666666666, ans=0.125
+2024-08-26 07:07:17,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=180794.66666666666, ans=0.0
+2024-08-26 07:07:26,451 INFO [train.py:1114] (1/4) Epoch 14, batch 1550, loss[loss=0.2363, simple_loss=0.3052, pruned_loss=0.06148, ctc_loss=0.1111, over 19621.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2832, pruned_loss=0.05811, ctc_loss=0.1093, over 3846645.55 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 07:07:51,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=180901.33333333334, ans=0.125
+2024-08-26 07:07:54,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180954.66666666666, ans=0.125
+2024-08-26 07:08:11,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=181008.0, ans=0.05
+2024-08-26 07:08:20,833 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.735e+02 1.996e+02 2.323e+02 4.332e+02, threshold=3.992e+02, percent-clipped=2.0
+2024-08-26 07:08:45,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=181114.66666666666, ans=0.0
+2024-08-26 07:08:46,920 INFO [train.py:1114] (1/4) Epoch 14, batch 1600, loss[loss=0.1967, simple_loss=0.2748, pruned_loss=0.04284, ctc_loss=0.08241, over 19843.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2827, pruned_loss=0.05797, ctc_loss=0.1092, over 3835823.69 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 07:08:50,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=181114.66666666666, ans=0.125
+2024-08-26 07:09:24,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.40 vs. limit=15.0
+2024-08-26 07:09:33,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.26 vs.
limit=22.5 +2024-08-26 07:10:02,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=181274.66666666666, ans=0.5 +2024-08-26 07:10:22,351 INFO [train.py:1114] (1/4) Epoch 14, batch 1650, loss[loss=0.2107, simple_loss=0.2832, pruned_loss=0.05065, ctc_loss=0.09226, over 19621.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2821, pruned_loss=0.05769, ctc_loss=0.1087, over 3832200.11 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:10:32,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.67 vs. limit=15.0 +2024-08-26 07:10:36,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181434.66666666666, ans=0.125 +2024-08-26 07:10:51,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=181488.0, ans=0.0 +2024-08-26 07:10:54,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181488.0, ans=0.125 +2024-08-26 07:10:56,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=181541.33333333334, ans=0.125 +2024-08-26 07:11:02,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.42 vs. limit=22.5 +2024-08-26 07:11:10,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.857e+02 2.243e+02 2.957e+02 5.258e+02, threshold=4.486e+02, percent-clipped=5.0 +2024-08-26 07:11:14,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=181594.66666666666, ans=0.2 +2024-08-26 07:11:19,632 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.47 vs. limit=22.5 +2024-08-26 07:11:28,232 INFO [train.py:1114] (1/4) Epoch 14, batch 1700, loss[loss=0.1928, simple_loss=0.2532, pruned_loss=0.04628, ctc_loss=0.0996, over 19669.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2819, pruned_loss=0.05742, ctc_loss=0.1083, over 3846575.53 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:11:39,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=181701.33333333334, ans=0.2 +2024-08-26 07:11:55,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.99 vs. 
limit=22.5 +2024-08-26 07:12:04,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181808.0, ans=0.0 +2024-08-26 07:12:15,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=181861.33333333334, ans=0.2 +2024-08-26 07:12:18,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=181861.33333333334, ans=0.025 +2024-08-26 07:12:20,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181861.33333333334, ans=0.125 +2024-08-26 07:12:22,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.58 vs. limit=15.0 +2024-08-26 07:12:24,386 INFO [train.py:1114] (1/4) Epoch 14, batch 1750, loss[loss=0.1948, simple_loss=0.2574, pruned_loss=0.04904, ctc_loss=0.08515, over 19673.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2819, pruned_loss=0.05742, ctc_loss=0.1082, over 3852118.27 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:12:54,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.68 vs. limit=10.0 +2024-08-26 07:13:26,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=15.0 +2024-08-26 07:13:28,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182074.66666666666, ans=0.125 +2024-08-26 07:13:35,899 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.769e+02 2.123e+02 2.747e+02 4.234e+02, threshold=4.245e+02, percent-clipped=0.0 +2024-08-26 07:13:45,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=182128.0, ans=0.0 +2024-08-26 07:13:51,687 INFO [train.py:1114] (1/4) Epoch 14, batch 1800, loss[loss=0.2195, simple_loss=0.2878, pruned_loss=0.05413, ctc_loss=0.1073, over 19615.00 frames. ], tot_loss[loss=0.22, simple_loss=0.282, pruned_loss=0.05731, ctc_loss=0.1082, over 3853355.10 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:14:14,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=182288.0, ans=0.0 +2024-08-26 07:14:15,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=182288.0, ans=0.125 +2024-08-26 07:14:18,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=182288.0, ans=0.0 +2024-08-26 07:14:22,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=182288.0, ans=22.5 +2024-08-26 07:14:30,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=182341.33333333334, ans=0.025 +2024-08-26 07:14:42,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.32 vs. 
limit=10.0 +2024-08-26 07:14:49,558 INFO [train.py:1114] (1/4) Epoch 14, batch 1850, loss[loss=0.2, simple_loss=0.2773, pruned_loss=0.0443, ctc_loss=0.08498, over 19584.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2817, pruned_loss=0.05716, ctc_loss=0.1079, over 3856493.44 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:14:50,788 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:14:52,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=182448.0, ans=0.125 +2024-08-26 07:14:58,868 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:15:23,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=182608.0, ans=0.125 +2024-08-26 07:15:32,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=182608.0, ans=0.125 +2024-08-26 07:15:35,869 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.755e+02 2.000e+02 2.500e+02 5.147e+02, threshold=4.001e+02, percent-clipped=3.0 +2024-08-26 07:15:42,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=182661.33333333334, ans=0.0 +2024-08-26 07:15:52,255 INFO [train.py:1114] (1/4) Epoch 14, batch 1900, loss[loss=0.2068, simple_loss=0.2819, pruned_loss=0.04819, ctc_loss=0.08824, over 19663.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.282, pruned_loss=0.05693, ctc_loss=0.1074, over 3860795.35 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:15:59,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=182714.66666666666, ans=0.2 +2024-08-26 07:16:05,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=182768.0, ans=0.125 +2024-08-26 07:16:16,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.82 vs. limit=15.0 +2024-08-26 07:16:22,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=182821.33333333334, ans=0.0 +2024-08-26 07:16:40,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=182874.66666666666, ans=0.125 +2024-08-26 07:16:56,691 INFO [train.py:1114] (1/4) Epoch 14, batch 1950, loss[loss=0.2259, simple_loss=0.2785, pruned_loss=0.06264, ctc_loss=0.1201, over 19584.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2828, pruned_loss=0.05699, ctc_loss=0.1075, over 3869924.75 frames. 
], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:17:15,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=182981.33333333334, ans=0.125 +2024-08-26 07:17:15,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=182981.33333333334, ans=0.125 +2024-08-26 07:17:18,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=183034.66666666666, ans=0.125 +2024-08-26 07:17:18,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=183034.66666666666, ans=0.07 +2024-08-26 07:17:31,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=183088.0, ans=0.0 +2024-08-26 07:17:51,472 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.06 vs. limit=22.5 +2024-08-26 07:17:51,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.39 vs. limit=10.0 +2024-08-26 07:17:55,521 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.666e+02 1.941e+02 2.281e+02 4.229e+02, threshold=3.882e+02, percent-clipped=1.0 +2024-08-26 07:18:04,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=183194.66666666666, ans=0.125 +2024-08-26 07:18:14,087 INFO [train.py:1114] (1/4) Epoch 14, batch 2000, loss[loss=0.1862, simple_loss=0.2495, pruned_loss=0.04544, ctc_loss=0.08006, over 19667.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2828, pruned_loss=0.05695, ctc_loss=0.1072, over 3854683.60 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 64.0 +2024-08-26 07:18:25,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=183301.33333333334, ans=0.0 +2024-08-26 07:18:55,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.89 vs. limit=22.5 +2024-08-26 07:19:02,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183461.33333333334, ans=0.125 +2024-08-26 07:19:09,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=183461.33333333334, ans=0.2 +2024-08-26 07:19:11,482 INFO [train.py:1114] (1/4) Epoch 14, batch 2050, loss[loss=0.2008, simple_loss=0.2599, pruned_loss=0.05121, ctc_loss=0.09832, over 19696.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2819, pruned_loss=0.05685, ctc_loss=0.1071, over 3850423.23 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:19:11,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=183514.66666666666, ans=0.0 +2024-08-26 07:19:18,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=24.28 vs. 
limit=22.5 +2024-08-26 07:19:27,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=183568.0, ans=0.0 +2024-08-26 07:20:51,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.705e+02 1.994e+02 2.461e+02 3.917e+02, threshold=3.988e+02, percent-clipped=1.0 +2024-08-26 07:22:37,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=183728.0, ans=0.125 +2024-08-26 07:24:48,257 INFO [train.py:1114] (1/4) Epoch 14, batch 2100, loss[loss=0.2056, simple_loss=0.279, pruned_loss=0.04851, ctc_loss=0.08796, over 19783.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2811, pruned_loss=0.0565, ctc_loss=0.1063, over 3857559.04 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:41:47,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=183834.66666666666, ans=0.125 +2024-08-26 08:06:40,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=183994.66666666666, ans=0.04949747468305833 +2024-08-26 08:13:15,353 INFO [train.py:1114] (1/4) Epoch 14, batch 2150, loss[loss=0.2404, simple_loss=0.2951, pruned_loss=0.06717, ctc_loss=0.1283, over 19581.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2812, pruned_loss=0.05695, ctc_loss=0.107, over 3868055.93 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 08:13:16,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0 +2024-08-26 08:15:51,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184048.0, ans=0.0 +2024-08-26 08:16:34,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=184048.0, ans=0.2 +2024-08-26 08:22:50,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=184101.33333333334, ans=0.0 +2024-08-26 08:41:44,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184154.66666666666, ans=0.1 +2024-08-26 08:42:48,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184154.66666666666, ans=0.1 +2024-08-26 08:45:53,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=184208.0, ans=0.125 +2024-08-26 08:55:58,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=184208.0, ans=0.125 +2024-08-26 08:59:37,603 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.765e+02 2.052e+02 2.784e+02 6.261e+02, threshold=4.104e+02, percent-clipped=7.0 +2024-08-26 09:01:23,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=184261.33333333334, ans=0.125 +2024-08-26 09:03:09,759 INFO [train.py:1114] (1/4) Epoch 14, batch 2200, loss[loss=0.2232, simple_loss=0.2847, pruned_loss=0.05904, ctc_loss=0.1089, over 19577.00 frames. 
], tot_loss[loss=0.2188, simple_loss=0.281, pruned_loss=0.05691, ctc_loss=0.107, over 3867342.29 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 09:03:10,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=184314.66666666666, ans=0.1 +2024-08-26 09:04:32,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184314.66666666666, ans=0.125 +2024-08-26 09:05:56,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=184314.66666666666, ans=0.0 +2024-08-26 09:05:56,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184314.66666666666, ans=0.0 +2024-08-26 09:18:44,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.97 vs. limit=22.5 +2024-08-26 09:20:13,652 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 09:20:23,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=184474.66666666666, ans=0.07 +2024-08-26 09:21:10,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=184528.0, ans=0.1 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-2 new file mode 100644 index 0000000000000000000000000000000000000000..5aa3cacb7efd787e8e4a76be2c82f81109a8f9e5 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-2 @@ -0,0 +1,4958 @@ +2024-08-25 02:23:27,590 INFO [train.py:1182] (2/4) Training started +2024-08-25 02:23:28,586 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-25 02:23:28,589 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 
'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-25 02:23:28,590 INFO [train.py:1212] (2/4) About to create model +2024-08-25 02:23:29,302 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-25 02:23:29,453 INFO [train.py:1231] (2/4) Using DDP +2024-08-25 02:23:51,117 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-25 02:23:51,496 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-25 02:23:53,043 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-25 02:23:53,051 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-25 02:23:53,294 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-25 02:23:53,346 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-25 02:23:53,648 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-25 02:23:53,648 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. 
+2024-08-25 02:27:50,712 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12243MB
+2024-08-25 02:27:52,182 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12325MB
+2024-08-25 02:28:01,912 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12325MB
+2024-08-25 02:28:03,371 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12500MB
+2024-08-25 02:28:26,154 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12500MB
+2024-08-25 02:28:27,771 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12500MB
+2024-08-25 02:29:13,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.33 vs. limit=7.5
+2024-08-25 02:29:16,117 INFO [train.py:1114] (2/4) Epoch 1, batch 0, loss[loss=8.848, simple_loss=7.222, pruned_loss=6.803, ctc_loss=4.724, over 19418.00 frames. ], tot_loss[loss=8.848, simple_loss=7.222, pruned_loss=6.803, ctc_loss=4.724, over 19418.00 frames. ], batch size: 48, lr: 2.25e-02, grad_scale: 1.0
+2024-08-25 02:29:16,117 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 02:29:29,432 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=8.973, simple_loss=7.311, pruned_loss=6.819, ctc_loss=4.895, over 944034.00 frames.
+2024-08-25 02:29:29,433 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12500MB
+2024-08-25 02:29:30,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.48 vs. limit=7.5
+2024-08-25 02:29:31,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.68 vs. limit=7.5
+2024-08-25 02:30:09,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=53.333333333333336, ans=0.4975
+2024-08-25 02:30:23,440 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+03 3.750e+03 4.817e+03 5.615e+03 6.551e+03, threshold=1.927e+04, percent-clipped=0.0
+2024-08-25 02:30:23,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.01 vs. limit=4.021333333333334
+2024-08-25 02:32:12,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=505.41 vs. limit=7.58
+2024-08-25 02:32:20,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=188.30 vs. limit=7.54
+2024-08-25 02:32:26,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.867e+02 1.019e+03 3.714e+03 5.063e+03 6.846e+03, threshold=1.486e+04, percent-clipped=0.0
+2024-08-25 02:32:36,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=389.92 vs. limit=5.053333333333334
+2024-08-25 02:33:25,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=80.14 vs. limit=7.56
+2024-08-25 02:33:29,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=25.19 vs. limit=5.053333333333334
+2024-08-25 02:33:30,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213.33333333333334, ans=0.49
+2024-08-25 02:33:36,822 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+02 7.649e+02 1.076e+03 3.731e+03 6.846e+03, threshold=4.304e+03, percent-clipped=0.0
+2024-08-25 02:33:37,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=213.33333333333334, ans=0.04933333333333333
+2024-08-25 02:33:43,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.75 vs. limit=3.032
+2024-08-25 02:33:49,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=101.42 vs. limit=5.1066666666666665
+2024-08-25 02:34:03,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=27.19 vs. limit=7.6
+2024-08-25 02:34:04,730 INFO [train.py:1114] (2/4) Epoch 1, batch 50, loss[loss=1.432, simple_loss=1.066, pruned_loss=1.243, ctc_loss=1.133, over 19747.00 frames. ], tot_loss[loss=3.544, simple_loss=2.928, pruned_loss=2.545, ctc_loss=1.773, over 844203.64 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25
+2024-08-25 02:34:06,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=38.73 vs. limit=7.7
+2024-08-25 02:34:15,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=73.95 vs. limit=5.133333333333334
+2024-08-25 02:34:32,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=320.0, ans=0.2048
+2024-08-25 02:34:51,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=57.13 vs. limit=7.62
+2024-08-25 02:34:54,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=320.0, ans=0.049
+2024-08-25 02:35:16,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=110.62 vs. limit=7.64
+2024-08-25 02:35:16,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=106.58 vs. limit=7.64
+2024-08-25 02:35:35,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=65.93 vs. limit=7.82
+2024-08-25 02:36:59,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.04 vs. limit=7.82
+2024-08-25 02:37:29,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=480.0, ans=4.192
+2024-08-25 02:37:34,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=76.91 vs. limit=7.68
+2024-08-25 02:37:51,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=216.82 vs. limit=7.7
+2024-08-25 02:37:51,536 INFO [train.py:1114] (2/4) Epoch 1, batch 100, loss[loss=1.379, simple_loss=0.988, pruned_loss=1.257, ctc_loss=1.165, over 19704.00 frames. ], tot_loss[loss=2.409, simple_loss=1.913, pruned_loss=1.86, ctc_loss=1.469, over 1496570.41 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5
+2024-08-25 02:37:55,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.639e+01 1.517e+02 3.832e+02 1.019e+03 9.054e+03, threshold=7.665e+02, percent-clipped=2.0
+2024-08-25 02:38:22,896 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=36.74 vs. limit=7.94
+2024-08-25 02:38:47,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=28.22 vs. limit=7.98
+2024-08-25 02:38:55,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=15.56 vs. limit=5.173333333333334
+2024-08-25 02:39:02,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=693.3333333333334, ans=7.76
+2024-08-25 02:39:03,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=693.3333333333334, ans=0.17400000000000002
+2024-08-25 02:39:07,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=746.6666666666666, ans=0.21480000000000005
+2024-08-25 02:39:09,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=746.6666666666666, ans=0.46499999999999997
+2024-08-25 02:39:09,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=13.37 vs. limit=5.1866666666666665
+2024-08-25 02:39:14,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=218.16 vs. limit=5.373333333333333
+2024-08-25 02:39:16,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=53.84 vs. limit=7.78
+2024-08-25 02:39:17,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=53.53 vs. limit=8.06
+2024-08-25 02:39:18,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.65 vs. limit=8.06
+2024-08-25 02:39:21,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=129.01 vs. limit=5.4
+2024-08-25 02:39:22,873 INFO [train.py:1114] (2/4) Epoch 1, batch 150, loss[loss=1.165, simple_loss=0.8094, pruned_loss=1.018, ctc_loss=1.076, over 19716.00 frames. ], tot_loss[loss=1.941, simple_loss=1.49, pruned_loss=1.562, ctc_loss=1.343, over 2026976.04 frames. ], batch size: 47, lr: 2.93e-02, grad_scale: 0.5
+2024-08-25 02:39:30,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=800.0, ans=0.095
+2024-08-25 02:39:30,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=4.32
+2024-08-25 02:39:35,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=93.55 vs. limit=7.8
+2024-08-25 02:39:38,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=32.22 vs. limit=7.82
+2024-08-25 02:39:40,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=22.44 vs. limit=5.426666666666667
+2024-08-25 02:39:43,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=63.03 vs. limit=7.82
+2024-08-25 02:39:46,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=853.3333333333334, ans=0.09466666666666668
+2024-08-25 02:39:47,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=265.35 vs. limit=7.82
+2024-08-25 02:39:48,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=853.3333333333334, ans=0.08080000000000001
+2024-08-25 02:39:51,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=115.17 vs. limit=5.0
+2024-08-25 02:39:53,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=144.76 vs. limit=7.84
+2024-08-25 02:39:55,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.52 vs. limit=8.18
+2024-08-25 02:39:57,057 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=156.11 vs. limit=5.453333333333333
+2024-08-25 02:40:00,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=906.6666666666666, ans=0.5
+2024-08-25 02:40:14,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=72.24 vs. limit=7.86
+2024-08-25 02:40:14,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=65.59 vs. limit=7.86
+2024-08-25 02:40:21,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=1013.3333333333334, ans=0.4525
+2024-08-25 02:40:23,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=36.22 vs. limit=7.88
+2024-08-25 02:40:26,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=35.85 vs. limit=7.88
+2024-08-25 02:40:28,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=1013.3333333333334, ans=0.23986666666666667
+2024-08-25 02:40:28,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=1013.3333333333334, ans=0.4525
+2024-08-25 02:40:30,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=19.69 vs. limit=7.88
+2024-08-25 02:40:32,731 INFO [train.py:1114] (2/4) Epoch 1, batch 200, loss[loss=1.252, simple_loss=0.8637, pruned_loss=1.005, ctc_loss=1.203, over 18042.00 frames. ], tot_loss[loss=1.686, simple_loss=1.261, pruned_loss=1.373, ctc_loss=1.277, over 2434889.36 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0
+2024-08-25 02:40:35,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1066.6666666666667, ans=0.45
+2024-08-25 02:40:36,956 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.587e+01 1.185e+02 1.545e+02 1.999e+02 4.229e+02, threshold=3.089e+02, percent-clipped=0.0
+2024-08-25 02:40:37,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=1066.6666666666667, ans=0.45
+2024-08-25 02:40:57,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=1066.6666666666667, ans=0.16
+2024-08-25 02:41:06,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1120.0, ans=0.2888
+2024-08-25 02:41:16,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=1120.0, ans=0.158
+2024-08-25 02:41:28,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.90 vs. limit=8.38
+2024-08-25 02:41:29,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=15.29 vs. limit=4.469333333333333
+2024-08-25 02:41:31,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=95.50 vs. limit=7.94
+2024-08-25 02:41:36,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=1226.6666666666667, ans=0.07239999999999999
+2024-08-25 02:41:38,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=4.490666666666667
+2024-08-25 02:41:39,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=27.77 vs. limit=5.613333333333333
+2024-08-25 02:41:55,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.93 vs. limit=8.42
+2024-08-25 02:42:08,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=25.33 vs. limit=5.64
+2024-08-25 02:42:09,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=198.28 vs. limit=7.98
+2024-08-25 02:42:11,847 INFO [train.py:1114] (2/4) Epoch 1, batch 250, loss[loss=1.322, simple_loss=0.8959, pruned_loss=1.047, ctc_loss=1.297, over 19449.00 frames. ], tot_loss[loss=1.535, simple_loss=1.123, pruned_loss=1.249, ctc_loss=1.244, over 2755846.84 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0
+2024-08-25 02:42:15,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=87.81 vs. limit=8.0
+2024-08-25 02:42:21,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=18.57 vs. limit=4.533333333333333
+2024-08-25 02:42:22,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=122.35 vs. limit=8.0
+2024-08-25 02:42:24,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=15.14 vs. limit=5.333333333333333
+2024-08-25 02:42:30,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=1386.6666666666667, ans=0.435
+2024-08-25 02:42:30,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=1386.6666666666667, ans=0.435
+2024-08-25 02:42:34,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.07 vs. limit=8.54
+2024-08-25 02:42:35,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.20 vs. limit=5.346666666666667
+2024-08-25 02:42:44,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=19.78 vs. limit=8.04
+2024-08-25 02:42:51,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1440.0, ans=0.28559999999999997
+2024-08-25 02:42:57,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=41.74 vs. limit=4.298666666666667
+2024-08-25 02:43:01,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=3.224
+2024-08-25 02:43:06,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.76 vs. limit=8.620000000000001
+2024-08-25 02:43:08,149 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=17.69 vs. limit=8.06
+2024-08-25 02:43:10,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=1546.6666666666667, ans=5.966666666666667
+2024-08-25 02:43:17,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=14.72 vs. limit=5.386666666666667
+2024-08-25 02:43:19,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=1546.6666666666667, ans=0.163
+2024-08-25 02:43:21,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1546.6666666666667, ans=0.2845333333333333
+2024-08-25 02:43:23,928 INFO [train.py:1114] (2/4) Epoch 1, batch 300, loss[loss=1.247, simple_loss=0.8344, pruned_loss=0.9714, ctc_loss=1.232, over 19531.00 frames. ], tot_loss[loss=1.435, simple_loss=1.029, pruned_loss=1.159, ctc_loss=1.221, over 2999758.88 frames. ], batch size: 61, lr: 3.60e-02, grad_scale: 2.0
+2024-08-25 02:43:24,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1600.0, ans=0.284
+2024-08-25 02:43:27,971 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.516e+01 1.281e+02 1.784e+02 2.457e+02 1.092e+03, threshold=3.568e+02, percent-clipped=12.0
+2024-08-25 02:43:29,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=1600.0, ans=0.22969848480983485
+2024-08-25 02:43:30,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=113.94 vs. limit=8.1
+2024-08-25 02:43:30,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=153.50 vs. limit=8.1
+2024-08-25 02:43:34,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=1600.0, ans=0.23399999999999999
+2024-08-25 02:43:35,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=97.53 vs. limit=8.1
+2024-08-25 02:43:36,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=84.58 vs. limit=8.12
+2024-08-25 02:43:42,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=1653.3333333333333, ans=0.08966666666666667
+2024-08-25 02:43:44,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1653.3333333333333, ans=0.28346666666666664
+2024-08-25 02:43:56,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=1706.6666666666667, ans=0.2829333333333333
+2024-08-25 02:43:57,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=57.52 vs. limit=8.14
+2024-08-25 02:44:03,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=41.99 vs. limit=8.78
+2024-08-25 02:44:04,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.06 vs. limit=5.8533333333333335
+2024-08-25 02:44:10,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=77.41 vs. limit=8.16
+2024-08-25 02:44:14,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.17 vs. limit=8.82
+2024-08-25 02:44:16,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=299.68 vs. limit=5.88
+2024-08-25 02:44:24,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=50.24 vs. limit=8.18
+2024-08-25 02:44:31,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.52 vs. limit=8.86
+2024-08-25 02:44:35,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.51 vs. limit=8.86
+2024-08-25 02:44:36,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=18.94 vs. limit=5.453333333333333
+2024-08-25 02:44:38,839 INFO [train.py:1114] (2/4) Epoch 1, batch 350, loss[loss=1.11, simple_loss=0.7368, pruned_loss=0.8626, ctc_loss=1.077, over 19758.00 frames. ], tot_loss[loss=1.368, simple_loss=0.9649, pruned_loss=1.096, ctc_loss=1.207, over 3190055.67 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 2.0
+2024-08-25 02:44:39,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=109.08 vs. limit=8.2
+2024-08-25 02:44:42,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=1866.6666666666667, ans=0.13
+2024-08-25 02:44:44,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=46.55 vs. limit=8.9
+2024-08-25 02:44:44,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=52.54 vs. limit=8.2
+2024-08-25 02:44:45,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=1866.6666666666667, ans=0.4125
+2024-08-25 02:44:49,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.34 vs. limit=8.9
+2024-08-25 02:45:01,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=111.48 vs. limit=8.22
+2024-08-25 02:45:14,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.66 vs. limit=5.986666666666666
+2024-08-25 02:45:22,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.41 vs. limit=8.24
+2024-08-25 02:45:23,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2026.6666666666667, ans=0.405
+2024-08-25 02:45:31,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2026.6666666666667, ans=0.405
+2024-08-25 02:46:55,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=2080.0, ans=0.23120000000000002
+2024-08-25 02:47:05,531 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=221.91 vs. limit=8.28
+2024-08-25 02:47:09,660 INFO [train.py:1114] (2/4) Epoch 1, batch 400, loss[loss=1.166, simple_loss=0.7666, pruned_loss=0.8862, ctc_loss=1.137, over 19484.00 frames. ], tot_loss[loss=1.319, simple_loss=0.9161, pruned_loss=1.046, ctc_loss=1.193, over 3342861.84 frames. ], batch size: 54, lr: 4.05e-02, grad_scale: 4.0
+2024-08-25 02:47:13,858 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.873e+01 1.501e+02 1.913e+02 2.464e+02 6.763e+02, threshold=3.826e+02, percent-clipped=7.0
+2024-08-25 02:47:17,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.18 vs. limit=5.533333333333333
+2024-08-25 02:47:17,339 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=7.27 vs. limit=4.8533333333333335
+2024-08-25 02:47:20,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.11 vs. limit=5.533333333333333
+2024-08-25 02:47:22,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.31 vs. limit=5.546666666666667
+2024-08-25 02:47:29,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=2186.6666666666665, ans=0.8234666666666667
+2024-08-25 02:47:29,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=42.14 vs. limit=8.32
+2024-08-25 02:47:42,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=26.35 vs. limit=6.12
+2024-08-25 02:47:50,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=38.16 vs. limit=8.34
+2024-08-25 02:47:54,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=9.22
+2024-08-25 02:48:00,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=76.96 vs. limit=8.36
+2024-08-25 02:48:02,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=9.22
+2024-08-25 02:48:08,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=33.17 vs. limit=8.38
+2024-08-25 02:48:10,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=2346.6666666666665, ans=0.39
+2024-08-25 02:48:13,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=2346.6666666666665, ans=0.0472
+2024-08-25 02:48:13,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.88 vs. limit=8.38
+2024-08-25 02:48:19,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=2400.0, ans=0.23600000000000002
+2024-08-25 02:48:21,766 INFO [train.py:1114] (2/4) Epoch 1, batch 450, loss[loss=1.192, simple_loss=0.7778, pruned_loss=0.8948, ctc_loss=1.154, over 19621.00 frames. ], tot_loss[loss=1.285, simple_loss=0.8812, pruned_loss=1.008, ctc_loss=1.183, over 3451125.70 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0
+2024-08-25 02:48:30,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=2400.0, ans=0.11
+2024-08-25 02:48:35,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2453.3333333333335, ans=0.385
+2024-08-25 02:48:37,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=2453.3333333333335, ans=0.385
+2024-08-25 02:48:46,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=26.02 vs. limit=8.42
+2024-08-25 02:48:50,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2506.6666666666665, ans=0.2749333333333333
+2024-08-25 02:49:07,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=2560.0, ans=0.0424
+2024-08-25 02:49:10,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=2560.0, ans=0.38
+2024-08-25 02:49:26,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=11.60 vs. limit=8.48
+2024-08-25 02:49:28,670 INFO [train.py:1114] (2/4) Epoch 1, batch 500, loss[loss=1.163, simple_loss=0.7682, pruned_loss=0.8285, ctc_loss=1.106, over 19683.00 frames. ], tot_loss[loss=1.25, simple_loss=0.8493, pruned_loss=0.9662, ctc_loss=1.161, over 3546755.56 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:49:31,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.49 vs. limit=8.5
+2024-08-25 02:49:32,574 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.834e+02 2.411e+02 2.968e+02 6.409e+02, threshold=4.822e+02, percent-clipped=7.0
+2024-08-25 02:49:34,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.99 vs. limit=5.666666666666667
+2024-08-25 02:49:40,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=31.48 vs. limit=8.5
+2024-08-25 02:49:41,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.87 vs. limit=8.52
+2024-08-25 02:49:43,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2720.0, ans=0.2728
+2024-08-25 02:49:59,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=2773.3333333333335, ans=9.58
+2024-08-25 02:50:02,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.11 vs. limit=8.54
+2024-08-25 02:50:04,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=2773.3333333333335, ans=0.5
+2024-08-25 02:50:28,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=2880.0, ans=0.082
+2024-08-25 02:50:33,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.39 vs. limit=5.72
+2024-08-25 02:50:39,210 INFO [train.py:1114] (2/4) Epoch 1, batch 550, loss[loss=1.086, simple_loss=0.7335, pruned_loss=0.703, ctc_loss=1.046, over 19210.00 frames. ], tot_loss[loss=1.22, simple_loss=0.826, pruned_loss=0.9176, ctc_loss=1.139, over 3608352.33 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:50:50,552 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.621e+01
+2024-08-25 02:50:54,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=8.6
+2024-08-25 02:51:06,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=19.69 vs. limit=8.620000000000001
+2024-08-25 02:51:07,617 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.31 vs. limit=5.746666666666666
+2024-08-25 02:51:10,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.29 vs. limit=8.620000000000001
+2024-08-25 02:51:11,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=18.89 vs. limit=8.64
+2024-08-25 02:51:12,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=3040.0, ans=0.35750000000000004
+2024-08-25 02:51:14,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.00 vs. limit=6.52
+2024-08-25 02:51:14,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.93 vs.
limit=8.64 +2024-08-25 02:51:19,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=3040.0, ans=0.35750000000000004 +2024-08-25 02:51:22,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.00 vs. limit=9.78 +2024-08-25 02:51:31,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3093.3333333333335, ans=0.355 +2024-08-25 02:51:31,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.15 vs. limit=8.66 +2024-08-25 02:51:36,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3093.3333333333335, ans=0.355 +2024-08-25 02:51:38,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=3093.3333333333335, ans=0.09899494936611666 +2024-08-25 02:51:55,119 INFO [train.py:1114] (2/4) Epoch 1, batch 600, loss[loss=1.01, simple_loss=0.6983, pruned_loss=0.6066, ctc_loss=0.9588, over 19434.00 frames. ], tot_loss[loss=1.173, simple_loss=0.7964, pruned_loss=0.8504, ctc_loss=1.101, over 3666673.28 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:51:59,175 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 2.677e+02 3.553e+02 4.456e+02 9.241e+02, threshold=7.106e+02, percent-clipped=18.0 +2024-08-25 02:52:10,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.97 vs. limit=6.626666666666667 +2024-08-25 02:52:15,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=3253.3333333333335, ans=0.03983333333333333 +2024-08-25 02:52:16,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.11 vs. limit=5.8133333333333335 +2024-08-25 02:52:39,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=3360.0, ans=0.7824 +2024-08-25 02:52:59,430 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.61 vs. limit=8.78 +2024-08-25 02:53:00,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=3466.6666666666665, ans=0.7786666666666667 +2024-08-25 02:53:01,038 INFO [train.py:1114] (2/4) Epoch 1, batch 650, loss[loss=0.8771, simple_loss=0.6167, pruned_loss=0.4882, ctc_loss=0.8447, over 19734.00 frames. ], tot_loss[loss=1.112, simple_loss=0.7583, pruned_loss=0.7744, ctc_loss=1.048, over 3716831.64 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 4.0 +2024-08-25 02:53:09,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=10.1 +2024-08-25 02:53:22,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.51 vs. 
limit=10.14 +2024-08-25 02:53:25,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.00 vs. limit=8.82 +2024-08-25 02:53:27,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=10.18 +2024-08-25 02:53:41,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=3626.6666666666665, ans=0.06399999999999997 +2024-08-25 02:53:42,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.67 vs. limit=8.86 +2024-08-25 02:53:48,694 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=8.86 +2024-08-25 02:53:52,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=3680.0, ans=0.3275 +2024-08-25 02:53:52,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.19 vs. limit=8.879999999999999 +2024-08-25 02:54:00,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.63 vs. limit=5.4719999999999995 +2024-08-25 02:54:05,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten.whitening_limit, batch_count=3680.0, ans=10.26 +2024-08-25 02:54:09,032 INFO [train.py:1114] (2/4) Epoch 1, batch 700, loss[loss=0.7891, simple_loss=0.5561, pruned_loss=0.4348, ctc_loss=0.7494, over 19719.00 frames. ], tot_loss[loss=1.05, simple_loss=0.7214, pruned_loss=0.7014, ctc_loss=0.9923, over 3748642.13 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:54:14,196 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.975e+02 3.878e+02 5.385e+02 1.936e+03, threshold=7.756e+02, percent-clipped=10.0 +2024-08-25 02:54:19,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.31 vs. limit=8.9 +2024-08-25 02:54:24,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.32 vs. limit=5.946666666666666 +2024-08-25 02:54:24,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.54 vs. limit=8.92 +2024-08-25 02:54:24,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=3786.6666666666665, ans=0.037000000000000005 +2024-08-25 02:54:45,282 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=8.206e+00 +2024-08-25 02:54:45,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3840.0, ans=0.32 +2024-08-25 02:54:58,652 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.59 vs. 
limit=10.42 +2024-08-25 02:55:16,797 INFO [train.py:1114] (2/4) Epoch 1, batch 750, loss[loss=0.7646, simple_loss=0.556, pruned_loss=0.3858, ctc_loss=0.7202, over 19497.00 frames. ], tot_loss[loss=0.9866, simple_loss=0.6843, pruned_loss=0.6324, ctc_loss=0.932, over 3774549.71 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:55:24,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=4000.0, ans=0.04999999999999999 +2024-08-25 02:55:26,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.49 vs. limit=5.6 +2024-08-25 02:55:26,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.32 vs. limit=7.0 +2024-08-25 02:55:29,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn1.whiten.whitening_limit, batch_count=4000.0, ans=10.5 +2024-08-25 02:55:30,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.06 vs. limit=9.0 +2024-08-25 02:55:39,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.33 vs. limit=6.013333333333334 +2024-08-25 02:55:44,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.81 vs. limit=9.040000000000001 +2024-08-25 02:55:54,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=4106.666666666667, ans=0.049555555555555554 +2024-08-25 02:56:01,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.05 vs. limit=6.04 +2024-08-25 02:56:10,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=4213.333333333333, ans=0.7525333333333334 +2024-08-25 02:56:10,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.67 vs. limit=7.1066666666666665 +2024-08-25 02:56:24,854 INFO [train.py:1114] (2/4) Epoch 1, batch 800, loss[loss=0.6859, simple_loss=0.5084, pruned_loss=0.3403, ctc_loss=0.6129, over 19408.00 frames. ], tot_loss[loss=0.9291, simple_loss=0.6517, pruned_loss=0.5714, ctc_loss=0.8731, over 3794397.83 frames. ], batch size: 48, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 02:56:26,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=4266.666666666667, ans=0.3 +2024-08-25 02:56:29,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.884e+02 2.945e+02 3.956e+02 5.210e+02 9.107e+02, threshold=7.913e+02, percent-clipped=4.0 +2024-08-25 02:56:35,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=4266.666666666667, ans=0.3 +2024-08-25 02:56:36,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.35 vs. 
limit=10.7 +2024-08-25 02:57:01,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=4373.333333333333, ans=0.025 +2024-08-25 02:57:04,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=4426.666666666667, ans=0.04822222222222222 +2024-08-25 02:57:09,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.80 vs. limit=10.82 +2024-08-25 02:57:15,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.32 vs. limit=6.1066666666666665 +2024-08-25 02:57:16,981 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.30 vs. limit=6.12 +2024-08-25 02:57:17,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=4480.0, ans=0.035 +2024-08-25 02:57:23,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.37 vs. limit=9.18 +2024-08-25 02:57:30,599 INFO [train.py:1114] (2/4) Epoch 1, batch 850, loss[loss=0.7228, simple_loss=0.5409, pruned_loss=0.3425, ctc_loss=0.6577, over 19636.00 frames. ], tot_loss[loss=0.8749, simple_loss=0.6212, pruned_loss=0.517, ctc_loss=0.8154, over 3813762.31 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 02:57:31,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.96 vs. limit=5.8133333333333335 +2024-08-25 02:57:49,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4586.666666666667, ans=0.28500000000000003 +2024-08-25 02:57:58,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=4640.0, ans=0.009860869565217392 +2024-08-25 02:58:11,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=4693.333333333333, ans=0.28 +2024-08-25 02:58:14,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=4693.333333333333, ans=0.025 +2024-08-25 02:58:18,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.30 vs. limit=5.8773333333333335 +2024-08-25 02:58:42,851 INFO [train.py:1114] (2/4) Epoch 1, batch 900, loss[loss=0.5936, simple_loss=0.4543, pruned_loss=0.2744, ctc_loss=0.5146, over 19400.00 frames. ], tot_loss[loss=0.8294, simple_loss=0.596, pruned_loss=0.4716, ctc_loss=0.7657, over 3818394.56 frames. 
], batch size: 48, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 02:58:48,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.783e+02 3.682e+02 4.971e+02 1.764e+03, threshold=7.364e+02, percent-clipped=6.0 +2024-08-25 02:58:53,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4800.0, ans=0.252 +2024-08-25 02:59:14,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4906.666666666667, ans=0.04622222222222222 +2024-08-25 02:59:28,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=4960.0, ans=0.0345 +2024-08-25 02:59:38,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=5013.333333333333, ans=0.265 +2024-08-25 02:59:41,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.33 vs. limit=11.26 +2024-08-25 02:59:44,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=11.26 +2024-08-25 02:59:50,574 INFO [train.py:1114] (2/4) Epoch 1, batch 950, loss[loss=0.6082, simple_loss=0.4677, pruned_loss=0.2764, ctc_loss=0.5285, over 19500.00 frames. ], tot_loss[loss=0.7897, simple_loss=0.5745, pruned_loss=0.4334, ctc_loss=0.7208, over 3819361.22 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:00:06,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=11.34 +2024-08-25 03:00:08,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=5120.0, ans=0.009756521739130435 +2024-08-25 03:00:12,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=5120.0, ans=0.035 +2024-08-25 03:00:15,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=5173.333333333333, ans=0.045111111111111116 +2024-08-25 03:00:25,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=5173.333333333333, ans=0.009744927536231884 +2024-08-25 03:00:38,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.33 vs. limit=6.306666666666667 +2024-08-25 03:00:40,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.85 vs. limit=6.112 +2024-08-25 03:00:54,648 INFO [train.py:1114] (2/4) Epoch 1, batch 1000, loss[loss=0.5264, simple_loss=0.4264, pruned_loss=0.2175, ctc_loss=0.4347, over 19850.00 frames. ], tot_loss[loss=0.7523, simple_loss=0.5544, pruned_loss=0.3989, ctc_loss=0.6779, over 3815620.21 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:00:55,540 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. 
limit=6.333333333333333 +2024-08-25 03:00:55,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.28 vs. limit=6.133333333333333 +2024-08-25 03:00:56,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=5333.333333333333, ans=0.025 +2024-08-25 03:01:00,962 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.21 vs. limit=3.8 +2024-08-25 03:01:01,317 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.847e+02 3.463e+02 4.611e+02 9.717e+02, threshold=6.926e+02, percent-clipped=4.0 +2024-08-25 03:01:05,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=11.5 +2024-08-25 03:01:15,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=5386.666666666667, ans=0.0 +2024-08-25 03:01:37,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5493.333333333333, ans=0.2425 +2024-08-25 03:01:47,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.44 vs. limit=11.620000000000001 +2024-08-25 03:02:07,676 INFO [train.py:1114] (2/4) Epoch 1, batch 1050, loss[loss=0.6015, simple_loss=0.4784, pruned_loss=0.258, ctc_loss=0.5053, over 19854.00 frames. ], tot_loss[loss=0.7169, simple_loss=0.5351, pruned_loss=0.3678, ctc_loss=0.638, over 3822603.09 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:02:18,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5653.333333333333, ans=0.235 +2024-08-25 03:02:21,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=5653.333333333333, ans=0.235 +2024-08-25 03:02:38,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=9.64 +2024-08-25 03:02:51,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=5760.0, ans=0.22999999999999998 +2024-08-25 03:02:52,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=9.66 +2024-08-25 03:03:01,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.33 vs. limit=7.906666666666666 +2024-08-25 03:03:08,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=5813.333333333333, ans=0.04244444444444445 +2024-08-25 03:03:13,779 INFO [train.py:1114] (2/4) Epoch 1, batch 1100, loss[loss=0.5414, simple_loss=0.4379, pruned_loss=0.2296, ctc_loss=0.4363, over 19580.00 frames. ], tot_loss[loss=0.6851, simple_loss=0.5181, pruned_loss=0.3408, ctc_loss=0.6011, over 3829879.19 frames. 
], batch size: 52, lr: 4.48e-02, grad_scale: 8.0 +2024-08-25 03:03:15,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=5866.666666666667, ans=0.22499999999999998 +2024-08-25 03:03:20,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 2.626e+02 3.754e+02 4.559e+02 6.965e+02, threshold=7.509e+02, percent-clipped=1.0 +2024-08-25 03:03:26,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5920.0, ans=0.2408 +2024-08-25 03:03:36,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.10 vs. limit=9.72 +2024-08-25 03:03:40,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5973.333333333333, ans=0.24026666666666666 +2024-08-25 03:03:56,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=6026.666666666667, ans=0.6890666666666667 +2024-08-25 03:03:57,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6026.666666666667, ans=0.23973333333333333 +2024-08-25 03:03:57,582 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=1.237e-01 +2024-08-25 03:03:57,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6026.666666666667, ans=0.21750000000000003 +2024-08-25 03:04:18,513 INFO [train.py:1114] (2/4) Epoch 1, batch 1150, loss[loss=0.5575, simple_loss=0.4488, pruned_loss=0.2402, ctc_loss=0.4469, over 19591.00 frames. ], tot_loss[loss=0.6612, simple_loss=0.5057, pruned_loss=0.3204, ctc_loss=0.5726, over 3828919.76 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 8.0 +2024-08-25 03:04:42,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.58 vs. limit=8.093333333333334 +2024-08-25 03:04:44,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=6240.0, ans=0.20750000000000002 +2024-08-25 03:04:55,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=6240.0, ans=0.20750000000000002 +2024-08-25 03:04:55,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=3.936 +2024-08-25 03:05:01,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.57 vs. limit=8.146666666666667 +2024-08-25 03:05:19,575 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=3.128e-02 +2024-08-25 03:05:24,510 INFO [train.py:1114] (2/4) Epoch 1, batch 1200, loss[loss=0.5246, simple_loss=0.4423, pruned_loss=0.205, ctc_loss=0.4192, over 19843.00 frames. ], tot_loss[loss=0.6407, simple_loss=0.4955, pruned_loss=0.3029, ctc_loss=0.5477, over 3823874.27 frames. 
], batch size: 57, lr: 4.47e-02, grad_scale: 16.0 +2024-08-25 03:05:30,705 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.839e+02 2.702e+02 3.344e+02 4.028e+02 1.038e+03, threshold=6.687e+02, percent-clipped=4.0 +2024-08-25 03:05:32,175 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:05:44,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.68 vs. limit=12.34 +2024-08-25 03:05:47,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.57 vs. limit=12.34 +2024-08-25 03:06:08,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=6560.0, ans=0.6704 +2024-08-25 03:06:12,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=6560.0, ans=0.03933333333333334 +2024-08-25 03:06:17,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=9.98 +2024-08-25 03:06:33,173 INFO [train.py:1114] (2/4) Epoch 1, batch 1250, loss[loss=0.5623, simple_loss=0.4612, pruned_loss=0.2331, ctc_loss=0.4539, over 19546.00 frames. ], tot_loss[loss=0.6181, simple_loss=0.4844, pruned_loss=0.2848, ctc_loss=0.5213, over 3841753.42 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 8.0 +2024-08-25 03:06:37,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=6666.666666666667, ans=0.6666666666666667 +2024-08-25 03:06:46,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=6720.0, ans=0.185 +2024-08-25 03:06:49,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.99 vs. limit=12.54 +2024-08-25 03:07:02,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=6773.333333333333, ans=0.1825 +2024-08-25 03:07:04,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=6773.333333333333, ans=0.07 +2024-08-25 03:07:06,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=6773.333333333333, ans=0.009397101449275363 +2024-08-25 03:07:08,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=6773.333333333333, ans=0.03844444444444445 +2024-08-25 03:07:29,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=6880.0, ans=9.3 +2024-08-25 03:07:53,439 INFO [train.py:1114] (2/4) Epoch 1, batch 1300, loss[loss=0.5511, simple_loss=0.4571, pruned_loss=0.2284, ctc_loss=0.4305, over 18845.00 frames. ], tot_loss[loss=0.5983, simple_loss=0.4742, pruned_loss=0.2699, ctc_loss=0.4984, over 3845178.19 frames. 
], batch size: 76, lr: 4.47e-02, grad_scale: 8.0 +2024-08-25 03:07:54,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=6933.333333333333, ans=0.025 +2024-08-25 03:07:58,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=6933.333333333333, ans=0.07 +2024-08-25 03:08:00,990 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.595e+02 3.171e+02 4.007e+02 5.829e+02, threshold=6.342e+02, percent-clipped=0.0 +2024-08-25 03:08:26,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.52 vs. limit=8.52 +2024-08-25 03:08:27,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.00 vs. limit=8.52 +2024-08-25 03:08:44,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.05 vs. limit=10.18 +2024-08-25 03:09:00,194 INFO [train.py:1114] (2/4) Epoch 1, batch 1350, loss[loss=0.5197, simple_loss=0.4369, pruned_loss=0.2118, ctc_loss=0.4021, over 19763.00 frames. ], tot_loss[loss=0.582, simple_loss=0.4663, pruned_loss=0.2576, ctc_loss=0.479, over 3856291.44 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:10:09,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=7253.333333333333, ans=0.17746666666666666 +2024-08-25 03:10:14,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7253.333333333333, ans=0.22746666666666665 +2024-08-25 03:10:22,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7306.666666666667, ans=0.15749999999999997 +2024-08-25 03:12:10,365 INFO [train.py:1114] (2/4) Epoch 1, batch 1400, loss[loss=0.4857, simple_loss=0.4067, pruned_loss=0.2009, ctc_loss=0.3733, over 19658.00 frames. ], tot_loss[loss=0.5666, simple_loss=0.4585, pruned_loss=0.2465, ctc_loss=0.4617, over 3863215.76 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:12:32,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.490e+02 2.974e+02 4.034e+02 6.918e+02, threshold=5.948e+02, percent-clipped=1.0 +2024-08-25 03:13:07,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=7626.666666666667, ans=0.14250000000000002 +2024-08-25 03:13:09,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.79 vs. limit=13.219999999999999 +2024-08-25 03:13:28,420 INFO [train.py:1114] (2/4) Epoch 1, batch 1450, loss[loss=0.5191, simple_loss=0.4509, pruned_loss=0.2067, ctc_loss=0.3806, over 19655.00 frames. ], tot_loss[loss=0.5548, simple_loss=0.4532, pruned_loss=0.238, ctc_loss=0.4476, over 3861439.54 frames. 
], batch size: 63, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:13:33,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=7733.333333333333, ans=0.034444444444444444 +2024-08-25 03:13:47,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7786.666666666667, ans=0.22213333333333332 +2024-08-25 03:13:56,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. limit=4.176 +2024-08-25 03:14:03,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7840.0, ans=0.1325 +2024-08-25 03:14:07,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=7893.333333333333, ans=0.03377777777777778 +2024-08-25 03:14:07,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=7893.333333333333, ans=0.13 +2024-08-25 03:14:15,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.09 vs. limit=4.184 +2024-08-25 03:14:17,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7946.666666666667, ans=0.22053333333333333 +2024-08-25 03:14:20,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=7946.666666666667, ans=0.1275 +2024-08-25 03:14:22,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7946.666666666667, ans=0.22053333333333333 +2024-08-25 03:14:29,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=8000.0, ans=0.03333333333333334 +2024-08-25 03:14:30,727 INFO [train.py:1114] (2/4) Epoch 1, batch 1500, loss[loss=0.5215, simple_loss=0.4483, pruned_loss=0.2095, ctc_loss=0.3949, over 19592.00 frames. ], tot_loss[loss=0.543, simple_loss=0.4479, pruned_loss=0.2297, ctc_loss=0.434, over 3861829.57 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 8.0 +2024-08-25 03:14:35,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=8000.0, ans=0.125 +2024-08-25 03:14:38,510 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.576e+02 3.382e+02 4.091e+02 7.597e+02, threshold=6.763e+02, percent-clipped=6.0 +2024-08-25 03:14:42,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8053.333333333333, ans=0.125 +2024-08-25 03:14:48,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.02 vs. limit=10.52 +2024-08-25 03:15:16,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=8160.0, ans=0.125 +2024-08-25 03:15:19,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.81 vs. 
limit=10.56 +2024-08-25 03:15:19,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=8160.0, ans=0.07 +2024-08-25 03:15:33,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8213.333333333334, ans=0.21786666666666665 +2024-08-25 03:15:40,033 INFO [train.py:1114] (2/4) Epoch 1, batch 1550, loss[loss=0.4951, simple_loss=0.429, pruned_loss=0.1946, ctc_loss=0.3858, over 19606.00 frames. ], tot_loss[loss=0.5316, simple_loss=0.4424, pruned_loss=0.2224, ctc_loss=0.4213, over 3847991.69 frames. ], batch size: 60, lr: 4.45e-02, grad_scale: 8.0 +2024-08-25 03:15:41,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.77 vs. limit=9.133333333333333 +2024-08-25 03:15:43,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=8266.666666666666, ans=0.6106666666666667 +2024-08-25 03:15:43,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=8266.666666666666, ans=0.07 +2024-08-25 03:15:51,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8320.0, ans=0.2168 +2024-08-25 03:16:02,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.37 vs. limit=9.16 +2024-08-25 03:16:06,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.78 vs. limit=7.093333333333334 +2024-08-25 03:16:28,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8426.666666666666, ans=0.21573333333333333 +2024-08-25 03:16:29,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=8426.666666666666, ans=0.125 +2024-08-25 03:16:41,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=8480.0, ans=0.025 +2024-08-25 03:16:49,347 INFO [train.py:1114] (2/4) Epoch 1, batch 1600, loss[loss=0.4965, simple_loss=0.4421, pruned_loss=0.1915, ctc_loss=0.3713, over 19855.00 frames. ], tot_loss[loss=0.5225, simple_loss=0.4383, pruned_loss=0.2166, ctc_loss=0.4106, over 3835807.66 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 16.0 +2024-08-25 03:16:53,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=8533.333333333334, ans=0.125 +2024-08-25 03:16:59,543 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.604e+02 3.125e+02 4.170e+02 2.617e+03, threshold=6.251e+02, percent-clipped=7.0 +2024-08-25 03:17:01,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=8533.333333333334, ans=0.125 +2024-08-25 03:17:01,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=8533.333333333334, ans=0.03111111111111111 +2024-08-25 03:17:01,629 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.66 vs. 
limit=7.133333333333334 +2024-08-25 03:17:02,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.19 vs. limit=7.133333333333334 +2024-08-25 03:17:04,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.95 vs. limit=13.94 +2024-08-25 03:17:10,132 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.49 vs. limit=13.94 +2024-08-25 03:17:24,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=7.4559999999999995 +2024-08-25 03:17:30,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.66 vs. limit=10.76 +2024-08-25 03:17:32,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=8693.333333333334, ans=0.125 +2024-08-25 03:19:08,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=8800.0, ans=0.125 +2024-08-25 03:19:09,317 INFO [train.py:1114] (2/4) Epoch 1, batch 1650, loss[loss=0.5024, simple_loss=0.4362, pruned_loss=0.2012, ctc_loss=0.3833, over 19650.00 frames. ], tot_loss[loss=0.5136, simple_loss=0.4344, pruned_loss=0.211, ctc_loss=0.4012, over 3832121.91 frames. ], batch size: 59, lr: 4.45e-02, grad_scale: 16.0 +2024-08-25 03:19:26,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=8853.333333333334, ans=0.025 +2024-08-25 03:19:47,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=8960.0, ans=0.008921739130434782 +2024-08-25 03:19:56,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=4.344 +2024-08-25 03:19:59,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9013.333333333334, ans=0.20986666666666665 +2024-08-25 03:20:12,475 INFO [train.py:1114] (2/4) Epoch 1, batch 1700, loss[loss=0.4087, simple_loss=0.3647, pruned_loss=0.1587, ctc_loss=0.3085, over 19698.00 frames. ], tot_loss[loss=0.5038, simple_loss=0.4301, pruned_loss=0.2051, ctc_loss=0.391, over 3846604.51 frames. 
], batch size: 46, lr: 4.44e-02, grad_scale: 16.0 +2024-08-25 03:20:19,823 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.395e+02 2.888e+02 3.702e+02 8.491e+02, threshold=5.776e+02, percent-clipped=2.0 +2024-08-25 03:22:15,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 03:22:16,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9226.666666666666, ans=0.20773333333333333 +2024-08-25 03:22:31,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=9280.0, ans=0.125 +2024-08-25 03:22:33,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=9333.333333333334, ans=0.15666666666666665 +2024-08-25 03:22:33,848 INFO [train.py:1114] (2/4) Epoch 1, batch 1750, loss[loss=0.4041, simple_loss=0.3639, pruned_loss=0.157, ctc_loss=0.3003, over 19669.00 frames. ], tot_loss[loss=0.4944, simple_loss=0.426, pruned_loss=0.1996, ctc_loss=0.3811, over 3851010.48 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 16.0 +2024-08-25 03:22:37,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=9333.333333333334, ans=0.5733333333333334 +2024-08-25 03:22:46,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=9386.666666666666, ans=0.025 +2024-08-25 03:23:10,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=9493.333333333334, ans=0.5677333333333334 +2024-08-25 03:23:12,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=9493.333333333334, ans=0.008805797101449275 +2024-08-25 03:23:31,432 INFO [train.py:1114] (2/4) Epoch 1, batch 1800, loss[loss=0.4915, simple_loss=0.4403, pruned_loss=0.1923, ctc_loss=0.3721, over 19630.00 frames. ], tot_loss[loss=0.4892, simple_loss=0.4244, pruned_loss=0.1965, ctc_loss=0.3753, over 3851386.34 frames. ], batch size: 55, lr: 4.44e-02, grad_scale: 8.0 +2024-08-25 03:23:39,410 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.646e+02 3.473e+02 4.220e+02 8.344e+02, threshold=6.945e+02, percent-clipped=3.0 +2024-08-25 03:23:43,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9653.333333333334, ans=0.125 +2024-08-25 03:23:57,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=9706.666666666666, ans=0.04949747468305833 +2024-08-25 03:24:06,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.02 vs. 
limit=14.780000000000001 +2024-08-25 03:24:08,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=9760.0, ans=0.125 +2024-08-25 03:24:15,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=9760.0, ans=0.025 +2024-08-25 03:24:16,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=9760.0, ans=0.5584 +2024-08-25 03:24:28,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.07 vs. limit=4.4719999999999995 +2024-08-25 03:24:28,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9813.333333333334, ans=0.20186666666666667 +2024-08-25 03:24:35,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9866.666666666666, ans=0.0 +2024-08-25 03:24:35,828 INFO [train.py:1114] (2/4) Epoch 1, batch 1850, loss[loss=0.4668, simple_loss=0.4188, pruned_loss=0.1839, ctc_loss=0.3508, over 19577.00 frames. ], tot_loss[loss=0.4796, simple_loss=0.42, pruned_loss=0.1913, ctc_loss=0.3663, over 3855035.66 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:24:56,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=9920.0, ans=0.125 +2024-08-25 03:25:25,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10026.666666666666, ans=0.19973333333333332 +2024-08-25 03:25:32,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10080.0, ans=0.1992 +2024-08-25 03:25:40,382 INFO [train.py:1114] (2/4) Epoch 1, batch 1900, loss[loss=0.4433, simple_loss=0.4162, pruned_loss=0.1667, ctc_loss=0.328, over 19635.00 frames. ], tot_loss[loss=0.4743, simple_loss=0.4185, pruned_loss=0.1883, ctc_loss=0.3607, over 3860409.62 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:25:48,460 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 2.554e+02 2.990e+02 4.033e+02 8.041e+02, threshold=5.979e+02, percent-clipped=3.0 +2024-08-25 03:25:58,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=10186.666666666666, ans=8.074666666666666 +2024-08-25 03:26:03,572 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:26:03,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.50 vs. limit=10.120000000000001 +2024-08-25 03:26:08,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=10240.0, ans=0.125 +2024-08-25 03:26:16,812 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.27 vs. 
limit=8.117333333333335 +2024-08-25 03:26:18,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=10293.333333333334, ans=0.125 +2024-08-25 03:26:27,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=10346.666666666666, ans=0.02355555555555556 +2024-08-25 03:26:38,015 INFO [train.py:1114] (2/4) Epoch 1, batch 1950, loss[loss=0.4276, simple_loss=0.3897, pruned_loss=0.167, ctc_loss=0.3235, over 19602.00 frames. ], tot_loss[loss=0.4684, simple_loss=0.4172, pruned_loss=0.1849, ctc_loss=0.355, over 3870016.29 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 8.0 +2024-08-25 03:26:55,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=10453.333333333334, ans=0.125 +2024-08-25 03:27:05,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=10506.666666666666, ans=0.022888888888888893 +2024-08-25 03:27:09,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=10506.666666666666, ans=0.125 +2024-08-25 03:27:13,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10560.0, ans=0.125 +2024-08-25 03:27:20,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=10560.0, ans=0.125 +2024-08-25 03:27:30,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=10613.333333333334, ans=0.125 +2024-08-25 03:27:36,541 INFO [train.py:1114] (2/4) Epoch 1, batch 2000, loss[loss=0.3649, simple_loss=0.3508, pruned_loss=0.1366, ctc_loss=0.2646, over 19660.00 frames. ], tot_loss[loss=0.4647, simple_loss=0.416, pruned_loss=0.183, ctc_loss=0.3519, over 3853565.15 frames. ], batch size: 45, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:27:44,898 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.863e+02 2.508e+02 3.011e+02 3.695e+02 6.472e+02, threshold=6.022e+02, percent-clipped=1.0 +2024-08-25 03:27:49,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=10720.0, ans=0.5248 +2024-08-25 03:27:55,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.64 vs. limit=11.52 +2024-08-25 03:28:28,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.47 vs. limit=15.66 +2024-08-25 03:28:42,015 INFO [train.py:1114] (2/4) Epoch 1, batch 2050, loss[loss=0.4267, simple_loss=0.3921, pruned_loss=0.1671, ctc_loss=0.3179, over 19688.00 frames. ], tot_loss[loss=0.459, simple_loss=0.4133, pruned_loss=0.1806, ctc_loss=0.3464, over 3851076.60 frames. 
], batch size: 47, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:29:06,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10986.666666666666, ans=0.125 +2024-08-25 03:29:06,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=10986.666666666666, ans=0.025 +2024-08-25 03:29:14,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10986.666666666666, ans=0.19013333333333332 +2024-08-25 03:29:20,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=10986.666666666666, ans=0.020888888888888894 +2024-08-25 03:30:30,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.44 vs. limit=4.664 +2024-08-25 03:30:57,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.48 vs. limit=11.68 +2024-08-25 03:31:02,654 INFO [train.py:1114] (2/4) Epoch 1, batch 2100, loss[loss=0.4241, simple_loss=0.4047, pruned_loss=0.1583, ctc_loss=0.3171, over 19755.00 frames. ], tot_loss[loss=0.4516, simple_loss=0.4101, pruned_loss=0.1767, ctc_loss=0.3397, over 3858190.80 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 16.0 +2024-08-25 03:31:02,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=11200.0, ans=0.020000000000000004 +2024-08-25 03:31:19,367 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.953e+02 2.443e+02 2.901e+02 4.101e+02 7.108e+02, threshold=5.802e+02, percent-clipped=5.0 +2024-08-25 03:31:25,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=11253.333333333334, ans=0.125 +2024-08-25 03:31:52,604 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:31:55,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.77 vs. limit=10.653333333333332 +2024-08-25 03:32:04,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=4.704 +2024-08-25 03:32:05,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=11360.0, ans=0.019333333333333338 +2024-08-25 03:32:14,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=11413.333333333334, ans=0.019111111111111106 +2024-08-25 03:32:32,790 INFO [train.py:1114] (2/4) Epoch 1, batch 2150, loss[loss=0.3661, simple_loss=0.3669, pruned_loss=0.1306, ctc_loss=0.2602, over 19853.00 frames. ], tot_loss[loss=0.4461, simple_loss=0.4078, pruned_loss=0.1738, ctc_loss=0.3344, over 3868217.85 frames. 
], batch size: 52, lr: 4.41e-02, grad_scale: 8.0 +2024-08-25 03:32:44,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11466.666666666666, ans=0.125 +2024-08-25 03:32:46,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=11466.666666666666, ans=0.008376811594202898 +2024-08-25 03:32:53,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=11520.0, ans=0.125 +2024-08-25 03:32:57,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11520.0, ans=0.1848 +2024-08-25 03:33:23,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.25 vs. limit=16.18 +2024-08-25 03:33:27,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=11626.666666666666, ans=10.0 +2024-08-25 03:33:30,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=11626.666666666666, ans=0.125 +2024-08-25 03:33:49,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=11680.0, ans=0.025 +2024-08-25 03:33:49,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=11680.0, ans=0.125 +2024-08-25 03:33:52,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=11.879999999999999 +2024-08-25 03:33:57,385 INFO [train.py:1114] (2/4) Epoch 1, batch 2200, loss[loss=0.4278, simple_loss=0.4073, pruned_loss=0.1632, ctc_loss=0.3047, over 19608.00 frames. ], tot_loss[loss=0.4413, simple_loss=0.4058, pruned_loss=0.1713, ctc_loss=0.3297, over 3866449.55 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 8.0 +2024-08-25 03:33:57,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.41 vs. limit=16.3 +2024-08-25 03:34:07,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=11733.333333333334, ans=0.008318840579710145 +2024-08-25 03:34:08,401 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.911e+02 2.628e+02 3.380e+02 4.438e+02 7.655e+02, threshold=6.760e+02, percent-clipped=12.0 +2024-08-25 03:34:11,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=11786.666666666666, ans=0.125 +2024-08-25 03:34:24,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=11.940000000000001 +2024-08-25 03:35:03,292 INFO [train.py:1114] (2/4) Epoch 1, batch 2250, loss[loss=0.389, simple_loss=0.3903, pruned_loss=0.1389, ctc_loss=0.2747, over 19622.00 frames. ], tot_loss[loss=0.4376, simple_loss=0.4045, pruned_loss=0.1693, ctc_loss=0.3256, over 3867289.53 frames. 
], batch size: 55, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:35:11,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=12000.0, ans=0.0 +2024-08-25 03:35:13,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12000.0, ans=0.18 +2024-08-25 03:35:23,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.44 vs. limit=16.54 +2024-08-25 03:35:46,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=12160.0, ans=0.3824 +2024-08-25 03:35:54,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=12213.333333333334, ans=0.0 +2024-08-25 03:36:00,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.68 vs. limit=11.106666666666667 +2024-08-25 03:36:03,074 INFO [train.py:1114] (2/4) Epoch 1, batch 2300, loss[loss=0.3813, simple_loss=0.3759, pruned_loss=0.139, ctc_loss=0.2721, over 19513.00 frames. ], tot_loss[loss=0.4328, simple_loss=0.4018, pruned_loss=0.167, ctc_loss=0.3208, over 3861513.50 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:36:12,284 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.546e+02 3.099e+02 3.956e+02 8.242e+02, threshold=6.199e+02, percent-clipped=6.0 +2024-08-25 03:36:18,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12320.0, ans=0.125 +2024-08-25 03:36:28,919 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:36:30,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=12.14 +2024-08-25 03:36:40,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=12426.666666666666, ans=0.07 +2024-08-25 03:36:46,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12426.666666666666, ans=0.17573333333333335 +2024-08-25 03:36:47,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=12426.666666666666, ans=0.025 +2024-08-25 03:36:56,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12480.0, ans=0.125 +2024-08-25 03:37:00,690 INFO [train.py:1114] (2/4) Epoch 1, batch 2350, loss[loss=0.4733, simple_loss=0.4317, pruned_loss=0.1884, ctc_loss=0.345, over 19677.00 frames. ], tot_loss[loss=0.4295, simple_loss=0.4006, pruned_loss=0.1652, ctc_loss=0.3172, over 3863663.06 frames. 
], batch size: 63, lr: 4.40e-02, grad_scale: 8.0 +2024-08-25 03:37:06,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=12533.333333333334, ans=0.4613333333333333 +2024-08-25 03:37:21,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12640.0, ans=0.125 +2024-08-25 03:37:22,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=12640.0, ans=0.014000000000000005 +2024-08-25 03:37:38,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.21 vs. limit=17.02 +2024-08-25 03:37:59,413 INFO [train.py:1114] (2/4) Epoch 1, batch 2400, loss[loss=0.4323, simple_loss=0.4116, pruned_loss=0.1647, ctc_loss=0.3092, over 19247.00 frames. ], tot_loss[loss=0.4288, simple_loss=0.4014, pruned_loss=0.1645, ctc_loss=0.316, over 3858080.96 frames. ], batch size: 71, lr: 4.39e-02, grad_scale: 16.0 +2024-08-25 03:38:08,241 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.522e+02 3.053e+02 3.990e+02 1.210e+03, threshold=6.106e+02, percent-clipped=3.0 +2024-08-25 03:38:16,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=12853.333333333334, ans=0.013111111111111108 +2024-08-25 03:38:24,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-25 03:38:29,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-25 03:38:34,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=12960.0, ans=0.008052173913043479 +2024-08-25 03:38:39,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.16 vs. limit=11.48 +2024-08-25 03:39:03,897 INFO [train.py:1114] (2/4) Epoch 1, batch 2450, loss[loss=0.5347, simple_loss=0.4495, pruned_loss=0.2263, ctc_loss=0.4184, over 13508.00 frames. ], tot_loss[loss=0.4386, simple_loss=0.4067, pruned_loss=0.1699, ctc_loss=0.3247, over 3731381.37 frames. ], batch size: 141, lr: 4.39e-02, grad_scale: 16.0 +2024-08-25 03:39:21,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=12.42 +2024-08-25 03:39:26,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=13120.0, ans=0.012000000000000004 +2024-08-25 03:39:45,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=13226.666666666666, ans=0.125 +2024-08-25 03:40:43,714 INFO [train.py:1114] (2/4) Epoch 2, batch 0, loss[loss=0.4345, simple_loss=0.3942, pruned_loss=0.1723, ctc_loss=0.3258, over 19423.00 frames. ], tot_loss[loss=0.4345, simple_loss=0.3942, pruned_loss=0.1723, ctc_loss=0.3258, over 19423.00 frames. 
], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 03:40:43,715 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-25 03:40:55,165 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.3317, simple_loss=0.3718, pruned_loss=0.1058, ctc_loss=0.2, over 944034.00 frames. +2024-08-25 03:40:55,166 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB +2024-08-25 03:41:04,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=13280.0, ans=0.125 +2024-08-25 03:41:10,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.94 vs. limit=17.5 +2024-08-25 03:41:12,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=13333.333333333334, ans=0.125 +2024-08-25 03:41:17,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.388e+02 2.818e+02 3.444e+02 6.577e+02, threshold=5.636e+02, percent-clipped=3.0 +2024-08-25 03:41:21,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.85 vs. limit=17.54 +2024-08-25 03:41:57,983 INFO [train.py:1114] (2/4) Epoch 2, batch 50, loss[loss=0.3175, simple_loss=0.3239, pruned_loss=0.1119, ctc_loss=0.2181, over 19707.00 frames. ], tot_loss[loss=0.4185, simple_loss=0.397, pruned_loss=0.1592, ctc_loss=0.304, over 845808.55 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:42:09,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=13600.0, ans=0.125 +2024-08-25 03:42:12,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=13600.0, ans=0.025 +2024-08-25 03:42:19,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13600.0, ans=0.16399999999999998 +2024-08-25 03:42:30,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=13653.333333333334, ans=0.009777777777777774 +2024-08-25 03:43:11,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=13706.666666666666, ans=0.00955555555555556 +2024-08-25 03:43:29,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=13760.0, ans=0.125 +2024-08-25 03:43:36,892 INFO [train.py:1114] (2/4) Epoch 2, batch 100, loss[loss=0.4026, simple_loss=0.3766, pruned_loss=0.1556, ctc_loss=0.2935, over 19733.00 frames. ], tot_loss[loss=0.4197, simple_loss=0.3992, pruned_loss=0.1592, ctc_loss=0.3047, over 1500191.62 frames. ], batch size: 51, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:43:46,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.30 vs. 
limit=9.525333333333332 +2024-08-25 03:43:49,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=13813.333333333334, ans=0.125 +2024-08-25 03:43:55,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=13866.666666666666, ans=0.025 +2024-08-25 03:44:02,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.500e+02 2.916e+02 3.893e+02 6.295e+02, threshold=5.832e+02, percent-clipped=2.0 +2024-08-25 03:44:23,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13973.333333333334, ans=0.125 +2024-08-25 03:44:26,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=13973.333333333334, ans=0.125 +2024-08-25 03:44:34,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=14026.666666666666, ans=0.008222222222222228 +2024-08-25 03:44:42,812 INFO [train.py:1114] (2/4) Epoch 2, batch 150, loss[loss=0.3807, simple_loss=0.3596, pruned_loss=0.1454, ctc_loss=0.2774, over 19700.00 frames. ], tot_loss[loss=0.4097, simple_loss=0.3932, pruned_loss=0.1541, ctc_loss=0.2953, over 2029517.07 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0 +2024-08-25 03:44:46,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14080.0, ans=0.125 +2024-08-25 03:45:12,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=14186.666666666666, ans=0.007785507246376812 +2024-08-25 03:45:38,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=14293.333333333334, ans=0.8929333333333334 +2024-08-25 03:45:42,117 INFO [train.py:1114] (2/4) Epoch 2, batch 200, loss[loss=0.4261, simple_loss=0.3987, pruned_loss=0.1654, ctc_loss=0.3066, over 18357.00 frames. ], tot_loss[loss=0.4039, simple_loss=0.3896, pruned_loss=0.1511, ctc_loss=0.2898, over 2437654.93 frames. ], batch size: 85, lr: 4.28e-02, grad_scale: 16.0 +2024-08-25 03:45:48,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.47 vs. limit=12.173333333333332 +2024-08-25 03:45:51,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=14346.666666666666, ans=0.3978666666666667 +2024-08-25 03:46:06,465 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.951e+02 2.445e+02 2.940e+02 3.728e+02 6.995e+02, threshold=5.880e+02, percent-clipped=3.0 +2024-08-25 03:46:15,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=14453.333333333334, ans=0.007727536231884058 +2024-08-25 03:46:17,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=14453.333333333334, ans=0.0 +2024-08-25 03:46:33,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.15 vs. 
limit=18.42 +2024-08-25 03:46:42,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=14560.0, ans=0.125 +2024-08-25 03:46:45,948 INFO [train.py:1114] (2/4) Epoch 2, batch 250, loss[loss=0.3939, simple_loss=0.3942, pruned_loss=0.1401, ctc_loss=0.2834, over 19354.00 frames. ], tot_loss[loss=0.4035, simple_loss=0.3899, pruned_loss=0.1508, ctc_loss=0.2889, over 2756627.41 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 16.0 +2024-08-25 03:47:00,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=14666.666666666666, ans=0.125 +2024-08-25 03:47:05,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.74 vs. limit=12.333333333333332 +2024-08-25 03:47:16,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=14720.0, ans=0.15280000000000002 +2024-08-25 03:47:37,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=14826.666666666666, ans=0.125 +2024-08-25 03:47:50,839 INFO [train.py:1114] (2/4) Epoch 2, batch 300, loss[loss=0.4182, simple_loss=0.4049, pruned_loss=0.1584, ctc_loss=0.2869, over 19528.00 frames. ], tot_loss[loss=0.3996, simple_loss=0.3876, pruned_loss=0.1488, ctc_loss=0.2851, over 3001049.19 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 16.0 +2024-08-25 03:47:55,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=14880.0, ans=0.007634782608695653 +2024-08-25 03:48:11,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=14933.333333333334, ans=0.05 +2024-08-25 03:48:13,160 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.396e+02 2.818e+02 3.488e+02 8.647e+02, threshold=5.636e+02, percent-clipped=6.0 +2024-08-25 03:48:37,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=15093.333333333334, ans=0.0037777777777777757 +2024-08-25 03:48:42,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=15093.333333333334, ans=0.0037777777777777757 +2024-08-25 03:48:50,380 INFO [train.py:1114] (2/4) Epoch 2, batch 350, loss[loss=0.3687, simple_loss=0.3624, pruned_loss=0.1356, ctc_loss=0.2598, over 19768.00 frames. ], tot_loss[loss=0.3984, simple_loss=0.3871, pruned_loss=0.1481, ctc_loss=0.2837, over 3190772.48 frames. 
], batch size: 48, lr: 4.27e-02, grad_scale: 16.0 +2024-08-25 03:48:51,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=15146.666666666666, ans=0.125 +2024-08-25 03:49:33,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=15200.0, ans=0.42800000000000005 +2024-08-25 03:49:47,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-25 03:50:01,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=15306.666666666666, ans=0.125 +2024-08-25 03:50:15,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15360.0, ans=0.125 +2024-08-25 03:50:17,348 INFO [train.py:1114] (2/4) Epoch 2, batch 400, loss[loss=0.4301, simple_loss=0.406, pruned_loss=0.165, ctc_loss=0.3105, over 19488.00 frames. ], tot_loss[loss=0.3962, simple_loss=0.386, pruned_loss=0.1469, ctc_loss=0.2816, over 3343165.54 frames. ], batch size: 54, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 03:50:21,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.30 vs. limit=13.280000000000001 +2024-08-25 03:50:22,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=15413.333333333334, ans=0.125 +2024-08-25 03:50:22,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=15413.333333333334, ans=0.04949747468305833 +2024-08-25 03:50:23,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=13.280000000000001 +2024-08-25 03:50:25,823 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.515e+00 +2024-08-25 03:50:39,706 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.407e+02 2.984e+02 3.456e+02 5.488e+02, threshold=5.968e+02, percent-clipped=0.0 +2024-08-25 03:50:41,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=15520.0, ans=0.1 +2024-08-25 03:50:44,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15520.0, ans=0.125 +2024-08-25 03:51:19,363 INFO [train.py:1114] (2/4) Epoch 2, batch 450, loss[loss=0.3914, simple_loss=0.3954, pruned_loss=0.1404, ctc_loss=0.2664, over 19608.00 frames. ], tot_loss[loss=0.3954, simple_loss=0.3854, pruned_loss=0.1467, ctc_loss=0.2802, over 3449550.98 frames. 
], batch size: 55, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 03:51:24,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15680.0, ans=0.14320000000000002 +2024-08-25 03:51:25,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15680.0, ans=0.125 +2024-08-25 03:51:29,293 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 03:51:41,432 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=3.244e+00 +2024-08-25 03:51:46,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=15786.666666666666, ans=0.0 +2024-08-25 03:52:05,133 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.25 vs. limit=13.440000000000001 +2024-08-25 03:52:05,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. limit=13.440000000000001 +2024-08-25 03:52:09,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=15893.333333333334, ans=0.125 +2024-08-25 03:52:21,862 INFO [train.py:1114] (2/4) Epoch 2, batch 500, loss[loss=0.3854, simple_loss=0.3897, pruned_loss=0.1372, ctc_loss=0.2666, over 19707.00 frames. ], tot_loss[loss=0.3938, simple_loss=0.3845, pruned_loss=0.1458, ctc_loss=0.2787, over 3545511.59 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 16.0 +2024-08-25 03:52:57,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=15946.666666666666, ans=0.00022222222222222088 +2024-08-25 03:52:57,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=15946.666666666666, ans=0.125 +2024-08-25 03:53:07,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=16000.0, ans=0.0 +2024-08-25 03:53:11,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.425e+02 3.079e+02 3.995e+02 1.154e+03, threshold=6.159e+02, percent-clipped=13.0 +2024-08-25 03:53:13,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.35 vs. limit=10.421333333333333 +2024-08-25 03:53:31,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=16106.666666666666, ans=0.125 +2024-08-25 03:53:42,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=16160.0, ans=0.125 +2024-08-25 03:53:49,970 INFO [train.py:1114] (2/4) Epoch 2, batch 550, loss[loss=0.3779, simple_loss=0.3823, pruned_loss=0.1347, ctc_loss=0.2604, over 19245.00 frames. ], tot_loss[loss=0.3924, simple_loss=0.3839, pruned_loss=0.145, ctc_loss=0.2774, over 3607977.97 frames. 
], batch size: 71, lr: 4.25e-02, grad_scale: 16.0 +2024-08-25 03:54:14,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=16320.0, ans=0.4448 +2024-08-25 03:54:48,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 03:54:51,526 INFO [train.py:1114] (2/4) Epoch 2, batch 600, loss[loss=0.3973, simple_loss=0.3992, pruned_loss=0.1424, ctc_loss=0.2764, over 19326.00 frames. ], tot_loss[loss=0.3911, simple_loss=0.3836, pruned_loss=0.1442, ctc_loss=0.2756, over 3665666.24 frames. ], batch size: 67, lr: 4.24e-02, grad_scale: 16.0 +2024-08-25 03:54:53,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.02 vs. limit=13.68 +2024-08-25 03:54:54,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=16480.0, ans=0.13520000000000001 +2024-08-25 03:54:56,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=16480.0, ans=0.04949747468305833 +2024-08-25 03:54:57,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=16480.0, ans=0.32320000000000004 +2024-08-25 03:55:14,978 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.336e+02 2.753e+02 3.494e+02 8.105e+02, threshold=5.507e+02, percent-clipped=1.0 +2024-08-25 03:55:26,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=16586.666666666668, ans=0.9158666666666666 +2024-08-25 03:55:50,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=16693.333333333332, ans=0.125 +2024-08-25 03:55:54,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=16693.333333333332, ans=13.759999999999998 +2024-08-25 03:55:56,175 INFO [train.py:1114] (2/4) Epoch 2, batch 650, loss[loss=0.3814, simple_loss=0.3757, pruned_loss=0.139, ctc_loss=0.2727, over 19772.00 frames. ], tot_loss[loss=0.3886, simple_loss=0.3817, pruned_loss=0.143, ctc_loss=0.2734, over 3716440.71 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 16.0 +2024-08-25 03:56:03,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=16746.666666666668, ans=0.07 +2024-08-25 03:56:16,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.93 vs. limit=20.1 +2024-08-25 03:56:45,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=16960.0, ans=0.125 +2024-08-25 03:56:45,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=16960.0, ans=0.3064 +2024-08-25 03:56:50,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.52 vs. 
limit=13.86 +2024-08-25 03:56:54,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=16960.0, ans=0.007182608695652175 +2024-08-25 03:56:56,406 INFO [train.py:1114] (2/4) Epoch 2, batch 700, loss[loss=0.3479, simple_loss=0.3539, pruned_loss=0.1236, ctc_loss=0.2363, over 19727.00 frames. ], tot_loss[loss=0.3877, simple_loss=0.3817, pruned_loss=0.1424, ctc_loss=0.2721, over 3748435.88 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 16.0 +2024-08-25 03:57:08,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=5.5600000000000005 +2024-08-25 03:57:17,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17066.666666666668, ans=0.125 +2024-08-25 03:57:23,249 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.791e+02 2.519e+02 2.895e+02 3.628e+02 6.087e+02, threshold=5.790e+02, percent-clipped=2.0 +2024-08-25 03:57:43,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. limit=7.434666666666667 +2024-08-25 03:58:01,102 INFO [train.py:1114] (2/4) Epoch 2, batch 750, loss[loss=0.378, simple_loss=0.3883, pruned_loss=0.1336, ctc_loss=0.2515, over 19498.00 frames. ], tot_loss[loss=0.3842, simple_loss=0.3795, pruned_loss=0.1408, ctc_loss=0.2685, over 3774949.39 frames. ], batch size: 54, lr: 4.23e-02, grad_scale: 16.0 +2024-08-25 03:58:25,124 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.20 vs. limit=14.02 +2024-08-25 04:00:00,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=17493.333333333332, ans=0.007066666666666667 +2024-08-25 04:00:06,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=17493.333333333332, ans=0.0 +2024-08-25 04:00:16,108 INFO [train.py:1114] (2/4) Epoch 2, batch 800, loss[loss=0.362, simple_loss=0.3596, pruned_loss=0.1324, ctc_loss=0.2491, over 19417.00 frames. ], tot_loss[loss=0.3815, simple_loss=0.378, pruned_loss=0.1394, ctc_loss=0.2658, over 3796406.38 frames. ], batch size: 48, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 04:00:30,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=17600.0, ans=0.28400000000000003 +2024-08-25 04:00:39,332 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.611e+02 3.088e+02 3.881e+02 9.768e+02, threshold=6.176e+02, percent-clipped=6.0 +2024-08-25 04:01:14,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=17813.333333333332, ans=0.125 +2024-08-25 04:01:15,037 INFO [train.py:1114] (2/4) Epoch 2, batch 850, loss[loss=0.396, simple_loss=0.3894, pruned_loss=0.1453, ctc_loss=0.2799, over 19649.00 frames. ], tot_loss[loss=0.3787, simple_loss=0.3762, pruned_loss=0.1379, ctc_loss=0.2633, over 3815173.93 frames. 
], batch size: 59, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 04:01:22,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=17813.333333333332, ans=0.125 +2024-08-25 04:01:27,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=17866.666666666668, ans=0.0 +2024-08-25 04:01:45,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.93 vs. limit=11.168 +2024-08-25 04:02:18,993 INFO [train.py:1114] (2/4) Epoch 2, batch 900, loss[loss=0.3415, simple_loss=0.3398, pruned_loss=0.1237, ctc_loss=0.239, over 19420.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.3767, pruned_loss=0.139, ctc_loss=0.2645, over 3818259.72 frames. ], batch size: 48, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 04:02:45,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=18080.0, ans=0.06920000000000001 +2024-08-25 04:02:52,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.79 vs. limit=21.1 +2024-08-25 04:03:00,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18186.666666666668, ans=0.125 +2024-08-25 04:03:03,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.814e+02 2.530e+02 3.033e+02 3.602e+02 3.379e+03, threshold=6.066e+02, percent-clipped=6.0 +2024-08-25 04:03:05,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=18186.666666666668, ans=0.125 +2024-08-25 04:03:12,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=18240.0, ans=0.125 +2024-08-25 04:03:36,915 INFO [train.py:1114] (2/4) Epoch 2, batch 950, loss[loss=0.3804, simple_loss=0.3732, pruned_loss=0.1396, ctc_loss=0.2708, over 19501.00 frames. ], tot_loss[loss=0.3806, simple_loss=0.3773, pruned_loss=0.139, ctc_loss=0.2646, over 3818663.54 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 04:03:45,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=18346.666666666668, ans=0.0 +2024-08-25 04:03:59,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.82 vs. limit=5.76 +2024-08-25 04:04:24,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=18506.666666666668, ans=0.0 +2024-08-25 04:04:24,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=18506.666666666668, ans=0.125 +2024-08-25 04:04:24,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18506.666666666668, ans=0.125 +2024-08-25 04:04:30,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.47 vs. 
limit=14.46 +2024-08-25 04:04:31,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=18560.0, ans=0.125 +2024-08-25 04:04:32,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18560.0, ans=0.125 +2024-08-25 04:04:39,322 INFO [train.py:1114] (2/4) Epoch 2, batch 1000, loss[loss=0.3065, simple_loss=0.336, pruned_loss=0.1002, ctc_loss=0.1913, over 19851.00 frames. ], tot_loss[loss=0.3824, simple_loss=0.3787, pruned_loss=0.1398, ctc_loss=0.2661, over 3815174.09 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 04:04:40,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=18613.333333333332, ans=0.125 +2024-08-25 04:04:44,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=18613.333333333332, ans=0.006823188405797102 +2024-08-25 04:05:05,789 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.321e+02 2.743e+02 3.485e+02 6.350e+02, threshold=5.486e+02, percent-clipped=2.0 +2024-08-25 04:05:39,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18826.666666666668, ans=0.11173333333333332 +2024-08-25 04:05:41,818 INFO [train.py:1114] (2/4) Epoch 2, batch 1050, loss[loss=0.3961, simple_loss=0.3939, pruned_loss=0.1445, ctc_loss=0.2732, over 19847.00 frames. ], tot_loss[loss=0.3799, simple_loss=0.3771, pruned_loss=0.1385, ctc_loss=0.2641, over 3821771.57 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 04:05:42,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=18880.0, ans=0.0 +2024-08-25 04:05:46,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.20 vs. limit=11.552 +2024-08-25 04:05:50,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18880.0, ans=0.125 +2024-08-25 04:06:01,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=18933.333333333332, ans=0.125 +2024-08-25 04:06:12,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=18986.666666666668, ans=0.125 +2024-08-25 04:06:40,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=19093.333333333332, ans=0.48639999999999994 +2024-08-25 04:06:43,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.71 vs. limit=14.573333333333334 +2024-08-25 04:06:44,165 INFO [train.py:1114] (2/4) Epoch 2, batch 1100, loss[loss=0.3128, simple_loss=0.3446, pruned_loss=0.1018, ctc_loss=0.1939, over 19590.00 frames. ], tot_loss[loss=0.3759, simple_loss=0.375, pruned_loss=0.1364, ctc_loss=0.26, over 3831001.47 frames. 
], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 04:07:11,083 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.465e+02 2.960e+02 4.039e+02 7.406e+02, threshold=5.919e+02, percent-clipped=11.0 +2024-08-25 04:07:42,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=19306.666666666668, ans=0.125 +2024-08-25 04:08:01,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=19360.0, ans=0.006660869565217392 +2024-08-25 04:08:08,078 INFO [train.py:1114] (2/4) Epoch 2, batch 1150, loss[loss=0.3623, simple_loss=0.3656, pruned_loss=0.1309, ctc_loss=0.2428, over 19605.00 frames. ], tot_loss[loss=0.3751, simple_loss=0.3745, pruned_loss=0.136, ctc_loss=0.2589, over 3830964.62 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 04:08:34,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=19520.0, ans=0.09899494936611666 +2024-08-25 04:09:01,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=19626.666666666668, ans=0.125 +2024-08-25 04:09:01,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=19626.666666666668, ans=0.125 +2024-08-25 04:09:08,100 INFO [train.py:1114] (2/4) Epoch 2, batch 1200, loss[loss=0.3599, simple_loss=0.3737, pruned_loss=0.1244, ctc_loss=0.2434, over 19853.00 frames. ], tot_loss[loss=0.3767, simple_loss=0.3761, pruned_loss=0.1367, ctc_loss=0.26, over 3825842.79 frames. ], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 04:09:08,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=19680.0, ans=0.006591304347826087 +2024-08-25 04:09:36,233 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 2.637e+02 3.065e+02 4.000e+02 6.600e+02, threshold=6.130e+02, percent-clipped=2.0 +2024-08-25 04:09:50,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19840.0, ans=0.10160000000000002 +2024-08-25 04:10:02,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=19893.333333333332, ans=0.0 +2024-08-25 04:10:11,973 INFO [train.py:1114] (2/4) Epoch 2, batch 1250, loss[loss=0.3563, simple_loss=0.3661, pruned_loss=0.1251, ctc_loss=0.2405, over 19508.00 frames. ], tot_loss[loss=0.3761, simple_loss=0.3762, pruned_loss=0.1362, ctc_loss=0.259, over 3843963.19 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 04:10:12,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=19946.666666666668, ans=0.0 +2024-08-25 04:10:16,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.66 vs. 
limit=22.46 +2024-08-25 04:10:19,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19946.666666666668, ans=0.125 +2024-08-25 04:10:41,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=20053.333333333332, ans=0.0065101449275362325 +2024-08-25 04:10:42,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=20053.333333333332, ans=0.07 +2024-08-25 04:11:01,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=20160.0, ans=0.125 +2024-08-25 04:11:02,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=20160.0, ans=0.0 +2024-08-25 04:11:15,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.21 vs. limit=15.0 +2024-08-25 04:11:15,942 INFO [train.py:1114] (2/4) Epoch 2, batch 1300, loss[loss=0.4103, simple_loss=0.3966, pruned_loss=0.1539, ctc_loss=0.2903, over 18856.00 frames. ], tot_loss[loss=0.3737, simple_loss=0.3748, pruned_loss=0.135, ctc_loss=0.2566, over 3848656.50 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 04:11:25,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=20213.333333333332, ans=0.125 +2024-08-25 04:11:41,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.187e+02 2.429e+02 2.931e+02 4.736e+02, threshold=4.858e+02, percent-clipped=0.0 +2024-08-25 04:11:46,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.62 vs. limit=15.0 +2024-08-25 04:11:50,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=20373.333333333332, ans=0.006440579710144928 +2024-08-25 04:12:00,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=20373.333333333332, ans=0.125 +2024-08-25 04:12:08,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=20426.666666666668, ans=0.2 +2024-08-25 04:12:12,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.94 vs. limit=12.0 +2024-08-25 04:12:13,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=20426.666666666668, ans=0.07 +2024-08-25 04:12:15,275 INFO [train.py:1114] (2/4) Epoch 2, batch 1350, loss[loss=0.3371, simple_loss=0.3593, pruned_loss=0.1141, ctc_loss=0.2164, over 19769.00 frames. ], tot_loss[loss=0.371, simple_loss=0.3733, pruned_loss=0.1336, ctc_loss=0.2537, over 3859181.43 frames. 
], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:12:36,973 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:12:37,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=20533.333333333332, ans=0.0 +2024-08-25 04:12:39,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20586.666666666668, ans=0.125 +2024-08-25 04:12:51,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=20640.0, ans=0.0 +2024-08-25 04:12:57,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.55 vs. limit=22.5 +2024-08-25 04:13:03,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=20640.0, ans=0.125 +2024-08-25 04:13:18,546 INFO [train.py:1114] (2/4) Epoch 2, batch 1400, loss[loss=0.2914, simple_loss=0.3099, pruned_loss=0.09955, ctc_loss=0.1843, over 19692.00 frames. ], tot_loss[loss=0.3707, simple_loss=0.3731, pruned_loss=0.1335, ctc_loss=0.2532, over 3865934.75 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:13:59,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=20853.333333333332, ans=0.006336231884057971 +2024-08-25 04:14:03,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.385e+02 2.674e+02 3.744e+02 6.684e+02, threshold=5.347e+02, percent-clipped=6.0 +2024-08-25 04:14:08,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20853.333333333332, ans=0.1 +2024-08-25 04:14:19,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0 +2024-08-25 04:14:27,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=20960.0, ans=0.2 +2024-08-25 04:14:37,958 INFO [train.py:1114] (2/4) Epoch 2, batch 1450, loss[loss=0.3726, simple_loss=0.3849, pruned_loss=0.1321, ctc_loss=0.2401, over 19650.00 frames. ], tot_loss[loss=0.37, simple_loss=0.3729, pruned_loss=0.1332, ctc_loss=0.2518, over 3864306.48 frames. 
], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:14:38,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=21013.333333333332, ans=0.0 +2024-08-25 04:14:45,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=21013.333333333332, ans=0.025 +2024-08-25 04:14:46,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=21013.333333333332, ans=0.125 +2024-08-25 04:14:51,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=21066.666666666668, ans=0.1 +2024-08-25 04:15:51,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=21066.666666666668, ans=0.2 +2024-08-25 04:15:54,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=21120.0, ans=0.125 +2024-08-25 04:15:57,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=21120.0, ans=0.125 +2024-08-25 04:15:58,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=21120.0, ans=0.125 +2024-08-25 04:16:04,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.75 vs. limit=15.0 +2024-08-25 04:16:12,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=21173.333333333332, ans=0.025 +2024-08-25 04:16:12,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.44 vs. limit=15.0 +2024-08-25 04:16:33,079 INFO [train.py:1114] (2/4) Epoch 2, batch 1500, loss[loss=0.3686, simple_loss=0.3708, pruned_loss=0.1332, ctc_loss=0.2496, over 19595.00 frames. ], tot_loss[loss=0.3685, simple_loss=0.3723, pruned_loss=0.1323, ctc_loss=0.2503, over 3863794.10 frames. ], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:16:38,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=21280.0, ans=0.006243478260869566 +2024-08-25 04:17:04,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=21386.666666666668, ans=0.1 +2024-08-25 04:17:08,003 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.509e+02 2.906e+02 4.274e+02 8.598e+02, threshold=5.813e+02, percent-clipped=13.0 +2024-08-25 04:17:18,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=21440.0, ans=0.025 +2024-08-25 04:17:24,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=21440.0, ans=10.0 +2024-08-25 04:17:34,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21493.333333333332, ans=0.125 +2024-08-25 04:17:42,735 INFO [train.py:1114] (2/4) Epoch 2, batch 1550, loss[loss=0.401, simple_loss=0.3992, pruned_loss=0.1461, ctc_loss=0.2765, over 19601.00 frames. 
], tot_loss[loss=0.37, simple_loss=0.3729, pruned_loss=0.1332, ctc_loss=0.2515, over 3847595.28 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0
+2024-08-25 04:17:48,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21546.666666666668, ans=0.125
+2024-08-25 04:17:54,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=21546.666666666668, ans=0.2
+2024-08-25 04:17:58,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=21600.0, ans=0.0061739130434782605
+2024-08-25 04:18:04,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=21600.0, ans=0.125
+2024-08-25 04:18:06,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=21653.333333333332, ans=0.1
+2024-08-25 04:18:32,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.81 vs. limit=15.0
+2024-08-25 04:18:41,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=21760.0, ans=0.125
+2024-08-25 04:18:44,946 INFO [train.py:1114] (2/4) Epoch 2, batch 1600, loss[loss=0.3621, simple_loss=0.3755, pruned_loss=0.1262, ctc_loss=0.2407, over 19842.00 frames. ], tot_loss[loss=0.369, simple_loss=0.3722, pruned_loss=0.1328, ctc_loss=0.251, over 3836408.51 frames. ], batch size: 57, lr: 4.13e-02, grad_scale: 32.0
+2024-08-25 04:18:59,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.77 vs. limit=15.0
+2024-08-25 04:19:02,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.59 vs. limit=15.0
+2024-08-25 04:19:13,743 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.370e+02 2.902e+02 3.664e+02 6.938e+02, threshold=5.803e+02, percent-clipped=2.0
+2024-08-25 04:19:14,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=21920.0, ans=0.125
+2024-08-25 04:19:23,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0
+2024-08-25 04:19:27,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21973.333333333332, ans=0.125
+2024-08-25 04:19:40,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.42 vs. limit=12.0
+2024-08-25 04:19:43,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=22026.666666666668, ans=0.125
+2024-08-25 04:19:44,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=22026.666666666668, ans=0.07
+2024-08-25 04:19:49,399 INFO [train.py:1114] (2/4) Epoch 2, batch 1650, loss[loss=0.3871, simple_loss=0.4007, pruned_loss=0.1353, ctc_loss=0.2575, over 19633.00 frames. ], tot_loss[loss=0.3693, simple_loss=0.3722, pruned_loss=0.1329, ctc_loss=0.2514, over 3832415.29 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 32.0
+2024-08-25 04:19:58,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=22080.0, ans=0.125
+2024-08-25 04:20:03,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22133.333333333332, ans=0.1
+2024-08-25 04:20:06,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=22133.333333333332, ans=0.006057971014492754
+2024-08-25 04:20:46,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.71 vs. limit=12.0
+2024-08-25 04:20:48,535 INFO [train.py:1114] (2/4) Epoch 2, batch 1700, loss[loss=0.3132, simple_loss=0.3289, pruned_loss=0.1062, ctc_loss=0.2129, over 19693.00 frames. ], tot_loss[loss=0.3664, simple_loss=0.3707, pruned_loss=0.1312, ctc_loss=0.2488, over 3846676.64 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0
+2024-08-25 04:20:55,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=22346.666666666668, ans=0.0
+2024-08-25 04:20:58,714 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.52 vs. limit=22.5
+2024-08-25 04:20:59,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22400.0, ans=0.1
+2024-08-25 04:21:07,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=22400.0, ans=0.0
+2024-08-25 04:21:16,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.264e+02 2.715e+02 3.253e+02 5.462e+02, threshold=5.430e+02, percent-clipped=0.0
+2024-08-25 04:21:27,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22506.666666666668, ans=0.0
+2024-08-25 04:21:33,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22506.666666666668, ans=0.1
+2024-08-25 04:21:45,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=22560.0, ans=0.025
+2024-08-25 04:21:48,269 INFO [train.py:1114] (2/4) Epoch 2, batch 1750, loss[loss=0.3319, simple_loss=0.3387, pruned_loss=0.1205, ctc_loss=0.2102, over 19650.00 frames. ], tot_loss[loss=0.3647, simple_loss=0.3693, pruned_loss=0.1306, ctc_loss=0.2471, over 3850482.56 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 32.0
+2024-08-25 04:22:27,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=22773.333333333332, ans=0.025
+2024-08-25 04:22:51,785 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.30 vs. limit=10.0
+2024-08-25 04:22:59,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=22826.666666666668, ans=0.025
+2024-08-25 04:23:02,446 INFO [train.py:1114] (2/4) Epoch 2, batch 1800, loss[loss=0.3667, simple_loss=0.3777, pruned_loss=0.1301, ctc_loss=0.2387, over 19615.00 frames. ], tot_loss[loss=0.3648, simple_loss=0.3694, pruned_loss=0.1307, ctc_loss=0.2471, over 3853017.99 frames. ], batch size: 55, lr: 4.11e-02, grad_scale: 32.0
+2024-08-25 04:23:09,259 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:23:11,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22880.0, ans=0.1
+2024-08-25 04:23:21,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=22933.333333333332, ans=0.125
+2024-08-25 04:23:28,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.805e+02 2.473e+02 2.913e+02 3.585e+02 6.262e+02, threshold=5.825e+02, percent-clipped=5.0
+2024-08-25 04:23:39,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=23040.0, ans=0.125
+2024-08-25 04:23:40,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=23040.0, ans=0.2
+2024-08-25 04:23:47,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=23093.333333333332, ans=0.0
+2024-08-25 04:23:54,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=23093.333333333332, ans=0.125
+2024-08-25 04:23:59,519 INFO [train.py:1114] (2/4) Epoch 2, batch 1850, loss[loss=0.3787, simple_loss=0.384, pruned_loss=0.1332, ctc_loss=0.2673, over 19588.00 frames. ], tot_loss[loss=0.3637, simple_loss=0.3689, pruned_loss=0.1301, ctc_loss=0.2458, over 3854273.72 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 32.0
+2024-08-25 04:24:11,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0
+2024-08-25 04:24:13,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23200.0, ans=0.1
+2024-08-25 04:24:26,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23253.333333333332, ans=0.1
+2024-08-25 04:24:38,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23306.666666666668, ans=0.125
+2024-08-25 04:24:53,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=23360.0, ans=0.2
+2024-08-25 04:24:54,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=23360.0, ans=0.025
+2024-08-25 04:24:56,429 INFO [train.py:1114] (2/4) Epoch 2, batch 1900, loss[loss=0.3979, simple_loss=0.3961, pruned_loss=0.1443, ctc_loss=0.2776, over 19637.00 frames. ], tot_loss[loss=0.3646, simple_loss=0.3696, pruned_loss=0.1305, ctc_loss=0.2463, over 3859639.00 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 32.0
+2024-08-25 04:25:21,302 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.247e+02 2.781e+02 3.399e+02 7.136e+02, threshold=5.561e+02, percent-clipped=3.0
+2024-08-25 04:25:55,277 INFO [train.py:1114] (2/4) Epoch 2, batch 1950, loss[loss=0.3643, simple_loss=0.3668, pruned_loss=0.1344, ctc_loss=0.2323, over 19582.00 frames. ], tot_loss[loss=0.3641, simple_loss=0.3701, pruned_loss=0.1299, ctc_loss=0.2455, over 3868670.13 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 32.0
+2024-08-25 04:26:01,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23680.0, ans=0.125
+2024-08-25 04:26:04,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=23680.0, ans=0.125
+2024-08-25 04:26:11,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0
+2024-08-25 04:26:20,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.64 vs. limit=15.0
+2024-08-25 04:26:27,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=23786.666666666668, ans=0.125
+2024-08-25 04:26:43,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=23893.333333333332, ans=0.00567536231884058
+2024-08-25 04:26:49,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23893.333333333332, ans=0.125
+2024-08-25 04:26:54,476 INFO [train.py:1114] (2/4) Epoch 2, batch 2000, loss[loss=0.3354, simple_loss=0.3334, pruned_loss=0.1226, ctc_loss=0.2303, over 19678.00 frames. ], tot_loss[loss=0.364, simple_loss=0.3702, pruned_loss=0.1299, ctc_loss=0.2453, over 3854662.64 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0
+2024-08-25 04:26:54,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=23946.666666666668, ans=0.025
+2024-08-25 04:27:20,452 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.739e+02 2.625e+02 3.128e+02 3.968e+02 6.078e+02, threshold=6.255e+02, percent-clipped=2.0
+2024-08-25 04:27:41,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=24160.0, ans=0.2
+2024-08-25 04:27:42,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24160.0, ans=0.0
+2024-08-25 04:27:51,150 INFO [train.py:1114] (2/4) Epoch 2, batch 2050, loss[loss=0.3056, simple_loss=0.3201, pruned_loss=0.1058, ctc_loss=0.199, over 19714.00 frames. ], tot_loss[loss=0.3626, simple_loss=0.3685, pruned_loss=0.1295, ctc_loss=0.2443, over 3850224.89 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 04:27:58,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.34 vs. limit=15.0
+2024-08-25 04:28:07,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=24266.666666666668, ans=0.025
+2024-08-25 04:28:17,725 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0
+2024-08-25 04:28:22,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=24320.0, ans=0.125
+2024-08-25 04:28:27,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.12 vs. limit=10.0
+2024-08-25 04:28:29,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=24373.333333333332, ans=0.025
+2024-08-25 04:28:30,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=24373.333333333332, ans=0.0055710144927536235
+2024-08-25 04:28:44,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=24426.666666666668, ans=0.125
+2024-08-25 04:28:45,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=24426.666666666668, ans=0.0
+2024-08-25 04:28:47,785 INFO [train.py:1114] (2/4) Epoch 2, batch 2100, loss[loss=0.3591, simple_loss=0.3672, pruned_loss=0.1286, ctc_loss=0.2348, over 19778.00 frames. ], tot_loss[loss=0.3601, simple_loss=0.3669, pruned_loss=0.1283, ctc_loss=0.2422, over 3858887.10 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 04:28:48,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.76 vs. limit=15.0
+2024-08-25 04:28:48,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=24480.0, ans=0.125
+2024-08-25 04:29:04,628 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-08-25 04:29:14,134 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.675e+02 2.311e+02 2.619e+02 3.137e+02 5.086e+02, threshold=5.238e+02, percent-clipped=0.0
+2024-08-25 04:29:21,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=24640.0, ans=0.125
+2024-08-25 04:29:27,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=24640.0, ans=0.125
+2024-08-25 04:29:41,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.55 vs. limit=15.0
+2024-08-25 04:29:44,330 INFO [train.py:1114] (2/4) Epoch 2, batch 2150, loss[loss=0.3118, simple_loss=0.3386, pruned_loss=0.1028, ctc_loss=0.1986, over 19844.00 frames. ], tot_loss[loss=0.3576, simple_loss=0.3652, pruned_loss=0.127, ctc_loss=0.2397, over 3869343.48 frames. ], batch size: 52, lr: 4.07e-02, grad_scale: 32.0
+2024-08-25 04:29:54,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=24800.0, ans=0.125
+2024-08-25 04:29:57,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.76 vs. limit=15.0
+2024-08-25 04:30:08,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24853.333333333332, ans=0.1
+2024-08-25 04:30:12,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=24853.333333333332, ans=0.125
+2024-08-25 04:30:18,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 04:30:22,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 04:30:23,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=24906.666666666668, ans=0.0
+2024-08-25 04:30:39,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0
+2024-08-25 04:30:40,047 INFO [train.py:1114] (2/4) Epoch 2, batch 2200, loss[loss=0.3499, simple_loss=0.3677, pruned_loss=0.1211, ctc_loss=0.2249, over 19581.00 frames. ], tot_loss[loss=0.3565, simple_loss=0.3648, pruned_loss=0.1264, ctc_loss=0.2385, over 3867352.30 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 04:30:45,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25013.333333333332, ans=0.1
+2024-08-25 04:30:58,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0
+2024-08-25 04:31:02,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25120.0, ans=0.1
+2024-08-25 04:31:02,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.06 vs. limit=12.0
+2024-08-25 04:31:06,342 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.398e+02 2.814e+02 3.505e+02 8.042e+02, threshold=5.628e+02, percent-clipped=3.0
+2024-08-25 04:31:09,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25120.0, ans=0.0
+2024-08-25 04:31:13,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=25173.333333333332, ans=0.125
+2024-08-25 04:31:33,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=25226.666666666668, ans=0.5
+2024-08-25 04:31:37,458 INFO [train.py:1114] (2/4) Epoch 2, batch 2250, loss[loss=0.3276, simple_loss=0.3627, pruned_loss=0.1051, ctc_loss=0.206, over 19623.00 frames. ], tot_loss[loss=0.3569, simple_loss=0.3651, pruned_loss=0.1266, ctc_loss=0.2387, over 3866805.21 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 04:31:37,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=25280.0, ans=0.5
+2024-08-25 04:31:39,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=25280.0, ans=0.0
+2024-08-25 04:31:50,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=25333.333333333332, ans=0.125
+2024-08-25 04:32:09,702 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.72 vs. limit=22.5
+2024-08-25 04:32:33,459 INFO [train.py:1114] (2/4) Epoch 2, batch 2300, loss[loss=0.3403, simple_loss=0.3465, pruned_loss=0.1215, ctc_loss=0.2281, over 19496.00 frames. ], tot_loss[loss=0.3571, simple_loss=0.3647, pruned_loss=0.127, ctc_loss=0.239, over 3860956.39 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 16.0
+2024-08-25 04:32:40,327 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.553e-02
+2024-08-25 04:32:42,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25546.666666666668, ans=0.1
+2024-08-25 04:32:59,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.45 vs. limit=22.5
+2024-08-25 04:33:02,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=25653.333333333332, ans=0.005292753623188406
+2024-08-25 04:33:03,049 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.317e+02 2.709e+02 3.466e+02 6.027e+02, threshold=5.417e+02, percent-clipped=4.0
+2024-08-25 04:33:05,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25653.333333333332, ans=0.1
+2024-08-25 04:33:07,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.66 vs. limit=15.0
+2024-08-25 04:33:17,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=25706.666666666668, ans=0.125
+2024-08-25 04:33:28,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=25760.0, ans=0.07
+2024-08-25 04:33:32,438 INFO [train.py:1114] (2/4) Epoch 2, batch 2350, loss[loss=0.3592, simple_loss=0.3709, pruned_loss=0.1267, ctc_loss=0.2354, over 19700.00 frames. ], tot_loss[loss=0.3567, simple_loss=0.3645, pruned_loss=0.1268, ctc_loss=0.2386, over 3863339.85 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 16.0
+2024-08-25 04:33:39,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=25813.333333333332, ans=0.04949747468305833
+2024-08-25 04:33:54,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=25866.666666666668, ans=0.035
+2024-08-25 04:34:03,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=25920.0, ans=0.125
+2024-08-25 04:34:21,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26026.666666666668, ans=0.1
+2024-08-25 04:34:30,680 INFO [train.py:1114] (2/4) Epoch 2, batch 2400, loss[loss=0.3517, simple_loss=0.369, pruned_loss=0.1195, ctc_loss=0.2387, over 19256.00 frames. ], tot_loss[loss=0.3578, simple_loss=0.3661, pruned_loss=0.127, ctc_loss=0.2388, over 3857805.16 frames. ], batch size: 71, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 04:34:56,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.15 vs. limit=15.0
+2024-08-25 04:34:57,152 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.789e+02 2.184e+02 2.505e+02 3.102e+02 8.045e+02, threshold=5.010e+02, percent-clipped=5.0
+2024-08-25 04:34:57,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=26186.666666666668, ans=0.005176811594202899
+2024-08-25 04:35:27,214 INFO [train.py:1114] (2/4) Epoch 2, batch 2450, loss[loss=0.4411, simple_loss=0.4041, pruned_loss=0.1738, ctc_loss=0.326, over 12860.00 frames. ], tot_loss[loss=0.3675, simple_loss=0.3718, pruned_loss=0.132, ctc_loss=0.248, over 3730769.54 frames. ], batch size: 141, lr: 4.03e-02, grad_scale: 32.0
+2024-08-25 04:35:31,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.59 vs. limit=15.0
+2024-08-25 04:35:33,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=26346.666666666668, ans=0.005142028985507246
+2024-08-25 04:35:42,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=26400.0, ans=0.125
+2024-08-25 04:36:48,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26554.666666666668, ans=0.07
+2024-08-25 04:36:55,755 INFO [train.py:1114] (2/4) Epoch 3, batch 0, loss[loss=0.3559, simple_loss=0.3586, pruned_loss=0.1273, ctc_loss=0.2467, over 19414.00 frames. ], tot_loss[loss=0.3559, simple_loss=0.3586, pruned_loss=0.1273, ctc_loss=0.2467, over 19414.00 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 32.0
+2024-08-25 04:36:55,756 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 04:37:08,025 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2847, simple_loss=0.3461, pruned_loss=0.08168, ctc_loss=0.1499, over 944034.00 frames. 
+2024-08-25 04:37:08,025 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB
+2024-08-25 04:37:14,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=26554.666666666668, ans=0.125
+2024-08-25 04:37:24,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.03 vs. limit=22.5
+2024-08-25 04:37:27,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=26608.0, ans=0.0
+2024-08-25 04:37:32,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=26661.333333333332, ans=0.125
+2024-08-25 04:37:34,937 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:37:43,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26714.666666666668, ans=0.125
+2024-08-25 04:37:50,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.252e+02 2.580e+02 3.143e+02 6.401e+02, threshold=5.159e+02, percent-clipped=2.0
+2024-08-25 04:38:10,077 INFO [train.py:1114] (2/4) Epoch 3, batch 50, loss[loss=0.2736, simple_loss=0.3059, pruned_loss=0.08786, ctc_loss=0.1638, over 19698.00 frames. ], tot_loss[loss=0.3592, simple_loss=0.3679, pruned_loss=0.1272, ctc_loss=0.2404, over 844960.58 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 04:38:13,711 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:38:58,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=26928.0, ans=0.125
+2024-08-25 04:39:02,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.26 vs. limit=15.0
+2024-08-25 04:39:09,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=26981.333333333332, ans=0.2
+2024-08-25 04:39:09,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=26981.333333333332, ans=0.2
+2024-08-25 04:39:27,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27088.0, ans=0.1
+2024-08-25 04:39:28,419 INFO [train.py:1114] (2/4) Epoch 3, batch 100, loss[loss=0.3182, simple_loss=0.3404, pruned_loss=0.1071, ctc_loss=0.204, over 19726.00 frames. ], tot_loss[loss=0.3591, simple_loss=0.3682, pruned_loss=0.127, ctc_loss=0.2397, over 1500110.56 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 04:39:48,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27141.333333333332, ans=0.125
+2024-08-25 04:39:49,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=27141.333333333332, ans=0.004969275362318841
+2024-08-25 04:40:11,092 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.865e+02 2.221e+02 2.583e+02 3.158e+02 4.904e+02, threshold=5.165e+02, percent-clipped=0.0
+2024-08-25 04:40:27,486 INFO [train.py:1114] (2/4) Epoch 3, batch 150, loss[loss=0.3183, simple_loss=0.3335, pruned_loss=0.1095, ctc_loss=0.2102, over 19704.00 frames. ], tot_loss[loss=0.3545, simple_loss=0.3651, pruned_loss=0.1249, ctc_loss=0.2353, over 2028676.42 frames. ], batch size: 47, lr: 3.81e-02, grad_scale: 16.0
+2024-08-25 04:40:29,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=15.0
+2024-08-25 04:40:29,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=27354.666666666668, ans=0.125
+2024-08-25 04:40:29,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=27354.666666666668, ans=0.035
+2024-08-25 04:40:30,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27354.666666666668, ans=0.1
+2024-08-25 04:40:45,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=27408.0, ans=0.2
+2024-08-25 04:40:54,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.86 vs. limit=22.5
+2024-08-25 04:41:16,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=27568.0, ans=0.125
+2024-08-25 04:41:26,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=27568.0, ans=0.125
+2024-08-25 04:41:28,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.19 vs. limit=22.5
+2024-08-25 04:41:29,363 INFO [train.py:1114] (2/4) Epoch 3, batch 200, loss[loss=0.4304, simple_loss=0.3999, pruned_loss=0.1705, ctc_loss=0.2997, over 18180.00 frames. ], tot_loss[loss=0.351, simple_loss=0.3622, pruned_loss=0.1235, ctc_loss=0.2321, over 2435563.59 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 04:42:00,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=27728.0, ans=0.0
+2024-08-25 04:42:14,178 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 2.192e+02 2.550e+02 3.125e+02 5.269e+02, threshold=5.099e+02, percent-clipped=1.0
+2024-08-25 04:42:22,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=27834.666666666668, ans=0.125
+2024-08-25 04:42:35,064 INFO [train.py:1114] (2/4) Epoch 3, batch 250, loss[loss=0.3799, simple_loss=0.3775, pruned_loss=0.138, ctc_loss=0.2658, over 19405.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3628, pruned_loss=0.1237, ctc_loss=0.2328, over 2755297.25 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 04:42:37,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=27888.0, ans=0.125
+2024-08-25 04:42:49,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=27941.333333333332, ans=0.125
+2024-08-25 04:42:56,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27941.333333333332, ans=0.125
+2024-08-25 04:43:09,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=28048.0, ans=0.125
+2024-08-25 04:43:17,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=28048.0, ans=0.125
+2024-08-25 04:43:32,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=28154.666666666668, ans=0.125
+2024-08-25 04:43:33,535 INFO [train.py:1114] (2/4) Epoch 3, batch 300, loss[loss=0.3625, simple_loss=0.3699, pruned_loss=0.129, ctc_loss=0.2426, over 19532.00 frames. ], tot_loss[loss=0.3489, simple_loss=0.3612, pruned_loss=0.1222, ctc_loss=0.2303, over 2999990.84 frames. ], batch size: 61, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 04:43:33,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=28154.666666666668, ans=0.004748985507246377
+2024-08-25 04:43:34,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.98 vs. limit=15.0
+2024-08-25 04:43:36,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=28154.666666666668, ans=0.125
+2024-08-25 04:43:43,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=28154.666666666668, ans=10.0
+2024-08-25 04:43:53,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=28208.0, ans=0.0
+2024-08-25 04:43:53,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28208.0, ans=0.1
+2024-08-25 04:44:07,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=28261.333333333332, ans=0.0047257971014492755
+2024-08-25 04:44:18,922 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.242e+02 2.624e+02 3.299e+02 5.169e+02, threshold=5.248e+02, percent-clipped=1.0
+2024-08-25 04:44:25,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.57 vs. limit=6.0
+2024-08-25 04:44:35,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=28421.333333333332, ans=0.0
+2024-08-25 04:44:35,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=28421.333333333332, ans=0.0
+2024-08-25 04:44:36,152 INFO [train.py:1114] (2/4) Epoch 3, batch 350, loss[loss=0.3184, simple_loss=0.3279, pruned_loss=0.1136, ctc_loss=0.2046, over 19709.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3609, pruned_loss=0.1219, ctc_loss=0.2297, over 3190487.02 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 04:44:40,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=28421.333333333332, ans=0.025
+2024-08-25 04:44:46,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=28421.333333333332, ans=0.125
+2024-08-25 04:44:58,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=28474.666666666668, ans=0.0
+2024-08-25 04:45:08,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=28528.0, ans=0.125
+2024-08-25 04:45:43,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=28581.333333333332, ans=0.125
+2024-08-25 04:45:52,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28634.666666666668, ans=0.1
+2024-08-25 04:46:55,910 INFO [train.py:1114] (2/4) Epoch 3, batch 400, loss[loss=0.3496, simple_loss=0.3661, pruned_loss=0.1192, ctc_loss=0.2365, over 19501.00 frames. ], tot_loss[loss=0.3449, simple_loss=0.3588, pruned_loss=0.1202, ctc_loss=0.2266, over 3341906.26 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 32.0
+2024-08-25 04:47:14,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=28688.0, ans=0.125
+2024-08-25 04:47:16,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=28741.333333333332, ans=0.125
+2024-08-25 04:47:22,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=28741.333333333332, ans=0.125
+2024-08-25 04:47:34,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=28741.333333333332, ans=0.0
+2024-08-25 04:48:22,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.232e+02 2.568e+02 3.025e+02 1.134e+03, threshold=5.136e+02, percent-clipped=4.0
+2024-08-25 04:48:27,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=28848.0, ans=0.125
+2024-08-25 04:48:39,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28901.333333333332, ans=0.1
+2024-08-25 04:48:42,699 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:48:48,335 INFO [train.py:1114] (2/4) Epoch 3, batch 450, loss[loss=0.3157, simple_loss=0.3458, pruned_loss=0.1041, ctc_loss=0.1935, over 19616.00 frames. ], tot_loss[loss=0.3453, simple_loss=0.359, pruned_loss=0.1204, ctc_loss=0.2269, over 3450576.39 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0
+2024-08-25 04:48:48,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=28954.666666666668, ans=0.0
+2024-08-25 04:48:51,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=28954.666666666668, ans=0.2
+2024-08-25 04:49:15,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.77 vs. limit=22.5
+2024-08-25 04:49:20,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=29061.333333333332, ans=0.125
+2024-08-25 04:49:28,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0
+2024-08-25 04:49:42,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=29168.0, ans=0.004528695652173913
+2024-08-25 04:50:09,366 INFO [train.py:1114] (2/4) Epoch 3, batch 500, loss[loss=0.3291, simple_loss=0.3593, pruned_loss=0.1086, ctc_loss=0.2044, over 19718.00 frames. ], tot_loss[loss=0.3446, simple_loss=0.3582, pruned_loss=0.1202, ctc_loss=0.2266, over 3546365.53 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0
+2024-08-25 04:50:25,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29274.666666666668, ans=0.1
+2024-08-25 04:50:27,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=29274.666666666668, ans=0.125
+2024-08-25 04:50:34,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.52 vs. limit=15.0
+2024-08-25 04:51:02,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=29381.333333333332, ans=0.125
+2024-08-25 04:51:09,141 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.756e+02 2.370e+02 2.734e+02 3.745e+02 5.336e+02, threshold=5.469e+02, percent-clipped=1.0
+2024-08-25 04:51:21,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=29434.666666666668, ans=0.09899494936611666
+2024-08-25 04:51:25,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=29434.666666666668, ans=0.125
+2024-08-25 04:51:28,441 INFO [train.py:1114] (2/4) Epoch 3, batch 550, loss[loss=0.3674, simple_loss=0.3779, pruned_loss=0.1294, ctc_loss=0.2455, over 19277.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3591, pruned_loss=0.1212, ctc_loss=0.2283, over 3609812.95 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 32.0
+2024-08-25 04:52:02,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29594.666666666668, ans=0.1
+2024-08-25 04:52:17,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=29648.0, ans=0.125
+2024-08-25 04:52:52,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29648.0, ans=0.1
+2024-08-25 04:52:53,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=29701.333333333332, ans=0.125
+2024-08-25 04:52:55,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=29701.333333333332, ans=0.0
+2024-08-25 04:53:06,030 INFO [train.py:1114] (2/4) Epoch 3, batch 600, loss[loss=0.4044, simple_loss=0.4007, pruned_loss=0.1499, ctc_loss=0.271, over 19378.00 frames. ], tot_loss[loss=0.346, simple_loss=0.3588, pruned_loss=0.121, ctc_loss=0.2277, over 3667534.10 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 32.0
+2024-08-25 04:53:49,301 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.141e+02 2.536e+02 3.031e+02 6.622e+02, threshold=5.071e+02, percent-clipped=2.0
+2024-08-25 04:53:50,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29914.666666666668, ans=0.1
+2024-08-25 04:53:55,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=29968.0, ans=0.2
+2024-08-25 04:54:05,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=30021.333333333332, ans=0.0
+2024-08-25 04:54:06,084 INFO [train.py:1114] (2/4) Epoch 3, batch 650, loss[loss=0.3284, simple_loss=0.3505, pruned_loss=0.1121, ctc_loss=0.2052, over 19773.00 frames. ], tot_loss[loss=0.3436, simple_loss=0.3571, pruned_loss=0.1199, ctc_loss=0.2259, over 3717508.31 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 32.0
+2024-08-25 04:54:12,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.42 vs. limit=12.0
+2024-08-25 04:54:18,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30074.666666666668, ans=0.125
+2024-08-25 04:54:45,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=30128.0, ans=0.125
+2024-08-25 04:55:00,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=30181.333333333332, ans=0.2
+2024-08-25 04:55:03,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=30181.333333333332, ans=0.125
+2024-08-25 04:55:13,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=30234.666666666668, ans=0.125
+2024-08-25 04:55:13,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.43 vs. limit=22.5
+2024-08-25 04:55:19,039 INFO [train.py:1114] (2/4) Epoch 3, batch 700, loss[loss=0.2915, simple_loss=0.3251, pruned_loss=0.09382, ctc_loss=0.1755, over 19726.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3576, pruned_loss=0.1199, ctc_loss=0.2261, over 3749069.74 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 32.0
+2024-08-25 04:56:31,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.88 vs. limit=15.0
+2024-08-25 04:56:33,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=30448.0, ans=0.125
+2024-08-25 04:56:33,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30448.0, ans=0.0
+2024-08-25 04:56:35,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.01 vs. limit=15.0
+2024-08-25 04:56:38,925 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.716e+02 2.292e+02 2.520e+02 3.192e+02 5.203e+02, threshold=5.040e+02, percent-clipped=1.0
+2024-08-25 04:56:39,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=30448.0, ans=0.0
+2024-08-25 04:56:55,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=30501.333333333332, ans=0.125
+2024-08-25 04:56:57,187 INFO [train.py:1114] (2/4) Epoch 3, batch 750, loss[loss=0.3515, simple_loss=0.3671, pruned_loss=0.1222, ctc_loss=0.2291, over 19501.00 frames. ], tot_loss[loss=0.3418, simple_loss=0.3564, pruned_loss=0.1188, ctc_loss=0.224, over 3774217.50 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 32.0
+2024-08-25 04:57:12,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=30608.0, ans=0.0
+2024-08-25 04:57:17,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=30608.0, ans=0.125
+2024-08-25 04:57:38,261 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:57:59,143 INFO [train.py:1114] (2/4) Epoch 3, batch 800, loss[loss=0.3255, simple_loss=0.3434, pruned_loss=0.1115, ctc_loss=0.2113, over 19404.00 frames. ], tot_loss[loss=0.3418, simple_loss=0.3563, pruned_loss=0.1188, ctc_loss=0.2238, over 3795776.34 frames. ], batch size: 48, lr: 3.73e-02, grad_scale: 32.0
+2024-08-25 04:57:59,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=30821.333333333332, ans=0.125
+2024-08-25 04:58:11,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.68 vs. limit=22.5
+2024-08-25 04:58:19,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=30874.666666666668, ans=0.00415768115942029
+2024-08-25 04:58:23,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30928.0, ans=0.125
+2024-08-25 04:58:27,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30928.0, ans=0.125
+2024-08-25 04:58:42,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 2.211e+02 2.622e+02 3.205e+02 5.257e+02, threshold=5.244e+02, percent-clipped=1.0
+2024-08-25 04:58:42,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=30981.333333333332, ans=0.125
+2024-08-25 04:59:01,994 INFO [train.py:1114] (2/4) Epoch 3, batch 850, loss[loss=0.348, simple_loss=0.3721, pruned_loss=0.1169, ctc_loss=0.2256, over 19648.00 frames. ], tot_loss[loss=0.3417, simple_loss=0.3562, pruned_loss=0.1189, ctc_loss=0.2239, over 3815617.43 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0
+2024-08-25 04:59:12,289 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.85 vs. limit=10.0
+2024-08-25 04:59:15,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=31141.333333333332, ans=0.125
+2024-08-25 04:59:32,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31194.666666666668, ans=0.1
+2024-08-25 04:59:38,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.41 vs. limit=15.0
+2024-08-25 04:59:43,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=31248.0, ans=0.2
+2024-08-25 04:59:47,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=31248.0, ans=0.125
+2024-08-25 04:59:55,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=31301.333333333332, ans=0.125
+2024-08-25 05:00:04,399 INFO [train.py:1114] (2/4) Epoch 3, batch 900, loss[loss=0.3138, simple_loss=0.3326, pruned_loss=0.106, ctc_loss=0.2073, over 19410.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3571, pruned_loss=0.1198, ctc_loss=0.2253, over 3818244.52 frames. ], batch size: 48, lr: 3.72e-02, grad_scale: 8.0
+2024-08-25 05:00:18,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=31408.0, ans=0.125
+2024-08-25 05:00:18,161 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 05:00:29,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=15.0
+2024-08-25 05:00:35,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=31461.333333333332, ans=0.125
+2024-08-25 05:00:54,419 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.296e+02 2.736e+02 3.525e+02 1.528e+03, threshold=5.472e+02, percent-clipped=4.0
+2024-08-25 05:00:55,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=31568.0, ans=0.025
+2024-08-25 05:01:08,271 INFO [train.py:1114] (2/4) Epoch 3, batch 950, loss[loss=0.3435, simple_loss=0.3543, pruned_loss=0.1197, ctc_loss=0.2334, over 19489.00 frames. ], tot_loss[loss=0.3427, simple_loss=0.3568, pruned_loss=0.1195, ctc_loss=0.2243, over 3820235.48 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 8.0
+2024-08-25 05:01:15,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.24 vs. limit=15.0
+2024-08-25 05:01:19,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=31674.666666666668, ans=0.125
+2024-08-25 05:01:30,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.79 vs. limit=22.5
+2024-08-25 05:01:42,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.39 vs. limit=22.5
+2024-08-25 05:02:01,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=31834.666666666668, ans=0.0
+2024-08-25 05:02:08,115 INFO [train.py:1114] (2/4) Epoch 3, batch 1000, loss[loss=0.3315, simple_loss=0.3453, pruned_loss=0.1149, ctc_loss=0.2198, over 19853.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3575, pruned_loss=0.1197, ctc_loss=0.2248, over 3815699.50 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 8.0
+2024-08-25 05:02:34,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=31994.666666666668, ans=0.0
+2024-08-25 05:02:56,479 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.586e+02 2.163e+02 2.492e+02 3.027e+02 5.724e+02, threshold=4.983e+02, percent-clipped=1.0
+2024-08-25 05:02:56,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32048.0, ans=0.125
+2024-08-25 05:03:04,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=32101.333333333332, ans=0.2
+2024-08-25 05:03:04,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.22 vs. limit=15.0
+2024-08-25 05:03:07,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.36 vs. limit=22.5
+2024-08-25 05:03:11,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=32101.333333333332, ans=0.0
+2024-08-25 05:03:13,747 INFO [train.py:1114] (2/4) Epoch 3, batch 1050, loss[loss=0.3558, simple_loss=0.3772, pruned_loss=0.1221, ctc_loss=0.2257, over 19842.00 frames. ], tot_loss[loss=0.3413, simple_loss=0.356, pruned_loss=0.1187, ctc_loss=0.223, over 3821755.97 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 8.0
+2024-08-25 05:03:15,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=32154.666666666668, ans=0.125
+2024-08-25 05:03:21,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=32154.666666666668, ans=0.125
+2024-08-25 05:03:29,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=32208.0, ans=0.125
+2024-08-25 05:04:11,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=32261.333333333332, ans=0.125
+2024-08-25 05:04:35,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.13 vs. limit=15.0
+2024-08-25 05:05:04,401 INFO [train.py:1114] (2/4) Epoch 3, batch 1100, loss[loss=0.3431, simple_loss=0.3525, pruned_loss=0.1201, ctc_loss=0.2337, over 19593.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3563, pruned_loss=0.119, ctc_loss=0.2238, over 3828945.65 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 8.0
+2024-08-25 05:05:05,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=32421.333333333332, ans=0.00382144927536232
+2024-08-25 05:05:42,529 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 05:05:51,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=32581.333333333332, ans=0.0037866666666666665
+2024-08-25 05:05:53,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=32581.333333333332, ans=0.0
+2024-08-25 05:05:57,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.51 vs. limit=15.0
+2024-08-25 05:06:00,571 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.823e+02 2.355e+02 2.517e+02 3.019e+02 4.945e+02, threshold=5.033e+02, percent-clipped=0.0
+2024-08-25 05:06:20,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=32634.666666666668, ans=0.125
+2024-08-25 05:06:23,043 INFO [train.py:1114] (2/4) Epoch 3, batch 1150, loss[loss=0.2916, simple_loss=0.3276, pruned_loss=0.09234, ctc_loss=0.1775, over 19602.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3552, pruned_loss=0.1184, ctc_loss=0.2226, over 3828331.19 frames. ], batch size: 52, lr: 3.69e-02, grad_scale: 8.0
+2024-08-25 05:06:38,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32741.333333333332, ans=0.1
+2024-08-25 05:06:39,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=32741.333333333332, ans=0.125
+2024-08-25 05:07:08,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=32848.0, ans=0.025
+2024-08-25 05:07:24,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.01 vs. limit=22.5
+2024-08-25 05:07:32,067 INFO [train.py:1114] (2/4) Epoch 3, batch 1200, loss[loss=0.337, simple_loss=0.3584, pruned_loss=0.115, ctc_loss=0.2136, over 19838.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3557, pruned_loss=0.1182, ctc_loss=0.2221, over 3824201.59 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 16.0
+2024-08-25 05:07:32,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32954.666666666664, ans=0.125
+2024-08-25 05:08:06,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=33061.333333333336, ans=0.125
+2024-08-25 05:08:15,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=33114.666666666664, ans=0.125
+2024-08-25 05:08:17,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=33114.666666666664, ans=0.02
+2024-08-25 05:08:19,689 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 2.128e+02 2.359e+02 2.757e+02 6.653e+02, threshold=4.718e+02, percent-clipped=2.0
+2024-08-25 05:08:38,027 INFO [train.py:1114] (2/4) Epoch 3, batch 1250, loss[loss=0.3956, simple_loss=0.3947, pruned_loss=0.144, ctc_loss=0.271, over 19526.00 frames. ], tot_loss[loss=0.3388, simple_loss=0.3552, pruned_loss=0.1172, ctc_loss=0.22, over 3841897.57 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 16.0
+2024-08-25 05:08:39,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33221.333333333336, ans=0.1
+2024-08-25 05:08:43,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=33221.333333333336, ans=0.003647536231884058
+2024-08-25 05:08:55,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=33274.666666666664, ans=0.125
+2024-08-25 05:08:57,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=33274.666666666664, ans=0.125
+2024-08-25 05:09:04,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=33328.0, ans=0.09899494936611666
+2024-08-25 05:09:14,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.15 vs. limit=22.5
+2024-08-25 05:09:14,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=33328.0, ans=0.125
+2024-08-25 05:09:17,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33381.333333333336, ans=0.125
+2024-08-25 05:09:24,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=33381.333333333336, ans=0.003612753623188405
+2024-08-25 05:09:31,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=33434.666666666664, ans=0.125
+2024-08-25 05:09:41,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=33488.0, ans=0.125
+2024-08-25 05:09:42,101 INFO [train.py:1114] (2/4) Epoch 3, batch 1300, loss[loss=0.3706, simple_loss=0.3743, pruned_loss=0.1341, ctc_loss=0.2468, over 18863.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.3549, pruned_loss=0.1174, ctc_loss=0.2203, over 3846381.76 frames. ], batch size: 76, lr: 3.67e-02, grad_scale: 16.0
+2024-08-25 05:09:43,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=33488.0, ans=0.125
+2024-08-25 05:09:49,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0
+2024-08-25 05:10:01,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33541.333333333336, ans=0.1
+2024-08-25 05:10:01,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=33541.333333333336, ans=0.125
+2024-08-25 05:10:07,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.32 vs. limit=12.0
+2024-08-25 05:10:47,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=33648.0, ans=10.0
+2024-08-25 05:10:48,155 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.161e+02 2.525e+02 2.896e+02 5.464e+02, threshold=5.050e+02, percent-clipped=3.0
+2024-08-25 05:10:59,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=33701.333333333336, ans=0.125
+2024-08-25 05:11:01,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33754.666666666664, ans=0.1
+2024-08-25 05:11:02,312 INFO [train.py:1114] (2/4) Epoch 3, batch 1350, loss[loss=0.3368, simple_loss=0.3584, pruned_loss=0.1147, ctc_loss=0.2142, over 19784.00 frames. ], tot_loss[loss=0.3377, simple_loss=0.3541, pruned_loss=0.1168, ctc_loss=0.2194, over 3857682.71 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 16.0
+2024-08-25 05:11:07,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=33754.666666666664, ans=0.0
+2024-08-25 05:12:00,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=15.0
+2024-08-25 05:12:02,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=33914.666666666664, ans=0.125
+2024-08-25 05:12:24,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=33968.0, ans=0.125
+2024-08-25 05:12:26,307 INFO [train.py:1114] (2/4) Epoch 3, batch 1400, loss[loss=0.2862, simple_loss=0.311, pruned_loss=0.09492, ctc_loss=0.179, over 19666.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3538, pruned_loss=0.1166, ctc_loss=0.2187, over 3864933.31 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 16.0
+2024-08-25 05:12:27,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=34021.333333333336, ans=0.0
+2024-08-25 05:12:31,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=34021.333333333336, ans=0.125
+2024-08-25 05:12:31,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=34021.333333333336, ans=0.025
+2024-08-25 05:12:34,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.31 vs. limit=12.0
+2024-08-25 05:12:36,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.46 vs. limit=22.5
+2024-08-25 05:12:51,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=34074.666666666664, ans=0.125
+2024-08-25 05:12:51,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0
+2024-08-25 05:13:12,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=34128.0, ans=0.125
+2024-08-25 05:13:16,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0
+2024-08-25 05:13:21,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=34181.333333333336, ans=0.0
+2024-08-25 05:13:26,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34181.333333333336, ans=0.125
+2024-08-25 05:13:26,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0
+2024-08-25 05:13:31,977 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.210e+02 2.531e+02 3.096e+02 9.067e+02, threshold=5.062e+02, percent-clipped=2.0
+2024-08-25 05:14:22,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=34234.666666666664, ans=0.0
+2024-08-25 05:14:24,484 INFO [train.py:1114] (2/4) Epoch 3, batch 1450, loss[loss=0.4005, simple_loss=0.4013, pruned_loss=0.1453, ctc_loss=0.2729, over 19668.00 frames. ], tot_loss[loss=0.337, simple_loss=0.354, pruned_loss=0.1164, ctc_loss=0.2184, over 3863076.85 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 16.0
+2024-08-25 05:14:27,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34288.0, ans=0.1
+2024-08-25 05:14:45,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.64 vs. limit=5.0
+2024-08-25 05:15:02,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=34394.666666666664, ans=0.0
+2024-08-25 05:15:28,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=34501.333333333336, ans=0.2
+2024-08-25 05:15:28,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=34501.333333333336, ans=0.125
+2024-08-25 05:15:32,908 INFO [train.py:1114] (2/4) Epoch 3, batch 1500, loss[loss=0.3468, simple_loss=0.367, pruned_loss=0.1209, ctc_loss=0.2117, over 19571.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.3541, pruned_loss=0.1164, ctc_loss=0.2183, over 3863439.76 frames. ], batch size: 57, lr: 3.65e-02, grad_scale: 16.0
+2024-08-25 05:15:35,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=34554.666666666664, ans=0.125
+2024-08-25 05:15:44,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=34608.0, ans=0.125
+2024-08-25 05:15:50,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.77 vs. limit=15.0
+2024-08-25 05:16:46,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=34714.666666666664, ans=0.003322898550724638
+2024-08-25 05:16:51,237 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.550e+02 2.151e+02 2.498e+02 3.151e+02 6.810e+02, threshold=4.996e+02, percent-clipped=2.0
+2024-08-25 05:20:00,630 INFO [train.py:1114] (2/4) Epoch 3, batch 1550, loss[loss=0.3686, simple_loss=0.3827, pruned_loss=0.1304, ctc_loss=0.2343, over 19608.00 frames. ], tot_loss[loss=0.3376, simple_loss=0.3541, pruned_loss=0.1168, ctc_loss=0.219, over 3847786.25 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 16.0
+2024-08-25 05:21:03,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=34928.0, ans=0.0
+2024-08-25 05:21:08,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=22.5
+2024-08-25 05:21:17,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=34981.333333333336, ans=0.125
+2024-08-25 05:22:04,973 INFO [train.py:1114] (2/4) Epoch 3, batch 1600, loss[loss=0.3123, simple_loss=0.3411, pruned_loss=0.1021, ctc_loss=0.1983, over 19818.00 frames. ], tot_loss[loss=0.337, simple_loss=0.3534, pruned_loss=0.1166, ctc_loss=0.2187, over 3836873.53 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 32.0
+2024-08-25 05:22:12,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35088.0, ans=0.1
+2024-08-25 05:22:25,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. 
limit=6.0 +2024-08-25 05:22:57,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35194.666666666664, ans=0.0 +2024-08-25 05:22:57,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=35194.666666666664, ans=0.0 +2024-08-25 05:23:25,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=35248.0, ans=0.125 +2024-08-25 05:23:43,076 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.193e+02 2.529e+02 3.233e+02 6.645e+02, threshold=5.059e+02, percent-clipped=2.0 +2024-08-25 05:23:49,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=35301.333333333336, ans=0.2 +2024-08-25 05:24:22,993 INFO [train.py:1114] (2/4) Epoch 3, batch 1650, loss[loss=0.358, simple_loss=0.3721, pruned_loss=0.1252, ctc_loss=0.2337, over 19662.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3536, pruned_loss=0.1166, ctc_loss=0.2189, over 3832747.85 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0 +2024-08-25 05:24:29,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35354.666666666664, ans=0.125 +2024-08-25 05:25:24,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35461.333333333336, ans=0.0 +2024-08-25 05:26:16,031 INFO [train.py:1114] (2/4) Epoch 3, batch 1700, loss[loss=0.2857, simple_loss=0.3033, pruned_loss=0.09726, ctc_loss=0.184, over 19677.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.3527, pruned_loss=0.1157, ctc_loss=0.2176, over 3846372.42 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:26:28,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=35674.666666666664, ans=0.05 +2024-08-25 05:26:30,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=35674.666666666664, ans=0.125 +2024-08-25 05:26:51,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35728.0, ans=0.1 +2024-08-25 05:27:04,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35781.333333333336, ans=0.125 +2024-08-25 05:27:09,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35781.333333333336, ans=0.1 +2024-08-25 05:27:10,195 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.342e+02 2.819e+02 3.429e+02 5.215e+02, threshold=5.637e+02, percent-clipped=1.0 +2024-08-25 05:27:23,551 INFO [train.py:1114] (2/4) Epoch 3, batch 1750, loss[loss=0.3465, simple_loss=0.3529, pruned_loss=0.1259, ctc_loss=0.2211, over 19688.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.352, pruned_loss=0.1153, ctc_loss=0.2165, over 3849674.85 frames. 
], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:27:30,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35888.0, ans=0.1 +2024-08-25 05:27:40,712 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.27 vs. limit=15.0 +2024-08-25 05:27:45,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.01 vs. limit=22.5 +2024-08-25 05:27:56,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=35994.666666666664, ans=0.2 +2024-08-25 05:28:35,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=36101.333333333336, ans=0.05 +2024-08-25 05:28:38,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=36101.333333333336, ans=0.003021449275362318 +2024-08-25 05:28:41,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=36101.333333333336, ans=0.125 +2024-08-25 05:29:16,485 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:29:19,797 INFO [train.py:1114] (2/4) Epoch 3, batch 1800, loss[loss=0.3599, simple_loss=0.3742, pruned_loss=0.1253, ctc_loss=0.2374, over 19622.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3518, pruned_loss=0.1149, ctc_loss=0.2158, over 3851657.31 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:31:29,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=36208.0, ans=0.125 +2024-08-25 05:31:29,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=36208.0, ans=0.0 +2024-08-25 05:31:58,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 2.106e+02 2.466e+02 3.299e+02 1.077e+03, threshold=4.933e+02, percent-clipped=1.0 +2024-08-25 05:32:11,728 INFO [train.py:1114] (2/4) Epoch 3, batch 1850, loss[loss=0.3398, simple_loss=0.3645, pruned_loss=0.1127, ctc_loss=0.2241, over 19591.00 frames. ], tot_loss[loss=0.3344, simple_loss=0.352, pruned_loss=0.1151, ctc_loss=0.2163, over 3855442.08 frames. ], batch size: 57, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:32:35,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=36528.0, ans=0.09899494936611666 +2024-08-25 05:33:12,859 INFO [train.py:1114] (2/4) Epoch 3, batch 1900, loss[loss=0.3141, simple_loss=0.3534, pruned_loss=0.09898, ctc_loss=0.1921, over 19664.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3525, pruned_loss=0.1147, ctc_loss=0.2154, over 3860442.41 frames. 
], batch size: 59, lr: 3.60e-02, grad_scale: 16.0 +2024-08-25 05:33:18,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=36688.0, ans=0.125 +2024-08-25 05:33:35,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=36794.666666666664, ans=0.05 +2024-08-25 05:33:36,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=36794.666666666664, ans=0.0 +2024-08-25 05:33:52,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 05:34:01,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0 +2024-08-25 05:34:05,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.713e+02 2.260e+02 2.560e+02 3.105e+02 5.689e+02, threshold=5.120e+02, percent-clipped=2.0 +2024-08-25 05:34:06,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=36901.333333333336, ans=0.0 +2024-08-25 05:34:10,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=36901.333333333336, ans=0.2 +2024-08-25 05:34:49,852 INFO [train.py:1114] (2/4) Epoch 3, batch 1950, loss[loss=0.287, simple_loss=0.3223, pruned_loss=0.0924, ctc_loss=0.1674, over 19572.00 frames. ], tot_loss[loss=0.3351, simple_loss=0.3539, pruned_loss=0.115, ctc_loss=0.2157, over 3869401.96 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 16.0 +2024-08-25 05:34:53,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=36954.666666666664, ans=0.0 +2024-08-25 05:35:58,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37008.0, ans=0.1 +2024-08-25 05:36:03,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=37008.0, ans=0.2 +2024-08-25 05:36:04,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.49 vs. limit=15.0 +2024-08-25 05:36:26,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=37008.0, ans=0.2 +2024-08-25 05:36:32,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=37061.333333333336, ans=0.125 +2024-08-25 05:36:57,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=37168.0, ans=0.95 +2024-08-25 05:36:59,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=37168.0, ans=0.125 +2024-08-25 05:37:09,009 INFO [train.py:1114] (2/4) Epoch 3, batch 2000, loss[loss=0.3015, simple_loss=0.3163, pruned_loss=0.103, ctc_loss=0.2019, over 19680.00 frames. ], tot_loss[loss=0.3374, simple_loss=0.3552, pruned_loss=0.1162, ctc_loss=0.218, over 3853775.13 frames. 
], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 05:37:09,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=15.0 +2024-08-25 05:37:12,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=37221.333333333336, ans=0.2 +2024-08-25 05:37:22,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=37274.666666666664, ans=0.125 +2024-08-25 05:37:24,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=37274.666666666664, ans=0.0 +2024-08-25 05:37:31,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=37274.666666666664, ans=0.0 +2024-08-25 05:37:34,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.51 vs. limit=10.0 +2024-08-25 05:37:53,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=37381.333333333336, ans=0.125 +2024-08-25 05:37:56,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37381.333333333336, ans=0.1 +2024-08-25 05:38:02,426 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.243e+02 2.650e+02 3.292e+02 1.299e+03, threshold=5.300e+02, percent-clipped=6.0 +2024-08-25 05:38:03,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=37434.666666666664, ans=0.125 +2024-08-25 05:38:04,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=37434.666666666664, ans=0.125 +2024-08-25 05:38:13,956 INFO [train.py:1114] (2/4) Epoch 3, batch 2050, loss[loss=0.2977, simple_loss=0.3238, pruned_loss=0.09856, ctc_loss=0.1862, over 19733.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.3544, pruned_loss=0.1163, ctc_loss=0.2182, over 3850119.84 frames. 
], batch size: 47, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 05:38:14,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=37488.0, ans=0.09899494936611666 +2024-08-25 05:38:24,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37541.333333333336, ans=0.125 +2024-08-25 05:38:33,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=37541.333333333336, ans=0.2 +2024-08-25 05:38:37,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=37594.666666666664, ans=10.0 +2024-08-25 05:38:52,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=37648.0, ans=0.125 +2024-08-25 05:38:53,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=37648.0, ans=0.2 +2024-08-25 05:39:16,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=37701.333333333336, ans=0.125 +2024-08-25 05:39:40,789 INFO [train.py:1114] (2/4) Epoch 3, batch 2100, loss[loss=0.3486, simple_loss=0.3672, pruned_loss=0.1187, ctc_loss=0.2314, over 19774.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3529, pruned_loss=0.1152, ctc_loss=0.2163, over 3857857.51 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 05:40:14,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=37808.0, ans=0.5 +2024-08-25 05:40:43,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=37861.333333333336, ans=0.125 +2024-08-25 05:40:52,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37914.666666666664, ans=0.1 +2024-08-25 05:40:58,546 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.072e+02 2.352e+02 2.718e+02 4.903e+02, threshold=4.703e+02, percent-clipped=0.0 +2024-08-25 05:41:10,107 INFO [train.py:1114] (2/4) Epoch 3, batch 2150, loss[loss=0.3098, simple_loss=0.3368, pruned_loss=0.1012, ctc_loss=0.2009, over 19867.00 frames. ], tot_loss[loss=0.3337, simple_loss=0.3519, pruned_loss=0.1147, ctc_loss=0.2153, over 3869079.99 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 05:41:18,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. 
limit=6.0 +2024-08-25 05:41:19,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=38021.333333333336, ans=0.0 +2024-08-25 05:41:22,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=38074.666666666664, ans=0.2 +2024-08-25 05:41:45,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38128.0, ans=0.1 +2024-08-25 05:41:59,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=38181.333333333336, ans=0.07 +2024-08-25 05:42:42,901 INFO [train.py:1114] (2/4) Epoch 3, batch 2200, loss[loss=0.3267, simple_loss=0.3554, pruned_loss=0.1078, ctc_loss=0.2062, over 19555.00 frames. ], tot_loss[loss=0.3341, simple_loss=0.3522, pruned_loss=0.1148, ctc_loss=0.2159, over 3867006.86 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 05:42:56,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=38288.0, ans=0.0 +2024-08-25 05:42:56,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38288.0, ans=0.1 +2024-08-25 05:43:00,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38341.333333333336, ans=0.1 +2024-08-25 05:43:18,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=38394.666666666664, ans=0.125 +2024-08-25 05:43:25,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.30 vs. limit=22.5 +2024-08-25 05:43:33,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38501.333333333336, ans=0.1 +2024-08-25 05:43:34,309 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.802e+02 2.197e+02 2.629e+02 2.994e+02 6.107e+02, threshold=5.259e+02, percent-clipped=1.0 +2024-08-25 05:43:35,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=38501.333333333336, ans=0.05 +2024-08-25 05:43:36,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=38501.333333333336, ans=0.125 +2024-08-25 05:43:41,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.06 vs. limit=22.5 +2024-08-25 05:43:51,075 INFO [train.py:1114] (2/4) Epoch 3, batch 2250, loss[loss=0.2826, simple_loss=0.3211, pruned_loss=0.08741, ctc_loss=0.1733, over 19614.00 frames. ], tot_loss[loss=0.3336, simple_loss=0.3518, pruned_loss=0.1147, ctc_loss=0.2153, over 3866779.90 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 05:44:39,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=38661.333333333336, ans=6.0 +2024-08-25 05:45:15,073 INFO [train.py:1114] (2/4) Epoch 3, batch 2300, loss[loss=0.3082, simple_loss=0.3331, pruned_loss=0.1028, ctc_loss=0.1944, over 19505.00 frames. 
], tot_loss[loss=0.333, simple_loss=0.3509, pruned_loss=0.1145, ctc_loss=0.2153, over 3860084.14 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 05:45:17,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=38821.333333333336, ans=0.125 +2024-08-25 05:45:37,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38874.666666666664, ans=0.1 +2024-08-25 05:47:15,680 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.630e+02 2.233e+02 2.542e+02 3.133e+02 7.552e+02, threshold=5.083e+02, percent-clipped=3.0 +2024-08-25 05:47:27,923 INFO [train.py:1114] (2/4) Epoch 3, batch 2350, loss[loss=0.3775, simple_loss=0.3851, pruned_loss=0.1337, ctc_loss=0.2567, over 19681.00 frames. ], tot_loss[loss=0.3316, simple_loss=0.3502, pruned_loss=0.1138, ctc_loss=0.2135, over 3862694.85 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 05:47:52,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=39194.666666666664, ans=0.2 +2024-08-25 05:48:01,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.09 vs. limit=6.0 +2024-08-25 05:48:10,008 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:48:24,941 INFO [train.py:1114] (2/4) Epoch 3, batch 2400, loss[loss=0.3767, simple_loss=0.3862, pruned_loss=0.133, ctc_loss=0.253, over 19295.00 frames. ], tot_loss[loss=0.3354, simple_loss=0.3532, pruned_loss=0.1156, ctc_loss=0.2163, over 3858731.18 frames. ], batch size: 71, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 05:48:36,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.40 vs. limit=15.0 +2024-08-25 05:48:38,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=39408.0, ans=0.0 +2024-08-25 05:48:38,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=39408.0, ans=0.125 +2024-08-25 05:48:49,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-08-25 05:48:51,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=39461.333333333336, ans=0.025 +2024-08-25 05:49:10,296 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.241e+02 2.672e+02 3.161e+02 5.607e+02, threshold=5.344e+02, percent-clipped=4.0 +2024-08-25 05:49:26,435 INFO [train.py:1114] (2/4) Epoch 3, batch 2450, loss[loss=0.4669, simple_loss=0.4119, pruned_loss=0.1872, ctc_loss=0.3686, over 12663.00 frames. ], tot_loss[loss=0.3456, simple_loss=0.3589, pruned_loss=0.1209, ctc_loss=0.2265, over 3731513.20 frames. ], batch size: 140, lr: 3.53e-02, grad_scale: 32.0 +2024-08-25 05:49:39,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=18.45 vs. 
limit=15.0 +2024-08-25 05:49:47,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=39674.666666666664, ans=0.2 +2024-08-25 05:49:48,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.69 vs. limit=15.0 +2024-08-25 05:49:49,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39728.0, ans=0.1 +2024-08-25 05:50:01,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=39781.333333333336, ans=0.09899494936611666 +2024-08-25 05:50:50,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.57 vs. limit=12.0 +2024-08-25 05:51:05,719 INFO [train.py:1114] (2/4) Epoch 4, batch 0, loss[loss=0.3285, simple_loss=0.3448, pruned_loss=0.114, ctc_loss=0.2105, over 19424.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3448, pruned_loss=0.114, ctc_loss=0.2105, over 19424.00 frames. ], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-25 05:51:05,719 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-25 05:51:35,404 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2629, simple_loss=0.3337, pruned_loss=0.07032, ctc_loss=0.1284, over 944034.00 frames. +2024-08-25 05:51:35,405 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB +2024-08-25 05:51:59,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=39936.0, ans=0.0 +2024-08-25 05:52:29,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40042.666666666664, ans=0.125 +2024-08-25 05:52:41,516 INFO [train.py:1114] (2/4) Epoch 4, batch 50, loss[loss=0.2997, simple_loss=0.3187, pruned_loss=0.103, ctc_loss=0.1867, over 19707.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3577, pruned_loss=0.1199, ctc_loss=0.2257, over 844557.52 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-25 05:52:47,054 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.793e+02 2.147e+02 2.483e+02 2.920e+02 4.932e+02, threshold=4.967e+02, percent-clipped=0.0 +2024-08-25 05:53:08,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.98 vs. limit=15.0 +2024-08-25 05:53:24,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=40202.666666666664, ans=0.0 +2024-08-25 05:53:28,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=15.0 +2024-08-25 05:53:48,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=40256.0, ans=0.0 +2024-08-25 05:54:05,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.61 vs. limit=15.0 +2024-08-25 05:54:08,155 INFO [train.py:1114] (2/4) Epoch 4, batch 100, loss[loss=0.316, simple_loss=0.3419, pruned_loss=0.1072, ctc_loss=0.1888, over 19712.00 frames. 
], tot_loss[loss=0.3366, simple_loss=0.3546, pruned_loss=0.1157, ctc_loss=0.2178, over 1498880.38 frames. ], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-25 05:54:14,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=40362.666666666664, ans=0.0 +2024-08-25 05:54:32,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=40362.666666666664, ans=0.002095072463768116 +2024-08-25 05:54:36,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=40416.0, ans=0.0 +2024-08-25 05:54:37,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=40416.0, ans=0.025 +2024-08-25 05:54:41,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=15.0 +2024-08-25 05:55:17,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40469.333333333336, ans=0.1 +2024-08-25 05:55:38,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40576.0, ans=0.125 +2024-08-25 05:55:39,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40576.0, ans=0.125 +2024-08-25 05:55:51,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=40576.0, ans=0.125 +2024-08-25 05:55:55,482 INFO [train.py:1114] (2/4) Epoch 4, batch 150, loss[loss=0.2547, simple_loss=0.2945, pruned_loss=0.07853, ctc_loss=0.1447, over 19732.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.349, pruned_loss=0.111, ctc_loss=0.2096, over 2028245.15 frames. 
], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-25 05:55:56,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.643e+02 2.033e+02 2.286e+02 2.661e+02 4.118e+02, threshold=4.571e+02, percent-clipped=0.0 +2024-08-25 05:56:17,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=40682.666666666664, ans=0.2 +2024-08-25 05:56:23,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=40736.0, ans=0.04949747468305833 +2024-08-25 05:56:36,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40736.0, ans=0.125 +2024-08-25 05:56:52,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=40842.666666666664, ans=0.0 +2024-08-25 05:56:57,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=40842.666666666664, ans=0.125 +2024-08-25 05:57:00,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40842.666666666664, ans=0.1 +2024-08-25 05:57:03,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40896.0, ans=0.125 +2024-08-25 05:57:04,737 INFO [train.py:1114] (2/4) Epoch 4, batch 200, loss[loss=0.3923, simple_loss=0.3891, pruned_loss=0.145, ctc_loss=0.2634, over 18389.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3469, pruned_loss=0.11, ctc_loss=0.2076, over 2436426.00 frames. ], batch size: 86, lr: 3.28e-02, grad_scale: 32.0 +2024-08-25 05:57:25,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=40896.0, ans=0.125 +2024-08-25 05:57:30,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=40949.333333333336, ans=0.2 +2024-08-25 05:57:35,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=40949.333333333336, ans=0.125 +2024-08-25 05:57:35,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.60 vs. limit=15.0 +2024-08-25 05:57:40,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.12 vs. limit=6.0 +2024-08-25 05:57:49,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=41002.666666666664, ans=0.125 +2024-08-25 05:58:31,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0 +2024-08-25 05:59:03,066 INFO [train.py:1114] (2/4) Epoch 4, batch 250, loss[loss=0.357, simple_loss=0.3653, pruned_loss=0.128, ctc_loss=0.2318, over 19404.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3461, pruned_loss=0.1092, ctc_loss=0.2058, over 2756072.04 frames. 
], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-25 05:59:04,091 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.652e+02 2.098e+02 2.387e+02 2.939e+02 4.251e+02, threshold=4.774e+02, percent-clipped=0.0 +2024-08-25 05:59:17,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0 +2024-08-25 05:59:25,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=41216.0, ans=0.0019095652173913048 +2024-08-25 05:59:48,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=41322.666666666664, ans=0.125 +2024-08-25 05:59:49,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41322.666666666664, ans=0.125 +2024-08-25 05:59:57,056 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:00:12,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-08-25 06:00:14,189 INFO [train.py:1114] (2/4) Epoch 4, batch 300, loss[loss=0.3781, simple_loss=0.3903, pruned_loss=0.1345, ctc_loss=0.2423, over 19523.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3461, pruned_loss=0.1093, ctc_loss=0.2059, over 3001487.19 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-25 06:00:14,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=41429.333333333336, ans=0.125 +2024-08-25 06:00:32,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=41482.666666666664, ans=0.025 +2024-08-25 06:00:33,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41482.666666666664, ans=0.0 +2024-08-25 06:00:39,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=41536.0, ans=0.0 +2024-08-25 06:00:56,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.22 vs. limit=15.0 +2024-08-25 06:00:59,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=41536.0, ans=0.1 +2024-08-25 06:01:02,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=41536.0, ans=0.125 +2024-08-25 06:01:13,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=41589.333333333336, ans=22.5 +2024-08-25 06:01:36,652 INFO [train.py:1114] (2/4) Epoch 4, batch 350, loss[loss=0.2878, simple_loss=0.3165, pruned_loss=0.09426, ctc_loss=0.1765, over 19751.00 frames. ], tot_loss[loss=0.3245, simple_loss=0.3469, pruned_loss=0.1097, ctc_loss=0.2065, over 3191000.54 frames. 
], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-25 06:01:37,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 2.143e+02 2.517e+02 2.887e+02 6.595e+02, threshold=5.034e+02, percent-clipped=1.0 +2024-08-25 06:01:45,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-08-25 06:02:16,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=41856.0, ans=0.125 +2024-08-25 06:02:17,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.42 vs. limit=15.0 +2024-08-25 06:02:19,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=41856.0, ans=0.0 +2024-08-25 06:02:38,803 INFO [train.py:1114] (2/4) Epoch 4, batch 400, loss[loss=0.3024, simple_loss=0.3412, pruned_loss=0.0943, ctc_loss=0.1875, over 19489.00 frames. ], tot_loss[loss=0.3231, simple_loss=0.3459, pruned_loss=0.109, ctc_loss=0.2052, over 3342031.38 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-25 06:03:02,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=42016.0, ans=0.125 +2024-08-25 06:03:06,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=42016.0, ans=0.125 +2024-08-25 06:03:10,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=42069.333333333336, ans=0.125 +2024-08-25 06:03:25,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-08-25 06:04:04,053 INFO [train.py:1114] (2/4) Epoch 4, batch 450, loss[loss=0.2943, simple_loss=0.3351, pruned_loss=0.09189, ctc_loss=0.1742, over 19613.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3458, pruned_loss=0.109, ctc_loss=0.205, over 3449509.33 frames. ], batch size: 55, lr: 3.25e-02, grad_scale: 32.0 +2024-08-25 06:04:06,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.107e+02 2.479e+02 2.897e+02 5.564e+02, threshold=4.958e+02, percent-clipped=2.0 +2024-08-25 06:04:13,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42229.333333333336, ans=0.1 +2024-08-25 06:05:05,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42389.333333333336, ans=0.125 +2024-08-25 06:05:29,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=42442.666666666664, ans=0.0 +2024-08-25 06:05:32,235 INFO [train.py:1114] (2/4) Epoch 4, batch 500, loss[loss=0.3203, simple_loss=0.3553, pruned_loss=0.1041, ctc_loss=0.1925, over 19651.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3439, pruned_loss=0.1079, ctc_loss=0.2032, over 3545956.63 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 32.0 +2024-08-25 06:05:39,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.42 vs. 
limit=10.0 +2024-08-25 06:05:45,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=12.51 vs. limit=15.0 +2024-08-25 06:05:48,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42549.333333333336, ans=0.125 +2024-08-25 06:06:31,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.41 vs. limit=22.5 +2024-08-25 06:06:41,100 INFO [train.py:1114] (2/4) Epoch 4, batch 550, loss[loss=0.3367, simple_loss=0.3618, pruned_loss=0.1133, ctc_loss=0.2126, over 19266.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3444, pruned_loss=0.1082, ctc_loss=0.2035, over 3607040.34 frames. ], batch size: 71, lr: 3.24e-02, grad_scale: 16.0 +2024-08-25 06:06:44,774 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.027e+02 2.416e+02 2.881e+02 5.051e+02, threshold=4.833e+02, percent-clipped=1.0 +2024-08-25 06:07:03,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42816.0, ans=0.125 +2024-08-25 06:07:36,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42922.666666666664, ans=0.1 +2024-08-25 06:07:36,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=42922.666666666664, ans=0.0 +2024-08-25 06:07:41,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=42976.0, ans=0.0 +2024-08-25 06:07:47,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.50 vs. limit=15.0 +2024-08-25 06:07:50,587 INFO [train.py:1114] (2/4) Epoch 4, batch 600, loss[loss=0.3651, simple_loss=0.3733, pruned_loss=0.1306, ctc_loss=0.239, over 19372.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3446, pruned_loss=0.1082, ctc_loss=0.2032, over 3664404.59 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 16.0 +2024-08-25 06:08:00,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=43029.333333333336, ans=0.2 +2024-08-25 06:08:05,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=43082.666666666664, ans=0.05 +2024-08-25 06:08:07,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=43082.666666666664, ans=0.1 +2024-08-25 06:08:13,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.73 vs. limit=15.0 +2024-08-25 06:08:16,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=43136.0, ans=0.2 +2024-08-25 06:08:58,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43242.666666666664, ans=0.125 +2024-08-25 06:09:00,704 INFO [train.py:1114] (2/4) Epoch 4, batch 650, loss[loss=0.3049, simple_loss=0.3351, pruned_loss=0.0993, ctc_loss=0.1903, over 19765.00 frames. 
], tot_loss[loss=0.3204, simple_loss=0.3439, pruned_loss=0.108, ctc_loss=0.2024, over 3714588.22 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 16.0 +2024-08-25 06:09:12,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=43296.0, ans=0.0014573913043478253 +2024-08-25 06:09:15,857 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.456e+02 2.140e+02 2.544e+02 3.023e+02 7.017e+02, threshold=5.088e+02, percent-clipped=9.0 +2024-08-25 06:09:23,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43349.333333333336, ans=0.1 +2024-08-25 06:09:59,498 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.461e-02 +2024-08-25 06:10:03,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.97 vs. limit=15.0 +2024-08-25 06:10:07,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43509.333333333336, ans=0.125 +2024-08-25 06:10:15,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=43509.333333333336, ans=0.125 +2024-08-25 06:10:18,931 INFO [train.py:1114] (2/4) Epoch 4, batch 700, loss[loss=0.2728, simple_loss=0.3148, pruned_loss=0.08298, ctc_loss=0.1619, over 19721.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3449, pruned_loss=0.1085, ctc_loss=0.2036, over 3746358.54 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 16.0 +2024-08-25 06:10:22,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=43562.666666666664, ans=0.125 +2024-08-25 06:10:30,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.97 vs. limit=15.0 +2024-08-25 06:10:36,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=43616.0, ans=0.2 +2024-08-25 06:11:10,052 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:11:12,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=43776.0, ans=0.125 +2024-08-25 06:11:20,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.94 vs. limit=6.0 +2024-08-25 06:11:23,840 INFO [train.py:1114] (2/4) Epoch 4, batch 750, loss[loss=0.284, simple_loss=0.3264, pruned_loss=0.08768, ctc_loss=0.1659, over 19518.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3438, pruned_loss=0.1076, ctc_loss=0.2019, over 3773031.37 frames. ], batch size: 54, lr: 3.22e-02, grad_scale: 16.0 +2024-08-25 06:11:28,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.94 vs. 
limit=15.0 +2024-08-25 06:11:28,686 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.530e+02 2.141e+02 2.481e+02 2.931e+02 4.472e+02, threshold=4.962e+02, percent-clipped=0.0 +2024-08-25 06:11:29,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. limit=6.0 +2024-08-25 06:11:38,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=43829.333333333336, ans=0.0013414492753623195 +2024-08-25 06:11:42,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=43882.666666666664, ans=0.0 +2024-08-25 06:11:43,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=43882.666666666664, ans=0.001329855072463769 +2024-08-25 06:11:44,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=43882.666666666664, ans=0.05 +2024-08-25 06:12:01,865 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:12:19,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.01 vs. limit=15.0 +2024-08-25 06:12:23,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=44042.666666666664, ans=0.2 +2024-08-25 06:12:28,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.25 vs. limit=6.0 +2024-08-25 06:12:29,317 INFO [train.py:1114] (2/4) Epoch 4, batch 800, loss[loss=0.2731, simple_loss=0.3024, pruned_loss=0.08879, ctc_loss=0.1654, over 19794.00 frames. ], tot_loss[loss=0.3183, simple_loss=0.3428, pruned_loss=0.1069, ctc_loss=0.2001, over 3794584.01 frames. ], batch size: 49, lr: 3.21e-02, grad_scale: 32.0 +2024-08-25 06:12:38,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.96 vs. limit=6.0 +2024-08-25 06:12:49,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=44149.333333333336, ans=0.1 +2024-08-25 06:13:16,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=44309.333333333336, ans=0.0 +2024-08-25 06:13:28,057 INFO [train.py:1114] (2/4) Epoch 4, batch 850, loss[loss=0.3174, simple_loss=0.3493, pruned_loss=0.1037, ctc_loss=0.1954, over 19663.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3419, pruned_loss=0.1061, ctc_loss=0.1987, over 3814262.28 frames. 
], batch size: 59, lr: 3.21e-02, grad_scale: 32.0 +2024-08-25 06:13:29,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=44362.666666666664, ans=0.0 +2024-08-25 06:13:31,265 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.074e+02 2.402e+02 2.888e+02 5.555e+02, threshold=4.804e+02, percent-clipped=1.0 +2024-08-25 06:13:33,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=44362.666666666664, ans=0.0 +2024-08-25 06:13:41,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=15.0 +2024-08-25 06:13:43,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0 +2024-08-25 06:13:48,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.38 vs. limit=22.5 +2024-08-25 06:13:48,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44416.0, ans=0.1 +2024-08-25 06:13:50,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.32 vs. limit=15.0 +2024-08-25 06:14:14,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=44522.666666666664, ans=22.5 +2024-08-25 06:14:17,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=44576.0, ans=0.0 +2024-08-25 06:14:21,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=44576.0, ans=0.025 +2024-08-25 06:14:32,299 INFO [train.py:1114] (2/4) Epoch 4, batch 900, loss[loss=0.3589, simple_loss=0.3491, pruned_loss=0.1363, ctc_loss=0.2406, over 19806.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.3423, pruned_loss=0.1065, ctc_loss=0.1995, over 3817793.98 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 32.0 +2024-08-25 06:14:33,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44629.333333333336, ans=0.1 +2024-08-25 06:15:14,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=44789.333333333336, ans=0.2 +2024-08-25 06:15:14,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=44789.333333333336, ans=0.125 +2024-08-25 06:15:26,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=44842.666666666664, ans=0.125 +2024-08-25 06:15:33,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.15 vs. limit=22.5 +2024-08-25 06:15:38,547 INFO [train.py:1114] (2/4) Epoch 4, batch 950, loss[loss=0.2742, simple_loss=0.3109, pruned_loss=0.08751, ctc_loss=0.1563, over 19494.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3428, pruned_loss=0.1067, ctc_loss=0.2002, over 3819918.60 frames. 
+2024-08-25 06:15:42,141 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.101e+02 2.364e+02 2.735e+02 6.196e+02, threshold=4.728e+02, percent-clipped=2.0
+2024-08-25 06:15:48,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=44896.0, ans=0.0011095652173913045
+2024-08-25 06:16:02,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=44949.333333333336, ans=0.0010979710144927539
+2024-08-25 06:16:10,007 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:16:42,342 INFO [train.py:1114] (2/4) Epoch 4, batch 1000, loss[loss=0.2864, simple_loss=0.322, pruned_loss=0.09009, ctc_loss=0.1766, over 19849.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3435, pruned_loss=0.1071, ctc_loss=0.2008, over 3814838.08 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 32.0
+2024-08-25 06:16:46,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.45 vs. limit=15.0
+2024-08-25 06:17:13,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.98 vs. limit=22.5
+2024-08-25 06:17:35,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=45322.666666666664, ans=0.09899494936611666
+2024-08-25 06:17:47,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.99 vs. limit=22.5
+2024-08-25 06:18:10,624 INFO [train.py:1114] (2/4) Epoch 4, batch 1050, loss[loss=0.3423, simple_loss=0.3633, pruned_loss=0.1168, ctc_loss=0.2191, over 19836.00 frames. ], tot_loss[loss=0.318, simple_loss=0.3426, pruned_loss=0.1067, ctc_loss=0.2002, over 3822005.55 frames. ], batch size: 57, lr: 3.19e-02, grad_scale: 16.0
+2024-08-25 06:18:24,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.09 vs. limit=22.5
+2024-08-25 06:18:26,186 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.982e+02 2.200e+02 2.634e+02 5.388e+02, threshold=4.401e+02, percent-clipped=1.0
+2024-08-25 06:18:35,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=45482.666666666664, ans=0.09899494936611666
+2024-08-25 06:18:55,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=45536.0, ans=0.125
+2024-08-25 06:18:59,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=45536.0, ans=0.07
+2024-08-25 06:19:16,167 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0
+2024-08-25 06:19:20,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=45642.666666666664, ans=0.2
+2024-08-25 06:19:21,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=45642.666666666664, ans=0.05
+2024-08-25 06:19:29,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=45642.666666666664, ans=0.0
+2024-08-25 06:19:36,304 INFO [train.py:1114] (2/4) Epoch 4, batch 1100, loss[loss=0.3009, simple_loss=0.3219, pruned_loss=0.1016, ctc_loss=0.1915, over 19606.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3421, pruned_loss=0.1063, ctc_loss=0.1995, over 3829079.95 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-25 06:19:57,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45749.333333333336, ans=0.1
+2024-08-25 06:20:04,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=45749.333333333336, ans=0.5
+2024-08-25 06:20:21,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=45856.0, ans=0.125
+2024-08-25 06:20:27,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45856.0, ans=0.125
+2024-08-25 06:20:32,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=45856.0, ans=0.0
+2024-08-25 06:20:51,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45962.666666666664, ans=0.125
+2024-08-25 06:20:52,258 INFO [train.py:1114] (2/4) Epoch 4, batch 1150, loss[loss=0.2674, simple_loss=0.3127, pruned_loss=0.08047, ctc_loss=0.1528, over 19592.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3423, pruned_loss=0.1067, ctc_loss=0.2004, over 3830223.75 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-25 06:20:57,032 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.742e+02 2.122e+02 2.390e+02 2.706e+02 4.199e+02, threshold=4.779e+02, percent-clipped=0.0
+2024-08-25 06:21:12,932 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=2.542e-03
+2024-08-25 06:21:20,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=46016.0, ans=0.0008660869565217388
+2024-08-25 06:21:25,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46069.333333333336, ans=0.125
+2024-08-25 06:21:26,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=46069.333333333336, ans=0.125
+2024-08-25 06:21:35,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0
+2024-08-25 06:21:53,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.08 vs. limit=22.5
+2024-08-25 06:21:59,986 INFO [train.py:1114] (2/4) Epoch 4, batch 1200, loss[loss=0.291, simple_loss=0.3327, pruned_loss=0.08995, ctc_loss=0.1735, over 19845.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3429, pruned_loss=0.1066, ctc_loss=0.2007, over 3825950.41 frames. ], batch size: 57, lr: 3.17e-02, grad_scale: 32.0
+2024-08-25 06:22:14,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=46229.333333333336, ans=0.0
+2024-08-25 06:22:30,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=46336.0, ans=0.09899494936611666
+2024-08-25 06:22:40,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=46336.0, ans=0.0
+2024-08-25 06:22:53,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=46389.333333333336, ans=0.0
+2024-08-25 06:23:00,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46389.333333333336, ans=0.1
+2024-08-25 06:23:14,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46442.666666666664, ans=0.125
+2024-08-25 06:23:17,968 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:23:21,321 INFO [train.py:1114] (2/4) Epoch 4, batch 1250, loss[loss=0.3405, simple_loss=0.3626, pruned_loss=0.1153, ctc_loss=0.2197, over 19543.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3432, pruned_loss=0.1063, ctc_loss=0.2, over 3843771.03 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 32.0
+2024-08-25 06:23:26,220 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.962e+02 2.225e+02 2.468e+02 3.508e+02, threshold=4.451e+02, percent-clipped=0.0
+2024-08-25 06:24:26,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=46656.0, ans=0.0007269565217391302
+2024-08-25 06:24:48,929 INFO [train.py:1114] (2/4) Epoch 4, batch 1300, loss[loss=0.313, simple_loss=0.3415, pruned_loss=0.1037, ctc_loss=0.1926, over 18907.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3421, pruned_loss=0.106, ctc_loss=0.1994, over 3847068.18 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0
+2024-08-25 06:24:55,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=15.0
+2024-08-25 06:25:05,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=46816.0, ans=0.125
+2024-08-25 06:25:52,894 INFO [train.py:1114] (2/4) Epoch 4, batch 1350, loss[loss=0.3131, simple_loss=0.3433, pruned_loss=0.1023, ctc_loss=0.1958, over 19758.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.3405, pruned_loss=0.1047, ctc_loss=0.1966, over 3858266.94 frames. ], batch size: 54, lr: 3.16e-02, grad_scale: 32.0
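Each `scaling.py:214` line prints the current value (`ans`) of a ScheduledFloat hyperparameter, evaluated at the current `batch_count`: dropout probabilities, skip rates and similar knobs are scheduled as piecewise-linear functions of training progress rather than fixed. A minimal sketch of such a schedule follows; the knot points below are invented for illustration and are not the ones used in this run:

```python
class PiecewiseLinear:
    """Batch-count-based schedule like the ScheduledFloat values logged above."""

    def __init__(self, *points):
        # points: (batch_count, value) knots, linearly interpolated between
        self.points = sorted(points)

    def __call__(self, x: float) -> float:
        pts = self.points
        if x <= pts[0][0]:
            return pts[0][1]
        if x >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= x <= x1:
                return y0 + (y1 - y0) * (x - x0) / (x1 - x0)

# e.g. a dropout that decays from 0.3 to 0.1 over the first 20k batches:
dropout_schedule = PiecewiseLinear((0, 0.3), (20000, 0.1))
print(dropout_schedule(44416.0))  # -> 0.1 once past the last knot
```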
+2024-08-25 06:25:54,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=47029.333333333336, ans=0.2
+2024-08-25 06:26:07,744 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 2.269e+02 2.560e+02 3.229e+02 4.886e+02, threshold=5.120e+02, percent-clipped=5.0
+2024-08-25 06:26:37,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=47082.666666666664, ans=0.125
+2024-08-25 06:26:59,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=47189.333333333336, ans=0.125
+2024-08-25 06:27:15,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.45 vs. limit=15.0
+2024-08-25 06:27:20,739 INFO [train.py:1114] (2/4) Epoch 4, batch 1400, loss[loss=0.2384, simple_loss=0.2817, pruned_loss=0.06995, ctc_loss=0.1379, over 19705.00 frames. ], tot_loss[loss=0.3138, simple_loss=0.3399, pruned_loss=0.1045, ctc_loss=0.1965, over 3865064.39 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0
+2024-08-25 06:27:27,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.65 vs. limit=15.0
+2024-08-25 06:27:27,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.25 vs. limit=15.0
+2024-08-25 06:27:52,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=47349.333333333336, ans=0.125
+2024-08-25 06:27:57,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47349.333333333336, ans=0.1
+2024-08-25 06:28:02,386 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=2.522e-03
+2024-08-25 06:28:27,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=47456.0, ans=0.125
+2024-08-25 06:28:43,719 INFO [train.py:1114] (2/4) Epoch 4, batch 1450, loss[loss=0.3301, simple_loss=0.3592, pruned_loss=0.109, ctc_loss=0.2076, over 19676.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.341, pruned_loss=0.1054, ctc_loss=0.1979, over 3863127.45 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0
+2024-08-25 06:28:45,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=47562.666666666664, ans=0.0005298550724637686
+2024-08-25 06:28:48,581 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.026e+02 2.327e+02 2.659e+02 4.329e+02, threshold=4.654e+02, percent-clipped=0.0
+2024-08-25 06:29:02,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.05 vs. limit=10.0
+2024-08-25 06:29:13,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=47669.333333333336, ans=0.0
+2024-08-25 06:29:21,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47722.666666666664, ans=0.1
+2024-08-25 06:29:44,349 INFO [train.py:1114] (2/4) Epoch 4, batch 1500, loss[loss=0.3336, simple_loss=0.3616, pruned_loss=0.1124, ctc_loss=0.2019, over 19574.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.3415, pruned_loss=0.1053, ctc_loss=0.1978, over 3862632.16 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 16.0
+2024-08-25 06:29:50,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47829.333333333336, ans=0.125
+2024-08-25 06:30:34,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=48042.666666666664, ans=0.0
+2024-08-25 06:31:38,010 INFO [train.py:1114] (2/4) Epoch 4, batch 1550, loss[loss=0.3627, simple_loss=0.3799, pruned_loss=0.126, ctc_loss=0.2338, over 19574.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.3414, pruned_loss=0.1054, ctc_loss=0.198, over 3847553.78 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 16.0
+2024-08-25 06:31:49,980 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 2.013e+02 2.262e+02 2.770e+02 1.090e+03, threshold=4.525e+02, percent-clipped=1.0
+2024-08-25 06:32:14,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=48149.333333333336, ans=0.0
+2024-08-25 06:32:43,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=48309.333333333336, ans=0.125
+2024-08-25 06:33:15,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=48309.333333333336, ans=0.025
+2024-08-25 06:33:26,254 INFO [train.py:1114] (2/4) Epoch 4, batch 1600, loss[loss=0.3508, simple_loss=0.3732, pruned_loss=0.1197, ctc_loss=0.2223, over 19829.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3417, pruned_loss=0.1058, ctc_loss=0.1985, over 3837485.55 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0
+2024-08-25 06:33:28,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=48362.666666666664, ans=0.025
+2024-08-25 06:33:33,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48362.666666666664, ans=0.125
+2024-08-25 06:33:36,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=48362.666666666664, ans=0.0
+2024-08-25 06:33:37,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48416.0, ans=0.1
+2024-08-25 06:33:48,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=48416.0, ans=0.125
+2024-08-25 06:33:50,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48416.0, ans=0.125
+2024-08-25 06:33:53,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48416.0, ans=0.1
+2024-08-25 06:33:58,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48416.0, ans=0.1
+2024-08-25 06:34:40,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=48522.666666666664, ans=0.2
+2024-08-25 06:35:09,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48576.0, ans=0.1
+2024-08-25 06:35:15,037 INFO [train.py:1114] (2/4) Epoch 4, batch 1650, loss[loss=0.3435, simple_loss=0.3656, pruned_loss=0.117, ctc_loss=0.2187, over 19647.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3423, pruned_loss=0.1065, ctc_loss=0.1999, over 3833438.53 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0
+2024-08-25 06:35:21,183 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.567e+02 2.079e+02 2.506e+02 2.996e+02 5.422e+02, threshold=5.011e+02, percent-clipped=2.0
+2024-08-25 06:35:23,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=48629.333333333336, ans=0.0002979710144927535
+2024-08-25 06:35:27,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=48629.333333333336, ans=0.0
+2024-08-25 06:35:50,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48682.666666666664, ans=0.125
+2024-08-25 06:35:58,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=12.0
+2024-08-25 06:36:32,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=48842.666666666664, ans=0.025
+2024-08-25 06:36:37,448 INFO [train.py:1114] (2/4) Epoch 4, batch 1700, loss[loss=0.2844, simple_loss=0.3075, pruned_loss=0.09548, ctc_loss=0.1757, over 19645.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3416, pruned_loss=0.1057, ctc_loss=0.1983, over 3847276.12 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0
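The `scaling.py:1024` Whitening lines compare a per-module whitening metric against a limit ("metric=X vs. limit=Y"); the metric is 1.0 when the module's output covariance is proportional to the identity and grows with the spread of its eigenvalues, so values approaching the limit trigger a corrective penalty. A rough sketch of such a metric, assuming a plain eigenvalue-based computation rather than k2's exact scaling.py code:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Approximate the 'metric' printed by the Whitening log lines.

    x: (num_frames, num_channels) activations; channels are split into
    num_groups groups as in the logs (num_groups=1 or 4 above).
    """
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / num_frames  # per-group covariance
    eigs = torch.linalg.eigvalsh(cov)                      # eigenvalues >= 0
    # mean squared eigenvalue over squared mean eigenvalue: 1.0 if white
    return ((eigs ** 2).mean() / (eigs.mean() ** 2)).item()
```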
+2024-08-25 06:36:41,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=48896.0, ans=0.2
+2024-08-25 06:37:07,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48949.333333333336, ans=0.125
+2024-08-25 06:37:22,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=49002.666666666664, ans=0.00021681159420289947
+2024-08-25 06:37:25,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=49002.666666666664, ans=0.0
+2024-08-25 06:37:28,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.17 vs. limit=15.0
+2024-08-25 06:38:15,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49109.333333333336, ans=0.125
+2024-08-25 06:38:25,878 INFO [train.py:1114] (2/4) Epoch 4, batch 1750, loss[loss=0.3025, simple_loss=0.3156, pruned_loss=0.1059, ctc_loss=0.1941, over 19645.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3402, pruned_loss=0.105, ctc_loss=0.1971, over 3852181.27 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0
+2024-08-25 06:38:29,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.03 vs. limit=22.5
+2024-08-25 06:38:31,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.88 vs. limit=15.0
+2024-08-25 06:38:33,085 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.628e+02 1.987e+02 2.278e+02 2.713e+02 5.908e+02, threshold=4.555e+02, percent-clipped=1.0
+2024-08-25 06:38:41,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=49216.0, ans=0.125
+2024-08-25 06:39:31,715 INFO [train.py:1114] (2/4) Epoch 4, batch 1800, loss[loss=0.309, simple_loss=0.3442, pruned_loss=0.1003, ctc_loss=0.1828, over 19611.00 frames. ], tot_loss[loss=0.3139, simple_loss=0.3399, pruned_loss=0.1047, ctc_loss=0.1963, over 3852946.72 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0
+2024-08-25 06:40:10,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=49482.666666666664, ans=0.2
+2024-08-25 06:40:19,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=49536.0, ans=0.2
+2024-08-25 06:40:19,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=49536.0, ans=0.025
+2024-08-25 06:40:26,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49536.0, ans=0.125
+2024-08-25 06:40:43,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=49642.666666666664, ans=0.125
+2024-08-25 06:40:54,735 INFO [train.py:1114] (2/4) Epoch 4, batch 1850, loss[loss=0.3356, simple_loss=0.3547, pruned_loss=0.1141, ctc_loss=0.2208, over 19561.00 frames. ], tot_loss[loss=0.3133, simple_loss=0.3395, pruned_loss=0.1044, ctc_loss=0.1958, over 3856583.60 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0
+2024-08-25 06:41:01,667 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 2.149e+02 2.307e+02 2.574e+02 4.619e+02, threshold=4.614e+02, percent-clipped=1.0
+2024-08-25 06:41:01,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49696.0, ans=0.125
+2024-08-25 06:41:05,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=49696.0, ans=0.025
+2024-08-25 06:41:10,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49749.333333333336, ans=0.1
+2024-08-25 06:41:15,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=49749.333333333336, ans=0.0
+2024-08-25 06:41:29,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49802.666666666664, ans=0.125
+2024-08-25 06:41:50,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49909.333333333336, ans=0.1
+2024-08-25 06:41:59,170 INFO [train.py:1114] (2/4) Epoch 4, batch 1900, loss[loss=0.3183, simple_loss=0.3549, pruned_loss=0.1026, ctc_loss=0.1916, over 19638.00 frames. ], tot_loss[loss=0.3135, simple_loss=0.34, pruned_loss=0.1045, ctc_loss=0.1955, over 3861482.76 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 32.0
+2024-08-25 06:42:40,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=50016.0, ans=10.0
+2024-08-25 06:42:41,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=50016.0, ans=0.125
+2024-08-25 06:43:08,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.63 vs. limit=15.0
+2024-08-25 06:43:11,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=50122.666666666664, ans=0.125
+2024-08-25 06:43:39,882 INFO [train.py:1114] (2/4) Epoch 4, batch 1950, loss[loss=0.3125, simple_loss=0.3423, pruned_loss=0.1041, ctc_loss=0.1864, over 19573.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3416, pruned_loss=0.1049, ctc_loss=0.1963, over 3870492.34 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 32.0
+2024-08-25 06:43:45,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.759e+02 2.065e+02 2.259e+02 2.635e+02 4.732e+02, threshold=4.517e+02, percent-clipped=1.0
+2024-08-25 06:43:47,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=50229.333333333336, ans=0.125
+2024-08-25 06:43:58,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50282.666666666664, ans=0.1
+2024-08-25 06:44:10,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=50336.0, ans=0.2
+2024-08-25 06:44:14,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50389.333333333336, ans=0.1
+2024-08-25 06:44:23,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=50389.333333333336, ans=0.07
+2024-08-25 06:44:48,806 INFO [train.py:1114] (2/4) Epoch 4, batch 2000, loss[loss=0.2673, simple_loss=0.2954, pruned_loss=0.08693, ctc_loss=0.1633, over 19668.00 frames. ], tot_loss[loss=0.3151, simple_loss=0.3416, pruned_loss=0.1051, ctc_loss=0.1964, over 3853499.94 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0
+2024-08-25 06:44:50,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0
+2024-08-25 06:45:49,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.07 vs. limit=15.0
+2024-08-25 06:46:19,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=50709.333333333336, ans=0.0
+2024-08-25 06:46:22,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.66 vs. limit=10.0
+2024-08-25 06:46:34,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.37 vs. limit=15.0
+2024-08-25 06:46:35,053 INFO [train.py:1114] (2/4) Epoch 4, batch 2050, loss[loss=0.2525, simple_loss=0.2934, pruned_loss=0.07676, ctc_loss=0.1453, over 19717.00 frames. ], tot_loss[loss=0.3158, simple_loss=0.3415, pruned_loss=0.1056, ctc_loss=0.1976, over 3850611.27 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 32.0
+2024-08-25 06:46:45,615 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 2.046e+02 2.338e+02 2.720e+02 4.537e+02, threshold=4.675e+02, percent-clipped=1.0
+2024-08-25 06:47:00,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=50816.0, ans=0.125
+2024-08-25 06:47:09,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.47 vs. limit=15.0
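The `train.py:1114` lines report four loss columns: `simple_loss` and `pruned_loss` from the pruned-RNN-T objective, an auxiliary `ctc_loss`, and the combined `loss`. The logged numbers are consistent with fixed weights of 0.5 on the simple loss and 0.2 on the CTC loss (e.g. 0.5 × 0.3423 + 0.1065 + 0.2 × 0.1995 = 0.3176, matching the "Epoch 4, batch 900" line); treating those weights as inferred from the log rather than confirmed configuration:

```python
# Weights inferred from the logged values in this file; they would correspond
# to hypothetical --simple-loss-scale / --ctc-loss-scale settings that are
# not stated explicitly anywhere in this excerpt.
SIMPLE_LOSS_SCALE = 0.5
CTC_LOSS_SCALE = 0.2

def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float) -> float:
    """Reproduce the `loss=` column from the other three columns."""
    return SIMPLE_LOSS_SCALE * simple_loss + pruned_loss + CTC_LOSS_SCALE * ctc_loss

# Also matches the epoch-5 validation line below:
# 0.5 * 0.3259 + 0.06691 + 0.2 * 0.1221 = 0.2543
```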
+2024-08-25 06:47:12,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=50869.333333333336, ans=0.2
+2024-08-25 06:47:15,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=50922.666666666664, ans=0.07
+2024-08-25 06:47:16,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=50922.666666666664, ans=0.0
+2024-08-25 06:47:28,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=50922.666666666664, ans=0.125
+2024-08-25 06:47:31,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0
+2024-08-25 06:47:31,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=50976.0, ans=0.0
+2024-08-25 06:47:47,558 INFO [train.py:1114] (2/4) Epoch 4, batch 2100, loss[loss=0.3288, simple_loss=0.3478, pruned_loss=0.1126, ctc_loss=0.2116, over 19763.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3408, pruned_loss=0.105, ctc_loss=0.1967, over 3858330.99 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0
+2024-08-25 06:47:52,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=51029.333333333336, ans=0.2
+2024-08-25 06:47:53,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51029.333333333336, ans=0.1
+2024-08-25 06:48:07,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=51029.333333333336, ans=0.0
+2024-08-25 06:48:21,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.37 vs. limit=22.5
+2024-08-25 06:48:26,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=51082.666666666664, ans=0.2
+2024-08-25 06:48:42,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=51136.0, ans=0.025
+2024-08-25 06:48:42,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=51136.0, ans=0.2
+2024-08-25 06:48:44,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=51136.0, ans=0.05
+2024-08-25 06:48:57,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51136.0, ans=0.1
+2024-08-25 06:48:57,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51136.0, ans=0.125
+2024-08-25 06:48:59,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=51136.0, ans=0.0
+2024-08-25 06:49:02,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=51189.333333333336, ans=0.125
+2024-08-25 06:49:38,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=51242.666666666664, ans=0.2
+2024-08-25 06:49:45,556 INFO [train.py:1114] (2/4) Epoch 4, batch 2150, loss[loss=0.2835, simple_loss=0.3242, pruned_loss=0.08776, ctc_loss=0.1683, over 19858.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3397, pruned_loss=0.1044, ctc_loss=0.1955, over 3869328.31 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0
+2024-08-25 06:49:48,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=51296.0, ans=0.0
+2024-08-25 06:49:54,452 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.035e+02 2.305e+02 2.639e+02 4.596e+02, threshold=4.610e+02, percent-clipped=0.0
+2024-08-25 06:50:13,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.66 vs. limit=22.5
+2024-08-25 06:50:13,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=51349.333333333336, ans=0.125
+2024-08-25 06:50:24,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=51402.666666666664, ans=0.125
+2024-08-25 06:51:12,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=15.0
+2024-08-25 06:51:15,400 INFO [train.py:1114] (2/4) Epoch 4, batch 2200, loss[loss=0.2867, simple_loss=0.3244, pruned_loss=0.08946, ctc_loss=0.1752, over 19574.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3392, pruned_loss=0.1039, ctc_loss=0.1947, over 3868226.85 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0
+2024-08-25 06:51:21,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=51562.666666666664, ans=0.125
+2024-08-25 06:51:44,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=51616.0, ans=0.0
+2024-08-25 06:52:00,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=51722.666666666664, ans=0.2
+2024-08-25 06:52:07,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=51722.666666666664, ans=0.125
+2024-08-25 06:52:14,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=51776.0, ans=0.125
+2024-08-25 06:52:24,911 INFO [train.py:1114] (2/4) Epoch 4, batch 2250, loss[loss=0.3165, simple_loss=0.3478, pruned_loss=0.1039, ctc_loss=0.1936, over 19617.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3399, pruned_loss=0.1044, ctc_loss=0.1956, over 3867770.48 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0
+2024-08-25 06:52:32,001 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.164e+02 2.622e+02 3.263e+02 6.940e+02, threshold=5.245e+02, percent-clipped=2.0
+2024-08-25 06:52:40,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.34 vs. limit=12.0
+2024-08-25 06:52:57,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=51936.0, ans=10.0
+2024-08-25 06:53:07,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=51989.333333333336, ans=0.025
+2024-08-25 06:53:16,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=52042.666666666664, ans=0.0
+2024-08-25 06:53:30,937 INFO [train.py:1114] (2/4) Epoch 4, batch 2300, loss[loss=0.2919, simple_loss=0.3267, pruned_loss=0.09497, ctc_loss=0.168, over 19514.00 frames. ], tot_loss[loss=0.3127, simple_loss=0.3389, pruned_loss=0.1041, ctc_loss=0.1954, over 3861902.31 frames. ], batch size: 49, lr: 3.06e-02, grad_scale: 32.0
+2024-08-25 06:53:43,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.47 vs. limit=12.0
+2024-08-25 06:54:07,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52202.666666666664, ans=0.1
+2024-08-25 06:54:09,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52256.0, ans=0.1
+2024-08-25 06:54:26,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=52256.0, ans=0.125
+2024-08-25 06:54:29,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=52309.333333333336, ans=0.0
+2024-08-25 06:54:43,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0
+2024-08-25 06:54:53,365 INFO [train.py:1114] (2/4) Epoch 4, batch 2350, loss[loss=0.3478, simple_loss=0.3687, pruned_loss=0.1189, ctc_loss=0.2226, over 19663.00 frames. ], tot_loss[loss=0.3116, simple_loss=0.3381, pruned_loss=0.1036, ctc_loss=0.1947, over 3863950.36 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0
+2024-08-25 06:54:58,715 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.121e+02 2.497e+02 3.048e+02 4.745e+02, threshold=4.995e+02, percent-clipped=0.0
+2024-08-25 06:55:03,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0
+2024-08-25 06:55:04,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52362.666666666664, ans=0.125
+2024-08-25 06:55:13,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=52416.0, ans=0.125
+2024-08-25 06:58:44,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=52469.333333333336, ans=0.125
+2024-08-25 07:06:41,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52576.0, ans=0.1
+2024-08-25 07:06:42,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=52576.0, ans=0.125
+2024-08-25 07:07:01,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=52576.0, ans=0.0
+2024-08-25 07:07:21,839 INFO [train.py:1114] (2/4) Epoch 4, batch 2400, loss[loss=0.336, simple_loss=0.3519, pruned_loss=0.1159, ctc_loss=0.2206, over 19289.00 frames. ], tot_loss[loss=0.3141, simple_loss=0.3405, pruned_loss=0.1046, ctc_loss=0.1963, over 3858454.32 frames. ], batch size: 71, lr: 3.05e-02, grad_scale: 32.0
+2024-08-25 07:10:21,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=52629.333333333336, ans=0.2
+2024-08-25 07:12:26,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=52682.666666666664, ans=0.0
+2024-08-25 07:18:51,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52736.0, ans=0.125
+2024-08-25 07:19:50,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=52736.0, ans=0.09899494936611666
+2024-08-25 07:25:12,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=52789.333333333336, ans=0.09899494936611666
+2024-08-25 07:34:58,787 INFO [train.py:1114] (2/4) Epoch 4, batch 2450, loss[loss=0.3494, simple_loss=0.3485, pruned_loss=0.125, ctc_loss=0.2506, over 13795.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.346, pruned_loss=0.1097, ctc_loss=0.206, over 3732216.21 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0
+2024-08-25 07:36:27,110 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.096e+02 2.355e+02 2.735e+02 5.246e+02, threshold=4.710e+02, percent-clipped=1.0
+2024-08-25 07:42:13,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=53002.666666666664, ans=0.2
+2024-08-25 07:43:06,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=53002.666666666664, ans=0.125
+2024-08-25 07:43:24,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.80 vs. limit=15.0
+2024-08-25 07:46:30,896 INFO [train.py:1114] (2/4) Epoch 5, batch 0, loss[loss=0.304, simple_loss=0.3263, pruned_loss=0.1033, ctc_loss=0.1875, over 19811.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3263, pruned_loss=0.1033, ctc_loss=0.1875, over 19811.00 frames. ], batch size: 49, lr: 2.83e-02, grad_scale: 32.0
+2024-08-25 07:46:30,897 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 07:49:02,113 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.2543, simple_loss=0.3259, pruned_loss=0.06691, ctc_loss=0.1221, over 944034.00 frames.
+2024-08-25 07:49:02,114 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB
+2024-08-25 07:55:35,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.37 vs. limit=15.0
+2024-08-25 07:57:00,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53210.666666666664, ans=0.125
+2024-08-25 07:59:08,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=53264.0, ans=0.0
+2024-08-25 07:59:09,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.58 vs. limit=22.5
+2024-08-25 07:59:09,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53264.0, ans=0.125
+2024-08-25 07:59:25,679 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:01:56,944 INFO [train.py:1114] (2/4) Epoch 5, batch 50, loss[loss=0.2555, simple_loss=0.2989, pruned_loss=0.07684, ctc_loss=0.1463, over 19692.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3392, pruned_loss=0.104, ctc_loss=0.1967, over 843425.80 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0
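At the epoch boundary above the learning rate steps from 3.05e-02 (end of epoch 4) to 2.83e-02 (start of epoch 5), on top of the slow within-epoch decay visible in every `lr:` field. This shape is consistent with k2/icefall's Eden scheduler, which decays the LR smoothly in the batch count and discretely per epoch; a sketch follows, where `base_lr`, `lr_batches` and `lr_epochs` are assumed typical defaults, not values read from this log:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Sketch of an Eden-style schedule: lr shrinks smoothly with the number
    of batches and drops at each epoch boundary (as seen above,
    3.05e-02 at the end of epoch 4 -> 2.83e-02 at the start of epoch 5)."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```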
+2024-08-25 08:03:51,551 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 1.984e+02 2.202e+02 2.522e+02 4.045e+02, threshold=4.404e+02, percent-clipped=0.0
+2024-08-25 08:04:17,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=53424.0, ans=0.0
+2024-08-25 08:06:10,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=53530.666666666664, ans=10.0
+2024-08-25 08:06:33,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-25 08:06:33,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53530.666666666664, ans=0.125
+2024-08-25 08:06:36,240 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:06:39,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-25 08:07:17,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=53584.0, ans=0.0
+2024-08-25 08:07:22,869 INFO [train.py:1114] (2/4) Epoch 5, batch 100, loss[loss=0.3155, simple_loss=0.333, pruned_loss=0.1071, ctc_loss=0.2094, over 19718.00 frames. ], tot_loss[loss=0.3153, simple_loss=0.3426, pruned_loss=0.1046, ctc_loss=0.1974, over 1498153.10 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0
+2024-08-25 08:07:44,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.03 vs. limit=22.5
+2024-08-25 08:08:16,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=53690.666666666664, ans=0.0
+2024-08-25 08:08:47,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.96 vs. limit=15.0
+2024-08-25 08:09:45,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=53797.333333333336, ans=0.125
+2024-08-25 08:10:03,778 INFO [train.py:1114] (2/4) Epoch 5, batch 150, loss[loss=0.3058, simple_loss=0.3216, pruned_loss=0.1063, ctc_loss=0.1936, over 19685.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.3384, pruned_loss=0.102, ctc_loss=0.1924, over 2028120.81 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0
+2024-08-25 08:10:08,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=53904.0, ans=0.125
+2024-08-25 08:10:28,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53904.0, ans=0.125
+2024-08-25 08:10:40,322 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 2.115e+02 2.389e+02 2.764e+02 4.531e+02, threshold=4.777e+02, percent-clipped=1.0
+2024-08-25 08:11:16,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54064.0, ans=0.125
+2024-08-25 08:11:53,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0
+2024-08-25 08:12:01,152 INFO [train.py:1114] (2/4) Epoch 5, batch 200, loss[loss=0.3726, simple_loss=0.3729, pruned_loss=0.1365, ctc_loss=0.2486, over 18092.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.3352, pruned_loss=0.1002, ctc_loss=0.1889, over 2435948.10 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0
+2024-08-25 08:13:57,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.10 vs. limit=15.0
+2024-08-25 08:13:58,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54170.666666666664, ans=0.125
+2024-08-25 08:13:58,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=54170.666666666664, ans=0.0
+2024-08-25 08:14:34,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=54277.333333333336, ans=0.125
+2024-08-25 08:15:07,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=54330.666666666664, ans=0.0
+2024-08-25 08:15:58,937 INFO [train.py:1114] (2/4) Epoch 5, batch 250, loss[loss=0.3259, simple_loss=0.3524, pruned_loss=0.11, ctc_loss=0.1984, over 19349.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3349, pruned_loss=0.09989, ctc_loss=0.1884, over 2756553.01 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0
+2024-08-25 08:16:47,933 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 1.969e+02 2.164e+02 2.373e+02 3.326e+02, threshold=4.328e+02, percent-clipped=0.0
+2024-08-25 08:17:04,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=54597.333333333336, ans=0.2
+2024-08-25 08:17:04,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54597.333333333336, ans=0.125
+2024-08-25 08:17:06,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=54597.333333333336, ans=0.2
+2024-08-25 08:17:26,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=54650.666666666664, ans=0.125
+2024-08-25 08:17:32,566 INFO [train.py:1114] (2/4) Epoch 5, batch 300, loss[loss=0.318, simple_loss=0.3411, pruned_loss=0.1076, ctc_loss=0.1991, over 19510.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3337, pruned_loss=0.09925, ctc_loss=0.1872, over 3000980.14 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0
+2024-08-25 08:17:53,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.93 vs. limit=10.0
+2024-08-25 08:18:12,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=54810.666666666664, ans=0.0
+2024-08-25 08:18:27,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=54917.333333333336, ans=0.09899494936611666
+2024-08-25 08:18:35,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=54917.333333333336, ans=0.09899494936611666
+2024-08-25 08:18:38,526 INFO [train.py:1114] (2/4) Epoch 5, batch 350, loss[loss=0.3103, simple_loss=0.3302, pruned_loss=0.1051, ctc_loss=0.2008, over 19759.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3339, pruned_loss=0.09899, ctc_loss=0.186, over 3190823.52 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 16.0
+2024-08-25 08:18:47,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54970.666666666664, ans=0.1
+2024-08-25 08:18:51,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54970.666666666664, ans=0.1
+2024-08-25 08:19:01,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=55024.0, ans=0.0
+2024-08-25 08:19:10,792 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.967e+02 2.265e+02 2.794e+02 4.039e+02, threshold=4.529e+02, percent-clipped=0.0
+2024-08-25 08:19:18,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55077.333333333336, ans=0.0
+2024-08-25 08:19:30,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.97 vs. limit=22.5
+2024-08-25 08:19:47,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.69 vs. limit=15.0
+2024-08-25 08:19:47,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.02 vs. limit=22.5
+2024-08-25 08:19:52,016 INFO [train.py:1114] (2/4) Epoch 5, batch 400, loss[loss=0.3009, simple_loss=0.34, pruned_loss=0.09407, ctc_loss=0.1843, over 19486.00 frames. ], tot_loss[loss=0.3024, simple_loss=0.3336, pruned_loss=0.09854, ctc_loss=0.1852, over 3342865.79 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0
+2024-08-25 08:19:55,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.89 vs. limit=6.0
+2024-08-25 08:19:59,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.16 vs. limit=15.0
+2024-08-25 08:20:21,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=55344.0, ans=0.2
+2024-08-25 08:20:27,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=55344.0, ans=0.2
+2024-08-25 08:21:07,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=55450.666666666664, ans=0.0
+2024-08-25 08:21:10,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=55450.666666666664, ans=0.125
+2024-08-25 08:21:24,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55450.666666666664, ans=0.1
+2024-08-25 08:21:27,047 INFO [train.py:1114] (2/4) Epoch 5, batch 450, loss[loss=0.3207, simple_loss=0.352, pruned_loss=0.1055, ctc_loss=0.1961, over 19619.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3337, pruned_loss=0.09862, ctc_loss=0.1852, over 3452048.21 frames. ], batch size: 55, lr: 2.79e-02, grad_scale: 32.0
+2024-08-25 08:21:27,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55504.0, ans=0.0
+2024-08-25 08:21:27,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=55504.0, ans=0.0
+2024-08-25 08:21:29,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=55504.0, ans=0.125
+2024-08-25 08:21:47,862 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.640e+02 2.008e+02 2.249e+02 2.774e+02 4.428e+02, threshold=4.498e+02, percent-clipped=0.0
+2024-08-25 08:21:55,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=55610.666666666664, ans=0.125
+2024-08-25 08:22:06,190 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:22:58,231 INFO [train.py:1114] (2/4) Epoch 5, batch 500, loss[loss=0.3169, simple_loss=0.3493, pruned_loss=0.1048, ctc_loss=0.1869, over 19677.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3323, pruned_loss=0.09786, ctc_loss=0.1837, over 3548439.66 frames. ], batch size: 63, lr: 2.79e-02, grad_scale: 32.0
+2024-08-25 08:23:29,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55824.0, ans=0.1
+2024-08-25 08:23:59,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=55930.666666666664, ans=0.2
+2024-08-25 08:24:21,970 INFO [train.py:1114] (2/4) Epoch 5, batch 550, loss[loss=0.3263, simple_loss=0.352, pruned_loss=0.1077, ctc_loss=0.2132, over 19298.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3324, pruned_loss=0.09796, ctc_loss=0.1841, over 3609573.60 frames. ], batch size: 71, lr: 2.78e-02, grad_scale: 32.0
+2024-08-25 08:24:47,080 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.533e+02 1.991e+02 2.247e+02 2.867e+02 6.260e+02, threshold=4.494e+02, percent-clipped=1.0
+2024-08-25 08:24:58,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.90 vs. limit=10.0
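The `grad_scale` field in the train.py lines (32.0, occasionally dropping to 16.0 and recovering) is the dynamic loss scale used for mixed-precision training: when scaled gradients overflow, the scaler halves the scale and skips that step, then grows the scale back after a run of clean steps. With stock PyTorch AMP this looks roughly as follows; the actual recipe wraps the same mechanism in its own training loop:

```python
import torch

scaler = torch.cuda.amp.GradScaler()  # scaler.get_scale() is the grad_scale logged above

def training_step(model, optimizer, features, targets, loss_fn):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = loss_fn(model(features), targets)
    scaler.scale(loss).backward()
    scaler.step(optimizer)   # skipped internally if gradients overflowed
    scaler.update()          # halves the scale on overflow, grows it back later
    return loss.detach()
```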
limit=10.0 +2024-08-25 08:25:37,726 INFO [train.py:1114] (2/4) Epoch 5, batch 600, loss[loss=0.3608, simple_loss=0.379, pruned_loss=0.1248, ctc_loss=0.2327, over 19362.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3333, pruned_loss=0.09812, ctc_loss=0.1842, over 3667641.63 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 32.0 +2024-08-25 08:26:13,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.07 vs. limit=15.0 +2024-08-25 08:26:25,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=56464.0, ans=0.2 +2024-08-25 08:26:27,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=56464.0, ans=0.125 +2024-08-25 08:26:47,455 INFO [train.py:1114] (2/4) Epoch 5, batch 650, loss[loss=0.3036, simple_loss=0.3337, pruned_loss=0.09983, ctc_loss=0.1846, over 19763.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3328, pruned_loss=0.09826, ctc_loss=0.1843, over 3717753.44 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:27:04,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0 +2024-08-25 08:27:09,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56624.0, ans=0.125 +2024-08-25 08:27:13,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.957e+02 2.352e+02 2.685e+02 4.359e+02, threshold=4.704e+02, percent-clipped=0.0 +2024-08-25 08:27:29,690 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 08:27:36,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.25 vs. limit=22.5 +2024-08-25 08:28:10,098 INFO [train.py:1114] (2/4) Epoch 5, batch 700, loss[loss=0.2856, simple_loss=0.3184, pruned_loss=0.0925, ctc_loss=0.1695, over 19738.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3331, pruned_loss=0.09842, ctc_loss=0.1845, over 3749019.41 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:28:27,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-08-25 08:28:27,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.07 vs. limit=15.0 +2024-08-25 08:28:28,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0 +2024-08-25 08:28:34,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.97 vs. limit=22.5 +2024-08-25 08:29:41,378 INFO [train.py:1114] (2/4) Epoch 5, batch 750, loss[loss=0.2903, simple_loss=0.3322, pruned_loss=0.09044, ctc_loss=0.1688, over 19513.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.332, pruned_loss=0.09754, ctc_loss=0.1826, over 3774483.61 frames. 
], batch size: 54, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:30:11,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57104.0, ans=0.125 +2024-08-25 08:30:20,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=57157.333333333336, ans=0.125 +2024-08-25 08:30:38,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=57157.333333333336, ans=0.2 +2024-08-25 08:30:40,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 2.099e+02 2.472e+02 3.181e+02 5.803e+02, threshold=4.945e+02, percent-clipped=2.0 +2024-08-25 08:30:50,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57210.666666666664, ans=0.125 +2024-08-25 08:32:05,745 INFO [train.py:1114] (2/4) Epoch 5, batch 800, loss[loss=0.275, simple_loss=0.305, pruned_loss=0.08878, ctc_loss=0.1688, over 19784.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3312, pruned_loss=0.09691, ctc_loss=0.1818, over 3797225.22 frames. ], batch size: 49, lr: 2.76e-02, grad_scale: 32.0 +2024-08-25 08:32:10,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=57370.666666666664, ans=0.5 +2024-08-25 08:32:50,791 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.59 vs. limit=15.0 +2024-08-25 08:33:20,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57530.666666666664, ans=0.125 +2024-08-25 08:33:37,744 INFO [train.py:1114] (2/4) Epoch 5, batch 850, loss[loss=0.2701, simple_loss=0.3234, pruned_loss=0.07727, ctc_loss=0.1559, over 19643.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.33, pruned_loss=0.09609, ctc_loss=0.1805, over 3816385.78 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0 +2024-08-25 08:33:42,799 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.691e-03 +2024-08-25 08:34:02,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=15.0 +2024-08-25 08:34:26,560 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.963e+02 2.197e+02 2.544e+02 4.330e+02, threshold=4.395e+02, percent-clipped=0.0 +2024-08-25 08:34:51,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57797.333333333336, ans=0.125 +2024-08-25 08:34:57,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=57850.666666666664, ans=0.125 +2024-08-25 08:35:08,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=15.0 +2024-08-25 08:35:17,364 INFO [train.py:1114] (2/4) Epoch 5, batch 900, loss[loss=0.2671, simple_loss=0.297, pruned_loss=0.08647, ctc_loss=0.1608, over 19431.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3305, pruned_loss=0.09662, ctc_loss=0.1813, over 3820078.46 frames. 
+2024-08-25 08:35:17,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=57904.0, ans=0.05
+2024-08-25 08:35:24,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.17 vs. limit=6.0
+2024-08-25 08:35:32,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57904.0, ans=0.125
+2024-08-25 08:35:40,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=57957.333333333336, ans=0.05
+2024-08-25 08:35:48,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57957.333333333336, ans=0.125
+2024-08-25 08:36:22,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.68 vs. limit=15.0
+2024-08-25 08:36:23,300 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:36:28,057 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.89 vs. limit=15.0
+2024-08-25 08:36:37,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58117.333333333336, ans=0.1
+2024-08-25 08:36:41,314 INFO [train.py:1114] (2/4) Epoch 5, batch 950, loss[loss=0.2964, simple_loss=0.3175, pruned_loss=0.1004, ctc_loss=0.1866, over 19505.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3302, pruned_loss=0.09639, ctc_loss=0.181, over 3818664.89 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0
+2024-08-25 08:36:48,817 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.99 vs. limit=15.0
+2024-08-25 08:36:49,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=58170.666666666664, ans=0.125
+2024-08-25 08:36:51,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=58224.0, ans=0.125
+2024-08-25 08:36:56,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.30 vs. limit=22.5
+2024-08-25 08:37:02,458 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.615e+02 2.021e+02 2.236e+02 2.607e+02 6.234e+02, threshold=4.471e+02, percent-clipped=1.0
+2024-08-25 08:37:15,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=58277.333333333336, ans=0.025
+2024-08-25 08:37:23,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=58330.666666666664, ans=0.2
+2024-08-25 08:37:44,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=58384.0, ans=0.125
+2024-08-25 08:37:49,069 INFO [train.py:1114] (2/4) Epoch 5, batch 1000, loss[loss=0.2682, simple_loss=0.3161, pruned_loss=0.07939, ctc_loss=0.1538, over 19850.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.332, pruned_loss=0.09767, ctc_loss=0.1832, over 3815651.64 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0
+2024-08-25 08:38:16,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=58490.666666666664, ans=0.125
+2024-08-25 08:38:42,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58544.0, ans=0.1
+2024-08-25 08:39:00,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58597.333333333336, ans=0.1
+2024-08-25 08:39:20,351 INFO [train.py:1114] (2/4) Epoch 5, batch 1050, loss[loss=0.3409, simple_loss=0.3614, pruned_loss=0.1169, ctc_loss=0.2162, over 19839.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3316, pruned_loss=0.09782, ctc_loss=0.1833, over 3822385.69 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0
+2024-08-25 08:39:27,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58704.0, ans=0.1
+2024-08-25 08:39:28,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=58704.0, ans=0.125
+2024-08-25 08:39:30,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=58757.333333333336, ans=0.0
+2024-08-25 08:39:34,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.12 vs. limit=22.5
+2024-08-25 08:39:41,248 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.929e+02 2.228e+02 2.594e+02 4.447e+02, threshold=4.456e+02, percent-clipped=0.0
+2024-08-25 08:39:46,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=58757.333333333336, ans=0.125
+2024-08-25 08:40:22,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=58864.0, ans=0.0
+2024-08-25 08:40:38,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=58917.333333333336, ans=0.125
+2024-08-25 08:40:42,204 INFO [train.py:1114] (2/4) Epoch 5, batch 1100, loss[loss=0.2735, simple_loss=0.314, pruned_loss=0.08521, ctc_loss=0.1563, over 19577.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3319, pruned_loss=0.0983, ctc_loss=0.1844, over 3830046.76 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0
+2024-08-25 08:40:57,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.99 vs. limit=22.5
+2024-08-25 08:41:02,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59024.0, ans=0.1
+2024-08-25 08:41:34,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59130.666666666664, ans=0.1
+2024-08-25 08:41:35,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59130.666666666664, ans=0.1
+2024-08-25 08:41:51,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=59184.0, ans=0.125
+2024-08-25 08:41:54,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0
+2024-08-25 08:42:06,690 INFO [train.py:1114] (2/4) Epoch 5, batch 1150, loss[loss=0.2968, simple_loss=0.3328, pruned_loss=0.09475, ctc_loss=0.1786, over 19601.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3317, pruned_loss=0.0981, ctc_loss=0.1842, over 3829557.59 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 32.0
+2024-08-25 08:42:12,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=59237.333333333336, ans=0.125
+2024-08-25 08:42:19,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=59237.333333333336, ans=0.125
+2024-08-25 08:42:27,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=59290.666666666664, ans=0.09899494936611666
+2024-08-25 08:42:30,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=59290.666666666664, ans=0.125
+2024-08-25 08:42:38,170 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 2.022e+02 2.244e+02 2.636e+02 4.087e+02, threshold=4.489e+02, percent-clipped=0.0
+2024-08-25 08:42:57,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=59344.0, ans=0.0
+2024-08-25 08:42:59,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=59344.0, ans=0.125
+2024-08-25 08:43:33,326 INFO [train.py:1114] (2/4) Epoch 5, batch 1200, loss[loss=0.2901, simple_loss=0.3365, pruned_loss=0.0889, ctc_loss=0.1645, over 19839.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3327, pruned_loss=0.09848, ctc_loss=0.1851, over 3825952.48 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0
+2024-08-25 08:43:36,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=59504.0, ans=0.125
+2024-08-25 08:43:52,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=59504.0, ans=0.125
+2024-08-25 08:43:54,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0
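The WARNING [optim.py:487] lines report a five-number summary (min, quartiles, max) of recently observed gradient norms together with the clipping threshold. In every such line here the threshold is exactly Clipping_scale (2.0) times the logged median, e.g. 2.0 * 2.352e+02 = 4.704e+02, so the clipping appears to be keyed to a running median of gradient norms. A rough illustration of that relationship, inferred from the logged numbers rather than taken from the actual optim.py code:

```python
import torch

def clipping_stats(recent_grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    # Five-number summary matching the "grad-norm quartiles a b c d e" format.
    q = torch.quantile(recent_grad_norms,
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    # In these logs, threshold == clipping_scale * median.
    threshold = clipping_scale * q[2]
    percent_clipped = 100.0 * (recent_grad_norms > threshold).float().mean()
    return q, threshold, percent_clipped
```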
+2024-08-25 08:43:59,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=15.0
+2024-08-25 08:44:14,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59610.666666666664, ans=0.125
+2024-08-25 08:44:16,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=59610.666666666664, ans=0.0
+2024-08-25 08:44:24,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=59664.0, ans=0.125
+2024-08-25 08:44:27,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=59664.0, ans=0.0
+2024-08-25 08:44:55,327 INFO [train.py:1114] (2/4) Epoch 5, batch 1250, loss[loss=0.2701, simple_loss=0.3208, pruned_loss=0.07936, ctc_loss=0.152, over 19528.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3325, pruned_loss=0.0977, ctc_loss=0.1837, over 3843642.12 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0
+2024-08-25 08:45:03,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=59770.666666666664, ans=0.125
+2024-08-25 08:45:21,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 1.906e+02 2.098e+02 2.362e+02 4.005e+02, threshold=4.196e+02, percent-clipped=0.0
+2024-08-25 08:45:27,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=59877.333333333336, ans=0.0
+2024-08-25 08:45:53,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=59984.0, ans=0.2
+2024-08-25 08:45:56,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.60 vs. limit=22.5
+2024-08-25 08:46:03,575 INFO [train.py:1114] (2/4) Epoch 5, batch 1300, loss[loss=0.3041, simple_loss=0.3356, pruned_loss=0.09962, ctc_loss=0.1834, over 18890.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3315, pruned_loss=0.09712, ctc_loss=0.1829, over 3846186.35 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0
+2024-08-25 08:46:27,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60090.666666666664, ans=0.1
+2024-08-25 08:46:30,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60090.666666666664, ans=0.125
+2024-08-25 08:46:36,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.80 vs. limit=15.0
+2024-08-25 08:47:00,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60197.333333333336, ans=0.0
+2024-08-25 08:47:06,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60197.333333333336, ans=0.125
+2024-08-25 08:47:20,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0
+2024-08-25 08:47:27,091 INFO [train.py:1114] (2/4) Epoch 5, batch 1350, loss[loss=0.2882, simple_loss=0.3332, pruned_loss=0.0879, ctc_loss=0.1687, over 19786.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3304, pruned_loss=0.09626, ctc_loss=0.1811, over 3857362.45 frames. ], batch size: 54, lr: 2.71e-02, grad_scale: 32.0
+2024-08-25 08:47:31,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=60304.0, ans=10.0
+2024-08-25 08:48:06,335 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.950e+02 2.204e+02 2.621e+02 4.331e+02, threshold=4.409e+02, percent-clipped=1.0
+2024-08-25 08:48:13,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=60410.666666666664, ans=0.125
+2024-08-25 08:49:09,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=60517.333333333336, ans=0.125
+2024-08-25 08:49:14,323 INFO [train.py:1114] (2/4) Epoch 5, batch 1400, loss[loss=0.2557, simple_loss=0.2938, pruned_loss=0.07982, ctc_loss=0.1449, over 19671.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3294, pruned_loss=0.09539, ctc_loss=0.1794, over 3864150.13 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0
+2024-08-25 08:49:18,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60570.666666666664, ans=0.1
+2024-08-25 08:49:31,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.32 vs. limit=10.0
+2024-08-25 08:49:38,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=60624.0, ans=0.0
+2024-08-25 08:49:38,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.30 vs. limit=22.5
+2024-08-25 09:01:57,443 INFO [train.py:1114] (2/4) Epoch 5, batch 1450, loss[loss=0.3223, simple_loss=0.351, pruned_loss=0.1076, ctc_loss=0.1956, over 19676.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3303, pruned_loss=0.09571, ctc_loss=0.1799, over 3862100.12 frames. ], batch size: 63, lr: 2.71e-02, grad_scale: 32.0
+2024-08-25 09:07:40,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60837.333333333336, ans=0.1
+2024-08-25 09:12:44,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.62 vs. limit=10.0
+2024-08-25 09:14:29,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 1.942e+02 2.164e+02 2.480e+02 4.633e+02, threshold=4.329e+02, percent-clipped=1.0
+2024-08-25 09:15:31,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=60944.0, ans=0.125
+2024-08-25 09:24:21,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=60997.333333333336, ans=0.125
+2024-08-25 09:24:21,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60997.333333333336, ans=0.0
+2024-08-25 09:25:28,209 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.70 vs. limit=15.0
+2024-08-25 09:25:28,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0
+2024-08-25 09:26:47,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=60997.333333333336, ans=0.0
+2024-08-25 09:34:05,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=61050.666666666664, ans=0.2
+2024-08-25 09:36:13,473 INFO [train.py:1114] (2/4) Epoch 5, batch 1500, loss[loss=0.308, simple_loss=0.3456, pruned_loss=0.09762, ctc_loss=0.1881, over 19586.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3304, pruned_loss=0.09576, ctc_loss=0.1801, over 3861725.35 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0
+2024-08-25 09:44:18,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=61157.333333333336, ans=0.2
+2024-08-25 09:49:01,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=61157.333333333336, ans=0.125
+2024-08-25 09:57:47,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61264.0, ans=0.125
+2024-08-25 09:58:47,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.08 vs. limit=15.0
+2024-08-25 10:00:40,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.66 vs. limit=12.0
+2024-08-25 10:00:40,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=61264.0, ans=0.0
+2024-08-25 10:06:52,302 INFO [train.py:1114] (2/4) Epoch 5, batch 1550, loss[loss=0.324, simple_loss=0.3515, pruned_loss=0.1075, ctc_loss=0.2037, over 19584.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3307, pruned_loss=0.09615, ctc_loss=0.181, over 3847400.37 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0
+2024-08-25 10:12:14,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=61424.0, ans=0.0
+2024-08-25 10:13:22,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.97 vs. limit=22.5
+2024-08-25 10:14:16,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=61424.0, ans=0.0
+2024-08-25 10:14:47,414 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 1.971e+02 2.260e+02 2.611e+02 5.554e+02, threshold=4.519e+02, percent-clipped=3.0
+2024-08-25 10:25:58,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61584.0, ans=0.125
+2024-08-25 10:28:13,711 INFO [train.py:1114] (2/4) Epoch 5, batch 1600, loss[loss=0.2897, simple_loss=0.3281, pruned_loss=0.09212, ctc_loss=0.1674, over 19851.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3295, pruned_loss=0.09523, ctc_loss=0.1792, over 3835621.73 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0
+2024-08-25 10:28:35,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=61637.333333333336, ans=0.0
+2024-08-25 10:30:45,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=61690.666666666664, ans=0.125
+2024-08-25 10:31:37,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=61690.666666666664, ans=0.125
+2024-08-25 10:36:46,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=61797.333333333336, ans=0.125
+2024-08-25 10:39:04,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.62 vs. limit=15.0
+2024-08-25 10:40:18,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=61850.666666666664, ans=0.125
+2024-08-25 10:40:18,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=61850.666666666664, ans=0.5
+2024-08-25 10:40:45,722 INFO [train.py:1114] (2/4) Epoch 5, batch 1650, loss[loss=0.3146, simple_loss=0.3487, pruned_loss=0.1007, ctc_loss=0.1978, over 19673.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3295, pruned_loss=0.09519, ctc_loss=0.1794, over 3832831.11 frames. ], batch size: 59, lr: 2.69e-02, grad_scale: 32.0
+2024-08-25 10:40:49,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.91 vs. limit=22.5
+2024-08-25 10:43:04,120 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.985e+02 2.336e+02 2.616e+02 4.728e+02, threshold=4.672e+02, percent-clipped=1.0
+2024-08-25 10:43:35,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=62010.666666666664, ans=0.125
+2024-08-25 10:43:52,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.42 vs. limit=15.0
+2024-08-25 10:46:21,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=62117.333333333336, ans=0.0
+2024-08-25 10:46:43,742 INFO [train.py:1114] (2/4) Epoch 5, batch 1700, loss[loss=0.2799, simple_loss=0.3072, pruned_loss=0.09206, ctc_loss=0.1714, over 19662.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.329, pruned_loss=0.09469, ctc_loss=0.1783, over 3847310.42 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 32.0
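The INFO [scaling.py:214] lines trace ScheduledFloat values: regularization hyper-parameters (balancer probabilities, skip rates, dropout, min/max scales) that are not fixed but annealed as a function of the batch count. A minimal sketch of such a batch-count-keyed, piecewise-linear schedule; the real ScheduledFloat class in scaling.py has more machinery, and the knot values below are hypothetical, chosen only to illustrate the shape:

```python
def scheduled_float(batch_count: float, knots: list[tuple[float, float]]) -> float:
    """Piecewise-linear value keyed on batch_count, like the ScheduledFloat
    traces above. `knots` is a list of (batch_count, value) pairs."""
    x0, y0 = knots[0]
    if batch_count <= x0:
        return y0
    for x1, y1 in knots[1:]:
        if batch_count <= x1:
            t = (batch_count - x0) / (x1 - x0)
            return y0 + t * (y1 - y0)  # linear interpolation between knots
        x0, y0 = x1, y1
    return y0  # past the last knot, hold the final value

# A dropout decaying from 0.3 to 0.1 over the first 20k batches would already
# read ans=0.1 at batch_count=58117.33, as in the dropout_p lines above:
assert scheduled_float(58117.33, [(0.0, 0.3), (20000.0, 0.1)]) == 0.1
```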
+2024-08-25 10:48:01,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.05 vs. limit=10.0
+2024-08-25 10:48:25,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.65 vs. limit=15.0
+2024-08-25 10:48:56,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=62277.333333333336, ans=0.0
+2024-08-25 10:48:57,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62277.333333333336, ans=0.125
+2024-08-25 10:48:59,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.02 vs. limit=15.0
+2024-08-25 10:49:17,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62277.333333333336, ans=0.1
+2024-08-25 10:50:17,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=62384.0, ans=0.125
+2024-08-25 10:50:54,972 INFO [train.py:1114] (2/4) Epoch 5, batch 1750, loss[loss=0.2526, simple_loss=0.2903, pruned_loss=0.07711, ctc_loss=0.1518, over 19603.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.329, pruned_loss=0.09506, ctc_loss=0.179, over 3851911.58 frames. ], batch size: 45, lr: 2.68e-02, grad_scale: 32.0
+2024-08-25 10:51:05,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=62437.333333333336, ans=0.025
+2024-08-25 10:51:09,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=62437.333333333336, ans=0.025
+2024-08-25 10:51:13,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=62437.333333333336, ans=0.2
+2024-08-25 10:51:35,962 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0
+2024-08-25 10:51:43,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.31 vs. limit=15.0
+2024-08-25 10:53:52,982 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 2.010e+02 2.326e+02 2.972e+02 6.446e+02, threshold=4.653e+02, percent-clipped=3.0
+2024-08-25 10:56:53,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62650.666666666664, ans=0.125
+2024-08-25 10:57:11,558 INFO [train.py:1114] (2/4) Epoch 5, batch 1800, loss[loss=0.3243, simple_loss=0.3541, pruned_loss=0.1074, ctc_loss=0.1994, over 19623.00 frames. ], tot_loss[loss=0.2954, simple_loss=0.3292, pruned_loss=0.09501, ctc_loss=0.1787, over 3853742.09 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0
+2024-08-25 10:57:28,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=62704.0, ans=0.125
+2024-08-25 10:57:55,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.65 vs. limit=15.0
+2024-08-25 10:58:25,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=62810.666666666664, ans=0.0
+2024-08-25 10:58:39,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0
+2024-08-25 10:58:42,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62864.0, ans=0.125
+2024-08-25 10:58:57,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=62917.333333333336, ans=0.0
+2024-08-25 10:58:59,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=62917.333333333336, ans=0.0
+2024-08-25 10:59:01,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=62917.333333333336, ans=0.2
+2024-08-25 10:59:06,170 INFO [train.py:1114] (2/4) Epoch 5, batch 1850, loss[loss=0.3015, simple_loss=0.3459, pruned_loss=0.09423, ctc_loss=0.1717, over 19584.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3284, pruned_loss=0.0943, ctc_loss=0.1774, over 3856585.11 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 32.0
+2024-08-25 10:59:12,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.04 vs. limit=15.0
+2024-08-25 10:59:14,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=62970.666666666664, ans=0.125
+2024-08-25 10:59:25,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63024.0, ans=0.1
+2024-08-25 10:59:32,447 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 2.044e+02 2.314e+02 2.820e+02 4.474e+02, threshold=4.628e+02, percent-clipped=0.0
+2024-08-25 10:59:42,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63077.333333333336, ans=0.125
+2024-08-25 10:59:53,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.34 vs. limit=15.0
+2024-08-25 11:00:20,610 INFO [train.py:1114] (2/4) Epoch 5, batch 1900, loss[loss=0.2921, simple_loss=0.3301, pruned_loss=0.09192, ctc_loss=0.1756, over 19653.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.3293, pruned_loss=0.09454, ctc_loss=0.1779, over 3860612.57 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 32.0
+2024-08-25 11:00:20,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=63237.333333333336, ans=0.125
+2024-08-25 11:00:26,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63237.333333333336, ans=0.125
+2024-08-25 11:00:28,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.89 vs. limit=15.0
+2024-08-25 11:00:34,222 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 11:00:40,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=63290.666666666664, ans=0.2
+2024-08-25 11:01:09,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.29 vs. limit=8.0
+2024-08-25 11:01:10,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63344.0, ans=0.125
+2024-08-25 11:01:55,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=63397.333333333336, ans=0.0
+2024-08-25 11:01:58,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=63450.666666666664, ans=0.2
+2024-08-25 11:02:34,501 INFO [train.py:1114] (2/4) Epoch 5, batch 1950, loss[loss=0.3133, simple_loss=0.3371, pruned_loss=0.1055, ctc_loss=0.196, over 19587.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.3298, pruned_loss=0.09443, ctc_loss=0.1775, over 3869917.03 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 32.0
+2024-08-25 11:02:42,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=63504.0, ans=0.0
+2024-08-25 11:02:58,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63557.333333333336, ans=0.1
+2024-08-25 11:03:16,680 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.932e+02 2.130e+02 2.461e+02 4.838e+02, threshold=4.259e+02, percent-clipped=1.0
+2024-08-25 11:03:45,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63610.666666666664, ans=0.125
+2024-08-25 11:04:15,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=63717.333333333336, ans=0.125
+2024-08-25 11:04:25,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=15.0
+2024-08-25 11:04:37,798 INFO [train.py:1114] (2/4) Epoch 5, batch 2000, loss[loss=0.2706, simple_loss=0.2952, pruned_loss=0.0901, ctc_loss=0.1645, over 19684.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3308, pruned_loss=0.09531, ctc_loss=0.1789, over 3853024.26 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0
+2024-08-25 11:04:58,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=63824.0, ans=0.0
+2024-08-25 11:04:59,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0
+2024-08-25 11:05:01,323 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.535e-03
+2024-08-25 11:05:02,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=63824.0, ans=0.125
+2024-08-25 11:05:34,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=63930.666666666664, ans=0.0
+2024-08-25 11:06:07,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=64037.333333333336, ans=0.125
+2024-08-25 11:06:08,374 INFO [train.py:1114] (2/4) Epoch 5, batch 2050, loss[loss=0.2516, simple_loss=0.2979, pruned_loss=0.07407, ctc_loss=0.1432, over 19738.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3293, pruned_loss=0.09484, ctc_loss=0.1781, over 3850417.08 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 32.0
+2024-08-25 11:06:24,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=64090.666666666664, ans=0.125
+2024-08-25 11:06:29,147 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.037e+02 2.272e+02 2.892e+02 6.343e+02, threshold=4.544e+02, percent-clipped=1.0
+2024-08-25 11:06:49,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=64144.0, ans=0.2
+2024-08-25 11:06:52,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=64197.333333333336, ans=0.0
+2024-08-25 11:07:05,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0
+2024-08-25 11:07:30,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=64250.666666666664, ans=0.0
+2024-08-25 11:07:48,389 INFO [train.py:1114] (2/4) Epoch 5, batch 2100, loss[loss=0.2622, simple_loss=0.3108, pruned_loss=0.07832, ctc_loss=0.1423, over 19770.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.328, pruned_loss=0.09384, ctc_loss=0.1762, over 3858122.29 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0
+2024-08-25 11:07:49,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64304.0, ans=0.1
+2024-08-25 11:07:49,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=64304.0, ans=0.2
+2024-08-25 11:07:54,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=15.0
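For tracking convergence, the INFO [train.py:1114] lines are the ones worth extracting: they carry the per-batch and running losses plus the learning rate. A small sketch of pulling those fields out with a regular expression; the log filename below is a placeholder for this file:

```python
import re

LOG = "log-train.txt"  # placeholder: point this at the actual log file
PAT = re.compile(
    r"Epoch (\d+), batch (\d+),.*?tot_loss\[loss=([\d.]+),.*?"
    r"ctc_loss=([\d.]+), over [\d.]+ frames\. \], batch size: \d+, "
    r"lr: ([\d.e-]+)"
)

points = []
for line in open(LOG):
    m = PAT.search(line)
    if m:
        epoch, batch, tot, ctc, lr = m.groups()
        points.append((int(epoch), int(batch), float(tot), float(ctc), float(lr)))

# e.g. dump the running total loss and lr per logged batch
for ep, b, tot, ctc, lr in points:
    print(f"epoch {ep} batch {b}: tot_loss={tot:.4f} ctc_loss={ctc:.4f} lr={lr:.2e}")
```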
+2024-08-25 11:08:13,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=64357.333333333336, ans=0.125
+2024-08-25 11:08:33,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64464.0, ans=0.1
+2024-08-25 11:08:52,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=64464.0, ans=0.0
+2024-08-25 11:09:21,109 INFO [train.py:1114] (2/4) Epoch 5, batch 2150, loss[loss=0.2681, simple_loss=0.3152, pruned_loss=0.08152, ctc_loss=0.1447, over 19854.00 frames. ], tot_loss[loss=0.2927, simple_loss=0.3274, pruned_loss=0.09379, ctc_loss=0.1761, over 3869402.62 frames. ], batch size: 52, lr: 2.65e-02, grad_scale: 32.0
+2024-08-25 11:09:23,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=64570.666666666664, ans=0.125
+2024-08-25 11:09:34,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64624.0, ans=0.1
+2024-08-25 11:09:44,528 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 2.041e+02 2.279e+02 2.689e+02 3.624e+02, threshold=4.557e+02, percent-clipped=0.0
+2024-08-25 11:10:26,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=64784.0, ans=0.5
+2024-08-25 11:10:34,041 INFO [train.py:1114] (2/4) Epoch 5, batch 2200, loss[loss=0.3016, simple_loss=0.3423, pruned_loss=0.095, ctc_loss=0.1774, over 19582.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3277, pruned_loss=0.09406, ctc_loss=0.1768, over 3867300.66 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0
+2024-08-25 11:11:19,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=65050.666666666664, ans=0.04949747468305833
+2024-08-25 11:11:20,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=65050.666666666664, ans=0.0
+2024-08-25 11:11:23,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=65050.666666666664, ans=0.04949747468305833
+2024-08-25 11:11:29,236 INFO [train.py:1114] (2/4) Epoch 5, batch 2250, loss[loss=0.2917, simple_loss=0.3377, pruned_loss=0.08941, ctc_loss=0.1674, over 19599.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.3281, pruned_loss=0.09427, ctc_loss=0.1773, over 3867046.89 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 32.0
+2024-08-25 11:11:35,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65104.0, ans=0.0
+2024-08-25 11:11:45,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=65157.333333333336, ans=0.025
+2024-08-25 11:11:51,987 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.714e+02 2.180e+02 2.514e+02 3.003e+02 5.559e+02, threshold=5.029e+02, percent-clipped=2.0
+2024-08-25 11:12:17,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=65264.0, ans=0.0
+2024-08-25 11:12:38,250 INFO [train.py:1114] (2/4) Epoch 5, batch 2300, loss[loss=0.2553, simple_loss=0.2986, pruned_loss=0.07762, ctc_loss=0.142, over 19505.00 frames. ], tot_loss[loss=0.293, simple_loss=0.3268, pruned_loss=0.09416, ctc_loss=0.1771, over 3860953.91 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 32.0
+2024-08-25 11:13:16,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=65530.666666666664, ans=0.125
+2024-08-25 11:13:21,594 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0
+2024-08-25 11:13:30,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=65584.0, ans=0.125
+2024-08-25 11:13:30,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=65584.0, ans=0.2
+2024-08-25 11:13:33,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=65584.0, ans=0.0
+2024-08-25 11:13:37,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=65584.0, ans=6.0
+2024-08-25 11:13:38,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65584.0, ans=0.125
+2024-08-25 11:13:40,595 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 11:13:50,095 INFO [train.py:1114] (2/4) Epoch 5, batch 2350, loss[loss=0.3255, simple_loss=0.3597, pruned_loss=0.1064, ctc_loss=0.196, over 19668.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3264, pruned_loss=0.09368, ctc_loss=0.1759, over 3863446.28 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 32.0
+2024-08-25 11:14:31,467 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.936e+02 2.303e+02 2.820e+02 4.151e+02, threshold=4.606e+02, percent-clipped=0.0
+2024-08-25 11:14:56,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65797.33333333333, ans=0.1
+2024-08-25 11:14:56,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=65797.33333333333, ans=0.125
+2024-08-25 11:15:01,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=65797.33333333333, ans=0.04949747468305833
+2024-08-25 11:15:23,151 INFO [train.py:1114] (2/4) Epoch 5, batch 2400, loss[loss=0.3012, simple_loss=0.3465, pruned_loss=0.09296, ctc_loss=0.1753, over 19291.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3292, pruned_loss=0.09489, ctc_loss=0.1776, over 3857528.14 frames. ], batch size: 71, lr: 2.63e-02, grad_scale: 32.0
+2024-08-25 11:15:34,826 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.81 vs. limit=15.0
+2024-08-25 11:15:41,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=65957.33333333333, ans=0.125
+2024-08-25 11:16:07,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=66010.66666666667, ans=0.0
+2024-08-25 11:16:12,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=66064.0, ans=0.2
+2024-08-25 11:16:33,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=66064.0, ans=0.125
+2024-08-25 11:16:40,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.63 vs. limit=15.0
+2024-08-25 11:16:56,808 INFO [train.py:1114] (2/4) Epoch 5, batch 2450, loss[loss=0.3843, simple_loss=0.3775, pruned_loss=0.1422, ctc_loss=0.2666, over 14018.00 frames. ], tot_loss[loss=0.3044, simple_loss=0.3347, pruned_loss=0.09968, ctc_loss=0.1869, over 3729975.43 frames. ], batch size: 141, lr: 2.63e-02, grad_scale: 32.0
+2024-08-25 11:17:16,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=66224.0, ans=0.125
+2024-08-25 11:17:17,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=66224.0, ans=0.125
+2024-08-25 11:17:39,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=66224.0, ans=0.0
+2024-08-25 11:17:41,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=66224.0, ans=0.2
+2024-08-25 11:17:43,161 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 2.021e+02 2.221e+02 2.524e+02 3.558e+02, threshold=4.443e+02, percent-clipped=0.0
+2024-08-25 11:17:45,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=66277.33333333333, ans=0.125
+2024-08-25 11:17:54,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=66277.33333333333, ans=0.125
+2024-08-25 11:17:56,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66277.33333333333, ans=0.125
+2024-08-25 11:17:59,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.91 vs. limit=15.0
+2024-08-25 11:18:01,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66330.66666666667, ans=0.1
+2024-08-25 11:18:03,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=66330.66666666667, ans=0.125
+2024-08-25 11:19:28,342 INFO [train.py:1114] (2/4) Epoch 6, batch 0, loss[loss=0.3045, simple_loss=0.3251, pruned_loss=0.1046, ctc_loss=0.187, over 19792.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3251, pruned_loss=0.1046, ctc_loss=0.187, over 19792.00 frames. ], batch size: 49, lr: 2.45e-02, grad_scale: 32.0
+2024-08-25 11:19:28,342 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 11:20:29,250 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.2388, simple_loss=0.3147, pruned_loss=0.05993, ctc_loss=0.1076, over 944034.00 frames. 
+2024-08-25 11:20:29,251 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB
+2024-08-25 11:20:29,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.82 vs. limit=15.0
+2024-08-25 11:20:48,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.52 vs. limit=15.0
+2024-08-25 11:21:07,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66538.66666666667, ans=0.1
+2024-08-25 11:21:25,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=66592.0, ans=0.125
+2024-08-25 11:21:56,937 INFO [train.py:1114] (2/4) Epoch 6, batch 50, loss[loss=0.2449, simple_loss=0.2931, pruned_loss=0.0717, ctc_loss=0.1333, over 19723.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3303, pruned_loss=0.09584, ctc_loss=0.1803, over 845158.26 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0
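The "Epoch 6, batch 0" block above shows the epoch rollover: train.py first runs a full validation pass (loss=0.2388 over 944034.00 frames) and reports peak memory, and the learning rate steps from 2.63e-02 to 2.45e-02 instead of continuing its smooth per-batch decay. That step is what an Eden-style schedule, which decays in both batch and epoch, would produce; a hedged sketch, where the lr_batches/lr_epochs values are illustrative assumptions rather than values read from this run's config:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    # Eden-style decay in both batch and epoch; at an epoch boundary the
    # epoch factor drops while the batch factor is unchanged, producing a
    # discrete lr step like the 2.63e-02 -> 2.45e-02 one seen above.
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```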
+2024-08-25 11:21:58,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.52 vs. limit=12.0
+2024-08-25 11:22:02,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0
+2024-08-25 11:22:12,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.44 vs. limit=22.5
+2024-08-25 11:22:24,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=66752.0, ans=0.125
+2024-08-25 11:22:35,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=66752.0, ans=0.125
+2024-08-25 11:22:38,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=66752.0, ans=0.125
+2024-08-25 11:22:45,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=66752.0, ans=0.025
+2024-08-25 11:22:50,716 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 1.959e+02 2.174e+02 2.569e+02 5.460e+02, threshold=4.347e+02, percent-clipped=1.0
+2024-08-25 11:23:07,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=66858.66666666667, ans=0.125
+2024-08-25 11:23:11,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.83 vs. limit=22.5
+2024-08-25 11:23:18,888 INFO [train.py:1114] (2/4) Epoch 6, batch 100, loss[loss=0.2726, simple_loss=0.3096, pruned_loss=0.08551, ctc_loss=0.1613, over 19707.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3308, pruned_loss=0.09516, ctc_loss=0.1793, over 1498722.63 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0
+2024-08-25 11:23:38,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=66965.33333333333, ans=0.025
+2024-08-25 11:24:15,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=67125.33333333333, ans=0.125
+2024-08-25 11:24:21,823 INFO [train.py:1114] (2/4) Epoch 6, batch 150, loss[loss=0.2842, simple_loss=0.3069, pruned_loss=0.09597, ctc_loss=0.1738, over 19740.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.3279, pruned_loss=0.0935, ctc_loss=0.1758, over 2027884.99 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0
+2024-08-25 11:24:38,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0
+2024-08-25 11:25:04,952 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.947e+02 2.172e+02 2.650e+02 4.091e+02, threshold=4.343e+02, percent-clipped=0.0
+2024-08-25 11:25:20,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=67392.0, ans=0.2
+2024-08-25 11:25:22,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67392.0, ans=0.1
+2024-08-25 11:25:35,918 INFO [train.py:1114] (2/4) Epoch 6, batch 200, loss[loss=0.3581, simple_loss=0.3678, pruned_loss=0.1272, ctc_loss=0.2352, over 18250.00 frames. ], tot_loss[loss=0.2919, simple_loss=0.3269, pruned_loss=0.09333, ctc_loss=0.1754, over 2435615.73 frames. ], batch size: 85, lr: 2.43e-02, grad_scale: 32.0
+2024-08-25 11:25:59,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.27 vs. limit=15.0
+2024-08-25 11:26:02,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0
+2024-08-25 11:26:04,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=67498.66666666667, ans=0.125
+2024-08-25 11:26:12,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.whiten.whitening_limit, batch_count=67552.0, ans=15.0
+2024-08-25 11:26:37,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=67552.0, ans=0.035
+2024-08-25 11:27:19,526 INFO [train.py:1114] (2/4) Epoch 6, batch 250, loss[loss=0.2868, simple_loss=0.3348, pruned_loss=0.0865, ctc_loss=0.1644, over 19388.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3261, pruned_loss=0.09274, ctc_loss=0.1745, over 2755871.45 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0
+2024-08-25 11:27:30,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67712.0, ans=0.125
+2024-08-25 11:27:44,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=67765.33333333333, ans=0.07
+2024-08-25 11:28:14,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.43 vs. limit=15.0
+2024-08-25 11:28:36,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.549e+02 1.900e+02 2.111e+02 2.483e+02 4.707e+02, threshold=4.222e+02, percent-clipped=1.0
+2024-08-25 11:29:32,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67925.33333333333, ans=0.1
+2024-08-25 11:29:38,980 INFO [train.py:1114] (2/4) Epoch 6, batch 300, loss[loss=0.3361, simple_loss=0.3501, pruned_loss=0.1177, ctc_loss=0.2169, over 19499.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3252, pruned_loss=0.09218, ctc_loss=0.1735, over 3000609.96 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0
+2024-08-25 11:30:18,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=15.0
+2024-08-25 11:30:40,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68085.33333333333, ans=0.125
+2024-08-25 11:30:43,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=68085.33333333333, ans=0.125
+2024-08-25 11:30:59,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=68138.66666666667, ans=0.0
+2024-08-25 11:31:00,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=68138.66666666667, ans=0.1
+2024-08-25 11:31:07,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=68138.66666666667, ans=0.025
+2024-08-25 11:31:09,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=68138.66666666667, ans=0.0
+2024-08-25 11:31:10,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.93 vs. limit=15.0
+2024-08-25 11:31:26,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68192.0, ans=0.125
+2024-08-25 11:31:39,944 INFO [train.py:1114] (2/4) Epoch 6, batch 350, loss[loss=0.2304, simple_loss=0.2774, pruned_loss=0.06631, ctc_loss=0.127, over 19758.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3248, pruned_loss=0.09175, ctc_loss=0.1726, over 3191275.62 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0
+2024-08-25 11:31:47,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=68245.33333333333, ans=0.0
+2024-08-25 11:32:15,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=68298.66666666667, ans=0.2
+2024-08-25 11:32:35,309 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 2.039e+02 2.360e+02 2.872e+02 5.301e+02, threshold=4.720e+02, percent-clipped=2.0
+2024-08-25 11:32:40,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=68405.33333333333, ans=0.0
+2024-08-25 11:33:02,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0
+2024-08-25 11:33:02,560 INFO [train.py:1114] (2/4) Epoch 6, batch 400, loss[loss=0.313, simple_loss=0.3419, pruned_loss=0.1046, ctc_loss=0.1873, over 19497.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3243, pruned_loss=0.09137, ctc_loss=0.1716, over 3342789.81 frames. ], batch size: 54, lr: 2.42e-02, grad_scale: 32.0
+2024-08-25 11:33:12,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=68512.0, ans=0.0
+2024-08-25 11:33:12,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.15 vs. limit=22.5
+2024-08-25 11:33:17,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=6.0
+2024-08-25 11:33:18,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68565.33333333333, ans=0.1
+2024-08-25 11:33:20,616 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.76 vs. limit=10.0
+2024-08-25 11:33:24,798 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 11:33:58,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=15.0
+2024-08-25 11:34:06,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68725.33333333333, ans=0.125
+2024-08-25 11:34:13,430 INFO [train.py:1114] (2/4) Epoch 6, batch 450, loss[loss=0.267, simple_loss=0.32, pruned_loss=0.07664, ctc_loss=0.1519, over 19615.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3236, pruned_loss=0.09044, ctc_loss=0.1702, over 3451041.50 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0
+2024-08-25 11:34:23,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=68778.66666666667, ans=0.2
+2024-08-25 11:34:30,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68832.0, ans=0.1
+2024-08-25 11:34:46,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=15.0
+2024-08-25 11:34:48,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=68938.66666666667, ans=0.2
+2024-08-25 11:34:49,670 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.969e+02 2.191e+02 2.793e+02 4.218e+02, threshold=4.382e+02, percent-clipped=0.0
+2024-08-25 11:34:50,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=68938.66666666667, ans=0.125
+2024-08-25 11:34:59,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=68992.0, ans=0.0
+2024-08-25 11:35:09,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=69045.33333333333, ans=0.2
+2024-08-25 11:35:10,603 INFO [train.py:1114] (2/4) Epoch 6, batch 500, loss[loss=0.3129, simple_loss=0.337, pruned_loss=0.1065, ctc_loss=0.1895, over 19687.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3222, pruned_loss=0.08971, ctc_loss=0.1688, over 3545450.38 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0
+2024-08-25 11:35:14,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69045.33333333333, ans=0.125
+2024-08-25 11:35:21,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69098.66666666667, ans=0.1
+2024-08-25 11:35:40,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=69152.0, ans=0.125
+2024-08-25 11:35:41,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69152.0, ans=0.1
+2024-08-25 11:36:10,444 INFO [train.py:1114] (2/4) Epoch 6, batch 550, loss[loss=0.324, simple_loss=0.3464, pruned_loss=0.1078, ctc_loss=0.215, over 19302.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3228, pruned_loss=0.09039, ctc_loss=0.17, over 3606897.15 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0
+2024-08-25 11:36:16,789 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.249e-02
+2024-08-25 11:36:25,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=69365.33333333333, ans=0.125
+2024-08-25 11:36:46,536 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.559e+02 2.100e+02 2.439e+02 2.966e+02 5.259e+02, threshold=4.878e+02, percent-clipped=1.0
+2024-08-25 11:36:54,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=69472.0, ans=0.2
+2024-08-25 11:37:07,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=69525.33333333333, ans=0.2
+2024-08-25 11:37:07,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=69525.33333333333, ans=0.2
+2024-08-25 11:37:12,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=69525.33333333333, ans=0.2
+2024-08-25 11:37:27,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.08 vs. limit=10.0
+2024-08-25 11:37:28,774 INFO [train.py:1114] (2/4) Epoch 6, batch 600, loss[loss=0.3176, simple_loss=0.3481, pruned_loss=0.105, ctc_loss=0.1928, over 19395.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3229, pruned_loss=0.09023, ctc_loss=0.1699, over 3664060.99 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0
+2024-08-25 11:37:50,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69632.0, ans=0.125
+2024-08-25 11:38:04,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69685.33333333333, ans=0.125
+2024-08-25 11:38:11,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.46 vs. limit=6.0
+2024-08-25 11:38:58,901 INFO [train.py:1114] (2/4) Epoch 6, batch 650, loss[loss=0.2643, simple_loss=0.3162, pruned_loss=0.07817, ctc_loss=0.1401, over 19766.00 frames. 
], tot_loss[loss=0.2853, simple_loss=0.3226, pruned_loss=0.09012, ctc_loss=0.1695, over 3714679.70 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:39:05,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=69845.33333333333, ans=0.125 +2024-08-25 11:39:06,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=69845.33333333333, ans=0.0 +2024-08-25 11:39:50,474 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.931e+02 2.137e+02 2.425e+02 3.711e+02, threshold=4.274e+02, percent-clipped=0.0 +2024-08-25 11:40:16,220 INFO [train.py:1114] (2/4) Epoch 6, batch 700, loss[loss=0.2961, simple_loss=0.3159, pruned_loss=0.09884, ctc_loss=0.1966, over 19709.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3227, pruned_loss=0.08985, ctc_loss=0.1692, over 3746895.95 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:40:41,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=70165.33333333333, ans=0.125 +2024-08-25 11:41:40,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=70272.0, ans=0.125 +2024-08-25 11:42:12,745 INFO [train.py:1114] (2/4) Epoch 6, batch 750, loss[loss=0.2856, simple_loss=0.3339, pruned_loss=0.08503, ctc_loss=0.1681, over 19511.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3223, pruned_loss=0.08969, ctc_loss=0.1689, over 3773056.43 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:42:37,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=70485.33333333333, ans=0.2 +2024-08-25 11:43:04,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.72 vs. limit=10.0 +2024-08-25 11:43:06,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=70538.66666666667, ans=0.2 +2024-08-25 11:43:09,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.653e+02 2.022e+02 2.297e+02 2.693e+02 4.652e+02, threshold=4.594e+02, percent-clipped=2.0 +2024-08-25 11:43:10,149 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.28 vs. limit=15.0 +2024-08-25 11:43:14,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=70538.66666666667, ans=0.0 +2024-08-25 11:43:21,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=70592.0, ans=0.5 +2024-08-25 11:43:27,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70592.0, ans=0.1 +2024-08-25 11:43:31,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.44 vs. limit=22.5 +2024-08-25 11:43:34,909 INFO [train.py:1114] (2/4) Epoch 6, batch 800, loss[loss=0.2428, simple_loss=0.2848, pruned_loss=0.07288, ctc_loss=0.1377, over 19389.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3222, pruned_loss=0.08957, ctc_loss=0.1685, over 3794047.98 frames. 
], batch size: 48, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:43:36,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=70645.33333333333, ans=0.125 +2024-08-25 11:44:24,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.93 vs. limit=15.0 +2024-08-25 11:44:46,476 INFO [train.py:1114] (2/4) Epoch 6, batch 850, loss[loss=0.2826, simple_loss=0.3208, pruned_loss=0.08789, ctc_loss=0.1717, over 19665.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3215, pruned_loss=0.08909, ctc_loss=0.1675, over 3813304.36 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:45:13,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70965.33333333333, ans=0.0 +2024-08-25 11:45:15,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70965.33333333333, ans=0.1 +2024-08-25 11:45:15,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.47 vs. limit=15.0 +2024-08-25 11:45:20,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=70965.33333333333, ans=0.0 +2024-08-25 11:45:46,252 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.893e+02 2.077e+02 2.374e+02 4.075e+02, threshold=4.154e+02, percent-clipped=0.0 +2024-08-25 11:45:55,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71125.33333333333, ans=0.1 +2024-08-25 11:46:02,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=71125.33333333333, ans=0.125 +2024-08-25 11:46:07,487 INFO [train.py:1114] (2/4) Epoch 6, batch 900, loss[loss=0.2727, simple_loss=0.3026, pruned_loss=0.08787, ctc_loss=0.1678, over 19785.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3215, pruned_loss=0.08919, ctc_loss=0.1676, over 3818368.18 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 16.0 +2024-08-25 11:46:19,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.04 vs. limit=15.0 +2024-08-25 11:46:31,548 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-08-25 11:46:32,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=71232.0, ans=0.2 +2024-08-25 11:46:43,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=71285.33333333333, ans=0.025 +2024-08-25 11:46:54,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=71338.66666666667, ans=0.0 +2024-08-25 11:47:01,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.27 vs. 
limit=22.5 +2024-08-25 11:47:12,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=71392.0, ans=0.2 +2024-08-25 11:47:21,596 INFO [train.py:1114] (2/4) Epoch 6, batch 950, loss[loss=0.2941, simple_loss=0.3186, pruned_loss=0.09886, ctc_loss=0.1795, over 19507.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3216, pruned_loss=0.08894, ctc_loss=0.1673, over 3820269.97 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 16.0 +2024-08-25 11:47:22,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.88 vs. limit=22.5 +2024-08-25 11:47:26,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71445.33333333333, ans=0.1 +2024-08-25 11:47:28,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=71445.33333333333, ans=0.0 +2024-08-25 11:47:42,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=15.0 +2024-08-25 11:48:12,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=71552.0, ans=0.0 +2024-08-25 11:48:13,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71552.0, ans=0.1 +2024-08-25 11:48:21,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0 +2024-08-25 11:48:23,508 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.900e+02 2.167e+02 2.553e+02 4.088e+02, threshold=4.334e+02, percent-clipped=0.0 +2024-08-25 11:48:25,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=71605.33333333333, ans=0.0 +2024-08-25 11:49:03,420 INFO [train.py:1114] (2/4) Epoch 6, batch 1000, loss[loss=0.2573, simple_loss=0.2977, pruned_loss=0.07851, ctc_loss=0.1498, over 19872.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3224, pruned_loss=0.08968, ctc_loss=0.1686, over 3816465.51 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0 +2024-08-25 11:49:10,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71712.0, ans=0.125 +2024-08-25 11:49:13,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=71712.0, ans=0.125 +2024-08-25 11:49:14,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.07 vs. 
limit=15.0 +2024-08-25 11:49:25,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=71765.33333333333, ans=0.07 +2024-08-25 11:50:05,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=71872.0, ans=0.0 +2024-08-25 11:50:33,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=71925.33333333333, ans=0.125 +2024-08-25 11:50:46,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=71925.33333333333, ans=0.125 +2024-08-25 11:50:57,806 INFO [train.py:1114] (2/4) Epoch 6, batch 1050, loss[loss=0.2971, simple_loss=0.3388, pruned_loss=0.09373, ctc_loss=0.1699, over 19849.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3215, pruned_loss=0.08939, ctc_loss=0.1681, over 3823461.72 frames. ], batch size: 57, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:51:03,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71978.66666666667, ans=0.1 +2024-08-25 11:51:42,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=12.0 +2024-08-25 11:51:43,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=72085.33333333333, ans=0.025 +2024-08-25 11:51:53,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=72085.33333333333, ans=0.07 +2024-08-25 11:52:00,134 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.944e+02 2.201e+02 2.550e+02 3.957e+02, threshold=4.403e+02, percent-clipped=0.0 +2024-08-25 11:52:48,882 INFO [train.py:1114] (2/4) Epoch 6, batch 1100, loss[loss=0.2398, simple_loss=0.2933, pruned_loss=0.06758, ctc_loss=0.1278, over 19575.00 frames. ], tot_loss[loss=0.283, simple_loss=0.321, pruned_loss=0.08902, ctc_loss=0.1674, over 3830010.05 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:53:07,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.85 vs. limit=22.5 +2024-08-25 11:53:58,660 INFO [train.py:1114] (2/4) Epoch 6, batch 1150, loss[loss=0.3008, simple_loss=0.3298, pruned_loss=0.09939, ctc_loss=0.1825, over 19575.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3218, pruned_loss=0.08988, ctc_loss=0.1687, over 3829261.88 frames. 
], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:54:04,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=72512.0, ans=0.2 +2024-08-25 11:54:27,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=72565.33333333333, ans=0.0 +2024-08-25 11:54:43,452 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.952e+02 2.194e+02 2.505e+02 4.680e+02, threshold=4.387e+02, percent-clipped=1.0 +2024-08-25 11:54:54,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=72725.33333333333, ans=0.025 +2024-08-25 11:55:06,394 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:55:08,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72725.33333333333, ans=0.125 +2024-08-25 11:55:08,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=72725.33333333333, ans=0.0 +2024-08-25 11:55:09,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.35 vs. limit=15.0 +2024-08-25 11:55:11,898 INFO [train.py:1114] (2/4) Epoch 6, batch 1200, loss[loss=0.2657, simple_loss=0.3201, pruned_loss=0.07814, ctc_loss=0.1377, over 19839.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3229, pruned_loss=0.09048, ctc_loss=0.17, over 3825281.55 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:55:22,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72778.66666666667, ans=0.1 +2024-08-25 11:55:50,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-08-25 11:56:20,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72938.66666666667, ans=0.125 +2024-08-25 11:56:25,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=72938.66666666667, ans=0.0 +2024-08-25 11:56:41,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72992.0, ans=0.1 +2024-08-25 11:56:44,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-08-25 11:56:53,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=12.0 +2024-08-25 11:56:55,077 INFO [train.py:1114] (2/4) Epoch 6, batch 1250, loss[loss=0.3028, simple_loss=0.331, pruned_loss=0.1005, ctc_loss=0.1839, over 19541.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3235, pruned_loss=0.09033, ctc_loss=0.1698, over 3842971.79 frames. 
], batch size: 61, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:57:34,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=73152.0, ans=0.09899494936611666 +2024-08-25 11:57:55,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=73152.0, ans=0.125 +2024-08-25 11:58:03,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=73205.33333333333, ans=0.5 +2024-08-25 11:58:13,325 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.073e+02 2.305e+02 2.660e+02 4.224e+02, threshold=4.609e+02, percent-clipped=0.0 +2024-08-25 11:58:26,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=73258.66666666667, ans=0.125 +2024-08-25 11:58:35,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.08 vs. limit=22.5 +2024-08-25 11:58:46,686 INFO [train.py:1114] (2/4) Epoch 6, batch 1300, loss[loss=0.2985, simple_loss=0.3413, pruned_loss=0.09138, ctc_loss=0.1821, over 18813.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3225, pruned_loss=0.08925, ctc_loss=0.168, over 3845774.18 frames. ], batch size: 76, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:58:46,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73312.0, ans=0.125 +2024-08-25 11:59:03,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73312.0, ans=0.125 +2024-08-25 11:59:34,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=73365.33333333333, ans=0.05 +2024-08-25 11:59:53,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=73472.0, ans=0.2 +2024-08-25 12:00:13,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=73525.33333333333, ans=0.0 +2024-08-25 12:00:19,994 INFO [train.py:1114] (2/4) Epoch 6, batch 1350, loss[loss=0.2976, simple_loss=0.334, pruned_loss=0.09379, ctc_loss=0.1841, over 19767.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3222, pruned_loss=0.08914, ctc_loss=0.1675, over 3857695.64 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 12:00:56,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.74 vs. limit=15.0 +2024-08-25 12:01:01,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=73738.66666666667, ans=0.125 +2024-08-25 12:01:04,995 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.025e+02 2.295e+02 2.579e+02 4.133e+02, threshold=4.590e+02, percent-clipped=0.0 +2024-08-25 12:01:30,906 INFO [train.py:1114] (2/4) Epoch 6, batch 1400, loss[loss=0.2691, simple_loss=0.3019, pruned_loss=0.08707, ctc_loss=0.1554, over 19662.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3217, pruned_loss=0.08881, ctc_loss=0.1667, over 3865002.34 frames. 
], batch size: 46, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:01:34,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=73845.33333333333, ans=0.125 +2024-08-25 12:01:50,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73898.66666666667, ans=0.1 +2024-08-25 12:02:22,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=73952.0, ans=0.2 +2024-08-25 12:02:39,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=74058.66666666667, ans=0.2 +2024-08-25 12:02:40,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=74058.66666666667, ans=0.05 +2024-08-25 12:02:52,962 INFO [train.py:1114] (2/4) Epoch 6, batch 1450, loss[loss=0.2876, simple_loss=0.3269, pruned_loss=0.09001, ctc_loss=0.1706, over 19682.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3224, pruned_loss=0.08937, ctc_loss=0.1679, over 3862462.85 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:03:07,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.46 vs. limit=12.0 +2024-08-25 12:03:44,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74218.66666666667, ans=0.1 +2024-08-25 12:03:53,268 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.572e+02 1.998e+02 2.330e+02 2.811e+02 4.670e+02, threshold=4.661e+02, percent-clipped=1.0 +2024-08-25 12:04:24,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74378.66666666667, ans=0.125 +2024-08-25 12:04:25,538 INFO [train.py:1114] (2/4) Epoch 6, batch 1500, loss[loss=0.2853, simple_loss=0.3285, pruned_loss=0.08809, ctc_loss=0.1645, over 19537.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3231, pruned_loss=0.08994, ctc_loss=0.1689, over 3861892.67 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:04:28,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.64 vs. 
limit=10.0 +2024-08-25 12:04:50,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=74485.33333333333, ans=0.0 +2024-08-25 12:05:38,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=74538.66666666667, ans=0.125 +2024-08-25 12:05:38,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=74538.66666666667, ans=0.125 +2024-08-25 12:05:42,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=74538.66666666667, ans=0.025 +2024-08-25 12:05:43,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74538.66666666667, ans=0.1 +2024-08-25 12:06:01,376 INFO [train.py:1114] (2/4) Epoch 6, batch 1550, loss[loss=0.3382, simple_loss=0.3583, pruned_loss=0.1152, ctc_loss=0.2191, over 19637.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3233, pruned_loss=0.09032, ctc_loss=0.1698, over 3847052.20 frames. ], batch size: 60, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:06:06,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=12.0 +2024-08-25 12:06:11,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=74698.66666666667, ans=0.125 +2024-08-25 12:06:12,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=74698.66666666667, ans=0.125 +2024-08-25 12:06:14,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=74698.66666666667, ans=0.2 +2024-08-25 12:06:19,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=74698.66666666667, ans=0.125 +2024-08-25 12:06:37,903 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 2.061e+02 2.512e+02 3.027e+02 4.789e+02, threshold=5.024e+02, percent-clipped=1.0 +2024-08-25 12:07:01,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=74912.0, ans=0.0 +2024-08-25 12:07:01,753 INFO [train.py:1114] (2/4) Epoch 6, batch 1600, loss[loss=0.2728, simple_loss=0.3298, pruned_loss=0.07823, ctc_loss=0.1483, over 19842.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3224, pruned_loss=0.08945, ctc_loss=0.1682, over 3836413.87 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:07:58,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=75125.33333333333, ans=0.125 +2024-08-25 12:08:00,999 INFO [train.py:1114] (2/4) Epoch 6, batch 1650, loss[loss=0.3018, simple_loss=0.3386, pruned_loss=0.0964, ctc_loss=0.1802, over 19652.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3215, pruned_loss=0.08905, ctc_loss=0.1673, over 3832415.40 frames. 
], batch size: 59, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:08:07,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=75178.66666666667, ans=0.125 +2024-08-25 12:08:15,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.78 vs. limit=22.5 +2024-08-25 12:08:16,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=75232.0, ans=0.125 +2024-08-25 12:08:17,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75232.0, ans=0.1 +2024-08-25 12:08:34,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.97 vs. limit=12.0 +2024-08-25 12:08:37,766 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.893e+02 2.381e+02 2.784e+02 7.281e+02, threshold=4.762e+02, percent-clipped=1.0 +2024-08-25 12:08:44,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=75338.66666666667, ans=0.0 +2024-08-25 12:08:49,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=75392.0, ans=0.125 +2024-08-25 12:08:57,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys.whitening_limit, batch_count=75392.0, ans=6.0 +2024-08-25 12:09:00,131 INFO [train.py:1114] (2/4) Epoch 6, batch 1700, loss[loss=0.2442, simple_loss=0.2858, pruned_loss=0.07321, ctc_loss=0.1403, over 19646.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3207, pruned_loss=0.08802, ctc_loss=0.1658, over 3847042.32 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 32.0 +2024-08-25 12:09:02,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=75445.33333333333, ans=0.0 +2024-08-25 12:09:20,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=75498.66666666667, ans=0.0 +2024-08-25 12:09:20,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=75498.66666666667, ans=10.0 +2024-08-25 12:09:37,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75605.33333333333, ans=0.0 +2024-08-25 12:09:37,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=75605.33333333333, ans=0.125 +2024-08-25 12:09:55,935 INFO [train.py:1114] (2/4) Epoch 6, batch 1750, loss[loss=0.2421, simple_loss=0.2762, pruned_loss=0.07497, ctc_loss=0.1455, over 19618.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3198, pruned_loss=0.08748, ctc_loss=0.1648, over 3852057.66 frames. 
], batch size: 45, lr: 2.33e-02, grad_scale: 16.0 +2024-08-25 12:10:08,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=75765.33333333333, ans=0.0 +2024-08-25 12:10:10,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75765.33333333333, ans=0.125 +2024-08-25 12:10:13,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=75765.33333333333, ans=0.125 +2024-08-25 12:10:32,754 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.890e+02 2.130e+02 2.587e+02 4.262e+02, threshold=4.260e+02, percent-clipped=0.0 +2024-08-25 12:10:35,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=75872.0, ans=0.0 +2024-08-25 12:10:38,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=75925.33333333333, ans=0.0 +2024-08-25 12:10:41,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75925.33333333333, ans=0.1 +2024-08-25 12:10:50,378 INFO [train.py:1114] (2/4) Epoch 6, batch 1800, loss[loss=0.3008, simple_loss=0.3378, pruned_loss=0.09652, ctc_loss=0.1772, over 19612.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3198, pruned_loss=0.08725, ctc_loss=0.1644, over 3854282.85 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 8.0 +2024-08-25 12:11:03,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76032.0, ans=0.1 +2024-08-25 12:11:13,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=76085.33333333333, ans=0.0 +2024-08-25 12:11:21,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=76085.33333333333, ans=0.2 +2024-08-25 12:11:35,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.73 vs. limit=10.0 +2024-08-25 12:11:36,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76192.0, ans=0.1 +2024-08-25 12:11:44,880 INFO [train.py:1114] (2/4) Epoch 6, batch 1850, loss[loss=0.2926, simple_loss=0.3356, pruned_loss=0.09107, ctc_loss=0.1688, over 19589.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3193, pruned_loss=0.08725, ctc_loss=0.1644, over 3857785.01 frames. 
], batch size: 57, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:11:45,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=76245.33333333333, ans=0.0 +2024-08-25 12:12:01,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=76298.66666666667, ans=0.125 +2024-08-25 12:12:06,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=76352.0, ans=0.0 +2024-08-25 12:12:22,229 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.994e+02 2.285e+02 2.712e+02 4.413e+02, threshold=4.569e+02, percent-clipped=2.0 +2024-08-25 12:12:43,373 INFO [train.py:1114] (2/4) Epoch 6, batch 1900, loss[loss=0.303, simple_loss=0.342, pruned_loss=0.09536, ctc_loss=0.1831, over 19662.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3195, pruned_loss=0.08687, ctc_loss=0.1636, over 3862819.37 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:12:53,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76565.33333333333, ans=0.0 +2024-08-25 12:13:02,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=76565.33333333333, ans=0.2 +2024-08-25 12:13:04,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=76618.66666666667, ans=0.0 +2024-08-25 12:13:05,574 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.38 vs. limit=15.0 +2024-08-25 12:13:07,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=76618.66666666667, ans=0.125 +2024-08-25 12:13:11,182 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.38 vs. limit=15.0 +2024-08-25 12:13:40,540 INFO [train.py:1114] (2/4) Epoch 6, batch 1950, loss[loss=0.2702, simple_loss=0.3098, pruned_loss=0.08335, ctc_loss=0.1595, over 19603.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3203, pruned_loss=0.08675, ctc_loss=0.1635, over 3871571.98 frames. 
], batch size: 52, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:13:49,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=76778.66666666667, ans=0.2 +2024-08-25 12:14:04,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=76885.33333333333, ans=0.125 +2024-08-25 12:14:11,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76885.33333333333, ans=0.1 +2024-08-25 12:14:13,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=76938.66666666667, ans=0.125 +2024-08-25 12:14:15,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76938.66666666667, ans=0.1 +2024-08-25 12:14:18,626 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 1.890e+02 2.137e+02 2.349e+02 3.743e+02, threshold=4.275e+02, percent-clipped=0.0 +2024-08-25 12:14:35,984 INFO [train.py:1114] (2/4) Epoch 6, batch 2000, loss[loss=0.2617, simple_loss=0.2892, pruned_loss=0.08493, ctc_loss=0.1608, over 19653.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3208, pruned_loss=0.08736, ctc_loss=0.1646, over 3855460.36 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:14:38,373 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:14:39,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=77045.33333333333, ans=0.0 +2024-08-25 12:14:49,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.16 vs. limit=15.0 +2024-08-25 12:15:28,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77258.66666666667, ans=0.1 +2024-08-25 12:15:30,091 INFO [train.py:1114] (2/4) Epoch 6, batch 2050, loss[loss=0.2523, simple_loss=0.2929, pruned_loss=0.0767, ctc_loss=0.1457, over 19722.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.32, pruned_loss=0.08765, ctc_loss=0.1649, over 3851218.18 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:15:44,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=77312.0, ans=0.125 +2024-08-25 12:15:56,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=77365.33333333333, ans=0.125 +2024-08-25 12:15:59,899 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=15.0 +2024-08-25 12:16:00,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=77418.66666666667, ans=0.07 +2024-08-25 12:16:14,687 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.955e+02 2.380e+02 2.986e+02 1.021e+03, threshold=4.760e+02, percent-clipped=7.0 +2024-08-25 12:16:17,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.23 vs. 
limit=15.0 +2024-08-25 12:16:32,240 INFO [train.py:1114] (2/4) Epoch 6, batch 2100, loss[loss=0.2725, simple_loss=0.3147, pruned_loss=0.08303, ctc_loss=0.1608, over 19787.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3192, pruned_loss=0.08697, ctc_loss=0.1636, over 3858067.76 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:16:37,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=77578.66666666667, ans=0.0 +2024-08-25 12:16:53,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=77685.33333333333, ans=0.0 +2024-08-25 12:17:07,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77738.66666666667, ans=0.0 +2024-08-25 12:17:28,069 INFO [train.py:1114] (2/4) Epoch 6, batch 2150, loss[loss=0.2863, simple_loss=0.3202, pruned_loss=0.09269, ctc_loss=0.1674, over 19851.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3188, pruned_loss=0.08674, ctc_loss=0.1629, over 3868909.89 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:17:38,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77898.66666666667, ans=0.125 +2024-08-25 12:17:45,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=77898.66666666667, ans=0.0 +2024-08-25 12:18:07,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.08 vs. limit=15.0 +2024-08-25 12:18:17,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-08-25 12:18:19,486 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.877e+02 2.258e+02 2.799e+02 6.726e+02, threshold=4.515e+02, percent-clipped=2.0 +2024-08-25 12:18:25,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=78058.66666666667, ans=12.0 +2024-08-25 12:19:01,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=78058.66666666667, ans=0.07 +2024-08-25 12:19:06,971 INFO [train.py:1114] (2/4) Epoch 6, batch 2200, loss[loss=0.2865, simple_loss=0.332, pruned_loss=0.08719, ctc_loss=0.1664, over 19581.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3186, pruned_loss=0.08672, ctc_loss=0.1627, over 3867615.28 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:19:20,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=78165.33333333333, ans=0.0 +2024-08-25 12:19:26,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.11 vs. limit=10.0 +2024-08-25 12:19:28,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=78218.66666666667, ans=0.025 +2024-08-25 12:20:02,383 INFO [train.py:1114] (2/4) Epoch 6, batch 2250, loss[loss=0.2441, simple_loss=0.3026, pruned_loss=0.06713, ctc_loss=0.1284, over 19622.00 frames. 
], tot_loss[loss=0.2789, simple_loss=0.3188, pruned_loss=0.08686, ctc_loss=0.1631, over 3866433.00 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:20:14,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0 +2024-08-25 12:20:15,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=78432.0, ans=0.025 +2024-08-25 12:20:36,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78538.66666666667, ans=0.125 +2024-08-25 12:20:38,633 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 2.005e+02 2.234e+02 2.581e+02 4.325e+02, threshold=4.468e+02, percent-clipped=0.0 +2024-08-25 12:20:42,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=78538.66666666667, ans=0.125 +2024-08-25 12:20:44,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78592.0, ans=0.1 +2024-08-25 12:20:56,304 INFO [train.py:1114] (2/4) Epoch 6, batch 2300, loss[loss=0.2481, simple_loss=0.2906, pruned_loss=0.07506, ctc_loss=0.1389, over 19498.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3174, pruned_loss=0.08617, ctc_loss=0.1617, over 3860656.19 frames. ], batch size: 49, lr: 2.29e-02, grad_scale: 16.0 +2024-08-25 12:20:59,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=78645.33333333333, ans=0.125 +2024-08-25 12:21:04,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78645.33333333333, ans=0.1 +2024-08-25 12:21:18,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=78752.0, ans=0.125 +2024-08-25 12:21:22,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=78752.0, ans=0.025 +2024-08-25 12:21:45,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=78858.66666666667, ans=0.2 +2024-08-25 12:21:52,649 INFO [train.py:1114] (2/4) Epoch 6, batch 2350, loss[loss=0.3017, simple_loss=0.343, pruned_loss=0.09596, ctc_loss=0.1714, over 19700.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.318, pruned_loss=0.08672, ctc_loss=0.1625, over 3863792.78 frames. ], batch size: 63, lr: 2.29e-02, grad_scale: 16.0 +2024-08-25 12:21:59,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78912.0, ans=0.1 +2024-08-25 12:22:26,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.00 vs. 
limit=15.0 +2024-08-25 12:22:28,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=79072.0, ans=0.2 +2024-08-25 12:22:30,287 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 2.097e+02 2.553e+02 3.084e+02 6.792e+02, threshold=5.106e+02, percent-clipped=2.0 +2024-08-25 12:22:44,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.89 vs. limit=22.5 +2024-08-25 12:22:46,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.82 vs. limit=15.0 +2024-08-25 12:22:47,979 INFO [train.py:1114] (2/4) Epoch 6, batch 2400, loss[loss=0.284, simple_loss=0.3308, pruned_loss=0.08641, ctc_loss=0.1608, over 19254.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3204, pruned_loss=0.08752, ctc_loss=0.1638, over 3857246.83 frames. ], batch size: 71, lr: 2.29e-02, grad_scale: 32.0 +2024-08-25 12:23:15,364 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:23:35,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.54 vs. limit=15.0 +2024-08-25 12:23:45,690 INFO [train.py:1114] (2/4) Epoch 6, batch 2450, loss[loss=0.3147, simple_loss=0.3289, pruned_loss=0.1088, ctc_loss=0.207, over 13151.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.3256, pruned_loss=0.09189, ctc_loss=0.1724, over 3729667.10 frames. ], batch size: 140, lr: 2.29e-02, grad_scale: 32.0 +2024-08-25 12:23:48,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79445.33333333333, ans=0.125 +2024-08-25 12:23:49,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79445.33333333333, ans=0.125 +2024-08-25 12:24:14,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=79498.66666666667, ans=0.0 +2024-08-25 12:26:11,531 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.78 vs. limit=22.5 +2024-08-25 12:26:37,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=8.09 vs. limit=12.0 +2024-08-25 12:27:38,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=79605.33333333333, ans=0.0 +2024-08-25 12:27:38,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=79605.33333333333, ans=0.2 +2024-08-25 12:28:01,634 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.548e+02 2.056e+02 2.291e+02 2.526e+02 5.572e+02, threshold=4.582e+02, percent-clipped=1.0 +2024-08-25 12:28:10,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.82 vs. limit=15.0 +2024-08-25 12:29:27,615 INFO [train.py:1114] (2/4) Epoch 7, batch 0, loss[loss=0.2845, simple_loss=0.3123, pruned_loss=0.09362, ctc_loss=0.1739, over 19811.00 frames. 
], tot_loss[loss=0.2845, simple_loss=0.3123, pruned_loss=0.09362, ctc_loss=0.1739, over 19811.00 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 32.0
+2024-08-25 12:29:27,615 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 12:29:44,290 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.2269, simple_loss=0.307, pruned_loss=0.05393, ctc_loss=0.0975, over 944034.00 frames.
+2024-08-25 12:29:44,290 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB
+2024-08-25 12:29:45,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=79658.66666666667, ans=10.0
+2024-08-25 12:29:45,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=79658.66666666667, ans=0.04949747468305833
+2024-08-25 12:29:45,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.09 vs. limit=22.5
+2024-08-25 12:29:47,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79658.66666666667, ans=0.1
+2024-08-25 12:30:05,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=79658.66666666667, ans=0.0
+2024-08-25 12:30:24,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79712.0, ans=0.125
+2024-08-25 12:31:10,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=79765.33333333333, ans=0.125
+2024-08-25 12:33:04,672 INFO [train.py:1114] (2/4) Epoch 7, batch 50, loss[loss=0.2518, simple_loss=0.295, pruned_loss=0.07432, ctc_loss=0.1498, over 19711.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3232, pruned_loss=0.08843, ctc_loss=0.1663, over 843578.70 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0
+2024-08-25 12:33:07,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=79925.33333333333, ans=0.125
+2024-08-25 12:33:19,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=13.98 vs. limit=12.0
+2024-08-25 12:33:43,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=20.20 vs. limit=22.5
+2024-08-25 12:33:57,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=80085.33333333333, ans=0.125
+2024-08-25 12:34:17,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.570e+02 1.999e+02 2.246e+02 2.808e+02 5.514e+02, threshold=4.492e+02, percent-clipped=3.0
+2024-08-25 12:34:24,281 INFO [train.py:1114] (2/4) Epoch 7, batch 100, loss[loss=0.2408, simple_loss=0.2945, pruned_loss=0.06666, ctc_loss=0.1342, over 19722.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3228, pruned_loss=0.08866, ctc_loss=0.1669, over 1498623.17 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:34:30,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=80192.0, ans=0.04949747468305833
+2024-08-25 12:34:54,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=80298.66666666667, ans=0.0
+2024-08-25 12:35:06,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.08 vs. limit=22.5
+2024-08-25 12:35:13,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=80405.33333333333, ans=0.025
+2024-08-25 12:35:23,290 INFO [train.py:1114] (2/4) Epoch 7, batch 150, loss[loss=0.2655, simple_loss=0.3002, pruned_loss=0.08394, ctc_loss=0.1574, over 19710.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3197, pruned_loss=0.08668, ctc_loss=0.1635, over 2027456.19 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:35:23,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=80458.66666666667, ans=0.125
+2024-08-25 12:35:24,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.37 vs. limit=8.0
+2024-08-25 12:35:34,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=80458.66666666667, ans=0.025
+2024-08-25 12:35:36,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.66 vs. limit=15.0
+2024-08-25 12:35:38,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80512.0, ans=0.1
+2024-08-25 12:35:38,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=12.0
+2024-08-25 12:35:40,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.95 vs. limit=22.5
+2024-08-25 12:36:15,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=80672.0, ans=0.125
+2024-08-25 12:36:18,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.959e+02 2.217e+02 2.953e+02 5.735e+02, threshold=4.434e+02, percent-clipped=2.0
+2024-08-25 12:36:22,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=80672.0, ans=0.07
+2024-08-25 12:36:26,003 INFO [train.py:1114] (2/4) Epoch 7, batch 200, loss[loss=0.3542, simple_loss=0.3656, pruned_loss=0.1247, ctc_loss=0.2333, over 18253.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3176, pruned_loss=0.08547, ctc_loss=0.1609, over 2436140.00 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:36:29,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=80725.33333333333, ans=0.125
+2024-08-25 12:36:37,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=80778.66666666667, ans=0.125
+2024-08-25 12:36:50,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=80832.0, ans=0.0
+2024-08-25 12:36:53,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=80832.0, ans=0.125
+2024-08-25 12:37:03,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80885.33333333333, ans=0.1
+2024-08-25 12:37:04,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=80885.33333333333, ans=0.125
+2024-08-25 12:37:04,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0
+2024-08-25 12:37:19,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=80938.66666666667, ans=0.125
+2024-08-25 12:37:22,879 INFO [train.py:1114] (2/4) Epoch 7, batch 250, loss[loss=0.3292, simple_loss=0.3559, pruned_loss=0.1108, ctc_loss=0.2024, over 19380.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3169, pruned_loss=0.08478, ctc_loss=0.1597, over 2756169.51 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0
+2024-08-25 12:38:16,698 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.901e+02 2.294e+02 2.833e+02 4.254e+02, threshold=4.587e+02, percent-clipped=0.0
+2024-08-25 12:38:21,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.48 vs. limit=15.0
+2024-08-25 12:38:23,364 INFO [train.py:1114] (2/4) Epoch 7, batch 300, loss[loss=0.3232, simple_loss=0.353, pruned_loss=0.1068, ctc_loss=0.1993, over 19535.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3159, pruned_loss=0.08438, ctc_loss=0.1593, over 3001114.53 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0
+2024-08-25 12:38:34,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=81312.0, ans=0.09899494936611666
+2024-08-25 12:38:35,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81312.0, ans=0.125
+2024-08-25 12:38:41,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. limit=10.0
+2024-08-25 12:38:42,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=81312.0, ans=0.125
+2024-08-25 12:38:47,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0
+2024-08-25 12:39:06,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=81418.66666666667, ans=0.05
+2024-08-25 12:39:07,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=81418.66666666667, ans=0.125
+2024-08-25 12:39:44,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=81472.0, ans=0.125
+2024-08-25 12:39:52,684 INFO [train.py:1114] (2/4) Epoch 7, batch 350, loss[loss=0.2622, simple_loss=0.2975, pruned_loss=0.08301, ctc_loss=0.152, over 19740.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3155, pruned_loss=0.08381, ctc_loss=0.1582, over 3191443.24 frames. ], batch size: 48, lr: 2.12e-02, grad_scale: 32.0
+2024-08-25 12:40:01,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=81525.33333333333, ans=0.1
+2024-08-25 12:40:05,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=81578.66666666667, ans=0.125
+2024-08-25 12:40:25,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=81632.0, ans=0.125
+2024-08-25 12:40:43,966 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.980e+02 2.268e+02 2.810e+02 5.782e+02, threshold=4.535e+02, percent-clipped=1.0
+2024-08-25 12:40:45,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=81738.66666666667, ans=0.125
+2024-08-25 12:40:50,666 INFO [train.py:1114] (2/4) Epoch 7, batch 400, loss[loss=0.256, simple_loss=0.3114, pruned_loss=0.07327, ctc_loss=0.1352, over 19495.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.315, pruned_loss=0.0835, ctc_loss=0.1575, over 3343247.64 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 32.0
+2024-08-25 12:40:52,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=81792.0, ans=0.125
+2024-08-25 12:41:00,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=81792.0, ans=0.125
+2024-08-25 12:41:03,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81845.33333333333, ans=0.125
+2024-08-25 12:41:09,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=81845.33333333333, ans=0.0
+2024-08-25 12:41:18,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81898.66666666667, ans=0.1
+2024-08-25 12:41:22,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.73 vs. limit=15.0
+2024-08-25 12:41:52,351 INFO [train.py:1114] (2/4) Epoch 7, batch 450, loss[loss=0.2554, simple_loss=0.3104, pruned_loss=0.0725, ctc_loss=0.1383, over 19607.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3154, pruned_loss=0.08387, ctc_loss=0.1581, over 3450420.32 frames. ], batch size: 55, lr: 2.11e-02, grad_scale: 32.0
+2024-08-25 12:42:01,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82058.66666666667, ans=0.1
+2024-08-25 12:42:08,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=82112.0, ans=0.125
+2024-08-25 12:42:23,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=82165.33333333333, ans=0.125
+2024-08-25 12:42:26,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.89 vs. limit=22.5
+2024-08-25 12:42:43,144 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 1.947e+02 2.448e+02 2.960e+02 4.262e+02, threshold=4.896e+02, percent-clipped=0.0
+2024-08-25 12:42:51,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.57 vs. limit=22.5
+2024-08-25 12:42:52,049 INFO [train.py:1114] (2/4) Epoch 7, batch 500, loss[loss=0.3172, simple_loss=0.3429, pruned_loss=0.1059, ctc_loss=0.1989, over 19666.00 frames. ], tot_loss[loss=0.2727, simple_loss=0.3151, pruned_loss=0.08364, ctc_loss=0.1575, over 3546182.01 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 32.0
+2024-08-25 12:43:02,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=82325.33333333333, ans=0.2
+2024-08-25 12:43:02,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.37 vs. limit=22.5
+2024-08-25 12:43:07,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=8.51 vs. limit=12.0
+2024-08-25 12:43:12,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=82378.66666666667, ans=0.125
+2024-08-25 12:43:16,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=82432.0, ans=0.0
+2024-08-25 12:43:17,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82432.0, ans=0.125
+2024-08-25 12:43:45,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=82538.66666666667, ans=0.0
+2024-08-25 12:43:45,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82538.66666666667, ans=0.125
+2024-08-25 12:43:51,834 INFO [train.py:1114] (2/4) Epoch 7, batch 550, loss[loss=0.3026, simple_loss=0.3413, pruned_loss=0.09491, ctc_loss=0.1853, over 19277.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3154, pruned_loss=0.08411, ctc_loss=0.1584, over 3608003.59 frames. ], batch size: 71, lr: 2.11e-02, grad_scale: 32.0
+2024-08-25 12:44:04,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82645.33333333333, ans=0.125
+2024-08-25 12:44:35,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=82752.0, ans=0.0
+2024-08-25 12:44:44,962 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 2.000e+02 2.364e+02 2.910e+02 5.356e+02, threshold=4.728e+02, percent-clipped=1.0
+2024-08-25 12:44:52,599 INFO [train.py:1114] (2/4) Epoch 7, batch 600, loss[loss=0.3198, simple_loss=0.3495, pruned_loss=0.1068, ctc_loss=0.1916, over 19349.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3162, pruned_loss=0.08418, ctc_loss=0.1584, over 3666110.17 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0
+2024-08-25 12:44:59,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=82858.66666666667, ans=0.0
+2024-08-25 12:45:02,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=82858.66666666667, ans=0.0
+2024-08-25 12:45:12,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=82912.0, ans=0.2
+2024-08-25 12:45:28,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=83018.66666666667, ans=22.5
+2024-08-25 12:45:38,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=83072.0, ans=0.125
+2024-08-25 12:45:46,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=83072.0, ans=0.0
+2024-08-25 12:45:51,130 INFO [train.py:1114] (2/4) Epoch 7, batch 650, loss[loss=0.291, simple_loss=0.334, pruned_loss=0.08936, ctc_loss=0.1729, over 19768.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3153, pruned_loss=0.08383, ctc_loss=0.1578, over 3716263.92 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:46:02,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=83178.66666666667, ans=0.125
+2024-08-25 12:46:09,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.01 vs. limit=22.5
+2024-08-25 12:46:32,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=83285.33333333333, ans=0.025
+2024-08-25 12:46:47,177 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.844e+02 2.004e+02 2.285e+02 4.065e+02, threshold=4.009e+02, percent-clipped=0.0
+2024-08-25 12:46:52,901 INFO [train.py:1114] (2/4) Epoch 7, batch 700, loss[loss=0.2352, simple_loss=0.2894, pruned_loss=0.0652, ctc_loss=0.1266, over 19700.00 frames. ], tot_loss[loss=0.2731, simple_loss=0.3152, pruned_loss=0.08387, ctc_loss=0.1579, over 3748949.27 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:47:07,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=83445.33333333333, ans=0.125
+2024-08-25 12:47:13,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=83445.33333333333, ans=0.125
+2024-08-25 12:47:13,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=83445.33333333333, ans=0.125
+2024-08-25 12:47:21,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83498.66666666667, ans=0.1
+2024-08-25 12:47:49,583 INFO [train.py:1114] (2/4) Epoch 7, batch 750, loss[loss=0.2957, simple_loss=0.335, pruned_loss=0.09344, ctc_loss=0.1737, over 19515.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3143, pruned_loss=0.08326, ctc_loss=0.1567, over 3774137.13 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:48:12,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=83712.0, ans=0.5
+2024-08-25 12:48:26,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=83818.66666666667, ans=0.125
+2024-08-25 12:48:45,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.565e+02 1.885e+02 2.166e+02 2.690e+02 4.534e+02, threshold=4.331e+02, percent-clipped=3.0
+2024-08-25 12:48:50,697 INFO [train.py:1114] (2/4) Epoch 7, batch 800, loss[loss=0.2385, simple_loss=0.2879, pruned_loss=0.06835, ctc_loss=0.1311, over 19792.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3139, pruned_loss=0.08327, ctc_loss=0.1566, over 3796585.84 frames. ], batch size: 49, lr: 2.10e-02, grad_scale: 32.0
+2024-08-25 12:48:58,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.74 vs. limit=22.5
+2024-08-25 12:48:59,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=83925.33333333333, ans=0.025
+2024-08-25 12:49:01,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0
+2024-08-25 12:49:17,128 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 12:49:25,712 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.37 vs. limit=22.5
+2024-08-25 12:49:39,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=12.0
+2024-08-25 12:49:40,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=84138.66666666667, ans=0.025
+2024-08-25 12:49:42,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84138.66666666667, ans=0.125
+2024-08-25 12:49:45,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84138.66666666667, ans=0.1
+2024-08-25 12:49:45,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=84138.66666666667, ans=0.125
+2024-08-25 12:49:48,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=84138.66666666667, ans=0.05
+2024-08-25 12:49:49,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.65 vs. limit=22.5
+2024-08-25 12:49:51,358 INFO [train.py:1114] (2/4) Epoch 7, batch 850, loss[loss=0.272, simple_loss=0.3224, pruned_loss=0.08153, ctc_loss=0.1462, over 19644.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3129, pruned_loss=0.08228, ctc_loss=0.1547, over 3815446.47 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0
+2024-08-25 12:49:52,932 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0
+2024-08-25 12:50:11,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=15.0
+2024-08-25 12:50:15,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84298.66666666667, ans=0.0
+2024-08-25 12:50:20,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=84298.66666666667, ans=0.0
+2024-08-25 12:50:25,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=84352.0, ans=0.0
+2024-08-25 12:50:34,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0
+2024-08-25 12:50:43,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.470e+02 1.946e+02 2.270e+02 2.825e+02 4.143e+02, threshold=4.540e+02, percent-clipped=0.0
+2024-08-25 12:50:47,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=84405.33333333333, ans=0.125
+2024-08-25 12:50:49,134 INFO [train.py:1114] (2/4) Epoch 7, batch 900, loss[loss=0.2712, simple_loss=0.3067, pruned_loss=0.08663, ctc_loss=0.1562, over 19388.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3128, pruned_loss=0.08211, ctc_loss=0.1543, over 3818886.99 frames. ], batch size: 48, lr: 2.09e-02, grad_scale: 32.0
+2024-08-25 12:51:02,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=84458.66666666667, ans=0.0
+2024-08-25 12:51:20,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84512.0, ans=0.1
+2024-08-25 12:51:21,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.63 vs. limit=22.5
+2024-08-25 12:51:55,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.76 vs. limit=22.5
+2024-08-25 12:52:01,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84672.0, ans=0.0
+2024-08-25 12:52:04,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=84725.33333333333, ans=0.05
+2024-08-25 12:52:05,349 INFO [train.py:1114] (2/4) Epoch 7, batch 950, loss[loss=0.2373, simple_loss=0.285, pruned_loss=0.06898, ctc_loss=0.1293, over 19514.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3125, pruned_loss=0.08172, ctc_loss=0.1537, over 3819380.73 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 16.0
+2024-08-25 12:52:06,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84725.33333333333, ans=0.1
+2024-08-25 12:52:10,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=84725.33333333333, ans=0.2
+2024-08-25 12:52:30,446 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 12:52:30,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84832.0, ans=0.125
+2024-08-25 12:52:35,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=84832.0, ans=0.125
+2024-08-25 12:52:53,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=84938.66666666667, ans=10.0
+2024-08-25 12:52:59,189 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.065e+02 2.373e+02 2.949e+02 1.128e+03, threshold=4.746e+02, percent-clipped=6.0
+2024-08-25 12:53:05,293 INFO [train.py:1114] (2/4) Epoch 7, batch 1000, loss[loss=0.2456, simple_loss=0.3001, pruned_loss=0.06883, ctc_loss=0.1334, over 19845.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.314, pruned_loss=0.08272, ctc_loss=0.1553, over 3815233.65 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:53:15,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0
+2024-08-25 12:53:46,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.80 vs. limit=15.0
+2024-08-25 12:53:47,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=85152.0, ans=0.0
+2024-08-25 12:53:49,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.83 vs. limit=15.0
+2024-08-25 12:54:05,111 INFO [train.py:1114] (2/4) Epoch 7, batch 1050, loss[loss=0.2614, simple_loss=0.3088, pruned_loss=0.07822, ctc_loss=0.144, over 19839.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3131, pruned_loss=0.0822, ctc_loss=0.1542, over 3822145.97 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:54:05,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=85258.66666666667, ans=0.0
+2024-08-25 12:54:13,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85258.66666666667, ans=0.125
+2024-08-25 12:54:17,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.98 vs. limit=22.5
+2024-08-25 12:54:30,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=85365.33333333333, ans=0.04949747468305833
+2024-08-25 12:54:30,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=15.0
+2024-08-25 12:54:44,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=85418.66666666667, ans=0.025
+2024-08-25 12:54:46,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.43 vs. limit=22.5
+2024-08-25 12:54:52,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85472.0, ans=0.125
+2024-08-25 12:54:54,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=85472.0, ans=0.2
+2024-08-25 12:55:01,667 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.423e+02 1.918e+02 2.325e+02 2.776e+02 4.591e+02, threshold=4.650e+02, percent-clipped=1.0
+2024-08-25 12:55:06,561 INFO [train.py:1114] (2/4) Epoch 7, batch 1100, loss[loss=0.2653, simple_loss=0.3026, pruned_loss=0.08327, ctc_loss=0.1537, over 19583.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3126, pruned_loss=0.08211, ctc_loss=0.1541, over 3830826.14 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:55:14,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85525.33333333333, ans=0.1
+2024-08-25 12:55:22,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=85578.66666666667, ans=0.015
+2024-08-25 12:55:27,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=85578.66666666667, ans=0.125
+2024-08-25 12:55:51,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85685.33333333333, ans=0.125
+2024-08-25 12:56:05,756 INFO [train.py:1114] (2/4) Epoch 7, batch 1150, loss[loss=0.2723, simple_loss=0.3014, pruned_loss=0.08952, ctc_loss=0.1603, over 19580.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3129, pruned_loss=0.08224, ctc_loss=0.1543, over 3830840.94 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:56:14,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85792.0, ans=0.1
+2024-08-25 12:56:15,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0
+2024-08-25 12:56:25,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85845.33333333333, ans=0.1
+2024-08-25 12:56:33,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.36 vs. limit=15.0
+2024-08-25 12:56:34,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=85898.66666666667, ans=0.05
+2024-08-25 12:56:38,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=85898.66666666667, ans=0.025
+2024-08-25 12:56:43,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85952.0, ans=0.125
+2024-08-25 12:56:58,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86005.33333333333, ans=0.1
+2024-08-25 12:57:02,979 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.959e+02 2.167e+02 2.666e+02 4.946e+02, threshold=4.335e+02, percent-clipped=2.0
+2024-08-25 12:57:03,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=86005.33333333333, ans=0.0
+2024-08-25 12:57:07,708 INFO [train.py:1114] (2/4) Epoch 7, batch 1200, loss[loss=0.2819, simple_loss=0.3277, pruned_loss=0.08491, ctc_loss=0.1659, over 19829.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3142, pruned_loss=0.08293, ctc_loss=0.1558, over 3825576.42 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 12:57:48,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.78 vs. limit=15.0
+2024-08-25 12:57:50,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.32 vs. limit=8.0
+2024-08-25 12:57:50,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=86218.66666666667, ans=0.025
+2024-08-25 12:58:05,922 INFO [train.py:1114] (2/4) Epoch 7, batch 1250, loss[loss=0.281, simple_loss=0.3224, pruned_loss=0.08779, ctc_loss=0.16, over 19526.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3148, pruned_loss=0.08289, ctc_loss=0.1556, over 3843045.10 frames. ], batch size: 61, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 12:58:07,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=86325.33333333333, ans=0.125
+2024-08-25 12:58:24,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=86378.66666666667, ans=0.025
+2024-08-25 12:58:48,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.96 vs. limit=15.0
+2024-08-25 12:58:55,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=86538.66666666667, ans=0.04949747468305833
+2024-08-25 12:58:59,737 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 12:59:02,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.964e+02 2.304e+02 2.729e+02 5.465e+02, threshold=4.608e+02, percent-clipped=2.0
+2024-08-25 12:59:05,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.22 vs. limit=22.5
+2024-08-25 12:59:07,507 INFO [train.py:1114] (2/4) Epoch 7, batch 1300, loss[loss=0.3072, simple_loss=0.3416, pruned_loss=0.09971, ctc_loss=0.1834, over 18934.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3136, pruned_loss=0.08213, ctc_loss=0.1542, over 3846312.24 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 12:59:13,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=86592.0, ans=0.125
+2024-08-25 12:59:18,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=86645.33333333333, ans=0.125
+2024-08-25 12:59:19,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=86645.33333333333, ans=0.125
+2024-08-25 12:59:31,681 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 12:59:44,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=86752.0, ans=0.2
+2024-08-25 12:59:46,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86752.0, ans=0.1
+2024-08-25 13:00:07,968 INFO [train.py:1114] (2/4) Epoch 7, batch 1350, loss[loss=0.2717, simple_loss=0.3078, pruned_loss=0.08587, ctc_loss=0.1598, over 19768.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3128, pruned_loss=0.08167, ctc_loss=0.1532, over 3856738.72 frames. ], batch size: 54, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 13:00:09,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=86858.66666666667, ans=0.0
+2024-08-25 13:00:23,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=86912.0, ans=0.05
+2024-08-25 13:00:45,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=87018.66666666667, ans=0.125
+2024-08-25 13:00:49,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=87018.66666666667, ans=0.125
+2024-08-25 13:01:52,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=87072.0, ans=0.0
+2024-08-25 13:01:57,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=87072.0, ans=0.125
+2024-08-25 13:01:59,607 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.935e+02 2.309e+02 3.009e+02 4.449e+02, threshold=4.618e+02, percent-clipped=0.0
+2024-08-25 13:02:03,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87125.33333333333, ans=0.1
+2024-08-25 13:02:03,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.36 vs. limit=22.5
+2024-08-25 13:02:04,217 INFO [train.py:1114] (2/4) Epoch 7, batch 1400, loss[loss=0.2375, simple_loss=0.2827, pruned_loss=0.07037, ctc_loss=0.129, over 19666.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3127, pruned_loss=0.08155, ctc_loss=0.153, over 3863622.39 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0
+2024-08-25 13:02:05,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=87125.33333333333, ans=0.0
+2024-08-25 13:02:12,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87125.33333333333, ans=0.125
+2024-08-25 13:02:13,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=87125.33333333333, ans=0.0
+2024-08-25 13:02:14,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=87178.66666666667, ans=0.0
+2024-08-25 13:02:14,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=87178.66666666667, ans=0.5
+2024-08-25 13:02:19,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=87178.66666666667, ans=0.125
+2024-08-25 13:02:21,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=87178.66666666667, ans=0.125
+2024-08-25 13:02:32,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=87232.0, ans=0.0
+2024-08-25 13:02:37,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=87232.0, ans=0.125
+2024-08-25 13:02:57,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=87338.66666666667, ans=0.125
+2024-08-25 13:03:05,391 INFO [train.py:1114] (2/4) Epoch 7, batch 1450, loss[loss=0.2671, simple_loss=0.3136, pruned_loss=0.07905, ctc_loss=0.1562, over 19690.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.314, pruned_loss=0.08238, ctc_loss=0.1546, over 3861288.08 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 16.0
+2024-08-25 13:03:31,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=87498.66666666667, ans=0.0
+2024-08-25 13:03:33,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.89 vs. limit=15.0
+2024-08-25 13:03:34,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=87498.66666666667, ans=0.95
+2024-08-25 13:04:46,558 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 2.015e+02 2.285e+02 2.716e+02 4.465e+02, threshold=4.569e+02, percent-clipped=0.0
+2024-08-25 13:04:50,184 INFO [train.py:1114] (2/4) Epoch 7, batch 1500, loss[loss=0.2986, simple_loss=0.3332, pruned_loss=0.09486, ctc_loss=0.1859, over 19598.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3139, pruned_loss=0.08212, ctc_loss=0.1543, over 3861141.13 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 16.0
+2024-08-25 13:04:57,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.64 vs. limit=15.0
+2024-08-25 13:05:12,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=87712.0, ans=0.125
+2024-08-25 13:05:17,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=87712.0, ans=0.025
+2024-08-25 13:05:26,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.85 vs. limit=15.0
+2024-08-25 13:05:29,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=87765.33333333333, ans=0.025
+2024-08-25 13:05:57,430 INFO [train.py:1114] (2/4) Epoch 7, batch 1550, loss[loss=0.2392, simple_loss=0.2978, pruned_loss=0.06563, ctc_loss=0.1233, over 19605.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3139, pruned_loss=0.08217, ctc_loss=0.1546, over 3846994.56 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 16.0
+2024-08-25 13:06:00,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=87925.33333333333, ans=0.125
+2024-08-25 13:06:12,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.49 vs. limit=15.0
+2024-08-25 13:06:17,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=87978.66666666667, ans=0.125
+2024-08-25 13:06:22,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.20 vs. limit=22.5
+2024-08-25 13:06:45,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.59 vs. limit=15.0
+2024-08-25 13:06:46,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=88138.66666666667, ans=0.0
+2024-08-25 13:06:55,903 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.554e+02 1.880e+02 2.225e+02 2.757e+02 4.141e+02, threshold=4.451e+02, percent-clipped=0.0
+2024-08-25 13:07:00,957 INFO [train.py:1114] (2/4) Epoch 7, batch 1600, loss[loss=0.2382, simple_loss=0.3022, pruned_loss=0.06386, ctc_loss=0.1163, over 19855.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3131, pruned_loss=0.0817, ctc_loss=0.1537, over 3836156.02 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:07:01,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.97 vs. limit=15.0
+2024-08-25 13:07:10,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=88192.0, ans=0.125
+2024-08-25 13:07:10,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0
+2024-08-25 13:07:35,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=88352.0, ans=0.0
+2024-08-25 13:07:44,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=88352.0, ans=0.2
+2024-08-25 13:07:47,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=88405.33333333333, ans=10.0
+2024-08-25 13:07:58,853 INFO [train.py:1114] (2/4) Epoch 7, batch 1650, loss[loss=0.2673, simple_loss=0.3142, pruned_loss=0.07986, ctc_loss=0.1518, over 19662.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3129, pruned_loss=0.08175, ctc_loss=0.1538, over 3832636.10 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:08:06,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.81 vs. limit=15.0
+2024-08-25 13:08:34,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.42 vs. limit=15.0
+2024-08-25 13:08:54,962 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.917e+02 2.131e+02 2.729e+02 4.248e+02, threshold=4.261e+02, percent-clipped=0.0
+2024-08-25 13:08:58,413 INFO [train.py:1114] (2/4) Epoch 7, batch 1700, loss[loss=0.247, simple_loss=0.2878, pruned_loss=0.07432, ctc_loss=0.1441, over 19706.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3124, pruned_loss=0.08139, ctc_loss=0.153, over 3847235.14 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:09:08,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=88725.33333333333, ans=0.0
+2024-08-25 13:09:14,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=88778.66666666667, ans=0.125
+2024-08-25 13:09:23,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0
+2024-08-25 13:09:27,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=88832.0, ans=0.125
+2024-08-25 13:09:47,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=88938.66666666667, ans=0.125
+2024-08-25 13:09:55,059 INFO [train.py:1114] (2/4) Epoch 7, batch 1750, loss[loss=0.2247, simple_loss=0.2764, pruned_loss=0.06302, ctc_loss=0.1176, over 19661.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3116, pruned_loss=0.08084, ctc_loss=0.1522, over 3851361.92 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:17:19,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89098.66666666667, ans=0.125
+2024-08-25 13:17:19,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=89098.66666666667, ans=0.125
+2024-08-25 13:17:24,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89098.66666666667, ans=0.1
+2024-08-25 13:17:36,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=89152.0, ans=0.0
+2024-08-25 13:25:10,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89205.33333333333, ans=0.1
+2024-08-25 13:29:44,220 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.972e+02 2.344e+02 2.828e+02 4.449e+02, threshold=4.688e+02, percent-clipped=1.0
+2024-08-25 13:29:47,702 INFO [train.py:1114] (2/4) Epoch 7, batch 1800, loss[loss=0.2591, simple_loss=0.317, pruned_loss=0.07361, ctc_loss=0.1348, over 19615.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3119, pruned_loss=0.08077, ctc_loss=0.152, over 3853077.02 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0
+2024-08-25 13:36:55,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=89365.33333333333, ans=0.2
+2024-08-25 13:37:26,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0
+2024-08-25 13:38:00,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=89365.33333333333, ans=0.125
+2024-08-25 13:39:06,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=89418.66666666667, ans=0.125
+2024-08-25 13:39:29,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=89472.0, ans=0.125
+2024-08-25 13:39:35,207 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0
+2024-08-25 13:39:43,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=89472.0, ans=15.0
+2024-08-25 13:40:34,835 INFO [train.py:1114] (2/4) Epoch 7, batch 1850, loss[loss=0.2615, simple_loss=0.3223, pruned_loss=0.07346, ctc_loss=0.1342, over 19598.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3116, pruned_loss=0.08076, ctc_loss=0.1521, over 3856867.39 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 32.0
+2024-08-25 13:42:29,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89578.66666666667, ans=0.125
+2024-08-25 13:42:30,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=89632.0, ans=0.125
+2024-08-25 13:43:18,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89685.33333333333, ans=0.125
+2024-08-25 13:44:00,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=89738.66666666667, ans=0.0
+2024-08-25 13:44:01,300 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.852e+02 2.070e+02 2.397e+02 4.608e+02, threshold=4.140e+02, percent-clipped=0.0
+2024-08-25 13:44:01,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=89738.66666666667, ans=0.0
+2024-08-25 13:44:05,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=89792.0, ans=0.2
+2024-08-25 13:44:07,979 INFO [train.py:1114] (2/4) Epoch 7, batch 1900, loss[loss=0.2524, simple_loss=0.315, pruned_loss=0.06741, ctc_loss=0.1373, over 19692.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3121, pruned_loss=0.08086, ctc_loss=0.1521, over 3861026.33 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0
+2024-08-25 13:44:09,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=89792.0, ans=0.0
+2024-08-25 13:44:09,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.09 vs. limit=15.0
+2024-08-25 13:45:01,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=89898.66666666667, ans=0.2
+2024-08-25 13:45:21,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.14 vs. limit=15.0
+2024-08-25 13:45:33,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=90005.33333333333, ans=0.125
+2024-08-25 13:45:39,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=90058.66666666667, ans=0.0
+2024-08-25 13:45:41,191 INFO [train.py:1114] (2/4) Epoch 7, batch 1950, loss[loss=0.2692, simple_loss=0.3106, pruned_loss=0.08288, ctc_loss=0.1554, over 19590.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3134, pruned_loss=0.08138, ctc_loss=0.1532, over 3870001.50 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 16.0
+2024-08-25 13:45:54,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=90058.66666666667, ans=0.07
+2024-08-25 13:45:56,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=90112.0, ans=0.0
+2024-08-25 13:46:10,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90165.33333333333, ans=0.0
+2024-08-25 13:46:29,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=90218.66666666667, ans=0.125
+2024-08-25 13:46:38,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=90272.0, ans=0.125
+2024-08-25 13:46:39,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=90272.0, ans=0.09899494936611666
+2024-08-25 13:46:41,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.66 vs. limit=15.0
+2024-08-25 13:46:42,775 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 1.896e+02 2.177e+02 2.703e+02 3.964e+02, threshold=4.354e+02, percent-clipped=0.0
+2024-08-25 13:46:45,048 INFO [train.py:1114] (2/4) Epoch 7, batch 2000, loss[loss=0.2505, simple_loss=0.2845, pruned_loss=0.07945, ctc_loss=0.1442, over 19660.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3137, pruned_loss=0.08171, ctc_loss=0.1534, over 3854536.84 frames. ], batch size: 45, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:46:52,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90325.33333333333, ans=0.1
+2024-08-25 13:47:07,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.75 vs. limit=15.0
+2024-08-25 13:47:24,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=90485.33333333333, ans=0.025
+2024-08-25 13:47:26,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=90485.33333333333, ans=0.125
+2024-08-25 13:47:36,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=90538.66666666667, ans=0.125
+2024-08-25 13:47:41,009 INFO [train.py:1114] (2/4) Epoch 7, batch 2050, loss[loss=0.2319, simple_loss=0.273, pruned_loss=0.06969, ctc_loss=0.1284, over 19706.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3122, pruned_loss=0.08112, ctc_loss=0.1522, over 3850158.93 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:47:44,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=90592.0, ans=0.0
+2024-08-25 13:47:46,643 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 13:47:57,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=12.0
+2024-08-25 13:48:09,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=90698.66666666667, ans=0.0
+2024-08-25 13:48:16,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=90752.0, ans=0.125
+2024-08-25 13:48:27,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90805.33333333333, ans=0.1
+2024-08-25 13:48:35,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=90805.33333333333, ans=0.125
+2024-08-25 13:48:36,335 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.053e+02 2.413e+02 3.017e+02 5.203e+02, threshold=4.827e+02, percent-clipped=2.0
+2024-08-25 13:48:36,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=90805.33333333333, ans=0.0
+2024-08-25 13:48:38,600 INFO [train.py:1114] (2/4) Epoch 7, batch 2100, loss[loss=0.2708, simple_loss=0.3194, pruned_loss=0.08063, ctc_loss=0.1523, over 19772.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3117, pruned_loss=0.08091, ctc_loss=0.1517, over 3857497.22 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:49:00,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90912.0, ans=0.125
+2024-08-25 13:49:01,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90912.0, ans=0.1
+2024-08-25 13:49:08,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90965.33333333333, ans=0.1
+2024-08-25 13:49:22,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=91018.66666666667, ans=0.125
+2024-08-25 13:49:25,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.87 vs. limit=15.0
+2024-08-25 13:49:33,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91072.0, ans=0.1
+2024-08-25 13:49:35,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=91072.0, ans=0.125
+2024-08-25 13:49:43,235 INFO [train.py:1114] (2/4) Epoch 7, batch 2150, loss[loss=0.2486, simple_loss=0.2953, pruned_loss=0.07305, ctc_loss=0.1393, over 19856.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3107, pruned_loss=0.08043, ctc_loss=0.1508, over 3868550.33 frames. ], batch size: 52, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:49:43,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=91125.33333333333, ans=0.125
+2024-08-25 13:49:51,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=91125.33333333333, ans=0.0
+2024-08-25 13:50:04,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=91232.0, ans=0.0
+2024-08-25 13:50:05,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91232.0, ans=0.125
+2024-08-25 13:50:07,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=91232.0, ans=0.0
+2024-08-25 13:50:21,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91285.33333333333, ans=0.1
+2024-08-25 13:50:36,448 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.440e+02 1.920e+02 2.200e+02 2.924e+02 5.090e+02, threshold=4.400e+02, percent-clipped=1.0
+2024-08-25 13:50:39,128 INFO [train.py:1114] (2/4) Epoch 7, batch 2200, loss[loss=0.2646, simple_loss=0.3177, pruned_loss=0.07534, ctc_loss=0.1522, over 19579.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3107, pruned_loss=0.08037, ctc_loss=0.1506, over 3867474.02 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0
+2024-08-25 13:50:39,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91392.0, ans=0.125
+2024-08-25 13:51:22,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=91552.0, ans=0.0
+2024-08-25 13:51:22,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=91552.0, ans=0.0
+2024-08-25 13:51:22,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=91552.0, ans=0.0
+2024-08-25 13:51:31,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.51 vs. limit=22.5
+2024-08-25 13:51:34,988 INFO [train.py:1114] (2/4) Epoch 7, batch 2250, loss[loss=0.2613, simple_loss=0.3115, pruned_loss=0.07694, ctc_loss=0.1434, over 19606.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3109, pruned_loss=0.08052, ctc_loss=0.1509, over 3867046.13 frames. ], batch size: 55, lr: 2.02e-02, grad_scale: 16.0
+2024-08-25 13:51:42,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0
+2024-08-25 13:51:43,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=91658.66666666667, ans=0.0
+2024-08-25 13:51:43,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=91658.66666666667, ans=0.025
+2024-08-25 13:51:54,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=91712.0, ans=0.0
+2024-08-25 13:51:56,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0
+2024-08-25 13:52:22,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=91872.0, ans=0.0
+2024-08-25 13:52:25,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=91872.0, ans=0.07
+2024-08-25 13:52:28,398 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 2.146e+02 2.677e+02 3.204e+02 4.930e+02, threshold=5.354e+02, percent-clipped=3.0
+2024-08-25 13:52:29,555 INFO [train.py:1114] (2/4) Epoch 7, batch 2300, loss[loss=0.2475, simple_loss=0.2935, pruned_loss=0.07346, ctc_loss=0.1364, over 19512.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3106, pruned_loss=0.08092, ctc_loss=0.1516, over 3860740.89 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 16.0
+2024-08-25 13:52:36,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0
+2024-08-25 13:52:43,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.88 vs. limit=22.5
+2024-08-25 13:52:59,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92032.0, ans=0.1
+2024-08-25 13:53:14,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92138.66666666667, ans=0.1
+2024-08-25 13:53:24,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.55 vs. limit=22.5
+2024-08-25 13:53:25,163 INFO [train.py:1114] (2/4) Epoch 7, batch 2350, loss[loss=0.2983, simple_loss=0.3382, pruned_loss=0.09478, ctc_loss=0.1721, over 19682.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3111, pruned_loss=0.08151, ctc_loss=0.1528, over 3863305.38 frames. ], batch size: 63, lr: 2.02e-02, grad_scale: 16.0
+2024-08-25 13:53:25,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=92192.0, ans=0.0
+2024-08-25 13:53:28,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.05 vs. limit=15.0
+2024-08-25 13:53:30,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.45 vs. limit=15.0
+2024-08-25 13:53:45,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=92298.66666666667, ans=0.0
+2024-08-25 13:53:48,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=92298.66666666667, ans=0.125
+2024-08-25 13:54:00,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=92352.0, ans=0.0
+2024-08-25 13:54:08,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=12.0
+2024-08-25 13:54:18,221 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.474e+02 1.985e+02 2.336e+02 2.802e+02 4.974e+02, threshold=4.671e+02, percent-clipped=0.0
+2024-08-25 13:54:19,268 INFO [train.py:1114] (2/4) Epoch 7, batch 2400, loss[loss=0.2687, simple_loss=0.3214, pruned_loss=0.07856, ctc_loss=0.1472, over 19256.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3133, pruned_loss=0.08224, ctc_loss=0.1538, over 3857578.70 frames. ], batch size: 71, lr: 2.01e-02, grad_scale: 32.0
+2024-08-25 13:54:33,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=92512.0, ans=22.5
+2024-08-25 13:54:45,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=92565.33333333333, ans=0.125
+2024-08-25 13:55:47,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.30 vs. limit=10.0
+2024-08-25 13:55:50,097 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 13:56:09,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=92672.0, ans=0.0
+2024-08-25 13:56:11,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92672.0, ans=0.125
+2024-08-25 13:56:13,555 INFO [train.py:1114] (2/4) Epoch 7, batch 2450, loss[loss=0.3821, simple_loss=0.3762, pruned_loss=0.1428, ctc_loss=0.2561, over 13041.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3181, pruned_loss=0.08607, ctc_loss=0.1616, over 3730281.85 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 32.0
+2024-08-25 13:56:53,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=92832.0, ans=0.0
+2024-08-25 13:57:54,288 INFO [train.py:1114] (2/4) Epoch 8, batch 0, loss[loss=0.2416, simple_loss=0.2875, pruned_loss=0.07149, ctc_loss=0.1321, over 19399.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2875, pruned_loss=0.07149, ctc_loss=0.1321, over 19399.00 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 32.0
+2024-08-25 13:57:54,289 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 13:59:56,298 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.2171, simple_loss=0.2997, pruned_loss=0.04948, ctc_loss=0.08904, over 944034.00 frames.
+2024-08-25 13:59:56,299 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB
+2024-08-25 13:59:56,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.81 vs.
limit=15.0 +2024-08-25 14:01:03,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.719e+02 2.158e+02 2.483e+02 2.902e+02 5.180e+02, threshold=4.965e+02, percent-clipped=2.0 +2024-08-25 14:01:11,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=92986.66666666667, ans=0.2 +2024-08-25 14:01:43,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=93040.0, ans=0.125 +2024-08-25 14:02:12,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=93146.66666666667, ans=0.09899494936611666 +2024-08-25 14:02:16,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.33 vs. limit=22.5 +2024-08-25 14:02:17,063 INFO [train.py:1114] (2/4) Epoch 8, batch 50, loss[loss=0.212, simple_loss=0.2734, pruned_loss=0.05481, ctc_loss=0.1024, over 19716.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3135, pruned_loss=0.08193, ctc_loss=0.1552, over 843278.58 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:02:20,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=93200.0, ans=0.125 +2024-08-25 14:02:24,583 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-08-25 14:02:30,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93253.33333333333, ans=0.1 +2024-08-25 14:02:54,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=93360.0, ans=0.125 +2024-08-25 14:02:59,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=93360.0, ans=0.125 +2024-08-25 14:05:00,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=93413.33333333333, ans=0.015 +2024-08-25 14:05:03,235 INFO [train.py:1114] (2/4) Epoch 8, batch 100, loss[loss=0.2577, simple_loss=0.3111, pruned_loss=0.07395, ctc_loss=0.1409, over 19700.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.3157, pruned_loss=0.08214, ctc_loss=0.1556, over 1498192.96 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:05:14,933 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.910e+02 2.219e+02 2.660e+02 5.043e+02, threshold=4.439e+02, percent-clipped=1.0 +2024-08-25 14:05:29,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=93573.33333333333, ans=0.0 +2024-08-25 14:05:48,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=93626.66666666667, ans=0.025 +2024-08-25 14:07:11,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=15.0 +2024-08-25 14:07:16,377 INFO [train.py:1114] (2/4) Epoch 8, batch 150, loss[loss=0.248, simple_loss=0.2911, pruned_loss=0.07479, ctc_loss=0.1385, over 19729.00 frames. 
], tot_loss[loss=0.2658, simple_loss=0.3118, pruned_loss=0.07986, ctc_loss=0.1503, over 2026296.86 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-25 14:08:13,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.54 vs. limit=8.0 +2024-08-25 14:08:14,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93786.66666666667, ans=0.125 +2024-08-25 14:08:14,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93786.66666666667, ans=0.1 +2024-08-25 14:09:23,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=93893.33333333333, ans=0.125 +2024-08-25 14:09:25,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=93893.33333333333, ans=0.0 +2024-08-25 14:10:16,285 INFO [train.py:1114] (2/4) Epoch 8, batch 200, loss[loss=0.3218, simple_loss=0.3483, pruned_loss=0.1085, ctc_loss=0.1962, over 18115.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3096, pruned_loss=0.07878, ctc_loss=0.1485, over 2433444.82 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:10:29,225 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.854e+02 2.093e+02 2.544e+02 5.078e+02, threshold=4.187e+02, percent-clipped=1.0 +2024-08-25 14:10:34,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=94053.33333333333, ans=0.0 +2024-08-25 14:10:46,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=94106.66666666667, ans=0.125 +2024-08-25 14:10:57,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.21 vs. limit=15.0 +2024-08-25 14:11:07,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=94213.33333333333, ans=0.0 +2024-08-25 14:11:17,834 INFO [train.py:1114] (2/4) Epoch 8, batch 250, loss[loss=0.2921, simple_loss=0.3294, pruned_loss=0.09414, ctc_loss=0.1664, over 19420.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3097, pruned_loss=0.07875, ctc_loss=0.1483, over 2753964.23 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:11:18,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=94266.66666666667, ans=0.125 +2024-08-25 14:12:17,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=94266.66666666667, ans=0.125 +2024-08-25 14:13:12,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94480.0, ans=0.125 +2024-08-25 14:13:21,901 INFO [train.py:1114] (2/4) Epoch 8, batch 300, loss[loss=0.313, simple_loss=0.3414, pruned_loss=0.1031, ctc_loss=0.1958, over 19526.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3101, pruned_loss=0.07928, ctc_loss=0.1492, over 2999223.72 frames. 
], batch size: 61, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:13:33,345 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.508e+02 1.987e+02 2.340e+02 3.022e+02 6.047e+02, threshold=4.681e+02, percent-clipped=9.0 +2024-08-25 14:14:19,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=94640.0, ans=0.125 +2024-08-25 14:14:21,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=94640.0, ans=0.0 +2024-08-25 14:14:27,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0 +2024-08-25 14:14:28,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94693.33333333333, ans=0.125 +2024-08-25 14:14:32,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=94693.33333333333, ans=0.2 +2024-08-25 14:14:41,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=94746.66666666667, ans=0.0 +2024-08-25 14:14:52,105 INFO [train.py:1114] (2/4) Epoch 8, batch 350, loss[loss=0.244, simple_loss=0.2835, pruned_loss=0.07255, ctc_loss=0.1484, over 19759.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3093, pruned_loss=0.07825, ctc_loss=0.1473, over 3189915.13 frames. ], batch size: 48, lr: 1.88e-02, grad_scale: 32.0 +2024-08-25 14:15:03,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=94853.33333333333, ans=0.125 +2024-08-25 14:16:28,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94960.0, ans=0.125 +2024-08-25 14:16:39,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=95013.33333333333, ans=0.125 +2024-08-25 14:16:50,822 INFO [train.py:1114] (2/4) Epoch 8, batch 400, loss[loss=0.2681, simple_loss=0.3147, pruned_loss=0.08002, ctc_loss=0.1537, over 19503.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3081, pruned_loss=0.07733, ctc_loss=0.1456, over 3341611.21 frames. ], batch size: 54, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:16:53,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=95066.66666666667, ans=0.0 +2024-08-25 14:16:58,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=95066.66666666667, ans=0.0 +2024-08-25 14:17:03,860 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.620e+02 2.019e+02 2.528e+02 3.132e+02 5.852e+02, threshold=5.056e+02, percent-clipped=7.0 +2024-08-25 14:17:07,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95120.0, ans=0.1 +2024-08-25 14:17:32,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=95226.66666666667, ans=0.2 +2024-08-25 14:18:38,406 INFO [train.py:1114] (2/4) Epoch 8, batch 450, loss[loss=0.2655, simple_loss=0.3104, pruned_loss=0.08006, ctc_loss=0.151, over 19616.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3084, pruned_loss=0.07789, ctc_loss=0.1466, over 3450604.61 frames. 
], batch size: 55, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:18:43,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=95333.33333333333, ans=0.125 +2024-08-25 14:18:43,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95333.33333333333, ans=0.1 +2024-08-25 14:18:45,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=95333.33333333333, ans=0.125 +2024-08-25 14:18:52,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=95386.66666666667, ans=0.125 +2024-08-25 14:19:10,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=95440.0, ans=0.125 +2024-08-25 14:19:15,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.30 vs. limit=12.0 +2024-08-25 14:19:16,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95493.33333333333, ans=0.125 +2024-08-25 14:19:19,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=95493.33333333333, ans=0.2 +2024-08-25 14:19:19,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=95493.33333333333, ans=0.125 +2024-08-25 14:19:25,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=95493.33333333333, ans=0.125 +2024-08-25 14:19:39,033 INFO [train.py:1114] (2/4) Epoch 8, batch 500, loss[loss=0.2805, simple_loss=0.3252, pruned_loss=0.08674, ctc_loss=0.1558, over 19651.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3079, pruned_loss=0.07772, ctc_loss=0.1463, over 3546424.92 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:19:52,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95600.0, ans=0.125 +2024-08-25 14:21:42,068 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.925e+02 2.242e+02 2.655e+02 4.786e+02, threshold=4.483e+02, percent-clipped=0.0 +2024-08-25 14:21:44,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=95653.33333333333, ans=0.125 +2024-08-25 14:21:58,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=95706.66666666667, ans=0.125 +2024-08-25 14:22:36,071 INFO [train.py:1114] (2/4) Epoch 8, batch 550, loss[loss=0.279, simple_loss=0.3232, pruned_loss=0.08496, ctc_loss=0.1624, over 19348.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3084, pruned_loss=0.07841, ctc_loss=0.1477, over 3609011.82 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:22:41,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=95866.66666666667, ans=0.025 +2024-08-25 14:23:55,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.77 vs. 
limit=15.0 +2024-08-25 14:24:04,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=95920.0, ans=0.09899494936611666 +2024-08-25 14:25:24,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=96026.66666666667, ans=0.0 +2024-08-25 14:25:43,178 INFO [train.py:1114] (2/4) Epoch 8, batch 600, loss[loss=0.2854, simple_loss=0.3257, pruned_loss=0.089, ctc_loss=0.168, over 19422.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3078, pruned_loss=0.07782, ctc_loss=0.1467, over 3667148.39 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0 +2024-08-25 14:25:54,322 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.517e+02 1.975e+02 2.461e+02 2.998e+02 6.685e+02, threshold=4.922e+02, percent-clipped=2.0 +2024-08-25 14:26:05,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=96186.66666666667, ans=0.0 +2024-08-25 14:26:31,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=96293.33333333333, ans=0.2 +2024-08-25 14:29:19,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=96346.66666666667, ans=0.2 +2024-08-25 14:29:20,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96346.66666666667, ans=0.1 +2024-08-25 14:29:23,583 INFO [train.py:1114] (2/4) Epoch 8, batch 650, loss[loss=0.2806, simple_loss=0.3218, pruned_loss=0.08705, ctc_loss=0.1631, over 19771.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3071, pruned_loss=0.07736, ctc_loss=0.1461, over 3717737.71 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:29:35,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=96453.33333333333, ans=0.125 +2024-08-25 14:31:06,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=96613.33333333333, ans=0.2 +2024-08-25 14:31:24,402 INFO [train.py:1114] (2/4) Epoch 8, batch 700, loss[loss=0.2604, simple_loss=0.3039, pruned_loss=0.07856, ctc_loss=0.1496, over 19715.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3081, pruned_loss=0.07759, ctc_loss=0.1468, over 3749215.44 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:31:31,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.76 vs. limit=15.0 +2024-08-25 14:31:36,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.482e+02 1.952e+02 2.228e+02 2.907e+02 4.140e+02, threshold=4.456e+02, percent-clipped=0.0 +2024-08-25 14:31:36,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=96720.0, ans=0.125 +2024-08-25 14:31:52,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.26 vs. 
limit=22.5 +2024-08-25 14:31:57,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=96773.33333333333, ans=0.2 +2024-08-25 14:32:21,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.08 vs. limit=10.0 +2024-08-25 14:32:26,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=96880.0, ans=0.125 +2024-08-25 14:32:35,099 INFO [train.py:1114] (2/4) Epoch 8, batch 750, loss[loss=0.2894, simple_loss=0.3323, pruned_loss=0.08849, ctc_loss=0.1736, over 19518.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3078, pruned_loss=0.0773, ctc_loss=0.1462, over 3775297.67 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:32:38,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=96933.33333333333, ans=0.125 +2024-08-25 14:32:57,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=15.0 +2024-08-25 14:33:05,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97040.0, ans=0.0 +2024-08-25 14:33:08,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=97040.0, ans=0.2 +2024-08-25 14:33:08,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=97040.0, ans=0.0 +2024-08-25 14:33:32,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=97093.33333333333, ans=0.0 +2024-08-25 14:33:41,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=97146.66666666667, ans=0.035 +2024-08-25 14:33:41,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=97146.66666666667, ans=0.125 +2024-08-25 14:33:44,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=97200.0, ans=0.125 +2024-08-25 14:33:45,429 INFO [train.py:1114] (2/4) Epoch 8, batch 800, loss[loss=0.211, simple_loss=0.2703, pruned_loss=0.05474, ctc_loss=0.1056, over 19412.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3072, pruned_loss=0.07699, ctc_loss=0.1457, over 3795967.75 frames. ], batch size: 48, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:34:35,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.511e+02 1.855e+02 2.176e+02 2.933e+02 4.905e+02, threshold=4.353e+02, percent-clipped=3.0 +2024-08-25 14:34:40,295 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:34:49,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97306.66666666667, ans=0.1 +2024-08-25 14:35:01,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.58 vs. 
limit=15.0 +2024-08-25 14:35:09,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=97413.33333333333, ans=0.125 +2024-08-25 14:35:22,241 INFO [train.py:1114] (2/4) Epoch 8, batch 850, loss[loss=0.2815, simple_loss=0.3273, pruned_loss=0.08392, ctc_loss=0.1697, over 19658.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.307, pruned_loss=0.07704, ctc_loss=0.1453, over 3815049.30 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:35:38,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=97520.0, ans=0.0 +2024-08-25 14:35:54,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=97573.33333333333, ans=0.125 +2024-08-25 14:35:55,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=97626.66666666667, ans=0.125 +2024-08-25 14:35:58,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=97626.66666666667, ans=0.125 +2024-08-25 14:36:05,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=97626.66666666667, ans=0.0 +2024-08-25 14:36:10,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97680.0, ans=0.125 +2024-08-25 14:36:12,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97680.0, ans=0.1 +2024-08-25 14:36:15,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=97680.0, ans=0.0 +2024-08-25 14:36:19,810 INFO [train.py:1114] (2/4) Epoch 8, batch 900, loss[loss=0.2898, simple_loss=0.3149, pruned_loss=0.09547, ctc_loss=0.1843, over 19796.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3071, pruned_loss=0.07736, ctc_loss=0.1455, over 3819189.68 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:38:24,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=97733.33333333333, ans=0.015 +2024-08-25 14:38:30,482 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.590e+02 1.935e+02 2.327e+02 2.780e+02 5.034e+02, threshold=4.654e+02, percent-clipped=2.0 +2024-08-25 14:39:05,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=97893.33333333333, ans=0.0 +2024-08-25 14:39:10,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97946.66666666667, ans=0.1 +2024-08-25 14:39:56,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=97946.66666666667, ans=0.0 +2024-08-25 14:40:01,392 INFO [train.py:1114] (2/4) Epoch 8, batch 950, loss[loss=0.2269, simple_loss=0.2795, pruned_loss=0.06344, ctc_loss=0.1188, over 19477.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3082, pruned_loss=0.0783, ctc_loss=0.1473, over 3820856.06 frames. 
], batch size: 49, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:40:08,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=98000.0, ans=6.0 +2024-08-25 14:40:16,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=98053.33333333333, ans=0.125 +2024-08-25 14:41:22,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=98106.66666666667, ans=0.0 +2024-08-25 14:43:16,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=98213.33333333333, ans=0.125 +2024-08-25 14:43:18,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=98213.33333333333, ans=0.125 +2024-08-25 14:43:19,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98213.33333333333, ans=0.125 +2024-08-25 14:43:23,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.13 vs. limit=6.0 +2024-08-25 14:43:26,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=98213.33333333333, ans=0.025 +2024-08-25 14:43:29,288 INFO [train.py:1114] (2/4) Epoch 8, batch 1000, loss[loss=0.2173, simple_loss=0.2772, pruned_loss=0.05722, ctc_loss=0.1073, over 19843.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3089, pruned_loss=0.07886, ctc_loss=0.1481, over 3817543.07 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:43:29,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=98266.66666666667, ans=0.0 +2024-08-25 14:43:31,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.53 vs. limit=10.0 +2024-08-25 14:43:37,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=98266.66666666667, ans=0.125 +2024-08-25 14:43:37,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=98266.66666666667, ans=0.07 +2024-08-25 14:43:47,365 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 2.014e+02 2.465e+02 3.304e+02 4.205e+02, threshold=4.930e+02, percent-clipped=0.0 +2024-08-25 14:43:47,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=98320.0, ans=0.025 +2024-08-25 14:46:09,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. 
limit=6.0 +2024-08-25 14:46:19,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=98426.66666666667, ans=0.0 +2024-08-25 14:46:30,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=98480.0, ans=0.2 +2024-08-25 14:46:35,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98480.0, ans=0.1 +2024-08-25 14:46:44,426 INFO [train.py:1114] (2/4) Epoch 8, batch 1050, loss[loss=0.2676, simple_loss=0.3102, pruned_loss=0.07958, ctc_loss=0.1645, over 19822.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3078, pruned_loss=0.07801, ctc_loss=0.1467, over 3824746.11 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:46:45,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98533.33333333333, ans=0.1 +2024-08-25 14:46:59,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=98586.66666666667, ans=0.015 +2024-08-25 14:47:21,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98693.33333333333, ans=0.125 +2024-08-25 14:47:32,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=98746.66666666667, ans=0.025 +2024-08-25 14:47:42,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=98746.66666666667, ans=0.125 +2024-08-25 14:47:44,604 INFO [train.py:1114] (2/4) Epoch 8, batch 1100, loss[loss=0.2536, simple_loss=0.3052, pruned_loss=0.07268, ctc_loss=0.1415, over 19583.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3075, pruned_loss=0.07771, ctc_loss=0.1465, over 3831219.00 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:47:53,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=98800.0, ans=0.125 +2024-08-25 14:47:58,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98853.33333333333, ans=0.125 +2024-08-25 14:48:13,768 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.814e+02 2.071e+02 2.620e+02 3.682e+02, threshold=4.142e+02, percent-clipped=0.0 +2024-08-25 14:48:36,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98853.33333333333, ans=0.125 +2024-08-25 14:48:37,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98853.33333333333, ans=0.1 +2024-08-25 14:49:11,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.82 vs. limit=22.5 +2024-08-25 14:49:53,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99013.33333333333, ans=0.125 +2024-08-25 14:50:00,857 INFO [train.py:1114] (2/4) Epoch 8, batch 1150, loss[loss=0.2466, simple_loss=0.2974, pruned_loss=0.07087, ctc_loss=0.1352, over 19609.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3074, pruned_loss=0.07764, ctc_loss=0.1462, over 3830468.15 frames. 
], batch size: 52, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:51:03,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-08-25 14:51:03,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=99066.66666666667, ans=0.125 +2024-08-25 14:51:05,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.16 vs. limit=10.0 +2024-08-25 14:51:11,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0 +2024-08-25 14:51:26,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=99173.33333333333, ans=0.0 +2024-08-25 14:52:33,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.11 vs. limit=10.0 +2024-08-25 14:52:35,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.65 vs. limit=8.0 +2024-08-25 14:52:48,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=99280.0, ans=0.0 +2024-08-25 14:52:50,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.67 vs. limit=10.0 +2024-08-25 14:52:50,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=99333.33333333333, ans=0.125 +2024-08-25 14:52:51,786 INFO [train.py:1114] (2/4) Epoch 8, batch 1200, loss[loss=0.2825, simple_loss=0.3282, pruned_loss=0.08476, ctc_loss=0.1683, over 19841.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3088, pruned_loss=0.07843, ctc_loss=0.1477, over 3825339.57 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:53:02,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.59 vs. limit=15.0 +2024-08-25 14:53:06,255 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.879e+02 2.149e+02 2.634e+02 4.011e+02, threshold=4.298e+02, percent-clipped=0.0 +2024-08-25 14:53:09,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=99386.66666666667, ans=0.0 +2024-08-25 14:53:25,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99440.0, ans=0.125 +2024-08-25 14:53:29,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99493.33333333333, ans=0.1 +2024-08-25 14:53:52,368 INFO [train.py:1114] (2/4) Epoch 8, batch 1250, loss[loss=0.299, simple_loss=0.334, pruned_loss=0.09791, ctc_loss=0.1703, over 19507.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3084, pruned_loss=0.07775, ctc_loss=0.1461, over 3843318.97 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:55:33,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.40 vs. 
limit=15.0 +2024-08-25 14:56:01,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99813.33333333333, ans=0.1 +2024-08-25 14:56:05,545 INFO [train.py:1114] (2/4) Epoch 8, batch 1300, loss[loss=0.2956, simple_loss=0.3326, pruned_loss=0.094, ctc_loss=0.1768, over 18875.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3077, pruned_loss=0.07735, ctc_loss=0.1454, over 3845959.95 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:56:17,010 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.809e+02 2.147e+02 2.747e+02 4.726e+02, threshold=4.293e+02, percent-clipped=4.0 +2024-08-25 14:56:20,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.98 vs. limit=22.5 +2024-08-25 14:56:29,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99973.33333333333, ans=0.1 +2024-08-25 14:57:48,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=99973.33333333333, ans=0.2 +2024-08-25 14:57:50,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=99973.33333333333, ans=0.0 +2024-08-25 14:57:51,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=99973.33333333333, ans=0.2 +2024-08-25 14:58:01,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100026.66666666667, ans=0.125 +2024-08-25 14:58:03,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=100026.66666666667, ans=0.09899494936611666 +2024-08-25 14:58:50,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=100080.0, ans=0.0 +2024-08-25 14:58:53,781 INFO [train.py:1114] (2/4) Epoch 8, batch 1350, loss[loss=0.2659, simple_loss=0.3065, pruned_loss=0.0825, ctc_loss=0.1508, over 19755.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3068, pruned_loss=0.07657, ctc_loss=0.1439, over 3857252.32 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:58:54,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-08-25 14:58:59,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=100133.33333333333, ans=0.0 +2024-08-25 14:59:22,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.55 vs. limit=12.0 +2024-08-25 14:59:22,899 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.69 vs. 
limit=6.0 +2024-08-25 14:59:27,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=100293.33333333333, ans=0.125 +2024-08-25 14:59:40,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=100346.66666666667, ans=10.0 +2024-08-25 14:59:47,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100346.66666666667, ans=0.125 +2024-08-25 14:59:51,900 INFO [train.py:1114] (2/4) Epoch 8, batch 1400, loss[loss=0.2263, simple_loss=0.2771, pruned_loss=0.06346, ctc_loss=0.1213, over 19645.00 frames. ], tot_loss[loss=0.258, simple_loss=0.306, pruned_loss=0.07632, ctc_loss=0.1431, over 3864103.57 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:59:58,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.61 vs. limit=22.5 +2024-08-25 15:00:02,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=100453.33333333333, ans=0.2 +2024-08-25 15:00:03,305 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 2.018e+02 2.600e+02 3.300e+02 7.375e+02, threshold=5.199e+02, percent-clipped=11.0 +2024-08-25 15:00:04,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=100453.33333333333, ans=0.2 +2024-08-25 15:00:07,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=100453.33333333333, ans=0.0 +2024-08-25 15:00:13,035 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 15:00:22,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100506.66666666667, ans=0.1 +2024-08-25 15:00:56,740 INFO [train.py:1114] (2/4) Epoch 8, batch 1450, loss[loss=0.2476, simple_loss=0.3046, pruned_loss=0.06818, ctc_loss=0.1359, over 19681.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3071, pruned_loss=0.07683, ctc_loss=0.1439, over 3862316.08 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:01:02,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=100666.66666666667, ans=0.125 +2024-08-25 15:01:04,915 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 15:01:15,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.35 vs. 
limit=15.0 +2024-08-25 15:01:16,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=100720.0, ans=0.025 +2024-08-25 15:01:21,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=100773.33333333333, ans=0.04949747468305833 +2024-08-25 15:01:37,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=100826.66666666667, ans=0.125 +2024-08-25 15:03:17,729 INFO [train.py:1114] (2/4) Epoch 8, batch 1500, loss[loss=0.2435, simple_loss=0.3, pruned_loss=0.06795, ctc_loss=0.1274, over 19585.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3078, pruned_loss=0.0771, ctc_loss=0.1447, over 3861611.60 frames. ], batch size: 57, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:05:16,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=100933.33333333333, ans=0.125 +2024-08-25 15:05:24,434 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.972e+02 2.271e+02 2.845e+02 5.404e+02, threshold=4.542e+02, percent-clipped=1.0 +2024-08-25 15:07:40,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=100986.66666666667, ans=0.2 +2024-08-25 15:08:28,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=12.0 +2024-08-25 15:09:24,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=101093.33333333333, ans=0.025 +2024-08-25 15:10:05,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=101146.66666666667, ans=0.0 +2024-08-25 15:10:18,911 INFO [train.py:1114] (2/4) Epoch 8, batch 1550, loss[loss=0.2624, simple_loss=0.3141, pruned_loss=0.07691, ctc_loss=0.142, over 19589.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3076, pruned_loss=0.07716, ctc_loss=0.1451, over 3846035.29 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:10:51,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.55 vs. 
limit=10.0 +2024-08-25 15:12:29,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=101253.33333333333, ans=0.125 +2024-08-25 15:12:42,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=101306.66666666667, ans=0.0 +2024-08-25 15:13:04,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101360.0, ans=0.125 +2024-08-25 15:13:08,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101360.0, ans=0.125 +2024-08-25 15:13:17,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=101360.0, ans=0.125 +2024-08-25 15:13:48,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=101413.33333333333, ans=0.0 +2024-08-25 15:13:57,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=101413.33333333333, ans=0.0 +2024-08-25 15:14:07,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101413.33333333333, ans=0.1 +2024-08-25 15:14:11,853 INFO [train.py:1114] (2/4) Epoch 8, batch 1600, loss[loss=0.2635, simple_loss=0.3213, pruned_loss=0.07349, ctc_loss=0.1468, over 19851.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3069, pruned_loss=0.07677, ctc_loss=0.1443, over 3835025.06 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:14:31,968 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.915e+02 2.222e+02 2.696e+02 4.640e+02, threshold=4.444e+02, percent-clipped=1.0 +2024-08-25 15:14:43,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=101520.0, ans=0.0 +2024-08-25 15:14:56,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=101573.33333333333, ans=0.0 +2024-08-25 15:15:14,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=101626.66666666667, ans=0.0 +2024-08-25 15:15:30,467 INFO [train.py:1114] (2/4) Epoch 8, batch 1650, loss[loss=0.2446, simple_loss=0.3035, pruned_loss=0.06712, ctc_loss=0.1285, over 19654.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3067, pruned_loss=0.07689, ctc_loss=0.1446, over 3830497.38 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:15:33,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=101733.33333333333, ans=0.125 +2024-08-25 15:15:35,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=101733.33333333333, ans=0.0 +2024-08-25 15:15:55,567 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.67 vs. 
limit=15.0
[Training log, condensed (rank 2/4, 2024-08-25, 15:16-16:15; original line breaks lost in extraction): per-batch train.py summaries for Epoch 8 run from batch 1700 (tot_loss[loss=0.2581, simple_loss=0.306, pruned_loss=0.07632, ctc_loss=0.1437], lr: 1.82e-02) to batch 2450 (tot_loss[loss=0.2652], lr: 1.79e-02). Epoch 9 opens with a validation pass: loss=0.21, simple_loss=0.2947, pruned_loss=0.04621, ctc_loss=0.08206 over 944034.00 frames; maximum memory allocated so far is 14234MB. Epoch 9 summaries then run from batch 0 (tot_loss[loss=0.2534], lr: 1.69e-02) to batch 1200 (tot_loss[loss=0.2517], lr: 1.65e-02). Interleaved throughout are scaling.py ScheduledFloat snapshots (balancer probs, skip rates, dropout_p), Whitening metrics, WithLoss entries (loss-sum=0.000e+00), and optim.py WARNING lines reporting Clipping_scale=2.0 grad-norm quartiles with clipping thresholds between 4.043e+02 and 5.028e+02.]
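Aside, not part of the log itself: the per-batch train.py records above are regular enough to parse mechanically. Below is a minimal Python sketch that pulls (epoch, batch, tot_loss, lr) out of such a log. It assumes the log's usual one-record-per-line layout and a hypothetical file name (LOG_PATH); the regex mirrors only the record shape visible above, not any official log schema.

```python
import re

# Hypothetical filename; the actual log file in this repo may be named differently.
LOG_PATH = "log-train-2024-08-25.txt"

# Matches per-batch train.py summary records of the shape seen above, e.g.
#   "2024-08-25 16:14:51,130 INFO [train.py:1114] (2/4) Epoch 9, batch 1200,
#    loss[...], tot_loss[loss=0.2517, ...], batch size: 57, lr: 1.65e-02, ..."
# Validation records ("Epoch 9, validation: ...") are intentionally not matched.
RECORD = re.compile(
    r"\[train\.py:\d+\].*?Epoch (?P<epoch>\d+), batch (?P<batch>\d+),"
    r".*?tot_loss\[loss=(?P<loss>[\d.]+)"
    r".*?lr: (?P<lr>[\d.e+-]+)"
)

def training_curve(path=LOG_PATH):
    """Yield (epoch, batch, tot_loss, lr) from per-batch summary records."""
    with open(path) as log:
        for line in log:
            match = RECORD.search(line)
            if match:
                yield (int(match["epoch"]), int(match["batch"]),
                       float(match["loss"]), float(match["lr"]))

if __name__ == "__main__":
    # Print a compact training curve, one point per logged batch summary.
    for epoch, batch, loss, lr in training_curve():
        print(f"epoch {epoch:2d}  batch {batch:5d}  tot_loss {loss:.4f}  lr {lr:.2e}")
```

The tuples this yields can be fed straight into any plotting tool to visualize the slow tot_loss decline and the linear-ish lr decay visible in the surrounding records.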
[Training log, condensed (rank 2/4, 2024-08-25, 16:15-16:29): Epoch 9 per-batch train.py summaries continue from batch 1250 (tot_loss[loss=0.2521], lr: 1.65e-02) to batch 1800 (tot_loss[loss=0.2479], lr: 1.63e-02), with the same interleaved scaling.py and optim.py diagnostics (grad-norm clipping thresholds 3.938e+02 to 5.009e+02).]
+2024-08-25 16:29:25,109 INFO [train.py:1114] (2/4) Epoch 9, batch 1850, loss[loss=0.2424, simple_loss=0.3005, pruned_loss=0.06602, ctc_loss=0.1306, over 19592.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.2988, pruned_loss=0.07037, ctc_loss=0.1327, over 3857004.18 frames.
], batch size: 57, lr: 1.63e-02, grad_scale: 16.0 +2024-08-25 16:29:37,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=116128.0, ans=0.0 +2024-08-25 16:29:41,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.50 vs. limit=22.5 +2024-08-25 16:29:43,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.36 vs. limit=22.5 +2024-08-25 16:30:20,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=116181.33333333333, ans=0.025 +2024-08-25 16:30:44,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=116288.0, ans=0.2 +2024-08-25 16:30:46,797 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 16:30:56,364 INFO [train.py:1114] (2/4) Epoch 9, batch 1900, loss[loss=0.2496, simple_loss=0.312, pruned_loss=0.06844, ctc_loss=0.1259, over 19680.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3002, pruned_loss=0.07124, ctc_loss=0.1341, over 3861462.69 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0 +2024-08-25 16:31:01,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=116341.33333333333, ans=0.125 +2024-08-25 16:32:04,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=116394.66666666667, ans=0.2 +2024-08-25 16:32:16,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116448.0, ans=0.125 +2024-08-25 16:32:21,938 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.810e+02 2.075e+02 2.674e+02 4.757e+02, threshold=4.150e+02, percent-clipped=3.0 +2024-08-25 16:32:24,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=116448.0, ans=0.0 +2024-08-25 16:32:31,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=116501.33333333333, ans=15.0 +2024-08-25 16:33:05,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=116608.0, ans=0.125 +2024-08-25 16:33:06,107 INFO [train.py:1114] (2/4) Epoch 9, batch 1950, loss[loss=0.2626, simple_loss=0.3082, pruned_loss=0.0788, ctc_loss=0.1486, over 19584.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3011, pruned_loss=0.07128, ctc_loss=0.1339, over 3869958.39 frames. 
], batch size: 52, lr: 1.62e-02, grad_scale: 16.0 +2024-08-25 16:33:10,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116608.0, ans=0.125 +2024-08-25 16:33:32,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116714.66666666667, ans=0.125 +2024-08-25 16:33:49,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116768.0, ans=0.125 +2024-08-25 16:34:02,733 INFO [train.py:1114] (2/4) Epoch 9, batch 2000, loss[loss=0.2139, simple_loss=0.2656, pruned_loss=0.06021, ctc_loss=0.1047, over 19662.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.302, pruned_loss=0.0721, ctc_loss=0.1354, over 3854092.86 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:34:30,980 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.555e+02 1.787e+02 2.122e+02 2.673e+02 5.196e+02, threshold=4.245e+02, percent-clipped=10.0 +2024-08-25 16:34:51,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=117088.0, ans=0.0 +2024-08-25 16:34:51,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=117088.0, ans=0.125 +2024-08-25 16:34:59,599 INFO [train.py:1114] (2/4) Epoch 9, batch 2050, loss[loss=0.2139, simple_loss=0.2678, pruned_loss=0.05845, ctc_loss=0.1076, over 19713.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3011, pruned_loss=0.07208, ctc_loss=0.1353, over 3849627.61 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:35:14,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=117194.66666666667, ans=0.0 +2024-08-25 16:35:21,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=117248.0, ans=0.125 +2024-08-25 16:35:26,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=117248.0, ans=0.0 +2024-08-25 16:35:35,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=117301.33333333333, ans=0.0 +2024-08-25 16:36:04,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.08 vs. limit=15.0 +2024-08-25 16:36:11,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=117354.66666666667, ans=0.05 +2024-08-25 16:36:55,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=117354.66666666667, ans=0.2 +2024-08-25 16:36:57,853 INFO [train.py:1114] (2/4) Epoch 9, batch 2100, loss[loss=0.2273, simple_loss=0.2794, pruned_loss=0.06379, ctc_loss=0.1191, over 19747.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.2999, pruned_loss=0.07151, ctc_loss=0.1343, over 3858525.45 frames. 
], batch size: 54, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:37:24,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117461.33333333333, ans=0.125 +2024-08-25 16:37:24,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117461.33333333333, ans=0.125 +2024-08-25 16:37:26,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=117461.33333333333, ans=0.0 +2024-08-25 16:37:36,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=117514.66666666667, ans=0.125 +2024-08-25 16:37:38,958 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.824e+02 2.012e+02 2.446e+02 4.504e+02, threshold=4.025e+02, percent-clipped=2.0 +2024-08-25 16:37:41,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117514.66666666667, ans=0.0 +2024-08-25 16:37:43,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117568.0, ans=0.125 +2024-08-25 16:37:51,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117568.0, ans=0.125 +2024-08-25 16:38:01,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=117621.33333333333, ans=0.035 +2024-08-25 16:38:06,801 INFO [train.py:1114] (2/4) Epoch 9, batch 2150, loss[loss=0.2373, simple_loss=0.2869, pruned_loss=0.06665, ctc_loss=0.1362, over 19882.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.2989, pruned_loss=0.07095, ctc_loss=0.1332, over 3869334.64 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 32.0 +2024-08-25 16:38:09,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. limit=15.0 +2024-08-25 16:38:27,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=117728.0, ans=0.07 +2024-08-25 16:38:30,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=117781.33333333333, ans=10.0 +2024-08-25 16:38:31,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=117781.33333333333, ans=0.0 +2024-08-25 16:38:43,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117834.66666666667, ans=0.0 +2024-08-25 16:38:46,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=117834.66666666667, ans=0.125 +2024-08-25 16:38:47,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=117834.66666666667, ans=0.2 +2024-08-25 16:39:02,681 INFO [train.py:1114] (2/4) Epoch 9, batch 2200, loss[loss=0.2689, simple_loss=0.3206, pruned_loss=0.07815, ctc_loss=0.1521, over 19586.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.2991, pruned_loss=0.07099, ctc_loss=0.1332, over 3867498.81 frames. 
], batch size: 57, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:39:03,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.18 vs. limit=10.0 +2024-08-25 16:39:06,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.29 vs. limit=12.0 +2024-08-25 16:39:30,924 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.840e+02 2.263e+02 2.882e+02 6.553e+02, threshold=4.526e+02, percent-clipped=9.0 +2024-08-25 16:39:37,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.33 vs. limit=15.0 +2024-08-25 16:39:48,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118154.66666666667, ans=0.1 +2024-08-25 16:39:59,969 INFO [train.py:1114] (2/4) Epoch 9, batch 2250, loss[loss=0.2263, simple_loss=0.2903, pruned_loss=0.05929, ctc_loss=0.1094, over 19599.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.2989, pruned_loss=0.07069, ctc_loss=0.1328, over 3866850.68 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 16.0 +2024-08-25 16:40:04,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=118208.0, ans=0.125 +2024-08-25 16:40:04,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=118208.0, ans=0.125 +2024-08-25 16:40:17,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118261.33333333333, ans=0.125 +2024-08-25 16:40:26,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=118314.66666666667, ans=0.125 +2024-08-25 16:40:37,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118368.0, ans=0.1 +2024-08-25 16:40:47,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=118421.33333333333, ans=0.2 +2024-08-25 16:40:54,817 INFO [train.py:1114] (2/4) Epoch 9, batch 2300, loss[loss=0.2097, simple_loss=0.2686, pruned_loss=0.05458, ctc_loss=0.1039, over 19500.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.2989, pruned_loss=0.07125, ctc_loss=0.1338, over 3860346.86 frames. 
], batch size: 49, lr: 1.61e-02, grad_scale: 16.0 +2024-08-25 16:40:56,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=118474.66666666667, ans=0.07 +2024-08-25 16:41:03,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118474.66666666667, ans=0.125 +2024-08-25 16:41:17,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=118581.33333333333, ans=0.2 +2024-08-25 16:41:23,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=118581.33333333333, ans=0.0 +2024-08-25 16:41:24,915 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 1.864e+02 2.265e+02 3.023e+02 5.230e+02, threshold=4.530e+02, percent-clipped=2.0 +2024-08-25 16:41:33,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=118634.66666666667, ans=0.0 +2024-08-25 16:41:33,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=118634.66666666667, ans=0.0 +2024-08-25 16:41:51,064 INFO [train.py:1114] (2/4) Epoch 9, batch 2350, loss[loss=0.2722, simple_loss=0.3206, pruned_loss=0.08243, ctc_loss=0.1471, over 19656.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.2989, pruned_loss=0.07127, ctc_loss=0.1336, over 3863287.06 frames. ], batch size: 63, lr: 1.61e-02, grad_scale: 16.0 +2024-08-25 16:41:55,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118741.33333333333, ans=0.1 +2024-08-25 16:41:55,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=118741.33333333333, ans=0.0 +2024-08-25 16:42:03,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=118794.66666666667, ans=0.0 +2024-08-25 16:42:07,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.45 vs. limit=10.0 +2024-08-25 16:42:30,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118848.0, ans=0.125 +2024-08-25 16:42:50,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=118954.66666666667, ans=0.125 +2024-08-25 16:43:02,778 INFO [train.py:1114] (2/4) Epoch 9, batch 2400, loss[loss=0.2613, simple_loss=0.3102, pruned_loss=0.0778, ctc_loss=0.1422, over 19295.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3016, pruned_loss=0.07241, ctc_loss=0.1355, over 3857793.22 frames. 
], batch size: 71, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:43:06,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=119008.0, ans=0.125 +2024-08-25 16:43:11,865 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 16:43:13,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=119008.0, ans=0.125 +2024-08-25 16:43:17,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=119061.33333333333, ans=0.125 +2024-08-25 16:43:24,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=119061.33333333333, ans=0.0 +2024-08-25 16:43:28,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=119114.66666666667, ans=0.2 +2024-08-25 16:43:31,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=119114.66666666667, ans=0.125 +2024-08-25 16:43:32,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.930e+02 2.301e+02 2.799e+02 4.768e+02, threshold=4.601e+02, percent-clipped=1.0 +2024-08-25 16:43:38,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=119168.0, ans=0.025 +2024-08-25 16:43:39,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=119168.0, ans=0.0 +2024-08-25 16:43:44,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=119168.0, ans=0.2 +2024-08-25 16:43:59,368 INFO [train.py:1114] (2/4) Epoch 9, batch 2450, loss[loss=0.3162, simple_loss=0.3343, pruned_loss=0.1067, ctc_loss=0.2116, over 13407.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.306, pruned_loss=0.07596, ctc_loss=0.1426, over 3728202.43 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:43:59,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=119274.66666666667, ans=0.125 +2024-08-25 16:44:20,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=119381.33333333333, ans=0.125 +2024-08-25 16:44:28,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119381.33333333333, ans=0.1 +2024-08-25 16:44:29,612 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=8.571e-02 +2024-08-25 16:45:25,698 INFO [train.py:1114] (2/4) Epoch 10, batch 0, loss[loss=0.2264, simple_loss=0.2835, pruned_loss=0.06158, ctc_loss=0.1153, over 19806.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.2835, pruned_loss=0.06158, ctc_loss=0.1153, over 19806.00 frames. ], batch size: 49, lr: 1.53e-02, grad_scale: 32.0 +2024-08-25 16:45:25,699 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-25 16:46:37,102 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.2041, simple_loss=0.2903, pruned_loss=0.04356, ctc_loss=0.07708, over 944034.00 frames. 
+2024-08-25 16:46:37,103 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB +2024-08-25 16:46:44,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=12.0 +2024-08-25 16:46:49,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=119536.0, ans=0.125 +2024-08-25 16:46:55,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=119536.0, ans=0.025 +2024-08-25 16:46:56,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=119536.0, ans=0.0 +2024-08-25 16:46:58,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.45 vs. limit=6.0 +2024-08-25 16:47:46,596 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 1.955e+02 2.116e+02 2.362e+02 4.652e+02, threshold=4.231e+02, percent-clipped=1.0 +2024-08-25 16:47:50,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.93 vs. limit=15.0 +2024-08-25 16:48:18,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119696.0, ans=0.125 +2024-08-25 16:48:20,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=119696.0, ans=0.125 +2024-08-25 16:48:28,323 INFO [train.py:1114] (2/4) Epoch 10, batch 50, loss[loss=0.2137, simple_loss=0.2664, pruned_loss=0.05856, ctc_loss=0.1099, over 19753.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3026, pruned_loss=0.07235, ctc_loss=0.1366, over 844913.23 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:49:10,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.48 vs. limit=22.5 +2024-08-25 16:49:17,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=119749.33333333333, ans=0.07 +2024-08-25 16:49:50,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.98 vs. limit=15.0 +2024-08-25 16:50:01,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=119802.66666666667, ans=0.0 +2024-08-25 16:50:41,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.54 vs. limit=22.5 +2024-08-25 16:50:44,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=119909.33333333333, ans=0.025 +2024-08-25 16:50:51,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=119909.33333333333, ans=10.0 +2024-08-25 16:51:10,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.64 vs. 
limit=15.0 +2024-08-25 16:51:25,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=119962.66666666667, ans=0.025 +2024-08-25 16:51:37,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119962.66666666667, ans=0.1 +2024-08-25 16:52:34,138 INFO [train.py:1114] (2/4) Epoch 10, batch 100, loss[loss=0.2242, simple_loss=0.2773, pruned_loss=0.06246, ctc_loss=0.1156, over 19728.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3041, pruned_loss=0.07265, ctc_loss=0.1377, over 1498754.48 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:52:57,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=120069.33333333333, ans=0.0 +2024-08-25 16:53:10,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=120069.33333333333, ans=0.07 +2024-08-25 16:53:24,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.13 vs. limit=22.5 +2024-08-25 16:53:28,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120122.66666666667, ans=0.1 +2024-08-25 16:53:35,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=120176.0, ans=0.04949747468305833 +2024-08-25 16:53:46,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=120176.0, ans=0.0 +2024-08-25 16:53:47,838 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.460e+02 1.798e+02 2.253e+02 2.860e+02 4.134e+02, threshold=4.507e+02, percent-clipped=0.0 +2024-08-25 16:54:47,468 INFO [train.py:1114] (2/4) Epoch 10, batch 150, loss[loss=0.2231, simple_loss=0.2709, pruned_loss=0.06283, ctc_loss=0.1243, over 19717.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.302, pruned_loss=0.07197, ctc_loss=0.1361, over 2027457.93 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:54:58,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-25 16:55:01,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.63 vs. limit=15.0 +2024-08-25 16:55:59,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=120496.0, ans=0.0 +2024-08-25 16:56:01,844 INFO [train.py:1114] (2/4) Epoch 10, batch 200, loss[loss=0.297, simple_loss=0.3344, pruned_loss=0.09328, ctc_loss=0.1829, over 18194.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.2998, pruned_loss=0.07116, ctc_loss=0.1342, over 2435318.47 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:56:19,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.63 vs. 
limit=15.0 +2024-08-25 16:57:34,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=120656.0, ans=0.125 +2024-08-25 16:57:34,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.21 vs. limit=10.0 +2024-08-25 16:58:07,756 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.824e+02 2.064e+02 2.548e+02 6.143e+02, threshold=4.128e+02, percent-clipped=2.0 +2024-08-25 16:58:16,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120762.66666666667, ans=0.1 +2024-08-25 16:58:18,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120762.66666666667, ans=0.125 +2024-08-25 16:58:33,002 INFO [train.py:1114] (2/4) Epoch 10, batch 250, loss[loss=0.2739, simple_loss=0.3213, pruned_loss=0.08282, ctc_loss=0.1522, over 19422.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.2986, pruned_loss=0.07032, ctc_loss=0.1326, over 2755755.23 frames. ], batch size: 67, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:59:07,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=120922.66666666667, ans=0.025 +2024-08-25 16:59:42,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0 +2024-08-25 16:59:53,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=120976.0, ans=0.125 +2024-08-25 16:59:54,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=120976.0, ans=0.04949747468305833 +2024-08-25 17:00:02,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=12.0 +2024-08-25 17:00:08,743 INFO [train.py:1114] (2/4) Epoch 10, batch 300, loss[loss=0.2622, simple_loss=0.312, pruned_loss=0.07772, ctc_loss=0.1426, over 19522.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.2977, pruned_loss=0.06963, ctc_loss=0.1311, over 2999790.65 frames. 
], batch size: 61, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 17:00:13,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121082.66666666667, ans=0.0 +2024-08-25 17:00:22,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=121082.66666666667, ans=0.125 +2024-08-25 17:00:33,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=121136.0, ans=0.125 +2024-08-25 17:01:01,167 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.908e+02 2.186e+02 2.769e+02 4.118e+02, threshold=4.372e+02, percent-clipped=0.0 +2024-08-25 17:01:06,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=121296.0, ans=0.0 +2024-08-25 17:01:11,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=121296.0, ans=0.2 +2024-08-25 17:02:40,279 INFO [train.py:1114] (2/4) Epoch 10, batch 350, loss[loss=0.2296, simple_loss=0.2774, pruned_loss=0.06621, ctc_loss=0.1233, over 19752.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.2977, pruned_loss=0.06953, ctc_loss=0.1307, over 3189245.73 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 17:02:57,698 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.592e-03 +2024-08-25 17:03:07,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121456.0, ans=0.0 +2024-08-25 17:03:21,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. limit=6.0 +2024-08-25 17:03:24,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121509.33333333333, ans=0.1 +2024-08-25 17:03:42,419 INFO [train.py:1114] (2/4) Epoch 10, batch 400, loss[loss=0.229, simple_loss=0.3023, pruned_loss=0.05634, ctc_loss=0.1075, over 19490.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2969, pruned_loss=0.06881, ctc_loss=0.1293, over 3340173.43 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:03:47,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=121616.0, ans=0.025 +2024-08-25 17:03:49,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.70 vs. limit=22.5 +2024-08-25 17:03:53,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.60 vs. 
limit=12.0 +2024-08-25 17:04:23,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121776.0, ans=0.1 +2024-08-25 17:04:28,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=121776.0, ans=0.025 +2024-08-25 17:04:33,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.409e+02 1.874e+02 2.151e+02 2.761e+02 4.102e+02, threshold=4.302e+02, percent-clipped=0.0 +2024-08-25 17:04:34,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=121776.0, ans=0.0 +2024-08-25 17:04:50,468 INFO [train.py:1114] (2/4) Epoch 10, batch 450, loss[loss=0.2551, simple_loss=0.3137, pruned_loss=0.071, ctc_loss=0.1365, over 19622.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.298, pruned_loss=0.06957, ctc_loss=0.1306, over 3449928.55 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:07:43,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121989.33333333333, ans=0.125 +2024-08-25 17:07:59,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122042.66666666667, ans=0.1 +2024-08-25 17:08:34,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=122096.0, ans=15.0 +2024-08-25 17:09:04,034 INFO [train.py:1114] (2/4) Epoch 10, batch 500, loss[loss=0.2555, simple_loss=0.3126, pruned_loss=0.07282, ctc_loss=0.1321, over 19669.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2969, pruned_loss=0.06885, ctc_loss=0.1291, over 3545733.99 frames. ], batch size: 63, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:09:43,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122256.0, ans=0.125 +2024-08-25 17:10:07,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=122256.0, ans=0.0 +2024-08-25 17:10:20,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=122309.33333333333, ans=0.0 +2024-08-25 17:10:36,239 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.797e+02 2.290e+02 2.870e+02 3.920e+02, threshold=4.579e+02, percent-clipped=0.0 +2024-08-25 17:10:37,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122309.33333333333, ans=0.1 +2024-08-25 17:10:38,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-25 17:10:43,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=122362.66666666667, ans=0.0 +2024-08-25 17:10:51,448 INFO [train.py:1114] (2/4) Epoch 10, batch 550, loss[loss=0.2667, simple_loss=0.3155, pruned_loss=0.07907, ctc_loss=0.1494, over 19210.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2969, pruned_loss=0.06882, ctc_loss=0.1292, over 3607666.55 frames. 
], batch size: 71, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:11:05,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122416.0, ans=0.1 +2024-08-25 17:11:10,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=122416.0, ans=0.0 +2024-08-25 17:11:51,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=122469.33333333333, ans=0.0 +2024-08-25 17:11:57,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=122469.33333333333, ans=0.125 +2024-08-25 17:13:39,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122522.66666666667, ans=0.1 +2024-08-25 17:13:40,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=122522.66666666667, ans=0.2 +2024-08-25 17:13:45,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=122522.66666666667, ans=0.0 +2024-08-25 17:14:20,770 INFO [train.py:1114] (2/4) Epoch 10, batch 600, loss[loss=0.2783, simple_loss=0.3251, pruned_loss=0.08431, ctc_loss=0.1573, over 19421.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.2974, pruned_loss=0.06915, ctc_loss=0.1299, over 3665635.06 frames. ], batch size: 67, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:14:33,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=122736.0, ans=0.025 +2024-08-25 17:14:59,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=122842.66666666667, ans=0.125 +2024-08-25 17:15:07,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=122842.66666666667, ans=0.0 +2024-08-25 17:15:08,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.815e+02 2.061e+02 2.496e+02 4.365e+02, threshold=4.122e+02, percent-clipped=0.0 +2024-08-25 17:15:11,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0 +2024-08-25 17:15:23,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=122949.33333333333, ans=0.125 +2024-08-25 17:15:24,813 INFO [train.py:1114] (2/4) Epoch 10, batch 650, loss[loss=0.2138, simple_loss=0.2828, pruned_loss=0.05273, ctc_loss=0.0981, over 19758.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2965, pruned_loss=0.06869, ctc_loss=0.1291, over 3715687.27 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:15:35,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=123002.66666666667, ans=0.2 +2024-08-25 17:16:01,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=123109.33333333333, ans=10.0 +2024-08-25 17:16:04,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.44 vs. 
limit=22.5 +2024-08-25 17:16:33,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=123216.0, ans=0.125 +2024-08-25 17:16:34,666 INFO [train.py:1114] (2/4) Epoch 10, batch 700, loss[loss=0.2411, simple_loss=0.2892, pruned_loss=0.06964, ctc_loss=0.1343, over 19725.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2971, pruned_loss=0.06901, ctc_loss=0.1295, over 3748664.78 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:17:33,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=123216.0, ans=0.125 +2024-08-25 17:17:39,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=123269.33333333333, ans=0.0 +2024-08-25 17:17:56,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123322.66666666667, ans=0.125 +2024-08-25 17:18:00,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123322.66666666667, ans=0.125 +2024-08-25 17:18:04,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123376.0, ans=0.0 +2024-08-25 17:18:05,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=123376.0, ans=0.125 +2024-08-25 17:18:13,484 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 1.934e+02 2.276e+02 3.026e+02 5.626e+02, threshold=4.552e+02, percent-clipped=3.0 +2024-08-25 17:18:17,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123429.33333333333, ans=0.0 +2024-08-25 17:18:26,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=123429.33333333333, ans=0.05 +2024-08-25 17:18:28,246 INFO [train.py:1114] (2/4) Epoch 10, batch 750, loss[loss=0.229, simple_loss=0.294, pruned_loss=0.05981, ctc_loss=0.1112, over 19517.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2964, pruned_loss=0.0686, ctc_loss=0.1287, over 3775255.51 frames. ], batch size: 54, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:18:31,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123482.66666666667, ans=0.1 +2024-08-25 17:18:47,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=123536.0, ans=0.125 +2024-08-25 17:19:07,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123589.33333333333, ans=0.1 +2024-08-25 17:19:10,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123642.66666666667, ans=0.1 +2024-08-25 17:19:23,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=123696.0, ans=0.0 +2024-08-25 17:19:32,723 INFO [train.py:1114] (2/4) Epoch 10, batch 800, loss[loss=0.2221, simple_loss=0.2664, pruned_loss=0.06592, ctc_loss=0.115, over 19798.00 frames. 
], tot_loss[loss=0.2413, simple_loss=0.2956, pruned_loss=0.06797, ctc_loss=0.1277, over 3797522.83 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:19:58,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=123749.33333333333, ans=0.2 +2024-08-25 17:20:05,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.20 vs. limit=15.0 +2024-08-25 17:20:08,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=15.0 +2024-08-25 17:20:17,629 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.388e-03 +2024-08-25 17:20:29,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=123909.33333333333, ans=0.2 +2024-08-25 17:20:33,027 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.887e+02 2.136e+02 2.736e+02 3.984e+02, threshold=4.273e+02, percent-clipped=0.0 +2024-08-25 17:20:47,976 INFO [train.py:1114] (2/4) Epoch 10, batch 850, loss[loss=0.2324, simple_loss=0.3015, pruned_loss=0.05974, ctc_loss=0.1096, over 19643.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2957, pruned_loss=0.06804, ctc_loss=0.1279, over 3814987.05 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:20:50,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=124016.0, ans=0.125 +2024-08-25 17:20:51,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=124016.0, ans=0.125 +2024-08-25 17:20:52,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=124016.0, ans=0.125 +2024-08-25 17:20:55,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=124016.0, ans=0.0 +2024-08-25 17:21:02,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=124069.33333333333, ans=0.0 +2024-08-25 17:21:08,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=124069.33333333333, ans=0.1 +2024-08-25 17:21:12,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=124122.66666666667, ans=0.125 +2024-08-25 17:21:30,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=124176.0, ans=0.025 +2024-08-25 17:22:28,571 INFO [train.py:1114] (2/4) Epoch 10, batch 900, loss[loss=0.2143, simple_loss=0.2666, pruned_loss=0.05823, ctc_loss=0.114, over 19427.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2956, pruned_loss=0.06833, ctc_loss=0.1285, over 3818467.02 frames. 
], batch size: 48, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:22:48,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=124336.0, ans=0.125 +2024-08-25 17:23:13,943 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.854e+02 2.167e+02 2.763e+02 5.395e+02, threshold=4.333e+02, percent-clipped=2.0 +2024-08-25 17:23:19,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.02 vs. limit=22.5 +2024-08-25 17:23:23,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=124496.0, ans=0.0 +2024-08-25 17:23:30,317 INFO [train.py:1114] (2/4) Epoch 10, batch 950, loss[loss=0.2552, simple_loss=0.301, pruned_loss=0.0756, ctc_loss=0.1454, over 19486.00 frames. ], tot_loss[loss=0.242, simple_loss=0.296, pruned_loss=0.06832, ctc_loss=0.1286, over 3819321.92 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:23:35,092 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 17:24:01,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=124656.0, ans=0.2 +2024-08-25 17:24:08,754 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.84 vs. limit=15.0 +2024-08-25 17:24:09,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.56 vs. limit=22.5 +2024-08-25 17:24:34,417 INFO [train.py:1114] (2/4) Epoch 10, batch 1000, loss[loss=0.219, simple_loss=0.2846, pruned_loss=0.05609, ctc_loss=0.1033, over 19840.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2973, pruned_loss=0.06907, ctc_loss=0.1301, over 3814320.55 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:24:45,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124869.33333333333, ans=0.125 +2024-08-25 17:25:04,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=124922.66666666667, ans=0.0 +2024-08-25 17:25:18,039 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.505e+02 1.797e+02 2.069e+02 2.553e+02 4.130e+02, threshold=4.138e+02, percent-clipped=0.0 +2024-08-25 17:25:27,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.49 vs. limit=15.0 +2024-08-25 17:25:33,303 INFO [train.py:1114] (2/4) Epoch 10, batch 1050, loss[loss=0.2932, simple_loss=0.3399, pruned_loss=0.08973, ctc_loss=0.1679, over 19855.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2968, pruned_loss=0.06901, ctc_loss=0.1299, over 3821281.08 frames. 
], batch size: 57, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:25:33,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=125082.66666666667, ans=0.0 +2024-08-25 17:25:33,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=125082.66666666667, ans=0.0 +2024-08-25 17:26:11,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=125242.66666666667, ans=0.125 +2024-08-25 17:26:15,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125242.66666666667, ans=0.1 +2024-08-25 17:26:24,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=125296.0, ans=0.2 +2024-08-25 17:26:32,051 INFO [train.py:1114] (2/4) Epoch 10, batch 1100, loss[loss=0.2329, simple_loss=0.2957, pruned_loss=0.06182, ctc_loss=0.1164, over 19592.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2965, pruned_loss=0.06898, ctc_loss=0.1299, over 3828470.94 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:26:54,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=125402.66666666667, ans=0.125 +2024-08-25 17:27:18,174 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.787e+02 2.060e+02 2.560e+02 4.808e+02, threshold=4.120e+02, percent-clipped=1.0 +2024-08-25 17:27:28,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125562.66666666667, ans=0.125 +2024-08-25 17:27:33,346 INFO [train.py:1114] (2/4) Epoch 10, batch 1150, loss[loss=0.2652, simple_loss=0.3071, pruned_loss=0.08131, ctc_loss=0.1516, over 19580.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2966, pruned_loss=0.06911, ctc_loss=0.1303, over 3829001.93 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:28:38,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=125829.33333333333, ans=0.125 +2024-08-25 17:28:41,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.30 vs. limit=22.5 +2024-08-25 17:28:44,448 INFO [train.py:1114] (2/4) Epoch 10, batch 1200, loss[loss=0.2628, simple_loss=0.3117, pruned_loss=0.07787, ctc_loss=0.1455, over 19841.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2972, pruned_loss=0.06919, ctc_loss=0.1305, over 3825625.47 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:28:55,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=125936.0, ans=0.0 +2024-08-25 17:29:28,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=126042.66666666667, ans=0.0 +2024-08-25 17:29:29,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.42 vs. 
+2024-08-25 17:29:30,099 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.823e+02 2.047e+02 2.358e+02 4.051e+02, threshold=4.094e+02, percent-clipped=0.0
+2024-08-25 17:29:45,850 INFO [train.py:1114] (2/4) Epoch 10, batch 1250, loss[loss=0.2641, simple_loss=0.3141, pruned_loss=0.07821, ctc_loss=0.1442, over 19509.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.2969, pruned_loss=0.06853, ctc_loss=0.1292, over 3843280.40 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:29:46,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=126149.33333333333, ans=0.025
+2024-08-25 17:29:52,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=126149.33333333333, ans=0.0
+2024-08-25 17:30:18,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=126256.0, ans=0.125
+2024-08-25 17:30:47,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=126362.66666666667, ans=0.07
+2024-08-25 17:30:55,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=126362.66666666667, ans=0.0
+2024-08-25 17:30:56,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0
+2024-08-25 17:30:59,766 INFO [train.py:1114] (2/4) Epoch 10, batch 1300, loss[loss=0.2687, simple_loss=0.3083, pruned_loss=0.08352, ctc_loss=0.1553, over 18891.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2963, pruned_loss=0.06817, ctc_loss=0.1282, over 3846876.52 frames. ], batch size: 76, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:31:18,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126416.0, ans=0.125
+2024-08-25 17:31:22,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=126416.0, ans=0.0
+2024-08-25 17:32:04,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.07 vs. limit=15.0
+2024-08-25 17:32:13,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.900e+02 2.303e+02 2.970e+02 5.096e+02, threshold=4.606e+02, percent-clipped=7.0
+2024-08-25 17:32:23,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=126629.33333333333, ans=0.125
+2024-08-25 17:32:28,199 INFO [train.py:1114] (2/4) Epoch 10, batch 1350, loss[loss=0.2132, simple_loss=0.2819, pruned_loss=0.05284, ctc_loss=0.09704, over 19765.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2959, pruned_loss=0.06799, ctc_loss=0.1277, over 3857709.68 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 32.0
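(Editor's note: the WARNING [optim.py:487] entries summarize the recent distribution of gradient norms: five quartile values (min, 25%, median, 75%, max), a clipping threshold, and the percentage of recent batches whose gradients exceeded it. In these lines the threshold equals Clipping_scale times the median, e.g. 2.0 * 2.047e+02 = 4.094e+02 above. A hedged sketch of that bookkeeping; the function name and interface are illustrative, not the optimizer's real API.)

    import torch

    def grad_norm_summary(norms: torch.Tensor, clipping_scale: float = 2.0):
        """norms: 1-D tensor of recent per-batch gradient norms."""
        q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = clipping_scale * q[2]          # clipping_scale x median
        pct = 100.0 * (norms > threshold).float().mean()
        return q, threshold, pct

    q, thr, pct = grad_norm_summary(torch.tensor([152.2, 182.3, 204.7, 235.8, 405.1]))
    print(q.tolist(), float(thr), float(pct))      # threshold 409.4, 0.0% clipped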
+2024-08-25 17:32:45,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=126736.0, ans=0.125
+2024-08-25 17:32:46,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=126736.0, ans=0.025
+2024-08-25 17:32:48,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126736.0, ans=0.1
+2024-08-25 17:32:53,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=126789.33333333333, ans=0.0
+2024-08-25 17:33:06,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=126842.66666666667, ans=0.125
+2024-08-25 17:33:23,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=126896.0, ans=0.0
+2024-08-25 17:33:30,493 INFO [train.py:1114] (2/4) Epoch 10, batch 1400, loss[loss=0.2142, simple_loss=0.2657, pruned_loss=0.05915, ctc_loss=0.1108, over 19682.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2956, pruned_loss=0.06789, ctc_loss=0.1273, over 3865118.71 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 32.0
+2024-08-25 17:33:38,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=126949.33333333333, ans=0.0
+2024-08-25 17:33:41,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127002.66666666667, ans=0.125
+2024-08-25 17:34:34,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.46 vs. limit=15.0
+2024-08-25 17:34:37,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=127109.33333333333, ans=0.125
+2024-08-25 17:34:42,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.856e+02 2.167e+02 2.631e+02 4.500e+02, threshold=4.335e+02, percent-clipped=0.0
+2024-08-25 17:34:52,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127162.66666666667, ans=0.125
+2024-08-25 17:35:02,157 INFO [train.py:1114] (2/4) Epoch 10, batch 1450, loss[loss=0.2497, simple_loss=0.3111, pruned_loss=0.06952, ctc_loss=0.123, over 19658.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2961, pruned_loss=0.06803, ctc_loss=0.1277, over 3863069.26 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:35:11,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.30 vs. limit=15.0
+2024-08-25 17:35:32,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=12.0
+2024-08-25 17:35:36,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127322.66666666667, ans=0.1
+2024-08-25 17:36:02,103 INFO [train.py:1114] (2/4) Epoch 10, batch 1500, loss[loss=0.2613, simple_loss=0.3102, pruned_loss=0.07701, ctc_loss=0.1459, over 19582.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2967, pruned_loss=0.06839, ctc_loss=0.1281, over 3862044.66 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:36:09,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=127482.66666666667, ans=0.0
+2024-08-25 17:36:11,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=127482.66666666667, ans=0.95
+2024-08-25 17:36:14,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127536.0, ans=0.125
+2024-08-25 17:36:33,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=127589.33333333333, ans=10.0
+2024-08-25 17:36:37,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. limit=6.0
+2024-08-25 17:36:38,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=127589.33333333333, ans=0.0
+2024-08-25 17:36:55,675 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.462e+02 1.877e+02 2.186e+02 2.626e+02 4.478e+02, threshold=4.372e+02, percent-clipped=1.0
+2024-08-25 17:37:03,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0
+2024-08-25 17:37:06,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. limit=15.0
+2024-08-25 17:37:24,239 INFO [train.py:1114] (2/4) Epoch 10, batch 1550, loss[loss=0.2517, simple_loss=0.306, pruned_loss=0.07211, ctc_loss=0.1331, over 19608.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2971, pruned_loss=0.06882, ctc_loss=0.1293, over 3845947.00 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0
+2024-08-25 17:37:42,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=127802.66666666667, ans=0.0
+2024-08-25 17:37:44,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=127802.66666666667, ans=0.035
+2024-08-25 17:38:00,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127856.0, ans=0.1
+2024-08-25 17:38:54,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=127909.33333333333, ans=0.0
+2024-08-25 17:39:51,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=127962.66666666667, ans=0.0
+2024-08-25 17:39:52,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=127962.66666666667, ans=0.2
+2024-08-25 17:41:06,795 INFO [train.py:1114] (2/4) Epoch 10, batch 1600, loss[loss=0.2298, simple_loss=0.2991, pruned_loss=0.05804, ctc_loss=0.111, over 19828.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2966, pruned_loss=0.06876, ctc_loss=0.1293, over 3834494.88 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
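(Editor's note: the grad_scale field flips between 16.0 and 32.0 in the batch-1550/1600 lines above. With mixed-precision training the loss is multiplied by a dynamic scale that is halved when gradient overflows appear and grown again after a run of clean steps. A generic PyTorch sketch of that mechanism; this is stock torch.cuda.amp, not the recipe's own scaler, and the model/batch/loss_fn interface is assumed.)

    import torch

    scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

    def train_step(model, optimizer, batch, loss_fn):
        optimizer.zero_grad(set_to_none=True)
        with torch.cuda.amp.autocast():
            loss = loss_fn(model(batch))
        scaler.scale(loss).backward()   # backward on the scaled loss
        scaler.step(optimizer)          # skips the update if grads overflowed
        scaler.update()                 # halves the scale on overflow, else grows it
        return loss.detach(), scaler.get_scale()  # cf. "grad_scale: 16.0 / 32.0"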
+2024-08-25 17:43:00,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=128122.66666666667, ans=0.0
+2024-08-25 17:43:05,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.37 vs. limit=22.5
+2024-08-25 17:43:14,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.67 vs. limit=10.0
+2024-08-25 17:43:24,235 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.528e+02 1.849e+02 2.080e+02 2.733e+02 5.175e+02, threshold=4.161e+02, percent-clipped=4.0
+2024-08-25 17:43:38,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128229.33333333333, ans=0.125
+2024-08-25 17:43:57,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=128229.33333333333, ans=0.0
+2024-08-25 17:44:00,800 INFO [train.py:1114] (2/4) Epoch 10, batch 1650, loss[loss=0.2318, simple_loss=0.2943, pruned_loss=0.06087, ctc_loss=0.119, over 19667.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2962, pruned_loss=0.06861, ctc_loss=0.129, over 3831066.69 frames. ], batch size: 59, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:44:11,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=128336.0, ans=0.0
+2024-08-25 17:44:30,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=128336.0, ans=0.0
+2024-08-25 17:44:35,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=128336.0, ans=0.5
+2024-08-25 17:44:42,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=128389.33333333333, ans=0.125
+2024-08-25 17:45:07,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=128442.66666666667, ans=0.2
+2024-08-25 17:45:12,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=128496.0, ans=0.125
+2024-08-25 17:45:15,709 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 17:45:19,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=128496.0, ans=10.0
+2024-08-25 17:45:46,329 INFO [train.py:1114] (2/4) Epoch 10, batch 1700, loss[loss=0.1979, simple_loss=0.2558, pruned_loss=0.05235, ctc_loss=0.0879, over 19668.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2953, pruned_loss=0.06771, ctc_loss=0.1272, over 3845478.45 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:45:47,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=128549.33333333333, ans=0.2
+2024-08-25 17:46:34,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=128602.66666666667, ans=0.0
+2024-08-25 17:46:42,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=128602.66666666667, ans=0.025
+2024-08-25 17:46:43,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0
+2024-08-25 17:46:44,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.35 vs. limit=15.0
+2024-08-25 17:46:45,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=128602.66666666667, ans=0.0
+2024-08-25 17:46:49,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128656.0, ans=0.1
+2024-08-25 17:46:56,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=128656.0, ans=0.125
+2024-08-25 17:47:10,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128709.33333333333, ans=0.1
+2024-08-25 17:47:11,342 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.773e+02 2.059e+02 2.527e+02 4.467e+02, threshold=4.119e+02, percent-clipped=1.0
+2024-08-25 17:48:12,452 INFO [train.py:1114] (2/4) Epoch 10, batch 1750, loss[loss=0.2286, simple_loss=0.2756, pruned_loss=0.06602, ctc_loss=0.124, over 19644.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2941, pruned_loss=0.06693, ctc_loss=0.1257, over 3850000.57 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0
+2024-08-25 17:48:23,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=128816.0, ans=0.125
+2024-08-25 17:48:41,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=12.0
+2024-08-25 17:48:44,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128922.66666666667, ans=0.125
+2024-08-25 17:48:44,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=128922.66666666667, ans=0.2
+2024-08-25 17:48:45,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=128922.66666666667, ans=0.125
+2024-08-25 17:48:52,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128976.0, ans=0.125
+2024-08-25 17:49:07,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.14 vs. limit=22.5
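(Editor's note: the entry just above is one of the few where the whitening metric exceeds its limit, 25.14 vs. 22.5, which is when a corrective penalty would kick in. The metric can be read as "how far is the per-group feature covariance from a multiple of the identity". It is an assumption that the recipe's scaling.py computes something of this shape; the definition below is illustrative: it is >= 1 and equals 1 exactly for perfectly white features.)

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
        """x: (num_frames, num_channels). Ratio of the mean squared eigenvalue
        to the squared mean eigenvalue of each group's covariance."""
        n, c = x.shape
        x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
        covar = x.transpose(1, 2) @ x / n            # (groups, c/g, c/g)
        mean_diag = covar.diagonal(dim1=1, dim2=2).mean()
        mean_sq_diag = (covar @ covar).diagonal(dim1=1, dim2=2).mean()
        return mean_sq_diag / (mean_diag ** 2 + 1e-20)

    x = torch.randn(2000, 384)                       # approximately white
    print(float(whitening_metric(x, num_groups=1)))  # near 1; strongly
                                                     # correlated features
                                                     # push it much higher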
+2024-08-25 17:49:11,954 INFO [train.py:1114] (2/4) Epoch 10, batch 1800, loss[loss=0.2408, simple_loss=0.2996, pruned_loss=0.06642, ctc_loss=0.1229, over 19621.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.2944, pruned_loss=0.06704, ctc_loss=0.1258, over 3852826.67 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 17:49:18,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=129082.66666666667, ans=0.125
+2024-08-25 17:50:11,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0
+2024-08-25 17:51:01,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.74 vs. limit=15.0
+2024-08-25 18:04:15,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=129189.33333333333, ans=0.025
+2024-08-25 18:11:17,742 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.930e+02 2.270e+02 3.115e+02 5.695e+02, threshold=4.540e+02, percent-clipped=10.0
+2024-08-25 18:14:48,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=129296.0, ans=0.07
+2024-08-25 18:19:59,309 INFO [train.py:1114] (2/4) Epoch 10, batch 1850, loss[loss=0.2797, simple_loss=0.3267, pruned_loss=0.08389, ctc_loss=0.162, over 19576.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2951, pruned_loss=0.06797, ctc_loss=0.1273, over 3855910.98 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:24:28,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129402.66666666667, ans=0.0
+2024-08-25 18:25:40,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=129402.66666666667, ans=0.0
+2024-08-25 18:29:52,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=129509.33333333333, ans=0.0
+2024-08-25 18:32:27,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=129616.0, ans=0.0
+2024-08-25 18:32:37,448 INFO [train.py:1114] (2/4) Epoch 10, batch 1900, loss[loss=0.2249, simple_loss=0.2924, pruned_loss=0.05678, ctc_loss=0.1097, over 19650.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2958, pruned_loss=0.06805, ctc_loss=0.1274, over 3860739.14 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:36:34,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=129722.66666666667, ans=0.0
+2024-08-25 18:36:36,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=129722.66666666667, ans=0.2
+2024-08-25 18:36:40,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0
+2024-08-25 18:37:43,375 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.882e+02 2.156e+02 2.772e+02 4.689e+02, threshold=4.313e+02, percent-clipped=1.0
+2024-08-25 18:37:57,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=129829.33333333333, ans=0.125
+2024-08-25 18:38:10,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129829.33333333333, ans=0.125
+2024-08-25 18:38:20,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=129829.33333333333, ans=0.0
+2024-08-25 18:38:48,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129882.66666666667, ans=0.1
+2024-08-25 18:38:48,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. limit=10.0
+2024-08-25 18:38:51,205 INFO [train.py:1114] (2/4) Epoch 10, batch 1950, loss[loss=0.2314, simple_loss=0.288, pruned_loss=0.06345, ctc_loss=0.1196, over 19575.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2963, pruned_loss=0.06766, ctc_loss=0.1269, over 3869785.17 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:38:59,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=129882.66666666667, ans=0.125
+2024-08-25 18:39:16,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=129882.66666666667, ans=0.09899494936611666
+2024-08-25 18:40:12,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=129936.0, ans=0.0
+2024-08-25 18:41:32,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.44 vs. limit=10.0
+2024-08-25 18:42:12,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=130042.66666666667, ans=0.09899494936611666
+2024-08-25 18:44:04,322 INFO [train.py:1114] (2/4) Epoch 10, batch 2000, loss[loss=0.2306, simple_loss=0.2708, pruned_loss=0.06969, ctc_loss=0.1274, over 19625.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.2974, pruned_loss=0.06839, ctc_loss=0.1285, over 3855609.89 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:44:41,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. limit=10.0
+2024-08-25 18:47:32,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.882e+02 2.262e+02 2.707e+02 4.864e+02, threshold=4.523e+02, percent-clipped=1.0
+2024-08-25 18:48:39,775 INFO [train.py:1114] (2/4) Epoch 10, batch 2050, loss[loss=0.2005, simple_loss=0.2631, pruned_loss=0.04943, ctc_loss=0.09763, over 19751.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2964, pruned_loss=0.06846, ctc_loss=0.1285, over 3851886.73 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0
+2024-08-25 18:49:59,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=130469.33333333333, ans=0.2
+2024-08-25 18:50:02,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0
+2024-08-25 18:50:28,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130522.66666666667, ans=0.125
+2024-08-25 18:51:57,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=130682.66666666667, ans=0.125
+2024-08-25 18:52:20,501 INFO [train.py:1114] (2/4) Epoch 10, batch 2100, loss[loss=0.2137, simple_loss=0.277, pruned_loss=0.05548, ctc_loss=0.09852, over 19774.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2955, pruned_loss=0.06794, ctc_loss=0.1276, over 3858696.97 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 16.0
+2024-08-25 18:52:25,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=130682.66666666667, ans=0.125
+2024-08-25 18:52:29,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.19 vs. limit=22.5
+2024-08-25 18:52:56,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=130736.0, ans=0.025
+2024-08-25 18:52:57,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=130736.0, ans=0.125
+2024-08-25 18:53:29,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130789.33333333333, ans=0.125
+2024-08-25 18:53:46,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=130842.66666666667, ans=0.1
+2024-08-25 18:53:48,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.09 vs. limit=22.5
+2024-08-25 18:53:49,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130842.66666666667, ans=0.125
+2024-08-25 18:53:56,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.73 vs. limit=15.0
+2024-08-25 18:53:58,207 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.839e+02 2.296e+02 2.721e+02 6.154e+02, threshold=4.593e+02, percent-clipped=3.0
+2024-08-25 18:54:23,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=130896.0, ans=0.0
+2024-08-25 18:54:37,554 INFO [train.py:1114] (2/4) Epoch 10, batch 2150, loss[loss=0.2019, simple_loss=0.2696, pruned_loss=0.04862, ctc_loss=0.09217, over 19872.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.295, pruned_loss=0.0677, ctc_loss=0.1271, over 3869140.22 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:54:53,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=130949.33333333333, ans=0.125
+2024-08-25 18:55:54,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=131109.33333333334, ans=0.125
+2024-08-25 18:56:32,993 INFO [train.py:1114] (2/4) Epoch 10, batch 2200, loss[loss=0.2906, simple_loss=0.3352, pruned_loss=0.09019, ctc_loss=0.1644, over 19591.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2953, pruned_loss=0.06779, ctc_loss=0.1273, over 3867343.91 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:56:44,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=131216.0, ans=0.5
+2024-08-25 18:56:46,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=131216.0, ans=0.5
+2024-08-25 18:57:09,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=131269.33333333334, ans=0.2
+2024-08-25 18:57:20,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=131322.66666666666, ans=0.05
+2024-08-25 18:57:36,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.70 vs. limit=22.5
+2024-08-25 18:57:49,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.15 vs. limit=15.0
+2024-08-25 18:57:49,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=131376.0, ans=0.2
+2024-08-25 18:57:51,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.773e+02 2.006e+02 2.540e+02 3.937e+02, threshold=4.013e+02, percent-clipped=0.0
+2024-08-25 18:57:59,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=131429.33333333334, ans=0.125
+2024-08-25 18:58:07,805 INFO [train.py:1114] (2/4) Epoch 10, batch 2250, loss[loss=0.2596, simple_loss=0.3145, pruned_loss=0.07363, ctc_loss=0.1434, over 19614.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.295, pruned_loss=0.0675, ctc_loss=0.1268, over 3867782.49 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:58:12,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=131482.66666666666, ans=0.0
+2024-08-25 18:58:56,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=131696.0, ans=0.125
+2024-08-25 18:58:59,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=131696.0, ans=0.125
+2024-08-25 18:59:03,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=131696.0, ans=0.125
+2024-08-25 18:59:05,060 INFO [train.py:1114] (2/4) Epoch 10, batch 2300, loss[loss=0.2235, simple_loss=0.2778, pruned_loss=0.06199, ctc_loss=0.1129, over 19489.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2947, pruned_loss=0.06789, ctc_loss=0.1276, over 3861314.54 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 18:59:15,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0
+2024-08-25 18:59:56,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0
+2024-08-25 19:00:00,735 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.815e+02 2.310e+02 2.961e+02 4.661e+02, threshold=4.621e+02, percent-clipped=5.0
+2024-08-25 19:00:00,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131962.66666666666, ans=0.1
+2024-08-25 19:00:03,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. limit=6.0
+2024-08-25 19:00:12,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131962.66666666666, ans=0.1
+2024-08-25 19:00:14,656 INFO [train.py:1114] (2/4) Epoch 10, batch 2350, loss[loss=0.2708, simple_loss=0.3256, pruned_loss=0.07929, ctc_loss=0.1434, over 19675.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.2949, pruned_loss=0.06807, ctc_loss=0.128, over 3863755.80 frames. ], batch size: 63, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 19:00:18,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=132016.0, ans=0.0
+2024-08-25 19:00:23,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=132016.0, ans=0.035
+2024-08-25 19:00:25,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=132069.33333333334, ans=0.0
+2024-08-25 19:00:27,667 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 19:00:33,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=132069.33333333334, ans=0.2
+2024-08-25 19:00:39,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=132122.66666666666, ans=10.0
+2024-08-25 19:00:46,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=132122.66666666666, ans=0.025
+2024-08-25 19:00:49,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=132176.0, ans=0.0
+2024-08-25 19:01:13,184 INFO [train.py:1114] (2/4) Epoch 10, batch 2400, loss[loss=0.2424, simple_loss=0.3, pruned_loss=0.06585, ctc_loss=0.1328, over 19302.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2966, pruned_loss=0.0684, ctc_loss=0.1284, over 3858059.15 frames. ], batch size: 71, lr: 1.46e-02, grad_scale: 32.0
+2024-08-25 19:01:19,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=132282.66666666666, ans=0.125
+2024-08-25 19:01:27,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.99 vs. limit=15.0
+2024-08-25 19:01:29,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=132336.0, ans=0.04949747468305833
+2024-08-25 19:01:42,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=132389.33333333334, ans=0.125
+2024-08-25 19:01:47,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132389.33333333334, ans=0.0
+2024-08-25 19:01:51,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=132442.66666666666, ans=0.125
+2024-08-25 19:01:52,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132442.66666666666, ans=0.125
+2024-08-25 19:02:10,727 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.986e+02 2.279e+02 2.618e+02 8.799e+02, threshold=4.558e+02, percent-clipped=0.0
+2024-08-25 19:02:16,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132496.0, ans=0.1
+2024-08-25 19:02:22,024 INFO [train.py:1114] (2/4) Epoch 10, batch 2450, loss[loss=0.275, simple_loss=0.3092, pruned_loss=0.08885, ctc_loss=0.1578, over 13563.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3015, pruned_loss=0.07226, ctc_loss=0.1359, over 3731945.56 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 16.0
+2024-08-25 19:02:37,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=132602.66666666666, ans=0.07
+2024-08-25 19:03:06,174 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.520e-03
+2024-08-25 19:04:28,602 INFO [train.py:1114] (2/4) Epoch 11, batch 0, loss[loss=0.2587, simple_loss=0.3022, pruned_loss=0.07923, ctc_loss=0.142, over 19782.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3022, pruned_loss=0.07923, ctc_loss=0.142, over 19782.00 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:04:28,602 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 19:04:55,858 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.2031, simple_loss=0.2887, pruned_loss=0.04339, ctc_loss=0.0768, over 944034.00 frames.
+2024-08-25 19:04:55,859 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB
+2024-08-25 19:04:58,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.89 vs. limit=22.5
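(Editor's note: the train.py:1137-1147 block a few entries above shows the epoch boundary: a full pass over the held-out set, 944034 frames here, followed by a peak-GPU-memory report. A hedged sketch of that step; the model/batch interface is assumed for illustration, and only torch.cuda.max_memory_allocated is the real API behind the memory line.)

    import torch

    @torch.no_grad()
    def compute_validation_loss(model, valid_loader, device):
        model.eval()
        tot_loss, tot_frames = 0.0, 0.0
        for batch in valid_loader:
            # assumed interface: model returns (summed loss, num frames)
            loss_sum, num_frames = model(batch["features"].to(device))
            tot_loss += float(loss_sum)
            tot_frames += num_frames
        model.train()
        peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        # cf. "validation: loss=0.2031 ... over 944034.00 frames." and
        #     "Maximum memory allocated so far is 14234MB"
        return tot_loss / tot_frames, peak_mb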
+2024-08-25 19:05:11,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132810.66666666666, ans=0.0
+2024-08-25 19:05:45,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132917.33333333334, ans=0.0
+2024-08-25 19:05:58,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=132970.66666666666, ans=0.125
+2024-08-25 19:06:01,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.76 vs. limit=15.0
+2024-08-25 19:06:02,247 INFO [train.py:1114] (2/4) Epoch 11, batch 50, loss[loss=0.2113, simple_loss=0.2664, pruned_loss=0.05708, ctc_loss=0.1048, over 19726.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.2988, pruned_loss=0.0697, ctc_loss=0.1318, over 844464.80 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:06:03,364 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.465e+02 2.050e+02 2.234e+02 2.552e+02 4.359e+02, threshold=4.468e+02, percent-clipped=1.0
+2024-08-25 19:06:06,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=133024.0, ans=0.125
+2024-08-25 19:06:09,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133024.0, ans=0.125
+2024-08-25 19:06:09,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133024.0, ans=0.125
+2024-08-25 19:06:48,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=133184.0, ans=0.125
+2024-08-25 19:07:30,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=133237.33333333334, ans=0.0
+2024-08-25 19:07:42,580 INFO [train.py:1114] (2/4) Epoch 11, batch 100, loss[loss=0.2159, simple_loss=0.2801, pruned_loss=0.05561, ctc_loss=0.101, over 19710.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2985, pruned_loss=0.06873, ctc_loss=0.1298, over 1499152.40 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:08:03,730 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 19:08:15,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.15 vs. limit=15.0
+2024-08-25 19:08:44,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=133450.66666666666, ans=0.025
+2024-08-25 19:08:51,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=133504.0, ans=0.125
+2024-08-25 19:08:57,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.10 vs. limit=12.0
+2024-08-25 19:09:10,095 INFO [train.py:1114] (2/4) Epoch 11, batch 150, loss[loss=0.2527, simple_loss=0.2858, pruned_loss=0.07956, ctc_loss=0.1512, over 19685.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.296, pruned_loss=0.06753, ctc_loss=0.1276, over 2028078.50 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-25 19:09:12,933 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.747e+02 2.015e+02 2.344e+02 3.708e+02, threshold=4.031e+02, percent-clipped=0.0
+2024-08-25 19:09:15,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.23 vs. limit=22.5
+2024-08-25 19:09:17,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.43 vs. limit=10.0
+2024-08-25 19:09:18,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133557.33333333334, ans=0.1
+2024-08-25 19:09:49,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=133717.33333333334, ans=0.0
+2024-08-25 19:09:51,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=133717.33333333334, ans=0.2
+2024-08-25 19:10:00,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=133770.66666666666, ans=0.125
+2024-08-25 19:10:21,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=133770.66666666666, ans=0.0
+2024-08-25 19:10:34,572 INFO [train.py:1114] (2/4) Epoch 11, batch 200, loss[loss=0.2429, simple_loss=0.3026, pruned_loss=0.06619, ctc_loss=0.1273, over 18112.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2946, pruned_loss=0.06664, ctc_loss=0.1259, over 2434418.27 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:10:45,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.93 vs. limit=15.0
+2024-08-25 19:11:00,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.02 vs. limit=22.5
+2024-08-25 19:11:40,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=133984.0, ans=0.125
+2024-08-25 19:11:45,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=133984.0, ans=0.1
+2024-08-25 19:11:51,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0
+2024-08-25 19:11:55,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134037.33333333334, ans=0.125
+2024-08-25 19:12:01,108 INFO [train.py:1114] (2/4) Epoch 11, batch 250, loss[loss=0.2451, simple_loss=0.3032, pruned_loss=0.06876, ctc_loss=0.1238, over 19405.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.295, pruned_loss=0.06701, ctc_loss=0.1264, over 2754380.29 frames. ], batch size: 67, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:12:02,129 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.716e+02 2.023e+02 2.469e+02 5.021e+02, threshold=4.046e+02, percent-clipped=3.0
+2024-08-25 19:12:09,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=134090.66666666666, ans=0.125
+2024-08-25 19:12:22,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.36 vs. limit=15.0
+2024-08-25 19:12:32,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=134197.33333333334, ans=0.2
+2024-08-25 19:12:40,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=134250.66666666666, ans=0.2
+2024-08-25 19:12:41,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=134250.66666666666, ans=0.0
+2024-08-25 19:12:42,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=134250.66666666666, ans=0.2
+2024-08-25 19:13:03,635 INFO [train.py:1114] (2/4) Epoch 11, batch 300, loss[loss=0.2722, simple_loss=0.3141, pruned_loss=0.08392, ctc_loss=0.1561, over 19517.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2937, pruned_loss=0.06635, ctc_loss=0.125, over 2998972.64 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:13:03,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=134357.33333333334, ans=0.2
+2024-08-25 19:13:07,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0
+2024-08-25 19:13:08,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=134357.33333333334, ans=0.2
+2024-08-25 19:13:20,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134410.66666666666, ans=0.125
+2024-08-25 19:13:50,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.68 vs. limit=15.0
+2024-08-25 19:14:00,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=134570.66666666666, ans=0.025
+2024-08-25 19:14:07,016 INFO [train.py:1114] (2/4) Epoch 11, batch 350, loss[loss=0.2289, simple_loss=0.2755, pruned_loss=0.06651, ctc_loss=0.1232, over 19767.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.294, pruned_loss=0.0665, ctc_loss=0.1253, over 3189034.92 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:14:08,115 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.838e+02 2.258e+02 2.898e+02 4.827e+02, threshold=4.516e+02, percent-clipped=2.0
+2024-08-25 19:14:40,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=134677.33333333334, ans=0.2
+2024-08-25 19:15:12,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134784.0, ans=0.125
+2024-08-25 19:15:45,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=134837.33333333334, ans=0.2
+2024-08-25 19:15:57,919 INFO [train.py:1114] (2/4) Epoch 11, batch 400, loss[loss=0.2225, simple_loss=0.2854, pruned_loss=0.05771, ctc_loss=0.1102, over 19512.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.293, pruned_loss=0.06599, ctc_loss=0.1243, over 3342037.91 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:16:03,679 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 19:16:12,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=134944.0, ans=0.125
+2024-08-25 19:16:44,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=134997.33333333334, ans=0.0
+2024-08-25 19:16:48,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=135050.66666666666, ans=0.07
+2024-08-25 19:16:49,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=135050.66666666666, ans=0.015
+2024-08-25 19:16:59,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135050.66666666666, ans=0.125
+2024-08-25 19:17:01,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0
+2024-08-25 19:17:04,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135104.0, ans=0.125
+2024-08-25 19:17:22,193 INFO [train.py:1114] (2/4) Epoch 11, batch 450, loss[loss=0.2598, simple_loss=0.3149, pruned_loss=0.07479, ctc_loss=0.1375, over 19623.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.2932, pruned_loss=0.0662, ctc_loss=0.1245, over 3450391.50 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:17:31,731 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.841e+02 2.102e+02 2.681e+02 4.407e+02, threshold=4.204e+02, percent-clipped=0.0
+2024-08-25 19:17:34,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0
+2024-08-25 19:17:41,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=135210.66666666666, ans=0.07
+2024-08-25 19:17:59,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=135264.0, ans=0.125
+2024-08-25 19:18:12,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=135317.33333333334, ans=0.125
+2024-08-25 19:18:34,753 INFO [train.py:1114] (2/4) Epoch 11, batch 500, loss[loss=0.2278, simple_loss=0.2931, pruned_loss=0.05968, ctc_loss=0.1082, over 19673.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.292, pruned_loss=0.0657, ctc_loss=0.1235, over 3545705.63 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:18:36,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135424.0, ans=0.125
+2024-08-25 19:18:53,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.50 vs. limit=10.0
+2024-08-25 19:19:34,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=135584.0, ans=0.2
+2024-08-25 19:19:37,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135584.0, ans=0.125
+2024-08-25 19:19:52,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=135637.33333333334, ans=0.0
+2024-08-25 19:20:17,219 INFO [train.py:1114] (2/4) Epoch 11, batch 550, loss[loss=0.2472, simple_loss=0.3028, pruned_loss=0.07013, ctc_loss=0.1282, over 19228.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2922, pruned_loss=0.06584, ctc_loss=0.1238, over 3609068.07 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0
+2024-08-25 19:20:18,400 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.822e+02 2.069e+02 2.386e+02 4.149e+02, threshold=4.137e+02, percent-clipped=0.0
+2024-08-25 19:20:25,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=135690.66666666666, ans=0.2
+2024-08-25 19:20:36,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=135744.0, ans=0.025
+2024-08-25 19:20:42,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135744.0, ans=0.125
+2024-08-25 19:20:47,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=135744.0, ans=0.125
+2024-08-25 19:20:58,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135797.33333333334, ans=0.0
+2024-08-25 19:21:30,819 INFO [train.py:1114] (2/4) Epoch 11, batch 600, loss[loss=0.2543, simple_loss=0.3083, pruned_loss=0.07296, ctc_loss=0.136, over 19405.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2926, pruned_loss=0.06573, ctc_loss=0.1236, over 3666460.41 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:21:32,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135957.33333333334, ans=0.1
+2024-08-25 19:22:26,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=136010.66666666666, ans=0.125
+2024-08-25 19:22:30,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=136010.66666666666, ans=0.125
+2024-08-25 19:22:34,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=136064.0, ans=0.05
+2024-08-25 19:22:47,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136064.0, ans=0.125
+2024-08-25 19:22:49,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=136064.0, ans=0.125
+2024-08-25 19:23:41,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.63 vs. limit=15.0
+2024-08-25 19:23:54,560 INFO [train.py:1114] (2/4) Epoch 11, batch 650, loss[loss=0.2411, simple_loss=0.2943, pruned_loss=0.06968, ctc_loss=0.1214, over 19769.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2917, pruned_loss=0.06522, ctc_loss=0.1225, over 3716733.32 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:23:55,631 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.913e+02 2.094e+02 2.738e+02 4.984e+02, threshold=4.187e+02, percent-clipped=5.0
+2024-08-25 19:24:28,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=136277.33333333334, ans=0.125
+2024-08-25 19:24:50,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=136384.0, ans=0.035
+2024-08-25 19:24:59,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=136384.0, ans=0.125
+2024-08-25 19:25:34,149 INFO [train.py:1114] (2/4) Epoch 11, batch 700, loss[loss=0.2506, simple_loss=0.2923, pruned_loss=0.07472, ctc_loss=0.1486, over 19736.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2913, pruned_loss=0.06465, ctc_loss=0.1216, over 3748573.45 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:25:53,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.27 vs. limit=22.5
+2024-08-25 19:26:32,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136544.0, ans=0.125
+2024-08-25 19:26:42,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=136597.33333333334, ans=0.0
+2024-08-25 19:27:40,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136650.66666666666, ans=0.1
+2024-08-25 19:28:07,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.36 vs. limit=22.5
+2024-08-25 19:28:10,010 INFO [train.py:1114] (2/4) Epoch 11, batch 750, loss[loss=0.2302, simple_loss=0.2818, pruned_loss=0.06478, ctc_loss=0.1227, over 19502.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2913, pruned_loss=0.06483, ctc_loss=0.1219, over 3773497.42 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:28:25,946 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.821e+02 2.028e+02 2.720e+02 4.524e+02, threshold=4.057e+02, percent-clipped=2.0
+2024-08-25 19:28:57,728 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=6.355e-03
+2024-08-25 19:28:59,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.33 vs. limit=22.5
+2024-08-25 19:29:04,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=136864.0, ans=0.2
+2024-08-25 19:32:08,683 INFO [train.py:1114] (2/4) Epoch 11, batch 800, loss[loss=0.2061, simple_loss=0.2589, pruned_loss=0.05573, ctc_loss=0.1044, over 19800.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2906, pruned_loss=0.06457, ctc_loss=0.1215, over 3795681.07 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:32:13,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=137024.0, ans=0.125
+2024-08-25 19:32:59,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=137077.33333333334, ans=0.0
+2024-08-25 19:33:38,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=137237.33333333334, ans=0.015
+2024-08-25 19:33:49,223 INFO [train.py:1114] (2/4) Epoch 11, batch 850, loss[loss=0.2347, simple_loss=0.3036, pruned_loss=0.05975, ctc_loss=0.1158, over 19636.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2901, pruned_loss=0.06433, ctc_loss=0.1211, over 3814999.06 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:33:50,257 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.863e+02 2.065e+02 2.415e+02 4.305e+02, threshold=4.130e+02, percent-clipped=1.0
+2024-08-25 19:33:52,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.37 vs. limit=12.0
+2024-08-25 19:34:29,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=137397.33333333334, ans=0.0
+2024-08-25 19:34:42,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=137450.66666666666, ans=0.025
+2024-08-25 19:35:05,245 INFO [train.py:1114] (2/4) Epoch 11, batch 900, loss[loss=0.2041, simple_loss=0.2568, pruned_loss=0.05526, ctc_loss=0.1021, over 19422.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2913, pruned_loss=0.06542, ctc_loss=0.1229, over 3820246.27 frames. ], batch size: 48, lr: 1.37e-02, grad_scale: 32.0
+2024-08-25 19:35:20,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0
+2024-08-25 19:35:35,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.50 vs. limit=15.0
limit=15.0 +2024-08-25 19:36:06,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.32 vs. limit=22.5 +2024-08-25 19:36:11,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=137717.33333333334, ans=0.125 +2024-08-25 19:36:12,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=137717.33333333334, ans=0.2 +2024-08-25 19:36:14,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.48 vs. limit=15.0 +2024-08-25 19:36:47,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137770.66666666666, ans=0.1 +2024-08-25 19:37:18,563 INFO [train.py:1114] (2/4) Epoch 11, batch 950, loss[loss=0.2466, simple_loss=0.2907, pruned_loss=0.07224, ctc_loss=0.1451, over 19493.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.292, pruned_loss=0.06558, ctc_loss=0.1234, over 3820822.53 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:37:19,703 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.805e+02 2.081e+02 2.536e+02 4.211e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-25 19:37:39,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.68 vs. limit=15.0 +2024-08-25 19:38:13,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.42 vs. limit=10.0 +2024-08-25 19:38:16,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138037.33333333334, ans=0.125 +2024-08-25 19:38:33,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=138037.33333333334, ans=0.025 +2024-08-25 19:38:48,994 INFO [train.py:1114] (2/4) Epoch 11, batch 1000, loss[loss=0.2291, simple_loss=0.289, pruned_loss=0.06125, ctc_loss=0.1169, over 19845.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.2926, pruned_loss=0.06581, ctc_loss=0.1239, over 3816113.47 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:39:37,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=138197.33333333334, ans=0.025 +2024-08-25 19:40:03,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138304.0, ans=0.125 +2024-08-25 19:40:14,840 INFO [train.py:1114] (2/4) Epoch 11, batch 1050, loss[loss=0.2624, simple_loss=0.3178, pruned_loss=0.07502, ctc_loss=0.1422, over 19836.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2924, pruned_loss=0.06576, ctc_loss=0.1238, over 3822419.89 frames. 
], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:40:16,858 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.874e+02 2.329e+02 2.645e+02 4.211e+02, threshold=4.658e+02, percent-clipped=2.0 +2024-08-25 19:40:27,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=138410.66666666666, ans=0.125 +2024-08-25 19:40:35,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138410.66666666666, ans=0.0 +2024-08-25 19:40:38,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=138464.0, ans=0.125 +2024-08-25 19:40:44,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=138464.0, ans=0.0 +2024-08-25 19:41:00,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.38 vs. limit=15.0 +2024-08-25 19:41:19,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=15.0 +2024-08-25 19:41:25,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=138624.0, ans=0.025 +2024-08-25 19:41:26,115 INFO [train.py:1114] (2/4) Epoch 11, batch 1100, loss[loss=0.2379, simple_loss=0.2898, pruned_loss=0.06713, ctc_loss=0.1295, over 19600.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2919, pruned_loss=0.06545, ctc_loss=0.1233, over 3829270.27 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:41:26,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=138624.0, ans=0.025 +2024-08-25 19:41:55,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=138624.0, ans=10.0 +2024-08-25 19:42:17,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=138677.33333333334, ans=0.07 +2024-08-25 19:42:19,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.55 vs. limit=15.0 +2024-08-25 19:43:01,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=138730.66666666666, ans=0.125 +2024-08-25 19:43:14,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138784.0, ans=0.125 +2024-08-25 19:43:21,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.31 vs. limit=15.0 +2024-08-25 19:43:35,984 INFO [train.py:1114] (2/4) Epoch 11, batch 1150, loss[loss=0.2199, simple_loss=0.2844, pruned_loss=0.05689, ctc_loss=0.104, over 19572.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2916, pruned_loss=0.0652, ctc_loss=0.1229, over 3828256.24 frames. 
], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:43:37,191 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.797e+02 2.039e+02 2.453e+02 4.580e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-25 19:44:21,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=6.0 +2024-08-25 19:44:34,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=139104.0, ans=0.125 +2024-08-25 19:44:41,854 INFO [train.py:1114] (2/4) Epoch 11, batch 1200, loss[loss=0.2514, simple_loss=0.3083, pruned_loss=0.07009, ctc_loss=0.136, over 19840.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.2931, pruned_loss=0.066, ctc_loss=0.1244, over 3823999.86 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:44:47,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139157.33333333334, ans=0.125 +2024-08-25 19:44:57,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=6.0 +2024-08-25 19:45:09,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=139210.66666666666, ans=0.125 +2024-08-25 19:45:39,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=139264.0, ans=0.0 +2024-08-25 19:46:15,671 INFO [train.py:1114] (2/4) Epoch 11, batch 1250, loss[loss=0.2598, simple_loss=0.3129, pruned_loss=0.0757, ctc_loss=0.1384, over 19537.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.2932, pruned_loss=0.06576, ctc_loss=0.1239, over 3842558.34 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:46:16,709 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.769e+02 1.992e+02 2.545e+02 3.633e+02, threshold=3.984e+02, percent-clipped=0.0 +2024-08-25 19:46:17,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=139424.0, ans=0.125 +2024-08-25 19:46:57,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=139530.66666666666, ans=0.125 +2024-08-25 19:47:40,592 INFO [train.py:1114] (2/4) Epoch 11, batch 1300, loss[loss=0.2689, simple_loss=0.3143, pruned_loss=0.08176, ctc_loss=0.1498, over 18926.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.2925, pruned_loss=0.06532, ctc_loss=0.123, over 3846410.19 frames. ], batch size: 76, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:48:31,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=139850.66666666666, ans=0.125 +2024-08-25 19:48:31,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=139850.66666666666, ans=0.0 +2024-08-25 19:48:59,268 INFO [train.py:1114] (2/4) Epoch 11, batch 1350, loss[loss=0.2165, simple_loss=0.285, pruned_loss=0.05329, ctc_loss=0.1036, over 19745.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2917, pruned_loss=0.06495, ctc_loss=0.1222, over 3857898.02 frames. 
], batch size: 54, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:49:01,651 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.851e+02 2.124e+02 2.742e+02 4.665e+02, threshold=4.248e+02, percent-clipped=3.0 +2024-08-25 19:49:13,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140010.66666666666, ans=0.125 +2024-08-25 19:49:14,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=140010.66666666666, ans=0.0 +2024-08-25 19:49:15,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.84 vs. limit=15.0 +2024-08-25 19:49:31,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=140064.0, ans=0.125 +2024-08-25 19:49:49,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140117.33333333334, ans=0.1 +2024-08-25 19:50:07,177 INFO [train.py:1114] (2/4) Epoch 11, batch 1400, loss[loss=0.2115, simple_loss=0.2651, pruned_loss=0.0574, ctc_loss=0.108, over 19688.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2916, pruned_loss=0.06511, ctc_loss=0.1222, over 3865014.61 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:50:13,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=140224.0, ans=0.02 +2024-08-25 19:50:13,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=140224.0, ans=0.125 +2024-08-25 19:50:14,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=140224.0, ans=0.0 +2024-08-25 19:50:17,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=140224.0, ans=0.2 +2024-08-25 19:50:47,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=140277.33333333334, ans=0.025 +2024-08-25 19:50:52,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=140330.66666666666, ans=0.0 +2024-08-25 19:51:02,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140330.66666666666, ans=0.1 +2024-08-25 19:51:03,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=140330.66666666666, ans=0.2 +2024-08-25 19:51:14,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140330.66666666666, ans=0.1 +2024-08-25 19:51:38,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=140437.33333333334, ans=0.05 +2024-08-25 19:51:39,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140437.33333333334, ans=0.125 +2024-08-25 19:51:42,658 INFO [train.py:1114] (2/4) Epoch 11, batch 1450, loss[loss=0.224, simple_loss=0.2923, pruned_loss=0.05619, ctc_loss=0.1083, over 19682.00 frames. 
], tot_loss[loss=0.2359, simple_loss=0.2923, pruned_loss=0.06521, ctc_loss=0.1228, over 3862777.01 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:51:44,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140490.66666666666, ans=0.1 +2024-08-25 19:51:45,015 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.813e+02 2.052e+02 2.523e+02 4.896e+02, threshold=4.103e+02, percent-clipped=2.0 +2024-08-25 19:51:47,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.41 vs. limit=12.0 +2024-08-25 19:51:58,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=140544.0, ans=22.5 +2024-08-25 19:52:06,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140544.0, ans=0.1 +2024-08-25 19:52:07,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=140544.0, ans=0.025 +2024-08-25 19:52:07,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.89 vs. limit=15.0 +2024-08-25 19:52:15,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140597.33333333334, ans=0.125 +2024-08-25 19:52:26,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=140597.33333333334, ans=0.025 +2024-08-25 19:52:51,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=140650.66666666666, ans=0.125 +2024-08-25 19:53:19,915 INFO [train.py:1114] (2/4) Epoch 11, batch 1500, loss[loss=0.2271, simple_loss=0.3035, pruned_loss=0.05378, ctc_loss=0.1077, over 19580.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2927, pruned_loss=0.06523, ctc_loss=0.1229, over 3861628.27 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:53:24,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140757.33333333334, ans=0.125 +2024-08-25 19:53:24,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=140757.33333333334, ans=0.125 +2024-08-25 19:53:27,523 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.26 vs. limit=22.5 +2024-08-25 19:53:49,141 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:53:54,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140810.66666666666, ans=0.0 +2024-08-25 19:54:15,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=140864.0, ans=0.125 +2024-08-25 19:55:07,099 INFO [train.py:1114] (2/4) Epoch 11, batch 1550, loss[loss=0.2759, simple_loss=0.3236, pruned_loss=0.08434, ctc_loss=0.1488, over 19619.00 frames. 
], tot_loss[loss=0.2372, simple_loss=0.2929, pruned_loss=0.06585, ctc_loss=0.1242, over 3845590.94 frames. ], batch size: 60, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:55:10,749 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.421e+02 1.804e+02 2.014e+02 2.422e+02 4.168e+02, threshold=4.028e+02, percent-clipped=1.0 +2024-08-25 19:55:37,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141024.0, ans=0.0 +2024-08-25 19:56:10,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=141130.66666666666, ans=0.125 +2024-08-25 19:56:59,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=141184.0, ans=0.2 +2024-08-25 19:57:02,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.59 vs. limit=8.0 +2024-08-25 19:57:12,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=141237.33333333334, ans=0.125 +2024-08-25 19:57:19,250 INFO [train.py:1114] (2/4) Epoch 11, batch 1600, loss[loss=0.2602, simple_loss=0.3239, pruned_loss=0.07158, ctc_loss=0.1336, over 19841.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.2927, pruned_loss=0.06571, ctc_loss=0.1241, over 3834471.70 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-25 19:58:10,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.24 vs. limit=15.0 +2024-08-25 19:58:15,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=141397.33333333334, ans=0.04949747468305833 +2024-08-25 19:58:41,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=141504.0, ans=0.0 +2024-08-25 19:59:26,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=141557.33333333334, ans=0.125 +2024-08-25 19:59:27,422 INFO [train.py:1114] (2/4) Epoch 11, batch 1650, loss[loss=0.228, simple_loss=0.2901, pruned_loss=0.06027, ctc_loss=0.1133, over 19663.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.2923, pruned_loss=0.06556, ctc_loss=0.1239, over 3830731.41 frames. 
], batch size: 59, lr: 1.35e-02, grad_scale: 32.0 +2024-08-25 19:59:29,884 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.768e+02 1.990e+02 2.303e+02 4.438e+02, threshold=3.979e+02, percent-clipped=2.0 +2024-08-25 19:59:43,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=141557.33333333334, ans=0.09899494936611666 +2024-08-25 19:59:45,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=141557.33333333334, ans=0.0 +2024-08-25 20:00:00,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=141610.66666666666, ans=0.125 +2024-08-25 20:00:00,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=141610.66666666666, ans=0.0 +2024-08-25 20:00:30,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=141717.33333333334, ans=0.2 +2024-08-25 20:00:33,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.79 vs. limit=15.0 +2024-08-25 20:00:41,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=141770.66666666666, ans=0.125 +2024-08-25 20:01:05,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=141770.66666666666, ans=0.0 +2024-08-25 20:01:12,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.82 vs. limit=15.0 +2024-08-25 20:01:17,961 INFO [train.py:1114] (2/4) Epoch 11, batch 1700, loss[loss=0.1996, simple_loss=0.2583, pruned_loss=0.05101, ctc_loss=0.09732, over 19670.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2917, pruned_loss=0.0652, ctc_loss=0.1231, over 3845271.98 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 20:01:38,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.29 vs. limit=15.0 +2024-08-25 20:01:46,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141930.66666666666, ans=0.125 +2024-08-25 20:01:55,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=141984.0, ans=0.1 +2024-08-25 20:02:16,948 INFO [train.py:1114] (2/4) Epoch 11, batch 1750, loss[loss=0.1829, simple_loss=0.2441, pruned_loss=0.04398, ctc_loss=0.08453, over 19655.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.2915, pruned_loss=0.06491, ctc_loss=0.1224, over 3850929.04 frames. 
], batch size: 45, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 20:02:20,525 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.814e+02 2.107e+02 2.366e+02 3.890e+02, threshold=4.214e+02, percent-clipped=0.0 +2024-08-25 20:02:40,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=142090.66666666666, ans=0.0 +2024-08-25 20:02:46,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.82 vs. limit=15.0 +2024-08-25 20:03:06,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=142144.0, ans=0.125 +2024-08-25 20:03:06,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=142144.0, ans=0.0 +2024-08-25 20:03:17,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=142197.33333333334, ans=0.0 +2024-08-25 20:03:28,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=142250.66666666666, ans=0.0 +2024-08-25 20:03:58,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=142250.66666666666, ans=0.125 +2024-08-25 20:04:02,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=142304.0, ans=0.0 +2024-08-25 20:04:24,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=142304.0, ans=0.125 +2024-08-25 20:04:27,053 INFO [train.py:1114] (2/4) Epoch 11, batch 1800, loss[loss=0.2466, simple_loss=0.3039, pruned_loss=0.0687, ctc_loss=0.1295, over 19597.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2914, pruned_loss=0.06478, ctc_loss=0.1222, over 3852758.36 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 20:05:03,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=142410.66666666666, ans=0.125 +2024-08-25 20:05:23,563 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=15.0 +2024-08-25 20:05:30,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.35 vs. limit=15.0 +2024-08-25 20:05:32,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=142517.33333333334, ans=0.125 +2024-08-25 20:05:41,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.64 vs. limit=5.0 +2024-08-25 20:06:15,204 INFO [train.py:1114] (2/4) Epoch 11, batch 1850, loss[loss=0.2118, simple_loss=0.2922, pruned_loss=0.04666, ctc_loss=0.09512, over 19561.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2907, pruned_loss=0.06419, ctc_loss=0.1211, over 3856960.27 frames. 
], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-25 20:06:18,513 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.849e+02 2.256e+02 2.966e+02 5.642e+02, threshold=4.511e+02, percent-clipped=6.0 +2024-08-25 20:06:25,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=142677.33333333334, ans=0.125 +2024-08-25 20:07:20,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=142784.0, ans=0.035 +2024-08-25 20:07:38,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=142837.33333333334, ans=0.07 +2024-08-25 20:07:51,845 INFO [train.py:1114] (2/4) Epoch 11, batch 1900, loss[loss=0.2654, simple_loss=0.3237, pruned_loss=0.07497, ctc_loss=0.143, over 19655.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2914, pruned_loss=0.06429, ctc_loss=0.121, over 3861403.51 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0 +2024-08-25 20:07:56,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-08-25 20:12:09,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=142997.33333333334, ans=0.125 +2024-08-25 20:37:10,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=143050.66666666666, ans=0.125 +2024-08-25 20:37:10,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=143050.66666666666, ans=0.025 +2024-08-25 20:39:08,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=143104.0, ans=0.0 +2024-08-25 20:55:30,027 INFO [train.py:1114] (2/4) Epoch 11, batch 1950, loss[loss=0.2467, simple_loss=0.2992, pruned_loss=0.07177, ctc_loss=0.1268, over 19592.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2925, pruned_loss=0.06473, ctc_loss=0.1217, over 3870394.31 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-25 21:03:39,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.850e+02 2.123e+02 2.695e+02 5.282e+02, threshold=4.246e+02, percent-clipped=2.0 +2024-08-25 21:15:08,854 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.88 vs. limit=22.5 +2024-08-25 21:22:29,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.82 vs. limit=15.0 +2024-08-25 21:28:58,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=143264.0, ans=0.125 +2024-08-25 21:32:13,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=143317.33333333334, ans=0.2 +2024-08-25 21:32:13,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.64 vs. limit=15.0 +2024-08-25 21:38:45,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.73 vs. 
limit=15.0 +2024-08-25 21:44:03,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=143370.66666666666, ans=0.0 +2024-08-25 21:46:38,196 INFO [train.py:1114] (2/4) Epoch 11, batch 2000, loss[loss=0.2161, simple_loss=0.2609, pruned_loss=0.06247, ctc_loss=0.1157, over 19610.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.2931, pruned_loss=0.06526, ctc_loss=0.1227, over 3855528.82 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 21:57:15,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143424.0, ans=0.1 +2024-08-25 22:03:02,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.53 vs. limit=10.0 +2024-08-25 22:06:23,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.25 vs. limit=12.0 +2024-08-25 22:09:03,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143584.0, ans=0.1 +2024-08-25 22:15:14,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.19 vs. limit=22.5 +2024-08-25 22:19:42,827 INFO [train.py:1114] (2/4) Epoch 11, batch 2050, loss[loss=0.2174, simple_loss=0.2687, pruned_loss=0.06091, ctc_loss=0.1107, over 19726.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2915, pruned_loss=0.06478, ctc_loss=0.1216, over 3851246.06 frames. ], batch size: 47, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:20:13,483 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.838e+02 2.216e+02 2.724e+02 4.008e+02, threshold=4.432e+02, percent-clipped=0.0 +2024-08-25 22:27:00,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=143797.33333333334, ans=0.125 +2024-08-25 22:28:46,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=143850.66666666666, ans=0.125 +2024-08-25 22:32:23,629 INFO [train.py:1114] (2/4) Epoch 11, batch 2100, loss[loss=0.2403, simple_loss=0.2968, pruned_loss=0.0666, ctc_loss=0.1264, over 19773.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2912, pruned_loss=0.06459, ctc_loss=0.1213, over 3858179.99 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:33:48,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.24 vs. 
limit=22.5 +2024-08-25 22:34:13,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143957.33333333334, ans=0.0 +2024-08-25 22:34:13,809 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 22:34:13,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=143957.33333333334, ans=0.2 +2024-08-25 22:34:32,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=143957.33333333334, ans=0.2 +2024-08-25 22:34:32,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143957.33333333334, ans=0.1 +2024-08-25 22:35:39,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0 +2024-08-25 22:35:47,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.22 vs. limit=15.0 +2024-08-25 22:36:29,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=24.45 vs. limit=22.5 +2024-08-25 22:36:59,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=144117.33333333334, ans=0.0 +2024-08-25 22:37:44,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=144117.33333333334, ans=0.2 +2024-08-25 22:37:46,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=144170.66666666666, ans=0.04949747468305833 +2024-08-25 22:39:07,578 INFO [train.py:1114] (2/4) Epoch 11, batch 2150, loss[loss=0.2315, simple_loss=0.292, pruned_loss=0.0617, ctc_loss=0.1191, over 19856.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2901, pruned_loss=0.06378, ctc_loss=0.1199, over 3869964.16 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:39:51,935 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.523e+02 1.804e+02 2.068e+02 2.942e+02 5.639e+02, threshold=4.136e+02, percent-clipped=4.0 +2024-08-25 22:40:53,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=144277.33333333334, ans=0.125 +2024-08-25 22:42:57,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144384.0, ans=0.1 +2024-08-25 22:42:57,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-08-25 22:43:53,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=144437.33333333334, ans=0.025 +2024-08-25 22:44:02,560 INFO [train.py:1114] (2/4) Epoch 11, batch 2200, loss[loss=0.2492, simple_loss=0.3004, pruned_loss=0.07214, ctc_loss=0.1343, over 19595.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2901, pruned_loss=0.06379, ctc_loss=0.1199, over 3868361.65 frames. 
], batch size: 57, lr: 1.34e-02, grad_scale: 32.0 +2024-08-25 22:44:52,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.47 vs. limit=22.5 +2024-08-25 22:46:43,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144597.33333333334, ans=0.125 +2024-08-25 22:47:10,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=15.0 +2024-08-25 22:47:21,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=144650.66666666666, ans=0.125 +2024-08-25 22:48:34,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144704.0, ans=0.1 +2024-08-25 22:48:34,577 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.23 vs. limit=15.0 +2024-08-25 22:48:35,130 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 22:48:51,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=144704.0, ans=0.125 +2024-08-25 22:49:03,012 INFO [train.py:1114] (2/4) Epoch 11, batch 2250, loss[loss=0.2271, simple_loss=0.2952, pruned_loss=0.0566, ctc_loss=0.1142, over 19626.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2901, pruned_loss=0.0635, ctc_loss=0.1193, over 3867766.00 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:49:04,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=144757.33333333334, ans=0.2 +2024-08-25 22:49:05,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.42 vs. limit=10.0 +2024-08-25 22:49:09,619 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.818e+02 2.110e+02 2.782e+02 6.628e+02, threshold=4.220e+02, percent-clipped=3.0 +2024-08-25 22:49:34,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144810.66666666666, ans=0.125 +2024-08-25 22:50:08,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0 +2024-08-25 22:50:35,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=144970.66666666666, ans=0.125 +2024-08-25 22:50:46,943 INFO [train.py:1114] (2/4) Epoch 11, batch 2300, loss[loss=0.2071, simple_loss=0.2727, pruned_loss=0.05177, ctc_loss=0.09501, over 19502.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2899, pruned_loss=0.06397, ctc_loss=0.1202, over 3860445.86 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:51:07,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145024.0, ans=0.1 +2024-08-25 22:51:09,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.52 vs. 
limit=10.0 +2024-08-25 22:51:17,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.77 vs. limit=10.0 +2024-08-25 22:51:24,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145077.33333333334, ans=0.125 +2024-08-25 22:51:38,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145130.66666666666, ans=0.1 +2024-08-25 22:51:40,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=145130.66666666666, ans=0.125 +2024-08-25 22:52:10,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-08-25 22:52:34,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145184.0, ans=0.125 +2024-08-25 22:52:55,275 INFO [train.py:1114] (2/4) Epoch 11, batch 2350, loss[loss=0.2579, simple_loss=0.3097, pruned_loss=0.07451, ctc_loss=0.1425, over 19690.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2902, pruned_loss=0.06413, ctc_loss=0.1205, over 3862862.35 frames. ], batch size: 63, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:53:01,234 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 1.788e+02 2.141e+02 2.380e+02 3.835e+02, threshold=4.282e+02, percent-clipped=0.0 +2024-08-25 22:53:05,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=145290.66666666666, ans=0.2 +2024-08-25 22:53:08,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=145290.66666666666, ans=0.05 +2024-08-25 22:53:52,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=145450.66666666666, ans=0.0 +2024-08-25 22:53:55,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145450.66666666666, ans=0.125 +2024-08-25 22:54:01,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=145450.66666666666, ans=0.0 +2024-08-25 22:54:18,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=145504.0, ans=22.5 +2024-08-25 22:54:25,997 INFO [train.py:1114] (2/4) Epoch 11, batch 2400, loss[loss=0.2677, simple_loss=0.3155, pruned_loss=0.07804, ctc_loss=0.1595, over 19284.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.293, pruned_loss=0.06521, ctc_loss=0.1224, over 3857542.92 frames. 
], batch size: 71, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:54:28,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145557.33333333334, ans=0.125 +2024-08-25 22:55:10,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=145717.33333333334, ans=0.0 +2024-08-25 22:55:17,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=145717.33333333334, ans=0.025 +2024-08-25 22:55:20,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=145717.33333333334, ans=0.0 +2024-08-25 22:55:36,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.16 vs. limit=15.0 +2024-08-25 22:55:44,080 INFO [train.py:1114] (2/4) Epoch 11, batch 2450, loss[loss=0.3178, simple_loss=0.3364, pruned_loss=0.1084, ctc_loss=0.2058, over 12991.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2976, pruned_loss=0.06893, ctc_loss=0.1294, over 3729308.10 frames. ], batch size: 142, lr: 1.33e-02, grad_scale: 32.0 +2024-08-25 22:56:00,766 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.910e+02 2.208e+02 2.594e+02 5.356e+02, threshold=4.415e+02, percent-clipped=1.0 +2024-08-25 22:56:03,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-08-25 22:56:27,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=145877.33333333334, ans=0.2 +2024-08-25 22:56:33,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.95 vs. limit=15.0 +2024-08-25 22:56:51,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=145930.66666666666, ans=0.125 +2024-08-25 22:57:16,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.85 vs. limit=15.0 +2024-08-25 22:57:26,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=145984.0, ans=0.0 +2024-08-25 22:58:44,107 INFO [train.py:1114] (2/4) Epoch 12, batch 0, loss[loss=0.2337, simple_loss=0.2767, pruned_loss=0.06915, ctc_loss=0.1313, over 19829.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.2767, pruned_loss=0.06915, ctc_loss=0.1313, over 19829.00 frames. ], batch size: 49, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 22:58:44,108 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-25 23:00:02,926 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.1972, simple_loss=0.2841, pruned_loss=0.04086, ctc_loss=0.07109, over 944034.00 frames. 
+2024-08-25 23:00:02,926 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB +2024-08-25 23:00:03,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146032.0, ans=0.1 +2024-08-25 23:00:06,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=146032.0, ans=0.125 +2024-08-25 23:00:15,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=146032.0, ans=0.125 +2024-08-25 23:00:20,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=12.0 +2024-08-25 23:00:23,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=146085.33333333334, ans=0.125 +2024-08-25 23:00:33,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=146138.66666666666, ans=0.125 +2024-08-25 23:01:07,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=146298.66666666666, ans=0.1 +2024-08-25 23:01:08,429 INFO [train.py:1114] (2/4) Epoch 12, batch 50, loss[loss=0.2179, simple_loss=0.2717, pruned_loss=0.05926, ctc_loss=0.1139, over 19714.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.2948, pruned_loss=0.0655, ctc_loss=0.1244, over 844495.10 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:01:17,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146298.66666666666, ans=0.125 +2024-08-25 23:01:22,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=146352.0, ans=0.07 +2024-08-25 23:01:25,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=146352.0, ans=10.0 +2024-08-25 23:01:26,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=146352.0, ans=0.125 +2024-08-25 23:01:27,719 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.810e+02 2.073e+02 2.436e+02 4.057e+02, threshold=4.147e+02, percent-clipped=0.0 +2024-08-25 23:01:42,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146405.33333333334, ans=0.1 +2024-08-25 23:01:43,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=146405.33333333334, ans=0.125 +2024-08-25 23:02:22,993 INFO [train.py:1114] (2/4) Epoch 12, batch 100, loss[loss=0.2072, simple_loss=0.273, pruned_loss=0.05184, ctc_loss=0.09412, over 19727.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2965, pruned_loss=0.06664, ctc_loss=0.1259, over 1498946.70 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:02:34,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.04 vs. 
limit=15.0 +2024-08-25 23:02:36,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=146565.33333333334, ans=0.04949747468305833 +2024-08-25 23:02:54,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=146618.66666666666, ans=0.05 +2024-08-25 23:03:23,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=146725.33333333334, ans=0.0 +2024-08-25 23:03:23,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146725.33333333334, ans=0.125 +2024-08-25 23:03:39,247 INFO [train.py:1114] (2/4) Epoch 12, batch 150, loss[loss=0.235, simple_loss=0.2798, pruned_loss=0.06925, ctc_loss=0.1295, over 19733.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.2921, pruned_loss=0.06427, ctc_loss=0.1216, over 2027009.71 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:03:57,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.23 vs. limit=15.0 +2024-08-25 23:04:04,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=146885.33333333334, ans=0.125 +2024-08-25 23:04:09,868 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.659e+02 1.880e+02 2.314e+02 3.650e+02, threshold=3.760e+02, percent-clipped=0.0 +2024-08-25 23:04:38,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=146938.66666666666, ans=0.0 +2024-08-25 23:04:55,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=147045.33333333334, ans=0.125 +2024-08-25 23:05:07,058 INFO [train.py:1114] (2/4) Epoch 12, batch 200, loss[loss=0.2655, simple_loss=0.3094, pruned_loss=0.07936, ctc_loss=0.1574, over 18276.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2896, pruned_loss=0.06325, ctc_loss=0.1196, over 2434759.55 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:06:49,930 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.87 vs. limit=22.5 +2024-08-25 23:07:01,821 INFO [train.py:1114] (2/4) Epoch 12, batch 250, loss[loss=0.2413, simple_loss=0.3012, pruned_loss=0.06589, ctc_loss=0.1238, over 19430.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.29, pruned_loss=0.06332, ctc_loss=0.1194, over 2755371.50 frames. 
], batch size: 67, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:07:22,628 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.308e+02 1.825e+02 2.154e+02 2.499e+02 3.884e+02, threshold=4.307e+02, percent-clipped=2.0 +2024-08-25 23:07:48,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=147525.33333333334, ans=0.125 +2024-08-25 23:07:52,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=147525.33333333334, ans=0.125 +2024-08-25 23:08:05,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=147578.66666666666, ans=0.0 +2024-08-25 23:08:13,959 INFO [train.py:1114] (2/4) Epoch 12, batch 300, loss[loss=0.2744, simple_loss=0.3165, pruned_loss=0.08425, ctc_loss=0.1594, over 19497.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.2895, pruned_loss=0.06323, ctc_loss=0.1193, over 3001501.23 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:08:19,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=147632.0, ans=0.125 +2024-08-25 23:08:33,821 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:08:34,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=147685.33333333334, ans=0.125 +2024-08-25 23:08:51,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147792.0, ans=0.125 +2024-08-25 23:08:59,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=147792.0, ans=0.0 +2024-08-25 23:09:03,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=147845.33333333334, ans=0.0 +2024-08-25 23:09:11,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=147845.33333333334, ans=0.0 +2024-08-25 23:09:17,494 INFO [train.py:1114] (2/4) Epoch 12, batch 350, loss[loss=0.2161, simple_loss=0.2762, pruned_loss=0.05651, ctc_loss=0.1074, over 19756.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2901, pruned_loss=0.06326, ctc_loss=0.1193, over 3191725.20 frames. 
], batch size: 48, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:09:25,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=147898.66666666666, ans=0.125 +2024-08-25 23:09:27,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147952.0, ans=0.125 +2024-08-25 23:09:36,446 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.749e+02 2.047e+02 2.740e+02 4.170e+02, threshold=4.094e+02, percent-clipped=0.0 +2024-08-25 23:09:39,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=147952.0, ans=0.1 +2024-08-25 23:09:44,894 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:09:56,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=148058.66666666666, ans=0.04949747468305833 +2024-08-25 23:10:10,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148058.66666666666, ans=0.1 +2024-08-25 23:10:21,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0 +2024-08-25 23:10:25,921 INFO [train.py:1114] (2/4) Epoch 12, batch 400, loss[loss=0.2202, simple_loss=0.2847, pruned_loss=0.05535, ctc_loss=0.1126, over 19493.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2891, pruned_loss=0.06258, ctc_loss=0.1179, over 3343684.26 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0 +2024-08-25 23:10:44,701 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:11:30,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148378.66666666666, ans=0.1 +2024-08-25 23:12:05,900 INFO [train.py:1114] (2/4) Epoch 12, batch 450, loss[loss=0.221, simple_loss=0.2884, pruned_loss=0.05611, ctc_loss=0.1035, over 19628.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.2895, pruned_loss=0.06309, ctc_loss=0.119, over 3452496.09 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:12:28,367 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.830e+02 2.201e+02 2.765e+02 4.484e+02, threshold=4.403e+02, percent-clipped=1.0 +2024-08-25 23:12:37,599 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:12:54,580 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:13:21,168 INFO [train.py:1114] (2/4) Epoch 12, batch 500, loss[loss=0.2282, simple_loss=0.2882, pruned_loss=0.06214, ctc_loss=0.1096, over 19702.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2879, pruned_loss=0.06222, ctc_loss=0.1172, over 3547344.25 frames. 
], batch size: 63, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:13:23,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=148698.66666666666, ans=0.125 +2024-08-25 23:13:33,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=148698.66666666666, ans=0.05 +2024-08-25 23:13:58,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=148805.33333333334, ans=0.025 +2024-08-25 23:14:02,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=148805.33333333334, ans=0.0 +2024-08-25 23:14:06,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=148805.33333333334, ans=0.125 +2024-08-25 23:14:31,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=148858.66666666666, ans=0.0 +2024-08-25 23:14:43,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148912.0, ans=0.1 +2024-08-25 23:14:53,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148912.0, ans=0.1 +2024-08-25 23:14:58,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=148965.33333333334, ans=0.0 +2024-08-25 23:14:58,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=148965.33333333334, ans=0.0 +2024-08-25 23:14:59,255 INFO [train.py:1114] (2/4) Epoch 12, batch 550, loss[loss=0.2535, simple_loss=0.3118, pruned_loss=0.06976, ctc_loss=0.1391, over 19297.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2883, pruned_loss=0.0623, ctc_loss=0.1174, over 3608911.18 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:15:03,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.25 vs. limit=15.0 +2024-08-25 23:15:17,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=148965.33333333334, ans=0.04949747468305833 +2024-08-25 23:15:41,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=149018.66666666666, ans=0.125 +2024-08-25 23:15:42,257 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.692e+02 2.049e+02 2.499e+02 4.022e+02, threshold=4.098e+02, percent-clipped=0.0 +2024-08-25 23:16:10,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149125.33333333334, ans=0.125 +2024-08-25 23:16:25,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149125.33333333334, ans=0.1 +2024-08-25 23:16:54,711 INFO [train.py:1114] (2/4) Epoch 12, batch 600, loss[loss=0.2629, simple_loss=0.3103, pruned_loss=0.07813, ctc_loss=0.1478, over 19397.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.289, pruned_loss=0.06248, ctc_loss=0.1179, over 3665652.38 frames. 
], batch size: 67, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:19:18,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149445.33333333334, ans=0.125 +2024-08-25 23:19:22,637 INFO [train.py:1114] (2/4) Epoch 12, batch 650, loss[loss=0.2137, simple_loss=0.2851, pruned_loss=0.05204, ctc_loss=0.09533, over 19779.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2886, pruned_loss=0.06235, ctc_loss=0.1175, over 3716151.89 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:19:36,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=149498.66666666666, ans=0.125 +2024-08-25 23:19:42,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=149552.0, ans=0.0 +2024-08-25 23:19:48,496 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.406e+02 1.911e+02 2.346e+02 2.911e+02 5.072e+02, threshold=4.691e+02, percent-clipped=6.0 +2024-08-25 23:20:32,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149658.66666666666, ans=0.125 +2024-08-25 23:20:34,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=149658.66666666666, ans=0.125 +2024-08-25 23:20:36,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.30 vs. limit=15.0 +2024-08-25 23:20:41,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=149712.0, ans=0.125 +2024-08-25 23:20:47,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=149712.0, ans=0.0 +2024-08-25 23:20:49,425 INFO [train.py:1114] (2/4) Epoch 12, batch 700, loss[loss=0.2433, simple_loss=0.2915, pruned_loss=0.07119, ctc_loss=0.1314, over 19718.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2891, pruned_loss=0.06267, ctc_loss=0.1181, over 3749713.72 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:21:19,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=149872.0, ans=0.2 +2024-08-25 23:21:27,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149925.33333333334, ans=0.125 +2024-08-25 23:21:30,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.33 vs. limit=15.0 +2024-08-25 23:21:51,378 INFO [train.py:1114] (2/4) Epoch 12, batch 750, loss[loss=0.2312, simple_loss=0.2895, pruned_loss=0.06259, ctc_loss=0.1193, over 19501.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2884, pruned_loss=0.06228, ctc_loss=0.1171, over 3775081.97 frames. 
], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:22:20,742 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.992e+02 2.563e+02 3.460e+02 5.252e+02, threshold=5.125e+02, percent-clipped=3.0 +2024-08-25 23:22:23,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150085.33333333334, ans=0.125 +2024-08-25 23:22:42,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.81 vs. limit=15.0 +2024-08-25 23:22:52,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150192.0, ans=0.0 +2024-08-25 23:23:10,644 INFO [train.py:1114] (2/4) Epoch 12, batch 800, loss[loss=0.2028, simple_loss=0.2613, pruned_loss=0.05325, ctc_loss=0.09463, over 19432.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2885, pruned_loss=0.06237, ctc_loss=0.1175, over 3796040.55 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:23:22,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=150352.0, ans=0.125 +2024-08-25 23:23:30,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=150352.0, ans=0.2 +2024-08-25 23:23:43,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=150458.66666666666, ans=0.125 +2024-08-25 23:24:01,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.56 vs. limit=10.0 +2024-08-25 23:24:06,616 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0 +2024-08-25 23:24:07,604 INFO [train.py:1114] (2/4) Epoch 12, batch 850, loss[loss=0.2331, simple_loss=0.2965, pruned_loss=0.06032, ctc_loss=0.1229, over 19668.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.288, pruned_loss=0.06184, ctc_loss=0.1167, over 3814711.63 frames. 
], batch size: 59, lr: 1.26e-02, grad_scale: 32.0 +2024-08-25 23:24:15,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150565.33333333334, ans=0.125 +2024-08-25 23:24:30,654 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.376e+02 1.732e+02 2.149e+02 2.756e+02 4.869e+02, threshold=4.297e+02, percent-clipped=0.0 +2024-08-25 23:24:50,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=150672.0, ans=0.125 +2024-08-25 23:25:06,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=150725.33333333334, ans=0.125 +2024-08-25 23:25:07,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=150725.33333333334, ans=0.02 +2024-08-25 23:25:09,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=150725.33333333334, ans=0.0 +2024-08-25 23:25:23,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=150778.66666666666, ans=0.0 +2024-08-25 23:25:25,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=150778.66666666666, ans=0.0 +2024-08-25 23:25:27,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=150778.66666666666, ans=0.125 +2024-08-25 23:25:39,230 INFO [train.py:1114] (2/4) Epoch 12, batch 900, loss[loss=0.2102, simple_loss=0.2666, pruned_loss=0.05507, ctc_loss=0.1092, over 19437.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2883, pruned_loss=0.06238, ctc_loss=0.1175, over 3818187.22 frames. ], batch size: 48, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:27:22,010 INFO [train.py:1114] (2/4) Epoch 12, batch 950, loss[loss=0.2505, simple_loss=0.2954, pruned_loss=0.07499, ctc_loss=0.1388, over 19500.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.2891, pruned_loss=0.06297, ctc_loss=0.1186, over 3819957.49 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:27:47,795 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.461e+02 1.727e+02 2.047e+02 2.468e+02 3.873e+02, threshold=4.093e+02, percent-clipped=0.0 +2024-08-25 23:28:33,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=151258.66666666666, ans=0.125 +2024-08-25 23:28:42,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151312.0, ans=0.0 +2024-08-25 23:28:46,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151312.0, ans=0.125 +2024-08-25 23:28:55,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=151365.33333333334, ans=0.125 +2024-08-25 23:28:55,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.58 vs. limit=22.5 +2024-08-25 23:28:55,980 INFO [train.py:1114] (2/4) Epoch 12, batch 1000, loss[loss=0.2046, simple_loss=0.2714, pruned_loss=0.05042, ctc_loss=0.09228, over 19826.00 frames. 
], tot_loss[loss=0.2326, simple_loss=0.2902, pruned_loss=0.06358, ctc_loss=0.1198, over 3816165.31 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:30:28,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=151472.0, ans=0.2 +2024-08-25 23:30:34,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.97 vs. limit=22.5 +2024-08-25 23:30:39,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=151525.33333333334, ans=0.125 +2024-08-25 23:30:48,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=151578.66666666666, ans=0.125 +2024-08-25 23:30:55,440 INFO [train.py:1114] (2/4) Epoch 12, batch 1050, loss[loss=0.2489, simple_loss=0.303, pruned_loss=0.07082, ctc_loss=0.133, over 19819.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2891, pruned_loss=0.06334, ctc_loss=0.1191, over 3822284.84 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:31:07,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=151685.33333333334, ans=0.0 +2024-08-25 23:31:14,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.748e+02 2.222e+02 2.883e+02 4.562e+02, threshold=4.445e+02, percent-clipped=3.0 +2024-08-25 23:31:32,725 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.69 vs. limit=15.0 +2024-08-25 23:31:35,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151792.0, ans=0.0 +2024-08-25 23:31:45,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=151845.33333333334, ans=0.125 +2024-08-25 23:32:07,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=151845.33333333334, ans=0.2 +2024-08-25 23:32:14,328 INFO [train.py:1114] (2/4) Epoch 12, batch 1100, loss[loss=0.2353, simple_loss=0.2926, pruned_loss=0.06348, ctc_loss=0.1276, over 19567.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2885, pruned_loss=0.06268, ctc_loss=0.1179, over 3830763.85 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:32:14,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=151898.66666666666, ans=0.0 +2024-08-25 23:32:38,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=151952.0, ans=0.0 +2024-08-25 23:33:00,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=152058.66666666666, ans=0.0 +2024-08-25 23:33:32,430 INFO [train.py:1114] (2/4) Epoch 12, batch 1150, loss[loss=0.2246, simple_loss=0.2814, pruned_loss=0.06008, ctc_loss=0.1193, over 19596.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2882, pruned_loss=0.06257, ctc_loss=0.118, over 3828319.46 frames. 
], batch size: 52, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:33:32,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=152165.33333333334, ans=0.07 +2024-08-25 23:33:35,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=152165.33333333334, ans=0.09899494936611666 +2024-08-25 23:34:06,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=152218.66666666666, ans=0.125 +2024-08-25 23:34:07,237 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.763e+02 2.002e+02 2.335e+02 5.298e+02, threshold=4.005e+02, percent-clipped=1.0 +2024-08-25 23:34:08,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=152218.66666666666, ans=0.125 +2024-08-25 23:34:19,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=152272.0, ans=0.125 +2024-08-25 23:34:23,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=152272.0, ans=0.0 +2024-08-25 23:34:28,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=152325.33333333334, ans=0.2 +2024-08-25 23:34:49,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152378.66666666666, ans=0.1 +2024-08-25 23:34:58,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=152432.0, ans=0.025 +2024-08-25 23:34:59,034 INFO [train.py:1114] (2/4) Epoch 12, batch 1200, loss[loss=0.2615, simple_loss=0.3184, pruned_loss=0.07369, ctc_loss=0.1431, over 19823.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.2897, pruned_loss=0.06338, ctc_loss=0.1201, over 3823869.62 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:35:07,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=152432.0, ans=0.2 +2024-08-25 23:35:29,574 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=12.0 +2024-08-25 23:35:36,902 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.60 vs. limit=10.0 +2024-08-25 23:35:52,300 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:35:58,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=152645.33333333334, ans=0.125 +2024-08-25 23:36:09,948 INFO [train.py:1114] (2/4) Epoch 12, batch 1250, loss[loss=0.2711, simple_loss=0.3231, pruned_loss=0.07995, ctc_loss=0.1482, over 19530.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2899, pruned_loss=0.06335, ctc_loss=0.1197, over 3842096.02 frames. 
], batch size: 61, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:36:34,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.907e+02 2.265e+02 2.785e+02 4.753e+02, threshold=4.530e+02, percent-clipped=2.0 +2024-08-25 23:36:37,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=152752.0, ans=0.025 +2024-08-25 23:36:38,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152752.0, ans=0.1 +2024-08-25 23:36:47,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=152805.33333333334, ans=15.0 +2024-08-25 23:36:52,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=152805.33333333334, ans=0.125 +2024-08-25 23:37:18,929 INFO [train.py:1114] (2/4) Epoch 12, batch 1300, loss[loss=0.2528, simple_loss=0.3009, pruned_loss=0.07371, ctc_loss=0.1432, over 18846.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2889, pruned_loss=0.06284, ctc_loss=0.1187, over 3844930.74 frames. ], batch size: 76, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:37:34,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=153018.66666666666, ans=0.0 +2024-08-25 23:37:45,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=153018.66666666666, ans=0.0 +2024-08-25 23:38:11,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153125.33333333334, ans=0.125 +2024-08-25 23:38:21,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.50 vs. limit=15.0 +2024-08-25 23:38:29,006 INFO [train.py:1114] (2/4) Epoch 12, batch 1350, loss[loss=0.2257, simple_loss=0.2825, pruned_loss=0.06083, ctc_loss=0.118, over 19773.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2885, pruned_loss=0.06237, ctc_loss=0.1177, over 3857406.97 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0 +2024-08-25 23:38:45,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=153285.33333333334, ans=0.125 +2024-08-25 23:38:46,279 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.300e+02 1.707e+02 2.039e+02 2.408e+02 4.402e+02, threshold=4.078e+02, percent-clipped=0.0 +2024-08-25 23:39:16,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=153392.0, ans=0.0 +2024-08-25 23:39:41,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-08-25 23:39:43,101 INFO [train.py:1114] (2/4) Epoch 12, batch 1400, loss[loss=0.1803, simple_loss=0.2412, pruned_loss=0.04315, ctc_loss=0.08264, over 19659.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2883, pruned_loss=0.06241, ctc_loss=0.1177, over 3864657.87 frames. 
], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:39:58,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153552.0, ans=0.125 +2024-08-25 23:40:26,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=153658.66666666666, ans=0.07 +2024-08-25 23:40:46,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=153658.66666666666, ans=0.0 +2024-08-25 23:41:07,351 INFO [train.py:1114] (2/4) Epoch 12, batch 1450, loss[loss=0.2401, simple_loss=0.2985, pruned_loss=0.06619, ctc_loss=0.1233, over 19637.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2888, pruned_loss=0.06246, ctc_loss=0.1174, over 3863077.00 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:41:12,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153765.33333333334, ans=0.125 +2024-08-25 23:41:27,995 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.446e+02 1.773e+02 2.135e+02 2.639e+02 4.435e+02, threshold=4.270e+02, percent-clipped=2.0 +2024-08-25 23:41:35,629 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.50 vs. limit=15.0 +2024-08-25 23:41:55,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=153872.0, ans=0.125 +2024-08-25 23:42:07,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-08-25 23:42:27,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=153978.66666666666, ans=0.0 +2024-08-25 23:42:43,046 INFO [train.py:1114] (2/4) Epoch 12, batch 1500, loss[loss=0.2334, simple_loss=0.3008, pruned_loss=0.06017, ctc_loss=0.114, over 19566.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2893, pruned_loss=0.06239, ctc_loss=0.1174, over 3863064.16 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:42:56,981 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.93 vs. limit=15.0 +2024-08-25 23:43:12,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=154085.33333333334, ans=0.125 +2024-08-25 23:43:24,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=154138.66666666666, ans=0.2 +2024-08-25 23:43:26,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154192.0, ans=0.1 +2024-08-25 23:43:46,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154192.0, ans=0.125 +2024-08-25 23:43:53,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154192.0, ans=0.0 +2024-08-25 23:44:00,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.56 vs. 
limit=12.0 +2024-08-25 23:44:01,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=154245.33333333334, ans=0.125 +2024-08-25 23:44:08,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=154298.66666666666, ans=0.1 +2024-08-25 23:44:09,907 INFO [train.py:1114] (2/4) Epoch 12, batch 1550, loss[loss=0.2373, simple_loss=0.3027, pruned_loss=0.06216, ctc_loss=0.1188, over 19609.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.2897, pruned_loss=0.06296, ctc_loss=0.1187, over 3847709.59 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:44:19,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0 +2024-08-25 23:44:21,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=154352.0, ans=0.015 +2024-08-25 23:44:43,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.860e+02 2.194e+02 2.828e+02 4.590e+02, threshold=4.388e+02, percent-clipped=1.0 +2024-08-25 23:44:46,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=154352.0, ans=0.125 +2024-08-25 23:45:14,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=154405.33333333334, ans=0.09899494936611666 +2024-08-25 23:46:36,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.45 vs. limit=22.5 +2024-08-25 23:46:37,536 INFO [train.py:1114] (2/4) Epoch 12, batch 1600, loss[loss=0.2224, simple_loss=0.2911, pruned_loss=0.05592, ctc_loss=0.1045, over 19841.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2899, pruned_loss=0.06319, ctc_loss=0.119, over 3835769.24 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:46:55,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.75 vs. limit=22.5 +2024-08-25 23:47:19,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=154618.66666666666, ans=0.125 +2024-08-25 23:47:24,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=154618.66666666666, ans=0.5 +2024-08-25 23:47:28,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=154618.66666666666, ans=0.125 +2024-08-25 23:47:34,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.87 vs. 
limit=22.5 +2024-08-25 23:47:38,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154672.0, ans=0.125 +2024-08-25 23:47:41,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154672.0, ans=0.125 +2024-08-25 23:47:52,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=154725.33333333334, ans=0.05 +2024-08-25 23:48:12,940 INFO [train.py:1114] (2/4) Epoch 12, batch 1650, loss[loss=0.2293, simple_loss=0.287, pruned_loss=0.0622, ctc_loss=0.1179, over 19655.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2887, pruned_loss=0.06237, ctc_loss=0.1176, over 3832969.62 frames. ], batch size: 59, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:48:32,978 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.751e+02 2.060e+02 2.481e+02 4.497e+02, threshold=4.120e+02, percent-clipped=1.0 +2024-08-25 23:48:35,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=154885.33333333334, ans=0.0 +2024-08-25 23:49:02,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=154992.0, ans=0.2 +2024-08-25 23:49:10,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=155045.33333333334, ans=0.0 +2024-08-25 23:49:19,224 INFO [train.py:1114] (2/4) Epoch 12, batch 1700, loss[loss=0.1994, simple_loss=0.2551, pruned_loss=0.05178, ctc_loss=0.1004, over 19677.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2883, pruned_loss=0.06198, ctc_loss=0.1166, over 3846956.48 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:49:31,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155152.0, ans=0.0 +2024-08-25 23:49:56,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-08-25 23:50:36,445 INFO [train.py:1114] (2/4) Epoch 12, batch 1750, loss[loss=0.2287, simple_loss=0.279, pruned_loss=0.06467, ctc_loss=0.1229, over 19695.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.2876, pruned_loss=0.06166, ctc_loss=0.1159, over 3852611.75 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:51:12,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.691e+02 1.944e+02 2.310e+02 4.068e+02, threshold=3.888e+02, percent-clipped=0.0 +2024-08-25 23:51:21,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.60 vs. 
limit=15.0 +2024-08-25 23:51:34,418 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:51:45,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155525.33333333334, ans=0.0 +2024-08-25 23:52:01,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155578.66666666666, ans=0.1 +2024-08-25 23:52:03,843 INFO [train.py:1114] (2/4) Epoch 12, batch 1800, loss[loss=0.2368, simple_loss=0.2965, pruned_loss=0.06326, ctc_loss=0.1263, over 19613.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2877, pruned_loss=0.06153, ctc_loss=0.1158, over 3854669.63 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:52:15,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=155632.0, ans=0.125 +2024-08-25 23:52:32,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.78 vs. limit=22.5 +2024-08-25 23:53:01,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=155738.66666666666, ans=0.125 +2024-08-25 23:53:28,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=155792.0, ans=0.125 +2024-08-25 23:53:37,088 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.33 vs. limit=22.5 +2024-08-25 23:53:58,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=155845.33333333334, ans=10.0 +2024-08-25 23:54:05,050 INFO [train.py:1114] (2/4) Epoch 12, batch 1850, loss[loss=0.2382, simple_loss=0.2994, pruned_loss=0.0638, ctc_loss=0.1236, over 19610.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2872, pruned_loss=0.06124, ctc_loss=0.1152, over 3858666.05 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:54:25,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=155898.66666666666, ans=0.1 +2024-08-25 23:54:33,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=155898.66666666666, ans=0.0 +2024-08-25 23:54:41,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=155952.0, ans=0.02 +2024-08-25 23:54:44,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.455e+02 1.785e+02 2.050e+02 2.712e+02 4.249e+02, threshold=4.100e+02, percent-clipped=1.0 +2024-08-25 23:55:35,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=156112.0, ans=0.2 +2024-08-25 23:56:02,277 INFO [train.py:1114] (2/4) Epoch 12, batch 1900, loss[loss=0.2371, simple_loss=0.2974, pruned_loss=0.0631, ctc_loss=0.1264, over 19657.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2878, pruned_loss=0.06137, ctc_loss=0.1152, over 3862714.37 frames. 
], batch size: 59, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:56:24,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.21 vs. limit=15.0 +2024-08-25 23:56:33,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=156218.66666666666, ans=0.95 +2024-08-25 23:57:02,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=156272.0, ans=0.025 +2024-08-25 23:57:03,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=156272.0, ans=0.2 +2024-08-25 23:57:15,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156325.33333333334, ans=0.1 +2024-08-25 23:57:17,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=156325.33333333334, ans=0.0 +2024-08-25 23:58:03,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=156378.66666666666, ans=0.0 +2024-08-25 23:58:09,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=156378.66666666666, ans=0.0 +2024-08-25 23:58:28,736 INFO [train.py:1114] (2/4) Epoch 12, batch 1950, loss[loss=0.2397, simple_loss=0.2874, pruned_loss=0.06965, ctc_loss=0.1318, over 19590.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2879, pruned_loss=0.06088, ctc_loss=0.1143, over 3870854.82 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:59:02,885 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 23:59:03,814 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.459e+02 1.700e+02 2.031e+02 2.417e+02 3.778e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-08-25 23:59:07,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=156485.33333333334, ans=0.125 +2024-08-25 23:59:21,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=156538.66666666666, ans=0.125 +2024-08-25 23:59:47,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=156645.33333333334, ans=0.125 +2024-08-25 23:59:51,576 INFO [train.py:1114] (2/4) Epoch 12, batch 2000, loss[loss=0.2557, simple_loss=0.2929, pruned_loss=0.0792, ctc_loss=0.15, over 19677.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.289, pruned_loss=0.06162, ctc_loss=0.1158, over 3854634.36 frames. 
], batch size: 45, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:00:02,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=156752.0, ans=0.0 +2024-08-26 00:00:08,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=156752.0, ans=0.0 +2024-08-26 00:00:15,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156805.33333333334, ans=0.125 +2024-08-26 00:01:25,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0 +2024-08-26 00:01:28,937 INFO [train.py:1114] (2/4) Epoch 12, batch 2050, loss[loss=0.2029, simple_loss=0.2577, pruned_loss=0.05464, ctc_loss=0.09721, over 19728.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.2878, pruned_loss=0.0617, ctc_loss=0.1159, over 3850292.88 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:01:46,857 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.778e+02 1.977e+02 2.412e+02 4.440e+02, threshold=3.953e+02, percent-clipped=1.0 +2024-08-26 00:01:52,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=157072.0, ans=0.0 +2024-08-26 00:01:54,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.63 vs. limit=15.0 +2024-08-26 00:01:59,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.09 vs. limit=12.0 +2024-08-26 00:02:19,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.28 vs. limit=22.5 +2024-08-26 00:02:21,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=15.0 +2024-08-26 00:02:27,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=157125.33333333334, ans=0.0 +2024-08-26 00:02:28,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=157125.33333333334, ans=0.1 +2024-08-26 00:02:36,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=157125.33333333334, ans=0.125 +2024-08-26 00:02:55,211 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 00:02:56,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=157178.66666666666, ans=0.0 +2024-08-26 00:02:58,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=157178.66666666666, ans=0.5 +2024-08-26 00:03:00,085 INFO [train.py:1114] (2/4) Epoch 12, batch 2100, loss[loss=0.2289, simple_loss=0.2876, pruned_loss=0.06256, ctc_loss=0.1127, over 19773.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2875, pruned_loss=0.06151, ctc_loss=0.1155, over 3857594.39 frames. 
], batch size: 54, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:08:51,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=157232.0, ans=0.125 +2024-08-26 00:27:53,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.41 vs. limit=10.0 +2024-08-26 00:48:05,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=157445.33333333334, ans=0.0 +2024-08-26 00:54:25,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157445.33333333334, ans=0.125 +2024-08-26 00:56:07,936 INFO [train.py:1114] (2/4) Epoch 12, batch 2150, loss[loss=0.2029, simple_loss=0.2668, pruned_loss=0.05093, ctc_loss=0.09285, over 19868.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.287, pruned_loss=0.06127, ctc_loss=0.115, over 3868522.75 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 01:08:01,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=157552.0, ans=0.125 +2024-08-26 01:09:53,319 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.788e+02 2.174e+02 2.705e+02 6.148e+02, threshold=4.348e+02, percent-clipped=11.0 +2024-08-26 01:21:15,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=157605.33333333334, ans=0.0 +2024-08-26 01:25:03,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=157658.66666666666, ans=0.0 +2024-08-26 01:28:45,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=157658.66666666666, ans=0.0 +2024-08-26 01:37:35,704 INFO [train.py:1114] (2/4) Epoch 12, batch 2200, loss[loss=0.2349, simple_loss=0.2988, pruned_loss=0.06338, ctc_loss=0.1109, over 19607.00 frames. ], tot_loss[loss=0.227, simple_loss=0.2865, pruned_loss=0.06088, ctc_loss=0.1143, over 3866806.24 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 01:38:03,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=157765.33333333334, ans=0.0 +2024-08-26 01:46:52,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157872.0, ans=0.1 +2024-08-26 01:49:48,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157872.0, ans=0.1 +2024-08-26 01:56:52,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=157978.66666666666, ans=0.125 +2024-08-26 01:57:30,325 INFO [train.py:1114] (2/4) Epoch 12, batch 2250, loss[loss=0.2448, simple_loss=0.302, pruned_loss=0.06736, ctc_loss=0.1319, over 19601.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2871, pruned_loss=0.06132, ctc_loss=0.1151, over 3867192.81 frames. 
], batch size: 55, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 01:58:21,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158032.0, ans=0.1 +2024-08-26 02:02:02,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=15.0 +2024-08-26 02:02:03,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158032.0, ans=0.1 +2024-08-26 02:03:20,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=158085.33333333334, ans=0.0 +2024-08-26 02:04:28,486 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.839e+02 2.199e+02 2.577e+02 6.358e+02, threshold=4.399e+02, percent-clipped=1.0 +2024-08-26 02:07:31,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=158138.66666666666, ans=0.125 +2024-08-26 02:08:01,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158192.0, ans=0.125 +2024-08-26 02:10:34,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.32 vs. limit=15.0 +2024-08-26 02:11:55,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158245.33333333334, ans=0.1 +2024-08-26 02:13:20,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=158298.66666666666, ans=0.09899494936611666 +2024-08-26 02:13:21,320 INFO [train.py:1114] (2/4) Epoch 12, batch 2300, loss[loss=0.2315, simple_loss=0.2882, pruned_loss=0.06361, ctc_loss=0.119, over 19501.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2864, pruned_loss=0.06159, ctc_loss=0.1156, over 3860788.69 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:13:39,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=158298.66666666666, ans=0.125 +2024-08-26 02:16:18,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=158405.33333333334, ans=0.125 +2024-08-26 02:16:56,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=158405.33333333334, ans=0.125 +2024-08-26 02:22:37,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=158565.33333333334, ans=0.0 +2024-08-26 02:22:39,617 INFO [train.py:1114] (2/4) Epoch 12, batch 2350, loss[loss=0.2391, simple_loss=0.2992, pruned_loss=0.0653, ctc_loss=0.1209, over 19678.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.286, pruned_loss=0.06162, ctc_loss=0.1156, over 3863829.97 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:24:54,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.28 vs. 
limit=15.0 +2024-08-26 02:25:18,450 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.991e+02 2.536e+02 3.183e+02 5.552e+02, threshold=5.072e+02, percent-clipped=5.0 +2024-08-26 02:27:03,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=158725.33333333334, ans=0.0 +2024-08-26 02:28:05,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=158725.33333333334, ans=0.125 +2024-08-26 02:28:12,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.66 vs. limit=10.0 +2024-08-26 02:28:29,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158778.66666666666, ans=0.1 +2024-08-26 02:30:27,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=158778.66666666666, ans=0.125 +2024-08-26 02:30:58,400 INFO [train.py:1114] (2/4) Epoch 12, batch 2400, loss[loss=0.2765, simple_loss=0.3198, pruned_loss=0.08412, ctc_loss=0.1624, over 19328.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.289, pruned_loss=0.0628, ctc_loss=0.1177, over 3858772.25 frames. ], batch size: 71, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:31:29,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=158885.33333333334, ans=0.125 +2024-08-26 02:36:35,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=158992.0, ans=0.125 +2024-08-26 02:37:04,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=158992.0, ans=0.125 +2024-08-26 02:37:51,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0 +2024-08-26 02:38:02,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159045.33333333334, ans=0.125 +2024-08-26 02:38:22,397 INFO [train.py:1114] (2/4) Epoch 12, batch 2450, loss[loss=0.3385, simple_loss=0.338, pruned_loss=0.1205, ctc_loss=0.2451, over 13609.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.2932, pruned_loss=0.06602, ctc_loss=0.1241, over 3731400.11 frames. 
], batch size: 140, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:39:04,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=159098.66666666666, ans=0.125 +2024-08-26 02:39:14,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159152.0, ans=0.125 +2024-08-26 02:39:42,316 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.859e+02 2.162e+02 2.447e+02 4.124e+02, threshold=4.324e+02, percent-clipped=0.0 +2024-08-26 02:40:01,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=159152.0, ans=0.025 +2024-08-26 02:40:38,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=159205.33333333334, ans=0.07 +2024-08-26 02:40:53,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.92 vs. limit=22.5 +2024-08-26 02:41:13,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=159258.66666666666, ans=0.125 +2024-08-26 02:41:13,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159258.66666666666, ans=0.125 +2024-08-26 02:43:45,623 INFO [train.py:1114] (2/4) Epoch 13, batch 0, loss[loss=0.2146, simple_loss=0.2728, pruned_loss=0.05671, ctc_loss=0.1077, over 19813.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2728, pruned_loss=0.05671, ctc_loss=0.1077, over 19813.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 32.0 +2024-08-26 02:43:45,624 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 02:45:27,908 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1972, simple_loss=0.2835, pruned_loss=0.04113, ctc_loss=0.07151, over 944034.00 frames. +2024-08-26 02:45:27,909 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB +2024-08-26 02:45:29,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=159306.66666666666, ans=0.025 +2024-08-26 02:45:31,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-08-26 02:45:44,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=159306.66666666666, ans=0.125 +2024-08-26 02:46:11,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=159360.0, ans=0.125 +2024-08-26 02:46:20,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.11 vs. limit=22.5 +2024-08-26 02:46:24,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=159413.33333333334, ans=0.125 +2024-08-26 02:46:53,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.24 vs. 
limit=15.0 +2024-08-26 02:46:54,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=159466.66666666666, ans=0.2 +2024-08-26 02:48:06,383 INFO [train.py:1114] (2/4) Epoch 13, batch 50, loss[loss=0.1943, simple_loss=0.2539, pruned_loss=0.0482, ctc_loss=0.09562, over 19726.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2895, pruned_loss=0.06243, ctc_loss=0.1182, over 844763.42 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:48:17,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.32 vs. limit=12.0 +2024-08-26 02:48:30,545 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.30 vs. limit=22.5 +2024-08-26 02:48:32,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159626.66666666666, ans=0.1 +2024-08-26 02:48:33,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=159626.66666666666, ans=0.0 +2024-08-26 02:48:38,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159680.0, ans=0.125 +2024-08-26 02:48:55,468 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.822e+02 2.122e+02 2.766e+02 5.339e+02, threshold=4.244e+02, percent-clipped=3.0 +2024-08-26 02:49:01,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=159733.33333333334, ans=0.2 +2024-08-26 02:49:26,910 INFO [train.py:1114] (2/4) Epoch 13, batch 100, loss[loss=0.2208, simple_loss=0.283, pruned_loss=0.05801, ctc_loss=0.1067, over 19715.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.2903, pruned_loss=0.06182, ctc_loss=0.1173, over 1499351.95 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:49:29,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159840.0, ans=0.125 +2024-08-26 02:49:35,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.64 vs. 
limit=15.0 +2024-08-26 02:49:40,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159840.0, ans=0.125 +2024-08-26 02:49:54,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=159893.33333333334, ans=0.2 +2024-08-26 02:50:00,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=159946.66666666666, ans=0.125 +2024-08-26 02:50:25,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160000.0, ans=0.125 +2024-08-26 02:50:58,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=160000.0, ans=0.125 +2024-08-26 02:51:13,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 02:51:16,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 02:51:27,230 INFO [train.py:1114] (2/4) Epoch 13, batch 150, loss[loss=0.2065, simple_loss=0.2592, pruned_loss=0.05585, ctc_loss=0.1053, over 19689.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.2873, pruned_loss=0.06045, ctc_loss=0.114, over 2027622.75 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:52:39,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160213.33333333334, ans=0.125 +2024-08-26 02:52:48,524 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.693e+02 1.889e+02 2.276e+02 3.515e+02, threshold=3.778e+02, percent-clipped=0.0 +2024-08-26 02:53:09,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160266.66666666666, ans=0.1 +2024-08-26 02:53:36,256 INFO [train.py:1114] (2/4) Epoch 13, batch 200, loss[loss=0.2841, simple_loss=0.3246, pruned_loss=0.0871, ctc_loss=0.1736, over 18259.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2863, pruned_loss=0.06006, ctc_loss=0.1133, over 2434814.35 frames. ], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:54:07,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=160426.66666666666, ans=0.025 +2024-08-26 02:54:09,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=160426.66666666666, ans=0.125 +2024-08-26 02:54:11,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=160480.0, ans=0.5 +2024-08-26 02:55:04,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=12.0 +2024-08-26 02:55:15,707 INFO [train.py:1114] (2/4) Epoch 13, batch 250, loss[loss=0.2621, simple_loss=0.3092, pruned_loss=0.07815, ctc_loss=0.1469, over 19360.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.286, pruned_loss=0.05979, ctc_loss=0.1131, over 2754451.47 frames. 
], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:55:18,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160640.0, ans=0.125 +2024-08-26 02:55:37,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160746.66666666666, ans=0.125 +2024-08-26 02:55:40,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160746.66666666666, ans=0.1 +2024-08-26 02:55:47,663 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.754e+02 2.188e+02 2.577e+02 4.403e+02, threshold=4.375e+02, percent-clipped=2.0 +2024-08-26 02:55:48,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=160746.66666666666, ans=0.2 +2024-08-26 02:56:01,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.91 vs. limit=15.0 +2024-08-26 02:56:10,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=160853.33333333334, ans=0.0 +2024-08-26 02:56:15,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=160853.33333333334, ans=0.07 +2024-08-26 02:56:43,563 INFO [train.py:1114] (2/4) Epoch 13, batch 300, loss[loss=0.243, simple_loss=0.3006, pruned_loss=0.06819, ctc_loss=0.1227, over 19511.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2851, pruned_loss=0.05938, ctc_loss=0.1123, over 2999845.44 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:57:24,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. limit=6.0 +2024-08-26 02:57:36,731 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:57:42,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=161120.0, ans=0.1 +2024-08-26 02:57:46,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=161120.0, ans=0.1 +2024-08-26 02:57:47,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.80 vs. limit=15.0 +2024-08-26 02:57:50,481 INFO [train.py:1114] (2/4) Epoch 13, batch 350, loss[loss=0.2167, simple_loss=0.2675, pruned_loss=0.06074, ctc_loss=0.1112, over 19755.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2851, pruned_loss=0.05926, ctc_loss=0.1118, over 3189809.24 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:58:04,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.39 vs. limit=22.5 +2024-08-26 02:58:13,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.24 vs. 
limit=15.0 +2024-08-26 02:58:13,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=161226.66666666666, ans=0.125 +2024-08-26 02:58:25,611 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.772e+02 2.039e+02 2.354e+02 3.759e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-26 02:58:36,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=161333.33333333334, ans=0.0 +2024-08-26 02:59:24,150 INFO [train.py:1114] (2/4) Epoch 13, batch 400, loss[loss=0.2183, simple_loss=0.2878, pruned_loss=0.05329, ctc_loss=0.1055, over 19504.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2846, pruned_loss=0.05922, ctc_loss=0.1117, over 3340937.66 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 02:59:55,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 02:59:56,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161493.33333333334, ans=0.1 +2024-08-26 02:59:56,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 03:00:32,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.35 vs. limit=15.0 +2024-08-26 03:00:41,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=161600.0, ans=0.025 +2024-08-26 03:00:41,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161600.0, ans=0.125 +2024-08-26 03:01:53,844 INFO [train.py:1114] (2/4) Epoch 13, batch 450, loss[loss=0.217, simple_loss=0.288, pruned_loss=0.05206, ctc_loss=0.1045, over 19610.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.285, pruned_loss=0.05951, ctc_loss=0.1122, over 3448536.75 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:01:55,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=161706.66666666666, ans=0.0 +2024-08-26 03:01:56,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.21 vs. 
limit=15.0 +2024-08-26 03:02:43,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161760.0, ans=0.0 +2024-08-26 03:03:10,112 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.726e+02 2.085e+02 2.754e+02 4.301e+02, threshold=4.170e+02, percent-clipped=3.0 +2024-08-26 03:03:23,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161920.0, ans=0.1 +2024-08-26 03:03:41,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=161920.0, ans=0.0 +2024-08-26 03:03:42,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=161920.0, ans=0.0 +2024-08-26 03:03:51,743 INFO [train.py:1114] (2/4) Epoch 13, batch 500, loss[loss=0.2653, simple_loss=0.3156, pruned_loss=0.07838, ctc_loss=0.1454, over 19656.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2847, pruned_loss=0.05939, ctc_loss=0.1119, over 3544900.86 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:04:14,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=161973.33333333334, ans=0.125 +2024-08-26 03:04:16,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161973.33333333334, ans=0.125 +2024-08-26 03:04:44,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162026.66666666666, ans=0.1 +2024-08-26 03:05:20,930 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.64 vs. limit=15.0 +2024-08-26 03:05:28,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=162133.33333333334, ans=0.125 +2024-08-26 03:05:42,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=162186.66666666666, ans=0.125 +2024-08-26 03:06:03,078 INFO [train.py:1114] (2/4) Epoch 13, batch 550, loss[loss=0.2592, simple_loss=0.315, pruned_loss=0.07291, ctc_loss=0.1441, over 19232.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2848, pruned_loss=0.05945, ctc_loss=0.1121, over 3608365.90 frames. 
], batch size: 71, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:06:04,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=162240.0, ans=10.0 +2024-08-26 03:06:27,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=162293.33333333334, ans=0.025 +2024-08-26 03:06:47,052 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.318e+02 1.758e+02 1.954e+02 2.485e+02 4.688e+02, threshold=3.908e+02, percent-clipped=2.0 +2024-08-26 03:07:12,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=162453.33333333334, ans=0.125 +2024-08-26 03:07:14,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162453.33333333334, ans=0.1 +2024-08-26 03:07:24,257 INFO [train.py:1114] (2/4) Epoch 13, batch 600, loss[loss=0.2636, simple_loss=0.3144, pruned_loss=0.07767, ctc_loss=0.1437, over 19429.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2854, pruned_loss=0.05977, ctc_loss=0.1125, over 3664877.48 frames. ], batch size: 67, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:07:36,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162506.66666666666, ans=0.0 +2024-08-26 03:07:44,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=162560.0, ans=0.125 +2024-08-26 03:07:50,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162560.0, ans=0.1 +2024-08-26 03:08:16,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162613.33333333334, ans=0.1 +2024-08-26 03:08:29,444 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:08:35,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=162666.66666666666, ans=0.0 +2024-08-26 03:08:37,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=162666.66666666666, ans=0.0 +2024-08-26 03:09:14,956 INFO [train.py:1114] (2/4) Epoch 13, batch 650, loss[loss=0.229, simple_loss=0.2906, pruned_loss=0.06073, ctc_loss=0.115, over 19758.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2851, pruned_loss=0.05963, ctc_loss=0.1125, over 3715505.86 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:09:15,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=162773.33333333334, ans=0.0 +2024-08-26 03:09:22,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=162773.33333333334, ans=0.125 +2024-08-26 03:09:25,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.13 vs. 
limit=15.0 +2024-08-26 03:09:44,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=162826.66666666666, ans=0.2 +2024-08-26 03:09:45,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=162826.66666666666, ans=0.0 +2024-08-26 03:09:57,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0 +2024-08-26 03:10:09,872 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.755e+02 2.119e+02 2.960e+02 5.119e+02, threshold=4.237e+02, percent-clipped=6.0 +2024-08-26 03:10:30,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=162986.66666666666, ans=0.2 +2024-08-26 03:10:39,731 INFO [train.py:1114] (2/4) Epoch 13, batch 700, loss[loss=0.1995, simple_loss=0.2662, pruned_loss=0.04774, ctc_loss=0.09344, over 19719.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2849, pruned_loss=0.0595, ctc_loss=0.1118, over 3747011.77 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:10:56,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=163093.33333333334, ans=0.125 +2024-08-26 03:10:58,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=163093.33333333334, ans=0.125 +2024-08-26 03:11:37,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=163200.0, ans=0.2 +2024-08-26 03:11:45,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=15.0 +2024-08-26 03:12:00,818 INFO [train.py:1114] (2/4) Epoch 13, batch 750, loss[loss=0.2506, simple_loss=0.3073, pruned_loss=0.07125, ctc_loss=0.1286, over 19490.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2845, pruned_loss=0.0594, ctc_loss=0.1114, over 3773780.70 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:12:09,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=163306.66666666666, ans=0.2 +2024-08-26 03:12:30,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.93 vs. 
limit=15.0 +2024-08-26 03:12:36,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=163413.33333333334, ans=0.0 +2024-08-26 03:12:36,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=163413.33333333334, ans=6.0 +2024-08-26 03:12:37,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 03:12:43,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.800e+02 2.310e+02 2.882e+02 4.749e+02, threshold=4.619e+02, percent-clipped=2.0 +2024-08-26 03:12:51,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 03:13:31,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=163520.0, ans=0.0 +2024-08-26 03:13:37,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=163520.0, ans=0.125 +2024-08-26 03:13:53,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163573.33333333334, ans=0.1 +2024-08-26 03:13:56,370 INFO [train.py:1114] (2/4) Epoch 13, batch 800, loss[loss=0.2063, simple_loss=0.2668, pruned_loss=0.05306, ctc_loss=0.09921, over 19410.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2849, pruned_loss=0.05981, ctc_loss=0.1122, over 3794107.70 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:14:05,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=163573.33333333334, ans=0.0 +2024-08-26 03:14:11,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.07 vs. limit=22.5 +2024-08-26 03:14:12,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.73 vs. limit=22.5 +2024-08-26 03:14:14,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=163573.33333333334, ans=0.125 +2024-08-26 03:14:39,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=163680.0, ans=0.2 +2024-08-26 03:14:52,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=163733.33333333334, ans=0.0 +2024-08-26 03:14:56,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=163733.33333333334, ans=0.0 +2024-08-26 03:15:12,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=163840.0, ans=0.0 +2024-08-26 03:15:13,896 INFO [train.py:1114] (2/4) Epoch 13, batch 850, loss[loss=0.2136, simple_loss=0.2934, pruned_loss=0.04787, ctc_loss=0.09498, over 19639.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2842, pruned_loss=0.05914, ctc_loss=0.1113, over 3813447.23 frames. 
], batch size: 59, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:15:18,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=163840.0, ans=0.125 +2024-08-26 03:15:56,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=163946.66666666666, ans=0.0 +2024-08-26 03:16:06,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=163946.66666666666, ans=0.125 +2024-08-26 03:16:11,619 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.727e+02 1.948e+02 2.271e+02 3.773e+02, threshold=3.897e+02, percent-clipped=0.0 +2024-08-26 03:16:12,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=163946.66666666666, ans=0.1 +2024-08-26 03:16:26,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=164053.33333333334, ans=0.125 +2024-08-26 03:16:39,637 INFO [train.py:1114] (2/4) Epoch 13, batch 900, loss[loss=0.1888, simple_loss=0.2553, pruned_loss=0.04427, ctc_loss=0.08421, over 19813.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2845, pruned_loss=0.05956, ctc_loss=0.1119, over 3818229.55 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:16:40,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=164106.66666666666, ans=0.125 +2024-08-26 03:16:42,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164106.66666666666, ans=0.1 +2024-08-26 03:16:57,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=164160.0, ans=0.2 +2024-08-26 03:17:00,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=164160.0, ans=0.125 +2024-08-26 03:17:05,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=164213.33333333334, ans=0.0 +2024-08-26 03:17:39,435 INFO [train.py:1114] (2/4) Epoch 13, batch 950, loss[loss=0.2111, simple_loss=0.2664, pruned_loss=0.05591, ctc_loss=0.11, over 19505.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.284, pruned_loss=0.05939, ctc_loss=0.1117, over 3818816.00 frames. 
], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:17:39,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=164373.33333333334, ans=0.07 +2024-08-26 03:17:43,193 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:17:57,979 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:18:34,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164426.66666666666, ans=0.125 +2024-08-26 03:18:52,290 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.763e+02 2.081e+02 2.549e+02 5.575e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-26 03:19:07,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=164533.33333333334, ans=0.04949747468305833 +2024-08-26 03:19:10,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=164533.33333333334, ans=0.125 +2024-08-26 03:19:29,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=164640.0, ans=0.0 +2024-08-26 03:19:29,891 INFO [train.py:1114] (2/4) Epoch 13, batch 1000, loss[loss=0.2237, simple_loss=0.2844, pruned_loss=0.05862, ctc_loss=0.1145, over 19842.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2857, pruned_loss=0.06018, ctc_loss=0.1132, over 3814582.96 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:19:45,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=164693.33333333334, ans=0.125 +2024-08-26 03:19:48,980 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:20:23,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=164853.33333333334, ans=0.0 +2024-08-26 03:20:24,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=164853.33333333334, ans=0.1 +2024-08-26 03:20:26,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=164853.33333333334, ans=0.0 +2024-08-26 03:20:35,692 INFO [train.py:1114] (2/4) Epoch 13, batch 1050, loss[loss=0.2264, simple_loss=0.2973, pruned_loss=0.05568, ctc_loss=0.1103, over 19838.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2851, pruned_loss=0.06021, ctc_loss=0.1132, over 3821495.00 frames. 
], batch size: 57, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:20:44,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=164906.66666666666, ans=0.125 +2024-08-26 03:20:46,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=164960.0, ans=0.125 +2024-08-26 03:20:46,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=164960.0, ans=0.09899494936611666 +2024-08-26 03:20:48,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.37 vs. limit=15.0 +2024-08-26 03:20:54,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=164960.0, ans=0.125 +2024-08-26 03:21:08,096 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.698e+02 1.997e+02 2.318e+02 3.616e+02, threshold=3.994e+02, percent-clipped=0.0 +2024-08-26 03:21:24,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=165066.66666666666, ans=0.125 +2024-08-26 03:21:31,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165120.0, ans=0.1 +2024-08-26 03:21:32,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=165120.0, ans=0.2 +2024-08-26 03:21:42,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165120.0, ans=0.1 +2024-08-26 03:21:44,513 INFO [train.py:1114] (2/4) Epoch 13, batch 1100, loss[loss=0.208, simple_loss=0.2777, pruned_loss=0.05078, ctc_loss=0.09204, over 19589.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2845, pruned_loss=0.05988, ctc_loss=0.1126, over 3830301.92 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:21:49,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=165173.33333333334, ans=0.0 +2024-08-26 03:22:45,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.38 vs. limit=15.0 +2024-08-26 03:22:57,622 INFO [train.py:1114] (2/4) Epoch 13, batch 1150, loss[loss=0.2159, simple_loss=0.2737, pruned_loss=0.05742, ctc_loss=0.1082, over 19614.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2838, pruned_loss=0.0596, ctc_loss=0.112, over 3830394.96 frames. 
], batch size: 52, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:23:02,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=165440.0, ans=0.0 +2024-08-26 03:23:10,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165493.33333333334, ans=0.125 +2024-08-26 03:23:19,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=165493.33333333334, ans=0.125 +2024-08-26 03:23:24,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=165493.33333333334, ans=0.125 +2024-08-26 03:23:38,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.729e+02 2.006e+02 2.456e+02 7.202e+02, threshold=4.012e+02, percent-clipped=3.0 +2024-08-26 03:23:51,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=165600.0, ans=0.2 +2024-08-26 03:23:54,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=165653.33333333334, ans=0.5 +2024-08-26 03:23:57,277 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:23:59,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=165653.33333333334, ans=0.0 +2024-08-26 03:24:03,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=165653.33333333334, ans=0.125 +2024-08-26 03:24:11,580 INFO [train.py:1114] (2/4) Epoch 13, batch 1200, loss[loss=0.2124, simple_loss=0.2803, pruned_loss=0.05303, ctc_loss=0.09605, over 19834.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2845, pruned_loss=0.05973, ctc_loss=0.1122, over 3826231.54 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:24:19,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=165706.66666666666, ans=0.2 +2024-08-26 03:24:30,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165706.66666666666, ans=0.125 +2024-08-26 03:24:54,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=165813.33333333334, ans=0.0 +2024-08-26 03:25:15,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=165813.33333333334, ans=0.04949747468305833 +2024-08-26 03:25:23,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165866.66666666666, ans=0.1 +2024-08-26 03:25:58,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=165866.66666666666, ans=0.2 +2024-08-26 03:26:20,507 INFO [train.py:1114] (2/4) Epoch 13, batch 1250, loss[loss=0.2469, simple_loss=0.3022, pruned_loss=0.06892, ctc_loss=0.1342, over 19497.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.285, pruned_loss=0.05974, ctc_loss=0.1121, over 3844509.72 frames. 
], batch size: 61, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:26:20,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=165973.33333333334, ans=0.125 +2024-08-26 03:26:32,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.05 vs. limit=15.0 +2024-08-26 03:26:45,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=166026.66666666666, ans=0.125 +2024-08-26 03:27:02,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166080.0, ans=0.125 +2024-08-26 03:27:23,465 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.311e+02 1.715e+02 1.869e+02 2.285e+02 3.930e+02, threshold=3.738e+02, percent-clipped=0.0 +2024-08-26 03:27:38,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. limit=6.0 +2024-08-26 03:27:39,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=166133.33333333334, ans=0.2 +2024-08-26 03:28:00,525 INFO [train.py:1114] (2/4) Epoch 13, batch 1300, loss[loss=0.2667, simple_loss=0.3093, pruned_loss=0.08235, ctc_loss=0.1486, over 18884.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2836, pruned_loss=0.05898, ctc_loss=0.1109, over 3848264.69 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:28:00,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=166240.0, ans=0.0 +2024-08-26 03:28:38,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=166293.33333333334, ans=0.125 +2024-08-26 03:30:08,053 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:30:18,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166506.66666666666, ans=0.0 +2024-08-26 03:30:19,038 INFO [train.py:1114] (2/4) Epoch 13, batch 1350, loss[loss=0.2249, simple_loss=0.2885, pruned_loss=0.05803, ctc_loss=0.1134, over 19774.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2835, pruned_loss=0.05875, ctc_loss=0.1104, over 3859343.46 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:30:46,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.76 vs. limit=22.5 +2024-08-26 03:31:08,778 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.377e+02 1.736e+02 2.053e+02 2.622e+02 5.263e+02, threshold=4.106e+02, percent-clipped=6.0 +2024-08-26 03:31:20,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=166666.66666666666, ans=0.0 +2024-08-26 03:31:36,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=166720.0, ans=0.0 +2024-08-26 03:31:40,380 INFO [train.py:1114] (2/4) Epoch 13, batch 1400, loss[loss=0.2044, simple_loss=0.2645, pruned_loss=0.05185, ctc_loss=0.1013, over 19667.00 frames. 
], tot_loss[loss=0.2219, simple_loss=0.2831, pruned_loss=0.05841, ctc_loss=0.1098, over 3865710.98 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:31:53,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=166773.33333333334, ans=0.0 +2024-08-26 03:32:14,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=166773.33333333334, ans=0.125 +2024-08-26 03:32:31,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.42 vs. limit=15.0 +2024-08-26 03:32:36,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=166880.0, ans=0.5 +2024-08-26 03:32:55,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=166933.33333333334, ans=0.2 +2024-08-26 03:33:16,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=166986.66666666666, ans=0.125 +2024-08-26 03:33:21,478 INFO [train.py:1114] (2/4) Epoch 13, batch 1450, loss[loss=0.2403, simple_loss=0.2961, pruned_loss=0.06772, ctc_loss=0.1227, over 19671.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2839, pruned_loss=0.05883, ctc_loss=0.1106, over 3863673.82 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:33:33,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=167093.33333333334, ans=0.0 +2024-08-26 03:33:44,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=167093.33333333334, ans=0.0 +2024-08-26 03:33:59,239 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.756e+02 1.937e+02 2.380e+02 3.895e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-26 03:34:33,954 INFO [train.py:1114] (2/4) Epoch 13, batch 1500, loss[loss=0.2403, simple_loss=0.2974, pruned_loss=0.06641, ctc_loss=0.1258, over 19606.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2851, pruned_loss=0.05954, ctc_loss=0.1121, over 3862842.30 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:34:54,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=167360.0, ans=0.125 +2024-08-26 03:35:43,267 INFO [train.py:1114] (2/4) Epoch 13, batch 1550, loss[loss=0.2204, simple_loss=0.2919, pruned_loss=0.05532, ctc_loss=0.09591, over 19635.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2854, pruned_loss=0.05975, ctc_loss=0.1128, over 3848410.62 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:35:59,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=167626.66666666666, ans=0.125 +2024-08-26 03:36:22,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.33 vs. 
limit=10.0 +2024-08-26 03:36:35,367 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.778e+02 2.054e+02 2.767e+02 5.252e+02, threshold=4.108e+02, percent-clipped=7.0 +2024-08-26 03:36:41,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-08-26 03:37:04,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167840.0, ans=0.0 +2024-08-26 03:37:05,323 INFO [train.py:1114] (2/4) Epoch 13, batch 1600, loss[loss=0.2266, simple_loss=0.2956, pruned_loss=0.05739, ctc_loss=0.107, over 19844.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2853, pruned_loss=0.05966, ctc_loss=0.1127, over 3837793.79 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:37:16,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=167840.0, ans=0.2 +2024-08-26 03:37:42,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167946.66666666666, ans=0.1 +2024-08-26 03:38:21,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=168000.0, ans=0.0 +2024-08-26 03:38:35,984 INFO [train.py:1114] (2/4) Epoch 13, batch 1650, loss[loss=0.199, simple_loss=0.2718, pruned_loss=0.04636, ctc_loss=0.08361, over 19668.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2851, pruned_loss=0.0598, ctc_loss=0.1128, over 3833703.32 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:38:36,546 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.26 vs. limit=12.0 +2024-08-26 03:38:44,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168106.66666666666, ans=0.1 +2024-08-26 03:38:47,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=168106.66666666666, ans=0.0 +2024-08-26 03:38:58,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.51 vs. limit=10.0 +2024-08-26 03:39:06,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=15.0 +2024-08-26 03:39:20,047 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.825e+02 2.209e+02 2.614e+02 4.167e+02, threshold=4.418e+02, percent-clipped=2.0 +2024-08-26 03:39:28,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=168266.66666666666, ans=0.125 +2024-08-26 03:39:56,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=168320.0, ans=0.0 +2024-08-26 03:40:00,077 INFO [train.py:1114] (2/4) Epoch 13, batch 1700, loss[loss=0.203, simple_loss=0.2607, pruned_loss=0.05303, ctc_loss=0.0978, over 19655.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2852, pruned_loss=0.05992, ctc_loss=0.1128, over 3847716.76 frames. 
], batch size: 46, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:40:24,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168426.66666666666, ans=0.125 +2024-08-26 03:40:35,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=168426.66666666666, ans=0.0 +2024-08-26 03:40:42,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.82 vs. limit=15.0 +2024-08-26 03:40:47,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=168533.33333333334, ans=0.2 +2024-08-26 03:41:02,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=168586.66666666666, ans=0.0 +2024-08-26 03:41:17,849 INFO [train.py:1114] (2/4) Epoch 13, batch 1750, loss[loss=0.1786, simple_loss=0.2406, pruned_loss=0.04139, ctc_loss=0.08448, over 19679.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2844, pruned_loss=0.05925, ctc_loss=0.1117, over 3852034.96 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:41:19,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=15.0 +2024-08-26 03:41:34,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=22.79 vs. limit=22.5 +2024-08-26 03:41:42,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168693.33333333334, ans=0.1 +2024-08-26 03:41:48,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168693.33333333334, ans=0.125 +2024-08-26 03:41:58,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=168746.66666666666, ans=0.0 +2024-08-26 03:41:59,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.17 vs. limit=22.5 +2024-08-26 03:42:01,095 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.702e+02 2.065e+02 2.813e+02 5.109e+02, threshold=4.129e+02, percent-clipped=2.0 +2024-08-26 03:42:11,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=168746.66666666666, ans=0.125 +2024-08-26 03:42:42,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168853.33333333334, ans=0.125 +2024-08-26 03:42:43,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=168853.33333333334, ans=0.025 +2024-08-26 03:42:45,934 INFO [train.py:1114] (2/4) Epoch 13, batch 1800, loss[loss=0.2249, simple_loss=0.2881, pruned_loss=0.05769, ctc_loss=0.1155, over 19617.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2846, pruned_loss=0.05935, ctc_loss=0.1118, over 3853948.22 frames. 
], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:42:57,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168960.0, ans=0.125 +2024-08-26 03:42:58,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=168960.0, ans=0.125 +2024-08-26 03:43:01,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.69 vs. limit=22.5 +2024-08-26 03:43:04,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168960.0, ans=0.125 +2024-08-26 03:43:16,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=169013.33333333334, ans=0.0 +2024-08-26 03:43:26,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.52 vs. limit=10.0 +2024-08-26 03:43:36,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169066.66666666666, ans=0.125 +2024-08-26 03:43:47,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=169120.0, ans=0.125 +2024-08-26 03:43:53,525 INFO [train.py:1114] (2/4) Epoch 13, batch 1850, loss[loss=0.2189, simple_loss=0.289, pruned_loss=0.05442, ctc_loss=0.09984, over 19599.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2841, pruned_loss=0.05878, ctc_loss=0.1105, over 3857857.64 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:44:05,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.29 vs. limit=10.0 +2024-08-26 03:44:17,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.38 vs. limit=15.0 +2024-08-26 03:44:29,687 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.936e+02 2.666e+02 3.402e+02 5.252e+02, threshold=5.332e+02, percent-clipped=13.0 +2024-08-26 03:44:43,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=169386.66666666666, ans=0.125 +2024-08-26 03:44:57,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=169386.66666666666, ans=0.0 +2024-08-26 03:45:07,843 INFO [train.py:1114] (2/4) Epoch 13, batch 1900, loss[loss=0.227, simple_loss=0.2928, pruned_loss=0.0573, ctc_loss=0.1164, over 19655.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2846, pruned_loss=0.05882, ctc_loss=0.1105, over 3862789.69 frames. 
], batch size: 59, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:45:09,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=169440.0, ans=0.025 +2024-08-26 03:45:20,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169493.33333333334, ans=0.125 +2024-08-26 03:45:28,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=169493.33333333334, ans=0.0 +2024-08-26 03:45:43,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=169546.66666666666, ans=0.04949747468305833 +2024-08-26 03:45:51,693 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.26 vs. limit=5.0 +2024-08-26 03:46:04,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=169600.0, ans=0.2 +2024-08-26 03:46:22,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-26 03:46:24,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169706.66666666666, ans=0.0 +2024-08-26 03:46:29,183 INFO [train.py:1114] (2/4) Epoch 13, batch 1950, loss[loss=0.2233, simple_loss=0.2818, pruned_loss=0.06087, ctc_loss=0.1077, over 19586.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2858, pruned_loss=0.05918, ctc_loss=0.111, over 3871322.34 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:46:29,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169706.66666666666, ans=0.1 +2024-08-26 03:46:29,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=169706.66666666666, ans=0.125 +2024-08-26 03:46:54,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=169760.0, ans=0.125 +2024-08-26 03:46:59,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169813.33333333334, ans=0.1 +2024-08-26 03:50:25,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=169813.33333333334, ans=0.0 +2024-08-26 03:50:26,627 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.484e+02 1.795e+02 2.018e+02 2.323e+02 3.502e+02, threshold=4.036e+02, percent-clipped=0.0 +2024-08-26 03:54:17,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=169866.66666666666, ans=0.125 +2024-08-26 04:20:39,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.14 vs. limit=22.5 +2024-08-26 04:22:39,304 INFO [train.py:1114] (2/4) Epoch 13, batch 2000, loss[loss=0.1778, simple_loss=0.2411, pruned_loss=0.04243, ctc_loss=0.07417, over 19663.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2865, pruned_loss=0.05975, ctc_loss=0.112, over 3856689.37 frames. 
], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 04:39:36,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=170026.66666666666, ans=0.125 +2024-08-26 05:07:53,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=170186.66666666666, ans=0.125 +2024-08-26 05:13:23,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170186.66666666666, ans=0.125 +2024-08-26 05:17:15,382 INFO [train.py:1114] (2/4) Epoch 13, batch 2050, loss[loss=0.1956, simple_loss=0.2558, pruned_loss=0.04949, ctc_loss=0.09092, over 19705.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2853, pruned_loss=0.05947, ctc_loss=0.1116, over 3851357.49 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:18:37,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.29 vs. limit=6.0 +2024-08-26 05:25:20,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=170293.33333333334, ans=0.125 +2024-08-26 05:25:21,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=170293.33333333334, ans=0.125 +2024-08-26 05:34:32,866 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.739e+02 2.095e+02 2.592e+02 3.598e+02, threshold=4.189e+02, percent-clipped=0.0 +2024-08-26 05:34:44,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=170400.0, ans=0.125 +2024-08-26 05:43:17,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=170453.33333333334, ans=0.04949747468305833 +2024-08-26 05:45:21,769 INFO [train.py:1114] (2/4) Epoch 13, batch 2100, loss[loss=0.2535, simple_loss=0.3039, pruned_loss=0.07408, ctc_loss=0.1373, over 19768.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2843, pruned_loss=0.05896, ctc_loss=0.1107, over 3858507.93 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:50:50,951 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 05:51:19,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=170560.0, ans=0.125 +2024-08-26 05:52:06,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=170613.33333333334, ans=0.0 +2024-08-26 05:53:15,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-08-26 05:57:05,746 INFO [train.py:1114] (2/4) Epoch 13, batch 2150, loss[loss=0.1881, simple_loss=0.2537, pruned_loss=0.04417, ctc_loss=0.08574, over 19851.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2838, pruned_loss=0.05872, ctc_loss=0.1105, over 3869917.21 frames. 
], batch size: 52, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:58:18,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=170773.33333333334, ans=0.125 +2024-08-26 06:01:05,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=170880.0, ans=0.0 +2024-08-26 06:01:18,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170880.0, ans=0.125 +2024-08-26 06:02:10,744 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.801e+02 2.071e+02 2.646e+02 5.963e+02, threshold=4.141e+02, percent-clipped=6.0 +2024-08-26 06:03:10,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.57 vs. limit=22.5 +2024-08-26 06:03:39,253 INFO [train.py:1114] (2/4) Epoch 13, batch 2200, loss[loss=0.2476, simple_loss=0.3103, pruned_loss=0.06696, ctc_loss=0.1276, over 19581.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2841, pruned_loss=0.05882, ctc_loss=0.1107, over 3867915.10 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 06:05:01,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=171146.66666666666, ans=0.125 +2024-08-26 06:05:27,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=171200.0, ans=0.07 +2024-08-26 06:05:56,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171253.33333333334, ans=0.0 +2024-08-26 06:06:26,731 INFO [train.py:1114] (2/4) Epoch 13, batch 2250, loss[loss=0.2113, simple_loss=0.2813, pruned_loss=0.05129, ctc_loss=0.097, over 19610.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2844, pruned_loss=0.05882, ctc_loss=0.1106, over 3867059.55 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 06:06:30,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=171306.66666666666, ans=0.025 +2024-08-26 06:08:13,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=171413.33333333334, ans=0.125 +2024-08-26 06:08:30,395 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.765e+02 2.070e+02 2.599e+02 3.761e+02, threshold=4.140e+02, percent-clipped=0.0 +2024-08-26 06:08:30,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171466.66666666666, ans=0.125 +2024-08-26 06:09:00,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171466.66666666666, ans=0.125 +2024-08-26 06:09:43,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171520.0, ans=0.1 +2024-08-26 06:10:19,750 INFO [train.py:1114] (2/4) Epoch 13, batch 2300, loss[loss=0.2254, simple_loss=0.2872, pruned_loss=0.05972, ctc_loss=0.1106, over 19507.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2837, pruned_loss=0.05899, ctc_loss=0.111, over 3860537.43 frames. 
], batch size: 49, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:10:27,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171573.33333333334, ans=0.125 +2024-08-26 06:10:30,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=171573.33333333334, ans=0.2 +2024-08-26 06:10:43,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=171626.66666666666, ans=0.125 +2024-08-26 06:10:43,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.28 vs. limit=15.0 +2024-08-26 06:10:52,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171626.66666666666, ans=0.1 +2024-08-26 06:11:02,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0 +2024-08-26 06:11:19,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.27 vs. limit=10.0 +2024-08-26 06:11:43,348 INFO [train.py:1114] (2/4) Epoch 13, batch 2350, loss[loss=0.2523, simple_loss=0.3092, pruned_loss=0.0705, ctc_loss=0.1358, over 19662.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2843, pruned_loss=0.05935, ctc_loss=0.1114, over 3863155.69 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:11:48,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=171840.0, ans=0.2 +2024-08-26 06:11:52,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=171840.0, ans=0.1 +2024-08-26 06:12:10,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=171946.66666666666, ans=0.025 +2024-08-26 06:12:16,618 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.773e+02 2.247e+02 3.255e+02 4.983e+02, threshold=4.494e+02, percent-clipped=2.0 +2024-08-26 06:12:18,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172000.0, ans=0.1 +2024-08-26 06:12:19,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=172000.0, ans=0.0 +2024-08-26 06:12:27,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.45 vs. limit=22.5 +2024-08-26 06:12:46,281 INFO [train.py:1114] (2/4) Epoch 13, batch 2400, loss[loss=0.2384, simple_loss=0.2967, pruned_loss=0.06553, ctc_loss=0.1227, over 19359.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2865, pruned_loss=0.06016, ctc_loss=0.1127, over 3857901.38 frames. 
], batch size: 71, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:13:13,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=172160.0, ans=0.07 +2024-08-26 06:13:40,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172266.66666666666, ans=0.125 +2024-08-26 06:14:08,368 INFO [train.py:1114] (2/4) Epoch 13, batch 2450, loss[loss=0.3115, simple_loss=0.3348, pruned_loss=0.1036, ctc_loss=0.2028, over 13446.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.2901, pruned_loss=0.06268, ctc_loss=0.118, over 3732248.48 frames. ], batch size: 140, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:14:08,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=172373.33333333334, ans=0.125 +2024-08-26 06:14:24,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=23.26 vs. limit=22.5 +2024-08-26 06:14:37,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.88 vs. limit=15.0 +2024-08-26 06:14:41,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=172480.0, ans=0.025 +2024-08-26 06:14:42,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.83 vs. limit=15.0 +2024-08-26 06:14:43,308 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.935e+02 2.072e+02 2.350e+02 4.711e+02, threshold=4.143e+02, percent-clipped=2.0 +2024-08-26 06:14:44,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=172533.33333333334, ans=0.125 +2024-08-26 06:15:44,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=172581.33333333334, ans=0.04949747468305833 +2024-08-26 06:16:27,496 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:16:27,497 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 06:17:53,188 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.1962, 3.1745, 3.7702, 2.8743], device='cuda:2') +2024-08-26 06:17:58,793 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1898, simple_loss=0.2778, pruned_loss=0.03769, ctc_loss=0.06578, over 944034.00 frames. 
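The two recurring machine-generated entry types in this log are worth decoding. The `WARNING [optim.py:487]` lines summarize the distribution of recent gradient norms: the five numbers after "grad-norm quartiles" appear to be the 0/25/50/75/100th percentiles, and in every entry above the reported `threshold` equals `Clipping_scale` (2.0) times the median quartile; gradients whose total norm exceeds the threshold are scaled down, and `percent-clipped` reports how often that happened. The sketch below re-creates that bookkeeping under those assumptions; `GradNormClipper`, the fixed history window, and the cumulative (rather than per-interval) clip counter are illustrative choices of mine, not the actual `optim.py` API.

```python
import torch

class GradNormClipper:
    """Illustrative sketch of the quartile-based clipping reported in the log,
    assuming threshold = clipping_scale * median of recent gradient norms."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.history = history
        self.norms = []        # recent total gradient norms
        self.num_clipped = 0   # cumulative count (the real log resets per interval)
        self.num_steps = 0

    def clip_(self, params) -> float:
        params = [p for p in params if p.grad is not None]
        if not params:
            return 0.0
        # Total norm over all parameters (2-norm of the per-parameter norms).
        total_norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])
        ).item()
        self.norms = (self.norms + [total_norm])[-self.history:]
        q = torch.quantile(
            torch.tensor(self.norms),
            torch.tensor([0.0, 0.25, 0.50, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * q[2].item()  # scale * median
        self.num_steps += 1
        if total_norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / total_norm)
        print(f"grad-norm quartiles "
              f"{' '.join(f'{v:.3e}' for v in q.tolist())}, "
              f"threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.num_clipped / self.num_steps:.1f}")
        return total_norm
```

The far more numerous `INFO [scaling.py:214]` lines print the current value (`ans`) of a named `ScheduledFloat` at the current `batch_count`: a scalar hyperparameter (dropout probability, skip rate, balancer probability, scale minimum, ...) that follows a schedule over training rather than staying fixed. A minimal sketch of one plausible such schedule, piecewise-linear interpolation between `(batch_count, value)` breakpoints with clamping at the ends; `scheduled_float` and the example breakpoints are hypothetical, not the scaling.py implementation:

```python
def scheduled_float(batch_count, schedule):
    """Piecewise-linear schedule, e.g. schedule=[(0.0, 0.3), (20000.0, 0.125)]."""
    pts = sorted(schedule)
    if batch_count <= pts[0][0]:
        return pts[0][1]
    for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
        if batch_count <= x1:
            t = (batch_count - x0) / (x1 - x0)
            return y0 + t * (y1 - y0)
    return pts[-1][1]

# e.g. scheduled_float(10000.0, [(0.0, 0.3), (20000.0, 0.125)]) -> 0.2125
```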
+2024-08-26 06:18:12,591 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 14234MB +2024-08-26 06:18:22,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=172634.66666666666, ans=0.125 +2024-08-26 06:19:03,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=172688.0, ans=0.0 +2024-08-26 06:19:53,743 INFO [train.py:1114] (2/4) Epoch 14, batch 50, loss[loss=0.2167, simple_loss=0.2747, pruned_loss=0.05789, ctc_loss=0.1076, over 19751.00 frames. ], tot_loss[loss=0.228, simple_loss=0.2879, pruned_loss=0.06106, ctc_loss=0.1151, over 845084.53 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:20:17,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=172848.0, ans=0.0 +2024-08-26 06:20:27,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=172901.33333333334, ans=0.125 +2024-08-26 06:20:52,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=172954.66666666666, ans=0.125 +2024-08-26 06:21:17,218 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.738e+02 2.047e+02 2.487e+02 4.948e+02, threshold=4.095e+02, percent-clipped=4.0 +2024-08-26 06:21:21,524 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:21:51,866 INFO [train.py:1114] (2/4) Epoch 14, batch 100, loss[loss=0.2066, simple_loss=0.271, pruned_loss=0.05254, ctc_loss=0.09275, over 19711.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2881, pruned_loss=0.06079, ctc_loss=0.1145, over 1499046.71 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:22:29,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173221.33333333334, ans=0.125 +2024-08-26 06:23:33,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=173381.33333333334, ans=0.2 +2024-08-26 06:23:38,125 INFO [train.py:1114] (2/4) Epoch 14, batch 150, loss[loss=0.1981, simple_loss=0.2593, pruned_loss=0.04973, ctc_loss=0.09348, over 19692.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2846, pruned_loss=0.05869, ctc_loss=0.1106, over 2028604.36 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:23:54,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=12.0 +2024-08-26 06:24:33,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.66 vs. 
limit=15.0 +2024-08-26 06:24:35,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=173541.33333333334, ans=0.05 +2024-08-26 06:24:49,764 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.676e+02 1.898e+02 2.213e+02 4.155e+02, threshold=3.795e+02, percent-clipped=1.0 +2024-08-26 06:24:57,011 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:24:59,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=173648.0, ans=0.025 +2024-08-26 06:25:00,487 INFO [train.py:1114] (2/4) Epoch 14, batch 200, loss[loss=0.2392, simple_loss=0.2954, pruned_loss=0.06596, ctc_loss=0.1277, over 18454.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2835, pruned_loss=0.0583, ctc_loss=0.1099, over 2436843.10 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:25:23,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173701.33333333334, ans=0.1 +2024-08-26 06:25:50,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=173808.0, ans=0.025 +2024-08-26 06:25:50,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=173808.0, ans=0.07 +2024-08-26 06:25:58,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0 +2024-08-26 06:26:05,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173861.33333333334, ans=0.125 +2024-08-26 06:26:07,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. limit=15.0 +2024-08-26 06:26:10,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=173861.33333333334, ans=0.125 +2024-08-26 06:26:16,064 INFO [train.py:1114] (2/4) Epoch 14, batch 250, loss[loss=0.2497, simple_loss=0.3098, pruned_loss=0.06896, ctc_loss=0.1292, over 19384.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.284, pruned_loss=0.05877, ctc_loss=0.1107, over 2757067.39 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:26:16,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173914.66666666666, ans=0.1 +2024-08-26 06:26:17,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=173914.66666666666, ans=0.0 +2024-08-26 06:26:54,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=174021.33333333334, ans=0.0 +2024-08-26 06:27:18,007 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.683e+02 2.061e+02 2.648e+02 4.927e+02, threshold=4.123e+02, percent-clipped=4.0 +2024-08-26 06:27:20,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.16 vs. 
limit=15.0 +2024-08-26 06:27:25,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=174128.0, ans=0.125 +2024-08-26 06:27:28,130 INFO [train.py:1114] (2/4) Epoch 14, batch 300, loss[loss=0.2478, simple_loss=0.3065, pruned_loss=0.06916, ctc_loss=0.1272, over 19542.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2837, pruned_loss=0.05847, ctc_loss=0.11, over 3001239.55 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:27:50,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=174288.0, ans=0.0 +2024-08-26 06:28:31,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=15.0 +2024-08-26 06:28:34,461 INFO [train.py:1114] (2/4) Epoch 14, batch 350, loss[loss=0.2057, simple_loss=0.2669, pruned_loss=0.05264, ctc_loss=0.09775, over 19738.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2836, pruned_loss=0.05844, ctc_loss=0.1098, over 3191462.47 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:28:40,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174448.0, ans=0.1 +2024-08-26 06:28:54,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.16 vs. limit=15.0 +2024-08-26 06:29:06,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=174554.66666666666, ans=0.0 +2024-08-26 06:29:11,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-26 06:29:20,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.75 vs. limit=22.5 +2024-08-26 06:29:20,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=174608.0, ans=0.0 +2024-08-26 06:29:32,485 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.657e+02 1.894e+02 2.440e+02 4.007e+02, threshold=3.787e+02, percent-clipped=0.0 +2024-08-26 06:29:42,968 INFO [train.py:1114] (2/4) Epoch 14, batch 400, loss[loss=0.1951, simple_loss=0.2685, pruned_loss=0.04362, ctc_loss=0.0862, over 19482.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2821, pruned_loss=0.0575, ctc_loss=0.108, over 3343716.94 frames. 
], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:29:51,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=174714.66666666666, ans=0.125 +2024-08-26 06:29:55,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=174768.0, ans=0.0 +2024-08-26 06:30:07,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=174768.0, ans=0.0 +2024-08-26 06:30:08,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174768.0, ans=0.125 +2024-08-26 06:30:58,898 INFO [train.py:1114] (2/4) Epoch 14, batch 450, loss[loss=0.2307, simple_loss=0.2876, pruned_loss=0.0632, ctc_loss=0.1187, over 19609.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2822, pruned_loss=0.05744, ctc_loss=0.1079, over 3449888.47 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:32:03,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=175088.0, ans=10.0 +2024-08-26 06:32:20,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.13 vs. limit=15.0 +2024-08-26 06:32:24,923 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:32:25,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=175141.33333333334, ans=0.125 +2024-08-26 06:32:32,623 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.702e+02 1.875e+02 2.205e+02 3.904e+02, threshold=3.749e+02, percent-clipped=2.0 +2024-08-26 06:32:51,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175194.66666666666, ans=0.1 +2024-08-26 06:32:59,839 INFO [train.py:1114] (2/4) Epoch 14, batch 500, loss[loss=0.2622, simple_loss=0.3175, pruned_loss=0.07478, ctc_loss=0.1435, over 19681.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2817, pruned_loss=0.05738, ctc_loss=0.108, over 3545845.53 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:33:26,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.20 vs. 
limit=22.5 +2024-08-26 06:33:34,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=175301.33333333334, ans=0.0 +2024-08-26 06:33:36,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=175301.33333333334, ans=0.95 +2024-08-26 06:33:38,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=175301.33333333334, ans=0.2 +2024-08-26 06:34:03,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=175408.0, ans=0.1 +2024-08-26 06:34:13,945 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:34:32,907 INFO [train.py:1114] (2/4) Epoch 14, batch 550, loss[loss=0.2312, simple_loss=0.2901, pruned_loss=0.06335, ctc_loss=0.1143, over 19311.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2819, pruned_loss=0.05759, ctc_loss=0.1086, over 3607179.29 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:34:36,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=175514.66666666666, ans=0.0 +2024-08-26 06:35:08,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=175621.33333333334, ans=0.125 +2024-08-26 06:35:14,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=175674.66666666666, ans=0.2 +2024-08-26 06:35:25,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=175674.66666666666, ans=0.125 +2024-08-26 06:35:36,408 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.729e+02 1.957e+02 2.291e+02 4.042e+02, threshold=3.913e+02, percent-clipped=2.0 +2024-08-26 06:36:16,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=175728.0, ans=0.2 +2024-08-26 06:36:18,836 INFO [train.py:1114] (2/4) Epoch 14, batch 600, loss[loss=0.2534, simple_loss=0.3137, pruned_loss=0.07132, ctc_loss=0.1262, over 19341.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2819, pruned_loss=0.05751, ctc_loss=0.1084, over 3663666.03 frames. 
], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:37:45,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=175834.66666666666, ans=0.0 +2024-08-26 06:38:24,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-26 06:38:31,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-26 06:38:31,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-26 06:38:36,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=175888.0, ans=0.0 +2024-08-26 06:39:00,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=175941.33333333334, ans=0.125 +2024-08-26 06:39:25,932 INFO [train.py:1114] (2/4) Epoch 14, batch 650, loss[loss=0.2078, simple_loss=0.2682, pruned_loss=0.05443, ctc_loss=0.09625, over 19778.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.281, pruned_loss=0.05695, ctc_loss=0.1073, over 3714475.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:40:33,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=176101.33333333334, ans=0.0 +2024-08-26 06:40:37,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=176101.33333333334, ans=0.0 +2024-08-26 06:40:54,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=176154.66666666666, ans=0.0 +2024-08-26 06:40:58,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=176154.66666666666, ans=0.0 +2024-08-26 06:41:30,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176261.33333333334, ans=0.125 +2024-08-26 06:41:31,383 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.772e+02 2.123e+02 2.635e+02 4.354e+02, threshold=4.247e+02, percent-clipped=3.0 +2024-08-26 06:41:34,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=176261.33333333334, ans=0.0 +2024-08-26 06:41:45,010 INFO [train.py:1114] (2/4) Epoch 14, batch 700, loss[loss=0.2119, simple_loss=0.2791, pruned_loss=0.0526, ctc_loss=0.09863, over 19746.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2814, pruned_loss=0.05702, ctc_loss=0.1076, over 3746176.45 frames. 
], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:41:52,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=176314.66666666666, ans=0.125 +2024-08-26 06:41:53,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=176314.66666666666, ans=0.125 +2024-08-26 06:41:58,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=176368.0, ans=0.025 +2024-08-26 06:42:35,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0 +2024-08-26 06:42:38,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=12.0 +2024-08-26 06:42:46,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176528.0, ans=0.1 +2024-08-26 06:42:47,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=12.0 +2024-08-26 06:42:48,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=176528.0, ans=0.025 +2024-08-26 06:42:51,117 INFO [train.py:1114] (2/4) Epoch 14, batch 750, loss[loss=0.2168, simple_loss=0.2863, pruned_loss=0.05314, ctc_loss=0.1029, over 19499.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2813, pruned_loss=0.05693, ctc_loss=0.1073, over 3773023.39 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:43:03,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=176581.33333333334, ans=0.0 +2024-08-26 06:43:03,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176581.33333333334, ans=0.125 +2024-08-26 06:44:15,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=15.0 +2024-08-26 06:44:27,362 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.803e+02 2.358e+02 3.080e+02 4.835e+02, threshold=4.715e+02, percent-clipped=7.0 +2024-08-26 06:44:41,972 INFO [train.py:1114] (2/4) Epoch 14, batch 800, loss[loss=0.2336, simple_loss=0.2824, pruned_loss=0.06663, ctc_loss=0.1288, over 19393.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2818, pruned_loss=0.05697, ctc_loss=0.1075, over 3795191.57 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:45:26,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177008.0, ans=0.1 +2024-08-26 06:45:47,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177061.33333333334, ans=0.0 +2024-08-26 06:45:52,067 INFO [train.py:1114] (2/4) Epoch 14, batch 850, loss[loss=0.2289, simple_loss=0.2926, pruned_loss=0.0599, ctc_loss=0.1137, over 19643.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2818, pruned_loss=0.05734, ctc_loss=0.1081, over 3814842.26 frames. 
], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:46:19,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=177114.66666666666, ans=0.2 +2024-08-26 06:46:33,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=177168.0, ans=0.125 +2024-08-26 06:46:35,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177221.33333333334, ans=0.1 +2024-08-26 06:47:02,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=177274.66666666666, ans=0.2 +2024-08-26 06:47:08,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=177274.66666666666, ans=0.2 +2024-08-26 06:47:11,733 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.690e+02 1.974e+02 2.351e+02 3.908e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 06:47:21,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177328.0, ans=0.1 +2024-08-26 06:47:24,596 INFO [train.py:1114] (2/4) Epoch 14, batch 900, loss[loss=0.2134, simple_loss=0.2618, pruned_loss=0.05975, ctc_loss=0.1137, over 19417.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2818, pruned_loss=0.05751, ctc_loss=0.1085, over 3817936.02 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:47:49,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.90 vs. limit=12.0 +2024-08-26 06:48:00,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-26 06:48:09,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177541.33333333334, ans=0.1 +2024-08-26 06:48:29,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=177594.66666666666, ans=0.125 +2024-08-26 06:48:31,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=177594.66666666666, ans=0.125 +2024-08-26 06:48:38,066 INFO [train.py:1114] (2/4) Epoch 14, batch 950, loss[loss=0.1939, simple_loss=0.2579, pruned_loss=0.04706, ctc_loss=0.08947, over 19488.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2823, pruned_loss=0.05786, ctc_loss=0.1092, over 3819119.82 frames. 
], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:48:53,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=177701.33333333334, ans=0.09899494936611666 +2024-08-26 06:48:59,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-26 06:49:04,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=177701.33333333334, ans=0.2 +2024-08-26 06:49:06,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1 +2024-08-26 06:49:20,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.33 vs. limit=22.5 +2024-08-26 06:49:21,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0 +2024-08-26 06:49:30,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=177808.0, ans=0.0 +2024-08-26 06:49:35,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=177861.33333333334, ans=0.2 +2024-08-26 06:49:36,177 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.810e+02 2.092e+02 2.519e+02 4.035e+02, threshold=4.185e+02, percent-clipped=1.0 +2024-08-26 06:50:06,704 INFO [train.py:1114] (2/4) Epoch 14, batch 1000, loss[loss=0.1986, simple_loss=0.2649, pruned_loss=0.04755, ctc_loss=0.09288, over 19852.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.283, pruned_loss=0.05816, ctc_loss=0.1098, over 3815772.92 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:50:08,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=177914.66666666666, ans=0.0 +2024-08-26 06:50:08,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177914.66666666666, ans=0.1 +2024-08-26 06:51:23,238 INFO [train.py:1114] (2/4) Epoch 14, batch 1050, loss[loss=0.2383, simple_loss=0.3009, pruned_loss=0.0645, ctc_loss=0.1169, over 19852.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2821, pruned_loss=0.05788, ctc_loss=0.109, over 3822703.55 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:51:29,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178181.33333333334, ans=0.0 +2024-08-26 06:51:51,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178288.0, ans=0.125 +2024-08-26 06:52:03,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=178341.33333333334, ans=0.07 +2024-08-26 06:52:03,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=178341.33333333334, ans=0.125 +2024-08-26 06:52:17,044 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.429e+02 1.767e+02 2.034e+02 2.568e+02 4.426e+02, threshold=4.067e+02, percent-clipped=2.0 +2024-08-26 06:52:39,172 INFO [train.py:1114] (2/4) Epoch 14, batch 1100, loss[loss=0.1891, simple_loss=0.2586, pruned_loss=0.043, ctc_loss=0.08402, over 19588.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2815, pruned_loss=0.05725, ctc_loss=0.1078, over 3830526.86 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:53:11,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=178554.66666666666, ans=0.025 +2024-08-26 06:53:16,594 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.26 vs. limit=22.5 +2024-08-26 06:53:18,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=178608.0, ans=0.0 +2024-08-26 06:53:42,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178661.33333333334, ans=0.125 +2024-08-26 06:53:49,716 INFO [train.py:1114] (2/4) Epoch 14, batch 1150, loss[loss=0.2121, simple_loss=0.2786, pruned_loss=0.05272, ctc_loss=0.1004, over 19586.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2819, pruned_loss=0.05759, ctc_loss=0.1084, over 3831181.97 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:53:52,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.90 vs. limit=22.5 +2024-08-26 06:53:56,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=178714.66666666666, ans=0.125 +2024-08-26 06:54:15,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.25 vs. limit=12.0 +2024-08-26 06:54:23,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.03 vs. 
limit=10.0 +2024-08-26 06:54:24,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=178821.33333333334, ans=0.2 +2024-08-26 06:54:32,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=178821.33333333334, ans=0.125 +2024-08-26 06:54:38,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.20 vs. limit=22.5 +2024-08-26 06:54:47,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.255e+02 1.672e+02 1.916e+02 2.259e+02 4.129e+02, threshold=3.832e+02, percent-clipped=1.0 +2024-08-26 06:54:58,220 INFO [train.py:1114] (2/4) Epoch 14, batch 1200, loss[loss=0.2267, simple_loss=0.2924, pruned_loss=0.058, ctc_loss=0.1125, over 19837.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2826, pruned_loss=0.0578, ctc_loss=0.1088, over 3826923.42 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:55:04,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=178981.33333333334, ans=0.2 +2024-08-26 06:55:05,374 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:55:09,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=15.0 +2024-08-26 06:55:14,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-26 06:55:20,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-26 06:55:27,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-08-26 06:55:52,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=179194.66666666666, ans=10.0 +2024-08-26 06:56:28,019 INFO [train.py:1114] (2/4) Epoch 14, batch 1250, loss[loss=0.2374, simple_loss=0.3013, pruned_loss=0.0639, ctc_loss=0.1144, over 19523.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2833, pruned_loss=0.05811, ctc_loss=0.1091, over 3844708.95 frames. 
], batch size: 61, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:56:38,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=179248.0, ans=0.1 +2024-08-26 06:56:39,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=179248.0, ans=0.2 +2024-08-26 06:57:34,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=179408.0, ans=0.125 +2024-08-26 06:57:38,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=179408.0, ans=0.0 +2024-08-26 06:58:13,353 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.864e+02 2.134e+02 2.537e+02 3.723e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-08-26 06:58:15,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=179461.33333333334, ans=0.0 +2024-08-26 06:58:23,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=179461.33333333334, ans=0.125 +2024-08-26 06:58:27,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=179461.33333333334, ans=0.02 +2024-08-26 06:58:29,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=179461.33333333334, ans=0.025 +2024-08-26 06:58:30,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-26 06:58:31,297 INFO [train.py:1114] (2/4) Epoch 14, batch 1300, loss[loss=0.2668, simple_loss=0.3179, pruned_loss=0.07862, ctc_loss=0.1463, over 18859.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2826, pruned_loss=0.05783, ctc_loss=0.1085, over 3848027.07 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:58:37,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=179514.66666666666, ans=0.2 +2024-08-26 06:59:10,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=179568.0, ans=0.0 +2024-08-26 06:59:36,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179621.33333333334, ans=0.125 +2024-08-26 07:00:16,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179728.0, ans=0.125 +2024-08-26 07:00:33,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179728.0, ans=0.125 +2024-08-26 07:00:35,388 INFO [train.py:1114] (2/4) Epoch 14, batch 1350, loss[loss=0.2403, simple_loss=0.2901, pruned_loss=0.069, ctc_loss=0.1313, over 19790.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2821, pruned_loss=0.05768, ctc_loss=0.1082, over 3857906.85 frames. 
], batch size: 54, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:00:38,729 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:00:46,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=179781.33333333334, ans=0.0 +2024-08-26 07:00:57,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=179781.33333333334, ans=0.0 +2024-08-26 07:01:26,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=179834.66666666666, ans=0.0 +2024-08-26 07:01:32,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179888.0, ans=0.125 +2024-08-26 07:02:12,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.91 vs. limit=15.0 +2024-08-26 07:02:21,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=179994.66666666666, ans=0.125 +2024-08-26 07:02:26,032 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.690e+02 1.870e+02 2.214e+02 3.706e+02, threshold=3.740e+02, percent-clipped=0.0 +2024-08-26 07:02:27,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=179994.66666666666, ans=0.2 +2024-08-26 07:02:47,345 INFO [train.py:1114] (2/4) Epoch 14, batch 1400, loss[loss=0.1956, simple_loss=0.2562, pruned_loss=0.04947, ctc_loss=0.08989, over 19660.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2815, pruned_loss=0.05764, ctc_loss=0.1078, over 3865083.37 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:02:53,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.41 vs. limit=15.0 +2024-08-26 07:03:10,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=180048.0, ans=0.025 +2024-08-26 07:03:28,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180101.33333333334, ans=0.125 +2024-08-26 07:03:34,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=180154.66666666666, ans=0.125 +2024-08-26 07:03:55,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.35 vs. limit=22.5 +2024-08-26 07:04:09,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180261.33333333334, ans=0.125 +2024-08-26 07:04:13,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=180261.33333333334, ans=0.2 +2024-08-26 07:04:13,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=180261.33333333334, ans=0.0 +2024-08-26 07:04:25,279 INFO [train.py:1114] (2/4) Epoch 14, batch 1450, loss[loss=0.2631, simple_loss=0.3147, pruned_loss=0.07806, ctc_loss=0.1382, over 19663.00 frames. 
], tot_loss[loss=0.2213, simple_loss=0.2827, pruned_loss=0.05818, ctc_loss=0.1089, over 3861354.24 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:04:38,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180314.66666666666, ans=0.125 +2024-08-26 07:05:03,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180368.0, ans=0.1 +2024-08-26 07:05:14,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=180421.33333333334, ans=0.0 +2024-08-26 07:05:15,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180421.33333333334, ans=0.125 +2024-08-26 07:05:15,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=180421.33333333334, ans=10.0 +2024-08-26 07:05:28,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180474.66666666666, ans=0.125 +2024-08-26 07:05:40,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=180528.0, ans=0.0 +2024-08-26 07:05:41,174 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.716e+02 1.963e+02 2.339e+02 6.137e+02, threshold=3.925e+02, percent-clipped=1.0 +2024-08-26 07:05:57,996 INFO [train.py:1114] (2/4) Epoch 14, batch 1500, loss[loss=0.2123, simple_loss=0.2836, pruned_loss=0.05208, ctc_loss=0.09211, over 19592.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2826, pruned_loss=0.05748, ctc_loss=0.1077, over 3861121.37 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:06:04,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180581.33333333334, ans=0.125 +2024-08-26 07:06:07,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.21 vs. limit=15.0 +2024-08-26 07:06:36,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=180634.66666666666, ans=0.125 +2024-08-26 07:06:58,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180688.0, ans=0.1 +2024-08-26 07:06:59,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.39 vs. limit=22.5 +2024-08-26 07:07:02,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=180741.33333333334, ans=0.125 +2024-08-26 07:07:02,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=180741.33333333334, ans=0.125 +2024-08-26 07:07:23,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.12 vs. 
limit=12.0 +2024-08-26 07:07:24,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180794.66666666666, ans=0.1 +2024-08-26 07:07:26,414 INFO [train.py:1114] (2/4) Epoch 14, batch 1550, loss[loss=0.2455, simple_loss=0.3054, pruned_loss=0.0678, ctc_loss=0.1251, over 19599.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2827, pruned_loss=0.0577, ctc_loss=0.1082, over 3844900.86 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:07:37,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=180848.0, ans=0.125 +2024-08-26 07:07:39,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=180848.0, ans=0.0 +2024-08-26 07:08:03,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=180954.66666666666, ans=0.0 +2024-08-26 07:08:18,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=181061.33333333334, ans=0.05 +2024-08-26 07:08:20,836 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.735e+02 1.996e+02 2.323e+02 4.332e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-26 07:08:46,920 INFO [train.py:1114] (2/4) Epoch 14, batch 1600, loss[loss=0.2351, simple_loss=0.3016, pruned_loss=0.06205, ctc_loss=0.1111, over 19846.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2825, pruned_loss=0.05768, ctc_loss=0.1081, over 3833861.75 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:08:48,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.92 vs. limit=15.0 +2024-08-26 07:08:51,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181114.66666666666, ans=0.125 +2024-08-26 07:08:58,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=181114.66666666666, ans=0.0 +2024-08-26 07:09:19,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181221.33333333334, ans=0.125 +2024-08-26 07:09:21,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=181221.33333333334, ans=0.025 +2024-08-26 07:09:55,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181274.66666666666, ans=0.1 +2024-08-26 07:10:12,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=181328.0, ans=0.125 +2024-08-26 07:10:20,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=181328.0, ans=0.125 +2024-08-26 07:10:22,384 INFO [train.py:1114] (2/4) Epoch 14, batch 1650, loss[loss=0.2333, simple_loss=0.2992, pruned_loss=0.06024, ctc_loss=0.1174, over 19648.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2823, pruned_loss=0.05772, ctc_loss=0.1082, over 3830692.64 frames. 
], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:11:10,762 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.857e+02 2.243e+02 2.957e+02 5.258e+02, threshold=4.486e+02, percent-clipped=5.0 +2024-08-26 07:11:28,232 INFO [train.py:1114] (2/4) Epoch 14, batch 1700, loss[loss=0.1672, simple_loss=0.2317, pruned_loss=0.0372, ctc_loss=0.07066, over 19674.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2818, pruned_loss=0.05735, ctc_loss=0.1075, over 3844833.60 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:11:29,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181648.0, ans=0.0 +2024-08-26 07:11:48,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=181701.33333333334, ans=0.125 +2024-08-26 07:11:54,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.56 vs. limit=15.0 +2024-08-26 07:12:08,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=181808.0, ans=0.125 +2024-08-26 07:12:13,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=181861.33333333334, ans=0.2 +2024-08-26 07:12:14,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=181861.33333333334, ans=0.0 +2024-08-26 07:12:20,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=181861.33333333334, ans=0.0 +2024-08-26 07:12:24,393 INFO [train.py:1114] (2/4) Epoch 14, batch 1750, loss[loss=0.2026, simple_loss=0.2542, pruned_loss=0.05471, ctc_loss=0.1038, over 19631.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2813, pruned_loss=0.05702, ctc_loss=0.107, over 3850031.43 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:12:31,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=181914.66666666666, ans=10.0 +2024-08-26 07:13:16,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=182021.33333333334, ans=0.125 +2024-08-26 07:13:30,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=182074.66666666666, ans=0.0 +2024-08-26 07:13:35,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.769e+02 2.123e+02 2.747e+02 4.234e+02, threshold=4.245e+02, percent-clipped=0.0 +2024-08-26 07:13:51,682 INFO [train.py:1114] (2/4) Epoch 14, batch 1800, loss[loss=0.2475, simple_loss=0.3034, pruned_loss=0.06934, ctc_loss=0.1326, over 19616.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2814, pruned_loss=0.05719, ctc_loss=0.1075, over 3851925.79 frames. 
], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:14:10,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182234.66666666666, ans=0.1 +2024-08-26 07:14:10,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=182234.66666666666, ans=0.1 +2024-08-26 07:14:19,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.81 vs. limit=15.0 +2024-08-26 07:14:25,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-08-26 07:14:32,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=182341.33333333334, ans=0.0 +2024-08-26 07:14:33,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=182341.33333333334, ans=0.125 +2024-08-26 07:14:49,564 INFO [train.py:1114] (2/4) Epoch 14, batch 1850, loss[loss=0.2182, simple_loss=0.2853, pruned_loss=0.05494, ctc_loss=0.1031, over 19581.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2811, pruned_loss=0.05696, ctc_loss=0.1071, over 3855220.69 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:15:03,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=182501.33333333334, ans=0.07 +2024-08-26 07:15:09,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=182501.33333333334, ans=0.125 +2024-08-26 07:15:35,879 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.755e+02 2.000e+02 2.500e+02 5.147e+02, threshold=4.001e+02, percent-clipped=3.0 +2024-08-26 07:15:46,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=182661.33333333334, ans=0.0 +2024-08-26 07:15:47,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=182661.33333333334, ans=0.1 +2024-08-26 07:15:52,251 INFO [train.py:1114] (2/4) Epoch 14, batch 1900, loss[loss=0.2098, simple_loss=0.2823, pruned_loss=0.04953, ctc_loss=0.09546, over 19652.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2816, pruned_loss=0.05687, ctc_loss=0.107, over 3859926.01 frames. 
], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:15:55,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=182714.66666666666, ans=0.0 +2024-08-26 07:16:08,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=182768.0, ans=0.0 +2024-08-26 07:16:17,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=182821.33333333334, ans=0.125 +2024-08-26 07:16:23,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=182821.33333333334, ans=0.125 +2024-08-26 07:16:56,703 INFO [train.py:1114] (2/4) Epoch 14, batch 1950, loss[loss=0.2198, simple_loss=0.2835, pruned_loss=0.05702, ctc_loss=0.1053, over 19597.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2833, pruned_loss=0.05746, ctc_loss=0.108, over 3869013.35 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:17:24,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=183034.66666666666, ans=0.125 +2024-08-26 07:17:24,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=183034.66666666666, ans=0.0 +2024-08-26 07:17:38,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=183088.0, ans=0.0 +2024-08-26 07:17:51,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=183141.33333333334, ans=0.0 +2024-08-26 07:17:51,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=183141.33333333334, ans=0.125 +2024-08-26 07:17:55,523 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.666e+02 1.941e+02 2.281e+02 4.229e+02, threshold=3.882e+02, percent-clipped=1.0 +2024-08-26 07:18:06,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183194.66666666666, ans=0.1 +2024-08-26 07:18:08,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=183194.66666666666, ans=0.125 +2024-08-26 07:18:14,087 INFO [train.py:1114] (2/4) Epoch 14, batch 2000, loss[loss=0.2163, simple_loss=0.2616, pruned_loss=0.06125, ctc_loss=0.121, over 19640.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2839, pruned_loss=0.05783, ctc_loss=0.1087, over 3853811.42 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 64.0 +2024-08-26 07:18:23,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=183248.0, ans=0.1 +2024-08-26 07:19:11,490 INFO [train.py:1114] (2/4) Epoch 14, batch 2050, loss[loss=0.2132, simple_loss=0.2672, pruned_loss=0.05788, ctc_loss=0.1088, over 19736.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2832, pruned_loss=0.05788, ctc_loss=0.1087, over 3849695.00 frames. 
], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:19:12,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=183514.66666666666, ans=0.125 +2024-08-26 07:19:43,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=183621.33333333334, ans=0.2 +2024-08-26 07:19:54,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.04 vs. limit=8.0 +2024-08-26 07:20:51,590 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.705e+02 1.994e+02 2.461e+02 3.917e+02, threshold=3.988e+02, percent-clipped=1.0 +2024-08-26 07:23:16,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183728.0, ans=0.125 +2024-08-26 07:24:48,251 INFO [train.py:1114] (2/4) Epoch 14, batch 2100, loss[loss=0.206, simple_loss=0.2731, pruned_loss=0.05027, ctc_loss=0.09588, over 19785.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2822, pruned_loss=0.05744, ctc_loss=0.1077, over 3857710.45 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:26:26,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.46 vs. limit=6.0 +2024-08-26 07:26:28,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.34 vs. limit=15.0 +2024-08-26 07:28:51,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=183781.33333333334, ans=0.125 +2024-08-26 07:31:06,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=183781.33333333334, ans=0.1 +2024-08-26 07:43:35,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=183834.66666666666, ans=0.2 +2024-08-26 07:45:05,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. limit=6.0 +2024-08-26 07:50:00,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=183888.0, ans=0.125 +2024-08-26 07:50:01,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183888.0, ans=0.125 +2024-08-26 08:00:56,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=183941.33333333334, ans=0.07 +2024-08-26 08:13:15,356 INFO [train.py:1114] (2/4) Epoch 14, batch 2150, loss[loss=0.1965, simple_loss=0.2658, pruned_loss=0.04592, ctc_loss=0.08843, over 19866.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2815, pruned_loss=0.05714, ctc_loss=0.1071, over 3868069.69 frames. 
], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 08:20:19,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184101.33333333334, ans=0.1 +2024-08-26 08:27:10,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=184101.33333333334, ans=0.125 +2024-08-26 08:27:10,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184101.33333333334, ans=0.125 +2024-08-26 08:59:37,608 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.765e+02 2.052e+02 2.784e+02 6.261e+02, threshold=4.104e+02, percent-clipped=7.0 +2024-08-26 09:03:09,761 INFO [train.py:1114] (2/4) Epoch 14, batch 2200, loss[loss=0.2297, simple_loss=0.2943, pruned_loss=0.06055, ctc_loss=0.1099, over 19569.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2818, pruned_loss=0.05726, ctc_loss=0.1073, over 3866659.28 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 09:07:05,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=184314.66666666666, ans=0.0 +2024-08-26 09:10:29,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184368.0, ans=0.1 +2024-08-26 09:11:10,017 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 09:18:48,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=184474.66666666666, ans=0.0 +2024-08-26 09:19:01,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=184474.66666666666, ans=0.0 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-3 new file mode 100644 index 0000000000000000000000000000000000000000..17477cab5f5c6136e1c75e5c7ec9bba25414b1cd --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-25-02-23-27-3 @@ -0,0 +1,4906 @@ +2024-08-25 02:23:27,610 INFO [train.py:1182] (3/4) Training started +2024-08-25 02:23:27,611 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-25 02:23:27,697 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2654.int.cedar.computecanada.ca', 'IP address': '172.16.146.91'}, 'world_size': 4, 'master_port': 12354, 
'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-25 02:23:27,697 INFO [train.py:1212] (3/4) About to create model +2024-08-25 02:23:29,322 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-25 02:23:29,428 INFO [train.py:1231] (3/4) Using DDP +2024-08-25 02:23:51,124 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-25 02:23:51,497 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-25 02:23:53,043 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-25 02:23:53,051 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-25 02:23:53,293 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-25 02:23:53,346 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-25 02:23:53,653 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-25 02:23:53,654 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. 
+2024-08-25 02:27:50,705 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12271MB +2024-08-25 02:27:52,174 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12346MB +2024-08-25 02:28:01,912 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12346MB +2024-08-25 02:28:03,370 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12346MB +2024-08-25 02:28:25,875 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=256, metric=45.71 vs. limit=7.5 +2024-08-25 02:28:26,152 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12346MB +2024-08-25 02:28:27,777 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12543MB +2024-08-25 02:29:16,119 INFO [train.py:1114] (3/4) Epoch 1, batch 0, loss[loss=8.814, simple_loss=7.18, pruned_loss=6.734, ctc_loss=4.794, over 19817.00 frames. ], tot_loss[loss=8.814, simple_loss=7.18, pruned_loss=6.734, ctc_loss=4.794, over 19817.00 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 1.0 +2024-08-25 02:29:16,120 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 02:29:29,433 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=8.973, simple_loss=7.311, pruned_loss=6.819, ctc_loss=4.895, over 944034.00 frames. +2024-08-25 02:29:29,434 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12543MB +2024-08-25 02:29:31,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.80 vs. limit=7.5 +2024-08-25 02:29:38,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=0.0, ans=0.5 +2024-08-25 02:30:01,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=0.0, ans=0.2 +2024-08-25 02:30:14,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.62 vs. limit=5.013333333333334 +2024-08-25 02:30:23,438 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.714e+03 3.750e+03 4.817e+03 5.615e+03 6.551e+03, threshold=1.927e+04, percent-clipped=0.0 +2024-08-25 02:30:48,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=492.70 vs. limit=7.54 +2024-08-25 02:30:48,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.98 vs. limit=7.52 +2024-08-25 02:32:26,054 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.867e+02 1.019e+03 3.714e+03 5.063e+03 6.846e+03, threshold=1.486e+04, percent-clipped=0.0 +2024-08-25 02:32:37,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=63.16 vs. limit=5.026666666666666 +2024-08-25 02:32:51,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=349.16 vs. limit=7.56 +2024-08-25 02:32:52,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.09 vs. limit=4.064 +2024-08-25 02:33:26,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=139.28 vs. 
limit=7.66 +2024-08-25 02:33:35,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=52.92 vs. limit=7.58 +2024-08-25 02:33:36,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+02 7.649e+02 1.076e+03 3.731e+03 6.846e+03, threshold=4.304e+03, percent-clipped=0.0 +2024-08-25 02:34:04,702 INFO [train.py:1114] (3/4) Epoch 1, batch 50, loss[loss=1.449, simple_loss=1.079, pruned_loss=1.264, ctc_loss=1.142, over 19710.00 frames. ], tot_loss[loss=3.551, simple_loss=2.933, pruned_loss=2.55, ctc_loss=1.777, over 845595.77 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 02:34:06,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=10.88 vs. limit=4.1066666666666665 +2024-08-25 02:34:15,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=87.90 vs. limit=7.6 +2024-08-25 02:34:22,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=266.6666666666667, ans=0.09833333333333334 +2024-08-25 02:34:26,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=320.0, ans=0.485 +2024-08-25 02:34:30,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=320.0, ans=0.049 +2024-08-25 02:35:07,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=31.08 vs. limit=7.78 +2024-08-25 02:35:08,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=375.51 vs. limit=7.64 +2024-08-25 02:35:14,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=373.3333333333333, ans=0.4825 +2024-08-25 02:35:17,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=127.77 vs. limit=7.64 +2024-08-25 02:35:28,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=426.6666666666667, ans=4.1706666666666665 +2024-08-25 02:35:30,933 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=65.72 vs. limit=7.66 +2024-08-25 02:37:30,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=480.0, ans=0.223 +2024-08-25 02:37:30,768 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=20.83 vs. limit=7.68 +2024-08-25 02:37:42,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=40.75 vs. 
limit=7.86 +2024-08-25 02:37:47,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=480.0, ans=0.4775 +2024-08-25 02:37:50,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=533.3333333333334, ans=0.475 +2024-08-25 02:37:51,544 INFO [train.py:1114] (3/4) Epoch 1, batch 100, loss[loss=1.321, simple_loss=0.9457, pruned_loss=1.205, ctc_loss=1.121, over 19721.00 frames. ], tot_loss[loss=2.416, simple_loss=1.918, pruned_loss=1.865, ctc_loss=1.473, over 1499372.19 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 02:37:55,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.639e+01 1.517e+02 3.832e+02 1.019e+03 9.054e+03, threshold=7.665e+02, percent-clipped=2.0 +2024-08-25 02:38:04,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=82.33 vs. limit=7.7 +2024-08-25 02:38:04,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=25.01 vs. limit=7.7 +2024-08-25 02:38:08,733 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 02:38:09,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=37.86 vs. limit=7.94 +2024-08-25 02:38:24,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=85.48 vs. limit=5.1466666666666665 +2024-08-25 02:38:35,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=38.13 vs. limit=7.72 +2024-08-25 02:38:36,218 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=77.19 vs. limit=5.32 +2024-08-25 02:38:36,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=55.71 vs. limit=7.74 +2024-08-25 02:38:41,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=140.28 vs. limit=5.32 +2024-08-25 02:38:52,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=35.68 vs. limit=7.98 +2024-08-25 02:38:53,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=78.17 vs. limit=7.76 +2024-08-25 02:39:03,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=693.3333333333334, ans=0.4675 +2024-08-25 02:39:05,324 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=71.19 vs. 
limit=7.76 +2024-08-25 02:39:07,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn1.whiten.whitening_limit, batch_count=693.3333333333334, ans=8.02 +2024-08-25 02:39:09,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=746.6666666666666, ans=0.208 +2024-08-25 02:39:09,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=746.6666666666666, ans=0.46499999999999997 +2024-08-25 02:39:11,152 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=34.00 vs. limit=7.78 +2024-08-25 02:39:13,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=68.01 vs. limit=7.78 +2024-08-25 02:39:16,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=11.15 vs. limit=5.1866666666666665 +2024-08-25 02:39:17,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=68.11 vs. limit=7.78 +2024-08-25 02:39:22,878 INFO [train.py:1114] (3/4) Epoch 1, batch 150, loss[loss=1.161, simple_loss=0.8052, pruned_loss=1.019, ctc_loss=1.075, over 19706.00 frames. ], tot_loss[loss=1.943, simple_loss=1.492, pruned_loss=1.564, ctc_loss=1.344, over 2028131.03 frames. ], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 02:39:39,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.92 vs. limit=7.82 +2024-08-25 02:39:39,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=72.71 vs. limit=5.426666666666667 +2024-08-25 02:39:51,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=32.54 vs. limit=8.18 +2024-08-25 02:39:54,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=33.23 vs. limit=5.453333333333333 +2024-08-25 02:39:58,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.61 vs. limit=7.84 +2024-08-25 02:40:04,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=14.35 vs. limit=4.384 +2024-08-25 02:40:08,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=96.49 vs. limit=5.48 +2024-08-25 02:40:12,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=960.0, ans=0.8664000000000001 +2024-08-25 02:40:12,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=380.50 vs. limit=7.86 +2024-08-25 02:40:17,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=46.32 vs. 
limit=7.88 +2024-08-25 02:40:17,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=26.09 vs. limit=7.88 +2024-08-25 02:40:19,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=25.01 vs. limit=7.88 +2024-08-25 02:40:32,726 INFO [train.py:1114] (3/4) Epoch 1, batch 200, loss[loss=1.242, simple_loss=0.8555, pruned_loss=0.9993, ctc_loss=1.196, over 18388.00 frames. ], tot_loss[loss=1.689, simple_loss=1.263, pruned_loss=1.374, ctc_loss=1.279, over 2436267.97 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0 +2024-08-25 02:40:32,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=1066.6666666666667, ans=0.19 +2024-08-25 02:40:33,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=64.14 vs. limit=7.9 +2024-08-25 02:40:35,794 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 02:40:36,946 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.587e+01 1.185e+02 1.545e+02 1.999e+02 4.229e+02, threshold=3.089e+02, percent-clipped=0.0 +2024-08-25 02:41:05,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=1120.0, ans=0.0748 +2024-08-25 02:41:05,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.26 vs. limit=5.28 +2024-08-25 02:41:11,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1120.0, ans=0.2888 +2024-08-25 02:41:15,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=1120.0, ans=0.4475 +2024-08-25 02:41:20,875 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 02:41:22,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=1173.3333333333333, ans=0.445 +2024-08-25 02:41:22,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=3.176 +2024-08-25 02:41:31,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=14.97 vs. limit=5.293333333333333 +2024-08-25 02:41:31,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=40.64 vs. limit=7.94 +2024-08-25 02:41:31,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.17 vs. limit=8.38 +2024-08-25 02:41:34,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=1173.3333333333333, ans=0.28826666666666667 +2024-08-25 02:41:35,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.76 vs. 
limit=5.586666666666667 +2024-08-25 02:41:42,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=15.52 vs. limit=4.490666666666667 +2024-08-25 02:41:54,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1226.6666666666667, ans=0.28773333333333334 +2024-08-25 02:41:57,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1280.0, ans=0.2872 +2024-08-25 02:42:01,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=48.02 vs. limit=7.98 +2024-08-25 02:42:07,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=1280.0, ans=0.44 +2024-08-25 02:42:11,852 INFO [train.py:1114] (3/4) Epoch 1, batch 250, loss[loss=1.204, simple_loss=0.8178, pruned_loss=0.9562, ctc_loss=1.172, over 19410.00 frames. ], tot_loss[loss=1.536, simple_loss=1.124, pruned_loss=1.249, ctc_loss=1.244, over 2756308.20 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0 +2024-08-25 02:42:12,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=1333.3333333333333, ans=0.8533333333333334 +2024-08-25 02:42:17,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.89 vs. limit=8.5 +2024-08-25 02:42:29,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1386.6666666666667, ans=0.28613333333333335 +2024-08-25 02:42:40,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=79.17 vs. limit=8.54 +2024-08-25 02:42:45,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=93.90 vs. limit=8.04 +2024-08-25 02:42:47,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=1440.0, ans=0.8496 +2024-08-25 02:42:52,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.29 vs. limit=8.58 +2024-08-25 02:42:59,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=132.28 vs. limit=8.06 +2024-08-25 02:42:59,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=17.41 vs. limit=5.373333333333333 +2024-08-25 02:43:01,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=219.38 vs. 
limit=8.06 +2024-08-25 02:43:04,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1493.3333333333333, ans=0.28506666666666663 +2024-08-25 02:43:07,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=1493.3333333333333, ans=0.43 +2024-08-25 02:43:10,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=99.45 vs. limit=8.08 +2024-08-25 02:43:11,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1546.6666666666667, ans=0.4275 +2024-08-25 02:43:12,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=1546.6666666666667, ans=0.142 +2024-08-25 02:43:13,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=49.82 vs. limit=8.08 +2024-08-25 02:43:18,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=1546.6666666666667, ans=0.4275 +2024-08-25 02:43:21,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=1546.6666666666667, ans=0.142 +2024-08-25 02:43:23,908 INFO [train.py:1114] (3/4) Epoch 1, batch 300, loss[loss=1.262, simple_loss=0.8449, pruned_loss=0.9881, ctc_loss=1.239, over 19532.00 frames. ], tot_loss[loss=1.435, simple_loss=1.03, pruned_loss=1.159, ctc_loss=1.22, over 3001332.80 frames. ], batch size: 61, lr: 3.60e-02, grad_scale: 2.0 +2024-08-25 02:43:24,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=15.73 vs. limit=4.64 +2024-08-25 02:43:25,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1600.0, ans=0.284 +2024-08-25 02:43:27,477 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.84 vs. limit=5.4 +2024-08-25 02:43:27,973 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.516e+01 1.281e+02 1.784e+02 2.457e+02 1.092e+03, threshold=3.568e+02, percent-clipped=12.0 +2024-08-25 02:43:33,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=1600.0, ans=0.425 +2024-08-25 02:43:41,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=131.44 vs. limit=8.12 +2024-08-25 02:43:42,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=39.90 vs. limit=8.74 +2024-08-25 02:44:00,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=31.17 vs. limit=8.78 +2024-08-25 02:44:16,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=11.52 vs. 
limit=4.704 +2024-08-25 02:44:22,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.06 vs. limit=8.82 +2024-08-25 02:44:24,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=1813.3333333333333, ans=0.08866666666666667 +2024-08-25 02:44:27,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=198.81 vs. limit=8.18 +2024-08-25 02:44:31,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=59.97 vs. limit=8.18 +2024-08-25 02:44:35,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=149.78 vs. limit=8.18 +2024-08-25 02:44:38,846 INFO [train.py:1114] (3/4) Epoch 1, batch 350, loss[loss=1.112, simple_loss=0.7322, pruned_loss=0.8619, ctc_loss=1.109, over 19760.00 frames. ], tot_loss[loss=1.368, simple_loss=0.9653, pruned_loss=1.096, ctc_loss=1.207, over 3191955.11 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 2.0 +2024-08-25 02:44:45,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=1866.6666666666667, ans=0.4125 +2024-08-25 02:44:48,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=18.14 vs. limit=5.933333333333334 +2024-08-25 02:44:52,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=28.47 vs. limit=8.22 +2024-08-25 02:44:53,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.86 vs. limit=5.48 +2024-08-25 02:44:58,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.82 vs. limit=8.94 +2024-08-25 02:45:03,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=105.44 vs. limit=8.22 +2024-08-25 02:45:18,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=55.98 vs. limit=8.24 +2024-08-25 02:45:20,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=89.70 vs. limit=8.24 +2024-08-25 02:45:21,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=110.33 vs. limit=8.24 +2024-08-25 02:45:21,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=35.53 vs. 
limit=8.24 +2024-08-25 02:45:22,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=1973.3333333333333, ans=0.8309333333333333 +2024-08-25 02:45:22,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=1973.3333333333333, ans=0.4075 +2024-08-25 02:45:23,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=28.72 vs. limit=8.26 +2024-08-25 02:45:25,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.83 vs. limit=9.02 +2024-08-25 02:45:27,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.61 vs. limit=9.02 +2024-08-25 02:45:32,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=50.93 vs. limit=8.26 +2024-08-25 02:45:33,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=2026.6666666666667, ans=0.08733333333333333 +2024-08-25 02:45:38,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=81.20 vs. limit=8.28 +2024-08-25 02:45:39,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=182.31 vs. limit=8.28 +2024-08-25 02:46:54,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=2080.0, ans=0.8272 +2024-08-25 02:47:06,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.16 vs. limit=6.04 +2024-08-25 02:47:08,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=41.59 vs. limit=8.3 +2024-08-25 02:47:09,645 INFO [train.py:1114] (3/4) Epoch 1, batch 400, loss[loss=1.163, simple_loss=0.7652, pruned_loss=0.8849, ctc_loss=1.133, over 19503.00 frames. ], tot_loss[loss=1.317, simple_loss=0.9148, pruned_loss=1.044, ctc_loss=1.191, over 3344538.75 frames. ], batch size: 54, lr: 4.05e-02, grad_scale: 4.0 +2024-08-25 02:47:13,855 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.873e+01 1.501e+02 1.913e+02 2.464e+02 6.763e+02, threshold=3.826e+02, percent-clipped=7.0 +2024-08-25 02:47:25,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.68 vs. 
limit=5.546666666666667 +2024-08-25 02:47:37,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=2186.6666666666665, ans=0.8234666666666667 +2024-08-25 02:47:38,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=2186.6666666666665, ans=9.14 +2024-08-25 02:47:41,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=2240.0, ans=0.0496 +2024-08-25 02:47:44,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=27.93 vs. limit=8.34 +2024-08-25 02:47:49,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=2240.0, ans=0.395 +2024-08-25 02:47:59,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=85.83 vs. limit=8.36 +2024-08-25 02:47:59,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=18.35 vs. limit=8.36 +2024-08-25 02:48:00,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=2293.3333333333335, ans=0.3925 +2024-08-25 02:48:00,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=47.90 vs. limit=8.36 +2024-08-25 02:48:01,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=2293.3333333333335, ans=0.3925 +2024-08-25 02:48:04,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=60.18 vs. limit=8.36 +2024-08-25 02:48:19,324 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=182.79 vs. limit=8.38 +2024-08-25 02:48:21,725 INFO [train.py:1114] (3/4) Epoch 1, batch 450, loss[loss=1.206, simple_loss=0.7863, pruned_loss=0.9028, ctc_loss=1.176, over 19616.00 frames. ], tot_loss[loss=1.282, simple_loss=0.8796, pruned_loss=1.006, ctc_loss=1.179, over 3453240.13 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0 +2024-08-25 02:48:22,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=17.89 vs. limit=8.4 +2024-08-25 02:48:22,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=9.3 +2024-08-25 02:48:23,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=15.54 vs. 
limit=8.4 +2024-08-25 02:48:25,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2400.0, ans=0.3875 +2024-08-25 02:48:28,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=2400.0, ans=0.23600000000000002 +2024-08-25 02:48:38,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=9.34 +2024-08-25 02:48:39,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2453.3333333333335, ans=0.385 +2024-08-25 02:48:46,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.03 vs. limit=9.34 +2024-08-25 02:48:48,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=7.10 vs. limit=5.0 +2024-08-25 02:48:49,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=2506.6666666666665, ans=0.3825 +2024-08-25 02:48:57,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=2506.6666666666665, ans=0.3825 +2024-08-25 02:49:05,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.31 vs. limit=6.28 +2024-08-25 02:49:08,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.34 vs. limit=9.42 +2024-08-25 02:49:08,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2560.0, ans=0.38 +2024-08-25 02:49:09,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.15 vs. limit=8.46 +2024-08-25 02:49:12,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.27 vs. limit=9.42 +2024-08-25 02:49:19,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=2613.3333333333335, ans=0.041199999999999994 +2024-08-25 02:49:28,641 INFO [train.py:1114] (3/4) Epoch 1, batch 500, loss[loss=1.208, simple_loss=0.7978, pruned_loss=0.8577, ctc_loss=1.154, over 19686.00 frames. ], tot_loss[loss=1.25, simple_loss=0.8494, pruned_loss=0.9661, ctc_loss=1.16, over 3547250.01 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:49:30,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=12.23 vs. limit=8.5 +2024-08-25 02:49:32,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.834e+02 2.411e+02 2.968e+02 6.409e+02, threshold=4.822e+02, percent-clipped=7.0 +2024-08-25 02:49:45,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.94 vs. 
limit=9.54 +2024-08-25 02:49:48,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.38 vs. limit=8.52 +2024-08-25 02:49:50,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=9.54 +2024-08-25 02:50:09,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=2826.6666666666665, ans=0.094 +2024-08-25 02:50:11,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.04 vs. limit=9.620000000000001 +2024-08-25 02:50:15,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2826.6666666666665, ans=0.3675 +2024-08-25 02:50:19,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=2826.6666666666665, ans=0.094 +2024-08-25 02:50:19,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.04 vs. limit=9.620000000000001 +2024-08-25 02:50:20,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.14 vs. limit=6.413333333333333 +2024-08-25 02:50:21,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=20.79 vs. limit=8.56 +2024-08-25 02:50:30,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.71 vs. limit=9.66 +2024-08-25 02:50:30,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=2880.0, ans=8.58 +2024-08-25 02:50:32,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=2880.0, ans=0.015519999999999999 +2024-08-25 02:50:39,214 INFO [train.py:1114] (3/4) Epoch 1, batch 550, loss[loss=1.167, simple_loss=0.7934, pruned_loss=0.7574, ctc_loss=1.096, over 19216.00 frames. ], tot_loss[loss=1.22, simple_loss=0.8261, pruned_loss=0.9176, ctc_loss=1.138, over 3608044.28 frames. 
], batch size: 71, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:50:50,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2933.3333333333335, ans=0.3625 +2024-08-25 02:51:08,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=2986.6666666666665, ans=0.36 +2024-08-25 02:51:11,114 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.769e+01 +2024-08-25 02:51:12,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=3040.0, ans=0.0316 +2024-08-25 02:51:13,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=3040.0, ans=6.9 +2024-08-25 02:51:15,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=3040.0, ans=0.7936000000000001 +2024-08-25 02:51:34,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.86 vs. limit=9.82 +2024-08-25 02:51:44,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.34 vs. limit=8.68 +2024-08-25 02:51:53,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3200.0, ans=0.268 +2024-08-25 02:51:55,117 INFO [train.py:1114] (3/4) Epoch 1, batch 600, loss[loss=1.042, simple_loss=0.715, pruned_loss=0.6269, ctc_loss=1.01, over 19385.00 frames. ], tot_loss[loss=1.175, simple_loss=0.7973, pruned_loss=0.8511, ctc_loss=1.102, over 3666354.70 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:51:55,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=3200.0, ans=0.268 +2024-08-25 02:51:59,173 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 2.677e+02 3.553e+02 4.456e+02 9.241e+02, threshold=7.106e+02, percent-clipped=18.0 +2024-08-25 02:51:59,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.41 vs. limit=8.7 +2024-08-25 02:52:00,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=3200.0, ans=0.35 +2024-08-25 02:52:02,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=9.9 +2024-08-25 02:52:07,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=9.94 +2024-08-25 02:52:20,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.78 vs. limit=9.94 +2024-08-25 02:52:21,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.42 vs. limit=5.826666666666666 +2024-08-25 02:52:42,302 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.98 vs. 
limit=6.68 +2024-08-25 02:52:58,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.63 vs. limit=5.8533333333333335 +2024-08-25 02:53:01,026 INFO [train.py:1114] (3/4) Epoch 1, batch 650, loss[loss=0.8302, simple_loss=0.5855, pruned_loss=0.4569, ctc_loss=0.8022, over 19771.00 frames. ], tot_loss[loss=1.112, simple_loss=0.7586, pruned_loss=0.7743, ctc_loss=1.048, over 3716874.53 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 4.0 +2024-08-25 02:53:14,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.05 vs. limit=5.88 +2024-08-25 02:53:19,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=3520.0, ans=0.01808 +2024-08-25 02:53:48,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=8.86 +2024-08-25 02:53:52,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3680.0, ans=0.03999999999999998 +2024-08-25 02:53:52,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.15 vs. limit=8.879999999999999 +2024-08-25 02:53:53,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=3680.0, ans=0.017199999999999993 +2024-08-25 02:53:54,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=3680.0, ans=0.3275 +2024-08-25 02:54:09,016 INFO [train.py:1114] (3/4) Epoch 1, batch 700, loss[loss=0.8421, simple_loss=0.5906, pruned_loss=0.4705, ctc_loss=0.7974, over 19732.00 frames. ], tot_loss[loss=1.051, simple_loss=0.7226, pruned_loss=0.7019, ctc_loss=0.993, over 3748893.67 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 02:54:14,192 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.975e+02 3.878e+02 5.385e+02 1.936e+03, threshold=7.756e+02, percent-clipped=10.0 +2024-08-25 02:54:16,357 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.32 vs. limit=8.9 +2024-08-25 02:54:21,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.70 vs. limit=10.34 +2024-08-25 02:54:36,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=3840.0, ans=0.013600000000000001 +2024-08-25 02:54:53,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=10.56 vs. limit=10.42 +2024-08-25 02:54:57,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.79 vs. limit=10.42 +2024-08-25 02:55:16,797 INFO [train.py:1114] (3/4) Epoch 1, batch 750, loss[loss=0.8315, simple_loss=0.6012, pruned_loss=0.4291, ctc_loss=0.7765, over 19497.00 frames. ], tot_loss[loss=0.9888, simple_loss=0.6859, pruned_loss=0.6337, ctc_loss=0.9341, over 3776162.02 frames. 
], batch size: 54, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 02:55:50,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.13 vs. limit=10.58
+2024-08-25 02:55:56,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.51 vs. limit=10.58
+2024-08-25 02:56:07,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=4160.0, ans=0.0
+2024-08-25 02:56:20,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=4213.333333333333, ans=0.3025
+2024-08-25 02:56:24,860 INFO [train.py:1114] (3/4) Epoch 1, batch 800, loss[loss=0.7114, simple_loss=0.5194, pruned_loss=0.3592, ctc_loss=0.6532, over 19823.00 frames. ], tot_loss[loss=0.9304, simple_loss=0.6523, pruned_loss=0.5724, ctc_loss=0.8747, over 3797711.47 frames. ], batch size: 49, lr: 4.49e-02, grad_scale: 16.0
+2024-08-25 02:56:26,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.09 vs. limit=9.1
+2024-08-25 02:56:29,873 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.884e+02 2.945e+02 3.956e+02 5.210e+02 9.107e+02, threshold=7.913e+02, percent-clipped=4.0
+2024-08-25 02:56:35,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.05 vs. limit=10.7
+2024-08-25 02:56:49,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=10.74
+2024-08-25 02:56:50,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.36 vs. limit=9.14
+2024-08-25 02:57:01,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4373.333333333333, ans=0.29500000000000004
+2024-08-25 02:57:04,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. limit=9.16
+2024-08-25 02:57:13,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.63 vs. limit=5.770666666666667
+2024-08-25 02:57:19,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=10.86
+2024-08-25 02:57:27,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=4480.0, ans=0.7432000000000001
+2024-08-25 02:57:30,595 INFO [train.py:1114] (3/4) Epoch 1, batch 850, loss[loss=0.7144, simple_loss=0.5373, pruned_loss=0.3422, ctc_loss=0.631, over 19649.00 frames. ], tot_loss[loss=0.8764, simple_loss=0.622, pruned_loss=0.518, ctc_loss=0.8169, over 3815877.14 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 16.0
+2024-08-25 02:57:33,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=4533.333333333333, ans=0.7413333333333334
+2024-08-25 02:57:49,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=4586.666666666667, ans=0.26880000000000004
+2024-08-25 02:58:07,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4640.0, ans=0.2825
+2024-08-25 02:58:42,811 INFO [train.py:1114] (3/4) Epoch 1, batch 900, loss[loss=0.6219, simple_loss=0.4747, pruned_loss=0.2905, ctc_loss=0.5369, over 19807.00 frames. ], tot_loss[loss=0.8294, simple_loss=0.5959, pruned_loss=0.4722, ctc_loss=0.7652, over 3820209.74 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 02:58:44,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.00 vs. limit=11.1
+2024-08-25 02:58:48,908 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.783e+02 3.682e+02 4.971e+02 1.764e+03, threshold=7.364e+02, percent-clipped=6.0
+2024-08-25 02:58:50,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=4800.0, ans=0.732
+2024-08-25 02:58:55,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=4853.333333333333, ans=0.00981449275362319
+2024-08-25 02:59:04,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=4853.333333333333, ans=0.09899494936611666
+2024-08-25 02:59:29,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=4960.0, ans=0.009791304347826088
+2024-08-25 02:59:38,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.49 vs. limit=6.253333333333333
+2024-08-25 02:59:50,553 INFO [train.py:1114] (3/4) Epoch 1, batch 950, loss[loss=0.5815, simple_loss=0.4529, pruned_loss=0.2567, ctc_loss=0.5036, over 19512.00 frames. ], tot_loss[loss=0.7887, simple_loss=0.5736, pruned_loss=0.4332, ctc_loss=0.7197, over 3821989.98 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 02:59:50,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5066.666666666667, ans=0.2625
+2024-08-25 02:59:55,824 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=5.323e-03
+2024-08-25 03:00:04,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=5120.0, ans=0.26
+2024-08-25 03:00:06,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=5120.0, ans=0.26
+2024-08-25 03:00:33,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=5226.666666666667, ans=0.04488888888888889
+2024-08-25 03:00:45,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5280.0, ans=0.2525
+2024-08-25 03:00:53,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5333.333333333333, ans=0.25
+2024-08-25 03:00:54,642 INFO [train.py:1114] (3/4) Epoch 1, batch 1000, loss[loss=0.5876, simple_loss=0.462, pruned_loss=0.2558, ctc_loss=0.5019, over 19852.00 frames. ], tot_loss[loss=0.7529, simple_loss=0.5544, pruned_loss=0.3998, ctc_loss=0.6785, over 3818261.56 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:00:56,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5333.333333333333, ans=0.25
+2024-08-25 03:01:00,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5333.333333333333, ans=0.25
+2024-08-25 03:01:01,324 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.847e+02 3.463e+02 4.611e+02 9.717e+02, threshold=6.926e+02, percent-clipped=4.0
+2024-08-25 03:01:10,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=5386.666666666667, ans=0.2475
+2024-08-25 03:01:10,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=5386.666666666667, ans=0.07
+2024-08-25 03:01:13,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5386.666666666667, ans=0.24613333333333332
+2024-08-25 03:01:28,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.84 vs. limit=7.720000000000001
+2024-08-25 03:01:33,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=6.36
+2024-08-25 03:01:33,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=5440.0, ans=0.245
+2024-08-25 03:01:48,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=5493.333333333333, ans=0.2425
+2024-08-25 03:02:07,680 INFO [train.py:1114] (3/4) Epoch 1, batch 1050, loss[loss=0.6458, simple_loss=0.5106, pruned_loss=0.285, ctc_loss=0.5304, over 19830.00 frames. ], tot_loss[loss=0.7148, simple_loss=0.5338, pruned_loss=0.3669, ctc_loss=0.6352, over 3824917.30 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:02:09,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=5600.0, ans=11.7
+2024-08-25 03:02:15,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5600.0, ans=0.2375
+2024-08-25 03:02:42,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=5706.666666666667, ans=0.23249999999999998
+2024-08-25 03:02:51,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5760.0, ans=0.2424
+2024-08-25 03:03:13,746 INFO [train.py:1114] (3/4) Epoch 1, batch 1100, loss[loss=0.5029, simple_loss=0.4111, pruned_loss=0.2055, ctc_loss=0.413, over 19577.00 frames. ], tot_loss[loss=0.6832, simple_loss=0.517, pruned_loss=0.3399, ctc_loss=0.5987, over 3832258.35 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 8.0
+2024-08-25 03:03:14,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.01 vs. limit=11.9
+2024-08-25 03:03:17,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5866.666666666667, ans=0.0
+2024-08-25 03:03:20,123 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 2.626e+02 3.754e+02 4.559e+02 6.965e+02, threshold=7.509e+02, percent-clipped=1.0
+2024-08-25 03:03:34,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=5920.0, ans=0.009582608695652174
+2024-08-25 03:03:37,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=5920.0, ans=0.6928000000000001
+2024-08-25 03:03:51,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=6026.666666666667, ans=0.041555555555555554
+2024-08-25 03:03:54,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=6026.666666666667, ans=0.031166666666666665
+2024-08-25 03:04:14,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=6080.0, ans=0.04133333333333333
+2024-08-25 03:04:17,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=6133.333333333333, ans=0.04111111111111111
+2024-08-25 03:04:18,508 INFO [train.py:1114] (3/4) Epoch 1, batch 1150, loss[loss=0.5375, simple_loss=0.4394, pruned_loss=0.2262, ctc_loss=0.4237, over 19574.00 frames. ], tot_loss[loss=0.6591, simple_loss=0.5045, pruned_loss=0.3192, ctc_loss=0.5702, over 3829761.48 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 8.0
+2024-08-25 03:04:27,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.71 vs. limit=8.066666666666666
+2024-08-25 03:04:42,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=6186.666666666667, ans=0.04088888888888889
+2024-08-25 03:05:04,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.62 vs. limit=6.573333333333333
+2024-08-25 03:05:23,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=6400.0, ans=0.2
+2024-08-25 03:05:24,507 INFO [train.py:1114] (3/4) Epoch 1, batch 1200, loss[loss=0.5894, simple_loss=0.48, pruned_loss=0.2459, ctc_loss=0.4801, over 19835.00 frames. ], tot_loss[loss=0.6376, simple_loss=0.4939, pruned_loss=0.3008, ctc_loss=0.5452, over 3825141.66 frames. ], batch size: 57, lr: 4.47e-02, grad_scale: 16.0
+2024-08-25 03:05:30,706 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.839e+02 2.702e+02 3.344e+02 4.028e+02 1.038e+03, threshold=6.687e+02, percent-clipped=4.0
+2024-08-25 03:05:48,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=12.34
+2024-08-25 03:06:30,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=6613.333333333333, ans=0.03911111111111111
+2024-08-25 03:06:33,173 INFO [train.py:1114] (3/4) Epoch 1, batch 1250, loss[loss=0.5828, simple_loss=0.4744, pruned_loss=0.2438, ctc_loss=0.4758, over 19546.00 frames. ], tot_loss[loss=0.6158, simple_loss=0.4831, pruned_loss=0.2833, ctc_loss=0.5193, over 3843098.91 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 8.0
+2024-08-25 03:06:59,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=6773.333333333333, ans=0.1825
+2024-08-25 03:07:02,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6773.333333333333, ans=0.1825
+2024-08-25 03:07:08,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=6773.333333333333, ans=0.1825
+2024-08-25 03:07:20,843 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=7.004e-02
+2024-08-25 03:07:31,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=6880.0, ans=0.07
+2024-08-25 03:07:53,434 INFO [train.py:1114] (3/4) Epoch 1, batch 1300, loss[loss=0.6086, simple_loss=0.4867, pruned_loss=0.2655, ctc_loss=0.4911, over 18922.00 frames. ], tot_loss[loss=0.5979, simple_loss=0.474, pruned_loss=0.2695, ctc_loss=0.4982, over 3847791.71 frames. ], batch size: 76, lr: 4.47e-02, grad_scale: 8.0
+2024-08-25 03:07:55,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6933.333333333333, ans=0.175
+2024-08-25 03:07:58,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=6933.333333333333, ans=0.175
+2024-08-25 03:08:00,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=10.1
+2024-08-25 03:08:00,989 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.595e+02 3.171e+02 4.007e+02 5.829e+02, threshold=6.342e+02, percent-clipped=0.0
+2024-08-25 03:08:29,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=7040.0, ans=8.52
+2024-08-25 03:08:50,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=7146.666666666667, ans=0.16499999999999998
+2024-08-25 03:08:54,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.29 vs. limit=10.18
+2024-08-25 03:09:00,189 INFO [train.py:1114] (3/4) Epoch 1, batch 1350, loss[loss=0.4972, simple_loss=0.4234, pruned_loss=0.1963, ctc_loss=0.3893, over 19754.00 frames. ], tot_loss[loss=0.5814, simple_loss=0.4658, pruned_loss=0.2572, ctc_loss=0.4792, over 3858533.61 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:10:25,730 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:10:30,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7306.666666666667, ans=0.0
+2024-08-25 03:12:10,363 INFO [train.py:1114] (3/4) Epoch 1, batch 1400, loss[loss=0.4248, simple_loss=0.3704, pruned_loss=0.1633, ctc_loss=0.3244, over 19661.00 frames. ], tot_loss[loss=0.5644, simple_loss=0.4573, pruned_loss=0.2451, ctc_loss=0.46, over 3865756.22 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:12:15,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.29 vs. limit=10.3
+2024-08-25 03:12:32,379 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.490e+02 2.974e+02 4.034e+02 6.918e+02, threshold=5.948e+02, percent-clipped=1.0
+2024-08-25 03:12:55,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.73 vs. limit=10.34
+2024-08-25 03:13:02,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.10 vs. limit=7.029333333333334
+2024-08-25 03:13:13,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.10 vs. limit=8.813333333333333
+2024-08-25 03:13:18,918 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=1.624e-02
+2024-08-25 03:13:21,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=7680.0, ans=0.14
+2024-08-25 03:13:28,401 INFO [train.py:1114] (3/4) Epoch 1, batch 1450, loss[loss=0.5137, simple_loss=0.4464, pruned_loss=0.2009, ctc_loss=0.39, over 19644.00 frames. ], tot_loss[loss=0.5523, simple_loss=0.4515, pruned_loss=0.2366, ctc_loss=0.4456, over 3863304.18 frames. ], batch size: 63, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:13:47,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=7786.666666666667, ans=0.025
+2024-08-25 03:13:56,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=7840.0, ans=0.1325
+2024-08-25 03:13:56,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.43 vs. limit=13.379999999999999
+2024-08-25 03:14:09,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=7893.333333333333, ans=0.04949747468305833
+2024-08-25 03:14:10,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=7893.333333333333, ans=0.13
+2024-08-25 03:14:11,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.76 vs. limit=7.157333333333334
+2024-08-25 03:14:23,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.43 vs. limit=10.48
+2024-08-25 03:14:29,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=8000.0, ans=0.125
+2024-08-25 03:14:30,703 INFO [train.py:1114] (3/4) Epoch 1, batch 1500, loss[loss=0.5283, simple_loss=0.4556, pruned_loss=0.2093, ctc_loss=0.4066, over 19599.00 frames. ], tot_loss[loss=0.5432, simple_loss=0.4479, pruned_loss=0.2298, ctc_loss=0.4342, over 3863668.54 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 8.0
+2024-08-25 03:14:38,510 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.576e+02 3.382e+02 4.091e+02 7.597e+02, threshold=6.763e+02, percent-clipped=6.0
+2024-08-25 03:14:42,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=8053.333333333333, ans=0.09899494936611666
+2024-08-25 03:14:52,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.62 vs. limit=7.013333333333334
+2024-08-25 03:15:11,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=8160.0, ans=0.125
+2024-08-25 03:15:13,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.69 vs. limit=13.620000000000001
+2024-08-25 03:15:19,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=8160.0, ans=0.1
+2024-08-25 03:15:40,015 INFO [train.py:1114] (3/4) Epoch 1, batch 1550, loss[loss=0.5335, simple_loss=0.4589, pruned_loss=0.2132, ctc_loss=0.4116, over 19615.00 frames. ], tot_loss[loss=0.5328, simple_loss=0.4431, pruned_loss=0.223, ctc_loss=0.4226, over 3847809.76 frames. ], batch size: 60, lr: 4.45e-02, grad_scale: 8.0
+2024-08-25 03:15:46,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=8266.666666666666, ans=0.025
+2024-08-25 03:16:11,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=8373.333333333334, ans=0.00904927536231884
+2024-08-25 03:16:26,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.68 vs. limit=13.82
+2024-08-25 03:16:27,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.00 vs. limit=9.213333333333333
+2024-08-25 03:16:41,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=8480.0, ans=0.00902608695652174
+2024-08-25 03:16:43,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=10.68
+2024-08-25 03:16:46,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=8480.0, ans=0.125
+2024-08-25 03:16:49,342 INFO [train.py:1114] (3/4) Epoch 1, batch 1600, loss[loss=0.5179, simple_loss=0.4443, pruned_loss=0.2092, ctc_loss=0.3994, over 19840.00 frames. ], tot_loss[loss=0.5244, simple_loss=0.4391, pruned_loss=0.2178, ctc_loss=0.413, over 3837198.14 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 16.0
+2024-08-25 03:16:59,533 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.604e+02 3.125e+02 4.170e+02 2.617e+03, threshold=6.251e+02, percent-clipped=7.0
+2024-08-25 03:17:04,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=8586.666666666666, ans=0.09899494936611666
+2024-08-25 03:17:09,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=8586.666666666666, ans=0.125
+2024-08-25 03:17:10,954 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=2.598e-03
+2024-08-25 03:17:22,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.40 vs. limit=13.98
+2024-08-25 03:17:26,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8640.0, ans=0.2136
+2024-08-25 03:17:37,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=8693.333333333334, ans=0.125
+2024-08-25 03:18:57,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.30 vs. limit=14.059999999999999
+2024-08-25 03:18:58,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.18 vs. limit=14.059999999999999
+2024-08-25 03:19:09,301 INFO [train.py:1114] (3/4) Epoch 1, batch 1650, loss[loss=0.5449, simple_loss=0.4644, pruned_loss=0.22, ctc_loss=0.4336, over 19670.00 frames. ], tot_loss[loss=0.5151, simple_loss=0.4351, pruned_loss=0.2119, ctc_loss=0.403, over 3834030.75 frames. ], batch size: 59, lr: 4.45e-02, grad_scale: 16.0
+2024-08-25 03:19:17,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=8800.0, ans=0.125
+2024-08-25 03:19:19,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8800.0, ans=0.212
+2024-08-25 03:19:26,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=8853.333333333334, ans=0.029777777777777778
+2024-08-25 03:19:37,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.84 vs. limit=10.84
+2024-08-25 03:19:52,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.08 vs. limit=4.344
+2024-08-25 03:19:53,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.08 vs. limit=14.219999999999999
+2024-08-25 03:19:59,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=9013.333333333334, ans=0.8401333333333333
+2024-08-25 03:20:12,475 INFO [train.py:1114] (3/4) Epoch 1, batch 1700, loss[loss=0.3802, simple_loss=0.3528, pruned_loss=0.1412, ctc_loss=0.2761, over 19677.00 frames. ], tot_loss[loss=0.5042, simple_loss=0.4304, pruned_loss=0.2052, ctc_loss=0.3912, over 3848340.67 frames. ], batch size: 46, lr: 4.44e-02, grad_scale: 16.0
+2024-08-25 03:20:19,814 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.395e+02 2.888e+02 3.702e+02 8.491e+02, threshold=5.776e+02, percent-clipped=2.0
+2024-08-25 03:20:22,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9066.666666666666, ans=0.20933333333333334
+2024-08-25 03:20:26,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.14 vs. limit=10.92
+2024-08-25 03:20:35,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=9120.0, ans=0.5808
+2024-08-25 03:20:41,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.39 vs. limit=10.94
+2024-08-25 03:20:52,443 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:22:14,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9226.666666666666, ans=0.20773333333333333
+2024-08-25 03:22:23,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=9280.0, ans=0.025
+2024-08-25 03:22:33,848 INFO [train.py:1114] (3/4) Epoch 1, batch 1750, loss[loss=0.4259, simple_loss=0.3779, pruned_loss=0.1699, ctc_loss=0.3132, over 19672.00 frames. ], tot_loss[loss=0.4942, simple_loss=0.4257, pruned_loss=0.1996, ctc_loss=0.3813, over 3853438.43 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 16.0
+2024-08-25 03:22:42,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9333.333333333334, ans=0.20666666666666667
+2024-08-25 03:22:49,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=9386.666666666666, ans=0.025
+2024-08-25 03:22:50,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=9386.666666666666, ans=0.125
+2024-08-25 03:22:56,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9440.0, ans=0.125
+2024-08-25 03:23:02,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=9440.0, ans=0.027333333333333334
+2024-08-25 03:23:07,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.39 vs. limit=14.620000000000001
+2024-08-25 03:23:27,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=11.08
+2024-08-25 03:23:29,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=9546.666666666666, ans=0.125
+2024-08-25 03:23:31,431 INFO [train.py:1114] (3/4) Epoch 1, batch 1800, loss[loss=0.4637, simple_loss=0.4305, pruned_loss=0.1753, ctc_loss=0.3371, over 19613.00 frames. ], tot_loss[loss=0.4855, simple_loss=0.4221, pruned_loss=0.1946, ctc_loss=0.372, over 3854221.20 frames. ], batch size: 55, lr: 4.44e-02, grad_scale: 8.0
+2024-08-25 03:23:38,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9600.0, ans=0.125
+2024-08-25 03:23:39,410 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.646e+02 3.473e+02 4.220e+02 8.344e+02, threshold=6.945e+02, percent-clipped=3.0
+2024-08-25 03:24:14,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=9760.0, ans=0.5584
+2024-08-25 03:24:35,841 INFO [train.py:1114] (3/4) Epoch 1, batch 1850, loss[loss=0.4616, simple_loss=0.4149, pruned_loss=0.1825, ctc_loss=0.3424, over 19592.00 frames. ], tot_loss[loss=0.4766, simple_loss=0.4183, pruned_loss=0.1897, ctc_loss=0.3632, over 3859030.19 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 8.0
+2024-08-25 03:24:43,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=9866.666666666666, ans=0.008724637681159421
+2024-08-25 03:24:52,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=9920.0, ans=0.125
+2024-08-25 03:25:12,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.34 vs. limit=9.986666666666668
+2024-08-25 03:25:15,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=10026.666666666666, ans=0.125
+2024-08-25 03:25:30,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=10080.0, ans=0.125
+2024-08-25 03:25:32,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.27 vs. limit=15.059999999999999
+2024-08-25 03:25:40,384 INFO [train.py:1114] (3/4) Epoch 1, batch 1900, loss[loss=0.4538, simple_loss=0.4204, pruned_loss=0.1713, ctc_loss=0.3477, over 19655.00 frames. ], tot_loss[loss=0.4735, simple_loss=0.4178, pruned_loss=0.188, ctc_loss=0.3601, over 3863302.01 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 8.0
+2024-08-25 03:25:48,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 2.554e+02 2.990e+02 4.033e+02 8.041e+02, threshold=5.979e+02, percent-clipped=3.0
+2024-08-25 03:26:02,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=10240.0, ans=0.125
+2024-08-25 03:26:07,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=10240.0, ans=0.5416000000000001
+2024-08-25 03:26:14,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=10293.333333333334, ans=11.36
+2024-08-25 03:26:18,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=10293.333333333334, ans=0.35440000000000005
+2024-08-25 03:26:21,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=10293.333333333334, ans=0.008631884057971015
+2024-08-25 03:26:24,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=10293.333333333334, ans=0.35440000000000005
+2024-08-25 03:26:38,014 INFO [train.py:1114] (3/4) Epoch 1, batch 1950, loss[loss=0.4127, simple_loss=0.3907, pruned_loss=0.1565, ctc_loss=0.2976, over 19608.00 frames. ], tot_loss[loss=0.4682, simple_loss=0.4167, pruned_loss=0.1849, ctc_loss=0.3545, over 3871559.95 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 8.0
+2024-08-25 03:26:39,933 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.22 vs. limit=15.3
+2024-08-25 03:27:02,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=10506.666666666666, ans=0.125
+2024-08-25 03:27:19,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=4.584
+2024-08-25 03:27:23,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=10613.333333333334, ans=0.02244444444444444
+2024-08-25 03:27:32,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=10613.333333333334, ans=0.02244444444444444
+2024-08-25 03:27:36,524 INFO [train.py:1114] (3/4) Epoch 1, batch 2000, loss[loss=0.398, simple_loss=0.3674, pruned_loss=0.1543, ctc_loss=0.3001, over 19619.00 frames. ], tot_loss[loss=0.4638, simple_loss=0.4155, pruned_loss=0.1827, ctc_loss=0.3505, over 3857002.57 frames. ], batch size: 45, lr: 4.42e-02, grad_scale: 16.0
+2024-08-25 03:27:44,894 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.863e+02 2.508e+02 3.011e+02 3.695e+02 6.472e+02, threshold=6.022e+02, percent-clipped=1.0
+2024-08-25 03:27:46,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=10666.666666666666, ans=0.5266666666666667
+2024-08-25 03:28:01,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=10773.333333333334, ans=0.021777777777777774
+2024-08-25 03:28:08,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=11.54
+2024-08-25 03:28:13,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.77 vs. limit=11.559999999999999
+2024-08-25 03:28:19,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=4.6240000000000006
+2024-08-25 03:28:41,983 INFO [train.py:1114] (3/4) Epoch 1, batch 2050, loss[loss=0.3962, simple_loss=0.366, pruned_loss=0.1551, ctc_loss=0.2908, over 19731.00 frames. ], tot_loss[loss=0.4567, simple_loss=0.4121, pruned_loss=0.1794, ctc_loss=0.3437, over 3852270.95 frames. ], batch size: 47, lr: 4.42e-02, grad_scale: 16.0
+2024-08-25 03:28:43,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.20 vs. limit=11.6
+2024-08-25 03:28:53,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=10933.333333333334, ans=0.5173333333333334
+2024-08-25 03:29:13,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=10986.666666666666, ans=0.07
+2024-08-25 03:30:20,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=11093.333333333334, ans=0.0
+2024-08-25 03:30:34,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=11146.666666666666, ans=10.0
+2024-08-25 03:30:37,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11146.666666666666, ans=0.020222222222222228
+2024-08-25 03:30:37,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=7.786666666666667
+2024-08-25 03:31:02,649 INFO [train.py:1114] (3/4) Epoch 1, batch 2100, loss[loss=0.3888, simple_loss=0.3831, pruned_loss=0.1421, ctc_loss=0.2757, over 19783.00 frames. ], tot_loss[loss=0.4496, simple_loss=0.4088, pruned_loss=0.1757, ctc_loss=0.3375, over 3858651.34 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 16.0
+2024-08-25 03:31:08,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11200.0, ans=0.188
+2024-08-25 03:31:19,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.953e+02 2.443e+02 2.901e+02 4.101e+02 7.108e+02, threshold=5.802e+02, percent-clipped=5.0
+2024-08-25 03:31:25,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.34 vs. limit=10.626666666666667
+2024-08-25 03:31:29,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.42 vs. limit=11.719999999999999
+2024-08-25 03:31:39,436 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.82 vs. limit=10.626666666666667
+2024-08-25 03:31:41,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=11306.666666666666, ans=0.05
+2024-08-25 03:32:07,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=11360.0, ans=0.019333333333333338
+2024-08-25 03:32:10,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=4.712
+2024-08-25 03:32:26,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=11413.333333333334, ans=0.0
+2024-08-25 03:32:32,794 INFO [train.py:1114] (3/4) Epoch 1, batch 2150, loss[loss=0.4278, simple_loss=0.4032, pruned_loss=0.164, ctc_loss=0.311, over 19848.00 frames. ], tot_loss[loss=0.4442, simple_loss=0.4067, pruned_loss=0.1729, ctc_loss=0.3319, over 3870799.03 frames. ], batch size: 52, lr: 4.41e-02, grad_scale: 8.0
+2024-08-25 03:32:42,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11466.666666666666, ans=0.018888888888888893
+2024-08-25 03:33:34,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=11626.666666666666, ans=0.18373333333333333
+2024-08-25 03:33:39,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.02 vs. limit=11.86
+2024-08-25 03:33:45,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.73 vs. limit=11.879999999999999
+2024-08-25 03:33:57,385 INFO [train.py:1114] (3/4) Epoch 1, batch 2200, loss[loss=0.4565, simple_loss=0.4271, pruned_loss=0.1755, ctc_loss=0.3374, over 19589.00 frames. ], tot_loss[loss=0.4407, simple_loss=0.4054, pruned_loss=0.1711, ctc_loss=0.3289, over 3868780.41 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 8.0
+2024-08-25 03:33:58,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=11733.333333333334, ans=0.025
+2024-08-25 03:34:06,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11733.333333333334, ans=0.18266666666666664
+2024-08-25 03:34:08,401 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.911e+02 2.628e+02 3.380e+02 4.438e+02 7.655e+02, threshold=6.760e+02, percent-clipped=12.0
+2024-08-25 03:34:09,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=11786.666666666666, ans=0.125
+2024-08-25 03:34:40,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=11893.333333333334, ans=0.125
+2024-08-25 03:35:03,300 INFO [train.py:1114] (3/4) Epoch 1, batch 2250, loss[loss=0.4221, simple_loss=0.4117, pruned_loss=0.1572, ctc_loss=0.2952, over 19623.00 frames. ], tot_loss[loss=0.437, simple_loss=0.4043, pruned_loss=0.1691, ctc_loss=0.3244, over 3868154.38 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 8.0
+2024-08-25 03:35:04,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12000.0, ans=0.18
+2024-08-25 03:35:10,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=12000.0, ans=0.01666666666666667
+2024-08-25 03:35:34,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=12106.666666666666, ans=0.016222222222222228
+2024-08-25 03:35:35,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=12106.666666666666, ans=0.025
+2024-08-25 03:35:39,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12160.0, ans=0.1784
+2024-08-25 03:35:50,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=12213.333333333334, ans=0.025
+2024-08-25 03:35:54,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.11 vs. limit=8.053333333333335
+2024-08-25 03:36:01,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=12213.333333333334, ans=0.008214492753623188
+2024-08-25 03:36:03,060 INFO [train.py:1114] (3/4) Epoch 1, batch 2300, loss[loss=0.3666, simple_loss=0.3645, pruned_loss=0.132, ctc_loss=0.2619, over 19494.00 frames. ], tot_loss[loss=0.432, simple_loss=0.4014, pruned_loss=0.1667, ctc_loss=0.3195, over 3861800.96 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 8.0
+2024-08-25 03:36:06,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=12266.666666666666, ans=0.05
+2024-08-25 03:36:12,293 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.546e+02 3.099e+02 3.956e+02 8.242e+02, threshold=6.199e+02, percent-clipped=6.0
+2024-08-25 03:36:22,689 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.20 vs. limit=8.928
+2024-08-25 03:36:29,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=4.856
+2024-08-25 03:36:40,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=12426.666666666666, ans=0.125
+2024-08-25 03:36:57,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.79 vs. limit=12.18
+2024-08-25 03:37:00,703 INFO [train.py:1114] (3/4) Epoch 1, batch 2350, loss[loss=0.4845, simple_loss=0.4408, pruned_loss=0.1913, ctc_loss=0.3639, over 19678.00 frames. ], tot_loss[loss=0.4282, simple_loss=0.3999, pruned_loss=0.1645, ctc_loss=0.316, over 3863961.48 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 8.0
+2024-08-25 03:37:16,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12586.666666666666, ans=0.125
+2024-08-25 03:37:29,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=12640.0, ans=10.0
+2024-08-25 03:37:41,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=12693.333333333334, ans=0.125
+2024-08-25 03:37:41,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=12693.333333333334, ans=0.125
+2024-08-25 03:37:47,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.15 vs. limit=17.060000000000002
+2024-08-25 03:37:53,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=12746.666666666666, ans=0.008098550724637681
+2024-08-25 03:37:55,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=12746.666666666666, ans=0.008098550724637681
+2024-08-25 03:37:59,414 INFO [train.py:1114] (3/4) Epoch 1, batch 2400, loss[loss=0.4501, simple_loss=0.4235, pruned_loss=0.1723, ctc_loss=0.3306, over 19283.00 frames. ], tot_loss[loss=0.4309, simple_loss=0.4028, pruned_loss=0.1656, ctc_loss=0.3177, over 3858589.54 frames. ], batch size: 71, lr: 4.39e-02, grad_scale: 16.0
+2024-08-25 03:38:08,240 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.522e+02 3.053e+02 3.990e+02 1.210e+03, threshold=6.106e+02, percent-clipped=3.0
+2024-08-25 03:38:12,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12853.333333333334, ans=0.125
+2024-08-25 03:38:19,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=20.84 vs. limit=12.32
+2024-08-25 03:38:20,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=12906.666666666666, ans=0.125
+2024-08-25 03:38:31,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=12906.666666666666, ans=0.00806376811594203
+2024-08-25 03:38:47,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=22.13 vs. limit=12.379999999999999
+2024-08-25 03:38:53,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=12.379999999999999
+2024-08-25 03:39:03,891 INFO [train.py:1114] (3/4) Epoch 1, batch 2450, loss[loss=0.5241, simple_loss=0.4447, pruned_loss=0.2163, ctc_loss=0.4273, over 13283.00 frames. ], tot_loss[loss=0.4386, simple_loss=0.4073, pruned_loss=0.1696, ctc_loss=0.3247, over 3730669.53 frames. ], batch size: 141, lr: 4.39e-02, grad_scale: 16.0
+2024-08-25 03:39:07,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=13066.666666666666, ans=0.012222222222222225
+2024-08-25 03:39:16,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=13120.0, ans=0.125
+2024-08-25 03:39:17,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.68 vs. limit=12.42
+2024-08-25 03:39:23,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.48 vs. limit=8.28
+2024-08-25 03:39:25,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13120.0, ans=0.125
+2024-08-25 03:39:38,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=13226.666666666666, ans=0.125
+2024-08-25 03:40:43,720 INFO [train.py:1114] (3/4) Epoch 2, batch 0, loss[loss=0.4414, simple_loss=0.3996, pruned_loss=0.1745, ctc_loss=0.3355, over 19819.00 frames. ], tot_loss[loss=0.4414, simple_loss=0.3996, pruned_loss=0.1745, ctc_loss=0.3355, over 19819.00 frames. ], batch size: 49, lr: 4.30e-02, grad_scale: 32.0
+2024-08-25 03:40:43,721 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-25 03:40:51,833 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.7546, 3.5714, 2.7950, 3.5909], device='cuda:3')
+2024-08-25 03:40:53,928 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.5933, 3.0181, 3.3461, 3.1235], device='cuda:3')
+2024-08-25 03:40:55,166 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.3317, simple_loss=0.3718, pruned_loss=0.1058, ctc_loss=0.2, over 944034.00 frames.
+2024-08-25 03:40:55,167 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB
+2024-08-25 03:41:12,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=13333.333333333334, ans=0.007971014492753623
+2024-08-25 03:41:17,114 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.388e+02 2.818e+02 3.444e+02 6.577e+02, threshold=5.636e+02, percent-clipped=3.0
+2024-08-25 03:41:21,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.96 vs. limit=9.354666666666667
+2024-08-25 03:41:22,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=13386.666666666666, ans=0.4008
+2024-08-25 03:41:24,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.96 vs. limit=5.008
+2024-08-25 03:41:29,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=12.52
+2024-08-25 03:41:57,958 INFO [train.py:1114] (3/4) Epoch 2, batch 50, loss[loss=0.3573, simple_loss=0.3566, pruned_loss=0.1298, ctc_loss=0.2458, over 19694.00 frames. ], tot_loss[loss=0.4201, simple_loss=0.3989, pruned_loss=0.1595, ctc_loss=0.3058, over 844976.60 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0
+2024-08-25 03:42:08,841 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.17 vs. limit=8.386666666666667
+2024-08-25 03:43:15,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=13706.666666666666, ans=0.007889855072463769
+2024-08-25 03:43:18,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=13706.666666666666, ans=0.007889855072463769
+2024-08-25 03:43:36,873 INFO [train.py:1114] (3/4) Epoch 2, batch 100, loss[loss=0.4122, simple_loss=0.3896, pruned_loss=0.1577, ctc_loss=0.2983, over 19724.00 frames. ], tot_loss[loss=0.4193, simple_loss=0.3996, pruned_loss=0.1588, ctc_loss=0.3037, over 1499358.22 frames. ], batch size: 51, lr: 4.29e-02, grad_scale: 16.0
+2024-08-25 03:43:42,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=13813.333333333334, ans=0.125
+2024-08-25 03:44:02,831 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.500e+02 2.916e+02 3.893e+02 6.295e+02, threshold=5.832e+02, percent-clipped=2.0
+2024-08-25 03:44:19,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.85 vs. limit=17.98
+2024-08-25 03:44:20,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=13973.333333333334, ans=0.07
+2024-08-25 03:44:34,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.17 vs. limit=8.506666666666666
+2024-08-25 03:44:38,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14026.666666666666, ans=0.15973333333333334
+2024-08-25 03:44:42,784 INFO [train.py:1114] (3/4) Epoch 2, batch 150, loss[loss=0.3652, simple_loss=0.3575, pruned_loss=0.1352, ctc_loss=0.2564, over 19711.00 frames. ], tot_loss[loss=0.4102, simple_loss=0.3941, pruned_loss=0.1541, ctc_loss=0.2951, over 2027441.50 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 16.0
+2024-08-25 03:44:49,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=14080.0, ans=0.125
+2024-08-25 03:45:14,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=14186.666666666666, ans=0.025
+2024-08-25 03:45:18,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14240.0, ans=0.15760000000000002
+2024-08-25 03:45:22,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=14240.0, ans=0.09899494936611666
+2024-08-25 03:45:34,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=14293.333333333334, ans=0.125
+2024-08-25 03:45:42,121 INFO [train.py:1114] (3/4) Epoch 2, batch 200, loss[loss=0.44, simple_loss=0.4145, pruned_loss=0.1677, ctc_loss=0.3252, over 18255.00 frames. ], tot_loss[loss=0.4037, simple_loss=0.3899, pruned_loss=0.1508, ctc_loss=0.2896, over 2435304.42 frames. ], batch size: 85, lr: 4.28e-02, grad_scale: 16.0
+2024-08-25 03:46:06,457 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.951e+02 2.445e+02 2.940e+02 3.728e+02 6.995e+02, threshold=5.880e+02, percent-clipped=3.0
+2024-08-25 03:46:06,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=14453.333333333334, ans=0.125
+2024-08-25 03:46:45,921 INFO [train.py:1114] (3/4) Epoch 2, batch 250, loss[loss=0.4225, simple_loss=0.4091, pruned_loss=0.1576, ctc_loss=0.3019, over 19406.00 frames. ], tot_loss[loss=0.4032, simple_loss=0.3895, pruned_loss=0.1507, ctc_loss=0.2887, over 2754696.94 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 16.0
+2024-08-25 03:46:55,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=14613.333333333334, ans=0.0057777777777777775
+2024-08-25 03:47:00,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=14666.666666666666, ans=0.125
+2024-08-25 03:47:19,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=13.02
+2024-08-25 03:47:33,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.24 vs. limit=13.04
+2024-08-25 03:47:50,845 INFO [train.py:1114] (3/4) Epoch 2, batch 300, loss[loss=0.3829, simple_loss=0.3838, pruned_loss=0.1366, ctc_loss=0.272, over 19500.00 frames. ], tot_loss[loss=0.4, simple_loss=0.3879, pruned_loss=0.149, ctc_loss=0.2854, over 2999500.08 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 16.0
+2024-08-25 03:47:52,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=14880.0, ans=0.004666666666666666
+2024-08-25 03:48:13,154 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.396e+02 2.818e+02 3.488e+02 8.647e+02, threshold=5.636e+02, percent-clipped=6.0
+2024-08-25 03:48:34,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.17 vs. limit=8.76
+2024-08-25 03:48:36,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=15040.0, ans=12.52
+2024-08-25 03:48:37,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=15093.333333333334, ans=0.37173333333333336
+2024-08-25 03:48:50,394 INFO [train.py:1114] (3/4) Epoch 2, batch 350, loss[loss=0.3854, simple_loss=0.3657, pruned_loss=0.1469, ctc_loss=0.2783, over 19780.00 frames. ], tot_loss[loss=0.3977, simple_loss=0.3871, pruned_loss=0.1476, ctc_loss=0.2831, over 3190079.29 frames. ], batch size: 48, lr: 4.27e-02, grad_scale: 16.0
+2024-08-25 03:49:37,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=15253.333333333334, ans=0.125
+2024-08-25 03:49:56,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15306.666666666666, ans=0.14693333333333333
+2024-08-25 03:50:06,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15360.0, ans=0.1464
+2024-08-25 03:50:17,354 INFO [train.py:1114] (3/4) Epoch 2, batch 400, loss[loss=0.3822, simple_loss=0.3915, pruned_loss=0.135, ctc_loss=0.2574, over 19496.00 frames. ], tot_loss[loss=0.3965, simple_loss=0.3863, pruned_loss=0.147, ctc_loss=0.2819, over 3342521.02 frames. ], batch size: 54, lr: 4.26e-02, grad_scale: 32.0
+2024-08-25 03:50:32,991 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:50:36,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=13.3
+2024-08-25 03:50:39,712 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.407e+02 2.984e+02 3.456e+02 5.488e+02, threshold=5.968e+02, percent-clipped=0.0
+2024-08-25 03:51:15,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15680.0, ans=0.14320000000000002
+2024-08-25 03:51:19,335 INFO [train.py:1114] (3/4) Epoch 2, batch 450, loss[loss=0.3785, simple_loss=0.3751, pruned_loss=0.1387, ctc_loss=0.2613, over 19623.00 frames. ], tot_loss[loss=0.3952, simple_loss=0.3856, pruned_loss=0.1464, ctc_loss=0.28, over 3449587.75 frames. ], batch size: 55, lr: 4.26e-02, grad_scale: 32.0
+2024-08-25 03:51:34,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=15733.333333333334, ans=0.125
+2024-08-25 03:51:36,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=15733.333333333334, ans=0.125
+2024-08-25 03:51:40,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15733.333333333334, ans=0.125
+2024-08-25 03:51:46,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.10 vs. limit=19.34
+2024-08-25 03:51:50,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=15786.666666666666, ans=0.125
+2024-08-25 03:51:51,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.18 vs. limit=13.42
+2024-08-25 03:52:14,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=15893.333333333334, ans=0.125
+2024-08-25 03:52:17,538 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:52:21,862 INFO [train.py:1114] (3/4) Epoch 2, batch 500, loss[loss=0.409, simple_loss=0.398, pruned_loss=0.1518, ctc_loss=0.2908, over 19682.00 frames. ], tot_loss[loss=0.3916, simple_loss=0.3833, pruned_loss=0.1446, ctc_loss=0.2768, over 3545523.35 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 16.0
+2024-08-25 03:53:00,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.29 vs. limit=13.0
+2024-08-25 03:53:11,992 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.425e+02 3.079e+02 3.995e+02 1.154e+03, threshold=6.159e+02, percent-clipped=13.0
+2024-08-25 03:53:33,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=16106.666666666666, ans=0.125
+2024-08-25 03:53:33,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=16106.666666666666, ans=0.3362666666666667
+2024-08-25 03:53:44,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16160.0, ans=0.125
+2024-08-25 03:53:49,955 INFO [train.py:1114] (3/4) Epoch 2, batch 550, loss[loss=0.43, simple_loss=0.4179, pruned_loss=0.1602, ctc_loss=0.3043, over 19287.00 frames. ], tot_loss[loss=0.3902, simple_loss=0.3826, pruned_loss=0.1439, ctc_loss=0.275, over 3607067.87 frames. ], batch size: 71, lr: 4.25e-02, grad_scale: 16.0
+2024-08-25 03:53:51,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16213.333333333334, ans=0.125
+2024-08-25 03:54:04,865 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 03:54:08,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=13.6
+2024-08-25 03:54:09,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=16266.666666666666, ans=10.0
+2024-08-25 03:54:14,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=16320.0, ans=0.007321739130434783
+2024-08-25 03:54:15,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=16320.0, ans=0.0
+2024-08-25 03:54:26,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=16373.333333333334, ans=0.125
+2024-08-25 03:54:31,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16373.333333333334, ans=0.125
+2024-08-25 03:54:51,528 INFO [train.py:1114] (3/4) Epoch 2, batch 600, loss[loss=0.4684, simple_loss=0.4332, pruned_loss=0.1815, ctc_loss=0.3515, over 19377.00 frames. ], tot_loss[loss=0.3886, simple_loss=0.3819, pruned_loss=0.143, ctc_loss=0.2731, over 3665159.69 frames. ], batch size: 67, lr: 4.24e-02, grad_scale: 16.0
+2024-08-25 03:54:57,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16480.0, ans=0.125
+2024-08-25 03:54:57,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=16480.0, ans=0.125
+2024-08-25 03:55:03,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=16533.333333333332, ans=0.025
+2024-08-25 03:55:04,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16533.333333333332, ans=0.125
+2024-08-25 03:55:14,973 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.336e+02 2.753e+02 3.494e+02 8.105e+02, threshold=5.507e+02, percent-clipped=1.0
+2024-08-25 03:55:15,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=16586.666666666668, ans=0.125
+2024-08-25 03:55:20,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16586.666666666668, ans=0.13413333333333333
+2024-08-25 03:55:46,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.85 vs. limit=13.759999999999998
+2024-08-25 03:55:56,157 INFO [train.py:1114] (3/4) Epoch 2, batch 650, loss[loss=0.3535, simple_loss=0.3639, pruned_loss=0.1241, ctc_loss=0.2372, over 19757.00 frames. ], tot_loss[loss=0.3854, simple_loss=0.3801, pruned_loss=0.1413, ctc_loss=0.27, over 3715297.44 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 16.0
+2024-08-25 03:56:45,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=16960.0, ans=0.13040000000000002
+2024-08-25 03:56:49,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=16960.0, ans=0.0
+2024-08-25 03:56:56,407 INFO [train.py:1114] (3/4) Epoch 2, batch 700, loss[loss=0.391, simple_loss=0.3847, pruned_loss=0.1444, ctc_loss=0.2712, over 19719.00 frames. ], tot_loss[loss=0.3845, simple_loss=0.38, pruned_loss=0.1407, ctc_loss=0.2689, over 3747694.77 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 16.0
+2024-08-25 03:56:57,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.94 vs. limit=13.879999999999999
+2024-08-25 03:57:00,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17013.333333333332, ans=0.12986666666666669
+2024-08-25 03:57:03,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=17013.333333333332, ans=0.125
+2024-08-25 03:57:05,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=17013.333333333332, ans=0.125
+2024-08-25 03:57:12,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.94 vs. limit=13.9
+2024-08-25 03:57:23,237 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.791e+02 2.519e+02 2.895e+02 3.628e+02 6.087e+02, threshold=5.790e+02, percent-clipped=2.0
+2024-08-25 03:57:28,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.01 vs. limit=20.34
+2024-08-25 03:57:53,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=17226.666666666668, ans=0.125
+2024-08-25 03:57:56,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=17226.666666666668, ans=0.125
+2024-08-25 03:58:01,089 INFO [train.py:1114] (3/4) Epoch 2, batch 750, loss[loss=0.3548, simple_loss=0.3697, pruned_loss=0.1219, ctc_loss=0.24, over 19518.00 frames. ], tot_loss[loss=0.3825, simple_loss=0.3787, pruned_loss=0.1397, ctc_loss=0.2671, over 3772835.56 frames. 
], batch size: 54, lr: 4.23e-02, grad_scale: 16.0 +2024-08-25 03:58:19,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=17333.333333333332, ans=0.0 +2024-08-25 03:58:37,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=17440.0, ans=0.9243999999999999 +2024-08-25 03:58:40,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=17440.0, ans=0.125 +2024-08-25 04:00:11,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=17493.333333333332, ans=0.007066666666666667 +2024-08-25 04:00:16,087 INFO [train.py:1114] (3/4) Epoch 2, batch 800, loss[loss=0.3323, simple_loss=0.3423, pruned_loss=0.1174, ctc_loss=0.2188, over 19833.00 frames. ], tot_loss[loss=0.3819, simple_loss=0.3785, pruned_loss=0.1394, ctc_loss=0.2663, over 3794352.44 frames. ], batch size: 49, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 04:00:18,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=17546.666666666668, ans=0.2858666666666667 +2024-08-25 04:00:39,330 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.611e+02 3.088e+02 3.881e+02 9.768e+02, threshold=6.176e+02, percent-clipped=6.0 +2024-08-25 04:00:39,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=17653.333333333332, ans=0.0 +2024-08-25 04:01:02,578 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:01:12,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=17760.0, ans=0.0 +2024-08-25 04:01:15,042 INFO [train.py:1114] (3/4) Epoch 2, batch 850, loss[loss=0.4404, simple_loss=0.4167, pruned_loss=0.1681, ctc_loss=0.32, over 19660.00 frames. ], tot_loss[loss=0.3792, simple_loss=0.3768, pruned_loss=0.1381, ctc_loss=0.2637, over 3814929.35 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 04:01:32,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.94 vs. limit=11.146666666666668 +2024-08-25 04:02:11,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18026.666666666668, ans=0.0 +2024-08-25 04:02:18,995 INFO [train.py:1114] (3/4) Epoch 2, batch 900, loss[loss=0.3678, simple_loss=0.3639, pruned_loss=0.1333, ctc_loss=0.2625, over 19419.00 frames. ], tot_loss[loss=0.38, simple_loss=0.3772, pruned_loss=0.1386, ctc_loss=0.2641, over 3818536.94 frames. ], batch size: 48, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 04:02:45,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.62 vs. 
limit=11.232 +2024-08-25 04:03:03,829 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.814e+02 2.530e+02 3.033e+02 3.602e+02 3.379e+03, threshold=6.066e+02, percent-clipped=6.0 +2024-08-25 04:03:06,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18186.666666666668, ans=0.125 +2024-08-25 04:03:30,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=18293.333333333332, ans=0.025 +2024-08-25 04:03:34,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18293.333333333332, ans=0.11706666666666668 +2024-08-25 04:03:36,918 INFO [train.py:1114] (3/4) Epoch 2, batch 950, loss[loss=0.3708, simple_loss=0.3672, pruned_loss=0.136, ctc_loss=0.2555, over 19495.00 frames. ], tot_loss[loss=0.379, simple_loss=0.3766, pruned_loss=0.138, ctc_loss=0.2629, over 3819408.56 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 04:04:01,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=18400.0, ans=0.256 +2024-08-25 04:04:09,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=18453.333333333332, ans=0.25413333333333343 +2024-08-25 04:04:11,740 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.23 vs. limit=11.381333333333334 +2024-08-25 04:04:12,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=18453.333333333332, ans=0.125 +2024-08-25 04:04:18,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=18506.666666666668, ans=0.25226666666666675 +2024-08-25 04:04:27,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=18560.0, ans=0.006834782608695652 +2024-08-25 04:04:33,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.40 vs. limit=21.42 +2024-08-25 04:04:39,319 INFO [train.py:1114] (3/4) Epoch 2, batch 1000, loss[loss=0.4116, simple_loss=0.3915, pruned_loss=0.1559, ctc_loss=0.3, over 19839.00 frames. ], tot_loss[loss=0.3792, simple_loss=0.377, pruned_loss=0.1381, ctc_loss=0.2627, over 3815288.34 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 04:05:05,784 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.321e+02 2.743e+02 3.485e+02 6.350e+02, threshold=5.486e+02, percent-clipped=2.0 +2024-08-25 04:05:06,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.15 vs. limit=14.52 +2024-08-25 04:05:09,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.23 vs. limit=5.808 +2024-08-25 04:05:30,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.22 vs. 
limit=21.619999999999997 +2024-08-25 04:05:41,812 INFO [train.py:1114] (3/4) Epoch 2, batch 1050, loss[loss=0.4339, simple_loss=0.4255, pruned_loss=0.1617, ctc_loss=0.2972, over 19854.00 frames. ], tot_loss[loss=0.3768, simple_loss=0.3757, pruned_loss=0.1369, ctc_loss=0.2603, over 3821712.04 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 04:06:34,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=19093.333333333332, ans=0.23173333333333346 +2024-08-25 04:06:38,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=19093.333333333332, ans=0.125 +2024-08-25 04:06:44,159 INFO [train.py:1114] (3/4) Epoch 2, batch 1100, loss[loss=0.3538, simple_loss=0.3589, pruned_loss=0.1249, ctc_loss=0.2472, over 19594.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.3751, pruned_loss=0.1364, ctc_loss=0.259, over 3829183.81 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 04:07:00,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=19200.0, ans=0.125 +2024-08-25 04:07:11,083 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.465e+02 2.960e+02 4.039e+02 7.406e+02, threshold=5.919e+02, percent-clipped=11.0 +2024-08-25 04:08:08,080 INFO [train.py:1114] (3/4) Epoch 2, batch 1150, loss[loss=0.359, simple_loss=0.3671, pruned_loss=0.1285, ctc_loss=0.2344, over 19585.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.3751, pruned_loss=0.1364, ctc_loss=0.2587, over 3829453.92 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 04:08:08,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.94 vs. limit=11.765333333333333 +2024-08-25 04:08:09,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=19413.333333333332, ans=0.125 +2024-08-25 04:08:13,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=19413.333333333332, ans=0.125 +2024-08-25 04:08:17,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.61 vs. limit=14.780000000000001 +2024-08-25 04:08:18,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=19413.333333333332, ans=0.125 +2024-08-25 04:08:26,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=19466.666666666668, ans=0.05533333333333329 +2024-08-25 04:08:29,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.20 vs. 
limit=22.1 +2024-08-25 04:08:50,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=19573.333333333332, ans=0.21493333333333342 +2024-08-25 04:08:55,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=19626.666666666668, ans=0.125 +2024-08-25 04:08:55,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=19626.666666666668, ans=0.025 +2024-08-25 04:08:58,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.83 vs. limit=14.86 +2024-08-25 04:09:01,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=19626.666666666668, ans=0.125 +2024-08-25 04:09:08,082 INFO [train.py:1114] (3/4) Epoch 2, batch 1200, loss[loss=0.3684, simple_loss=0.3832, pruned_loss=0.1276, ctc_loss=0.2464, over 19835.00 frames. ], tot_loss[loss=0.3769, simple_loss=0.3762, pruned_loss=0.1368, ctc_loss=0.2595, over 3825699.81 frames. ], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 04:09:26,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=19733.333333333332, ans=0.125 +2024-08-25 04:09:32,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.02 vs. limit=14.9 +2024-08-25 04:09:34,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=19786.666666666668, ans=0.0065681159420289854 +2024-08-25 04:09:36,230 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 2.637e+02 3.065e+02 4.000e+02 6.600e+02, threshold=6.130e+02, percent-clipped=2.0 +2024-08-25 04:09:57,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=19840.0, ans=0.125 +2024-08-25 04:10:03,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=19893.333333333332, ans=0.125 +2024-08-25 04:10:08,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=19893.333333333332, ans=0.125 +2024-08-25 04:10:11,975 INFO [train.py:1114] (3/4) Epoch 2, batch 1250, loss[loss=0.4067, simple_loss=0.4018, pruned_loss=0.1494, ctc_loss=0.2817, over 19560.00 frames. ], tot_loss[loss=0.3758, simple_loss=0.376, pruned_loss=0.1361, ctc_loss=0.2581, over 3843546.43 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 04:10:27,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=20000.0, ans=10.0 +2024-08-25 04:10:29,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=20000.0, ans=0.2 +2024-08-25 04:10:42,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.61 vs. limit=12.0 +2024-08-25 04:10:52,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.05 vs. 
limit=15.0 +2024-08-25 04:11:07,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20160.0, ans=0.125 +2024-08-25 04:11:08,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20160.0, ans=0.125 +2024-08-25 04:11:14,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=20213.333333333332, ans=0.025 +2024-08-25 04:11:15,946 INFO [train.py:1114] (3/4) Epoch 2, batch 1300, loss[loss=0.468, simple_loss=0.4275, pruned_loss=0.1864, ctc_loss=0.3392, over 18948.00 frames. ], tot_loss[loss=0.3742, simple_loss=0.375, pruned_loss=0.1353, ctc_loss=0.2566, over 3847154.83 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 04:11:28,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=20266.666666666668, ans=0.125 +2024-08-25 04:11:32,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20266.666666666668, ans=0.1 +2024-08-25 04:11:36,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=20266.666666666668, ans=0.2 +2024-08-25 04:11:41,996 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.187e+02 2.429e+02 2.931e+02 4.736e+02, threshold=4.858e+02, percent-clipped=0.0 +2024-08-25 04:11:48,331 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:11:55,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20373.333333333332, ans=0.1 +2024-08-25 04:11:57,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=15.0 +2024-08-25 04:11:59,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.89 vs. limit=22.5 +2024-08-25 04:12:04,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.21 vs. limit=15.0 +2024-08-25 04:12:15,273 INFO [train.py:1114] (3/4) Epoch 2, batch 1350, loss[loss=0.3496, simple_loss=0.3623, pruned_loss=0.1202, ctc_loss=0.2413, over 19775.00 frames. ], tot_loss[loss=0.3707, simple_loss=0.373, pruned_loss=0.1336, ctc_loss=0.2531, over 3858684.74 frames. 
], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:12:39,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=20586.666666666668, ans=0.025 +2024-08-25 04:13:09,087 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:13:11,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=20693.333333333332, ans=0.0 +2024-08-25 04:13:12,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=20693.333333333332, ans=0.125 +2024-08-25 04:13:18,526 INFO [train.py:1114] (3/4) Epoch 2, batch 1400, loss[loss=0.2981, simple_loss=0.3192, pruned_loss=0.09802, ctc_loss=0.2026, over 19670.00 frames. ], tot_loss[loss=0.3679, simple_loss=0.3714, pruned_loss=0.1322, ctc_loss=0.2503, over 3865506.63 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 04:14:03,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 2.385e+02 2.674e+02 3.744e+02 6.684e+02, threshold=5.347e+02, percent-clipped=6.0 +2024-08-25 04:14:08,271 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.506e-03 +2024-08-25 04:14:08,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.19 vs. limit=15.0 +2024-08-25 04:14:11,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=20853.333333333332, ans=0.125 +2024-08-25 04:14:13,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=20906.666666666668, ans=0.0 +2024-08-25 04:14:19,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=20906.666666666668, ans=10.0 +2024-08-25 04:14:19,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20906.666666666668, ans=0.1 +2024-08-25 04:14:36,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21013.333333333332, ans=0.125 +2024-08-25 04:14:37,938 INFO [train.py:1114] (3/4) Epoch 2, batch 1450, loss[loss=0.3879, simple_loss=0.391, pruned_loss=0.14, ctc_loss=0.262, over 19681.00 frames. ], tot_loss[loss=0.369, simple_loss=0.3722, pruned_loss=0.1327, ctc_loss=0.2512, over 3862745.91 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:14:42,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=21013.333333333332, ans=0.2 +2024-08-25 04:15:58,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.39 vs. 
limit=10.0 +2024-08-25 04:16:00,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=21120.0, ans=0.2 +2024-08-25 04:16:10,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21173.333333333332, ans=0.125 +2024-08-25 04:16:11,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=21173.333333333332, ans=0.04949747468305833 +2024-08-25 04:16:13,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=21173.333333333332, ans=0.5 +2024-08-25 04:16:28,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21226.666666666668, ans=0.125 +2024-08-25 04:16:30,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=21226.666666666668, ans=0.006255072463768116 +2024-08-25 04:16:33,077 INFO [train.py:1114] (3/4) Epoch 2, batch 1500, loss[loss=0.3892, simple_loss=0.3928, pruned_loss=0.1415, ctc_loss=0.2568, over 19574.00 frames. ], tot_loss[loss=0.3682, simple_loss=0.372, pruned_loss=0.1321, ctc_loss=0.2502, over 3862326.39 frames. ], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 04:16:33,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.12 vs. limit=15.0 +2024-08-25 04:16:36,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=21280.0, ans=0.1 +2024-08-25 04:16:44,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=21333.333333333332, ans=0.125 +2024-08-25 04:17:08,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.509e+02 2.906e+02 4.274e+02 8.598e+02, threshold=5.813e+02, percent-clipped=13.0 +2024-08-25 04:17:19,228 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:17:21,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=21440.0, ans=0.125 +2024-08-25 04:17:36,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21493.333333333332, ans=0.125 +2024-08-25 04:17:38,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=21493.333333333332, ans=0.125 +2024-08-25 04:17:42,728 INFO [train.py:1114] (3/4) Epoch 2, batch 1550, loss[loss=0.3934, simple_loss=0.3964, pruned_loss=0.143, ctc_loss=0.2606, over 19604.00 frames. ], tot_loss[loss=0.3698, simple_loss=0.3727, pruned_loss=0.1331, ctc_loss=0.2518, over 3847724.41 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0 +2024-08-25 04:17:47,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=21546.666666666668, ans=10.0 +2024-08-25 04:18:03,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.64 vs. 
limit=15.0 +2024-08-25 04:18:33,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=21760.0, ans=0.125 +2024-08-25 04:18:44,957 INFO [train.py:1114] (3/4) Epoch 2, batch 1600, loss[loss=0.3975, simple_loss=0.3924, pruned_loss=0.1471, ctc_loss=0.2708, over 19836.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.3724, pruned_loss=0.133, ctc_loss=0.2519, over 3835939.44 frames. ], batch size: 57, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 04:18:56,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=21866.666666666668, ans=0.04949747468305833 +2024-08-25 04:19:13,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.370e+02 2.902e+02 3.664e+02 6.938e+02, threshold=5.803e+02, percent-clipped=2.0 +2024-08-25 04:19:42,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22026.666666666668, ans=0.1 +2024-08-25 04:19:42,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=22026.666666666668, ans=0.2 +2024-08-25 04:19:49,396 INFO [train.py:1114] (3/4) Epoch 2, batch 1650, loss[loss=0.3698, simple_loss=0.3857, pruned_loss=0.1282, ctc_loss=0.2438, over 19647.00 frames. ], tot_loss[loss=0.3688, simple_loss=0.3718, pruned_loss=0.1327, ctc_loss=0.2511, over 3832389.91 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 04:19:57,323 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=12.0 +2024-08-25 04:20:02,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=22133.333333333332, ans=0.2 +2024-08-25 04:20:27,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=22240.0, ans=0.125 +2024-08-25 04:20:38,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=22293.333333333332, ans=0.0 +2024-08-25 04:20:46,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=22293.333333333332, ans=0.125 +2024-08-25 04:20:48,535 INFO [train.py:1114] (3/4) Epoch 2, batch 1700, loss[loss=0.3243, simple_loss=0.3327, pruned_loss=0.1124, ctc_loss=0.228, over 19685.00 frames. ], tot_loss[loss=0.3662, simple_loss=0.3704, pruned_loss=0.1313, ctc_loss=0.2486, over 3846786.50 frames. 
], batch size: 46, lr: 4.12e-02, grad_scale: 32.0 +2024-08-25 04:21:04,218 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:21:11,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=22400.0, ans=0.125 +2024-08-25 04:21:14,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22453.333333333332, ans=0.125 +2024-08-25 04:21:16,630 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.264e+02 2.715e+02 3.253e+02 5.462e+02, threshold=5.430e+02, percent-clipped=0.0 +2024-08-25 04:21:29,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.60 vs. limit=22.5 +2024-08-25 04:21:31,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22506.666666666668, ans=0.125 +2024-08-25 04:21:40,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. limit=6.0 +2024-08-25 04:21:48,267 INFO [train.py:1114] (3/4) Epoch 2, batch 1750, loss[loss=0.3623, simple_loss=0.3546, pruned_loss=0.1357, ctc_loss=0.2466, over 19653.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.3697, pruned_loss=0.1309, ctc_loss=0.2475, over 3852038.36 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 32.0 +2024-08-25 04:21:51,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=22613.333333333332, ans=0.125 +2024-08-25 04:21:58,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=22666.666666666668, ans=0.005942028985507246 +2024-08-25 04:22:12,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=22720.0, ans=0.125 +2024-08-25 04:22:13,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=22720.0, ans=0.125 +2024-08-25 04:22:28,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=22773.333333333332, ans=0.125 +2024-08-25 04:22:28,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=22773.333333333332, ans=0.0 +2024-08-25 04:22:53,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=22826.666666666668, ans=0.0 +2024-08-25 04:23:02,442 INFO [train.py:1114] (3/4) Epoch 2, batch 1800, loss[loss=0.3418, simple_loss=0.3637, pruned_loss=0.1153, ctc_loss=0.2233, over 19625.00 frames. ], tot_loss[loss=0.3646, simple_loss=0.3694, pruned_loss=0.1305, ctc_loss=0.247, over 3853550.70 frames. 
], batch size: 55, lr: 4.11e-02, grad_scale: 32.0 +2024-08-25 04:23:08,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=22880.0, ans=0.2 +2024-08-25 04:23:28,005 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.805e+02 2.473e+02 2.913e+02 3.585e+02 6.262e+02, threshold=5.825e+02, percent-clipped=5.0 +2024-08-25 04:23:35,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.94 vs. limit=22.5 +2024-08-25 04:23:40,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=23040.0, ans=0.125 +2024-08-25 04:23:48,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=23093.333333333332, ans=0.07 +2024-08-25 04:23:59,517 INFO [train.py:1114] (3/4) Epoch 2, batch 1850, loss[loss=0.3764, simple_loss=0.3855, pruned_loss=0.1322, ctc_loss=0.2575, over 19602.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.3691, pruned_loss=0.1299, ctc_loss=0.2456, over 3856750.56 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 32.0 +2024-08-25 04:24:01,490 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.32 vs. limit=15.0 +2024-08-25 04:24:03,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23146.666666666668, ans=0.1 +2024-08-25 04:24:15,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=23200.0, ans=0.0 +2024-08-25 04:24:25,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.67 vs. limit=15.0 +2024-08-25 04:24:41,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=23306.666666666668, ans=0.0 +2024-08-25 04:24:56,425 INFO [train.py:1114] (3/4) Epoch 2, batch 1900, loss[loss=0.347, simple_loss=0.3708, pruned_loss=0.1172, ctc_loss=0.2218, over 19659.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.3685, pruned_loss=0.1288, ctc_loss=0.2436, over 3861920.01 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 32.0 +2024-08-25 04:24:57,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=23413.333333333332, ans=0.0057797101449275365 +2024-08-25 04:25:07,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=23466.666666666668, ans=0.005768115942028985 +2024-08-25 04:25:12,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.65 vs. limit=22.5 +2024-08-25 04:25:14,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.61 vs. 
limit=15.0 +2024-08-25 04:25:16,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23466.666666666668, ans=0.125 +2024-08-25 04:25:21,313 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.247e+02 2.781e+02 3.399e+02 7.136e+02, threshold=5.561e+02, percent-clipped=3.0 +2024-08-25 04:25:28,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=23520.0, ans=0.0 +2024-08-25 04:25:55,283 INFO [train.py:1114] (3/4) Epoch 2, batch 1950, loss[loss=0.3432, simple_loss=0.3551, pruned_loss=0.1218, ctc_loss=0.219, over 19583.00 frames. ], tot_loss[loss=0.3615, simple_loss=0.3692, pruned_loss=0.1284, ctc_loss=0.2427, over 3870854.51 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 32.0 +2024-08-25 04:26:09,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=23733.333333333332, ans=0.005710144927536232 +2024-08-25 04:26:10,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23733.333333333332, ans=0.125 +2024-08-25 04:26:11,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=23733.333333333332, ans=0.025 +2024-08-25 04:26:24,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=23786.666666666668, ans=0.0 +2024-08-25 04:26:30,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=23840.0, ans=0.125 +2024-08-25 04:26:40,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=23840.0, ans=0.125 +2024-08-25 04:26:46,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=23893.333333333332, ans=0.125 +2024-08-25 04:26:54,475 INFO [train.py:1114] (3/4) Epoch 2, batch 2000, loss[loss=0.2948, simple_loss=0.3168, pruned_loss=0.0991, ctc_loss=0.1867, over 19653.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.3702, pruned_loss=0.1296, ctc_loss=0.2446, over 3855664.61 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0 +2024-08-25 04:26:59,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23946.666666666668, ans=0.125 +2024-08-25 04:27:20,444 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.739e+02 2.625e+02 3.128e+02 3.968e+02 6.078e+02, threshold=6.255e+02, percent-clipped=2.0 +2024-08-25 04:27:26,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24053.333333333332, ans=0.1 +2024-08-25 04:27:51,117 INFO [train.py:1114] (3/4) Epoch 2, batch 2050, loss[loss=0.303, simple_loss=0.3207, pruned_loss=0.1046, ctc_loss=0.19, over 19710.00 frames. ], tot_loss[loss=0.3612, simple_loss=0.3682, pruned_loss=0.1286, ctc_loss=0.2427, over 3851469.00 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 04:28:05,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.16 vs. 
limit=15.0 +2024-08-25 04:28:10,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=24266.666666666668, ans=0.125 +2024-08-25 04:28:13,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=24320.0, ans=0.005582608695652174 +2024-08-25 04:28:14,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=24320.0, ans=0.0 +2024-08-25 04:28:14,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=24320.0, ans=0.125 +2024-08-25 04:28:14,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0 +2024-08-25 04:28:19,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.68 vs. limit=15.0 +2024-08-25 04:28:28,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.81 vs. limit=22.5 +2024-08-25 04:28:32,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=24373.333333333332, ans=0.2 +2024-08-25 04:28:47,789 INFO [train.py:1114] (3/4) Epoch 2, batch 2100, loss[loss=0.3409, simple_loss=0.3615, pruned_loss=0.1156, ctc_loss=0.2227, over 19771.00 frames. ], tot_loss[loss=0.3594, simple_loss=0.3669, pruned_loss=0.1278, ctc_loss=0.2411, over 3858121.59 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 04:28:48,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=24480.0, ans=0.125 +2024-08-25 04:28:57,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.91 vs. limit=22.5 +2024-08-25 04:29:04,242 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:29:14,130 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.675e+02 2.311e+02 2.619e+02 3.137e+02 5.086e+02, threshold=5.238e+02, percent-clipped=0.0 +2024-08-25 04:29:14,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24586.666666666668, ans=0.1 +2024-08-25 04:29:28,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.28 vs. limit=22.5 +2024-08-25 04:29:32,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=24693.333333333332, ans=0.005501449275362319 +2024-08-25 04:29:43,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=24746.666666666668, ans=0.125 +2024-08-25 04:29:44,331 INFO [train.py:1114] (3/4) Epoch 2, batch 2150, loss[loss=0.3222, simple_loss=0.3447, pruned_loss=0.1073, ctc_loss=0.2124, over 19847.00 frames. ], tot_loss[loss=0.3565, simple_loss=0.365, pruned_loss=0.1263, ctc_loss=0.2381, over 3870257.90 frames. 
], batch size: 52, lr: 4.07e-02, grad_scale: 32.0 +2024-08-25 04:29:58,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24800.0, ans=0.1 +2024-08-25 04:30:03,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=24800.0, ans=0.125 +2024-08-25 04:30:04,825 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:30:09,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=24853.333333333332, ans=0.125 +2024-08-25 04:30:10,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24853.333333333332, ans=0.125 +2024-08-25 04:30:13,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=24853.333333333332, ans=0.125 +2024-08-25 04:30:24,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=24906.666666666668, ans=0.0 +2024-08-25 04:30:32,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24960.0, ans=0.1 +2024-08-25 04:30:35,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=24960.0, ans=0.07 +2024-08-25 04:30:40,040 INFO [train.py:1114] (3/4) Epoch 2, batch 2200, loss[loss=0.3701, simple_loss=0.3771, pruned_loss=0.1312, ctc_loss=0.2521, over 19587.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3646, pruned_loss=0.1257, ctc_loss=0.237, over 3869038.25 frames. 
], batch size: 57, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 04:30:40,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=25013.333333333332, ans=0.125 +2024-08-25 04:30:46,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=25013.333333333332, ans=0.0 +2024-08-25 04:30:55,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=25066.666666666668, ans=0.125 +2024-08-25 04:31:06,340 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.398e+02 2.814e+02 3.505e+02 8.042e+02, threshold=5.628e+02, percent-clipped=3.0 +2024-08-25 04:31:17,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=25173.333333333332, ans=0.125 +2024-08-25 04:31:21,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=25173.333333333332, ans=0.95 +2024-08-25 04:31:23,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=25173.333333333332, ans=0.125 +2024-08-25 04:31:27,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25226.666666666668, ans=0.1 +2024-08-25 04:31:31,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=25226.666666666668, ans=0.0 +2024-08-25 04:31:37,462 INFO [train.py:1114] (3/4) Epoch 2, batch 2250, loss[loss=0.3619, simple_loss=0.3826, pruned_loss=0.1227, ctc_loss=0.2394, over 19606.00 frames. ], tot_loss[loss=0.3557, simple_loss=0.3648, pruned_loss=0.1258, ctc_loss=0.2375, over 3868519.86 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 04:31:42,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25280.0, ans=0.1 +2024-08-25 04:31:49,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.74 vs. limit=15.0 +2024-08-25 04:31:57,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=25333.333333333332, ans=0.0 +2024-08-25 04:32:02,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.33 vs. 
limit=6.0 +2024-08-25 04:32:08,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=25386.666666666668, ans=0.125 +2024-08-25 04:32:16,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=25440.0, ans=0.0053391304347826084 +2024-08-25 04:32:21,529 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:32:29,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=25493.333333333332, ans=0.0 +2024-08-25 04:32:30,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=25493.333333333332, ans=0.2 +2024-08-25 04:32:33,457 INFO [train.py:1114] (3/4) Epoch 2, batch 2300, loss[loss=0.3435, simple_loss=0.3516, pruned_loss=0.1224, ctc_loss=0.2268, over 19513.00 frames. ], tot_loss[loss=0.3553, simple_loss=0.3639, pruned_loss=0.1259, ctc_loss=0.2371, over 3862000.82 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 16.0 +2024-08-25 04:32:45,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=25600.0, ans=0.025 +2024-08-25 04:32:47,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.65 vs. limit=5.0 +2024-08-25 04:32:51,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=25600.0, ans=0.04949747468305833 +2024-08-25 04:32:55,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.80 vs. limit=15.0 +2024-08-25 04:33:03,045 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.317e+02 2.709e+02 3.466e+02 6.027e+02, threshold=5.417e+02, percent-clipped=4.0 +2024-08-25 04:33:04,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=25653.333333333332, ans=0.0 +2024-08-25 04:33:04,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=25653.333333333332, ans=0.125 +2024-08-25 04:33:17,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=25706.666666666668, ans=0.04949747468305833 +2024-08-25 04:33:20,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=25760.0, ans=0.05 +2024-08-25 04:33:27,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=25760.0, ans=0.125 +2024-08-25 04:33:32,438 INFO [train.py:1114] (3/4) Epoch 2, batch 2350, loss[loss=0.3448, simple_loss=0.3661, pruned_loss=0.117, ctc_loss=0.2237, over 19655.00 frames. ], tot_loss[loss=0.3551, simple_loss=0.3636, pruned_loss=0.1259, ctc_loss=0.2369, over 3864149.94 frames. 
], batch size: 63, lr: 4.04e-02, grad_scale: 16.0
+2024-08-25 04:33:34,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=25813.333333333332, ans=0.2
+2024-08-25 04:33:46,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25866.666666666668, ans=0.1
+2024-08-25 04:34:11,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.58 vs. limit=15.0
+2024-08-25 04:34:25,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=26026.666666666668, ans=0.1
+2024-08-25 04:34:25,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.19 vs. limit=10.0
+2024-08-25 04:34:30,678 INFO [train.py:1114] (3/4) Epoch 2, batch 2400, loss[loss=0.3553, simple_loss=0.3678, pruned_loss=0.1233, ctc_loss=0.2405, over 19243.00 frames. ], tot_loss[loss=0.3588, simple_loss=0.3668, pruned_loss=0.1275, ctc_loss=0.2396, over 3858949.39 frames. ], batch size: 71, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 04:34:36,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=26080.0, ans=0.125
+2024-08-25 04:34:40,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26133.333333333332, ans=0.1
+2024-08-25 04:34:57,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.789e+02 2.184e+02 2.505e+02 3.102e+02 8.045e+02, threshold=5.010e+02, percent-clipped=5.0
+2024-08-25 04:34:59,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.66 vs. limit=22.5
+2024-08-25 04:34:59,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=26186.666666666668, ans=0.125
+2024-08-25 04:35:00,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=26186.666666666668, ans=0.2
+2024-08-25 04:35:06,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=26240.0, ans=0.125
+2024-08-25 04:35:27,216 INFO [train.py:1114] (3/4) Epoch 2, batch 2450, loss[loss=0.4348, simple_loss=0.4045, pruned_loss=0.1706, ctc_loss=0.3098, over 13311.00 frames. ], tot_loss[loss=0.3686, simple_loss=0.3722, pruned_loss=0.1327, ctc_loss=0.2491, over 3728997.81 frames. ], batch size: 140, lr: 4.03e-02, grad_scale: 32.0
+2024-08-25 04:35:51,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=26453.333333333332, ans=0.125
+2024-08-25 04:36:50,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.83 vs. limit=6.0
+2024-08-25 04:36:55,752 INFO [train.py:1114] (3/4) Epoch 3, batch 0, loss[loss=0.3412, simple_loss=0.3479, pruned_loss=0.1227, ctc_loss=0.2227, over 19796.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3479, pruned_loss=0.1227, ctc_loss=0.2227, over 19796.00 frames. ], batch size: 49, lr: 3.83e-02, grad_scale: 32.0
+2024-08-25 04:36:55,753 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-25 04:37:08,021 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2847, simple_loss=0.3461, pruned_loss=0.08168, ctc_loss=0.1499, over 944034.00 frames.
+2024-08-25 04:37:08,022 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB
+2024-08-25 04:37:28,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=26608.0, ans=0.125
+2024-08-25 04:37:45,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26714.666666666668, ans=0.125
+2024-08-25 04:37:50,807 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.252e+02 2.580e+02 3.143e+02 6.401e+02, threshold=5.159e+02, percent-clipped=2.0
+2024-08-25 04:38:10,078 INFO [train.py:1114] (3/4) Epoch 3, batch 50, loss[loss=0.3261, simple_loss=0.3443, pruned_loss=0.1111, ctc_loss=0.2142, over 19705.00 frames. ], tot_loss[loss=0.3662, simple_loss=0.3716, pruned_loss=0.1309, ctc_loss=0.2474, over 844545.25 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 04:38:45,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=26874.666666666668, ans=0.125
+2024-08-25 04:38:48,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=26928.0, ans=0.125
+2024-08-25 04:38:51,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=26928.0, ans=0.125
+2024-08-25 04:39:07,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=26981.333333333332, ans=0.0
+2024-08-25 04:39:14,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=26981.333333333332, ans=0.2
+2024-08-25 04:39:28,423 INFO [train.py:1114] (3/4) Epoch 3, batch 100, loss[loss=0.3169, simple_loss=0.3371, pruned_loss=0.108, ctc_loss=0.2016, over 19696.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.372, pruned_loss=0.1302, ctc_loss=0.2453, over 1498666.18 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 04:39:40,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=27141.333333333332, ans=0.5
+2024-08-25 04:39:46,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=27141.333333333332, ans=0.04949747468305833
+2024-08-25 04:39:54,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=27194.666666666668, ans=0.125
+2024-08-25 04:40:11,091 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.865e+02 2.221e+02 2.583e+02 3.158e+02 4.904e+02, threshold=5.165e+02, percent-clipped=0.0
+2024-08-25 04:40:27,485 INFO [train.py:1114] (3/4) Epoch 3, batch 150, loss[loss=0.3696, simple_loss=0.3601, pruned_loss=0.1381, ctc_loss=0.2568, over 19710.00 frames. ], tot_loss[loss=0.3562, simple_loss=0.3654, pruned_loss=0.1261, ctc_loss=0.237, over 2028092.68 frames. ], batch size: 47, lr: 3.81e-02, grad_scale: 16.0
+2024-08-25 04:40:35,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=27354.666666666668, ans=0.0
+2024-08-25 04:40:57,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27461.333333333332, ans=0.125
+2024-08-25 04:41:07,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=27514.666666666668, ans=0.004888115942028985
+2024-08-25 04:41:23,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=27568.0, ans=0.125
+2024-08-25 04:41:29,363 INFO [train.py:1114] (3/4) Epoch 3, batch 200, loss[loss=0.3931, simple_loss=0.3906, pruned_loss=0.1445, ctc_loss=0.2664, over 18470.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3623, pruned_loss=0.124, ctc_loss=0.233, over 2435922.79 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 04:41:33,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=27621.333333333332, ans=0.2
+2024-08-25 04:42:06,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27781.333333333332, ans=0.1
+2024-08-25 04:42:14,170 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 2.192e+02 2.550e+02 3.125e+02 5.269e+02, threshold=5.099e+02, percent-clipped=1.0
+2024-08-25 04:42:33,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.54 vs. limit=10.0
+2024-08-25 04:42:35,062 INFO [train.py:1114] (3/4) Epoch 3, batch 250, loss[loss=0.3965, simple_loss=0.3988, pruned_loss=0.1444, ctc_loss=0.2633, over 19425.00 frames. ], tot_loss[loss=0.3504, simple_loss=0.3617, pruned_loss=0.1233, ctc_loss=0.2314, over 2756123.64 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 04:42:59,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=27994.666666666668, ans=0.0
+2024-08-25 04:43:00,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=27994.666666666668, ans=0.125
+2024-08-25 04:43:02,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=27994.666666666668, ans=0.125
+2024-08-25 04:43:18,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=15.0
+2024-08-25 04:43:33,531 INFO [train.py:1114] (3/4) Epoch 3, batch 300, loss[loss=0.4106, simple_loss=0.4041, pruned_loss=0.1545, ctc_loss=0.2703, over 19505.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3599, pruned_loss=0.122, ctc_loss=0.2287, over 3001487.37 frames. ], batch size: 61, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 04:43:45,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=28208.0, ans=0.0
+2024-08-25 04:44:05,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28261.333333333332, ans=0.125
+2024-08-25 04:44:18,923 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.242e+02 2.624e+02 3.299e+02 5.169e+02, threshold=5.248e+02, percent-clipped=1.0
+2024-08-25 04:44:21,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=28314.666666666668, ans=0.125
+2024-08-25 04:44:21,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=28314.666666666668, ans=0.125
+2024-08-25 04:44:36,139 INFO [train.py:1114] (3/4) Epoch 3, batch 350, loss[loss=0.3032, simple_loss=0.3204, pruned_loss=0.1047, ctc_loss=0.1915, over 19746.00 frames. ], tot_loss[loss=0.3463, simple_loss=0.3594, pruned_loss=0.1211, ctc_loss=0.2274, over 3191503.78 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 04:44:49,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=28474.666666666668, ans=0.125
+2024-08-25 04:44:50,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=28474.666666666668, ans=0.2
+2024-08-25 04:45:00,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28528.0, ans=0.1
+2024-08-25 04:45:36,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=28581.333333333332, ans=0.125
+2024-08-25 04:45:52,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=28634.666666666668, ans=0.00464463768115942
+2024-08-25 04:46:41,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=28634.666666666668, ans=0.95
+2024-08-25 04:46:55,918 INFO [train.py:1114] (3/4) Epoch 3, batch 400, loss[loss=0.3328, simple_loss=0.3601, pruned_loss=0.111, ctc_loss=0.2091, over 19500.00 frames. ], tot_loss[loss=0.3465, simple_loss=0.3595, pruned_loss=0.1211, ctc_loss=0.228, over 3341862.26 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 32.0
+2024-08-25 04:47:59,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=28848.0, ans=0.125
+2024-08-25 04:48:22,798 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.232e+02 2.568e+02 3.025e+02 1.134e+03, threshold=5.136e+02, percent-clipped=4.0
+2024-08-25 04:48:27,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=28848.0, ans=0.0
+2024-08-25 04:48:47,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28954.666666666668, ans=0.125
+2024-08-25 04:48:48,325 INFO [train.py:1114] (3/4) Epoch 3, batch 450, loss[loss=0.3278, simple_loss=0.3502, pruned_loss=0.1095, ctc_loss=0.2157, over 19607.00 frames. 
], tot_loss[loss=0.3462, simple_loss=0.3595, pruned_loss=0.1209, ctc_loss=0.2275, over 3449635.25 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 04:49:04,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=29008.0, ans=0.004563478260869565 +2024-08-25 04:49:07,113 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:49:08,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=29008.0, ans=0.125 +2024-08-25 04:49:12,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=29061.333333333332, ans=0.125 +2024-08-25 04:49:22,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29061.333333333332, ans=0.1 +2024-08-25 04:49:35,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=29114.666666666668, ans=0.0 +2024-08-25 04:50:09,364 INFO [train.py:1114] (3/4) Epoch 3, batch 500, loss[loss=0.3872, simple_loss=0.3863, pruned_loss=0.1406, ctc_loss=0.2674, over 19696.00 frames. ], tot_loss[loss=0.3455, simple_loss=0.3585, pruned_loss=0.1209, ctc_loss=0.227, over 3545514.73 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 04:50:12,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=29221.333333333332, ans=0.125 +2024-08-25 04:50:56,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=29328.0, ans=0.1 +2024-08-25 04:51:09,139 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.756e+02 2.370e+02 2.734e+02 3.745e+02 5.336e+02, threshold=5.469e+02, percent-clipped=1.0 +2024-08-25 04:51:20,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29434.666666666668, ans=0.0 +2024-08-25 04:51:28,436 INFO [train.py:1114] (3/4) Epoch 3, batch 550, loss[loss=0.3999, simple_loss=0.3967, pruned_loss=0.1447, ctc_loss=0.2838, over 19297.00 frames. ], tot_loss[loss=0.3442, simple_loss=0.3579, pruned_loss=0.1201, ctc_loss=0.2256, over 3608395.42 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 32.0 +2024-08-25 04:51:42,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=29541.333333333332, ans=0.125 +2024-08-25 04:51:47,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.05 vs. limit=22.5 +2024-08-25 04:51:50,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=29594.666666666668, ans=0.125 +2024-08-25 04:51:55,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.07 vs. 
limit=15.0 +2024-08-25 04:52:54,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29701.333333333332, ans=0.125 +2024-08-25 04:53:06,025 INFO [train.py:1114] (3/4) Epoch 3, batch 600, loss[loss=0.3771, simple_loss=0.3835, pruned_loss=0.1362, ctc_loss=0.2457, over 19344.00 frames. ], tot_loss[loss=0.3418, simple_loss=0.3566, pruned_loss=0.1189, ctc_loss=0.2231, over 3665281.45 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 32.0 +2024-08-25 04:53:06,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.14 vs. limit=15.0 +2024-08-25 04:53:12,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=29754.666666666668, ans=0.004401159420289855 +2024-08-25 04:53:13,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=15.0 +2024-08-25 04:53:17,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.25 vs. limit=15.0 +2024-08-25 04:53:24,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=29808.0, ans=0.5 +2024-08-25 04:53:45,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29914.666666666668, ans=0.125 +2024-08-25 04:53:49,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.141e+02 2.536e+02 3.031e+02 6.622e+02, threshold=5.071e+02, percent-clipped=2.0 +2024-08-25 04:53:59,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=29968.0, ans=0.125 +2024-08-25 04:54:00,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=29968.0, ans=0.004354782608695653 +2024-08-25 04:54:05,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30021.333333333332, ans=0.1 +2024-08-25 04:54:06,081 INFO [train.py:1114] (3/4) Epoch 3, batch 650, loss[loss=0.3295, simple_loss=0.3517, pruned_loss=0.1113, ctc_loss=0.212, over 19747.00 frames. ], tot_loss[loss=0.3409, simple_loss=0.3558, pruned_loss=0.1185, ctc_loss=0.2225, over 3715143.58 frames. 
], batch size: 54, lr: 3.75e-02, grad_scale: 32.0 +2024-08-25 04:54:13,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30021.333333333332, ans=0.1 +2024-08-25 04:54:15,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=30021.333333333332, ans=0.09899494936611666 +2024-08-25 04:54:19,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:54:34,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 04:54:45,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30128.0, ans=0.1 +2024-08-25 04:54:51,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=30128.0, ans=0.0 +2024-08-25 04:54:51,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30128.0, ans=0.125 +2024-08-25 04:54:51,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=30128.0, ans=0.125 +2024-08-25 04:54:57,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=30181.333333333332, ans=0.125 +2024-08-25 04:55:11,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30234.666666666668, ans=0.1 +2024-08-25 04:55:19,017 INFO [train.py:1114] (3/4) Epoch 3, batch 700, loss[loss=0.344, simple_loss=0.3571, pruned_loss=0.1205, ctc_loss=0.2251, over 19722.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3569, pruned_loss=0.1192, ctc_loss=0.2237, over 3747185.94 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 32.0 +2024-08-25 04:55:20,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30288.0, ans=0.1 +2024-08-25 04:55:22,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=30288.0, ans=0.125 +2024-08-25 04:55:23,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=30288.0, ans=0.0 +2024-08-25 04:55:44,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=30394.666666666668, ans=0.025 +2024-08-25 04:55:45,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=30394.666666666668, ans=0.2 +2024-08-25 04:56:38,924 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.716e+02 2.292e+02 2.520e+02 3.192e+02 5.203e+02, threshold=5.040e+02, percent-clipped=1.0 +2024-08-25 04:56:56,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30554.666666666668, ans=0.125 +2024-08-25 04:56:57,173 INFO [train.py:1114] (3/4) Epoch 3, batch 750, loss[loss=0.3526, simple_loss=0.3711, pruned_loss=0.1221, ctc_loss=0.2243, over 19504.00 frames. 
], tot_loss[loss=0.3409, simple_loss=0.3558, pruned_loss=0.1185, ctc_loss=0.2225, over 3774363.66 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 32.0 +2024-08-25 04:56:57,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=30554.666666666668, ans=0.125 +2024-08-25 04:56:59,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30554.666666666668, ans=0.125 +2024-08-25 04:57:11,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=30608.0, ans=0.125 +2024-08-25 04:57:37,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.69 vs. limit=22.5 +2024-08-25 04:57:59,145 INFO [train.py:1114] (3/4) Epoch 3, batch 800, loss[loss=0.3183, simple_loss=0.3336, pruned_loss=0.1106, ctc_loss=0.2047, over 19416.00 frames. ], tot_loss[loss=0.3398, simple_loss=0.3552, pruned_loss=0.1179, ctc_loss=0.2213, over 3794555.81 frames. ], batch size: 48, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 04:58:23,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=30928.0, ans=0.025 +2024-08-25 04:58:37,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.75 vs. limit=10.0 +2024-08-25 04:58:42,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 2.211e+02 2.622e+02 3.205e+02 5.257e+02, threshold=5.244e+02, percent-clipped=1.0 +2024-08-25 04:58:46,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=31034.666666666668, ans=0.2 +2024-08-25 04:58:54,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.98 vs. limit=22.5 +2024-08-25 04:58:59,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=31034.666666666668, ans=0.0 +2024-08-25 04:58:59,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=31034.666666666668, ans=0.2 +2024-08-25 04:59:01,979 INFO [train.py:1114] (3/4) Epoch 3, batch 850, loss[loss=0.3491, simple_loss=0.3694, pruned_loss=0.1185, ctc_loss=0.2299, over 19669.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3557, pruned_loss=0.1181, ctc_loss=0.2217, over 3814526.82 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 04:59:11,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=31088.0, ans=0.125 +2024-08-25 04:59:25,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.64 vs. 
limit=5.0 +2024-08-25 04:59:37,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=31248.0, ans=0.125 +2024-08-25 04:59:43,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=31248.0, ans=0.0 +2024-08-25 04:59:57,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31301.333333333332, ans=0.1 +2024-08-25 05:00:02,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=31301.333333333332, ans=0.125 +2024-08-25 05:00:04,399 INFO [train.py:1114] (3/4) Epoch 3, batch 900, loss[loss=0.3031, simple_loss=0.3222, pruned_loss=0.1033, ctc_loss=0.1937, over 19806.00 frames. ], tot_loss[loss=0.3417, simple_loss=0.3563, pruned_loss=0.1189, ctc_loss=0.2232, over 3817708.33 frames. ], batch size: 49, lr: 3.72e-02, grad_scale: 8.0 +2024-08-25 05:00:26,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=15.0 +2024-08-25 05:00:27,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=31408.0, ans=0.2 +2024-08-25 05:00:40,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=31461.333333333332, ans=0.125 +2024-08-25 05:00:47,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=31514.666666666668, ans=0.125 +2024-08-25 05:00:54,420 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.711e+02 2.296e+02 2.736e+02 3.525e+02 1.528e+03, threshold=5.472e+02, percent-clipped=4.0 +2024-08-25 05:01:06,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=31568.0, ans=0.125 +2024-08-25 05:01:08,275 INFO [train.py:1114] (3/4) Epoch 3, batch 950, loss[loss=0.2841, simple_loss=0.3124, pruned_loss=0.0925, ctc_loss=0.1767, over 19519.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3561, pruned_loss=0.1186, ctc_loss=0.2228, over 3820935.01 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 8.0 +2024-08-25 05:01:09,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.48 vs. limit=22.5 +2024-08-25 05:01:10,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=31621.333333333332, ans=0.125 +2024-08-25 05:01:13,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=31621.333333333332, ans=0.125 +2024-08-25 05:01:14,801 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.28 vs. 
limit=22.5 +2024-08-25 05:01:19,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=31674.666666666668, ans=0.003983768115942029 +2024-08-25 05:01:52,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31781.333333333332, ans=0.1 +2024-08-25 05:01:52,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.33 vs. limit=10.0 +2024-08-25 05:01:53,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=31781.333333333332, ans=0.015 +2024-08-25 05:01:55,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=31834.666666666668, ans=0.0 +2024-08-25 05:02:07,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=31888.0, ans=0.2 +2024-08-25 05:02:08,113 INFO [train.py:1114] (3/4) Epoch 3, batch 1000, loss[loss=0.3313, simple_loss=0.3407, pruned_loss=0.1182, ctc_loss=0.2139, over 19866.00 frames. ], tot_loss[loss=0.343, simple_loss=0.3572, pruned_loss=0.1195, ctc_loss=0.2244, over 3817290.58 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 8.0 +2024-08-25 05:02:13,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.47 vs. limit=22.5 +2024-08-25 05:02:30,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.42 vs. limit=15.0 +2024-08-25 05:02:34,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31994.666666666668, ans=0.1 +2024-08-25 05:02:41,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.25 vs. limit=22.5 +2024-08-25 05:02:56,471 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.586e+02 2.163e+02 2.492e+02 3.027e+02 5.724e+02, threshold=4.983e+02, percent-clipped=1.0 +2024-08-25 05:03:06,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=32101.333333333332, ans=0.125 +2024-08-25 05:03:13,725 INFO [train.py:1114] (3/4) Epoch 3, batch 1050, loss[loss=0.3691, simple_loss=0.3704, pruned_loss=0.1356, ctc_loss=0.2415, over 19833.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3566, pruned_loss=0.1193, ctc_loss=0.2241, over 3823057.75 frames. 
], batch size: 57, lr: 3.70e-02, grad_scale: 8.0 +2024-08-25 05:03:14,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32154.666666666668, ans=0.1 +2024-08-25 05:03:23,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=32154.666666666668, ans=0.2 +2024-08-25 05:04:02,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=32208.0, ans=0.0 +2024-08-25 05:04:14,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32261.333333333332, ans=0.0 +2024-08-25 05:05:02,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=32368.0, ans=0.125 +2024-08-25 05:05:04,405 INFO [train.py:1114] (3/4) Epoch 3, batch 1100, loss[loss=0.3154, simple_loss=0.3372, pruned_loss=0.1058, ctc_loss=0.2053, over 19594.00 frames. ], tot_loss[loss=0.3407, simple_loss=0.3558, pruned_loss=0.1183, ctc_loss=0.2226, over 3830355.15 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 8.0 +2024-08-25 05:06:00,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.823e+02 2.355e+02 2.517e+02 3.019e+02 4.945e+02, threshold=5.033e+02, percent-clipped=0.0 +2024-08-25 05:06:03,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32634.666666666668, ans=0.1 +2024-08-25 05:06:23,030 INFO [train.py:1114] (3/4) Epoch 3, batch 1150, loss[loss=0.3148, simple_loss=0.3414, pruned_loss=0.105, ctc_loss=0.1958, over 19570.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3554, pruned_loss=0.1183, ctc_loss=0.2223, over 3829999.35 frames. ], batch size: 52, lr: 3.69e-02, grad_scale: 8.0 +2024-08-25 05:06:53,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=32794.666666666664, ans=0.125 +2024-08-25 05:07:07,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=32848.0, ans=0.0037286956521739127 +2024-08-25 05:07:11,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=12.0
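
The recurring optim.py warnings summarize the recent distribution of gradient norms as five quantiles (min, 25%, median, 75%, max) and clip against a threshold that tracks Clipping_scale times the median; in the warning above, 2.0 × 2.517e+02 ≈ 5.033e+02. A simplified sketch of that logic follows; the window size and reporting cadence are assumptions, and the real implementation lives in icefall's optim.py:

```python
# Sketch of median-based gradient clipping with quantile reporting,
# in the spirit of the "grad-norm quartiles ... threshold=..." warnings.
from collections import deque
import torch

class MedianGradClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)   # recent total grad norms
        self.num_clipped = 0
        self.num_steps = 0

    def clip_(self, params) -> None:
        grads = [p.grad for p in params if p.grad is not None]
        total_norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(total_norm)
        self.num_steps += 1
        q = sorted(self.norms)
        quartiles = [q[int(r * (len(q) - 1))]
                     for r in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]   # scale * median
        if total_norm > threshold:
            self.num_clipped += 1
            for g in grads:
                g.mul_(threshold / total_norm)           # rescale in place
        # cumulative here; the real code reports over an interval
        print(f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
              + " ".join(f"{x:.3e}" for x in quartiles)
              + f", threshold={threshold:.3e}, percent-clipped="
              f"{100.0 * self.num_clipped / self.num_steps:.1f}")
```

+2024-08-25 05:07:32,065 INFO [train.py:1114] (3/4) Epoch 3, batch 1200, loss[loss=0.3661, simple_loss=0.3807, pruned_loss=0.1275, ctc_loss=0.2411, over 19848.00 frames. ], tot_loss[loss=0.3409, simple_loss=0.3559, pruned_loss=0.1184, ctc_loss=0.2226, over 3825593.25 frames.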
], batch size: 57, lr: 3.68e-02, grad_scale: 16.0 +2024-08-25 05:07:32,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=32954.666666666664, ans=0.0037055072463768124 +2024-08-25 05:07:34,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=32954.666666666664, ans=0.0 +2024-08-25 05:07:50,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=33008.0, ans=0.04949747468305833 +2024-08-25 05:08:01,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33061.333333333336, ans=0.125 +2024-08-25 05:08:19,682 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.731e+02 2.128e+02 2.359e+02 2.757e+02 6.653e+02, threshold=4.718e+02, percent-clipped=2.0 +2024-08-25 05:08:22,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33168.0, ans=0.125 +2024-08-25 05:08:30,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.19 vs. limit=15.0 +2024-08-25 05:08:35,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=33168.0, ans=0.2 +2024-08-25 05:08:38,022 INFO [train.py:1114] (3/4) Epoch 3, batch 1250, loss[loss=0.3761, simple_loss=0.3821, pruned_loss=0.1361, ctc_loss=0.2444, over 19540.00 frames. ], tot_loss[loss=0.3403, simple_loss=0.3563, pruned_loss=0.1178, ctc_loss=0.2216, over 3843355.26 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 16.0 +2024-08-25 05:09:15,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=33328.0, ans=0.2 +2024-08-25 05:09:29,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.78 vs. limit=15.0 +2024-08-25 05:09:36,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=33434.666666666664, ans=0.125 +2024-08-25 05:09:36,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.09 vs. limit=10.0 +2024-08-25 05:09:42,106 INFO [train.py:1114] (3/4) Epoch 3, batch 1300, loss[loss=0.4156, simple_loss=0.4035, pruned_loss=0.1568, ctc_loss=0.285, over 18785.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.355, pruned_loss=0.1172, ctc_loss=0.2205, over 3846466.03 frames. 
], batch size: 76, lr: 3.67e-02, grad_scale: 16.0 +2024-08-25 05:09:44,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=33488.0, ans=0.035 +2024-08-25 05:09:49,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=33488.0, ans=0.125 +2024-08-25 05:09:58,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=33541.333333333336, ans=0.025 +2024-08-25 05:10:03,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=33541.333333333336, ans=0.0 +2024-08-25 05:10:04,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=33594.666666666664, ans=0.003566376811594203 +2024-08-25 05:10:05,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=21.30 vs. limit=22.5 +2024-08-25 05:10:29,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.58 vs. limit=10.0 +2024-08-25 05:10:30,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.16 vs. limit=10.0 +2024-08-25 05:10:34,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=33594.666666666664, ans=0.0 +2024-08-25 05:10:36,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=33594.666666666664, ans=0.09899494936611666 +2024-08-25 05:10:39,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=33648.0, ans=0.015 +2024-08-25 05:10:41,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=33648.0, ans=0.0035547826086956523 +2024-08-25 05:10:48,148 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.161e+02 2.525e+02 2.896e+02 5.464e+02, threshold=5.050e+02, percent-clipped=3.0 +2024-08-25 05:11:02,303 INFO [train.py:1114] (3/4) Epoch 3, batch 1350, loss[loss=0.2848, simple_loss=0.3271, pruned_loss=0.08652, ctc_loss=0.1739, over 19785.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3538, pruned_loss=0.1164, ctc_loss=0.2191, over 3858148.22 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 16.0 +2024-08-25 05:11:10,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33754.666666666664, ans=0.125 +2024-08-25 05:11:18,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. limit=6.0 +2024-08-25 05:11:40,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33808.0, ans=0.0
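
The Whitening lines compare a per-module "whiteness" metric of the activations against a limit (e.g. "metric=21.30 vs. limit=22.5" above); when the metric exceeds its limit, scaling.py applies a penalty that nudges the activation covariance back toward a multiple of the identity. One plausible way to define such a metric is sketched below for a single group: it equals 1.0 for perfectly white features and grows as the covariance eigenvalues spread out. The exact formula in scaling.py differs in details:

```python
# Sketch of a whitening metric: ratio of the mean squared eigenvalue of
# the activation covariance to the squared mean eigenvalue, computed via
# diagonals so no eigendecomposition is needed.
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (num_frames, num_channels)
    x = x - x.mean(dim=0, keepdim=True)
    cov = x.t() @ x / x.shape[0]                  # (C, C) sample covariance
    mean_diag_sq = torch.diag(cov @ cov).mean()   # ~ mean of eigenvalues^2
    mean_diag = torch.diag(cov).mean()            # ~ mean of eigenvalues
    return mean_diag_sq / (mean_diag ** 2 + 1e-20)

x = torch.randn(4096, 384)   # white input -> metric close to 1
limit = 15.0
print(f"metric={whitening_metric(x).item():.2f} vs. limit={limit}")
```

+2024-08-25 05:11:48,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.68 vs.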
limit=22.5 +2024-08-25 05:11:56,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=33861.333333333336, ans=0.125 +2024-08-25 05:11:57,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=33861.333333333336, ans=0.025 +2024-08-25 05:12:02,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.99 vs. limit=15.0 +2024-08-25 05:12:06,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=33914.666666666664, ans=0.05 +2024-08-25 05:12:20,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.15 vs. limit=15.0 +2024-08-25 05:12:26,313 INFO [train.py:1114] (3/4) Epoch 3, batch 1400, loss[loss=0.3144, simple_loss=0.3285, pruned_loss=0.1088, ctc_loss=0.207, over 19685.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3532, pruned_loss=0.1157, ctc_loss=0.2176, over 3865726.13 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 16.0 +2024-08-25 05:12:30,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34021.333333333336, ans=0.125 +2024-08-25 05:13:31,980 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.210e+02 2.531e+02 3.096e+02 9.067e+02, threshold=5.062e+02, percent-clipped=2.0 +2024-08-25 05:13:32,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=12.0 +2024-08-25 05:14:14,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34234.666666666664, ans=0.1 +2024-08-25 05:14:21,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34234.666666666664, ans=0.125 +2024-08-25 05:14:24,485 INFO [train.py:1114] (3/4) Epoch 3, batch 1450, loss[loss=0.3823, simple_loss=0.3842, pruned_loss=0.1387, ctc_loss=0.2577, over 19664.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3543, pruned_loss=0.1163, ctc_loss=0.2184, over 3863914.65 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 16.0 +2024-08-25 05:14:47,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=34341.333333333336, ans=0.5 +2024-08-25 05:14:49,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=34341.333333333336, ans=0.125 +2024-08-25 05:15:15,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.03 vs. limit=15.0 +2024-08-25 05:15:32,910 INFO [train.py:1114] (3/4) Epoch 3, batch 1500, loss[loss=0.3522, simple_loss=0.3639, pruned_loss=0.1236, ctc_loss=0.2331, over 19574.00 frames. ], tot_loss[loss=0.3376, simple_loss=0.3548, pruned_loss=0.1165, ctc_loss=0.2189, over 3864371.48 frames. ], batch size: 57, lr: 3.65e-02, grad_scale: 16.0 +2024-08-25 05:15:54,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. 
limit=15.0 +2024-08-25 05:16:38,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=34661.333333333336, ans=0.125 +2024-08-25 05:16:38,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=34661.333333333336, ans=0.125 +2024-08-25 05:16:51,231 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.550e+02 2.151e+02 2.498e+02 3.151e+02 6.810e+02, threshold=4.996e+02, percent-clipped=2.0 +2024-08-25 05:19:31,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34768.0, ans=0.125 +2024-08-25 05:20:00,632 INFO [train.py:1114] (3/4) Epoch 3, batch 1550, loss[loss=0.3347, simple_loss=0.3613, pruned_loss=0.1129, ctc_loss=0.2057, over 19610.00 frames. ], tot_loss[loss=0.3376, simple_loss=0.3546, pruned_loss=0.1165, ctc_loss=0.2189, over 3849402.66 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 16.0 +2024-08-25 05:20:19,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.81 vs. limit=15.0 +2024-08-25 05:20:19,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=34874.666666666664, ans=0.0 +2024-08-25 05:20:20,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=34874.666666666664, ans=0.125 +2024-08-25 05:20:32,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34928.0, ans=0.125 +2024-08-25 05:21:29,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.11 vs. limit=22.5 +2024-08-25 05:22:04,968 INFO [train.py:1114] (3/4) Epoch 3, batch 1600, loss[loss=0.3409, simple_loss=0.3601, pruned_loss=0.1173, ctc_loss=0.218, over 19838.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3545, pruned_loss=0.1169, ctc_loss=0.2194, over 3838381.03 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 05:22:12,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=35088.0, ans=0.125 +2024-08-25 05:22:57,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0 +2024-08-25 05:23:43,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.193e+02 2.529e+02 3.233e+02 6.645e+02, threshold=5.059e+02, percent-clipped=2.0 +2024-08-25 05:23:43,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35248.0, ans=0.125 +2024-08-25 05:24:22,997 INFO [train.py:1114] (3/4) Epoch 3, batch 1650, loss[loss=0.2871, simple_loss=0.3312, pruned_loss=0.08768, ctc_loss=0.1691, over 19671.00 frames. ], tot_loss[loss=0.3381, simple_loss=0.3545, pruned_loss=0.1169, ctc_loss=0.2195, over 3833842.25 frames. 
], batch size: 59, lr: 3.63e-02, grad_scale: 32.0 +2024-08-25 05:24:31,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=35354.666666666664, ans=0.0 +2024-08-25 05:25:11,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=35408.0, ans=0.2 +2024-08-25 05:25:24,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=35461.333333333336, ans=0.0031605797101449274 +2024-08-25 05:25:27,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=35461.333333333336, ans=0.2 +2024-08-25 05:26:02,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.04 vs. limit=22.5 +2024-08-25 05:26:16,042 INFO [train.py:1114] (3/4) Epoch 3, batch 1700, loss[loss=0.3123, simple_loss=0.324, pruned_loss=0.1081, ctc_loss=0.2107, over 19677.00 frames. ], tot_loss[loss=0.3366, simple_loss=0.3536, pruned_loss=0.1162, ctc_loss=0.2177, over 3847148.22 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:26:18,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.59 vs. limit=15.0 +2024-08-25 05:26:20,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.98 vs. limit=22.5 +2024-08-25 05:26:26,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=35674.666666666664, ans=0.125 +2024-08-25 05:26:30,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=35674.666666666664, ans=0.2 +2024-08-25 05:27:02,478 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:27:10,191 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.342e+02 2.819e+02 3.429e+02 5.215e+02, threshold=5.637e+02, percent-clipped=1.0 +2024-08-25 05:27:10,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=35781.333333333336, ans=0.003091014492753622 +2024-08-25 05:27:23,555 INFO [train.py:1114] (3/4) Epoch 3, batch 1750, loss[loss=0.3217, simple_loss=0.3322, pruned_loss=0.1145, ctc_loss=0.2058, over 19625.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3533, pruned_loss=0.1158, ctc_loss=0.2171, over 3852016.43 frames. ], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 05:27:25,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35888.0, ans=0.125 +2024-08-25 05:27:25,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.58 vs. limit=22.5 +2024-08-25 05:27:40,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=35941.333333333336, ans=0.125 +2024-08-25 05:27:40,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.96 vs. 
limit=22.5 +2024-08-25 05:28:38,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36101.333333333336, ans=0.1 +2024-08-25 05:29:19,789 INFO [train.py:1114] (3/4) Epoch 3, batch 1800, loss[loss=0.3556, simple_loss=0.3749, pruned_loss=0.1215, ctc_loss=0.2333, over 19605.00 frames. ], tot_loss[loss=0.3357, simple_loss=0.3534, pruned_loss=0.1157, ctc_loss=0.217, over 3853205.65 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:31:33,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36208.0, ans=0.1 +2024-08-25 05:31:47,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=36314.666666666664, ans=0.125 +2024-08-25 05:31:58,650 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.624e+02 2.106e+02 2.466e+02 3.299e+02 1.077e+03, threshold=4.933e+02, percent-clipped=1.0 +2024-08-25 05:32:10,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=36368.0, ans=22.5 +2024-08-25 05:32:11,706 INFO [train.py:1114] (3/4) Epoch 3, batch 1850, loss[loss=0.3554, simple_loss=0.3659, pruned_loss=0.1247, ctc_loss=0.239, over 19592.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3521, pruned_loss=0.115, ctc_loss=0.2158, over 3857174.83 frames. ], batch size: 57, lr: 3.61e-02, grad_scale: 16.0 +2024-08-25 05:32:14,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36421.333333333336, ans=0.125 +2024-08-25 05:32:14,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36421.333333333336, ans=0.125 +2024-08-25 05:32:19,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.14 vs. limit=22.5 +2024-08-25 05:32:19,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.33 vs. limit=12.0 +2024-08-25 05:33:05,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=36634.666666666664, ans=0.035 +2024-08-25 05:33:06,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=36634.666666666664, ans=0.002905507246376812 +2024-08-25 05:33:12,872 INFO [train.py:1114] (3/4) Epoch 3, batch 1900, loss[loss=0.3653, simple_loss=0.3783, pruned_loss=0.1276, ctc_loss=0.2429, over 19621.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.3531, pruned_loss=0.1156, ctc_loss=0.2169, over 3861414.14 frames. 
], batch size: 59, lr: 3.60e-02, grad_scale: 16.0 +2024-08-25 05:33:15,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=36688.0, ans=0.125 +2024-08-25 05:33:40,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36794.666666666664, ans=0.1 +2024-08-25 05:34:05,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.713e+02 2.260e+02 2.560e+02 3.105e+02 5.689e+02, threshold=5.120e+02, percent-clipped=2.0 +2024-08-25 05:34:05,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=36901.333333333336, ans=12.0 +2024-08-25 05:34:12,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=36901.333333333336, ans=0.125 +2024-08-25 05:34:47,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=36901.333333333336, ans=0.0 +2024-08-25 05:34:49,850 INFO [train.py:1114] (3/4) Epoch 3, batch 1950, loss[loss=0.333, simple_loss=0.3441, pruned_loss=0.1163, ctc_loss=0.223, over 19586.00 frames. ], tot_loss[loss=0.3369, simple_loss=0.3547, pruned_loss=0.1161, ctc_loss=0.2174, over 3869991.40 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 16.0 +2024-08-25 05:36:31,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0 +2024-08-25 05:36:45,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37114.666666666664, ans=0.125 +2024-08-25 05:36:59,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=37168.0, ans=0.125 +2024-08-25 05:37:04,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=37168.0, ans=0.2 +2024-08-25 05:37:09,013 INFO [train.py:1114] (3/4) Epoch 3, batch 2000, loss[loss=0.2943, simple_loss=0.3121, pruned_loss=0.1008, ctc_loss=0.1875, over 19631.00 frames. ], tot_loss[loss=0.3377, simple_loss=0.3552, pruned_loss=0.1164, ctc_loss=0.2182, over 3854736.13 frames. ], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 05:37:10,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=15.0 +2024-08-25 05:37:35,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=37328.0, ans=0.002754782608695652 +2024-08-25 05:37:45,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=37381.333333333336, ans=0.125 +2024-08-25 05:37:57,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. 
limit=6.0
+2024-08-25 05:38:02,418 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.243e+02 2.650e+02 3.292e+02 1.299e+03, threshold=5.300e+02, percent-clipped=6.0
+2024-08-25 05:38:10,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=37434.666666666664, ans=15.0
+2024-08-25 05:38:13,916 INFO [train.py:1114] (3/4) Epoch 3, batch 2050, loss[loss=0.2891, simple_loss=0.315, pruned_loss=0.0966, ctc_loss=0.1752, over 19692.00 frames. ], tot_loss[loss=0.3357, simple_loss=0.3534, pruned_loss=0.1157, ctc_loss=0.2168, over 3850078.92 frames. ], batch size: 47, lr: 3.58e-02, grad_scale: 16.0
+2024-08-25 05:38:47,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.07 vs. limit=15.0
+2024-08-25 05:39:04,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=37701.333333333336, ans=0.025
+2024-08-25 05:39:07,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37701.333333333336, ans=0.125
+2024-08-25 05:39:40,803 INFO [train.py:1114] (3/4) Epoch 3, batch 2100, loss[loss=0.3438, simple_loss=0.3603, pruned_loss=0.1176, ctc_loss=0.2301, over 19758.00 frames. ], tot_loss[loss=0.335, simple_loss=0.3529, pruned_loss=0.1153, ctc_loss=0.2163, over 3857945.32 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0
+2024-08-25 05:40:00,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=37808.0, ans=0.002650434782608696
+2024-08-25 05:40:07,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37808.0, ans=0.125
+2024-08-25 05:40:15,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=37808.0, ans=0.0
+2024-08-25 05:40:42,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.72 vs. limit=15.0
+2024-08-25 05:40:45,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=37861.333333333336, ans=0.0026388405797101453
+2024-08-25 05:40:58,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.072e+02 2.352e+02 2.718e+02 4.903e+02, threshold=4.703e+02, percent-clipped=0.0
+2024-08-25 05:41:00,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37968.0, ans=0.125
+2024-08-25 05:41:05,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.61 vs. limit=22.5
+2024-08-25 05:41:05,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=37968.0, ans=0.125
+2024-08-25 05:41:10,091 INFO [train.py:1114] (3/4) Epoch 3, batch 2150, loss[loss=0.2877, simple_loss=0.324, pruned_loss=0.09031, ctc_loss=0.1769, over 19869.00 frames. ], tot_loss[loss=0.3338, simple_loss=0.352, pruned_loss=0.1148, ctc_loss=0.2152, over 3869275.29 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0
+2024-08-25 05:41:14,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=38021.333333333336, ans=0.2
+2024-08-25 05:41:18,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.32 vs. limit=22.5
+2024-08-25 05:41:20,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0
+2024-08-25 05:41:24,295 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.59 vs. limit=22.5
+2024-08-25 05:42:24,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38181.333333333336, ans=0.125
+2024-08-25 05:42:42,871 INFO [train.py:1114] (3/4) Epoch 3, batch 2200, loss[loss=0.3519, simple_loss=0.3714, pruned_loss=0.1207, ctc_loss=0.2276, over 19596.00 frames. ], tot_loss[loss=0.3331, simple_loss=0.3518, pruned_loss=0.1143, ctc_loss=0.2142, over 3867911.36 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0
+2024-08-25 05:42:43,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=38288.0, ans=0.0025460869565217398
+2024-08-25 05:42:52,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=38288.0, ans=0.015
+2024-08-25 05:43:02,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=38341.333333333336, ans=0.07
+2024-08-25 05:43:11,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38394.666666666664, ans=0.1
+2024-08-25 05:43:23,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38448.0, ans=0.0
+2024-08-25 05:43:29,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=38448.0, ans=0.09899494936611666
+2024-08-25 05:43:34,311 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.802e+02 2.197e+02 2.629e+02 2.994e+02 6.107e+02, threshold=5.259e+02, percent-clipped=1.0
+2024-08-25 05:43:51,063 INFO [train.py:1114] (3/4) Epoch 3, batch 2250, loss[loss=0.319, simple_loss=0.3472, pruned_loss=0.1057, ctc_loss=0.1986, over 19614.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3514, pruned_loss=0.1142, ctc_loss=0.2139, over 3868294.23 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0
+2024-08-25 05:43:51,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0
+2024-08-25 05:44:16,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.44 vs. limit=22.5
+2024-08-25 05:44:20,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0
+2024-08-25 05:44:34,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=38661.333333333336, ans=0.1
+2024-08-25 05:44:42,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=38661.333333333336, ans=0.0
+2024-08-25 05:44:50,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=38714.666666666664, ans=0.125
+2024-08-25 05:45:15,057 INFO [train.py:1114] (3/4) Epoch 3, batch 2300, loss[loss=0.2903, simple_loss=0.3166, pruned_loss=0.09609, ctc_loss=0.1796, over 19512.00 frames. ], tot_loss[loss=0.3326, simple_loss=0.3507, pruned_loss=0.1144, ctc_loss=0.2145, over 3860984.35 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0
+2024-08-25 05:45:15,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=38821.333333333336, ans=0.0
+2024-08-25 05:45:19,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0
+2024-08-25 05:45:23,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38821.333333333336, ans=0.1
+2024-08-25 05:46:12,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=38928.0, ans=0.2
+2024-08-25 05:46:21,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=38981.333333333336, ans=0.5
+2024-08-25 05:47:15,679 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.630e+02 2.233e+02 2.542e+02 3.133e+02 7.552e+02, threshold=5.083e+02, percent-clipped=3.0
+2024-08-25 05:47:23,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=39034.666666666664, ans=0.05
+2024-08-25 05:47:27,930 INFO [train.py:1114] (3/4) Epoch 3, batch 2350, loss[loss=0.3271, simple_loss=0.3545, pruned_loss=0.1114, ctc_loss=0.1921, over 19691.00 frames. ], tot_loss[loss=0.3334, simple_loss=0.3511, pruned_loss=0.1149, ctc_loss=0.215, over 3863350.47 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0
+2024-08-25 05:47:32,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=39088.0, ans=0.0
+2024-08-25 05:48:07,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=39248.0, ans=0.2
+2024-08-25 05:48:13,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39301.333333333336, ans=0.125
+2024-08-25 05:48:21,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=39301.333333333336, ans=0.0
+2024-08-25 05:48:24,937 INFO [train.py:1114] (3/4) Epoch 3, batch 2400, loss[loss=0.3348, simple_loss=0.3608, pruned_loss=0.113, ctc_loss=0.2069, over 19154.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.353, pruned_loss=0.1156, ctc_loss=0.216, over 3857217.27 frames. ], batch size: 71, lr: 3.54e-02, grad_scale: 32.0
+2024-08-25 05:48:47,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=39461.333333333336, ans=0.125
+2024-08-25 05:49:04,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39514.666666666664, ans=0.125
+2024-08-25 05:49:04,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=39514.666666666664, ans=0.025
+2024-08-25 05:49:10,300 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 2.241e+02 2.672e+02 3.161e+02 5.607e+02, threshold=5.344e+02, percent-clipped=4.0
+2024-08-25 05:49:26,439 INFO [train.py:1114] (3/4) Epoch 3, batch 2450, loss[loss=0.4617, simple_loss=0.4144, pruned_loss=0.1857, ctc_loss=0.344, over 13418.00 frames. ], tot_loss[loss=0.3446, simple_loss=0.3584, pruned_loss=0.1204, ctc_loss=0.2251, over 3731394.84 frames. ], batch size: 141, lr: 3.53e-02, grad_scale: 32.0
+2024-08-25 05:50:01,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.25 vs. limit=12.0
+2024-08-25 05:50:52,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.69 vs. limit=12.0
+2024-08-25 05:51:05,723 INFO [train.py:1114] (3/4) Epoch 4, batch 0, loss[loss=0.342, simple_loss=0.3501, pruned_loss=0.1227, ctc_loss=0.2215, over 19391.00 frames. ], tot_loss[loss=0.342, simple_loss=0.3501, pruned_loss=0.1227, ctc_loss=0.2215, over 19391.00 frames. ], batch size: 48, lr: 3.30e-02, grad_scale: 32.0
+2024-08-25 05:51:05,724 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-25 05:51:35,399 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2629, simple_loss=0.3337, pruned_loss=0.07032, ctc_loss=0.1284, over 944034.00 frames.
+2024-08-25 05:51:35,400 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB
+2024-08-25 05:52:07,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39936.0, ans=0.125
+2024-08-25 05:52:41,488 INFO [train.py:1114] (3/4) Epoch 4, batch 50, loss[loss=0.2953, simple_loss=0.3134, pruned_loss=0.1028, ctc_loss=0.1789, over 19726.00 frames. ], tot_loss[loss=0.337, simple_loss=0.3541, pruned_loss=0.1166, ctc_loss=0.2169, over 843670.46 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0
+2024-08-25 05:52:47,060 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.793e+02 2.147e+02 2.483e+02 2.920e+02 4.932e+02, threshold=4.967e+02, percent-clipped=0.0
+2024-08-25 05:53:07,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=40149.333333333336, ans=0.125
+2024-08-25 05:53:38,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=40202.666666666664, ans=0.125
+2024-08-25 05:53:57,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=40309.333333333336, ans=0.125
+2024-08-25 05:54:08,135 INFO [train.py:1114] (3/4) Epoch 4, batch 100, loss[loss=0.2934, simple_loss=0.3325, pruned_loss=0.0942, ctc_loss=0.1646, over 19704.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.355, pruned_loss=0.1156, ctc_loss=0.2164, over 1498193.89 frames. ], batch size: 51, lr: 3.29e-02, grad_scale: 32.0
+2024-08-25 05:54:08,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40362.666666666664, ans=0.1
+2024-08-25 05:54:09,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=40362.666666666664, ans=0.2
+2024-08-25 05:54:27,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=40362.666666666664, ans=0.125
+2024-08-25 05:54:35,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=40416.0, ans=0.2
+2024-08-25 05:55:14,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=40469.333333333336, ans=0.0
+2024-08-25 05:55:55,485 INFO [train.py:1114] (3/4) Epoch 4, batch 150, loss[loss=0.3126, simple_loss=0.3176, pruned_loss=0.1134, ctc_loss=0.2022, over 19710.00 frames. ], tot_loss[loss=0.331, simple_loss=0.3507, pruned_loss=0.1132, ctc_loss=0.2122, over 2026939.58 frames. ], batch size: 47, lr: 3.28e-02, grad_scale: 32.0
+2024-08-25 05:55:56,482 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.643e+02 2.033e+02 2.286e+02 2.661e+02 4.118e+02, threshold=4.571e+02, percent-clipped=0.0
+2024-08-25 05:56:13,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40682.666666666664, ans=0.125
+2024-08-25 05:56:20,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.41 vs. limit=15.0
+2024-08-25 05:56:22,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=40736.0, ans=0.125
+2024-08-25 05:56:31,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=40736.0, ans=0.2
+2024-08-25 05:56:49,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=40789.333333333336, ans=0.125
+2024-08-25 05:56:51,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=40789.333333333336, ans=0.2
+2024-08-25 05:56:51,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40789.333333333336, ans=0.125
+2024-08-25 05:56:52,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40842.666666666664, ans=0.125
+2024-08-25 05:57:00,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40842.666666666664, ans=0.125
+2024-08-25 05:57:01,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. limit=15.0
+2024-08-25 05:57:04,737 INFO [train.py:1114] (3/4) Epoch 4, batch 200, loss[loss=0.4021, simple_loss=0.395, pruned_loss=0.1498, ctc_loss=0.2743, over 18226.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3486, pruned_loss=0.1116, ctc_loss=0.2091, over 2434884.14 frames. ], batch size: 85, lr: 3.28e-02, grad_scale: 32.0
+2024-08-25 05:57:23,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=15.0
+2024-08-25 05:57:27,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.26 vs. limit=10.0
+2024-08-25 05:57:40,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=40949.333333333336, ans=0.125
+2024-08-25 05:57:42,844 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 05:58:06,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=41056.0, ans=0.125
+2024-08-25 05:58:51,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=41109.333333333336, ans=0.125
+2024-08-25 05:58:56,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=41109.333333333336, ans=0.125
+2024-08-25 05:59:03,061 INFO [train.py:1114] (3/4) Epoch 4, batch 250, loss[loss=0.3417, simple_loss=0.3625, pruned_loss=0.1184, ctc_loss=0.2103, over 19431.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3469, pruned_loss=0.1102, ctc_loss=0.2069, over 2755249.50 frames. ], batch size: 67, lr: 3.27e-02, grad_scale: 32.0
+2024-08-25 05:59:04,093 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.652e+02 2.098e+02 2.387e+02 2.939e+02 4.251e+02, threshold=4.774e+02, percent-clipped=0.0
+2024-08-25 05:59:26,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=41216.0, ans=0.125
+2024-08-25 05:59:34,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=41269.333333333336, ans=0.0
+2024-08-25 05:59:46,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=41269.333333333336, ans=0.125
+2024-08-25 05:59:48,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41322.666666666664, ans=0.125
+2024-08-25 05:59:54,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=41322.666666666664, ans=0.0
+2024-08-25 06:00:05,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41376.0, ans=0.1
+2024-08-25 06:00:10,352 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.93 vs. limit=10.0
+2024-08-25 06:00:14,169 INFO [train.py:1114] (3/4) Epoch 4, batch 300, loss[loss=0.3111, simple_loss=0.3348, pruned_loss=0.1047, ctc_loss=0.1952, over 19531.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3453, pruned_loss=0.1094, ctc_loss=0.2058, over 3000480.96 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0
+2024-08-25 06:00:54,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=41536.0, ans=0.125
+2024-08-25 06:00:54,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41536.0, ans=0.125
+2024-08-25 06:01:16,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=41642.666666666664, ans=0.0
+2024-08-25 06:01:19,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0
+2024-08-25 06:01:27,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=41696.0, ans=0.1
+2024-08-25 06:01:35,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41696.0, ans=0.1
+2024-08-25 06:01:36,621 INFO [train.py:1114] (3/4) Epoch 4, batch 350, loss[loss=0.2768, simple_loss=0.3085, pruned_loss=0.08896, ctc_loss=0.1678, over 19754.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3449, pruned_loss=0.1087, ctc_loss=0.2046, over 3189928.60 frames. ], batch size: 48, lr: 3.26e-02, grad_scale: 32.0
+2024-08-25 06:01:37,789 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 2.143e+02 2.517e+02 2.887e+02 6.595e+02, threshold=5.034e+02, percent-clipped=1.0
+2024-08-25 06:01:39,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=41696.0, ans=0.05
+2024-08-25 06:01:46,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41696.0, ans=0.1
+2024-08-25 06:01:49,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41749.333333333336, ans=0.125
+2024-08-25 06:02:06,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=41802.666666666664, ans=0.0
+2024-08-25 06:02:07,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41802.666666666664, ans=0.0
+2024-08-25 06:02:38,783 INFO [train.py:1114] (3/4) Epoch 4, batch 400, loss[loss=0.3066, simple_loss=0.3373, pruned_loss=0.1012, ctc_loss=0.1842, over 19504.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.3448, pruned_loss=0.1083, ctc_loss=0.2037, over 3342217.39 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0
+2024-08-25 06:02:48,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=41962.666666666664, ans=0.001747246376811595
+2024-08-25 06:03:06,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.02 vs. limit=22.5
+2024-08-25 06:03:14,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=42069.333333333336, ans=0.125
+2024-08-25 06:03:24,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=42122.666666666664, ans=0.125
+2024-08-25 06:04:04,056 INFO [train.py:1114] (3/4) Epoch 4, batch 450, loss[loss=0.351, simple_loss=0.3675, pruned_loss=0.1207, ctc_loss=0.2324, over 19615.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3449, pruned_loss=0.1087, ctc_loss=0.2041, over 3450482.58 frames. ], batch size: 55, lr: 3.25e-02, grad_scale: 32.0
+2024-08-25 06:04:06,524 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.107e+02 2.479e+02 2.897e+02 5.564e+02, threshold=4.958e+02, percent-clipped=2.0
+2024-08-25 06:04:12,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42229.333333333336, ans=0.125
+2024-08-25 06:04:13,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42229.333333333336, ans=0.1
+2024-08-25 06:04:32,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=42336.0, ans=0.125
+2024-08-25 06:04:36,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=42336.0, ans=0.125
+2024-08-25 06:04:39,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42336.0, ans=0.1
+2024-08-25 06:04:53,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=42336.0, ans=0.125
+2024-08-25 06:04:55,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=42389.333333333336, ans=0.0016544927536231869
+2024-08-25 06:05:02,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42389.333333333336, ans=0.125
+2024-08-25 06:05:03,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42389.333333333336, ans=0.1
+2024-08-25 06:05:15,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.60 vs. limit=10.0
+2024-08-25 06:05:32,244 INFO [train.py:1114] (3/4) Epoch 4, batch 500, loss[loss=0.3389, simple_loss=0.3669, pruned_loss=0.1133, ctc_loss=0.2106, over 19663.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3435, pruned_loss=0.1077, ctc_loss=0.2021, over 3545452.44 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 32.0
+2024-08-25 06:05:42,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=42496.0, ans=0.125
+2024-08-25 06:05:49,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.98 vs. limit=22.5
+2024-08-25 06:05:57,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=42602.666666666664, ans=0.0
+2024-08-25 06:06:00,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42602.666666666664, ans=0.1
+2024-08-25 06:06:28,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=42709.333333333336, ans=0.025
+2024-08-25 06:06:41,087 INFO [train.py:1114] (3/4) Epoch 4, batch 550, loss[loss=0.313, simple_loss=0.3414, pruned_loss=0.1038, ctc_loss=0.1923, over 19267.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3435, pruned_loss=0.1076, ctc_loss=0.202, over 3608723.52 frames. ], batch size: 71, lr: 3.24e-02, grad_scale: 16.0
+2024-08-25 06:06:44,779 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.027e+02 2.416e+02 2.881e+02 5.051e+02, threshold=4.833e+02, percent-clipped=1.0
+2024-08-25 06:06:45,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42762.666666666664, ans=0.1
+2024-08-25 06:06:46,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42762.666666666664, ans=0.125
+2024-08-25 06:06:55,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0
+2024-08-25 06:07:07,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=42869.333333333336, ans=0.0
+2024-08-25 06:07:17,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=42869.333333333336, ans=0.07
+2024-08-25 06:07:18,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.03 vs. limit=15.0
+2024-08-25 06:07:23,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=42922.666666666664, ans=0.2
+2024-08-25 06:07:40,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=42976.0, ans=0.2
+2024-08-25 06:07:41,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=42976.0, ans=0.2
+2024-08-25 06:07:42,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.78 vs. limit=6.0
+2024-08-25 06:07:50,586 INFO [train.py:1114] (3/4) Epoch 4, batch 600, loss[loss=0.3903, simple_loss=0.385, pruned_loss=0.1452, ctc_loss=0.263, over 19423.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3432, pruned_loss=0.107, ctc_loss=0.2011, over 3665691.44 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 16.0
+2024-08-25 06:07:52,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.09 vs. limit=5.0
+2024-08-25 06:07:53,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43029.333333333336, ans=0.125
+2024-08-25 06:08:15,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=43136.0, ans=0.0014921739130434788
+2024-08-25 06:08:43,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=43242.666666666664, ans=0.125
+2024-08-25 06:08:57,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43242.666666666664, ans=0.125
+2024-08-25 06:09:00,685 INFO [train.py:1114] (3/4) Epoch 4, batch 650, loss[loss=0.3069, simple_loss=0.342, pruned_loss=0.09763, ctc_loss=0.1915, over 19783.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3421, pruned_loss=0.1063, ctc_loss=0.1995, over 3716000.01 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 16.0
+2024-08-25 06:09:15,859 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.456e+02 2.140e+02 2.544e+02 3.023e+02 7.017e+02, threshold=5.088e+02, percent-clipped=9.0
+2024-08-25 06:09:23,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=43349.333333333336, ans=0.125
+2024-08-25 06:09:49,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43402.666666666664, ans=0.125
+2024-08-25 06:10:14,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=43509.333333333336, ans=0.2
+2024-08-25 06:10:16,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=43509.333333333336, ans=0.2
+2024-08-25 06:10:18,932 INFO [train.py:1114] (3/4) Epoch 4, batch 700, loss[loss=0.2677, simple_loss=0.3108, pruned_loss=0.08223, ctc_loss=0.1506, over 19732.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3427, pruned_loss=0.1066, ctc_loss=0.2006, over 3748854.21 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 16.0
+2024-08-25 06:10:27,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=43562.666666666664, ans=0.0
+2024-08-25 06:10:34,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.39 vs. limit=15.0
+2024-08-25 06:10:37,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=43616.0, ans=0.125
+2024-08-25 06:10:50,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=43669.333333333336, ans=0.125
+2024-08-25 06:10:50,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=43669.333333333336, ans=0.0
+2024-08-25 06:10:53,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.35 vs. limit=22.5
+2024-08-25 06:11:09,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.68 vs. limit=12.0
+2024-08-25 06:11:18,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43776.0, ans=0.125
+2024-08-25 06:11:21,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=43776.0, ans=0.2
+2024-08-25 06:11:23,834 INFO [train.py:1114] (3/4) Epoch 4, batch 750, loss[loss=0.3695, simple_loss=0.3745, pruned_loss=0.1322, ctc_loss=0.2506, over 19507.00 frames. ], tot_loss[loss=0.3169, simple_loss=0.3419, pruned_loss=0.1061, ctc_loss=0.1995, over 3775312.06 frames. ], batch size: 54, lr: 3.22e-02, grad_scale: 16.0
+2024-08-25 06:11:28,681 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.530e+02 2.141e+02 2.481e+02 2.931e+02 4.472e+02, threshold=4.962e+02, percent-clipped=0.0
+2024-08-25 06:11:39,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=43882.666666666664, ans=0.125
+2024-08-25 06:11:39,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0
+2024-08-25 06:11:52,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=43882.666666666664, ans=0.001329855072463769
+2024-08-25 06:12:05,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=43989.333333333336, ans=0.125
+2024-08-25 06:12:20,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=44042.666666666664, ans=0.2
+2024-08-25 06:12:27,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=44042.666666666664, ans=0.2
+2024-08-25 06:12:29,312 INFO [train.py:1114] (3/4) Epoch 4, batch 800, loss[loss=0.2853, simple_loss=0.3064, pruned_loss=0.09612, ctc_loss=0.18, over 19817.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3417, pruned_loss=0.106, ctc_loss=0.1994, over 3796139.58 frames. ], batch size: 49, lr: 3.21e-02, grad_scale: 32.0
+2024-08-25 06:12:29,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=44096.0, ans=0.0
+2024-08-25 06:12:39,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.54 vs. limit=6.0
+2024-08-25 06:12:50,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=44149.333333333336, ans=0.05
+2024-08-25 06:12:51,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=44149.333333333336, ans=0.125
+2024-08-25 06:12:55,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=44202.666666666664, ans=0.125
+2024-08-25 06:13:07,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=44256.0, ans=0.0012486956521739132
+2024-08-25 06:13:13,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=44256.0, ans=15.0
+2024-08-25 06:13:19,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=44309.333333333336, ans=0.125
+2024-08-25 06:13:28,037 INFO [train.py:1114] (3/4) Epoch 4, batch 850, loss[loss=0.3354, simple_loss=0.3603, pruned_loss=0.1127, ctc_loss=0.2128, over 19650.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3411, pruned_loss=0.1057, ctc_loss=0.1989, over 3815674.58 frames. ], batch size: 59, lr: 3.21e-02, grad_scale: 32.0
+2024-08-25 06:13:28,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.57 vs. limit=15.0
+2024-08-25 06:13:31,252 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.074e+02 2.402e+02 2.888e+02 5.555e+02, threshold=4.804e+02, percent-clipped=1.0
+2024-08-25 06:13:39,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44416.0, ans=0.1
+2024-08-25 06:13:51,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=44469.333333333336, ans=15.0
+2024-08-25 06:14:00,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=44469.333333333336, ans=0.125
+2024-08-25 06:14:03,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.39 vs. limit=10.0
+2024-08-25 06:14:09,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.20 vs. limit=15.0
+2024-08-25 06:14:11,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=44522.666666666664, ans=0.2
+2024-08-25 06:14:32,266 INFO [train.py:1114] (3/4) Epoch 4, batch 900, loss[loss=0.3161, simple_loss=0.332, pruned_loss=0.1102, ctc_loss=0.1999, over 19805.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.3421, pruned_loss=0.1066, ctc_loss=0.2006, over 3819998.93 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 32.0
+2024-08-25 06:14:33,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=44629.333333333336, ans=0.125
+2024-08-25 06:14:56,452 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:15:29,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=44842.666666666664, ans=0.07
+2024-08-25 06:15:35,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=44842.666666666664, ans=0.0
+2024-08-25 06:15:38,553 INFO [train.py:1114] (3/4) Epoch 4, batch 950, loss[loss=0.2728, simple_loss=0.315, pruned_loss=0.08352, ctc_loss=0.159, over 19522.00 frames. ], tot_loss[loss=0.318, simple_loss=0.3423, pruned_loss=0.1067, ctc_loss=0.2006, over 3822507.68 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 32.0
+2024-08-25 06:15:42,138 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.101e+02 2.364e+02 2.735e+02 6.196e+02, threshold=4.728e+02, percent-clipped=2.0
+2024-08-25 06:15:59,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.49 vs. limit=15.0
+2024-08-25 06:16:12,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=45002.666666666664, ans=0.125
+2024-08-25 06:16:21,072 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.79 vs. limit=15.0
+2024-08-25 06:16:21,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=45056.0, ans=0.125
+2024-08-25 06:16:35,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=45109.333333333336, ans=0.125
+2024-08-25 06:16:42,346 INFO [train.py:1114] (3/4) Epoch 4, batch 1000, loss[loss=0.3016, simple_loss=0.3237, pruned_loss=0.1024, ctc_loss=0.1866, over 19859.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3437, pruned_loss=0.1075, ctc_loss=0.202, over 3818724.57 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 32.0
+2024-08-25 06:17:06,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=45216.0, ans=0.0
+2024-08-25 06:17:48,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=45322.666666666664, ans=0.0010168115942028998
+2024-08-25 06:18:00,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=45376.0, ans=0.0
+2024-08-25 06:18:10,597 INFO [train.py:1114] (3/4) Epoch 4, batch 1050, loss[loss=0.348, simple_loss=0.3686, pruned_loss=0.1196, ctc_loss=0.2207, over 19842.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3424, pruned_loss=0.1069, ctc_loss=0.201, over 3824088.49 frames. ], batch size: 57, lr: 3.19e-02, grad_scale: 16.0
+2024-08-25 06:18:26,178 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.982e+02 2.200e+02 2.634e+02 5.388e+02, threshold=4.401e+02, percent-clipped=1.0
+2024-08-25 06:18:26,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45429.333333333336, ans=0.1
+2024-08-25 06:18:27,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45429.333333333336, ans=0.1
+2024-08-25 06:18:31,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=45429.333333333336, ans=0.125
+2024-08-25 06:18:41,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=45482.666666666664, ans=0.125
+2024-08-25 06:18:44,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.45 vs. limit=10.0
+2024-08-25 06:18:53,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0
+2024-08-25 06:19:04,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0
+2024-08-25 06:19:30,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=45642.666666666664, ans=0.0
+2024-08-25 06:19:31,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45642.666666666664, ans=0.1
+2024-08-25 06:19:36,308 INFO [train.py:1114] (3/4) Epoch 4, batch 1100, loss[loss=0.2936, simple_loss=0.3243, pruned_loss=0.09547, ctc_loss=0.1798, over 19586.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.342, pruned_loss=0.1062, ctc_loss=0.1998, over 3832440.39 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-25 06:19:39,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45696.0, ans=0.125
+2024-08-25 06:19:44,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=12.0
+2024-08-25 06:19:48,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=45696.0, ans=0.2
+2024-08-25 06:19:53,079 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=5.188e-03
+2024-08-25 06:19:57,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45749.333333333336, ans=0.125
+2024-08-25 06:20:09,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=15.0
+2024-08-25 06:20:47,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.23 vs. limit=6.0
+2024-08-25 06:20:48,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=45909.333333333336, ans=0.2
+2024-08-25 06:20:52,241 INFO [train.py:1114] (3/4) Epoch 4, batch 1150, loss[loss=0.3327, simple_loss=0.3432, pruned_loss=0.1168, ctc_loss=0.2211, over 19569.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3422, pruned_loss=0.1069, ctc_loss=0.2009, over 3830740.64 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-25 06:20:57,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.742e+02 2.122e+02 2.390e+02 2.706e+02 4.199e+02, threshold=4.779e+02, percent-clipped=0.0
+2024-08-25 06:21:31,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0
+2024-08-25 06:21:32,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=46069.333333333336, ans=0.025
+2024-08-25 06:21:37,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46122.666666666664, ans=0.125
+2024-08-25 06:21:43,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=46122.666666666664, ans=0.2
+2024-08-25 06:21:59,995 INFO [train.py:1114] (3/4) Epoch 4, batch 1200, loss[loss=0.3151, simple_loss=0.3409, pruned_loss=0.1047, ctc_loss=0.1995, over 19841.00 frames. ], tot_loss[loss=0.3201, simple_loss=0.3436, pruned_loss=0.1078, ctc_loss=0.2025, over 3825768.46 frames. ], batch size: 57, lr: 3.17e-02, grad_scale: 32.0
+2024-08-25 06:22:06,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=46229.333333333336, ans=0.125
+2024-08-25 06:22:14,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.35 vs. limit=5.0
+2024-08-25 06:22:38,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=46336.0, ans=0.125
+2024-08-25 06:22:55,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0
+2024-08-25 06:23:21,322 INFO [train.py:1114] (3/4) Epoch 4, batch 1250, loss[loss=0.3157, simple_loss=0.3496, pruned_loss=0.1031, ctc_loss=0.1892, over 19502.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.344, pruned_loss=0.1075, ctc_loss=0.2018, over 3844202.12 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 32.0
+2024-08-25 06:23:26,217 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.962e+02 2.225e+02 2.468e+02 3.508e+02, threshold=4.451e+02, percent-clipped=0.0
+2024-08-25 06:23:30,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.08 vs. limit=6.0
+2024-08-25 06:23:35,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=46496.0, ans=0.2
+2024-08-25 06:23:50,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=46549.333333333336, ans=0.125
+2024-08-25 06:24:12,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=46602.666666666664, ans=0.0
+2024-08-25 06:24:36,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=46709.333333333336, ans=0.0007153623188405796
+2024-08-25 06:24:39,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=46709.333333333336, ans=0.125
+2024-08-25 06:24:43,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=46709.333333333336, ans=0.04949747468305833
+2024-08-25 06:24:48,924 INFO [train.py:1114] (3/4) Epoch 4, batch 1300, loss[loss=0.3391, simple_loss=0.359, pruned_loss=0.1157, ctc_loss=0.2198, over 18910.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3427, pruned_loss=0.1065, ctc_loss=0.2002, over 3847151.65 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0
+2024-08-25 06:24:50,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=46762.666666666664, ans=0.125
+2024-08-25 06:25:15,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.47 vs. limit=15.0
+2024-08-25 06:25:26,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=46922.666666666664, ans=0.2
+2024-08-25 06:25:43,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=46976.0, ans=0.125
+2024-08-25 06:25:52,878 INFO [train.py:1114] (3/4) Epoch 4, batch 1350, loss[loss=0.3235, simple_loss=0.348, pruned_loss=0.1078, ctc_loss=0.2083, over 19767.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.3424, pruned_loss=0.1061, ctc_loss=0.1991, over 3859051.87 frames. ], batch size: 54, lr: 3.16e-02, grad_scale: 32.0
+2024-08-25 06:26:07,746 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 2.269e+02 2.560e+02 3.229e+02 4.886e+02, threshold=5.120e+02, percent-clipped=5.0
+2024-08-25 06:26:26,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.11 vs. limit=15.0
+2024-08-25 06:26:32,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47082.666666666664, ans=0.1
+2024-08-25 06:26:32,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=47082.666666666664, ans=0.025
+2024-08-25 06:26:38,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=47082.666666666664, ans=0.125
+2024-08-25 06:26:41,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.12 vs. limit=10.0
+2024-08-25 06:26:53,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47136.0, ans=0.125
+2024-08-25 06:26:53,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0
+2024-08-25 06:27:11,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=47242.666666666664, ans=0.1
+2024-08-25 06:27:20,738 INFO [train.py:1114] (3/4) Epoch 4, batch 1400, loss[loss=0.2977, simple_loss=0.3138, pruned_loss=0.1043, ctc_loss=0.1827, over 19683.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3418, pruned_loss=0.1056, ctc_loss=0.1987, over 3865983.97 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0
+2024-08-25 06:27:39,127 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:27:45,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=22.5
+2024-08-25 06:28:02,597 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.37 vs. limit=22.5
+2024-08-25 06:28:10,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=47402.666666666664, ans=0.025
+2024-08-25 06:28:22,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=47456.0, ans=0.025
+2024-08-25 06:28:36,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=47509.333333333336, ans=0.125
+2024-08-25 06:28:43,704 INFO [train.py:1114] (3/4) Epoch 4, batch 1450, loss[loss=0.2829, simple_loss=0.3361, pruned_loss=0.08215, ctc_loss=0.1632, over 19707.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3435, pruned_loss=0.1069, ctc_loss=0.2012, over 3862963.78 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0
+2024-08-25 06:28:45,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=47562.666666666664, ans=0.0
+2024-08-25 06:28:45,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=47562.666666666664, ans=0.0005298550724637686
+2024-08-25 06:28:48,584 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.026e+02 2.327e+02 2.659e+02 4.329e+02, threshold=4.654e+02, percent-clipped=0.0
+2024-08-25 06:29:08,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.18 vs. limit=15.0
+2024-08-25 06:29:10,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.06 vs. limit=22.5
+2024-08-25 06:29:11,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=12.0
+2024-08-25 06:29:28,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=47722.666666666664, ans=0.125
+2024-08-25 06:29:38,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.90 vs. limit=22.5
+2024-08-25 06:29:44,347 INFO [train.py:1114] (3/4) Epoch 4, batch 1500, loss[loss=0.2726, simple_loss=0.3284, pruned_loss=0.07739, ctc_loss=0.1549, over 19585.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3431, pruned_loss=0.1064, ctc_loss=0.2004, over 3862305.15 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 16.0
+2024-08-25 06:29:46,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=47829.333333333336, ans=0.00047188405797101395
+2024-08-25 06:30:01,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=47882.666666666664, ans=0.2
+2024-08-25 06:30:03,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47882.666666666664, ans=0.1
+2024-08-25 06:30:05,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.49 vs. limit=15.0
+2024-08-25 06:30:13,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=15.0
+2024-08-25 06:30:38,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=48042.666666666664, ans=0.1
+2024-08-25 06:31:30,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=48042.666666666664, ans=0.09899494936611666
+2024-08-25 06:31:31,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=48042.666666666664, ans=0.2
+2024-08-25 06:31:38,021 INFO [train.py:1114] (3/4) Epoch 4, batch 1550, loss[loss=0.3299, simple_loss=0.3651, pruned_loss=0.1084, ctc_loss=0.195, over 19628.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.3438, pruned_loss=0.1072, ctc_loss=0.2017, over 3846711.23 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 16.0
+2024-08-25 06:31:49,992 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 2.013e+02 2.262e+02 2.770e+02 1.090e+03, threshold=4.525e+02, percent-clipped=1.0
+2024-08-25 06:31:59,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=12.0
+2024-08-25 06:32:14,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=48149.333333333336, ans=0.125
+2024-08-25 06:32:15,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=48149.333333333336, ans=0.125
+2024-08-25 06:32:24,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=48202.666666666664, ans=0.125
+2024-08-25 06:32:24,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=48202.666666666664, ans=0.125
+2024-08-25 06:32:33,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48256.0, ans=0.125
+2024-08-25 06:32:33,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=48256.0, ans=0.125
+2024-08-25 06:33:21,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48362.666666666664, ans=0.125
+2024-08-25 06:33:26,250 INFO [train.py:1114] (3/4) Epoch 4, batch 1600, loss[loss=0.2705, simple_loss=0.3207, pruned_loss=0.07922, ctc_loss=0.1548, over 19850.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3434, pruned_loss=0.1072, ctc_loss=0.2014, over 3836199.50 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0
+2024-08-25 06:33:32,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=48362.666666666664, ans=0.125
+2024-08-25 06:33:52,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.79 vs. limit=15.0
+2024-08-25 06:33:59,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.38 vs. limit=22.5
+2024-08-25 06:34:13,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48469.333333333336, ans=0.125
+2024-08-25 06:35:15,009 INFO [train.py:1114] (3/4) Epoch 4, batch 1650, loss[loss=0.3283, simple_loss=0.3499, pruned_loss=0.1112, ctc_loss=0.2109, over 19672.00 frames. ], tot_loss[loss=0.319, simple_loss=0.343, pruned_loss=0.1072, ctc_loss=0.2015, over 3832501.89 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0
+2024-08-25 06:35:21,181 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.567e+02 2.079e+02 2.506e+02 2.996e+02 5.422e+02, threshold=5.011e+02, percent-clipped=2.0
+2024-08-25 06:36:19,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=48789.333333333336, ans=0.125
+2024-08-25 06:36:23,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=48789.333333333336, ans=0.0
+2024-08-25 06:36:37,432 INFO [train.py:1114] (3/4) Epoch 4, batch 1700, loss[loss=0.2967, simple_loss=0.3166, pruned_loss=0.09946, ctc_loss=0.1948, over 19656.00 frames. ], tot_loss[loss=0.318, simple_loss=0.3424, pruned_loss=0.1067, ctc_loss=0.2005, over 3846039.49 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0
+2024-08-25 06:37:03,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=48949.333333333336, ans=0.125
+2024-08-25 06:37:15,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49002.666666666664, ans=0.125
+2024-08-25 06:37:19,048 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.47 vs. limit=22.5
+2024-08-25 06:37:26,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49002.666666666664, ans=0.125
+2024-08-25 06:37:39,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=49056.0, ans=0.0
+2024-08-25 06:37:43,144 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:37:58,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=49056.0, ans=0.0
+2024-08-25 06:38:18,580 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:38:25,885 INFO [train.py:1114] (3/4) Epoch 4, batch 1750, loss[loss=0.3124, simple_loss=0.3224, pruned_loss=0.1099, ctc_loss=0.2061, over 19646.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3411, pruned_loss=0.1059, ctc_loss=0.1991, over 3852194.08 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0
+2024-08-25 06:38:33,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.628e+02 1.987e+02 2.278e+02 2.713e+02 5.908e+02, threshold=4.555e+02, percent-clipped=1.0
+2024-08-25 06:38:55,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.17 vs. limit=15.0
+2024-08-25 06:38:56,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=49269.333333333336, ans=0.035
+2024-08-25 06:39:31,714 INFO [train.py:1114] (3/4) Epoch 4, batch 1800, loss[loss=0.3047, simple_loss=0.3404, pruned_loss=0.0989, ctc_loss=0.178, over 19617.00 frames. ], tot_loss[loss=0.3165, simple_loss=0.3413, pruned_loss=0.106, ctc_loss=0.199, over 3854640.81 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0
+2024-08-25 06:39:33,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=49429.333333333336, ans=0.025
+2024-08-25 06:39:34,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49429.333333333336, ans=0.125
+2024-08-25 06:40:18,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=49536.0, ans=0.2
+2024-08-25 06:40:25,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=49536.0, ans=0.04949747468305833
+2024-08-25 06:40:37,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=15.0
+2024-08-25 06:40:39,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49589.333333333336, ans=0.125
+2024-08-25 06:40:54,714 INFO [train.py:1114] (3/4) Epoch 4, batch 1850, loss[loss=0.3613, simple_loss=0.3767, pruned_loss=0.1256, ctc_loss=0.2367, over 19608.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3406, pruned_loss=0.1054, ctc_loss=0.1977, over 3857733.12 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0
+2024-08-25 06:41:01,669 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 2.149e+02 2.307e+02 2.574e+02 4.619e+02, threshold=4.614e+02, percent-clipped=1.0
+2024-08-25 06:41:12,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.96 vs. limit=22.5
+2024-08-25 06:41:23,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=49802.666666666664, ans=0.0
+2024-08-25 06:41:59,174 INFO [train.py:1114] (3/4) Epoch 4, batch 1900, loss[loss=0.3494, simple_loss=0.3701, pruned_loss=0.1189, ctc_loss=0.2271, over 19650.00 frames. ], tot_loss[loss=0.3158, simple_loss=0.3414, pruned_loss=0.1055, ctc_loss=0.1978, over 3862698.23 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 32.0
+2024-08-25 06:42:58,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=50069.333333333336, ans=0.2
+2024-08-25 06:43:00,435 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:43:10,930 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:43:37,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=50176.0, ans=0.2
+2024-08-25 06:43:39,882 INFO [train.py:1114] (3/4) Epoch 4, batch 1950, loss[loss=0.3153, simple_loss=0.3383, pruned_loss=0.1064, ctc_loss=0.1987, over 19594.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3426, pruned_loss=0.1055, ctc_loss=0.1976, over 3871808.73 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 32.0
+2024-08-25 06:43:45,596 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.759e+02 2.065e+02 2.259e+02 2.635e+02 4.732e+02, threshold=4.517e+02, percent-clipped=1.0
+2024-08-25 06:43:51,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=50282.666666666664, ans=0.125
+2024-08-25 06:43:55,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=50282.666666666664, ans=0.125
+2024-08-25 06:44:13,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=50389.333333333336, ans=0.125
+2024-08-25 06:44:27,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.33 vs. limit=15.0
+2024-08-25 06:44:42,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50496.0, ans=0.125
+2024-08-25 06:44:48,799 INFO [train.py:1114] (3/4) Epoch 4, batch 2000, loss[loss=0.264, simple_loss=0.2976, pruned_loss=0.08286, ctc_loss=0.1616, over 19633.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3427, pruned_loss=0.1058, ctc_loss=0.1982, over 3856302.96 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0
+2024-08-25 06:44:54,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=50496.0, ans=0.0
+2024-08-25 06:45:57,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=50602.666666666664, ans=0.025
+2024-08-25 06:46:02,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.18 vs. limit=22.5
+2024-08-25 06:46:12,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=50656.0, ans=0.125
+2024-08-25 06:46:13,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.33 vs. limit=15.0
+2024-08-25 06:46:35,030 INFO [train.py:1114] (3/4) Epoch 4, batch 2050, loss[loss=0.3002, simple_loss=0.3249, pruned_loss=0.1006, ctc_loss=0.1861, over 19726.00 frames. ], tot_loss[loss=0.3165, simple_loss=0.3418, pruned_loss=0.106, ctc_loss=0.1979, over 3852633.65 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 32.0
+2024-08-25 06:46:45,625 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 2.046e+02 2.338e+02 2.720e+02 4.537e+02, threshold=4.675e+02, percent-clipped=1.0
+2024-08-25 06:46:53,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=50816.0, ans=0.125
+2024-08-25 06:46:55,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50816.0, ans=0.125
+2024-08-25 06:47:12,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=50869.333333333336, ans=0.0
+2024-08-25 06:47:37,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50976.0, ans=0.1
+2024-08-25 06:47:44,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=50976.0, ans=0.025
+2024-08-25 06:47:47,562 INFO [train.py:1114] (3/4) Epoch 4, batch 2100, loss[loss=0.3393, simple_loss=0.3585, pruned_loss=0.1176, ctc_loss=0.2119, over 19752.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3406, pruned_loss=0.1051, ctc_loss=0.1967, over 3859963.60 frames.
], batch size: 54, lr: 3.08e-02, grad_scale: 32.0 +2024-08-25 06:47:51,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=51029.333333333336, ans=0.125 +2024-08-25 06:48:05,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=51029.333333333336, ans=0.125 +2024-08-25 06:48:39,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=51082.666666666664, ans=0.2 +2024-08-25 06:49:03,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51189.333333333336, ans=0.1 +2024-08-25 06:49:06,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=51189.333333333336, ans=0.0 +2024-08-25 06:49:08,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.60 vs. limit=15.0 +2024-08-25 06:49:17,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=51242.666666666664, ans=0.0 +2024-08-25 06:49:45,533 INFO [train.py:1114] (3/4) Epoch 4, batch 2150, loss[loss=0.3061, simple_loss=0.3328, pruned_loss=0.1018, ctc_loss=0.1898, over 19850.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3394, pruned_loss=0.1045, ctc_loss=0.1958, over 3870785.76 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0 +2024-08-25 06:49:54,448 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.035e+02 2.305e+02 2.639e+02 4.596e+02, threshold=4.610e+02, percent-clipped=0.0 +2024-08-25 06:50:41,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=51402.666666666664, ans=0.1 +2024-08-25 06:51:12,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=51509.333333333336, ans=0.125 +2024-08-25 06:51:14,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=51562.666666666664, ans=0.2 +2024-08-25 06:51:15,393 INFO [train.py:1114] (3/4) Epoch 4, batch 2200, loss[loss=0.3651, simple_loss=0.3702, pruned_loss=0.1319, ctc_loss=0.2403, over 19595.00 frames. ], tot_loss[loss=0.3129, simple_loss=0.3393, pruned_loss=0.1042, ctc_loss=0.1953, over 3868547.89 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0 +2024-08-25 06:51:26,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=51616.0, ans=0.125 +2024-08-25 06:51:34,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=51616.0, ans=0.0 +2024-08-25 06:51:45,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.93 vs. limit=22.5 +2024-08-25 06:51:53,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=51669.333333333336, ans=0.125 +2024-08-25 06:52:05,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.55 vs. 
limit=15.0 +2024-08-25 06:52:18,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=51776.0, ans=0.0 +2024-08-25 06:52:24,910 INFO [train.py:1114] (3/4) Epoch 4, batch 2250, loss[loss=0.3042, simple_loss=0.3396, pruned_loss=0.09705, ctc_loss=0.1869, over 19616.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3392, pruned_loss=0.1039, ctc_loss=0.195, over 3868291.30 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0 +2024-08-25 06:52:31,994 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.164e+02 2.622e+02 3.263e+02 6.940e+02, threshold=5.245e+02, percent-clipped=2.0 +2024-08-25 06:52:32,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=51829.333333333336, ans=0.025 +2024-08-25 06:52:50,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=51936.0, ans=0.125 +2024-08-25 06:52:53,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-08-25 06:52:57,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=51936.0, ans=0.2 +2024-08-25 06:52:58,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51989.333333333336, ans=0.1 +2024-08-25 06:52:59,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=51989.333333333336, ans=0.125 +2024-08-25 06:53:00,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=51989.333333333336, ans=0.125 +2024-08-25 06:53:05,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=51989.333333333336, ans=0.125 +2024-08-25 06:53:09,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=51989.333333333336, ans=0.0 +2024-08-25 06:53:10,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52042.666666666664, ans=0.125 +2024-08-25 06:53:15,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=52042.666666666664, ans=0.125 +2024-08-25 06:53:26,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=52042.666666666664, ans=0.1 +2024-08-25 06:53:27,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=52042.666666666664, ans=0.0 +2024-08-25 06:53:30,931 INFO [train.py:1114] (3/4) Epoch 4, batch 2300, loss[loss=0.2712, simple_loss=0.3088, pruned_loss=0.08436, ctc_loss=0.1619, over 19494.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3381, pruned_loss=0.1035, ctc_loss=0.1944, over 3861732.83 frames. 
], batch size: 49, lr: 3.06e-02, grad_scale: 32.0 +2024-08-25 06:53:41,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=52096.0, ans=0.0 +2024-08-25 06:53:44,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=52096.0, ans=0.05 +2024-08-25 06:54:15,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52256.0, ans=0.125 +2024-08-25 06:54:23,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=52256.0, ans=0.0 +2024-08-25 06:54:33,424 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.08 vs. limit=6.0 +2024-08-25 06:54:53,355 INFO [train.py:1114] (3/4) Epoch 4, batch 2350, loss[loss=0.3277, simple_loss=0.3614, pruned_loss=0.1087, ctc_loss=0.1915, over 19687.00 frames. ], tot_loss[loss=0.311, simple_loss=0.3377, pruned_loss=0.1033, ctc_loss=0.1939, over 3864397.32 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0 +2024-08-25 06:54:55,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=52362.666666666664, ans=15.0 +2024-08-25 06:54:58,714 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.121e+02 2.497e+02 3.048e+02 4.745e+02, threshold=4.995e+02, percent-clipped=0.0 +2024-08-25 06:55:38,782 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.93 vs. limit=10.0 +2024-08-25 06:56:03,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52469.333333333336, ans=0.0 +2024-08-25 06:59:35,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=52522.666666666664, ans=0.0 +2024-08-25 07:00:47,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=52576.0, ans=0.125 +2024-08-25 07:00:48,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=52576.0, ans=15.0 +2024-08-25 07:07:21,823 INFO [train.py:1114] (3/4) Epoch 4, batch 2400, loss[loss=0.2933, simple_loss=0.3336, pruned_loss=0.0911, ctc_loss=0.177, over 19311.00 frames. ], tot_loss[loss=0.3141, simple_loss=0.3404, pruned_loss=0.1046, ctc_loss=0.1962, over 3858609.02 frames. 
], batch size: 71, lr: 3.05e-02, grad_scale: 32.0 +2024-08-25 07:09:08,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=52629.333333333336, ans=0.125 +2024-08-25 07:19:37,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52736.0, ans=0.1 +2024-08-25 07:21:10,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=52736.0, ans=0.0 +2024-08-25 07:21:10,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=52736.0, ans=0.125 +2024-08-25 07:21:52,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=52736.0, ans=0.0 +2024-08-25 07:33:05,323 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 07:34:58,797 INFO [train.py:1114] (3/4) Epoch 4, batch 2450, loss[loss=0.3799, simple_loss=0.3676, pruned_loss=0.1421, ctc_loss=0.2703, over 13352.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3454, pruned_loss=0.109, ctc_loss=0.2046, over 3729049.40 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0 +2024-08-25 07:36:27,118 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.701e+02 2.096e+02 2.355e+02 2.735e+02 5.246e+02, threshold=4.710e+02, percent-clipped=1.0 +2024-08-25 07:37:21,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=52949.333333333336, ans=0.2 +2024-08-25 07:40:48,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53002.666666666664, ans=0.125 +2024-08-25 07:43:06,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=53002.666666666664, ans=0.125 +2024-08-25 07:46:30,891 INFO [train.py:1114] (3/4) Epoch 5, batch 0, loss[loss=0.3131, simple_loss=0.3271, pruned_loss=0.1096, ctc_loss=0.1994, over 19433.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3271, pruned_loss=0.1096, ctc_loss=0.1994, over 19433.00 frames. ], batch size: 48, lr: 2.83e-02, grad_scale: 32.0 +2024-08-25 07:46:30,891 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 07:49:02,123 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.2543, simple_loss=0.3259, pruned_loss=0.06691, ctc_loss=0.1221, over 944034.00 frames. +2024-08-25 07:49:02,124 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-25 07:54:37,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=53157.333333333336, ans=0.0 +2024-08-25 07:57:03,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=53210.666666666664, ans=0.2 +2024-08-25 07:58:10,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=53210.666666666664, ans=0.125 +2024-08-25 07:59:10,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.00 vs. 
limit=12.0 +2024-08-25 08:00:17,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53317.333333333336, ans=0.0 +2024-08-25 08:01:56,939 INFO [train.py:1114] (3/4) Epoch 5, batch 50, loss[loss=0.2714, simple_loss=0.3019, pruned_loss=0.08808, ctc_loss=0.1616, over 19704.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3418, pruned_loss=0.106, ctc_loss=0.1996, over 843823.62 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0 +2024-08-25 08:03:51,548 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 1.984e+02 2.202e+02 2.522e+02 4.045e+02, threshold=4.404e+02, percent-clipped=0.0 +2024-08-25 08:04:58,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=15.0 +2024-08-25 08:05:13,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=53477.333333333336, ans=0.2 +2024-08-25 08:05:40,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=53477.333333333336, ans=0.025 +2024-08-25 08:07:22,869 INFO [train.py:1114] (3/4) Epoch 5, batch 100, loss[loss=0.281, simple_loss=0.3208, pruned_loss=0.08716, ctc_loss=0.1672, over 19698.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.3418, pruned_loss=0.1044, ctc_loss=0.1964, over 1498517.54 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0 +2024-08-25 08:07:24,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=53637.333333333336, ans=0.09899494936611666 +2024-08-25 08:08:14,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53690.666666666664, ans=0.125 +2024-08-25 08:08:27,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53690.666666666664, ans=0.1 +2024-08-25 08:08:44,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=53744.0, ans=0.0 +2024-08-25 08:08:46,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=53744.0, ans=0.125 +2024-08-25 08:09:42,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.06 vs. limit=15.0 +2024-08-25 08:09:54,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=53850.666666666664, ans=0.125 +2024-08-25 08:10:03,748 INFO [train.py:1114] (3/4) Epoch 5, batch 150, loss[loss=0.2503, simple_loss=0.2959, pruned_loss=0.07488, ctc_loss=0.1374, over 19709.00 frames. ], tot_loss[loss=0.31, simple_loss=0.3383, pruned_loss=0.1024, ctc_loss=0.1926, over 2027402.71 frames. 
], batch size: 47, lr: 2.82e-02, grad_scale: 32.0 +2024-08-25 08:10:09,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=53904.0, ans=0.0 +2024-08-25 08:10:31,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=53957.333333333336, ans=0.0 +2024-08-25 08:10:40,320 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 2.115e+02 2.389e+02 2.764e+02 4.531e+02, threshold=4.777e+02, percent-clipped=1.0 +2024-08-25 08:10:42,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.93 vs. limit=6.0 +2024-08-25 08:11:09,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=54010.666666666664, ans=0.05 +2024-08-25 08:11:34,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=54064.0, ans=0.2 +2024-08-25 08:12:01,138 INFO [train.py:1114] (3/4) Epoch 5, batch 200, loss[loss=0.3733, simple_loss=0.3809, pruned_loss=0.1349, ctc_loss=0.2395, over 18289.00 frames. ], tot_loss[loss=0.3079, simple_loss=0.3367, pruned_loss=0.1015, ctc_loss=0.1904, over 2435589.46 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0 +2024-08-25 08:15:43,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=54384.0, ans=0.0 +2024-08-25 08:15:56,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=54384.0, ans=0.015 +2024-08-25 08:15:58,937 INFO [train.py:1114] (3/4) Epoch 5, batch 250, loss[loss=0.3509, simple_loss=0.3635, pruned_loss=0.1248, ctc_loss=0.2218, over 19386.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.335, pruned_loss=0.1001, ctc_loss=0.188, over 2755462.74 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0 +2024-08-25 08:16:47,937 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 1.969e+02 2.164e+02 2.373e+02 3.326e+02, threshold=4.328e+02, percent-clipped=0.0 +2024-08-25 08:16:49,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=54490.666666666664, ans=0.125 +2024-08-25 08:16:59,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=54544.0, ans=0.0 +2024-08-25 08:17:02,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=54544.0, ans=0.0 +2024-08-25 08:17:09,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=54597.333333333336, ans=0.0 +2024-08-25 08:17:26,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=54650.666666666664, ans=0.125 +2024-08-25 08:17:32,568 INFO [train.py:1114] (3/4) Epoch 5, batch 300, loss[loss=0.3492, simple_loss=0.3663, pruned_loss=0.1215, ctc_loss=0.2229, over 19483.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3345, pruned_loss=0.09987, ctc_loss=0.1873, over 2998701.36 frames. 
], batch size: 61, lr: 2.81e-02, grad_scale: 32.0 +2024-08-25 08:17:58,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54810.666666666664, ans=0.1 +2024-08-25 08:18:38,534 INFO [train.py:1114] (3/4) Epoch 5, batch 350, loss[loss=0.3095, simple_loss=0.324, pruned_loss=0.1078, ctc_loss=0.1987, over 19765.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.335, pruned_loss=0.1003, ctc_loss=0.1882, over 3188431.96 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 16.0 +2024-08-25 08:18:38,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=54970.666666666664, ans=0.125 +2024-08-25 08:18:53,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=55024.0, ans=0.125 +2024-08-25 08:19:00,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55024.0, ans=0.0 +2024-08-25 08:19:08,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55024.0, ans=0.125 +2024-08-25 08:19:10,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-08-25 08:19:10,799 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.967e+02 2.265e+02 2.794e+02 4.039e+02, threshold=4.529e+02, percent-clipped=0.0 +2024-08-25 08:19:51,988 INFO [train.py:1114] (3/4) Epoch 5, batch 400, loss[loss=0.3323, simple_loss=0.3573, pruned_loss=0.1112, ctc_loss=0.2122, over 19493.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3345, pruned_loss=0.09981, ctc_loss=0.1878, over 3340848.49 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0 +2024-08-25 08:19:52,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=55237.333333333336, ans=0.125 +2024-08-25 08:20:14,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=55290.666666666664, ans=0.125 +2024-08-25 08:20:28,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=55344.0, ans=0.125 +2024-08-25 08:21:10,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=55450.666666666664, ans=0.125 +2024-08-25 08:21:27,044 INFO [train.py:1114] (3/4) Epoch 5, batch 450, loss[loss=0.3558, simple_loss=0.3694, pruned_loss=0.1245, ctc_loss=0.2335, over 19616.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3348, pruned_loss=0.1003, ctc_loss=0.1885, over 3449345.08 frames. 
], batch size: 55, lr: 2.79e-02, grad_scale: 32.0 +2024-08-25 08:21:47,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.640e+02 2.008e+02 2.249e+02 2.774e+02 4.428e+02, threshold=4.498e+02, percent-clipped=0.0 +2024-08-25 08:21:55,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55610.666666666664, ans=0.0 +2024-08-25 08:22:01,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=55664.0, ans=0.125 +2024-08-25 08:22:01,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=55664.0, ans=0.0 +2024-08-25 08:22:02,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=55664.0, ans=0.2 +2024-08-25 08:22:04,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=55664.0, ans=0.09899494936611666 +2024-08-25 08:22:21,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0 +2024-08-25 08:22:32,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=55717.333333333336, ans=0.04949747468305833 +2024-08-25 08:22:58,217 INFO [train.py:1114] (3/4) Epoch 5, batch 500, loss[loss=0.3398, simple_loss=0.3697, pruned_loss=0.1133, ctc_loss=0.2082, over 19658.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.3333, pruned_loss=0.099, ctc_loss=0.1859, over 3544776.48 frames. ], batch size: 63, lr: 2.79e-02, grad_scale: 32.0 +2024-08-25 08:23:14,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=55770.666666666664, ans=0.0 +2024-08-25 08:23:14,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55770.666666666664, ans=0.0 +2024-08-25 08:23:22,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=55824.0, ans=0.0 +2024-08-25 08:23:51,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=55877.333333333336, ans=0.2 +2024-08-25 08:23:52,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=55877.333333333336, ans=0.035 +2024-08-25 08:24:00,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=55930.666666666664, ans=0.2 +2024-08-25 08:24:11,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=55984.0, ans=0.95 +2024-08-25 08:24:21,967 INFO [train.py:1114] (3/4) Epoch 5, batch 550, loss[loss=0.289, simple_loss=0.3317, pruned_loss=0.08991, ctc_loss=0.1661, over 19241.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.3333, pruned_loss=0.09896, ctc_loss=0.1862, over 3607987.84 frames. 
], batch size: 71, lr: 2.78e-02, grad_scale: 32.0 +2024-08-25 08:24:47,069 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.533e+02 1.991e+02 2.247e+02 2.867e+02 6.260e+02, threshold=4.494e+02, percent-clipped=1.0 +2024-08-25 08:24:48,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=56144.0, ans=0.125 +2024-08-25 08:25:26,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56250.666666666664, ans=0.125 +2024-08-25 08:25:37,713 INFO [train.py:1114] (3/4) Epoch 5, batch 600, loss[loss=0.3262, simple_loss=0.3538, pruned_loss=0.109, ctc_loss=0.2014, over 19442.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3328, pruned_loss=0.09811, ctc_loss=0.1845, over 3666424.63 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 32.0 +2024-08-25 08:25:52,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=56304.0, ans=0.125 +2024-08-25 08:26:03,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56357.333333333336, ans=0.125 +2024-08-25 08:26:06,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=56410.666666666664, ans=0.2 +2024-08-25 08:26:21,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.87 vs. limit=22.5 +2024-08-25 08:26:38,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=56517.333333333336, ans=0.125 +2024-08-25 08:26:47,451 INFO [train.py:1114] (3/4) Epoch 5, batch 650, loss[loss=0.2607, simple_loss=0.3107, pruned_loss=0.0764, ctc_loss=0.1446, over 19771.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3315, pruned_loss=0.09727, ctc_loss=0.1835, over 3716413.52 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:27:13,346 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 1.957e+02 2.352e+02 2.685e+02 4.359e+02, threshold=4.704e+02, percent-clipped=0.0 +2024-08-25 08:27:15,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=56677.333333333336, ans=0.125 +2024-08-25 08:27:55,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.30 vs. limit=15.0 +2024-08-25 08:28:10,106 INFO [train.py:1114] (3/4) Epoch 5, batch 700, loss[loss=0.2729, simple_loss=0.3119, pruned_loss=0.08381, ctc_loss=0.1658, over 19719.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3322, pruned_loss=0.09736, ctc_loss=0.1836, over 3748716.32 frames. 
], batch size: 51, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:28:26,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=56890.666666666664, ans=0.0 +2024-08-25 08:28:42,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=56890.666666666664, ans=0.0 +2024-08-25 08:29:20,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=56997.333333333336, ans=0.0 +2024-08-25 08:29:29,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=57050.666666666664, ans=0.125 +2024-08-25 08:29:41,359 INFO [train.py:1114] (3/4) Epoch 5, batch 750, loss[loss=0.3003, simple_loss=0.3374, pruned_loss=0.09591, ctc_loss=0.1787, over 19497.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3313, pruned_loss=0.09676, ctc_loss=0.1824, over 3774382.21 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 32.0 +2024-08-25 08:30:02,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57104.0, ans=0.1 +2024-08-25 08:30:08,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57104.0, ans=0.1 +2024-08-25 08:30:26,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57157.333333333336, ans=0.125 +2024-08-25 08:30:40,363 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 2.099e+02 2.472e+02 3.181e+02 5.803e+02, threshold=4.945e+02, percent-clipped=2.0 +2024-08-25 08:31:04,165 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=12.0 +2024-08-25 08:31:16,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57264.0, ans=0.125 +2024-08-25 08:31:25,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=12.0 +2024-08-25 08:31:31,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=15.0 +2024-08-25 08:31:42,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=15.0 +2024-08-25 08:31:58,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.31 vs. limit=15.0 +2024-08-25 08:31:59,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0 +2024-08-25 08:32:05,741 INFO [train.py:1114] (3/4) Epoch 5, batch 800, loss[loss=0.2659, simple_loss=0.3082, pruned_loss=0.08133, ctc_loss=0.1523, over 19418.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.331, pruned_loss=0.09644, ctc_loss=0.1815, over 3795967.69 frames. 
], batch size: 48, lr: 2.76e-02, grad_scale: 32.0 +2024-08-25 08:32:36,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.59 vs. limit=15.0 +2024-08-25 08:32:55,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=57477.333333333336, ans=0.0 +2024-08-25 08:33:24,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57584.0, ans=0.125 +2024-08-25 08:33:34,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=57584.0, ans=0.5 +2024-08-25 08:33:37,755 INFO [train.py:1114] (3/4) Epoch 5, batch 850, loss[loss=0.2944, simple_loss=0.333, pruned_loss=0.09327, ctc_loss=0.1733, over 19656.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3303, pruned_loss=0.0962, ctc_loss=0.181, over 3814629.94 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0 +2024-08-25 08:34:26,552 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.963e+02 2.197e+02 2.544e+02 4.330e+02, threshold=4.395e+02, percent-clipped=0.0 +2024-08-25 08:34:28,027 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 08:34:33,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57744.0, ans=0.125 +2024-08-25 08:34:36,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.05 vs. limit=10.0 +2024-08-25 08:34:48,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=57797.333333333336, ans=0.125 +2024-08-25 08:34:50,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=57797.333333333336, ans=0.0 +2024-08-25 08:34:56,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57850.666666666664, ans=0.1 +2024-08-25 08:35:13,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=57850.666666666664, ans=0.125 +2024-08-25 08:35:17,361 INFO [train.py:1114] (3/4) Epoch 5, batch 900, loss[loss=0.2724, simple_loss=0.3041, pruned_loss=0.08732, ctc_loss=0.1651, over 19418.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3309, pruned_loss=0.097, ctc_loss=0.1823, over 3818402.36 frames. ], batch size: 48, lr: 2.75e-02, grad_scale: 32.0 +2024-08-25 08:35:28,791 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 08:35:39,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57957.333333333336, ans=0.125 +2024-08-25 08:35:56,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.65 vs. limit=15.0 +2024-08-25 08:36:00,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.41 vs. 
limit=22.5 +2024-08-25 08:36:23,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=58117.333333333336, ans=0.125 +2024-08-25 08:36:41,308 INFO [train.py:1114] (3/4) Epoch 5, batch 950, loss[loss=0.2884, simple_loss=0.3172, pruned_loss=0.09439, ctc_loss=0.1769, over 19513.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3311, pruned_loss=0.09717, ctc_loss=0.1828, over 3819311.00 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0 +2024-08-25 08:37:00,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=58224.0, ans=0.025 +2024-08-25 08:37:02,455 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.615e+02 2.021e+02 2.236e+02 2.607e+02 6.234e+02, threshold=4.471e+02, percent-clipped=1.0 +2024-08-25 08:37:02,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=58224.0, ans=0.0 +2024-08-25 08:37:14,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=58277.333333333336, ans=0.0 +2024-08-25 08:37:19,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=58277.333333333336, ans=0.0 +2024-08-25 08:37:22,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=58277.333333333336, ans=0.2 +2024-08-25 08:37:29,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.96 vs. limit=15.0 +2024-08-25 08:37:37,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=58384.0, ans=0.125 +2024-08-25 08:37:49,069 INFO [train.py:1114] (3/4) Epoch 5, batch 1000, loss[loss=0.2972, simple_loss=0.3264, pruned_loss=0.09688, ctc_loss=0.1859, over 19854.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.332, pruned_loss=0.09792, ctc_loss=0.184, over 3815031.46 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0 +2024-08-25 08:39:12,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0 +2024-08-25 08:39:14,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=58650.666666666664, ans=0.125 +2024-08-25 08:39:18,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58704.0, ans=0.1 +2024-08-25 08:39:20,335 INFO [train.py:1114] (3/4) Epoch 5, batch 1050, loss[loss=0.3122, simple_loss=0.3428, pruned_loss=0.1019, ctc_loss=0.1944, over 19846.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3315, pruned_loss=0.09768, ctc_loss=0.1838, over 3822360.93 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0 +2024-08-25 08:39:41,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.929e+02 2.228e+02 2.594e+02 4.447e+02, threshold=4.456e+02, percent-clipped=0.0 +2024-08-25 08:40:02,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.31 vs. 
limit=15.0 +2024-08-25 08:40:20,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=58864.0, ans=0.125 +2024-08-25 08:40:32,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=58917.333333333336, ans=0.025 +2024-08-25 08:40:42,208 INFO [train.py:1114] (3/4) Epoch 5, batch 1100, loss[loss=0.3192, simple_loss=0.3393, pruned_loss=0.1088, ctc_loss=0.2039, over 19583.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3305, pruned_loss=0.09682, ctc_loss=0.1826, over 3828880.11 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0 +2024-08-25 08:41:00,927 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 08:41:35,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.90 vs. limit=22.5 +2024-08-25 08:42:06,695 INFO [train.py:1114] (3/4) Epoch 5, batch 1150, loss[loss=0.2831, simple_loss=0.3196, pruned_loss=0.09051, ctc_loss=0.1642, over 19581.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3308, pruned_loss=0.09732, ctc_loss=0.1833, over 3827292.51 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 32.0 +2024-08-25 08:42:17,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=59237.333333333336, ans=0.0 +2024-08-25 08:42:24,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=59290.666666666664, ans=0.09899494936611666 +2024-08-25 08:42:38,153 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 2.022e+02 2.244e+02 2.636e+02 4.087e+02, threshold=4.489e+02, percent-clipped=0.0 +2024-08-25 08:42:44,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59344.0, ans=0.125 +2024-08-25 08:43:33,325 INFO [train.py:1114] (3/4) Epoch 5, batch 1200, loss[loss=0.3044, simple_loss=0.3437, pruned_loss=0.09605, ctc_loss=0.1822, over 19834.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3317, pruned_loss=0.09749, ctc_loss=0.1835, over 3823627.46 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0 +2024-08-25 08:44:02,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=15.0 +2024-08-25 08:44:12,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=59610.666666666664, ans=0.125 +2024-08-25 08:44:13,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.66 vs. 
limit=22.5 +2024-08-25 08:44:15,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=59610.666666666664, ans=0.0 +2024-08-25 08:44:16,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=59610.666666666664, ans=0.09899494936611666 +2024-08-25 08:44:25,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=59664.0, ans=0.125 +2024-08-25 08:44:41,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.83 vs. limit=15.0 +2024-08-25 08:44:53,028 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=7.68 vs. limit=12.0 +2024-08-25 08:44:55,327 INFO [train.py:1114] (3/4) Epoch 5, batch 1250, loss[loss=0.3264, simple_loss=0.3613, pruned_loss=0.1062, ctc_loss=0.1978, over 19515.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3317, pruned_loss=0.09678, ctc_loss=0.182, over 3842236.16 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0 +2024-08-25 08:44:59,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=59770.666666666664, ans=0.0 +2024-08-25 08:45:21,216 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 1.906e+02 2.098e+02 2.362e+02 4.005e+02, threshold=4.196e+02, percent-clipped=0.0 +2024-08-25 08:45:25,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=59877.333333333336, ans=0.0 +2024-08-25 08:45:29,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59877.333333333336, ans=0.2 +2024-08-25 08:45:39,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=59930.666666666664, ans=0.125 +2024-08-25 08:46:03,578 INFO [train.py:1114] (3/4) Epoch 5, batch 1300, loss[loss=0.3446, simple_loss=0.3554, pruned_loss=0.1227, ctc_loss=0.2209, over 18794.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3305, pruned_loss=0.09607, ctc_loss=0.1807, over 3846118.66 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0 +2024-08-25 08:46:19,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=60037.333333333336, ans=0.125 +2024-08-25 08:47:14,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=60250.666666666664, ans=0.125 +2024-08-25 08:47:27,093 INFO [train.py:1114] (3/4) Epoch 5, batch 1350, loss[loss=0.2997, simple_loss=0.3294, pruned_loss=0.09788, ctc_loss=0.1855, over 19778.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3292, pruned_loss=0.09533, ctc_loss=0.1794, over 3857716.81 frames. 
], batch size: 54, lr: 2.71e-02, grad_scale: 32.0 +2024-08-25 08:47:32,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=60304.0, ans=0.2 +2024-08-25 08:48:06,336 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.950e+02 2.204e+02 2.621e+02 4.331e+02, threshold=4.409e+02, percent-clipped=1.0 +2024-08-25 08:48:12,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.72 vs. limit=15.0 +2024-08-25 08:48:13,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=60410.666666666664, ans=0.125 +2024-08-25 08:48:22,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60410.666666666664, ans=0.125 +2024-08-25 08:49:12,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=60517.333333333336, ans=0.125 +2024-08-25 08:49:13,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60570.666666666664, ans=0.125 +2024-08-25 08:49:14,314 INFO [train.py:1114] (3/4) Epoch 5, batch 1400, loss[loss=0.2452, simple_loss=0.2903, pruned_loss=0.07154, ctc_loss=0.1426, over 19634.00 frames. ], tot_loss[loss=0.2949, simple_loss=0.3288, pruned_loss=0.09486, ctc_loss=0.1784, over 3864927.74 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0 +2024-08-25 08:49:22,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.83 vs. limit=22.5 +2024-08-25 08:49:25,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.24 vs. limit=6.0 +2024-08-25 08:49:46,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=60677.333333333336, ans=0.025 +2024-08-25 08:49:47,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.73 vs. limit=15.0 +2024-08-25 08:58:06,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=60784.0, ans=0.0 +2024-08-25 08:58:37,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.82 vs. limit=15.0 +2024-08-25 09:00:00,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=60784.0, ans=0.0 +2024-08-25 09:01:57,446 INFO [train.py:1114] (3/4) Epoch 5, batch 1450, loss[loss=0.2885, simple_loss=0.3445, pruned_loss=0.08403, ctc_loss=0.1611, over 19664.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3297, pruned_loss=0.09539, ctc_loss=0.1795, over 3862918.22 frames. 
], batch size: 63, lr: 2.71e-02, grad_scale: 32.0 +2024-08-25 09:11:45,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=60890.666666666664, ans=0.125 +2024-08-25 09:14:29,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 1.942e+02 2.164e+02 2.480e+02 4.633e+02, threshold=4.329e+02, percent-clipped=1.0 +2024-08-25 09:18:15,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=60944.0, ans=0.125 +2024-08-25 09:19:20,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60944.0, ans=0.125 +2024-08-25 09:22:22,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=60997.333333333336, ans=0.125 +2024-08-25 09:27:50,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60997.333333333336, ans=0.125 +2024-08-25 09:27:51,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=60997.333333333336, ans=0.07 +2024-08-25 09:27:54,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=60997.333333333336, ans=0.09899494936611666 +2024-08-25 09:35:37,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=61050.666666666664, ans=0.0 +2024-08-25 09:36:13,475 INFO [train.py:1114] (3/4) Epoch 5, batch 1500, loss[loss=0.3197, simple_loss=0.3507, pruned_loss=0.1061, ctc_loss=0.191, over 19600.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3303, pruned_loss=0.09584, ctc_loss=0.1802, over 3862222.53 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0 +2024-08-25 09:42:54,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=61104.0, ans=0.0 +2024-08-25 09:50:12,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-08-25 10:02:13,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61317.333333333336, ans=0.125 +2024-08-25 10:03:52,725 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=3.858e-02 +2024-08-25 10:05:15,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=61317.333333333336, ans=0.2 +2024-08-25 10:06:52,313 INFO [train.py:1114] (3/4) Epoch 5, batch 1550, loss[loss=0.3324, simple_loss=0.3532, pruned_loss=0.114, ctc_loss=0.2095, over 19602.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3304, pruned_loss=0.09618, ctc_loss=0.1809, over 3846393.16 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0 +2024-08-25 10:10:28,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=61370.666666666664, ans=0.0 +2024-08-25 10:12:35,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. 
limit=15.0 +2024-08-25 10:14:17,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=61424.0, ans=0.125 +2024-08-25 10:14:47,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.601e+02 1.971e+02 2.260e+02 2.611e+02 5.554e+02, threshold=4.519e+02, percent-clipped=3.0 +2024-08-25 10:18:14,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=61530.666666666664, ans=0.125 +2024-08-25 10:18:59,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=22.5 +2024-08-25 10:23:07,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=61530.666666666664, ans=0.125 +2024-08-25 10:26:34,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61584.0, ans=0.1 +2024-08-25 10:28:13,716 INFO [train.py:1114] (3/4) Epoch 5, batch 1600, loss[loss=0.3018, simple_loss=0.3442, pruned_loss=0.09453, ctc_loss=0.1755, over 19830.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3299, pruned_loss=0.09605, ctc_loss=0.1808, over 3834995.66 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:35:03,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-25 10:35:04,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61744.0, ans=0.1 +2024-08-25 10:35:04,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=61744.0, ans=0.0 +2024-08-25 10:40:45,725 INFO [train.py:1114] (3/4) Epoch 5, batch 1650, loss[loss=0.2742, simple_loss=0.3288, pruned_loss=0.07958, ctc_loss=0.1509, over 19679.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3299, pruned_loss=0.09619, ctc_loss=0.1809, over 3830975.06 frames. ], batch size: 59, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:41:37,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=61904.0, ans=0.125 +2024-08-25 10:42:22,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=61957.333333333336, ans=0.2 +2024-08-25 10:43:04,116 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.985e+02 2.336e+02 2.616e+02 4.728e+02, threshold=4.672e+02, percent-clipped=1.0 +2024-08-25 10:43:52,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62010.666666666664, ans=0.125 +2024-08-25 10:44:58,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=62064.0, ans=0.125 +2024-08-25 10:45:48,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=62117.333333333336, ans=0.07 +2024-08-25 10:46:43,739 INFO [train.py:1114] (3/4) Epoch 5, batch 1700, loss[loss=0.2579, simple_loss=0.2946, pruned_loss=0.07991, ctc_loss=0.1532, over 19650.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.3298, pruned_loss=0.09611, ctc_loss=0.1805, over 3845488.88 frames. 
], batch size: 46, lr: 2.69e-02, grad_scale: 32.0 +2024-08-25 10:48:01,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=62224.0, ans=0.0 +2024-08-25 10:49:18,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=62330.666666666664, ans=0.05 +2024-08-25 10:49:29,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.46 vs. limit=6.0 +2024-08-25 10:49:57,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=62330.666666666664, ans=0.0 +2024-08-25 10:50:15,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62384.0, ans=0.125 +2024-08-25 10:50:16,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.88 vs. limit=15.0 +2024-08-25 10:50:54,973 INFO [train.py:1114] (3/4) Epoch 5, batch 1750, loss[loss=0.2663, simple_loss=0.2989, pruned_loss=0.08538, ctc_loss=0.1575, over 19660.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.329, pruned_loss=0.09518, ctc_loss=0.1791, over 3850086.81 frames. ], batch size: 45, lr: 2.68e-02, grad_scale: 32.0 +2024-08-25 10:51:32,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=62437.333333333336, ans=0.025 +2024-08-25 10:53:52,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=62490.666666666664, ans=0.025 +2024-08-25 10:53:52,980 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 2.010e+02 2.326e+02 2.972e+02 6.446e+02, threshold=4.653e+02, percent-clipped=3.0 +2024-08-25 10:53:53,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62544.0, ans=0.125 +2024-08-25 10:57:03,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=62650.666666666664, ans=0.125 +2024-08-25 10:57:03,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=62650.666666666664, ans=0.025 +2024-08-25 10:57:11,529 INFO [train.py:1114] (3/4) Epoch 5, batch 1800, loss[loss=0.297, simple_loss=0.3359, pruned_loss=0.09437, ctc_loss=0.1737, over 19601.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3301, pruned_loss=0.09579, ctc_loss=0.1801, over 3852345.49 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0 +2024-08-25 10:57:58,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62757.333333333336, ans=0.1 +2024-08-25 10:58:12,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=62757.333333333336, ans=0.0 +2024-08-25 10:58:26,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.28 vs. 
limit=15.0 +2024-08-25 10:58:27,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=62864.0, ans=0.0 +2024-08-25 10:58:40,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=62864.0, ans=0.125 +2024-08-25 10:58:54,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=62917.333333333336, ans=0.125 +2024-08-25 10:59:02,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62917.333333333336, ans=0.1 +2024-08-25 10:59:06,166 INFO [train.py:1114] (3/4) Epoch 5, batch 1850, loss[loss=0.3142, simple_loss=0.3518, pruned_loss=0.1003, ctc_loss=0.1897, over 19604.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3297, pruned_loss=0.09558, ctc_loss=0.1794, over 3855212.65 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 10:59:07,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62970.666666666664, ans=0.125 +2024-08-25 10:59:12,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=62970.666666666664, ans=0.2 +2024-08-25 10:59:17,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62970.666666666664, ans=0.1 +2024-08-25 10:59:20,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63024.0, ans=0.125 +2024-08-25 10:59:32,442 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 2.044e+02 2.314e+02 2.820e+02 4.474e+02, threshold=4.628e+02, percent-clipped=0.0 +2024-08-25 10:59:35,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=63077.333333333336, ans=0.0 +2024-08-25 10:59:49,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.17 vs. limit=22.5 +2024-08-25 11:00:09,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=63184.0, ans=0.125 +2024-08-25 11:00:20,616 INFO [train.py:1114] (3/4) Epoch 5, batch 1900, loss[loss=0.3041, simple_loss=0.3486, pruned_loss=0.09439, ctc_loss=0.1769, over 19617.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3301, pruned_loss=0.09557, ctc_loss=0.1795, over 3860616.91 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 11:00:28,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=63237.333333333336, ans=0.1 +2024-08-25 11:00:46,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=63290.666666666664, ans=0.07 +2024-08-25 11:00:55,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.09 vs. limit=15.0 +2024-08-25 11:00:55,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.47 vs. 
limit=12.0 +2024-08-25 11:01:13,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63344.0, ans=0.1 +2024-08-25 11:01:32,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=63397.333333333336, ans=0.125 +2024-08-25 11:01:55,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=63397.333333333336, ans=0.125 +2024-08-25 11:01:57,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=63397.333333333336, ans=0.05 +2024-08-25 11:02:16,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.94 vs. limit=12.0 +2024-08-25 11:02:34,471 INFO [train.py:1114] (3/4) Epoch 5, batch 1950, loss[loss=0.2657, simple_loss=0.3086, pruned_loss=0.08044, ctc_loss=0.1548, over 19587.00 frames. ], tot_loss[loss=0.2966, simple_loss=0.3307, pruned_loss=0.09535, ctc_loss=0.1792, over 3869650.84 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 32.0 +2024-08-25 11:03:16,687 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 1.932e+02 2.130e+02 2.461e+02 4.838e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 11:03:49,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=63664.0, ans=0.95 +2024-08-25 11:03:49,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=63664.0, ans=0.125 +2024-08-25 11:04:37,797 INFO [train.py:1114] (3/4) Epoch 5, batch 2000, loss[loss=0.2495, simple_loss=0.2934, pruned_loss=0.0747, ctc_loss=0.1406, over 19657.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3315, pruned_loss=0.09609, ctc_loss=0.1805, over 3856342.36 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0 +2024-08-25 11:04:47,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63770.666666666664, ans=0.125 +2024-08-25 11:04:49,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.69 vs. limit=15.0 +2024-08-25 11:04:50,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.19 vs. limit=15.0 +2024-08-25 11:05:12,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.65 vs. limit=12.0 +2024-08-25 11:05:38,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63930.666666666664, ans=0.125 +2024-08-25 11:05:44,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=63930.666666666664, ans=0.0 +2024-08-25 11:06:04,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=63984.0, ans=0.025 +2024-08-25 11:06:08,383 INFO [train.py:1114] (3/4) Epoch 5, batch 2050, loss[loss=0.2388, simple_loss=0.2814, pruned_loss=0.07183, ctc_loss=0.1314, over 19708.00 frames. 
], tot_loss[loss=0.2968, simple_loss=0.3304, pruned_loss=0.09561, ctc_loss=0.1797, over 3853329.76 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 32.0 +2024-08-25 11:06:29,162 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.037e+02 2.272e+02 2.892e+02 6.343e+02, threshold=4.544e+02, percent-clipped=1.0 +2024-08-25 11:06:50,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=64144.0, ans=0.125 +2024-08-25 11:07:48,384 INFO [train.py:1114] (3/4) Epoch 5, batch 2100, loss[loss=0.2929, simple_loss=0.3252, pruned_loss=0.09436, ctc_loss=0.1794, over 19774.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.3293, pruned_loss=0.09496, ctc_loss=0.1784, over 3860031.85 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:08:25,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=64410.666666666664, ans=0.0 +2024-08-25 11:08:33,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.93 vs. limit=12.0 +2024-08-25 11:08:37,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=64464.0, ans=0.125 +2024-08-25 11:08:49,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=64464.0, ans=0.2 +2024-08-25 11:09:21,094 INFO [train.py:1114] (3/4) Epoch 5, batch 2150, loss[loss=0.2858, simple_loss=0.3204, pruned_loss=0.09111, ctc_loss=0.1725, over 19858.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.3279, pruned_loss=0.09439, ctc_loss=0.1772, over 3871060.79 frames. ], batch size: 52, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:09:29,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.23 vs. limit=22.5 +2024-08-25 11:09:34,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64624.0, ans=0.1 +2024-08-25 11:09:37,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.57 vs. limit=8.0 +2024-08-25 11:09:44,518 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 2.041e+02 2.279e+02 2.689e+02 3.624e+02, threshold=4.557e+02, percent-clipped=0.0 +2024-08-25 11:09:44,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=64677.333333333336, ans=0.025 +2024-08-25 11:10:09,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=64730.666666666664, ans=0.0 +2024-08-25 11:10:13,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=64730.666666666664, ans=0.125 +2024-08-25 11:10:17,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=64730.666666666664, ans=0.07 +2024-08-25 11:10:25,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=64784.0, ans=0.125 +2024-08-25 11:10:34,041 INFO [train.py:1114] (3/4) Epoch 5, batch 2200, loss[loss=0.286, simple_loss=0.3341, pruned_loss=0.08678, ctc_loss=0.1609, over 19586.00 frames. 
], tot_loss[loss=0.294, simple_loss=0.3283, pruned_loss=0.09442, ctc_loss=0.1772, over 3868287.77 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0 +2024-08-25 11:10:40,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.79 vs. limit=12.0 +2024-08-25 11:10:43,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=64837.333333333336, ans=0.025 +2024-08-25 11:11:05,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=64944.0, ans=0.5 +2024-08-25 11:11:29,238 INFO [train.py:1114] (3/4) Epoch 5, batch 2250, loss[loss=0.3058, simple_loss=0.3386, pruned_loss=0.1001, ctc_loss=0.1819, over 19612.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.3278, pruned_loss=0.0938, ctc_loss=0.1758, over 3867721.41 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 32.0 +2024-08-25 11:11:51,986 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.714e+02 2.180e+02 2.514e+02 3.003e+02 5.559e+02, threshold=5.029e+02, percent-clipped=2.0 +2024-08-25 11:11:59,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=65210.666666666664, ans=0.09899494936611666 +2024-08-25 11:12:34,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65317.333333333336, ans=0.1 +2024-08-25 11:12:36,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=65317.333333333336, ans=0.2 +2024-08-25 11:12:38,227 INFO [train.py:1114] (3/4) Epoch 5, batch 2300, loss[loss=0.267, simple_loss=0.3127, pruned_loss=0.08052, ctc_loss=0.1504, over 19506.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.3271, pruned_loss=0.09383, ctc_loss=0.1759, over 3862186.62 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 32.0 +2024-08-25 11:12:56,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.41 vs. limit=15.0 +2024-08-25 11:13:16,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.47 vs. limit=6.0 +2024-08-25 11:13:50,080 INFO [train.py:1114] (3/4) Epoch 5, batch 2350, loss[loss=0.2938, simple_loss=0.3386, pruned_loss=0.09051, ctc_loss=0.17, over 19668.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3264, pruned_loss=0.09344, ctc_loss=0.1751, over 3864782.88 frames. 
], batch size: 63, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:13:53,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=65637.33333333333, ans=0.2 +2024-08-25 11:13:57,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65637.33333333333, ans=0.1 +2024-08-25 11:14:00,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=65637.33333333333, ans=0.125 +2024-08-25 11:14:17,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=65637.33333333333, ans=0.125 +2024-08-25 11:14:26,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=65690.66666666667, ans=0.125 +2024-08-25 11:14:31,469 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.936e+02 2.303e+02 2.820e+02 4.151e+02, threshold=4.606e+02, percent-clipped=0.0 +2024-08-25 11:14:55,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=65797.33333333333, ans=0.125 +2024-08-25 11:14:56,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65797.33333333333, ans=0.125 +2024-08-25 11:14:58,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=65797.33333333333, ans=0.025 +2024-08-25 11:15:23,115 INFO [train.py:1114] (3/4) Epoch 5, batch 2400, loss[loss=0.3049, simple_loss=0.3454, pruned_loss=0.09768, ctc_loss=0.1725, over 19361.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3293, pruned_loss=0.09487, ctc_loss=0.1773, over 3859883.67 frames. ], batch size: 71, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:15:29,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=15.0 +2024-08-25 11:15:39,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=65957.33333333333, ans=0.125 +2024-08-25 11:15:55,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=65957.33333333333, ans=0.09899494936611666 +2024-08-25 11:15:59,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=66010.66666666667, ans=0.125 +2024-08-25 11:16:31,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-08-25 11:16:35,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=66117.33333333333, ans=0.025 +2024-08-25 11:16:53,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=66170.66666666667, ans=0.0 +2024-08-25 11:16:56,811 INFO [train.py:1114] (3/4) Epoch 5, batch 2450, loss[loss=0.3492, simple_loss=0.3598, pruned_loss=0.1228, ctc_loss=0.2327, over 14190.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3344, pruned_loss=0.0994, ctc_loss=0.1861, over 3732592.76 frames. 
], batch size: 140, lr: 2.63e-02, grad_scale: 32.0 +2024-08-25 11:17:39,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66224.0, ans=0.1 +2024-08-25 11:17:43,153 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 2.021e+02 2.221e+02 2.524e+02 3.558e+02, threshold=4.443e+02, percent-clipped=0.0 +2024-08-25 11:18:05,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66330.66666666667, ans=0.1 +2024-08-25 11:19:28,342 INFO [train.py:1114] (3/4) Epoch 6, batch 0, loss[loss=0.2974, simple_loss=0.3213, pruned_loss=0.1006, ctc_loss=0.1807, over 19443.00 frames. ], tot_loss[loss=0.2974, simple_loss=0.3213, pruned_loss=0.1006, ctc_loss=0.1807, over 19443.00 frames. ], batch size: 48, lr: 2.45e-02, grad_scale: 32.0 +2024-08-25 11:19:28,342 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 11:20:29,253 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.2388, simple_loss=0.3147, pruned_loss=0.05993, ctc_loss=0.1076, over 944034.00 frames. +2024-08-25 11:20:29,254 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-25 11:20:29,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.91 vs. limit=15.0 +2024-08-25 11:20:32,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=66378.66666666667, ans=0.05 +2024-08-25 11:20:45,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=66432.0, ans=0.0 +2024-08-25 11:20:55,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.15 vs. limit=10.0 +2024-08-25 11:21:00,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=66485.33333333333, ans=0.0 +2024-08-25 11:21:24,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=66592.0, ans=0.0 +2024-08-25 11:21:26,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=66592.0, ans=0.125 +2024-08-25 11:21:56,940 INFO [train.py:1114] (3/4) Epoch 6, batch 50, loss[loss=0.2767, simple_loss=0.307, pruned_loss=0.09066, ctc_loss=0.1629, over 19697.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3322, pruned_loss=0.09778, ctc_loss=0.1848, over 844717.56 frames. 
], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:22:10,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66645.33333333333, ans=0.1 +2024-08-25 11:22:46,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66752.0, ans=0.0 +2024-08-25 11:22:50,713 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 1.959e+02 2.174e+02 2.569e+02 5.460e+02, threshold=4.347e+02, percent-clipped=1.0 +2024-08-25 11:22:52,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66805.33333333333, ans=0.125 +2024-08-25 11:23:08,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66858.66666666667, ans=0.125 +2024-08-25 11:23:12,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=66858.66666666667, ans=0.2 +2024-08-25 11:23:18,887 INFO [train.py:1114] (3/4) Epoch 6, batch 100, loss[loss=0.2478, simple_loss=0.2958, pruned_loss=0.07283, ctc_loss=0.1353, over 19740.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.332, pruned_loss=0.09562, ctc_loss=0.1792, over 1499352.90 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:23:25,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=15.0 +2024-08-25 11:23:32,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.91 vs. limit=15.0 +2024-08-25 11:23:40,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=66965.33333333333, ans=0.0 +2024-08-25 11:23:45,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67018.66666666667, ans=0.125 +2024-08-25 11:23:54,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.72 vs. limit=15.0 +2024-08-25 11:23:56,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=67018.66666666667, ans=0.05 +2024-08-25 11:24:07,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67072.0, ans=0.125 +2024-08-25 11:24:21,826 INFO [train.py:1114] (3/4) Epoch 6, batch 150, loss[loss=0.2355, simple_loss=0.2801, pruned_loss=0.06991, ctc_loss=0.1277, over 19692.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3279, pruned_loss=0.09268, ctc_loss=0.1743, over 2028735.89 frames. 
], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-25 11:24:24,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67178.66666666667, ans=0.1 +2024-08-25 11:25:04,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.947e+02 2.172e+02 2.650e+02 4.091e+02, threshold=4.343e+02, percent-clipped=0.0 +2024-08-25 11:25:06,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=67338.66666666667, ans=0.125 +2024-08-25 11:25:16,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=67338.66666666667, ans=0.2 +2024-08-25 11:25:16,352 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.22 vs. limit=22.5 +2024-08-25 11:25:17,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0 +2024-08-25 11:25:35,916 INFO [train.py:1114] (3/4) Epoch 6, batch 200, loss[loss=0.3299, simple_loss=0.3523, pruned_loss=0.1122, ctc_loss=0.2081, over 18149.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3251, pruned_loss=0.09083, ctc_loss=0.171, over 2435769.68 frames. ], batch size: 85, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:25:46,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=67445.33333333333, ans=0.025 +2024-08-25 11:25:51,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=67445.33333333333, ans=0.0 +2024-08-25 11:26:34,121 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.92 vs. limit=15.0 +2024-08-25 11:26:34,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.62 vs. limit=15.0 +2024-08-25 11:27:11,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.32 vs. limit=15.0 +2024-08-25 11:27:19,510 INFO [train.py:1114] (3/4) Epoch 6, batch 250, loss[loss=0.3351, simple_loss=0.351, pruned_loss=0.1159, ctc_loss=0.2184, over 19379.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3252, pruned_loss=0.09117, ctc_loss=0.1717, over 2756426.68 frames. 
], batch size: 67, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:27:52,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67765.33333333333, ans=0.1 +2024-08-25 11:27:53,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=67765.33333333333, ans=0.0 +2024-08-25 11:27:55,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=67765.33333333333, ans=0.2 +2024-08-25 11:28:18,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=67818.66666666667, ans=0.025 +2024-08-25 11:28:36,840 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.549e+02 1.900e+02 2.111e+02 2.483e+02 4.707e+02, threshold=4.222e+02, percent-clipped=1.0 +2024-08-25 11:29:06,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67872.0, ans=0.125 +2024-08-25 11:29:20,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=67925.33333333333, ans=0.0 +2024-08-25 11:29:38,949 INFO [train.py:1114] (3/4) Epoch 6, batch 300, loss[loss=0.3085, simple_loss=0.346, pruned_loss=0.09899, ctc_loss=0.1826, over 19516.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3239, pruned_loss=0.09071, ctc_loss=0.1708, over 3000834.48 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0 +2024-08-25 11:30:15,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=68032.0, ans=0.025 +2024-08-25 11:30:22,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68032.0, ans=0.125 +2024-08-25 11:30:23,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=68032.0, ans=0.0 +2024-08-25 11:31:11,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=68192.0, ans=0.07 +2024-08-25 11:31:25,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=68192.0, ans=0.2 +2024-08-25 11:31:39,920 INFO [train.py:1114] (3/4) Epoch 6, batch 350, loss[loss=0.2357, simple_loss=0.2866, pruned_loss=0.06749, ctc_loss=0.1245, over 19773.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3239, pruned_loss=0.09067, ctc_loss=0.1704, over 3190322.04 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:31:42,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=68245.33333333333, ans=0.2 +2024-08-25 11:32:02,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=68245.33333333333, ans=0.0 +2024-08-25 11:32:25,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68352.0, ans=0.1 +2024-08-25 11:32:30,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.67 vs. 
limit=15.0 +2024-08-25 11:32:35,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 2.039e+02 2.360e+02 2.872e+02 5.301e+02, threshold=4.720e+02, percent-clipped=2.0 +2024-08-25 11:32:41,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68405.33333333333, ans=0.125 +2024-08-25 11:32:48,248 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:32:49,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68458.66666666667, ans=0.1 +2024-08-25 11:32:52,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=68458.66666666667, ans=0.0 +2024-08-25 11:33:02,558 INFO [train.py:1114] (3/4) Epoch 6, batch 400, loss[loss=0.2996, simple_loss=0.3353, pruned_loss=0.09586, ctc_loss=0.1802, over 19500.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3226, pruned_loss=0.08972, ctc_loss=0.1688, over 3341848.39 frames. ], batch size: 54, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:33:20,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.38 vs. limit=15.0 +2024-08-25 11:33:59,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=68672.0, ans=0.0 +2024-08-25 11:34:12,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=68778.66666666667, ans=0.025 +2024-08-25 11:34:13,410 INFO [train.py:1114] (3/4) Epoch 6, batch 450, loss[loss=0.2968, simple_loss=0.3334, pruned_loss=0.09422, ctc_loss=0.1795, over 19615.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3234, pruned_loss=0.09016, ctc_loss=0.1695, over 3450445.40 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0 +2024-08-25 11:34:17,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=68778.66666666667, ans=0.0 +2024-08-25 11:34:30,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=68832.0, ans=0.125 +2024-08-25 11:34:39,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68885.33333333333, ans=0.0 +2024-08-25 11:34:49,666 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.969e+02 2.191e+02 2.793e+02 4.218e+02, threshold=4.382e+02, percent-clipped=0.0 +2024-08-25 11:34:49,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=68938.66666666667, ans=0.125 +2024-08-25 11:34:55,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68938.66666666667, ans=0.125 +2024-08-25 11:34:57,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68938.66666666667, ans=0.125 +2024-08-25 11:34:58,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.12 vs. 
limit=15.0 +2024-08-25 11:35:08,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=68992.0, ans=0.125 +2024-08-25 11:35:10,580 INFO [train.py:1114] (3/4) Epoch 6, batch 500, loss[loss=0.2753, simple_loss=0.3272, pruned_loss=0.08207, ctc_loss=0.148, over 19677.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3221, pruned_loss=0.08933, ctc_loss=0.1685, over 3545248.39 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:35:25,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=69098.66666666667, ans=0.125 +2024-08-25 11:35:30,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69098.66666666667, ans=0.1 +2024-08-25 11:35:46,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=69205.33333333333, ans=0.0 +2024-08-25 11:35:52,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=69205.33333333333, ans=0.125 +2024-08-25 11:36:00,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69258.66666666667, ans=0.125 +2024-08-25 11:36:10,425 INFO [train.py:1114] (3/4) Epoch 6, batch 550, loss[loss=0.2711, simple_loss=0.3221, pruned_loss=0.08077, ctc_loss=0.1465, over 19338.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3229, pruned_loss=0.08997, ctc_loss=0.1694, over 3607603.70 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:36:14,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=69312.0, ans=0.025 +2024-08-25 11:36:20,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=69312.0, ans=0.0 +2024-08-25 11:36:29,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=69365.33333333333, ans=0.125 +2024-08-25 11:36:36,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=69418.66666666667, ans=0.125 +2024-08-25 11:36:42,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69418.66666666667, ans=0.125 +2024-08-25 11:36:46,537 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.559e+02 2.100e+02 2.439e+02 2.966e+02 5.259e+02, threshold=4.878e+02, percent-clipped=1.0 +2024-08-25 11:37:12,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=69525.33333333333, ans=0.2 +2024-08-25 11:37:27,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69578.66666666667, ans=0.1 +2024-08-25 11:37:28,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.19 vs. limit=15.0 +2024-08-25 11:37:28,776 INFO [train.py:1114] (3/4) Epoch 6, batch 600, loss[loss=0.3162, simple_loss=0.3479, pruned_loss=0.1049, ctc_loss=0.1871, over 19379.00 frames. 
], tot_loss[loss=0.2853, simple_loss=0.323, pruned_loss=0.08991, ctc_loss=0.1691, over 3666523.33 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0 +2024-08-25 11:37:29,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69578.66666666667, ans=0.1 +2024-08-25 11:37:29,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=69578.66666666667, ans=0.0 +2024-08-25 11:38:41,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=69792.0, ans=0.125 +2024-08-25 11:38:58,896 INFO [train.py:1114] (3/4) Epoch 6, batch 650, loss[loss=0.2815, simple_loss=0.3189, pruned_loss=0.08963, ctc_loss=0.1623, over 19764.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3225, pruned_loss=0.08979, ctc_loss=0.1689, over 3716957.91 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:39:50,473 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.537e+02 1.931e+02 2.137e+02 2.425e+02 3.711e+02, threshold=4.274e+02, percent-clipped=0.0 +2024-08-25 11:39:55,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=70005.33333333333, ans=0.125 +2024-08-25 11:40:16,192 INFO [train.py:1114] (3/4) Epoch 6, batch 700, loss[loss=0.2706, simple_loss=0.3119, pruned_loss=0.08378, ctc_loss=0.1544, over 19726.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.323, pruned_loss=0.08986, ctc_loss=0.1693, over 3747934.63 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:41:16,544 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.71 vs. limit=10.0 +2024-08-25 11:41:22,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.83 vs. limit=15.0 +2024-08-25 11:41:33,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=70272.0, ans=0.2 +2024-08-25 11:42:06,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=12.0 +2024-08-25 11:42:12,749 INFO [train.py:1114] (3/4) Epoch 6, batch 750, loss[loss=0.2881, simple_loss=0.3276, pruned_loss=0.08983, ctc_loss=0.1723, over 19486.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3223, pruned_loss=0.08957, ctc_loss=0.1684, over 3774371.07 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-25 11:42:38,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=15.0 +2024-08-25 11:43:09,519 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.653e+02 2.022e+02 2.297e+02 2.693e+02 4.652e+02, threshold=4.594e+02, percent-clipped=2.0 +2024-08-25 11:43:32,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=70592.0, ans=0.2 +2024-08-25 11:43:34,912 INFO [train.py:1114] (3/4) Epoch 6, batch 800, loss[loss=0.2412, simple_loss=0.2863, pruned_loss=0.07236, ctc_loss=0.1285, over 19804.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3225, pruned_loss=0.08969, ctc_loss=0.1684, over 3796209.12 frames. 
], batch size: 49, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:43:43,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=70645.33333333333, ans=0.125 +2024-08-25 11:44:09,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=70698.66666666667, ans=0.125 +2024-08-25 11:44:46,463 INFO [train.py:1114] (3/4) Epoch 6, batch 850, loss[loss=0.3056, simple_loss=0.3423, pruned_loss=0.09668, ctc_loss=0.189, over 19654.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3213, pruned_loss=0.08908, ctc_loss=0.167, over 3815529.01 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0 +2024-08-25 11:44:51,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=70912.0, ans=0.125 +2024-08-25 11:45:37,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71018.66666666667, ans=0.1 +2024-08-25 11:45:40,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=71018.66666666667, ans=0.2 +2024-08-25 11:45:46,254 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.893e+02 2.077e+02 2.374e+02 4.075e+02, threshold=4.154e+02, percent-clipped=0.0 +2024-08-25 11:45:54,513 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:45:56,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=71125.33333333333, ans=0.0 +2024-08-25 11:46:03,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0 +2024-08-25 11:46:07,494 INFO [train.py:1114] (3/4) Epoch 6, batch 900, loss[loss=0.2893, simple_loss=0.3124, pruned_loss=0.09728, ctc_loss=0.1793, over 19424.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3218, pruned_loss=0.08953, ctc_loss=0.1678, over 3818432.12 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 16.0 +2024-08-25 11:46:18,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.64 vs. limit=15.0 +2024-08-25 11:46:26,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=71232.0, ans=0.125 +2024-08-25 11:46:35,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=71232.0, ans=0.2 +2024-08-25 11:46:59,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.05 vs. limit=15.0 +2024-08-25 11:47:14,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=71392.0, ans=0.07 +2024-08-25 11:47:21,584 INFO [train.py:1114] (3/4) Epoch 6, batch 950, loss[loss=0.2862, simple_loss=0.3131, pruned_loss=0.09352, ctc_loss=0.1809, over 19512.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3227, pruned_loss=0.09014, ctc_loss=0.1693, over 3820133.53 frames. 
], batch size: 49, lr: 2.38e-02, grad_scale: 16.0 +2024-08-25 11:47:34,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.81 vs. limit=15.0 +2024-08-25 11:47:56,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.84 vs. limit=15.0 +2024-08-25 11:48:03,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=71552.0, ans=0.0 +2024-08-25 11:48:09,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71552.0, ans=0.1 +2024-08-25 11:48:23,508 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.900e+02 2.167e+02 2.553e+02 4.088e+02, threshold=4.334e+02, percent-clipped=0.0 +2024-08-25 11:48:32,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71605.33333333333, ans=0.0 +2024-08-25 11:48:33,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71605.33333333333, ans=0.1 +2024-08-25 11:48:36,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=71658.66666666667, ans=0.125 +2024-08-25 11:49:01,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.46 vs. limit=12.0 +2024-08-25 11:49:03,398 INFO [train.py:1114] (3/4) Epoch 6, batch 1000, loss[loss=0.3056, simple_loss=0.3383, pruned_loss=0.09923, ctc_loss=0.1864, over 19863.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3229, pruned_loss=0.0902, ctc_loss=0.1694, over 3816607.31 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0 +2024-08-25 11:49:10,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71712.0, ans=0.1 +2024-08-25 11:50:03,444 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:50:04,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=71872.0, ans=0.0 +2024-08-25 11:50:33,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=71925.33333333333, ans=0.2 +2024-08-25 11:50:49,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=71925.33333333333, ans=0.0 +2024-08-25 11:50:57,799 INFO [train.py:1114] (3/4) Epoch 6, batch 1050, loss[loss=0.3167, simple_loss=0.3496, pruned_loss=0.1014, ctc_loss=0.2026, over 19840.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.322, pruned_loss=0.08988, ctc_loss=0.169, over 3823402.83 frames. 
], batch size: 57, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:51:03,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71978.66666666667, ans=0.125 +2024-08-25 11:51:57,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=72138.66666666667, ans=0.04949747468305833 +2024-08-25 11:51:58,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=15.0 +2024-08-25 11:51:58,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.33 vs. limit=5.0 +2024-08-25 11:52:00,143 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.944e+02 2.201e+02 2.550e+02 3.957e+02, threshold=4.403e+02, percent-clipped=0.0 +2024-08-25 11:52:48,880 INFO [train.py:1114] (3/4) Epoch 6, batch 1100, loss[loss=0.2727, simple_loss=0.3146, pruned_loss=0.08556, ctc_loss=0.1491, over 19584.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3223, pruned_loss=0.08994, ctc_loss=0.169, over 3831345.65 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:52:53,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=72245.33333333333, ans=0.125 +2024-08-25 11:52:56,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=72245.33333333333, ans=0.025 +2024-08-25 11:53:00,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=72245.33333333333, ans=0.2 +2024-08-25 11:53:14,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=72298.66666666667, ans=0.0 +2024-08-25 11:53:26,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=72352.0, ans=0.125 +2024-08-25 11:53:30,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=72405.33333333333, ans=15.0 +2024-08-25 11:53:41,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=72458.66666666667, ans=0.0 +2024-08-25 11:53:56,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=72458.66666666667, ans=0.125 +2024-08-25 11:53:58,643 INFO [train.py:1114] (3/4) Epoch 6, batch 1150, loss[loss=0.2977, simple_loss=0.3283, pruned_loss=0.09665, ctc_loss=0.1843, over 19595.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3223, pruned_loss=0.0901, ctc_loss=0.1695, over 3830517.04 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-25 11:54:04,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.38 vs. limit=15.0 +2024-08-25 11:54:43,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 1.952e+02 2.194e+02 2.505e+02 4.680e+02, threshold=4.387e+02, percent-clipped=1.0 +2024-08-25 11:54:54,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.14 vs. 
limit=15.0 +2024-08-25 11:55:02,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72725.33333333333, ans=0.125 +2024-08-25 11:55:10,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=72778.66666666667, ans=0.1 +2024-08-25 11:55:11,874 INFO [train.py:1114] (3/4) Epoch 6, batch 1200, loss[loss=0.3051, simple_loss=0.3461, pruned_loss=0.09474, ctc_loss=0.1864, over 19839.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3234, pruned_loss=0.09044, ctc_loss=0.1703, over 3825177.28 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:55:27,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=72832.0, ans=0.025 +2024-08-25 11:55:48,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=72885.33333333333, ans=0.2 +2024-08-25 11:55:48,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72885.33333333333, ans=0.1 +2024-08-25 11:56:12,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=72938.66666666667, ans=0.2 +2024-08-25 11:56:15,099 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 11:56:24,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=72938.66666666667, ans=0.125 +2024-08-25 11:56:55,077 INFO [train.py:1114] (3/4) Epoch 6, batch 1250, loss[loss=0.2969, simple_loss=0.3433, pruned_loss=0.0909, ctc_loss=0.1718, over 19508.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3229, pruned_loss=0.08967, ctc_loss=0.1689, over 3843224.54 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:57:04,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.74 vs. limit=15.0 +2024-08-25 11:57:12,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=73098.66666666667, ans=0.0 +2024-08-25 11:57:17,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.60 vs. limit=15.0 +2024-08-25 11:58:13,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.073e+02 2.305e+02 2.660e+02 4.224e+02, threshold=4.609e+02, percent-clipped=0.0 +2024-08-25 11:58:15,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=73205.33333333333, ans=0.09899494936611666 +2024-08-25 11:58:32,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73258.66666666667, ans=0.125 +2024-08-25 11:58:46,693 INFO [train.py:1114] (3/4) Epoch 6, batch 1300, loss[loss=0.3047, simple_loss=0.3318, pruned_loss=0.101, ctc_loss=0.1888, over 18852.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3221, pruned_loss=0.08916, ctc_loss=0.1679, over 3846827.06 frames. 
], batch size: 76, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 11:59:01,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0 +2024-08-25 11:59:03,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=73312.0, ans=0.125 +2024-08-25 11:59:07,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=73312.0, ans=0.0 +2024-08-25 11:59:35,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=73365.33333333333, ans=0.2 +2024-08-25 11:59:35,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=73365.33333333333, ans=0.125 +2024-08-25 11:59:35,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73365.33333333333, ans=0.125 +2024-08-25 12:00:19,973 INFO [train.py:1114] (3/4) Epoch 6, batch 1350, loss[loss=0.2734, simple_loss=0.3202, pruned_loss=0.08184, ctc_loss=0.1573, over 19779.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3219, pruned_loss=0.08899, ctc_loss=0.1672, over 3858002.22 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0 +2024-08-25 12:00:27,318 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:00:34,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73578.66666666667, ans=0.125 +2024-08-25 12:00:43,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=73632.0, ans=0.0 +2024-08-25 12:00:53,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73685.33333333333, ans=0.1 +2024-08-25 12:01:04,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=73738.66666666667, ans=0.2 +2024-08-25 12:01:04,998 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.025e+02 2.295e+02 2.579e+02 4.133e+02, threshold=4.590e+02, percent-clipped=0.0 +2024-08-25 12:01:12,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.82 vs. limit=15.0 +2024-08-25 12:01:30,923 INFO [train.py:1114] (3/4) Epoch 6, batch 1400, loss[loss=0.2435, simple_loss=0.2828, pruned_loss=0.07507, ctc_loss=0.1353, over 19687.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3222, pruned_loss=0.08922, ctc_loss=0.1677, over 3864324.59 frames. ], batch size: 46, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:01:38,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.79 vs. limit=22.5 +2024-08-25 12:01:51,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.59 vs. 
limit=15.0 +2024-08-25 12:01:54,294 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:02:32,343 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.61 vs. limit=15.0 +2024-08-25 12:02:52,965 INFO [train.py:1114] (3/4) Epoch 6, batch 1450, loss[loss=0.2972, simple_loss=0.3448, pruned_loss=0.09154, ctc_loss=0.1661, over 19683.00 frames. ], tot_loss[loss=0.285, simple_loss=0.323, pruned_loss=0.08974, ctc_loss=0.1689, over 3862527.25 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:03:02,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=74112.0, ans=0.0 +2024-08-25 12:03:40,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=74218.66666666667, ans=0.125 +2024-08-25 12:03:53,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.572e+02 1.998e+02 2.330e+02 2.811e+02 4.670e+02, threshold=4.661e+02, percent-clipped=1.0 +2024-08-25 12:03:59,125 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.23 vs. limit=15.0 +2024-08-25 12:04:24,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74378.66666666667, ans=0.125 +2024-08-25 12:04:25,540 INFO [train.py:1114] (3/4) Epoch 6, batch 1500, loss[loss=0.3065, simple_loss=0.3405, pruned_loss=0.09953, ctc_loss=0.1838, over 19602.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3233, pruned_loss=0.09014, ctc_loss=0.1696, over 3861644.56 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 32.0 +2024-08-25 12:04:35,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=74378.66666666667, ans=0.125 +2024-08-25 12:04:39,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74432.0, ans=0.1 +2024-08-25 12:04:50,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.62 vs. limit=10.0 +2024-08-25 12:05:37,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=74538.66666666667, ans=0.2 +2024-08-25 12:05:44,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=74538.66666666667, ans=0.2 +2024-08-25 12:05:59,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=74592.0, ans=0.125 +2024-08-25 12:06:00,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=74645.33333333333, ans=0.125 +2024-08-25 12:06:01,369 INFO [train.py:1114] (3/4) Epoch 6, batch 1550, loss[loss=0.2748, simple_loss=0.3192, pruned_loss=0.08372, ctc_loss=0.1576, over 19622.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3236, pruned_loss=0.09057, ctc_loss=0.1701, over 3846569.08 frames. 
], batch size: 60, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:06:28,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=74752.0, ans=0.125 +2024-08-25 12:06:37,897 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.607e+02 2.061e+02 2.512e+02 3.027e+02 4.789e+02, threshold=5.024e+02, percent-clipped=1.0 +2024-08-25 12:06:48,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74858.66666666667, ans=0.1 +2024-08-25 12:07:01,751 INFO [train.py:1114] (3/4) Epoch 6, batch 1600, loss[loss=0.3129, simple_loss=0.3435, pruned_loss=0.1023, ctc_loss=0.1943, over 19836.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3232, pruned_loss=0.09061, ctc_loss=0.1701, over 3836820.87 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:07:08,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=15.0 +2024-08-25 12:07:09,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=74912.0, ans=0.2 +2024-08-25 12:07:29,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=75018.66666666667, ans=0.0 +2024-08-25 12:07:32,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=15.0 +2024-08-25 12:07:32,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0 +2024-08-25 12:08:00,978 INFO [train.py:1114] (3/4) Epoch 6, batch 1650, loss[loss=0.2827, simple_loss=0.3297, pruned_loss=0.08627, ctc_loss=0.1579, over 19675.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3228, pruned_loss=0.09016, ctc_loss=0.1694, over 3833812.59 frames. ], batch size: 59, lr: 2.34e-02, grad_scale: 32.0 +2024-08-25 12:08:03,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75178.66666666667, ans=0.125 +2024-08-25 12:08:05,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=75178.66666666667, ans=0.5 +2024-08-25 12:08:21,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75232.0, ans=0.1 +2024-08-25 12:08:23,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0 +2024-08-25 12:08:37,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.893e+02 2.381e+02 2.784e+02 7.281e+02, threshold=4.762e+02, percent-clipped=1.0 +2024-08-25 12:08:45,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=75338.66666666667, ans=0.09899494936611666 +2024-08-25 12:09:00,111 INFO [train.py:1114] (3/4) Epoch 6, batch 1700, loss[loss=0.251, simple_loss=0.2899, pruned_loss=0.07601, ctc_loss=0.1504, over 19682.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3213, pruned_loss=0.08919, ctc_loss=0.1674, over 3848443.30 frames. 
], batch size: 46, lr: 2.33e-02, grad_scale: 32.0 +2024-08-25 12:09:00,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=75445.33333333333, ans=0.05 +2024-08-25 12:09:10,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=75498.66666666667, ans=0.04949747468305833 +2024-08-25 12:09:13,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75498.66666666667, ans=0.125 +2024-08-25 12:09:19,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.62 vs. limit=15.0 +2024-08-25 12:09:22,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=75552.0, ans=0.125 +2024-08-25 12:09:28,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=75552.0, ans=0.125 +2024-08-25 12:09:44,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0 +2024-08-25 12:09:50,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=75658.66666666667, ans=0.0 +2024-08-25 12:09:54,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75658.66666666667, ans=0.125 +2024-08-25 12:09:55,903 INFO [train.py:1114] (3/4) Epoch 6, batch 1750, loss[loss=0.2429, simple_loss=0.2811, pruned_loss=0.07523, ctc_loss=0.1356, over 19670.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3199, pruned_loss=0.08801, ctc_loss=0.1652, over 3852158.83 frames. ], batch size: 45, lr: 2.33e-02, grad_scale: 16.0 +2024-08-25 12:09:57,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=75712.0, ans=0.125 +2024-08-25 12:10:03,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.23 vs. limit=15.0 +2024-08-25 12:10:14,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=75765.33333333333, ans=0.2 +2024-08-25 12:10:16,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75818.66666666667, ans=0.125 +2024-08-25 12:10:20,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.34 vs. 
limit=15.0 +2024-08-25 12:10:22,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75818.66666666667, ans=0.1 +2024-08-25 12:10:24,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=75818.66666666667, ans=0.0 +2024-08-25 12:10:26,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=75818.66666666667, ans=0.125 +2024-08-25 12:10:29,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=75872.0, ans=22.5 +2024-08-25 12:10:32,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.890e+02 2.130e+02 2.587e+02 4.262e+02, threshold=4.260e+02, percent-clipped=0.0 +2024-08-25 12:10:33,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0 +2024-08-25 12:10:46,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=75925.33333333333, ans=0.0 +2024-08-25 12:10:50,352 INFO [train.py:1114] (3/4) Epoch 6, batch 1800, loss[loss=0.292, simple_loss=0.3353, pruned_loss=0.08939, ctc_loss=0.1747, over 19619.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3199, pruned_loss=0.08767, ctc_loss=0.1646, over 3853493.58 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 8.0 +2024-08-25 12:10:54,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75978.66666666667, ans=0.125 +2024-08-25 12:10:58,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75978.66666666667, ans=0.125 +2024-08-25 12:11:22,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=76138.66666666667, ans=0.0 +2024-08-25 12:11:30,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=76138.66666666667, ans=0.0 +2024-08-25 12:11:44,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.16 vs. limit=15.0 +2024-08-25 12:11:44,877 INFO [train.py:1114] (3/4) Epoch 6, batch 1850, loss[loss=0.2916, simple_loss=0.3352, pruned_loss=0.08976, ctc_loss=0.1712, over 19592.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3193, pruned_loss=0.08701, ctc_loss=0.1634, over 3857006.66 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:11:45,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.76 vs. limit=15.0 +2024-08-25 12:11:48,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76245.33333333333, ans=0.1 +2024-08-25 12:11:48,436 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.20 vs. 
limit=15.0 +2024-08-25 12:11:52,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=76245.33333333333, ans=0.0 +2024-08-25 12:12:01,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=76298.66666666667, ans=0.125 +2024-08-25 12:12:14,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=76352.0, ans=0.09899494936611666 +2024-08-25 12:12:20,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=76405.33333333333, ans=0.0 +2024-08-25 12:12:22,237 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.994e+02 2.285e+02 2.712e+02 4.413e+02, threshold=4.569e+02, percent-clipped=2.0 +2024-08-25 12:12:43,375 INFO [train.py:1114] (3/4) Epoch 6, batch 1900, loss[loss=0.2779, simple_loss=0.3267, pruned_loss=0.08245, ctc_loss=0.1606, over 19647.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3196, pruned_loss=0.08705, ctc_loss=0.1635, over 3861518.57 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:12:48,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=76512.0, ans=0.125 +2024-08-25 12:12:51,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=76512.0, ans=0.0 +2024-08-25 12:12:56,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0 +2024-08-25 12:13:09,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=76618.66666666667, ans=0.125 +2024-08-25 12:13:15,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=76672.0, ans=0.0 +2024-08-25 12:13:17,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.20 vs. limit=15.0 +2024-08-25 12:13:20,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=76672.0, ans=0.2 +2024-08-25 12:13:20,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=76672.0, ans=0.125 +2024-08-25 12:13:22,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=76672.0, ans=0.2 +2024-08-25 12:13:27,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=76672.0, ans=0.0 +2024-08-25 12:13:32,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.35 vs. limit=10.0 +2024-08-25 12:13:36,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.63 vs. limit=22.5 +2024-08-25 12:13:40,518 INFO [train.py:1114] (3/4) Epoch 6, batch 1950, loss[loss=0.2533, simple_loss=0.3036, pruned_loss=0.07437, ctc_loss=0.1357, over 19569.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3209, pruned_loss=0.08768, ctc_loss=0.1648, over 3870912.95 frames. 
], batch size: 52, lr: 2.32e-02, grad_scale: 8.0 +2024-08-25 12:13:44,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.97 vs. limit=15.0 +2024-08-25 12:13:45,669 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.43 vs. limit=15.0 +2024-08-25 12:14:11,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=76885.33333333333, ans=0.125 +2024-08-25 12:14:12,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.89 vs. limit=15.0 +2024-08-25 12:14:18,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 1.890e+02 2.137e+02 2.349e+02 3.743e+02, threshold=4.275e+02, percent-clipped=0.0 +2024-08-25 12:14:35,989 INFO [train.py:1114] (3/4) Epoch 6, batch 2000, loss[loss=0.2555, simple_loss=0.293, pruned_loss=0.07994, ctc_loss=0.1452, over 19647.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3211, pruned_loss=0.08789, ctc_loss=0.1653, over 3855858.15 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:14:56,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=77098.66666666667, ans=0.07 +2024-08-25 12:14:59,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=12.0 +2024-08-25 12:15:15,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=77205.33333333333, ans=0.125 +2024-08-25 12:15:28,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=77258.66666666667, ans=0.07 +2024-08-25 12:15:28,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=77258.66666666667, ans=0.125 +2024-08-25 12:15:30,080 INFO [train.py:1114] (3/4) Epoch 6, batch 2050, loss[loss=0.2429, simple_loss=0.2853, pruned_loss=0.07247, ctc_loss=0.1387, over 19719.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3197, pruned_loss=0.08729, ctc_loss=0.1644, over 3852479.07 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:15:59,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77418.66666666667, ans=0.1 +2024-08-25 12:16:14,693 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.955e+02 2.380e+02 2.986e+02 1.021e+03, threshold=4.760e+02, percent-clipped=7.0 +2024-08-25 12:16:18,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=77472.0, ans=0.0 +2024-08-25 12:16:21,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.08 vs. 
limit=15.0 +2024-08-25 12:16:29,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77525.33333333333, ans=0.0 +2024-08-25 12:16:30,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=77525.33333333333, ans=0.2 +2024-08-25 12:16:32,226 INFO [train.py:1114] (3/4) Epoch 6, batch 2100, loss[loss=0.2365, simple_loss=0.2993, pruned_loss=0.06184, ctc_loss=0.1251, over 19757.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3193, pruned_loss=0.087, ctc_loss=0.1638, over 3859139.25 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 16.0 +2024-08-25 12:16:35,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77578.66666666667, ans=0.0 +2024-08-25 12:16:45,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=77632.0, ans=0.07 +2024-08-25 12:16:49,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=77632.0, ans=0.0 +2024-08-25 12:17:00,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=77685.33333333333, ans=0.0 +2024-08-25 12:17:20,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=77792.0, ans=0.0 +2024-08-25 12:17:28,071 INFO [train.py:1114] (3/4) Epoch 6, batch 2150, loss[loss=0.2684, simple_loss=0.3135, pruned_loss=0.08004, ctc_loss=0.1583, over 19868.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3179, pruned_loss=0.08602, ctc_loss=0.1617, over 3870860.64 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:17:30,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=77845.33333333333, ans=0.04949747468305833 +2024-08-25 12:17:31,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=77845.33333333333, ans=0.025 +2024-08-25 12:17:43,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=77898.66666666667, ans=0.07 +2024-08-25 12:18:05,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77952.0, ans=0.1 +2024-08-25 12:18:12,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=77952.0, ans=0.2 +2024-08-25 12:18:15,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=78005.33333333333, ans=0.5 +2024-08-25 12:18:19,488 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.877e+02 2.258e+02 2.799e+02 6.726e+02, threshold=4.515e+02, percent-clipped=2.0 +2024-08-25 12:18:28,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78058.66666666667, ans=0.0 +2024-08-25 12:19:06,982 INFO [train.py:1114] (3/4) Epoch 6, batch 2200, loss[loss=0.3126, simple_loss=0.3478, pruned_loss=0.1012, ctc_loss=0.1874, over 19579.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.318, pruned_loss=0.08612, ctc_loss=0.1617, over 3868103.31 frames. 
], batch size: 57, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:19:13,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78112.0, ans=0.1 +2024-08-25 12:19:21,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78165.33333333333, ans=0.125 +2024-08-25 12:19:23,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=78165.33333333333, ans=0.2 +2024-08-25 12:19:24,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=78165.33333333333, ans=0.125 +2024-08-25 12:19:27,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=78218.66666666667, ans=0.125 +2024-08-25 12:20:02,381 INFO [train.py:1114] (3/4) Epoch 6, batch 2250, loss[loss=0.2691, simple_loss=0.3259, pruned_loss=0.07726, ctc_loss=0.1444, over 19627.00 frames. ], tot_loss[loss=0.2776, simple_loss=0.3182, pruned_loss=0.08617, ctc_loss=0.1618, over 3868286.74 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 16.0 +2024-08-25 12:20:16,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. limit=15.0 +2024-08-25 12:20:38,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 2.005e+02 2.234e+02 2.581e+02 4.325e+02, threshold=4.468e+02, percent-clipped=0.0 +2024-08-25 12:20:47,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78592.0, ans=0.125 +2024-08-25 12:20:56,306 INFO [train.py:1114] (3/4) Epoch 6, batch 2300, loss[loss=0.2494, simple_loss=0.2939, pruned_loss=0.07436, ctc_loss=0.1405, over 19502.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3174, pruned_loss=0.08615, ctc_loss=0.1617, over 3861804.80 frames. ], batch size: 49, lr: 2.29e-02, grad_scale: 16.0 +2024-08-25 12:21:11,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.76 vs. limit=22.5 +2024-08-25 12:21:17,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78698.66666666667, ans=0.125 +2024-08-25 12:21:24,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=78752.0, ans=0.05 +2024-08-25 12:21:28,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78752.0, ans=0.1 +2024-08-25 12:21:41,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=78858.66666666667, ans=0.125 +2024-08-25 12:21:49,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=78858.66666666667, ans=0.0 +2024-08-25 12:21:52,656 INFO [train.py:1114] (3/4) Epoch 6, batch 2350, loss[loss=0.2916, simple_loss=0.3362, pruned_loss=0.09028, ctc_loss=0.166, over 19652.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3175, pruned_loss=0.08623, ctc_loss=0.1615, over 3863942.86 frames. 
], batch size: 63, lr: 2.29e-02, grad_scale: 16.0 +2024-08-25 12:21:53,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78912.0, ans=0.125 +2024-08-25 12:21:54,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78912.0, ans=0.125 +2024-08-25 12:22:17,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=79018.66666666667, ans=0.025 +2024-08-25 12:22:24,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=79072.0, ans=0.125 +2024-08-25 12:22:27,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=79072.0, ans=0.125 +2024-08-25 12:22:30,291 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 2.097e+02 2.553e+02 3.084e+02 6.792e+02, threshold=5.106e+02, percent-clipped=2.0 +2024-08-25 12:22:47,956 INFO [train.py:1114] (3/4) Epoch 6, batch 2400, loss[loss=0.2689, simple_loss=0.3162, pruned_loss=0.08023, ctc_loss=0.1527, over 19333.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3197, pruned_loss=0.08697, ctc_loss=0.163, over 3859078.99 frames. ], batch size: 71, lr: 2.29e-02, grad_scale: 32.0 +2024-08-25 12:23:01,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=79232.0, ans=0.125 +2024-08-25 12:23:16,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=79285.33333333333, ans=0.125 +2024-08-25 12:23:45,695 INFO [train.py:1114] (3/4) Epoch 6, batch 2450, loss[loss=0.3666, simple_loss=0.3637, pruned_loss=0.1345, ctc_loss=0.2517, over 13517.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3245, pruned_loss=0.09123, ctc_loss=0.171, over 3733220.80 frames. ], batch size: 141, lr: 2.29e-02, grad_scale: 32.0 +2024-08-25 12:23:48,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.20 vs. limit=22.5 +2024-08-25 12:24:35,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79498.66666666667, ans=0.125 +2024-08-25 12:27:49,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=79605.33333333333, ans=0.025 +2024-08-25 12:28:01,642 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.548e+02 2.056e+02 2.291e+02 2.526e+02 5.572e+02, threshold=4.582e+02, percent-clipped=1.0 +2024-08-25 12:29:27,604 INFO [train.py:1114] (3/4) Epoch 7, batch 0, loss[loss=0.2943, simple_loss=0.326, pruned_loss=0.09684, ctc_loss=0.1723, over 19820.00 frames. ], tot_loss[loss=0.2943, simple_loss=0.326, pruned_loss=0.09684, ctc_loss=0.1723, over 19820.00 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 32.0 +2024-08-25 12:29:27,605 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 12:29:44,280 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.2269, simple_loss=0.307, pruned_loss=0.05393, ctc_loss=0.0975, over 944034.00 frames. 
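The repeated `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...` entries throughout this log record adaptive gradient clipping: the optimizer tracks recent per-step gradient norms, reports five order statistics of them, and clips any step whose norm exceeds a threshold derived from those statistics. In the logged numbers the rule is consistent, e.g. the 12:28:01 warning reports quartiles `1.548e+02 2.056e+02 2.291e+02 2.526e+02 5.572e+02` with `threshold=4.582e+02`, exactly 2.0 × the middle value, and the 12:22:30 warning gives `5.106e+02` = 2.0 × `2.553e+02`. The sketch below is a minimal, illustrative reconstruction of that behaviour, not the project's actual `optim.py`; the class name, the window size, and the interpretation of the five statistics as min/25%/50%/75%/max are assumptions for the example.

```python
# Illustrative sketch only (assumed names and window size, NOT the real optim.py):
# maintain recent gradient norms and clip at clipping_scale * median, which is
# the relationship the logged thresholds exhibit.
import torch


class QuartileGradClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
        self.clipping_scale = clipping_scale
        self.window = window                # how many recent steps to remember
        self.recent_norms: list[float] = []
        self.steps = 0
        self.clipped = 0

    def clip_(self, parameters) -> float:
        grads = [p.grad for p in parameters if p.grad is not None]
        # Global gradient norm over all parameters for this step.
        norm = torch.linalg.vector_norm(
            torch.stack([torch.linalg.vector_norm(g) for g in grads])
        ).item()
        self.recent_norms = (self.recent_norms + [norm])[-self.window:]

        # min / 25% / 50% / 75% / max of recent norms, matching the five
        # numbers printed after "grad-norm quartiles" in the log.
        qs = torch.quantile(
            torch.tensor(self.recent_norms),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * qs[2].item()  # 2.0 * median

        self.steps += 1
        if norm > threshold:
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)  # rescale so the step norm == threshold

        pct = 100.0 * self.clipped / self.steps
        print(
            f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
            + " ".join(f"{q:.3e}" for q in qs.tolist())
            + f", threshold={threshold:.3e}, percent-clipped={pct:.1f}"
        )
        return norm
```

Read this way, `percent-clipped` in the warnings is simply the fraction of recent optimizer steps whose gradient norm exceeded that moving threshold, which explains why it is usually 0.0-3.0 and spikes (e.g. 7.0 at 12:16:14) exactly when an outlier norm such as 1.021e+03 appears in the reported maximum.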
+2024-08-25 12:29:44,281 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-25 12:29:47,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79658.66666666667, ans=0.125 +2024-08-25 12:30:16,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.09 vs. limit=22.5 +2024-08-25 12:30:20,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.00 vs. limit=10.0 +2024-08-25 12:31:23,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.58 vs. limit=22.5 +2024-08-25 12:31:28,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=79818.66666666667, ans=0.07 +2024-08-25 12:31:35,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=79818.66666666667, ans=0.2 +2024-08-25 12:33:04,668 INFO [train.py:1114] (3/4) Epoch 7, batch 50, loss[loss=0.2438, simple_loss=0.2919, pruned_loss=0.07049, ctc_loss=0.1366, over 19712.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3225, pruned_loss=0.08895, ctc_loss=0.1675, over 843690.50 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0 +2024-08-25 12:33:14,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=79925.33333333333, ans=0.2 +2024-08-25 12:33:38,330 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:33:57,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80085.33333333333, ans=0.0 +2024-08-25 12:34:17,264 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.570e+02 1.999e+02 2.246e+02 2.808e+02 5.514e+02, threshold=4.492e+02, percent-clipped=3.0 +2024-08-25 12:34:24,293 INFO [train.py:1114] (3/4) Epoch 7, batch 100, loss[loss=0.2334, simple_loss=0.2852, pruned_loss=0.06524, ctc_loss=0.1277, over 19716.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3225, pruned_loss=0.08838, ctc_loss=0.1666, over 1498479.81 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:34:40,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=80245.33333333333, ans=0.0 +2024-08-25 12:34:54,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=80298.66666666667, ans=0.09899494936611666 +2024-08-25 12:34:58,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80352.0, ans=0.1 +2024-08-25 12:35:08,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=80352.0, ans=0.125 +2024-08-25 12:35:10,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=80405.33333333333, ans=0.025 +2024-08-25 12:35:23,289 INFO [train.py:1114] (3/4) Epoch 7, batch 150, loss[loss=0.2098, simple_loss=0.2712, pruned_loss=0.0533, ctc_loss=0.1045, over 19754.00 frames. 
], tot_loss[loss=0.2769, simple_loss=0.3185, pruned_loss=0.08548, ctc_loss=0.1608, over 2028113.79 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:35:26,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=80458.66666666667, ans=0.09899494936611666 +2024-08-25 12:35:30,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.60 vs. limit=22.5 +2024-08-25 12:35:36,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80512.0, ans=0.125 +2024-08-25 12:35:46,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=80512.0, ans=0.125 +2024-08-25 12:36:01,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.52 vs. limit=15.0 +2024-08-25 12:36:03,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=80618.66666666667, ans=0.125 +2024-08-25 12:36:04,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.94 vs. limit=12.0 +2024-08-25 12:36:09,136 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.27 vs. limit=12.0 +2024-08-25 12:36:12,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=80618.66666666667, ans=0.2 +2024-08-25 12:36:15,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=80672.0, ans=0.125 +2024-08-25 12:36:18,824 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.959e+02 2.217e+02 2.953e+02 5.735e+02, threshold=4.434e+02, percent-clipped=2.0 +2024-08-25 12:36:26,007 INFO [train.py:1114] (3/4) Epoch 7, batch 200, loss[loss=0.3082, simple_loss=0.3388, pruned_loss=0.0998, ctc_loss=0.1948, over 18310.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3161, pruned_loss=0.08428, ctc_loss=0.1591, over 2434946.64 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:36:26,208 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:36:47,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=80832.0, ans=0.0 +2024-08-25 12:36:49,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=80832.0, ans=0.2 +2024-08-25 12:36:57,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.08 vs. limit=15.0 +2024-08-25 12:36:59,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80885.33333333333, ans=0.125 +2024-08-25 12:37:01,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. 
limit=12.0 +2024-08-25 12:37:22,863 INFO [train.py:1114] (3/4) Epoch 7, batch 250, loss[loss=0.3218, simple_loss=0.3508, pruned_loss=0.1066, ctc_loss=0.1987, over 19305.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3168, pruned_loss=0.08493, ctc_loss=0.1604, over 2754437.13 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0 +2024-08-25 12:37:40,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81045.33333333333, ans=0.125 +2024-08-25 12:38:01,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=81152.0, ans=0.125 +2024-08-25 12:38:04,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81152.0, ans=0.125 +2024-08-25 12:38:16,696 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.901e+02 2.294e+02 2.833e+02 4.254e+02, threshold=4.587e+02, percent-clipped=0.0 +2024-08-25 12:38:17,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=81205.33333333333, ans=0.0 +2024-08-25 12:38:23,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.24 vs. limit=6.0 +2024-08-25 12:38:23,347 INFO [train.py:1114] (3/4) Epoch 7, batch 300, loss[loss=0.2797, simple_loss=0.3235, pruned_loss=0.08738, ctc_loss=0.1526, over 19536.00 frames. ], tot_loss[loss=0.274, simple_loss=0.316, pruned_loss=0.08423, ctc_loss=0.1587, over 3000366.08 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0 +2024-08-25 12:38:30,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81258.66666666667, ans=0.1 +2024-08-25 12:38:34,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=81312.0, ans=10.0 +2024-08-25 12:38:38,075 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0 +2024-08-25 12:38:39,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=81312.0, ans=0.0 +2024-08-25 12:38:41,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=81312.0, ans=0.125 +2024-08-25 12:38:56,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=81365.33333333333, ans=0.125 +2024-08-25 12:38:57,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.83 vs. limit=12.0 +2024-08-25 12:39:01,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.63 vs. limit=15.0 +2024-08-25 12:39:52,666 INFO [train.py:1114] (3/4) Epoch 7, batch 350, loss[loss=0.2494, simple_loss=0.2937, pruned_loss=0.07346, ctc_loss=0.1451, over 19739.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3164, pruned_loss=0.08428, ctc_loss=0.1592, over 3189719.98 frames. 
], batch size: 48, lr: 2.12e-02, grad_scale: 32.0 +2024-08-25 12:39:52,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81525.33333333333, ans=0.125 +2024-08-25 12:39:57,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=81525.33333333333, ans=0.125 +2024-08-25 12:40:05,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.46 vs. limit=6.0 +2024-08-25 12:40:41,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.35 vs. limit=15.0 +2024-08-25 12:40:43,970 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.560e+02 1.980e+02 2.268e+02 2.810e+02 5.782e+02, threshold=4.535e+02, percent-clipped=1.0 +2024-08-25 12:40:49,288 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0 +2024-08-25 12:40:50,663 INFO [train.py:1114] (3/4) Epoch 7, batch 400, loss[loss=0.2756, simple_loss=0.325, pruned_loss=0.08155, ctc_loss=0.1577, over 19482.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3158, pruned_loss=0.08369, ctc_loss=0.1582, over 3341175.01 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 32.0 +2024-08-25 12:40:57,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=81792.0, ans=0.025 +2024-08-25 12:40:59,208 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-08-25 12:41:08,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=81845.33333333333, ans=0.05 +2024-08-25 12:41:21,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=81898.66666666667, ans=0.125 +2024-08-25 12:41:24,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.38 vs. limit=15.0 +2024-08-25 12:41:30,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81952.0, ans=0.125 +2024-08-25 12:41:41,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=82005.33333333333, ans=0.0 +2024-08-25 12:41:49,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82005.33333333333, ans=0.1 +2024-08-25 12:41:52,335 INFO [train.py:1114] (3/4) Epoch 7, batch 450, loss[loss=0.2853, simple_loss=0.3278, pruned_loss=0.08781, ctc_loss=0.1681, over 19613.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3163, pruned_loss=0.08401, ctc_loss=0.1586, over 3449286.53 frames. 
], batch size: 55, lr: 2.11e-02, grad_scale: 32.0 +2024-08-25 12:42:02,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82058.66666666667, ans=0.125 +2024-08-25 12:42:22,496 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 12:42:37,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82272.0, ans=0.1 +2024-08-25 12:42:40,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=82272.0, ans=0.125 +2024-08-25 12:42:43,144 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.611e+02 1.947e+02 2.448e+02 2.960e+02 4.262e+02, threshold=4.896e+02, percent-clipped=0.0 +2024-08-25 12:42:46,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82272.0, ans=0.1 +2024-08-25 12:42:52,051 INFO [train.py:1114] (3/4) Epoch 7, batch 500, loss[loss=0.2713, simple_loss=0.3231, pruned_loss=0.07985, ctc_loss=0.1497, over 19659.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3147, pruned_loss=0.0832, ctc_loss=0.1569, over 3543684.98 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 32.0 +2024-08-25 12:42:55,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=82325.33333333333, ans=0.125 +2024-08-25 12:43:00,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=82325.33333333333, ans=0.125 +2024-08-25 12:43:14,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82378.66666666667, ans=0.125 +2024-08-25 12:43:32,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82485.33333333333, ans=0.1 +2024-08-25 12:43:38,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=82485.33333333333, ans=0.5 +2024-08-25 12:43:51,836 INFO [train.py:1114] (3/4) Epoch 7, batch 550, loss[loss=0.2888, simple_loss=0.3248, pruned_loss=0.09182, ctc_loss=0.1729, over 19276.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3149, pruned_loss=0.08332, ctc_loss=0.157, over 3605939.25 frames. 
], batch size: 71, lr: 2.11e-02, grad_scale: 32.0 +2024-08-25 12:43:53,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82592.0, ans=0.1 +2024-08-25 12:44:32,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=82752.0, ans=0.0 +2024-08-25 12:44:33,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=82752.0, ans=0.125 +2024-08-25 12:44:43,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82805.33333333333, ans=0.1 +2024-08-25 12:44:44,975 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 2.000e+02 2.364e+02 2.910e+02 5.356e+02, threshold=4.728e+02, percent-clipped=1.0 +2024-08-25 12:44:52,598 INFO [train.py:1114] (3/4) Epoch 7, batch 600, loss[loss=0.3211, simple_loss=0.3527, pruned_loss=0.1049, ctc_loss=0.1993, over 19414.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3141, pruned_loss=0.08248, ctc_loss=0.1558, over 3664664.55 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0 +2024-08-25 12:45:04,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=22.5 +2024-08-25 12:45:24,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82965.33333333333, ans=0.125 +2024-08-25 12:45:36,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=83018.66666666667, ans=0.125 +2024-08-25 12:45:51,128 INFO [train.py:1114] (3/4) Epoch 7, batch 650, loss[loss=0.2733, simple_loss=0.3153, pruned_loss=0.08426, ctc_loss=0.1567, over 19781.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3133, pruned_loss=0.08212, ctc_loss=0.1552, over 3715434.76 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-25 12:46:01,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.50 vs. limit=15.0 +2024-08-25 12:46:12,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=83178.66666666667, ans=0.125 +2024-08-25 12:46:47,174 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.844e+02 2.004e+02 2.285e+02 4.065e+02, threshold=4.009e+02, percent-clipped=0.0 +2024-08-25 12:46:52,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-08-25 12:46:52,899 INFO [train.py:1114] (3/4) Epoch 7, batch 700, loss[loss=0.2778, simple_loss=0.322, pruned_loss=0.08479, ctc_loss=0.1598, over 19722.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3143, pruned_loss=0.08265, ctc_loss=0.1558, over 3747583.31 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 16.0 +2024-08-25 12:47:21,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=83498.66666666667, ans=0.0 +2024-08-25 12:47:33,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.81 vs. 
limit=10.0
+2024-08-25 12:47:49,560 INFO [train.py:1114] (3/4) Epoch 7, batch 750, loss[loss=0.2791, simple_loss=0.3164, pruned_loss=0.0877, ctc_loss=0.1661, over 19486.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3139, pruned_loss=0.08271, ctc_loss=0.1557, over 3774145.79 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0
+2024-08-25 12:48:05,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=83712.0, ans=0.125
+2024-08-25 12:48:06,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=83712.0, ans=0.125
+2024-08-25 12:48:06,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=83712.0, ans=6.0
+2024-08-25 12:48:10,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=83712.0, ans=0.125
+2024-08-25 12:48:11,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=83712.0, ans=0.125
+2024-08-25 12:48:42,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.33 vs. limit=22.5
+2024-08-25 12:48:45,001 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.565e+02 1.885e+02 2.166e+02 2.690e+02 4.534e+02, threshold=4.331e+02, percent-clipped=3.0
+2024-08-25 12:48:46,368 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 12:48:47,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=83872.0, ans=0.2
+2024-08-25 12:48:50,700 INFO [train.py:1114] (3/4) Epoch 7, batch 800, loss[loss=0.2643, simple_loss=0.3072, pruned_loss=0.08022, ctc_loss=0.1524, over 19402.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3137, pruned_loss=0.08243, ctc_loss=0.1551, over 3794791.59 frames. ], batch size: 48, lr: 2.10e-02, grad_scale: 32.0
+2024-08-25 12:48:51,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=83925.33333333333, ans=0.0
+2024-08-25 12:48:54,197 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 12:49:00,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83978.66666666667, ans=0.0
+2024-08-25 12:49:03,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=83978.66666666667, ans=0.2
+2024-08-25 12:49:10,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83978.66666666667, ans=0.125
+2024-08-25 12:49:11,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83978.66666666667, ans=0.1
+2024-08-25 12:49:24,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=84085.33333333333, ans=0.0
+2024-08-25 12:49:25,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=84085.33333333333, ans=0.125
+2024-08-25 12:49:27,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=84085.33333333333, ans=0.125
+2024-08-25 12:49:27,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=84085.33333333333, ans=0.125
+2024-08-25 12:49:38,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=84138.66666666667, ans=0.0
+2024-08-25 12:49:51,366 INFO [train.py:1114] (3/4) Epoch 7, batch 850, loss[loss=0.2915, simple_loss=0.3364, pruned_loss=0.0899, ctc_loss=0.1669, over 19645.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3139, pruned_loss=0.08268, ctc_loss=0.1557, over 3814345.83 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0
+2024-08-25 12:49:52,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=84192.0, ans=10.0
+2024-08-25 12:49:59,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84192.0, ans=0.125
+2024-08-25 12:50:22,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.25 vs. limit=15.0
+2024-08-25 12:50:25,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=84352.0, ans=0.5
+2024-08-25 12:50:27,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=84352.0, ans=0.125
+2024-08-25 12:50:43,488 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.470e+02 1.946e+02 2.270e+02 2.825e+02 4.143e+02, threshold=4.540e+02, percent-clipped=0.0
+2024-08-25 12:50:49,141 INFO [train.py:1114] (3/4) Epoch 7, batch 900, loss[loss=0.2394, simple_loss=0.2813, pruned_loss=0.07136, ctc_loss=0.1368, over 19424.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3148, pruned_loss=0.08338, ctc_loss=0.1566, over 3817862.89 frames. ], batch size: 48, lr: 2.09e-02, grad_scale: 32.0
+2024-08-25 12:51:12,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=12.0
+2024-08-25 12:51:16,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.29 vs. limit=22.5
+2024-08-25 12:51:41,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84618.66666666667, ans=0.1
+2024-08-25 12:51:58,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=84672.0, ans=0.0
+2024-08-25 12:52:05,346 INFO [train.py:1114] (3/4) Epoch 7, batch 950, loss[loss=0.2633, simple_loss=0.3023, pruned_loss=0.08158, ctc_loss=0.153, over 19497.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3151, pruned_loss=0.08366, ctc_loss=0.1571, over 3819933.60 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 16.0
+2024-08-25 12:52:11,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84725.33333333333, ans=0.1
+2024-08-25 12:52:25,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=84778.66666666667, ans=0.125
+2024-08-25 12:52:34,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=84832.0, ans=0.125
+2024-08-25 12:52:59,184 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.065e+02 2.373e+02 2.949e+02 1.128e+03, threshold=4.746e+02, percent-clipped=6.0
+2024-08-25 12:53:05,268 INFO [train.py:1114] (3/4) Epoch 7, batch 1000, loss[loss=0.2518, simple_loss=0.2987, pruned_loss=0.07423, ctc_loss=0.1411, over 19861.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3163, pruned_loss=0.0847, ctc_loss=0.1589, over 3816057.85 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:53:18,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.48 vs. limit=22.5
+2024-08-25 12:53:19,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0
+2024-08-25 12:53:20,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=85045.33333333333, ans=0.2
+2024-08-25 12:53:22,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=85045.33333333333, ans=0.0
+2024-08-25 12:53:27,898 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.75 vs. limit=10.0
+2024-08-25 12:53:49,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.85 vs. limit=15.0
+2024-08-25 12:54:05,088 INFO [train.py:1114] (3/4) Epoch 7, batch 1050, loss[loss=0.281, simple_loss=0.3251, pruned_loss=0.08592, ctc_loss=0.1628, over 19825.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3151, pruned_loss=0.08396, ctc_loss=0.1575, over 3822662.85 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:54:36,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85365.33333333333, ans=0.1
+2024-08-25 12:54:36,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=85365.33333333333, ans=0.025
+2024-08-25 12:54:43,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85418.66666666667, ans=0.125
+2024-08-25 12:54:47,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=85418.66666666667, ans=0.125
+2024-08-25 12:54:54,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=85472.0, ans=0.125
+2024-08-25 12:54:57,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=85472.0, ans=0.2
+2024-08-25 12:55:01,659 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.423e+02 1.918e+02 2.325e+02 2.776e+02 4.591e+02, threshold=4.650e+02, percent-clipped=1.0
+2024-08-25 12:55:02,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85472.0, ans=0.125
+2024-08-25 12:55:05,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=85525.33333333333, ans=0.125
+2024-08-25 12:55:06,551 INFO [train.py:1114] (3/4) Epoch 7, batch 1100, loss[loss=0.2974, simple_loss=0.3298, pruned_loss=0.09705, ctc_loss=0.1775, over 19586.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3143, pruned_loss=0.08317, ctc_loss=0.1561, over 3829769.58 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:55:08,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=85525.33333333333, ans=0.0
+2024-08-25 12:56:01,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=85738.66666666667, ans=0.125
+2024-08-25 12:56:05,757 INFO [train.py:1114] (3/4) Epoch 7, batch 1150, loss[loss=0.2483, simple_loss=0.2949, pruned_loss=0.07354, ctc_loss=0.1363, over 19586.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3141, pruned_loss=0.08328, ctc_loss=0.1563, over 3829012.43 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0
+2024-08-25 12:56:08,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=85792.0, ans=0.125
+2024-08-25 12:56:11,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.89 vs. limit=10.0
+2024-08-25 12:56:21,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85845.33333333333, ans=0.1
+2024-08-25 12:56:25,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=85845.33333333333, ans=0.125
+2024-08-25 12:56:57,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=86005.33333333333, ans=0.0
+2024-08-25 12:56:58,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=86005.33333333333, ans=0.125
+2024-08-25 12:57:02,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.94 vs. limit=15.0
+2024-08-25 12:57:02,979 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.571e+02 1.959e+02 2.167e+02 2.666e+02 4.946e+02, threshold=4.335e+02, percent-clipped=2.0
+2024-08-25 12:57:05,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=86005.33333333333, ans=0.025
+2024-08-25 12:57:07,690 INFO [train.py:1114] (3/4) Epoch 7, batch 1200, loss[loss=0.2728, simple_loss=0.322, pruned_loss=0.08066, ctc_loss=0.1556, over 19842.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3151, pruned_loss=0.08389, ctc_loss=0.1576, over 3824988.64 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 12:57:17,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=15.0
+2024-08-25 12:57:36,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86165.33333333333, ans=0.1
+2024-08-25 12:57:56,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.35 vs. limit=6.0
+2024-08-25 12:57:58,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.02 vs. limit=22.5
+2024-08-25 12:58:05,919 INFO [train.py:1114] (3/4) Epoch 7, batch 1250, loss[loss=0.2644, simple_loss=0.3201, pruned_loss=0.07603, ctc_loss=0.1414, over 19533.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3147, pruned_loss=0.08308, ctc_loss=0.156, over 3843369.71 frames. ], batch size: 61, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 12:58:06,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86325.33333333333, ans=0.1
+2024-08-25 12:58:12,348 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.50 vs. limit=15.0
+2024-08-25 12:58:28,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0
+2024-08-25 12:58:41,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=86432.0, ans=0.025
+2024-08-25 12:58:41,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=86432.0, ans=0.0
+2024-08-25 12:58:47,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=86485.33333333333, ans=0.125
+2024-08-25 12:58:48,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=86485.33333333333, ans=0.2
+2024-08-25 12:59:02,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86538.66666666667, ans=0.125
+2024-08-25 12:59:02,855 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 1.964e+02 2.304e+02 2.729e+02 5.465e+02, threshold=4.608e+02, percent-clipped=2.0
+2024-08-25 12:59:07,515 INFO [train.py:1114] (3/4) Epoch 7, batch 1300, loss[loss=0.2675, simple_loss=0.3096, pruned_loss=0.08184, ctc_loss=0.1544, over 18798.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3136, pruned_loss=0.08228, ctc_loss=0.1545, over 3845458.99 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 12:59:07,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=86592.0, ans=0.125
+2024-08-25 12:59:19,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=86645.33333333333, ans=0.125
+2024-08-25 12:59:20,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=86645.33333333333, ans=0.125
+2024-08-25 12:59:34,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=86698.66666666667, ans=0.2
+2024-08-25 12:59:38,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=86698.66666666667, ans=0.0
+2024-08-25 12:59:45,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=86752.0, ans=0.0
+2024-08-25 12:59:51,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=86752.0, ans=0.0
+2024-08-25 13:00:04,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86805.33333333333, ans=0.1
+2024-08-25 13:00:07,957 INFO [train.py:1114] (3/4) Epoch 7, batch 1350, loss[loss=0.2905, simple_loss=0.324, pruned_loss=0.09312, ctc_loss=0.1771, over 19761.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3126, pruned_loss=0.0815, ctc_loss=0.1532, over 3857306.00 frames. ], batch size: 54, lr: 2.07e-02, grad_scale: 32.0
+2024-08-25 13:00:22,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=86912.0, ans=0.125
+2024-08-25 13:00:22,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=86912.0, ans=0.0
+2024-08-25 13:00:29,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.64 vs. limit=6.0
+2024-08-25 13:00:35,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.74 vs. limit=15.0
+2024-08-25 13:00:39,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=86965.33333333333, ans=0.0
+2024-08-25 13:01:52,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=87072.0, ans=0.0
+2024-08-25 13:01:59,605 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.935e+02 2.309e+02 3.009e+02 4.449e+02, threshold=4.618e+02, percent-clipped=0.0
+2024-08-25 13:02:04,191 INFO [train.py:1114] (3/4) Epoch 7, batch 1400, loss[loss=0.2143, simple_loss=0.2697, pruned_loss=0.05821, ctc_loss=0.1061, over 19672.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3129, pruned_loss=0.08193, ctc_loss=0.1541, over 3863716.77 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0
+2024-08-25 13:02:13,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=87125.33333333333, ans=0.0
+2024-08-25 13:02:55,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=87338.66666666667, ans=0.0
+2024-08-25 13:02:58,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87338.66666666667, ans=0.0
+2024-08-25 13:03:02,255 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 13:03:05,400 INFO [train.py:1114] (3/4) Epoch 7, batch 1450, loss[loss=0.288, simple_loss=0.3331, pruned_loss=0.088, ctc_loss=0.1673, over 19679.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3133, pruned_loss=0.08211, ctc_loss=0.1546, over 3861825.90 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 16.0
+2024-08-25 13:03:07,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=87392.0, ans=0.0
+2024-08-25 13:03:26,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=87445.33333333333, ans=0.125
+2024-08-25 13:03:35,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=87498.66666666667, ans=0.0
+2024-08-25 13:04:26,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=87552.0, ans=0.125
+2024-08-25 13:04:30,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=87552.0, ans=0.2
+2024-08-25 13:04:32,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=87552.0, ans=0.0
+2024-08-25 13:04:34,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=87552.0, ans=0.125
+2024-08-25 13:04:38,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=87605.33333333333, ans=0.0
+2024-08-25 13:04:46,559 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 2.015e+02 2.285e+02 2.716e+02 4.465e+02, threshold=4.569e+02, percent-clipped=0.0
+2024-08-25 13:04:50,187 INFO [train.py:1114] (3/4) Epoch 7, batch 1500, loss[loss=0.2821, simple_loss=0.3261, pruned_loss=0.0858, ctc_loss=0.1665, over 19590.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3139, pruned_loss=0.08227, ctc_loss=0.1549, over 3860974.96 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 16.0
+2024-08-25 13:04:55,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87658.66666666667, ans=0.0
+2024-08-25 13:05:21,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=87765.33333333333, ans=0.125
+2024-08-25 13:05:37,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87818.66666666667, ans=0.125
+2024-08-25 13:05:39,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=87818.66666666667, ans=0.015
+2024-08-25 13:05:57,427 INFO [train.py:1114] (3/4) Epoch 7, batch 1550, loss[loss=0.2968, simple_loss=0.3401, pruned_loss=0.09256, ctc_loss=0.1709, over 19590.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3143, pruned_loss=0.08276, ctc_loss=0.1556, over 3846531.22 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 16.0
+2024-08-25 13:06:05,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=87925.33333333333, ans=0.2
+2024-08-25 13:06:11,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=87978.66666666667, ans=0.125
+2024-08-25 13:06:34,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=88085.33333333333, ans=0.0
+2024-08-25 13:06:55,898 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.554e+02 1.880e+02 2.225e+02 2.757e+02 4.141e+02, threshold=4.451e+02, percent-clipped=0.0
+2024-08-25 13:07:00,952 INFO [train.py:1114] (3/4) Epoch 7, batch 1600, loss[loss=0.2773, simple_loss=0.3206, pruned_loss=0.08464, ctc_loss=0.1618, over 19851.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3145, pruned_loss=0.08307, ctc_loss=0.1561, over 3835162.06 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:07:24,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0
+2024-08-25 13:07:36,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=88352.0, ans=0.125
+2024-08-25 13:07:53,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=88405.33333333333, ans=0.0
+2024-08-25 13:07:58,847 INFO [train.py:1114] (3/4) Epoch 7, batch 1650, loss[loss=0.2507, simple_loss=0.31, pruned_loss=0.0691, ctc_loss=0.133, over 19636.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3141, pruned_loss=0.083, ctc_loss=0.1559, over 3832284.41 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:08:07,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=88458.66666666667, ans=0.125
+2024-08-25 13:08:21,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=88512.0, ans=0.2
+2024-08-25 13:08:25,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=88565.33333333333, ans=0.0
+2024-08-25 13:08:47,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=88672.0, ans=0.125
+2024-08-25 13:08:54,968 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.917e+02 2.131e+02 2.729e+02 4.248e+02, threshold=4.261e+02, percent-clipped=0.0
+2024-08-25 13:08:58,381 INFO [train.py:1114] (3/4) Epoch 7, batch 1700, loss[loss=0.2143, simple_loss=0.2592, pruned_loss=0.06156, ctc_loss=0.1159, over 19685.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3133, pruned_loss=0.0822, ctc_loss=0.1546, over 3847011.65 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:09:06,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=88725.33333333333, ans=0.125
+2024-08-25 13:09:12,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=88778.66666666667, ans=0.2
+2024-08-25 13:09:16,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88778.66666666667, ans=0.1
+2024-08-25 13:09:50,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=88938.66666666667, ans=0.2
+2024-08-25 13:09:53,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=88938.66666666667, ans=0.95
+2024-08-25 13:09:55,061 INFO [train.py:1114] (3/4) Epoch 7, batch 1750, loss[loss=0.2475, simple_loss=0.289, pruned_loss=0.07482, ctc_loss=0.141, over 19671.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3125, pruned_loss=0.08172, ctc_loss=0.1538, over 3852333.62 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0
+2024-08-25 13:09:58,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=88992.0, ans=0.125
+2024-08-25 13:11:03,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=88992.0, ans=0.0
+2024-08-25 13:16:07,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=89045.33333333333, ans=0.09899494936611666
+2024-08-25 13:17:17,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.06 vs. limit=22.5
+2024-08-25 13:17:35,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=89152.0, ans=0.125
+2024-08-25 13:17:36,522 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 13:25:02,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=89152.0, ans=0.125
+2024-08-25 13:25:11,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89205.33333333333, ans=0.0
+2024-08-25 13:29:44,218 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.972e+02 2.344e+02 2.828e+02 4.449e+02, threshold=4.688e+02, percent-clipped=1.0
+2024-08-25 13:29:47,700 INFO [train.py:1114] (3/4) Epoch 7, batch 1800, loss[loss=0.2623, simple_loss=0.3149, pruned_loss=0.07566, ctc_loss=0.1459, over 19612.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3126, pruned_loss=0.08192, ctc_loss=0.1542, over 3854206.50 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0
+2024-08-25 13:31:00,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=89258.66666666667, ans=0.025
+2024-08-25 13:31:12,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=15.0
+2024-08-25 13:37:24,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=89365.33333333333, ans=0.125
+2024-08-25 13:38:30,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=89418.66666666667, ans=0.0
+2024-08-25 13:38:36,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0
+2024-08-25 13:40:34,862 INFO [train.py:1114] (3/4) Epoch 7, batch 1850, loss[loss=0.2804, simple_loss=0.3212, pruned_loss=0.08607, ctc_loss=0.1684, over 19581.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3125, pruned_loss=0.08181, ctc_loss=0.154, over 3857402.30 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 32.0
+2024-08-25 13:41:30,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=89578.66666666667, ans=0.2
+2024-08-25 13:41:37,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=89578.66666666667, ans=0.125
+2024-08-25 13:42:31,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.18 vs. limit=15.0
+2024-08-25 13:42:49,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=89632.0, ans=0.0
+2024-08-25 13:44:01,301 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.852e+02 2.070e+02 2.397e+02 4.608e+02, threshold=4.140e+02, percent-clipped=0.0
+2024-08-25 13:44:07,952 INFO [train.py:1114] (3/4) Epoch 7, batch 1900, loss[loss=0.2444, simple_loss=0.3173, pruned_loss=0.06249, ctc_loss=0.1165, over 19648.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.313, pruned_loss=0.08158, ctc_loss=0.1536, over 3861127.34 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0
+2024-08-25 13:45:36,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=90005.33333333333, ans=0.0
+2024-08-25 13:45:41,198 INFO [train.py:1114] (3/4) Epoch 7, batch 1950, loss[loss=0.2734, simple_loss=0.3188, pruned_loss=0.08279, ctc_loss=0.156, over 19592.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3142, pruned_loss=0.08167, ctc_loss=0.1537, over 3870000.94 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 16.0
+2024-08-25 13:45:41,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=90058.66666666667, ans=0.0
+2024-08-25 13:46:06,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=90112.0, ans=0.025
+2024-08-25 13:46:28,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=90218.66666666667, ans=0.025
+2024-08-25 13:46:29,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90218.66666666667, ans=0.1
+2024-08-25 13:46:29,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.20 vs. limit=6.0
+2024-08-25 13:46:34,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.64 vs. limit=10.0
+2024-08-25 13:46:42,769 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 1.896e+02 2.177e+02 2.703e+02 3.964e+02, threshold=4.354e+02, percent-clipped=0.0
+2024-08-25 13:46:45,061 INFO [train.py:1114] (3/4) Epoch 7, batch 2000, loss[loss=0.2254, simple_loss=0.2721, pruned_loss=0.0653, ctc_loss=0.1204, over 19647.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3146, pruned_loss=0.08216, ctc_loss=0.1545, over 3853995.48 frames. ], batch size: 45, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:46:57,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=90378.66666666667, ans=0.95
+2024-08-25 13:47:01,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90378.66666666667, ans=0.1
+2024-08-25 13:47:16,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90432.0, ans=0.125
+2024-08-25 13:47:28,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.93 vs. limit=15.0
+2024-08-25 13:47:36,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=90538.66666666667, ans=0.0
+2024-08-25 13:47:41,004 INFO [train.py:1114] (3/4) Epoch 7, batch 2050, loss[loss=0.2475, simple_loss=0.2873, pruned_loss=0.07588, ctc_loss=0.1396, over 19728.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3136, pruned_loss=0.08194, ctc_loss=0.1542, over 3850036.56 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:47:45,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90592.0, ans=0.1
+2024-08-25 13:47:58,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=90645.33333333333, ans=0.125
+2024-08-25 13:48:08,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=90698.66666666667, ans=0.125
+2024-08-25 13:48:19,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90752.0, ans=0.1
+2024-08-25 13:48:26,780 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 13:48:36,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.053e+02 2.413e+02 3.017e+02 5.203e+02, threshold=4.827e+02, percent-clipped=2.0
+2024-08-25 13:48:38,604 INFO [train.py:1114] (3/4) Epoch 7, batch 2100, loss[loss=0.2616, simple_loss=0.3127, pruned_loss=0.07656, ctc_loss=0.1432, over 19769.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3122, pruned_loss=0.08079, ctc_loss=0.1521, over 3858319.64 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:48:39,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=90858.66666666667, ans=0.0
+2024-08-25 13:48:55,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=90912.0, ans=0.0
+2024-08-25 13:48:57,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=90912.0, ans=0.0
+2024-08-25 13:48:59,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=90912.0, ans=0.07
+2024-08-25 13:49:01,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90912.0, ans=0.1
+2024-08-25 13:49:23,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=91018.66666666667, ans=0.0
+2024-08-25 13:49:30,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=91018.66666666667, ans=0.025
+2024-08-25 13:49:33,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=91072.0, ans=0.0
+2024-08-25 13:49:41,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=91072.0, ans=0.125
+2024-08-25 13:49:43,244 INFO [train.py:1114] (3/4) Epoch 7, batch 2150, loss[loss=0.2565, simple_loss=0.3044, pruned_loss=0.07614, ctc_loss=0.1409, over 19860.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3108, pruned_loss=0.08009, ctc_loss=0.1507, over 3869465.65 frames. ], batch size: 52, lr: 2.03e-02, grad_scale: 32.0
+2024-08-25 13:49:43,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0
+2024-08-25 13:50:07,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=91232.0, ans=0.125
+2024-08-25 13:50:23,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.75 vs. limit=22.5
+2024-08-25 13:50:29,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=91338.66666666667, ans=0.125
+2024-08-25 13:50:35,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91338.66666666667, ans=0.1
+2024-08-25 13:50:36,452 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.440e+02 1.920e+02 2.200e+02 2.924e+02 5.090e+02, threshold=4.400e+02, percent-clipped=1.0
+2024-08-25 13:50:39,137 INFO [train.py:1114] (3/4) Epoch 7, batch 2200, loss[loss=0.2414, simple_loss=0.3066, pruned_loss=0.06342, ctc_loss=0.1233, over 19586.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.311, pruned_loss=0.08007, ctc_loss=0.1508, over 3868252.74 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0
+2024-08-25 13:50:44,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=91392.0, ans=0.125
+2024-08-25 13:50:51,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.46 vs. limit=12.0
+2024-08-25 13:50:57,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=91445.33333333333, ans=0.015
+2024-08-25 13:51:08,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91498.66666666667, ans=0.125
+2024-08-25 13:51:13,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0
+2024-08-25 13:51:33,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=91658.66666666667, ans=0.125
+2024-08-25 13:51:34,993 INFO [train.py:1114] (3/4) Epoch 7, batch 2250, loss[loss=0.279, simple_loss=0.3189, pruned_loss=0.08712, ctc_loss=0.162, over 19598.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3118, pruned_loss=0.08057, ctc_loss=0.1516, over 3868037.68 frames. ], batch size: 55, lr: 2.02e-02, grad_scale: 16.0
+2024-08-25 13:51:53,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=91712.0, ans=0.0
+2024-08-25 13:51:54,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=91712.0, ans=0.05
+2024-08-25 13:52:10,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=91818.66666666667, ans=0.2
+2024-08-25 13:52:25,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.89 vs. limit=15.0
+2024-08-25 13:52:28,396 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 2.146e+02 2.677e+02 3.204e+02 4.930e+02, threshold=5.354e+02, percent-clipped=3.0
+2024-08-25 13:52:29,555 INFO [train.py:1114] (3/4) Epoch 7, batch 2300, loss[loss=0.2586, simple_loss=0.3032, pruned_loss=0.07827, ctc_loss=0.1439, over 19499.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3112, pruned_loss=0.08092, ctc_loss=0.1522, over 3860645.93 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 16.0
+2024-08-25 13:52:36,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=91925.33333333333, ans=0.125
+2024-08-25 13:52:43,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91978.66666666667, ans=0.125
+2024-08-25 13:52:50,136 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.68 vs. limit=15.0
+2024-08-25 13:53:25,153 INFO [train.py:1114] (3/4) Epoch 7, batch 2350, loss[loss=0.2513, simple_loss=0.3063, pruned_loss=0.07176, ctc_loss=0.1321, over 19666.00 frames. ], tot_loss[loss=0.267, simple_loss=0.311, pruned_loss=0.08107, ctc_loss=0.152, over 3862855.98 frames. ], batch size: 63, lr: 2.02e-02, grad_scale: 16.0
+2024-08-25 13:53:27,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=92192.0, ans=0.025
+2024-08-25 13:53:28,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=92192.0, ans=0.07
+2024-08-25 13:53:44,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=92245.33333333333, ans=0.0
+2024-08-25 13:53:46,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=92298.66666666667, ans=0.2
+2024-08-25 13:53:47,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=92298.66666666667, ans=0.125
+2024-08-25 13:53:49,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92298.66666666667, ans=0.125
+2024-08-25 13:53:50,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=92298.66666666667, ans=0.0
+2024-08-25 13:53:56,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=92352.0, ans=0.1
+2024-08-25 13:54:18,219 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.474e+02 1.985e+02 2.336e+02 2.802e+02 4.974e+02, threshold=4.671e+02, percent-clipped=0.0
+2024-08-25 13:54:19,278 INFO [train.py:1114] (3/4) Epoch 7, batch 2400, loss[loss=0.2554, simple_loss=0.3126, pruned_loss=0.07284, ctc_loss=0.1314, over 19292.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3139, pruned_loss=0.08238, ctc_loss=0.1543, over 3857788.50 frames. ], batch size: 71, lr: 2.01e-02, grad_scale: 32.0
+2024-08-25 13:54:21,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92458.66666666667, ans=0.1
+2024-08-25 13:54:21,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=92458.66666666667, ans=0.1
+2024-08-25 13:54:22,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=92458.66666666667, ans=0.0
+2024-08-25 13:54:22,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92458.66666666667, ans=0.125
+2024-08-25 13:54:23,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=92458.66666666667, ans=0.125
+2024-08-25 13:54:31,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=92512.0, ans=0.0
+2024-08-25 13:55:53,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=92618.66666666667, ans=0.2
+2024-08-25 13:56:03,417 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=9.609e-01
+2024-08-25 13:56:05,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=92672.0, ans=0.07
+2024-08-25 13:56:05,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=92672.0, ans=0.125
+2024-08-25 13:56:13,547 INFO [train.py:1114] (3/4) Epoch 7, batch 2450, loss[loss=0.3709, simple_loss=0.3636, pruned_loss=0.1388, ctc_loss=0.2512, over 13682.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3186, pruned_loss=0.08629, ctc_loss=0.162, over 3730784.06 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 32.0
+2024-08-25 13:56:44,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=92778.66666666667, ans=0.0
+2024-08-25 13:56:57,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=92885.33333333333, ans=0.2
+2024-08-25 13:57:54,269 INFO [train.py:1114] (3/4) Epoch 8, batch 0, loss[loss=0.2478, simple_loss=0.2915, pruned_loss=0.07495, ctc_loss=0.1355, over 19816.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.2915, pruned_loss=0.07495, ctc_loss=0.1355, over 19816.00 frames. ], batch size: 49, lr: 1.89e-02, grad_scale: 32.0
+2024-08-25 13:57:54,270 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-25 13:59:56,296 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.2171, simple_loss=0.2997, pruned_loss=0.04948, ctc_loss=0.08904, over 944034.00 frames.
+2024-08-25 13:59:56,297 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB
+2024-08-25 13:59:56,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.80 vs. limit=15.0
+2024-08-25 14:01:03,644 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.719e+02 2.158e+02 2.483e+02 2.902e+02 5.180e+02, threshold=4.965e+02, percent-clipped=2.0
+2024-08-25 14:01:10,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=92986.66666666667, ans=0.125
+2024-08-25 14:02:04,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=93146.66666666667, ans=0.125
+2024-08-25 14:02:06,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93146.66666666667, ans=0.1
+2024-08-25 14:02:13,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=93146.66666666667, ans=0.025
+2024-08-25 14:02:17,067 INFO [train.py:1114] (3/4) Epoch 8, batch 50, loss[loss=0.2487, simple_loss=0.2929, pruned_loss=0.07385, ctc_loss=0.1417, over 19730.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3147, pruned_loss=0.08233, ctc_loss=0.1555, over 845075.57 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0
+2024-08-25 14:02:32,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=93253.33333333333, ans=0.0
+2024-08-25 14:02:32,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93253.33333333333, ans=0.125
+2024-08-25 14:02:46,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=93306.66666666667, ans=0.125
+2024-08-25 14:02:49,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.25 vs. limit=15.0
+2024-08-25 14:05:03,226 INFO [train.py:1114] (3/4) Epoch 8, batch 100, loss[loss=0.2546, simple_loss=0.2982, pruned_loss=0.07646, ctc_loss=0.145, over 19721.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3159, pruned_loss=0.08228, ctc_loss=0.1551, over 1500583.81 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0
+2024-08-25 14:05:14,938 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.910e+02 2.219e+02 2.660e+02 5.043e+02, threshold=4.439e+02, percent-clipped=1.0
+2024-08-25 14:05:15,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93520.0, ans=0.125
+2024-08-25 14:05:25,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=93520.0, ans=0.125
+2024-08-25 14:05:40,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.55 vs. limit=15.0
+2024-08-25 14:05:43,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=93626.66666666667, ans=0.09899494936611666
+2024-08-25 14:05:45,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.74 vs. limit=22.5
+2024-08-25 14:05:53,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=93680.0, ans=0.0
+2024-08-25 14:07:14,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=93680.0, ans=0.2
+2024-08-25 14:07:16,383 INFO [train.py:1114] (3/4) Epoch 8, batch 150, loss[loss=0.2325, simple_loss=0.2818, pruned_loss=0.06641, ctc_loss=0.1262, over 19701.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3114, pruned_loss=0.07967, ctc_loss=0.1501, over 2027987.36 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0
+2024-08-25 14:07:16,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93733.33333333333, ans=0.1
+2024-08-25 14:08:12,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=93786.66666666667, ans=0.125
+2024-08-25 14:09:12,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=93840.0, ans=0.125
+2024-08-25 14:10:15,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=94000.0, ans=0.0
+2024-08-25 14:10:16,272 INFO [train.py:1114] (3/4) Epoch 8, batch 200, loss[loss=0.3256, simple_loss=0.353, pruned_loss=0.1073, ctc_loss=0.2089, over 18240.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3097, pruned_loss=0.07904, ctc_loss=0.1488, over 2435470.75 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0
+2024-08-25 14:10:20,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=94000.0, ans=0.0
+2024-08-25 14:10:29,224 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.854e+02 2.093e+02 2.544e+02 5.078e+02, threshold=4.187e+02, percent-clipped=1.0
+2024-08-25 14:10:36,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.28 vs. limit=6.0
+2024-08-25 14:10:38,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94053.33333333333, ans=0.1
+2024-08-25 14:10:39,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=94106.66666666667, ans=0.2
+2024-08-25 14:10:43,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=94106.66666666667, ans=10.0
+2024-08-25 14:10:48,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=94106.66666666667, ans=0.0
+2024-08-25 14:10:49,717 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 14:10:55,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=94160.0, ans=0.125
+2024-08-25 14:11:05,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=94213.33333333333, ans=0.0
+2024-08-25 14:11:07,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=94213.33333333333, ans=0.125
+2024-08-25 14:11:17,842 INFO [train.py:1114] (3/4) Epoch 8, batch 250, loss[loss=0.2901, simple_loss=0.334, pruned_loss=0.0919, ctc_loss=0.1561, over 19306.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3094, pruned_loss=0.07866, ctc_loss=0.1481, over 2755765.24 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0
+2024-08-25 14:11:27,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=94266.66666666667, ans=0.2
+2024-08-25 14:12:26,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=94320.0, ans=0.04949747468305833
+2024-08-25 14:13:10,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=94480.0, ans=0.04949747468305833
+2024-08-25 14:13:21,902 INFO [train.py:1114] (3/4) Epoch 8, batch 300, loss[loss=0.2471, simple_loss=0.3091, pruned_loss=0.06679, ctc_loss=0.1286, over 19546.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.309, pruned_loss=0.07824, ctc_loss=0.1473, over 3000913.20 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 32.0
+2024-08-25 14:13:28,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=94533.33333333333, ans=0.2
+2024-08-25 14:13:33,354 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.508e+02 1.987e+02 2.340e+02 3.022e+02 6.047e+02, threshold=4.681e+02, percent-clipped=9.0
+2024-08-25 14:13:34,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=94586.66666666667, ans=0.2
+2024-08-25 14:13:44,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94640.0, ans=0.0
+2024-08-25 14:13:45,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=94640.0, ans=0.1
+2024-08-25 14:14:22,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=94640.0, ans=0.2
+2024-08-25 14:14:52,110 INFO [train.py:1114] (3/4) Epoch 8, batch 350, loss[loss=0.2167, simple_loss=0.2687, pruned_loss=0.05773, ctc_loss=0.123, over 19750.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3085, pruned_loss=0.07791, ctc_loss=0.1464, over 3189723.59 frames. ], batch size: 48, lr: 1.88e-02, grad_scale: 32.0
+2024-08-25 14:14:55,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=94800.0, ans=0.0
+2024-08-25 14:15:43,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=94853.33333333333, ans=0.125
+2024-08-25 14:15:45,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=94853.33333333333, ans=0.125
+2024-08-25 14:15:52,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94906.66666666667, ans=0.1
+2024-08-25 14:16:26,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.31 vs. limit=12.0
+2024-08-25 14:16:46,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95013.33333333333, ans=0.1
+2024-08-25 14:16:47,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=95013.33333333333, ans=0.04949747468305833
+2024-08-25 14:16:50,820 INFO [train.py:1114] (3/4) Epoch 8, batch 400, loss[loss=0.2709, simple_loss=0.3178, pruned_loss=0.08067, ctc_loss=0.1568, over 19466.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3081, pruned_loss=0.07753, ctc_loss=0.1461, over 3341207.10 frames. ], batch size: 54, lr: 1.87e-02, grad_scale: 32.0
+2024-08-25 14:16:58,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=95066.66666666667, ans=0.0
+2024-08-25 14:17:00,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.84 vs. limit=15.0
+2024-08-25 14:17:03,858 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.620e+02 2.019e+02 2.528e+02 3.132e+02 5.852e+02, threshold=5.056e+02, percent-clipped=7.0
+2024-08-25 14:17:12,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=15.0
+2024-08-25 14:17:15,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.61 vs. limit=15.0
+2024-08-25 14:17:16,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=95173.33333333333, ans=0.2
+2024-08-25 14:17:44,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95280.0, ans=0.125
+2024-08-25 14:17:50,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95280.0, ans=0.1
+2024-08-25 14:18:36,768 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.63 vs. limit=15.0
+2024-08-25 14:18:38,393 INFO [train.py:1114] (3/4) Epoch 8, batch 450, loss[loss=0.2783, simple_loss=0.3237, pruned_loss=0.08384, ctc_loss=0.163, over 19616.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3086, pruned_loss=0.0781, ctc_loss=0.1474, over 3451064.08 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 32.0
+2024-08-25 14:18:52,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=15.0
+2024-08-25 14:19:05,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0
+2024-08-25 14:19:10,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95440.0, ans=0.1
+2024-08-25 14:19:32,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95546.66666666667, ans=0.125
+2024-08-25 14:19:39,033 INFO [train.py:1114] (3/4) Epoch 8, batch 500, loss[loss=0.2596, simple_loss=0.3172, pruned_loss=0.07321, ctc_loss=0.1388, over 19674.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3071, pruned_loss=0.0769, ctc_loss=0.145, over 3546644.13 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 32.0
+2024-08-25 14:21:42,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.925e+02 2.242e+02 2.655e+02 4.786e+02, threshold=4.483e+02, percent-clipped=0.0
+2024-08-25 14:21:43,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.99 vs. limit=15.0
+2024-08-25 14:21:45,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=95653.33333333333, ans=0.2
+2024-08-25 14:22:03,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=95760.0, ans=0.0
+2024-08-25 14:22:27,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.69 vs. limit=6.0
+2024-08-25 14:22:28,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=95813.33333333333, ans=0.025
+2024-08-25 14:22:28,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=95813.33333333333, ans=0.5
+2024-08-25 14:22:36,073 INFO [train.py:1114] (3/4) Epoch 8, batch 550, loss[loss=0.2844, simple_loss=0.3281, pruned_loss=0.0879, ctc_loss=0.162, over 19235.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3063, pruned_loss=0.07632, ctc_loss=0.1436, over 3609559.30 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0
+2024-08-25 14:22:39,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95866.66666666667, ans=0.125
+2024-08-25 14:23:55,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=95920.0, ans=0.0
+2024-08-25 14:23:59,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=95920.0, ans=0.125
+2024-08-25 14:25:20,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0
+2024-08-25 14:25:26,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=96026.66666666667, ans=0.125
+2024-08-25 14:25:34,156 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 14:25:43,162 INFO [train.py:1114] (3/4) Epoch 8, batch 600, loss[loss=0.2677, simple_loss=0.319, pruned_loss=0.07937, ctc_loss=0.1442, over 19404.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3069, pruned_loss=0.07633, ctc_loss=0.1436, over 3666516.95 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0
+2024-08-25 14:25:54,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.517e+02 1.975e+02 2.461e+02 2.998e+02 6.685e+02, threshold=4.922e+02, percent-clipped=2.0
+2024-08-25 14:26:23,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=96293.33333333333, ans=0.125
+2024-08-25 14:29:18,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=96346.66666666667, ans=0.04949747468305833
+2024-08-25 14:29:23,574 INFO [train.py:1114] (3/4) Epoch 8, batch 650, loss[loss=0.268, simple_loss=0.3188, pruned_loss=0.07962, ctc_loss=0.1449, over 19766.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3061, pruned_loss=0.07618, ctc_loss=0.1434, over 3716400.18 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0
+2024-08-25 14:29:30,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.16 vs. limit=15.0
+2024-08-25 14:29:46,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=96453.33333333333, ans=10.0
+2024-08-25 14:30:52,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=96560.0, ans=0.2
+2024-08-25 14:30:53,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=96560.0, ans=0.0
+2024-08-25 14:31:04,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=96613.33333333333, ans=0.025
+2024-08-25 14:31:04,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.90 vs. limit=15.0
+2024-08-25 14:31:23,586 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 14:31:24,399 INFO [train.py:1114] (3/4) Epoch 8, batch 700, loss[loss=0.2366, simple_loss=0.2908, pruned_loss=0.06588, ctc_loss=0.1263, over 19724.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3061, pruned_loss=0.076, ctc_loss=0.1429, over 3748173.97 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 32.0
+2024-08-25 14:31:36,075 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.482e+02 1.952e+02 2.228e+02 2.907e+02 4.140e+02, threshold=4.456e+02, percent-clipped=0.0
+2024-08-25 14:31:48,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96720.0, ans=0.125
+2024-08-25 14:32:06,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0
+2024-08-25 14:32:10,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=96826.66666666667, ans=0.2
+2024-08-25 14:32:35,094 INFO [train.py:1114] (3/4) Epoch 8, batch 750, loss[loss=0.2576, simple_loss=0.3102, pruned_loss=0.07447, ctc_loss=0.1399, over 19509.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3053, pruned_loss=0.07564, ctc_loss=0.142, over 3774076.83 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0
+2024-08-25 14:32:35,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96933.33333333333, ans=0.125
+2024-08-25 14:32:38,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=96933.33333333333, ans=0.0
+2024-08-25 14:32:44,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=96933.33333333333, ans=0.0
+2024-08-25 14:33:04,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=97040.0, ans=0.125
+2024-08-25 14:33:24,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.42 vs. 
limit=10.0 +2024-08-25 14:33:32,416 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.525e-03 +2024-08-25 14:33:34,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=97146.66666666667, ans=0.125 +2024-08-25 14:33:44,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=97200.0, ans=0.0 +2024-08-25 14:33:45,415 INFO [train.py:1114] (3/4) Epoch 8, batch 800, loss[loss=0.2251, simple_loss=0.2763, pruned_loss=0.06359, ctc_loss=0.1167, over 19806.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3055, pruned_loss=0.07583, ctc_loss=0.1425, over 3795285.75 frames. ], batch size: 49, lr: 1.86e-02, grad_scale: 32.0 +2024-08-25 14:34:33,007 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:34:35,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.511e+02 1.855e+02 2.176e+02 2.933e+02 4.905e+02, threshold=4.353e+02, percent-clipped=3.0 +2024-08-25 14:35:04,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=97360.0, ans=10.0 +2024-08-25 14:35:10,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=15.0 +2024-08-25 14:35:22,238 INFO [train.py:1114] (3/4) Epoch 8, batch 850, loss[loss=0.2731, simple_loss=0.322, pruned_loss=0.08247, ctc_loss=0.1479, over 19658.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.306, pruned_loss=0.07638, ctc_loss=0.1436, over 3815373.82 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:35:39,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=97520.0, ans=0.125 +2024-08-25 14:35:47,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=97573.33333333333, ans=0.125 +2024-08-25 14:35:48,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=97573.33333333333, ans=0.125 +2024-08-25 14:36:04,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=97626.66666666667, ans=0.1 +2024-08-25 14:36:19,786 INFO [train.py:1114] (3/4) Epoch 8, batch 900, loss[loss=0.2519, simple_loss=0.293, pruned_loss=0.07669, ctc_loss=0.1438, over 19419.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3065, pruned_loss=0.07655, ctc_loss=0.1441, over 3817949.58 frames. 
], batch size: 48, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:38:27,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=97733.33333333333, ans=0.025 +2024-08-25 14:38:30,484 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.590e+02 1.935e+02 2.327e+02 2.780e+02 5.034e+02, threshold=4.654e+02, percent-clipped=2.0 +2024-08-25 14:38:52,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=97840.0, ans=0.125 +2024-08-25 14:38:56,596 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=2.695e-03 +2024-08-25 14:38:58,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97893.33333333333, ans=0.125 +2024-08-25 14:40:01,369 INFO [train.py:1114] (3/4) Epoch 8, batch 950, loss[loss=0.2596, simple_loss=0.2963, pruned_loss=0.08246, ctc_loss=0.1449, over 19496.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3066, pruned_loss=0.0767, ctc_loss=0.1441, over 3819282.94 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:40:20,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.64 vs. limit=22.5 +2024-08-25 14:42:03,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=98160.0, ans=0.125 +2024-08-25 14:42:04,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.86 vs. limit=10.0 +2024-08-25 14:42:05,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98160.0, ans=0.1 +2024-08-25 14:43:29,287 INFO [train.py:1114] (3/4) Epoch 8, batch 1000, loss[loss=0.2084, simple_loss=0.2756, pruned_loss=0.05185, ctc_loss=0.09379, over 19860.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3074, pruned_loss=0.07713, ctc_loss=0.1448, over 3815697.00 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:43:31,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=98266.66666666667, ans=0.125 +2024-08-25 14:43:36,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=98266.66666666667, ans=0.125 +2024-08-25 14:43:47,366 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 2.014e+02 2.465e+02 3.304e+02 4.205e+02, threshold=4.930e+02, percent-clipped=0.0 +2024-08-25 14:43:49,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.56 vs. 
limit=12.0 +2024-08-25 14:46:06,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=98320.0, ans=0.07 +2024-08-25 14:46:27,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=98426.66666666667, ans=0.0 +2024-08-25 14:46:36,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=98480.0, ans=0.0 +2024-08-25 14:46:39,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=98480.0, ans=0.025 +2024-08-25 14:46:41,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=98480.0, ans=0.0 +2024-08-25 14:46:44,408 INFO [train.py:1114] (3/4) Epoch 8, batch 1050, loss[loss=0.2376, simple_loss=0.303, pruned_loss=0.06182, ctc_loss=0.1215, over 19846.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3071, pruned_loss=0.07699, ctc_loss=0.1445, over 3822946.03 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 32.0 +2024-08-25 14:46:53,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98533.33333333333, ans=0.125 +2024-08-25 14:47:14,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.13 vs. limit=15.0 +2024-08-25 14:47:26,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.36 vs. limit=22.5 +2024-08-25 14:47:38,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=98746.66666666667, ans=0.2 +2024-08-25 14:47:42,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=98746.66666666667, ans=22.5 +2024-08-25 14:47:44,587 INFO [train.py:1114] (3/4) Epoch 8, batch 1100, loss[loss=0.2491, simple_loss=0.3072, pruned_loss=0.0687, ctc_loss=0.134, over 19584.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3073, pruned_loss=0.07689, ctc_loss=0.1447, over 3831513.25 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:48:13,773 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.527e+02 1.814e+02 2.071e+02 2.620e+02 3.682e+02, threshold=4.142e+02, percent-clipped=0.0 +2024-08-25 14:49:20,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.56 vs. limit=12.0 +2024-08-25 14:49:50,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=99013.33333333333, ans=0.0 +2024-08-25 14:50:00,867 INFO [train.py:1114] (3/4) Epoch 8, batch 1150, loss[loss=0.2382, simple_loss=0.2962, pruned_loss=0.06697, ctc_loss=0.1158, over 19593.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.307, pruned_loss=0.07697, ctc_loss=0.1445, over 3830419.49 frames. 
], batch size: 52, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:51:18,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=99120.0, ans=0.125 +2024-08-25 14:51:18,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. limit=15.0 +2024-08-25 14:52:31,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99226.66666666667, ans=0.1 +2024-08-25 14:52:33,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=99226.66666666667, ans=0.1 +2024-08-25 14:52:45,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=99280.0, ans=0.0 +2024-08-25 14:52:46,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99280.0, ans=0.1 +2024-08-25 14:52:51,785 INFO [train.py:1114] (3/4) Epoch 8, batch 1200, loss[loss=0.2465, simple_loss=0.3019, pruned_loss=0.06897, ctc_loss=0.1329, over 19836.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3077, pruned_loss=0.07706, ctc_loss=0.1447, over 3825608.64 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:52:57,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.32 vs. limit=15.0 +2024-08-25 14:53:06,250 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.879e+02 2.149e+02 2.634e+02 4.011e+02, threshold=4.298e+02, percent-clipped=0.0 +2024-08-25 14:53:08,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99386.66666666667, ans=0.1 +2024-08-25 14:53:48,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=99546.66666666667, ans=0.0 +2024-08-25 14:53:52,344 INFO [train.py:1114] (3/4) Epoch 8, batch 1250, loss[loss=0.2561, simple_loss=0.3127, pruned_loss=0.07327, ctc_loss=0.1324, over 19545.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3075, pruned_loss=0.07654, ctc_loss=0.1436, over 3843494.08 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0 +2024-08-25 14:54:04,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.64 vs. limit=15.0 +2024-08-25 14:55:27,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=99706.66666666667, ans=0.025 +2024-08-25 14:55:39,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99760.0, ans=0.125 +2024-08-25 14:55:44,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.71 vs. limit=15.0 +2024-08-25 14:55:45,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=99760.0, ans=0.125 +2024-08-25 14:55:51,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.48 vs. 
limit=10.0 +2024-08-25 14:55:55,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=99813.33333333333, ans=0.0 +2024-08-25 14:56:05,547 INFO [train.py:1114] (3/4) Epoch 8, batch 1300, loss[loss=0.272, simple_loss=0.315, pruned_loss=0.08249, ctc_loss=0.16, over 18841.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3067, pruned_loss=0.07647, ctc_loss=0.1436, over 3846002.07 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:56:17,010 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.809e+02 2.147e+02 2.747e+02 4.726e+02, threshold=4.293e+02, percent-clipped=4.0 +2024-08-25 14:56:22,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=99920.0, ans=0.125 +2024-08-25 14:58:00,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0 +2024-08-25 14:58:02,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=100026.66666666667, ans=0.0 +2024-08-25 14:58:48,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100080.0, ans=0.125 +2024-08-25 14:58:49,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100080.0, ans=0.125 +2024-08-25 14:58:50,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=100080.0, ans=0.0 +2024-08-25 14:58:53,765 INFO [train.py:1114] (3/4) Epoch 8, batch 1350, loss[loss=0.2561, simple_loss=0.3056, pruned_loss=0.07419, ctc_loss=0.1456, over 19766.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3062, pruned_loss=0.07604, ctc_loss=0.1428, over 3857437.49 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:59:07,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100186.66666666667, ans=0.125 +2024-08-25 14:59:10,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100186.66666666667, ans=0.125 +2024-08-25 14:59:20,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=100240.0, ans=0.2 +2024-08-25 14:59:30,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=100293.33333333333, ans=0.5 +2024-08-25 14:59:31,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100293.33333333333, ans=0.1 +2024-08-25 14:59:44,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=100346.66666666667, ans=0.05 +2024-08-25 14:59:46,620 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 14:59:51,904 INFO [train.py:1114] (3/4) Epoch 8, batch 1400, loss[loss=0.2425, simple_loss=0.2815, pruned_loss=0.07416, ctc_loss=0.1382, over 19649.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3066, pruned_loss=0.07665, ctc_loss=0.1438, over 3863980.64 frames. 
], batch size: 46, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 14:59:53,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100400.0, ans=0.0 +2024-08-25 14:59:54,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.26 vs. limit=12.0 +2024-08-25 15:00:03,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.557e+02 2.018e+02 2.600e+02 3.300e+02 7.375e+02, threshold=5.199e+02, percent-clipped=11.0 +2024-08-25 15:00:43,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100613.33333333333, ans=0.125 +2024-08-25 15:00:56,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.16 vs. limit=15.0 +2024-08-25 15:00:56,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0 +2024-08-25 15:00:56,739 INFO [train.py:1114] (3/4) Epoch 8, batch 1450, loss[loss=0.2314, simple_loss=0.3029, pruned_loss=0.05843, ctc_loss=0.1077, over 19702.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3076, pruned_loss=0.07733, ctc_loss=0.145, over 3861968.41 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:01:27,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=100773.33333333333, ans=0.5 +2024-08-25 15:01:29,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=100773.33333333333, ans=0.5 +2024-08-25 15:01:37,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=100826.66666666667, ans=0.125 +2024-08-25 15:01:47,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.34 vs. limit=22.5 +2024-08-25 15:03:17,725 INFO [train.py:1114] (3/4) Epoch 8, batch 1500, loss[loss=0.2624, simple_loss=0.3218, pruned_loss=0.07299, ctc_loss=0.1425, over 19581.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3079, pruned_loss=0.07735, ctc_loss=0.1451, over 3861705.14 frames. 
], batch size: 57, lr: 1.83e-02, grad_scale: 32.0 +2024-08-25 15:04:56,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=100933.33333333333, ans=0.0 +2024-08-25 15:05:23,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=100986.66666666667, ans=0.015 +2024-08-25 15:05:24,439 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.972e+02 2.271e+02 2.845e+02 5.404e+02, threshold=4.542e+02, percent-clipped=1.0 +2024-08-25 15:05:38,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=100986.66666666667, ans=0.125 +2024-08-25 15:07:43,787 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 15:10:00,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=101146.66666666667, ans=0.125 +2024-08-25 15:10:11,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=101146.66666666667, ans=0.125 +2024-08-25 15:10:18,892 INFO [train.py:1114] (3/4) Epoch 8, batch 1550, loss[loss=0.2931, simple_loss=0.3416, pruned_loss=0.08891, ctc_loss=0.1672, over 19613.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3079, pruned_loss=0.07752, ctc_loss=0.1456, over 3846191.26 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:12:39,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=101306.66666666667, ans=0.125 +2024-08-25 15:13:04,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101360.0, ans=0.125 +2024-08-25 15:14:02,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=101413.33333333333, ans=0.0 +2024-08-25 15:14:11,854 INFO [train.py:1114] (3/4) Epoch 8, batch 1600, loss[loss=0.2444, simple_loss=0.308, pruned_loss=0.06541, ctc_loss=0.1251, over 19838.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3078, pruned_loss=0.07743, ctc_loss=0.1453, over 3835823.42 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:14:22,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.06 vs. 
limit=15.0 +2024-08-25 15:14:31,979 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.575e+02 1.915e+02 2.222e+02 2.696e+02 4.640e+02, threshold=4.444e+02, percent-clipped=1.0 +2024-08-25 15:14:43,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=101520.0, ans=0.125 +2024-08-25 15:15:16,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=101680.0, ans=0.0 +2024-08-25 15:15:17,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101680.0, ans=0.125 +2024-08-25 15:15:23,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101680.0, ans=0.125 +2024-08-25 15:15:30,475 INFO [train.py:1114] (3/4) Epoch 8, batch 1650, loss[loss=0.3117, simple_loss=0.3453, pruned_loss=0.1006, ctc_loss=0.1922, over 19651.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3078, pruned_loss=0.07762, ctc_loss=0.1461, over 3833814.27 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0 +2024-08-25 15:15:36,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=101733.33333333333, ans=0.02 +2024-08-25 15:15:38,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-08-25 15:15:40,174 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 15:15:50,867 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.60 vs. limit=22.5 +2024-08-25 15:15:51,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.08 vs. limit=22.5 +2024-08-25 15:16:28,205 INFO [train.py:1114] (3/4) Epoch 8, batch 1700, loss[loss=0.2196, simple_loss=0.2675, pruned_loss=0.0631, ctc_loss=0.1138, over 19700.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3074, pruned_loss=0.07724, ctc_loss=0.1453, over 3847576.27 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 16.0 +2024-08-25 15:16:40,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 1.920e+02 2.237e+02 2.711e+02 4.644e+02, threshold=4.474e+02, percent-clipped=2.0 +2024-08-25 15:17:01,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=102106.66666666667, ans=0.125 +2024-08-25 15:17:15,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=102160.0, ans=0.125 +2024-08-25 15:17:39,498 INFO [train.py:1114] (3/4) Epoch 8, batch 1750, loss[loss=0.2554, simple_loss=0.2976, pruned_loss=0.0778, ctc_loss=0.1439, over 19644.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3066, pruned_loss=0.07658, ctc_loss=0.1441, over 3852621.28 frames. 
], batch size: 45, lr: 1.82e-02, grad_scale: 16.0 +2024-08-25 15:17:43,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=102266.66666666667, ans=0.0 +2024-08-25 15:17:45,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.13 vs. limit=12.0 +2024-08-25 15:17:45,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.57 vs. limit=5.0 +2024-08-25 15:18:04,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=102320.0, ans=0.125 +2024-08-25 15:20:04,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=102426.66666666667, ans=0.0 +2024-08-25 15:20:12,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=102426.66666666667, ans=0.125 +2024-08-25 15:20:17,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.22 vs. limit=15.0 +2024-08-25 15:20:25,848 INFO [train.py:1114] (3/4) Epoch 8, batch 1800, loss[loss=0.2683, simple_loss=0.3145, pruned_loss=0.08117, ctc_loss=0.1494, over 19618.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3064, pruned_loss=0.07664, ctc_loss=0.1439, over 3853804.88 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:20:37,815 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.544e+02 1.874e+02 2.230e+02 2.859e+02 4.439e+02, threshold=4.460e+02, percent-clipped=0.0 +2024-08-25 15:20:38,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=102586.66666666667, ans=0.0 +2024-08-25 15:20:39,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102586.66666666667, ans=0.125 +2024-08-25 15:23:34,822 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.615e-02 +2024-08-25 15:23:36,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.75 vs. limit=22.5 +2024-08-25 15:24:44,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=102640.0, ans=0.0 +2024-08-25 15:26:48,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0 +2024-08-25 15:26:49,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=102693.33333333333, ans=0.0 +2024-08-25 15:26:49,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.84 vs. 
limit=22.5 +2024-08-25 15:26:54,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=102746.66666666667, ans=0.125 +2024-08-25 15:28:56,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=102746.66666666667, ans=0.025 +2024-08-25 15:28:58,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.35 vs. limit=22.5 +2024-08-25 15:28:59,113 INFO [train.py:1114] (3/4) Epoch 8, batch 1850, loss[loss=0.2546, simple_loss=0.311, pruned_loss=0.07073, ctc_loss=0.1421, over 19596.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3063, pruned_loss=0.07644, ctc_loss=0.1437, over 3856677.92 frames. ], batch size: 57, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:29:28,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=102906.66666666667, ans=0.125 +2024-08-25 15:29:42,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=102960.0, ans=15.0 +2024-08-25 15:29:46,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=102960.0, ans=0.125 +2024-08-25 15:32:38,672 INFO [train.py:1114] (3/4) Epoch 8, batch 1900, loss[loss=0.2448, simple_loss=0.3177, pruned_loss=0.06188, ctc_loss=0.1202, over 19665.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3068, pruned_loss=0.07629, ctc_loss=0.1434, over 3860546.74 frames. ], batch size: 59, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:32:52,964 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 1.872e+02 2.139e+02 2.618e+02 5.849e+02, threshold=4.279e+02, percent-clipped=4.0 +2024-08-25 15:33:12,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=103173.33333333333, ans=0.0 +2024-08-25 15:33:19,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103226.66666666667, ans=0.125 +2024-08-25 15:33:20,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103226.66666666667, ans=0.1 +2024-08-25 15:33:30,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.56 vs. limit=22.5 +2024-08-25 15:33:31,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103280.0, ans=0.0 +2024-08-25 15:33:37,630 INFO [train.py:1114] (3/4) Epoch 8, batch 1950, loss[loss=0.2423, simple_loss=0.2944, pruned_loss=0.06967, ctc_loss=0.127, over 19582.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3077, pruned_loss=0.07654, ctc_loss=0.1436, over 3869516.51 frames. 
], batch size: 52, lr: 1.81e-02, grad_scale: 16.0 +2024-08-25 15:33:54,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103333.33333333333, ans=0.125 +2024-08-25 15:34:14,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=103440.0, ans=0.0 +2024-08-25 15:34:23,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=103493.33333333333, ans=0.125 +2024-08-25 15:34:34,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=103546.66666666667, ans=0.125 +2024-08-25 15:34:36,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=103546.66666666667, ans=0.05 +2024-08-25 15:34:42,882 INFO [train.py:1114] (3/4) Epoch 8, batch 2000, loss[loss=0.2327, simple_loss=0.278, pruned_loss=0.06873, ctc_loss=0.125, over 19650.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3083, pruned_loss=0.07693, ctc_loss=0.1444, over 3854954.45 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 32.0 +2024-08-25 15:34:44,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103600.0, ans=0.125 +2024-08-25 15:34:46,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.69 vs. limit=12.0 +2024-08-25 15:34:48,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.82 vs. limit=22.5 +2024-08-25 15:34:55,658 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.835e+02 2.022e+02 2.450e+02 4.734e+02, threshold=4.043e+02, percent-clipped=1.0 +2024-08-25 15:34:57,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=103653.33333333333, ans=0.0 +2024-08-25 15:35:17,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=103760.0, ans=0.1 +2024-08-25 15:35:24,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=103760.0, ans=0.125 +2024-08-25 15:35:25,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103760.0, ans=0.125 +2024-08-25 15:35:32,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.90 vs. limit=15.0 +2024-08-25 15:35:36,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=103813.33333333333, ans=0.025 +2024-08-25 15:35:38,626 INFO [train.py:1114] (3/4) Epoch 8, batch 2050, loss[loss=0.2508, simple_loss=0.2872, pruned_loss=0.07842, ctc_loss=0.1439, over 19689.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.307, pruned_loss=0.07686, ctc_loss=0.1444, over 3851533.26 frames. 
], batch size: 47, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:35:41,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103866.66666666667, ans=0.0 +2024-08-25 15:35:48,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103920.0, ans=0.125 +2024-08-25 15:35:58,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=103920.0, ans=0.0 +2024-08-25 15:35:58,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=103920.0, ans=0.0 +2024-08-25 15:36:11,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=104026.66666666667, ans=0.125 +2024-08-25 15:36:26,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104080.0, ans=0.1 +2024-08-25 15:36:30,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=104080.0, ans=0.0 +2024-08-25 15:36:32,748 INFO [train.py:1114] (3/4) Epoch 8, batch 2100, loss[loss=0.236, simple_loss=0.2994, pruned_loss=0.06166, ctc_loss=0.1232, over 19763.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3059, pruned_loss=0.07579, ctc_loss=0.1425, over 3858443.25 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:36:44,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 2.055e+02 2.348e+02 2.987e+02 4.948e+02, threshold=4.695e+02, percent-clipped=5.0 +2024-08-25 15:37:02,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104240.0, ans=0.0 +2024-08-25 15:37:11,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=104293.33333333333, ans=0.07 +2024-08-25 15:37:18,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.94 vs. limit=22.5 +2024-08-25 15:37:26,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=104400.0, ans=0.0 +2024-08-25 15:37:27,186 INFO [train.py:1114] (3/4) Epoch 8, batch 2150, loss[loss=0.2284, simple_loss=0.2906, pruned_loss=0.05968, ctc_loss=0.1169, over 19847.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3042, pruned_loss=0.07474, ctc_loss=0.1406, over 3870537.57 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:37:52,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.18 vs. 
limit=15.0 +2024-08-25 15:38:01,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=104560.0, ans=0.125 +2024-08-25 15:38:05,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=104560.0, ans=0.0 +2024-08-25 15:38:10,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=104613.33333333333, ans=0.125 +2024-08-25 15:38:23,248 INFO [train.py:1114] (3/4) Epoch 8, batch 2200, loss[loss=0.28, simple_loss=0.3188, pruned_loss=0.0883, ctc_loss=0.1616, over 19586.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3046, pruned_loss=0.07525, ctc_loss=0.1416, over 3869428.46 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:38:28,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.70 vs. limit=15.0 +2024-08-25 15:38:29,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104666.66666666667, ans=0.125 +2024-08-25 15:38:33,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=104720.0, ans=0.125 +2024-08-25 15:38:35,670 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.961e+02 2.280e+02 3.038e+02 5.675e+02, threshold=4.560e+02, percent-clipped=2.0 +2024-08-25 15:38:52,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.22 vs. limit=22.5 +2024-08-25 15:38:53,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=104773.33333333333, ans=0.125 +2024-08-25 15:38:54,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=104773.33333333333, ans=0.0 +2024-08-25 15:38:56,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.96 vs. limit=15.0 +2024-08-25 15:39:10,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-08-25 15:39:16,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104880.0, ans=0.125 +2024-08-25 15:39:19,051 INFO [train.py:1114] (3/4) Epoch 8, batch 2250, loss[loss=0.2554, simple_loss=0.3087, pruned_loss=0.07228, ctc_loss=0.1437, over 19610.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3045, pruned_loss=0.07527, ctc_loss=0.1415, over 3868784.71 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 32.0 +2024-08-25 15:39:30,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.43 vs. limit=22.5 +2024-08-25 15:39:31,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.86 vs. 
limit=15.0 +2024-08-25 15:39:41,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=105040.0, ans=0.09899494936611666 +2024-08-25 15:39:43,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105040.0, ans=0.1 +2024-08-25 15:39:53,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.12 vs. limit=15.0 +2024-08-25 15:40:04,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=105146.66666666667, ans=0.0 +2024-08-25 15:40:07,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=105146.66666666667, ans=0.2 +2024-08-25 15:40:14,494 INFO [train.py:1114] (3/4) Epoch 8, batch 2300, loss[loss=0.237, simple_loss=0.2901, pruned_loss=0.0668, ctc_loss=0.1259, over 19479.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.304, pruned_loss=0.07544, ctc_loss=0.1417, over 3862172.15 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:40:15,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=105200.0, ans=0.125 +2024-08-25 15:40:28,020 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.907e+02 2.167e+02 2.593e+02 4.976e+02, threshold=4.335e+02, percent-clipped=1.0 +2024-08-25 15:40:28,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=105253.33333333333, ans=0.125 +2024-08-25 15:40:36,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105253.33333333333, ans=0.125 +2024-08-25 15:40:41,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-08-25 15:41:04,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-08-25 15:41:06,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105413.33333333333, ans=0.125 +2024-08-25 15:41:11,097 INFO [train.py:1114] (3/4) Epoch 8, batch 2350, loss[loss=0.2796, simple_loss=0.3264, pruned_loss=0.08427, ctc_loss=0.1606, over 19709.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3043, pruned_loss=0.07545, ctc_loss=0.1415, over 3863657.07 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:41:24,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=105520.0, ans=0.0 +2024-08-25 15:41:34,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.28 vs. 
limit=6.0 +2024-08-25 15:41:40,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=105573.33333333333, ans=0.125 +2024-08-25 15:41:42,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105573.33333333333, ans=0.125 +2024-08-25 15:42:06,006 INFO [train.py:1114] (3/4) Epoch 8, batch 2400, loss[loss=0.288, simple_loss=0.3362, pruned_loss=0.08826, ctc_loss=0.1583, over 19299.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3073, pruned_loss=0.07687, ctc_loss=0.1442, over 3857803.31 frames. ], batch size: 71, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:42:18,058 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 1.983e+02 2.255e+02 2.870e+02 5.067e+02, threshold=4.510e+02, percent-clipped=2.0 +2024-08-25 15:42:29,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=105840.0, ans=0.5 +2024-08-25 15:42:44,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.81 vs. limit=22.5 +2024-08-25 15:43:01,713 INFO [train.py:1114] (3/4) Epoch 8, batch 2450, loss[loss=0.3468, simple_loss=0.3508, pruned_loss=0.1241, ctc_loss=0.2362, over 13559.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3118, pruned_loss=0.08064, ctc_loss=0.1517, over 3729557.09 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 32.0 +2024-08-25 15:43:04,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=106000.0, ans=0.125 +2024-08-25 15:43:19,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=106053.33333333333, ans=15.0 +2024-08-25 15:43:24,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=106106.66666666667, ans=0.0 +2024-08-25 15:44:31,283 INFO [train.py:1114] (3/4) Epoch 9, batch 0, loss[loss=0.2321, simple_loss=0.2806, pruned_loss=0.06637, ctc_loss=0.1273, over 19434.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.2806, pruned_loss=0.06637, ctc_loss=0.1273, over 19434.00 frames. ], batch size: 48, lr: 1.69e-02, grad_scale: 32.0 +2024-08-25 15:44:31,283 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 15:44:49,829 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.21, simple_loss=0.2947, pruned_loss=0.04621, ctc_loss=0.08206, over 944034.00 frames. 
+2024-08-25 15:44:49,829 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB
[ ... repetitive per-batch scaling.py (ScheduledFloat / Whitening / WithLoss) diagnostic lines elided; train.py loss summaries and optim.py grad-clipping warnings retained ... ]
+2024-08-25 15:45:15,533 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.726e+02 2.154e+02 2.510e+02 2.953e+02 5.707e+02, threshold=5.019e+02, percent-clipped=2.0
+2024-08-25 15:46:36,875 INFO [train.py:1114] (3/4) Epoch 9, batch 50, loss[loss=0.2374, simple_loss=0.2863, pruned_loss=0.0684, ctc_loss=0.1295, over 19699.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3084, pruned_loss=0.07757, ctc_loss=0.1454, over 844655.46 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:47:44,293 INFO [train.py:1114] (3/4) Epoch 9, batch 100, loss[loss=0.2293, simple_loss=0.2863, pruned_loss=0.0625, ctc_loss=0.1183, over 19735.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3084, pruned_loss=0.07697, ctc_loss=0.1446, over 1499016.47 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:48:09,483 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.842e+02 2.163e+02 2.785e+02 4.838e+02, threshold=4.326e+02, percent-clipped=0.0
+2024-08-25 15:48:42,140 INFO [train.py:1114] (3/4) Epoch 9, batch 150, loss[loss=0.2106, simple_loss=0.2736, pruned_loss=0.05368, ctc_loss=0.1008, over 19680.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3042, pruned_loss=0.07422, ctc_loss=0.1398, over 2026886.82 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-25 15:49:41,042 INFO [train.py:1114] (3/4) Epoch 9, batch 200, loss[loss=0.2853, simple_loss=0.3255, pruned_loss=0.08863, ctc_loss=0.1697, over 18314.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3024, pruned_loss=0.07296, ctc_loss=0.1373, over 2435917.01 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:50:06,171 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.799e+02 2.039e+02 2.617e+02 5.282e+02, threshold=4.078e+02, percent-clipped=1.0
+2024-08-25 15:51:17,116 INFO [train.py:1114] (3/4) Epoch 9, batch 250, loss[loss=0.266, simple_loss=0.3185, pruned_loss=0.07843, ctc_loss=0.1417, over 19343.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3028, pruned_loss=0.07342, ctc_loss=0.1381, over 2755622.38 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:52:18,772 INFO [train.py:1114] (3/4) Epoch 9, batch 300, loss[loss=0.2936, simple_loss=0.3399, pruned_loss=0.09013, ctc_loss=0.1677, over 19523.00 frames. ], tot_loss[loss=0.253, simple_loss=0.303, pruned_loss=0.07373, ctc_loss=0.1387, over 3000418.68 frames. ], batch size: 61, lr: 1.68e-02, grad_scale: 16.0
+2024-08-25 15:52:47,050 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.324e+02 1.831e+02 2.248e+02 2.885e+02 5.251e+02, threshold=4.495e+02, percent-clipped=2.0
+2024-08-25 15:53:18,332 INFO [train.py:1114] (3/4) Epoch 9, batch 350, loss[loss=0.2489, simple_loss=0.2984, pruned_loss=0.07315, ctc_loss=0.133, over 19744.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.3034, pruned_loss=0.07354, ctc_loss=0.1382, over 3190835.51 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0
+2024-08-25 15:54:14,909 INFO [train.py:1114] (3/4) Epoch 9, batch 400, loss[loss=0.2372, simple_loss=0.2956, pruned_loss=0.06489, ctc_loss=0.1226, over 19519.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3026, pruned_loss=0.07303, ctc_loss=0.1374, over 3342303.24 frames. ], batch size: 54, lr: 1.68e-02, grad_scale: 32.0
+2024-08-25 15:54:43,458 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 2.039e+02 2.514e+02 3.062e+02 4.428e+02, threshold=5.028e+02, percent-clipped=0.0
+2024-08-25 15:55:18,512 INFO [train.py:1114] (3/4) Epoch 9, batch 450, loss[loss=0.2361, simple_loss=0.2983, pruned_loss=0.06248, ctc_loss=0.1223, over 19625.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3029, pruned_loss=0.0731, ctc_loss=0.1376, over 3451742.59 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 15:59:11,160 INFO [train.py:1114] (3/4) Epoch 9, batch 500, loss[loss=0.2646, simple_loss=0.3219, pruned_loss=0.075, ctc_loss=0.1429, over 19699.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3025, pruned_loss=0.07314, ctc_loss=0.1378, over 3546623.14 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 15:59:37,499 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.583e+02 1.839e+02 2.298e+02 3.023e+02 4.931e+02, threshold=4.596e+02, percent-clipped=0.0
+2024-08-25 16:00:08,624 INFO [train.py:1114] (3/4) Epoch 9, batch 550, loss[loss=0.2649, simple_loss=0.3146, pruned_loss=0.07926, ctc_loss=0.1416, over 19302.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3022, pruned_loss=0.07318, ctc_loss=0.1379, over 3608514.03 frames. ], batch size: 71, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:01:12,454 INFO [train.py:1114] (3/4) Epoch 9, batch 600, loss[loss=0.2694, simple_loss=0.3181, pruned_loss=0.08136, ctc_loss=0.1448, over 19437.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3029, pruned_loss=0.07353, ctc_loss=0.1382, over 3665876.29 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:01:51,487 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.960e+02 2.208e+02 2.721e+02 5.490e+02, threshold=4.416e+02, percent-clipped=2.0
+2024-08-25 16:02:47,613 INFO [train.py:1114] (3/4) Epoch 9, batch 650, loss[loss=0.24, simple_loss=0.3068, pruned_loss=0.06316, ctc_loss=0.117, over 19767.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3013, pruned_loss=0.07242, ctc_loss=0.1361, over 3716373.21 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:03:47,858 INFO [train.py:1114] (3/4) Epoch 9, batch 700, loss[loss=0.2317, simple_loss=0.2903, pruned_loss=0.06276, ctc_loss=0.1191, over 19716.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3012, pruned_loss=0.07222, ctc_loss=0.1359, over 3747551.70 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 32.0
+2024-08-25 16:04:14,379 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.641e+02 1.949e+02 2.382e+02 2.859e+02 4.618e+02, threshold=4.764e+02, percent-clipped=1.0
+2024-08-25 16:04:44,749 INFO [train.py:1114] (3/4) Epoch 9, batch 750, loss[loss=0.245, simple_loss=0.3024, pruned_loss=0.06797, ctc_loss=0.1289, over 19492.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3012, pruned_loss=0.07224, ctc_loss=0.1358, over 3774251.77 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:05:48,067 INFO [train.py:1114] (3/4) Epoch 9, batch 800, loss[loss=0.2523, simple_loss=0.2927, pruned_loss=0.07578, ctc_loss=0.1508, over 19402.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3007, pruned_loss=0.07199, ctc_loss=0.1357, over 3794706.26 frames. ], batch size: 48, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:06:14,964 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.861e+02 2.104e+02 2.558e+02 4.618e+02, threshold=4.207e+02, percent-clipped=0.0
+2024-08-25 16:06:47,181 INFO [train.py:1114] (3/4) Epoch 9, batch 850, loss[loss=0.2522, simple_loss=0.3061, pruned_loss=0.07318, ctc_loss=0.1298, over 19644.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3003, pruned_loss=0.07183, ctc_loss=0.1354, over 3813664.29 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:08:42,729 INFO [train.py:1114] (3/4) Epoch 9, batch 900, loss[loss=0.2663, simple_loss=0.3013, pruned_loss=0.08307, ctc_loss=0.1628, over 19412.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3005, pruned_loss=0.07216, ctc_loss=0.136, over 3817494.92 frames. ], batch size: 48, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:09:12,348 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.982e+02 2.328e+02 2.784e+02 5.806e+02, threshold=4.657e+02, percent-clipped=1.0
+2024-08-25 16:09:47,296 INFO [train.py:1114] (3/4) Epoch 9, batch 950, loss[loss=0.2272, simple_loss=0.2826, pruned_loss=0.06272, ctc_loss=0.1162, over 19494.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3012, pruned_loss=0.07267, ctc_loss=0.1371, over 3818012.97 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:10:45,138 INFO [train.py:1114] (3/4) Epoch 9, batch 1000, loss[loss=0.2237, simple_loss=0.2804, pruned_loss=0.0604, ctc_loss=0.1153, over 19873.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3017, pruned_loss=0.07305, ctc_loss=0.1379, over 3813686.65 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 32.0
+2024-08-25 16:11:13,873 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.864e+02 2.156e+02 2.793e+02 4.751e+02, threshold=4.311e+02, percent-clipped=1.0
+2024-08-25 16:11:45,639 INFO [train.py:1114] (3/4) Epoch 9, batch 1050, loss[loss=0.2467, simple_loss=0.3022, pruned_loss=0.06981, ctc_loss=0.1288, over 19837.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3013, pruned_loss=0.07296, ctc_loss=0.1374, over 3819984.06 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:12:51,813 INFO [train.py:1114] (3/4) Epoch 9, batch 1100, loss[loss=0.255, simple_loss=0.2967, pruned_loss=0.07714, ctc_loss=0.1477, over 19591.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3005, pruned_loss=0.0722, ctc_loss=0.136, over 3828029.26 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:13:19,831 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 1.820e+02 2.090e+02 2.645e+02 4.523e+02, threshold=4.179e+02, percent-clipped=2.0
+2024-08-25 16:13:50,951 INFO [train.py:1114] (3/4) Epoch 9, batch 1150, loss[loss=0.2418, simple_loss=0.2902, pruned_loss=0.07009, ctc_loss=0.1331, over 19586.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3006, pruned_loss=0.07235, ctc_loss=0.1364, over 3828256.56 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:14:51,118 INFO [train.py:1114] (3/4) Epoch 9, batch 1200, loss[loss=0.2562, simple_loss=0.3109, pruned_loss=0.07379, ctc_loss=0.1351, over 19818.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3014, pruned_loss=0.07255, ctc_loss=0.1367, over 3824123.47 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:16:05,754 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.491e+02 1.875e+02 2.166e+02 2.598e+02 4.323e+02, threshold=4.331e+02, percent-clipped=2.0
+2024-08-25 16:16:39,519 INFO [train.py:1114] (3/4) Epoch 9, batch 1250, loss[loss=0.2501, simple_loss=0.3027, pruned_loss=0.07163, ctc_loss=0.1355, over 19537.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3018, pruned_loss=0.07262, ctc_loss=0.1367, over 3842786.22 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0
+2024-08-25 16:17:40,925 INFO [train.py:1114] (3/4) Epoch 9, batch 1300, loss[loss=0.2641, simple_loss=0.3093, pruned_loss=0.08024, ctc_loss=0.1461, over 18758.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3008, pruned_loss=0.07225, ctc_loss=0.1361, over 3846324.30 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:18:08,515 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.553e+02 1.959e+02 2.315e+02 2.984e+02 4.812e+02, threshold=4.630e+02, percent-clipped=1.0
+2024-08-25 16:18:42,120 INFO [train.py:1114] (3/4) Epoch 9, batch 1350, loss[loss=0.2442, simple_loss=0.3013, pruned_loss=0.06763, ctc_loss=0.1296, over 19765.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3003, pruned_loss=0.07179, ctc_loss=0.1351, over 3857696.96 frames. ], batch size: 54, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:19:40,004 INFO [train.py:1114] (3/4) Epoch 9, batch 1400, loss[loss=0.2381, simple_loss=0.28, pruned_loss=0.07137, ctc_loss=0.1339, over 19689.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3, pruned_loss=0.0718, ctc_loss=0.1348, over 3864354.05 frames. ], batch size: 46, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:20:07,554 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.860e+02 2.127e+02 2.545e+02 4.134e+02, threshold=4.253e+02, percent-clipped=0.0
+2024-08-25 16:20:43,001 INFO [train.py:1114] (3/4) Epoch 9, batch 1450, loss[loss=0.2662, simple_loss=0.3124, pruned_loss=0.08079, ctc_loss=0.1459, over 19658.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3007, pruned_loss=0.07209, ctc_loss=0.1354, over 3861727.81 frames. ], batch size: 63, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:21:45,893 INFO [train.py:1114] (3/4) Epoch 9, batch 1500, loss[loss=0.2273, simple_loss=0.2912, pruned_loss=0.05954, ctc_loss=0.1106, over 19577.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3009, pruned_loss=0.07205, ctc_loss=0.1352, over 3861680.15 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 32.0
+2024-08-25 16:22:15,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.928e+02 2.180e+02 2.740e+02 4.350e+02, threshold=4.360e+02, percent-clipped=2.0
+2024-08-25 16:22:45,650 INFO [train.py:1114] (3/4) Epoch 9, batch 1550, loss[loss=0.2893, simple_loss=0.3304, pruned_loss=0.08958, ctc_loss=0.1726, over 19610.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3013, pruned_loss=0.07256, ctc_loss=0.1363, over 3846864.95 frames. ], batch size: 60, lr: 1.64e-02, grad_scale: 16.0
+2024-08-25 16:23:47,226 INFO [train.py:1114] (3/4) Epoch 9, batch 1600, loss[loss=0.2217, simple_loss=0.2913, pruned_loss=0.0547, ctc_loss=0.1069, over 19832.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3013, pruned_loss=0.07253, ctc_loss=0.1364, over 3836243.84 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0
+2024-08-25 16:24:16,813 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.930e+02 2.504e+02 3.084e+02 5.673e+02, threshold=5.009e+02, percent-clipped=4.0
+2024-08-25 16:24:46,353 INFO [train.py:1114] (3/4) Epoch 9, batch 1650, loss[loss=0.2868, simple_loss=0.333, pruned_loss=0.08857, ctc_loss=0.1587, over 19664.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3023, pruned_loss=0.07325, ctc_loss=0.1375, over 3833329.32 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:25:45,147 INFO [train.py:1114] (3/4) Epoch 9, batch 1700, loss[loss=0.2292, simple_loss=0.2727, pruned_loss=0.06789, ctc_loss=0.1248, over 19672.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3018, pruned_loss=0.07264, ctc_loss=0.1366, over 3847405.19 frames. ], batch size: 46, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:26:13,046 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.773e+02 1.969e+02 2.283e+02 4.673e+02, threshold=3.938e+02, percent-clipped=0.0
+2024-08-25 16:26:41,720 INFO [train.py:1114] (3/4) Epoch 9, batch 1750, loss[loss=0.2218, simple_loss=0.2702, pruned_loss=0.06366, ctc_loss=0.1154, over 19620.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3011, pruned_loss=0.07202, ctc_loss=0.1352, over 3850976.40 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:28:12,434 INFO [train.py:1114] (3/4) Epoch 9, batch 1800, loss[loss=0.229, simple_loss=0.2914, pruned_loss=0.05937, ctc_loss=0.1196, over 19613.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3007, pruned_loss=0.07174, ctc_loss=0.1347, over 3851723.27 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:28:49,002 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.488e+02 1.840e+02 2.097e+02 2.711e+02 4.220e+02, threshold=4.193e+02, percent-clipped=2.0
+2024-08-25 16:29:25,112 INFO [train.py:1114] (3/4) Epoch 9, batch 1850, loss[loss=0.2478, simple_loss=0.3124, pruned_loss=0.06781, ctc_loss=0.1193, over 19589.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3004, pruned_loss=0.07164, ctc_loss=0.1344, over 3856604.38 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 16.0
+2024-08-25 16:30:56,362 INFO [train.py:1114] (3/4) Epoch 9, batch 1900, loss[loss=0.2895, simple_loss=0.3266, pruned_loss=0.09156, ctc_loss=0.173, over 19651.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3008, pruned_loss=0.07169, ctc_loss=0.1342, over 3860965.01 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0
+2024-08-25 16:32:21,938 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.810e+02 2.075e+02 2.674e+02 4.757e+02, threshold=4.150e+02, percent-clipped=3.0
+2024-08-25 16:33:06,102 INFO [train.py:1114] (3/4) Epoch 9, batch 1950, loss[loss=0.2082, simple_loss=0.2718, pruned_loss=0.05215, ctc_loss=0.1005, over 19588.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3018, pruned_loss=0.07195, ctc_loss=0.1347, over 3870204.12 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0
+2024-08-25 16:34:02,729 INFO [train.py:1114] (3/4) Epoch 9, batch 2000, loss[loss=0.2175, simple_loss=0.2711, pruned_loss=0.05937, ctc_loss=0.1129, over 19670.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3023, pruned_loss=0.07234, ctc_loss=0.1353, over 3855943.67 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:34:30,973 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.555e+02 1.787e+02 2.122e+02 2.673e+02 5.196e+02, threshold=4.245e+02, percent-clipped=10.0
+2024-08-25 16:34:59,603 INFO [train.py:1114] (3/4) Epoch 9, batch 2050, loss[loss=0.2349, simple_loss=0.275, pruned_loss=0.06998, ctc_loss=0.1372, over 19728.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3006, pruned_loss=0.07194, ctc_loss=0.1346, over 3851663.93 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:36:57,814 INFO [train.py:1114] (3/4) Epoch 9, batch 2100, loss[loss=0.2505, simple_loss=0.3086, pruned_loss=0.06987, ctc_loss=0.1314, over 19760.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.2999, pruned_loss=0.07142, ctc_loss=0.1338, over 3857941.15 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:37:38,955 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.501e+02 1.824e+02 2.012e+02 2.446e+02 4.504e+02, threshold=4.025e+02, percent-clipped=2.0
+2024-08-25 16:38:06,799 INFO [train.py:1114] (3/4) Epoch 9, batch 2150, loss[loss=0.222, simple_loss=0.2829, pruned_loss=0.05861, ctc_loss=0.1097, over 19849.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.2995, pruned_loss=0.07099, ctc_loss=0.133, over 3869521.36 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 32.0
+2024-08-25 16:39:02,679 INFO [train.py:1114] (3/4) Epoch 9, batch 2200, loss[loss=0.2637, simple_loss=0.3162, pruned_loss=0.07629, ctc_loss=0.1464, over 19598.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.2994, pruned_loss=0.07105, ctc_loss=0.133, over 3867350.51 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 32.0
+2024-08-25 16:39:30,919 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.840e+02 2.263e+02 2.882e+02 6.553e+02, threshold=4.526e+02, percent-clipped=9.0
+2024-08-25 16:39:59,974 INFO [train.py:1114] (3/4) Epoch 9, batch 2250, loss[loss=0.2371, simple_loss=0.3053, pruned_loss=0.05965, ctc_loss=0.1238, over 19612.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3002, pruned_loss=0.0714, ctc_loss=0.1338, over 3867293.27 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 16.0
+2024-08-25 16:40:54,820 INFO [train.py:1114] (3/4) Epoch 9, batch 2300, loss[loss=0.242, simple_loss=0.2916, pruned_loss=0.06953, ctc_loss=0.1337, over 19512.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.2996, pruned_loss=0.07161, ctc_loss=0.134, over 3860876.33 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 16.0
+2024-08-25 16:41:24,912 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 1.864e+02 2.265e+02 3.023e+02 5.230e+02, threshold=4.530e+02, percent-clipped=2.0
+2024-08-25 16:41:51,086 INFO [train.py:1114] (3/4) Epoch 9, batch 2350, loss[loss=0.2787, simple_loss=0.3218, pruned_loss=0.08684, ctc_loss=0.1548, over 19655.00 frames. ], tot_loss[loss=0.249, simple_loss=0.2999, pruned_loss=0.07209, ctc_loss=0.1349, over 3863477.13 frames. ], batch size: 63, lr: 1.61e-02, grad_scale: 16.0
+2024-08-25 16:43:02,772 INFO [train.py:1114] (3/4) Epoch 9, batch 2400, loss[loss=0.2874, simple_loss=0.3348, pruned_loss=0.08829, ctc_loss=0.1586, over 19275.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3025, pruned_loss=0.07327, ctc_loss=0.1369, over 3857594.78 frames. ], batch size: 71, lr: 1.61e-02, grad_scale: 32.0
+2024-08-25 16:43:32,520 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 1.930e+02 2.301e+02 2.799e+02 4.768e+02, threshold=4.601e+02, percent-clipped=1.0
+2024-08-25 16:43:59,354 INFO [train.py:1114] (3/4) Epoch 9, batch 2450, loss[loss=0.3338, simple_loss=0.3434, pruned_loss=0.1158, ctc_loss=0.2314, over 13405.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3067, pruned_loss=0.07675, ctc_loss=0.1438, over 3729870.34 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 32.0
], tot_loss[loss=0.2589, simple_loss=0.3067, pruned_loss=0.07675, ctc_loss=0.1438, over 3729870.34 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 32.0 +2024-08-25 16:44:21,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=119381.33333333333, ans=0.0 +2024-08-25 16:44:29,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=119381.33333333333, ans=0.125 +2024-08-25 16:44:31,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=119434.66666666667, ans=0.05 +2024-08-25 16:44:33,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=119434.66666666667, ans=0.0 +2024-08-25 16:44:34,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.52 vs. limit=22.5 +2024-08-25 16:44:34,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.09 vs. limit=15.0 +2024-08-25 16:45:25,708 INFO [train.py:1114] (3/4) Epoch 10, batch 0, loss[loss=0.2212, simple_loss=0.2731, pruned_loss=0.06173, ctc_loss=0.1146, over 19409.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2731, pruned_loss=0.06173, ctc_loss=0.1146, over 19409.00 frames. ], batch size: 48, lr: 1.53e-02, grad_scale: 32.0 +2024-08-25 16:45:25,708 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 16:46:37,107 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.2041, simple_loss=0.2903, pruned_loss=0.04356, ctc_loss=0.07708, over 944034.00 frames. +2024-08-25 16:46:37,107 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-25 16:47:46,601 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 1.955e+02 2.116e+02 2.362e+02 4.652e+02, threshold=4.231e+02, percent-clipped=1.0 +2024-08-25 16:47:50,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=119696.0, ans=0.2 +2024-08-25 16:48:28,309 INFO [train.py:1114] (3/4) Epoch 10, batch 50, loss[loss=0.2172, simple_loss=0.2685, pruned_loss=0.06094, ctc_loss=0.1104, over 19730.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.301, pruned_loss=0.07159, ctc_loss=0.1354, over 846230.90 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:48:57,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=119749.33333333333, ans=0.2 +2024-08-25 16:48:58,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. 
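The "Maximum memory allocated so far is 14072MB" line printed with the validation pass corresponds to PyTorch's peak-allocation counter. A sketch of the equivalent bookkeeping (the helper name here is ours, not the project's):

```python
import torch

def log_peak_memory() -> None:
    # torch.cuda.max_memory_allocated() returns the peak number of bytes
    # allocated on the current device since program start (or since the
    # last torch.cuda.reset_peak_memory_stats() call).
    if torch.cuda.is_available():
        peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
        print(f"Maximum memory allocated so far is {peak_mb}MB")
```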
limit=6.0 +2024-08-25 16:49:17,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119749.33333333333, ans=0.1 +2024-08-25 16:49:17,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119749.33333333333, ans=0.1 +2024-08-25 16:50:08,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119856.0, ans=0.1 +2024-08-25 16:50:44,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=119909.33333333333, ans=0.07 +2024-08-25 16:50:45,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-25 16:50:46,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5 +2024-08-25 16:52:34,135 INFO [train.py:1114] (3/4) Epoch 10, batch 100, loss[loss=0.2373, simple_loss=0.2927, pruned_loss=0.0651, ctc_loss=0.129, over 19727.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3017, pruned_loss=0.07102, ctc_loss=0.1345, over 1499330.95 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:52:54,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120069.33333333333, ans=0.1 +2024-08-25 16:53:28,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=120122.66666666667, ans=0.125 +2024-08-25 16:53:30,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=15.0 +2024-08-25 16:53:35,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=120176.0, ans=0.0 +2024-08-25 16:53:39,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=120176.0, ans=0.07 +2024-08-25 16:53:47,835 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.460e+02 1.798e+02 2.253e+02 2.860e+02 4.134e+02, threshold=4.507e+02, percent-clipped=0.0 +2024-08-25 16:54:42,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120229.33333333333, ans=0.1 +2024-08-25 16:54:44,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.81 vs. limit=15.0 +2024-08-25 16:54:47,480 INFO [train.py:1114] (3/4) Epoch 10, batch 150, loss[loss=0.2073, simple_loss=0.2657, pruned_loss=0.05417, ctc_loss=0.1013, over 19722.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.2992, pruned_loss=0.0695, ctc_loss=0.1319, over 2027878.69 frames. 
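The tot_loss entries are running frame-weighted averages: each batch contributes its loss weighted by its frame count, and the window grows through the epoch (846230.90 frames at batch 50, 1499330.95 at batch 100, 2027878.69 at batch 150 above). The fractional frame totals suggest old statistics are also decayed exponentially; a sketch under that assumption, with the decay constant chosen arbitrarily:

```python
class RunningLoss:
    """Frame-weighted running loss, with assumed exponential forgetting."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay      # assumption: rate at which old batches fade
        self.loss_sum = 0.0     # sum of loss * num_frames, decayed
        self.frames = 0.0       # sum of num_frames, decayed (hence fractional)

    def update(self, loss: float, num_frames: float) -> float:
        self.loss_sum = self.loss_sum * self.decay + loss * num_frames
        self.frames = self.frames * self.decay + num_frames
        return self.loss_sum / self.frames   # the reported tot_loss
```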
], batch size: 47, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:55:20,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120389.33333333333, ans=0.125 +2024-08-25 16:55:25,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120389.33333333333, ans=0.125 +2024-08-25 16:55:38,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120442.66666666667, ans=0.125 +2024-08-25 16:56:01,812 INFO [train.py:1114] (3/4) Epoch 10, batch 200, loss[loss=0.2778, simple_loss=0.3198, pruned_loss=0.08562, ctc_loss=0.1616, over 18295.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.297, pruned_loss=0.06858, ctc_loss=0.13, over 2435690.75 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:56:11,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=120549.33333333333, ans=0.025 +2024-08-25 16:56:13,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=120602.66666666667, ans=0.125 +2024-08-25 16:57:32,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=120656.0, ans=0.125 +2024-08-25 16:57:51,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-25 16:57:56,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-25 16:58:07,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.824e+02 2.064e+02 2.548e+02 6.143e+02, threshold=4.128e+02, percent-clipped=2.0 +2024-08-25 16:58:11,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=120709.33333333333, ans=0.125 +2024-08-25 16:58:32,999 INFO [train.py:1114] (3/4) Epoch 10, batch 250, loss[loss=0.2732, simple_loss=0.3196, pruned_loss=0.0825, ctc_loss=0.1546, over 19432.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2975, pruned_loss=0.06891, ctc_loss=0.1304, over 2755567.99 frames. ], batch size: 67, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 16:58:37,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=120816.0, ans=0.125 +2024-08-25 16:59:00,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=120869.33333333333, ans=0.125 +2024-08-25 16:59:04,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=120869.33333333333, ans=0.125 +2024-08-25 16:59:08,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. 
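The ScheduledFloat lines record hyperparameters (skip rates, balancer probabilities, dropout values) that are functions of the global batch count; each entry prints the parameter name, the current batch_count, and the value in effect (ans). A self-contained sketch assuming simple piecewise-linear interpolation between (batch_count, value) breakpoints; this illustrates the idea, not the scaling.py implementation:

```python
class PiecewiseScheduledFloat:
    """A float whose value is a piecewise-linear function of batch count."""

    def __init__(self, *points: tuple[float, float]):
        # points: (batch_count, value) breakpoints, e.g. a skip rate that
        # decays from 0.5 at batch 0 to 0.05 by batch 4000 (values assumed).
        self.points = sorted(points)
        self.batch_count = 0.0

    def __float__(self) -> float:
        pts = self.points
        if self.batch_count <= pts[0][0]:
            return pts[0][1]
        if self.batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= self.batch_count <= x1:
                t = (self.batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

skip_rate = PiecewiseScheduledFloat((0.0, 0.5), (4000.0, 0.05))
skip_rate.batch_count = 120176.0   # a batch_count seen in the log above
print(float(skip_rate))            # past the last breakpoint -> 0.05
```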
limit=6.0 +2024-08-25 16:59:53,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=120976.0, ans=0.0 +2024-08-25 17:00:00,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=121029.33333333333, ans=0.09899494936611666 +2024-08-25 17:00:08,736 INFO [train.py:1114] (3/4) Epoch 10, batch 300, loss[loss=0.2579, simple_loss=0.3074, pruned_loss=0.07685, ctc_loss=0.1371, over 19521.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.2975, pruned_loss=0.06933, ctc_loss=0.1309, over 3001630.94 frames. ], batch size: 61, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 17:00:23,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121082.66666666667, ans=0.1 +2024-08-25 17:00:45,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=121189.33333333333, ans=0.125 +2024-08-25 17:01:00,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=121242.66666666667, ans=0.125 +2024-08-25 17:01:01,165 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.908e+02 2.186e+02 2.769e+02 4.118e+02, threshold=4.372e+02, percent-clipped=0.0 +2024-08-25 17:02:40,288 INFO [train.py:1114] (3/4) Epoch 10, batch 350, loss[loss=0.2584, simple_loss=0.2941, pruned_loss=0.08119, ctc_loss=0.1511, over 19727.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.2983, pruned_loss=0.06984, ctc_loss=0.1321, over 3191366.70 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 32.0 +2024-08-25 17:02:51,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=121402.66666666667, ans=0.0 +2024-08-25 17:02:58,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=121402.66666666667, ans=0.2 +2024-08-25 17:03:28,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=121509.33333333333, ans=0.125 +2024-08-25 17:03:42,416 INFO [train.py:1114] (3/4) Epoch 10, batch 400, loss[loss=0.2481, simple_loss=0.3002, pruned_loss=0.07166, ctc_loss=0.1316, over 19501.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.2975, pruned_loss=0.06938, ctc_loss=0.131, over 3343145.60 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:03:48,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=121616.0, ans=0.125 +2024-08-25 17:04:04,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=121669.33333333333, ans=0.025 +2024-08-25 17:04:30,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121776.0, ans=0.1 +2024-08-25 17:04:33,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.409e+02 1.874e+02 2.151e+02 2.761e+02 4.102e+02, threshold=4.302e+02, percent-clipped=0.0 +2024-08-25 17:04:41,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. 
limit=15.0 +2024-08-25 17:04:42,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=22.5 +2024-08-25 17:04:50,467 INFO [train.py:1114] (3/4) Epoch 10, batch 450, loss[loss=0.2898, simple_loss=0.3291, pruned_loss=0.09264, ctc_loss=0.1631, over 19624.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2972, pruned_loss=0.06923, ctc_loss=0.1306, over 3451118.13 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:07:40,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=121989.33333333333, ans=0.2 +2024-08-25 17:07:47,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=121989.33333333333, ans=0.035 +2024-08-25 17:07:47,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=121989.33333333333, ans=0.125 +2024-08-25 17:08:13,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=122042.66666666667, ans=0.0 +2024-08-25 17:08:38,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-08-25 17:09:01,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=15.0 +2024-08-25 17:09:04,036 INFO [train.py:1114] (3/4) Epoch 10, batch 500, loss[loss=0.2659, simple_loss=0.3148, pruned_loss=0.07869, ctc_loss=0.1487, over 19624.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.2966, pruned_loss=0.06933, ctc_loss=0.1306, over 3546995.54 frames. 
], batch size: 63, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:09:09,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=122149.33333333333, ans=0.2 +2024-08-25 17:09:22,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122149.33333333333, ans=0.1 +2024-08-25 17:09:26,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=122202.66666666667, ans=0.2 +2024-08-25 17:09:31,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=122202.66666666667, ans=0.025 +2024-08-25 17:09:57,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=122256.0, ans=0.2 +2024-08-25 17:10:26,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=122309.33333333333, ans=0.2 +2024-08-25 17:10:36,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.797e+02 2.290e+02 2.870e+02 3.920e+02, threshold=4.579e+02, percent-clipped=0.0 +2024-08-25 17:10:40,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=122362.66666666667, ans=0.0 +2024-08-25 17:10:44,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=122362.66666666667, ans=0.0 +2024-08-25 17:10:47,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=122362.66666666667, ans=0.0 +2024-08-25 17:10:51,445 INFO [train.py:1114] (3/4) Epoch 10, batch 550, loss[loss=0.2765, simple_loss=0.3226, pruned_loss=0.08432, ctc_loss=0.1543, over 19411.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2967, pruned_loss=0.06941, ctc_loss=0.1307, over 3607964.60 frames. ], batch size: 71, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:11:08,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=122416.0, ans=0.0 +2024-08-25 17:11:10,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.39 vs. limit=15.0 +2024-08-25 17:11:50,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=122416.0, ans=0.05 +2024-08-25 17:11:51,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=122469.33333333333, ans=0.125 +2024-08-25 17:13:37,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=122522.66666666667, ans=0.125 +2024-08-25 17:14:20,766 INFO [train.py:1114] (3/4) Epoch 10, batch 600, loss[loss=0.2505, simple_loss=0.3065, pruned_loss=0.07072, ctc_loss=0.1328, over 19394.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2966, pruned_loss=0.06912, ctc_loss=0.1302, over 3665406.62 frames. 
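The recurring optim.py WARNINGs report five order statistics (min, 25%, median, 75%, max) of recently observed gradient norms, and in every entry above the threshold equals Clipping_scale times the median, e.g. 2.0 × 2.290e+02 = 4.579e+02 in the 17:10:36 warning. A sketch of that clipping rule; the history-window length and the bookkeeping details are assumptions:

```python
import torch

class QuartileGradClipper:
    """Clip gradients at clipping_scale * median of recent grad norms."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.window = window        # assumed length of the norm history
        self.norms: list[float] = []
        self.steps = 0
        self.clipped = 0

    def clip_(self, params) -> None:
        params = [p for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params]))
        self.norms = (self.norms + [norm.item()])[-self.window:]
        s = sorted(self.norms)
        # min / 25% / 50% / 75% / max, as printed in the WARNING lines:
        quartiles = [s[int(q * (len(s) - 1))] for q in (0, 0.25, 0.5, 0.75, 1)]
        threshold = self.clipping_scale * quartiles[2]
        self.steps += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        # "percent-clipped" as reported in the log:
        percent_clipped = 100.0 * self.clipped / self.steps
```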
], batch size: 67, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:14:26,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=122682.66666666667, ans=0.0 +2024-08-25 17:14:48,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=122789.33333333333, ans=0.0 +2024-08-25 17:15:08,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.815e+02 2.061e+02 2.496e+02 4.365e+02, threshold=4.122e+02, percent-clipped=0.0 +2024-08-25 17:15:08,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=122842.66666666667, ans=0.025 +2024-08-25 17:15:13,217 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 17:15:19,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=122896.0, ans=0.0 +2024-08-25 17:15:24,797 INFO [train.py:1114] (3/4) Epoch 10, batch 650, loss[loss=0.2515, simple_loss=0.3052, pruned_loss=0.07193, ctc_loss=0.135, over 19772.00 frames. ], tot_loss[loss=0.243, simple_loss=0.2959, pruned_loss=0.06906, ctc_loss=0.1299, over 3716276.14 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:15:30,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=122949.33333333333, ans=0.125 +2024-08-25 17:15:38,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=15.0 +2024-08-25 17:15:42,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=123002.66666666667, ans=15.0 +2024-08-25 17:15:51,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=123056.0, ans=0.0 +2024-08-25 17:16:04,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=123109.33333333333, ans=0.125 +2024-08-25 17:16:34,661 INFO [train.py:1114] (3/4) Epoch 10, batch 700, loss[loss=0.2169, simple_loss=0.2816, pruned_loss=0.05533, ctc_loss=0.1037, over 19732.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2962, pruned_loss=0.06887, ctc_loss=0.1294, over 3748551.67 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 32.0 +2024-08-25 17:17:38,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.99 vs. limit=15.0 +2024-08-25 17:17:55,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.90 vs. 
limit=15.0 +2024-08-25 17:18:12,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=123376.0, ans=0.05 +2024-08-25 17:18:13,482 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 1.934e+02 2.276e+02 3.026e+02 5.626e+02, threshold=4.552e+02, percent-clipped=3.0 +2024-08-25 17:18:25,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=123429.33333333333, ans=0.125 +2024-08-25 17:18:28,243 INFO [train.py:1114] (3/4) Epoch 10, batch 750, loss[loss=0.2293, simple_loss=0.2945, pruned_loss=0.06032, ctc_loss=0.1087, over 19492.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2957, pruned_loss=0.06861, ctc_loss=0.1288, over 3775306.47 frames. ], batch size: 54, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:18:28,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=123482.66666666667, ans=0.0 +2024-08-25 17:18:47,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=123536.0, ans=0.125 +2024-08-25 17:19:30,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.16 vs. limit=22.5 +2024-08-25 17:19:32,721 INFO [train.py:1114] (3/4) Epoch 10, batch 800, loss[loss=0.2283, simple_loss=0.2781, pruned_loss=0.06354, ctc_loss=0.1285, over 19415.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2958, pruned_loss=0.06865, ctc_loss=0.129, over 3796476.02 frames. ], batch size: 48, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:20:27,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.19 vs. limit=22.5 +2024-08-25 17:20:33,019 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.887e+02 2.136e+02 2.736e+02 3.984e+02, threshold=4.273e+02, percent-clipped=0.0 +2024-08-25 17:20:34,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=123909.33333333333, ans=0.0 +2024-08-25 17:20:47,942 INFO [train.py:1114] (3/4) Epoch 10, batch 850, loss[loss=0.2407, simple_loss=0.3087, pruned_loss=0.06273, ctc_loss=0.1178, over 19669.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.2958, pruned_loss=0.0685, ctc_loss=0.1286, over 3814970.14 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:20:54,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.79 vs. 
limit=10.0 +2024-08-25 17:21:05,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=124069.33333333333, ans=0.125 +2024-08-25 17:21:08,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124069.33333333333, ans=0.1 +2024-08-25 17:21:11,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=124122.66666666667, ans=0.2 +2024-08-25 17:21:27,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=124176.0, ans=0.05 +2024-08-25 17:21:30,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=124176.0, ans=0.04949747468305833 +2024-08-25 17:22:19,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.63 vs. limit=12.0 +2024-08-25 17:22:25,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.09 vs. limit=22.5 +2024-08-25 17:22:28,568 INFO [train.py:1114] (3/4) Epoch 10, batch 900, loss[loss=0.2038, simple_loss=0.2624, pruned_loss=0.05234, ctc_loss=0.1014, over 19809.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2964, pruned_loss=0.06903, ctc_loss=0.1294, over 3819622.48 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:22:48,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=124336.0, ans=0.125 +2024-08-25 17:23:13,942 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.854e+02 2.167e+02 2.763e+02 5.395e+02, threshold=4.333e+02, percent-clipped=2.0 +2024-08-25 17:23:25,960 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 17:23:30,294 INFO [train.py:1114] (3/4) Epoch 10, batch 950, loss[loss=0.2356, simple_loss=0.2884, pruned_loss=0.06731, ctc_loss=0.1206, over 19502.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2967, pruned_loss=0.06914, ctc_loss=0.1299, over 3821051.06 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:23:36,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=124549.33333333333, ans=0.0 +2024-08-25 17:23:36,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.61 vs. limit=10.0 +2024-08-25 17:23:40,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=124602.66666666667, ans=0.125 +2024-08-25 17:24:06,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0 +2024-08-25 17:24:09,958 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.23 vs. 
limit=22.5 +2024-08-25 17:24:17,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=124762.66666666667, ans=0.035 +2024-08-25 17:24:17,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=124762.66666666667, ans=0.0 +2024-08-25 17:24:24,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.95 vs. limit=6.0 +2024-08-25 17:24:33,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.89 vs. limit=22.5 +2024-08-25 17:24:34,422 INFO [train.py:1114] (3/4) Epoch 10, batch 1000, loss[loss=0.2035, simple_loss=0.2712, pruned_loss=0.04919, ctc_loss=0.09347, over 19847.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.297, pruned_loss=0.06888, ctc_loss=0.1297, over 3815840.92 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:24:41,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=124816.0, ans=0.09899494936611666 +2024-08-25 17:25:03,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=124922.66666666667, ans=0.0 +2024-08-25 17:25:18,037 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.505e+02 1.797e+02 2.069e+02 2.553e+02 4.130e+02, threshold=4.138e+02, percent-clipped=0.0 +2024-08-25 17:25:27,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125029.33333333333, ans=0.125 +2024-08-25 17:25:32,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125082.66666666667, ans=0.1 +2024-08-25 17:25:33,291 INFO [train.py:1114] (3/4) Epoch 10, batch 1050, loss[loss=0.2351, simple_loss=0.2967, pruned_loss=0.06251, ctc_loss=0.121, over 19850.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.296, pruned_loss=0.06868, ctc_loss=0.1294, over 3822334.63 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 32.0 +2024-08-25 17:25:33,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=125082.66666666667, ans=0.5 +2024-08-25 17:25:42,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.08 vs. limit=15.0 +2024-08-25 17:25:47,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=125136.0, ans=0.025 +2024-08-25 17:26:25,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=125296.0, ans=0.025 +2024-08-25 17:26:30,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=125296.0, ans=0.125 +2024-08-25 17:26:32,057 INFO [train.py:1114] (3/4) Epoch 10, batch 1100, loss[loss=0.2082, simple_loss=0.271, pruned_loss=0.05248, ctc_loss=0.101, over 19583.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2959, pruned_loss=0.06842, ctc_loss=0.129, over 3829859.54 frames. 
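The Whitening lines track how far a module's activations are from having an isotropic (white) covariance; a penalty applies only when the measured metric exceeds the scheduled limit (so metric=3.08 vs. limit=15.0 above means no penalty). A minimal sketch of one plausible such metric, which is 1.0 for perfectly whitened features and grows with the eigenvalue spread of the covariance; the exact formula and the per-group handling (num_groups in the log) are assumptions:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (num_frames, num_channels). Centre the features and form the
    # channel covariance.
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    dim = cov.shape[0]
    # trace(cov @ cov) * dim / trace(cov)**2: equals 1.0 when cov is a
    # multiple of the identity, and approaches dim when one eigenvalue
    # dominates.
    return (cov @ cov).diagonal().sum() * dim / cov.diagonal().sum() ** 2

x = torch.randn(1000, 512)             # roughly white activations
print(float(whitening_metric(x)))      # close to 1, well under limit=15.0
```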
], batch size: 52, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:26:32,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=125349.33333333333, ans=0.025 +2024-08-25 17:26:33,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=125349.33333333333, ans=0.125 +2024-08-25 17:26:38,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=125349.33333333333, ans=0.2 +2024-08-25 17:27:18,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.787e+02 2.060e+02 2.560e+02 4.808e+02, threshold=4.120e+02, percent-clipped=1.0 +2024-08-25 17:27:22,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=125562.66666666667, ans=0.125 +2024-08-25 17:27:33,335 INFO [train.py:1114] (3/4) Epoch 10, batch 1150, loss[loss=0.2369, simple_loss=0.2929, pruned_loss=0.06559, ctc_loss=0.1242, over 19598.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2961, pruned_loss=0.0684, ctc_loss=0.1289, over 3829334.24 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:28:06,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.58 vs. limit=15.0 +2024-08-25 17:28:09,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=125722.66666666667, ans=0.0 +2024-08-25 17:28:15,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125776.0, ans=0.1 +2024-08-25 17:28:26,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=15.0 +2024-08-25 17:28:44,411 INFO [train.py:1114] (3/4) Epoch 10, batch 1200, loss[loss=0.2582, simple_loss=0.3151, pruned_loss=0.07161, ctc_loss=0.1449, over 19833.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.297, pruned_loss=0.06896, ctc_loss=0.1301, over 3823765.45 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:28:44,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=125882.66666666667, ans=0.125 +2024-08-25 17:28:45,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125882.66666666667, ans=0.1 +2024-08-25 17:28:50,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.04 vs. 
limit=15.0 +2024-08-25 17:29:00,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=125936.0, ans=0.05 +2024-08-25 17:29:09,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=125936.0, ans=0.025 +2024-08-25 17:29:30,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 1.823e+02 2.047e+02 2.358e+02 4.051e+02, threshold=4.094e+02, percent-clipped=0.0 +2024-08-25 17:29:30,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126042.66666666667, ans=0.125 +2024-08-25 17:29:35,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=126096.0, ans=0.125 +2024-08-25 17:29:45,836 INFO [train.py:1114] (3/4) Epoch 10, batch 1250, loss[loss=0.2569, simple_loss=0.3067, pruned_loss=0.07535, ctc_loss=0.1409, over 19520.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2969, pruned_loss=0.06848, ctc_loss=0.1289, over 3842282.76 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:29:54,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.26 vs. limit=15.0 +2024-08-25 17:29:57,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=126202.66666666667, ans=0.125 +2024-08-25 17:30:45,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=126309.33333333333, ans=0.125 +2024-08-25 17:30:59,769 INFO [train.py:1114] (3/4) Epoch 10, batch 1300, loss[loss=0.2912, simple_loss=0.3271, pruned_loss=0.09214, ctc_loss=0.1775, over 18801.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2957, pruned_loss=0.06782, ctc_loss=0.1276, over 3845848.70 frames. ], batch size: 76, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:31:23,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=126416.0, ans=0.2 +2024-08-25 17:31:52,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=126522.66666666667, ans=0.025 +2024-08-25 17:32:01,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=126522.66666666667, ans=0.025 +2024-08-25 17:32:13,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.900e+02 2.303e+02 2.970e+02 5.096e+02, threshold=4.606e+02, percent-clipped=7.0 +2024-08-25 17:32:24,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.76 vs. limit=15.0 +2024-08-25 17:32:28,187 INFO [train.py:1114] (3/4) Epoch 10, batch 1350, loss[loss=0.2459, simple_loss=0.3005, pruned_loss=0.06791, ctc_loss=0.1388, over 19779.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2949, pruned_loss=0.06735, ctc_loss=0.1268, over 3856321.71 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:32:36,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.48 vs. 
limit=15.0 +2024-08-25 17:32:37,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.44 vs. limit=22.5 +2024-08-25 17:32:44,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.46 vs. limit=10.0 +2024-08-25 17:33:11,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=126842.66666666667, ans=0.5 +2024-08-25 17:33:23,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126896.0, ans=0.125 +2024-08-25 17:33:30,485 INFO [train.py:1114] (3/4) Epoch 10, batch 1400, loss[loss=0.2006, simple_loss=0.2565, pruned_loss=0.05253, ctc_loss=0.09926, over 19703.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.2946, pruned_loss=0.06718, ctc_loss=0.1265, over 3863368.53 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 32.0 +2024-08-25 17:33:31,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.10 vs. limit=15.0 +2024-08-25 17:33:46,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=127002.66666666667, ans=0.0 +2024-08-25 17:33:57,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. limit=15.0 +2024-08-25 17:34:02,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=12.0 +2024-08-25 17:34:42,450 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.856e+02 2.167e+02 2.631e+02 4.500e+02, threshold=4.335e+02, percent-clipped=0.0 +2024-08-25 17:34:56,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127162.66666666667, ans=0.125 +2024-08-25 17:34:59,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=127162.66666666667, ans=0.0 +2024-08-25 17:35:02,135 INFO [train.py:1114] (3/4) Epoch 10, batch 1450, loss[loss=0.2693, simple_loss=0.3248, pruned_loss=0.07849, ctc_loss=0.1421, over 19678.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.296, pruned_loss=0.06796, ctc_loss=0.128, over 3862457.50 frames. 
], batch size: 63, lr: 1.48e-02, grad_scale: 32.0 +2024-08-25 17:35:09,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127216.0, ans=0.125 +2024-08-25 17:35:15,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=127269.33333333333, ans=0.125 +2024-08-25 17:35:35,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=127322.66666666667, ans=10.0 +2024-08-25 17:35:41,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127376.0, ans=0.1 +2024-08-25 17:35:50,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=127429.33333333333, ans=0.125 +2024-08-25 17:36:02,113 INFO [train.py:1114] (3/4) Epoch 10, batch 1500, loss[loss=0.2462, simple_loss=0.2978, pruned_loss=0.07008, ctc_loss=0.136, over 19590.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.2962, pruned_loss=0.06797, ctc_loss=0.1282, over 3861690.34 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0 +2024-08-25 17:36:08,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=127482.66666666667, ans=0.2 +2024-08-25 17:36:15,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=127536.0, ans=0.125 +2024-08-25 17:36:18,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127536.0, ans=0.125 +2024-08-25 17:36:42,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127589.33333333333, ans=0.1 +2024-08-25 17:36:44,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=127589.33333333333, ans=0.0 +2024-08-25 17:36:55,679 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.462e+02 1.877e+02 2.186e+02 2.626e+02 4.478e+02, threshold=4.372e+02, percent-clipped=1.0 +2024-08-25 17:36:58,295 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 17:37:00,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=127696.0, ans=0.0 +2024-08-25 17:37:04,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127696.0, ans=0.1 +2024-08-25 17:37:06,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=127696.0, ans=0.0 +2024-08-25 17:37:24,242 INFO [train.py:1114] (3/4) Epoch 10, batch 1550, loss[loss=0.2469, simple_loss=0.3051, pruned_loss=0.06812, ctc_loss=0.131, over 19597.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.2958, pruned_loss=0.06793, ctc_loss=0.1282, over 3846689.75 frames. 
], batch size: 60, lr: 1.48e-02, grad_scale: 16.0 +2024-08-25 17:37:42,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=127802.66666666667, ans=0.125 +2024-08-25 17:37:47,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127856.0, ans=0.125 +2024-08-25 17:37:53,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127856.0, ans=0.125 +2024-08-25 17:37:55,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127856.0, ans=0.1 +2024-08-25 17:38:50,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=127909.33333333333, ans=0.0 +2024-08-25 17:38:54,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=127909.33333333333, ans=0.2 +2024-08-25 17:39:41,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.16 vs. limit=10.0 +2024-08-25 17:39:42,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=127909.33333333333, ans=0.05 +2024-08-25 17:39:52,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=127962.66666666667, ans=0.125 +2024-08-25 17:41:06,796 INFO [train.py:1114] (3/4) Epoch 10, batch 1600, loss[loss=0.2428, simple_loss=0.3047, pruned_loss=0.06512, ctc_loss=0.1267, over 19835.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2956, pruned_loss=0.06791, ctc_loss=0.1282, over 3835877.42 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0 +2024-08-25 17:41:07,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=128016.0, ans=0.0 +2024-08-25 17:42:01,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=128016.0, ans=0.2 +2024-08-25 17:42:41,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.68 vs. limit=15.0 +2024-08-25 17:42:52,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.97 vs. limit=22.5 +2024-08-25 17:43:05,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=128176.0, ans=0.125 +2024-08-25 17:43:24,235 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.528e+02 1.849e+02 2.080e+02 2.733e+02 5.175e+02, threshold=4.161e+02, percent-clipped=4.0 +2024-08-25 17:44:00,803 INFO [train.py:1114] (3/4) Epoch 10, batch 1650, loss[loss=0.2488, simple_loss=0.3076, pruned_loss=0.06891, ctc_loss=0.1305, over 19638.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2962, pruned_loss=0.06841, ctc_loss=0.129, over 3832221.01 frames. 
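The grad_scale field is the dynamic loss-scaling factor for mixed-precision training: it halves from 32.0 to 16.0 at batch 1550 above and is back at 32.0 by batch 1600, the usual back-off-on-overflow / grow-after-stable-steps behaviour. The scaler actually used by this run is not shown in the log; with stock PyTorch AMP the analogous configuration would be:

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=32.0,     # matches grad_scale at the start of the epoch
    backoff_factor=0.5,  # halve the scale on inf/NaN grads: 32.0 -> 16.0
    growth_factor=2.0,   # double it again after growth_interval clean steps
    growth_interval=2000,
)
```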
], batch size: 59, lr: 1.48e-02, grad_scale: 32.0 +2024-08-25 17:44:33,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=128336.0, ans=0.125 +2024-08-25 17:45:04,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=128442.66666666667, ans=0.09899494936611666 +2024-08-25 17:45:46,331 INFO [train.py:1114] (3/4) Epoch 10, batch 1700, loss[loss=0.2161, simple_loss=0.2682, pruned_loss=0.06013, ctc_loss=0.1096, over 19679.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.296, pruned_loss=0.06804, ctc_loss=0.1282, over 3846404.40 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0 +2024-08-25 17:46:47,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.90 vs. limit=15.0 +2024-08-25 17:46:48,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128656.0, ans=0.125 +2024-08-25 17:47:00,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=128656.0, ans=0.2 +2024-08-25 17:47:01,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=128709.33333333333, ans=0.125 +2024-08-25 17:47:11,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.443e+02 1.773e+02 2.059e+02 2.527e+02 4.467e+02, threshold=4.119e+02, percent-clipped=1.0 +2024-08-25 17:47:11,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=12.0 +2024-08-25 17:48:02,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=128762.66666666667, ans=0.125 +2024-08-25 17:48:11,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.10 vs. limit=10.0 +2024-08-25 17:48:12,452 INFO [train.py:1114] (3/4) Epoch 10, batch 1750, loss[loss=0.2251, simple_loss=0.2738, pruned_loss=0.06378, ctc_loss=0.1223, over 19678.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2955, pruned_loss=0.06756, ctc_loss=0.1274, over 3851655.37 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0 +2024-08-25 17:48:45,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=128922.66666666667, ans=0.125 +2024-08-25 17:49:11,936 INFO [train.py:1114] (3/4) Epoch 10, batch 1800, loss[loss=0.2313, simple_loss=0.2978, pruned_loss=0.06002, ctc_loss=0.1118, over 19609.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2955, pruned_loss=0.0674, ctc_loss=0.127, over 3853702.55 frames. 
], batch size: 55, lr: 1.47e-02, grad_scale: 32.0 +2024-08-25 18:06:40,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=129242.66666666667, ans=0.125 +2024-08-25 18:06:56,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129242.66666666667, ans=0.125 +2024-08-25 18:11:17,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.930e+02 2.270e+02 3.115e+02 5.695e+02, threshold=4.540e+02, percent-clipped=10.0 +2024-08-25 18:11:53,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=129296.0, ans=6.0 +2024-08-25 18:18:09,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=129296.0, ans=0.0 +2024-08-25 18:19:59,304 INFO [train.py:1114] (3/4) Epoch 10, batch 1850, loss[loss=0.252, simple_loss=0.3123, pruned_loss=0.07035, ctc_loss=0.1272, over 19591.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2953, pruned_loss=0.06728, ctc_loss=0.1266, over 3856696.98 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0 +2024-08-25 18:21:03,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0 +2024-08-25 18:24:29,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=129402.66666666667, ans=0.0 +2024-08-25 18:26:29,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=129456.0, ans=0.05 +2024-08-25 18:29:14,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129509.33333333333, ans=0.1 +2024-08-25 18:29:52,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=129509.33333333333, ans=0.125 +2024-08-25 18:29:55,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.18 vs. limit=22.5 +2024-08-25 18:32:37,434 INFO [train.py:1114] (3/4) Epoch 10, batch 1900, loss[loss=0.2485, simple_loss=0.3102, pruned_loss=0.06805, ctc_loss=0.1269, over 19625.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2963, pruned_loss=0.06761, ctc_loss=0.1271, over 3861802.46 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0 +2024-08-25 18:34:01,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0 +2024-08-25 18:36:07,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=129722.66666666667, ans=0.0 +2024-08-25 18:36:20,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.50 vs. 
limit=22.5 +2024-08-25 18:37:35,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129776.0, ans=0.0 +2024-08-25 18:37:43,365 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.882e+02 2.156e+02 2.772e+02 4.689e+02, threshold=4.313e+02, percent-clipped=1.0 +2024-08-25 18:38:51,207 INFO [train.py:1114] (3/4) Epoch 10, batch 1950, loss[loss=0.2294, simple_loss=0.2827, pruned_loss=0.06295, ctc_loss=0.1255, over 19584.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2973, pruned_loss=0.06781, ctc_loss=0.1275, over 3870507.56 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0 +2024-08-25 18:39:43,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=129882.66666666667, ans=0.125 +2024-08-25 18:39:46,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.04 vs. limit=6.0 +2024-08-25 18:39:59,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=129936.0, ans=0.125 +2024-08-25 18:40:13,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=129936.0, ans=0.2 +2024-08-25 18:41:08,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129989.33333333333, ans=0.1 +2024-08-25 18:42:32,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=130042.66666666667, ans=0.025 +2024-08-25 18:44:04,306 INFO [train.py:1114] (3/4) Epoch 10, batch 2000, loss[loss=0.2056, simple_loss=0.2607, pruned_loss=0.05463, ctc_loss=0.1028, over 19650.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2982, pruned_loss=0.06858, ctc_loss=0.1291, over 3854930.75 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0 +2024-08-25 18:44:08,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=130149.33333333333, ans=0.2 +2024-08-25 18:44:14,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=130149.33333333333, ans=0.125 +2024-08-25 18:44:14,724 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.45 vs. limit=15.0 +2024-08-25 18:44:41,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=130202.66666666667, ans=0.2 +2024-08-25 18:47:32,410 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.882e+02 2.262e+02 2.707e+02 4.864e+02, threshold=4.523e+02, percent-clipped=1.0 +2024-08-25 18:48:05,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.05 vs. limit=22.5 +2024-08-25 18:48:30,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.70 vs. limit=15.0 +2024-08-25 18:48:39,775 INFO [train.py:1114] (3/4) Epoch 10, batch 2050, loss[loss=0.2463, simple_loss=0.2872, pruned_loss=0.07489, ctc_loss=0.1392, over 19715.00 frames. 
], tot_loss[loss=0.2432, simple_loss=0.2974, pruned_loss=0.06862, ctc_loss=0.1292, over 3852752.19 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0 +2024-08-25 18:50:05,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=130469.33333333333, ans=0.2 +2024-08-25 18:51:34,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130629.33333333333, ans=0.0 +2024-08-25 18:52:20,494 INFO [train.py:1114] (3/4) Epoch 10, batch 2100, loss[loss=0.2506, simple_loss=0.3066, pruned_loss=0.07029, ctc_loss=0.135, over 19771.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2964, pruned_loss=0.06805, ctc_loss=0.128, over 3859455.41 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 16.0 +2024-08-25 18:52:45,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130736.0, ans=0.125 +2024-08-25 18:53:32,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=130789.33333333333, ans=0.125 +2024-08-25 18:53:46,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.57 vs. limit=15.0 +2024-08-25 18:53:58,208 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.839e+02 2.296e+02 2.721e+02 6.154e+02, threshold=4.593e+02, percent-clipped=3.0 +2024-08-25 18:54:33,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.50 vs. limit=22.5 +2024-08-25 18:54:37,548 INFO [train.py:1114] (3/4) Epoch 10, batch 2150, loss[loss=0.2265, simple_loss=0.2833, pruned_loss=0.06206, ctc_loss=0.114, over 19864.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.296, pruned_loss=0.06824, ctc_loss=0.1282, over 3870385.41 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 18:54:44,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=130949.33333333333, ans=0.2 +2024-08-25 18:54:56,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=131002.66666666667, ans=0.0 +2024-08-25 18:54:57,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=131002.66666666667, ans=0.0 +2024-08-25 18:55:38,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=131056.0, ans=0.0 +2024-08-25 18:56:02,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131162.66666666666, ans=0.125 +2024-08-25 18:56:19,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=131162.66666666666, ans=0.025 +2024-08-25 18:56:32,989 INFO [train.py:1114] (3/4) Epoch 10, batch 2200, loss[loss=0.2643, simple_loss=0.3134, pruned_loss=0.07904, ctc_loss=0.143, over 19586.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2959, pruned_loss=0.06788, ctc_loss=0.1278, over 3868666.47 frames. 
], batch size: 57, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 18:56:43,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=131216.0, ans=0.5 +2024-08-25 18:57:00,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131216.0, ans=0.1 +2024-08-25 18:57:26,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=12.0 +2024-08-25 18:57:38,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=131376.0, ans=0.2 +2024-08-25 18:57:51,460 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.773e+02 2.006e+02 2.540e+02 3.937e+02, threshold=4.013e+02, percent-clipped=0.0 +2024-08-25 18:57:51,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=131429.33333333334, ans=0.125 +2024-08-25 18:58:07,783 INFO [train.py:1114] (3/4) Epoch 10, batch 2250, loss[loss=0.2689, simple_loss=0.3221, pruned_loss=0.07805, ctc_loss=0.1488, over 19619.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.296, pruned_loss=0.06787, ctc_loss=0.128, over 3868653.23 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 18:58:30,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131589.33333333334, ans=0.1 +2024-08-25 18:58:39,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=131589.33333333334, ans=0.2 +2024-08-25 18:58:50,867 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.83 vs. limit=22.5 +2024-08-25 18:59:03,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=131696.0, ans=0.125 +2024-08-25 18:59:05,056 INFO [train.py:1114] (3/4) Epoch 10, batch 2300, loss[loss=0.2424, simple_loss=0.2941, pruned_loss=0.06929, ctc_loss=0.1302, over 19503.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2952, pruned_loss=0.06773, ctc_loss=0.1278, over 3862127.26 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 18:59:14,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=131749.33333333334, ans=0.125 +2024-08-25 19:00:00,734 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.815e+02 2.310e+02 2.961e+02 4.661e+02, threshold=4.621e+02, percent-clipped=5.0 +2024-08-25 19:00:14,654 INFO [train.py:1114] (3/4) Epoch 10, batch 2350, loss[loss=0.2849, simple_loss=0.3248, pruned_loss=0.09101, ctc_loss=0.1573, over 19679.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.2948, pruned_loss=0.06752, ctc_loss=0.1272, over 3864104.51 frames. 
], batch size: 63, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 19:00:26,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=132069.33333333334, ans=0.0 +2024-08-25 19:00:37,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=132122.66666666666, ans=0.125 +2024-08-25 19:00:38,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=132122.66666666666, ans=0.0 +2024-08-25 19:00:41,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=132122.66666666666, ans=0.125 +2024-08-25 19:00:49,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132176.0, ans=0.1 +2024-08-25 19:00:56,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.96 vs. limit=15.0 +2024-08-25 19:01:13,181 INFO [train.py:1114] (3/4) Epoch 10, batch 2400, loss[loss=0.255, simple_loss=0.3159, pruned_loss=0.07177, ctc_loss=0.1263, over 19248.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2969, pruned_loss=0.06832, ctc_loss=0.1282, over 3859369.59 frames. ], batch size: 71, lr: 1.46e-02, grad_scale: 32.0 +2024-08-25 19:01:13,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=132282.66666666666, ans=0.125 +2024-08-25 19:01:37,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=132389.33333333334, ans=0.2 +2024-08-25 19:01:39,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.42 vs. limit=6.0 +2024-08-25 19:01:43,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.70 vs. limit=15.0 +2024-08-25 19:01:44,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=132389.33333333334, ans=0.5 +2024-08-25 19:01:59,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.51 vs. limit=15.0 +2024-08-25 19:02:10,724 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.986e+02 2.279e+02 2.618e+02 8.799e+02, threshold=4.558e+02, percent-clipped=0.0 +2024-08-25 19:02:22,037 INFO [train.py:1114] (3/4) Epoch 10, batch 2450, loss[loss=0.3245, simple_loss=0.3373, pruned_loss=0.1128, ctc_loss=0.2156, over 13096.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3014, pruned_loss=0.07219, ctc_loss=0.1358, over 3729036.20 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 16.0 +2024-08-25 19:02:38,028 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.53 vs. 
limit=15.0 +2024-08-25 19:02:42,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132602.66666666666, ans=0.1 +2024-08-25 19:02:52,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.13 vs. limit=15.0 +2024-08-25 19:02:53,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=132656.0, ans=0.09899494936611666 +2024-08-25 19:02:55,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=132656.0, ans=0.025 +2024-08-25 19:04:28,597 INFO [train.py:1114] (3/4) Epoch 11, batch 0, loss[loss=0.2237, simple_loss=0.2734, pruned_loss=0.06367, ctc_loss=0.1168, over 19802.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2734, pruned_loss=0.06367, ctc_loss=0.1168, over 19802.00 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:04:28,597 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 19:04:55,855 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.2031, simple_loss=0.2887, pruned_loss=0.04339, ctc_loss=0.0768, over 944034.00 frames. +2024-08-25 19:04:55,856 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-25 19:05:00,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=132757.33333333334, ans=0.1 +2024-08-25 19:05:11,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.01 vs. limit=15.0 +2024-08-25 19:05:18,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=132864.0, ans=0.125 +2024-08-25 19:05:34,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132864.0, ans=0.1 +2024-08-25 19:05:34,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=132864.0, ans=0.2 +2024-08-25 19:05:38,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.39 vs. limit=5.0 +2024-08-25 19:05:40,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=132917.33333333334, ans=0.0 +2024-08-25 19:05:48,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.11 vs. limit=22.5 +2024-08-25 19:06:02,242 INFO [train.py:1114] (3/4) Epoch 11, batch 50, loss[loss=0.2059, simple_loss=0.2664, pruned_loss=0.05372, ctc_loss=0.0949, over 19733.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2969, pruned_loss=0.06729, ctc_loss=0.1275, over 844726.23 frames. 
], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:06:02,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133024.0, ans=0.0 +2024-08-25 19:06:03,364 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.465e+02 2.050e+02 2.234e+02 2.552e+02 4.359e+02, threshold=4.468e+02, percent-clipped=1.0 +2024-08-25 19:06:08,054 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:06:08,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.42 vs. limit=15.0 +2024-08-25 19:06:09,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.36 vs. limit=10.0 +2024-08-25 19:06:22,498 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:06:32,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.74 vs. limit=15.0 +2024-08-25 19:06:51,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133184.0, ans=0.1 +2024-08-25 19:07:42,555 INFO [train.py:1114] (3/4) Epoch 11, batch 100, loss[loss=0.2213, simple_loss=0.2768, pruned_loss=0.06084, ctc_loss=0.1103, over 19711.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.296, pruned_loss=0.06663, ctc_loss=0.1259, over 1499036.81 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:07:57,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=133344.0, ans=0.125 +2024-08-25 19:08:08,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.17 vs. limit=15.0 +2024-08-25 19:08:24,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=133450.66666666666, ans=0.125 +2024-08-25 19:08:58,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133504.0, ans=0.1 +2024-08-25 19:09:10,084 INFO [train.py:1114] (3/4) Epoch 11, batch 150, loss[loss=0.2098, simple_loss=0.2604, pruned_loss=0.0578, ctc_loss=0.1091, over 19699.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.2935, pruned_loss=0.06573, ctc_loss=0.1241, over 2027778.22 frames. 
], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-25 19:09:12,928 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.747e+02 2.015e+02 2.344e+02 3.708e+02, threshold=4.031e+02, percent-clipped=0.0 +2024-08-25 19:09:24,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=133610.66666666666, ans=0.2 +2024-08-25 19:09:46,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=133717.33333333334, ans=0.0 +2024-08-25 19:10:05,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=133770.66666666666, ans=0.5 +2024-08-25 19:10:18,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=15.0 +2024-08-25 19:10:23,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.98 vs. limit=22.5 +2024-08-25 19:10:34,566 INFO [train.py:1114] (3/4) Epoch 11, batch 200, loss[loss=0.2589, simple_loss=0.3131, pruned_loss=0.07482, ctc_loss=0.1377, over 18237.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.2916, pruned_loss=0.06486, ctc_loss=0.1223, over 2435452.68 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:10:49,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.73 vs. limit=12.0 +2024-08-25 19:11:34,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=133930.66666666666, ans=0.125 +2024-08-25 19:11:40,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.00 vs. limit=15.0 +2024-08-25 19:11:50,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.63 vs. limit=22.5 +2024-08-25 19:12:01,079 INFO [train.py:1114] (3/4) Epoch 11, batch 250, loss[loss=0.2585, simple_loss=0.3109, pruned_loss=0.07641, ctc_loss=0.1331, over 19402.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2922, pruned_loss=0.0655, ctc_loss=0.1231, over 2756090.28 frames. 
], batch size: 67, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:12:02,123 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.716e+02 2.023e+02 2.469e+02 5.021e+02, threshold=4.046e+02, percent-clipped=3.0 +2024-08-25 19:12:03,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=134090.66666666666, ans=0.125 +2024-08-25 19:12:09,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=134090.66666666666, ans=0.125 +2024-08-25 19:12:18,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134144.0, ans=0.1 +2024-08-25 19:12:20,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=134144.0, ans=0.125 +2024-08-25 19:12:21,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134144.0, ans=0.125 +2024-08-25 19:12:22,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=134144.0, ans=0.0 +2024-08-25 19:12:24,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=134144.0, ans=0.125 +2024-08-25 19:12:32,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.61 vs. limit=15.0 +2024-08-25 19:12:32,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.30 vs. limit=10.0 +2024-08-25 19:12:52,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0 +2024-08-25 19:12:52,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=134304.0, ans=0.025 +2024-08-25 19:13:03,619 INFO [train.py:1114] (3/4) Epoch 11, batch 300, loss[loss=0.2627, simple_loss=0.3095, pruned_loss=0.07903, ctc_loss=0.1448, over 19547.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2918, pruned_loss=0.06523, ctc_loss=0.1224, over 3001824.53 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:13:20,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0 +2024-08-25 19:13:49,570 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.40 vs. limit=15.0 +2024-08-25 19:13:50,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134517.33333333334, ans=0.0 +2024-08-25 19:14:07,024 INFO [train.py:1114] (3/4) Epoch 11, batch 350, loss[loss=0.2076, simple_loss=0.2697, pruned_loss=0.05264, ctc_loss=0.1006, over 19756.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2921, pruned_loss=0.06523, ctc_loss=0.1223, over 3191252.45 frames. 
], batch size: 48, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:14:08,113 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.422e+02 1.838e+02 2.258e+02 2.898e+02 4.827e+02, threshold=4.516e+02, percent-clipped=2.0 +2024-08-25 19:14:14,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=134624.0, ans=0.125 +2024-08-25 19:14:36,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=134624.0, ans=0.0 +2024-08-25 19:14:54,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=134730.66666666666, ans=0.0 +2024-08-25 19:15:02,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134730.66666666666, ans=0.0 +2024-08-25 19:15:10,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=134784.0, ans=0.07 +2024-08-25 19:15:16,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=134784.0, ans=0.025 +2024-08-25 19:15:47,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134837.33333333334, ans=0.1 +2024-08-25 19:15:55,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=134837.33333333334, ans=0.0 +2024-08-25 19:15:57,892 INFO [train.py:1114] (3/4) Epoch 11, batch 400, loss[loss=0.2236, simple_loss=0.2879, pruned_loss=0.05721, ctc_loss=0.112, over 19498.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2926, pruned_loss=0.06572, ctc_loss=0.1233, over 3343130.63 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:16:02,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=134890.66666666666, ans=0.0 +2024-08-25 19:16:41,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134997.33333333334, ans=0.1 +2024-08-25 19:17:21,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.57 vs. limit=15.0 +2024-08-25 19:17:22,192 INFO [train.py:1114] (3/4) Epoch 11, batch 450, loss[loss=0.2219, simple_loss=0.2925, pruned_loss=0.0544, ctc_loss=0.1062, over 19600.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2925, pruned_loss=0.06567, ctc_loss=0.1232, over 3451343.18 frames. 
], batch size: 55, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:17:31,719 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.841e+02 2.102e+02 2.681e+02 4.407e+02, threshold=4.204e+02, percent-clipped=0.0 +2024-08-25 19:17:44,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=135210.66666666666, ans=0.125 +2024-08-25 19:17:49,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=135210.66666666666, ans=0.025 +2024-08-25 19:18:00,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=135264.0, ans=0.0 +2024-08-25 19:18:22,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135370.66666666666, ans=0.1 +2024-08-25 19:18:34,757 INFO [train.py:1114] (3/4) Epoch 11, batch 500, loss[loss=0.2374, simple_loss=0.3018, pruned_loss=0.06346, ctc_loss=0.1152, over 19630.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2916, pruned_loss=0.06506, ctc_loss=0.1223, over 3546681.93 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:18:36,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135424.0, ans=0.125 +2024-08-25 19:19:50,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135637.33333333334, ans=0.1 +2024-08-25 19:19:50,720 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:19:58,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=135637.33333333334, ans=0.125 +2024-08-25 19:20:17,214 INFO [train.py:1114] (3/4) Epoch 11, batch 550, loss[loss=0.2386, simple_loss=0.3058, pruned_loss=0.06259, ctc_loss=0.1157, over 19359.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.2916, pruned_loss=0.06495, ctc_loss=0.1222, over 3609335.63 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0 +2024-08-25 19:20:18,397 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.822e+02 2.069e+02 2.386e+02 4.149e+02, threshold=4.137e+02, percent-clipped=0.0 +2024-08-25 19:20:33,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135744.0, ans=0.125 +2024-08-25 19:20:44,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=135744.0, ans=0.0 +2024-08-25 19:20:49,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. 
limit=10.0 +2024-08-25 19:20:50,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=135797.33333333334, ans=0.125 +2024-08-25 19:21:01,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=135797.33333333334, ans=0.0 +2024-08-25 19:21:15,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=135904.0, ans=0.125 +2024-08-25 19:21:18,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=135904.0, ans=0.125 +2024-08-25 19:21:30,819 INFO [train.py:1114] (3/4) Epoch 11, batch 600, loss[loss=0.2645, simple_loss=0.3119, pruned_loss=0.0785, ctc_loss=0.15, over 19401.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2919, pruned_loss=0.06513, ctc_loss=0.1226, over 3666839.14 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:21:44,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=135957.33333333334, ans=0.125 +2024-08-25 19:21:47,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=135957.33333333334, ans=0.2 +2024-08-25 19:21:50,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=136010.66666666666, ans=0.05 +2024-08-25 19:22:32,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=136010.66666666666, ans=0.0 +2024-08-25 19:22:50,544 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=5.926e-03 +2024-08-25 19:23:02,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136117.33333333334, ans=0.1 +2024-08-25 19:23:10,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.43 vs. limit=15.0 +2024-08-25 19:23:42,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=15.0 +2024-08-25 19:23:54,564 INFO [train.py:1114] (3/4) Epoch 11, batch 650, loss[loss=0.2162, simple_loss=0.2877, pruned_loss=0.05195, ctc_loss=0.1022, over 19771.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2919, pruned_loss=0.06513, ctc_loss=0.1225, over 3716528.57 frames. 
], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:23:55,640 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 1.913e+02 2.094e+02 2.738e+02 4.984e+02, threshold=4.187e+02, percent-clipped=5.0 +2024-08-25 19:23:57,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=136224.0, ans=0.0 +2024-08-25 19:23:58,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=136224.0, ans=10.0 +2024-08-25 19:23:58,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136224.0, ans=0.1 +2024-08-25 19:24:39,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=136330.66666666666, ans=0.125 +2024-08-25 19:24:47,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=136330.66666666666, ans=0.0 +2024-08-25 19:25:34,154 INFO [train.py:1114] (3/4) Epoch 11, batch 700, loss[loss=0.2386, simple_loss=0.2886, pruned_loss=0.06881, ctc_loss=0.1277, over 19728.00 frames. ], tot_loss[loss=0.237, simple_loss=0.2929, pruned_loss=0.06582, ctc_loss=0.1236, over 3747550.11 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:28:09,977 INFO [train.py:1114] (3/4) Epoch 11, batch 750, loss[loss=0.2355, simple_loss=0.2939, pruned_loss=0.06396, ctc_loss=0.1227, over 19492.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.2921, pruned_loss=0.06535, ctc_loss=0.1228, over 3772940.14 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:28:25,942 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.821e+02 2.028e+02 2.720e+02 4.524e+02, threshold=4.057e+02, percent-clipped=2.0 +2024-08-25 19:28:38,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.93 vs. limit=15.0 +2024-08-25 19:28:49,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136810.66666666666, ans=0.125 +2024-08-25 19:28:50,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.54 vs. limit=10.0 +2024-08-25 19:28:51,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=136810.66666666666, ans=0.0 +2024-08-25 19:32:08,677 INFO [train.py:1114] (3/4) Epoch 11, batch 800, loss[loss=0.2083, simple_loss=0.2713, pruned_loss=0.05363, ctc_loss=0.09542, over 19814.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2923, pruned_loss=0.06527, ctc_loss=0.1227, over 3794555.52 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:32:18,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.24 vs. 
limit=6.0 +2024-08-25 19:32:23,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=137024.0, ans=0.025 +2024-08-25 19:32:53,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137077.33333333334, ans=0.2 +2024-08-25 19:32:54,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.39 vs. limit=15.0 +2024-08-25 19:33:06,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137130.66666666666, ans=0.1 +2024-08-25 19:33:15,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=137130.66666666666, ans=0.0 +2024-08-25 19:33:19,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137130.66666666666, ans=0.1 +2024-08-25 19:33:49,181 INFO [train.py:1114] (3/4) Epoch 11, batch 850, loss[loss=0.23, simple_loss=0.3012, pruned_loss=0.05746, ctc_loss=0.1095, over 19679.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.2916, pruned_loss=0.06499, ctc_loss=0.1222, over 3813600.62 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:33:50,252 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.863e+02 2.065e+02 2.415e+02 4.305e+02, threshold=4.130e+02, percent-clipped=1.0 +2024-08-25 19:34:09,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=137344.0, ans=0.0 +2024-08-25 19:34:12,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=137344.0, ans=0.035 +2024-08-25 19:34:18,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=137397.33333333334, ans=0.125 +2024-08-25 19:34:40,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=137450.66666666666, ans=0.125 +2024-08-25 19:34:47,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=137450.66666666666, ans=0.0 +2024-08-25 19:34:53,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=137504.0, ans=0.2 +2024-08-25 19:34:57,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=137504.0, ans=0.0 +2024-08-25 19:35:00,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=137504.0, ans=0.2 +2024-08-25 19:35:00,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=137504.0, ans=0.07 +2024-08-25 19:35:01,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137504.0, ans=0.125 +2024-08-25 19:35:05,246 INFO [train.py:1114] (3/4) Epoch 11, batch 900, loss[loss=0.232, simple_loss=0.2804, pruned_loss=0.06606, ctc_loss=0.1285, over 19418.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2919, pruned_loss=0.06515, ctc_loss=0.1225, over 3817804.25 frames. 
], batch size: 48, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:35:07,922 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.70 vs. limit=15.0 +2024-08-25 19:35:35,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137664.0, ans=0.125 +2024-08-25 19:35:48,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=137664.0, ans=0.125 +2024-08-25 19:35:52,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=137664.0, ans=0.5 +2024-08-25 19:36:04,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=137717.33333333334, ans=0.0 +2024-08-25 19:36:06,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.15 vs. limit=15.0 +2024-08-25 19:36:06,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=137717.33333333334, ans=10.0 +2024-08-25 19:36:49,861 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:37:18,564 INFO [train.py:1114] (3/4) Epoch 11, batch 950, loss[loss=0.2342, simple_loss=0.2882, pruned_loss=0.06574, ctc_loss=0.122, over 19512.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.2924, pruned_loss=0.06539, ctc_loss=0.1228, over 3819341.80 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-25 19:37:19,700 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.805e+02 2.081e+02 2.536e+02 4.211e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-25 19:37:25,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=6.0 +2024-08-25 19:37:44,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=137930.66666666666, ans=0.0 +2024-08-25 19:37:48,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.75 vs. limit=22.5 +2024-08-25 19:37:53,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.95 vs. limit=12.0 +2024-08-25 19:37:59,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=137930.66666666666, ans=0.2 +2024-08-25 19:37:59,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=137930.66666666666, ans=0.0 +2024-08-25 19:38:03,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137984.0, ans=0.2 +2024-08-25 19:38:31,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=138037.33333333334, ans=0.05 +2024-08-25 19:38:48,985 INFO [train.py:1114] (3/4) Epoch 11, batch 1000, loss[loss=0.2479, simple_loss=0.2997, pruned_loss=0.07132, ctc_loss=0.1335, over 19850.00 frames. 
], tot_loss[loss=0.2379, simple_loss=0.2936, pruned_loss=0.06619, ctc_loss=0.1243, over 3815711.39 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:39:27,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.50 vs. limit=15.0 +2024-08-25 19:39:36,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0 +2024-08-25 19:39:40,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=138250.66666666666, ans=0.125 +2024-08-25 19:40:11,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=138304.0, ans=0.0 +2024-08-25 19:40:14,838 INFO [train.py:1114] (3/4) Epoch 11, batch 1050, loss[loss=0.2412, simple_loss=0.2964, pruned_loss=0.06707, ctc_loss=0.13, over 19839.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.2928, pruned_loss=0.06591, ctc_loss=0.1239, over 3822587.29 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:40:16,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.04 vs. limit=22.5 +2024-08-25 19:40:16,851 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.874e+02 2.329e+02 2.645e+02 4.211e+02, threshold=4.658e+02, percent-clipped=2.0 +2024-08-25 19:40:19,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138357.33333333334, ans=0.125 +2024-08-25 19:40:22,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=138357.33333333334, ans=0.125 +2024-08-25 19:40:23,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.54 vs. limit=22.5 +2024-08-25 19:40:24,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138357.33333333334, ans=0.125 +2024-08-25 19:40:24,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138357.33333333334, ans=0.1 +2024-08-25 19:40:28,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138410.66666666666, ans=0.125 +2024-08-25 19:40:55,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138517.33333333334, ans=0.1 +2024-08-25 19:41:24,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=138570.66666666666, ans=0.0 +2024-08-25 19:41:26,113 INFO [train.py:1114] (3/4) Epoch 11, batch 1100, loss[loss=0.253, simple_loss=0.2994, pruned_loss=0.0757, ctc_loss=0.1381, over 19576.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.2919, pruned_loss=0.06529, ctc_loss=0.123, over 3830551.23 frames. 
], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:42:17,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138677.33333333334, ans=0.125 +2024-08-25 19:42:51,271 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 19:43:02,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138730.66666666666, ans=0.0 +2024-08-25 19:43:08,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138784.0, ans=0.1 +2024-08-25 19:43:35,986 INFO [train.py:1114] (3/4) Epoch 11, batch 1150, loss[loss=0.2022, simple_loss=0.2668, pruned_loss=0.05075, ctc_loss=0.09036, over 19582.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2919, pruned_loss=0.06545, ctc_loss=0.1231, over 3828472.64 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:43:37,194 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.797e+02 2.039e+02 2.453e+02 4.580e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-25 19:43:53,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=138944.0, ans=0.0 +2024-08-25 19:44:06,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138997.33333333334, ans=0.1 +2024-08-25 19:44:09,424 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.71 vs. limit=15.0 +2024-08-25 19:44:23,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-08-25 19:44:29,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=139104.0, ans=0.125 +2024-08-25 19:44:41,838 INFO [train.py:1114] (3/4) Epoch 11, batch 1200, loss[loss=0.2355, simple_loss=0.2937, pruned_loss=0.06542, ctc_loss=0.1162, over 19827.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.2936, pruned_loss=0.06633, ctc_loss=0.1249, over 3824357.90 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:44:46,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139157.33333333334, ans=0.125 +2024-08-25 19:44:48,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.76 vs. 
limit=22.5 +2024-08-25 19:44:53,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=139210.66666666666, ans=0.125 +2024-08-25 19:45:00,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=139210.66666666666, ans=0.2 +2024-08-25 19:45:55,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=139317.33333333334, ans=10.0 +2024-08-25 19:46:02,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=139370.66666666666, ans=0.125 +2024-08-25 19:46:06,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=139370.66666666666, ans=0.125 +2024-08-25 19:46:15,637 INFO [train.py:1114] (3/4) Epoch 11, batch 1250, loss[loss=0.2728, simple_loss=0.3198, pruned_loss=0.08152, ctc_loss=0.157, over 19501.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.2937, pruned_loss=0.06585, ctc_loss=0.1238, over 3842991.96 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-25 19:46:16,712 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.769e+02 1.992e+02 2.545e+02 3.633e+02, threshold=3.984e+02, percent-clipped=0.0 +2024-08-25 19:46:24,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.85 vs. limit=15.0 +2024-08-25 19:46:39,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139424.0, ans=0.125 +2024-08-25 19:46:45,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=139477.33333333334, ans=0.1 +2024-08-25 19:47:07,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=139530.66666666666, ans=0.0 +2024-08-25 19:47:18,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=139584.0, ans=0.125 +2024-08-25 19:47:23,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=139584.0, ans=0.2 +2024-08-25 19:47:40,574 INFO [train.py:1114] (3/4) Epoch 11, batch 1300, loss[loss=0.2374, simple_loss=0.3067, pruned_loss=0.0601, ctc_loss=0.1197, over 18834.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2925, pruned_loss=0.06503, ctc_loss=0.1223, over 3847251.73 frames. ], batch size: 76, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:48:01,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=139744.0, ans=0.125 +2024-08-25 19:48:04,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139797.33333333334, ans=0.1 +2024-08-25 19:48:10,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=139797.33333333334, ans=0.2 +2024-08-25 19:48:22,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.48 vs. 
limit=10.0 +2024-08-25 19:48:23,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=139850.66666666666, ans=0.2 +2024-08-25 19:48:29,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=139850.66666666666, ans=0.125 +2024-08-25 19:48:31,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=139850.66666666666, ans=0.125 +2024-08-25 19:48:58,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139957.33333333334, ans=0.1 +2024-08-25 19:48:58,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=139957.33333333334, ans=0.125 +2024-08-25 19:48:59,248 INFO [train.py:1114] (3/4) Epoch 11, batch 1350, loss[loss=0.2487, simple_loss=0.3036, pruned_loss=0.07042, ctc_loss=0.1323, over 19752.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2918, pruned_loss=0.0646, ctc_loss=0.1214, over 3858638.85 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 16.0 +2024-08-25 19:49:00,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=139957.33333333334, ans=0.125 +2024-08-25 19:49:01,642 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.851e+02 2.124e+02 2.742e+02 4.665e+02, threshold=4.248e+02, percent-clipped=3.0 +2024-08-25 19:49:10,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=140010.66666666666, ans=0.2 +2024-08-25 19:49:27,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=140064.0, ans=0.5 +2024-08-25 19:49:43,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=140117.33333333334, ans=0.0 +2024-08-25 19:49:49,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140117.33333333334, ans=0.125 +2024-08-25 19:49:49,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=140117.33333333334, ans=0.025 +2024-08-25 19:49:51,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140170.66666666666, ans=0.1 +2024-08-25 19:49:53,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-25 19:50:07,175 INFO [train.py:1114] (3/4) Epoch 11, batch 1400, loss[loss=0.1862, simple_loss=0.2479, pruned_loss=0.04541, ctc_loss=0.08398, over 19677.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2913, pruned_loss=0.06439, ctc_loss=0.1208, over 3865655.93 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:50:09,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140224.0, ans=0.0 +2024-08-25 19:50:41,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.06 vs. 
limit=6.0 +2024-08-25 19:51:37,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. limit=10.0 +2024-08-25 19:51:42,655 INFO [train.py:1114] (3/4) Epoch 11, batch 1450, loss[loss=0.2659, simple_loss=0.3195, pruned_loss=0.07782, ctc_loss=0.1416, over 19655.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2919, pruned_loss=0.06448, ctc_loss=0.1208, over 3863849.63 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:51:45,009 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.813e+02 2.052e+02 2.523e+02 4.896e+02, threshold=4.103e+02, percent-clipped=2.0 +2024-08-25 19:52:10,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-25 19:52:20,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=140597.33333333334, ans=0.125 +2024-08-25 19:52:36,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140597.33333333334, ans=0.125 +2024-08-25 19:52:41,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140650.66666666666, ans=0.125 +2024-08-25 19:52:45,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140650.66666666666, ans=0.1 +2024-08-25 19:52:47,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140650.66666666666, ans=0.125 +2024-08-25 19:52:58,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=140704.0, ans=0.125 +2024-08-25 19:53:19,918 INFO [train.py:1114] (3/4) Epoch 11, batch 1500, loss[loss=0.2658, simple_loss=0.3155, pruned_loss=0.07867, ctc_loss=0.1467, over 19575.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2922, pruned_loss=0.06493, ctc_loss=0.1218, over 3863318.59 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 16.0 +2024-08-25 19:53:49,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140810.66666666666, ans=0.125 +2024-08-25 19:54:14,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=140864.0, ans=0.025 +2024-08-25 19:55:07,083 INFO [train.py:1114] (3/4) Epoch 11, batch 1550, loss[loss=0.279, simple_loss=0.3229, pruned_loss=0.08576, ctc_loss=0.1592, over 19600.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2918, pruned_loss=0.06463, ctc_loss=0.1216, over 3848889.14 frames. 
], batch size: 60, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 19:55:08,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=141024.0, ans=0.2
+2024-08-25 19:55:10,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.421e+02 1.804e+02 2.014e+02 2.422e+02 4.168e+02, threshold=4.028e+02, percent-clipped=1.0
+2024-08-25 19:55:38,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=141024.0, ans=0.0
+2024-08-25 19:56:14,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=141130.66666666666, ans=0.0
+2024-08-25 19:57:19,245 INFO [train.py:1114] (3/4) Epoch 11, batch 1600, loss[loss=0.2431, simple_loss=0.3059, pruned_loss=0.06614, ctc_loss=0.12, over 19856.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.292, pruned_loss=0.065, ctc_loss=0.1225, over 3836420.81 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0
+2024-08-25 19:58:15,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=141397.33333333334, ans=0.2
+2024-08-25 19:58:18,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=12.0
+2024-08-25 19:58:27,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=141450.66666666666, ans=0.025
+2024-08-25 19:59:27,408 INFO [train.py:1114] (3/4) Epoch 11, batch 1650, loss[loss=0.2424, simple_loss=0.2986, pruned_loss=0.06728, ctc_loss=0.129, over 19653.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.292, pruned_loss=0.06508, ctc_loss=0.1229, over 3833698.53 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 32.0
+2024-08-25 19:59:29,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.768e+02 1.990e+02 2.303e+02 4.438e+02, threshold=3.979e+02, percent-clipped=2.0
+2024-08-25 19:59:42,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0
+2024-08-25 19:59:48,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=141557.33333333334, ans=0.125
+2024-08-25 19:59:53,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141610.66666666666, ans=0.1
+2024-08-25 20:00:10,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=141664.0, ans=0.125
+2024-08-25 20:00:14,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=141664.0, ans=0.0
+2024-08-25 20:01:05,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=141770.66666666666, ans=0.125
+2024-08-25 20:01:05,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=141770.66666666666, ans=0.2
+2024-08-25 20:01:17,965 INFO [train.py:1114] (3/4) Epoch 11, batch 1700, loss[loss=0.2206, simple_loss=0.2713, pruned_loss=0.06185, ctc_loss=0.1154, over 19690.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2912, pruned_loss=0.06431, ctc_loss=0.1216, over 3847872.20 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 20:01:39,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141930.66666666666, ans=0.1
+2024-08-25 20:01:43,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141930.66666666666, ans=0.125
+2024-08-25 20:01:55,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=141984.0, ans=0.0
+2024-08-25 20:02:12,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142037.33333333334, ans=0.125
+2024-08-25 20:02:16,946 INFO [train.py:1114] (3/4) Epoch 11, batch 1750, loss[loss=0.2299, simple_loss=0.2776, pruned_loss=0.06704, ctc_loss=0.1202, over 19668.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2905, pruned_loss=0.06399, ctc_loss=0.1208, over 3852452.67 frames. ], batch size: 45, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 20:02:20,530 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.814e+02 2.107e+02 2.366e+02 3.890e+02, threshold=4.214e+02, percent-clipped=0.0
+2024-08-25 20:02:42,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142090.66666666666, ans=0.125
+2024-08-25 20:03:14,530 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 20:03:20,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=15.0
+2024-08-25 20:04:10,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=142304.0, ans=0.125
+2024-08-25 20:04:27,016 INFO [train.py:1114] (3/4) Epoch 11, batch 1800, loss[loss=0.2145, simple_loss=0.2903, pruned_loss=0.05096, ctc_loss=0.09218, over 19622.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.2913, pruned_loss=0.06443, ctc_loss=0.1212, over 3853593.72 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 16.0
+2024-08-25 20:04:30,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.84 vs. limit=15.0
+2024-08-25 20:04:48,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.66 vs. limit=15.0
+2024-08-25 20:04:51,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=142410.66666666666, ans=0.2
+2024-08-25 20:05:21,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=142464.0, ans=0.125
+2024-08-25 20:05:29,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=15.0
+2024-08-25 20:05:50,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=142570.66666666666, ans=0.0
+2024-08-25 20:06:13,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.27 vs. limit=22.5
+2024-08-25 20:06:15,202 INFO [train.py:1114] (3/4) Epoch 11, batch 1850, loss[loss=0.2266, simple_loss=0.2888, pruned_loss=0.05982, ctc_loss=0.1122, over 19599.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2912, pruned_loss=0.06457, ctc_loss=0.1217, over 3856713.00 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0
+2024-08-25 20:06:18,509 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.849e+02 2.256e+02 2.966e+02 5.642e+02, threshold=4.511e+02, percent-clipped=6.0
+2024-08-25 20:06:26,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.95 vs. limit=15.0
+2024-08-25 20:06:59,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=142730.66666666666, ans=0.025
+2024-08-25 20:07:12,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=142784.0, ans=0.125
+2024-08-25 20:07:23,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=142784.0, ans=0.125
+2024-08-25 20:07:51,842 INFO [train.py:1114] (3/4) Epoch 11, batch 1900, loss[loss=0.2322, simple_loss=0.2971, pruned_loss=0.06045, ctc_loss=0.1161, over 19657.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2919, pruned_loss=0.065, ctc_loss=0.1224, over 3860517.49 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0
+2024-08-25 20:08:15,489 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 20:08:20,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=142944.0, ans=0.0
+2024-08-25 20:32:27,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=143050.66666666666, ans=0.2
+2024-08-25 20:51:09,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143104.0, ans=0.125
+2024-08-25 20:55:30,008 INFO [train.py:1114] (3/4) Epoch 11, batch 1950, loss[loss=0.2227, simple_loss=0.2861, pruned_loss=0.05711, ctc_loss=0.1129, over 19600.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2928, pruned_loss=0.06487, ctc_loss=0.122, over 3869579.71 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0
+2024-08-25 21:03:39,809 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.850e+02 2.123e+02 2.695e+02 5.282e+02, threshold=4.246e+02, percent-clipped=2.0
+2024-08-25 21:09:35,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143157.33333333334, ans=0.125
+2024-08-25 21:29:54,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=143264.0, ans=0.2
+2024-08-25 21:35:18,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143317.33333333334, ans=0.125
+2024-08-25 21:45:05,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=12.0
+2024-08-25 21:46:16,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.77 vs. limit=22.5
+2024-08-25 21:46:38,198 INFO [train.py:1114] (3/4) Epoch 11, batch 2000, loss[loss=0.1827, simple_loss=0.248, pruned_loss=0.04247, ctc_loss=0.08091, over 19639.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.293, pruned_loss=0.06496, ctc_loss=0.1222, over 3854321.89 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 21:57:33,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0
+2024-08-25 21:58:41,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=143477.33333333334, ans=0.015
+2024-08-25 22:03:56,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=143477.33333333334, ans=0.125
+2024-08-25 22:05:28,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=143530.66666666666, ans=0.025
+2024-08-25 22:08:39,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=143584.0, ans=0.025
+2024-08-25 22:19:42,816 INFO [train.py:1114] (3/4) Epoch 11, batch 2050, loss[loss=0.2247, simple_loss=0.2685, pruned_loss=0.06558, ctc_loss=0.1242, over 19703.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2922, pruned_loss=0.06488, ctc_loss=0.1221, over 3851667.99 frames. ], batch size: 47, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:20:13,479 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.838e+02 2.216e+02 2.724e+02 4.008e+02, threshold=4.432e+02, percent-clipped=0.0
+2024-08-25 22:20:41,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143690.66666666666, ans=0.125
+2024-08-25 22:27:32,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=143850.66666666666, ans=0.0
+2024-08-25 22:27:58,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=143850.66666666666, ans=0.1
+2024-08-25 22:27:59,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143850.66666666666, ans=0.1
+2024-08-25 22:28:12,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.91 vs. limit=22.5
+2024-08-25 22:31:28,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=143904.0, ans=0.025
+2024-08-25 22:32:23,591 INFO [train.py:1114] (3/4) Epoch 11, batch 2100, loss[loss=0.2381, simple_loss=0.3032, pruned_loss=0.0624, ctc_loss=0.1203, over 19761.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.2919, pruned_loss=0.0645, ctc_loss=0.1215, over 3858196.46 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:33:21,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143957.33333333334, ans=0.125
+2024-08-25 22:34:33,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=143957.33333333334, ans=0.125
+2024-08-25 22:34:44,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144010.66666666666, ans=0.125
+2024-08-25 22:34:44,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144010.66666666666, ans=0.1
+2024-08-25 22:34:54,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144010.66666666666, ans=0.0
+2024-08-25 22:35:12,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=144010.66666666666, ans=0.05
+2024-08-25 22:36:29,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=144064.0, ans=0.125
+2024-08-25 22:36:29,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=144064.0, ans=0.125
+2024-08-25 22:37:30,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=144117.33333333334, ans=0.09899494936611666
+2024-08-25 22:39:07,559 INFO [train.py:1114] (3/4) Epoch 11, batch 2150, loss[loss=0.2432, simple_loss=0.2901, pruned_loss=0.07063, ctc_loss=0.1374, over 19854.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.2914, pruned_loss=0.06448, ctc_loss=0.1213, over 3869561.22 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:39:51,928 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.523e+02 1.804e+02 2.068e+02 2.942e+02 5.639e+02, threshold=4.136e+02, percent-clipped=4.0
+2024-08-25 22:41:20,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=144277.33333333334, ans=0.2
+2024-08-25 22:42:45,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144330.66666666666, ans=0.0
+2024-08-25 22:43:18,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.64 vs. limit=5.0
+2024-08-25 22:43:19,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=144384.0, ans=0.0
+2024-08-25 22:44:02,557 INFO [train.py:1114] (3/4) Epoch 11, batch 2200, loss[loss=0.2409, simple_loss=0.3024, pruned_loss=0.06485, ctc_loss=0.1242, over 19598.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2907, pruned_loss=0.06412, ctc_loss=0.1205, over 3868060.34 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 32.0
+2024-08-25 22:46:31,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=144544.0, ans=0.125
+2024-08-25 22:47:00,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=144597.33333333334, ans=0.2
+2024-08-25 22:47:17,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=144597.33333333334, ans=0.0
+2024-08-25 22:49:03,016 INFO [train.py:1114] (3/4) Epoch 11, batch 2250, loss[loss=0.2473, simple_loss=0.3063, pruned_loss=0.06805, ctc_loss=0.1303, over 19619.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2912, pruned_loss=0.06441, ctc_loss=0.1209, over 3867730.12 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:49:09,610 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.818e+02 2.110e+02 2.782e+02 6.628e+02, threshold=4.220e+02, percent-clipped=3.0
+2024-08-25 22:49:55,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0
+2024-08-25 22:50:09,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=144917.33333333334, ans=0.07
+2024-08-25 22:50:37,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145024.0, ans=0.125
+2024-08-25 22:50:46,952 INFO [train.py:1114] (3/4) Epoch 11, batch 2300, loss[loss=0.2298, simple_loss=0.2884, pruned_loss=0.06295, ctc_loss=0.1131, over 19514.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2896, pruned_loss=0.064, ctc_loss=0.1202, over 3860895.39 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:50:53,183 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0
+2024-08-25 22:51:17,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=145024.0, ans=0.125
+2024-08-25 22:51:32,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=145077.33333333334, ans=0.2
+2024-08-25 22:51:36,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145130.66666666666, ans=0.1
+2024-08-25 22:52:12,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145184.0, ans=0.1
+2024-08-25 22:52:37,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.72 vs. limit=12.0
+2024-08-25 22:52:55,277 INFO [train.py:1114] (3/4) Epoch 11, batch 2350, loss[loss=0.2518, simple_loss=0.3071, pruned_loss=0.07109, ctc_loss=0.136, over 19652.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2896, pruned_loss=0.06416, ctc_loss=0.1204, over 3863648.94 frames. ], batch size: 63, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:53:00,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=145290.66666666666, ans=0.125
+2024-08-25 22:53:01,239 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 1.788e+02 2.141e+02 2.380e+02 3.835e+02, threshold=4.282e+02, percent-clipped=0.0
+2024-08-25 22:53:06,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.30 vs. limit=15.0
+2024-08-25 22:53:11,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=145290.66666666666, ans=0.025
+2024-08-25 22:53:26,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145344.0, ans=0.1
+2024-08-25 22:53:34,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0
+2024-08-25 22:53:44,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=145397.33333333334, ans=0.2
+2024-08-25 22:53:46,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0
+2024-08-25 22:54:15,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=145504.0, ans=0.125
+2024-08-25 22:54:25,998 INFO [train.py:1114] (3/4) Epoch 11, batch 2400, loss[loss=0.2372, simple_loss=0.2985, pruned_loss=0.06285, ctc_loss=0.1256, over 19293.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.292, pruned_loss=0.06508, ctc_loss=0.122, over 3857595.91 frames. ], batch size: 71, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:54:46,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145610.66666666666, ans=0.1
+2024-08-25 22:55:10,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=145717.33333333334, ans=0.125
+2024-08-25 22:55:23,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.57 vs. limit=15.0
+2024-08-25 22:55:36,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=145770.66666666666, ans=0.025
+2024-08-25 22:55:44,071 INFO [train.py:1114] (3/4) Epoch 11, batch 2450, loss[loss=0.3527, simple_loss=0.3499, pruned_loss=0.13, ctc_loss=0.2383, over 13244.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2963, pruned_loss=0.06863, ctc_loss=0.1288, over 3729666.82 frames. ], batch size: 140, lr: 1.33e-02, grad_scale: 32.0
+2024-08-25 22:56:00,766 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.910e+02 2.208e+02 2.594e+02 5.356e+02, threshold=4.415e+02, percent-clipped=1.0
+2024-08-25 22:58:44,112 INFO [train.py:1114] (3/4) Epoch 12, batch 0, loss[loss=0.2733, simple_loss=0.3043, pruned_loss=0.08727, ctc_loss=0.1694, over 19401.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3043, pruned_loss=0.08727, ctc_loss=0.1694, over 19401.00 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 22:58:44,113 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-25 22:59:53,981 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.0477, 2.7592, 3.2232, 2.5012], device='cuda:3')
+2024-08-25 23:00:02,931 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.1972, simple_loss=0.2841, pruned_loss=0.04086, ctc_loss=0.07109, over 944034.00 frames.
+2024-08-25 23:00:02,931 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB
+2024-08-25 23:00:15,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=146032.0, ans=0.2
+2024-08-25 23:00:58,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0
+2024-08-25 23:01:02,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.36 vs. limit=10.0
+2024-08-25 23:01:08,422 INFO [train.py:1114] (3/4) Epoch 12, batch 50, loss[loss=0.2109, simple_loss=0.2621, pruned_loss=0.05843, ctc_loss=0.1068, over 19703.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.2919, pruned_loss=0.06546, ctc_loss=0.1236, over 845316.12 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:01:17,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=146298.66666666666, ans=0.125
+2024-08-25 23:01:20,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=146298.66666666666, ans=0.125
+2024-08-25 23:01:23,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=146352.0, ans=0.125
+2024-08-25 23:01:26,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=146352.0, ans=0.125
+2024-08-25 23:01:27,723 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.810e+02 2.073e+02 2.436e+02 4.057e+02, threshold=4.147e+02, percent-clipped=0.0
+2024-08-25 23:01:42,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=146405.33333333334, ans=15.0
+2024-08-25 23:01:45,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146458.66666666666, ans=0.1
+2024-08-25 23:01:51,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=15.0
+2024-08-25 23:02:20,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=22.5
+2024-08-25 23:02:23,000 INFO [train.py:1114] (3/4) Epoch 12, batch 100, loss[loss=0.2313, simple_loss=0.2898, pruned_loss=0.06322, ctc_loss=0.1158, over 19712.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.2943, pruned_loss=0.06564, ctc_loss=0.1242, over 1500217.78 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:02:33,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=146565.33333333334, ans=0.2
+2024-08-25 23:02:54,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=146618.66666666666, ans=0.1
+2024-08-25 23:02:57,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0
+2024-08-25 23:03:39,245 INFO [train.py:1114] (3/4) Epoch 12, batch 150, loss[loss=0.1931, simple_loss=0.2523, pruned_loss=0.04974, ctc_loss=0.08593, over 19714.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2912, pruned_loss=0.06367, ctc_loss=0.1204, over 2028992.09 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:03:51,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=15.0
+2024-08-25 23:04:05,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=146885.33333333334, ans=0.125
+2024-08-25 23:04:09,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.659e+02 1.880e+02 2.314e+02 3.650e+02, threshold=3.760e+02, percent-clipped=0.0
+2024-08-25 23:04:23,314 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:04:39,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=146938.66666666666, ans=0.07
+2024-08-25 23:04:55,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=147045.33333333334, ans=0.0
+2024-08-25 23:05:07,043 INFO [train.py:1114] (3/4) Epoch 12, batch 200, loss[loss=0.2466, simple_loss=0.3007, pruned_loss=0.07015, ctc_loss=0.1306, over 18303.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2904, pruned_loss=0.06318, ctc_loss=0.1193, over 2435906.35 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:05:13,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=147098.66666666666, ans=0.0
+2024-08-25 23:05:34,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=147152.0, ans=0.5
+2024-08-25 23:06:02,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.45 vs. limit=6.0
+2024-08-25 23:06:21,790 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:06:55,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=147312.0, ans=0.0
+2024-08-25 23:07:01,816 INFO [train.py:1114] (3/4) Epoch 12, batch 250, loss[loss=0.2457, simple_loss=0.304, pruned_loss=0.06866, ctc_loss=0.1249, over 19363.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2896, pruned_loss=0.06282, ctc_loss=0.1188, over 2755304.30 frames. ], batch size: 67, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:07:04,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=147365.33333333334, ans=0.0
+2024-08-25 23:07:22,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.308e+02 1.825e+02 2.154e+02 2.499e+02 3.884e+02, threshold=4.307e+02, percent-clipped=2.0
+2024-08-25 23:07:26,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=147418.66666666666, ans=0.0
+2024-08-25 23:07:45,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=147525.33333333334, ans=0.0
+2024-08-25 23:07:52,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=147525.33333333334, ans=0.0
+2024-08-25 23:08:07,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=147578.66666666666, ans=0.2
+2024-08-25 23:08:13,950 INFO [train.py:1114] (3/4) Epoch 12, batch 300, loss[loss=0.2383, simple_loss=0.2958, pruned_loss=0.06578, ctc_loss=0.1232, over 19550.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2888, pruned_loss=0.06256, ctc_loss=0.118, over 2999102.31 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:08:29,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0
+2024-08-25 23:08:47,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=147738.66666666666, ans=10.0
+2024-08-25 23:09:03,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=147845.33333333334, ans=0.0
+2024-08-25 23:09:03,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.19 vs. limit=22.5
+2024-08-25 23:09:12,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0
+2024-08-25 23:09:17,476 INFO [train.py:1114] (3/4) Epoch 12, batch 350, loss[loss=0.2275, simple_loss=0.2797, pruned_loss=0.06359, ctc_loss=0.1205, over 19741.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2895, pruned_loss=0.0626, ctc_loss=0.118, over 3190049.49 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:09:36,453 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.749e+02 2.047e+02 2.740e+02 4.170e+02, threshold=4.094e+02, percent-clipped=0.0
+2024-08-25 23:10:00,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.37 vs. limit=15.0
+2024-08-25 23:10:10,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=148058.66666666666, ans=0.2
+2024-08-25 23:10:12,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148058.66666666666, ans=0.1
+2024-08-25 23:10:12,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=12.0
+2024-08-25 23:10:25,929 INFO [train.py:1114] (3/4) Epoch 12, batch 400, loss[loss=0.2268, simple_loss=0.2922, pruned_loss=0.0592, ctc_loss=0.1075, over 19493.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2885, pruned_loss=0.06222, ctc_loss=0.1171, over 3341862.98 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0
+2024-08-25 23:10:56,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=148218.66666666666, ans=0.0
+2024-08-25 23:11:13,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=148325.33333333334, ans=0.02
+2024-08-25 23:11:23,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=148325.33333333334, ans=0.125
+2024-08-25 23:11:27,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148325.33333333334, ans=0.0
+2024-08-25 23:11:33,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.56 vs. limit=5.0
+2024-08-25 23:11:51,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148378.66666666666, ans=0.1
+2024-08-25 23:11:54,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.94 vs. limit=15.0
+2024-08-25 23:12:05,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.64 vs. limit=15.0
+2024-08-25 23:12:05,893 INFO [train.py:1114] (3/4) Epoch 12, batch 450, loss[loss=0.2278, simple_loss=0.2911, pruned_loss=0.05858, ctc_loss=0.1184, over 19604.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2887, pruned_loss=0.06214, ctc_loss=0.117, over 3449575.95 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:12:07,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=148432.0, ans=0.125
+2024-08-25 23:12:28,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.830e+02 2.201e+02 2.765e+02 4.484e+02, threshold=4.403e+02, percent-clipped=1.0
+2024-08-25 23:12:35,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0
+2024-08-25 23:12:38,894 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:13:21,163 INFO [train.py:1114] (3/4) Epoch 12, batch 500, loss[loss=0.2495, simple_loss=0.3105, pruned_loss=0.06948, ctc_loss=0.1239, over 19711.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2881, pruned_loss=0.06225, ctc_loss=0.1171, over 3546020.89 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:13:23,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=148698.66666666666, ans=0.2
+2024-08-25 23:13:51,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148752.0, ans=0.1
+2024-08-25 23:13:55,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=148805.33333333334, ans=0.125
+2024-08-25 23:14:15,406 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:14:40,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148858.66666666666, ans=0.1
+2024-08-25 23:14:52,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=148912.0, ans=6.0
+2024-08-25 23:14:52,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=148912.0, ans=22.5
+2024-08-25 23:14:54,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=148912.0, ans=10.0
+2024-08-25 23:14:59,259 INFO [train.py:1114] (3/4) Epoch 12, batch 550, loss[loss=0.2404, simple_loss=0.3026, pruned_loss=0.0646, ctc_loss=0.1224, over 19283.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2884, pruned_loss=0.06254, ctc_loss=0.1177, over 3607276.06 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:15:03,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=148965.33333333334, ans=0.125
+2024-08-25 23:15:13,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=148965.33333333334, ans=0.125
+2024-08-25 23:15:42,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.692e+02 2.049e+02 2.499e+02 4.022e+02, threshold=4.098e+02, percent-clipped=0.0
+2024-08-25 23:15:58,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=149072.0, ans=0.125
+2024-08-25 23:16:00,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149072.0, ans=0.125
+2024-08-25 23:16:06,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0
+2024-08-25 23:16:26,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149125.33333333334, ans=0.125
+2024-08-25 23:16:38,580 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:16:54,713 INFO [train.py:1114] (3/4) Epoch 12, batch 600, loss[loss=0.266, simple_loss=0.3128, pruned_loss=0.08027, ctc_loss=0.1465, over 19326.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2885, pruned_loss=0.06273, ctc_loss=0.1178, over 3665697.83 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:17:21,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=149285.33333333334, ans=0.1
+2024-08-25 23:18:47,511 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=3.930e-01
+2024-08-25 23:19:03,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=149392.0, ans=0.0
+2024-08-25 23:19:09,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=149445.33333333334, ans=0.125
+2024-08-25 23:19:16,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=149445.33333333334, ans=0.1
+2024-08-25 23:19:22,622 INFO [train.py:1114] (3/4) Epoch 12, batch 650, loss[loss=0.2331, simple_loss=0.2925, pruned_loss=0.06241, ctc_loss=0.1223, over 19780.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2875, pruned_loss=0.06211, ctc_loss=0.1168, over 3716004.42 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:19:33,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149498.66666666666, ans=0.1
+2024-08-25 23:19:43,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=149552.0, ans=0.125
+2024-08-25 23:19:47,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=149552.0, ans=0.125
+2024-08-25 23:19:48,488 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.406e+02 1.911e+02 2.346e+02 2.911e+02 5.072e+02, threshold=4.691e+02, percent-clipped=6.0
+2024-08-25 23:19:54,548 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:20:41,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=149712.0, ans=0.07
+2024-08-25 23:20:49,402 INFO [train.py:1114] (3/4) Epoch 12, batch 700, loss[loss=0.2191, simple_loss=0.277, pruned_loss=0.05893, ctc_loss=0.1085, over 19722.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2876, pruned_loss=0.06196, ctc_loss=0.1165, over 3748205.91 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:20:59,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149765.33333333334, ans=0.1
+2024-08-25 23:21:09,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149818.66666666666, ans=0.1
+2024-08-25 23:21:43,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149978.66666666666, ans=0.125
+2024-08-25 23:21:47,357 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.56 vs. limit=15.0
+2024-08-25 23:21:49,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0
+2024-08-25 23:21:51,356 INFO [train.py:1114] (3/4) Epoch 12, batch 750, loss[loss=0.2367, simple_loss=0.2936, pruned_loss=0.06499, ctc_loss=0.1246, over 19494.00 frames. ], tot_loss[loss=0.23, simple_loss=0.288, pruned_loss=0.0625, ctc_loss=0.1176, over 3772565.94 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:21:51,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=150032.0, ans=0.04949747468305833
+2024-08-25 23:22:09,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150032.0, ans=0.0
+2024-08-25 23:22:14,643 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:22:17,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150085.33333333334, ans=0.1
+2024-08-25 23:22:20,744 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.992e+02 2.563e+02 3.460e+02 5.252e+02, threshold=5.125e+02, percent-clipped=3.0
+2024-08-25 23:22:38,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150138.66666666666, ans=0.125
+2024-08-25 23:22:40,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=150138.66666666666, ans=0.2
+2024-08-25 23:23:10,621 INFO [train.py:1114] (3/4) Epoch 12, batch 800, loss[loss=0.1996, simple_loss=0.2636, pruned_loss=0.05021, ctc_loss=0.08812, over 19402.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2882, pruned_loss=0.06254, ctc_loss=0.1174, over 3793435.95 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:23:58,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.16 vs. limit=15.0
+2024-08-25 23:24:07,591 INFO [train.py:1114] (3/4) Epoch 12, batch 850, loss[loss=0.2332, simple_loss=0.2956, pruned_loss=0.06074, ctc_loss=0.1231, over 19661.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2878, pruned_loss=0.06231, ctc_loss=0.1171, over 3813485.28 frames. ], batch size: 59, lr: 1.26e-02, grad_scale: 32.0
+2024-08-25 23:24:07,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=150565.33333333334, ans=0.0
+2024-08-25 23:24:07,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=150565.33333333334, ans=0.125
+2024-08-25 23:24:22,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=150618.66666666666, ans=0.0
+2024-08-25 23:24:30,647 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.376e+02 1.732e+02 2.149e+02 2.756e+02 4.869e+02, threshold=4.297e+02, percent-clipped=0.0
+2024-08-25 23:24:31,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.29 vs. limit=22.5
+2024-08-25 23:24:32,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=150618.66666666666, ans=0.2
+2024-08-25 23:24:50,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=150672.0, ans=0.0
+2024-08-25 23:24:51,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=150672.0, ans=0.125
+2024-08-25 23:25:06,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=150725.33333333334, ans=0.125
+2024-08-25 23:25:23,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150778.66666666666, ans=0.1
+2024-08-25 23:25:39,233 INFO [train.py:1114] (3/4) Epoch 12, batch 900, loss[loss=0.2076, simple_loss=0.2677, pruned_loss=0.05272, ctc_loss=0.1054, over 19783.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.2883, pruned_loss=0.06269, ctc_loss=0.1177, over 3817612.49 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:25:43,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=150832.0, ans=0.0
+2024-08-25 23:25:50,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150832.0, ans=0.0
+2024-08-25 23:26:04,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.24 vs. limit=22.5
+2024-08-25 23:26:18,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=150938.66666666666, ans=0.125
+2024-08-25 23:26:35,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=150992.0, ans=0.0
+2024-08-25 23:26:55,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151045.33333333334, ans=0.125
+2024-08-25 23:27:21,992 INFO [train.py:1114] (3/4) Epoch 12, batch 950, loss[loss=0.2108, simple_loss=0.2654, pruned_loss=0.05731, ctc_loss=0.1039, over 19499.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2891, pruned_loss=0.06309, ctc_loss=0.1186, over 3819388.05 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:27:45,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151152.0, ans=0.0
+2024-08-25 23:27:47,801 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.461e+02 1.727e+02 2.047e+02 2.468e+02 3.873e+02, threshold=4.093e+02, percent-clipped=0.0
+2024-08-25 23:28:21,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151205.33333333334, ans=0.0
+2024-08-25 23:28:55,953 INFO [train.py:1114] (3/4) Epoch 12, batch 1000, loss[loss=0.2264, simple_loss=0.2855, pruned_loss=0.061, ctc_loss=0.1134, over 19860.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.2901, pruned_loss=0.06385, ctc_loss=0.1201, over 3815895.59 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:30:02,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=151365.33333333334, ans=0.125
+2024-08-25 23:30:07,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.83 vs. limit=15.0
+2024-08-25 23:30:12,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151418.66666666666, ans=0.1
+2024-08-25 23:30:43,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.24 vs. limit=15.0
+2024-08-25 23:30:46,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.22 vs. limit=22.5
+2024-08-25 23:30:49,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=151578.66666666666, ans=0.025
+2024-08-25 23:30:55,445 INFO [train.py:1114] (3/4) Epoch 12, batch 1050, loss[loss=0.2292, simple_loss=0.2869, pruned_loss=0.06279, ctc_loss=0.115, over 19840.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.2889, pruned_loss=0.06312, ctc_loss=0.1187, over 3822208.51 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:31:14,267 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.748e+02 2.222e+02 2.883e+02 4.562e+02, threshold=4.445e+02, percent-clipped=3.0
+2024-08-25 23:31:15,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=151685.33333333334, ans=0.025
+2024-08-25 23:31:18,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.15 vs. limit=22.5
+2024-08-25 23:31:24,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=151738.66666666666, ans=0.0
+2024-08-25 23:31:39,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=151792.0, ans=0.125
+2024-08-25 23:31:46,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.87 vs. limit=15.0
+2024-08-25 23:32:14,302 INFO [train.py:1114] (3/4) Epoch 12, batch 1100, loss[loss=0.2477, simple_loss=0.3005, pruned_loss=0.07169, ctc_loss=0.129, over 19596.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2883, pruned_loss=0.06281, ctc_loss=0.1183, over 3831298.08 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:32:27,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151898.66666666666, ans=0.0
+2024-08-25 23:32:37,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.88 vs. limit=15.0
+2024-08-25 23:32:48,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152005.33333333334, ans=0.125
+2024-08-25 23:32:49,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=152005.33333333334, ans=0.125
+2024-08-25 23:32:54,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=152058.66666666666, ans=0.0
+2024-08-25 23:32:58,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152058.66666666666, ans=0.125
+2024-08-25 23:33:01,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=152058.66666666666, ans=0.025
+2024-08-25 23:33:05,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=152058.66666666666, ans=0.125
+2024-08-25 23:33:21,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=152112.0, ans=0.125
+2024-08-25 23:33:32,438 INFO [train.py:1114] (3/4) Epoch 12, batch 1150, loss[loss=0.2419, simple_loss=0.2967, pruned_loss=0.06791, ctc_loss=0.1281, over 19574.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2885, pruned_loss=0.06296, ctc_loss=0.1185, over 3830196.69 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:33:32,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=152165.33333333334, ans=0.0
+2024-08-25 23:34:03,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152218.66666666666, ans=0.125
+2024-08-25 23:34:07,239 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.763e+02 2.002e+02 2.335e+02 5.298e+02, threshold=4.005e+02, percent-clipped=1.0
+2024-08-25 23:34:33,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=152325.33333333334, ans=0.2
+2024-08-25 23:34:56,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152378.66666666666, ans=0.1
+2024-08-25 23:34:59,036 INFO [train.py:1114] (3/4) Epoch 12, batch 1200, loss[loss=0.253, simple_loss=0.3011, pruned_loss=0.07422, ctc_loss=0.1412, over 19844.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2895, pruned_loss=0.0633, ctc_loss=0.1195, over 3825521.01 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:35:25,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.89 vs. limit=15.0
+2024-08-25 23:35:44,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.87 vs. limit=15.0
+2024-08-25 23:35:45,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=15.0
+2024-08-25 23:36:08,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152698.66666666666, ans=0.1
+2024-08-25 23:36:09,955 INFO [train.py:1114] (3/4) Epoch 12, batch 1250, loss[loss=0.2408, simple_loss=0.2922, pruned_loss=0.06905, ctc_loss=0.1281, over 19521.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.2892, pruned_loss=0.0629, ctc_loss=0.1185, over 3843627.66 frames. ], batch size: 61, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:36:10,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=152698.66666666666, ans=0.125
+2024-08-25 23:36:10,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0
+2024-08-25 23:36:11,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0
+2024-08-25 23:36:34,026 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.907e+02 2.265e+02 2.785e+02 4.753e+02, threshold=4.530e+02, percent-clipped=2.0
+2024-08-25 23:36:51,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=152805.33333333334, ans=0.125
+2024-08-25 23:36:57,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0
+2024-08-25 23:37:18,927 INFO [train.py:1114] (3/4) Epoch 12, batch 1300, loss[loss=0.2246, simple_loss=0.2921, pruned_loss=0.05682, ctc_loss=0.1089, over 18845.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2878, pruned_loss=0.06198, ctc_loss=0.1167, over 3848769.64 frames. ], batch size: 76, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:37:30,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=12.0
+2024-08-25 23:37:48,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=153072.0, ans=0.04949747468305833
+2024-08-25 23:37:51,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=153072.0, ans=0.125
+2024-08-25 23:38:28,987 INFO [train.py:1114] (3/4) Epoch 12, batch 1350, loss[loss=0.2477, simple_loss=0.2993, pruned_loss=0.07157, ctc_loss=0.1326, over 19765.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2878, pruned_loss=0.06206, ctc_loss=0.1167, over 3858914.98 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0
+2024-08-25 23:38:38,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=153232.0, ans=0.0
+2024-08-25 23:38:46,285 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.300e+02 1.707e+02 2.039e+02 2.408e+02 4.402e+02, threshold=4.078e+02, percent-clipped=0.0
+2024-08-25 23:38:46,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153285.33333333334, ans=0.125
+2024-08-25 23:39:18,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=153392.0, ans=0.0
+2024-08-25 23:39:20,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.01 vs. limit=15.0
+2024-08-25 23:39:32,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=153445.33333333334, ans=0.125
+2024-08-25 23:39:43,073 INFO [train.py:1114] (3/4) Epoch 12, batch 1400, loss[loss=0.1911, simple_loss=0.2496, pruned_loss=0.04732, ctc_loss=0.09509, over 19697.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2878, pruned_loss=0.0622, ctc_loss=0.1171, over 3865222.20 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:39:45,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=153498.66666666666, ans=0.125
+2024-08-25 23:40:18,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=153605.33333333334, ans=0.0
+2024-08-25 23:40:28,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=153658.66666666666, ans=0.2
+2024-08-25 23:41:07,357 INFO [train.py:1114] (3/4) Epoch 12, batch 1450, loss[loss=0.2603, simple_loss=0.3218, pruned_loss=0.07253, ctc_loss=0.1341, over 19683.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2885, pruned_loss=0.06231, ctc_loss=0.1175, over 3862585.95 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:41:15,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153765.33333333334, ans=0.125
+2024-08-25 23:41:15,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.34 vs. limit=15.0
+2024-08-25 23:41:26,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0
+2024-08-25 23:41:27,990 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.446e+02 1.773e+02 2.135e+02 2.639e+02 4.435e+02, threshold=4.270e+02, percent-clipped=2.0
+2024-08-25 23:41:57,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=153872.0, ans=0.0
+2024-08-25 23:42:42,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=154032.0, ans=0.2
+2024-08-25 23:42:43,053 INFO [train.py:1114] (3/4) Epoch 12, batch 1500, loss[loss=0.25, simple_loss=0.3048, pruned_loss=0.07104, ctc_loss=0.1326, over 19599.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2884, pruned_loss=0.06206, ctc_loss=0.117, over 3863490.21 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:42:50,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154032.0, ans=0.125
+2024-08-25 23:42:50,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=154032.0, ans=22.5
+2024-08-25 23:43:21,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=154138.66666666666, ans=0.125
+2024-08-25 23:43:23,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=154138.66666666666, ans=0.125
+2024-08-25 23:43:50,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.12 vs. limit=6.0
+2024-08-25 23:44:04,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=154245.33333333334, ans=0.0
+2024-08-25 23:44:06,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.17 vs. limit=22.5
+2024-08-25 23:44:09,914 INFO [train.py:1114] (3/4) Epoch 12, batch 1550, loss[loss=0.2431, simple_loss=0.2959, pruned_loss=0.06895, ctc_loss=0.131, over 19622.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.289, pruned_loss=0.06276, ctc_loss=0.1182, over 3847926.80 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:44:16,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154298.66666666666, ans=0.125
+2024-08-25 23:44:21,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=154352.0, ans=0.0
+2024-08-25 23:44:43,862 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.860e+02 2.194e+02 2.828e+02 4.590e+02, threshold=4.388e+02, percent-clipped=1.0
+2024-08-25 23:45:04,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154405.33333333334, ans=0.1
+2024-08-25 23:46:25,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.18 vs. limit=12.0
+2024-08-25 23:46:27,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=154512.0, ans=0.125
+2024-08-25 23:46:32,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=154512.0, ans=0.125
+2024-08-25 23:46:36,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=154565.33333333334, ans=0.125
+2024-08-25 23:46:37,539 INFO [train.py:1114] (3/4) Epoch 12, batch 1600, loss[loss=0.2247, simple_loss=0.2882, pruned_loss=0.05838, ctc_loss=0.1108, over 19843.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.289, pruned_loss=0.06288, ctc_loss=0.1184, over 3836593.08 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:46:58,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=154565.33333333334, ans=0.125
+2024-08-25 23:47:14,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=154565.33333333334, ans=0.05
+2024-08-25 23:47:15,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0
+2024-08-25 23:47:45,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154672.0, ans=0.125
+2024-08-25 23:48:12,969 INFO [train.py:1114] (3/4) Epoch 12, batch 1650, loss[loss=0.2274, simple_loss=0.2961, pruned_loss=0.05808, ctc_loss=0.1062, over 19664.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2888, pruned_loss=0.06303, ctc_loss=0.1186, over 3832756.55 frames. ], batch size: 59, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:48:17,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=154832.0, ans=0.2
+2024-08-25 23:48:20,245 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 23:48:22,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154832.0, ans=0.1
+2024-08-25 23:48:32,978 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.751e+02 2.060e+02 2.481e+02 4.497e+02, threshold=4.120e+02, percent-clipped=1.0
+2024-08-25 23:48:35,721 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=8.901e-02
+2024-08-25 23:48:42,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=154938.66666666666, ans=0.0
+2024-08-25 23:48:52,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154938.66666666666, ans=0.125
+2024-08-25 23:48:52,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=154938.66666666666, ans=0.0
+2024-08-25 23:49:09,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.07 vs. limit=15.0
+2024-08-25 23:49:11,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.67 vs. limit=15.0
+2024-08-25 23:49:19,229 INFO [train.py:1114] (3/4) Epoch 12, batch 1700, loss[loss=0.2117, simple_loss=0.2582, pruned_loss=0.06007, ctc_loss=0.1125, over 19666.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2887, pruned_loss=0.06278, ctc_loss=0.1181, over 3847160.88 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:49:46,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=155205.33333333334, ans=0.0
+2024-08-25 23:50:23,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=155258.66666666666, ans=0.2
+2024-08-25 23:50:24,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155312.0, ans=0.125
+2024-08-25 23:50:36,445 INFO [train.py:1114] (3/4) Epoch 12, batch 1750, loss[loss=0.218, simple_loss=0.2649, pruned_loss=0.0628, ctc_loss=0.1139, over 19645.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2882, pruned_loss=0.0624, ctc_loss=0.1174, over 3851893.16 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:50:59,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=155365.33333333334, ans=0.0
+2024-08-25 23:51:08,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=155418.66666666666, ans=0.0
+2024-08-25 23:51:12,443 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.691e+02 1.944e+02 2.310e+02 4.068e+02, threshold=3.888e+02, percent-clipped=0.0
+2024-08-25 23:51:19,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=155472.0, ans=0.2
+2024-08-25 23:51:29,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155472.0, ans=0.125
+2024-08-25 23:51:45,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=155525.33333333334, ans=0.125
+2024-08-25 23:51:56,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0
+2024-08-25 23:52:03,817 INFO [train.py:1114] (3/4) Epoch 12, batch 1800, loss[loss=0.2132, simple_loss=0.2808, pruned_loss=0.05324, ctc_loss=0.09782, over 19599.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2882, pruned_loss=0.06253, ctc_loss=0.1176, over 3853702.72 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 32.0
+2024-08-25 23:52:08,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=155632.0, ans=0.125
+2024-08-25 23:52:42,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=155685.33333333334, ans=0.0
+2024-08-25 23:52:43,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=155685.33333333334, ans=0.0
+2024-08-25 23:53:22,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0
+2024-08-25 23:54:05,050 INFO [train.py:1114] (3/4) Epoch 12, batch 1850, loss[loss=0.2333, simple_loss=0.2973, pruned_loss=0.06151, ctc_loss=0.1155, over 19581.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2879, pruned_loss=0.06205, ctc_loss=0.1169, over 3855951.22 frames.
], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-25 23:54:39,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=155952.0, ans=0.125 +2024-08-25 23:54:44,939 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.455e+02 1.785e+02 2.050e+02 2.712e+02 4.249e+02, threshold=4.100e+02, percent-clipped=1.0 +2024-08-25 23:55:03,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=156005.33333333334, ans=0.5 +2024-08-25 23:55:16,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=12.0 +2024-08-25 23:55:26,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=156058.66666666666, ans=0.5 +2024-08-25 23:55:38,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=156112.0, ans=0.5 +2024-08-25 23:56:02,279 INFO [train.py:1114] (3/4) Epoch 12, batch 1900, loss[loss=0.2259, simple_loss=0.298, pruned_loss=0.05654, ctc_loss=0.1015, over 19656.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2878, pruned_loss=0.06178, ctc_loss=0.1162, over 3860707.06 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:56:02,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156165.33333333334, ans=0.1 +2024-08-25 23:56:30,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=156218.66666666666, ans=0.0 +2024-08-25 23:56:52,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.17 vs. limit=22.5 +2024-08-25 23:57:05,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156325.33333333334, ans=0.125 +2024-08-25 23:57:30,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=156325.33333333334, ans=0.125 +2024-08-25 23:57:35,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=156378.66666666666, ans=0.125 +2024-08-25 23:58:03,005 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=15.0 +2024-08-25 23:58:05,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=156378.66666666666, ans=0.0 +2024-08-25 23:58:28,733 INFO [train.py:1114] (3/4) Epoch 12, batch 1950, loss[loss=0.1953, simple_loss=0.2725, pruned_loss=0.04309, ctc_loss=0.08007, over 19587.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.2881, pruned_loss=0.06119, ctc_loss=0.115, over 3869202.79 frames. 
], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-25 23:58:54,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=156432.0, ans=0.04949747468305833 +2024-08-25 23:59:03,809 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.459e+02 1.700e+02 2.031e+02 2.417e+02 3.778e+02, threshold=4.063e+02, percent-clipped=0.0 +2024-08-25 23:59:37,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156645.33333333334, ans=0.1 +2024-08-25 23:59:51,574 INFO [train.py:1114] (3/4) Epoch 12, batch 2000, loss[loss=0.2117, simple_loss=0.259, pruned_loss=0.06005, ctc_loss=0.1107, over 19645.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.2884, pruned_loss=0.06117, ctc_loss=0.1151, over 3855475.62 frames. ], batch size: 45, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:00:07,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=15.0 +2024-08-26 00:00:30,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=156805.33333333334, ans=0.125 +2024-08-26 00:00:37,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156858.66666666666, ans=0.125 +2024-08-26 00:01:02,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.80 vs. limit=22.5 +2024-08-26 00:01:08,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=156912.0, ans=0.2 +2024-08-26 00:01:22,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=12.0 +2024-08-26 00:01:26,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.06 vs. limit=15.0 +2024-08-26 00:01:27,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=156965.33333333334, ans=0.125 +2024-08-26 00:01:28,934 INFO [train.py:1114] (3/4) Epoch 12, batch 2050, loss[loss=0.2013, simple_loss=0.2637, pruned_loss=0.0497, ctc_loss=0.0987, over 19724.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.2881, pruned_loss=0.06144, ctc_loss=0.1155, over 3851501.54 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:01:39,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=157018.66666666666, ans=0.0 +2024-08-26 00:01:46,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.778e+02 1.977e+02 2.412e+02 4.440e+02, threshold=3.953e+02, percent-clipped=1.0 +2024-08-26 00:01:51,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=157072.0, ans=0.0 +2024-08-26 00:02:19,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=157072.0, ans=0.125 +2024-08-26 00:03:00,089 INFO [train.py:1114] (3/4) Epoch 12, batch 2100, loss[loss=0.2328, simple_loss=0.2913, pruned_loss=0.06313, ctc_loss=0.1201, over 19761.00 frames. 
], tot_loss[loss=0.2281, simple_loss=0.2876, pruned_loss=0.06125, ctc_loss=0.1153, over 3858296.91 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 00:36:32,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=157392.0, ans=0.125 +2024-08-26 00:40:03,633 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.649e-03 +2024-08-26 00:49:44,343 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.64 vs. limit=15.0 +2024-08-26 00:52:11,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=157445.33333333334, ans=0.125 +2024-08-26 00:56:02,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=157498.66666666666, ans=0.125 +2024-08-26 00:56:07,938 INFO [train.py:1114] (3/4) Epoch 12, batch 2150, loss[loss=0.2237, simple_loss=0.2765, pruned_loss=0.06315, ctc_loss=0.1115, over 19848.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.287, pruned_loss=0.06116, ctc_loss=0.1151, over 3869541.67 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 32.0 +2024-08-26 01:08:00,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=157552.0, ans=0.125 +2024-08-26 01:09:53,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.788e+02 2.174e+02 2.705e+02 6.148e+02, threshold=4.348e+02, percent-clipped=11.0 +2024-08-26 01:34:52,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=157712.0, ans=0.125 +2024-08-26 01:37:28,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=157765.33333333334, ans=0.0 +2024-08-26 01:37:35,690 INFO [train.py:1114] (3/4) Epoch 12, batch 2200, loss[loss=0.2226, simple_loss=0.2926, pruned_loss=0.05495, ctc_loss=0.1067, over 19575.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.2869, pruned_loss=0.06104, ctc_loss=0.115, over 3867635.59 frames. 
], batch size: 57, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 01:39:25,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=157765.33333333334, ans=0.125 +2024-08-26 01:39:25,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=157765.33333333334, ans=0.0 +2024-08-26 01:39:40,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=157765.33333333334, ans=0.0 +2024-08-26 01:43:04,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157818.66666666666, ans=0.1 +2024-08-26 01:43:19,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157818.66666666666, ans=0.1 +2024-08-26 01:44:08,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=157818.66666666666, ans=0.0 +2024-08-26 01:49:04,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=157872.0, ans=0.04949747468305833 +2024-08-26 01:50:17,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157925.33333333334, ans=0.1 +2024-08-26 01:53:53,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=157925.33333333334, ans=0.125 +2024-08-26 01:57:29,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=158032.0, ans=0.125 +2024-08-26 01:57:30,320 INFO [train.py:1114] (3/4) Epoch 12, batch 2250, loss[loss=0.2146, simple_loss=0.283, pruned_loss=0.05217, ctc_loss=0.1045, over 19635.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.287, pruned_loss=0.06104, ctc_loss=0.1149, over 3868137.32 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:02:32,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.78 vs. limit=15.0 +2024-08-26 02:03:20,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.61 vs. 
limit=22.5 +2024-08-26 02:03:57,656 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:04:28,479 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.839e+02 2.199e+02 2.577e+02 6.358e+02, threshold=4.399e+02, percent-clipped=1.0 +2024-08-26 02:05:36,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158138.66666666666, ans=0.1 +2024-08-26 02:07:31,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=158138.66666666666, ans=0.0 +2024-08-26 02:10:34,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158245.33333333334, ans=0.125 +2024-08-26 02:10:34,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=158245.33333333334, ans=15.0 +2024-08-26 02:13:21,298 INFO [train.py:1114] (3/4) Epoch 12, batch 2300, loss[loss=0.2083, simple_loss=0.2694, pruned_loss=0.05339, ctc_loss=0.1009, over 19507.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2865, pruned_loss=0.06136, ctc_loss=0.1155, over 3861735.12 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:14:01,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=158298.66666666666, ans=0.0 +2024-08-26 02:14:01,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=158298.66666666666, ans=0.2 +2024-08-26 02:14:59,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158352.0, ans=0.125 +2024-08-26 02:15:00,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=158352.0, ans=0.2 +2024-08-26 02:16:17,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=158405.33333333334, ans=0.0 +2024-08-26 02:20:01,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=158458.66666666666, ans=0.025 +2024-08-26 02:22:39,623 INFO [train.py:1114] (3/4) Epoch 12, batch 2350, loss[loss=0.2548, simple_loss=0.3095, pruned_loss=0.07378, ctc_loss=0.1313, over 19675.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2867, pruned_loss=0.06175, ctc_loss=0.1161, over 3864136.16 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 02:23:23,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=158565.33333333334, ans=0.0 +2024-08-26 02:23:52,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.74 vs. 
limit=15.0 +2024-08-26 02:25:18,441 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.991e+02 2.536e+02 3.183e+02 5.552e+02, threshold=5.072e+02, percent-clipped=5.0 +2024-08-26 02:25:33,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=158672.0, ans=0.2 +2024-08-26 02:25:39,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=158672.0, ans=0.2 +2024-08-26 02:27:03,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=158725.33333333334, ans=0.0 +2024-08-26 02:28:23,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=158778.66666666666, ans=0.125 +2024-08-26 02:30:54,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=158778.66666666666, ans=0.0 +2024-08-26 02:30:58,339 INFO [train.py:1114] (3/4) Epoch 12, batch 2400, loss[loss=0.2266, simple_loss=0.2923, pruned_loss=0.05776, ctc_loss=0.1136, over 19333.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2894, pruned_loss=0.06276, ctc_loss=0.1177, over 3857334.42 frames. ], batch size: 71, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:31:10,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=158832.0, ans=0.125 +2024-08-26 02:31:12,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=158832.0, ans=0.0 +2024-08-26 02:31:28,962 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:35:11,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.23 vs. limit=15.0 +2024-08-26 02:37:05,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=158992.0, ans=0.07 +2024-08-26 02:38:19,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=159045.33333333334, ans=0.125 +2024-08-26 02:38:22,380 INFO [train.py:1114] (3/4) Epoch 12, batch 2450, loss[loss=0.3018, simple_loss=0.3308, pruned_loss=0.09814, ctc_loss=0.1913, over 13367.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.2936, pruned_loss=0.06576, ctc_loss=0.1237, over 3729493.54 frames. ], batch size: 140, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 02:38:43,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159098.66666666666, ans=0.1 +2024-08-26 02:39:05,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=159098.66666666666, ans=0.125 +2024-08-26 02:39:12,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159152.0, ans=0.125 +2024-08-26 02:39:42,305 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.859e+02 2.162e+02 2.447e+02 4.124e+02, threshold=4.324e+02, percent-clipped=0.0 +2024-08-26 02:40:40,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.08 vs. 
limit=15.0 +2024-08-26 02:40:53,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-08-26 02:43:45,616 INFO [train.py:1114] (3/4) Epoch 13, batch 0, loss[loss=0.2296, simple_loss=0.2769, pruned_loss=0.06711, ctc_loss=0.1201, over 19823.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2769, pruned_loss=0.06711, ctc_loss=0.1201, over 19823.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 32.0 +2024-08-26 02:43:45,617 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 02:45:27,903 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1972, simple_loss=0.2835, pruned_loss=0.04113, ctc_loss=0.07151, over 944034.00 frames. +2024-08-26 02:45:27,904 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-26 02:45:31,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159306.66666666666, ans=0.0 +2024-08-26 02:45:54,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.77 vs. limit=22.5 +2024-08-26 02:46:14,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=159413.33333333334, ans=0.2 +2024-08-26 02:46:22,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=159413.33333333334, ans=0.04949747468305833 +2024-08-26 02:48:06,363 INFO [train.py:1114] (3/4) Epoch 13, batch 50, loss[loss=0.2081, simple_loss=0.2665, pruned_loss=0.05372, ctc_loss=0.1059, over 19731.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.2899, pruned_loss=0.06301, ctc_loss=0.1185, over 844332.57 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:48:06,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=159573.33333333334, ans=0.2 +2024-08-26 02:48:55,465 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.822e+02 2.122e+02 2.766e+02 5.339e+02, threshold=4.244e+02, percent-clipped=3.0 +2024-08-26 02:48:59,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0 +2024-08-26 02:49:20,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.10 vs. limit=22.5 +2024-08-26 02:49:26,907 INFO [train.py:1114] (3/4) Epoch 13, batch 100, loss[loss=0.24, simple_loss=0.2925, pruned_loss=0.06859, ctc_loss=0.126, over 19736.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2908, pruned_loss=0.06268, ctc_loss=0.118, over 1499336.38 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:49:29,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159840.0, ans=0.125 +2024-08-26 02:49:31,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159840.0, ans=0.125 +2024-08-26 02:49:51,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.21 vs. 
limit=15.0 +2024-08-26 02:50:20,151 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 02:50:22,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159946.66666666666, ans=0.125 +2024-08-26 02:50:40,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=160000.0, ans=0.025 +2024-08-26 02:50:46,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160000.0, ans=0.1 +2024-08-26 02:51:02,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 02:51:27,229 INFO [train.py:1114] (3/4) Epoch 13, batch 150, loss[loss=0.2312, simple_loss=0.2744, pruned_loss=0.06927, ctc_loss=0.1234, over 19721.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2871, pruned_loss=0.06053, ctc_loss=0.1137, over 2028110.84 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:51:45,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=160106.66666666666, ans=0.07 +2024-08-26 02:52:36,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=160213.33333333334, ans=0.125 +2024-08-26 02:52:48,526 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.693e+02 1.889e+02 2.276e+02 3.515e+02, threshold=3.778e+02, percent-clipped=0.0 +2024-08-26 02:53:36,263 INFO [train.py:1114] (3/4) Epoch 13, batch 200, loss[loss=0.2952, simple_loss=0.3317, pruned_loss=0.09343, ctc_loss=0.1798, over 18464.00 frames. ], tot_loss[loss=0.227, simple_loss=0.2865, pruned_loss=0.06088, ctc_loss=0.1146, over 2435881.61 frames. ], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:53:38,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160373.33333333334, ans=0.125 +2024-08-26 02:53:46,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160373.33333333334, ans=0.125 +2024-08-26 02:54:11,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=160480.0, ans=0.035 +2024-08-26 02:54:27,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=160480.0, ans=0.0 +2024-08-26 02:54:51,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160533.33333333334, ans=0.1 +2024-08-26 02:55:15,708 INFO [train.py:1114] (3/4) Epoch 13, batch 250, loss[loss=0.2501, simple_loss=0.311, pruned_loss=0.06899, ctc_loss=0.128, over 19430.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.2873, pruned_loss=0.06137, ctc_loss=0.1156, over 2756746.88 frames. ], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:55:24,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. 
limit=6.0 +2024-08-26 02:55:34,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=160693.33333333334, ans=0.125 +2024-08-26 02:55:40,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.44 vs. limit=15.0 +2024-08-26 02:55:44,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160746.66666666666, ans=0.125 +2024-08-26 02:55:45,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160746.66666666666, ans=0.125 +2024-08-26 02:55:47,660 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.754e+02 2.188e+02 2.577e+02 4.403e+02, threshold=4.375e+02, percent-clipped=2.0 +2024-08-26 02:55:48,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=160800.0, ans=0.0 +2024-08-26 02:56:09,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.49 vs. limit=10.0 +2024-08-26 02:56:43,559 INFO [train.py:1114] (3/4) Epoch 13, batch 300, loss[loss=0.2306, simple_loss=0.2969, pruned_loss=0.0601, ctc_loss=0.1106, over 19508.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.2866, pruned_loss=0.0609, ctc_loss=0.1148, over 3001398.51 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:57:37,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=161120.0, ans=0.0 +2024-08-26 02:57:50,480 INFO [train.py:1114] (3/4) Epoch 13, batch 350, loss[loss=0.213, simple_loss=0.2684, pruned_loss=0.05757, ctc_loss=0.1063, over 19777.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2871, pruned_loss=0.06111, ctc_loss=0.1151, over 3190647.47 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 02:58:08,302 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.59 vs. limit=15.0 +2024-08-26 02:58:25,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.772e+02 2.039e+02 2.354e+02 3.759e+02, threshold=4.079e+02, percent-clipped=0.0 +2024-08-26 02:58:37,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161333.33333333334, ans=0.125 +2024-08-26 02:59:24,148 INFO [train.py:1114] (3/4) Epoch 13, batch 400, loss[loss=0.2504, simple_loss=0.3053, pruned_loss=0.07128, ctc_loss=0.1325, over 19494.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2871, pruned_loss=0.06131, ctc_loss=0.1152, over 3343038.56 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 02:59:29,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161440.0, ans=0.125 +2024-08-26 02:59:52,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.58 vs. 
limit=10.0 +2024-08-26 02:59:54,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=161440.0, ans=0.0 +2024-08-26 03:00:56,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=161653.33333333334, ans=0.2 +2024-08-26 03:00:58,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161653.33333333334, ans=0.1 +2024-08-26 03:01:25,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=161653.33333333334, ans=0.0 +2024-08-26 03:01:42,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161706.66666666666, ans=0.1 +2024-08-26 03:01:53,833 INFO [train.py:1114] (3/4) Epoch 13, batch 450, loss[loss=0.242, simple_loss=0.3026, pruned_loss=0.06418, ctc_loss=0.1324, over 19622.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2872, pruned_loss=0.06115, ctc_loss=0.1148, over 3451375.65 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:02:13,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=161760.0, ans=0.125 +2024-08-26 03:03:10,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.726e+02 2.085e+02 2.754e+02 4.301e+02, threshold=4.170e+02, percent-clipped=3.0 +2024-08-26 03:03:51,746 INFO [train.py:1114] (3/4) Epoch 13, batch 500, loss[loss=0.2415, simple_loss=0.3005, pruned_loss=0.06648, ctc_loss=0.1237, over 19673.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2861, pruned_loss=0.06085, ctc_loss=0.1143, over 3546446.50 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:04:15,958 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-08-26 03:04:38,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=162026.66666666666, ans=0.125 +2024-08-26 03:05:03,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=162080.0, ans=0.0 +2024-08-26 03:05:11,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=162080.0, ans=0.1 +2024-08-26 03:05:17,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.16 vs. limit=22.5 +2024-08-26 03:05:24,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=162133.33333333334, ans=0.0 +2024-08-26 03:05:47,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162186.66666666666, ans=0.125 +2024-08-26 03:06:03,079 INFO [train.py:1114] (3/4) Epoch 13, batch 550, loss[loss=0.2227, simple_loss=0.2862, pruned_loss=0.0576, ctc_loss=0.1102, over 19375.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2861, pruned_loss=0.0608, ctc_loss=0.1142, over 3609598.00 frames. 
], batch size: 71, lr: 1.17e-02, grad_scale: 32.0 +2024-08-26 03:06:12,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=162240.0, ans=0.125 +2024-08-26 03:06:14,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.96 vs. limit=15.0 +2024-08-26 03:06:19,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=162293.33333333334, ans=0.025 +2024-08-26 03:06:21,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=162293.33333333334, ans=0.125 +2024-08-26 03:06:37,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=162346.66666666666, ans=0.0 +2024-08-26 03:06:44,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162346.66666666666, ans=0.0 +2024-08-26 03:06:47,056 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.318e+02 1.758e+02 1.954e+02 2.485e+02 4.688e+02, threshold=3.908e+02, percent-clipped=2.0 +2024-08-26 03:07:11,086 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:07:24,266 INFO [train.py:1114] (3/4) Epoch 13, batch 600, loss[loss=0.2241, simple_loss=0.2972, pruned_loss=0.05455, ctc_loss=0.105, over 19364.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2855, pruned_loss=0.06005, ctc_loss=0.113, over 3666015.15 frames. ], batch size: 67, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:07:41,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.00 vs. limit=22.5 +2024-08-26 03:07:46,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=162560.0, ans=0.125 +2024-08-26 03:07:47,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162560.0, ans=0.125 +2024-08-26 03:08:27,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=162666.66666666666, ans=0.0 +2024-08-26 03:09:12,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=162720.0, ans=0.125 +2024-08-26 03:09:12,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162720.0, ans=0.0 +2024-08-26 03:09:14,966 INFO [train.py:1114] (3/4) Epoch 13, batch 650, loss[loss=0.2214, simple_loss=0.2806, pruned_loss=0.05899, ctc_loss=0.1104, over 19780.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2846, pruned_loss=0.05953, ctc_loss=0.112, over 3716724.57 frames. 
], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:09:29,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=162773.33333333334, ans=0.125 +2024-08-26 03:09:30,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=162826.66666666666, ans=0.125 +2024-08-26 03:09:41,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.91 vs. limit=8.0 +2024-08-26 03:09:44,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=162826.66666666666, ans=0.0 +2024-08-26 03:09:53,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.02 vs. limit=15.0 +2024-08-26 03:09:54,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162880.0, ans=0.1 +2024-08-26 03:09:58,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=162880.0, ans=0.125 +2024-08-26 03:10:08,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162880.0, ans=0.1 +2024-08-26 03:10:09,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.755e+02 2.119e+02 2.960e+02 5.119e+02, threshold=4.237e+02, percent-clipped=6.0 +2024-08-26 03:10:24,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162933.33333333334, ans=0.125 +2024-08-26 03:10:27,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=162986.66666666666, ans=0.125 +2024-08-26 03:10:30,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=162986.66666666666, ans=0.07 +2024-08-26 03:10:32,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162986.66666666666, ans=0.0 +2024-08-26 03:10:39,733 INFO [train.py:1114] (3/4) Epoch 13, batch 700, loss[loss=0.2075, simple_loss=0.2691, pruned_loss=0.05257, ctc_loss=0.1019, over 19732.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.285, pruned_loss=0.05988, ctc_loss=0.1126, over 3748157.86 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:10:40,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.80 vs. limit=22.5 +2024-08-26 03:10:40,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.79 vs. 
limit=15.0 +2024-08-26 03:10:52,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163093.33333333334, ans=0.125 +2024-08-26 03:10:56,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=163093.33333333334, ans=0.125 +2024-08-26 03:11:07,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=163146.66666666666, ans=0.125 +2024-08-26 03:11:27,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=163200.0, ans=0.2 +2024-08-26 03:11:50,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=163253.33333333334, ans=0.125 +2024-08-26 03:11:52,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163253.33333333334, ans=0.125 +2024-08-26 03:12:00,798 INFO [train.py:1114] (3/4) Epoch 13, batch 750, loss[loss=0.2547, simple_loss=0.3048, pruned_loss=0.07325, ctc_loss=0.1451, over 19486.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2841, pruned_loss=0.05948, ctc_loss=0.1119, over 3774054.91 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:12:08,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163306.66666666666, ans=0.1 +2024-08-26 03:12:16,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=163360.0, ans=0.2 +2024-08-26 03:12:41,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 03:12:43,023 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.800e+02 2.310e+02 2.882e+02 4.749e+02, threshold=4.619e+02, percent-clipped=2.0 +2024-08-26 03:12:43,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 03:12:44,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=163466.66666666666, ans=0.025 +2024-08-26 03:12:51,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-08-26 03:13:43,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=163520.0, ans=0.0 +2024-08-26 03:13:56,358 INFO [train.py:1114] (3/4) Epoch 13, batch 800, loss[loss=0.212, simple_loss=0.2647, pruned_loss=0.05762, ctc_loss=0.1103, over 19823.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2839, pruned_loss=0.05942, ctc_loss=0.1117, over 3795086.49 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:14:26,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.31 vs. limit=15.0 +2024-08-26 03:14:28,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.67 vs. 
limit=22.5 +2024-08-26 03:14:29,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=163680.0, ans=0.1 +2024-08-26 03:14:35,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0 +2024-08-26 03:14:47,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=163733.33333333334, ans=0.125 +2024-08-26 03:15:00,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=163733.33333333334, ans=0.2 +2024-08-26 03:15:09,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=163786.66666666666, ans=0.2 +2024-08-26 03:15:13,888 INFO [train.py:1114] (3/4) Epoch 13, batch 850, loss[loss=0.2436, simple_loss=0.3068, pruned_loss=0.06527, ctc_loss=0.1246, over 19677.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2838, pruned_loss=0.05932, ctc_loss=0.1116, over 3813675.41 frames. ], batch size: 59, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:15:55,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=163893.33333333334, ans=0.0 +2024-08-26 03:15:58,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=163946.66666666666, ans=0.025 +2024-08-26 03:16:04,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.85 vs. limit=15.0 +2024-08-26 03:16:11,621 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.727e+02 1.948e+02 2.271e+02 3.773e+02, threshold=3.897e+02, percent-clipped=0.0 +2024-08-26 03:16:35,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=164053.33333333334, ans=0.125 +2024-08-26 03:16:39,635 INFO [train.py:1114] (3/4) Epoch 13, batch 900, loss[loss=0.2259, simple_loss=0.2823, pruned_loss=0.06184, ctc_loss=0.1144, over 19407.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2845, pruned_loss=0.05959, ctc_loss=0.1122, over 3816310.23 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:16:42,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.39 vs. limit=15.0 +2024-08-26 03:16:52,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-08-26 03:17:39,435 INFO [train.py:1114] (3/4) Epoch 13, batch 950, loss[loss=0.2307, simple_loss=0.2829, pruned_loss=0.0648, ctc_loss=0.1222, over 19485.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.285, pruned_loss=0.05992, ctc_loss=0.1126, over 3817781.80 frames. 
], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:17:53,172 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:18:24,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=164426.66666666666, ans=0.0 +2024-08-26 03:18:38,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=164480.0, ans=0.125 +2024-08-26 03:18:52,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.763e+02 2.081e+02 2.549e+02 5.575e+02, threshold=4.162e+02, percent-clipped=2.0 +2024-08-26 03:18:55,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=164533.33333333334, ans=0.2 +2024-08-26 03:19:15,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=164586.66666666666, ans=0.0 +2024-08-26 03:19:29,889 INFO [train.py:1114] (3/4) Epoch 13, batch 1000, loss[loss=0.219, simple_loss=0.2812, pruned_loss=0.05664, ctc_loss=0.1089, over 19858.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2859, pruned_loss=0.06079, ctc_loss=0.1143, over 3813887.63 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:19:31,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=164640.0, ans=0.125 +2024-08-26 03:19:55,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.64 vs. limit=15.0 +2024-08-26 03:20:16,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=164800.0, ans=0.07 +2024-08-26 03:20:21,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0 +2024-08-26 03:20:35,692 INFO [train.py:1114] (3/4) Epoch 13, batch 1050, loss[loss=0.2481, simple_loss=0.3028, pruned_loss=0.07082, ctc_loss=0.1292, over 19851.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.285, pruned_loss=0.0604, ctc_loss=0.1137, over 3821461.55 frames. ], batch size: 57, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:20:41,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=164906.66666666666, ans=0.125 +2024-08-26 03:20:41,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=164906.66666666666, ans=0.1 +2024-08-26 03:21:05,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.27 vs. 
limit=15.0 +2024-08-26 03:21:08,101 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.698e+02 1.997e+02 2.318e+02 3.616e+02, threshold=3.994e+02, percent-clipped=0.0 +2024-08-26 03:21:14,253 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 03:21:18,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=165066.66666666666, ans=0.0 +2024-08-26 03:21:32,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.37 vs. limit=10.0 +2024-08-26 03:21:37,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.15 vs. limit=10.0 +2024-08-26 03:21:40,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=165120.0, ans=0.125 +2024-08-26 03:21:42,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=165120.0, ans=0.125 +2024-08-26 03:21:44,513 INFO [train.py:1114] (3/4) Epoch 13, batch 1100, loss[loss=0.2617, simple_loss=0.3039, pruned_loss=0.07937, ctc_loss=0.1518, over 19582.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2855, pruned_loss=0.06058, ctc_loss=0.1142, over 3828495.58 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 03:21:54,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.04 vs. limit=15.0 +2024-08-26 03:21:57,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=165226.66666666666, ans=0.125 +2024-08-26 03:22:08,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165226.66666666666, ans=0.1 +2024-08-26 03:22:12,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=165280.0, ans=0.0 +2024-08-26 03:22:21,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=165333.33333333334, ans=0.0 +2024-08-26 03:22:46,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=165386.66666666666, ans=0.025 +2024-08-26 03:22:54,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=165386.66666666666, ans=0.125 +2024-08-26 03:22:57,608 INFO [train.py:1114] (3/4) Epoch 13, batch 1150, loss[loss=0.238, simple_loss=0.3059, pruned_loss=0.06172, ctc_loss=0.1166, over 19573.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2858, pruned_loss=0.06088, ctc_loss=0.1146, over 3826488.39 frames. 
], batch size: 52, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:23:10,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=165493.33333333334, ans=0.125 +2024-08-26 03:23:15,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=165493.33333333334, ans=0.125 +2024-08-26 03:23:38,672 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.729e+02 2.006e+02 2.456e+02 7.202e+02, threshold=4.012e+02, percent-clipped=3.0 +2024-08-26 03:23:42,122 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.31 vs. limit=22.5 +2024-08-26 03:23:44,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.80 vs. limit=15.0 +2024-08-26 03:23:45,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=165600.0, ans=0.025 +2024-08-26 03:23:47,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=165600.0, ans=0.0 +2024-08-26 03:23:51,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=165600.0, ans=0.125 +2024-08-26 03:24:03,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=165653.33333333334, ans=0.025 +2024-08-26 03:24:09,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.54 vs. limit=6.0 +2024-08-26 03:24:11,586 INFO [train.py:1114] (3/4) Epoch 13, batch 1200, loss[loss=0.2527, simple_loss=0.3101, pruned_loss=0.07018, ctc_loss=0.1372, over 19836.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.2871, pruned_loss=0.06122, ctc_loss=0.1152, over 3822233.55 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:24:45,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=165760.0, ans=0.0 +2024-08-26 03:24:53,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=15.0 +2024-08-26 03:24:53,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=165813.33333333334, ans=0.125 +2024-08-26 03:25:23,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=165866.66666666666, ans=0.125 +2024-08-26 03:25:25,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=165866.66666666666, ans=0.0 +2024-08-26 03:26:20,505 INFO [train.py:1114] (3/4) Epoch 13, batch 1250, loss[loss=0.2245, simple_loss=0.2958, pruned_loss=0.05548, ctc_loss=0.1055, over 19559.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.2872, pruned_loss=0.06082, ctc_loss=0.1145, over 3841072.53 frames. 
], batch size: 61, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:26:28,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=165973.33333333334, ans=0.025 +2024-08-26 03:26:33,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.48 vs. limit=22.5 +2024-08-26 03:26:50,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=166026.66666666666, ans=0.0 +2024-08-26 03:26:58,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=166080.0, ans=0.0 +2024-08-26 03:27:23,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.311e+02 1.715e+02 1.869e+02 2.285e+02 3.930e+02, threshold=3.738e+02, percent-clipped=0.0 +2024-08-26 03:28:00,524 INFO [train.py:1114] (3/4) Epoch 13, batch 1300, loss[loss=0.2574, simple_loss=0.307, pruned_loss=0.07534, ctc_loss=0.1427, over 18839.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.2866, pruned_loss=0.06072, ctc_loss=0.1143, over 3844356.87 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:28:04,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166240.0, ans=0.1 +2024-08-26 03:28:38,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=166293.33333333334, ans=0.0 +2024-08-26 03:29:19,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=166346.66666666666, ans=0.0 +2024-08-26 03:30:19,039 INFO [train.py:1114] (3/4) Epoch 13, batch 1350, loss[loss=0.215, simple_loss=0.2825, pruned_loss=0.05329, ctc_loss=0.1023, over 19755.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2865, pruned_loss=0.06053, ctc_loss=0.1139, over 3856048.39 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:30:29,038 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=7.185e-02 +2024-08-26 03:30:55,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.86 vs. limit=15.0 +2024-08-26 03:31:08,779 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.377e+02 1.736e+02 2.053e+02 2.622e+02 5.263e+02, threshold=4.106e+02, percent-clipped=6.0 +2024-08-26 03:31:09,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=166613.33333333334, ans=0.125 +2024-08-26 03:31:20,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=166666.66666666666, ans=0.0 +2024-08-26 03:31:40,383 INFO [train.py:1114] (3/4) Epoch 13, batch 1400, loss[loss=0.1921, simple_loss=0.2527, pruned_loss=0.04755, ctc_loss=0.09107, over 19652.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2852, pruned_loss=0.06002, ctc_loss=0.1133, over 3862951.88 frames. 
], batch size: 46, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:31:49,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=166773.33333333334, ans=0.125 +2024-08-26 03:31:49,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=166773.33333333334, ans=0.125 +2024-08-26 03:31:55,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166773.33333333334, ans=0.125 +2024-08-26 03:32:14,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=166773.33333333334, ans=0.0 +2024-08-26 03:32:39,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=166880.0, ans=0.025 +2024-08-26 03:32:59,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=166933.33333333334, ans=0.0 +2024-08-26 03:33:20,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=167040.0, ans=0.2 +2024-08-26 03:33:21,481 INFO [train.py:1114] (3/4) Epoch 13, batch 1450, loss[loss=0.2239, simple_loss=0.2965, pruned_loss=0.05474, ctc_loss=0.1046, over 19659.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2861, pruned_loss=0.06038, ctc_loss=0.1139, over 3861419.28 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:33:59,234 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.391e+02 1.756e+02 1.937e+02 2.380e+02 3.895e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-26 03:34:09,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=167200.0, ans=0.05 +2024-08-26 03:34:22,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=167253.33333333334, ans=0.2 +2024-08-26 03:34:23,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=167253.33333333334, ans=0.125 +2024-08-26 03:34:32,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=15.0 +2024-08-26 03:34:33,948 INFO [train.py:1114] (3/4) Epoch 13, batch 1500, loss[loss=0.2601, simple_loss=0.3139, pruned_loss=0.0758, ctc_loss=0.1368, over 19575.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2859, pruned_loss=0.05986, ctc_loss=0.1128, over 3861637.72 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:35:12,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.21 vs. limit=15.0 +2024-08-26 03:35:16,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=167466.66666666666, ans=0.2 +2024-08-26 03:35:43,253 INFO [train.py:1114] (3/4) Epoch 13, batch 1550, loss[loss=0.2167, simple_loss=0.2894, pruned_loss=0.0533, ctc_loss=0.09328, over 19596.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2858, pruned_loss=0.06002, ctc_loss=0.113, over 3845387.19 frames. 
], batch size: 60, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:35:47,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=167573.33333333334, ans=0.025 +2024-08-26 03:35:49,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=167573.33333333334, ans=0.0 +2024-08-26 03:36:35,361 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.778e+02 2.054e+02 2.767e+02 5.252e+02, threshold=4.108e+02, percent-clipped=7.0 +2024-08-26 03:36:47,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=167733.33333333334, ans=0.125 +2024-08-26 03:36:56,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.21 vs. limit=22.5 +2024-08-26 03:37:03,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=167786.66666666666, ans=0.0 +2024-08-26 03:37:05,305 INFO [train.py:1114] (3/4) Epoch 13, batch 1600, loss[loss=0.2097, simple_loss=0.284, pruned_loss=0.04839, ctc_loss=0.09669, over 19833.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2855, pruned_loss=0.05998, ctc_loss=0.1131, over 3834864.77 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:37:05,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=167840.0, ans=0.2 +2024-08-26 03:37:10,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.90 vs. limit=15.0 +2024-08-26 03:38:12,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=168000.0, ans=0.125 +2024-08-26 03:38:12,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168000.0, ans=0.1 +2024-08-26 03:38:33,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=168053.33333333334, ans=0.04949747468305833 +2024-08-26 03:38:33,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=168053.33333333334, ans=0.0 +2024-08-26 03:38:35,988 INFO [train.py:1114] (3/4) Epoch 13, batch 1650, loss[loss=0.2346, simple_loss=0.2956, pruned_loss=0.063, ctc_loss=0.1192, over 19674.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2861, pruned_loss=0.06045, ctc_loss=0.1143, over 3831702.51 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 03:39:20,049 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.425e+02 1.825e+02 2.209e+02 2.614e+02 4.167e+02, threshold=4.418e+02, percent-clipped=2.0 +2024-08-26 03:40:00,075 INFO [train.py:1114] (3/4) Epoch 13, batch 1700, loss[loss=0.221, simple_loss=0.264, pruned_loss=0.06478, ctc_loss=0.1214, over 19690.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.2861, pruned_loss=0.06055, ctc_loss=0.1142, over 3846185.05 frames. ], batch size: 46, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:40:12,931 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.64 vs. 
limit=15.0 +2024-08-26 03:40:13,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=168373.33333333334, ans=0.2 +2024-08-26 03:40:20,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=168426.66666666666, ans=0.125 +2024-08-26 03:40:33,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=168426.66666666666, ans=0.0 +2024-08-26 03:41:17,849 INFO [train.py:1114] (3/4) Epoch 13, batch 1750, loss[loss=0.1875, simple_loss=0.2506, pruned_loss=0.04469, ctc_loss=0.08762, over 19656.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2854, pruned_loss=0.05998, ctc_loss=0.113, over 3851308.13 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:41:18,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168640.0, ans=0.125 +2024-08-26 03:41:25,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168640.0, ans=0.1 +2024-08-26 03:41:28,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168640.0, ans=0.125 +2024-08-26 03:41:58,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=168746.66666666666, ans=0.0 +2024-08-26 03:41:59,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168746.66666666666, ans=0.125 +2024-08-26 03:42:01,092 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.702e+02 2.065e+02 2.813e+02 5.109e+02, threshold=4.129e+02, percent-clipped=2.0 +2024-08-26 03:42:12,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168800.0, ans=0.1 +2024-08-26 03:42:21,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=168800.0, ans=0.025 +2024-08-26 03:42:33,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=12.0 +2024-08-26 03:42:45,936 INFO [train.py:1114] (3/4) Epoch 13, batch 1800, loss[loss=0.2382, simple_loss=0.2973, pruned_loss=0.06532, ctc_loss=0.121, over 19609.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2854, pruned_loss=0.05991, ctc_loss=0.1125, over 3853420.17 frames. 
], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:42:50,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=168906.66666666666, ans=0.2 +2024-08-26 03:43:21,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=169013.33333333334, ans=0.0 +2024-08-26 03:43:24,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=169066.66666666666, ans=0.125 +2024-08-26 03:43:44,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=169120.0, ans=0.05 +2024-08-26 03:43:53,525 INFO [train.py:1114] (3/4) Epoch 13, batch 1850, loss[loss=0.2534, simple_loss=0.3093, pruned_loss=0.0713, ctc_loss=0.1372, over 19604.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2847, pruned_loss=0.05944, ctc_loss=0.1116, over 3856241.64 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:44:29,680 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.525e+02 1.936e+02 2.666e+02 3.402e+02 5.252e+02, threshold=5.332e+02, percent-clipped=13.0 +2024-08-26 03:45:00,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.53 vs. limit=22.5 +2024-08-26 03:45:04,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.43 vs. limit=10.0 +2024-08-26 03:45:05,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=169386.66666666666, ans=0.125 +2024-08-26 03:45:07,842 INFO [train.py:1114] (3/4) Epoch 13, batch 1900, loss[loss=0.2494, simple_loss=0.3115, pruned_loss=0.06866, ctc_loss=0.1249, over 19681.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2857, pruned_loss=0.05991, ctc_loss=0.1123, over 3860124.61 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:45:28,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=169493.33333333334, ans=0.125 +2024-08-26 03:45:31,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=169546.66666666666, ans=0.0 +2024-08-26 03:46:03,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=12.0 +2024-08-26 03:46:05,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169653.33333333334, ans=0.1 +2024-08-26 03:46:12,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=12.0 +2024-08-26 03:46:29,183 INFO [train.py:1114] (3/4) Epoch 13, batch 1950, loss[loss=0.1922, simple_loss=0.2602, pruned_loss=0.04518, ctc_loss=0.08431, over 19598.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.2868, pruned_loss=0.06038, ctc_loss=0.1133, over 3869711.64 frames. 
], batch size: 52, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 03:46:47,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=169760.0, ans=0.125 +2024-08-26 03:46:58,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=169813.33333333334, ans=0.125 +2024-08-26 03:47:25,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=15.0 +2024-08-26 03:49:49,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=169813.33333333334, ans=0.2 +2024-08-26 03:50:26,630 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.484e+02 1.795e+02 2.018e+02 2.323e+02 3.502e+02, threshold=4.036e+02, percent-clipped=0.0 +2024-08-26 04:04:15,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=169920.0, ans=0.025 +2024-08-26 04:22:39,285 INFO [train.py:1114] (3/4) Epoch 13, batch 2000, loss[loss=0.2111, simple_loss=0.2634, pruned_loss=0.05766, ctc_loss=0.1085, over 19651.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.287, pruned_loss=0.06042, ctc_loss=0.1134, over 3853944.93 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 04:25:47,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=169973.33333333334, ans=0.125 +2024-08-26 04:34:06,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.53 vs. limit=10.0 +2024-08-26 05:06:38,773 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 05:09:29,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=170186.66666666666, ans=0.2 +2024-08-26 05:15:36,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=170186.66666666666, ans=0.2 +2024-08-26 05:17:15,369 INFO [train.py:1114] (3/4) Epoch 13, batch 2050, loss[loss=0.2044, simple_loss=0.2605, pruned_loss=0.05413, ctc_loss=0.1, over 19702.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2856, pruned_loss=0.05984, ctc_loss=0.1124, over 3850415.50 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:17:51,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=170240.0, ans=0.0 +2024-08-26 05:34:32,866 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.739e+02 2.095e+02 2.592e+02 3.598e+02, threshold=4.189e+02, percent-clipped=0.0 +2024-08-26 05:36:11,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=170400.0, ans=0.2 +2024-08-26 05:44:04,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=170506.66666666666, ans=0.0 +2024-08-26 05:45:21,777 INFO [train.py:1114] (3/4) Epoch 13, batch 2100, loss[loss=0.2355, simple_loss=0.2904, pruned_loss=0.06602, ctc_loss=0.1215, over 19765.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2843, pruned_loss=0.05896, ctc_loss=0.1108, over 3857634.22 frames. 
], batch size: 54, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 05:45:23,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=170506.66666666666, ans=0.0 +2024-08-26 05:45:23,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=170506.66666666666, ans=0.0 +2024-08-26 05:51:34,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=170560.0, ans=0.125 +2024-08-26 05:55:01,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170666.66666666666, ans=0.0 +2024-08-26 05:56:34,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=170720.0, ans=0.2 +2024-08-26 05:57:05,749 INFO [train.py:1114] (3/4) Epoch 13, batch 2150, loss[loss=0.2127, simple_loss=0.2696, pruned_loss=0.0568, ctc_loss=0.1053, over 19866.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2829, pruned_loss=0.0581, ctc_loss=0.1095, over 3868862.44 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 06:00:46,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=170826.66666666666, ans=0.2 +2024-08-26 06:00:47,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170826.66666666666, ans=0.0 +2024-08-26 06:02:02,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=170880.0, ans=15.0 +2024-08-26 06:02:10,730 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.801e+02 2.071e+02 2.646e+02 5.963e+02, threshold=4.141e+02, percent-clipped=6.0 +2024-08-26 06:03:09,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=170986.66666666666, ans=0.2 +2024-08-26 06:03:12,976 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:03:16,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=170986.66666666666, ans=0.125 +2024-08-26 06:03:39,302 INFO [train.py:1114] (3/4) Epoch 13, batch 2200, loss[loss=0.2274, simple_loss=0.2927, pruned_loss=0.05837, ctc_loss=0.1135, over 19591.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2837, pruned_loss=0.05864, ctc_loss=0.1105, over 3867637.94 frames. 
], batch size: 57, lr: 1.14e-02, grad_scale: 64.0 +2024-08-26 06:03:49,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=171040.0, ans=0.125 +2024-08-26 06:04:06,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=171093.33333333334, ans=0.0 +2024-08-26 06:04:24,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=171093.33333333334, ans=0.2 +2024-08-26 06:04:34,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=171093.33333333334, ans=0.125 +2024-08-26 06:05:06,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.59 vs. limit=15.0 +2024-08-26 06:05:31,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.05 vs. limit=22.5 +2024-08-26 06:06:26,730 INFO [train.py:1114] (3/4) Epoch 13, batch 2250, loss[loss=0.2335, simple_loss=0.2999, pruned_loss=0.0602, ctc_loss=0.1165, over 19632.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2838, pruned_loss=0.05868, ctc_loss=0.1104, over 3867400.59 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 06:06:27,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=171306.66666666666, ans=0.035 +2024-08-26 06:06:36,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.56 vs. limit=15.0 +2024-08-26 06:07:27,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171360.0, ans=0.1 +2024-08-26 06:08:30,393 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.765e+02 2.070e+02 2.599e+02 3.761e+02, threshold=4.140e+02, percent-clipped=0.0 +2024-08-26 06:09:34,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=171466.66666666666, ans=0.125 +2024-08-26 06:09:39,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171520.0, ans=0.1 +2024-08-26 06:10:19,744 INFO [train.py:1114] (3/4) Epoch 13, batch 2300, loss[loss=0.2423, simple_loss=0.2926, pruned_loss=0.07072, ctc_loss=0.1264, over 19500.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2839, pruned_loss=0.05923, ctc_loss=0.1114, over 3861275.80 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:10:25,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-08-26 06:10:45,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171626.66666666666, ans=0.1 +2024-08-26 06:11:23,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=171733.33333333334, ans=0.2 +2024-08-26 06:11:43,310 INFO [train.py:1114] (3/4) Epoch 13, batch 2350, loss[loss=0.2372, simple_loss=0.3042, pruned_loss=0.06223, ctc_loss=0.1141, over 19703.00 frames. 
], tot_loss[loss=0.2223, simple_loss=0.2831, pruned_loss=0.05872, ctc_loss=0.1104, over 3863262.31 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:12:01,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=171893.33333333334, ans=0.07 +2024-08-26 06:12:05,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=171946.66666666666, ans=0.0 +2024-08-26 06:12:06,768 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.52 vs. limit=22.5 +2024-08-26 06:12:07,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=171946.66666666666, ans=0.125 +2024-08-26 06:12:16,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.773e+02 2.247e+02 3.255e+02 4.983e+02, threshold=4.494e+02, percent-clipped=2.0 +2024-08-26 06:12:21,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=172000.0, ans=0.125 +2024-08-26 06:12:27,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=172000.0, ans=0.0 +2024-08-26 06:12:30,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.52 vs. limit=15.0 +2024-08-26 06:12:31,640 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:12:41,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=172053.33333333334, ans=0.125 +2024-08-26 06:12:46,283 INFO [train.py:1114] (3/4) Epoch 13, batch 2400, loss[loss=0.2132, simple_loss=0.2838, pruned_loss=0.05156, ctc_loss=0.09849, over 19317.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2861, pruned_loss=0.05999, ctc_loss=0.1127, over 3857386.77 frames. ], batch size: 71, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:13:22,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=22.5 +2024-08-26 06:13:29,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.46 vs. limit=12.0 +2024-08-26 06:13:41,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=172266.66666666666, ans=0.125 +2024-08-26 06:13:51,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=172266.66666666666, ans=0.2 +2024-08-26 06:13:52,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=172266.66666666666, ans=0.04949747468305833 +2024-08-26 06:14:08,369 INFO [train.py:1114] (3/4) Epoch 13, batch 2450, loss[loss=0.2929, simple_loss=0.3216, pruned_loss=0.09474, ctc_loss=0.1868, over 13453.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2903, pruned_loss=0.0633, ctc_loss=0.1195, over 3730288.78 frames. 
], batch size: 140, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 06:14:23,117 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.56 vs. limit=15.0 +2024-08-26 06:14:29,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=172426.66666666666, ans=0.2 +2024-08-26 06:14:30,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.47 vs. limit=22.5 +2024-08-26 06:14:32,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=172480.0, ans=0.125 +2024-08-26 06:14:43,290 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.935e+02 2.072e+02 2.350e+02 4.711e+02, threshold=4.143e+02, percent-clipped=2.0 +2024-08-26 06:14:43,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172533.33333333334, ans=0.1 +2024-08-26 06:14:50,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=172533.33333333334, ans=0.09899494936611666 +2024-08-26 06:15:44,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=172581.33333333334, ans=0.04949747468305833 +2024-08-26 06:16:27,497 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:16:27,497 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 06:17:54,189 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.2040, 3.1870, 3.7603, 2.9080], device='cuda:3') +2024-08-26 06:17:58,795 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1898, simple_loss=0.2778, pruned_loss=0.03769, ctc_loss=0.06578, over 944034.00 frames. +2024-08-26 06:18:12,591 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 14072MB +2024-08-26 06:18:29,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172634.66666666666, ans=0.1 +2024-08-26 06:18:33,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=172634.66666666666, ans=0.0 +2024-08-26 06:18:57,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=172688.0, ans=0.0 +2024-08-26 06:19:48,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172794.66666666666, ans=0.125 +2024-08-26 06:19:53,753 INFO [train.py:1114] (3/4) Epoch 14, batch 50, loss[loss=0.1952, simple_loss=0.2579, pruned_loss=0.04869, ctc_loss=0.08786, over 19744.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2845, pruned_loss=0.05889, ctc_loss=0.1117, over 844617.17 frames. 
], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:20:17,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=172848.0, ans=0.0 +2024-08-26 06:20:41,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172901.33333333334, ans=0.125 +2024-08-26 06:21:15,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=173061.33333333334, ans=0.2 +2024-08-26 06:21:17,214 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.738e+02 2.047e+02 2.487e+02 4.948e+02, threshold=4.095e+02, percent-clipped=4.0 +2024-08-26 06:21:48,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=173061.33333333334, ans=0.0 +2024-08-26 06:21:51,844 INFO [train.py:1114] (3/4) Epoch 14, batch 100, loss[loss=0.2423, simple_loss=0.2947, pruned_loss=0.06908, ctc_loss=0.1296, over 19736.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.2867, pruned_loss=0.06008, ctc_loss=0.1132, over 1499497.26 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:22:05,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.10 vs. limit=22.5 +2024-08-26 06:22:20,490 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:22:26,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=173221.33333333334, ans=0.125 +2024-08-26 06:22:52,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173274.66666666666, ans=0.1 +2024-08-26 06:23:28,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173328.0, ans=0.125 +2024-08-26 06:23:33,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=173381.33333333334, ans=0.125 +2024-08-26 06:23:38,126 INFO [train.py:1114] (3/4) Epoch 14, batch 150, loss[loss=0.2089, simple_loss=0.2613, pruned_loss=0.05643, ctc_loss=0.1092, over 19682.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2845, pruned_loss=0.05887, ctc_loss=0.1107, over 2027234.41 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:23:51,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=173381.33333333334, ans=0.0 +2024-08-26 06:24:27,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.96 vs. limit=15.0 +2024-08-26 06:24:28,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=173488.0, ans=0.0 +2024-08-26 06:24:49,742 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.676e+02 1.898e+02 2.213e+02 4.155e+02, threshold=3.795e+02, percent-clipped=1.0 +2024-08-26 06:25:00,462 INFO [train.py:1114] (3/4) Epoch 14, batch 200, loss[loss=0.2636, simple_loss=0.3077, pruned_loss=0.0796, ctc_loss=0.1506, over 18104.00 frames. 
], tot_loss[loss=0.2219, simple_loss=0.2829, pruned_loss=0.05843, ctc_loss=0.11, over 2434650.11 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:25:04,164 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.68 vs. limit=15.0 +2024-08-26 06:25:37,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=12.0 +2024-08-26 06:25:40,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=173754.66666666666, ans=0.0 +2024-08-26 06:25:49,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=15.0 +2024-08-26 06:25:49,884 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:26:05,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=173861.33333333334, ans=0.2 +2024-08-26 06:26:10,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=173861.33333333334, ans=0.0 +2024-08-26 06:26:16,071 INFO [train.py:1114] (3/4) Epoch 14, batch 250, loss[loss=0.234, simple_loss=0.2987, pruned_loss=0.06192, ctc_loss=0.1139, over 19411.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2826, pruned_loss=0.05807, ctc_loss=0.1092, over 2753935.14 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:26:17,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173914.66666666666, ans=0.125 +2024-08-26 06:26:39,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173968.0, ans=0.125 +2024-08-26 06:26:42,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=173968.0, ans=0.125 +2024-08-26 06:26:46,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.41 vs. limit=10.0 +2024-08-26 06:26:49,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.83 vs. limit=15.0 +2024-08-26 06:27:09,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=174074.66666666666, ans=0.025 +2024-08-26 06:27:18,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.683e+02 2.061e+02 2.648e+02 4.927e+02, threshold=4.123e+02, percent-clipped=4.0 +2024-08-26 06:27:25,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=174128.0, ans=0.125 +2024-08-26 06:27:28,134 INFO [train.py:1114] (3/4) Epoch 14, batch 300, loss[loss=0.2284, simple_loss=0.2867, pruned_loss=0.06308, ctc_loss=0.1099, over 19518.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2821, pruned_loss=0.05807, ctc_loss=0.1091, over 2999168.00 frames. 
], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 06:28:16,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174341.33333333334, ans=0.1 +2024-08-26 06:28:17,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=174341.33333333334, ans=0.2 +2024-08-26 06:28:34,438 INFO [train.py:1114] (3/4) Epoch 14, batch 350, loss[loss=0.1806, simple_loss=0.2486, pruned_loss=0.0412, ctc_loss=0.07537, over 19738.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2827, pruned_loss=0.05819, ctc_loss=0.1092, over 3190097.11 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:29:12,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174554.66666666666, ans=0.1 +2024-08-26 06:29:15,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-26 06:29:16,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-26 06:29:19,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=174608.0, ans=0.2 +2024-08-26 06:29:32,485 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.657e+02 1.894e+02 2.440e+02 4.007e+02, threshold=3.787e+02, percent-clipped=0.0 +2024-08-26 06:29:42,961 INFO [train.py:1114] (3/4) Epoch 14, batch 400, loss[loss=0.2178, simple_loss=0.2853, pruned_loss=0.05595, ctc_loss=0.09592, over 19499.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2825, pruned_loss=0.05817, ctc_loss=0.1092, over 3342697.91 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:30:09,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.42 vs. limit=15.0 +2024-08-26 06:30:16,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174768.0, ans=0.1 +2024-08-26 06:30:58,872 INFO [train.py:1114] (3/4) Epoch 14, batch 450, loss[loss=0.2287, simple_loss=0.2884, pruned_loss=0.06125, ctc_loss=0.116, over 19617.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2824, pruned_loss=0.05814, ctc_loss=0.1092, over 3451257.46 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:31:58,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175088.0, ans=0.1 +2024-08-26 06:32:32,609 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.702e+02 1.875e+02 2.205e+02 3.904e+02, threshold=3.749e+02, percent-clipped=2.0 +2024-08-26 06:32:59,821 INFO [train.py:1114] (3/4) Epoch 14, batch 500, loss[loss=0.221, simple_loss=0.2893, pruned_loss=0.05606, ctc_loss=0.1015, over 19678.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2818, pruned_loss=0.05789, ctc_loss=0.1091, over 3547006.36 frames. 
], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:33:04,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=175248.0, ans=0.0 +2024-08-26 06:33:50,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=175354.66666666666, ans=0.2 +2024-08-26 06:34:13,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=175408.0, ans=0.0 +2024-08-26 06:34:14,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=175408.0, ans=0.125 +2024-08-26 06:34:23,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-08-26 06:34:32,917 INFO [train.py:1114] (3/4) Epoch 14, batch 550, loss[loss=0.2591, simple_loss=0.3066, pruned_loss=0.07753, ctc_loss=0.1413, over 19229.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2821, pruned_loss=0.05815, ctc_loss=0.1094, over 3608381.59 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:34:42,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=12.0 +2024-08-26 06:34:44,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=175514.66666666666, ans=0.125 +2024-08-26 06:34:55,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=175568.0, ans=0.0 +2024-08-26 06:35:05,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.84 vs. limit=15.0 +2024-08-26 06:35:14,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=175674.66666666666, ans=0.04949747468305833 +2024-08-26 06:35:23,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=175674.66666666666, ans=0.125 +2024-08-26 06:35:25,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=175674.66666666666, ans=0.125 +2024-08-26 06:35:36,414 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.729e+02 1.957e+02 2.291e+02 4.042e+02, threshold=3.913e+02, percent-clipped=2.0 +2024-08-26 06:36:00,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=175728.0, ans=0.025 +2024-08-26 06:36:17,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=175781.33333333334, ans=0.125 +2024-08-26 06:36:18,839 INFO [train.py:1114] (3/4) Epoch 14, batch 600, loss[loss=0.2306, simple_loss=0.2943, pruned_loss=0.0619, ctc_loss=0.1075, over 19367.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2821, pruned_loss=0.05791, ctc_loss=0.1089, over 3664729.30 frames. 
], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:38:33,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=175834.66666666666, ans=0.025 +2024-08-26 06:38:56,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=175941.33333333334, ans=0.125 +2024-08-26 06:39:00,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.10 vs. limit=22.5 +2024-08-26 06:39:25,941 INFO [train.py:1114] (3/4) Epoch 14, batch 650, loss[loss=0.2255, simple_loss=0.2864, pruned_loss=0.06071, ctc_loss=0.1078, over 19768.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2809, pruned_loss=0.05711, ctc_loss=0.1075, over 3715303.48 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:39:28,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=176048.0, ans=0.125 +2024-08-26 06:40:32,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=176101.33333333334, ans=0.0 +2024-08-26 06:40:51,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=176154.66666666666, ans=0.0 +2024-08-26 06:41:22,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=176208.0, ans=0.125 +2024-08-26 06:41:27,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=176261.33333333334, ans=0.2 +2024-08-26 06:41:31,385 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.772e+02 2.123e+02 2.635e+02 4.354e+02, threshold=4.247e+02, percent-clipped=3.0 +2024-08-26 06:41:34,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=176261.33333333334, ans=0.5 +2024-08-26 06:41:45,010 INFO [train.py:1114] (3/4) Epoch 14, batch 700, loss[loss=0.2053, simple_loss=0.2698, pruned_loss=0.05052, ctc_loss=0.0992, over 19748.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2812, pruned_loss=0.05697, ctc_loss=0.1074, over 3747071.82 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:41:58,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=176368.0, ans=0.2 +2024-08-26 06:42:02,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176368.0, ans=0.0 +2024-08-26 06:42:51,119 INFO [train.py:1114] (3/4) Epoch 14, batch 750, loss[loss=0.2154, simple_loss=0.2782, pruned_loss=0.05522, ctc_loss=0.1055, over 19497.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2811, pruned_loss=0.05705, ctc_loss=0.1074, over 3774604.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:43:05,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.15 vs. 
limit=15.0 +2024-08-26 06:43:09,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176634.66666666666, ans=0.1 +2024-08-26 06:43:13,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=176634.66666666666, ans=0.125 +2024-08-26 06:44:26,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=176794.66666666666, ans=0.025 +2024-08-26 06:44:27,358 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.803e+02 2.358e+02 3.080e+02 4.835e+02, threshold=4.715e+02, percent-clipped=7.0 +2024-08-26 06:44:27,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176794.66666666666, ans=0.125 +2024-08-26 06:44:39,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176794.66666666666, ans=0.125 +2024-08-26 06:44:41,967 INFO [train.py:1114] (3/4) Epoch 14, batch 800, loss[loss=0.1913, simple_loss=0.2512, pruned_loss=0.04898, ctc_loss=0.08347, over 19821.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2813, pruned_loss=0.05723, ctc_loss=0.1077, over 3796395.50 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:44:47,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.81 vs. limit=15.0 +2024-08-26 06:44:50,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176848.0, ans=0.125 +2024-08-26 06:45:02,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=176901.33333333334, ans=0.125 +2024-08-26 06:45:39,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=177061.33333333334, ans=0.125 +2024-08-26 06:45:52,039 INFO [train.py:1114] (3/4) Epoch 14, batch 850, loss[loss=0.2188, simple_loss=0.2985, pruned_loss=0.05004, ctc_loss=0.09747, over 19649.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2813, pruned_loss=0.05737, ctc_loss=0.108, over 3815210.30 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:46:11,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177114.66666666666, ans=0.125 +2024-08-26 06:46:16,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. 
limit=6.0 +2024-08-26 06:46:37,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=177221.33333333334, ans=0.2 +2024-08-26 06:47:11,725 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.690e+02 1.974e+02 2.351e+02 3.908e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 06:47:21,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177328.0, ans=0.125 +2024-08-26 06:47:24,578 INFO [train.py:1114] (3/4) Epoch 14, batch 900, loss[loss=0.194, simple_loss=0.2578, pruned_loss=0.04778, ctc_loss=0.08644, over 19814.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2821, pruned_loss=0.05787, ctc_loss=0.1087, over 3820149.52 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:47:30,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=177381.33333333334, ans=0.125 +2024-08-26 06:47:35,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177434.66666666666, ans=0.1 +2024-08-26 06:47:40,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.41 vs. limit=22.5 +2024-08-26 06:47:41,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=177434.66666666666, ans=0.125 +2024-08-26 06:47:58,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=177488.0, ans=0.0 +2024-08-26 06:48:01,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.77 vs. limit=22.5 +2024-08-26 06:48:19,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177594.66666666666, ans=0.125 +2024-08-26 06:48:31,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=177594.66666666666, ans=0.0 +2024-08-26 06:48:35,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177594.66666666666, ans=0.1 +2024-08-26 06:48:38,039 INFO [train.py:1114] (3/4) Epoch 14, batch 950, loss[loss=0.2294, simple_loss=0.2811, pruned_loss=0.06521, ctc_loss=0.1183, over 19503.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2827, pruned_loss=0.05819, ctc_loss=0.1092, over 3821034.01 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 06:48:39,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. 
limit=15.0 +2024-08-26 06:49:00,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-26 06:49:08,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=177754.66666666666, ans=0.125 +2024-08-26 06:49:18,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=177754.66666666666, ans=0.0 +2024-08-26 06:49:26,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=177808.0, ans=0.2 +2024-08-26 06:49:36,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 1.810e+02 2.092e+02 2.519e+02 4.035e+02, threshold=4.185e+02, percent-clipped=1.0 +2024-08-26 06:50:05,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177914.66666666666, ans=0.1 +2024-08-26 06:50:06,711 INFO [train.py:1114] (3/4) Epoch 14, batch 1000, loss[loss=0.2184, simple_loss=0.2805, pruned_loss=0.05672, ctc_loss=0.1071, over 19846.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2839, pruned_loss=0.0587, ctc_loss=0.1102, over 3817118.64 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:50:17,843 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=7.734e-03 +2024-08-26 06:50:26,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1 +2024-08-26 06:50:31,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177968.0, ans=0.125 +2024-08-26 06:50:45,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.46 vs. limit=15.0 +2024-08-26 06:50:50,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178074.66666666666, ans=0.1 +2024-08-26 06:51:03,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178128.0, ans=0.125 +2024-08-26 06:51:05,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=178128.0, ans=0.015 +2024-08-26 06:51:21,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178128.0, ans=0.1 +2024-08-26 06:51:23,212 INFO [train.py:1114] (3/4) Epoch 14, batch 1050, loss[loss=0.2164, simple_loss=0.2826, pruned_loss=0.05522, ctc_loss=0.09966, over 19823.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2832, pruned_loss=0.05855, ctc_loss=0.1098, over 3824209.19 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:51:58,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=178288.0, ans=0.125 +2024-08-26 06:52:01,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178341.33333333334, ans=0.1 +2024-08-26 06:52:09,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=178341.33333333334, ans=0.0 +2024-08-26 06:52:17,049 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.429e+02 1.767e+02 2.034e+02 2.568e+02 4.426e+02, threshold=4.067e+02, percent-clipped=2.0 +2024-08-26 06:52:39,166 INFO [train.py:1114] (3/4) Epoch 14, batch 1100, loss[loss=0.2066, simple_loss=0.2783, pruned_loss=0.0489, ctc_loss=0.09278, over 19572.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2828, pruned_loss=0.05822, ctc_loss=0.1094, over 3832090.41 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:52:41,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178448.0, ans=0.0 +2024-08-26 06:53:06,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=178554.66666666666, ans=0.0 +2024-08-26 06:53:37,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=178661.33333333334, ans=0.02 +2024-08-26 06:53:49,720 INFO [train.py:1114] (3/4) Epoch 14, batch 1150, loss[loss=0.2277, simple_loss=0.2861, pruned_loss=0.0617, ctc_loss=0.1149, over 19592.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2825, pruned_loss=0.0582, ctc_loss=0.1094, over 3830063.13 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:54:05,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=178714.66666666666, ans=0.125 +2024-08-26 06:54:09,193 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 06:54:09,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178768.0, ans=0.125 +2024-08-26 06:54:13,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178768.0, ans=0.125 +2024-08-26 06:54:16,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-08-26 06:54:32,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178821.33333333334, ans=0.125 +2024-08-26 06:54:40,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=178874.66666666666, ans=0.025 +2024-08-26 06:54:47,789 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.255e+02 1.672e+02 1.916e+02 2.259e+02 4.129e+02, threshold=3.832e+02, percent-clipped=1.0 +2024-08-26 06:54:58,206 INFO [train.py:1114] (3/4) Epoch 14, batch 1200, loss[loss=0.2114, simple_loss=0.2822, pruned_loss=0.05054, ctc_loss=0.09882, over 19835.00 frames. 
], tot_loss[loss=0.223, simple_loss=0.2838, pruned_loss=0.05893, ctc_loss=0.1109, over 3826323.75 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:55:12,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-26 06:55:20,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179034.66666666666, ans=0.0 +2024-08-26 06:55:20,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-26 06:55:36,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179141.33333333334, ans=0.1 +2024-08-26 06:55:39,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=179141.33333333334, ans=0.2 +2024-08-26 06:56:27,981 INFO [train.py:1114] (3/4) Epoch 14, batch 1250, loss[loss=0.2594, simple_loss=0.311, pruned_loss=0.07541, ctc_loss=0.1421, over 19526.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2839, pruned_loss=0.05867, ctc_loss=0.1101, over 3844490.95 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:56:45,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=179301.33333333334, ans=0.2 +2024-08-26 06:56:46,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.20 vs. limit=6.0 +2024-08-26 06:57:07,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=179301.33333333334, ans=0.0 +2024-08-26 06:57:11,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=179354.66666666666, ans=0.0 +2024-08-26 06:58:13,352 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.864e+02 2.134e+02 2.537e+02 3.723e+02, threshold=4.267e+02, percent-clipped=0.0 +2024-08-26 06:58:31,270 INFO [train.py:1114] (3/4) Epoch 14, batch 1300, loss[loss=0.2556, simple_loss=0.3057, pruned_loss=0.07511, ctc_loss=0.1384, over 18977.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2827, pruned_loss=0.05795, ctc_loss=0.109, over 3849016.59 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 06:58:34,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.87 vs. limit=15.0 +2024-08-26 06:58:58,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-26 06:58:58,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179514.66666666666, ans=0.1 +2024-08-26 06:58:59,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. 
limit=15.0 +2024-08-26 06:59:18,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=179621.33333333334, ans=0.2 +2024-08-26 06:59:38,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179621.33333333334, ans=0.125 +2024-08-26 07:00:15,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179728.0, ans=0.1 +2024-08-26 07:00:19,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=179728.0, ans=0.125 +2024-08-26 07:00:23,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. limit=15.0 +2024-08-26 07:00:31,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=179728.0, ans=0.125 +2024-08-26 07:00:35,359 INFO [train.py:1114] (3/4) Epoch 14, batch 1350, loss[loss=0.2029, simple_loss=0.2719, pruned_loss=0.04847, ctc_loss=0.0927, over 19782.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2823, pruned_loss=0.05769, ctc_loss=0.1086, over 3860373.05 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:01:10,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.15 vs. limit=10.0 +2024-08-26 07:01:26,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179834.66666666666, ans=0.125 +2024-08-26 07:01:37,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179888.0, ans=0.125 +2024-08-26 07:01:39,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=179888.0, ans=0.125 +2024-08-26 07:01:42,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=12.0 +2024-08-26 07:02:09,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179941.33333333334, ans=0.125 +2024-08-26 07:02:11,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.07 vs. limit=15.0 +2024-08-26 07:02:26,031 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 1.690e+02 1.870e+02 2.214e+02 3.706e+02, threshold=3.740e+02, percent-clipped=0.0 +2024-08-26 07:02:46,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=180048.0, ans=0.2 +2024-08-26 07:02:47,353 INFO [train.py:1114] (3/4) Epoch 14, batch 1400, loss[loss=0.1921, simple_loss=0.2463, pruned_loss=0.04974, ctc_loss=0.09597, over 19689.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2819, pruned_loss=0.0576, ctc_loss=0.1084, over 3866720.71 frames. 
], batch size: 46, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:03:00,264 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=8.149e-02 +2024-08-26 07:03:00,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=15.0 +2024-08-26 07:03:15,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180101.33333333334, ans=0.125 +2024-08-26 07:03:35,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=180154.66666666666, ans=0.025 +2024-08-26 07:04:17,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-08-26 07:04:25,275 INFO [train.py:1114] (3/4) Epoch 14, batch 1450, loss[loss=0.2236, simple_loss=0.2877, pruned_loss=0.05868, ctc_loss=0.1053, over 19664.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2829, pruned_loss=0.05806, ctc_loss=0.1093, over 3864940.50 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:04:30,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180314.66666666666, ans=0.125 +2024-08-26 07:04:38,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=180314.66666666666, ans=0.2 +2024-08-26 07:04:44,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=180314.66666666666, ans=0.0 +2024-08-26 07:05:12,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=180421.33333333334, ans=0.125 +2024-08-26 07:05:28,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0 +2024-08-26 07:05:38,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=180474.66666666666, ans=0.125 +2024-08-26 07:05:38,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.97 vs. limit=15.0 +2024-08-26 07:05:41,194 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.716e+02 1.963e+02 2.339e+02 6.137e+02, threshold=3.925e+02, percent-clipped=1.0 +2024-08-26 07:05:57,992 INFO [train.py:1114] (3/4) Epoch 14, batch 1500, loss[loss=0.2479, simple_loss=0.3095, pruned_loss=0.06755, ctc_loss=0.128, over 19584.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2825, pruned_loss=0.05763, ctc_loss=0.1083, over 3864728.47 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:06:05,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=180581.33333333334, ans=0.125 +2024-08-26 07:06:08,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=180634.66666666666, ans=0.2 +2024-08-26 07:06:47,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=180688.0, ans=0.125 +2024-08-26 07:07:06,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180741.33333333334, ans=0.1 +2024-08-26 07:07:26,416 INFO [train.py:1114] (3/4) Epoch 14, batch 1550, loss[loss=0.2353, simple_loss=0.2955, pruned_loss=0.06458, ctc_loss=0.1149, over 19609.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2824, pruned_loss=0.0576, ctc_loss=0.1084, over 3848764.53 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:07:26,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=180848.0, ans=0.125 +2024-08-26 07:07:47,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=180901.33333333334, ans=0.2 +2024-08-26 07:07:56,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=180954.66666666666, ans=0.125 +2024-08-26 07:08:02,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.04 vs. limit=22.5 +2024-08-26 07:08:09,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.13 vs. limit=15.0 +2024-08-26 07:08:20,840 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.735e+02 1.996e+02 2.323e+02 4.332e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-26 07:08:29,936 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=15.0 +2024-08-26 07:08:46,918 INFO [train.py:1114] (3/4) Epoch 14, batch 1600, loss[loss=0.2182, simple_loss=0.2815, pruned_loss=0.05496, ctc_loss=0.1122, over 19841.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.282, pruned_loss=0.05757, ctc_loss=0.1083, over 3837124.52 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 07:08:50,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181114.66666666666, ans=0.0 +2024-08-26 07:08:50,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.90 vs. 
limit=15.0 +2024-08-26 07:09:18,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=181221.33333333334, ans=0.0 +2024-08-26 07:09:24,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=181221.33333333334, ans=0.0 +2024-08-26 07:10:18,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=181328.0, ans=0.125 +2024-08-26 07:10:22,352 INFO [train.py:1114] (3/4) Epoch 14, batch 1650, loss[loss=0.2378, simple_loss=0.2936, pruned_loss=0.06451, ctc_loss=0.1323, over 19635.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2823, pruned_loss=0.05795, ctc_loss=0.1092, over 3834583.61 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:11:06,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.61 vs. limit=15.0 +2024-08-26 07:11:07,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181541.33333333334, ans=0.125 +2024-08-26 07:11:10,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.857e+02 2.243e+02 2.957e+02 5.258e+02, threshold=4.486e+02, percent-clipped=5.0 +2024-08-26 07:11:23,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181594.66666666666, ans=0.125 +2024-08-26 07:11:27,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181648.0, ans=0.1 +2024-08-26 07:11:28,239 INFO [train.py:1114] (3/4) Epoch 14, batch 1700, loss[loss=0.1764, simple_loss=0.241, pruned_loss=0.03982, ctc_loss=0.08018, over 19673.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2817, pruned_loss=0.05728, ctc_loss=0.1079, over 3848471.89 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:11:44,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=181701.33333333334, ans=0.5 +2024-08-26 07:11:44,236 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:12:04,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=181808.0, ans=0.125 +2024-08-26 07:12:05,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.14 vs. limit=15.0 +2024-08-26 07:12:24,387 INFO [train.py:1114] (3/4) Epoch 14, batch 1750, loss[loss=0.1921, simple_loss=0.253, pruned_loss=0.04799, ctc_loss=0.08786, over 19696.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2817, pruned_loss=0.05745, ctc_loss=0.1082, over 3854184.62 frames. 
], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:12:42,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=181968.0, ans=0.0 +2024-08-26 07:12:58,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=182021.33333333334, ans=0.015 +2024-08-26 07:13:14,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182021.33333333334, ans=0.1 +2024-08-26 07:13:27,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=182074.66666666666, ans=0.125 +2024-08-26 07:13:35,919 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.769e+02 2.123e+02 2.747e+02 4.234e+02, threshold=4.245e+02, percent-clipped=0.0 +2024-08-26 07:13:38,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182128.0, ans=0.125 +2024-08-26 07:13:46,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182128.0, ans=0.1 +2024-08-26 07:13:51,688 INFO [train.py:1114] (3/4) Epoch 14, batch 1800, loss[loss=0.2355, simple_loss=0.2956, pruned_loss=0.06293, ctc_loss=0.1242, over 19601.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.282, pruned_loss=0.05769, ctc_loss=0.1086, over 3855590.03 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:14:04,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=182234.66666666666, ans=0.0 +2024-08-26 07:14:10,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=182234.66666666666, ans=0.125 +2024-08-26 07:14:49,559 INFO [train.py:1114] (3/4) Epoch 14, batch 1850, loss[loss=0.204, simple_loss=0.2762, pruned_loss=0.04671, ctc_loss=0.09587, over 19599.00 frames. ], tot_loss[loss=0.22, simple_loss=0.282, pruned_loss=0.05742, ctc_loss=0.108, over 3858221.10 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:15:07,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=182501.33333333334, ans=0.2 +2024-08-26 07:15:35,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.755e+02 2.000e+02 2.500e+02 5.147e+02, threshold=4.001e+02, percent-clipped=3.0 +2024-08-26 07:15:46,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182661.33333333334, ans=0.1 +2024-08-26 07:15:52,243 INFO [train.py:1114] (3/4) Epoch 14, batch 1900, loss[loss=0.237, simple_loss=0.2997, pruned_loss=0.06393, ctc_loss=0.1163, over 19678.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2827, pruned_loss=0.05757, ctc_loss=0.1084, over 3862304.89 frames. 
], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:16:01,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=182714.66666666666, ans=0.125 +2024-08-26 07:16:06,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=182768.0, ans=0.0 +2024-08-26 07:16:11,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=182768.0, ans=0.025 +2024-08-26 07:16:22,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=182821.33333333334, ans=0.125 +2024-08-26 07:16:56,692 INFO [train.py:1114] (3/4) Epoch 14, batch 1950, loss[loss=0.191, simple_loss=0.2604, pruned_loss=0.04392, ctc_loss=0.08439, over 19580.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2838, pruned_loss=0.0578, ctc_loss=0.1087, over 3871186.13 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:16:58,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=182981.33333333334, ans=0.125 +2024-08-26 07:17:31,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=183088.0, ans=0.125 +2024-08-26 07:17:33,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=183088.0, ans=0.0 +2024-08-26 07:17:51,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183141.33333333334, ans=0.1 +2024-08-26 07:17:55,528 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.666e+02 1.941e+02 2.281e+02 4.229e+02, threshold=3.882e+02, percent-clipped=1.0 +2024-08-26 07:18:05,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=183194.66666666666, ans=0.0 +2024-08-26 07:18:14,089 INFO [train.py:1114] (3/4) Epoch 14, batch 2000, loss[loss=0.2122, simple_loss=0.2676, pruned_loss=0.05706, ctc_loss=0.1068, over 19655.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2842, pruned_loss=0.0581, ctc_loss=0.1095, over 3856216.08 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 64.0 +2024-08-26 07:18:28,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183301.33333333334, ans=0.125 +2024-08-26 07:18:30,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=183301.33333333334, ans=0.04949747468305833 +2024-08-26 07:18:46,577 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:19:11,483 INFO [train.py:1114] (3/4) Epoch 14, batch 2050, loss[loss=0.1956, simple_loss=0.2574, pruned_loss=0.04928, ctc_loss=0.08794, over 19725.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2835, pruned_loss=0.05821, ctc_loss=0.1097, over 3853148.84 frames. 
], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:19:39,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=183568.0, ans=0.2 +2024-08-26 07:19:45,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-26 07:19:46,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=183621.33333333334, ans=0.0 +2024-08-26 07:19:52,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-26 07:19:53,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183674.66666666666, ans=0.1 +2024-08-26 07:19:54,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183674.66666666666, ans=0.1 +2024-08-26 07:19:58,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=183674.66666666666, ans=0.2 +2024-08-26 07:20:49,711 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 07:20:51,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 1.705e+02 1.994e+02 2.461e+02 3.917e+02, threshold=3.988e+02, percent-clipped=1.0 +2024-08-26 07:24:48,260 INFO [train.py:1114] (3/4) Epoch 14, batch 2100, loss[loss=0.2434, simple_loss=0.2954, pruned_loss=0.06934, ctc_loss=0.1319, over 19763.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2823, pruned_loss=0.05752, ctc_loss=0.1086, over 3859523.11 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 07:35:53,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183834.66666666666, ans=0.1 +2024-08-26 07:43:34,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.97 vs. limit=22.5 +2024-08-26 07:52:29,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=183888.0, ans=0.0 +2024-08-26 08:02:07,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183994.66666666666, ans=0.125 +2024-08-26 08:07:07,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=183994.66666666666, ans=0.0 +2024-08-26 08:09:58,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=183994.66666666666, ans=0.2 +2024-08-26 08:13:15,354 INFO [train.py:1114] (3/4) Epoch 14, batch 2150, loss[loss=0.1871, simple_loss=0.2652, pruned_loss=0.03962, ctc_loss=0.07465, over 19831.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2818, pruned_loss=0.05736, ctc_loss=0.1081, over 3870758.51 frames. 
], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 08:16:18,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=184048.0, ans=0.125 +2024-08-26 08:27:10,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=184101.33333333334, ans=0.0 +2024-08-26 08:42:47,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184154.66666666666, ans=0.125 +2024-08-26 08:51:19,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=184208.0, ans=0.5 +2024-08-26 08:59:37,607 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.765e+02 2.052e+02 2.784e+02 6.261e+02, threshold=4.104e+02, percent-clipped=7.0 +2024-08-26 08:59:59,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184261.33333333334, ans=0.1 +2024-08-26 09:03:09,764 INFO [train.py:1114] (3/4) Epoch 14, batch 2200, loss[loss=0.236, simple_loss=0.3022, pruned_loss=0.06183, ctc_loss=0.1152, over 19593.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.282, pruned_loss=0.05747, ctc_loss=0.1082, over 3869156.33 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 09:18:44,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184474.66666666666, ans=0.125 +2024-08-26 09:18:48,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.08 vs. limit=15.0 +2024-08-26 09:20:23,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=184474.66666666666, ans=0.125 +2024-08-26 09:20:25,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184528.0, ans=0.0 +2024-08-26 09:20:37,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=184528.0, ans=0.0 +2024-08-26 09:20:37,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.96 vs. 
limit=15.0 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-0 new file mode 100644 index 0000000000000000000000000000000000000000..d425c44f15a08b7d0a771d8d8e0155f402ffbeb0 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-0 @@ -0,0 +1,41 @@ +2024-08-29 02:20:12,557 INFO [train.py:1182] (0/4) Training started +2024-08-29 02:20:16,779 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-29 02:20:24,018 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2655.int.cedar.computecanada.ca', 'IP address': '172.16.146.92'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 
'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 02:20:24,018 INFO [train.py:1212] (0/4) About to create model +2024-08-29 02:20:26,032 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-29 02:20:26,744 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 02:21:07,937 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-29 02:21:11,401 INFO [train.py:1231] (0/4) Using DDP +2024-08-29 02:22:07,014 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-29 02:22:07,205 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-29 02:22:07,205 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-29 02:22:08,990 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-29 02:22:08,995 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-29 02:22:09,604 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-29 02:22:09,940 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-29 02:22:10,267 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-29 02:22:10,267 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:28:00,136 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-29 02:28:01,730 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-29 02:30:56,404 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-29 02:30:57,389 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=256, metric=7.97 vs. limit=7.5 +2024-08-29 02:31:02,586 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 02:32:39,929 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 02:32:41,489 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 02:32:41,507 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-29 02:39:21,843 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 02:39:21,844 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 02:41:04,595 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.0927, 2.8748, 3.3499, 2.5743], device='cuda:0') +2024-08-29 02:43:58,039 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. 
+2024-08-29 02:43:58,039 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 03:22:08,041 INFO [train.py:1050] (0/4) Caught exception: [Rank 0] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=245, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600003 milliseconds before timing out.. +2024-08-29 03:22:08,192 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-0.pt +2024-08-29 03:22:23,650 INFO [train.py:1413] (0/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 03:22:48,082 INFO [train.py:1419] (0/4) features shape: torch.Size([48, 1633, 80]) +2024-08-29 03:22:48,091 INFO [train.py:1423] (0/4) num tokens: 3861 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-1 new file mode 100644 index 0000000000000000000000000000000000000000..9ec2a62c51e91c18116602e9c7ec298810612c14 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-1 @@ -0,0 +1,40 @@ +2024-08-29 02:20:12,771 INFO [train.py:1182] (1/4) Training started +2024-08-29 02:20:12,772 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-29 02:20:24,017 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2655.int.cedar.computecanada.ca', 'IP address': '172.16.146.92'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 
'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 02:20:42,972 INFO [train.py:1212] (1/4) About to create model +2024-08-29 02:20:43,668 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-29 02:20:43,668 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 02:21:11,391 INFO [train.py:1231] (1/4) Using DDP +2024-08-29 02:22:07,014 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-29 02:22:07,213 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-29 02:22:07,213 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-29 02:22:08,990 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-29 02:22:08,995 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-29 02:22:09,604 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-29 02:22:09,940 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-29 02:22:10,261 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-29 02:22:10,261 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:27:59,124 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.58 vs. limit=3.0 +2024-08-29 02:28:00,137 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-29 02:28:01,726 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 02:30:56,400 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 02:31:02,582 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 02:32:39,930 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 02:32:40,850 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=4.55 vs. 
limit=3.0 +2024-08-29 02:32:41,485 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 02:32:41,508 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-29 02:39:21,860 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 02:39:21,861 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 02:43:58,038 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 02:43:58,039 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 03:22:08,039 INFO [train.py:1050] (1/4) Caught exception: [Rank 1] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=245, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600003 milliseconds before timing out.. +2024-08-29 03:22:08,041 INFO [checkpoint.py:75] (1/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-1.pt +2024-08-29 03:24:03,407 INFO [train.py:1413] (1/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 03:24:03,503 INFO [train.py:1419] (1/4) features shape: torch.Size([49, 1632, 80]) +2024-08-29 03:24:03,505 INFO [train.py:1423] (1/4) num tokens: 3786 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-2 new file mode 100644 index 0000000000000000000000000000000000000000..92cac4aa36ba1d0c647bbc9b552ca5ab8f0ac007 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-2 @@ -0,0 +1,39 @@ +2024-08-29 02:20:12,763 INFO [train.py:1182] (2/4) Training started +2024-08-29 02:20:12,764 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-29 02:20:24,017 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2655.int.cedar.computecanada.ca', 'IP address': '172.16.146.92'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': 
'/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 02:20:24,017 INFO [train.py:1212] (2/4) About to create model +2024-08-29 02:20:26,034 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-29 02:20:26,744 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 02:21:11,391 INFO [train.py:1231] (2/4) Using DDP +2024-08-29 02:22:07,024 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-29 02:22:07,222 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-29 02:22:07,222 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-29 02:22:07,424 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-29 02:22:07,424 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-29 02:22:07,424 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-29 02:22:09,032 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-29 02:22:09,034 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-29 02:22:09,604 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-29 02:22:09,940 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-29 02:22:10,266 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-29 02:22:10,267 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. 
+2024-08-29 02:28:00,138 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-29 02:28:01,723 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-29 02:30:56,401 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 02:30:57,521 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=12.51 vs. limit=7.5 +2024-08-29 02:31:02,578 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 02:32:39,932 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 02:32:41,480 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 02:32:41,498 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-29 02:39:21,843 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 02:39:21,843 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 02:43:58,044 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 02:43:58,045 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 03:12:13,916 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=233, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600008 milliseconds before timing out.. +2024-08-29 03:12:13,918 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-2.pt +2024-08-29 03:12:24,827 INFO [train.py:1413] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 03:12:26,813 INFO [train.py:1419] (2/4) features shape: torch.Size([76, 1045, 80]) +2024-08-29 03:12:26,816 INFO [train.py:1423] (2/4) num tokens: 4179 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-3 new file mode 100644 index 0000000000000000000000000000000000000000..d78c66fdd209e36e8d5df1e62d9e239c64f0007c --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-02-20-12-3 @@ -0,0 +1,41 @@ +2024-08-29 02:20:12,771 INFO [train.py:1182] (3/4) Training started +2024-08-29 02:20:12,772 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-29 02:20:24,017 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 
00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2655.int.cedar.computecanada.ca', 'IP address': '172.16.146.92'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 02:20:52,701 INFO [train.py:1212] (3/4) About to create model +2024-08-29 02:20:53,396 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-29 02:20:53,396 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 02:21:11,368 INFO [train.py:1231] (3/4) Using DDP +2024-08-29 02:22:07,023 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-29 02:22:07,184 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-29 02:22:07,184 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-29 02:22:07,423 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-29 02:22:07,424 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. 
+2024-08-29 02:22:09,006 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-29 02:22:09,008 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-29 02:22:09,604 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-29 02:22:09,941 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-29 02:22:10,264 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-29 02:22:10,264 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:28:00,136 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-29 02:28:01,730 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-29 02:30:56,394 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-29 02:30:57,360 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=256, metric=7.92 vs. limit=7.5 +2024-08-29 02:31:02,579 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 02:32:39,932 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 02:32:41,479 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 02:32:41,499 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-29 02:39:00,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=172581.33333333334, ans=0.04949747468305833 +2024-08-29 02:39:21,844 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 02:39:21,844 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 02:41:44,001 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.1110, 2.9698, 3.4716, 2.6793], device='cuda:3') +2024-08-29 02:43:58,036 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 02:43:58,037 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 03:22:08,040 INFO [train.py:1050] (3/4) Caught exception: [Rank 3] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=245, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600001 milliseconds before timing out.. 
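The Timeout(ms)=600000 in this exception is the 10-minute default that torch.distributed applies to NCCL collectives: the rank sat in an ALLREDUCE for ten minutes (apparently while the ranks fell out of step around the long validation pass) before the watchdog fired. If a stall like this is legitimate rather than a hang, one common mitigation is to raise the timeout when the process group is created; an illustrative sketch, not the icefall code:

```python
# Illustrative sketch (not the icefall source): the Timeout(ms)=600000 in the
# exception above is the default NCCL collective timeout. If one rank
# legitimately needs longer, raise the timeout at process-group init.
import datetime
import torch.distributed as dist

dist.init_process_group(
    backend="nccl",
    timeout=datetime.timedelta(minutes=60),  # log shows the 10-minute default
)
```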
+2024-08-29 03:22:08,042 INFO [checkpoint.py:75] (3/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-3.pt +2024-08-29 03:22:09,462 INFO [train.py:1413] (3/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 03:22:38,838 INFO [train.py:1419] (3/4) features shape: torch.Size([49, 1632, 80]) +2024-08-29 03:22:38,841 INFO [train.py:1423] (3/4) num tokens: 3961 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-34-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-34-0 new file mode 100644 index 0000000000000000000000000000000000000000..abc413646faf51152965d222afc16ce84568e2b5 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-34-0 @@ -0,0 +1,41 @@ +2024-08-29 10:29:34,831 INFO [train.py:1182] (0/4) Training started +2024-08-29 10:29:43,675 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-29 10:29:48,609 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:29:48,610 INFO [train.py:1212] (0/4) About to create model +2024-08-29 10:29:49,338 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-29 10:29:49,916 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 10:30:17,107 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-29 10:30:25,093 INFO [train.py:1231] (0/4) Using DDP +2024-08-29 10:30:35,005 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-29 10:30:56,630 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-29 10:30:56,631 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:30:56,633 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-29 10:30:56,633 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-29 10:30:56,633 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-29 10:30:56,633 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-29 10:30:56,633 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-29 10:30:56,633 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-29 10:30:58,222 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-29 10:30:58,222 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-29 10:30:58,224 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-29 10:30:58,225 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-29 10:30:58,550 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-29 10:30:58,550 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 10:42:07,077 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-29 10:42:08,615 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-29 10:47:00,571 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-29 10:47:01,535 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=256, metric=7.97 vs. limit=7.5 +2024-08-29 10:47:01,970 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 10:54:13,225 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 10:54:14,754 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 10:54:14,770 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-29 10:56:29,891 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. 
], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 10:56:29,892 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 11:02:14,718 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.0927, 2.8748, 3.3499, 2.5743], device='cuda:0') +2024-08-29 11:03:12,764 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 11:03:12,765 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 11:27:38,684 INFO [train.py:1050] (0/4) Caught exception: [Rank 0] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=209, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600004 milliseconds before timing out.. +2024-08-29 11:27:38,688 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-0.pt +2024-08-29 11:27:43,350 INFO [train.py:1413] (0/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 11:37:59,919 INFO [train.py:1419] (0/4) features shape: torch.Size([50, 1582, 80]) +2024-08-29 11:38:00,088 INFO [train.py:1423] (0/4) num tokens: 4102 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-1 new file mode 100644 index 0000000000000000000000000000000000000000..995dd307ac763724ffc6f14b48564ff8d5c46283 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-1 @@ -0,0 +1,35 @@ +2024-08-29 10:29:35,106 INFO [train.py:1182] (1/4) Training started +2024-08-29 10:29:35,107 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-29 10:29:35,925 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 
'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:29:35,925 INFO [train.py:1212] (1/4) About to create model +2024-08-29 10:29:37,620 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-29 10:29:37,755 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 10:30:25,092 INFO [train.py:1231] (1/4) Using DDP +2024-08-29 10:30:35,008 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-29 10:30:35,206 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-29 10:30:35,206 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-29 10:30:40,956 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-29 10:30:40,964 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-29 10:30:41,292 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-29 10:30:41,498 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-29 10:30:41,818 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-29 10:30:41,818 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 10:41:41,672 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.58 vs. 
limit=3.0 +2024-08-29 10:42:07,083 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-29 10:42:08,626 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 10:47:00,568 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 10:47:01,963 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 10:54:13,225 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 10:54:14,126 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=4.55 vs. limit=3.0 +2024-08-29 10:54:14,749 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 10:54:14,767 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-29 10:56:29,892 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 10:56:29,892 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 11:03:12,766 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 11:03:12,766 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13467MB diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-2 new file mode 100644 index 0000000000000000000000000000000000000000..0b153bb65e1e91d3feb5f5572e08633cd6f48a38 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-2 @@ -0,0 +1,39 @@ +2024-08-29 10:29:35,200 INFO [train.py:1182] (2/4) Training started +2024-08-29 10:29:35,201 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-29 10:29:35,925 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 
'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:29:35,925 INFO [train.py:1212] (2/4) About to create model +2024-08-29 10:29:37,659 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-29 10:29:37,755 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 10:30:25,067 INFO [train.py:1231] (2/4) Using DDP +2024-08-29 10:30:35,008 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-29 10:30:35,161 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-29 10:30:35,161 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-29 10:30:39,346 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-29 10:30:39,346 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-29 10:30:40,957 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-29 10:30:40,964 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-29 10:30:41,292 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-29 10:30:41,498 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-29 10:30:41,823 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-29 10:30:41,823 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. 
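The sanity check announced in the last line is a pre-flight OOM scan: before real training starts, a worst-case (longest-duration) batch from each bucket is pushed through a full forward/backward pass, so a memory blow-up surfaces in minutes rather than hours into the epoch. The "Maximum memory allocated" lines that follow are its per-batch reports. A rough sketch of the idea, with hypothetical names, not the icefall implementation:

```python
# Rough sketch of a pre-flight OOM scan; `model`, `compute_loss`, and
# `biggest_batches` are hypothetical stand-ins for the icefall internals.
import torch

def scan_for_oom(model, compute_loss, biggest_batches) -> None:
    for batch in biggest_batches:  # one pessimistic batch per duration bucket
        try:
            with torch.cuda.amp.autocast():  # 'use_fp16': True
                loss = compute_loss(model, batch)
            loss.backward()
            model.zero_grad(set_to_none=True)
        except RuntimeError as e:
            if "out of memory" in str(e):
                raise RuntimeError(
                    "A batch in epoch 1 causes OOM; reduce --max-duration."
                ) from e
            raise
        mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
        print(f"Maximum memory allocated so far is {mb}MB")
```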
+2024-08-29 10:42:07,078 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-29 10:42:08,620 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-29 10:47:00,570 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 10:47:01,745 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=12.51 vs. limit=7.5 +2024-08-29 10:47:01,974 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 10:54:13,229 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 10:54:14,759 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 10:54:14,778 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-29 10:56:29,907 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 10:56:29,908 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 11:03:12,766 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 11:03:12,767 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 11:27:38,684 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=209, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600000 milliseconds before timing out.. +2024-08-29 11:27:38,686 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-2.pt +2024-08-29 11:28:00,514 INFO [train.py:1413] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 11:37:54,610 INFO [train.py:1419] (2/4) features shape: torch.Size([50, 1582, 80]) +2024-08-29 11:37:54,614 INFO [train.py:1423] (2/4) num tokens: 4031 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-3 new file mode 100644 index 0000000000000000000000000000000000000000..5315595b745a3c3f497ffa8b54c5331706077b0d --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-10-29-35-3 @@ -0,0 +1,41 @@ +2024-08-29 10:29:35,106 INFO [train.py:1182] (3/4) Training started +2024-08-29 10:29:35,107 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-29 10:29:35,925 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 
00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2558.int.cedar.computecanada.ca', 'IP address': '172.16.145.251'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:29:35,925 INFO [train.py:1212] (3/4) About to create model +2024-08-29 10:29:37,639 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-29 10:29:37,755 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 10:30:25,092 INFO [train.py:1231] (3/4) Using DDP +2024-08-29 10:30:35,013 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-29 10:30:35,218 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-29 10:30:35,218 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-29 10:30:39,345 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. 
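These startup lines trace the full resume sequence: model weights from epoch-13.pt, then the optimizer and scheduler state, and (just before batch 0) the AMP grad-scaler state. A condensed sketch of that sequence; the checkpoint key names are assumptions, not verified against icefall's checkpoint.py:

```python
# Condensed sketch of the resume sequence in the log lines above; the
# checkpoint keys ("model", "optimizer", ...) are assumed, not verified.
import torch

def resume_training_state(ckpt_path, model, optimizer, scheduler):
    ckpt = torch.load(ckpt_path, map_location="cpu")
    model.load_state_dict(ckpt["model"])          # "Loading checkpoint"
    optimizer.load_state_dict(ckpt["optimizer"])  # "Loading optimizer state dict"
    scheduler.load_state_dict(ckpt["scheduler"])  # "Loading scheduler state dict"
    scaler = torch.cuda.amp.GradScaler(enabled=True)  # 'use_fp16': True
    scaler.load_state_dict(ckpt["grad_scaler"])   # "Loading grad scaler state dict"
    return scaler
```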
+2024-08-29 10:30:40,960 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-29 10:30:40,966 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-29 10:30:41,293 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-29 10:30:41,498 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-29 10:30:41,824 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-29 10:30:41,824 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 10:42:07,076 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-29 10:42:08,617 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-29 10:47:00,566 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-29 10:47:01,543 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=256, metric=7.92 vs. limit=7.5 +2024-08-29 10:47:01,971 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 10:54:13,227 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 10:54:14,746 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 10:54:14,766 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-29 10:55:56,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=172581.33333333334, ans=0.04949747468305833 +2024-08-29 10:56:29,894 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 10:56:29,894 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 11:02:14,721 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.1110, 2.9698, 3.4716, 2.6793], device='cuda:3') +2024-08-29 11:03:12,756 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 11:03:12,757 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 11:27:38,684 INFO [train.py:1050] (3/4) Caught exception: [Rank 3] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=209, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600000 milliseconds before timing out.. 
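When the watchdog fires, the lines that follow show each rank saving a bad-model-{rank}.pt checkpoint plus the batch it was processing, so the failure can be replayed offline. A minimal sketch of this save-diagnostics-on-failure pattern, with hypothetical names, not the icefall source:

```python
# Minimal sketch (hypothetical names, not the icefall source) of the
# save-diagnostics-on-failure pattern in the surrounding lines: on an
# unrecoverable exception, persist the model and the offending batch
# so the failure can be replayed offline.
import logging
import torch

def train_with_diagnostics(train_one_epoch, model, get_current_batch,
                           exp_dir, rank):
    try:
        train_one_epoch()
    except Exception as e:
        logging.info(f"Caught exception: {e}.")
        torch.save({"model": model.state_dict()},
                   f"{exp_dir}/bad-model-{rank}.pt")
        batch = get_current_batch()
        torch.save(batch, f"{exp_dir}/batch.pt")
        logging.info(f"features shape: {batch['features'].shape}")
        raise
```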
+2024-08-29 11:27:38,687 INFO [checkpoint.py:75] (3/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-3.pt +2024-08-29 11:27:40,372 INFO [train.py:1413] (3/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-323d3ab0-f35c-b8a9-fce5-e9d717208331.pt +2024-08-29 11:37:47,768 INFO [train.py:1419] (3/4) features shape: torch.Size([50, 1582, 80]) +2024-08-29 11:37:51,441 INFO [train.py:1423] (3/4) num tokens: 4029 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-37-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-37-0 new file mode 100644 index 0000000000000000000000000000000000000000..f16196b1a56d01bd40c7c4c89a9a61876bc8a41e --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-37-0 @@ -0,0 +1,1138 @@ +2024-08-29 13:08:37,942 INFO [train.py:1182] (0/4) Training started +2024-08-29 13:08:37,946 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-29 13:08:38,138 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 13:08:38,138 INFO [train.py:1212] (0/4) About to create model +2024-08-29 13:08:39,851 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-29 13:08:40,405 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 13:08:58,973 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-29 13:08:59,464 INFO [train.py:1231] (0/4) Using DDP +2024-08-29 13:09:40,408 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-29 13:09:40,607 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-29 13:09:40,607 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-29 13:09:42,269 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-29 13:09:42,276 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-29 13:09:42,374 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-29 13:09:42,446 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-29 13:09:42,767 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-29 13:09:42,767 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 13:14:18,567 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-29 13:14:21,284 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-29 13:14:38,626 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-29 13:14:39,612 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=256, metric=7.97 vs. limit=7.5 +2024-08-29 13:14:45,671 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 13:15:10,817 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 13:15:12,349 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 13:15:12,368 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-29 13:16:15,202 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2753, pruned_loss=0.05867, ctc_loss=0.107, over 19413.00 frames. 
], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:16:15,203 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 13:16:26,166 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.0927, 2.8748, 3.3499, 2.5743], device='cuda:0') +2024-08-29 13:16:31,408 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 13:16:31,409 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13085MB +2024-08-29 13:24:30,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=172688.0, ans=0.025 +2024-08-29 13:25:31,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=172741.33333333334, ans=0.125 +2024-08-29 13:26:36,724 INFO [train.py:1114] (0/4) Epoch 14, batch 50, loss[loss=0.2105, simple_loss=0.2695, pruned_loss=0.05525, ctc_loss=0.1026, over 19713.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2858, pruned_loss=0.05843, ctc_loss=0.1106, over 844800.35 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:26:37,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.08 vs. limit=22.5 +2024-08-29 13:27:00,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-08-29 13:27:22,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=172901.33333333334, ans=0.125 +2024-08-29 13:30:17,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=172901.33333333334, ans=0.125 +2024-08-29 13:30:45,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0 +2024-08-29 13:30:55,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=172954.66666666666, ans=0.2 +2024-08-29 13:31:58,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=173008.0, ans=0.5 +2024-08-29 13:32:29,770 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.749e+02 1.974e+02 2.504e+02 4.970e+02, threshold=3.948e+02, percent-clipped=4.0 +2024-08-29 13:32:38,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.28 vs. limit=10.0 +2024-08-29 13:32:58,198 INFO [train.py:1114] (0/4) Epoch 14, batch 100, loss[loss=0.2323, simple_loss=0.2886, pruned_loss=0.06317, ctc_loss=0.1243, over 19723.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2882, pruned_loss=0.05994, ctc_loss=0.1132, over 1499143.56 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:33:05,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.38 vs. 
limit=15.0 +2024-08-29 13:34:27,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-08-29 13:34:34,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=173274.66666666666, ans=0.04949747468305833 +2024-08-29 13:34:42,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=173274.66666666666, ans=0.125 +2024-08-29 13:35:53,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.85 vs. limit=10.0 +2024-08-29 13:36:03,007 INFO [train.py:1114] (0/4) Epoch 14, batch 150, loss[loss=0.1995, simple_loss=0.2572, pruned_loss=0.05198, ctc_loss=0.09456, over 19719.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2857, pruned_loss=0.05877, ctc_loss=0.1108, over 2027452.09 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:36:03,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=173381.33333333334, ans=0.125 +2024-08-29 13:36:15,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0 +2024-08-29 13:36:55,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.86 vs. limit=10.0 +2024-08-29 13:37:13,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173541.33333333334, ans=0.125 +2024-08-29 13:37:17,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=173594.66666666666, ans=0.125 +2024-08-29 13:37:19,615 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.730e+02 2.035e+02 2.422e+02 3.683e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-29 13:37:30,504 INFO [train.py:1114] (0/4) Epoch 14, batch 200, loss[loss=0.2796, simple_loss=0.3228, pruned_loss=0.08462, ctc_loss=0.1678, over 18353.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2831, pruned_loss=0.05821, ctc_loss=0.1097, over 2435099.22 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:37:35,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173648.0, ans=0.1 +2024-08-29 13:37:41,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=173648.0, ans=0.125 +2024-08-29 13:37:47,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.60 vs. limit=8.0 +2024-08-29 13:38:46,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=173701.33333333334, ans=0.07 +2024-08-29 13:42:15,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=173861.33333333334, ans=0.125 +2024-08-29 13:42:18,835 INFO [train.py:1114] (0/4) Epoch 14, batch 250, loss[loss=0.2507, simple_loss=0.2993, pruned_loss=0.07243, ctc_loss=0.1431, over 19394.00 frames. 
], tot_loss[loss=0.2221, simple_loss=0.2833, pruned_loss=0.0584, ctc_loss=0.1102, over 2755806.45 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:43:11,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173914.66666666666, ans=0.125 +2024-08-29 13:43:43,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=10.54 vs. limit=15.0 +2024-08-29 13:44:08,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=174074.66666666666, ans=0.0 +2024-08-29 13:44:13,474 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.787e+02 2.022e+02 2.717e+02 4.953e+02, threshold=4.043e+02, percent-clipped=2.0 +2024-08-29 13:44:43,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=174128.0, ans=0.125 +2024-08-29 13:44:46,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174128.0, ans=0.125 +2024-08-29 13:44:52,079 INFO [train.py:1114] (0/4) Epoch 14, batch 300, loss[loss=0.2361, simple_loss=0.2883, pruned_loss=0.06748, ctc_loss=0.1226, over 19521.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2827, pruned_loss=0.05827, ctc_loss=0.1099, over 3001481.83 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:45:17,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=174288.0, ans=0.125 +2024-08-29 13:45:41,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.34 vs. limit=10.0 +2024-08-29 13:45:58,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=174394.66666666666, ans=0.125 +2024-08-29 13:46:13,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.30 vs. limit=22.5 +2024-08-29 13:46:17,854 INFO [train.py:1114] (0/4) Epoch 14, batch 350, loss[loss=0.1933, simple_loss=0.2531, pruned_loss=0.04839, ctc_loss=0.0919, over 19759.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2823, pruned_loss=0.05781, ctc_loss=0.1089, over 3191850.72 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 16.0 +2024-08-29 13:46:18,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=174448.0, ans=0.125 +2024-08-29 13:46:25,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.96 vs. limit=10.0 +2024-08-29 13:46:28,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. 
limit=15.0 +2024-08-29 13:47:29,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=174608.0, ans=0.125 +2024-08-29 13:47:39,426 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.793e+02 2.058e+02 2.658e+02 4.429e+02, threshold=4.116e+02, percent-clipped=3.0 +2024-08-29 13:48:31,273 INFO [train.py:1114] (0/4) Epoch 14, batch 400, loss[loss=0.2258, simple_loss=0.2894, pruned_loss=0.05975, ctc_loss=0.1065, over 19496.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2821, pruned_loss=0.05787, ctc_loss=0.1089, over 3342545.93 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:49:13,711 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.876e-03 +2024-08-29 13:50:10,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=174821.33333333334, ans=0.2 +2024-08-29 13:50:12,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.60 vs. limit=15.0 +2024-08-29 13:50:57,536 INFO [train.py:1114] (0/4) Epoch 14, batch 450, loss[loss=0.2367, simple_loss=0.2999, pruned_loss=0.06201, ctc_loss=0.1237, over 19609.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2829, pruned_loss=0.05829, ctc_loss=0.1099, over 3450915.96 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:51:00,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=174981.33333333334, ans=0.125 +2024-08-29 13:51:11,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175034.66666666666, ans=0.1 +2024-08-29 13:51:26,784 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:51:46,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.41 vs. limit=15.0 +2024-08-29 13:51:50,571 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.713e+02 1.900e+02 2.415e+02 4.159e+02, threshold=3.800e+02, percent-clipped=2.0 +2024-08-29 13:52:14,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=175194.66666666666, ans=0.5 +2024-08-29 13:52:16,290 INFO [train.py:1114] (0/4) Epoch 14, batch 500, loss[loss=0.2462, simple_loss=0.3152, pruned_loss=0.065, ctc_loss=0.118, over 19681.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2823, pruned_loss=0.05801, ctc_loss=0.1094, over 3546652.26 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:52:18,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.57 vs. 
limit=15.0 +2024-08-29 13:52:26,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=175248.0, ans=0.125 +2024-08-29 13:52:47,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=175354.66666666666, ans=0.125 +2024-08-29 13:52:47,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=175354.66666666666, ans=0.125 +2024-08-29 13:52:49,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=175354.66666666666, ans=0.0 +2024-08-29 13:52:54,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=175354.66666666666, ans=0.2 +2024-08-29 13:53:07,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=175408.0, ans=0.125 +2024-08-29 13:53:22,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.15 vs. limit=22.5 +2024-08-29 13:53:23,936 INFO [train.py:1114] (0/4) Epoch 14, batch 550, loss[loss=0.2481, simple_loss=0.3075, pruned_loss=0.0687, ctc_loss=0.1283, over 19273.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2826, pruned_loss=0.058, ctc_loss=0.1095, over 3608532.80 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:53:24,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=175514.66666666666, ans=0.025 +2024-08-29 13:53:36,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=175568.0, ans=0.125 +2024-08-29 13:53:36,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=175568.0, ans=0.0 +2024-08-29 13:54:06,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=175674.66666666666, ans=0.0 +2024-08-29 13:54:10,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=175674.66666666666, ans=0.2 +2024-08-29 13:54:18,067 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.725e+02 1.963e+02 2.348e+02 4.063e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-29 13:54:20,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=175728.0, ans=0.025 +2024-08-29 13:54:24,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=175728.0, ans=0.125 +2024-08-29 13:54:27,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=175781.33333333334, ans=0.2 +2024-08-29 13:54:28,215 INFO [train.py:1114] (0/4) Epoch 14, batch 600, loss[loss=0.2468, simple_loss=0.3053, pruned_loss=0.06894, ctc_loss=0.126, over 19370.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2832, pruned_loss=0.05814, ctc_loss=0.1097, over 3666425.54 frames. 
], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:54:42,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=175834.66666666666, ans=0.1 +2024-08-29 13:55:03,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0 +2024-08-29 13:55:30,840 INFO [train.py:1114] (0/4) Epoch 14, batch 650, loss[loss=0.2249, simple_loss=0.2942, pruned_loss=0.05575, ctc_loss=0.1105, over 19762.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2827, pruned_loss=0.05783, ctc_loss=0.1091, over 3716649.43 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:55:33,385 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:55:40,669 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:55:46,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.65 vs. limit=15.0 +2024-08-29 13:55:49,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.33 vs. limit=15.0 +2024-08-29 13:55:59,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=176154.66666666666, ans=0.125 +2024-08-29 13:56:08,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=176154.66666666666, ans=0.0 +2024-08-29 13:56:13,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.98 vs. limit=15.0 +2024-08-29 13:56:14,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176208.0, ans=0.125 +2024-08-29 13:56:23,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=176261.33333333334, ans=0.125 +2024-08-29 13:56:24,629 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.743e+02 2.058e+02 2.560e+02 4.338e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-29 13:56:34,647 INFO [train.py:1114] (0/4) Epoch 14, batch 700, loss[loss=0.1954, simple_loss=0.2666, pruned_loss=0.04456, ctc_loss=0.08781, over 19729.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2832, pruned_loss=0.05802, ctc_loss=0.1096, over 3748596.56 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:56:34,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=176314.66666666666, ans=0.125 +2024-08-29 13:56:36,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=176314.66666666666, ans=0.2 +2024-08-29 13:56:51,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=176368.0, ans=0.5 +2024-08-29 13:57:42,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.20 vs. 
limit=12.0 +2024-08-29 13:58:07,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176528.0, ans=0.125 +2024-08-29 13:58:12,866 INFO [train.py:1114] (0/4) Epoch 14, batch 750, loss[loss=0.2129, simple_loss=0.2794, pruned_loss=0.05363, ctc_loss=0.09797, over 19505.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2824, pruned_loss=0.0577, ctc_loss=0.1088, over 3775051.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:58:18,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=15.0 +2024-08-29 13:58:19,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176581.33333333334, ans=0.1 +2024-08-29 13:58:21,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=176581.33333333334, ans=0.025 +2024-08-29 13:58:27,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=176634.66666666666, ans=0.0 +2024-08-29 13:58:41,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176688.0, ans=0.1 +2024-08-29 13:58:44,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.78 vs. limit=15.0 +2024-08-29 13:58:58,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=176741.33333333334, ans=0.125 +2024-08-29 13:59:04,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.96 vs. limit=15.0 +2024-08-29 13:59:06,508 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.910e+02 2.277e+02 2.884e+02 4.780e+02, threshold=4.554e+02, percent-clipped=3.0 +2024-08-29 13:59:06,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176794.66666666666, ans=0.0 +2024-08-29 13:59:26,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=176794.66666666666, ans=0.025 +2024-08-29 13:59:28,747 INFO [train.py:1114] (0/4) Epoch 14, batch 800, loss[loss=0.1867, simple_loss=0.2534, pruned_loss=0.04332, ctc_loss=0.08361, over 19409.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2819, pruned_loss=0.05738, ctc_loss=0.1082, over 3795596.12 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:59:49,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=176901.33333333334, ans=0.05 +2024-08-29 14:01:16,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=177008.0, ans=0.025 +2024-08-29 14:01:17,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.59 vs. limit=10.0 +2024-08-29 14:01:18,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. 
limit=15.0 +2024-08-29 14:02:31,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177008.0, ans=0.125 +2024-08-29 14:02:39,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.22 vs. limit=15.0 +2024-08-29 14:02:49,613 INFO [train.py:1114] (0/4) Epoch 14, batch 850, loss[loss=0.2317, simple_loss=0.2998, pruned_loss=0.05868, ctc_loss=0.1153, over 19642.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2817, pruned_loss=0.0575, ctc_loss=0.1084, over 3814865.22 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:03:03,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0 +2024-08-29 14:03:09,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177168.0, ans=0.0 +2024-08-29 14:03:10,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=12.0 +2024-08-29 14:03:13,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.21 vs. limit=6.0 +2024-08-29 14:03:15,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.96 vs. limit=22.5 +2024-08-29 14:03:40,321 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.703e+02 1.970e+02 2.385e+02 3.831e+02, threshold=3.939e+02, percent-clipped=0.0 +2024-08-29 14:03:49,897 INFO [train.py:1114] (0/4) Epoch 14, batch 900, loss[loss=0.2209, simple_loss=0.2741, pruned_loss=0.06039, ctc_loss=0.1171, over 19784.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.282, pruned_loss=0.05769, ctc_loss=0.1086, over 3818440.25 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:03:50,635 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.26 vs. limit=22.5 +2024-08-29 14:04:04,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177434.66666666666, ans=0.125 +2024-08-29 14:04:17,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=177488.0, ans=0.125 +2024-08-29 14:04:22,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.08 vs. limit=12.0 +2024-08-29 14:04:26,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177541.33333333334, ans=0.1 +2024-08-29 14:04:28,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-29 14:04:40,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=177594.66666666666, ans=0.0 +2024-08-29 14:04:52,321 INFO [train.py:1114] (0/4) Epoch 14, batch 950, loss[loss=0.2047, simple_loss=0.2673, pruned_loss=0.05199, ctc_loss=0.09517, over 19503.00 frames. 
], tot_loss[loss=0.2208, simple_loss=0.2823, pruned_loss=0.05788, ctc_loss=0.1089, over 3820636.09 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:05:06,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-29 14:05:09,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-29 14:05:14,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-29 14:05:30,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=177754.66666666666, ans=0.0 +2024-08-29 14:06:00,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=177808.0, ans=0.0 +2024-08-29 14:06:03,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.36 vs. limit=15.0 +2024-08-29 14:06:19,921 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.335e+02 1.740e+02 1.996e+02 2.581e+02 3.979e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-29 14:06:28,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177861.33333333334, ans=0.125 +2024-08-29 14:07:04,890 INFO [train.py:1114] (0/4) Epoch 14, batch 1000, loss[loss=0.1979, simple_loss=0.2589, pruned_loss=0.05, ctc_loss=0.09211, over 19854.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2829, pruned_loss=0.05839, ctc_loss=0.1098, over 3816509.39 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:07:06,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=177914.66666666666, ans=0.09899494936611666 +2024-08-29 14:08:06,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1 +2024-08-29 14:08:08,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1 +2024-08-29 14:08:26,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=178021.33333333334, ans=0.2 +2024-08-29 14:08:48,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178128.0, ans=0.0 +2024-08-29 14:08:56,377 INFO [train.py:1114] (0/4) Epoch 14, batch 1050, loss[loss=0.2091, simple_loss=0.2817, pruned_loss=0.04913, ctc_loss=0.09538, over 19845.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2819, pruned_loss=0.05771, ctc_loss=0.1086, over 3823872.80 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:09:05,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178181.33333333334, ans=0.0 +2024-08-29 14:09:46,665 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.811e+02 2.215e+02 2.668e+02 4.320e+02, threshold=4.429e+02, percent-clipped=1.0 +2024-08-29 14:09:51,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=178394.66666666666, ans=0.125 +2024-08-29 14:10:24,276 INFO [train.py:1114] (0/4) Epoch 14, batch 1100, loss[loss=0.1921, simple_loss=0.2624, pruned_loss=0.04454, ctc_loss=0.08194, over 19612.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2812, pruned_loss=0.05724, ctc_loss=0.1079, over 3831458.89 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:14:46,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=178608.0, ans=0.0 +2024-08-29 14:14:54,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.19 vs. limit=12.0 +2024-08-29 14:16:18,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=178608.0, ans=0.1 +2024-08-29 14:17:48,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178661.33333333334, ans=0.1 +2024-08-29 14:19:15,474 INFO [train.py:1114] (0/4) Epoch 14, batch 1150, loss[loss=0.2197, simple_loss=0.2785, pruned_loss=0.05788, ctc_loss=0.1127, over 19593.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2812, pruned_loss=0.05741, ctc_loss=0.1082, over 3828830.16 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:19:20,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=178714.66666666666, ans=0.0 +2024-08-29 14:19:22,864 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=12.0 +2024-08-29 14:19:29,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=178714.66666666666, ans=0.125 +2024-08-29 14:19:45,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=178768.0, ans=0.0 +2024-08-29 14:20:36,919 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:22:13,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.701e+02 1.876e+02 2.352e+02 3.362e+02, threshold=3.753e+02, percent-clipped=0.0 +2024-08-29 14:22:33,798 INFO [train.py:1114] (0/4) Epoch 14, batch 1200, loss[loss=0.2318, simple_loss=0.2973, pruned_loss=0.06034, ctc_loss=0.114, over 19839.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2828, pruned_loss=0.05804, ctc_loss=0.1095, over 3825820.21 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:22:47,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178981.33333333334, ans=0.1 +2024-08-29 14:23:17,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=178981.33333333334, ans=0.07 +2024-08-29 14:23:48,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-29 14:23:48,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-29 14:23:51,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.04 vs. limit=15.0 +2024-08-29 14:24:24,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179088.0, ans=0.125 +2024-08-29 14:24:28,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179088.0, ans=0.125 +2024-08-29 14:24:29,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=179088.0, ans=0.0 +2024-08-29 14:25:06,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.51 vs. limit=10.0 +2024-08-29 14:26:20,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=179194.66666666666, ans=0.0 +2024-08-29 14:29:54,001 INFO [train.py:1114] (0/4) Epoch 14, batch 1250, loss[loss=0.2112, simple_loss=0.2843, pruned_loss=0.0502, ctc_loss=0.09421, over 19531.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2829, pruned_loss=0.05771, ctc_loss=0.1086, over 3843334.00 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:30:08,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.33 vs. 
limit=15.0 +2024-08-29 14:31:51,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=179248.0, ans=0.125 +2024-08-29 14:31:55,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179301.33333333334, ans=0.125 +2024-08-29 14:32:24,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179408.0, ans=0.125 +2024-08-29 14:32:24,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179408.0, ans=0.04949747468305833 +2024-08-29 14:32:41,060 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.718e+02 2.120e+02 2.679e+02 4.271e+02, threshold=4.240e+02, percent-clipped=3.0 +2024-08-29 14:32:44,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179461.33333333334, ans=0.125 +2024-08-29 14:33:09,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-29 14:33:10,231 INFO [train.py:1114] (0/4) Epoch 14, batch 1300, loss[loss=0.2381, simple_loss=0.2987, pruned_loss=0.06345, ctc_loss=0.1266, over 18833.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2819, pruned_loss=0.05703, ctc_loss=0.1074, over 3847330.15 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:34:11,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=179568.0, ans=0.025 +2024-08-29 14:34:12,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179568.0, ans=0.125 +2024-08-29 14:35:12,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=179728.0, ans=0.0 +2024-08-29 14:35:41,876 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.33 vs. limit=22.5 +2024-08-29 14:35:42,417 INFO [train.py:1114] (0/4) Epoch 14, batch 1350, loss[loss=0.2201, simple_loss=0.278, pruned_loss=0.05916, ctc_loss=0.1096, over 19770.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2818, pruned_loss=0.05714, ctc_loss=0.1075, over 3858687.41 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:35:44,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.95 vs. 
limit=15.0 +2024-08-29 14:36:07,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179834.66666666666, ans=0.1 +2024-08-29 14:36:11,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=179834.66666666666, ans=0.2 +2024-08-29 14:36:20,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=179888.0, ans=0.0 +2024-08-29 14:39:13,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=179994.66666666666, ans=0.0 +2024-08-29 14:40:29,505 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.654e+02 1.881e+02 2.431e+02 4.376e+02, threshold=3.761e+02, percent-clipped=1.0 +2024-08-29 14:41:36,157 INFO [train.py:1114] (0/4) Epoch 14, batch 1400, loss[loss=0.1883, simple_loss=0.2586, pruned_loss=0.0425, ctc_loss=0.08245, over 19674.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.282, pruned_loss=0.05728, ctc_loss=0.1076, over 3864940.55 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:41:52,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=180101.33333333334, ans=0.0 +2024-08-29 14:41:54,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.82 vs. limit=15.0 +2024-08-29 14:42:09,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.26 vs. limit=6.0 +2024-08-29 14:42:19,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=180208.0, ans=0.125 +2024-08-29 14:42:39,846 INFO [train.py:1114] (0/4) Epoch 14, batch 1450, loss[loss=0.2351, simple_loss=0.2945, pruned_loss=0.06375, ctc_loss=0.1203, over 19668.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2823, pruned_loss=0.05716, ctc_loss=0.1075, over 3862868.32 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:43:57,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180421.33333333334, ans=0.125 +2024-08-29 14:44:04,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=180421.33333333334, ans=0.0 +2024-08-29 14:44:05,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180474.66666666666, ans=0.125 +2024-08-29 14:44:05,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=180474.66666666666, ans=10.0 +2024-08-29 14:44:19,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.699e+02 1.929e+02 2.254e+02 4.469e+02, threshold=3.859e+02, percent-clipped=1.0 +2024-08-29 14:44:20,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=180528.0, ans=0.0 +2024-08-29 14:45:06,833 INFO [train.py:1114] (0/4) Epoch 14, batch 1500, loss[loss=0.2157, simple_loss=0.2827, pruned_loss=0.05425, ctc_loss=0.1006, over 19588.00 frames. 
], tot_loss[loss=0.2201, simple_loss=0.2825, pruned_loss=0.0573, ctc_loss=0.1078, over 3861664.46 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:45:10,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.62 vs. limit=12.0 +2024-08-29 14:45:23,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=180634.66666666666, ans=0.0 +2024-08-29 14:45:48,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180634.66666666666, ans=0.0 +2024-08-29 14:45:57,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180688.0, ans=0.1 +2024-08-29 14:46:11,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180741.33333333334, ans=0.1 +2024-08-29 14:46:12,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.03 vs. limit=22.5 +2024-08-29 14:46:16,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.44 vs. limit=15.0 +2024-08-29 14:46:17,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.44 vs. limit=15.0 +2024-08-29 14:46:25,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180794.66666666666, ans=0.1 +2024-08-29 14:46:27,478 INFO [train.py:1114] (0/4) Epoch 14, batch 1550, loss[loss=0.2477, simple_loss=0.3021, pruned_loss=0.06973, ctc_loss=0.1347, over 19602.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2824, pruned_loss=0.05759, ctc_loss=0.1086, over 3847541.67 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:46:42,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=180901.33333333334, ans=0.1 +2024-08-29 14:48:37,430 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.702e+02 2.011e+02 2.397e+02 3.479e+02, threshold=4.023e+02, percent-clipped=0.0 +2024-08-29 14:48:41,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=181061.33333333334, ans=0.0 +2024-08-29 14:48:45,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181061.33333333334, ans=0.125 +2024-08-29 14:48:45,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.20 vs. limit=15.0 +2024-08-29 14:48:47,150 INFO [train.py:1114] (0/4) Epoch 14, batch 1600, loss[loss=0.2003, simple_loss=0.2737, pruned_loss=0.04675, ctc_loss=0.0835, over 19828.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2816, pruned_loss=0.05708, ctc_loss=0.1076, over 3837473.80 frames. 
], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:50:21,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=181274.66666666666, ans=0.025 +2024-08-29 14:51:10,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=181328.0, ans=0.5 +2024-08-29 14:51:29,835 INFO [train.py:1114] (0/4) Epoch 14, batch 1650, loss[loss=0.2255, simple_loss=0.2958, pruned_loss=0.0557, ctc_loss=0.1098, over 19654.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2817, pruned_loss=0.05731, ctc_loss=0.108, over 3833134.13 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:51:32,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181381.33333333334, ans=0.0 +2024-08-29 14:51:43,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=181381.33333333334, ans=0.0 +2024-08-29 14:51:44,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=181434.66666666666, ans=0.0 +2024-08-29 14:51:45,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=181434.66666666666, ans=0.2 +2024-08-29 14:52:11,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=181488.0, ans=0.125 +2024-08-29 14:52:18,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=181541.33333333334, ans=0.125 +2024-08-29 14:52:24,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-29 14:52:28,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.808e+02 2.247e+02 2.720e+02 5.029e+02, threshold=4.494e+02, percent-clipped=3.0 +2024-08-29 14:52:32,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=181594.66666666666, ans=0.0 +2024-08-29 14:52:38,136 INFO [train.py:1114] (0/4) Epoch 14, batch 1700, loss[loss=0.1751, simple_loss=0.2403, pruned_loss=0.04029, ctc_loss=0.07342, over 19703.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2815, pruned_loss=0.05704, ctc_loss=0.1074, over 3847514.77 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:52:48,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=181701.33333333334, ans=10.0 +2024-08-29 14:52:48,649 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:52:53,899 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.91 vs. limit=15.0 +2024-08-29 14:53:46,198 INFO [train.py:1114] (0/4) Epoch 14, batch 1750, loss[loss=0.1844, simple_loss=0.2383, pruned_loss=0.04689, ctc_loss=0.09163, over 19617.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2808, pruned_loss=0.05664, ctc_loss=0.1068, over 3852353.23 frames. 
], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:53:46,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=181914.66666666666, ans=0.125 +2024-08-29 14:53:48,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.72 vs. limit=22.5 +2024-08-29 14:54:11,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=182021.33333333334, ans=0.95 +2024-08-29 14:54:36,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=182021.33333333334, ans=0.125 +2024-08-29 14:56:19,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=15.0 +2024-08-29 14:56:23,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=182128.0, ans=0.0 +2024-08-29 14:56:25,492 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.791e+02 2.085e+02 2.712e+02 5.021e+02, threshold=4.170e+02, percent-clipped=2.0 +2024-08-29 14:56:29,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=182128.0, ans=0.0 +2024-08-29 14:56:29,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.51 vs. limit=22.5 +2024-08-29 14:56:34,698 INFO [train.py:1114] (0/4) Epoch 14, batch 1800, loss[loss=0.2055, simple_loss=0.2682, pruned_loss=0.05217, ctc_loss=0.09629, over 19592.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2813, pruned_loss=0.05681, ctc_loss=0.1071, over 3852353.41 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:57:15,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.36 vs. limit=15.0 +2024-08-29 14:57:16,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=182181.33333333334, ans=0.2 +2024-08-29 14:57:17,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.09 vs. limit=15.0 +2024-08-29 14:58:02,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=182394.66666666666, ans=0.0 +2024-08-29 14:58:07,414 INFO [train.py:1114] (0/4) Epoch 14, batch 1850, loss[loss=0.2178, simple_loss=0.2869, pruned_loss=0.05453, ctc_loss=0.09922, over 19596.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2812, pruned_loss=0.05664, ctc_loss=0.1066, over 3854748.92 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:59:37,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=182501.33333333334, ans=0.125 +2024-08-29 14:59:39,953 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.95 vs. 
limit=22.5 +2024-08-29 15:00:37,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.91 vs. limit=22.5 +2024-08-29 15:03:29,637 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.679e+02 1.934e+02 2.278e+02 6.084e+02, threshold=3.868e+02, percent-clipped=1.0 +2024-08-29 15:03:40,818 INFO [train.py:1114] (0/4) Epoch 14, batch 1900, loss[loss=0.2196, simple_loss=0.2991, pruned_loss=0.05018, ctc_loss=0.0994, over 19628.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2817, pruned_loss=0.05675, ctc_loss=0.1067, over 3861097.39 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:03:53,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182768.0, ans=0.1 +2024-08-29 15:03:54,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=182768.0, ans=0.04949747468305833 +2024-08-29 15:04:23,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=182768.0, ans=0.0 +2024-08-29 15:04:23,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.09 vs. limit=22.5 +2024-08-29 15:04:43,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=182874.66666666666, ans=15.0 +2024-08-29 15:05:00,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=182928.0, ans=0.125 +2024-08-29 15:05:18,969 INFO [train.py:1114] (0/4) Epoch 14, batch 1950, loss[loss=0.1912, simple_loss=0.2613, pruned_loss=0.04436, ctc_loss=0.08118, over 19583.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2828, pruned_loss=0.05689, ctc_loss=0.107, over 3870300.04 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:05:42,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183088.0, ans=0.1 +2024-08-29 15:05:45,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=183088.0, ans=0.0 +2024-08-29 15:05:45,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.27 vs. limit=12.0 +2024-08-29 15:05:50,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.50 vs. 
limit=15.0 +2024-08-29 15:05:53,334 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:06:03,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=183194.66666666666, ans=0.05 +2024-08-29 15:06:06,639 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.683e+02 1.939e+02 2.319e+02 3.642e+02, threshold=3.877e+02, percent-clipped=0.0 +2024-08-29 15:06:41,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=183194.66666666666, ans=0.04949747468305833 +2024-08-29 15:06:48,421 INFO [train.py:1114] (0/4) Epoch 14, batch 2000, loss[loss=0.1879, simple_loss=0.2501, pruned_loss=0.04524, ctc_loss=0.08805, over 19643.00 frames. ], tot_loss[loss=0.22, simple_loss=0.283, pruned_loss=0.05703, ctc_loss=0.1074, over 3854479.09 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:07:01,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.17 vs. limit=22.5 +2024-08-29 15:07:02,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.82 vs. limit=10.0 +2024-08-29 15:07:21,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183408.0, ans=0.1 +2024-08-29 15:07:45,830 INFO [train.py:1114] (0/4) Epoch 14, batch 2050, loss[loss=0.1983, simple_loss=0.2545, pruned_loss=0.05182, ctc_loss=0.09622, over 19725.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2823, pruned_loss=0.05726, ctc_loss=0.1077, over 3850346.95 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:07:49,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183514.66666666666, ans=0.125 +2024-08-29 15:08:41,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-08-29 15:09:31,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=183674.66666666666, ans=0.125 +2024-08-29 15:09:39,971 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.749e+02 1.987e+02 2.455e+02 3.413e+02, threshold=3.973e+02, percent-clipped=0.0 +2024-08-29 15:09:48,906 INFO [train.py:1114] (0/4) Epoch 14, batch 2100, loss[loss=0.2037, simple_loss=0.2715, pruned_loss=0.04929, ctc_loss=0.09321, over 19758.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2815, pruned_loss=0.05701, ctc_loss=0.1074, over 3857367.71 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:09:51,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183781.33333333334, ans=0.125 +2024-08-29 15:09:58,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.02 vs. limit=6.0 +2024-08-29 15:10:25,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.27 vs. 
limit=15.0 +2024-08-29 15:10:49,071 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:10:57,777 INFO [train.py:1114] (0/4) Epoch 14, batch 2150, loss[loss=0.2131, simple_loss=0.2802, pruned_loss=0.05318, ctc_loss=0.09924, over 19570.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2814, pruned_loss=0.05717, ctc_loss=0.1076, over 3868244.08 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:10:58,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.85 vs. limit=15.0 +2024-08-29 15:11:05,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=184048.0, ans=0.2 +2024-08-29 15:11:24,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184154.66666666666, ans=0.1 +2024-08-29 15:11:33,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=184208.0, ans=0.0 +2024-08-29 15:11:35,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184208.0, ans=0.1 +2024-08-29 15:11:42,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=184261.33333333334, ans=0.125 +2024-08-29 15:11:44,635 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.399e+02 1.765e+02 2.209e+02 2.742e+02 6.061e+02, threshold=4.418e+02, percent-clipped=6.0 +2024-08-29 15:12:09,370 INFO [train.py:1114] (0/4) Epoch 14, batch 2200, loss[loss=0.2306, simple_loss=0.2943, pruned_loss=0.06077, ctc_loss=0.1134, over 19567.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2811, pruned_loss=0.05689, ctc_loss=0.107, over 3866837.54 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:12:10,723 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:12:17,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=184314.66666666666, ans=0.0 +2024-08-29 15:12:22,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184368.0, ans=0.1 +2024-08-29 15:13:08,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.06 vs. limit=22.5 +2024-08-29 15:13:47,040 INFO [train.py:1114] (0/4) Epoch 14, batch 2250, loss[loss=0.2197, simple_loss=0.2901, pruned_loss=0.05351, ctc_loss=0.1059, over 19595.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2814, pruned_loss=0.0569, ctc_loss=0.1069, over 3866495.61 frames. 
], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:14:04,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=184634.66666666666, ans=0.0 +2024-08-29 15:14:09,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=184688.0, ans=0.125 +2024-08-29 15:14:13,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=184688.0, ans=0.125 +2024-08-29 15:14:21,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=184741.33333333334, ans=0.125 +2024-08-29 15:14:28,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=184741.33333333334, ans=0.05 +2024-08-29 15:14:30,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184741.33333333334, ans=0.125 +2024-08-29 15:14:34,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.796e+02 2.116e+02 2.512e+02 3.767e+02, threshold=4.231e+02, percent-clipped=0.0 +2024-08-29 15:14:43,286 INFO [train.py:1114] (0/4) Epoch 14, batch 2300, loss[loss=0.2066, simple_loss=0.2635, pruned_loss=0.05529, ctc_loss=0.09767, over 19508.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2809, pruned_loss=0.05729, ctc_loss=0.1076, over 3860452.97 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:14:43,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=184848.0, ans=0.125 +2024-08-29 15:14:46,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=184848.0, ans=0.125 +2024-08-29 15:15:18,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=185008.0, ans=0.0 +2024-08-29 15:15:40,918 INFO [train.py:1114] (0/4) Epoch 14, batch 2350, loss[loss=0.2539, simple_loss=0.3058, pruned_loss=0.07385, ctc_loss=0.1359, over 19697.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2806, pruned_loss=0.05712, ctc_loss=0.1071, over 3864029.08 frames. 
], batch size: 63, lr: 1.05e-02, grad_scale: 64.0 +2024-08-29 15:15:49,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185114.66666666666, ans=0.1 +2024-08-29 15:15:55,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=185168.0, ans=0.125 +2024-08-29 15:16:00,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=185168.0, ans=0.125 +2024-08-29 15:16:07,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=185221.33333333334, ans=0.0 +2024-08-29 15:16:07,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185221.33333333334, ans=0.1 +2024-08-29 15:16:13,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=185274.66666666666, ans=0.125 +2024-08-29 15:16:28,749 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.724e+02 2.017e+02 2.647e+02 4.792e+02, threshold=4.034e+02, percent-clipped=3.0 +2024-08-29 15:16:30,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=185328.0, ans=0.2 +2024-08-29 15:16:31,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=185328.0, ans=0.2 +2024-08-29 15:16:36,506 INFO [train.py:1114] (0/4) Epoch 14, batch 2400, loss[loss=0.2144, simple_loss=0.2835, pruned_loss=0.05247, ctc_loss=0.1011, over 19439.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2825, pruned_loss=0.05767, ctc_loss=0.1081, over 3858659.76 frames. ], batch size: 67, lr: 1.05e-02, grad_scale: 32.0 +2024-08-29 15:16:50,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=185381.33333333334, ans=0.125 +2024-08-29 15:17:13,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=185541.33333333334, ans=0.125 +2024-08-29 15:17:38,563 INFO [train.py:1114] (0/4) Epoch 14, batch 2450, loss[loss=0.2832, simple_loss=0.3165, pruned_loss=0.0906, ctc_loss=0.1717, over 13414.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2865, pruned_loss=0.06066, ctc_loss=0.1142, over 3733724.63 frames. ], batch size: 143, lr: 1.05e-02, grad_scale: 32.0 +2024-08-29 15:18:01,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=185754.66666666666, ans=0.0 +2024-08-29 15:18:05,308 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.65 vs. limit=15.0 +2024-08-29 15:18:22,141 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-14.pt +2024-08-29 15:19:09,367 INFO [train.py:1114] (0/4) Epoch 15, batch 0, loss[loss=0.2255, simple_loss=0.2792, pruned_loss=0.06351, ctc_loss=0.1121, over 19411.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2792, pruned_loss=0.06351, ctc_loss=0.1121, over 19411.00 frames. 
], batch size: 48, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:19:09,369 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 15:19:20,877 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1908, simple_loss=0.2785, pruned_loss=0.03825, ctc_loss=0.06651, over 944034.00 frames. +2024-08-29 15:19:20,878 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13465MB +2024-08-29 15:19:22,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=185856.0, ans=0.125 +2024-08-29 15:19:25,781 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.942e+02 2.136e+02 2.424e+02 3.799e+02, threshold=4.272e+02, percent-clipped=0.0 +2024-08-29 15:19:30,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=185856.0, ans=0.025 +2024-08-29 15:19:36,102 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:19:37,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185909.33333333334, ans=0.125 +2024-08-29 15:19:38,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=185909.33333333334, ans=0.0 +2024-08-29 15:19:51,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.55 vs. limit=15.0 +2024-08-29 15:20:13,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=186069.33333333334, ans=0.125 +2024-08-29 15:20:21,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=186069.33333333334, ans=22.5 +2024-08-29 15:20:25,223 INFO [train.py:1114] (0/4) Epoch 15, batch 50, loss[loss=0.1992, simple_loss=0.2638, pruned_loss=0.0491, ctc_loss=0.09092, over 19710.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2847, pruned_loss=0.05854, ctc_loss=0.1104, over 843490.78 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:20:25,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=186122.66666666666, ans=0.07 +2024-08-29 15:20:34,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=186122.66666666666, ans=0.125 +2024-08-29 15:20:59,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=186229.33333333334, ans=0.0 +2024-08-29 15:21:11,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=186282.66666666666, ans=0.125 +2024-08-29 15:21:25,440 INFO [train.py:1114] (0/4) Epoch 15, batch 100, loss[loss=0.2085, simple_loss=0.267, pruned_loss=0.05381, ctc_loss=0.1061, over 19716.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2852, pruned_loss=0.05789, ctc_loss=0.1099, over 1497089.71 frames. 
], batch size: 51, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:21:25,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=186389.33333333334, ans=0.05 +2024-08-29 15:21:30,090 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.739e+02 1.952e+02 2.450e+02 4.288e+02, threshold=3.904e+02, percent-clipped=1.0 +2024-08-29 15:21:30,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=186389.33333333334, ans=0.125 +2024-08-29 15:21:43,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=186442.66666666666, ans=0.2 +2024-08-29 15:22:00,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=186496.0, ans=0.125 +2024-08-29 15:22:04,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=186549.33333333334, ans=0.0 +2024-08-29 15:22:10,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.96 vs. limit=15.0 +2024-08-29 15:22:29,380 INFO [train.py:1114] (0/4) Epoch 15, batch 150, loss[loss=0.2449, simple_loss=0.2822, pruned_loss=0.07664, ctc_loss=0.1355, over 19727.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2829, pruned_loss=0.0573, ctc_loss=0.108, over 2025845.62 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:22:33,935 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.09 vs. limit=15.0 +2024-08-29 15:23:02,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.07 vs. limit=15.0 +2024-08-29 15:23:12,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=186816.0, ans=0.125 +2024-08-29 15:23:22,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.19 vs. limit=15.0 +2024-08-29 15:23:28,621 INFO [train.py:1114] (0/4) Epoch 15, batch 200, loss[loss=0.2158, simple_loss=0.2874, pruned_loss=0.05199, ctc_loss=0.1005, over 18056.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2807, pruned_loss=0.05648, ctc_loss=0.1064, over 2434205.28 frames. 
], batch size: 85, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:23:44,488 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.690e+02 2.002e+02 2.433e+02 3.884e+02, threshold=4.003e+02, percent-clipped=0.0
+2024-08-29 15:24:05,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=187029.33333333334, ans=0.125
+2024-08-29 15:24:10,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=187029.33333333334, ans=0.1
+2024-08-29 15:24:18,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187082.66666666666, ans=0.0
+2024-08-29 15:24:59,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=187189.33333333334, ans=0.125
+2024-08-29 15:25:01,156 INFO [train.py:1114] (0/4) Epoch 15, batch 250, loss[loss=0.2387, simple_loss=0.3013, pruned_loss=0.06471, ctc_loss=0.1166, over 19445.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2813, pruned_loss=0.05662, ctc_loss=0.1067, over 2754227.35 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:25:06,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.14 vs. limit=6.0
+2024-08-29 15:25:16,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=187242.66666666666, ans=0.125
+2024-08-29 15:25:16,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.79 vs. limit=15.0
+2024-08-29 15:26:33,415 INFO [train.py:1114] (0/4) Epoch 15, batch 300, loss[loss=0.2452, simple_loss=0.3037, pruned_loss=0.06892, ctc_loss=0.1224, over 19512.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2803, pruned_loss=0.05621, ctc_loss=0.1058, over 2999797.41 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:26:37,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=187456.0, ans=0.125
+2024-08-29 15:26:38,059 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.706e+02 2.088e+02 2.592e+02 3.748e+02, threshold=4.177e+02, percent-clipped=0.0
+2024-08-29 15:26:39,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=187456.0, ans=0.0
+2024-08-29 15:26:43,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0
+2024-08-29 15:27:34,910 INFO [train.py:1114] (0/4) Epoch 15, batch 350, loss[loss=0.2171, simple_loss=0.2686, pruned_loss=0.06009, ctc_loss=0.1134, over 19746.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2805, pruned_loss=0.05604, ctc_loss=0.1052, over 3189316.06 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 16.0
+2024-08-29 15:27:37,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=187722.66666666666, ans=0.09899494936611666
+2024-08-29 15:27:39,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187722.66666666666, ans=0.125
+2024-08-29 15:28:12,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=187829.33333333334, ans=0.125
+2024-08-29 15:28:27,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=187936.0, ans=0.125
+2024-08-29 15:28:38,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=187989.33333333334, ans=0.025
+2024-08-29 15:28:38,929 INFO [train.py:1114] (0/4) Epoch 15, batch 400, loss[loss=0.2058, simple_loss=0.2807, pruned_loss=0.04741, ctc_loss=0.09023, over 19497.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.28, pruned_loss=0.05589, ctc_loss=0.105, over 3342205.60 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:28:44,504 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.706e+02 2.043e+02 2.587e+02 5.210e+02, threshold=4.085e+02, percent-clipped=2.0
+2024-08-29 15:29:23,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=188042.66666666666, ans=0.02
+2024-08-29 15:29:39,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.07 vs. limit=15.0
+2024-08-29 15:29:46,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188149.33333333334, ans=0.125
+2024-08-29 15:30:00,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=188202.66666666666, ans=0.0
+2024-08-29 15:30:07,909 INFO [train.py:1114] (0/4) Epoch 15, batch 450, loss[loss=0.197, simple_loss=0.2794, pruned_loss=0.04149, ctc_loss=0.07894, over 19609.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2804, pruned_loss=0.05593, ctc_loss=0.1055, over 3449783.93 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:30:15,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188256.0, ans=0.125
+2024-08-29 15:30:19,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.85 vs. limit=15.0
+2024-08-29 15:30:24,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=15.0
+2024-08-29 15:30:39,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=188362.66666666666, ans=0.2
+2024-08-29 15:30:41,231 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:31:09,327 INFO [train.py:1114] (0/4) Epoch 15, batch 500, loss[loss=0.2263, simple_loss=0.2972, pruned_loss=0.05772, ctc_loss=0.0998, over 19712.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2798, pruned_loss=0.05572, ctc_loss=0.1049, over 3544980.18 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:31:14,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=188522.66666666666, ans=0.5
+2024-08-29 15:31:15,126 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.681e+02 1.897e+02 2.177e+02 4.545e+02, threshold=3.794e+02, percent-clipped=1.0
+2024-08-29 15:31:15,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=188522.66666666666, ans=0.2
+2024-08-29 15:31:16,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=188522.66666666666, ans=0.125
+2024-08-29 15:31:20,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=188576.0, ans=0.125
+2024-08-29 15:31:26,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=188576.0, ans=0.0
+2024-08-29 15:31:32,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=188629.33333333334, ans=0.0
+2024-08-29 15:31:32,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=188629.33333333334, ans=0.0
+2024-08-29 15:32:34,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=188682.66666666666, ans=0.125
+2024-08-29 15:32:42,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=188682.66666666666, ans=15.0
+2024-08-29 15:32:46,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=188736.0, ans=0.125
+2024-08-29 15:32:47,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.58 vs. limit=15.0
+2024-08-29 15:32:47,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.66 vs. limit=22.5
+2024-08-29 15:32:59,021 INFO [train.py:1114] (0/4) Epoch 15, batch 550, loss[loss=0.24, simple_loss=0.2972, pruned_loss=0.06659, ctc_loss=0.1241, over 19249.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.28, pruned_loss=0.05585, ctc_loss=0.1053, over 3605052.13 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:33:03,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=188789.33333333334, ans=0.125
+2024-08-29 15:33:43,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.35 vs. limit=10.0
+2024-08-29 15:33:50,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188842.66666666666, ans=0.125
+2024-08-29 15:33:51,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=188842.66666666666, ans=0.02
+2024-08-29 15:33:51,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=188842.66666666666, ans=0.125
+2024-08-29 15:33:54,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188896.0, ans=0.125
+2024-08-29 15:33:59,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188896.0, ans=0.125
+2024-08-29 15:34:05,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188949.33333333334, ans=0.1
+2024-08-29 15:34:09,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.76 vs. limit=6.0
+2024-08-29 15:34:12,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188949.33333333334, ans=0.125
+2024-08-29 15:34:28,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189002.66666666666, ans=0.1
+2024-08-29 15:34:30,453 INFO [train.py:1114] (0/4) Epoch 15, batch 600, loss[loss=0.2549, simple_loss=0.3119, pruned_loss=0.07278, ctc_loss=0.1307, over 19440.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2801, pruned_loss=0.0557, ctc_loss=0.1049, over 3663301.89 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:34:34,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.75 vs. limit=15.0
+2024-08-29 15:34:36,393 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.830e+02 2.111e+02 2.732e+02 4.380e+02, threshold=4.223e+02, percent-clipped=4.0
+2024-08-29 15:34:48,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=189109.33333333334, ans=0.2
+2024-08-29 15:34:49,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=189109.33333333334, ans=0.0
+2024-08-29 15:34:50,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=189109.33333333334, ans=0.125
+2024-08-29 15:34:56,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189162.66666666666, ans=0.1
+2024-08-29 15:34:57,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=189162.66666666666, ans=10.0
+2024-08-29 15:35:03,601 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:35:12,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189216.0, ans=0.1
+2024-08-29 15:35:19,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=189269.33333333334, ans=0.125
+2024-08-29 15:35:23,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189269.33333333334, ans=0.1
+2024-08-29 15:35:31,335 INFO [train.py:1114] (0/4) Epoch 15, batch 650, loss[loss=0.2139, simple_loss=0.2788, pruned_loss=0.05401, ctc_loss=0.1023, over 19768.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2792, pruned_loss=0.05516, ctc_loss=0.1038, over 3714569.97 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:35:36,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=189322.66666666666, ans=0.2
+2024-08-29 15:35:45,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=189376.0, ans=0.2
+2024-08-29 15:35:53,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=189376.0, ans=0.1
+2024-08-29 15:37:52,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=189429.33333333334, ans=0.5
+2024-08-29 15:38:05,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=189482.66666666666, ans=0.125
+2024-08-29 15:38:05,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189482.66666666666, ans=0.125
+2024-08-29 15:38:20,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=189536.0, ans=0.07
+2024-08-29 15:38:22,782 INFO [train.py:1114] (0/4) Epoch 15, batch 700, loss[loss=0.1795, simple_loss=0.2488, pruned_loss=0.03991, ctc_loss=0.07592, over 19719.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2798, pruned_loss=0.05541, ctc_loss=0.1042, over 3746563.06 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:38:28,535 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.846e+02 2.430e+02 3.057e+02 4.272e+02, threshold=4.860e+02, percent-clipped=1.0
+2024-08-29 15:38:29,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.24 vs. limit=10.0
+2024-08-29 15:38:33,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=189642.66666666666, ans=0.125
+2024-08-29 15:38:59,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.63 vs. limit=15.0
+2024-08-29 15:39:07,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=189749.33333333334, ans=0.125
+2024-08-29 15:39:21,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=189802.66666666666, ans=0.0
+2024-08-29 15:39:25,973 INFO [train.py:1114] (0/4) Epoch 15, batch 750, loss[loss=0.267, simple_loss=0.3161, pruned_loss=0.07776, ctc_loss=0.1557, over 19502.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2793, pruned_loss=0.05529, ctc_loss=0.1037, over 3773103.81 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:39:38,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189909.33333333334, ans=0.125
+2024-08-29 15:39:40,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=189909.33333333334, ans=0.025
+2024-08-29 15:39:54,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=189962.66666666666, ans=0.025
+2024-08-29 15:39:55,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189962.66666666666, ans=0.125
+2024-08-29 15:39:56,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=189962.66666666666, ans=0.0
+2024-08-29 15:40:01,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=190016.0, ans=0.125
+2024-08-29 15:40:22,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=190069.33333333334, ans=0.0
+2024-08-29 15:40:28,210 INFO [train.py:1114] (0/4) Epoch 15, batch 800, loss[loss=0.1895, simple_loss=0.2521, pruned_loss=0.04608, ctc_loss=0.0867, over 19806.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2786, pruned_loss=0.0549, ctc_loss=0.103, over 3796038.15 frames. ], batch size: 49, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:40:34,419 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.728e+02 2.068e+02 2.494e+02 4.984e+02, threshold=4.135e+02, percent-clipped=1.0
+2024-08-29 15:40:34,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190122.66666666666, ans=0.1
+2024-08-29 15:40:44,189 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:40:51,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=190229.33333333334, ans=0.0
+2024-08-29 15:41:00,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=190229.33333333334, ans=0.125
+2024-08-29 15:41:04,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=190282.66666666666, ans=0.125
+2024-08-29 15:41:26,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=190336.0, ans=0.0
+2024-08-29 15:41:30,882 INFO [train.py:1114] (0/4) Epoch 15, batch 850, loss[loss=0.2374, simple_loss=0.3029, pruned_loss=0.0632, ctc_loss=0.1138, over 19670.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2786, pruned_loss=0.05522, ctc_loss=0.1036, over 3816028.85 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:42:03,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.89 vs. limit=15.0
+2024-08-29 15:42:32,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190602.66666666666, ans=0.1
+2024-08-29 15:42:34,722 INFO [train.py:1114] (0/4) Epoch 15, batch 900, loss[loss=0.2189, simple_loss=0.2731, pruned_loss=0.06054, ctc_loss=0.109, over 19402.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2791, pruned_loss=0.0556, ctc_loss=0.1042, over 3818918.45 frames. ], batch size: 48, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:42:40,575 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.386e+02 1.760e+02 2.061e+02 2.441e+02 4.748e+02, threshold=4.121e+02, percent-clipped=4.0
+2024-08-29 15:43:05,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=190709.33333333334, ans=0.125
+2024-08-29 15:43:12,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190762.66666666666, ans=0.1
+2024-08-29 15:43:22,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=190816.0, ans=0.0
+2024-08-29 15:43:47,861 INFO [train.py:1114] (0/4) Epoch 15, batch 950, loss[loss=0.216, simple_loss=0.2788, pruned_loss=0.05603, ctc_loss=0.1031, over 19500.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2796, pruned_loss=0.05601, ctc_loss=0.1049, over 3820506.49 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:44:06,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=190976.0, ans=0.2
+2024-08-29 15:44:09,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=190976.0, ans=0.025
+2024-08-29 15:44:12,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0
+2024-08-29 15:44:22,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0
+2024-08-29 15:44:37,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=191136.0, ans=0.125
+2024-08-29 15:44:38,657 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.48 vs. limit=15.0
+2024-08-29 15:44:44,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.34 vs. limit=15.0
+2024-08-29 15:44:48,254 INFO [train.py:1114] (0/4) Epoch 15, batch 1000, loss[loss=0.223, simple_loss=0.2758, pruned_loss=0.06184, ctc_loss=0.1166, over 19842.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.28, pruned_loss=0.05624, ctc_loss=0.1054, over 3817654.75 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:44:49,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191189.33333333334, ans=0.0
+2024-08-29 15:44:56,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.63 vs. limit=15.0
+2024-08-29 15:44:56,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 1.691e+02 1.934e+02 2.300e+02 3.610e+02, threshold=3.869e+02, percent-clipped=0.0
+2024-08-29 15:45:16,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=191296.0, ans=0.125
+2024-08-29 15:45:38,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0
+2024-08-29 15:45:42,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191402.66666666666, ans=0.125
+2024-08-29 15:45:53,390 INFO [train.py:1114] (0/4) Epoch 15, batch 1050, loss[loss=0.2172, simple_loss=0.2795, pruned_loss=0.05651, ctc_loss=0.1046, over 19840.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2793, pruned_loss=0.05615, ctc_loss=0.1053, over 3823923.35 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:46:15,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=191509.33333333334, ans=0.125
+2024-08-29 15:46:16,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=191509.33333333334, ans=0.125
+2024-08-29 15:46:21,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=191562.66666666666, ans=0.2
+2024-08-29 15:46:54,818 INFO [train.py:1114] (0/4) Epoch 15, batch 1100, loss[loss=0.1898, simple_loss=0.2642, pruned_loss=0.04222, ctc_loss=0.07748, over 19589.00 frames. ], tot_loss[loss=0.216, simple_loss=0.279, pruned_loss=0.0556, ctc_loss=0.1044, over 3831082.54 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:46:59,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0
+2024-08-29 15:47:17,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.746e+02 1.965e+02 2.496e+02 3.903e+02, threshold=3.929e+02, percent-clipped=1.0
+2024-08-29 15:47:32,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=191776.0, ans=0.125
+2024-08-29 15:47:33,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0
+2024-08-29 15:47:43,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191829.33333333334, ans=0.0
+2024-08-29 15:47:51,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.05 vs. limit=8.0
+2024-08-29 15:48:10,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=191936.0, ans=0.0
+2024-08-29 15:48:12,562 INFO [train.py:1114] (0/4) Epoch 15, batch 1150, loss[loss=0.2341, simple_loss=0.2931, pruned_loss=0.06416, ctc_loss=0.1167, over 19572.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2794, pruned_loss=0.05582, ctc_loss=0.1049, over 3828740.94 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:48:14,157 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-36000.pt
+2024-08-29 15:48:33,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.03 vs. limit=12.0
+2024-08-29 15:48:33,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=192042.66666666666, ans=0.125
+2024-08-29 15:48:42,385 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=5.424e-03
+2024-08-29 15:48:59,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=192149.33333333334, ans=0.025
+2024-08-29 15:49:19,923 INFO [train.py:1114] (0/4) Epoch 15, batch 1200, loss[loss=0.2055, simple_loss=0.2801, pruned_loss=0.04722, ctc_loss=0.09125, over 19835.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2801, pruned_loss=0.05592, ctc_loss=0.1055, over 3822545.46 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:49:20,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.54 vs. limit=15.0
+2024-08-29 15:49:24,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192256.0, ans=0.125
+2024-08-29 15:49:26,210 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.719e+02 2.001e+02 2.349e+02 3.398e+02, threshold=4.002e+02, percent-clipped=0.0
+2024-08-29 15:49:57,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=192362.66666666666, ans=0.125
+2024-08-29 15:50:10,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=192416.0, ans=0.0
+2024-08-29 15:50:24,187 INFO [train.py:1114] (0/4) Epoch 15, batch 1250, loss[loss=0.2371, simple_loss=0.299, pruned_loss=0.06459, ctc_loss=0.115, over 19506.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2807, pruned_loss=0.05596, ctc_loss=0.1053, over 3841818.15 frames. ], batch size: 61, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:50:24,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=192522.66666666666, ans=0.125
+2024-08-29 15:50:39,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=15.0
+2024-08-29 15:50:44,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=192576.0, ans=0.025
+2024-08-29 15:50:51,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=192629.33333333334, ans=0.2
+2024-08-29 15:51:17,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=192736.0, ans=0.0
+2024-08-29 15:51:18,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=192736.0, ans=0.05
+2024-08-29 15:51:21,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=192736.0, ans=0.0
+2024-08-29 15:51:25,192 INFO [train.py:1114] (0/4) Epoch 15, batch 1300, loss[loss=0.2566, simple_loss=0.3086, pruned_loss=0.07423, ctc_loss=0.1404, over 18896.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.28, pruned_loss=0.05565, ctc_loss=0.1049, over 3846791.24 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0
+2024-08-29 15:51:27,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.35 vs. limit=15.0
+2024-08-29 15:52:15,027 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.668e+02 1.955e+02 2.455e+02 4.261e+02, threshold=3.910e+02, percent-clipped=2.0
+2024-08-29 15:52:50,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=192949.33333333334, ans=0.0
+2024-08-29 15:52:54,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=192949.33333333334, ans=0.125
+2024-08-29 15:53:04,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=193002.66666666666, ans=0.2
+2024-08-29 15:53:10,999 INFO [train.py:1114] (0/4) Epoch 15, batch 1350, loss[loss=0.1984, simple_loss=0.2619, pruned_loss=0.0488, ctc_loss=0.09307, over 19767.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2787, pruned_loss=0.05471, ctc_loss=0.103, over 3857434.60 frames. ], batch size: 54, lr: 9.98e-03, grad_scale: 32.0
+2024-08-29 15:53:12,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=193056.0, ans=0.0
+2024-08-29 15:53:19,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=193056.0, ans=0.0
+2024-08-29 15:53:57,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.97 vs. limit=10.0
+2024-08-29 15:54:14,980 INFO [train.py:1114] (0/4) Epoch 15, batch 1400, loss[loss=0.1577, simple_loss=0.2283, pruned_loss=0.03145, ctc_loss=0.06026, over 19663.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2785, pruned_loss=0.05484, ctc_loss=0.1032, over 3864045.08 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0
+2024-08-29 15:54:35,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=193322.66666666666, ans=0.125
+2024-08-29 15:54:37,475 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.658e+02 1.833e+02 2.351e+02 3.730e+02, threshold=3.665e+02, percent-clipped=0.0
+2024-08-29 15:54:45,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=193376.0, ans=0.0
+2024-08-29 15:55:27,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=193482.66666666666, ans=0.125
+2024-08-29 15:55:35,067 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.09 vs. limit=15.0
+2024-08-29 15:55:43,676 INFO [train.py:1114] (0/4) Epoch 15, batch 1450, loss[loss=0.2079, simple_loss=0.2816, pruned_loss=0.04975, ctc_loss=0.08664, over 19645.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2797, pruned_loss=0.05556, ctc_loss=0.1044, over 3861696.22 frames. ], batch size: 63, lr: 9.97e-03, grad_scale: 32.0
+2024-08-29 15:55:50,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.24 vs. limit=22.5
+2024-08-29 15:55:55,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193642.66666666666, ans=0.0
+2024-08-29 15:56:45,766 INFO [train.py:1114] (0/4) Epoch 15, batch 1500, loss[loss=0.2324, simple_loss=0.2992, pruned_loss=0.06031, ctc_loss=0.1125, over 19575.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.28, pruned_loss=0.05547, ctc_loss=0.1042, over 3861285.39 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 32.0
+2024-08-29 15:56:52,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.660e+02 1.885e+02 2.337e+02 4.281e+02, threshold=3.770e+02, percent-clipped=2.0
+2024-08-29 15:56:54,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=193856.0, ans=0.125
+2024-08-29 15:57:28,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=194016.0, ans=0.125
+2024-08-29 15:57:30,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.39 vs. limit=12.0
+2024-08-29 15:57:40,294 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:57:49,276 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=2.504e-03
+2024-08-29 15:57:51,457 INFO [train.py:1114] (0/4) Epoch 15, batch 1550, loss[loss=0.2202, simple_loss=0.2874, pruned_loss=0.05595, ctc_loss=0.1027, over 19610.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2794, pruned_loss=0.0552, ctc_loss=0.1037, over 3847267.13 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0
+2024-08-29 15:57:56,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194122.66666666666, ans=0.125
+2024-08-29 15:58:15,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=194176.0, ans=0.125
+2024-08-29 15:58:16,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=194229.33333333334, ans=0.2
+2024-08-29 15:58:19,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.09 vs. limit=15.0
+2024-08-29 15:58:25,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=194229.33333333334, ans=0.125
+2024-08-29 15:58:29,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=194282.66666666666, ans=0.125
+2024-08-29 15:58:29,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=194282.66666666666, ans=0.125
+2024-08-29 15:58:35,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=194282.66666666666, ans=0.1
+2024-08-29 15:58:47,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=194336.0, ans=0.0
+2024-08-29 15:58:53,438 INFO [train.py:1114] (0/4) Epoch 15, batch 1600, loss[loss=0.2281, simple_loss=0.2967, pruned_loss=0.05748, ctc_loss=0.1112, over 19833.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2796, pruned_loss=0.05534, ctc_loss=0.1041, over 3836125.80 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0
+2024-08-29 15:58:59,525 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.762e+02 2.164e+02 2.478e+02 4.927e+02, threshold=4.328e+02, percent-clipped=7.0
+2024-08-29 15:59:52,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=194442.66666666666, ans=0.0
+2024-08-29 15:59:59,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=194496.0, ans=0.125
+2024-08-29 16:00:35,210 INFO [train.py:1114] (0/4) Epoch 15, batch 1650, loss[loss=0.2116, simple_loss=0.2844, pruned_loss=0.05079, ctc_loss=0.09311, over 19643.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2796, pruned_loss=0.05554, ctc_loss=0.1045, over 3832052.74 frames. ], batch size: 59, lr: 9.94e-03, grad_scale: 32.0
+2024-08-29 16:00:35,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194656.0, ans=0.0
+2024-08-29 16:00:46,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194709.33333333334, ans=0.125
+2024-08-29 16:00:54,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=194709.33333333334, ans=0.0
+2024-08-29 16:00:58,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=194762.66666666666, ans=0.0
+2024-08-29 16:01:04,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=194762.66666666666, ans=0.125
+2024-08-29 16:01:11,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=194762.66666666666, ans=0.07
+2024-08-29 16:01:14,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=194816.0, ans=0.0
+2024-08-29 16:01:15,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.94 vs. limit=22.5
+2024-08-29 16:01:26,769 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0
+2024-08-29 16:01:32,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=194869.33333333334, ans=0.125
+2024-08-29 16:01:38,037 INFO [train.py:1114] (0/4) Epoch 15, batch 1700, loss[loss=0.1783, simple_loss=0.24, pruned_loss=0.04232, ctc_loss=0.07976, over 19682.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2798, pruned_loss=0.05556, ctc_loss=0.1045, over 3846484.66 frames. ], batch size: 46, lr: 9.94e-03, grad_scale: 32.0
+2024-08-29 16:01:44,061 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.696e+02 2.083e+02 2.797e+02 4.802e+02, threshold=4.167e+02, percent-clipped=3.0
+2024-08-29 16:01:52,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=194976.0, ans=0.125
+2024-08-29 16:01:54,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=194976.0, ans=0.0
+2024-08-29 16:02:00,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=195029.33333333334, ans=0.125
+2024-08-29 16:02:09,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=195029.33333333334, ans=0.0
+2024-08-29 16:02:10,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195029.33333333334, ans=0.125
+2024-08-29 16:02:18,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=195082.66666666666, ans=0.07
+2024-08-29 16:02:25,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=195082.66666666666, ans=0.125
+2024-08-29 16:02:32,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=195136.0, ans=0.09899494936611666
+2024-08-29 16:02:39,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=195189.33333333334, ans=0.125
+2024-08-29 16:02:40,469 INFO [train.py:1114] (0/4) Epoch 15, batch 1750, loss[loss=0.2011, simple_loss=0.2584, pruned_loss=0.05301, ctc_loss=0.09414, over 19659.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2797, pruned_loss=0.05562, ctc_loss=0.1046, over 3851315.28 frames. ], batch size: 45, lr: 9.93e-03, grad_scale: 32.0
+2024-08-29 16:03:02,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=195296.0, ans=0.125
+2024-08-29 16:03:05,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.28 vs. limit=22.5
+2024-08-29 16:03:18,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=195349.33333333334, ans=15.0
+2024-08-29 16:03:18,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=195349.33333333334, ans=0.2
+2024-08-29 16:03:28,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195402.66666666666, ans=0.125
+2024-08-29 16:03:29,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=195402.66666666666, ans=0.125
+2024-08-29 16:03:37,900 INFO [train.py:1114] (0/4) Epoch 15, batch 1800, loss[loss=0.2222, simple_loss=0.2905, pruned_loss=0.05508, ctc_loss=0.1092, over 19617.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2795, pruned_loss=0.05539, ctc_loss=0.1042, over 3852338.26 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 32.0
+2024-08-29 16:03:43,639 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.702e+02 2.083e+02 2.690e+02 4.339e+02, threshold=4.166e+02, percent-clipped=1.0
+2024-08-29 16:04:05,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.22 vs. limit=15.0
+2024-08-29 16:04:23,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=195669.33333333334, ans=0.07
+2024-08-29 16:04:23,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=195669.33333333334, ans=0.125
+2024-08-29 16:04:26,988 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 16:04:34,674 INFO [train.py:1114] (0/4) Epoch 15, batch 1850, loss[loss=0.2097, simple_loss=0.2862, pruned_loss=0.04804, ctc_loss=0.09257, over 19592.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2794, pruned_loss=0.05543, ctc_loss=0.1043, over 3856614.83 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 32.0
+2024-08-29 16:04:50,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=195776.0, ans=0.125
+2024-08-29 16:05:02,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=195829.33333333334, ans=0.125
+2024-08-29 16:05:29,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=195936.0, ans=0.025
+2024-08-29 16:05:35,597 INFO [train.py:1114] (0/4) Epoch 15, batch 1900, loss[loss=0.2077, simple_loss=0.2851, pruned_loss=0.04659, ctc_loss=0.09306, over 19668.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2798, pruned_loss=0.05528, ctc_loss=0.104, over 3862380.04 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 32.0
+2024-08-29 16:05:40,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.724e+02 2.102e+02 3.115e+02 5.340e+02, threshold=4.204e+02, percent-clipped=3.0
+2024-08-29 16:05:50,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=196042.66666666666, ans=0.125
+2024-08-29 16:05:57,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=196042.66666666666, ans=0.95
+2024-08-29 16:06:02,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=196096.0, ans=0.125
+2024-08-29 16:06:02,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=196096.0, ans=0.125
+2024-08-29 16:06:10,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=196149.33333333334, ans=0.0
+2024-08-29 16:06:21,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=196202.66666666666, ans=0.1
+2024-08-29 16:06:32,338 INFO [train.py:1114] (0/4) Epoch 15, batch 1950, loss[loss=0.1992, simple_loss=0.2664, pruned_loss=0.04831, ctc_loss=0.08829, over 19589.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2804, pruned_loss=0.05524, ctc_loss=0.1038, over 3870699.90 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 32.0
+2024-08-29 16:06:55,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=196309.33333333334, ans=0.0
+2024-08-29 16:06:57,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=196309.33333333334, ans=0.0
+2024-08-29 16:06:59,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=196309.33333333334, ans=0.125
+2024-08-29 16:07:35,224 INFO [train.py:1114] (0/4) Epoch 15, batch 2000, loss[loss=0.1953, simple_loss=0.254, pruned_loss=0.04913, ctc_loss=0.09612, over 19619.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2805, pruned_loss=0.05508, ctc_loss=0.1035, over 3855755.56 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0
+2024-08-29 16:07:41,140 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.618e+02 1.832e+02 2.132e+02 4.362e+02, threshold=3.664e+02, percent-clipped=1.0
+2024-08-29 16:08:10,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196682.66666666666, ans=0.125
+2024-08-29 16:08:21,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0
+2024-08-29 16:08:22,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196736.0, ans=0.125
+2024-08-29 16:08:23,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=196736.0, ans=0.2
+2024-08-29 16:08:32,365 INFO [train.py:1114] (0/4) Epoch 15, batch 2050, loss[loss=0.1991, simple_loss=0.257, pruned_loss=0.05101, ctc_loss=0.09781, over 19726.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2796, pruned_loss=0.05504, ctc_loss=0.1035, over 3851699.63 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0
+2024-08-29 16:08:44,417 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 16:09:17,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.01 vs. limit=22.5
+2024-08-29 16:09:24,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197002.66666666666, ans=0.1
+2024-08-29 16:09:27,726 INFO [train.py:1114] (0/4) Epoch 15, batch 2100, loss[loss=0.2271, simple_loss=0.2875, pruned_loss=0.05964, ctc_loss=0.1184, over 19763.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2788, pruned_loss=0.05468, ctc_loss=0.1028, over 3858793.46 frames. ], batch size: 54, lr: 9.88e-03, grad_scale: 32.0
+2024-08-29 16:09:33,397 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.691e+02 1.929e+02 2.354e+02 3.359e+02, threshold=3.858e+02, percent-clipped=0.0
+2024-08-29 16:09:38,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197109.33333333334, ans=0.125
+2024-08-29 16:09:44,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=197109.33333333334, ans=0.125
+2024-08-29 16:10:06,170 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=4.475e-02
+2024-08-29 16:10:16,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.52 vs. limit=22.5
+2024-08-29 16:10:19,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=197269.33333333334, ans=0.2
+2024-08-29 16:10:25,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=197322.66666666666, ans=0.0
+2024-08-29 16:10:26,364 INFO [train.py:1114] (0/4) Epoch 15, batch 2150, loss[loss=0.2114, simple_loss=0.276, pruned_loss=0.05372, ctc_loss=0.09831, over 19595.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2783, pruned_loss=0.05452, ctc_loss=0.1023, over 3869908.97 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0
+2024-08-29 16:10:31,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197322.66666666666, ans=0.1
+2024-08-29 16:10:36,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=197322.66666666666, ans=0.0
+2024-08-29 16:10:40,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=197376.0, ans=0.125
+2024-08-29 16:10:44,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197376.0, ans=0.1
+2024-08-29 16:12:14,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197482.66666666666, ans=0.1
+2024-08-29 16:12:15,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=197482.66666666666, ans=0.025
+2024-08-29 16:12:31,457 INFO [train.py:1114] (0/4) Epoch 15, batch 2200, loss[loss=0.2079, simple_loss=0.2841, pruned_loss=0.04744, ctc_loss=0.09196, over 19603.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.279, pruned_loss=0.05474, ctc_loss=0.1029, over 3866967.17 frames. ], batch size: 57, lr: 9.87e-03, grad_scale: 32.0
+2024-08-29 16:12:36,859 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.787e+02 2.154e+02 2.730e+02 5.047e+02, threshold=4.308e+02, percent-clipped=4.0
+2024-08-29 16:12:54,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=197696.0, ans=0.0
+2024-08-29 16:12:57,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=197696.0, ans=0.125
+2024-08-29 16:13:25,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197802.66666666666, ans=0.125
+2024-08-29 16:13:25,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197802.66666666666, ans=0.125
+2024-08-29 16:13:29,255 INFO [train.py:1114] (0/4) Epoch 15, batch 2250, loss[loss=0.2243, simple_loss=0.2964, pruned_loss=0.05642, ctc_loss=0.09872, over 19614.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2795, pruned_loss=0.05509, ctc_loss=0.1034, over 3866993.22 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 32.0
+2024-08-29 16:13:31,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=197856.0, ans=0.125
+2024-08-29 16:13:35,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=197856.0, ans=0.125
+2024-08-29 16:14:45,283 INFO [train.py:1114] (0/4) Epoch 15, batch 2300, loss[loss=0.1824, simple_loss=0.248, pruned_loss=0.04301, ctc_loss=0.07703, over 19503.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2783, pruned_loss=0.05515, ctc_loss=0.1036, over 3861395.04 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 32.0
+2024-08-29 16:14:45,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.12 vs. limit=10.0
+2024-08-29 16:14:50,777 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.686e+02 1.986e+02 2.467e+02 4.553e+02, threshold=3.971e+02, percent-clipped=1.0
+2024-08-29 16:14:59,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=198176.0, ans=0.125
+2024-08-29 16:14:59,587 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.28 vs. limit=15.0
+2024-08-29 16:15:06,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198176.0, ans=0.0
+2024-08-29 16:15:16,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.14 vs. limit=15.0
+2024-08-29 16:15:20,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198282.66666666666, ans=0.125
+2024-08-29 16:15:21,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198282.66666666666, ans=0.0
+2024-08-29 16:15:22,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=198282.66666666666, ans=0.0
+2024-08-29 16:15:30,441 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.05 vs. limit=8.0
+2024-08-29 16:15:43,134 INFO [train.py:1114] (0/4) Epoch 15, batch 2350, loss[loss=0.2468, simple_loss=0.3063, pruned_loss=0.06758, ctc_loss=0.1304, over 19705.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2779, pruned_loss=0.05496, ctc_loss=0.103, over 3864267.56 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 64.0
+2024-08-29 16:15:57,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=198442.66666666666, ans=0.09899494936611666
+2024-08-29 16:15:59,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=198442.66666666666, ans=0.125
+2024-08-29 16:16:16,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=198496.0, ans=0.0
+2024-08-29 16:16:38,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198602.66666666666, ans=0.1
+2024-08-29 16:16:42,890 INFO [train.py:1114] (0/4) Epoch 15, batch 2400, loss[loss=0.2105, simple_loss=0.2844, pruned_loss=0.04959, ctc_loss=0.09354, over 19363.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.28, pruned_loss=0.0555, ctc_loss=0.104, over 3858534.45 frames. ], batch size: 67, lr: 9.85e-03, grad_scale: 64.0
+2024-08-29 16:16:48,397 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.659e+02 1.944e+02 2.492e+02 3.873e+02, threshold=3.888e+02, percent-clipped=0.0
+2024-08-29 16:16:52,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=198656.0, ans=0.125
+2024-08-29 16:16:52,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198656.0, ans=0.1
+2024-08-29 16:16:53,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=198709.33333333334, ans=0.0
+2024-08-29 16:16:53,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198709.33333333334, ans=0.125
+2024-08-29 16:16:54,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=198709.33333333334, ans=0.2
+2024-08-29 16:16:59,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=198709.33333333334, ans=0.0
+2024-08-29 16:17:52,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.15 vs. limit=15.0
+2024-08-29 16:17:54,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198709.33333333334, ans=0.1
+2024-08-29 16:18:01,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=198762.66666666666, ans=0.125
+2024-08-29 16:18:10,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.89 vs. limit=10.0
+2024-08-29 16:18:33,181 INFO [train.py:1114] (0/4) Epoch 15, batch 2450, loss[loss=0.2701, simple_loss=0.3043, pruned_loss=0.08653, ctc_loss=0.1573, over 13381.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2837, pruned_loss=0.05836, ctc_loss=0.1098, over 3731305.61 frames. ], batch size: 141, lr: 9.84e-03, grad_scale: 32.0
+2024-08-29 16:18:49,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198976.0, ans=0.1
+2024-08-29 16:18:56,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=199029.33333333334, ans=0.125
+2024-08-29 16:18:56,839 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 16:19:04,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.09 vs. limit=15.0
+2024-08-29 16:19:16,586 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-15.pt
+2024-08-29 16:20:18,426 INFO [train.py:1114] (0/4) Epoch 16, batch 0, loss[loss=0.1889, simple_loss=0.2524, pruned_loss=0.04568, ctc_loss=0.08503, over 19833.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2524, pruned_loss=0.04568, ctc_loss=0.08503, over 19833.00 frames. ], batch size: 49, lr: 9.52e-03, grad_scale: 32.0
+2024-08-29 16:20:18,427 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-29 16:20:28,423 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1867, simple_loss=0.2755, pruned_loss=0.03636, ctc_loss=0.06317, over 944034.00 frames.
+2024-08-29 16:20:28,424 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13544MB
+2024-08-29 16:20:28,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=199130.66666666666, ans=0.0
+2024-08-29 16:20:37,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=199130.66666666666, ans=0.125
+2024-08-29 16:20:47,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.24 vs. limit=22.5
+2024-08-29 16:20:48,967 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.810e+02 1.998e+02 2.276e+02 3.528e+02, threshold=3.997e+02, percent-clipped=0.0
+2024-08-29 16:21:11,762 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 16:21:32,424 INFO [train.py:1114] (0/4) Epoch 16, batch 50, loss[loss=0.1812, simple_loss=0.2488, pruned_loss=0.04118, ctc_loss=0.0783, over 19702.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2814, pruned_loss=0.0567, ctc_loss=0.1074, over 845293.61 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0
+2024-08-29 16:21:32,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199397.33333333334, ans=0.125
+2024-08-29 16:22:27,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0
+2024-08-29 16:22:30,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=199610.66666666666, ans=0.125
+2024-08-29 16:22:40,111 INFO [train.py:1114] (0/4) Epoch 16, batch 100, loss[loss=0.2142, simple_loss=0.2757, pruned_loss=0.05466, ctc_loss=0.1086, over 19711.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2831, pruned_loss=0.05656, ctc_loss=0.1072, over 1499669.28 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0
+2024-08-29 16:23:08,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.352e+02 1.815e+02 2.137e+02 2.569e+02 4.869e+02, threshold=4.274e+02, percent-clipped=1.0
+2024-08-29 16:23:14,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=199717.33333333334, ans=0.125
+2024-08-29 16:23:15,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=199770.66666666666, ans=0.0
+2024-08-29 16:23:23,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199770.66666666666, ans=0.1
+2024-08-29 16:35:02,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.94 vs. limit=22.5
+2024-08-29 16:35:20,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.18 vs. limit=15.0
+2024-08-29 16:37:11,209 INFO [train.py:1114] (0/4) Epoch 16, batch 150, loss[loss=0.1901, simple_loss=0.2471, pruned_loss=0.04851, ctc_loss=0.09035, over 19725.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2784, pruned_loss=0.05401, ctc_loss=0.1022, over 2028484.55 frames. ], batch size: 47, lr: 9.50e-03, grad_scale: 32.0
+2024-08-29 16:40:06,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=199984.0, ans=0.0
+2024-08-29 16:45:46,802 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 16:47:01,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.96 vs. limit=15.0
+2024-08-29 16:47:20,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=200144.0, ans=0.0
+2024-08-29 16:48:04,609 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 16:48:05,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=200197.33333333334, ans=0.0
+2024-08-29 16:48:05,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=200197.33333333334, ans=0.125
+2024-08-29 16:48:09,838 INFO [train.py:1114] (0/4) Epoch 16, batch 200, loss[loss=0.2386, simple_loss=0.2913, pruned_loss=0.06769, ctc_loss=0.1261, over 18100.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2773, pruned_loss=0.05353, ctc_loss=0.1012, over 2435806.75 frames. ], batch size: 85, lr: 9.49e-03, grad_scale: 32.0
+2024-08-29 16:49:17,163 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0
+2024-08-29 16:49:58,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200197.33333333334, ans=0.125
+2024-08-29 16:50:01,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=200197.33333333334, ans=0.0
+2024-08-29 16:53:29,823 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.834e+02 2.227e+02 2.815e+02 4.534e+02, threshold=4.454e+02, percent-clipped=1.0
+2024-08-29 16:54:13,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=200304.0, ans=0.125
+2024-08-29 16:55:31,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=200357.33333333334, ans=0.125
+2024-08-29 16:55:53,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=200357.33333333334, ans=0.2
+2024-08-29 16:56:00,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200357.33333333334, ans=0.1
+2024-08-29 16:56:29,781 INFO [train.py:1114] (0/4) Epoch 16, batch 250, loss[loss=0.2291, simple_loss=0.2902, pruned_loss=0.06038, ctc_loss=0.118, over 19425.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2776, pruned_loss=0.05394, ctc_loss=0.1017, over 2756064.49 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0
+2024-08-29 16:58:34,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=200517.33333333334, ans=0.025
+2024-08-29 16:58:41,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=200570.66666666666, ans=0.2
+2024-08-29 16:58:56,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=200570.66666666666, ans=0.2
+2024-08-29 16:59:18,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200624.0, ans=0.125
+2024-08-29 17:03:13,498 INFO [train.py:1114] (0/4) Epoch 16, batch 300, loss[loss=0.2483, simple_loss=0.3013, pruned_loss=0.07164, ctc_loss=0.13, over 19542.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2775, pruned_loss=0.05398, ctc_loss=0.1018, over 3000525.35 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0
+2024-08-29 17:03:25,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=200730.66666666666, ans=0.125
+2024-08-29 17:03:36,035 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.663e+02 1.972e+02 2.398e+02 4.674e+02, threshold=3.943e+02, percent-clipped=1.0
+2024-08-29 17:07:21,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=200890.66666666666, ans=0.0
+2024-08-29 17:07:54,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.18 vs. limit=10.0
+2024-08-29 17:08:30,922 INFO [train.py:1114] (0/4) Epoch 16, batch 350, loss[loss=0.189, simple_loss=0.2521, pruned_loss=0.04501, ctc_loss=0.08945, over 19764.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2784, pruned_loss=0.05422, ctc_loss=0.1023, over 3189776.14 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0
+2024-08-29 17:08:34,701 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 17:10:15,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.81 vs. limit=15.0
+2024-08-29 17:10:17,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.31 vs. limit=15.0
+2024-08-29 17:11:34,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=12.0
+2024-08-29 17:13:16,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201264.0, ans=0.1
+2024-08-29 17:13:17,607 INFO [train.py:1114] (0/4) Epoch 16, batch 400, loss[loss=0.2288, simple_loss=0.2882, pruned_loss=0.06133, ctc_loss=0.1171, over 19493.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2779, pruned_loss=0.054, ctc_loss=0.1018, over 3341541.24 frames. 
], batch size: 54, lr: 9.47e-03, grad_scale: 32.0 +2024-08-29 17:13:20,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201264.0, ans=0.1 +2024-08-29 17:13:42,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201317.33333333334, ans=0.0 +2024-08-29 17:15:51,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.714e+02 1.905e+02 2.508e+02 3.565e+02, threshold=3.811e+02, percent-clipped=0.0 +2024-08-29 17:17:00,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201477.33333333334, ans=0.1 +2024-08-29 17:17:07,844 INFO [train.py:1114] (0/4) Epoch 16, batch 450, loss[loss=0.2373, simple_loss=0.3003, pruned_loss=0.06337, ctc_loss=0.1189, over 19623.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.278, pruned_loss=0.05428, ctc_loss=0.1021, over 3449745.03 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0 +2024-08-29 17:21:13,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=201690.66666666666, ans=0.2 +2024-08-29 17:21:24,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.97 vs. limit=22.5 +2024-08-29 17:21:31,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=201744.0, ans=0.0 +2024-08-29 17:21:57,440 INFO [train.py:1114] (0/4) Epoch 16, batch 500, loss[loss=0.2126, simple_loss=0.2822, pruned_loss=0.05282, ctc_loss=0.0933, over 19663.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2764, pruned_loss=0.05346, ctc_loss=0.1005, over 3545183.97 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0 +2024-08-29 17:22:03,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201797.33333333334, ans=0.125 +2024-08-29 17:22:46,947 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.689e+02 2.169e+02 2.570e+02 5.370e+02, threshold=4.338e+02, percent-clipped=3.0 +2024-08-29 17:23:06,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=201850.66666666666, ans=0.125 +2024-08-29 17:23:16,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.06 vs. limit=15.0 +2024-08-29 17:23:26,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=201904.0, ans=15.0 +2024-08-29 17:23:49,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=202010.66666666666, ans=0.05 +2024-08-29 17:24:02,844 INFO [train.py:1114] (0/4) Epoch 16, batch 550, loss[loss=0.1928, simple_loss=0.2731, pruned_loss=0.04042, ctc_loss=0.0789, over 19241.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2763, pruned_loss=0.05344, ctc_loss=0.1006, over 3606918.73 frames. 
], batch size: 71, lr: 9.45e-03, grad_scale: 32.0 +2024-08-29 17:24:06,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=202064.0, ans=0.0 +2024-08-29 17:24:41,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202170.66666666666, ans=0.0 +2024-08-29 17:24:50,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=202224.0, ans=0.125 +2024-08-29 17:24:51,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.83 vs. limit=15.0 +2024-08-29 17:24:51,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.01 vs. limit=15.0 +2024-08-29 17:24:56,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=202224.0, ans=0.125 +2024-08-29 17:25:11,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.23 vs. limit=6.0 +2024-08-29 17:25:21,558 INFO [train.py:1114] (0/4) Epoch 16, batch 600, loss[loss=0.2233, simple_loss=0.294, pruned_loss=0.05698, ctc_loss=0.09633, over 19441.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2763, pruned_loss=0.05321, ctc_loss=0.09994, over 3664117.11 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0 +2024-08-29 17:25:25,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=202330.66666666666, ans=0.2 +2024-08-29 17:26:27,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.86 vs. limit=12.0 +2024-08-29 17:27:04,563 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.652e+02 1.934e+02 2.290e+02 3.719e+02, threshold=3.867e+02, percent-clipped=0.0 +2024-08-29 17:31:02,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=202597.33333333334, ans=0.125 +2024-08-29 17:31:03,783 INFO [train.py:1114] (0/4) Epoch 16, batch 650, loss[loss=0.2092, simple_loss=0.2726, pruned_loss=0.05153, ctc_loss=0.1069, over 19769.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2759, pruned_loss=0.05311, ctc_loss=0.09994, over 3715349.48 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0 +2024-08-29 17:32:25,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.69 vs. 
limit=22.5 +2024-08-29 17:32:46,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=202704.0, ans=0.0 +2024-08-29 17:32:49,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=202757.33333333334, ans=0.125 +2024-08-29 17:32:49,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202757.33333333334, ans=0.1 +2024-08-29 17:32:55,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=202757.33333333334, ans=0.125 +2024-08-29 17:34:02,127 INFO [train.py:1114] (0/4) Epoch 16, batch 700, loss[loss=0.1962, simple_loss=0.262, pruned_loss=0.04739, ctc_loss=0.08921, over 19717.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.276, pruned_loss=0.05293, ctc_loss=0.0995, over 3747773.79 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0 +2024-08-29 17:34:02,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202864.0, ans=0.125 +2024-08-29 17:34:09,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202864.0, ans=0.1 +2024-08-29 17:34:15,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=202917.33333333334, ans=0.0 +2024-08-29 17:35:12,325 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.755e+02 2.110e+02 2.761e+02 5.047e+02, threshold=4.220e+02, percent-clipped=5.0 +2024-08-29 17:35:35,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=202970.66666666666, ans=0.0 +2024-08-29 17:41:01,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=203077.33333333334, ans=0.125 +2024-08-29 17:41:53,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203077.33333333334, ans=0.125 +2024-08-29 17:42:01,680 INFO [train.py:1114] (0/4) Epoch 16, batch 750, loss[loss=0.2356, simple_loss=0.2978, pruned_loss=0.06221, ctc_loss=0.1226, over 19493.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2758, pruned_loss=0.0529, ctc_loss=0.09961, over 3772968.21 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 32.0 +2024-08-29 17:42:08,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=203130.66666666666, ans=0.2 +2024-08-29 17:42:12,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203130.66666666666, ans=0.1 +2024-08-29 17:42:35,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=203237.33333333334, ans=0.2 +2024-08-29 17:42:41,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.22 vs. limit=15.0 +2024-08-29 17:44:19,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.46 vs. 
limit=15.0 +2024-08-29 17:46:10,172 INFO [train.py:1114] (0/4) Epoch 16, batch 800, loss[loss=0.1972, simple_loss=0.2635, pruned_loss=0.04698, ctc_loss=0.0921, over 19828.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2757, pruned_loss=0.05294, ctc_loss=0.0997, over 3794945.14 frames. ], batch size: 49, lr: 9.42e-03, grad_scale: 32.0 +2024-08-29 17:46:10,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203397.33333333334, ans=0.125 +2024-08-29 17:46:10,471 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 17:47:32,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=203397.33333333334, ans=0.0 +2024-08-29 17:48:02,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.05 vs. limit=22.5 +2024-08-29 17:48:15,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.745e+02 2.069e+02 2.556e+02 3.770e+02, threshold=4.138e+02, percent-clipped=0.0 +2024-08-29 17:48:27,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=203504.0, ans=0.0 +2024-08-29 17:48:54,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.08 vs. limit=15.0 +2024-08-29 17:48:55,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=203610.66666666666, ans=0.125 +2024-08-29 17:49:06,950 INFO [train.py:1114] (0/4) Epoch 16, batch 850, loss[loss=0.2044, simple_loss=0.2803, pruned_loss=0.04638, ctc_loss=0.08933, over 19643.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2758, pruned_loss=0.05304, ctc_loss=0.0998, over 3814588.77 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0 +2024-08-29 17:49:09,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=203664.0, ans=0.025 +2024-08-29 17:49:30,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0 +2024-08-29 17:49:32,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203770.66666666666, ans=0.125 +2024-08-29 17:50:13,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203877.33333333334, ans=0.125 +2024-08-29 17:50:21,093 INFO [train.py:1114] (0/4) Epoch 16, batch 900, loss[loss=0.2148, simple_loss=0.2766, pruned_loss=0.05601, ctc_loss=0.1025, over 19403.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2765, pruned_loss=0.05344, ctc_loss=0.1005, over 3818681.23 frames. 
], batch size: 48, lr: 9.41e-03, grad_scale: 32.0 +2024-08-29 17:50:34,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203930.66666666666, ans=0.125 +2024-08-29 17:50:48,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.676e+02 1.827e+02 2.350e+02 4.099e+02, threshold=3.653e+02, percent-clipped=0.0 +2024-08-29 17:51:22,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.whiten.whitening_limit, batch_count=204037.33333333334, ans=15.0 +2024-08-29 17:53:37,507 INFO [train.py:1114] (0/4) Epoch 16, batch 950, loss[loss=0.1989, simple_loss=0.2625, pruned_loss=0.04887, ctc_loss=0.0937, over 19500.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2768, pruned_loss=0.05342, ctc_loss=0.1007, over 3821331.88 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0 +2024-08-29 17:54:06,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204197.33333333334, ans=0.125 +2024-08-29 17:54:21,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=204250.66666666666, ans=0.07 +2024-08-29 17:54:32,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-08-29 17:54:56,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=204357.33333333334, ans=0.125 +2024-08-29 17:54:57,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=204357.33333333334, ans=0.125 +2024-08-29 17:54:58,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=204357.33333333334, ans=0.125 +2024-08-29 17:55:46,650 INFO [train.py:1114] (0/4) Epoch 16, batch 1000, loss[loss=0.2144, simple_loss=0.2734, pruned_loss=0.05637, ctc_loss=0.1069, over 19863.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2774, pruned_loss=0.05375, ctc_loss=0.1012, over 3817099.88 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0 +2024-08-29 17:56:07,211 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.649e+02 1.918e+02 2.268e+02 3.238e+02, threshold=3.836e+02, percent-clipped=0.0 +2024-08-29 17:57:03,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204570.66666666666, ans=0.1 +2024-08-29 17:57:51,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=204677.33333333334, ans=0.2 +2024-08-29 17:57:54,925 INFO [train.py:1114] (0/4) Epoch 16, batch 1050, loss[loss=0.2117, simple_loss=0.2814, pruned_loss=0.05122, ctc_loss=0.09913, over 19848.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2771, pruned_loss=0.05377, ctc_loss=0.1013, over 3823437.21 frames. 
], batch size: 57, lr: 9.39e-03, grad_scale: 32.0 +2024-08-29 17:58:31,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=204730.66666666666, ans=0.2 +2024-08-29 17:58:36,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=204730.66666666666, ans=0.125 +2024-08-29 17:59:17,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.14 vs. limit=10.0 +2024-08-29 17:59:50,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-29 17:59:54,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-29 18:00:19,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=204890.66666666666, ans=10.0 +2024-08-29 18:00:52,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=204997.33333333334, ans=0.0 +2024-08-29 18:00:53,294 INFO [train.py:1114] (0/4) Epoch 16, batch 1100, loss[loss=0.2029, simple_loss=0.2722, pruned_loss=0.0487, ctc_loss=0.09062, over 19578.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.277, pruned_loss=0.05386, ctc_loss=0.1015, over 3830357.68 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 16.0 +2024-08-29 18:00:55,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=204997.33333333334, ans=0.125 +2024-08-29 18:01:25,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205050.66666666666, ans=0.1 +2024-08-29 18:01:27,932 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.389e+02 1.694e+02 1.874e+02 2.325e+02 3.063e+02, threshold=3.748e+02, percent-clipped=0.0 +2024-08-29 18:01:42,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205104.0, ans=0.125 +2024-08-29 18:02:43,504 INFO [train.py:1114] (0/4) Epoch 16, batch 1150, loss[loss=0.2199, simple_loss=0.2754, pruned_loss=0.0592, ctc_loss=0.1147, over 19586.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2769, pruned_loss=0.05409, ctc_loss=0.1019, over 3828742.07 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 16.0 +2024-08-29 18:03:07,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=205370.66666666666, ans=0.0 +2024-08-29 18:03:08,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.98 vs. limit=15.0 +2024-08-29 18:03:31,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=205477.33333333334, ans=0.04949747468305833 +2024-08-29 18:03:43,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=205530.66666666666, ans=0.0 +2024-08-29 18:03:45,116 INFO [train.py:1114] (0/4) Epoch 16, batch 1200, loss[loss=0.216, simple_loss=0.2795, pruned_loss=0.05588, ctc_loss=0.1018, over 19826.00 frames. 
], tot_loss[loss=0.2135, simple_loss=0.2775, pruned_loss=0.05427, ctc_loss=0.1023, over 3825157.00 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0 +2024-08-29 18:03:47,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=205530.66666666666, ans=0.07 +2024-08-29 18:04:06,312 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.725e+02 2.012e+02 2.470e+02 3.418e+02, threshold=4.024e+02, percent-clipped=0.0 +2024-08-29 18:04:12,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.04 vs. limit=22.5 +2024-08-29 18:04:18,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=205637.33333333334, ans=0.2 +2024-08-29 18:04:18,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.28 vs. limit=15.0 +2024-08-29 18:04:50,748 INFO [train.py:1114] (0/4) Epoch 16, batch 1250, loss[loss=0.269, simple_loss=0.3141, pruned_loss=0.08125, ctc_loss=0.1535, over 19537.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2782, pruned_loss=0.05428, ctc_loss=0.1022, over 3843181.77 frames. ], batch size: 61, lr: 9.37e-03, grad_scale: 32.0 +2024-08-29 18:04:58,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=205797.33333333334, ans=0.025 +2024-08-29 18:05:35,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=205904.0, ans=0.125 +2024-08-29 18:05:55,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=205957.33333333334, ans=0.025 +2024-08-29 18:06:24,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=206010.66666666666, ans=0.2 +2024-08-29 18:06:35,900 INFO [train.py:1114] (0/4) Epoch 16, batch 1300, loss[loss=0.2407, simple_loss=0.296, pruned_loss=0.06776, ctc_loss=0.1248, over 18946.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2771, pruned_loss=0.05355, ctc_loss=0.1011, over 3847064.72 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0 +2024-08-29 18:06:38,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=206064.0, ans=0.0 +2024-08-29 18:06:41,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=206064.0, ans=0.125 +2024-08-29 18:06:41,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.22 vs. limit=12.0 +2024-08-29 18:06:57,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.716e+02 2.090e+02 2.690e+02 4.268e+02, threshold=4.180e+02, percent-clipped=3.0 +2024-08-29 18:07:09,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.21 vs. 
limit=12.0 +2024-08-29 18:07:15,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=206224.0, ans=0.025 +2024-08-29 18:07:19,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=206224.0, ans=0.0 +2024-08-29 18:07:34,536 INFO [train.py:1114] (0/4) Epoch 16, batch 1350, loss[loss=0.2033, simple_loss=0.2743, pruned_loss=0.04793, ctc_loss=0.09084, over 19769.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2767, pruned_loss=0.05321, ctc_loss=0.1004, over 3859016.68 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 32.0 +2024-08-29 18:08:18,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=206437.33333333334, ans=0.2 +2024-08-29 18:08:23,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206490.66666666666, ans=0.125 +2024-08-29 18:09:52,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-29 18:10:09,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=206544.0, ans=0.125 +2024-08-29 18:10:12,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=206544.0, ans=0.025 +2024-08-29 18:10:12,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206544.0, ans=0.125 +2024-08-29 18:10:56,014 INFO [train.py:1114] (0/4) Epoch 16, batch 1400, loss[loss=0.1664, simple_loss=0.2294, pruned_loss=0.03752, ctc_loss=0.07102, over 19644.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2764, pruned_loss=0.05318, ctc_loss=0.1005, over 3865651.80 frames. 
], batch size: 46, lr: 9.35e-03, grad_scale: 32.0 +2024-08-29 18:11:03,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=206597.33333333334, ans=0.125 +2024-08-29 18:12:41,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=206597.33333333334, ans=0.09899494936611666 +2024-08-29 18:13:15,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.659e+02 1.830e+02 2.117e+02 3.619e+02, threshold=3.659e+02, percent-clipped=0.0 +2024-08-29 18:13:31,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=206704.0, ans=0.125 +2024-08-29 18:14:30,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=206757.33333333334, ans=0.125 +2024-08-29 18:14:31,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=206757.33333333334, ans=0.125 +2024-08-29 18:14:40,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=206810.66666666666, ans=0.0 +2024-08-29 18:14:43,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=206810.66666666666, ans=0.125 +2024-08-29 18:14:49,675 INFO [train.py:1114] (0/4) Epoch 16, batch 1450, loss[loss=0.2157, simple_loss=0.2826, pruned_loss=0.05385, ctc_loss=0.1026, over 19666.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2771, pruned_loss=0.05348, ctc_loss=0.1011, over 3862662.88 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0 +2024-08-29 18:15:03,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=206864.0, ans=0.125 +2024-08-29 18:15:44,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207024.0, ans=0.1 +2024-08-29 18:16:04,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=12.0 +2024-08-29 18:16:10,746 INFO [train.py:1114] (0/4) Epoch 16, batch 1500, loss[loss=0.2058, simple_loss=0.2871, pruned_loss=0.04497, ctc_loss=0.08629, over 19605.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2776, pruned_loss=0.05349, ctc_loss=0.101, over 3862701.02 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0 +2024-08-29 18:16:32,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.680e+02 1.893e+02 2.490e+02 3.994e+02, threshold=3.786e+02, percent-clipped=1.0 +2024-08-29 18:16:34,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0 +2024-08-29 18:16:34,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207237.33333333334, ans=0.1 +2024-08-29 18:16:35,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.27 vs. 
limit=15.0 +2024-08-29 18:16:36,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=207237.33333333334, ans=0.5 +2024-08-29 18:16:37,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=207237.33333333334, ans=0.0 +2024-08-29 18:17:19,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=207344.0, ans=0.1 +2024-08-29 18:17:24,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.09 vs. limit=15.0 +2024-08-29 18:17:34,469 INFO [train.py:1114] (0/4) Epoch 16, batch 1550, loss[loss=0.2309, simple_loss=0.2942, pruned_loss=0.06051, ctc_loss=0.1167, over 19591.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2778, pruned_loss=0.05378, ctc_loss=0.1016, over 3846710.12 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0 +2024-08-29 18:17:47,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=207450.66666666666, ans=0.125 +2024-08-29 18:17:59,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=207504.0, ans=0.0 +2024-08-29 18:18:00,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=207504.0, ans=0.2 +2024-08-29 18:19:42,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=207610.66666666666, ans=0.025 +2024-08-29 18:19:55,360 INFO [train.py:1114] (0/4) Epoch 16, batch 1600, loss[loss=0.2088, simple_loss=0.272, pruned_loss=0.0525, ctc_loss=0.1013, over 19829.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.277, pruned_loss=0.05343, ctc_loss=0.101, over 3835215.71 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0 +2024-08-29 18:20:28,806 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0 +2024-08-29 18:21:42,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207664.0, ans=0.125 +2024-08-29 18:21:55,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.773e+02 1.965e+02 2.508e+02 5.321e+02, threshold=3.930e+02, percent-clipped=3.0 +2024-08-29 18:21:55,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=207717.33333333334, ans=0.125 +2024-08-29 18:22:14,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=207824.0, ans=0.04949747468305833 +2024-08-29 18:22:52,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=207877.33333333334, ans=0.125 +2024-08-29 18:23:01,487 INFO [train.py:1114] (0/4) Epoch 16, batch 1650, loss[loss=0.1982, simple_loss=0.2782, pruned_loss=0.0426, ctc_loss=0.08261, over 19647.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2773, pruned_loss=0.0539, ctc_loss=0.102, over 3832569.16 frames. 
], batch size: 59, lr: 9.32e-03, grad_scale: 32.0 +2024-08-29 18:23:09,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.73 vs. limit=15.0 +2024-08-29 18:23:29,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=207984.0, ans=0.125 +2024-08-29 18:24:40,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. limit=5.0 +2024-08-29 18:24:41,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=208090.66666666666, ans=0.05 +2024-08-29 18:24:56,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=208144.0, ans=0.125 +2024-08-29 18:26:13,696 INFO [train.py:1114] (0/4) Epoch 16, batch 1700, loss[loss=0.1783, simple_loss=0.238, pruned_loss=0.04258, ctc_loss=0.08366, over 19662.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2773, pruned_loss=0.05377, ctc_loss=0.1019, over 3846213.36 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 32.0 +2024-08-29 18:26:34,625 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.759e+02 2.180e+02 2.878e+02 5.111e+02, threshold=4.361e+02, percent-clipped=4.0 +2024-08-29 18:26:49,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=208357.33333333334, ans=0.125 +2024-08-29 18:27:00,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=208410.66666666666, ans=0.0 +2024-08-29 18:27:13,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.64 vs. limit=15.0 +2024-08-29 18:27:13,923 INFO [train.py:1114] (0/4) Epoch 16, batch 1750, loss[loss=0.169, simple_loss=0.2368, pruned_loss=0.03701, ctc_loss=0.06785, over 19699.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2769, pruned_loss=0.05377, ctc_loss=0.1015, over 3850569.47 frames. ], batch size: 45, lr: 9.31e-03, grad_scale: 32.0 +2024-08-29 18:27:15,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=208464.0, ans=0.125 +2024-08-29 18:27:50,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.96 vs. limit=12.0 +2024-08-29 18:27:51,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=208517.33333333334, ans=0.125 +2024-08-29 18:27:54,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208517.33333333334, ans=0.1 +2024-08-29 18:29:17,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=208624.0, ans=0.125 +2024-08-29 18:29:20,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.72 vs. limit=22.5 +2024-08-29 18:29:22,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.49 vs. 
limit=15.0 +2024-08-29 18:29:23,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=208624.0, ans=0.125 +2024-08-29 18:30:17,798 INFO [train.py:1114] (0/4) Epoch 16, batch 1800, loss[loss=0.2226, simple_loss=0.2805, pruned_loss=0.0589, ctc_loss=0.117, over 19609.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2771, pruned_loss=0.05405, ctc_loss=0.1018, over 3853165.70 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0 +2024-08-29 18:30:22,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=208730.66666666666, ans=0.0 +2024-08-29 18:30:41,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=208784.0, ans=0.125 +2024-08-29 18:30:45,747 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.693e+02 1.985e+02 2.381e+02 4.228e+02, threshold=3.971e+02, percent-clipped=0.0 +2024-08-29 18:31:20,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=208944.0, ans=0.0 +2024-08-29 18:31:23,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=208944.0, ans=0.125 +2024-08-29 18:31:30,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten.whitening_limit, batch_count=208944.0, ans=15.0 +2024-08-29 18:31:41,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=208944.0, ans=0.125 +2024-08-29 18:31:45,953 INFO [train.py:1114] (0/4) Epoch 16, batch 1850, loss[loss=0.2382, simple_loss=0.3049, pruned_loss=0.06175, ctc_loss=0.1202, over 19587.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2762, pruned_loss=0.05334, ctc_loss=0.1003, over 3856066.70 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0 +2024-08-29 18:32:16,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208997.33333333334, ans=0.1 +2024-08-29 18:32:27,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=15.0 +2024-08-29 18:32:54,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=209157.33333333334, ans=0.125 +2024-08-29 18:32:57,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.whiten.whitening_limit, batch_count=209157.33333333334, ans=15.0 +2024-08-29 18:33:04,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=209157.33333333334, ans=0.125 +2024-08-29 18:33:06,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.89 vs. limit=12.0 +2024-08-29 18:33:13,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=209210.66666666666, ans=0.0 +2024-08-29 18:33:17,368 INFO [train.py:1114] (0/4) Epoch 16, batch 1900, loss[loss=0.2187, simple_loss=0.2936, pruned_loss=0.05123, ctc_loss=0.1034, over 19654.00 frames. 
], tot_loss[loss=0.2116, simple_loss=0.2767, pruned_loss=0.05325, ctc_loss=0.1001, over 3861856.26 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0 +2024-08-29 18:33:28,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.32 vs. limit=15.0 +2024-08-29 18:33:40,796 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.785e+02 2.354e+02 2.964e+02 6.037e+02, threshold=4.708e+02, percent-clipped=9.0 +2024-08-29 18:33:46,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209370.66666666666, ans=0.125 +2024-08-29 18:33:47,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-08-29 18:33:55,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=209424.0, ans=0.0 +2024-08-29 18:33:58,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=209424.0, ans=0.125 +2024-08-29 18:34:30,827 INFO [train.py:1114] (0/4) Epoch 16, batch 1950, loss[loss=0.2053, simple_loss=0.2661, pruned_loss=0.05257, ctc_loss=0.09872, over 19579.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2776, pruned_loss=0.05325, ctc_loss=0.1001, over 3870276.46 frames. ], batch size: 52, lr: 9.29e-03, grad_scale: 32.0 +2024-08-29 18:34:37,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.54 vs. limit=15.0 +2024-08-29 18:35:33,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=209690.66666666666, ans=0.125 +2024-08-29 18:35:39,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209744.0, ans=0.1 +2024-08-29 18:35:41,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209744.0, ans=0.125 +2024-08-29 18:35:42,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209744.0, ans=0.1 +2024-08-29 18:35:48,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=209744.0, ans=0.0 +2024-08-29 18:35:51,684 INFO [train.py:1114] (0/4) Epoch 16, batch 2000, loss[loss=0.1946, simple_loss=0.2479, pruned_loss=0.05156, ctc_loss=0.09541, over 19680.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2782, pruned_loss=0.05359, ctc_loss=0.1008, over 3856251.01 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0 +2024-08-29 18:35:54,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-29 18:35:59,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.46 vs. 
limit=22.5 +2024-08-29 18:36:00,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=209797.33333333334, ans=0.5 +2024-08-29 18:36:05,404 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 18:36:05,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=209850.66666666666, ans=0.125 +2024-08-29 18:36:05,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=209850.66666666666, ans=0.2 +2024-08-29 18:36:07,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209850.66666666666, ans=0.125 +2024-08-29 18:36:08,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=209850.66666666666, ans=0.2 +2024-08-29 18:36:13,157 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.666e+02 1.888e+02 2.185e+02 3.516e+02, threshold=3.775e+02, percent-clipped=0.0 +2024-08-29 18:37:02,162 INFO [train.py:1114] (0/4) Epoch 16, batch 2050, loss[loss=0.1932, simple_loss=0.2522, pruned_loss=0.04872, ctc_loss=0.09173, over 19724.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2772, pruned_loss=0.05359, ctc_loss=0.1007, over 3852418.76 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0 +2024-08-29 18:37:47,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.09 vs. limit=15.0 +2024-08-29 18:38:21,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=210170.66666666666, ans=10.0 +2024-08-29 18:38:59,620 INFO [train.py:1114] (0/4) Epoch 16, batch 2100, loss[loss=0.2214, simple_loss=0.285, pruned_loss=0.05701, ctc_loss=0.1093, over 19785.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2767, pruned_loss=0.05317, ctc_loss=0.1002, over 3859205.20 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 32.0 +2024-08-29 18:39:02,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210330.66666666666, ans=0.1 +2024-08-29 18:39:11,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210384.0, ans=0.125 +2024-08-29 18:39:22,238 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.792e+02 2.112e+02 2.675e+02 4.176e+02, threshold=4.223e+02, percent-clipped=3.0 +2024-08-29 18:39:40,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=210490.66666666666, ans=0.0 +2024-08-29 18:39:56,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.07 vs. limit=15.0 +2024-08-29 18:39:57,981 INFO [train.py:1114] (0/4) Epoch 16, batch 2150, loss[loss=0.2026, simple_loss=0.2657, pruned_loss=0.0516, ctc_loss=0.09064, over 19604.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2756, pruned_loss=0.05266, ctc_loss=0.09911, over 3869138.34 frames. 
], batch size: 52, lr: 9.27e-03, grad_scale: 32.0 +2024-08-29 18:39:59,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=210597.33333333334, ans=0.125 +2024-08-29 18:40:09,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=210650.66666666666, ans=0.0 +2024-08-29 18:40:27,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=210650.66666666666, ans=6.0 +2024-08-29 18:40:45,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=6.0 +2024-08-29 18:41:06,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=12.0 +2024-08-29 18:41:06,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=210810.66666666666, ans=0.1 +2024-08-29 18:41:08,971 INFO [train.py:1114] (0/4) Epoch 16, batch 2200, loss[loss=0.2045, simple_loss=0.2745, pruned_loss=0.04844, ctc_loss=0.09367, over 19590.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2757, pruned_loss=0.05278, ctc_loss=0.09945, over 3866681.74 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0 +2024-08-29 18:41:09,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=210864.0, ans=0.0 +2024-08-29 18:41:11,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=210864.0, ans=0.0 +2024-08-29 18:41:22,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=210917.33333333334, ans=0.0 +2024-08-29 18:41:29,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.757e+02 2.042e+02 2.598e+02 4.148e+02, threshold=4.084e+02, percent-clipped=0.0 +2024-08-29 18:42:04,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=210970.66666666666, ans=0.0 +2024-08-29 18:42:15,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=15.0 +2024-08-29 18:42:28,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=211077.33333333334, ans=0.125 +2024-08-29 18:42:31,569 INFO [train.py:1114] (0/4) Epoch 16, batch 2250, loss[loss=0.2206, simple_loss=0.2898, pruned_loss=0.05386, ctc_loss=0.1091, over 19629.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2764, pruned_loss=0.05311, ctc_loss=0.1001, over 3866935.30 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0 +2024-08-29 18:42:36,795 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.37 vs. limit=15.0 +2024-08-29 18:42:42,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.56 vs. 
limit=15.0 +2024-08-29 18:42:46,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211184.0, ans=0.1 +2024-08-29 18:43:17,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=211344.0, ans=0.0 +2024-08-29 18:43:27,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=211344.0, ans=0.0 +2024-08-29 18:44:22,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=211344.0, ans=0.125 +2024-08-29 18:44:24,268 INFO [train.py:1114] (0/4) Epoch 16, batch 2300, loss[loss=0.2157, simple_loss=0.2691, pruned_loss=0.05912, ctc_loss=0.1101, over 19502.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.276, pruned_loss=0.05349, ctc_loss=0.1007, over 3861191.67 frames. ], batch size: 49, lr: 9.25e-03, grad_scale: 32.0 +2024-08-29 18:44:29,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.81 vs. limit=22.5 +2024-08-29 18:45:03,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=211450.66666666666, ans=0.0 +2024-08-29 18:45:09,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=211450.66666666666, ans=0.125 +2024-08-29 18:45:10,438 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.785e+02 2.121e+02 2.618e+02 4.213e+02, threshold=4.241e+02, percent-clipped=2.0 +2024-08-29 18:45:18,431 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 18:45:26,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=211557.33333333334, ans=0.025 +2024-08-29 18:45:26,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=211557.33333333334, ans=0.0 +2024-08-29 18:45:34,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=211610.66666666666, ans=0.2 +2024-08-29 18:45:39,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=211610.66666666666, ans=0.2 +2024-08-29 18:45:59,061 INFO [train.py:1114] (0/4) Epoch 16, batch 2350, loss[loss=0.2325, simple_loss=0.2935, pruned_loss=0.06398, ctc_loss=0.1091, over 19649.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2763, pruned_loss=0.05378, ctc_loss=0.1008, over 3862969.39 frames. 
], batch size: 63, lr: 9.24e-03, grad_scale: 32.0 +2024-08-29 18:46:10,091 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=4.979e-02 +2024-08-29 18:46:40,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=211824.0, ans=0.125 +2024-08-29 18:46:41,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=211824.0, ans=0.0 +2024-08-29 18:46:43,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=211824.0, ans=0.125 +2024-08-29 18:46:57,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=211877.33333333334, ans=0.1 +2024-08-29 18:47:00,341 INFO [train.py:1114] (0/4) Epoch 16, batch 2400, loss[loss=0.2214, simple_loss=0.2937, pruned_loss=0.05396, ctc_loss=0.1029, over 19453.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2783, pruned_loss=0.05441, ctc_loss=0.1017, over 3857514.17 frames. ], batch size: 67, lr: 9.24e-03, grad_scale: 32.0 +2024-08-29 18:47:03,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211930.66666666666, ans=0.125 +2024-08-29 18:47:09,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.35 vs. limit=10.0 +2024-08-29 18:47:20,732 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.800e+02 2.132e+02 2.653e+02 4.129e+02, threshold=4.264e+02, percent-clipped=0.0 +2024-08-29 18:47:56,917 INFO [train.py:1114] (0/4) Epoch 16, batch 2450, loss[loss=0.2771, simple_loss=0.3084, pruned_loss=0.08855, ctc_loss=0.1719, over 13465.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2821, pruned_loss=0.05729, ctc_loss=0.1075, over 3733785.23 frames. ], batch size: 140, lr: 9.23e-03, grad_scale: 32.0 +2024-08-29 18:48:07,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=212197.33333333334, ans=0.125 +2024-08-29 18:48:31,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=212304.0, ans=0.025 +2024-08-29 18:48:45,437 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-16.pt +2024-08-29 18:55:35,482 INFO [train.py:1114] (0/4) Epoch 17, batch 0, loss[loss=0.2431, simple_loss=0.286, pruned_loss=0.07327, ctc_loss=0.1339, over 19800.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.286, pruned_loss=0.07327, ctc_loss=0.1339, over 19800.00 frames. 
], batch size: 49, lr: 8.95e-03, grad_scale: 32.0 +2024-08-29 18:55:35,484 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 18:56:01,361 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.1628, 4.4569, 3.9691, 4.1827], device='cuda:0') +2024-08-29 18:56:01,791 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.8841, 2.0599, 3.4462, 3.5285], device='cuda:0') +2024-08-29 18:56:04,690 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.1843, simple_loss=0.2733, pruned_loss=0.03544, ctc_loss=0.06098, over 944034.00 frames. +2024-08-29 18:56:04,692 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13691MB +2024-08-29 18:56:54,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-29 18:56:56,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-29 18:58:11,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-29 18:58:20,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=212512.0, ans=0.125 +2024-08-29 18:58:20,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=212512.0, ans=0.125 +2024-08-29 18:58:29,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=12.0 +2024-08-29 18:58:30,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.824e+02 2.030e+02 2.233e+02 3.073e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-29 18:58:34,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.96 vs. limit=22.5 +2024-08-29 19:05:26,875 INFO [train.py:1114] (0/4) Epoch 17, batch 50, loss[loss=0.1819, simple_loss=0.2478, pruned_loss=0.04161, ctc_loss=0.08164, over 19711.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2777, pruned_loss=0.05312, ctc_loss=0.1011, over 844772.04 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 32.0 +2024-08-29 19:07:29,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.60 vs. limit=10.0 +2024-08-29 19:07:31,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212672.0, ans=0.125 +2024-08-29 19:07:53,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-08-29 19:08:30,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=212832.0, ans=0.125 +2024-08-29 19:08:52,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.30 vs. limit=15.0 +2024-08-29 19:08:52,562 INFO [train.py:1114] (0/4) Epoch 17, batch 100, loss[loss=0.1848, simple_loss=0.255, pruned_loss=0.04151, ctc_loss=0.07903, over 19727.00 frames. 
], tot_loss[loss=0.2148, simple_loss=0.2796, pruned_loss=0.05439, ctc_loss=0.103, over 1498273.11 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 32.0 +2024-08-29 19:09:16,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=213045.33333333334, ans=0.125 +2024-08-29 19:09:25,912 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.707e+02 1.910e+02 2.335e+02 3.363e+02, threshold=3.820e+02, percent-clipped=0.0 +2024-08-29 19:09:29,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=213098.66666666666, ans=0.5 +2024-08-29 19:09:37,946 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:09:58,148 INFO [train.py:1114] (0/4) Epoch 17, batch 150, loss[loss=0.2011, simple_loss=0.2579, pruned_loss=0.05256, ctc_loss=0.09771, over 19685.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2774, pruned_loss=0.05373, ctc_loss=0.1016, over 2027311.53 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0 +2024-08-29 19:12:29,049 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-40000.pt +2024-08-29 19:16:11,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=213365.33333333334, ans=0.125 +2024-08-29 19:16:25,552 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:16:28,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213472.0, ans=0.125 +2024-08-29 19:16:29,779 INFO [train.py:1114] (0/4) Epoch 17, batch 200, loss[loss=0.2251, simple_loss=0.2875, pruned_loss=0.05821, ctc_loss=0.1158, over 18082.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2761, pruned_loss=0.05311, ctc_loss=0.1003, over 2434813.02 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 32.0 +2024-08-29 19:24:57,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=213472.0, ans=0.125 +2024-08-29 19:25:04,090 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.17 vs. limit=22.5 +2024-08-29 19:27:12,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213525.33333333334, ans=0.1 +2024-08-29 19:27:13,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=213525.33333333334, ans=0.125 +2024-08-29 19:27:57,285 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.724e+02 1.931e+02 2.405e+02 4.691e+02, threshold=3.862e+02, percent-clipped=4.0 +2024-08-29 19:28:33,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=213685.33333333334, ans=0.1 +2024-08-29 19:28:38,487 INFO [train.py:1114] (0/4) Epoch 17, batch 250, loss[loss=0.226, simple_loss=0.2832, pruned_loss=0.06089, ctc_loss=0.1177, over 19421.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2756, pruned_loss=0.05304, ctc_loss=0.09995, over 2755282.98 frames. 
], batch size: 67, lr: 8.92e-03, grad_scale: 32.0 +2024-08-29 19:30:01,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=213952.0, ans=0.2 +2024-08-29 19:30:03,451 INFO [train.py:1114] (0/4) Epoch 17, batch 300, loss[loss=0.251, simple_loss=0.3031, pruned_loss=0.07208, ctc_loss=0.1367, over 19530.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2748, pruned_loss=0.05262, ctc_loss=0.09908, over 3000773.43 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 32.0 +2024-08-29 19:32:02,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.689e+02 1.972e+02 2.447e+02 4.331e+02, threshold=3.945e+02, percent-clipped=1.0 +2024-08-29 19:32:19,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214165.33333333334, ans=0.1 +2024-08-29 19:32:41,692 INFO [train.py:1114] (0/4) Epoch 17, batch 350, loss[loss=0.2015, simple_loss=0.2611, pruned_loss=0.05113, ctc_loss=0.09911, over 19745.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2755, pruned_loss=0.05256, ctc_loss=0.09925, over 3190658.25 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0 +2024-08-29 19:32:51,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214272.0, ans=0.1 +2024-08-29 19:34:08,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.08 vs. limit=15.0 +2024-08-29 19:34:18,290 INFO [train.py:1114] (0/4) Epoch 17, batch 400, loss[loss=0.2022, simple_loss=0.2822, pruned_loss=0.04416, ctc_loss=0.08466, over 19506.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2753, pruned_loss=0.0523, ctc_loss=0.09851, over 3343295.33 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-29 19:34:38,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=214538.66666666666, ans=0.125 +2024-08-29 19:34:54,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=214592.0, ans=0.0 +2024-08-29 19:35:31,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-29 19:35:39,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-29 19:36:30,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.665e+02 1.964e+02 2.553e+02 4.238e+02, threshold=3.929e+02, percent-clipped=2.0 +2024-08-29 19:37:24,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=214698.66666666666, ans=0.0 +2024-08-29 19:37:57,089 INFO [train.py:1114] (0/4) Epoch 17, batch 450, loss[loss=0.2257, simple_loss=0.2935, pruned_loss=0.05695, ctc_loss=0.1102, over 19619.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2756, pruned_loss=0.05251, ctc_loss=0.09866, over 3449981.31 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:37:58,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.72 vs. 
limit=15.0 +2024-08-29 19:38:13,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214805.33333333334, ans=0.125 +2024-08-29 19:39:29,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=214912.0, ans=0.0 +2024-08-29 19:39:53,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=214965.33333333334, ans=0.0 +2024-08-29 19:40:26,604 INFO [train.py:1114] (0/4) Epoch 17, batch 500, loss[loss=0.2231, simple_loss=0.2932, pruned_loss=0.05562, ctc_loss=0.1042, over 19663.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2747, pruned_loss=0.05188, ctc_loss=0.09752, over 3545189.32 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:40:27,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=15.0 +2024-08-29 19:40:29,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215072.0, ans=0.125 +2024-08-29 19:40:30,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215072.0, ans=0.1 +2024-08-29 19:40:30,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=215072.0, ans=0.1 +2024-08-29 19:40:36,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=215072.0, ans=0.2 +2024-08-29 19:41:55,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=215178.66666666666, ans=0.035 +2024-08-29 19:42:38,137 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.765e+02 1.983e+02 2.603e+02 4.687e+02, threshold=3.966e+02, percent-clipped=3.0 +2024-08-29 19:43:10,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=215232.0, ans=0.0 +2024-08-29 19:43:45,805 INFO [train.py:1114] (0/4) Epoch 17, batch 550, loss[loss=0.2106, simple_loss=0.2854, pruned_loss=0.04923, ctc_loss=0.09326, over 19206.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2747, pruned_loss=0.05185, ctc_loss=0.09746, over 3607486.88 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-29 19:44:10,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=215338.66666666666, ans=0.5 +2024-08-29 19:45:00,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=215392.0, ans=0.125 +2024-08-29 19:45:28,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.87 vs. 
limit=22.5 +2024-08-29 19:46:02,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=215498.66666666666, ans=0.0 +2024-08-29 19:46:13,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=215552.0, ans=0.2 +2024-08-29 19:47:01,450 INFO [train.py:1114] (0/4) Epoch 17, batch 600, loss[loss=0.2217, simple_loss=0.2863, pruned_loss=0.05838, ctc_loss=0.1006, over 19446.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2747, pruned_loss=0.05163, ctc_loss=0.09708, over 3664553.91 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:47:39,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.81 vs. limit=15.0 +2024-08-29 19:47:49,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.74 vs. limit=15.0 +2024-08-29 19:48:09,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.31 vs. limit=10.0 +2024-08-29 19:48:19,060 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.690e+02 1.951e+02 2.307e+02 4.172e+02, threshold=3.901e+02, percent-clipped=2.0 +2024-08-29 19:48:27,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=215765.33333333334, ans=0.0 +2024-08-29 19:48:27,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=22.5 +2024-08-29 19:49:06,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-29 19:49:21,617 INFO [train.py:1114] (0/4) Epoch 17, batch 650, loss[loss=0.1963, simple_loss=0.2638, pruned_loss=0.04705, ctc_loss=0.08668, over 19762.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2739, pruned_loss=0.05141, ctc_loss=0.09665, over 3715413.88 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:49:42,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=215925.33333333334, ans=0.125 +2024-08-29 19:49:44,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=215925.33333333334, ans=0.125 +2024-08-29 19:50:28,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=216032.0, ans=0.2 +2024-08-29 19:51:32,034 INFO [train.py:1114] (0/4) Epoch 17, batch 700, loss[loss=0.2105, simple_loss=0.2803, pruned_loss=0.05173, ctc_loss=0.09324, over 19728.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.275, pruned_loss=0.05171, ctc_loss=0.09712, over 3747076.79 frames. 
], batch size: 51, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:51:33,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216138.66666666666, ans=0.1 +2024-08-29 19:51:56,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=216245.33333333334, ans=0.2 +2024-08-29 19:52:43,598 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.757e+02 1.978e+02 2.439e+02 3.670e+02, threshold=3.956e+02, percent-clipped=0.0 +2024-08-29 19:52:48,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=216298.66666666666, ans=0.125 +2024-08-29 19:53:32,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216298.66666666666, ans=0.125 +2024-08-29 19:53:39,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=216352.0, ans=0.09899494936611666 +2024-08-29 19:53:46,884 INFO [train.py:1114] (0/4) Epoch 17, batch 750, loss[loss=0.1971, simple_loss=0.2772, pruned_loss=0.04325, ctc_loss=0.07651, over 19495.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2748, pruned_loss=0.05155, ctc_loss=0.09683, over 3773742.85 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:54:30,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=216405.33333333334, ans=15.0 +2024-08-29 20:06:06,360 INFO [train.py:1050] (0/4) Caught exception: [Rank 0] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=98948, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600000 milliseconds before timing out.. 
+2024-08-29 20:06:06,361 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-0.pt +2024-08-29 20:06:09,238 INFO [train.py:1413] (0/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-bd023447-34aa-53ff-3278-e0a594ac807a.pt +2024-08-29 20:07:54,230 INFO [train.py:1419] (0/4) features shape: torch.Size([53, 1497, 80]) +2024-08-29 20:07:54,232 INFO [train.py:1423] (0/4) num tokens: 4003 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-1 new file mode 100644 index 0000000000000000000000000000000000000000..083bebac6759fd1199f37434b78ebd346fab4a62 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-1 @@ -0,0 +1,1139 @@ +2024-08-29 13:08:38,318 INFO [train.py:1182] (1/4) Training started +2024-08-29 13:08:38,319 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-29 13:08:38,321 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 13:08:38,322 INFO [train.py:1212] (1/4) About to create model +2024-08-29 13:08:39,835 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-29 13:08:39,891 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 13:08:59,021 INFO [train.py:1231] (1/4) Using DDP +2024-08-29 13:09:40,461 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-29 13:09:40,782 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-29 13:09:40,783 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 13:09:40,789 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-29 13:09:40,789 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-29 13:09:40,789 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-29 13:09:40,790 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-29 13:09:40,790 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-29 13:09:40,790 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-29 13:09:42,376 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-29 13:09:42,378 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-29 13:09:42,379 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-29 13:09:42,446 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-29 13:09:42,770 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-29 13:09:42,771 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 13:13:40,052 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.58 vs. limit=3.0 +2024-08-29 13:14:18,565 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-29 13:14:21,278 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 13:14:38,617 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 13:14:45,672 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 13:15:10,806 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 13:15:11,674 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=4.55 vs. limit=3.0 +2024-08-29 13:15:12,353 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 13:15:12,372 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-29 13:16:15,181 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2686, pruned_loss=0.05116, ctc_loss=0.1002, over 19403.00 frames. 
], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:16:15,181 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 13:16:31,404 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 13:16:31,405 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13467MB +2024-08-29 13:20:30,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-08-29 13:24:20,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172688.0, ans=0.125 +2024-08-29 13:24:21,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.40 vs. limit=12.0 +2024-08-29 13:24:48,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=172688.0, ans=0.0 +2024-08-29 13:25:58,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=172794.66666666666, ans=0.1 +2024-08-29 13:26:36,726 INFO [train.py:1114] (1/4) Epoch 14, batch 50, loss[loss=0.2173, simple_loss=0.2665, pruned_loss=0.06156, ctc_loss=0.1127, over 19699.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2858, pruned_loss=0.05956, ctc_loss=0.1123, over 844835.93 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:26:49,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=172848.0, ans=0.0 +2024-08-29 13:27:08,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=172848.0, ans=0.0 +2024-08-29 13:30:25,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=172901.33333333334, ans=0.1 +2024-08-29 13:30:32,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172901.33333333334, ans=0.125 +2024-08-29 13:30:46,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=172954.66666666666, ans=0.0 +2024-08-29 13:30:46,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=172954.66666666666, ans=0.0 +2024-08-29 13:32:29,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.749e+02 1.974e+02 2.504e+02 4.970e+02, threshold=3.948e+02, percent-clipped=4.0 +2024-08-29 13:32:58,194 INFO [train.py:1114] (1/4) Epoch 14, batch 100, loss[loss=0.22, simple_loss=0.2774, pruned_loss=0.05873, ctc_loss=0.1131, over 19712.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.287, pruned_loss=0.05992, ctc_loss=0.1133, over 1499995.38 frames. 
], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:33:31,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=173168.0, ans=0.125 +2024-08-29 13:34:41,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=173274.66666666666, ans=0.125 +2024-08-29 13:35:52,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173328.0, ans=0.125 +2024-08-29 13:36:02,973 INFO [train.py:1114] (1/4) Epoch 14, batch 150, loss[loss=0.2055, simple_loss=0.2633, pruned_loss=0.05313, ctc_loss=0.1038, over 19717.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2835, pruned_loss=0.05793, ctc_loss=0.1095, over 2027349.01 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:36:03,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=173381.33333333334, ans=0.025 +2024-08-29 13:36:04,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=173381.33333333334, ans=0.2 +2024-08-29 13:36:21,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.45 vs. limit=22.5 +2024-08-29 13:36:22,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=173434.66666666666, ans=0.04949747468305833 +2024-08-29 13:36:47,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173488.0, ans=0.125 +2024-08-29 13:37:19,620 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.730e+02 2.035e+02 2.422e+02 3.683e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-29 13:37:21,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173594.66666666666, ans=0.125 +2024-08-29 13:37:30,501 INFO [train.py:1114] (1/4) Epoch 14, batch 200, loss[loss=0.2308, simple_loss=0.2892, pruned_loss=0.06283, ctc_loss=0.1167, over 18213.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2814, pruned_loss=0.05694, ctc_loss=0.1071, over 2434363.68 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:37:31,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.01 vs. limit=15.0 +2024-08-29 13:37:33,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=173648.0, ans=0.2 +2024-08-29 13:37:35,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=173648.0, ans=0.125 +2024-08-29 13:37:43,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173701.33333333334, ans=0.1 +2024-08-29 13:37:50,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.41 vs. 
limit=15.0 +2024-08-29 13:40:14,108 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.762e-03 +2024-08-29 13:40:51,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=173808.0, ans=0.125 +2024-08-29 13:41:36,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=173861.33333333334, ans=0.0 +2024-08-29 13:42:16,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=13.55 vs. limit=15.0 +2024-08-29 13:42:18,842 INFO [train.py:1114] (1/4) Epoch 14, batch 250, loss[loss=0.2318, simple_loss=0.2932, pruned_loss=0.06315, ctc_loss=0.1102, over 19420.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.282, pruned_loss=0.05716, ctc_loss=0.1079, over 2753707.57 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:43:03,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-08-29 13:43:57,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=174021.33333333334, ans=0.2 +2024-08-29 13:44:01,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=174074.66666666666, ans=0.125 +2024-08-29 13:44:11,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.51 vs. limit=15.0 +2024-08-29 13:44:13,473 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.787e+02 2.022e+02 2.717e+02 4.953e+02, threshold=4.043e+02, percent-clipped=2.0 +2024-08-29 13:44:52,056 INFO [train.py:1114] (1/4) Epoch 14, batch 300, loss[loss=0.264, simple_loss=0.3105, pruned_loss=0.07908, ctc_loss=0.1484, over 19548.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2822, pruned_loss=0.05737, ctc_loss=0.1081, over 2999566.01 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:45:16,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=174288.0, ans=0.025 +2024-08-29 13:46:17,851 INFO [train.py:1114] (1/4) Epoch 14, batch 350, loss[loss=0.1752, simple_loss=0.2415, pruned_loss=0.04017, ctc_loss=0.07129, over 19763.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2829, pruned_loss=0.05757, ctc_loss=0.1084, over 3190345.80 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 16.0 +2024-08-29 13:46:24,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=174448.0, ans=0.125 +2024-08-29 13:47:25,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=174608.0, ans=0.0 +2024-08-29 13:47:33,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.62 vs. 
limit=22.5 +2024-08-29 13:47:39,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.793e+02 2.058e+02 2.658e+02 4.429e+02, threshold=4.116e+02, percent-clipped=3.0 +2024-08-29 13:48:31,275 INFO [train.py:1114] (1/4) Epoch 14, batch 400, loss[loss=0.2267, simple_loss=0.2899, pruned_loss=0.05903, ctc_loss=0.1134, over 19500.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2829, pruned_loss=0.05788, ctc_loss=0.109, over 3341930.78 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:50:41,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=174874.66666666666, ans=0.0 +2024-08-29 13:50:54,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=174928.0, ans=0.0 +2024-08-29 13:50:57,540 INFO [train.py:1114] (1/4) Epoch 14, batch 450, loss[loss=0.2046, simple_loss=0.2781, pruned_loss=0.04812, ctc_loss=0.08732, over 19622.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2829, pruned_loss=0.05801, ctc_loss=0.109, over 3450368.11 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:51:48,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=175194.66666666666, ans=0.125 +2024-08-29 13:51:50,579 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.713e+02 1.900e+02 2.415e+02 4.159e+02, threshold=3.800e+02, percent-clipped=2.0 +2024-08-29 13:52:16,314 INFO [train.py:1114] (1/4) Epoch 14, batch 500, loss[loss=0.2297, simple_loss=0.2931, pruned_loss=0.06113, ctc_loss=0.1101, over 19650.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.282, pruned_loss=0.05761, ctc_loss=0.1083, over 3545951.97 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:52:40,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=175301.33333333334, ans=0.2 +2024-08-29 13:53:02,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=175408.0, ans=0.2 +2024-08-29 13:53:12,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=175461.33333333334, ans=0.125 +2024-08-29 13:53:17,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175461.33333333334, ans=0.1 +2024-08-29 13:53:23,922 INFO [train.py:1114] (1/4) Epoch 14, batch 550, loss[loss=0.2261, simple_loss=0.2908, pruned_loss=0.05747, ctc_loss=0.1162, over 19266.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2818, pruned_loss=0.05716, ctc_loss=0.1076, over 3608026.80 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:53:30,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=175514.66666666666, ans=0.05 +2024-08-29 13:53:41,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=175568.0, ans=0.125 +2024-08-29 13:53:41,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.81 vs. 
limit=22.5 +2024-08-29 13:53:50,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=175621.33333333334, ans=0.125 +2024-08-29 13:53:50,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=175621.33333333334, ans=0.0 +2024-08-29 13:54:12,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.85 vs. limit=6.0 +2024-08-29 13:54:18,075 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.725e+02 1.963e+02 2.348e+02 4.063e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-29 13:54:18,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=175728.0, ans=0.0 +2024-08-29 13:54:22,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.40 vs. limit=15.0 +2024-08-29 13:54:23,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175728.0, ans=0.1 +2024-08-29 13:54:27,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=175781.33333333334, ans=0.025 +2024-08-29 13:54:28,216 INFO [train.py:1114] (1/4) Epoch 14, batch 600, loss[loss=0.2724, simple_loss=0.3185, pruned_loss=0.082, ctc_loss=0.1559, over 19321.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2823, pruned_loss=0.05751, ctc_loss=0.1082, over 3664666.00 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:54:32,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=175781.33333333334, ans=0.2 +2024-08-29 13:54:39,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175834.66666666666, ans=0.1 +2024-08-29 13:54:58,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175888.0, ans=0.1 +2024-08-29 13:55:04,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=175941.33333333334, ans=0.0 +2024-08-29 13:55:05,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-08-29 13:55:22,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=175994.66666666666, ans=0.1 +2024-08-29 13:55:30,814 INFO [train.py:1114] (1/4) Epoch 14, batch 650, loss[loss=0.2148, simple_loss=0.2849, pruned_loss=0.05261, ctc_loss=0.09861, over 19752.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2823, pruned_loss=0.05771, ctc_loss=0.1087, over 3715941.04 frames. 
], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:55:42,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=176101.33333333334, ans=0.0 +2024-08-29 13:55:45,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=176101.33333333334, ans=0.0 +2024-08-29 13:55:59,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=176154.66666666666, ans=0.0 +2024-08-29 13:56:04,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=176154.66666666666, ans=0.04949747468305833 +2024-08-29 13:56:14,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=176208.0, ans=0.125 +2024-08-29 13:56:20,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0 +2024-08-29 13:56:24,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176261.33333333334, ans=0.1 +2024-08-29 13:56:24,636 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.743e+02 2.058e+02 2.560e+02 4.338e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-29 13:56:28,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=176261.33333333334, ans=0.2 +2024-08-29 13:56:34,649 INFO [train.py:1114] (1/4) Epoch 14, batch 700, loss[loss=0.1908, simple_loss=0.2601, pruned_loss=0.0447, ctc_loss=0.08039, over 19721.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2824, pruned_loss=0.05746, ctc_loss=0.1082, over 3748643.73 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:56:40,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=176314.66666666666, ans=0.0 +2024-08-29 13:57:35,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=176368.0, ans=0.125 +2024-08-29 13:57:43,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176421.33333333334, ans=0.1 +2024-08-29 13:58:03,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=176528.0, ans=0.125 +2024-08-29 13:58:08,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=176528.0, ans=0.025 +2024-08-29 13:58:09,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=176528.0, ans=0.2 +2024-08-29 13:58:12,845 INFO [train.py:1114] (1/4) Epoch 14, batch 750, loss[loss=0.2208, simple_loss=0.284, pruned_loss=0.05653, ctc_loss=0.1115, over 19855.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2817, pruned_loss=0.05711, ctc_loss=0.1074, over 3776206.67 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:58:20,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.82 vs. 
limit=22.5 +2024-08-29 13:58:25,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=176634.66666666666, ans=0.2 +2024-08-29 13:58:25,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=176634.66666666666, ans=0.2 +2024-08-29 13:58:35,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176634.66666666666, ans=0.125 +2024-08-29 13:58:35,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=176634.66666666666, ans=0.07 +2024-08-29 13:58:47,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=176688.0, ans=0.125 +2024-08-29 13:58:53,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176741.33333333334, ans=0.125 +2024-08-29 13:59:06,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.910e+02 2.277e+02 2.884e+02 4.780e+02, threshold=4.554e+02, percent-clipped=3.0 +2024-08-29 13:59:28,730 INFO [train.py:1114] (1/4) Epoch 14, batch 800, loss[loss=0.1821, simple_loss=0.2497, pruned_loss=0.04109, ctc_loss=0.08083, over 19804.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2821, pruned_loss=0.05739, ctc_loss=0.1081, over 3797126.98 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:59:28,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=176848.0, ans=0.025 +2024-08-29 13:59:31,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0 +2024-08-29 13:59:38,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.25 vs. limit=15.0 +2024-08-29 13:59:38,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.27 vs. limit=15.0 +2024-08-29 13:59:44,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-08-29 14:01:15,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.56 vs. limit=10.0 +2024-08-29 14:02:36,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177061.33333333334, ans=0.125 +2024-08-29 14:02:41,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177061.33333333334, ans=0.125 +2024-08-29 14:02:49,620 INFO [train.py:1114] (1/4) Epoch 14, batch 850, loss[loss=0.2069, simple_loss=0.2805, pruned_loss=0.04845, ctc_loss=0.09075, over 19658.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.282, pruned_loss=0.0576, ctc_loss=0.1084, over 3815224.61 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:02:55,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.96 vs. 
limit=15.0 +2024-08-29 14:03:01,946 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:03:04,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=177168.0, ans=0.0 +2024-08-29 14:03:40,317 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.703e+02 1.970e+02 2.385e+02 3.831e+02, threshold=3.939e+02, percent-clipped=0.0 +2024-08-29 14:03:49,898 INFO [train.py:1114] (1/4) Epoch 14, batch 900, loss[loss=0.207, simple_loss=0.2675, pruned_loss=0.05334, ctc_loss=0.09936, over 19433.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2824, pruned_loss=0.05798, ctc_loss=0.109, over 3818547.13 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:04:15,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=177488.0, ans=0.125 +2024-08-29 14:04:45,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=177594.66666666666, ans=0.125 +2024-08-29 14:04:52,320 INFO [train.py:1114] (1/4) Epoch 14, batch 950, loss[loss=0.2387, simple_loss=0.2805, pruned_loss=0.0714, ctc_loss=0.1354, over 19512.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2823, pruned_loss=0.05788, ctc_loss=0.1089, over 3821519.80 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:04:54,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=177648.0, ans=0.125 +2024-08-29 14:05:01,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=177648.0, ans=0.0 +2024-08-29 14:05:03,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-29 14:05:07,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-29 14:05:13,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-29 14:05:26,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=177754.66666666666, ans=0.125 +2024-08-29 14:06:02,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177808.0, ans=0.125 +2024-08-29 14:06:18,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.94 vs. limit=15.0 +2024-08-29 14:06:19,919 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.335e+02 1.740e+02 1.996e+02 2.581e+02 3.979e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-29 14:07:04,890 INFO [train.py:1114] (1/4) Epoch 14, batch 1000, loss[loss=0.1933, simple_loss=0.2643, pruned_loss=0.04452, ctc_loss=0.08323, over 19863.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2833, pruned_loss=0.0584, ctc_loss=0.1099, over 3818921.51 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:07:10,372 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:07:19,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177914.66666666666, ans=0.125 +2024-08-29 14:08:29,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=178021.33333333334, ans=0.125 +2024-08-29 14:08:30,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=178074.66666666666, ans=0.05 +2024-08-29 14:08:33,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=178074.66666666666, ans=0.125 +2024-08-29 14:08:56,378 INFO [train.py:1114] (1/4) Epoch 14, batch 1050, loss[loss=0.2189, simple_loss=0.2868, pruned_loss=0.05452, ctc_loss=0.1048, over 19861.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2825, pruned_loss=0.0581, ctc_loss=0.1093, over 3824616.59 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:09:09,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.57 vs. limit=10.0 +2024-08-29 14:09:09,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178234.66666666666, ans=0.125 +2024-08-29 14:09:14,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=178234.66666666666, ans=0.025 +2024-08-29 14:09:20,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=178288.0, ans=0.07 +2024-08-29 14:09:27,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=178288.0, ans=0.125 +2024-08-29 14:09:30,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178288.0, ans=0.125 +2024-08-29 14:09:44,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=178394.66666666666, ans=0.0 +2024-08-29 14:09:46,659 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.811e+02 2.215e+02 2.668e+02 4.320e+02, threshold=4.429e+02, percent-clipped=1.0 +2024-08-29 14:10:24,260 INFO [train.py:1114] (1/4) Epoch 14, batch 1100, loss[loss=0.2085, simple_loss=0.2777, pruned_loss=0.05102, ctc_loss=0.09312, over 19567.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2822, pruned_loss=0.05792, ctc_loss=0.109, over 3832045.48 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:13:37,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.43 vs. limit=15.0 +2024-08-29 14:13:47,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-08-29 14:15:40,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.20 vs. 
limit=12.0 +2024-08-29 14:19:03,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178661.33333333334, ans=0.125 +2024-08-29 14:19:08,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178661.33333333334, ans=0.125 +2024-08-29 14:19:15,469 INFO [train.py:1114] (1/4) Epoch 14, batch 1150, loss[loss=0.184, simple_loss=0.2537, pruned_loss=0.04113, ctc_loss=0.08028, over 19594.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2819, pruned_loss=0.05792, ctc_loss=0.1092, over 3830601.15 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:19:46,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=178768.0, ans=0.125 +2024-08-29 14:20:02,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=178768.0, ans=0.125 +2024-08-29 14:20:22,688 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:20:36,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178821.33333333334, ans=0.125 +2024-08-29 14:21:23,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.78 vs. limit=10.0 +2024-08-29 14:21:36,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=178874.66666666666, ans=0.125 +2024-08-29 14:22:13,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.701e+02 1.876e+02 2.352e+02 3.362e+02, threshold=3.753e+02, percent-clipped=0.0 +2024-08-29 14:22:33,801 INFO [train.py:1114] (1/4) Epoch 14, batch 1200, loss[loss=0.2405, simple_loss=0.3038, pruned_loss=0.06372, ctc_loss=0.1245, over 19831.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2832, pruned_loss=0.05834, ctc_loss=0.1101, over 3826118.36 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:23:26,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.09 vs. 
limit=12.0 +2024-08-29 14:23:59,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-29 14:24:14,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-29 14:24:14,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=179034.66666666666, ans=0.0 +2024-08-29 14:24:30,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=179088.0, ans=0.125 +2024-08-29 14:24:42,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179141.33333333334, ans=0.1 +2024-08-29 14:25:06,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=179141.33333333334, ans=0.2 +2024-08-29 14:29:53,986 INFO [train.py:1114] (1/4) Epoch 14, batch 1250, loss[loss=0.2384, simple_loss=0.2996, pruned_loss=0.06547, ctc_loss=0.1157, over 19505.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2832, pruned_loss=0.05783, ctc_loss=0.1089, over 3843744.84 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:29:54,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.64 vs. limit=15.0 +2024-08-29 14:30:09,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=179248.0, ans=15.0 +2024-08-29 14:32:08,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.03 vs. limit=22.5 +2024-08-29 14:32:11,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=179301.33333333334, ans=0.1 +2024-08-29 14:32:12,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=179354.66666666666, ans=0.0 +2024-08-29 14:32:29,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=179408.0, ans=0.2 +2024-08-29 14:32:35,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179408.0, ans=0.125 +2024-08-29 14:32:39,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=179461.33333333334, ans=15.0 +2024-08-29 14:32:41,060 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.718e+02 2.120e+02 2.679e+02 4.271e+02, threshold=4.240e+02, percent-clipped=3.0 +2024-08-29 14:32:51,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179461.33333333334, ans=0.0 +2024-08-29 14:33:10,211 INFO [train.py:1114] (1/4) Epoch 14, batch 1300, loss[loss=0.2809, simple_loss=0.3229, pruned_loss=0.088, ctc_loss=0.1575, over 18980.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2824, pruned_loss=0.05739, ctc_loss=0.1081, over 3845894.64 frames. 
], batch size: 76, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:34:01,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-29 14:34:03,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=179514.66666666666, ans=0.05 +2024-08-29 14:34:09,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179568.0, ans=0.1 +2024-08-29 14:34:40,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=179621.33333333334, ans=0.025 +2024-08-29 14:34:41,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179621.33333333334, ans=0.125 +2024-08-29 14:35:27,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.97 vs. limit=22.5 +2024-08-29 14:35:42,416 INFO [train.py:1114] (1/4) Epoch 14, batch 1350, loss[loss=0.2211, simple_loss=0.2842, pruned_loss=0.05783, ctc_loss=0.1058, over 19772.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2821, pruned_loss=0.05729, ctc_loss=0.1077, over 3856351.84 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:36:09,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=22.5 +2024-08-29 14:36:16,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=179834.66666666666, ans=0.125 +2024-08-29 14:36:28,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=179888.0, ans=0.125 +2024-08-29 14:40:29,507 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.654e+02 1.881e+02 2.431e+02 4.376e+02, threshold=3.761e+02, percent-clipped=1.0 +2024-08-29 14:41:36,136 INFO [train.py:1114] (1/4) Epoch 14, batch 1400, loss[loss=0.1907, simple_loss=0.2498, pruned_loss=0.04794, ctc_loss=0.08931, over 19666.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2817, pruned_loss=0.05731, ctc_loss=0.1076, over 3863430.61 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:41:36,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180048.0, ans=0.1 +2024-08-29 14:41:37,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=180048.0, ans=0.0 +2024-08-29 14:41:54,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.45 vs. limit=12.0 +2024-08-29 14:42:39,827 INFO [train.py:1114] (1/4) Epoch 14, batch 1450, loss[loss=0.2385, simple_loss=0.2986, pruned_loss=0.06508, ctc_loss=0.1208, over 19663.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2824, pruned_loss=0.0576, ctc_loss=0.1082, over 3861905.87 frames. 
], batch size: 63, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:42:41,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180314.66666666666, ans=0.125 +2024-08-29 14:42:44,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=180314.66666666666, ans=0.0 +2024-08-29 14:42:47,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=180314.66666666666, ans=0.07 +2024-08-29 14:42:48,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180314.66666666666, ans=0.0 +2024-08-29 14:43:05,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=180421.33333333334, ans=0.2 +2024-08-29 14:44:10,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180474.66666666666, ans=0.1 +2024-08-29 14:44:19,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=12.0 +2024-08-29 14:44:19,806 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.699e+02 1.929e+02 2.254e+02 4.469e+02, threshold=3.859e+02, percent-clipped=1.0 +2024-08-29 14:45:03,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=180528.0, ans=0.125 +2024-08-29 14:45:06,818 INFO [train.py:1114] (1/4) Epoch 14, batch 1500, loss[loss=0.2226, simple_loss=0.2898, pruned_loss=0.05664, ctc_loss=0.1055, over 19586.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2828, pruned_loss=0.05767, ctc_loss=0.1082, over 3861529.31 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:45:07,242 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:45:12,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180581.33333333334, ans=0.125 +2024-08-29 14:45:16,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180581.33333333334, ans=0.125 +2024-08-29 14:45:16,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.63 vs. limit=15.0 +2024-08-29 14:45:46,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=180634.66666666666, ans=0.2 +2024-08-29 14:46:22,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180794.66666666666, ans=0.0 +2024-08-29 14:46:25,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=180794.66666666666, ans=0.035 +2024-08-29 14:46:27,475 INFO [train.py:1114] (1/4) Epoch 14, batch 1550, loss[loss=0.2563, simple_loss=0.3165, pruned_loss=0.07186, ctc_loss=0.1313, over 19621.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2829, pruned_loss=0.05773, ctc_loss=0.1086, over 3846645.55 frames. 
], batch size: 60, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:46:55,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180954.66666666666, ans=0.1 +2024-08-29 14:48:27,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=181008.0, ans=0.0 +2024-08-29 14:48:37,412 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.702e+02 2.011e+02 2.397e+02 3.479e+02, threshold=4.023e+02, percent-clipped=0.0 +2024-08-29 14:48:47,139 INFO [train.py:1114] (1/4) Epoch 14, batch 1600, loss[loss=0.2097, simple_loss=0.2816, pruned_loss=0.04984, ctc_loss=0.09518, over 19843.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2822, pruned_loss=0.05739, ctc_loss=0.108, over 3835823.69 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:51:09,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181274.66666666666, ans=0.1 +2024-08-29 14:51:10,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.74 vs. limit=15.0 +2024-08-29 14:51:13,537 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:51:21,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=181328.0, ans=0.0 +2024-08-29 14:51:29,810 INFO [train.py:1114] (1/4) Epoch 14, batch 1650, loss[loss=0.232, simple_loss=0.2957, pruned_loss=0.06139, ctc_loss=0.1139, over 19621.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2819, pruned_loss=0.05736, ctc_loss=0.108, over 3832200.11 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:51:46,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181434.66666666666, ans=0.125 +2024-08-29 14:51:48,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=181434.66666666666, ans=0.125 +2024-08-29 14:51:51,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.94 vs. limit=15.0 +2024-08-29 14:51:53,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.42 vs. limit=10.0 +2024-08-29 14:52:28,555 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.808e+02 2.247e+02 2.720e+02 5.029e+02, threshold=4.494e+02, percent-clipped=3.0 +2024-08-29 14:52:31,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181594.66666666666, ans=0.125 +2024-08-29 14:52:38,137 INFO [train.py:1114] (1/4) Epoch 14, batch 1700, loss[loss=0.1954, simple_loss=0.2557, pruned_loss=0.04734, ctc_loss=0.1012, over 19669.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2817, pruned_loss=0.05723, ctc_loss=0.1077, over 3846575.53 frames. 
], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:52:51,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=181701.33333333334, ans=0.5 +2024-08-29 14:52:51,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.36 vs. limit=15.0 +2024-08-29 14:53:07,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-08-29 14:53:11,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=181808.0, ans=0.125 +2024-08-29 14:53:13,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.61 vs. limit=15.0 +2024-08-29 14:53:24,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=181861.33333333334, ans=0.2 +2024-08-29 14:53:43,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=181861.33333333334, ans=0.0 +2024-08-29 14:53:46,192 INFO [train.py:1114] (1/4) Epoch 14, batch 1750, loss[loss=0.2128, simple_loss=0.266, pruned_loss=0.05882, ctc_loss=0.1046, over 19673.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2813, pruned_loss=0.05692, ctc_loss=0.107, over 3852118.27 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:53:51,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-29 14:53:52,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=181914.66666666666, ans=0.07 +2024-08-29 14:53:54,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=181914.66666666666, ans=0.125 +2024-08-29 14:53:54,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=181914.66666666666, ans=0.07 +2024-08-29 14:54:36,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=182021.33333333334, ans=0.2 +2024-08-29 14:54:36,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.86 vs. 
limit=6.0 +2024-08-29 14:55:00,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=182021.33333333334, ans=0.0 +2024-08-29 14:55:02,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=182074.66666666666, ans=0.5 +2024-08-29 14:55:07,779 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:56:25,486 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.791e+02 2.085e+02 2.712e+02 5.021e+02, threshold=4.170e+02, percent-clipped=2.0 +2024-08-29 14:56:30,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=182128.0, ans=0.125 +2024-08-29 14:56:34,697 INFO [train.py:1114] (1/4) Epoch 14, batch 1800, loss[loss=0.1978, simple_loss=0.2758, pruned_loss=0.04307, ctc_loss=0.08433, over 19615.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.282, pruned_loss=0.05724, ctc_loss=0.1078, over 3853355.10 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:57:23,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=182234.66666666666, ans=0.125 +2024-08-29 14:57:43,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=182341.33333333334, ans=0.1 +2024-08-29 14:57:56,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=182394.66666666666, ans=0.0 +2024-08-29 14:57:56,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.25 vs. limit=12.0 +2024-08-29 14:57:58,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=182394.66666666666, ans=0.07 +2024-08-29 14:58:05,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.98 vs. limit=22.5 +2024-08-29 14:58:07,409 INFO [train.py:1114] (1/4) Epoch 14, batch 1850, loss[loss=0.1958, simple_loss=0.274, pruned_loss=0.04251, ctc_loss=0.08157, over 19584.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2816, pruned_loss=0.05706, ctc_loss=0.1074, over 3856493.44 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:58:08,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.31 vs. 
limit=15.0 +2024-08-29 14:58:12,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=182448.0, ans=0.0 +2024-08-29 15:00:44,749 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:03:25,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182608.0, ans=0.125 +2024-08-29 15:03:29,627 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.679e+02 1.934e+02 2.278e+02 6.084e+02, threshold=3.868e+02, percent-clipped=1.0 +2024-08-29 15:03:40,811 INFO [train.py:1114] (1/4) Epoch 14, batch 1900, loss[loss=0.2211, simple_loss=0.2908, pruned_loss=0.05518, ctc_loss=0.1026, over 19663.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2822, pruned_loss=0.05708, ctc_loss=0.1075, over 3860795.35 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:03:42,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=12.0 +2024-08-29 15:03:47,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182714.66666666666, ans=0.125 +2024-08-29 15:03:48,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=182714.66666666666, ans=0.125 +2024-08-29 15:03:54,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=6.0 +2024-08-29 15:04:54,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=182928.0, ans=0.0 +2024-08-29 15:04:56,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=182928.0, ans=0.2 +2024-08-29 15:05:18,945 INFO [train.py:1114] (1/4) Epoch 14, batch 1950, loss[loss=0.2162, simple_loss=0.2737, pruned_loss=0.0582, ctc_loss=0.1056, over 19584.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2829, pruned_loss=0.05716, ctc_loss=0.1075, over 3869924.75 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:05:25,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=182981.33333333334, ans=0.1 +2024-08-29 15:05:33,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.04 vs. 
limit=15.0 +2024-08-29 15:05:35,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=183034.66666666666, ans=0.125 +2024-08-29 15:05:39,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=183034.66666666666, ans=0.0 +2024-08-29 15:05:51,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=183088.0, ans=0.125 +2024-08-29 15:05:51,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=183088.0, ans=0.125 +2024-08-29 15:05:56,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=183141.33333333334, ans=0.05 +2024-08-29 15:06:04,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=183194.66666666666, ans=0.125 +2024-08-29 15:06:06,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.683e+02 1.939e+02 2.319e+02 3.642e+02, threshold=3.877e+02, percent-clipped=0.0 +2024-08-29 15:06:45,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=183194.66666666666, ans=0.0 +2024-08-29 15:06:48,390 INFO [train.py:1114] (1/4) Epoch 14, batch 2000, loss[loss=0.1776, simple_loss=0.2447, pruned_loss=0.04086, ctc_loss=0.07181, over 19667.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2829, pruned_loss=0.05711, ctc_loss=0.1073, over 3854683.60 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:06:53,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=183248.0, ans=0.0 +2024-08-29 15:06:55,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.67 vs. limit=15.0 +2024-08-29 15:07:34,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=183461.33333333334, ans=0.125 +2024-08-29 15:07:45,838 INFO [train.py:1114] (1/4) Epoch 14, batch 2050, loss[loss=0.2029, simple_loss=0.2626, pruned_loss=0.05177, ctc_loss=0.09918, over 19696.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2817, pruned_loss=0.05684, ctc_loss=0.1069, over 3850423.23 frames. 
], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:07:59,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=183568.0, ans=0.0 +2024-08-29 15:08:38,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-29 15:08:38,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183621.33333333334, ans=0.1 +2024-08-29 15:09:20,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-29 15:09:39,970 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.749e+02 1.987e+02 2.455e+02 3.413e+02, threshold=3.973e+02, percent-clipped=0.0 +2024-08-29 15:09:46,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183728.0, ans=0.125 +2024-08-29 15:09:48,904 INFO [train.py:1114] (1/4) Epoch 14, batch 2100, loss[loss=0.1978, simple_loss=0.2753, pruned_loss=0.04463, ctc_loss=0.07736, over 19783.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2809, pruned_loss=0.05641, ctc_loss=0.1059, over 3857559.04 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:10:05,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=183834.66666666666, ans=0.125 +2024-08-29 15:10:06,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.47 vs. limit=15.0 +2024-08-29 15:10:21,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183834.66666666666, ans=0.1 +2024-08-29 15:10:26,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.58 vs. limit=15.0 +2024-08-29 15:10:40,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.93 vs. limit=15.0 +2024-08-29 15:10:57,758 INFO [train.py:1114] (1/4) Epoch 14, batch 2150, loss[loss=0.2364, simple_loss=0.2919, pruned_loss=0.06565, ctc_loss=0.1241, over 19581.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2804, pruned_loss=0.05633, ctc_loss=0.1057, over 3868055.93 frames. 
], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:10:59,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=184048.0, ans=0.125 +2024-08-29 15:11:22,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184154.66666666666, ans=0.125 +2024-08-29 15:11:31,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=184208.0, ans=0.025 +2024-08-29 15:11:44,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.399e+02 1.765e+02 2.209e+02 2.742e+02 6.061e+02, threshold=4.418e+02, percent-clipped=6.0 +2024-08-29 15:12:04,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184261.33333333334, ans=0.125 +2024-08-29 15:12:09,353 INFO [train.py:1114] (1/4) Epoch 14, batch 2200, loss[loss=0.232, simple_loss=0.2903, pruned_loss=0.06305, ctc_loss=0.119, over 19577.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2799, pruned_loss=0.05589, ctc_loss=0.105, over 3867342.29 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:12:19,045 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=15.0 +2024-08-29 15:13:13,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=184421.33333333334, ans=0.0 +2024-08-29 15:13:15,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=184421.33333333334, ans=0.0 +2024-08-29 15:13:19,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=184421.33333333334, ans=0.0 +2024-08-29 15:13:19,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=184421.33333333334, ans=15.0 +2024-08-29 15:13:40,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=184528.0, ans=0.125 +2024-08-29 15:13:47,040 INFO [train.py:1114] (1/4) Epoch 14, batch 2250, loss[loss=0.2245, simple_loss=0.2899, pruned_loss=0.05685, ctc_loss=0.1132, over 19609.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2804, pruned_loss=0.05645, ctc_loss=0.106, over 3867767.48 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:13:58,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=184634.66666666666, ans=0.07 +2024-08-29 15:14:03,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=184634.66666666666, ans=0.0 +2024-08-29 15:14:23,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.81 vs. 
limit=15.0 +2024-08-29 15:14:28,997 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:14:31,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=184794.66666666666, ans=0.125 +2024-08-29 15:14:33,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184794.66666666666, ans=0.125 +2024-08-29 15:14:34,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.796e+02 2.116e+02 2.512e+02 3.767e+02, threshold=4.231e+02, percent-clipped=0.0 +2024-08-29 15:14:39,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=184794.66666666666, ans=0.125 +2024-08-29 15:14:43,287 INFO [train.py:1114] (1/4) Epoch 14, batch 2300, loss[loss=0.1991, simple_loss=0.2607, pruned_loss=0.04998, ctc_loss=0.09384, over 19503.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2799, pruned_loss=0.05676, ctc_loss=0.1064, over 3861437.78 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:14:46,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=184848.0, ans=0.125 +2024-08-29 15:14:58,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=184901.33333333334, ans=0.125 +2024-08-29 15:15:02,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=184901.33333333334, ans=0.0 +2024-08-29 15:15:22,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.88 vs. limit=15.0 +2024-08-29 15:15:40,907 INFO [train.py:1114] (1/4) Epoch 14, batch 2350, loss[loss=0.2358, simple_loss=0.2907, pruned_loss=0.06608, ctc_loss=0.1217, over 19691.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.28, pruned_loss=0.05689, ctc_loss=0.1064, over 3864115.47 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 64.0 +2024-08-29 15:15:46,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185114.66666666666, ans=0.0 +2024-08-29 15:15:46,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=185114.66666666666, ans=0.125 +2024-08-29 15:16:06,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.54 vs. limit=22.5 +2024-08-29 15:16:06,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.38 vs. limit=15.0 +2024-08-29 15:16:28,757 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.724e+02 2.017e+02 2.647e+02 4.792e+02, threshold=4.034e+02, percent-clipped=3.0 +2024-08-29 15:16:32,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=185328.0, ans=0.0 +2024-08-29 15:16:36,505 INFO [train.py:1114] (1/4) Epoch 14, batch 2400, loss[loss=0.2476, simple_loss=0.2999, pruned_loss=0.07166, ctc_loss=0.1299, over 19263.00 frames. 
], tot_loss[loss=0.2209, simple_loss=0.2825, pruned_loss=0.05796, ctc_loss=0.1085, over 3858838.15 frames. ], batch size: 71, lr: 1.05e-02, grad_scale: 32.0 +2024-08-29 15:16:36,761 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:17:30,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.95 vs. limit=15.0 +2024-08-29 15:17:32,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.26 vs. limit=15.0 +2024-08-29 15:17:32,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.38 vs. limit=15.0 +2024-08-29 15:17:38,563 INFO [train.py:1114] (1/4) Epoch 14, batch 2450, loss[loss=0.3235, simple_loss=0.3436, pruned_loss=0.1087, ctc_loss=0.2148, over 13328.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.287, pruned_loss=0.06127, ctc_loss=0.1152, over 3731819.87 frames. ], batch size: 140, lr: 1.05e-02, grad_scale: 32.0 +2024-08-29 15:17:41,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=185648.0, ans=0.025 +2024-08-29 15:17:44,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=185648.0, ans=0.2 +2024-08-29 15:17:48,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=185648.0, ans=0.0 +2024-08-29 15:18:08,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=185754.66666666666, ans=0.2 +2024-08-29 15:18:09,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=185754.66666666666, ans=0.125 +2024-08-29 15:18:14,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=185808.0, ans=0.0 +2024-08-29 15:18:15,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185808.0, ans=0.1 +2024-08-29 15:19:09,390 INFO [train.py:1114] (1/4) Epoch 15, batch 0, loss[loss=0.2221, simple_loss=0.274, pruned_loss=0.06146, ctc_loss=0.118, over 19792.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.274, pruned_loss=0.06146, ctc_loss=0.118, over 19792.00 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:19:09,391 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 15:19:20,886 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1908, simple_loss=0.2785, pruned_loss=0.03825, ctc_loss=0.06651, over 944034.00 frames. 
+2024-08-29 15:19:20,887 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13709MB +2024-08-29 15:19:22,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=185856.0, ans=0.125 +2024-08-29 15:19:24,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=185856.0, ans=0.2 +2024-08-29 15:19:25,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.942e+02 2.136e+02 2.424e+02 3.799e+02, threshold=4.272e+02, percent-clipped=0.0 +2024-08-29 15:19:56,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=186016.0, ans=0.125 +2024-08-29 15:20:00,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.90 vs. limit=22.5 +2024-08-29 15:20:13,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=186069.33333333334, ans=0.2 +2024-08-29 15:20:25,208 INFO [train.py:1114] (1/4) Epoch 15, batch 50, loss[loss=0.1742, simple_loss=0.2402, pruned_loss=0.03922, ctc_loss=0.07426, over 19715.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2834, pruned_loss=0.05894, ctc_loss=0.1123, over 844130.59 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:20:28,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=186122.66666666666, ans=0.025 +2024-08-29 15:21:00,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=186282.66666666666, ans=0.035 +2024-08-29 15:21:06,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=186282.66666666666, ans=0.0 +2024-08-29 15:21:25,442 INFO [train.py:1114] (1/4) Epoch 15, batch 100, loss[loss=0.1884, simple_loss=0.2603, pruned_loss=0.04254, ctc_loss=0.07877, over 19731.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2857, pruned_loss=0.05954, ctc_loss=0.1126, over 1499265.19 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:21:30,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.739e+02 1.952e+02 2.450e+02 4.288e+02, threshold=3.904e+02, percent-clipped=1.0 +2024-08-29 15:21:30,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=186389.33333333334, ans=0.2 +2024-08-29 15:22:01,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=186549.33333333334, ans=0.2 +2024-08-29 15:22:03,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=186549.33333333334, ans=0.125 +2024-08-29 15:22:29,353 INFO [train.py:1114] (1/4) Epoch 15, batch 150, loss[loss=0.2032, simple_loss=0.2547, pruned_loss=0.05499, ctc_loss=0.1043, over 19736.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2821, pruned_loss=0.05763, ctc_loss=0.1089, over 2027143.42 frames. 
], batch size: 47, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:23:05,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=186816.0, ans=0.0 +2024-08-29 15:23:05,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.48 vs. limit=6.0 +2024-08-29 15:23:28,627 INFO [train.py:1114] (1/4) Epoch 15, batch 200, loss[loss=0.2464, simple_loss=0.3075, pruned_loss=0.06668, ctc_loss=0.13, over 18371.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2804, pruned_loss=0.05682, ctc_loss=0.1073, over 2433712.99 frames. ], batch size: 85, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:23:44,482 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.690e+02 2.002e+02 2.433e+02 3.884e+02, threshold=4.003e+02, percent-clipped=0.0 +2024-08-29 15:24:00,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=186976.0, ans=0.0 +2024-08-29 15:24:16,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0 +2024-08-29 15:24:20,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.70 vs. limit=22.5 +2024-08-29 15:24:22,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=187082.66666666666, ans=0.0 +2024-08-29 15:24:33,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=6.0 +2024-08-29 15:25:01,148 INFO [train.py:1114] (1/4) Epoch 15, batch 250, loss[loss=0.2394, simple_loss=0.3063, pruned_loss=0.06325, ctc_loss=0.115, over 19426.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2807, pruned_loss=0.05678, ctc_loss=0.1068, over 2754856.41 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:25:31,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=15.0 +2024-08-29 15:25:42,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=187349.33333333334, ans=0.125 +2024-08-29 15:25:53,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=187402.66666666666, ans=0.125 +2024-08-29 15:25:57,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=187402.66666666666, ans=0.2 +2024-08-29 15:26:33,388 INFO [train.py:1114] (1/4) Epoch 15, batch 300, loss[loss=0.2345, simple_loss=0.2912, pruned_loss=0.06552, ctc_loss=0.1167, over 19546.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2798, pruned_loss=0.05623, ctc_loss=0.106, over 2999580.57 frames. 
], batch size: 61, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:26:38,060 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.706e+02 2.088e+02 2.592e+02 3.748e+02, threshold=4.177e+02, percent-clipped=0.0 +2024-08-29 15:26:44,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=187509.33333333334, ans=0.2 +2024-08-29 15:26:45,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=187509.33333333334, ans=0.125 +2024-08-29 15:27:12,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=187616.0, ans=0.125 +2024-08-29 15:27:29,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=187669.33333333334, ans=0.125 +2024-08-29 15:27:34,910 INFO [train.py:1114] (1/4) Epoch 15, batch 350, loss[loss=0.1985, simple_loss=0.2557, pruned_loss=0.05166, ctc_loss=0.09502, over 19736.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2808, pruned_loss=0.05647, ctc_loss=0.106, over 3189607.36 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 16.0 +2024-08-29 15:27:41,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-08-29 15:27:46,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=187776.0, ans=0.025 +2024-08-29 15:27:52,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.88 vs. limit=6.0 +2024-08-29 15:27:58,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.30 vs. limit=15.0 +2024-08-29 15:28:15,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.13 vs. limit=15.0 +2024-08-29 15:28:30,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187936.0, ans=0.1 +2024-08-29 15:28:38,893 INFO [train.py:1114] (1/4) Epoch 15, batch 400, loss[loss=0.2093, simple_loss=0.285, pruned_loss=0.04894, ctc_loss=0.08944, over 19491.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2797, pruned_loss=0.05568, ctc_loss=0.1046, over 3341688.53 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:28:44,510 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.706e+02 2.043e+02 2.587e+02 5.210e+02, threshold=4.085e+02, percent-clipped=2.0 +2024-08-29 15:29:14,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187989.33333333334, ans=0.1 +2024-08-29 15:29:16,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. 
limit=6.0 +2024-08-29 15:29:31,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=188096.0, ans=0.0 +2024-08-29 15:29:37,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.85 vs. limit=15.0 +2024-08-29 15:29:38,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.71 vs. limit=15.0 +2024-08-29 15:29:40,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=188149.33333333334, ans=0.125 +2024-08-29 15:29:47,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.68 vs. limit=22.5 +2024-08-29 15:30:07,910 INFO [train.py:1114] (1/4) Epoch 15, batch 450, loss[loss=0.2045, simple_loss=0.2766, pruned_loss=0.04802, ctc_loss=0.09077, over 19602.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2793, pruned_loss=0.05555, ctc_loss=0.1043, over 3448438.69 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:30:21,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=188309.33333333334, ans=0.025 +2024-08-29 15:30:22,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=188309.33333333334, ans=0.2 +2024-08-29 15:31:09,326 INFO [train.py:1114] (1/4) Epoch 15, batch 500, loss[loss=0.2313, simple_loss=0.2992, pruned_loss=0.06019, ctc_loss=0.1077, over 19663.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2786, pruned_loss=0.05505, ctc_loss=0.1038, over 3544209.62 frames. 
], batch size: 63, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:31:10,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=188522.66666666666, ans=0.0 +2024-08-29 15:31:15,120 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.681e+02 1.897e+02 2.177e+02 4.545e+02, threshold=3.794e+02, percent-clipped=1.0 +2024-08-29 15:31:23,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=188576.0, ans=0.0 +2024-08-29 15:31:29,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=188576.0, ans=0.125 +2024-08-29 15:32:21,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=188629.33333333334, ans=0.0 +2024-08-29 15:32:27,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=188629.33333333334, ans=0.125 +2024-08-29 15:32:48,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188736.0, ans=0.0 +2024-08-29 15:32:54,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188736.0, ans=0.1 +2024-08-29 15:32:55,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=188736.0, ans=0.125 +2024-08-29 15:32:56,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188736.0, ans=0.125 +2024-08-29 15:32:59,017 INFO [train.py:1114] (1/4) Epoch 15, batch 550, loss[loss=0.2321, simple_loss=0.2939, pruned_loss=0.06092, ctc_loss=0.1211, over 19161.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2787, pruned_loss=0.05491, ctc_loss=0.1034, over 3606403.89 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:33:04,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=188789.33333333334, ans=0.125 +2024-08-29 15:33:45,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=188842.66666666666, ans=0.0 +2024-08-29 15:34:02,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=188896.0, ans=0.0 +2024-08-29 15:34:03,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188896.0, ans=0.125 +2024-08-29 15:34:12,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=188949.33333333334, ans=0.2 +2024-08-29 15:34:30,455 INFO [train.py:1114] (1/4) Epoch 15, batch 600, loss[loss=0.2472, simple_loss=0.3108, pruned_loss=0.06694, ctc_loss=0.1244, over 19405.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2791, pruned_loss=0.05499, ctc_loss=0.1034, over 3665565.83 frames. 
], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:34:34,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189056.0, ans=0.1 +2024-08-29 15:34:36,392 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.830e+02 2.111e+02 2.732e+02 4.380e+02, threshold=4.223e+02, percent-clipped=4.0 +2024-08-29 15:35:11,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=189216.0, ans=0.125 +2024-08-29 15:35:15,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189216.0, ans=0.1 +2024-08-29 15:35:25,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.44 vs. limit=22.5 +2024-08-29 15:35:31,329 INFO [train.py:1114] (1/4) Epoch 15, batch 650, loss[loss=0.1885, simple_loss=0.2646, pruned_loss=0.04073, ctc_loss=0.07733, over 19759.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2782, pruned_loss=0.05466, ctc_loss=0.1027, over 3715728.51 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:35:34,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189322.66666666666, ans=0.125 +2024-08-29 15:35:34,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=12.0 +2024-08-29 15:35:36,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189322.66666666666, ans=0.125 +2024-08-29 15:35:42,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189376.0, ans=0.125 +2024-08-29 15:35:48,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=189376.0, ans=0.125 +2024-08-29 15:37:43,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.68 vs. limit=15.0 +2024-08-29 15:38:06,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189482.66666666666, ans=0.125 +2024-08-29 15:38:22,772 INFO [train.py:1114] (1/4) Epoch 15, batch 700, loss[loss=0.186, simple_loss=0.2532, pruned_loss=0.04343, ctc_loss=0.07997, over 19748.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2782, pruned_loss=0.05464, ctc_loss=0.1027, over 3748378.29 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:38:28,535 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.846e+02 2.430e+02 3.057e+02 4.272e+02, threshold=4.860e+02, percent-clipped=1.0 +2024-08-29 15:38:30,025 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:38:38,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189642.66666666666, ans=0.125 +2024-08-29 15:38:49,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.36 vs. 
limit=15.0 +2024-08-29 15:38:49,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189696.0, ans=0.1 +2024-08-29 15:39:00,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.64 vs. limit=15.0 +2024-08-29 15:39:01,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=189749.33333333334, ans=10.0 +2024-08-29 15:39:05,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=189749.33333333334, ans=10.0 +2024-08-29 15:39:10,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=189749.33333333334, ans=0.2 +2024-08-29 15:39:22,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=189802.66666666666, ans=0.05 +2024-08-29 15:39:25,949 INFO [train.py:1114] (1/4) Epoch 15, batch 750, loss[loss=0.22, simple_loss=0.2824, pruned_loss=0.05601, ctc_loss=0.1137, over 19478.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2782, pruned_loss=0.05499, ctc_loss=0.1035, over 3775378.62 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:39:33,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=189856.0, ans=10.0 +2024-08-29 15:39:33,555 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. limit=10.0 +2024-08-29 15:39:39,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.86 vs. limit=15.0 +2024-08-29 15:39:39,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.61 vs. limit=15.0 +2024-08-29 15:39:42,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0 +2024-08-29 15:39:46,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.39 vs. 
limit=15.0 +2024-08-29 15:39:56,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=189962.66666666666, ans=0.125 +2024-08-29 15:40:00,220 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:40:15,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=190069.33333333334, ans=0.0 +2024-08-29 15:40:16,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=190069.33333333334, ans=0.125 +2024-08-29 15:40:18,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=190069.33333333334, ans=6.0 +2024-08-29 15:40:28,195 INFO [train.py:1114] (1/4) Epoch 15, batch 800, loss[loss=0.1933, simple_loss=0.2553, pruned_loss=0.0474, ctc_loss=0.09119, over 19404.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.279, pruned_loss=0.05542, ctc_loss=0.1041, over 3797168.46 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:40:34,418 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.728e+02 2.068e+02 2.494e+02 4.984e+02, threshold=4.135e+02, percent-clipped=1.0 +2024-08-29 15:40:35,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190122.66666666666, ans=0.0 +2024-08-29 15:40:38,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=190122.66666666666, ans=0.0 +2024-08-29 15:40:47,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=190176.0, ans=0.125 +2024-08-29 15:41:03,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=190282.66666666666, ans=0.0 +2024-08-29 15:41:13,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.89 vs. limit=15.0 +2024-08-29 15:41:30,888 INFO [train.py:1114] (1/4) Epoch 15, batch 850, loss[loss=0.2247, simple_loss=0.2927, pruned_loss=0.05646, ctc_loss=0.1094, over 19649.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2788, pruned_loss=0.05524, ctc_loss=0.1039, over 3816716.82 frames. 
], batch size: 59, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:41:33,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=190389.33333333334, ans=0.2 +2024-08-29 15:41:51,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190442.66666666666, ans=0.125 +2024-08-29 15:41:56,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=190496.0, ans=0.125 +2024-08-29 15:42:01,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=190496.0, ans=0.025 +2024-08-29 15:42:14,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=190549.33333333334, ans=0.125 +2024-08-29 15:42:22,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.05 vs. limit=15.0 +2024-08-29 15:42:23,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=190602.66666666666, ans=0.0 +2024-08-29 15:42:34,681 INFO [train.py:1114] (1/4) Epoch 15, batch 900, loss[loss=0.2028, simple_loss=0.259, pruned_loss=0.05306, ctc_loss=0.1011, over 19801.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2792, pruned_loss=0.05556, ctc_loss=0.1043, over 3819125.90 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:42:40,563 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.386e+02 1.760e+02 2.061e+02 2.441e+02 4.748e+02, threshold=4.121e+02, percent-clipped=4.0 +2024-08-29 15:43:24,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.54 vs. limit=15.0 +2024-08-29 15:43:38,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190869.33333333334, ans=0.125 +2024-08-29 15:43:47,837 INFO [train.py:1114] (1/4) Epoch 15, batch 950, loss[loss=0.2075, simple_loss=0.271, pruned_loss=0.05176, ctc_loss=0.1012, over 19487.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2796, pruned_loss=0.05561, ctc_loss=0.1047, over 3820889.42 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:43:57,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.94 vs. 
limit=22.5 +2024-08-29 15:44:19,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191029.33333333334, ans=0.0 +2024-08-29 15:44:26,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191082.66666666666, ans=0.1 +2024-08-29 15:44:32,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191082.66666666666, ans=0.125 +2024-08-29 15:44:42,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191136.0, ans=0.1 +2024-08-29 15:44:47,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=191189.33333333334, ans=0.0 +2024-08-29 15:44:48,254 INFO [train.py:1114] (1/4) Epoch 15, batch 1000, loss[loss=0.1775, simple_loss=0.2477, pruned_loss=0.03963, ctc_loss=0.07025, over 19861.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2799, pruned_loss=0.05573, ctc_loss=0.1049, over 3815966.68 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:44:56,840 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 1.691e+02 1.934e+02 2.300e+02 3.610e+02, threshold=3.869e+02, percent-clipped=0.0 +2024-08-29 15:45:08,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=191242.66666666666, ans=0.125 +2024-08-29 15:45:25,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=12.0 +2024-08-29 15:45:37,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=191402.66666666666, ans=0.125 +2024-08-29 15:45:47,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=191402.66666666666, ans=0.05 +2024-08-29 15:45:53,382 INFO [train.py:1114] (1/4) Epoch 15, batch 1050, loss[loss=0.2086, simple_loss=0.2814, pruned_loss=0.04917, ctc_loss=0.09373, over 19833.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2797, pruned_loss=0.05587, ctc_loss=0.1052, over 3823073.04 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:46:38,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.18 vs. limit=12.0 +2024-08-29 15:46:39,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=191616.0, ans=0.125 +2024-08-29 15:46:54,815 INFO [train.py:1114] (1/4) Epoch 15, batch 1100, loss[loss=0.2013, simple_loss=0.2731, pruned_loss=0.04669, ctc_loss=0.09038, over 19583.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2796, pruned_loss=0.0558, ctc_loss=0.1052, over 3830141.95 frames. 
], batch size: 52, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:47:17,589 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.746e+02 1.965e+02 2.496e+02 3.903e+02, threshold=3.929e+02, percent-clipped=1.0 +2024-08-29 15:47:18,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=191722.66666666666, ans=0.2 +2024-08-29 15:47:21,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=191722.66666666666, ans=0.0 +2024-08-29 15:47:26,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=191776.0, ans=0.125 +2024-08-29 15:47:39,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=12.0 +2024-08-29 15:48:12,563 INFO [train.py:1114] (1/4) Epoch 15, batch 1150, loss[loss=0.2105, simple_loss=0.2757, pruned_loss=0.05332, ctc_loss=0.09671, over 19589.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2787, pruned_loss=0.05515, ctc_loss=0.1039, over 3829266.93 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:48:27,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=192042.66666666666, ans=0.05 +2024-08-29 15:48:34,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=192042.66666666666, ans=0.04949747468305833 +2024-08-29 15:49:19,911 INFO [train.py:1114] (1/4) Epoch 15, batch 1200, loss[loss=0.219, simple_loss=0.2907, pruned_loss=0.05399, ctc_loss=0.09857, over 19846.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2794, pruned_loss=0.05529, ctc_loss=0.1042, over 3825173.40 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:49:26,212 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.719e+02 2.001e+02 2.349e+02 3.398e+02, threshold=4.002e+02, percent-clipped=0.0 +2024-08-29 15:49:26,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192256.0, ans=0.1 +2024-08-29 15:49:36,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=192309.33333333334, ans=0.125 +2024-08-29 15:50:06,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=192416.0, ans=0.125 +2024-08-29 15:50:24,184 INFO [train.py:1114] (1/4) Epoch 15, batch 1250, loss[loss=0.2384, simple_loss=0.2907, pruned_loss=0.06797, ctc_loss=0.1253, over 19519.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2796, pruned_loss=0.05517, ctc_loss=0.1038, over 3843000.09 frames. 
], batch size: 61, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:50:30,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=192522.66666666666, ans=0.0 +2024-08-29 15:51:06,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=192682.66666666666, ans=0.0 +2024-08-29 15:51:12,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=192736.0, ans=0.0 +2024-08-29 15:51:20,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=192736.0, ans=0.2 +2024-08-29 15:51:25,192 INFO [train.py:1114] (1/4) Epoch 15, batch 1300, loss[loss=0.2607, simple_loss=0.3093, pruned_loss=0.07754, ctc_loss=0.1425, over 18934.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2789, pruned_loss=0.05472, ctc_loss=0.1029, over 3846476.85 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0 +2024-08-29 15:52:15,025 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.668e+02 1.955e+02 2.455e+02 4.261e+02, threshold=3.910e+02, percent-clipped=2.0 +2024-08-29 15:52:16,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=192789.33333333334, ans=0.09899494936611666 +2024-08-29 15:52:18,506 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.25 vs. limit=22.5 +2024-08-29 15:52:28,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=192842.66666666666, ans=0.125 +2024-08-29 15:52:28,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.60 vs. limit=15.0 +2024-08-29 15:52:34,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=192896.0, ans=0.1 +2024-08-29 15:52:45,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=192949.33333333334, ans=0.125 +2024-08-29 15:52:54,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192949.33333333334, ans=0.125 +2024-08-29 15:53:10,978 INFO [train.py:1114] (1/4) Epoch 15, batch 1350, loss[loss=0.2072, simple_loss=0.2761, pruned_loss=0.05074, ctc_loss=0.09195, over 19755.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2785, pruned_loss=0.05448, ctc_loss=0.1025, over 3858434.80 frames. ], batch size: 54, lr: 9.98e-03, grad_scale: 32.0 +2024-08-29 15:53:22,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=193109.33333333334, ans=0.2 +2024-08-29 15:53:37,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.95 vs. limit=22.5 +2024-08-29 15:53:40,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.98 vs. 
limit=15.0 +2024-08-29 15:53:47,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=193216.0, ans=0.025 +2024-08-29 15:54:14,966 INFO [train.py:1114] (1/4) Epoch 15, batch 1400, loss[loss=0.1795, simple_loss=0.2404, pruned_loss=0.04285, ctc_loss=0.08194, over 19677.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.278, pruned_loss=0.05426, ctc_loss=0.1021, over 3864695.51 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0 +2024-08-29 15:54:37,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.658e+02 1.833e+02 2.351e+02 3.730e+02, threshold=3.665e+02, percent-clipped=0.0 +2024-08-29 15:54:39,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.31 vs. limit=15.0 +2024-08-29 15:54:52,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=193376.0, ans=22.5 +2024-08-29 15:54:53,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.33 vs. limit=15.0 +2024-08-29 15:54:57,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.35 vs. limit=6.0 +2024-08-29 15:54:59,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=193429.33333333334, ans=0.07 +2024-08-29 15:55:10,880 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.00 vs. limit=22.5 +2024-08-29 15:55:16,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=193482.66666666666, ans=0.0 +2024-08-29 15:55:21,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=193482.66666666666, ans=0.0 +2024-08-29 15:55:25,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.58 vs. limit=22.5 +2024-08-29 15:55:32,371 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:55:42,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193589.33333333334, ans=0.1 +2024-08-29 15:55:43,671 INFO [train.py:1114] (1/4) Epoch 15, batch 1450, loss[loss=0.2163, simple_loss=0.289, pruned_loss=0.05227, ctc_loss=0.09776, over 19683.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2795, pruned_loss=0.0551, ctc_loss=0.1038, over 3862898.74 frames. 
], batch size: 63, lr: 9.97e-03, grad_scale: 32.0 +2024-08-29 15:55:48,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=193589.33333333334, ans=0.04949747468305833 +2024-08-29 15:55:51,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=193589.33333333334, ans=0.025 +2024-08-29 15:56:09,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=193696.0, ans=0.2 +2024-08-29 15:56:29,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.44 vs. limit=15.0 +2024-08-29 15:56:32,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.37 vs. limit=15.0 +2024-08-29 15:56:45,749 INFO [train.py:1114] (1/4) Epoch 15, batch 1500, loss[loss=0.2007, simple_loss=0.2768, pruned_loss=0.0449, ctc_loss=0.08704, over 19570.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2798, pruned_loss=0.05511, ctc_loss=0.1036, over 3862677.43 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 32.0 +2024-08-29 15:56:52,428 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.660e+02 1.885e+02 2.337e+02 4.281e+02, threshold=3.770e+02, percent-clipped=2.0 +2024-08-29 15:56:55,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=193856.0, ans=0.125 +2024-08-29 15:56:58,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-29 15:57:05,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-29 15:57:10,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193909.33333333334, ans=0.1 +2024-08-29 15:57:23,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.25 vs. limit=6.0 +2024-08-29 15:57:25,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=194016.0, ans=0.0 +2024-08-29 15:57:26,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=194016.0, ans=0.0 +2024-08-29 15:57:40,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=194069.33333333334, ans=0.2 +2024-08-29 15:57:41,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.11 vs. limit=15.0 +2024-08-29 15:57:51,459 INFO [train.py:1114] (1/4) Epoch 15, batch 1550, loss[loss=0.2549, simple_loss=0.3081, pruned_loss=0.07401, ctc_loss=0.1341, over 19586.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.28, pruned_loss=0.05562, ctc_loss=0.1045, over 3845845.14 frames. 
], batch size: 60, lr: 9.96e-03, grad_scale: 32.0 +2024-08-29 15:57:51,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=194122.66666666666, ans=0.0 +2024-08-29 15:58:01,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.66 vs. limit=15.0 +2024-08-29 15:58:21,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=194229.33333333334, ans=0.025 +2024-08-29 15:58:29,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.12 vs. limit=15.0 +2024-08-29 15:58:34,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-29 15:58:53,438 INFO [train.py:1114] (1/4) Epoch 15, batch 1600, loss[loss=0.242, simple_loss=0.301, pruned_loss=0.06656, ctc_loss=0.1244, over 19833.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2802, pruned_loss=0.05575, ctc_loss=0.1049, over 3835219.53 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0 +2024-08-29 15:58:55,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=194389.33333333334, ans=0.025 +2024-08-29 15:58:59,524 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.762e+02 2.164e+02 2.478e+02 4.927e+02, threshold=4.328e+02, percent-clipped=7.0 +2024-08-29 15:58:59,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=194389.33333333334, ans=0.0 +2024-08-29 16:00:11,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.09 vs. limit=6.0 +2024-08-29 16:00:30,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.59 vs. limit=15.0 +2024-08-29 16:00:35,206 INFO [train.py:1114] (1/4) Epoch 15, batch 1650, loss[loss=0.2289, simple_loss=0.2922, pruned_loss=0.06001, ctc_loss=0.114, over 19646.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2803, pruned_loss=0.05584, ctc_loss=0.1051, over 3831815.30 frames. ], batch size: 59, lr: 9.94e-03, grad_scale: 32.0 +2024-08-29 16:00:40,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=194656.0, ans=0.125 +2024-08-29 16:01:19,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=12.0 +2024-08-29 16:01:21,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=194816.0, ans=0.125 +2024-08-29 16:01:32,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=194869.33333333334, ans=0.125 +2024-08-29 16:01:38,038 INFO [train.py:1114] (1/4) Epoch 15, batch 1700, loss[loss=0.1995, simple_loss=0.2555, pruned_loss=0.05276, ctc_loss=0.095, over 19675.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2802, pruned_loss=0.05576, ctc_loss=0.1051, over 3846198.71 frames. 
], batch size: 46, lr: 9.94e-03, grad_scale: 32.0 +2024-08-29 16:01:40,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0 +2024-08-29 16:01:43,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=194922.66666666666, ans=0.125 +2024-08-29 16:01:44,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.696e+02 2.083e+02 2.797e+02 4.802e+02, threshold=4.167e+02, percent-clipped=3.0 +2024-08-29 16:01:46,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=194922.66666666666, ans=0.0 +2024-08-29 16:01:47,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.23 vs. limit=22.5 +2024-08-29 16:01:47,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=194922.66666666666, ans=0.0 +2024-08-29 16:01:52,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=194976.0, ans=0.125 +2024-08-29 16:01:53,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=194976.0, ans=8.0 +2024-08-29 16:02:09,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195029.33333333334, ans=0.125 +2024-08-29 16:02:29,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=195136.0, ans=0.025 +2024-08-29 16:02:40,451 INFO [train.py:1114] (1/4) Epoch 15, batch 1750, loss[loss=0.1754, simple_loss=0.2433, pruned_loss=0.03895, ctc_loss=0.07434, over 19658.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2794, pruned_loss=0.05542, ctc_loss=0.1042, over 3851342.47 frames. ], batch size: 45, lr: 9.93e-03, grad_scale: 32.0 +2024-08-29 16:02:41,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=195189.33333333334, ans=0.0 +2024-08-29 16:02:44,558 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=6.537e-02 +2024-08-29 16:02:44,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.79 vs. limit=10.0 +2024-08-29 16:03:10,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=195296.0, ans=0.0 +2024-08-29 16:03:21,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.00 vs. limit=12.0 +2024-08-29 16:03:26,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=195402.66666666666, ans=0.0 +2024-08-29 16:03:28,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=195402.66666666666, ans=0.0 +2024-08-29 16:03:31,947 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.99 vs. 
limit=12.0 +2024-08-29 16:03:33,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=195402.66666666666, ans=0.04949747468305833 +2024-08-29 16:03:37,863 INFO [train.py:1114] (1/4) Epoch 15, batch 1800, loss[loss=0.2336, simple_loss=0.2969, pruned_loss=0.0619, ctc_loss=0.1163, over 19593.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2791, pruned_loss=0.05533, ctc_loss=0.104, over 3852161.23 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 32.0 +2024-08-29 16:03:43,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.702e+02 2.083e+02 2.690e+02 4.339e+02, threshold=4.166e+02, percent-clipped=1.0 +2024-08-29 16:03:57,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=195509.33333333334, ans=0.0 +2024-08-29 16:03:59,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=195562.66666666666, ans=0.125 +2024-08-29 16:04:07,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=195562.66666666666, ans=0.025 +2024-08-29 16:04:21,178 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0 +2024-08-29 16:04:31,943 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.38 vs. limit=15.0 +2024-08-29 16:04:32,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=195669.33333333334, ans=0.0 +2024-08-29 16:04:34,674 INFO [train.py:1114] (1/4) Epoch 15, batch 1850, loss[loss=0.2161, simple_loss=0.2916, pruned_loss=0.05158, ctc_loss=0.09373, over 19563.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2789, pruned_loss=0.05507, ctc_loss=0.1035, over 3854557.53 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 32.0 +2024-08-29 16:04:52,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=195776.0, ans=0.025 +2024-08-29 16:04:53,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=195776.0, ans=0.125 +2024-08-29 16:04:53,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=195776.0, ans=0.0 +2024-08-29 16:05:13,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=195882.66666666666, ans=0.125 +2024-08-29 16:05:16,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195882.66666666666, ans=0.0 +2024-08-29 16:05:35,601 INFO [train.py:1114] (1/4) Epoch 15, batch 1900, loss[loss=0.2064, simple_loss=0.281, pruned_loss=0.04739, ctc_loss=0.09226, over 19630.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2792, pruned_loss=0.05487, ctc_loss=0.1031, over 3859475.40 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 32.0 +2024-08-29 16:05:36,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.69 vs. 
limit=15.0 +2024-08-29 16:05:40,973 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.724e+02 2.102e+02 3.115e+02 5.340e+02, threshold=4.204e+02, percent-clipped=3.0 +2024-08-29 16:05:59,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196096.0, ans=0.1 +2024-08-29 16:05:59,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196096.0, ans=0.125 +2024-08-29 16:06:11,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.46 vs. limit=22.5 +2024-08-29 16:06:12,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=196149.33333333334, ans=0.125 +2024-08-29 16:06:19,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=196149.33333333334, ans=0.0 +2024-08-29 16:06:32,313 INFO [train.py:1114] (1/4) Epoch 15, batch 1950, loss[loss=0.1829, simple_loss=0.2569, pruned_loss=0.03933, ctc_loss=0.07545, over 19582.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2796, pruned_loss=0.05457, ctc_loss=0.1027, over 3868676.38 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 32.0 +2024-08-29 16:06:34,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=196256.0, ans=0.05 +2024-08-29 16:06:59,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196309.33333333334, ans=0.125 +2024-08-29 16:07:10,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0 +2024-08-29 16:07:28,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=196469.33333333334, ans=0.0 +2024-08-29 16:07:35,229 INFO [train.py:1114] (1/4) Epoch 15, batch 2000, loss[loss=0.1889, simple_loss=0.2495, pruned_loss=0.04694, ctc_loss=0.08584, over 19662.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2806, pruned_loss=0.05515, ctc_loss=0.1036, over 3854088.50 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0 +2024-08-29 16:07:41,140 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.618e+02 1.832e+02 2.132e+02 4.362e+02, threshold=3.664e+02, percent-clipped=1.0 +2024-08-29 16:07:49,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=196576.0, ans=0.125 +2024-08-29 16:08:07,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=196629.33333333334, ans=0.125 +2024-08-29 16:08:32,347 INFO [train.py:1114] (1/4) Epoch 15, batch 2050, loss[loss=0.1715, simple_loss=0.2376, pruned_loss=0.0386, ctc_loss=0.07079, over 19731.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2795, pruned_loss=0.05507, ctc_loss=0.1035, over 3851358.94 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0 +2024-08-29 16:08:53,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.33 vs. 
limit=15.0 +2024-08-29 16:08:56,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.29 vs. limit=15.0 +2024-08-29 16:09:10,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=196949.33333333334, ans=10.0 +2024-08-29 16:09:17,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0 +2024-08-29 16:09:27,728 INFO [train.py:1114] (1/4) Epoch 15, batch 2100, loss[loss=0.2058, simple_loss=0.2708, pruned_loss=0.05181, ctc_loss=0.09275, over 19772.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2788, pruned_loss=0.0548, ctc_loss=0.1032, over 3859328.21 frames. ], batch size: 54, lr: 9.88e-03, grad_scale: 32.0 +2024-08-29 16:09:33,407 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.691e+02 1.929e+02 2.354e+02 3.359e+02, threshold=3.858e+02, percent-clipped=0.0 +2024-08-29 16:09:38,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197109.33333333334, ans=0.125 +2024-08-29 16:09:43,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197109.33333333334, ans=0.125 +2024-08-29 16:09:58,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=197162.66666666666, ans=0.0 +2024-08-29 16:09:59,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=197162.66666666666, ans=0.025 +2024-08-29 16:10:12,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=197216.0, ans=0.2 +2024-08-29 16:10:16,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=197269.33333333334, ans=0.125 +2024-08-29 16:10:20,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=197269.33333333334, ans=0.09899494936611666 +2024-08-29 16:10:26,356 INFO [train.py:1114] (1/4) Epoch 15, batch 2150, loss[loss=0.2104, simple_loss=0.2807, pruned_loss=0.05093, ctc_loss=0.09551, over 19579.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.279, pruned_loss=0.05525, ctc_loss=0.1037, over 3869107.04 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0 +2024-08-29 16:10:33,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.40 vs. 
limit=15.0 +2024-08-29 16:12:01,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=197429.33333333334, ans=10.0 +2024-08-29 16:12:08,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197482.66666666666, ans=0.125 +2024-08-29 16:12:17,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=197482.66666666666, ans=0.125 +2024-08-29 16:12:24,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=197536.0, ans=0.2 +2024-08-29 16:12:31,424 INFO [train.py:1114] (1/4) Epoch 15, batch 2200, loss[loss=0.2421, simple_loss=0.3038, pruned_loss=0.06477, ctc_loss=0.1269, over 19604.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2789, pruned_loss=0.05501, ctc_loss=0.1033, over 3868156.74 frames. ], batch size: 57, lr: 9.87e-03, grad_scale: 32.0 +2024-08-29 16:12:36,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.787e+02 2.154e+02 2.730e+02 5.047e+02, threshold=4.308e+02, percent-clipped=4.0 +2024-08-29 16:12:47,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=197642.66666666666, ans=0.125 +2024-08-29 16:13:29,254 INFO [train.py:1114] (1/4) Epoch 15, batch 2250, loss[loss=0.2128, simple_loss=0.2898, pruned_loss=0.04987, ctc_loss=0.09016, over 19619.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2786, pruned_loss=0.05458, ctc_loss=0.1025, over 3868504.27 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 32.0 +2024-08-29 16:14:15,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=197962.66666666666, ans=0.0 +2024-08-29 16:14:16,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=197962.66666666666, ans=0.025 +2024-08-29 16:14:45,290 INFO [train.py:1114] (1/4) Epoch 15, batch 2300, loss[loss=0.2013, simple_loss=0.2656, pruned_loss=0.04905, ctc_loss=0.09707, over 19482.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2778, pruned_loss=0.05475, ctc_loss=0.1029, over 3861569.81 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 32.0 +2024-08-29 16:14:50,773 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.686e+02 1.986e+02 2.467e+02 4.553e+02, threshold=3.971e+02, percent-clipped=1.0 +2024-08-29 16:14:59,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=198176.0, ans=0.05 +2024-08-29 16:14:59,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.48 vs. 
limit=15.0 +2024-08-29 16:15:04,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=198176.0, ans=0.125 +2024-08-29 16:15:10,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=198229.33333333334, ans=0.125 +2024-08-29 16:15:16,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=198229.33333333334, ans=0.125 +2024-08-29 16:15:22,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.81 vs. limit=15.0 +2024-08-29 16:15:26,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198282.66666666666, ans=0.1 +2024-08-29 16:15:30,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198336.0, ans=0.1 +2024-08-29 16:15:31,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.03 vs. limit=15.0 +2024-08-29 16:15:34,372 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:15:43,139 INFO [train.py:1114] (1/4) Epoch 15, batch 2350, loss[loss=0.2133, simple_loss=0.284, pruned_loss=0.0511, ctc_loss=0.1009, over 19666.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.278, pruned_loss=0.05482, ctc_loss=0.1031, over 3863692.55 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 64.0 +2024-08-29 16:15:44,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=198389.33333333334, ans=0.0 +2024-08-29 16:15:50,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.96 vs. limit=22.5 +2024-08-29 16:16:14,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198496.0, ans=0.125 +2024-08-29 16:16:16,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.44 vs. limit=22.5 +2024-08-29 16:16:20,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=198549.33333333334, ans=0.0 +2024-08-29 16:16:23,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=198549.33333333334, ans=0.125 +2024-08-29 16:16:27,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=198549.33333333334, ans=0.0 +2024-08-29 16:16:31,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=198602.66666666666, ans=0.0 +2024-08-29 16:16:42,886 INFO [train.py:1114] (1/4) Epoch 15, batch 2400, loss[loss=0.2262, simple_loss=0.2944, pruned_loss=0.0574, ctc_loss=0.1081, over 19295.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2798, pruned_loss=0.05527, ctc_loss=0.1038, over 3856804.39 frames. 
], batch size: 71, lr: 9.85e-03, grad_scale: 64.0 +2024-08-29 16:16:48,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.659e+02 1.944e+02 2.492e+02 3.873e+02, threshold=3.888e+02, percent-clipped=0.0 +2024-08-29 16:17:56,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=198762.66666666666, ans=0.125 +2024-08-29 16:18:10,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=198816.0, ans=0.125 +2024-08-29 16:18:17,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.09 vs. limit=15.0 +2024-08-29 16:18:18,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=198816.0, ans=0.125 +2024-08-29 16:18:25,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=198869.33333333334, ans=0.1 +2024-08-29 16:18:33,178 INFO [train.py:1114] (1/4) Epoch 15, batch 2450, loss[loss=0.3031, simple_loss=0.3291, pruned_loss=0.1001, ctc_loss=0.1923, over 12649.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2837, pruned_loss=0.05813, ctc_loss=0.1095, over 3727997.94 frames. ], batch size: 140, lr: 9.84e-03, grad_scale: 32.0 +2024-08-29 16:20:18,419 INFO [train.py:1114] (1/4) Epoch 16, batch 0, loss[loss=0.2006, simple_loss=0.2618, pruned_loss=0.05249, ctc_loss=0.08623, over 19798.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2618, pruned_loss=0.05249, ctc_loss=0.08623, over 19798.00 frames. ], batch size: 49, lr: 9.52e-03, grad_scale: 32.0 +2024-08-29 16:20:18,420 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 16:20:28,428 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1867, simple_loss=0.2755, pruned_loss=0.03636, ctc_loss=0.06317, over 944034.00 frames. +2024-08-29 16:20:28,429 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13709MB +2024-08-29 16:20:38,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199130.66666666666, ans=0.0 +2024-08-29 16:20:44,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199184.0, ans=0.125 +2024-08-29 16:20:48,968 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.810e+02 1.998e+02 2.276e+02 3.528e+02, threshold=3.997e+02, percent-clipped=0.0 +2024-08-29 16:21:08,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199290.66666666666, ans=0.125 +2024-08-29 16:21:30,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=199344.0, ans=0.2 +2024-08-29 16:21:32,417 INFO [train.py:1114] (1/4) Epoch 16, batch 50, loss[loss=0.1878, simple_loss=0.2524, pruned_loss=0.04509, ctc_loss=0.08228, over 19724.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2797, pruned_loss=0.05648, ctc_loss=0.1065, over 845315.06 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0 +2024-08-29 16:22:09,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.11 vs. 
limit=15.0 +2024-08-29 16:22:27,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199610.66666666666, ans=0.125 +2024-08-29 16:22:28,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=199610.66666666666, ans=0.125 +2024-08-29 16:22:40,106 INFO [train.py:1114] (1/4) Epoch 16, batch 100, loss[loss=0.2069, simple_loss=0.2683, pruned_loss=0.05265, ctc_loss=0.1004, over 19711.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2813, pruned_loss=0.0557, ctc_loss=0.1055, over 1500370.32 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0 +2024-08-29 16:22:40,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.58 vs. limit=22.5 +2024-08-29 16:23:05,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=199717.33333333334, ans=0.2 +2024-08-29 16:23:08,062 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.352e+02 1.815e+02 2.137e+02 2.569e+02 4.869e+02, threshold=4.274e+02, percent-clipped=1.0 +2024-08-29 16:23:21,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=199770.66666666666, ans=0.07 +2024-08-29 16:23:25,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199770.66666666666, ans=0.0 +2024-08-29 16:23:27,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=199824.0, ans=0.0 +2024-08-29 16:37:11,211 INFO [train.py:1114] (1/4) Epoch 16, batch 150, loss[loss=0.1847, simple_loss=0.2424, pruned_loss=0.04628, ctc_loss=0.08619, over 19726.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2791, pruned_loss=0.0546, ctc_loss=0.1024, over 2028336.53 frames. ], batch size: 47, lr: 9.50e-03, grad_scale: 32.0 +2024-08-29 16:40:06,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=199984.0, ans=0.0 +2024-08-29 16:40:58,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.02 vs. limit=15.0 +2024-08-29 16:41:12,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.02 vs. limit=15.0 +2024-08-29 16:44:45,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=200037.33333333334, ans=0.2 +2024-08-29 16:48:09,839 INFO [train.py:1114] (1/4) Epoch 16, batch 200, loss[loss=0.2419, simple_loss=0.2951, pruned_loss=0.06806, ctc_loss=0.1317, over 17985.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2768, pruned_loss=0.05333, ctc_loss=0.1001, over 2435728.98 frames. 
], batch size: 85, lr: 9.49e-03, grad_scale: 32.0
+2024-08-29 16:49:58,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=200197.33333333334, ans=0.0
+2024-08-29 16:49:58,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=200197.33333333334, ans=0.05
+2024-08-29 16:53:29,819 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.834e+02 2.227e+02 2.815e+02 4.534e+02, threshold=4.454e+02, percent-clipped=1.0
+2024-08-29 16:55:54,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=200357.33333333334, ans=0.125
+2024-08-29 16:56:29,763 INFO [train.py:1114] (1/4) Epoch 16, batch 250, loss[loss=0.2098, simple_loss=0.2774, pruned_loss=0.05116, ctc_loss=0.09941, over 19429.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2776, pruned_loss=0.05382, ctc_loss=0.1011, over 2754843.08 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0
+2024-08-29 16:56:42,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=200464.0, ans=0.0
+2024-08-29 16:56:43,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200464.0, ans=0.1
+2024-08-29 16:56:44,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=200464.0, ans=0.125
+2024-08-29 16:56:47,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.20 vs. limit=12.0
+2024-08-29 16:58:32,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=200517.33333333334, ans=0.125
+2024-08-29 16:58:42,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.89 vs. limit=22.5
+2024-08-29 16:58:55,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=200570.66666666666, ans=0.025
+2024-08-29 16:59:14,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200624.0, ans=0.1
+2024-08-29 17:01:55,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=200677.33333333334, ans=0.05
+2024-08-29 17:03:13,494 INFO [train.py:1114] (1/4) Epoch 16, batch 300, loss[loss=0.2228, simple_loss=0.2864, pruned_loss=0.05822, ctc_loss=0.1068, over 19518.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2777, pruned_loss=0.05428, ctc_loss=0.1022, over 3000764.20 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0
+2024-08-29 17:03:24,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=200730.66666666666, ans=0.0
+2024-08-29 17:03:36,040 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.663e+02 1.972e+02 2.398e+02 4.674e+02, threshold=3.943e+02, percent-clipped=1.0
+2024-08-29 17:08:30,914 INFO [train.py:1114] (1/4) Epoch 16, batch 350, loss[loss=0.2104, simple_loss=0.2664, pruned_loss=0.05703, ctc_loss=0.1007, over 19773.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2785, pruned_loss=0.05463, ctc_loss=0.1027, over 3190855.31 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0
+2024-08-29 17:12:09,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0
+2024-08-29 17:12:10,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201104.0, ans=0.125
+2024-08-29 17:13:11,905 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 17:13:12,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=201210.66666666666, ans=0.1
+2024-08-29 17:13:17,604 INFO [train.py:1114] (1/4) Epoch 16, batch 400, loss[loss=0.2153, simple_loss=0.2772, pruned_loss=0.05562, ctc_loss=0.1053, over 19477.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.278, pruned_loss=0.05428, ctc_loss=0.1023, over 3342904.94 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0
+2024-08-29 17:15:51,036 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.714e+02 1.905e+02 2.508e+02 3.565e+02, threshold=3.811e+02, percent-clipped=0.0
+2024-08-29 17:16:24,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=201370.66666666666, ans=0.07
+2024-08-29 17:16:51,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=201477.33333333334, ans=0.125
+2024-08-29 17:17:01,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201477.33333333334, ans=0.1
+2024-08-29 17:17:07,839 INFO [train.py:1114] (1/4) Epoch 16, batch 450, loss[loss=0.2288, simple_loss=0.2913, pruned_loss=0.06077, ctc_loss=0.1117, over 19611.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2783, pruned_loss=0.05443, ctc_loss=0.1024, over 3450336.79 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0
+2024-08-29 17:20:49,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=201637.33333333334, ans=0.2
+2024-08-29 17:21:10,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201690.66666666666, ans=0.125
+2024-08-29 17:21:57,452 INFO [train.py:1114] (1/4) Epoch 16, batch 500, loss[loss=0.2213, simple_loss=0.2954, pruned_loss=0.05465, ctc_loss=0.09452, over 19649.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2778, pruned_loss=0.05449, ctc_loss=0.1025, over 3545536.02 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0
+2024-08-29 17:21:59,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=201797.33333333334, ans=0.125
+2024-08-29 17:22:46,942 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.689e+02 2.169e+02 2.570e+02 5.370e+02, threshold=4.338e+02, percent-clipped=3.0
+2024-08-29 17:22:47,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.22 vs. limit=15.0
+2024-08-29 17:23:10,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=201904.0, ans=0.125
+2024-08-29 17:23:43,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.40 vs. limit=12.0
+2024-08-29 17:23:52,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=202010.66666666666, ans=0.0
+2024-08-29 17:24:02,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0
+2024-08-29 17:24:02,822 INFO [train.py:1114] (1/4) Epoch 16, batch 550, loss[loss=0.2558, simple_loss=0.3144, pruned_loss=0.072, ctc_loss=0.1333, over 19295.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2779, pruned_loss=0.0544, ctc_loss=0.1025, over 3607433.72 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0
+2024-08-29 17:24:05,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=202064.0, ans=0.125
+2024-08-29 17:24:24,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=202064.0, ans=0.0
+2024-08-29 17:24:29,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202117.33333333334, ans=0.0
+2024-08-29 17:24:36,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=202170.66666666666, ans=0.025
+2024-08-29 17:24:44,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=15.0
+2024-08-29 17:25:21,524 INFO [train.py:1114] (1/4) Epoch 16, batch 600, loss[loss=0.2304, simple_loss=0.2968, pruned_loss=0.05941, ctc_loss=0.113, over 19341.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2778, pruned_loss=0.05393, ctc_loss=0.1017, over 3665042.59 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0
+2024-08-29 17:25:25,324 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 17:25:27,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.40 vs. limit=15.0
+2024-08-29 17:27:03,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=202384.0, ans=0.0
+2024-08-29 17:27:04,566 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.652e+02 1.934e+02 2.290e+02 3.719e+02, threshold=3.867e+02, percent-clipped=0.0
+2024-08-29 17:28:12,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=202437.33333333334, ans=0.5
+2024-08-29 17:30:09,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=202490.66666666666, ans=0.125
+2024-08-29 17:30:09,235 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 17:31:03,786 INFO [train.py:1114] (1/4) Epoch 16, batch 650, loss[loss=0.2008, simple_loss=0.2684, pruned_loss=0.04866, ctc_loss=0.08987, over 19775.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2771, pruned_loss=0.05351, ctc_loss=0.101, over 3716361.67 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0
+2024-08-29 17:32:04,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=202597.33333333334, ans=0.125
+2024-08-29 17:32:16,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=202597.33333333334, ans=0.0
+2024-08-29 17:32:24,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=202650.66666666666, ans=0.04949747468305833
+2024-08-29 17:32:36,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=202704.0, ans=0.125
+2024-08-29 17:33:07,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0
+2024-08-29 17:33:43,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=202810.66666666666, ans=0.2
+2024-08-29 17:34:02,107 INFO [train.py:1114] (1/4) Epoch 16, batch 700, loss[loss=0.2245, simple_loss=0.2864, pruned_loss=0.05863, ctc_loss=0.1132, over 19726.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.278, pruned_loss=0.05418, ctc_loss=0.1022, over 3748555.01 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0
+2024-08-29 17:35:12,332 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.755e+02 2.110e+02 2.761e+02 5.047e+02, threshold=4.220e+02, percent-clipped=5.0
+2024-08-29 17:36:00,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202970.66666666666, ans=0.1
+2024-08-29 17:36:22,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.62 vs. limit=15.0
+2024-08-29 17:36:22,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.83 vs. limit=10.0
+2024-08-29 17:36:22,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=202970.66666666666, ans=0.125
+2024-08-29 17:38:14,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=203024.0, ans=0.125
+2024-08-29 17:41:59,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=203077.33333333334, ans=0.125
+2024-08-29 17:42:00,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=12.0
+2024-08-29 17:42:01,673 INFO [train.py:1114] (1/4) Epoch 16, batch 750, loss[loss=0.1938, simple_loss=0.2683, pruned_loss=0.0438, ctc_loss=0.0794, over 19527.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2771, pruned_loss=0.05376, ctc_loss=0.1013, over 3773192.21 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 32.0
+2024-08-29 17:42:27,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=203184.0, ans=0.2
+2024-08-29 17:46:04,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0
+2024-08-29 17:46:10,162 INFO [train.py:1114] (1/4) Epoch 16, batch 800, loss[loss=0.2058, simple_loss=0.2618, pruned_loss=0.05492, ctc_loss=0.1001, over 19808.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2767, pruned_loss=0.05347, ctc_loss=0.1005, over 3794738.18 frames. ], batch size: 49, lr: 9.42e-03, grad_scale: 32.0
+2024-08-29 17:48:03,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=203450.66666666666, ans=0.05
+2024-08-29 17:48:06,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0
+2024-08-29 17:48:15,895 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.745e+02 2.069e+02 2.556e+02 3.770e+02, threshold=4.138e+02, percent-clipped=0.0
+2024-08-29 17:49:06,932 INFO [train.py:1114] (1/4) Epoch 16, batch 850, loss[loss=0.2226, simple_loss=0.2896, pruned_loss=0.0572, ctc_loss=0.1032, over 19655.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2762, pruned_loss=0.05338, ctc_loss=0.1002, over 3814579.97 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0
+2024-08-29 17:49:40,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203770.66666666666, ans=0.0
+2024-08-29 17:50:21,098 INFO [train.py:1114] (1/4) Epoch 16, batch 900, loss[loss=0.1854, simple_loss=0.2494, pruned_loss=0.04475, ctc_loss=0.07982, over 19423.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2769, pruned_loss=0.05393, ctc_loss=0.1014, over 3817507.33 frames. ], batch size: 48, lr: 9.41e-03, grad_scale: 32.0
+2024-08-29 17:50:48,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.676e+02 1.827e+02 2.350e+02 4.099e+02, threshold=3.653e+02, percent-clipped=0.0
+2024-08-29 17:51:16,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204037.33333333334, ans=0.1
+2024-08-29 17:53:23,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=204144.0, ans=0.0
+2024-08-29 17:53:32,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=204144.0, ans=0.125
+2024-08-29 17:53:37,493 INFO [train.py:1114] (1/4) Epoch 16, batch 950, loss[loss=0.2095, simple_loss=0.2706, pruned_loss=0.05239, ctc_loss=0.1088, over 19489.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.277, pruned_loss=0.05383, ctc_loss=0.1013, over 3820500.71 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0
+2024-08-29 17:54:15,957 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 17:54:39,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=204250.66666666666, ans=0.0
+2024-08-29 17:54:54,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. limit=15.0
+2024-08-29 17:54:59,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0
+2024-08-29 17:55:35,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=204410.66666666666, ans=0.2
+2024-08-29 17:55:43,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=204410.66666666666, ans=0.125
+2024-08-29 17:55:46,640 INFO [train.py:1114] (1/4) Epoch 16, batch 1000, loss[loss=0.2193, simple_loss=0.2817, pruned_loss=0.05655, ctc_loss=0.1095, over 19869.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2782, pruned_loss=0.0543, ctc_loss=0.1022, over 3817813.39 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0
+2024-08-29 17:56:07,201 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.649e+02 1.918e+02 2.268e+02 3.238e+02, threshold=3.836e+02, percent-clipped=0.0
+2024-08-29 17:57:00,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204570.66666666666, ans=0.1
+2024-08-29 17:57:43,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.49 vs. limit=15.0
+2024-08-29 17:57:54,926 INFO [train.py:1114] (1/4) Epoch 16, batch 1050, loss[loss=0.2436, simple_loss=0.2993, pruned_loss=0.06845, ctc_loss=0.1272, over 19829.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2775, pruned_loss=0.05414, ctc_loss=0.1017, over 3823563.85 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 32.0
+2024-08-29 17:57:55,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=204730.66666666666, ans=0.125
+2024-08-29 17:58:28,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=204730.66666666666, ans=0.0
+2024-08-29 17:58:29,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=204730.66666666666, ans=0.125
+2024-08-29 17:58:29,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204730.66666666666, ans=0.1
+2024-08-29 17:58:30,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204730.66666666666, ans=0.1
+2024-08-29 18:00:49,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=204944.0, ans=0.125
+2024-08-29 18:00:53,273 INFO [train.py:1114] (1/4) Epoch 16, batch 1100, loss[loss=0.2166, simple_loss=0.2754, pruned_loss=0.05747, ctc_loss=0.1071, over 19592.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2774, pruned_loss=0.05422, ctc_loss=0.102, over 3829492.88 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 16.0
+2024-08-29 18:01:10,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=204997.33333333334, ans=0.125
+2024-08-29 18:01:19,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=205050.66666666666, ans=0.0
+2024-08-29 18:01:27,925 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.389e+02 1.694e+02 1.874e+02 2.325e+02 3.063e+02, threshold=3.748e+02, percent-clipped=0.0
+2024-08-29 18:02:43,480 INFO [train.py:1114] (1/4) Epoch 16, batch 1150, loss[loss=0.2003, simple_loss=0.2648, pruned_loss=0.0488, ctc_loss=0.0956, over 19567.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2773, pruned_loss=0.05422, ctc_loss=0.1022, over 3828230.62 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 16.0
+2024-08-29 18:02:46,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=205264.0, ans=0.125
+2024-08-29 18:02:48,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=205264.0, ans=0.125
+2024-08-29 18:03:01,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=205317.33333333334, ans=0.0
+2024-08-29 18:03:02,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=205317.33333333334, ans=0.5
+2024-08-29 18:03:19,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205424.0, ans=0.125
+2024-08-29 18:03:20,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0
+2024-08-29 18:03:40,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=205477.33333333334, ans=0.125
+2024-08-29 18:03:45,100 INFO [train.py:1114] (1/4) Epoch 16, batch 1200, loss[loss=0.215, simple_loss=0.286, pruned_loss=0.05197, ctc_loss=0.1001, over 19850.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2786, pruned_loss=0.0544, ctc_loss=0.1026, over 3824031.93 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0
+2024-08-29 18:03:50,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.62 vs. limit=15.0
+2024-08-29 18:04:06,312 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.725e+02 2.012e+02 2.470e+02 3.418e+02, threshold=4.024e+02, percent-clipped=0.0
+2024-08-29 18:04:07,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205637.33333333334, ans=0.125
+2024-08-29 18:04:40,675 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=15.0
+2024-08-29 18:04:50,727 INFO [train.py:1114] (1/4) Epoch 16, batch 1250, loss[loss=0.2415, simple_loss=0.3028, pruned_loss=0.066, ctc_loss=0.1205, over 19502.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2784, pruned_loss=0.05426, ctc_loss=0.1023, over 3842506.11 frames. ], batch size: 61, lr: 9.37e-03, grad_scale: 32.0
+2024-08-29 18:05:22,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=205850.66666666666, ans=0.125
+2024-08-29 18:05:33,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205904.0, ans=0.0
+2024-08-29 18:05:35,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=205904.0, ans=0.1
+2024-08-29 18:05:49,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=205957.33333333334, ans=0.025
+2024-08-29 18:05:50,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=205957.33333333334, ans=10.0
+2024-08-29 18:06:24,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=206010.66666666666, ans=0.0
+2024-08-29 18:06:29,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0
+2024-08-29 18:06:31,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206010.66666666666, ans=0.125
+2024-08-29 18:06:33,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206010.66666666666, ans=0.125
+2024-08-29 18:06:35,884 INFO [train.py:1114] (1/4) Epoch 16, batch 1300, loss[loss=0.2207, simple_loss=0.2892, pruned_loss=0.05525, ctc_loss=0.104, over 18933.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2777, pruned_loss=0.05374, ctc_loss=0.1012, over 3846574.33 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0
+2024-08-29 18:06:45,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=22.5
+2024-08-29 18:06:56,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=206117.33333333334, ans=0.5
+2024-08-29 18:06:57,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.716e+02 2.090e+02 2.690e+02 4.268e+02, threshold=4.180e+02, percent-clipped=3.0
+2024-08-29 18:07:04,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206170.66666666666, ans=0.0
+2024-08-29 18:07:07,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.96 vs. limit=22.5
+2024-08-29 18:07:12,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=206224.0, ans=0.125
+2024-08-29 18:07:18,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=206224.0, ans=0.2
+2024-08-29 18:07:22,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0
+2024-08-29 18:07:23,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=206277.33333333334, ans=0.125
+2024-08-29 18:07:34,540 INFO [train.py:1114] (1/4) Epoch 16, batch 1350, loss[loss=0.2193, simple_loss=0.2821, pruned_loss=0.05672, ctc_loss=0.1076, over 19737.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2772, pruned_loss=0.05339, ctc_loss=0.1006, over 3857255.26 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 32.0
+2024-08-29 18:09:45,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206490.66666666666, ans=0.1
+2024-08-29 18:09:47,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=206490.66666666666, ans=0.0
+2024-08-29 18:09:47,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=206490.66666666666, ans=0.025
+2024-08-29 18:09:47,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206490.66666666666, ans=0.125
+2024-08-29 18:10:13,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=206544.0, ans=0.125
+2024-08-29 18:10:56,015 INFO [train.py:1114] (1/4) Epoch 16, batch 1400, loss[loss=0.2021, simple_loss=0.2627, pruned_loss=0.05119, ctc_loss=0.09769, over 19665.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2769, pruned_loss=0.05331, ctc_loss=0.1004, over 3863890.49 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 32.0
+2024-08-29 18:10:58,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=206597.33333333334, ans=0.025
+2024-08-29 18:10:59,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=206597.33333333334, ans=0.0
+2024-08-29 18:12:49,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=206650.66666666666, ans=0.0
+2024-08-29 18:13:15,196 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.659e+02 1.830e+02 2.117e+02 3.619e+02, threshold=3.659e+02, percent-clipped=0.0
+2024-08-29 18:13:24,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=206704.0, ans=0.025
+2024-08-29 18:13:26,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=206704.0, ans=0.0
+2024-08-29 18:14:30,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206757.33333333334, ans=0.125
+2024-08-29 18:14:31,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.49 vs. limit=12.0
+2024-08-29 18:14:31,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=12.0
+2024-08-29 18:14:34,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=206757.33333333334, ans=0.0
+2024-08-29 18:14:38,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=206810.66666666666, ans=0.2
+2024-08-29 18:14:41,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0
+2024-08-29 18:14:49,669 INFO [train.py:1114] (1/4) Epoch 16, batch 1450, loss[loss=0.2204, simple_loss=0.2821, pruned_loss=0.05816, ctc_loss=0.1062, over 19670.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2774, pruned_loss=0.0534, ctc_loss=0.1006, over 3862124.09 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0
+2024-08-29 18:14:50,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0
+2024-08-29 18:15:27,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=206970.66666666666, ans=0.07
+2024-08-29 18:15:35,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=206970.66666666666, ans=0.125
+2024-08-29 18:15:37,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=207024.0, ans=0.025
+2024-08-29 18:16:06,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=207077.33333333334, ans=0.09899494936611666
+2024-08-29 18:16:06,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=207077.33333333334, ans=0.125
+2024-08-29 18:16:10,749 INFO [train.py:1114] (1/4) Epoch 16, batch 1500, loss[loss=0.2386, simple_loss=0.2999, pruned_loss=0.06431, ctc_loss=0.1217, over 19580.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2776, pruned_loss=0.05328, ctc_loss=0.1005, over 3862803.67 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0
+2024-08-29 18:16:26,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=207184.0, ans=0.2
+2024-08-29 18:16:32,415 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.680e+02 1.893e+02 2.490e+02 3.994e+02, threshold=3.786e+02, percent-clipped=1.0
+2024-08-29 18:16:41,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=207237.33333333334, ans=0.0
+2024-08-29 18:17:33,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=207397.33333333334, ans=0.125
+2024-08-29 18:17:34,473 INFO [train.py:1114] (1/4) Epoch 16, batch 1550, loss[loss=0.2362, simple_loss=0.2973, pruned_loss=0.06391, ctc_loss=0.1183, over 19604.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2787, pruned_loss=0.05408, ctc_loss=0.1022, over 3847412.94 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0
+2024-08-29 18:17:35,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=207397.33333333334, ans=0.0
+2024-08-29 18:17:36,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=207397.33333333334, ans=0.0
+2024-08-29 18:17:38,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=207397.33333333334, ans=0.0
+2024-08-29 18:19:24,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0
+2024-08-29 18:19:55,347 INFO [train.py:1114] (1/4) Epoch 16, batch 1600, loss[loss=0.236, simple_loss=0.3005, pruned_loss=0.06185, ctc_loss=0.1194, over 19845.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2784, pruned_loss=0.05423, ctc_loss=0.1024, over 3835699.28 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0
+2024-08-29 18:19:55,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=207664.0, ans=0.2
+2024-08-29 18:20:27,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=207664.0, ans=0.035
+2024-08-29 18:20:34,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=207664.0, ans=0.125
+2024-08-29 18:21:41,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0
+2024-08-29 18:21:55,746 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.773e+02 1.965e+02 2.508e+02 5.321e+02, threshold=3.930e+02, percent-clipped=3.0
+2024-08-29 18:21:57,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=207770.66666666666, ans=0.125
+2024-08-29 18:22:00,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207770.66666666666, ans=0.1
+2024-08-29 18:22:06,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=207770.66666666666, ans=0.125
+2024-08-29 18:22:11,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.62 vs. limit=10.0
+2024-08-29 18:22:12,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=207824.0, ans=0.125
+2024-08-29 18:22:12,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=207824.0, ans=0.0
+2024-08-29 18:22:53,651 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:23:00,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207930.66666666666, ans=0.1
+2024-08-29 18:23:01,481 INFO [train.py:1114] (1/4) Epoch 16, batch 1650, loss[loss=0.2219, simple_loss=0.2963, pruned_loss=0.05367, ctc_loss=0.1004, over 19618.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2785, pruned_loss=0.05465, ctc_loss=0.1032, over 3831939.66 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0
+2024-08-29 18:23:05,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207930.66666666666, ans=0.125
+2024-08-29 18:24:25,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=208037.33333333334, ans=0.125
+2024-08-29 18:24:26,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208037.33333333334, ans=0.125
+2024-08-29 18:24:40,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=208090.66666666666, ans=0.125
+2024-08-29 18:24:53,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=208144.0, ans=0.025
+2024-08-29 18:24:58,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=208197.33333333334, ans=0.025
+2024-08-29 18:26:13,693 INFO [train.py:1114] (1/4) Epoch 16, batch 1700, loss[loss=0.1729, simple_loss=0.2448, pruned_loss=0.03593, ctc_loss=0.0729, over 19656.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2779, pruned_loss=0.05407, ctc_loss=0.102, over 3846742.17 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 32.0
+2024-08-29 18:26:34,599 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.759e+02 2.180e+02 2.878e+02 5.111e+02, threshold=4.361e+02, percent-clipped=4.0
+2024-08-29 18:26:48,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=208304.0, ans=0.2
+2024-08-29 18:26:50,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=208357.33333333334, ans=0.125
+2024-08-29 18:26:58,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=208357.33333333334, ans=0.125
+2024-08-29 18:27:13,924 INFO [train.py:1114] (1/4) Epoch 16, batch 1750, loss[loss=0.1749, simple_loss=0.2389, pruned_loss=0.04079, ctc_loss=0.07308, over 19637.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2773, pruned_loss=0.05407, ctc_loss=0.1021, over 3850980.22 frames. ], batch size: 45, lr: 9.31e-03, grad_scale: 32.0
+2024-08-29 18:27:14,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=208464.0, ans=0.125
+2024-08-29 18:27:49,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=22.5
+2024-08-29 18:29:02,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0
+2024-08-29 18:29:23,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=208624.0, ans=0.0
+2024-08-29 18:30:06,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208677.33333333334, ans=0.1
+2024-08-29 18:30:17,802 INFO [train.py:1114] (1/4) Epoch 16, batch 1800, loss[loss=0.2122, simple_loss=0.2815, pruned_loss=0.05236, ctc_loss=0.09556, over 19619.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2771, pruned_loss=0.05385, ctc_loss=0.1017, over 3853296.31 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0
+2024-08-29 18:30:25,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=208730.66666666666, ans=0.125
+2024-08-29 18:30:37,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208784.0, ans=0.1
+2024-08-29 18:30:45,747 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.693e+02 1.985e+02 2.381e+02 4.228e+02, threshold=3.971e+02, percent-clipped=0.0
+2024-08-29 18:31:20,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=208944.0, ans=0.0
+2024-08-29 18:31:22,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=208944.0, ans=0.125
+2024-08-29 18:31:45,939 INFO [train.py:1114] (1/4) Epoch 16, batch 1850, loss[loss=0.2497, simple_loss=0.3056, pruned_loss=0.07034, ctc_loss=0.1329, over 19586.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2771, pruned_loss=0.05402, ctc_loss=0.1019, over 3856883.61 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0
+2024-08-29 18:32:48,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209104.0, ans=0.1
+2024-08-29 18:33:06,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=209210.66666666666, ans=0.125
+2024-08-29 18:33:10,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=209210.66666666666, ans=0.05
+2024-08-29 18:33:13,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=209210.66666666666, ans=0.0
+2024-08-29 18:33:17,353 INFO [train.py:1114] (1/4) Epoch 16, batch 1900, loss[loss=0.2047, simple_loss=0.2799, pruned_loss=0.04699, ctc_loss=0.08852, over 19647.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.278, pruned_loss=0.05439, ctc_loss=0.1024, over 3860956.87 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0
+2024-08-29 18:33:18,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=209264.0, ans=0.035
+2024-08-29 18:33:19,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=209264.0, ans=0.0
+2024-08-29 18:33:40,786 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.785e+02 2.354e+02 2.964e+02 6.037e+02, threshold=4.708e+02, percent-clipped=9.0
+2024-08-29 18:33:52,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=209370.66666666666, ans=0.125
+2024-08-29 18:33:53,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=209424.0, ans=0.125
+2024-08-29 18:34:30,814 INFO [train.py:1114] (1/4) Epoch 16, batch 1950, loss[loss=0.2213, simple_loss=0.2852, pruned_loss=0.05764, ctc_loss=0.1055, over 19605.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2785, pruned_loss=0.05418, ctc_loss=0.1019, over 3870048.19 frames. ], batch size: 52, lr: 9.29e-03, grad_scale: 32.0
+2024-08-29 18:34:35,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=209530.66666666666, ans=0.025
+2024-08-29 18:34:42,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0
+2024-08-29 18:35:06,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=209584.0, ans=0.0
+2024-08-29 18:35:08,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.40 vs. limit=8.0
+2024-08-29 18:35:15,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=209637.33333333334, ans=0.025
+2024-08-29 18:35:22,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=209690.66666666666, ans=0.2
+2024-08-29 18:35:38,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209744.0, ans=0.0
+2024-08-29 18:35:42,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=209744.0, ans=0.125
+2024-08-29 18:35:51,668 INFO [train.py:1114] (1/4) Epoch 16, batch 2000, loss[loss=0.2059, simple_loss=0.2582, pruned_loss=0.05672, ctc_loss=0.1005, over 19660.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2796, pruned_loss=0.0549, ctc_loss=0.1031, over 3855245.04 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0
+2024-08-29 18:36:03,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=209850.66666666666, ans=0.0
+2024-08-29 18:36:09,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=209850.66666666666, ans=0.125
+2024-08-29 18:36:13,162 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.666e+02 1.888e+02 2.185e+02 3.516e+02, threshold=3.775e+02, percent-clipped=0.0
+2024-08-29 18:36:14,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209904.0, ans=0.125
+2024-08-29 18:36:18,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=209904.0, ans=0.125
+2024-08-29 18:37:02,153 INFO [train.py:1114] (1/4) Epoch 16, batch 2050, loss[loss=0.2005, simple_loss=0.255, pruned_loss=0.05287, ctc_loss=0.1008, over 19737.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2783, pruned_loss=0.05458, ctc_loss=0.1026, over 3852853.59 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0
+2024-08-29 18:37:33,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=210064.0, ans=10.0
+2024-08-29 18:37:36,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=210064.0, ans=0.95
+2024-08-29 18:37:46,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=210117.33333333334, ans=0.125
+2024-08-29 18:37:51,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.15 vs. limit=22.5
+2024-08-29 18:37:52,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=210170.66666666666, ans=0.025
+2024-08-29 18:37:57,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210170.66666666666, ans=0.1
+2024-08-29 18:38:16,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=210170.66666666666, ans=0.125
+2024-08-29 18:38:32,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=210224.0, ans=0.125
+2024-08-29 18:38:48,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=210277.33333333334, ans=0.2
+2024-08-29 18:38:59,606 INFO [train.py:1114] (1/4) Epoch 16, batch 2100, loss[loss=0.2034, simple_loss=0.2746, pruned_loss=0.04882, ctc_loss=0.08652, over 19766.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2778, pruned_loss=0.05422, ctc_loss=0.102, over 3859400.07 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 32.0
+2024-08-29 18:39:08,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=210330.66666666666, ans=0.0
+2024-08-29 18:39:22,241 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.792e+02 2.112e+02 2.675e+02 4.176e+02, threshold=4.223e+02, percent-clipped=3.0
+2024-08-29 18:39:28,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210437.33333333334, ans=0.1
+2024-08-29 18:39:33,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=210437.33333333334, ans=0.0
+2024-08-29 18:39:37,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=210490.66666666666, ans=0.025
+2024-08-29 18:39:57,967 INFO [train.py:1114] (1/4) Epoch 16, batch 2150, loss[loss=0.1858, simple_loss=0.2578, pruned_loss=0.0414, ctc_loss=0.07755, over 19576.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2773, pruned_loss=0.05417, ctc_loss=0.1018, over 3869308.08 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0
+2024-08-29 18:40:05,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=210597.33333333334, ans=0.125
+2024-08-29 18:40:07,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.89 vs. limit=15.0
+2024-08-29 18:40:47,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0
+2024-08-29 18:41:08,955 INFO [train.py:1114] (1/4) Epoch 16, batch 2200, loss[loss=0.2042, simple_loss=0.2731, pruned_loss=0.04846, ctc_loss=0.09591, over 19591.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2768, pruned_loss=0.0537, ctc_loss=0.101, over 3867258.39 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0
+2024-08-29 18:41:29,789 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.757e+02 2.042e+02 2.598e+02 4.148e+02, threshold=4.084e+02, percent-clipped=0.0
+2024-08-29 18:42:31,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.57 vs. limit=22.5
+2024-08-29 18:42:31,565 INFO [train.py:1114] (1/4) Epoch 16, batch 2250, loss[loss=0.2181, simple_loss=0.2856, pruned_loss=0.0553, ctc_loss=0.1, over 19622.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2773, pruned_loss=0.05405, ctc_loss=0.1016, over 3866351.87 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0
+2024-08-29 18:42:48,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211184.0, ans=0.1
+2024-08-29 18:42:52,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211184.0, ans=0.1
+2024-08-29 18:43:15,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=211290.66666666666, ans=0.0
+2024-08-29 18:43:17,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.94 vs. limit=15.0
+2024-08-29 18:44:22,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.97 vs. limit=6.0
+2024-08-29 18:44:24,272 INFO [train.py:1114] (1/4) Epoch 16, batch 2300, loss[loss=0.1881, simple_loss=0.2587, pruned_loss=0.04296, ctc_loss=0.07883, over 19515.00 frames. ], tot_loss[loss=0.212, simple_loss=0.276, pruned_loss=0.05371, ctc_loss=0.1012, over 3859699.18 frames. ], batch size: 49, lr: 9.25e-03, grad_scale: 32.0
+2024-08-29 18:45:10,437 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.785e+02 2.121e+02 2.618e+02 4.213e+02, threshold=4.241e+02, percent-clipped=2.0
+2024-08-29 18:45:10,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211450.66666666666, ans=0.1
+2024-08-29 18:45:16,239 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:45:59,060 INFO [train.py:1114] (1/4) Epoch 16, batch 2350, loss[loss=0.2311, simple_loss=0.2954, pruned_loss=0.06194, ctc_loss=0.1077, over 19677.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2759, pruned_loss=0.05356, ctc_loss=0.101, over 3862859.07 frames. ], batch size: 63, lr: 9.24e-03, grad_scale: 32.0
+2024-08-29 18:46:17,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=211717.33333333334, ans=0.0
+2024-08-29 18:46:39,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=211824.0, ans=0.2
+2024-08-29 18:46:54,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211877.33333333334, ans=0.1
+2024-08-29 18:47:00,338 INFO [train.py:1114] (1/4) Epoch 16, batch 2400, loss[loss=0.2201, simple_loss=0.288, pruned_loss=0.05495, ctc_loss=0.1058, over 19323.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2777, pruned_loss=0.05399, ctc_loss=0.102, over 3858282.58 frames. ], batch size: 71, lr: 9.24e-03, grad_scale: 32.0
+2024-08-29 18:47:00,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=15.0
+2024-08-29 18:47:05,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.86 vs. limit=15.0
+2024-08-29 18:47:13,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=211984.0, ans=0.125
+2024-08-29 18:47:20,731 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.800e+02 2.132e+02 2.653e+02 4.129e+02, threshold=4.264e+02, percent-clipped=0.0
+2024-08-29 18:47:23,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=212037.33333333334, ans=0.0
+2024-08-29 18:47:37,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=212090.66666666666, ans=0.125
+2024-08-29 18:47:55,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.81 vs. limit=10.0
+2024-08-29 18:47:56,900 INFO [train.py:1114] (1/4) Epoch 16, batch 2450, loss[loss=0.2555, simple_loss=0.2986, pruned_loss=0.0777, ctc_loss=0.1427, over 13173.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2817, pruned_loss=0.0569, ctc_loss=0.1076, over 3731705.48 frames. ], batch size: 143, lr: 9.23e-03, grad_scale: 32.0
+2024-08-29 18:48:05,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=212197.33333333334, ans=0.0
+2024-08-29 18:48:05,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=212197.33333333334, ans=0.0
+2024-08-29 18:48:10,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=18.52 vs. limit=15.0
+2024-08-29 18:48:34,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=212357.33333333334, ans=0.0
+2024-08-29 18:48:34,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212357.33333333334, ans=0.1
+2024-08-29 18:55:35,479 INFO [train.py:1114] (1/4) Epoch 17, batch 0, loss[loss=0.2156, simple_loss=0.2669, pruned_loss=0.05999, ctc_loss=0.1109, over 19820.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2669, pruned_loss=0.05999, ctc_loss=0.1109, over 19820.00 frames. ], batch size: 49, lr: 8.95e-03, grad_scale: 32.0
+2024-08-29 18:55:35,479 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-29 18:56:00,095 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.1579, 4.4619, 3.9628, 4.1698], device='cuda:1')
+2024-08-29 18:56:00,527 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.9178, 2.1115, 3.4138, 3.5431], device='cuda:1')
+2024-08-29 18:56:04,690 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.1843, simple_loss=0.2733, pruned_loss=0.03544, ctc_loss=0.06098, over 944034.00 frames.
+2024-08-29 18:56:04,691 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13746MB
+2024-08-29 18:56:54,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=212458.66666666666, ans=0.04949747468305833
+2024-08-29 18:56:56,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=212458.66666666666, ans=0.2
+2024-08-29 18:57:44,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=212458.66666666666, ans=6.0
+2024-08-29 18:58:13,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=212458.66666666666, ans=0.0
+2024-08-29 18:58:17,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. limit=15.0
+2024-08-29 18:58:22,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212512.0, ans=0.125
+2024-08-29 18:58:30,839 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.824e+02 2.030e+02 2.233e+02 3.073e+02, threshold=4.061e+02, percent-clipped=0.0
+2024-08-29 18:58:36,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212565.33333333334, ans=0.1
+2024-08-29 18:58:39,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.44 vs. limit=15.0
+2024-08-29 19:05:26,872 INFO [train.py:1114] (1/4) Epoch 17, batch 50, loss[loss=0.2075, simple_loss=0.2707, pruned_loss=0.05266, ctc_loss=0.09763, over 19696.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2789, pruned_loss=0.05382, ctc_loss=0.103, over 844645.99 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 32.0
+2024-08-29 19:08:06,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=212778.66666666666, ans=0.0
+2024-08-29 19:08:10,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.94 vs. limit=22.5
+2024-08-29 19:08:14,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=212778.66666666666, ans=0.05
+2024-08-29 19:08:24,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0
+2024-08-29 19:08:31,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=212832.0, ans=0.125
+2024-08-29 19:08:52,530 INFO [train.py:1114] (1/4) Epoch 17, batch 100, loss[loss=0.1961, simple_loss=0.2684, pruned_loss=0.04431, ctc_loss=0.08798, over 19711.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2809, pruned_loss=0.05483, ctc_loss=0.1037, over 1499050.01 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 32.0
+2024-08-29 19:09:13,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=212992.0, ans=0.025
+2024-08-29 19:09:16,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=213045.33333333334, ans=0.1
+2024-08-29 19:09:25,903 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.707e+02 1.910e+02 2.335e+02 3.363e+02, threshold=3.820e+02, percent-clipped=0.0
+2024-08-29 19:09:30,764 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 19:09:38,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.86 vs. limit=10.0
+2024-08-29 19:09:58,145 INFO [train.py:1114] (1/4) Epoch 17, batch 150, loss[loss=0.1623, simple_loss=0.229, pruned_loss=0.03399, ctc_loss=0.06913, over 19703.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2776, pruned_loss=0.05326, ctc_loss=0.1004, over 2028977.84 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0
+2024-08-29 19:10:52,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=213205.33333333334, ans=0.0
+2024-08-29 19:10:52,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.53 vs. limit=15.0
+2024-08-29 19:12:27,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213312.0, ans=0.125
+2024-08-29 19:16:05,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=213365.33333333334, ans=0.125
+2024-08-29 19:16:15,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=213365.33333333334, ans=0.025
+2024-08-29 19:16:24,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213418.66666666666, ans=0.1
+2024-08-29 19:16:29,778 INFO [train.py:1114] (1/4) Epoch 17, batch 200, loss[loss=0.227, simple_loss=0.289, pruned_loss=0.06031, ctc_loss=0.1113, over 18150.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2769, pruned_loss=0.05285, ctc_loss=0.09965, over 2437042.07 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 32.0
+2024-08-29 19:16:34,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=213472.0, ans=0.125
+2024-08-29 19:24:57,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=213472.0, ans=0.125
+2024-08-29 19:25:01,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213472.0, ans=0.1
+2024-08-29 19:27:12,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=213525.33333333334, ans=0.125
+2024-08-29 19:27:23,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0
+2024-08-29 19:27:57,278 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.724e+02 1.931e+02 2.405e+02 4.691e+02, threshold=3.862e+02, percent-clipped=4.0
+2024-08-29 19:28:20,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=213685.33333333334, ans=0.025
+2024-08-29 19:28:32,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.59 vs. limit=15.0
+2024-08-29 19:28:36,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213685.33333333334, ans=0.1
+2024-08-29 19:28:38,490 INFO [train.py:1114] (1/4) Epoch 17, batch 250, loss[loss=0.2089, simple_loss=0.2837, pruned_loss=0.04961, ctc_loss=0.08752, over 19395.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2764, pruned_loss=0.05266, ctc_loss=0.09906, over 2757247.20 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0
+2024-08-29 19:29:12,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=12.0
+2024-08-29 19:29:46,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.74 vs. limit=22.5
+2024-08-29 19:30:02,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214005.33333333334, ans=0.125
+2024-08-29 19:30:03,450 INFO [train.py:1114] (1/4) Epoch 17, batch 300, loss[loss=0.2456, simple_loss=0.3028, pruned_loss=0.06936, ctc_loss=0.124, over 19510.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2754, pruned_loss=0.05233, ctc_loss=0.09848, over 3001398.30 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 32.0
+2024-08-29 19:31:58,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0
+2024-08-29 19:32:02,219 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.689e+02 1.972e+02 2.447e+02 4.331e+02, threshold=3.945e+02, percent-clipped=1.0
+2024-08-29 19:32:17,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=214165.33333333334, ans=0.125
+2024-08-29 19:32:41,676 INFO [train.py:1114] (1/4) Epoch 17, batch 350, loss[loss=0.2143, simple_loss=0.2706, pruned_loss=0.05697, ctc_loss=0.1099, over 19756.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2765, pruned_loss=0.05266, ctc_loss=0.09935, over 3191743.97 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0
+2024-08-29 19:32:43,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214272.0, ans=0.1
+2024-08-29 19:33:04,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=214325.33333333334, ans=0.125
+2024-08-29 19:33:45,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=214432.0, ans=0.0
+2024-08-29 19:33:54,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214432.0, ans=0.125
+2024-08-29 19:34:03,456 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0
+2024-08-29 19:34:08,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=214485.33333333334, ans=0.0
+2024-08-29 19:34:18,289 INFO [train.py:1114] (1/4) Epoch 17, batch 400, loss[loss=0.1982, simple_loss=0.2759, pruned_loss=0.04333, ctc_loss=0.08452, over 19487.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2755, pruned_loss=0.05199, ctc_loss=0.09798, over 3343628.60 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0
+2024-08-29 19:34:39,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=214538.66666666666, ans=0.125
+2024-08-29 19:34:50,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.16 vs. 
limit=22.5 +2024-08-29 19:35:28,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214592.0, ans=0.125 +2024-08-29 19:35:29,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214592.0, ans=0.1 +2024-08-29 19:35:33,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214645.33333333334, ans=0.1 +2024-08-29 19:35:37,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-29 19:36:30,687 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.665e+02 1.964e+02 2.553e+02 4.238e+02, threshold=3.929e+02, percent-clipped=2.0 +2024-08-29 19:37:49,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214752.0, ans=0.125 +2024-08-29 19:37:57,091 INFO [train.py:1114] (1/4) Epoch 17, batch 450, loss[loss=0.205, simple_loss=0.2852, pruned_loss=0.04567, ctc_loss=0.08386, over 19605.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2754, pruned_loss=0.05196, ctc_loss=0.0978, over 3450831.97 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:38:15,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=214805.33333333334, ans=0.125 +2024-08-29 19:38:32,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214858.66666666666, ans=0.125 +2024-08-29 19:38:42,641 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:38:58,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.23 vs. limit=15.0 +2024-08-29 19:39:30,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.49 vs. limit=15.0 +2024-08-29 19:40:14,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=215018.66666666666, ans=0.02 +2024-08-29 19:40:26,570 INFO [train.py:1114] (1/4) Epoch 17, batch 500, loss[loss=0.2001, simple_loss=0.2795, pruned_loss=0.04451, ctc_loss=0.07937, over 19661.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2744, pruned_loss=0.05156, ctc_loss=0.09707, over 3546821.36 frames. 
], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:41:54,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=215178.66666666666, ans=0.0 +2024-08-29 19:42:38,132 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.765e+02 1.983e+02 2.603e+02 4.687e+02, threshold=3.966e+02, percent-clipped=3.0 +2024-08-29 19:43:10,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215232.0, ans=0.125 +2024-08-29 19:43:32,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=215232.0, ans=0.125 +2024-08-29 19:43:33,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=215285.33333333334, ans=0.025 +2024-08-29 19:43:45,810 INFO [train.py:1114] (1/4) Epoch 17, batch 550, loss[loss=0.2209, simple_loss=0.2842, pruned_loss=0.05764, ctc_loss=0.1058, over 19299.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2742, pruned_loss=0.05173, ctc_loss=0.09741, over 3608064.98 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-29 19:44:10,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=215338.66666666666, ans=0.0 +2024-08-29 19:45:00,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=215392.0, ans=0.125 +2024-08-29 19:46:07,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=215552.0, ans=0.2 +2024-08-29 19:46:11,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=215552.0, ans=15.0 +2024-08-29 19:46:12,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=215552.0, ans=0.125 +2024-08-29 19:47:01,419 INFO [train.py:1114] (1/4) Epoch 17, batch 600, loss[loss=0.2426, simple_loss=0.3044, pruned_loss=0.06544, ctc_loss=0.1247, over 19430.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2744, pruned_loss=0.0517, ctc_loss=0.09714, over 3665994.82 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:47:01,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=215605.33333333334, ans=0.125 +2024-08-29 19:47:06,214 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:47:41,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=215658.66666666666, ans=0.0 +2024-08-29 19:47:49,286 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-08-29 19:48:19,060 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.690e+02 1.951e+02 2.307e+02 4.172e+02, threshold=3.901e+02, percent-clipped=2.0 +2024-08-29 19:48:32,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.56 vs. 
limit=22.5 +2024-08-29 19:49:04,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-29 19:49:05,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.80 vs. limit=15.0 +2024-08-29 19:49:17,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-29 19:49:21,622 INFO [train.py:1114] (1/4) Epoch 17, batch 650, loss[loss=0.2027, simple_loss=0.2669, pruned_loss=0.0497, ctc_loss=0.0977, over 19764.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2737, pruned_loss=0.05148, ctc_loss=0.09689, over 3716311.99 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:49:27,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=215872.0, ans=0.0 +2024-08-29 19:49:33,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=215872.0, ans=0.2 +2024-08-29 19:49:39,932 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.78 vs. limit=22.5 +2024-08-29 19:50:19,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=215978.66666666666, ans=0.0 +2024-08-29 19:51:32,001 INFO [train.py:1114] (1/4) Epoch 17, batch 700, loss[loss=0.1928, simple_loss=0.2621, pruned_loss=0.04503, ctc_loss=0.08341, over 19726.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2745, pruned_loss=0.0517, ctc_loss=0.09731, over 3749338.22 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:51:32,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=216138.66666666666, ans=0.125 +2024-08-29 19:52:43,598 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.757e+02 1.978e+02 2.439e+02 3.670e+02, threshold=3.956e+02, percent-clipped=0.0 +2024-08-29 19:53:36,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.77 vs. limit=5.0 +2024-08-29 19:53:42,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=216352.0, ans=0.05 +2024-08-29 19:53:46,884 INFO [train.py:1114] (1/4) Epoch 17, batch 750, loss[loss=0.2077, simple_loss=0.2808, pruned_loss=0.04834, ctc_loss=0.09486, over 19479.00 frames. ], tot_loss[loss=0.208, simple_loss=0.274, pruned_loss=0.05158, ctc_loss=0.09707, over 3775726.61 frames. 
], batch size: 54, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:54:29,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=216405.33333333334, ans=0.125 +2024-08-29 19:55:25,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=216458.66666666666, ans=0.125 +2024-08-29 19:55:48,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=216458.66666666666, ans=0.125 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-2 new file mode 100644 index 0000000000000000000000000000000000000000..1d4982eea27f10654d1ea4c2a7ae5e4e367da32d --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-2 @@ -0,0 +1,1191 @@ +2024-08-29 13:08:38,314 INFO [train.py:1182] (2/4) Training started +2024-08-29 13:08:41,303 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-29 13:08:41,306 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 
'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 13:08:41,307 INFO [train.py:1212] (2/4) About to create model +2024-08-29 13:08:41,985 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-29 13:08:41,985 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 13:08:59,021 INFO [train.py:1231] (2/4) Using DDP +2024-08-29 13:09:40,406 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-29 13:09:40,610 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-29 13:09:40,611 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-29 13:09:42,276 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-29 13:09:42,276 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-29 13:09:42,374 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-29 13:09:42,446 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-29 13:09:42,771 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-29 13:09:42,771 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 13:14:18,572 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-29 13:14:21,282 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-29 13:14:38,616 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 13:14:39,780 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=12.51 vs. limit=7.5 +2024-08-29 13:14:45,668 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 13:15:10,810 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 13:15:12,351 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 13:15:12,373 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-29 13:16:15,184 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.256, pruned_loss=0.05389, ctc_loss=0.09807, over 19789.00 frames. 
], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:16:15,185 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 13:16:31,404 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 13:16:31,405 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12967MB +2024-08-29 13:21:54,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172634.66666666666, ans=0.125 +2024-08-29 13:24:29,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=172688.0, ans=0.0 +2024-08-29 13:24:55,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=172688.0, ans=0.0 +2024-08-29 13:25:31,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=172741.33333333334, ans=0.0 +2024-08-29 13:26:36,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.07 vs. limit=15.0 +2024-08-29 13:26:36,728 INFO [train.py:1114] (2/4) Epoch 14, batch 50, loss[loss=0.1944, simple_loss=0.262, pruned_loss=0.04625, ctc_loss=0.08578, over 19751.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.288, pruned_loss=0.06104, ctc_loss=0.1157, over 845084.53 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:27:33,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172901.33333333334, ans=0.125 +2024-08-29 13:30:41,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.85 vs. limit=15.0 +2024-08-29 13:30:52,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=172954.66666666666, ans=0.125 +2024-08-29 13:30:52,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=172954.66666666666, ans=0.125 +2024-08-29 13:32:29,769 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.749e+02 1.974e+02 2.504e+02 4.970e+02, threshold=3.948e+02, percent-clipped=4.0 +2024-08-29 13:32:58,225 INFO [train.py:1114] (2/4) Epoch 14, batch 100, loss[loss=0.225, simple_loss=0.2822, pruned_loss=0.06166, ctc_loss=0.1112, over 19711.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2892, pruned_loss=0.06157, ctc_loss=0.1164, over 1499046.71 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:34:18,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=173221.33333333334, ans=0.0 +2024-08-29 13:34:47,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=173274.66666666666, ans=0.0 +2024-08-29 13:36:02,976 INFO [train.py:1114] (2/4) Epoch 14, batch 150, loss[loss=0.1864, simple_loss=0.2532, pruned_loss=0.04365, ctc_loss=0.08054, over 19692.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2848, pruned_loss=0.05882, ctc_loss=0.1109, over 2028604.36 frames. 
], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:36:03,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=173381.33333333334, ans=0.0 +2024-08-29 13:36:27,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=15.0 +2024-08-29 13:37:19,618 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.730e+02 2.035e+02 2.422e+02 3.683e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-29 13:37:21,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=173594.66666666666, ans=0.0 +2024-08-29 13:37:29,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=173648.0, ans=0.025 +2024-08-29 13:37:30,507 INFO [train.py:1114] (2/4) Epoch 14, batch 200, loss[loss=0.2416, simple_loss=0.2965, pruned_loss=0.06757, ctc_loss=0.1291, over 18454.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2838, pruned_loss=0.05848, ctc_loss=0.1103, over 2436843.10 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:37:42,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=173648.0, ans=0.125 +2024-08-29 13:37:43,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=173701.33333333334, ans=0.0 +2024-08-29 13:38:14,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=173701.33333333334, ans=0.2 +2024-08-29 13:38:46,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173701.33333333334, ans=0.0 +2024-08-29 13:39:53,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.55 vs. limit=10.0 +2024-08-29 13:40:12,291 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.21 vs. limit=15.0 +2024-08-29 13:40:51,996 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.46 vs. limit=10.0 +2024-08-29 13:42:18,839 INFO [train.py:1114] (2/4) Epoch 14, batch 250, loss[loss=0.2443, simple_loss=0.307, pruned_loss=0.06637, ctc_loss=0.1224, over 19384.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.284, pruned_loss=0.05871, ctc_loss=0.1107, over 2757067.39 frames. 
], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:43:11,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=173914.66666666666, ans=0.0 +2024-08-29 13:43:44,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173968.0, ans=0.1 +2024-08-29 13:43:56,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=174021.33333333334, ans=0.0 +2024-08-29 13:44:13,476 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.787e+02 2.022e+02 2.717e+02 4.953e+02, threshold=4.043e+02, percent-clipped=2.0 +2024-08-29 13:44:51,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=174181.33333333334, ans=0.2 +2024-08-29 13:44:52,061 INFO [train.py:1114] (2/4) Epoch 14, batch 300, loss[loss=0.2384, simple_loss=0.3008, pruned_loss=0.06413, ctc_loss=0.1195, over 19542.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2835, pruned_loss=0.05826, ctc_loss=0.1096, over 3001239.55 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:45:02,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=12.0 +2024-08-29 13:46:17,858 INFO [train.py:1114] (2/4) Epoch 14, batch 350, loss[loss=0.1895, simple_loss=0.2577, pruned_loss=0.04455, ctc_loss=0.08061, over 19738.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2839, pruned_loss=0.05875, ctc_loss=0.1104, over 3191462.47 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 16.0 +2024-08-29 13:47:23,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.46 vs. limit=10.0 +2024-08-29 13:47:31,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=174608.0, ans=0.2 +2024-08-29 13:47:32,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=174608.0, ans=0.125 +2024-08-29 13:47:33,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=174608.0, ans=0.025 +2024-08-29 13:47:39,415 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.793e+02 2.058e+02 2.658e+02 4.429e+02, threshold=4.116e+02, percent-clipped=3.0 +2024-08-29 13:48:31,289 INFO [train.py:1114] (2/4) Epoch 14, batch 400, loss[loss=0.2226, simple_loss=0.2834, pruned_loss=0.05753, ctc_loss=0.1169, over 19482.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2826, pruned_loss=0.05798, ctc_loss=0.109, over 3343716.94 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:48:48,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.27 vs. 
limit=22.5 +2024-08-29 13:48:49,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=174768.0, ans=10.0 +2024-08-29 13:50:10,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=174821.33333333334, ans=0.0 +2024-08-29 13:50:50,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174928.0, ans=0.1 +2024-08-29 13:50:50,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=15.0 +2024-08-29 13:50:55,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=174928.0, ans=0.025 +2024-08-29 13:50:57,558 INFO [train.py:1114] (2/4) Epoch 14, batch 450, loss[loss=0.2201, simple_loss=0.2812, pruned_loss=0.05784, ctc_loss=0.1085, over 19609.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2828, pruned_loss=0.05797, ctc_loss=0.1091, over 3449888.47 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:51:26,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=175088.0, ans=0.125 +2024-08-29 13:51:34,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0 +2024-08-29 13:51:35,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.47 vs. limit=22.5 +2024-08-29 13:51:39,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=175141.33333333334, ans=10.0 +2024-08-29 13:51:46,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.20 vs. limit=15.0 +2024-08-29 13:51:50,564 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.713e+02 1.900e+02 2.415e+02 4.159e+02, threshold=3.800e+02, percent-clipped=2.0 +2024-08-29 13:52:14,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=175194.66666666666, ans=0.0 +2024-08-29 13:52:16,283 INFO [train.py:1114] (2/4) Epoch 14, batch 500, loss[loss=0.2506, simple_loss=0.3101, pruned_loss=0.06955, ctc_loss=0.1299, over 19681.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2822, pruned_loss=0.0579, ctc_loss=0.1091, over 3545845.53 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:52:45,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.02 vs. limit=15.0 +2024-08-29 13:52:46,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=175301.33333333334, ans=0.2 +2024-08-29 13:52:48,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175354.66666666666, ans=0.0 +2024-08-29 13:52:51,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.16 vs. 
limit=12.0 +2024-08-29 13:52:59,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.29 vs. limit=10.0 +2024-08-29 13:53:06,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=175408.0, ans=0.125 +2024-08-29 13:53:12,488 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:53:14,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.48 vs. limit=15.0 +2024-08-29 13:53:16,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.78 vs. limit=15.0 +2024-08-29 13:53:22,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=175514.66666666666, ans=0.125 +2024-08-29 13:53:23,921 INFO [train.py:1114] (2/4) Epoch 14, batch 550, loss[loss=0.2655, simple_loss=0.3084, pruned_loss=0.08149, ctc_loss=0.1492, over 19311.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2822, pruned_loss=0.05798, ctc_loss=0.1093, over 3607179.29 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:53:34,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=175514.66666666666, ans=0.0 +2024-08-29 13:53:34,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=175514.66666666666, ans=0.05 +2024-08-29 13:53:34,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=175514.66666666666, ans=0.05 +2024-08-29 13:54:00,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.22 vs. limit=22.5 +2024-08-29 13:54:07,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=175674.66666666666, ans=0.07 +2024-08-29 13:54:09,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=175674.66666666666, ans=0.2 +2024-08-29 13:54:18,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.725e+02 1.963e+02 2.348e+02 4.063e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-29 13:54:28,212 INFO [train.py:1114] (2/4) Epoch 14, batch 600, loss[loss=0.2636, simple_loss=0.3204, pruned_loss=0.07603, ctc_loss=0.137, over 19341.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2821, pruned_loss=0.05777, ctc_loss=0.1088, over 3663666.03 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:54:49,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-29 13:54:50,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. 
limit=6.0 +2024-08-29 13:55:09,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175941.33333333334, ans=0.1 +2024-08-29 13:55:09,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=175941.33333333334, ans=0.125 +2024-08-29 13:55:22,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=175994.66666666666, ans=0.2 +2024-08-29 13:55:30,806 INFO [train.py:1114] (2/4) Epoch 14, batch 650, loss[loss=0.1931, simple_loss=0.2602, pruned_loss=0.04665, ctc_loss=0.08168, over 19778.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2814, pruned_loss=0.05735, ctc_loss=0.1079, over 3714475.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:55:35,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=176048.0, ans=0.5 +2024-08-29 13:55:45,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=176101.33333333334, ans=0.0 +2024-08-29 13:55:49,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.29 vs. limit=15.0 +2024-08-29 13:55:59,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176154.66666666666, ans=0.1 +2024-08-29 13:56:19,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176208.0, ans=0.1 +2024-08-29 13:56:24,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.743e+02 2.058e+02 2.560e+02 4.338e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-29 13:56:30,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176261.33333333334, ans=0.125 +2024-08-29 13:56:34,654 INFO [train.py:1114] (2/4) Epoch 14, batch 700, loss[loss=0.2009, simple_loss=0.2719, pruned_loss=0.04738, ctc_loss=0.08777, over 19746.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.281, pruned_loss=0.05665, ctc_loss=0.1067, over 3746176.45 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:56:52,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=176368.0, ans=0.0 +2024-08-29 13:56:53,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176368.0, ans=0.1 +2024-08-29 13:57:37,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0 +2024-08-29 13:57:48,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=176474.66666666666, ans=0.2 +2024-08-29 13:57:51,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.84 vs. 
limit=10.0 +2024-08-29 13:58:00,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=176528.0, ans=0.025 +2024-08-29 13:58:02,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-08-29 13:58:05,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176528.0, ans=0.125 +2024-08-29 13:58:06,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176528.0, ans=0.125 +2024-08-29 13:58:12,842 INFO [train.py:1114] (2/4) Epoch 14, batch 750, loss[loss=0.2264, simple_loss=0.2916, pruned_loss=0.05847, ctc_loss=0.1104, over 19499.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2803, pruned_loss=0.05598, ctc_loss=0.1054, over 3773023.39 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:58:16,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176581.33333333334, ans=0.0 +2024-08-29 13:58:24,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=176634.66666666666, ans=0.0 +2024-08-29 13:58:24,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=176634.66666666666, ans=0.0 +2024-08-29 13:58:26,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=176634.66666666666, ans=0.2 +2024-08-29 13:58:38,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=176688.0, ans=0.035 +2024-08-29 13:58:59,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=176741.33333333334, ans=0.2 +2024-08-29 13:59:04,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=176794.66666666666, ans=0.125 +2024-08-29 13:59:06,499 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.910e+02 2.277e+02 2.884e+02 4.780e+02, threshold=4.554e+02, percent-clipped=3.0 +2024-08-29 13:59:23,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=176794.66666666666, ans=0.025 +2024-08-29 13:59:25,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=176794.66666666666, ans=0.0 +2024-08-29 13:59:28,726 INFO [train.py:1114] (2/4) Epoch 14, batch 800, loss[loss=0.1979, simple_loss=0.2629, pruned_loss=0.0481, ctc_loss=0.09169, over 19393.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2811, pruned_loss=0.0564, ctc_loss=0.1064, over 3795191.57 frames. 
], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:59:34,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=176848.0, ans=10.0 +2024-08-29 13:59:38,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=176848.0, ans=0.025 +2024-08-29 13:59:44,748 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0 +2024-08-29 13:59:45,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=176901.33333333334, ans=0.125 +2024-08-29 13:59:53,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=176901.33333333334, ans=0.025 +2024-08-29 14:01:19,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=177008.0, ans=0.0 +2024-08-29 14:01:22,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177008.0, ans=0.125 +2024-08-29 14:02:30,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=177008.0, ans=0.0 +2024-08-29 14:02:49,613 INFO [train.py:1114] (2/4) Epoch 14, batch 850, loss[loss=0.2079, simple_loss=0.2803, pruned_loss=0.049, ctc_loss=0.09358, over 19643.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2809, pruned_loss=0.05649, ctc_loss=0.1065, over 3814842.26 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:02:52,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=177114.66666666666, ans=0.07 +2024-08-29 14:03:10,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=177168.0, ans=0.0 +2024-08-29 14:03:21,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177221.33333333334, ans=0.1 +2024-08-29 14:03:22,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=177221.33333333334, ans=0.1 +2024-08-29 14:03:40,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.703e+02 1.970e+02 2.385e+02 3.831e+02, threshold=3.939e+02, percent-clipped=0.0 +2024-08-29 14:03:49,900 INFO [train.py:1114] (2/4) Epoch 14, batch 900, loss[loss=0.2043, simple_loss=0.2573, pruned_loss=0.05479, ctc_loss=0.1042, over 19417.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2811, pruned_loss=0.05687, ctc_loss=0.1072, over 3817936.02 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:04:00,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=177434.66666666666, ans=0.0 +2024-08-29 14:04:05,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=177434.66666666666, ans=0.125 +2024-08-29 14:04:16,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.65 vs. 
limit=15.0 +2024-08-29 14:04:22,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=177488.0, ans=0.2 +2024-08-29 14:04:34,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=177541.33333333334, ans=0.0 +2024-08-29 14:04:34,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-29 14:04:35,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-29 14:04:44,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=177594.66666666666, ans=0.2 +2024-08-29 14:04:52,318 INFO [train.py:1114] (2/4) Epoch 14, batch 950, loss[loss=0.1934, simple_loss=0.2577, pruned_loss=0.04707, ctc_loss=0.08743, over 19488.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2815, pruned_loss=0.05718, ctc_loss=0.1078, over 3819119.82 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:04:53,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=177648.0, ans=0.2 +2024-08-29 14:05:03,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=177701.33333333334, ans=0.0 +2024-08-29 14:05:12,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177701.33333333334, ans=0.1 +2024-08-29 14:05:22,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1 +2024-08-29 14:05:26,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=177754.66666666666, ans=0.125 +2024-08-29 14:05:26,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=177754.66666666666, ans=0.025 +2024-08-29 14:05:30,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177754.66666666666, ans=0.125 +2024-08-29 14:06:19,921 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.335e+02 1.740e+02 1.996e+02 2.581e+02 3.979e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-29 14:06:21,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=177861.33333333334, ans=15.0 +2024-08-29 14:06:24,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=177861.33333333334, ans=0.125 +2024-08-29 14:07:04,899 INFO [train.py:1114] (2/4) Epoch 14, batch 1000, loss[loss=0.2194, simple_loss=0.2774, pruned_loss=0.05777, ctc_loss=0.1146, over 19852.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2826, pruned_loss=0.05785, ctc_loss=0.109, over 3815772.92 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:08:06,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1 +2024-08-29 14:08:25,717 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:08:30,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=178074.66666666666, ans=0.125 +2024-08-29 14:08:37,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=178074.66666666666, ans=0.125 +2024-08-29 14:08:42,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.67 vs. limit=15.0 +2024-08-29 14:08:56,383 INFO [train.py:1114] (2/4) Epoch 14, batch 1050, loss[loss=0.2395, simple_loss=0.3022, pruned_loss=0.06401, ctc_loss=0.1217, over 19852.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2819, pruned_loss=0.05766, ctc_loss=0.1084, over 3822703.55 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:08:59,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0 +2024-08-29 14:09:20,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178288.0, ans=0.125 +2024-08-29 14:09:21,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=178288.0, ans=0.0 +2024-08-29 14:09:36,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. limit=12.0 +2024-08-29 14:09:46,651 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.811e+02 2.215e+02 2.668e+02 4.320e+02, threshold=4.429e+02, percent-clipped=1.0 +2024-08-29 14:10:20,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178394.66666666666, ans=0.125 +2024-08-29 14:10:24,267 INFO [train.py:1114] (2/4) Epoch 14, batch 1100, loss[loss=0.1842, simple_loss=0.256, pruned_loss=0.03997, ctc_loss=0.08112, over 19588.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.281, pruned_loss=0.05678, ctc_loss=0.1069, over 3830526.86 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:13:47,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=12.0 +2024-08-29 14:14:51,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=178608.0, ans=0.09899494936611666 +2024-08-29 14:15:44,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=178608.0, ans=0.1 +2024-08-29 14:19:15,472 INFO [train.py:1114] (2/4) Epoch 14, batch 1150, loss[loss=0.2052, simple_loss=0.2738, pruned_loss=0.04951, ctc_loss=0.09424, over 19586.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2813, pruned_loss=0.05705, ctc_loss=0.1073, over 3831181.97 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:20:19,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=178821.33333333334, ans=0.035 +2024-08-29 14:20:32,902 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0 +2024-08-29 14:21:27,807 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:22:13,324 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.701e+02 1.876e+02 2.352e+02 3.362e+02, threshold=3.753e+02, percent-clipped=0.0 +2024-08-29 14:22:14,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.04 vs. limit=6.0 +2024-08-29 14:22:33,812 INFO [train.py:1114] (2/4) Epoch 14, batch 1200, loss[loss=0.2178, simple_loss=0.2867, pruned_loss=0.05444, ctc_loss=0.09984, over 19837.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2826, pruned_loss=0.05779, ctc_loss=0.1087, over 3826923.42 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:22:47,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=178981.33333333334, ans=0.04949747468305833 +2024-08-29 14:23:24,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=178981.33333333334, ans=0.125 +2024-08-29 14:23:48,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=179034.66666666666, ans=0.2 +2024-08-29 14:24:07,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-29 14:25:21,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.66 vs. limit=15.0 +2024-08-29 14:29:53,984 INFO [train.py:1114] (2/4) Epoch 14, batch 1250, loss[loss=0.2319, simple_loss=0.2986, pruned_loss=0.06079, ctc_loss=0.1088, over 19523.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2834, pruned_loss=0.05817, ctc_loss=0.1091, over 3844708.95 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:32:20,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=179354.66666666666, ans=0.125 +2024-08-29 14:32:41,060 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.718e+02 2.120e+02 2.679e+02 4.271e+02, threshold=4.240e+02, percent-clipped=3.0 +2024-08-29 14:32:45,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=179461.33333333334, ans=10.0 +2024-08-29 14:32:52,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-29 14:33:10,209 INFO [train.py:1114] (2/4) Epoch 14, batch 1300, loss[loss=0.2749, simple_loss=0.3228, pruned_loss=0.08324, ctc_loss=0.1513, over 18859.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2826, pruned_loss=0.0579, ctc_loss=0.1086, over 3848027.07 frames. 
], batch size: 76, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:33:54,989 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:34:06,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=179568.0, ans=0.2 +2024-08-29 14:35:05,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=179674.66666666666, ans=0.2 +2024-08-29 14:35:09,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.23 vs. limit=15.0 +2024-08-29 14:35:12,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=179728.0, ans=0.5 +2024-08-29 14:35:31,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0 +2024-08-29 14:35:39,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=179728.0, ans=0.0 +2024-08-29 14:35:40,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=179728.0, ans=0.0 +2024-08-29 14:35:42,425 INFO [train.py:1114] (2/4) Epoch 14, batch 1350, loss[loss=0.2239, simple_loss=0.2803, pruned_loss=0.06077, ctc_loss=0.115, over 19790.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2818, pruned_loss=0.05749, ctc_loss=0.1078, over 3857906.85 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:36:05,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=179834.66666666666, ans=0.125 +2024-08-29 14:36:11,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=179834.66666666666, ans=0.0 +2024-08-29 14:36:26,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=179888.0, ans=0.0 +2024-08-29 14:40:29,507 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.654e+02 1.881e+02 2.431e+02 4.376e+02, threshold=3.761e+02, percent-clipped=1.0 +2024-08-29 14:41:36,141 INFO [train.py:1114] (2/4) Epoch 14, batch 1400, loss[loss=0.2007, simple_loss=0.2613, pruned_loss=0.05072, ctc_loss=0.0969, over 19660.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2814, pruned_loss=0.0575, ctc_loss=0.1077, over 3865083.37 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:41:54,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=180101.33333333334, ans=0.0 +2024-08-29 14:42:23,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.93 vs. limit=22.5 +2024-08-29 14:42:39,827 INFO [train.py:1114] (2/4) Epoch 14, batch 1450, loss[loss=0.2624, simple_loss=0.3146, pruned_loss=0.07781, ctc_loss=0.1366, over 19663.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2827, pruned_loss=0.05808, ctc_loss=0.1088, over 3861354.24 frames. 
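
The `Whitening: name=..., num_groups=..., num_channels=..., metric=... vs. limit=...` lines come from a diagnostic on how anisotropic a layer's output covariance is: the metric is 1.0 for perfectly "white" features and grows as variance concentrates in a few directions, and it is logged alongside the limit above which a penalty applies. One plausible way to compute such a metric (a sketch; the exact icefall formula may differ):

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """How far the per-group feature covariance of x is from a multiple of
    the identity: 1.0 for perfectly whitened features, larger for
    anisotropic ones -- a sketch of the `metric=... vs. limit=...` quantity."""
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    # covariance per group: (num_groups, c, c)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames
    c = cov.shape[-1]
    # mean squared eigenvalue over squared mean eigenvalue:
    # trace(C @ C) * c / trace(C)^2, averaged over groups
    trace_cov = cov.diagonal(dim1=-2, dim2=-1).sum(-1)
    trace_cov_sq = (cov * cov.transpose(-2, -1)).sum((-2, -1))
    return (trace_cov_sq * c / trace_cov ** 2).mean()

feats = torch.randn(1000, 192)      # e.g. num_channels=192, num_groups=1
print(whitening_metric(feats))      # ~1.2 for random (near-white) features
```
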
], batch size: 63, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:42:40,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=180314.66666666666, ans=0.0 +2024-08-29 14:42:41,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=12.0 +2024-08-29 14:42:47,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=180314.66666666666, ans=0.0 +2024-08-29 14:42:51,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-08-29 14:43:07,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180421.33333333334, ans=0.0 +2024-08-29 14:43:09,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=180421.33333333334, ans=0.0 +2024-08-29 14:44:06,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180474.66666666666, ans=0.1 +2024-08-29 14:44:07,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=180474.66666666666, ans=0.125 +2024-08-29 14:44:08,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0 +2024-08-29 14:44:17,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=180528.0, ans=0.125 +2024-08-29 14:44:19,812 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.699e+02 1.929e+02 2.254e+02 4.469e+02, threshold=3.859e+02, percent-clipped=1.0 +2024-08-29 14:45:06,807 INFO [train.py:1114] (2/4) Epoch 14, batch 1500, loss[loss=0.2304, simple_loss=0.2932, pruned_loss=0.06126, ctc_loss=0.1125, over 19592.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.283, pruned_loss=0.05786, ctc_loss=0.1086, over 3861121.37 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:45:08,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180581.33333333334, ans=0.125 +2024-08-29 14:45:11,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180581.33333333334, ans=0.1 +2024-08-29 14:45:23,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=180634.66666666666, ans=10.0 +2024-08-29 14:45:43,286 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:45:57,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=180688.0, ans=0.0 +2024-08-29 14:46:21,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=180794.66666666666, ans=0.0 +2024-08-29 14:46:27,494 INFO [train.py:1114] (2/4) Epoch 14, batch 1550, loss[loss=0.2407, simple_loss=0.3035, pruned_loss=0.06454, ctc_loss=0.1219, over 19599.00 frames. 
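
Each `train.py:1114` summary reports the combined `loss` together with its `simple_loss` (trivial-joiner transducer term), `pruned_loss` (pruned transducer term) and `ctc_loss` components. The logged numbers are consistent with the weighting `loss = 0.5 * simple_loss + pruned_loss + 0.2 * ctc_loss`: for batch 1550 just above, 0.5 × 0.3035 + 0.06454 + 0.2 × 0.1219 ≈ 0.2407. The scale names below are assumptions (icefall exposes similar values as command-line flags):

```python
SIMPLE_LOSS_SCALE = 0.5   # assumed flag values, inferred from the logged numbers
CTC_LOSS_SCALE = 0.2

def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float) -> float:
    """Reproduce the `loss=` field from its logged components."""
    return SIMPLE_LOSS_SCALE * simple_loss + pruned_loss + CTC_LOSS_SCALE * ctc_loss

# Epoch 14, batch 1550 (just above):
print(combined_loss(0.3035, 0.06454, 0.1219))   # 0.24067 ~ logged loss=0.2407
```
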
], tot_loss[loss=0.2206, simple_loss=0.2827, pruned_loss=0.0576, ctc_loss=0.1082, over 3844900.86 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:46:27,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=180848.0, ans=0.125 +2024-08-29 14:46:37,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=180848.0, ans=0.0 +2024-08-29 14:46:45,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=180901.33333333334, ans=0.1 +2024-08-29 14:46:48,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.86 vs. limit=12.0 +2024-08-29 14:46:57,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-08-29 14:48:37,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.702e+02 2.011e+02 2.397e+02 3.479e+02, threshold=4.023e+02, percent-clipped=0.0 +2024-08-29 14:48:47,139 INFO [train.py:1114] (2/4) Epoch 14, batch 1600, loss[loss=0.2142, simple_loss=0.291, pruned_loss=0.05048, ctc_loss=0.09136, over 19846.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2828, pruned_loss=0.05796, ctc_loss=0.1086, over 3833861.75 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:48:48,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=181114.66666666666, ans=0.125 +2024-08-29 14:49:01,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.99 vs. limit=6.0 +2024-08-29 14:49:04,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181168.0, ans=0.125 +2024-08-29 14:49:50,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181221.33333333334, ans=0.1 +2024-08-29 14:51:00,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.62 vs. limit=22.5 +2024-08-29 14:51:08,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=181274.66666666666, ans=0.0 +2024-08-29 14:51:29,813 INFO [train.py:1114] (2/4) Epoch 14, batch 1650, loss[loss=0.2336, simple_loss=0.3, pruned_loss=0.06046, ctc_loss=0.1158, over 19648.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2827, pruned_loss=0.05808, ctc_loss=0.1089, over 3830692.64 frames. 
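
The `tot_loss[..., over N frames. ]` field is a frame-weighted running aggregate rather than a plain mean over batches: each component is accumulated as value × frames and divided by the accumulated frame count when printed, so long batches weigh proportionally more (the fractional frame totals suggest a decayed sum). A minimal tracker in that spirit; the decay constant is an assumption:

```python
class LossTracker:
    """Frame-weighted, exponentially decayed running average of loss
    components, printed in the style of the tot_loss[...] fields above."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.frames = 0.0
        self.sums = {}    # component name -> frame-weighted sum

    def update(self, batch_frames: float, **losses: float):
        self.frames = self.frames * self.decay + batch_frames
        for name, value in losses.items():
            prev = self.sums.get(name, 0.0) * self.decay
            self.sums[name] = prev + value * batch_frames

    def __str__(self):
        parts = ", ".join(f"{k}={v / self.frames:.4g}"
                          for k, v in self.sums.items())
        return f"tot_loss[{parts}, over {self.frames:.2f} frames. ]"

tracker = LossTracker()
tracker.update(19852.0, loss=0.2395, simple_loss=0.3022,
               pruned_loss=0.06401, ctc_loss=0.1217)
print(tracker)
```
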
], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:51:33,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=181381.33333333334, ans=0.0 +2024-08-29 14:51:44,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181434.66666666666, ans=0.1 +2024-08-29 14:51:46,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=181434.66666666666, ans=0.125 +2024-08-29 14:51:54,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181434.66666666666, ans=0.125 +2024-08-29 14:51:59,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=181488.0, ans=0.025 +2024-08-29 14:52:11,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=181488.0, ans=0.0 +2024-08-29 14:52:27,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181594.66666666666, ans=0.125 +2024-08-29 14:52:28,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.808e+02 2.247e+02 2.720e+02 5.029e+02, threshold=4.494e+02, percent-clipped=3.0 +2024-08-29 14:52:31,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181594.66666666666, ans=0.1 +2024-08-29 14:52:32,435 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:52:33,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=181594.66666666666, ans=0.0 +2024-08-29 14:52:38,159 INFO [train.py:1114] (2/4) Epoch 14, batch 1700, loss[loss=0.1758, simple_loss=0.235, pruned_loss=0.04201, ctc_loss=0.08143, over 19674.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2822, pruned_loss=0.05771, ctc_loss=0.1081, over 3844833.60 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:52:44,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.46 vs. limit=10.0 +2024-08-29 14:52:55,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.56 vs. limit=22.5 +2024-08-29 14:52:57,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=181701.33333333334, ans=10.0 +2024-08-29 14:53:01,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.43 vs. limit=15.0 +2024-08-29 14:53:10,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=181754.66666666666, ans=0.0 +2024-08-29 14:53:12,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181808.0, ans=0.0 +2024-08-29 14:53:13,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.30 vs. 
limit=12.0 +2024-08-29 14:53:17,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=181808.0, ans=0.125 +2024-08-29 14:53:46,215 INFO [train.py:1114] (2/4) Epoch 14, batch 1750, loss[loss=0.2062, simple_loss=0.2557, pruned_loss=0.05652, ctc_loss=0.1093, over 19631.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2816, pruned_loss=0.0573, ctc_loss=0.1077, over 3850031.43 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:53:46,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=181914.66666666666, ans=0.0 +2024-08-29 14:54:02,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=181968.0, ans=0.025 +2024-08-29 14:54:36,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=182021.33333333334, ans=0.2 +2024-08-29 14:55:03,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=182074.66666666666, ans=0.2 +2024-08-29 14:55:10,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182074.66666666666, ans=0.1 +2024-08-29 14:56:25,498 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.791e+02 2.085e+02 2.712e+02 5.021e+02, threshold=4.170e+02, percent-clipped=2.0 +2024-08-29 14:56:27,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=182128.0, ans=0.95 +2024-08-29 14:56:31,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=182128.0, ans=0.0 +2024-08-29 14:56:34,707 INFO [train.py:1114] (2/4) Epoch 14, batch 1800, loss[loss=0.2414, simple_loss=0.2993, pruned_loss=0.06631, ctc_loss=0.1272, over 19616.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2821, pruned_loss=0.05777, ctc_loss=0.1086, over 3851925.79 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:57:14,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182181.33333333334, ans=0.125 +2024-08-29 14:57:15,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=182181.33333333334, ans=0.0 +2024-08-29 14:58:07,427 INFO [train.py:1114] (2/4) Epoch 14, batch 1850, loss[loss=0.2124, simple_loss=0.2823, pruned_loss=0.0521, ctc_loss=0.09589, over 19581.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2815, pruned_loss=0.0573, ctc_loss=0.1077, over 3855220.69 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 14:58:09,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182448.0, ans=0.1 +2024-08-29 14:59:45,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=182554.66666666666, ans=0.1 +2024-08-29 14:59:45,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.34 vs. 
limit=15.0 +2024-08-29 15:00:48,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=182608.0, ans=0.125 +2024-08-29 15:03:29,626 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.679e+02 1.934e+02 2.278e+02 6.084e+02, threshold=3.868e+02, percent-clipped=1.0 +2024-08-29 15:03:40,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.68 vs. limit=15.0 +2024-08-29 15:03:40,833 INFO [train.py:1114] (2/4) Epoch 14, batch 1900, loss[loss=0.2228, simple_loss=0.29, pruned_loss=0.05598, ctc_loss=0.1089, over 19652.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2819, pruned_loss=0.05714, ctc_loss=0.1075, over 3859926.01 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:03:46,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=182714.66666666666, ans=0.0 +2024-08-29 15:03:51,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182768.0, ans=0.125 +2024-08-29 15:03:55,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182768.0, ans=0.125 +2024-08-29 15:04:54,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.81 vs. limit=15.0 +2024-08-29 15:05:18,937 INFO [train.py:1114] (2/4) Epoch 14, batch 1950, loss[loss=0.2148, simple_loss=0.2797, pruned_loss=0.05439, ctc_loss=0.1027, over 19597.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2835, pruned_loss=0.0576, ctc_loss=0.1081, over 3869013.35 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:05:38,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183034.66666666666, ans=0.1 +2024-08-29 15:05:39,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=183034.66666666666, ans=0.125 +2024-08-29 15:05:52,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=183141.33333333334, ans=0.125 +2024-08-29 15:05:53,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=183141.33333333334, ans=0.125 +2024-08-29 15:05:55,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=183141.33333333334, ans=0.125 +2024-08-29 15:06:06,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.683e+02 1.939e+02 2.319e+02 3.642e+02, threshold=3.877e+02, percent-clipped=0.0 +2024-08-29 15:06:44,339 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.45 vs. limit=15.0 +2024-08-29 15:06:48,389 INFO [train.py:1114] (2/4) Epoch 14, batch 2000, loss[loss=0.2042, simple_loss=0.2545, pruned_loss=0.05597, ctc_loss=0.105, over 19640.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2838, pruned_loss=0.05782, ctc_loss=0.1084, over 3853811.42 frames. 
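
The `WithLoss: name=...self_attn_weights, loss-sum=0.000e+00` entries (several appear above) report an auxiliary penalty attached directly to attention-weight tensors; a sum of zero means the weights are currently inside the allowed region, so the hook contributes no extra gradient. The specific penalty below is purely illustrative (an assumption); only the pass-through-plus-auxiliary-gradient mechanism is the point:

```python
import logging

import torch

logging.basicConfig(level=logging.INFO)

class WithLoss(torch.autograd.Function):
    """Pass x through unchanged in forward, but add the gradient of an
    auxiliary scalar penalty in backward, logging the penalty's value --
    a sketch of the mechanism behind the `WithLoss: ... loss-sum=` lines."""

    @staticmethod
    def forward(ctx, x: torch.Tensor, name: str):
        ctx.save_for_backward(x)
        ctx.name = name
        return x.clone()

    @staticmethod
    def backward(ctx, grad_output: torch.Tensor):
        (x,) = ctx.saved_tensors
        with torch.enable_grad():
            xd = x.detach().requires_grad_(True)
            # illustrative constraint: weights must stay <= 1, so the
            # penalty is zero whenever it is satisfied (as in the logs)
            penalty = torch.relu(xd - 1.0).sum()
            logging.info(f"WithLoss: name={ctx.name}, "
                         f"loss-sum={penalty.item():.3e}")
            (aux_grad,) = torch.autograd.grad(penalty, xd)
        return grad_output + aux_grad, None

attn = torch.softmax(torch.randn(8, 100, 100), dim=-1).requires_grad_(True)
out = WithLoss.apply(attn, "encoder.encoders.0.layers.0.self_attn_weights")
out.sum().backward()   # logs loss-sum=0.000e+00: softmax weights never exceed 1
```
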
], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:06:49,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183248.0, ans=0.1 +2024-08-29 15:07:00,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183301.33333333334, ans=0.125 +2024-08-29 15:07:32,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183408.0, ans=0.125 +2024-08-29 15:07:45,827 INFO [train.py:1114] (2/4) Epoch 14, batch 2050, loss[loss=0.1948, simple_loss=0.2566, pruned_loss=0.049, ctc_loss=0.0877, over 19736.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2829, pruned_loss=0.05766, ctc_loss=0.1082, over 3849695.00 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:07:46,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.37 vs. limit=22.5 +2024-08-29 15:07:57,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=183568.0, ans=0.0 +2024-08-29 15:08:03,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183568.0, ans=0.125 +2024-08-29 15:08:39,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-29 15:08:40,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183621.33333333334, ans=0.1 +2024-08-29 15:08:41,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=183621.33333333334, ans=0.125 +2024-08-29 15:09:22,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=22.5 +2024-08-29 15:09:27,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.60 vs. limit=15.0 +2024-08-29 15:09:30,357 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:09:33,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.39 vs. limit=22.5 +2024-08-29 15:09:39,965 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.749e+02 1.987e+02 2.455e+02 3.413e+02, threshold=3.973e+02, percent-clipped=0.0 +2024-08-29 15:09:48,926 INFO [train.py:1114] (2/4) Epoch 14, batch 2100, loss[loss=0.2118, simple_loss=0.277, pruned_loss=0.0529, ctc_loss=0.1021, over 19785.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2825, pruned_loss=0.05765, ctc_loss=0.1081, over 3857710.45 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:09:52,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.63 vs. 
limit=15.0 +2024-08-29 15:10:53,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=183994.66666666666, ans=0.07 +2024-08-29 15:10:57,760 INFO [train.py:1114] (2/4) Epoch 14, batch 2150, loss[loss=0.1764, simple_loss=0.254, pruned_loss=0.03595, ctc_loss=0.06709, over 19866.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2814, pruned_loss=0.05707, ctc_loss=0.107, over 3868069.69 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:11:00,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=184048.0, ans=0.125 +2024-08-29 15:11:19,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=184154.66666666666, ans=0.2 +2024-08-29 15:11:36,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184208.0, ans=0.125 +2024-08-29 15:11:44,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.399e+02 1.765e+02 2.209e+02 2.742e+02 6.061e+02, threshold=4.418e+02, percent-clipped=6.0 +2024-08-29 15:12:07,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=184261.33333333334, ans=0.025 +2024-08-29 15:12:09,348 INFO [train.py:1114] (2/4) Epoch 14, batch 2200, loss[loss=0.2518, simple_loss=0.3063, pruned_loss=0.07219, ctc_loss=0.1321, over 19569.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2812, pruned_loss=0.05685, ctc_loss=0.1064, over 3866659.28 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:12:12,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0 +2024-08-29 15:12:14,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184314.66666666666, ans=0.1 +2024-08-29 15:13:15,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=184421.33333333334, ans=0.125 +2024-08-29 15:13:17,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=184421.33333333334, ans=0.025 +2024-08-29 15:13:47,045 INFO [train.py:1114] (2/4) Epoch 14, batch 2250, loss[loss=0.2068, simple_loss=0.2751, pruned_loss=0.05087, ctc_loss=0.09203, over 19629.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.281, pruned_loss=0.05664, ctc_loss=0.1062, over 3866444.72 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:14:02,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184634.66666666666, ans=0.1 +2024-08-29 15:14:08,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184688.0, ans=0.125 +2024-08-29 15:14:11,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.97 vs. 
limit=15.0 +2024-08-29 15:14:15,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=184688.0, ans=0.125 +2024-08-29 15:14:26,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184741.33333333334, ans=0.1 +2024-08-29 15:14:34,160 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.796e+02 2.116e+02 2.512e+02 3.767e+02, threshold=4.231e+02, percent-clipped=0.0 +2024-08-29 15:14:37,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184794.66666666666, ans=0.0 +2024-08-29 15:14:43,318 INFO [train.py:1114] (2/4) Epoch 14, batch 2300, loss[loss=0.209, simple_loss=0.2684, pruned_loss=0.05468, ctc_loss=0.1007, over 19501.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2805, pruned_loss=0.0569, ctc_loss=0.1069, over 3860560.42 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 32.0 +2024-08-29 15:14:49,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=15.0 +2024-08-29 15:14:53,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=184901.33333333334, ans=0.2 +2024-08-29 15:14:56,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=184901.33333333334, ans=0.125 +2024-08-29 15:15:14,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.99 vs. limit=10.0 +2024-08-29 15:15:25,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185008.0, ans=0.125 +2024-08-29 15:15:30,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=185061.33333333334, ans=0.05 +2024-08-29 15:15:40,919 INFO [train.py:1114] (2/4) Epoch 14, batch 2350, loss[loss=0.2295, simple_loss=0.2962, pruned_loss=0.05984, ctc_loss=0.1076, over 19632.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2798, pruned_loss=0.05663, ctc_loss=0.1062, over 3863177.60 frames. 
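
The `grad_scale:` value at the end of each training summary is the current loss scale of mixed-precision training. PyTorch's `GradScaler` doubles the scale after a run of overflow-free steps and halves it when an inf/nan gradient is detected, which is why it moves between 32.0 and 64.0 around here (64.0 on batch 2350 just below, back to 32.0 by batch 2400, and briefly 16.0 early in epoch 15). The standard AMP loop, with a stand-in model:

```python
import torch

device = "cuda"                                   # AMP loss scaling is CUDA-side
model = torch.nn.Linear(80, 512).to(device)      # stand-in for the real model
optim = torch.optim.Adam(model.parameters(), lr=1.05e-2)
scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

x = torch.randn(16, 80, device=device)
for step in range(10):
    optim.zero_grad()
    with torch.cuda.amp.autocast():              # reduced-precision forward
        loss = model(x).pow(2).mean()
    scaler.scale(loss).backward()                # backward on the scaled loss
    scaler.step(optim)                           # unscales; skips step on inf/nan
    scaler.update()                              # grows or shrinks the scale
    print(step, scaler.get_scale())              # the logged `grad_scale:` value
```
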
], batch size: 63, lr: 1.05e-02, grad_scale: 64.0 +2024-08-29 15:15:41,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=185114.66666666666, ans=0.125 +2024-08-29 15:15:52,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=185168.0, ans=0.2 +2024-08-29 15:15:59,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=185168.0, ans=0.125 +2024-08-29 15:15:59,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185168.0, ans=0.1 +2024-08-29 15:16:08,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=185221.33333333334, ans=0.125 +2024-08-29 15:16:23,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=185274.66666666666, ans=0.125 +2024-08-29 15:16:25,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=185328.0, ans=0.0 +2024-08-29 15:16:28,749 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.724e+02 2.017e+02 2.647e+02 4.792e+02, threshold=4.034e+02, percent-clipped=3.0 +2024-08-29 15:16:36,537 INFO [train.py:1114] (2/4) Epoch 14, batch 2400, loss[loss=0.2426, simple_loss=0.3009, pruned_loss=0.067, ctc_loss=0.1259, over 19310.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2827, pruned_loss=0.05786, ctc_loss=0.1083, over 3857715.38 frames. ], batch size: 71, lr: 1.05e-02, grad_scale: 32.0 +2024-08-29 15:16:54,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=185434.66666666666, ans=0.025 +2024-08-29 15:17:27,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=22.5 +2024-08-29 15:17:38,556 INFO [train.py:1114] (2/4) Epoch 14, batch 2450, loss[loss=0.2777, simple_loss=0.3102, pruned_loss=0.08884, ctc_loss=0.169, over 14029.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2861, pruned_loss=0.06026, ctc_loss=0.1133, over 3730481.11 frames. ], batch size: 140, lr: 1.05e-02, grad_scale: 32.0 +2024-08-29 15:17:49,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=185701.33333333334, ans=0.125 +2024-08-29 15:17:55,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185701.33333333334, ans=0.0 +2024-08-29 15:18:02,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=185754.66666666666, ans=0.125 +2024-08-29 15:18:13,061 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:18:20,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=185808.0, ans=0.0 +2024-08-29 15:18:20,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.44 vs. 
limit=6.0 +2024-08-29 15:19:09,362 INFO [train.py:1114] (2/4) Epoch 15, batch 0, loss[loss=0.1964, simple_loss=0.2563, pruned_loss=0.04926, ctc_loss=0.09507, over 19833.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2563, pruned_loss=0.04926, ctc_loss=0.09507, over 19833.00 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:19:09,363 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 15:19:20,878 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1908, simple_loss=0.2785, pruned_loss=0.03825, ctc_loss=0.06651, over 944034.00 frames. +2024-08-29 15:19:20,878 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13710MB +2024-08-29 15:19:22,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=185856.0, ans=0.125 +2024-08-29 15:19:24,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=185856.0, ans=0.2 +2024-08-29 15:19:25,766 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.942e+02 2.136e+02 2.424e+02 3.799e+02, threshold=4.272e+02, percent-clipped=0.0 +2024-08-29 15:19:37,142 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:19:49,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185962.66666666666, ans=0.125 +2024-08-29 15:19:56,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=186016.0, ans=0.125 +2024-08-29 15:19:57,209 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:20:15,978 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:20:25,214 INFO [train.py:1114] (2/4) Epoch 15, batch 50, loss[loss=0.1832, simple_loss=0.2494, pruned_loss=0.04233, ctc_loss=0.08059, over 19709.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2867, pruned_loss=0.06009, ctc_loss=0.1142, over 844718.33 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:20:49,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=186229.33333333334, ans=0.2 +2024-08-29 15:20:59,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=186229.33333333334, ans=0.0 +2024-08-29 15:21:12,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.47 vs. limit=6.0 +2024-08-29 15:21:25,451 INFO [train.py:1114] (2/4) Epoch 15, batch 100, loss[loss=0.2268, simple_loss=0.2878, pruned_loss=0.06056, ctc_loss=0.1119, over 19719.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2862, pruned_loss=0.05977, ctc_loss=0.1133, over 1498447.04 frames. 
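
At batch 0 of each epoch (see `Computing validation loss` above) the script pauses training, evaluates the full validation set, and prints both the frame-weighted validation loss and the peak GPU allocation; note also that at batch 0 the `tot_loss[...]` equals the batch's own `loss[...]`, i.e. the running aggregate restarts each epoch. A sketch, with a hypothetical `forward_one_batch` helper standing in for the recipe's forward pass:

```python
import logging

import torch

logging.basicConfig(level=logging.INFO)

def forward_one_batch(model, feats):
    """Hypothetical stand-in: return (loss, num_frames) for one batch."""
    out = model(feats)
    return out.pow(2).mean(), float(feats.shape[0] * feats.shape[1])

@torch.no_grad()
def compute_validation_loss(model, valid_batches, device):
    model.eval()
    tot, tot_frames = 0.0, 0.0
    for feats in valid_batches:
        loss, num_frames = forward_one_batch(model, feats.to(device))
        tot += loss.item() * num_frames
        tot_frames += num_frames
    model.train()
    logging.info(f"validation: loss={tot / tot_frames:.4f}, "
                 f"over {tot_frames:.2f} frames.")
    # peak allocation since process start, printed as
    # `Maximum memory allocated so far is ...MB`
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    logging.info(f"Maximum memory allocated so far is {mb}MB")
```
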
], batch size: 51, lr: 1.02e-02, grad_scale: 32.0 +2024-08-29 15:21:30,082 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.739e+02 1.952e+02 2.450e+02 4.288e+02, threshold=3.904e+02, percent-clipped=1.0 +2024-08-29 15:21:39,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=186442.66666666666, ans=0.2 +2024-08-29 15:21:43,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186442.66666666666, ans=0.125 +2024-08-29 15:22:03,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=186549.33333333334, ans=0.2 +2024-08-29 15:22:14,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=22.5 +2024-08-29 15:22:29,350 INFO [train.py:1114] (2/4) Epoch 15, batch 150, loss[loss=0.2191, simple_loss=0.2601, pruned_loss=0.0647, ctc_loss=0.1215, over 19674.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2817, pruned_loss=0.05696, ctc_loss=0.1074, over 2027404.41 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:22:33,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.51 vs. limit=12.0 +2024-08-29 15:22:40,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=186709.33333333334, ans=0.125 +2024-08-29 15:22:47,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186709.33333333334, ans=0.125 +2024-08-29 15:23:00,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=186762.66666666666, ans=0.0 +2024-08-29 15:23:28,623 INFO [train.py:1114] (2/4) Epoch 15, batch 200, loss[loss=0.2522, simple_loss=0.3013, pruned_loss=0.07285, ctc_loss=0.1436, over 18235.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2805, pruned_loss=0.05644, ctc_loss=0.1064, over 2435581.35 frames. ], batch size: 85, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:23:44,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.690e+02 2.002e+02 2.433e+02 3.884e+02, threshold=4.003e+02, percent-clipped=0.0 +2024-08-29 15:23:53,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=186976.0, ans=0.125 +2024-08-29 15:24:21,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=187082.66666666666, ans=0.125 +2024-08-29 15:24:53,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.47 vs. limit=15.0 +2024-08-29 15:24:57,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=187136.0, ans=0.0 +2024-08-29 15:24:59,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=187189.33333333334, ans=0.1 +2024-08-29 15:25:01,150 INFO [train.py:1114] (2/4) Epoch 15, batch 250, loss[loss=0.2548, simple_loss=0.3134, pruned_loss=0.07205, ctc_loss=0.13, over 19383.00 frames. 
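
The learning rate decays slowly within an epoch (1.07e-02 down to 1.05e-02 over epoch 14) and then drops discretely at the epoch boundary (1.02e-02 from `Epoch 15, batch 0` onward). That shape matches icefall's Eden schedule, which multiplies a base LR by power-law factors in both the global step and the epoch. The constants below are assumptions for illustration; the run's actual base LR and schedule parameters are not recorded in this log:

```python
def eden_lr(base_lr: float, step: int, epoch: int,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    """Eden-style LR: smooth decay in the step count, plus an epoch-dependent
    factor that produces the discrete drop seen at each epoch boundary."""
    step_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * step_factor * epoch_factor

# with an assumed base_lr=0.045, the epoch-boundary drop is reproduced in shape:
for epoch, step in [(14, 34000), (15, 34500)]:
    print(f"epoch {epoch}: lr={eden_lr(0.045, step, epoch):.2e}")
```
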
], tot_loss[loss=0.2186, simple_loss=0.2809, pruned_loss=0.05678, ctc_loss=0.107, over 2755509.28 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:25:17,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187242.66666666666, ans=0.1 +2024-08-29 15:25:24,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.72 vs. limit=15.0 +2024-08-29 15:25:39,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-08-29 15:25:45,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187349.33333333334, ans=0.125 +2024-08-29 15:26:00,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187402.66666666666, ans=0.125 +2024-08-29 15:26:33,390 INFO [train.py:1114] (2/4) Epoch 15, batch 300, loss[loss=0.2356, simple_loss=0.2965, pruned_loss=0.06355, ctc_loss=0.1193, over 19522.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2802, pruned_loss=0.05614, ctc_loss=0.1058, over 3000361.20 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:26:38,053 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.706e+02 2.088e+02 2.592e+02 3.748e+02, threshold=4.177e+02, percent-clipped=0.0 +2024-08-29 15:26:38,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=187456.0, ans=0.0 +2024-08-29 15:26:39,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=187456.0, ans=0.125 +2024-08-29 15:27:07,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=187562.66666666666, ans=0.2 +2024-08-29 15:27:21,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=187616.0, ans=0.025 +2024-08-29 15:27:25,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187669.33333333334, ans=0.1 +2024-08-29 15:27:34,916 INFO [train.py:1114] (2/4) Epoch 15, batch 350, loss[loss=0.1902, simple_loss=0.252, pruned_loss=0.04709, ctc_loss=0.08577, over 19738.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2807, pruned_loss=0.05627, ctc_loss=0.1059, over 3190095.41 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 16.0 +2024-08-29 15:27:53,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=187776.0, ans=0.0 +2024-08-29 15:27:56,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0 +2024-08-29 15:28:07,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.01 vs. 
limit=12.0 +2024-08-29 15:28:12,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=187829.33333333334, ans=0.95 +2024-08-29 15:28:13,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.99 vs. limit=15.0 +2024-08-29 15:28:15,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187882.66666666666, ans=0.1 +2024-08-29 15:28:38,900 INFO [train.py:1114] (2/4) Epoch 15, batch 400, loss[loss=0.2253, simple_loss=0.2917, pruned_loss=0.05702, ctc_loss=0.1123, over 19479.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2802, pruned_loss=0.05609, ctc_loss=0.1054, over 3342533.88 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:28:39,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=187989.33333333334, ans=0.05 +2024-08-29 15:28:44,516 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.706e+02 2.043e+02 2.587e+02 5.210e+02, threshold=4.085e+02, percent-clipped=2.0 +2024-08-29 15:29:18,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=188042.66666666666, ans=0.0 +2024-08-29 15:29:38,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=188096.0, ans=0.09899494936611666 +2024-08-29 15:29:38,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.07 vs. limit=15.0 +2024-08-29 15:29:48,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188149.33333333334, ans=0.0 +2024-08-29 15:30:07,910 INFO [train.py:1114] (2/4) Epoch 15, batch 450, loss[loss=0.2276, simple_loss=0.299, pruned_loss=0.05719, ctc_loss=0.1045, over 19616.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2808, pruned_loss=0.05655, ctc_loss=0.1063, over 3451192.28 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:30:16,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188256.0, ans=0.0 +2024-08-29 15:30:19,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=188309.33333333334, ans=0.0 +2024-08-29 15:30:31,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188362.66666666666, ans=0.1 +2024-08-29 15:30:35,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0 +2024-08-29 15:30:40,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=188362.66666666666, ans=0.125 +2024-08-29 15:31:02,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188469.33333333334, ans=0.125 +2024-08-29 15:31:09,365 INFO [train.py:1114] (2/4) Epoch 15, batch 500, loss[loss=0.2374, simple_loss=0.3027, pruned_loss=0.06264, ctc_loss=0.1173, over 19721.00 frames. 
], tot_loss[loss=0.2169, simple_loss=0.2797, pruned_loss=0.05599, ctc_loss=0.1052, over 3546389.06 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:31:14,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.65 vs. limit=15.0 +2024-08-29 15:31:15,121 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.681e+02 1.897e+02 2.177e+02 4.545e+02, threshold=3.794e+02, percent-clipped=1.0 +2024-08-29 15:31:34,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=188629.33333333334, ans=0.125 +2024-08-29 15:31:34,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=188629.33333333334, ans=0.125 +2024-08-29 15:32:20,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=188629.33333333334, ans=0.0 +2024-08-29 15:32:20,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=188629.33333333334, ans=0.025 +2024-08-29 15:32:26,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0 +2024-08-29 15:32:31,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.61 vs. limit=22.5 +2024-08-29 15:32:42,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=188736.0, ans=0.025 +2024-08-29 15:32:43,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.00 vs. limit=15.0 +2024-08-29 15:32:45,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=188736.0, ans=0.0 +2024-08-29 15:32:51,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.49 vs. limit=8.0 +2024-08-29 15:32:59,036 INFO [train.py:1114] (2/4) Epoch 15, batch 550, loss[loss=0.2403, simple_loss=0.2983, pruned_loss=0.06631, ctc_loss=0.1241, over 19235.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.28, pruned_loss=0.0561, ctc_loss=0.1054, over 3607735.30 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:33:04,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=188789.33333333334, ans=0.0 +2024-08-29 15:33:05,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188789.33333333334, ans=0.0 +2024-08-29 15:33:06,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=188789.33333333334, ans=0.125 +2024-08-29 15:33:54,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188896.0, ans=0.125 +2024-08-29 15:33:59,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.05 vs. 
limit=15.0 +2024-08-29 15:34:01,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188896.0, ans=0.125 +2024-08-29 15:34:30,461 INFO [train.py:1114] (2/4) Epoch 15, batch 600, loss[loss=0.2652, simple_loss=0.3206, pruned_loss=0.07608, ctc_loss=0.1441, over 19372.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2798, pruned_loss=0.05586, ctc_loss=0.1049, over 3666449.23 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:34:30,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=189056.0, ans=0.1 +2024-08-29 15:34:33,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=189056.0, ans=0.0 +2024-08-29 15:34:36,390 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.830e+02 2.111e+02 2.732e+02 4.380e+02, threshold=4.223e+02, percent-clipped=4.0 +2024-08-29 15:34:36,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189056.0, ans=0.1 +2024-08-29 15:34:45,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.72 vs. limit=15.0 +2024-08-29 15:34:51,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189109.33333333334, ans=0.0 +2024-08-29 15:35:26,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=189269.33333333334, ans=0.09899494936611666 +2024-08-29 15:35:29,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189269.33333333334, ans=0.1 +2024-08-29 15:35:31,353 INFO [train.py:1114] (2/4) Epoch 15, batch 650, loss[loss=0.2279, simple_loss=0.2874, pruned_loss=0.06106, ctc_loss=0.1157, over 19753.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2798, pruned_loss=0.05612, ctc_loss=0.1057, over 3717593.53 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:35:35,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=189322.66666666666, ans=0.2 +2024-08-29 15:35:45,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=189376.0, ans=0.125 +2024-08-29 15:37:41,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=189376.0, ans=0.0 +2024-08-29 15:37:42,833 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.38 vs. 
limit=15.0 +2024-08-29 15:37:55,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=189429.33333333334, ans=0.02 +2024-08-29 15:38:08,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=189482.66666666666, ans=0.125 +2024-08-29 15:38:08,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=189482.66666666666, ans=0.1 +2024-08-29 15:38:14,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.46 vs. limit=15.0 +2024-08-29 15:38:22,771 INFO [train.py:1114] (2/4) Epoch 15, batch 700, loss[loss=0.2265, simple_loss=0.287, pruned_loss=0.06042, ctc_loss=0.1127, over 19734.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2805, pruned_loss=0.05636, ctc_loss=0.1061, over 3749162.36 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:38:24,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189589.33333333334, ans=0.125 +2024-08-29 15:38:28,537 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.846e+02 2.430e+02 3.057e+02 4.272e+02, threshold=4.860e+02, percent-clipped=1.0 +2024-08-29 15:38:32,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=189589.33333333334, ans=0.04949747468305833 +2024-08-29 15:38:55,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.07 vs. limit=6.0 +2024-08-29 15:39:01,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.17 vs. limit=15.0 +2024-08-29 15:39:03,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189749.33333333334, ans=0.125 +2024-08-29 15:39:15,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189802.66666666666, ans=0.125 +2024-08-29 15:39:25,955 INFO [train.py:1114] (2/4) Epoch 15, batch 750, loss[loss=0.2427, simple_loss=0.3011, pruned_loss=0.06657, ctc_loss=0.1277, over 19494.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2795, pruned_loss=0.05589, ctc_loss=0.1051, over 3775088.89 frames. 
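
One more pattern worth noting in these summaries: `batch size` (utterances) swings from about 45 to 140 while the per-batch frame count mostly stays near 19-20k, i.e. batches appear to be assembled against a total-duration budget rather than a fixed utterance count (lhotse-style dynamic bucketing; an inference from the numbers, not stated in the log). A toy version of duration-budgeted batching:

```python
from typing import List

def batch_by_duration(durations: List[float], max_seconds: float = 600.0):
    """Toy duration-budgeted batching: the utterance count per batch varies
    while total audio per batch stays roughly constant (the budget and the
    greedy policy here are assumptions)."""
    batches, current, total = [], [], 0.0
    for idx, dur in enumerate(durations):
        if current and total + dur > max_seconds:
            batches.append(current)
            current, total = [], 0.0
        current.append(idx)
        total += dur
    if current:
        batches.append(current)
    return batches

# short utterances -> large batches, long utterances -> small ones
print([len(b) for b in batch_by_duration([4.0] * 100 + [12.0] * 100)])
```
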
], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:39:26,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=189856.0, ans=0.0 +2024-08-29 15:39:28,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=189856.0, ans=0.2 +2024-08-29 15:39:30,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=189856.0, ans=0.125 +2024-08-29 15:39:51,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=189962.66666666666, ans=0.125 +2024-08-29 15:39:51,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189962.66666666666, ans=0.1 +2024-08-29 15:40:02,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=190016.0, ans=0.125 +2024-08-29 15:40:16,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0 +2024-08-29 15:40:28,196 INFO [train.py:1114] (2/4) Epoch 15, batch 800, loss[loss=0.2287, simple_loss=0.2735, pruned_loss=0.06742, ctc_loss=0.1228, over 19409.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2792, pruned_loss=0.05573, ctc_loss=0.1046, over 3796237.68 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 32.0 +2024-08-29 15:40:34,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.728e+02 2.068e+02 2.494e+02 4.984e+02, threshold=4.135e+02, percent-clipped=1.0 +2024-08-29 15:40:50,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=190176.0, ans=0.125 +2024-08-29 15:41:21,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=190336.0, ans=0.0 +2024-08-29 15:41:22,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=190336.0, ans=0.125 +2024-08-29 15:41:30,888 INFO [train.py:1114] (2/4) Epoch 15, batch 850, loss[loss=0.2367, simple_loss=0.3061, pruned_loss=0.06064, ctc_loss=0.115, over 19641.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2793, pruned_loss=0.05576, ctc_loss=0.1047, over 3814513.23 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:41:31,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=190389.33333333334, ans=0.0 +2024-08-29 15:41:36,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.20 vs. limit=10.0 +2024-08-29 15:41:39,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=190389.33333333334, ans=0.0 +2024-08-29 15:41:42,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=190442.66666666666, ans=0.0 +2024-08-29 15:41:52,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. 
limit=15.0 +2024-08-29 15:41:56,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=190496.0, ans=0.125 +2024-08-29 15:41:58,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=190496.0, ans=0.125 +2024-08-29 15:42:05,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=190549.33333333334, ans=0.025 +2024-08-29 15:42:34,686 INFO [train.py:1114] (2/4) Epoch 15, batch 900, loss[loss=0.1983, simple_loss=0.2597, pruned_loss=0.0494, ctc_loss=0.09539, over 19803.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2795, pruned_loss=0.05571, ctc_loss=0.1048, over 3818417.52 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:42:39,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=190656.0, ans=0.0 +2024-08-29 15:42:40,572 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.386e+02 1.760e+02 2.061e+02 2.441e+02 4.748e+02, threshold=4.121e+02, percent-clipped=4.0 +2024-08-29 15:42:58,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190709.33333333334, ans=0.125 +2024-08-29 15:43:08,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=190762.66666666666, ans=0.2 +2024-08-29 15:43:17,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=15.0 +2024-08-29 15:43:19,929 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:43:23,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=190816.0, ans=0.2 +2024-08-29 15:43:47,830 INFO [train.py:1114] (2/4) Epoch 15, batch 950, loss[loss=0.1848, simple_loss=0.2499, pruned_loss=0.04397, ctc_loss=0.07963, over 19507.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2801, pruned_loss=0.05591, ctc_loss=0.1053, over 3821132.33 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:43:51,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=190922.66666666666, ans=0.1 +2024-08-29 15:43:59,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190976.0, ans=0.1 +2024-08-29 15:44:09,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. 
limit=15.0 +2024-08-29 15:44:14,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=191029.33333333334, ans=0.0 +2024-08-29 15:44:20,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=191029.33333333334, ans=0.125 +2024-08-29 15:44:22,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=191029.33333333334, ans=0.05 +2024-08-29 15:44:26,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=191082.66666666666, ans=0.95 +2024-08-29 15:44:29,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=191082.66666666666, ans=0.125 +2024-08-29 15:44:48,251 INFO [train.py:1114] (2/4) Epoch 15, batch 1000, loss[loss=0.1911, simple_loss=0.2622, pruned_loss=0.04306, ctc_loss=0.08457, over 19856.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2805, pruned_loss=0.05602, ctc_loss=0.1053, over 3818637.71 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:44:49,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.59 vs. limit=12.0 +2024-08-29 15:44:56,848 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 1.691e+02 1.934e+02 2.300e+02 3.610e+02, threshold=3.869e+02, percent-clipped=0.0 +2024-08-29 15:44:59,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=191189.33333333334, ans=0.125 +2024-08-29 15:45:01,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=191242.66666666666, ans=0.125 +2024-08-29 15:45:20,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191296.0, ans=0.125 +2024-08-29 15:45:22,861 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.38 vs. limit=22.5 +2024-08-29 15:45:44,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.91 vs. limit=15.0 +2024-08-29 15:45:52,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=191456.0, ans=0.125 +2024-08-29 15:45:53,379 INFO [train.py:1114] (2/4) Epoch 15, batch 1050, loss[loss=0.207, simple_loss=0.2837, pruned_loss=0.04759, ctc_loss=0.08767, over 19838.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2791, pruned_loss=0.05545, ctc_loss=0.104, over 3825181.74 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:45:58,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.14 vs. 
limit=12.0 +2024-08-29 15:46:22,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=191562.66666666666, ans=0.0 +2024-08-29 15:46:23,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=191562.66666666666, ans=0.2 +2024-08-29 15:46:26,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=191562.66666666666, ans=0.2 +2024-08-29 15:46:38,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191616.0, ans=0.125 +2024-08-29 15:46:42,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.75 vs. limit=10.0 +2024-08-29 15:46:53,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.17 vs. limit=12.0 +2024-08-29 15:46:54,810 INFO [train.py:1114] (2/4) Epoch 15, batch 1100, loss[loss=0.1951, simple_loss=0.2629, pruned_loss=0.04664, ctc_loss=0.0851, over 19591.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2789, pruned_loss=0.05536, ctc_loss=0.1038, over 3832078.55 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:46:57,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191722.66666666666, ans=0.1 +2024-08-29 15:47:17,585 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.746e+02 1.965e+02 2.496e+02 3.903e+02, threshold=3.929e+02, percent-clipped=1.0 +2024-08-29 15:47:17,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=191722.66666666666, ans=0.5 +2024-08-29 15:47:23,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=191776.0, ans=0.035 +2024-08-29 15:47:27,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191776.0, ans=0.125 +2024-08-29 15:47:30,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=191776.0, ans=0.125 +2024-08-29 15:47:31,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.18 vs. limit=15.0 +2024-08-29 15:48:09,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=191936.0, ans=0.0 +2024-08-29 15:48:12,567 INFO [train.py:1114] (2/4) Epoch 15, batch 1150, loss[loss=0.2081, simple_loss=0.2745, pruned_loss=0.0515, ctc_loss=0.09673, over 19580.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2791, pruned_loss=0.05553, ctc_loss=0.1043, over 3829412.36 frames. 
], batch size: 52, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:48:18,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191989.33333333334, ans=0.125 +2024-08-29 15:48:18,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191989.33333333334, ans=0.125 +2024-08-29 15:48:20,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191989.33333333334, ans=0.125 +2024-08-29 15:48:20,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191989.33333333334, ans=0.1 +2024-08-29 15:48:23,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0 +2024-08-29 15:48:26,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.28 vs. limit=22.5 +2024-08-29 15:48:29,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=192042.66666666666, ans=0.2 +2024-08-29 15:48:29,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192042.66666666666, ans=0.1 +2024-08-29 15:48:39,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=192096.0, ans=0.125 +2024-08-29 15:49:07,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=192202.66666666666, ans=0.0 +2024-08-29 15:49:11,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192202.66666666666, ans=0.0 +2024-08-29 15:49:19,909 INFO [train.py:1114] (2/4) Epoch 15, batch 1200, loss[loss=0.2037, simple_loss=0.2715, pruned_loss=0.04996, ctc_loss=0.08978, over 19831.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2796, pruned_loss=0.05558, ctc_loss=0.1046, over 3824886.04 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:49:26,221 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.719e+02 2.001e+02 2.349e+02 3.398e+02, threshold=4.002e+02, percent-clipped=0.0 +2024-08-29 15:49:46,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=192309.33333333334, ans=0.0 +2024-08-29 15:49:47,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=192362.66666666666, ans=0.0 +2024-08-29 15:50:00,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.81 vs. limit=15.0 +2024-08-29 15:50:01,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=192416.0, ans=0.125 +2024-08-29 15:50:24,206 INFO [train.py:1114] (2/4) Epoch 15, batch 1250, loss[loss=0.2307, simple_loss=0.2943, pruned_loss=0.06236, ctc_loss=0.1059, over 19539.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2799, pruned_loss=0.05533, ctc_loss=0.1041, over 3842529.22 frames. 
], batch size: 61, lr: 1.00e-02, grad_scale: 32.0 +2024-08-29 15:50:51,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192629.33333333334, ans=0.125 +2024-08-29 15:51:00,542 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:51:11,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=192682.66666666666, ans=0.07 +2024-08-29 15:51:12,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=192736.0, ans=0.125 +2024-08-29 15:51:21,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=192736.0, ans=0.0 +2024-08-29 15:51:25,224 INFO [train.py:1114] (2/4) Epoch 15, batch 1300, loss[loss=0.2247, simple_loss=0.2925, pruned_loss=0.05732, ctc_loss=0.1058, over 18994.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2792, pruned_loss=0.05504, ctc_loss=0.1037, over 3846946.16 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0 +2024-08-29 15:52:15,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.668e+02 1.955e+02 2.455e+02 4.261e+02, threshold=3.910e+02, percent-clipped=2.0 +2024-08-29 15:52:27,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192842.66666666666, ans=0.125 +2024-08-29 15:52:29,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=192842.66666666666, ans=0.025 +2024-08-29 15:52:36,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=192896.0, ans=0.125 +2024-08-29 15:52:59,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193002.66666666666, ans=0.125 +2024-08-29 15:53:10,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193056.0, ans=0.125 +2024-08-29 15:53:11,012 INFO [train.py:1114] (2/4) Epoch 15, batch 1350, loss[loss=0.214, simple_loss=0.2827, pruned_loss=0.05244, ctc_loss=0.101, over 19791.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2784, pruned_loss=0.05459, ctc_loss=0.1026, over 3857593.22 frames. ], batch size: 54, lr: 9.98e-03, grad_scale: 32.0 +2024-08-29 15:53:17,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=193056.0, ans=0.125 +2024-08-29 15:53:22,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.22 vs. limit=22.5 +2024-08-29 15:53:22,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.50 vs. limit=15.0 +2024-08-29 15:53:24,001 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.73 vs. limit=15.0 +2024-08-29 15:53:36,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.00 vs. 
limit=15.0 +2024-08-29 15:53:50,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=193216.0, ans=0.125 +2024-08-29 15:54:04,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=193269.33333333334, ans=0.125 +2024-08-29 15:54:08,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.85 vs. limit=15.0 +2024-08-29 15:54:08,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=15.0 +2024-08-29 15:54:14,986 INFO [train.py:1114] (2/4) Epoch 15, batch 1400, loss[loss=0.1862, simple_loss=0.2488, pruned_loss=0.0452, ctc_loss=0.08282, over 19685.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2779, pruned_loss=0.05454, ctc_loss=0.1024, over 3864412.35 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0 +2024-08-29 15:54:36,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=193322.66666666666, ans=0.125 +2024-08-29 15:54:37,466 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.658e+02 1.833e+02 2.351e+02 3.730e+02, threshold=3.665e+02, percent-clipped=0.0 +2024-08-29 15:54:37,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=193322.66666666666, ans=0.125 +2024-08-29 15:54:49,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193376.0, ans=0.125 +2024-08-29 15:55:19,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=193482.66666666666, ans=0.125 +2024-08-29 15:55:20,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=193482.66666666666, ans=0.2 +2024-08-29 15:55:43,679 INFO [train.py:1114] (2/4) Epoch 15, batch 1450, loss[loss=0.2581, simple_loss=0.3129, pruned_loss=0.07341, ctc_loss=0.1412, over 19631.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.279, pruned_loss=0.05514, ctc_loss=0.1038, over 3862827.17 frames. ], batch size: 63, lr: 9.97e-03, grad_scale: 32.0 +2024-08-29 15:56:03,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=193642.66666666666, ans=0.04949747468305833 +2024-08-29 15:56:06,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=193642.66666666666, ans=0.09899494936611666 +2024-08-29 15:56:34,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193802.66666666666, ans=0.125 +2024-08-29 15:56:45,752 INFO [train.py:1114] (2/4) Epoch 15, batch 1500, loss[loss=0.2343, simple_loss=0.3054, pruned_loss=0.05908, ctc_loss=0.1126, over 19600.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2798, pruned_loss=0.05557, ctc_loss=0.1045, over 3862635.72 frames. 
], batch size: 57, lr: 9.96e-03, grad_scale: 32.0 +2024-08-29 15:56:50,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193856.0, ans=0.125 +2024-08-29 15:56:52,436 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.660e+02 1.885e+02 2.337e+02 4.281e+02, threshold=3.770e+02, percent-clipped=2.0 +2024-08-29 15:57:02,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-29 15:57:04,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-29 15:57:51,480 INFO [train.py:1114] (2/4) Epoch 15, batch 1550, loss[loss=0.246, simple_loss=0.3022, pruned_loss=0.06823, ctc_loss=0.1334, over 19585.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2802, pruned_loss=0.05583, ctc_loss=0.1056, over 3846467.81 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0 +2024-08-29 15:57:51,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=194122.66666666666, ans=0.0 +2024-08-29 15:58:13,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=194176.0, ans=0.0 +2024-08-29 15:58:21,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=194229.33333333334, ans=0.125 +2024-08-29 15:58:22,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194229.33333333334, ans=0.0 +2024-08-29 15:58:34,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-29 15:58:53,431 INFO [train.py:1114] (2/4) Epoch 15, batch 1600, loss[loss=0.2094, simple_loss=0.2783, pruned_loss=0.05062, ctc_loss=0.09817, over 19824.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2799, pruned_loss=0.05556, ctc_loss=0.105, over 3835679.45 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0 +2024-08-29 15:58:59,523 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.762e+02 2.164e+02 2.478e+02 4.927e+02, threshold=4.328e+02, percent-clipped=7.0 +2024-08-29 15:58:59,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=194389.33333333334, ans=0.07 +2024-08-29 16:00:03,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=194496.0, ans=0.0 +2024-08-29 16:00:25,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=194602.66666666666, ans=0.2 +2024-08-29 16:00:35,225 INFO [train.py:1114] (2/4) Epoch 15, batch 1650, loss[loss=0.2143, simple_loss=0.2895, pruned_loss=0.05157, ctc_loss=0.08966, over 19655.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2794, pruned_loss=0.05541, ctc_loss=0.1046, over 3831237.10 frames. 
], batch size: 59, lr: 9.94e-03, grad_scale: 32.0 +2024-08-29 16:00:55,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=194709.33333333334, ans=0.2 +2024-08-29 16:01:01,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=194762.66666666666, ans=0.125 +2024-08-29 16:01:24,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194816.0, ans=0.125 +2024-08-29 16:01:29,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=194869.33333333334, ans=0.0 +2024-08-29 16:01:34,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=194869.33333333334, ans=0.125 +2024-08-29 16:01:38,040 INFO [train.py:1114] (2/4) Epoch 15, batch 1700, loss[loss=0.1731, simple_loss=0.2369, pruned_loss=0.03976, ctc_loss=0.07424, over 19696.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2792, pruned_loss=0.05539, ctc_loss=0.1043, over 3845903.82 frames. ], batch size: 46, lr: 9.94e-03, grad_scale: 32.0 +2024-08-29 16:01:44,063 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.696e+02 2.083e+02 2.797e+02 4.802e+02, threshold=4.167e+02, percent-clipped=3.0 +2024-08-29 16:01:45,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=194922.66666666666, ans=0.125 +2024-08-29 16:01:45,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194922.66666666666, ans=0.1 +2024-08-29 16:01:50,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=194976.0, ans=0.0 +2024-08-29 16:01:57,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.64 vs. limit=15.0 +2024-08-29 16:01:58,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. limit=6.0 +2024-08-29 16:01:59,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=194976.0, ans=0.2 +2024-08-29 16:02:02,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=195029.33333333334, ans=0.0 +2024-08-29 16:02:04,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=195029.33333333334, ans=0.2 +2024-08-29 16:02:23,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=195082.66666666666, ans=0.125 +2024-08-29 16:02:32,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195136.0, ans=0.1 +2024-08-29 16:02:40,447 INFO [train.py:1114] (2/4) Epoch 15, batch 1750, loss[loss=0.1776, simple_loss=0.2427, pruned_loss=0.04107, ctc_loss=0.07602, over 19637.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2787, pruned_loss=0.05514, ctc_loss=0.1039, over 3851631.43 frames. 
], batch size: 45, lr: 9.93e-03, grad_scale: 32.0 +2024-08-29 16:02:40,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=195189.33333333334, ans=0.0 +2024-08-29 16:02:47,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=195189.33333333334, ans=0.125 +2024-08-29 16:02:47,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=195189.33333333334, ans=0.125 +2024-08-29 16:03:01,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=195242.66666666666, ans=0.125 +2024-08-29 16:03:06,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=195296.0, ans=0.2 +2024-08-29 16:03:10,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195296.0, ans=0.1 +2024-08-29 16:03:19,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=195349.33333333334, ans=0.2 +2024-08-29 16:03:21,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=195349.33333333334, ans=10.0 +2024-08-29 16:03:37,862 INFO [train.py:1114] (2/4) Epoch 15, batch 1800, loss[loss=0.2086, simple_loss=0.2788, pruned_loss=0.05049, ctc_loss=0.09367, over 19621.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2793, pruned_loss=0.05544, ctc_loss=0.1045, over 3853079.40 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 32.0 +2024-08-29 16:03:43,647 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.702e+02 2.083e+02 2.690e+02 4.339e+02, threshold=4.166e+02, percent-clipped=1.0 +2024-08-29 16:03:50,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=195509.33333333334, ans=0.0 +2024-08-29 16:03:57,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=195509.33333333334, ans=0.125 +2024-08-29 16:03:58,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195509.33333333334, ans=0.1 +2024-08-29 16:04:09,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=195562.66666666666, ans=0.0 +2024-08-29 16:04:33,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=195722.66666666666, ans=0.07 +2024-08-29 16:04:34,690 INFO [train.py:1114] (2/4) Epoch 15, batch 1850, loss[loss=0.2293, simple_loss=0.2974, pruned_loss=0.05852, ctc_loss=0.1102, over 19593.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.279, pruned_loss=0.05541, ctc_loss=0.1043, over 3856062.68 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 32.0 +2024-08-29 16:04:40,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.91 vs. 
limit=22.5 +2024-08-29 16:04:46,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=195776.0, ans=0.1 +2024-08-29 16:05:02,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=195829.33333333334, ans=0.0 +2024-08-29 16:05:05,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=195829.33333333334, ans=0.2 +2024-08-29 16:05:11,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.54 vs. limit=15.0 +2024-08-29 16:05:14,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=195882.66666666666, ans=0.035 +2024-08-29 16:05:15,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=195882.66666666666, ans=0.025 +2024-08-29 16:05:35,629 INFO [train.py:1114] (2/4) Epoch 15, batch 1900, loss[loss=0.2004, simple_loss=0.2839, pruned_loss=0.04155, ctc_loss=0.08457, over 19673.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2793, pruned_loss=0.05531, ctc_loss=0.1042, over 3860767.44 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 32.0 +2024-08-29 16:05:35,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195989.33333333334, ans=0.125 +2024-08-29 16:05:36,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195989.33333333334, ans=0.1 +2024-08-29 16:05:40,981 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.724e+02 2.102e+02 3.115e+02 5.340e+02, threshold=4.204e+02, percent-clipped=3.0 +2024-08-29 16:05:46,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.29 vs. limit=15.0 +2024-08-29 16:05:55,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=196042.66666666666, ans=0.125 +2024-08-29 16:06:01,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=15.0 +2024-08-29 16:06:12,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=196149.33333333334, ans=0.5 +2024-08-29 16:06:32,317 INFO [train.py:1114] (2/4) Epoch 15, batch 1950, loss[loss=0.1898, simple_loss=0.2597, pruned_loss=0.04342, ctc_loss=0.08289, over 19582.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2804, pruned_loss=0.05547, ctc_loss=0.1043, over 3870328.65 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 32.0 +2024-08-29 16:06:35,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196256.0, ans=0.125 +2024-08-29 16:06:52,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196309.33333333334, ans=0.125 +2024-08-29 16:07:35,229 INFO [train.py:1114] (2/4) Epoch 15, batch 2000, loss[loss=0.1816, simple_loss=0.2425, pruned_loss=0.0434, ctc_loss=0.08482, over 19658.00 frames. 
], tot_loss[loss=0.2174, simple_loss=0.2809, pruned_loss=0.0559, ctc_loss=0.1052, over 3855587.52 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0 +2024-08-29 16:07:41,140 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.618e+02 1.832e+02 2.132e+02 4.362e+02, threshold=3.664e+02, percent-clipped=1.0 +2024-08-29 16:07:42,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=196522.66666666666, ans=0.125 +2024-08-29 16:08:32,344 INFO [train.py:1114] (2/4) Epoch 15, batch 2050, loss[loss=0.1999, simple_loss=0.2554, pruned_loss=0.05251, ctc_loss=0.09842, over 19698.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2797, pruned_loss=0.0555, ctc_loss=0.1045, over 3851679.79 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0 +2024-08-29 16:08:48,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196842.66666666666, ans=0.125 +2024-08-29 16:09:15,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197002.66666666666, ans=0.1 +2024-08-29 16:09:16,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197002.66666666666, ans=0.125 +2024-08-29 16:09:17,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.53 vs. limit=22.5 +2024-08-29 16:09:20,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197002.66666666666, ans=0.125 +2024-08-29 16:09:27,758 INFO [train.py:1114] (2/4) Epoch 15, batch 2100, loss[loss=0.2023, simple_loss=0.2783, pruned_loss=0.04464, ctc_loss=0.09274, over 19786.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.279, pruned_loss=0.05493, ctc_loss=0.1033, over 3858826.06 frames. ], batch size: 54, lr: 9.88e-03, grad_scale: 32.0 +2024-08-29 16:09:33,398 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.691e+02 1.929e+02 2.354e+02 3.359e+02, threshold=3.858e+02, percent-clipped=0.0 +2024-08-29 16:09:46,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197109.33333333334, ans=0.1 +2024-08-29 16:09:49,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=197162.66666666666, ans=0.125 +2024-08-29 16:09:51,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.44 vs. limit=22.5 +2024-08-29 16:10:26,363 INFO [train.py:1114] (2/4) Epoch 15, batch 2150, loss[loss=0.1985, simple_loss=0.2646, pruned_loss=0.04798, ctc_loss=0.09123, over 19846.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2786, pruned_loss=0.05498, ctc_loss=0.1032, over 3869447.86 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0 +2024-08-29 16:10:36,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=197322.66666666666, ans=0.0 +2024-08-29 16:10:37,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. 
limit=15.0 +2024-08-29 16:10:39,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=197376.0, ans=0.2 +2024-08-29 16:11:58,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=197429.33333333334, ans=0.125 +2024-08-29 16:11:58,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.62 vs. limit=15.0 +2024-08-29 16:12:11,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197482.66666666666, ans=0.125 +2024-08-29 16:12:14,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-08-29 16:12:18,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=197482.66666666666, ans=0.125 +2024-08-29 16:12:27,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=197536.0, ans=0.125 +2024-08-29 16:12:31,431 INFO [train.py:1114] (2/4) Epoch 15, batch 2200, loss[loss=0.2221, simple_loss=0.2927, pruned_loss=0.05426, ctc_loss=0.1076, over 19603.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2787, pruned_loss=0.05505, ctc_loss=0.1035, over 3868541.93 frames. ], batch size: 57, lr: 9.87e-03, grad_scale: 32.0 +2024-08-29 16:12:36,863 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.787e+02 2.154e+02 2.730e+02 5.047e+02, threshold=4.308e+02, percent-clipped=4.0 +2024-08-29 16:12:38,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=197589.33333333334, ans=0.0 +2024-08-29 16:13:29,266 INFO [train.py:1114] (2/4) Epoch 15, batch 2250, loss[loss=0.2184, simple_loss=0.2852, pruned_loss=0.05588, ctc_loss=0.09942, over 19601.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.279, pruned_loss=0.05501, ctc_loss=0.1034, over 3867770.91 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 32.0 +2024-08-29 16:13:35,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=197856.0, ans=0.025 +2024-08-29 16:14:00,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=197909.33333333334, ans=0.125 +2024-08-29 16:14:03,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=22.5 +2024-08-29 16:14:09,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=197962.66666666666, ans=0.95 +2024-08-29 16:14:15,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. 
limit=6.0 +2024-08-29 16:14:35,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=198069.33333333334, ans=0.2 +2024-08-29 16:14:39,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=198069.33333333334, ans=0.125 +2024-08-29 16:14:45,316 INFO [train.py:1114] (2/4) Epoch 15, batch 2300, loss[loss=0.1915, simple_loss=0.2676, pruned_loss=0.04184, ctc_loss=0.0795, over 19491.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2786, pruned_loss=0.05524, ctc_loss=0.1038, over 3860747.25 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 32.0 +2024-08-29 16:14:45,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198122.66666666666, ans=0.1 +2024-08-29 16:14:46,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=198122.66666666666, ans=0.025 +2024-08-29 16:14:50,773 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.686e+02 1.986e+02 2.467e+02 4.553e+02, threshold=3.971e+02, percent-clipped=1.0 +2024-08-29 16:14:55,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0 +2024-08-29 16:15:23,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=198282.66666666666, ans=0.0 +2024-08-29 16:15:43,152 INFO [train.py:1114] (2/4) Epoch 15, batch 2350, loss[loss=0.2244, simple_loss=0.2853, pruned_loss=0.05981, ctc_loss=0.1095, over 19656.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2784, pruned_loss=0.05521, ctc_loss=0.1038, over 3863738.90 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 64.0 +2024-08-29 16:15:52,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=198389.33333333334, ans=0.0 +2024-08-29 16:16:01,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198442.66666666666, ans=0.125 +2024-08-29 16:16:42,894 INFO [train.py:1114] (2/4) Epoch 15, batch 2400, loss[loss=0.2204, simple_loss=0.2922, pruned_loss=0.054, ctc_loss=0.1014, over 19323.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2809, pruned_loss=0.05616, ctc_loss=0.1054, over 3857244.99 frames. 
], batch size: 71, lr: 9.85e-03, grad_scale: 64.0 +2024-08-29 16:16:47,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=198656.0, ans=0.125 +2024-08-29 16:16:48,397 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.659e+02 1.944e+02 2.492e+02 3.873e+02, threshold=3.888e+02, percent-clipped=0.0 +2024-08-29 16:16:49,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=198656.0, ans=0.125 +2024-08-29 16:16:54,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=198709.33333333334, ans=0.1 +2024-08-29 16:17:55,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=198709.33333333334, ans=0.125 +2024-08-29 16:17:55,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198709.33333333334, ans=0.125 +2024-08-29 16:17:59,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=198762.66666666666, ans=0.0 +2024-08-29 16:18:13,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=198816.0, ans=0.125 +2024-08-29 16:18:33,178 INFO [train.py:1114] (2/4) Epoch 15, batch 2450, loss[loss=0.2908, simple_loss=0.3216, pruned_loss=0.09428, ctc_loss=0.1788, over 13574.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2849, pruned_loss=0.05891, ctc_loss=0.1114, over 3730810.64 frames. ], batch size: 140, lr: 9.84e-03, grad_scale: 32.0 +2024-08-29 16:18:41,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198922.66666666666, ans=0.0 +2024-08-29 16:18:44,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.79 vs. limit=15.0 +2024-08-29 16:18:55,159 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.04 vs. limit=15.0 +2024-08-29 16:19:00,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=199029.33333333334, ans=0.5 +2024-08-29 16:19:13,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-08-29 16:20:18,425 INFO [train.py:1114] (2/4) Epoch 16, batch 0, loss[loss=0.2139, simple_loss=0.2757, pruned_loss=0.05488, ctc_loss=0.106, over 19810.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2757, pruned_loss=0.05488, ctc_loss=0.106, over 19810.00 frames. ], batch size: 49, lr: 9.52e-03, grad_scale: 32.0 +2024-08-29 16:20:18,425 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 16:20:28,424 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1867, simple_loss=0.2755, pruned_loss=0.03636, ctc_loss=0.06317, over 944034.00 frames. 
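A pattern worth noting in the `optim.py` WARNING lines throughout this log: the reported `threshold` is consistently `Clipping_scale` times the median of the five logged grad-norm quartiles (e.g. 2.0 × 2.430e+02 = 4.860e+02, 2.0 × 2.068e+02 ≈ 4.135e+02), i.e. the clipping threshold adapts to a running estimate of typical gradient norms rather than being a fixed constant. The sketch below only illustrates that observed relationship; the function and parameter names (`clip_stats`, `clipping_scale`, `window`) are illustrative assumptions, not the actual icefall `optim.py` implementation.

```python
# Illustrative sketch (assumption, not the actual icefall optim.py code) of the
# median-based clipping statistics the WARNING lines appear to report: the
# logged threshold equals clipping_scale * median of recent grad norms, and
# percent-clipped is the fraction of norms in the window above that threshold.
import numpy as np

def clip_stats(grad_norms: np.ndarray, clipping_scale: float = 2.0):
    """Return (quartiles, threshold, percent_clipped) for a window of grad norms."""
    # Five values, matching the log format: min, Q1, median, Q3, max.
    quartiles = np.percentile(grad_norms, [0, 25, 50, 75, 100])
    threshold = clipping_scale * quartiles[2]            # scale times the median
    percent_clipped = 100.0 * np.mean(grad_norms > threshold)
    return quartiles, threshold, percent_clipped

# Example with synthetic grad norms (window = norms from recent batches):
rng = np.random.default_rng(0)
window = rng.lognormal(mean=5.3, sigma=0.3, size=1000)
q, thr, pct = clip_stats(window)
print(q, thr, pct)  # gradients whose norm exceeds `thr` would be scaled down
```

Deriving the threshold from the median this way means no hand-tuned clip value is needed: as training stabilizes and typical grad norms shrink (visible in the falling quartiles across these epochs), the threshold tightens with them, which is why `percent-clipped` stays in the low single digits throughout the log.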
+2024-08-29 16:20:28,424 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13710MB +2024-08-29 16:20:34,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=199130.66666666666, ans=0.125 +2024-08-29 16:20:48,964 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.810e+02 1.998e+02 2.276e+02 3.528e+02, threshold=3.997e+02, percent-clipped=0.0 +2024-08-29 16:21:26,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=199344.0, ans=0.125 +2024-08-29 16:21:28,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=199344.0, ans=0.125 +2024-08-29 16:21:30,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199344.0, ans=0.125 +2024-08-29 16:21:32,416 INFO [train.py:1114] (2/4) Epoch 16, batch 50, loss[loss=0.1996, simple_loss=0.2655, pruned_loss=0.04806, ctc_loss=0.09386, over 19693.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.284, pruned_loss=0.05776, ctc_loss=0.11, over 845655.20 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0 +2024-08-29 16:21:36,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=199397.33333333334, ans=0.0 +2024-08-29 16:21:55,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=199504.0, ans=0.0 +2024-08-29 16:22:03,629 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.00 vs. limit=22.5 +2024-08-29 16:22:27,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=199610.66666666666, ans=0.2 +2024-08-29 16:22:40,107 INFO [train.py:1114] (2/4) Epoch 16, batch 100, loss[loss=0.1939, simple_loss=0.2585, pruned_loss=0.04715, ctc_loss=0.08736, over 19733.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2835, pruned_loss=0.05693, ctc_loss=0.1078, over 1500622.48 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0 +2024-08-29 16:23:05,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=199717.33333333334, ans=0.125 +2024-08-29 16:23:06,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.97 vs. limit=15.0 +2024-08-29 16:23:08,066 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.352e+02 1.815e+02 2.137e+02 2.569e+02 4.869e+02, threshold=4.274e+02, percent-clipped=1.0 +2024-08-29 16:23:22,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=199770.66666666666, ans=6.0 +2024-08-29 16:23:28,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=199824.0, ans=0.0 +2024-08-29 16:23:29,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=199824.0, ans=0.125 +2024-08-29 16:30:57,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.47 vs. 
limit=15.0 +2024-08-29 16:37:11,226 INFO [train.py:1114] (2/4) Epoch 16, batch 150, loss[loss=0.2009, simple_loss=0.2533, pruned_loss=0.05439, ctc_loss=0.09929, over 19735.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.28, pruned_loss=0.05486, ctc_loss=0.1036, over 2028424.02 frames. ], batch size: 47, lr: 9.50e-03, grad_scale: 32.0 +2024-08-29 16:43:33,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=200037.33333333334, ans=0.0 +2024-08-29 16:47:05,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=200144.0, ans=0.125 +2024-08-29 16:48:09,841 INFO [train.py:1114] (2/4) Epoch 16, batch 200, loss[loss=0.248, simple_loss=0.3041, pruned_loss=0.06928, ctc_loss=0.1334, over 18244.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2796, pruned_loss=0.05486, ctc_loss=0.1032, over 2435460.16 frames. ], batch size: 85, lr: 9.49e-03, grad_scale: 32.0 +2024-08-29 16:49:57,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=200197.33333333334, ans=0.125 +2024-08-29 16:49:57,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=200197.33333333334, ans=0.125 +2024-08-29 16:49:58,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=200197.33333333334, ans=0.0 +2024-08-29 16:53:29,822 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.834e+02 2.227e+02 2.815e+02 4.534e+02, threshold=4.454e+02, percent-clipped=1.0 +2024-08-29 16:53:35,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.63 vs. limit=15.0 +2024-08-29 16:54:00,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200304.0, ans=0.1 +2024-08-29 16:54:08,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=200304.0, ans=0.0 +2024-08-29 16:54:29,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.39 vs. limit=15.0 +2024-08-29 16:55:30,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=200357.33333333334, ans=0.125 +2024-08-29 16:56:00,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=200357.33333333334, ans=0.2 +2024-08-29 16:56:29,762 INFO [train.py:1114] (2/4) Epoch 16, batch 250, loss[loss=0.2318, simple_loss=0.3022, pruned_loss=0.05891, ctc_loss=0.1088, over 19404.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2799, pruned_loss=0.05514, ctc_loss=0.1033, over 2755380.50 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0 +2024-08-29 16:56:47,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. 
limit=12.0
+2024-08-29 16:58:24,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=200464.0, ans=0.0
+2024-08-29 16:59:04,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0
+2024-08-29 17:01:55,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=200677.33333333334, ans=0.125
+2024-08-29 17:03:13,502 INFO [train.py:1114] (2/4) Epoch 16, batch 300, loss[loss=0.2498, simple_loss=0.3113, pruned_loss=0.069, ctc_loss=0.1257, over 19539.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2781, pruned_loss=0.05411, ctc_loss=0.1018, over 3000463.33 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0
+2024-08-29 17:03:36,033 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.663e+02 1.972e+02 2.398e+02 4.674e+02, threshold=3.943e+02, percent-clipped=1.0
+2024-08-29 17:04:35,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200837.33333333334, ans=0.125
+2024-08-29 17:08:30,909 INFO [train.py:1114] (2/4) Epoch 16, batch 350, loss[loss=0.1857, simple_loss=0.2519, pruned_loss=0.04225, ctc_loss=0.08741, over 19766.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2784, pruned_loss=0.05417, ctc_loss=0.102, over 3190334.08 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0
+2024-08-29 17:12:34,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=201157.33333333334, ans=0.2
+2024-08-29 17:13:14,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=201210.66666666666, ans=10.0
+2024-08-29 17:13:14,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=201210.66666666666, ans=0.2
+2024-08-29 17:13:17,636 INFO [train.py:1114] (2/4) Epoch 16, batch 400, loss[loss=0.228, simple_loss=0.2928, pruned_loss=0.05946, ctc_loss=0.1109, over 19496.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2778, pruned_loss=0.05402, ctc_loss=0.1016, over 3342254.71 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0
+2024-08-29 17:13:45,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=201317.33333333334, ans=0.125
+2024-08-29 17:14:43,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0
+2024-08-29 17:15:51,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.714e+02 1.905e+02 2.508e+02 3.565e+02, threshold=3.811e+02, percent-clipped=0.0
+2024-08-29 17:16:24,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=201370.66666666666, ans=0.1
+2024-08-29 17:17:07,856 INFO [train.py:1114] (2/4) Epoch 16, batch 450, loss[loss=0.212, simple_loss=0.2857, pruned_loss=0.04991, ctc_loss=0.09618, over 19604.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2784, pruned_loss=0.05423, ctc_loss=0.102, over 3451391.03 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0
+2024-08-29 17:21:12,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=201690.66666666666, ans=0.5
+2024-08-29 17:21:57,475 INFO [train.py:1114] (2/4) Epoch 16, batch 500, loss[loss=0.2285, simple_loss=0.2895, pruned_loss=0.06116, ctc_loss=0.1127, over 19690.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2771, pruned_loss=0.0536, ctc_loss=0.101, over 3546751.85 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0
+2024-08-29 17:22:02,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=201797.33333333334, ans=0.125
+2024-08-29 17:22:46,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.689e+02 2.169e+02 2.570e+02 5.370e+02, threshold=4.338e+02, percent-clipped=3.0
+2024-08-29 17:23:43,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=201957.33333333334, ans=0.125
+2024-08-29 17:23:54,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.50 vs. limit=15.0
+2024-08-29 17:23:58,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=202010.66666666666, ans=0.0
+2024-08-29 17:24:00,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0
+2024-08-29 17:24:02,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.70 vs. limit=22.5
+2024-08-29 17:24:02,823 INFO [train.py:1114] (2/4) Epoch 16, batch 550, loss[loss=0.224, simple_loss=0.2926, pruned_loss=0.05677, ctc_loss=0.1046, over 19263.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2781, pruned_loss=0.05407, ctc_loss=0.1019, over 3607782.23 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0
+2024-08-29 17:24:07,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202064.0, ans=0.1
+2024-08-29 17:24:26,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=202117.33333333334, ans=0.2
+2024-08-29 17:24:48,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=202170.66666666666, ans=0.0
+2024-08-29 17:25:01,332 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 17:25:07,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=202277.33333333334, ans=0.0
+2024-08-29 17:25:15,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.24 vs. limit=22.5
+2024-08-29 17:25:21,518 INFO [train.py:1114] (2/4) Epoch 16, batch 600, loss[loss=0.2229, simple_loss=0.2816, pruned_loss=0.06015, ctc_loss=0.1097, over 19406.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2775, pruned_loss=0.05362, ctc_loss=0.1012, over 3666050.09 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0
+2024-08-29 17:25:21,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=202330.66666666666, ans=10.0
+2024-08-29 17:25:21,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.0
+2024-08-29 17:26:15,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=202330.66666666666, ans=15.0
+2024-08-29 17:26:28,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202384.0, ans=0.1
+2024-08-29 17:27:04,571 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.652e+02 1.934e+02 2.290e+02 3.719e+02, threshold=3.867e+02, percent-clipped=0.0
+2024-08-29 17:27:59,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=202437.33333333334, ans=0.0
+2024-08-29 17:31:03,799 INFO [train.py:1114] (2/4) Epoch 16, batch 650, loss[loss=0.2005, simple_loss=0.2713, pruned_loss=0.04744, ctc_loss=0.08696, over 19768.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2764, pruned_loss=0.05311, ctc_loss=0.1, over 3716557.92 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0
+2024-08-29 17:32:10,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=202597.33333333334, ans=0.125
+2024-08-29 17:32:16,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=202597.33333333334, ans=0.07
+2024-08-29 17:32:51,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=202757.33333333334, ans=0.0
+2024-08-29 17:33:12,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=202810.66666666666, ans=0.0
+2024-08-29 17:33:47,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=202810.66666666666, ans=0.125
+2024-08-29 17:33:59,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=202810.66666666666, ans=0.0
+2024-08-29 17:34:02,139 INFO [train.py:1114] (2/4) Epoch 16, batch 700, loss[loss=0.1926, simple_loss=0.2673, pruned_loss=0.0425, ctc_loss=0.08229, over 19714.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2766, pruned_loss=0.05315, ctc_loss=0.1003, over 3747994.16 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0
+2024-08-29 17:35:12,329 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.755e+02 2.110e+02 2.761e+02 5.047e+02, threshold=4.220e+02, percent-clipped=5.0
+2024-08-29 17:35:36,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.43 vs. limit=10.0
+2024-08-29 17:36:26,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=202970.66666666666, ans=0.2
+2024-08-29 17:41:52,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203077.33333333334, ans=0.125
+2024-08-29 17:42:01,660 INFO [train.py:1114] (2/4) Epoch 16, batch 750, loss[loss=0.2288, simple_loss=0.2881, pruned_loss=0.06193, ctc_loss=0.1141, over 19509.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2757, pruned_loss=0.05286, ctc_loss=0.09961, over 3774184.24 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 32.0
+2024-08-29 17:42:07,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=203130.66666666666, ans=0.125
+2024-08-29 17:42:07,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=203130.66666666666, ans=0.0
+2024-08-29 17:42:15,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=203130.66666666666, ans=0.125
+2024-08-29 17:42:33,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=203237.33333333334, ans=0.2
+2024-08-29 17:46:05,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.90 vs. limit=10.0
+2024-08-29 17:46:10,166 INFO [train.py:1114] (2/4) Epoch 16, batch 800, loss[loss=0.1791, simple_loss=0.2471, pruned_loss=0.04081, ctc_loss=0.0735, over 19413.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2759, pruned_loss=0.05308, ctc_loss=0.1001, over 3795165.12 frames. ], batch size: 48, lr: 9.42e-03, grad_scale: 32.0
+2024-08-29 17:48:15,893 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.745e+02 2.069e+02 2.556e+02 3.770e+02, threshold=4.138e+02, percent-clipped=0.0
+2024-08-29 17:49:06,932 INFO [train.py:1114] (2/4) Epoch 16, batch 850, loss[loss=0.2089, simple_loss=0.2795, pruned_loss=0.05105, ctc_loss=0.0903, over 19676.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2765, pruned_loss=0.05346, ctc_loss=0.1008, over 3814723.49 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0
+2024-08-29 17:49:08,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0
+2024-08-29 17:49:10,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=203664.0, ans=0.025
+2024-08-29 17:49:21,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203717.33333333334, ans=0.125
+2024-08-29 17:49:24,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=203717.33333333334, ans=0.0
+2024-08-29 17:49:46,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=203824.0, ans=0.05
+2024-08-29 17:50:21,093 INFO [train.py:1114] (2/4) Epoch 16, batch 900, loss[loss=0.2114, simple_loss=0.268, pruned_loss=0.05651, ctc_loss=0.1047, over 19797.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2773, pruned_loss=0.05408, ctc_loss=0.1018, over 3819248.79 frames. ], batch size: 49, lr: 9.41e-03, grad_scale: 32.0
+2024-08-29 17:50:31,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203930.66666666666, ans=0.1
+2024-08-29 17:50:38,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=203930.66666666666, ans=0.125
+2024-08-29 17:50:48,718 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.676e+02 1.827e+02 2.350e+02 4.099e+02, threshold=3.653e+02, percent-clipped=0.0
+2024-08-29 17:53:24,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204144.0, ans=0.1
+2024-08-29 17:53:28,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=204144.0, ans=0.0
+2024-08-29 17:53:34,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0
+2024-08-29 17:53:36,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=204197.33333333334, ans=12.0
+2024-08-29 17:53:37,487 INFO [train.py:1114] (2/4) Epoch 16, batch 950, loss[loss=0.2416, simple_loss=0.2811, pruned_loss=0.07375, ctc_loss=0.1366, over 19492.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2783, pruned_loss=0.05491, ctc_loss=0.1033, over 3821051.82 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0
+2024-08-29 17:54:09,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204197.33333333334, ans=0.0
+2024-08-29 17:54:09,702 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.19 vs. limit=15.0
+2024-08-29 17:55:45,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=204464.0, ans=0.0
+2024-08-29 17:55:46,643 INFO [train.py:1114] (2/4) Epoch 16, batch 1000, loss[loss=0.1847, simple_loss=0.2602, pruned_loss=0.03905, ctc_loss=0.0776, over 19853.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2788, pruned_loss=0.05503, ctc_loss=0.1037, over 3817152.03 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0
+2024-08-29 17:55:59,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=204517.33333333334, ans=0.0
+2024-08-29 17:56:07,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.649e+02 1.918e+02 2.268e+02 3.238e+02, threshold=3.836e+02, percent-clipped=0.0
+2024-08-29 17:56:53,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=204570.66666666666, ans=0.0
+2024-08-29 17:57:39,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=204624.0, ans=0.0
+2024-08-29 17:57:49,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=204677.33333333334, ans=0.2
+2024-08-29 17:57:54,935 INFO [train.py:1114] (2/4) Epoch 16, batch 1050, loss[loss=0.2255, simple_loss=0.2934, pruned_loss=0.05762, ctc_loss=0.1062, over 19846.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2783, pruned_loss=0.05492, ctc_loss=0.1033, over 3822679.86 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 32.0
+2024-08-29 17:59:15,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=204784.0, ans=0.0
+2024-08-29 17:59:20,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=204784.0, ans=0.025
+2024-08-29 17:59:50,030 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:00:15,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.21 vs. limit=15.0
+2024-08-29 18:00:18,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=204890.66666666666, ans=0.95
+2024-08-29 18:00:29,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=204890.66666666666, ans=10.0
+2024-08-29 18:00:30,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.66 vs. limit=22.5
+2024-08-29 18:00:37,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=204944.0, ans=0.2
+2024-08-29 18:00:45,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.23 vs. limit=15.0
+2024-08-29 18:00:53,274 INFO [train.py:1114] (2/4) Epoch 16, batch 1100, loss[loss=0.1937, simple_loss=0.2653, pruned_loss=0.04562, ctc_loss=0.07705, over 19591.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2776, pruned_loss=0.05435, ctc_loss=0.1023, over 3829363.67 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 16.0
+2024-08-29 18:00:53,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.00 vs. limit=22.5
+2024-08-29 18:01:14,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.16 vs. limit=22.5
+2024-08-29 18:01:16,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205050.66666666666, ans=0.125
+2024-08-29 18:01:21,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205050.66666666666, ans=0.1
+2024-08-29 18:01:27,926 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.389e+02 1.694e+02 1.874e+02 2.325e+02 3.063e+02, threshold=3.748e+02, percent-clipped=0.0
+2024-08-29 18:01:40,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=205104.0, ans=0.0
+2024-08-29 18:02:31,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205210.66666666666, ans=0.125
+2024-08-29 18:02:43,482 INFO [train.py:1114] (2/4) Epoch 16, batch 1150, loss[loss=0.1997, simple_loss=0.2617, pruned_loss=0.0494, ctc_loss=0.09744, over 19594.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2773, pruned_loss=0.05428, ctc_loss=0.102, over 3828213.04 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 16.0
+2024-08-29 18:02:49,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.85 vs. limit=15.0
+2024-08-29 18:03:01,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=205317.33333333334, ans=0.025
+2024-08-29 18:03:01,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.85 vs. limit=10.0
+2024-08-29 18:03:09,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=205370.66666666666, ans=0.0
+2024-08-29 18:03:25,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205424.0, ans=0.125
+2024-08-29 18:03:26,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=205424.0, ans=0.125
+2024-08-29 18:03:33,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=205477.33333333334, ans=0.125
+2024-08-29 18:03:34,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=205477.33333333334, ans=0.125
+2024-08-29 18:03:40,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=205477.33333333334, ans=0.125
+2024-08-29 18:03:45,087 INFO [train.py:1114] (2/4) Epoch 16, batch 1200, loss[loss=0.2324, simple_loss=0.2974, pruned_loss=0.06049, ctc_loss=0.1163, over 19854.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2779, pruned_loss=0.05427, ctc_loss=0.1021, over 3824553.84 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0
+2024-08-29 18:03:47,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205530.66666666666, ans=0.125
+2024-08-29 18:03:54,174 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.15 vs. limit=22.5
+2024-08-29 18:04:06,319 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.725e+02 2.012e+02 2.470e+02 3.418e+02, threshold=4.024e+02, percent-clipped=0.0
+2024-08-29 18:04:12,362 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:04:50,730 INFO [train.py:1114] (2/4) Epoch 16, batch 1250, loss[loss=0.2212, simple_loss=0.2862, pruned_loss=0.05793, ctc_loss=0.1008, over 19543.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2781, pruned_loss=0.05422, ctc_loss=0.1019, over 3843199.45 frames. ], batch size: 61, lr: 9.37e-03, grad_scale: 32.0
+2024-08-29 18:05:16,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=205797.33333333334, ans=0.0
+2024-08-29 18:05:32,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=205904.0, ans=0.2
+2024-08-29 18:05:38,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=205904.0, ans=0.0
+2024-08-29 18:05:39,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=205904.0, ans=0.125
+2024-08-29 18:05:45,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.81 vs. limit=10.0
+2024-08-29 18:05:58,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=205957.33333333334, ans=0.025
+2024-08-29 18:06:21,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.47 vs. limit=15.0
+2024-08-29 18:06:22,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=205957.33333333334, ans=0.0
+2024-08-29 18:06:35,888 INFO [train.py:1114] (2/4) Epoch 16, batch 1300, loss[loss=0.2713, simple_loss=0.3177, pruned_loss=0.08123, ctc_loss=0.1563, over 18896.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2773, pruned_loss=0.05386, ctc_loss=0.1012, over 3846464.53 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0
+2024-08-29 18:06:38,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206064.0, ans=0.1
+2024-08-29 18:06:46,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=15.0
+2024-08-29 18:06:57,547 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.716e+02 2.090e+02 2.690e+02 4.268e+02, threshold=4.180e+02, percent-clipped=3.0
+2024-08-29 18:07:34,536 INFO [train.py:1114] (2/4) Epoch 16, batch 1350, loss[loss=0.2206, simple_loss=0.2834, pruned_loss=0.05728, ctc_loss=0.1084, over 19779.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2771, pruned_loss=0.05369, ctc_loss=0.101, over 3856788.89 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 32.0
+2024-08-29 18:07:54,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. limit=6.0
+2024-08-29 18:08:14,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206437.33333333334, ans=0.1
+2024-08-29 18:09:47,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.65 vs. limit=15.0
+2024-08-29 18:10:08,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=206490.66666666666, ans=0.0
+2024-08-29 18:10:12,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=206544.0, ans=0.125
+2024-08-29 18:10:56,042 INFO [train.py:1114] (2/4) Epoch 16, batch 1400, loss[loss=0.1717, simple_loss=0.2377, pruned_loss=0.03885, ctc_loss=0.07008, over 19676.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2772, pruned_loss=0.05392, ctc_loss=0.1013, over 3863689.80 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 32.0
+2024-08-29 18:11:02,537 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.61 vs. limit=15.0
+2024-08-29 18:13:15,203 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.659e+02 1.830e+02 2.117e+02 3.619e+02, threshold=3.659e+02, percent-clipped=0.0
+2024-08-29 18:13:25,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=206704.0, ans=0.125
+2024-08-29 18:13:26,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=206704.0, ans=0.125
+2024-08-29 18:14:17,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0
+2024-08-29 18:14:29,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=206757.33333333334, ans=0.0
+2024-08-29 18:14:35,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=206810.66666666666, ans=0.0
+2024-08-29 18:14:36,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=206810.66666666666, ans=0.125
+2024-08-29 18:14:42,496 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:14:49,664 INFO [train.py:1114] (2/4) Epoch 16, batch 1450, loss[loss=0.2283, simple_loss=0.2874, pruned_loss=0.06094, ctc_loss=0.1182, over 19694.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2777, pruned_loss=0.05415, ctc_loss=0.1018, over 3862915.83 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0
+2024-08-29 18:15:17,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=206917.33333333334, ans=0.05
+2024-08-29 18:15:22,861 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.30 vs. limit=15.0
+2024-08-29 18:15:28,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=206970.66666666666, ans=0.125
+2024-08-29 18:16:10,738 INFO [train.py:1114] (2/4) Epoch 16, batch 1500, loss[loss=0.2188, simple_loss=0.2884, pruned_loss=0.05343, ctc_loss=0.1059, over 19577.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2775, pruned_loss=0.05383, ctc_loss=0.1012, over 3862313.53 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0
+2024-08-29 18:16:22,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. limit=6.0
+2024-08-29 18:16:32,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.680e+02 1.893e+02 2.490e+02 3.994e+02, threshold=3.786e+02, percent-clipped=1.0
+2024-08-29 18:16:50,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207290.66666666666, ans=0.1
+2024-08-29 18:17:17,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207344.0, ans=0.0
+2024-08-29 18:17:19,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=207344.0, ans=0.1
+2024-08-29 18:17:31,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=207344.0, ans=0.0
+2024-08-29 18:17:34,476 INFO [train.py:1114] (2/4) Epoch 16, batch 1550, loss[loss=0.2143, simple_loss=0.2844, pruned_loss=0.05306, ctc_loss=0.09529, over 19614.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2777, pruned_loss=0.05406, ctc_loss=0.1017, over 3846657.29 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0
+2024-08-29 18:17:45,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=207450.66666666666, ans=0.125
+2024-08-29 18:17:47,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207450.66666666666, ans=0.1
+2024-08-29 18:18:00,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=207504.0, ans=0.0
+2024-08-29 18:19:22,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=207504.0, ans=0.125
+2024-08-29 18:19:46,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=207610.66666666666, ans=0.125
+2024-08-29 18:19:55,343 INFO [train.py:1114] (2/4) Epoch 16, batch 1600, loss[loss=0.2272, simple_loss=0.2937, pruned_loss=0.0584, ctc_loss=0.1098, over 19857.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.277, pruned_loss=0.05363, ctc_loss=0.101, over 3836976.80 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0
+2024-08-29 18:21:45,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.98 vs. limit=15.0
+2024-08-29 18:21:48,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=207717.33333333334, ans=0.125
+2024-08-29 18:21:55,744 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.773e+02 1.965e+02 2.508e+02 5.321e+02, threshold=3.930e+02, percent-clipped=3.0
+2024-08-29 18:21:56,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=207717.33333333334, ans=0.125
+2024-08-29 18:22:13,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207824.0, ans=0.0
+2024-08-29 18:22:14,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=207824.0, ans=0.2
+2024-08-29 18:22:21,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.86 vs. limit=10.0
+2024-08-29 18:22:51,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=207877.33333333334, ans=0.07
+2024-08-29 18:23:01,504 INFO [train.py:1114] (2/4) Epoch 16, batch 1650, loss[loss=0.2119, simple_loss=0.2813, pruned_loss=0.05271, ctc_loss=0.09248, over 19662.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.277, pruned_loss=0.05346, ctc_loss=0.1006, over 3832569.65 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0
+2024-08-29 18:23:06,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.08 vs. limit=6.0
+2024-08-29 18:23:09,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=207930.66666666666, ans=0.2
+2024-08-29 18:23:47,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=207984.0, ans=0.125
+2024-08-29 18:23:48,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.10 vs. limit=15.0
+2024-08-29 18:24:17,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=208037.33333333334, ans=0.0
+2024-08-29 18:24:40,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=208090.66666666666, ans=0.2
+2024-08-29 18:24:58,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.73 vs. limit=12.0
+2024-08-29 18:26:13,690 INFO [train.py:1114] (2/4) Epoch 16, batch 1700, loss[loss=0.2063, simple_loss=0.2625, pruned_loss=0.0544, ctc_loss=0.1031, over 19681.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2767, pruned_loss=0.05316, ctc_loss=0.1, over 3846790.89 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 32.0
+2024-08-29 18:26:17,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208197.33333333334, ans=0.125
+2024-08-29 18:26:26,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=208250.66666666666, ans=0.1
+2024-08-29 18:26:30,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=15.0
+2024-08-29 18:26:34,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.759e+02 2.180e+02 2.878e+02 5.111e+02, threshold=4.361e+02, percent-clipped=4.0
+2024-08-29 18:26:35,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208304.0, ans=0.125
+2024-08-29 18:27:10,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=208410.66666666666, ans=0.0
+2024-08-29 18:27:13,922 INFO [train.py:1114] (2/4) Epoch 16, batch 1750, loss[loss=0.191, simple_loss=0.2499, pruned_loss=0.04782, ctc_loss=0.09117, over 19663.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2763, pruned_loss=0.053, ctc_loss=0.09979, over 3850963.00 frames. ], batch size: 45, lr: 9.31e-03, grad_scale: 32.0
+2024-08-29 18:27:52,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=208517.33333333334, ans=12.0
+2024-08-29 18:29:08,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208570.66666666666, ans=0.125
+2024-08-29 18:29:11,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208570.66666666666, ans=0.1
+2024-08-29 18:29:19,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.24 vs. limit=15.0
+2024-08-29 18:29:20,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=208624.0, ans=0.2
+2024-08-29 18:30:02,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208624.0, ans=0.125
+2024-08-29 18:30:17,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=208730.66666666666, ans=0.125
+2024-08-29 18:30:17,807 INFO [train.py:1114] (2/4) Epoch 16, batch 1800, loss[loss=0.2243, simple_loss=0.2916, pruned_loss=0.05658, ctc_loss=0.1096, over 19613.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2766, pruned_loss=0.05304, ctc_loss=0.09997, over 3852362.47 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0
+2024-08-29 18:30:34,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=208784.0, ans=0.125
+2024-08-29 18:30:45,742 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.693e+02 1.985e+02 2.381e+02 4.228e+02, threshold=3.971e+02, percent-clipped=0.0
+2024-08-29 18:30:58,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208837.33333333334, ans=0.1
+2024-08-29 18:31:26,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.25 vs. limit=6.0
+2024-08-29 18:31:45,937 INFO [train.py:1114] (2/4) Epoch 16, batch 1850, loss[loss=0.2274, simple_loss=0.2931, pruned_loss=0.05955, ctc_loss=0.1063, over 19584.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2765, pruned_loss=0.053, ctc_loss=0.09977, over 3855814.81 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0
+2024-08-29 18:31:57,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.39 vs. limit=22.5
+2024-08-29 18:32:10,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=208997.33333333334, ans=0.125
+2024-08-29 18:32:13,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=208997.33333333334, ans=0.035
+2024-08-29 18:32:51,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=209157.33333333334, ans=0.125
+2024-08-29 18:32:51,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.30 vs. limit=15.0
+2024-08-29 18:32:53,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=209157.33333333334, ans=0.025
+2024-08-29 18:33:06,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=209210.66666666666, ans=22.5
+2024-08-29 18:33:08,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209210.66666666666, ans=0.125
+2024-08-29 18:33:17,354 INFO [train.py:1114] (2/4) Epoch 16, batch 1900, loss[loss=0.2241, simple_loss=0.2963, pruned_loss=0.05482, ctc_loss=0.1054, over 19668.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2774, pruned_loss=0.05345, ctc_loss=0.1005, over 3860371.95 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0
+2024-08-29 18:33:28,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=209317.33333333334, ans=0.125
+2024-08-29 18:33:36,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209317.33333333334, ans=0.125
+2024-08-29 18:33:36,244 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:33:40,790 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.785e+02 2.354e+02 2.964e+02 6.037e+02, threshold=4.708e+02, percent-clipped=9.0
+2024-08-29 18:34:03,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=209424.0, ans=0.04949747468305833
+2024-08-29 18:34:11,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=209477.33333333334, ans=0.125
+2024-08-29 18:34:30,807 INFO [train.py:1114] (2/4) Epoch 16, batch 1950, loss[loss=0.2008, simple_loss=0.2748, pruned_loss=0.04581, ctc_loss=0.08767, over 19581.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2785, pruned_loss=0.05372, ctc_loss=0.1011, over 3869552.69 frames. ], batch size: 52, lr: 9.29e-03, grad_scale: 32.0
+2024-08-29 18:34:40,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209530.66666666666, ans=0.125
+2024-08-29 18:35:03,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0
+2024-08-29 18:35:21,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.29 vs. limit=22.5
+2024-08-29 18:35:23,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=209690.66666666666, ans=0.125
+2024-08-29 18:35:27,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209690.66666666666, ans=0.125
+2024-08-29 18:35:34,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=209744.0, ans=0.2
+2024-08-29 18:35:42,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209744.0, ans=0.125
+2024-08-29 18:35:46,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.05 vs. limit=15.0
+2024-08-29 18:35:48,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209744.0, ans=0.1
+2024-08-29 18:35:51,674 INFO [train.py:1114] (2/4) Epoch 16, batch 2000, loss[loss=0.1655, simple_loss=0.234, pruned_loss=0.03539, ctc_loss=0.06541, over 19647.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2787, pruned_loss=0.05387, ctc_loss=0.1016, over 3853865.43 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0
+2024-08-29 18:35:52,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=209797.33333333334, ans=0.125
+2024-08-29 18:35:54,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=209797.33333333334, ans=0.125
+2024-08-29 18:36:03,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209850.66666666666, ans=0.1
+2024-08-29 18:36:13,158 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.666e+02 1.888e+02 2.185e+02 3.516e+02, threshold=3.775e+02, percent-clipped=0.0
+2024-08-29 18:36:30,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=209957.33333333334, ans=0.0
+2024-08-29 18:36:30,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=209957.33333333334, ans=0.025
+2024-08-29 18:36:31,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=209957.33333333334, ans=0.125
+2024-08-29 18:36:37,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.52 vs. limit=10.0
+2024-08-29 18:36:49,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209957.33333333334, ans=0.125
+2024-08-29 18:37:02,151 INFO [train.py:1114] (2/4) Epoch 16, batch 2050, loss[loss=0.2107, simple_loss=0.2642, pruned_loss=0.05787, ctc_loss=0.1038, over 19714.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2778, pruned_loss=0.05387, ctc_loss=0.1016, over 3850267.81 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0
+2024-08-29 18:37:41,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0
+2024-08-29 18:38:17,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210170.66666666666, ans=0.0
+2024-08-29 18:38:44,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.33 vs. limit=15.0
+2024-08-29 18:38:59,605 INFO [train.py:1114] (2/4) Epoch 16, batch 2100, loss[loss=0.2271, simple_loss=0.2879, pruned_loss=0.06084, ctc_loss=0.1114, over 19770.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2778, pruned_loss=0.05408, ctc_loss=0.1021, over 3857183.94 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 32.0
+2024-08-29 18:39:09,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=210384.0, ans=0.125
+2024-08-29 18:39:19,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.00 vs. limit=6.0
+2024-08-29 18:39:22,238 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.792e+02 2.112e+02 2.675e+02 4.176e+02, threshold=4.223e+02, percent-clipped=3.0
+2024-08-29 18:39:23,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=210437.33333333334, ans=0.125
+2024-08-29 18:39:52,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210544.0, ans=0.1
+2024-08-29 18:39:53,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=210544.0, ans=0.0
+2024-08-29 18:39:57,960 INFO [train.py:1114] (2/4) Epoch 16, batch 2150, loss[loss=0.1979, simple_loss=0.2634, pruned_loss=0.04814, ctc_loss=0.09041, over 19853.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2766, pruned_loss=0.05342, ctc_loss=0.1007, over 3867734.26 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0
+2024-08-29 18:40:05,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=210597.33333333334, ans=0.0
+2024-08-29 18:40:55,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.96 vs. limit=15.0
+2024-08-29 18:41:00,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.54 vs. limit=12.0
+2024-08-29 18:41:01,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.74 vs. limit=15.0
+2024-08-29 18:41:04,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=15.0
+2024-08-29 18:41:06,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=210810.66666666666, ans=15.0
+2024-08-29 18:41:08,950 INFO [train.py:1114] (2/4) Epoch 16, batch 2200, loss[loss=0.239, simple_loss=0.3015, pruned_loss=0.06409, ctc_loss=0.1204, over 19582.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.277, pruned_loss=0.05366, ctc_loss=0.1009, over 3865825.84 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0
+2024-08-29 18:41:29,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.757e+02 2.042e+02 2.598e+02 4.148e+02, threshold=4.084e+02, percent-clipped=0.0
+2024-08-29 18:41:58,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=210970.66666666666, ans=0.0
+2024-08-29 18:42:13,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=211024.0, ans=0.125
+2024-08-29 18:42:31,579 INFO [train.py:1114] (2/4) Epoch 16, batch 2250, loss[loss=0.2055, simple_loss=0.2823, pruned_loss=0.04758, ctc_loss=0.08367, over 19607.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2779, pruned_loss=0.05424, ctc_loss=0.1019, over 3865772.78 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0
+2024-08-29 18:42:40,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211130.66666666666, ans=0.1
+2024-08-29 18:42:42,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=211184.0, ans=0.125
+2024-08-29 18:43:00,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.50 vs. limit=15.0
+2024-08-29 18:43:13,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=211290.66666666666, ans=0.125
+2024-08-29 18:44:24,290 INFO [train.py:1114] (2/4) Epoch 16, batch 2300, loss[loss=0.1897, simple_loss=0.255, pruned_loss=0.04568, ctc_loss=0.08266, over 19504.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2771, pruned_loss=0.05418, ctc_loss=0.1017, over 3860309.09 frames. ], batch size: 49, lr: 9.25e-03, grad_scale: 32.0
+2024-08-29 18:44:29,163 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:44:58,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=211397.33333333334, ans=0.0
+2024-08-29 18:44:59,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=211397.33333333334, ans=0.0
+2024-08-29 18:45:01,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=211450.66666666666, ans=0.2
+2024-08-29 18:45:05,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211450.66666666666, ans=0.0
+2024-08-29 18:45:10,441 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.785e+02 2.121e+02 2.618e+02 4.213e+02, threshold=4.241e+02, percent-clipped=2.0
+2024-08-29 18:45:16,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.47 vs. limit=15.0
+2024-08-29 18:45:31,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=211557.33333333334, ans=0.2
+2024-08-29 18:45:34,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=211610.66666666666, ans=0.2
+2024-08-29 18:45:59,063 INFO [train.py:1114] (2/4) Epoch 16, batch 2350, loss[loss=0.2154, simple_loss=0.2906, pruned_loss=0.05129, ctc_loss=0.09405, over 19678.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2771, pruned_loss=0.05414, ctc_loss=0.1018, over 3863352.78 frames. ], batch size: 63, lr: 9.24e-03, grad_scale: 32.0
+2024-08-29 18:46:05,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211664.0, ans=0.0
+2024-08-29 18:46:11,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=211717.33333333334, ans=0.0
+2024-08-29 18:46:39,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211824.0, ans=0.125
+2024-08-29 18:46:45,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0
+2024-08-29 18:46:47,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0
+2024-08-29 18:46:52,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=211877.33333333334, ans=0.1
+2024-08-29 18:47:00,335 INFO [train.py:1114] (2/4) Epoch 16, batch 2400, loss[loss=0.2439, simple_loss=0.2983, pruned_loss=0.06797, ctc_loss=0.1338, over 19443.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2791, pruned_loss=0.05482, ctc_loss=0.1029, over 3857038.45 frames. ], batch size: 71, lr: 9.24e-03, grad_scale: 32.0
+2024-08-29 18:47:07,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.33 vs. limit=15.0
+2024-08-29 18:47:08,325 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 18:47:20,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.800e+02 2.132e+02 2.653e+02 4.129e+02, threshold=4.264e+02, percent-clipped=0.0
+2024-08-29 18:47:22,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212037.33333333334, ans=0.1
+2024-08-29 18:47:27,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.23 vs. limit=15.0
+2024-08-29 18:47:50,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=212144.0, ans=0.0
+2024-08-29 18:47:56,903 INFO [train.py:1114] (2/4) Epoch 16, batch 2450, loss[loss=0.271, simple_loss=0.3128, pruned_loss=0.08227, ctc_loss=0.1618, over 12918.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2826, pruned_loss=0.05731, ctc_loss=0.1081, over 3728353.95 frames. ], batch size: 140, lr: 9.23e-03, grad_scale: 32.0
+2024-08-29 18:48:00,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212197.33333333334, ans=0.125
+2024-08-29 18:48:18,668 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0
+2024-08-29 18:48:32,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=212304.0, ans=0.0
+2024-08-29 18:55:35,507 INFO [train.py:1114] (2/4) Epoch 17, batch 0, loss[loss=0.1952, simple_loss=0.2559, pruned_loss=0.04946, ctc_loss=0.08907, over 19416.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2559, pruned_loss=0.04946, ctc_loss=0.08907, over 19416.00 frames. ], batch size: 48, lr: 8.95e-03, grad_scale: 32.0
+2024-08-29 18:55:35,507 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-29 18:55:47,290 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5471, 3.1733, 2.2076, 2.8463], device='cuda:2')
+2024-08-29 18:56:01,940 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0126, 4.3107, 3.7867, 4.0314], device='cuda:2')
+2024-08-29 18:56:02,371 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.9238, 2.1456, 3.4846, 3.6289], device='cuda:2')
+2024-08-29 18:56:04,687 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.1843, simple_loss=0.2733, pruned_loss=0.03544, ctc_loss=0.06098, over 944034.00 frames.
+2024-08-29 18:56:04,687 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13710MB
+2024-08-29 18:56:11,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.07 vs. limit=22.5
+2024-08-29 18:56:56,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212458.66666666666, ans=0.125
+2024-08-29 18:57:44,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212458.66666666666, ans=0.125
+2024-08-29 18:58:11,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212458.66666666666, ans=0.1
+2024-08-29 18:58:20,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=212512.0, ans=10.0
+2024-08-29 18:58:22,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.80 vs. limit=15.0
+2024-08-29 18:58:29,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212512.0, ans=0.1
+2024-08-29 18:58:30,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.824e+02 2.030e+02 2.233e+02 3.073e+02, threshold=4.061e+02, percent-clipped=0.0
+2024-08-29 18:58:38,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=212565.33333333334, ans=0.125
+2024-08-29 18:58:55,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212618.66666666666, ans=0.0
+2024-08-29 19:05:21,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5
+2024-08-29 19:05:21,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=212618.66666666666, ans=0.2
+2024-08-29 19:05:26,891 INFO [train.py:1114] (2/4) Epoch 17, batch 50, loss[loss=0.1845, simple_loss=0.2497, pruned_loss=0.04289, ctc_loss=0.08394, over 19722.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2781, pruned_loss=0.05418, ctc_loss=0.1026, over 845315.44 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 32.0
+2024-08-29 19:07:29,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=212672.0, ans=0.125
+2024-08-29 19:07:37,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212672.0, ans=0.125
+2024-08-29 19:07:54,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=15.0
+2024-08-29 19:08:15,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=212778.66666666666, ans=0.05
+2024-08-29 19:08:31,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212832.0, ans=0.125
+2024-08-29 19:08:52,527 INFO [train.py:1114] (2/4) Epoch 17, batch 100, loss[loss=0.1877, simple_loss=0.2638, pruned_loss=0.04047, ctc_loss=0.07672, over 19753.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2776, pruned_loss=0.0535, ctc_loss=0.1012, over 1498877.26 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 32.0
+2024-08-29 19:08:57,930 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.02 vs. limit=10.0
+2024-08-29 19:09:04,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=212992.0, ans=0.0
+2024-08-29 19:09:06,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=212992.0, ans=0.125
+2024-08-29 19:09:25,905 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.707e+02 1.910e+02 2.335e+02 3.363e+02, threshold=3.820e+02, percent-clipped=0.0
+2024-08-29 19:09:34,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=213098.66666666666, ans=0.0
+2024-08-29 19:09:36,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=213098.66666666666, ans=0.125
+2024-08-29 19:09:53,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=213152.0, ans=0.125
+2024-08-29 19:09:55,289 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.39 vs. limit=22.5
+2024-08-29 19:09:58,140 INFO [train.py:1114] (2/4) Epoch 17, batch 150, loss[loss=0.1768, simple_loss=0.2476, pruned_loss=0.03841, ctc_loss=0.07289, over 19722.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2759, pruned_loss=0.0524, ctc_loss=0.09899, over 2027544.43 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0
+2024-08-29 19:12:14,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.58 vs. limit=22.5
+2024-08-29 19:12:24,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=213312.0, ans=0.025
+2024-08-29 19:16:09,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213365.33333333334, ans=0.0
+2024-08-29 19:16:10,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=213365.33333333334, ans=0.025
+2024-08-29 19:16:17,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=213418.66666666666, ans=0.125
+2024-08-29 19:16:20,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213418.66666666666, ans=0.125
+2024-08-29 19:16:29,773 INFO [train.py:1114] (2/4) Epoch 17, batch 200, loss[loss=0.2228, simple_loss=0.2834, pruned_loss=0.05948, ctc_loss=0.1082, over 18397.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2749, pruned_loss=0.05217, ctc_loss=0.09843, over 2435139.54 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 32.0
+2024-08-29 19:25:01,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213472.0, ans=0.125
+2024-08-29 19:26:30,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.59 vs. limit=6.0
+2024-08-29 19:27:13,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=213525.33333333334, ans=0.2
+2024-08-29 19:27:57,285 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.724e+02 1.931e+02 2.405e+02 4.691e+02, threshold=3.862e+02, percent-clipped=4.0
+2024-08-29 19:28:22,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=213685.33333333334, ans=0.09899494936611666
+2024-08-29 19:28:31,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213685.33333333334, ans=0.125
+2024-08-29 19:28:32,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=213685.33333333334, ans=0.04949747468305833
+2024-08-29 19:28:38,496 INFO [train.py:1114] (2/4) Epoch 17, batch 250, loss[loss=0.2113, simple_loss=0.2798, pruned_loss=0.0518, ctc_loss=0.09803, over 19307.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2749, pruned_loss=0.05209, ctc_loss=0.09832, over 2755756.17 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0
+2024-08-29 19:28:38,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=213738.66666666666, ans=0.2
+2024-08-29 19:29:14,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=213792.0, ans=0.125
+2024-08-29 19:29:31,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0
+2024-08-29 19:29:40,014 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0
+2024-08-29 19:29:44,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=213898.66666666666, ans=0.125
+2024-08-29 19:30:03,484 INFO [train.py:1114] (2/4) Epoch 17, batch 300, loss[loss=0.2625, simple_loss=0.3106, pruned_loss=0.07829, ctc_loss=0.1445, over 19521.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2754, pruned_loss=0.05218, ctc_loss=0.09871, over 3000078.39 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 32.0
+2024-08-29 19:30:03,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214005.33333333334, ans=0.125
+2024-08-29 19:30:03,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=214005.33333333334, ans=0.0
+2024-08-29 19:30:25,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=214058.66666666666, ans=0.125
+2024-08-29 19:31:15,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=214112.0, ans=0.0
+2024-08-29 19:32:02,220 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.689e+02 1.972e+02 2.447e+02 4.331e+02, threshold=3.945e+02, percent-clipped=1.0
+2024-08-29 19:32:29,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.71 vs. limit=15.0
+2024-08-29 19:32:41,672 INFO [train.py:1114] (2/4) Epoch 17, batch 350, loss[loss=0.1969, simple_loss=0.2561, pruned_loss=0.04954, ctc_loss=0.09658, over 19751.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2759, pruned_loss=0.05248, ctc_loss=0.09934, over 3189055.41 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0
+2024-08-29 19:32:48,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.34 vs. limit=10.0
+2024-08-29 19:33:32,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=15.0
+2024-08-29 19:33:43,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.49 vs. limit=12.0
+2024-08-29 19:34:13,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=214485.33333333334, ans=0.125
+2024-08-29 19:34:18,285 INFO [train.py:1114] (2/4) Epoch 17, batch 400, loss[loss=0.211, simple_loss=0.2806, pruned_loss=0.05164, ctc_loss=0.09515, over 19496.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2751, pruned_loss=0.05201, ctc_loss=0.09856, over 3340401.63 frames.
], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-29 19:34:21,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=214538.66666666666, ans=0.2 +2024-08-29 19:34:39,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=214538.66666666666, ans=0.125 +2024-08-29 19:34:44,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=214592.0, ans=0.1 +2024-08-29 19:35:29,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214592.0, ans=0.1 +2024-08-29 19:35:31,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-29 19:35:39,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214645.33333333334, ans=0.1 +2024-08-29 19:35:43,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-29 19:36:30,693 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.665e+02 1.964e+02 2.553e+02 4.238e+02, threshold=3.929e+02, percent-clipped=2.0 +2024-08-29 19:37:35,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=12.0 +2024-08-29 19:37:38,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=214698.66666666666, ans=0.1 +2024-08-29 19:37:57,110 INFO [train.py:1114] (2/4) Epoch 17, batch 450, loss[loss=0.2365, simple_loss=0.2915, pruned_loss=0.06436, ctc_loss=0.1319, over 19617.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2752, pruned_loss=0.05186, ctc_loss=0.09807, over 3449884.10 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:38:13,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214805.33333333334, ans=0.125 +2024-08-29 19:38:16,239 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:38:16,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=214805.33333333334, ans=0.2 +2024-08-29 19:38:43,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214858.66666666666, ans=0.125 +2024-08-29 19:38:47,968 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:39:40,608 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. 
limit=15.0 +2024-08-29 19:40:13,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=215018.66666666666, ans=0.0 +2024-08-29 19:40:15,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215018.66666666666, ans=0.1 +2024-08-29 19:40:17,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215018.66666666666, ans=0.0 +2024-08-29 19:40:26,571 INFO [train.py:1114] (2/4) Epoch 17, batch 500, loss[loss=0.2139, simple_loss=0.2847, pruned_loss=0.05203, ctc_loss=0.09735, over 19708.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2745, pruned_loss=0.05158, ctc_loss=0.09746, over 3545931.45 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:40:28,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=215072.0, ans=0.2 +2024-08-29 19:40:33,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=215072.0, ans=0.125 +2024-08-29 19:41:41,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=215125.33333333334, ans=0.125 +2024-08-29 19:41:41,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=215125.33333333334, ans=10.0 +2024-08-29 19:42:01,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=215178.66666666666, ans=0.125 +2024-08-29 19:42:03,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.46 vs. limit=15.0 +2024-08-29 19:42:27,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=215178.66666666666, ans=0.0 +2024-08-29 19:42:38,137 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.765e+02 1.983e+02 2.603e+02 4.687e+02, threshold=3.966e+02, percent-clipped=3.0 +2024-08-29 19:42:38,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.40 vs. limit=15.0 +2024-08-29 19:43:34,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=215285.33333333334, ans=0.025 +2024-08-29 19:43:45,815 INFO [train.py:1114] (2/4) Epoch 17, batch 550, loss[loss=0.212, simple_loss=0.2756, pruned_loss=0.05377, ctc_loss=0.1021, over 19279.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2741, pruned_loss=0.05138, ctc_loss=0.0971, over 3608240.69 frames. 
], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-29 19:45:01,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=215392.0, ans=0.0 +2024-08-29 19:45:58,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=215498.66666666666, ans=0.125 +2024-08-29 19:46:13,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215552.0, ans=0.125 +2024-08-29 19:46:53,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=215552.0, ans=0.125 +2024-08-29 19:47:01,417 INFO [train.py:1114] (2/4) Epoch 17, batch 600, loss[loss=0.247, simple_loss=0.3067, pruned_loss=0.06831, ctc_loss=0.1265, over 19456.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2746, pruned_loss=0.05162, ctc_loss=0.09735, over 3666029.34 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:47:08,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=215605.33333333334, ans=0.0 +2024-08-29 19:47:48,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215658.66666666666, ans=0.0 +2024-08-29 19:48:03,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=215712.0, ans=0.2 +2024-08-29 19:48:19,070 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.690e+02 1.951e+02 2.307e+02 4.172e+02, threshold=3.901e+02, percent-clipped=2.0 +2024-08-29 19:49:21,617 INFO [train.py:1114] (2/4) Epoch 17, batch 650, loss[loss=0.2126, simple_loss=0.2809, pruned_loss=0.05252, ctc_loss=0.09804, over 19776.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2742, pruned_loss=0.05145, ctc_loss=0.09681, over 3716225.09 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:49:44,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=215925.33333333334, ans=0.0 +2024-08-29 19:51:27,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=216085.33333333334, ans=0.0 +2024-08-29 19:51:32,000 INFO [train.py:1114] (2/4) Epoch 17, batch 700, loss[loss=0.202, simple_loss=0.269, pruned_loss=0.04898, ctc_loss=0.09263, over 19707.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2751, pruned_loss=0.05211, ctc_loss=0.09801, over 3747768.85 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:51:35,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216138.66666666666, ans=0.1 +2024-08-29 19:51:36,932 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.19 vs. limit=15.0 +2024-08-29 19:51:42,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.19 vs. 
limit=15.0 +2024-08-29 19:52:43,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.757e+02 1.978e+02 2.439e+02 3.670e+02, threshold=3.956e+02, percent-clipped=0.0 +2024-08-29 19:53:30,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.26 vs. limit=12.0 +2024-08-29 19:53:31,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216298.66666666666, ans=0.1 +2024-08-29 19:53:46,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=216405.33333333334, ans=0.2 +2024-08-29 19:53:46,882 INFO [train.py:1114] (2/4) Epoch 17, batch 750, loss[loss=0.179, simple_loss=0.2559, pruned_loss=0.03701, ctc_loss=0.07007, over 19516.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2745, pruned_loss=0.0519, ctc_loss=0.09765, over 3774513.43 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:54:28,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.46 vs. limit=15.0 +2024-08-29 19:55:53,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=216512.0, ans=0.125 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-3 new file mode 100644 index 0000000000000000000000000000000000000000..a819b776d3bc4bf1105e74276aa574012e838552 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-29-13-08-38-3 @@ -0,0 +1,1138 @@ +2024-08-29 13:08:38,566 INFO [train.py:1182] (3/4) Training started +2024-08-29 13:09:26,759 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-29 13:09:26,762 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2549.int.cedar.computecanada.ca', 'IP address': '172.16.145.242'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 14, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 
42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 13:09:26,762 INFO [train.py:1212] (3/4) About to create model +2024-08-29 13:09:27,468 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-29 13:09:27,468 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-13.pt +2024-08-29 13:09:28,644 INFO [train.py:1231] (3/4) Using DDP +2024-08-29 13:09:40,407 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-29 13:09:40,535 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-29 13:09:40,535 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-29 13:09:40,690 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-29 13:09:40,691 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-29 13:09:42,272 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-29 13:09:42,276 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-29 13:09:42,374 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-29 13:09:42,447 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-29 13:09:42,768 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-29 13:09:42,877 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. 
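The parameter dump above shows `'causal': True` together with `'chunk_size': '16,32,64,-1'` and `'left_context_frames': '64,128,256,-1'`. In the zipformer streaming recipes, comma-separated specs like these let training vary the attention chunking from batch to batch, so a single model can later be decoded at several latencies; `-1` conventionally means unlimited (non-streaming) context. A minimal sketch of parsing such a spec and sampling one combination per batch — an illustration under those assumptions, not the recipe's exact code:

```python
import random

def parse_int_list(spec: str) -> list[int]:
    # Parse a comma-separated spec such as "16,32,64,-1" into ints;
    # -1 is taken to mean "unlimited" (full, non-streaming) context.
    return [int(tok) for tok in spec.split(",")]

def sample_streaming_config(chunk_spec: str, left_ctx_spec: str) -> tuple[int, int]:
    # Pick one (chunk_size, left_context_frames) pair for a batch.
    # Randomizing the pair per batch trains one model that can later
    # be decoded at any of the listed chunk/context settings.
    chunk = random.choice(parse_int_list(chunk_spec))
    left_ctx = random.choice(parse_int_list(left_ctx_spec))
    return chunk, left_ctx

# Hypothetical usage with the values logged above:
chunk, left_ctx = sample_streaming_config("16,32,64,-1", "64,128,256,-1")
print(f"this batch: chunk_size={chunk}, left_context_frames={left_ctx}")
```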
+2024-08-29 13:14:18,566 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-29 13:14:21,279 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-29 13:14:38,612 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-29 13:14:39,590 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=256, metric=7.92 vs. limit=7.5 +2024-08-29 13:14:45,670 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 13:15:10,807 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 13:15:12,348 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 13:15:12,368 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-29 13:16:12,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=172581.33333333334, ans=0.04949747468305833 +2024-08-29 13:16:15,181 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2615, pruned_loss=0.05192, ctc_loss=0.09419, over 19809.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:16:15,181 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 13:16:26,276 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.1110, 2.9698, 3.4716, 2.6793], device='cuda:3') +2024-08-29 13:16:31,394 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1913, simple_loss=0.2789, pruned_loss=0.03846, ctc_loss=0.06724, over 944034.00 frames. +2024-08-29 13:16:31,395 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13097MB +2024-08-29 13:24:21,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=172688.0, ans=0.025 +2024-08-29 13:26:36,728 INFO [train.py:1114] (3/4) Epoch 14, batch 50, loss[loss=0.1855, simple_loss=0.2522, pruned_loss=0.04355, ctc_loss=0.07898, over 19744.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.287, pruned_loss=0.06114, ctc_loss=0.1164, over 844617.17 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:27:17,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172901.33333333334, ans=0.1 +2024-08-29 13:30:21,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-08-29 13:32:00,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.07 vs. limit=22.5 +2024-08-29 13:32:29,768 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.749e+02 1.974e+02 2.504e+02 4.970e+02, threshold=3.948e+02, percent-clipped=4.0 +2024-08-29 13:32:58,201 INFO [train.py:1114] (3/4) Epoch 14, batch 100, loss[loss=0.223, simple_loss=0.2842, pruned_loss=0.05935, ctc_loss=0.1077, over 19736.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.2877, pruned_loss=0.06091, ctc_loss=0.1149, over 1499497.26 frames. 
], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:33:33,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.33 vs. limit=22.5 +2024-08-29 13:34:41,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173274.66666666666, ans=0.125 +2024-08-29 13:35:59,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=173328.0, ans=0.125 +2024-08-29 13:36:02,982 INFO [train.py:1114] (3/4) Epoch 14, batch 150, loss[loss=0.1819, simple_loss=0.2454, pruned_loss=0.04306, ctc_loss=0.08076, over 19682.00 frames. ], tot_loss[loss=0.224, simple_loss=0.285, pruned_loss=0.05922, ctc_loss=0.1115, over 2027234.41 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:36:04,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=173381.33333333334, ans=0.125 +2024-08-29 13:36:05,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=173381.33333333334, ans=0.0 +2024-08-29 13:36:21,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=173434.66666666666, ans=0.09899494936611666 +2024-08-29 13:36:24,570 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-08-29 13:37:07,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=173541.33333333334, ans=0.0 +2024-08-29 13:37:15,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=173541.33333333334, ans=0.025 +2024-08-29 13:37:16,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173541.33333333334, ans=0.1 +2024-08-29 13:37:19,616 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.730e+02 2.035e+02 2.422e+02 3.683e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-29 13:37:22,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=173594.66666666666, ans=0.0 +2024-08-29 13:37:30,510 INFO [train.py:1114] (3/4) Epoch 14, batch 200, loss[loss=0.2587, simple_loss=0.3053, pruned_loss=0.07685, ctc_loss=0.146, over 18104.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2834, pruned_loss=0.05883, ctc_loss=0.1107, over 2434650.11 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:37:40,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.24 vs. 
limit=15.0 +2024-08-29 13:37:44,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=173701.33333333334, ans=15.0 +2024-08-29 13:37:48,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=173701.33333333334, ans=0.0 +2024-08-29 13:38:48,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=173754.66666666666, ans=0.0 +2024-08-29 13:41:34,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173861.33333333334, ans=0.1 +2024-08-29 13:42:18,842 INFO [train.py:1114] (3/4) Epoch 14, batch 250, loss[loss=0.2527, simple_loss=0.3082, pruned_loss=0.07224, ctc_loss=0.1318, over 19411.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2833, pruned_loss=0.05876, ctc_loss=0.1106, over 2753935.14 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:43:10,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=173914.66666666666, ans=0.0 +2024-08-29 13:43:15,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=173914.66666666666, ans=0.125 +2024-08-29 13:43:49,657 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.08 vs. limit=12.0 +2024-08-29 13:43:53,343 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.69 vs. limit=15.0 +2024-08-29 13:44:12,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=174128.0, ans=0.035 +2024-08-29 13:44:13,475 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.787e+02 2.022e+02 2.717e+02 4.953e+02, threshold=4.043e+02, percent-clipped=2.0 +2024-08-29 13:44:52,057 INFO [train.py:1114] (3/4) Epoch 14, batch 300, loss[loss=0.2255, simple_loss=0.2851, pruned_loss=0.06144, ctc_loss=0.1077, over 19518.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2821, pruned_loss=0.05811, ctc_loss=0.109, over 2999168.00 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 32.0 +2024-08-29 13:45:55,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=174394.66666666666, ans=0.025 +2024-08-29 13:46:17,856 INFO [train.py:1114] (3/4) Epoch 14, batch 350, loss[loss=0.1967, simple_loss=0.2568, pruned_loss=0.05007, ctc_loss=0.09137, over 19738.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2825, pruned_loss=0.05811, ctc_loss=0.1089, over 3190097.11 frames. 
], batch size: 48, lr: 1.08e-02, grad_scale: 16.0 +2024-08-29 13:47:22,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-29 13:47:31,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=174608.0, ans=0.2 +2024-08-29 13:47:39,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.793e+02 2.058e+02 2.658e+02 4.429e+02, threshold=4.116e+02, percent-clipped=3.0 +2024-08-29 13:48:31,277 INFO [train.py:1114] (3/4) Epoch 14, batch 400, loss[loss=0.2272, simple_loss=0.2911, pruned_loss=0.0604, ctc_loss=0.1059, over 19499.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2822, pruned_loss=0.05793, ctc_loss=0.1086, over 3342697.91 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:48:51,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=174768.0, ans=0.125 +2024-08-29 13:50:13,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=174821.33333333334, ans=0.125 +2024-08-29 13:50:47,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.98 vs. limit=15.0 +2024-08-29 13:50:57,540 INFO [train.py:1114] (3/4) Epoch 14, batch 450, loss[loss=0.2348, simple_loss=0.292, pruned_loss=0.06435, ctc_loss=0.1222, over 19617.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2822, pruned_loss=0.05793, ctc_loss=0.1088, over 3451257.46 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:51:01,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=174981.33333333334, ans=0.0 +2024-08-29 13:51:10,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=175034.66666666666, ans=0.0 +2024-08-29 13:51:38,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=175141.33333333334, ans=0.025 +2024-08-29 13:51:42,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=175141.33333333334, ans=0.125 +2024-08-29 13:51:50,568 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.713e+02 1.900e+02 2.415e+02 4.159e+02, threshold=3.800e+02, percent-clipped=2.0 +2024-08-29 13:52:16,300 INFO [train.py:1114] (3/4) Epoch 14, batch 500, loss[loss=0.2389, simple_loss=0.2998, pruned_loss=0.06514, ctc_loss=0.1196, over 19678.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2817, pruned_loss=0.05785, ctc_loss=0.1089, over 3547006.36 frames. 
], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:52:47,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=175354.66666666666, ans=0.95 +2024-08-29 13:52:56,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=175354.66666666666, ans=0.0 +2024-08-29 13:53:06,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=175408.0, ans=0.1 +2024-08-29 13:53:11,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=175461.33333333334, ans=0.07 +2024-08-29 13:53:23,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=175514.66666666666, ans=0.125 +2024-08-29 13:53:23,935 INFO [train.py:1114] (3/4) Epoch 14, batch 550, loss[loss=0.2486, simple_loss=0.3011, pruned_loss=0.07156, ctc_loss=0.1324, over 19229.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.282, pruned_loss=0.05805, ctc_loss=0.1092, over 3608381.59 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:53:26,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.72 vs. limit=15.0 +2024-08-29 13:53:35,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=175568.0, ans=0.0 +2024-08-29 13:54:10,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175674.66666666666, ans=0.1 +2024-08-29 13:54:18,068 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.404e+02 1.725e+02 1.963e+02 2.348e+02 4.063e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-29 13:54:20,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=175728.0, ans=0.025 +2024-08-29 13:54:28,216 INFO [train.py:1114] (3/4) Epoch 14, batch 600, loss[loss=0.2343, simple_loss=0.2965, pruned_loss=0.06367, ctc_loss=0.1122, over 19367.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2821, pruned_loss=0.05784, ctc_loss=0.1087, over 3664729.30 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:54:32,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=175781.33333333334, ans=0.2 +2024-08-29 13:54:34,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=175781.33333333334, ans=0.2 +2024-08-29 13:54:40,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-29 13:54:55,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=175888.0, ans=0.2 +2024-08-29 13:54:58,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=175888.0, ans=0.5 +2024-08-29 13:55:23,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.78 vs. 
limit=15.0 +2024-08-29 13:55:30,812 INFO [train.py:1114] (3/4) Epoch 14, batch 650, loss[loss=0.2182, simple_loss=0.282, pruned_loss=0.05675, ctc_loss=0.1024, over 19768.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2805, pruned_loss=0.05671, ctc_loss=0.1066, over 3715303.48 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:55:42,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=176101.33333333334, ans=0.2 +2024-08-29 13:55:45,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=176101.33333333334, ans=0.035 +2024-08-29 13:55:46,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=176101.33333333334, ans=0.015 +2024-08-29 13:56:06,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=176154.66666666666, ans=0.125 +2024-08-29 13:56:23,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=176261.33333333334, ans=10.0 +2024-08-29 13:56:24,632 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.743e+02 2.058e+02 2.560e+02 4.338e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-29 13:56:33,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=176314.66666666666, ans=0.0 +2024-08-29 13:56:34,654 INFO [train.py:1114] (3/4) Epoch 14, batch 700, loss[loss=0.2153, simple_loss=0.2742, pruned_loss=0.05627, ctc_loss=0.1095, over 19748.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2809, pruned_loss=0.05674, ctc_loss=0.1068, over 3747071.82 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:56:42,585 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.71 vs. limit=15.0 +2024-08-29 13:57:36,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=176421.33333333334, ans=0.125 +2024-08-29 13:57:59,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=176474.66666666666, ans=0.2 +2024-08-29 13:58:12,843 INFO [train.py:1114] (3/4) Epoch 14, batch 750, loss[loss=0.2152, simple_loss=0.2786, pruned_loss=0.0547, ctc_loss=0.1058, over 19497.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.281, pruned_loss=0.05692, ctc_loss=0.107, over 3774604.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:58:14,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176581.33333333334, ans=0.0 +2024-08-29 13:58:15,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=176581.33333333334, ans=0.0 +2024-08-29 13:58:19,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.39 vs. 
limit=10.0 +2024-08-29 13:58:22,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=176581.33333333334, ans=0.0 +2024-08-29 13:58:24,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=176634.66666666666, ans=0.025 +2024-08-29 13:58:53,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=176741.33333333334, ans=0.125 +2024-08-29 13:59:06,505 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.910e+02 2.277e+02 2.884e+02 4.780e+02, threshold=4.554e+02, percent-clipped=3.0 +2024-08-29 13:59:20,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=176794.66666666666, ans=0.025 +2024-08-29 13:59:27,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.90 vs. limit=22.5 +2024-08-29 13:59:28,726 INFO [train.py:1114] (3/4) Epoch 14, batch 800, loss[loss=0.1844, simple_loss=0.2463, pruned_loss=0.0456, ctc_loss=0.07821, over 19821.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2808, pruned_loss=0.05683, ctc_loss=0.1066, over 3796395.50 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 13:59:39,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=176848.0, ans=0.2 +2024-08-29 14:01:03,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=176954.66666666666, ans=0.025 +2024-08-29 14:01:13,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=176954.66666666666, ans=0.025 +2024-08-29 14:02:27,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177008.0, ans=0.1 +2024-08-29 14:02:31,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177008.0, ans=0.1 +2024-08-29 14:02:49,613 INFO [train.py:1114] (3/4) Epoch 14, batch 850, loss[loss=0.2321, simple_loss=0.3057, pruned_loss=0.05655, ctc_loss=0.1137, over 19649.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2811, pruned_loss=0.05717, ctc_loss=0.1073, over 3815210.30 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:03:11,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177168.0, ans=0.125 +2024-08-29 14:03:40,323 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 1.703e+02 1.970e+02 2.385e+02 3.831e+02, threshold=3.939e+02, percent-clipped=0.0 +2024-08-29 14:03:46,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=177328.0, ans=0.125 +2024-08-29 14:03:49,900 INFO [train.py:1114] (3/4) Epoch 14, batch 900, loss[loss=0.1848, simple_loss=0.2528, pruned_loss=0.04288, ctc_loss=0.0775, over 19814.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.282, pruned_loss=0.05782, ctc_loss=0.1082, over 3820149.52 frames. 
], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:03:50,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.94 vs. limit=15.0 +2024-08-29 14:04:12,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.53 vs. limit=15.0 +2024-08-29 14:04:24,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-29 14:04:35,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177541.33333333334, ans=0.1 +2024-08-29 14:04:46,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=177594.66666666666, ans=0.0 +2024-08-29 14:04:52,321 INFO [train.py:1114] (3/4) Epoch 14, batch 950, loss[loss=0.2287, simple_loss=0.2813, pruned_loss=0.06452, ctc_loss=0.1176, over 19503.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2823, pruned_loss=0.0578, ctc_loss=0.1083, over 3821034.01 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-29 14:05:06,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=177701.33333333334, ans=0.035 +2024-08-29 14:05:08,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177701.33333333334, ans=0.1 +2024-08-29 14:05:13,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177701.33333333334, ans=0.1 +2024-08-29 14:05:13,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=177701.33333333334, ans=15.0 +2024-08-29 14:05:26,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1 +2024-08-29 14:05:30,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1 +2024-08-29 14:06:19,917 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.335e+02 1.740e+02 1.996e+02 2.581e+02 3.979e+02, threshold=3.992e+02, percent-clipped=2.0 +2024-08-29 14:07:04,893 INFO [train.py:1114] (3/4) Epoch 14, batch 1000, loss[loss=0.2216, simple_loss=0.282, pruned_loss=0.05833, ctc_loss=0.1115, over 19846.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2838, pruned_loss=0.0586, ctc_loss=0.1099, over 3817118.64 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:07:14,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=177914.66666666666, ans=0.0 +2024-08-29 14:08:08,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1 +2024-08-29 14:08:25,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=178021.33333333334, ans=0.2 +2024-08-29 14:08:30,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=178074.66666666666, ans=0.0 +2024-08-29 14:08:30,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.24 vs. limit=15.0 +2024-08-29 14:08:34,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.70 vs. limit=10.0 +2024-08-29 14:08:46,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178128.0, ans=0.0 +2024-08-29 14:08:56,372 INFO [train.py:1114] (3/4) Epoch 14, batch 1050, loss[loss=0.2197, simple_loss=0.2838, pruned_loss=0.05698, ctc_loss=0.1039, over 19823.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2829, pruned_loss=0.05817, ctc_loss=0.109, over 3824209.19 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-29 14:09:01,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178181.33333333334, ans=0.125 +2024-08-29 14:09:03,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=178181.33333333334, ans=0.5 +2024-08-29 14:09:08,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.27 vs. limit=22.5 +2024-08-29 14:09:37,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=178341.33333333334, ans=0.125 +2024-08-29 14:09:42,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=178341.33333333334, ans=0.125 +2024-08-29 14:09:46,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.45 vs. limit=10.0 +2024-08-29 14:09:46,658 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.811e+02 2.215e+02 2.668e+02 4.320e+02, threshold=4.429e+02, percent-clipped=1.0 +2024-08-29 14:10:24,264 INFO [train.py:1114] (3/4) Epoch 14, batch 1100, loss[loss=0.2127, simple_loss=0.2816, pruned_loss=0.05215, ctc_loss=0.09853, over 19572.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2824, pruned_loss=0.05782, ctc_loss=0.1086, over 3832090.41 frames. 
], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:13:47,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=178501.33333333334, ans=0.0
+2024-08-29 14:16:19,653 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 14:18:53,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=178661.33333333334, ans=0.125
+2024-08-29 14:19:15,474 INFO [train.py:1114] (3/4) Epoch 14, batch 1150, loss[loss=0.238, simple_loss=0.2916, pruned_loss=0.0673, ctc_loss=0.1242, over 19592.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.282, pruned_loss=0.05762, ctc_loss=0.1083, over 3830063.13 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:19:28,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178714.66666666666, ans=0.1
+2024-08-29 14:19:46,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178768.0, ans=0.0
+2024-08-29 14:19:46,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=178768.0, ans=0.125
+2024-08-29 14:22:13,328 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.342e+02 1.701e+02 1.876e+02 2.352e+02 3.362e+02, threshold=3.753e+02, percent-clipped=0.0
+2024-08-29 14:22:33,806 INFO [train.py:1114] (3/4) Epoch 14, batch 1200, loss[loss=0.2031, simple_loss=0.278, pruned_loss=0.04622, ctc_loss=0.08951, over 19835.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2832, pruned_loss=0.0583, ctc_loss=0.1096, over 3826323.75 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:23:27,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178981.33333333334, ans=0.1
+2024-08-29 14:24:14,678 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 14:24:14,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=179034.66666666666, ans=0.125
+2024-08-29 14:24:21,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=179088.0, ans=0.125
+2024-08-29 14:24:24,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.93 vs. limit=15.0
+2024-08-29 14:24:27,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179088.0, ans=0.125
+2024-08-29 14:24:33,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=179088.0, ans=0.125
+2024-08-29 14:25:42,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.66 vs. limit=15.0
+2024-08-29 14:29:53,988 INFO [train.py:1114] (3/4) Epoch 14, batch 1250, loss[loss=0.2471, simple_loss=0.3037, pruned_loss=0.06953, ctc_loss=0.1283, over 19526.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2837, pruned_loss=0.05835, ctc_loss=0.1096, over 3844490.95 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:30:29,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=179248.0, ans=0.125
+2024-08-29 14:31:44,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179248.0, ans=0.125
+2024-08-29 14:31:59,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=179301.33333333334, ans=0.025
+2024-08-29 14:32:26,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.45 vs. limit=15.0
+2024-08-29 14:32:30,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=179408.0, ans=0.125
+2024-08-29 14:32:41,058 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.718e+02 2.120e+02 2.679e+02 4.271e+02, threshold=4.240e+02, percent-clipped=3.0
+2024-08-29 14:32:48,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.72 vs. limit=22.5
+2024-08-29 14:32:52,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=179514.66666666666, ans=0.125
+2024-08-29 14:33:10,211 INFO [train.py:1114] (3/4) Epoch 14, batch 1300, loss[loss=0.2511, simple_loss=0.3026, pruned_loss=0.07268, ctc_loss=0.1357, over 18977.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.282, pruned_loss=0.05726, ctc_loss=0.1076, over 3849016.59 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:33:58,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179514.66666666666, ans=0.1
+2024-08-29 14:33:59,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=179514.66666666666, ans=0.125
+2024-08-29 14:34:06,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=179568.0, ans=0.2
+2024-08-29 14:34:15,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179568.0, ans=0.1
+2024-08-29 14:35:11,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179674.66666666666, ans=0.125
+2024-08-29 14:35:41,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=179781.33333333334, ans=0.125
+2024-08-29 14:35:42,424 INFO [train.py:1114] (3/4) Epoch 14, batch 1350, loss[loss=0.202, simple_loss=0.2723, pruned_loss=0.04809, ctc_loss=0.0885, over 19782.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2817, pruned_loss=0.05715, ctc_loss=0.1073, over 3860373.05 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:36:00,146 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 14:36:04,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=179781.33333333334, ans=0.0
+2024-08-29 14:36:12,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=179834.66666666666, ans=0.0
+2024-08-29 14:36:14,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.46 vs. limit=6.0
+2024-08-29 14:40:27,057 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 14:40:29,505 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.654e+02 1.881e+02 2.431e+02 4.376e+02, threshold=3.761e+02, percent-clipped=1.0
+2024-08-29 14:40:34,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179994.66666666666, ans=0.125
+2024-08-29 14:41:36,143 INFO [train.py:1114] (3/4) Epoch 14, batch 1400, loss[loss=0.1769, simple_loss=0.2384, pruned_loss=0.04185, ctc_loss=0.07916, over 19689.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2816, pruned_loss=0.05728, ctc_loss=0.1076, over 3866720.71 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:41:52,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=180101.33333333334, ans=0.125
+2024-08-29 14:42:00,347 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 14:42:17,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=180208.0, ans=0.125
+2024-08-29 14:42:25,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.93 vs. limit=15.0
+2024-08-29 14:42:35,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=180261.33333333334, ans=0.0
+2024-08-29 14:42:37,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=180261.33333333334, ans=0.0
+2024-08-29 14:42:38,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.27 vs. limit=15.0
+2024-08-29 14:42:39,832 INFO [train.py:1114] (3/4) Epoch 14, batch 1450, loss[loss=0.2168, simple_loss=0.2834, pruned_loss=0.05551, ctc_loss=0.09833, over 19664.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2826, pruned_loss=0.05775, ctc_loss=0.1085, over 3864940.50 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:42:54,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=180368.0, ans=0.125
+2024-08-29 14:43:05,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=180421.33333333334, ans=0.05
+2024-08-29 14:44:05,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=180474.66666666666, ans=0.0
+2024-08-29 14:44:06,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=180474.66666666666, ans=0.2
+2024-08-29 14:44:19,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=180528.0, ans=0.0
+2024-08-29 14:44:19,802 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.699e+02 1.929e+02 2.254e+02 4.469e+02, threshold=3.859e+02, percent-clipped=1.0
+2024-08-29 14:45:04,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=180528.0, ans=0.05
+2024-08-29 14:45:06,812 INFO [train.py:1114] (3/4) Epoch 14, batch 1500, loss[loss=0.2386, simple_loss=0.3042, pruned_loss=0.06322, ctc_loss=0.1166, over 19584.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2823, pruned_loss=0.05748, ctc_loss=0.1079, over 3864728.47 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:45:24,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=180634.66666666666, ans=0.025
+2024-08-29 14:45:58,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180688.0, ans=0.1
+2024-08-29 14:46:08,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=180741.33333333334, ans=0.0
+2024-08-29 14:46:12,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=180741.33333333334, ans=0.2
+2024-08-29 14:46:24,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180794.66666666666, ans=0.125
+2024-08-29 14:46:27,471 INFO [train.py:1114] (3/4) Epoch 14, batch 1550, loss[loss=0.2506, simple_loss=0.3044, pruned_loss=0.07213, ctc_loss=0.1315, over 19609.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2824, pruned_loss=0.05758, ctc_loss=0.1082, over 3848764.53 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:46:46,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=180901.33333333334, ans=0.2
+2024-08-29 14:46:56,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.67 vs. limit=15.0
+2024-08-29 14:47:01,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=180954.66666666666, ans=0.95
+2024-08-29 14:47:48,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181008.0, ans=0.0
+2024-08-29 14:48:37,408 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.702e+02 2.011e+02 2.397e+02 3.479e+02, threshold=4.023e+02, percent-clipped=0.0
+2024-08-29 14:48:47,138 INFO [train.py:1114] (3/4) Epoch 14, batch 1600, loss[loss=0.2187, simple_loss=0.2823, pruned_loss=0.05514, ctc_loss=0.1118, over 19841.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2818, pruned_loss=0.05749, ctc_loss=0.1079, over 3837124.52 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-29 14:49:03,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=181168.0, ans=0.2
+2024-08-29 14:49:03,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=181168.0, ans=0.04949747468305833
+2024-08-29 14:49:51,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.54 vs. limit=15.0
+2024-08-29 14:50:20,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=181221.33333333334, ans=0.025
+2024-08-29 14:51:28,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181381.33333333334, ans=0.0
+2024-08-29 14:51:29,815 INFO [train.py:1114] (3/4) Epoch 14, batch 1650, loss[loss=0.2281, simple_loss=0.2885, pruned_loss=0.05934, ctc_loss=0.1226, over 19635.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2821, pruned_loss=0.05769, ctc_loss=0.1084, over 3834583.61 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 14:51:40,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181381.33333333334, ans=0.125
+2024-08-29 14:52:19,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=15.0
+2024-08-29 14:52:28,554 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.415e+02 1.808e+02 2.247e+02 2.720e+02 5.029e+02, threshold=4.494e+02, percent-clipped=3.0
+2024-08-29 14:52:29,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=15.0
+2024-08-29 14:52:35,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.67 vs. limit=22.5
+2024-08-29 14:52:38,137 INFO [train.py:1114] (3/4) Epoch 14, batch 1700, loss[loss=0.2017, simple_loss=0.2567, pruned_loss=0.05132, ctc_loss=0.1103, over 19673.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2818, pruned_loss=0.05741, ctc_loss=0.1081, over 3848471.89 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 14:52:41,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.09 vs. limit=15.0
+2024-08-29 14:52:45,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=181648.0, ans=0.125
+2024-08-29 14:52:49,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.89 vs. limit=10.0
+2024-08-29 14:53:19,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=181808.0, ans=0.2
+2024-08-29 14:53:23,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=181861.33333333334, ans=0.0
+2024-08-29 14:53:23,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.01 vs. limit=15.0
+2024-08-29 14:53:46,197 INFO [train.py:1114] (3/4) Epoch 14, batch 1750, loss[loss=0.2285, simple_loss=0.2731, pruned_loss=0.06732, ctc_loss=0.123, over 19696.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2814, pruned_loss=0.05725, ctc_loss=0.1078, over 3854184.62 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 14:53:58,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=181968.0, ans=0.2
+2024-08-29 14:54:37,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182021.33333333334, ans=0.125
+2024-08-29 14:55:00,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=182021.33333333334, ans=0.0
+2024-08-29 14:55:03,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=182074.66666666666, ans=0.025
+2024-08-29 14:55:10,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=182074.66666666666, ans=0.2
+2024-08-29 14:56:25,491 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.791e+02 2.085e+02 2.712e+02 5.021e+02, threshold=4.170e+02, percent-clipped=2.0
+2024-08-29 14:56:34,703 INFO [train.py:1114] (3/4) Epoch 14, batch 1800, loss[loss=0.2298, simple_loss=0.2925, pruned_loss=0.06015, ctc_loss=0.1169, over 19601.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2814, pruned_loss=0.05704, ctc_loss=0.1072, over 3855590.03 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 14:57:15,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.75 vs. limit=15.0
+2024-08-29 14:57:22,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182234.66666666666, ans=0.1
+2024-08-29 14:57:26,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182234.66666666666, ans=0.125
+2024-08-29 14:57:31,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0
+2024-08-29 14:57:42,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182288.0, ans=0.1
+2024-08-29 14:57:54,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=182394.66666666666, ans=0.0
+2024-08-29 14:58:03,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182394.66666666666, ans=0.1
+2024-08-29 14:58:05,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=182394.66666666666, ans=0.025
+2024-08-29 14:58:07,414 INFO [train.py:1114] (3/4) Epoch 14, batch 1850, loss[loss=0.2225, simple_loss=0.2851, pruned_loss=0.05743, ctc_loss=0.1128, over 19599.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2815, pruned_loss=0.05695, ctc_loss=0.107, over 3858221.10 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 14:58:16,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=182448.0, ans=0.0
+2024-08-29 15:00:44,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182608.0, ans=0.1
+2024-08-29 15:03:25,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=182608.0, ans=0.0
+2024-08-29 15:03:29,627 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.679e+02 1.934e+02 2.278e+02 6.084e+02, threshold=3.868e+02, percent-clipped=1.0
+2024-08-29 15:03:40,813 INFO [train.py:1114] (3/4) Epoch 14, batch 1900, loss[loss=0.2269, simple_loss=0.2936, pruned_loss=0.05862, ctc_loss=0.1076, over 19678.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2822, pruned_loss=0.05703, ctc_loss=0.1074, over 3862304.89 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:03:51,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.52 vs. limit=15.0
+2024-08-29 15:03:55,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=182768.0, ans=0.125
+2024-08-29 15:04:35,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=182821.33333333334, ans=0.125
+2024-08-29 15:04:43,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=182874.66666666666, ans=0.0
+2024-08-29 15:04:52,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=182928.0, ans=0.0
+2024-08-29 15:05:18,943 INFO [train.py:1114] (3/4) Epoch 14, batch 1950, loss[loss=0.2073, simple_loss=0.268, pruned_loss=0.05295, ctc_loss=0.1015, over 19580.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2834, pruned_loss=0.05746, ctc_loss=0.108, over 3871186.13 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:05:39,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=183034.66666666666, ans=0.0
+2024-08-29 15:05:41,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=183088.0, ans=0.0
+2024-08-29 15:05:42,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=22.5
+2024-08-29 15:05:57,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183141.33333333334, ans=0.125
+2024-08-29 15:06:06,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.683e+02 1.939e+02 2.319e+02 3.642e+02, threshold=3.877e+02, percent-clipped=0.0
+2024-08-29 15:06:44,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183194.66666666666, ans=0.125
+2024-08-29 15:06:48,390 INFO [train.py:1114] (3/4) Epoch 14, batch 2000, loss[loss=0.1939, simple_loss=0.2571, pruned_loss=0.04752, ctc_loss=0.08904, over 19655.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2836, pruned_loss=0.0576, ctc_loss=0.1085, over 3856216.08 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:07:00,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=183301.33333333334, ans=0.125
+2024-08-29 15:07:06,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=183301.33333333334, ans=0.5
+2024-08-29 15:07:19,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=183354.66666666666, ans=0.05
+2024-08-29 15:07:24,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.28 vs. limit=15.0
+2024-08-29 15:07:43,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183461.33333333334, ans=0.1
+2024-08-29 15:07:45,831 INFO [train.py:1114] (3/4) Epoch 14, batch 2050, loss[loss=0.1959, simple_loss=0.2567, pruned_loss=0.04964, ctc_loss=0.08948, over 19725.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2831, pruned_loss=0.05789, ctc_loss=0.1092, over 3853148.84 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:07:52,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.34 vs. limit=10.0
+2024-08-29 15:08:39,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183621.33333333334, ans=0.1
+2024-08-29 15:09:21,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.69 vs. limit=22.5
+2024-08-29 15:09:33,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.50 vs. limit=22.5
+2024-08-29 15:09:39,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.749e+02 1.987e+02 2.455e+02 3.413e+02, threshold=3.973e+02, percent-clipped=0.0
+2024-08-29 15:09:47,048 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=3.666e-02
+2024-08-29 15:09:48,913 INFO [train.py:1114] (3/4) Epoch 14, batch 2100, loss[loss=0.2216, simple_loss=0.2838, pruned_loss=0.05831, ctc_loss=0.107, over 19763.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2819, pruned_loss=0.05712, ctc_loss=0.1078, over 3859523.11 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:10:21,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=183834.66666666666, ans=0.0
+2024-08-29 15:10:33,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183888.0, ans=0.1
+2024-08-29 15:10:52,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=183994.66666666666, ans=0.0
+2024-08-29 15:10:57,751 INFO [train.py:1114] (3/4) Epoch 14, batch 2150, loss[loss=0.215, simple_loss=0.2825, pruned_loss=0.05304, ctc_loss=0.1034, over 19831.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2813, pruned_loss=0.05689, ctc_loss=0.1072, over 3870758.51 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:11:00,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=184048.0, ans=0.125
+2024-08-29 15:11:10,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.83 vs. limit=12.0
+2024-08-29 15:11:16,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=184101.33333333334, ans=0.125
+2024-08-29 15:11:24,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.22 vs. limit=22.5
+2024-08-29 15:11:35,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=184208.0, ans=0.025
+2024-08-29 15:11:44,623 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.399e+02 1.765e+02 2.209e+02 2.742e+02 6.061e+02, threshold=4.418e+02, percent-clipped=6.0
+2024-08-29 15:12:09,346 INFO [train.py:1114] (3/4) Epoch 14, batch 2200, loss[loss=0.2476, simple_loss=0.3099, pruned_loss=0.06692, ctc_loss=0.1285, over 19593.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2814, pruned_loss=0.05687, ctc_loss=0.1072, over 3869156.33 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:12:10,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=184314.66666666666, ans=0.0
+2024-08-29 15:12:25,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=184368.0, ans=0.95
+2024-08-29 15:13:23,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184474.66666666666, ans=0.1
+2024-08-29 15:13:29,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=184474.66666666666, ans=0.125
+2024-08-29 15:13:33,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=184474.66666666666, ans=0.125
+2024-08-29 15:13:34,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=184528.0, ans=0.0
+2024-08-29 15:13:40,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=10.25 vs. limit=15.0
+2024-08-29 15:13:47,033 INFO [train.py:1114] (3/4) Epoch 14, batch 2250, loss[loss=0.2288, simple_loss=0.2925, pruned_loss=0.05975, ctc_loss=0.114, over 19619.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2814, pruned_loss=0.05673, ctc_loss=0.107, over 3868179.89 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:13:55,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.60 vs. limit=22.5
+2024-08-29 15:14:11,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184688.0, ans=0.125
+2024-08-29 15:14:13,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=184688.0, ans=0.125
+2024-08-29 15:14:16,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.00 vs. limit=15.0
+2024-08-29 15:14:19,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184741.33333333334, ans=0.1
+2024-08-29 15:14:28,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.78 vs. limit=15.0
+2024-08-29 15:14:33,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=15.0
+2024-08-29 15:14:34,167 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.796e+02 2.116e+02 2.512e+02 3.767e+02, threshold=4.231e+02, percent-clipped=0.0
+2024-08-29 15:14:34,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=184794.66666666666, ans=0.0
+2024-08-29 15:14:39,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184794.66666666666, ans=0.125
+2024-08-29 15:14:43,288 INFO [train.py:1114] (3/4) Epoch 14, batch 2300, loss[loss=0.2378, simple_loss=0.2894, pruned_loss=0.06759, ctc_loss=0.1278, over 19511.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2812, pruned_loss=0.05736, ctc_loss=0.108, over 3862283.03 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 32.0
+2024-08-29 15:14:49,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184848.0, ans=0.125
+2024-08-29 15:14:52,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184848.0, ans=0.1
+2024-08-29 15:14:56,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.25 vs. limit=15.0
+2024-08-29 15:14:58,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184901.33333333334, ans=0.125
+2024-08-29 15:15:03,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=184901.33333333334, ans=0.2
+2024-08-29 15:15:17,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.58 vs. limit=22.5
+2024-08-29 15:15:40,915 INFO [train.py:1114] (3/4) Epoch 14, batch 2350, loss[loss=0.2209, simple_loss=0.2846, pruned_loss=0.05787, ctc_loss=0.1038, over 19685.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.281, pruned_loss=0.05727, ctc_loss=0.1076, over 3864424.63 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 64.0
+2024-08-29 15:15:42,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.43 vs. limit=12.0
+2024-08-29 15:15:50,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=22.5
+2024-08-29 15:16:01,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185168.0, ans=0.125
+2024-08-29 15:16:10,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=185221.33333333334, ans=0.125
+2024-08-29 15:16:11,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=185221.33333333334, ans=0.0
+2024-08-29 15:16:18,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=185274.66666666666, ans=0.0
+2024-08-29 15:16:28,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.724e+02 2.017e+02 2.647e+02 4.792e+02, threshold=4.034e+02, percent-clipped=3.0
+2024-08-29 15:16:36,505 INFO [train.py:1114] (3/4) Epoch 14, batch 2400, loss[loss=0.2076, simple_loss=0.2835, pruned_loss=0.04765, ctc_loss=0.09089, over 19265.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2829, pruned_loss=0.05793, ctc_loss=0.1087, over 3858364.47 frames. ], batch size: 71, lr: 1.05e-02, grad_scale: 32.0
+2024-08-29 15:17:10,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=185488.0, ans=0.125
+2024-08-29 15:17:30,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185594.66666666666, ans=0.1
+2024-08-29 15:17:38,575 INFO [train.py:1114] (3/4) Epoch 14, batch 2450, loss[loss=0.2401, simple_loss=0.2918, pruned_loss=0.06864, ctc_loss=0.1279, over 13755.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2863, pruned_loss=0.06023, ctc_loss=0.1132, over 3730663.52 frames. ], batch size: 140, lr: 1.05e-02, grad_scale: 32.0
+2024-08-29 15:17:41,373 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.39 vs. limit=10.0
+2024-08-29 15:17:48,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=185648.0, ans=0.2
+2024-08-29 15:17:48,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.40 vs. limit=10.0
+2024-08-29 15:17:54,933 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:18:00,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185754.66666666666, ans=0.0
+2024-08-29 15:18:03,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=185754.66666666666, ans=0.025
+2024-08-29 15:18:18,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=10.76 vs. limit=15.0
+2024-08-29 15:19:09,366 INFO [train.py:1114] (3/4) Epoch 15, batch 0, loss[loss=0.2154, simple_loss=0.2655, pruned_loss=0.06015, ctc_loss=0.1124, over 19803.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2655, pruned_loss=0.06015, ctc_loss=0.1124, over 19803.00 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 32.0
+2024-08-29 15:19:09,367 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-29 15:19:20,880 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1908, simple_loss=0.2785, pruned_loss=0.03825, ctc_loss=0.06651, over 944034.00 frames.
+2024-08-29 15:19:20,880 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13716MB
+2024-08-29 15:19:22,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=185856.0, ans=0.125
+2024-08-29 15:19:25,766 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.539e+02 1.942e+02 2.136e+02 2.424e+02 3.799e+02, threshold=4.272e+02, percent-clipped=0.0
+2024-08-29 15:19:32,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=185909.33333333334, ans=0.125
+2024-08-29 15:19:38,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0
+2024-08-29 15:19:56,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=186016.0, ans=0.0
+2024-08-29 15:20:15,882 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:20:25,217 INFO [train.py:1114] (3/4) Epoch 15, batch 50, loss[loss=0.1826, simple_loss=0.2529, pruned_loss=0.0408, ctc_loss=0.07666, over 19728.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2841, pruned_loss=0.05873, ctc_loss=0.1109, over 845438.59 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 32.0
+2024-08-29 15:20:48,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0
+2024-08-29 15:20:59,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=186229.33333333334, ans=0.0
+2024-08-29 15:21:05,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186282.66666666666, ans=0.125
+2024-08-29 15:21:08,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.27 vs. limit=10.0
+2024-08-29 15:21:11,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=186282.66666666666, ans=0.09899494936611666
+2024-08-29 15:21:25,436 INFO [train.py:1114] (3/4) Epoch 15, batch 100, loss[loss=0.1999, simple_loss=0.2707, pruned_loss=0.04725, ctc_loss=0.08665, over 19720.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2853, pruned_loss=0.05833, ctc_loss=0.1101, over 1500617.17 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 32.0
+2024-08-29 15:21:30,077 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.489e+02 1.739e+02 1.952e+02 2.450e+02 4.288e+02, threshold=3.904e+02, percent-clipped=1.0
+2024-08-29 15:21:32,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186389.33333333334, ans=0.0
+2024-08-29 15:21:43,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=186442.66666666666, ans=0.125
+2024-08-29 15:21:46,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0
+2024-08-29 15:21:50,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=12.0
+2024-08-29 15:21:59,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186496.0, ans=0.1
+2024-08-29 15:22:29,354 INFO [train.py:1114] (3/4) Epoch 15, batch 150, loss[loss=0.2192, simple_loss=0.2726, pruned_loss=0.06102, ctc_loss=0.1094, over 19726.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2827, pruned_loss=0.05713, ctc_loss=0.1078, over 2029400.20 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:22:36,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.94 vs. limit=22.5
+2024-08-29 15:23:12,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=186816.0, ans=0.0
+2024-08-29 15:23:28,625 INFO [train.py:1114] (3/4) Epoch 15, batch 200, loss[loss=0.2107, simple_loss=0.2789, pruned_loss=0.05141, ctc_loss=0.09941, over 18291.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2808, pruned_loss=0.0564, ctc_loss=0.1066, over 2436424.74 frames. ], batch size: 85, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:23:29,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0
+2024-08-29 15:23:44,479 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.690e+02 2.002e+02 2.433e+02 3.884e+02, threshold=4.003e+02, percent-clipped=0.0
+2024-08-29 15:23:57,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186976.0, ans=0.125
+2024-08-29 15:25:01,144 INFO [train.py:1114] (3/4) Epoch 15, batch 250, loss[loss=0.247, simple_loss=0.3024, pruned_loss=0.06897, ctc_loss=0.1339, over 19362.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2819, pruned_loss=0.0572, ctc_loss=0.1081, over 2756645.61 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:25:45,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.51 vs. limit=22.5
+2024-08-29 15:25:48,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187349.33333333334, ans=0.1
+2024-08-29 15:26:33,393 INFO [train.py:1114] (3/4) Epoch 15, batch 300, loss[loss=0.2533, simple_loss=0.3093, pruned_loss=0.07137, ctc_loss=0.1362, over 19527.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2815, pruned_loss=0.0571, ctc_loss=0.1079, over 3001399.26 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:26:38,059 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.706e+02 2.088e+02 2.592e+02 3.748e+02, threshold=4.177e+02, percent-clipped=0.0
+2024-08-29 15:27:24,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=187669.33333333334, ans=0.125
+2024-08-29 15:27:30,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=187669.33333333334, ans=0.125
+2024-08-29 15:27:34,913 INFO [train.py:1114] (3/4) Epoch 15, batch 350, loss[loss=0.164, simple_loss=0.2346, pruned_loss=0.03411, ctc_loss=0.06262, over 19756.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2814, pruned_loss=0.05687, ctc_loss=0.1075, over 3191349.76 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 16.0
+2024-08-29 15:27:46,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187776.0, ans=0.1
+2024-08-29 15:28:12,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=187829.33333333334, ans=0.125
+2024-08-29 15:28:28,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=187936.0, ans=0.2
+2024-08-29 15:28:32,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=187936.0, ans=0.125
+2024-08-29 15:28:32,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=187936.0, ans=0.0
+2024-08-29 15:28:38,897 INFO [train.py:1114] (3/4) Epoch 15, batch 400, loss[loss=0.2196, simple_loss=0.2894, pruned_loss=0.05281, ctc_loss=0.1104, over 19495.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2805, pruned_loss=0.05615, ctc_loss=0.1061, over 3343169.69 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:28:44,507 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.706e+02 2.043e+02 2.587e+02 5.210e+02, threshold=4.085e+02, percent-clipped=2.0
+2024-08-29 15:29:22,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0
+2024-08-29 15:29:34,933 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.87 vs. limit=15.0
+2024-08-29 15:29:41,955 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:30:07,911 INFO [train.py:1114] (3/4) Epoch 15, batch 450, loss[loss=0.1945, simple_loss=0.2724, pruned_loss=0.042, ctc_loss=0.08143, over 19615.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2807, pruned_loss=0.05627, ctc_loss=0.1062, over 3451307.41 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:30:10,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=188256.0, ans=0.04949747468305833
+2024-08-29 15:30:17,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=188256.0, ans=0.025
+2024-08-29 15:30:23,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188309.33333333334, ans=0.125
+2024-08-29 15:30:43,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=188416.0, ans=0.125
+2024-08-29 15:31:07,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=188469.33333333334, ans=0.025
+2024-08-29 15:31:09,330 INFO [train.py:1114] (3/4) Epoch 15, batch 500, loss[loss=0.2358, simple_loss=0.2971, pruned_loss=0.06482, ctc_loss=0.1123, over 19672.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2797, pruned_loss=0.05582, ctc_loss=0.1052, over 3546011.40 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:31:15,115 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.681e+02 1.897e+02 2.177e+02 4.545e+02, threshold=3.794e+02, percent-clipped=1.0
+2024-08-29 15:31:20,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=188576.0, ans=0.125
+2024-08-29 15:31:23,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=188576.0, ans=0.0
+2024-08-29 15:31:31,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188629.33333333334, ans=0.125
+2024-08-29 15:32:29,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=188682.66666666666, ans=0.2
+2024-08-29 15:32:37,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=188682.66666666666, ans=0.025
+2024-08-29 15:32:45,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=188736.0, ans=0.025
+2024-08-29 15:32:46,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.38 vs. limit=15.0
+2024-08-29 15:32:59,020 INFO [train.py:1114] (3/4) Epoch 15, batch 550, loss[loss=0.2556, simple_loss=0.3083, pruned_loss=0.07355, ctc_loss=0.1398, over 19330.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2798, pruned_loss=0.05598, ctc_loss=0.1053, over 3607152.83 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:33:06,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=188789.33333333334, ans=0.125
+2024-08-29 15:33:39,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=188789.33333333334, ans=0.025
+2024-08-29 15:33:52,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.60 vs. limit=15.0
+2024-08-29 15:33:55,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=188896.0, ans=0.09899494936611666
+2024-08-29 15:34:03,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=188896.0, ans=0.0
+2024-08-29 15:34:09,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=188949.33333333334, ans=0.0
+2024-08-29 15:34:16,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188949.33333333334, ans=0.1
+2024-08-29 15:34:30,453 INFO [train.py:1114] (3/4) Epoch 15, batch 600, loss[loss=0.2281, simple_loss=0.2957, pruned_loss=0.05904, ctc_loss=0.1062, over 19413.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2801, pruned_loss=0.05595, ctc_loss=0.105, over 3665156.89 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:34:36,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.830e+02 2.111e+02 2.732e+02 4.380e+02, threshold=4.223e+02, percent-clipped=4.0
+2024-08-29 15:34:55,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=189162.66666666666, ans=0.0
+2024-08-29 15:34:58,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=189162.66666666666, ans=0.2
+2024-08-29 15:35:01,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189162.66666666666, ans=0.1
+2024-08-29 15:35:04,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=189162.66666666666, ans=0.07
+2024-08-29 15:35:19,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.02 vs. limit=15.0
+2024-08-29 15:35:22,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.98 vs. limit=22.5
+2024-08-29 15:35:31,329 INFO [train.py:1114] (3/4) Epoch 15, batch 650, loss[loss=0.2038, simple_loss=0.2714, pruned_loss=0.04943, ctc_loss=0.0932, over 19787.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2789, pruned_loss=0.05506, ctc_loss=0.1037, over 3715693.55 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:35:36,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189322.66666666666, ans=0.125
+2024-08-29 15:35:40,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189322.66666666666, ans=0.0
+2024-08-29 15:35:41,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=189322.66666666666, ans=0.125
+2024-08-29 15:35:44,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=189376.0, ans=0.2
+2024-08-29 15:37:58,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189482.66666666666, ans=0.125
+2024-08-29 15:38:00,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=189482.66666666666, ans=0.125
+2024-08-29 15:38:03,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.53 vs. limit=15.0
+2024-08-29 15:38:08,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=189482.66666666666, ans=0.125
+2024-08-29 15:38:22,779 INFO [train.py:1114] (3/4) Epoch 15, batch 700, loss[loss=0.2081, simple_loss=0.2704, pruned_loss=0.05327, ctc_loss=0.09811, over 19715.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.28, pruned_loss=0.05574, ctc_loss=0.1048, over 3748440.39 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:38:28,534 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.846e+02 2.430e+02 3.057e+02 4.272e+02, threshold=4.860e+02, percent-clipped=1.0
+2024-08-29 15:38:51,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=189696.0, ans=0.125
+2024-08-29 15:39:25,952 INFO [train.py:1114] (3/4) Epoch 15, batch 750, loss[loss=0.2383, simple_loss=0.2943, pruned_loss=0.06593, ctc_loss=0.1261, over 19493.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2793, pruned_loss=0.05543, ctc_loss=0.1041, over 3775284.96 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:39:30,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=189856.0, ans=0.125
+2024-08-29 15:39:41,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=189909.33333333334, ans=0.0
+2024-08-29 15:39:43,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=189909.33333333334, ans=0.125
+2024-08-29 15:39:59,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=189962.66666666666, ans=0.0
+2024-08-29 15:40:10,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=190016.0, ans=0.125
+2024-08-29 15:40:15,480 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=6.152e-02
+2024-08-29 15:40:28,197 INFO [train.py:1114] (3/4) Epoch 15, batch 800, loss[loss=0.1963, simple_loss=0.255, pruned_loss=0.04942, ctc_loss=0.09701, over 19789.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2789, pruned_loss=0.05515, ctc_loss=0.1038, over 3796497.36 frames. ], batch size: 49, lr: 1.01e-02, grad_scale: 32.0
+2024-08-29 15:40:34,420 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.728e+02 2.068e+02 2.494e+02 4.984e+02, threshold=4.135e+02, percent-clipped=1.0
+2024-08-29 15:40:48,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=190176.0, ans=0.0
+2024-08-29 15:41:03,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190282.66666666666, ans=0.1
+2024-08-29 15:41:21,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190336.0, ans=0.1
+2024-08-29 15:41:30,889 INFO [train.py:1114] (3/4) Epoch 15, batch 850, loss[loss=0.2429, simple_loss=0.3039, pruned_loss=0.06593, ctc_loss=0.125, over 19675.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2787, pruned_loss=0.05499, ctc_loss=0.1036, over 3815182.91 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:41:39,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=190389.33333333334, ans=0.0
+2024-08-29 15:41:48,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=190442.66666666666, ans=0.0
+2024-08-29 15:41:53,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0
+2024-08-29 15:41:58,267 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:42:13,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.69 vs. limit=15.0
+2024-08-29 15:42:34,688 INFO [train.py:1114] (3/4) Epoch 15, batch 900, loss[loss=0.1921, simple_loss=0.2605, pruned_loss=0.04502, ctc_loss=0.08434, over 19821.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2788, pruned_loss=0.05516, ctc_loss=0.104, over 3818290.68 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:42:40,569 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.386e+02 1.760e+02 2.061e+02 2.441e+02 4.748e+02, threshold=4.121e+02, percent-clipped=4.0
+2024-08-29 15:43:10,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=190762.66666666666, ans=0.2
+2024-08-29 15:43:18,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.35 vs. limit=22.5
+2024-08-29 15:43:28,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=190816.0, ans=0.0
+2024-08-29 15:43:44,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=190869.33333333334, ans=0.1
+2024-08-29 15:43:45,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=190922.66666666666, ans=0.07
+2024-08-29 15:43:47,839 INFO [train.py:1114] (3/4) Epoch 15, batch 950, loss[loss=0.2196, simple_loss=0.2736, pruned_loss=0.06054, ctc_loss=0.1113, over 19516.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2795, pruned_loss=0.05575, ctc_loss=0.1053, over 3821023.95 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:44:03,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=190976.0, ans=0.0
+2024-08-29 15:44:07,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=190976.0, ans=0.125
+2024-08-29 15:44:08,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.95 vs. limit=22.5
+2024-08-29 15:44:21,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=191029.33333333334, ans=0.125
+2024-08-29 15:44:22,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=191029.33333333334, ans=0.125
+2024-08-29 15:44:26,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191082.66666666666, ans=0.0
+2024-08-29 15:44:40,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.92 vs. limit=22.5
+2024-08-29 15:44:48,249 INFO [train.py:1114] (3/4) Epoch 15, batch 1000, loss[loss=0.1821, simple_loss=0.2542, pruned_loss=0.04024, ctc_loss=0.07365, over 19857.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2801, pruned_loss=0.05618, ctc_loss=0.1063, over 3817004.52 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:44:54,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=191189.33333333334, ans=0.0
+2024-08-29 15:44:56,839 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.479e+02 1.691e+02 1.934e+02 2.300e+02 3.610e+02, threshold=3.869e+02, percent-clipped=0.0
+2024-08-29 15:45:29,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=15.0
+2024-08-29 15:45:41,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=191402.66666666666, ans=0.125
+2024-08-29 15:45:42,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=191402.66666666666, ans=0.125
+2024-08-29 15:45:52,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=191456.0, ans=0.0
+2024-08-29 15:45:53,376 INFO [train.py:1114] (3/4) Epoch 15, batch 1050, loss[loss=0.2148, simple_loss=0.2883, pruned_loss=0.05133, ctc_loss=0.09686, over 19829.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2794, pruned_loss=0.05601, ctc_loss=0.1058, over 3822843.87 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:46:21,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=191562.66666666666, ans=0.125
+2024-08-29 15:46:25,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=191562.66666666666, ans=0.125
+2024-08-29 15:46:36,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191616.0, ans=0.125
+2024-08-29 15:46:53,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191722.66666666666, ans=0.0
+2024-08-29 15:46:54,816 INFO [train.py:1114] (3/4) Epoch 15, batch 1100, loss[loss=0.1992, simple_loss=0.2714, pruned_loss=0.0457, ctc_loss=0.08912, over 19589.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2797, pruned_loss=0.05596, ctc_loss=0.1057, over 3831076.80 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:46:57,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=191722.66666666666, ans=0.125
+2024-08-29 15:47:17,581 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.746e+02 1.965e+02 2.496e+02 3.903e+02, threshold=3.929e+02, percent-clipped=1.0
+2024-08-29 15:47:17,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=191722.66666666666, ans=0.2
+2024-08-29 15:47:30,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191776.0, ans=0.0
+2024-08-29 15:47:45,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191829.33333333334, ans=0.1
+2024-08-29 15:48:02,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=191936.0, ans=0.0
+2024-08-29 15:48:08,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=191936.0, ans=0.125
+2024-08-29 15:48:09,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191936.0, ans=0.1
+2024-08-29 15:48:09,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=191936.0, ans=0.0
+2024-08-29 15:48:12,572 INFO [train.py:1114] (3/4) Epoch 15, batch 1150, loss[loss=0.2333, simple_loss=0.2886, pruned_loss=0.06437, ctc_loss=0.1231, over 19581.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2798, pruned_loss=0.05598, ctc_loss=0.1055, over 3828649.12 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:48:18,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=191989.33333333334, ans=0.2
+2024-08-29 15:48:21,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191989.33333333334, ans=0.0
+2024-08-29 15:48:22,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=191989.33333333334, ans=0.0
+2024-08-29 15:48:25,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=192042.66666666666, ans=0.025
+2024-08-29 15:48:29,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=192042.66666666666, ans=0.04949747468305833
+2024-08-29 15:48:31,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192042.66666666666, ans=0.125
+2024-08-29 15:49:04,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.75 vs. limit=22.5
+2024-08-29 15:49:08,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=192202.66666666666, ans=0.0
+2024-08-29 15:49:11,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192202.66666666666, ans=0.1
+2024-08-29 15:49:16,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192202.66666666666, ans=0.1
+2024-08-29 15:49:17,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=192202.66666666666, ans=0.0
+2024-08-29 15:49:19,908 INFO [train.py:1114] (3/4) Epoch 15, batch 1200, loss[loss=0.2343, simple_loss=0.2996, pruned_loss=0.06097, ctc_loss=0.1176, over 19844.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2812, pruned_loss=0.05674, ctc_loss=0.107, over 3825254.90 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:49:26,217 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 1.719e+02 2.001e+02 2.349e+02 3.398e+02, threshold=4.002e+02, percent-clipped=0.0
+2024-08-29 15:49:28,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.08 vs. limit=22.5
+2024-08-29 15:49:36,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192309.33333333334, ans=0.1
+2024-08-29 15:50:01,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=192416.0, ans=0.0
+2024-08-29 15:50:04,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192416.0, ans=0.125
+2024-08-29 15:50:04,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.56 vs. limit=15.0
+2024-08-29 15:50:18,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=192469.33333333334, ans=0.0
+2024-08-29 15:50:24,185 INFO [train.py:1114] (3/4) Epoch 15, batch 1250, loss[loss=0.268, simple_loss=0.3139, pruned_loss=0.08021, ctc_loss=0.1544, over 19533.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2817, pruned_loss=0.05671, ctc_loss=0.1069, over 3842670.75 frames. ], batch size: 61, lr: 1.00e-02, grad_scale: 32.0
+2024-08-29 15:50:24,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.97 vs. limit=22.5
+2024-08-29 15:50:50,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=192629.33333333334, ans=0.0
+2024-08-29 15:51:02,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=192682.66666666666, ans=0.0
+2024-08-29 15:51:13,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=192736.0, ans=0.0
+2024-08-29 15:51:25,187 INFO [train.py:1114] (3/4) Epoch 15, batch 1300, loss[loss=0.2156, simple_loss=0.2829, pruned_loss=0.05371, ctc_loss=0.1021, over 18844.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2805, pruned_loss=0.05602, ctc_loss=0.1057, over 3846304.33 frames. 
], batch size: 76, lr: 9.99e-03, grad_scale: 32.0 +2024-08-29 15:51:30,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192789.33333333334, ans=0.1 +2024-08-29 15:52:15,028 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.668e+02 1.955e+02 2.455e+02 4.261e+02, threshold=3.910e+02, percent-clipped=2.0 +2024-08-29 15:52:20,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=192842.66666666666, ans=0.0 +2024-08-29 15:52:28,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192842.66666666666, ans=0.1 +2024-08-29 15:52:34,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=192896.0, ans=0.0 +2024-08-29 15:52:36,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192896.0, ans=0.125 +2024-08-29 15:52:36,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192896.0, ans=0.1 +2024-08-29 15:53:03,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193002.66666666666, ans=0.1 +2024-08-29 15:53:07,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=193002.66666666666, ans=0.125 +2024-08-29 15:53:10,983 INFO [train.py:1114] (3/4) Epoch 15, batch 1350, loss[loss=0.2185, simple_loss=0.2877, pruned_loss=0.05406, ctc_loss=0.1028, over 19755.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2794, pruned_loss=0.05542, ctc_loss=0.1044, over 3857938.98 frames. ], batch size: 54, lr: 9.98e-03, grad_scale: 32.0 +2024-08-29 15:53:15,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=193056.0, ans=0.2 +2024-08-29 15:53:28,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=193109.33333333334, ans=0.2 +2024-08-29 15:53:36,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.22 vs. limit=15.0 +2024-08-29 15:54:14,978 INFO [train.py:1114] (3/4) Epoch 15, batch 1400, loss[loss=0.1799, simple_loss=0.2446, pruned_loss=0.04175, ctc_loss=0.07907, over 19659.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2789, pruned_loss=0.05525, ctc_loss=0.1041, over 3864298.37 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0 +2024-08-29 15:54:36,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.20 vs. 
limit=22.5 +2024-08-29 15:54:37,471 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.658e+02 1.833e+02 2.351e+02 3.730e+02, threshold=3.665e+02, percent-clipped=0.0 +2024-08-29 15:55:08,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193429.33333333334, ans=0.125 +2024-08-29 15:55:11,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=193429.33333333334, ans=0.0 +2024-08-29 15:55:43,694 INFO [train.py:1114] (3/4) Epoch 15, batch 1450, loss[loss=0.2404, simple_loss=0.3049, pruned_loss=0.06375, ctc_loss=0.1209, over 19675.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2799, pruned_loss=0.05592, ctc_loss=0.1053, over 3862218.51 frames. ], batch size: 63, lr: 9.97e-03, grad_scale: 32.0 +2024-08-29 15:55:43,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=193589.33333333334, ans=0.2 +2024-08-29 15:55:52,027 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:56:35,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.18 vs. limit=6.0 +2024-08-29 15:56:43,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193802.66666666666, ans=0.0 +2024-08-29 15:56:45,749 INFO [train.py:1114] (3/4) Epoch 15, batch 1500, loss[loss=0.2268, simple_loss=0.2902, pruned_loss=0.05967, ctc_loss=0.1104, over 19575.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2801, pruned_loss=0.05592, ctc_loss=0.1055, over 3862151.23 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 32.0 +2024-08-29 15:56:52,420 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.416e+02 1.660e+02 1.885e+02 2.337e+02 4.281e+02, threshold=3.770e+02, percent-clipped=2.0 +2024-08-29 15:57:03,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=12.0 +2024-08-29 15:57:05,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.91 vs. limit=22.5 +2024-08-29 15:57:25,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0 +2024-08-29 15:57:36,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=194069.33333333334, ans=0.0 +2024-08-29 15:57:51,459 INFO [train.py:1114] (3/4) Epoch 15, batch 1550, loss[loss=0.2528, simple_loss=0.3069, pruned_loss=0.0718, ctc_loss=0.138, over 19612.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2802, pruned_loss=0.05615, ctc_loss=0.1061, over 3846859.26 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0 +2024-08-29 15:57:58,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.12 vs. 
limit=15.0 +2024-08-29 15:58:19,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=194229.33333333334, ans=0.125 +2024-08-29 15:58:21,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=194229.33333333334, ans=0.05 +2024-08-29 15:58:23,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.18 vs. limit=15.0 +2024-08-29 15:58:29,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=194282.66666666666, ans=0.0 +2024-08-29 15:58:31,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-29 15:58:35,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=194282.66666666666, ans=0.025 +2024-08-29 15:58:52,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=194389.33333333334, ans=0.0 +2024-08-29 15:58:53,438 INFO [train.py:1114] (3/4) Epoch 15, batch 1600, loss[loss=0.2103, simple_loss=0.2843, pruned_loss=0.04943, ctc_loss=0.09369, over 19843.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2796, pruned_loss=0.05597, ctc_loss=0.1055, over 3836220.94 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0 +2024-08-29 15:58:59,523 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.762e+02 2.164e+02 2.478e+02 4.927e+02, threshold=4.328e+02, percent-clipped=7.0 +2024-08-29 15:59:53,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=15.0 +2024-08-29 15:59:57,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=194442.66666666666, ans=0.2 +2024-08-29 16:00:05,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.81 vs. limit=22.5 +2024-08-29 16:00:10,521 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:00:21,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=194549.33333333334, ans=0.0 +2024-08-29 16:00:35,212 INFO [train.py:1114] (3/4) Epoch 15, batch 1650, loss[loss=0.2232, simple_loss=0.2823, pruned_loss=0.05951, ctc_loss=0.1126, over 19672.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2794, pruned_loss=0.05582, ctc_loss=0.1052, over 3833576.65 frames. 
], batch size: 59, lr: 9.94e-03, grad_scale: 32.0 +2024-08-29 16:00:40,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194656.0, ans=0.125 +2024-08-29 16:00:44,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=194656.0, ans=0.125 +2024-08-29 16:00:51,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=194709.33333333334, ans=0.025 +2024-08-29 16:01:03,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=194762.66666666666, ans=0.07 +2024-08-29 16:01:05,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194762.66666666666, ans=0.1 +2024-08-29 16:01:20,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=194816.0, ans=0.0 +2024-08-29 16:01:21,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=194816.0, ans=0.125 +2024-08-29 16:01:30,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=194869.33333333334, ans=0.025 +2024-08-29 16:01:38,034 INFO [train.py:1114] (3/4) Epoch 15, batch 1700, loss[loss=0.1963, simple_loss=0.2552, pruned_loss=0.04992, ctc_loss=0.09407, over 19681.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2792, pruned_loss=0.05537, ctc_loss=0.1043, over 3847382.16 frames. ], batch size: 46, lr: 9.94e-03, grad_scale: 32.0 +2024-08-29 16:01:44,058 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 1.696e+02 2.083e+02 2.797e+02 4.802e+02, threshold=4.167e+02, percent-clipped=3.0 +2024-08-29 16:01:47,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=194922.66666666666, ans=0.1 +2024-08-29 16:01:59,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=194976.0, ans=0.125 +2024-08-29 16:02:10,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-08-29 16:02:17,936 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=15.0 +2024-08-29 16:02:40,456 INFO [train.py:1114] (3/4) Epoch 15, batch 1750, loss[loss=0.1845, simple_loss=0.2473, pruned_loss=0.04466, ctc_loss=0.08095, over 19631.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2791, pruned_loss=0.05554, ctc_loss=0.1044, over 3852360.31 frames. 
], batch size: 45, lr: 9.93e-03, grad_scale: 32.0 +2024-08-29 16:02:57,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=195242.66666666666, ans=0.125 +2024-08-29 16:02:57,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195242.66666666666, ans=0.1 +2024-08-29 16:02:59,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195242.66666666666, ans=0.0 +2024-08-29 16:03:16,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195349.33333333334, ans=0.1 +2024-08-29 16:03:16,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=195349.33333333334, ans=0.125 +2024-08-29 16:03:21,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=195349.33333333334, ans=0.09899494936611666 +2024-08-29 16:03:31,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=195402.66666666666, ans=0.0 +2024-08-29 16:03:32,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195402.66666666666, ans=0.125 +2024-08-29 16:03:37,864 INFO [train.py:1114] (3/4) Epoch 15, batch 1800, loss[loss=0.2241, simple_loss=0.2856, pruned_loss=0.05822, ctc_loss=0.1155, over 19619.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2791, pruned_loss=0.05543, ctc_loss=0.1042, over 3854242.67 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 32.0 +2024-08-29 16:03:43,641 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.702e+02 2.083e+02 2.690e+02 4.339e+02, threshold=4.166e+02, percent-clipped=1.0 +2024-08-29 16:03:57,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=195509.33333333334, ans=0.0 +2024-08-29 16:04:00,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=195562.66666666666, ans=0.0 +2024-08-29 16:04:13,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195616.0, ans=0.125 +2024-08-29 16:04:20,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.02 vs. limit=15.0 +2024-08-29 16:04:28,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=195669.33333333334, ans=0.5 +2024-08-29 16:04:34,673 INFO [train.py:1114] (3/4) Epoch 15, batch 1850, loss[loss=0.2337, simple_loss=0.3017, pruned_loss=0.06078, ctc_loss=0.1106, over 19596.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2792, pruned_loss=0.05546, ctc_loss=0.1041, over 3857016.34 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 32.0 +2024-08-29 16:04:36,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.70 vs. 
limit=15.0 +2024-08-29 16:04:55,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=195829.33333333334, ans=0.0 +2024-08-29 16:04:59,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.24 vs. limit=15.0 +2024-08-29 16:05:16,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=195882.66666666666, ans=0.0 +2024-08-29 16:05:21,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.93 vs. limit=15.0 +2024-08-29 16:05:23,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.21 vs. limit=6.0 +2024-08-29 16:05:28,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=195936.0, ans=22.5 +2024-08-29 16:05:35,600 INFO [train.py:1114] (3/4) Epoch 15, batch 1900, loss[loss=0.1888, simple_loss=0.2737, pruned_loss=0.03713, ctc_loss=0.07414, over 19674.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2794, pruned_loss=0.05541, ctc_loss=0.1041, over 3861337.71 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 32.0 +2024-08-29 16:05:40,971 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.724e+02 2.102e+02 3.115e+02 5.340e+02, threshold=4.204e+02, percent-clipped=3.0 +2024-08-29 16:05:46,880 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:05:51,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196042.66666666666, ans=0.1 +2024-08-29 16:06:03,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=196096.0, ans=0.125 +2024-08-29 16:06:04,717 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:06:05,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=196096.0, ans=0.0 +2024-08-29 16:06:11,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=196149.33333333334, ans=0.0 +2024-08-29 16:06:12,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=22.5 +2024-08-29 16:06:32,319 INFO [train.py:1114] (3/4) Epoch 15, batch 1950, loss[loss=0.1998, simple_loss=0.2673, pruned_loss=0.04744, ctc_loss=0.0938, over 19588.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2803, pruned_loss=0.05537, ctc_loss=0.1038, over 3870605.46 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 32.0 +2024-08-29 16:06:45,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196256.0, ans=0.125 +2024-08-29 16:06:46,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.90 vs. 
limit=15.0 +2024-08-29 16:06:56,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0 +2024-08-29 16:07:18,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=196416.0, ans=0.07 +2024-08-29 16:07:35,223 INFO [train.py:1114] (3/4) Epoch 15, batch 2000, loss[loss=0.1731, simple_loss=0.2412, pruned_loss=0.03823, ctc_loss=0.07157, over 19650.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2805, pruned_loss=0.05535, ctc_loss=0.1038, over 3854597.70 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0 +2024-08-29 16:07:41,135 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.618e+02 1.832e+02 2.132e+02 4.362e+02, threshold=3.664e+02, percent-clipped=1.0 +2024-08-29 16:08:13,237 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:08:23,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=196736.0, ans=0.125 +2024-08-29 16:08:32,352 INFO [train.py:1114] (3/4) Epoch 15, batch 2050, loss[loss=0.2242, simple_loss=0.2718, pruned_loss=0.06372, ctc_loss=0.1228, over 19729.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2799, pruned_loss=0.05554, ctc_loss=0.1043, over 3850197.16 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0 +2024-08-29 16:08:42,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.48 vs. limit=22.5 +2024-08-29 16:08:45,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=196842.66666666666, ans=0.95 +2024-08-29 16:09:00,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196896.0, ans=0.125 +2024-08-29 16:09:05,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.90 vs. limit=15.0 +2024-08-29 16:09:05,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.16 vs. limit=12.0 +2024-08-29 16:09:07,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=196949.33333333334, ans=0.125 +2024-08-29 16:09:11,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196949.33333333334, ans=0.1 +2024-08-29 16:09:27,721 INFO [train.py:1114] (3/4) Epoch 15, batch 2100, loss[loss=0.231, simple_loss=0.2922, pruned_loss=0.06283, ctc_loss=0.1102, over 19757.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2793, pruned_loss=0.05518, ctc_loss=0.1036, over 3858054.45 frames. 
], batch size: 54, lr: 9.88e-03, grad_scale: 32.0 +2024-08-29 16:09:33,396 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.691e+02 1.929e+02 2.354e+02 3.359e+02, threshold=3.858e+02, percent-clipped=0.0 +2024-08-29 16:09:40,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=197109.33333333334, ans=0.2 +2024-08-29 16:10:06,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=197216.0, ans=0.125 +2024-08-29 16:10:22,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=197269.33333333334, ans=0.0 +2024-08-29 16:10:26,361 INFO [train.py:1114] (3/4) Epoch 15, batch 2150, loss[loss=0.2106, simple_loss=0.2756, pruned_loss=0.05246, ctc_loss=0.1014, over 19846.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2787, pruned_loss=0.05507, ctc_loss=0.1034, over 3870024.16 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0 +2024-08-29 16:10:34,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=197322.66666666666, ans=0.0 +2024-08-29 16:12:01,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.49 vs. limit=15.0 +2024-08-29 16:12:03,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=197429.33333333334, ans=0.125 +2024-08-29 16:12:14,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197482.66666666666, ans=0.1 +2024-08-29 16:12:20,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0 +2024-08-29 16:12:25,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197536.0, ans=0.1 +2024-08-29 16:12:31,434 INFO [train.py:1114] (3/4) Epoch 15, batch 2200, loss[loss=0.221, simple_loss=0.2887, pruned_loss=0.05551, ctc_loss=0.1055, over 19591.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2788, pruned_loss=0.05503, ctc_loss=0.1034, over 3868630.81 frames. ], batch size: 57, lr: 9.87e-03, grad_scale: 32.0 +2024-08-29 16:12:36,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.787e+02 2.154e+02 2.730e+02 5.047e+02, threshold=4.308e+02, percent-clipped=4.0 +2024-08-29 16:12:48,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=197642.66666666666, ans=0.125 +2024-08-29 16:12:49,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=12.0 +2024-08-29 16:12:55,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=197696.0, ans=0.07 +2024-08-29 16:13:23,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=197802.66666666666, ans=0.0 +2024-08-29 16:13:29,260 INFO [train.py:1114] (3/4) Epoch 15, batch 2250, loss[loss=0.1955, simple_loss=0.2769, pruned_loss=0.04144, ctc_loss=0.07798, over 19617.00 frames. 
], tot_loss[loss=0.2155, simple_loss=0.2792, pruned_loss=0.05517, ctc_loss=0.1038, over 3868730.69 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 32.0 +2024-08-29 16:13:29,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=197856.0, ans=0.0 +2024-08-29 16:13:32,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197856.0, ans=0.125 +2024-08-29 16:13:56,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=197856.0, ans=0.125 +2024-08-29 16:14:32,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=198069.33333333334, ans=0.125 +2024-08-29 16:14:38,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=198069.33333333334, ans=0.125 +2024-08-29 16:14:43,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.18 vs. limit=10.0 +2024-08-29 16:14:45,279 INFO [train.py:1114] (3/4) Epoch 15, batch 2300, loss[loss=0.1973, simple_loss=0.2634, pruned_loss=0.04813, ctc_loss=0.08737, over 19511.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2783, pruned_loss=0.0552, ctc_loss=0.1036, over 3862189.96 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 32.0 +2024-08-29 16:14:50,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.686e+02 1.986e+02 2.467e+02 4.553e+02, threshold=3.971e+02, percent-clipped=1.0 +2024-08-29 16:14:55,636 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=5.232e-03 +2024-08-29 16:15:12,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198229.33333333334, ans=0.125 +2024-08-29 16:15:19,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=198282.66666666666, ans=0.5 +2024-08-29 16:15:43,140 INFO [train.py:1114] (3/4) Epoch 15, batch 2350, loss[loss=0.2245, simple_loss=0.2961, pruned_loss=0.05562, ctc_loss=0.1039, over 19665.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2777, pruned_loss=0.05487, ctc_loss=0.103, over 3864387.38 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 64.0 +2024-08-29 16:15:45,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=198389.33333333334, ans=0.0 +2024-08-29 16:15:49,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198389.33333333334, ans=0.125 +2024-08-29 16:15:50,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198389.33333333334, ans=0.0 +2024-08-29 16:15:53,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=198442.66666666666, ans=0.125 +2024-08-29 16:16:08,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.96 vs. 
limit=15.0 +2024-08-29 16:16:10,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.40 vs. limit=15.0 +2024-08-29 16:16:13,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198496.0, ans=0.125 +2024-08-29 16:16:25,077 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:16:42,892 INFO [train.py:1114] (3/4) Epoch 15, batch 2400, loss[loss=0.2445, simple_loss=0.3031, pruned_loss=0.06856, ctc_loss=0.1217, over 19298.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2802, pruned_loss=0.05587, ctc_loss=0.1047, over 3858996.48 frames. ], batch size: 71, lr: 9.85e-03, grad_scale: 64.0 +2024-08-29 16:16:46,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.20 vs. limit=10.0 +2024-08-29 16:16:48,392 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.659e+02 1.944e+02 2.492e+02 3.873e+02, threshold=3.888e+02, percent-clipped=0.0 +2024-08-29 16:16:48,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=198656.0, ans=0.2 +2024-08-29 16:16:48,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198656.0, ans=0.1 +2024-08-29 16:16:58,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=198709.33333333334, ans=0.0 +2024-08-29 16:16:59,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=198709.33333333334, ans=0.125 +2024-08-29 16:18:01,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=198762.66666666666, ans=0.95 +2024-08-29 16:18:30,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.80 vs. limit=22.5 +2024-08-29 16:18:33,172 INFO [train.py:1114] (3/4) Epoch 15, batch 2450, loss[loss=0.294, simple_loss=0.3207, pruned_loss=0.09749, ctc_loss=0.181, over 13070.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2843, pruned_loss=0.0588, ctc_loss=0.1107, over 3732883.61 frames. 
], batch size: 140, lr: 9.84e-03, grad_scale: 32.0 +2024-08-29 16:18:53,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198976.0, ans=0.0 +2024-08-29 16:19:04,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=199029.33333333334, ans=0.2 +2024-08-29 16:19:05,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=199029.33333333334, ans=0.025 +2024-08-29 16:19:05,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=199029.33333333334, ans=0.125 +2024-08-29 16:19:06,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=199082.66666666666, ans=0.0 +2024-08-29 16:19:12,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0 +2024-08-29 16:20:18,424 INFO [train.py:1114] (3/4) Epoch 16, batch 0, loss[loss=0.1853, simple_loss=0.2482, pruned_loss=0.04497, ctc_loss=0.08101, over 19393.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2482, pruned_loss=0.04497, ctc_loss=0.08101, over 19393.00 frames. ], batch size: 48, lr: 9.52e-03, grad_scale: 32.0 +2024-08-29 16:20:18,424 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 16:20:28,425 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1867, simple_loss=0.2755, pruned_loss=0.03636, ctc_loss=0.06317, over 944034.00 frames. +2024-08-29 16:20:28,426 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13716MB +2024-08-29 16:20:28,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=199130.66666666666, ans=0.0 +2024-08-29 16:20:34,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199130.66666666666, ans=0.125 +2024-08-29 16:20:40,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=199184.0, ans=0.125 +2024-08-29 16:20:48,958 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.810e+02 1.998e+02 2.276e+02 3.528e+02, threshold=3.997e+02, percent-clipped=0.0 +2024-08-29 16:20:50,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=199184.0, ans=0.125 +2024-08-29 16:21:07,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=199290.66666666666, ans=0.2 +2024-08-29 16:21:08,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=199290.66666666666, ans=0.025 +2024-08-29 16:21:32,417 INFO [train.py:1114] (3/4) Epoch 16, batch 50, loss[loss=0.2027, simple_loss=0.2594, pruned_loss=0.05301, ctc_loss=0.09983, over 19689.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2804, pruned_loss=0.05563, ctc_loss=0.1051, over 843817.09 frames. 
], batch size: 47, lr: 9.51e-03, grad_scale: 32.0 +2024-08-29 16:21:32,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=199397.33333333334, ans=0.0 +2024-08-29 16:22:28,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.21 vs. limit=15.0 +2024-08-29 16:22:40,105 INFO [train.py:1114] (3/4) Epoch 16, batch 100, loss[loss=0.1886, simple_loss=0.2613, pruned_loss=0.04167, ctc_loss=0.0814, over 19715.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2833, pruned_loss=0.05667, ctc_loss=0.107, over 1497155.71 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0 +2024-08-29 16:23:04,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=199717.33333333334, ans=15.0 +2024-08-29 16:23:07,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199717.33333333334, ans=0.125 +2024-08-29 16:23:08,063 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.352e+02 1.815e+02 2.137e+02 2.569e+02 4.869e+02, threshold=4.274e+02, percent-clipped=1.0 +2024-08-29 16:23:13,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199717.33333333334, ans=0.125 +2024-08-29 16:30:49,566 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 16:37:11,211 INFO [train.py:1114] (3/4) Epoch 16, batch 150, loss[loss=0.1851, simple_loss=0.2432, pruned_loss=0.04602, ctc_loss=0.08715, over 19693.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.279, pruned_loss=0.05463, ctc_loss=0.1031, over 2025780.12 frames. ], batch size: 47, lr: 9.50e-03, grad_scale: 32.0 +2024-08-29 16:45:46,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=200090.66666666666, ans=0.2 +2024-08-29 16:46:51,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=200144.0, ans=0.0 +2024-08-29 16:47:03,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=200144.0, ans=0.125 +2024-08-29 16:48:03,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200144.0, ans=0.1 +2024-08-29 16:48:04,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=15.0 +2024-08-29 16:48:09,836 INFO [train.py:1114] (3/4) Epoch 16, batch 200, loss[loss=0.2428, simple_loss=0.3015, pruned_loss=0.06637, ctc_loss=0.1284, over 18381.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2781, pruned_loss=0.05396, ctc_loss=0.1018, over 2433521.47 frames. 
], batch size: 85, lr: 9.49e-03, grad_scale: 32.0 +2024-08-29 16:49:16,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=200197.33333333334, ans=0.125 +2024-08-29 16:49:56,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=200197.33333333334, ans=0.0 +2024-08-29 16:49:56,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=200197.33333333334, ans=0.025 +2024-08-29 16:50:14,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=200250.66666666666, ans=0.025 +2024-08-29 16:53:29,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.321e+02 1.834e+02 2.227e+02 2.815e+02 4.534e+02, threshold=4.454e+02, percent-clipped=1.0 +2024-08-29 16:53:55,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200304.0, ans=0.1 +2024-08-29 16:56:29,762 INFO [train.py:1114] (3/4) Epoch 16, batch 250, loss[loss=0.2679, simple_loss=0.3151, pruned_loss=0.08053, ctc_loss=0.1491, over 19394.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2787, pruned_loss=0.05419, ctc_loss=0.1023, over 2754594.03 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0 +2024-08-29 16:56:40,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=200464.0, ans=0.125 +2024-08-29 16:58:34,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0 +2024-08-29 16:58:51,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=200570.66666666666, ans=0.035 +2024-08-29 16:58:54,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200570.66666666666, ans=0.1 +2024-08-29 16:59:04,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=200624.0, ans=0.0 +2024-08-29 16:59:34,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=200677.33333333334, ans=0.2 +2024-08-29 17:01:56,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=200677.33333333334, ans=0.95 +2024-08-29 17:02:02,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.30 vs. limit=15.0 +2024-08-29 17:02:22,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=200677.33333333334, ans=0.2 +2024-08-29 17:03:13,497 INFO [train.py:1114] (3/4) Epoch 16, batch 300, loss[loss=0.2191, simple_loss=0.287, pruned_loss=0.05477, ctc_loss=0.104, over 19561.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.278, pruned_loss=0.05396, ctc_loss=0.102, over 3000245.92 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0 +2024-08-29 17:03:15,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.86 vs. 
limit=10.0 +2024-08-29 17:03:15,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0 +2024-08-29 17:03:36,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.663e+02 1.972e+02 2.398e+02 4.674e+02, threshold=3.943e+02, percent-clipped=1.0 +2024-08-29 17:03:37,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=200784.0, ans=0.2 +2024-08-29 17:04:38,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=15.0 +2024-08-29 17:06:05,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=200837.33333333334, ans=0.0 +2024-08-29 17:07:54,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=200890.66666666666, ans=0.0 +2024-08-29 17:08:30,915 INFO [train.py:1114] (3/4) Epoch 16, batch 350, loss[loss=0.1888, simple_loss=0.2524, pruned_loss=0.04695, ctc_loss=0.07831, over 19765.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2786, pruned_loss=0.05403, ctc_loss=0.1019, over 3190397.90 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0 +2024-08-29 17:08:33,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=200997.33333333334, ans=0.125 +2024-08-29 17:12:11,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=201104.0, ans=0.125 +2024-08-29 17:12:33,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=201157.33333333334, ans=0.125 +2024-08-29 17:13:10,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201210.66666666666, ans=0.1 +2024-08-29 17:13:11,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201210.66666666666, ans=0.1 +2024-08-29 17:13:12,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201210.66666666666, ans=0.125 +2024-08-29 17:13:17,606 INFO [train.py:1114] (3/4) Epoch 16, batch 400, loss[loss=0.21, simple_loss=0.2793, pruned_loss=0.05131, ctc_loss=0.09505, over 19500.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2771, pruned_loss=0.05331, ctc_loss=0.1006, over 3343357.30 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0 +2024-08-29 17:13:28,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.10 vs. limit=22.5 +2024-08-29 17:13:36,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.60 vs. limit=15.0 +2024-08-29 17:13:45,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.35 vs. 
limit=15.0 +2024-08-29 17:15:22,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=201317.33333333334, ans=0.125 +2024-08-29 17:15:51,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.714e+02 1.905e+02 2.508e+02 3.565e+02, threshold=3.811e+02, percent-clipped=0.0 +2024-08-29 17:16:23,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201370.66666666666, ans=0.125 +2024-08-29 17:16:35,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=12.0 +2024-08-29 17:16:38,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=201424.0, ans=0.125 +2024-08-29 17:16:49,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.whiten.whitening_limit, batch_count=201477.33333333334, ans=15.0 +2024-08-29 17:17:07,840 INFO [train.py:1114] (3/4) Epoch 16, batch 450, loss[loss=0.207, simple_loss=0.2811, pruned_loss=0.04721, ctc_loss=0.09633, over 19621.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2778, pruned_loss=0.05385, ctc_loss=0.1015, over 3450908.43 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0 +2024-08-29 17:18:11,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0 +2024-08-29 17:20:51,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=15.0 +2024-08-29 17:21:30,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.57 vs. limit=15.0 +2024-08-29 17:21:35,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=201744.0, ans=0.0 +2024-08-29 17:21:57,452 INFO [train.py:1114] (3/4) Epoch 16, batch 500, loss[loss=0.2327, simple_loss=0.3048, pruned_loss=0.05828, ctc_loss=0.1102, over 19654.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2775, pruned_loss=0.05373, ctc_loss=0.1014, over 3546959.70 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0 +2024-08-29 17:21:57,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=201797.33333333334, ans=0.125 +2024-08-29 17:22:46,943 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.430e+02 1.689e+02 2.169e+02 2.570e+02 5.370e+02, threshold=4.338e+02, percent-clipped=3.0 +2024-08-29 17:23:27,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-08-29 17:23:57,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.94 vs. limit=15.0 +2024-08-29 17:23:58,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202010.66666666666, ans=0.125 +2024-08-29 17:24:02,823 INFO [train.py:1114] (3/4) Epoch 16, batch 550, loss[loss=0.2366, simple_loss=0.2953, pruned_loss=0.06411, ctc_loss=0.1242, over 19186.00 frames. 
], tot_loss[loss=0.2116, simple_loss=0.2767, pruned_loss=0.05312, ctc_loss=0.1005, over 3608772.35 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0 +2024-08-29 17:24:16,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=202064.0, ans=0.0 +2024-08-29 17:24:32,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=202117.33333333334, ans=0.125 +2024-08-29 17:24:33,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=202117.33333333334, ans=0.07 +2024-08-29 17:24:34,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=202117.33333333334, ans=0.2 +2024-08-29 17:24:49,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202170.66666666666, ans=0.1 +2024-08-29 17:24:58,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.10 vs. limit=22.5 +2024-08-29 17:25:11,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=202277.33333333334, ans=0.2 +2024-08-29 17:25:21,525 INFO [train.py:1114] (3/4) Epoch 16, batch 600, loss[loss=0.2605, simple_loss=0.3058, pruned_loss=0.07822, ctc_loss=0.147, over 19353.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2767, pruned_loss=0.05319, ctc_loss=0.1005, over 3666381.67 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0 +2024-08-29 17:26:15,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=202330.66666666666, ans=0.125 +2024-08-29 17:26:49,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=202384.0, ans=0.07 +2024-08-29 17:27:03,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=202384.0, ans=0.1 +2024-08-29 17:27:04,568 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.652e+02 1.934e+02 2.290e+02 3.719e+02, threshold=3.867e+02, percent-clipped=0.0 +2024-08-29 17:28:13,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=202437.33333333334, ans=0.0 +2024-08-29 17:28:46,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.91 vs. 
limit=22.5 +2024-08-29 17:29:44,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=202490.66666666666, ans=0.025 +2024-08-29 17:30:10,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202490.66666666666, ans=0.1 +2024-08-29 17:30:20,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=202544.0, ans=0.125 +2024-08-29 17:30:44,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=202544.0, ans=0.1 +2024-08-29 17:31:03,780 INFO [train.py:1114] (3/4) Epoch 16, batch 650, loss[loss=0.2253, simple_loss=0.2814, pruned_loss=0.06199, ctc_loss=0.1132, over 19773.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2759, pruned_loss=0.05285, ctc_loss=0.09992, over 3716583.34 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0 +2024-08-29 17:32:04,620 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 17:32:55,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=202757.33333333334, ans=0.125 +2024-08-29 17:33:46,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202810.66666666666, ans=0.125 +2024-08-29 17:34:02,109 INFO [train.py:1114] (3/4) Epoch 16, batch 700, loss[loss=0.2052, simple_loss=0.2716, pruned_loss=0.05088, ctc_loss=0.0925, over 19721.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2765, pruned_loss=0.05306, ctc_loss=0.1002, over 3748262.13 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0 +2024-08-29 17:34:17,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=202917.33333333334, ans=0.025 +2024-08-29 17:35:12,327 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.755e+02 2.110e+02 2.761e+02 5.047e+02, threshold=4.220e+02, percent-clipped=5.0 +2024-08-29 17:36:00,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.40 vs. limit=15.0 +2024-08-29 17:36:24,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202970.66666666666, ans=0.0 +2024-08-29 17:36:32,830 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 17:38:14,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=203024.0, ans=0.125 +2024-08-29 17:38:16,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0 +2024-08-29 17:42:01,665 INFO [train.py:1114] (3/4) Epoch 16, batch 750, loss[loss=0.2213, simple_loss=0.2884, pruned_loss=0.05615, ctc_loss=0.1045, over 19488.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2759, pruned_loss=0.05282, ctc_loss=0.09953, over 3773674.92 frames. 
], batch size: 54, lr: 9.43e-03, grad_scale: 32.0 +2024-08-29 17:42:03,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203130.66666666666, ans=0.1 +2024-08-29 17:42:03,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=203130.66666666666, ans=0.0 +2024-08-29 17:42:12,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=203130.66666666666, ans=0.025 +2024-08-29 17:42:32,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=203184.0, ans=0.0 +2024-08-29 17:42:48,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.93 vs. limit=15.0 +2024-08-29 17:46:05,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=203344.0, ans=0.125 +2024-08-29 17:46:10,162 INFO [train.py:1114] (3/4) Epoch 16, batch 800, loss[loss=0.1928, simple_loss=0.2654, pruned_loss=0.04245, ctc_loss=0.08811, over 19796.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2755, pruned_loss=0.05271, ctc_loss=0.09925, over 3795955.34 frames. ], batch size: 49, lr: 9.42e-03, grad_scale: 32.0 +2024-08-29 17:48:05,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=203450.66666666666, ans=0.2 +2024-08-29 17:48:15,896 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.745e+02 2.069e+02 2.556e+02 3.770e+02, threshold=4.138e+02, percent-clipped=0.0 +2024-08-29 17:48:24,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=203504.0, ans=0.025 +2024-08-29 17:48:31,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-08-29 17:48:42,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.58 vs. limit=15.0 +2024-08-29 17:49:06,936 INFO [train.py:1114] (3/4) Epoch 16, batch 850, loss[loss=0.2056, simple_loss=0.277, pruned_loss=0.04808, ctc_loss=0.09494, over 19678.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2758, pruned_loss=0.05289, ctc_loss=0.09981, over 3815245.16 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0 +2024-08-29 17:49:18,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203664.0, ans=0.1 +2024-08-29 17:49:40,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.35 vs. limit=22.5 +2024-08-29 17:50:21,093 INFO [train.py:1114] (3/4) Epoch 16, batch 900, loss[loss=0.2062, simple_loss=0.2619, pruned_loss=0.05546, ctc_loss=0.09898, over 19440.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2763, pruned_loss=0.05348, ctc_loss=0.1007, over 3818838.11 frames. 
], batch size: 48, lr: 9.41e-03, grad_scale: 32.0 +2024-08-29 17:50:30,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=203930.66666666666, ans=0.0 +2024-08-29 17:50:33,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203930.66666666666, ans=0.1 +2024-08-29 17:50:37,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=203930.66666666666, ans=0.125 +2024-08-29 17:50:48,727 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.676e+02 1.827e+02 2.350e+02 4.099e+02, threshold=3.653e+02, percent-clipped=0.0 +2024-08-29 17:51:19,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=204037.33333333334, ans=0.125 +2024-08-29 17:53:28,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=204144.0, ans=0.0 +2024-08-29 17:53:37,491 INFO [train.py:1114] (3/4) Epoch 16, batch 950, loss[loss=0.1943, simple_loss=0.2581, pruned_loss=0.04797, ctc_loss=0.08628, over 19475.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2769, pruned_loss=0.05383, ctc_loss=0.1011, over 3819271.02 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0 +2024-08-29 17:54:04,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=204197.33333333334, ans=0.0 +2024-08-29 17:54:06,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=204197.33333333334, ans=0.2 +2024-08-29 17:54:33,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.49 vs. limit=10.0 +2024-08-29 17:54:34,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=204250.66666666666, ans=0.2 +2024-08-29 17:54:57,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=204357.33333333334, ans=0.125 +2024-08-29 17:55:46,647 INFO [train.py:1114] (3/4) Epoch 16, batch 1000, loss[loss=0.2103, simple_loss=0.2702, pruned_loss=0.05417, ctc_loss=0.1053, over 19848.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2783, pruned_loss=0.05446, ctc_loss=0.1024, over 3815685.28 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0 +2024-08-29 17:56:04,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.89 vs. limit=22.5 +2024-08-29 17:56:07,202 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.649e+02 1.918e+02 2.268e+02 3.238e+02, threshold=3.836e+02, percent-clipped=0.0 +2024-08-29 17:56:58,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.46 vs. 
limit=15.0 +2024-08-29 17:57:31,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=204624.0, ans=0.125 +2024-08-29 17:57:39,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=204624.0, ans=0.0 +2024-08-29 17:57:54,923 INFO [train.py:1114] (3/4) Epoch 16, batch 1050, loss[loss=0.2358, simple_loss=0.3024, pruned_loss=0.06251, ctc_loss=0.1105, over 19848.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2777, pruned_loss=0.0544, ctc_loss=0.1022, over 3822643.35 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 32.0 +2024-08-29 17:58:32,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=204730.66666666666, ans=0.125 +2024-08-29 17:58:33,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.98 vs. limit=15.0 +2024-08-29 17:58:35,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=204730.66666666666, ans=0.025 +2024-08-29 17:59:51,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-29 18:00:09,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=204837.33333333334, ans=0.0 +2024-08-29 18:00:21,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.01 vs. limit=15.0 +2024-08-29 18:00:53,277 INFO [train.py:1114] (3/4) Epoch 16, batch 1100, loss[loss=0.2242, simple_loss=0.2877, pruned_loss=0.05941, ctc_loss=0.1046, over 19573.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.278, pruned_loss=0.05464, ctc_loss=0.1026, over 3830346.76 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 16.0 +2024-08-29 18:01:10,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=204997.33333333334, ans=0.125 +2024-08-29 18:01:21,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=205050.66666666666, ans=0.0 +2024-08-29 18:01:27,924 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.389e+02 1.694e+02 1.874e+02 2.325e+02 3.063e+02, threshold=3.748e+02, percent-clipped=0.0 +2024-08-29 18:01:53,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.09 vs. limit=10.0 +2024-08-29 18:01:56,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.67 vs. limit=15.0 +2024-08-29 18:02:27,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-08-29 18:02:38,217 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.34 vs. 
limit=15.0 +2024-08-29 18:02:38,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205210.66666666666, ans=0.125 +2024-08-29 18:02:43,481 INFO [train.py:1114] (3/4) Epoch 16, batch 1150, loss[loss=0.2022, simple_loss=0.2655, pruned_loss=0.05062, ctc_loss=0.09409, over 19595.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2779, pruned_loss=0.05486, ctc_loss=0.1031, over 3828951.50 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 16.0 +2024-08-29 18:02:50,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=205264.0, ans=0.125 +2024-08-29 18:03:00,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=205317.33333333334, ans=0.0 +2024-08-29 18:03:14,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=205370.66666666666, ans=0.0 +2024-08-29 18:03:31,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=205477.33333333334, ans=0.0 +2024-08-29 18:03:41,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205477.33333333334, ans=0.0 +2024-08-29 18:03:45,089 INFO [train.py:1114] (3/4) Epoch 16, batch 1200, loss[loss=0.2286, simple_loss=0.2945, pruned_loss=0.05886, ctc_loss=0.1122, over 19837.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.279, pruned_loss=0.05503, ctc_loss=0.1035, over 3825450.87 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0 +2024-08-29 18:03:48,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=205530.66666666666, ans=0.125 +2024-08-29 18:03:52,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=205530.66666666666, ans=0.125 +2024-08-29 18:03:52,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=205530.66666666666, ans=0.2 +2024-08-29 18:03:59,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=205584.0, ans=0.0 +2024-08-29 18:04:02,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=205584.0, ans=0.125 +2024-08-29 18:04:06,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.98 vs. limit=15.0 +2024-08-29 18:04:06,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.725e+02 2.012e+02 2.470e+02 3.418e+02, threshold=4.024e+02, percent-clipped=0.0 +2024-08-29 18:04:08,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=205637.33333333334, ans=0.2 +2024-08-29 18:04:45,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=205744.0, ans=0.125 +2024-08-29 18:04:50,733 INFO [train.py:1114] (3/4) Epoch 16, batch 1250, loss[loss=0.2429, simple_loss=0.2998, pruned_loss=0.06801, ctc_loss=0.1248, over 19533.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.279, pruned_loss=0.05475, ctc_loss=0.1029, over 3843629.75 frames. 
], batch size: 61, lr: 9.37e-03, grad_scale: 32.0 +2024-08-29 18:04:55,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=205797.33333333334, ans=0.0 +2024-08-29 18:05:48,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205957.33333333334, ans=0.125 +2024-08-29 18:06:32,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.84 vs. limit=10.0 +2024-08-29 18:06:33,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=206010.66666666666, ans=0.0 +2024-08-29 18:06:35,884 INFO [train.py:1114] (3/4) Epoch 16, batch 1300, loss[loss=0.236, simple_loss=0.2915, pruned_loss=0.06604, ctc_loss=0.1211, over 18726.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2777, pruned_loss=0.05402, ctc_loss=0.1015, over 3847465.37 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0 +2024-08-29 18:06:47,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=206117.33333333334, ans=0.0 +2024-08-29 18:06:57,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.716e+02 2.090e+02 2.690e+02 4.268e+02, threshold=4.180e+02, percent-clipped=3.0 +2024-08-29 18:07:22,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=206277.33333333334, ans=0.125 +2024-08-29 18:07:31,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206277.33333333334, ans=0.1 +2024-08-29 18:07:34,544 INFO [train.py:1114] (3/4) Epoch 16, batch 1350, loss[loss=0.2105, simple_loss=0.2817, pruned_loss=0.05093, ctc_loss=0.09367, over 19771.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.278, pruned_loss=0.05431, ctc_loss=0.1021, over 3856232.56 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 32.0 +2024-08-29 18:07:40,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206330.66666666666, ans=0.125 +2024-08-29 18:07:54,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=206384.0, ans=0.0 +2024-08-29 18:07:57,539 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 18:10:13,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206544.0, ans=0.1 +2024-08-29 18:10:22,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=206597.33333333334, ans=0.0 +2024-08-29 18:10:56,016 INFO [train.py:1114] (3/4) Epoch 16, batch 1400, loss[loss=0.1763, simple_loss=0.2408, pruned_loss=0.0403, ctc_loss=0.07789, over 19667.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2781, pruned_loss=0.05445, ctc_loss=0.1023, over 3863166.97 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 32.0 +2024-08-29 18:10:57,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.35 vs. 
limit=15.0 +2024-08-29 18:11:02,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=206597.33333333334, ans=0.125 +2024-08-29 18:13:15,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.659e+02 1.830e+02 2.117e+02 3.619e+02, threshold=3.659e+02, percent-clipped=0.0 +2024-08-29 18:14:20,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.15 vs. limit=15.0 +2024-08-29 18:14:49,672 INFO [train.py:1114] (3/4) Epoch 16, batch 1450, loss[loss=0.218, simple_loss=0.2807, pruned_loss=0.05661, ctc_loss=0.1054, over 19669.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2781, pruned_loss=0.05418, ctc_loss=0.1017, over 3862211.86 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0 +2024-08-29 18:15:10,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=206864.0, ans=0.1 +2024-08-29 18:15:16,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.04 vs. limit=22.5 +2024-08-29 18:15:29,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=206970.66666666666, ans=0.05 +2024-08-29 18:15:40,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=207024.0, ans=0.125 +2024-08-29 18:15:55,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207024.0, ans=0.0 +2024-08-29 18:15:56,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=207024.0, ans=0.125 +2024-08-29 18:16:10,740 INFO [train.py:1114] (3/4) Epoch 16, batch 1500, loss[loss=0.1954, simple_loss=0.2782, pruned_loss=0.04008, ctc_loss=0.08101, over 19576.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2781, pruned_loss=0.05398, ctc_loss=0.1012, over 3862602.86 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0 +2024-08-29 18:16:20,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=207130.66666666666, ans=0.05 +2024-08-29 18:16:32,416 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.680e+02 1.893e+02 2.490e+02 3.994e+02, threshold=3.786e+02, percent-clipped=1.0 +2024-08-29 18:16:49,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=207290.66666666666, ans=0.125 +2024-08-29 18:17:34,470 INFO [train.py:1114] (3/4) Epoch 16, batch 1550, loss[loss=0.2278, simple_loss=0.2923, pruned_loss=0.05991, ctc_loss=0.1086, over 19608.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.278, pruned_loss=0.05406, ctc_loss=0.1018, over 3847341.01 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0 +2024-08-29 18:17:38,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=207397.33333333334, ans=0.1 +2024-08-29 18:17:52,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.60 vs. 
limit=22.5 +2024-08-29 18:17:59,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207504.0, ans=0.125 +2024-08-29 18:18:00,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207504.0, ans=0.1 +2024-08-29 18:19:22,737 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 18:19:32,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=207557.33333333334, ans=0.0 +2024-08-29 18:19:44,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=207610.66666666666, ans=0.025 +2024-08-29 18:19:55,348 INFO [train.py:1114] (3/4) Epoch 16, batch 1600, loss[loss=0.2209, simple_loss=0.2872, pruned_loss=0.05686, ctc_loss=0.1024, over 19828.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.278, pruned_loss=0.05421, ctc_loss=0.1021, over 3837493.35 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0 +2024-08-29 18:21:55,747 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.773e+02 1.965e+02 2.508e+02 5.321e+02, threshold=3.930e+02, percent-clipped=3.0 +2024-08-29 18:22:20,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=207877.33333333334, ans=0.0 +2024-08-29 18:22:20,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=207877.33333333334, ans=0.0 +2024-08-29 18:22:53,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=207877.33333333334, ans=0.125 +2024-08-29 18:23:01,490 INFO [train.py:1114] (3/4) Epoch 16, batch 1650, loss[loss=0.1923, simple_loss=0.2724, pruned_loss=0.04045, ctc_loss=0.07838, over 19646.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2773, pruned_loss=0.05373, ctc_loss=0.1014, over 3834750.07 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0 +2024-08-29 18:24:27,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=208037.33333333334, ans=0.2 +2024-08-29 18:24:29,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=208037.33333333334, ans=0.125 +2024-08-29 18:24:33,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.86 vs. limit=15.0 +2024-08-29 18:24:35,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=208090.66666666666, ans=0.1 +2024-08-29 18:24:49,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208144.0, ans=0.125 +2024-08-29 18:24:51,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.39 vs. 
limit=15.0 +2024-08-29 18:24:53,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=208144.0, ans=0.025 +2024-08-29 18:26:13,695 INFO [train.py:1114] (3/4) Epoch 16, batch 1700, loss[loss=0.1905, simple_loss=0.253, pruned_loss=0.04637, ctc_loss=0.08794, over 19643.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2767, pruned_loss=0.05317, ctc_loss=0.1006, over 3848173.81 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 32.0 +2024-08-29 18:26:21,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=208197.33333333334, ans=0.2 +2024-08-29 18:26:34,605 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 1.759e+02 2.180e+02 2.878e+02 5.111e+02, threshold=4.361e+02, percent-clipped=4.0 +2024-08-29 18:26:43,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=208304.0, ans=0.2 +2024-08-29 18:26:43,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.85 vs. limit=10.0 +2024-08-29 18:26:47,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=208304.0, ans=0.2 +2024-08-29 18:26:50,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=208357.33333333334, ans=0.125 +2024-08-29 18:26:58,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=208357.33333333334, ans=0.0 +2024-08-29 18:27:03,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208410.66666666666, ans=0.1 +2024-08-29 18:27:08,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=22.5 +2024-08-29 18:27:13,925 INFO [train.py:1114] (3/4) Epoch 16, batch 1750, loss[loss=0.1891, simple_loss=0.2482, pruned_loss=0.04801, ctc_loss=0.0849, over 19636.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2764, pruned_loss=0.05313, ctc_loss=0.1006, over 3852413.95 frames. 
], batch size: 45, lr: 9.31e-03, grad_scale: 32.0 +2024-08-29 18:27:14,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208464.0, ans=0.1 +2024-08-29 18:27:40,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=208464.0, ans=0.125 +2024-08-29 18:27:44,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=208464.0, ans=0.125 +2024-08-29 18:27:47,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208517.33333333334, ans=0.125 +2024-08-29 18:27:50,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208517.33333333334, ans=0.1 +2024-08-29 18:27:51,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=208517.33333333334, ans=0.125 +2024-08-29 18:29:16,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=208624.0, ans=0.2 +2024-08-29 18:30:09,185 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 18:30:12,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=208677.33333333334, ans=0.025 +2024-08-29 18:30:12,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-08-29 18:30:14,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=208677.33333333334, ans=0.0 +2024-08-29 18:30:17,798 INFO [train.py:1114] (3/4) Epoch 16, batch 1800, loss[loss=0.1844, simple_loss=0.2651, pruned_loss=0.03759, ctc_loss=0.07119, over 19627.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2767, pruned_loss=0.05309, ctc_loss=0.1005, over 3853919.08 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0 +2024-08-29 18:30:26,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.73 vs. limit=22.5 +2024-08-29 18:30:32,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=208784.0, ans=0.0 +2024-08-29 18:30:41,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=208784.0, ans=0.0 +2024-08-29 18:30:45,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.693e+02 1.985e+02 2.381e+02 4.228e+02, threshold=3.971e+02, percent-clipped=0.0 +2024-08-29 18:30:59,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-08-29 18:31:03,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=208890.66666666666, ans=0.125 +2024-08-29 18:31:10,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.27 vs. 
limit=15.0 +2024-08-29 18:31:43,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.46 vs. limit=15.0 +2024-08-29 18:31:44,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=208997.33333333334, ans=0.125 +2024-08-29 18:31:44,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=208997.33333333334, ans=0.05 +2024-08-29 18:31:45,936 INFO [train.py:1114] (3/4) Epoch 16, batch 1850, loss[loss=0.2081, simple_loss=0.2844, pruned_loss=0.049, ctc_loss=0.08461, over 19584.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2768, pruned_loss=0.0532, ctc_loss=0.1005, over 3855947.60 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0 +2024-08-29 18:31:55,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=208997.33333333334, ans=0.125 +2024-08-29 18:32:36,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=209050.66666666666, ans=0.0 +2024-08-29 18:32:40,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209104.0, ans=0.125 +2024-08-29 18:32:53,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=209157.33333333334, ans=0.125 +2024-08-29 18:32:54,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209157.33333333334, ans=0.1 +2024-08-29 18:33:17,358 INFO [train.py:1114] (3/4) Epoch 16, batch 1900, loss[loss=0.2091, simple_loss=0.2779, pruned_loss=0.05064, ctc_loss=0.09732, over 19658.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2771, pruned_loss=0.05348, ctc_loss=0.1009, over 3860573.92 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0 +2024-08-29 18:33:29,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=15.0 +2024-08-29 18:33:36,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=209317.33333333334, ans=0.2 +2024-08-29 18:33:40,786 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.785e+02 2.354e+02 2.964e+02 6.037e+02, threshold=4.708e+02, percent-clipped=9.0 +2024-08-29 18:33:41,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209370.66666666666, ans=0.1 +2024-08-29 18:33:45,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=209370.66666666666, ans=0.125 +2024-08-29 18:33:47,043 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.35 vs. limit=22.5 +2024-08-29 18:34:08,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=209477.33333333334, ans=0.025 +2024-08-29 18:34:30,809 INFO [train.py:1114] (3/4) Epoch 16, batch 1950, loss[loss=0.2277, simple_loss=0.283, pruned_loss=0.06336, ctc_loss=0.1141, over 19597.00 frames. 
], tot_loss[loss=0.2128, simple_loss=0.2778, pruned_loss=0.05369, ctc_loss=0.1011, over 3869340.09 frames. ], batch size: 52, lr: 9.29e-03, grad_scale: 32.0 +2024-08-29 18:34:51,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=209530.66666666666, ans=0.125 +2024-08-29 18:34:52,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=209530.66666666666, ans=0.125 +2024-08-29 18:35:22,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.41 vs. limit=22.5 +2024-08-29 18:35:45,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=209744.0, ans=0.125 +2024-08-29 18:35:49,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209744.0, ans=0.125 +2024-08-29 18:35:51,672 INFO [train.py:1114] (3/4) Epoch 16, batch 2000, loss[loss=0.1858, simple_loss=0.2511, pruned_loss=0.0432, ctc_loss=0.08509, over 19634.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2777, pruned_loss=0.05342, ctc_loss=0.1009, over 3853325.83 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0 +2024-08-29 18:35:56,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-29 18:36:00,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-29 18:36:13,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.666e+02 1.888e+02 2.185e+02 3.516e+02, threshold=3.775e+02, percent-clipped=0.0 +2024-08-29 18:36:17,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209904.0, ans=0.0 +2024-08-29 18:36:19,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209904.0, ans=0.125 +2024-08-29 18:36:31,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209957.33333333334, ans=0.125 +2024-08-29 18:37:02,154 INFO [train.py:1114] (3/4) Epoch 16, batch 2050, loss[loss=0.2076, simple_loss=0.2572, pruned_loss=0.0584, ctc_loss=0.1029, over 19677.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2771, pruned_loss=0.05377, ctc_loss=0.1016, over 3850064.81 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0 +2024-08-29 18:37:03,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=210064.0, ans=0.0 +2024-08-29 18:37:51,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=210170.66666666666, ans=0.2 +2024-08-29 18:38:39,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=210224.0, ans=0.5 +2024-08-29 18:38:59,605 INFO [train.py:1114] (3/4) Epoch 16, batch 2100, loss[loss=0.1917, simple_loss=0.2623, pruned_loss=0.04469, ctc_loss=0.07934, over 19758.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2761, pruned_loss=0.05308, ctc_loss=0.1003, over 3858504.80 frames. 
], batch size: 54, lr: 9.27e-03, grad_scale: 32.0 +2024-08-29 18:38:59,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=210330.66666666666, ans=0.125 +2024-08-29 18:39:22,234 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.792e+02 2.112e+02 2.675e+02 4.176e+02, threshold=4.223e+02, percent-clipped=3.0 +2024-08-29 18:39:41,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210490.66666666666, ans=0.1 +2024-08-29 18:39:43,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=210490.66666666666, ans=0.2 +2024-08-29 18:39:46,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=210544.0, ans=0.125 +2024-08-29 18:39:55,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=210544.0, ans=0.125 +2024-08-29 18:39:57,958 INFO [train.py:1114] (3/4) Epoch 16, batch 2150, loss[loss=0.1844, simple_loss=0.2483, pruned_loss=0.04396, ctc_loss=0.08131, over 19857.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2751, pruned_loss=0.05249, ctc_loss=0.09901, over 3870464.23 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0 +2024-08-29 18:40:05,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=210597.33333333334, ans=0.125 +2024-08-29 18:40:31,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=210650.66666666666, ans=0.95 +2024-08-29 18:40:37,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=210704.0, ans=0.025 +2024-08-29 18:40:56,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=210757.33333333334, ans=0.125 +2024-08-29 18:41:02,772 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.81 vs. limit=15.0 +2024-08-29 18:41:08,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.57 vs. limit=22.5 +2024-08-29 18:41:08,958 INFO [train.py:1114] (3/4) Epoch 16, batch 2200, loss[loss=0.1989, simple_loss=0.2649, pruned_loss=0.04712, ctc_loss=0.09674, over 19609.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2753, pruned_loss=0.05263, ctc_loss=0.09912, over 3868836.05 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0 +2024-08-29 18:41:17,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=210864.0, ans=0.125 +2024-08-29 18:41:27,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.04 vs. limit=22.5 +2024-08-29 18:41:29,786 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.757e+02 2.042e+02 2.598e+02 4.148e+02, threshold=4.084e+02, percent-clipped=0.0 +2024-08-29 18:42:31,571 INFO [train.py:1114] (3/4) Epoch 16, batch 2250, loss[loss=0.222, simple_loss=0.292, pruned_loss=0.05515, ctc_loss=0.1045, over 19617.00 frames. 
], tot_loss[loss=0.211, simple_loss=0.276, pruned_loss=0.05301, ctc_loss=0.09973, over 3868508.86 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0 +2024-08-29 18:42:43,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=211184.0, ans=0.0 +2024-08-29 18:42:57,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=211237.33333333334, ans=0.125 +2024-08-29 18:42:58,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=211237.33333333334, ans=0.125 +2024-08-29 18:43:08,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=211290.66666666666, ans=0.0 +2024-08-29 18:44:24,271 INFO [train.py:1114] (3/4) Epoch 16, batch 2300, loss[loss=0.1672, simple_loss=0.2366, pruned_loss=0.0355, ctc_loss=0.06695, over 19500.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.275, pruned_loss=0.05307, ctc_loss=0.0999, over 3861386.43 frames. ], batch size: 49, lr: 9.25e-03, grad_scale: 32.0 +2024-08-29 18:44:29,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=211397.33333333334, ans=0.125 +2024-08-29 18:44:29,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211397.33333333334, ans=0.1 +2024-08-29 18:44:56,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=211397.33333333334, ans=0.125 +2024-08-29 18:45:10,434 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.785e+02 2.121e+02 2.618e+02 4.213e+02, threshold=4.241e+02, percent-clipped=2.0 +2024-08-29 18:45:14,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=211504.0, ans=0.0 +2024-08-29 18:45:33,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=211557.33333333334, ans=0.0 +2024-08-29 18:45:34,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.04 vs. limit=15.0 +2024-08-29 18:45:40,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.13 vs. limit=15.0 +2024-08-29 18:45:59,066 INFO [train.py:1114] (3/4) Epoch 16, batch 2350, loss[loss=0.2199, simple_loss=0.2906, pruned_loss=0.05404, ctc_loss=0.1027, over 19654.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2754, pruned_loss=0.05334, ctc_loss=0.1004, over 3863092.72 frames. ], batch size: 63, lr: 9.24e-03, grad_scale: 32.0 +2024-08-29 18:45:59,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211664.0, ans=0.125 +2024-08-29 18:46:02,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=15.0 +2024-08-29 18:47:00,334 INFO [train.py:1114] (3/4) Epoch 16, batch 2400, loss[loss=0.2102, simple_loss=0.2779, pruned_loss=0.05197, ctc_loss=0.09643, over 19287.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2777, pruned_loss=0.0544, ctc_loss=0.1022, over 3857264.20 frames. 
], batch size: 71, lr: 9.24e-03, grad_scale: 32.0 +2024-08-29 18:47:05,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=211930.66666666666, ans=0.125 +2024-08-29 18:47:12,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=211984.0, ans=0.0 +2024-08-29 18:47:20,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 1.800e+02 2.132e+02 2.653e+02 4.129e+02, threshold=4.264e+02, percent-clipped=0.0 +2024-08-29 18:47:28,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0 +2024-08-29 18:47:42,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=212090.66666666666, ans=0.0 +2024-08-29 18:47:54,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=212144.0, ans=0.2 +2024-08-29 18:47:56,898 INFO [train.py:1114] (3/4) Epoch 16, batch 2450, loss[loss=0.2648, simple_loss=0.3085, pruned_loss=0.08038, ctc_loss=0.1508, over 13699.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2814, pruned_loss=0.05703, ctc_loss=0.1075, over 3730318.33 frames. ], batch size: 140, lr: 9.23e-03, grad_scale: 32.0 +2024-08-29 18:48:14,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=212250.66666666666, ans=0.125 +2024-08-29 18:48:24,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.77 vs. limit=15.0 +2024-08-29 18:48:25,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=212304.0, ans=0.025 +2024-08-29 18:48:38,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=212357.33333333334, ans=0.125 +2024-08-29 18:55:35,478 INFO [train.py:1114] (3/4) Epoch 17, batch 0, loss[loss=0.2025, simple_loss=0.2612, pruned_loss=0.05238, ctc_loss=0.09771, over 19407.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2612, pruned_loss=0.05238, ctc_loss=0.09771, over 19407.00 frames. ], batch size: 48, lr: 8.95e-03, grad_scale: 32.0 +2024-08-29 18:55:35,479 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 18:55:50,207 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5304, 3.1551, 2.2166, 2.8777], device='cuda:3') +2024-08-29 18:56:00,898 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0061, 4.3021, 3.7727, 4.0091], device='cuda:3') +2024-08-29 18:56:01,327 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.8984, 2.0968, 3.4919, 3.5788], device='cuda:3') +2024-08-29 18:56:04,690 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.1843, simple_loss=0.2733, pruned_loss=0.03544, ctc_loss=0.06098, over 944034.00 frames. 
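A note on the recurring "WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=..." entries in this log: the five quartile values summarize recent per-batch gradient norms (min, 25%, median, 75%, max), and in every such entry above the reported threshold equals clipping_scale times the median (e.g. 2.0 x 2.030e+02 = 4.061e+02 in the 18:58:30 entry, and 2.0 x 1.910e+02 = 3.820e+02 in the 19:09:25 entry). The short sketch below is a hypothetical reconstruction of how those statistics can be computed, not the actual optim.py code; the function name and the grad_norm_history buffer are illustrative assumptions.

import torch

def clipping_stats(grad_norm_history: torch.Tensor, clipping_scale: float = 2.0):
    # Sketch only (assumed, not the real optim.py): summarize recent
    # per-batch gradient norms the way the WARNING lines report them.
    qs = torch.quantile(
        grad_norm_history,
        torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0], dtype=grad_norm_history.dtype),
    )  # min, 25%, median, 75%, max of the recent gradient norms
    threshold = clipping_scale * qs[2]  # matches the log: threshold = scale * median
    # Share of recent batches whose gradient norm exceeded the threshold;
    # the log reports this as "percent-clipped".
    percent_clipped = 100.0 * (grad_norm_history > threshold).float().mean()
    return qs, threshold, percent_clipped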
+2024-08-29 18:56:04,691 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13732MB +2024-08-29 18:56:54,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=212458.66666666666, ans=0.0 +2024-08-29 18:58:11,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-29 18:58:20,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=212512.0, ans=0.025 +2024-08-29 18:58:21,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=212512.0, ans=0.0 +2024-08-29 18:58:30,844 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.493e+02 1.824e+02 2.030e+02 2.233e+02 3.073e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-29 18:58:34,755 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=22.5 +2024-08-29 18:58:40,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=212565.33333333334, ans=0.0 +2024-08-29 19:05:26,875 INFO [train.py:1114] (3/4) Epoch 17, batch 50, loss[loss=0.1911, simple_loss=0.2521, pruned_loss=0.04759, ctc_loss=0.08721, over 19722.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.279, pruned_loss=0.05533, ctc_loss=0.1046, over 844761.44 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 32.0 +2024-08-29 19:07:29,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212672.0, ans=0.125 +2024-08-29 19:07:49,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=212672.0, ans=0.0 +2024-08-29 19:07:51,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212725.33333333334, ans=0.1 +2024-08-29 19:08:18,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=212778.66666666666, ans=0.125 +2024-08-29 19:08:25,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.96 vs. limit=6.0 +2024-08-29 19:08:42,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.65 vs. limit=15.0 +2024-08-29 19:08:47,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=212885.33333333334, ans=0.125 +2024-08-29 19:08:52,525 INFO [train.py:1114] (3/4) Epoch 17, batch 100, loss[loss=0.188, simple_loss=0.2601, pruned_loss=0.04181, ctc_loss=0.08056, over 19722.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2794, pruned_loss=0.05451, ctc_loss=0.1033, over 1498730.66 frames. 
], batch size: 51, lr: 8.94e-03, grad_scale: 32.0 +2024-08-29 19:09:03,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=212992.0, ans=0.125 +2024-08-29 19:09:04,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=212992.0, ans=0.0 +2024-08-29 19:09:17,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213045.33333333334, ans=0.125 +2024-08-29 19:09:25,907 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.707e+02 1.910e+02 2.335e+02 3.363e+02, threshold=3.820e+02, percent-clipped=0.0 +2024-08-29 19:09:49,578 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.43 vs. limit=6.0 +2024-08-29 19:09:58,139 INFO [train.py:1114] (3/4) Epoch 17, batch 150, loss[loss=0.1753, simple_loss=0.2434, pruned_loss=0.03915, ctc_loss=0.07237, over 19720.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2766, pruned_loss=0.0528, ctc_loss=0.1002, over 2026289.94 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0 +2024-08-29 19:10:52,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213205.33333333334, ans=0.125 +2024-08-29 19:10:55,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=213205.33333333334, ans=0.1 +2024-08-29 19:12:23,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.63 vs. limit=15.0 +2024-08-29 19:16:16,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213365.33333333334, ans=0.125 +2024-08-29 19:16:19,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-29 19:16:25,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-29 19:16:29,776 INFO [train.py:1114] (3/4) Epoch 17, batch 200, loss[loss=0.2501, simple_loss=0.2997, pruned_loss=0.07368, ctc_loss=0.1328, over 18235.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2752, pruned_loss=0.052, ctc_loss=0.09824, over 2433635.33 frames. 
], batch size: 85, lr: 8.93e-03, grad_scale: 32.0 +2024-08-29 19:24:58,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=213472.0, ans=0.2 +2024-08-29 19:26:28,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=213525.33333333334, ans=0.025 +2024-08-29 19:27:11,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=213525.33333333334, ans=0.2 +2024-08-29 19:27:57,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.724e+02 1.931e+02 2.405e+02 4.691e+02, threshold=3.862e+02, percent-clipped=4.0 +2024-08-29 19:28:21,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=213685.33333333334, ans=0.125 +2024-08-29 19:28:32,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.73 vs. limit=22.5 +2024-08-29 19:28:38,488 INFO [train.py:1114] (3/4) Epoch 17, batch 250, loss[loss=0.2238, simple_loss=0.2894, pruned_loss=0.05786, ctc_loss=0.1064, over 19400.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.275, pruned_loss=0.05153, ctc_loss=0.09742, over 2755065.21 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0 +2024-08-29 19:29:27,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=213845.33333333334, ans=0.05 +2024-08-29 19:30:02,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0 +2024-08-29 19:30:03,449 INFO [train.py:1114] (3/4) Epoch 17, batch 300, loss[loss=0.247, simple_loss=0.3024, pruned_loss=0.07035, ctc_loss=0.127, over 19534.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2748, pruned_loss=0.0516, ctc_loss=0.09749, over 2999463.88 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 32.0 +2024-08-29 19:30:18,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=214058.66666666666, ans=0.09899494936611666 +2024-08-29 19:31:09,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=214112.0, ans=0.0 +2024-08-29 19:31:57,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=214112.0, ans=0.0 +2024-08-29 19:32:01,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214112.0, ans=0.125 +2024-08-29 19:32:02,225 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.689e+02 1.972e+02 2.447e+02 4.331e+02, threshold=3.945e+02, percent-clipped=1.0 +2024-08-29 19:32:41,675 INFO [train.py:1114] (3/4) Epoch 17, batch 350, loss[loss=0.1916, simple_loss=0.2523, pruned_loss=0.04675, ctc_loss=0.09314, over 19779.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2759, pruned_loss=0.05244, ctc_loss=0.09878, over 3190999.16 frames. 
], batch size: 48, lr: 8.91e-03, grad_scale: 32.0 +2024-08-29 19:33:00,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=214325.33333333334, ans=0.0 +2024-08-29 19:33:08,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=214325.33333333334, ans=0.5 +2024-08-29 19:33:57,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=214432.0, ans=0.025 +2024-08-29 19:34:01,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=214432.0, ans=0.0 +2024-08-29 19:34:04,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=214485.33333333334, ans=0.125 +2024-08-29 19:34:10,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0 +2024-08-29 19:34:18,295 INFO [train.py:1114] (3/4) Epoch 17, batch 400, loss[loss=0.2065, simple_loss=0.2757, pruned_loss=0.05032, ctc_loss=0.0919, over 19500.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2754, pruned_loss=0.0522, ctc_loss=0.09822, over 3343531.34 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-29 19:34:39,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=214538.66666666666, ans=0.0 +2024-08-29 19:35:44,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=214645.33333333334, ans=0.2 +2024-08-29 19:36:30,683 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.299e+02 1.665e+02 1.964e+02 2.553e+02 4.238e+02, threshold=3.929e+02, percent-clipped=2.0 +2024-08-29 19:37:57,082 INFO [train.py:1114] (3/4) Epoch 17, batch 450, loss[loss=0.2184, simple_loss=0.2884, pruned_loss=0.05357, ctc_loss=0.1033, over 19614.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2763, pruned_loss=0.05259, ctc_loss=0.09908, over 3451227.36 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:38:14,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.10 vs. limit=22.5 +2024-08-29 19:40:16,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=215018.66666666666, ans=10.0 +2024-08-29 19:40:17,352 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.16 vs. limit=22.5 +2024-08-29 19:40:20,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215018.66666666666, ans=0.0 +2024-08-29 19:40:26,571 INFO [train.py:1114] (3/4) Epoch 17, batch 500, loss[loss=0.2207, simple_loss=0.2909, pruned_loss=0.05551, ctc_loss=0.09866, over 19684.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2759, pruned_loss=0.05254, ctc_loss=0.09905, over 3546818.49 frames. 
], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-29 19:40:30,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215072.0, ans=0.125 +2024-08-29 19:42:38,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.765e+02 1.983e+02 2.603e+02 4.687e+02, threshold=3.966e+02, percent-clipped=3.0 +2024-08-29 19:43:26,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. limit=6.0 +2024-08-29 19:43:45,807 INFO [train.py:1114] (3/4) Epoch 17, batch 550, loss[loss=0.2507, simple_loss=0.304, pruned_loss=0.07231, ctc_loss=0.1317, over 19302.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2759, pruned_loss=0.05263, ctc_loss=0.09925, over 3608612.57 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-29 19:44:04,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=215338.66666666666, ans=0.125 +2024-08-29 19:44:50,186 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 19:44:58,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=215338.66666666666, ans=0.5 +2024-08-29 19:45:01,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=215392.0, ans=0.2 +2024-08-29 19:46:01,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0 +2024-08-29 19:46:53,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215552.0, ans=0.1 +2024-08-29 19:46:57,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=215552.0, ans=0.125 +2024-08-29 19:47:01,424 INFO [train.py:1114] (3/4) Epoch 17, batch 600, loss[loss=0.2518, simple_loss=0.2991, pruned_loss=0.07511, ctc_loss=0.1355, over 19313.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2758, pruned_loss=0.05258, ctc_loss=0.0992, over 3666177.61 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:47:07,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=215605.33333333334, ans=0.0 +2024-08-29 19:47:11,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.85 vs. limit=15.0 +2024-08-29 19:48:19,079 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.690e+02 1.951e+02 2.307e+02 4.172e+02, threshold=3.901e+02, percent-clipped=2.0 +2024-08-29 19:48:25,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=215765.33333333334, ans=0.125 +2024-08-29 19:48:32,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=215818.66666666666, ans=0.0 +2024-08-29 19:49:17,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.82 vs. 
limit=22.5 +2024-08-29 19:49:21,615 INFO [train.py:1114] (3/4) Epoch 17, batch 650, loss[loss=0.2145, simple_loss=0.2757, pruned_loss=0.05577, ctc_loss=0.1046, over 19764.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2752, pruned_loss=0.0524, ctc_loss=0.09876, over 3716226.04 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 64.0 +2024-08-29 19:49:38,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=215925.33333333334, ans=0.07 +2024-08-29 19:50:09,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.03 vs. limit=15.0 +2024-08-29 19:50:20,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.78 vs. limit=15.0 +2024-08-29 19:51:21,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=216085.33333333334, ans=0.125 +2024-08-29 19:51:32,005 INFO [train.py:1114] (3/4) Epoch 17, batch 700, loss[loss=0.1982, simple_loss=0.2659, pruned_loss=0.04686, ctc_loss=0.09199, over 19711.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2756, pruned_loss=0.05251, ctc_loss=0.0988, over 3749092.86 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:51:32,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.69 vs. limit=22.5 +2024-08-29 19:51:42,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=12.0 +2024-08-29 19:51:58,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=216245.33333333334, ans=0.05 +2024-08-29 19:52:43,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.757e+02 1.978e+02 2.439e+02 3.670e+02, threshold=3.956e+02, percent-clipped=0.0 +2024-08-29 19:53:44,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=216352.0, ans=0.125 +2024-08-29 19:53:46,878 INFO [train.py:1114] (3/4) Epoch 17, batch 750, loss[loss=0.2171, simple_loss=0.2886, pruned_loss=0.05215, ctc_loss=0.103, over 19502.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2753, pruned_loss=0.05234, ctc_loss=0.09856, over 3775151.58 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 64.0 +2024-08-29 19:54:34,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.90 vs. limit=15.0 +2024-08-29 19:55:07,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.13 vs. 
limit=15.0 +2024-08-29 19:55:11,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216405.33333333334, ans=0.1 +2024-08-29 19:55:48,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=216458.66666666666, ans=0.125 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-0 new file mode 100644 index 0000000000000000000000000000000000000000..b0b51f050bee6b14e38de69bd49ca6b14ba4a513 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-0 @@ -0,0 +1,564 @@ +2024-08-30 12:44:46,451 INFO [train.py:1182] (0/4) Training started +2024-08-30 12:44:46,908 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-30 12:44:47,090 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 17, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': 
PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 12:44:47,091 INFO [train.py:1212] (0/4) About to create model +2024-08-30 12:44:48,184 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-30 12:44:48,766 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-16.pt +2024-08-30 12:45:01,679 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-30 12:45:02,095 INFO [train.py:1231] (0/4) Using DDP +2024-08-30 12:45:06,256 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-30 12:45:06,460 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-30 12:45:06,460 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-30 12:45:06,666 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-30 12:45:08,257 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-30 12:45:08,264 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-30 12:45:08,444 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-30 12:45:08,576 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-30 12:45:08,904 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-30 12:45:08,904 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 12:51:17,012 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-30 12:51:18,479 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-30 12:53:02,305 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-30 12:53:03,308 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.25 vs. limit=5.0 +2024-08-30 12:53:03,666 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 12:54:12,750 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 12:54:14,354 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 12:54:14,375 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-30 12:55:06,918 INFO [train.py:1114] (0/4) Epoch 17, batch 0, loss[loss=0.2431, simple_loss=0.286, pruned_loss=0.07327, ctc_loss=0.1339, over 19800.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.286, pruned_loss=0.07327, ctc_loss=0.1339, over 19800.00 frames. 
], batch size: 49, lr: 8.95e-03, grad_scale: 32.0 +2024-08-30 12:55:06,919 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-30 12:55:26,654 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.7976, 2.9573, 4.0454, 4.1840], device='cuda:0') +2024-08-30 12:55:27,223 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.7737, 2.2076, 1.6605, 2.0207, 2.2583, 2.3793, 2.3037, 1.7608], + device='cuda:0') +2024-08-30 12:55:31,713 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.185, simple_loss=0.2737, pruned_loss=0.03584, ctc_loss=0.06176, over 944034.00 frames. +2024-08-30 12:55:31,713 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 12:55:32,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.47 vs. limit=22.5 +2024-08-30 12:56:12,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.93 vs. limit=15.0 +2024-08-30 13:01:17,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=212512.0, ans=0.0 +2024-08-30 13:06:19,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.860e+02 2.030e+02 2.233e+02 2.993e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-30 13:06:53,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=212565.33333333334, ans=0.125 +2024-08-30 13:07:11,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212565.33333333334, ans=0.125 +2024-08-30 13:09:56,386 INFO [train.py:1114] (0/4) Epoch 17, batch 50, loss[loss=0.1764, simple_loss=0.244, pruned_loss=0.03928, ctc_loss=0.0757, over 19711.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2783, pruned_loss=0.0537, ctc_loss=0.1018, over 844772.04 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:09:56,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=212672.0, ans=0.125 +2024-08-30 13:16:09,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-08-30 13:16:44,134 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:18:37,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.25 vs. limit=15.0 +2024-08-30 13:18:56,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=212885.33333333334, ans=0.125 +2024-08-30 13:19:01,169 INFO [train.py:1114] (0/4) Epoch 17, batch 100, loss[loss=0.1973, simple_loss=0.2612, pruned_loss=0.04844, ctc_loss=0.09124, over 19727.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2798, pruned_loss=0.05447, ctc_loss=0.103, over 1498273.11 frames. 
], batch size: 51, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:19:24,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=212992.0, ans=0.125 +2024-08-30 13:19:25,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.69 vs. limit=6.0 +2024-08-30 13:19:26,196 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.68 vs. limit=22.5 +2024-08-30 13:20:07,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212992.0, ans=0.125 +2024-08-30 13:23:11,083 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.706e+02 1.953e+02 2.287e+02 3.713e+02, threshold=3.906e+02, percent-clipped=0.0 +2024-08-30 13:24:10,825 INFO [train.py:1114] (0/4) Epoch 17, batch 150, loss[loss=0.2364, simple_loss=0.2768, pruned_loss=0.07104, ctc_loss=0.1349, over 19685.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2779, pruned_loss=0.0541, ctc_loss=0.1023, over 2027311.53 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:24:43,783 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-40000.pt +2024-08-30 13:25:06,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213312.0, ans=0.125 +2024-08-30 13:27:19,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.01 vs. limit=15.0 +2024-08-30 13:27:35,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=213365.33333333334, ans=0.0 +2024-08-30 13:27:36,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213365.33333333334, ans=0.1 +2024-08-30 13:27:51,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213365.33333333334, ans=0.1 +2024-08-30 13:27:55,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-30 13:28:02,238 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:28:05,732 INFO [train.py:1114] (0/4) Epoch 17, batch 200, loss[loss=0.2567, simple_loss=0.3049, pruned_loss=0.07503, ctc_loss=0.146, over 18082.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2767, pruned_loss=0.05355, ctc_loss=0.1014, over 2434813.02 frames. 
], batch size: 85, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:28:12,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=213472.0, ans=0.2 +2024-08-30 13:28:15,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=213472.0, ans=0.125 +2024-08-30 13:28:17,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213525.33333333334, ans=0.1 +2024-08-30 13:28:19,773 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:28:24,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=213525.33333333334, ans=0.2 +2024-08-30 13:28:28,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=213578.66666666666, ans=0.0 +2024-08-30 13:28:40,335 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.384e+02 1.731e+02 1.992e+02 2.666e+02 4.093e+02, threshold=3.983e+02, percent-clipped=1.0 +2024-08-30 13:28:40,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213632.0, ans=0.125 +2024-08-30 13:28:42,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213632.0, ans=0.1 +2024-08-30 13:29:02,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=213685.33333333334, ans=0.0 +2024-08-30 13:29:05,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=213685.33333333334, ans=0.125 +2024-08-30 13:29:06,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213738.66666666666, ans=0.1 +2024-08-30 13:29:07,531 INFO [train.py:1114] (0/4) Epoch 17, batch 250, loss[loss=0.2387, simple_loss=0.2913, pruned_loss=0.06706, ctc_loss=0.13, over 19421.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2758, pruned_loss=0.0531, ctc_loss=0.1005, over 2755282.98 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:29:07,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=213738.66666666666, ans=0.0 +2024-08-30 13:29:13,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=213738.66666666666, ans=0.2 +2024-08-30 13:29:13,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213738.66666666666, ans=0.0 +2024-08-30 13:29:24,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=213792.0, ans=0.95 +2024-08-30 13:29:39,228 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.03 vs. 
limit=15.0 +2024-08-30 13:29:53,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213898.66666666666, ans=0.125 +2024-08-30 13:29:57,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=213898.66666666666, ans=0.125 +2024-08-30 13:30:13,960 INFO [train.py:1114] (0/4) Epoch 17, batch 300, loss[loss=0.2388, simple_loss=0.2943, pruned_loss=0.06636, ctc_loss=0.1264, over 19530.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2746, pruned_loss=0.05232, ctc_loss=0.09883, over 3000773.43 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:30:19,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214005.33333333334, ans=0.125 +2024-08-30 13:30:28,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=214058.66666666666, ans=0.125 +2024-08-30 13:30:40,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.67 vs. limit=15.0 +2024-08-30 13:30:51,813 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.663e+02 1.872e+02 2.298e+02 3.693e+02, threshold=3.744e+02, percent-clipped=0.0 +2024-08-30 13:31:12,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214218.66666666666, ans=0.125 +2024-08-30 13:31:25,596 INFO [train.py:1114] (0/4) Epoch 17, batch 350, loss[loss=0.1931, simple_loss=0.2561, pruned_loss=0.04653, ctc_loss=0.09266, over 19745.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2755, pruned_loss=0.05258, ctc_loss=0.09958, over 3190658.25 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:31:27,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. limit=6.0 +2024-08-30 13:31:37,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214325.33333333334, ans=0.125 +2024-08-30 13:31:39,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=214325.33333333334, ans=0.125 +2024-08-30 13:31:41,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.79 vs. limit=10.0 +2024-08-30 13:32:07,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214432.0, ans=0.1 +2024-08-30 13:32:08,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.90 vs. limit=15.0 +2024-08-30 13:32:10,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=214432.0, ans=0.125 +2024-08-30 13:32:20,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=214485.33333333334, ans=0.0 +2024-08-30 13:32:24,589 INFO [train.py:1114] (0/4) Epoch 17, batch 400, loss[loss=0.1967, simple_loss=0.2789, pruned_loss=0.04147, ctc_loss=0.07919, over 19506.00 frames. 
], tot_loss[loss=0.2094, simple_loss=0.2751, pruned_loss=0.05215, ctc_loss=0.09842, over 3343295.33 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:32:29,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=214538.66666666666, ans=0.125 +2024-08-30 13:32:50,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=214645.33333333334, ans=0.0 +2024-08-30 13:33:00,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-08-30 13:33:01,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.238e+02 1.640e+02 1.901e+02 2.325e+02 4.074e+02, threshold=3.801e+02, percent-clipped=1.0 +2024-08-30 13:33:21,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=214752.0, ans=0.015 +2024-08-30 13:33:24,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=214752.0, ans=0.025 +2024-08-30 13:33:26,243 INFO [train.py:1114] (0/4) Epoch 17, batch 450, loss[loss=0.199, simple_loss=0.2789, pruned_loss=0.0429, ctc_loss=0.08315, over 19619.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2749, pruned_loss=0.05187, ctc_loss=0.09762, over 3449981.31 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:33:58,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.85 vs. limit=15.0 +2024-08-30 13:34:03,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=214858.66666666666, ans=0.0 +2024-08-30 13:38:32,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=214912.0, ans=0.2 +2024-08-30 13:38:36,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-08-30 13:43:52,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=215018.66666666666, ans=0.0 +2024-08-30 13:43:56,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=215018.66666666666, ans=0.125 +2024-08-30 13:44:05,809 INFO [train.py:1114] (0/4) Epoch 17, batch 500, loss[loss=0.2189, simple_loss=0.2915, pruned_loss=0.05321, ctc_loss=0.09982, over 19663.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2742, pruned_loss=0.05137, ctc_loss=0.09665, over 3545189.32 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:44:38,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.78 vs. 
limit=15.0 +2024-08-30 13:44:48,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215125.33333333334, ans=0.125 +2024-08-30 13:44:48,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=215125.33333333334, ans=0.125 +2024-08-30 13:44:48,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=215125.33333333334, ans=0.0 +2024-08-30 13:45:06,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.780e+02 2.026e+02 2.589e+02 4.105e+02, threshold=4.052e+02, percent-clipped=2.0 +2024-08-30 13:45:08,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=15.0 +2024-08-30 13:45:31,469 INFO [train.py:1114] (0/4) Epoch 17, batch 550, loss[loss=0.2128, simple_loss=0.2858, pruned_loss=0.05059, ctc_loss=0.09647, over 19206.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2746, pruned_loss=0.05167, ctc_loss=0.09726, over 3607486.88 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-30 13:45:31,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=215338.66666666666, ans=0.07 +2024-08-30 13:45:59,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215445.33333333334, ans=0.0 +2024-08-30 13:46:20,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=215552.0, ans=0.0 +2024-08-30 13:47:16,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=215552.0, ans=0.125 +2024-08-30 13:47:20,214 INFO [train.py:1114] (0/4) Epoch 17, batch 600, loss[loss=0.2332, simple_loss=0.2927, pruned_loss=0.06459, ctc_loss=0.1111, over 19446.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2745, pruned_loss=0.05148, ctc_loss=0.09687, over 3664553.91 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-30 13:47:22,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=215605.33333333334, ans=0.125 +2024-08-30 13:47:53,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.647e+02 1.940e+02 2.383e+02 4.124e+02, threshold=3.879e+02, percent-clipped=1.0 +2024-08-30 13:48:27,126 INFO [train.py:1114] (0/4) Epoch 17, batch 650, loss[loss=0.2001, simple_loss=0.2664, pruned_loss=0.04901, ctc_loss=0.08935, over 19762.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2738, pruned_loss=0.05121, ctc_loss=0.09637, over 3715413.88 frames. 
], batch size: 54, lr: 8.88e-03, grad_scale: 32.0 +2024-08-30 13:51:32,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215925.33333333334, ans=0.125 +2024-08-30 13:52:18,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=216032.0, ans=0.025 +2024-08-30 13:52:36,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=216032.0, ans=0.125 +2024-08-30 13:53:17,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=216085.33333333334, ans=0.125 +2024-08-30 13:54:00,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=216085.33333333334, ans=0.125 +2024-08-30 14:00:36,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216085.33333333334, ans=0.1 +2024-08-30 14:06:19,750 INFO [train.py:1114] (0/4) Epoch 17, batch 700, loss[loss=0.2137, simple_loss=0.2823, pruned_loss=0.05325, ctc_loss=0.09654, over 19728.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2748, pruned_loss=0.0515, ctc_loss=0.09689, over 3747076.79 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:06:25,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=216138.66666666666, ans=0.125 +2024-08-30 14:08:55,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0 +2024-08-30 14:09:33,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216245.33333333334, ans=0.0 +2024-08-30 14:09:58,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=216245.33333333334, ans=0.0 +2024-08-30 14:12:13,786 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.374e+02 1.667e+02 2.137e+02 2.601e+02 4.284e+02, threshold=4.274e+02, percent-clipped=4.0 +2024-08-30 14:17:27,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0 +2024-08-30 14:17:28,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=216352.0, ans=0.0 +2024-08-30 14:17:35,091 INFO [train.py:1114] (0/4) Epoch 17, batch 750, loss[loss=0.2182, simple_loss=0.2887, pruned_loss=0.05451, ctc_loss=0.09658, over 19495.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2745, pruned_loss=0.05123, ctc_loss=0.09622, over 3773742.85 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:17:42,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=216405.33333333334, ans=0.125 +2024-08-30 14:17:45,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.41 vs. 
limit=15.0 +2024-08-30 14:17:58,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=216458.66666666666, ans=0.09899494936611666 +2024-08-30 14:19:07,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216512.0, ans=0.125 +2024-08-30 14:19:33,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.81 vs. limit=10.0 +2024-08-30 14:19:43,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216565.33333333334, ans=0.1 +2024-08-30 14:19:44,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=216565.33333333334, ans=0.0 +2024-08-30 14:19:50,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=216618.66666666666, ans=0.0 +2024-08-30 14:20:37,986 INFO [train.py:1114] (0/4) Epoch 17, batch 800, loss[loss=0.1776, simple_loss=0.2456, pruned_loss=0.04007, ctc_loss=0.07368, over 19403.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2745, pruned_loss=0.05156, ctc_loss=0.09689, over 3795895.10 frames. ], batch size: 48, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:27:07,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216725.33333333334, ans=0.125 +2024-08-30 14:29:23,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=216725.33333333334, ans=0.125 +2024-08-30 14:29:49,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=216725.33333333334, ans=0.125 +2024-08-30 14:30:46,455 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:31:18,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=216778.66666666666, ans=0.125 +2024-08-30 14:31:25,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=216778.66666666666, ans=0.2 +2024-08-30 14:31:32,323 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.715e+02 2.071e+02 2.537e+02 3.967e+02, threshold=4.143e+02, percent-clipped=0.0 +2024-08-30 14:32:27,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216885.33333333334, ans=0.1 +2024-08-30 14:32:34,223 INFO [train.py:1114] (0/4) Epoch 17, batch 850, loss[loss=0.2197, simple_loss=0.2925, pruned_loss=0.05341, ctc_loss=0.1002, over 19634.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2747, pruned_loss=0.05174, ctc_loss=0.09728, over 3815942.73 frames. 
], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:32:39,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=216938.66666666666, ans=0.0 +2024-08-30 14:32:53,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216992.0, ans=0.1 +2024-08-30 14:33:00,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=216992.0, ans=0.0 +2024-08-30 14:33:02,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.86 vs. limit=15.0 +2024-08-30 14:33:07,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=217045.33333333334, ans=0.05 +2024-08-30 14:33:12,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=217045.33333333334, ans=0.0 +2024-08-30 14:34:07,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.93 vs. limit=15.0 +2024-08-30 14:34:26,357 INFO [train.py:1114] (0/4) Epoch 17, batch 900, loss[loss=0.1797, simple_loss=0.2473, pruned_loss=0.04139, ctc_loss=0.07326, over 19407.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2753, pruned_loss=0.05233, ctc_loss=0.09825, over 3820019.15 frames. ], batch size: 48, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:35:33,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217205.33333333334, ans=0.125 +2024-08-30 14:36:59,134 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.623e+02 1.810e+02 2.233e+02 4.039e+02, threshold=3.621e+02, percent-clipped=0.0 +2024-08-30 14:37:24,543 INFO [train.py:1114] (0/4) Epoch 17, batch 950, loss[loss=0.2161, simple_loss=0.2748, pruned_loss=0.05674, ctc_loss=0.1099, over 19522.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2753, pruned_loss=0.05209, ctc_loss=0.09803, over 3821118.76 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:38:29,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=217525.33333333334, ans=0.125 +2024-08-30 14:38:53,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=217632.0, ans=10.0 +2024-08-30 14:39:16,466 INFO [train.py:1114] (0/4) Epoch 17, batch 1000, loss[loss=0.2146, simple_loss=0.2739, pruned_loss=0.05678, ctc_loss=0.1046, over 19843.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2759, pruned_loss=0.05226, ctc_loss=0.09851, over 3816399.91 frames. 
], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:39:23,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=217738.66666666666, ans=0.2 +2024-08-30 14:39:25,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=217738.66666666666, ans=0.125 +2024-08-30 14:39:30,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217792.0, ans=0.1 +2024-08-30 14:39:33,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=217792.0, ans=0.125 +2024-08-30 14:39:52,706 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.648e+02 1.905e+02 2.181e+02 3.196e+02, threshold=3.810e+02, percent-clipped=0.0 +2024-08-30 14:39:54,203 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:39:55,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217898.66666666666, ans=0.125 +2024-08-30 14:39:59,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.01 vs. limit=22.5 +2024-08-30 14:40:08,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217952.0, ans=0.1 +2024-08-30 14:40:20,762 INFO [train.py:1114] (0/4) Epoch 17, batch 1050, loss[loss=0.2268, simple_loss=0.292, pruned_loss=0.0594, ctc_loss=0.1072, over 19845.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2757, pruned_loss=0.05246, ctc_loss=0.09866, over 3821538.54 frames. ], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:40:27,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=218005.33333333334, ans=0.025 +2024-08-30 14:40:32,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=218058.66666666666, ans=0.07 +2024-08-30 14:40:43,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=218058.66666666666, ans=0.0 +2024-08-30 14:40:53,295 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:40:59,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=218165.33333333334, ans=0.125 +2024-08-30 14:41:17,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=218218.66666666666, ans=0.2 +2024-08-30 14:41:24,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. limit=15.0 +2024-08-30 14:41:24,730 INFO [train.py:1114] (0/4) Epoch 17, batch 1100, loss[loss=0.2019, simple_loss=0.2731, pruned_loss=0.04692, ctc_loss=0.09225, over 19593.00 frames. ], tot_loss[loss=0.209, simple_loss=0.275, pruned_loss=0.05195, ctc_loss=0.09782, over 3829762.59 frames. 
], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:41:26,196 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:41:48,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=218378.66666666666, ans=0.0 +2024-08-30 14:41:49,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=218378.66666666666, ans=0.0 +2024-08-30 14:42:23,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.634e+02 1.909e+02 2.238e+02 3.833e+02, threshold=3.817e+02, percent-clipped=1.0 +2024-08-30 14:42:24,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218432.0, ans=0.0 +2024-08-30 14:43:03,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.41 vs. limit=12.0 +2024-08-30 14:43:15,271 INFO [train.py:1114] (0/4) Epoch 17, batch 1150, loss[loss=0.2131, simple_loss=0.2753, pruned_loss=0.05501, ctc_loss=0.1022, over 19563.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2757, pruned_loss=0.05255, ctc_loss=0.09904, over 3829113.45 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:43:28,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.97 vs. limit=22.5 +2024-08-30 14:43:45,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=218645.33333333334, ans=0.95 +2024-08-30 14:43:48,005 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:44:08,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=218752.0, ans=0.0 +2024-08-30 14:44:20,107 INFO [train.py:1114] (0/4) Epoch 17, batch 1200, loss[loss=0.2089, simple_loss=0.2819, pruned_loss=0.05022, ctc_loss=0.08873, over 19840.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2762, pruned_loss=0.05271, ctc_loss=0.09928, over 3824969.65 frames. 
], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:44:34,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=218858.66666666666, ans=0.125 +2024-08-30 14:46:08,768 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.467e+02 1.734e+02 1.937e+02 2.235e+02 3.279e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-30 14:46:09,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=218965.33333333334, ans=0.125 +2024-08-30 14:46:21,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219018.66666666666, ans=0.1 +2024-08-30 14:46:22,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=219018.66666666666, ans=0.125 +2024-08-30 14:46:25,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=219018.66666666666, ans=0.0 +2024-08-30 14:46:31,314 INFO [train.py:1114] (0/4) Epoch 17, batch 1250, loss[loss=0.2317, simple_loss=0.2958, pruned_loss=0.06126, ctc_loss=0.1126, over 19509.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2763, pruned_loss=0.05251, ctc_loss=0.09884, over 3843027.80 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:46:34,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.55 vs. limit=10.0 +2024-08-30 14:46:56,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.24 vs. limit=6.0 +2024-08-30 14:46:58,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=219178.66666666666, ans=0.125 +2024-08-30 14:48:27,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=219232.0, ans=0.0 +2024-08-30 14:48:46,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=219285.33333333334, ans=0.025 +2024-08-30 14:48:52,365 INFO [train.py:1114] (0/4) Epoch 17, batch 1300, loss[loss=0.2228, simple_loss=0.2937, pruned_loss=0.05481, ctc_loss=0.1058, over 18867.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2749, pruned_loss=0.05177, ctc_loss=0.09724, over 3845692.99 frames. ], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:48:58,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=219338.66666666666, ans=0.125 +2024-08-30 14:49:07,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. 
limit=6.0 +2024-08-30 14:49:23,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=219445.33333333334, ans=0.125 +2024-08-30 14:49:29,487 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.750e+02 2.054e+02 2.564e+02 3.826e+02, threshold=4.108e+02, percent-clipped=0.0 +2024-08-30 14:49:30,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=219498.66666666666, ans=0.125 +2024-08-30 14:49:41,826 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:49:42,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=219552.0, ans=0.0 +2024-08-30 14:49:45,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=219552.0, ans=0.125 +2024-08-30 14:50:08,947 INFO [train.py:1114] (0/4) Epoch 17, batch 1350, loss[loss=0.195, simple_loss=0.2619, pruned_loss=0.04701, ctc_loss=0.08509, over 19764.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2745, pruned_loss=0.0518, ctc_loss=0.0972, over 3857308.50 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:50:13,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=219605.33333333334, ans=0.125 +2024-08-30 14:50:34,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=219712.0, ans=0.0 +2024-08-30 14:50:35,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=219712.0, ans=0.125 +2024-08-30 14:50:58,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-08-30 14:51:09,340 INFO [train.py:1114] (0/4) Epoch 17, batch 1400, loss[loss=0.1849, simple_loss=0.2507, pruned_loss=0.04286, ctc_loss=0.08332, over 19668.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2748, pruned_loss=0.05217, ctc_loss=0.09791, over 3864290.68 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:51:28,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219872.0, ans=0.125 +2024-08-30 14:51:58,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=219978.66666666666, ans=0.2 +2024-08-30 14:52:01,611 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.697e+02 1.910e+02 2.399e+02 4.058e+02, threshold=3.819e+02, percent-clipped=0.0 +2024-08-30 14:52:02,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=10.08 vs. limit=15.0 +2024-08-30 14:52:26,366 INFO [train.py:1114] (0/4) Epoch 17, batch 1450, loss[loss=0.228, simple_loss=0.2942, pruned_loss=0.05819, ctc_loss=0.1135, over 19666.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2753, pruned_loss=0.05211, ctc_loss=0.09817, over 3863106.64 frames. 
], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:52:26,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220138.66666666666, ans=0.125 +2024-08-30 14:52:36,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.54 vs. limit=10.0 +2024-08-30 14:52:39,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=220138.66666666666, ans=0.07 +2024-08-30 14:52:43,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=220138.66666666666, ans=0.2 +2024-08-30 14:53:09,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=220245.33333333334, ans=0.025 +2024-08-30 14:53:19,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.19 vs. limit=15.0 +2024-08-30 14:53:48,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=220298.66666666666, ans=0.05 +2024-08-30 14:53:55,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=220298.66666666666, ans=0.0 +2024-08-30 14:54:01,887 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.44 vs. limit=22.5 +2024-08-30 14:54:12,187 INFO [train.py:1114] (0/4) Epoch 17, batch 1500, loss[loss=0.2059, simple_loss=0.2764, pruned_loss=0.04811, ctc_loss=0.09788, over 19599.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2754, pruned_loss=0.05208, ctc_loss=0.09829, over 3863722.97 frames. ], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:54:17,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=220405.33333333334, ans=0.125 +2024-08-30 14:54:36,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=220458.66666666666, ans=0.125 +2024-08-30 14:54:54,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.719e+02 1.906e+02 2.293e+02 3.704e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-30 14:54:59,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=220565.33333333334, ans=0.0 +2024-08-30 14:54:59,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=220565.33333333334, ans=0.5 +2024-08-30 14:55:07,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=12.0 +2024-08-30 14:55:17,132 INFO [train.py:1114] (0/4) Epoch 17, batch 1550, loss[loss=0.2258, simple_loss=0.2889, pruned_loss=0.05916, ctc_loss=0.1111, over 19627.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2755, pruned_loss=0.0523, ctc_loss=0.09869, over 3848490.77 frames. 
], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:55:31,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=220672.0, ans=0.125 +2024-08-30 14:55:33,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220672.0, ans=0.125 +2024-08-30 14:55:36,197 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:55:58,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=220778.66666666666, ans=0.125 +2024-08-30 14:56:01,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220778.66666666666, ans=0.125 +2024-08-30 14:56:18,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=220885.33333333334, ans=0.125 +2024-08-30 14:56:19,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-08-30 14:56:25,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=220885.33333333334, ans=0.125 +2024-08-30 14:56:27,383 INFO [train.py:1114] (0/4) Epoch 17, batch 1600, loss[loss=0.1895, simple_loss=0.2625, pruned_loss=0.04284, ctc_loss=0.07719, over 19833.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2752, pruned_loss=0.05196, ctc_loss=0.09797, over 3837459.82 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-30 14:56:27,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=220938.66666666666, ans=0.025 +2024-08-30 14:56:31,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=220938.66666666666, ans=0.0 +2024-08-30 14:56:35,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=220938.66666666666, ans=0.0 +2024-08-30 14:57:30,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=220992.0, ans=0.125 +2024-08-30 14:57:38,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=221045.33333333334, ans=0.0 +2024-08-30 15:00:47,802 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.738e+02 2.160e+02 2.635e+02 3.870e+02, threshold=4.320e+02, percent-clipped=2.0 +2024-08-30 15:00:53,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=221098.66666666666, ans=0.5 +2024-08-30 15:02:45,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=221152.0, ans=0.0 +2024-08-30 15:02:56,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=221152.0, ans=0.125 +2024-08-30 15:03:53,983 INFO [train.py:1114] (0/4) Epoch 17, batch 1650, loss[loss=0.2153, simple_loss=0.2893, pruned_loss=0.05021, ctc_loss=0.1022, over 19655.00 frames. 
], tot_loss[loss=0.2087, simple_loss=0.2747, pruned_loss=0.05179, ctc_loss=0.09778, over 3833286.80 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:04:00,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221205.33333333334, ans=0.0 +2024-08-30 15:05:53,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=221312.0, ans=0.0 +2024-08-30 15:07:30,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.70 vs. limit=22.5 +2024-08-30 15:07:35,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221365.33333333334, ans=0.125 +2024-08-30 15:07:51,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=221418.66666666666, ans=0.125 +2024-08-30 15:07:58,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=221418.66666666666, ans=0.125 +2024-08-30 15:08:00,827 INFO [train.py:1114] (0/4) Epoch 17, batch 1700, loss[loss=0.184, simple_loss=0.2449, pruned_loss=0.04389, ctc_loss=0.08818, over 19662.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2746, pruned_loss=0.05177, ctc_loss=0.09765, over 3846857.71 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:08:01,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=221472.0, ans=0.0 +2024-08-30 15:08:11,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=221525.33333333334, ans=10.0 +2024-08-30 15:08:23,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=221578.66666666666, ans=0.125 +2024-08-30 15:08:35,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=221632.0, ans=0.125 +2024-08-30 15:08:36,788 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.717e+02 1.998e+02 2.422e+02 4.059e+02, threshold=3.996e+02, percent-clipped=0.0 +2024-08-30 15:09:34,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221632.0, ans=0.1 +2024-08-30 15:09:36,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=221632.0, ans=0.2 +2024-08-30 15:09:36,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=221632.0, ans=0.0 +2024-08-30 15:09:49,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.77 vs. limit=22.5 +2024-08-30 15:09:50,072 INFO [train.py:1114] (0/4) Epoch 17, batch 1750, loss[loss=0.2138, simple_loss=0.2661, pruned_loss=0.05908, ctc_loss=0.1084, over 19628.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2741, pruned_loss=0.05151, ctc_loss=0.09706, over 3850512.50 frames. 
], batch size: 45, lr: 8.76e-03, grad_scale: 32.0 +2024-08-30 15:10:02,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=221792.0, ans=0.025 +2024-08-30 15:10:03,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=221792.0, ans=0.0 +2024-08-30 15:10:12,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=12.0 +2024-08-30 15:10:14,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.85 vs. limit=15.0 +2024-08-30 15:10:35,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=221952.0, ans=0.0 +2024-08-30 15:10:37,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=221952.0, ans=0.125 +2024-08-30 15:10:46,086 INFO [train.py:1114] (0/4) Epoch 17, batch 1800, loss[loss=0.1971, simple_loss=0.2673, pruned_loss=0.04599, ctc_loss=0.08755, over 19609.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2745, pruned_loss=0.05174, ctc_loss=0.09745, over 3852141.97 frames. ], batch size: 55, lr: 8.76e-03, grad_scale: 32.0 +2024-08-30 15:10:47,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222005.33333333334, ans=0.1 +2024-08-30 15:10:47,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.70 vs. limit=15.0 +2024-08-30 15:11:23,234 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.773e+02 2.029e+02 2.607e+02 4.351e+02, threshold=4.057e+02, percent-clipped=1.0 +2024-08-30 15:11:32,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222218.66666666666, ans=0.125 +2024-08-30 15:11:43,561 INFO [train.py:1114] (0/4) Epoch 17, batch 1850, loss[loss=0.2194, simple_loss=0.2945, pruned_loss=0.05209, ctc_loss=0.1002, over 19585.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.274, pruned_loss=0.05141, ctc_loss=0.09672, over 3855966.23 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0 +2024-08-30 15:12:04,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=222378.66666666666, ans=0.125 +2024-08-30 15:12:31,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=222485.33333333334, ans=22.5 +2024-08-30 15:12:37,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222485.33333333334, ans=0.125 +2024-08-30 15:12:40,701 INFO [train.py:1114] (0/4) Epoch 17, batch 1900, loss[loss=0.1972, simple_loss=0.2785, pruned_loss=0.04168, ctc_loss=0.08127, over 19659.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2746, pruned_loss=0.05143, ctc_loss=0.09666, over 3861031.91 frames. ], batch size: 59, lr: 8.75e-03, grad_scale: 16.0 +2024-08-30 15:12:52,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.50 vs. 
limit=12.0 +2024-08-30 15:13:11,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222645.33333333334, ans=0.125 +2024-08-30 15:13:18,236 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.682e+02 1.950e+02 2.328e+02 4.923e+02, threshold=3.901e+02, percent-clipped=3.0 +2024-08-30 15:13:30,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=15.0 +2024-08-30 15:13:38,414 INFO [train.py:1114] (0/4) Epoch 17, batch 1950, loss[loss=0.2147, simple_loss=0.2811, pruned_loss=0.0535, ctc_loss=0.1031, over 19559.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.276, pruned_loss=0.05205, ctc_loss=0.09786, over 3869686.70 frames. ], batch size: 52, lr: 8.74e-03, grad_scale: 16.0 +2024-08-30 15:14:18,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=222805.33333333334, ans=0.125 +2024-08-30 15:14:36,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=222858.66666666666, ans=0.125 +2024-08-30 15:14:40,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.52 vs. limit=15.0 +2024-08-30 15:15:10,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.81 vs. limit=22.5 +2024-08-30 15:15:27,049 INFO [train.py:1114] (0/4) Epoch 17, batch 2000, loss[loss=0.1624, simple_loss=0.2342, pruned_loss=0.03194, ctc_loss=0.06686, over 19628.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2762, pruned_loss=0.05179, ctc_loss=0.09761, over 3854277.82 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0 +2024-08-30 15:16:03,257 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.692e+02 2.099e+02 2.435e+02 3.373e+02, threshold=4.199e+02, percent-clipped=0.0 +2024-08-30 15:16:18,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-30 15:16:38,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-30 15:16:42,836 INFO [train.py:1114] (0/4) Epoch 17, batch 2050, loss[loss=0.1772, simple_loss=0.2454, pruned_loss=0.03987, ctc_loss=0.07327, over 19707.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2751, pruned_loss=0.05204, ctc_loss=0.09802, over 3850647.74 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0 +2024-08-30 15:17:01,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=223338.66666666666, ans=0.125 +2024-08-30 15:18:19,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0 +2024-08-30 15:18:40,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223445.33333333334, ans=0.125 +2024-08-30 15:20:15,748 INFO [train.py:1114] (0/4) Epoch 17, batch 2100, loss[loss=0.2162, simple_loss=0.2854, pruned_loss=0.05242, ctc_loss=0.1055, over 19773.00 frames. 
], tot_loss[loss=0.2086, simple_loss=0.2746, pruned_loss=0.0518, ctc_loss=0.09754, over 3857785.87 frames. ], batch size: 54, lr: 8.73e-03, grad_scale: 32.0 +2024-08-30 15:20:39,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=223605.33333333334, ans=0.125 +2024-08-30 15:20:44,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=15.0 +2024-08-30 15:20:46,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=223605.33333333334, ans=22.5 +2024-08-30 15:20:58,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223605.33333333334, ans=0.0 +2024-08-30 15:21:03,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223658.66666666666, ans=0.1 +2024-08-30 15:21:15,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=223712.0, ans=0.0 +2024-08-30 15:21:23,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.29 vs. limit=22.5 +2024-08-30 15:21:23,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=223765.33333333334, ans=0.0 +2024-08-30 15:21:41,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.235e+02 1.693e+02 2.019e+02 2.546e+02 6.032e+02, threshold=4.039e+02, percent-clipped=5.0 +2024-08-30 15:21:44,365 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:21:51,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.99 vs. limit=15.0 +2024-08-30 15:21:51,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223818.66666666666, ans=0.0 +2024-08-30 15:21:55,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=223818.66666666666, ans=0.125 +2024-08-30 15:22:02,851 INFO [train.py:1114] (0/4) Epoch 17, batch 2150, loss[loss=0.2008, simple_loss=0.2674, pruned_loss=0.04893, ctc_loss=0.09069, over 19594.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.274, pruned_loss=0.05147, ctc_loss=0.09675, over 3868200.70 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0 +2024-08-30 15:22:04,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=223872.0, ans=0.5 +2024-08-30 15:22:06,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=223872.0, ans=0.125 +2024-08-30 15:22:08,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=223872.0, ans=0.2 +2024-08-30 15:22:24,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.02 vs. 
limit=12.0 +2024-08-30 15:22:29,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=223978.66666666666, ans=0.125 +2024-08-30 15:22:34,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=223978.66666666666, ans=0.2 +2024-08-30 15:22:42,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=224032.0, ans=0.125 +2024-08-30 15:22:46,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.27 vs. limit=15.0 +2024-08-30 15:22:58,293 INFO [train.py:1114] (0/4) Epoch 17, batch 2200, loss[loss=0.2313, simple_loss=0.2981, pruned_loss=0.05927, ctc_loss=0.1151, over 19586.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2736, pruned_loss=0.05101, ctc_loss=0.09589, over 3867184.45 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0 +2024-08-30 15:23:01,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=224138.66666666666, ans=0.125 +2024-08-30 15:23:34,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=22.5 +2024-08-30 15:23:39,324 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:23:42,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=224298.66666666666, ans=0.025 +2024-08-30 15:23:53,332 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.675e+02 1.986e+02 2.371e+02 4.244e+02, threshold=3.972e+02, percent-clipped=2.0 +2024-08-30 15:23:54,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=224298.66666666666, ans=0.125 +2024-08-30 15:24:13,617 INFO [train.py:1114] (0/4) Epoch 17, batch 2250, loss[loss=0.2106, simple_loss=0.2834, pruned_loss=0.0492, ctc_loss=0.09857, over 19615.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.274, pruned_loss=0.05122, ctc_loss=0.09627, over 3867576.77 frames. ], batch size: 55, lr: 8.71e-03, grad_scale: 32.0 +2024-08-30 15:24:28,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=224458.66666666666, ans=0.015 +2024-08-30 15:25:44,793 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:25:54,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=224512.0, ans=0.025 +2024-08-30 15:26:56,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=224565.33333333334, ans=0.2 +2024-08-30 15:27:11,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=224618.66666666666, ans=0.125 +2024-08-30 15:27:15,752 INFO [train.py:1114] (0/4) Epoch 17, batch 2300, loss[loss=0.1762, simple_loss=0.2496, pruned_loss=0.03707, ctc_loss=0.07161, over 19495.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2736, pruned_loss=0.05151, ctc_loss=0.09671, over 3861679.45 frames. 
], batch size: 49, lr: 8.71e-03, grad_scale: 32.0 +2024-08-30 15:27:27,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=224672.0, ans=0.125 +2024-08-30 15:28:32,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224778.66666666666, ans=0.125 +2024-08-30 15:28:45,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=224832.0, ans=0.09899494936611666 +2024-08-30 15:28:46,695 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.759e+02 2.126e+02 2.592e+02 4.068e+02, threshold=4.252e+02, percent-clipped=2.0 +2024-08-30 15:28:46,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=224832.0, ans=0.0 +2024-08-30 15:28:49,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224832.0, ans=0.1 +2024-08-30 15:29:04,185 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:29:49,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.77 vs. limit=15.0 +2024-08-30 15:29:51,084 INFO [train.py:1114] (0/4) Epoch 17, batch 2350, loss[loss=0.2031, simple_loss=0.28, pruned_loss=0.04582, ctc_loss=0.08652, over 19653.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2734, pruned_loss=0.05166, ctc_loss=0.09696, over 3864539.45 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 32.0 +2024-08-30 15:30:02,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224992.0, ans=0.1 +2024-08-30 15:30:43,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=225045.33333333334, ans=0.95 +2024-08-30 15:31:51,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=12.0 +2024-08-30 15:32:54,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=225152.0, ans=0.2 +2024-08-30 15:32:55,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=225152.0, ans=0.2 +2024-08-30 15:32:58,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=225152.0, ans=0.2 +2024-08-30 15:33:11,237 INFO [train.py:1114] (0/4) Epoch 17, batch 2400, loss[loss=0.2476, simple_loss=0.3034, pruned_loss=0.06972, ctc_loss=0.1307, over 19364.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2766, pruned_loss=0.05334, ctc_loss=0.09985, over 3858657.20 frames. 
], batch size: 67, lr: 8.70e-03, grad_scale: 32.0 +2024-08-30 15:33:44,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=225312.0, ans=0.125 +2024-08-30 15:33:48,810 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.684e+02 1.880e+02 2.443e+02 3.780e+02, threshold=3.760e+02, percent-clipped=0.0 +2024-08-30 15:34:08,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=225418.66666666666, ans=0.125 +2024-08-30 15:34:10,341 INFO [train.py:1114] (0/4) Epoch 17, batch 2450, loss[loss=0.3143, simple_loss=0.3359, pruned_loss=0.1035, ctc_loss=0.2142, over 13661.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2805, pruned_loss=0.05616, ctc_loss=0.1058, over 3734008.74 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 16.0 +2024-08-30 15:34:14,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=225472.0, ans=0.125 +2024-08-30 15:34:44,532 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=1.108e-01 +2024-08-30 15:34:45,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=225472.0, ans=0.0 +2024-08-30 15:34:52,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225525.33333333334, ans=0.1 +2024-08-30 15:35:00,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225578.66666666666, ans=0.125 +2024-08-30 15:35:03,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.68 vs. limit=15.0 +2024-08-30 15:35:22,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=225632.0, ans=0.025 +2024-08-30 15:35:40,793 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 15:38:02,850 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.2041, simple_loss=0.2608, pruned_loss=0.05338, ctc_loss=0.1015, over 19813.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2608, pruned_loss=0.05338, ctc_loss=0.1015, over 19813.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 15:38:02,851 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-30 15:39:11,427 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4945, 3.0577, 2.0697, 2.5956], device='cuda:0') +2024-08-30 15:39:34,945 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.0364, ctc_loss=0.06401, over 944034.00 frames. +2024-08-30 15:39:34,945 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13588MB +2024-08-30 15:39:39,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225680.0, ans=0.125 +2024-08-30 15:39:51,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.07 vs. 
limit=6.0 +2024-08-30 15:40:29,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 15:40:30,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 15:40:35,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.11 vs. limit=15.0 +2024-08-30 15:40:58,099 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.919e+02 2.092e+02 2.421e+02 5.568e+02, threshold=4.185e+02, percent-clipped=4.0 +2024-08-30 15:41:04,970 INFO [train.py:1114] (0/4) Epoch 18, batch 50, loss[loss=0.1841, simple_loss=0.253, pruned_loss=0.04164, ctc_loss=0.07987, over 19716.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2794, pruned_loss=0.0545, ctc_loss=0.103, over 845746.19 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 15:41:18,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225946.66666666666, ans=0.1 +2024-08-30 15:41:59,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226000.0, ans=0.1 +2024-08-30 15:42:02,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=226053.33333333334, ans=0.0 +2024-08-30 15:42:12,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-30 15:42:28,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226160.0, ans=0.0 +2024-08-30 15:44:07,510 INFO [train.py:1114] (0/4) Epoch 18, batch 100, loss[loss=0.211, simple_loss=0.2725, pruned_loss=0.05408, ctc_loss=0.1033, over 19718.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2792, pruned_loss=0.05356, ctc_loss=0.1006, over 1499054.83 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-30 15:44:09,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=226213.33333333334, ans=0.025 +2024-08-30 15:45:01,907 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.711e+02 1.973e+02 2.383e+02 4.146e+02, threshold=3.946e+02, percent-clipped=0.0 +2024-08-30 15:45:03,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=226426.66666666666, ans=0.015 +2024-08-30 15:45:10,569 INFO [train.py:1114] (0/4) Epoch 18, batch 150, loss[loss=0.1915, simple_loss=0.2504, pruned_loss=0.04914, ctc_loss=0.08582, over 19715.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2754, pruned_loss=0.05179, ctc_loss=0.09749, over 2028052.35 frames. 
], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-30 15:45:10,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226480.0, ans=0.1 +2024-08-30 15:45:31,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=226533.33333333334, ans=0.125 +2024-08-30 15:45:42,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=226586.66666666666, ans=10.0 +2024-08-30 15:46:00,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226640.0, ans=0.125 +2024-08-30 15:46:10,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.45 vs. limit=22.5 +2024-08-30 15:46:16,526 INFO [train.py:1114] (0/4) Epoch 18, batch 200, loss[loss=0.2309, simple_loss=0.2918, pruned_loss=0.06218, ctc_loss=0.114, over 18248.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2737, pruned_loss=0.05066, ctc_loss=0.0954, over 2435754.48 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-30 15:46:33,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.65 vs. limit=15.0 +2024-08-30 15:46:45,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=226853.33333333334, ans=0.05 +2024-08-30 15:46:51,315 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:47:08,533 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.794e+02 2.164e+02 2.564e+02 4.131e+02, threshold=4.328e+02, percent-clipped=1.0 +2024-08-30 15:47:20,539 INFO [train.py:1114] (0/4) Epoch 18, batch 250, loss[loss=0.2179, simple_loss=0.2815, pruned_loss=0.05683, ctc_loss=0.1014, over 19404.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2742, pruned_loss=0.05106, ctc_loss=0.09602, over 2755802.13 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-30 15:47:32,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227066.66666666666, ans=0.125 +2024-08-30 15:48:32,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.27 vs. limit=15.0 +2024-08-30 15:48:37,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.02 vs. 
limit=22.5 +2024-08-30 15:48:45,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227120.0, ans=0.1 +2024-08-30 15:48:53,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227120.0, ans=0.1 +2024-08-30 15:49:06,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227173.33333333334, ans=0.125 +2024-08-30 15:49:06,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=227173.33333333334, ans=0.125 +2024-08-30 15:49:22,642 INFO [train.py:1114] (0/4) Epoch 18, batch 300, loss[loss=0.2088, simple_loss=0.2777, pruned_loss=0.05118, ctc_loss=0.09382, over 19499.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.274, pruned_loss=0.05103, ctc_loss=0.09617, over 3000417.31 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-30 15:50:55,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.06 vs. limit=12.0 +2024-08-30 15:51:03,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-08-30 15:51:40,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.730e+02 1.916e+02 2.273e+02 3.732e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-30 15:51:48,908 INFO [train.py:1114] (0/4) Epoch 18, batch 350, loss[loss=0.1864, simple_loss=0.2452, pruned_loss=0.04666, ctc_loss=0.08558, over 19741.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2737, pruned_loss=0.05089, ctc_loss=0.09588, over 3191008.27 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-30 15:52:08,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=22.5 +2024-08-30 15:52:08,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=227600.0, ans=0.125 +2024-08-30 15:52:13,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227600.0, ans=0.0 +2024-08-30 15:52:34,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=227706.66666666666, ans=0.125 +2024-08-30 15:52:51,854 INFO [train.py:1114] (0/4) Epoch 18, batch 400, loss[loss=0.1975, simple_loss=0.2709, pruned_loss=0.04422, ctc_loss=0.08937, over 19506.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.274, pruned_loss=0.05109, ctc_loss=0.09619, over 3343005.44 frames. 
], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-30 15:52:59,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227813.33333333334, ans=0.125 +2024-08-30 15:54:16,400 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.651e+02 1.862e+02 2.258e+02 4.636e+02, threshold=3.723e+02, percent-clipped=1.0 +2024-08-30 15:54:16,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=228026.66666666666, ans=0.125 +2024-08-30 15:54:17,687 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:54:25,953 INFO [train.py:1114] (0/4) Epoch 18, batch 450, loss[loss=0.1884, simple_loss=0.2645, pruned_loss=0.04072, ctc_loss=0.07721, over 19611.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2732, pruned_loss=0.05055, ctc_loss=0.09533, over 3451708.44 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-30 15:54:26,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.10 vs. limit=10.0 +2024-08-30 15:54:58,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228186.66666666666, ans=0.1 +2024-08-30 15:55:15,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=228240.0, ans=0.125 +2024-08-30 15:55:22,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=12.0 +2024-08-30 15:55:37,515 INFO [train.py:1114] (0/4) Epoch 18, batch 500, loss[loss=0.1938, simple_loss=0.2659, pruned_loss=0.04491, ctc_loss=0.07995, over 19634.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2727, pruned_loss=0.05056, ctc_loss=0.09544, over 3546079.07 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-30 15:55:41,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=228346.66666666666, ans=0.0 +2024-08-30 15:56:16,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=228400.0, ans=0.0 +2024-08-30 15:56:22,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=228453.33333333334, ans=0.125 +2024-08-30 15:56:25,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228453.33333333334, ans=0.1 +2024-08-30 15:56:38,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. 
limit=15.0 +2024-08-30 15:56:40,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=228453.33333333334, ans=15.0 +2024-08-30 15:56:44,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=228506.66666666666, ans=0.0 +2024-08-30 15:57:00,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228506.66666666666, ans=0.125 +2024-08-30 15:57:03,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=22.5 +2024-08-30 15:57:52,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=228560.0, ans=0.125 +2024-08-30 15:57:54,266 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.602e+02 1.832e+02 2.190e+02 3.877e+02, threshold=3.665e+02, percent-clipped=2.0 +2024-08-30 15:57:59,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=228560.0, ans=0.125 +2024-08-30 15:58:00,977 INFO [train.py:1114] (0/4) Epoch 18, batch 550, loss[loss=0.2026, simple_loss=0.2776, pruned_loss=0.04643, ctc_loss=0.08688, over 19273.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2729, pruned_loss=0.05051, ctc_loss=0.09524, over 3608445.94 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-30 15:58:44,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228613.33333333334, ans=0.1 +2024-08-30 15:58:58,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228666.66666666666, ans=0.125 +2024-08-30 16:00:50,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.32 vs. limit=10.0 +2024-08-30 16:00:54,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=228720.0, ans=0.025 +2024-08-30 16:01:05,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=228720.0, ans=0.2 +2024-08-30 16:01:09,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=228720.0, ans=0.125 +2024-08-30 16:01:35,825 INFO [train.py:1114] (0/4) Epoch 18, batch 600, loss[loss=0.2306, simple_loss=0.2883, pruned_loss=0.06276, ctc_loss=0.1182, over 19393.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2727, pruned_loss=0.05029, ctc_loss=0.09454, over 3666186.54 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-30 16:01:39,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=228880.0, ans=0.125 +2024-08-30 16:02:21,003 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.45 vs. 
limit=22.5 +2024-08-30 16:03:04,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228986.66666666666, ans=0.1 +2024-08-30 16:03:13,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.08 vs. limit=15.0 +2024-08-30 16:03:47,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229040.0, ans=0.125 +2024-08-30 16:04:29,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=229040.0, ans=0.125 +2024-08-30 16:04:35,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229040.0, ans=0.1 +2024-08-30 16:04:41,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.417e+02 1.726e+02 2.045e+02 2.727e+02 4.181e+02, threshold=4.090e+02, percent-clipped=7.0 +2024-08-30 16:04:48,712 INFO [train.py:1114] (0/4) Epoch 18, batch 650, loss[loss=0.19, simple_loss=0.2615, pruned_loss=0.04304, ctc_loss=0.08085, over 19762.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2718, pruned_loss=0.04999, ctc_loss=0.09404, over 3716392.84 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-30 16:05:08,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=229146.66666666666, ans=0.125 +2024-08-30 16:06:24,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229200.0, ans=0.125 +2024-08-30 16:06:36,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229253.33333333334, ans=0.125 +2024-08-30 16:06:43,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229306.66666666666, ans=0.1 +2024-08-30 16:06:51,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=229306.66666666666, ans=0.125 +2024-08-30 16:07:18,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=229360.0, ans=0.025 +2024-08-30 16:07:32,102 INFO [train.py:1114] (0/4) Epoch 18, batch 700, loss[loss=0.1917, simple_loss=0.2595, pruned_loss=0.04617, ctc_loss=0.07901, over 19714.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2721, pruned_loss=0.05001, ctc_loss=0.09406, over 3749192.59 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:07:37,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.09 vs. 
limit=15.0 +2024-08-30 16:07:39,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=229413.33333333334, ans=22.5 +2024-08-30 16:08:08,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=229573.33333333334, ans=0.125 +2024-08-30 16:08:27,249 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.709e+02 1.988e+02 2.480e+02 4.374e+02, threshold=3.975e+02, percent-clipped=1.0 +2024-08-30 16:08:33,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=229680.0, ans=0.2 +2024-08-30 16:08:34,086 INFO [train.py:1114] (0/4) Epoch 18, batch 750, loss[loss=0.2257, simple_loss=0.2905, pruned_loss=0.05788, ctc_loss=0.1128, over 19502.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2726, pruned_loss=0.05039, ctc_loss=0.09487, over 3774959.33 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:08:46,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229680.0, ans=0.125 +2024-08-30 16:09:13,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=229840.0, ans=0.2 +2024-08-30 16:09:34,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229893.33333333334, ans=0.125 +2024-08-30 16:09:38,056 INFO [train.py:1114] (0/4) Epoch 18, batch 800, loss[loss=0.2044, simple_loss=0.2605, pruned_loss=0.05406, ctc_loss=0.1006, over 19807.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2722, pruned_loss=0.05018, ctc_loss=0.09443, over 3796787.06 frames. ], batch size: 49, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:09:56,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.35 vs. limit=15.0 +2024-08-30 16:10:07,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=230000.0, ans=0.025 +2024-08-30 16:10:31,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=230106.66666666666, ans=0.2 +2024-08-30 16:11:30,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=230160.0, ans=0.0 +2024-08-30 16:11:34,979 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.744e+02 1.950e+02 2.451e+02 4.139e+02, threshold=3.901e+02, percent-clipped=0.0 +2024-08-30 16:11:47,902 INFO [train.py:1114] (0/4) Epoch 18, batch 850, loss[loss=0.2136, simple_loss=0.2869, pruned_loss=0.05022, ctc_loss=0.09987, over 19633.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2722, pruned_loss=0.05019, ctc_loss=0.09441, over 3815142.45 frames. 
], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-30 16:11:57,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230213.33333333334, ans=0.125 +2024-08-30 16:12:16,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=230320.0, ans=0.125 +2024-08-30 16:12:25,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=230320.0, ans=0.125 +2024-08-30 16:12:26,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=230373.33333333334, ans=0.2 +2024-08-30 16:12:48,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=230426.66666666666, ans=0.07 +2024-08-30 16:12:57,689 INFO [train.py:1114] (0/4) Epoch 18, batch 900, loss[loss=0.197, simple_loss=0.2602, pruned_loss=0.04913, ctc_loss=0.08885, over 19425.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2727, pruned_loss=0.05055, ctc_loss=0.09512, over 3819486.86 frames. ], batch size: 48, lr: 8.36e-03, grad_scale: 32.0 +2024-08-30 16:12:59,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=12.0 +2024-08-30 16:13:28,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0 +2024-08-30 16:13:44,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=230693.33333333334, ans=0.0 +2024-08-30 16:13:49,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.771e+02 2.097e+02 2.541e+02 3.279e+02, threshold=4.195e+02, percent-clipped=1.0 +2024-08-30 16:13:56,603 INFO [train.py:1114] (0/4) Epoch 18, batch 950, loss[loss=0.1883, simple_loss=0.2564, pruned_loss=0.0437, ctc_loss=0.08186, over 19484.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2733, pruned_loss=0.05071, ctc_loss=0.09546, over 3820910.45 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-30 16:14:50,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=230746.66666666666, ans=0.0 +2024-08-30 16:15:13,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=230853.33333333334, ans=0.125 +2024-08-30 16:15:14,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=230853.33333333334, ans=0.2 +2024-08-30 16:15:34,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.04 vs. limit=10.0 +2024-08-30 16:15:39,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=230960.0, ans=0.125 +2024-08-30 16:15:41,762 INFO [train.py:1114] (0/4) Epoch 18, batch 1000, loss[loss=0.1873, simple_loss=0.2579, pruned_loss=0.04173, ctc_loss=0.08273, over 19851.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2741, pruned_loss=0.05112, ctc_loss=0.09635, over 3815875.19 frames. 
], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-30 16:15:51,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=231013.33333333334, ans=0.0 +2024-08-30 16:15:55,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.67 vs. limit=6.0 +2024-08-30 16:16:20,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231120.0, ans=0.1 +2024-08-30 16:16:46,157 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.681e+02 1.935e+02 2.141e+02 3.468e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-30 16:16:46,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231226.66666666666, ans=0.125 +2024-08-30 16:16:53,208 INFO [train.py:1114] (0/4) Epoch 18, batch 1050, loss[loss=0.224, simple_loss=0.2943, pruned_loss=0.05624, ctc_loss=0.1031, over 19852.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2734, pruned_loss=0.05094, ctc_loss=0.0959, over 3822177.37 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-30 16:16:56,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=231280.0, ans=0.2 +2024-08-30 16:17:59,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=231386.66666666666, ans=0.125 +2024-08-30 16:18:00,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0 +2024-08-30 16:18:01,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.20 vs. limit=15.0 +2024-08-30 16:18:29,881 INFO [train.py:1114] (0/4) Epoch 18, batch 1100, loss[loss=0.2234, simple_loss=0.2836, pruned_loss=0.05964, ctc_loss=0.11, over 19596.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2727, pruned_loss=0.05052, ctc_loss=0.0952, over 3829672.02 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0 +2024-08-30 16:18:57,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=231653.33333333334, ans=0.125 +2024-08-30 16:19:03,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=231653.33333333334, ans=0.2 +2024-08-30 16:19:24,175 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.668e+02 1.884e+02 2.263e+02 3.606e+02, threshold=3.767e+02, percent-clipped=0.0 +2024-08-30 16:19:52,650 INFO [train.py:1114] (0/4) Epoch 18, batch 1150, loss[loss=0.1929, simple_loss=0.2616, pruned_loss=0.045, ctc_loss=0.0857, over 19608.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2731, pruned_loss=0.05087, ctc_loss=0.09608, over 3827298.87 frames. 
], batch size: 52, lr: 8.33e-03, grad_scale: 32.0 +2024-08-30 16:19:55,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=231813.33333333334, ans=0.125 +2024-08-30 16:19:56,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231813.33333333334, ans=0.1 +2024-08-30 16:19:57,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=231813.33333333334, ans=0.125 +2024-08-30 16:20:00,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=231813.33333333334, ans=0.2 +2024-08-30 16:22:11,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=231866.66666666666, ans=0.0 +2024-08-30 16:22:28,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.40 vs. limit=15.0 +2024-08-30 16:22:56,529 INFO [train.py:1114] (0/4) Epoch 18, batch 1200, loss[loss=0.2079, simple_loss=0.2849, pruned_loss=0.04709, ctc_loss=0.09157, over 19837.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2746, pruned_loss=0.05148, ctc_loss=0.09725, over 3824043.11 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-30 16:22:58,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=232080.0, ans=0.035 +2024-08-30 16:23:43,454 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.05 vs. limit=15.0 +2024-08-30 16:23:45,955 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.656e+02 1.841e+02 2.164e+02 3.391e+02, threshold=3.682e+02, percent-clipped=0.0 +2024-08-30 16:23:48,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=232293.33333333334, ans=0.0 +2024-08-30 16:23:52,935 INFO [train.py:1114] (0/4) Epoch 18, batch 1250, loss[loss=0.2104, simple_loss=0.2778, pruned_loss=0.05201, ctc_loss=0.09746, over 19507.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2753, pruned_loss=0.0516, ctc_loss=0.09737, over 3842402.06 frames. 
], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-30 16:24:06,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232400.0, ans=0.125 +2024-08-30 16:24:09,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=232400.0, ans=0.0 +2024-08-30 16:24:10,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=232400.0, ans=0.125 +2024-08-30 16:24:27,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=232453.33333333334, ans=0.2 +2024-08-30 16:24:34,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232506.66666666666, ans=0.1 +2024-08-30 16:25:36,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=232506.66666666666, ans=0.125 +2024-08-30 16:25:38,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232506.66666666666, ans=0.1 +2024-08-30 16:25:53,657 INFO [train.py:1114] (0/4) Epoch 18, batch 1300, loss[loss=0.2042, simple_loss=0.2777, pruned_loss=0.04738, ctc_loss=0.08971, over 18824.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2738, pruned_loss=0.05101, ctc_loss=0.09608, over 3845983.72 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0 +2024-08-30 16:32:42,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=232773.33333333334, ans=0.125 +2024-08-30 16:35:48,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232826.66666666666, ans=0.1 +2024-08-30 16:45:23,080 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.785e+02 2.170e+02 2.759e+02 4.331e+02, threshold=4.339e+02, percent-clipped=5.0 +2024-08-30 17:02:45,885 INFO [train.py:1114] (0/4) Epoch 18, batch 1350, loss[loss=0.2027, simple_loss=0.2613, pruned_loss=0.05242, ctc_loss=0.09782, over 19761.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2736, pruned_loss=0.05108, ctc_loss=0.0961, over 3857349.73 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 32.0 +2024-08-30 17:12:54,858 INFO [train.py:1050] (0/4) Caught exception: [Rank 0] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=46170, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600000 milliseconds before timing out.. 
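The watchdog timeout above means rank 0's ALLREDUCE waited the full 600000 ms process-group timeout without completing, typically because another rank stalled or crashed. A minimal sketch, assuming a plain PyTorch DDP setup rather than icefall's actual launcher code, of where that limit is chosen when the process group is created (the 30-minute value is an arbitrary example, not a recommendation):

from datetime import timedelta

import torch.distributed as dist

# Hypothetical setup helper; rank and world_size would come from the launcher,
# and MASTER_ADDR/MASTER_PORT from the environment as usual.
def setup_process_group(rank: int, world_size: int) -> None:
    # `timeout` bounds how long a collective (such as the ALLREDUCE above)
    # may block before the NCCL watchdog aborts it; the log shows a
    # 600000 ms (10 minute) limit in effect for this run.
    dist.init_process_group(
        backend="nccl",
        rank=rank,
        world_size=world_size,
        timeout=timedelta(minutes=30),
    )

When the watchdog does fire, the next few log lines show the recovery path: train.py saves the current model state (bad-model-0.pt) and the offending batch, along with its feature shape and token count, so the failure can be reproduced offline.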
+2024-08-30 17:12:54,860 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-0.pt +2024-08-30 17:12:57,589 INFO [train.py:1413] (0/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 17:12:57,636 INFO [train.py:1419] (0/4) features shape: torch.Size([56, 1420, 80]) +2024-08-30 17:12:57,638 INFO [train.py:1423] (0/4) num tokens: 4279 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-1 new file mode 100644 index 0000000000000000000000000000000000000000..3f183ee8a127d7c14aa40c3299de6856ab759c38 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-1 @@ -0,0 +1,577 @@ +2024-08-30 12:44:46,730 INFO [train.py:1182] (1/4) Training started +2024-08-30 12:44:48,385 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-30 12:44:48,388 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 17, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 12:44:48,388 INFO [train.py:1212] (1/4) About to create model +2024-08-30 12:44:49,100 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-30 12:44:49,100 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-16.pt +2024-08-30 12:45:01,813 INFO [train.py:1231] (1/4) Using DDP +2024-08-30 12:45:06,258 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-30 12:45:06,457 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-30 12:45:06,457 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-30 12:45:08,260 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-30 12:45:08,264 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-30 12:45:08,444 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-30 12:45:08,576 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-30 12:45:08,901 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-30 12:45:08,901 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 12:51:15,857 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.24 vs. limit=3.0 +2024-08-30 12:51:17,012 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-30 12:51:18,484 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-30 12:53:02,305 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-30 12:53:03,662 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-30 12:54:12,744 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-30 12:54:14,357 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13467MB +2024-08-30 12:54:14,377 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-30 12:55:00,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.57 vs. limit=15.0 +2024-08-30 12:55:06,915 INFO [train.py:1114] (1/4) Epoch 17, batch 0, loss[loss=0.185, simple_loss=0.2497, pruned_loss=0.04405, ctc_loss=0.08062, over 19820.00 frames. 
], tot_loss[loss=0.185, simple_loss=0.2497, pruned_loss=0.04405, ctc_loss=0.08062, over 19820.00 frames. ], batch size: 49, lr: 8.95e-03, grad_scale: 32.0 +2024-08-30 12:55:06,915 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-30 12:55:26,654 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.7543, 2.1106, 1.5990, 1.9654, 2.1607, 2.2839, 2.1657, 1.7068], + device='cuda:1') +2024-08-30 12:55:31,711 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.185, simple_loss=0.2737, pruned_loss=0.03584, ctc_loss=0.06176, over 944034.00 frames. +2024-08-30 12:55:31,712 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13467MB +2024-08-30 12:55:59,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=212458.66666666666, ans=0.0 +2024-08-30 12:56:10,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-30 12:56:13,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=212458.66666666666, ans=0.05 +2024-08-30 12:56:42,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=212458.66666666666, ans=0.2 +2024-08-30 12:58:34,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-30 12:59:52,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212512.0, ans=0.1 +2024-08-30 13:01:17,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212512.0, ans=0.125 +2024-08-30 13:06:19,314 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.860e+02 2.030e+02 2.233e+02 2.993e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-30 13:09:44,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212672.0, ans=0.125 +2024-08-30 13:09:56,407 INFO [train.py:1114] (1/4) Epoch 17, batch 50, loss[loss=0.1964, simple_loss=0.2636, pruned_loss=0.04701, ctc_loss=0.08799, over 19696.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2794, pruned_loss=0.05445, ctc_loss=0.104, over 844645.99 frames. 
], batch size: 47, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:09:59,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=212672.0, ans=0.125 +2024-08-30 13:15:56,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=212672.0, ans=0.0 +2024-08-30 13:16:10,042 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:18:30,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=212832.0, ans=0.125 +2024-08-30 13:18:39,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=212832.0, ans=0.2 +2024-08-30 13:19:01,137 INFO [train.py:1114] (1/4) Epoch 17, batch 100, loss[loss=0.2007, simple_loss=0.2719, pruned_loss=0.0464, ctc_loss=0.09168, over 19711.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2806, pruned_loss=0.05462, ctc_loss=0.103, over 1499050.01 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:19:07,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.52 vs. limit=15.0 +2024-08-30 13:19:25,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212992.0, ans=0.125 +2024-08-30 13:22:04,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=213045.33333333334, ans=0.0 +2024-08-30 13:22:37,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=213045.33333333334, ans=0.125 +2024-08-30 13:23:11,084 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.706e+02 1.953e+02 2.287e+02 3.713e+02, threshold=3.906e+02, percent-clipped=0.0 +2024-08-30 13:23:53,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.35 vs. limit=15.0 +2024-08-30 13:24:10,835 INFO [train.py:1114] (1/4) Epoch 17, batch 150, loss[loss=0.1898, simple_loss=0.2448, pruned_loss=0.04741, ctc_loss=0.09987, over 19703.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.278, pruned_loss=0.05362, ctc_loss=0.101, over 2028977.84 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:24:21,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=213205.33333333334, ans=0.125 +2024-08-30 13:24:35,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=12.0 +2024-08-30 13:25:09,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213312.0, ans=0.0 +2024-08-30 13:27:36,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213365.33333333334, ans=0.0 +2024-08-30 13:27:49,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.68 vs. 
limit=10.0 +2024-08-30 13:27:50,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213365.33333333334, ans=0.125 +2024-08-30 13:27:59,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=213418.66666666666, ans=0.0 +2024-08-30 13:28:05,733 INFO [train.py:1114] (1/4) Epoch 17, batch 200, loss[loss=0.227, simple_loss=0.2885, pruned_loss=0.06039, ctc_loss=0.1118, over 18150.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2769, pruned_loss=0.05286, ctc_loss=0.09969, over 2437042.07 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:28:06,078 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.854e-02 +2024-08-30 13:28:09,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=213472.0, ans=0.0 +2024-08-30 13:28:30,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=213578.66666666666, ans=0.125 +2024-08-30 13:28:31,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213578.66666666666, ans=0.125 +2024-08-30 13:28:32,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=213578.66666666666, ans=0.125 +2024-08-30 13:28:40,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.384e+02 1.731e+02 1.992e+02 2.666e+02 4.093e+02, threshold=3.983e+02, percent-clipped=1.0 +2024-08-30 13:28:58,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.03 vs. limit=22.5 +2024-08-30 13:29:05,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=213685.33333333334, ans=0.0 +2024-08-30 13:29:07,557 INFO [train.py:1114] (1/4) Epoch 17, batch 250, loss[loss=0.2289, simple_loss=0.296, pruned_loss=0.05953, ctc_loss=0.1072, over 19395.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2771, pruned_loss=0.05331, ctc_loss=0.1003, over 2757247.20 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:29:10,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=213738.66666666666, ans=0.0 +2024-08-30 13:29:39,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=12.0 +2024-08-30 13:29:54,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213898.66666666666, ans=0.1 +2024-08-30 13:30:13,962 INFO [train.py:1114] (1/4) Epoch 17, batch 300, loss[loss=0.2405, simple_loss=0.2992, pruned_loss=0.06715, ctc_loss=0.1188, over 19510.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.276, pruned_loss=0.05275, ctc_loss=0.0994, over 3001398.30 frames. 
], batch size: 61, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:30:51,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.663e+02 1.872e+02 2.298e+02 3.693e+02, threshold=3.744e+02, percent-clipped=0.0 +2024-08-30 13:30:54,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=214165.33333333334, ans=0.2 +2024-08-30 13:31:01,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=214165.33333333334, ans=0.2 +2024-08-30 13:31:12,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=214218.66666666666, ans=10.0 +2024-08-30 13:31:21,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.52 vs. limit=22.5 +2024-08-30 13:31:25,598 INFO [train.py:1114] (1/4) Epoch 17, batch 350, loss[loss=0.1843, simple_loss=0.2542, pruned_loss=0.04182, ctc_loss=0.07697, over 19756.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2762, pruned_loss=0.05242, ctc_loss=0.09879, over 3191743.97 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:31:27,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=214272.0, ans=0.2 +2024-08-30 13:31:28,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=214272.0, ans=0.07 +2024-08-30 13:31:29,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.42 vs. limit=15.0 +2024-08-30 13:31:44,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=214325.33333333334, ans=0.5 +2024-08-30 13:32:04,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=214432.0, ans=0.125 +2024-08-30 13:32:07,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=214432.0, ans=0.125 +2024-08-30 13:32:12,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=214485.33333333334, ans=0.125 +2024-08-30 13:32:19,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-30 13:32:24,581 INFO [train.py:1114] (1/4) Epoch 17, batch 400, loss[loss=0.1905, simple_loss=0.2712, pruned_loss=0.03987, ctc_loss=0.07503, over 19487.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2754, pruned_loss=0.05185, ctc_loss=0.09784, over 3343628.60 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:32:30,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=214538.66666666666, ans=0.05 +2024-08-30 13:32:33,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214538.66666666666, ans=0.125 +2024-08-30 13:32:39,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.49 vs. 
limit=22.5 +2024-08-30 13:32:55,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=214645.33333333334, ans=0.0 +2024-08-30 13:32:55,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-30 13:33:01,010 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.238e+02 1.640e+02 1.901e+02 2.325e+02 4.074e+02, threshold=3.801e+02, percent-clipped=1.0 +2024-08-30 13:33:07,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=214698.66666666666, ans=0.125 +2024-08-30 13:33:11,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=214698.66666666666, ans=0.2 +2024-08-30 13:33:20,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214752.0, ans=0.125 +2024-08-30 13:33:24,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=214752.0, ans=0.125 +2024-08-30 13:33:26,239 INFO [train.py:1114] (1/4) Epoch 17, batch 450, loss[loss=0.2215, simple_loss=0.2941, pruned_loss=0.05462, ctc_loss=0.09914, over 19605.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2758, pruned_loss=0.05227, ctc_loss=0.09853, over 3450831.97 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:33:57,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.25 vs. limit=22.5 +2024-08-30 13:34:05,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=214858.66666666666, ans=0.2 +2024-08-30 13:43:51,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.66 vs. limit=5.0 +2024-08-30 13:44:01,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=215018.66666666666, ans=0.0 +2024-08-30 13:44:02,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.22 vs. limit=6.0 +2024-08-30 13:44:05,801 INFO [train.py:1114] (1/4) Epoch 17, batch 500, loss[loss=0.2163, simple_loss=0.29, pruned_loss=0.0526, ctc_loss=0.09377, over 19661.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.275, pruned_loss=0.05211, ctc_loss=0.09837, over 3546821.36 frames. 
], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:44:39,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=215072.0, ans=0.125 +2024-08-30 13:44:39,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=215072.0, ans=0.0 +2024-08-30 13:44:43,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=215125.33333333334, ans=0.2 +2024-08-30 13:44:50,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=215125.33333333334, ans=0.125 +2024-08-30 13:44:50,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=215125.33333333334, ans=0.0 +2024-08-30 13:44:59,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=215178.66666666666, ans=0.125 +2024-08-30 13:45:06,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.780e+02 2.026e+02 2.589e+02 4.105e+02, threshold=4.052e+02, percent-clipped=2.0 +2024-08-30 13:45:10,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=215232.0, ans=0.2 +2024-08-30 13:45:16,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=215232.0, ans=0.0 +2024-08-30 13:45:31,485 INFO [train.py:1114] (1/4) Epoch 17, batch 550, loss[loss=0.2281, simple_loss=0.2875, pruned_loss=0.06192, ctc_loss=0.1121, over 19299.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2745, pruned_loss=0.05198, ctc_loss=0.09804, over 3608064.98 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-30 13:45:42,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=215392.0, ans=10.0 +2024-08-30 13:45:59,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=12.0 +2024-08-30 13:46:00,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=215445.33333333334, ans=0.125 +2024-08-30 13:46:22,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215552.0, ans=0.1 +2024-08-30 13:47:18,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215552.0, ans=0.1 +2024-08-30 13:47:20,209 INFO [train.py:1114] (1/4) Epoch 17, batch 600, loss[loss=0.2238, simple_loss=0.2943, pruned_loss=0.05595, ctc_loss=0.1035, over 19430.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2744, pruned_loss=0.05178, ctc_loss=0.09741, over 3665994.82 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-30 13:47:23,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=215605.33333333334, ans=0.0 +2024-08-30 13:47:24,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.14 vs. 
limit=15.0 +2024-08-30 13:47:27,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=22.5 +2024-08-30 13:47:32,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=215658.66666666666, ans=0.0 +2024-08-30 13:47:33,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215658.66666666666, ans=0.125 +2024-08-30 13:47:49,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.17 vs. limit=15.0 +2024-08-30 13:47:51,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=215712.0, ans=0.2 +2024-08-30 13:47:53,788 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.647e+02 1.940e+02 2.383e+02 4.124e+02, threshold=3.879e+02, percent-clipped=1.0 +2024-08-30 13:48:15,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-30 13:48:27,126 INFO [train.py:1114] (1/4) Epoch 17, batch 650, loss[loss=0.2036, simple_loss=0.2676, pruned_loss=0.05028, ctc_loss=0.09769, over 19764.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2737, pruned_loss=0.05152, ctc_loss=0.09708, over 3716311.99 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 32.0 +2024-08-30 13:52:37,308 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:00:35,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=15.0 +2024-08-30 14:06:19,773 INFO [train.py:1114] (1/4) Epoch 17, batch 700, loss[loss=0.2039, simple_loss=0.2687, pruned_loss=0.05024, ctc_loss=0.09628, over 19726.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2748, pruned_loss=0.05202, ctc_loss=0.09805, over 3749338.22 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:10:26,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0 +2024-08-30 14:11:44,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216298.66666666666, ans=0.0 +2024-08-30 14:12:13,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.374e+02 1.667e+02 2.137e+02 2.601e+02 4.284e+02, threshold=4.274e+02, percent-clipped=4.0 +2024-08-30 14:16:29,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216298.66666666666, ans=0.1 +2024-08-30 14:17:07,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216352.0, ans=0.1 +2024-08-30 14:17:35,073 INFO [train.py:1114] (1/4) Epoch 17, batch 750, loss[loss=0.2177, simple_loss=0.2865, pruned_loss=0.05373, ctc_loss=0.1037, over 19479.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2741, pruned_loss=0.05169, ctc_loss=0.09742, over 3775726.61 frames. 
], batch size: 54, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:17:41,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=216405.33333333334, ans=0.0 +2024-08-30 14:17:42,173 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-08-30 14:17:59,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216458.66666666666, ans=0.125 +2024-08-30 14:18:42,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=216458.66666666666, ans=0.125 +2024-08-30 14:19:07,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=216512.0, ans=0.07 +2024-08-30 14:20:37,994 INFO [train.py:1114] (1/4) Epoch 17, batch 800, loss[loss=0.2131, simple_loss=0.2588, pruned_loss=0.06203, ctc_loss=0.1084, over 19834.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2741, pruned_loss=0.05178, ctc_loss=0.09759, over 3795773.92 frames. ], batch size: 49, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:29:21,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=216725.33333333334, ans=0.2 +2024-08-30 14:29:36,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=216725.33333333334, ans=0.125 +2024-08-30 14:31:11,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=216725.33333333334, ans=0.2 +2024-08-30 14:31:26,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=216778.66666666666, ans=0.0 +2024-08-30 14:31:32,316 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.715e+02 2.071e+02 2.537e+02 3.967e+02, threshold=4.143e+02, percent-clipped=0.0 +2024-08-30 14:31:48,354 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:32:34,214 INFO [train.py:1114] (1/4) Epoch 17, batch 850, loss[loss=0.2318, simple_loss=0.2969, pruned_loss=0.06046, ctc_loss=0.1142, over 19643.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2743, pruned_loss=0.05207, ctc_loss=0.09812, over 3815745.90 frames. ], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:32:37,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.70 vs. limit=12.0 +2024-08-30 14:32:39,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=216938.66666666666, ans=0.025 +2024-08-30 14:32:46,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=216938.66666666666, ans=0.2 +2024-08-30 14:32:55,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216992.0, ans=0.1 +2024-08-30 14:34:26,341 INFO [train.py:1114] (1/4) Epoch 17, batch 900, loss[loss=0.193, simple_loss=0.2537, pruned_loss=0.04824, ctc_loss=0.08981, over 19805.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2747, pruned_loss=0.0523, ctc_loss=0.09855, over 3819499.53 frames. 
], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:36:01,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=217205.33333333334, ans=0.025 +2024-08-30 14:36:28,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=217258.66666666666, ans=0.125 +2024-08-30 14:36:39,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.81 vs. limit=22.5 +2024-08-30 14:36:59,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.623e+02 1.810e+02 2.233e+02 4.039e+02, threshold=3.621e+02, percent-clipped=0.0 +2024-08-30 14:37:10,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217365.33333333334, ans=0.125 +2024-08-30 14:37:24,549 INFO [train.py:1114] (1/4) Epoch 17, batch 950, loss[loss=0.1812, simple_loss=0.2448, pruned_loss=0.04265, ctc_loss=0.08089, over 19497.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.275, pruned_loss=0.05222, ctc_loss=0.09853, over 3821559.39 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:38:43,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=217578.66666666666, ans=0.125 +2024-08-30 14:38:45,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=217578.66666666666, ans=0.2 +2024-08-30 14:38:45,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=217578.66666666666, ans=0.07 +2024-08-30 14:38:57,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=217632.0, ans=0.2 +2024-08-30 14:39:07,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217685.33333333334, ans=0.1 +2024-08-30 14:39:10,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.43 vs. limit=22.5 +2024-08-30 14:39:16,493 INFO [train.py:1114] (1/4) Epoch 17, batch 1000, loss[loss=0.2122, simple_loss=0.2749, pruned_loss=0.05384, ctc_loss=0.1047, over 19883.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2758, pruned_loss=0.0526, ctc_loss=0.09924, over 3817867.61 frames. ], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:39:18,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.22 vs. limit=22.5 +2024-08-30 14:39:22,940 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:39:42,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217845.33333333334, ans=0.125 +2024-08-30 14:39:52,694 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.648e+02 1.905e+02 2.181e+02 3.196e+02, threshold=3.810e+02, percent-clipped=0.0 +2024-08-30 14:40:16,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.72 vs. 
limit=15.0 +2024-08-30 14:40:20,752 INFO [train.py:1114] (1/4) Epoch 17, batch 1050, loss[loss=0.2112, simple_loss=0.286, pruned_loss=0.0494, ctc_loss=0.0941, over 19821.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2751, pruned_loss=0.05251, ctc_loss=0.09907, over 3824747.58 frames. ], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:40:29,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=218005.33333333334, ans=0.125 +2024-08-30 14:40:40,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218058.66666666666, ans=0.1 +2024-08-30 14:40:46,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=218112.0, ans=0.0 +2024-08-30 14:40:58,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218165.33333333334, ans=0.125 +2024-08-30 14:41:13,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=218218.66666666666, ans=0.125 +2024-08-30 14:41:24,736 INFO [train.py:1114] (1/4) Epoch 17, batch 1100, loss[loss=0.2051, simple_loss=0.2768, pruned_loss=0.04946, ctc_loss=0.08593, over 19584.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2752, pruned_loss=0.05223, ctc_loss=0.09854, over 3831537.03 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:41:25,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0 +2024-08-30 14:41:43,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-30 14:42:15,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=218378.66666666666, ans=0.0 +2024-08-30 14:42:23,474 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.634e+02 1.909e+02 2.238e+02 3.833e+02, threshold=3.817e+02, percent-clipped=1.0 +2024-08-30 14:43:09,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=218485.33333333334, ans=0.05 +2024-08-30 14:43:11,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=15.0 +2024-08-30 14:43:15,280 INFO [train.py:1114] (1/4) Epoch 17, batch 1150, loss[loss=0.1994, simple_loss=0.2652, pruned_loss=0.04819, ctc_loss=0.09286, over 19583.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2751, pruned_loss=0.05231, ctc_loss=0.09855, over 3831079.30 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:43:23,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=218538.66666666666, ans=0.0 +2024-08-30 14:43:28,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0 +2024-08-30 14:43:29,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.82 vs. 
limit=15.0 +2024-08-30 14:43:32,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.06 vs. limit=15.0 +2024-08-30 14:43:34,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=218592.0, ans=0.0 +2024-08-30 14:43:37,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.99 vs. limit=12.0 +2024-08-30 14:43:44,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=218645.33333333334, ans=0.0 +2024-08-30 14:43:44,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=218645.33333333334, ans=0.025 +2024-08-30 14:43:47,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=218645.33333333334, ans=10.0 +2024-08-30 14:43:51,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=218645.33333333334, ans=0.025 +2024-08-30 14:43:57,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0 +2024-08-30 14:44:00,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=218698.66666666666, ans=0.025 +2024-08-30 14:44:03,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.43 vs. limit=15.0 +2024-08-30 14:44:20,110 INFO [train.py:1114] (1/4) Epoch 17, batch 1200, loss[loss=0.2096, simple_loss=0.2797, pruned_loss=0.04973, ctc_loss=0.1003, over 19841.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2757, pruned_loss=0.05252, ctc_loss=0.09901, over 3825453.22 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:45:47,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218858.66666666666, ans=0.1 +2024-08-30 14:45:57,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.53 vs. limit=15.0 +2024-08-30 14:46:08,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.467e+02 1.734e+02 1.937e+02 2.235e+02 3.279e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-30 14:46:11,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=218965.33333333334, ans=0.0 +2024-08-30 14:46:13,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=218965.33333333334, ans=0.0 +2024-08-30 14:46:24,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=219018.66666666666, ans=0.2 +2024-08-30 14:46:25,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219018.66666666666, ans=0.1 +2024-08-30 14:46:31,316 INFO [train.py:1114] (1/4) Epoch 17, batch 1250, loss[loss=0.2208, simple_loss=0.2855, pruned_loss=0.05735, ctc_loss=0.1037, over 19520.00 frames. 
], tot_loss[loss=0.2106, simple_loss=0.2761, pruned_loss=0.05266, ctc_loss=0.09929, over 3843634.46 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:46:49,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=219125.33333333334, ans=0.0 +2024-08-30 14:46:50,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=219125.33333333334, ans=0.2 +2024-08-30 14:46:59,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=219178.66666666666, ans=0.125 +2024-08-30 14:48:33,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=219232.0, ans=0.025 +2024-08-30 14:48:46,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=219285.33333333334, ans=0.0 +2024-08-30 14:48:52,374 INFO [train.py:1114] (1/4) Epoch 17, batch 1300, loss[loss=0.212, simple_loss=0.2801, pruned_loss=0.05187, ctc_loss=0.1003, over 18867.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2752, pruned_loss=0.05242, ctc_loss=0.09884, over 3847381.66 frames. ], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:49:00,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=219338.66666666666, ans=0.025 +2024-08-30 14:49:01,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=219338.66666666666, ans=0.125 +2024-08-30 14:49:29,493 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.750e+02 2.054e+02 2.564e+02 3.826e+02, threshold=4.108e+02, percent-clipped=0.0 +2024-08-30 14:49:40,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=219498.66666666666, ans=0.125 +2024-08-30 14:49:42,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=219552.0, ans=0.0 +2024-08-30 14:50:08,917 INFO [train.py:1114] (1/4) Epoch 17, batch 1350, loss[loss=0.2047, simple_loss=0.2833, pruned_loss=0.0465, ctc_loss=0.08258, over 19769.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2742, pruned_loss=0.05164, ctc_loss=0.09729, over 3857863.66 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:50:14,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=219605.33333333334, ans=0.125 +2024-08-30 14:50:39,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=219712.0, ans=0.2 +2024-08-30 14:50:55,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.20 vs. 
limit=22.5 +2024-08-30 14:51:00,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=219818.66666666666, ans=0.125 +2024-08-30 14:51:03,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=219818.66666666666, ans=0.125 +2024-08-30 14:51:03,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=219818.66666666666, ans=0.125 +2024-08-30 14:51:09,327 INFO [train.py:1114] (1/4) Epoch 17, batch 1400, loss[loss=0.2064, simple_loss=0.2657, pruned_loss=0.05256, ctc_loss=0.1046, over 19679.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.274, pruned_loss=0.05189, ctc_loss=0.09756, over 3864231.98 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:51:28,450 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:52:01,616 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.697e+02 1.910e+02 2.399e+02 4.058e+02, threshold=3.819e+02, percent-clipped=0.0 +2024-08-30 14:52:06,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=220032.0, ans=0.0 +2024-08-30 14:52:23,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.42 vs. limit=10.0 +2024-08-30 14:52:26,401 INFO [train.py:1114] (1/4) Epoch 17, batch 1450, loss[loss=0.2025, simple_loss=0.2831, pruned_loss=0.04493, ctc_loss=0.07981, over 19695.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2746, pruned_loss=0.05186, ctc_loss=0.09746, over 3862167.13 frames. ], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:52:43,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220138.66666666666, ans=0.125 +2024-08-30 14:53:17,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220245.33333333334, ans=0.1 +2024-08-30 14:53:54,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=220298.66666666666, ans=0.2 +2024-08-30 14:53:56,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=220298.66666666666, ans=0.025 +2024-08-30 14:54:08,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=220352.0, ans=0.09899494936611666 +2024-08-30 14:54:12,192 INFO [train.py:1114] (1/4) Epoch 17, batch 1500, loss[loss=0.2069, simple_loss=0.2782, pruned_loss=0.04934, ctc_loss=0.09232, over 19565.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2754, pruned_loss=0.05218, ctc_loss=0.09801, over 3862445.51 frames. ], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:54:12,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.82 vs. 
limit=15.0 +2024-08-30 14:54:36,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=220458.66666666666, ans=0.125 +2024-08-30 14:54:54,687 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.719e+02 1.906e+02 2.293e+02 3.704e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-30 14:55:17,160 INFO [train.py:1114] (1/4) Epoch 17, batch 1550, loss[loss=0.2397, simple_loss=0.2994, pruned_loss=0.0646, ctc_loss=0.127, over 19604.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2758, pruned_loss=0.0526, ctc_loss=0.09884, over 3847588.66 frames. ], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:55:29,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=220672.0, ans=0.2 +2024-08-30 14:55:48,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=220725.33333333334, ans=0.0 +2024-08-30 14:55:59,841 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.26 vs. limit=15.0 +2024-08-30 14:56:01,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=220778.66666666666, ans=0.0 +2024-08-30 14:56:07,627 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:56:20,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=15.0 +2024-08-30 14:56:22,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220885.33333333334, ans=0.1 +2024-08-30 14:56:27,380 INFO [train.py:1114] (1/4) Epoch 17, batch 1600, loss[loss=0.2205, simple_loss=0.2876, pruned_loss=0.05587, ctc_loss=0.1042, over 19845.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2756, pruned_loss=0.05225, ctc_loss=0.09831, over 3836188.10 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-30 14:57:21,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.31 vs. limit=22.5 +2024-08-30 14:57:23,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=15.0 +2024-08-30 14:57:30,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=220992.0, ans=0.95 +2024-08-30 14:57:34,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=220992.0, ans=0.025 +2024-08-30 14:57:36,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0 +2024-08-30 15:00:47,791 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.738e+02 2.160e+02 2.635e+02 3.870e+02, threshold=4.320e+02, percent-clipped=2.0 +2024-08-30 15:03:54,012 INFO [train.py:1114] (1/4) Epoch 17, batch 1650, loss[loss=0.1968, simple_loss=0.2729, pruned_loss=0.04115, ctc_loss=0.09577, over 19679.00 frames. 
], tot_loss[loss=0.2098, simple_loss=0.2755, pruned_loss=0.05229, ctc_loss=0.09861, over 3831818.43 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:05:32,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0 +2024-08-30 15:05:48,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=221258.66666666666, ans=0.2 +2024-08-30 15:05:48,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=221258.66666666666, ans=0.2 +2024-08-30 15:07:23,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221312.0, ans=0.125 +2024-08-30 15:07:44,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=221365.33333333334, ans=0.04949747468305833 +2024-08-30 15:08:00,849 INFO [train.py:1114] (1/4) Epoch 17, batch 1700, loss[loss=0.1992, simple_loss=0.2579, pruned_loss=0.05036, ctc_loss=0.09976, over 19675.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2752, pruned_loss=0.05198, ctc_loss=0.09817, over 3845734.92 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:08:23,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.34 vs. limit=15.0 +2024-08-30 15:08:24,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=221578.66666666666, ans=0.1 +2024-08-30 15:08:30,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221578.66666666666, ans=0.1 +2024-08-30 15:08:36,780 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.717e+02 1.998e+02 2.422e+02 4.059e+02, threshold=3.996e+02, percent-clipped=0.0 +2024-08-30 15:09:29,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=221632.0, ans=0.0 +2024-08-30 15:09:43,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=221685.33333333334, ans=0.0 +2024-08-30 15:09:50,037 INFO [train.py:1114] (1/4) Epoch 17, batch 1750, loss[loss=0.1984, simple_loss=0.251, pruned_loss=0.05337, ctc_loss=0.09733, over 19647.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2744, pruned_loss=0.05174, ctc_loss=0.09771, over 3850469.71 frames. ], batch size: 45, lr: 8.76e-03, grad_scale: 32.0 +2024-08-30 15:10:08,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221792.0, ans=0.1 +2024-08-30 15:10:13,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=221845.33333333334, ans=0.2 +2024-08-30 15:10:36,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=221952.0, ans=0.2 +2024-08-30 15:10:46,112 INFO [train.py:1114] (1/4) Epoch 17, batch 1800, loss[loss=0.2167, simple_loss=0.285, pruned_loss=0.05222, ctc_loss=0.1097, over 19604.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2749, pruned_loss=0.05193, ctc_loss=0.09808, over 3851798.91 frames. 
], batch size: 55, lr: 8.76e-03, grad_scale: 32.0 +2024-08-30 15:11:22,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=222165.33333333334, ans=0.2 +2024-08-30 15:11:23,242 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.773e+02 2.029e+02 2.607e+02 4.351e+02, threshold=4.057e+02, percent-clipped=1.0 +2024-08-30 15:11:30,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222165.33333333334, ans=0.1 +2024-08-30 15:11:41,628 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:11:43,553 INFO [train.py:1114] (1/4) Epoch 17, batch 1850, loss[loss=0.1908, simple_loss=0.2773, pruned_loss=0.03862, ctc_loss=0.06776, over 19592.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2747, pruned_loss=0.05166, ctc_loss=0.09743, over 3854949.06 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0 +2024-08-30 15:11:59,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=222325.33333333334, ans=0.0 +2024-08-30 15:12:12,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. limit=6.0 +2024-08-30 15:12:29,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=222485.33333333334, ans=0.0 +2024-08-30 15:12:32,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=222485.33333333334, ans=0.2 +2024-08-30 15:12:39,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222538.66666666666, ans=0.125 +2024-08-30 15:12:40,701 INFO [train.py:1114] (1/4) Epoch 17, batch 1900, loss[loss=0.2275, simple_loss=0.3001, pruned_loss=0.0563, ctc_loss=0.1055, over 19647.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2751, pruned_loss=0.05167, ctc_loss=0.09751, over 3860703.66 frames. ], batch size: 59, lr: 8.75e-03, grad_scale: 16.0 +2024-08-30 15:12:40,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=222538.66666666666, ans=0.0 +2024-08-30 15:13:06,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222645.33333333334, ans=0.1 +2024-08-30 15:13:12,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=222645.33333333334, ans=0.025 +2024-08-30 15:13:18,246 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.682e+02 1.950e+02 2.328e+02 4.923e+02, threshold=3.901e+02, percent-clipped=3.0 +2024-08-30 15:13:26,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222752.0, ans=0.125 +2024-08-30 15:13:38,419 INFO [train.py:1114] (1/4) Epoch 17, batch 1950, loss[loss=0.2349, simple_loss=0.2872, pruned_loss=0.06664, ctc_loss=0.1231, over 19595.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2763, pruned_loss=0.05212, ctc_loss=0.09822, over 3870458.50 frames. 
], batch size: 52, lr: 8.74e-03, grad_scale: 16.0 +2024-08-30 15:14:36,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=222858.66666666666, ans=0.125 +2024-08-30 15:14:53,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=222965.33333333334, ans=0.125 +2024-08-30 15:15:06,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=222965.33333333334, ans=10.0 +2024-08-30 15:15:27,067 INFO [train.py:1114] (1/4) Epoch 17, batch 2000, loss[loss=0.1733, simple_loss=0.2377, pruned_loss=0.03989, ctc_loss=0.07271, over 19625.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2768, pruned_loss=0.05239, ctc_loss=0.0986, over 3856646.93 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0 +2024-08-30 15:15:51,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=223178.66666666666, ans=0.125 +2024-08-30 15:15:58,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=223178.66666666666, ans=0.0 +2024-08-30 15:16:03,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.692e+02 2.099e+02 2.435e+02 3.373e+02, threshold=4.199e+02, percent-clipped=0.0 +2024-08-30 15:16:05,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223232.0, ans=0.125 +2024-08-30 15:16:06,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=223232.0, ans=0.125 +2024-08-30 15:16:14,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223285.33333333334, ans=0.1 +2024-08-30 15:16:18,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=223285.33333333334, ans=0.2 +2024-08-30 15:16:38,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=223285.33333333334, ans=0.2 +2024-08-30 15:16:42,844 INFO [train.py:1114] (1/4) Epoch 17, batch 2050, loss[loss=0.176, simple_loss=0.242, pruned_loss=0.04027, ctc_loss=0.07354, over 19729.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2759, pruned_loss=0.05248, ctc_loss=0.09883, over 3852531.93 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0 +2024-08-30 15:16:55,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223338.66666666666, ans=0.1 +2024-08-30 15:17:01,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=223338.66666666666, ans=0.1 +2024-08-30 15:18:23,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.46 vs. 
limit=22.5 +2024-08-30 15:18:33,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=223445.33333333334, ans=0.125 +2024-08-30 15:20:01,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=223498.66666666666, ans=0.0 +2024-08-30 15:20:11,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=223552.0, ans=0.125 +2024-08-30 15:20:13,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223552.0, ans=0.125 +2024-08-30 15:20:15,771 INFO [train.py:1114] (1/4) Epoch 17, batch 2100, loss[loss=0.2026, simple_loss=0.2685, pruned_loss=0.04907, ctc_loss=0.09674, over 19766.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2748, pruned_loss=0.05183, ctc_loss=0.09776, over 3859529.90 frames. ], batch size: 54, lr: 8.73e-03, grad_scale: 32.0 +2024-08-30 15:21:01,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=223658.66666666666, ans=0.0 +2024-08-30 15:21:18,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=223712.0, ans=0.125 +2024-08-30 15:21:41,984 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.235e+02 1.693e+02 2.019e+02 2.546e+02 6.032e+02, threshold=4.039e+02, percent-clipped=5.0 +2024-08-30 15:21:50,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=223818.66666666666, ans=0.125 +2024-08-30 15:21:57,530 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=2.789e-02 +2024-08-30 15:22:02,825 INFO [train.py:1114] (1/4) Epoch 17, batch 2150, loss[loss=0.184, simple_loss=0.2558, pruned_loss=0.04112, ctc_loss=0.07483, over 19570.00 frames. ], tot_loss[loss=0.208, simple_loss=0.274, pruned_loss=0.05159, ctc_loss=0.09701, over 3870502.16 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0 +2024-08-30 15:22:11,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=223872.0, ans=0.95 +2024-08-30 15:22:47,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=224085.33333333334, ans=0.125 +2024-08-30 15:22:54,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=224085.33333333334, ans=0.0 +2024-08-30 15:22:58,283 INFO [train.py:1114] (1/4) Epoch 17, batch 2200, loss[loss=0.2049, simple_loss=0.2794, pruned_loss=0.04698, ctc_loss=0.09133, over 19579.00 frames. ], tot_loss[loss=0.208, simple_loss=0.274, pruned_loss=0.05159, ctc_loss=0.09688, over 3867850.87 frames. 
], batch size: 57, lr: 8.72e-03, grad_scale: 32.0 +2024-08-30 15:23:08,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=224138.66666666666, ans=0.125 +2024-08-30 15:23:31,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224245.33333333334, ans=0.125 +2024-08-30 15:23:41,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224298.66666666666, ans=0.125 +2024-08-30 15:23:53,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.675e+02 1.986e+02 2.371e+02 4.244e+02, threshold=3.972e+02, percent-clipped=2.0 +2024-08-30 15:23:55,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0 +2024-08-30 15:24:13,614 INFO [train.py:1114] (1/4) Epoch 17, batch 2250, loss[loss=0.208, simple_loss=0.2793, pruned_loss=0.04969, ctc_loss=0.09318, over 19616.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2746, pruned_loss=0.05193, ctc_loss=0.09755, over 3867771.57 frames. ], batch size: 55, lr: 8.71e-03, grad_scale: 32.0 +2024-08-30 15:24:19,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=224405.33333333334, ans=0.125 +2024-08-30 15:24:26,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=224458.66666666666, ans=0.0 +2024-08-30 15:24:28,798 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.03 vs. limit=15.0 +2024-08-30 15:25:42,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=224458.66666666666, ans=0.0 +2024-08-30 15:27:01,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.13 vs. limit=22.5 +2024-08-30 15:27:08,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224618.66666666666, ans=0.125 +2024-08-30 15:27:09,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=224618.66666666666, ans=0.07 +2024-08-30 15:27:11,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=224618.66666666666, ans=0.0 +2024-08-30 15:27:15,732 INFO [train.py:1114] (1/4) Epoch 17, batch 2300, loss[loss=0.1708, simple_loss=0.2383, pruned_loss=0.03778, ctc_loss=0.06935, over 19506.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2741, pruned_loss=0.05212, ctc_loss=0.09806, over 3861446.76 frames. 
], batch size: 49, lr: 8.71e-03, grad_scale: 32.0 +2024-08-30 15:28:29,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=224725.33333333334, ans=0.125 +2024-08-30 15:28:43,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=224832.0, ans=0.09899494936611666 +2024-08-30 15:28:44,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=224832.0, ans=0.0 +2024-08-30 15:28:46,693 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.759e+02 2.126e+02 2.592e+02 4.068e+02, threshold=4.252e+02, percent-clipped=2.0 +2024-08-30 15:29:50,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224938.66666666666, ans=0.1 +2024-08-30 15:29:51,108 INFO [train.py:1114] (1/4) Epoch 17, batch 2350, loss[loss=0.2271, simple_loss=0.2928, pruned_loss=0.05941, ctc_loss=0.1067, over 19670.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2746, pruned_loss=0.05237, ctc_loss=0.09846, over 3863977.51 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 32.0 +2024-08-30 15:29:53,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=224938.66666666666, ans=0.125 +2024-08-30 15:30:00,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=224938.66666666666, ans=0.2 +2024-08-30 15:30:36,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=224992.0, ans=0.07 +2024-08-30 15:31:54,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-08-30 15:32:00,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=225098.66666666666, ans=0.0 +2024-08-30 15:32:52,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=225098.66666666666, ans=0.09899494936611666 +2024-08-30 15:33:11,256 INFO [train.py:1114] (1/4) Epoch 17, batch 2400, loss[loss=0.2376, simple_loss=0.3021, pruned_loss=0.0617, ctc_loss=0.1243, over 19262.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.276, pruned_loss=0.0527, ctc_loss=0.09901, over 3858306.35 frames. ], batch size: 71, lr: 8.70e-03, grad_scale: 32.0 +2024-08-30 15:33:33,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=15.0 +2024-08-30 15:33:34,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=225312.0, ans=15.0 +2024-08-30 15:33:48,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.38 vs. 
limit=6.0 +2024-08-30 15:33:48,809 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.684e+02 1.880e+02 2.443e+02 3.780e+02, threshold=3.760e+02, percent-clipped=0.0 +2024-08-30 15:34:08,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0 +2024-08-30 15:34:10,336 INFO [train.py:1114] (1/4) Epoch 17, batch 2450, loss[loss=0.2668, simple_loss=0.3082, pruned_loss=0.08333, ctc_loss=0.1469, over 13116.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2799, pruned_loss=0.05543, ctc_loss=0.1045, over 3731084.46 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 16.0 +2024-08-30 15:34:11,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=225472.0, ans=0.125 +2024-08-30 15:34:14,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=225472.0, ans=0.025 +2024-08-30 15:34:18,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=225472.0, ans=10.0 +2024-08-30 15:34:50,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=225525.33333333334, ans=0.09899494936611666 +2024-08-30 15:34:51,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=225525.33333333334, ans=0.125 +2024-08-30 15:34:59,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=225578.66666666666, ans=0.125 +2024-08-30 15:35:02,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.48 vs. limit=22.5 +2024-08-30 15:35:03,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=225578.66666666666, ans=0.125 +2024-08-30 15:35:09,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.11 vs. limit=15.0 +2024-08-30 15:38:02,874 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04852, ctc_loss=0.09187, over 19410.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04852, ctc_loss=0.09187, over 19410.00 frames. ], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 15:38:02,874 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-30 15:38:16,595 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9873, 3.3705, 2.6267, 3.1732], device='cuda:1') +2024-08-30 15:39:34,943 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.0364, ctc_loss=0.06401, over 944034.00 frames. 
+2024-08-30 15:39:34,944 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13639MB +2024-08-30 15:39:39,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225680.0, ans=0.125 +2024-08-30 15:40:05,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225733.33333333334, ans=0.1 +2024-08-30 15:40:20,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-30 15:40:25,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 15:40:30,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225786.66666666666, ans=0.1 +2024-08-30 15:40:47,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=225840.0, ans=0.125 +2024-08-30 15:40:58,103 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.919e+02 2.092e+02 2.421e+02 5.568e+02, threshold=4.185e+02, percent-clipped=4.0 +2024-08-30 15:41:03,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225893.33333333334, ans=0.1 +2024-08-30 15:41:04,966 INFO [train.py:1114] (1/4) Epoch 18, batch 50, loss[loss=0.1582, simple_loss=0.2327, pruned_loss=0.0302, ctc_loss=0.05849, over 19735.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2757, pruned_loss=0.05252, ctc_loss=0.1004, over 843693.80 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 15:41:05,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.70 vs. limit=15.0 +2024-08-30 15:41:16,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=225946.66666666666, ans=0.09899494936611666 +2024-08-30 15:41:46,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=225946.66666666666, ans=0.0 +2024-08-30 15:42:01,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=226053.33333333334, ans=0.025 +2024-08-30 15:42:27,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=226160.0, ans=0.0 +2024-08-30 15:42:28,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.79 vs. limit=15.0 +2024-08-30 15:44:07,530 INFO [train.py:1114] (1/4) Epoch 18, batch 100, loss[loss=0.199, simple_loss=0.2665, pruned_loss=0.04874, ctc_loss=0.0852, over 19719.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2771, pruned_loss=0.05266, ctc_loss=0.1004, over 1498452.85 frames. 
], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-30 15:44:17,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=226213.33333333334, ans=15.0 +2024-08-30 15:44:34,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=226320.0, ans=0.125 +2024-08-30 15:44:43,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0 +2024-08-30 15:44:58,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=226426.66666666666, ans=0.07 +2024-08-30 15:45:01,910 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.711e+02 1.973e+02 2.383e+02 4.146e+02, threshold=3.946e+02, percent-clipped=0.0 +2024-08-30 15:45:10,543 INFO [train.py:1114] (1/4) Epoch 18, batch 150, loss[loss=0.1954, simple_loss=0.2555, pruned_loss=0.04927, ctc_loss=0.09182, over 19694.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2751, pruned_loss=0.05225, ctc_loss=0.09937, over 2027899.17 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-30 15:45:11,998 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:45:24,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-30 15:45:26,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226533.33333333334, ans=0.125 +2024-08-30 15:45:27,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=226533.33333333334, ans=0.125 +2024-08-30 15:45:28,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226533.33333333334, ans=0.1 +2024-08-30 15:45:28,898 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:45:35,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=15.0 +2024-08-30 15:45:39,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=226586.66666666666, ans=0.035 +2024-08-30 15:45:43,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.54 vs. limit=12.0 +2024-08-30 15:46:10,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=226693.33333333334, ans=0.125 +2024-08-30 15:46:16,548 INFO [train.py:1114] (1/4) Epoch 18, batch 200, loss[loss=0.2091, simple_loss=0.2801, pruned_loss=0.0504, ctc_loss=0.09337, over 18231.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2739, pruned_loss=0.05144, ctc_loss=0.09761, over 2435641.98 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-30 15:46:23,911 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.95 vs. 
limit=15.0 +2024-08-30 15:46:45,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=226853.33333333334, ans=0.0 +2024-08-30 15:47:08,529 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.794e+02 2.164e+02 2.564e+02 4.131e+02, threshold=4.328e+02, percent-clipped=1.0 +2024-08-30 15:47:20,541 INFO [train.py:1114] (1/4) Epoch 18, batch 250, loss[loss=0.2314, simple_loss=0.2971, pruned_loss=0.05931, ctc_loss=0.1175, over 19375.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2738, pruned_loss=0.05121, ctc_loss=0.097, over 2757155.58 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-30 15:47:25,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227013.33333333334, ans=0.125 +2024-08-30 15:47:32,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=12.0 +2024-08-30 15:48:36,401 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.34 vs. limit=22.5 +2024-08-30 15:49:09,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227226.66666666666, ans=0.1 +2024-08-30 15:49:21,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.44 vs. limit=15.0 +2024-08-30 15:49:22,654 INFO [train.py:1114] (1/4) Epoch 18, batch 300, loss[loss=0.224, simple_loss=0.2859, pruned_loss=0.05819, ctc_loss=0.1146, over 19505.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2728, pruned_loss=0.05064, ctc_loss=0.09594, over 3002465.67 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-30 15:51:20,878 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:51:27,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=227440.0, ans=0.125 +2024-08-30 15:51:27,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=227440.0, ans=0.025 +2024-08-30 15:51:40,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.730e+02 1.916e+02 2.273e+02 3.732e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-30 15:51:41,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=227493.33333333334, ans=0.2 +2024-08-30 15:51:48,895 INFO [train.py:1114] (1/4) Epoch 18, batch 350, loss[loss=0.1884, simple_loss=0.2494, pruned_loss=0.04613, ctc_loss=0.08774, over 19745.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2731, pruned_loss=0.05065, ctc_loss=0.09572, over 3192448.38 frames. 
], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-30 15:52:05,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=227600.0, ans=0.0 +2024-08-30 15:52:32,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=227706.66666666666, ans=0.2 +2024-08-30 15:52:38,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=227760.0, ans=0.125 +2024-08-30 15:52:51,832 INFO [train.py:1114] (1/4) Epoch 18, batch 400, loss[loss=0.202, simple_loss=0.2743, pruned_loss=0.04733, ctc_loss=0.08765, over 19499.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2731, pruned_loss=0.0506, ctc_loss=0.09547, over 3342545.79 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-30 15:52:54,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=227813.33333333334, ans=0.125 +2024-08-30 15:53:01,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=227813.33333333334, ans=0.2 +2024-08-30 15:53:08,946 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 15:53:10,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.88 vs. limit=15.0 +2024-08-30 15:53:15,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=227920.0, ans=0.125 +2024-08-30 15:53:17,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=227920.0, ans=0.125 +2024-08-30 15:54:08,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=227973.33333333334, ans=6.0 +2024-08-30 15:54:09,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227973.33333333334, ans=0.125 +2024-08-30 15:54:09,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227973.33333333334, ans=0.1 +2024-08-30 15:54:12,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.66 vs. limit=15.0 +2024-08-30 15:54:16,393 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.651e+02 1.862e+02 2.258e+02 4.636e+02, threshold=3.723e+02, percent-clipped=1.0 +2024-08-30 15:54:17,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228026.66666666666, ans=0.125 +2024-08-30 15:54:24,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=228080.0, ans=0.125 +2024-08-30 15:54:24,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=228080.0, ans=0.025 +2024-08-30 15:54:25,942 INFO [train.py:1114] (1/4) Epoch 18, batch 450, loss[loss=0.1989, simple_loss=0.2781, pruned_loss=0.04369, ctc_loss=0.08104, over 19601.00 frames. 
], tot_loss[loss=0.2071, simple_loss=0.2735, pruned_loss=0.05112, ctc_loss=0.09624, over 3451509.51 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-30 15:54:58,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=228186.66666666666, ans=0.125 +2024-08-30 15:55:11,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=228240.0, ans=0.0 +2024-08-30 15:55:37,536 INFO [train.py:1114] (1/4) Epoch 18, batch 500, loss[loss=0.2274, simple_loss=0.2951, pruned_loss=0.05904, ctc_loss=0.1042, over 19696.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2723, pruned_loss=0.05039, ctc_loss=0.09506, over 3546801.94 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-30 15:55:40,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=228346.66666666666, ans=0.125 +2024-08-30 15:55:47,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=228346.66666666666, ans=0.5 +2024-08-30 15:56:25,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=228453.33333333334, ans=0.5 +2024-08-30 15:56:37,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=228453.33333333334, ans=0.125 +2024-08-30 15:57:50,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=228560.0, ans=0.2 +2024-08-30 15:57:54,273 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.602e+02 1.832e+02 2.190e+02 3.877e+02, threshold=3.665e+02, percent-clipped=2.0 +2024-08-30 15:58:00,971 INFO [train.py:1114] (1/4) Epoch 18, batch 550, loss[loss=0.2147, simple_loss=0.2868, pruned_loss=0.05194, ctc_loss=0.09692, over 19259.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2723, pruned_loss=0.05053, ctc_loss=0.09513, over 3609220.25 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-30 15:58:38,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=228613.33333333334, ans=0.0 +2024-08-30 15:58:55,992 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:00:51,926 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:00:53,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=228720.0, ans=0.125 +2024-08-30 16:01:35,843 INFO [train.py:1114] (1/4) Epoch 18, batch 600, loss[loss=0.229, simple_loss=0.3018, pruned_loss=0.05653, ctc_loss=0.108, over 19439.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2727, pruned_loss=0.05071, ctc_loss=0.09544, over 3667203.93 frames. 
], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-30 16:01:38,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228880.0, ans=0.125 +2024-08-30 16:02:34,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=228933.33333333334, ans=0.125 +2024-08-30 16:03:08,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228986.66666666666, ans=0.1 +2024-08-30 16:04:28,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=229040.0, ans=0.125 +2024-08-30 16:04:31,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=229040.0, ans=0.025 +2024-08-30 16:04:31,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229040.0, ans=0.0 +2024-08-30 16:04:41,751 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.417e+02 1.726e+02 2.045e+02 2.727e+02 4.181e+02, threshold=4.090e+02, percent-clipped=7.0 +2024-08-30 16:04:46,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229093.33333333334, ans=0.125 +2024-08-30 16:04:48,706 INFO [train.py:1114] (1/4) Epoch 18, batch 650, loss[loss=0.1937, simple_loss=0.2684, pruned_loss=0.04397, ctc_loss=0.07789, over 19779.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2721, pruned_loss=0.05064, ctc_loss=0.09521, over 3716775.23 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-30 16:04:48,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=229146.66666666666, ans=0.125 +2024-08-30 16:05:08,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=229146.66666666666, ans=0.1 +2024-08-30 16:06:17,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=229200.0, ans=0.125 +2024-08-30 16:06:30,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=229200.0, ans=0.125 +2024-08-30 16:06:37,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=15.0 +2024-08-30 16:06:39,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229253.33333333334, ans=0.125 +2024-08-30 16:06:47,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=229306.66666666666, ans=0.125 +2024-08-30 16:07:14,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.87 vs. limit=22.5 +2024-08-30 16:07:24,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.30 vs. 
limit=6.0 +2024-08-30 16:07:32,101 INFO [train.py:1114] (1/4) Epoch 18, batch 700, loss[loss=0.2059, simple_loss=0.2678, pruned_loss=0.05285, ctc_loss=0.09561, over 19707.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2727, pruned_loss=0.05077, ctc_loss=0.09536, over 3748465.26 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:07:32,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=229413.33333333334, ans=0.09899494936611666 +2024-08-30 16:07:34,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.50 vs. limit=22.5 +2024-08-30 16:07:34,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229413.33333333334, ans=0.1 +2024-08-30 16:07:38,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229413.33333333334, ans=0.125 +2024-08-30 16:07:45,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.20 vs. limit=22.5 +2024-08-30 16:07:55,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=229520.0, ans=0.125 +2024-08-30 16:08:13,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229573.33333333334, ans=0.125 +2024-08-30 16:08:27,237 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.709e+02 1.988e+02 2.480e+02 4.374e+02, threshold=3.975e+02, percent-clipped=1.0 +2024-08-30 16:08:33,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=229680.0, ans=0.125 +2024-08-30 16:08:34,067 INFO [train.py:1114] (1/4) Epoch 18, batch 750, loss[loss=0.2341, simple_loss=0.2947, pruned_loss=0.0637, ctc_loss=0.1153, over 19498.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2723, pruned_loss=0.05051, ctc_loss=0.09496, over 3775010.57 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:08:49,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=229733.33333333334, ans=0.0 +2024-08-30 16:09:17,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=229840.0, ans=0.125 +2024-08-30 16:09:23,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=229840.0, ans=0.125 +2024-08-30 16:09:34,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229893.33333333334, ans=0.125 +2024-08-30 16:09:35,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229893.33333333334, ans=0.125 +2024-08-30 16:09:36,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=229893.33333333334, ans=22.5 +2024-08-30 16:09:38,078 INFO [train.py:1114] (1/4) Epoch 18, batch 800, loss[loss=0.1909, simple_loss=0.2614, pruned_loss=0.04367, ctc_loss=0.08254, over 19409.00 frames. 
], tot_loss[loss=0.2047, simple_loss=0.2718, pruned_loss=0.04994, ctc_loss=0.0941, over 3795416.46 frames. ], batch size: 48, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:09:52,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=229946.66666666666, ans=0.0 +2024-08-30 16:09:52,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229946.66666666666, ans=0.125 +2024-08-30 16:09:55,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=229946.66666666666, ans=0.0 +2024-08-30 16:10:08,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=230000.0, ans=0.125 +2024-08-30 16:10:12,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=230053.33333333334, ans=0.0 +2024-08-30 16:10:37,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=230160.0, ans=0.0 +2024-08-30 16:11:34,980 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.744e+02 1.950e+02 2.451e+02 4.139e+02, threshold=3.901e+02, percent-clipped=0.0 +2024-08-30 16:11:44,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=230160.0, ans=0.0 +2024-08-30 16:11:47,905 INFO [train.py:1114] (1/4) Epoch 18, batch 850, loss[loss=0.2182, simple_loss=0.2857, pruned_loss=0.05439, ctc_loss=0.105, over 19648.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2719, pruned_loss=0.05002, ctc_loss=0.09424, over 3814604.01 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-30 16:11:58,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=230266.66666666666, ans=0.07 +2024-08-30 16:12:14,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230320.0, ans=0.0 +2024-08-30 16:12:18,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=230320.0, ans=0.2 +2024-08-30 16:12:38,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230373.33333333334, ans=0.1 +2024-08-30 16:12:48,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230426.66666666666, ans=0.125 +2024-08-30 16:12:57,693 INFO [train.py:1114] (1/4) Epoch 18, batch 900, loss[loss=0.1984, simple_loss=0.2542, pruned_loss=0.05146, ctc_loss=0.09935, over 19809.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2722, pruned_loss=0.05028, ctc_loss=0.09481, over 3819625.95 frames. 
], batch size: 49, lr: 8.36e-03, grad_scale: 32.0 +2024-08-30 16:13:11,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=230533.33333333334, ans=0.07 +2024-08-30 16:13:21,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=230586.66666666666, ans=0.0 +2024-08-30 16:13:22,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230586.66666666666, ans=0.0 +2024-08-30 16:13:49,531 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.771e+02 2.097e+02 2.541e+02 3.279e+02, threshold=4.195e+02, percent-clipped=1.0 +2024-08-30 16:13:56,603 INFO [train.py:1114] (1/4) Epoch 18, batch 950, loss[loss=0.1923, simple_loss=0.258, pruned_loss=0.04653, ctc_loss=0.08353, over 19518.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2727, pruned_loss=0.05089, ctc_loss=0.09603, over 3819326.72 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-30 16:14:51,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230746.66666666666, ans=0.1 +2024-08-30 16:15:07,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.18 vs. limit=15.0 +2024-08-30 16:15:16,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=230853.33333333334, ans=0.125 +2024-08-30 16:15:16,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230853.33333333334, ans=0.1 +2024-08-30 16:15:38,514 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=2.745e-03 +2024-08-30 16:15:40,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=3.94 vs. limit=15.0 +2024-08-30 16:15:41,780 INFO [train.py:1114] (1/4) Epoch 18, batch 1000, loss[loss=0.19, simple_loss=0.2568, pruned_loss=0.04508, ctc_loss=0.08242, over 19849.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2733, pruned_loss=0.05106, ctc_loss=0.09637, over 3815683.01 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-30 16:16:07,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=231120.0, ans=0.5 +2024-08-30 16:16:27,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=231120.0, ans=0.125 +2024-08-30 16:16:46,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.681e+02 1.935e+02 2.141e+02 3.468e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-30 16:16:51,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231226.66666666666, ans=0.1 +2024-08-30 16:16:53,181 INFO [train.py:1114] (1/4) Epoch 18, batch 1050, loss[loss=0.1955, simple_loss=0.2699, pruned_loss=0.04371, ctc_loss=0.08398, over 19852.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2731, pruned_loss=0.05131, ctc_loss=0.09676, over 3823270.16 frames. 
], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-30 16:16:53,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231280.0, ans=0.1 +2024-08-30 16:16:57,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=15.0 +2024-08-30 16:17:08,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=231333.33333333334, ans=0.125 +2024-08-30 16:17:10,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231333.33333333334, ans=0.125 +2024-08-30 16:17:17,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=231386.66666666666, ans=0.125 +2024-08-30 16:18:04,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=231440.0, ans=0.125 +2024-08-30 16:18:11,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.64 vs. limit=22.5 +2024-08-30 16:18:13,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=231440.0, ans=0.125 +2024-08-30 16:18:29,888 INFO [train.py:1114] (1/4) Epoch 18, batch 1100, loss[loss=0.2012, simple_loss=0.2682, pruned_loss=0.04853, ctc_loss=0.09288, over 19594.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2725, pruned_loss=0.05078, ctc_loss=0.0959, over 3830526.83 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0 +2024-08-30 16:18:44,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.44 vs. limit=22.5 +2024-08-30 16:18:53,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.37 vs. limit=15.0 +2024-08-30 16:19:24,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.668e+02 1.884e+02 2.263e+02 3.606e+02, threshold=3.767e+02, percent-clipped=0.0 +2024-08-30 16:19:48,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=231760.0, ans=0.1 +2024-08-30 16:19:52,628 INFO [train.py:1114] (1/4) Epoch 18, batch 1150, loss[loss=0.2151, simple_loss=0.2788, pruned_loss=0.05544, ctc_loss=0.1015, over 19588.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2724, pruned_loss=0.05071, ctc_loss=0.0958, over 3830167.85 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0 +2024-08-30 16:19:55,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231813.33333333334, ans=0.125 +2024-08-30 16:20:00,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.72 vs. 
limit=10.0 +2024-08-30 16:20:06,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231866.66666666666, ans=0.0 +2024-08-30 16:22:20,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=231920.0, ans=0.0 +2024-08-30 16:22:56,526 INFO [train.py:1114] (1/4) Epoch 18, batch 1200, loss[loss=0.1942, simple_loss=0.2771, pruned_loss=0.03994, ctc_loss=0.07877, over 19831.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2739, pruned_loss=0.051, ctc_loss=0.0965, over 3825669.59 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-30 16:23:05,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=232080.0, ans=0.0 +2024-08-30 16:23:10,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=232133.33333333334, ans=0.0 +2024-08-30 16:23:24,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=232186.66666666666, ans=0.125 +2024-08-30 16:23:26,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.29 vs. limit=10.0 +2024-08-30 16:23:30,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=232240.0, ans=0.125 +2024-08-30 16:23:33,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.67 vs. limit=15.0 +2024-08-30 16:23:34,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=232240.0, ans=0.125 +2024-08-30 16:23:38,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=232240.0, ans=0.0 +2024-08-30 16:23:38,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=232240.0, ans=0.125 +2024-08-30 16:23:39,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232240.0, ans=0.125 +2024-08-30 16:23:39,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232240.0, ans=0.1 +2024-08-30 16:23:45,954 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.656e+02 1.841e+02 2.164e+02 3.391e+02, threshold=3.682e+02, percent-clipped=0.0 +2024-08-30 16:23:52,933 INFO [train.py:1114] (1/4) Epoch 18, batch 1250, loss[loss=0.2267, simple_loss=0.2931, pruned_loss=0.05839, ctc_loss=0.1086, over 19525.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2741, pruned_loss=0.0511, ctc_loss=0.09647, over 3842781.75 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-30 16:23:57,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.67 vs. 
limit=15.0 +2024-08-30 16:24:16,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=232453.33333333334, ans=0.0 +2024-08-30 16:24:18,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=232453.33333333334, ans=10.0 +2024-08-30 16:25:41,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232560.0, ans=0.125 +2024-08-30 16:25:42,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=232560.0, ans=0.0 +2024-08-30 16:25:45,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232560.0, ans=0.1 +2024-08-30 16:25:53,676 INFO [train.py:1114] (1/4) Epoch 18, batch 1300, loss[loss=0.2079, simple_loss=0.2827, pruned_loss=0.04905, ctc_loss=0.08728, over 18867.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2728, pruned_loss=0.05038, ctc_loss=0.09502, over 3846959.32 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0 +2024-08-30 16:32:25,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232666.66666666666, ans=0.1 +2024-08-30 16:45:22,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=232826.66666666666, ans=0.125 +2024-08-30 16:45:23,079 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.785e+02 2.170e+02 2.759e+02 4.331e+02, threshold=4.339e+02, percent-clipped=5.0 +2024-08-30 16:54:23,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=232826.66666666666, ans=0.2 +2024-08-30 17:02:45,881 INFO [train.py:1114] (1/4) Epoch 18, batch 1350, loss[loss=0.1762, simple_loss=0.2595, pruned_loss=0.03357, ctc_loss=0.06426, over 19771.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2726, pruned_loss=0.05023, ctc_loss=0.09447, over 3856082.84 frames. 
], batch size: 54, lr: 8.31e-03, grad_scale: 32.0 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-2 new file mode 100644 index 0000000000000000000000000000000000000000..3aa1ec07c94fb274b65677b9f760ba02eb0dbdd6 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-2 @@ -0,0 +1,570 @@ +2024-08-30 12:44:46,732 INFO [train.py:1182] (2/4) Training started +2024-08-30 12:44:53,509 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-30 12:44:53,511 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 17, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 
'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 12:44:53,512 INFO [train.py:1212] (2/4) About to create model +2024-08-30 12:44:54,217 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-30 12:44:54,218 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-16.pt +2024-08-30 12:45:01,882 INFO [train.py:1231] (2/4) Using DDP +2024-08-30 12:45:06,262 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-30 12:45:06,460 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-30 12:45:06,461 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-30 12:45:06,701 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-30 12:45:06,701 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-30 12:45:06,701 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-30 12:45:06,701 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-30 12:45:06,701 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-30 12:45:08,264 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-30 12:45:08,265 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-30 12:45:08,444 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-30 12:45:08,576 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-30 12:45:08,902 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-30 12:45:08,902 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 12:51:17,009 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-30 12:51:18,486 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-30 12:53:02,306 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-30 12:53:03,454 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=8.24 vs. limit=7.5 +2024-08-30 12:53:03,661 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-30 12:54:12,747 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-30 12:54:14,357 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12967MB +2024-08-30 12:54:14,376 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-30 12:54:59,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.34 vs. limit=15.0 +2024-08-30 12:55:06,919 INFO [train.py:1114] (2/4) Epoch 17, batch 0, loss[loss=0.1952, simple_loss=0.2558, pruned_loss=0.04945, ctc_loss=0.08906, over 19416.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2558, pruned_loss=0.04945, ctc_loss=0.08906, over 19416.00 frames. 
], batch size: 48, lr: 8.95e-03, grad_scale: 32.0 +2024-08-30 12:55:06,920 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-30 12:55:27,184 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.0834, 5.5112, 5.0666, 5.1729], device='cuda:2') +2024-08-30 12:55:27,653 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.8662, 2.0752, 3.2524, 3.4011], device='cuda:2') +2024-08-30 12:55:31,712 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.185, simple_loss=0.2737, pruned_loss=0.03584, ctc_loss=0.06176, over 944034.00 frames. +2024-08-30 12:55:31,713 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12967MB +2024-08-30 12:55:33,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=212405.33333333334, ans=0.125 +2024-08-30 12:56:02,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-30 12:56:20,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=212458.66666666666, ans=0.025 +2024-08-30 13:06:19,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.860e+02 2.030e+02 2.233e+02 2.993e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-30 13:06:19,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=212565.33333333334, ans=0.125 +2024-08-30 13:07:20,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. limit=6.0 +2024-08-30 13:09:56,387 INFO [train.py:1114] (2/4) Epoch 17, batch 50, loss[loss=0.1763, simple_loss=0.2446, pruned_loss=0.03925, ctc_loss=0.07347, over 19722.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2782, pruned_loss=0.05445, ctc_loss=0.1029, over 845315.44 frames. 
], batch size: 47, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:10:01,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212672.0, ans=0.125 +2024-08-30 13:11:45,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212672.0, ans=0.0 +2024-08-30 13:16:42,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212725.33333333334, ans=0.1 +2024-08-30 13:18:12,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=212778.66666666666, ans=0.125 +2024-08-30 13:18:12,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=212778.66666666666, ans=0.05 +2024-08-30 13:18:13,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=212778.66666666666, ans=0.125 +2024-08-30 13:18:31,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=212832.0, ans=0.125 +2024-08-30 13:19:01,146 INFO [train.py:1114] (2/4) Epoch 17, batch 100, loss[loss=0.1981, simple_loss=0.2692, pruned_loss=0.04602, ctc_loss=0.08747, over 19753.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2772, pruned_loss=0.05326, ctc_loss=0.1003, over 1498877.26 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:19:17,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212992.0, ans=0.1 +2024-08-30 13:21:59,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.59 vs. limit=22.5 +2024-08-30 13:23:00,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.80 vs. limit=12.0 +2024-08-30 13:23:11,084 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.706e+02 1.953e+02 2.287e+02 3.713e+02, threshold=3.906e+02, percent-clipped=0.0 +2024-08-30 13:24:07,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.06 vs. limit=15.0 +2024-08-30 13:24:10,866 INFO [train.py:1114] (2/4) Epoch 17, batch 150, loss[loss=0.1857, simple_loss=0.2527, pruned_loss=0.04277, ctc_loss=0.08313, over 19722.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2756, pruned_loss=0.0522, ctc_loss=0.0986, over 2027544.43 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:24:14,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.90 vs. 
limit=6.0 +2024-08-30 13:27:35,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=213365.33333333334, ans=0.125 +2024-08-30 13:27:51,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=213365.33333333334, ans=0.125 +2024-08-30 13:27:55,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-30 13:28:05,733 INFO [train.py:1114] (2/4) Epoch 17, batch 200, loss[loss=0.2337, simple_loss=0.2901, pruned_loss=0.06478, ctc_loss=0.1196, over 18397.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.275, pruned_loss=0.05219, ctc_loss=0.09867, over 2435139.54 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:28:16,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=213525.33333333334, ans=0.09899494936611666 +2024-08-30 13:28:40,334 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.384e+02 1.731e+02 1.992e+02 2.666e+02 4.093e+02, threshold=3.983e+02, percent-clipped=1.0 +2024-08-30 13:28:55,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=213685.33333333334, ans=0.125 +2024-08-30 13:29:01,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213685.33333333334, ans=0.0 +2024-08-30 13:29:04,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213685.33333333334, ans=0.1 +2024-08-30 13:29:07,536 INFO [train.py:1114] (2/4) Epoch 17, batch 250, loss[loss=0.2099, simple_loss=0.2793, pruned_loss=0.05083, ctc_loss=0.09702, over 19307.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2744, pruned_loss=0.05153, ctc_loss=0.09732, over 2755756.17 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:29:51,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=213898.66666666666, ans=0.025 +2024-08-30 13:29:52,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.21 vs. limit=10.0 +2024-08-30 13:30:02,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=213952.0, ans=0.125 +2024-08-30 13:30:13,963 INFO [train.py:1114] (2/4) Epoch 17, batch 300, loss[loss=0.2338, simple_loss=0.294, pruned_loss=0.06369, ctc_loss=0.1155, over 19521.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2748, pruned_loss=0.05154, ctc_loss=0.09736, over 3000078.39 frames. 
], batch size: 61, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:30:16,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214005.33333333334, ans=0.125 +2024-08-30 13:30:35,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=214058.66666666666, ans=0.0 +2024-08-30 13:30:45,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214112.0, ans=0.125 +2024-08-30 13:30:51,813 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.663e+02 1.872e+02 2.298e+02 3.693e+02, threshold=3.744e+02, percent-clipped=0.0 +2024-08-30 13:31:25,595 INFO [train.py:1114] (2/4) Epoch 17, batch 350, loss[loss=0.2074, simple_loss=0.2638, pruned_loss=0.05408, ctc_loss=0.1073, over 19751.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2751, pruned_loss=0.05169, ctc_loss=0.09759, over 3189055.41 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:31:25,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214272.0, ans=0.125 +2024-08-30 13:31:28,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=214272.0, ans=0.125 +2024-08-30 13:31:45,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.97 vs. limit=15.0 +2024-08-30 13:32:03,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214432.0, ans=0.125 +2024-08-30 13:32:08,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=214432.0, ans=0.125 +2024-08-30 13:32:24,581 INFO [train.py:1114] (2/4) Epoch 17, batch 400, loss[loss=0.2113, simple_loss=0.2814, pruned_loss=0.05133, ctc_loss=0.0964, over 19496.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2742, pruned_loss=0.05122, ctc_loss=0.09685, over 3340401.63 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:32:30,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=214538.66666666666, ans=0.0 +2024-08-30 13:33:01,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.238e+02 1.640e+02 1.901e+02 2.325e+02 4.074e+02, threshold=3.801e+02, percent-clipped=1.0 +2024-08-30 13:33:20,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=214752.0, ans=0.0 +2024-08-30 13:33:26,242 INFO [train.py:1114] (2/4) Epoch 17, batch 450, loss[loss=0.1993, simple_loss=0.2716, pruned_loss=0.04533, ctc_loss=0.09091, over 19617.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2749, pruned_loss=0.05159, ctc_loss=0.09742, over 3449884.10 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:34:00,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=214858.66666666666, ans=0.0 +2024-08-30 13:34:00,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.17 vs. 
limit=15.0 +2024-08-30 13:34:02,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-08-30 13:34:07,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214858.66666666666, ans=0.1 +2024-08-30 13:38:13,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.37 vs. limit=12.0 +2024-08-30 13:38:29,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=214912.0, ans=0.0 +2024-08-30 13:38:30,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214912.0, ans=0.2 +2024-08-30 13:43:53,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=215018.66666666666, ans=0.025 +2024-08-30 13:44:00,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215018.66666666666, ans=0.1 +2024-08-30 13:44:05,807 INFO [train.py:1114] (2/4) Epoch 17, batch 500, loss[loss=0.234, simple_loss=0.2974, pruned_loss=0.06128, ctc_loss=0.1202, over 19708.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2744, pruned_loss=0.05148, ctc_loss=0.09717, over 3545931.45 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:44:35,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=215072.0, ans=0.125 +2024-08-30 13:44:57,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.17 vs. limit=10.0 +2024-08-30 13:45:04,962 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.19 vs. limit=22.5 +2024-08-30 13:45:06,443 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.780e+02 2.026e+02 2.589e+02 4.105e+02, threshold=4.052e+02, percent-clipped=2.0 +2024-08-30 13:45:11,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=215232.0, ans=0.2 +2024-08-30 13:45:31,464 INFO [train.py:1114] (2/4) Epoch 17, batch 550, loss[loss=0.2212, simple_loss=0.2817, pruned_loss=0.05796, ctc_loss=0.112, over 19279.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2739, pruned_loss=0.05121, ctc_loss=0.09666, over 3608240.69 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-30 13:45:36,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=215338.66666666666, ans=0.5 +2024-08-30 13:45:52,209 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:45:52,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.06 vs. 
limit=15.0 +2024-08-30 13:45:55,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=215445.33333333334, ans=0.0 +2024-08-30 13:46:10,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=215498.66666666666, ans=12.0 +2024-08-30 13:46:12,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-08-30 13:46:16,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215498.66666666666, ans=0.1 +2024-08-30 13:46:26,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215552.0, ans=0.1 +2024-08-30 13:46:26,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=215552.0, ans=0.0 +2024-08-30 13:46:27,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215552.0, ans=0.125 +2024-08-30 13:47:20,208 INFO [train.py:1114] (2/4) Epoch 17, batch 600, loss[loss=0.2404, simple_loss=0.3028, pruned_loss=0.06502, ctc_loss=0.1201, over 19456.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2743, pruned_loss=0.05135, ctc_loss=0.09664, over 3666029.34 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-30 13:47:29,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=215605.33333333334, ans=0.125 +2024-08-30 13:47:32,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.64 vs. limit=22.5 +2024-08-30 13:47:45,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215712.0, ans=0.0 +2024-08-30 13:47:53,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.647e+02 1.940e+02 2.383e+02 4.124e+02, threshold=3.879e+02, percent-clipped=1.0 +2024-08-30 13:47:54,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=215765.33333333334, ans=0.125 +2024-08-30 13:48:05,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=215765.33333333334, ans=0.0 +2024-08-30 13:48:17,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=215818.66666666666, ans=0.0 +2024-08-30 13:48:27,128 INFO [train.py:1114] (2/4) Epoch 17, batch 650, loss[loss=0.2122, simple_loss=0.2806, pruned_loss=0.05187, ctc_loss=0.1003, over 19776.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2739, pruned_loss=0.05119, ctc_loss=0.09626, over 3716225.09 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 32.0 +2024-08-30 13:50:17,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. 
limit=6.0 +2024-08-30 13:51:24,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=215925.33333333334, ans=0.0 +2024-08-30 13:51:25,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215925.33333333334, ans=0.1 +2024-08-30 13:52:31,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=216032.0, ans=0.2 +2024-08-30 13:52:35,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=216032.0, ans=0.0 +2024-08-30 13:53:20,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216085.33333333334, ans=0.1 +2024-08-30 14:04:24,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216085.33333333334, ans=0.1 +2024-08-30 14:06:19,758 INFO [train.py:1114] (2/4) Epoch 17, batch 700, loss[loss=0.1879, simple_loss=0.2623, pruned_loss=0.04087, ctc_loss=0.07939, over 19707.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2748, pruned_loss=0.05184, ctc_loss=0.09754, over 3747768.85 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:06:27,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216138.66666666666, ans=0.125 +2024-08-30 14:07:09,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=216192.0, ans=0.025 +2024-08-30 14:12:13,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.374e+02 1.667e+02 2.137e+02 2.601e+02 4.284e+02, threshold=4.274e+02, percent-clipped=4.0 +2024-08-30 14:16:31,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216298.66666666666, ans=0.125 +2024-08-30 14:17:08,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=216352.0, ans=0.125 +2024-08-30 14:17:27,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216352.0, ans=0.125 +2024-08-30 14:17:35,080 INFO [train.py:1114] (2/4) Epoch 17, batch 750, loss[loss=0.1921, simple_loss=0.2638, pruned_loss=0.04346, ctc_loss=0.08339, over 19516.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2745, pruned_loss=0.05188, ctc_loss=0.09772, over 3774513.43 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:17:39,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=216405.33333333334, ans=0.125 +2024-08-30 14:18:08,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=216458.66666666666, ans=0.125 +2024-08-30 14:18:59,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=216512.0, ans=0.125 +2024-08-30 14:19:30,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.63 vs. 
limit=10.0 +2024-08-30 14:19:31,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216565.33333333334, ans=0.1 +2024-08-30 14:19:34,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=216565.33333333334, ans=0.0 +2024-08-30 14:19:48,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=216618.66666666666, ans=0.0 +2024-08-30 14:20:37,995 INFO [train.py:1114] (2/4) Epoch 17, batch 800, loss[loss=0.1639, simple_loss=0.2384, pruned_loss=0.03221, ctc_loss=0.06252, over 19431.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2746, pruned_loss=0.05177, ctc_loss=0.09773, over 3795456.66 frames. ], batch size: 48, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:22:01,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=216672.0, ans=10.0 +2024-08-30 14:31:25,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.25 vs. limit=15.0 +2024-08-30 14:31:32,320 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.715e+02 2.071e+02 2.537e+02 3.967e+02, threshold=4.143e+02, percent-clipped=0.0 +2024-08-30 14:32:33,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=216938.66666666666, ans=0.125 +2024-08-30 14:32:34,223 INFO [train.py:1114] (2/4) Epoch 17, batch 850, loss[loss=0.2342, simple_loss=0.2938, pruned_loss=0.06327, ctc_loss=0.1199, over 19651.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.274, pruned_loss=0.05137, ctc_loss=0.09688, over 3815335.46 frames. ], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:32:37,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216938.66666666666, ans=0.1 +2024-08-30 14:32:39,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=216938.66666666666, ans=0.025 +2024-08-30 14:32:53,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=216992.0, ans=0.0 +2024-08-30 14:33:08,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=217045.33333333334, ans=0.0 +2024-08-30 14:33:25,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=217098.66666666666, ans=0.0 +2024-08-30 14:34:06,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.75 vs. limit=15.0 +2024-08-30 14:34:26,344 INFO [train.py:1114] (2/4) Epoch 17, batch 900, loss[loss=0.2044, simple_loss=0.2682, pruned_loss=0.05046, ctc_loss=0.09912, over 19394.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2748, pruned_loss=0.05196, ctc_loss=0.0977, over 3819723.78 frames. 
], batch size: 48, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:35:58,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=217205.33333333334, ans=0.125 +2024-08-30 14:36:00,629 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:36:28,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=217258.66666666666, ans=0.2 +2024-08-30 14:36:46,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=217312.0, ans=0.125 +2024-08-30 14:36:59,138 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.623e+02 1.810e+02 2.233e+02 4.039e+02, threshold=3.621e+02, percent-clipped=0.0 +2024-08-30 14:37:03,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217365.33333333334, ans=0.125 +2024-08-30 14:37:09,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217365.33333333334, ans=0.125 +2024-08-30 14:37:24,537 INFO [train.py:1114] (2/4) Epoch 17, batch 950, loss[loss=0.2009, simple_loss=0.2648, pruned_loss=0.05043, ctc_loss=0.09046, over 19494.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2755, pruned_loss=0.05246, ctc_loss=0.09879, over 3820675.06 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:38:26,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.31 vs. limit=22.5 +2024-08-30 14:38:37,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217578.66666666666, ans=0.125 +2024-08-30 14:38:41,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=217578.66666666666, ans=0.125 +2024-08-30 14:38:43,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217578.66666666666, ans=0.125 +2024-08-30 14:38:43,164 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:38:47,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217578.66666666666, ans=0.1 +2024-08-30 14:38:51,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217632.0, ans=0.1 +2024-08-30 14:39:04,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=217685.33333333334, ans=0.125 +2024-08-30 14:39:16,468 INFO [train.py:1114] (2/4) Epoch 17, batch 1000, loss[loss=0.2093, simple_loss=0.2747, pruned_loss=0.05225, ctc_loss=0.09862, over 19852.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2761, pruned_loss=0.0527, ctc_loss=0.09925, over 3817731.00 frames. 
], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:39:33,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217792.0, ans=0.1 +2024-08-30 14:39:36,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=217792.0, ans=0.025 +2024-08-30 14:39:38,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=217792.0, ans=0.125 +2024-08-30 14:39:48,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.04 vs. limit=6.0 +2024-08-30 14:39:52,695 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.648e+02 1.905e+02 2.181e+02 3.196e+02, threshold=3.810e+02, percent-clipped=0.0 +2024-08-30 14:39:58,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=217898.66666666666, ans=0.0 +2024-08-30 14:40:13,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=217952.0, ans=0.07 +2024-08-30 14:40:17,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.81 vs. limit=22.5 +2024-08-30 14:40:18,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0 +2024-08-30 14:40:20,739 INFO [train.py:1114] (2/4) Epoch 17, batch 1050, loss[loss=0.2084, simple_loss=0.2766, pruned_loss=0.05076, ctc_loss=0.09691, over 19848.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.275, pruned_loss=0.05221, ctc_loss=0.09831, over 3825551.01 frames. ], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:40:27,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218005.33333333334, ans=0.1 +2024-08-30 14:40:28,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=218005.33333333334, ans=0.0 +2024-08-30 14:40:36,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=218058.66666666666, ans=0.07 +2024-08-30 14:40:46,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=218112.0, ans=0.125 +2024-08-30 14:40:57,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=218165.33333333334, ans=0.125 +2024-08-30 14:41:15,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. 
limit=12.0 +2024-08-30 14:41:16,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=218218.66666666666, ans=0.125 +2024-08-30 14:41:22,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=218218.66666666666, ans=0.0 +2024-08-30 14:41:23,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=218272.0, ans=0.125 +2024-08-30 14:41:24,731 INFO [train.py:1114] (2/4) Epoch 17, batch 1100, loss[loss=0.1895, simple_loss=0.2676, pruned_loss=0.04011, ctc_loss=0.07793, over 19592.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2749, pruned_loss=0.05209, ctc_loss=0.09814, over 3832810.09 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:41:28,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=218272.0, ans=0.125 +2024-08-30 14:41:30,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218272.0, ans=0.1 +2024-08-30 14:41:44,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-30 14:42:16,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218378.66666666666, ans=0.1 +2024-08-30 14:42:23,465 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.634e+02 1.909e+02 2.238e+02 3.833e+02, threshold=3.817e+02, percent-clipped=1.0 +2024-08-30 14:43:15,267 INFO [train.py:1114] (2/4) Epoch 17, batch 1150, loss[loss=0.1868, simple_loss=0.2555, pruned_loss=0.04262, ctc_loss=0.08214, over 19591.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.275, pruned_loss=0.05222, ctc_loss=0.09841, over 3830356.08 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:43:23,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=218538.66666666666, ans=0.0 +2024-08-30 14:43:42,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=218592.0, ans=0.125 +2024-08-30 14:43:44,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=218645.33333333334, ans=0.2 +2024-08-30 14:43:44,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218645.33333333334, ans=0.1 +2024-08-30 14:43:50,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=218645.33333333334, ans=0.0 +2024-08-30 14:43:56,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.01 vs. limit=22.5 +2024-08-30 14:44:04,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.20 vs. 
limit=6.0 +2024-08-30 14:44:12,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=218752.0, ans=0.2 +2024-08-30 14:44:16,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.59 vs. limit=15.0 +2024-08-30 14:44:20,101 INFO [train.py:1114] (2/4) Epoch 17, batch 1200, loss[loss=0.2177, simple_loss=0.2849, pruned_loss=0.05574, ctc_loss=0.09774, over 19842.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2762, pruned_loss=0.05268, ctc_loss=0.09929, over 3825167.37 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:44:20,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=218805.33333333334, ans=0.2 +2024-08-30 14:46:08,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.28 vs. limit=15.0 +2024-08-30 14:46:08,763 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.467e+02 1.734e+02 1.937e+02 2.235e+02 3.279e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-30 14:46:26,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=219018.66666666666, ans=0.04949747468305833 +2024-08-30 14:46:31,313 INFO [train.py:1114] (2/4) Epoch 17, batch 1250, loss[loss=0.2169, simple_loss=0.2782, pruned_loss=0.05727, ctc_loss=0.1026, over 19515.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2767, pruned_loss=0.05278, ctc_loss=0.09932, over 3843189.79 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:46:39,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=12.0 +2024-08-30 14:48:24,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.86 vs. limit=15.0 +2024-08-30 14:48:52,392 INFO [train.py:1114] (2/4) Epoch 17, batch 1300, loss[loss=0.2187, simple_loss=0.2868, pruned_loss=0.05462, ctc_loss=0.1034, over 18942.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2758, pruned_loss=0.0523, ctc_loss=0.09846, over 3847215.40 frames. ], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:49:29,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.750e+02 2.054e+02 2.564e+02 3.826e+02, threshold=4.108e+02, percent-clipped=0.0 +2024-08-30 14:49:32,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=219498.66666666666, ans=0.125 +2024-08-30 14:49:44,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=219552.0, ans=0.07 +2024-08-30 14:50:00,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.30 vs. limit=12.0 +2024-08-30 14:50:08,914 INFO [train.py:1114] (2/4) Epoch 17, batch 1350, loss[loss=0.2038, simple_loss=0.2692, pruned_loss=0.05046, ctc_loss=0.09351, over 19785.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2753, pruned_loss=0.05218, ctc_loss=0.09819, over 3857406.84 frames. 
], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:50:16,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-30 14:50:17,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=219605.33333333334, ans=0.0 +2024-08-30 14:50:17,546 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=12.0 +2024-08-30 14:50:19,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=219658.66666666666, ans=0.0 +2024-08-30 14:50:24,794 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:50:34,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219712.0, ans=0.125 +2024-08-30 14:51:04,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0 +2024-08-30 14:51:09,317 INFO [train.py:1114] (2/4) Epoch 17, batch 1400, loss[loss=0.1717, simple_loss=0.2298, pruned_loss=0.04066, ctc_loss=0.08087, over 19655.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2745, pruned_loss=0.05173, ctc_loss=0.09733, over 3863333.21 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:51:27,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.85 vs. limit=22.5 +2024-08-30 14:52:01,609 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.697e+02 1.910e+02 2.399e+02 4.058e+02, threshold=3.819e+02, percent-clipped=0.0 +2024-08-30 14:52:08,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.64 vs. limit=15.0 +2024-08-30 14:52:24,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=220085.33333333334, ans=0.025 +2024-08-30 14:52:25,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.44 vs. limit=15.0 +2024-08-30 14:52:26,360 INFO [train.py:1114] (2/4) Epoch 17, batch 1450, loss[loss=0.2146, simple_loss=0.2918, pruned_loss=0.05032, ctc_loss=0.09184, over 19634.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.275, pruned_loss=0.05188, ctc_loss=0.09757, over 3862191.42 frames. ], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:52:42,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220138.66666666666, ans=0.125 +2024-08-30 14:52:59,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.32 vs. limit=10.0 +2024-08-30 14:54:12,185 INFO [train.py:1114] (2/4) Epoch 17, batch 1500, loss[loss=0.208, simple_loss=0.2727, pruned_loss=0.05123, ctc_loss=0.102, over 19586.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2754, pruned_loss=0.05208, ctc_loss=0.09809, over 3862121.78 frames. 
], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:54:33,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220458.66666666666, ans=0.125 +2024-08-30 14:54:40,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.36 vs. limit=10.0 +2024-08-30 14:54:43,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220512.0, ans=0.0 +2024-08-30 14:54:47,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220512.0, ans=0.125 +2024-08-30 14:54:49,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=220512.0, ans=0.125 +2024-08-30 14:54:54,682 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.719e+02 1.906e+02 2.293e+02 3.704e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-30 14:55:02,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-30 14:55:03,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=220565.33333333334, ans=0.07 +2024-08-30 14:55:17,122 INFO [train.py:1114] (2/4) Epoch 17, batch 1550, loss[loss=0.2365, simple_loss=0.3022, pruned_loss=0.06304, ctc_loss=0.112, over 19616.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2756, pruned_loss=0.05241, ctc_loss=0.09874, over 3845614.46 frames. ], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:55:47,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.28 vs. limit=22.5 +2024-08-30 14:55:56,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=220778.66666666666, ans=0.125 +2024-08-30 14:55:56,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=12.0 +2024-08-30 14:56:01,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=220778.66666666666, ans=0.025 +2024-08-30 14:56:27,382 INFO [train.py:1114] (2/4) Epoch 17, batch 1600, loss[loss=0.2328, simple_loss=0.2953, pruned_loss=0.06139, ctc_loss=0.1187, over 19832.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2752, pruned_loss=0.05242, ctc_loss=0.09873, over 3835786.34 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-30 14:57:18,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220938.66666666666, ans=0.1 +2024-08-30 14:57:21,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=220992.0, ans=15.0 +2024-08-30 14:58:34,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.55 vs. 
limit=22.5 +2024-08-30 15:00:47,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.738e+02 2.160e+02 2.635e+02 3.870e+02, threshold=4.320e+02, percent-clipped=2.0 +2024-08-30 15:00:50,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=221098.66666666666, ans=0.0 +2024-08-30 15:00:51,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221098.66666666666, ans=0.125 +2024-08-30 15:02:48,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=221152.0, ans=0.025 +2024-08-30 15:03:53,982 INFO [train.py:1114] (2/4) Epoch 17, batch 1650, loss[loss=0.2128, simple_loss=0.2812, pruned_loss=0.05343, ctc_loss=0.09384, over 19655.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2746, pruned_loss=0.05213, ctc_loss=0.09827, over 3833058.26 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:03:57,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=221205.33333333334, ans=0.2 +2024-08-30 15:03:59,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=221205.33333333334, ans=0.05 +2024-08-30 15:05:39,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=221258.66666666666, ans=0.0 +2024-08-30 15:05:51,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=221258.66666666666, ans=0.0 +2024-08-30 15:07:26,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.70 vs. limit=15.0 +2024-08-30 15:07:44,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=221365.33333333334, ans=22.5 +2024-08-30 15:07:47,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.66 vs. limit=8.0 +2024-08-30 15:08:00,829 INFO [train.py:1114] (2/4) Epoch 17, batch 1700, loss[loss=0.1937, simple_loss=0.2501, pruned_loss=0.05023, ctc_loss=0.0918, over 19677.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2745, pruned_loss=0.05179, ctc_loss=0.09761, over 3847660.59 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:08:06,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.25 vs. limit=22.5 +2024-08-30 15:08:18,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-30 15:08:36,782 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.717e+02 1.998e+02 2.422e+02 4.059e+02, threshold=3.996e+02, percent-clipped=0.0 +2024-08-30 15:09:50,038 INFO [train.py:1114] (2/4) Epoch 17, batch 1750, loss[loss=0.1998, simple_loss=0.2555, pruned_loss=0.05229, ctc_loss=0.09908, over 19643.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2738, pruned_loss=0.05143, ctc_loss=0.09694, over 3852036.08 frames. 
], batch size: 45, lr: 8.76e-03, grad_scale: 32.0 +2024-08-30 15:09:57,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0 +2024-08-30 15:10:03,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=221792.0, ans=0.125 +2024-08-30 15:10:11,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=221845.33333333334, ans=0.125 +2024-08-30 15:10:46,082 INFO [train.py:1114] (2/4) Epoch 17, batch 1800, loss[loss=0.1995, simple_loss=0.2735, pruned_loss=0.04593, ctc_loss=0.08381, over 19605.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.274, pruned_loss=0.05157, ctc_loss=0.09693, over 3853124.07 frames. ], batch size: 55, lr: 8.76e-03, grad_scale: 32.0 +2024-08-30 15:11:21,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222165.33333333334, ans=0.125 +2024-08-30 15:11:23,232 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.773e+02 2.029e+02 2.607e+02 4.351e+02, threshold=4.057e+02, percent-clipped=1.0 +2024-08-30 15:11:25,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=222165.33333333334, ans=0.125 +2024-08-30 15:11:36,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222218.66666666666, ans=0.125 +2024-08-30 15:11:37,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=12.0 +2024-08-30 15:11:38,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.42 vs. limit=22.5 +2024-08-30 15:11:43,556 INFO [train.py:1114] (2/4) Epoch 17, batch 1850, loss[loss=0.2005, simple_loss=0.2762, pruned_loss=0.04428, ctc_loss=0.09059, over 19604.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2739, pruned_loss=0.05151, ctc_loss=0.0968, over 3856458.31 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0 +2024-08-30 15:12:17,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.30 vs. limit=15.0 +2024-08-30 15:12:19,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222432.0, ans=0.125 +2024-08-30 15:12:23,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=222432.0, ans=0.0 +2024-08-30 15:12:24,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=222432.0, ans=0.125 +2024-08-30 15:12:37,429 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.02 vs. limit=22.5 +2024-08-30 15:12:40,691 INFO [train.py:1114] (2/4) Epoch 17, batch 1900, loss[loss=0.1881, simple_loss=0.2748, pruned_loss=0.03728, ctc_loss=0.06736, over 19617.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2741, pruned_loss=0.05122, ctc_loss=0.09622, over 3861433.48 frames. 
], batch size: 59, lr: 8.75e-03, grad_scale: 16.0 +2024-08-30 15:12:41,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.11 vs. limit=12.0 +2024-08-30 15:13:03,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=222645.33333333334, ans=0.125 +2024-08-30 15:13:18,238 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.682e+02 1.950e+02 2.328e+02 4.923e+02, threshold=3.901e+02, percent-clipped=3.0 +2024-08-30 15:13:34,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=222752.0, ans=0.0 +2024-08-30 15:13:38,420 INFO [train.py:1114] (2/4) Epoch 17, batch 1950, loss[loss=0.1948, simple_loss=0.2617, pruned_loss=0.04641, ctc_loss=0.08783, over 19600.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2749, pruned_loss=0.05127, ctc_loss=0.09621, over 3870786.46 frames. ], batch size: 52, lr: 8.74e-03, grad_scale: 16.0 +2024-08-30 15:14:32,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=222858.66666666666, ans=0.04949747468305833 +2024-08-30 15:15:27,050 INFO [train.py:1114] (2/4) Epoch 17, batch 2000, loss[loss=0.185, simple_loss=0.2425, pruned_loss=0.04637, ctc_loss=0.08667, over 19661.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.275, pruned_loss=0.05139, ctc_loss=0.09656, over 3856145.28 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0 +2024-08-30 15:15:29,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.85 vs. limit=22.5 +2024-08-30 15:15:38,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.27 vs. limit=22.5 +2024-08-30 15:15:44,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=223125.33333333334, ans=0.0 +2024-08-30 15:15:50,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. 
limit=6.0 +2024-08-30 15:15:51,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=223178.66666666666, ans=0.125 +2024-08-30 15:15:51,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=223178.66666666666, ans=0.2 +2024-08-30 15:15:54,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=223178.66666666666, ans=0.0 +2024-08-30 15:15:56,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223178.66666666666, ans=0.125 +2024-08-30 15:16:03,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.692e+02 2.099e+02 2.435e+02 3.373e+02, threshold=4.199e+02, percent-clipped=0.0 +2024-08-30 15:16:05,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=223232.0, ans=0.07 +2024-08-30 15:16:08,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223232.0, ans=0.125 +2024-08-30 15:16:11,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-30 15:16:12,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-30 15:16:16,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=223285.33333333334, ans=0.025 +2024-08-30 15:16:42,837 INFO [train.py:1114] (2/4) Epoch 17, batch 2050, loss[loss=0.1693, simple_loss=0.2368, pruned_loss=0.03745, ctc_loss=0.06731, over 19708.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2747, pruned_loss=0.05181, ctc_loss=0.09733, over 3852292.04 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0 +2024-08-30 15:18:19,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.13 vs. limit=6.0 +2024-08-30 15:18:29,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=223392.0, ans=0.2 +2024-08-30 15:20:15,747 INFO [train.py:1114] (2/4) Epoch 17, batch 2100, loss[loss=0.2146, simple_loss=0.2782, pruned_loss=0.05455, ctc_loss=0.1048, over 19765.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2742, pruned_loss=0.05155, ctc_loss=0.09705, over 3859849.16 frames. 
], batch size: 54, lr: 8.73e-03, grad_scale: 32.0 +2024-08-30 15:21:08,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=223658.66666666666, ans=0.125 +2024-08-30 15:21:41,987 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.235e+02 1.693e+02 2.019e+02 2.546e+02 6.032e+02, threshold=4.039e+02, percent-clipped=5.0 +2024-08-30 15:21:43,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223765.33333333334, ans=0.1 +2024-08-30 15:21:51,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=223818.66666666666, ans=0.125 +2024-08-30 15:21:58,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223818.66666666666, ans=0.1 +2024-08-30 15:22:02,823 INFO [train.py:1114] (2/4) Epoch 17, batch 2150, loss[loss=0.2174, simple_loss=0.2713, pruned_loss=0.05951, ctc_loss=0.1113, over 19847.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2741, pruned_loss=0.0516, ctc_loss=0.09707, over 3870273.52 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0 +2024-08-30 15:22:05,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223872.0, ans=0.125 +2024-08-30 15:22:22,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223925.33333333334, ans=0.125 +2024-08-30 15:22:26,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=223978.66666666666, ans=0.0 +2024-08-30 15:22:58,284 INFO [train.py:1114] (2/4) Epoch 17, batch 2200, loss[loss=0.2448, simple_loss=0.3009, pruned_loss=0.06857, ctc_loss=0.1291, over 19591.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2744, pruned_loss=0.05176, ctc_loss=0.09743, over 3869561.05 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0 +2024-08-30 15:23:03,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224138.66666666666, ans=0.1 +2024-08-30 15:23:21,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=224192.0, ans=0.07 +2024-08-30 15:23:53,333 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.675e+02 1.986e+02 2.371e+02 4.244e+02, threshold=3.972e+02, percent-clipped=2.0 +2024-08-30 15:24:07,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224352.0, ans=0.125 +2024-08-30 15:24:13,611 INFO [train.py:1114] (2/4) Epoch 17, batch 2250, loss[loss=0.217, simple_loss=0.2906, pruned_loss=0.05139, ctc_loss=0.1014, over 19606.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2744, pruned_loss=0.05174, ctc_loss=0.09739, over 3868300.94 frames. 
], batch size: 55, lr: 8.71e-03, grad_scale: 32.0 +2024-08-30 15:24:18,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=224405.33333333334, ans=0.125 +2024-08-30 15:25:46,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=224512.0, ans=0.2 +2024-08-30 15:26:47,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=224512.0, ans=0.125 +2024-08-30 15:27:03,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=224618.66666666666, ans=0.125 +2024-08-30 15:27:14,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=224672.0, ans=0.125 +2024-08-30 15:27:15,735 INFO [train.py:1114] (2/4) Epoch 17, batch 2300, loss[loss=0.1872, simple_loss=0.2581, pruned_loss=0.04325, ctc_loss=0.07445, over 19502.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2739, pruned_loss=0.05198, ctc_loss=0.09767, over 3862443.18 frames. ], batch size: 49, lr: 8.71e-03, grad_scale: 32.0 +2024-08-30 15:27:20,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224672.0, ans=0.125 +2024-08-30 15:27:23,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=224672.0, ans=0.0 +2024-08-30 15:28:25,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=224725.33333333334, ans=0.2 +2024-08-30 15:28:29,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=224725.33333333334, ans=0.025 +2024-08-30 15:28:46,688 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.759e+02 2.126e+02 2.592e+02 4.068e+02, threshold=4.252e+02, percent-clipped=2.0 +2024-08-30 15:28:59,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224885.33333333334, ans=0.125 +2024-08-30 15:29:48,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=224885.33333333334, ans=0.125 +2024-08-30 15:29:51,085 INFO [train.py:1114] (2/4) Epoch 17, batch 2350, loss[loss=0.242, simple_loss=0.3064, pruned_loss=0.06454, ctc_loss=0.1212, over 19675.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2741, pruned_loss=0.05214, ctc_loss=0.09784, over 3864783.52 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 32.0 +2024-08-30 15:29:53,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=224938.66666666666, ans=0.125 +2024-08-30 15:30:02,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.05 vs. 
limit=15.0 +2024-08-30 15:30:28,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=224992.0, ans=0.0 +2024-08-30 15:30:28,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=224992.0, ans=0.05 +2024-08-30 15:30:32,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=224992.0, ans=0.125 +2024-08-30 15:31:55,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=225045.33333333334, ans=0.0 +2024-08-30 15:33:11,237 INFO [train.py:1114] (2/4) Epoch 17, batch 2400, loss[loss=0.2186, simple_loss=0.2835, pruned_loss=0.05496, ctc_loss=0.1093, over 19315.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2765, pruned_loss=0.05308, ctc_loss=0.09946, over 3858493.92 frames. ], batch size: 71, lr: 8.70e-03, grad_scale: 32.0 +2024-08-30 15:33:30,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=225258.66666666666, ans=0.125 +2024-08-30 15:33:34,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=225312.0, ans=0.125 +2024-08-30 15:33:35,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=12.0 +2024-08-30 15:33:48,812 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.684e+02 1.880e+02 2.443e+02 3.780e+02, threshold=3.760e+02, percent-clipped=0.0 +2024-08-30 15:34:10,328 INFO [train.py:1114] (2/4) Epoch 17, batch 2450, loss[loss=0.2978, simple_loss=0.3236, pruned_loss=0.09849, ctc_loss=0.1875, over 13434.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2808, pruned_loss=0.05599, ctc_loss=0.1055, over 3729261.65 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 16.0 +2024-08-30 15:34:10,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=225472.0, ans=0.125 +2024-08-30 15:34:12,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=225472.0, ans=0.0 +2024-08-30 15:34:45,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=225472.0, ans=0.125 +2024-08-30 15:34:54,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225525.33333333334, ans=0.1 +2024-08-30 15:34:55,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=225525.33333333334, ans=0.0 +2024-08-30 15:34:59,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=225578.66666666666, ans=10.0 +2024-08-30 15:38:02,847 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.192, simple_loss=0.2512, pruned_loss=0.04765, ctc_loss=0.09374, over 19432.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2512, pruned_loss=0.04765, ctc_loss=0.09374, over 19432.00 frames. 
], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 15:38:02,848 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-30 15:38:39,840 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4936, 3.0624, 2.1043, 2.6462], device='cuda:2') +2024-08-30 15:39:34,946 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.0364, ctc_loss=0.06401, over 944034.00 frames. +2024-08-30 15:39:34,947 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13562MB +2024-08-30 15:39:39,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225680.0, ans=0.125 +2024-08-30 15:40:20,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-30 15:40:29,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-30 15:40:33,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.00 vs. limit=15.0 +2024-08-30 15:40:47,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=225840.0, ans=0.125 +2024-08-30 15:40:58,092 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.919e+02 2.092e+02 2.421e+02 5.568e+02, threshold=4.185e+02, percent-clipped=4.0 +2024-08-30 15:40:59,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.76 vs. limit=15.0 +2024-08-30 15:41:02,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=225893.33333333334, ans=0.07 +2024-08-30 15:41:04,965 INFO [train.py:1114] (2/4) Epoch 18, batch 50, loss[loss=0.1731, simple_loss=0.2371, pruned_loss=0.0395, ctc_loss=0.07531, over 19736.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2761, pruned_loss=0.05232, ctc_loss=0.09881, over 844774.48 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 15:42:00,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=226053.33333333334, ans=0.0 +2024-08-30 15:44:07,505 INFO [train.py:1114] (2/4) Epoch 18, batch 100, loss[loss=0.187, simple_loss=0.2598, pruned_loss=0.04168, ctc_loss=0.07718, over 19708.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2785, pruned_loss=0.05331, ctc_loss=0.09997, over 1499238.30 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-30 15:44:10,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=226213.33333333334, ans=0.2 +2024-08-30 15:44:14,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.43 vs. limit=22.5 +2024-08-30 15:44:28,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.34 vs. 
limit=15.0 +2024-08-30 15:44:40,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=226320.0, ans=0.04949747468305833 +2024-08-30 15:44:59,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=226426.66666666666, ans=0.125 +2024-08-30 15:45:01,911 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.711e+02 1.973e+02 2.383e+02 4.146e+02, threshold=3.946e+02, percent-clipped=0.0 +2024-08-30 15:45:10,549 INFO [train.py:1114] (2/4) Epoch 18, batch 150, loss[loss=0.1889, simple_loss=0.2549, pruned_loss=0.04414, ctc_loss=0.08667, over 19719.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2747, pruned_loss=0.05107, ctc_loss=0.09587, over 2028180.04 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-30 15:45:19,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226480.0, ans=0.1 +2024-08-30 15:45:26,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-30 15:45:27,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226533.33333333334, ans=0.125 +2024-08-30 15:45:30,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-30 15:45:40,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226586.66666666666, ans=0.1 +2024-08-30 15:45:43,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226586.66666666666, ans=0.125 +2024-08-30 15:46:03,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=226693.33333333334, ans=0.0 +2024-08-30 15:46:16,528 INFO [train.py:1114] (2/4) Epoch 18, batch 200, loss[loss=0.2072, simple_loss=0.2755, pruned_loss=0.04951, ctc_loss=0.09987, over 18272.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2737, pruned_loss=0.05095, ctc_loss=0.09567, over 2435566.49 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-30 15:46:17,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=226746.66666666666, ans=0.0 +2024-08-30 15:46:27,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0 +2024-08-30 15:46:29,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=226800.0, ans=0.125 +2024-08-30 15:46:54,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=226906.66666666666, ans=0.125 +2024-08-30 15:47:08,526 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.794e+02 2.164e+02 2.564e+02 4.131e+02, threshold=4.328e+02, percent-clipped=1.0 +2024-08-30 15:47:20,540 INFO [train.py:1114] (2/4) Epoch 18, batch 250, loss[loss=0.2632, simple_loss=0.3191, pruned_loss=0.07563, ctc_loss=0.1402, over 19347.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2742, pruned_loss=0.05149, ctc_loss=0.09685, over 2754676.87 frames. 
], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-30 15:47:23,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=227013.33333333334, ans=0.025 +2024-08-30 15:47:23,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.07 vs. limit=22.5 +2024-08-30 15:47:27,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227013.33333333334, ans=0.0 +2024-08-30 15:47:32,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=227066.66666666666, ans=0.0 +2024-08-30 15:48:55,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=227173.33333333334, ans=0.0 +2024-08-30 15:49:22,637 INFO [train.py:1114] (2/4) Epoch 18, batch 300, loss[loss=0.2147, simple_loss=0.2799, pruned_loss=0.05502, ctc_loss=0.09868, over 19550.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2735, pruned_loss=0.05103, ctc_loss=0.09594, over 3000148.78 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-30 15:51:23,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=227440.0, ans=0.2 +2024-08-30 15:51:27,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=227440.0, ans=0.0 +2024-08-30 15:51:30,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=227440.0, ans=0.95 +2024-08-30 15:51:31,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=22.5 +2024-08-30 15:51:34,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=227493.33333333334, ans=0.2 +2024-08-30 15:51:39,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=227493.33333333334, ans=0.125 +2024-08-30 15:51:40,143 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.730e+02 1.916e+02 2.273e+02 3.732e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-30 15:51:45,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=227493.33333333334, ans=0.125 +2024-08-30 15:51:48,899 INFO [train.py:1114] (2/4) Epoch 18, batch 350, loss[loss=0.1741, simple_loss=0.2424, pruned_loss=0.03833, ctc_loss=0.0727, over 19783.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2739, pruned_loss=0.05112, ctc_loss=0.09602, over 3189797.23 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-30 15:51:50,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=227546.66666666666, ans=0.025 +2024-08-30 15:52:23,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.12 vs. 
limit=15.0 +2024-08-30 15:52:29,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=227706.66666666666, ans=0.0 +2024-08-30 15:52:40,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=227760.0, ans=0.125 +2024-08-30 15:52:44,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227760.0, ans=0.0 +2024-08-30 15:52:51,839 INFO [train.py:1114] (2/4) Epoch 18, batch 400, loss[loss=0.2107, simple_loss=0.288, pruned_loss=0.04792, ctc_loss=0.09374, over 19482.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2738, pruned_loss=0.05116, ctc_loss=0.0962, over 3341232.34 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-30 15:52:53,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=227813.33333333334, ans=0.0 +2024-08-30 15:52:55,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=227813.33333333334, ans=0.125 +2024-08-30 15:54:16,394 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.651e+02 1.862e+02 2.258e+02 4.636e+02, threshold=3.723e+02, percent-clipped=1.0 +2024-08-30 15:54:20,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228026.66666666666, ans=0.125 +2024-08-30 15:54:20,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.29 vs. limit=22.5 +2024-08-30 15:54:25,942 INFO [train.py:1114] (2/4) Epoch 18, batch 450, loss[loss=0.2245, simple_loss=0.2914, pruned_loss=0.05591, ctc_loss=0.1144, over 19625.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2743, pruned_loss=0.05133, ctc_loss=0.09659, over 3450729.30 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-30 15:54:26,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.30 vs. limit=22.5 +2024-08-30 15:54:50,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=228133.33333333334, ans=0.125 +2024-08-30 15:55:04,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=228186.66666666666, ans=0.125 +2024-08-30 15:55:17,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0 +2024-08-30 15:55:18,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=228240.0, ans=0.025 +2024-08-30 15:55:21,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228240.0, ans=0.1 +2024-08-30 15:55:22,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=228293.33333333334, ans=0.125 +2024-08-30 15:55:37,509 INFO [train.py:1114] (2/4) Epoch 18, batch 500, loss[loss=0.2214, simple_loss=0.2927, pruned_loss=0.05513, ctc_loss=0.09975, over 19681.00 frames. 
], tot_loss[loss=0.207, simple_loss=0.2735, pruned_loss=0.05099, ctc_loss=0.09611, over 3546019.83 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-30 15:55:45,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=228346.66666666666, ans=0.125 +2024-08-30 15:55:46,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.38 vs. limit=10.0 +2024-08-30 15:55:50,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=228400.0, ans=0.025 +2024-08-30 15:57:00,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=228506.66666666666, ans=0.0 +2024-08-30 15:57:50,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228560.0, ans=0.125 +2024-08-30 15:57:54,269 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.602e+02 1.832e+02 2.190e+02 3.877e+02, threshold=3.665e+02, percent-clipped=2.0 +2024-08-30 15:57:58,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=228560.0, ans=0.0 +2024-08-30 15:58:00,971 INFO [train.py:1114] (2/4) Epoch 18, batch 550, loss[loss=0.2293, simple_loss=0.2885, pruned_loss=0.06216, ctc_loss=0.1147, over 19242.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2735, pruned_loss=0.05102, ctc_loss=0.09619, over 3608988.72 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-30 15:58:50,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.55 vs. limit=6.0 +2024-08-30 16:01:05,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228720.0, ans=0.125 +2024-08-30 16:01:07,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=228720.0, ans=0.125 +2024-08-30 16:01:35,822 INFO [train.py:1114] (2/4) Epoch 18, batch 600, loss[loss=0.2275, simple_loss=0.2938, pruned_loss=0.05909, ctc_loss=0.1075, over 19360.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2731, pruned_loss=0.05068, ctc_loss=0.09546, over 3665746.22 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-30 16:02:23,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=228933.33333333334, ans=0.125 +2024-08-30 16:02:33,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.34 vs. 
limit=15.0 +2024-08-30 16:03:04,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228986.66666666666, ans=0.125 +2024-08-30 16:04:37,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229093.33333333334, ans=0.125 +2024-08-30 16:04:39,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=229093.33333333334, ans=0.025 +2024-08-30 16:04:39,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229093.33333333334, ans=0.0 +2024-08-30 16:04:41,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.417e+02 1.726e+02 2.045e+02 2.727e+02 4.181e+02, threshold=4.090e+02, percent-clipped=7.0 +2024-08-30 16:04:48,705 INFO [train.py:1114] (2/4) Epoch 18, batch 650, loss[loss=0.2118, simple_loss=0.2823, pruned_loss=0.0511, ctc_loss=0.0976, over 19756.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.273, pruned_loss=0.05103, ctc_loss=0.09612, over 3716208.52 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-30 16:04:59,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229146.66666666666, ans=0.125 +2024-08-30 16:05:06,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=229146.66666666666, ans=0.125 +2024-08-30 16:06:18,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=229200.0, ans=0.0 +2024-08-30 16:06:23,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=229200.0, ans=0.125 +2024-08-30 16:06:31,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=229253.33333333334, ans=0.0 +2024-08-30 16:06:35,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=12.0 +2024-08-30 16:06:36,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=229253.33333333334, ans=0.0 +2024-08-30 16:06:36,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=229253.33333333334, ans=0.09899494936611666 +2024-08-30 16:06:44,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=229306.66666666666, ans=0.125 +2024-08-30 16:06:47,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229306.66666666666, ans=0.1 +2024-08-30 16:07:14,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=229306.66666666666, ans=0.125 +2024-08-30 16:07:25,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.79 vs. limit=22.5 +2024-08-30 16:07:32,106 INFO [train.py:1114] (2/4) Epoch 18, batch 700, loss[loss=0.1846, simple_loss=0.2526, pruned_loss=0.04275, ctc_loss=0.07763, over 19722.00 frames. 
], tot_loss[loss=0.2069, simple_loss=0.2732, pruned_loss=0.05105, ctc_loss=0.09619, over 3749014.97 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:07:37,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.47 vs. limit=22.5 +2024-08-30 16:07:41,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=229413.33333333334, ans=0.04949747468305833 +2024-08-30 16:07:44,217 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:07:45,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=229466.66666666666, ans=0.0 +2024-08-30 16:07:51,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229466.66666666666, ans=0.1 +2024-08-30 16:08:15,813 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:08:27,242 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.709e+02 1.988e+02 2.480e+02 4.374e+02, threshold=3.975e+02, percent-clipped=1.0 +2024-08-30 16:08:34,072 INFO [train.py:1114] (2/4) Epoch 18, batch 750, loss[loss=0.2142, simple_loss=0.2806, pruned_loss=0.0534, ctc_loss=0.1026, over 19859.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2726, pruned_loss=0.05056, ctc_loss=0.09512, over 3775543.53 frames. ], batch size: 55, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:08:44,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=229680.0, ans=0.0 +2024-08-30 16:08:56,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229733.33333333334, ans=0.1 +2024-08-30 16:08:58,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=229733.33333333334, ans=0.125 +2024-08-30 16:09:38,058 INFO [train.py:1114] (2/4) Epoch 18, batch 800, loss[loss=0.1779, simple_loss=0.2457, pruned_loss=0.03961, ctc_loss=0.07702, over 19430.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.273, pruned_loss=0.05076, ctc_loss=0.09554, over 3796776.82 frames. 
], batch size: 48, lr: 8.37e-03, grad_scale: 32.0 +2024-08-30 16:09:55,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=229946.66666666666, ans=0.125 +2024-08-30 16:09:56,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=229946.66666666666, ans=0.125 +2024-08-30 16:09:57,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=229946.66666666666, ans=0.125 +2024-08-30 16:10:01,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=230000.0, ans=0.2 +2024-08-30 16:10:19,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230053.33333333334, ans=0.125 +2024-08-30 16:10:24,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230106.66666666666, ans=0.125 +2024-08-30 16:11:34,977 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.744e+02 1.950e+02 2.451e+02 4.139e+02, threshold=3.901e+02, percent-clipped=0.0 +2024-08-30 16:11:43,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=230160.0, ans=0.0 +2024-08-30 16:11:47,909 INFO [train.py:1114] (2/4) Epoch 18, batch 850, loss[loss=0.2309, simple_loss=0.3038, pruned_loss=0.05721, ctc_loss=0.1087, over 19630.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2727, pruned_loss=0.05071, ctc_loss=0.09548, over 3815628.64 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-30 16:11:53,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=230213.33333333334, ans=0.0 +2024-08-30 16:12:09,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=230266.66666666666, ans=0.125 +2024-08-30 16:12:09,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=230266.66666666666, ans=15.0 +2024-08-30 16:12:12,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=230266.66666666666, ans=0.0 +2024-08-30 16:12:44,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=230373.33333333334, ans=0.07 +2024-08-30 16:12:57,696 INFO [train.py:1114] (2/4) Epoch 18, batch 900, loss[loss=0.1953, simple_loss=0.2542, pruned_loss=0.0494, ctc_loss=0.09432, over 19406.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2731, pruned_loss=0.05114, ctc_loss=0.09621, over 3819241.07 frames. 
], batch size: 48, lr: 8.36e-03, grad_scale: 32.0 +2024-08-30 16:12:58,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=230480.0, ans=0.5 +2024-08-30 16:13:02,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230480.0, ans=0.125 +2024-08-30 16:13:38,084 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:13:40,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=230640.0, ans=0.0 +2024-08-30 16:13:47,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=230693.33333333334, ans=0.09899494936611666 +2024-08-30 16:13:49,528 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.771e+02 2.097e+02 2.541e+02 3.279e+02, threshold=4.195e+02, percent-clipped=1.0 +2024-08-30 16:13:56,603 INFO [train.py:1114] (2/4) Epoch 18, batch 950, loss[loss=0.1845, simple_loss=0.2518, pruned_loss=0.04331, ctc_loss=0.07649, over 19504.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2729, pruned_loss=0.05101, ctc_loss=0.09592, over 3821614.48 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-30 16:15:41,762 INFO [train.py:1114] (2/4) Epoch 18, batch 1000, loss[loss=0.2103, simple_loss=0.2829, pruned_loss=0.04947, ctc_loss=0.09696, over 19859.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.274, pruned_loss=0.05153, ctc_loss=0.09694, over 3816720.59 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-30 16:15:50,434 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:15:51,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=231013.33333333334, ans=0.125 +2024-08-30 16:15:53,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=231066.66666666666, ans=0.125 +2024-08-30 16:16:03,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.64 vs. limit=15.0 +2024-08-30 16:16:06,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=231120.0, ans=0.125 +2024-08-30 16:16:19,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231120.0, ans=0.125 +2024-08-30 16:16:30,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.68 vs. limit=15.0 +2024-08-30 16:16:34,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.17 vs. limit=15.0 +2024-08-30 16:16:46,153 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.681e+02 1.935e+02 2.141e+02 3.468e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-30 16:16:53,184 INFO [train.py:1114] (2/4) Epoch 18, batch 1050, loss[loss=0.22, simple_loss=0.2942, pruned_loss=0.05356, ctc_loss=0.09691, over 19821.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2734, pruned_loss=0.0513, ctc_loss=0.09659, over 3823860.40 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-30 16:16:54,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=231280.0, ans=0.0 +2024-08-30 16:16:54,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=231280.0, ans=0.125 +2024-08-30 16:17:01,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=231280.0, ans=0.2 +2024-08-30 16:17:04,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=231280.0, ans=10.0 +2024-08-30 16:17:12,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231333.33333333334, ans=0.125 +2024-08-30 16:18:00,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=231386.66666666666, ans=0.125 +2024-08-30 16:18:12,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=231440.0, ans=0.2 +2024-08-30 16:18:29,881 INFO [train.py:1114] (2/4) Epoch 18, batch 1100, loss[loss=0.2055, simple_loss=0.2764, pruned_loss=0.04901, ctc_loss=0.09142, over 19597.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2732, pruned_loss=0.05097, ctc_loss=0.0958, over 3831830.36 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0 +2024-08-30 16:18:43,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231600.0, ans=0.125 +2024-08-30 16:18:54,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=231653.33333333334, ans=0.125 +2024-08-30 16:18:57,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.16 vs. limit=22.5 +2024-08-30 16:19:24,182 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.668e+02 1.884e+02 2.263e+02 3.606e+02, threshold=3.767e+02, percent-clipped=0.0 +2024-08-30 16:19:52,622 INFO [train.py:1114] (2/4) Epoch 18, batch 1150, loss[loss=0.1914, simple_loss=0.2623, pruned_loss=0.04286, ctc_loss=0.08684, over 19594.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2727, pruned_loss=0.05081, ctc_loss=0.09569, over 3830669.25 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0 +2024-08-30 16:19:53,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0 +2024-08-30 16:20:02,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231813.33333333334, ans=0.125 +2024-08-30 16:20:04,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.66 vs. 
limit=15.0 +2024-08-30 16:22:09,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=231866.66666666666, ans=0.125 +2024-08-30 16:22:12,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=231866.66666666666, ans=0.09899494936611666 +2024-08-30 16:22:13,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=231866.66666666666, ans=0.125 +2024-08-30 16:22:19,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.96 vs. limit=15.0 +2024-08-30 16:22:31,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=231973.33333333334, ans=0.0 +2024-08-30 16:22:34,151 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=8.129e-03 +2024-08-30 16:22:41,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=231973.33333333334, ans=0.125 +2024-08-30 16:22:43,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=232026.66666666666, ans=0.0 +2024-08-30 16:22:56,523 INFO [train.py:1114] (2/4) Epoch 18, batch 1200, loss[loss=0.2189, simple_loss=0.2844, pruned_loss=0.05578, ctc_loss=0.1045, over 19844.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2738, pruned_loss=0.05148, ctc_loss=0.09682, over 3825491.91 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-30 16:23:00,085 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:23:10,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=232133.33333333334, ans=0.0 +2024-08-30 16:23:17,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=232186.66666666666, ans=0.125 +2024-08-30 16:23:21,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-08-30 16:23:31,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232240.0, ans=0.1 +2024-08-30 16:23:33,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232240.0, ans=0.1 +2024-08-30 16:23:43,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=12.0 +2024-08-30 16:23:45,952 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.656e+02 1.841e+02 2.164e+02 3.391e+02, threshold=3.682e+02, percent-clipped=0.0 +2024-08-30 16:23:48,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232293.33333333334, ans=0.125 +2024-08-30 16:23:52,937 INFO [train.py:1114] (2/4) Epoch 18, batch 1250, loss[loss=0.219, simple_loss=0.2868, pruned_loss=0.0558, ctc_loss=0.09885, over 19524.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2745, pruned_loss=0.05141, ctc_loss=0.09652, over 3842929.75 frames. 
], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-30 16:24:04,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=232400.0, ans=0.025 +2024-08-30 16:24:04,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.51 vs. limit=22.5 +2024-08-30 16:25:43,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=232560.0, ans=0.2 +2024-08-30 16:25:44,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232560.0, ans=0.1 +2024-08-30 16:25:45,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=232560.0, ans=0.0 +2024-08-30 16:25:53,661 INFO [train.py:1114] (2/4) Epoch 18, batch 1300, loss[loss=0.2509, simple_loss=0.3043, pruned_loss=0.07282, ctc_loss=0.1296, over 18811.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2741, pruned_loss=0.05135, ctc_loss=0.09634, over 3845733.83 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0 +2024-08-30 16:32:11,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.86 vs. limit=12.0 +2024-08-30 16:32:19,319 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 16:32:29,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=232720.0, ans=0.0 +2024-08-30 16:32:29,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=232720.0, ans=0.2 +2024-08-30 16:32:37,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232720.0, ans=0.125 +2024-08-30 16:45:23,074 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.785e+02 2.170e+02 2.759e+02 4.331e+02, threshold=4.339e+02, percent-clipped=5.0 +2024-08-30 16:54:23,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232826.66666666666, ans=0.1 +2024-08-30 16:57:13,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=232826.66666666666, ans=0.0 +2024-08-30 17:02:45,880 INFO [train.py:1114] (2/4) Epoch 18, batch 1350, loss[loss=0.215, simple_loss=0.2813, pruned_loss=0.05396, ctc_loss=0.1019, over 19781.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2734, pruned_loss=0.05107, ctc_loss=0.09581, over 3856639.95 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 32.0 +2024-08-30 17:02:46,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.68 vs. limit=15.0 +2024-08-30 17:12:54,858 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=46170, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600008 milliseconds before timing out.. 
+2024-08-30 17:12:54,860 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-2.pt +2024-08-30 17:12:57,794 INFO [train.py:1413] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 17:12:57,842 INFO [train.py:1419] (2/4) features shape: torch.Size([56, 1419, 80]) +2024-08-30 17:12:57,845 INFO [train.py:1423] (2/4) num tokens: 4237 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-3 new file mode 100644 index 0000000000000000000000000000000000000000..ee732e72cd8a5afc915a389862e54b1503a59d04 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-12-44-46-3 @@ -0,0 +1,544 @@ +2024-08-30 12:44:46,727 INFO [train.py:1182] (3/4) Training started +2024-08-30 12:44:48,619 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-30 12:44:48,621 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2651.int.cedar.computecanada.ca', 'IP address': '172.16.146.88'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 17, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 12:44:48,622 INFO [train.py:1212] (3/4) About to create model +2024-08-30 12:44:49,314 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-30 12:44:49,315 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-16.pt +2024-08-30 12:45:01,822 INFO [train.py:1231] (3/4) Using DDP +2024-08-30 12:45:06,262 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-30 12:45:06,460 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-30 12:45:06,460 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-30 12:45:06,664 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-30 12:45:06,665 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-30 12:45:08,257 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-30 12:45:08,264 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-30 12:45:08,444 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-30 12:45:08,576 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-30 12:45:08,900 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-30 12:45:08,901 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 12:51:17,011 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-30 12:51:18,481 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 12:53:02,313 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 12:53:03,663 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 12:54:12,743 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 12:54:13,487 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=256, metric=14.79 vs. limit=7.5 +2024-08-30 12:54:14,359 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 12:54:14,401 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-30 12:55:04,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.02 vs. limit=15.0 +2024-08-30 12:55:06,918 INFO [train.py:1114] (3/4) Epoch 17, batch 0, loss[loss=0.2025, simple_loss=0.2612, pruned_loss=0.05238, ctc_loss=0.09774, over 19407.00 frames. 
], tot_loss[loss=0.2025, simple_loss=0.2612, pruned_loss=0.05238, ctc_loss=0.09774, over 19407.00 frames. ], batch size: 48, lr: 8.95e-03, grad_scale: 32.0 +2024-08-30 12:55:06,919 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-30 12:55:27,039 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.7218, 2.1221, 1.5530, 1.9445, 2.1680, 2.2712, 2.1419, 1.6912], + device='cuda:3') +2024-08-30 12:55:31,700 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.185, simple_loss=0.2737, pruned_loss=0.03584, ctc_loss=0.06176, over 944034.00 frames. +2024-08-30 12:55:31,701 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 12:55:33,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.85 vs. limit=22.5 +2024-08-30 13:01:17,379 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:06:19,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.860e+02 2.030e+02 2.233e+02 2.993e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-30 13:09:34,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212618.66666666666, ans=0.0 +2024-08-30 13:09:34,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.94 vs. limit=22.5 +2024-08-30 13:09:43,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=212618.66666666666, ans=0.2 +2024-08-30 13:09:56,388 INFO [train.py:1114] (3/4) Epoch 17, batch 50, loss[loss=0.1951, simple_loss=0.2543, pruned_loss=0.0494, ctc_loss=0.09256, over 19722.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.279, pruned_loss=0.05541, ctc_loss=0.1047, over 844761.44 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:15:57,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-08-30 13:16:03,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-08-30 13:16:42,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212725.33333333334, ans=0.1 +2024-08-30 13:18:57,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212885.33333333334, ans=0.1 +2024-08-30 13:18:58,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=212885.33333333334, ans=0.2 +2024-08-30 13:18:58,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-08-30 13:19:01,151 INFO [train.py:1114] (3/4) Epoch 17, batch 100, loss[loss=0.2147, simple_loss=0.2758, pruned_loss=0.05536, ctc_loss=0.1072, over 19722.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2798, pruned_loss=0.05494, ctc_loss=0.1043, over 1498730.66 frames. 
], batch size: 51, lr: 8.94e-03, grad_scale: 32.0 +2024-08-30 13:19:03,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=212938.66666666666, ans=0.2 +2024-08-30 13:19:06,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.79 vs. limit=15.0 +2024-08-30 13:19:23,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=212992.0, ans=0.09899494936611666 +2024-08-30 13:20:53,595 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:22:02,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=213045.33333333334, ans=0.125 +2024-08-30 13:23:01,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.02 vs. limit=15.0 +2024-08-30 13:23:11,081 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.706e+02 1.953e+02 2.287e+02 3.713e+02, threshold=3.906e+02, percent-clipped=0.0 +2024-08-30 13:23:30,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=213098.66666666666, ans=0.5 +2024-08-30 13:24:10,834 INFO [train.py:1114] (3/4) Epoch 17, batch 150, loss[loss=0.1641, simple_loss=0.2363, pruned_loss=0.03382, ctc_loss=0.0607, over 19720.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2778, pruned_loss=0.05387, ctc_loss=0.1024, over 2026289.94 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:27:25,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213365.33333333334, ans=0.1 +2024-08-30 13:27:53,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=213418.66666666666, ans=0.0 +2024-08-30 13:27:56,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-30 13:28:03,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=213418.66666666666, ans=0.0 +2024-08-30 13:28:05,727 INFO [train.py:1114] (3/4) Epoch 17, batch 200, loss[loss=0.2245, simple_loss=0.2875, pruned_loss=0.05906, ctc_loss=0.1086, over 18235.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2767, pruned_loss=0.05327, ctc_loss=0.1007, over 2433635.33 frames. 
], batch size: 85, lr: 8.93e-03, grad_scale: 32.0 +2024-08-30 13:28:07,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=213472.0, ans=0.125 +2024-08-30 13:28:15,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=213472.0, ans=0.2 +2024-08-30 13:28:24,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=213525.33333333334, ans=0.07 +2024-08-30 13:28:30,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=213578.66666666666, ans=0.125 +2024-08-30 13:28:40,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.384e+02 1.731e+02 1.992e+02 2.666e+02 4.093e+02, threshold=3.983e+02, percent-clipped=1.0 +2024-08-30 13:28:57,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213685.33333333334, ans=0.1 +2024-08-30 13:29:07,540 INFO [train.py:1114] (3/4) Epoch 17, batch 250, loss[loss=0.2155, simple_loss=0.2846, pruned_loss=0.05364, ctc_loss=0.0977, over 19400.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2761, pruned_loss=0.0525, ctc_loss=0.09926, over 2755065.21 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:29:14,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0 +2024-08-30 13:29:19,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=213792.0, ans=0.125 +2024-08-30 13:29:39,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=213845.33333333334, ans=0.125 +2024-08-30 13:29:52,906 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 13:30:13,962 INFO [train.py:1114] (3/4) Epoch 17, batch 300, loss[loss=0.2492, simple_loss=0.3034, pruned_loss=0.07158, ctc_loss=0.1296, over 19534.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2753, pruned_loss=0.05207, ctc_loss=0.09817, over 2999463.88 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 32.0 +2024-08-30 13:30:24,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=214058.66666666666, ans=0.0 +2024-08-30 13:30:28,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=214058.66666666666, ans=0.025 +2024-08-30 13:30:50,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=214112.0, ans=0.035 +2024-08-30 13:30:51,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.663e+02 1.872e+02 2.298e+02 3.693e+02, threshold=3.744e+02, percent-clipped=0.0 +2024-08-30 13:31:02,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=15.0 +2024-08-30 13:31:25,600 INFO [train.py:1114] (3/4) Epoch 17, batch 350, loss[loss=0.1712, simple_loss=0.2415, pruned_loss=0.03629, ctc_loss=0.07087, over 19779.00 frames. 
], tot_loss[loss=0.2104, simple_loss=0.2761, pruned_loss=0.05256, ctc_loss=0.09883, over 3190999.16 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:31:29,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214272.0, ans=0.1 +2024-08-30 13:31:38,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214325.33333333334, ans=0.125 +2024-08-30 13:31:44,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214325.33333333334, ans=0.0 +2024-08-30 13:32:07,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214432.0, ans=0.125 +2024-08-30 13:32:18,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=214485.33333333334, ans=0.025 +2024-08-30 13:32:24,586 INFO [train.py:1114] (3/4) Epoch 17, batch 400, loss[loss=0.2203, simple_loss=0.2825, pruned_loss=0.05707, ctc_loss=0.1099, over 19500.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2754, pruned_loss=0.05218, ctc_loss=0.09806, over 3343531.34 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-30 13:32:35,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214592.0, ans=0.1 +2024-08-30 13:32:56,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214645.33333333334, ans=0.1 +2024-08-30 13:32:56,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-30 13:33:01,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.238e+02 1.640e+02 1.901e+02 2.325e+02 4.074e+02, threshold=3.801e+02, percent-clipped=1.0 +2024-08-30 13:33:08,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214698.66666666666, ans=0.125 +2024-08-30 13:33:12,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=214698.66666666666, ans=0.0 +2024-08-30 13:33:26,237 INFO [train.py:1114] (3/4) Epoch 17, batch 450, loss[loss=0.2071, simple_loss=0.2826, pruned_loss=0.04749, ctc_loss=0.09185, over 19614.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2758, pruned_loss=0.05212, ctc_loss=0.09815, over 3451227.36 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:33:30,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-08-30 13:33:36,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=214805.33333333334, ans=0.125 +2024-08-30 13:38:31,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=12.0 +2024-08-30 13:38:32,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. 
limit=15.0 +2024-08-30 13:38:33,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=214912.0, ans=0.125 +2024-08-30 13:38:33,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214912.0, ans=0.1 +2024-08-30 13:43:52,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215018.66666666666, ans=0.125 +2024-08-30 13:44:02,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.56 vs. limit=10.0 +2024-08-30 13:44:05,799 INFO [train.py:1114] (3/4) Epoch 17, batch 500, loss[loss=0.2222, simple_loss=0.2917, pruned_loss=0.05627, ctc_loss=0.1004, over 19684.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2753, pruned_loss=0.05193, ctc_loss=0.09801, over 3546818.49 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-30 13:44:53,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-08-30 13:44:58,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215178.66666666666, ans=0.1 +2024-08-30 13:45:06,443 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.780e+02 2.026e+02 2.589e+02 4.105e+02, threshold=4.052e+02, percent-clipped=2.0 +2024-08-30 13:45:31,472 INFO [train.py:1114] (3/4) Epoch 17, batch 550, loss[loss=0.2148, simple_loss=0.2835, pruned_loss=0.05417, ctc_loss=0.09472, over 19302.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2751, pruned_loss=0.05189, ctc_loss=0.09777, over 3608612.57 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 32.0 +2024-08-30 13:45:32,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=215338.66666666666, ans=0.025 +2024-08-30 13:45:40,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215338.66666666666, ans=0.1 +2024-08-30 13:45:50,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=215392.0, ans=0.0 +2024-08-30 13:46:28,907 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-08-30 13:46:30,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215552.0, ans=0.125 +2024-08-30 13:47:20,209 INFO [train.py:1114] (3/4) Epoch 17, batch 600, loss[loss=0.2494, simple_loss=0.2986, pruned_loss=0.07367, ctc_loss=0.132, over 19313.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2753, pruned_loss=0.05217, ctc_loss=0.09837, over 3666177.61 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 64.0 +2024-08-30 13:47:26,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.54 vs. 
limit=15.0 +2024-08-30 13:47:31,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215658.66666666666, ans=0.1 +2024-08-30 13:47:36,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=215658.66666666666, ans=0.0 +2024-08-30 13:47:40,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.46 vs. limit=15.0 +2024-08-30 13:47:49,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215712.0, ans=0.125 +2024-08-30 13:47:53,783 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.647e+02 1.940e+02 2.383e+02 4.124e+02, threshold=3.879e+02, percent-clipped=1.0 +2024-08-30 13:48:17,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=22.5 +2024-08-30 13:48:27,129 INFO [train.py:1114] (3/4) Epoch 17, batch 650, loss[loss=0.2217, simple_loss=0.2785, pruned_loss=0.05982, ctc_loss=0.1132, over 19764.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2748, pruned_loss=0.05205, ctc_loss=0.09814, over 3716226.04 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 32.0 +2024-08-30 13:50:13,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215872.0, ans=0.125 +2024-08-30 13:51:32,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=215925.33333333334, ans=0.125 +2024-08-30 13:51:48,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.18 vs. limit=10.0 +2024-08-30 13:51:58,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=12.0 +2024-08-30 13:52:27,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=216032.0, ans=0.2 +2024-08-30 13:52:37,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=216032.0, ans=0.0 +2024-08-30 14:06:19,755 INFO [train.py:1114] (3/4) Epoch 17, batch 700, loss[loss=0.1851, simple_loss=0.2597, pruned_loss=0.04017, ctc_loss=0.07546, over 19711.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2752, pruned_loss=0.05213, ctc_loss=0.09815, over 3749092.86 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:06:38,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216138.66666666666, ans=0.125 +2024-08-30 14:12:13,783 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.374e+02 1.667e+02 2.137e+02 2.601e+02 4.284e+02, threshold=4.274e+02, percent-clipped=4.0 +2024-08-30 14:17:08,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=216352.0, ans=0.125 +2024-08-30 14:17:08,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.58 vs. 
limit=15.0 +2024-08-30 14:17:09,906 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:17:35,071 INFO [train.py:1114] (3/4) Epoch 17, batch 750, loss[loss=0.2041, simple_loss=0.2805, pruned_loss=0.04528, ctc_loss=0.09269, over 19502.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2748, pruned_loss=0.05193, ctc_loss=0.09786, over 3775151.58 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 32.0 +2024-08-30 14:17:39,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=216405.33333333334, ans=0.125 +2024-08-30 14:17:58,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=216458.66666666666, ans=0.125 +2024-08-30 14:18:08,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=216458.66666666666, ans=0.07 +2024-08-30 14:19:12,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=216565.33333333334, ans=0.2 +2024-08-30 14:19:42,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=216565.33333333334, ans=0.125 +2024-08-30 14:19:46,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216565.33333333334, ans=0.1 +2024-08-30 14:20:37,989 INFO [train.py:1114] (3/4) Epoch 17, batch 800, loss[loss=0.1907, simple_loss=0.256, pruned_loss=0.04604, ctc_loss=0.08348, over 19408.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.274, pruned_loss=0.05161, ctc_loss=0.09728, over 3796424.97 frames. ], batch size: 48, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:20:44,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.02 vs. limit=6.0 +2024-08-30 14:22:02,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=216672.0, ans=0.0 +2024-08-30 14:27:06,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216672.0, ans=0.1 +2024-08-30 14:29:36,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216725.33333333334, ans=0.1 +2024-08-30 14:31:17,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=216778.66666666666, ans=0.125 +2024-08-30 14:31:25,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=216778.66666666666, ans=0.125 +2024-08-30 14:31:32,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.715e+02 2.071e+02 2.537e+02 3.967e+02, threshold=4.143e+02, percent-clipped=0.0 +2024-08-30 14:32:34,219 INFO [train.py:1114] (3/4) Epoch 17, batch 850, loss[loss=0.1969, simple_loss=0.2707, pruned_loss=0.04466, ctc_loss=0.08437, over 19644.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2742, pruned_loss=0.05176, ctc_loss=0.09755, over 3815832.83 frames. 
], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-30 14:32:38,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=216938.66666666666, ans=0.0 +2024-08-30 14:32:39,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216938.66666666666, ans=0.1 +2024-08-30 14:32:54,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216992.0, ans=0.1 +2024-08-30 14:32:58,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.32 vs. limit=15.0 +2024-08-30 14:33:06,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0 +2024-08-30 14:33:13,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217045.33333333334, ans=0.1 +2024-08-30 14:33:20,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0 +2024-08-30 14:33:27,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=217152.0, ans=0.025 +2024-08-30 14:34:09,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0 +2024-08-30 14:34:26,337 INFO [train.py:1114] (3/4) Epoch 17, batch 900, loss[loss=0.2032, simple_loss=0.2626, pruned_loss=0.05264, ctc_loss=0.09609, over 19787.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2741, pruned_loss=0.05181, ctc_loss=0.09747, over 3820435.20 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:36:02,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=12.0 +2024-08-30 14:36:51,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=217312.0, ans=0.125 +2024-08-30 14:36:59,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.623e+02 1.810e+02 2.233e+02 4.039e+02, threshold=3.621e+02, percent-clipped=0.0 +2024-08-30 14:37:24,535 INFO [train.py:1114] (3/4) Epoch 17, batch 950, loss[loss=0.1661, simple_loss=0.24, pruned_loss=0.03312, ctc_loss=0.06503, over 19513.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2747, pruned_loss=0.05215, ctc_loss=0.09795, over 3821540.41 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-30 14:39:04,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217685.33333333334, ans=0.1 +2024-08-30 14:39:05,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217685.33333333334, ans=0.1 +2024-08-30 14:39:16,467 INFO [train.py:1114] (3/4) Epoch 17, batch 1000, loss[loss=0.2193, simple_loss=0.2832, pruned_loss=0.05562, ctc_loss=0.1107, over 19848.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2752, pruned_loss=0.05241, ctc_loss=0.09846, over 3816977.63 frames. 
], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:39:25,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217738.66666666666, ans=0.125 +2024-08-30 14:39:26,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=217738.66666666666, ans=0.2 +2024-08-30 14:39:43,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=217845.33333333334, ans=0.035 +2024-08-30 14:39:52,694 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.648e+02 1.905e+02 2.181e+02 3.196e+02, threshold=3.810e+02, percent-clipped=0.0 +2024-08-30 14:40:20,745 INFO [train.py:1114] (3/4) Epoch 17, batch 1050, loss[loss=0.2335, simple_loss=0.2936, pruned_loss=0.06267, ctc_loss=0.1199, over 19835.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2753, pruned_loss=0.05267, ctc_loss=0.09897, over 3823093.81 frames. ], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-30 14:40:26,527 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:41:14,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=218218.66666666666, ans=0.0 +2024-08-30 14:41:24,726 INFO [train.py:1114] (3/4) Epoch 17, batch 1100, loss[loss=0.1822, simple_loss=0.2539, pruned_loss=0.03999, ctc_loss=0.07604, over 19602.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2746, pruned_loss=0.05221, ctc_loss=0.0981, over 3829569.24 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:41:33,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218272.0, ans=0.125 +2024-08-30 14:41:35,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=218325.33333333334, ans=0.2 +2024-08-30 14:41:36,707 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:41:41,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-30 14:41:41,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=218325.33333333334, ans=0.0 +2024-08-30 14:41:44,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.17 vs. limit=15.0 +2024-08-30 14:42:15,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218378.66666666666, ans=0.125 +2024-08-30 14:42:23,464 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.634e+02 1.909e+02 2.238e+02 3.833e+02, threshold=3.817e+02, percent-clipped=1.0 +2024-08-30 14:42:58,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=218432.0, ans=0.125 +2024-08-30 14:43:15,268 INFO [train.py:1114] (3/4) Epoch 17, batch 1150, loss[loss=0.1917, simple_loss=0.2642, pruned_loss=0.04377, ctc_loss=0.07898, over 19592.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2752, pruned_loss=0.05268, ctc_loss=0.09906, over 3829957.78 frames. 
], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-30 14:43:30,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=218592.0, ans=0.035 +2024-08-30 14:43:33,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=218592.0, ans=0.0 +2024-08-30 14:43:46,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=218645.33333333334, ans=0.0 +2024-08-30 14:43:51,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=218645.33333333334, ans=0.025 +2024-08-30 14:43:57,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0 +2024-08-30 14:44:01,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=218698.66666666666, ans=0.0 +2024-08-30 14:44:20,106 INFO [train.py:1114] (3/4) Epoch 17, batch 1200, loss[loss=0.2099, simple_loss=0.2831, pruned_loss=0.04996, ctc_loss=0.09175, over 19837.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2757, pruned_loss=0.05293, ctc_loss=0.09946, over 3825542.18 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:44:30,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218805.33333333334, ans=0.1 +2024-08-30 14:44:32,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=218858.66666666666, ans=0.1 +2024-08-30 14:45:49,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=218858.66666666666, ans=0.0 +2024-08-30 14:46:08,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.467e+02 1.734e+02 1.937e+02 2.235e+02 3.279e+02, threshold=3.874e+02, percent-clipped=0.0 +2024-08-30 14:46:18,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=219018.66666666666, ans=0.0 +2024-08-30 14:46:24,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.40 vs. limit=10.0 +2024-08-30 14:46:31,312 INFO [train.py:1114] (3/4) Epoch 17, batch 1250, loss[loss=0.2528, simple_loss=0.3054, pruned_loss=0.07248, ctc_loss=0.138, over 19542.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2759, pruned_loss=0.05268, ctc_loss=0.0989, over 3844081.90 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-30 14:48:32,145 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:48:50,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219338.66666666666, ans=0.1 +2024-08-30 14:48:52,362 INFO [train.py:1114] (3/4) Epoch 17, batch 1300, loss[loss=0.2484, simple_loss=0.3045, pruned_loss=0.07053, ctc_loss=0.1283, over 18817.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2752, pruned_loss=0.05241, ctc_loss=0.09848, over 3846339.73 frames. 
], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:48:57,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219338.66666666666, ans=0.1 +2024-08-30 14:49:19,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=12.0 +2024-08-30 14:49:29,483 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.750e+02 2.054e+02 2.564e+02 3.826e+02, threshold=4.108e+02, percent-clipped=0.0 +2024-08-30 14:49:45,255 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:50:02,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=219552.0, ans=0.125 +2024-08-30 14:50:04,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=219552.0, ans=0.0 +2024-08-30 14:50:08,920 INFO [train.py:1114] (3/4) Epoch 17, batch 1350, loss[loss=0.2055, simple_loss=0.2754, pruned_loss=0.04974, ctc_loss=0.09015, over 19783.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.275, pruned_loss=0.05228, ctc_loss=0.09824, over 3856219.38 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-30 14:50:27,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219658.66666666666, ans=0.125 +2024-08-30 14:50:34,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.95 vs. limit=15.0 +2024-08-30 14:50:59,939 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:51:00,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219818.66666666666, ans=0.1 +2024-08-30 14:51:09,317 INFO [train.py:1114] (3/4) Epoch 17, batch 1400, loss[loss=0.1673, simple_loss=0.2346, pruned_loss=0.03613, ctc_loss=0.06948, over 19664.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2745, pruned_loss=0.05209, ctc_loss=0.09786, over 3863442.84 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:51:27,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=219872.0, ans=0.125 +2024-08-30 14:51:30,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=219872.0, ans=0.0 +2024-08-30 14:52:01,611 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.697e+02 1.910e+02 2.399e+02 4.058e+02, threshold=3.819e+02, percent-clipped=0.0 +2024-08-30 14:52:14,350 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 14:52:15,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=220085.33333333334, ans=0.0 +2024-08-30 14:52:26,358 INFO [train.py:1114] (3/4) Epoch 17, batch 1450, loss[loss=0.2129, simple_loss=0.2813, pruned_loss=0.05308, ctc_loss=0.09589, over 19651.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2755, pruned_loss=0.05268, ctc_loss=0.09899, over 3860840.47 frames. 
], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-30 14:52:43,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=220138.66666666666, ans=0.04949747468305833 +2024-08-30 14:52:54,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=220192.0, ans=0.125 +2024-08-30 14:53:05,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=220192.0, ans=0.125 +2024-08-30 14:54:12,184 INFO [train.py:1114] (3/4) Epoch 17, batch 1500, loss[loss=0.2, simple_loss=0.2789, pruned_loss=0.0434, ctc_loss=0.08561, over 19580.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2754, pruned_loss=0.05228, ctc_loss=0.09825, over 3861253.07 frames. ], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:54:17,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220405.33333333334, ans=0.1 +2024-08-30 14:54:22,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=220405.33333333334, ans=0.2 +2024-08-30 14:54:23,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-08-30 14:54:47,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=220512.0, ans=0.2 +2024-08-30 14:54:51,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. limit=15.0 +2024-08-30 14:54:54,682 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.719e+02 1.906e+02 2.293e+02 3.704e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-30 14:55:00,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-30 14:55:03,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-30 14:55:09,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=220618.66666666666, ans=0.025 +2024-08-30 14:55:11,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=220618.66666666666, ans=0.2 +2024-08-30 14:55:17,131 INFO [train.py:1114] (3/4) Epoch 17, batch 1550, loss[loss=0.2152, simple_loss=0.2827, pruned_loss=0.0535, ctc_loss=0.1017, over 19608.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.275, pruned_loss=0.0523, ctc_loss=0.09848, over 3845897.95 frames. ], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-30 14:55:31,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-08-30 14:55:36,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.90 vs. 
limit=22.5 +2024-08-30 14:56:04,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=220832.0, ans=0.07 +2024-08-30 14:56:10,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.61 vs. limit=15.0 +2024-08-30 14:56:26,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0 +2024-08-30 14:56:27,379 INFO [train.py:1114] (3/4) Epoch 17, batch 1600, loss[loss=0.226, simple_loss=0.2852, pruned_loss=0.05961, ctc_loss=0.119, over 19834.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2749, pruned_loss=0.05217, ctc_loss=0.09838, over 3834852.77 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-30 14:56:35,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220938.66666666666, ans=0.1 +2024-08-30 14:57:23,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=220992.0, ans=0.025 +2024-08-30 14:58:32,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=221045.33333333334, ans=0.07 +2024-08-30 14:58:43,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=221045.33333333334, ans=0.0 +2024-08-30 15:00:47,795 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 1.738e+02 2.160e+02 2.635e+02 3.870e+02, threshold=4.320e+02, percent-clipped=2.0 +2024-08-30 15:00:54,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.91 vs. limit=15.0 +2024-08-30 15:02:48,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=221152.0, ans=0.125 +2024-08-30 15:03:53,977 INFO [train.py:1114] (3/4) Epoch 17, batch 1650, loss[loss=0.1953, simple_loss=0.2701, pruned_loss=0.04329, ctc_loss=0.08511, over 19642.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2749, pruned_loss=0.05225, ctc_loss=0.09832, over 3830114.97 frames. 
], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:05:45,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221258.66666666666, ans=0.1 +2024-08-30 15:05:48,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=221258.66666666666, ans=0.05 +2024-08-30 15:07:23,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=221312.0, ans=0.05 +2024-08-30 15:07:35,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=221365.33333333334, ans=0.125 +2024-08-30 15:07:39,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=221365.33333333334, ans=0.2 +2024-08-30 15:07:43,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=221365.33333333334, ans=0.125 +2024-08-30 15:07:45,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=221365.33333333334, ans=0.1 +2024-08-30 15:07:50,908 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.49 vs. limit=15.0 +2024-08-30 15:08:00,827 INFO [train.py:1114] (3/4) Epoch 17, batch 1700, loss[loss=0.1962, simple_loss=0.2516, pruned_loss=0.05096, ctc_loss=0.09716, over 19691.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2753, pruned_loss=0.05224, ctc_loss=0.09822, over 3844046.33 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-30 15:08:16,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.18 vs. limit=10.0 +2024-08-30 15:08:31,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=221578.66666666666, ans=0.125 +2024-08-30 15:08:33,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.91 vs. limit=22.5 +2024-08-30 15:08:36,778 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.427e+02 1.717e+02 1.998e+02 2.422e+02 4.059e+02, threshold=3.996e+02, percent-clipped=0.0 +2024-08-30 15:09:33,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=221632.0, ans=0.0 +2024-08-30 15:09:35,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221632.0, ans=0.1 +2024-08-30 15:09:43,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=221685.33333333334, ans=0.125 +2024-08-30 15:09:50,037 INFO [train.py:1114] (3/4) Epoch 17, batch 1750, loss[loss=0.1956, simple_loss=0.2525, pruned_loss=0.04989, ctc_loss=0.09737, over 19662.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2748, pruned_loss=0.05211, ctc_loss=0.09791, over 3849771.43 frames. 
], batch size: 45, lr: 8.76e-03, grad_scale: 32.0
+2024-08-30 15:09:56,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.45 vs. limit=15.0
+2024-08-30 15:09:56,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=221738.66666666666, ans=0.125
+2024-08-30 15:10:11,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=221845.33333333334, ans=0.0
+2024-08-30 15:10:21,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221845.33333333334, ans=0.1
+2024-08-30 15:10:43,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221952.0, ans=0.1
+2024-08-30 15:10:44,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=221952.0, ans=0.125
+2024-08-30 15:10:46,091 INFO [train.py:1114] (3/4) Epoch 17, batch 1800, loss[loss=0.229, simple_loss=0.2949, pruned_loss=0.0578, ctc_loss=0.1185, over 19626.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2748, pruned_loss=0.05219, ctc_loss=0.09795, over 3852200.51 frames. ], batch size: 55, lr: 8.76e-03, grad_scale: 32.0
+2024-08-30 15:11:06,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222058.66666666666, ans=0.1
+2024-08-30 15:11:11,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=222112.0, ans=0.025
+2024-08-30 15:11:23,235 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.773e+02 2.029e+02 2.607e+02 4.351e+02, threshold=4.057e+02, percent-clipped=1.0
+2024-08-30 15:11:29,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=222165.33333333334, ans=0.125
+2024-08-30 15:11:43,557 INFO [train.py:1114] (3/4) Epoch 17, batch 1850, loss[loss=0.2157, simple_loss=0.2859, pruned_loss=0.05364, ctc_loss=0.09557, over 19583.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2746, pruned_loss=0.05201, ctc_loss=0.09777, over 3856199.12 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0
+2024-08-30 15:11:50,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.34 vs. limit=15.0
+2024-08-30 15:12:06,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222378.66666666666, ans=0.1
+2024-08-30 15:12:09,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=222378.66666666666, ans=0.0
+2024-08-30 15:12:10,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=15.0
+2024-08-30 15:12:18,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222432.0, ans=0.125
+2024-08-30 15:12:39,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222538.66666666666, ans=0.125
+2024-08-30 15:12:40,699 INFO [train.py:1114] (3/4) Epoch 17, batch 1900, loss[loss=0.2126, simple_loss=0.2907, pruned_loss=0.04913, ctc_loss=0.09088, over 19666.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.275, pruned_loss=0.05206, ctc_loss=0.0979, over 3861529.04 frames. ], batch size: 59, lr: 8.75e-03, grad_scale: 16.0
+2024-08-30 15:12:41,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222538.66666666666, ans=0.125
+2024-08-30 15:12:47,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=222538.66666666666, ans=0.125
+2024-08-30 15:13:07,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222645.33333333334, ans=0.125
+2024-08-30 15:13:18,233 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.682e+02 1.950e+02 2.328e+02 4.923e+02, threshold=3.901e+02, percent-clipped=3.0
+2024-08-30 15:13:20,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=222698.66666666666, ans=15.0
+2024-08-30 15:13:25,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=222698.66666666666, ans=0.025
+2024-08-30 15:13:36,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222752.0, ans=0.1
+2024-08-30 15:13:38,420 INFO [train.py:1114] (3/4) Epoch 17, batch 1950, loss[loss=0.1951, simple_loss=0.2646, pruned_loss=0.04541, ctc_loss=0.08691, over 19590.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2763, pruned_loss=0.0524, ctc_loss=0.09864, over 3870508.85 frames. ], batch size: 52, lr: 8.74e-03, grad_scale: 16.0
+2024-08-30 15:14:29,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222858.66666666666, ans=0.1
+2024-08-30 15:14:38,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222912.0, ans=0.1
+2024-08-30 15:14:45,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=222912.0, ans=0.125
+2024-08-30 15:14:49,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=222912.0, ans=0.04949747468305833
+2024-08-30 15:14:53,090 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.47 vs. limit=22.5
+2024-08-30 15:15:27,049 INFO [train.py:1114] (3/4) Epoch 17, batch 2000, loss[loss=0.1798, simple_loss=0.2432, pruned_loss=0.0425, ctc_loss=0.07824, over 19678.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2769, pruned_loss=0.05275, ctc_loss=0.09954, over 3855534.91 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0
+2024-08-30 15:15:29,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=223072.0, ans=0.0
+2024-08-30 15:16:03,254 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.692e+02 2.099e+02 2.435e+02 3.373e+02, threshold=4.199e+02, percent-clipped=0.0
+2024-08-30 15:16:08,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=223232.0, ans=0.0
+2024-08-30 15:16:12,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=223285.33333333334, ans=0.2
+2024-08-30 15:16:13,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=223285.33333333334, ans=0.125
+2024-08-30 15:16:42,838 INFO [train.py:1114] (3/4) Epoch 17, batch 2050, loss[loss=0.1936, simple_loss=0.2544, pruned_loss=0.0485, ctc_loss=0.08957, over 19740.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2762, pruned_loss=0.05274, ctc_loss=0.09979, over 3852671.33 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0
+2024-08-30 15:18:41,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223445.33333333334, ans=0.125
+2024-08-30 15:19:49,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223445.33333333334, ans=0.125
+2024-08-30 15:19:54,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223498.66666666666, ans=0.125
+2024-08-30 15:20:15,747 INFO [train.py:1114] (3/4) Epoch 17, batch 2100, loss[loss=0.1886, simple_loss=0.2633, pruned_loss=0.04156, ctc_loss=0.07706, over 19778.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2756, pruned_loss=0.05228, ctc_loss=0.09891, over 3859898.41 frames. ], batch size: 54, lr: 8.73e-03, grad_scale: 32.0
+2024-08-30 15:20:46,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=223605.33333333334, ans=0.0
+2024-08-30 15:20:59,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=223658.66666666666, ans=0.125
+2024-08-30 15:21:23,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223765.33333333334, ans=0.125
+2024-08-30 15:21:41,979 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.235e+02 1.693e+02 2.019e+02 2.546e+02 6.032e+02, threshold=4.039e+02, percent-clipped=5.0
+2024-08-30 15:21:46,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=223765.33333333334, ans=0.0
+2024-08-30 15:21:54,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=223818.66666666666, ans=0.2
+2024-08-30 15:21:54,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=15.0
+2024-08-30 15:22:02,825 INFO [train.py:1114] (3/4) Epoch 17, batch 2150, loss[loss=0.1748, simple_loss=0.2511, pruned_loss=0.03574, ctc_loss=0.06774, over 19843.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2748, pruned_loss=0.05186, ctc_loss=0.09781, over 3871519.46 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0
+2024-08-30 15:22:24,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.12 vs. limit=10.0
+2024-08-30 15:22:24,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.99 vs. limit=15.0
+2024-08-30 15:22:24,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=223978.66666666666, ans=0.125
+2024-08-30 15:22:58,283 INFO [train.py:1114] (3/4) Epoch 17, batch 2200, loss[loss=0.1982, simple_loss=0.2789, pruned_loss=0.04246, ctc_loss=0.08161, over 19578.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2744, pruned_loss=0.05161, ctc_loss=0.09744, over 3870160.92 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0
+2024-08-30 15:23:07,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=12.0
+2024-08-30 15:23:08,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=224138.66666666666, ans=0.0
+2024-08-30 15:23:36,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=224245.33333333334, ans=0.035
+2024-08-30 15:23:39,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=224245.33333333334, ans=0.025
+2024-08-30 15:23:53,333 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.675e+02 1.986e+02 2.371e+02 4.244e+02, threshold=3.972e+02, percent-clipped=2.0
+2024-08-30 15:24:13,607 INFO [train.py:1114] (3/4) Epoch 17, batch 2250, loss[loss=0.2248, simple_loss=0.2938, pruned_loss=0.05596, ctc_loss=0.1098, over 19621.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2745, pruned_loss=0.05146, ctc_loss=0.09708, over 3869960.68 frames. ], batch size: 55, lr: 8.71e-03, grad_scale: 32.0
+2024-08-30 15:25:41,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0
+2024-08-30 15:25:45,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. limit=12.0
+2024-08-30 15:25:48,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=224512.0, ans=0.5
+2024-08-30 15:25:51,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=224512.0, ans=0.0
+2024-08-30 15:27:05,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=224618.66666666666, ans=0.0
+2024-08-30 15:27:15,737 INFO [train.py:1114] (3/4) Epoch 17, batch 2300, loss[loss=0.1765, simple_loss=0.2453, pruned_loss=0.0381, ctc_loss=0.07875, over 19502.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2736, pruned_loss=0.05137, ctc_loss=0.09688, over 3863663.04 frames. ], batch size: 49, lr: 8.71e-03, grad_scale: 32.0
+2024-08-30 15:28:39,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=224778.66666666666, ans=0.125
+2024-08-30 15:28:40,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224778.66666666666, ans=0.1
+2024-08-30 15:28:43,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=224832.0, ans=0.5
+2024-08-30 15:28:46,693 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.759e+02 2.126e+02 2.592e+02 4.068e+02, threshold=4.252e+02, percent-clipped=2.0
+2024-08-30 15:28:58,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=224885.33333333334, ans=0.125
+2024-08-30 15:29:48,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=224885.33333333334, ans=0.2
+2024-08-30 15:29:51,077 INFO [train.py:1114] (3/4) Epoch 17, batch 2350, loss[loss=0.2353, simple_loss=0.2964, pruned_loss=0.06359, ctc_loss=0.1175, over 19646.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2737, pruned_loss=0.05161, ctc_loss=0.09724, over 3866110.76 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 32.0
+2024-08-30 15:29:51,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=224938.66666666666, ans=0.2
+2024-08-30 15:29:54,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=224938.66666666666, ans=0.125
+2024-08-30 15:30:01,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.95 vs. limit=15.0
+2024-08-30 15:30:42,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=225045.33333333334, ans=0.05
+2024-08-30 15:30:42,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225045.33333333334, ans=0.125
+2024-08-30 15:32:49,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.18 vs. limit=15.0
+2024-08-30 15:33:11,234 INFO [train.py:1114] (3/4) Epoch 17, batch 2400, loss[loss=0.2491, simple_loss=0.3107, pruned_loss=0.0681, ctc_loss=0.1284, over 19290.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2764, pruned_loss=0.05273, ctc_loss=0.0991, over 3860060.04 frames. ], batch size: 71, lr: 8.70e-03, grad_scale: 32.0
+2024-08-30 15:33:43,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=225312.0, ans=0.125
+2024-08-30 15:33:48,810 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.684e+02 1.880e+02 2.443e+02 3.780e+02, threshold=3.760e+02, percent-clipped=0.0
+2024-08-30 15:33:51,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=225365.33333333334, ans=0.2
+2024-08-30 15:33:51,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=225365.33333333334, ans=0.2
+2024-08-30 15:34:10,333 INFO [train.py:1114] (3/4) Epoch 17, batch 2450, loss[loss=0.2847, simple_loss=0.3147, pruned_loss=0.09183, ctc_loss=0.1777, over 13396.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2798, pruned_loss=0.05528, ctc_loss=0.1041, over 3730534.66 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 16.0
+2024-08-30 15:34:50,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=225525.33333333334, ans=0.2
+2024-08-30 15:34:59,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=225578.66666666666, ans=0.07
+2024-08-30 15:35:06,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=225578.66666666666, ans=0.125
+2024-08-30 15:35:21,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=225632.0, ans=0.125
+2024-08-30 15:35:22,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225632.0, ans=0.1
+2024-08-30 15:38:02,848 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.2024, simple_loss=0.2593, pruned_loss=0.05157, ctc_loss=0.1056, over 19798.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2593, pruned_loss=0.05157, ctc_loss=0.1056, over 19798.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0
+2024-08-30 15:38:02,848 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-30 15:38:18,420 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.7964, 2.0165, 1.7763, 1.9403], device='cuda:3')
+2024-08-30 15:39:31,452 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4950, 3.0552, 2.0864, 2.6519], device='cuda:3')
+2024-08-30 15:39:34,936 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.0364, ctc_loss=0.06401, over 944034.00 frames.
+2024-08-30 15:39:34,938 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13606MB
+2024-08-30 15:39:39,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225680.0, ans=0.125
+2024-08-30 15:39:51,494 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.81 vs. limit=15.0
+2024-08-30 15:40:21,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.72 vs. limit=15.0
+2024-08-30 15:40:26,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=225786.66666666666, ans=0.125
+2024-08-30 15:40:35,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=225840.0, ans=0.04949747468305833
+2024-08-30 15:40:56,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=225893.33333333334, ans=0.125
+2024-08-30 15:40:58,093 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.919e+02 2.092e+02 2.421e+02 5.568e+02, threshold=4.185e+02, percent-clipped=4.0
+2024-08-30 15:41:04,970 INFO [train.py:1114] (3/4) Epoch 18, batch 50, loss[loss=0.1739, simple_loss=0.2429, pruned_loss=0.0384, ctc_loss=0.07026, over 19711.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2767, pruned_loss=0.05275, ctc_loss=0.1009, over 844773.76 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0
+2024-08-30 15:42:05,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=226053.33333333334, ans=0.125
+2024-08-30 15:42:27,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226160.0, ans=0.1
+2024-08-30 15:42:28,849 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.09 vs. limit=15.0
+2024-08-30 15:44:01,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226160.0, ans=0.125
+2024-08-30 15:44:07,543 INFO [train.py:1114] (3/4) Epoch 18, batch 100, loss[loss=0.186, simple_loss=0.2589, pruned_loss=0.04085, ctc_loss=0.07841, over 19730.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2782, pruned_loss=0.05259, ctc_loss=0.1005, over 1499130.94 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0
+2024-08-30 15:44:10,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226213.33333333334, ans=0.125
+2024-08-30 15:44:16,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=226213.33333333334, ans=0.2
+2024-08-30 15:44:33,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226320.0, ans=0.125
+2024-08-30 15:44:40,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=226320.0, ans=0.125
+2024-08-30 15:44:43,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=226320.0, ans=0.07
+2024-08-30 15:45:01,912 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.711e+02 1.973e+02 2.383e+02 4.146e+02, threshold=3.946e+02, percent-clipped=0.0
+2024-08-30 15:45:03,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=15.0
+2024-08-30 15:45:10,544 INFO [train.py:1114] (3/4) Epoch 18, batch 150, loss[loss=0.1807, simple_loss=0.2466, pruned_loss=0.04193, ctc_loss=0.07737, over 19710.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2751, pruned_loss=0.05134, ctc_loss=0.09743, over 2028556.29 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0
+2024-08-30 15:45:10,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=226480.0, ans=0.025
+2024-08-30 15:45:20,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=226480.0, ans=0.0
+2024-08-30 15:45:23,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226533.33333333334, ans=0.0
+2024-08-30 15:45:25,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=226533.33333333334, ans=0.015
+2024-08-30 15:45:40,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=226586.66666666666, ans=0.07
+2024-08-30 15:45:58,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=226640.0, ans=0.0
+2024-08-30 15:46:16,532 INFO [train.py:1114] (3/4) Epoch 18, batch 200, loss[loss=0.26, simple_loss=0.3089, pruned_loss=0.07483, ctc_loss=0.1535, over 18383.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.274, pruned_loss=0.05092, ctc_loss=0.09645, over 2435908.49 frames. ], batch size: 86, lr: 8.42e-03, grad_scale: 32.0
+2024-08-30 15:46:17,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226746.66666666666, ans=0.125
+2024-08-30 15:46:29,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=226800.0, ans=0.125
+2024-08-30 15:46:58,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226906.66666666666, ans=0.125
+2024-08-30 15:46:58,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=226906.66666666666, ans=0.0
+2024-08-30 15:46:59,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=226906.66666666666, ans=0.0
+2024-08-30 15:47:08,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.794e+02 2.164e+02 2.564e+02 4.131e+02, threshold=4.328e+02, percent-clipped=1.0
+2024-08-30 15:47:14,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=227013.33333333334, ans=10.0
+2024-08-30 15:47:20,534 INFO [train.py:1114] (3/4) Epoch 18, batch 250, loss[loss=0.2425, simple_loss=0.3013, pruned_loss=0.06676, ctc_loss=0.1254, over 19385.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2741, pruned_loss=0.05131, ctc_loss=0.09694, over 2755239.67 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0
+2024-08-30 15:47:22,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.38 vs. limit=15.0
+2024-08-30 15:47:30,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=227013.33333333334, ans=0.09899494936611666
+2024-08-30 15:48:37,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.25 vs. limit=15.0
+2024-08-30 15:48:44,129 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-30 15:49:17,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=227226.66666666666, ans=0.125
+2024-08-30 15:49:22,642 INFO [train.py:1114] (3/4) Epoch 18, batch 300, loss[loss=0.2247, simple_loss=0.287, pruned_loss=0.06062, ctc_loss=0.103, over 19510.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.273, pruned_loss=0.05066, ctc_loss=0.09536, over 3000722.02 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0
+2024-08-30 15:50:47,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.84 vs. limit=12.0
+2024-08-30 15:50:59,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227333.33333333334, ans=0.1
+2024-08-30 15:51:39,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=227493.33333333334, ans=0.2
+2024-08-30 15:51:40,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.292e+02 1.730e+02 1.916e+02 2.273e+02 3.732e+02, threshold=3.832e+02, percent-clipped=0.0
+2024-08-30 15:51:48,900 INFO [train.py:1114] (3/4) Epoch 18, batch 350, loss[loss=0.1863, simple_loss=0.2453, pruned_loss=0.04536, ctc_loss=0.09167, over 19798.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2737, pruned_loss=0.05084, ctc_loss=0.09563, over 3190837.21 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0
+2024-08-30 15:52:00,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.84 vs. limit=22.5
+2024-08-30 15:52:12,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227600.0, ans=0.0
+2024-08-30 15:52:32,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=227706.66666666666, ans=0.0
+2024-08-30 15:52:41,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227760.0, ans=0.0
+2024-08-30 15:52:51,836 INFO [train.py:1114] (3/4) Epoch 18, batch 400, loss[loss=0.1932, simple_loss=0.2775, pruned_loss=0.03909, ctc_loss=0.07679, over 19478.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2738, pruned_loss=0.05098, ctc_loss=0.09601, over 3342300.72 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0
+2024-08-30 15:53:05,417 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-30 15:53:14,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227920.0, ans=0.1
+2024-08-30 15:54:16,395 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.651e+02 1.862e+02 2.258e+02 4.636e+02, threshold=3.723e+02, percent-clipped=1.0
+2024-08-30 15:54:24,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=228080.0, ans=0.0
+2024-08-30 15:54:25,943 INFO [train.py:1114] (3/4) Epoch 18, batch 450, loss[loss=0.2074, simple_loss=0.2776, pruned_loss=0.04995, ctc_loss=0.09312, over 19604.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2741, pruned_loss=0.05118, ctc_loss=0.09618, over 3450789.67 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0
+2024-08-30 15:54:38,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.24 vs. limit=15.0
+2024-08-30 15:54:59,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.22 vs. limit=22.5
+2024-08-30 15:55:13,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=228240.0, ans=0.2
+2024-08-30 15:55:14,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=228240.0, ans=0.125
+2024-08-30 15:55:33,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.76 vs. limit=22.5
+2024-08-30 15:55:37,514 INFO [train.py:1114] (3/4) Epoch 18, batch 500, loss[loss=0.2338, simple_loss=0.299, pruned_loss=0.06119, ctc_loss=0.1154, over 19664.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2732, pruned_loss=0.05075, ctc_loss=0.09534, over 3545366.65 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0
+2024-08-30 15:55:37,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=228346.66666666666, ans=0.07
+2024-08-30 15:55:49,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=228400.0, ans=0.07
+2024-08-30 15:56:36,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228453.33333333334, ans=0.125
+2024-08-30 15:56:43,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=228506.66666666666, ans=0.125
+2024-08-30 15:57:47,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=228506.66666666666, ans=0.0
+2024-08-30 15:57:54,272 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.602e+02 1.832e+02 2.190e+02 3.877e+02, threshold=3.665e+02, percent-clipped=2.0
+2024-08-30 15:58:00,982 INFO [train.py:1114] (3/4) Epoch 18, batch 550, loss[loss=0.2323, simple_loss=0.2952, pruned_loss=0.06194, ctc_loss=0.1139, over 19350.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2734, pruned_loss=0.05091, ctc_loss=0.09564, over 3608466.68 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0
+2024-08-30 15:58:58,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=228666.66666666666, ans=0.0
+2024-08-30 16:00:54,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=228720.0, ans=0.125
+2024-08-30 16:01:22,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=228826.66666666666, ans=0.125
+2024-08-30 16:01:33,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=15.0
+2024-08-30 16:01:35,826 INFO [train.py:1114] (3/4) Epoch 18, batch 600, loss[loss=0.2209, simple_loss=0.277, pruned_loss=0.05927, ctc_loss=0.1157, over 19320.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2734, pruned_loss=0.05106, ctc_loss=0.09603, over 3665320.01 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0
+2024-08-30 16:02:29,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.11 vs. limit=22.5
+2024-08-30 16:02:34,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0
+2024-08-30 16:03:46,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.26 vs. limit=12.0
+2024-08-30 16:04:41,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.417e+02 1.726e+02 2.045e+02 2.727e+02 4.181e+02, threshold=4.090e+02, percent-clipped=7.0
+2024-08-30 16:04:43,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=229093.33333333334, ans=0.09899494936611666
+2024-08-30 16:04:44,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=229093.33333333334, ans=0.1
+2024-08-30 16:04:45,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229093.33333333334, ans=0.1
+2024-08-30 16:04:48,708 INFO [train.py:1114] (3/4) Epoch 18, batch 650, loss[loss=0.176, simple_loss=0.2595, pruned_loss=0.03386, ctc_loss=0.06195, over 19775.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2727, pruned_loss=0.05051, ctc_loss=0.09503, over 3715876.17 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0
+2024-08-30 16:06:22,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.15 vs. limit=15.0
+2024-08-30 16:06:23,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229200.0, ans=0.1
+2024-08-30 16:06:38,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=229253.33333333334, ans=0.2
+2024-08-30 16:06:51,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=229306.66666666666, ans=0.025
+2024-08-30 16:07:16,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=229360.0, ans=0.025
+2024-08-30 16:07:27,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=229360.0, ans=0.09899494936611666
+2024-08-30 16:07:29,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.40 vs. limit=22.5
+2024-08-30 16:07:30,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229360.0, ans=0.1
+2024-08-30 16:07:32,102 INFO [train.py:1114] (3/4) Epoch 18, batch 700, loss[loss=0.1875, simple_loss=0.2572, pruned_loss=0.04219, ctc_loss=0.08339, over 19735.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2729, pruned_loss=0.05048, ctc_loss=0.09495, over 3746552.88 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0
+2024-08-30 16:07:35,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=229413.33333333334, ans=0.2
+2024-08-30 16:07:36,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.65 vs. limit=12.0
+2024-08-30 16:07:51,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=229466.66666666666, ans=0.2
+2024-08-30 16:08:04,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=229520.0, ans=0.0
+2024-08-30 16:08:17,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=229573.33333333334, ans=0.0
+2024-08-30 16:08:27,240 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.709e+02 1.988e+02 2.480e+02 4.374e+02, threshold=3.975e+02, percent-clipped=1.0
+2024-08-30 16:08:34,067 INFO [train.py:1114] (3/4) Epoch 18, batch 750, loss[loss=0.2178, simple_loss=0.2783, pruned_loss=0.05683, ctc_loss=0.1091, over 19502.00 frames. ], tot_loss[loss=0.205, simple_loss=0.272, pruned_loss=0.05008, ctc_loss=0.09439, over 3773339.17 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 32.0
+2024-08-30 16:08:40,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=229680.0, ans=0.2
+2024-08-30 16:08:43,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229680.0, ans=0.125
+2024-08-30 16:08:48,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=229733.33333333334, ans=0.125
+2024-08-30 16:09:13,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=229840.0, ans=0.125
+2024-08-30 16:09:14,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229840.0, ans=0.125
+2024-08-30 16:09:16,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=229840.0, ans=0.0
+2024-08-30 16:09:31,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229893.33333333334, ans=0.125
+2024-08-30 16:09:35,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229893.33333333334, ans=0.1
+2024-08-30 16:09:38,054 INFO [train.py:1114] (3/4) Epoch 18, batch 800, loss[loss=0.1901, simple_loss=0.253, pruned_loss=0.04663, ctc_loss=0.08497, over 19800.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2725, pruned_loss=0.05039, ctc_loss=0.09493, over 3794534.88 frames. ], batch size: 49, lr: 8.37e-03, grad_scale: 32.0
+2024-08-30 16:10:01,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230000.0, ans=0.125
+2024-08-30 16:10:22,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=12.0
+2024-08-30 16:10:28,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=230106.66666666666, ans=0.125
+2024-08-30 16:10:30,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230106.66666666666, ans=0.1
+2024-08-30 16:11:34,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=230160.0, ans=0.0
+2024-08-30 16:11:34,981 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.744e+02 1.950e+02 2.451e+02 4.139e+02, threshold=3.901e+02, percent-clipped=0.0
+2024-08-30 16:11:47,902 INFO [train.py:1114] (3/4) Epoch 18, batch 850, loss[loss=0.19, simple_loss=0.2696, pruned_loss=0.04035, ctc_loss=0.07439, over 19618.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2716, pruned_loss=0.05012, ctc_loss=0.09422, over 3812874.07 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0
+2024-08-30 16:11:54,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=230213.33333333334, ans=0.0
+2024-08-30 16:11:58,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=230266.66666666666, ans=0.95
+2024-08-30 16:12:02,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=230266.66666666666, ans=0.0
+2024-08-30 16:12:09,617 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-30 16:12:21,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=230320.0, ans=0.125
+2024-08-30 16:12:30,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230373.33333333334, ans=0.1
+2024-08-30 16:12:48,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230426.66666666666, ans=0.0
+2024-08-30 16:12:57,694 INFO [train.py:1114] (3/4) Epoch 18, batch 900, loss[loss=0.1832, simple_loss=0.2457, pruned_loss=0.04403, ctc_loss=0.0816, over 19824.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2721, pruned_loss=0.05064, ctc_loss=0.09513, over 3817990.98 frames. ], batch size: 49, lr: 8.36e-03, grad_scale: 32.0
+2024-08-30 16:12:58,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=230480.0, ans=0.125
+2024-08-30 16:13:17,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=15.0
+2024-08-30 16:13:33,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=230640.0, ans=0.125
+2024-08-30 16:13:41,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=230640.0, ans=0.0
+2024-08-30 16:13:49,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.771e+02 2.097e+02 2.541e+02 3.279e+02, threshold=4.195e+02, percent-clipped=1.0
+2024-08-30 16:13:51,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=230693.33333333334, ans=0.0
+2024-08-30 16:13:56,598 INFO [train.py:1114] (3/4) Epoch 18, batch 950, loss[loss=0.2131, simple_loss=0.2687, pruned_loss=0.0571, ctc_loss=0.1084, over 19489.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2729, pruned_loss=0.05133, ctc_loss=0.09635, over 3820036.99 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0
+2024-08-30 16:14:49,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=12.0
+2024-08-30 16:15:24,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=230906.66666666666, ans=0.125
+2024-08-30 16:15:24,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.30 vs. limit=15.0
+2024-08-30 16:15:41,760 INFO [train.py:1114] (3/4) Epoch 18, batch 1000, loss[loss=0.2131, simple_loss=0.2786, pruned_loss=0.05255, ctc_loss=0.1063, over 19864.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2744, pruned_loss=0.05206, ctc_loss=0.09773, over 3816317.46 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0
+2024-08-30 16:15:56,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0
+2024-08-30 16:16:04,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=231066.66666666666, ans=0.025
+2024-08-30 16:16:34,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=231173.33333333334, ans=0.2
+2024-08-30 16:16:40,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231226.66666666666, ans=0.1
+2024-08-30 16:16:44,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231226.66666666666, ans=0.125
+2024-08-30 16:16:46,157 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.681e+02 1.935e+02 2.141e+02 3.468e+02, threshold=3.870e+02, percent-clipped=0.0
+2024-08-30 16:16:53,180 INFO [train.py:1114] (3/4) Epoch 18, batch 1050, loss[loss=0.1995, simple_loss=0.279, pruned_loss=0.04329, ctc_loss=0.0834, over 19827.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2736, pruned_loss=0.05162, ctc_loss=0.09701, over 3823782.84 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0
+2024-08-30 16:18:04,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0
+2024-08-30 16:18:29,885 INFO [train.py:1114] (3/4) Epoch 18, batch 1100, loss[loss=0.1948, simple_loss=0.2684, pruned_loss=0.04425, ctc_loss=0.08193, over 19595.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2733, pruned_loss=0.05115, ctc_loss=0.09609, over 3831868.12 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0
+2024-08-30 16:18:35,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.41 vs. limit=15.0
+2024-08-30 16:18:39,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.24 vs. limit=15.0
+2024-08-30 16:19:01,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231653.33333333334, ans=0.0
+2024-08-30 16:19:01,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0
+2024-08-30 16:19:19,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231760.0, ans=0.1
+2024-08-30 16:19:24,181 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.668e+02 1.884e+02 2.263e+02 3.606e+02, threshold=3.767e+02, percent-clipped=0.0
+2024-08-30 16:19:52,625 INFO [train.py:1114] (3/4) Epoch 18, batch 1150, loss[loss=0.2027, simple_loss=0.2703, pruned_loss=0.04845, ctc_loss=0.09539, over 19585.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2735, pruned_loss=0.05139, ctc_loss=0.09647, over 3828941.83 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0
+2024-08-30 16:19:58,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=231813.33333333334, ans=0.09899494936611666
+2024-08-30 16:20:05,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=231866.66666666666, ans=0.125
+2024-08-30 16:22:23,866 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-30 16:22:51,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.17 vs. limit=10.0
+2024-08-30 16:22:56,527 INFO [train.py:1114] (3/4) Epoch 18, batch 1200, loss[loss=0.2036, simple_loss=0.2796, pruned_loss=0.04706, ctc_loss=0.08376, over 19846.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.274, pruned_loss=0.05131, ctc_loss=0.09649, over 3824642.10 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0
+2024-08-30 16:23:02,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=232080.0, ans=0.125
+2024-08-30 16:23:05,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=232080.0, ans=0.2
+2024-08-30 16:23:17,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=232186.66666666666, ans=0.2
+2024-08-30 16:23:29,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=15.0
+2024-08-30 16:23:31,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=232240.0, ans=0.125
+2024-08-30 16:23:45,952 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.656e+02 1.841e+02 2.164e+02 3.391e+02, threshold=3.682e+02, percent-clipped=0.0
+2024-08-30 16:23:52,939 INFO [train.py:1114] (3/4) Epoch 18, batch 1250, loss[loss=0.212, simple_loss=0.2879, pruned_loss=0.04999, ctc_loss=0.09056, over 19519.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2742, pruned_loss=0.05105, ctc_loss=0.0958, over 3842414.52 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0
+2024-08-30 16:24:01,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=232346.66666666666, ans=10.0
+2024-08-30 16:24:25,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=232453.33333333334, ans=0.0
+2024-08-30 16:24:33,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=232506.66666666666, ans=0.0
+2024-08-30 16:25:38,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=232506.66666666666, ans=0.0
+2024-08-30 16:25:53,661 INFO [train.py:1114] (3/4) Epoch 18, batch 1300, loss[loss=0.2071, simple_loss=0.2706, pruned_loss=0.05204, ctc_loss=0.09905, over 18885.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2739, pruned_loss=0.05086, ctc_loss=0.0956, over 3845881.93 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0
+2024-08-30 16:32:09,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=232613.33333333334, ans=0.0
+2024-08-30 16:32:25,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=232666.66666666666, ans=0.125
+2024-08-30 16:32:32,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232720.0, ans=0.125
+2024-08-30 16:32:40,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=232773.33333333334, ans=0.2
+2024-08-30 16:35:45,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=232773.33333333334, ans=0.125
+2024-08-30 16:45:23,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.785e+02 2.170e+02 2.759e+02 4.331e+02, threshold=4.339e+02, percent-clipped=5.0
+2024-08-30 16:57:19,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0
+2024-08-30 17:02:45,886 INFO [train.py:1114] (3/4) Epoch 18, batch 1350, loss[loss=0.2046, simple_loss=0.2722, pruned_loss=0.05017, ctc_loss=0.09185, over 19779.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2736, pruned_loss=0.05092, ctc_loss=0.09559, over 3857636.80 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 32.0
+2024-08-30 17:12:54,858 INFO [train.py:1050] (3/4) Caught exception: [Rank 3] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=46170, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600006 milliseconds before timing out..
+2024-08-30 17:12:54,859 INFO [checkpoint.py:75] (3/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-3.pt +2024-08-30 17:12:56,257 INFO [train.py:1413] (3/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 17:12:56,300 INFO [train.py:1419] (3/4) features shape: torch.Size([56, 1420, 80]) +2024-08-30 17:12:56,303 INFO [train.py:1423] (3/4) num tokens: 4370 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-0 new file mode 100644 index 0000000000000000000000000000000000000000..d89c8c71ebbd7c4f6fade0fb7f3961fe8849fa95 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-0 @@ -0,0 +1,43 @@ +2024-08-30 20:06:52,423 INFO [train.py:1182] (0/4) Training started +2024-08-30 20:06:52,427 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-30 20:06:52,566 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 20:06:52,566 INFO [train.py:1212] (0/4) About to create model +2024-08-30 20:06:54,344 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-30 20:06:54,894 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 20:09:02,216 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-30 20:09:02,658 INFO [train.py:1231] (0/4) Using DDP +2024-08-30 20:09:07,155 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-30 20:17:08,517 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-30 20:17:08,518 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 20:20:46,621 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-30 20:20:48,445 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-30 20:20:48,451 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-30 20:21:06,542 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-30 20:22:04,978 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-30 20:22:05,299 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-30 20:22:05,299 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:26:29,960 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-30 20:26:34,938 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-30 20:29:01,578 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-30 20:29:02,551 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.29 vs. limit=5.0 +2024-08-30 20:29:03,107 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 20:29:43,853 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 20:29:45,409 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 20:29:45,427 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-30 20:31:27,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=6.0 +2024-08-30 20:31:36,630 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.1814, simple_loss=0.2487, pruned_loss=0.04175, ctc_loss=0.0762, over 19813.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2487, pruned_loss=0.04175, ctc_loss=0.0762, over 19813.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 20:31:38,746 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-30 20:32:36,423 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3326, 1.0374, 1.5079, 0.7235, 1.4933, 1.5508, 1.6642, 1.3541], + device='cuda:0') +2024-08-30 20:32:45,078 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-30 20:32:45,078 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 20:34:50,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=225680.0, ans=0.0 +2024-08-30 20:55:42,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.70 vs. limit=15.0 +2024-08-30 20:56:22,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=225786.66666666666, ans=0.2 +2024-08-30 20:56:22,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 20:57:18,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=225786.66666666666, ans=0.2 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-1 new file mode 100644 index 0000000000000000000000000000000000000000..3d407219226f57fb85626f7e050edd2381434198 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-1 @@ -0,0 +1,45 @@ +2024-08-30 20:06:52,808 INFO [train.py:1182] (1/4) Training started +2024-08-30 20:06:52,809 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-30 20:06:52,812 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': 
PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 20:06:52,812 INFO [train.py:1212] (1/4) About to create model +2024-08-30 20:06:54,273 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-30 20:06:54,312 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 20:09:02,596 INFO [train.py:1231] (1/4) Using DDP +2024-08-30 20:09:07,158 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-30 20:17:08,539 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-30 20:17:08,539 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 20:20:46,622 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. 
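[Note] The "Using DynamicBucketingSampler." line above is Lhotse's duration-bucketed batch sampler. A minimal sketch, not the icefall code itself, of how such a sampler and dataloader are typically wired up with the settings from the config dict above ('max_duration': 800, 'num_buckets': 30, 'shuffle': True, 'drop_last': True, 'num_workers': 2); the manifest path is hypothetical:

```python
from torch.utils.data import DataLoader

from lhotse import CutSet
from lhotse.dataset import DynamicBucketingSampler, K2SpeechRecognitionDataset

# Hypothetical manifest; the real job loads the shuffled LibriSpeech cuts.
cuts = CutSet.from_file("train_cuts.jsonl.gz")

sampler = DynamicBucketingSampler(
    cuts,
    max_duration=800,  # seconds of audio per batch ('max_duration')
    num_buckets=30,    # 'num_buckets'
    shuffle=True,
    drop_last=True,
)

dataset = K2SpeechRecognitionDataset(return_cuts=True)

# Lhotse samplers yield whole batches, so batch_size must be None.
dataloader = DataLoader(dataset, sampler=sampler, batch_size=None, num_workers=2)
```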
+2024-08-30 20:20:48,457 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-30 20:20:48,458 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-30 20:21:06,543 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-30 20:22:04,978 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-30 20:22:05,300 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-30 20:22:05,300 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:26:28,265 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.35 vs. limit=3.0 +2024-08-30 20:26:29,961 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-30 20:26:34,937 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 20:29:01,588 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 20:29:03,112 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 20:29:43,852 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 20:29:44,978 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=9.29 vs. limit=5.0 +2024-08-30 20:29:45,417 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 20:29:45,440 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-30 20:31:36,630 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04853, ctc_loss=0.09187, over 19410.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04853, ctc_loss=0.09187, over 19410.00 frames. ], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 20:31:38,746 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-30 20:32:45,076 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-30 20:32:45,077 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 20:32:51,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.51 vs. limit=15.0 +2024-08-30 20:34:49,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.71 vs. limit=6.0 +2024-08-30 20:47:12,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=225733.33333333334, ans=0.0 +2024-08-30 20:52:48,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.61 vs. limit=10.0 +2024-08-30 20:58:14,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225840.0, ans=0.125 +2024-08-30 21:10:11,094 INFO [train.py:1050] (1/4) Caught exception: [Rank 1] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=593, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600007 milliseconds before timing out.. 
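[Note] The Timeout(ms)=600000 above is the default 10-minute NCCL watchdog: rank 1 sat in an ALLREDUCE for longer than that and was killed. A minimal sketch, assuming torch.distributed with the NCCL backend inside a normal DDP launch, of raising the collective timeout so slow steps (such as the 8-minute optimizer-state load earlier in this log) do not trip the watchdog:

```python
from datetime import timedelta

import torch.distributed as dist

# Default NCCL timeout is 10 minutes; raise it for jobs with slow
# checkpoint I/O or occasional long collectives.
dist.init_process_group(
    backend="nccl",
    timeout=timedelta(minutes=30),
)
```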
+2024-08-30 21:10:11,097 INFO [checkpoint.py:75] (1/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-1.pt +2024-08-30 21:10:16,373 INFO [train.py:1413] (1/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 21:10:16,429 INFO [train.py:1419] (1/4) features shape: torch.Size([85, 933, 80]) +2024-08-30 21:10:16,432 INFO [train.py:1423] (1/4) num tokens: 4004 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-2 new file mode 100644 index 0000000000000000000000000000000000000000..f00bc8bc15e628c563ae312a8ef3a7ce971ef9da --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-2 @@ -0,0 +1,44 @@ +2024-08-30 20:06:52,811 INFO [train.py:1182] (2/4) Training started +2024-08-30 20:06:52,812 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-30 20:06:52,814 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 20:06:52,814 INFO [train.py:1212] (2/4) About to create model +2024-08-30 20:06:54,260 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-30 20:06:54,312 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 20:09:02,612 INFO [train.py:1231] (2/4) Using DDP +2024-08-30 20:09:07,155 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-30 20:17:08,543 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-30 20:17:08,544 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 20:20:46,622 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-30 20:20:48,466 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-30 20:20:48,467 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-30 20:21:06,542 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-30 20:22:04,978 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-30 20:22:05,301 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-30 20:22:05,302 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:26:29,960 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-30 20:26:34,942 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-30 20:29:01,588 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 20:29:02,828 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=256, metric=8.54 vs. limit=7.5 +2024-08-30 20:29:03,110 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 20:29:43,852 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 20:29:45,410 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 20:29:45,429 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-30 20:31:36,626 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.1826, simple_loss=0.2456, pruned_loss=0.04306, ctc_loss=0.08405, over 19432.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2456, pruned_loss=0.04306, ctc_loss=0.08405, over 19432.00 frames. 
], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 20:31:38,746 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-30 20:32:33,196 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3674, 1.1105, 1.6340, 0.7188, 1.6513, 1.7595, 1.8532, 1.6009], + device='cuda:2') +2024-08-30 20:32:45,076 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-30 20:32:45,077 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 20:32:53,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=22.5 +2024-08-30 20:34:49,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=225680.0, ans=0.0 +2024-08-30 20:46:48,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.39 vs. limit=15.0 +2024-08-30 21:10:11,093 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=593, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600006 milliseconds before timing out.. +2024-08-30 21:10:11,094 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-2.pt +2024-08-30 21:10:12,589 INFO [train.py:1413] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 21:10:12,755 INFO [train.py:1419] (2/4) features shape: torch.Size([85, 936, 80]) +2024-08-30 21:10:12,757 INFO [train.py:1423] (2/4) num tokens: 4032 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-3 new file mode 100644 index 0000000000000000000000000000000000000000..b346abeaa39538dcb74762e147ef636942aea75f --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-20-06-52-3 @@ -0,0 +1,41 @@ +2024-08-30 20:06:52,810 INFO [train.py:1182] (3/4) Training started +2024-08-30 20:06:52,811 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-30 20:06:52,813 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2647.int.cedar.computecanada.ca', 'IP address': '172.16.146.84'}, 'world_size': 4, 
'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 20:06:52,814 INFO [train.py:1212] (3/4) About to create model +2024-08-30 20:06:54,261 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-30 20:06:54,312 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 20:09:02,570 INFO [train.py:1231] (3/4) Using DDP +2024-08-30 20:09:07,155 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-30 20:17:08,501 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-30 20:17:08,502 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 20:20:46,622 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-30 20:20:46,868 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-30 20:20:46,869 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. 
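[Note] The "Enable SpecAugment / Time warp factor: 80 / Num frame mask: 10" lines above configure Lhotse's SpecAugment input transform. A sketch of that transform with the two logged settings; all other arguments are Lhotse defaults, not values confirmed by this log:

```python
from lhotse.dataset import SpecAugment

spec_augment = SpecAugment(
    time_warp_factor=80,  # "Time warp factor: 80"
    num_frame_masks=10,   # "Num frame mask: 10"
)

# Applied to a features tensor of shape (batch, time, num_mel_bins):
#   features = spec_augment(features)
```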
+2024-08-30 20:20:48,445 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-30 20:20:48,451 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-30 20:21:06,542 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-30 20:22:04,978 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-30 20:22:05,306 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-30 20:22:05,307 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 20:26:29,960 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-30 20:26:34,941 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 20:29:01,585 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 20:29:02,557 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.30 vs. limit=5.0 +2024-08-30 20:29:03,107 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 20:29:43,858 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 20:29:45,405 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 20:29:45,428 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-30 20:31:23,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-30 20:31:36,630 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.04522, ctc_loss=0.09044, over 19798.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.04522, ctc_loss=0.09044, over 19798.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 20:31:38,746 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-30 20:32:38,508 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3005, 0.9812, 1.4801, 0.7409, 1.4295, 1.5490, 1.6372, 1.3385], + device='cuda:3') +2024-08-30 20:32:45,068 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. 
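[Note] The repeated "Maximum memory allocated so far is ...MB" lines during the OOM sanity check come from CUDA's peak-memory counter. A minimal sketch of producing the same number:

```python
import torch

# Peak bytes ever allocated on this device since the last reset,
# converted to MB as in the log lines above.
peak_mb = torch.cuda.max_memory_allocated(torch.device("cuda:0")) // (1024 * 1024)
print(f"Maximum memory allocated so far is {peak_mb}MB")
```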
+2024-08-30 20:32:45,069 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 20:34:50,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=225680.0, ans=0.1 +2024-08-30 20:54:09,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-30 20:55:52,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 20:59:09,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=225840.0, ans=0.125 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-0 new file mode 100644 index 0000000000000000000000000000000000000000..c096d42f8a5c092b015964f6e936f36bb51e61ec --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-0 @@ -0,0 +1,47 @@ +2024-08-30 21:35:28,104 INFO [train.py:1182] (0/4) Training started +2024-08-30 21:35:28,112 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-30 21:35:28,717 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2539.int.cedar.computecanada.ca', 'IP address': '172.16.145.232'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 
'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 21:35:28,718 INFO [train.py:1212] (0/4) About to create model +2024-08-30 21:35:30,104 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-30 21:35:30,652 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 21:39:40,123 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-30 21:39:40,535 INFO [train.py:1231] (0/4) Using DDP +2024-08-30 21:39:47,388 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-30 21:39:54,492 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-30 21:39:54,492 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-30 21:39:56,913 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-30 21:39:56,921 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-30 21:39:57,110 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-30 21:39:57,483 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-30 21:39:57,807 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-30 21:39:57,807 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 21:45:27,165 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-30 21:45:28,623 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-30 21:46:07,693 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-30 21:46:18,191 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.29 vs. 
limit=5.0 +2024-08-30 21:46:18,706 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 21:47:44,993 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 21:47:46,565 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 21:47:46,582 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-30 21:50:09,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. limit=6.0 +2024-08-30 21:50:11,573 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.1814, simple_loss=0.2487, pruned_loss=0.04175, ctc_loss=0.0762, over 19813.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2487, pruned_loss=0.04175, ctc_loss=0.0762, over 19813.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 21:50:11,574 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-30 21:50:40,978 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3326, 1.0374, 1.5079, 0.7235, 1.4933, 1.5508, 1.6642, 1.3541], + device='cuda:0') +2024-08-30 21:51:06,633 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-30 21:51:06,633 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13093MB +2024-08-30 21:52:24,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=225680.0, ans=0.0 +2024-08-30 21:59:02,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=15.0 +2024-08-30 21:59:14,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=225786.66666666666, ans=0.2 +2024-08-30 21:59:14,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 21:59:15,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=225786.66666666666, ans=0.2 +2024-08-30 22:03:46,337 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.919e+02 2.092e+02 2.381e+02 5.934e+02, threshold=4.185e+02, percent-clipped=5.0 +2024-08-30 22:04:57,926 INFO [train.py:1114] (0/4) Epoch 18, batch 50, loss[loss=0.1874, simple_loss=0.2541, pruned_loss=0.04354, ctc_loss=0.08426, over 19716.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2783, pruned_loss=0.05351, ctc_loss=0.1012, over 845746.19 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 22:07:11,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. 
limit=15.0 +2024-08-30 22:24:47,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=226160.0, ans=0.0 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-1 new file mode 100644 index 0000000000000000000000000000000000000000..a7e38d976c9073a6f296239850a4278e0c880b79 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-1 @@ -0,0 +1,56 @@ +2024-08-30 21:35:28,366 INFO [train.py:1182] (1/4) Training started +2024-08-30 21:35:28,366 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-30 21:35:28,717 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2539.int.cedar.computecanada.ca', 'IP address': '172.16.145.232'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 
'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 21:35:28,717 INFO [train.py:1212] (1/4) About to create model +2024-08-30 21:35:30,102 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-30 21:35:30,216 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 21:39:40,401 INFO [train.py:1231] (1/4) Using DDP +2024-08-30 21:39:47,395 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-30 21:39:54,485 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-30 21:39:54,485 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-30 21:39:56,914 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-30 21:39:56,921 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-30 21:39:57,110 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-30 21:39:57,483 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-30 21:39:57,813 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-30 21:39:57,814 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 21:45:25,347 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.35 vs. limit=3.0 +2024-08-30 21:45:27,163 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-30 21:45:28,618 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 21:46:07,692 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 21:46:18,716 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 21:47:44,986 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 21:47:46,092 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=9.29 vs. limit=5.0 +2024-08-30 21:47:46,562 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 21:47:46,583 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-30 21:50:11,562 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04853, ctc_loss=0.09187, over 19410.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04853, ctc_loss=0.09187, over 19410.00 frames. ], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 21:50:11,563 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-30 21:51:06,631 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. 
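[Note] With 'start_epoch': 18, the run above reloads epoch-17.pt and then restores the optimizer and scheduler state ("Loading checkpoint ... / Loading optimizer state dict / Loading scheduler state dict"). A sketch of that resume round-trip with stand-in objects; the checkpoint key names ("model", "optimizer", "scheduler") are assumptions about icefall's layout, not confirmed by this log:

```python
import torch
from torch import nn

# Stand-ins for the live training objects (the real model is the
# ~66M-parameter Zipformer; these are illustrative only).
model = nn.Linear(80, 500)
optimizer = torch.optim.Adam(model.parameters())
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)

# Save at the end of epoch 17 (key names assumed):
torch.save(
    {
        "model": model.state_dict(),
        "optimizer": optimizer.state_dict(),
        "scheduler": scheduler.state_dict(),
    },
    "epoch-17.pt",
)

# Resume for epoch 18, mirroring the log lines above:
ckpt = torch.load("epoch-17.pt", map_location="cpu")
model.load_state_dict(ckpt["model"])
optimizer.load_state_dict(ckpt["optimizer"])
scheduler.load_state_dict(ckpt["scheduler"])
```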
+2024-08-30 21:51:06,631 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13490MB +2024-08-30 21:51:13,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.51 vs. limit=15.0 +2024-08-30 21:52:06,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.71 vs. limit=6.0 +2024-08-30 21:55:54,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=225733.33333333334, ans=0.0 +2024-08-30 21:57:28,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-08-30 21:59:39,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225840.0, ans=0.125 +2024-08-30 22:03:09,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=12.0 +2024-08-30 22:03:46,337 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.919e+02 2.092e+02 2.381e+02 5.934e+02, threshold=4.185e+02, percent-clipped=5.0 +2024-08-30 22:04:57,923 INFO [train.py:1114] (1/4) Epoch 18, batch 50, loss[loss=0.1577, simple_loss=0.2323, pruned_loss=0.03009, ctc_loss=0.05751, over 19735.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2747, pruned_loss=0.05175, ctc_loss=0.09898, over 843693.80 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 22:06:03,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.87 vs. limit=6.0 +2024-08-30 22:06:35,044 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-30 22:06:35,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225946.66666666666, ans=0.125 +2024-08-30 22:11:29,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226000.0, ans=0.125 +2024-08-30 22:21:10,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=226106.66666666666, ans=0.95 +2024-08-30 22:23:47,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.79 vs. limit=15.0 +2024-08-30 22:26:49,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226160.0, ans=0.125 +2024-08-30 22:26:51,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-08-30 22:44:30,258 INFO [train.py:1050] (1/4) Caught exception: [Rank 1] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=1277, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600005 milliseconds before timing out.. 
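[Note] After the watchdog timeout above, the next lines show the trainer dumping both a bad-model checkpoint and the in-flight batch so the failing input can be replayed offline. A hedged sketch of inspecting that dump; the "inputs" key is an assumption about icefall's batch layout, not confirmed by this log:

```python
import torch

# Path taken verbatim from the "Saving batch to ..." line below.
batch = torch.load("exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt")

# Matches the "features shape" line the trainer logs at crash time,
# e.g. torch.Size([61, 1293, 80]) = (batch, frames, mel bins).
print("features shape:", batch["inputs"].shape)
```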
+2024-08-30 22:44:30,295 INFO [checkpoint.py:75] (1/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-1.pt +2024-08-30 22:45:51,840 INFO [train.py:1413] (1/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 22:45:51,961 INFO [train.py:1419] (1/4) features shape: torch.Size([61, 1293, 80]) +2024-08-30 22:45:51,964 INFO [train.py:1423] (1/4) num tokens: 4246 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-2 new file mode 100644 index 0000000000000000000000000000000000000000..55e292856b9413e16533585436669d5d9677cf5c --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-2 @@ -0,0 +1,53 @@ +2024-08-30 21:35:28,367 INFO [train.py:1182] (2/4) Training started +2024-08-30 21:35:28,369 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-30 21:35:28,716 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2539.int.cedar.computecanada.ca', 'IP address': '172.16.145.232'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 21:35:28,717 INFO [train.py:1212] (2/4) About to create model +2024-08-30 21:35:30,089 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-30 21:35:30,216 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 21:39:40,389 INFO [train.py:1231] (2/4) Using DDP +2024-08-30 21:39:47,387 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-30 21:39:54,477 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-30 21:39:54,477 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-30 21:39:56,919 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-30 21:39:56,921 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-30 21:39:57,110 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-30 21:39:57,483 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-30 21:39:57,809 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-30 21:39:57,809 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 21:45:27,164 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-30 21:45:28,616 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-30 21:46:07,701 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 21:46:18,426 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=256, metric=8.54 vs. limit=7.5 +2024-08-30 21:46:18,718 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 21:47:44,988 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 21:47:46,563 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 21:47:46,583 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-30 21:50:11,564 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.1826, simple_loss=0.2456, pruned_loss=0.04306, ctc_loss=0.08405, over 19432.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2456, pruned_loss=0.04306, ctc_loss=0.08405, over 19432.00 frames. 
], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 21:50:11,564 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-30 21:50:23,214 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3674, 1.1105, 1.6340, 0.7188, 1.6513, 1.7595, 1.8532, 1.6009], + device='cuda:2') +2024-08-30 21:51:06,624 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-30 21:51:06,625 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13069MB +2024-08-30 21:51:37,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=22.5 +2024-08-30 21:52:07,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=225680.0, ans=0.0 +2024-08-30 21:56:26,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.37 vs. limit=15.0 +2024-08-30 22:03:46,337 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.919e+02 2.092e+02 2.381e+02 5.934e+02, threshold=4.185e+02, percent-clipped=5.0 +2024-08-30 22:04:55,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225946.66666666666, ans=0.1 +2024-08-30 22:04:57,945 INFO [train.py:1114] (2/4) Epoch 18, batch 50, loss[loss=0.1957, simple_loss=0.2513, pruned_loss=0.05004, ctc_loss=0.1001, over 19736.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2768, pruned_loss=0.05278, ctc_loss=0.09989, over 844774.48 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 22:06:15,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=225946.66666666666, ans=0.025 +2024-08-30 22:06:27,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.92 vs. limit=15.0 +2024-08-30 22:09:47,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-08-30 22:12:12,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=226053.33333333334, ans=0.0 +2024-08-30 22:24:23,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=226160.0, ans=0.125 +2024-08-30 22:25:46,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=226160.0, ans=0.2 +2024-08-30 22:44:30,258 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=1277, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600002 milliseconds before timing out.. 
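[Note] The "Clipping_scale=2.0, grad-norm quartiles ... threshold=4.185e+02, percent-clipped=5.0" WARNING above is ScaledAdam's internal gradient-clipping diagnostic (optim.py), reporting the distribution of recent gradient norms and how often clipping fired. A generic PyTorch analogue, not the ScaledAdam implementation:

```python
import torch
from torch import nn

model = nn.Linear(80, 500)  # stand-in; not the Zipformer
loss = model(torch.randn(4, 80)).sum()
loss.backward()

# max_norm echoes the logged clipping threshold of 4.185e+02.
total_norm = nn.utils.clip_grad_norm_(model.parameters(), max_norm=418.5)
print(f"grad norm before clipping: {total_norm:.3e}")
```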
+2024-08-30 22:44:30,684 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-2.pt +2024-08-30 22:44:32,230 INFO [train.py:1413] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 22:44:41,079 INFO [train.py:1419] (2/4) features shape: torch.Size([61, 1295, 80]) +2024-08-30 22:44:41,082 INFO [train.py:1423] (2/4) num tokens: 4260 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-3 new file mode 100644 index 0000000000000000000000000000000000000000..7b713734f93e24863ec3b77900924c450d9c92b0 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-30-21-35-28-3 @@ -0,0 +1,52 @@ +2024-08-30 21:35:28,367 INFO [train.py:1182] (3/4) Training started +2024-08-30 21:35:28,370 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-30 21:35:28,717 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2539.int.cedar.computecanada.ca', 'IP address': '172.16.145.232'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-30 21:35:28,717 INFO [train.py:1212] (3/4) About to create model +2024-08-30 21:35:30,110 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-30 21:35:30,217 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-30 21:39:40,402 INFO [train.py:1231] (3/4) Using DDP +2024-08-30 21:39:47,405 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-30 21:39:54,490 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-30 21:39:54,490 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-30 21:39:55,313 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-30 21:39:55,314 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-30 21:39:56,914 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-30 21:39:56,921 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-30 21:39:57,110 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-30 21:39:57,484 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-30 21:39:57,810 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-30 21:39:57,810 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-30 21:45:27,163 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-30 21:45:28,618 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 21:46:07,688 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-30 21:46:18,207 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.30 vs. limit=5.0 +2024-08-30 21:46:18,706 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 21:47:44,986 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 21:47:46,564 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 21:47:46,575 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-30 21:50:06,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-30 21:50:11,560 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.04522, ctc_loss=0.09044, over 19798.00 frames. 
], tot_loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.04522, ctc_loss=0.09044, over 19798.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 21:50:11,561 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-30 21:50:59,757 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3005, 0.9812, 1.4801, 0.7409, 1.4295, 1.5490, 1.6372, 1.3385], + device='cuda:3') +2024-08-30 21:51:06,624 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-30 21:51:06,625 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13097MB +2024-08-30 21:52:30,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=225680.0, ans=0.1 +2024-08-30 21:58:05,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-30 21:59:03,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-30 22:00:59,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=225840.0, ans=0.125 +2024-08-30 22:03:46,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.919e+02 2.092e+02 2.381e+02 5.934e+02, threshold=4.185e+02, percent-clipped=5.0 +2024-08-30 22:04:52,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=225946.66666666666, ans=0.0 +2024-08-30 22:04:57,926 INFO [train.py:1114] (3/4) Epoch 18, batch 50, loss[loss=0.1828, simple_loss=0.2487, pruned_loss=0.04244, ctc_loss=0.0803, over 19711.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2763, pruned_loss=0.05243, ctc_loss=0.09984, over 844773.76 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-30 22:11:24,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.98 vs. limit=15.0 +2024-08-30 22:12:57,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.33 vs. limit=15.0 +2024-08-30 22:20:51,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=226106.66666666666, ans=0.025 +2024-08-30 22:44:30,258 INFO [train.py:1050] (3/4) Caught exception: [Rank 3] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=1277, OpType=ALLREDUCE, NumelIn=841, NumelOut=841, Timeout(ms)=600000) ran for 600000 milliseconds before timing out.. 
+2024-08-30 22:44:30,291 INFO [checkpoint.py:75] (3/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/bad-model-3.pt +2024-08-30 22:44:31,758 INFO [train.py:1413] (3/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/batch-a689ee27-eec1-83b6-15a8-f48f39643825.pt +2024-08-30 22:44:40,498 INFO [train.py:1419] (3/4) features shape: torch.Size([61, 1294, 80]) +2024-08-30 22:44:40,500 INFO [train.py:1423] (3/4) num tokens: 4370 diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-0 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-0 new file mode 100644 index 0000000000000000000000000000000000000000..166821eb5ceeb50c7d69db7b5ee5ca118ae98d7d --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-0 @@ -0,0 +1,1057 @@ +2024-08-31 13:15:01,118 INFO [train.py:1182] (0/4) Training started +2024-08-31 13:15:03,782 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-31 13:15:03,785 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 
'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-31 13:15:03,785 INFO [train.py:1212] (0/4) About to create model +2024-08-31 13:15:10,365 INFO [train.py:1216] (0/4) Number of model parameters: 66367431 +2024-08-31 13:15:45,928 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-31 13:16:01,508 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-31 13:16:01,896 INFO [train.py:1231] (0/4) Using DDP +2024-08-31 13:16:07,013 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-31 13:16:46,064 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-31 13:16:46,065 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-31 13:16:46,068 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-31 13:16:46,068 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-31 13:16:46,068 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-31 13:16:46,068 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-31 13:16:46,068 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-31 13:16:46,069 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-31 13:16:47,640 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-31 13:16:50,429 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-31 13:16:50,430 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-31 13:16:50,431 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-31 13:16:50,754 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-31 13:16:50,755 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:22:43,895 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12840MB +2024-08-31 13:22:45,376 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-31 13:23:02,014 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12916MB +2024-08-31 13:23:02,981 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.29 vs. limit=5.0 +2024-08-31 13:23:03,527 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-31 13:24:12,102 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-31 13:24:13,682 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 13093MB +2024-08-31 13:24:13,696 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-31 13:25:02,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=6.0 +2024-08-31 13:25:06,944 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.1814, simple_loss=0.2487, pruned_loss=0.04175, ctc_loss=0.0762, over 19813.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2487, pruned_loss=0.04175, ctc_loss=0.0762, over 19813.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 13:25:06,945 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-31 13:25:28,551 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3326, 1.0374, 1.5079, 0.7235, 1.4933, 1.5508, 1.6642, 1.3541], + device='cuda:0') +2024-08-31 13:25:49,907 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-31 13:25:49,908 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13093MB +2024-08-31 13:27:20,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=225680.0, ans=0.0 +2024-08-31 13:39:08,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.70 vs. limit=15.0 +2024-08-31 13:41:45,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=225786.66666666666, ans=0.2 +2024-08-31 13:41:45,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-31 13:42:04,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=225786.66666666666, ans=0.2 +2024-08-31 13:48:13,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.934e+02 2.118e+02 2.433e+02 6.228e+02, threshold=4.237e+02, percent-clipped=5.0 +2024-08-31 13:56:46,282 INFO [train.py:1114] (0/4) Epoch 18, batch 50, loss[loss=0.1862, simple_loss=0.2537, pruned_loss=0.0428, ctc_loss=0.0826, over 19716.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2786, pruned_loss=0.05368, ctc_loss=0.1016, over 845746.19 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 14:00:20,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.80 vs. limit=15.0 +2024-08-31 14:11:27,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=226160.0, ans=0.0 +2024-08-31 14:15:00,235 INFO [train.py:1114] (0/4) Epoch 18, batch 100, loss[loss=0.1941, simple_loss=0.2618, pruned_loss=0.04594, ctc_loss=0.0861, over 19718.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2789, pruned_loss=0.05323, ctc_loss=0.1002, over 1499054.83 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:32:19,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=226426.66666666666, ans=0.2 +2024-08-31 14:32:51,600 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.685e+02 1.949e+02 2.332e+02 3.525e+02, threshold=3.898e+02, percent-clipped=0.0 +2024-08-31 14:34:04,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.99 vs. 
limit=10.0 +2024-08-31 14:34:38,789 INFO [train.py:1114] (0/4) Epoch 18, batch 150, loss[loss=0.1976, simple_loss=0.254, pruned_loss=0.05219, ctc_loss=0.09204, over 19715.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2751, pruned_loss=0.05165, ctc_loss=0.09722, over 2028052.35 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:45:09,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-31 14:45:16,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-31 14:47:41,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.32 vs. limit=22.5 +2024-08-31 14:49:47,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=226586.66666666666, ans=0.2 +2024-08-31 14:50:22,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226640.0, ans=0.125 +2024-08-31 15:05:15,394 INFO [train.py:1114] (0/4) Epoch 18, batch 200, loss[loss=0.2387, simple_loss=0.2965, pruned_loss=0.06626, ctc_loss=0.121, over 18248.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2736, pruned_loss=0.05063, ctc_loss=0.09535, over 2435754.48 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-31 15:08:17,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=15.0 +2024-08-31 15:15:21,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=226906.66666666666, ans=0.0 +2024-08-31 15:16:06,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226906.66666666666, ans=0.1 +2024-08-31 15:17:44,778 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.761e+02 2.086e+02 2.524e+02 4.159e+02, threshold=4.172e+02, percent-clipped=2.0 +2024-08-31 15:17:59,750 INFO [train.py:1114] (0/4) Epoch 18, batch 250, loss[loss=0.2088, simple_loss=0.2768, pruned_loss=0.05176, ctc_loss=0.09344, over 19404.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2743, pruned_loss=0.05121, ctc_loss=0.09645, over 2755802.13 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-31 15:21:18,070 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=2.544e-03 +2024-08-31 15:22:32,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227120.0, ans=0.1 +2024-08-31 15:22:59,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=227173.33333333334, ans=0.0 +2024-08-31 15:24:04,300 INFO [train.py:1114] (0/4) Epoch 18, batch 300, loss[loss=0.232, simple_loss=0.291, pruned_loss=0.06337, ctc_loss=0.1157, over 19499.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2735, pruned_loss=0.05064, ctc_loss=0.09544, over 3000417.31 frames. 
], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-31 15:24:58,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=227333.33333333334, ans=0.2 +2024-08-31 15:25:05,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227333.33333333334, ans=0.125 +2024-08-31 15:25:05,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=227333.33333333334, ans=0.2 +2024-08-31 15:29:14,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=227440.0, ans=0.0 +2024-08-31 15:29:15,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0 +2024-08-31 15:30:29,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=227493.33333333334, ans=0.2 +2024-08-31 15:30:47,329 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.680e+02 1.932e+02 2.386e+02 3.920e+02, threshold=3.864e+02, percent-clipped=0.0 +2024-08-31 15:31:47,642 INFO [train.py:1114] (0/4) Epoch 18, batch 350, loss[loss=0.1769, simple_loss=0.2411, pruned_loss=0.04046, ctc_loss=0.07922, over 19741.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2735, pruned_loss=0.05075, ctc_loss=0.09563, over 3191008.27 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-31 15:33:40,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=15.0 +2024-08-31 15:33:47,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.40 vs. limit=15.0 +2024-08-31 15:33:50,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=227653.33333333334, ans=0.125 +2024-08-31 15:33:56,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=227706.66666666666, ans=0.0 +2024-08-31 15:34:38,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=227760.0, ans=0.125 +2024-08-31 15:34:38,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=227760.0, ans=0.2 +2024-08-31 15:34:57,667 INFO [train.py:1114] (0/4) Epoch 18, batch 400, loss[loss=0.2183, simple_loss=0.282, pruned_loss=0.05565, ctc_loss=0.1082, over 19506.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2738, pruned_loss=0.05095, ctc_loss=0.09579, over 3343005.44 frames. 
], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-31 15:35:07,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=227813.33333333334, ans=0.2 +2024-08-31 15:35:16,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=227866.66666666666, ans=0.0 +2024-08-31 15:35:47,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=227866.66666666666, ans=0.125 +2024-08-31 15:36:46,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=227920.0, ans=0.2 +2024-08-31 15:36:46,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.23 vs. limit=22.5 +2024-08-31 15:36:48,778 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.52 vs. limit=10.0 +2024-08-31 15:37:06,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=228026.66666666666, ans=0.0 +2024-08-31 15:37:11,052 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.718e+02 1.967e+02 2.336e+02 3.401e+02, threshold=3.934e+02, percent-clipped=0.0 +2024-08-31 15:37:37,963 INFO [train.py:1114] (0/4) Epoch 18, batch 450, loss[loss=0.2167, simple_loss=0.2828, pruned_loss=0.05421, ctc_loss=0.1054, over 19611.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2735, pruned_loss=0.05084, ctc_loss=0.09578, over 3451708.44 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-31 15:39:42,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=228186.66666666666, ans=0.1 +2024-08-31 15:39:47,846 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=15.0 +2024-08-31 15:39:53,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=228240.0, ans=0.125 +2024-08-31 15:39:53,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228240.0, ans=0.1 +2024-08-31 15:40:18,487 INFO [train.py:1114] (0/4) Epoch 18, batch 500, loss[loss=0.1928, simple_loss=0.2659, pruned_loss=0.04374, ctc_loss=0.08065, over 19634.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2727, pruned_loss=0.0505, ctc_loss=0.09535, over 3546079.07 frames. 
], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-31 15:40:44,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=228453.33333333334, ans=0.2 +2024-08-31 15:41:10,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.618e+02 1.812e+02 2.329e+02 3.946e+02, threshold=3.624e+02, percent-clipped=1.0 +2024-08-31 15:41:12,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=228560.0, ans=0.125 +2024-08-31 15:41:15,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228560.0, ans=0.1 +2024-08-31 15:41:17,482 INFO [train.py:1114] (0/4) Epoch 18, batch 550, loss[loss=0.2071, simple_loss=0.2797, pruned_loss=0.04888, ctc_loss=0.09156, over 19273.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2729, pruned_loss=0.05035, ctc_loss=0.09503, over 3608445.94 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-31 15:41:19,072 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:42:34,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys.whitening_limit, batch_count=228666.66666666666, ans=6.0 +2024-08-31 15:43:27,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=228720.0, ans=0.0 +2024-08-31 15:43:57,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=228826.66666666666, ans=0.95 +2024-08-31 15:44:18,843 INFO [train.py:1114] (0/4) Epoch 18, batch 600, loss[loss=0.2282, simple_loss=0.2872, pruned_loss=0.06148, ctc_loss=0.1154, over 19393.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.273, pruned_loss=0.05046, ctc_loss=0.09506, over 3666186.54 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-31 15:44:26,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=228880.0, ans=0.125 +2024-08-31 15:44:48,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.76 vs. limit=10.0 +2024-08-31 15:44:52,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. 
limit=6.0 +2024-08-31 15:44:54,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228986.66666666666, ans=0.1 +2024-08-31 15:45:09,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229040.0, ans=0.125 +2024-08-31 15:45:13,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=229040.0, ans=0.1 +2024-08-31 15:45:16,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=229040.0, ans=0.125 +2024-08-31 15:45:16,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229040.0, ans=0.125 +2024-08-31 15:45:20,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=229040.0, ans=0.2 +2024-08-31 15:45:28,759 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.735e+02 2.092e+02 3.203e+02 5.009e+02, threshold=4.184e+02, percent-clipped=13.0 +2024-08-31 15:45:38,279 INFO [train.py:1114] (0/4) Epoch 18, batch 650, loss[loss=0.1955, simple_loss=0.2651, pruned_loss=0.04564, ctc_loss=0.08656, over 19762.00 frames. ], tot_loss[loss=0.205, simple_loss=0.272, pruned_loss=0.05012, ctc_loss=0.09435, over 3716392.84 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-31 15:46:15,092 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.62 vs. limit=15.0 +2024-08-31 15:46:18,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=229146.66666666666, ans=0.025 +2024-08-31 15:46:20,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229146.66666666666, ans=0.1 +2024-08-31 15:46:30,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=229200.0, ans=0.0 +2024-08-31 15:46:46,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=229253.33333333334, ans=0.125 +2024-08-31 15:47:04,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=229360.0, ans=0.125 +2024-08-31 15:47:16,563 INFO [train.py:1114] (0/4) Epoch 18, batch 700, loss[loss=0.2004, simple_loss=0.2642, pruned_loss=0.05098, ctc_loss=0.08681, over 19714.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2725, pruned_loss=0.0503, ctc_loss=0.09465, over 3749192.59 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 16.0 +2024-08-31 15:47:21,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=229413.33333333334, ans=0.125 +2024-08-31 15:47:25,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=229413.33333333334, ans=0.125 +2024-08-31 15:47:25,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.12 vs. 
limit=15.0 +2024-08-31 15:47:31,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.55 vs. limit=15.0 +2024-08-31 15:47:54,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229573.33333333334, ans=0.1 +2024-08-31 15:48:10,575 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.672e+02 1.935e+02 2.401e+02 4.868e+02, threshold=3.870e+02, percent-clipped=1.0 +2024-08-31 15:48:16,523 INFO [train.py:1114] (0/4) Epoch 18, batch 750, loss[loss=0.2233, simple_loss=0.2905, pruned_loss=0.05572, ctc_loss=0.112, over 19502.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2726, pruned_loss=0.05037, ctc_loss=0.09477, over 3774959.33 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 16.0 +2024-08-31 15:48:22,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=229680.0, ans=0.2 +2024-08-31 15:48:27,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=229733.33333333334, ans=0.125 +2024-08-31 15:48:31,085 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:49:02,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=15.0 +2024-08-31 15:49:28,024 INFO [train.py:1114] (0/4) Epoch 18, batch 800, loss[loss=0.1859, simple_loss=0.2494, pruned_loss=0.04468, ctc_loss=0.08261, over 19807.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2725, pruned_loss=0.05037, ctc_loss=0.09481, over 3796787.06 frames. ], batch size: 49, lr: 8.37e-03, grad_scale: 32.0 +2024-08-31 15:49:32,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=229946.66666666666, ans=0.0 +2024-08-31 15:49:49,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=230000.0, ans=0.125 +2024-08-31 15:50:27,782 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.682e+02 1.957e+02 2.333e+02 3.697e+02, threshold=3.913e+02, percent-clipped=0.0 +2024-08-31 15:50:33,684 INFO [train.py:1114] (0/4) Epoch 18, batch 850, loss[loss=0.2076, simple_loss=0.2823, pruned_loss=0.04736, ctc_loss=0.09556, over 19633.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2727, pruned_loss=0.05065, ctc_loss=0.09541, over 3815142.45 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-31 15:51:21,846 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.80 vs. limit=10.0 +2024-08-31 15:51:30,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=230266.66666666666, ans=0.2 +2024-08-31 15:51:59,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230373.33333333334, ans=0.125 +2024-08-31 15:52:12,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.91 vs. 
limit=6.0 +2024-08-31 15:52:15,924 INFO [train.py:1114] (0/4) Epoch 18, batch 900, loss[loss=0.204, simple_loss=0.2645, pruned_loss=0.05281, ctc_loss=0.09467, over 19425.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2728, pruned_loss=0.05062, ctc_loss=0.0954, over 3819486.86 frames. ], batch size: 48, lr: 8.36e-03, grad_scale: 32.0 +2024-08-31 15:52:24,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0 +2024-08-31 15:52:25,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230480.0, ans=0.125 +2024-08-31 15:52:46,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=230586.66666666666, ans=0.125 +2024-08-31 15:53:00,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=230640.0, ans=0.0 +2024-08-31 15:53:01,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=230640.0, ans=0.0 +2024-08-31 15:53:02,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=230640.0, ans=0.2 +2024-08-31 15:53:02,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=230640.0, ans=0.125 +2024-08-31 15:53:12,022 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.645e+02 1.872e+02 2.411e+02 3.930e+02, threshold=3.745e+02, percent-clipped=1.0 +2024-08-31 15:53:46,102 INFO [train.py:1114] (0/4) Epoch 18, batch 950, loss[loss=0.2076, simple_loss=0.2671, pruned_loss=0.05377, ctc_loss=0.1015, over 19484.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2732, pruned_loss=0.05056, ctc_loss=0.09528, over 3820910.45 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-31 15:53:47,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=15.0 +2024-08-31 15:53:56,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=230746.66666666666, ans=0.2 +2024-08-31 15:54:22,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230906.66666666666, ans=0.125 +2024-08-31 15:54:28,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=230906.66666666666, ans=0.07 +2024-08-31 15:54:38,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=230960.0, ans=0.5 +2024-08-31 15:54:43,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=230960.0, ans=0.2 +2024-08-31 15:54:48,308 INFO [train.py:1114] (0/4) Epoch 18, batch 1000, loss[loss=0.1981, simple_loss=0.2647, pruned_loss=0.04651, ctc_loss=0.09609, over 19851.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2739, pruned_loss=0.05078, ctc_loss=0.09595, over 3815875.19 frames. 
], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-31 15:54:56,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=231013.33333333334, ans=0.0 +2024-08-31 15:54:59,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=231013.33333333334, ans=0.125 +2024-08-31 15:55:15,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=231120.0, ans=0.125 +2024-08-31 15:55:18,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=231120.0, ans=0.0 +2024-08-31 15:55:19,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=231120.0, ans=0.0 +2024-08-31 15:55:33,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231120.0, ans=0.1 +2024-08-31 15:55:54,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=231226.66666666666, ans=0.025 +2024-08-31 15:55:55,133 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.660e+02 1.836e+02 2.172e+02 3.389e+02, threshold=3.673e+02, percent-clipped=0.0 +2024-08-31 15:56:01,084 INFO [train.py:1114] (0/4) Epoch 18, batch 1050, loss[loss=0.2183, simple_loss=0.2904, pruned_loss=0.0534, ctc_loss=0.09846, over 19852.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2733, pruned_loss=0.0507, ctc_loss=0.09563, over 3822177.37 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-31 15:56:05,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=231280.0, ans=0.5 +2024-08-31 15:56:37,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=231440.0, ans=0.0 +2024-08-31 15:56:49,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=231493.33333333334, ans=0.125 +2024-08-31 15:56:59,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231493.33333333334, ans=0.1 +2024-08-31 15:57:01,157 INFO [train.py:1114] (0/4) Epoch 18, batch 1100, loss[loss=0.2016, simple_loss=0.2716, pruned_loss=0.04796, ctc_loss=0.08936, over 19596.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2727, pruned_loss=0.05044, ctc_loss=0.0952, over 3829672.02 frames. 
], batch size: 52, lr: 8.34e-03, grad_scale: 32.0 +2024-08-31 15:57:05,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=231546.66666666666, ans=0.0 +2024-08-31 15:57:13,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=231546.66666666666, ans=0.125 +2024-08-31 15:57:22,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=231600.0, ans=0.125 +2024-08-31 15:57:36,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=231653.33333333334, ans=0.125 +2024-08-31 15:57:56,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231760.0, ans=0.125 +2024-08-31 15:57:58,311 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.608e+02 1.860e+02 2.284e+02 4.941e+02, threshold=3.719e+02, percent-clipped=1.0 +2024-08-31 15:58:04,201 INFO [train.py:1114] (0/4) Epoch 18, batch 1150, loss[loss=0.195, simple_loss=0.2622, pruned_loss=0.0462, ctc_loss=0.08858, over 19608.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2731, pruned_loss=0.05076, ctc_loss=0.09602, over 3827298.87 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0 +2024-08-31 15:58:10,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231813.33333333334, ans=0.1 +2024-08-31 15:58:10,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.96 vs. limit=15.0 +2024-08-31 15:58:17,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231813.33333333334, ans=0.1 +2024-08-31 15:58:23,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231866.66666666666, ans=0.1 +2024-08-31 15:58:31,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=231866.66666666666, ans=0.2 +2024-08-31 15:58:32,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=231866.66666666666, ans=0.2 +2024-08-31 15:58:41,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=231920.0, ans=0.125 +2024-08-31 15:58:50,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=231920.0, ans=0.125 +2024-08-31 15:59:17,228 INFO [train.py:1114] (0/4) Epoch 18, batch 1200, loss[loss=0.2195, simple_loss=0.2913, pruned_loss=0.05367, ctc_loss=0.1011, over 19837.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2745, pruned_loss=0.05132, ctc_loss=0.09701, over 3824043.11 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-31 15:59:24,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.33 vs. 
limit=10.0 +2024-08-31 15:59:58,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=232240.0, ans=15.0 +2024-08-31 16:00:02,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=232240.0, ans=0.125 +2024-08-31 16:00:07,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232293.33333333334, ans=0.1 +2024-08-31 16:00:07,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=232293.33333333334, ans=0.07 +2024-08-31 16:00:12,193 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.681e+02 1.869e+02 2.236e+02 3.755e+02, threshold=3.738e+02, percent-clipped=1.0 +2024-08-31 16:00:18,281 INFO [train.py:1114] (0/4) Epoch 18, batch 1250, loss[loss=0.2426, simple_loss=0.296, pruned_loss=0.06858, ctc_loss=0.1301, over 19507.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2754, pruned_loss=0.05167, ctc_loss=0.0976, over 3842402.06 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-31 16:00:25,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=232346.66666666666, ans=0.2 +2024-08-31 16:00:40,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=232400.0, ans=0.025 +2024-08-31 16:01:12,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=232560.0, ans=0.025 +2024-08-31 16:01:16,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=232560.0, ans=0.125 +2024-08-31 16:01:22,409 INFO [train.py:1114] (0/4) Epoch 18, batch 1300, loss[loss=0.2198, simple_loss=0.2854, pruned_loss=0.05603, ctc_loss=0.1054, over 18824.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2739, pruned_loss=0.05103, ctc_loss=0.09614, over 3845983.72 frames. 
], batch size: 76, lr: 8.32e-03, grad_scale: 32.0 +2024-08-31 16:01:34,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=232666.66666666666, ans=0.2 +2024-08-31 16:01:41,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=232666.66666666666, ans=0.125 +2024-08-31 16:01:50,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=232720.0, ans=10.0 +2024-08-31 16:01:51,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232720.0, ans=0.125 +2024-08-31 16:02:01,861 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:02:20,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=232826.66666666666, ans=0.125 +2024-08-31 16:02:21,663 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.351e+02 1.758e+02 2.176e+02 2.645e+02 4.342e+02, threshold=4.353e+02, percent-clipped=3.0 +2024-08-31 16:02:27,589 INFO [train.py:1114] (0/4) Epoch 18, batch 1350, loss[loss=0.2034, simple_loss=0.2618, pruned_loss=0.05271, ctc_loss=0.09872, over 19761.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2732, pruned_loss=0.05071, ctc_loss=0.09531, over 3857349.73 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 32.0 +2024-08-31 16:02:47,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=232933.33333333334, ans=0.125 +2024-08-31 16:03:11,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=233040.0, ans=0.2 +2024-08-31 16:03:27,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=233093.33333333334, ans=0.0 +2024-08-31 16:03:29,589 INFO [train.py:1114] (0/4) Epoch 18, batch 1400, loss[loss=0.1968, simple_loss=0.2528, pruned_loss=0.05174, ctc_loss=0.09347, over 19657.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2735, pruned_loss=0.05097, ctc_loss=0.09583, over 3864164.22 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 32.0 +2024-08-31 16:03:40,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.22 vs. limit=22.5 +2024-08-31 16:04:10,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=233253.33333333334, ans=0.05 +2024-08-31 16:04:12,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.06 vs. limit=6.0 +2024-08-31 16:04:18,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.30 vs. limit=15.0 +2024-08-31 16:04:36,286 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.655e+02 1.916e+02 2.338e+02 3.956e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-31 16:04:42,278 INFO [train.py:1114] (0/4) Epoch 18, batch 1450, loss[loss=0.2014, simple_loss=0.2796, pruned_loss=0.04547, ctc_loss=0.08067, over 19668.00 frames. 
], tot_loss[loss=0.2068, simple_loss=0.2736, pruned_loss=0.05086, ctc_loss=0.09551, over 3861721.28 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 32.0 +2024-08-31 16:05:13,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-08-31 16:05:25,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.32 vs. limit=15.0 +2024-08-31 16:05:42,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233626.66666666666, ans=0.125 +2024-08-31 16:05:48,801 INFO [train.py:1114] (0/4) Epoch 18, batch 1500, loss[loss=0.2056, simple_loss=0.2747, pruned_loss=0.04873, ctc_loss=0.09765, over 19584.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2742, pruned_loss=0.05114, ctc_loss=0.09614, over 3861790.55 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 32.0 +2024-08-31 16:05:49,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=233680.0, ans=0.0 +2024-08-31 16:05:54,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=233680.0, ans=0.0 +2024-08-31 16:06:04,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233733.33333333334, ans=0.1 +2024-08-31 16:06:05,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233733.33333333334, ans=0.1 +2024-08-31 16:06:35,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233840.0, ans=0.125 +2024-08-31 16:06:50,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.669e+02 1.866e+02 2.355e+02 3.552e+02, threshold=3.733e+02, percent-clipped=0.0 +2024-08-31 16:07:06,039 INFO [train.py:1114] (0/4) Epoch 18, batch 1550, loss[loss=0.229, simple_loss=0.294, pruned_loss=0.06025, ctc_loss=0.1087, over 19603.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2741, pruned_loss=0.05141, ctc_loss=0.09686, over 3845633.94 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 32.0 +2024-08-31 16:07:10,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233946.66666666666, ans=0.125 +2024-08-31 16:07:21,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=234000.0, ans=0.2 +2024-08-31 16:07:27,608 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.06 vs. limit=15.0 +2024-08-31 16:07:36,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234053.33333333334, ans=0.1 +2024-08-31 16:08:07,316 INFO [train.py:1114] (0/4) Epoch 18, batch 1600, loss[loss=0.2593, simple_loss=0.3127, pruned_loss=0.07495, ctc_loss=0.1402, over 19848.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.274, pruned_loss=0.05127, ctc_loss=0.09655, over 3835104.16 frames. 
], batch size: 57, lr: 8.29e-03, grad_scale: 32.0 +2024-08-31 16:08:09,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234213.33333333334, ans=0.125 +2024-08-31 16:08:19,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.28 vs. limit=22.5 +2024-08-31 16:08:27,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=234266.66666666666, ans=0.0 +2024-08-31 16:08:39,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=234320.0, ans=0.2 +2024-08-31 16:09:20,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.787e+02 2.153e+02 2.672e+02 5.491e+02, threshold=4.305e+02, percent-clipped=8.0 +2024-08-31 16:09:23,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=234426.66666666666, ans=0.025 +2024-08-31 16:09:25,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234480.0, ans=0.125 +2024-08-31 16:09:26,612 INFO [train.py:1114] (0/4) Epoch 18, batch 1650, loss[loss=0.1976, simple_loss=0.2797, pruned_loss=0.04086, ctc_loss=0.08446, over 19673.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2733, pruned_loss=0.05092, ctc_loss=0.09615, over 3832693.46 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0 +2024-08-31 16:09:29,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=234480.0, ans=0.0 +2024-08-31 16:12:47,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234533.33333333334, ans=0.1 +2024-08-31 16:13:05,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=234533.33333333334, ans=0.125 +2024-08-31 16:13:42,415 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-44000.pt +2024-08-31 16:14:08,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234693.33333333334, ans=0.125 +2024-08-31 16:14:13,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234693.33333333334, ans=0.1 +2024-08-31 16:14:15,749 INFO [train.py:1114] (0/4) Epoch 18, batch 1700, loss[loss=0.1767, simple_loss=0.2427, pruned_loss=0.04058, ctc_loss=0.0741, over 19685.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2732, pruned_loss=0.05075, ctc_loss=0.09573, over 3846869.25 frames. 
], batch size: 46, lr: 8.28e-03, grad_scale: 32.0 +2024-08-31 16:14:17,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=234746.66666666666, ans=0.04949747468305833 +2024-08-31 16:14:40,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234853.33333333334, ans=0.125 +2024-08-31 16:14:41,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=234853.33333333334, ans=0.125 +2024-08-31 16:14:48,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234853.33333333334, ans=0.125 +2024-08-31 16:14:51,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=234906.66666666666, ans=0.05 +2024-08-31 16:14:52,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=234906.66666666666, ans=0.09899494936611666 +2024-08-31 16:14:54,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234906.66666666666, ans=0.125 +2024-08-31 16:15:07,767 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.694e+02 2.038e+02 2.484e+02 5.869e+02, threshold=4.076e+02, percent-clipped=3.0 +2024-08-31 16:15:13,540 INFO [train.py:1114] (0/4) Epoch 18, batch 1750, loss[loss=0.194, simple_loss=0.2542, pruned_loss=0.04847, ctc_loss=0.09223, over 19659.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2727, pruned_loss=0.05061, ctc_loss=0.09556, over 3850963.92 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0 +2024-08-31 16:15:40,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=235066.66666666666, ans=0.0 +2024-08-31 16:16:11,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=235226.66666666666, ans=0.125 +2024-08-31 16:16:18,926 INFO [train.py:1114] (0/4) Epoch 18, batch 1800, loss[loss=0.2069, simple_loss=0.279, pruned_loss=0.0482, ctc_loss=0.0961, over 19610.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.273, pruned_loss=0.05071, ctc_loss=0.09551, over 3852616.49 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 32.0 +2024-08-31 16:16:43,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=235386.66666666666, ans=0.125 +2024-08-31 16:16:45,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.27 vs. limit=10.0 +2024-08-31 16:16:52,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=235440.0, ans=0.0 +2024-08-31 16:16:58,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=235440.0, ans=0.2 +2024-08-31 16:17:12,080 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.320e+02 1.739e+02 2.099e+02 2.606e+02 4.220e+02, threshold=4.197e+02, percent-clipped=1.0 +2024-08-31 16:17:16,671 INFO [train.py:1114] (0/4) Epoch 18, batch 1850, loss[loss=0.2314, simple_loss=0.296, pruned_loss=0.06048, ctc_loss=0.1147, over 19590.00 frames. 
], tot_loss[loss=0.2069, simple_loss=0.2735, pruned_loss=0.05093, ctc_loss=0.09598, over 3855800.83 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 16.0 +2024-08-31 16:17:16,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=235546.66666666666, ans=0.025 +2024-08-31 16:17:37,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=235600.0, ans=0.125 +2024-08-31 16:17:42,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235600.0, ans=0.0 +2024-08-31 16:17:45,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=235653.33333333334, ans=0.125 +2024-08-31 16:17:51,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0 +2024-08-31 16:17:58,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=235706.66666666666, ans=0.0 +2024-08-31 16:18:12,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=235760.0, ans=0.0 +2024-08-31 16:18:21,114 INFO [train.py:1114] (0/4) Epoch 18, batch 1900, loss[loss=0.197, simple_loss=0.2778, pruned_loss=0.04149, ctc_loss=0.08285, over 19649.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2738, pruned_loss=0.05091, ctc_loss=0.09572, over 3860635.93 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 16.0 +2024-08-31 16:18:39,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=235866.66666666666, ans=0.05 +2024-08-31 16:18:47,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=235920.0, ans=0.125 +2024-08-31 16:19:14,240 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.296e+02 1.623e+02 1.837e+02 2.195e+02 5.135e+02, threshold=3.673e+02, percent-clipped=2.0 +2024-08-31 16:19:18,758 INFO [train.py:1114] (0/4) Epoch 18, batch 1950, loss[loss=0.2206, simple_loss=0.2807, pruned_loss=0.05795, ctc_loss=0.1115, over 19579.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2747, pruned_loss=0.05108, ctc_loss=0.09583, over 3870189.79 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 16.0 +2024-08-31 16:20:22,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236133.33333333334, ans=0.0 +2024-08-31 16:20:22,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=236133.33333333334, ans=0.125 +2024-08-31 16:20:41,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=236186.66666666666, ans=0.0 +2024-08-31 16:21:12,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=236293.33333333334, ans=0.125 +2024-08-31 16:21:21,680 INFO [train.py:1114] (0/4) Epoch 18, batch 2000, loss[loss=0.1879, simple_loss=0.2471, pruned_loss=0.04608, ctc_loss=0.09141, over 19655.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2751, pruned_loss=0.05115, ctc_loss=0.0962, over 3855253.43 frames. 
], batch size: 45, lr: 8.25e-03, grad_scale: 32.0 +2024-08-31 16:21:32,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=236400.0, ans=0.125 +2024-08-31 16:21:54,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=15.0 +2024-08-31 16:21:58,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=236506.66666666666, ans=0.125 +2024-08-31 16:22:01,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236506.66666666666, ans=0.125 +2024-08-31 16:22:06,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236560.0, ans=0.1 +2024-08-31 16:22:09,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=236560.0, ans=0.0 +2024-08-31 16:22:14,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.400e+02 1.704e+02 2.096e+02 2.751e+02 4.638e+02, threshold=4.193e+02, percent-clipped=6.0 +2024-08-31 16:22:16,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=12.0 +2024-08-31 16:22:19,160 INFO [train.py:1114] (0/4) Epoch 18, batch 2050, loss[loss=0.1735, simple_loss=0.2419, pruned_loss=0.03802, ctc_loss=0.07251, over 19732.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2741, pruned_loss=0.05116, ctc_loss=0.09626, over 3850587.51 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 32.0 +2024-08-31 16:22:19,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=236613.33333333334, ans=0.0 +2024-08-31 16:22:48,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.94 vs. limit=22.5 +2024-08-31 16:23:16,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236826.66666666666, ans=0.125 +2024-08-31 16:23:16,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236826.66666666666, ans=0.0 +2024-08-31 16:23:21,344 INFO [train.py:1114] (0/4) Epoch 18, batch 2100, loss[loss=0.2001, simple_loss=0.2706, pruned_loss=0.04705, ctc_loss=0.08882, over 19774.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2741, pruned_loss=0.05122, ctc_loss=0.09644, over 3858150.81 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 32.0 +2024-08-31 16:23:27,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=236880.0, ans=0.125 +2024-08-31 16:23:30,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236880.0, ans=0.1 +2024-08-31 16:23:43,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.37 vs. 
limit=10.0
+2024-08-31 16:23:57,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0
+2024-08-31 16:24:07,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=15.0
+2024-08-31 16:24:08,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=237040.0, ans=0.125
+2024-08-31 16:24:13,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=237093.33333333334, ans=0.125
+2024-08-31 16:24:16,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.88 vs. limit=6.0
+2024-08-31 16:24:25,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237093.33333333334, ans=0.125
+2024-08-31 16:24:27,117 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.628e+02 1.802e+02 2.351e+02 4.404e+02, threshold=3.604e+02, percent-clipped=1.0
+2024-08-31 16:24:27,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=237093.33333333334, ans=0.0
+2024-08-31 16:24:31,675 INFO [train.py:1114] (0/4) Epoch 18, batch 2150, loss[loss=0.1928, simple_loss=0.2653, pruned_loss=0.0439, ctc_loss=0.08122, over 19594.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2728, pruned_loss=0.05048, ctc_loss=0.09506, over 3868534.95 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 32.0
+2024-08-31 16:24:41,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=237146.66666666666, ans=0.125
+2024-08-31 16:24:42,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237146.66666666666, ans=0.1
+2024-08-31 16:24:46,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0
+2024-08-31 16:24:49,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=237200.0, ans=0.07
+2024-08-31 16:25:03,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=237253.33333333334, ans=0.125
+2024-08-31 16:25:31,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=237360.0, ans=0.125
+2024-08-31 16:25:37,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=237360.0, ans=0.025
+2024-08-31 16:25:37,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0
+2024-08-31 16:25:40,257 INFO [train.py:1114] (0/4) Epoch 18, batch 2200, loss[loss=0.212, simple_loss=0.2745, pruned_loss=0.05391, ctc_loss=0.1042, over 19586.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2727, pruned_loss=0.05028, ctc_loss=0.09476, over 3867468.35 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 32.0
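The ScheduledFloat entries above track scalar training constants (dropout rates, balancer probabilities, bypass and skip rates) whose current value (`ans`) is looked up from a schedule keyed on `batch_count`. As a rough sketch of how such a piecewise-linear schedule behaves, with invented breakpoints rather than anything read from scaling.py or from this run:

```python
import bisect

class PiecewiseLinearSchedule:
    """A float linearly interpolated between (batch_count, value) breakpoints."""

    def __init__(self, *points: tuple[float, float]):
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# Hypothetical example: a skip rate decaying from 0.5 to 0.05 over the
# first 20k batches and flat afterwards (breakpoints are made up here).
skip_rate = PiecewiseLinearSchedule((0.0, 0.5), (20000.0, 0.05))
print(skip_rate(237040.0))  # -> 0.05
```

That the `ans` values near batch_count=237k keep repeating the same constants is consistent with schedules that reached their final breakpoint long ago.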
+2024-08-31 16:25:49,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=237413.33333333334, ans=0.125
+2024-08-31 16:25:50,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237413.33333333334, ans=0.125
+2024-08-31 16:26:33,721 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.652e+02 1.938e+02 2.493e+02 4.901e+02, threshold=3.877e+02, percent-clipped=6.0
+2024-08-31 16:26:33,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=237626.66666666666, ans=0.0
+2024-08-31 16:26:38,344 INFO [train.py:1114] (0/4) Epoch 18, batch 2250, loss[loss=0.205, simple_loss=0.2827, pruned_loss=0.04618, ctc_loss=0.08718, over 19603.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2735, pruned_loss=0.05069, ctc_loss=0.09536, over 3867383.66 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 32.0
+2024-08-31 16:27:19,095 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:27:48,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.94 vs. limit=6.0
+2024-08-31 16:27:50,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0
+2024-08-31 16:27:51,886 INFO [train.py:1114] (0/4) Epoch 18, batch 2300, loss[loss=0.1914, simple_loss=0.2602, pruned_loss=0.04518, ctc_loss=0.08068, over 19496.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2724, pruned_loss=0.05051, ctc_loss=0.09497, over 3860900.49 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 32.0
+2024-08-31 16:27:52,046 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:28:05,935 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.06 vs. limit=15.0
+2024-08-31 16:28:13,638 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:28:47,463 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.696e+02 1.848e+02 2.393e+02 3.836e+02, threshold=3.696e+02, percent-clipped=0.0
+2024-08-31 16:29:07,662 INFO [train.py:1114] (0/4) Epoch 18, batch 2350, loss[loss=0.2186, simple_loss=0.2904, pruned_loss=0.05367, ctc_loss=0.09878, over 19647.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2723, pruned_loss=0.0504, ctc_loss=0.09478, over 3863844.14 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 32.0
+2024-08-31 16:29:23,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0
+2024-08-31 16:29:27,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.89 vs. 
limit=12.0 +2024-08-31 16:29:31,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=238320.0, ans=0.2 +2024-08-31 16:29:42,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238373.33333333334, ans=0.1 +2024-08-31 16:30:24,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.89 vs. limit=12.0 +2024-08-31 16:30:28,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.21 vs. limit=10.0 +2024-08-31 16:30:38,501 INFO [train.py:1114] (0/4) Epoch 18, batch 2400, loss[loss=0.2092, simple_loss=0.2786, pruned_loss=0.05087, ctc_loss=0.09501, over 19382.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2748, pruned_loss=0.05163, ctc_loss=0.09683, over 3858008.55 frames. ], batch size: 67, lr: 8.22e-03, grad_scale: 32.0 +2024-08-31 16:30:59,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238533.33333333334, ans=0.125 +2024-08-31 16:31:06,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=238586.66666666666, ans=0.125 +2024-08-31 16:31:20,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=22.5 +2024-08-31 16:31:23,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.57 vs. limit=10.0 +2024-08-31 16:31:24,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=238640.0, ans=0.125 +2024-08-31 16:31:25,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=238640.0, ans=0.125 +2024-08-31 16:31:39,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=238693.33333333334, ans=0.125 +2024-08-31 16:31:40,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=238693.33333333334, ans=0.2 +2024-08-31 16:31:47,476 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.444e+02 1.682e+02 1.835e+02 2.125e+02 4.662e+02, threshold=3.671e+02, percent-clipped=5.0 +2024-08-31 16:31:52,089 INFO [train.py:1114] (0/4) Epoch 18, batch 2450, loss[loss=0.2604, simple_loss=0.3038, pruned_loss=0.07831, ctc_loss=0.1507, over 13211.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2782, pruned_loss=0.05401, ctc_loss=0.1017, over 3733992.69 frames. ], batch size: 140, lr: 8.21e-03, grad_scale: 32.0 +2024-08-31 16:32:29,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=238906.66666666666, ans=0.0 +2024-08-31 16:32:31,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.48 vs. 
limit=6.0 +2024-08-31 16:32:40,065 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-18.pt +2024-08-31 16:33:43,925 INFO [train.py:1114] (0/4) Epoch 19, batch 0, loss[loss=0.2257, simple_loss=0.2816, pruned_loss=0.06286, ctc_loss=0.1104, over 19421.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2816, pruned_loss=0.06286, ctc_loss=0.1104, over 19421.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-31 16:33:43,926 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-31 16:33:51,182 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0565, 2.2308, 2.9810, 3.4345], device='cuda:0') +2024-08-31 16:34:00,545 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1846, simple_loss=0.2728, pruned_loss=0.03584, ctc_loss=0.06159, over 944034.00 frames. +2024-08-31 16:34:01,380 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13675MB +2024-08-31 16:34:02,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=238954.66666666666, ans=0.125 +2024-08-31 16:34:17,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.69 vs. limit=15.0 +2024-08-31 16:34:28,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=239061.33333333334, ans=0.0 +2024-08-31 16:34:43,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-31 16:34:48,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239114.66666666666, ans=0.0 +2024-08-31 16:35:00,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.98 vs. limit=10.0 +2024-08-31 16:35:04,411 INFO [train.py:1114] (0/4) Epoch 19, batch 50, loss[loss=0.1963, simple_loss=0.2596, pruned_loss=0.04818, ctc_loss=0.09174, over 19679.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2756, pruned_loss=0.05164, ctc_loss=0.09704, over 845033.20 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-31 16:35:12,503 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.795e+02 2.006e+02 2.342e+02 4.821e+02, threshold=4.012e+02, percent-clipped=4.0 +2024-08-31 16:35:13,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=239221.33333333334, ans=0.0 +2024-08-31 16:35:32,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.40 vs. limit=10.0 +2024-08-31 16:35:38,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=239381.33333333334, ans=0.025 +2024-08-31 16:35:41,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-31 16:35:45,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. 
limit=6.0
+2024-08-31 16:35:48,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239381.33333333334, ans=0.0
+2024-08-31 16:35:53,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239434.66666666666, ans=0.1
+2024-08-31 16:35:54,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239434.66666666666, ans=0.125
+2024-08-31 16:36:02,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=239488.0, ans=0.025
+2024-08-31 16:36:03,810 INFO [train.py:1114] (0/4) Epoch 19, batch 100, loss[loss=0.1759, simple_loss=0.2497, pruned_loss=0.03689, ctc_loss=0.07089, over 19728.00 frames. ], tot_loss[loss=0.21, simple_loss=0.277, pruned_loss=0.05188, ctc_loss=0.09826, over 1498716.65 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:36:14,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=239488.0, ans=0.0
+2024-08-31 16:36:46,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=239648.0, ans=0.0
+2024-08-31 16:36:51,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.12 vs. limit=10.0
+2024-08-31 16:37:06,676 INFO [train.py:1114] (0/4) Epoch 19, batch 150, loss[loss=0.1766, simple_loss=0.2442, pruned_loss=0.03942, ctc_loss=0.07549, over 19754.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2736, pruned_loss=0.05025, ctc_loss=0.09537, over 2027546.02 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
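In the train.py entries, the first loss[...] block reports the current batch and tot_loss[...] a running average over recent frames; simple_loss and pruned_loss look like the two terms of a pruned-RNN-T objective, with ctc_loss as the auxiliary CTC term. The reported totals are consistent with a weighted sum using scales of 0.5 on the simple loss and 0.2 on the CTC loss; those scales are icefall's common defaults, inferred here from the logged numbers rather than read from this run's configuration. A quick check against the batch 150 entry just above:

```python
# Hypothetical re-check of the combined loss reported by train.py.
# The scale factors (0.5 / 1.0 / 0.2) are inferred from the logged values
# and match icefall's usual defaults; they are an assumption, not a quote
# from this run's command line.

def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float,
                  simple_scale: float = 0.5, ctc_scale: float = 0.2) -> float:
    """Weighted sum matching how the logged `loss` relates to its parts."""
    return simple_scale * simple_loss + pruned_loss + ctc_scale * ctc_loss

# tot_loss components from the "Epoch 19, batch 150" entry above:
tot = combined_loss(simple_loss=0.2736, pruned_loss=0.05025, ctc_loss=0.09537)
print(f"{tot:.4f}")  # -> 0.2061, matching the logged tot_loss[loss=0.2061, ...]
```

The same relation reproduces the other tot_loss entries in this log, e.g. 0.5 * 0.2728 + 0.05048 + 0.2 * 0.09506 ≈ 0.2059 for the Epoch 18, batch 2150 entry.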
+2024-08-31 16:37:10,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239754.66666666666, ans=0.125
+2024-08-31 16:37:10,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=239754.66666666666, ans=0.09899494936611666
+2024-08-31 16:37:15,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.762e+02 1.953e+02 2.445e+02 3.524e+02, threshold=3.906e+02, percent-clipped=0.0
+2024-08-31 16:37:17,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239808.0, ans=0.125
+2024-08-31 16:37:25,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239808.0, ans=0.1
+2024-08-31 16:37:29,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239808.0, ans=0.125
+2024-08-31 16:37:43,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=239914.66666666666, ans=0.0
+2024-08-31 16:37:56,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=239968.0, ans=0.025
+2024-08-31 16:38:07,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=239968.0, ans=0.025
+2024-08-31 16:38:09,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239968.0, ans=0.125
+2024-08-31 16:38:14,087 INFO [train.py:1114] (0/4) Epoch 19, batch 200, loss[loss=0.2108, simple_loss=0.2765, pruned_loss=0.05182, ctc_loss=0.1036, over 18444.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.271, pruned_loss=0.04897, ctc_loss=0.09241, over 2435779.19 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0
+2024-08-31 16:38:19,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240021.33333333334, ans=0.125
+2024-08-31 16:38:19,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.96 vs. 
limit=22.5 +2024-08-31 16:38:20,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240021.33333333334, ans=0.1 +2024-08-31 16:38:27,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=240074.66666666666, ans=0.07 +2024-08-31 16:38:36,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=240128.0, ans=0.2 +2024-08-31 16:38:39,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-31 16:38:44,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=240128.0, ans=0.1 +2024-08-31 16:38:53,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-31 16:39:01,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240234.66666666666, ans=0.125 +2024-08-31 16:39:13,535 INFO [train.py:1114] (0/4) Epoch 19, batch 250, loss[loss=0.217, simple_loss=0.2828, pruned_loss=0.0553, ctc_loss=0.1015, over 19356.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2706, pruned_loss=0.04899, ctc_loss=0.0924, over 2756817.04 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-31 16:39:27,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.313e+02 1.733e+02 2.186e+02 2.853e+02 4.755e+02, threshold=4.372e+02, percent-clipped=7.0 +2024-08-31 16:40:01,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=240448.0, ans=0.125 +2024-08-31 16:40:06,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-31 16:40:09,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-31 16:40:11,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.00 vs. limit=15.0 +2024-08-31 16:40:12,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-31 16:40:19,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-31 16:40:20,379 INFO [train.py:1114] (0/4) Epoch 19, batch 300, loss[loss=0.2278, simple_loss=0.2955, pruned_loss=0.05849, ctc_loss=0.1078, over 19482.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2703, pruned_loss=0.04882, ctc_loss=0.0919, over 3000995.93 frames. 
], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-31 16:40:30,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=240554.66666666666, ans=0.0 +2024-08-31 16:40:41,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240608.0, ans=0.1 +2024-08-31 16:40:42,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.22 vs. limit=6.0 +2024-08-31 16:40:52,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=240661.33333333334, ans=0.0 +2024-08-31 16:40:58,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0 +2024-08-31 16:41:04,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=240714.66666666666, ans=0.125 +2024-08-31 16:41:06,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-31 16:41:17,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240768.0, ans=0.1 +2024-08-31 16:41:21,966 INFO [train.py:1114] (0/4) Epoch 19, batch 350, loss[loss=0.1908, simple_loss=0.2551, pruned_loss=0.04546, ctc_loss=0.08886, over 19748.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2711, pruned_loss=0.04889, ctc_loss=0.09209, over 3190586.01 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-31 16:41:22,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=240821.33333333334, ans=0.0 +2024-08-31 16:41:30,308 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.653e+02 1.904e+02 2.349e+02 4.016e+02, threshold=3.809e+02, percent-clipped=0.0 +2024-08-31 16:42:25,376 INFO [train.py:1114] (0/4) Epoch 19, batch 400, loss[loss=0.2128, simple_loss=0.2805, pruned_loss=0.0534, ctc_loss=0.09571, over 19494.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2711, pruned_loss=0.04919, ctc_loss=0.0926, over 3342052.19 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:42:36,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=241088.0, ans=0.125 +2024-08-31 16:43:02,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=241194.66666666666, ans=0.0 +2024-08-31 16:43:15,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.99 vs. limit=15.0 +2024-08-31 16:43:21,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=241301.33333333334, ans=0.025 +2024-08-31 16:43:32,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=241301.33333333334, ans=0.2 +2024-08-31 16:43:34,368 INFO [train.py:1114] (0/4) Epoch 19, batch 450, loss[loss=0.199, simple_loss=0.2793, pruned_loss=0.0417, ctc_loss=0.08852, over 19626.00 frames. 
], tot_loss[loss=0.2035, simple_loss=0.2713, pruned_loss=0.04932, ctc_loss=0.0928, over 3450414.33 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:43:42,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.686e+02 1.896e+02 2.370e+02 4.152e+02, threshold=3.792e+02, percent-clipped=1.0 +2024-08-31 16:43:47,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-31 16:43:48,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=241408.0, ans=0.125 +2024-08-31 16:43:49,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=241408.0, ans=0.025 +2024-08-31 16:43:54,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.18 vs. limit=12.0 +2024-08-31 16:43:57,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=241461.33333333334, ans=0.0 +2024-08-31 16:43:58,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.35 vs. limit=22.5 +2024-08-31 16:44:03,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.76 vs. limit=12.0 +2024-08-31 16:44:06,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=241461.33333333334, ans=0.2 +2024-08-31 16:44:35,440 INFO [train.py:1114] (0/4) Epoch 19, batch 500, loss[loss=0.2209, simple_loss=0.2861, pruned_loss=0.05703, ctc_loss=0.1038, over 19656.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2705, pruned_loss=0.04898, ctc_loss=0.09221, over 3545263.34 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:44:36,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=241621.33333333334, ans=0.125 +2024-08-31 16:45:17,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=241781.33333333334, ans=0.2 +2024-08-31 16:45:30,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=241834.66666666666, ans=0.025 +2024-08-31 16:45:49,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=241834.66666666666, ans=0.0 +2024-08-31 16:45:50,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-31 16:46:04,258 INFO [train.py:1114] (0/4) Epoch 19, batch 550, loss[loss=0.1871, simple_loss=0.2622, pruned_loss=0.04076, ctc_loss=0.07639, over 19368.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2711, pruned_loss=0.04922, ctc_loss=0.0927, over 3607835.04 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:46:04,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.44 vs. 
limit=15.0 +2024-08-31 16:46:12,723 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.697e+02 1.983e+02 2.191e+02 3.507e+02, threshold=3.966e+02, percent-clipped=0.0 +2024-08-31 16:46:29,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=241941.33333333334, ans=0.125 +2024-08-31 16:46:48,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=241994.66666666666, ans=0.2 +2024-08-31 16:46:48,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-31 16:47:13,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.89 vs. limit=6.0 +2024-08-31 16:47:16,272 INFO [train.py:1114] (0/4) Epoch 19, batch 600, loss[loss=0.2246, simple_loss=0.2889, pruned_loss=0.05831, ctc_loss=0.1095, over 19400.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2713, pruned_loss=0.04937, ctc_loss=0.093, over 3666587.04 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:47:16,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=242154.66666666666, ans=0.1 +2024-08-31 16:47:22,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=242154.66666666666, ans=0.125 +2024-08-31 16:47:26,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.40 vs. limit=22.5 +2024-08-31 16:48:15,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242314.66666666666, ans=0.1 +2024-08-31 16:48:17,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242314.66666666666, ans=0.125 +2024-08-31 16:48:21,242 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:48:27,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=242368.0, ans=0.125 +2024-08-31 16:48:32,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.14 vs. limit=10.0 +2024-08-31 16:48:36,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242368.0, ans=0.1 +2024-08-31 16:48:39,673 INFO [train.py:1114] (0/4) Epoch 19, batch 650, loss[loss=0.1616, simple_loss=0.2391, pruned_loss=0.03006, ctc_loss=0.06003, over 19773.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2702, pruned_loss=0.04888, ctc_loss=0.092, over 3716546.96 frames. 
], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:48:41,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=242421.33333333334, ans=0.09899494936611666 +2024-08-31 16:48:48,397 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.784e+02 2.044e+02 2.793e+02 4.792e+02, threshold=4.088e+02, percent-clipped=6.0 +2024-08-31 16:48:59,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=242474.66666666666, ans=0.125 +2024-08-31 16:49:31,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-31 16:49:38,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-31 16:49:41,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-31 16:50:02,075 INFO [train.py:1114] (0/4) Epoch 19, batch 700, loss[loss=0.1919, simple_loss=0.2632, pruned_loss=0.04391, ctc_loss=0.08173, over 19733.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2704, pruned_loss=0.04907, ctc_loss=0.0922, over 3748806.62 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:50:06,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242688.0, ans=0.0 +2024-08-31 16:50:07,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=242688.0, ans=0.2 +2024-08-31 16:50:48,781 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:52:16,517 INFO [train.py:1114] (0/4) Epoch 19, batch 750, loss[loss=0.2168, simple_loss=0.2923, pruned_loss=0.05183, ctc_loss=0.09421, over 19496.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2703, pruned_loss=0.04896, ctc_loss=0.09206, over 3774571.12 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:52:40,601 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.707e+02 2.012e+02 2.576e+02 4.596e+02, threshold=4.024e+02, percent-clipped=2.0 +2024-08-31 16:52:42,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=242954.66666666666, ans=0.125 +2024-08-31 16:52:48,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=243008.0, ans=0.025 +2024-08-31 16:52:50,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=243008.0, ans=0.125 +2024-08-31 16:53:02,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=243061.33333333334, ans=0.025 +2024-08-31 16:53:36,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=243168.0, ans=0.04949747468305833 +2024-08-31 16:53:40,980 INFO [train.py:1114] (0/4) Epoch 19, batch 800, loss[loss=0.1921, simple_loss=0.2552, pruned_loss=0.0471, ctc_loss=0.08684, over 19802.00 frames. 
], tot_loss[loss=0.2029, simple_loss=0.2707, pruned_loss=0.04917, ctc_loss=0.0922, over 3795407.26 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:53:42,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243221.33333333334, ans=0.1 +2024-08-31 16:53:57,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=243274.66666666666, ans=0.125 +2024-08-31 16:54:36,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243434.66666666666, ans=0.125 +2024-08-31 16:54:52,035 INFO [train.py:1114] (0/4) Epoch 19, batch 850, loss[loss=0.2035, simple_loss=0.2792, pruned_loss=0.04676, ctc_loss=0.08545, over 19670.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2704, pruned_loss=0.04899, ctc_loss=0.0921, over 3815217.60 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:55:00,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.677e+02 1.837e+02 2.316e+02 3.927e+02, threshold=3.675e+02, percent-clipped=0.0 +2024-08-31 16:55:05,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243541.33333333334, ans=0.125 +2024-08-31 16:55:15,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=243594.66666666666, ans=0.0 +2024-08-31 16:55:18,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=243594.66666666666, ans=0.5 +2024-08-31 16:55:24,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.45 vs. limit=15.0 +2024-08-31 16:55:33,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243648.0, ans=0.125 +2024-08-31 16:55:40,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=243648.0, ans=0.0 +2024-08-31 16:55:46,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-31 16:55:46,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=243701.33333333334, ans=0.2 +2024-08-31 16:55:47,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=22.5 +2024-08-31 16:55:50,194 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:55:55,903 INFO [train.py:1114] (0/4) Epoch 19, batch 900, loss[loss=0.2093, simple_loss=0.2662, pruned_loss=0.05476, ctc_loss=0.1071, over 19806.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2708, pruned_loss=0.04945, ctc_loss=0.09282, over 3819233.74 frames. 
], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:56:01,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=243754.66666666666, ans=0.05 +2024-08-31 16:56:05,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=243754.66666666666, ans=0.125 +2024-08-31 16:56:23,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243861.33333333334, ans=0.125 +2024-08-31 16:56:45,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-31 16:56:46,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=243914.66666666666, ans=0.0 +2024-08-31 16:56:47,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=243914.66666666666, ans=0.0 +2024-08-31 16:57:05,844 INFO [train.py:1114] (0/4) Epoch 19, batch 950, loss[loss=0.2005, simple_loss=0.2598, pruned_loss=0.0505, ctc_loss=0.1005, over 19501.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2715, pruned_loss=0.04979, ctc_loss=0.0937, over 3820559.24 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:57:14,286 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.751e+02 2.034e+02 2.400e+02 3.857e+02, threshold=4.067e+02, percent-clipped=1.0 +2024-08-31 16:57:20,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=244074.66666666666, ans=0.0 +2024-08-31 16:57:26,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=244074.66666666666, ans=0.0 +2024-08-31 16:57:27,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=244074.66666666666, ans=0.125 +2024-08-31 16:57:56,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=244234.66666666666, ans=0.2 +2024-08-31 16:58:03,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=244234.66666666666, ans=0.2 +2024-08-31 16:58:06,278 INFO [train.py:1114] (0/4) Epoch 19, batch 1000, loss[loss=0.1849, simple_loss=0.2643, pruned_loss=0.03882, ctc_loss=0.06929, over 19856.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2728, pruned_loss=0.05043, ctc_loss=0.09494, over 3815653.69 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 16:58:52,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=244341.33333333334, ans=0.015 +2024-08-31 16:58:54,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=244341.33333333334, ans=0.2 +2024-08-31 16:59:10,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.84 vs. 
limit=15.0 +2024-08-31 16:59:12,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=244448.0, ans=0.125 +2024-08-31 16:59:49,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.91 vs. limit=22.5 +2024-08-31 16:59:52,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=244501.33333333334, ans=0.5 +2024-08-31 17:00:09,426 INFO [train.py:1114] (0/4) Epoch 19, batch 1050, loss[loss=0.2263, simple_loss=0.2924, pruned_loss=0.05835, ctc_loss=0.1086, over 19834.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2718, pruned_loss=0.05005, ctc_loss=0.09419, over 3822306.47 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 17:00:13,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-31 17:00:17,641 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.651e+02 1.935e+02 2.361e+02 3.363e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-31 17:00:20,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=244608.0, ans=0.125 +2024-08-31 17:00:21,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=244608.0, ans=0.0 +2024-08-31 17:00:22,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=244608.0, ans=0.2 +2024-08-31 17:01:12,068 INFO [train.py:1114] (0/4) Epoch 19, batch 1100, loss[loss=0.2089, simple_loss=0.2694, pruned_loss=0.05361, ctc_loss=0.103, over 19574.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2715, pruned_loss=0.04974, ctc_loss=0.09362, over 3830280.28 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:01:16,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.50 vs. limit=15.0 +2024-08-31 17:01:58,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=244981.33333333334, ans=0.125 +2024-08-31 17:02:30,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245034.66666666666, ans=0.125 +2024-08-31 17:02:43,473 INFO [train.py:1114] (0/4) Epoch 19, batch 1150, loss[loss=0.1662, simple_loss=0.2384, pruned_loss=0.0345, ctc_loss=0.06259, over 19596.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2712, pruned_loss=0.04946, ctc_loss=0.09313, over 3828089.38 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:03:11,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.693e+02 1.899e+02 2.295e+02 3.327e+02, threshold=3.798e+02, percent-clipped=0.0 +2024-08-31 17:03:14,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.87 vs. 
limit=15.0
+2024-08-31 17:03:16,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245141.33333333334, ans=0.125
+2024-08-31 17:03:18,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=245141.33333333334, ans=0.05
+2024-08-31 17:03:22,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=245141.33333333334, ans=0.95
+2024-08-31 17:03:54,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245301.33333333334, ans=0.1
+2024-08-31 17:03:56,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=245301.33333333334, ans=0.125
+2024-08-31 17:03:59,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=245301.33333333334, ans=10.0
+2024-08-31 17:04:00,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=245301.33333333334, ans=0.0
+2024-08-31 17:04:04,676 INFO [train.py:1114] (0/4) Epoch 19, batch 1200, loss[loss=0.1832, simple_loss=0.2596, pruned_loss=0.03881, ctc_loss=0.07307, over 19828.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2718, pruned_loss=0.04969, ctc_loss=0.09361, over 3823389.80 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0
+2024-08-31 17:04:29,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245408.0, ans=0.0
+2024-08-31 17:04:33,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245461.33333333334, ans=0.125
+2024-08-31 17:04:44,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=245514.66666666666, ans=0.2
+2024-08-31 17:04:46,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=245514.66666666666, ans=0.125
+2024-08-31 17:04:51,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245514.66666666666, ans=0.125
+2024-08-31 17:05:08,561 INFO [train.py:1114] (0/4) Epoch 19, batch 1250, loss[loss=0.2005, simple_loss=0.27, pruned_loss=0.04723, ctc_loss=0.09154, over 19532.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2723, pruned_loss=0.04944, ctc_loss=0.09306, over 3841958.57 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0
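The recurring optim.py warnings summarize recent gradient norms as five quantiles (min, 25%, median, 75%, max). Throughout this log the reported threshold equals Clipping_scale times the median, and percent-clipped is the share of recent batches whose gradient norm exceeded that threshold. A small sketch of that bookkeeping, as an illustration of the logged numbers rather than the actual ScaledAdam implementation:

```python
import statistics

def clip_stats(recent_grad_norms: list[float],
               clipping_scale: float = 2.0) -> tuple[float, float]:
    """Threshold = clipping_scale x median norm; also report the clipped share."""
    threshold = clipping_scale * statistics.median(recent_grad_norms)
    percent_clipped = (100.0 * sum(g > threshold for g in recent_grad_norms)
                       / len(recent_grad_norms))
    return threshold, percent_clipped

# Quantiles from the warning logged at 17:03:11 above (min/25%/median/75%/max):
quantiles = [1.331e+02, 1.693e+02, 1.899e+02, 2.295e+02, 3.327e+02]
threshold, _ = clip_stats(quantiles)
print(f"{threshold:.1f}")  # -> 379.8, the logged threshold (2.0 * 1.899e+02)
# Even the max recent norm, 3.327e+02, sits below 379.8, so no batch was
# clipped there, matching the logged percent-clipped=0.0.
```

Stretches with percent-clipped=0.0 therefore just mean the recent norm distribution stayed tight relative to its own median.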
+2024-08-31 17:05:12,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=245621.33333333334, ans=0.125
+2024-08-31 17:05:14,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245621.33333333334, ans=0.125
+2024-08-31 17:05:16,756 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.647e+02 1.911e+02 2.205e+02 3.499e+02, threshold=3.822e+02, percent-clipped=0.0
+2024-08-31 17:05:50,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=245728.0, ans=0.025
+2024-08-31 17:05:58,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=245781.33333333334, ans=0.125
+2024-08-31 17:05:58,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.71 vs. limit=15.0
+2024-08-31 17:06:14,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=245834.66666666666, ans=0.025
+2024-08-31 17:06:19,683 INFO [train.py:1114] (0/4) Epoch 19, batch 1300, loss[loss=0.2038, simple_loss=0.2746, pruned_loss=0.04908, ctc_loss=0.08737, over 18889.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.272, pruned_loss=0.04965, ctc_loss=0.09353, over 3846145.41 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0
+2024-08-31 17:06:23,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245888.0, ans=0.125
+2024-08-31 17:06:32,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.27 vs. limit=22.5
+2024-08-31 17:07:02,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.74 vs. limit=10.0
+2024-08-31 17:07:24,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246154.66666666666, ans=0.1
+2024-08-31 17:07:25,628 INFO [train.py:1114] (0/4) Epoch 19, batch 1350, loss[loss=0.1918, simple_loss=0.2649, pruned_loss=0.04307, ctc_loss=0.08142, over 19769.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2714, pruned_loss=0.04925, ctc_loss=0.09269, over 3856948.23 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 64.0
+2024-08-31 17:07:39,276 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.765e+02 2.070e+02 2.720e+02 4.418e+02, threshold=4.141e+02, percent-clipped=1.0
+2024-08-31 17:07:42,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=246208.0, ans=0.125
+2024-08-31 17:07:51,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.66 vs. 
limit=22.5 +2024-08-31 17:07:53,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246208.0, ans=0.1 +2024-08-31 17:08:21,820 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:08:35,875 INFO [train.py:1114] (0/4) Epoch 19, batch 1400, loss[loss=0.1698, simple_loss=0.2367, pruned_loss=0.03697, ctc_loss=0.07205, over 19657.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2709, pruned_loss=0.04903, ctc_loss=0.0921, over 3863914.06 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:09:14,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=246528.0, ans=0.035 +2024-08-31 17:09:21,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=246528.0, ans=0.125 +2024-08-31 17:09:27,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-31 17:09:41,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=246634.66666666666, ans=0.2 +2024-08-31 17:09:44,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.45 vs. limit=22.5 +2024-08-31 17:09:45,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246634.66666666666, ans=0.0 +2024-08-31 17:09:47,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.66 vs. limit=6.0 +2024-08-31 17:09:50,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-31 17:09:53,651 INFO [train.py:1114] (0/4) Epoch 19, batch 1450, loss[loss=0.2218, simple_loss=0.2845, pruned_loss=0.05707, ctc_loss=0.1126, over 19667.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2713, pruned_loss=0.04943, ctc_loss=0.09277, over 3862670.75 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:09:53,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=246688.0, ans=0.0 +2024-08-31 17:10:02,069 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.290e+02 1.691e+02 1.919e+02 2.362e+02 3.353e+02, threshold=3.838e+02, percent-clipped=0.0 +2024-08-31 17:10:06,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.88 vs. 
limit=12.0 +2024-08-31 17:11:36,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246794.66666666666, ans=0.125 +2024-08-31 17:11:50,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-31 17:12:08,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=246901.33333333334, ans=0.0 +2024-08-31 17:12:09,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.70 vs. limit=15.0 +2024-08-31 17:12:12,391 INFO [train.py:1114] (0/4) Epoch 19, batch 1500, loss[loss=0.2129, simple_loss=0.2834, pruned_loss=0.051, ctc_loss=0.1009, over 19578.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2718, pruned_loss=0.0495, ctc_loss=0.09303, over 3862431.58 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:12:13,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=246954.66666666666, ans=0.125 +2024-08-31 17:13:04,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=247008.0, ans=0.0 +2024-08-31 17:13:58,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=247114.66666666666, ans=0.125 +2024-08-31 17:14:15,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247114.66666666666, ans=0.1 +2024-08-31 17:14:33,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=247168.0, ans=0.2 +2024-08-31 17:14:37,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247221.33333333334, ans=0.1 +2024-08-31 17:14:38,391 INFO [train.py:1114] (0/4) Epoch 19, batch 1550, loss[loss=0.2528, simple_loss=0.3064, pruned_loss=0.073, ctc_loss=0.133, over 19613.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2719, pruned_loss=0.04993, ctc_loss=0.09384, over 3847641.78 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:14:46,783 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.654e+02 1.883e+02 2.328e+02 3.879e+02, threshold=3.765e+02, percent-clipped=1.0 +2024-08-31 17:14:46,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247221.33333333334, ans=0.0 +2024-08-31 17:15:12,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247274.66666666666, ans=0.0 +2024-08-31 17:15:21,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=247328.0, ans=0.2 +2024-08-31 17:15:24,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.40 vs. 
limit=12.0 +2024-08-31 17:15:29,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247328.0, ans=0.125 +2024-08-31 17:16:19,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=247381.33333333334, ans=15.0 +2024-08-31 17:16:21,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=247381.33333333334, ans=0.125 +2024-08-31 17:16:25,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247381.33333333334, ans=0.125 +2024-08-31 17:16:32,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=247434.66666666666, ans=0.0 +2024-08-31 17:16:37,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=247434.66666666666, ans=0.125 +2024-08-31 17:16:40,622 INFO [train.py:1114] (0/4) Epoch 19, batch 1600, loss[loss=0.1953, simple_loss=0.2725, pruned_loss=0.04341, ctc_loss=0.0782, over 19846.00 frames. ], tot_loss[loss=0.205, simple_loss=0.272, pruned_loss=0.05012, ctc_loss=0.09442, over 3836418.62 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:16:59,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=247541.33333333334, ans=0.2 +2024-08-31 17:17:07,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=247594.66666666666, ans=0.0 +2024-08-31 17:17:23,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=247648.0, ans=0.2 +2024-08-31 17:17:42,002 INFO [train.py:1114] (0/4) Epoch 19, batch 1650, loss[loss=0.2182, simple_loss=0.2889, pruned_loss=0.05319, ctc_loss=0.1028, over 19628.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2719, pruned_loss=0.05015, ctc_loss=0.09429, over 3833472.39 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:17:43,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=247754.66666666666, ans=0.125 +2024-08-31 17:17:50,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.753e+02 1.927e+02 2.360e+02 4.500e+02, threshold=3.853e+02, percent-clipped=4.0 +2024-08-31 17:17:55,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=247808.0, ans=0.0 +2024-08-31 17:18:04,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247861.33333333334, ans=0.1 +2024-08-31 17:18:07,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=12.0 +2024-08-31 17:18:20,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.68 vs. 
limit=15.0 +2024-08-31 17:18:42,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=247968.0, ans=0.125 +2024-08-31 17:18:44,906 INFO [train.py:1114] (0/4) Epoch 19, batch 1700, loss[loss=0.1786, simple_loss=0.2457, pruned_loss=0.04108, ctc_loss=0.07345, over 19657.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2712, pruned_loss=0.04946, ctc_loss=0.09286, over 3847350.29 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 64.0 +2024-08-31 17:18:50,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.55 vs. limit=12.0 +2024-08-31 17:18:55,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=248021.33333333334, ans=0.025 +2024-08-31 17:19:11,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=248128.0, ans=0.2 +2024-08-31 17:19:52,910 INFO [train.py:1114] (0/4) Epoch 19, batch 1750, loss[loss=0.18, simple_loss=0.245, pruned_loss=0.04152, ctc_loss=0.07996, over 19654.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2705, pruned_loss=0.04902, ctc_loss=0.09214, over 3852186.62 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:19:54,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0 +2024-08-31 17:20:02,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.715e+02 1.941e+02 2.441e+02 4.524e+02, threshold=3.882e+02, percent-clipped=3.0 +2024-08-31 17:20:12,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248341.33333333334, ans=0.1 +2024-08-31 17:20:16,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=248394.66666666666, ans=0.0 +2024-08-31 17:20:20,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=248394.66666666666, ans=0.0 +2024-08-31 17:20:25,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=248394.66666666666, ans=15.0 +2024-08-31 17:20:40,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248501.33333333334, ans=0.1 +2024-08-31 17:20:44,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=248501.33333333334, ans=0.0 +2024-08-31 17:20:46,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=248501.33333333334, ans=0.0 +2024-08-31 17:20:49,896 INFO [train.py:1114] (0/4) Epoch 19, batch 1800, loss[loss=0.2011, simple_loss=0.2755, pruned_loss=0.04618, ctc_loss=0.08578, over 19604.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2709, pruned_loss=0.04896, ctc_loss=0.09206, over 3854361.58 frames. 
], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:20:50,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=248554.66666666666, ans=0.07 +2024-08-31 17:21:19,603 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:21:27,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.99 vs. limit=22.5 +2024-08-31 17:21:45,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.40 vs. limit=12.0 +2024-08-31 17:21:45,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.54 vs. limit=6.0 +2024-08-31 17:21:46,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=248821.33333333334, ans=0.2 +2024-08-31 17:21:47,148 INFO [train.py:1114] (0/4) Epoch 19, batch 1850, loss[loss=0.2268, simple_loss=0.2923, pruned_loss=0.05818, ctc_loss=0.1122, over 19595.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2709, pruned_loss=0.04887, ctc_loss=0.09184, over 3857567.59 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:21:52,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.12 vs. limit=22.5 +2024-08-31 17:21:56,039 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.825e+02 2.203e+02 3.044e+02 4.782e+02, threshold=4.406e+02, percent-clipped=6.0 +2024-08-31 17:22:15,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=248874.66666666666, ans=0.125 +2024-08-31 17:22:19,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248928.0, ans=0.1 +2024-08-31 17:22:34,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.17 vs. limit=15.0 +2024-08-31 17:22:52,464 INFO [train.py:1114] (0/4) Epoch 19, batch 1900, loss[loss=0.2045, simple_loss=0.2877, pruned_loss=0.04483, ctc_loss=0.07912, over 19668.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2717, pruned_loss=0.04913, ctc_loss=0.09226, over 3862216.50 frames. 
], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:22:55,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=249088.0, ans=0.0 +2024-08-31 17:22:59,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=249088.0, ans=0.0 +2024-08-31 17:23:08,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=249141.33333333334, ans=10.0 +2024-08-31 17:23:13,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249194.66666666666, ans=0.1 +2024-08-31 17:23:22,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249194.66666666666, ans=0.1 +2024-08-31 17:23:23,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249194.66666666666, ans=0.125 +2024-08-31 17:23:34,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=249248.0, ans=0.05 +2024-08-31 17:23:35,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249248.0, ans=0.125 +2024-08-31 17:23:48,980 INFO [train.py:1114] (0/4) Epoch 19, batch 1950, loss[loss=0.1738, simple_loss=0.2493, pruned_loss=0.03633, ctc_loss=0.0643, over 19586.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2722, pruned_loss=0.04906, ctc_loss=0.09216, over 3871303.88 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:23:58,748 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.608e+02 1.802e+02 2.157e+02 4.545e+02, threshold=3.604e+02, percent-clipped=1.0 +2024-08-31 17:24:00,131 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:24:06,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249408.0, ans=0.1 +2024-08-31 17:24:13,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.08 vs. limit=15.0 +2024-08-31 17:24:22,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249514.66666666666, ans=0.0 +2024-08-31 17:24:25,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=249514.66666666666, ans=0.0 +2024-08-31 17:24:50,835 INFO [train.py:1114] (0/4) Epoch 19, batch 2000, loss[loss=0.1675, simple_loss=0.2328, pruned_loss=0.03686, ctc_loss=0.0712, over 19614.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2728, pruned_loss=0.04943, ctc_loss=0.09284, over 3856103.74 frames. 
], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:24:51,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=249621.33333333334, ans=0.015 +2024-08-31 17:24:57,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=249621.33333333334, ans=0.125 +2024-08-31 17:25:05,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=249674.66666666666, ans=12.0 +2024-08-31 17:25:06,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=249674.66666666666, ans=0.2 +2024-08-31 17:25:08,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=249674.66666666666, ans=10.0 +2024-08-31 17:25:25,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=249781.33333333334, ans=0.2 +2024-08-31 17:25:41,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249834.66666666666, ans=0.0 +2024-08-31 17:25:41,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=249834.66666666666, ans=0.05 +2024-08-31 17:25:46,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=249888.0, ans=0.0 +2024-08-31 17:25:47,787 INFO [train.py:1114] (0/4) Epoch 19, batch 2050, loss[loss=0.1773, simple_loss=0.2406, pruned_loss=0.04211, ctc_loss=0.07482, over 19697.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2717, pruned_loss=0.04945, ctc_loss=0.09289, over 3852752.59 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:25:52,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=249888.0, ans=0.125 +2024-08-31 17:25:57,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.719e+02 2.018e+02 2.402e+02 3.677e+02, threshold=4.037e+02, percent-clipped=1.0 +2024-08-31 17:25:57,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=249888.0, ans=0.02 +2024-08-31 17:25:59,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=249941.33333333334, ans=0.0 +2024-08-31 17:26:19,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249994.66666666666, ans=0.1 +2024-08-31 17:26:36,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-08-31 17:26:38,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=250101.33333333334, ans=0.05 +2024-08-31 17:26:44,682 INFO [train.py:1114] (0/4) Epoch 19, batch 2100, loss[loss=0.1905, simple_loss=0.269, pruned_loss=0.04051, ctc_loss=0.07741, over 19782.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2712, pruned_loss=0.0493, ctc_loss=0.09268, over 3859487.84 frames. 
], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:26:49,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.65 vs. limit=15.0 +2024-08-31 17:26:50,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=250154.66666666666, ans=0.2 +2024-08-31 17:27:13,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.32 vs. limit=15.0 +2024-08-31 17:27:40,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250368.0, ans=0.125 +2024-08-31 17:27:41,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=250421.33333333334, ans=0.0 +2024-08-31 17:27:42,494 INFO [train.py:1114] (0/4) Epoch 19, batch 2150, loss[loss=0.1875, simple_loss=0.2625, pruned_loss=0.04045, ctc_loss=0.07902, over 19568.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2705, pruned_loss=0.0489, ctc_loss=0.09186, over 3870372.76 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:27:48,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-31 17:27:51,496 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.672e+02 1.975e+02 2.523e+02 4.782e+02, threshold=3.951e+02, percent-clipped=2.0 +2024-08-31 17:27:57,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-31 17:28:08,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=250528.0, ans=0.0 +2024-08-31 17:28:22,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=250581.33333333334, ans=0.025 +2024-08-31 17:28:39,684 INFO [train.py:1114] (0/4) Epoch 19, batch 2200, loss[loss=0.202, simple_loss=0.2777, pruned_loss=0.04651, ctc_loss=0.08319, over 19587.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2709, pruned_loss=0.04919, ctc_loss=0.09236, over 3869227.07 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:29:14,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=250848.0, ans=0.0 +2024-08-31 17:29:30,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250901.33333333334, ans=0.1 +2024-08-31 17:29:38,737 INFO [train.py:1114] (0/4) Epoch 19, batch 2250, loss[loss=0.2313, simple_loss=0.2941, pruned_loss=0.06104, ctc_loss=0.1163, over 19623.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2713, pruned_loss=0.04911, ctc_loss=0.09233, over 3869499.94 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:29:47,357 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.680e+02 1.896e+02 2.375e+02 5.292e+02, threshold=3.791e+02, percent-clipped=4.0 +2024-08-31 17:30:00,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.74 vs. 
limit=15.0 +2024-08-31 17:30:12,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=251061.33333333334, ans=6.0 +2024-08-31 17:30:20,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251114.66666666666, ans=0.1 +2024-08-31 17:30:37,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=251168.0, ans=0.125 +2024-08-31 17:30:40,038 INFO [train.py:1114] (0/4) Epoch 19, batch 2300, loss[loss=0.1763, simple_loss=0.2474, pruned_loss=0.03839, ctc_loss=0.07109, over 19501.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2706, pruned_loss=0.04925, ctc_loss=0.09267, over 3863587.28 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:30:47,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0 +2024-08-31 17:31:05,087 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=3.251e-02 +2024-08-31 17:31:26,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=251434.66666666666, ans=0.125 +2024-08-31 17:31:27,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=251434.66666666666, ans=0.125 +2024-08-31 17:31:36,397 INFO [train.py:1114] (0/4) Epoch 19, batch 2350, loss[loss=0.2031, simple_loss=0.2766, pruned_loss=0.04711, ctc_loss=0.0883, over 19649.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2705, pruned_loss=0.04936, ctc_loss=0.09277, over 3865582.44 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-31 17:31:45,227 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.718e+02 2.013e+02 2.563e+02 3.706e+02, threshold=4.026e+02, percent-clipped=0.0 +2024-08-31 17:31:50,880 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:32:04,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=251594.66666666666, ans=0.0 +2024-08-31 17:32:05,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=12.0 +2024-08-31 17:32:13,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=251648.0, ans=0.125 +2024-08-31 17:32:17,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=251648.0, ans=0.125 +2024-08-31 17:32:23,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=15.0 +2024-08-31 17:32:27,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-31 17:32:36,572 INFO [train.py:1114] (0/4) Epoch 19, batch 2400, loss[loss=0.2285, simple_loss=0.2898, pruned_loss=0.05982, ctc_loss=0.1186, over 19372.00 frames. 
], tot_loss[loss=0.2053, simple_loss=0.2726, pruned_loss=0.05013, ctc_loss=0.09401, over 3861154.57 frames. ], batch size: 67, lr: 7.79e-03, grad_scale: 32.0 +2024-08-31 17:32:48,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=251808.0, ans=0.2 +2024-08-31 17:32:48,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=251808.0, ans=0.125 +2024-08-31 17:32:50,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=251808.0, ans=0.04949747468305833 +2024-08-31 17:33:39,847 INFO [train.py:1114] (0/4) Epoch 19, batch 2450, loss[loss=0.25, simple_loss=0.2952, pruned_loss=0.0744, ctc_loss=0.1397, over 12910.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.276, pruned_loss=0.0522, ctc_loss=0.09809, over 3737846.95 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-31 17:33:43,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=252021.33333333334, ans=0.125 +2024-08-31 17:33:48,950 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.610e+02 1.856e+02 2.081e+02 3.075e+02, threshold=3.711e+02, percent-clipped=0.0 +2024-08-31 17:34:01,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=252128.0, ans=0.0 +2024-08-31 17:34:07,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=252128.0, ans=0.0 +2024-08-31 17:34:09,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252128.0, ans=0.125 +2024-08-31 17:34:24,890 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-19.pt +2024-08-31 17:36:18,526 INFO [train.py:1114] (0/4) Epoch 20, batch 0, loss[loss=0.2057, simple_loss=0.26, pruned_loss=0.05526, ctc_loss=0.1024, over 19809.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.26, pruned_loss=0.05526, ctc_loss=0.1024, over 19809.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-31 17:36:18,527 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-31 17:36:23,388 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.1181, 2.3030, 2.5674, 2.2271], device='cuda:0') +2024-08-31 17:36:28,430 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1834, simple_loss=0.2715, pruned_loss=0.03542, ctc_loss=0.061, over 944034.00 frames. 
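Most of the `scaling.py:214` lines above report a `ScheduledFloat`: a module hyperparameter (a dropout probability, bypass/skip rate, balancer probability, and so on) whose current value `ans` is a piecewise-linear function of the global `batch_count`. A minimal sketch of such a schedule, assuming a sorted list of `(batch_count, value)` breakpoints; this parameterization is illustrative, not icefall's exact API:

```python
def scheduled_float(batch_count: float, points: list[tuple[float, float]]) -> float:
    """Piecewise-linear schedule over the global batch count, in the spirit
    of the ScheduledFloat values logged above. `points` must be sorted with
    strictly increasing batch_count breakpoints."""
    x0, y0 = points[0]
    if batch_count <= x0:
        return y0
    for x1, y1 in points[1:]:
        if batch_count <= x1:
            frac = (batch_count - x0) / (x1 - x0)  # interpolate linearly
            return y0 + frac * (y1 - y0)
        x0, y0 = x1, y1
    return y0  # past the last breakpoint the value stays constant

# Example: a dropout schedule of 0.3 at batch 0 decaying to 0.1 by batch
# 20000 (a common zipformer choice, assumed here) is constant by
# batch_count=246208, matching the dropout_p lines above that report ans=0.1.
print(scheduled_float(246208.0, [(0.0, 0.3), (20000.0, 0.1)]))  # -> 0.1
```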
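The periodic `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ...` lines summarize the recent distribution of per-batch gradient norms as five quantiles (min, 25%, median, 75%, max), a clipping threshold, and the percentage of batches that were clipped. The reported threshold is the clipping scale times the median: in the first warning above, 2.0 * 1.919e+02 = 3.838e+02. A sketch of producing the same summary from a sliding window of recorded norms (names are illustrative; the real computation lives inside icefall's optimizer step):

```python
from collections import deque

import torch

norm_history: deque = deque(maxlen=128)  # recent per-batch gradient norms

def record_and_summarize(grad_norm: float, clipping_scale: float = 2.0):
    """Append one gradient norm; return (quantiles, threshold, percent clipped)
    in the format of the optim.py warnings above."""
    norm_history.append(grad_norm)
    norms = torch.tensor(list(norm_history))
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # clipping scale times the median
    percent_clipped = 100.0 * (norms > threshold).float().mean().item()
    return q.tolist(), threshold, percent_clipped
```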
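The learning rate in the `train.py:1114` lines decays smoothly within an epoch (7.87e-03 down to 7.78e-03 across epoch 19) and drops again at the epoch boundary (7.78e-03 to 7.58e-03 entering epoch 20). Both effects are consistent with icefall's Eden schedule, which multiplies a base LR by separate power-law factors in the global batch index and the epoch index; the epoch factor alone predicts a drop to about 7.59e-03 at the start of epoch 20, matching the log up to rounding. A sketch, with the common zipformer recipe defaults (base_lr=0.045, lr_batches=7500, lr_epochs=3.5) taken as assumptions since this run's settings are not recorded in the log:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    """Eden-style LR rule: smooth power-law decay in both the global batch
    index and the epoch index. Defaults are typical recipe values, assumed."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# ~7.72e-03 for an estimated global batch index late in epoch 19, within
# about 1% of the logged 7.78e-03 (exact run settings and step count unknown).
print(f"{eden_lr(0.045, 45600, 19):.2e}")
```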
+2024-08-31 17:36:28,431 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13675MB +2024-08-31 17:36:41,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=252288.0, ans=0.025 +2024-08-31 17:36:50,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252288.0, ans=0.125 +2024-08-31 17:36:50,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=252288.0, ans=0.2 +2024-08-31 17:36:53,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=252341.33333333334, ans=0.125 +2024-08-31 17:37:06,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=252394.66666666666, ans=0.0 +2024-08-31 17:37:07,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=252394.66666666666, ans=0.025 +2024-08-31 17:37:27,964 INFO [train.py:1114] (0/4) Epoch 20, batch 50, loss[loss=0.1701, simple_loss=0.2445, pruned_loss=0.03522, ctc_loss=0.0633, over 19720.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2734, pruned_loss=0.05082, ctc_loss=0.09645, over 843606.59 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-31 17:37:51,156 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.696e+02 1.962e+02 2.261e+02 4.473e+02, threshold=3.923e+02, percent-clipped=2.0 +2024-08-31 17:40:42,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0 +2024-08-31 17:40:42,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.48 vs. limit=22.5 +2024-08-31 17:41:08,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=252714.66666666666, ans=0.125 +2024-08-31 17:41:27,195 INFO [train.py:1114] (0/4) Epoch 20, batch 100, loss[loss=0.1731, simple_loss=0.2453, pruned_loss=0.03703, ctc_loss=0.06724, over 19733.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2745, pruned_loss=0.05095, ctc_loss=0.09572, over 1498532.54 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-31 17:41:50,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252821.33333333334, ans=0.125 +2024-08-31 17:42:39,608 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.77 vs. limit=22.5 +2024-08-31 17:42:51,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=252874.66666666666, ans=0.2 +2024-08-31 17:44:06,478 INFO [train.py:1114] (0/4) Epoch 20, batch 150, loss[loss=0.1966, simple_loss=0.2546, pruned_loss=0.05114, ctc_loss=0.09064, over 19685.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2705, pruned_loss=0.04878, ctc_loss=0.09152, over 2026778.29 frames. 
], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-31 17:44:06,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=253034.66666666666, ans=0.0 +2024-08-31 17:44:59,735 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.634e+02 1.821e+02 2.194e+02 3.683e+02, threshold=3.641e+02, percent-clipped=0.0 +2024-08-31 17:45:01,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=253088.0, ans=0.125 +2024-08-31 17:45:21,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.54 vs. limit=15.0 +2024-08-31 17:45:32,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=253194.66666666666, ans=0.05 +2024-08-31 17:45:58,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=253301.33333333334, ans=0.2 +2024-08-31 17:45:59,902 INFO [train.py:1114] (0/4) Epoch 20, batch 200, loss[loss=0.2194, simple_loss=0.2903, pruned_loss=0.0537, ctc_loss=0.1028, over 18195.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2689, pruned_loss=0.04802, ctc_loss=0.09044, over 2434933.67 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:46:14,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=253354.66666666666, ans=0.1 +2024-08-31 17:46:33,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=253408.0, ans=0.125 +2024-08-31 17:46:48,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253408.0, ans=0.125 +2024-08-31 17:46:51,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=253408.0, ans=0.0 +2024-08-31 17:47:15,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253514.66666666666, ans=0.125 +2024-08-31 17:47:18,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253514.66666666666, ans=0.125 +2024-08-31 17:47:20,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.43 vs. limit=15.0 +2024-08-31 17:47:28,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.31 vs. limit=15.0 +2024-08-31 17:47:33,339 INFO [train.py:1114] (0/4) Epoch 20, batch 250, loss[loss=0.2131, simple_loss=0.2827, pruned_loss=0.0524, ctc_loss=0.09707, over 19420.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.269, pruned_loss=0.04776, ctc_loss=0.0901, over 2755716.60 frames. 
], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:47:43,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253568.0, ans=0.0 +2024-08-31 17:47:49,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=253621.33333333334, ans=0.025 +2024-08-31 17:47:56,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=253621.33333333334, ans=0.2 +2024-08-31 17:47:59,368 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.745e+02 2.044e+02 2.602e+02 4.259e+02, threshold=4.089e+02, percent-clipped=6.0 +2024-08-31 17:47:59,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=253621.33333333334, ans=0.2 +2024-08-31 17:48:59,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-31 17:49:36,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253728.0, ans=0.125 +2024-08-31 17:49:37,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253728.0, ans=0.1 +2024-08-31 17:50:00,343 INFO [train.py:1114] (0/4) Epoch 20, batch 300, loss[loss=0.2118, simple_loss=0.2859, pruned_loss=0.05027, ctc_loss=0.09284, over 19549.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2691, pruned_loss=0.04801, ctc_loss=0.09042, over 3001088.90 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:50:08,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253834.66666666666, ans=0.125 +2024-08-31 17:50:16,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253888.0, ans=0.125 +2024-08-31 17:50:31,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=253941.33333333334, ans=0.0 +2024-08-31 17:50:44,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.60 vs. limit=22.5 +2024-08-31 17:50:45,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=253994.66666666666, ans=0.2 +2024-08-31 17:51:05,485 INFO [train.py:1114] (0/4) Epoch 20, batch 350, loss[loss=0.1813, simple_loss=0.2485, pruned_loss=0.04141, ctc_loss=0.07822, over 19726.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2697, pruned_loss=0.04815, ctc_loss=0.09048, over 3190959.39 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:51:07,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=254101.33333333334, ans=0.0 +2024-08-31 17:51:18,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.38 vs. 
limit=15.0 +2024-08-31 17:51:26,955 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.703e+02 1.946e+02 2.321e+02 4.034e+02, threshold=3.891e+02, percent-clipped=0.0 +2024-08-31 17:51:28,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=254208.0, ans=0.125 +2024-08-31 17:51:30,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=254208.0, ans=0.125 +2024-08-31 17:52:02,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.32 vs. limit=22.5 +2024-08-31 17:52:04,333 INFO [train.py:1114] (0/4) Epoch 20, batch 400, loss[loss=0.1946, simple_loss=0.2634, pruned_loss=0.04572, ctc_loss=0.08578, over 19510.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2694, pruned_loss=0.04806, ctc_loss=0.09006, over 3342257.31 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:52:37,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=254474.66666666666, ans=0.125 +2024-08-31 17:52:39,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254474.66666666666, ans=0.125 +2024-08-31 17:52:46,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254528.0, ans=0.125 +2024-08-31 17:52:55,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0 +2024-08-31 17:53:10,612 INFO [train.py:1114] (0/4) Epoch 20, batch 450, loss[loss=0.2087, simple_loss=0.2826, pruned_loss=0.04782, ctc_loss=0.09797, over 19600.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2699, pruned_loss=0.04836, ctc_loss=0.09067, over 3451494.20 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:53:20,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=254634.66666666666, ans=0.125 +2024-08-31 17:53:31,692 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.627e+02 1.777e+02 2.217e+02 3.582e+02, threshold=3.554e+02, percent-clipped=0.0 +2024-08-31 17:53:46,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=254741.33333333334, ans=0.125 +2024-08-31 17:53:58,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=15.0 +2024-08-31 17:53:58,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.50 vs. limit=10.0 +2024-08-31 17:54:15,353 INFO [train.py:1114] (0/4) Epoch 20, batch 500, loss[loss=0.211, simple_loss=0.2748, pruned_loss=0.05355, ctc_loss=0.1004, over 19680.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2692, pruned_loss=0.04806, ctc_loss=0.09022, over 3545952.35 frames. 
], batch size: 63, lr: 7.54e-03, grad_scale: 32.0 +2024-08-31 17:54:34,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=254954.66666666666, ans=0.025 +2024-08-31 17:54:54,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=255061.33333333334, ans=0.125 +2024-08-31 17:54:56,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=255061.33333333334, ans=0.125 +2024-08-31 17:55:14,668 INFO [train.py:1114] (0/4) Epoch 20, batch 550, loss[loss=0.2262, simple_loss=0.2913, pruned_loss=0.05725, ctc_loss=0.1164, over 19253.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2694, pruned_loss=0.04804, ctc_loss=0.09027, over 3608173.82 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 32.0 +2024-08-31 17:55:35,928 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.640e+02 1.908e+02 2.178e+02 3.229e+02, threshold=3.816e+02, percent-clipped=0.0 +2024-08-31 17:55:46,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=255274.66666666666, ans=0.0 +2024-08-31 17:55:52,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=255328.0, ans=0.2 +2024-08-31 17:56:20,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=255381.33333333334, ans=10.0 +2024-08-31 17:56:22,748 INFO [train.py:1114] (0/4) Epoch 20, batch 600, loss[loss=0.2241, simple_loss=0.294, pruned_loss=0.05678, ctc_loss=0.1014, over 19331.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.27, pruned_loss=0.04832, ctc_loss=0.09088, over 3665746.35 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:56:47,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=255541.33333333334, ans=0.125 +2024-08-31 17:57:04,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=255594.66666666666, ans=0.025 +2024-08-31 17:57:07,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.80 vs. limit=15.0 +2024-08-31 17:57:18,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.05 vs. limit=12.0 +2024-08-31 17:57:22,366 INFO [train.py:1114] (0/4) Epoch 20, batch 650, loss[loss=0.1978, simple_loss=0.2695, pruned_loss=0.0451, ctc_loss=0.0898, over 19755.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2696, pruned_loss=0.0481, ctc_loss=0.09067, over 3716724.37 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:57:28,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=255701.33333333334, ans=0.125 +2024-08-31 17:57:37,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. 
limit=6.0 +2024-08-31 17:57:44,316 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.759e+02 2.153e+02 2.838e+02 5.166e+02, threshold=4.306e+02, percent-clipped=8.0 +2024-08-31 17:57:52,733 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:57:55,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=255808.0, ans=0.125 +2024-08-31 17:58:03,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=255861.33333333334, ans=0.07 +2024-08-31 17:58:12,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=255914.66666666666, ans=0.0 +2024-08-31 17:58:22,788 INFO [train.py:1114] (0/4) Epoch 20, batch 700, loss[loss=0.2241, simple_loss=0.2827, pruned_loss=0.06002, ctc_loss=0.1137, over 19713.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2704, pruned_loss=0.04829, ctc_loss=0.09091, over 3748489.04 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:58:28,900 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/checkpoint-48000.pt +2024-08-31 17:58:36,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=256021.33333333334, ans=0.025 +2024-08-31 17:58:36,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256021.33333333334, ans=0.1 +2024-08-31 17:58:38,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=256021.33333333334, ans=0.025 +2024-08-31 17:59:11,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.55 vs. limit=15.0 +2024-08-31 17:59:24,901 INFO [train.py:1114] (0/4) Epoch 20, batch 750, loss[loss=0.2021, simple_loss=0.2768, pruned_loss=0.04667, ctc_loss=0.08524, over 19516.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2696, pruned_loss=0.04779, ctc_loss=0.08997, over 3775536.21 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 32.0 +2024-08-31 17:59:30,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=256234.66666666666, ans=0.0 +2024-08-31 17:59:38,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=256234.66666666666, ans=0.125 +2024-08-31 17:59:58,590 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.642e+02 1.855e+02 2.095e+02 3.716e+02, threshold=3.709e+02, percent-clipped=0.0 +2024-08-31 18:00:09,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=256341.33333333334, ans=0.025 +2024-08-31 18:00:11,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.68 vs. 
limit=22.5 +2024-08-31 18:00:12,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=256394.66666666666, ans=0.125 +2024-08-31 18:00:18,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=256394.66666666666, ans=0.2 +2024-08-31 18:00:30,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=256448.0, ans=0.125 +2024-08-31 18:00:42,912 INFO [train.py:1114] (0/4) Epoch 20, batch 800, loss[loss=0.1949, simple_loss=0.2594, pruned_loss=0.04564, ctc_loss=0.09784, over 19816.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.27, pruned_loss=0.04819, ctc_loss=0.09077, over 3797324.35 frames. ], batch size: 49, lr: 7.52e-03, grad_scale: 32.0 +2024-08-31 18:00:43,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256501.33333333334, ans=0.125 +2024-08-31 18:00:53,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=256554.66666666666, ans=0.2 +2024-08-31 18:01:02,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.77 vs. limit=6.0 +2024-08-31 18:01:02,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.50 vs. limit=6.0 +2024-08-31 18:01:07,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=256608.0, ans=0.025 +2024-08-31 18:01:23,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=256661.33333333334, ans=0.125 +2024-08-31 18:01:28,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=256661.33333333334, ans=0.125 +2024-08-31 18:01:35,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-31 18:01:43,045 INFO [train.py:1114] (0/4) Epoch 20, batch 850, loss[loss=0.1963, simple_loss=0.2706, pruned_loss=0.04362, ctc_loss=0.08694, over 19647.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2695, pruned_loss=0.04797, ctc_loss=0.09052, over 3816331.02 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:01:50,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=256768.0, ans=0.125 +2024-08-31 18:01:52,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=256768.0, ans=0.125 +2024-08-31 18:02:04,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256821.33333333334, ans=0.1 +2024-08-31 18:02:04,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.69 vs. 
limit=15.0 +2024-08-31 18:02:05,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.672e+02 2.009e+02 2.661e+02 4.692e+02, threshold=4.019e+02, percent-clipped=5.0 +2024-08-31 18:02:12,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=15.0 +2024-08-31 18:02:23,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=15.0 +2024-08-31 18:02:42,831 INFO [train.py:1114] (0/4) Epoch 20, batch 900, loss[loss=0.1851, simple_loss=0.2423, pruned_loss=0.04661, ctc_loss=0.08655, over 19410.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2696, pruned_loss=0.04831, ctc_loss=0.09123, over 3819704.95 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:02:43,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257034.66666666666, ans=0.1 +2024-08-31 18:02:47,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257034.66666666666, ans=0.1 +2024-08-31 18:03:14,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.78 vs. limit=12.0 +2024-08-31 18:03:17,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.13 vs. limit=22.5 +2024-08-31 18:03:30,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=257194.66666666666, ans=0.07 +2024-08-31 18:03:36,788 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:03:36,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=257194.66666666666, ans=0.025 +2024-08-31 18:03:48,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=257248.0, ans=0.125 +2024-08-31 18:03:50,691 INFO [train.py:1114] (0/4) Epoch 20, batch 950, loss[loss=0.1922, simple_loss=0.2528, pruned_loss=0.04815, ctc_loss=0.08825, over 19494.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2697, pruned_loss=0.04827, ctc_loss=0.0911, over 3820524.61 frames. 
], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:04:12,191 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.674e+02 1.914e+02 2.385e+02 5.476e+02, threshold=3.829e+02, percent-clipped=1.0 +2024-08-31 18:05:00,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257461.33333333334, ans=0.125 +2024-08-31 18:05:01,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=257461.33333333334, ans=0.125 +2024-08-31 18:05:20,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=257514.66666666666, ans=0.125 +2024-08-31 18:05:24,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=257568.0, ans=0.125 +2024-08-31 18:05:25,081 INFO [train.py:1114] (0/4) Epoch 20, batch 1000, loss[loss=0.1769, simple_loss=0.2485, pruned_loss=0.03827, ctc_loss=0.07181, over 19855.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2707, pruned_loss=0.04902, ctc_loss=0.09247, over 3817259.76 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:05:40,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=257621.33333333334, ans=0.125 +2024-08-31 18:10:04,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257621.33333333334, ans=0.125 +2024-08-31 18:11:53,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257728.0, ans=0.125 +2024-08-31 18:12:03,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257781.33333333334, ans=0.125 +2024-08-31 18:12:06,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=257781.33333333334, ans=0.0 +2024-08-31 18:12:15,980 INFO [train.py:1114] (0/4) Epoch 20, batch 1050, loss[loss=0.1939, simple_loss=0.2671, pruned_loss=0.04334, ctc_loss=0.08471, over 19842.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2699, pruned_loss=0.04845, ctc_loss=0.09131, over 3823486.45 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:12:22,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=257834.66666666666, ans=0.0 +2024-08-31 18:12:25,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.18 vs. 
limit=22.5 +2024-08-31 18:12:32,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=257888.0, ans=0.0 +2024-08-31 18:12:34,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=257888.0, ans=0.125 +2024-08-31 18:12:37,419 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.683e+02 1.941e+02 2.234e+02 3.103e+02, threshold=3.882e+02, percent-clipped=0.0 +2024-08-31 18:13:01,936 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:13:13,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=258048.0, ans=12.0 +2024-08-31 18:13:25,853 INFO [train.py:1114] (0/4) Epoch 20, batch 1100, loss[loss=0.1911, simple_loss=0.2625, pruned_loss=0.04364, ctc_loss=0.08088, over 19588.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2701, pruned_loss=0.04855, ctc_loss=0.09135, over 3831554.41 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:13:58,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=258208.0, ans=0.125 +2024-08-31 18:14:26,123 INFO [train.py:1114] (0/4) Epoch 20, batch 1150, loss[loss=0.1985, simple_loss=0.271, pruned_loss=0.0448, ctc_loss=0.09081, over 19585.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2699, pruned_loss=0.04852, ctc_loss=0.09132, over 3828701.45 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-31 18:15:12,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.657e+02 1.937e+02 2.398e+02 3.976e+02, threshold=3.875e+02, percent-clipped=1.0 +2024-08-31 18:15:35,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258528.0, ans=0.1 +2024-08-31 18:15:51,938 INFO [train.py:1114] (0/4) Epoch 20, batch 1200, loss[loss=0.2287, simple_loss=0.2975, pruned_loss=0.05827, ctc_loss=0.1082, over 19840.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2708, pruned_loss=0.04873, ctc_loss=0.09188, over 3825325.48 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-31 18:16:34,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.37 vs. limit=15.0 +2024-08-31 18:16:36,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=258794.66666666666, ans=0.2 +2024-08-31 18:16:54,775 INFO [train.py:1114] (0/4) Epoch 20, batch 1250, loss[loss=0.1978, simple_loss=0.2748, pruned_loss=0.04409, ctc_loss=0.08164, over 19541.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.271, pruned_loss=0.04892, ctc_loss=0.0921, over 3843320.66 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:17:17,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.31 vs. 
limit=10.0 +2024-08-31 18:17:20,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.673e+02 1.864e+02 2.243e+02 4.460e+02, threshold=3.727e+02, percent-clipped=1.0 +2024-08-31 18:17:27,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.10 vs. limit=6.0 +2024-08-31 18:17:48,937 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=8.394e-02 +2024-08-31 18:18:00,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.35 vs. limit=15.0 +2024-08-31 18:18:05,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259114.66666666666, ans=0.1 +2024-08-31 18:19:03,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=259114.66666666666, ans=0.125 +2024-08-31 18:19:05,802 INFO [train.py:1114] (0/4) Epoch 20, batch 1300, loss[loss=0.2163, simple_loss=0.2805, pruned_loss=0.05623, ctc_loss=0.09898, over 18810.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2701, pruned_loss=0.04846, ctc_loss=0.09129, over 3844907.25 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:19:09,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=259168.0, ans=0.2 +2024-08-31 18:19:09,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=259168.0, ans=0.125 +2024-08-31 18:19:12,404 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:19:25,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=259221.33333333334, ans=0.125 +2024-08-31 18:19:30,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.94 vs. limit=15.0 +2024-08-31 18:19:38,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259274.66666666666, ans=0.1 +2024-08-31 18:19:49,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=259328.0, ans=0.0 +2024-08-31 18:19:52,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.25 vs. limit=22.5 +2024-08-31 18:19:53,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.01 vs. limit=10.0 +2024-08-31 18:20:12,168 INFO [train.py:1114] (0/4) Epoch 20, batch 1350, loss[loss=0.1951, simple_loss=0.2658, pruned_loss=0.04398, ctc_loss=0.09078, over 19761.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2695, pruned_loss=0.04808, ctc_loss=0.0906, over 3855646.42 frames. 
], batch size: 54, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:20:34,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=259488.0, ans=0.0 +2024-08-31 18:20:38,777 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.436e+02 1.677e+02 1.917e+02 2.382e+02 4.193e+02, threshold=3.834e+02, percent-clipped=5.0 +2024-08-31 18:20:51,510 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:21:07,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.43 vs. limit=22.5 +2024-08-31 18:21:08,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259648.0, ans=0.125 +2024-08-31 18:21:14,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=259648.0, ans=0.025 +2024-08-31 18:21:16,776 INFO [train.py:1114] (0/4) Epoch 20, batch 1400, loss[loss=0.1715, simple_loss=0.2359, pruned_loss=0.03902, ctc_loss=0.07262, over 19682.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2695, pruned_loss=0.04806, ctc_loss=0.09038, over 3862772.72 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:21:24,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=259701.33333333334, ans=0.2 +2024-08-31 18:21:53,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.18 vs. limit=22.5 +2024-08-31 18:22:00,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=259861.33333333334, ans=0.0 +2024-08-31 18:22:01,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=259861.33333333334, ans=0.125 +2024-08-31 18:22:50,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=259914.66666666666, ans=0.2 +2024-08-31 18:22:53,581 INFO [train.py:1114] (0/4) Epoch 20, batch 1450, loss[loss=0.2078, simple_loss=0.2759, pruned_loss=0.05122, ctc_loss=0.09322, over 19667.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2701, pruned_loss=0.04832, ctc_loss=0.09097, over 3861785.17 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:23:06,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.32 vs. limit=15.0 +2024-08-31 18:23:17,231 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.776e+02 2.029e+02 2.458e+02 5.712e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-31 18:23:27,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=260074.66666666666, ans=0.0 +2024-08-31 18:23:38,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260128.0, ans=0.1 +2024-08-31 18:23:47,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.65 vs. 
limit=22.5 +2024-08-31 18:23:54,002 INFO [train.py:1114] (0/4) Epoch 20, batch 1500, loss[loss=0.2057, simple_loss=0.2798, pruned_loss=0.04819, ctc_loss=0.0882, over 19584.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2703, pruned_loss=0.04827, ctc_loss=0.09099, over 3861372.31 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:24:14,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=260288.0, ans=0.0 +2024-08-31 18:24:20,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=260341.33333333334, ans=0.0 +2024-08-31 18:24:21,778 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:25:19,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=260394.66666666666, ans=0.0 +2024-08-31 18:25:34,752 INFO [train.py:1114] (0/4) Epoch 20, batch 1550, loss[loss=0.2229, simple_loss=0.2908, pruned_loss=0.05664, ctc_loss=0.1043, over 19610.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2702, pruned_loss=0.04845, ctc_loss=0.09129, over 3846392.59 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 32.0 +2024-08-31 18:26:10,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=260501.33333333334, ans=0.1 +2024-08-31 18:26:24,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=260554.66666666666, ans=10.0 +2024-08-31 18:26:30,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=260554.66666666666, ans=0.04949747468305833 +2024-08-31 18:26:33,183 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.748e+02 2.049e+02 2.466e+02 3.855e+02, threshold=4.097e+02, percent-clipped=0.0 +2024-08-31 18:27:18,517 INFO [train.py:1114] (0/4) Epoch 20, batch 1600, loss[loss=0.2263, simple_loss=0.293, pruned_loss=0.05743, ctc_loss=0.1118, over 19843.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.27, pruned_loss=0.04852, ctc_loss=0.09162, over 3834899.02 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-31 18:27:33,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=260821.33333333334, ans=0.0 +2024-08-31 18:27:47,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=260874.66666666666, ans=0.125 +2024-08-31 18:27:55,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=260928.0, ans=0.2 +2024-08-31 18:28:10,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=260981.33333333334, ans=0.125 +2024-08-31 18:28:30,409 INFO [train.py:1114] (0/4) Epoch 20, batch 1650, loss[loss=0.1896, simple_loss=0.271, pruned_loss=0.03898, ctc_loss=0.0756, over 19656.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.27, pruned_loss=0.0483, ctc_loss=0.09121, over 3830835.84 frames. 
], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:28:41,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=261088.0, ans=0.2 +2024-08-31 18:28:53,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.719e+02 2.026e+02 2.553e+02 4.958e+02, threshold=4.052e+02, percent-clipped=3.0 +2024-08-31 18:28:54,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0 +2024-08-31 18:29:21,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=261248.0, ans=0.0 +2024-08-31 18:29:27,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=261248.0, ans=0.04949747468305833 +2024-08-31 18:29:29,537 INFO [train.py:1114] (0/4) Epoch 20, batch 1700, loss[loss=0.1908, simple_loss=0.2448, pruned_loss=0.04995, ctc_loss=0.09213, over 19688.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.27, pruned_loss=0.04829, ctc_loss=0.09112, over 3845161.14 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:29:34,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=261301.33333333334, ans=15.0 +2024-08-31 18:29:34,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.30 vs. limit=15.0 +2024-08-31 18:29:51,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=261354.66666666666, ans=0.2 +2024-08-31 18:30:15,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=261461.33333333334, ans=10.0 +2024-08-31 18:31:14,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=261514.66666666666, ans=0.125 +2024-08-31 18:31:18,021 INFO [train.py:1114] (0/4) Epoch 20, batch 1750, loss[loss=0.1716, simple_loss=0.2408, pruned_loss=0.03694, ctc_loss=0.07129, over 19683.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2691, pruned_loss=0.04786, ctc_loss=0.09037, over 3849342.05 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:31:31,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.30 vs. limit=6.0 +2024-08-31 18:31:31,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261621.33333333334, ans=0.125 +2024-08-31 18:31:39,996 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.679e+02 1.951e+02 2.329e+02 4.159e+02, threshold=3.901e+02, percent-clipped=0.0 +2024-08-31 18:31:52,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.89 vs. 
limit=15.0 +2024-08-31 18:31:54,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261728.0, ans=0.125 +2024-08-31 18:32:13,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=261781.33333333334, ans=0.125 +2024-08-31 18:32:13,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=261781.33333333334, ans=0.0 +2024-08-31 18:32:15,183 INFO [train.py:1114] (0/4) Epoch 20, batch 1800, loss[loss=0.2031, simple_loss=0.276, pruned_loss=0.04674, ctc_loss=0.09174, over 19608.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2698, pruned_loss=0.04839, ctc_loss=0.09144, over 3852540.42 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:32:36,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261888.0, ans=0.125 +2024-08-31 18:33:16,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=261994.66666666666, ans=0.0 +2024-08-31 18:33:20,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=261994.66666666666, ans=0.2 +2024-08-31 18:33:24,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262048.0, ans=0.1 +2024-08-31 18:33:34,515 INFO [train.py:1114] (0/4) Epoch 20, batch 1850, loss[loss=0.2141, simple_loss=0.2863, pruned_loss=0.05172, ctc_loss=0.09602, over 19577.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2696, pruned_loss=0.04817, ctc_loss=0.091, over 3856244.40 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:33:47,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=262154.6666666667, ans=0.125 +2024-08-31 18:33:56,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.842e+02 2.206e+02 3.038e+02 4.306e+02, threshold=4.411e+02, percent-clipped=5.0 +2024-08-31 18:33:57,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=262208.0, ans=0.1 +2024-08-31 18:33:57,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262208.0, ans=0.125 +2024-08-31 18:34:12,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262261.3333333333, ans=0.125 +2024-08-31 18:34:29,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=262314.6666666667, ans=0.2 +2024-08-31 18:34:36,224 INFO [train.py:1114] (0/4) Epoch 20, batch 1900, loss[loss=0.1796, simple_loss=0.2639, pruned_loss=0.0344, ctc_loss=0.06637, over 19631.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2698, pruned_loss=0.04805, ctc_loss=0.09072, over 3861887.47 frames. 
], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:34:37,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=262368.0, ans=0.0 +2024-08-31 18:34:39,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=262368.0, ans=0.125 +2024-08-31 18:34:42,443 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=5.308e-03 +2024-08-31 18:35:04,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262474.6666666667, ans=0.125 +2024-08-31 18:35:30,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.35 vs. limit=15.0 +2024-08-31 18:35:34,437 INFO [train.py:1114] (0/4) Epoch 20, batch 1950, loss[loss=0.1728, simple_loss=0.2518, pruned_loss=0.03434, ctc_loss=0.06314, over 19595.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2705, pruned_loss=0.04799, ctc_loss=0.09056, over 3870448.19 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-31 18:35:40,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=262634.6666666667, ans=0.125 +2024-08-31 18:35:50,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.45 vs. limit=15.0 +2024-08-31 18:35:55,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.650e+02 1.780e+02 2.101e+02 3.496e+02, threshold=3.560e+02, percent-clipped=0.0 +2024-08-31 18:35:55,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=262741.3333333333, ans=0.5 +2024-08-31 18:36:09,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=262794.6666666667, ans=0.125 +2024-08-31 18:36:19,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=262848.0, ans=0.0 +2024-08-31 18:36:23,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.27 vs. limit=22.5 +2024-08-31 18:36:25,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=262848.0, ans=0.0 +2024-08-31 18:36:29,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.13 vs. limit=10.0 +2024-08-31 18:36:31,281 INFO [train.py:1114] (0/4) Epoch 20, batch 2000, loss[loss=0.1899, simple_loss=0.2517, pruned_loss=0.04571, ctc_loss=0.09178, over 19654.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.271, pruned_loss=0.04819, ctc_loss=0.09107, over 3855021.75 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-31 18:36:45,418 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:37:02,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. 
limit=10.0 +2024-08-31 18:37:05,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=263008.0, ans=0.125 +2024-08-31 18:37:14,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=263061.3333333333, ans=0.125 +2024-08-31 18:37:31,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263168.0, ans=0.0 +2024-08-31 18:37:32,649 INFO [train.py:1114] (0/4) Epoch 20, batch 2050, loss[loss=0.1669, simple_loss=0.2355, pruned_loss=0.0358, ctc_loss=0.0668, over 19708.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.27, pruned_loss=0.04815, ctc_loss=0.09072, over 3851251.97 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:37:52,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=263221.3333333333, ans=0.0 +2024-08-31 18:38:02,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.724e+02 2.041e+02 2.585e+02 3.821e+02, threshold=4.082e+02, percent-clipped=5.0 +2024-08-31 18:38:24,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.82 vs. limit=15.0 +2024-08-31 18:38:36,463 INFO [train.py:1114] (0/4) Epoch 20, batch 2100, loss[loss=0.1875, simple_loss=0.2571, pruned_loss=0.04325, ctc_loss=0.07879, over 19771.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2691, pruned_loss=0.04769, ctc_loss=0.08996, over 3858598.14 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:38:36,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=263434.6666666667, ans=10.0 +2024-08-31 18:39:10,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.19 vs. limit=15.0 +2024-08-31 18:39:17,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.08 vs. limit=10.0 +2024-08-31 18:39:22,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=263648.0, ans=0.2 +2024-08-31 18:39:29,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263648.0, ans=0.1 +2024-08-31 18:39:32,896 INFO [train.py:1114] (0/4) Epoch 20, batch 2150, loss[loss=0.2061, simple_loss=0.2733, pruned_loss=0.0505, ctc_loss=0.09496, over 19570.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2683, pruned_loss=0.04735, ctc_loss=0.08904, over 3868795.35 frames. 
], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:39:49,883 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:39:58,526 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.627e+02 1.896e+02 2.393e+02 5.058e+02, threshold=3.792e+02, percent-clipped=5.0 +2024-08-31 18:40:02,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=263808.0, ans=0.125 +2024-08-31 18:40:02,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=263808.0, ans=0.0 +2024-08-31 18:40:04,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0 +2024-08-31 18:41:03,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=263914.6666666667, ans=0.025 +2024-08-31 18:41:08,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=263968.0, ans=0.2 +2024-08-31 18:41:08,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=263968.0, ans=0.125 +2024-08-31 18:41:09,870 INFO [train.py:1114] (0/4) Epoch 20, batch 2200, loss[loss=0.1812, simple_loss=0.2603, pruned_loss=0.0368, ctc_loss=0.07123, over 19581.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.268, pruned_loss=0.0471, ctc_loss=0.08845, over 3867840.37 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:41:27,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=264021.3333333333, ans=15.0 +2024-08-31 18:41:27,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=264021.3333333333, ans=0.025 +2024-08-31 18:41:35,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264021.3333333333, ans=0.125 +2024-08-31 18:42:17,185 INFO [train.py:1114] (0/4) Epoch 20, batch 2250, loss[loss=0.2105, simple_loss=0.2782, pruned_loss=0.05194, ctc_loss=0.09704, over 19618.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2684, pruned_loss=0.04737, ctc_loss=0.08891, over 3866799.94 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:42:23,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=264234.6666666667, ans=0.0 +2024-08-31 18:42:31,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=264288.0, ans=0.04949747468305833 +2024-08-31 18:42:35,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=264288.0, ans=0.0 +2024-08-31 18:42:42,058 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.705e+02 2.149e+02 2.747e+02 5.291e+02, threshold=4.298e+02, percent-clipped=7.0 +2024-08-31 18:42:54,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=264394.6666666667, ans=0.125 +2024-08-31 18:42:54,705 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:43:03,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=264394.6666666667, ans=0.0 +2024-08-31 18:43:04,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=264448.0, ans=0.09899494936611666 +2024-08-31 18:43:07,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=264448.0, ans=0.2 +2024-08-31 18:43:07,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=264448.0, ans=0.0 +2024-08-31 18:43:16,654 INFO [train.py:1114] (0/4) Epoch 20, batch 2300, loss[loss=0.1914, simple_loss=0.262, pruned_loss=0.04385, ctc_loss=0.08277, over 19496.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2683, pruned_loss=0.04765, ctc_loss=0.08977, over 3859390.45 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:43:29,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264554.6666666667, ans=0.1 +2024-08-31 18:43:29,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=264554.6666666667, ans=10.0 +2024-08-31 18:43:34,635 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=12.0 +2024-08-31 18:43:37,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.41 vs. 
limit=15.0 +2024-08-31 18:43:43,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=264608.0, ans=0.125 +2024-08-31 18:43:52,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=264661.3333333333, ans=0.0 +2024-08-31 18:43:54,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264661.3333333333, ans=0.1 +2024-08-31 18:43:59,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=264661.3333333333, ans=0.125 +2024-08-31 18:44:02,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=264714.6666666667, ans=0.125 +2024-08-31 18:44:02,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0 +2024-08-31 18:44:04,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=264714.6666666667, ans=0.2 +2024-08-31 18:44:09,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=264714.6666666667, ans=0.035 +2024-08-31 18:44:10,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=264714.6666666667, ans=0.0 +2024-08-31 18:44:12,809 INFO [train.py:1114] (0/4) Epoch 20, batch 2350, loss[loss=0.2247, simple_loss=0.2873, pruned_loss=0.05931, ctc_loss=0.1085, over 19669.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2683, pruned_loss=0.04781, ctc_loss=0.08982, over 3862188.44 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:44:19,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264768.0, ans=0.125 +2024-08-31 18:44:32,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=264821.3333333333, ans=0.0 +2024-08-31 18:44:47,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264821.3333333333, ans=0.0 +2024-08-31 18:44:49,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.669e+02 1.905e+02 2.325e+02 3.822e+02, threshold=3.811e+02, percent-clipped=0.0 +2024-08-31 18:45:10,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=264928.0, ans=0.125 +2024-08-31 18:45:14,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264981.3333333333, ans=0.125 +2024-08-31 18:45:24,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-08-31 18:45:26,860 INFO [train.py:1114] (0/4) Epoch 20, batch 2400, loss[loss=0.2131, simple_loss=0.2865, pruned_loss=0.05141, ctc_loss=0.09208, over 19336.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2704, pruned_loss=0.04856, ctc_loss=0.09105, over 3857312.51 frames. 
], batch size: 67, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:45:35,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265034.6666666667, ans=0.125 +2024-08-31 18:45:49,469 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:46:02,492 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-08-31 18:46:15,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.66 vs. limit=15.0 +2024-08-31 18:46:18,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265248.0, ans=0.1 +2024-08-31 18:46:21,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.13 vs. limit=22.5 +2024-08-31 18:46:23,878 INFO [train.py:1114] (0/4) Epoch 20, batch 2450, loss[loss=0.2568, simple_loss=0.3013, pruned_loss=0.07732, ctc_loss=0.1444, over 13541.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2743, pruned_loss=0.05117, ctc_loss=0.09641, over 3731858.51 frames. ], batch size: 141, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:46:29,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=15.0 +2024-08-31 18:46:36,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265354.6666666667, ans=0.1 +2024-08-31 18:46:38,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.26 vs. limit=15.0 +2024-08-31 18:46:43,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=265354.6666666667, ans=0.04949747468305833 +2024-08-31 18:46:45,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.663e+02 1.874e+02 2.086e+02 3.013e+02, threshold=3.749e+02, percent-clipped=0.0 +2024-08-31 18:46:47,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=265408.0, ans=0.125 +2024-08-31 18:46:55,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-08-31 18:47:05,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=265461.3333333333, ans=0.07 +2024-08-31 18:47:07,547 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-20.pt +2024-08-31 18:50:18,403 INFO [train.py:1387] (0/4) Done! 
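The per-batch tot_loss entries in the log above combine three components. Assuming the combination rule loss = simple_loss_scale * simple_loss + pruned_loss + ctc_loss_scale * ctc_loss, with the scales 'simple_loss_scale': 0.5 and 'ctc_loss_scale': 0.2 taken from the hyperparameter dump in the companion log below (the rule itself is an inference from those scales, not something the log states), the logged totals are consistent. A minimal sketch checking this against the epoch 20, batch 1000 entry:

```python
# Minimal sketch: check how a logged tot_loss decomposes, assuming
#   loss = simple_loss_scale * simple_loss + pruned_loss + ctc_loss_scale * ctc_loss
# The scales come from the hyperparameter dump in the next log file;
# the combination rule is an inference, not stated in the log itself.

simple_loss_scale = 0.5  # 'simple_loss_scale' in the config dump
ctc_loss_scale = 0.2     # 'ctc_loss_scale' in the config dump

# Component values copied from the "Epoch 20, batch 1000" entry above.
simple_loss, pruned_loss, ctc_loss = 0.2707, 0.04902, 0.09247

total = simple_loss_scale * simple_loss + pruned_loss + ctc_loss_scale * ctc_loss
print(f"{total:.4f}")  # prints 0.2029, matching the logged loss=0.2029
```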
diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-1 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-1 new file mode 100644 index 0000000000000000000000000000000000000000..ba19f760f9262ffb9db9cbd9f254e9d3e9b4a09e --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-1 @@ -0,0 +1,1030 @@ +2024-08-31 13:15:01,250 INFO [train.py:1182] (1/4) Training started +2024-08-31 13:15:01,251 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-31 13:15:02,197 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 
1, 'eos_id': 1, 'vocab_size': 500} +2024-08-31 13:15:02,197 INFO [train.py:1212] (1/4) About to create model +2024-08-31 13:15:10,397 INFO [train.py:1216] (1/4) Number of model parameters: 66367431 +2024-08-31 13:15:10,438 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-31 13:16:01,653 INFO [train.py:1231] (1/4) Using DDP +2024-08-31 13:16:07,014 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-31 13:16:07,183 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-31 13:16:07,183 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-31 13:16:07,277 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-31 13:16:07,277 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-31 13:16:07,277 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-31 13:16:07,277 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-31 13:16:07,277 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-31 13:16:07,277 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-31 13:16:08,889 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-31 13:16:08,891 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-31 13:16:09,671 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-31 13:16:10,141 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-31 13:16:10,463 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-31 13:16:10,464 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:22:40,297 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.35 vs. limit=3.0 +2024-08-31 13:22:43,898 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13363MB +2024-08-31 13:22:45,382 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-31 13:23:02,022 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-31 13:23:03,533 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-31 13:24:12,098 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-31 13:24:13,264 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=9.29 vs. limit=5.0 +2024-08-31 13:24:13,685 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 13490MB +2024-08-31 13:24:13,707 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-31 13:25:06,949 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04853, ctc_loss=0.09187, over 19410.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2527, pruned_loss=0.04853, ctc_loss=0.09187, over 19410.00 frames. ], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 13:25:06,950 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-31 13:25:49,897 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. 
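The hyperparameter dump at the start of this log encodes the Zipformer's per-stack settings as comma-separated strings, one field per encoder stack: 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'encoder_dim': '192,256,384,512,384,256', and so on. A minimal sketch of how such strings expand into per-stack lists (the per_stack helper is a hypothetical stand-in for illustration, not icefall's own parser):

```python
# Illustrative only: split the comma-separated per-stack settings from the
# hyperparameter dump into lists, one value per Zipformer encoder stack.
# (icefall does its own parsing; this helper is a stand-in for illustration.)

def per_stack(csv: str) -> list[int]:
    return [int(x) for x in csv.split(",")]

num_encoder_layers = per_stack("2,2,3,4,3,2")       # layers per stack
downsampling_factor = per_stack("1,2,4,8,4,2")      # temporal downsampling
encoder_dim = per_stack("192,256,384,512,384,256")  # embedding dim per stack

# Six stacks, widest (512-dim) in the middle where downsampling is 8x.
assert len(num_encoder_layers) == len(downsampling_factor) == len(encoder_dim) == 6
```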
+2024-08-31 13:25:49,898 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13490MB +2024-08-31 13:25:51,798 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.51 vs. limit=15.0 +2024-08-31 13:27:19,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.71 vs. limit=6.0 +2024-08-31 13:32:00,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=225733.33333333334, ans=0.0 +2024-08-31 13:35:15,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.62 vs. limit=10.0 +2024-08-31 13:44:22,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225840.0, ans=0.125 +2024-08-31 13:48:08,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.31 vs. limit=12.0 +2024-08-31 13:48:13,677 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.934e+02 2.118e+02 2.433e+02 6.228e+02, threshold=4.237e+02, percent-clipped=5.0 +2024-08-31 13:56:46,308 INFO [train.py:1114] (1/4) Epoch 18, batch 50, loss[loss=0.1581, simple_loss=0.2327, pruned_loss=0.03018, ctc_loss=0.05788, over 19735.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2748, pruned_loss=0.05185, ctc_loss=0.09917, over 843693.80 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 13:56:58,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.86 vs. limit=6.0 +2024-08-31 13:57:42,689 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 13:57:42,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225946.66666666666, ans=0.125 +2024-08-31 14:00:54,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226000.0, ans=0.125 +2024-08-31 14:08:12,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=226106.66666666666, ans=0.95 +2024-08-31 14:10:04,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0 +2024-08-31 14:13:50,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226160.0, ans=0.125 +2024-08-31 14:13:52,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0 +2024-08-31 14:15:00,240 INFO [train.py:1114] (1/4) Epoch 18, batch 100, loss[loss=0.2118, simple_loss=0.2736, pruned_loss=0.05532, ctc_loss=0.09851, over 19719.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.278, pruned_loss=0.05344, ctc_loss=0.1018, over 1498452.85 frames. 
], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:16:32,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.25 vs. limit=15.0 +2024-08-31 14:17:26,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-31 14:26:16,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=226320.0, ans=0.2 +2024-08-31 14:28:46,390 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 14:28:55,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0 +2024-08-31 14:30:19,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226373.33333333334, ans=0.0 +2024-08-31 14:32:23,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226426.66666666666, ans=0.0 +2024-08-31 14:32:51,601 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.685e+02 1.949e+02 2.332e+02 3.525e+02, threshold=3.898e+02, percent-clipped=0.0 +2024-08-31 14:34:13,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=226480.0, ans=0.125 +2024-08-31 14:34:38,796 INFO [train.py:1114] (1/4) Epoch 18, batch 150, loss[loss=0.1912, simple_loss=0.2527, pruned_loss=0.04776, ctc_loss=0.0857, over 19694.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2756, pruned_loss=0.05262, ctc_loss=0.09994, over 2027899.17 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:43:39,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-31 14:44:39,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=226533.33333333334, ans=0.025 +2024-08-31 14:47:14,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=226586.66666666666, ans=0.2 +2024-08-31 14:57:50,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226693.33333333334, ans=0.125 +2024-08-31 15:01:40,800 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:05:15,400 INFO [train.py:1114] (1/4) Epoch 18, batch 200, loss[loss=0.2068, simple_loss=0.2791, pruned_loss=0.04895, ctc_loss=0.0915, over 18231.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2741, pruned_loss=0.05166, ctc_loss=0.09776, over 2435641.98 frames. 
], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-31 15:05:49,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226746.66666666666, ans=0.125 +2024-08-31 15:07:59,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226800.0, ans=0.125 +2024-08-31 15:08:19,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226800.0, ans=0.125 +2024-08-31 15:15:21,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=226906.66666666666, ans=0.125 +2024-08-31 15:17:44,784 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.761e+02 2.086e+02 2.524e+02 4.159e+02, threshold=4.172e+02, percent-clipped=2.0 +2024-08-31 15:17:59,767 INFO [train.py:1114] (1/4) Epoch 18, batch 250, loss[loss=0.2447, simple_loss=0.3054, pruned_loss=0.06604, ctc_loss=0.1299, over 19375.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2741, pruned_loss=0.05152, ctc_loss=0.09743, over 2757155.58 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-31 15:19:02,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=227013.33333333334, ans=0.025 +2024-08-31 15:19:03,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.87 vs. limit=22.5 +2024-08-31 15:19:04,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.89 vs. limit=22.5 +2024-08-31 15:19:50,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227013.33333333334, ans=0.125 +2024-08-31 15:20:38,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.12 vs. limit=15.0 +2024-08-31 15:22:03,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227120.0, ans=0.1 +2024-08-31 15:22:34,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.35 vs. limit=10.0 +2024-08-31 15:23:39,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.47 vs. limit=15.0 +2024-08-31 15:24:04,304 INFO [train.py:1114] (1/4) Epoch 18, batch 300, loss[loss=0.2333, simple_loss=0.2905, pruned_loss=0.06308, ctc_loss=0.1249, over 19505.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2735, pruned_loss=0.05131, ctc_loss=0.09706, over 3002465.67 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-31 15:24:20,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.93 vs. 
limit=22.5
+2024-08-31 15:25:12,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=227333.33333333334, ans=0.125
+2024-08-31 15:28:01,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.57 vs. limit=15.0
+2024-08-31 15:28:07,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.43 vs. limit=15.0
+2024-08-31 15:29:15,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=227440.0, ans=0.025
+2024-08-31 15:30:47,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.680e+02 1.932e+02 2.386e+02 3.920e+02, threshold=3.864e+02, percent-clipped=0.0
+2024-08-31 15:31:02,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=227493.33333333334, ans=0.125
+2024-08-31 15:31:46,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=227546.66666666666, ans=0.025
+2024-08-31 15:31:47,659 INFO [train.py:1114] (1/4) Epoch 18, batch 350, loss[loss=0.1831, simple_loss=0.2466, pruned_loss=0.0438, ctc_loss=0.07996, over 19745.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2738, pruned_loss=0.05127, ctc_loss=0.09692, over 3192448.38 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0
+2024-08-31 15:32:19,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.74 vs. limit=22.5
+2024-08-31 15:32:35,677 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=12.0
+2024-08-31 15:33:38,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=227600.0, ans=0.2
+2024-08-31 15:33:50,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=227653.33333333334, ans=0.0
+2024-08-31 15:33:59,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227706.66666666666, ans=0.125
+2024-08-31 15:34:01,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=227706.66666666666, ans=0.125
+2024-08-31 15:34:57,669 INFO [train.py:1114] (1/4) Epoch 18, batch 400, loss[loss=0.2276, simple_loss=0.2889, pruned_loss=0.06065, ctc_loss=0.1126, over 19499.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2736, pruned_loss=0.05113, ctc_loss=0.09642, over 3342545.79 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0
+2024-08-31 15:35:13,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227813.33333333334, ans=0.125
+2024-08-31 15:36:52,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=12.0
+2024-08-31 15:37:11,053 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.718e+02 1.967e+02 2.336e+02 3.401e+02, threshold=3.934e+02, percent-clipped=0.0
+2024-08-31 15:37:37,963 INFO [train.py:1114] (1/4) Epoch 18, batch 450, loss[loss=0.2014, simple_loss=0.2795, pruned_loss=0.04476, ctc_loss=0.08432, over 19601.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2738, pruned_loss=0.05139, ctc_loss=0.09667, over 3451509.51 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0
+2024-08-31 15:39:33,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228186.66666666666, ans=0.1
+2024-08-31 15:39:36,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0
+2024-08-31 15:39:46,369 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 15:39:56,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=228240.0, ans=0.125
+2024-08-31 15:40:13,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.56 vs. limit=22.5
+2024-08-31 15:40:18,507 INFO [train.py:1114] (1/4) Epoch 18, batch 500, loss[loss=0.21, simple_loss=0.285, pruned_loss=0.05015, ctc_loss=0.08691, over 19696.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2728, pruned_loss=0.05088, ctc_loss=0.09594, over 3546801.94 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0
+2024-08-31 15:40:22,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.84 vs. limit=15.0
+2024-08-31 15:40:34,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=228400.0, ans=0.125
+2024-08-31 15:40:38,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=228400.0, ans=0.05
+2024-08-31 15:40:39,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=228400.0, ans=0.0
+2024-08-31 15:40:57,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=228506.66666666666, ans=0.09899494936611666
+2024-08-31 15:40:59,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. limit=6.0
+2024-08-31 15:41:10,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.618e+02 1.812e+02 2.329e+02 3.946e+02, threshold=3.624e+02, percent-clipped=1.0
+2024-08-31 15:41:15,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=228560.0, ans=0.0
+2024-08-31 15:41:17,480 INFO [train.py:1114] (1/4) Epoch 18, batch 550, loss[loss=0.2307, simple_loss=0.2951, pruned_loss=0.06066, ctc_loss=0.1124, over 19259.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.273, pruned_loss=0.05118, ctc_loss=0.09636, over 3609220.25 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0
+2024-08-31 15:41:24,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.89 vs. limit=15.0
+2024-08-31 15:42:35,777 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 15:44:01,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=228826.66666666666, ans=0.125
+2024-08-31 15:44:16,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=228826.66666666666, ans=0.125
+2024-08-31 15:44:18,830 INFO [train.py:1114] (1/4) Epoch 18, batch 600, loss[loss=0.2344, simple_loss=0.3042, pruned_loss=0.06002, ctc_loss=0.1114, over 19439.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2734, pruned_loss=0.05124, ctc_loss=0.09633, over 3667203.93 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0
+2024-08-31 15:44:29,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=228880.0, ans=0.0
+2024-08-31 15:45:03,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=228986.66666666666, ans=0.09899494936611666
+2024-08-31 15:45:10,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.76 vs. limit=8.0
+2024-08-31 15:45:12,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.35 vs. limit=10.0
+2024-08-31 15:45:16,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=229040.0, ans=0.125
+2024-08-31 15:45:28,765 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.735e+02 2.092e+02 3.203e+02 5.009e+02, threshold=4.184e+02, percent-clipped=13.0
+2024-08-31 15:45:38,300 INFO [train.py:1114] (1/4) Epoch 18, batch 650, loss[loss=0.2054, simple_loss=0.2752, pruned_loss=0.04922, ctc_loss=0.09283, over 19779.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2724, pruned_loss=0.05078, ctc_loss=0.09546, over 3716775.23 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0
+2024-08-31 15:46:25,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=229200.0, ans=0.125
+2024-08-31 15:46:37,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=229200.0, ans=0.2
+2024-08-31 15:46:40,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=229253.33333333334, ans=0.125
+2024-08-31 15:46:50,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=229306.66666666666, ans=0.125
+2024-08-31 15:46:53,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=22.5
+2024-08-31 15:46:55,991 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 15:46:59,999 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 15:47:14,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.36 vs. limit=15.0
+2024-08-31 15:47:16,575 INFO [train.py:1114] (1/4) Epoch 18, batch 700, loss[loss=0.1961, simple_loss=0.2617, pruned_loss=0.0477, ctc_loss=0.08768, over 19707.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2727, pruned_loss=0.05073, ctc_loss=0.09538, over 3748465.26 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 16.0
+2024-08-31 15:47:21,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=229413.33333333334, ans=0.125
+2024-08-31 15:47:25,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=229413.33333333334, ans=0.025
+2024-08-31 15:48:10,583 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.672e+02 1.935e+02 2.401e+02 4.868e+02, threshold=3.870e+02, percent-clipped=1.0
+2024-08-31 15:48:16,539 INFO [train.py:1114] (1/4) Epoch 18, batch 750, loss[loss=0.2018, simple_loss=0.2768, pruned_loss=0.0469, ctc_loss=0.08244, over 19498.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2724, pruned_loss=0.05053, ctc_loss=0.09508, over 3775010.57 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 16.0
+2024-08-31 15:48:25,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.20 vs. limit=15.0
+2024-08-31 15:48:28,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=229733.33333333334, ans=0.0
+2024-08-31 15:48:55,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.93 vs. limit=15.0
+2024-08-31 15:48:56,736 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.71 vs. limit=15.0
+2024-08-31 15:49:00,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=229786.66666666666, ans=0.0
+2024-08-31 15:49:10,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=229840.0, ans=0.2
+2024-08-31 15:49:15,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=229893.33333333334, ans=0.0
+2024-08-31 15:49:28,026 INFO [train.py:1114] (1/4) Epoch 18, batch 800, loss[loss=0.1961, simple_loss=0.2641, pruned_loss=0.04711, ctc_loss=0.0848, over 19409.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2721, pruned_loss=0.05015, ctc_loss=0.09452, over 3795416.46 frames. ], batch size: 48, lr: 8.37e-03, grad_scale: 32.0
+2024-08-31 15:49:30,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229946.66666666666, ans=0.125
+2024-08-31 15:49:31,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229946.66666666666, ans=0.125
+2024-08-31 15:49:31,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=229946.66666666666, ans=22.5
+2024-08-31 15:49:56,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230053.33333333334, ans=0.125
+2024-08-31 15:50:27,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=230160.0, ans=0.025
+2024-08-31 15:50:27,775 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.682e+02 1.957e+02 2.333e+02 3.697e+02, threshold=3.913e+02, percent-clipped=0.0
+2024-08-31 15:50:33,685 INFO [train.py:1114] (1/4) Epoch 18, batch 850, loss[loss=0.2088, simple_loss=0.2816, pruned_loss=0.04877, ctc_loss=0.09612, over 19648.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2722, pruned_loss=0.05024, ctc_loss=0.09467, over 3814604.01 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0
+2024-08-31 15:50:38,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.13 vs. limit=15.0
+2024-08-31 15:51:18,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=230213.33333333334, ans=0.125
+2024-08-31 15:51:29,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=230266.66666666666, ans=0.125
+2024-08-31 15:51:30,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=230266.66666666666, ans=0.2
+2024-08-31 15:51:30,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230266.66666666666, ans=0.125
+2024-08-31 15:51:35,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.03 vs. limit=22.5
+2024-08-31 15:52:04,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0
+2024-08-31 15:52:15,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=230480.0, ans=0.5
+2024-08-31 15:52:15,925 INFO [train.py:1114] (1/4) Epoch 18, batch 900, loss[loss=0.1971, simple_loss=0.253, pruned_loss=0.05074, ctc_loss=0.09931, over 19809.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2724, pruned_loss=0.05049, ctc_loss=0.09529, over 3819625.95 frames. ], batch size: 49, lr: 8.36e-03, grad_scale: 32.0
+2024-08-31 15:52:16,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=230480.0, ans=0.2
+2024-08-31 15:52:50,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0
+2024-08-31 15:53:02,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.79 vs. limit=15.0
+2024-08-31 15:53:12,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.645e+02 1.872e+02 2.411e+02 3.930e+02, threshold=3.745e+02, percent-clipped=1.0
+2024-08-31 15:53:45,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230746.66666666666, ans=0.125
+2024-08-31 15:53:46,107 INFO [train.py:1114] (1/4) Epoch 18, batch 950, loss[loss=0.1914, simple_loss=0.2568, pruned_loss=0.04631, ctc_loss=0.08355, over 19518.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2729, pruned_loss=0.05103, ctc_loss=0.09632, over 3819326.72 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0
+2024-08-31 15:53:46,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=230746.66666666666, ans=0.2
+2024-08-31 15:53:54,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230746.66666666666, ans=0.125
+2024-08-31 15:54:41,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=230960.0, ans=0.125
+2024-08-31 15:54:48,314 INFO [train.py:1114] (1/4) Epoch 18, batch 1000, loss[loss=0.1849, simple_loss=0.254, pruned_loss=0.04222, ctc_loss=0.07855, over 19849.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2735, pruned_loss=0.05124, ctc_loss=0.09666, over 3815683.01 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0
+2024-08-31 15:55:42,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=231173.33333333334, ans=0.125
+2024-08-31 15:55:50,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=231226.66666666666, ans=0.0
+2024-08-31 15:55:55,134 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.660e+02 1.836e+02 2.172e+02 3.389e+02, threshold=3.673e+02, percent-clipped=0.0
+2024-08-31 15:56:01,084 INFO [train.py:1114] (1/4) Epoch 18, batch 1050, loss[loss=0.1946, simple_loss=0.2695, pruned_loss=0.04314, ctc_loss=0.08355, over 19852.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.273, pruned_loss=0.05124, ctc_loss=0.09652, over 3823270.16 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0
+2024-08-31 15:56:01,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=231280.0, ans=0.2
+2024-08-31 15:56:01,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231280.0, ans=0.1
+2024-08-31 15:56:08,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=231280.0, ans=10.0
+2024-08-31 15:56:31,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=231386.66666666666, ans=0.0
+2024-08-31 15:56:54,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.20 vs. limit=15.0
+2024-08-31 15:57:01,163 INFO [train.py:1114] (1/4) Epoch 18, batch 1100, loss[loss=0.191, simple_loss=0.2635, pruned_loss=0.04255, ctc_loss=0.08364, over 19594.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2724, pruned_loss=0.05063, ctc_loss=0.09553, over 3830526.83 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0
+2024-08-31 15:57:01,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=231546.66666666666, ans=0.0
+2024-08-31 15:57:29,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=231653.33333333334, ans=0.025
+2024-08-31 15:57:30,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=231653.33333333334, ans=0.0
+2024-08-31 15:57:50,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=231706.66666666666, ans=0.5
+2024-08-31 15:57:58,312 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.608e+02 1.860e+02 2.284e+02 4.941e+02, threshold=3.719e+02, percent-clipped=1.0
+2024-08-31 15:58:04,200 INFO [train.py:1114] (1/4) Epoch 18, batch 1150, loss[loss=0.2214, simple_loss=0.2834, pruned_loss=0.0581, ctc_loss=0.1082, over 19588.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2725, pruned_loss=0.05063, ctc_loss=0.09569, over 3830167.85 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0
+2024-08-31 15:58:04,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=12.0
+2024-08-31 15:58:16,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=15.0
+2024-08-31 15:58:31,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=231866.66666666666, ans=0.04949747468305833
+2024-08-31 15:58:45,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231920.0, ans=0.1
+2024-08-31 15:58:46,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=231920.0, ans=0.125
+2024-08-31 15:58:58,324 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 15:59:05,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=232026.66666666666, ans=0.125
+2024-08-31 15:59:17,267 INFO [train.py:1114] (1/4) Epoch 18, batch 1200, loss[loss=0.196, simple_loss=0.277, pruned_loss=0.04135, ctc_loss=0.08085, over 19831.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.274, pruned_loss=0.05098, ctc_loss=0.09645, over 3825669.59 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0
+2024-08-31 15:59:28,841 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0
+2024-08-31 15:59:34,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232133.33333333334, ans=0.125
+2024-08-31 15:59:40,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=232186.66666666666, ans=0.125
+2024-08-31 16:00:07,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=232293.33333333334, ans=0.125
+2024-08-31 16:00:11,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=232293.33333333334, ans=0.2
+2024-08-31 16:00:12,199 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.681e+02 1.869e+02 2.236e+02 3.755e+02, threshold=3.738e+02, percent-clipped=1.0
+2024-08-31 16:00:12,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=232293.33333333334, ans=0.125
+2024-08-31 16:00:18,295 INFO [train.py:1114] (1/4) Epoch 18, batch 1250, loss[loss=0.2382, simple_loss=0.2988, pruned_loss=0.0645, ctc_loss=0.1214, over 19525.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2743, pruned_loss=0.05122, ctc_loss=0.09672, over 3842781.75 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0
+2024-08-31 16:00:44,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232453.33333333334, ans=0.1
+2024-08-31 16:00:47,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232453.33333333334, ans=0.1
+2024-08-31 16:00:54,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=232453.33333333334, ans=0.2
+2024-08-31 16:01:22,428 INFO [train.py:1114] (1/4) Epoch 18, batch 1300, loss[loss=0.2103, simple_loss=0.2835, pruned_loss=0.0505, ctc_loss=0.09039, over 18867.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2733, pruned_loss=0.0508, ctc_loss=0.09586, over 3846959.32 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0
+2024-08-31 16:01:27,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=12.0
+2024-08-31 16:01:55,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.01 vs. limit=10.0
+2024-08-31 16:02:05,655 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=4.815e-02
+2024-08-31 16:02:19,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=232826.66666666666, ans=0.125
+2024-08-31 16:02:21,669 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.351e+02 1.758e+02 2.176e+02 2.645e+02 4.342e+02, threshold=4.353e+02, percent-clipped=3.0
+2024-08-31 16:02:25,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=232826.66666666666, ans=0.0
+2024-08-31 16:02:27,608 INFO [train.py:1114] (1/4) Epoch 18, batch 1350, loss[loss=0.1755, simple_loss=0.2589, pruned_loss=0.03359, ctc_loss=0.06248, over 19771.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2733, pruned_loss=0.05085, ctc_loss=0.09578, over 3856082.84 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 32.0
+2024-08-31 16:03:05,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=233040.0, ans=0.2
+2024-08-31 16:03:06,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=233040.0, ans=0.125
+2024-08-31 16:03:29,598 INFO [train.py:1114] (1/4) Epoch 18, batch 1400, loss[loss=0.1683, simple_loss=0.2327, pruned_loss=0.03691, ctc_loss=0.07522, over 19659.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.273, pruned_loss=0.05064, ctc_loss=0.09553, over 3863302.16 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 32.0
+2024-08-31 16:03:48,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=233146.66666666666, ans=0.125
+2024-08-31 16:03:59,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=233200.0, ans=0.2
+2024-08-31 16:04:35,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=233360.0, ans=10.0
+2024-08-31 16:04:36,297 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.655e+02 1.916e+02 2.338e+02 3.956e+02, threshold=3.832e+02, percent-clipped=0.0
+2024-08-31 16:04:38,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233360.0, ans=0.1
+2024-08-31 16:04:39,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=233360.0, ans=15.0
+2024-08-31 16:04:42,282 INFO [train.py:1114] (1/4) Epoch 18, batch 1450, loss[loss=0.2286, simple_loss=0.2907, pruned_loss=0.06098, ctc_loss=0.1114, over 19656.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2737, pruned_loss=0.05098, ctc_loss=0.09617, over 3861502.71 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 32.0
+2024-08-31 16:04:55,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=233466.66666666666, ans=0.2
+2024-08-31 16:05:24,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=12.0
+2024-08-31 16:05:25,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=233573.33333333334, ans=0.2
+2024-08-31 16:05:27,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=233573.33333333334, ans=0.125
+2024-08-31 16:05:32,417 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:05:48,802 INFO [train.py:1114] (1/4) Epoch 18, batch 1500, loss[loss=0.2329, simple_loss=0.2935, pruned_loss=0.06136, ctc_loss=0.1239, over 19574.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2738, pruned_loss=0.05079, ctc_loss=0.09591, over 3860727.16 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 32.0
+2024-08-31 16:06:24,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=233786.66666666666, ans=0.125
+2024-08-31 16:06:25,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.54 vs. limit=10.0
+2024-08-31 16:06:49,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=233893.33333333334, ans=0.0
+2024-08-31 16:06:50,190 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.669e+02 1.866e+02 2.355e+02 3.552e+02, threshold=3.733e+02, percent-clipped=0.0
+2024-08-31 16:07:06,040 INFO [train.py:1114] (1/4) Epoch 18, batch 1550, loss[loss=0.2042, simple_loss=0.2788, pruned_loss=0.0482, ctc_loss=0.08296, over 19621.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2738, pruned_loss=0.05112, ctc_loss=0.0965, over 3845155.05 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 32.0
+2024-08-31 16:07:24,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.49 vs. limit=15.0
+2024-08-31 16:07:31,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=234053.33333333334, ans=10.0
+2024-08-31 16:07:32,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=234053.33333333334, ans=0.0
+2024-08-31 16:07:33,551 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:07:39,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=234053.33333333334, ans=0.125
+2024-08-31 16:08:07,325 INFO [train.py:1114] (1/4) Epoch 18, batch 1600, loss[loss=0.2083, simple_loss=0.2824, pruned_loss=0.04838, ctc_loss=0.09364, over 19841.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2735, pruned_loss=0.0512, ctc_loss=0.09653, over 3834361.49 frames. ], batch size: 57, lr: 8.29e-03, grad_scale: 32.0
+2024-08-31 16:08:20,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=234266.66666666666, ans=0.0
+2024-08-31 16:08:21,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=234266.66666666666, ans=0.05
+2024-08-31 16:08:25,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234266.66666666666, ans=0.125
+2024-08-31 16:08:27,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=234266.66666666666, ans=0.0
+2024-08-31 16:08:47,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=234373.33333333334, ans=0.2
+2024-08-31 16:09:20,625 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.787e+02 2.153e+02 2.672e+02 5.491e+02, threshold=4.305e+02, percent-clipped=8.0
+2024-08-31 16:09:22,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.71 vs. limit=22.5
+2024-08-31 16:09:26,597 INFO [train.py:1114] (1/4) Epoch 18, batch 1650, loss[loss=0.1947, simple_loss=0.2716, pruned_loss=0.04349, ctc_loss=0.07737, over 19662.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2728, pruned_loss=0.05074, ctc_loss=0.09569, over 3831307.67 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0
+2024-08-31 16:09:28,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=15.0
+2024-08-31 16:09:29,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=234480.0, ans=0.125
+2024-08-31 16:09:31,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.11 vs. limit=15.0
+2024-08-31 16:13:02,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=234533.33333333334, ans=0.125
+2024-08-31 16:13:15,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=234533.33333333334, ans=0.125
+2024-08-31 16:13:32,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234586.66666666666, ans=0.0
+2024-08-31 16:14:10,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=234693.33333333334, ans=0.025
+2024-08-31 16:14:15,791 INFO [train.py:1114] (1/4) Epoch 18, batch 1700, loss[loss=0.1806, simple_loss=0.2423, pruned_loss=0.04302, ctc_loss=0.08185, over 19665.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2729, pruned_loss=0.05066, ctc_loss=0.09536, over 3846215.94 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 32.0
+2024-08-31 16:14:42,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=234853.33333333334, ans=0.0
+2024-08-31 16:14:55,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=234906.66666666666, ans=0.04949747468305833
+2024-08-31 16:15:02,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234960.0, ans=0.1
+2024-08-31 16:15:07,769 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.694e+02 2.038e+02 2.484e+02 5.869e+02, threshold=4.076e+02, percent-clipped=3.0
+2024-08-31 16:15:12,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=235013.33333333334, ans=0.2
+2024-08-31 16:15:13,565 INFO [train.py:1114] (1/4) Epoch 18, batch 1750, loss[loss=0.1797, simple_loss=0.2431, pruned_loss=0.04258, ctc_loss=0.07779, over 19628.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2725, pruned_loss=0.05047, ctc_loss=0.09498, over 3851893.90 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0
+2024-08-31 16:15:43,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=235120.0, ans=0.0
+2024-08-31 16:15:53,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0
+2024-08-31 16:15:54,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=235120.0, ans=0.125
+2024-08-31 16:15:56,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=235173.33333333334, ans=0.0
+2024-08-31 16:16:04,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235173.33333333334, ans=0.125
+2024-08-31 16:16:09,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=235226.66666666666, ans=0.125
+2024-08-31 16:16:14,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235226.66666666666, ans=0.1
+2024-08-31 16:16:18,932 INFO [train.py:1114] (1/4) Epoch 18, batch 1800, loss[loss=0.2024, simple_loss=0.2773, pruned_loss=0.04564, ctc_loss=0.09089, over 19601.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2728, pruned_loss=0.05048, ctc_loss=0.095, over 3852585.51 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 32.0
+2024-08-31 16:16:19,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=15.0
+2024-08-31 16:16:39,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=235333.33333333334, ans=0.1
+2024-08-31 16:16:44,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=235386.66666666666, ans=0.125
+2024-08-31 16:16:56,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=235440.0, ans=0.0
+2024-08-31 16:17:12,067 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.320e+02 1.739e+02 2.099e+02 2.606e+02 4.220e+02, threshold=4.197e+02, percent-clipped=1.0
+2024-08-31 16:17:16,667 INFO [train.py:1114] (1/4) Epoch 18, batch 1850, loss[loss=0.2217, simple_loss=0.2861, pruned_loss=0.05678, ctc_loss=0.1093, over 19574.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2727, pruned_loss=0.05015, ctc_loss=0.09438, over 3855922.66 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 16.0
+2024-08-31 16:17:35,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.25 vs. limit=10.0
+2024-08-31 16:17:36,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235600.0, ans=0.125
+2024-08-31 16:17:58,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=235706.66666666666, ans=0.125
+2024-08-31 16:18:05,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235706.66666666666, ans=0.125
+2024-08-31 16:18:06,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=235706.66666666666, ans=0.125
+2024-08-31 16:18:21,111 INFO [train.py:1114] (1/4) Epoch 18, batch 1900, loss[loss=0.2238, simple_loss=0.2891, pruned_loss=0.05746, ctc_loss=0.109, over 19667.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2733, pruned_loss=0.05047, ctc_loss=0.095, over 3860504.63 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 16.0
+2024-08-31 16:18:35,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=235866.66666666666, ans=0.0
+2024-08-31 16:18:40,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235866.66666666666, ans=0.1
+2024-08-31 16:18:42,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=235866.66666666666, ans=0.0
+2024-08-31 16:18:44,821 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:18:46,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.63 vs. limit=22.5
+2024-08-31 16:18:49,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235920.0, ans=0.1
+2024-08-31 16:19:07,733 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.16 vs. limit=10.0
+2024-08-31 16:19:11,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=236026.66666666666, ans=0.2
+2024-08-31 16:19:14,248 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.296e+02 1.623e+02 1.837e+02 2.195e+02 5.135e+02, threshold=3.673e+02, percent-clipped=2.0
+2024-08-31 16:19:18,782 INFO [train.py:1114] (1/4) Epoch 18, batch 1950, loss[loss=0.2194, simple_loss=0.2845, pruned_loss=0.05676, ctc_loss=0.1018, over 19581.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.274, pruned_loss=0.05029, ctc_loss=0.09451, over 3869594.56 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 16.0
+2024-08-31 16:20:39,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=236186.66666666666, ans=0.0
+2024-08-31 16:20:42,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236186.66666666666, ans=0.1
+2024-08-31 16:20:43,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0
+2024-08-31 16:21:16,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=236293.33333333334, ans=0.125
+2024-08-31 16:21:20,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=236346.66666666666, ans=0.0
+2024-08-31 16:21:21,687 INFO [train.py:1114] (1/4) Epoch 18, batch 2000, loss[loss=0.1834, simple_loss=0.2447, pruned_loss=0.04456, ctc_loss=0.0826, over 19639.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2748, pruned_loss=0.05073, ctc_loss=0.09524, over 3853760.36 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 32.0
+2024-08-31 16:21:33,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236400.0, ans=0.125
+2024-08-31 16:22:04,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=236506.66666666666, ans=0.2
+2024-08-31 16:22:14,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.400e+02 1.704e+02 2.096e+02 2.751e+02 4.638e+02, threshold=4.193e+02, percent-clipped=6.0
+2024-08-31 16:22:19,182 INFO [train.py:1114] (1/4) Epoch 18, batch 2050, loss[loss=0.1828, simple_loss=0.2455, pruned_loss=0.04419, ctc_loss=0.07938, over 19710.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2735, pruned_loss=0.05044, ctc_loss=0.09496, over 3849371.92 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 32.0
+2024-08-31 16:22:44,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=236720.0, ans=0.125
+2024-08-31 16:22:45,078 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:23:05,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=236773.33333333334, ans=0.125
+2024-08-31 16:23:12,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236826.66666666666, ans=0.125
+2024-08-31 16:23:21,355 INFO [train.py:1114] (1/4) Epoch 18, batch 2100, loss[loss=0.1979, simple_loss=0.2727, pruned_loss=0.04538, ctc_loss=0.08077, over 19787.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2726, pruned_loss=0.04995, ctc_loss=0.09404, over 3857332.35 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 32.0
+2024-08-31 16:23:24,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=236880.0, ans=0.2
+2024-08-31 16:23:24,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=236880.0, ans=0.025
+2024-08-31 16:23:31,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=236880.0, ans=0.0
+2024-08-31 16:23:42,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=236933.33333333334, ans=0.125
+2024-08-31 16:23:51,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.61 vs. limit=15.0
+2024-08-31 16:23:53,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236986.66666666666, ans=0.125
+2024-08-31 16:24:09,565 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:24:16,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=237093.33333333334, ans=0.0
+2024-08-31 16:24:27,118 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.628e+02 1.802e+02 2.351e+02 4.404e+02, threshold=3.604e+02, percent-clipped=1.0
+2024-08-31 16:24:31,673 INFO [train.py:1114] (1/4) Epoch 18, batch 2150, loss[loss=0.2093, simple_loss=0.2733, pruned_loss=0.05262, ctc_loss=0.1001, over 19581.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2716, pruned_loss=0.0495, ctc_loss=0.09317, over 3867655.65 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 32.0
+2024-08-31 16:25:07,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=237306.66666666666, ans=0.125
+2024-08-31 16:25:10,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=237306.66666666666, ans=0.0
+2024-08-31 16:25:26,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=237306.66666666666, ans=0.2
+2024-08-31 16:25:27,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=237306.66666666666, ans=0.2
+2024-08-31 16:25:29,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.84 vs. limit=15.0
+2024-08-31 16:25:38,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=237360.0, ans=0.0
+2024-08-31 16:25:40,255 INFO [train.py:1114] (1/4) Epoch 18, batch 2200, loss[loss=0.2103, simple_loss=0.2828, pruned_loss=0.04998, ctc_loss=0.0947, over 19570.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2715, pruned_loss=0.04938, ctc_loss=0.09301, over 3867394.06 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 32.0
+2024-08-31 16:25:51,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=237413.33333333334, ans=0.125
+2024-08-31 16:25:56,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237466.66666666666, ans=0.125
+2024-08-31 16:26:11,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=237520.0, ans=0.125
+2024-08-31 16:26:17,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=237573.33333333334, ans=0.125
+2024-08-31 16:26:19,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=237573.33333333334, ans=0.2
+2024-08-31 16:26:33,726 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.652e+02 1.938e+02 2.493e+02 4.901e+02, threshold=3.877e+02, percent-clipped=6.0
+2024-08-31 16:26:38,361 INFO [train.py:1114] (1/4) Epoch 18, batch 2250, loss[loss=0.2019, simple_loss=0.2739, pruned_loss=0.04659, ctc_loss=0.09207, over 19609.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2719, pruned_loss=0.04957, ctc_loss=0.09342, over 3866849.03 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 32.0
+2024-08-31 16:26:56,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=237680.0, ans=0.2
+2024-08-31 16:27:06,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=237733.33333333334, ans=0.2
+2024-08-31 16:27:31,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.45 vs. limit=15.0
+2024-08-31 16:27:35,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=237840.0, ans=0.0
+2024-08-31 16:27:51,909 INFO [train.py:1114] (1/4) Epoch 18, batch 2300, loss[loss=0.2068, simple_loss=0.274, pruned_loss=0.05089, ctc_loss=0.09445, over 19507.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2711, pruned_loss=0.04961, ctc_loss=0.09351, over 3861725.30 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 32.0
+2024-08-31 16:28:05,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238000.0, ans=0.125
+2024-08-31 16:28:23,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238053.33333333334, ans=0.1
+2024-08-31 16:28:34,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=238106.66666666666, ans=0.125
+2024-08-31 16:28:47,469 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.696e+02 1.848e+02 2.393e+02 3.836e+02, threshold=3.696e+02, percent-clipped=0.0
+2024-08-31 16:29:07,670 INFO [train.py:1114] (1/4) Epoch 18, batch 2350, loss[loss=0.2231, simple_loss=0.2888, pruned_loss=0.05802, ctc_loss=0.1037, over 19669.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2714, pruned_loss=0.04979, ctc_loss=0.09368, over 3864437.57 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 32.0
+2024-08-31 16:29:16,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=238213.33333333334, ans=0.0
+2024-08-31 16:29:23,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=238266.66666666666, ans=0.125
+2024-08-31 16:29:34,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=238320.0, ans=0.125
+2024-08-31 16:29:36,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=238320.0, ans=0.0
+2024-08-31 16:29:40,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.98 vs. limit=22.5
+2024-08-31 16:30:22,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=238373.33333333334, ans=0.125
+2024-08-31 16:30:38,524 INFO [train.py:1114] (1/4) Epoch 18, batch 2400, loss[loss=0.2113, simple_loss=0.2768, pruned_loss=0.05258, ctc_loss=0.1014, over 19258.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2732, pruned_loss=0.05044, ctc_loss=0.09475, over 3859107.89 frames. ], batch size: 71, lr: 8.22e-03, grad_scale: 32.0
+2024-08-31 16:30:40,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=238480.0, ans=0.5
+2024-08-31 16:30:44,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=238480.0, ans=0.2
+2024-08-31 16:30:50,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238533.33333333334, ans=0.1
+2024-08-31 16:30:52,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=238533.33333333334, ans=0.2
+2024-08-31 16:30:59,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238533.33333333334, ans=0.1
+2024-08-31 16:31:02,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=238586.66666666666, ans=0.125
+2024-08-31 16:31:04,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238586.66666666666, ans=0.0
+2024-08-31 16:31:19,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=238640.0, ans=0.025
+2024-08-31 16:31:30,736 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0
+2024-08-31 16:31:42,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.51 vs. limit=15.0
+2024-08-31 16:31:44,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238693.33333333334, ans=0.125
+2024-08-31 16:31:47,472 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.444e+02 1.682e+02 1.835e+02 2.125e+02 4.662e+02, threshold=3.671e+02, percent-clipped=5.0
+2024-08-31 16:31:52,085 INFO [train.py:1114] (1/4) Epoch 18, batch 2450, loss[loss=0.2599, simple_loss=0.2987, pruned_loss=0.08088, ctc_loss=0.1485, over 12966.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2773, pruned_loss=0.05318, ctc_loss=0.1005, over 3729718.85 frames. ], batch size: 141, lr: 8.21e-03, grad_scale: 32.0
+2024-08-31 16:32:29,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=238906.66666666666, ans=0.0
+2024-08-31 16:32:30,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=238906.66666666666, ans=0.07
+2024-08-31 16:32:31,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=238906.66666666666, ans=0.125
+2024-08-31 16:33:43,933 INFO [train.py:1114] (1/4) Epoch 19, batch 0, loss[loss=0.2162, simple_loss=0.2716, pruned_loss=0.0588, ctc_loss=0.1081, over 19417.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2716, pruned_loss=0.0588, ctc_loss=0.1081, over 19417.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0
+2024-08-31 16:33:43,934 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-31 16:33:52,403 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0505, 2.2462, 2.9520, 3.4045], device='cuda:1')
+2024-08-31 16:34:00,545 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1846, simple_loss=0.2728, pruned_loss=0.03584, ctc_loss=0.06159, over 944034.00 frames.
+2024-08-31 16:34:01,380 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13681MB
+2024-08-31 16:34:02,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=238954.66666666666, ans=0.125
+2024-08-31 16:34:21,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=239008.0, ans=0.1
+2024-08-31 16:34:28,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239061.33333333334, ans=0.1
+2024-08-31 16:34:36,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.04 vs. limit=15.0
+2024-08-31 16:35:04,408 INFO [train.py:1114] (1/4) Epoch 19, batch 50, loss[loss=0.2033, simple_loss=0.2578, pruned_loss=0.05355, ctc_loss=0.1043, over 19694.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2764, pruned_loss=0.05387, ctc_loss=0.1024, over 843816.80 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:35:12,511 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.795e+02 2.006e+02 2.342e+02 4.821e+02, threshold=4.012e+02, percent-clipped=4.0
+2024-08-31 16:35:36,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=239328.0, ans=0.2
+2024-08-31 16:35:40,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=239381.33333333334, ans=0.025
+2024-08-31 16:35:58,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.83 vs. limit=12.0
+2024-08-31 16:35:59,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=239434.66666666666, ans=15.0
+2024-08-31 16:36:03,816 INFO [train.py:1114] (1/4) Epoch 19, batch 100, loss[loss=0.1634, simple_loss=0.2376, pruned_loss=0.03238, ctc_loss=0.06118, over 19715.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2759, pruned_loss=0.05254, ctc_loss=0.09952, over 1498509.86 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:36:14,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=239488.0, ans=0.2
+2024-08-31 16:36:15,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.24 vs. limit=22.5
+2024-08-31 16:36:16,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=239541.33333333334, ans=0.2
+2024-08-31 16:36:28,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239594.66666666666, ans=0.125
+2024-08-31 16:36:50,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239648.0, ans=0.125
+2024-08-31 16:36:54,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239701.33333333334, ans=0.125
+2024-08-31 16:37:00,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.91 vs. limit=10.0
+2024-08-31 16:37:06,688 INFO [train.py:1114] (1/4) Epoch 19, batch 150, loss[loss=0.1831, simple_loss=0.2456, pruned_loss=0.0445, ctc_loss=0.07927, over 19722.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2732, pruned_loss=0.0508, ctc_loss=0.09615, over 2027222.49 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:37:15,238 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.762e+02 1.953e+02 2.445e+02 3.524e+02, threshold=3.906e+02, percent-clipped=0.0
+2024-08-31 16:37:24,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239808.0, ans=0.125
+2024-08-31 16:37:44,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=239914.66666666666, ans=0.0
+2024-08-31 16:37:44,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=239914.66666666666, ans=0.0
+2024-08-31 16:37:48,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125
+2024-08-31 16:37:52,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0
+2024-08-31 16:37:55,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=239968.0, ans=0.09899494936611666
+2024-08-31 16:37:59,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=239968.0, ans=0.0
+2024-08-31 16:38:07,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=239968.0, ans=0.0
+2024-08-31 16:38:14,093 INFO [train.py:1114] (1/4) Epoch 19, batch 200, loss[loss=0.2249, simple_loss=0.2907, pruned_loss=0.05707, ctc_loss=0.1125, over 18110.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2724, pruned_loss=0.05041, ctc_loss=0.09536, over 2435690.74 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0
+2024-08-31 16:38:26,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240074.66666666666, ans=0.125
+2024-08-31 16:38:35,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240074.66666666666, ans=0.125
+2024-08-31 16:38:37,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.96 vs. limit=15.0
+2024-08-31 16:38:39,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=240128.0, ans=0.125
+2024-08-31 16:38:46,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=240128.0, ans=10.0
+2024-08-31 16:38:47,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=240128.0, ans=0.95
+2024-08-31 16:39:05,409 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:39:11,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.58 vs. limit=22.5
+2024-08-31 16:39:13,565 INFO [train.py:1114] (1/4) Epoch 19, batch 250, loss[loss=0.2361, simple_loss=0.2945, pruned_loss=0.06406, ctc_loss=0.1241, over 19370.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2721, pruned_loss=0.05015, ctc_loss=0.09461, over 2755982.86 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0
+2024-08-31 16:39:24,853 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:39:26,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240288.0, ans=0.125
+2024-08-31 16:39:27,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.313e+02 1.733e+02 2.186e+02 2.853e+02 4.755e+02, threshold=4.372e+02, percent-clipped=7.0
+2024-08-31 16:40:02,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240448.0, ans=0.125
+2024-08-31 16:40:02,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=240448.0, ans=0.125
+2024-08-31 16:40:06,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.51 vs. limit=22.5
+2024-08-31 16:40:16,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.26 vs. limit=15.0
+2024-08-31 16:40:20,390 INFO [train.py:1114] (1/4) Epoch 19, batch 300, loss[loss=0.2541, simple_loss=0.3092, pruned_loss=0.07284, ctc_loss=0.1333, over 19524.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2715, pruned_loss=0.04979, ctc_loss=0.09376, over 3000158.22 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0
+2024-08-31 16:40:27,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240554.66666666666, ans=0.125
+2024-08-31 16:40:30,353 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:40:35,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=240608.0, ans=0.0
+2024-08-31 16:40:48,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.50 vs. limit=5.0
+2024-08-31 16:40:59,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=240714.66666666666, ans=0.07
+2024-08-31 16:41:01,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=240714.66666666666, ans=0.125
+2024-08-31 16:41:15,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=240768.0, ans=0.0
+2024-08-31 16:41:16,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.42 vs. limit=15.0
+2024-08-31 16:41:21,985 INFO [train.py:1114] (1/4) Epoch 19, batch 350, loss[loss=0.1658, simple_loss=0.2399, pruned_loss=0.03265, ctc_loss=0.06588, over 19750.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.272, pruned_loss=0.04985, ctc_loss=0.09382, over 3190941.52 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0
+2024-08-31 16:41:24,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240821.33333333334, ans=0.1
+2024-08-31 16:41:30,314 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.653e+02 1.904e+02 2.349e+02 4.016e+02, threshold=3.809e+02, percent-clipped=0.0
+2024-08-31 16:41:45,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=240874.66666666666, ans=0.125
+2024-08-31 16:42:03,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=240981.33333333334, ans=0.2
+2024-08-31 16:42:07,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240981.33333333334, ans=0.125
+2024-08-31 16:42:14,416 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=15.0
+2024-08-31 16:42:25,388 INFO [train.py:1114] (1/4) Epoch 19, batch 400, loss[loss=0.192, simple_loss=0.2688, pruned_loss=0.04239, ctc_loss=0.07595, over 19516.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2715, pruned_loss=0.0495, ctc_loss=0.09312, over 3342063.45 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0
+2024-08-31 16:42:39,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.72 vs. limit=15.0
+2024-08-31 16:42:41,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=15.0
+2024-08-31 16:42:55,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.27 vs. limit=22.5
+2024-08-31 16:42:58,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241194.66666666666, ans=0.125
+2024-08-31 16:43:11,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=241248.0, ans=0.0
+2024-08-31 16:43:20,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241301.33333333334, ans=0.125
+2024-08-31 16:43:26,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=241301.33333333334, ans=0.2
+2024-08-31 16:43:34,374 INFO [train.py:1114] (1/4) Epoch 19, batch 450, loss[loss=0.2314, simple_loss=0.3037, pruned_loss=0.05776, ctc_loss=0.109, over 19611.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2725, pruned_loss=0.05005, ctc_loss=0.09407, over 3450262.93 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0
+2024-08-31 16:43:42,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.686e+02 1.896e+02 2.370e+02 4.152e+02, threshold=3.792e+02, percent-clipped=1.0
+2024-08-31 16:43:44,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=241354.66666666666, ans=0.0
+2024-08-31 16:43:59,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.84 vs. limit=10.0
+2024-08-31 16:44:35,480 INFO [train.py:1114] (1/4) Epoch 19, batch 500, loss[loss=0.2038, simple_loss=0.2766, pruned_loss=0.04752, ctc_loss=0.08992, over 19638.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2716, pruned_loss=0.04959, ctc_loss=0.09334, over 3545831.45 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0
+2024-08-31 16:44:37,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241621.33333333334, ans=0.125
+2024-08-31 16:44:45,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=241621.33333333334, ans=0.125
+2024-08-31 16:44:54,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=15.0
+2024-08-31 16:44:55,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=241674.66666666666, ans=0.0
+2024-08-31 16:44:56,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.75 vs. limit=22.5
+2024-08-31 16:45:11,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=241781.33333333334, ans=0.2
+2024-08-31 16:45:28,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=241834.66666666666, ans=0.125
+2024-08-31 16:46:04,286 INFO [train.py:1114] (1/4) Epoch 19, batch 550, loss[loss=0.2331, simple_loss=0.2967, pruned_loss=0.06188, ctc_loss=0.1142, over 19283.00 frames.
], tot_loss[loss=0.2039, simple_loss=0.2715, pruned_loss=0.0495, ctc_loss=0.09311, over 3608551.17 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:46:07,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=241888.0, ans=0.125 +2024-08-31 16:46:10,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=241888.0, ans=0.0 +2024-08-31 16:46:12,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.697e+02 1.983e+02 2.191e+02 3.507e+02, threshold=3.966e+02, percent-clipped=0.0 +2024-08-31 16:46:13,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=241888.0, ans=0.1 +2024-08-31 16:46:35,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=241941.33333333334, ans=0.125 +2024-08-31 16:46:41,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-31 16:46:45,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.53 vs. limit=15.0 +2024-08-31 16:46:47,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-31 16:46:47,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-31 16:47:06,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=242101.33333333334, ans=0.2 +2024-08-31 16:47:10,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=242101.33333333334, ans=0.2 +2024-08-31 16:47:14,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.14 vs. limit=10.0 +2024-08-31 16:47:16,282 INFO [train.py:1114] (1/4) Epoch 19, batch 600, loss[loss=0.2385, simple_loss=0.3089, pruned_loss=0.06077, ctc_loss=0.1163, over 19427.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2718, pruned_loss=0.04963, ctc_loss=0.09322, over 3666336.13 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:47:27,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242154.66666666666, ans=0.125 +2024-08-31 16:48:29,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=242368.0, ans=0.0 +2024-08-31 16:48:31,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=242368.0, ans=0.2 +2024-08-31 16:48:39,709 INFO [train.py:1114] (1/4) Epoch 19, batch 650, loss[loss=0.2055, simple_loss=0.2809, pruned_loss=0.04695, ctc_loss=0.0906, over 19773.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2714, pruned_loss=0.04953, ctc_loss=0.09307, over 3717066.29 frames. 
], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:48:39,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=242421.33333333334, ans=0.1 +2024-08-31 16:48:48,390 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.784e+02 2.044e+02 2.793e+02 4.792e+02, threshold=4.088e+02, percent-clipped=6.0 +2024-08-31 16:49:19,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=15.0 +2024-08-31 16:49:23,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=242581.33333333334, ans=0.125 +2024-08-31 16:49:31,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-31 16:49:32,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-31 16:49:38,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-31 16:49:41,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=242634.66666666666, ans=0.05 +2024-08-31 16:50:02,119 INFO [train.py:1114] (1/4) Epoch 19, batch 700, loss[loss=0.1968, simple_loss=0.2607, pruned_loss=0.04727, ctc_loss=0.09596, over 19732.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2718, pruned_loss=0.04941, ctc_loss=0.0929, over 3749165.96 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:50:04,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.32 vs. limit=15.0 +2024-08-31 16:50:08,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=242688.0, ans=0.0 +2024-08-31 16:50:10,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.89 vs. limit=6.0 +2024-08-31 16:50:13,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=242688.0, ans=0.95 +2024-08-31 16:50:21,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=242741.33333333334, ans=0.0 +2024-08-31 16:50:40,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242794.66666666666, ans=0.1 +2024-08-31 16:51:20,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=242901.33333333334, ans=0.025 +2024-08-31 16:51:21,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=242901.33333333334, ans=0.125 +2024-08-31 16:51:22,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0
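The `ScheduledFloat: name=..., batch_count=..., ans=...` lines above are emitted by icefall's `scaling.py`: Zipformer regularization hyperparameters such as balancer probabilities, skip rates, and dropout values are not fixed constants but schedules re-evaluated at the current `batch_count`, and the log records the value (`ans`) in force for each module. Below is a minimal sketch of evaluating such a piecewise-linear schedule; the class name, call signature, and breakpoints are illustrative assumptions, not the actual icefall implementation.

```python
# Minimal sketch of a piecewise-linear schedule like the "ScheduledFloat"
# values logged above ("batch_count=..., ans=..."). Illustrative only; the
# real implementation lives in icefall's scaling.py and differs in detail.
from bisect import bisect_right


class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs sorted by batch_count,
        # e.g. (0, 0.3), (20000, 0.125): start at 0.3, decay to 0.125.
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def __call__(self, batch_count: float) -> float:
        # Clamp outside the endpoints, interpolate linearly in between.
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        t = (batch_count - x0) / (x1 - x0)
        return y0 + t * (y1 - y0)


prob = PiecewiseLinearSchedule((0, 0.3), (20000, 0.125))
print(prob(242634.67))  # past the last breakpoint -> 0.125, as in "ans=0.125"
```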
+2024-08-31 16:52:16,541 INFO [train.py:1114] (1/4) Epoch 19, batch 750, loss[loss=0.2062, simple_loss=0.2806, pruned_loss=0.04712, ctc_loss=0.09386, over 19487.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2713, pruned_loss=0.04927, ctc_loss=0.09282, over 3774464.38 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:52:21,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242954.66666666666, ans=0.125 +2024-08-31 16:52:40,611 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.707e+02 2.012e+02 2.576e+02 4.596e+02, threshold=4.024e+02, percent-clipped=2.0 +2024-08-31 16:52:45,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=243008.0, ans=0.125 +2024-08-31 16:52:48,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. limit=15.0 +2024-08-31 16:52:58,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=243061.33333333334, ans=0.125 +2024-08-31 16:53:04,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243061.33333333334, ans=0.125 +2024-08-31 16:53:05,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=243061.33333333334, ans=0.0 +2024-08-31 16:53:05,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-08-31 16:53:37,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=243168.0, ans=0.125 +2024-08-31 16:53:40,085 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:53:40,993 INFO [train.py:1114] (1/4) Epoch 19, batch 800, loss[loss=0.1737, simple_loss=0.2415, pruned_loss=0.03913, ctc_loss=0.06914, over 19841.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2708, pruned_loss=0.04904, ctc_loss=0.09246, over 3796520.43 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:53:45,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=243221.33333333334, ans=0.125 +2024-08-31 16:53:52,269 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:54:10,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=243328.0, ans=0.125 +2024-08-31 16:54:35,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243434.66666666666, ans=0.1 +2024-08-31 16:54:52,032 INFO [train.py:1114] (1/4) Epoch 19, batch 850, loss[loss=0.2213, simple_loss=0.2903, pruned_loss=0.05513, ctc_loss=0.1052, over 19650.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2708, pruned_loss=0.04917, ctc_loss=0.09257, over 3814948.74 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0
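The `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ...` lines report adaptive gradient clipping: the optimizer tracks recent total gradient norms and prints their quartiles (min, 25th, 50th, 75th percentile, max), the clipping threshold in force, and the fraction of recent batches that were clipped. In the warning just above, `threshold=4.024e+02` is exactly `Clipping_scale=2.0` times the median quartile `2.012e+02`, so the threshold appears to be derived as scale times the median of the recorded norms. A hedged sketch of that bookkeeping follows; it is a simplification for illustration, not the actual ScaledAdam code in icefall's `optim.py`.

```python
# Sketch of adaptive grad-norm clipping with quartile reporting, in the
# spirit of the optim.py warnings above. Assumed/simplified logic: the real
# ScaledAdam optimizer differs (per-parameter scales, warm-up, etc.).
import torch


def clip_and_report(params, history, clipping_scale=2.0, report_every=100):
    # Total gradient norm over all parameters for this batch
    # (assumes at least one parameter has a gradient).
    norms = [p.grad.norm() for p in params if p.grad is not None]
    tot_norm = torch.stack(norms).norm().item()
    history.append(tot_norm)

    # Threshold derived from recent history: scale * median, matching the
    # relationship visible in the logged quartiles.
    median = sorted(history)[len(history) // 2]
    threshold = clipping_scale * median
    clipped = tot_norm > threshold
    if clipped:
        for p in params:
            if p.grad is not None:
                p.grad.mul_(threshold / tot_norm)

    if len(history) % report_every == 0:
        # Quartiles of the recorded norms: min, 25%, 50%, 75%, max.
        qs = torch.quantile(torch.tensor(history),
                            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        print(f"grad-norm quartiles {qs.tolist()}, threshold={threshold:.3e}")
    return clipped
```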
+2024-08-31 16:54:54,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=243488.0, ans=0.09899494936611666 +2024-08-31 16:55:00,086 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.677e+02 1.837e+02 2.316e+02 3.927e+02, threshold=3.675e+02, percent-clipped=0.0 +2024-08-31 16:55:00,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243488.0, ans=0.1 +2024-08-31 16:55:05,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.14 vs. limit=15.0 +2024-08-31 16:55:13,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243541.33333333334, ans=0.0 +2024-08-31 16:55:20,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.19 vs. limit=15.0 +2024-08-31 16:55:26,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243648.0, ans=0.125 +2024-08-31 16:55:34,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243648.0, ans=0.125 +2024-08-31 16:55:38,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243648.0, ans=0.125 +2024-08-31 16:55:41,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=243648.0, ans=0.09899494936611666 +2024-08-31 16:55:41,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=243648.0, ans=0.125 +2024-08-31 16:55:43,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-31 16:55:55,902 INFO [train.py:1114] (1/4) Epoch 19, batch 900, loss[loss=0.1762, simple_loss=0.2479, pruned_loss=0.03853, ctc_loss=0.06871, over 19823.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2708, pruned_loss=0.04929, ctc_loss=0.0929, over 3819183.23 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:56:03,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.01 vs. 
limit=8.0 +2024-08-31 16:56:26,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=243861.33333333334, ans=0.0 +2024-08-31 16:56:33,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243861.33333333334, ans=0.1 +2024-08-31 16:56:38,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=243861.33333333334, ans=0.0 +2024-08-31 16:56:41,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-31 16:57:03,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243968.0, ans=0.1 +2024-08-31 16:57:05,834 INFO [train.py:1114] (1/4) Epoch 19, batch 950, loss[loss=0.1946, simple_loss=0.2576, pruned_loss=0.04823, ctc_loss=0.08791, over 19491.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.271, pruned_loss=0.04942, ctc_loss=0.09309, over 3820498.17 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:57:10,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-31 16:57:14,299 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.751e+02 2.034e+02 2.400e+02 3.857e+02, threshold=4.067e+02, percent-clipped=1.0 +2024-08-31 16:57:27,858 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:57:31,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=244128.0, ans=0.025 +2024-08-31 16:58:04,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.03 vs. limit=15.0 +2024-08-31 16:58:06,311 INFO [train.py:1114] (1/4) Epoch 19, batch 1000, loss[loss=0.169, simple_loss=0.2443, pruned_loss=0.03353, ctc_loss=0.06665, over 19867.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2721, pruned_loss=0.04996, ctc_loss=0.0941, over 3816829.39 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 16:58:50,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244341.33333333334, ans=0.125 +2024-08-31 16:58:56,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244341.33333333334, ans=0.1 +2024-08-31 17:00:06,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244501.33333333334, ans=0.1 +2024-08-31 17:00:09,461 INFO [train.py:1114] (1/4) Epoch 19, batch 1050, loss[loss=0.2224, simple_loss=0.286, pruned_loss=0.05777, ctc_loss=0.1083, over 19850.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2714, pruned_loss=0.04985, ctc_loss=0.09402, over 3822541.22 frames. 
], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 17:00:14,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244554.66666666666, ans=0.125 +2024-08-31 17:00:15,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-31 17:00:17,657 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.651e+02 1.935e+02 2.361e+02 3.363e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-31 17:00:27,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=244608.0, ans=0.125 +2024-08-31 17:00:36,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-31 17:01:12,065 INFO [train.py:1114] (1/4) Epoch 19, batch 1100, loss[loss=0.2024, simple_loss=0.268, pruned_loss=0.0499, ctc_loss=0.09242, over 19592.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2716, pruned_loss=0.04996, ctc_loss=0.09412, over 3830857.76 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:01:28,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=244874.66666666666, ans=0.125 +2024-08-31 17:02:04,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=244981.33333333334, ans=0.2 +2024-08-31 17:02:43,478 INFO [train.py:1114] (1/4) Epoch 19, batch 1150, loss[loss=0.217, simple_loss=0.2791, pruned_loss=0.05689, ctc_loss=0.1028, over 19598.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2718, pruned_loss=0.05011, ctc_loss=0.09442, over 3830026.08 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:03:11,399 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.693e+02 1.899e+02 2.295e+02 3.327e+02, threshold=3.798e+02, percent-clipped=0.0 +2024-08-31 17:03:53,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.94 vs. limit=22.5 +2024-08-31 17:04:04,674 INFO [train.py:1114] (1/4) Epoch 19, batch 1200, loss[loss=0.2019, simple_loss=0.2755, pruned_loss=0.04601, ctc_loss=0.09064, over 19840.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2727, pruned_loss=0.05036, ctc_loss=0.09494, over 3824926.60 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:04:07,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=245354.66666666666, ans=0.025 +2024-08-31 17:04:26,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=245408.0, ans=0.04949747468305833 +2024-08-31 17:04:30,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.99 vs. 
limit=15.0 +2024-08-31 17:04:38,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=245461.33333333334, ans=0.0 +2024-08-31 17:04:46,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=245514.66666666666, ans=0.5 +2024-08-31 17:05:06,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=245568.0, ans=0.125 +2024-08-31 17:05:08,562 INFO [train.py:1114] (1/4) Epoch 19, batch 1250, loss[loss=0.2327, simple_loss=0.2932, pruned_loss=0.06306, ctc_loss=0.1152, over 19547.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2738, pruned_loss=0.05091, ctc_loss=0.09565, over 3843041.14 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-31 17:05:09,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.10 vs. limit=15.0 +2024-08-31 17:05:16,762 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.647e+02 1.911e+02 2.205e+02 3.499e+02, threshold=3.822e+02, percent-clipped=0.0 +2024-08-31 17:05:35,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=245728.0, ans=0.125 +2024-08-31 17:06:14,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=245834.66666666666, ans=0.025 +2024-08-31 17:06:15,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=245834.66666666666, ans=0.04949747468305833 +2024-08-31 17:06:19,695 INFO [train.py:1114] (1/4) Epoch 19, batch 1300, loss[loss=0.2115, simple_loss=0.2792, pruned_loss=0.05261, ctc_loss=0.09661, over 18776.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2732, pruned_loss=0.05055, ctc_loss=0.09491, over 3846494.21 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-31 17:06:24,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=245888.0, ans=0.125 +2024-08-31 17:06:37,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-31 17:06:51,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.80 vs. limit=10.0 +2024-08-31 17:07:01,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246048.0, ans=0.0 +2024-08-31 17:07:25,626 INFO [train.py:1114] (1/4) Epoch 19, batch 1350, loss[loss=0.1789, simple_loss=0.2554, pruned_loss=0.03692, ctc_loss=0.07114, over 19783.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2721, pruned_loss=0.04997, ctc_loss=0.09376, over 3856904.11 frames. 
], batch size: 54, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:07:35,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246154.66666666666, ans=0.1 +2024-08-31 17:07:39,279 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.765e+02 2.070e+02 2.720e+02 4.418e+02, threshold=4.141e+02, percent-clipped=1.0 +2024-08-31 17:08:18,091 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:08:27,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246368.0, ans=0.125 +2024-08-31 17:08:35,882 INFO [train.py:1114] (1/4) Epoch 19, batch 1400, loss[loss=0.1559, simple_loss=0.2238, pruned_loss=0.03152, ctc_loss=0.06248, over 19689.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2718, pruned_loss=0.04999, ctc_loss=0.09384, over 3864018.00 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:08:43,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.52 vs. limit=22.5 +2024-08-31 17:09:08,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246474.66666666666, ans=0.125 +2024-08-31 17:09:22,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=246528.0, ans=0.125 +2024-08-31 17:09:34,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=246581.33333333334, ans=0.0 +2024-08-31 17:09:52,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=246688.0, ans=0.125 +2024-08-31 17:09:53,676 INFO [train.py:1114] (1/4) Epoch 19, batch 1450, loss[loss=0.2106, simple_loss=0.2802, pruned_loss=0.05232, ctc_loss=0.09106, over 19664.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2723, pruned_loss=0.05015, ctc_loss=0.09413, over 3863145.24 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:10:02,078 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.290e+02 1.691e+02 1.919e+02 2.362e+02 3.353e+02, threshold=3.838e+02, percent-clipped=0.0 +2024-08-31 17:10:02,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246688.0, ans=0.1 +2024-08-31 17:10:08,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-31 17:11:07,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-31 17:11:30,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=11.07 vs. 
limit=12.0 +2024-08-31 17:11:42,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-31 17:12:10,413 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:12:12,398 INFO [train.py:1114] (1/4) Epoch 19, batch 1500, loss[loss=0.2263, simple_loss=0.2921, pruned_loss=0.05872, ctc_loss=0.1076, over 19575.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2726, pruned_loss=0.05002, ctc_loss=0.09416, over 3862569.89 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:12:14,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.43 vs. limit=15.0 +2024-08-31 17:12:22,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=246954.66666666666, ans=0.0 +2024-08-31 17:13:04,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.75 vs. limit=6.0 +2024-08-31 17:13:08,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=247008.0, ans=0.0 +2024-08-31 17:13:45,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.08 vs. limit=15.0 +2024-08-31 17:14:14,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=247114.66666666666, ans=0.0 +2024-08-31 17:14:20,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0 +2024-08-31 17:14:26,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247168.0, ans=0.0 +2024-08-31 17:14:33,507 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:14:36,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247168.0, ans=0.125 +2024-08-31 17:14:36,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=247168.0, ans=0.125 +2024-08-31 17:14:38,417 INFO [train.py:1114] (1/4) Epoch 19, batch 1550, loss[loss=0.2281, simple_loss=0.2896, pruned_loss=0.06051, ctc_loss=0.1137, over 19583.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2725, pruned_loss=0.05013, ctc_loss=0.09462, over 3847411.22 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:14:41,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=247221.33333333334, ans=0.2 +2024-08-31 17:14:43,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.07 vs. 
limit=22.5 +2024-08-31 17:14:46,785 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.654e+02 1.883e+02 2.328e+02 3.879e+02, threshold=3.765e+02, percent-clipped=1.0 +2024-08-31 17:14:47,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=247221.33333333334, ans=0.125 +2024-08-31 17:15:14,920 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:15:15,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=247274.66666666666, ans=0.0 +2024-08-31 17:16:14,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=247381.33333333334, ans=0.0 +2024-08-31 17:16:14,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247381.33333333334, ans=0.1 +2024-08-31 17:16:19,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247381.33333333334, ans=0.1 +2024-08-31 17:16:29,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=247434.66666666666, ans=0.025 +2024-08-31 17:16:40,657 INFO [train.py:1114] (1/4) Epoch 19, batch 1600, loss[loss=0.2051, simple_loss=0.2834, pruned_loss=0.04576, ctc_loss=0.08834, over 19843.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2724, pruned_loss=0.05024, ctc_loss=0.09512, over 3836078.74 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:16:53,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=247541.33333333334, ans=0.0 +2024-08-31 17:16:54,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=247541.33333333334, ans=0.125 +2024-08-31 17:16:59,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-08-31 17:17:01,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247541.33333333334, ans=0.125 +2024-08-31 17:17:25,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=247648.0, ans=0.2 +2024-08-31 17:17:35,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.12 vs. limit=10.0 +2024-08-31 17:17:42,018 INFO [train.py:1114] (1/4) Epoch 19, batch 1650, loss[loss=0.2155, simple_loss=0.2882, pruned_loss=0.05147, ctc_loss=0.09983, over 19660.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2721, pruned_loss=0.05015, ctc_loss=0.0948, over 3832558.49 frames. 
], batch size: 59, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:17:49,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=247754.66666666666, ans=0.0 +2024-08-31 17:17:50,573 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.753e+02 1.927e+02 2.360e+02 4.500e+02, threshold=3.853e+02, percent-clipped=4.0 +2024-08-31 17:17:58,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247808.0, ans=0.125 +2024-08-31 17:18:36,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=12.0 +2024-08-31 17:18:44,942 INFO [train.py:1114] (1/4) Epoch 19, batch 1700, loss[loss=0.1859, simple_loss=0.2457, pruned_loss=0.04561, ctc_loss=0.08694, over 19679.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2715, pruned_loss=0.0495, ctc_loss=0.09357, over 3846785.53 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 64.0 +2024-08-31 17:18:57,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=248074.66666666666, ans=0.125 +2024-08-31 17:19:10,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=22.5 +2024-08-31 17:19:46,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=248234.66666666666, ans=0.0 +2024-08-31 17:19:52,928 INFO [train.py:1114] (1/4) Epoch 19, batch 1750, loss[loss=0.1832, simple_loss=0.246, pruned_loss=0.04336, ctc_loss=0.0842, over 19659.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2712, pruned_loss=0.04945, ctc_loss=0.09322, over 3852102.13 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:19:57,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=248288.0, ans=0.0 +2024-08-31 17:20:02,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.715e+02 1.941e+02 2.441e+02 4.524e+02, threshold=3.882e+02, percent-clipped=3.0 +2024-08-31 17:20:15,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=248394.66666666666, ans=12.0 +2024-08-31 17:20:21,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.08 vs. limit=22.5 +2024-08-31 17:20:28,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248448.0, ans=0.1 +2024-08-31 17:20:35,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=248448.0, ans=0.125 +2024-08-31 17:20:39,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=248501.33333333334, ans=0.0 +2024-08-31 17:20:45,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=248501.33333333334, ans=0.07 +2024-08-31 17:20:49,927 INFO [train.py:1114] (1/4) Epoch 19, batch 1800, loss[loss=0.2012, simple_loss=0.2743, pruned_loss=0.04682, ctc_loss=0.08615, over 19617.00 frames. 
], tot_loss[loss=0.2036, simple_loss=0.2711, pruned_loss=0.04941, ctc_loss=0.09298, over 3853513.81 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:21:00,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=248608.0, ans=0.2 +2024-08-31 17:21:01,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=248608.0, ans=0.2 +2024-08-31 17:21:08,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248608.0, ans=0.125 +2024-08-31 17:21:36,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248768.0, ans=0.1 +2024-08-31 17:21:39,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=248768.0, ans=0.025 +2024-08-31 17:21:41,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=248768.0, ans=0.125 +2024-08-31 17:21:46,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.26 vs. limit=15.0 +2024-08-31 17:21:47,149 INFO [train.py:1114] (1/4) Epoch 19, batch 1850, loss[loss=0.2161, simple_loss=0.2909, pruned_loss=0.05045, ctc_loss=0.1007, over 19581.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2716, pruned_loss=0.04965, ctc_loss=0.0935, over 3856194.78 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:21:56,043 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.825e+02 2.203e+02 3.044e+02 4.782e+02, threshold=4.406e+02, percent-clipped=6.0 +2024-08-31 17:22:35,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248981.33333333334, ans=0.1 +2024-08-31 17:22:52,487 INFO [train.py:1114] (1/4) Epoch 19, batch 1900, loss[loss=0.1989, simple_loss=0.2733, pruned_loss=0.04549, ctc_loss=0.08392, over 19643.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2719, pruned_loss=0.04962, ctc_loss=0.09317, over 3860254.22 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:22:53,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=249088.0, ans=0.0 +2024-08-31 17:22:57,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=249088.0, ans=0.0 +2024-08-31 17:23:00,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=249088.0, ans=0.0 +2024-08-31 17:23:30,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.53 vs. limit=6.0 +2024-08-31 17:23:36,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=249301.33333333334, ans=0.125 +2024-08-31 17:23:38,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.49 vs. 
limit=22.5 +2024-08-31 17:23:45,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=249301.33333333334, ans=0.125 +2024-08-31 17:23:49,002 INFO [train.py:1114] (1/4) Epoch 19, batch 1950, loss[loss=0.2048, simple_loss=0.269, pruned_loss=0.0522, ctc_loss=0.09071, over 19609.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2732, pruned_loss=0.04963, ctc_loss=0.09343, over 3869695.05 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:23:49,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.95 vs. limit=6.0 +2024-08-31 17:23:55,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0 +2024-08-31 17:23:58,747 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.608e+02 1.802e+02 2.157e+02 4.545e+02, threshold=3.604e+02, percent-clipped=1.0 +2024-08-31 17:24:04,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=249408.0, ans=0.125 +2024-08-31 17:24:50,835 INFO [train.py:1114] (1/4) Epoch 19, batch 2000, loss[loss=0.1755, simple_loss=0.2382, pruned_loss=0.04067, ctc_loss=0.07859, over 19645.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2739, pruned_loss=0.05002, ctc_loss=0.09443, over 3855107.31 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:25:18,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249728.0, ans=0.125 +2024-08-31 17:25:33,070 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:25:35,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=249834.66666666666, ans=0.2 +2024-08-31 17:25:45,653 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:25:47,800 INFO [train.py:1114] (1/4) Epoch 19, batch 2050, loss[loss=0.1758, simple_loss=0.253, pruned_loss=0.03547, ctc_loss=0.06902, over 19722.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2728, pruned_loss=0.0499, ctc_loss=0.09424, over 3851539.82 frames. 
], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:25:51,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=249888.0, ans=0.125 +2024-08-31 17:25:57,144 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.719e+02 2.018e+02 2.402e+02 3.677e+02, threshold=4.037e+02, percent-clipped=1.0 +2024-08-31 17:26:02,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-31 17:26:02,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-31 17:26:06,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249941.33333333334, ans=0.1 +2024-08-31 17:26:15,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249994.66666666666, ans=0.1 +2024-08-31 17:26:24,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=250048.0, ans=0.0 +2024-08-31 17:26:44,677 INFO [train.py:1114] (1/4) Epoch 19, batch 2100, loss[loss=0.195, simple_loss=0.2715, pruned_loss=0.04337, ctc_loss=0.07943, over 19776.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2725, pruned_loss=0.04982, ctc_loss=0.09408, over 3858510.10 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:26:47,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.42 vs. limit=15.0 +2024-08-31 17:27:27,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250314.66666666666, ans=0.125 +2024-08-31 17:27:35,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=250368.0, ans=0.125 +2024-08-31 17:27:38,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250368.0, ans=0.125 +2024-08-31 17:27:42,496 INFO [train.py:1114] (1/4) Epoch 19, batch 2150, loss[loss=0.1971, simple_loss=0.2655, pruned_loss=0.04612, ctc_loss=0.09097, over 19581.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2719, pruned_loss=0.0497, ctc_loss=0.09375, over 3868401.60 frames. 
], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:27:51,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.672e+02 1.975e+02 2.523e+02 4.782e+02, threshold=3.951e+02, percent-clipped=2.0 +2024-08-31 17:27:58,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-31 17:28:10,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250528.0, ans=0.1 +2024-08-31 17:28:22,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=250581.33333333334, ans=0.07 +2024-08-31 17:28:25,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=250581.33333333334, ans=0.025 +2024-08-31 17:28:39,694 INFO [train.py:1114] (1/4) Epoch 19, batch 2200, loss[loss=0.2226, simple_loss=0.2853, pruned_loss=0.05711, ctc_loss=0.1143, over 19583.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2715, pruned_loss=0.04958, ctc_loss=0.09335, over 3866828.23 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:28:51,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.13 vs. limit=22.5 +2024-08-31 17:29:00,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=250741.33333333334, ans=0.04949747468305833 +2024-08-31 17:29:09,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.26 vs. limit=15.0 +2024-08-31 17:29:18,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=250848.0, ans=0.0 +2024-08-31 17:29:20,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=250848.0, ans=0.2 +2024-08-31 17:29:25,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0 +2024-08-31 17:29:38,764 INFO [train.py:1114] (1/4) Epoch 19, batch 2250, loss[loss=0.2205, simple_loss=0.2902, pruned_loss=0.05462, ctc_loss=0.104, over 19610.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2716, pruned_loss=0.04972, ctc_loss=0.09352, over 3867414.95 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:29:47,351 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.680e+02 1.896e+02 2.375e+02 5.292e+02, threshold=3.791e+02, percent-clipped=4.0 +2024-08-31 17:30:19,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251114.66666666666, ans=0.125 +2024-08-31 17:30:32,286 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:30:38,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=251168.0, ans=0.125 +2024-08-31 17:30:40,044 INFO [train.py:1114] (1/4) Epoch 19, batch 2300, loss[loss=0.1959, simple_loss=0.2594, pruned_loss=0.0482, ctc_loss=0.09006, over 19536.00 frames. 
], tot_loss[loss=0.2036, simple_loss=0.2707, pruned_loss=0.04956, ctc_loss=0.09322, over 3861612.16 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:31:04,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.59 vs. limit=15.0 +2024-08-31 17:31:09,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251328.0, ans=0.125 +2024-08-31 17:31:19,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.79 vs. limit=15.0 +2024-08-31 17:31:26,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=15.0 +2024-08-31 17:31:32,463 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:31:36,393 INFO [train.py:1114] (1/4) Epoch 19, batch 2350, loss[loss=0.2125, simple_loss=0.2791, pruned_loss=0.05307, ctc_loss=0.09944, over 19657.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2707, pruned_loss=0.04951, ctc_loss=0.09307, over 3863892.88 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-31 17:31:45,227 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.718e+02 2.013e+02 2.563e+02 3.706e+02, threshold=4.026e+02, percent-clipped=0.0 +2024-08-31 17:32:00,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251541.33333333334, ans=0.125 +2024-08-31 17:32:10,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=251594.66666666666, ans=0.0 +2024-08-31 17:32:14,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=251648.0, ans=0.0 +2024-08-31 17:32:14,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251648.0, ans=0.125 +2024-08-31 17:32:14,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.78 vs. limit=15.0 +2024-08-31 17:32:15,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=251648.0, ans=0.025 +2024-08-31 17:32:28,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-31 17:32:36,598 INFO [train.py:1114] (1/4) Epoch 19, batch 2400, loss[loss=0.2215, simple_loss=0.2909, pruned_loss=0.05588, ctc_loss=0.1007, over 19414.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2727, pruned_loss=0.05028, ctc_loss=0.09429, over 3858113.05 frames. ], batch size: 67, lr: 7.79e-03, grad_scale: 32.0 +2024-08-31 17:33:08,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=251861.33333333334, ans=0.2 +2024-08-31 17:33:39,852 INFO [train.py:1114] (1/4) Epoch 19, batch 2450, loss[loss=0.2403, simple_loss=0.2847, pruned_loss=0.07172, ctc_loss=0.1312, over 13285.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2759, pruned_loss=0.05257, ctc_loss=0.09886, over 3731183.79 frames. 
], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-31 17:33:42,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252021.33333333334, ans=0.1 +2024-08-31 17:33:48,950 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.610e+02 1.856e+02 2.081e+02 3.075e+02, threshold=3.711e+02, percent-clipped=0.0 +2024-08-31 17:34:17,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=252181.33333333334, ans=0.0 +2024-08-31 17:34:19,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=252181.33333333334, ans=0.0 +2024-08-31 17:36:18,534 INFO [train.py:1114] (1/4) Epoch 20, batch 0, loss[loss=0.2366, simple_loss=0.2866, pruned_loss=0.06771, ctc_loss=0.128, over 19805.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2866, pruned_loss=0.06771, ctc_loss=0.128, over 19805.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-31 17:36:18,535 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-31 17:36:23,463 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.1277, 2.3304, 2.5698, 2.2196], device='cuda:1') +2024-08-31 17:36:28,439 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1834, simple_loss=0.2715, pruned_loss=0.03542, ctc_loss=0.061, over 944034.00 frames. +2024-08-31 17:36:28,440 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13681MB +2024-08-31 17:36:42,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=252288.0, ans=0.025 +2024-08-31 17:36:44,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252288.0, ans=0.1 +2024-08-31 17:36:49,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.42 vs. limit=22.5 +2024-08-31 17:36:57,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.57 vs. limit=22.5 +2024-08-31 17:37:11,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252394.66666666666, ans=0.1 +2024-08-31 17:37:22,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=252448.0, ans=0.0 +2024-08-31 17:37:27,989 INFO [train.py:1114] (1/4) Epoch 20, batch 50, loss[loss=0.1755, simple_loss=0.2421, pruned_loss=0.03919, ctc_loss=0.07613, over 19711.00 frames. ], tot_loss[loss=0.205, simple_loss=0.273, pruned_loss=0.04972, ctc_loss=0.09411, over 845201.66 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-31 17:37:43,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=15.0 +2024-08-31 17:37:51,156 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.696e+02 1.962e+02 2.261e+02 4.473e+02, threshold=3.923e+02, percent-clipped=2.0 +2024-08-31 17:41:27,216 INFO [train.py:1114] (1/4) Epoch 20, batch 100, loss[loss=0.1835, simple_loss=0.249, pruned_loss=0.04294, ctc_loss=0.08039, over 19724.00 frames. 
], tot_loss[loss=0.206, simple_loss=0.274, pruned_loss=0.05, ctc_loss=0.09478, over 1498926.28 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-31 17:41:30,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=252768.0, ans=0.0 +2024-08-31 17:44:06,481 INFO [train.py:1114] (1/4) Epoch 20, batch 150, loss[loss=0.206, simple_loss=0.2612, pruned_loss=0.05413, ctc_loss=0.1065, over 19725.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2711, pruned_loss=0.04909, ctc_loss=0.09312, over 2028652.43 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-31 17:44:59,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.634e+02 1.821e+02 2.194e+02 3.683e+02, threshold=3.641e+02, percent-clipped=0.0 +2024-08-31 17:45:25,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=253141.33333333334, ans=0.125 +2024-08-31 17:45:59,900 INFO [train.py:1114] (1/4) Epoch 20, batch 200, loss[loss=0.2289, simple_loss=0.2934, pruned_loss=0.05984, ctc_loss=0.1116, over 18278.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2707, pruned_loss=0.0493, ctc_loss=0.09329, over 2436406.66 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:46:03,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253301.33333333334, ans=0.125 +2024-08-31 17:46:07,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=253301.33333333334, ans=0.125 +2024-08-31 17:46:15,769 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.98 vs. limit=15.0 +2024-08-31 17:46:51,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253408.0, ans=0.1 +2024-08-31 17:47:14,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=253461.33333333334, ans=0.0 +2024-08-31 17:47:17,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=253514.66666666666, ans=0.07 +2024-08-31 17:47:20,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=253514.66666666666, ans=0.025 +2024-08-31 17:47:32,398 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:47:33,345 INFO [train.py:1114] (1/4) Epoch 20, batch 250, loss[loss=0.2025, simple_loss=0.2742, pruned_loss=0.04797, ctc_loss=0.08746, over 19412.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2709, pruned_loss=0.04902, ctc_loss=0.09255, over 2755617.90 frames. 
], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:47:45,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253568.0, ans=0.0 +2024-08-31 17:47:58,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=253621.33333333334, ans=0.035 +2024-08-31 17:47:59,362 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.745e+02 2.044e+02 2.602e+02 4.259e+02, threshold=4.089e+02, percent-clipped=6.0 +2024-08-31 17:49:24,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253674.66666666666, ans=0.1 +2024-08-31 17:49:51,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0 +2024-08-31 17:50:00,350 INFO [train.py:1114] (1/4) Epoch 20, batch 300, loss[loss=0.2364, simple_loss=0.2917, pruned_loss=0.06589, ctc_loss=0.1233, over 19501.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2706, pruned_loss=0.04893, ctc_loss=0.09214, over 3001492.53 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:50:08,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.00 vs. limit=10.0 +2024-08-31 17:50:47,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=253994.66666666666, ans=0.125 +2024-08-31 17:50:52,944 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-08-31 17:51:05,501 INFO [train.py:1114] (1/4) Epoch 20, batch 350, loss[loss=0.1822, simple_loss=0.2503, pruned_loss=0.04173, ctc_loss=0.07652, over 19766.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2703, pruned_loss=0.04899, ctc_loss=0.09246, over 3190578.29 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:51:23,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.78 vs. limit=12.0 +2024-08-31 17:51:26,966 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.703e+02 1.946e+02 2.321e+02 4.034e+02, threshold=3.891e+02, percent-clipped=0.0 +2024-08-31 17:51:28,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=254208.0, ans=0.5 +2024-08-31 17:52:04,345 INFO [train.py:1114] (1/4) Epoch 20, batch 400, loss[loss=0.1979, simple_loss=0.2727, pruned_loss=0.04474, ctc_loss=0.08406, over 19857.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2694, pruned_loss=0.04826, ctc_loss=0.09112, over 3344185.38 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:52:20,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.54 vs. 
limit=15.0 +2024-08-31 17:52:42,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254474.66666666666, ans=0.125 +2024-08-31 17:53:10,613 INFO [train.py:1114] (1/4) Epoch 20, batch 450, loss[loss=0.1812, simple_loss=0.2639, pruned_loss=0.03557, ctc_loss=0.06817, over 19601.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2697, pruned_loss=0.04833, ctc_loss=0.09118, over 3453379.41 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:53:13,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=254634.66666666666, ans=0.0 +2024-08-31 17:53:19,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=254634.66666666666, ans=0.0 +2024-08-31 17:53:31,702 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.627e+02 1.777e+02 2.217e+02 3.582e+02, threshold=3.554e+02, percent-clipped=0.0 +2024-08-31 17:53:42,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.65 vs. limit=22.5 +2024-08-31 17:53:48,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.39 vs. limit=22.5 +2024-08-31 17:53:50,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=254741.33333333334, ans=0.125 +2024-08-31 17:54:15,355 INFO [train.py:1114] (1/4) Epoch 20, batch 500, loss[loss=0.2202, simple_loss=0.2877, pruned_loss=0.05586, ctc_loss=0.1025, over 19698.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2693, pruned_loss=0.04801, ctc_loss=0.09086, over 3548352.56 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 32.0 +2024-08-31 17:54:45,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=255008.0, ans=0.125 +2024-08-31 17:55:09,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-31 17:55:14,675 INFO [train.py:1114] (1/4) Epoch 20, batch 550, loss[loss=0.21, simple_loss=0.2769, pruned_loss=0.05233, ctc_loss=0.09616, over 19308.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2694, pruned_loss=0.04833, ctc_loss=0.09135, over 3609596.37 frames. 
], batch size: 71, lr: 7.54e-03, grad_scale: 32.0 +2024-08-31 17:55:16,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=255168.0, ans=0.2 +2024-08-31 17:55:23,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=255168.0, ans=15.0 +2024-08-31 17:55:24,700 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:55:35,939 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.640e+02 1.908e+02 2.178e+02 3.229e+02, threshold=3.816e+02, percent-clipped=0.0 +2024-08-31 17:55:50,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=255274.66666666666, ans=0.025 +2024-08-31 17:55:54,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.29 vs. limit=10.0 +2024-08-31 17:56:03,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255328.0, ans=0.0 +2024-08-31 17:56:16,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=255381.33333333334, ans=0.125 +2024-08-31 17:56:22,749 INFO [train.py:1114] (1/4) Epoch 20, batch 600, loss[loss=0.2371, simple_loss=0.2975, pruned_loss=0.06486, ctc_loss=0.1175, over 19349.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2695, pruned_loss=0.04837, ctc_loss=0.09111, over 3666314.13 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:56:25,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=255434.66666666666, ans=0.95 +2024-08-31 17:56:29,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-31 17:56:31,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-31 17:56:33,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.51 vs. limit=15.0 +2024-08-31 17:57:06,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=255594.66666666666, ans=0.025 +2024-08-31 17:57:22,374 INFO [train.py:1114] (1/4) Epoch 20, batch 650, loss[loss=0.1935, simple_loss=0.2756, pruned_loss=0.04124, ctc_loss=0.07218, over 19776.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2689, pruned_loss=0.04805, ctc_loss=0.09046, over 3716846.63 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:57:44,323 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.759e+02 2.153e+02 2.838e+02 5.166e+02, threshold=4.306e+02, percent-clipped=8.0 +2024-08-31 17:57:58,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.39 vs. 
limit=15.0 +2024-08-31 17:57:58,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255861.33333333334, ans=0.125 +2024-08-31 17:58:22,790 INFO [train.py:1114] (1/4) Epoch 20, batch 700, loss[loss=0.1821, simple_loss=0.2522, pruned_loss=0.04178, ctc_loss=0.07107, over 19719.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2696, pruned_loss=0.04832, ctc_loss=0.09107, over 3749179.31 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:59:11,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=256181.33333333334, ans=0.125 +2024-08-31 17:59:24,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.37 vs. limit=12.0 +2024-08-31 17:59:24,904 INFO [train.py:1114] (1/4) Epoch 20, batch 750, loss[loss=0.22, simple_loss=0.2891, pruned_loss=0.05513, ctc_loss=0.1018, over 19487.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2686, pruned_loss=0.04769, ctc_loss=0.08984, over 3775736.67 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 32.0 +2024-08-31 17:59:46,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=256288.0, ans=0.125 +2024-08-31 17:59:57,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=256288.0, ans=10.0 +2024-08-31 17:59:58,588 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.642e+02 1.855e+02 2.095e+02 3.716e+02, threshold=3.709e+02, percent-clipped=0.0 +2024-08-31 17:59:59,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-08-31 18:00:00,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.65 vs. limit=15.0 +2024-08-31 18:00:08,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=256341.33333333334, ans=0.07 +2024-08-31 18:00:22,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256394.66666666666, ans=0.0 +2024-08-31 18:00:40,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=256448.0, ans=0.125 +2024-08-31 18:00:40,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=256448.0, ans=0.0 +2024-08-31 18:00:41,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=256501.33333333334, ans=0.125 +2024-08-31 18:00:42,941 INFO [train.py:1114] (1/4) Epoch 20, batch 800, loss[loss=0.206, simple_loss=0.2657, pruned_loss=0.05255, ctc_loss=0.1031, over 19810.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2687, pruned_loss=0.04762, ctc_loss=0.08974, over 3797557.31 frames. 
], batch size: 49, lr: 7.52e-03, grad_scale: 32.0 +2024-08-31 18:01:00,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=256554.66666666666, ans=0.2 +2024-08-31 18:01:29,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-31 18:01:30,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=15.0 +2024-08-31 18:01:31,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-31 18:01:38,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-31 18:01:43,070 INFO [train.py:1114] (1/4) Epoch 20, batch 850, loss[loss=0.2041, simple_loss=0.28, pruned_loss=0.04646, ctc_loss=0.08795, over 19661.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2685, pruned_loss=0.04752, ctc_loss=0.08946, over 3816420.74 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:01:54,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256821.33333333334, ans=0.1 +2024-08-31 18:01:59,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256821.33333333334, ans=0.125 +2024-08-31 18:02:00,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=256821.33333333334, ans=0.2 +2024-08-31 18:02:05,170 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.672e+02 2.009e+02 2.661e+02 4.692e+02, threshold=4.019e+02, percent-clipped=5.0 +2024-08-31 18:02:41,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.89 vs. limit=22.5 +2024-08-31 18:02:42,834 INFO [train.py:1114] (1/4) Epoch 20, batch 900, loss[loss=0.1924, simple_loss=0.2531, pruned_loss=0.04902, ctc_loss=0.08416, over 19408.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2688, pruned_loss=0.04796, ctc_loss=0.0903, over 3819263.87 frames. 
], batch size: 48, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:02:49,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257034.66666666666, ans=0.125 +2024-08-31 18:03:01,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257088.0, ans=0.125 +2024-08-31 18:03:04,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=257088.0, ans=0.0 +2024-08-31 18:03:05,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257088.0, ans=0.125 +2024-08-31 18:03:15,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=257141.33333333334, ans=0.125 +2024-08-31 18:03:29,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=257194.66666666666, ans=0.0 +2024-08-31 18:03:29,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.33 vs. limit=22.5 +2024-08-31 18:03:39,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257248.0, ans=0.1 +2024-08-31 18:03:45,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=257248.0, ans=10.0 +2024-08-31 18:03:50,708 INFO [train.py:1114] (1/4) Epoch 20, batch 950, loss[loss=0.2093, simple_loss=0.2668, pruned_loss=0.05598, ctc_loss=0.09961, over 19481.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.27, pruned_loss=0.04853, ctc_loss=0.0915, over 3819608.55 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:03:53,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-31 18:04:02,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=257354.66666666666, ans=0.025 +2024-08-31 18:04:08,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=257354.66666666666, ans=15.0 +2024-08-31 18:04:11,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=257354.66666666666, ans=0.125 +2024-08-31 18:04:12,200 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.674e+02 1.914e+02 2.385e+02 5.476e+02, threshold=3.829e+02, percent-clipped=1.0 +2024-08-31 18:05:02,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=257461.33333333334, ans=0.07 +2024-08-31 18:05:05,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.39 vs. 
limit=6.0 +2024-08-31 18:05:15,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=257514.66666666666, ans=0.09899494936611666 +2024-08-31 18:05:19,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=257514.66666666666, ans=0.05 +2024-08-31 18:05:25,089 INFO [train.py:1114] (1/4) Epoch 20, batch 1000, loss[loss=0.1935, simple_loss=0.2575, pruned_loss=0.04641, ctc_loss=0.0919, over 19840.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2705, pruned_loss=0.04899, ctc_loss=0.0923, over 3816334.17 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:05:29,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=257568.0, ans=0.025 +2024-08-31 18:12:01,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257781.33333333334, ans=0.125 +2024-08-31 18:12:15,984 INFO [train.py:1114] (1/4) Epoch 20, batch 1050, loss[loss=0.2002, simple_loss=0.2743, pruned_loss=0.04536, ctc_loss=0.08874, over 19825.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2691, pruned_loss=0.04835, ctc_loss=0.09128, over 3821748.40 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:12:22,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257834.66666666666, ans=0.125 +2024-08-31 18:12:34,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257888.0, ans=0.125 +2024-08-31 18:12:36,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=257888.0, ans=0.04949747468305833 +2024-08-31 18:12:37,419 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.683e+02 1.941e+02 2.234e+02 3.103e+02, threshold=3.882e+02, percent-clipped=0.0 +2024-08-31 18:13:09,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-08-31 18:13:20,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=258048.0, ans=0.125 +2024-08-31 18:13:25,869 INFO [train.py:1114] (1/4) Epoch 20, batch 1100, loss[loss=0.2061, simple_loss=0.2734, pruned_loss=0.05073, ctc_loss=0.09322, over 19589.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2692, pruned_loss=0.04823, ctc_loss=0.09106, over 3829696.72 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:13:26,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-08-31 18:13:28,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=258101.33333333334, ans=0.0 +2024-08-31 18:14:20,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=258314.66666666666, ans=0.0 +2024-08-31 18:14:26,123 INFO [train.py:1114] (1/4) Epoch 20, batch 1150, loss[loss=0.1871, simple_loss=0.2615, pruned_loss=0.04179, ctc_loss=0.07271, over 19589.00 frames. 
], tot_loss[loss=0.2012, simple_loss=0.2693, pruned_loss=0.04832, ctc_loss=0.09137, over 3829160.70 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-31 18:15:12,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.657e+02 1.937e+02 2.398e+02 3.976e+02, threshold=3.875e+02, percent-clipped=1.0 +2024-08-31 18:15:19,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=258474.66666666666, ans=0.025 +2024-08-31 18:15:25,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258528.0, ans=0.125 +2024-08-31 18:15:32,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.11 vs. limit=10.0 +2024-08-31 18:15:33,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258528.0, ans=0.1 +2024-08-31 18:15:51,942 INFO [train.py:1114] (1/4) Epoch 20, batch 1200, loss[loss=0.2099, simple_loss=0.2798, pruned_loss=0.05086, ctc_loss=0.09572, over 19848.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2698, pruned_loss=0.04847, ctc_loss=0.09168, over 3824397.58 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-31 18:15:52,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=258634.66666666666, ans=0.125 +2024-08-31 18:16:17,081 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:16:35,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0 +2024-08-31 18:16:54,797 INFO [train.py:1114] (1/4) Epoch 20, batch 1250, loss[loss=0.217, simple_loss=0.2817, pruned_loss=0.05575, ctc_loss=0.1023, over 19518.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2696, pruned_loss=0.048, ctc_loss=0.09059, over 3842508.95 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:17:05,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=258954.66666666666, ans=0.125 +2024-08-31 18:17:06,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=258954.66666666666, ans=0.125 +2024-08-31 18:17:20,828 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.673e+02 1.864e+02 2.243e+02 4.460e+02, threshold=3.727e+02, percent-clipped=1.0 +2024-08-31 18:17:37,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.11 vs. limit=10.0 +2024-08-31 18:17:50,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=12.0 +2024-08-31 18:17:55,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.43 vs. 
limit=6.0 +2024-08-31 18:18:00,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=259114.66666666666, ans=0.0 +2024-08-31 18:18:05,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259114.66666666666, ans=0.125 +2024-08-31 18:19:05,809 INFO [train.py:1114] (1/4) Epoch 20, batch 1300, loss[loss=0.2345, simple_loss=0.2982, pruned_loss=0.06082, ctc_loss=0.123, over 18844.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2694, pruned_loss=0.04789, ctc_loss=0.09062, over 3846249.38 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:19:11,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259168.0, ans=0.125 +2024-08-31 18:19:12,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=259168.0, ans=0.125 +2024-08-31 18:20:12,172 INFO [train.py:1114] (1/4) Epoch 20, batch 1350, loss[loss=0.1949, simple_loss=0.2675, pruned_loss=0.04289, ctc_loss=0.09153, over 19790.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2696, pruned_loss=0.04777, ctc_loss=0.09023, over 3857281.69 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:20:13,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259434.66666666666, ans=0.125 +2024-08-31 18:20:38,783 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.436e+02 1.677e+02 1.917e+02 2.382e+02 4.193e+02, threshold=3.834e+02, percent-clipped=5.0 +2024-08-31 18:20:41,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=259541.33333333334, ans=0.125 +2024-08-31 18:21:16,779 INFO [train.py:1114] (1/4) Epoch 20, batch 1400, loss[loss=0.1594, simple_loss=0.2303, pruned_loss=0.03243, ctc_loss=0.05891, over 19662.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2694, pruned_loss=0.04778, ctc_loss=0.09024, over 3864220.63 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:21:19,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259701.33333333334, ans=0.1 +2024-08-31 18:21:26,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=259701.33333333334, ans=0.0 +2024-08-31 18:21:42,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=259754.66666666666, ans=0.0 +2024-08-31 18:22:00,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259861.33333333334, ans=0.1 +2024-08-31 18:22:13,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.35 vs. 
limit=15.0 +2024-08-31 18:22:30,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=259914.66666666666, ans=0.0 +2024-08-31 18:22:31,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259914.66666666666, ans=0.1 +2024-08-31 18:22:33,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=259914.66666666666, ans=0.0 +2024-08-31 18:22:53,605 INFO [train.py:1114] (1/4) Epoch 20, batch 1450, loss[loss=0.214, simple_loss=0.282, pruned_loss=0.05303, ctc_loss=0.1002, over 19660.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2699, pruned_loss=0.04782, ctc_loss=0.09022, over 3862364.72 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:23:11,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-31 18:23:13,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.88 vs. limit=15.0 +2024-08-31 18:23:14,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-31 18:23:17,246 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.776e+02 2.029e+02 2.458e+02 5.712e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-31 18:23:19,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-31 18:23:27,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-31 18:23:34,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=260128.0, ans=0.125 +2024-08-31 18:23:41,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=260181.33333333334, ans=0.125 +2024-08-31 18:23:42,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.12 vs. limit=15.0 +2024-08-31 18:23:49,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260181.33333333334, ans=0.1 +2024-08-31 18:23:50,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260181.33333333334, ans=0.1 +2024-08-31 18:23:54,007 INFO [train.py:1114] (1/4) Epoch 20, batch 1500, loss[loss=0.1961, simple_loss=0.2718, pruned_loss=0.04438, ctc_loss=0.07923, over 19567.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.27, pruned_loss=0.04776, ctc_loss=0.08992, over 3862278.54 frames. 
], batch size: 57, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:24:08,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260288.0, ans=0.1 +2024-08-31 18:24:18,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=260341.33333333334, ans=0.0 +2024-08-31 18:24:26,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260341.33333333334, ans=0.125 +2024-08-31 18:24:31,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=260394.66666666666, ans=0.125 +2024-08-31 18:25:34,748 INFO [train.py:1114] (1/4) Epoch 20, batch 1550, loss[loss=0.2083, simple_loss=0.28, pruned_loss=0.04975, ctc_loss=0.09263, over 19615.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2698, pruned_loss=0.0479, ctc_loss=0.0903, over 3845948.64 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 32.0 +2024-08-31 18:26:01,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260501.33333333334, ans=0.125 +2024-08-31 18:26:05,544 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.514e-03 +2024-08-31 18:26:23,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=260554.66666666666, ans=0.04949747468305833 +2024-08-31 18:26:25,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260554.66666666666, ans=0.125 +2024-08-31 18:26:33,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.748e+02 2.049e+02 2.466e+02 3.855e+02, threshold=4.097e+02, percent-clipped=0.0 +2024-08-31 18:27:11,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=260714.66666666666, ans=0.125 +2024-08-31 18:27:11,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=260714.66666666666, ans=0.125 +2024-08-31 18:27:12,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=260714.66666666666, ans=0.125 +2024-08-31 18:27:16,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=12.0 +2024-08-31 18:27:18,529 INFO [train.py:1114] (1/4) Epoch 20, batch 1600, loss[loss=0.1904, simple_loss=0.2684, pruned_loss=0.04097, ctc_loss=0.07613, over 19846.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2693, pruned_loss=0.04773, ctc_loss=0.09007, over 3836160.04 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-31 18:27:35,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.03 vs. limit=12.0 +2024-08-31 18:27:46,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=260874.66666666666, ans=0.05 +2024-08-31 18:28:30,411 INFO [train.py:1114] (1/4) Epoch 20, batch 1650, loss[loss=0.1867, simple_loss=0.2665, pruned_loss=0.0381, ctc_loss=0.07678, over 19659.00 frames. 
], tot_loss[loss=0.2003, simple_loss=0.2692, pruned_loss=0.0477, ctc_loss=0.09008, over 3834174.80 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:28:31,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=261034.66666666666, ans=0.0 +2024-08-31 18:28:53,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.719e+02 2.026e+02 2.553e+02 4.958e+02, threshold=4.052e+02, percent-clipped=3.0 +2024-08-31 18:29:18,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=261248.0, ans=0.2 +2024-08-31 18:29:29,547 INFO [train.py:1114] (1/4) Epoch 20, batch 1700, loss[loss=0.1806, simple_loss=0.2416, pruned_loss=0.04409, ctc_loss=0.07844, over 19673.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.269, pruned_loss=0.0474, ctc_loss=0.0894, over 3848230.73 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:29:39,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=261301.33333333334, ans=0.125 +2024-08-31 18:30:04,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.14 vs. limit=12.0 +2024-08-31 18:30:05,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=261461.33333333334, ans=0.0 +2024-08-31 18:30:06,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261461.33333333334, ans=0.125 +2024-08-31 18:30:12,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261461.33333333334, ans=0.1 +2024-08-31 18:30:13,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=261461.33333333334, ans=0.2 +2024-08-31 18:30:16,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=12.0 +2024-08-31 18:31:18,040 INFO [train.py:1114] (1/4) Epoch 20, batch 1750, loss[loss=0.1609, simple_loss=0.2333, pruned_loss=0.03291, ctc_loss=0.05685, over 19648.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2689, pruned_loss=0.04732, ctc_loss=0.08926, over 3852510.28 frames. 
], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:31:22,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=261568.0, ans=0.05 +2024-08-31 18:31:29,909 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:31:39,999 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.679e+02 1.951e+02 2.329e+02 4.159e+02, threshold=3.901e+02, percent-clipped=0.0 +2024-08-31 18:31:41,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=261674.66666666666, ans=0.2 +2024-08-31 18:31:41,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261674.66666666666, ans=0.125 +2024-08-31 18:31:49,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=261674.66666666666, ans=0.0 +2024-08-31 18:32:15,183 INFO [train.py:1114] (1/4) Epoch 20, batch 1800, loss[loss=0.1972, simple_loss=0.2709, pruned_loss=0.04465, ctc_loss=0.0854, over 19620.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2694, pruned_loss=0.04754, ctc_loss=0.08965, over 3854196.06 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:32:22,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0 +2024-08-31 18:32:22,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-31 18:33:06,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=261941.33333333334, ans=0.0 +2024-08-31 18:33:10,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=261994.66666666666, ans=0.2 +2024-08-31 18:33:12,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261994.66666666666, ans=0.125 +2024-08-31 18:33:27,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=262048.0, ans=0.0 +2024-08-31 18:33:34,488 INFO [train.py:1114] (1/4) Epoch 20, batch 1850, loss[loss=0.2338, simple_loss=0.2986, pruned_loss=0.06229, ctc_loss=0.1111, over 19588.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2688, pruned_loss=0.04733, ctc_loss=0.08915, over 3857282.87 frames. 
], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:33:36,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262101.33333333334, ans=0.0 +2024-08-31 18:33:56,010 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.842e+02 2.206e+02 3.038e+02 4.306e+02, threshold=4.411e+02, percent-clipped=5.0 +2024-08-31 18:34:06,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=262208.0, ans=0.125 +2024-08-31 18:34:19,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=262261.3333333333, ans=0.0 +2024-08-31 18:34:28,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.21 vs. limit=15.0 +2024-08-31 18:34:36,265 INFO [train.py:1114] (1/4) Epoch 20, batch 1900, loss[loss=0.2069, simple_loss=0.2819, pruned_loss=0.04726, ctc_loss=0.09333, over 19660.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2698, pruned_loss=0.0479, ctc_loss=0.09013, over 3861469.85 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:34:50,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262421.3333333333, ans=0.0 +2024-08-31 18:34:52,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262421.3333333333, ans=0.1 +2024-08-31 18:35:08,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.51 vs. limit=15.0 +2024-08-31 18:35:11,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262528.0, ans=0.125 +2024-08-31 18:35:12,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.40 vs. limit=12.0 +2024-08-31 18:35:13,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=262528.0, ans=0.2 +2024-08-31 18:35:34,439 INFO [train.py:1114] (1/4) Epoch 20, batch 1950, loss[loss=0.1971, simple_loss=0.262, pruned_loss=0.04786, ctc_loss=0.09118, over 19589.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2708, pruned_loss=0.04826, ctc_loss=0.09073, over 3870567.74 frames. 
], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-31 18:35:39,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262634.6666666667, ans=0.125 +2024-08-31 18:35:55,636 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.650e+02 1.780e+02 2.101e+02 3.496e+02, threshold=3.560e+02, percent-clipped=0.0 +2024-08-31 18:35:58,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262741.3333333333, ans=0.0 +2024-08-31 18:36:14,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=262794.6666666667, ans=0.2 +2024-08-31 18:36:15,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262794.6666666667, ans=0.125 +2024-08-31 18:36:16,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=262794.6666666667, ans=10.0 +2024-08-31 18:36:23,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=262848.0, ans=0.125 +2024-08-31 18:36:28,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262848.0, ans=0.1 +2024-08-31 18:36:31,287 INFO [train.py:1114] (1/4) Epoch 20, batch 2000, loss[loss=0.1995, simple_loss=0.2528, pruned_loss=0.053, ctc_loss=0.1006, over 19661.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2717, pruned_loss=0.04878, ctc_loss=0.09184, over 3854852.83 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-31 18:36:41,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=262954.6666666667, ans=0.2 +2024-08-31 18:36:50,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262954.6666666667, ans=0.125 +2024-08-31 18:37:14,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=263061.3333333333, ans=0.0 +2024-08-31 18:37:17,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=263061.3333333333, ans=0.125 +2024-08-31 18:37:32,660 INFO [train.py:1114] (1/4) Epoch 20, batch 2050, loss[loss=0.1951, simple_loss=0.2561, pruned_loss=0.04843, ctc_loss=0.09314, over 19690.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2711, pruned_loss=0.04877, ctc_loss=0.09182, over 3849839.36 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:38:01,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263221.3333333333, ans=0.125 +2024-08-31 18:38:02,083 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.724e+02 2.041e+02 2.585e+02 3.821e+02, threshold=4.082e+02, percent-clipped=5.0 +2024-08-31 18:38:19,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=263328.0, ans=0.2 +2024-08-31 18:38:36,471 INFO [train.py:1114] (1/4) Epoch 20, batch 2100, loss[loss=0.1913, simple_loss=0.2678, pruned_loss=0.04181, ctc_loss=0.07799, over 19757.00 frames. 
], tot_loss[loss=0.2021, simple_loss=0.2707, pruned_loss=0.04849, ctc_loss=0.09145, over 3857655.54 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:38:49,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.62 vs. limit=22.5 +2024-08-31 18:39:22,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263648.0, ans=0.0 +2024-08-31 18:39:25,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263648.0, ans=0.1 +2024-08-31 18:39:32,922 INFO [train.py:1114] (1/4) Epoch 20, batch 2150, loss[loss=0.1795, simple_loss=0.2545, pruned_loss=0.03743, ctc_loss=0.07416, over 19590.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2699, pruned_loss=0.04844, ctc_loss=0.09121, over 3868423.44 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:39:37,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=263701.3333333333, ans=0.125 +2024-08-31 18:39:46,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.37 vs. limit=15.0 +2024-08-31 18:39:51,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263754.6666666667, ans=0.1 +2024-08-31 18:39:58,523 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.627e+02 1.896e+02 2.393e+02 5.058e+02, threshold=3.792e+02, percent-clipped=5.0 +2024-08-31 18:40:00,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263808.0, ans=0.1 +2024-08-31 18:40:02,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=263808.0, ans=0.0 +2024-08-31 18:40:25,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-08-31 18:40:25,676 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.02 vs. limit=12.0 +2024-08-31 18:40:32,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.06 vs. limit=22.5 +2024-08-31 18:40:33,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=263914.6666666667, ans=0.0 +2024-08-31 18:40:33,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0 +2024-08-31 18:41:09,871 INFO [train.py:1114] (1/4) Epoch 20, batch 2200, loss[loss=0.1907, simple_loss=0.2671, pruned_loss=0.04243, ctc_loss=0.0735, over 19583.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2693, pruned_loss=0.048, ctc_loss=0.09043, over 3866680.19 frames. 
], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:41:13,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263968.0, ans=0.125 +2024-08-31 18:41:49,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=264074.6666666667, ans=0.125 +2024-08-31 18:41:55,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=264128.0, ans=0.2 +2024-08-31 18:42:03,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264128.0, ans=0.125 +2024-08-31 18:42:17,184 INFO [train.py:1114] (1/4) Epoch 20, batch 2250, loss[loss=0.2108, simple_loss=0.2838, pruned_loss=0.0501, ctc_loss=0.09403, over 19619.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2699, pruned_loss=0.04822, ctc_loss=0.09092, over 3866736.95 frames. ], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:42:41,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=264288.0, ans=0.125 +2024-08-31 18:42:42,066 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.705e+02 2.149e+02 2.747e+02 5.291e+02, threshold=4.298e+02, percent-clipped=7.0 +2024-08-31 18:42:43,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.36 vs. limit=22.5 +2024-08-31 18:43:04,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=264448.0, ans=0.0 +2024-08-31 18:43:12,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.78 vs. limit=15.0 +2024-08-31 18:43:12,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0 +2024-08-31 18:43:12,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0 +2024-08-31 18:43:13,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.62 vs. limit=15.0 +2024-08-31 18:43:16,695 INFO [train.py:1114] (1/4) Epoch 20, batch 2300, loss[loss=0.1928, simple_loss=0.2551, pruned_loss=0.04794, ctc_loss=0.08668, over 19515.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2692, pruned_loss=0.04819, ctc_loss=0.09093, over 3860575.75 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:43:24,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-31 18:43:37,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=264554.6666666667, ans=0.2 +2024-08-31 18:43:44,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.64 vs. 
limit=15.0 +2024-08-31 18:43:46,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=264608.0, ans=0.2 +2024-08-31 18:43:49,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=264608.0, ans=0.0 +2024-08-31 18:43:49,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=264608.0, ans=0.2 +2024-08-31 18:43:51,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=264661.3333333333, ans=0.0 +2024-08-31 18:44:12,820 INFO [train.py:1114] (1/4) Epoch 20, batch 2350, loss[loss=0.2139, simple_loss=0.2799, pruned_loss=0.05489, ctc_loss=0.09521, over 19677.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2686, pruned_loss=0.04789, ctc_loss=0.09009, over 3863388.31 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:44:15,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.70 vs. limit=22.5 +2024-08-31 18:44:18,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=264768.0, ans=0.2 +2024-08-31 18:44:49,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.669e+02 1.905e+02 2.325e+02 3.822e+02, threshold=3.811e+02, percent-clipped=0.0 +2024-08-31 18:44:55,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-08-31 18:45:26,864 INFO [train.py:1114] (1/4) Epoch 20, batch 2400, loss[loss=0.2161, simple_loss=0.2809, pruned_loss=0.05454, ctc_loss=0.1053, over 19227.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2706, pruned_loss=0.04871, ctc_loss=0.0915, over 3857894.04 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:45:27,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-08-31 18:45:33,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=265034.6666666667, ans=0.0 +2024-08-31 18:45:42,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-08-31 18:45:47,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=265088.0, ans=0.05 +2024-08-31 18:46:15,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=12.08 vs. limit=15.0 +2024-08-31 18:46:23,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=265301.3333333333, ans=0.125 +2024-08-31 18:46:23,884 INFO [train.py:1114] (1/4) Epoch 20, batch 2450, loss[loss=0.2325, simple_loss=0.2872, pruned_loss=0.06531, ctc_loss=0.1181, over 13318.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2739, pruned_loss=0.05106, ctc_loss=0.09615, over 3731381.34 frames. 
], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:46:30,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=265301.3333333333, ans=0.125 +2024-08-31 18:46:43,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-08-31 18:46:45,877 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.663e+02 1.874e+02 2.086e+02 3.013e+02, threshold=3.749e+02, percent-clipped=0.0 +2024-08-31 18:47:04,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=265461.3333333333, ans=0.1 +2024-08-31 18:47:07,639 INFO [train.py:1387] (1/4) Done! diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-2 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-2 new file mode 100644 index 0000000000000000000000000000000000000000..7dcef8de004a2bbe2d96dd7f78a58440951a1015 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-2 @@ -0,0 +1,1097 @@ +2024-08-31 13:15:01,244 INFO [train.py:1182] (2/4) Training started +2024-08-31 13:15:02,665 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-31 13:15:02,669 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 
512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-31 13:15:02,669 INFO [train.py:1212] (2/4) About to create model +2024-08-31 13:15:10,388 INFO [train.py:1216] (2/4) Number of model parameters: 66367431 +2024-08-31 13:15:10,438 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-31 13:16:01,722 INFO [train.py:1231] (2/4) Using DDP +2024-08-31 13:16:07,014 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-31 13:16:07,207 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-31 13:16:07,208 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-31 13:16:07,275 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-31 13:16:07,276 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-31 13:16:07,276 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-31 13:16:07,276 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-31 13:16:07,276 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-31 13:16:07,276 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-31 13:16:08,877 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-31 13:16:08,883 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-31 13:16:09,671 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-31 13:16:10,141 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-31 13:16:10,465 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-31 13:16:10,466 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:22:43,896 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12782MB +2024-08-31 13:22:45,384 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12849MB +2024-08-31 13:23:02,017 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-31 13:23:03,248 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=256, metric=8.54 vs. 
limit=7.5 +2024-08-31 13:23:03,530 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-31 13:24:12,098 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-31 13:24:13,687 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 13069MB +2024-08-31 13:24:13,707 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-31 13:25:06,943 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.1826, simple_loss=0.2456, pruned_loss=0.04306, ctc_loss=0.08405, over 19432.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2456, pruned_loss=0.04306, ctc_loss=0.08405, over 19432.00 frames. ], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 13:25:06,944 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-31 13:25:31,865 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3674, 1.1105, 1.6340, 0.7188, 1.6513, 1.7595, 1.8532, 1.6009], + device='cuda:2') +2024-08-31 13:25:49,908 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-31 13:25:49,909 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13069MB +2024-08-31 13:25:51,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=22.5 +2024-08-31 13:27:19,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=225680.0, ans=0.0 +2024-08-31 13:32:33,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.39 vs. limit=15.0 +2024-08-31 13:48:13,677 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.934e+02 2.118e+02 2.433e+02 6.228e+02, threshold=4.237e+02, percent-clipped=5.0 +2024-08-31 13:56:45,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225946.66666666666, ans=0.1 +2024-08-31 13:56:46,289 INFO [train.py:1114] (2/4) Epoch 18, batch 50, loss[loss=0.197, simple_loss=0.2517, pruned_loss=0.05073, ctc_loss=0.1018, over 19736.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.277, pruned_loss=0.05294, ctc_loss=0.1004, over 844774.48 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 13:57:26,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=225946.66666666666, ans=0.025 +2024-08-31 13:57:32,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.94 vs. limit=15.0 +2024-08-31 14:00:23,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0 +2024-08-31 14:00:59,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=226053.33333333334, ans=0.0 +2024-08-31 14:09:11,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.26 vs. 
limit=6.0 +2024-08-31 14:11:29,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=226160.0, ans=0.0 +2024-08-31 14:11:30,626 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 14:13:36,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226160.0, ans=0.125 +2024-08-31 14:13:38,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=226160.0, ans=0.0 +2024-08-31 14:13:50,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.48 vs. limit=22.5 +2024-08-31 14:15:00,241 INFO [train.py:1114] (2/4) Epoch 18, batch 100, loss[loss=0.1836, simple_loss=0.2584, pruned_loss=0.03984, ctc_loss=0.07282, over 19708.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2786, pruned_loss=0.05329, ctc_loss=0.09989, over 1499238.30 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:15:17,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.12 vs. limit=15.0 +2024-08-31 14:15:17,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=226213.33333333334, ans=15.0 +2024-08-31 14:16:55,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=226213.33333333334, ans=0.2 +2024-08-31 14:25:28,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=226320.0, ans=0.125 +2024-08-31 14:28:22,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.87 vs. limit=15.0 +2024-08-31 14:29:05,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226373.33333333334, ans=0.1 +2024-08-31 14:30:18,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226373.33333333334, ans=0.1 +2024-08-31 14:31:33,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=226426.66666666666, ans=0.0 +2024-08-31 14:32:50,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=226426.66666666666, ans=0.0 +2024-08-31 14:32:51,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=15.0 +2024-08-31 14:32:51,605 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.685e+02 1.949e+02 2.332e+02 3.525e+02, threshold=3.898e+02, percent-clipped=0.0 +2024-08-31 14:34:38,790 INFO [train.py:1114] (2/4) Epoch 18, batch 150, loss[loss=0.1868, simple_loss=0.2539, pruned_loss=0.04315, ctc_loss=0.08347, over 19719.00 frames. ], tot_loss[loss=0.208, simple_loss=0.275, pruned_loss=0.05127, ctc_loss=0.09632, over 2028180.04 frames. 
], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:45:09,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-31 14:45:55,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-31 14:45:55,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-08-31 14:49:44,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.49 vs. limit=12.0 +2024-08-31 14:55:51,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=226640.0, ans=0.2 +2024-08-31 15:01:41,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=226693.33333333334, ans=0.125 +2024-08-31 15:03:26,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.92 vs. limit=15.0 +2024-08-31 15:05:15,420 INFO [train.py:1114] (2/4) Epoch 18, batch 200, loss[loss=0.2233, simple_loss=0.2852, pruned_loss=0.05773, ctc_loss=0.1151, over 18272.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2739, pruned_loss=0.05108, ctc_loss=0.09591, over 2435566.49 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-31 15:07:59,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=226800.0, ans=0.025 +2024-08-31 15:07:59,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.60 vs. limit=10.0 +2024-08-31 15:10:48,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-08-31 15:15:18,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.94 vs. limit=22.5 +2024-08-31 15:16:13,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226906.66666666666, ans=0.125 +2024-08-31 15:16:14,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0 +2024-08-31 15:17:11,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=226960.0, ans=0.125 +2024-08-31 15:17:44,784 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.761e+02 2.086e+02 2.524e+02 4.159e+02, threshold=4.172e+02, percent-clipped=2.0 +2024-08-31 15:17:59,747 INFO [train.py:1114] (2/4) Epoch 18, batch 250, loss[loss=0.2549, simple_loss=0.3142, pruned_loss=0.07141, ctc_loss=0.1318, over 19347.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2742, pruned_loss=0.05145, ctc_loss=0.09669, over 2754676.87 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-31 15:19:14,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.19 vs. 
limit=10.0 +2024-08-31 15:21:17,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.54 vs. limit=22.5 +2024-08-31 15:22:47,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.85 vs. limit=15.0 +2024-08-31 15:22:58,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=227173.33333333334, ans=0.0 +2024-08-31 15:22:58,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=227173.33333333334, ans=0.125 +2024-08-31 15:24:04,330 INFO [train.py:1114] (2/4) Epoch 18, batch 300, loss[loss=0.2481, simple_loss=0.2983, pruned_loss=0.07267, ctc_loss=0.1314, over 19550.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.274, pruned_loss=0.05152, ctc_loss=0.09687, over 3000148.78 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-31 15:30:28,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227493.33333333334, ans=0.125 +2024-08-31 15:30:47,329 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.680e+02 1.932e+02 2.386e+02 3.920e+02, threshold=3.864e+02, percent-clipped=0.0 +2024-08-31 15:31:47,643 INFO [train.py:1114] (2/4) Epoch 18, batch 350, loss[loss=0.1857, simple_loss=0.249, pruned_loss=0.0446, ctc_loss=0.08302, over 19783.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2748, pruned_loss=0.05187, ctc_loss=0.09758, over 3189797.23 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-31 15:32:05,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.14 vs. limit=15.0 +2024-08-31 15:32:35,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=15.0 +2024-08-31 15:33:57,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=227706.66666666666, ans=0.025 +2024-08-31 15:34:33,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=227760.0, ans=0.2 +2024-08-31 15:34:39,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227760.0, ans=0.1 +2024-08-31 15:34:57,666 INFO [train.py:1114] (2/4) Epoch 18, batch 400, loss[loss=0.2101, simple_loss=0.2875, pruned_loss=0.04798, ctc_loss=0.09204, over 19482.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2743, pruned_loss=0.05162, ctc_loss=0.09712, over 3341232.34 frames. 
], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-31 15:35:09,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227813.33333333334, ans=0.125 +2024-08-31 15:35:12,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=227813.33333333334, ans=0.025 +2024-08-31 15:35:14,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=227813.33333333334, ans=0.125 +2024-08-31 15:37:09,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=228026.66666666666, ans=0.125 +2024-08-31 15:37:11,053 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.718e+02 1.967e+02 2.336e+02 3.401e+02, threshold=3.934e+02, percent-clipped=0.0 +2024-08-31 15:37:37,967 INFO [train.py:1114] (2/4) Epoch 18, batch 450, loss[loss=0.2387, simple_loss=0.3003, pruned_loss=0.06281, ctc_loss=0.1289, over 19625.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2747, pruned_loss=0.05173, ctc_loss=0.09742, over 3450729.30 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-31 15:39:32,283 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:39:51,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228240.0, ans=0.1 +2024-08-31 15:39:53,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=228240.0, ans=0.125 +2024-08-31 15:39:56,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=228240.0, ans=0.125 +2024-08-31 15:39:56,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.80 vs. limit=10.0 +2024-08-31 15:40:01,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=228293.33333333334, ans=0.2 +2024-08-31 15:40:01,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=228293.33333333334, ans=0.0 +2024-08-31 15:40:18,492 INFO [train.py:1114] (2/4) Epoch 18, batch 500, loss[loss=0.2065, simple_loss=0.2849, pruned_loss=0.04699, ctc_loss=0.08539, over 19681.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2738, pruned_loss=0.05124, ctc_loss=0.09659, over 3546019.83 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-31 15:40:19,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=228346.66666666666, ans=0.125 +2024-08-31 15:40:39,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.03 vs. limit=10.0 +2024-08-31 15:40:41,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=228453.33333333334, ans=0.0 +2024-08-31 15:40:42,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.38 vs. 
limit=6.0 +2024-08-31 15:40:45,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228453.33333333334, ans=0.125 +2024-08-31 15:40:50,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228453.33333333334, ans=0.1 +2024-08-31 15:40:52,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=10.24 vs. limit=15.0 +2024-08-31 15:41:10,342 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.618e+02 1.812e+02 2.329e+02 3.946e+02, threshold=3.624e+02, percent-clipped=1.0 +2024-08-31 15:41:17,481 INFO [train.py:1114] (2/4) Epoch 18, batch 550, loss[loss=0.2256, simple_loss=0.2873, pruned_loss=0.05965, ctc_loss=0.1113, over 19242.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2738, pruned_loss=0.05134, ctc_loss=0.09678, over 3608988.72 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-31 15:42:31,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=228666.66666666666, ans=0.2 +2024-08-31 15:43:41,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=228773.33333333334, ans=0.2 +2024-08-31 15:44:00,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.23 vs. limit=22.5 +2024-08-31 15:44:18,818 INFO [train.py:1114] (2/4) Epoch 18, batch 600, loss[loss=0.2245, simple_loss=0.2926, pruned_loss=0.0577, ctc_loss=0.1026, over 19360.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2732, pruned_loss=0.05072, ctc_loss=0.09536, over 3665746.22 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-31 15:44:38,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=228933.33333333334, ans=0.0 +2024-08-31 15:45:28,763 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.735e+02 2.092e+02 3.203e+02 5.009e+02, threshold=4.184e+02, percent-clipped=13.0 +2024-08-31 15:45:30,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229093.33333333334, ans=0.1 +2024-08-31 15:45:38,279 INFO [train.py:1114] (2/4) Epoch 18, batch 650, loss[loss=0.2041, simple_loss=0.2782, pruned_loss=0.04732, ctc_loss=0.08861, over 19756.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2727, pruned_loss=0.05068, ctc_loss=0.09538, over 3716208.52 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-31 15:46:20,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=229146.66666666666, ans=0.0 +2024-08-31 15:46:24,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.40 vs. 
limit=15.0 +2024-08-31 15:46:27,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=229200.0, ans=0.025 +2024-08-31 15:46:30,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229200.0, ans=0.1 +2024-08-31 15:46:51,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=229306.66666666666, ans=0.0 +2024-08-31 15:47:16,567 INFO [train.py:1114] (2/4) Epoch 18, batch 700, loss[loss=0.1828, simple_loss=0.2521, pruned_loss=0.04181, ctc_loss=0.07494, over 19722.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2731, pruned_loss=0.05086, ctc_loss=0.09588, over 3749014.97 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 16.0 +2024-08-31 15:48:03,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=229626.66666666666, ans=0.125 +2024-08-31 15:48:03,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=229626.66666666666, ans=0.125 +2024-08-31 15:48:04,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.99 vs. limit=22.5 +2024-08-31 15:48:10,583 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.672e+02 1.935e+02 2.401e+02 4.868e+02, threshold=3.870e+02, percent-clipped=1.0 +2024-08-31 15:48:12,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=229626.66666666666, ans=0.0 +2024-08-31 15:48:16,528 INFO [train.py:1114] (2/4) Epoch 18, batch 750, loss[loss=0.2087, simple_loss=0.2764, pruned_loss=0.05082, ctc_loss=0.09843, over 19859.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2727, pruned_loss=0.05063, ctc_loss=0.09526, over 3775543.53 frames. ], batch size: 55, lr: 8.37e-03, grad_scale: 16.0 +2024-08-31 15:48:38,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=229733.33333333334, ans=0.125 +2024-08-31 15:49:03,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=229840.0, ans=6.0 +2024-08-31 15:49:17,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=229893.33333333334, ans=0.0 +2024-08-31 15:49:19,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=229893.33333333334, ans=0.0 +2024-08-31 15:49:28,065 INFO [train.py:1114] (2/4) Epoch 18, batch 800, loss[loss=0.1791, simple_loss=0.2475, pruned_loss=0.03994, ctc_loss=0.0769, over 19430.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2732, pruned_loss=0.05093, ctc_loss=0.09594, over 3796776.82 frames. ], batch size: 48, lr: 8.37e-03, grad_scale: 32.0 +2024-08-31 15:49:39,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=229946.66666666666, ans=0.0 +2024-08-31 15:50:26,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.84 vs. 
limit=15.0 +2024-08-31 15:50:27,780 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.682e+02 1.957e+02 2.333e+02 3.697e+02, threshold=3.913e+02, percent-clipped=0.0 +2024-08-31 15:50:29,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230160.0, ans=0.1 +2024-08-31 15:50:33,702 INFO [train.py:1114] (2/4) Epoch 18, batch 850, loss[loss=0.2134, simple_loss=0.2943, pruned_loss=0.04873, ctc_loss=0.08778, over 19630.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2725, pruned_loss=0.05061, ctc_loss=0.09535, over 3815628.64 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-31 15:51:31,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=230266.66666666666, ans=0.125 +2024-08-31 15:51:33,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=230266.66666666666, ans=0.07 +2024-08-31 15:51:47,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=230320.0, ans=0.2 +2024-08-31 15:52:06,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=230426.66666666666, ans=0.0 +2024-08-31 15:52:06,630 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.10 vs. limit=22.5 +2024-08-31 15:52:07,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230426.66666666666, ans=0.125 +2024-08-31 15:52:08,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=230426.66666666666, ans=0.0 +2024-08-31 15:52:15,954 INFO [train.py:1114] (2/4) Epoch 18, batch 900, loss[loss=0.1701, simple_loss=0.2408, pruned_loss=0.03566, ctc_loss=0.07006, over 19406.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.273, pruned_loss=0.05097, ctc_loss=0.09616, over 3819241.07 frames. 
], batch size: 48, lr: 8.36e-03, grad_scale: 32.0 +2024-08-31 15:52:17,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=230480.0, ans=0.1 +2024-08-31 15:52:49,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=230586.66666666666, ans=0.0 +2024-08-31 15:52:57,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230640.0, ans=0.125 +2024-08-31 15:53:00,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=230640.0, ans=0.125 +2024-08-31 15:53:06,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=230693.33333333334, ans=0.2 +2024-08-31 15:53:07,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230693.33333333334, ans=0.0 +2024-08-31 15:53:08,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=230693.33333333334, ans=0.0 +2024-08-31 15:53:12,021 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.645e+02 1.872e+02 2.411e+02 3.930e+02, threshold=3.745e+02, percent-clipped=1.0 +2024-08-31 15:53:13,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=230693.33333333334, ans=0.0 +2024-08-31 15:53:46,104 INFO [train.py:1114] (2/4) Epoch 18, batch 950, loss[loss=0.2067, simple_loss=0.2653, pruned_loss=0.05465, ctc_loss=0.09693, over 19504.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2731, pruned_loss=0.05111, ctc_loss=0.09636, over 3821614.48 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-31 15:53:47,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230746.66666666666, ans=0.125 +2024-08-31 15:53:47,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.23 vs. limit=15.0 +2024-08-31 15:53:54,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=230746.66666666666, ans=0.0 +2024-08-31 15:53:57,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230800.0, ans=0.125 +2024-08-31 15:54:03,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230800.0, ans=0.1 +2024-08-31 15:54:32,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230906.66666666666, ans=0.125 +2024-08-31 15:54:48,344 INFO [train.py:1114] (2/4) Epoch 18, batch 1000, loss[loss=0.2083, simple_loss=0.2823, pruned_loss=0.04853, ctc_loss=0.09287, over 19859.00 frames. ], tot_loss[loss=0.208, simple_loss=0.274, pruned_loss=0.05152, ctc_loss=0.09708, over 3816720.59 frames. 
], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-31 15:55:02,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231013.33333333334, ans=0.125 +2024-08-31 15:55:10,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.15 vs. limit=10.0 +2024-08-31 15:55:12,590 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:55:15,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=231120.0, ans=0.125 +2024-08-31 15:55:43,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231173.33333333334, ans=0.1 +2024-08-31 15:55:55,150 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.660e+02 1.836e+02 2.172e+02 3.389e+02, threshold=3.673e+02, percent-clipped=0.0 +2024-08-31 15:56:01,095 INFO [train.py:1114] (2/4) Epoch 18, batch 1050, loss[loss=0.2094, simple_loss=0.2888, pruned_loss=0.04775, ctc_loss=0.08633, over 19821.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2739, pruned_loss=0.05162, ctc_loss=0.09737, over 3823860.40 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-31 15:56:11,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.56 vs. limit=6.0 +2024-08-31 15:56:26,774 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:56:37,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.40 vs. limit=15.0 +2024-08-31 15:56:38,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=231440.0, ans=0.125 +2024-08-31 15:56:39,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=231440.0, ans=0.125 +2024-08-31 15:57:01,196 INFO [train.py:1114] (2/4) Epoch 18, batch 1100, loss[loss=0.1915, simple_loss=0.2681, pruned_loss=0.04221, ctc_loss=0.07619, over 19597.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2733, pruned_loss=0.05103, ctc_loss=0.09602, over 3831830.36 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0 +2024-08-31 15:57:07,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.82 vs. limit=15.0 +2024-08-31 15:57:08,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=231546.66666666666, ans=0.125 +2024-08-31 15:57:21,676 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:57:26,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.31 vs. 
limit=10.0 +2024-08-31 15:57:50,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231706.66666666666, ans=0.0 +2024-08-31 15:57:51,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231760.0, ans=0.1 +2024-08-31 15:57:58,316 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.608e+02 1.860e+02 2.284e+02 4.941e+02, threshold=3.719e+02, percent-clipped=1.0 +2024-08-31 15:58:04,224 INFO [train.py:1114] (2/4) Epoch 18, batch 1150, loss[loss=0.1821, simple_loss=0.2578, pruned_loss=0.0384, ctc_loss=0.07372, over 19594.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2728, pruned_loss=0.0508, ctc_loss=0.09576, over 3830669.25 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0 +2024-08-31 15:58:18,567 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 15:58:25,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=231866.66666666666, ans=0.0 +2024-08-31 15:58:31,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=231866.66666666666, ans=0.125 +2024-08-31 15:58:35,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=231866.66666666666, ans=0.2 +2024-08-31 15:58:36,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=231866.66666666666, ans=0.2 +2024-08-31 15:58:48,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231920.0, ans=0.1 +2024-08-31 15:58:52,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231973.33333333334, ans=0.125 +2024-08-31 15:59:17,232 INFO [train.py:1114] (2/4) Epoch 18, batch 1200, loss[loss=0.199, simple_loss=0.2722, pruned_loss=0.046, ctc_loss=0.08446, over 19844.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2739, pruned_loss=0.05146, ctc_loss=0.09689, over 3825491.91 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-31 15:59:30,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=232133.33333333334, ans=0.125 +2024-08-31 15:59:31,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=15.0 +2024-08-31 15:59:43,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=232186.66666666666, ans=0.0 +2024-08-31 15:59:44,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=15.0 +2024-08-31 15:59:53,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232240.0, ans=0.1 +2024-08-31 16:00:10,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.83 vs. 
limit=10.0 +2024-08-31 16:00:12,205 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.681e+02 1.869e+02 2.236e+02 3.755e+02, threshold=3.738e+02, percent-clipped=1.0 +2024-08-31 16:00:18,285 INFO [train.py:1114] (2/4) Epoch 18, batch 1250, loss[loss=0.2325, simple_loss=0.2955, pruned_loss=0.06174, ctc_loss=0.1153, over 19524.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2747, pruned_loss=0.05163, ctc_loss=0.09711, over 3842929.75 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-31 16:00:37,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=232400.0, ans=0.0 +2024-08-31 16:00:50,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=232453.33333333334, ans=0.0 +2024-08-31 16:00:54,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=232453.33333333334, ans=0.5 +2024-08-31 16:00:56,575 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:01:05,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232506.66666666666, ans=0.125 +2024-08-31 16:01:19,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=232613.33333333334, ans=0.125 +2024-08-31 16:01:22,451 INFO [train.py:1114] (2/4) Epoch 18, batch 1300, loss[loss=0.2313, simple_loss=0.2931, pruned_loss=0.0625, ctc_loss=0.1113, over 18811.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2741, pruned_loss=0.05132, ctc_loss=0.0964, over 3845733.83 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0 +2024-08-31 16:01:31,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=232613.33333333334, ans=0.125 +2024-08-31 16:01:36,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232666.66666666666, ans=0.125 +2024-08-31 16:01:44,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=232666.66666666666, ans=0.125 +2024-08-31 16:01:44,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=232666.66666666666, ans=0.125 +2024-08-31 16:02:14,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=232826.66666666666, ans=0.0 +2024-08-31 16:02:19,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=232826.66666666666, ans=0.0 +2024-08-31 16:02:21,667 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.351e+02 1.758e+02 2.176e+02 2.645e+02 4.342e+02, threshold=4.353e+02, percent-clipped=3.0 +2024-08-31 16:02:23,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=232826.66666666666, ans=0.0 +2024-08-31 16:02:27,594 INFO [train.py:1114] (2/4) Epoch 18, batch 1350, loss[loss=0.2033, simple_loss=0.2753, pruned_loss=0.0471, ctc_loss=0.09272, over 19781.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2731, pruned_loss=0.05072, ctc_loss=0.0953, over 3856639.95 frames. 
], batch size: 54, lr: 8.31e-03, grad_scale: 32.0 +2024-08-31 16:02:31,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=232880.0, ans=0.0 +2024-08-31 16:02:37,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232880.0, ans=0.1 +2024-08-31 16:02:49,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232933.33333333334, ans=0.125 +2024-08-31 16:03:13,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=233040.0, ans=0.1 +2024-08-31 16:03:25,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=233093.33333333334, ans=0.2 +2024-08-31 16:03:29,593 INFO [train.py:1114] (2/4) Epoch 18, batch 1400, loss[loss=0.1901, simple_loss=0.2544, pruned_loss=0.04559, ctc_loss=0.0867, over 19707.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2726, pruned_loss=0.05038, ctc_loss=0.09465, over 3863721.32 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 32.0 +2024-08-31 16:03:31,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=233146.66666666666, ans=0.125 +2024-08-31 16:03:41,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=233146.66666666666, ans=0.125 +2024-08-31 16:03:47,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=233146.66666666666, ans=0.0 +2024-08-31 16:03:51,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.57 vs. limit=15.0 +2024-08-31 16:04:14,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=15.0 +2024-08-31 16:04:16,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.62 vs. limit=15.0 +2024-08-31 16:04:31,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233360.0, ans=0.1 +2024-08-31 16:04:36,287 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.655e+02 1.916e+02 2.338e+02 3.956e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-31 16:04:40,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-08-31 16:04:42,277 INFO [train.py:1114] (2/4) Epoch 18, batch 1450, loss[loss=0.2215, simple_loss=0.2817, pruned_loss=0.05792, ctc_loss=0.1137, over 19658.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2735, pruned_loss=0.05073, ctc_loss=0.09546, over 3861602.59 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 32.0 +2024-08-31 16:04:42,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=233413.33333333334, ans=0.125 +2024-08-31 16:05:01,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.68 vs. 
limit=15.0 +2024-08-31 16:05:17,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=233520.0, ans=0.125 +2024-08-31 16:05:44,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=233626.66666666666, ans=0.0 +2024-08-31 16:05:48,828 INFO [train.py:1114] (2/4) Epoch 18, batch 1500, loss[loss=0.235, simple_loss=0.2977, pruned_loss=0.06227, ctc_loss=0.1196, over 19583.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2739, pruned_loss=0.05104, ctc_loss=0.09608, over 3861275.12 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 32.0 +2024-08-31 16:06:09,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233733.33333333334, ans=0.125 +2024-08-31 16:06:16,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=233786.66666666666, ans=0.0 +2024-08-31 16:06:29,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=233840.0, ans=0.0 +2024-08-31 16:06:45,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233893.33333333334, ans=0.1 +2024-08-31 16:06:46,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.48 vs. limit=15.0 +2024-08-31 16:06:50,185 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.669e+02 1.866e+02 2.355e+02 3.552e+02, threshold=3.733e+02, percent-clipped=0.0 +2024-08-31 16:07:06,063 INFO [train.py:1114] (2/4) Epoch 18, batch 1550, loss[loss=0.2282, simple_loss=0.2873, pruned_loss=0.06207, ctc_loss=0.1125, over 19595.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2731, pruned_loss=0.05096, ctc_loss=0.09598, over 3845791.37 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 32.0 +2024-08-31 16:07:24,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234000.0, ans=0.125 +2024-08-31 16:07:27,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.55 vs. limit=15.0 +2024-08-31 16:07:33,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=234053.33333333334, ans=0.125 +2024-08-31 16:07:37,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=234053.33333333334, ans=0.2 +2024-08-31 16:07:44,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=234106.66666666666, ans=0.0 +2024-08-31 16:07:48,668 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.38 vs. limit=15.0 +2024-08-31 16:08:07,324 INFO [train.py:1114] (2/4) Epoch 18, batch 1600, loss[loss=0.2198, simple_loss=0.2872, pruned_loss=0.05542, ctc_loss=0.1039, over 19853.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2731, pruned_loss=0.05097, ctc_loss=0.09607, over 3836055.87 frames. 
], batch size: 57, lr: 8.29e-03, grad_scale: 32.0 +2024-08-31 16:08:07,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=234213.33333333334, ans=0.025 +2024-08-31 16:08:11,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=234213.33333333334, ans=0.0 +2024-08-31 16:08:21,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.53 vs. limit=6.0 +2024-08-31 16:08:24,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234266.66666666666, ans=0.1 +2024-08-31 16:08:43,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=234320.0, ans=0.1 +2024-08-31 16:09:01,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234426.66666666666, ans=0.0 +2024-08-31 16:09:20,623 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.787e+02 2.153e+02 2.672e+02 5.491e+02, threshold=4.305e+02, percent-clipped=8.0 +2024-08-31 16:09:20,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=234426.66666666666, ans=0.025 +2024-08-31 16:09:26,583 INFO [train.py:1114] (2/4) Epoch 18, batch 1650, loss[loss=0.2001, simple_loss=0.2767, pruned_loss=0.04459, ctc_loss=0.08589, over 19643.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2731, pruned_loss=0.05105, ctc_loss=0.0961, over 3830462.78 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0 +2024-08-31 16:09:26,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=234480.0, ans=0.125 +2024-08-31 16:12:44,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=234480.0, ans=0.0 +2024-08-31 16:13:15,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=234533.33333333334, ans=0.025 +2024-08-31 16:13:28,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234586.66666666666, ans=0.1 +2024-08-31 16:13:28,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.02 vs. limit=22.5 +2024-08-31 16:14:05,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234693.33333333334, ans=0.125 +2024-08-31 16:14:15,749 INFO [train.py:1114] (2/4) Epoch 18, batch 1700, loss[loss=0.1925, simple_loss=0.2453, pruned_loss=0.05027, ctc_loss=0.09769, over 19671.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2732, pruned_loss=0.05083, ctc_loss=0.09581, over 3845647.98 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 32.0 +2024-08-31 16:14:36,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=234800.0, ans=0.125 +2024-08-31 16:14:43,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.31 vs. 
limit=15.0 +2024-08-31 16:14:51,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234906.66666666666, ans=0.1 +2024-08-31 16:14:55,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=234906.66666666666, ans=0.2 +2024-08-31 16:15:07,762 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.694e+02 2.038e+02 2.484e+02 5.869e+02, threshold=4.076e+02, percent-clipped=3.0 +2024-08-31 16:15:13,535 INFO [train.py:1114] (2/4) Epoch 18, batch 1750, loss[loss=0.1738, simple_loss=0.2427, pruned_loss=0.03789, ctc_loss=0.07289, over 19674.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2723, pruned_loss=0.05024, ctc_loss=0.09466, over 3850710.37 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0 +2024-08-31 16:15:27,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=235066.66666666666, ans=0.125 +2024-08-31 16:15:34,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.07 vs. limit=22.5 +2024-08-31 16:15:35,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=235066.66666666666, ans=0.025 +2024-08-31 16:15:40,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=235066.66666666666, ans=0.125 +2024-08-31 16:15:43,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0 +2024-08-31 16:15:44,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=235120.0, ans=0.125 +2024-08-31 16:15:49,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=235120.0, ans=0.0 +2024-08-31 16:15:51,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.17 vs. limit=15.0 +2024-08-31 16:15:58,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.18 vs. limit=10.0 +2024-08-31 16:16:03,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0 +2024-08-31 16:16:18,961 INFO [train.py:1114] (2/4) Epoch 18, batch 1800, loss[loss=0.2056, simple_loss=0.2764, pruned_loss=0.0493, ctc_loss=0.09063, over 19612.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2726, pruned_loss=0.05039, ctc_loss=0.09484, over 3851559.86 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 32.0 +2024-08-31 16:16:26,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=235280.0, ans=0.125 +2024-08-31 16:16:31,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=235333.33333333334, ans=15.0 +2024-08-31 16:16:46,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.19 vs. 
limit=15.0 +2024-08-31 16:16:48,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=235386.66666666666, ans=0.07 +2024-08-31 16:16:50,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235386.66666666666, ans=0.125 +2024-08-31 16:16:53,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=235440.0, ans=0.0 +2024-08-31 16:17:02,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=235440.0, ans=0.2 +2024-08-31 16:17:12,067 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.320e+02 1.739e+02 2.099e+02 2.606e+02 4.220e+02, threshold=4.197e+02, percent-clipped=1.0 +2024-08-31 16:17:14,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=235493.33333333334, ans=0.125 +2024-08-31 16:17:16,686 INFO [train.py:1114] (2/4) Epoch 18, batch 1850, loss[loss=0.2, simple_loss=0.2759, pruned_loss=0.04542, ctc_loss=0.08299, over 19593.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2722, pruned_loss=0.05022, ctc_loss=0.09456, over 3855874.93 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 16.0 +2024-08-31 16:17:19,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.06 vs. limit=15.0 +2024-08-31 16:17:39,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=235600.0, ans=0.2 +2024-08-31 16:17:58,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=235706.66666666666, ans=0.0 +2024-08-31 16:18:01,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=235706.66666666666, ans=0.125 +2024-08-31 16:18:02,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=235706.66666666666, ans=0.125 +2024-08-31 16:18:05,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=235706.66666666666, ans=0.025 +2024-08-31 16:18:06,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=235706.66666666666, ans=0.025 +2024-08-31 16:18:21,118 INFO [train.py:1114] (2/4) Epoch 18, batch 1900, loss[loss=0.2071, simple_loss=0.2812, pruned_loss=0.04851, ctc_loss=0.08976, over 19650.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2732, pruned_loss=0.05076, ctc_loss=0.09534, over 3861000.92 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 16.0 +2024-08-31 16:18:23,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235813.33333333334, ans=0.1 +2024-08-31 16:18:45,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235920.0, ans=0.125 +2024-08-31 16:18:56,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.69 vs. 
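limit=8.0

In each of the optim.py clipping warnings, the reported threshold tracks `Clipping_scale` times the median gradient norm (for the warning just above, 2.0 x 2.099e+02 rounds to the logged 4.197e+02), which suggests the clipping threshold adapts to the distribution of recent norms rather than being fixed. A minimal sketch of that idea; the window size and bookkeeping details are assumptions, not read from optim.py:

```python
from collections import deque

import torch

class GradNormClipper:
    """Clip each step's gradient norm against scale * median of recent norms."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.recent = deque(maxlen=window)
        self.clipped = 0
        self.steps = 0

    def clip_(self, params) -> None:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.linalg.vector_norm(
            torch.stack([torch.linalg.vector_norm(g) for g in grads])
        ).item()
        self.recent.append(norm)
        # The five logged numbers look like min / Q1 / median / Q3 / max.
        q = torch.quantile(torch.tensor(list(self.recent)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()
        self.steps += 1
        if norm > threshold:
            self.clipped += 1  # feeds the logged "percent-clipped" statistic
            for g in grads:
                g.mul_(threshold / norm)
```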
+2024-08-31 16:19:05,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=235973.33333333334, ans=0.5 +2024-08-31 16:19:13,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=236026.66666666666, ans=0.0 +2024-08-31 16:19:14,244 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.296e+02 1.623e+02 1.837e+02 2.195e+02 5.135e+02, threshold=3.673e+02, percent-clipped=2.0 +2024-08-31 16:19:18,768 INFO [train.py:1114] (2/4) Epoch 18, batch 1950, loss[loss=0.2075, simple_loss=0.2697, pruned_loss=0.05218, ctc_loss=0.1021, over 19583.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2742, pruned_loss=0.0509, ctc_loss=0.09554, over 3869991.12 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 16.0 +2024-08-31 16:20:28,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=236133.33333333334, ans=0.1 +2024-08-31 16:20:34,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=236133.33333333334, ans=0.0 +2024-08-31 16:20:36,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0 +2024-08-31 16:20:45,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=236186.66666666666, ans=0.125 +2024-08-31 16:20:45,563 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.34 vs. limit=15.0 +2024-08-31 16:21:03,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=236293.33333333334, ans=0.125 +2024-08-31 16:21:21,708 INFO [train.py:1114] (2/4) Epoch 18, batch 2000, loss[loss=0.1827, simple_loss=0.249, pruned_loss=0.04216, ctc_loss=0.08037, over 19673.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2748, pruned_loss=0.05123, ctc_loss=0.09617, over 3854452.41 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 32.0 +2024-08-31 16:21:24,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=236346.66666666666, ans=0.125 +2024-08-31 16:21:59,145 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.40 vs. limit=10.0 +2024-08-31 16:22:10,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.54 vs. limit=10.0 +2024-08-31 16:22:14,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.42 vs. limit=10.0 +2024-08-31 16:22:14,731 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.400e+02 1.704e+02 2.096e+02 2.751e+02 4.638e+02, threshold=4.193e+02, percent-clipped=6.0 +2024-08-31 16:22:19,168 INFO [train.py:1114] (2/4) Epoch 18, batch 2050, loss[loss=0.208, simple_loss=0.262, pruned_loss=0.05668, ctc_loss=0.1017, over 19716.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2742, pruned_loss=0.05129, ctc_loss=0.09632, over 3851418.13 frames. 
], batch size: 47, lr: 8.25e-03, grad_scale: 32.0 +2024-08-31 16:22:31,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=236666.66666666666, ans=0.2 +2024-08-31 16:22:35,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=236666.66666666666, ans=0.2 +2024-08-31 16:22:41,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0 +2024-08-31 16:22:59,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=236773.33333333334, ans=0.125 +2024-08-31 16:23:18,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=236826.66666666666, ans=10.0 +2024-08-31 16:23:21,346 INFO [train.py:1114] (2/4) Epoch 18, batch 2100, loss[loss=0.2063, simple_loss=0.2738, pruned_loss=0.05048, ctc_loss=0.09448, over 19773.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2733, pruned_loss=0.05079, ctc_loss=0.09552, over 3858565.95 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 32.0 +2024-08-31 16:23:29,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236880.0, ans=0.1 +2024-08-31 16:23:33,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-31 16:23:34,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.84 vs. limit=22.5 +2024-08-31 16:23:56,154 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:24:01,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.86 vs. limit=22.5 +2024-08-31 16:24:26,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237093.33333333334, ans=0.125 +2024-08-31 16:24:27,115 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.628e+02 1.802e+02 2.351e+02 4.404e+02, threshold=3.604e+02, percent-clipped=1.0 +2024-08-31 16:24:31,676 INFO [train.py:1114] (2/4) Epoch 18, batch 2150, loss[loss=0.1948, simple_loss=0.2648, pruned_loss=0.04513, ctc_loss=0.08661, over 19870.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2721, pruned_loss=0.05033, ctc_loss=0.09448, over 3868180.37 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 32.0 +2024-08-31 16:24:35,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=237146.66666666666, ans=0.2 +2024-08-31 16:24:47,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237200.0, ans=0.1 +2024-08-31 16:24:51,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=237200.0, ans=0.0 +2024-08-31 16:25:25,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.88 vs. 
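limit=15.0

The scaling.py `Whitening` lines, like the one completed just above, compare a per-module statistic against a limit. The logged values behave like a measure of how far the channel covariance is from a multiple of the identity: it is at least 1.0, equals 1.0 for perfectly decorrelated channels, and approaches the number of channels per group when activations collapse onto a few directions; presumably the module penalizes the features once the metric exceeds `limit`. The formula below is an illustrative reconstruction with those properties, not necessarily the exact one in scaling.py:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels); channels split into num_groups groups."""
    n, c = x.shape
    cg = c // num_groups
    x = x.reshape(n, num_groups, cg).permute(1, 0, 2)       # (groups, frames, cg)
    cov = torch.matmul(x.transpose(1, 2), x) / n            # per-group covariance
    num = cg * (cov ** 2).sum(dim=(1, 2))                   # cg * ||C||_F^2
    den = torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1) ** 2  # trace(C)^2
    return (num / den).mean().item()                        # >= 1 by Cauchy-Schwarz

# White noise gives a metric close to 1; a rank-1 signal drives it to cg.
print(whitening_metric(torch.randn(4096, 384)))                      # ~1.1
print(whitening_metric(torch.randn(4096, 1) * torch.ones(1, 384)))   # -> 384.0
```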
+2024-08-31 16:25:40,266 INFO [train.py:1114] (2/4) Epoch 18, batch 2200, loss[loss=0.238, simple_loss=0.2965, pruned_loss=0.06478, ctc_loss=0.1251, over 19592.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2723, pruned_loss=0.0504, ctc_loss=0.0947, over 3866406.18 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 32.0 +2024-08-31 16:25:50,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=237413.33333333334, ans=0.125 +2024-08-31 16:25:55,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0 +2024-08-31 16:26:17,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237573.33333333334, ans=0.125 +2024-08-31 16:26:33,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.652e+02 1.938e+02 2.493e+02 4.901e+02, threshold=3.877e+02, percent-clipped=6.0 +2024-08-31 16:26:38,334 INFO [train.py:1114] (2/4) Epoch 18, batch 2250, loss[loss=0.2001, simple_loss=0.2815, pruned_loss=0.04286, ctc_loss=0.08269, over 19616.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2719, pruned_loss=0.05015, ctc_loss=0.09414, over 3865969.40 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 32.0 +2024-08-31 16:26:53,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=237680.0, ans=0.0 +2024-08-31 16:27:06,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.24 vs. limit=15.0 +2024-08-31 16:27:09,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=237733.33333333334, ans=0.125 +2024-08-31 16:27:26,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237840.0, ans=0.125 +2024-08-31 16:27:51,890 INFO [train.py:1114] (2/4) Epoch 18, batch 2300, loss[loss=0.1878, simple_loss=0.2615, pruned_loss=0.04112, ctc_loss=0.07954, over 19510.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2715, pruned_loss=0.05026, ctc_loss=0.09452, over 3860086.18 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 32.0 +2024-08-31 16:27:56,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=237946.66666666666, ans=0.0 +2024-08-31 16:28:00,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.03 vs. limit=15.0 +2024-08-31 16:28:19,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=238053.33333333334, ans=0.025 +2024-08-31 16:28:28,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=238106.66666666666, ans=0.125 +2024-08-31 16:28:30,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.44 vs. 
limit=15.0 +2024-08-31 16:28:47,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.696e+02 1.848e+02 2.393e+02 3.836e+02, threshold=3.696e+02, percent-clipped=0.0 +2024-08-31 16:29:07,685 INFO [train.py:1114] (2/4) Epoch 18, batch 2350, loss[loss=0.2023, simple_loss=0.2787, pruned_loss=0.04582, ctc_loss=0.08596, over 19687.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2716, pruned_loss=0.05036, ctc_loss=0.09464, over 3862480.50 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 32.0 +2024-08-31 16:29:14,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238213.33333333334, ans=0.125 +2024-08-31 16:29:40,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=238373.33333333334, ans=0.0 +2024-08-31 16:30:30,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.88 vs. limit=15.0 +2024-08-31 16:30:36,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238426.66666666666, ans=0.1 +2024-08-31 16:30:38,505 INFO [train.py:1114] (2/4) Epoch 18, batch 2400, loss[loss=0.2199, simple_loss=0.2902, pruned_loss=0.05395, ctc_loss=0.1041, over 19247.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2746, pruned_loss=0.05165, ctc_loss=0.09696, over 3857430.91 frames. ], batch size: 71, lr: 8.22e-03, grad_scale: 32.0 +2024-08-31 16:30:52,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=238533.33333333334, ans=0.125 +2024-08-31 16:30:58,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=238533.33333333334, ans=0.2 +2024-08-31 16:31:03,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=238586.66666666666, ans=0.025 +2024-08-31 16:31:30,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=238693.33333333334, ans=0.125 +2024-08-31 16:31:47,473 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.444e+02 1.682e+02 1.835e+02 2.125e+02 4.662e+02, threshold=3.671e+02, percent-clipped=5.0 +2024-08-31 16:31:52,088 INFO [train.py:1114] (2/4) Epoch 18, batch 2450, loss[loss=0.2773, simple_loss=0.3079, pruned_loss=0.09028, ctc_loss=0.1655, over 14215.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2782, pruned_loss=0.05417, ctc_loss=0.102, over 3733241.97 frames. ], batch size: 140, lr: 8.21e-03, grad_scale: 32.0 +2024-08-31 16:31:52,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=11.21 vs. 
limit=12.0 +2024-08-31 16:32:08,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238800.0, ans=0.125 +2024-08-31 16:32:22,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=238853.33333333334, ans=0.09899494936611666 +2024-08-31 16:32:25,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=238906.66666666666, ans=0.125 +2024-08-31 16:33:43,942 INFO [train.py:1114] (2/4) Epoch 19, batch 0, loss[loss=0.204, simple_loss=0.2631, pruned_loss=0.05302, ctc_loss=0.09728, over 19405.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2631, pruned_loss=0.05302, ctc_loss=0.09728, over 19405.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-31 16:33:43,943 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-31 16:33:52,861 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0668, 2.2441, 2.9506, 3.3905], device='cuda:2') +2024-08-31 16:34:00,536 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1846, simple_loss=0.2728, pruned_loss=0.03584, ctc_loss=0.06159, over 944034.00 frames. +2024-08-31 16:34:01,380 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13505MB +2024-08-31 16:34:20,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=239008.0, ans=0.0 +2024-08-31 16:34:26,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=239061.33333333334, ans=0.125 +2024-08-31 16:34:54,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.57 vs. limit=15.0 +2024-08-31 16:35:04,412 INFO [train.py:1114] (2/4) Epoch 19, batch 50, loss[loss=0.1988, simple_loss=0.2562, pruned_loss=0.05183, ctc_loss=0.09426, over 19697.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2744, pruned_loss=0.05164, ctc_loss=0.0981, over 843613.11 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-31 16:35:05,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=239221.33333333334, ans=0.2 +2024-08-31 16:35:09,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239221.33333333334, ans=0.1 +2024-08-31 16:35:12,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.19 vs. 
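limit=15.0

Two things are visible at this epoch boundary: the validation pass at the start of epoch 19 (loss=0.1846 over 944034 frames) and the learning-rate column, which decays smoothly within an epoch (8.30e-03 down to 8.21e-03 across epoch 18) and then steps down again as epoch 19 begins (7.99e-03). That shape is characteristic of an Eden-style scheduler that multiplies a base rate by separate batch- and epoch-dependent decay factors. A minimal sketch assuming that form; `base_lr`, `lr_batches` and `lr_epochs` are illustrative defaults, not values recovered from this run:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    # Both factors start near 1.0 and decay like x**-0.5 once x passes the knee.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Moving from epoch 18 to epoch 19 shrinks epoch_factor, hence the step down
# in the logged lr even before many batches of the new epoch have run.
print(eden_lr(0.045, batch=239008, epoch=18.0))
print(eden_lr(0.045, batch=239008, epoch=19.0))
```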
+2024-08-31 16:35:12,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=239221.33333333334, ans=6.0 +2024-08-31 16:35:12,518 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.795e+02 2.006e+02 2.342e+02 4.821e+02, threshold=4.012e+02, percent-clipped=4.0 +2024-08-31 16:35:24,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=239274.66666666666, ans=0.04949747468305833 +2024-08-31 16:35:26,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=239328.0, ans=0.2 +2024-08-31 16:35:34,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=239328.0, ans=0.025 +2024-08-31 16:35:43,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=239381.33333333334, ans=0.025 +2024-08-31 16:35:45,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-31 16:35:46,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-31 16:36:03,809 INFO [train.py:1114] (2/4) Epoch 19, batch 100, loss[loss=0.187, simple_loss=0.2547, pruned_loss=0.04367, ctc_loss=0.07987, over 19711.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2758, pruned_loss=0.0514, ctc_loss=0.09745, over 1498173.55 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-31 16:36:05,291 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:36:23,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-31 16:36:32,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.60 vs. limit=12.0 +2024-08-31 16:36:35,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239594.66666666666, ans=0.125 +2024-08-31 16:36:54,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=239701.33333333334, ans=0.0 +2024-08-31 16:37:00,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239701.33333333334, ans=0.125 +2024-08-31 16:37:06,673 INFO [train.py:1114] (2/4) Epoch 19, batch 150, loss[loss=0.1964, simple_loss=0.251, pruned_loss=0.05246, ctc_loss=0.09199, over 19712.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2721, pruned_loss=0.0496, ctc_loss=0.0936, over 2027757.76 frames. 
], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-31 16:37:14,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=239754.66666666666, ans=0.09899494936611666 +2024-08-31 16:37:15,241 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.762e+02 1.953e+02 2.445e+02 3.524e+02, threshold=3.906e+02, percent-clipped=0.0 +2024-08-31 16:37:43,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239914.66666666666, ans=0.1 +2024-08-31 16:37:50,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=239914.66666666666, ans=0.2 +2024-08-31 16:37:52,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.12 vs. limit=10.0 +2024-08-31 16:38:02,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-31 16:38:02,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=239968.0, ans=0.07 +2024-08-31 16:38:14,091 INFO [train.py:1114] (2/4) Epoch 19, batch 200, loss[loss=0.2242, simple_loss=0.2916, pruned_loss=0.05776, ctc_loss=0.1031, over 18193.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2722, pruned_loss=0.04987, ctc_loss=0.0936, over 2435345.63 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-31 16:38:15,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=240021.33333333334, ans=0.0 +2024-08-31 16:38:17,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-31 16:38:47,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-31 16:38:49,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=240181.33333333334, ans=0.2 +2024-08-31 16:38:50,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-31 16:38:54,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=240181.33333333334, ans=0.2 +2024-08-31 16:39:03,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.13 vs. limit=12.0 +2024-08-31 16:39:13,532 INFO [train.py:1114] (2/4) Epoch 19, batch 250, loss[loss=0.2254, simple_loss=0.2843, pruned_loss=0.06097, ctc_loss=0.1111, over 19434.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2723, pruned_loss=0.05018, ctc_loss=0.09419, over 2756225.88 frames. 
], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-31 16:39:18,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=240288.0, ans=0.125 +2024-08-31 16:39:27,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.313e+02 1.733e+02 2.186e+02 2.853e+02 4.755e+02, threshold=4.372e+02, percent-clipped=7.0 +2024-08-31 16:39:30,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=240341.33333333334, ans=0.125 +2024-08-31 16:39:31,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.05 vs. limit=22.5 +2024-08-31 16:39:33,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=240341.33333333334, ans=0.025 +2024-08-31 16:39:36,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=240341.33333333334, ans=0.0 +2024-08-31 16:39:55,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0 +2024-08-31 16:39:58,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.96 vs. limit=15.0 +2024-08-31 16:40:12,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=240501.33333333334, ans=0.0 +2024-08-31 16:40:14,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.29 vs. limit=22.5 +2024-08-31 16:40:15,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240501.33333333334, ans=0.1 +2024-08-31 16:40:20,412 INFO [train.py:1114] (2/4) Epoch 19, batch 300, loss[loss=0.2345, simple_loss=0.2937, pruned_loss=0.06393, ctc_loss=0.1185, over 19508.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2715, pruned_loss=0.04995, ctc_loss=0.09426, over 3000511.74 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-31 16:40:20,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=240554.66666666666, ans=0.0 +2024-08-31 16:40:21,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=240554.66666666666, ans=0.035 +2024-08-31 16:40:22,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.46 vs. limit=15.0 +2024-08-31 16:40:25,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-31 16:40:29,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.83 vs. 
limit=6.0 +2024-08-31 16:40:37,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=240608.0, ans=0.025 +2024-08-31 16:40:47,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=240661.33333333334, ans=0.125 +2024-08-31 16:41:06,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240714.66666666666, ans=0.125 +2024-08-31 16:41:21,973 INFO [train.py:1114] (2/4) Epoch 19, batch 350, loss[loss=0.1779, simple_loss=0.2474, pruned_loss=0.03942, ctc_loss=0.07405, over 19796.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2728, pruned_loss=0.05023, ctc_loss=0.09471, over 3190250.24 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-31 16:41:30,312 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.653e+02 1.904e+02 2.349e+02 4.016e+02, threshold=3.809e+02, percent-clipped=0.0 +2024-08-31 16:41:35,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0 +2024-08-31 16:41:46,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=15.08 vs. limit=15.0 +2024-08-31 16:42:06,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.94 vs. limit=15.0 +2024-08-31 16:42:25,379 INFO [train.py:1114] (2/4) Epoch 19, batch 400, loss[loss=0.2056, simple_loss=0.2768, pruned_loss=0.04867, ctc_loss=0.09252, over 19481.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2719, pruned_loss=0.04954, ctc_loss=0.09334, over 3342450.82 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:42:37,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=241088.0, ans=0.2 +2024-08-31 16:43:12,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=241248.0, ans=0.2 +2024-08-31 16:43:15,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=241248.0, ans=0.04949747468305833 +2024-08-31 16:43:18,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-31 16:43:27,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=241301.33333333334, ans=0.2 +2024-08-31 16:43:34,394 INFO [train.py:1114] (2/4) Epoch 19, batch 450, loss[loss=0.2163, simple_loss=0.2834, pruned_loss=0.05428, ctc_loss=0.1016, over 19612.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2721, pruned_loss=0.04976, ctc_loss=0.09369, over 3450474.67 frames. 
], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:43:42,745 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.686e+02 1.896e+02 2.370e+02 4.152e+02, threshold=3.792e+02, percent-clipped=1.0 +2024-08-31 16:43:44,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=241354.66666666666, ans=0.2 +2024-08-31 16:44:01,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-31 16:44:05,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241461.33333333334, ans=0.2 +2024-08-31 16:44:11,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.47 vs. limit=22.5 +2024-08-31 16:44:13,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=241514.66666666666, ans=0.0 +2024-08-31 16:44:14,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241514.66666666666, ans=0.125 +2024-08-31 16:44:35,447 INFO [train.py:1114] (2/4) Epoch 19, batch 500, loss[loss=0.2435, simple_loss=0.3011, pruned_loss=0.06839, ctc_loss=0.1226, over 19693.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2712, pruned_loss=0.04935, ctc_loss=0.09294, over 3545980.66 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:44:35,655 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:44:37,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=241621.33333333334, ans=0.035 +2024-08-31 16:44:51,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=241674.66666666666, ans=0.125 +2024-08-31 16:45:06,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241728.0, ans=0.1 +2024-08-31 16:45:12,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=241781.33333333334, ans=0.0 +2024-08-31 16:45:28,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-31 16:45:31,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-31 16:45:55,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241888.0, ans=0.125 +2024-08-31 16:46:04,262 INFO [train.py:1114] (2/4) Epoch 19, batch 550, loss[loss=0.2042, simple_loss=0.2769, pruned_loss=0.04814, ctc_loss=0.08822, over 19316.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2708, pruned_loss=0.04922, ctc_loss=0.09254, over 3608060.51 frames. 
], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:46:12,730 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.697e+02 1.983e+02 2.191e+02 3.507e+02, threshold=3.966e+02, percent-clipped=0.0 +2024-08-31 16:46:25,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=241888.0, ans=0.125 +2024-08-31 16:46:27,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=241941.33333333334, ans=0.07 +2024-08-31 16:46:35,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=241941.33333333334, ans=0.2 +2024-08-31 16:46:40,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-31 16:46:58,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=242048.0, ans=0.2 +2024-08-31 16:47:05,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=242101.33333333334, ans=0.2 +2024-08-31 16:47:05,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-31 16:47:10,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-31 16:47:16,276 INFO [train.py:1114] (2/4) Epoch 19, batch 600, loss[loss=0.2074, simple_loss=0.2757, pruned_loss=0.05032, ctc_loss=0.0962, over 19426.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2707, pruned_loss=0.04899, ctc_loss=0.09211, over 3664988.97 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:47:22,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242154.66666666666, ans=0.125 +2024-08-31 16:47:26,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242154.66666666666, ans=0.0 +2024-08-31 16:47:30,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=242154.66666666666, ans=0.07 +2024-08-31 16:47:37,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=242208.0, ans=0.0 +2024-08-31 16:47:46,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242208.0, ans=0.125 +2024-08-31 16:47:54,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=242261.33333333334, ans=0.0 +2024-08-31 16:48:21,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=242314.66666666666, ans=0.0 +2024-08-31 16:48:22,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242314.66666666666, ans=0.1 +2024-08-31 16:48:39,676 INFO [train.py:1114] (2/4) Epoch 19, batch 650, loss[loss=0.1988, simple_loss=0.2691, pruned_loss=0.04677, ctc_loss=0.08712, over 19774.00 frames. 
], tot_loss[loss=0.2027, simple_loss=0.2705, pruned_loss=0.04901, ctc_loss=0.09245, over 3715999.86 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:48:42,378 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:48:48,388 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.784e+02 2.044e+02 2.793e+02 4.792e+02, threshold=4.088e+02, percent-clipped=6.0 +2024-08-31 16:49:03,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242474.66666666666, ans=0.125 +2024-08-31 16:49:08,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=242528.0, ans=0.2 +2024-08-31 16:49:20,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.48 vs. limit=15.0 +2024-08-31 16:49:29,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=242581.33333333334, ans=0.0 +2024-08-31 16:49:40,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=242634.66666666666, ans=0.2 +2024-08-31 16:50:02,079 INFO [train.py:1114] (2/4) Epoch 19, batch 700, loss[loss=0.1932, simple_loss=0.265, pruned_loss=0.04262, ctc_loss=0.09048, over 19734.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2711, pruned_loss=0.0491, ctc_loss=0.09244, over 3749459.92 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:50:14,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242688.0, ans=0.125 +2024-08-31 16:50:22,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=242741.33333333334, ans=0.09899494936611666 +2024-08-31 16:50:30,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=242794.66666666666, ans=0.1 +2024-08-31 16:50:43,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=242848.0, ans=0.95 +2024-08-31 16:50:51,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.76 vs. limit=10.0 +2024-08-31 16:51:27,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.73 vs. limit=15.0 +2024-08-31 16:52:16,516 INFO [train.py:1114] (2/4) Epoch 19, batch 750, loss[loss=0.2027, simple_loss=0.2674, pruned_loss=0.05036, ctc_loss=0.09327, over 19506.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2705, pruned_loss=0.04888, ctc_loss=0.0919, over 3774607.39 frames. 
], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:52:38,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=242954.66666666666, ans=0.0 +2024-08-31 16:52:40,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.707e+02 2.012e+02 2.576e+02 4.596e+02, threshold=4.024e+02, percent-clipped=2.0 +2024-08-31 16:52:42,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=242954.66666666666, ans=0.0 +2024-08-31 16:53:17,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=243114.66666666666, ans=0.0 +2024-08-31 16:53:23,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-08-31 16:53:27,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243168.0, ans=0.125 +2024-08-31 16:53:40,986 INFO [train.py:1114] (2/4) Epoch 19, batch 800, loss[loss=0.1848, simple_loss=0.2485, pruned_loss=0.04444, ctc_loss=0.08061, over 19799.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2698, pruned_loss=0.04858, ctc_loss=0.09133, over 3796075.64 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:53:43,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.31 vs. limit=10.0 +2024-08-31 16:54:07,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=243328.0, ans=0.0 +2024-08-31 16:54:18,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0 +2024-08-31 16:54:40,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243434.66666666666, ans=0.125 +2024-08-31 16:54:52,038 INFO [train.py:1114] (2/4) Epoch 19, batch 850, loss[loss=0.2007, simple_loss=0.2828, pruned_loss=0.04236, ctc_loss=0.08497, over 19632.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2701, pruned_loss=0.04879, ctc_loss=0.09187, over 3814477.48 frames. 
], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:55:00,087 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.677e+02 1.837e+02 2.316e+02 3.927e+02, threshold=3.675e+02, percent-clipped=0.0 +2024-08-31 16:55:32,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243648.0, ans=0.125 +2024-08-31 16:55:40,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=243648.0, ans=0.0 +2024-08-31 16:55:41,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243648.0, ans=0.0 +2024-08-31 16:55:45,623 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:55:52,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-31 16:55:52,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243701.33333333334, ans=0.1 +2024-08-31 16:55:55,939 INFO [train.py:1114] (2/4) Epoch 19, batch 900, loss[loss=0.1734, simple_loss=0.2408, pruned_loss=0.03883, ctc_loss=0.07063, over 19400.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2707, pruned_loss=0.04934, ctc_loss=0.09293, over 3817687.94 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:56:06,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=243808.0, ans=0.2 +2024-08-31 16:56:08,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=243808.0, ans=0.0 +2024-08-31 16:56:10,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0 +2024-08-31 16:56:12,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243808.0, ans=0.1 +2024-08-31 16:56:14,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=243808.0, ans=0.2 +2024-08-31 16:56:19,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243861.33333333334, ans=0.0 +2024-08-31 16:56:53,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243968.0, ans=0.125 +2024-08-31 16:57:05,831 INFO [train.py:1114] (2/4) Epoch 19, batch 950, loss[loss=0.1791, simple_loss=0.2468, pruned_loss=0.03994, ctc_loss=0.07906, over 19503.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2711, pruned_loss=0.04957, ctc_loss=0.0933, over 3817453.70 frames. 
], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:57:10,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=244021.33333333334, ans=0.0 +2024-08-31 16:57:14,292 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.751e+02 2.034e+02 2.400e+02 3.857e+02, threshold=4.067e+02, percent-clipped=1.0 +2024-08-31 16:57:14,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=244021.33333333334, ans=0.0 +2024-08-31 16:57:18,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244074.66666666666, ans=0.125 +2024-08-31 16:57:48,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=244181.33333333334, ans=0.07 +2024-08-31 16:57:51,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=244181.33333333334, ans=0.2 +2024-08-31 16:57:52,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=244234.66666666666, ans=0.95 +2024-08-31 16:57:59,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=244234.66666666666, ans=0.125 +2024-08-31 16:58:06,281 INFO [train.py:1114] (2/4) Epoch 19, batch 1000, loss[loss=0.1944, simple_loss=0.2605, pruned_loss=0.04676, ctc_loss=0.08708, over 19847.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2721, pruned_loss=0.05018, ctc_loss=0.09451, over 3813881.18 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 16:58:30,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244288.0, ans=0.125 +2024-08-31 16:58:52,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=244341.33333333334, ans=0.0 +2024-08-31 16:59:43,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244448.0, ans=0.125 +2024-08-31 16:59:50,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=244448.0, ans=0.025 +2024-08-31 17:00:09,436 INFO [train.py:1114] (2/4) Epoch 19, batch 1050, loss[loss=0.1893, simple_loss=0.2656, pruned_loss=0.04027, ctc_loss=0.08089, over 19837.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2713, pruned_loss=0.04961, ctc_loss=0.09347, over 3821505.79 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 17:00:09,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244554.66666666666, ans=0.0 +2024-08-31 17:00:15,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.97 vs. 
limit=15.0 +2024-08-31 17:00:17,655 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.651e+02 1.935e+02 2.361e+02 3.363e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-31 17:00:23,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.24 vs. limit=6.0 +2024-08-31 17:00:27,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.12 vs. limit=15.0 +2024-08-31 17:01:12,074 INFO [train.py:1114] (2/4) Epoch 19, batch 1100, loss[loss=0.207, simple_loss=0.2733, pruned_loss=0.05189, ctc_loss=0.09218, over 19574.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2711, pruned_loss=0.0492, ctc_loss=0.09283, over 3828692.34 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:01:46,022 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.45 vs. limit=22.5 +2024-08-31 17:02:31,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=245034.66666666666, ans=0.07 +2024-08-31 17:02:40,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=245034.66666666666, ans=0.0 +2024-08-31 17:02:43,470 INFO [train.py:1114] (2/4) Epoch 19, batch 1150, loss[loss=0.1951, simple_loss=0.2585, pruned_loss=0.04856, ctc_loss=0.08652, over 19586.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2713, pruned_loss=0.04932, ctc_loss=0.09286, over 3826076.37 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:02:44,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245088.0, ans=0.0 +2024-08-31 17:03:11,404 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.693e+02 1.899e+02 2.295e+02 3.327e+02, threshold=3.798e+02, percent-clipped=0.0 +2024-08-31 17:03:14,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-31 17:03:36,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245194.66666666666, ans=0.1 +2024-08-31 17:03:56,462 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:04:04,712 INFO [train.py:1114] (2/4) Epoch 19, batch 1200, loss[loss=0.224, simple_loss=0.2874, pruned_loss=0.05756, ctc_loss=0.1139, over 19843.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2722, pruned_loss=0.04967, ctc_loss=0.09346, over 3821922.92 frames. 
], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:04:20,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245408.0, ans=0.125 +2024-08-31 17:04:23,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=245408.0, ans=0.125 +2024-08-31 17:04:29,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=245408.0, ans=0.1 +2024-08-31 17:04:31,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=12.0 +2024-08-31 17:04:35,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245461.33333333334, ans=0.125 +2024-08-31 17:05:08,583 INFO [train.py:1114] (2/4) Epoch 19, batch 1250, loss[loss=0.2319, simple_loss=0.2911, pruned_loss=0.0627, ctc_loss=0.1184, over 19548.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2734, pruned_loss=0.05026, ctc_loss=0.09439, over 3840961.06 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-31 17:05:16,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.647e+02 1.911e+02 2.205e+02 3.499e+02, threshold=3.822e+02, percent-clipped=0.0 +2024-08-31 17:05:18,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=245621.33333333334, ans=0.0 +2024-08-31 17:05:24,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245674.66666666666, ans=0.125 +2024-08-31 17:06:04,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.20 vs. limit=15.0 +2024-08-31 17:06:19,724 INFO [train.py:1114] (2/4) Epoch 19, batch 1300, loss[loss=0.2118, simple_loss=0.279, pruned_loss=0.05196, ctc_loss=0.1015, over 18921.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2721, pruned_loss=0.04953, ctc_loss=0.09306, over 3844775.59 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-31 17:06:29,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.42 vs. limit=22.5 +2024-08-31 17:07:01,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=246048.0, ans=0.125 +2024-08-31 17:07:10,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-31 17:07:12,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-31 17:07:25,631 INFO [train.py:1114] (2/4) Epoch 19, batch 1350, loss[loss=0.1903, simple_loss=0.2634, pruned_loss=0.04161, ctc_loss=0.0848, over 19770.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2713, pruned_loss=0.04908, ctc_loss=0.09232, over 3855306.68 frames. 
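], batch size: 54, lr: 7.87e-03, grad_scale: 64.0

`grad_scale` has doubled from 32.0 to 64.0 here, after briefly dropping to 16.0 around epoch 18, batch 1850; it only ever moves between powers of two. That is the signature of dynamic loss scaling in mixed-precision training: halve the scale when a step produces inf/NaN gradients, double it after a long run of clean steps. A toy sketch of the mechanism (the growth interval and initial scale are illustrative; torch.cuda.amp.GradScaler implements the production version):

```python
class ToyGradScaler:
    def __init__(self, init_scale: float = 32.0, growth_interval: int = 2000):
        self.scale = init_scale
        self.growth_interval = growth_interval
        self._clean_steps = 0

    def update(self, found_inf: bool) -> None:
        if found_inf:                # overflow: shrink and restart the counter
            self.scale /= 2.0
            self._clean_steps = 0
        else:
            self._clean_steps += 1
            if self._clean_steps == self.growth_interval:
                self.scale *= 2.0    # long clean run: safe to grow again
                self._clean_steps = 0

scaler = ToyGradScaler()
for overflow in [False] * 2000 + [True]:
    scaler.update(overflow)
print(scaler.scale)  # 32 -> 64 after 2000 clean steps, back to 32 on overflow
```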
], batch size: 54, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:07:28,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-31 17:07:39,280 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.765e+02 2.070e+02 2.720e+02 4.418e+02, threshold=4.141e+02, percent-clipped=1.0 +2024-08-31 17:07:39,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=246154.66666666666, ans=0.0 +2024-08-31 17:07:47,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.22 vs. limit=15.0 +2024-08-31 17:07:48,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=246208.0, ans=0.0 +2024-08-31 17:07:53,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=246208.0, ans=0.125 +2024-08-31 17:07:57,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=246261.33333333334, ans=0.0 +2024-08-31 17:08:06,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=246314.66666666666, ans=0.07 +2024-08-31 17:08:18,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246314.66666666666, ans=0.125 +2024-08-31 17:08:35,887 INFO [train.py:1114] (2/4) Epoch 19, batch 1400, loss[loss=0.1738, simple_loss=0.2423, pruned_loss=0.03858, ctc_loss=0.07047, over 19671.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2708, pruned_loss=0.04893, ctc_loss=0.09195, over 3863274.26 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:08:46,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246474.66666666666, ans=0.1 +2024-08-31 17:09:02,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=246474.66666666666, ans=0.125 +2024-08-31 17:09:13,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246528.0, ans=0.125 +2024-08-31 17:09:24,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-31 17:09:25,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-31 17:09:27,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=246581.33333333334, ans=0.5 +2024-08-31 17:09:53,658 INFO [train.py:1114] (2/4) Epoch 19, batch 1450, loss[loss=0.2193, simple_loss=0.2862, pruned_loss=0.05525, ctc_loss=0.1049, over 19636.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2719, pruned_loss=0.04963, ctc_loss=0.09341, over 3860184.88 frames. 
], batch size: 63, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:10:02,066 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.290e+02 1.691e+02 1.919e+02 2.362e+02 3.353e+02, threshold=3.838e+02, percent-clipped=0.0 +2024-08-31 17:10:07,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=246741.33333333334, ans=0.05 +2024-08-31 17:11:23,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246741.33333333334, ans=0.1 +2024-08-31 17:11:25,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246794.66666666666, ans=0.1 +2024-08-31 17:11:50,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=246848.0, ans=0.2 +2024-08-31 17:11:53,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=246901.33333333334, ans=0.1 +2024-08-31 17:12:07,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.71 vs. limit=15.0 +2024-08-31 17:12:07,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=246901.33333333334, ans=0.0 +2024-08-31 17:12:12,393 INFO [train.py:1114] (2/4) Epoch 19, batch 1500, loss[loss=0.1912, simple_loss=0.2729, pruned_loss=0.03948, ctc_loss=0.07671, over 19590.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2723, pruned_loss=0.0496, ctc_loss=0.0934, over 3861306.88 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:12:27,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=247008.0, ans=10.0 +2024-08-31 17:13:58,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=247114.66666666666, ans=0.0 +2024-08-31 17:14:38,401 INFO [train.py:1114] (2/4) Epoch 19, batch 1550, loss[loss=0.2226, simple_loss=0.2863, pruned_loss=0.05834, ctc_loss=0.1055, over 19613.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2723, pruned_loss=0.04988, ctc_loss=0.09389, over 3845734.36 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:14:42,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.77 vs. limit=15.0 +2024-08-31 17:14:46,788 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.654e+02 1.883e+02 2.328e+02 3.879e+02, threshold=3.765e+02, percent-clipped=1.0 +2024-08-31 17:15:14,291 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.06 vs. limit=15.0 +2024-08-31 17:15:15,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=247274.66666666666, ans=10.0 +2024-08-31 17:16:14,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=247381.33333333334, ans=0.2 +2024-08-31 17:16:29,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.64 vs. 
limit=15.0 +2024-08-31 17:16:40,633 INFO [train.py:1114] (2/4) Epoch 19, batch 1600, loss[loss=0.2214, simple_loss=0.2933, pruned_loss=0.0538, ctc_loss=0.1048, over 19825.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.272, pruned_loss=0.04952, ctc_loss=0.09334, over 3835103.23 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:16:48,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=247488.0, ans=0.125 +2024-08-31 17:16:54,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=247541.33333333334, ans=0.125 +2024-08-31 17:17:19,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=247648.0, ans=0.125 +2024-08-31 17:17:39,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247701.33333333334, ans=0.0 +2024-08-31 17:17:42,004 INFO [train.py:1114] (2/4) Epoch 19, batch 1650, loss[loss=0.2071, simple_loss=0.2786, pruned_loss=0.04891, ctc_loss=0.0948, over 19643.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2716, pruned_loss=0.04935, ctc_loss=0.09297, over 3830846.76 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:17:50,571 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.753e+02 1.927e+02 2.360e+02 4.500e+02, threshold=3.853e+02, percent-clipped=4.0 +2024-08-31 17:17:52,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=247754.66666666666, ans=0.0 +2024-08-31 17:18:00,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.60 vs. limit=22.5 +2024-08-31 17:18:01,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=247808.0, ans=0.0 +2024-08-31 17:18:44,908 INFO [train.py:1114] (2/4) Epoch 19, batch 1700, loss[loss=0.2146, simple_loss=0.2654, pruned_loss=0.06034, ctc_loss=0.1081, over 19654.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2717, pruned_loss=0.0496, ctc_loss=0.09336, over 3846161.58 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 64.0 +2024-08-31 17:18:49,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.58 vs. limit=15.0 +2024-08-31 17:18:56,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=15.0 +2024-08-31 17:19:05,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=248074.66666666666, ans=0.125 +2024-08-31 17:19:25,991 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.54 vs. limit=15.0 +2024-08-31 17:19:33,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.69 vs. limit=15.0 +2024-08-31 17:19:52,916 INFO [train.py:1114] (2/4) Epoch 19, batch 1750, loss[loss=0.1998, simple_loss=0.2589, pruned_loss=0.05042, ctc_loss=0.09954, over 19643.00 frames. 
], tot_loss[loss=0.2034, simple_loss=0.2711, pruned_loss=0.04927, ctc_loss=0.09278, over 3852816.79 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:19:57,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248288.0, ans=0.1 +2024-08-31 17:20:02,157 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.715e+02 1.941e+02 2.441e+02 4.524e+02, threshold=3.882e+02, percent-clipped=3.0 +2024-08-31 17:20:03,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=248341.33333333334, ans=0.2 +2024-08-31 17:20:04,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=248341.33333333334, ans=0.2 +2024-08-31 17:20:07,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=248341.33333333334, ans=0.125 +2024-08-31 17:20:15,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248394.66666666666, ans=0.125 +2024-08-31 17:20:40,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=248501.33333333334, ans=0.07 +2024-08-31 17:20:47,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0 +2024-08-31 17:20:47,905 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.40 vs. limit=15.0 +2024-08-31 17:20:49,896 INFO [train.py:1114] (2/4) Epoch 19, batch 1800, loss[loss=0.2257, simple_loss=0.2927, pruned_loss=0.05794, ctc_loss=0.1072, over 19631.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2711, pruned_loss=0.04934, ctc_loss=0.09287, over 3852949.68 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:20:54,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.22 vs. limit=22.5 +2024-08-31 17:20:58,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=248554.66666666666, ans=0.125 +2024-08-31 17:21:02,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=248608.0, ans=0.0 +2024-08-31 17:21:05,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=248608.0, ans=0.0 +2024-08-31 17:21:07,545 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.88 vs. limit=6.0 +2024-08-31 17:21:10,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248608.0, ans=0.125 +2024-08-31 17:21:16,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248661.33333333334, ans=0.125 +2024-08-31 17:21:30,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.39 vs. 
limit=15.0 +2024-08-31 17:21:47,154 INFO [train.py:1114] (2/4) Epoch 19, batch 1850, loss[loss=0.2023, simple_loss=0.2777, pruned_loss=0.04666, ctc_loss=0.08398, over 19557.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2712, pruned_loss=0.04937, ctc_loss=0.09307, over 3855688.77 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:21:56,052 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.825e+02 2.203e+02 3.044e+02 4.782e+02, threshold=4.406e+02, percent-clipped=6.0 +2024-08-31 17:22:08,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=248874.66666666666, ans=0.05 +2024-08-31 17:22:18,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-08-31 17:22:28,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0 +2024-08-31 17:22:37,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=248981.33333333334, ans=0.2 +2024-08-31 17:22:52,468 INFO [train.py:1114] (2/4) Epoch 19, batch 1900, loss[loss=0.2002, simple_loss=0.2751, pruned_loss=0.04529, ctc_loss=0.0869, over 19664.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2716, pruned_loss=0.04935, ctc_loss=0.09284, over 3860408.64 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:23:19,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.87 vs. limit=22.5 +2024-08-31 17:23:20,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249194.66666666666, ans=0.0 +2024-08-31 17:23:44,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=249301.33333333334, ans=0.0 +2024-08-31 17:23:48,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.84 vs. limit=15.0 +2024-08-31 17:23:48,979 INFO [train.py:1114] (2/4) Epoch 19, batch 1950, loss[loss=0.188, simple_loss=0.2566, pruned_loss=0.04346, ctc_loss=0.08116, over 19576.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2725, pruned_loss=0.04944, ctc_loss=0.09308, over 3869360.89 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:23:55,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.59 vs. 
limit=22.5 +2024-08-31 17:23:56,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-31 17:23:58,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.608e+02 1.802e+02 2.157e+02 4.545e+02, threshold=3.604e+02, percent-clipped=1.0 +2024-08-31 17:24:00,242 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:24:00,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249408.0, ans=0.125 +2024-08-31 17:24:06,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0 +2024-08-31 17:24:25,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=249514.66666666666, ans=0.0 +2024-08-31 17:24:32,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.83 vs. limit=22.5 +2024-08-31 17:24:48,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249568.0, ans=0.0 +2024-08-31 17:24:50,861 INFO [train.py:1114] (2/4) Epoch 19, batch 2000, loss[loss=0.1783, simple_loss=0.2398, pruned_loss=0.04225, ctc_loss=0.08084, over 19698.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2736, pruned_loss=0.05028, ctc_loss=0.09447, over 3854395.22 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:24:50,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=249621.33333333334, ans=0.125 +2024-08-31 17:25:21,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.78 vs. limit=22.5 +2024-08-31 17:25:43,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=249834.66666666666, ans=0.2 +2024-08-31 17:25:47,794 INFO [train.py:1114] (2/4) Epoch 19, batch 2050, loss[loss=0.1946, simple_loss=0.2478, pruned_loss=0.05164, ctc_loss=0.09563, over 19725.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2727, pruned_loss=0.05016, ctc_loss=0.09432, over 3851573.45 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:25:48,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=249888.0, ans=0.2 +2024-08-31 17:25:57,143 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.719e+02 2.018e+02 2.402e+02 3.677e+02, threshold=4.037e+02, percent-clipped=1.0 +2024-08-31 17:26:00,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=249941.33333333334, ans=0.0 +2024-08-31 17:26:06,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-31 17:26:44,693 INFO [train.py:1114] (2/4) Epoch 19, batch 2100, loss[loss=0.1956, simple_loss=0.2726, pruned_loss=0.04244, ctc_loss=0.08413, over 19766.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2716, pruned_loss=0.04951, ctc_loss=0.0931, over 3858392.60 frames. 
], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:26:46,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=250154.66666666666, ans=0.0 +2024-08-31 17:26:54,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.59 vs. limit=15.0 +2024-08-31 17:27:02,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=250208.0, ans=0.0 +2024-08-31 17:27:29,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250368.0, ans=0.125 +2024-08-31 17:27:32,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=250368.0, ans=0.0 +2024-08-31 17:27:37,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250368.0, ans=0.125 +2024-08-31 17:27:42,518 INFO [train.py:1114] (2/4) Epoch 19, batch 2150, loss[loss=0.1887, simple_loss=0.2574, pruned_loss=0.04329, ctc_loss=0.08363, over 19837.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2712, pruned_loss=0.04917, ctc_loss=0.09243, over 3869130.50 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:27:42,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250421.33333333334, ans=0.1 +2024-08-31 17:27:43,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-31 17:27:51,503 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.672e+02 1.975e+02 2.523e+02 4.782e+02, threshold=3.951e+02, percent-clipped=2.0 +2024-08-31 17:28:03,504 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:28:05,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=250528.0, ans=0.0 +2024-08-31 17:28:10,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=250528.0, ans=0.0 +2024-08-31 17:28:14,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250528.0, ans=0.1 +2024-08-31 17:28:20,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0 +2024-08-31 17:28:33,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=250634.66666666666, ans=0.2 +2024-08-31 17:28:39,682 INFO [train.py:1114] (2/4) Epoch 19, batch 2200, loss[loss=0.2261, simple_loss=0.2888, pruned_loss=0.05851, ctc_loss=0.116, over 19582.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2714, pruned_loss=0.04918, ctc_loss=0.09261, over 3867627.29 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:28:59,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.40 vs. 
limit=10.0 +2024-08-31 17:29:16,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=250848.0, ans=0.125 +2024-08-31 17:29:19,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=250848.0, ans=0.125 +2024-08-31 17:29:20,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=250848.0, ans=0.0 +2024-08-31 17:29:38,740 INFO [train.py:1114] (2/4) Epoch 19, batch 2250, loss[loss=0.1834, simple_loss=0.2628, pruned_loss=0.03846, ctc_loss=0.06771, over 19610.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2716, pruned_loss=0.04934, ctc_loss=0.09308, over 3867632.10 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:29:45,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=250954.66666666666, ans=0.125 +2024-08-31 17:29:47,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.680e+02 1.896e+02 2.375e+02 5.292e+02, threshold=3.791e+02, percent-clipped=4.0 +2024-08-31 17:29:57,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0 +2024-08-31 17:30:21,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=251114.66666666666, ans=0.07 +2024-08-31 17:30:22,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251114.66666666666, ans=0.0 +2024-08-31 17:30:34,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=251168.0, ans=0.125 +2024-08-31 17:30:38,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0 +2024-08-31 17:30:40,025 INFO [train.py:1114] (2/4) Epoch 19, batch 2300, loss[loss=0.2046, simple_loss=0.264, pruned_loss=0.05219, ctc_loss=0.1021, over 19515.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2707, pruned_loss=0.04921, ctc_loss=0.09276, over 3861449.56 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-31 17:30:52,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.64 vs. limit=15.0 +2024-08-31 17:31:27,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.60 vs. limit=15.0 +2024-08-31 17:31:34,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-08-31 17:31:36,421 INFO [train.py:1114] (2/4) Epoch 19, batch 2350, loss[loss=0.1846, simple_loss=0.2687, pruned_loss=0.03581, ctc_loss=0.07242, over 19701.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2706, pruned_loss=0.04923, ctc_loss=0.09257, over 3864096.56 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-31 17:31:43,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.57 vs. 
limit=15.0 +2024-08-31 17:31:45,233 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.718e+02 2.013e+02 2.563e+02 3.706e+02, threshold=4.026e+02, percent-clipped=0.0 +2024-08-31 17:31:52,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=251541.33333333334, ans=0.125 +2024-08-31 17:32:12,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=251594.66666666666, ans=0.0 +2024-08-31 17:32:18,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251648.0, ans=0.125 +2024-08-31 17:32:20,118 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:32:26,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=251701.33333333334, ans=0.025 +2024-08-31 17:32:28,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.18 vs. limit=22.5 +2024-08-31 17:32:36,577 INFO [train.py:1114] (2/4) Epoch 19, batch 2400, loss[loss=0.2026, simple_loss=0.2665, pruned_loss=0.04979, ctc_loss=0.0978, over 19333.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2721, pruned_loss=0.04986, ctc_loss=0.09355, over 3857815.16 frames. ], batch size: 71, lr: 7.79e-03, grad_scale: 32.0 +2024-08-31 17:32:37,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251754.66666666666, ans=0.125 +2024-08-31 17:32:49,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=251808.0, ans=0.0 +2024-08-31 17:32:54,132 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.89 vs. limit=15.0 +2024-08-31 17:33:15,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=251914.66666666666, ans=0.0 +2024-08-31 17:33:18,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=251914.66666666666, ans=0.0 +2024-08-31 17:33:39,841 INFO [train.py:1114] (2/4) Epoch 19, batch 2450, loss[loss=0.2899, simple_loss=0.3219, pruned_loss=0.09196, ctc_loss=0.1851, over 13397.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2761, pruned_loss=0.0525, ctc_loss=0.09895, over 3731230.38 frames. 
], batch size: 140, lr: 7.78e-03, grad_scale: 32.0 +2024-08-31 17:33:48,954 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.610e+02 1.856e+02 2.081e+02 3.075e+02, threshold=3.711e+02, percent-clipped=0.0 +2024-08-31 17:33:51,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=252074.66666666666, ans=0.04949747468305833 +2024-08-31 17:34:00,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=252074.66666666666, ans=0.125 +2024-08-31 17:34:02,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=252128.0, ans=0.2 +2024-08-31 17:34:03,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=252128.0, ans=0.125 +2024-08-31 17:34:09,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252128.0, ans=0.125 +2024-08-31 17:36:18,554 INFO [train.py:1114] (2/4) Epoch 20, batch 0, loss[loss=0.2266, simple_loss=0.2783, pruned_loss=0.06332, ctc_loss=0.1208, over 19812.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2783, pruned_loss=0.06332, ctc_loss=0.1208, over 19812.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-31 17:36:18,554 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-31 17:36:23,601 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.1220, 2.2929, 2.5668, 2.2020], device='cuda:2') +2024-08-31 17:36:28,429 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1834, simple_loss=0.2715, pruned_loss=0.03542, ctc_loss=0.061, over 944034.00 frames. +2024-08-31 17:36:28,429 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13705MB +2024-08-31 17:36:41,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=252288.0, ans=0.025 +2024-08-31 17:36:47,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-31 17:36:50,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=252288.0, ans=0.125 +2024-08-31 17:37:06,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=252394.66666666666, ans=0.025 +2024-08-31 17:37:18,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=252448.0, ans=0.125 +2024-08-31 17:37:27,974 INFO [train.py:1114] (2/4) Epoch 20, batch 50, loss[loss=0.1885, simple_loss=0.2487, pruned_loss=0.04669, ctc_loss=0.08728, over 19724.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2738, pruned_loss=0.05018, ctc_loss=0.09484, over 844974.64 frames. 
], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-31 17:37:36,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252501.33333333334, ans=0.125 +2024-08-31 17:37:37,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=252501.33333333334, ans=0.0 +2024-08-31 17:37:51,148 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.696e+02 1.962e+02 2.261e+02 4.473e+02, threshold=3.923e+02, percent-clipped=2.0 +2024-08-31 17:38:34,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.82 vs. limit=15.0 +2024-08-31 17:41:08,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252714.66666666666, ans=0.125 +2024-08-31 17:41:27,187 INFO [train.py:1114] (2/4) Epoch 20, batch 100, loss[loss=0.1955, simple_loss=0.2628, pruned_loss=0.04602, ctc_loss=0.09063, over 19701.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2748, pruned_loss=0.05027, ctc_loss=0.09532, over 1499556.84 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-31 17:41:53,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=252821.33333333334, ans=0.125 +2024-08-31 17:42:08,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=252821.33333333334, ans=0.2 +2024-08-31 17:43:46,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=252928.0, ans=0.1 +2024-08-31 17:43:51,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=252981.33333333334, ans=0.125 +2024-08-31 17:44:05,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=253034.66666666666, ans=0.0 +2024-08-31 17:44:06,482 INFO [train.py:1114] (2/4) Epoch 20, batch 150, loss[loss=0.1987, simple_loss=0.259, pruned_loss=0.05054, ctc_loss=0.0934, over 19725.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2723, pruned_loss=0.04923, ctc_loss=0.0934, over 2027658.68 frames. 
], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-31 17:44:11,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=253034.66666666666, ans=0.125 +2024-08-31 17:44:57,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=253088.0, ans=0.125 +2024-08-31 17:44:59,739 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.634e+02 1.821e+02 2.194e+02 3.683e+02, threshold=3.641e+02, percent-clipped=0.0 +2024-08-31 17:45:25,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=253141.33333333334, ans=0.125 +2024-08-31 17:45:33,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253194.66666666666, ans=0.125 +2024-08-31 17:45:39,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=253194.66666666666, ans=0.05 +2024-08-31 17:45:39,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=253194.66666666666, ans=0.125 +2024-08-31 17:45:59,902 INFO [train.py:1114] (2/4) Epoch 20, batch 200, loss[loss=0.2369, simple_loss=0.2993, pruned_loss=0.06305, ctc_loss=0.1208, over 18199.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.271, pruned_loss=0.04879, ctc_loss=0.09245, over 2435022.78 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:46:41,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=253408.0, ans=0.125 +2024-08-31 17:46:48,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=253408.0, ans=0.07 +2024-08-31 17:46:48,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=253408.0, ans=10.0 +2024-08-31 17:47:03,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=253461.33333333334, ans=0.2 +2024-08-31 17:47:09,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253461.33333333334, ans=0.1 +2024-08-31 17:47:11,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-31 17:47:33,348 INFO [train.py:1114] (2/4) Epoch 20, batch 250, loss[loss=0.2386, simple_loss=0.2965, pruned_loss=0.0656, ctc_loss=0.1235, over 19424.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2715, pruned_loss=0.04916, ctc_loss=0.09299, over 2755868.39 frames. 
], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:47:44,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=253568.0, ans=0.125 +2024-08-31 17:47:57,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=253621.33333333334, ans=0.2 +2024-08-31 17:47:59,369 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.745e+02 2.044e+02 2.602e+02 4.259e+02, threshold=4.089e+02, percent-clipped=6.0 +2024-08-31 17:48:00,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=253674.66666666666, ans=0.025 +2024-08-31 17:49:29,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.15 vs. limit=22.5 +2024-08-31 17:49:51,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=253781.33333333334, ans=0.125 +2024-08-31 17:49:59,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253834.66666666666, ans=0.1 +2024-08-31 17:50:00,339 INFO [train.py:1114] (2/4) Epoch 20, batch 300, loss[loss=0.2223, simple_loss=0.2862, pruned_loss=0.05767, ctc_loss=0.1075, over 19523.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2706, pruned_loss=0.04832, ctc_loss=0.09118, over 3002011.17 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-31 17:50:03,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=253834.66666666666, ans=0.2 +2024-08-31 17:50:12,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=253888.0, ans=0.125 +2024-08-31 17:50:21,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=253888.0, ans=0.125 +2024-08-31 17:50:27,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253941.33333333334, ans=0.1 +2024-08-31 17:51:05,483 INFO [train.py:1114] (2/4) Epoch 20, batch 350, loss[loss=0.2015, simple_loss=0.2577, pruned_loss=0.05141, ctc_loss=0.1061, over 19767.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2706, pruned_loss=0.04841, ctc_loss=0.09131, over 3190861.06 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:51:23,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=254154.66666666666, ans=0.125 +2024-08-31 17:51:26,962 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.703e+02 1.946e+02 2.321e+02 4.034e+02, threshold=3.891e+02, percent-clipped=0.0 +2024-08-31 17:51:27,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=254154.66666666666, ans=0.05 +2024-08-31 17:51:36,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=254208.0, ans=0.125 +2024-08-31 17:51:41,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.43 vs. 
limit=10.0 +2024-08-31 17:51:45,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.45 vs. limit=15.0 +2024-08-31 17:51:58,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=254314.66666666666, ans=0.025 +2024-08-31 17:52:04,334 INFO [train.py:1114] (2/4) Epoch 20, batch 400, loss[loss=0.2073, simple_loss=0.284, pruned_loss=0.04875, ctc_loss=0.08288, over 19488.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2702, pruned_loss=0.04823, ctc_loss=0.09073, over 3341925.11 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:52:21,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=254421.33333333334, ans=0.125 +2024-08-31 17:52:23,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=254421.33333333334, ans=0.125 +2024-08-31 17:52:29,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=254421.33333333334, ans=0.0 +2024-08-31 17:52:47,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=254528.0, ans=0.07 +2024-08-31 17:53:01,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254581.33333333334, ans=0.0 +2024-08-31 17:53:10,616 INFO [train.py:1114] (2/4) Epoch 20, batch 450, loss[loss=0.1961, simple_loss=0.2694, pruned_loss=0.04407, ctc_loss=0.08644, over 19610.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2701, pruned_loss=0.0482, ctc_loss=0.09055, over 3450704.69 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 32.0 +2024-08-31 17:53:15,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=254634.66666666666, ans=0.2 +2024-08-31 17:53:31,692 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.627e+02 1.777e+02 2.217e+02 3.582e+02, threshold=3.554e+02, percent-clipped=0.0 +2024-08-31 17:54:15,354 INFO [train.py:1114] (2/4) Epoch 20, batch 500, loss[loss=0.2308, simple_loss=0.298, pruned_loss=0.05965, ctc_loss=0.1106, over 19677.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2689, pruned_loss=0.04761, ctc_loss=0.08954, over 3546812.11 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 32.0 +2024-08-31 17:54:20,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.21 vs. limit=6.0 +2024-08-31 17:55:14,672 INFO [train.py:1114] (2/4) Epoch 20, batch 550, loss[loss=0.213, simple_loss=0.2803, pruned_loss=0.05362, ctc_loss=0.09645, over 19343.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2687, pruned_loss=0.04761, ctc_loss=0.08967, over 3609053.13 frames. 
], batch size: 71, lr: 7.54e-03, grad_scale: 32.0 +2024-08-31 17:55:14,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255168.0, ans=0.1 +2024-08-31 17:55:16,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=255168.0, ans=0.0 +2024-08-31 17:55:29,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-08-31 17:55:35,927 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.640e+02 1.908e+02 2.178e+02 3.229e+02, threshold=3.816e+02, percent-clipped=0.0 +2024-08-31 17:55:49,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255274.66666666666, ans=0.125 +2024-08-31 17:56:19,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=255381.33333333334, ans=0.025 +2024-08-31 17:56:21,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-31 17:56:22,749 INFO [train.py:1114] (2/4) Epoch 20, batch 600, loss[loss=0.2385, simple_loss=0.3059, pruned_loss=0.06208, ctc_loss=0.1173, over 19390.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2692, pruned_loss=0.04787, ctc_loss=0.09002, over 3665874.64 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:56:36,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255488.0, ans=0.0 +2024-08-31 17:56:47,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=255541.33333333334, ans=0.0 +2024-08-31 17:56:55,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=255541.33333333334, ans=0.2 +2024-08-31 17:57:22,369 INFO [train.py:1114] (2/4) Epoch 20, batch 650, loss[loss=0.2026, simple_loss=0.2643, pruned_loss=0.05021, ctc_loss=0.1011, over 19779.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2692, pruned_loss=0.04784, ctc_loss=0.0901, over 3715968.33 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:57:44,318 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.759e+02 2.153e+02 2.838e+02 5.166e+02, threshold=4.306e+02, percent-clipped=8.0 +2024-08-31 17:57:46,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255808.0, ans=0.125 +2024-08-31 17:57:49,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255808.0, ans=0.1 +2024-08-31 17:57:51,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.88 vs. limit=6.0 +2024-08-31 17:58:05,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=255861.33333333334, ans=0.2 +2024-08-31 17:58:22,799 INFO [train.py:1114] (2/4) Epoch 20, batch 700, loss[loss=0.1768, simple_loss=0.2504, pruned_loss=0.03804, ctc_loss=0.0676, over 19692.00 frames. 
], tot_loss[loss=0.2011, simple_loss=0.2697, pruned_loss=0.04816, ctc_loss=0.09053, over 3747565.94 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 32.0 +2024-08-31 17:58:27,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=255968.0, ans=0.2 +2024-08-31 17:58:39,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=256021.33333333334, ans=0.125 +2024-08-31 17:58:57,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.54 vs. limit=6.0 +2024-08-31 17:59:00,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=256128.0, ans=0.0 +2024-08-31 17:59:06,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.52 vs. limit=15.0 +2024-08-31 17:59:11,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256181.33333333334, ans=0.125 +2024-08-31 17:59:17,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-31 17:59:17,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=15.0 +2024-08-31 17:59:24,908 INFO [train.py:1114] (2/4) Epoch 20, batch 750, loss[loss=0.2088, simple_loss=0.2794, pruned_loss=0.04879, ctc_loss=0.1016, over 19505.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2698, pruned_loss=0.04816, ctc_loss=0.09076, over 3773491.09 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 32.0 +2024-08-31 17:59:29,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=256234.66666666666, ans=0.125 +2024-08-31 17:59:46,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=256288.0, ans=0.05 +2024-08-31 17:59:58,592 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.642e+02 1.855e+02 2.095e+02 3.716e+02, threshold=3.709e+02, percent-clipped=0.0 +2024-08-31 18:00:01,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.33 vs. 
limit=15.0 +2024-08-31 18:00:03,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-31 18:00:12,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=256394.66666666666, ans=0.0 +2024-08-31 18:00:15,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=256394.66666666666, ans=0.07 +2024-08-31 18:00:22,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=256394.66666666666, ans=0.2 +2024-08-31 18:00:23,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256394.66666666666, ans=0.125 +2024-08-31 18:00:42,916 INFO [train.py:1114] (2/4) Epoch 20, batch 800, loss[loss=0.1706, simple_loss=0.2439, pruned_loss=0.03501, ctc_loss=0.06802, over 19396.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2697, pruned_loss=0.04805, ctc_loss=0.09056, over 3795249.99 frames. ], batch size: 48, lr: 7.52e-03, grad_scale: 32.0 +2024-08-31 18:00:43,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=256501.33333333334, ans=0.0 +2024-08-31 18:00:44,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256501.33333333334, ans=0.1 +2024-08-31 18:00:54,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-08-31 18:00:59,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=256554.66666666666, ans=10.0 +2024-08-31 18:01:28,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=256661.33333333334, ans=0.0 +2024-08-31 18:01:40,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=256714.66666666666, ans=0.5 +2024-08-31 18:01:43,053 INFO [train.py:1114] (2/4) Epoch 20, batch 850, loss[loss=0.1964, simple_loss=0.2782, pruned_loss=0.04071, ctc_loss=0.08291, over 19678.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2696, pruned_loss=0.04817, ctc_loss=0.09069, over 3813468.42 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:01:43,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=256768.0, ans=15.0 +2024-08-31 18:01:53,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256768.0, ans=0.125 +2024-08-31 18:02:05,172 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.672e+02 2.009e+02 2.661e+02 4.692e+02, threshold=4.019e+02, percent-clipped=5.0 +2024-08-31 18:02:31,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256981.33333333334, ans=0.125 +2024-08-31 18:02:33,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=8.83 vs. 
limit=22.5 +2024-08-31 18:02:42,849 INFO [train.py:1114] (2/4) Epoch 20, batch 900, loss[loss=0.203, simple_loss=0.2574, pruned_loss=0.05524, ctc_loss=0.09515, over 19823.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2699, pruned_loss=0.04838, ctc_loss=0.09107, over 3818082.07 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:02:43,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=257034.66666666666, ans=0.0 +2024-08-31 18:02:47,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=257034.66666666666, ans=0.2 +2024-08-31 18:02:51,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=257034.66666666666, ans=0.0 +2024-08-31 18:02:51,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257034.66666666666, ans=0.125 +2024-08-31 18:03:03,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.65 vs. limit=15.0 +2024-08-31 18:03:20,624 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:03:23,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=257194.66666666666, ans=0.0 +2024-08-31 18:03:25,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=257194.66666666666, ans=0.2 +2024-08-31 18:03:32,209 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.04 vs. limit=15.0 +2024-08-31 18:03:45,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=257248.0, ans=0.05 +2024-08-31 18:03:49,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-31 18:03:49,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.60 vs. limit=15.0 +2024-08-31 18:03:50,697 INFO [train.py:1114] (2/4) Epoch 20, batch 950, loss[loss=0.1765, simple_loss=0.2424, pruned_loss=0.04025, ctc_loss=0.07544, over 19523.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2698, pruned_loss=0.04832, ctc_loss=0.09102, over 3819962.07 frames. 
], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-31 18:03:50,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=257301.33333333334, ans=0.035 +2024-08-31 18:03:51,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-31 18:03:53,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-31 18:03:55,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-31 18:04:12,191 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.674e+02 1.914e+02 2.385e+02 5.476e+02, threshold=3.829e+02, percent-clipped=1.0 +2024-08-31 18:04:48,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=257408.0, ans=0.035 +2024-08-31 18:04:48,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257408.0, ans=0.125 +2024-08-31 18:05:05,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=257461.33333333334, ans=10.0 +2024-08-31 18:05:12,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.89 vs. limit=6.0 +2024-08-31 18:05:18,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.98 vs. limit=6.0 +2024-08-31 18:05:25,082 INFO [train.py:1114] (2/4) Epoch 20, batch 1000, loss[loss=0.1882, simple_loss=0.2565, pruned_loss=0.0433, ctc_loss=0.08345, over 19868.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2705, pruned_loss=0.0485, ctc_loss=0.0914, over 3817270.55 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:05:27,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=257568.0, ans=0.0 +2024-08-31 18:10:43,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=257728.0, ans=0.0 +2024-08-31 18:11:53,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.94 vs. limit=22.5 +2024-08-31 18:12:06,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=257781.33333333334, ans=0.035 +2024-08-31 18:12:10,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=257781.33333333334, ans=0.1 +2024-08-31 18:12:15,980 INFO [train.py:1114] (2/4) Epoch 20, batch 1050, loss[loss=0.2247, simple_loss=0.2908, pruned_loss=0.05773, ctc_loss=0.1078, over 19844.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2698, pruned_loss=0.04824, ctc_loss=0.09094, over 3823601.14 frames. 
], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:12:21,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257834.66666666666, ans=0.125 +2024-08-31 18:12:37,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.683e+02 1.941e+02 2.234e+02 3.103e+02, threshold=3.882e+02, percent-clipped=0.0 +2024-08-31 18:12:38,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0 +2024-08-31 18:12:46,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=257941.33333333334, ans=0.125 +2024-08-31 18:13:11,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=258048.0, ans=0.2 +2024-08-31 18:13:25,850 INFO [train.py:1114] (2/4) Epoch 20, batch 1100, loss[loss=0.1899, simple_loss=0.2603, pruned_loss=0.04352, ctc_loss=0.0814, over 19588.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2698, pruned_loss=0.04813, ctc_loss=0.09086, over 3830386.52 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-31 18:13:30,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258101.33333333334, ans=0.125 +2024-08-31 18:13:45,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258154.66666666666, ans=0.1 +2024-08-31 18:13:59,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=258208.0, ans=0.125 +2024-08-31 18:14:00,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=258208.0, ans=0.125 +2024-08-31 18:14:02,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=258261.33333333334, ans=0.0 +2024-08-31 18:14:13,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.14 vs. limit=15.0 +2024-08-31 18:14:26,128 INFO [train.py:1114] (2/4) Epoch 20, batch 1150, loss[loss=0.2038, simple_loss=0.2729, pruned_loss=0.04946, ctc_loss=0.08953, over 19588.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2701, pruned_loss=0.04854, ctc_loss=0.09172, over 3830406.24 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-31 18:14:29,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.84 vs. limit=15.0 +2024-08-31 18:15:02,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258421.33333333334, ans=0.125 +2024-08-31 18:15:02,931 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:15:04,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=258421.33333333334, ans=0.0 +2024-08-31 18:15:04,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=21.34 vs. 
limit=22.5 +2024-08-31 18:15:12,227 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.657e+02 1.937e+02 2.398e+02 3.976e+02, threshold=3.875e+02, percent-clipped=1.0 +2024-08-31 18:15:12,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=258421.33333333334, ans=0.025 +2024-08-31 18:15:33,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=258528.0, ans=0.125 +2024-08-31 18:15:38,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=258581.33333333334, ans=0.025 +2024-08-31 18:15:51,964 INFO [train.py:1114] (2/4) Epoch 20, batch 1200, loss[loss=0.1929, simple_loss=0.2748, pruned_loss=0.04053, ctc_loss=0.07496, over 19834.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2707, pruned_loss=0.04838, ctc_loss=0.09146, over 3826157.79 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-31 18:16:21,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=258741.33333333334, ans=0.0 +2024-08-31 18:16:53,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=258901.33333333334, ans=0.125 +2024-08-31 18:16:54,775 INFO [train.py:1114] (2/4) Epoch 20, batch 1250, loss[loss=0.219, simple_loss=0.2845, pruned_loss=0.05522, ctc_loss=0.1075, over 19528.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2713, pruned_loss=0.04862, ctc_loss=0.09158, over 3844326.38 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:16:58,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=258901.33333333334, ans=0.125 +2024-08-31 18:17:19,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.17 vs. limit=10.0 +2024-08-31 18:17:20,837 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.673e+02 1.864e+02 2.243e+02 4.460e+02, threshold=3.727e+02, percent-clipped=1.0 +2024-08-31 18:17:41,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=259061.33333333334, ans=0.0 +2024-08-31 18:17:42,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=259061.33333333334, ans=0.125 +2024-08-31 18:17:51,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=259061.33333333334, ans=0.125 +2024-08-31 18:19:03,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.32 vs. limit=15.0 +2024-08-31 18:19:05,804 INFO [train.py:1114] (2/4) Epoch 20, batch 1300, loss[loss=0.2409, simple_loss=0.306, pruned_loss=0.06445, ctc_loss=0.1173, over 18879.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2702, pruned_loss=0.04811, ctc_loss=0.09049, over 3848500.15 frames. 
], batch size: 76, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:19:09,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=259168.0, ans=0.0 +2024-08-31 18:19:49,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=259328.0, ans=0.125 +2024-08-31 18:19:54,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=259328.0, ans=0.125 +2024-08-31 18:20:01,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=12.0 +2024-08-31 18:20:12,176 INFO [train.py:1114] (2/4) Epoch 20, batch 1350, loss[loss=0.2074, simple_loss=0.2771, pruned_loss=0.04942, ctc_loss=0.09713, over 19750.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2701, pruned_loss=0.04788, ctc_loss=0.09003, over 3859791.35 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0 +2024-08-31 18:20:15,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=259434.66666666666, ans=0.025 +2024-08-31 18:20:24,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=259488.0, ans=0.0 +2024-08-31 18:20:31,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259488.0, ans=0.125 +2024-08-31 18:20:38,792 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.436e+02 1.677e+02 1.917e+02 2.382e+02 4.193e+02, threshold=3.834e+02, percent-clipped=5.0 +2024-08-31 18:21:01,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=15.0 +2024-08-31 18:21:06,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=22.5 +2024-08-31 18:21:16,778 INFO [train.py:1114] (2/4) Epoch 20, batch 1400, loss[loss=0.1471, simple_loss=0.2213, pruned_loss=0.02578, ctc_loss=0.05327, over 19673.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2699, pruned_loss=0.04767, ctc_loss=0.08976, over 3866412.50 frames. 
], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:21:18,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=259701.33333333334, ans=0.025 +2024-08-31 18:21:38,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=259754.66666666666, ans=0.07 +2024-08-31 18:21:51,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259808.0, ans=0.1 +2024-08-31 18:22:05,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=259861.33333333334, ans=0.0 +2024-08-31 18:22:06,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=259861.33333333334, ans=0.09899494936611666 +2024-08-31 18:22:07,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=259861.33333333334, ans=0.2 +2024-08-31 18:22:50,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0 +2024-08-31 18:22:53,584 INFO [train.py:1114] (2/4) Epoch 20, batch 1450, loss[loss=0.2143, simple_loss=0.2876, pruned_loss=0.05071, ctc_loss=0.09878, over 19651.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2706, pruned_loss=0.04804, ctc_loss=0.09051, over 3863949.13 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:23:08,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=260021.33333333334, ans=0.0 +2024-08-31 18:23:09,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-31 18:23:09,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.61 vs. limit=15.0 +2024-08-31 18:23:17,227 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.776e+02 2.029e+02 2.458e+02 5.712e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-31 18:23:28,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=260074.66666666666, ans=0.2 +2024-08-31 18:23:36,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.65 vs. limit=15.0 +2024-08-31 18:23:39,870 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:23:53,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=260234.66666666666, ans=0.0 +2024-08-31 18:23:54,006 INFO [train.py:1114] (2/4) Epoch 20, batch 1500, loss[loss=0.2014, simple_loss=0.2739, pruned_loss=0.04705, ctc_loss=0.08707, over 19614.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2707, pruned_loss=0.04807, ctc_loss=0.09049, over 3863395.95 frames. 
], batch size: 57, lr: 7.47e-03, grad_scale: 32.0 +2024-08-31 18:24:05,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=260288.0, ans=0.0 +2024-08-31 18:24:14,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260288.0, ans=0.1 +2024-08-31 18:24:31,620 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:24:35,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=260394.66666666666, ans=0.0 +2024-08-31 18:25:24,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260448.0, ans=0.125 +2024-08-31 18:25:34,759 INFO [train.py:1114] (2/4) Epoch 20, batch 1550, loss[loss=0.2261, simple_loss=0.2841, pruned_loss=0.06219, ctc_loss=0.1091, over 19609.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2704, pruned_loss=0.04818, ctc_loss=0.09069, over 3847375.28 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 32.0 +2024-08-31 18:26:01,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260501.33333333334, ans=0.125 +2024-08-31 18:26:25,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260554.66666666666, ans=0.125 +2024-08-31 18:26:31,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260554.66666666666, ans=0.1 +2024-08-31 18:26:33,184 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.748e+02 2.049e+02 2.466e+02 3.855e+02, threshold=4.097e+02, percent-clipped=0.0 +2024-08-31 18:26:44,825 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:26:51,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=260661.33333333334, ans=0.0 +2024-08-31 18:26:56,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=260661.33333333334, ans=0.125 +2024-08-31 18:26:56,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=260661.33333333334, ans=0.2 +2024-08-31 18:27:11,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260714.66666666666, ans=0.1 +2024-08-31 18:27:11,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.64 vs. limit=22.5 +2024-08-31 18:27:18,521 INFO [train.py:1114] (2/4) Epoch 20, batch 1600, loss[loss=0.1954, simple_loss=0.2666, pruned_loss=0.04476, ctc_loss=0.08689, over 19838.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2697, pruned_loss=0.0477, ctc_loss=0.09006, over 3837212.03 frames. 
], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-31 18:27:26,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=260768.0, ans=0.125 +2024-08-31 18:27:45,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=260874.66666666666, ans=0.025 +2024-08-31 18:28:07,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260981.33333333334, ans=0.125 +2024-08-31 18:28:15,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=260981.33333333334, ans=0.0 +2024-08-31 18:28:28,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=260981.33333333334, ans=0.125 +2024-08-31 18:28:30,410 INFO [train.py:1114] (2/4) Epoch 20, batch 1650, loss[loss=0.2043, simple_loss=0.2785, pruned_loss=0.04714, ctc_loss=0.08965, over 19667.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2694, pruned_loss=0.0478, ctc_loss=0.09017, over 3834348.45 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:28:37,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.09 vs. limit=22.5 +2024-08-31 18:28:53,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.719e+02 2.026e+02 2.553e+02 4.958e+02, threshold=4.052e+02, percent-clipped=3.0 +2024-08-31 18:28:54,563 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:29:06,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=261194.66666666666, ans=0.0 +2024-08-31 18:29:07,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261194.66666666666, ans=0.0 +2024-08-31 18:29:19,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=261248.0, ans=0.07 +2024-08-31 18:29:20,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-08-31 18:29:26,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261248.0, ans=0.125 +2024-08-31 18:29:29,542 INFO [train.py:1114] (2/4) Epoch 20, batch 1700, loss[loss=0.1842, simple_loss=0.2411, pruned_loss=0.04628, ctc_loss=0.08683, over 19659.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2689, pruned_loss=0.04731, ctc_loss=0.08917, over 3848218.58 frames. 
], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:29:30,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261301.33333333334, ans=0.1 +2024-08-31 18:29:41,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=261354.66666666666, ans=0.125 +2024-08-31 18:29:54,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261408.0, ans=0.125 +2024-08-31 18:29:58,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261408.0, ans=0.125 +2024-08-31 18:30:04,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=261408.0, ans=0.2 +2024-08-31 18:30:10,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=261461.33333333334, ans=0.2 +2024-08-31 18:31:08,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=261514.66666666666, ans=0.2 +2024-08-31 18:31:18,019 INFO [train.py:1114] (2/4) Epoch 20, batch 1750, loss[loss=0.1892, simple_loss=0.2508, pruned_loss=0.04694, ctc_loss=0.08414, over 19694.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2688, pruned_loss=0.04723, ctc_loss=0.08893, over 3853221.62 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-31 18:31:24,384 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.01 vs. limit=22.5 +2024-08-31 18:31:28,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=261621.33333333334, ans=0.125 +2024-08-31 18:31:32,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261621.33333333334, ans=0.1 +2024-08-31 18:31:40,012 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.679e+02 1.951e+02 2.329e+02 4.159e+02, threshold=3.901e+02, percent-clipped=0.0 +2024-08-31 18:31:40,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=261674.66666666666, ans=0.125 +2024-08-31 18:31:53,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=261728.0, ans=0.0 +2024-08-31 18:31:57,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.97 vs. limit=10.0 +2024-08-31 18:32:02,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.41 vs. limit=15.0 +2024-08-31 18:32:06,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=15.0 +2024-08-31 18:32:15,177 INFO [train.py:1114] (2/4) Epoch 20, batch 1800, loss[loss=0.2156, simple_loss=0.2862, pruned_loss=0.05252, ctc_loss=0.09997, over 19607.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2692, pruned_loss=0.04766, ctc_loss=0.08991, over 3854716.77 frames. 
], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:32:36,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261888.0, ans=0.125 +2024-08-31 18:32:59,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=261941.33333333334, ans=0.07 +2024-08-31 18:33:06,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261941.33333333334, ans=0.1 +2024-08-31 18:33:13,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.52 vs. limit=15.0 +2024-08-31 18:33:14,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261994.66666666666, ans=0.0 +2024-08-31 18:33:31,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.43 vs. limit=15.0 +2024-08-31 18:33:34,490 INFO [train.py:1114] (2/4) Epoch 20, batch 1850, loss[loss=0.2079, simple_loss=0.2818, pruned_loss=0.04822, ctc_loss=0.09384, over 19587.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2692, pruned_loss=0.04785, ctc_loss=0.09001, over 3858477.29 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:33:40,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=262101.33333333334, ans=0.2 +2024-08-31 18:33:51,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262154.6666666667, ans=0.125 +2024-08-31 18:33:56,004 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.842e+02 2.206e+02 3.038e+02 4.306e+02, threshold=4.411e+02, percent-clipped=5.0 +2024-08-31 18:33:57,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262208.0, ans=0.1 +2024-08-31 18:34:04,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.64 vs. limit=15.0 +2024-08-31 18:34:07,855 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:34:19,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=262261.3333333333, ans=0.125 +2024-08-31 18:34:36,223 INFO [train.py:1114] (2/4) Epoch 20, batch 1900, loss[loss=0.1825, simple_loss=0.267, pruned_loss=0.03561, ctc_loss=0.06667, over 19647.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2696, pruned_loss=0.04809, ctc_loss=0.09039, over 3863146.02 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-31 18:34:41,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=12.0 +2024-08-31 18:35:07,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262474.6666666667, ans=0.125 +2024-08-31 18:35:34,445 INFO [train.py:1114] (2/4) Epoch 20, batch 1950, loss[loss=0.1821, simple_loss=0.2528, pruned_loss=0.04053, ctc_loss=0.0758, over 19589.00 frames. 
], tot_loss[loss=0.2009, simple_loss=0.2701, pruned_loss=0.04785, ctc_loss=0.08996, over 3871529.26 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-31 18:35:43,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262634.6666666667, ans=0.125 +2024-08-31 18:35:47,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=262688.0, ans=0.0 +2024-08-31 18:35:54,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.62 vs. limit=15.0 +2024-08-31 18:35:55,625 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.650e+02 1.780e+02 2.101e+02 3.496e+02, threshold=3.560e+02, percent-clipped=0.0 +2024-08-31 18:36:00,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262741.3333333333, ans=0.0 +2024-08-31 18:36:14,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.21 vs. limit=6.0 +2024-08-31 18:36:24,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=15.0 +2024-08-31 18:36:31,323 INFO [train.py:1114] (2/4) Epoch 20, batch 2000, loss[loss=0.1738, simple_loss=0.2359, pruned_loss=0.04065, ctc_loss=0.07592, over 19617.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2706, pruned_loss=0.04828, ctc_loss=0.09092, over 3855804.95 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-31 18:36:31,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=262901.3333333333, ans=0.0 +2024-08-31 18:36:40,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. limit=6.0 +2024-08-31 18:36:51,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262954.6666666667, ans=0.0 +2024-08-31 18:36:55,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263008.0, ans=0.1 +2024-08-31 18:37:04,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=263008.0, ans=0.125 +2024-08-31 18:37:06,563 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.26 vs. limit=22.5 +2024-08-31 18:37:32,668 INFO [train.py:1114] (2/4) Epoch 20, batch 2050, loss[loss=0.1788, simple_loss=0.2491, pruned_loss=0.03983, ctc_loss=0.07203, over 19726.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.27, pruned_loss=0.04839, ctc_loss=0.091, over 3851100.32 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:37:37,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.88 vs. 
limit=15.0 +2024-08-31 18:37:51,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=263221.3333333333, ans=0.0 +2024-08-31 18:37:53,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=263221.3333333333, ans=0.125 +2024-08-31 18:38:02,084 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.724e+02 2.041e+02 2.585e+02 3.821e+02, threshold=4.082e+02, percent-clipped=5.0 +2024-08-31 18:38:32,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=263381.3333333333, ans=0.04949747468305833 +2024-08-31 18:38:36,465 INFO [train.py:1114] (2/4) Epoch 20, batch 2100, loss[loss=0.1988, simple_loss=0.2663, pruned_loss=0.04759, ctc_loss=0.09013, over 19766.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2695, pruned_loss=0.04808, ctc_loss=0.09028, over 3857313.01 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:38:38,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.46 vs. limit=15.0 +2024-08-31 18:38:53,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=263488.0, ans=0.125 +2024-08-31 18:39:07,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=263541.3333333333, ans=0.125 +2024-08-31 18:39:32,897 INFO [train.py:1114] (2/4) Epoch 20, batch 2150, loss[loss=0.2043, simple_loss=0.2724, pruned_loss=0.0491, ctc_loss=0.09524, over 19843.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2686, pruned_loss=0.04763, ctc_loss=0.08941, over 3867882.17 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-31 18:39:33,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=263701.3333333333, ans=0.125 +2024-08-31 18:39:37,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=263701.3333333333, ans=0.2 +2024-08-31 18:39:58,528 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.627e+02 1.896e+02 2.393e+02 5.058e+02, threshold=3.792e+02, percent-clipped=5.0 +2024-08-31 18:40:03,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=263808.0, ans=0.125 +2024-08-31 18:40:05,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.87 vs. 
limit=10.0 +2024-08-31 18:40:07,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=263808.0, ans=0.125 +2024-08-31 18:40:10,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263861.3333333333, ans=0.0 +2024-08-31 18:40:33,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=263914.6666666667, ans=0.0 +2024-08-31 18:41:03,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=263914.6666666667, ans=0.0 +2024-08-31 18:41:09,871 INFO [train.py:1114] (2/4) Epoch 20, batch 2200, loss[loss=0.1871, simple_loss=0.2626, pruned_loss=0.04053, ctc_loss=0.07599, over 19596.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2688, pruned_loss=0.0476, ctc_loss=0.08923, over 3866356.65 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:41:45,061 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.16 vs. limit=22.5 +2024-08-31 18:42:03,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=264128.0, ans=0.2 +2024-08-31 18:42:06,209 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:42:17,182 INFO [train.py:1114] (2/4) Epoch 20, batch 2250, loss[loss=0.2022, simple_loss=0.2822, pruned_loss=0.04396, ctc_loss=0.08545, over 19614.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2698, pruned_loss=0.04772, ctc_loss=0.08955, over 3866128.11 frames. ], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:42:24,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=264234.6666666667, ans=0.0 +2024-08-31 18:42:42,056 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.705e+02 2.149e+02 2.747e+02 5.291e+02, threshold=4.298e+02, percent-clipped=7.0 +2024-08-31 18:42:49,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=264341.3333333333, ans=0.2 +2024-08-31 18:43:02,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=264394.6666666667, ans=0.025 +2024-08-31 18:43:16,659 INFO [train.py:1114] (2/4) Epoch 20, batch 2300, loss[loss=0.1774, simple_loss=0.2547, pruned_loss=0.03553, ctc_loss=0.07252, over 19497.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2694, pruned_loss=0.04793, ctc_loss=0.08987, over 3860959.48 frames. 
], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:43:17,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-31 18:43:21,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=264501.3333333333, ans=0.0 +2024-08-31 18:43:30,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264554.6666666667, ans=0.1 +2024-08-31 18:43:34,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264554.6666666667, ans=0.0 +2024-08-31 18:43:36,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=264554.6666666667, ans=0.0 +2024-08-31 18:43:39,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.18 vs. limit=15.0 +2024-08-31 18:43:41,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264608.0, ans=0.1 +2024-08-31 18:43:42,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=264608.0, ans=0.125 +2024-08-31 18:43:56,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=264661.3333333333, ans=0.2 +2024-08-31 18:44:01,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.23 vs. limit=10.0 +2024-08-31 18:44:12,816 INFO [train.py:1114] (2/4) Epoch 20, batch 2350, loss[loss=0.2356, simple_loss=0.2934, pruned_loss=0.06348, ctc_loss=0.1269, over 19642.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2694, pruned_loss=0.04783, ctc_loss=0.08962, over 3863166.37 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:44:34,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=264821.3333333333, ans=0.1 +2024-08-31 18:44:47,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264821.3333333333, ans=0.125 +2024-08-31 18:44:49,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.669e+02 1.905e+02 2.325e+02 3.822e+02, threshold=3.811e+02, percent-clipped=0.0 +2024-08-31 18:45:09,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=264928.0, ans=0.125 +2024-08-31 18:45:26,862 INFO [train.py:1114] (2/4) Epoch 20, batch 2400, loss[loss=0.2183, simple_loss=0.287, pruned_loss=0.05441, ctc_loss=0.1018, over 19315.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2712, pruned_loss=0.04846, ctc_loss=0.09065, over 3857478.20 frames. 
], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:45:32,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=265034.6666666667, ans=0.07 +2024-08-31 18:45:33,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=265034.6666666667, ans=0.125 +2024-08-31 18:45:39,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=265088.0, ans=0.025 +2024-08-31 18:45:44,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=265088.0, ans=0.2 +2024-08-31 18:45:44,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.00 vs. limit=15.0 +2024-08-31 18:45:46,361 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:45:50,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265141.3333333333, ans=0.1 +2024-08-31 18:46:08,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=265194.6666666667, ans=0.0 +2024-08-31 18:46:13,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0 +2024-08-31 18:46:20,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=265248.0, ans=0.0 +2024-08-31 18:46:23,887 INFO [train.py:1114] (2/4) Epoch 20, batch 2450, loss[loss=0.2717, simple_loss=0.3115, pruned_loss=0.08417, ctc_loss=0.159, over 13610.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2747, pruned_loss=0.05094, ctc_loss=0.09589, over 3726544.93 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:46:45,868 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.663e+02 1.874e+02 2.086e+02 3.013e+02, threshold=3.749e+02, percent-clipped=0.0 +2024-08-31 18:46:48,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=265408.0, ans=0.0 +2024-08-31 18:47:07,520 INFO [train.py:1387] (2/4) Done! 
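Note on the recurring record types in these training logs: `ScheduledFloat` lines report a hyperparameter (a `*_skip_rate`, balancer `prob`, dropout `p`, etc.) whose current value `ans` is a function of `batch_count`; `Whitening` lines report a feature-decorrelation constraint's current `metric` against its `limit`; and the `optim.py` WARNINGs report five grad-norm quantiles together with the clipping threshold, which in the lines above equals `Clipping_scale` (2.0) times the middle quantile, up to display rounding (e.g. 2.0 x 2.009e+02 = 4.019e+02 at 18:02:05). The sketch below is a rough illustration of those two mechanisms only, assuming a piecewise-linear schedule and a median-based threshold; it is not icefall's `scaling.py`/`optim.py` code, and the class and function names are hypothetical.

```python
# Illustrative sketch only -- NOT icefall's implementation; names are assumed.
import torch


class PiecewiseLinearSchedule:
    """A float interpolated linearly between (batch_count, value) breakpoints,
    similar in spirit to the ScheduledFloat values ('ans=...') logged above."""

    def __init__(self, *points):
        # points: e.g. (0.0, 0.2), (4000.0, 0.0)
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
        return pts[-1][1]  # past the last breakpoint, hold the final value


def clip_threshold_from_quantiles(recent_grad_norms: torch.Tensor,
                                  clipping_scale: float = 2.0) -> float:
    """Derive a gradient-clipping threshold as clipping_scale times the median
    of recently observed gradient norms, matching the pattern in the WARNING
    lines above (threshold = 2.0 x the middle quantile)."""
    return clipping_scale * recent_grad_norms.median().item()


# A skip-rate decaying from 0.2 to 0.0 over the first 4000 batches would read
# ans=0.0 at batch_count=256341.33, as in the conv_skip_rate lines above.
skip_rate = PiecewiseLinearSchedule((0.0, 0.2), (4000.0, 0.0))
assert skip_rate(256341.33) == 0.0
```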
diff --git a/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-3 b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-3 new file mode 100644 index 0000000000000000000000000000000000000000..3c42374eb5d19d13f4612c0a06cae77ec5ba7e60 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/log/log-train-2024-08-31-13-15-01-3 @@ -0,0 +1,1045 @@ +2024-08-31 13:15:01,249 INFO [train.py:1182] (3/4) Training started +2024-08-31 13:15:01,971 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-31 13:15:02,198 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2535.int.cedar.computecanada.ca', 'IP address': '172.16.145.228'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 18, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 
1, 'eos_id': 1, 'vocab_size': 500} +2024-08-31 13:15:02,198 INFO [train.py:1212] (3/4) About to create model +2024-08-31 13:15:10,412 INFO [train.py:1216] (3/4) Number of model parameters: 66367431 +2024-08-31 13:15:10,438 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/streaming/exp/epoch-17.pt +2024-08-31 13:16:01,730 INFO [train.py:1231] (3/4) Using DDP +2024-08-31 13:16:07,016 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-31 13:16:39,979 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-31 13:16:39,979 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-31 13:16:39,986 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-31 13:16:39,987 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-31 13:16:39,987 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-31 13:16:39,987 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-31 13:16:39,987 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-31 13:16:39,988 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-31 13:16:41,571 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-31 13:16:41,848 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-31 13:16:41,851 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-31 13:16:41,852 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-31 13:16:42,173 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-31 13:16:42,173 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-31 13:22:43,896 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12808MB +2024-08-31 13:22:45,377 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-31 13:23:02,019 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12885MB +2024-08-31 13:23:03,015 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=4.30 vs. limit=5.0 +2024-08-31 13:23:03,528 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-31 13:24:12,095 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-31 13:24:13,681 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 13097MB +2024-08-31 13:24:13,703 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-31 13:25:06,159 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-31 13:25:06,950 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.04522, ctc_loss=0.09044, over 19798.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.04522, ctc_loss=0.09044, over 19798.00 frames. 
], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 13:25:06,951 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-31 13:25:28,552 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.3005, 0.9812, 1.4801, 0.7409, 1.4295, 1.5490, 1.6372, 1.3385], + device='cuda:3') +2024-08-31 13:25:49,895 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1864, simple_loss=0.2743, pruned_loss=0.03646, ctc_loss=0.06397, over 944034.00 frames. +2024-08-31 13:25:49,896 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13097MB +2024-08-31 13:27:49,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=225680.0, ans=0.1 +2024-08-31 13:38:35,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-31 13:41:44,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-31 13:44:41,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=225840.0, ans=0.125 +2024-08-31 13:48:13,677 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.468e+02 1.934e+02 2.118e+02 2.433e+02 6.228e+02, threshold=4.237e+02, percent-clipped=5.0 +2024-08-31 13:56:45,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=225946.66666666666, ans=0.0 +2024-08-31 13:56:46,282 INFO [train.py:1114] (3/4) Epoch 18, batch 50, loss[loss=0.182, simple_loss=0.2481, pruned_loss=0.04203, ctc_loss=0.0794, over 19711.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2764, pruned_loss=0.05246, ctc_loss=0.09993, over 844773.76 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-31 14:00:22,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=226000.0, ans=0.0 +2024-08-31 14:00:57,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=226000.0, ans=0.2 +2024-08-31 14:01:09,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=226053.33333333334, ans=0.125 +2024-08-31 14:02:51,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=226053.33333333334, ans=0.125 +2024-08-31 14:15:00,238 INFO [train.py:1114] (3/4) Epoch 18, batch 100, loss[loss=0.19, simple_loss=0.2623, pruned_loss=0.04262, ctc_loss=0.08109, over 19730.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2781, pruned_loss=0.0526, ctc_loss=0.1003, over 1499130.94 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-31 14:16:49,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=226213.33333333334, ans=0.2 +2024-08-31 14:17:06,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.84 vs. 
limit=6.0
+2024-08-31 14:26:09,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=226320.0, ans=0.125
+2024-08-31 14:26:20,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=226320.0, ans=0.125
+2024-08-31 14:28:23,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=226320.0, ans=0.025
+2024-08-31 14:28:47,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226373.33333333334, ans=0.125
+2024-08-31 14:32:43,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226426.66666666666, ans=0.125
+2024-08-31 14:32:51,603 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.685e+02 1.949e+02 2.332e+02 3.525e+02, threshold=3.898e+02, percent-clipped=0.0
+2024-08-31 14:34:38,797 INFO [train.py:1114] (3/4) Epoch 18, batch 150, loss[loss=0.1807, simple_loss=0.2471, pruned_loss=0.04195, ctc_loss=0.07635, over 19710.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2754, pruned_loss=0.05175, ctc_loss=0.0982, over 2028556.29 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0
+2024-08-31 14:44:38,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=226533.33333333334, ans=0.125
+2024-08-31 14:45:09,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226533.33333333334, ans=0.125
+2024-08-31 14:45:10,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=15.0
+2024-08-31 14:47:42,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226586.66666666666, ans=0.125
+2024-08-31 15:05:15,398 INFO [train.py:1114] (3/4) Epoch 18, batch 200, loss[loss=0.2423, simple_loss=0.3004, pruned_loss=0.06577, ctc_loss=0.1317, over 18383.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2747, pruned_loss=0.05163, ctc_loss=0.09788, over 2435908.49 frames. ], batch size: 86, lr: 8.42e-03, grad_scale: 32.0
+2024-08-31 15:06:06,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.30 vs. limit=15.0
+2024-08-31 15:15:15,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=226906.66666666666, ans=0.2
+2024-08-31 15:17:44,777 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.761e+02 2.086e+02 2.524e+02 4.159e+02, threshold=4.172e+02, percent-clipped=2.0
+2024-08-31 15:17:59,744 INFO [train.py:1114] (3/4) Epoch 18, batch 250, loss[loss=0.2346, simple_loss=0.2974, pruned_loss=0.06208, ctc_loss=0.119, over 19385.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2738, pruned_loss=0.05107, ctc_loss=0.09641, over 2755239.67 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0
+2024-08-31 15:19:52,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227066.66666666666, ans=0.125
+2024-08-31 15:22:02,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=227120.0, ans=0.125
+2024-08-31 15:22:32,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=227120.0, ans=0.125
+2024-08-31 15:23:12,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227173.33333333334, ans=0.125
+2024-08-31 15:24:04,301 INFO [train.py:1114] (3/4) Epoch 18, batch 300, loss[loss=0.2284, simple_loss=0.2895, pruned_loss=0.06192, ctc_loss=0.1087, over 19510.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2733, pruned_loss=0.05096, ctc_loss=0.09586, over 3000722.02 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0
+2024-08-31 15:24:59,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=227333.33333333334, ans=0.125
+2024-08-31 15:25:24,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227333.33333333334, ans=0.125
+2024-08-31 15:28:09,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227386.66666666666, ans=0.125
+2024-08-31 15:30:47,324 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.680e+02 1.932e+02 2.386e+02 3.920e+02, threshold=3.864e+02, percent-clipped=0.0
+2024-08-31 15:31:32,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227493.33333333334, ans=0.125
+2024-08-31 15:31:47,640 INFO [train.py:1114] (3/4) Epoch 18, batch 350, loss[loss=0.17, simple_loss=0.2372, pruned_loss=0.0366, ctc_loss=0.07378, over 19798.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2744, pruned_loss=0.05153, ctc_loss=0.09691, over 3190837.21 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0
+2024-08-31 15:31:49,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=227546.66666666666, ans=0.0
+2024-08-31 15:32:20,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227600.0, ans=0.125
+2024-08-31 15:33:58,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227706.66666666666, ans=0.0
+2024-08-31 15:34:22,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=227760.0, ans=0.0
+2024-08-31 15:34:33,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=227760.0, ans=0.125
+2024-08-31 15:34:57,662 INFO [train.py:1114] (3/4) Epoch 18, batch 400, loss[loss=0.1921, simple_loss=0.2766, pruned_loss=0.03874, ctc_loss=0.07516, over 19478.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2741, pruned_loss=0.05131, ctc_loss=0.09655, over 3342300.72 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0
+2024-08-31 15:35:52,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=227920.0, ans=0.125
+2024-08-31 15:36:35,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0
+2024-08-31 15:37:04,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=228026.66666666666, ans=0.0
+2024-08-31 15:37:11,046 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.718e+02 1.967e+02 2.336e+02 3.401e+02, threshold=3.934e+02, percent-clipped=0.0
+2024-08-31 15:37:37,964 INFO [train.py:1114] (3/4) Epoch 18, batch 450, loss[loss=0.197, simple_loss=0.271, pruned_loss=0.04471, ctc_loss=0.08391, over 19604.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2744, pruned_loss=0.05149, ctc_loss=0.09684, over 3450789.67 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0
+2024-08-31 15:39:20,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228133.33333333334, ans=0.1
+2024-08-31 15:39:21,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0
+2024-08-31 15:39:36,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=228186.66666666666, ans=0.125
+2024-08-31 15:39:48,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=228240.0, ans=0.0
+2024-08-31 15:39:50,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228240.0, ans=0.125
+2024-08-31 15:40:18,494 INFO [train.py:1114] (3/4) Epoch 18, batch 500, loss[loss=0.2439, simple_loss=0.3041, pruned_loss=0.06675, ctc_loss=0.1255, over 19664.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2733, pruned_loss=0.05084, ctc_loss=0.09569, over 3545366.65 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0
+2024-08-31 15:40:26,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=228346.66666666666, ans=0.2
+2024-08-31 15:40:30,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=228400.0, ans=0.125
+2024-08-31 15:40:36,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=228400.0, ans=0.0
+2024-08-31 15:40:53,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=228506.66666666666, ans=0.2
+2024-08-31 15:41:02,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228506.66666666666, ans=0.125
+2024-08-31 15:41:10,332 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.618e+02 1.812e+02 2.329e+02 3.946e+02, threshold=3.624e+02, percent-clipped=1.0
+2024-08-31 15:41:10,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=228560.0, ans=0.0
+2024-08-31 15:41:17,484 INFO [train.py:1114] (3/4) Epoch 18, batch 550, loss[loss=0.256, simple_loss=0.3078, pruned_loss=0.0744, ctc_loss=0.1389, over 19350.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2736, pruned_loss=0.05109, ctc_loss=0.0961, over 3608466.68 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0
+2024-08-31 15:42:27,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=228666.66666666666, ans=0.0
+2024-08-31 15:43:26,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=228720.0, ans=0.0
+2024-08-31 15:43:27,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0
+2024-08-31 15:43:28,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=228720.0, ans=0.125
+2024-08-31 15:43:37,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=228720.0, ans=0.05
+2024-08-31 15:44:18,825 INFO [train.py:1114] (3/4) Epoch 18, batch 600, loss[loss=0.2144, simple_loss=0.2744, pruned_loss=0.0553, ctc_loss=0.1095, over 19320.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2734, pruned_loss=0.05108, ctc_loss=0.09605, over 3665320.01 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0
+2024-08-31 15:44:56,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.98 vs. limit=10.0
+2024-08-31 15:45:05,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. limit=6.0
+2024-08-31 15:45:07,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228986.66666666666, ans=0.1
+2024-08-31 15:45:28,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.735e+02 2.092e+02 3.203e+02 5.009e+02, threshold=4.184e+02, percent-clipped=13.0
+2024-08-31 15:45:38,283 INFO [train.py:1114] (3/4) Epoch 18, batch 650, loss[loss=0.2017, simple_loss=0.2752, pruned_loss=0.04644, ctc_loss=0.08838, over 19775.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2729, pruned_loss=0.05069, ctc_loss=0.0954, over 3715876.17 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0
+2024-08-31 15:46:17,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229146.66666666666, ans=0.125
+2024-08-31 15:46:31,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=229200.0, ans=0.05
+2024-08-31 15:46:39,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=229253.33333333334, ans=0.2
+2024-08-31 15:46:51,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=229306.66666666666, ans=0.125
+2024-08-31 15:46:59,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=229306.66666666666, ans=0.1
+2024-08-31 15:47:10,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=229360.0, ans=0.125
+2024-08-31 15:47:16,570 INFO [train.py:1114] (3/4) Epoch 18, batch 700, loss[loss=0.1723, simple_loss=0.2481, pruned_loss=0.03487, ctc_loss=0.06689, over 19735.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2737, pruned_loss=0.05118, ctc_loss=0.09642, over 3746552.88 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 16.0
+2024-08-31 15:47:26,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229413.33333333334, ans=0.125
+2024-08-31 15:47:30,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=229466.66666666666, ans=0.0
+2024-08-31 15:47:30,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.98 vs. limit=15.0
+2024-08-31 15:47:34,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.98 vs. limit=15.0
+2024-08-31 15:48:10,576 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.672e+02 1.935e+02 2.401e+02 4.868e+02, threshold=3.870e+02, percent-clipped=1.0
+2024-08-31 15:48:14,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.52 vs. limit=15.0
+2024-08-31 15:48:16,519 INFO [train.py:1114] (3/4) Epoch 18, batch 750, loss[loss=0.2169, simple_loss=0.2779, pruned_loss=0.05625, ctc_loss=0.1083, over 19502.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2729, pruned_loss=0.05082, ctc_loss=0.09591, over 3773339.17 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 16.0
+2024-08-31 15:48:20,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=229680.0, ans=0.125
+2024-08-31 15:48:22,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=229680.0, ans=0.125
+2024-08-31 15:49:08,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.62 vs. limit=15.0
+2024-08-31 15:49:10,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229840.0, ans=0.1
+2024-08-31 15:49:17,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229893.33333333334, ans=0.1
+2024-08-31 15:49:28,025 INFO [train.py:1114] (3/4) Epoch 18, batch 800, loss[loss=0.194, simple_loss=0.2544, pruned_loss=0.04952, ctc_loss=0.08633, over 19800.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2732, pruned_loss=0.05098, ctc_loss=0.0961, over 3794534.88 frames. ], batch size: 49, lr: 8.37e-03, grad_scale: 32.0
+2024-08-31 15:49:37,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=229946.66666666666, ans=0.2
+2024-08-31 15:49:56,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.46 vs. limit=15.0
+2024-08-31 15:49:58,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=230053.33333333334, ans=0.125
+2024-08-31 15:49:59,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=230053.33333333334, ans=0.0
+2024-08-31 15:50:00,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=230053.33333333334, ans=0.025
+2024-08-31 15:50:24,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=230160.0, ans=0.2
+2024-08-31 15:50:24,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=230160.0, ans=22.5
+2024-08-31 15:50:27,782 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.682e+02 1.957e+02 2.333e+02 3.697e+02, threshold=3.913e+02, percent-clipped=0.0
+2024-08-31 15:50:33,684 INFO [train.py:1114] (3/4) Epoch 18, batch 850, loss[loss=0.2164, simple_loss=0.2837, pruned_loss=0.05471, ctc_loss=0.09926, over 19618.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2727, pruned_loss=0.05112, ctc_loss=0.09613, over 3812874.07 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0
+2024-08-31 15:50:40,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.10 vs. limit=22.5
+2024-08-31 15:51:33,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=230266.66666666666, ans=0.95
+2024-08-31 15:51:38,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230320.0, ans=0.125
+2024-08-31 15:51:50,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=230373.33333333334, ans=0.0
+2024-08-31 15:51:58,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0
+2024-08-31 15:51:59,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.33 vs. limit=12.0
+2024-08-31 15:52:15,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. limit=12.0
+2024-08-31 15:52:15,925 INFO [train.py:1114] (3/4) Epoch 18, batch 900, loss[loss=0.1792, simple_loss=0.2437, pruned_loss=0.04221, ctc_loss=0.0756, over 19824.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2729, pruned_loss=0.05134, ctc_loss=0.09645, over 3817990.98 frames. ], batch size: 49, lr: 8.36e-03, grad_scale: 32.0
+2024-08-31 15:52:20,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0
+2024-08-31 15:52:23,175 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 15:52:26,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=230533.33333333334, ans=0.2
+2024-08-31 15:52:47,908 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.14 vs. limit=10.0
+2024-08-31 15:53:02,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=230640.0, ans=0.05
+2024-08-31 15:53:09,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=230693.33333333334, ans=0.025
+2024-08-31 15:53:12,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.645e+02 1.872e+02 2.411e+02 3.930e+02, threshold=3.745e+02, percent-clipped=1.0
+2024-08-31 15:53:16,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230693.33333333334, ans=0.1
+2024-08-31 15:53:46,110 INFO [train.py:1114] (3/4) Epoch 18, batch 950, loss[loss=0.2131, simple_loss=0.2681, pruned_loss=0.05739, ctc_loss=0.1085, over 19489.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2733, pruned_loss=0.05165, ctc_loss=0.09697, over 3820036.99 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0
+2024-08-31 15:53:46,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230746.66666666666, ans=0.1
+2024-08-31 15:53:50,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.54 vs. limit=22.5
+2024-08-31 15:53:53,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230746.66666666666, ans=0.1
+2024-08-31 15:54:35,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=230960.0, ans=0.2
+2024-08-31 15:54:36,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=230960.0, ans=0.95
+2024-08-31 15:54:40,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230960.0, ans=0.0
+2024-08-31 15:54:48,313 INFO [train.py:1114] (3/4) Epoch 18, batch 1000, loss[loss=0.2018, simple_loss=0.2724, pruned_loss=0.04639, ctc_loss=0.09608, over 19864.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2745, pruned_loss=0.05203, ctc_loss=0.09765, over 3816317.46 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0
+2024-08-31 15:54:57,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=231013.33333333334, ans=0.0
+2024-08-31 15:55:12,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.70 vs. limit=22.5
+2024-08-31 15:55:38,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231173.33333333334, ans=0.1
+2024-08-31 15:55:51,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231226.66666666666, ans=0.0
+2024-08-31 15:55:55,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.660e+02 1.836e+02 2.172e+02 3.389e+02, threshold=3.673e+02, percent-clipped=0.0
+2024-08-31 15:56:01,085 INFO [train.py:1114] (3/4) Epoch 18, batch 1050, loss[loss=0.2064, simple_loss=0.2823, pruned_loss=0.0466, ctc_loss=0.0931, over 19827.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2737, pruned_loss=0.05162, ctc_loss=0.097, over 3823782.84 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0
+2024-08-31 15:56:03,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=231280.0, ans=0.125
+2024-08-31 15:56:13,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=231333.33333333334, ans=0.2
+2024-08-31 15:57:01,167 INFO [train.py:1114] (3/4) Epoch 18, batch 1100, loss[loss=0.2189, simple_loss=0.2822, pruned_loss=0.05634, ctc_loss=0.1072, over 19595.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2736, pruned_loss=0.05133, ctc_loss=0.09652, over 3831868.12 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 32.0
+2024-08-31 15:57:01,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=231546.66666666666, ans=0.0
+2024-08-31 15:57:10,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=231546.66666666666, ans=0.125
+2024-08-31 15:57:13,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=231546.66666666666, ans=0.125
+2024-08-31 15:57:20,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.96 vs. limit=22.5
+2024-08-31 15:57:44,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=231706.66666666666, ans=0.025
+2024-08-31 15:57:58,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.608e+02 1.860e+02 2.284e+02 4.941e+02, threshold=3.719e+02, percent-clipped=1.0
+2024-08-31 15:58:04,206 INFO [train.py:1114] (3/4) Epoch 18, batch 1150, loss[loss=0.1967, simple_loss=0.2678, pruned_loss=0.04517, ctc_loss=0.08796, over 19585.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2736, pruned_loss=0.05139, ctc_loss=0.09646, over 3828941.83 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 32.0
+2024-08-31 15:58:26,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=12.0
+2024-08-31 15:58:27,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=231866.66666666666, ans=0.95
+2024-08-31 15:58:37,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231920.0, ans=0.1
+2024-08-31 15:58:45,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231920.0, ans=0.125
+2024-08-31 15:58:57,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=231973.33333333334, ans=0.95
+2024-08-31 15:59:03,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.31 vs. limit=10.0
+2024-08-31 15:59:17,232 INFO [train.py:1114] (3/4) Epoch 18, batch 1200, loss[loss=0.205, simple_loss=0.2802, pruned_loss=0.04791, ctc_loss=0.08501, over 19846.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.274, pruned_loss=0.05123, ctc_loss=0.09636, over 3824642.10 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0
+2024-08-31 15:59:33,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=232133.33333333334, ans=0.0
+2024-08-31 15:59:34,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=232133.33333333334, ans=0.125
+2024-08-31 15:59:37,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232133.33333333334, ans=0.1
+2024-08-31 15:59:46,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.41 vs. limit=15.0
+2024-08-31 15:59:48,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=232186.66666666666, ans=0.025
+2024-08-31 15:59:50,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.17 vs. limit=6.0
+2024-08-31 15:59:52,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.25 vs. limit=10.0
+2024-08-31 16:00:06,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=232293.33333333334, ans=0.0
+2024-08-31 16:00:12,202 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.681e+02 1.869e+02 2.236e+02 3.755e+02, threshold=3.738e+02, percent-clipped=1.0
+2024-08-31 16:00:18,304 INFO [train.py:1114] (3/4) Epoch 18, batch 1250, loss[loss=0.2303, simple_loss=0.2992, pruned_loss=0.05875, ctc_loss=0.1096, over 19519.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2746, pruned_loss=0.05129, ctc_loss=0.09653, over 3842414.52 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0
+2024-08-31 16:00:24,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=232346.66666666666, ans=0.2
+2024-08-31 16:01:12,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=232560.0, ans=0.2
+2024-08-31 16:01:15,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232560.0, ans=0.125
+2024-08-31 16:01:22,409 INFO [train.py:1114] (3/4) Epoch 18, batch 1300, loss[loss=0.2172, simple_loss=0.2789, pruned_loss=0.05567, ctc_loss=0.1103, over 18885.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2742, pruned_loss=0.05105, ctc_loss=0.09628, over 3845881.93 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 32.0
+2024-08-31 16:01:41,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.30 vs. limit=10.0
+2024-08-31 16:01:48,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=232720.0, ans=0.125
+2024-08-31 16:02:21,662 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.351e+02 1.758e+02 2.176e+02 2.645e+02 4.342e+02, threshold=4.353e+02, percent-clipped=3.0
+2024-08-31 16:02:25,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0
+2024-08-31 16:02:27,588 INFO [train.py:1114] (3/4) Epoch 18, batch 1350, loss[loss=0.2047, simple_loss=0.2731, pruned_loss=0.04981, ctc_loss=0.09156, over 19779.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2739, pruned_loss=0.05112, ctc_loss=0.09623, over 3857636.80 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 32.0
+2024-08-31 16:02:34,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=232880.0, ans=0.0
+2024-08-31 16:02:52,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.95 vs. limit=15.0
+2024-08-31 16:03:04,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=15.0
+2024-08-31 16:03:25,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=233093.33333333334, ans=0.125
+2024-08-31 16:03:29,585 INFO [train.py:1114] (3/4) Epoch 18, batch 1400, loss[loss=0.1888, simple_loss=0.2552, pruned_loss=0.04496, ctc_loss=0.0811, over 19661.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2739, pruned_loss=0.05112, ctc_loss=0.09614, over 3864000.41 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 32.0
+2024-08-31 16:03:39,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=233146.66666666666, ans=0.125
+2024-08-31 16:03:40,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=12.0
+2024-08-31 16:04:00,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.90 vs. limit=15.0
+2024-08-31 16:04:06,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.00 vs. limit=22.5
+2024-08-31 16:04:29,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.45 vs. limit=22.5
+2024-08-31 16:04:36,291 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.655e+02 1.916e+02 2.338e+02 3.956e+02, threshold=3.832e+02, percent-clipped=0.0
+2024-08-31 16:04:42,274 INFO [train.py:1114] (3/4) Epoch 18, batch 1450, loss[loss=0.2494, simple_loss=0.3061, pruned_loss=0.07002, ctc_loss=0.1318, over 19631.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2748, pruned_loss=0.05125, ctc_loss=0.09651, over 3862140.30 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 32.0
+2024-08-31 16:04:49,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=233413.33333333334, ans=0.2
+2024-08-31 16:05:12,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233520.0, ans=0.1
+2024-08-31 16:05:22,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=233573.33333333334, ans=0.025
+2024-08-31 16:05:35,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=233626.66666666666, ans=0.0
+2024-08-31 16:05:48,827 INFO [train.py:1114] (3/4) Epoch 18, batch 1500, loss[loss=0.18, simple_loss=0.2528, pruned_loss=0.03902, ctc_loss=0.07278, over 19582.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2749, pruned_loss=0.05123, ctc_loss=0.09641, over 3862528.34 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 32.0
+2024-08-31 16:06:04,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=233733.33333333334, ans=0.125
+2024-08-31 16:06:15,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.99 vs. limit=22.5
+2024-08-31 16:06:42,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=233893.33333333334, ans=0.125
+2024-08-31 16:06:50,190 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.669e+02 1.866e+02 2.355e+02 3.552e+02, threshold=3.733e+02, percent-clipped=0.0
+2024-08-31 16:07:06,042 INFO [train.py:1114] (3/4) Epoch 18, batch 1550, loss[loss=0.2309, simple_loss=0.2948, pruned_loss=0.06067, ctc_loss=0.1139, over 19583.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2747, pruned_loss=0.05132, ctc_loss=0.0966, over 3847134.59 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 32.0
+2024-08-31 16:07:20,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=234000.0, ans=0.025
+2024-08-31 16:07:26,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=234000.0, ans=0.0
+2024-08-31 16:07:44,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=234106.66666666666, ans=0.125
+2024-08-31 16:07:45,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=234106.66666666666, ans=0.07
+2024-08-31 16:08:07,315 INFO [train.py:1114] (3/4) Epoch 18, batch 1600, loss[loss=0.2111, simple_loss=0.2823, pruned_loss=0.05051, ctc_loss=0.09738, over 19822.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2744, pruned_loss=0.05131, ctc_loss=0.09687, over 3835614.43 frames. ], batch size: 57, lr: 8.29e-03, grad_scale: 32.0
+2024-08-31 16:08:11,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=234213.33333333334, ans=0.0
+2024-08-31 16:08:21,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.54 vs. limit=6.0
+2024-08-31 16:08:36,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=234320.0, ans=0.5
+2024-08-31 16:08:42,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=234320.0, ans=0.125
+2024-08-31 16:08:54,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234373.33333333334, ans=0.125
+2024-08-31 16:09:02,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=234426.66666666666, ans=0.2
+2024-08-31 16:09:20,622 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.787e+02 2.153e+02 2.672e+02 5.491e+02, threshold=4.305e+02, percent-clipped=8.0
+2024-08-31 16:09:26,584 INFO [train.py:1114] (3/4) Epoch 18, batch 1650, loss[loss=0.2215, simple_loss=0.2882, pruned_loss=0.05662, ctc_loss=0.1036, over 19641.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2743, pruned_loss=0.05136, ctc_loss=0.09679, over 3831937.21 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0
+2024-08-31 16:13:29,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=234586.66666666666, ans=0.2
+2024-08-31 16:13:36,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=234640.0, ans=0.125
+2024-08-31 16:14:12,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=234693.33333333334, ans=0.125
+2024-08-31 16:14:15,750 INFO [train.py:1114] (3/4) Epoch 18, batch 1700, loss[loss=0.1724, simple_loss=0.2347, pruned_loss=0.03996, ctc_loss=0.07529, over 19662.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2736, pruned_loss=0.05071, ctc_loss=0.09555, over 3845944.45 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 32.0
+2024-08-31 16:14:17,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=234746.66666666666, ans=0.025
+2024-08-31 16:14:35,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=234800.0, ans=0.0
+2024-08-31 16:14:39,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=234853.33333333334, ans=0.125
+2024-08-31 16:14:43,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=234853.33333333334, ans=0.0
+2024-08-31 16:15:07,764 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.694e+02 2.038e+02 2.484e+02 5.869e+02, threshold=4.076e+02, percent-clipped=3.0
+2024-08-31 16:15:09,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234960.0, ans=0.1
+2024-08-31 16:15:13,538 INFO [train.py:1114] (3/4) Epoch 18, batch 1750, loss[loss=0.2153, simple_loss=0.2678, pruned_loss=0.06074, ctc_loss=0.103, over 19645.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2733, pruned_loss=0.05062, ctc_loss=0.09524, over 3850764.40 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0
+2024-08-31 16:15:22,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=235013.33333333334, ans=0.0
+2024-08-31 16:15:37,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=235066.66666666666, ans=0.0
+2024-08-31 16:15:45,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235120.0, ans=0.125
+2024-08-31 16:15:50,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.75 vs. limit=15.0
+2024-08-31 16:15:59,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235173.33333333334, ans=0.1
+2024-08-31 16:16:05,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=235173.33333333334, ans=22.5
+2024-08-31 16:16:07,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235226.66666666666, ans=0.0
+2024-08-31 16:16:18,932 INFO [train.py:1114] (3/4) Epoch 18, batch 1800, loss[loss=0.2146, simple_loss=0.2811, pruned_loss=0.05412, ctc_loss=0.09974, over 19616.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2737, pruned_loss=0.05086, ctc_loss=0.09548, over 3851681.83 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 32.0
+2024-08-31 16:16:33,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0
+2024-08-31 16:16:40,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=235333.33333333334, ans=0.125
+2024-08-31 16:16:42,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=235386.66666666666, ans=0.125
+2024-08-31 16:17:12,064 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.320e+02 1.739e+02 2.099e+02 2.606e+02 4.220e+02, threshold=4.197e+02, percent-clipped=1.0
+2024-08-31 16:17:16,664 INFO [train.py:1114] (3/4) Epoch 18, batch 1850, loss[loss=0.1984, simple_loss=0.2759, pruned_loss=0.0426, ctc_loss=0.08941, over 19615.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2734, pruned_loss=0.05086, ctc_loss=0.09559, over 3855891.58 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 16.0
+2024-08-31 16:17:26,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=235546.66666666666, ans=0.2
+2024-08-31 16:17:43,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=235600.0, ans=0.125
+2024-08-31 16:18:06,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=235706.66666666666, ans=0.2
+2024-08-31 16:18:08,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=235760.0, ans=0.125
+2024-08-31 16:18:09,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=235760.0, ans=0.125
+2024-08-31 16:18:12,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=235760.0, ans=0.5
+2024-08-31 16:18:21,105 INFO [train.py:1114] (3/4) Epoch 18, batch 1900, loss[loss=0.2063, simple_loss=0.2855, pruned_loss=0.0456, ctc_loss=0.0899, over 19669.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2739, pruned_loss=0.05101, ctc_loss=0.09583, over 3860883.18 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 16.0
+2024-08-31 16:18:24,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235813.33333333334, ans=0.125
+2024-08-31 16:18:27,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=235813.33333333334, ans=0.035
+2024-08-31 16:18:38,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=235866.66666666666, ans=15.0
+2024-08-31 16:18:43,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.42 vs. limit=22.5
+2024-08-31 16:18:46,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235920.0, ans=0.1
+2024-08-31 16:19:08,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=236026.66666666666, ans=0.125
+2024-08-31 16:19:14,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.296e+02 1.623e+02 1.837e+02 2.195e+02 5.135e+02, threshold=3.673e+02, percent-clipped=2.0
+2024-08-31 16:19:18,766 INFO [train.py:1114] (3/4) Epoch 18, batch 1950, loss[loss=0.1837, simple_loss=0.2565, pruned_loss=0.04076, ctc_loss=0.07339, over 19593.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.275, pruned_loss=0.05132, ctc_loss=0.09665, over 3869736.26 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 16.0
+2024-08-31 16:19:29,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236133.33333333334, ans=0.1
+2024-08-31 16:20:21,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=236133.33333333334, ans=0.07
+2024-08-31 16:20:28,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=236133.33333333334, ans=0.125
+2024-08-31 16:20:39,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=236186.66666666666, ans=0.0
+2024-08-31 16:20:48,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=236186.66666666666, ans=0.125
+2024-08-31 16:20:49,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. limit=15.0
+2024-08-31 16:21:07,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=236293.33333333334, ans=0.125
+2024-08-31 16:21:21,681 INFO [train.py:1114] (3/4) Epoch 18, batch 2000, loss[loss=0.2004, simple_loss=0.2528, pruned_loss=0.05436, ctc_loss=0.09824, over 19652.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2753, pruned_loss=0.05158, ctc_loss=0.09726, over 3853632.70 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 32.0
+2024-08-31 16:21:27,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=236346.66666666666, ans=0.125
+2024-08-31 16:21:43,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236453.33333333334, ans=0.125
+2024-08-31 16:21:58,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=236506.66666666666, ans=0.125
+2024-08-31 16:22:04,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236506.66666666666, ans=0.1
+2024-08-31 16:22:12,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236560.0, ans=0.1
+2024-08-31 16:22:14,727 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.400e+02 1.704e+02 2.096e+02 2.751e+02 4.638e+02, threshold=4.193e+02, percent-clipped=6.0
+2024-08-31 16:22:19,167 INFO [train.py:1114] (3/4) Epoch 18, batch 2050, loss[loss=0.1865, simple_loss=0.2481, pruned_loss=0.04505, ctc_loss=0.08716, over 19737.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2743, pruned_loss=0.05137, ctc_loss=0.09676, over 3850559.28 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 32.0
+2024-08-31 16:22:19,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=236613.33333333334, ans=0.125
+2024-08-31 16:22:25,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.43 vs. limit=15.0
+2024-08-31 16:22:38,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0
+2024-08-31 16:22:48,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.91 vs. limit=22.5
+2024-08-31 16:23:04,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=236773.33333333334, ans=0.04949747468305833
+2024-08-31 16:23:21,344 INFO [train.py:1114] (3/4) Epoch 18, batch 2100, loss[loss=0.1964, simple_loss=0.2689, pruned_loss=0.04561, ctc_loss=0.08157, over 19756.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2737, pruned_loss=0.05083, ctc_loss=0.09584, over 3857749.41 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 32.0
+2024-08-31 16:23:37,136 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.17 vs. limit=22.5
+2024-08-31 16:23:38,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=236933.33333333334, ans=0.0
+2024-08-31 16:23:42,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=236933.33333333334, ans=0.025
+2024-08-31 16:23:57,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=236986.66666666666, ans=0.2
+2024-08-31 16:23:58,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.31 vs. limit=10.0
+2024-08-31 16:24:26,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237093.33333333334, ans=0.125
+2024-08-31 16:24:27,117 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.628e+02 1.802e+02 2.351e+02 4.404e+02, threshold=3.604e+02, percent-clipped=1.0
+2024-08-31 16:24:31,668 INFO [train.py:1114] (3/4) Epoch 18, batch 2150, loss[loss=0.1863, simple_loss=0.2609, pruned_loss=0.04013, ctc_loss=0.07876, over 19829.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2734, pruned_loss=0.05087, ctc_loss=0.09591, over 3869587.38 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 32.0
+2024-08-31 16:24:41,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=237146.66666666666, ans=0.0
+2024-08-31 16:24:42,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=237146.66666666666, ans=12.0
+2024-08-31 16:24:59,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=237253.33333333334, ans=0.0
+2024-08-31 16:25:29,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237360.0, ans=0.125
+2024-08-31 16:25:40,259 INFO [train.py:1114] (3/4) Epoch 18, batch 2200, loss[loss=0.2231, simple_loss=0.2828, pruned_loss=0.05852, ctc_loss=0.1156, over 19580.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2729, pruned_loss=0.05039, ctc_loss=0.095, over 3867603.63 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 32.0
+2024-08-31 16:26:07,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=237520.0, ans=0.0
+2024-08-31 16:26:10,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=237520.0, ans=0.2
+2024-08-31 16:26:15,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.38 vs. limit=15.0
+2024-08-31 16:26:33,721 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 1.652e+02 1.938e+02 2.493e+02 4.901e+02, threshold=3.877e+02, percent-clipped=6.0
+2024-08-31 16:26:38,341 INFO [train.py:1114] (3/4) Epoch 18, batch 2250, loss[loss=0.214, simple_loss=0.2869, pruned_loss=0.05116, ctc_loss=0.09722, over 19620.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2727, pruned_loss=0.05007, ctc_loss=0.09455, over 3867278.58 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 32.0
+2024-08-31 16:26:58,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=237680.0, ans=15.0
+2024-08-31 16:27:03,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=237733.33333333334, ans=0.125
+2024-08-31 16:27:13,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=237733.33333333334, ans=0.0
+2024-08-31 16:27:39,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=237893.33333333334, ans=0.125
+2024-08-31 16:27:51,885 INFO [train.py:1114] (3/4) Epoch 18, batch 2300, loss[loss=0.1943, simple_loss=0.2611, pruned_loss=0.0466, ctc_loss=0.08589, over 19511.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.272, pruned_loss=0.05018, ctc_loss=0.09476, over 3861205.89 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 32.0
+2024-08-31 16:28:04,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238000.0, ans=0.125
+2024-08-31 16:28:22,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=238053.33333333334, ans=0.95
+2024-08-31 16:28:28,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=238106.66666666666, ans=0.125
+2024-08-31 16:28:33,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238106.66666666666, ans=0.125
+2024-08-31 16:28:40,164 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.63 vs. limit=15.0
+2024-08-31 16:28:42,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=238160.0, ans=0.0
+2024-08-31 16:28:47,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.355e+02 1.696e+02 1.848e+02 2.393e+02 3.836e+02, threshold=3.696e+02, percent-clipped=0.0
+2024-08-31 16:29:03,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.60 vs. limit=6.0
+2024-08-31 16:29:07,667 INFO [train.py:1114] (3/4) Epoch 18, batch 2350, loss[loss=0.196, simple_loss=0.2699, pruned_loss=0.0447, ctc_loss=0.08199, over 19631.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2718, pruned_loss=0.04993, ctc_loss=0.09426, over 3863894.69 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 32.0
+2024-08-31 16:29:33,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.91 vs. limit=22.5
+2024-08-31 16:29:34,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238320.0, ans=0.1
+2024-08-31 16:29:35,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.00 vs. limit=15.0
+2024-08-31 16:29:37,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.89 vs. limit=10.0
+2024-08-31 16:30:29,253 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:30:31,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=238426.66666666666, ans=0.125
+2024-08-31 16:30:31,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=238426.66666666666, ans=0.1
+2024-08-31 16:30:32,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=238426.66666666666, ans=0.125
+2024-08-31 16:30:38,498 INFO [train.py:1114] (3/4) Epoch 18, batch 2400, loss[loss=0.2115, simple_loss=0.2842, pruned_loss=0.05022, ctc_loss=0.09604, over 19319.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2738, pruned_loss=0.05054, ctc_loss=0.09521, over 3857630.46 frames. ], batch size: 71, lr: 8.22e-03, grad_scale: 32.0
+2024-08-31 16:30:38,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=238480.0, ans=0.5
+2024-08-31 16:30:45,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=238480.0, ans=0.0
+2024-08-31 16:30:58,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=238533.33333333334, ans=0.125
+2024-08-31 16:31:00,254 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:31:14,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=238586.66666666666, ans=0.025
+2024-08-31 16:31:46,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=238693.33333333334, ans=0.2
+2024-08-31 16:31:47,468 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.444e+02 1.682e+02 1.835e+02 2.125e+02 4.662e+02, threshold=3.671e+02, percent-clipped=5.0
+2024-08-31 16:31:52,086 INFO [train.py:1114] (3/4) Epoch 18, batch 2450, loss[loss=0.2635, simple_loss=0.3014, pruned_loss=0.08033, ctc_loss=0.1624, over 13216.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2777, pruned_loss=0.05338, ctc_loss=0.1009, over 3730768.58 frames. ], batch size: 140, lr: 8.21e-03, grad_scale: 32.0
+2024-08-31 16:32:21,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=238853.33333333334, ans=0.0
+2024-08-31 16:32:22,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.32 vs. limit=10.0
+2024-08-31 16:32:27,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=238906.66666666666, ans=0.07
+2024-08-31 16:32:31,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=238906.66666666666, ans=0.0
+2024-08-31 16:32:33,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.35 vs. limit=6.0
+2024-08-31 16:33:43,960 INFO [train.py:1114] (3/4) Epoch 19, batch 0, loss[loss=0.2179, simple_loss=0.2769, pruned_loss=0.05762, ctc_loss=0.109, over 19809.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2769, pruned_loss=0.05762, ctc_loss=0.109, over 19809.00 frames. ], batch size: 49, lr: 7.99e-03, grad_scale: 32.0
+2024-08-31 16:33:43,961 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-31 16:33:52,624 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0211, 2.1922, 2.9664, 3.3492], device='cuda:3')
+2024-08-31 16:34:00,542 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1846, simple_loss=0.2728, pruned_loss=0.03584, ctc_loss=0.06159, over 944034.00 frames.
+2024-08-31 16:34:01,381 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13816MB
+2024-08-31 16:34:02,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=238954.66666666666, ans=0.125
+2024-08-31 16:34:08,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238954.66666666666, ans=0.1
+2024-08-31 16:34:48,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=239114.66666666666, ans=0.125
+2024-08-31 16:34:58,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=239168.0, ans=10.0
+2024-08-31 16:35:04,408 INFO [train.py:1114] (3/4) Epoch 19, batch 50, loss[loss=0.1781, simple_loss=0.2498, pruned_loss=0.03856, ctc_loss=0.0734, over 19714.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.275, pruned_loss=0.05142, ctc_loss=0.0985, over 844167.97 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:35:09,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=239221.33333333334, ans=0.125
+2024-08-31 16:35:12,506 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.795e+02 2.006e+02 2.342e+02 4.821e+02, threshold=4.012e+02, percent-clipped=4.0
+2024-08-31 16:35:15,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=239274.66666666666, ans=0.05
+2024-08-31 16:35:21,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.19 vs. limit=15.0
+2024-08-31 16:35:23,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239274.66666666666, ans=0.125
+2024-08-31 16:35:31,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=239328.0, ans=0.07
+2024-08-31 16:35:40,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239381.33333333334, ans=0.0
+2024-08-31 16:36:03,808 INFO [train.py:1114] (3/4) Epoch 19, batch 100, loss[loss=0.2017, simple_loss=0.2619, pruned_loss=0.05181, ctc_loss=0.09444, over 19723.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2773, pruned_loss=0.05192, ctc_loss=0.09843, over 1498340.35 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:36:12,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.63 vs. limit=22.5
+2024-08-31 16:36:16,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=239541.33333333334, ans=0.09899494936611666
+2024-08-31 16:36:35,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.97 vs. limit=10.0
+2024-08-31 16:36:48,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=239648.0, ans=0.2
+2024-08-31 16:37:02,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=239701.33333333334, ans=0.125
+2024-08-31 16:37:06,676 INFO [train.py:1114] (3/4) Epoch 19, batch 150, loss[loss=0.1985, simple_loss=0.2594, pruned_loss=0.05091, ctc_loss=0.08925, over 19736.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2744, pruned_loss=0.05081, ctc_loss=0.09598, over 2027311.34 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-31 16:37:15,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.762e+02 1.953e+02 2.445e+02 3.524e+02, threshold=3.906e+02, percent-clipped=0.0
+2024-08-31 16:37:17,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=239808.0, ans=0.05
+2024-08-31 16:37:39,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239861.33333333334, ans=0.125
+2024-08-31 16:37:42,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=239861.33333333334, ans=0.0
+2024-08-31 16:37:44,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239914.66666666666, ans=0.125
+2024-08-31 16:37:44,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.58 vs. limit=10.0
+2024-08-31 16:37:59,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.76 vs. limit=15.0
+2024-08-31 16:38:07,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239968.0, ans=0.125
+2024-08-31 16:38:14,090 INFO [train.py:1114] (3/4) Epoch 19, batch 200, loss[loss=0.224, simple_loss=0.2887, pruned_loss=0.05587, ctc_loss=0.1187, over 18332.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2728, pruned_loss=0.04988, ctc_loss=0.09404, over 2435699.92 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0
+2024-08-31 16:38:19,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=240021.33333333334, ans=0.025
+2024-08-31 16:38:35,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=240074.66666666666, ans=0.2
+2024-08-31 16:38:39,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240128.0, ans=0.125
+2024-08-31 16:38:47,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=240128.0, ans=10.0
+2024-08-31 16:38:49,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=240181.33333333334, ans=0.125
+2024-08-31 16:39:10,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=240234.66666666666, ans=0.0
+2024-08-31 16:39:13,534 INFO [train.py:1114] (3/4) Epoch 19, batch 250, loss[loss=0.2275, simple_loss=0.2883, pruned_loss=0.06071, ctc_loss=0.1133, over 19409.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2731, pruned_loss=0.05043, ctc_loss=0.09509, over 2756001.92 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0
+2024-08-31 16:39:27,162 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.313e+02 1.733e+02 2.186e+02 2.853e+02 4.755e+02, threshold=4.372e+02, percent-clipped=7.0
+2024-08-31 16:40:02,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=240448.0, ans=0.125
+2024-08-31 16:40:03,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1
+2024-08-31 16:40:03,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=240448.0, ans=0.125
+2024-08-31 16:40:07,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=240501.33333333334, ans=0.1
+2024-08-31 16:40:11,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=240501.33333333334, ans=22.5
+2024-08-31 16:40:19,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=240554.66666666666, ans=0.2
+2024-08-31 16:40:20,385 INFO [train.py:1114] (3/4) Epoch 19, batch 300, loss[loss=0.2432, simple_loss=0.3044, pruned_loss=0.06703, ctc_loss=0.1196, over 19526.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2724, pruned_loss=0.05022, ctc_loss=0.09469, over 3001126.27 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0
+2024-08-31 16:40:20,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240554.66666666666, ans=0.0
+2024-08-31 16:40:21,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0
+2024-08-31 16:40:37,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=240608.0, ans=0.125
+2024-08-31 16:40:49,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240661.33333333334, ans=0.1
+2024-08-31 16:40:58,463 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 16:40:59,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240714.66666666666, ans=0.125
+2024-08-31 16:41:03,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=240714.66666666666, ans=0.125
+2024-08-31 16:41:04,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=240714.66666666666, ans=0.0
+2024-08-31 16:41:06,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.34 vs.
limit=15.0 +2024-08-31 16:41:09,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=240768.0, ans=0.2 +2024-08-31 16:41:10,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240768.0, ans=0.1 +2024-08-31 16:41:13,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=240768.0, ans=0.125 +2024-08-31 16:41:21,962 INFO [train.py:1114] (3/4) Epoch 19, batch 350, loss[loss=0.1807, simple_loss=0.2453, pruned_loss=0.04256, ctc_loss=0.07761, over 19758.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2728, pruned_loss=0.05017, ctc_loss=0.09431, over 3191307.89 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-31 16:41:26,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.69 vs. limit=15.0 +2024-08-31 16:41:30,311 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 1.653e+02 1.904e+02 2.349e+02 4.016e+02, threshold=3.809e+02, percent-clipped=0.0 +2024-08-31 16:42:08,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=240981.33333333334, ans=0.0 +2024-08-31 16:42:25,376 INFO [train.py:1114] (3/4) Epoch 19, batch 400, loss[loss=0.2116, simple_loss=0.2799, pruned_loss=0.05236, ctc_loss=0.09635, over 19488.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2724, pruned_loss=0.05006, ctc_loss=0.09425, over 3343080.18 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:42:53,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=241141.33333333334, ans=0.2 +2024-08-31 16:42:56,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=241194.66666666666, ans=0.0 +2024-08-31 16:43:02,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=15.0 +2024-08-31 16:43:06,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-31 16:43:12,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-31 16:43:18,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241248.0, ans=0.1 +2024-08-31 16:43:23,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=241301.33333333334, ans=0.025 +2024-08-31 16:43:34,373 INFO [train.py:1114] (3/4) Epoch 19, batch 450, loss[loss=0.2368, simple_loss=0.301, pruned_loss=0.06349, ctc_loss=0.1139, over 19594.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2722, pruned_loss=0.04984, ctc_loss=0.09376, over 3450659.63 frames. 
], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:43:42,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.686e+02 1.896e+02 2.370e+02 4.152e+02, threshold=3.792e+02, percent-clipped=1.0 +2024-08-31 16:43:45,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-31 16:43:46,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-31 16:43:53,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241408.0, ans=0.125 +2024-08-31 16:43:54,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-31 16:44:19,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.31 vs. limit=15.0 +2024-08-31 16:44:33,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241568.0, ans=0.125 +2024-08-31 16:44:35,446 INFO [train.py:1114] (3/4) Epoch 19, batch 500, loss[loss=0.2048, simple_loss=0.2745, pruned_loss=0.049, ctc_loss=0.09275, over 19678.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.271, pruned_loss=0.0493, ctc_loss=0.09283, over 3546334.18 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-31 16:44:35,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241621.33333333334, ans=0.1 +2024-08-31 16:44:40,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241621.33333333334, ans=0.2 +2024-08-31 16:44:47,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=15.0 +2024-08-31 16:45:12,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=241781.33333333334, ans=0.0 +2024-08-31 16:45:12,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.58 vs. limit=15.0 +2024-08-31 16:45:13,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=241781.33333333334, ans=0.025 +2024-08-31 16:46:04,267 INFO [train.py:1114] (3/4) Epoch 19, batch 550, loss[loss=0.2198, simple_loss=0.2887, pruned_loss=0.05598, ctc_loss=0.09733, over 19290.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.271, pruned_loss=0.04948, ctc_loss=0.09316, over 3608127.09 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:46:11,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.47 vs. 
limit=22.5 +2024-08-31 16:46:12,720 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.697e+02 1.983e+02 2.191e+02 3.507e+02, threshold=3.966e+02, percent-clipped=0.0 +2024-08-31 16:46:48,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-31 16:46:56,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=242048.0, ans=0.015 +2024-08-31 16:47:00,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=242048.0, ans=0.5 +2024-08-31 16:47:08,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0 +2024-08-31 16:47:11,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-31 16:47:12,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=242101.33333333334, ans=10.0 +2024-08-31 16:47:13,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.65 vs. limit=15.0 +2024-08-31 16:47:16,285 INFO [train.py:1114] (3/4) Epoch 19, batch 600, loss[loss=0.2402, simple_loss=0.3063, pruned_loss=0.06301, ctc_loss=0.1201, over 19361.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2722, pruned_loss=0.04989, ctc_loss=0.09405, over 3665674.33 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-31 16:47:23,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=242154.66666666666, ans=0.125 +2024-08-31 16:47:29,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=12.0 +2024-08-31 16:47:45,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.19 vs. limit=15.0 +2024-08-31 16:47:47,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-08-31 16:47:49,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.46 vs. limit=12.0 +2024-08-31 16:48:11,519 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:48:13,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.61 vs. limit=15.0 +2024-08-31 16:48:18,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.50 vs. limit=15.0 +2024-08-31 16:48:22,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=242314.66666666666, ans=0.0 +2024-08-31 16:48:29,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.77 vs. 
limit=15.0 +2024-08-31 16:48:31,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=242368.0, ans=0.125 +2024-08-31 16:48:39,685 INFO [train.py:1114] (3/4) Epoch 19, batch 650, loss[loss=0.2058, simple_loss=0.2733, pruned_loss=0.0499, ctc_loss=0.09602, over 19775.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2712, pruned_loss=0.0494, ctc_loss=0.09299, over 3716489.95 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:48:45,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.42 vs. limit=15.0 +2024-08-31 16:48:48,383 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 1.784e+02 2.044e+02 2.793e+02 4.792e+02, threshold=4.088e+02, percent-clipped=6.0 +2024-08-31 16:48:50,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=242474.66666666666, ans=0.2 +2024-08-31 16:49:28,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242581.33333333334, ans=0.1 +2024-08-31 16:49:36,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242634.66666666666, ans=0.1 +2024-08-31 16:50:01,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=242688.0, ans=0.125 +2024-08-31 16:50:02,078 INFO [train.py:1114] (3/4) Epoch 19, batch 700, loss[loss=0.2035, simple_loss=0.2655, pruned_loss=0.05033, ctc_loss=0.1022, over 19733.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2723, pruned_loss=0.05036, ctc_loss=0.09454, over 3749037.36 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-31 16:50:06,791 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.39 vs. limit=15.0 +2024-08-31 16:50:10,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=242688.0, ans=0.0 +2024-08-31 16:50:43,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242848.0, ans=0.125 +2024-08-31 16:51:24,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=242901.33333333334, ans=0.025 +2024-08-31 16:52:16,517 INFO [train.py:1114] (3/4) Epoch 19, batch 750, loss[loss=0.1894, simple_loss=0.2648, pruned_loss=0.04117, ctc_loss=0.07917, over 19495.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2715, pruned_loss=0.04996, ctc_loss=0.09392, over 3775690.54 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:52:17,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=242954.66666666666, ans=0.2 +2024-08-31 16:52:18,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.41 vs. 
limit=12.0 +2024-08-31 16:52:40,610 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.707e+02 2.012e+02 2.576e+02 4.596e+02, threshold=4.024e+02, percent-clipped=2.0 +2024-08-31 16:52:40,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=242954.66666666666, ans=0.0 +2024-08-31 16:52:47,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=243008.0, ans=0.125 +2024-08-31 16:52:57,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=243061.33333333334, ans=0.2 +2024-08-31 16:53:01,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=243061.33333333334, ans=0.05 +2024-08-31 16:53:12,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=243114.66666666666, ans=0.0 +2024-08-31 16:53:40,995 INFO [train.py:1114] (3/4) Epoch 19, batch 800, loss[loss=0.1907, simple_loss=0.254, pruned_loss=0.04586, ctc_loss=0.08915, over 19406.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2712, pruned_loss=0.04968, ctc_loss=0.09337, over 3797056.26 frames. ], batch size: 48, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:53:48,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243221.33333333334, ans=0.1 +2024-08-31 16:53:48,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.35 vs. limit=15.0 +2024-08-31 16:53:55,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243274.66666666666, ans=0.125 +2024-08-31 16:54:01,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.28 vs. limit=15.0 +2024-08-31 16:54:09,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.19 vs. limit=10.0 +2024-08-31 16:54:13,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=243328.0, ans=0.125 +2024-08-31 16:54:24,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=243381.33333333334, ans=0.125 +2024-08-31 16:54:37,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.99 vs. limit=22.5 +2024-08-31 16:54:40,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=243434.66666666666, ans=0.0 +2024-08-31 16:54:52,040 INFO [train.py:1114] (3/4) Epoch 19, batch 850, loss[loss=0.2151, simple_loss=0.2889, pruned_loss=0.0516, ctc_loss=0.09536, over 19632.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2706, pruned_loss=0.04929, ctc_loss=0.09257, over 3816610.56 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-31 16:54:58,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.20 vs. 
limit=15.0 +2024-08-31 16:55:00,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.677e+02 1.837e+02 2.316e+02 3.927e+02, threshold=3.675e+02, percent-clipped=0.0 +2024-08-31 16:55:24,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=243594.66666666666, ans=0.2 +2024-08-31 16:55:46,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-31 16:55:55,904 INFO [train.py:1114] (3/4) Epoch 19, batch 900, loss[loss=0.1863, simple_loss=0.2522, pruned_loss=0.04413, ctc_loss=0.08042, over 19423.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2711, pruned_loss=0.04967, ctc_loss=0.09342, over 3820995.48 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:56:01,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.60 vs. limit=15.0 +2024-08-31 16:56:23,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=12.0 +2024-08-31 16:56:26,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=243861.33333333334, ans=0.2 +2024-08-31 16:56:39,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=243914.66666666666, ans=0.025 +2024-08-31 16:56:46,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-31 16:57:04,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=244021.33333333334, ans=0.125 +2024-08-31 16:57:05,823 INFO [train.py:1114] (3/4) Epoch 19, batch 950, loss[loss=0.1849, simple_loss=0.2555, pruned_loss=0.04189, ctc_loss=0.07609, over 19517.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2714, pruned_loss=0.04986, ctc_loss=0.09364, over 3820732.71 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-31 16:57:07,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=244021.33333333334, ans=0.0 +2024-08-31 16:57:10,863 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 16:57:14,284 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.751e+02 2.034e+02 2.400e+02 3.857e+02, threshold=4.067e+02, percent-clipped=1.0 +2024-08-31 16:57:24,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=244074.66666666666, ans=0.125 +2024-08-31 16:57:26,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244074.66666666666, ans=0.125 +2024-08-31 16:57:51,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244181.33333333334, ans=0.0 +2024-08-31 16:57:58,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.45 vs. 
limit=15.0 +2024-08-31 16:58:06,275 INFO [train.py:1114] (3/4) Epoch 19, batch 1000, loss[loss=0.1735, simple_loss=0.2502, pruned_loss=0.0351, ctc_loss=0.06664, over 19864.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2726, pruned_loss=0.05022, ctc_loss=0.09441, over 3816160.43 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 16:59:54,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=244501.33333333334, ans=0.025 +2024-08-31 17:00:06,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=244501.33333333334, ans=0.1 +2024-08-31 17:00:07,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=244501.33333333334, ans=0.0 +2024-08-31 17:00:09,431 INFO [train.py:1114] (3/4) Epoch 19, batch 1050, loss[loss=0.2118, simple_loss=0.2823, pruned_loss=0.05171, ctc_loss=0.09477, over 19837.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2719, pruned_loss=0.05003, ctc_loss=0.09397, over 3823259.35 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-31 17:00:17,649 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.651e+02 1.935e+02 2.361e+02 3.363e+02, threshold=3.870e+02, percent-clipped=0.0 +2024-08-31 17:00:17,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-31 17:00:38,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-31 17:00:39,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-31 17:00:42,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=12.0 +2024-08-31 17:00:47,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=244714.66666666666, ans=0.0 +2024-08-31 17:00:52,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.12 vs. limit=22.5 +2024-08-31 17:01:12,071 INFO [train.py:1114] (3/4) Epoch 19, batch 1100, loss[loss=0.1919, simple_loss=0.2604, pruned_loss=0.04507, ctc_loss=0.08305, over 19584.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2712, pruned_loss=0.04943, ctc_loss=0.09303, over 3831325.97 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:01:52,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=244928.0, ans=0.125 +2024-08-31 17:01:55,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=244928.0, ans=0.0 +2024-08-31 17:02:07,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=244981.33333333334, ans=0.125 +2024-08-31 17:02:43,476 INFO [train.py:1114] (3/4) Epoch 19, batch 1150, loss[loss=0.2256, simple_loss=0.2805, pruned_loss=0.06213, ctc_loss=0.1159, over 19576.00 frames. 
], tot_loss[loss=0.2039, simple_loss=0.2712, pruned_loss=0.04963, ctc_loss=0.09344, over 3829831.16 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:03:11,405 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.693e+02 1.899e+02 2.295e+02 3.327e+02, threshold=3.798e+02, percent-clipped=0.0 +2024-08-31 17:03:11,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245088.0, ans=0.125 +2024-08-31 17:03:12,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=245088.0, ans=0.125 +2024-08-31 17:03:49,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=245248.0, ans=0.2 +2024-08-31 17:03:50,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=245248.0, ans=0.1 +2024-08-31 17:03:56,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=245301.33333333334, ans=0.0 +2024-08-31 17:03:56,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.61 vs. limit=22.5 +2024-08-31 17:04:04,674 INFO [train.py:1114] (3/4) Epoch 19, batch 1200, loss[loss=0.1896, simple_loss=0.2671, pruned_loss=0.04057, ctc_loss=0.07747, over 19836.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2722, pruned_loss=0.04996, ctc_loss=0.09411, over 3824867.73 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-31 17:04:30,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.72 vs. limit=15.0 +2024-08-31 17:04:46,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=245514.66666666666, ans=0.125 +2024-08-31 17:04:49,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245514.66666666666, ans=0.125 +2024-08-31 17:04:52,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245514.66666666666, ans=0.1 +2024-08-31 17:04:55,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=245568.0, ans=0.125 +2024-08-31 17:05:08,565 INFO [train.py:1114] (3/4) Epoch 19, batch 1250, loss[loss=0.2459, simple_loss=0.2992, pruned_loss=0.07012, ctc_loss=0.1306, over 19549.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2727, pruned_loss=0.05006, ctc_loss=0.09422, over 3843427.90 frames. 
], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-31 17:05:16,755 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.647e+02 1.911e+02 2.205e+02 3.499e+02, threshold=3.822e+02, percent-clipped=0.0 +2024-08-31 17:05:39,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=245728.0, ans=0.2 +2024-08-31 17:05:50,132 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:06:03,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=245781.33333333334, ans=0.2 +2024-08-31 17:06:15,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=245834.66666666666, ans=15.0 +2024-08-31 17:06:18,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=245888.0, ans=0.125 +2024-08-31 17:06:19,691 INFO [train.py:1114] (3/4) Epoch 19, batch 1300, loss[loss=0.2119, simple_loss=0.282, pruned_loss=0.05119, ctc_loss=0.09866, over 18812.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.272, pruned_loss=0.04998, ctc_loss=0.09405, over 3845729.62 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-31 17:06:35,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.10 vs. limit=15.0 +2024-08-31 17:06:38,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=245941.33333333334, ans=0.2 +2024-08-31 17:06:41,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245941.33333333334, ans=0.0 +2024-08-31 17:06:54,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245994.66666666666, ans=0.1 +2024-08-31 17:07:15,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246101.33333333334, ans=0.0 +2024-08-31 17:07:22,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=246101.33333333334, ans=0.2 +2024-08-31 17:07:25,630 INFO [train.py:1114] (3/4) Epoch 19, batch 1350, loss[loss=0.1712, simple_loss=0.2488, pruned_loss=0.0341, ctc_loss=0.06333, over 19764.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2711, pruned_loss=0.04915, ctc_loss=0.09238, over 3857959.38 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:07:39,286 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.765e+02 2.070e+02 2.720e+02 4.418e+02, threshold=4.141e+02, percent-clipped=1.0 +2024-08-31 17:08:07,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246314.66666666666, ans=0.125 +2024-08-31 17:08:35,879 INFO [train.py:1114] (3/4) Epoch 19, batch 1400, loss[loss=0.1736, simple_loss=0.2353, pruned_loss=0.04081, ctc_loss=0.07566, over 19666.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2704, pruned_loss=0.04884, ctc_loss=0.09181, over 3865333.02 frames. 
], batch size: 46, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:08:44,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=246421.33333333334, ans=0.125 +2024-08-31 17:08:44,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=246421.33333333334, ans=0.2 +2024-08-31 17:09:09,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=246474.66666666666, ans=0.1 +2024-08-31 17:09:34,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=246581.33333333334, ans=0.1 +2024-08-31 17:09:36,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=246634.66666666666, ans=0.025 +2024-08-31 17:09:36,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=246634.66666666666, ans=0.0 +2024-08-31 17:09:53,651 INFO [train.py:1114] (3/4) Epoch 19, batch 1450, loss[loss=0.2121, simple_loss=0.2818, pruned_loss=0.05175, ctc_loss=0.09734, over 19675.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2712, pruned_loss=0.04929, ctc_loss=0.09278, over 3863411.14 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 64.0 +2024-08-31 17:10:02,074 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.290e+02 1.691e+02 1.919e+02 2.362e+02 3.353e+02, threshold=3.838e+02, percent-clipped=0.0 +2024-08-31 17:10:08,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-31 17:11:07,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=246741.33333333334, ans=0.0 +2024-08-31 17:11:25,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246794.66666666666, ans=0.1 +2024-08-31 17:11:31,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.82 vs. limit=15.0 +2024-08-31 17:11:47,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=22.5 +2024-08-31 17:11:53,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246901.33333333334, ans=0.125 +2024-08-31 17:12:02,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.11 vs. limit=15.0 +2024-08-31 17:12:11,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-31 17:12:12,390 INFO [train.py:1114] (3/4) Epoch 19, batch 1500, loss[loss=0.2086, simple_loss=0.2889, pruned_loss=0.04672, ctc_loss=0.08717, over 19559.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.272, pruned_loss=0.04963, ctc_loss=0.09359, over 3862802.71 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:14:38,395 INFO [train.py:1114] (3/4) Epoch 19, batch 1550, loss[loss=0.2032, simple_loss=0.2881, pruned_loss=0.04248, ctc_loss=0.0832, over 19587.00 frames. 
], tot_loss[loss=0.2043, simple_loss=0.2719, pruned_loss=0.04961, ctc_loss=0.09344, over 3847731.08 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 64.0 +2024-08-31 17:14:46,790 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.654e+02 1.883e+02 2.328e+02 3.879e+02, threshold=3.765e+02, percent-clipped=1.0 +2024-08-31 17:14:48,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=247221.33333333334, ans=0.125 +2024-08-31 17:15:08,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=247274.66666666666, ans=0.125 +2024-08-31 17:15:32,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=12.0 +2024-08-31 17:16:15,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=247381.33333333334, ans=0.0 +2024-08-31 17:16:15,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247381.33333333334, ans=0.1 +2024-08-31 17:16:40,629 INFO [train.py:1114] (3/4) Epoch 19, batch 1600, loss[loss=0.2071, simple_loss=0.2805, pruned_loss=0.04894, ctc_loss=0.08984, over 19839.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2717, pruned_loss=0.04959, ctc_loss=0.09346, over 3836938.66 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:17:00,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=247541.33333333334, ans=0.125 +2024-08-31 17:17:11,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.22 vs. limit=22.5 +2024-08-31 17:17:20,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.19 vs. limit=15.0 +2024-08-31 17:17:35,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=247701.33333333334, ans=0.0 +2024-08-31 17:17:41,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0 +2024-08-31 17:17:41,997 INFO [train.py:1114] (3/4) Epoch 19, batch 1650, loss[loss=0.1891, simple_loss=0.2738, pruned_loss=0.03846, ctc_loss=0.06853, over 19648.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2713, pruned_loss=0.04964, ctc_loss=0.09376, over 3833338.69 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 64.0 +2024-08-31 17:17:46,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=247754.66666666666, ans=0.1 +2024-08-31 17:17:50,571 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.753e+02 1.927e+02 2.360e+02 4.500e+02, threshold=3.853e+02, percent-clipped=4.0 +2024-08-31 17:18:30,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.96 vs. 
limit=15.0 +2024-08-31 17:18:38,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247968.0, ans=0.125 +2024-08-31 17:18:44,908 INFO [train.py:1114] (3/4) Epoch 19, batch 1700, loss[loss=0.1991, simple_loss=0.254, pruned_loss=0.05162, ctc_loss=0.1025, over 19679.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2708, pruned_loss=0.04899, ctc_loss=0.09251, over 3847403.35 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 64.0 +2024-08-31 17:18:53,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=248021.33333333334, ans=0.125 +2024-08-31 17:19:02,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=248074.66666666666, ans=0.0 +2024-08-31 17:19:10,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=248128.0, ans=0.0 +2024-08-31 17:19:24,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0 +2024-08-31 17:19:32,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=248181.33333333334, ans=0.0 +2024-08-31 17:19:32,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=248181.33333333334, ans=0.025 +2024-08-31 17:19:42,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.61 vs. limit=15.0 +2024-08-31 17:19:52,915 INFO [train.py:1114] (3/4) Epoch 19, batch 1750, loss[loss=0.1921, simple_loss=0.2525, pruned_loss=0.04941, ctc_loss=0.08211, over 19628.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2706, pruned_loss=0.04896, ctc_loss=0.09225, over 3851796.27 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:20:02,144 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.715e+02 1.941e+02 2.441e+02 4.524e+02, threshold=3.882e+02, percent-clipped=3.0 +2024-08-31 17:20:28,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=248448.0, ans=0.125 +2024-08-31 17:20:35,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248448.0, ans=0.1 +2024-08-31 17:20:49,898 INFO [train.py:1114] (3/4) Epoch 19, batch 1800, loss[loss=0.2033, simple_loss=0.2817, pruned_loss=0.04603, ctc_loss=0.08216, over 19609.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.271, pruned_loss=0.04888, ctc_loss=0.09214, over 3853062.75 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-31 17:20:51,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=248554.66666666666, ans=0.0 +2024-08-31 17:21:14,183 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=6.153e-03 +2024-08-31 17:21:22,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.57 vs. 
limit=22.5 +2024-08-31 17:21:36,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=248768.0, ans=0.125 +2024-08-31 17:21:47,143 INFO [train.py:1114] (3/4) Epoch 19, batch 1850, loss[loss=0.2155, simple_loss=0.2882, pruned_loss=0.05173, ctc_loss=0.09813, over 19589.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2708, pruned_loss=0.04887, ctc_loss=0.09222, over 3856845.91 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:21:53,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.17 vs. limit=22.5 +2024-08-31 17:21:56,046 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.825e+02 2.203e+02 3.044e+02 4.782e+02, threshold=4.406e+02, percent-clipped=6.0 +2024-08-31 17:22:11,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=248874.66666666666, ans=0.025 +2024-08-31 17:22:47,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249034.66666666666, ans=0.125 +2024-08-31 17:22:51,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249088.0, ans=0.125 +2024-08-31 17:22:52,470 INFO [train.py:1114] (3/4) Epoch 19, batch 1900, loss[loss=0.1986, simple_loss=0.2663, pruned_loss=0.04762, ctc_loss=0.08905, over 19656.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.271, pruned_loss=0.04878, ctc_loss=0.09183, over 3862137.32 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-31 17:23:16,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=249194.66666666666, ans=0.125 +2024-08-31 17:23:18,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=249194.66666666666, ans=0.2 +2024-08-31 17:23:45,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=249301.33333333334, ans=0.04949747468305833 +2024-08-31 17:23:48,976 INFO [train.py:1114] (3/4) Epoch 19, batch 1950, loss[loss=0.1863, simple_loss=0.2483, pruned_loss=0.04522, ctc_loss=0.08497, over 19588.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2718, pruned_loss=0.04907, ctc_loss=0.09228, over 3871034.46 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:23:58,749 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.608e+02 1.802e+02 2.157e+02 4.545e+02, threshold=3.604e+02, percent-clipped=1.0 +2024-08-31 17:24:01,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=249408.0, ans=0.0 +2024-08-31 17:24:11,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.39 vs. limit=15.0 +2024-08-31 17:24:36,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=8.55 vs. 
limit=22.5 +2024-08-31 17:24:39,031 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 17:24:50,837 INFO [train.py:1114] (3/4) Epoch 19, batch 2000, loss[loss=0.1846, simple_loss=0.2436, pruned_loss=0.0449, ctc_loss=0.08945, over 19668.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2723, pruned_loss=0.04924, ctc_loss=0.09261, over 3856443.69 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:25:03,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=249674.66666666666, ans=10.0 +2024-08-31 17:25:15,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.57 vs. limit=15.0 +2024-08-31 17:25:35,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=249834.66666666666, ans=0.125 +2024-08-31 17:25:35,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249834.66666666666, ans=0.125 +2024-08-31 17:25:46,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=249888.0, ans=0.125 +2024-08-31 17:25:47,795 INFO [train.py:1114] (3/4) Epoch 19, batch 2050, loss[loss=0.1951, simple_loss=0.2578, pruned_loss=0.04819, ctc_loss=0.09005, over 19732.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2711, pruned_loss=0.04884, ctc_loss=0.09196, over 3853661.78 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-31 17:25:54,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=249888.0, ans=0.0 +2024-08-31 17:25:57,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.719e+02 2.018e+02 2.402e+02 3.677e+02, threshold=4.037e+02, percent-clipped=1.0 +2024-08-31 17:26:01,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249941.33333333334, ans=0.0 +2024-08-31 17:26:18,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.45 vs. limit=22.5 +2024-08-31 17:26:22,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250048.0, ans=0.125 +2024-08-31 17:26:44,670 INFO [train.py:1114] (3/4) Epoch 19, batch 2100, loss[loss=0.2095, simple_loss=0.2769, pruned_loss=0.05215, ctc_loss=0.09478, over 19771.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2703, pruned_loss=0.04833, ctc_loss=0.09105, over 3860442.32 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-31 17:26:48,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.48 vs. 
limit=15.0
+2024-08-31 17:26:49,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250154.66666666666, ans=0.1
+2024-08-31 17:27:06,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=250261.33333333334, ans=0.0
+2024-08-31 17:27:10,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=250261.33333333334, ans=0.0
+2024-08-31 17:27:12,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.55 vs. limit=22.5
+2024-08-31 17:27:16,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250261.33333333334, ans=0.125
+2024-08-31 17:27:29,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.90 vs. limit=15.0
+2024-08-31 17:27:35,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=250368.0, ans=0.125
+2024-08-31 17:27:36,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=250368.0, ans=0.0
+2024-08-31 17:27:38,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=250368.0, ans=0.125
+2024-08-31 17:27:42,498 INFO [train.py:1114] (3/4) Epoch 19, batch 2150, loss[loss=0.1845, simple_loss=0.2589, pruned_loss=0.04006, ctc_loss=0.07504, over 19850.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2699, pruned_loss=0.04827, ctc_loss=0.09096, over 3871247.27 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0
+2024-08-31 17:27:42,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=250421.33333333334, ans=0.2
+2024-08-31 17:27:51,498 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.672e+02 1.975e+02 2.523e+02 4.782e+02, threshold=3.951e+02, percent-clipped=2.0
+2024-08-31 17:27:51,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250421.33333333334, ans=0.125
+2024-08-31 17:28:07,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250528.0, ans=0.125
+2024-08-31 17:28:18,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250581.33333333334, ans=0.125
+2024-08-31 17:28:31,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=250634.66666666666, ans=0.0
+2024-08-31 17:28:35,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=250634.66666666666, ans=0.2
+2024-08-31 17:28:39,679 INFO [train.py:1114] (3/4) Epoch 19, batch 2200, loss[loss=0.2016, simple_loss=0.2703, pruned_loss=0.04785, ctc_loss=0.09283, over 19589.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2699, pruned_loss=0.0483, ctc_loss=0.09093, over 3869966.80 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0
+2024-08-31 17:28:52,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=250741.33333333334, ans=0.1
+2024-08-31 17:28:53,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250741.33333333334, ans=0.125
+2024-08-31 17:29:11,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=250794.66666666666, ans=0.0
+2024-08-31 17:29:11,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250794.66666666666, ans=0.1
+2024-08-31 17:29:27,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=250901.33333333334, ans=0.0
+2024-08-31 17:29:38,744 INFO [train.py:1114] (3/4) Epoch 19, batch 2250, loss[loss=0.2266, simple_loss=0.3, pruned_loss=0.05582, ctc_loss=0.1037, over 19610.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2707, pruned_loss=0.04869, ctc_loss=0.09164, over 3869114.50 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0
+2024-08-31 17:29:47,351 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.680e+02 1.896e+02 2.375e+02 5.292e+02, threshold=3.791e+02, percent-clipped=4.0
+2024-08-31 17:29:57,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=251008.0, ans=0.0
+2024-08-31 17:29:58,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=251008.0, ans=0.0
+2024-08-31 17:29:59,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=251008.0, ans=0.0
+2024-08-31 17:30:01,774 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 17:30:03,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=12.0
+2024-08-31 17:30:12,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0
+2024-08-31 17:30:33,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251168.0, ans=0.125
+2024-08-31 17:30:40,030 INFO [train.py:1114] (3/4) Epoch 19, batch 2300, loss[loss=0.1767, simple_loss=0.2504, pruned_loss=0.037, ctc_loss=0.07256, over 19483.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2697, pruned_loss=0.04856, ctc_loss=0.09145, over 3863046.08 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0
+2024-08-31 17:30:43,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=251221.33333333334, ans=0.0
+2024-08-31 17:30:46,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=251221.33333333334, ans=0.0
+2024-08-31 17:30:55,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=251274.66666666666, ans=0.125
+2024-08-31 17:31:03,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.28 vs. limit=15.0
+2024-08-31 17:31:05,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0
+2024-08-31 17:31:05,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.14 vs. limit=15.0
+2024-08-31 17:31:36,399 INFO [train.py:1114] (3/4) Epoch 19, batch 2350, loss[loss=0.2021, simple_loss=0.2764, pruned_loss=0.04688, ctc_loss=0.08488, over 19682.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2702, pruned_loss=0.04873, ctc_loss=0.09152, over 3865447.04 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0
+2024-08-31 17:31:43,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=251488.0, ans=0.0
+2024-08-31 17:31:45,225 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.718e+02 2.013e+02 2.563e+02 3.706e+02, threshold=4.026e+02, percent-clipped=0.0
+2024-08-31 17:31:56,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=251541.33333333334, ans=0.07
+2024-08-31 17:32:01,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=251594.66666666666, ans=0.0
+2024-08-31 17:32:12,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=251594.66666666666, ans=0.1
+2024-08-31 17:32:14,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251648.0, ans=0.125
+2024-08-31 17:32:32,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=251701.33333333334, ans=0.0
+2024-08-31 17:32:36,571 INFO [train.py:1114] (3/4) Epoch 19, batch 2400, loss[loss=0.2284, simple_loss=0.2933, pruned_loss=0.05933, ctc_loss=0.1124, over 19361.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2726, pruned_loss=0.04985, ctc_loss=0.09325, over 3858986.01 frames. ], batch size: 71, lr: 7.79e-03, grad_scale: 32.0
+2024-08-31 17:32:43,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=251754.66666666666, ans=0.0
+2024-08-31 17:32:44,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251754.66666666666, ans=0.0
+2024-08-31 17:32:45,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=251754.66666666666, ans=0.0
+2024-08-31 17:32:46,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=251754.66666666666, ans=0.05
+2024-08-31 17:32:48,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=251808.0, ans=0.125
+2024-08-31 17:32:49,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=251808.0, ans=0.2
+2024-08-31 17:32:50,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251808.0, ans=0.125
+2024-08-31 17:33:25,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=251914.66666666666, ans=0.0
+2024-08-31 17:33:39,839 INFO [train.py:1114] (3/4) Epoch 19, batch 2450, loss[loss=0.2636, simple_loss=0.3023, pruned_loss=0.08088, ctc_loss=0.158, over 12773.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2764, pruned_loss=0.05257, ctc_loss=0.09876, over 3730665.26 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0
+2024-08-31 17:33:44,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252021.33333333334, ans=0.1
+2024-08-31 17:33:48,951 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 1.610e+02 1.856e+02 2.081e+02 3.075e+02, threshold=3.711e+02, percent-clipped=0.0
+2024-08-31 17:33:57,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=252074.66666666666, ans=0.125
+2024-08-31 17:34:04,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=252128.0, ans=0.025
+2024-08-31 17:34:11,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=252128.0, ans=0.2
+2024-08-31 17:34:14,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=252181.33333333334, ans=0.0
+2024-08-31 17:34:18,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.76 vs. limit=15.0
+2024-08-31 17:36:18,534 INFO [train.py:1114] (3/4) Epoch 20, batch 0, loss[loss=0.2192, simple_loss=0.2735, pruned_loss=0.05901, ctc_loss=0.117, over 19388.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2735, pruned_loss=0.05901, ctc_loss=0.117, over 19388.00 frames. ], batch size: 48, lr: 7.58e-03, grad_scale: 32.0
+2024-08-31 17:36:18,534 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-31 17:36:23,457 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5962, 3.1128, 2.1584, 2.8092], device='cuda:3')
+2024-08-31 17:36:28,434 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1834, simple_loss=0.2715, pruned_loss=0.03542, ctc_loss=0.061, over 944034.00 frames.
+2024-08-31 17:36:28,434 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13816MB
+2024-08-31 17:37:17,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=252448.0, ans=0.04949747468305833
+2024-08-31 17:37:20,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=252448.0, ans=0.125
+2024-08-31 17:37:23,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=252448.0, ans=0.0
+2024-08-31 17:37:27,970 INFO [train.py:1114] (3/4) Epoch 20, batch 50, loss[loss=0.1903, simple_loss=0.257, pruned_loss=0.04383, ctc_loss=0.08962, over 19735.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2741, pruned_loss=0.05092, ctc_loss=0.0973, over 843375.14 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 32.0
+2024-08-31 17:37:51,151 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.696e+02 1.962e+02 2.261e+02 4.473e+02, threshold=3.923e+02, percent-clipped=2.0
+2024-08-31 17:40:00,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252661.33333333334, ans=0.1
+2024-08-31 17:41:27,191 INFO [train.py:1114] (3/4) Epoch 20, batch 100, loss[loss=0.1856, simple_loss=0.2579, pruned_loss=0.04111, ctc_loss=0.07769, over 19726.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.275, pruned_loss=0.05043, ctc_loss=0.09577, over 1497870.57 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0
+2024-08-31 17:41:32,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=252768.0, ans=0.025
+2024-08-31 17:42:11,158 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 17:42:17,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=252821.33333333334, ans=0.0
+2024-08-31 17:42:26,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=252821.33333333334, ans=0.125
+2024-08-31 17:42:36,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=252874.66666666666, ans=0.125
+2024-08-31 17:42:44,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=252874.66666666666, ans=0.0
+2024-08-31 17:43:01,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=12.0
+2024-08-31 17:43:48,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=252981.33333333334, ans=0.04949747468305833
+2024-08-31 17:43:59,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=252981.33333333334, ans=0.0
+2024-08-31 17:43:59,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252981.33333333334, ans=0.1
+2024-08-31 17:44:06,483 INFO [train.py:1114] (3/4) Epoch 20, batch 150, loss[loss=0.1703, simple_loss=0.237, pruned_loss=0.03723, ctc_loss=0.07265, over 19717.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2716, pruned_loss=0.04855, ctc_loss=0.09182, over 2027723.87 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 32.0
+2024-08-31 17:44:06,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=253034.66666666666, ans=0.125
+2024-08-31 17:44:59,737 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.634e+02 1.821e+02 2.194e+02 3.683e+02, threshold=3.641e+02, percent-clipped=0.0
+2024-08-31 17:45:32,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=253194.66666666666, ans=0.04949747468305833
+2024-08-31 17:45:59,899 INFO [train.py:1114] (3/4) Epoch 20, batch 200, loss[loss=0.2032, simple_loss=0.2739, pruned_loss=0.04742, ctc_loss=0.09421, over 18525.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2706, pruned_loss=0.04839, ctc_loss=0.09115, over 2435784.70 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0
+2024-08-31 17:46:03,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=253301.33333333334, ans=0.125
+2024-08-31 17:46:05,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253301.33333333334, ans=0.0
+2024-08-31 17:46:49,995 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 17:46:58,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=253461.33333333334, ans=0.0
+2024-08-31 17:47:12,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=253461.33333333334, ans=0.0
+2024-08-31 17:47:33,343 INFO [train.py:1114] (3/4) Epoch 20, batch 250, loss[loss=0.2162, simple_loss=0.2871, pruned_loss=0.0532, ctc_loss=0.09738, over 19355.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2696, pruned_loss=0.04781, ctc_loss=0.09011, over 2756034.48 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 32.0
+2024-08-31 17:47:56,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.99 vs. limit=10.0
+2024-08-31 17:47:59,372 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.745e+02 2.044e+02 2.602e+02 4.259e+02, threshold=4.089e+02, percent-clipped=6.0
+2024-08-31 17:49:00,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=253674.66666666666, ans=0.0
+2024-08-31 17:49:45,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253728.0, ans=0.0
+2024-08-31 17:49:54,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=253781.33333333334, ans=0.125
+2024-08-31 17:50:00,349 INFO [train.py:1114] (3/4) Epoch 20, batch 300, loss[loss=0.2186, simple_loss=0.287, pruned_loss=0.05508, ctc_loss=0.1, over 19518.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2694, pruned_loss=0.04748, ctc_loss=0.08951, over 3000612.69 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0
+2024-08-31 17:50:14,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.57 vs. limit=10.0
+2024-08-31 17:50:34,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. limit=15.0
+2024-08-31 17:50:42,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.53 vs. limit=15.0
+2024-08-31 17:50:56,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=254048.0, ans=0.0
+2024-08-31 17:51:05,490 INFO [train.py:1114] (3/4) Epoch 20, batch 350, loss[loss=0.1775, simple_loss=0.2409, pruned_loss=0.04109, ctc_loss=0.07976, over 19756.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2701, pruned_loss=0.04792, ctc_loss=0.09024, over 3189836.52 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0
+2024-08-31 17:51:10,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=254101.33333333334, ans=0.035
+2024-08-31 17:51:14,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=254101.33333333334, ans=0.125
+2024-08-31 17:51:26,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.703e+02 1.946e+02 2.321e+02 4.034e+02, threshold=3.891e+02, percent-clipped=0.0
+2024-08-31 17:51:45,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=254261.33333333334, ans=0.125
+2024-08-31 17:51:46,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=254261.33333333334, ans=0.125
+2024-08-31 17:52:04,338 INFO [train.py:1114] (3/4) Epoch 20, batch 400, loss[loss=0.213, simple_loss=0.2739, pruned_loss=0.05448, ctc_loss=0.1079, over 19496.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2696, pruned_loss=0.04775, ctc_loss=0.09009, over 3341479.33 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0
+2024-08-31 17:52:10,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.41 vs. limit=22.5
+2024-08-31 17:52:11,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=254368.0, ans=0.025
+2024-08-31 17:52:20,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254421.33333333334, ans=0.1
+2024-08-31 17:52:58,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=254581.33333333334, ans=0.125
+2024-08-31 17:53:10,612 INFO [train.py:1114] (3/4) Epoch 20, batch 450, loss[loss=0.2178, simple_loss=0.2877, pruned_loss=0.05247, ctc_loss=0.1073, over 19610.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2694, pruned_loss=0.04778, ctc_loss=0.09006, over 3448990.92 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 32.0
+2024-08-31 17:53:16,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254634.66666666666, ans=0.125
+2024-08-31 17:53:18,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.47 vs. limit=10.0
+2024-08-31 17:53:23,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=254688.0, ans=0.125
+2024-08-31 17:53:31,688 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 1.627e+02 1.777e+02 2.217e+02 3.582e+02, threshold=3.554e+02, percent-clipped=0.0
+2024-08-31 17:53:42,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254741.33333333334, ans=0.125
+2024-08-31 17:54:01,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=254794.66666666666, ans=0.125
+2024-08-31 17:54:08,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=254848.0, ans=0.2
+2024-08-31 17:54:15,350 INFO [train.py:1114] (3/4) Epoch 20, batch 500, loss[loss=0.2191, simple_loss=0.2964, pruned_loss=0.05141, ctc_loss=0.09738, over 19642.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2687, pruned_loss=0.04764, ctc_loss=0.08968, over 3545247.99 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 32.0
+2024-08-31 17:54:50,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=255061.33333333334, ans=0.125
+2024-08-31 17:54:55,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=255061.33333333334, ans=0.125
+2024-08-31 17:55:14,677 INFO [train.py:1114] (3/4) Epoch 20, batch 550, loss[loss=0.2286, simple_loss=0.2928, pruned_loss=0.05927, ctc_loss=0.1146, over 19211.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2688, pruned_loss=0.04781, ctc_loss=0.09013, over 3606801.94 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 32.0
+2024-08-31 17:55:14,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=255168.0, ans=0.125
+2024-08-31 17:55:27,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=255221.33333333334, ans=0.125
+2024-08-31 17:55:35,929 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.640e+02 1.908e+02 2.178e+02 3.229e+02, threshold=3.816e+02, percent-clipped=0.0
+2024-08-31 17:55:56,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0
+2024-08-31 17:56:03,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=255328.0, ans=0.0
+2024-08-31 17:56:04,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=255328.0, ans=0.0
+2024-08-31 17:56:07,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=255381.33333333334, ans=0.125
+2024-08-31 17:56:22,748 INFO [train.py:1114] (3/4) Epoch 20, batch 600, loss[loss=0.2408, simple_loss=0.2994, pruned_loss=0.06423, ctc_loss=0.1342, over 19330.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.27, pruned_loss=0.04856, ctc_loss=0.09137, over 3663846.54 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 32.0
+2024-08-31 17:56:23,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=255434.66666666666, ans=0.125
+2024-08-31 17:56:36,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255488.0, ans=0.1
+2024-08-31 17:56:49,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255541.33333333334, ans=0.125
+2024-08-31 17:57:01,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255594.66666666666, ans=0.125
+2024-08-31 17:57:02,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=255594.66666666666, ans=0.125
+2024-08-31 17:57:11,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255648.0, ans=0.1
+2024-08-31 17:57:15,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=255648.0, ans=0.125
+2024-08-31 17:57:22,395 INFO [train.py:1114] (3/4) Epoch 20, batch 650, loss[loss=0.1994, simple_loss=0.2744, pruned_loss=0.04576, ctc_loss=0.0824, over 19774.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2697, pruned_loss=0.04837, ctc_loss=0.09075, over 3714515.65 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 32.0
+2024-08-31 17:57:25,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.88 vs. limit=15.0
+2024-08-31 17:57:39,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=255754.66666666666, ans=0.0
+2024-08-31 17:57:44,325 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.759e+02 2.153e+02 2.838e+02 5.166e+02, threshold=4.306e+02, percent-clipped=8.0
+2024-08-31 17:57:55,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=255808.0, ans=0.09899494936611666
+2024-08-31 17:58:22,793 INFO [train.py:1114] (3/4) Epoch 20, batch 700, loss[loss=0.199, simple_loss=0.2641, pruned_loss=0.04807, ctc_loss=0.09417, over 19716.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2703, pruned_loss=0.04858, ctc_loss=0.09106, over 3746641.94 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 32.0
+2024-08-31 17:58:34,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255968.0, ans=0.0
+2024-08-31 17:58:42,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=256021.33333333334, ans=0.125
+2024-08-31 17:58:59,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256128.0, ans=0.0
+2024-08-31 17:59:04,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=256128.0, ans=0.025
+2024-08-31 17:59:12,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256181.33333333334, ans=0.125
+2024-08-31 17:59:24,898 INFO [train.py:1114] (3/4) Epoch 20, batch 750, loss[loss=0.1972, simple_loss=0.2751, pruned_loss=0.04377, ctc_loss=0.07934, over 19493.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2698, pruned_loss=0.04821, ctc_loss=0.09042, over 3772097.76 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 32.0
+2024-08-31 17:59:37,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=256234.66666666666, ans=0.0
+2024-08-31 17:59:54,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=256288.0, ans=0.0
+2024-08-31 17:59:58,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.642e+02 1.855e+02 2.095e+02 3.716e+02, threshold=3.709e+02, percent-clipped=0.0
+2024-08-31 17:59:58,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=256288.0, ans=0.125
+2024-08-31 18:00:13,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=256394.66666666666, ans=0.025
+2024-08-31 18:00:18,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256394.66666666666, ans=0.125
+2024-08-31 18:00:23,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=256394.66666666666, ans=0.2
+2024-08-31 18:00:24,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=256448.0, ans=0.0
+2024-08-31 18:00:28,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=256448.0, ans=0.0
+2024-08-31 18:00:42,914 INFO [train.py:1114] (3/4) Epoch 20, batch 800, loss[loss=0.1804, simple_loss=0.2542, pruned_loss=0.03854, ctc_loss=0.07378, over 19413.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2694, pruned_loss=0.0478, ctc_loss=0.08962, over 3794382.28 frames. ], batch size: 48, lr: 7.52e-03, grad_scale: 32.0
+2024-08-31 18:00:43,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=256501.33333333334, ans=0.025
+2024-08-31 18:00:43,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=256501.33333333334, ans=0.0
+2024-08-31 18:00:43,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256501.33333333334, ans=0.125
+2024-08-31 18:00:46,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=256501.33333333334, ans=0.0
+2024-08-31 18:01:02,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=256554.66666666666, ans=0.125
+2024-08-31 18:01:14,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.87 vs. limit=22.5
+2024-08-31 18:01:14,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.18 vs. limit=22.5
+2024-08-31 18:01:42,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=256768.0, ans=0.2
+2024-08-31 18:01:43,047 INFO [train.py:1114] (3/4) Epoch 20, batch 850, loss[loss=0.2142, simple_loss=0.2812, pruned_loss=0.05276, ctc_loss=0.1043, over 19650.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2691, pruned_loss=0.04798, ctc_loss=0.0904, over 3814968.13 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 32.0
+2024-08-31 18:01:50,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256768.0, ans=0.125
+2024-08-31 18:02:00,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=256821.33333333334, ans=15.0
+2024-08-31 18:02:05,173 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.672e+02 2.009e+02 2.661e+02 4.692e+02, threshold=4.019e+02, percent-clipped=5.0
+2024-08-31 18:02:21,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.35 vs. limit=15.0
+2024-08-31 18:02:42,833 INFO [train.py:1114] (3/4) Epoch 20, batch 900, loss[loss=0.1972, simple_loss=0.2577, pruned_loss=0.04924, ctc_loss=0.09548, over 19400.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2701, pruned_loss=0.04882, ctc_loss=0.09193, over 3818685.56 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 32.0
+2024-08-31 18:02:44,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=257034.66666666666, ans=0.09899494936611666
+2024-08-31 18:02:48,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=257034.66666666666, ans=0.025
+2024-08-31 18:03:12,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=257141.33333333334, ans=0.0
+2024-08-31 18:03:13,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257141.33333333334, ans=0.125
+2024-08-31 18:03:13,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=257141.33333333334, ans=0.125
+2024-08-31 18:03:44,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.69 vs. limit=10.0
+2024-08-31 18:03:50,701 INFO [train.py:1114] (3/4) Epoch 20, batch 950, loss[loss=0.2256, simple_loss=0.2769, pruned_loss=0.06302, ctc_loss=0.1207, over 19498.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2707, pruned_loss=0.04897, ctc_loss=0.09251, over 3819873.11 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 32.0
+2024-08-31 18:03:53,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0
+2024-08-31 18:04:12,191 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.674e+02 1.914e+02 2.385e+02 5.476e+02, threshold=3.829e+02, percent-clipped=1.0
+2024-08-31 18:04:12,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=257354.66666666666, ans=0.025
+2024-08-31 18:04:51,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257408.0, ans=0.0
+2024-08-31 18:05:02,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257461.33333333334, ans=0.125
+2024-08-31 18:05:08,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=257461.33333333334, ans=0.09899494936611666
+2024-08-31 18:05:13,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=257514.66666666666, ans=0.2
+2024-08-31 18:05:19,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=257514.66666666666, ans=0.0
+2024-08-31 18:05:20,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=257514.66666666666, ans=0.0
+2024-08-31 18:05:25,097 INFO [train.py:1114] (3/4) Epoch 20, batch 1000, loss[loss=0.1948, simple_loss=0.2574, pruned_loss=0.04837, ctc_loss=0.08875, over 19858.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2713, pruned_loss=0.04916, ctc_loss=0.09292, over 3816153.61 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0
+2024-08-31 18:05:29,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257568.0, ans=0.125
+2024-08-31 18:05:35,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=257568.0, ans=0.2
+2024-08-31 18:10:14,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.50 vs. limit=15.0
+2024-08-31 18:10:30,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.66 vs. limit=15.0
+2024-08-31 18:12:15,977 INFO [train.py:1114] (3/4) Epoch 20, batch 1050, loss[loss=0.1993, simple_loss=0.2768, pruned_loss=0.04446, ctc_loss=0.08224, over 19836.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2703, pruned_loss=0.04882, ctc_loss=0.09232, over 3821136.31 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0
+2024-08-31 18:12:37,418 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.683e+02 1.941e+02 2.234e+02 3.103e+02, threshold=3.882e+02, percent-clipped=0.0
+2024-08-31 18:12:47,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=257941.33333333334, ans=0.035
+2024-08-31 18:12:49,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=22.5
+2024-08-31 18:13:14,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=258048.0, ans=0.125
+2024-08-31 18:13:23,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=258101.33333333334, ans=0.2
+2024-08-31 18:13:25,854 INFO [train.py:1114] (3/4) Epoch 20, batch 1100, loss[loss=0.2068, simple_loss=0.2775, pruned_loss=0.04947, ctc_loss=0.09292, over 19579.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2705, pruned_loss=0.04877, ctc_loss=0.09227, over 3828453.10 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0
+2024-08-31 18:13:28,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=258101.33333333334, ans=0.025
+2024-08-31 18:13:37,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=258154.66666666666, ans=10.0
+2024-08-31 18:14:05,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0
+2024-08-31 18:14:15,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=9.62 vs. limit=12.0
+2024-08-31 18:14:25,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=258368.0, ans=0.125
+2024-08-31 18:14:26,124 INFO [train.py:1114] (3/4) Epoch 20, batch 1150, loss[loss=0.1897, simple_loss=0.2673, pruned_loss=0.04021, ctc_loss=0.07939, over 19584.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2707, pruned_loss=0.04897, ctc_loss=0.09261, over 3827007.66 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0
+2024-08-31 18:15:11,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258421.33333333334, ans=0.1
+2024-08-31 18:15:12,227 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.657e+02 1.937e+02 2.398e+02 3.976e+02, threshold=3.875e+02, percent-clipped=1.0
+2024-08-31 18:15:27,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=258528.0, ans=0.125
+2024-08-31 18:15:36,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.18 vs. limit=10.0
+2024-08-31 18:15:42,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.09 vs. limit=10.0
+2024-08-31 18:15:51,946 INFO [train.py:1114] (3/4) Epoch 20, batch 1200, loss[loss=0.2169, simple_loss=0.2849, pruned_loss=0.05328, ctc_loss=0.106, over 19843.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2708, pruned_loss=0.04849, ctc_loss=0.09181, over 3823359.18 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0
+2024-08-31 18:16:02,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=258688.0, ans=0.125
+2024-08-31 18:16:16,152 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.87 vs. limit=15.0
+2024-08-31 18:16:27,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258794.66666666666, ans=0.0
+2024-08-31 18:16:31,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=15.0
+2024-08-31 18:16:36,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.74 vs. limit=15.0
+2024-08-31 18:16:54,771 INFO [train.py:1114] (3/4) Epoch 20, batch 1250, loss[loss=0.249, simple_loss=0.3005, pruned_loss=0.07195, ctc_loss=0.1339, over 19512.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2714, pruned_loss=0.04883, ctc_loss=0.09218, over 3841902.61 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 32.0
+2024-08-31 18:17:10,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=258954.66666666666, ans=0.025
+2024-08-31 18:17:17,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=258954.66666666666, ans=0.0
+2024-08-31 18:17:20,836 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.673e+02 1.864e+02 2.243e+02 4.460e+02, threshold=3.727e+02, percent-clipped=1.0
+2024-08-31 18:17:55,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259061.33333333334, ans=0.125
+2024-08-31 18:18:01,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=259114.66666666666, ans=0.125
+2024-08-31 18:18:06,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.47 vs. limit=22.5
+2024-08-31 18:19:05,809 INFO [train.py:1114] (3/4) Epoch 20, batch 1300, loss[loss=0.2442, simple_loss=0.3034, pruned_loss=0.06667, ctc_loss=0.1291, over 18920.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2706, pruned_loss=0.0485, ctc_loss=0.09132, over 3845313.70 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 32.0
+2024-08-31 18:19:13,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=259168.0, ans=0.2
+2024-08-31 18:19:20,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=15.0
+2024-08-31 18:19:38,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259274.66666666666, ans=0.1
+2024-08-31 18:20:04,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.76 vs. limit=6.0
+2024-08-31 18:20:12,175 INFO [train.py:1114] (3/4) Epoch 20, batch 1350, loss[loss=0.1966, simple_loss=0.2636, pruned_loss=0.04745, ctc_loss=0.08659, over 19766.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2705, pruned_loss=0.04838, ctc_loss=0.09094, over 3856715.66 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0
+2024-08-31 18:20:15,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=259434.66666666666, ans=0.0
+2024-08-31 18:20:38,785 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.436e+02 1.677e+02 1.917e+02 2.382e+02 4.193e+02, threshold=3.834e+02, percent-clipped=5.0
+2024-08-31 18:20:47,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=259541.33333333334, ans=0.125
+2024-08-31 18:20:52,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=259594.66666666666, ans=0.0
+2024-08-31 18:20:56,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259594.66666666666, ans=0.0
+2024-08-31 18:20:58,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=259594.66666666666, ans=0.0
+2024-08-31 18:21:14,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=259648.0, ans=0.125
+2024-08-31 18:21:14,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259648.0, ans=0.125
+2024-08-31 18:21:16,783 INFO [train.py:1114] (3/4) Epoch 20, batch 1400, loss[loss=0.1636, simple_loss=0.2311, pruned_loss=0.03548, ctc_loss=0.06281, over 19657.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2703, pruned_loss=0.04826, ctc_loss=0.09081, over 3863644.08 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0
+2024-08-31 18:21:34,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=259754.66666666666, ans=0.0
+2024-08-31 18:21:57,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259861.33333333334, ans=0.125
+2024-08-31 18:21:57,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=259861.33333333334, ans=0.125
+2024-08-31 18:22:29,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=259914.66666666666, ans=0.0
+2024-08-31 18:22:29,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259914.66666666666, ans=0.1
+2024-08-31 18:22:30,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259914.66666666666, ans=0.1
+2024-08-31 18:22:33,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259914.66666666666, ans=0.1
+2024-08-31 18:22:53,590 INFO [train.py:1114] (3/4) Epoch 20, batch 1450, loss[loss=0.2188, simple_loss=0.2931, pruned_loss=0.05211, ctc_loss=0.1006, over 19646.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2708, pruned_loss=0.04829, ctc_loss=0.0908, over 3862602.45 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0
+2024-08-31 18:23:01,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=259968.0, ans=0.025
+2024-08-31 18:23:15,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260021.33333333334, ans=0.125
+2024-08-31 18:23:17,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.407e+02 1.776e+02 2.029e+02 2.458e+02 5.712e+02, threshold=4.059e+02, percent-clipped=1.0
+2024-08-31 18:23:21,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=260074.66666666666, ans=0.125
+2024-08-31 18:23:21,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=260074.66666666666, ans=0.125
+2024-08-31 18:23:25,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=260074.66666666666, ans=0.125
+2024-08-31 18:23:28,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260074.66666666666, ans=0.125
+2024-08-31 18:23:33,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260128.0, ans=0.1
+2024-08-31 18:23:35,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.21 vs. limit=15.0
+2024-08-31 18:23:39,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=260128.0, ans=0.2
+2024-08-31 18:23:41,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.85 vs. limit=15.0
+2024-08-31 18:23:54,008 INFO [train.py:1114] (3/4) Epoch 20, batch 1500, loss[loss=0.2228, simple_loss=0.2911, pruned_loss=0.05604, ctc_loss=0.1058, over 19585.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2709, pruned_loss=0.04799, ctc_loss=0.09035, over 3862617.62 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 32.0
+2024-08-31 18:23:54,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=260234.66666666666, ans=10.0
+2024-08-31 18:23:57,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=260234.66666666666, ans=0.025
+2024-08-31 18:24:19,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260341.33333333334, ans=0.1
+2024-08-31 18:24:20,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260341.33333333334, ans=0.1
+2024-08-31 18:24:26,707 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 18:24:27,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=260341.33333333334, ans=0.0
+2024-08-31 18:24:29,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260394.66666666666, ans=0.0
+2024-08-31 18:24:35,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=260394.66666666666, ans=0.025
+2024-08-31 18:24:37,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0
+2024-08-31 18:25:34,752 INFO [train.py:1114] (3/4) Epoch 20, batch 1550, loss[loss=0.2069, simple_loss=0.281, pruned_loss=0.04877, ctc_loss=0.08821, over 19635.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2711, pruned_loss=0.0483, ctc_loss=0.09099, over 3846805.53 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 32.0
+2024-08-31 18:26:28,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=260554.66666666666, ans=0.0
+2024-08-31 18:26:33,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.748e+02 2.049e+02 2.466e+02 3.855e+02, threshold=4.097e+02, percent-clipped=0.0
+2024-08-31 18:26:39,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260608.0, ans=0.125
+2024-08-31 18:27:18,518 INFO [train.py:1114] (3/4) Epoch 20, batch 1600, loss[loss=0.2032, simple_loss=0.2781, pruned_loss=0.04688, ctc_loss=0.08615, over 19854.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2712, pruned_loss=0.04852, ctc_loss=0.0913, over 3836004.15 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0
+2024-08-31 18:27:32,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.73 vs. limit=22.5
+2024-08-31 18:27:48,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=12.0
+2024-08-31 18:27:53,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=260928.0, ans=0.125
+2024-08-31 18:28:03,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.09 vs. limit=15.0
+2024-08-31 18:28:30,428 INFO [train.py:1114] (3/4) Epoch 20, batch 1650, loss[loss=0.1991, simple_loss=0.2779, pruned_loss=0.04328, ctc_loss=0.08423, over 19670.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2707, pruned_loss=0.04853, ctc_loss=0.09151, over 3832121.53 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0
+2024-08-31 18:28:34,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261034.66666666666, ans=0.125
+2024-08-31 18:28:35,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=261034.66666666666, ans=0.0
+2024-08-31 18:28:53,153 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.350e+02 1.719e+02 2.026e+02 2.553e+02 4.958e+02, threshold=4.052e+02, percent-clipped=3.0
+2024-08-31 18:29:25,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.04 vs. limit=15.0
+2024-08-31 18:29:28,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.91 vs. limit=6.0
+2024-08-31 18:29:29,542 INFO [train.py:1114] (3/4) Epoch 20, batch 1700, loss[loss=0.1714, simple_loss=0.2353, pruned_loss=0.03886, ctc_loss=0.07429, over 19682.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2704, pruned_loss=0.04819, ctc_loss=0.09096, over 3846224.64 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0
+2024-08-31 18:29:30,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.22 vs. limit=15.0
+2024-08-31 18:29:36,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=261301.33333333334, ans=0.2
+2024-08-31 18:29:38,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261301.33333333334, ans=0.0
+2024-08-31 18:29:45,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=261354.66666666666, ans=0.0
+2024-08-31 18:29:52,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=261354.66666666666, ans=0.125
+2024-08-31 18:30:08,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=261461.33333333334, ans=0.2
+2024-08-31 18:30:17,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=261514.66666666666, ans=0.125
+2024-08-31 18:31:14,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=261514.66666666666, ans=0.05
+2024-08-31 18:31:17,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261568.0, ans=0.125
+2024-08-31 18:31:18,020 INFO [train.py:1114] (3/4) Epoch 20, batch 1750, loss[loss=0.1914, simple_loss=0.2524, pruned_loss=0.04752, ctc_loss=0.08852, over 19644.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2703, pruned_loss=0.04815, ctc_loss=0.09108, over 3851220.45 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0
+2024-08-31 18:31:39,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=261621.33333333334, ans=0.07
+2024-08-31 18:31:39,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0
+2024-08-31 18:31:39,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.07 vs. limit=15.0
+2024-08-31 18:31:40,001 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.679e+02 1.951e+02 2.329e+02 4.159e+02, threshold=3.901e+02, percent-clipped=0.0
+2024-08-31 18:31:58,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=261728.0, ans=0.0
+2024-08-31 18:31:58,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261728.0, ans=0.125
+2024-08-31 18:32:15,181 INFO [train.py:1114] (3/4) Epoch 20, batch 1800, loss[loss=0.2058, simple_loss=0.2758, pruned_loss=0.04911, ctc_loss=0.09426, over 19626.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.27, pruned_loss=0.04824, ctc_loss=0.09116, over 3852909.19 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0
+2024-08-31 18:32:24,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.53 vs. limit=22.5
+2024-08-31 18:33:20,837 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-31 18:33:26,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=262048.0, ans=0.125
+2024-08-31 18:33:27,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=262048.0, ans=0.95
+2024-08-31 18:33:28,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=262048.0, ans=0.125
+2024-08-31 18:33:34,492 INFO [train.py:1114] (3/4) Epoch 20, batch 1850, loss[loss=0.2048, simple_loss=0.2788, pruned_loss=0.04824, ctc_loss=0.08582, over 19584.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2697, pruned_loss=0.04817, ctc_loss=0.09081, over 3857665.79 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0
+2024-08-31 18:33:52,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=262154.6666666667, ans=0.0
+2024-08-31 18:33:55,997 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.842e+02 2.206e+02 3.038e+02 4.306e+02, threshold=4.411e+02, percent-clipped=5.0
+2024-08-31 18:34:05,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=262208.0, ans=0.125
+2024-08-31 18:34:06,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=262208.0, ans=0.2
+2024-08-31 18:34:21,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262261.3333333333, ans=0.125
+2024-08-31 18:34:36,231 INFO [train.py:1114] (3/4) Epoch 20, batch 1900, loss[loss=0.1958, simple_loss=0.2728, pruned_loss=0.04359, ctc_loss=0.07898, over 19647.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2701, pruned_loss=0.04837, ctc_loss=0.09108, over 3862953.43 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0
+2024-08-31 18:34:37,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=262368.0, ans=0.5
+2024-08-31 18:34:49,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262421.3333333333, ans=0.125
+2024-08-31 18:35:06,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262474.6666666667, ans=0.125
+2024-08-31 18:35:11,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=262528.0, ans=0.125
+2024-08-31 18:35:24,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262581.3333333333, ans=0.125
+2024-08-31 18:35:34,443 INFO [train.py:1114] (3/4) Epoch 20, batch 1950, loss[loss=0.1892, simple_loss=0.2562, pruned_loss=0.0439, ctc_loss=0.08589, over 19603.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2712, pruned_loss=0.04862, ctc_loss=0.09146, over 3871290.78 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0
+2024-08-31 18:35:38,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=262634.6666666667, ans=0.125
+2024-08-31 18:35:48,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0
+2024-08-31 18:35:55,629 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.392e+02 1.650e+02 1.780e+02 2.101e+02 3.496e+02, threshold=3.560e+02, percent-clipped=0.0
+2024-08-31 18:36:16,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=262794.6666666667, ans=0.0
+2024-08-31 18:36:31,282 INFO [train.py:1114] (3/4) Epoch 20, batch 2000, loss[loss=0.1692, simple_loss=0.2366, pruned_loss=0.03722, ctc_loss=0.06816, over 19675.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2719, pruned_loss=0.04905, ctc_loss=0.09217, over 3854870.91 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0
+2024-08-31 18:36:33,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=262901.3333333333, ans=0.04949747468305833
+2024-08-31 18:36:47,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262954.6666666667, ans=0.1
+2024-08-31 18:37:21,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=263114.6666666667, ans=0.125
+2024-08-31 18:37:32,654 INFO [train.py:1114] (3/4) Epoch 20, batch 2050, loss[loss=0.1789, simple_loss=0.2446, pruned_loss=0.04172, ctc_loss=0.07438, over 19699.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2706, pruned_loss=0.04883, ctc_loss=0.09176, over 3850390.55 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0
+2024-08-31 18:37:34,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=263168.0, ans=0.025
+2024-08-31 18:37:47,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=263168.0, ans=10.0
+2024-08-31 18:38:01,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=6.0
+2024-08-31 18:38:02,078 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.724e+02 2.041e+02 2.585e+02 3.821e+02, threshold=4.082e+02, percent-clipped=5.0
+2024-08-31 18:38:28,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=263381.3333333333, ans=0.0
+2024-08-31 18:38:28,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=263381.3333333333, ans=0.125
+2024-08-31 18:38:36,463 INFO [train.py:1114] (3/4) Epoch 20, batch 2100, loss[loss=0.1916, simple_loss=0.2629, pruned_loss=0.04316, ctc_loss=0.08522, over 19774.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2699, pruned_loss=0.04823, ctc_loss=0.09048, over 3858025.09 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0
+2024-08-31 18:38:36,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=263434.6666666667, ans=0.0
+2024-08-31 18:38:42,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263434.6666666667, ans=0.1
+2024-08-31 18:38:44,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=263434.6666666667, ans=0.0
+2024-08-31 18:39:07,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=263541.3333333333, ans=0.5
+2024-08-31 18:39:28,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0
+2024-08-31 18:39:32,892 INFO [train.py:1114] (3/4) Epoch 20, batch 2150, loss[loss=0.1921, simple_loss=0.2628, pruned_loss=0.04463, ctc_loss=0.08035, over 19861.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2692, pruned_loss=0.0481, ctc_loss=0.09034, over 3870347.82 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0
+2024-08-31 18:39:57,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=263754.6666666667, ans=0.05
+2024-08-31 18:39:58,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.627e+02 1.896e+02 2.393e+02 5.058e+02, threshold=3.792e+02, percent-clipped=5.0
+2024-08-31 18:40:10,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263861.3333333333, ans=0.1
+2024-08-31 18:40:11,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=263861.3333333333, ans=0.0
+2024-08-31 18:41:09,868 INFO [train.py:1114] (3/4) Epoch 20, batch 2200, loss[loss=0.2244, simple_loss=0.2962, pruned_loss=0.05599, ctc_loss=0.1016, over 19581.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2689, pruned_loss=0.04783, ctc_loss=0.08987, over 3868929.77 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0
+2024-08-31 18:41:19,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.57 vs. limit=15.0
+2024-08-31 18:41:21,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.28 vs. limit=6.0
+2024-08-31 18:41:22,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.00 vs. limit=15.0
+2024-08-31 18:41:24,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264021.3333333333, ans=0.125
+2024-08-31 18:41:25,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=264021.3333333333, ans=0.125
+2024-08-31 18:41:42,440 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0
+2024-08-31 18:41:49,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=264074.6666666667, ans=22.5
+2024-08-31 18:42:17,186 INFO [train.py:1114] (3/4) Epoch 20, batch 2250, loss[loss=0.1798, simple_loss=0.2571, pruned_loss=0.0364, ctc_loss=0.07441, over 19609.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2697, pruned_loss=0.04832, ctc_loss=0.09081, over 3867828.09 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:42:39,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=264288.0, ans=0.2 +2024-08-31 18:42:41,062 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:42:42,063 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.705e+02 2.149e+02 2.747e+02 5.291e+02, threshold=4.298e+02, percent-clipped=7.0 +2024-08-31 18:42:56,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264394.6666666667, ans=0.1 +2024-08-31 18:42:58,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264394.6666666667, ans=0.125 +2024-08-31 18:43:00,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264394.6666666667, ans=0.1 +2024-08-31 18:43:16,656 INFO [train.py:1114] (3/4) Epoch 20, batch 2300, loss[loss=0.1902, simple_loss=0.2546, pruned_loss=0.04724, ctc_loss=0.07806, over 19514.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2691, pruned_loss=0.04848, ctc_loss=0.0912, over 3862372.82 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-31 18:43:25,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=264501.3333333333, ans=0.0 +2024-08-31 18:43:27,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264554.6666666667, ans=0.125 +2024-08-31 18:44:04,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.94 vs. limit=22.5 +2024-08-31 18:44:12,819 INFO [train.py:1114] (3/4) Epoch 20, batch 2350, loss[loss=0.2098, simple_loss=0.2824, pruned_loss=0.05107, ctc_loss=0.08755, over 19659.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2687, pruned_loss=0.04814, ctc_loss=0.09044, over 3865153.71 frames. 
], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:44:46,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=264821.3333333333, ans=0.0 +2024-08-31 18:44:47,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=264821.3333333333, ans=0.125 +2024-08-31 18:44:49,420 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.669e+02 1.905e+02 2.325e+02 3.822e+02, threshold=3.811e+02, percent-clipped=0.0 +2024-08-31 18:44:55,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264874.6666666667, ans=0.1 +2024-08-31 18:45:05,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=264928.0, ans=0.025 +2024-08-31 18:45:13,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264928.0, ans=0.125 +2024-08-31 18:45:16,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=264981.3333333333, ans=0.0 +2024-08-31 18:45:25,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=265034.6666666667, ans=0.125 +2024-08-31 18:45:26,868 INFO [train.py:1114] (3/4) Epoch 20, batch 2400, loss[loss=0.1963, simple_loss=0.2751, pruned_loss=0.04241, ctc_loss=0.0818, over 19338.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2707, pruned_loss=0.04858, ctc_loss=0.09108, over 3860083.88 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:45:45,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265088.0, ans=0.125 +2024-08-31 18:45:46,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=265088.0, ans=0.0 +2024-08-31 18:45:47,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265088.0, ans=0.125 +2024-08-31 18:45:54,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-31 18:46:05,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=265194.6666666667, ans=0.125 +2024-08-31 18:46:21,125 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.35 vs. limit=15.0 +2024-08-31 18:46:23,157 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-31 18:46:23,881 INFO [train.py:1114] (3/4) Epoch 20, batch 2450, loss[loss=0.2392, simple_loss=0.286, pruned_loss=0.06858, ctc_loss=0.1383, over 13237.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2747, pruned_loss=0.05145, ctc_loss=0.09681, over 3732985.70 frames. 
], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-31 18:46:25,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265301.3333333333, ans=0.125 +2024-08-31 18:46:32,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.14 vs. limit=15.0 +2024-08-31 18:46:44,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=265354.6666666667, ans=0.0 +2024-08-31 18:46:45,873 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.663e+02 1.874e+02 2.086e+02 3.013e+02, threshold=3.749e+02, percent-clipped=0.0 +2024-08-31 18:46:55,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=265408.0, ans=0.0 +2024-08-31 18:47:07,497 INFO [train.py:1387] (3/4) Done! diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724577812.cdr2654.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724577812.cdr2654.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..9848dd70f4bb724c2ba22a28e7a93d7925dadbef --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724577812.cdr2654.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:61365b5c7358e64414972303d682b805806f18146b07e25a3e8a95c0934f424e +size 417133 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724923212.cdr2655.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724923212.cdr2655.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..769bd1330db2480e4baaf5884da05eecaa5be86a --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724923212.cdr2655.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:29e2313dc5ee6ffce015e5b0da45e5903a4c879641a95b6aace79fd890873715 +size 980 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724952574.cdr2558.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724952574.cdr2558.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..ec3798e82b9087d5269b1c59712caa4f1c84b5ca --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724952574.cdr2558.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a6a912ba9d25e5e3cbbcb911907c87324439c2494a113b49c26c774b718ce81a +size 980 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724962117.cdr2549.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724962117.cdr2549.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..3a50d932560dc9013122e6d741d896f167bd5551 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1724962117.cdr2549.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:766081f198edf21416a04cc0f43b2d1af6937368b383556a113aa33f918f5972 +size 100694 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725047086.cdr2651.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725047086.cdr2651.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..18eed61ad5e9655df186afa213efa2065b813fa7 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725047086.cdr2651.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea2bce5b0c4d605a2d724515cc55837e6ed3064d4361cace12cabc3af6b90daa +size 47396 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725073612.cdr2647.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725073612.cdr2647.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..dd6cf1f6f16b476bc382b38309417cbe1c0c1b86 --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725073612.cdr2647.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47d6be3336e0b5cb7abdbf85fc8114886e8bb8a987a6a1262456fdc31a96fa07 +size 980 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725078928.cdr2539.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725078928.cdr2539.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..e2a30c911bd6ce8247dcf1ce5ae977ac8ab56fde --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725078928.cdr2539.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a4fdcef438b1f175c7fed56eec6ebe788bacc1ab9e3022dd2e7c6abed7d327f +size 1579 diff --git a/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725135301.cdr2535.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725135301.cdr2535.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..38bbe352f73a7bd343b85bee6e82ee9bb21949af --- /dev/null +++ b/zipformer/pretrained/ctc/causal/exp/tensorboard/events.out.tfevents.1725135301.cdr2535.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a91a068436231020e1a2a58c7069f34d7265400a62e8da451d8f9bd7cee8cac +size 90817 diff --git a/zipformer/pretrained/ctc/non_causal/exp/best-train-loss.pt b/zipformer/pretrained/ctc/non_causal/exp/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..f7e8a42d8bfab6d5f006cfe72255f5a2c4176ef0 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39613c48594e073e86c1f1a4f05015119693bd7ba9f29f168fb8637f28137e91 +size 1053872782 diff --git a/zipformer/pretrained/ctc/non_causal/exp/best-valid-loss.pt b/zipformer/pretrained/ctc/non_causal/exp/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..f7e8a42d8bfab6d5f006cfe72255f5a2c4176ef0 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:39613c48594e073e86c1f1a4f05015119693bd7ba9f29f168fb8637f28137e91 +size 1053872782 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-12000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-12000.pt new file mode 100644 index 0000000000000000000000000000000000000000..1812bcdd348c2c3f24ef52984b63d2ca72667ac7 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-12000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2a03f71fa4973649c9ce4e0adc9e881497f158f03b9956e32a15d663f17b6c97 +size 1053888534 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-16000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-16000.pt new file mode 100644 index 0000000000000000000000000000000000000000..02a875e7b571c01da57a1c8882b7a47106ef6168 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-16000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:452c1c9add2d8e35fa96f03e76c917c914855b4399a32dd92f4108f8904d04bf +size 1053888598 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-20000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-20000.pt new file mode 100644 index 0000000000000000000000000000000000000000..5e39e483bfd1245939114a5a93a17b142fe05db4 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-20000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6fa76ed9c42df5965f0a58012ffbefb2446c580d4faa636ff048cf2be14ec92 +size 1053888726 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-24000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-24000.pt new file mode 100644 index 0000000000000000000000000000000000000000..f00fcd4b1a6a45422af3513698468944adde6b02 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-24000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:377b58a1738a23e97d93f8e9d4f4dafb1130263ef115ff229a46691ff613d121 +size 1053888790 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-28000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-28000.pt new file mode 100644 index 0000000000000000000000000000000000000000..b5d33d1c925be52c6983c10c4e0ea612dd3e1db5 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-28000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ef1792231251c44b52b1e96a0f6610bd95b6a5eb6ffb0a56b4b984cc200dc1cd +size 1053888854 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-32000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-32000.pt new file mode 100644 index 0000000000000000000000000000000000000000..9543dec1824651baa08ec3deef386589cb000824 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-32000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:53c54341953015d6e084043ff3c2e80e2346f452fb5bb28c3bfabed04a435f09 +size 1053888918 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-36000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-36000.pt new file mode 100644 index 0000000000000000000000000000000000000000..5949aa50c41de1071555611e5744c9134968caaa --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-36000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a42412fc7a42729ae29e5b956802bdc691e87f49efb6a504159ee58c6378495 +size 1053889046 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-4000.pt 
b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-4000.pt new file mode 100644 index 0000000000000000000000000000000000000000..d3ec0c3935b3430baa63a5ba212b40584e8586bf --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-4000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0348de91c692178d305149ea54b8afb5eeadf64a2dc2a666a3a6150e14e696b8 +size 1053886533 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-40000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-40000.pt new file mode 100644 index 0000000000000000000000000000000000000000..086ddd6f1535a0edb78c3bc71271135c34cd29a6 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-40000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dcc6ed2ec68be840a092307ebe8c928fdbbf5307bf2cb0c6ae194d94f3a5b3a +size 1053889110 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-44000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-44000.pt new file mode 100644 index 0000000000000000000000000000000000000000..5e48ee882aa31fd3d78395410920f28571d70359 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-44000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92785d8f33bdc5ca4d83cdc0d4df2f6e209f64c32ae3ca5c04d22f810dbcec5e +size 1053889174 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-48000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-48000.pt new file mode 100644 index 0000000000000000000000000000000000000000..1253f150130d03b2162b762cb8e3f8532bcb80de --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-48000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:439531d69ca244e649341af26c1dad99341439b1420936105bfeb79de052fb2b +size 1053888534 diff --git a/zipformer/pretrained/ctc/non_causal/exp/checkpoint-8000.pt b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-8000.pt new file mode 100644 index 0000000000000000000000000000000000000000..aaf261fd57c6ffd3c157a268d06257b5864ad881 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/checkpoint-8000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b82a36e8171480c5d1493864382843feba7803477fc8cb952d52a5c7293f2a2d +size 1053886533 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-1.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..bce37f495126e68f529f710faa288f0ba071716b --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f5e43c3d0bc9ec0b6f4e9b5cbd80e4ddf5ee11c9530a03489a68313c88c4685 +size 1053870333 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-10.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..b3ea7fd07be88f8ad3d02c05681518c49ebe5f47 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-10.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b9d535a0817a35ab6adf221c8f86b64fe4c363d39ba0a8d2fc7d0281d859342 +size 1053873038 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-11.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..3fe400c25ef60897b7cec6b549fd8048f9b8d328 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-11.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:d7a8d4f5bf64077e9104310d0748e81caf194bb5d42d8776163fca49b70ac308 +size 1053873038 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-12.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..13922058bf2c0d289ee9c46c26cf37b82b42dcd5 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18b111699c4e98018557a70a1ad479f6c99558a7684a37a243471e5e0da05886 +size 1053873102 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-13.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-13.pt new file mode 100644 index 0000000000000000000000000000000000000000..9a2dbc30feb2a0c3646f9f0d9ce2ee308c8f88af --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c74f2dd4a22e061fec4448b74bcbd107405bed86a55377f314b5b695d940b2d +size 1053873166 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-14.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..e1c1632dec3c6a73f9d030779645ecf9009740d8 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a06c97df3fe7a1448ce02471b8e86cdea20642ba2723faa2f96ed8dd9fdcb6f +size 1053873230 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-15.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-15.pt new file mode 100644 index 0000000000000000000000000000000000000000..44d1b454dabdb40f8b7a80d31e125c83bb80e4d4 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-15.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d63ebb0ebc7307699a8657c1dffa0e35e9b914571b1c10fc7cd2aa953bc5abb5 +size 1053873230 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-16.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..2cb03e26848d2640c658c9273d1a57a5d9f04a8e --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:171ce9f91b7160d56723bb1c6943270006d4300b0e4664cffa82f9635b698b02 +size 1053873294 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-17.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..302f67754c534033caa297b50539e15317215923 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eee22e74fb1973a95bf92a6731af315ab330eec794700e350e262409897aed46 +size 1053873358 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-18.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..9f1767cd0a28e1bbc67fe55689188b99c9e63380 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e218ba9aaa427938661fa0887b9be04940ca605fa759e2daa733b339ba87c8e +size 1053873422 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-19.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..90c702ef625932cfe0fb0a988b55d431df1bde6b --- 
/dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8dc77c019568361746d60eb1a507e8999ea825a15407e628f5403e6c070cf63a +size 1053872718 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-2.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..36cf7fcdebc45307c2de7e6dc5c8634651488e66 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c4e9bcb42450dfab31a1ff1848b38370921a736ae3340791642c4fbdec9858e +size 1053870397 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-20.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..f7e8a42d8bfab6d5f006cfe72255f5a2c4176ef0 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39613c48594e073e86c1f1a4f05015119693bd7ba9f29f168fb8637f28137e91 +size 1053872782 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-3.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..f61d521cda3edbd3c54140d2e0f034f7953ba474 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-3.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f818990a5e33be5dbef3c10608f46e0f14a217c3b9f834ce594677e437b0ed2b +size 1053870461 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-4.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-4.pt new file mode 100644 index 0000000000000000000000000000000000000000..1a350366383e53515feafbd0f293c72a64d0295c --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1caa70c64a7a9fdd71224616ecb713c8672bd95bfbd618279cf12acb03bd92f0 +size 1053870397 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-5.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..d3bec02c2302ea301d2d3559879a254244222a80 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e3871823e9e09c729e5b344f99e464dd5adcb68a92cf028b9da2c04da73e5f2c +size 1053870461 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-6.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..ab5a67438471f93b1155f4ec5c36b77124d3d1b2 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71c4f9c0c9ee3ea053f9d98b64ddcc7b222a54c64f7220a1e38423dd5c7ecf8d +size 1053870525 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-7.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..2135269678933733b619d9f98bc824992dc8ddff --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93285a7734e563ed969d0720e16267075585483e4af82d159ceffd40b3d4fa2d +size 1053870525 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-8.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-8.pt new file mode 100644 index 
0000000000000000000000000000000000000000..3f77e6881a24c380f1c572967120e7ed6ecb14b7 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6789aa0156a77182f0c3d20166e199f16fa65097d40d76a86992dcaba888a8f +size 1053870589 diff --git a/zipformer/pretrained/ctc/non_causal/exp/epoch-9.pt b/zipformer/pretrained/ctc/non_causal/exp/epoch-9.pt new file mode 100644 index 0000000000000000000000000000000000000000..bad7362160055bad51e8fbc7151b496b64a1df02 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8beaae1488d5394a20e300ed0cdc4cc4f549f047e1f664ef19bf6cbda4769969 +size 1053870653 diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-0 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-0 new file mode 100644 index 0000000000000000000000000000000000000000..c7ee70b45380029850c414923931c55ecd2bfc27 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-0 @@ -0,0 +1,1144 @@ +2024-08-25 03:46:09,034 INFO [train.py:1182] (0/4) Training started +2024-08-25 03:46:09,039 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-25 03:46:09,373 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 
'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-25 03:46:09,373 INFO [train.py:1212] (0/4) About to create model +2024-08-25 03:46:10,481 INFO [train.py:1216] (0/4) Number of model parameters: 65805511 +2024-08-25 03:46:11,267 INFO [train.py:1231] (0/4) Using DDP +2024-08-25 03:46:14,820 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-25 03:46:14,899 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-25 03:46:14,899 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-25 03:46:16,485 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-25 03:46:16,488 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-25 03:46:16,584 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-25 03:46:16,613 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-25 03:46:16,931 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-25 03:46:16,931 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-25 03:50:49,730 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=49.69 vs. limit=7.5 +2024-08-25 03:50:50,512 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 11612MB +2024-08-25 03:50:51,645 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 11612MB +2024-08-25 03:51:20,163 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 11612MB +2024-08-25 03:51:21,409 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 11612MB +2024-08-25 03:51:43,054 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 11612MB +2024-08-25 03:51:44,345 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 11612MB +2024-08-25 03:53:11,522 INFO [train.py:1114] (0/4) Epoch 1, batch 0, loss[loss=8.717, simple_loss=7.066, pruned_loss=6.906, ctc_loss=4.795, over 19814.00 frames. ], tot_loss[loss=8.717, simple_loss=7.066, pruned_loss=6.906, ctc_loss=4.795, over 19814.00 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 1.0 +2024-08-25 03:53:11,523 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 03:53:26,569 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=8.842, simple_loss=7.151, pruned_loss=6.961, ctc_loss=4.966, over 944034.00 frames. 
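The "lr: ..." values reported every 50 batches in this log (2.25e-02 at batch 0, rising through 2.70e-02, 3.15e-02, 3.60e-02) follow from the `base_lr: 0.045`, `lr_batches: 7500` and `lr_epochs: 3.5` entries in the config dump above: the recipe uses an Eden-style scheduler that combines a polynomial decay in both the batch and epoch counts with a linear warmup. The sketch below is illustrative rather than the icefall source; the `warmup_batches=500` and `warmup_start=0.5` defaults are assumptions, since neither appears in this log.

```python
# A minimal, illustrative sketch of the Eden-style learning-rate schedule
# behind the "lr: ..." values in this log. base_lr=0.045, lr_batches=7500
# and lr_epochs=3.5 come from the config dump above; warmup_batches=500 and
# warmup_start=0.5 are assumed defaults that do not appear in this log.
def eden_lr(batch: float, epoch: float,
            base_lr: float = 0.045,
            lr_batches: float = 7500.0,
            lr_epochs: float = 3.5,
            warmup_batches: float = 500.0,
            warmup_start: float = 0.5) -> float:
    # Polynomial decay in both the batch count and the (fractional) epoch count.
    decay = (
        ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    )
    # Linear warmup from warmup_start up to 1.0 over the first warmup_batches.
    warmup = 1.0 if batch >= warmup_batches else (
        warmup_start + (1.0 - warmup_start) * batch / warmup_batches
    )
    return base_lr * decay * warmup

# During epoch 1 the epoch-decay term is ~1, so warmup dominates and the
# values logged every 50 batches are reproduced:
for b in (0, 100, 200, 300):
    print(f"batch {b}: lr ~ {eden_lr(b, epoch=0.0):.2e}")
# batch 0: lr ~ 2.25e-02    (log: "Epoch 1, batch 0 ... lr: 2.25e-02")
# batch 100: lr ~ 2.70e-02  (log: "Epoch 1, batch 100 ... lr: 2.70e-02")
# batch 200: lr ~ 3.15e-02  (log: "Epoch 1, batch 200 ... lr: 3.15e-02")
# batch 300: lr ~ 3.60e-02  (log: "Epoch 1, batch 300 ... lr: 3.60e-02")
```

The remaining logged values (e.g. 2.48e-02 at batch 50) are one warmup step ahead of this sketch, consistent with the recipe reporting lr after the optimizer step rather than before it.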
+2024-08-25 03:53:26,570 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 11981MB +2024-08-25 03:53:28,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.11 vs. limit=3.0 +2024-08-25 03:53:35,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=0.0, ans=0.5 +2024-08-25 03:53:41,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.31 vs. limit=7.5 +2024-08-25 03:53:42,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=0.0, ans=0.2 +2024-08-25 03:53:46,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.55 vs. limit=7.5 +2024-08-25 03:54:08,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=0.0, ans=0.9 +2024-08-25 03:54:36,708 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.008e+03 4.149e+03 4.360e+03 5.530e+03 5.553e+03, threshold=1.744e+04, percent-clipped=0.0 +2024-08-25 03:54:39,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=53.333333333333336, ans=0.198 +2024-08-25 03:54:55,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=183.33 vs. limit=5.026666666666666 +2024-08-25 03:55:46,191 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.063e+03 1.598e+03 4.141e+03 5.530e+03 6.572e+03, threshold=1.656e+04, percent-clipped=0.0 +2024-08-25 03:57:08,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=277.20 vs. limit=7.54 +2024-08-25 03:57:26,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=88.29 vs. limit=5.04 +2024-08-25 03:57:39,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=19.90 vs. limit=5.04 +2024-08-25 03:57:39,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=98.65 vs. limit=5.0 +2024-08-25 04:00:14,861 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+02 1.048e+03 1.328e+03 4.149e+03 6.572e+03, threshold=5.310e+03, percent-clipped=0.0 +2024-08-25 04:00:18,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=213.33333333333334, ans=0.49 +2024-08-25 04:00:27,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.47 vs. limit=7.58 +2024-08-25 04:00:39,849 INFO [train.py:1114] (0/4) Epoch 1, batch 50, loss[loss=1.546, simple_loss=1.018, pruned_loss=1.171, ctc_loss=1.987, over 19697.00 frames. ], tot_loss[loss=3.752, simple_loss=2.911, pruned_loss=2.556, ctc_loss=2.894, over 845725.26 frames. 
], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 04:00:51,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=266.6666666666667, ans=0.8906666666666667 +2024-08-25 04:01:03,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=320.0, ans=0.098 +2024-08-25 04:01:14,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=153.91 vs. limit=5.16 +2024-08-25 04:01:20,935 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=12.61 vs. limit=4.128 +2024-08-25 04:01:25,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.68 vs. limit=5.08 +2024-08-25 04:01:50,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=22.67 vs. limit=5.1866666666666665 +2024-08-25 04:01:50,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=311.42 vs. limit=5.1866666666666665 +2024-08-25 04:02:04,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=10.79 vs. limit=4.1706666666666665 +2024-08-25 04:02:05,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=20.71 vs. limit=7.66 +2024-08-25 04:02:06,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=426.6666666666667, ans=0.8850666666666667 +2024-08-25 04:02:39,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=57.10 vs. limit=7.86 +2024-08-25 04:02:39,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=480.0, ans=0.0892 +2024-08-25 04:02:53,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=29.28 vs. limit=7.86 +2024-08-25 04:02:59,895 INFO [train.py:1114] (0/4) Epoch 1, batch 100, loss[loss=1.406, simple_loss=0.9854, pruned_loss=1.239, ctc_loss=1.328, over 19718.00 frames. ], tot_loss[loss=2.582, simple_loss=1.908, pruned_loss=1.862, ctc_loss=2.359, over 1499439.12 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 04:03:07,083 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.974e+02 8.674e+02 1.328e+03 6.572e+03, threshold=1.735e+03, percent-clipped=0.0 +2024-08-25 04:03:17,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=533.3333333333334, ans=0.18 +2024-08-25 04:03:19,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=586.6666666666666, ans=0.5 +2024-08-25 04:03:36,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.33 vs. 
limit=4.256 +2024-08-25 04:03:51,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=20.02 vs. limit=5.346666666666667 +2024-08-25 04:03:53,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. limit=3.104 +2024-08-25 04:03:57,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=693.3333333333334, ans=0.29306666666666664 +2024-08-25 04:04:01,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=693.3333333333334, ans=0.017333333333333336 +2024-08-25 04:04:03,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.51 vs. limit=7.76 +2024-08-25 04:04:04,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=746.6666666666666, ans=0.04766666666666667 +2024-08-25 04:04:16,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=746.6666666666666, ans=0.46499999999999997 +2024-08-25 04:04:18,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=383.26 vs. limit=7.78 +2024-08-25 04:04:19,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=365.52 vs. limit=7.78 +2024-08-25 04:04:22,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=800.0, ans=0.5 +2024-08-25 04:04:22,850 INFO [train.py:1114] (0/4) Epoch 1, batch 150, loss[loss=1.148, simple_loss=0.79, pruned_loss=1.006, ctc_loss=1.093, over 19717.00 frames. ], tot_loss[loss=2.05, simple_loss=1.492, pruned_loss=1.569, ctc_loss=1.873, over 2027737.98 frames. ], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 04:04:24,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=39.53 vs. limit=8.1 +2024-08-25 04:04:25,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=205.18 vs. limit=7.8 +2024-08-25 04:04:27,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=78.34 vs. limit=5.0 +2024-08-25 04:04:33,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=27.84 vs. limit=8.1 +2024-08-25 04:04:34,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=17.06 vs. limit=7.82 +2024-08-25 04:04:34,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=11.01 vs. limit=5.213333333333333 +2024-08-25 04:04:40,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=22.79 vs. 
limit=7.82
+2024-08-25 04:04:58,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.79 vs. limit=8.18
+2024-08-25 04:05:07,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=906.6666666666666, ans=0.2136
+2024-08-25 04:05:24,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=50.09 vs. limit=8.22
+2024-08-25 04:05:40,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=1013.3333333333334, ans=0.4525
+2024-08-25 04:05:42,042 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=12.96 vs. limit=5.253333333333333
+2024-08-25 04:05:44,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=26.06 vs. limit=7.88
+2024-08-25 04:05:49,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=7.88
+2024-08-25 04:05:51,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=175.34 vs. limit=7.88
+2024-08-25 04:05:53,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1066.6666666666667, ans=0.45
+2024-08-25 04:05:53,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=151.52 vs. limit=7.9
+2024-08-25 04:05:53,952 INFO [train.py:1114] (0/4) Epoch 1, batch 200, loss[loss=1.263, simple_loss=0.8703, pruned_loss=1.013, ctc_loss=1.218, over 18215.00 frames. ], tot_loss[loss=1.761, simple_loss=1.265, pruned_loss=1.381, ctc_loss=1.625, over 2435361.75 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0
+2024-08-25 04:05:57,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.117e+01 1.191e+02 1.554e+02 2.219e+02 5.914e+02, threshold=3.108e+02, percent-clipped=0.0
+2024-08-25 04:06:01,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.20 vs. limit=7.9
+2024-08-25 04:06:03,912 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.83 vs. limit=4.426666666666667
+2024-08-25 04:06:04,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=1120.0, ans=0.0748
+2024-08-25 04:06:16,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=1173.3333333333333, ans=0.35333333333333333
+2024-08-25 04:06:17,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.12 vs. limit=8.38
+2024-08-25 04:06:17,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=7.94
+2024-08-25 04:06:21,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.59 vs. limit=5.293333333333333
+2024-08-25 04:06:22,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=7.94
+2024-08-25 04:06:25,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=4.469333333333333
+2024-08-25 04:06:25,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.70 vs. limit=4.469333333333333
+2024-08-25 04:06:36,587 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.68 vs. limit=8.42
+2024-08-25 04:06:52,980 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.73 vs. limit=8.46
+2024-08-25 04:06:57,326 INFO [train.py:1114] (0/4) Epoch 1, batch 250, loss[loss=1.249, simple_loss=0.8448, pruned_loss=0.9912, ctc_loss=1.232, over 19443.00 frames. ], tot_loss[loss=1.587, simple_loss=1.126, pruned_loss=1.254, ctc_loss=1.484, over 2755446.34 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0
+2024-08-25 04:07:04,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.whiten.whitening_limit, batch_count=1333.3333333333333, ans=4.533333333333333
+2024-08-25 04:07:04,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=45.33 vs. limit=5.666666666666667
+2024-08-25 04:07:42,076 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=214.04 vs. limit=8.02
+2024-08-25 04:07:50,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.54 vs. limit=8.54
+2024-08-25 04:07:53,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=267.32 vs. limit=8.02
+2024-08-25 04:07:54,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.68 vs. limit=8.02
+2024-08-25 04:07:56,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1440.0, ans=0.4325
+2024-08-25 04:08:01,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=1440.0, ans=0.8496
+2024-08-25 04:08:10,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=4.597333333333333
+2024-08-25 04:08:18,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=3.96 vs. limit=4.298666666666667
+2024-08-25 04:08:22,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=1546.6666666666667, ans=0.04516666666666667
+2024-08-25 04:08:23,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1546.6666666666667, ans=0.4275
+2024-08-25 04:09:06,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1546.6666666666667, ans=0.2845333333333333
+2024-08-25 04:09:08,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=1546.6666666666667, ans=0.2232
+2024-08-25 04:09:11,357 INFO [train.py:1114] (0/4) Epoch 1, batch 300, loss[loss=1.225, simple_loss=0.8202, pruned_loss=0.956, ctc_loss=1.206, over 19507.00 frames. ], tot_loss[loss=1.471, simple_loss=1.031, pruned_loss=1.162, ctc_loss=1.392, over 2998983.33 frames. ], batch size: 61, lr: 3.60e-02, grad_scale: 2.0
+2024-08-25 04:09:11,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1600.0, ans=0.284
+2024-08-25 04:09:14,912 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.125e+01 1.367e+02 1.753e+02 2.332e+02 3.681e+02, threshold=3.505e+02, percent-clipped=6.0
+2024-08-25 04:09:15,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=39.01 vs. limit=8.1
+2024-08-25 04:09:20,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=34.34 vs. limit=8.1
+2024-08-25 04:09:34,636 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=115.66 vs. limit=8.12
+2024-08-25 04:10:19,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=207.46 vs. limit=8.12
+2024-08-25 04:10:26,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=59.49 vs. limit=8.12
+2024-08-25 04:10:30,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=1706.6666666666667, ans=0.42
+2024-08-25 04:10:46,816 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=126.08 vs. limit=8.16
+2024-08-25 04:11:02,936 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=8.293e-01
+2024-08-25 04:11:12,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=1866.6666666666667, ans=0.13
+2024-08-25 04:11:13,105 INFO [train.py:1114] (0/4) Epoch 1, batch 350, loss[loss=1.071, simple_loss=0.7124, pruned_loss=0.8183, ctc_loss=1.052, over 19768.00 frames. ], tot_loss[loss=1.393, simple_loss=0.9657, pruned_loss=1.093, ctc_loss=1.329, over 3189785.81 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 2.0
+2024-08-25 04:11:16,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.99 vs. limit=8.9
+2024-08-25 04:11:29,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=1920.0, ans=0.41000000000000003
+2024-08-25 04:11:32,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=18.24 vs. limit=8.22
+2024-08-25 04:11:39,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=36.51 vs. limit=8.24
+2024-08-25 04:11:43,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=4.789333333333333
+2024-08-25 04:11:44,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=18.98 vs. limit=8.98
+2024-08-25 04:11:44,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=30.43 vs. limit=8.24
+2024-08-25 04:11:46,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=53.49 vs. limit=8.24
+2024-08-25 04:11:46,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=2026.6666666666667, ans=0.8290666666666667
+2024-08-25 04:11:46,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=2026.6666666666667, ans=0.405
+2024-08-25 04:11:51,887 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=21.55 vs. limit=8.26
+2024-08-25 04:11:54,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=2026.6666666666667, ans=0.0544
+2024-08-25 04:11:56,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2026.6666666666667, ans=0.405
+2024-08-25 04:11:57,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.44 vs. limit=9.02
+2024-08-25 04:11:58,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=14.48 vs. limit=8.26
+2024-08-25 04:12:00,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.13 vs. limit=9.06
+2024-08-25 04:12:11,681 INFO [train.py:1114] (0/4) Epoch 1, batch 400, loss[loss=1.178, simple_loss=0.7908, pruned_loss=0.8549, ctc_loss=1.134, over 19500.00 frames. ], tot_loss[loss=1.332, simple_loss=0.916, pruned_loss=1.034, ctc_loss=1.278, over 3341471.66 frames. ], batch size: 54, lr: 4.05e-02, grad_scale: 4.0
+2024-08-25 04:12:13,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=2133.3333333333335, ans=0.4
+2024-08-25 04:12:15,152 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.644e+02 2.144e+02 2.768e+02 4.713e+02, threshold=4.287e+02, percent-clipped=10.0
+2024-08-25 04:12:19,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.73 vs. limit=8.3
+2024-08-25 04:12:27,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=8.32
+2024-08-25 04:12:32,943 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.147e+00
+2024-08-25 04:12:37,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=30.50 vs. limit=8.34
+2024-08-25 04:12:37,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.66 vs. limit=4.896
+2024-08-25 04:12:38,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=17.44 vs. limit=8.34
+2024-08-25 04:12:42,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=21.16 vs. limit=8.34
+2024-08-25 04:12:47,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=2240.0, ans=0.395
+2024-08-25 04:12:54,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=34.55 vs. limit=8.36
+2024-08-25 04:12:56,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=2293.3333333333335, ans=0.08566666666666667
+2024-08-25 04:13:04,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=2346.6666666666665, ans=0.112
+2024-08-25 04:13:04,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.86 vs. limit=5.586666666666667
+2024-08-25 04:13:12,060 INFO [train.py:1114] (0/4) Epoch 1, batch 450, loss[loss=1.107, simple_loss=0.7555, pruned_loss=0.7382, ctc_loss=1.065, over 19617.00 frames. ], tot_loss[loss=1.281, simple_loss=0.8774, pruned_loss=0.9725, ctc_loss=1.231, over 3449265.00 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0
+2024-08-25 04:13:12,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=2400.0, ans=0.27599999999999997
+2024-08-25 04:13:14,003 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.76 vs. limit=9.3
+2024-08-25 04:14:01,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.27 vs. limit=8.42
+2024-08-25 04:14:08,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=2453.3333333333335, ans=0.385
+2024-08-25 04:14:10,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=2453.3333333333335, ans=0.8141333333333334
+2024-08-25 04:14:18,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.72 vs. limit=5.626666666666667
+2024-08-25 04:14:24,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=2560.0, ans=0.2744
+2024-08-25 04:14:24,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.13 vs. limit=6.28
+2024-08-25 04:14:30,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.93 vs. limit=8.46
+2024-08-25 04:14:35,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.16 vs. limit=8.46
+2024-08-25 04:14:40,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=2613.3333333333335, ans=0.8085333333333333
+2024-08-25 04:14:45,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=2613.3333333333335, ans=0.102
+2024-08-25 04:14:53,361 INFO [train.py:1114] (0/4) Epoch 1, batch 500, loss[loss=1.061, simple_loss=0.7373, pruned_loss=0.6441, ctc_loss=1.033, over 19666.00 frames. ], tot_loss[loss=1.218, simple_loss=0.8357, pruned_loss=0.8939, ctc_loss=1.171, over 3545091.46 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 04:14:53,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2666.6666666666665, ans=0.2733333333333333
+2024-08-25 04:14:57,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375
+2024-08-25 04:14:59,588 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.247e+02 2.224e+02 2.884e+02 3.405e+02 7.334e+02, threshold=5.768e+02, percent-clipped=15.0
+2024-08-25 04:15:11,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=2720.0, ans=0.3725
+2024-08-25 04:15:14,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.28 vs. limit=8.52
+2024-08-25 04:15:14,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.52 vs. limit=9.54
+2024-08-25 04:15:17,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=2773.3333333333335, ans=0.8029333333333334
+2024-08-25 04:15:19,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.78 vs. limit=5.109333333333334
+2024-08-25 04:15:20,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=8.54
+2024-08-25 04:15:23,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=8.54
+2024-08-25 04:15:32,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=8.56
+2024-08-25 04:15:46,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.36 vs. limit=8.58
+2024-08-25 04:15:50,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=2880.0, ans=0.035199999999999995
+2024-08-25 04:15:52,589 INFO [train.py:1114] (0/4) Epoch 1, batch 550, loss[loss=0.9895, simple_loss=0.6945, pruned_loss=0.5728, ctc_loss=0.9569, over 19288.00 frames. ], tot_loss[loss=1.152, simple_loss=0.793, pruned_loss=0.8138, ctc_loss=1.109, over 3607779.82 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 04:16:01,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2933.3333333333335, ans=0.27066666666666667
+2024-08-25 04:16:14,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.81 vs. limit=5.76
+2024-08-25 04:16:23,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.48 vs. limit=8.64
+2024-08-25 04:16:40,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3093.3333333333335, ans=0.11333333333333334
+2024-08-25 04:16:57,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.57 vs. limit=5.786666666666667
+2024-08-25 04:16:59,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3200.0, ans=0.09999999999999998
+2024-08-25 04:17:00,426 INFO [train.py:1114] (0/4) Epoch 1, batch 600, loss[loss=0.7956, simple_loss=0.5745, pruned_loss=0.4204, ctc_loss=0.7587, over 19369.00 frames. ], tot_loss[loss=1.081, simple_loss=0.7491, pruned_loss=0.733, ctc_loss=1.041, over 3664209.30 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 04:17:03,770 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.809e+02 3.766e+02 4.633e+02 8.655e+02, threshold=7.532e+02, percent-clipped=12.0
+2024-08-25 04:17:04,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=3200.0, ans=0.027999999999999997
+2024-08-25 04:17:08,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3200.0, ans=0.35
+2024-08-25 04:17:09,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3200.0, ans=0.35
+2024-08-25 04:17:12,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=3253.3333333333335, ans=0.03983333333333333
+2024-08-25 04:17:14,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=9.94
+2024-08-25 04:17:51,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.75 vs. limit=5.826666666666666
+2024-08-25 04:17:58,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.65 vs. limit=9.98
+2024-08-25 04:18:05,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=3360.0, ans=0.3425
+2024-08-25 04:18:26,482 INFO [train.py:1114] (0/4) Epoch 1, batch 650, loss[loss=0.7809, simple_loss=0.5732, pruned_loss=0.3854, ctc_loss=0.7475, over 19771.00 frames. ], tot_loss[loss=1.008, simple_loss=0.7046, pruned_loss=0.6546, ctc_loss=0.9691, over 3714614.08 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 04:18:29,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=3466.6666666666665, ans=0.3375
+2024-08-25 04:18:36,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.30 vs. limit=5.88
+2024-08-25 04:18:52,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.61 vs. limit=8.82
+2024-08-25 04:18:52,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3573.3333333333335, ans=0.26426666666666665
+2024-08-25 04:19:05,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=8.86
+2024-08-25 04:19:07,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.17 vs. limit=6.8133333333333335
+2024-08-25 04:19:08,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.72 vs. limit=10.22
+2024-08-25 04:19:21,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3680.0, ans=0.3275
+2024-08-25 04:19:23,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=3680.0, ans=0.2552
+2024-08-25 04:20:32,374 INFO [train.py:1114] (0/4) Epoch 1, batch 700, loss[loss=0.6967, simple_loss=0.5113, pruned_loss=0.343, ctc_loss=0.6625, over 19712.00 frames. ], tot_loss[loss=0.9455, simple_loss=0.6672, pruned_loss=0.5877, ctc_loss=0.9048, over 3746967.74 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 04:20:35,542 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 2.600e+02 3.309e+02 4.487e+02 1.180e+03, threshold=6.619e+02, percent-clipped=3.0
+2024-08-25 04:20:40,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=3733.3333333333335, ans=0.325
+2024-08-25 04:20:45,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=3786.6666666666665, ans=0.3225
+2024-08-25 04:21:08,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3893.3333333333335, ans=0.3175
+2024-08-25 04:21:14,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=3893.3333333333335, ans=0.3175
+2024-08-25 04:21:19,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3946.6666666666665, ans=0.26053333333333334
+2024-08-25 04:21:23,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.67 vs. limit=5.578666666666667
+2024-08-25 04:21:26,550 INFO [train.py:1114] (0/4) Epoch 1, batch 750, loss[loss=0.6988, simple_loss=0.5283, pruned_loss=0.3246, ctc_loss=0.6379, over 19497.00 frames. ], tot_loss[loss=0.8839, simple_loss=0.6308, pruned_loss=0.5266, ctc_loss=0.8393, over 3772518.90 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0
+2024-08-25 04:21:26,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=4000.0, ans=0.3125
+2024-08-25 04:21:26,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=4000.0, ans=0.04999999999999999
+2024-08-25 04:21:33,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.43 vs. limit=10.5
+2024-08-25 04:21:38,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=4053.3333333333335, ans=0.07466666666666667
+2024-08-25 04:21:55,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.01 vs. limit=9.040000000000001
+2024-08-25 04:22:04,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=4160.0, ans=0.04933333333333333
+2024-08-25 04:22:06,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=4160.0, ans=0.7544
+2024-08-25 04:22:39,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4266.666666666667, ans=0.2573333333333333
+2024-08-25 04:22:40,630 INFO [train.py:1114] (0/4) Epoch 1, batch 800, loss[loss=0.5632, simple_loss=0.4393, pruned_loss=0.2488, ctc_loss=0.4864, over 19798.00 frames. ], tot_loss[loss=0.8271, simple_loss=0.5981, pruned_loss=0.4724, ctc_loss=0.7761, over 3793423.11 frames. ], batch size: 49, lr: 4.49e-02, grad_scale: 16.0
+2024-08-25 04:22:40,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=4266.666666666667, ans=0.7506666666666667
+2024-08-25 04:22:43,867 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.484e+02 3.479e+02 4.307e+02 9.603e+02, threshold=6.957e+02, percent-clipped=4.0
+2024-08-25 04:22:46,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=4266.666666666667, ans=0.3
+2024-08-25 04:22:55,150 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:23:06,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.31 vs. limit=9.14
+2024-08-25 04:23:10,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=4373.333333333333, ans=0.29500000000000004
+2024-08-25 04:23:31,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.31 vs. limit=9.18
+2024-08-25 04:23:42,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.94 vs. limit=9.2
+2024-08-25 04:23:42,668 INFO [train.py:1114] (0/4) Epoch 1, batch 850, loss[loss=0.6521, simple_loss=0.5082, pruned_loss=0.2914, ctc_loss=0.556, over 19652.00 frames. ], tot_loss[loss=0.7775, simple_loss=0.5702, pruned_loss=0.4261, ctc_loss=0.7195, over 3813331.40 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 16.0
+2024-08-25 04:23:47,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=4533.333333333333, ans=0.2875
+2024-08-25 04:23:50,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=4533.333333333333, ans=0.7413333333333334
+2024-08-25 04:24:16,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.32 vs. limit=9.26
+2024-08-25 04:24:21,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.73 vs. limit=9.26
+2024-08-25 04:24:24,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.57 vs. limit=6.173333333333333
+2024-08-25 04:24:24,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=4746.666666666667, ans=0.00983768115942029
+2024-08-25 04:24:31,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.44 vs. limit=6.1866666666666665
+2024-08-25 04:24:34,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4746.666666666667, ans=0.25253333333333333
+2024-08-25 04:24:36,254 INFO [train.py:1114] (0/4) Epoch 1, batch 900, loss[loss=0.5163, simple_loss=0.4195, pruned_loss=0.2131, ctc_loss=0.4178, over 19815.00 frames. ], tot_loss[loss=0.7367, simple_loss=0.5476, pruned_loss=0.3887, ctc_loss=0.6713, over 3817847.63 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 16.0
+2024-08-25 04:24:39,546 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.433e+02 3.203e+02 4.513e+02 7.559e+02, threshold=6.406e+02, percent-clipped=2.0
+2024-08-25 04:24:58,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=11.18
+2024-08-25 04:24:59,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=4906.666666666667, ans=0.0
+2024-08-25 04:25:04,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4906.666666666667, ans=0.25093333333333334
+2024-08-25 04:25:16,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=4960.0, ans=0.26749999999999996
+2024-08-25 04:25:19,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=4960.0, ans=8.1
+2024-08-25 04:25:26,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=5013.333333333333, ans=0.009779710144927536
+2024-08-25 04:25:32,737 INFO [train.py:1114] (0/4) Epoch 1, batch 950, loss[loss=0.505, simple_loss=0.4167, pruned_loss=0.1983, ctc_loss=0.4131, over 19497.00 frames. ], tot_loss[loss=0.7001, simple_loss=0.5275, pruned_loss=0.3561, ctc_loss=0.628, over 3819682.03 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 16.0
+2024-08-25 04:25:38,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5066.666666666667, ans=0.24933333333333332
+2024-08-25 04:25:43,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5120.0, ans=0.0
+2024-08-25 04:25:49,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5120.0, ans=0.26
+2024-08-25 04:25:51,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=5120.0, ans=0.009756521739130435
+2024-08-25 04:25:55,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=5173.333333333333, ans=0.2575
+2024-08-25 04:26:14,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.19 vs. limit=11.42
+2024-08-25 04:26:15,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.02 vs. limit=11.42
+2024-08-25 04:26:16,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=5226.666666666667, ans=0.255
+2024-08-25 04:26:33,406 INFO [train.py:1114] (0/4) Epoch 1, batch 1000, loss[loss=0.4993, simple_loss=0.418, pruned_loss=0.1933, ctc_loss=0.3975, over 19848.00 frames. ], tot_loss[loss=0.6702, simple_loss=0.5117, pruned_loss=0.3299, ctc_loss=0.5911, over 3816190.86 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0
+2024-08-25 04:26:36,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.07 vs. limit=9.5
+2024-08-25 04:26:36,698 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.705e+02 2.226e+02 2.758e+02 3.479e+02 9.619e+02, threshold=5.516e+02, percent-clipped=3.0
+2024-08-25 04:26:39,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=5333.333333333333, ans=0.025
+2024-08-25 04:26:41,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5333.333333333333, ans=0.25
+2024-08-25 04:26:42,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5333.333333333333, ans=0.24666666666666667
+2024-08-25 04:26:44,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5386.666666666667, ans=0.2475
+2024-08-25 04:27:10,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=5493.333333333333, ans=0.2824
+2024-08-25 04:27:22,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=5546.666666666667, ans=0.24
+2024-08-25 04:27:25,673 INFO [train.py:1114] (0/4) Epoch 1, batch 1050, loss[loss=0.5363, simple_loss=0.4486, pruned_loss=0.2094, ctc_loss=0.4279, over 19842.00 frames. ], tot_loss[loss=0.6384, simple_loss=0.4944, pruned_loss=0.3042, ctc_loss=0.5534, over 3821745.37 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 16.0
+2024-08-25 04:27:58,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=5760.0, ans=0.22999999999999998
+2024-08-25 04:28:04,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=5760.0, ans=0.22999999999999998
+2024-08-25 04:28:08,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=5813.333333333333, ans=0.24186666666666667
+2024-08-25 04:28:10,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=5813.333333333333, ans=0.04244444444444445
+2024-08-25 04:28:11,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=5813.333333333333, ans=0.22749999999999998
+2024-08-25 04:28:20,181 INFO [train.py:1114] (0/4) Epoch 1, batch 1100, loss[loss=0.464, simple_loss=0.3976, pruned_loss=0.1749, ctc_loss=0.3582, over 19601.00 frames. ], tot_loss[loss=0.6107, simple_loss=0.4798, pruned_loss=0.2821, ctc_loss=0.5204, over 3828941.86 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0
+2024-08-25 04:28:23,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.625e+02 2.143e+02 2.593e+02 3.421e+02 4.407e+02, threshold=5.186e+02, percent-clipped=0.0
+2024-08-25 04:28:38,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=5920.0, ans=0.009582608695652174
+2024-08-25 04:28:52,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=6026.666666666667, ans=0.6890666666666667
+2024-08-25 04:29:06,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6080.0, ans=0.2392
+2024-08-25 04:29:15,902 INFO [train.py:1114] (0/4) Epoch 1, batch 1150, loss[loss=0.4494, simple_loss=0.395, pruned_loss=0.1631, ctc_loss=0.3361, over 19591.00 frames. ], tot_loss[loss=0.5878, simple_loss=0.4678, pruned_loss=0.2642, ctc_loss=0.4928, over 3828275.11 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 16.0
+2024-08-25 04:29:19,536 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=2.687e-03
+2024-08-25 04:29:20,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=6133.333333333333, ans=8.066666666666666
+2024-08-25 04:32:27,988 INFO [train.py:1114] (0/4) Epoch 1, batch 1200, loss[loss=0.4863, simple_loss=0.4188, pruned_loss=0.1863, ctc_loss=0.3696, over 19841.00 frames. ], tot_loss[loss=0.5705, simple_loss=0.4595, pruned_loss=0.2504, ctc_loss=0.4709, over 3823615.31 frames. ], batch size: 57, lr: 4.47e-02, grad_scale: 32.0
+2024-08-25 04:32:31,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 2.077e+02 2.797e+02 3.799e+02 8.339e+02, threshold=5.594e+02, percent-clipped=11.0
+2024-08-25 04:32:31,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=6400.0, ans=0.2
+2024-08-25 04:32:34,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.05 vs. limit=8.2
+2024-08-25 04:33:03,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=6560.0, ans=0.1925
+2024-08-25 04:33:06,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=6560.0, ans=0.1925
+2024-08-25 04:33:19,303 INFO [train.py:1114] (0/4) Epoch 1, batch 1250, loss[loss=0.5004, simple_loss=0.4324, pruned_loss=0.1936, ctc_loss=0.3746, over 19507.00 frames. ], tot_loss[loss=0.5497, simple_loss=0.4495, pruned_loss=0.235, ctc_loss=0.4456, over 3841835.80 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 32.0
+2024-08-25 04:33:32,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.78 vs. limit=6.688000000000001
+2024-08-25 04:33:44,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=6773.333333333333, ans=0.03844444444444445
+2024-08-25 04:33:52,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6826.666666666667, ans=0.18
+2024-08-25 04:33:53,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=6826.666666666667, ans=0.18
+2024-08-25 04:34:10,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=6880.0, ans=0.1775
+2024-08-25 04:34:10,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=6880.0, ans=9.3
+2024-08-25 04:34:12,504 INFO [train.py:1114] (0/4) Epoch 1, batch 1300, loss[loss=0.5163, simple_loss=0.4384, pruned_loss=0.2045, ctc_loss=0.4015, over 18813.00 frames. ], tot_loss[loss=0.5313, simple_loss=0.4398, pruned_loss=0.2226, ctc_loss=0.4241, over 3845162.98 frames. ], batch size: 76, lr: 4.47e-02, grad_scale: 32.0
+2024-08-25 04:34:14,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.25 vs. limit=12.7
+2024-08-25 04:34:15,550 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.007e+02 2.492e+02 3.309e+02 5.533e+02, threshold=4.985e+02, percent-clipped=0.0
+2024-08-25 04:34:26,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=6986.666666666667, ans=0.1725
+2024-08-25 04:34:32,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=6986.666666666667, ans=0.03755555555555556
+2024-08-25 04:34:50,517 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=12.82
+2024-08-25 04:34:55,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.84 vs. limit=6.773333333333333
+2024-08-25 04:35:11,278 INFO [train.py:1114] (0/4) Epoch 1, batch 1350, loss[loss=0.4564, simple_loss=0.4054, pruned_loss=0.1712, ctc_loss=0.3337, over 19757.00 frames. ], tot_loss[loss=0.5149, simple_loss=0.4319, pruned_loss=0.2114, ctc_loss=0.4049, over 3855296.72 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 32.0
+2024-08-25 04:35:19,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7200.0, ans=0.22799999999999998
+2024-08-25 04:35:22,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=7253.333333333333, ans=0.15999999999999998
+2024-08-25 04:35:24,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=7253.333333333333, ans=0.036444444444444446
+2024-08-25 04:35:28,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=7253.333333333333, ans=0.15999999999999998
+2024-08-25 04:35:36,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7306.666666666667, ans=0.15749999999999997
+2024-08-25 04:35:45,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.73 vs. limit=13.02
+2024-08-25 04:35:57,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.22 vs. limit=8.706666666666667
+2024-08-25 04:36:02,103 INFO [train.py:1114] (0/4) Epoch 1, batch 1400, loss[loss=0.371, simple_loss=0.3386, pruned_loss=0.1334, ctc_loss=0.2681, over 19686.00 frames. ], tot_loss[loss=0.5016, simple_loss=0.4255, pruned_loss=0.2027, ctc_loss=0.3889, over 3862562.08 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 32.0
+2024-08-25 04:36:05,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.980e+02 2.233e+02 2.820e+02 5.701e+02, threshold=4.466e+02, percent-clipped=2.0
+2024-08-25 04:36:05,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=7466.666666666667, ans=0.6386666666666667
+2024-08-25 04:36:27,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=7573.333333333333, ans=0.009223188405797101
+2024-08-25 04:36:50,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=7680.0, ans=0.14
+2024-08-25 04:36:53,008 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:36:54,777 INFO [train.py:1114] (0/4) Epoch 1, batch 1450, loss[loss=0.4547, simple_loss=0.4137, pruned_loss=0.1675, ctc_loss=0.3249, over 19680.00 frames. ], tot_loss[loss=0.4909, simple_loss=0.4209, pruned_loss=0.1956, ctc_loss=0.3763, over 3860643.92 frames. ], batch size: 63, lr: 4.46e-02, grad_scale: 32.0
+2024-08-25 04:37:01,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=7733.333333333333, ans=0.1375
+2024-08-25 04:37:33,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7893.333333333333, ans=0.0
+2024-08-25 04:37:48,632 INFO [train.py:1114] (0/4) Epoch 1, batch 1500, loss[loss=0.4652, simple_loss=0.4177, pruned_loss=0.1744, ctc_loss=0.3449, over 19588.00 frames. ], tot_loss[loss=0.4803, simple_loss=0.4162, pruned_loss=0.1889, ctc_loss=0.3641, over 3860583.50 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 32.0
+2024-08-25 04:37:52,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8000.0, ans=0.0
+2024-08-25 04:37:54,386 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.582e+02 1.987e+02 2.351e+02 3.240e+02 5.717e+02, threshold=4.702e+02, percent-clipped=4.0
+2024-08-25 04:37:58,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=8000.0, ans=0.03333333333333334
+2024-08-25 04:38:13,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=8106.666666666667, ans=0.125
+2024-08-25 04:38:38,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=8160.0, ans=0.09899494936611666
+2024-08-25 04:38:56,143 INFO [train.py:1114] (0/4) Epoch 1, batch 1550, loss[loss=0.4712, simple_loss=0.4275, pruned_loss=0.1759, ctc_loss=0.3455, over 19602.00 frames. ], tot_loss[loss=0.4718, simple_loss=0.4126, pruned_loss=0.1838, ctc_loss=0.3546, over 3844875.38 frames. ], batch size: 60, lr: 4.45e-02, grad_scale: 32.0
+2024-08-25 04:39:07,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=8320.0, ans=0.032
+2024-08-25 04:39:07,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=8320.0, ans=0.125
+2024-08-25 04:39:10,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=4.248
+2024-08-25 04:39:14,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=8320.0, ans=0.025
+2024-08-25 04:39:26,313 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:39:47,214 INFO [train.py:1114] (0/4) Epoch 1, batch 1600, loss[loss=0.4155, simple_loss=0.39, pruned_loss=0.1503, ctc_loss=0.2911, over 19842.00 frames. ], tot_loss[loss=0.4631, simple_loss=0.4087, pruned_loss=0.179, ctc_loss=0.3446, over 3835647.09 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 32.0
+2024-08-25 04:39:52,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 2.044e+02 2.368e+02 2.950e+02 6.795e+02, threshold=4.737e+02, percent-clipped=6.0
+2024-08-25 04:39:56,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=8533.333333333334, ans=0.009014492753623189
+2024-08-25 04:40:05,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=13.94
+2024-08-25 04:40:11,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=8640.0, ans=10.0
+2024-08-25 04:40:30,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=8693.333333333334, ans=0.125
+2024-08-25 04:40:36,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.49 vs. limit=14.059999999999999
+2024-08-25 04:40:40,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=8746.666666666666, ans=0.030222222222222227
+2024-08-25 04:40:42,981 INFO [train.py:1114] (0/4) Epoch 1, batch 1650, loss[loss=0.4339, simple_loss=0.3985, pruned_loss=0.1614, ctc_loss=0.3189, over 19671.00 frames. ], tot_loss[loss=0.4541, simple_loss=0.4047, pruned_loss=0.174, ctc_loss=0.3354, over 3832259.73 frames. ], batch size: 59, lr: 4.45e-02, grad_scale: 32.0
+2024-08-25 04:40:44,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.29 vs. limit=7.52
+2024-08-25 04:41:50,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=8800.0, ans=10.8
+2024-08-25 04:43:05,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8906.666666666666, ans=0.21093333333333333
+2024-08-25 04:43:09,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8960.0, ans=0.125
+2024-08-25 04:43:14,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.77 vs. limit=14.219999999999999
+2024-08-25 04:43:28,616 INFO [train.py:1114] (0/4) Epoch 1, batch 1700, loss[loss=0.3771, simple_loss=0.3534, pruned_loss=0.1377, ctc_loss=0.2741, over 19670.00 frames. ], tot_loss[loss=0.4446, simple_loss=0.4005, pruned_loss=0.1688, ctc_loss=0.3257, over 3846772.86 frames. ], batch size: 46, lr: 4.44e-02, grad_scale: 32.0
+2024-08-25 04:43:31,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.509e+02 1.986e+02 2.386e+02 2.791e+02 4.935e+02, threshold=4.772e+02, percent-clipped=1.0
+2024-08-25 04:43:35,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=9066.666666666666, ans=0.5826666666666667
+2024-08-25 04:44:00,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=9226.666666666666, ans=0.5770666666666667
+2024-08-25 04:44:06,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=9280.0, ans=0.125
+2024-08-25 04:45:26,296 INFO [train.py:1114] (0/4) Epoch 1, batch 1750, loss[loss=0.3362, simple_loss=0.3367, pruned_loss=0.1169, ctc_loss=0.2155, over 19689.00 frames. ], tot_loss[loss=0.436, simple_loss=0.3969, pruned_loss=0.1644, ctc_loss=0.3166, over 3851413.56 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 32.0
+2024-08-25 04:45:31,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=11.0
+2024-08-25 04:45:34,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=9386.666666666666, ans=0.125
+2024-08-25 04:45:40,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=9386.666666666666, ans=0.125
+2024-08-25 04:45:43,475 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:46:00,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=9493.333333333334, ans=0.125
+2024-08-25 04:46:04,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=9546.666666666666, ans=14.66
+2024-08-25 04:46:13,426 INFO [train.py:1114] (0/4) Epoch 1, batch 1800, loss[loss=0.4074, simple_loss=0.3946, pruned_loss=0.1475, ctc_loss=0.281, over 19621.00 frames. ], tot_loss[loss=0.4294, simple_loss=0.3945, pruned_loss=0.161, ctc_loss=0.3101, over 3852102.84 frames. ], batch size: 55, lr: 4.44e-02, grad_scale: 32.0
+2024-08-25 04:46:16,183 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 2.025e+02 2.321e+02 2.784e+02 4.120e+02, threshold=4.643e+02, percent-clipped=0.0
+2024-08-25 04:48:23,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=9813.333333333334, ans=0.025
+2024-08-25 04:48:28,777 INFO [train.py:1114] (0/4) Epoch 1, batch 1850, loss[loss=0.4153, simple_loss=0.4009, pruned_loss=0.1509, ctc_loss=0.2959, over 19592.00 frames. ], tot_loss[loss=0.4223, simple_loss=0.3918, pruned_loss=0.1575, ctc_loss=0.303, over 3856242.32 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 32.0
+2024-08-25 04:48:33,528 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 04:48:34,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=9866.666666666666, ans=0.02555555555555556
+2024-08-25 04:48:35,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=9866.666666666666, ans=0.125
+2024-08-25 04:48:36,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9866.666666666666, ans=0.125
+2024-08-25 04:48:40,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=9920.0, ans=0.025
+2024-08-25 04:48:48,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=9973.333333333334, ans=0.00870144927536232
+2024-08-25 04:49:15,878 INFO [train.py:1114] (0/4) Epoch 1, batch 1900, loss[loss=0.4444, simple_loss=0.4266, pruned_loss=0.1647, ctc_loss=0.3161, over 19637.00 frames. ], tot_loss[loss=0.4179, simple_loss=0.391, pruned_loss=0.1554, ctc_loss=0.2986, over 3862203.38 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 32.0
+2024-08-25 04:49:18,610 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 2.031e+02 2.370e+02 2.878e+02 5.610e+02, threshold=4.739e+02, percent-clipped=2.0
+2024-08-25 04:50:13,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.62 vs. limit=15.219999999999999
+2024-08-25 04:50:14,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=10293.333333333334, ans=0.008631884057971015
+2024-08-25 04:50:15,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=10293.333333333334, ans=0.125
+2024-08-25 04:50:23,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=10346.666666666666, ans=0.02355555555555556
+2024-08-25 04:50:25,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=11.379999999999999
+2024-08-25 04:50:31,844 INFO [train.py:1114] (0/4) Epoch 1, batch 1950, loss[loss=0.3974, simple_loss=0.3868, pruned_loss=0.1462, ctc_loss=0.2812, over 19597.00 frames. ], tot_loss[loss=0.4127, simple_loss=0.3901, pruned_loss=0.1528, ctc_loss=0.294, over 3870430.16 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 32.0
+2024-08-25 04:50:42,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=10453.333333333334, ans=0.035
+2024-08-25 04:51:05,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.47 vs. limit=15.42
+2024-08-25 04:51:16,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10560.0, ans=0.1944
+2024-08-25 04:51:35,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.03 vs. limit=11.48
+2024-08-25 04:52:05,800 INFO [train.py:1114] (0/4) Epoch 1, batch 2000, loss[loss=0.3463, simple_loss=0.3346, pruned_loss=0.1291, ctc_loss=0.2493, over 19630.00 frames. ], tot_loss[loss=0.4094, simple_loss=0.3894, pruned_loss=0.1515, ctc_loss=0.2911, over 3853410.72 frames. ], batch size: 45, lr: 4.42e-02, grad_scale: 32.0
+2024-08-25 04:52:09,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.538e+02 1.861e+02 2.137e+02 2.685e+02 4.799e+02, threshold=4.274e+02, percent-clipped=1.0
+2024-08-25 04:53:45,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=10720.0, ans=0.022000000000000002
+2024-08-25 04:54:18,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=10826.666666666666, ans=0.125
+2024-08-25 04:54:18,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=10826.666666666666, ans=0.125
+2024-08-25 04:54:24,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10880.0, ans=0.125
+2024-08-25 04:54:32,956 INFO [train.py:1114] (0/4) Epoch 1, batch 2050, loss[loss=0.3273, simple_loss=0.338, pruned_loss=0.1142, ctc_loss=0.2205, over 19705.00 frames. ], tot_loss[loss=0.403, simple_loss=0.3866, pruned_loss=0.1487, ctc_loss=0.2855, over 3850116.75 frames. ], batch size: 47, lr: 4.42e-02, grad_scale: 32.0
+2024-08-25 04:54:41,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=8.373333333333335
+2024-08-25 04:54:51,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.90 vs. limit=15.74
+2024-08-25 04:55:21,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=11093.333333333334, ans=0.5117333333333334
+2024-08-25 04:55:31,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=11146.666666666666, ans=0.125
+2024-08-25 04:55:40,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11146.666666666666, ans=0.020222222222222228
+2024-08-25 04:55:42,298 INFO [train.py:1114] (0/4) Epoch 1, batch 2100, loss[loss=0.3654, simple_loss=0.3727, pruned_loss=0.1299, ctc_loss=0.2463, over 19773.00 frames. ], tot_loss[loss=0.3967, simple_loss=0.3838, pruned_loss=0.1458, ctc_loss=0.28, over 3857796.26 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 32.0
+2024-08-25 04:56:36,116 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 1.936e+02 2.214e+02 2.535e+02 3.885e+02, threshold=4.428e+02, percent-clipped=0.0
+2024-08-25 04:56:36,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11200.0, ans=0.188
+2024-08-25 04:57:15,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=8.522666666666666
+2024-08-25 04:57:21,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=11360.0, ans=0.3704
+2024-08-25 04:57:35,985 INFO [train.py:1114] (0/4) Epoch 1, batch 2150, loss[loss=0.3549, simple_loss=0.355, pruned_loss=0.1283, ctc_loss=0.2454, over 19587.00 frames. ], tot_loss[loss=0.3895, simple_loss=0.3802, pruned_loss=0.1423, ctc_loss=0.2734, over 3868506.34 frames. ], batch size: 52, lr: 4.41e-02, grad_scale: 32.0
+2024-08-25 04:58:59,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11520.0, ans=0.125
+2024-08-25 04:59:34,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=11680.0, ans=0.1
+2024-08-25 04:59:34,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=11.879999999999999
+2024-08-25 04:59:36,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.61 vs. limit=4.76
+2024-08-25 04:59:36,673 INFO [train.py:1114] (0/4) Epoch 1, batch 2200, loss[loss=0.3758, simple_loss=0.3766, pruned_loss=0.1341, ctc_loss=0.267, over 19580.00 frames. ], tot_loss[loss=0.3854, simple_loss=0.3786, pruned_loss=0.1404, ctc_loss=0.2697, over 3867344.24 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 32.0
+2024-08-25 04:59:36,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11733.333333333334, ans=0.18266666666666664
+2024-08-25 04:59:40,220 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 1.884e+02 2.153e+02 2.810e+02 4.673e+02, threshold=4.307e+02, percent-clipped=1.0
+2024-08-25 04:59:56,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.61 vs. limit=7.96
+2024-08-25 04:59:56,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.89 vs. limit=8.736
+2024-08-25 05:00:06,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=11893.333333333334, ans=0.017111111111111105
+2024-08-25 05:00:27,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=11946.666666666666, ans=0.025
+2024-08-25 05:00:34,226 INFO [train.py:1114] (0/4) Epoch 1, batch 2250, loss[loss=0.3639, simple_loss=0.3761, pruned_loss=0.1258, ctc_loss=0.25, over 19620.00 frames. ], tot_loss[loss=0.3821, simple_loss=0.3772, pruned_loss=0.1387, ctc_loss=0.2667, over 3868369.72 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 32.0
+2024-08-25 05:00:38,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.07 vs. limit=4.8
+2024-08-25 05:00:41,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=12000.0, ans=0.48000000000000004
+2024-08-25 05:01:15,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=4.816
+2024-08-25 05:01:28,254 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.06 vs. limit=12.059999999999999
+2024-08-25 05:01:37,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12213.333333333334, ans=0.125
+2024-08-25 05:01:43,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12266.666666666666, ans=0.17733333333333334
+2024-08-25 05:01:44,079 INFO [train.py:1114] (0/4) Epoch 1, batch 2300, loss[loss=0.3462, simple_loss=0.3559, pruned_loss=0.1203, ctc_loss=0.2396, over 19478.00 frames. ], tot_loss[loss=0.3787, simple_loss=0.3754, pruned_loss=0.1373, ctc_loss=0.2632, over 3862515.72 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 32.0
+2024-08-25 05:01:47,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.926e+02 2.114e+02 2.507e+02 4.625e+02, threshold=4.228e+02, percent-clipped=3.0
+2024-08-25 05:01:49,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.80 vs. limit=16.7
+2024-08-25 05:02:02,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.89 vs. limit=12.14
+2024-08-25 05:02:11,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12373.333333333334, ans=0.17626666666666668
+2024-08-25 05:02:30,767 INFO [train.py:1114] (0/4) Epoch 1, batch 2350, loss[loss=0.3862, simple_loss=0.3915, pruned_loss=0.1382, ctc_loss=0.261, over 19657.00 frames. ], tot_loss[loss=0.3751, simple_loss=0.3735, pruned_loss=0.1356, ctc_loss=0.2596, over 3864591.13 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 32.0
+2024-08-25 05:04:22,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=12586.666666666666, ans=0.014222222222222226
+2024-08-25 05:04:24,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=12586.666666666666, ans=0.125
+2024-08-25 05:04:32,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.27 vs. limit=16.98
+2024-08-25 05:04:33,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=12640.0, ans=10.0
+2024-08-25 05:04:37,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=12693.333333333334, ans=0.013777777777777778
+2024-08-25 05:04:54,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=12693.333333333334, ans=0.008110144927536232
+2024-08-25 05:05:00,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=12746.666666666666, ans=0.4538666666666667
+2024-08-25 05:05:04,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.78 vs. limit=17.1
+2024-08-25 05:05:05,283 INFO [train.py:1114] (0/4) Epoch 1, batch 2400, loss[loss=0.4107, simple_loss=0.4054, pruned_loss=0.1504, ctc_loss=0.2882, over 19375.00 frames. ], tot_loss[loss=0.3762, simple_loss=0.3755, pruned_loss=0.1359, ctc_loss=0.2598, over 3860114.29 frames. 
], batch size: 67, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:05:08,752 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.948e+02 2.252e+02 2.666e+02 4.870e+02, threshold=4.504e+02, percent-clipped=4.0 +2024-08-25 05:05:10,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=12800.0, ans=0.00808695652173913 +2024-08-25 05:05:16,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=12853.333333333334, ans=0.013111111111111108 +2024-08-25 05:05:24,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-25 05:05:30,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=12906.666666666666, ans=0.4482666666666667 +2024-08-25 05:05:31,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=12906.666666666666, ans=0.4482666666666667 +2024-08-25 05:05:34,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=12960.0, ans=0.125 +2024-08-25 05:05:52,659 INFO [train.py:1114] (0/4) Epoch 1, batch 2450, loss[loss=0.5362, simple_loss=0.456, pruned_loss=0.2287, ctc_loss=0.3977, over 13585.00 frames. ], tot_loss[loss=0.3853, simple_loss=0.3807, pruned_loss=0.1408, ctc_loss=0.2682, over 3739887.67 frames. ], batch size: 140, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:06:33,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=13173.333333333334, ans=0.16826666666666668 +2024-08-25 05:06:35,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.96 vs. limit=17.380000000000003 +2024-08-25 05:06:49,248 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-1.pt +2024-08-25 05:07:49,747 INFO [train.py:1114] (0/4) Epoch 2, batch 0, loss[loss=0.3392, simple_loss=0.3484, pruned_loss=0.1201, ctc_loss=0.2246, over 19411.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3484, pruned_loss=0.1201, ctc_loss=0.2246, over 19411.00 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 05:07:49,749 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 05:09:16,713 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.2886, simple_loss=0.3508, pruned_loss=0.0823, ctc_loss=0.1542, over 944034.00 frames. +2024-08-25 05:09:16,713 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13390MB +2024-08-25 05:09:16,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=13280.0, ans=0.09899494936611666 +2024-08-25 05:09:35,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 1.938e+02 2.191e+02 2.677e+02 6.592e+02, threshold=4.382e+02, percent-clipped=7.0 +2024-08-25 05:09:37,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=13333.333333333334, ans=0.125 +2024-08-25 05:09:40,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.01 vs. 
limit=12.52 +2024-08-25 05:09:43,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 05:09:44,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 05:09:53,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=13440.0, ans=0.05 +2024-08-25 05:09:57,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=13440.0, ans=0.125 +2024-08-25 05:10:10,709 INFO [train.py:1114] (0/4) Epoch 2, batch 50, loss[loss=0.3199, simple_loss=0.3308, pruned_loss=0.1128, ctc_loss=0.2086, over 19712.00 frames. ], tot_loss[loss=0.3744, simple_loss=0.3768, pruned_loss=0.1349, ctc_loss=0.2557, over 843639.75 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:11:06,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=13653.333333333334, ans=0.09899494936611666 +2024-08-25 05:11:17,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=13706.666666666666, ans=0.007889855072463769 +2024-08-25 05:11:23,523 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:11:30,246 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=8.44 +2024-08-25 05:11:47,131 INFO [train.py:1114] (0/4) Epoch 2, batch 100, loss[loss=0.3582, simple_loss=0.3642, pruned_loss=0.1279, ctc_loss=0.2412, over 19726.00 frames. ], tot_loss[loss=0.3719, simple_loss=0.3763, pruned_loss=0.1332, ctc_loss=0.2526, over 1499110.32 frames. ], batch size: 51, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:11:50,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=13813.333333333334, ans=0.025 +2024-08-25 05:11:52,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=13813.333333333334, ans=0.125 +2024-08-25 05:11:56,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=13866.666666666666, ans=0.025 +2024-08-25 05:11:57,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=13866.666666666666, ans=0.41466666666666674 +2024-08-25 05:12:00,807 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.907e+02 2.167e+02 2.481e+02 4.957e+02, threshold=4.333e+02, percent-clipped=1.0 +2024-08-25 05:12:03,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=13866.666666666666, ans=0.05 +2024-08-25 05:12:26,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=13973.333333333334, ans=0.04949747468305833 +2024-08-25 05:12:50,516 INFO [train.py:1114] (0/4) Epoch 2, batch 150, loss[loss=0.3308, simple_loss=0.3388, pruned_loss=0.1166, ctc_loss=0.224, over 19727.00 frames. 
], tot_loss[loss=0.3646, simple_loss=0.3711, pruned_loss=0.1297, ctc_loss=0.2468, over 2027714.20 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:12:56,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=14080.0, ans=0.008 +2024-08-25 05:12:58,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14080.0, ans=0.125 +2024-08-25 05:13:02,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=14133.333333333334, ans=0.125 +2024-08-25 05:14:14,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=14240.0, ans=0.125 +2024-08-25 05:14:35,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=14240.0, ans=0.025 +2024-08-25 05:14:38,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=14240.0, ans=0.125 +2024-08-25 05:14:39,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14293.333333333334, ans=0.125 +2024-08-25 05:14:48,020 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.95 vs. limit=8.573333333333334 +2024-08-25 05:14:50,525 INFO [train.py:1114] (0/4) Epoch 2, batch 200, loss[loss=0.3893, simple_loss=0.3929, pruned_loss=0.1404, ctc_loss=0.2622, over 18298.00 frames. ], tot_loss[loss=0.3592, simple_loss=0.3676, pruned_loss=0.127, ctc_loss=0.2419, over 2435029.92 frames. ], batch size: 85, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:15:14,930 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.847e+02 2.110e+02 2.499e+02 4.235e+02, threshold=4.220e+02, percent-clipped=0.0 +2024-08-25 05:15:49,788 INFO [train.py:1114] (0/4) Epoch 2, batch 250, loss[loss=0.3741, simple_loss=0.3798, pruned_loss=0.1312, ctc_loss=0.2648, over 19414.00 frames. ], tot_loss[loss=0.3562, simple_loss=0.3658, pruned_loss=0.1255, ctc_loss=0.239, over 2754771.79 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:16:12,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.03 vs. limit=18.46 +2024-08-25 05:16:16,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=14613.333333333334, ans=0.125 +2024-08-25 05:16:30,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=14720.0, ans=0.007669565217391304 +2024-08-25 05:16:39,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=14773.333333333334, ans=0.125 +2024-08-25 05:19:37,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=14880.0, ans=0.125 +2024-08-25 05:19:37,929 INFO [train.py:1114] (0/4) Epoch 2, batch 300, loss[loss=0.3705, simple_loss=0.3761, pruned_loss=0.1341, ctc_loss=0.2415, over 19540.00 frames. ], tot_loss[loss=0.3542, simple_loss=0.3645, pruned_loss=0.1246, ctc_loss=0.2369, over 2999480.96 frames. 
], batch size: 61, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:19:48,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=14880.0, ans=0.025 +2024-08-25 05:19:56,623 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.858e+02 2.099e+02 2.398e+02 3.801e+02, threshold=4.198e+02, percent-clipped=0.0 +2024-08-25 05:19:59,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=13.1 +2024-08-25 05:20:10,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=14986.666666666666, ans=0.125 +2024-08-25 05:20:23,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=15040.0, ans=0.025 +2024-08-25 05:20:54,632 INFO [train.py:1114] (0/4) Epoch 2, batch 350, loss[loss=0.3034, simple_loss=0.3292, pruned_loss=0.1007, ctc_loss=0.1903, over 19747.00 frames. ], tot_loss[loss=0.353, simple_loss=0.3643, pruned_loss=0.1238, ctc_loss=0.2354, over 3189626.37 frames. ], batch size: 48, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:21:01,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=15146.666666666666, ans=0.14853333333333335 +2024-08-25 05:21:06,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15200.0, ans=0.125 +2024-08-25 05:21:13,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=15200.0, ans=0.003333333333333334 +2024-08-25 05:21:14,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.30 vs. limit=13.2 +2024-08-25 05:21:16,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15253.333333333334, ans=0.14746666666666666 +2024-08-25 05:21:22,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-25 05:21:37,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=15306.666666666666, ans=0.025 +2024-08-25 05:21:51,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=5.304 +2024-08-25 05:22:06,916 INFO [train.py:1114] (0/4) Epoch 2, batch 400, loss[loss=0.3104, simple_loss=0.3434, pruned_loss=0.1001, ctc_loss=0.1934, over 19505.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3619, pruned_loss=0.1218, ctc_loss=0.2319, over 3341816.65 frames. 
], batch size: 54, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:22:17,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15466.666666666666, ans=0.125 +2024-08-25 05:22:20,578 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.895e+02 2.189e+02 2.528e+02 4.758e+02, threshold=4.379e+02, percent-clipped=2.0 +2024-08-25 05:22:22,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15466.666666666666, ans=0.125 +2024-08-25 05:22:36,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=15520.0, ans=0.1 +2024-08-25 05:22:39,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=15573.333333333334, ans=0.125 +2024-08-25 05:23:46,264 INFO [train.py:1114] (0/4) Epoch 2, batch 450, loss[loss=0.3295, simple_loss=0.3649, pruned_loss=0.1044, ctc_loss=0.213, over 19618.00 frames. ], tot_loss[loss=0.3494, simple_loss=0.3619, pruned_loss=0.122, ctc_loss=0.232, over 3449617.60 frames. ], batch size: 55, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:23:49,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=15680.0, ans=0.125 +2024-08-25 05:23:59,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=15733.333333333334, ans=0.05 +2024-08-25 05:24:04,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.72 vs. limit=12.893333333333333 +2024-08-25 05:24:08,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=15786.666666666666, ans=0.0008888888888888904 +2024-08-25 05:24:16,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15840.0, ans=0.1416 +2024-08-25 05:24:22,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.58 vs. limit=8.96 +2024-08-25 05:24:23,442 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.13 vs. limit=19.380000000000003 +2024-08-25 05:24:24,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15893.333333333334, ans=0.14106666666666667 +2024-08-25 05:24:28,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=15893.333333333334, ans=0.125 +2024-08-25 05:24:37,913 INFO [train.py:1114] (0/4) Epoch 2, batch 500, loss[loss=0.3392, simple_loss=0.3713, pruned_loss=0.1098, ctc_loss=0.2186, over 19670.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3595, pruned_loss=0.12, ctc_loss=0.2283, over 3545631.43 frames. 
], batch size: 63, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:24:38,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=15946.666666666666, ans=0.125 +2024-08-25 05:26:01,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=16000.0, ans=0.07 +2024-08-25 05:26:05,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.778e+02 2.035e+02 2.349e+02 4.286e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-25 05:26:09,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=16000.0, ans=0.125 +2024-08-25 05:26:12,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16053.333333333334, ans=0.125 +2024-08-25 05:26:28,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=16106.666666666666, ans=0.125 +2024-08-25 05:26:34,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=16160.0, ans=0.007356521739130435 +2024-08-25 05:26:34,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=16160.0, ans=0.125 +2024-08-25 05:26:51,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=16160.0, ans=0.07 +2024-08-25 05:26:53,739 INFO [train.py:1114] (0/4) Epoch 2, batch 550, loss[loss=0.3739, simple_loss=0.3804, pruned_loss=0.1326, ctc_loss=0.2555, over 19306.00 frames. ], tot_loss[loss=0.3451, simple_loss=0.3594, pruned_loss=0.1199, ctc_loss=0.2277, over 3608461.94 frames. ], batch size: 71, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:27:03,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=16213.333333333334, ans=0.0 +2024-08-25 05:27:06,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=16213.333333333334, ans=0.13786666666666667 +2024-08-25 05:27:12,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=16266.666666666666, ans=0.125 +2024-08-25 05:27:22,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16320.0, ans=0.125 +2024-08-25 05:28:06,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=13.66 +2024-08-25 05:28:14,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 05:28:16,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 05:28:19,439 INFO [train.py:1114] (0/4) Epoch 2, batch 600, loss[loss=0.3689, simple_loss=0.3801, pruned_loss=0.1305, ctc_loss=0.242, over 19382.00 frames. ], tot_loss[loss=0.3447, simple_loss=0.3596, pruned_loss=0.1196, ctc_loss=0.2267, over 3666275.06 frames. 
], batch size: 67, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:28:26,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=13.68 +2024-08-25 05:28:30,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.91 vs. limit=19.9 +2024-08-25 05:28:31,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.88 vs. limit=9.133333333333333 +2024-08-25 05:28:34,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.917e+02 2.183e+02 2.770e+02 8.189e+02, threshold=4.366e+02, percent-clipped=5.0 +2024-08-25 05:28:36,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=16533.333333333332, ans=0.125 +2024-08-25 05:28:41,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.07 vs. limit=13.72 +2024-08-25 05:29:01,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=16640.0, ans=0.0 +2024-08-25 05:29:10,653 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:29:14,158 INFO [train.py:1114] (0/4) Epoch 2, batch 650, loss[loss=0.3172, simple_loss=0.3526, pruned_loss=0.1014, ctc_loss=0.1971, over 19784.00 frames. ], tot_loss[loss=0.3422, simple_loss=0.3579, pruned_loss=0.1184, ctc_loss=0.2243, over 3716432.68 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:29:14,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=16746.666666666668, ans=0.0 +2024-08-25 05:31:11,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.34 vs. limit=13.373333333333335 +2024-08-25 05:31:26,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=16800.0, ans=0.0 +2024-08-25 05:31:59,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=16960.0, ans=0.0 +2024-08-25 05:32:01,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=13.86 +2024-08-25 05:32:02,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=16960.0, ans=0.3064 +2024-08-25 05:32:06,446 INFO [train.py:1114] (0/4) Epoch 2, batch 700, loss[loss=0.3405, simple_loss=0.3549, pruned_loss=0.1198, ctc_loss=0.2163, over 19706.00 frames. ], tot_loss[loss=0.3416, simple_loss=0.3579, pruned_loss=0.1179, ctc_loss=0.2233, over 3747743.13 frames. 
], batch size: 51, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:32:36,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=17013.333333333332, ans=0.0 +2024-08-25 05:32:36,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=17013.333333333332, ans=0.30453333333333343 +2024-08-25 05:32:40,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=17013.333333333332, ans=0.0 +2024-08-25 05:32:41,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.49 vs. limit=20.259999999999998 +2024-08-25 05:32:47,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=17066.666666666668, ans=0.125 +2024-08-25 05:32:47,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.759e+02 2.005e+02 2.359e+02 5.033e+02, threshold=4.011e+02, percent-clipped=2.0 +2024-08-25 05:33:00,043 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:33:00,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=17120.0, ans=0.007147826086956522 +2024-08-25 05:33:04,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=17120.0, ans=0.30080000000000007 +2024-08-25 05:33:27,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=17280.0, ans=0.125 +2024-08-25 05:33:28,055 INFO [train.py:1114] (0/4) Epoch 2, batch 750, loss[loss=0.3213, simple_loss=0.353, pruned_loss=0.1042, ctc_loss=0.2029, over 19521.00 frames. ], tot_loss[loss=0.3401, simple_loss=0.3572, pruned_loss=0.1172, ctc_loss=0.2216, over 3774462.36 frames. ], batch size: 54, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:35:17,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=13.98 +2024-08-25 05:37:34,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17493.333333333332, ans=0.1250666666666667 +2024-08-25 05:37:40,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=15.67 vs. limit=14.08 +2024-08-25 05:37:40,887 INFO [train.py:1114] (0/4) Epoch 2, batch 800, loss[loss=0.2964, simple_loss=0.318, pruned_loss=0.09779, ctc_loss=0.1978, over 19396.00 frames. ], tot_loss[loss=0.3391, simple_loss=0.3565, pruned_loss=0.1167, ctc_loss=0.2207, over 3795728.73 frames. 
], batch size: 48, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 05:37:43,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=17546.666666666668, ans=0.2858666666666667 +2024-08-25 05:38:03,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=17600.0, ans=0.125 +2024-08-25 05:38:06,533 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.845e+02 2.130e+02 2.517e+02 4.310e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 05:38:10,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17600.0, ans=0.124 +2024-08-25 05:38:25,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.44 vs. limit=20.740000000000002 +2024-08-25 05:38:48,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.33 vs. limit=14.18 +2024-08-25 05:38:48,685 INFO [train.py:1114] (0/4) Epoch 2, batch 850, loss[loss=0.3722, simple_loss=0.3862, pruned_loss=0.1308, ctc_loss=0.2415, over 19654.00 frames. ], tot_loss[loss=0.3377, simple_loss=0.3558, pruned_loss=0.116, ctc_loss=0.219, over 3814410.93 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 16.0 +2024-08-25 05:38:54,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.15 vs. limit=13.906666666666666 +2024-08-25 05:39:00,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=17813.333333333332, ans=0.125 +2024-08-25 05:39:05,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17866.666666666668, ans=0.12133333333333332 +2024-08-25 05:39:14,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=17920.0, ans=0.0 +2024-08-25 05:39:22,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=17920.0, ans=0.0 +2024-08-25 05:39:41,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=18026.666666666668, ans=0.0 +2024-08-25 05:39:58,483 INFO [train.py:1114] (0/4) Epoch 2, batch 900, loss[loss=0.2924, simple_loss=0.3204, pruned_loss=0.09629, ctc_loss=0.1793, over 19409.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3555, pruned_loss=0.1158, ctc_loss=0.2184, over 3817568.43 frames. 
], batch size: 48, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:40:19,546 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.852e+02 2.189e+02 2.703e+02 9.878e+02, threshold=4.378e+02, percent-clipped=3.0 +2024-08-25 05:40:58,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18240.0, ans=0.125 +2024-08-25 05:41:08,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18293.333333333332, ans=0.0 +2024-08-25 05:41:08,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=18293.333333333332, ans=0.25973333333333337 +2024-08-25 05:41:14,124 INFO [train.py:1114] (0/4) Epoch 2, batch 950, loss[loss=0.2953, simple_loss=0.3183, pruned_loss=0.09982, ctc_loss=0.1817, over 19504.00 frames. ], tot_loss[loss=0.3376, simple_loss=0.3556, pruned_loss=0.1161, ctc_loss=0.2186, over 3819324.98 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:41:17,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=18346.666666666668, ans=0.025 +2024-08-25 05:41:18,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18346.666666666668, ans=0.11653333333333332 +2024-08-25 05:41:30,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.55 vs. limit=21.3 +2024-08-25 05:41:34,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=18453.333333333332, ans=0.125 +2024-08-25 05:41:45,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.47 vs. limit=14.440000000000001 +2024-08-25 05:41:53,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=18506.666666666668, ans=0.125 +2024-08-25 05:42:01,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=18560.0, ans=0.006834782608695652 +2024-08-25 05:42:06,452 INFO [train.py:1114] (0/4) Epoch 2, batch 1000, loss[loss=0.3003, simple_loss=0.3288, pruned_loss=0.09994, ctc_loss=0.1799, over 19845.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3556, pruned_loss=0.1159, ctc_loss=0.218, over 3815946.82 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:42:13,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=18613.333333333332, ans=0.125 +2024-08-25 05:42:37,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.59 vs. 
limit=14.5 +2024-08-25 05:42:41,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.839e+02 2.030e+02 2.416e+02 3.488e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-25 05:42:57,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=18773.333333333332, ans=0.125 +2024-08-25 05:43:06,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=18826.666666666668, ans=0.125 +2024-08-25 05:43:16,616 INFO [train.py:1114] (0/4) Epoch 2, batch 1050, loss[loss=0.3204, simple_loss=0.3479, pruned_loss=0.1044, ctc_loss=0.21, over 19838.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3546, pruned_loss=0.1153, ctc_loss=0.2168, over 3823718.17 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:43:17,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=18880.0, ans=0.23919999999999997 +2024-08-25 05:43:58,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18986.666666666668, ans=0.1101333333333333 +2024-08-25 05:44:00,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.47 vs. limit=14.64 +2024-08-25 05:44:05,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=19040.0, ans=0.0 +2024-08-25 05:44:14,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=19093.333333333332, ans=0.23173333333333346 +2024-08-25 05:44:23,169 INFO [train.py:1114] (0/4) Epoch 2, batch 1100, loss[loss=0.3094, simple_loss=0.3335, pruned_loss=0.1026, ctc_loss=0.2003, over 19565.00 frames. ], tot_loss[loss=0.3345, simple_loss=0.3537, pruned_loss=0.1146, ctc_loss=0.2156, over 3831878.62 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:44:40,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19146.666666666668, ans=0.10853333333333334 +2024-08-25 05:44:43,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19200.0, ans=0.10800000000000001 +2024-08-25 05:44:48,508 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.777e+02 2.009e+02 2.448e+02 3.967e+02, threshold=4.019e+02, percent-clipped=0.0 +2024-08-25 05:44:53,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.95 vs. limit=21.939999999999998 +2024-08-25 05:44:56,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.24 vs. limit=14.719999999999999 +2024-08-25 05:45:16,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19360.0, ans=0.10640000000000002 +2024-08-25 05:45:22,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.29 vs. 
limit=14.780000000000001 +2024-08-25 05:45:31,302 INFO [train.py:1114] (0/4) Epoch 2, batch 1150, loss[loss=0.3358, simple_loss=0.3517, pruned_loss=0.1159, ctc_loss=0.2204, over 19587.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3536, pruned_loss=0.1144, ctc_loss=0.2148, over 3831567.54 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:45:44,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19466.666666666668, ans=0.10533333333333333 +2024-08-25 05:47:34,827 INFO [train.py:1114] (0/4) Epoch 2, batch 1200, loss[loss=0.336, simple_loss=0.3642, pruned_loss=0.111, ctc_loss=0.2145, over 19835.00 frames. ], tot_loss[loss=0.3354, simple_loss=0.3545, pruned_loss=0.115, ctc_loss=0.2155, over 3827106.49 frames. ], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 05:47:49,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=3.60 vs. limit=14.9 +2024-08-25 05:47:50,316 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.798e+02 2.208e+02 2.852e+02 1.698e+03, threshold=4.415e+02, percent-clipped=3.0 +2024-08-25 05:48:07,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=19786.666666666668, ans=0.125 +2024-08-25 05:48:07,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.21 vs. limit=14.893333333333334 +2024-08-25 05:48:14,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=19840.0, ans=0.2056 +2024-08-25 05:48:31,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=19893.333333333332, ans=0.125 +2024-08-25 05:48:37,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19893.333333333332, ans=0.1010666666666667 +2024-08-25 05:48:40,334 INFO [train.py:1114] (0/4) Epoch 2, batch 1250, loss[loss=0.3397, simple_loss=0.3645, pruned_loss=0.1136, ctc_loss=0.2193, over 19537.00 frames. ], tot_loss[loss=0.3337, simple_loss=0.354, pruned_loss=0.114, ctc_loss=0.2135, over 3844123.46 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:48:49,492 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:49:02,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.86 vs. 
limit=15.0 +2024-08-25 05:49:06,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20000.0, ans=0.125 +2024-08-25 05:49:08,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=20053.333333333332, ans=0.125 +2024-08-25 05:49:14,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20053.333333333332, ans=0.125 +2024-08-25 05:49:16,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=20053.333333333332, ans=0.0 +2024-08-25 05:49:21,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=20106.666666666668, ans=0.00649855072463768 +2024-08-25 05:49:28,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=20160.0, ans=0.025 +2024-08-25 05:49:37,278 INFO [train.py:1114] (0/4) Epoch 2, batch 1300, loss[loss=0.3624, simple_loss=0.3833, pruned_loss=0.1221, ctc_loss=0.2434, over 18852.00 frames. ], tot_loss[loss=0.3322, simple_loss=0.3531, pruned_loss=0.1132, ctc_loss=0.2125, over 3845391.14 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:49:40,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=20213.333333333332, ans=0.0 +2024-08-25 05:49:42,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=20213.333333333332, ans=0.125 +2024-08-25 05:49:52,762 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.771e+02 1.898e+02 2.175e+02 3.765e+02, threshold=3.796e+02, percent-clipped=0.0 +2024-08-25 05:49:59,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=20320.0, ans=0.125 +2024-08-25 05:50:00,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=20320.0, ans=0.125 +2024-08-25 05:50:19,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=20426.666666666668, ans=0.125 +2024-08-25 05:50:25,289 INFO [train.py:1114] (0/4) Epoch 2, batch 1350, loss[loss=0.3421, simple_loss=0.3597, pruned_loss=0.1176, ctc_loss=0.2234, over 19753.00 frames. ], tot_loss[loss=0.3306, simple_loss=0.3522, pruned_loss=0.1123, ctc_loss=0.211, over 3856086.37 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:50:47,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. limit=6.0 +2024-08-25 05:50:55,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=20586.666666666668, ans=0.006394202898550725 +2024-08-25 05:51:19,083 INFO [train.py:1114] (0/4) Epoch 2, batch 1400, loss[loss=0.2681, simple_loss=0.2916, pruned_loss=0.0897, ctc_loss=0.1632, over 19657.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3518, pruned_loss=0.1121, ctc_loss=0.2102, over 3863322.05 frames. 
], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:51:32,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=20800.0, ans=0.125 +2024-08-25 05:51:34,333 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 1.933e+02 2.205e+02 2.519e+02 3.569e+02, threshold=4.410e+02, percent-clipped=0.0 +2024-08-25 05:51:55,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=20906.666666666668, ans=0.2 +2024-08-25 05:51:57,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=20906.666666666668, ans=0.0 +2024-08-25 05:51:59,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.64 vs. limit=15.0 +2024-08-25 05:52:00,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20960.0, ans=0.125 +2024-08-25 05:52:09,361 INFO [train.py:1114] (0/4) Epoch 2, batch 1450, loss[loss=0.3389, simple_loss=0.3675, pruned_loss=0.1122, ctc_loss=0.2145, over 19700.00 frames. ], tot_loss[loss=0.3298, simple_loss=0.352, pruned_loss=0.1119, ctc_loss=0.2097, over 3861891.40 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:52:09,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=15.0 +2024-08-25 05:52:11,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=21013.333333333332, ans=0.2 +2024-08-25 05:52:18,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=21066.666666666668, ans=0.125 +2024-08-25 05:52:19,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=21066.666666666668, ans=0.025 +2024-08-25 05:52:25,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21066.666666666668, ans=0.1 +2024-08-25 05:52:31,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21120.0, ans=0.125 +2024-08-25 05:52:43,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=21173.333333333332, ans=0.125 +2024-08-25 05:52:43,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21173.333333333332, ans=0.0 +2024-08-25 05:52:56,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21280.0, ans=0.1 +2024-08-25 05:52:56,996 INFO [train.py:1114] (0/4) Epoch 2, batch 1500, loss[loss=0.3165, simple_loss=0.3589, pruned_loss=0.09859, ctc_loss=0.1926, over 19567.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3517, pruned_loss=0.1112, ctc_loss=0.2082, over 3860597.06 frames. 
], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:53:04,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=21280.0, ans=0.0 +2024-08-25 05:53:05,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21280.0, ans=0.125 +2024-08-25 05:53:05,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21280.0, ans=0.125 +2024-08-25 05:53:05,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=21280.0, ans=0.125 +2024-08-25 05:53:06,942 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-4000.pt +2024-08-25 05:53:17,231 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.832e+02 2.087e+02 2.558e+02 5.212e+02, threshold=4.175e+02, percent-clipped=3.0 +2024-08-25 05:53:24,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=21386.666666666668, ans=0.2 +2024-08-25 05:53:25,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=21386.666666666668, ans=0.125 +2024-08-25 05:53:28,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=21386.666666666668, ans=0.125 +2024-08-25 05:53:42,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=21440.0, ans=0.2 +2024-08-25 05:54:05,948 INFO [train.py:1114] (0/4) Epoch 2, batch 1550, loss[loss=0.3657, simple_loss=0.3812, pruned_loss=0.128, ctc_loss=0.2355, over 19590.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3516, pruned_loss=0.1113, ctc_loss=0.2081, over 3846009.12 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0 +2024-08-25 05:54:07,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21546.666666666668, ans=0.125 +2024-08-25 05:54:08,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21546.666666666668, ans=0.1 +2024-08-25 05:54:10,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=21546.666666666668, ans=0.006185507246376811 +2024-08-25 05:54:40,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=21653.333333333332, ans=0.125 +2024-08-25 05:54:41,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21653.333333333332, ans=0.125 +2024-08-25 05:54:47,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21653.333333333332, ans=0.0 +2024-08-25 05:54:49,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=21706.666666666668, ans=0.125 +2024-08-25 05:54:54,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=9.78 vs. 
limit=12.0 +2024-08-25 05:55:11,750 INFO [train.py:1114] (0/4) Epoch 2, batch 1600, loss[loss=0.3436, simple_loss=0.3632, pruned_loss=0.1192, ctc_loss=0.2143, over 19845.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3508, pruned_loss=0.1108, ctc_loss=0.2074, over 3836526.19 frames. ], batch size: 57, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 05:55:24,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=21866.666666666668, ans=0.2 +2024-08-25 05:55:32,422 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.812e+02 2.122e+02 2.604e+02 4.336e+02, threshold=4.244e+02, percent-clipped=2.0 +2024-08-25 05:55:37,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=21920.0, ans=0.006104347826086956 +2024-08-25 05:55:44,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21920.0, ans=0.125 +2024-08-25 05:56:00,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21973.333333333332, ans=0.125 +2024-08-25 05:56:01,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=22026.666666666668, ans=0.125 +2024-08-25 05:56:02,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=22026.666666666668, ans=0.1 +2024-08-25 05:56:13,278 INFO [train.py:1114] (0/4) Epoch 2, batch 1650, loss[loss=0.354, simple_loss=0.3733, pruned_loss=0.1219, ctc_loss=0.2273, over 19663.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3504, pruned_loss=0.1107, ctc_loss=0.2074, over 3833014.04 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 16.0 +2024-08-25 05:56:13,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22080.0, ans=0.1 +2024-08-25 05:56:28,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=22133.333333333332, ans=0.125 +2024-08-25 05:56:38,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=12.0 +2024-08-25 05:56:39,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=22186.666666666668, ans=0.125 +2024-08-25 05:56:51,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22240.0, ans=0.125 +2024-08-25 05:56:53,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=22240.0, ans=0.125 +2024-08-25 05:57:05,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.96 vs. limit=15.0 +2024-08-25 05:57:12,875 INFO [train.py:1114] (0/4) Epoch 2, batch 1700, loss[loss=0.2708, simple_loss=0.3002, pruned_loss=0.08801, ctc_loss=0.1637, over 19663.00 frames. ], tot_loss[loss=0.3262, simple_loss=0.3498, pruned_loss=0.1101, ctc_loss=0.2061, over 3847569.20 frames. 
], batch size: 46, lr: 4.12e-02, grad_scale: 16.0 +2024-08-25 05:57:20,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=22346.666666666668, ans=0.125 +2024-08-25 05:57:29,330 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.791e+02 2.005e+02 2.338e+02 3.555e+02, threshold=4.010e+02, percent-clipped=0.0 +2024-08-25 05:57:33,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22453.333333333332, ans=0.1 +2024-08-25 05:57:36,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-08-25 05:57:41,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22453.333333333332, ans=0.1 +2024-08-25 05:58:34,068 INFO [train.py:1114] (0/4) Epoch 2, batch 1750, loss[loss=0.2548, simple_loss=0.2962, pruned_loss=0.07721, ctc_loss=0.1474, over 19718.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3481, pruned_loss=0.1091, ctc_loss=0.2043, over 3852085.59 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 16.0 +2024-08-25 05:58:38,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=22613.333333333332, ans=0.2 +2024-08-25 05:58:39,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.71 vs. limit=22.5 +2024-08-25 05:58:40,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22613.333333333332, ans=0.1 +2024-08-25 05:58:53,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=22720.0, ans=0.0 +2024-08-25 05:59:18,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.51 vs. limit=10.0 +2024-08-25 05:59:24,711 INFO [train.py:1114] (0/4) Epoch 2, batch 1800, loss[loss=0.2974, simple_loss=0.3385, pruned_loss=0.09135, ctc_loss=0.1839, over 19612.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.348, pruned_loss=0.1089, ctc_loss=0.204, over 3853623.10 frames. ], batch size: 55, lr: 4.11e-02, grad_scale: 16.0 +2024-08-25 05:59:24,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=22880.0, ans=0.2 +2024-08-25 05:59:26,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.09 vs. 
limit=15.0 +2024-08-25 05:59:26,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=22880.0, ans=0.005895652173913043 +2024-08-25 05:59:28,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22880.0, ans=0.125 +2024-08-25 05:59:39,812 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.812e+02 2.002e+02 2.312e+02 3.839e+02, threshold=4.004e+02, percent-clipped=0.0 +2024-08-25 05:59:56,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=23040.0, ans=0.125 +2024-08-25 06:00:03,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23093.333333333332, ans=0.1 +2024-08-25 06:00:10,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-08-25 06:00:12,413 INFO [train.py:1114] (0/4) Epoch 2, batch 1850, loss[loss=0.3537, simple_loss=0.3712, pruned_loss=0.1229, ctc_loss=0.226, over 19567.00 frames. ], tot_loss[loss=0.3222, simple_loss=0.3469, pruned_loss=0.1082, ctc_loss=0.2027, over 3856613.79 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 16.0 +2024-08-25 06:00:40,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=23306.666666666668, ans=0.125 +2024-08-25 06:00:52,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=23360.0, ans=0.005791304347826087 +2024-08-25 06:00:59,790 INFO [train.py:1114] (0/4) Epoch 2, batch 1900, loss[loss=0.3074, simple_loss=0.3504, pruned_loss=0.09434, ctc_loss=0.1893, over 19660.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3474, pruned_loss=0.1082, ctc_loss=0.2024, over 3861488.72 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 16.0 +2024-08-25 06:01:10,092 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:01:18,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=23466.666666666668, ans=0.125 +2024-08-25 06:01:18,891 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.814e+02 2.067e+02 2.451e+02 4.716e+02, threshold=4.135e+02, percent-clipped=1.0 +2024-08-25 06:01:38,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0 +2024-08-25 06:01:51,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0 +2024-08-25 06:01:52,059 INFO [train.py:1114] (0/4) Epoch 2, batch 1950, loss[loss=0.2969, simple_loss=0.3283, pruned_loss=0.09661, ctc_loss=0.1806, over 19592.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3478, pruned_loss=0.1077, ctc_loss=0.2015, over 3870074.09 frames. 
], batch size: 52, lr: 4.09e-02, grad_scale: 16.0 +2024-08-25 06:02:17,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23786.666666666668, ans=0.125 +2024-08-25 06:02:32,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=23893.333333333332, ans=0.2 +2024-08-25 06:02:34,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=23893.333333333332, ans=0.125 +2024-08-25 06:02:40,712 INFO [train.py:1114] (0/4) Epoch 2, batch 2000, loss[loss=0.3001, simple_loss=0.3203, pruned_loss=0.1019, ctc_loss=0.19, over 19656.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3492, pruned_loss=0.1088, ctc_loss=0.2031, over 3855218.47 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0 +2024-08-25 06:02:46,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23946.666666666668, ans=0.125 +2024-08-25 06:02:57,863 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.781e+02 1.996e+02 2.377e+02 5.355e+02, threshold=3.992e+02, percent-clipped=1.0 +2024-08-25 06:03:00,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24053.333333333332, ans=0.125 +2024-08-25 06:03:10,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=15.0 +2024-08-25 06:03:14,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.70 vs. limit=15.0 +2024-08-25 06:03:18,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=24160.0, ans=10.0 +2024-08-25 06:03:29,343 INFO [train.py:1114] (0/4) Epoch 2, batch 2050, loss[loss=0.2627, simple_loss=0.3001, pruned_loss=0.08168, ctc_loss=0.1547, over 19714.00 frames. ], tot_loss[loss=0.3227, simple_loss=0.3481, pruned_loss=0.1082, ctc_loss=0.2023, over 3852140.17 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 06:03:49,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=24320.0, ans=0.0 +2024-08-25 06:03:54,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.46 vs. limit=15.0 +2024-08-25 06:04:13,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=24480.0, ans=0.125 +2024-08-25 06:04:17,643 INFO [train.py:1114] (0/4) Epoch 2, batch 2100, loss[loss=0.3036, simple_loss=0.3348, pruned_loss=0.09944, ctc_loss=0.1838, over 19792.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3464, pruned_loss=0.1067, ctc_loss=0.1997, over 3859095.84 frames. 
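Most of the `INFO [scaling.py:214]` traffic above prints a `ScheduledFloat`: a regularization constant (dropout probability, skip rate, balancer probability, bypass scale, ...) whose current value `ans` is looked up from the running `batch_count`. A minimal sketch of such a schedule as a piecewise-linear function of batch count follows; the breakpoints are invented for illustration and are not the recipe's actual settings.

```python
# Piecewise-linear schedule keyed on batch count, the idea behind the
# `ScheduledFloat: name=..., batch_count=..., ans=...` log lines.
class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count
        self.points = points

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)
        return pts[-1][1]


# e.g. a dropout that anneals from 0.3 to 0.1 over the first 20k batches:
dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(23786.0))  # 0.1 once past the last breakpoint
```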
], batch size: 54, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 06:04:33,037 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.823e+02 2.012e+02 2.259e+02 3.531e+02, threshold=4.024e+02, percent-clipped=0.0 +2024-08-25 06:04:35,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24586.666666666668, ans=0.1 +2024-08-25 06:04:54,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=24693.333333333332, ans=0.09899494936611666 +2024-08-25 06:04:54,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.07 vs. limit=12.0 +2024-08-25 06:05:02,142 INFO [train.py:1114] (0/4) Epoch 2, batch 2150, loss[loss=0.2863, simple_loss=0.3261, pruned_loss=0.09005, ctc_loss=0.1659, over 19582.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3452, pruned_loss=0.106, ctc_loss=0.1982, over 3869528.29 frames. ], batch size: 52, lr: 4.07e-02, grad_scale: 32.0 +2024-08-25 06:05:12,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=24746.666666666668, ans=0.025 +2024-08-25 06:05:15,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.52 vs. limit=10.0 +2024-08-25 06:05:20,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=10.08 vs. limit=15.0 +2024-08-25 06:05:24,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24853.333333333332, ans=0.1 +2024-08-25 06:05:29,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=24853.333333333332, ans=0.125 +2024-08-25 06:05:32,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=24853.333333333332, ans=0.125 +2024-08-25 06:05:43,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=24906.666666666668, ans=0.125 +2024-08-25 06:05:53,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=24960.0, ans=0.125 +2024-08-25 06:06:00,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=25013.333333333332, ans=0.025 +2024-08-25 06:06:01,433 INFO [train.py:1114] (0/4) Epoch 2, batch 2200, loss[loss=0.3222, simple_loss=0.3465, pruned_loss=0.1085, ctc_loss=0.2023, over 19607.00 frames. ], tot_loss[loss=0.3184, simple_loss=0.3454, pruned_loss=0.106, ctc_loss=0.1983, over 3867725.96 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 06:06:01,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=25013.333333333332, ans=0.125 +2024-08-25 06:06:02,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=25013.333333333332, ans=0.125 +2024-08-25 06:06:16,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.98 vs. 
limit=12.0 +2024-08-25 06:06:17,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25066.666666666668, ans=0.1 +2024-08-25 06:06:25,290 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.924e+02 2.286e+02 2.709e+02 6.222e+02, threshold=4.573e+02, percent-clipped=4.0 +2024-08-25 06:06:31,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=25120.0, ans=0.0 +2024-08-25 06:06:54,566 INFO [train.py:1114] (0/4) Epoch 2, batch 2250, loss[loss=0.3259, simple_loss=0.3514, pruned_loss=0.1098, ctc_loss=0.2023, over 19629.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.345, pruned_loss=0.1059, ctc_loss=0.1976, over 3866861.28 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 06:07:08,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=25333.333333333332, ans=0.0053623188405797105 +2024-08-25 06:07:17,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=25386.666666666668, ans=0.125 +2024-08-25 06:07:19,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=25440.0, ans=0.025 +2024-08-25 06:07:23,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=25440.0, ans=0.125 +2024-08-25 06:07:32,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=25493.333333333332, ans=0.005327536231884058 +2024-08-25 06:07:36,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=25493.333333333332, ans=0.2 +2024-08-25 06:07:41,080 INFO [train.py:1114] (0/4) Epoch 2, batch 2300, loss[loss=0.3008, simple_loss=0.3336, pruned_loss=0.09729, ctc_loss=0.1833, over 19494.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3438, pruned_loss=0.1051, ctc_loss=0.1967, over 3861320.93 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 32.0 +2024-08-25 06:07:54,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.85 vs. limit=15.0 +2024-08-25 06:07:57,410 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0 +2024-08-25 06:07:58,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.382e+02 1.775e+02 2.049e+02 2.504e+02 6.120e+02, threshold=4.097e+02, percent-clipped=1.0 +2024-08-25 06:08:06,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25653.333333333332, ans=0.1 +2024-08-25 06:08:21,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25760.0, ans=0.1 +2024-08-25 06:08:21,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=25760.0, ans=0.125 +2024-08-25 06:08:29,083 INFO [train.py:1114] (0/4) Epoch 2, batch 2350, loss[loss=0.3273, simple_loss=0.3554, pruned_loss=0.1099, ctc_loss=0.1982, over 19671.00 frames. 
], tot_loss[loss=0.317, simple_loss=0.3441, pruned_loss=0.1055, ctc_loss=0.1972, over 3864359.88 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 32.0 +2024-08-25 06:08:30,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=25813.333333333332, ans=0.0 +2024-08-25 06:08:32,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.23 vs. limit=15.0 +2024-08-25 06:08:34,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=25813.333333333332, ans=0.2 +2024-08-25 06:08:35,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=25813.333333333332, ans=0.125 +2024-08-25 06:08:47,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.94 vs. limit=15.0 +2024-08-25 06:09:06,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=25973.333333333332, ans=0.005223188405797102 +2024-08-25 06:09:24,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=12.0 +2024-08-25 06:09:28,455 INFO [train.py:1114] (0/4) Epoch 2, batch 2400, loss[loss=0.3336, simple_loss=0.3599, pruned_loss=0.1116, ctc_loss=0.2105, over 19337.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3466, pruned_loss=0.1066, ctc_loss=0.1992, over 3858931.03 frames. ], batch size: 67, lr: 4.04e-02, grad_scale: 32.0 +2024-08-25 06:09:43,432 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.803e+02 2.129e+02 2.459e+02 5.388e+02, threshold=4.257e+02, percent-clipped=1.0 +2024-08-25 06:09:48,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.64 vs. limit=22.5 +2024-08-25 06:09:55,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=26240.0, ans=0.0 +2024-08-25 06:09:55,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=26240.0, ans=0.025 +2024-08-25 06:09:56,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26240.0, ans=0.125 +2024-08-25 06:10:14,652 INFO [train.py:1114] (0/4) Epoch 2, batch 2450, loss[loss=0.4238, simple_loss=0.397, pruned_loss=0.1642, ctc_loss=0.3051, over 13904.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3528, pruned_loss=0.1124, ctc_loss=0.2096, over 3730699.37 frames. 
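The `Whitening: name=..., metric=X vs. limit=Y` lines fire when a layer's activations are measured as insufficiently decorrelated: the metric is 1.0 when the per-group covariance is proportional to the identity and grows as channels become correlated or unevenly scaled, and the module intervenes once it exceeds the limit. A hedged reconstruction of such a metric (the exact normalization in `scaling.py` may differ):

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """~1.0 for white activations, larger as channels correlate.

    A conceptual reconstruction of the quantity behind the
    "metric=X vs. limit=Y" log lines, not the library's exact formula.
    """
    n, c = x.shape
    cg = c // num_groups
    xg = x.reshape(n, num_groups, cg).transpose(0, 1)  # (groups, n, cg)
    covar = xg.transpose(1, 2) @ xg / n                # per-group covariance
    mean_diag = covar.diagonal(dim1=1, dim2=2).mean()
    return ((covar ** 2).sum() / (num_groups * cg * mean_diag ** 2)).item()


print(whitening_metric(torch.randn(1000, 512)))  # ~1.0; the log flags e.g. 15.23 vs. limit=15.0
```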
], batch size: 140, lr: 4.03e-02, grad_scale: 32.0 +2024-08-25 06:10:27,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=26400.0, ans=0.125 +2024-08-25 06:10:42,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=26453.333333333332, ans=0.125 +2024-08-25 06:10:44,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=26453.333333333332, ans=0.125 +2024-08-25 06:10:52,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=26506.666666666668, ans=0.005107246376811594 +2024-08-25 06:10:57,360 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-2.pt +2024-08-25 06:11:52,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=26554.666666666668, ans=0.0 +2024-08-25 06:11:53,141 INFO [train.py:1114] (0/4) Epoch 3, batch 0, loss[loss=0.3217, simple_loss=0.3386, pruned_loss=0.1107, ctc_loss=0.2086, over 19791.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3386, pruned_loss=0.1107, ctc_loss=0.2086, over 19791.00 frames. ], batch size: 49, lr: 3.83e-02, grad_scale: 32.0 +2024-08-25 06:11:55,992 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-25 06:12:07,824 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2565, simple_loss=0.3309, pruned_loss=0.06653, ctc_loss=0.1228, over 944034.00 frames. +2024-08-25 06:12:07,825 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13390MB +2024-08-25 06:13:57,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=26608.0, ans=0.2 +2024-08-25 06:15:15,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=26661.333333333332, ans=0.0 +2024-08-25 06:15:44,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=26661.333333333332, ans=0.005073623188405798 +2024-08-25 06:16:00,086 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.983e+02 2.286e+02 2.644e+02 3.774e+02, threshold=4.572e+02, percent-clipped=0.0 +2024-08-25 06:17:19,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=26714.666666666668, ans=0.025 +2024-08-25 06:17:56,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=26714.666666666668, ans=0.2 +2024-08-25 06:17:57,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.69 vs. limit=22.5 +2024-08-25 06:20:02,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=26768.0, ans=0.125 +2024-08-25 06:22:59,724 INFO [train.py:1114] (0/4) Epoch 3, batch 50, loss[loss=0.2623, simple_loss=0.3014, pruned_loss=0.08004, ctc_loss=0.1579, over 19698.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3498, pruned_loss=0.1073, ctc_loss=0.2016, over 845269.70 frames. 
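Around the epoch boundary above, the log shows the recipe's bookkeeping in one stretch: a batch-count checkpoint earlier (`checkpoint-4000.pt`), an end-of-epoch checkpoint (`epoch-2.pt`), then a validation pass and a peak-memory report as epoch 3 starts. A schematic of that loop follows; every function body is a stand-in rather than the recipe's real `train.py`.

```python
# Schematic of the bookkeeping visible around the epoch boundary above:
# a checkpoint every N batches, an end-of-epoch checkpoint, then a validation
# pass and a peak-memory report when the next epoch starts. Stand-in code only.
import torch


def compute_validation_loss(model, valid_set):
    with torch.no_grad():
        return sum(model(x).pow(2).mean().item() for x in valid_set) / len(valid_set)


def run(model, train_set, valid_set, num_epochs=10, exp_dir="exp", save_every=4000):
    batch_idx = 0
    for epoch in range(1, num_epochs + 1):
        print(f"Epoch {epoch}: Computing validation loss")
        print(f"validation: loss={compute_validation_loss(model, valid_set):.4f}")
        if torch.cuda.is_available():
            mb = torch.cuda.max_memory_allocated() // 2**20
            print(f"Maximum memory allocated so far is {mb}MB")
        for x in train_set:
            model(x).pow(2).mean().backward()
            batch_idx += 1
            if batch_idx % save_every == 0:  # cf. checkpoint-4000.pt above
                torch.save(model.state_dict(), f"{exp_dir}/checkpoint-{batch_idx}.pt")
        torch.save(model.state_dict(), f"{exp_dir}/epoch-{epoch}.pt")  # cf. epoch-2.pt
```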
], batch size: 47, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 06:29:44,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=26874.666666666668, ans=0.125 +2024-08-25 06:30:17,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26874.666666666668, ans=0.1 +2024-08-25 06:34:48,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=26874.666666666668, ans=0.025 +2024-08-25 06:36:26,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-08-25 06:40:59,024 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:44:24,660 INFO [train.py:1114] (0/4) Epoch 3, batch 100, loss[loss=0.2831, simple_loss=0.3227, pruned_loss=0.08768, ctc_loss=0.1703, over 19727.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3489, pruned_loss=0.107, ctc_loss=0.2015, over 1500423.89 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 06:45:53,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.99 vs. limit=15.0 +2024-08-25 06:46:28,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=27088.0, ans=10.0 +2024-08-25 06:47:46,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27194.666666666668, ans=0.1 +2024-08-25 06:48:15,512 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.744e+02 2.032e+02 2.291e+02 1.205e+03, threshold=4.063e+02, percent-clipped=1.0 +2024-08-25 06:50:11,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=27301.333333333332, ans=0.0 +2024-08-25 06:50:28,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.02 vs. limit=15.0 +2024-08-25 06:50:43,563 INFO [train.py:1114] (0/4) Epoch 3, batch 150, loss[loss=0.2808, simple_loss=0.3111, pruned_loss=0.09165, ctc_loss=0.1679, over 19745.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3447, pruned_loss=0.1046, ctc_loss=0.1965, over 2028606.56 frames. ], batch size: 47, lr: 3.81e-02, grad_scale: 16.0 +2024-08-25 06:51:33,899 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.13 vs. limit=12.0 +2024-08-25 06:53:56,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=27514.666666666668, ans=0.0 +2024-08-25 06:54:10,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.00 vs. limit=15.0 +2024-08-25 06:54:16,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=27568.0, ans=0.125 +2024-08-25 06:54:31,938 INFO [train.py:1114] (0/4) Epoch 3, batch 200, loss[loss=0.3341, simple_loss=0.3491, pruned_loss=0.1157, ctc_loss=0.2195, over 18269.00 frames. 
], tot_loss[loss=0.3129, simple_loss=0.3419, pruned_loss=0.1031, ctc_loss=0.1938, over 2435586.34 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 06:56:00,408 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.731e+02 1.977e+02 2.205e+02 3.305e+02, threshold=3.953e+02, percent-clipped=0.0 +2024-08-25 06:56:10,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=27781.333333333332, ans=0.025 +2024-08-25 06:56:34,805 INFO [train.py:1114] (0/4) Epoch 3, batch 250, loss[loss=0.3449, simple_loss=0.3686, pruned_loss=0.1177, ctc_loss=0.2146, over 19362.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3417, pruned_loss=0.1028, ctc_loss=0.1929, over 2754938.96 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 06:57:32,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27994.666666666668, ans=0.1 +2024-08-25 06:57:53,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.18 vs. limit=15.0 +2024-08-25 07:02:46,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=28048.0, ans=0.025 +2024-08-25 07:02:59,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=28101.333333333332, ans=0.004760579710144928 +2024-08-25 07:03:09,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=28101.333333333332, ans=0.004760579710144928 +2024-08-25 07:03:09,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.74 vs. limit=15.0 +2024-08-25 07:03:24,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=28101.333333333332, ans=0.2 +2024-08-25 07:03:29,189 INFO [train.py:1114] (0/4) Epoch 3, batch 300, loss[loss=0.3133, simple_loss=0.3538, pruned_loss=0.09952, ctc_loss=0.1844, over 19553.00 frames. ], tot_loss[loss=0.3102, simple_loss=0.3403, pruned_loss=0.1018, ctc_loss=0.1912, over 2999555.88 frames. 
], batch size: 61, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 07:03:53,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28154.666666666668, ans=0.125 +2024-08-25 07:04:14,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=28208.0, ans=0.0 +2024-08-25 07:04:33,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28261.333333333332, ans=0.125 +2024-08-25 07:04:34,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=28261.333333333332, ans=0.125 +2024-08-25 07:04:44,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.724e+02 1.968e+02 2.265e+02 3.417e+02, threshold=3.936e+02, percent-clipped=0.0 +2024-08-25 07:05:04,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=28314.666666666668, ans=0.04949747468305833 +2024-08-25 07:05:35,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.25 vs. limit=10.0 +2024-08-25 07:05:49,888 INFO [train.py:1114] (0/4) Epoch 3, batch 350, loss[loss=0.2795, simple_loss=0.3131, pruned_loss=0.0893, ctc_loss=0.1684, over 19749.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.341, pruned_loss=0.1021, ctc_loss=0.1917, over 3189804.67 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 07:05:57,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28421.333333333332, ans=0.1 +2024-08-25 07:06:09,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.98 vs. limit=15.0 +2024-08-25 07:06:19,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=28421.333333333332, ans=0.025 +2024-08-25 07:07:40,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=28581.333333333332, ans=0.125 +2024-08-25 07:07:45,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=28581.333333333332, ans=0.004656231884057971 +2024-08-25 07:07:47,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=28634.666666666668, ans=0.125 +2024-08-25 07:08:01,167 INFO [train.py:1114] (0/4) Epoch 3, batch 400, loss[loss=0.3132, simple_loss=0.3455, pruned_loss=0.1033, ctc_loss=0.1858, over 19501.00 frames. ], tot_loss[loss=0.31, simple_loss=0.3405, pruned_loss=0.1017, ctc_loss=0.1907, over 3342807.24 frames. 
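The learning rate in these entries decays smoothly within an epoch and takes a larger step down at each epoch boundary (4.03e-02 at the end of epoch 2 vs. 3.83e-02 at epoch 3, batch 0). That shape is characteristic of an Eden-style scheduler, which discounts the base LR by both batch index and epoch; here is a sketch under that assumption, with `lr_batches`/`lr_epochs` constants that are illustrative guesses rather than the recipe's values.

```python
# Eden-style LR schedule (assumed): discount the base LR by batch- and
# epoch-dependent factors. All constants below are illustrative guesses.
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor


for epoch in (2, 3, 4):
    print(f"epoch {epoch}, batch 25000: lr={eden_lr(0.05, 25000, epoch):.2e}")
```

The batch factor dominates early on, then flattens out, so later in training the per-epoch drop becomes the main source of decay, matching the slowing within-epoch drift visible in the log.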
], batch size: 54, lr: 3.78e-02, grad_scale: 32.0 +2024-08-25 07:08:24,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=28794.666666666668, ans=0.0 +2024-08-25 07:08:42,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.762e+02 1.982e+02 2.336e+02 5.420e+02, threshold=3.963e+02, percent-clipped=2.0 +2024-08-25 07:08:46,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28848.0, ans=0.1 +2024-08-25 07:09:04,146 INFO [train.py:1114] (0/4) Epoch 3, batch 450, loss[loss=0.2989, simple_loss=0.3439, pruned_loss=0.09183, ctc_loss=0.1757, over 19610.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.3402, pruned_loss=0.1015, ctc_loss=0.1903, over 3452268.07 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 07:09:13,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=28954.666666666668, ans=0.0 +2024-08-25 07:09:37,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=29114.666666666668, ans=0.125 +2024-08-25 07:09:39,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=29114.666666666668, ans=0.025 +2024-08-25 07:09:56,833 INFO [train.py:1114] (0/4) Epoch 3, batch 500, loss[loss=0.3087, simple_loss=0.3454, pruned_loss=0.09894, ctc_loss=0.1852, over 19636.00 frames. ], tot_loss[loss=0.3081, simple_loss=0.339, pruned_loss=0.1008, ctc_loss=0.189, over 3547304.73 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 07:10:20,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-08-25 07:10:28,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29274.666666666668, ans=0.1 +2024-08-25 07:10:34,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0 +2024-08-25 07:10:43,381 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.753e+02 1.966e+02 2.327e+02 4.047e+02, threshold=3.932e+02, percent-clipped=2.0 +2024-08-25 07:11:10,798 INFO [train.py:1114] (0/4) Epoch 3, batch 550, loss[loss=0.3242, simple_loss=0.3526, pruned_loss=0.1073, ctc_loss=0.2032, over 19227.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.3393, pruned_loss=0.101, ctc_loss=0.1892, over 3608971.33 frames. 
], batch size: 71, lr: 3.76e-02, grad_scale: 16.0 +2024-08-25 07:11:51,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29541.333333333332, ans=0.125 +2024-08-25 07:11:55,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=29541.333333333332, ans=0.125 +2024-08-25 07:12:37,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=29648.0, ans=0.0 +2024-08-25 07:12:45,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=29701.333333333332, ans=0.2 +2024-08-25 07:12:46,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=15.0 +2024-08-25 07:12:53,133 INFO [train.py:1114] (0/4) Epoch 3, batch 600, loss[loss=0.314, simple_loss=0.3532, pruned_loss=0.1006, ctc_loss=0.1844, over 19344.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.339, pruned_loss=0.1006, ctc_loss=0.1885, over 3665528.88 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 16.0 +2024-08-25 07:12:59,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29754.666666666668, ans=0.1 +2024-08-25 07:13:37,728 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.812e+02 2.009e+02 2.360e+02 5.731e+02, threshold=4.017e+02, percent-clipped=3.0 +2024-08-25 07:13:46,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29968.0, ans=0.0 +2024-08-25 07:13:54,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=29968.0, ans=0.2 +2024-08-25 07:13:57,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=29968.0, ans=0.025 +2024-08-25 07:14:00,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=29968.0, ans=0.004354782608695653 +2024-08-25 07:14:02,714 INFO [train.py:1114] (0/4) Epoch 3, batch 650, loss[loss=0.2891, simple_loss=0.3351, pruned_loss=0.08916, ctc_loss=0.1619, over 19787.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3378, pruned_loss=0.09991, ctc_loss=0.1872, over 3715665.21 frames. 
], batch size: 54, lr: 3.75e-02, grad_scale: 16.0 +2024-08-25 07:14:13,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=30074.666666666668, ans=0.07 +2024-08-25 07:14:14,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 07:14:20,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=30074.666666666668, ans=0.025 +2024-08-25 07:14:21,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=30074.666666666668, ans=0.125 +2024-08-25 07:14:34,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=30181.333333333332, ans=0.0043084057971014495 +2024-08-25 07:14:45,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=30234.666666666668, ans=10.0 +2024-08-25 07:14:45,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.16 vs. limit=15.0 +2024-08-25 07:14:55,117 INFO [train.py:1114] (0/4) Epoch 3, batch 700, loss[loss=0.2874, simple_loss=0.3224, pruned_loss=0.09123, ctc_loss=0.1747, over 19730.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3375, pruned_loss=0.09964, ctc_loss=0.1864, over 3746817.77 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 16.0 +2024-08-25 07:15:03,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=30288.0, ans=0.004285217391304348 +2024-08-25 07:15:07,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30341.333333333332, ans=0.0 +2024-08-25 07:15:28,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.846e+02 1.998e+02 2.505e+02 9.071e+02, threshold=3.995e+02, percent-clipped=5.0 +2024-08-25 07:15:29,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.98 vs. limit=15.0 +2024-08-25 07:15:35,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30448.0, ans=0.125 +2024-08-25 07:15:38,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=30448.0, ans=0.2 +2024-08-25 07:15:43,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30501.333333333332, ans=0.125 +2024-08-25 07:15:48,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=30501.333333333332, ans=0.125 +2024-08-25 07:15:56,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=30554.666666666668, ans=0.125 +2024-08-25 07:15:58,569 INFO [train.py:1114] (0/4) Epoch 3, batch 750, loss[loss=0.2947, simple_loss=0.3356, pruned_loss=0.09184, ctc_loss=0.1754, over 19487.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3368, pruned_loss=0.09926, ctc_loss=0.1854, over 3772901.44 frames. 
], batch size: 54, lr: 3.74e-02, grad_scale: 16.0 +2024-08-25 07:16:07,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30554.666666666668, ans=0.1 +2024-08-25 07:16:26,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30608.0, ans=0.1 +2024-08-25 07:16:30,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30661.333333333332, ans=0.1 +2024-08-25 07:16:38,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=30661.333333333332, ans=0.2 +2024-08-25 07:17:00,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=30714.666666666668, ans=0.025 +2024-08-25 07:24:36,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=30714.666666666668, ans=0.0 +2024-08-25 07:34:01,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.07 vs. limit=15.0 +2024-08-25 07:34:42,523 INFO [train.py:1114] (0/4) Epoch 3, batch 800, loss[loss=0.2705, simple_loss=0.309, pruned_loss=0.085, ctc_loss=0.1551, over 19821.00 frames. ], tot_loss[loss=0.3041, simple_loss=0.3363, pruned_loss=0.09898, ctc_loss=0.1849, over 3794536.37 frames. ], batch size: 49, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 07:40:57,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30821.333333333332, ans=0.1 +2024-08-25 08:00:17,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.88 vs. limit=22.5 +2024-08-25 08:02:40,818 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.761e+02 1.928e+02 2.233e+02 3.899e+02, threshold=3.856e+02, percent-clipped=0.0 +2024-08-25 08:12:12,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.21 vs. limit=10.0 +2024-08-25 08:12:59,502 INFO [train.py:1114] (0/4) Epoch 3, batch 850, loss[loss=0.3265, simple_loss=0.3565, pruned_loss=0.1074, ctc_loss=0.2041, over 19628.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3367, pruned_loss=0.09917, ctc_loss=0.1852, over 3815463.35 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 08:14:59,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.93 vs. limit=15.0 +2024-08-25 08:23:27,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=31141.333333333332, ans=0.2 +2024-08-25 08:25:54,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.59 vs. 
limit=15.0 +2024-08-25 08:43:00,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31301.333333333332, ans=0.1 +2024-08-25 08:44:41,568 INFO [train.py:1114] (0/4) Epoch 3, batch 900, loss[loss=0.2716, simple_loss=0.3047, pruned_loss=0.08591, ctc_loss=0.1668, over 19827.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3374, pruned_loss=0.09958, ctc_loss=0.1859, over 3819825.72 frames. ], batch size: 49, lr: 3.72e-02, grad_scale: 32.0 +2024-08-25 08:50:42,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=31408.0, ans=0.95 +2024-08-25 08:52:09,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=31461.333333333332, ans=0.125 +2024-08-25 08:57:05,263 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-08-25 08:57:54,732 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.748e+02 1.945e+02 2.250e+02 3.446e+02, threshold=3.889e+02, percent-clipped=0.0 +2024-08-25 08:59:29,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=31514.666666666668, ans=0.125 +2024-08-25 09:02:10,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=31568.0, ans=0.0 +2024-08-25 09:04:40,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=15.0 +2024-08-25 09:05:01,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=31568.0, ans=0.0 +2024-08-25 09:05:03,363 INFO [train.py:1114] (0/4) Epoch 3, batch 950, loss[loss=0.2728, simple_loss=0.3092, pruned_loss=0.0858, ctc_loss=0.1619, over 19499.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.338, pruned_loss=0.1001, ctc_loss=0.187, over 3821606.58 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 32.0 +2024-08-25 09:07:07,750 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.65 vs. limit=15.0 +2024-08-25 09:16:00,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=31728.0, ans=0.125 +2024-08-25 09:22:20,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31888.0, ans=0.1 +2024-08-25 09:23:03,904 INFO [train.py:1114] (0/4) Epoch 3, batch 1000, loss[loss=0.2564, simple_loss=0.3079, pruned_loss=0.07392, ctc_loss=0.1427, over 19845.00 frames. ], tot_loss[loss=0.3073, simple_loss=0.3388, pruned_loss=0.1004, ctc_loss=0.1874, over 3817304.87 frames. 
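The `grad_scale: 16.0` / `grad_scale: 32.0` values at the end of each `train.py:1114` entry presumably track dynamic loss scaling for mixed-precision training: the scale is doubled after a long run of overflow-free steps and halved whenever inf/nan gradients appear, which would explain why it oscillates between 16 and 32 through this stretch of the log. Minimal usage of PyTorch's `GradScaler`, which implements that policy (requires a GPU; the recipe's own scaler may be configured differently):

```python
import torch

model = torch.nn.Linear(80, 500).cuda()
opt = torch.optim.Adam(model.parameters(), lr=4e-2)
scaler = torch.cuda.amp.GradScaler(init_scale=16.0)

x = torch.randn(8, 80, device="cuda")
with torch.cuda.amp.autocast():
    loss = model(x).pow(2).mean()
scaler.scale(loss).backward()  # backprop the scaled loss
scaler.step(opt)               # unscales grads; skips the step on overflow
scaler.update()                # grow/shrink the scale -> the logged grad_scale
print(scaler.get_scale())
```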
], batch size: 52, lr: 3.71e-02, grad_scale: 16.0 +2024-08-25 09:27:50,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=31994.666666666668, ans=0.125 +2024-08-25 09:28:02,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=31994.666666666668, ans=0.0 +2024-08-25 09:29:07,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.873e+02 2.237e+02 2.628e+02 7.664e+02, threshold=4.475e+02, percent-clipped=6.0 +2024-08-25 09:29:19,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.51 vs. limit=15.0 +2024-08-25 09:29:57,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=32048.0, ans=0.003902608695652174 +2024-08-25 09:32:27,621 INFO [train.py:1114] (0/4) Epoch 3, batch 1050, loss[loss=0.323, simple_loss=0.3641, pruned_loss=0.1035, ctc_loss=0.1874, over 19840.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.3379, pruned_loss=0.09998, ctc_loss=0.1867, over 3823854.97 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 16.0 +2024-08-25 09:32:33,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32154.666666666668, ans=0.125 +2024-08-25 09:33:25,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=32208.0, ans=0.0 +2024-08-25 09:34:01,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=32208.0, ans=0.0 +2024-08-25 09:34:20,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.32 vs. limit=22.5 +2024-08-25 09:35:45,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.43 vs. limit=15.0 +2024-08-25 09:36:22,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=32314.666666666668, ans=0.0 +2024-08-25 09:36:30,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32314.666666666668, ans=0.1 +2024-08-25 09:39:11,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.66 vs. limit=15.0 +2024-08-25 09:39:16,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=32368.0, ans=0.125 +2024-08-25 09:41:10,314 INFO [train.py:1114] (0/4) Epoch 3, batch 1100, loss[loss=0.2928, simple_loss=0.3321, pruned_loss=0.0918, ctc_loss=0.1749, over 19578.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.3373, pruned_loss=0.09936, ctc_loss=0.1854, over 3830357.79 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 16.0 +2024-08-25 09:42:26,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.90 vs. 
limit=15.0 +2024-08-25 09:42:45,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=32528.0, ans=0.2 +2024-08-25 09:43:12,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.45 vs. limit=10.0 +2024-08-25 09:43:23,046 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.681e+02 1.943e+02 2.357e+02 4.515e+02, threshold=3.887e+02, percent-clipped=1.0 +2024-08-25 09:45:15,747 INFO [train.py:1114] (0/4) Epoch 3, batch 1150, loss[loss=0.2711, simple_loss=0.311, pruned_loss=0.08428, ctc_loss=0.1569, over 19566.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3366, pruned_loss=0.09924, ctc_loss=0.1851, over 3828597.53 frames. ], batch size: 52, lr: 3.69e-02, grad_scale: 16.0 +2024-08-25 09:54:01,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=32794.666666666664, ans=0.125 +2024-08-25 09:55:14,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0 +2024-08-25 09:55:29,637 INFO [train.py:1114] (0/4) Epoch 3, batch 1200, loss[loss=0.3001, simple_loss=0.3471, pruned_loss=0.09167, ctc_loss=0.1742, over 19832.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3378, pruned_loss=0.09971, ctc_loss=0.186, over 3824323.82 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 32.0 +2024-08-25 09:56:10,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.93 vs. limit=12.0 +2024-08-25 09:56:16,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.92 vs. limit=12.0 +2024-08-25 09:56:30,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.95 vs. limit=10.0 +2024-08-25 09:56:31,119 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.705e+02 1.941e+02 2.201e+02 4.168e+02, threshold=3.882e+02, percent-clipped=1.0 +2024-08-25 09:56:48,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=33114.666666666664, ans=0.125 +2024-08-25 09:57:41,496 INFO [train.py:1114] (0/4) Epoch 3, batch 1250, loss[loss=0.3241, simple_loss=0.3536, pruned_loss=0.1068, ctc_loss=0.2024, over 19515.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.3376, pruned_loss=0.09911, ctc_loss=0.1848, over 3843084.15 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 32.0 +2024-08-25 09:58:01,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=33274.666666666664, ans=0.125 +2024-08-25 09:58:33,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=33328.0, ans=0.1 +2024-08-25 09:59:04,137 INFO [train.py:1114] (0/4) Epoch 3, batch 1300, loss[loss=0.3129, simple_loss=0.343, pruned_loss=0.1021, ctc_loss=0.1968, over 18759.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3359, pruned_loss=0.09801, ctc_loss=0.1828, over 3847195.98 frames. 
], batch size: 76, lr: 3.67e-02, grad_scale: 32.0 +2024-08-25 09:59:28,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=33541.333333333336, ans=10.0 +2024-08-25 09:59:48,218 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.674e+02 1.887e+02 2.172e+02 3.368e+02, threshold=3.774e+02, percent-clipped=0.0 +2024-08-25 09:59:57,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33648.0, ans=0.1 +2024-08-25 09:59:59,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33648.0, ans=0.125 +2024-08-25 10:00:05,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33701.333333333336, ans=0.1 +2024-08-25 10:00:09,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.44 vs. limit=22.5 +2024-08-25 10:00:22,481 INFO [train.py:1114] (0/4) Epoch 3, batch 1350, loss[loss=0.2958, simple_loss=0.3326, pruned_loss=0.09338, ctc_loss=0.1807, over 19761.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3354, pruned_loss=0.09743, ctc_loss=0.1823, over 3857821.88 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 32.0 +2024-08-25 10:00:22,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33754.666666666664, ans=0.1 +2024-08-25 10:00:33,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=33754.666666666664, ans=0.0 +2024-08-25 10:00:50,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=33808.0, ans=0.0 +2024-08-25 10:00:52,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=33808.0, ans=0.125 +2024-08-25 10:00:52,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33808.0, ans=0.125 +2024-08-25 10:00:52,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=33808.0, ans=0.0 +2024-08-25 10:00:58,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=33808.0, ans=0.0 +2024-08-25 10:01:13,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.73 vs. limit=22.5 +2024-08-25 10:01:20,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.17 vs. limit=12.0 +2024-08-25 10:01:33,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=33914.666666666664, ans=0.0 +2024-08-25 10:01:59,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.65 vs. limit=15.0 +2024-08-25 10:02:01,554 INFO [train.py:1114] (0/4) Epoch 3, batch 1400, loss[loss=0.2731, simple_loss=0.3031, pruned_loss=0.08862, ctc_loss=0.1644, over 19694.00 frames. 
], tot_loss[loss=0.3007, simple_loss=0.3349, pruned_loss=0.09697, ctc_loss=0.1814, over 3864952.25 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 32.0 +2024-08-25 10:02:21,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=34074.666666666664, ans=0.2 +2024-08-25 10:02:32,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=34128.0, ans=0.125 +2024-08-25 10:02:33,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=34128.0, ans=0.0034504347826086953 +2024-08-25 10:02:45,296 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.896e+02 2.159e+02 2.528e+02 3.857e+02, threshold=4.318e+02, percent-clipped=1.0 +2024-08-25 10:03:04,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=34234.666666666664, ans=0.125 +2024-08-25 10:03:12,603 INFO [train.py:1114] (0/4) Epoch 3, batch 1450, loss[loss=0.3314, simple_loss=0.3642, pruned_loss=0.1087, ctc_loss=0.2032, over 19649.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3355, pruned_loss=0.09736, ctc_loss=0.182, over 3863352.84 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 32.0 +2024-08-25 10:03:33,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-08-25 10:03:33,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=34341.333333333336, ans=0.0 +2024-08-25 10:04:06,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=12.0 +2024-08-25 10:04:13,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34501.333333333336, ans=0.125 +2024-08-25 10:04:21,619 INFO [train.py:1114] (0/4) Epoch 3, batch 1500, loss[loss=0.3255, simple_loss=0.3626, pruned_loss=0.1044, ctc_loss=0.199, over 19595.00 frames. ], tot_loss[loss=0.302, simple_loss=0.336, pruned_loss=0.09755, ctc_loss=0.1824, over 3863083.24 frames. 
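The four numbers inside each `loss[...]`/`tot_loss[...]` bracket are not independent: throughout this log the total is consistent with a fixed weighting of the pruned-transducer losses and the CTC loss, loss = 0.5·simple_loss + pruned_loss + 0.2·ctc_loss (e.g. 0.5·0.3349 + 0.09697 + 0.2·0.1814 ≈ 0.3007 for the batch-1400 `tot_loss` above). The scale names below are assumptions; the weights themselves are read off the logged values.

```python
# Weighted loss combination consistent with the loss[...] entries in this log.
# Names are assumed; the 0.5 / 0.2 weights are inferred from the logged numbers.
SIMPLE_LOSS_SCALE = 0.5
CTC_LOSS_SCALE = 0.2


def total_loss(simple_loss: float, pruned_loss: float, ctc_loss: float) -> float:
    return SIMPLE_LOSS_SCALE * simple_loss + pruned_loss + CTC_LOSS_SCALE * ctc_loss


print(total_loss(0.3349, 0.09697, 0.1814))  # ~0.3007, the tot_loss above
```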
], batch size: 57, lr: 3.65e-02, grad_scale: 32.0 +2024-08-25 10:04:29,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=34554.666666666664, ans=0.2 +2024-08-25 10:04:31,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=34554.666666666664, ans=0.125 +2024-08-25 10:04:44,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=34661.333333333336, ans=0.125 +2024-08-25 10:04:51,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=34661.333333333336, ans=0.0 +2024-08-25 10:04:56,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=34714.666666666664, ans=0.125 +2024-08-25 10:05:09,914 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.778e+02 1.971e+02 2.353e+02 5.678e+02, threshold=3.941e+02, percent-clipped=1.0 +2024-08-25 10:05:10,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34714.666666666664, ans=0.1 +2024-08-25 10:05:11,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=34714.666666666664, ans=0.2 +2024-08-25 10:05:25,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.07 vs. limit=22.5 +2024-08-25 10:05:26,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=34768.0, ans=0.1 +2024-08-25 10:05:29,603 INFO [train.py:1114] (0/4) Epoch 3, batch 1550, loss[loss=0.3537, simple_loss=0.3763, pruned_loss=0.1231, ctc_loss=0.2122, over 19626.00 frames. ], tot_loss[loss=0.303, simple_loss=0.3363, pruned_loss=0.09822, ctc_loss=0.1833, over 3846218.17 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 10:05:36,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=34821.333333333336, ans=0.0 +2024-08-25 10:05:36,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=34821.333333333336, ans=0.2 +2024-08-25 10:05:44,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=34874.666666666664, ans=15.0 +2024-08-25 10:05:58,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=34928.0, ans=0.003276521739130434 +2024-08-25 10:06:07,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=34981.333333333336, ans=0.125 +2024-08-25 10:06:09,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34981.333333333336, ans=0.125 +2024-08-25 10:06:42,354 INFO [train.py:1114] (0/4) Epoch 3, batch 1600, loss[loss=0.2926, simple_loss=0.3349, pruned_loss=0.09226, ctc_loss=0.1647, over 19836.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3356, pruned_loss=0.09786, ctc_loss=0.1828, over 3835008.45 frames. 
], batch size: 57, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 10:07:01,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=35088.0, ans=0.003241739130434783 +2024-08-25 10:07:28,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=35194.666666666664, ans=0.125 +2024-08-25 10:07:47,938 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.483e+02 1.752e+02 2.032e+02 2.338e+02 4.104e+02, threshold=4.064e+02, percent-clipped=1.0 +2024-08-25 10:07:52,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35248.0, ans=0.1 +2024-08-25 10:08:06,823 INFO [train.py:1114] (0/4) Epoch 3, batch 1650, loss[loss=0.353, simple_loss=0.3775, pruned_loss=0.1186, ctc_loss=0.2279, over 19661.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3356, pruned_loss=0.09794, ctc_loss=0.1828, over 3831787.34 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0 +2024-08-25 10:08:06,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=35354.666666666664, ans=0.0031837681159420303 +2024-08-25 10:08:11,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=35354.666666666664, ans=0.125 +2024-08-25 10:08:21,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35408.0, ans=0.1 +2024-08-25 10:08:21,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35408.0, ans=0.1 +2024-08-25 10:08:41,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=35461.333333333336, ans=0.2 +2024-08-25 10:08:48,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-08-25 10:08:52,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=15.0 +2024-08-25 10:08:53,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=35514.666666666664, ans=0.015 +2024-08-25 10:08:59,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=35568.0, ans=0.0031373913043478262 +2024-08-25 10:09:04,856 INFO [train.py:1114] (0/4) Epoch 3, batch 1700, loss[loss=0.2327, simple_loss=0.2763, pruned_loss=0.06846, ctc_loss=0.1302, over 19671.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3348, pruned_loss=0.09713, ctc_loss=0.1814, over 3846193.75 frames. 
], batch size: 46, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 10:09:11,485 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:09:22,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=35728.0, ans=0.125 +2024-08-25 10:09:52,811 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.835e+02 2.022e+02 2.484e+02 3.793e+02, threshold=4.043e+02, percent-clipped=0.0 +2024-08-25 10:10:09,480 INFO [train.py:1114] (0/4) Epoch 3, batch 1750, loss[loss=0.2521, simple_loss=0.2845, pruned_loss=0.08083, ctc_loss=0.1451, over 19628.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3341, pruned_loss=0.09683, ctc_loss=0.1807, over 3851012.17 frames. ], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 10:10:16,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35888.0, ans=0.1 +2024-08-25 10:10:28,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35941.333333333336, ans=0.1 +2024-08-25 10:10:39,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35994.666666666664, ans=0.1 +2024-08-25 10:10:42,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=35994.666666666664, ans=0.025 +2024-08-25 10:10:48,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35994.666666666664, ans=0.1 +2024-08-25 10:11:09,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=36101.333333333336, ans=0.003021449275362318 +2024-08-25 10:11:20,666 INFO [train.py:1114] (0/4) Epoch 3, batch 1800, loss[loss=0.2989, simple_loss=0.343, pruned_loss=0.09149, ctc_loss=0.1793, over 19624.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3342, pruned_loss=0.09663, ctc_loss=0.1804, over 3852942.62 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 32.0 +2024-08-25 10:11:36,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=36208.0, ans=0.0 +2024-08-25 10:11:45,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.79 vs. limit=15.0 +2024-08-25 10:11:45,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=15.0 +2024-08-25 10:11:52,938 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.758e+02 2.042e+02 2.396e+02 4.902e+02, threshold=4.083e+02, percent-clipped=1.0 +2024-08-25 10:11:57,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.10 vs. limit=15.0 +2024-08-25 10:12:29,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36421.333333333336, ans=0.1 +2024-08-25 10:12:33,952 INFO [train.py:1114] (0/4) Epoch 3, batch 1850, loss[loss=0.2964, simple_loss=0.3395, pruned_loss=0.09167, ctc_loss=0.175, over 19590.00 frames. 
], tot_loss[loss=0.2973, simple_loss=0.3326, pruned_loss=0.09532, ctc_loss=0.1784, over 3857160.26 frames. ], batch size: 57, lr: 3.61e-02, grad_scale: 32.0 +2024-08-25 10:12:53,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-08-25 10:12:57,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0 +2024-08-25 10:13:18,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=36581.333333333336, ans=0.125 +2024-08-25 10:13:23,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=36634.666666666664, ans=15.0 +2024-08-25 10:13:31,596 INFO [train.py:1114] (0/4) Epoch 3, batch 1900, loss[loss=0.3181, simple_loss=0.3545, pruned_loss=0.1023, ctc_loss=0.193, over 19648.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3331, pruned_loss=0.09537, ctc_loss=0.1782, over 3860676.35 frames. ], batch size: 59, lr: 3.60e-02, grad_scale: 32.0 +2024-08-25 10:14:26,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=36794.666666666664, ans=0.0028707246376811595 +2024-08-25 10:14:29,194 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.725e+02 1.920e+02 2.285e+02 4.448e+02, threshold=3.841e+02, percent-clipped=1.0 +2024-08-25 10:14:34,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=36848.0, ans=0.125 +2024-08-25 10:14:35,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=36848.0, ans=0.125 +2024-08-25 10:14:47,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36901.333333333336, ans=0.1 +2024-08-25 10:14:52,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=36954.666666666664, ans=0.125 +2024-08-25 10:14:54,742 INFO [train.py:1114] (0/4) Epoch 3, batch 1950, loss[loss=0.2779, simple_loss=0.3186, pruned_loss=0.08505, ctc_loss=0.1676, over 19579.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3348, pruned_loss=0.09583, ctc_loss=0.1792, over 3869319.91 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:15:04,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=36954.666666666664, ans=22.5 +2024-08-25 10:15:08,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.49 vs. 
limit=10.0 +2024-08-25 10:15:14,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37008.0, ans=0.1 +2024-08-25 10:15:17,815 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:15:24,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=37061.333333333336, ans=0.04949747468305833 +2024-08-25 10:15:36,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=15.0 +2024-08-25 10:15:37,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.28 vs. limit=15.0 +2024-08-25 10:15:44,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.55 vs. limit=15.0 +2024-08-25 10:15:45,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.46 vs. limit=15.0 +2024-08-25 10:15:51,814 INFO [train.py:1114] (0/4) Epoch 3, batch 2000, loss[loss=0.2604, simple_loss=0.2968, pruned_loss=0.08065, ctc_loss=0.1569, over 19645.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3354, pruned_loss=0.09635, ctc_loss=0.1799, over 3854838.15 frames. ], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:16:10,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37328.0, ans=0.0 +2024-08-25 10:16:19,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.904e+02 2.146e+02 2.566e+02 5.347e+02, threshold=4.293e+02, percent-clipped=2.0 +2024-08-25 10:16:44,027 INFO [train.py:1114] (0/4) Epoch 3, batch 2050, loss[loss=0.2776, simple_loss=0.3094, pruned_loss=0.08873, ctc_loss=0.1708, over 19718.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3343, pruned_loss=0.09627, ctc_loss=0.1798, over 3852097.82 frames. ], batch size: 47, lr: 3.58e-02, grad_scale: 32.0 +2024-08-25 10:16:44,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37488.0, ans=0.125 +2024-08-25 10:16:47,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=37488.0, ans=0.025 +2024-08-25 10:17:00,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=37488.0, ans=0.025 +2024-08-25 10:17:21,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=37594.666666666664, ans=0.0 +2024-08-25 10:17:24,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=37594.666666666664, ans=10.0 +2024-08-25 10:17:44,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=15.0 +2024-08-25 10:17:56,062 INFO [train.py:1114] (0/4) Epoch 3, batch 2100, loss[loss=0.2765, simple_loss=0.3209, pruned_loss=0.08313, ctc_loss=0.1646, over 19755.00 frames. 
], tot_loss[loss=0.2984, simple_loss=0.3337, pruned_loss=0.09576, ctc_loss=0.1789, over 3858773.03 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 10:18:06,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37754.666666666664, ans=0.125 +2024-08-25 10:18:18,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=37754.666666666664, ans=0.2 +2024-08-25 10:18:39,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=37808.0, ans=0.04949747468305833 +2024-08-25 10:18:46,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37808.0, ans=0.0 +2024-08-25 10:18:50,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=37808.0, ans=0.125 +2024-08-25 10:19:08,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=37861.333333333336, ans=0.125 +2024-08-25 10:19:20,785 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.776e+02 1.971e+02 2.246e+02 3.814e+02, threshold=3.941e+02, percent-clipped=0.0 +2024-08-25 10:20:09,497 INFO [train.py:1114] (0/4) Epoch 3, batch 2150, loss[loss=0.258, simple_loss=0.3053, pruned_loss=0.07688, ctc_loss=0.1424, over 19588.00 frames. ], tot_loss[loss=0.2966, simple_loss=0.3324, pruned_loss=0.09501, ctc_loss=0.1772, over 3869168.56 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 10:20:12,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=38021.333333333336, ans=0.125 +2024-08-25 10:20:14,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.55 vs. limit=22.5 +2024-08-25 10:20:50,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38128.0, ans=0.1 +2024-08-25 10:20:51,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38181.333333333336, ans=0.125 +2024-08-25 10:21:10,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.56 vs. limit=15.0 +2024-08-25 10:21:11,134 INFO [train.py:1114] (0/4) Epoch 3, batch 2200, loss[loss=0.3221, simple_loss=0.349, pruned_loss=0.1067, ctc_loss=0.2045, over 19591.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3324, pruned_loss=0.09513, ctc_loss=0.1775, over 3867395.74 frames. 
], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:21:18,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=38288.0, ans=0.0 +2024-08-25 10:21:29,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=38341.333333333336, ans=0.125 +2024-08-25 10:21:30,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=38341.333333333336, ans=0.125 +2024-08-25 10:21:32,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0 +2024-08-25 10:21:51,396 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:21:56,469 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.750e+02 1.922e+02 2.212e+02 3.187e+02, threshold=3.844e+02, percent-clipped=0.0 +2024-08-25 10:22:09,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=38448.0, ans=0.125 +2024-08-25 10:22:21,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38501.333333333336, ans=0.1 +2024-08-25 10:22:28,987 INFO [train.py:1114] (0/4) Epoch 3, batch 2250, loss[loss=0.294, simple_loss=0.3363, pruned_loss=0.09124, ctc_loss=0.173, over 19631.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3324, pruned_loss=0.09505, ctc_loss=0.1771, over 3866860.08 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:22:30,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=38554.666666666664, ans=0.125 +2024-08-25 10:22:55,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=38661.333333333336, ans=0.1 +2024-08-25 10:23:01,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38661.333333333336, ans=0.1 +2024-08-25 10:23:12,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=38714.666666666664, ans=0.125 +2024-08-25 10:23:40,907 INFO [train.py:1114] (0/4) Epoch 3, batch 2300, loss[loss=0.2506, simple_loss=0.295, pruned_loss=0.07343, ctc_loss=0.1485, over 19515.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3317, pruned_loss=0.09512, ctc_loss=0.1773, over 3860470.99 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:23:42,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.90 vs. 
limit=15.0 +2024-08-25 10:23:52,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=38874.666666666664, ans=0.125 +2024-08-25 10:23:53,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=38874.666666666664, ans=0.125 +2024-08-25 10:23:58,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38928.0, ans=0.1 +2024-08-25 10:24:00,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.05 vs. limit=22.5 +2024-08-25 10:24:10,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38928.0, ans=0.0 +2024-08-25 10:24:12,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.13 vs. limit=10.0 +2024-08-25 10:24:13,778 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.820e+02 2.030e+02 2.354e+02 3.970e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-25 10:24:22,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=39034.666666666664, ans=15.0 +2024-08-25 10:24:48,808 INFO [train.py:1114] (0/4) Epoch 3, batch 2350, loss[loss=0.3057, simple_loss=0.3386, pruned_loss=0.09911, ctc_loss=0.1865, over 19684.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3312, pruned_loss=0.09471, ctc_loss=0.1767, over 3863948.62 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:25:00,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=39088.0, ans=0.09899494936611666 +2024-08-25 10:25:06,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39141.333333333336, ans=0.0 +2024-08-25 10:25:10,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=39141.333333333336, ans=0.125 +2024-08-25 10:25:15,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=39194.666666666664, ans=0.0023489855072463773 +2024-08-25 10:25:19,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=39194.666666666664, ans=0.2 +2024-08-25 10:25:21,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.80 vs. limit=15.0 +2024-08-25 10:25:49,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=39301.333333333336, ans=0.0023257971014492744 +2024-08-25 10:25:52,378 INFO [train.py:1114] (0/4) Epoch 3, batch 2400, loss[loss=0.3205, simple_loss=0.3554, pruned_loss=0.1028, ctc_loss=0.1998, over 19526.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3338, pruned_loss=0.09602, ctc_loss=0.1791, over 3858826.04 frames. ], batch size: 67, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 10:26:03,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.81 vs. 
limit=10.0 +2024-08-25 10:26:04,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39408.0, ans=0.1 +2024-08-25 10:26:04,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=39408.0, ans=0.125 +2024-08-25 10:26:16,181 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:26:39,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=39514.666666666664, ans=0.125 +2024-08-25 10:26:40,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.87 vs. limit=15.0 +2024-08-25 10:26:41,528 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.777e+02 2.047e+02 2.383e+02 4.291e+02, threshold=4.094e+02, percent-clipped=1.0 +2024-08-25 10:26:59,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=39514.666666666664, ans=0.125 +2024-08-25 10:27:14,152 INFO [train.py:1114] (0/4) Epoch 3, batch 2450, loss[loss=0.4108, simple_loss=0.3822, pruned_loss=0.1607, ctc_loss=0.295, over 13134.00 frames. ], tot_loss[loss=0.308, simple_loss=0.3391, pruned_loss=0.1008, ctc_loss=0.1881, over 3733231.49 frames. ], batch size: 141, lr: 3.53e-02, grad_scale: 16.0 +2024-08-25 10:27:15,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39621.333333333336, ans=0.125 +2024-08-25 10:27:48,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=39781.333333333336, ans=0.09899494936611666 +2024-08-25 10:27:56,280 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-3.pt diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-1 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-1 new file mode 100644 index 0000000000000000000000000000000000000000..d4b549fd484eba256d8b2ea4adf45f356bcccc5e --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-1 @@ -0,0 +1,1163 @@ +2024-08-25 03:46:09,310 INFO [train.py:1182] (1/4) Training started +2024-08-25 03:46:09,311 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-25 03:46:09,373 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': 
'/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-25 03:46:09,373 INFO [train.py:1212] (1/4) About to create model +2024-08-25 03:46:10,419 INFO [train.py:1216] (1/4) Number of model parameters: 65805511 +2024-08-25 03:46:10,563 INFO [train.py:1231] (1/4) Using DDP +2024-08-25 03:46:14,817 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-25 03:46:14,897 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. 
+2024-08-25 03:46:16,483 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-25 03:46:16,488 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-25 03:46:16,584 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-25 03:46:16,613 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-25 03:46:16,931 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-25 03:46:16,931 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-25 03:50:49,685 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=9.22 vs. limit=3.0 +2024-08-25 03:50:50,515 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:50:51,639 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:51:20,162 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:51:20,928 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=95.89 vs. limit=4.0 +2024-08-25 03:51:21,408 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:51:43,054 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:51:44,350 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:53:10,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.67 vs. limit=7.5 +2024-08-25 03:53:11,521 INFO [train.py:1114] (1/4) Epoch 1, batch 0, loss[loss=8.723, simple_loss=7.061, pruned_loss=6.92, ctc_loss=4.845, over 19825.00 frames. ], tot_loss[loss=8.723, simple_loss=7.061, pruned_loss=6.92, ctc_loss=4.845, over 19825.00 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 1.0 +2024-08-25 03:53:11,522 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 03:53:26,568 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=8.842, simple_loss=7.151, pruned_loss=6.961, ctc_loss=4.966, over 944034.00 frames. +2024-08-25 03:53:26,569 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12342MB +2024-08-25 03:53:27,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=7.5 +2024-08-25 03:53:27,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.40 vs. limit=7.5 +2024-08-25 03:53:37,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.94 vs. limit=7.5 +2024-08-25 03:53:38,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=0.0, ans=0.25 +2024-08-25 03:53:45,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=0.0, ans=0.2 +2024-08-25 03:54:08,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=0.0, ans=0.5 +2024-08-25 03:54:35,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. 
limit=7.52 +2024-08-25 03:54:36,707 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.008e+03 4.149e+03 4.360e+03 5.530e+03 5.553e+03, threshold=1.744e+04, percent-clipped=0.0 +2024-08-25 03:54:38,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=53.333333333333336, ans=0.4975 +2024-08-25 03:54:38,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.43 vs. limit=4.021333333333334 +2024-08-25 03:54:59,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.39 vs. limit=7.52 +2024-08-25 03:55:14,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=499.63 vs. limit=7.58 +2024-08-25 03:55:46,191 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.063e+03 1.598e+03 4.141e+03 5.530e+03 6.572e+03, threshold=1.656e+04, percent-clipped=0.0 +2024-08-25 03:55:46,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=94.13 vs. limit=5.026666666666666 +2024-08-25 03:57:05,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=503.08 vs. limit=7.58 +2024-08-25 03:57:10,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=374.53 vs. limit=7.58 +2024-08-25 03:57:18,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=352.40 vs. limit=5.08 +2024-08-25 03:57:21,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=232.56 vs. limit=7.56 +2024-08-25 04:00:04,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=213.33333333333334, ans=0.8925333333333334 +2024-08-25 04:00:04,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=68.02 vs. limit=7.58 +2024-08-25 04:00:14,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+02 1.048e+03 1.328e+03 4.149e+03 6.572e+03, threshold=5.310e+03, percent-clipped=0.0 +2024-08-25 04:00:17,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=213.33333333333334, ans=0.49 +2024-08-25 04:00:18,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=110.58 vs. limit=7.58 +2024-08-25 04:00:19,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=360.29 vs. limit=7.58 +2024-08-25 04:00:25,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=144.46 vs. limit=7.58 +2024-08-25 04:00:39,839 INFO [train.py:1114] (1/4) Epoch 1, batch 50, loss[loss=1.642, simple_loss=1.082, pruned_loss=1.24, ctc_loss=2.104, over 19710.00 frames. 
], tot_loss[loss=3.754, simple_loss=2.917, pruned_loss=2.565, ctc_loss=2.866, over 844643.19 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 04:00:48,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=74.31 vs. limit=7.7 +2024-08-25 04:00:50,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=266.6666666666667, ans=0.8906666666666667 +2024-08-25 04:00:54,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=266.6666666666667, ans=0.19 +2024-08-25 04:00:56,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=48.50 vs. limit=7.7 +2024-08-25 04:01:20,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=8.84 vs. limit=5.08 +2024-08-25 04:01:26,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=11.85 vs. limit=4.128 +2024-08-25 04:01:26,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=373.3333333333333, ans=0.4825 +2024-08-25 04:01:41,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=373.3333333333333, ans=0.0916 +2024-08-25 04:02:02,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.11 vs. limit=5.093333333333334 +2024-08-25 04:02:03,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=426.6666666666667, ans=0.29573333333333335 +2024-08-25 04:02:05,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=347.88 vs. limit=7.66 +2024-08-25 04:02:37,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=161.22 vs. limit=7.68 +2024-08-25 04:02:59,895 INFO [train.py:1114] (1/4) Epoch 1, batch 100, loss[loss=1.395, simple_loss=0.9742, pruned_loss=1.215, ctc_loss=1.341, over 19760.00 frames. ], tot_loss[loss=2.588, simple_loss=1.913, pruned_loss=1.869, ctc_loss=2.355, over 1498832.64 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 04:03:06,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=43.59 vs. limit=7.9 +2024-08-25 04:03:07,089 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.974e+02 8.674e+02 1.328e+03 6.572e+03, threshold=1.735e+03, percent-clipped=0.0 +2024-08-25 04:03:15,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=28.81 vs. 
limit=5.133333333333334 +2024-08-25 04:03:17,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=533.3333333333334, ans=5.333333333333333 +2024-08-25 04:03:19,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=3.088 +2024-08-25 04:03:19,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=586.6666666666666, ans=0.5 +2024-08-25 04:03:20,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=31.12 vs. limit=5.293333333333333 +2024-08-25 04:03:33,766 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=337.16 vs. limit=7.72 +2024-08-25 04:03:33,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=19.72 vs. limit=7.72 +2024-08-25 04:03:35,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=250.53 vs. limit=7.74 +2024-08-25 04:03:59,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=693.3333333333334, ans=0.4675 +2024-08-25 04:04:02,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=4.277333333333333 +2024-08-25 04:04:04,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=4.298666666666667 +2024-08-25 04:04:11,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=746.6666666666666, ans=5.466666666666667 +2024-08-25 04:04:22,845 INFO [train.py:1114] (1/4) Epoch 1, batch 150, loss[loss=1.145, simple_loss=0.7924, pruned_loss=0.9977, ctc_loss=1.075, over 19707.00 frames. ], tot_loss[loss=2.053, simple_loss=1.496, pruned_loss=1.573, ctc_loss=1.87, over 2028052.72 frames. ], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 04:04:25,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=800.0, ans=0.872 +2024-08-25 04:04:26,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=18.48 vs. limit=4.32 +2024-08-25 04:04:26,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=102.10 vs. limit=7.8 +2024-08-25 04:04:29,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=4.32 +2024-08-25 04:04:41,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=7.82 +2024-08-25 04:05:10,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=35.58 vs. 
limit=8.18 +2024-08-25 04:05:10,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=906.6666666666666, ans=0.4575 +2024-08-25 04:05:12,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.20 vs. limit=8.18 +2024-08-25 04:05:13,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=4.384 +2024-08-25 04:05:17,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=126.47 vs. limit=7.86 +2024-08-25 04:05:40,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=76.87 vs. limit=7.88 +2024-08-25 04:05:41,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=15.76 vs. limit=5.253333333333333 +2024-08-25 04:05:47,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=1013.3333333333334, ans=0.4525 +2024-08-25 04:05:52,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.62 vs. limit=8.3 +2024-08-25 04:05:53,934 INFO [train.py:1114] (1/4) Epoch 1, batch 200, loss[loss=1.28, simple_loss=0.881, pruned_loss=1.029, ctc_loss=1.239, over 18088.00 frames. ], tot_loss[loss=1.762, simple_loss=1.267, pruned_loss=1.383, ctc_loss=1.622, over 2435338.20 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0 +2024-08-25 04:05:55,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=10.40 vs. limit=4.426666666666667 +2024-08-25 04:05:55,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.37 vs. limit=7.9 +2024-08-25 04:05:56,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=1066.6666666666667, ans=0.7606666666666667 +2024-08-25 04:05:57,467 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.117e+01 1.191e+02 1.554e+02 2.219e+02 5.914e+02, threshold=3.108e+02, percent-clipped=0.0 +2024-08-25 04:05:57,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1066.6666666666667, ans=0.28933333333333333 +2024-08-25 04:06:01,747 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=47.69 vs. limit=7.9 +2024-08-25 04:06:02,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=57.47 vs. limit=8.3 +2024-08-25 04:06:03,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=142.93 vs. 
limit=7.9 +2024-08-25 04:06:08,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=1120.0, ans=0.093 +2024-08-25 04:06:12,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=42.99 vs. limit=7.92 +2024-08-25 04:06:12,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=1120.0, ans=0.36 +2024-08-25 04:06:19,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=39.93 vs. limit=8.38 +2024-08-25 04:06:20,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=34.46 vs. limit=7.94 +2024-08-25 04:06:23,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=55.55 vs. limit=7.94 +2024-08-25 04:06:28,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.27 vs. limit=5.306666666666667 +2024-08-25 04:06:50,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=39.86 vs. limit=7.98 +2024-08-25 04:06:55,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=1280.0, ans=0.44 +2024-08-25 04:06:55,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.53 vs. limit=7.98 +2024-08-25 04:06:56,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=1333.3333333333333, ans=0.8533333333333334 +2024-08-25 04:06:57,331 INFO [train.py:1114] (1/4) Epoch 1, batch 250, loss[loss=1.245, simple_loss=0.8443, pruned_loss=0.988, ctc_loss=1.217, over 19372.00 frames. ], tot_loss[loss=1.585, simple_loss=1.125, pruned_loss=1.253, ctc_loss=1.479, over 2755780.74 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0 +2024-08-25 04:07:48,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=1386.6666666666667, ans=0.435 +2024-08-25 04:07:49,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=1386.6666666666667, ans=0.8514666666666667 +2024-08-25 04:07:55,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=73.00 vs. 
limit=8.54 +2024-08-25 04:08:01,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=1440.0, ans=0.14600000000000002 +2024-08-25 04:08:04,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=1440.0, ans=0.5 +2024-08-25 04:08:07,315 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=3.482e-01 +2024-08-25 04:08:11,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=35.44 vs. limit=5.746666666666666 +2024-08-25 04:08:17,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.85 vs. limit=5.373333333333333 +2024-08-25 04:08:20,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=17.70 vs. limit=8.08 +2024-08-25 04:08:25,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=14.41 vs. limit=8.08 +2024-08-25 04:09:05,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=1546.6666666666667, ans=0.5 +2024-08-25 04:09:07,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.80 vs. limit=8.66 +2024-08-25 04:09:11,363 INFO [train.py:1114] (1/4) Epoch 1, batch 300, loss[loss=1.219, simple_loss=0.8167, pruned_loss=0.9461, ctc_loss=1.206, over 19521.00 frames. ], tot_loss[loss=1.469, simple_loss=1.03, pruned_loss=1.161, ctc_loss=1.388, over 3000944.94 frames. ], batch size: 61, lr: 3.60e-02, grad_scale: 2.0 +2024-08-25 04:09:11,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=1600.0, ans=0.5 +2024-08-25 04:09:12,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=1600.0, ans=0.425 +2024-08-25 04:09:14,905 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.125e+01 1.367e+02 1.753e+02 2.332e+02 3.681e+02, threshold=3.505e+02, percent-clipped=6.0 +2024-08-25 04:09:15,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=1600.0, ans=0.425 +2024-08-25 04:09:16,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=1600.0, ans=0.09000000000000001 +2024-08-25 04:09:16,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=48.03 vs. limit=8.1 +2024-08-25 04:10:27,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=1653.3333333333333, ans=0.08966666666666667 +2024-08-25 04:10:37,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=1706.6666666666667, ans=0.0616 +2024-08-25 04:10:45,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.68 vs. 
limit=8.82 +2024-08-25 04:11:07,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=40.73 vs. limit=8.18 +2024-08-25 04:11:09,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=53.05 vs. limit=5.906666666666666 +2024-08-25 04:11:09,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=1813.3333333333333, ans=0.415 +2024-08-25 04:11:13,112 INFO [train.py:1114] (1/4) Epoch 1, batch 350, loss[loss=1.014, simple_loss=0.6721, pruned_loss=0.7762, ctc_loss=1.002, over 19751.00 frames. ], tot_loss[loss=1.392, simple_loss=0.9654, pruned_loss=1.093, ctc_loss=1.327, over 3190480.39 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 2.0 +2024-08-25 04:11:17,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=1866.6666666666667, ans=0.4125 +2024-08-25 04:11:19,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.66 vs. limit=8.9 +2024-08-25 04:11:22,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.20 vs. limit=8.2 +2024-08-25 04:11:23,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=143.86 vs. limit=8.2 +2024-08-25 04:11:25,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=43.19 vs. limit=8.22 +2024-08-25 04:11:28,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.54 vs. limit=8.94 +2024-08-25 04:11:34,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=25.59 vs. limit=8.22 +2024-08-25 04:11:36,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.52 vs. limit=8.98 +2024-08-25 04:11:42,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.36 vs. limit=8.24 +2024-08-25 04:11:51,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=2026.6666666666667, ans=0.08733333333333333 +2024-08-25 04:11:51,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=2026.6666666666667, ans=0.0544 +2024-08-25 04:11:54,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.30 vs. limit=9.02 +2024-08-25 04:11:56,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=64.11 vs. limit=8.26 +2024-08-25 04:12:01,331 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=20.35 vs. 
limit=8.28 +2024-08-25 04:12:02,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.49 vs. limit=4.832 +2024-08-25 04:12:11,704 INFO [train.py:1114] (1/4) Epoch 1, batch 400, loss[loss=1.184, simple_loss=0.7949, pruned_loss=0.855, ctc_loss=1.145, over 19878.00 frames. ], tot_loss[loss=1.331, simple_loss=0.915, pruned_loss=1.034, ctc_loss=1.275, over 3342068.16 frames. ], batch size: 55, lr: 4.05e-02, grad_scale: 4.0 +2024-08-25 04:12:13,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=64.12 vs. limit=8.3 +2024-08-25 04:12:15,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.644e+02 2.144e+02 2.768e+02 4.713e+02, threshold=4.287e+02, percent-clipped=10.0 +2024-08-25 04:12:18,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=6.80 vs. limit=5.0 +2024-08-25 04:12:21,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=68.26 vs. limit=8.3 +2024-08-25 04:12:39,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=2240.0, ans=0.11599999999999999 +2024-08-25 04:12:40,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=17.84 vs. limit=8.34 +2024-08-25 04:13:02,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=2346.6666666666665, ans=0.8178666666666667 +2024-08-25 04:13:04,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.70 vs. limit=8.38 +2024-08-25 04:13:05,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.48 vs. limit=8.38 +2024-08-25 04:13:10,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.27 vs. limit=9.26 +2024-08-25 04:13:12,050 INFO [train.py:1114] (1/4) Epoch 1, batch 450, loss[loss=1.156, simple_loss=0.7904, pruned_loss=0.7709, ctc_loss=1.104, over 19609.00 frames. ], tot_loss[loss=1.28, simple_loss=0.877, pruned_loss=0.9721, ctc_loss=1.228, over 3450267.38 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0 +2024-08-25 04:14:03,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=2453.3333333333335, ans=0.385 +2024-08-25 04:14:04,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.57 vs. limit=8.42 +2024-08-25 04:14:08,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.55 vs. limit=8.42 +2024-08-25 04:14:08,673 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=23.12 vs. 
limit=8.42 +2024-08-25 04:14:11,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=2453.3333333333335, ans=0.385 +2024-08-25 04:14:12,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=2506.6666666666665, ans=0.22493333333333335 +2024-08-25 04:14:15,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.86 vs. limit=9.379999999999999 +2024-08-25 04:14:17,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=34.04 vs. limit=8.44 +2024-08-25 04:14:24,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=14.03 vs. limit=5.64 +2024-08-25 04:14:28,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.14 vs. limit=9.42 +2024-08-25 04:14:28,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2560.0, ans=0.2744 +2024-08-25 04:14:30,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.82 vs. limit=9.42 +2024-08-25 04:14:38,962 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.42 vs. limit=5.045333333333334 +2024-08-25 04:14:39,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.20 vs. limit=9.46 +2024-08-25 04:14:43,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=8.48 +2024-08-25 04:14:45,733 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.74 vs. limit=5.045333333333334 +2024-08-25 04:14:52,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-08-25 04:14:53,378 INFO [train.py:1114] (1/4) Epoch 1, batch 500, loss[loss=1.01, simple_loss=0.7024, pruned_loss=0.6173, ctc_loss=0.972, over 19664.00 frames. ], tot_loss[loss=1.217, simple_loss=0.835, pruned_loss=0.8932, ctc_loss=1.169, over 3545445.64 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:14:59,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.247e+02 2.224e+02 2.884e+02 3.405e+02 7.334e+02, threshold=5.768e+02, percent-clipped=15.0 +2024-08-25 04:15:03,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2666.6666666666665, ans=0.2733333333333333 +2024-08-25 04:15:09,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.97 vs. 
limit=8.52 +2024-08-25 04:15:23,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2773.3333333333335, ans=0.37 +2024-08-25 04:15:27,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=23.12 vs. limit=8.54 +2024-08-25 04:15:28,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.52 vs. limit=8.54 +2024-08-25 04:15:38,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=2826.6666666666665, ans=0.7782666666666667 +2024-08-25 04:15:45,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=8.58 +2024-08-25 04:15:49,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.33 vs. limit=4.576 +2024-08-25 04:15:50,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=8.58 +2024-08-25 04:15:52,593 INFO [train.py:1114] (1/4) Epoch 1, batch 550, loss[loss=0.912, simple_loss=0.6418, pruned_loss=0.5179, ctc_loss=0.8917, over 19313.00 frames. ], tot_loss[loss=1.151, simple_loss=0.7928, pruned_loss=0.8137, ctc_loss=1.108, over 3607780.47 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:15:56,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=2933.3333333333335, ans=0.08999999999999998 +2024-08-25 04:16:11,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.69 vs. limit=8.620000000000001 +2024-08-25 04:16:18,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=15.19 vs. limit=8.64 +2024-08-25 04:16:23,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=3040.0, ans=0.2456 +2024-08-25 04:17:00,419 INFO [train.py:1114] (1/4) Epoch 1, batch 600, loss[loss=0.8352, simple_loss=0.6007, pruned_loss=0.44, ctc_loss=0.8089, over 19416.00 frames. ], tot_loss[loss=1.082, simple_loss=0.7496, pruned_loss=0.7333, ctc_loss=1.041, over 3665856.03 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:17:03,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.48 vs. 
limit=5.8 +2024-08-25 04:17:03,771 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.809e+02 3.766e+02 4.633e+02 8.655e+02, threshold=7.532e+02, percent-clipped=12.0 +2024-08-25 04:17:05,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3200.0, ans=0.09999999999999998 +2024-08-25 04:17:13,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3253.3333333333335, ans=0.34750000000000003 +2024-08-25 04:17:19,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.77 vs. limit=5.8133333333333335 +2024-08-25 04:18:10,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.30 vs. limit=10.02 +2024-08-25 04:18:26,514 INFO [train.py:1114] (1/4) Epoch 1, batch 650, loss[loss=0.6971, simple_loss=0.5186, pruned_loss=0.3355, ctc_loss=0.6552, over 19771.00 frames. ], tot_loss[loss=1.008, simple_loss=0.7046, pruned_loss=0.6544, ctc_loss=0.9687, over 3716267.31 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:18:34,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=3466.6666666666665, ans=0.022000000000000006 +2024-08-25 04:18:39,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=3520.0, ans=0.0208 +2024-08-25 04:18:51,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.22 vs. limit=5.88 +2024-08-25 04:18:55,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.67 vs. limit=10.18 +2024-08-25 04:18:56,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3573.3333333333335, ans=0.26426666666666665 +2024-08-25 04:18:58,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=3573.3333333333335, ans=0.3325 +2024-08-25 04:20:32,372 INFO [train.py:1114] (1/4) Epoch 1, batch 700, loss[loss=0.6708, simple_loss=0.5004, pruned_loss=0.3244, ctc_loss=0.6167, over 19715.00 frames. ], tot_loss[loss=0.944, simple_loss=0.6662, pruned_loss=0.5867, ctc_loss=0.9031, over 3749077.52 frames. 
], batch size: 51, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:20:32,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=3733.3333333333335, ans=0.325 +2024-08-25 04:20:35,545 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 2.600e+02 3.309e+02 4.487e+02 1.180e+03, threshold=6.619e+02, percent-clipped=3.0 +2024-08-25 04:20:45,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=3786.6666666666665, ans=0.3225 +2024-08-25 04:20:49,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3786.6666666666665, ans=0.3225 +2024-08-25 04:20:55,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=3840.0, ans=0.32 +2024-08-25 04:21:00,179 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=10.379999999999999 +2024-08-25 04:21:02,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.98 vs. limit=5.536 +2024-08-25 04:21:09,528 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.930e-01 +2024-08-25 04:21:15,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=3946.6666666666665, ans=0.315 +2024-08-25 04:21:17,805 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=3.96 vs. limit=5.578666666666667 +2024-08-25 04:21:24,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=3946.6666666666665, ans=0.315 +2024-08-25 04:21:26,550 INFO [train.py:1114] (1/4) Epoch 1, batch 750, loss[loss=0.6553, simple_loss=0.5017, pruned_loss=0.2981, ctc_loss=0.586, over 19495.00 frames. ], tot_loss[loss=0.885, simple_loss=0.6316, pruned_loss=0.5271, ctc_loss=0.8405, over 3775728.42 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:21:28,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4000.0, ans=0.26 +2024-08-25 04:21:32,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=4000.0, ans=0.07500000000000001 +2024-08-25 04:21:32,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=4000.0, ans=0.3125 +2024-08-25 04:21:35,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.82 vs. limit=10.5 +2024-08-25 04:21:52,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.24 vs. 
limit=9.040000000000001 +2024-08-25 04:22:27,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=4160.0, ans=7.6 +2024-08-25 04:22:29,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4213.333333333333, ans=0.2578666666666667 +2024-08-25 04:22:33,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.75 vs. limit=10.66 +2024-08-25 04:22:37,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=4213.333333333333, ans=0.7525333333333334 +2024-08-25 04:22:40,637 INFO [train.py:1114] (1/4) Epoch 1, batch 800, loss[loss=0.5599, simple_loss=0.4397, pruned_loss=0.2454, ctc_loss=0.4764, over 19827.00 frames. ], tot_loss[loss=0.8299, simple_loss=0.6, pruned_loss=0.4739, ctc_loss=0.7789, over 3797367.18 frames. ], batch size: 49, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 04:22:43,863 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.484e+02 3.479e+02 4.307e+02 9.603e+02, threshold=6.957e+02, percent-clipped=4.0 +2024-08-25 04:22:45,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4266.666666666667, ans=0.2573333333333333 +2024-08-25 04:22:46,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4266.666666666667, ans=0.2573333333333333 +2024-08-25 04:23:23,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=4426.666666666667, ans=0.2925 +2024-08-25 04:23:34,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=10.86 +2024-08-25 04:23:37,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.45 vs. limit=6.12 +2024-08-25 04:23:40,877 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=9.898e-01 +2024-08-25 04:23:42,705 INFO [train.py:1114] (1/4) Epoch 1, batch 850, loss[loss=0.593, simple_loss=0.4709, pruned_loss=0.2536, ctc_loss=0.4992, over 19671.00 frames. ], tot_loss[loss=0.7802, simple_loss=0.5721, pruned_loss=0.4275, ctc_loss=0.722, over 3815221.61 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 04:23:46,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.49 vs. 
limit=10.9 +2024-08-25 04:23:50,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=4533.333333333333, ans=0.2875 +2024-08-25 04:23:51,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=4533.333333333333, ans=6.133333333333333 +2024-08-25 04:23:55,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4586.666666666667, ans=0.2541333333333333 +2024-08-25 04:24:36,255 INFO [train.py:1114] (1/4) Epoch 1, batch 900, loss[loss=0.5104, simple_loss=0.4095, pruned_loss=0.2147, ctc_loss=0.4231, over 19823.00 frames. ], tot_loss[loss=0.7393, simple_loss=0.5493, pruned_loss=0.3901, ctc_loss=0.674, over 3819074.93 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:24:36,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.80 vs. limit=11.1 +2024-08-25 04:24:39,557 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.433e+02 3.203e+02 4.513e+02 7.559e+02, threshold=6.406e+02, percent-clipped=2.0 +2024-08-25 04:24:39,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4800.0, ans=0.252 +2024-08-25 04:24:46,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.81 vs. limit=7.426666666666666 +2024-08-25 04:25:10,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.01 vs. limit=6.24 +2024-08-25 04:25:22,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5013.333333333333, ans=0.265 +2024-08-25 04:25:23,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5013.333333333333, ans=0.265 +2024-08-25 04:25:24,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=5013.333333333333, ans=0.03433333333333334 +2024-08-25 04:25:25,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.54 vs. limit=9.379999999999999 +2024-08-25 04:25:31,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=5066.666666666667, ans=0.025 +2024-08-25 04:25:32,738 INFO [train.py:1114] (1/4) Epoch 1, batch 950, loss[loss=0.5071, simple_loss=0.4155, pruned_loss=0.2039, ctc_loss=0.4135, over 19494.00 frames. ], tot_loss[loss=0.7005, simple_loss=0.5279, pruned_loss=0.3562, ctc_loss=0.6284, over 3820940.71 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:25:41,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.43 vs. 
limit=6.266666666666667 +2024-08-25 04:25:49,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5120.0, ans=0.0 +2024-08-25 04:25:54,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=5173.333333333333, ans=0.04949747468305833 +2024-08-25 04:25:55,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5173.333333333333, ans=0.24826666666666666 +2024-08-25 04:26:12,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=5226.666666666667, ans=0.06733333333333333 +2024-08-25 04:26:13,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=3.784 +2024-08-25 04:26:20,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5280.0, ans=0.24719999999999998 +2024-08-25 04:26:33,405 INFO [train.py:1114] (1/4) Epoch 1, batch 1000, loss[loss=0.4809, simple_loss=0.4059, pruned_loss=0.1847, ctc_loss=0.3747, over 19853.00 frames. ], tot_loss[loss=0.6694, simple_loss=0.5114, pruned_loss=0.3292, ctc_loss=0.5905, over 3817984.21 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:26:36,701 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.705e+02 2.226e+02 2.758e+02 3.479e+02 9.619e+02, threshold=5.516e+02, percent-clipped=3.0 +2024-08-25 04:26:44,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.82 vs. limit=6.154666666666667 +2024-08-25 04:26:47,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5386.666666666667, ans=0.2475 +2024-08-25 04:26:48,434 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.172e-01 +2024-08-25 04:26:57,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=5440.0, ans=0.245 +2024-08-25 04:27:00,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=5440.0, ans=0.025 +2024-08-25 04:27:02,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.74 vs. limit=9.54 +2024-08-25 04:27:11,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=5493.333333333333, ans=0.043777777777777784 +2024-08-25 04:27:21,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=9.58 +2024-08-25 04:27:25,690 INFO [train.py:1114] (1/4) Epoch 1, batch 1050, loss[loss=0.5276, simple_loss=0.4416, pruned_loss=0.2073, ctc_loss=0.4162, over 19861.00 frames. ], tot_loss[loss=0.6378, simple_loss=0.4943, pruned_loss=0.3034, ctc_loss=0.5529, over 3823633.07 frames. 
], batch size: 57, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:27:37,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.76 vs. limit=11.74 +2024-08-25 04:27:37,689 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=11.74 +2024-08-25 04:27:46,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=11.780000000000001 +2024-08-25 04:28:03,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=5760.0, ans=0.22999999999999998 +2024-08-25 04:28:09,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=5813.333333333333, ans=0.009605797101449275 +2024-08-25 04:28:20,199 INFO [train.py:1114] (1/4) Epoch 1, batch 1100, loss[loss=0.509, simple_loss=0.4281, pruned_loss=0.2009, ctc_loss=0.3954, over 19577.00 frames. ], tot_loss[loss=0.6097, simple_loss=0.4792, pruned_loss=0.2812, ctc_loss=0.5197, over 3830869.71 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:28:21,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5866.666666666667, ans=0.24133333333333332 +2024-08-25 04:28:23,261 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.625e+02 2.143e+02 2.593e+02 3.421e+02 4.407e+02, threshold=5.186e+02, percent-clipped=0.0 +2024-08-25 04:28:34,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=5920.0, ans=0.009582608695652174 +2024-08-25 04:28:35,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=5920.0, ans=11.940000000000001 +2024-08-25 04:28:46,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5973.333333333333, ans=0.24026666666666666 +2024-08-25 04:28:46,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=5973.333333333333, ans=0.025 +2024-08-25 04:28:54,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.44 vs. limit=8.013333333333334 +2024-08-25 04:28:57,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=6026.666666666667, ans=0.21750000000000003 +2024-08-25 04:29:15,933 INFO [train.py:1114] (1/4) Epoch 1, batch 1150, loss[loss=0.4886, simple_loss=0.4181, pruned_loss=0.1847, ctc_loss=0.3834, over 19567.00 frames. ], tot_loss[loss=0.5881, simple_loss=0.4681, pruned_loss=0.264, ctc_loss=0.4934, over 3829809.04 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 16.0 +2024-08-25 04:29:49,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=6240.0, ans=0.20750000000000002 +2024-08-25 04:32:28,005 INFO [train.py:1114] (1/4) Epoch 1, batch 1200, loss[loss=0.4917, simple_loss=0.4253, pruned_loss=0.1873, ctc_loss=0.3707, over 19852.00 frames. 
], tot_loss[loss=0.5708, simple_loss=0.4598, pruned_loss=0.2503, ctc_loss=0.4717, over 3825553.02 frames. ], batch size: 57, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:32:31,062 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 2.077e+02 2.797e+02 3.799e+02 8.339e+02, threshold=5.594e+02, percent-clipped=11.0 +2024-08-25 04:32:32,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=9.9 +2024-08-25 04:33:01,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6560.0, ans=0.1925 +2024-08-25 04:33:01,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=6560.0, ans=0.1925 +2024-08-25 04:33:03,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.87 vs. limit=12.42 +2024-08-25 04:33:05,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.74 vs. limit=6.64 +2024-08-25 04:33:07,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6560.0, ans=0.2344 +2024-08-25 04:33:07,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6560.0, ans=0.1925 +2024-08-25 04:33:19,310 INFO [train.py:1114] (1/4) Epoch 1, batch 1250, loss[loss=0.5131, simple_loss=0.4353, pruned_loss=0.2055, ctc_loss=0.3889, over 19519.00 frames. ], tot_loss[loss=0.551, simple_loss=0.4502, pruned_loss=0.2357, ctc_loss=0.4474, over 3843501.71 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:33:38,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=6720.0, ans=0.185 +2024-08-25 04:33:44,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=6773.333333333333, ans=0.07 +2024-08-25 04:33:48,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.30 vs. limit=10.04 +2024-08-25 04:33:49,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.29 vs. limit=10.04 +2024-08-25 04:34:06,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=6880.0, ans=0.1775 +2024-08-25 04:34:12,520 INFO [train.py:1114] (1/4) Epoch 1, batch 1300, loss[loss=0.509, simple_loss=0.4346, pruned_loss=0.2018, ctc_loss=0.3869, over 18874.00 frames. ], tot_loss[loss=0.5313, simple_loss=0.4399, pruned_loss=0.2223, ctc_loss=0.4245, over 3846323.59 frames. 
], batch size: 76, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:34:12,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=6933.333333333333, ans=0.175 +2024-08-25 04:34:15,557 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.007e+02 2.492e+02 3.309e+02 5.533e+02, threshold=4.985e+02, percent-clipped=0.0 +2024-08-25 04:34:19,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6933.333333333333, ans=0.23066666666666666 +2024-08-25 04:34:29,288 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:34:35,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=7040.0, ans=0.037333333333333336 +2024-08-25 04:34:54,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=7093.333333333333, ans=0.16749999999999998 +2024-08-25 04:34:57,344 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.94 vs. limit=12.82 +2024-08-25 04:35:00,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7146.666666666667, ans=0.2285333333333333 +2024-08-25 04:35:01,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.26 vs. limit=10.18 +2024-08-25 04:35:02,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=7146.666666666667, ans=0.03688888888888889 +2024-08-25 04:35:11,312 INFO [train.py:1114] (1/4) Epoch 1, batch 1350, loss[loss=0.4349, simple_loss=0.3882, pruned_loss=0.1596, ctc_loss=0.3242, over 19760.00 frames. ], tot_loss[loss=0.5147, simple_loss=0.4318, pruned_loss=0.2111, ctc_loss=0.405, over 3858477.95 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:35:16,909 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=12.9 +2024-08-25 04:35:23,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=7253.333333333333, ans=0.036444444444444446 +2024-08-25 04:35:25,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=7253.333333333333, ans=0.13290666666666667 +2024-08-25 04:35:30,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7306.666666666667, ans=0.22693333333333332 +2024-08-25 04:35:50,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.07 vs. limit=10.26 +2024-08-25 04:35:58,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=13.059999999999999 +2024-08-25 04:36:02,128 INFO [train.py:1114] (1/4) Epoch 1, batch 1400, loss[loss=0.3964, simple_loss=0.3503, pruned_loss=0.1501, ctc_loss=0.2954, over 19668.00 frames. 
], tot_loss[loss=0.5003, simple_loss=0.4246, pruned_loss=0.2019, ctc_loss=0.3882, over 3864290.12 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:36:05,098 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.980e+02 2.233e+02 2.820e+02 5.701e+02, threshold=4.466e+02, percent-clipped=2.0 +2024-08-25 04:36:06,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=7466.666666666667, ans=0.15000000000000002 +2024-08-25 04:36:25,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=7573.333333333333, ans=0.14500000000000002 +2024-08-25 04:36:30,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7573.333333333333, ans=0.14500000000000002 +2024-08-25 04:36:48,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7680.0, ans=0.03466666666666667 +2024-08-25 04:36:54,785 INFO [train.py:1114] (1/4) Epoch 1, batch 1450, loss[loss=0.4836, simple_loss=0.4296, pruned_loss=0.1859, ctc_loss=0.3491, over 19686.00 frames. ], tot_loss[loss=0.4908, simple_loss=0.4209, pruned_loss=0.1955, ctc_loss=0.3759, over 3861926.89 frames. ], batch size: 63, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:36:57,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=10.4 +2024-08-25 04:36:59,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7733.333333333333, ans=0.22266666666666668 +2024-08-25 04:37:02,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=7733.333333333333, ans=0.1375 +2024-08-25 04:37:03,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.83 vs. limit=6.933333333333334 +2024-08-25 04:37:30,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=7893.333333333333, ans=0.03377777777777778 +2024-08-25 04:37:40,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=7946.666666666667, ans=0.025 +2024-08-25 04:37:48,625 INFO [train.py:1114] (1/4) Epoch 1, batch 1500, loss[loss=0.4527, simple_loss=0.417, pruned_loss=0.1647, ctc_loss=0.3228, over 19597.00 frames. ], tot_loss[loss=0.48, simple_loss=0.4161, pruned_loss=0.1888, ctc_loss=0.3636, over 3862190.24 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:37:52,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=8000.0, ans=0.009130434782608696 +2024-08-25 04:37:52,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=8000.0, ans=0.125 +2024-08-25 04:37:52,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.09 vs. 
limit=9.0 +2024-08-25 04:37:54,384 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.582e+02 1.987e+02 2.351e+02 3.240e+02 5.717e+02, threshold=4.702e+02, percent-clipped=4.0 +2024-08-25 04:37:58,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=8000.0, ans=0.62 +2024-08-25 04:38:01,613 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:38:19,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=13.58 +2024-08-25 04:38:35,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=8160.0, ans=0.03266666666666667 +2024-08-25 04:38:56,151 INFO [train.py:1114] (1/4) Epoch 1, batch 1550, loss[loss=0.4656, simple_loss=0.418, pruned_loss=0.1754, ctc_loss=0.3485, over 19624.00 frames. ], tot_loss[loss=0.4721, simple_loss=0.4126, pruned_loss=0.1841, ctc_loss=0.3547, over 3846925.98 frames. ], batch size: 60, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:39:01,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=8266.666666666666, ans=0.125 +2024-08-25 04:39:05,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=8320.0, ans=0.6088 +2024-08-25 04:39:05,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=8320.0, ans=0.09899494936611666 +2024-08-25 04:39:39,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=8480.0, ans=0.00902608695652174 +2024-08-25 04:39:39,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.18 vs. limit=13.86 +2024-08-25 04:39:46,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.01 vs. limit=10.7 +2024-08-25 04:39:47,230 INFO [train.py:1114] (1/4) Epoch 1, batch 1600, loss[loss=0.4418, simple_loss=0.4094, pruned_loss=0.162, ctc_loss=0.3166, over 19831.00 frames. ], tot_loss[loss=0.4627, simple_loss=0.4084, pruned_loss=0.1788, ctc_loss=0.3443, over 3836866.91 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:39:52,854 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 2.044e+02 2.368e+02 2.950e+02 6.795e+02, threshold=4.737e+02, percent-clipped=6.0 +2024-08-25 04:40:01,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.86 vs. 
limit=13.94 +2024-08-25 04:40:04,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=8586.666666666666, ans=0.5994666666666667 +2024-08-25 04:40:33,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=8746.666666666666, ans=0.030222222222222227 +2024-08-25 04:40:42,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8800.0, ans=0.212 +2024-08-25 04:40:43,013 INFO [train.py:1114] (1/4) Epoch 1, batch 1650, loss[loss=0.4154, simple_loss=0.3867, pruned_loss=0.1534, ctc_loss=0.2949, over 19652.00 frames. ], tot_loss[loss=0.4535, simple_loss=0.4042, pruned_loss=0.1738, ctc_loss=0.335, over 3831942.97 frames. ], batch size: 59, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:41:50,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=8800.0, ans=0.07 +2024-08-25 04:43:04,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=8906.666666666666, ans=0.125 +2024-08-25 04:43:24,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=9013.333333333334, ans=0.0 +2024-08-25 04:43:28,637 INFO [train.py:1114] (1/4) Epoch 1, batch 1700, loss[loss=0.3467, simple_loss=0.3309, pruned_loss=0.124, ctc_loss=0.246, over 19670.00 frames. ], tot_loss[loss=0.4439, simple_loss=0.4002, pruned_loss=0.1684, ctc_loss=0.325, over 3845983.60 frames. ], batch size: 46, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:43:31,592 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.509e+02 1.986e+02 2.386e+02 2.791e+02 4.935e+02, threshold=4.772e+02, percent-clipped=1.0 +2024-08-25 04:43:35,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9066.666666666666, ans=0.20933333333333334 +2024-08-25 04:43:39,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=10.92 +2024-08-25 04:43:40,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.95 vs. limit=10.92 +2024-08-25 04:43:48,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=9173.333333333334, ans=0.125 +2024-08-25 04:43:56,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.94 vs. 
limit=14.42 +2024-08-25 04:44:00,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=9226.666666666666, ans=0.028222222222222225 +2024-08-25 04:44:03,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 04:44:07,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=9280.0, ans=10.98 +2024-08-25 04:45:26,323 INFO [train.py:1114] (1/4) Epoch 1, batch 1750, loss[loss=0.3455, simple_loss=0.3427, pruned_loss=0.1193, ctc_loss=0.2343, over 19627.00 frames. ], tot_loss[loss=0.4358, simple_loss=0.3967, pruned_loss=0.1643, ctc_loss=0.3167, over 3850913.43 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:45:34,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.60 vs. limit=11.0 +2024-08-25 04:46:11,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=9546.666666666666, ans=0.025 +2024-08-25 04:46:13,450 INFO [train.py:1114] (1/4) Epoch 1, batch 1800, loss[loss=0.4142, simple_loss=0.3973, pruned_loss=0.1512, ctc_loss=0.2903, over 19608.00 frames. ], tot_loss[loss=0.4305, simple_loss=0.3954, pruned_loss=0.1615, ctc_loss=0.3114, over 3852586.37 frames. ], batch size: 55, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:46:16,186 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 2.025e+02 2.321e+02 2.784e+02 4.120e+02, threshold=4.643e+02, percent-clipped=0.0 +2024-08-25 04:48:19,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=9813.333333333334, ans=0.07 +2024-08-25 04:48:26,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=9813.333333333334, ans=0.125 +2024-08-25 04:48:28,786 INFO [train.py:1114] (1/4) Epoch 1, batch 1850, loss[loss=0.397, simple_loss=0.3774, pruned_loss=0.1495, ctc_loss=0.2742, over 19578.00 frames. ], tot_loss[loss=0.4229, simple_loss=0.3923, pruned_loss=0.1578, ctc_loss=0.3036, over 3856379.05 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:48:37,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=9866.666666666666, ans=0.008724637681159421 +2024-08-25 04:48:38,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9920.0, ans=0.0 +2024-08-25 04:48:44,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=9920.0, ans=0.0 +2024-08-25 04:48:49,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=9973.333333333334, ans=11.24 +2024-08-25 04:48:52,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=9973.333333333334, ans=0.125 +2024-08-25 04:48:57,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.32 vs. 
limit=15.02 +2024-08-25 04:49:03,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10026.666666666666, ans=0.19973333333333332 +2024-08-25 04:49:13,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=10080.0, ans=0.125 +2024-08-25 04:49:15,886 INFO [train.py:1114] (1/4) Epoch 1, batch 1900, loss[loss=0.3993, simple_loss=0.3942, pruned_loss=0.1439, ctc_loss=0.2746, over 19629.00 frames. ], tot_loss[loss=0.4176, simple_loss=0.3909, pruned_loss=0.1552, ctc_loss=0.2984, over 3860757.23 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:49:18,621 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 2.031e+02 2.370e+02 2.878e+02 5.610e+02, threshold=4.739e+02, percent-clipped=2.0 +2024-08-25 04:49:56,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=10186.666666666666, ans=0.125 +2024-08-25 04:50:12,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=10240.0, ans=0.025 +2024-08-25 04:50:12,927 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:50:31,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=10400.0, ans=0.125 +2024-08-25 04:50:31,873 INFO [train.py:1114] (1/4) Epoch 1, batch 1950, loss[loss=0.3744, simple_loss=0.3712, pruned_loss=0.1339, ctc_loss=0.2665, over 19583.00 frames. ], tot_loss[loss=0.4114, simple_loss=0.3893, pruned_loss=0.1522, ctc_loss=0.2926, over 3870139.29 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:50:43,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=10453.333333333334, ans=0.023111111111111107 +2024-08-25 04:50:49,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=10506.666666666666, ans=0.022888888888888893 +2024-08-25 04:50:49,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=4.5760000000000005 +2024-08-25 04:50:53,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=10506.666666666666, ans=0.0 +2024-08-25 04:50:55,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.70 vs. limit=11.44 +2024-08-25 04:50:55,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10506.666666666666, ans=0.19493333333333335 +2024-08-25 04:51:09,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=10560.0, ans=0.125 +2024-08-25 04:52:05,814 INFO [train.py:1114] (1/4) Epoch 1, batch 2000, loss[loss=0.3488, simple_loss=0.3495, pruned_loss=0.1248, ctc_loss=0.2465, over 19677.00 frames. ], tot_loss[loss=0.4078, simple_loss=0.3884, pruned_loss=0.1507, ctc_loss=0.2899, over 3854520.47 frames. 
], batch size: 45, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:52:09,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.538e+02 1.861e+02 2.137e+02 2.685e+02 4.799e+02, threshold=4.274e+02, percent-clipped=1.0 +2024-08-25 04:52:10,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=11.5 +2024-08-25 04:53:25,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10720.0, ans=0.0 +2024-08-25 04:53:38,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=10720.0, ans=0.125 +2024-08-25 04:53:46,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10720.0, ans=0.19279999999999997 +2024-08-25 04:54:20,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=10880.0, ans=11.58 +2024-08-25 04:54:32,949 INFO [train.py:1114] (1/4) Epoch 1, batch 2050, loss[loss=0.3572, simple_loss=0.3558, pruned_loss=0.1303, ctc_loss=0.2449, over 19750.00 frames. ], tot_loss[loss=0.4011, simple_loss=0.385, pruned_loss=0.1479, ctc_loss=0.284, over 3850741.77 frames. ], batch size: 47, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:54:39,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=10933.333333333334, ans=0.125 +2024-08-25 04:54:49,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=4.648 +2024-08-25 04:55:22,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.05 vs. limit=10.546666666666667 +2024-08-25 04:55:26,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=11093.333333333334, ans=0.5117333333333334 +2024-08-25 04:55:35,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=4.672 +2024-08-25 04:55:42,293 INFO [train.py:1114] (1/4) Epoch 1, batch 2100, loss[loss=0.3977, simple_loss=0.3883, pruned_loss=0.1465, ctc_loss=0.2853, over 19772.00 frames. ], tot_loss[loss=0.3935, simple_loss=0.3816, pruned_loss=0.1443, ctc_loss=0.2773, over 3857647.84 frames. 
], batch size: 54, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:56:34,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11200.0, ans=0.188 +2024-08-25 04:56:36,117 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 1.936e+02 2.214e+02 2.535e+02 3.885e+02, threshold=4.428e+02, percent-clipped=0.0 +2024-08-25 04:56:37,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=11200.0, ans=0.508 +2024-08-25 04:56:41,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=11200.0, ans=0.020000000000000004 +2024-08-25 04:56:41,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.07 vs. limit=7.8 +2024-08-25 04:56:42,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11253.333333333334, ans=0.18746666666666667 +2024-08-25 04:57:08,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.42 vs. limit=15.98 +2024-08-25 04:57:25,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=11360.0, ans=0.019333333333333338 +2024-08-25 04:57:27,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=11413.333333333334, ans=0.125 +2024-08-25 04:57:30,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.80 vs. limit=4.712 +2024-08-25 04:57:35,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=11466.666666666666, ans=0.125 +2024-08-25 04:57:35,994 INFO [train.py:1114] (1/4) Epoch 1, batch 2150, loss[loss=0.3526, simple_loss=0.3625, pruned_loss=0.1232, ctc_loss=0.2405, over 19609.00 frames. ], tot_loss[loss=0.3868, simple_loss=0.3785, pruned_loss=0.141, ctc_loss=0.271, over 3868410.32 frames. ], batch size: 52, lr: 4.41e-02, grad_scale: 32.0 +2024-08-25 04:57:36,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=11466.666666666666, ans=0.4986666666666667 +2024-08-25 04:58:57,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=11520.0, ans=0.125 +2024-08-25 04:58:58,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=11520.0, ans=0.125 +2024-08-25 04:59:30,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.46 vs. limit=11.879999999999999 +2024-08-25 04:59:34,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.30 vs. limit=11.879999999999999 +2024-08-25 04:59:36,680 INFO [train.py:1114] (1/4) Epoch 1, batch 2200, loss[loss=0.3941, simple_loss=0.3932, pruned_loss=0.1427, ctc_loss=0.2739, over 19555.00 frames. 
], tot_loss[loss=0.3831, simple_loss=0.3771, pruned_loss=0.1392, ctc_loss=0.2678, over 3866197.78 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 32.0 +2024-08-25 04:59:40,227 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 1.884e+02 2.153e+02 2.810e+02 4.673e+02, threshold=4.307e+02, percent-clipped=1.0 +2024-08-25 04:59:40,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.10 vs. limit=16.3 +2024-08-25 04:59:41,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=11733.333333333334, ans=0.18266666666666664 +2024-08-25 04:59:45,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=11786.666666666666, ans=0.125 +2024-08-25 05:00:10,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11893.333333333334, ans=0.125 +2024-08-25 05:00:33,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=12000.0, ans=0.48000000000000004 +2024-08-25 05:00:34,243 INFO [train.py:1114] (1/4) Epoch 1, batch 2250, loss[loss=0.3561, simple_loss=0.3674, pruned_loss=0.1236, ctc_loss=0.2439, over 19598.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.3761, pruned_loss=0.1378, ctc_loss=0.2649, over 3866141.34 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:00:37,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.50 vs. limit=12.0 +2024-08-25 05:01:04,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=12053.333333333334, ans=0.4781333333333333 +2024-08-25 05:01:19,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.73 vs. limit=4.816 +2024-08-25 05:01:30,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=12160.0, ans=0.016 +2024-08-25 05:01:44,086 INFO [train.py:1114] (1/4) Epoch 1, batch 2300, loss[loss=0.3473, simple_loss=0.3555, pruned_loss=0.1223, ctc_loss=0.2361, over 19507.00 frames. ], tot_loss[loss=0.3767, simple_loss=0.3738, pruned_loss=0.1363, ctc_loss=0.2616, over 3860747.31 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:01:47,653 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.926e+02 2.114e+02 2.507e+02 4.625e+02, threshold=4.228e+02, percent-clipped=3.0 +2024-08-25 05:01:48,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=12266.666666666666, ans=0.015555555555555559 +2024-08-25 05:02:12,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.51 vs. limit=16.82 +2024-08-25 05:02:30,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12533.333333333334, ans=0.125 +2024-08-25 05:02:30,767 INFO [train.py:1114] (1/4) Epoch 1, batch 2350, loss[loss=0.3935, simple_loss=0.3888, pruned_loss=0.1436, ctc_loss=0.2775, over 19700.00 frames. 
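The `optim.py:487` warnings summarize the optimizer's gradient-clipping state: five grad-norm order statistics (min, 25%, 50%, 75%, max) over recent steps, the active clipping threshold, and the percentage of recent steps that were clipped. In every warning the threshold equals `Clipping_scale` times the running median (e.g. 2.0 × 2.153e+02 ≈ 4.307e+02 in the warning just above), so the threshold tracks the natural scale of the gradients instead of being a fixed constant. A minimal sketch of that mechanism follows; the buffer length and class name are hypothetical.

```python
# Sketch of median-relative gradient clipping, as implied by the optim.py
# warnings: threshold = clipping_scale * median of recent grad norms.
# Buffer length and class name are hypothetical illustrations.
from collections import deque
import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 1000):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)
        self.num_clipped = 0  # basis for a "percent-clipped" statistic

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        # global norm over all parameter gradients
        norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])
        ).item()
        self.norms.append(norm)
        ordered = sorted(self.norms)
        threshold = self.clipping_scale * ordered[len(ordered) // 2]
        if norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        return threshold

    def quartiles(self):
        # min / 25% / 50% / 75% / max, as printed in the warnings
        ordered = sorted(self.norms)
        return [ordered[int(q * (len(ordered) - 1))]
                for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
```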
], tot_loss[loss=0.3747, simple_loss=0.3732, pruned_loss=0.1354, ctc_loss=0.2596, over 3863328.77 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:04:30,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=12640.0, ans=0.125 +2024-08-25 05:04:38,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=12693.333333333334, ans=10.0 +2024-08-25 05:04:44,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=9.077333333333334 +2024-08-25 05:04:57,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=12746.666666666666, ans=0.025 +2024-08-25 05:05:04,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.56 vs. limit=6.5600000000000005 +2024-08-25 05:05:05,278 INFO [train.py:1114] (1/4) Epoch 1, batch 2400, loss[loss=0.4004, simple_loss=0.3957, pruned_loss=0.1467, ctc_loss=0.2791, over 19379.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.3751, pruned_loss=0.1356, ctc_loss=0.2595, over 3858170.43 frames. ], batch size: 67, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:05:08,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.948e+02 2.252e+02 2.666e+02 4.870e+02, threshold=4.504e+02, percent-clipped=4.0 +2024-08-25 05:05:11,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.05 vs. limit=9.120000000000001 +2024-08-25 05:05:18,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=12853.333333333334, ans=0.00807536231884058 +2024-08-25 05:05:21,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.52 vs. limit=4.928 +2024-08-25 05:05:33,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12960.0, ans=0.125 +2024-08-25 05:05:35,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=12960.0, ans=0.125 +2024-08-25 05:05:44,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.99 vs. limit=12.379999999999999 +2024-08-25 05:05:45,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=13013.333333333334, ans=0.3952 +2024-08-25 05:05:52,656 INFO [train.py:1114] (1/4) Epoch 1, batch 2450, loss[loss=0.4947, simple_loss=0.433, pruned_loss=0.2032, ctc_loss=0.3751, over 13653.00 frames. ], tot_loss[loss=0.3853, simple_loss=0.3806, pruned_loss=0.1408, ctc_loss=0.2684, over 3732983.70 frames. ], batch size: 140, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:06:18,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.56 vs. 
limit=8.266666666666666 +2024-08-25 05:06:27,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=13120.0, ans=0.008017391304347827 +2024-08-25 05:06:31,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=13173.333333333334, ans=0.125 +2024-08-25 05:07:49,748 INFO [train.py:1114] (1/4) Epoch 2, batch 0, loss[loss=0.3543, simple_loss=0.3605, pruned_loss=0.1275, ctc_loss=0.2328, over 19390.00 frames. ], tot_loss[loss=0.3543, simple_loss=0.3605, pruned_loss=0.1275, ctc_loss=0.2328, over 19390.00 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 05:07:49,749 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-25 05:09:16,713 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.2886, simple_loss=0.3508, pruned_loss=0.0823, ctc_loss=0.1542, over 944034.00 frames. +2024-08-25 05:09:16,714 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13283MB +2024-08-25 05:09:16,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13280.0, ans=0.1672 +2024-08-25 05:09:35,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 1.938e+02 2.191e+02 2.677e+02 6.592e+02, threshold=4.382e+02, percent-clipped=7.0 +2024-08-25 05:09:36,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=13333.333333333334, ans=0.125 +2024-08-25 05:09:52,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13440.0, ans=0.1656 +2024-08-25 05:09:52,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=13440.0, ans=0.125 +2024-08-25 05:10:10,713 INFO [train.py:1114] (1/4) Epoch 2, batch 50, loss[loss=0.2993, simple_loss=0.3185, pruned_loss=0.1009, ctc_loss=0.1963, over 19743.00 frames. ], tot_loss[loss=0.3735, simple_loss=0.3755, pruned_loss=0.1345, ctc_loss=0.2565, over 845814.23 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:11:12,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=13653.333333333334, ans=0.4221333333333333 +2024-08-25 05:11:14,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=13653.333333333334, ans=0.125 +2024-08-25 05:11:46,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=13813.333333333334, ans=0.025 +2024-08-25 05:11:47,132 INFO [train.py:1114] (1/4) Epoch 2, batch 100, loss[loss=0.3074, simple_loss=0.3364, pruned_loss=0.09915, ctc_loss=0.2002, over 19718.00 frames. ], tot_loss[loss=0.3733, simple_loss=0.377, pruned_loss=0.1338, ctc_loss=0.2549, over 1499532.21 frames. 
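The `train.py:1137/1146/1147` records above show the start-of-epoch validation pass: gradients disabled, losses averaged over roughly 944k held-out frames, and peak GPU memory reported afterwards. A minimal sketch of that flow under standard PyTorch assumptions; `model_forward` is a hypothetical stand-in for the actual batch-to-loss computation.

```python
# Sketch of the validation pass logged above: no-grad evaluation, losses
# averaged with frame-count weights, then a peak-memory report.
import torch

def model_forward(model, batch, device):
    # Hypothetical stand-in: the real code maps a batch to (loss, num_frames).
    loss, num_frames = model(batch.to(device))
    return loss, num_frames

def compute_validation_loss(model, valid_loader, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model_forward(model, batch, device)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    print(f"validation: loss={tot_loss / tot_frames:.4f}, "
          f"over {tot_frames:.2f} frames.")
    # Peak GPU memory, as in "Maximum memory allocated so far is 13283MB"
    # (requires a CUDA device, as in the logged multi-GPU run).
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {peak_mb}MB")
    return tot_loss, tot_frames
```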
], batch size: 51, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:12:00,806 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.907e+02 2.167e+02 2.481e+02 4.957e+02, threshold=4.333e+02, percent-clipped=1.0 +2024-08-25 05:12:01,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=13866.666666666666, ans=0.0 +2024-08-25 05:12:14,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=13920.0, ans=0.125 +2024-08-25 05:12:22,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=13973.333333333334, ans=0.025 +2024-08-25 05:12:47,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=14080.0, ans=8.52 +2024-08-25 05:12:50,537 INFO [train.py:1114] (1/4) Epoch 2, batch 150, loss[loss=0.3072, simple_loss=0.3237, pruned_loss=0.1056, ctc_loss=0.1984, over 19716.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.3716, pruned_loss=0.1299, ctc_loss=0.2474, over 2028297.15 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:12:53,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=12.780000000000001 +2024-08-25 05:12:53,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=14080.0, ans=0.125 +2024-08-25 05:14:14,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.19 vs. limit=18.18 +2024-08-25 05:14:33,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14240.0, ans=0.15760000000000002 +2024-08-25 05:14:50,529 INFO [train.py:1114] (1/4) Epoch 2, batch 200, loss[loss=0.3813, simple_loss=0.3779, pruned_loss=0.1382, ctc_loss=0.2706, over 18347.00 frames. ], tot_loss[loss=0.3586, simple_loss=0.3669, pruned_loss=0.1268, ctc_loss=0.2418, over 2435894.34 frames. ], batch size: 85, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:14:54,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.59 vs. limit=12.879999999999999 +2024-08-25 05:15:13,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=14400.0, ans=0.125 +2024-08-25 05:15:14,927 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.847e+02 2.110e+02 2.499e+02 4.235e+02, threshold=4.220e+02, percent-clipped=0.0 +2024-08-25 05:15:49,817 INFO [train.py:1114] (1/4) Epoch 2, batch 250, loss[loss=0.3884, simple_loss=0.3984, pruned_loss=0.1371, ctc_loss=0.2604, over 19410.00 frames. ], tot_loss[loss=0.3558, simple_loss=0.3657, pruned_loss=0.1252, ctc_loss=0.2387, over 2755875.80 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:16:18,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.17 vs. limit=18.5 +2024-08-25 05:16:21,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.73 vs. 
limit=9.866666666666667 +2024-08-25 05:16:26,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=14720.0, ans=0.09899494936611666 +2024-08-25 05:16:30,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.55 vs. limit=18.54 +2024-08-25 05:16:31,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14720.0, ans=0.125 +2024-08-25 05:16:35,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=14720.0, ans=0.125 +2024-08-25 05:19:29,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14826.666666666666, ans=0.125 +2024-08-25 05:19:35,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=14826.666666666666, ans=0.125 +2024-08-25 05:19:37,930 INFO [train.py:1114] (1/4) Epoch 2, batch 300, loss[loss=0.3925, simple_loss=0.3961, pruned_loss=0.1413, ctc_loss=0.2658, over 19532.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3642, pruned_loss=0.1241, ctc_loss=0.2362, over 3000767.60 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:19:43,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=14880.0, ans=0.1512 +2024-08-25 05:19:49,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=14880.0, ans=0.025 +2024-08-25 05:19:51,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.64 vs. limit=13.1 +2024-08-25 05:19:51,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.96 vs. limit=8.733333333333334 +2024-08-25 05:19:53,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.36 vs. limit=8.733333333333334 +2024-08-25 05:19:56,630 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.858e+02 2.099e+02 2.398e+02 3.801e+02, threshold=4.198e+02, percent-clipped=0.0 +2024-08-25 05:19:58,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=14933.333333333334, ans=0.004444444444444438 +2024-08-25 05:20:13,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=14986.666666666666, ans=0.007611594202898551 +2024-08-25 05:20:25,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15040.0, ans=0.125 +2024-08-25 05:20:51,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=15093.333333333334, ans=0.125 +2024-08-25 05:20:51,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.01 vs. 
limit=18.82 +2024-08-25 05:20:54,653 INFO [train.py:1114] (1/4) Epoch 2, batch 350, loss[loss=0.2976, simple_loss=0.3227, pruned_loss=0.09887, ctc_loss=0.1868, over 19746.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3638, pruned_loss=0.1234, ctc_loss=0.2352, over 3189874.42 frames. ], batch size: 48, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:20:58,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.44 vs. limit=18.86 +2024-08-25 05:21:08,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=15200.0, ans=0.125 +2024-08-25 05:21:21,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-25 05:21:32,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=13.219999999999999 +2024-08-25 05:22:06,919 INFO [train.py:1114] (1/4) Epoch 2, batch 400, loss[loss=0.3413, simple_loss=0.3606, pruned_loss=0.1165, ctc_loss=0.2227, over 19498.00 frames. ], tot_loss[loss=0.3514, simple_loss=0.3635, pruned_loss=0.1229, ctc_loss=0.2336, over 3341436.16 frames. ], batch size: 54, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:22:08,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=15413.333333333334, ans=0.007518840579710145 +2024-08-25 05:22:10,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15413.333333333334, ans=0.0 +2024-08-25 05:22:20,586 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.895e+02 2.189e+02 2.528e+02 4.758e+02, threshold=4.379e+02, percent-clipped=2.0 +2024-08-25 05:22:25,727 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:22:28,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=15520.0, ans=0.0020000000000000018 +2024-08-25 05:23:46,286 INFO [train.py:1114] (1/4) Epoch 2, batch 450, loss[loss=0.3499, simple_loss=0.3691, pruned_loss=0.121, ctc_loss=0.2216, over 19607.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3622, pruned_loss=0.1222, ctc_loss=0.2322, over 3450016.16 frames. 
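On the recurring `scaling.py:1024` Whitening records: each compares a whiteness metric of a module's activations against a limit that is itself scheduled upward over training (limits of ~11.5 early in the log, 15.0 and 22.5 later), and a corrective penalty applies only when the metric exceeds the limit. The sketch below shows one plausible formulation of such a metric, normalized so that perfectly white (identity-covariance) features score 1.0 and correlated features score higher; it is a reconstruction for illustration, not necessarily this repository's exact formula.

```python
# Plausible whiteness metric behind "Whitening: ... metric=X vs. limit=Y":
# measures how far the per-group channel covariance is from a scaled identity.
# This is an illustrative reconstruction, not necessarily the repo's formula.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    # x: (num_frames, num_channels)
    num_frames, num_channels = x.shape
    cpg = num_channels // num_groups  # channels per group
    xg = x.reshape(num_frames, num_groups, cpg).permute(1, 0, 2)
    cov = torch.matmul(xg.transpose(1, 2), xg) / num_frames  # (groups, cpg, cpg)
    trace = cov.diagonal(dim1=1, dim2=2).sum(dim=1)
    # == 1 when cov is proportional to the identity; grows with correlation
    metric = cpg * (cov ** 2).sum(dim=(1, 2)) / trace ** 2
    return metric.mean()

x = torch.randn(10000, 384)
print(whitening_metric(x))  # ~1.04 for white noise (1.0 in the large-sample limit)
```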
], batch size: 55, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:23:51,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=15680.0, ans=0.025 +2024-08-25 05:23:52,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=15680.0, ans=0.025 +2024-08-25 05:24:07,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=15786.666666666666, ans=0.125 +2024-08-25 05:24:14,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=15840.0, ans=0.0006666666666666696 +2024-08-25 05:24:22,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=15840.0, ans=0.125 +2024-08-25 05:24:25,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=15893.333333333334, ans=0.125 +2024-08-25 05:24:29,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=15893.333333333334, ans=0.007414492753623188 +2024-08-25 05:24:37,929 INFO [train.py:1114] (1/4) Epoch 2, batch 500, loss[loss=0.3542, simple_loss=0.3731, pruned_loss=0.1201, ctc_loss=0.2377, over 19633.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3603, pruned_loss=0.1205, ctc_loss=0.229, over 3545224.30 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:26:00,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16000.0, ans=0.14 +2024-08-25 05:26:05,506 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.778e+02 2.035e+02 2.349e+02 4.286e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-25 05:26:19,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.61 vs. limit=19.54 +2024-08-25 05:26:24,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=16106.666666666666, ans=0.035 +2024-08-25 05:26:35,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.43 vs. limit=19.619999999999997 +2024-08-25 05:26:35,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=16160.0, ans=0.125 +2024-08-25 05:26:49,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=16160.0, ans=0.125 +2024-08-25 05:26:53,764 INFO [train.py:1114] (1/4) Epoch 2, batch 550, loss[loss=0.3619, simple_loss=0.3702, pruned_loss=0.1265, ctc_loss=0.2518, over 19278.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3606, pruned_loss=0.1207, ctc_loss=0.229, over 3606845.61 frames. ], batch size: 71, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:27:03,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=16213.333333333334, ans=0.125 +2024-08-25 05:27:07,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. 
limit=13.58 +2024-08-25 05:27:16,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.26 vs. limit=19.7 +2024-08-25 05:27:27,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=16320.0, ans=0.125 +2024-08-25 05:27:36,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=16373.333333333334, ans=0.0 +2024-08-25 05:28:02,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=16373.333333333334, ans=0.3269333333333333 +2024-08-25 05:28:02,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=16373.333333333334, ans=0.125 +2024-08-25 05:28:05,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 05:28:15,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=16426.666666666668, ans=0.0 +2024-08-25 05:28:19,445 INFO [train.py:1114] (1/4) Epoch 2, batch 600, loss[loss=0.356, simple_loss=0.3757, pruned_loss=0.1224, ctc_loss=0.2287, over 19448.00 frames. ], tot_loss[loss=0.3456, simple_loss=0.3601, pruned_loss=0.1201, ctc_loss=0.2276, over 3664934.48 frames. ], batch size: 67, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:28:19,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=16480.0, ans=0.125 +2024-08-25 05:28:34,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.917e+02 2.183e+02 2.770e+02 8.189e+02, threshold=4.366e+02, percent-clipped=5.0 +2024-08-25 05:28:38,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=16533.333333333332, ans=0.08466666666666667 +2024-08-25 05:28:55,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16640.0, ans=0.13360000000000002 +2024-08-25 05:29:14,185 INFO [train.py:1114] (1/4) Epoch 2, batch 650, loss[loss=0.3069, simple_loss=0.3377, pruned_loss=0.09826, ctc_loss=0.1991, over 19762.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3581, pruned_loss=0.1184, ctc_loss=0.2248, over 3715274.43 frames. 
], batch size: 54, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:31:10,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=16746.666666666668, ans=0.07 +2024-08-25 05:31:10,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=16746.666666666668, ans=0.007228985507246377 +2024-08-25 05:31:27,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16800.0, ans=0.125 +2024-08-25 05:31:38,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=16853.333333333332, ans=0.025 +2024-08-25 05:31:45,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=16906.666666666668, ans=0.025 +2024-08-25 05:31:56,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=16960.0, ans=10.0 +2024-08-25 05:32:06,451 INFO [train.py:1114] (1/4) Epoch 2, batch 700, loss[loss=0.3208, simple_loss=0.336, pruned_loss=0.1112, ctc_loss=0.2079, over 19719.00 frames. ], tot_loss[loss=0.342, simple_loss=0.358, pruned_loss=0.1182, ctc_loss=0.2242, over 3747259.36 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:32:06,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=17013.333333333332, ans=0.07 +2024-08-25 05:32:44,641 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.00 vs. limit=9.266666666666667 +2024-08-25 05:32:47,855 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.759e+02 2.005e+02 2.359e+02 5.033e+02, threshold=4.011e+02, percent-clipped=2.0 +2024-08-25 05:32:53,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=17066.666666666668, ans=0.0 +2024-08-25 05:33:14,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=13.94 +2024-08-25 05:33:15,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=17173.333333333332, ans=0.125 +2024-08-25 05:33:28,064 INFO [train.py:1114] (1/4) Epoch 2, batch 750, loss[loss=0.3358, simple_loss=0.3555, pruned_loss=0.1155, ctc_loss=0.2129, over 19500.00 frames. ], tot_loss[loss=0.3422, simple_loss=0.358, pruned_loss=0.1184, ctc_loss=0.224, over 3774238.27 frames. ], batch size: 54, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:35:19,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=17280.0, ans=0.125 +2024-08-25 05:37:33,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=17493.333333333332, ans=0.2877333333333334 +2024-08-25 05:37:40,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=17546.666666666668, ans=0.0 +2024-08-25 05:37:40,883 INFO [train.py:1114] (1/4) Epoch 2, batch 800, loss[loss=0.3057, simple_loss=0.3337, pruned_loss=0.101, ctc_loss=0.1893, over 19415.00 frames. 
], tot_loss[loss=0.3408, simple_loss=0.3572, pruned_loss=0.1177, ctc_loss=0.2225, over 3796416.54 frames. ], batch size: 48, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 05:37:43,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=17546.666666666668, ans=0.025 +2024-08-25 05:37:44,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=17546.666666666668, ans=0.125 +2024-08-25 05:37:47,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.53 vs. limit=20.66 +2024-08-25 05:38:06,533 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.845e+02 2.130e+02 2.517e+02 4.310e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 05:38:21,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.47 vs. limit=9.413333333333334 +2024-08-25 05:38:40,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=17760.0, ans=0.0070086956521739135 +2024-08-25 05:38:48,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=17813.333333333332, ans=0.125 +2024-08-25 05:38:48,680 INFO [train.py:1114] (1/4) Epoch 2, batch 850, loss[loss=0.3463, simple_loss=0.3701, pruned_loss=0.1165, ctc_loss=0.2238, over 19672.00 frames. ], tot_loss[loss=0.3388, simple_loss=0.3559, pruned_loss=0.1167, ctc_loss=0.2205, over 3814423.31 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 16.0 +2024-08-25 05:38:59,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=17813.333333333332, ans=0.125 +2024-08-25 05:39:09,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=17866.666666666668, ans=0.0 +2024-08-25 05:39:30,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=14.24 +2024-08-25 05:39:33,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=14.24 +2024-08-25 05:39:58,487 INFO [train.py:1114] (1/4) Epoch 2, batch 900, loss[loss=0.3158, simple_loss=0.3344, pruned_loss=0.1082, ctc_loss=0.2018, over 19803.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3563, pruned_loss=0.1171, ctc_loss=0.2209, over 3818215.99 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:40:09,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.23 vs. 
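The `grad_scale` field in the per-batch records is the AMP loss scale. It sits at 32.0 for most of the log, halves to 16.0 at Epoch 2 batch 850 and again to 8.0 at batch 900 (after steps whose gradients overflowed), and grows back to 16.0 and 32.0 in later records once steps stay clean. That is the standard behaviour of `torch.cuda.amp.GradScaler`; a minimal sketch of the loop, with the model, optimizer, and batch as placeholders:

```python
# Sketch of the native-AMP step implied by the "grad_scale: 32.0 / 16.0 / 8.0"
# fields: GradScaler halves its scale on inf/nan gradients and doubles it again
# after a run of clean steps. Model/optimizer/batch names are placeholders.
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

def training_step(model, optimizer, batch, device):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = model(batch.to(device))       # placeholder forward -> scalar loss
    scaler.scale(loss).backward()            # backprop the scaled loss
    scaler.step(optimizer)                   # skips the step if grads overflowed
    scaler.update()                          # halve on overflow, grow when clean
    return loss.detach(), scaler.get_scale() # get_scale() is the logged grad_scale
```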
limit=14.280000000000001 +2024-08-25 05:40:19,555 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.852e+02 2.189e+02 2.703e+02 9.878e+02, threshold=4.378e+02, percent-clipped=3.0 +2024-08-25 05:40:49,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=18240.0, ans=0.125 +2024-08-25 05:40:49,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18240.0, ans=0.11760000000000001 +2024-08-25 05:41:14,124 INFO [train.py:1114] (1/4) Epoch 2, batch 950, loss[loss=0.2776, simple_loss=0.3105, pruned_loss=0.08921, ctc_loss=0.1656, over 19517.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.356, pruned_loss=0.1169, ctc_loss=0.2201, over 3819215.99 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:41:23,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=18346.666666666668, ans=0.125 +2024-08-25 05:41:24,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18400.0, ans=0.125 +2024-08-25 05:41:41,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=18453.333333333332, ans=0.0 +2024-08-25 05:42:02,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18560.0, ans=0.1144 +2024-08-25 05:42:03,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=18560.0, ans=0.0 +2024-08-25 05:42:06,446 INFO [train.py:1114] (1/4) Epoch 2, batch 1000, loss[loss=0.3228, simple_loss=0.3443, pruned_loss=0.1091, ctc_loss=0.2074, over 19834.00 frames. ], tot_loss[loss=0.3398, simple_loss=0.3568, pruned_loss=0.1173, ctc_loss=0.2204, over 3814199.32 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:42:21,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=18613.333333333332, ans=0.0 +2024-08-25 05:42:38,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=18666.666666666668, ans=0.125 +2024-08-25 05:42:41,293 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.839e+02 2.030e+02 2.416e+02 3.488e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-25 05:42:46,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=18720.0, ans=0.025 +2024-08-25 05:42:51,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.94 vs. limit=11.488 +2024-08-25 05:42:59,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=5.816 +2024-08-25 05:43:02,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=18773.333333333332, ans=0.00678840579710145 +2024-08-25 05:43:03,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.77 vs. 
limit=9.693333333333332 +2024-08-25 05:43:16,642 INFO [train.py:1114] (1/4) Epoch 2, batch 1050, loss[loss=0.3271, simple_loss=0.3547, pruned_loss=0.1091, ctc_loss=0.2035, over 19836.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3553, pruned_loss=0.1161, ctc_loss=0.2179, over 3821203.38 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:43:23,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=18880.0, ans=0.006765217391304348 +2024-08-25 05:43:31,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=18933.333333333332, ans=0.025 +2024-08-25 05:43:53,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18986.666666666668, ans=0.125 +2024-08-25 05:44:00,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=19040.0, ans=0.48560000000000003 +2024-08-25 05:44:00,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=19040.0, ans=0.125 +2024-08-25 05:44:02,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=19040.0, ans=0.23360000000000003 +2024-08-25 05:44:04,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=3.68 vs. limit=14.64 +2024-08-25 05:44:18,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19093.333333333332, ans=0.10906666666666667 +2024-08-25 05:44:23,180 INFO [train.py:1114] (1/4) Epoch 2, batch 1100, loss[loss=0.3232, simple_loss=0.3454, pruned_loss=0.1106, ctc_loss=0.1993, over 19593.00 frames. ], tot_loss[loss=0.3354, simple_loss=0.3543, pruned_loss=0.1151, ctc_loss=0.2157, over 3829216.55 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:44:39,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=19146.666666666668, ans=0.125 +2024-08-25 05:44:40,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=14.68 +2024-08-25 05:44:42,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.07 vs. limit=14.7 +2024-08-25 05:44:48,510 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.777e+02 2.009e+02 2.448e+02 3.967e+02, threshold=4.019e+02, percent-clipped=0.0 +2024-08-25 05:44:57,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.80 vs. limit=14.719999999999999 +2024-08-25 05:45:05,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19306.666666666668, ans=0.10693333333333332 +2024-08-25 05:45:31,303 INFO [train.py:1114] (1/4) Epoch 2, batch 1150, loss[loss=0.3094, simple_loss=0.3356, pruned_loss=0.1023, ctc_loss=0.1965, over 19580.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3535, pruned_loss=0.1145, ctc_loss=0.2147, over 3829591.84 frames. 
], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:45:33,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19413.333333333332, ans=0.10586666666666669 +2024-08-25 05:45:45,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=19466.666666666668, ans=0.00663768115942029 +2024-08-25 05:45:52,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=19520.0, ans=0.0 +2024-08-25 05:47:24,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.06 vs. limit=22.22 +2024-08-25 05:47:24,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=14.86 +2024-08-25 05:47:34,831 INFO [train.py:1114] (1/4) Epoch 2, batch 1200, loss[loss=0.3091, simple_loss=0.3536, pruned_loss=0.09606, ctc_loss=0.1811, over 19837.00 frames. ], tot_loss[loss=0.3345, simple_loss=0.354, pruned_loss=0.1145, ctc_loss=0.2149, over 3824515.07 frames. ], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 05:47:44,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=19733.333333333332, ans=0.125 +2024-08-25 05:47:46,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.13 vs. limit=22.3 +2024-08-25 05:47:50,321 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.798e+02 2.208e+02 2.852e+02 1.698e+03, threshold=4.415e+02, percent-clipped=3.0 +2024-08-25 05:48:04,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=19733.333333333332, ans=0.006579710144927537 +2024-08-25 05:48:20,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=19840.0, ans=0.0 +2024-08-25 05:48:21,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19840.0, ans=0.10160000000000002 +2024-08-25 05:48:40,349 INFO [train.py:1114] (1/4) Epoch 2, batch 1250, loss[loss=0.3444, simple_loss=0.3679, pruned_loss=0.1184, ctc_loss=0.2102, over 19534.00 frames. ], tot_loss[loss=0.3325, simple_loss=0.3532, pruned_loss=0.1133, ctc_loss=0.2129, over 3842435.77 frames. 
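Each `loss[...]` / `tot_loss[...]` record reports a total alongside its components: the simple and pruned terms of a pruned-transducer objective plus an auxiliary CTC term. Across this log the total is reproduced by one fixed weighting, 0.5 × simple_loss + 1.0 × pruned_loss + 0.2 × ctc_loss (e.g. batch 1200 above: 0.5·0.3536 + 0.09606 + 0.2·0.1811 ≈ 0.3091). A sketch of that combination follows; the scales are inferred from the logged numbers, not read from the training configuration.

```python
# Total objective as a weighted sum of the pruned-transducer terms plus an
# auxiliary CTC term. Scales are inferred from the logged records, not taken
# from the actual training config.
def total_loss(simple_loss: float, pruned_loss: float, ctc_loss: float,
               simple_scale: float = 0.5,
               pruned_scale: float = 1.0,
               ctc_scale: float = 0.2) -> float:
    return (simple_scale * simple_loss
            + pruned_scale * pruned_loss
            + ctc_scale * ctc_loss)

# Reproduces the record for Epoch 2, batch 1200 above:
print(total_loss(0.3536, 0.09606, 0.1811))  # ~0.3091
```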
], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:48:43,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=19946.666666666668, ans=0.0 +2024-08-25 05:48:50,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=20000.0, ans=0.025 +2024-08-25 05:49:25,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=20106.666666666668, ans=0.125 +2024-08-25 05:49:29,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=20160.0, ans=0.125 +2024-08-25 05:49:31,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=20160.0, ans=0.125 +2024-08-25 05:49:32,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20160.0, ans=0.125 +2024-08-25 05:49:36,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.55 vs. limit=10.0 +2024-08-25 05:49:37,279 INFO [train.py:1114] (1/4) Epoch 2, batch 1300, loss[loss=0.3401, simple_loss=0.3644, pruned_loss=0.115, ctc_loss=0.2146, over 18869.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3519, pruned_loss=0.112, ctc_loss=0.2105, over 3846646.55 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:49:48,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=20266.666666666668, ans=0.125 +2024-08-25 05:49:52,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=20266.666666666668, ans=0.2 +2024-08-25 05:49:52,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.771e+02 1.898e+02 2.175e+02 3.765e+02, threshold=3.796e+02, percent-clipped=0.0 +2024-08-25 05:50:20,710 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:50:25,312 INFO [train.py:1114] (1/4) Epoch 2, batch 1350, loss[loss=0.3114, simple_loss=0.3389, pruned_loss=0.1031, ctc_loss=0.1942, over 19775.00 frames. ], tot_loss[loss=0.3293, simple_loss=0.3516, pruned_loss=0.1116, ctc_loss=0.2095, over 3858167.57 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:50:42,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=20533.333333333332, ans=0.0 +2024-08-25 05:50:45,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=20533.333333333332, ans=0.025 +2024-08-25 05:50:47,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=20533.333333333332, ans=0.0 +2024-08-25 05:50:53,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.11 vs. limit=15.0 +2024-08-25 05:50:57,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.91 vs. 
limit=10.0 +2024-08-25 05:51:04,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=20640.0, ans=0.0 +2024-08-25 05:51:19,094 INFO [train.py:1114] (1/4) Epoch 2, batch 1400, loss[loss=0.2869, simple_loss=0.3116, pruned_loss=0.09494, ctc_loss=0.1809, over 19678.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3505, pruned_loss=0.1107, ctc_loss=0.208, over 3864535.33 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:51:23,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=20746.666666666668, ans=0.0 +2024-08-25 05:51:25,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=20746.666666666668, ans=0.006359420289855073 +2024-08-25 05:51:30,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.68 vs. limit=10.0 +2024-08-25 05:51:34,341 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 1.933e+02 2.205e+02 2.519e+02 3.569e+02, threshold=4.410e+02, percent-clipped=0.0 +2024-08-25 05:51:40,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.12 vs. limit=15.0 +2024-08-25 05:51:48,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=20906.666666666668, ans=0.0 +2024-08-25 05:51:54,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20906.666666666668, ans=0.1 +2024-08-25 05:52:01,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20960.0, ans=0.0 +2024-08-25 05:52:02,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=20960.0, ans=0.0 +2024-08-25 05:52:06,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=20960.0, ans=0.125 +2024-08-25 05:52:09,371 INFO [train.py:1114] (1/4) Epoch 2, batch 1450, loss[loss=0.3436, simple_loss=0.3679, pruned_loss=0.1164, ctc_loss=0.216, over 19723.00 frames. ], tot_loss[loss=0.3281, simple_loss=0.351, pruned_loss=0.111, ctc_loss=0.2082, over 3862719.65 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:52:33,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=21120.0, ans=10.0 +2024-08-25 05:52:43,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=21173.333333333332, ans=0.025 +2024-08-25 05:52:52,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=21226.666666666668, ans=0.2 +2024-08-25 05:52:57,019 INFO [train.py:1114] (1/4) Epoch 2, batch 1500, loss[loss=0.3221, simple_loss=0.3531, pruned_loss=0.1053, ctc_loss=0.2011, over 19583.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3515, pruned_loss=0.1111, ctc_loss=0.2084, over 3861872.94 frames. 
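Finally, on how `tot_loss` relates to the per-batch `loss`: the totals are frame-weighted running averages, reset at epoch boundaries (Epoch 2, batch 0 earlier reports a tot_loss equal to its own batch loss). The frame counts grow from ~0.85M at batch 50 to ~3.86M and then plateau, which points to an exponentially decayed window rather than a plain epoch-long sum. A minimal sketch of such a tracker; the frame weighting is evident from the log, but the decay constant below is illustrative.

```python
# Sketch of the tracker behind "tot_loss[... over N frames.]": frame-weighted,
# exponentially windowed running averages. The decay constant is illustrative;
# only the frame weighting and epoch reset are evident from the log.
class LossTracker:
    def __init__(self, decay: float = 0.9967):
        self.decay = decay
        self.sums = {}      # metric name -> frame-weighted loss sum
        self.frames = 0.0

    def update(self, losses: dict, num_frames: float) -> None:
        self.frames = self.decay * self.frames + num_frames
        for name, value in losses.items():
            self.sums[name] = (self.decay * self.sums.get(name, 0.0)
                               + value * num_frames)

    def averages(self) -> dict:
        return {name: s / self.frames for name, s in self.sums.items()}

# Feeding in the Epoch 2, batch 1450 record above:
tracker = LossTracker()
tracker.update({"loss": 0.3436, "ctc_loss": 0.216}, num_frames=19723.0)
print(tracker.averages(), f"over {tracker.frames:.2f} frames")
```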
], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:53:13,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21333.333333333332, ans=0.1 +2024-08-25 05:53:17,230 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.832e+02 2.087e+02 2.558e+02 5.212e+02, threshold=4.175e+02, percent-clipped=3.0 +2024-08-25 05:53:18,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=21333.333333333332, ans=0.1 +2024-08-25 05:53:33,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=12.0 +2024-08-25 05:53:36,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21440.0, ans=0.1 +2024-08-25 05:53:45,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=21493.333333333332, ans=0.006197101449275363 +2024-08-25 05:53:47,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=21493.333333333332, ans=0.09899494936611666 +2024-08-25 05:53:58,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=21493.333333333332, ans=0.006197101449275363 +2024-08-25 05:54:02,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21493.333333333332, ans=0.125 +2024-08-25 05:54:03,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=21493.333333333332, ans=0.006197101449275363 +2024-08-25 05:54:05,952 INFO [train.py:1114] (1/4) Epoch 2, batch 1550, loss[loss=0.3891, simple_loss=0.3891, pruned_loss=0.1432, ctc_loss=0.2567, over 19584.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.3517, pruned_loss=0.1116, ctc_loss=0.2091, over 3846711.90 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0 +2024-08-25 05:54:06,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.06 vs. limit=15.0 +2024-08-25 05:54:18,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=15.0 +2024-08-25 05:54:21,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.27 vs. 
limit=15.0
+2024-08-25 05:54:32,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=21600.0, ans=0.05
+2024-08-25 05:54:35,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=21600.0, ans=0.125
+2024-08-25 05:54:37,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=21600.0, ans=0.125
+2024-08-25 05:54:47,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=21653.333333333332, ans=0.1
+2024-08-25 05:54:55,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.18 vs. limit=10.0
+2024-08-25 05:55:11,760 INFO [train.py:1114] (1/4) Epoch 2, batch 1600, loss[loss=0.3207, simple_loss=0.3563, pruned_loss=0.1028, ctc_loss=0.1985, over 19837.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.351, pruned_loss=0.1113, ctc_loss=0.2088, over 3835638.61 frames. ], batch size: 57, lr: 4.13e-02, grad_scale: 32.0
+2024-08-25 05:55:13,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=21813.333333333332, ans=0.0
+2024-08-25 05:55:17,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=21813.333333333332, ans=0.2
+2024-08-25 05:55:27,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=21866.666666666668, ans=0.125
+2024-08-25 05:55:32,429 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.812e+02 2.122e+02 2.604e+02 4.336e+02, threshold=4.244e+02, percent-clipped=2.0
+2024-08-25 05:55:41,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21920.0, ans=0.125
+2024-08-25 05:55:41,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=21920.0, ans=0.125
+2024-08-25 05:56:05,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0
+2024-08-25 05:56:09,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=22026.666666666668, ans=0.125
+2024-08-25 05:56:13,283 INFO [train.py:1114] (1/4) Epoch 2, batch 1650, loss[loss=0.348, simple_loss=0.3672, pruned_loss=0.1215, ctc_loss=0.2146, over 19641.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3498, pruned_loss=0.1106, ctc_loss=0.2073, over 3833478.71 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 16.0
+2024-08-25 05:56:13,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=22080.0, ans=0.0
+2024-08-25 05:56:37,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=22186.666666666668, ans=0.0
+2024-08-25 05:56:42,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=22186.666666666668, ans=0.0
+2024-08-25 05:57:05,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.39 vs. limit=15.0
+2024-08-25 05:57:11,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=22293.333333333332, ans=0.0
+2024-08-25 05:57:12,900 INFO [train.py:1114] (1/4) Epoch 2, batch 1700, loss[loss=0.2901, simple_loss=0.3142, pruned_loss=0.09545, ctc_loss=0.1877, over 19679.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3489, pruned_loss=0.1094, ctc_loss=0.2053, over 3848170.98 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 16.0
+2024-08-25 05:57:29,334 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.791e+02 2.005e+02 2.338e+02 3.555e+02, threshold=4.010e+02, percent-clipped=0.0
+2024-08-25 05:57:46,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.75 vs. limit=15.0
+2024-08-25 05:58:02,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=22506.666666666668, ans=0.125
+2024-08-25 05:58:27,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=22560.0, ans=0.2
+2024-08-25 05:58:34,072 INFO [train.py:1114] (1/4) Epoch 2, batch 1750, loss[loss=0.3049, simple_loss=0.3218, pruned_loss=0.1057, ctc_loss=0.1915, over 19643.00 frames. ], tot_loss[loss=0.3231, simple_loss=0.3476, pruned_loss=0.1086, ctc_loss=0.2037, over 3853458.26 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 16.0
+2024-08-25 05:58:35,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=22613.333333333332, ans=0.2
+2024-08-25 05:58:35,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.27 vs. limit=22.5
+2024-08-25 05:58:40,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=22613.333333333332, ans=0.0
+2024-08-25 05:58:43,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=22666.666666666668, ans=0.07
+2024-08-25 05:58:43,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.40 vs. limit=6.0
+2024-08-25 05:58:55,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=22720.0, ans=0.125
+2024-08-25 05:59:14,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=15.0
+2024-08-25 05:59:15,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=22826.666666666668, ans=0.125
+2024-08-25 05:59:19,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.85 vs. limit=22.5
+2024-08-25 05:59:24,711 INFO [train.py:1114] (1/4) Epoch 2, batch 1800, loss[loss=0.321, simple_loss=0.3529, pruned_loss=0.1045, ctc_loss=0.2002, over 19617.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3479, pruned_loss=0.1085, ctc_loss=0.2038, over 3854744.74 frames. ], batch size: 55, lr: 4.11e-02, grad_scale: 16.0
+2024-08-25 05:59:26,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=22880.0, ans=0.2
+2024-08-25 05:59:26,666 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 05:59:39,815 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.812e+02 2.002e+02 2.312e+02 3.839e+02, threshold=4.004e+02, percent-clipped=0.0
+2024-08-25 06:00:12,424 INFO [train.py:1114] (1/4) Epoch 2, batch 1850, loss[loss=0.343, simple_loss=0.3622, pruned_loss=0.1179, ctc_loss=0.22, over 19579.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3475, pruned_loss=0.1081, ctc_loss=0.2028, over 3858462.73 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 16.0
+2024-08-25 06:00:12,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=23146.666666666668, ans=0.125
+2024-08-25 06:00:19,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=23146.666666666668, ans=10.0
+2024-08-25 06:00:28,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=23200.0, ans=0.2
+2024-08-25 06:00:54,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=23360.0, ans=0.125
+2024-08-25 06:00:59,781 INFO [train.py:1114] (1/4) Epoch 2, batch 1900, loss[loss=0.3291, simple_loss=0.3591, pruned_loss=0.107, ctc_loss=0.2127, over 19635.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3478, pruned_loss=0.108, ctc_loss=0.2025, over 3861789.55 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 16.0
+2024-08-25 06:01:02,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=23413.333333333332, ans=0.2
+2024-08-25 06:01:07,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=23413.333333333332, ans=0.95
+2024-08-25 06:01:13,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.58 vs. limit=12.0
+2024-08-25 06:01:18,897 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.814e+02 2.067e+02 2.451e+02 4.716e+02, threshold=4.135e+02, percent-clipped=1.0
+2024-08-25 06:01:41,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.82 vs. limit=22.5
+2024-08-25 06:01:52,057 INFO [train.py:1114] (1/4) Epoch 2, batch 1950, loss[loss=0.3302, simple_loss=0.3527, pruned_loss=0.1116, ctc_loss=0.2114, over 19600.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3482, pruned_loss=0.1076, ctc_loss=0.202, over 3870452.80 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 16.0
+2024-08-25 06:02:03,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=23733.333333333332, ans=0.0
+2024-08-25 06:02:21,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=23840.0, ans=0.2
+2024-08-25 06:02:23,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=23840.0, ans=0.125
+2024-08-25 06:02:33,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23893.333333333332, ans=0.1
+2024-08-25 06:02:40,739 INFO [train.py:1114] (1/4) Epoch 2, batch 2000, loss[loss=0.2654, simple_loss=0.3013, pruned_loss=0.08289, ctc_loss=0.1593, over 19636.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3487, pruned_loss=0.1081, ctc_loss=0.2025, over 3854595.39 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0
+2024-08-25 06:02:45,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=23946.666666666668, ans=0.125
+2024-08-25 06:02:48,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23946.666666666668, ans=0.125
+2024-08-25 06:02:49,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=23946.666666666668, ans=0.125
+2024-08-25 06:02:52,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=24000.0, ans=0.125
+2024-08-25 06:02:57,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.781e+02 1.996e+02 2.377e+02 5.355e+02, threshold=3.992e+02, percent-clipped=1.0
+2024-08-25 06:03:03,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.84 vs. limit=15.0
+2024-08-25 06:03:07,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=24053.333333333332, ans=0.2
+2024-08-25 06:03:29,341 INFO [train.py:1114] (1/4) Epoch 2, batch 2050, loss[loss=0.2712, simple_loss=0.306, pruned_loss=0.08526, ctc_loss=0.1644, over 19707.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3472, pruned_loss=0.1074, ctc_loss=0.2014, over 3851137.52 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 06:03:56,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0
+2024-08-25 06:04:12,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.78 vs. limit=15.0
+2024-08-25 06:04:17,657 INFO [train.py:1114] (1/4) Epoch 2, batch 2100, loss[loss=0.3176, simple_loss=0.3475, pruned_loss=0.1047, ctc_loss=0.1959, over 19760.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3459, pruned_loss=0.1066, ctc_loss=0.2, over 3857910.48 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 06:04:22,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0
+2024-08-25 06:04:23,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=24480.0, ans=10.0
+2024-08-25 06:04:33,041 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.823e+02 2.012e+02 2.259e+02 3.531e+02, threshold=4.024e+02, percent-clipped=0.0
+2024-08-25 06:04:41,162 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:04:43,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.81 vs. limit=15.0
+2024-08-25 06:04:56,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=24693.333333333332, ans=0.125
+2024-08-25 06:05:02,135 INFO [train.py:1114] (1/4) Epoch 2, batch 2150, loss[loss=0.3065, simple_loss=0.3351, pruned_loss=0.1022, ctc_loss=0.1837, over 19577.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3448, pruned_loss=0.1058, ctc_loss=0.1983, over 3868715.03 frames. ], batch size: 52, lr: 4.07e-02, grad_scale: 32.0
+2024-08-25 06:05:12,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=24746.666666666668, ans=0.2
+2024-08-25 06:05:19,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=24800.0, ans=0.5
+2024-08-25 06:05:33,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=24906.666666666668, ans=0.005455072463768116
+2024-08-25 06:05:36,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 06:05:39,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=24906.666666666668, ans=0.09899494936611666
+2024-08-25 06:05:40,516 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:05:43,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 06:05:44,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 06:05:44,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=24960.0, ans=0.125
+2024-08-25 06:05:56,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.63 vs. limit=10.0
+2024-08-25 06:05:58,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-08-25 06:05:59,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=24960.0, ans=0.2
+2024-08-25 06:06:01,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.38 vs. limit=10.0
+2024-08-25 06:06:01,433 INFO [train.py:1114] (1/4) Epoch 2, batch 2200, loss[loss=0.3593, simple_loss=0.3804, pruned_loss=0.1234, ctc_loss=0.2285, over 19601.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3454, pruned_loss=0.1062, ctc_loss=0.1989, over 3867598.72 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 06:06:11,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=25013.333333333332, ans=0.125
+2024-08-25 06:06:25,288 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.924e+02 2.286e+02 2.709e+02 6.222e+02, threshold=4.573e+02, percent-clipped=4.0
+2024-08-25 06:06:25,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=25066.666666666668, ans=0.125
+2024-08-25 06:06:34,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=25120.0, ans=0.125
+2024-08-25 06:06:36,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=25173.333333333332, ans=0.005397101449275363
+2024-08-25 06:06:52,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.73 vs. limit=15.0
+2024-08-25 06:06:54,567 INFO [train.py:1114] (1/4) Epoch 2, batch 2250, loss[loss=0.3134, simple_loss=0.3504, pruned_loss=0.09975, ctc_loss=0.1921, over 19604.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.3454, pruned_loss=0.1061, ctc_loss=0.1985, over 3867623.39 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 06:06:56,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=25280.0, ans=0.125
+2024-08-25 06:06:59,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.92 vs. limit=15.0
+2024-08-25 06:07:03,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.35 vs. limit=12.0
+2024-08-25 06:07:07,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=25333.333333333332, ans=0.125
+2024-08-25 06:07:07,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.19 vs. limit=15.0
+2024-08-25 06:07:07,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=25333.333333333332, ans=0.125
+2024-08-25 06:07:25,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=25440.0, ans=0.0
+2024-08-25 06:07:34,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.88 vs. limit=22.5
+2024-08-25 06:07:41,081 INFO [train.py:1114] (1/4) Epoch 2, batch 2300, loss[loss=0.291, simple_loss=0.3237, pruned_loss=0.09445, ctc_loss=0.1736, over 19519.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.3443, pruned_loss=0.1058, ctc_loss=0.198, over 3861882.93 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 32.0
+2024-08-25 06:07:55,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25600.0, ans=0.1
+2024-08-25 06:07:58,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.382e+02 1.775e+02 2.049e+02 2.504e+02 6.120e+02, threshold=4.097e+02, percent-clipped=1.0
+2024-08-25 06:08:05,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.70 vs. limit=15.0
+2024-08-25 06:08:11,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=25706.666666666668, ans=0.5
+2024-08-25 06:08:11,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25706.666666666668, ans=0.1
+2024-08-25 06:08:21,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=25760.0, ans=0.025
+2024-08-25 06:08:25,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=25760.0, ans=0.0
+2024-08-25 06:08:29,078 INFO [train.py:1114] (1/4) Epoch 2, batch 2350, loss[loss=0.3296, simple_loss=0.3598, pruned_loss=0.1087, ctc_loss=0.2051, over 19672.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3445, pruned_loss=0.1062, ctc_loss=0.1983, over 3864853.50 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 06:08:30,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=25813.333333333332, ans=0.125
+2024-08-25 06:08:33,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=25813.333333333332, ans=0.1
+2024-08-25 06:08:39,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.21 vs. limit=15.0
+2024-08-25 06:08:42,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=25866.666666666668, ans=0.0
+2024-08-25 06:08:51,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=25920.0, ans=0.2
+2024-08-25 06:09:05,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=25973.333333333332, ans=0.2
+2024-08-25 06:09:22,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=20.15 vs. limit=22.5
+2024-08-25 06:09:23,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=26026.666666666668, ans=0.125
+2024-08-25 06:09:26,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26026.666666666668, ans=0.125
+2024-08-25 06:09:28,452 INFO [train.py:1114] (1/4) Epoch 2, batch 2400, loss[loss=0.3595, simple_loss=0.3727, pruned_loss=0.1264, ctc_loss=0.234, over 19187.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.3462, pruned_loss=0.1065, ctc_loss=0.1988, over 3859043.96 frames. ], batch size: 71, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 06:09:37,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.99 vs. limit=10.0
+2024-08-25 06:09:41,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.35 vs. limit=12.0
+2024-08-25 06:09:43,437 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.803e+02 2.129e+02 2.459e+02 5.388e+02, threshold=4.257e+02, percent-clipped=1.0
+2024-08-25 06:09:46,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26186.666666666668, ans=0.1
+2024-08-25 06:10:13,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=26293.333333333332, ans=0.2
+2024-08-25 06:10:14,670 INFO [train.py:1114] (1/4) Epoch 2, batch 2450, loss[loss=0.412, simple_loss=0.3875, pruned_loss=0.1588, ctc_loss=0.297, over 13465.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3523, pruned_loss=0.112, ctc_loss=0.209, over 3732884.53 frames. ], batch size: 141, lr: 4.03e-02, grad_scale: 32.0
+2024-08-25 06:10:17,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.68 vs. limit=15.0
+2024-08-25 06:10:25,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=26400.0, ans=0.125
+2024-08-25 06:10:28,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.56 vs. limit=22.5
+2024-08-25 06:10:33,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.14 vs. limit=22.5
+2024-08-25 06:10:42,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=26453.333333333332, ans=0.005118840579710145
+2024-08-25 06:10:43,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=11.05 vs. limit=12.0
+2024-08-25 06:11:53,152 INFO [train.py:1114] (1/4) Epoch 3, batch 0, loss[loss=0.3058, simple_loss=0.3247, pruned_loss=0.1041, ctc_loss=0.1964, over 19823.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3247, pruned_loss=0.1041, ctc_loss=0.1964, over 19823.00 frames. ], batch size: 49, lr: 3.83e-02, grad_scale: 32.0
+2024-08-25 06:11:53,508 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-25 06:12:07,831 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2565, simple_loss=0.3309, pruned_loss=0.06653, ctc_loss=0.1228, over 944034.00 frames.
+2024-08-25 06:12:07,832 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 13283MB
+2024-08-25 06:12:09,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=26554.666666666668, ans=0.125
+2024-08-25 06:12:18,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=26554.666666666668, ans=0.125
+2024-08-25 06:13:57,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=26608.0, ans=0.125
+2024-08-25 06:15:44,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=26661.333333333332, ans=0.005073623188405798
+2024-08-25 06:16:00,091 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.983e+02 2.286e+02 2.644e+02 3.774e+02, threshold=4.572e+02, percent-clipped=0.0
+2024-08-25 06:17:19,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=26714.666666666668, ans=0.025
+2024-08-25 06:17:19,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26714.666666666668, ans=0.125
+2024-08-25 06:18:48,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26768.0, ans=0.125
+2024-08-25 06:20:22,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26768.0, ans=0.125
+2024-08-25 06:22:59,718 INFO [train.py:1114] (1/4) Epoch 3, batch 50, loss[loss=0.2482, simple_loss=0.2913, pruned_loss=0.07413, ctc_loss=0.1419, over 19667.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.347, pruned_loss=0.1069, ctc_loss=0.2014, over 844347.37 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 06:23:30,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.97 vs. limit=15.0
+2024-08-25 06:23:38,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=26821.333333333332, ans=0.125
+2024-08-25 06:29:45,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=26874.666666666668, ans=0.2
+2024-08-25 06:31:29,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=26874.666666666668, ans=0.125
+2024-08-25 06:34:49,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=26874.666666666668, ans=0.2
+2024-08-25 06:40:51,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=15.0
+2024-08-25 06:44:24,665 INFO [train.py:1114] (1/4) Epoch 3, batch 100, loss[loss=0.2845, simple_loss=0.3255, pruned_loss=0.08814, ctc_loss=0.1681, over 19702.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3485, pruned_loss=0.107, ctc_loss=0.2016, over 1499014.27 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 06:46:48,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=27141.333333333332, ans=0.125
+2024-08-25 06:46:48,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.20 vs. limit=22.5
+2024-08-25 06:46:59,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=27141.333333333332, ans=0.125
+2024-08-25 06:47:46,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=27194.666666666668, ans=0.125
+2024-08-25 06:47:49,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.02 vs. limit=22.5
+2024-08-25 06:47:50,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=27194.666666666668, ans=0.2
+2024-08-25 06:48:13,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.53 vs. limit=15.0
+2024-08-25 06:48:15,480 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.744e+02 2.032e+02 2.291e+02 1.205e+03, threshold=4.063e+02, percent-clipped=1.0
+2024-08-25 06:50:29,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.13 vs. limit=22.5
+2024-08-25 06:50:33,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.60 vs. limit=15.0
+2024-08-25 06:50:41,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0
+2024-08-25 06:50:43,552 INFO [train.py:1114] (1/4) Epoch 3, batch 150, loss[loss=0.3103, simple_loss=0.3288, pruned_loss=0.1072, ctc_loss=0.1936, over 19725.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3453, pruned_loss=0.1051, ctc_loss=0.1977, over 2028246.68 frames. ], batch size: 47, lr: 3.81e-02, grad_scale: 16.0
+2024-08-25 06:51:34,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=27354.666666666668, ans=0.125
+2024-08-25 06:51:51,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27354.666666666668, ans=0.125
+2024-08-25 06:53:48,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=27514.666666666668, ans=0.004888115942028985
+2024-08-25 06:54:02,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=27514.666666666668, ans=0.125
+2024-08-25 06:54:15,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27568.0, ans=0.125
+2024-08-25 06:54:31,940 INFO [train.py:1114] (1/4) Epoch 3, batch 200, loss[loss=0.361, simple_loss=0.3786, pruned_loss=0.1255, ctc_loss=0.2311, over 18329.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3424, pruned_loss=0.1035, ctc_loss=0.1945, over 2436541.20 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 06:54:49,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.20 vs. limit=6.0
+2024-08-25 06:56:00,392 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.731e+02 1.977e+02 2.205e+02 3.305e+02, threshold=3.953e+02, percent-clipped=0.0
+2024-08-25 06:56:10,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=27781.333333333332, ans=0.125
+2024-08-25 06:56:34,804 INFO [train.py:1114] (1/4) Epoch 3, batch 250, loss[loss=0.3446, simple_loss=0.3719, pruned_loss=0.1159, ctc_loss=0.2138, over 19373.00 frames. ], tot_loss[loss=0.3117, simple_loss=0.3414, pruned_loss=0.1025, ctc_loss=0.1924, over 2755941.42 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 06:56:44,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=15.0
+2024-08-25 06:57:29,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27941.333333333332, ans=0.125
+2024-08-25 06:57:30,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=27941.333333333332, ans=0.0
+2024-08-25 07:00:43,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.14 vs. limit=22.5
+2024-08-25 07:02:40,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=28048.0, ans=0.004772173913043478
+2024-08-25 07:03:05,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=28101.333333333332, ans=0.125
+2024-08-25 07:03:23,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=28101.333333333332, ans=0.0
+2024-08-25 07:03:29,188 INFO [train.py:1114] (1/4) Epoch 3, batch 300, loss[loss=0.3242, simple_loss=0.3482, pruned_loss=0.1103, ctc_loss=0.1989, over 19525.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.3406, pruned_loss=0.102, ctc_loss=0.1912, over 3000471.32 frames. ], batch size: 61, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 07:03:33,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. limit=8.0
+2024-08-25 07:03:53,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28154.666666666668, ans=0.125
+2024-08-25 07:04:32,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28261.333333333332, ans=0.125
+2024-08-25 07:04:33,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=28261.333333333332, ans=0.125
+2024-08-25 07:04:35,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.53 vs. limit=22.5
+2024-08-25 07:04:40,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.83 vs. limit=15.0
+2024-08-25 07:04:43,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28261.333333333332, ans=0.125
+2024-08-25 07:04:44,395 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.724e+02 1.968e+02 2.265e+02 3.417e+02, threshold=3.936e+02, percent-clipped=0.0
+2024-08-25 07:05:03,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=28314.666666666668, ans=0.125
+2024-08-25 07:05:03,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=28314.666666666668, ans=0.125
+2024-08-25 07:05:03,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=28314.666666666668, ans=0.04949747468305833
+2024-08-25 07:05:49,910 INFO [train.py:1114] (1/4) Epoch 3, batch 350, loss[loss=0.2857, simple_loss=0.3188, pruned_loss=0.09105, ctc_loss=0.1762, over 19763.00 frames. ], tot_loss[loss=0.3102, simple_loss=0.3403, pruned_loss=0.1018, ctc_loss=0.191, over 3190566.06 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 07:05:56,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=28421.333333333332, ans=0.0
+2024-08-25 07:07:33,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=28528.0, ans=0.004667826086956522
+2024-08-25 07:07:40,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0
+2024-08-25 07:07:45,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=28581.333333333332, ans=0.004656231884057971
+2024-08-25 07:07:52,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.65 vs. limit=22.5
+2024-08-25 07:07:53,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28634.666666666668, ans=0.1
+2024-08-25 07:08:01,163 INFO [train.py:1114] (1/4) Epoch 3, batch 400, loss[loss=0.3057, simple_loss=0.3412, pruned_loss=0.09683, ctc_loss=0.1917, over 19530.00 frames. ], tot_loss[loss=0.3088, simple_loss=0.3396, pruned_loss=0.1011, ctc_loss=0.1897, over 3342662.72 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 32.0
+2024-08-25 07:08:21,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=28741.333333333332, ans=0.025
+2024-08-25 07:08:24,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=28794.666666666668, ans=0.0
+2024-08-25 07:08:29,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=28794.666666666668, ans=0.125
+2024-08-25 07:08:42,254 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.762e+02 1.982e+02 2.336e+02 5.420e+02, threshold=3.963e+02, percent-clipped=2.0
+2024-08-25 07:08:56,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=28901.333333333332, ans=0.025
+2024-08-25 07:08:57,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=28901.333333333332, ans=0.025
+2024-08-25 07:08:59,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.93 vs. limit=12.0
+2024-08-25 07:09:04,147 INFO [train.py:1114] (1/4) Epoch 3, batch 450, loss[loss=0.313, simple_loss=0.3449, pruned_loss=0.1027, ctc_loss=0.1891, over 19611.00 frames. ], tot_loss[loss=0.3088, simple_loss=0.3396, pruned_loss=0.1011, ctc_loss=0.1894, over 3450195.22 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0
+2024-08-25 07:09:09,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. limit=15.0
+2024-08-25 07:09:15,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=28954.666666666668, ans=0.2
+2024-08-25 07:09:23,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=29008.0, ans=0.125
+2024-08-25 07:09:37,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=29114.666666666668, ans=0.125
+2024-08-25 07:09:39,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=29114.666666666668, ans=0.0
+2024-08-25 07:09:56,832 INFO [train.py:1114] (1/4) Epoch 3, batch 500, loss[loss=0.318, simple_loss=0.3548, pruned_loss=0.1019, ctc_loss=0.1936, over 19675.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3388, pruned_loss=0.1004, ctc_loss=0.1882, over 3545219.68 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0
+2024-08-25 07:10:43,384 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.753e+02 1.966e+02 2.327e+02 4.047e+02, threshold=3.932e+02, percent-clipped=2.0
+2024-08-25 07:10:46,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.06 vs. limit=15.0
+2024-08-25 07:11:02,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.23 vs. limit=22.5
+2024-08-25 07:11:10,801 INFO [train.py:1114] (1/4) Epoch 3, batch 550, loss[loss=0.3332, simple_loss=0.3587, pruned_loss=0.1116, ctc_loss=0.2114, over 19305.00 frames. ], tot_loss[loss=0.3083, simple_loss=0.3394, pruned_loss=0.1008, ctc_loss=0.1887, over 3607695.08 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 16.0
+2024-08-25 07:11:41,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=29488.0, ans=0.125
+2024-08-25 07:11:51,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=29541.333333333332, ans=0.125
+2024-08-25 07:11:51,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29541.333333333332, ans=0.125
+2024-08-25 07:12:46,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.68 vs. limit=15.0
+2024-08-25 07:12:53,131 INFO [train.py:1114] (1/4) Epoch 3, batch 600, loss[loss=0.3215, simple_loss=0.3518, pruned_loss=0.1072, ctc_loss=0.192, over 19375.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3388, pruned_loss=0.1002, ctc_loss=0.1877, over 3665416.30 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 16.0
+2024-08-25 07:12:59,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29754.666666666668, ans=0.1
+2024-08-25 07:13:37,725 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.812e+02 2.009e+02 2.360e+02 5.731e+02, threshold=4.017e+02, percent-clipped=3.0
+2024-08-25 07:13:42,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29914.666666666668, ans=0.1
+2024-08-25 07:13:59,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29968.0, ans=0.1
+2024-08-25 07:14:02,707 INFO [train.py:1114] (1/4) Epoch 3, batch 650, loss[loss=0.2962, simple_loss=0.3363, pruned_loss=0.0931, ctc_loss=0.1746, over 19774.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.3375, pruned_loss=0.09941, ctc_loss=0.1862, over 3716429.58 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 16.0
+2024-08-25 07:14:11,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=30021.333333333332, ans=0.0
+2024-08-25 07:14:16,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.05 vs. limit=15.0
+2024-08-25 07:14:19,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.68 vs. limit=15.0
+2024-08-25 07:14:20,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=30074.666666666668, ans=0.004331594202898551
+2024-08-25 07:14:23,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.08 vs. limit=6.0
+2024-08-25 07:14:33,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=30128.0, ans=0.125
+2024-08-25 07:14:35,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=30181.333333333332, ans=0.125
+2024-08-25 07:14:44,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=30234.666666666668, ans=0.0
+2024-08-25 07:14:55,118 INFO [train.py:1114] (1/4) Epoch 3, batch 700, loss[loss=0.3249, simple_loss=0.3412, pruned_loss=0.1139, ctc_loss=0.2018, over 19739.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3385, pruned_loss=0.09992, ctc_loss=0.1871, over 3749289.09 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 16.0
+2024-08-25 07:15:02,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=30288.0, ans=0.1
+2024-08-25 07:15:03,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=30288.0, ans=0.05
+2024-08-25 07:15:22,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30394.666666666668, ans=0.1
+2024-08-25 07:15:28,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.846e+02 1.998e+02 2.505e+02 9.071e+02, threshold=3.995e+02, percent-clipped=5.0
+2024-08-25 07:15:34,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.23 vs. limit=10.0
+2024-08-25 07:15:38,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=30448.0, ans=0.0
+2024-08-25 07:15:42,400 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 07:15:47,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30501.333333333332, ans=0.1
+2024-08-25 07:15:47,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=30501.333333333332, ans=0.125
+2024-08-25 07:15:56,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=30554.666666666668, ans=0.0
+2024-08-25 07:15:58,573 INFO [train.py:1114] (1/4) Epoch 3, batch 750, loss[loss=0.2859, simple_loss=0.3354, pruned_loss=0.08464, ctc_loss=0.1681, over 19487.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3376, pruned_loss=0.09965, ctc_loss=0.1865, over 3775271.28 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 16.0
+2024-08-25 07:16:06,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=30554.666666666668, ans=0.125
+2024-08-25 07:16:26,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=30608.0, ans=0.125
+2024-08-25 07:16:51,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=30714.666666666668, ans=0.004192463768115941
+2024-08-25 07:34:42,534 INFO [train.py:1114] (1/4) Epoch 3, batch 800, loss[loss=0.3075, simple_loss=0.329, pruned_loss=0.1044, ctc_loss=0.193, over 19807.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3371, pruned_loss=0.09947, ctc_loss=0.1862, over 3796174.33 frames. ], batch size: 49, lr: 3.73e-02, grad_scale: 32.0
+2024-08-25 07:39:39,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.35 vs. limit=15.0
+2024-08-25 07:39:50,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=30821.333333333332, ans=0.2
+2024-08-25 08:02:40,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.761e+02 1.928e+02 2.233e+02 3.899e+02, threshold=3.856e+02, percent-clipped=0.0
+2024-08-25 08:12:59,519 INFO [train.py:1114] (1/4) Epoch 3, batch 850, loss[loss=0.3026, simple_loss=0.3424, pruned_loss=0.09531, ctc_loss=0.1803, over 19655.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3359, pruned_loss=0.09888, ctc_loss=0.1852, over 3815868.27 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0
+2024-08-25 08:23:27,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=31141.333333333332, ans=0.0
+2024-08-25 08:42:44,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=31301.333333333332, ans=0.125
+2024-08-25 08:44:41,569 INFO [train.py:1114] (1/4) Epoch 3, batch 900, loss[loss=0.249, simple_loss=0.2939, pruned_loss=0.07432, ctc_loss=0.1385, over 19422.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3365, pruned_loss=0.09926, ctc_loss=0.1859, over 3819560.69 frames. ], batch size: 48, lr: 3.72e-02, grad_scale: 32.0
+2024-08-25 08:47:09,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31354.666666666668, ans=0.1
+2024-08-25 08:48:49,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=31408.0, ans=0.035
+2024-08-25 08:51:48,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31461.333333333332, ans=0.1
+2024-08-25 08:51:48,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0
+2024-08-25 08:52:10,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=31461.333333333332, ans=0.2
+2024-08-25 08:57:54,734 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.748e+02 1.945e+02 2.250e+02 3.446e+02, threshold=3.889e+02, percent-clipped=0.0
+2024-08-25 08:57:55,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=31514.666666666668, ans=0.004018550724637681
+2024-08-25 09:00:10,866 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.84 vs. limit=22.5
+2024-08-25 09:02:23,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=31568.0, ans=0.125
+2024-08-25 09:03:26,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=31568.0, ans=0.0
+2024-08-25 09:05:01,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=31568.0, ans=0.125
+2024-08-25 09:05:03,363 INFO [train.py:1114] (1/4) Epoch 3, batch 950, loss[loss=0.2791, simple_loss=0.3135, pruned_loss=0.08868, ctc_loss=0.1683, over 19491.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.3367, pruned_loss=0.09949, ctc_loss=0.1865, over 3821438.83 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 32.0
+2024-08-25 09:16:38,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=31781.333333333332, ans=0.125
+2024-08-25 09:23:03,904 INFO [train.py:1114] (1/4) Epoch 3, batch 1000, loss[loss=0.2785, simple_loss=0.3204, pruned_loss=0.0853, ctc_loss=0.1652, over 19850.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3383, pruned_loss=0.1003, ctc_loss=0.1875, over 3817613.11 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 16.0
+2024-08-25 09:29:07,851 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.873e+02 2.237e+02 2.628e+02 7.664e+02, threshold=4.475e+02, percent-clipped=6.0
+2024-08-25 09:29:25,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32048.0, ans=0.1
+2024-08-25 09:29:25,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.86 vs. limit=15.0
+2024-08-25 09:30:17,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.14 vs. limit=22.5
+2024-08-25 09:32:27,624 INFO [train.py:1114] (1/4) Epoch 3, batch 1050, loss[loss=0.2998, simple_loss=0.3344, pruned_loss=0.09626, ctc_loss=0.1817, over 19827.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3369, pruned_loss=0.0995, ctc_loss=0.1861, over 3824558.96 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 16.0
+2024-08-25 09:32:31,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=32154.666666666668, ans=0.125
+2024-08-25 09:33:39,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=32208.0, ans=0.07
+2024-08-25 09:41:10,308 INFO [train.py:1114] (1/4) Epoch 3, batch 1100, loss[loss=0.3072, simple_loss=0.3371, pruned_loss=0.102, ctc_loss=0.1836, over 19577.00 frames. ], tot_loss[loss=0.3044, simple_loss=0.3366, pruned_loss=0.09907, ctc_loss=0.1854, over 3831467.53 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 16.0
+2024-08-25 09:41:42,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0
+2024-08-25 09:41:52,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.79 vs. limit=22.5
+2024-08-25 09:42:46,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=32528.0, ans=0.0
+2024-08-25 09:43:23,046 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.681e+02 1.943e+02 2.357e+02 4.515e+02, threshold=3.887e+02, percent-clipped=1.0
+2024-08-25 09:45:14,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.86 vs. limit=15.0
+2024-08-25 09:45:15,742 INFO [train.py:1114] (1/4) Epoch 3, batch 1150, loss[loss=0.2932, simple_loss=0.3346, pruned_loss=0.09042, ctc_loss=0.1772, over 19585.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3372, pruned_loss=0.09953, ctc_loss=0.1861, over 3831042.27 frames. ], batch size: 52, lr: 3.69e-02, grad_scale: 16.0
+2024-08-25 09:53:24,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32741.333333333332, ans=0.125
+2024-08-25 09:54:01,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=32794.666666666664, ans=0.0
+2024-08-25 09:55:14,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=32901.333333333336, ans=0.07
+2024-08-25 09:55:29,633 INFO [train.py:1114] (1/4) Epoch 3, batch 1200, loss[loss=0.3252, simple_loss=0.359, pruned_loss=0.1073, ctc_loss=0.1917, over 19842.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3385, pruned_loss=0.09978, ctc_loss=0.1867, over 3825539.56 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 32.0
+2024-08-25 09:55:39,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=32954.666666666664, ans=0.2
+2024-08-25 09:56:31,125 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.705e+02 1.941e+02 2.201e+02 4.168e+02, threshold=3.882e+02, percent-clipped=1.0
+2024-08-25 09:56:31,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=33114.666666666664, ans=0.2
+2024-08-25 09:57:41,523 INFO [train.py:1114] (1/4) Epoch 3, batch 1250, loss[loss=0.3266, simple_loss=0.3593, pruned_loss=0.1072, ctc_loss=0.1992, over 19541.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3389, pruned_loss=0.09973, ctc_loss=0.1863, over 3843396.04 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 32.0
+2024-08-25 09:58:04,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=33274.666666666664, ans=0.0
+2024-08-25 09:58:48,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=33434.666666666664, ans=0.07
+2024-08-25 09:59:01,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33434.666666666664, ans=0.125
+2024-08-25 09:59:01,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=33434.666666666664, ans=0.125
+2024-08-25 09:59:04,140 INFO [train.py:1114] (1/4) Epoch 3, batch 1300, loss[loss=0.2947, simple_loss=0.3391, pruned_loss=0.09068, ctc_loss=0.1724, over 18793.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3372, pruned_loss=0.09893, ctc_loss=0.1847, over 3847174.36 frames. ], batch size: 76, lr: 3.67e-02, grad_scale: 32.0
+2024-08-25 09:59:31,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=33541.333333333336, ans=0.125
+2024-08-25 09:59:48,218 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.674e+02 1.887e+02 2.172e+02 3.368e+02, threshold=3.774e+02, percent-clipped=0.0
+2024-08-25 09:59:58,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=33648.0, ans=0.125
+2024-08-25 10:00:13,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=33701.333333333336, ans=0.025
+2024-08-25 10:00:14,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=33701.333333333336, ans=0.2
+2024-08-25 10:00:20,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=33701.333333333336, ans=0.125
+2024-08-25 10:00:22,480 INFO [train.py:1114] (1/4) Epoch 3, batch 1350, loss[loss=0.3168, simple_loss=0.3513, pruned_loss=0.1035, ctc_loss=0.1883, over 19771.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3361, pruned_loss=0.09794, ctc_loss=0.1831, over 3856257.20 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 32.0
+2024-08-25 10:00:33,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=33754.666666666664, ans=0.0
+2024-08-25 10:00:52,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=33808.0, ans=0.125
+2024-08-25 10:00:52,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33808.0, ans=0.125
+2024-08-25 10:00:58,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33808.0, ans=0.1
+2024-08-25 10:01:32,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=33914.666666666664, ans=0.04949747468305833
+2024-08-25 10:01:46,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=33914.666666666664, ans=0.125
+2024-08-25 10:02:01,542 INFO [train.py:1114] (1/4) Epoch 3, batch 1400, loss[loss=0.2894, simple_loss=0.3166, pruned_loss=0.09536, ctc_loss=0.1788, over 19699.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3355, pruned_loss=0.09791, ctc_loss=0.1827, over 3864232.26 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 32.0
+2024-08-25 10:02:20,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=34074.666666666664, ans=0.2
+2024-08-25 10:02:25,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=34074.666666666664, ans=0.025
+2024-08-25 10:02:33,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0
+2024-08-25 10:02:45,304 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.896e+02 2.159e+02 2.528e+02 3.857e+02, threshold=4.318e+02, percent-clipped=1.0
+2024-08-25 10:02:54,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=34181.333333333336, ans=0.0
+2024-08-25 10:03:06,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.22 vs. limit=22.5
+2024-08-25 10:03:07,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=34234.666666666664, ans=0.2
+2024-08-25 10:03:12,625 INFO [train.py:1114] (1/4) Epoch 3, batch 1450, loss[loss=0.3199, simple_loss=0.3524, pruned_loss=0.1047, ctc_loss=0.1953, over 19654.00 frames. ], tot_loss[loss=0.303, simple_loss=0.3363, pruned_loss=0.09816, ctc_loss=0.1832, over 3862401.94 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 32.0
+2024-08-25 10:03:30,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=34341.333333333336, ans=0.2
+2024-08-25 10:04:19,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.42 vs. limit=15.0
+2024-08-25 10:04:21,623 INFO [train.py:1114] (1/4) Epoch 3, batch 1500, loss[loss=0.334, simple_loss=0.3629, pruned_loss=0.1115, ctc_loss=0.2053, over 19575.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.3366, pruned_loss=0.09802, ctc_loss=0.183, over 3862253.01 frames. ], batch size: 57, lr: 3.65e-02, grad_scale: 32.0
+2024-08-25 10:04:25,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.09 vs. limit=15.0
+2024-08-25 10:04:28,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=34554.666666666664, ans=0.0033576811594202907
+2024-08-25 10:04:36,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=15.0
+2024-08-25 10:04:52,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=34661.333333333336, ans=0.0
+2024-08-25 10:04:55,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34714.666666666664, ans=0.125
+2024-08-25 10:05:09,913 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.778e+02 1.971e+02 2.353e+02 5.678e+02, threshold=3.941e+02, percent-clipped=1.0
+2024-08-25 10:05:10,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34714.666666666664, ans=0.1
+2024-08-25 10:05:11,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=34714.666666666664, ans=0.125
+2024-08-25 10:05:11,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34714.666666666664, ans=0.1
+2024-08-25 10:05:29,598 INFO [train.py:1114] (1/4) Epoch 3, batch 1550, loss[loss=0.3476, simple_loss=0.3666, pruned_loss=0.1208, ctc_loss=0.2174, over 19587.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3368, pruned_loss=0.09841, ctc_loss=0.1836, over 3847577.24 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 32.0
+2024-08-25 10:05:37,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34821.333333333336, ans=0.125
+2024-08-25 10:05:37,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=34821.333333333336, ans=0.125
+2024-08-25 10:06:03,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34981.333333333336, ans=0.1
+2024-08-25 10:06:08,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34981.333333333336, ans=0.1
+2024-08-25 10:06:10,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34981.333333333336, ans=0.125
+2024-08-25 10:06:22,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.38 vs. limit=15.0
+2024-08-25 10:06:42,357 INFO [train.py:1114] (1/4) Epoch 3, batch 1600, loss[loss=0.3407, simple_loss=0.3688, pruned_loss=0.1143, ctc_loss=0.2102, over 19839.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3367, pruned_loss=0.09871, ctc_loss=0.1844, over 3835820.56 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 32.0
+2024-08-25 10:07:05,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=35141.333333333336, ans=0.0032301449275362317
+2024-08-25 10:07:26,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=35194.666666666664, ans=0.025
+2024-08-25 10:07:47,933 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.483e+02 1.752e+02 2.032e+02 2.338e+02 4.104e+02, threshold=4.064e+02, percent-clipped=1.0
+2024-08-25 10:07:55,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35301.333333333336, ans=0.125
+2024-08-25 10:08:06,817 INFO [train.py:1114] (1/4) Epoch 3, batch 1650, loss[loss=0.3008, simple_loss=0.3383, pruned_loss=0.09706, ctc_loss=0.1733, over 19638.00 frames. ], tot_loss[loss=0.303, simple_loss=0.336, pruned_loss=0.0983, ctc_loss=0.1836, over 3831935.54 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0
+2024-08-25 10:08:08,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=35354.666666666664, ans=0.0031837681159420303
+2024-08-25 10:08:12,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=35354.666666666664, ans=15.0
+2024-08-25 10:08:14,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=35354.666666666664, ans=0.125
+2024-08-25 10:08:19,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.22 vs. limit=6.0
+2024-08-25 10:08:22,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=35408.0, ans=0.125
+2024-08-25 10:08:46,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=35514.666666666664, ans=0.0
+2024-08-25 10:08:54,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=35514.666666666664, ans=0.5
+2024-08-25 10:08:55,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=35568.0, ans=0.0
+2024-08-25 10:09:00,404 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 10:09:00,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=35568.0, ans=0.125
+2024-08-25 10:09:04,855 INFO [train.py:1114] (1/4) Epoch 3, batch 1700, loss[loss=0.2859, simple_loss=0.3133, pruned_loss=0.09377, ctc_loss=0.1775, over 19649.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3352, pruned_loss=0.09733, ctc_loss=0.182, over 3846531.43 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0
+2024-08-25 10:09:15,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.44 vs. limit=22.5
+2024-08-25 10:09:21,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.31 vs.
limit=15.0 +2024-08-25 10:09:22,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35728.0, ans=0.125 +2024-08-25 10:09:52,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.835e+02 2.022e+02 2.484e+02 3.793e+02, threshold=4.043e+02, percent-clipped=0.0 +2024-08-25 10:10:09,478 INFO [train.py:1114] (1/4) Epoch 3, batch 1750, loss[loss=0.2663, simple_loss=0.3009, pruned_loss=0.08336, ctc_loss=0.1623, over 19681.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3337, pruned_loss=0.09691, ctc_loss=0.1809, over 3851205.57 frames. ], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 10:10:28,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.55 vs. limit=12.0 +2024-08-25 10:10:37,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=35994.666666666664, ans=0.125 +2024-08-25 10:11:02,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.87 vs. limit=10.0 +2024-08-25 10:11:20,661 INFO [train.py:1114] (1/4) Epoch 3, batch 1800, loss[loss=0.2628, simple_loss=0.3119, pruned_loss=0.07763, ctc_loss=0.1459, over 19609.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3341, pruned_loss=0.09707, ctc_loss=0.1812, over 3853268.25 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 32.0 +2024-08-25 10:11:27,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36154.666666666664, ans=0.125 +2024-08-25 10:11:28,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36154.666666666664, ans=0.125 +2024-08-25 10:11:29,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.60 vs. limit=15.0 +2024-08-25 10:11:52,946 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.758e+02 2.042e+02 2.396e+02 4.902e+02, threshold=4.083e+02, percent-clipped=1.0 +2024-08-25 10:12:24,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=36368.0, ans=0.125 +2024-08-25 10:12:33,954 INFO [train.py:1114] (1/4) Epoch 3, batch 1850, loss[loss=0.3174, simple_loss=0.3477, pruned_loss=0.1051, ctc_loss=0.1925, over 19608.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3331, pruned_loss=0.09657, ctc_loss=0.1801, over 3856041.44 frames. 
], batch size: 57, lr: 3.61e-02, grad_scale: 32.0 +2024-08-25 10:12:34,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36421.333333333336, ans=0.125 +2024-08-25 10:13:07,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36528.0, ans=0.125 +2024-08-25 10:13:08,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36528.0, ans=0.1 +2024-08-25 10:13:12,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=36581.333333333336, ans=0.035 +2024-08-25 10:13:31,591 INFO [train.py:1114] (1/4) Epoch 3, batch 1900, loss[loss=0.3041, simple_loss=0.3456, pruned_loss=0.09431, ctc_loss=0.1851, over 19659.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3335, pruned_loss=0.0965, ctc_loss=0.18, over 3861295.52 frames. ], batch size: 59, lr: 3.60e-02, grad_scale: 32.0 +2024-08-25 10:14:04,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.09 vs. limit=15.0 +2024-08-25 10:14:09,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=36741.333333333336, ans=0.125 +2024-08-25 10:14:14,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=36741.333333333336, ans=0.125 +2024-08-25 10:14:24,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 10:14:26,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=36794.666666666664, ans=0.04949747468305833 +2024-08-25 10:14:27,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36848.0, ans=0.125 +2024-08-25 10:14:29,229 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.725e+02 1.920e+02 2.285e+02 4.448e+02, threshold=3.841e+02, percent-clipped=1.0 +2024-08-25 10:14:32,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.88 vs. limit=15.0 +2024-08-25 10:14:47,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=36901.333333333336, ans=0.0 +2024-08-25 10:14:54,735 INFO [train.py:1114] (1/4) Epoch 3, batch 1950, loss[loss=0.2683, simple_loss=0.3082, pruned_loss=0.08376, ctc_loss=0.1523, over 19591.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3342, pruned_loss=0.09629, ctc_loss=0.1796, over 3870141.84 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:15:02,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=36954.666666666664, ans=10.0 +2024-08-25 10:15:51,810 INFO [train.py:1114] (1/4) Epoch 3, batch 2000, loss[loss=0.2595, simple_loss=0.2964, pruned_loss=0.08001, ctc_loss=0.1565, over 19661.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3348, pruned_loss=0.09662, ctc_loss=0.1802, over 3854345.99 frames. 
], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:16:11,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=37328.0, ans=0.002754782608695652 +2024-08-25 10:16:19,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.904e+02 2.146e+02 2.566e+02 5.347e+02, threshold=4.293e+02, percent-clipped=2.0 +2024-08-25 10:16:44,037 INFO [train.py:1114] (1/4) Epoch 3, batch 2050, loss[loss=0.2662, simple_loss=0.3062, pruned_loss=0.08184, ctc_loss=0.1565, over 19725.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3336, pruned_loss=0.09615, ctc_loss=0.1795, over 3850800.98 frames. ], batch size: 47, lr: 3.58e-02, grad_scale: 32.0 +2024-08-25 10:16:44,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=37488.0, ans=0.125 +2024-08-25 10:16:57,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=37488.0, ans=0.125 +2024-08-25 10:17:08,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=37541.333333333336, ans=0.2 +2024-08-25 10:17:09,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.09 vs. limit=10.0 +2024-08-25 10:17:21,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=37594.666666666664, ans=0.0 +2024-08-25 10:17:27,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=37648.0, ans=0.07 +2024-08-25 10:17:56,058 INFO [train.py:1114] (1/4) Epoch 3, batch 2100, loss[loss=0.287, simple_loss=0.3255, pruned_loss=0.0907, ctc_loss=0.1679, over 19758.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3331, pruned_loss=0.09565, ctc_loss=0.1785, over 3857825.56 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 10:18:18,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=37754.666666666664, ans=0.5 +2024-08-25 10:18:19,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37754.666666666664, ans=0.0 +2024-08-25 10:18:35,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37754.666666666664, ans=0.125 +2024-08-25 10:18:39,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=37808.0, ans=0.025 +2024-08-25 10:18:41,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.76 vs. limit=22.5 +2024-08-25 10:18:46,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=37808.0, ans=0.125 +2024-08-25 10:19:20,760 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.776e+02 1.971e+02 2.246e+02 3.814e+02, threshold=3.941e+02, percent-clipped=0.0 +2024-08-25 10:20:09,497 INFO [train.py:1114] (1/4) Epoch 3, batch 2150, loss[loss=0.2869, simple_loss=0.3267, pruned_loss=0.08966, ctc_loss=0.1695, over 19594.00 frames. 
], tot_loss[loss=0.296, simple_loss=0.3314, pruned_loss=0.09481, ctc_loss=0.1772, over 3868883.36 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 10:20:51,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=38181.333333333336, ans=0.125 +2024-08-25 10:20:53,698 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.55 vs. limit=22.5 +2024-08-25 10:20:56,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=38181.333333333336, ans=0.0 +2024-08-25 10:21:09,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.30 vs. limit=15.0 +2024-08-25 10:21:10,424 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:21:11,133 INFO [train.py:1114] (1/4) Epoch 3, batch 2200, loss[loss=0.294, simple_loss=0.3365, pruned_loss=0.0914, ctc_loss=0.1716, over 19580.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.332, pruned_loss=0.09483, ctc_loss=0.1773, over 3868042.63 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:21:18,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38288.0, ans=0.1 +2024-08-25 10:21:30,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=38341.333333333336, ans=0.09899494936611666 +2024-08-25 10:21:56,464 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.750e+02 1.922e+02 2.212e+02 3.187e+02, threshold=3.844e+02, percent-clipped=0.0 +2024-08-25 10:22:21,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=38501.333333333336, ans=0.0024997101449275357 +2024-08-25 10:22:28,991 INFO [train.py:1114] (1/4) Epoch 3, batch 2250, loss[loss=0.292, simple_loss=0.3338, pruned_loss=0.09075, ctc_loss=0.1716, over 19603.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3322, pruned_loss=0.09487, ctc_loss=0.1771, over 3868149.79 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:22:30,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=38554.666666666664, ans=0.002488115942028987 +2024-08-25 10:22:31,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.56 vs. limit=12.0 +2024-08-25 10:22:55,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. 
limit=6.0 +2024-08-25 10:23:01,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=38661.333333333336, ans=0.002464927536231884 +2024-08-25 10:23:17,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38714.666666666664, ans=0.125 +2024-08-25 10:23:29,346 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:23:40,914 INFO [train.py:1114] (1/4) Epoch 3, batch 2300, loss[loss=0.2613, simple_loss=0.3004, pruned_loss=0.08032, ctc_loss=0.1538, over 19527.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3309, pruned_loss=0.09465, ctc_loss=0.1769, over 3862623.77 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:23:54,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=38874.666666666664, ans=0.125 +2024-08-25 10:24:03,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=38928.0, ans=0.0 +2024-08-25 10:24:03,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=38928.0, ans=0.125 +2024-08-25 10:24:06,274 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:24:13,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.820e+02 2.030e+02 2.354e+02 3.970e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-25 10:24:13,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=38981.333333333336, ans=0.09899494936611666 +2024-08-25 10:24:14,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.46 vs. limit=15.0 +2024-08-25 10:24:43,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=39034.666666666664, ans=0.125 +2024-08-25 10:24:48,819 INFO [train.py:1114] (1/4) Epoch 3, batch 2350, loss[loss=0.329, simple_loss=0.3609, pruned_loss=0.108, ctc_loss=0.2025, over 19678.00 frames. ], tot_loss[loss=0.2952, simple_loss=0.3306, pruned_loss=0.09454, ctc_loss=0.1769, over 3864981.09 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:25:01,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39088.0, ans=0.1 +2024-08-25 10:25:06,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=39141.333333333336, ans=0.125 +2024-08-25 10:25:20,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39194.666666666664, ans=0.1 +2024-08-25 10:25:23,277 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0 +2024-08-25 10:25:38,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.90 vs. 
limit=15.0 +2024-08-25 10:25:52,398 INFO [train.py:1114] (1/4) Epoch 3, batch 2400, loss[loss=0.3264, simple_loss=0.3583, pruned_loss=0.1062, ctc_loss=0.2052, over 19316.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3335, pruned_loss=0.09599, ctc_loss=0.1792, over 3859318.23 frames. ], batch size: 71, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 10:25:55,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.31 vs. limit=12.0 +2024-08-25 10:26:02,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0 +2024-08-25 10:26:04,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=39408.0, ans=0.025 +2024-08-25 10:26:06,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.27 vs. limit=22.5 +2024-08-25 10:26:11,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.02 vs. limit=15.0 +2024-08-25 10:26:40,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=39514.666666666664, ans=0.125 +2024-08-25 10:26:41,539 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.777e+02 2.047e+02 2.383e+02 4.291e+02, threshold=4.094e+02, percent-clipped=1.0 +2024-08-25 10:27:07,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.69 vs. limit=15.0 +2024-08-25 10:27:14,171 INFO [train.py:1114] (1/4) Epoch 3, batch 2450, loss[loss=0.3815, simple_loss=0.3729, pruned_loss=0.1421, ctc_loss=0.2651, over 13715.00 frames. ], tot_loss[loss=0.3091, simple_loss=0.3395, pruned_loss=0.1015, ctc_loss=0.1891, over 3731134.73 frames. ], batch size: 140, lr: 3.53e-02, grad_scale: 16.0 +2024-08-25 10:27:47,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.23 vs. limit=15.0 +2024-08-25 10:27:48,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=39781.333333333336, ans=0.0022214492753623175 +2024-08-25 10:39:24,826 INFO [train.py:1050] (1/4) Caught exception: [Rank 1] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=89707, OpType=ALLREDUCE, NumelIn=745, NumelOut=745, Timeout(ms)=600000) ran for 600003 milliseconds before timing out.. 
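Note: the `WorkNCCL(... Timeout(ms)=600000)` exception above is the NCCL watchdog firing after a 600 s collective timeout — which matches `ProcessGroupNCCL`'s default of 10 minutes — and the lines that follow show `train.py` saving a `bad-model-1.pt` checkpoint plus the in-flight batch so the failure can be reproduced offline (e.g. by reloading the saved batch with `torch.load`). As a minimal, hypothetical sketch of where that timeout is set (standard `torch.distributed` usage, not a quote from icefall's `train.py`):

```python
# Hedged sketch: raising the NCCL collective timeout whose default
# (600000 ms for the NCCL backend) produced the watchdog error above.
# Standard torch.distributed API; not the icefall code.
import datetime

import torch.distributed as dist

dist.init_process_group(
    backend="nccl",
    timeout=datetime.timedelta(minutes=30),  # NCCL backend default is 10 minutes
)
```

A longer timeout only hides the symptom if one rank is genuinely stuck (e.g. on an OOM retry or a straggling data loader); the saved batch and `features shape` / `num tokens` lines below are the more useful diagnostic.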
+2024-08-25 10:39:24,827 INFO [checkpoint.py:75] (1/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/bad-model-1.pt +2024-08-25 10:39:39,763 INFO [train.py:1413] (1/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/batch-41f60be0-7cef-6aa3-6aed-cf4a4599a084.pt +2024-08-25 10:39:39,825 INFO [train.py:1419] (1/4) features shape: torch.Size([48, 1633, 80]) +2024-08-25 10:39:39,828 INFO [train.py:1423] (1/4) num tokens: 3940 diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-2 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-2 new file mode 100644 index 0000000000000000000000000000000000000000..74b60af8c4fc998f110faf34da8919250ba12a6e --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-2 @@ -0,0 +1,1156 @@ +2024-08-25 03:46:09,313 INFO [train.py:1182] (2/4) Training started +2024-08-25 03:46:09,314 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-25 03:46:09,373 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 
'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-25 03:46:09,373 INFO [train.py:1212] (2/4) About to create model +2024-08-25 03:46:10,428 INFO [train.py:1216] (2/4) Number of model parameters: 65805511 +2024-08-25 03:46:10,531 INFO [train.py:1231] (2/4) Using DDP +2024-08-25 03:46:14,820 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-25 03:46:14,898 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-25 03:46:16,505 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-25 03:46:16,507 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-25 03:46:16,584 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-25 03:46:16,613 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-25 03:46:16,935 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-25 03:46:16,935 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-25 03:50:49,731 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=44.40 vs. limit=7.5 +2024-08-25 03:50:50,510 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 11561MB +2024-08-25 03:50:51,648 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 11561MB +2024-08-25 03:51:20,162 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 11561MB +2024-08-25 03:51:21,412 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 11561MB +2024-08-25 03:51:43,055 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 11561MB +2024-08-25 03:51:44,348 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 11561MB +2024-08-25 03:53:11,522 INFO [train.py:1114] (2/4) Epoch 1, batch 0, loss[loss=8.684, simple_loss=7.024, pruned_loss=6.921, ctc_loss=4.834, over 19418.00 frames. ], tot_loss[loss=8.684, simple_loss=7.024, pruned_loss=6.921, ctc_loss=4.834, over 19418.00 frames. ], batch size: 48, lr: 2.25e-02, grad_scale: 1.0 +2024-08-25 03:53:11,523 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-25 03:53:26,557 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=8.842, simple_loss=7.151, pruned_loss=6.961, ctc_loss=4.966, over 944034.00 frames. +2024-08-25 03:53:26,558 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 11601MB +2024-08-25 03:53:28,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.47 vs. 
limit=7.5 +2024-08-25 03:53:33,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=0.0, ans=0.3 +2024-08-25 03:53:38,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=0.0, ans=0.5 +2024-08-25 03:53:42,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.54 vs. limit=7.5 +2024-08-25 03:53:47,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.09 vs. limit=7.5 +2024-08-25 03:54:10,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=53.333333333333336, ans=0.0988 +2024-08-25 03:54:11,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.16 vs. limit=7.52 +2024-08-25 03:54:36,709 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.008e+03 4.149e+03 4.360e+03 5.530e+03 5.553e+03, threshold=1.744e+04, percent-clipped=0.0 +2024-08-25 03:54:56,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=465.31 vs. limit=7.54 +2024-08-25 03:54:57,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.64 vs. limit=5.026666666666666 +2024-08-25 03:54:59,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=492.37 vs. limit=5.026666666666666 +2024-08-25 03:55:15,617 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=503.85 vs. limit=5.053333333333334 +2024-08-25 03:55:20,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.69 vs. limit=7.54 +2024-08-25 03:55:46,189 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.063e+03 1.598e+03 4.141e+03 5.530e+03 6.572e+03, threshold=1.656e+04, percent-clipped=0.0 +2024-08-25 03:55:46,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=169.61 vs. limit=5.053333333333334 +2024-08-25 03:57:12,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=180.11 vs. limit=4.032 +2024-08-25 03:57:30,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=110.75 vs. limit=5.08 +2024-08-25 04:00:13,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213.33333333333334, ans=0.49 +2024-08-25 04:00:14,872 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+02 1.048e+03 1.328e+03 4.149e+03 6.572e+03, threshold=5.310e+03, percent-clipped=0.0 +2024-08-25 04:00:18,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=17.67 vs. 
limit=7.58 +2024-08-25 04:00:19,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.40 vs. limit=7.66 +2024-08-25 04:00:39,836 INFO [train.py:1114] (2/4) Epoch 1, batch 50, loss[loss=1.627, simple_loss=1.075, pruned_loss=1.224, ctc_loss=2.078, over 19747.00 frames. ], tot_loss[loss=3.747, simple_loss=2.908, pruned_loss=2.556, ctc_loss=2.881, over 844203.64 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 04:00:50,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=13.14 vs. limit=4.1066666666666665 +2024-08-25 04:00:50,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=21.42 vs. limit=7.6 +2024-08-25 04:00:59,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=25.18 vs. limit=5.066666666666666 +2024-08-25 04:01:03,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=320.0, ans=5.2 +2024-08-25 04:01:03,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=320.0, ans=0.049 +2024-08-25 04:01:27,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=373.3333333333333, ans=0.2962666666666667 +2024-08-25 04:01:50,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=30.89 vs. limit=7.78 +2024-08-25 04:01:59,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.25 vs. limit=5.093333333333334 +2024-08-25 04:02:05,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=49.87 vs. limit=7.66 +2024-08-25 04:02:10,698 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=488.91 vs. limit=7.66 +2024-08-25 04:02:31,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=57.70 vs. limit=7.86 +2024-08-25 04:02:39,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=480.0, ans=0.4775 +2024-08-25 04:02:40,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=138.84 vs. limit=7.68 +2024-08-25 04:02:42,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=480.0, ans=0.0892 +2024-08-25 04:02:48,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=46.53 vs. limit=7.86 +2024-08-25 04:02:49,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=25.47 vs. 
limit=5.24 +2024-08-25 04:02:49,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=31.95 vs. limit=7.68 +2024-08-25 04:02:53,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=24.61 vs. limit=7.68 +2024-08-25 04:02:59,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=66.53 vs. limit=7.7 +2024-08-25 04:02:59,897 INFO [train.py:1114] (2/4) Epoch 1, batch 100, loss[loss=1.423, simple_loss=1.005, pruned_loss=1.26, ctc_loss=1.3, over 19704.00 frames. ], tot_loss[loss=2.582, simple_loss=1.909, pruned_loss=1.864, ctc_loss=2.351, over 1496570.41 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 04:03:07,085 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.974e+02 8.674e+02 1.328e+03 6.572e+03, threshold=1.735e+03, percent-clipped=0.0 +2024-08-25 04:03:18,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=586.6666666666666, ans=4.234666666666667 +2024-08-25 04:03:24,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=586.6666666666666, ans=0.2941333333333333 +2024-08-25 04:03:25,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=121.70 vs. limit=7.72 +2024-08-25 04:03:32,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=172.86 vs. limit=7.72 +2024-08-25 04:03:36,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=23.15 vs. limit=5.16 +2024-08-25 04:03:40,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=97.97 vs. limit=7.74 +2024-08-25 04:03:58,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=92.62 vs. limit=7.76 +2024-08-25 04:04:02,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=693.3333333333334, ans=0.4675 +2024-08-25 04:04:11,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=746.6666666666666, ans=0.46499999999999997 +2024-08-25 04:04:11,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=104.81 vs. limit=7.78 +2024-08-25 04:04:22,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=800.0, ans=0.0475 +2024-08-25 04:04:22,866 INFO [train.py:1114] (2/4) Epoch 1, batch 150, loss[loss=1.175, simple_loss=0.8141, pruned_loss=1.03, ctc_loss=1.093, over 19716.00 frames. ], tot_loss[loss=2.05, simple_loss=1.493, pruned_loss=1.571, ctc_loss=1.869, over 2026976.04 frames. 
], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 04:04:27,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=78.11 vs. limit=7.8 +2024-08-25 04:04:30,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=800.0, ans=0.0475 +2024-08-25 04:04:34,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=35.26 vs. limit=8.14 +2024-08-25 04:04:38,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=29.15 vs. limit=8.14 +2024-08-25 04:04:43,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=68.16 vs. limit=7.82 +2024-08-25 04:04:51,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=49.55 vs. limit=7.84 +2024-08-25 04:05:10,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=47.26 vs. limit=7.84 +2024-08-25 04:05:12,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.80 vs. limit=5.226666666666667 +2024-08-25 04:05:19,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.04 vs. limit=5.48 +2024-08-25 04:05:44,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=1013.3333333333334, ans=0.4525 +2024-08-25 04:05:46,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=1013.3333333333334, ans=0.5 +2024-08-25 04:05:50,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=12.76 vs. limit=5.253333333333333 +2024-08-25 04:05:53,924 INFO [train.py:1114] (2/4) Epoch 1, batch 200, loss[loss=1.256, simple_loss=0.8652, pruned_loss=1.01, ctc_loss=1.211, over 18042.00 frames. ], tot_loss[loss=1.76, simple_loss=1.265, pruned_loss=1.381, ctc_loss=1.621, over 2434889.36 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0 +2024-08-25 04:05:55,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.53 vs. limit=4.426666666666667 +2024-08-25 04:05:56,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=1066.6666666666667, ans=0.7606666666666667 +2024-08-25 04:05:57,468 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.117e+01 1.191e+02 1.554e+02 2.219e+02 5.914e+02, threshold=3.108e+02, percent-clipped=0.0 +2024-08-25 04:06:00,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=26.30 vs. limit=7.9 +2024-08-25 04:06:05,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=160.47 vs. 
limit=7.92 +2024-08-25 04:06:08,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=1120.0, ans=0.8608 +2024-08-25 04:06:08,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=54.79 vs. limit=7.92 +2024-08-25 04:06:08,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=145.49 vs. limit=7.92 +2024-08-25 04:06:15,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.43 vs. limit=8.34 +2024-08-25 04:06:16,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1173.3333333333333, ans=0.28826666666666667 +2024-08-25 04:06:16,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=15.83 vs. limit=5.586666666666667 +2024-08-25 04:06:18,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=1173.3333333333333, ans=7.94 +2024-08-25 04:06:20,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.21 vs. limit=5.293333333333333 +2024-08-25 04:06:26,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.61 vs. limit=8.38 +2024-08-25 04:06:40,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.50 vs. limit=8.46 +2024-08-25 04:06:57,328 INFO [train.py:1114] (2/4) Epoch 1, batch 250, loss[loss=1.321, simple_loss=0.8957, pruned_loss=1.043, ctc_loss=1.295, over 19449.00 frames. ], tot_loss[loss=1.586, simple_loss=1.126, pruned_loss=1.254, ctc_loss=1.481, over 2755846.84 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0 +2024-08-25 04:07:38,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=29.26 vs. limit=8.0 +2024-08-25 04:07:39,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=76.05 vs. limit=8.0 +2024-08-25 04:07:39,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=27.83 vs. limit=5.666666666666667 +2024-08-25 04:07:39,892 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=35.35 vs. limit=8.0 +2024-08-25 04:08:00,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1440.0, ans=0.28559999999999997 +2024-08-25 04:08:02,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=70.33 vs. 
limit=8.58 +2024-08-25 04:08:02,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=1440.0, ans=8.58 +2024-08-25 04:08:03,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=1440.0, ans=0.4325 +2024-08-25 04:08:25,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=1546.6666666666667, ans=0.4275 +2024-08-25 04:08:26,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=1546.6666666666667, ans=0.142 +2024-08-25 04:09:08,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.52 vs. limit=4.618666666666667 +2024-08-25 04:09:08,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=36.82 vs. limit=8.08 +2024-08-25 04:09:11,358 INFO [train.py:1114] (2/4) Epoch 1, batch 300, loss[loss=1.247, simple_loss=0.8373, pruned_loss=0.9629, ctc_loss=1.228, over 19531.00 frames. ], tot_loss[loss=1.471, simple_loss=1.032, pruned_loss=1.162, ctc_loss=1.39, over 2999758.88 frames. ], batch size: 61, lr: 3.60e-02, grad_scale: 2.0 +2024-08-25 04:09:12,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=1600.0, ans=0.425 +2024-08-25 04:09:14,915 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.125e+01 1.367e+02 1.753e+02 2.332e+02 3.681e+02, threshold=3.505e+02, percent-clipped=6.0 +2024-08-25 04:10:23,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=95.50 vs. limit=8.12 +2024-08-25 04:10:25,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.56 vs. limit=8.12 +2024-08-25 04:10:35,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=1706.6666666666667, ans=0.2866666666666666 +2024-08-25 04:10:42,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=37.93 vs. limit=8.14 +2024-08-25 04:10:50,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.13 vs. limit=8.82 +2024-08-25 04:11:08,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=16.15 vs. limit=8.18 +2024-08-25 04:11:11,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=43.30 vs. limit=8.18 +2024-08-25 04:11:13,103 INFO [train.py:1114] (2/4) Epoch 1, batch 350, loss[loss=1.102, simple_loss=0.7333, pruned_loss=0.8468, ctc_loss=1.072, over 19758.00 frames. ], tot_loss[loss=1.393, simple_loss=0.9663, pruned_loss=1.094, ctc_loss=1.329, over 3190055.67 frames. 
], batch size: 48, lr: 3.83e-02, grad_scale: 2.0 +2024-08-25 04:11:19,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1866.6666666666667, ans=0.2813333333333333 +2024-08-25 04:11:21,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1866.6666666666667, ans=0.2813333333333333 +2024-08-25 04:11:27,548 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.86 vs. limit=4.768 +2024-08-25 04:11:30,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.26 vs. limit=8.94 +2024-08-25 04:11:35,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=21.77 vs. limit=5.986666666666666 +2024-08-25 04:11:45,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=11.12 vs. limit=8.24 +2024-08-25 04:11:49,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2026.6666666666667, ans=0.405 +2024-08-25 04:11:49,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=2026.6666666666667, ans=6.266666666666667 +2024-08-25 04:11:51,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=37.12 vs. limit=8.26 +2024-08-25 04:11:52,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.21 vs. limit=9.02 +2024-08-25 04:11:53,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=8.26 +2024-08-25 04:11:54,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=32.29 vs. limit=8.26 +2024-08-25 04:12:02,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=8.28 +2024-08-25 04:12:11,676 INFO [train.py:1114] (2/4) Epoch 1, batch 400, loss[loss=1.131, simple_loss=0.7571, pruned_loss=0.8203, ctc_loss=1.101, over 19484.00 frames. ], tot_loss[loss=1.333, simple_loss=0.9162, pruned_loss=1.035, ctc_loss=1.277, over 3342861.84 frames. ], batch size: 54, lr: 4.05e-02, grad_scale: 4.0 +2024-08-25 04:12:15,149 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.644e+02 2.144e+02 2.768e+02 4.713e+02, threshold=4.287e+02, percent-clipped=10.0 +2024-08-25 04:12:19,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=24.27 vs. 
limit=9.1 +2024-08-25 04:12:23,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2186.6666666666665, ans=0.22666666666666668 +2024-08-25 04:12:33,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.15 vs. limit=9.14 +2024-08-25 04:12:36,371 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=1.165e+01 +2024-08-25 04:12:40,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.21 vs. limit=9.18 +2024-08-25 04:12:49,727 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.43 vs. limit=9.22 +2024-08-25 04:12:58,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=2293.3333333333335, ans=0.08566666666666667 +2024-08-25 04:13:01,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=17.20 vs. limit=6.173333333333333 +2024-08-25 04:13:05,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=16.56 vs. limit=5.586666666666667 +2024-08-25 04:13:11,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.27 vs. limit=9.3 +2024-08-25 04:13:12,048 INFO [train.py:1114] (2/4) Epoch 1, batch 450, loss[loss=1.105, simple_loss=0.7551, pruned_loss=0.7328, ctc_loss=1.065, over 19621.00 frames. ], tot_loss[loss=1.282, simple_loss=0.8783, pruned_loss=0.9737, ctc_loss=1.232, over 3451125.70 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0 +2024-08-25 04:13:14,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2400.0, ans=0.3875 +2024-08-25 04:13:16,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=26.17 vs. limit=6.2 +2024-08-25 04:14:10,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=2453.3333333333335, ans=0.8141333333333334 +2024-08-25 04:14:12,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.33 vs. limit=8.42 +2024-08-25 04:14:12,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=2506.6666666666665, ans=0.1866666666666667 +2024-08-25 04:14:14,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=11.30 vs. limit=8.44 +2024-08-25 04:14:26,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=2560.0, ans=0.104 +2024-08-25 04:14:28,962 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.49 vs. 
limit=8.46 +2024-08-25 04:14:44,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.80 vs. limit=8.48 +2024-08-25 04:14:47,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.52 vs. limit=8.48 +2024-08-25 04:14:51,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.57 vs. limit=9.46 +2024-08-25 04:14:53,362 INFO [train.py:1114] (2/4) Epoch 1, batch 500, loss[loss=0.9956, simple_loss=0.6927, pruned_loss=0.6055, ctc_loss=0.9628, over 19683.00 frames. ], tot_loss[loss=1.218, simple_loss=0.8351, pruned_loss=0.8938, ctc_loss=1.171, over 3546755.56 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:14:58,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-08-25 04:14:59,588 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.247e+02 2.224e+02 2.884e+02 3.405e+02 7.334e+02, threshold=5.768e+02, percent-clipped=15.0 +2024-08-25 04:14:59,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=2666.6666666666665, ans=0.24 +2024-08-25 04:15:03,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.81 vs. limit=8.5 +2024-08-25 04:15:06,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.22 vs. limit=9.5 +2024-08-25 04:15:12,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=2720.0, ans=0.3725 +2024-08-25 04:15:29,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.73 vs. limit=8.56 +2024-08-25 04:15:47,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.03 vs. limit=8.58 +2024-08-25 04:15:48,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=2880.0, ans=0.088 +2024-08-25 04:15:52,586 INFO [train.py:1114] (2/4) Epoch 1, batch 550, loss[loss=0.9125, simple_loss=0.641, pruned_loss=0.5192, ctc_loss=0.8955, over 19210.00 frames. ], tot_loss[loss=1.152, simple_loss=0.7932, pruned_loss=0.8142, ctc_loss=1.11, over 3608352.33 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:15:52,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=2933.3333333333335, ans=0.03399999999999999 +2024-08-25 04:16:02,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=8.6 +2024-08-25 04:16:03,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.41 vs. 
limit=8.620000000000001 +2024-08-25 04:16:04,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2986.6666666666665, ans=0.36 +2024-08-25 04:16:16,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=9.78 +2024-08-25 04:16:19,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=3040.0, ans=9.78 +2024-08-25 04:16:19,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=9.78 +2024-08-25 04:16:20,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3040.0, ans=0.2696 +2024-08-25 04:16:42,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=3093.3333333333335, ans=0.355 +2024-08-25 04:17:00,431 INFO [train.py:1114] (2/4) Epoch 1, batch 600, loss[loss=0.8499, simple_loss=0.6123, pruned_loss=0.4579, ctc_loss=0.7999, over 19434.00 frames. ], tot_loss[loss=1.082, simple_loss=0.7496, pruned_loss=0.7335, ctc_loss=1.042, over 3666673.28 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:17:00,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=3200.0, ans=0.08 +2024-08-25 04:17:01,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=3200.0, ans=0.09899494936611666 +2024-08-25 04:17:03,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.22 vs. limit=5.8 +2024-08-25 04:17:03,764 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.809e+02 3.766e+02 4.633e+02 8.655e+02, threshold=7.532e+02, percent-clipped=12.0 +2024-08-25 04:17:10,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=3253.3333333333335, ans=0.01701333333333333 +2024-08-25 04:17:15,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.09 vs. limit=9.94 +2024-08-25 04:17:20,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=3253.3333333333335, ans=0.34750000000000003 +2024-08-25 04:18:07,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=3360.0, ans=0.07400000000000001 +2024-08-25 04:18:26,484 INFO [train.py:1114] (2/4) Epoch 1, batch 650, loss[loss=0.7524, simple_loss=0.551, pruned_loss=0.3768, ctc_loss=0.7152, over 19734.00 frames. ], tot_loss[loss=1.008, simple_loss=0.7044, pruned_loss=0.6542, ctc_loss=0.9685, over 3716831.64 frames. 
], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:18:32,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=3466.6666666666665, ans=0.3375 +2024-08-25 04:18:40,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3520.0, ans=0.2648 +2024-08-25 04:19:11,726 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.30 vs. limit=8.86 +2024-08-25 04:19:13,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=3626.6666666666665, ans=0.32999999999999996 +2024-08-25 04:19:21,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=3680.0, ans=0.3275 +2024-08-25 04:19:25,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=3733.3333333333335, ans=0.04000000000000001 +2024-08-25 04:20:32,366 INFO [train.py:1114] (2/4) Epoch 1, batch 700, loss[loss=0.6746, simple_loss=0.4988, pruned_loss=0.3284, ctc_loss=0.6337, over 19719.00 frames. ], tot_loss[loss=0.9438, simple_loss=0.6662, pruned_loss=0.5864, ctc_loss=0.9029, over 3748642.13 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:20:34,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=3733.3333333333335, ans=0.04000000000000001 +2024-08-25 04:20:35,545 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 2.600e+02 3.309e+02 4.487e+02 1.180e+03, threshold=6.619e+02, percent-clipped=3.0 +2024-08-25 04:20:43,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=3786.6666666666665, ans=0.21213333333333334 +2024-08-25 04:20:47,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3786.6666666666665, ans=0.26213333333333333 +2024-08-25 04:20:53,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=3840.0, ans=0.32 +2024-08-25 04:21:11,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3893.3333333333335, ans=0.26106666666666667 +2024-08-25 04:21:19,702 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.85 vs. limit=5.986666666666666 +2024-08-25 04:21:25,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=4000.0, ans=0.3125 +2024-08-25 04:21:26,550 INFO [train.py:1114] (2/4) Epoch 1, batch 750, loss[loss=0.6431, simple_loss=0.4956, pruned_loss=0.2889, ctc_loss=0.5701, over 19497.00 frames. ], tot_loss[loss=0.8839, simple_loss=0.631, pruned_loss=0.5263, ctc_loss=0.8389, over 3774549.71 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:21:33,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.42 vs. 
limit=6.0 +2024-08-25 04:21:38,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=4053.3333333333335, ans=0.07466666666666667 +2024-08-25 04:22:07,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=4160.0, ans=0.2624 +2024-08-25 04:22:09,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4160.0, ans=0.305 +2024-08-25 04:22:33,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=4213.333333333333, ans=0.07 +2024-08-25 04:22:40,630 INFO [train.py:1114] (2/4) Epoch 1, batch 800, loss[loss=0.6197, simple_loss=0.4744, pruned_loss=0.2869, ctc_loss=0.5413, over 19408.00 frames. ], tot_loss[loss=0.8308, simple_loss=0.6008, pruned_loss=0.4744, ctc_loss=0.7793, over 3794397.83 frames. ], batch size: 48, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 04:22:42,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=4266.666666666667, ans=10.7 +2024-08-25 04:22:43,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.47 vs. limit=9.1 +2024-08-25 04:22:43,860 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.484e+02 3.479e+02 4.307e+02 9.603e+02, threshold=6.957e+02, percent-clipped=4.0 +2024-08-25 04:22:48,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4266.666666666667, ans=0.2573333333333333 +2024-08-25 04:23:12,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=4373.333333333333, ans=6.093333333333334 +2024-08-25 04:23:18,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.49 vs. limit=10.82 +2024-08-25 04:23:21,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=4426.666666666667, ans=0.025 +2024-08-25 04:23:21,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=4426.666666666667, ans=0.09899494936611666 +2024-08-25 04:23:22,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.59 vs. limit=10.82 +2024-08-25 04:23:30,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.03 vs. limit=7.213333333333333 +2024-08-25 04:23:42,670 INFO [train.py:1114] (2/4) Epoch 1, batch 850, loss[loss=0.6227, simple_loss=0.4924, pruned_loss=0.2633, ctc_loss=0.5395, over 19636.00 frames. ], tot_loss[loss=0.7803, simple_loss=0.5722, pruned_loss=0.4275, ctc_loss=0.7217, over 3813762.31 frames. 
], batch size: 59, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 04:23:52,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=4586.666666666667, ans=0.009872463768115942 +2024-08-25 04:23:52,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=9.22 +2024-08-25 04:23:58,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=4586.666666666667, ans=0.7394666666666667 +2024-08-25 04:24:22,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=4693.333333333333, ans=0.28 +2024-08-25 04:24:36,249 INFO [train.py:1114] (2/4) Epoch 1, batch 900, loss[loss=0.5051, simple_loss=0.4094, pruned_loss=0.2066, ctc_loss=0.4173, over 19400.00 frames. ], tot_loss[loss=0.7398, simple_loss=0.5499, pruned_loss=0.3902, ctc_loss=0.6744, over 3818394.56 frames. ], batch size: 48, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:24:39,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.433e+02 3.203e+02 4.513e+02 7.559e+02, threshold=6.406e+02, percent-clipped=2.0 +2024-08-25 04:24:53,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4853.333333333333, ans=0.25146666666666667 +2024-08-25 04:25:01,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=4906.666666666667, ans=0.27 +2024-08-25 04:25:05,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.92 vs. limit=9.34 +2024-08-25 04:25:20,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=4960.0, ans=0.7264 +2024-08-25 04:25:22,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=5013.333333333333, ans=0.06866666666666668 +2024-08-25 04:25:32,734 INFO [train.py:1114] (2/4) Epoch 1, batch 950, loss[loss=0.5119, simple_loss=0.4161, pruned_loss=0.2089, ctc_loss=0.4225, over 19500.00 frames. ], tot_loss[loss=0.7028, simple_loss=0.5296, pruned_loss=0.3574, ctc_loss=0.6305, over 3819361.22 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:25:34,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.32 vs. limit=6.266666666666667 +2024-08-25 04:25:51,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=5120.0, ans=0.009756521739130435 +2024-08-25 04:25:53,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=5173.333333333333, ans=0.009744927536231884 +2024-08-25 04:26:01,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=9.44 +2024-08-25 04:26:10,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.66 vs. 
limit=9.46 +2024-08-25 04:26:18,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=5280.0, ans=0.009721739130434783 +2024-08-25 04:26:33,400 INFO [train.py:1114] (2/4) Epoch 1, batch 1000, loss[loss=0.4833, simple_loss=0.4035, pruned_loss=0.1882, ctc_loss=0.3857, over 19850.00 frames. ], tot_loss[loss=0.6715, simple_loss=0.5128, pruned_loss=0.3303, ctc_loss=0.5924, over 3815620.21 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:26:36,696 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.705e+02 2.226e+02 2.758e+02 3.479e+02 9.619e+02, threshold=5.516e+02, percent-clipped=3.0 +2024-08-25 04:26:39,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=5333.333333333333, ans=0.25 +2024-08-25 04:26:42,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=5333.333333333333, ans=11.5 +2024-08-25 04:26:52,477 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.902e-02 +2024-08-25 04:27:05,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.81 vs. limit=11.620000000000001 +2024-08-25 04:27:09,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=5493.333333333333, ans=0.2824 +2024-08-25 04:27:20,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=5546.666666666667, ans=0.24 +2024-08-25 04:27:21,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5546.666666666667, ans=0.24 +2024-08-25 04:27:25,682 INFO [train.py:1114] (2/4) Epoch 1, batch 1050, loss[loss=0.5239, simple_loss=0.4401, pruned_loss=0.2044, ctc_loss=0.4116, over 19854.00 frames. ], tot_loss[loss=0.6382, simple_loss=0.4947, pruned_loss=0.3034, ctc_loss=0.5534, over 3822603.09 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:27:26,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.75 vs. limit=9.6 +2024-08-25 04:27:28,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=5600.0, ans=7.8 +2024-08-25 04:27:29,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.20 vs. limit=9.6 +2024-08-25 04:27:53,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.70 vs. limit=9.64 +2024-08-25 04:27:54,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. limit=3.856 +2024-08-25 04:28:01,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=5760.0, ans=0.22999999999999998 +2024-08-25 04:28:05,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.68 vs. 
limit=11.82 +2024-08-25 04:28:20,176 INFO [train.py:1114] (2/4) Epoch 1, batch 1100, loss[loss=0.5003, simple_loss=0.4165, pruned_loss=0.1995, ctc_loss=0.3981, over 19580.00 frames. ], tot_loss[loss=0.6104, simple_loss=0.4799, pruned_loss=0.2814, ctc_loss=0.5201, over 3829879.19 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:28:23,245 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.625e+02 2.143e+02 2.593e+02 3.421e+02 4.407e+02, threshold=5.186e+02, percent-clipped=0.0 +2024-08-25 04:28:25,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=5866.666666666667, ans=0.09899494936611666 +2024-08-25 04:28:40,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=5973.333333333333, ans=0.03133333333333334 +2024-08-25 04:28:40,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5973.333333333333, ans=0.21999999999999997 +2024-08-25 04:28:52,182 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:28:56,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=6026.666666666667, ans=0.041555555555555554 +2024-08-25 04:28:57,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=6026.666666666667, ans=0.0 +2024-08-25 04:28:58,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=6026.666666666667, ans=0.031166666666666665 +2024-08-25 04:29:10,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.69 vs. limit=12.059999999999999 +2024-08-25 04:29:14,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=6080.0, ans=0.21500000000000002 +2024-08-25 04:29:15,901 INFO [train.py:1114] (2/4) Epoch 1, batch 1150, loss[loss=0.4637, simple_loss=0.4001, pruned_loss=0.1752, ctc_loss=0.3526, over 19591.00 frames. ], tot_loss[loss=0.5881, simple_loss=0.4684, pruned_loss=0.264, ctc_loss=0.4927, over 3828919.76 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 16.0 +2024-08-25 04:29:49,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=6240.0, ans=0.04066666666666667 +2024-08-25 04:32:06,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=6293.333333333333, ans=0.07 +2024-08-25 04:32:19,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.14 vs. limit=12.26 +2024-08-25 04:32:25,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=6346.666666666667, ans=0.2025 +2024-08-25 04:32:27,982 INFO [train.py:1114] (2/4) Epoch 1, batch 1200, loss[loss=0.4879, simple_loss=0.4246, pruned_loss=0.181, ctc_loss=0.3744, over 19843.00 frames. ], tot_loss[loss=0.5703, simple_loss=0.4597, pruned_loss=0.2501, ctc_loss=0.4702, over 3823874.27 frames. 
], batch size: 57, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:32:31,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 2.077e+02 2.797e+02 3.799e+02 8.339e+02, threshold=5.594e+02, percent-clipped=11.0 +2024-08-25 04:32:48,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.85 vs. limit=12.379999999999999 +2024-08-25 04:32:55,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.22 vs. limit=12.379999999999999 +2024-08-25 04:32:59,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=6560.0, ans=0.1925 +2024-08-25 04:33:04,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6560.0, ans=0.2344 +2024-08-25 04:33:06,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=6560.0, ans=0.025 +2024-08-25 04:33:09,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=6613.333333333333, ans=0.07 +2024-08-25 04:33:10,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=6613.333333333333, ans=0.6685333333333334 +2024-08-25 04:33:10,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=6613.333333333333, ans=0.23386666666666667 +2024-08-25 04:33:19,307 INFO [train.py:1114] (2/4) Epoch 1, batch 1250, loss[loss=0.5084, simple_loss=0.436, pruned_loss=0.1967, ctc_loss=0.3919, over 19546.00 frames. ], tot_loss[loss=0.55, simple_loss=0.4498, pruned_loss=0.235, ctc_loss=0.4458, over 3841753.42 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:33:28,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.14 vs. limit=12.5 +2024-08-25 04:33:36,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=10.02 +2024-08-25 04:33:41,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=6773.333333333333, ans=0.0 +2024-08-25 04:33:43,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=6773.333333333333, ans=0.009397101449275363 +2024-08-25 04:33:47,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=6773.333333333333, ans=0.0 +2024-08-25 04:34:12,500 INFO [train.py:1114] (2/4) Epoch 1, batch 1300, loss[loss=0.5032, simple_loss=0.4364, pruned_loss=0.1964, ctc_loss=0.3706, over 18845.00 frames. ], tot_loss[loss=0.5321, simple_loss=0.4407, pruned_loss=0.2227, ctc_loss=0.4244, over 3845178.19 frames. 
], batch size: 76, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:34:15,554 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.007e+02 2.492e+02 3.309e+02 5.533e+02, threshold=4.985e+02, percent-clipped=0.0 +2024-08-25 04:34:35,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.45 vs. limit=12.780000000000001 +2024-08-25 04:34:38,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=7040.0, ans=0.13488 +2024-08-25 04:35:04,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=7146.666666666667, ans=0.03688888888888889 +2024-08-25 04:35:10,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=7200.0, ans=0.648 +2024-08-25 04:35:11,274 INFO [train.py:1114] (2/4) Epoch 1, batch 1350, loss[loss=0.4704, simple_loss=0.4118, pruned_loss=0.181, ctc_loss=0.348, over 19763.00 frames. ], tot_loss[loss=0.516, simple_loss=0.433, pruned_loss=0.2117, ctc_loss=0.4052, over 3856291.44 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:35:16,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.93 vs. limit=12.9 +2024-08-25 04:35:24,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7253.333333333333, ans=0.15999999999999998 +2024-08-25 04:35:26,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7253.333333333333, ans=0.15999999999999998 +2024-08-25 04:35:33,783 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:36:02,099 INFO [train.py:1114] (2/4) Epoch 1, batch 1400, loss[loss=0.419, simple_loss=0.3718, pruned_loss=0.1603, ctc_loss=0.3016, over 19658.00 frames. ], tot_loss[loss=0.5021, simple_loss=0.4261, pruned_loss=0.2027, ctc_loss=0.3892, over 3863215.76 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:36:05,097 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.980e+02 2.233e+02 2.820e+02 5.701e+02, threshold=4.466e+02, percent-clipped=2.0 +2024-08-25 04:36:06,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=7466.666666666667, ans=0.6386666666666667 +2024-08-25 04:36:30,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=7573.333333333333, ans=0.14500000000000002 +2024-08-25 04:36:50,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7680.0, ans=0.14 +2024-08-25 04:36:51,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.16 vs. limit=4.152 +2024-08-25 04:36:54,782 INFO [train.py:1114] (2/4) Epoch 1, batch 1450, loss[loss=0.4705, simple_loss=0.4243, pruned_loss=0.1779, ctc_loss=0.3307, over 19655.00 frames. ], tot_loss[loss=0.4916, simple_loss=0.4215, pruned_loss=0.1959, ctc_loss=0.3764, over 3861439.54 frames. 
], batch size: 63, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:36:59,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7733.333333333333, ans=0.1375 +2024-08-25 04:37:08,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=7786.666666666667, ans=0.009176811594202899 +2024-08-25 04:37:10,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7786.666666666667, ans=0.0 +2024-08-25 04:37:12,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=7786.666666666667, ans=0.6274666666666666 +2024-08-25 04:37:29,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=7893.333333333333, ans=0.025 +2024-08-25 04:37:39,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=7946.666666666667, ans=0.033555555555555554 +2024-08-25 04:37:39,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.58 vs. limit=13.46 +2024-08-25 04:37:44,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=7946.666666666667, ans=0.1275 +2024-08-25 04:37:48,626 INFO [train.py:1114] (2/4) Epoch 1, batch 1500, loss[loss=0.4525, simple_loss=0.4162, pruned_loss=0.1662, ctc_loss=0.3177, over 19592.00 frames. ], tot_loss[loss=0.4807, simple_loss=0.4168, pruned_loss=0.189, ctc_loss=0.3639, over 3861829.57 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:37:52,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=8000.0, ans=0.03333333333333334 +2024-08-25 04:37:54,385 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.582e+02 1.987e+02 2.351e+02 3.240e+02 5.717e+02, threshold=4.702e+02, percent-clipped=4.0 +2024-08-25 04:38:08,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.01 vs. limit=7.013333333333334 +2024-08-25 04:38:19,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=8106.666666666667, ans=0.025 +2024-08-25 04:38:56,150 INFO [train.py:1114] (2/4) Epoch 1, batch 1550, loss[loss=0.4752, simple_loss=0.4209, pruned_loss=0.1826, ctc_loss=0.3594, over 19606.00 frames. ], tot_loss[loss=0.4722, simple_loss=0.4128, pruned_loss=0.1841, ctc_loss=0.3546, over 3847991.69 frames. 
], batch size: 60, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:38:59,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8266.666666666666, ans=0.125 +2024-08-25 04:39:03,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=8266.666666666666, ans=0.125 +2024-08-25 04:39:09,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=8320.0, ans=0.05 +2024-08-25 04:39:47,206 INFO [train.py:1114] (2/4) Epoch 1, batch 1600, loss[loss=0.4587, simple_loss=0.4223, pruned_loss=0.1709, ctc_loss=0.3261, over 19855.00 frames. ], tot_loss[loss=0.4637, simple_loss=0.4091, pruned_loss=0.1794, ctc_loss=0.345, over 3835807.66 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:39:52,844 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 2.044e+02 2.368e+02 2.950e+02 6.795e+02, threshold=4.737e+02, percent-clipped=6.0 +2024-08-25 04:40:03,531 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:40:30,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8693.333333333334, ans=0.21306666666666665 +2024-08-25 04:40:42,984 INFO [train.py:1114] (2/4) Epoch 1, batch 1650, loss[loss=0.4572, simple_loss=0.4145, pruned_loss=0.1727, ctc_loss=0.3412, over 19650.00 frames. ], tot_loss[loss=0.4549, simple_loss=0.4052, pruned_loss=0.1745, ctc_loss=0.3358, over 3832121.91 frames. ], batch size: 59, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:40:44,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=8800.0, ans=0.04949747468305833 +2024-08-25 04:42:03,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.00 vs. limit=7.213333333333333 +2024-08-25 04:42:48,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8906.666666666666, ans=0.21093333333333333 +2024-08-25 04:43:21,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=9013.333333333334, ans=0.02911111111111111 +2024-08-25 04:43:24,860 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:43:28,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.40 vs. limit=10.9 +2024-08-25 04:43:28,616 INFO [train.py:1114] (2/4) Epoch 1, batch 1700, loss[loss=0.3433, simple_loss=0.3344, pruned_loss=0.1191, ctc_loss=0.2401, over 19698.00 frames. ], tot_loss[loss=0.4456, simple_loss=0.4011, pruned_loss=0.1694, ctc_loss=0.3263, over 3846604.51 frames. 
], batch size: 46, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:43:31,593 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.509e+02 1.986e+02 2.386e+02 2.791e+02 4.935e+02, threshold=4.772e+02, percent-clipped=1.0 +2024-08-25 04:43:55,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=9173.333333333334, ans=0.5789333333333333 +2024-08-25 04:43:56,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=9226.666666666666, ans=0.07 +2024-08-25 04:44:02,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 04:44:12,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.92 vs. limit=14.46 +2024-08-25 04:45:23,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=10.98 +2024-08-25 04:45:26,293 INFO [train.py:1114] (2/4) Epoch 1, batch 1750, loss[loss=0.3472, simple_loss=0.3354, pruned_loss=0.1257, ctc_loss=0.2351, over 19669.00 frames. ], tot_loss[loss=0.4367, simple_loss=0.3973, pruned_loss=0.1648, ctc_loss=0.3172, over 3851010.48 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:45:27,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=11.0 +2024-08-25 04:45:32,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.83 vs. limit=7.733333333333333 +2024-08-25 04:45:36,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.78 vs. limit=9.693333333333332 +2024-08-25 04:45:36,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=9386.666666666666, ans=0.5714666666666668 +2024-08-25 04:45:38,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.18 vs. limit=7.346666666666666 +2024-08-25 04:45:44,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.49 vs. limit=14.58 +2024-08-25 04:46:01,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=9493.333333333334, ans=0.05 +2024-08-25 04:46:13,428 INFO [train.py:1114] (2/4) Epoch 1, batch 1800, loss[loss=0.4117, simple_loss=0.4007, pruned_loss=0.1472, ctc_loss=0.287, over 19630.00 frames. ], tot_loss[loss=0.4314, simple_loss=0.3958, pruned_loss=0.1621, ctc_loss=0.3116, over 3851386.34 frames. 
], batch size: 55, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:46:16,181 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 2.025e+02 2.321e+02 2.784e+02 4.120e+02, threshold=4.643e+02, percent-clipped=0.0 +2024-08-25 04:46:17,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=9600.0, ans=0.125 +2024-08-25 04:46:17,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=9600.0, ans=0.02666666666666667 +2024-08-25 04:46:41,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.82 vs. limit=7.904 +2024-08-25 04:46:45,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9760.0, ans=0.125 +2024-08-25 04:48:18,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.15 vs. limit=14.86 +2024-08-25 04:48:23,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.98 vs. limit=11.18 +2024-08-25 04:48:28,775 INFO [train.py:1114] (2/4) Epoch 1, batch 1850, loss[loss=0.4105, simple_loss=0.3955, pruned_loss=0.15, ctc_loss=0.2904, over 19577.00 frames. ], tot_loss[loss=0.423, simple_loss=0.3921, pruned_loss=0.1581, ctc_loss=0.3039, over 3855035.66 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:48:29,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=9866.666666666666, ans=0.025 +2024-08-25 04:48:30,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=9866.666666666666, ans=0.5546666666666666 +2024-08-25 04:48:30,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. limit=4.48 +2024-08-25 04:48:32,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9866.666666666666, ans=0.125 +2024-08-25 04:48:44,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.27 vs. limit=7.48 +2024-08-25 04:48:44,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=9920.0, ans=0.125 +2024-08-25 04:48:52,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.65 vs. limit=11.24 +2024-08-25 04:49:05,133 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:49:07,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=10080.0, ans=0.125 +2024-08-25 04:49:10,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.52 vs. 
limit=8.032 +2024-08-25 04:49:15,872 INFO [train.py:1114] (2/4) Epoch 1, batch 1900, loss[loss=0.3972, simple_loss=0.3927, pruned_loss=0.1435, ctc_loss=0.27, over 19635.00 frames. ], tot_loss[loss=0.4177, simple_loss=0.3906, pruned_loss=0.1555, ctc_loss=0.2987, over 3860409.62 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:49:18,609 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 2.031e+02 2.370e+02 2.878e+02 5.610e+02, threshold=4.739e+02, percent-clipped=2.0 +2024-08-25 04:49:24,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=10186.666666666666, ans=0.025 +2024-08-25 04:49:53,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=10186.666666666666, ans=0.008655072463768116 +2024-08-25 04:50:06,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=10240.0, ans=0.125 +2024-08-25 04:50:19,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=10293.333333333334, ans=8.117333333333335 +2024-08-25 04:50:25,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=10346.666666666666, ans=0.035 +2024-08-25 04:50:25,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10346.666666666666, ans=0.0 +2024-08-25 04:50:31,846 INFO [train.py:1114] (2/4) Epoch 1, batch 1950, loss[loss=0.3603, simple_loss=0.3565, pruned_loss=0.1307, ctc_loss=0.249, over 19602.00 frames. ], tot_loss[loss=0.4123, simple_loss=0.3895, pruned_loss=0.1527, ctc_loss=0.2937, over 3870016.29 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:50:44,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=10453.333333333334, ans=0.125 +2024-08-25 04:50:55,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=10506.666666666666, ans=0.125 +2024-08-25 04:51:16,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=10560.0, ans=0.125 +2024-08-25 04:52:05,800 INFO [train.py:1114] (2/4) Epoch 1, batch 2000, loss[loss=0.3076, simple_loss=0.3223, pruned_loss=0.1047, ctc_loss=0.2086, over 19660.00 frames. ], tot_loss[loss=0.4082, simple_loss=0.3883, pruned_loss=0.151, ctc_loss=0.2905, over 3853565.15 frames. 
], batch size: 45, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:52:09,665 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.538e+02 1.861e+02 2.137e+02 2.685e+02 4.799e+02, threshold=4.274e+02, percent-clipped=1.0 +2024-08-25 04:52:13,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=10666.666666666666, ans=0.5266666666666667 +2024-08-25 04:53:22,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10666.666666666666, ans=0.0 +2024-08-25 04:53:38,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=10720.0, ans=0.008539130434782608 +2024-08-25 04:53:39,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=10720.0, ans=0.125 +2024-08-25 04:53:41,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=10720.0, ans=0.5248 +2024-08-25 04:53:55,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.37 vs. limit=15.58 +2024-08-25 04:54:13,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10826.666666666666, ans=0.19173333333333334 +2024-08-25 04:54:17,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=10826.666666666666, ans=0.5210666666666668 +2024-08-25 04:54:28,810 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:54:32,958 INFO [train.py:1114] (2/4) Epoch 1, batch 2050, loss[loss=0.3706, simple_loss=0.3624, pruned_loss=0.1366, ctc_loss=0.2637, over 19688.00 frames. ], tot_loss[loss=0.402, simple_loss=0.3855, pruned_loss=0.1484, ctc_loss=0.2851, over 3851076.60 frames. ], batch size: 47, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:54:34,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=10933.333333333334, ans=0.02111111111111111 +2024-08-25 04:54:34,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.16 vs. limit=8.373333333333335 +2024-08-25 04:54:44,537 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. limit=11.6 +2024-08-25 04:54:49,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.27 vs. limit=10.493333333333332 +2024-08-25 04:54:53,125 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.79 vs. 
limit=15.74 +2024-08-25 04:55:22,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11093.333333333334, ans=0.18906666666666666 +2024-08-25 04:55:33,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=11146.666666666666, ans=0.125 +2024-08-25 04:55:33,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=11.68 +2024-08-25 04:55:41,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=11200.0, ans=0.008434782608695653 +2024-08-25 04:55:42,297 INFO [train.py:1114] (2/4) Epoch 1, batch 2100, loss[loss=0.3676, simple_loss=0.379, pruned_loss=0.1274, ctc_loss=0.2537, over 19755.00 frames. ], tot_loss[loss=0.3955, simple_loss=0.3826, pruned_loss=0.1453, ctc_loss=0.2793, over 3858190.80 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:56:30,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=11200.0, ans=0.125 +2024-08-25 04:56:35,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=11200.0, ans=0.125 +2024-08-25 04:56:36,118 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 1.936e+02 2.214e+02 2.535e+02 3.885e+02, threshold=4.428e+02, percent-clipped=0.0 +2024-08-25 04:56:37,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11200.0, ans=0.188 +2024-08-25 04:56:38,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=11200.0, ans=0.025 +2024-08-25 04:56:39,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=11.7 +2024-08-25 04:56:59,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11253.333333333334, ans=0.125 +2024-08-25 04:57:07,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11306.666666666666, ans=0.18693333333333334 +2024-08-25 04:57:09,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.08 vs. limit=15.98 +2024-08-25 04:57:16,738 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:57:22,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=11360.0, ans=0.019333333333333338 +2024-08-25 04:57:33,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=11413.333333333334, ans=0.0 +2024-08-25 04:57:35,995 INFO [train.py:1114] (2/4) Epoch 1, batch 2150, loss[loss=0.3265, simple_loss=0.3487, pruned_loss=0.1097, ctc_loss=0.212, over 19853.00 frames. ], tot_loss[loss=0.3905, simple_loss=0.3806, pruned_loss=0.1429, ctc_loss=0.2747, over 3868217.85 frames. 
], batch size: 52, lr: 4.41e-02, grad_scale: 32.0 +2024-08-25 04:58:14,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.07 vs. limit=16.1 +2024-08-25 04:58:45,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.72 vs. limit=7.866666666666666 +2024-08-25 04:59:00,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=11520.0, ans=0.07 +2024-08-25 04:59:36,667 INFO [train.py:1114] (2/4) Epoch 1, batch 2200, loss[loss=0.3692, simple_loss=0.3797, pruned_loss=0.1292, ctc_loss=0.2504, over 19608.00 frames. ], tot_loss[loss=0.386, simple_loss=0.3788, pruned_loss=0.1407, ctc_loss=0.2703, over 3866449.55 frames. ], batch size: 57, lr: 4.41e-02, grad_scale: 32.0 +2024-08-25 04:59:37,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=11733.333333333334, ans=0.125 +2024-08-25 04:59:40,225 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 1.884e+02 2.153e+02 2.810e+02 4.673e+02, threshold=4.307e+02, percent-clipped=1.0 +2024-08-25 04:59:48,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.01 vs. limit=16.34 +2024-08-25 04:59:58,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=16.380000000000003 +2024-08-25 05:00:09,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=11893.333333333334, ans=0.125 +2024-08-25 05:00:30,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=11946.666666666666, ans=0.025 +2024-08-25 05:00:32,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=11946.666666666666, ans=0.4818666666666667 +2024-08-25 05:00:34,224 INFO [train.py:1114] (2/4) Epoch 1, batch 2250, loss[loss=0.3553, simple_loss=0.3716, pruned_loss=0.1219, ctc_loss=0.2381, over 19622.00 frames. ], tot_loss[loss=0.3828, simple_loss=0.3778, pruned_loss=0.1391, ctc_loss=0.2671, over 3867289.53 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:00:37,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=12000.0, ans=0.125 +2024-08-25 05:01:30,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=12160.0, ans=0.016 +2024-08-25 05:01:44,084 INFO [train.py:1114] (2/4) Epoch 1, batch 2300, loss[loss=0.3489, simple_loss=0.3572, pruned_loss=0.1216, ctc_loss=0.2436, over 19513.00 frames. ], tot_loss[loss=0.3786, simple_loss=0.3753, pruned_loss=0.1372, ctc_loss=0.263, over 3861513.50 frames. 
], batch size: 49, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:01:47,657 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.926e+02 2.114e+02 2.507e+02 4.625e+02, threshold=4.228e+02, percent-clipped=3.0 +2024-08-25 05:02:17,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12426.666666666666, ans=0.125 +2024-08-25 05:02:30,774 INFO [train.py:1114] (2/4) Epoch 1, batch 2350, loss[loss=0.3903, simple_loss=0.3946, pruned_loss=0.1403, ctc_loss=0.2637, over 19677.00 frames. ], tot_loss[loss=0.3764, simple_loss=0.3746, pruned_loss=0.1362, ctc_loss=0.2605, over 3863663.06 frames. ], batch size: 63, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:02:31,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=12533.333333333334, ans=0.0 +2024-08-25 05:02:36,236 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:04:31,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12640.0, ans=0.17359999999999998 +2024-08-25 05:04:43,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=12693.333333333334, ans=17.02 +2024-08-25 05:04:56,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=12746.666666666666, ans=0.125 +2024-08-25 05:04:59,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=12746.666666666666, ans=0.125 +2024-08-25 05:05:05,284 INFO [train.py:1114] (2/4) Epoch 1, batch 2400, loss[loss=0.3825, simple_loss=0.3882, pruned_loss=0.1363, ctc_loss=0.2604, over 19247.00 frames. ], tot_loss[loss=0.3759, simple_loss=0.3755, pruned_loss=0.1356, ctc_loss=0.2596, over 3858080.96 frames. ], batch size: 71, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:05:05,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12800.0, ans=0.125 +2024-08-25 05:05:06,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12800.0, ans=0.172 +2024-08-25 05:05:08,755 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.948e+02 2.252e+02 2.666e+02 4.870e+02, threshold=4.504e+02, percent-clipped=4.0 +2024-08-25 05:05:15,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.71 vs. 
limit=12.32 +2024-08-25 05:05:19,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=12853.333333333334, ans=0.125 +2024-08-25 05:05:22,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=12853.333333333334, ans=0.013111111111111108 +2024-08-25 05:05:34,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=12960.0, ans=0.09899494936611666 +2024-08-25 05:05:40,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=12960.0, ans=0.012666666666666666 +2024-08-25 05:05:46,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=13013.333333333334, ans=0.44453333333333334 +2024-08-25 05:05:51,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=13013.333333333334, ans=0.125 +2024-08-25 05:05:52,655 INFO [train.py:1114] (2/4) Epoch 1, batch 2450, loss[loss=0.4987, simple_loss=0.4325, pruned_loss=0.2063, ctc_loss=0.3804, over 13508.00 frames. ], tot_loss[loss=0.3858, simple_loss=0.381, pruned_loss=0.141, ctc_loss=0.2687, over 3731381.37 frames. ], batch size: 141, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:06:17,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=13066.666666666666, ans=0.125 +2024-08-25 05:06:28,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=13120.0, ans=0.012000000000000004 +2024-08-25 05:06:31,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=4.976 +2024-08-25 05:06:35,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=13173.333333333334, ans=0.025 +2024-08-25 05:07:49,749 INFO [train.py:1114] (2/4) Epoch 2, batch 0, loss[loss=0.342, simple_loss=0.3499, pruned_loss=0.1212, ctc_loss=0.2294, over 19423.00 frames. ], tot_loss[loss=0.342, simple_loss=0.3499, pruned_loss=0.1212, ctc_loss=0.2294, over 19423.00 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 05:07:49,750 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-25 05:09:14,179 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.6524, 4.0124, 3.8649, 3.8429], device='cuda:2') +2024-08-25 05:09:16,712 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.2886, simple_loss=0.3508, pruned_loss=0.0823, ctc_loss=0.1542, over 944034.00 frames. +2024-08-25 05:09:16,712 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13259MB +2024-08-25 05:09:17,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=13280.0, ans=0.1672 +2024-08-25 05:09:24,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.98 vs. 
limit=8.32 +2024-08-25 05:09:35,624 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 1.938e+02 2.191e+02 2.677e+02 6.592e+02, threshold=4.382e+02, percent-clipped=7.0 +2024-08-25 05:09:38,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.87 vs. limit=6.666666666666667 +2024-08-25 05:09:43,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=13386.666666666666, ans=0.4314666666666667 +2024-08-25 05:09:43,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 05:09:44,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 05:10:10,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.27 vs. limit=8.386666666666667 +2024-08-25 05:10:10,708 INFO [train.py:1114] (2/4) Epoch 2, batch 50, loss[loss=0.282, simple_loss=0.3032, pruned_loss=0.09346, ctc_loss=0.185, over 19707.00 frames. ], tot_loss[loss=0.3688, simple_loss=0.3726, pruned_loss=0.132, ctc_loss=0.2523, over 845808.55 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:10:33,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=13546.666666666666, ans=0.125 +2024-08-25 05:10:45,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=13600.0, ans=0.09899494936611666 +2024-08-25 05:11:07,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.67 vs. limit=12.620000000000001 +2024-08-25 05:11:12,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=13653.333333333334, ans=0.009777777777777774 +2024-08-25 05:11:14,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=13653.333333333334, ans=0.125 +2024-08-25 05:11:20,665 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:11:47,135 INFO [train.py:1114] (2/4) Epoch 2, batch 100, loss[loss=0.3215, simple_loss=0.3371, pruned_loss=0.1111, ctc_loss=0.2094, over 19733.00 frames. ], tot_loss[loss=0.3686, simple_loss=0.374, pruned_loss=0.1314, ctc_loss=0.2509, over 1500191.62 frames. 
], batch size: 51, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:11:49,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13813.333333333334, ans=0.125 +2024-08-25 05:11:53,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=13813.333333333334, ans=0.009111111111111105 +2024-08-25 05:12:00,809 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.907e+02 2.167e+02 2.481e+02 4.957e+02, threshold=4.333e+02, percent-clipped=1.0 +2024-08-25 05:12:01,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13866.666666666666, ans=0.125 +2024-08-25 05:12:02,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=13866.666666666666, ans=0.0 +2024-08-25 05:12:13,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=13920.0, ans=0.125 +2024-08-25 05:12:15,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=13973.333333333334, ans=0.0 +2024-08-25 05:12:23,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13973.333333333334, ans=0.125 +2024-08-25 05:12:50,515 INFO [train.py:1114] (2/4) Epoch 2, batch 150, loss[loss=0.2924, simple_loss=0.3177, pruned_loss=0.09527, ctc_loss=0.1912, over 19700.00 frames. ], tot_loss[loss=0.363, simple_loss=0.3698, pruned_loss=0.1289, ctc_loss=0.2458, over 2029517.07 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:12:52,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=14080.0, ans=0.007808695652173913 +2024-08-25 05:13:21,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=14186.666666666666, ans=0.125 +2024-08-25 05:13:26,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=14186.666666666666, ans=0.125 +2024-08-25 05:14:15,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.05 vs. limit=8.56 +2024-08-25 05:14:47,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=14293.333333333334, ans=10.0 +2024-08-25 05:14:50,535 INFO [train.py:1114] (2/4) Epoch 2, batch 200, loss[loss=0.3685, simple_loss=0.3686, pruned_loss=0.1338, ctc_loss=0.2521, over 18357.00 frames. ], tot_loss[loss=0.3571, simple_loss=0.3661, pruned_loss=0.126, ctc_loss=0.2402, over 2437654.93 frames. 
], batch size: 85, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:15:13,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14400.0, ans=0.156 +2024-08-25 05:15:14,931 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.847e+02 2.110e+02 2.499e+02 4.235e+02, threshold=4.220e+02, percent-clipped=0.0 +2024-08-25 05:15:22,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=14453.333333333334, ans=0.125 +2024-08-25 05:15:45,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=14560.0, ans=0.06532000000000002 +2024-08-25 05:15:49,792 INFO [train.py:1114] (2/4) Epoch 2, batch 250, loss[loss=0.3627, simple_loss=0.3765, pruned_loss=0.1258, ctc_loss=0.2431, over 19354.00 frames. ], tot_loss[loss=0.3556, simple_loss=0.3659, pruned_loss=0.125, ctc_loss=0.2379, over 2756627.41 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:16:06,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=14613.333333333334, ans=0.3885333333333333 +2024-08-25 05:16:26,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=14720.0, ans=0.38480000000000003 +2024-08-25 05:16:29,743 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:16:35,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=14720.0, ans=13.02 +2024-08-25 05:16:45,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=14773.333333333334, ans=0.005111111111111108 +2024-08-25 05:19:29,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=14826.666666666666, ans=0.125 +2024-08-25 05:19:37,924 INFO [train.py:1114] (2/4) Epoch 2, batch 300, loss[loss=0.3541, simple_loss=0.3747, pruned_loss=0.1223, ctc_loss=0.2225, over 19528.00 frames. ], tot_loss[loss=0.3535, simple_loss=0.3644, pruned_loss=0.1241, ctc_loss=0.2359, over 3001049.19 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:19:49,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=14880.0, ans=0.007634782608695653 +2024-08-25 05:19:56,629 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.858e+02 2.099e+02 2.398e+02 3.801e+02, threshold=4.198e+02, percent-clipped=0.0 +2024-08-25 05:20:13,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=14986.666666666666, ans=0.007611594202898551 +2024-08-25 05:20:51,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15093.333333333334, ans=0.125 +2024-08-25 05:20:54,634 INFO [train.py:1114] (2/4) Epoch 2, batch 350, loss[loss=0.2934, simple_loss=0.326, pruned_loss=0.09399, ctc_loss=0.1823, over 19768.00 frames. ], tot_loss[loss=0.3519, simple_loss=0.3636, pruned_loss=0.1233, ctc_loss=0.2343, over 3190772.48 frames. 
], batch size: 48, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:20:59,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=15146.666666666666, ans=0.125 +2024-08-25 05:20:59,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=15146.666666666666, ans=0.125 +2024-08-25 05:21:09,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=15200.0, ans=0.025 +2024-08-25 05:21:16,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-25 05:21:44,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=15360.0, ans=0.125 +2024-08-25 05:21:50,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=15360.0, ans=0.36240000000000006 +2024-08-25 05:22:06,916 INFO [train.py:1114] (2/4) Epoch 2, batch 400, loss[loss=0.3478, simple_loss=0.3664, pruned_loss=0.1195, ctc_loss=0.2253, over 19488.00 frames. ], tot_loss[loss=0.3513, simple_loss=0.3632, pruned_loss=0.123, ctc_loss=0.2333, over 3343165.54 frames. ], batch size: 54, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:22:10,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=15413.333333333334, ans=0.3605333333333334 +2024-08-25 05:22:13,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=13.280000000000001 +2024-08-25 05:22:20,574 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.895e+02 2.189e+02 2.528e+02 4.758e+02, threshold=4.379e+02, percent-clipped=2.0 +2024-08-25 05:22:26,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=15520.0, ans=0.3568 +2024-08-25 05:22:35,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.39 vs. limit=13.32 +2024-08-25 05:22:40,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=10.229333333333333 +2024-08-25 05:22:57,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=15626.666666666666, ans=0.09899494936611666 +2024-08-25 05:23:46,273 INFO [train.py:1114] (2/4) Epoch 2, batch 450, loss[loss=0.3573, simple_loss=0.3789, pruned_loss=0.1219, ctc_loss=0.2297, over 19608.00 frames. ], tot_loss[loss=0.3493, simple_loss=0.362, pruned_loss=0.122, ctc_loss=0.2313, over 3449550.98 frames. ], batch size: 55, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:23:50,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=15680.0, ans=0.35120000000000007 +2024-08-25 05:23:55,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.59 vs. 
limit=13.4 +2024-08-25 05:23:59,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.22 vs. limit=13.4 +2024-08-25 05:23:59,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=15733.333333333334, ans=0.05446666666666666 +2024-08-25 05:24:06,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15786.666666666666, ans=0.14213333333333333 +2024-08-25 05:24:07,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=15786.666666666666, ans=0.00743768115942029 +2024-08-25 05:24:15,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15840.0, ans=0.1416 +2024-08-25 05:24:33,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=15893.333333333334, ans=0.007414492753623188 +2024-08-25 05:24:37,913 INFO [train.py:1114] (2/4) Epoch 2, batch 500, loss[loss=0.3674, simple_loss=0.3812, pruned_loss=0.1283, ctc_loss=0.2421, over 19707.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3608, pruned_loss=0.1207, ctc_loss=0.2292, over 3545511.59 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:24:38,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=15946.666666666666, ans=0.00022222222222222088 +2024-08-25 05:25:45,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=15946.666666666666, ans=0.125 +2024-08-25 05:26:05,497 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.778e+02 2.035e+02 2.349e+02 4.286e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-25 05:26:09,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=16000.0, ans=0.33999999999999997 +2024-08-25 05:26:24,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=16106.666666666666, ans=0.125 +2024-08-25 05:26:29,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=16160.0, ans=0.0 +2024-08-25 05:26:33,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.94 vs. limit=13.559999999999999 +2024-08-25 05:26:35,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=16160.0, ans=0.0 +2024-08-25 05:26:49,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=16160.0, ans=0.125 +2024-08-25 05:26:49,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=16160.0, ans=0.33440000000000003 +2024-08-25 05:26:53,745 INFO [train.py:1114] (2/4) Epoch 2, batch 550, loss[loss=0.3568, simple_loss=0.373, pruned_loss=0.123, ctc_loss=0.2365, over 19245.00 frames. ], tot_loss[loss=0.3468, simple_loss=0.3607, pruned_loss=0.1206, ctc_loss=0.2288, over 3607977.97 frames. 
], batch size: 71, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:27:02,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=13.58 +2024-08-25 05:27:04,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=16213.333333333334, ans=0.025 +2024-08-25 05:27:04,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16213.333333333334, ans=0.13786666666666667 +2024-08-25 05:27:28,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=16320.0, ans=0.125 +2024-08-25 05:27:36,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=16373.333333333334, ans=0.3269333333333333 +2024-08-25 05:28:05,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 05:28:14,723 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:28:19,446 INFO [train.py:1114] (2/4) Epoch 2, batch 600, loss[loss=0.3703, simple_loss=0.384, pruned_loss=0.1296, ctc_loss=0.2436, over 19326.00 frames. ], tot_loss[loss=0.3462, simple_loss=0.3607, pruned_loss=0.1203, ctc_loss=0.2276, over 3665666.24 frames. ], batch size: 67, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:28:21,000 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.87 vs. limit=19.86 +2024-08-25 05:28:28,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=16480.0, ans=0.125 +2024-08-25 05:28:34,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.917e+02 2.183e+02 2.770e+02 8.189e+02, threshold=4.366e+02, percent-clipped=5.0 +2024-08-25 05:28:54,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=16640.0, ans=0.0 +2024-08-25 05:29:14,160 INFO [train.py:1114] (2/4) Epoch 2, batch 650, loss[loss=0.3229, simple_loss=0.3458, pruned_loss=0.1074, ctc_loss=0.2129, over 19772.00 frames. ], tot_loss[loss=0.3428, simple_loss=0.3585, pruned_loss=0.1186, ctc_loss=0.2246, over 3716440.71 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:29:20,629 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.39 vs. 
limit=13.780000000000001 +2024-08-25 05:31:12,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=16746.666666666668, ans=0.125 +2024-08-25 05:31:32,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=16853.333333333332, ans=0.125 +2024-08-25 05:31:51,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=16906.666666666668, ans=0.0 +2024-08-25 05:31:54,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=16906.666666666668, ans=0.125 +2024-08-25 05:32:01,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=16960.0, ans=0.125 +2024-08-25 05:32:02,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=13.86 +2024-08-25 05:32:06,448 INFO [train.py:1114] (2/4) Epoch 2, batch 700, loss[loss=0.3142, simple_loss=0.3367, pruned_loss=0.1049, ctc_loss=0.2048, over 19727.00 frames. ], tot_loss[loss=0.3428, simple_loss=0.3588, pruned_loss=0.1185, ctc_loss=0.2243, over 3748435.88 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:32:47,851 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.759e+02 2.005e+02 2.359e+02 5.033e+02, threshold=4.011e+02, percent-clipped=2.0 +2024-08-25 05:32:57,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=17120.0, ans=0.30080000000000007 +2024-08-25 05:33:09,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17173.333333333332, ans=0.1282666666666667 +2024-08-25 05:33:21,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=17226.666666666668, ans=0.125 +2024-08-25 05:33:28,051 INFO [train.py:1114] (2/4) Epoch 2, batch 750, loss[loss=0.3284, simple_loss=0.3625, pruned_loss=0.1065, ctc_loss=0.2029, over 19498.00 frames. ], tot_loss[loss=0.3411, simple_loss=0.3575, pruned_loss=0.1178, ctc_loss=0.2226, over 3774949.39 frames. ], batch size: 54, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:35:14,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=17280.0, ans=0.0 +2024-08-25 05:36:53,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=17440.0, ans=0.125 +2024-08-25 05:36:59,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.74 vs. limit=14.04 +2024-08-25 05:37:37,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=17493.333333333332, ans=0.125 +2024-08-25 05:37:40,887 INFO [train.py:1114] (2/4) Epoch 2, batch 800, loss[loss=0.3183, simple_loss=0.337, pruned_loss=0.1107, ctc_loss=0.1954, over 19417.00 frames. ], tot_loss[loss=0.3396, simple_loss=0.3566, pruned_loss=0.1171, ctc_loss=0.2213, over 3796406.38 frames. 
], batch size: 48, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 05:37:55,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=17546.666666666668, ans=20.66 +2024-08-25 05:38:04,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=17600.0, ans=0.28400000000000003 +2024-08-25 05:38:06,528 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.845e+02 2.130e+02 2.517e+02 4.310e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 05:38:15,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=14.120000000000001 +2024-08-25 05:38:46,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=17760.0, ans=0.0070086956521739135 +2024-08-25 05:38:48,687 INFO [train.py:1114] (2/4) Epoch 2, batch 850, loss[loss=0.3508, simple_loss=0.3651, pruned_loss=0.1236, ctc_loss=0.2233, over 19649.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3554, pruned_loss=0.1163, ctc_loss=0.2197, over 3815173.93 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 16.0 +2024-08-25 05:38:49,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=17813.333333333332, ans=0.006997101449275362 +2024-08-25 05:38:53,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=17813.333333333332, ans=0.2765333333333334 +2024-08-25 05:39:00,457 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:39:58,479 INFO [train.py:1114] (2/4) Epoch 2, batch 900, loss[loss=0.2929, simple_loss=0.3148, pruned_loss=0.09927, ctc_loss=0.1811, over 19420.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3548, pruned_loss=0.116, ctc_loss=0.2189, over 3818259.72 frames. ], batch size: 48, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:40:01,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=18080.0, ans=0.025 +2024-08-25 05:40:08,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=18080.0, ans=0.006939130434782609 +2024-08-25 05:40:19,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.852e+02 2.189e+02 2.703e+02 9.878e+02, threshold=4.378e+02, percent-clipped=3.0 +2024-08-25 05:40:33,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=18240.0, ans=0.125 +2024-08-25 05:40:55,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18240.0, ans=0.125 +2024-08-25 05:40:55,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18240.0, ans=0.11760000000000001 +2024-08-25 05:41:14,127 INFO [train.py:1114] (2/4) Epoch 2, batch 950, loss[loss=0.3012, simple_loss=0.3334, pruned_loss=0.09728, ctc_loss=0.1862, over 19501.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3553, pruned_loss=0.1161, ctc_loss=0.2189, over 3818663.54 frames. 
], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:41:51,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18506.666666666668, ans=0.1149333333333333 +2024-08-25 05:41:57,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.67 vs. limit=21.42 +2024-08-25 05:42:06,454 INFO [train.py:1114] (2/4) Epoch 2, batch 1000, loss[loss=0.2808, simple_loss=0.3207, pruned_loss=0.08747, ctc_loss=0.165, over 19851.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3558, pruned_loss=0.1161, ctc_loss=0.2188, over 3815174.09 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:42:21,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.72 vs. limit=5.0 +2024-08-25 05:42:26,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=18666.666666666668, ans=0.025 +2024-08-25 05:42:41,290 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.839e+02 2.030e+02 2.416e+02 3.488e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-25 05:42:44,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18720.0, ans=0.125 +2024-08-25 05:42:49,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18720.0, ans=0.125 +2024-08-25 05:42:52,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=18720.0, ans=10.0 +2024-08-25 05:42:53,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=18720.0, ans=0.125 +2024-08-25 05:42:58,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=18773.333333333332, ans=0.125 +2024-08-25 05:43:05,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=18773.333333333332, ans=0.125 +2024-08-25 05:43:06,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=11.530666666666667 +2024-08-25 05:43:12,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=14.559999999999999 +2024-08-25 05:43:16,659 INFO [train.py:1114] (2/4) Epoch 2, batch 1050, loss[loss=0.3101, simple_loss=0.3499, pruned_loss=0.09718, ctc_loss=0.1899, over 19847.00 frames. ], tot_loss[loss=0.336, simple_loss=0.3546, pruned_loss=0.1152, ctc_loss=0.2174, over 3821771.57 frames. 
], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:43:52,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=18986.666666666668, ans=0.125 +2024-08-25 05:44:04,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=19040.0, ans=0.006730434782608695 +2024-08-25 05:44:22,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=19146.666666666668, ans=0.2 +2024-08-25 05:44:23,169 INFO [train.py:1114] (2/4) Epoch 2, batch 1100, loss[loss=0.2902, simple_loss=0.3323, pruned_loss=0.09012, ctc_loss=0.1699, over 19590.00 frames. ], tot_loss[loss=0.3331, simple_loss=0.353, pruned_loss=0.1137, ctc_loss=0.2147, over 3831001.47 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:44:23,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.85 vs. limit=14.68 +2024-08-25 05:44:25,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=19146.666666666668, ans=0.05 +2024-08-25 05:44:26,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.73 vs. limit=9.786666666666667 +2024-08-25 05:44:48,513 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.777e+02 2.009e+02 2.448e+02 3.967e+02, threshold=4.019e+02, percent-clipped=0.0 +2024-08-25 05:45:07,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=14.74 +2024-08-25 05:45:31,302 INFO [train.py:1114] (2/4) Epoch 2, batch 1150, loss[loss=0.3135, simple_loss=0.3415, pruned_loss=0.1044, ctc_loss=0.1917, over 19605.00 frames. ], tot_loss[loss=0.3323, simple_loss=0.3524, pruned_loss=0.1133, ctc_loss=0.2138, over 3830964.62 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:45:31,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19413.333333333332, ans=0.0 +2024-08-25 05:45:54,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=19520.0, ans=0.025 +2024-08-25 05:46:06,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19573.333333333332, ans=0.125 +2024-08-25 05:47:34,835 INFO [train.py:1114] (2/4) Epoch 2, batch 1200, loss[loss=0.3216, simple_loss=0.3522, pruned_loss=0.1061, ctc_loss=0.1966, over 19853.00 frames. ], tot_loss[loss=0.3341, simple_loss=0.3541, pruned_loss=0.1141, ctc_loss=0.2147, over 3825842.79 frames. 
], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 05:47:43,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=19733.333333333332, ans=0.0 +2024-08-25 05:47:50,318 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.798e+02 2.208e+02 2.852e+02 1.698e+03, threshold=4.415e+02, percent-clipped=3.0 +2024-08-25 05:48:05,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=19786.666666666668, ans=0.125 +2024-08-25 05:48:22,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=19840.0, ans=0.006556521739130435 +2024-08-25 05:48:31,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.55 vs. limit=14.96 +2024-08-25 05:48:40,335 INFO [train.py:1114] (2/4) Epoch 2, batch 1250, loss[loss=0.3176, simple_loss=0.3476, pruned_loss=0.104, ctc_loss=0.199, over 19508.00 frames. ], tot_loss[loss=0.3325, simple_loss=0.3537, pruned_loss=0.1131, ctc_loss=0.2128, over 3843963.19 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:48:50,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=20000.0, ans=0.125 +2024-08-25 05:48:57,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=20000.0, ans=0.025 +2024-08-25 05:49:00,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=20000.0, ans=0.125 +2024-08-25 05:49:07,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20053.333333333332, ans=0.1 +2024-08-25 05:49:19,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=20106.666666666668, ans=0.125 +2024-08-25 05:49:27,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=20160.0, ans=0.125 +2024-08-25 05:49:34,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=20160.0, ans=0.0 +2024-08-25 05:49:36,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=20213.333333333332, ans=0.125 +2024-08-25 05:49:37,284 INFO [train.py:1114] (2/4) Epoch 2, batch 1300, loss[loss=0.3737, simple_loss=0.379, pruned_loss=0.1336, ctc_loss=0.2534, over 18856.00 frames. ], tot_loss[loss=0.3303, simple_loss=0.3523, pruned_loss=0.1121, ctc_loss=0.2105, over 3848656.50 frames. ], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:49:42,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.75 vs. 
limit=10.0 +2024-08-25 05:49:52,768 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.771e+02 1.898e+02 2.175e+02 3.765e+02, threshold=3.796e+02, percent-clipped=0.0 +2024-08-25 05:49:53,034 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:50:12,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=20373.333333333332, ans=0.2 +2024-08-25 05:50:25,297 INFO [train.py:1114] (2/4) Epoch 2, batch 1350, loss[loss=0.3151, simple_loss=0.3473, pruned_loss=0.1025, ctc_loss=0.195, over 19769.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3514, pruned_loss=0.1113, ctc_loss=0.209, over 3859181.43 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:50:32,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=20480.0, ans=0.125 +2024-08-25 05:50:52,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=20586.666666666668, ans=0.125 +2024-08-25 05:50:53,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20586.666666666668, ans=0.0 +2024-08-25 05:51:04,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0 +2024-08-25 05:51:17,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=20693.333333333332, ans=0.0 +2024-08-25 05:51:19,088 INFO [train.py:1114] (2/4) Epoch 2, batch 1400, loss[loss=0.2717, simple_loss=0.2979, pruned_loss=0.08991, ctc_loss=0.1642, over 19692.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3511, pruned_loss=0.1111, ctc_loss=0.2084, over 3865934.75 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:51:26,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20746.666666666668, ans=0.125 +2024-08-25 05:51:29,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.97 vs. 
limit=12.0 +2024-08-25 05:51:34,336 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 1.933e+02 2.205e+02 2.519e+02 3.569e+02, threshold=4.410e+02, percent-clipped=0.0 +2024-08-25 05:51:36,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=20800.0, ans=0.0 +2024-08-25 05:51:44,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=20853.333333333332, ans=0.006336231884057971 +2024-08-25 05:51:46,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=20853.333333333332, ans=0.0 +2024-08-25 05:52:02,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20960.0, ans=0.1 +2024-08-25 05:52:05,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20960.0, ans=0.1 +2024-08-25 05:52:06,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=20960.0, ans=0.125 +2024-08-25 05:52:09,363 INFO [train.py:1114] (2/4) Epoch 2, batch 1450, loss[loss=0.3385, simple_loss=0.3651, pruned_loss=0.1131, ctc_loss=0.2144, over 19650.00 frames. ], tot_loss[loss=0.3281, simple_loss=0.3511, pruned_loss=0.111, ctc_loss=0.2078, over 3864306.48 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:52:10,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=21013.333333333332, ans=0.0 +2024-08-25 05:52:11,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=21013.333333333332, ans=0.05 +2024-08-25 05:52:27,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.18 vs. limit=15.0 +2024-08-25 05:52:28,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21120.0, ans=0.1 +2024-08-25 05:52:33,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=21120.0, ans=0.125 +2024-08-25 05:52:35,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=21120.0, ans=0.125 +2024-08-25 05:52:45,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=21173.333333333332, ans=0.0 +2024-08-25 05:52:53,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21226.666666666668, ans=0.125 +2024-08-25 05:52:57,000 INFO [train.py:1114] (2/4) Epoch 2, batch 1500, loss[loss=0.3251, simple_loss=0.3481, pruned_loss=0.1095, ctc_loss=0.2079, over 19595.00 frames. ], tot_loss[loss=0.3278, simple_loss=0.3511, pruned_loss=0.1108, ctc_loss=0.2072, over 3863794.10 frames. 
], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:53:14,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=21333.333333333332, ans=0.04949747468305833 +2024-08-25 05:53:15,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=21333.333333333332, ans=0.0 +2024-08-25 05:53:17,224 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.832e+02 2.087e+02 2.558e+02 5.212e+02, threshold=4.175e+02, percent-clipped=3.0 +2024-08-25 05:53:32,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=21386.666666666668, ans=0.2 +2024-08-25 05:53:37,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=21440.0, ans=0.0 +2024-08-25 05:53:47,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.47 vs. limit=22.5 +2024-08-25 05:53:48,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=21493.333333333332, ans=10.0 +2024-08-25 05:54:04,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=21493.333333333332, ans=0.125 +2024-08-25 05:54:05,953 INFO [train.py:1114] (2/4) Epoch 2, batch 1550, loss[loss=0.3418, simple_loss=0.3667, pruned_loss=0.1159, ctc_loss=0.2129, over 19601.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3513, pruned_loss=0.1111, ctc_loss=0.2077, over 3847595.28 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0 +2024-08-25 05:54:12,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.75 vs. limit=15.0 +2024-08-25 05:54:20,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.10 vs. limit=22.5 +2024-08-25 05:54:21,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21600.0, ans=0.1 +2024-08-25 05:54:36,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=21600.0, ans=0.0061739130434782605 +2024-08-25 05:54:47,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21653.333333333332, ans=0.125 +2024-08-25 05:54:49,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=21706.666666666668, ans=0.125 +2024-08-25 05:55:11,748 INFO [train.py:1114] (2/4) Epoch 2, batch 1600, loss[loss=0.3288, simple_loss=0.3572, pruned_loss=0.1088, ctc_loss=0.2068, over 19842.00 frames. ], tot_loss[loss=0.3278, simple_loss=0.3508, pruned_loss=0.1109, ctc_loss=0.2075, over 3836408.51 frames. 
], batch size: 57, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 05:55:14,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=21813.333333333332, ans=0.0 +2024-08-25 05:55:17,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=21813.333333333332, ans=0.5 +2024-08-25 05:55:21,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=21866.666666666668, ans=0.0 +2024-08-25 05:55:23,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.96 vs. limit=12.0 +2024-08-25 05:55:24,848 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:55:28,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.30 vs. limit=15.0 +2024-08-25 05:55:32,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.812e+02 2.122e+02 2.604e+02 4.336e+02, threshold=4.244e+02, percent-clipped=2.0 +2024-08-25 05:55:42,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21920.0, ans=0.125 +2024-08-25 05:55:42,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21920.0, ans=0.1 +2024-08-25 05:56:00,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21973.333333333332, ans=0.1 +2024-08-25 05:56:13,288 INFO [train.py:1114] (2/4) Epoch 2, batch 1650, loss[loss=0.3657, simple_loss=0.3876, pruned_loss=0.1249, ctc_loss=0.2345, over 19633.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3506, pruned_loss=0.1108, ctc_loss=0.2075, over 3832415.29 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 16.0 +2024-08-25 05:56:14,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22080.0, ans=0.1 +2024-08-25 05:56:14,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=22080.0, ans=0.07 +2024-08-25 05:56:26,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=22133.333333333332, ans=0.07 +2024-08-25 05:56:46,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22240.0, ans=0.1 +2024-08-25 05:57:07,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=22293.333333333332, ans=0.0 +2024-08-25 05:57:11,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=22346.666666666668, ans=0.0 +2024-08-25 05:57:12,872 INFO [train.py:1114] (2/4) Epoch 2, batch 1700, loss[loss=0.2912, simple_loss=0.3173, pruned_loss=0.0958, ctc_loss=0.1836, over 19693.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3495, pruned_loss=0.1096, ctc_loss=0.2056, over 3846676.64 frames. 
], batch size: 46, lr: 4.12e-02, grad_scale: 16.0
+2024-08-25 05:57:29,326 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.791e+02 2.005e+02 2.338e+02 3.555e+02, threshold=4.010e+02, percent-clipped=0.0
+2024-08-25 05:57:46,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.24 vs. limit=12.0
+2024-08-25 05:58:05,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.26 vs. limit=22.5
+2024-08-25 05:58:11,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=22560.0, ans=0.005965217391304348
+2024-08-25 05:58:30,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.53 vs. limit=15.0
+2024-08-25 05:58:34,080 INFO [train.py:1114] (2/4) Epoch 2, batch 1750, loss[loss=0.2778, simple_loss=0.311, pruned_loss=0.08884, ctc_loss=0.1672, over 19650.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.348, pruned_loss=0.1089, ctc_loss=0.204, over 3850482.56 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 16.0
+2024-08-25 05:58:35,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22613.333333333332, ans=0.1
+2024-08-25 05:58:51,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=7.70 vs. limit=12.0
+2024-08-25 05:58:52,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=22720.0, ans=0.0
+2024-08-25 05:58:56,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22720.0, ans=0.125
+2024-08-25 05:59:07,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.22 vs. limit=15.0
+2024-08-25 05:59:07,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.82 vs. limit=15.0
+2024-08-25 05:59:13,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.34 vs. limit=15.0
+2024-08-25 05:59:14,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22773.333333333332, ans=0.125
+2024-08-25 05:59:14,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=12.0
+2024-08-25 05:59:15,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=22826.666666666668, ans=0.125
+2024-08-25 05:59:24,718 INFO [train.py:1114] (2/4) Epoch 2, batch 1800, loss[loss=0.3311, simple_loss=0.3582, pruned_loss=0.1111, ctc_loss=0.2043, over 19615.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3481, pruned_loss=0.1089, ctc_loss=0.2043, over 3853017.99 frames. ], batch size: 55, lr: 4.11e-02, grad_scale: 16.0
+2024-08-25 05:59:26,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=22880.0, ans=0.125
+2024-08-25 05:59:27,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.03 vs. limit=15.0
+2024-08-25 05:59:32,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=22880.0, ans=0.125
+2024-08-25 05:59:39,810 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.812e+02 2.002e+02 2.312e+02 3.839e+02, threshold=4.004e+02, percent-clipped=0.0
+2024-08-25 05:59:49,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22986.666666666668, ans=0.0
+2024-08-25 06:00:08,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=23093.333333333332, ans=0.025
+2024-08-25 06:00:12,427 INFO [train.py:1114] (2/4) Epoch 2, batch 1850, loss[loss=0.3455, simple_loss=0.368, pruned_loss=0.1163, ctc_loss=0.2264, over 19588.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3475, pruned_loss=0.1083, ctc_loss=0.2029, over 3854273.72 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 16.0
+2024-08-25 06:00:20,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23200.0, ans=0.1
+2024-08-25 06:00:29,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=23253.333333333332, ans=0.1
+2024-08-25 06:00:54,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=23360.0, ans=0.125
+2024-08-25 06:00:55,392 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:00:59,798 INFO [train.py:1114] (2/4) Epoch 2, batch 1900, loss[loss=0.3452, simple_loss=0.3651, pruned_loss=0.1183, ctc_loss=0.2215, over 19637.00 frames. ], tot_loss[loss=0.3233, simple_loss=0.3482, pruned_loss=0.1085, ctc_loss=0.2035, over 3859639.00 frames. ], batch size: 59, lr: 4.10e-02, grad_scale: 16.0
+2024-08-25 06:01:08,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23413.333333333332, ans=0.1
+2024-08-25 06:01:18,893 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.814e+02 2.067e+02 2.451e+02 4.716e+02, threshold=4.135e+02, percent-clipped=1.0
+2024-08-25 06:01:34,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-08-25 06:01:52,057 INFO [train.py:1114] (2/4) Epoch 2, batch 1950, loss[loss=0.3023, simple_loss=0.3369, pruned_loss=0.0987, ctc_loss=0.1756, over 19582.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3488, pruned_loss=0.1082, ctc_loss=0.2029, over 3868670.13 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 16.0
+2024-08-25 06:02:03,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=23733.333333333332, ans=0.125
+2024-08-25 06:02:22,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=23840.0, ans=0.0
+2024-08-25 06:02:24,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=23840.0, ans=0.125
+2024-08-25 06:02:40,716 INFO [train.py:1114] (2/4) Epoch 2, batch 2000, loss[loss=0.2752, simple_loss=0.3036, pruned_loss=0.08907, ctc_loss=0.1716, over 19678.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3491, pruned_loss=0.1084, ctc_loss=0.2034, over 3854662.64 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0
+2024-08-25 06:02:40,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23946.666666666668, ans=0.125
+2024-08-25 06:02:43,472 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.48 vs. limit=22.5
+2024-08-25 06:02:46,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=23946.666666666668, ans=0.0
+2024-08-25 06:02:49,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=23946.666666666668, ans=0.07
+2024-08-25 06:02:53,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=24000.0, ans=0.125
+2024-08-25 06:02:57,868 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.781e+02 1.996e+02 2.377e+02 5.355e+02, threshold=3.992e+02, percent-clipped=1.0
+2024-08-25 06:03:26,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.83 vs. limit=10.0
+2024-08-25 06:03:29,347 INFO [train.py:1114] (2/4) Epoch 2, batch 2050, loss[loss=0.2615, simple_loss=0.2948, pruned_loss=0.08284, ctc_loss=0.1562, over 19714.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3472, pruned_loss=0.1076, ctc_loss=0.2019, over 3850224.89 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 06:03:38,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=24266.666666666668, ans=0.0
+2024-08-25 06:04:03,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=24373.333333333332, ans=0.035
+2024-08-25 06:04:13,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=24480.0, ans=0.2
+2024-08-25 06:04:17,646 INFO [train.py:1114] (2/4) Epoch 2, batch 2100, loss[loss=0.3017, simple_loss=0.3379, pruned_loss=0.09692, ctc_loss=0.1791, over 19778.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3455, pruned_loss=0.1063, ctc_loss=0.1996, over 3858887.10 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0
+2024-08-25 06:04:24,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=24480.0, ans=0.125
+2024-08-25 06:04:33,038 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.823e+02 2.012e+02 2.259e+02 3.531e+02, threshold=4.024e+02, percent-clipped=0.0
+2024-08-25 06:04:34,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0
+2024-08-25 06:04:42,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24586.666666666668, ans=0.125
+2024-08-25 06:04:48,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=24640.0, ans=0.125
+2024-08-25 06:04:56,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=24693.333333333332, ans=0.0
+2024-08-25 06:05:02,134 INFO [train.py:1114] (2/4) Epoch 2, batch 2150, loss[loss=0.2801, simple_loss=0.3242, pruned_loss=0.0844, ctc_loss=0.1679, over 19844.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3439, pruned_loss=0.1051, ctc_loss=0.1974, over 3869343.48 frames. ], batch size: 52, lr: 4.07e-02, grad_scale: 32.0
+2024-08-25 06:05:06,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=24746.666666666668, ans=0.0
+2024-08-25 06:05:29,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=24853.333333333332, ans=0.125
+2024-08-25 06:05:33,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=24906.666666666668, ans=0.005455072463768116
+2024-08-25 06:05:36,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 06:05:39,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=24906.666666666668, ans=0.09899494936611666
+2024-08-25 06:05:40,523 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:05:43,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 06:05:44,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=24906.666666666668, ans=0.125
+2024-08-25 06:05:44,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=24960.0, ans=0.125
+2024-08-25 06:05:56,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.62 vs. limit=10.0
+2024-08-25 06:05:58,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=15.0
+2024-08-25 06:05:59,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=24960.0, ans=0.125
+2024-08-25 06:05:59,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.72 vs. limit=22.5
+2024-08-25 06:06:01,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.44 vs. limit=15.0
+2024-08-25 06:06:01,432 INFO [train.py:1114] (2/4) Epoch 2, batch 2200, loss[loss=0.3075, simple_loss=0.3474, pruned_loss=0.0974, ctc_loss=0.1823, over 19581.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.344, pruned_loss=0.1051, ctc_loss=0.1971, over 3867352.30 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 06:06:02,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.42 vs. limit=22.5
+2024-08-25 06:06:06,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25013.333333333332, ans=0.1
+2024-08-25 06:06:11,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25013.333333333332, ans=0.1
+2024-08-25 06:06:12,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=25013.333333333332, ans=0.005431884057971015
+2024-08-25 06:06:12,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.07 vs. limit=15.0
+2024-08-25 06:06:13,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=25066.666666666668, ans=0.125
+2024-08-25 06:06:15,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=25066.666666666668, ans=0.09899494936611666
+2024-08-25 06:06:16,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=31.20 vs. limit=22.5
+2024-08-25 06:06:19,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25066.666666666668, ans=0.1
+2024-08-25 06:06:25,284 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.924e+02 2.286e+02 2.709e+02 6.222e+02, threshold=4.573e+02, percent-clipped=4.0
+2024-08-25 06:06:34,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=25120.0, ans=0.2
+2024-08-25 06:06:37,801 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:06:54,562 INFO [train.py:1114] (2/4) Epoch 2, batch 2250, loss[loss=0.3233, simple_loss=0.3563, pruned_loss=0.1056, ctc_loss=0.1977, over 19623.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3443, pruned_loss=0.1052, ctc_loss=0.1972, over 3866805.21 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0
+2024-08-25 06:07:06,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=25333.333333333332, ans=0.04949747468305833
+2024-08-25 06:07:06,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25333.333333333332, ans=0.1
+2024-08-25 06:07:08,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=25333.333333333332, ans=0.025
+2024-08-25 06:07:10,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.02 vs. limit=15.0
+2024-08-25 06:07:12,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=25386.666666666668, ans=0.125
+2024-08-25 06:07:33,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.81 vs. limit=22.5
+2024-08-25 06:07:40,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=25546.666666666668, ans=0.125
+2024-08-25 06:07:41,088 INFO [train.py:1114] (2/4) Epoch 2, batch 2300, loss[loss=0.2961, simple_loss=0.3224, pruned_loss=0.09704, ctc_loss=0.1893, over 19496.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3437, pruned_loss=0.1054, ctc_loss=0.1973, over 3860956.39 frames. ], batch size: 49, lr: 4.05e-02, grad_scale: 32.0
+2024-08-25 06:07:58,725 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.382e+02 1.775e+02 2.049e+02 2.504e+02 6.120e+02, threshold=4.097e+02, percent-clipped=1.0
+2024-08-25 06:08:02,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=25653.333333333332, ans=0.005292753623188406
+2024-08-25 06:08:06,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.98 vs. limit=15.0
+2024-08-25 06:08:10,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=25653.333333333332, ans=15.0
+2024-08-25 06:08:10,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25706.666666666668, ans=0.1
+2024-08-25 06:08:20,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=25760.0, ans=0.125
+2024-08-25 06:08:20,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.47 vs. limit=15.0
+2024-08-25 06:08:29,085 INFO [train.py:1114] (2/4) Epoch 2, batch 2350, loss[loss=0.3297, simple_loss=0.3521, pruned_loss=0.1123, ctc_loss=0.207, over 19700.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3438, pruned_loss=0.1057, ctc_loss=0.1977, over 3863339.85 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 06:08:36,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25813.333333333332, ans=0.1
+2024-08-25 06:09:03,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=25973.333333333332, ans=0.125
+2024-08-25 06:09:28,459 INFO [train.py:1114] (2/4) Epoch 2, batch 2400, loss[loss=0.3512, simple_loss=0.3654, pruned_loss=0.1216, ctc_loss=0.2344, over 19256.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3459, pruned_loss=0.1064, ctc_loss=0.1991, over 3857805.16 frames. ], batch size: 71, lr: 4.04e-02, grad_scale: 32.0
+2024-08-25 06:09:43,436 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.803e+02 2.129e+02 2.459e+02 5.388e+02, threshold=4.257e+02, percent-clipped=1.0
+2024-08-25 06:09:44,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=26133.333333333332, ans=0.125
+2024-08-25 06:09:52,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.51 vs. limit=10.0
+2024-08-25 06:10:02,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=18.06 vs. limit=15.0
+2024-08-25 06:10:03,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.35 vs. limit=15.0
+2024-08-25 06:10:12,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=26293.333333333332, ans=0.0
+2024-08-25 06:10:14,654 INFO [train.py:1114] (2/4) Epoch 2, batch 2450, loss[loss=0.4336, simple_loss=0.4037, pruned_loss=0.1681, ctc_loss=0.3184, over 12860.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3521, pruned_loss=0.1118, ctc_loss=0.2091, over 3730769.54 frames. ], batch size: 141, lr: 4.03e-02, grad_scale: 32.0
+2024-08-25 06:10:22,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0
+2024-08-25 06:10:23,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=26400.0, ans=0.05
+2024-08-25 06:10:33,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.49 vs. limit=22.5
+2024-08-25 06:10:38,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=26453.333333333332, ans=0.125
+2024-08-25 06:11:35,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26554.666666666668, ans=0.07
+2024-08-25 06:11:53,137 INFO [train.py:1114] (2/4) Epoch 3, batch 0, loss[loss=0.3167, simple_loss=0.3391, pruned_loss=0.1057, ctc_loss=0.2076, over 19414.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3391, pruned_loss=0.1057, ctc_loss=0.2076, over 19414.00 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 32.0
+2024-08-25 06:11:53,508 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-25 06:12:07,835 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2565, simple_loss=0.3309, pruned_loss=0.06653, ctc_loss=0.1228, over 944034.00 frames.
+2024-08-25 06:12:07,837 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 13259MB
+2024-08-25 06:12:09,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=26554.666666666668, ans=0.2
+2024-08-25 06:12:18,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=26554.666666666668, ans=0.2
+2024-08-25 06:12:39,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.20 vs. limit=22.5
+2024-08-25 06:12:49,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.86 vs. limit=15.0
+2024-08-25 06:13:57,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=26608.0, ans=0.125
+2024-08-25 06:15:43,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=26661.333333333332, ans=0.05
+2024-08-25 06:16:00,080 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.983e+02 2.286e+02 2.644e+02 3.774e+02, threshold=4.572e+02, percent-clipped=0.0
+2024-08-25 06:17:18,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=26714.666666666668, ans=0.025
+2024-08-25 06:17:18,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26714.666666666668, ans=0.125
+2024-08-25 06:18:46,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=26768.0, ans=0.0
+2024-08-25 06:22:59,721 INFO [train.py:1114] (2/4) Epoch 3, batch 50, loss[loss=0.2665, simple_loss=0.3024, pruned_loss=0.08399, ctc_loss=0.1566, over 19698.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.349, pruned_loss=0.1082, ctc_loss=0.2039, over 844960.58 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 06:23:30,436 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:29:36,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.52 vs. limit=15.0
+2024-08-25 06:30:18,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=26874.666666666668, ans=0.125
+2024-08-25 06:30:23,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=26874.666666666668, ans=0.5
+2024-08-25 06:44:24,666 INFO [train.py:1114] (2/4) Epoch 3, batch 100, loss[loss=0.2764, simple_loss=0.3186, pruned_loss=0.08518, ctc_loss=0.1598, over 19726.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3483, pruned_loss=0.1067, ctc_loss=0.2008, over 1500110.56 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0
+2024-08-25 06:47:32,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=27141.333333333332, ans=22.5
+2024-08-25 06:47:48,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=27194.666666666668, ans=0.00495768115942029
+2024-08-25 06:48:13,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.88 vs. limit=22.5
+2024-08-25 06:48:15,486 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.744e+02 2.032e+02 2.291e+02 1.205e+03, threshold=4.063e+02, percent-clipped=1.0
+2024-08-25 06:50:43,549 INFO [train.py:1114] (2/4) Epoch 3, batch 150, loss[loss=0.2966, simple_loss=0.3201, pruned_loss=0.09999, ctc_loss=0.1827, over 19704.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3454, pruned_loss=0.1045, ctc_loss=0.1969, over 2028676.42 frames. ], batch size: 47, lr: 3.81e-02, grad_scale: 16.0
+2024-08-25 06:51:50,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.38 vs. limit=15.0
+2024-08-25 06:53:48,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.07 vs. limit=22.5
+2024-08-25 06:54:31,942 INFO [train.py:1114] (2/4) Epoch 3, batch 200, loss[loss=0.3397, simple_loss=0.3568, pruned_loss=0.118, ctc_loss=0.2162, over 18180.00 frames. ], tot_loss[loss=0.3137, simple_loss=0.3426, pruned_loss=0.1035, ctc_loss=0.1947, over 2435563.59 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 06:54:47,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=27621.333333333332, ans=0.025
+2024-08-25 06:55:04,020 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.69 vs. limit=15.0
+2024-08-25 06:56:00,392 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.731e+02 1.977e+02 2.205e+02 3.305e+02, threshold=3.953e+02, percent-clipped=0.0
+2024-08-25 06:56:20,576 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 06:56:34,801 INFO [train.py:1114] (2/4) Epoch 3, batch 250, loss[loss=0.3128, simple_loss=0.3443, pruned_loss=0.1012, ctc_loss=0.1969, over 19405.00 frames. ], tot_loss[loss=0.3118, simple_loss=0.3419, pruned_loss=0.1024, ctc_loss=0.1925, over 2755297.25 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0
+2024-08-25 06:57:28,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=27941.333333333332, ans=0.125
+2024-08-25 06:57:32,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27994.666666666668, ans=0.1
+2024-08-25 07:00:43,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28048.0, ans=0.1
+2024-08-25 07:02:41,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=28048.0, ans=0.125
+2024-08-25 07:02:46,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=28048.0, ans=0.2
+2024-08-25 07:02:49,403 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 07:03:29,188 INFO [train.py:1114] (2/4) Epoch 3, batch 300, loss[loss=0.2881, simple_loss=0.3347, pruned_loss=0.08668, ctc_loss=0.1705, over 19532.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.3411, pruned_loss=0.1019, ctc_loss=0.191, over 2999990.84 frames. ], batch size: 61, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 07:04:04,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=28208.0, ans=0.2
+2024-08-25 07:04:13,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.53 vs. limit=22.5
+2024-08-25 07:04:27,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28208.0, ans=0.1
+2024-08-25 07:04:44,395 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.724e+02 1.968e+02 2.265e+02 3.417e+02, threshold=3.936e+02, percent-clipped=0.0
+2024-08-25 07:04:44,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=28314.666666666668, ans=6.0
+2024-08-25 07:05:18,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.93 vs. limit=15.0
+2024-08-25 07:05:36,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.49 vs. limit=12.0
+2024-08-25 07:05:49,898 INFO [train.py:1114] (2/4) Epoch 3, batch 350, loss[loss=0.2745, simple_loss=0.3075, pruned_loss=0.08835, ctc_loss=0.1621, over 19709.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.3407, pruned_loss=0.1015, ctc_loss=0.1903, over 3190487.02 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0
+2024-08-25 07:05:51,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=28421.333333333332, ans=0.125
+2024-08-25 07:05:57,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=28421.333333333332, ans=0.125
+2024-08-25 07:07:29,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=28528.0, ans=0.2
+2024-08-25 07:08:01,163 INFO [train.py:1114] (2/4) Epoch 3, batch 400, loss[loss=0.307, simple_loss=0.3411, pruned_loss=0.0981, ctc_loss=0.192, over 19501.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3399, pruned_loss=0.1011, ctc_loss=0.1894, over 3341906.26 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 32.0
+2024-08-25 07:08:01,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.54 vs. limit=12.0
+2024-08-25 07:08:15,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28741.333333333332, ans=0.1
+2024-08-25 07:08:20,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=28741.333333333332, ans=0.0
+2024-08-25 07:08:24,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28794.666666666668, ans=0.1
+2024-08-25 07:08:42,255 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.762e+02 1.982e+02 2.336e+02 5.420e+02, threshold=3.963e+02, percent-clipped=2.0
+2024-08-25 07:08:54,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=28901.333333333332, ans=0.025
+2024-08-25 07:09:04,150 INFO [train.py:1114] (2/4) Epoch 3, batch 450, loss[loss=0.2905, simple_loss=0.3334, pruned_loss=0.09053, ctc_loss=0.1664, over 19616.00 frames. ], tot_loss[loss=0.3087, simple_loss=0.3397, pruned_loss=0.101, ctc_loss=0.1892, over 3450576.39 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0
+2024-08-25 07:09:08,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=28954.666666666668, ans=0.05
+2024-08-25 07:09:27,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.21 vs. limit=22.5
+2024-08-25 07:09:35,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.72 vs. limit=15.0
+2024-08-25 07:09:42,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.33 vs. limit=15.0
+2024-08-25 07:09:56,838 INFO [train.py:1114] (2/4) Epoch 3, batch 500, loss[loss=0.3002, simple_loss=0.3425, pruned_loss=0.09416, ctc_loss=0.1737, over 19718.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3386, pruned_loss=0.1003, ctc_loss=0.1879, over 3546365.53 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0
+2024-08-25 07:10:16,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.39 vs. limit=22.5
+2024-08-25 07:10:24,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29274.666666666668, ans=0.1
+2024-08-25 07:10:43,377 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.753e+02 1.966e+02 2.327e+02 4.047e+02, threshold=3.932e+02, percent-clipped=2.0
+2024-08-25 07:11:10,797 INFO [train.py:1114] (2/4) Epoch 3, batch 550, loss[loss=0.3457, simple_loss=0.3663, pruned_loss=0.1173, ctc_loss=0.2262, over 19277.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3394, pruned_loss=0.1014, ctc_loss=0.1896, over 3609812.95 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 16.0
+2024-08-25 07:11:37,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29488.0, ans=0.125
+2024-08-25 07:11:39,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=29488.0, ans=0.125
+2024-08-25 07:11:40,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29488.0, ans=0.125
+2024-08-25 07:12:01,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29541.333333333332, ans=0.125
+2024-08-25 07:12:01,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.60 vs. limit=15.0
+2024-08-25 07:12:14,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=29648.0, ans=0.125
+2024-08-25 07:12:40,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=29701.333333333332, ans=0.125
+2024-08-25 07:12:40,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=29701.333333333332, ans=0.0
+2024-08-25 07:12:53,138 INFO [train.py:1114] (2/4) Epoch 3, batch 600, loss[loss=0.3417, simple_loss=0.367, pruned_loss=0.1162, ctc_loss=0.2098, over 19378.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3387, pruned_loss=0.1006, ctc_loss=0.1882, over 3667534.10 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 16.0
+2024-08-25 07:12:53,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29754.666666666668, ans=0.1
+2024-08-25 07:13:11,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0
+2024-08-25 07:13:31,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0
+2024-08-25 07:13:37,725 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.812e+02 2.009e+02 2.360e+02 5.731e+02, threshold=4.017e+02, percent-clipped=3.0
+2024-08-25 07:13:38,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=29914.666666666668, ans=0.004366376811594202
+2024-08-25 07:13:53,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29968.0, ans=0.125
+2024-08-25 07:13:54,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=29968.0, ans=0.125
+2024-08-25 07:14:02,709 INFO [train.py:1114] (2/4) Epoch 3, batch 650, loss[loss=0.2939, simple_loss=0.3292, pruned_loss=0.09426, ctc_loss=0.1751, over 19773.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3368, pruned_loss=0.09938, ctc_loss=0.1861, over 3717508.31 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 16.0
+2024-08-25 07:14:07,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=30021.333333333332, ans=0.035
+2024-08-25 07:14:07,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=30021.333333333332, ans=0.125
+2024-08-25 07:14:10,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30021.333333333332, ans=0.125
+2024-08-25 07:14:15,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=30074.666666666668, ans=0.025
+2024-08-25 07:14:16,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=30074.666666666668, ans=0.125
+2024-08-25 07:14:26,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=30128.0, ans=0.125
+2024-08-25 07:14:39,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=30181.333333333332, ans=0.0
+2024-08-25 07:14:42,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=30181.333333333332, ans=0.125
+2024-08-25 07:14:55,113 INFO [train.py:1114] (2/4) Epoch 3, batch 700, loss[loss=0.2639, simple_loss=0.3106, pruned_loss=0.0776, ctc_loss=0.1549, over 19726.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3377, pruned_loss=0.09983, ctc_loss=0.1868, over 3749069.74 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 16.0
+2024-08-25 07:14:57,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=30288.0, ans=0.004285217391304348
+2024-08-25 07:14:59,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=30288.0, ans=0.0
+2024-08-25 07:15:02,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30288.0, ans=0.125
+2024-08-25 07:15:03,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=30288.0, ans=0.125
+2024-08-25 07:15:18,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.17 vs. limit=6.0
+2024-08-25 07:15:22,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=30394.666666666668, ans=0.2
+2024-08-25 07:15:28,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.846e+02 1.998e+02 2.505e+02 9.071e+02, threshold=3.995e+02, percent-clipped=5.0
+2024-08-25 07:15:29,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=30448.0, ans=0.125
+2024-08-25 07:15:37,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=30448.0, ans=0.0
+2024-08-25 07:15:58,577 INFO [train.py:1114] (2/4) Epoch 3, batch 750, loss[loss=0.3048, simple_loss=0.3415, pruned_loss=0.09795, ctc_loss=0.1804, over 19501.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3373, pruned_loss=0.09959, ctc_loss=0.1863, over 3774217.50 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 16.0
+2024-08-25 07:16:09,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=12.0
+2024-08-25 07:16:22,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30608.0, ans=0.125
+2024-08-25 07:16:29,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=30608.0, ans=0.0
+2024-08-25 07:16:31,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.36 vs. limit=15.0
+2024-08-25 07:16:46,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=30661.333333333332, ans=0.125
+2024-08-25 07:19:10,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30714.666666666668, ans=0.1
+2024-08-25 07:21:16,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.63 vs. limit=22.5
+2024-08-25 07:34:42,530 INFO [train.py:1114] (2/4) Epoch 3, batch 800, loss[loss=0.2981, simple_loss=0.3287, pruned_loss=0.09765, ctc_loss=0.1807, over 19404.00 frames. ], tot_loss[loss=0.305, simple_loss=0.337, pruned_loss=0.09931, ctc_loss=0.1858, over 3795776.34 frames. ], batch size: 48, lr: 3.73e-02, grad_scale: 32.0
+2024-08-25 08:02:14,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=30928.0, ans=0.125
+2024-08-25 08:02:40,819 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.761e+02 1.928e+02 2.233e+02 3.899e+02, threshold=3.856e+02, percent-clipped=0.0
+2024-08-25 08:12:59,505 INFO [train.py:1114] (2/4) Epoch 3, batch 850, loss[loss=0.3225, simple_loss=0.3588, pruned_loss=0.1034, ctc_loss=0.1983, over 19648.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3363, pruned_loss=0.09882, ctc_loss=0.185, over 3815617.43 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0
+2024-08-25 08:27:21,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.21 vs. limit=10.0
+2024-08-25 08:41:05,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=31301.333333333332, ans=0.125
+2024-08-25 08:44:41,572 INFO [train.py:1114] (2/4) Epoch 3, batch 900, loss[loss=0.2713, simple_loss=0.3109, pruned_loss=0.08433, ctc_loss=0.1577, over 19410.00 frames. ], tot_loss[loss=0.305, simple_loss=0.337, pruned_loss=0.09936, ctc_loss=0.1858, over 3818244.52 frames. ], batch size: 48, lr: 3.72e-02, grad_scale: 32.0
+2024-08-25 08:48:07,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=31354.666666666668, ans=0.125
+2024-08-25 08:49:30,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=31408.0, ans=0.0
+2024-08-25 08:51:42,119 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 08:57:47,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0
+2024-08-25 08:57:54,734 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.748e+02 1.945e+02 2.250e+02 3.446e+02, threshold=3.889e+02, percent-clipped=0.0
+2024-08-25 09:02:00,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=31514.666666666668, ans=0.0
+2024-08-25 09:02:10,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=31568.0, ans=0.125
+2024-08-25 09:04:09,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=31568.0, ans=0.025
+2024-08-25 09:05:03,363 INFO [train.py:1114] (2/4) Epoch 3, batch 950, loss[loss=0.2828, simple_loss=0.3224, pruned_loss=0.08785, ctc_loss=0.169, over 19489.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.3372, pruned_loss=0.09957, ctc_loss=0.1862, over 3820235.48 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 32.0
+2024-08-25 09:12:46,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.77 vs. limit=22.5
+2024-08-25 09:16:15,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=31728.0, ans=0.07
+2024-08-25 09:20:47,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.11 vs. limit=10.0
+2024-08-25 09:23:03,906 INFO [train.py:1114] (2/4) Epoch 3, batch 1000, loss[loss=0.2808, simple_loss=0.3189, pruned_loss=0.08767, ctc_loss=0.1682, over 19853.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3384, pruned_loss=0.1004, ctc_loss=0.1874, over 3815699.50 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 16.0
+2024-08-25 09:23:58,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0
+2024-08-25 09:25:53,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=31941.333333333332, ans=0.125
+2024-08-25 09:27:51,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=31994.666666666668, ans=0.0039142028985507255
+2024-08-25 09:28:26,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.83 vs. limit=22.5
+2024-08-25 09:29:07,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.873e+02 2.237e+02 2.628e+02 7.664e+02, threshold=4.475e+02, percent-clipped=6.0
+2024-08-25 09:32:11,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0
+2024-08-25 09:32:27,623 INFO [train.py:1114] (2/4) Epoch 3, batch 1050, loss[loss=0.3104, simple_loss=0.3565, pruned_loss=0.09611, ctc_loss=0.1805, over 19842.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3374, pruned_loss=0.09986, ctc_loss=0.1867, over 3821755.97 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 16.0
+2024-08-25 09:32:27,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=32154.666666666668, ans=0.125
+2024-08-25 09:32:33,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0
+2024-08-25 09:32:49,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.91 vs. limit=12.0
+2024-08-25 09:33:24,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.65 vs. limit=15.0
+2024-08-25 09:33:26,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=32208.0, ans=0.125
+2024-08-25 09:33:26,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.25 vs. limit=22.5
+2024-08-25 09:36:56,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.23 vs. limit=12.0
+2024-08-25 09:41:10,309 INFO [train.py:1114] (2/4) Epoch 3, batch 1100, loss[loss=0.2756, simple_loss=0.3191, pruned_loss=0.08411, ctc_loss=0.16, over 19593.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.3368, pruned_loss=0.09921, ctc_loss=0.1858, over 3828945.65 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 16.0
+2024-08-25 09:41:42,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.93 vs. limit=12.0
+2024-08-25 09:42:35,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=32528.0, ans=0.0
+2024-08-25 09:42:47,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.41 vs. limit=22.5
+2024-08-25 09:43:23,047 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.681e+02 1.943e+02 2.357e+02 4.515e+02, threshold=3.887e+02, percent-clipped=1.0
+2024-08-25 09:45:13,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=32634.666666666668, ans=0.2
+2024-08-25 09:45:15,743 INFO [train.py:1114] (2/4) Epoch 3, batch 1150, loss[loss=0.2753, simple_loss=0.319, pruned_loss=0.0836, ctc_loss=0.161, over 19602.00 frames. ], tot_loss[loss=0.3043, simple_loss=0.3362, pruned_loss=0.09913, ctc_loss=0.1855, over 3828331.19 frames. ], batch size: 52, lr: 3.69e-02, grad_scale: 16.0
+2024-08-25 09:52:14,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=32741.333333333332, ans=0.2
+2024-08-25 09:52:17,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=32741.333333333332, ans=0.003751884057971014
+2024-08-25 09:54:46,768 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.44 vs. limit=15.0
+2024-08-25 09:55:21,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.11 vs. limit=15.0
+2024-08-25 09:55:21,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.25 vs. limit=15.0
+2024-08-25 09:55:29,632 INFO [train.py:1114] (2/4) Epoch 3, batch 1200, loss[loss=0.3172, simple_loss=0.3477, pruned_loss=0.1037, ctc_loss=0.1984, over 19838.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.3373, pruned_loss=0.09953, ctc_loss=0.1862, over 3824201.59 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 32.0
+2024-08-25 09:55:30,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=32954.666666666664, ans=0.025
+2024-08-25 09:55:41,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32954.666666666664, ans=0.1
+2024-08-25 09:56:31,120 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.705e+02 1.941e+02 2.201e+02 4.168e+02, threshold=3.882e+02, percent-clipped=1.0
+2024-08-25 09:57:41,499 INFO [train.py:1114] (2/4) Epoch 3, batch 1250, loss[loss=0.3428, simple_loss=0.3651, pruned_loss=0.1179, ctc_loss=0.212, over 19526.00 frames. ], tot_loss[loss=0.3043, simple_loss=0.337, pruned_loss=0.0988, ctc_loss=0.1848, over 3841897.57 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 32.0
+2024-08-25 09:58:19,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33328.0, ans=0.1
+2024-08-25 09:58:20,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=33328.0, ans=0.125
+2024-08-25 09:58:26,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=33328.0, ans=0.0036243478260869558
+2024-08-25 09:58:42,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=33381.333333333336, ans=0.125
+2024-08-25 09:58:56,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33434.666666666664, ans=0.1
+2024-08-25 09:58:56,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=33434.666666666664, ans=0.2
+2024-08-25 09:59:04,141 INFO [train.py:1114] (2/4) Epoch 3, batch 1300, loss[loss=0.3289, simple_loss=0.3536, pruned_loss=0.1106, ctc_loss=0.2074, over 18863.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3362, pruned_loss=0.09844, ctc_loss=0.1839, over 3846381.76 frames. ], batch size: 76, lr: 3.67e-02, grad_scale: 32.0
+2024-08-25 09:59:23,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=33541.333333333336, ans=0.0035779710144927535
+2024-08-25 09:59:46,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=33648.0, ans=0.0
+2024-08-25 09:59:47,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=33648.0, ans=0.125
+2024-08-25 09:59:48,221 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.674e+02 1.887e+02 2.172e+02 3.368e+02, threshold=3.774e+02, percent-clipped=0.0
+2024-08-25 10:00:05,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=33701.333333333336, ans=0.125
+2024-08-25 10:00:09,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=33701.333333333336, ans=0.125
+2024-08-25 10:00:14,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=33701.333333333336, ans=0.125
+2024-08-25 10:00:19,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=33701.333333333336, ans=0.003543188405797101
+2024-08-25 10:00:22,488 INFO [train.py:1114] (2/4) Epoch 3, batch 1350, loss[loss=0.2857, simple_loss=0.3324, pruned_loss=0.08732, ctc_loss=0.1607, over 19784.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3349, pruned_loss=0.09742, ctc_loss=0.1823, over 3857682.71 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 32.0
+2024-08-25 10:00:36,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33754.666666666664, ans=0.1
+2024-08-25 10:00:39,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33754.666666666664, ans=0.1
+2024-08-25 10:00:45,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33754.666666666664, ans=0.1
+2024-08-25 10:01:34,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33914.666666666664, ans=0.1
+2024-08-25 10:02:01,550 INFO [train.py:1114] (2/4) Epoch 3, batch 1400, loss[loss=0.2555, simple_loss=0.2931, pruned_loss=0.08046, ctc_loss=0.1425, over 19666.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3347, pruned_loss=0.09731, ctc_loss=0.1818, over 3864933.31 frames. ], batch size: 46, lr: 3.66e-02, grad_scale: 32.0
+2024-08-25 10:02:15,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34074.666666666664, ans=0.1
+2024-08-25 10:02:16,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=34074.666666666664, ans=0.125
+2024-08-25 10:02:27,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34074.666666666664, ans=0.125
+2024-08-25 10:02:28,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=34128.0, ans=0.0
+2024-08-25 10:02:31,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.91 vs. limit=15.0
+2024-08-25 10:02:33,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.09 vs. limit=15.0
+2024-08-25 10:02:40,372 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.87 vs. limit=22.5
+2024-08-25 10:02:45,294 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.896e+02 2.159e+02 2.528e+02 3.857e+02, threshold=4.318e+02, percent-clipped=1.0
+2024-08-25 10:03:00,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=34181.333333333336, ans=0.003438840579710144
+2024-08-25 10:03:12,609 INFO [train.py:1114] (2/4) Epoch 3, batch 1450, loss[loss=0.3395, simple_loss=0.3694, pruned_loss=0.1126, ctc_loss=0.2112, over 19668.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3352, pruned_loss=0.0974, ctc_loss=0.1822, over 3863076.85 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 32.0
+2024-08-25 10:03:20,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=34288.0, ans=0.0
+2024-08-25 10:03:23,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.96 vs. limit=6.0
+2024-08-25 10:04:06,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.35 vs. limit=15.0
+2024-08-25 10:04:21,626 INFO [train.py:1114] (2/4) Epoch 3, batch 1500, loss[loss=0.2966, simple_loss=0.3417, pruned_loss=0.09247, ctc_loss=0.1664, over 19571.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3354, pruned_loss=0.09744, ctc_loss=0.1822, over 3863439.76 frames. ], batch size: 57, lr: 3.65e-02, grad_scale: 32.0
+2024-08-25 10:04:24,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=34554.666666666664, ans=0.0033576811594202907
+2024-08-25 10:04:25,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34554.666666666664, ans=0.1
+2024-08-25 10:04:50,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34661.333333333336, ans=0.1
+2024-08-25 10:04:53,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34661.333333333336, ans=0.1
+2024-08-25 10:05:09,919 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.778e+02 1.971e+02 2.353e+02 5.678e+02, threshold=3.941e+02, percent-clipped=1.0
+2024-08-25 10:05:19,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.88 vs. limit=12.0
+2024-08-25 10:05:29,603 INFO [train.py:1114] (2/4) Epoch 3, batch 1550, loss[loss=0.3454, simple_loss=0.3692, pruned_loss=0.1183, ctc_loss=0.2128, over 19608.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3352, pruned_loss=0.09776, ctc_loss=0.1826, over 3847786.25 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 32.0
+2024-08-25 10:05:32,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=34821.333333333336, ans=0.125
+2024-08-25 10:05:55,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=34928.0, ans=0.125
+2024-08-25 10:06:06,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34981.333333333336, ans=0.1
+2024-08-25 10:06:15,346 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-25 10:06:42,358 INFO [train.py:1114] (2/4) Epoch 3, batch 1600, loss[loss=0.3042, simple_loss=0.3351, pruned_loss=0.09798, ctc_loss=0.1936, over 19818.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3345, pruned_loss=0.09755, ctc_loss=0.1824, over 3836873.53 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 32.0
+2024-08-25 10:06:44,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=35088.0, ans=0.125
+2024-08-25 10:07:47,931 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.483e+02 1.752e+02 2.032e+02 2.338e+02 4.104e+02, threshold=4.064e+02, percent-clipped=1.0
+2024-08-25 10:07:49,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=35248.0, ans=0.2
+2024-08-25 10:08:06,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=35354.666666666664, ans=0.07
+2024-08-25 10:08:06,825 INFO [train.py:1114] (2/4) Epoch 3, batch 1650, loss[loss=0.3226, simple_loss=0.3538, pruned_loss=0.1052, ctc_loss=0.2023, over 19662.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.3344, pruned_loss=0.0973, ctc_loss=0.182, over 3832747.85 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0
+2024-08-25 10:08:16,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=35408.0, ans=0.003172173913043479
+2024-08-25 10:08:20,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=35408.0, ans=0.2
+2024-08-25 10:08:29,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0
+2024-08-25 10:08:48,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=35514.666666666664, ans=0.5
+2024-08-25 10:08:50,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=35514.666666666664, ans=0.125
+2024-08-25 10:08:56,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=35568.0, ans=0.0031373913043478262
+2024-08-25 10:09:04,853 INFO [train.py:1114] (2/4) Epoch 3, batch 1700, loss[loss=0.2609, simple_loss=0.2898, pruned_loss=0.08397, ctc_loss=0.1601, over 19677.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3335, pruned_loss=0.09638, ctc_loss=0.1806, over 3846372.42 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0
+2024-08-25 10:09:19,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. limit=15.0
+2024-08-25 10:09:52,818 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.835e+02 2.022e+02 2.484e+02 3.793e+02, threshold=4.043e+02, percent-clipped=0.0
+2024-08-25 10:10:09,478 INFO [train.py:1114] (2/4) Epoch 3, batch 1750, loss[loss=0.2934, simple_loss=0.323, pruned_loss=0.09797, ctc_loss=0.1699, over 19688.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3331, pruned_loss=0.09622, ctc_loss=0.1801, over 3849674.85 frames. ], batch size: 45, lr: 3.62e-02, grad_scale: 32.0
+2024-08-25 10:10:25,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=35941.333333333336, ans=0.125
+2024-08-25 10:10:26,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=35941.333333333336, ans=0.125
+2024-08-25 10:10:27,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.14 vs. limit=22.5
+2024-08-25 10:10:27,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.10 vs. limit=10.0
+2024-08-25 10:10:40,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=35994.666666666664, ans=0.125
+2024-08-25 10:11:14,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-08-25 10:11:15,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.71 vs. limit=10.0
+2024-08-25 10:11:20,669 INFO [train.py:1114] (2/4) Epoch 3, batch 1800, loss[loss=0.2817, simple_loss=0.3363, pruned_loss=0.08143, ctc_loss=0.1607, over 19622.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3333, pruned_loss=0.09605, ctc_loss=0.1801, over 3851657.31 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 32.0
+2024-08-25 10:11:24,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=36154.666666666664, ans=0.0
+2024-08-25 10:11:24,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=36154.666666666664, ans=0.0
+2024-08-25 10:11:52,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.758e+02 2.042e+02 2.396e+02 4.902e+02, threshold=4.083e+02, percent-clipped=1.0
+2024-08-25 10:12:02,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=36368.0, ans=0.002963478260869565
+2024-08-25 10:12:25,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=36368.0, ans=0.002963478260869565
+2024-08-25 10:12:33,957 INFO [train.py:1114] (2/4) Epoch 3, batch 1850, loss[loss=0.3028, simple_loss=0.3431, pruned_loss=0.09531, ctc_loss=0.1795, over 19591.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3331, pruned_loss=0.09586, ctc_loss=0.1797, over 3855442.08 frames. ], batch size: 57, lr: 3.61e-02, grad_scale: 32.0
+2024-08-25 10:12:44,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.41 vs.
limit=15.0 +2024-08-25 10:13:00,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=36528.0, ans=0.125 +2024-08-25 10:13:01,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36528.0, ans=0.1 +2024-08-25 10:13:09,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=36528.0, ans=0.0 +2024-08-25 10:13:10,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=36528.0, ans=0.2 +2024-08-25 10:13:24,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36634.666666666664, ans=0.125 +2024-08-25 10:13:31,593 INFO [train.py:1114] (2/4) Epoch 3, batch 1900, loss[loss=0.3199, simple_loss=0.3555, pruned_loss=0.1026, ctc_loss=0.1978, over 19664.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3343, pruned_loss=0.0963, ctc_loss=0.1803, over 3860442.41 frames. ], batch size: 59, lr: 3.60e-02, grad_scale: 32.0 +2024-08-25 10:13:55,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.84 vs. limit=15.0 +2024-08-25 10:14:07,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36741.333333333336, ans=0.1 +2024-08-25 10:14:09,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.50 vs. limit=15.0 +2024-08-25 10:14:15,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 10:14:21,545 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:14:25,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 10:14:29,198 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.725e+02 1.920e+02 2.285e+02 4.448e+02, threshold=3.841e+02, percent-clipped=1.0 +2024-08-25 10:14:33,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36848.0, ans=0.125 +2024-08-25 10:14:38,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=36901.333333333336, ans=0.2 +2024-08-25 10:14:54,740 INFO [train.py:1114] (2/4) Epoch 3, batch 1950, loss[loss=0.2622, simple_loss=0.308, pruned_loss=0.07936, ctc_loss=0.1443, over 19572.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3355, pruned_loss=0.09658, ctc_loss=0.1804, over 3869401.96 frames. 
], batch size: 52, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:15:03,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36954.666666666664, ans=0.125 +2024-08-25 10:15:22,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=37008.0, ans=0.2 +2024-08-25 10:15:29,927 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:15:51,815 INFO [train.py:1114] (2/4) Epoch 3, batch 2000, loss[loss=0.2591, simple_loss=0.293, pruned_loss=0.08259, ctc_loss=0.1498, over 19680.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3368, pruned_loss=0.09769, ctc_loss=0.1823, over 3853775.13 frames. ], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:16:00,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=37274.666666666664, ans=0.125 +2024-08-25 10:16:01,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37274.666666666664, ans=0.1 +2024-08-25 10:16:01,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37274.666666666664, ans=0.0 +2024-08-25 10:16:04,531 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=12.0 +2024-08-25 10:16:06,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=37274.666666666664, ans=0.95 +2024-08-25 10:16:08,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37328.0, ans=0.0 +2024-08-25 10:16:17,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=15.0 +2024-08-25 10:16:19,098 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.904e+02 2.146e+02 2.566e+02 5.347e+02, threshold=4.293e+02, percent-clipped=2.0 +2024-08-25 10:16:21,353 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.49 vs. limit=15.0 +2024-08-25 10:16:42,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37434.666666666664, ans=0.125 +2024-08-25 10:16:42,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=37434.666666666664, ans=0.04949747468305833 +2024-08-25 10:16:44,032 INFO [train.py:1114] (2/4) Epoch 3, batch 2050, loss[loss=0.2715, simple_loss=0.3095, pruned_loss=0.08432, ctc_loss=0.1618, over 19733.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3357, pruned_loss=0.09757, ctc_loss=0.1818, over 3850119.84 frames. 
], batch size: 47, lr: 3.58e-02, grad_scale: 32.0 +2024-08-25 10:16:47,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=37488.0, ans=0.125 +2024-08-25 10:17:00,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=37488.0, ans=0.2 +2024-08-25 10:17:27,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=37648.0, ans=0.125 +2024-08-25 10:17:43,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=37701.333333333336, ans=0.125 +2024-08-25 10:17:53,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=37701.333333333336, ans=0.2 +2024-08-25 10:17:56,061 INFO [train.py:1114] (2/4) Epoch 3, batch 2100, loss[loss=0.311, simple_loss=0.3474, pruned_loss=0.09896, ctc_loss=0.1916, over 19774.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.334, pruned_loss=0.09631, ctc_loss=0.1797, over 3857857.51 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 10:18:06,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37754.666666666664, ans=0.1 +2024-08-25 10:18:17,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.89 vs. limit=10.0 +2024-08-25 10:18:39,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37808.0, ans=0.125 +2024-08-25 10:18:41,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=37808.0, ans=0.2 +2024-08-25 10:19:08,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=37861.333333333336, ans=0.0 +2024-08-25 10:19:12,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0 +2024-08-25 10:19:20,764 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.776e+02 1.971e+02 2.246e+02 3.814e+02, threshold=3.941e+02, percent-clipped=0.0 +2024-08-25 10:19:40,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=37914.666666666664, ans=0.5 +2024-08-25 10:20:09,494 INFO [train.py:1114] (2/4) Epoch 3, batch 2150, loss[loss=0.2844, simple_loss=0.3242, pruned_loss=0.08904, ctc_loss=0.1664, over 19867.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3328, pruned_loss=0.09563, ctc_loss=0.1783, over 3869079.99 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 10:20:14,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38021.333333333336, ans=0.125 +2024-08-25 10:20:54,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38181.333333333336, ans=0.125 +2024-08-25 10:21:11,141 INFO [train.py:1114] (2/4) Epoch 3, batch 2200, loss[loss=0.3043, simple_loss=0.3417, pruned_loss=0.09691, ctc_loss=0.1825, over 19555.00 frames. 
], tot_loss[loss=0.2975, simple_loss=0.3329, pruned_loss=0.09542, ctc_loss=0.1782, over 3867006.86 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:21:42,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=38341.333333333336, ans=0.125 +2024-08-25 10:21:42,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=38341.333333333336, ans=0.0 +2024-08-25 10:21:56,467 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.750e+02 1.922e+02 2.212e+02 3.187e+02, threshold=3.844e+02, percent-clipped=0.0 +2024-08-25 10:22:12,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38448.0, ans=0.125 +2024-08-25 10:22:28,993 INFO [train.py:1114] (2/4) Epoch 3, batch 2250, loss[loss=0.2701, simple_loss=0.3114, pruned_loss=0.08298, ctc_loss=0.157, over 19614.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3324, pruned_loss=0.09505, ctc_loss=0.1775, over 3866779.90 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:22:29,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=38554.666666666664, ans=0.0 +2024-08-25 10:22:32,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=38554.666666666664, ans=0.002488115942028987 +2024-08-25 10:22:33,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38554.666666666664, ans=0.1 +2024-08-25 10:23:08,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=38714.666666666664, ans=0.125 +2024-08-25 10:23:08,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=38714.666666666664, ans=0.0 +2024-08-25 10:23:09,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=38714.666666666664, ans=0.2 +2024-08-25 10:23:19,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=38714.666666666664, ans=0.2 +2024-08-25 10:23:32,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38768.0, ans=0.1 +2024-08-25 10:23:40,906 INFO [train.py:1114] (2/4) Epoch 3, batch 2300, loss[loss=0.2764, simple_loss=0.3146, pruned_loss=0.08597, ctc_loss=0.1657, over 19505.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3314, pruned_loss=0.09501, ctc_loss=0.1775, over 3860084.14 frames. 
], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:24:04,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=38928.0, ans=0.125 +2024-08-25 10:24:07,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=38928.0, ans=0.125 +2024-08-25 10:24:09,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=38928.0, ans=0.0024069565217391302 +2024-08-25 10:24:13,778 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.820e+02 2.030e+02 2.354e+02 3.970e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-25 10:24:46,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=39034.666666666664, ans=0.0 +2024-08-25 10:24:48,813 INFO [train.py:1114] (2/4) Epoch 3, batch 2350, loss[loss=0.3344, simple_loss=0.3625, pruned_loss=0.1119, ctc_loss=0.2062, over 19681.00 frames. ], tot_loss[loss=0.2959, simple_loss=0.3312, pruned_loss=0.09485, ctc_loss=0.1771, over 3862694.85 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:24:58,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.00 vs. limit=22.5 +2024-08-25 10:25:10,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=39141.333333333336, ans=0.002360579710144927 +2024-08-25 10:25:23,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=39194.666666666664, ans=0.125 +2024-08-25 10:25:34,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.09 vs. limit=15.0 +2024-08-25 10:25:52,380 INFO [train.py:1114] (2/4) Epoch 3, batch 2400, loss[loss=0.3279, simple_loss=0.3617, pruned_loss=0.1068, ctc_loss=0.2012, over 19295.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3337, pruned_loss=0.09594, ctc_loss=0.1791, over 3858731.18 frames. ], batch size: 71, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 10:25:55,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=39354.666666666664, ans=0.125 +2024-08-25 10:26:04,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.58 vs. limit=15.0 +2024-08-25 10:26:17,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.17 vs. limit=22.5 +2024-08-25 10:26:41,533 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.777e+02 2.047e+02 2.383e+02 4.291e+02, threshold=4.094e+02, percent-clipped=1.0 +2024-08-25 10:27:11,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.68 vs. limit=6.0 +2024-08-25 10:27:14,153 INFO [train.py:1114] (2/4) Epoch 3, batch 2450, loss[loss=0.4046, simple_loss=0.3818, pruned_loss=0.1548, ctc_loss=0.2945, over 12663.00 frames. ], tot_loss[loss=0.3087, simple_loss=0.3395, pruned_loss=0.1012, ctc_loss=0.1887, over 3731513.20 frames. 
], batch size: 140, lr: 3.53e-02, grad_scale: 16.0
+2024-08-25 10:27:22,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=39621.333333333336, ans=0.125
+2024-08-25 10:27:25,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.61 vs. limit=6.0
+2024-08-25 10:27:47,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.19 vs. limit=12.0
+2024-08-25 10:27:52,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=39781.333333333336, ans=0.125
+2024-08-25 10:39:24,826 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=89707, OpType=ALLREDUCE, NumelIn=745, NumelOut=745, Timeout(ms)=600000) ran for 600005 milliseconds before timing out..
+2024-08-25 10:39:24,827 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/bad-model-2.pt
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-3 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-3
new file mode 100644
index 0000000000000000000000000000000000000000..f46c1060cd83d0acf2584e325356bee8d4dc542e
--- /dev/null
+++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-25-03-46-09-3
@@ -0,0 +1,1186 @@
+2024-08-25 03:46:09,309 INFO [train.py:1182] (3/4) Training started
+2024-08-25 03:46:09,310 INFO [train.py:1192] (3/4) Device: cuda:3
+2024-08-25 03:46:09,373 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2649.int.cedar.computecanada.ca', 'IP address': '172.16.146.86'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500}
+2024-08-25 03:46:09,373 INFO [train.py:1212] (3/4) About to create model
+2024-08-25 03:46:10,409 INFO [train.py:1216] (3/4) Number of model parameters: 65805511
+2024-08-25 03:46:10,554 INFO [train.py:1231] (3/4) Using DDP
+2024-08-25 03:46:14,820 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts
+2024-08-25 03:46:14,898 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN
+2024-08-25 03:46:14,898 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment
+2024-08-25 03:46:14,898 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80
+2024-08-25 03:46:14,898 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10
+2024-08-25 03:46:14,898 INFO [asr_datamodule.py:738] (3/4) About to create train dataset
+2024-08-25 03:46:14,898 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler.
+2024-08-25 03:46:16,490 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader
+2024-08-25 03:46:16,492 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts
+2024-08-25 03:46:16,584 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts
+2024-08-25 03:46:16,612 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset
+2024-08-25 03:46:16,952 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader
+2024-08-25 03:46:16,952 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-25 03:50:49,730 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=49.34 vs. limit=7.5
+2024-08-25 03:50:50,511 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 11590MB
+2024-08-25 03:50:51,641 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 11590MB
+2024-08-25 03:51:20,158 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 11590MB
+2024-08-25 03:51:21,404 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 11590MB
+2024-08-25 03:51:42,535 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=75.11 vs. limit=7.5
+2024-08-25 03:51:43,057 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 11590MB
+2024-08-25 03:51:43,892 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=384, metric=90.53 vs.
limit=4.0 +2024-08-25 03:51:44,351 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 11590MB +2024-08-25 03:53:11,522 INFO [train.py:1114] (3/4) Epoch 1, batch 0, loss[loss=8.668, simple_loss=7.015, pruned_loss=6.859, ctc_loss=4.827, over 19817.00 frames. ], tot_loss[loss=8.668, simple_loss=7.015, pruned_loss=6.859, ctc_loss=4.827, over 19817.00 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 1.0 +2024-08-25 03:53:11,522 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 03:53:26,569 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=8.842, simple_loss=7.151, pruned_loss=6.961, ctc_loss=4.966, over 944034.00 frames. +2024-08-25 03:53:26,569 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 11984MB +2024-08-25 03:53:28,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.15 vs. limit=3.0 +2024-08-25 03:53:32,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=0.0, ans=0.5 +2024-08-25 03:53:38,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=0.0, ans=0.1 +2024-08-25 03:53:38,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=7.5 +2024-08-25 03:53:42,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=0.0, ans=0.2 +2024-08-25 03:53:42,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.59 vs. limit=7.5 +2024-08-25 03:54:08,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=0.0, ans=0.25 +2024-08-25 03:54:20,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53.333333333333336, ans=0.49333333333333335 +2024-08-25 03:54:36,706 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.008e+03 4.149e+03 4.360e+03 5.530e+03 5.553e+03, threshold=1.744e+04, percent-clipped=0.0 +2024-08-25 03:54:37,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=53.333333333333336, ans=5.033333333333333 +2024-08-25 03:54:50,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=252.23 vs. limit=7.52 +2024-08-25 03:55:20,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=106.66666666666667, ans=0.495 +2024-08-25 03:55:46,187 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.063e+03 1.598e+03 4.141e+03 5.530e+03 6.572e+03, threshold=1.656e+04, percent-clipped=0.0 +2024-08-25 03:55:46,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=44.43 vs. limit=7.54 +2024-08-25 03:55:55,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=202.79 vs. 
limit=7.54 +2024-08-25 03:57:06,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=86.83 vs. limit=7.54 +2024-08-25 03:57:07,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=108.49 vs. limit=7.54 +2024-08-25 03:57:29,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=27.35 vs. limit=5.04 +2024-08-25 04:00:12,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=213.33333333333334, ans=0.192 +2024-08-25 04:00:14,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+02 1.048e+03 1.328e+03 4.149e+03 6.572e+03, threshold=5.310e+03, percent-clipped=0.0 +2024-08-25 04:00:16,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=213.33333333333334, ans=0.49 +2024-08-25 04:00:24,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.93 vs. limit=4.085333333333334 +2024-08-25 04:00:34,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=56.57 vs. limit=7.58 +2024-08-25 04:00:38,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=266.6666666666667, ans=0.235 +2024-08-25 04:00:39,862 INFO [train.py:1114] (3/4) Epoch 1, batch 50, loss[loss=1.633, simple_loss=1.088, pruned_loss=1.246, ctc_loss=2.033, over 19710.00 frames. ], tot_loss[loss=3.75, simple_loss=2.912, pruned_loss=2.557, ctc_loss=2.878, over 845595.77 frames. ], batch size: 47, lr: 2.48e-02, grad_scale: 0.25 +2024-08-25 04:00:54,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=266.6666666666667, ans=0.4875 +2024-08-25 04:00:58,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=173.92 vs. limit=7.6 +2024-08-25 04:01:12,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=215.51 vs. limit=7.62 +2024-08-25 04:01:20,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=75.41 vs. limit=7.74 +2024-08-25 04:01:20,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=179.53 vs. limit=5.16 +2024-08-25 04:01:26,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=373.3333333333333, ans=0.0916 +2024-08-25 04:02:00,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=36.11 vs. 
limit=7.78 +2024-08-25 04:02:03,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=426.6666666666667, ans=0.29573333333333335 +2024-08-25 04:02:05,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=426.6666666666667, ans=0.48 +2024-08-25 04:02:06,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=426.6666666666667, ans=0.48 +2024-08-25 04:02:21,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=39.56 vs. limit=7.66 +2024-08-25 04:02:33,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=480.0, ans=7.86 +2024-08-25 04:02:39,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=480.0, ans=0.182 +2024-08-25 04:02:59,891 INFO [train.py:1114] (3/4) Epoch 1, batch 100, loss[loss=1.369, simple_loss=0.9627, pruned_loss=1.205, ctc_loss=1.279, over 19721.00 frames. ], tot_loss[loss=2.588, simple_loss=1.913, pruned_loss=1.868, ctc_loss=2.357, over 1499372.19 frames. ], batch size: 51, lr: 2.70e-02, grad_scale: 0.5 +2024-08-25 04:03:04,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=33.18 vs. limit=7.7 +2024-08-25 04:03:07,084 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 4.974e+02 8.674e+02 1.328e+03 6.572e+03, threshold=1.735e+03, percent-clipped=0.0 +2024-08-25 04:03:07,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=32.98 vs. limit=7.9 +2024-08-25 04:03:13,868 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=20.80 vs. limit=7.7 +2024-08-25 04:03:27,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.45 vs. limit=7.94 +2024-08-25 04:03:38,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=55.16 vs. limit=7.74 +2024-08-25 04:03:52,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=255.81 vs. limit=7.76 +2024-08-25 04:03:58,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=34.12 vs. limit=8.02 +2024-08-25 04:04:01,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=693.3333333333334, ans=0.4675 +2024-08-25 04:04:12,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=746.6666666666666, ans=5.466666666666667 +2024-08-25 04:04:22,881 INFO [train.py:1114] (3/4) Epoch 1, batch 150, loss[loss=1.176, simple_loss=0.8152, pruned_loss=1.03, ctc_loss=1.092, over 19706.00 frames. ], tot_loss[loss=2.052, simple_loss=1.495, pruned_loss=1.571, ctc_loss=1.871, over 2028131.03 frames. 
], batch size: 47, lr: 2.93e-02, grad_scale: 0.5 +2024-08-25 04:04:29,736 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=182.82 vs. limit=5.4 +2024-08-25 04:04:39,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.85 vs. limit=5.213333333333333 +2024-08-25 04:04:42,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=219.15 vs. limit=7.82 +2024-08-25 04:04:49,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=29.12 vs. limit=8.18 +2024-08-25 04:05:07,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.94 vs. limit=7.84 +2024-08-25 04:05:08,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=34.56 vs. limit=8.18 +2024-08-25 04:05:08,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=117.77 vs. limit=7.84 +2024-08-25 04:05:08,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=122.51 vs. limit=7.84 +2024-08-25 04:05:08,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=55.42 vs. limit=7.84 +2024-08-25 04:05:14,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=44.55 vs. limit=7.86 +2024-08-25 04:05:17,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=7.33 vs. limit=4.384 +2024-08-25 04:05:18,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=59.87 vs. limit=8.22 +2024-08-25 04:05:40,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=1013.3333333333334, ans=0.162 +2024-08-25 04:05:46,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=1013.3333333333334, ans=0.4525 +2024-08-25 04:05:49,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=7.88 +2024-08-25 04:05:51,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.55 vs. limit=5.253333333333333 +2024-08-25 04:05:51,810 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=38.07 vs. limit=7.88 +2024-08-25 04:05:53,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=96.93 vs. 
limit=7.9 +2024-08-25 04:05:53,923 INFO [train.py:1114] (3/4) Epoch 1, batch 200, loss[loss=1.247, simple_loss=0.8591, pruned_loss=1.003, ctc_loss=1.201, over 18388.00 frames. ], tot_loss[loss=1.762, simple_loss=1.267, pruned_loss=1.382, ctc_loss=1.624, over 2436267.97 frames. ], batch size: 85, lr: 3.15e-02, grad_scale: 1.0 +2024-08-25 04:05:56,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=1066.6666666666667, ans=0.16 +2024-08-25 04:05:57,468 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.117e+01 1.191e+02 1.554e+02 2.219e+02 5.914e+02, threshold=3.108e+02, percent-clipped=0.0 +2024-08-25 04:05:59,063 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=57.21 vs. limit=7.9 +2024-08-25 04:05:59,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.51 vs. limit=8.3 +2024-08-25 04:06:07,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1120.0, ans=0.4475 +2024-08-25 04:06:15,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.58 vs. limit=5.28 +2024-08-25 04:06:17,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=40.03 vs. limit=8.38 +2024-08-25 04:06:24,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=20.40 vs. limit=7.94 +2024-08-25 04:06:30,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=92.29 vs. limit=7.96 +2024-08-25 04:06:35,530 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.95 vs. limit=5.306666666666667 +2024-08-25 04:06:39,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.34 vs. limit=8.42 +2024-08-25 04:06:42,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=7.98 +2024-08-25 04:06:54,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.51 vs. limit=8.46 +2024-08-25 04:06:54,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=1280.0, ans=4.5120000000000005 +2024-08-25 04:06:57,356 INFO [train.py:1114] (3/4) Epoch 1, batch 250, loss[loss=1.205, simple_loss=0.8165, pruned_loss=0.9521, ctc_loss=1.187, over 19410.00 frames. ], tot_loss[loss=1.587, simple_loss=1.126, pruned_loss=1.254, ctc_loss=1.482, over 2756308.20 frames. ], batch size: 67, lr: 3.38e-02, grad_scale: 1.0 +2024-08-25 04:06:57,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=63.71 vs. 
limit=8.0 +2024-08-25 04:07:36,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=43.25 vs. limit=8.0 +2024-08-25 04:07:37,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.41 vs. limit=8.5 +2024-08-25 04:07:37,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=99.86 vs. limit=5.666666666666667 +2024-08-25 04:07:40,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=8.0 +2024-08-25 04:07:41,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1386.6666666666667, ans=0.28613333333333335 +2024-08-25 04:08:00,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1440.0, ans=0.28559999999999997 +2024-08-25 04:08:02,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1440.0, ans=0.4325 +2024-08-25 04:08:02,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1440.0, ans=0.4325 +2024-08-25 04:08:02,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.20 vs. limit=8.58 +2024-08-25 04:08:03,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=8.04 +2024-08-25 04:08:04,101 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.60 vs. limit=4.576 +2024-08-25 04:08:04,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=17.77 vs. limit=5.0 +2024-08-25 04:08:10,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.71 vs. limit=5.373333333333333 +2024-08-25 04:08:19,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=14.26 vs. limit=5.373333333333333 +2024-08-25 04:08:59,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1546.6666666666667, ans=0.2845333333333333 +2024-08-25 04:09:07,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=63.08 vs. limit=8.08 +2024-08-25 04:09:10,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=1600.0, ans=8.1 +2024-08-25 04:09:11,351 INFO [train.py:1114] (3/4) Epoch 1, batch 300, loss[loss=1.262, simple_loss=0.8488, pruned_loss=0.9775, ctc_loss=1.233, over 19532.00 frames. ], tot_loss[loss=1.471, simple_loss=1.032, pruned_loss=1.162, ctc_loss=1.39, over 3001332.80 frames. 
], batch size: 61, lr: 3.60e-02, grad_scale: 2.0 +2024-08-25 04:09:14,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.125e+01 1.367e+02 1.753e+02 2.332e+02 3.681e+02, threshold=3.505e+02, percent-clipped=6.0 +2024-08-25 04:09:20,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=51.23 vs. limit=8.1 +2024-08-25 04:09:33,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=35.28 vs. limit=8.1 +2024-08-25 04:10:25,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=1653.3333333333333, ans=0.8421333333333334 +2024-08-25 04:10:34,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=1706.6666666666667, ans=8.14 +2024-08-25 04:10:36,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=12.93 vs. limit=8.14 +2024-08-25 04:11:02,054 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=50.35 vs. limit=8.18 +2024-08-25 04:11:06,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=103.77 vs. limit=8.18 +2024-08-25 04:11:07,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=1813.3333333333333, ans=0.0592 +2024-08-25 04:11:11,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.29 vs. limit=5.906666666666666 +2024-08-25 04:11:12,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=1866.6666666666667, ans=0.13 +2024-08-25 04:11:13,109 INFO [train.py:1114] (3/4) Epoch 1, batch 350, loss[loss=1.102, simple_loss=0.7286, pruned_loss=0.8398, ctc_loss=1.103, over 19760.00 frames. ], tot_loss[loss=1.393, simple_loss=0.9665, pruned_loss=1.094, ctc_loss=1.329, over 3191955.11 frames. ], batch size: 48, lr: 3.83e-02, grad_scale: 2.0 +2024-08-25 04:11:17,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.16 vs. limit=8.9 +2024-08-25 04:11:18,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1866.6666666666667, ans=0.2813333333333333 +2024-08-25 04:11:20,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1866.6666666666667, ans=0.2813333333333333 +2024-08-25 04:11:31,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=135.82 vs. limit=8.22 +2024-08-25 04:11:35,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.98 vs. limit=8.24 +2024-08-25 04:11:38,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=29.68 vs. 
limit=8.24 +2024-08-25 04:11:39,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=50.13 vs. limit=8.24 +2024-08-25 04:11:48,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=2026.6666666666667, ans=0.8290666666666667 +2024-08-25 04:11:48,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=2026.6666666666667, ans=0.405 +2024-08-25 04:11:56,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=2026.6666666666667, ans=0.405 +2024-08-25 04:11:57,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=2026.6666666666667, ans=0.405 +2024-08-25 04:11:59,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=45.84 vs. limit=8.28 +2024-08-25 04:12:11,684 INFO [train.py:1114] (3/4) Epoch 1, batch 400, loss[loss=1.132, simple_loss=0.7609, pruned_loss=0.8206, ctc_loss=1.088, over 19503.00 frames. ], tot_loss[loss=1.33, simple_loss=0.9148, pruned_loss=1.033, ctc_loss=1.276, over 3344538.75 frames. ], batch size: 54, lr: 4.05e-02, grad_scale: 4.0 +2024-08-25 04:12:15,148 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.644e+02 2.144e+02 2.768e+02 4.713e+02, threshold=4.287e+02, percent-clipped=10.0 +2024-08-25 04:12:15,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=16.68 vs. limit=8.3 +2024-08-25 04:12:22,976 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=15.47 vs. limit=8.32 +2024-08-25 04:12:25,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.04 vs. limit=9.14 +2024-08-25 04:12:28,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=25.27 vs. limit=8.32 +2024-08-25 04:12:30,946 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=55.61 vs. limit=8.32 +2024-08-25 04:12:39,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=28.42 vs. limit=8.34 +2024-08-25 04:12:48,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=8.36 +2024-08-25 04:12:48,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=14.18 vs. limit=8.36 +2024-08-25 04:12:54,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.95 vs. limit=9.22 +2024-08-25 04:12:57,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=34.86 vs. 
limit=8.36 +2024-08-25 04:13:12,078 INFO [train.py:1114] (3/4) Epoch 1, batch 450, loss[loss=1.123, simple_loss=0.7652, pruned_loss=0.7465, ctc_loss=1.089, over 19616.00 frames. ], tot_loss[loss=1.28, simple_loss=0.8767, pruned_loss=0.972, ctc_loss=1.229, over 3453240.13 frames. ], batch size: 55, lr: 4.28e-02, grad_scale: 4.0 +2024-08-25 04:13:13,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=2400.0, ans=0.11 +2024-08-25 04:13:17,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=20.88 vs. limit=5.6 +2024-08-25 04:14:11,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2453.3333333333335, ans=0.1933333333333333 +2024-08-25 04:14:25,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2560.0, ans=0.2744 +2024-08-25 04:14:36,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=2560.0, ans=0.5 +2024-08-25 04:14:38,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=8.48 +2024-08-25 04:14:45,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2613.3333333333335, ans=0.3775 +2024-08-25 04:14:53,356 INFO [train.py:1114] (3/4) Epoch 1, batch 500, loss[loss=1.037, simple_loss=0.7217, pruned_loss=0.6281, ctc_loss=1.005, over 19686.00 frames. ], tot_loss[loss=1.217, simple_loss=0.8349, pruned_loss=0.8934, ctc_loss=1.169, over 3547250.01 frames. ], batch size: 63, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:14:53,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-08-25 04:14:59,593 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.247e+02 2.224e+02 2.884e+02 3.405e+02 7.334e+02, threshold=5.768e+02, percent-clipped=15.0 +2024-08-25 04:15:04,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=8.5 +2024-08-25 04:15:11,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=2720.0, ans=0.8048000000000001 +2024-08-25 04:15:13,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=10.17 vs. limit=8.52 +2024-08-25 04:15:19,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=2773.3333333333335, ans=0.37 +2024-08-25 04:15:23,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.77 vs. limit=9.58 +2024-08-25 04:15:45,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.69 vs. limit=8.58 +2024-08-25 04:15:52,597 INFO [train.py:1114] (3/4) Epoch 1, batch 550, loss[loss=0.9838, simple_loss=0.6925, pruned_loss=0.5695, ctc_loss=0.9427, over 19216.00 frames. 
], tot_loss[loss=1.152, simple_loss=0.7933, pruned_loss=0.8145, ctc_loss=1.109, over 3608044.28 frames. ], batch size: 71, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:15:53,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.25 vs. limit=9.7 +2024-08-25 04:16:01,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.53 vs. limit=5.733333333333333 +2024-08-25 04:16:02,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2986.6666666666665, ans=0.27013333333333334 +2024-08-25 04:16:08,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.60 vs. limit=5.1946666666666665 +2024-08-25 04:16:18,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.56 vs. limit=8.64 +2024-08-25 04:16:20,297 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.38 vs. limit=8.64 +2024-08-25 04:16:35,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=9.82 +2024-08-25 04:16:37,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.87 vs. limit=8.66 +2024-08-25 04:16:58,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=3146.6666666666665, ans=0.7898666666666667 +2024-08-25 04:17:00,427 INFO [train.py:1114] (3/4) Epoch 1, batch 600, loss[loss=0.8782, simple_loss=0.6307, pruned_loss=0.4667, ctc_loss=0.8471, over 19385.00 frames. ], tot_loss[loss=1.082, simple_loss=0.75, pruned_loss=0.7338, ctc_loss=1.042, over 3666354.70 frames. ], batch size: 67, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:17:03,774 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.809e+02 3.766e+02 4.633e+02 8.655e+02, threshold=7.532e+02, percent-clipped=12.0 +2024-08-25 04:17:08,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=3200.0, ans=7.0 +2024-08-25 04:17:10,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.72 vs. limit=5.8133333333333335 +2024-08-25 04:17:17,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=3253.3333333333335, ans=9.94 +2024-08-25 04:17:18,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.14 vs. limit=9.94 +2024-08-25 04:18:05,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=3360.0, ans=0.07400000000000001 +2024-08-25 04:18:09,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=8.94 vs. 
limit=5.84 +2024-08-25 04:18:10,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=3.504 +2024-08-25 04:18:26,491 INFO [train.py:1114] (3/4) Epoch 1, batch 650, loss[loss=0.7101, simple_loss=0.5224, pruned_loss=0.3516, ctc_loss=0.6728, over 19771.00 frames. ], tot_loss[loss=1.009, simple_loss=0.7054, pruned_loss=0.6553, ctc_loss=0.9697, over 3716874.53 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:18:28,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=3466.6666666666665, ans=0.7786666666666667 +2024-08-25 04:18:31,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3466.6666666666665, ans=0.3375 +2024-08-25 04:18:32,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.69 vs. limit=8.8 +2024-08-25 04:18:35,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=3466.6666666666665, ans=0.3375 +2024-08-25 04:18:39,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.73 vs. limit=5.88 +2024-08-25 04:18:47,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=3520.0, ans=0.7768 +2024-08-25 04:18:58,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=8.84 +2024-08-25 04:19:07,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.37 vs. limit=5.906666666666666 +2024-08-25 04:19:21,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3680.0, ans=0.3275 +2024-08-25 04:19:23,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=3680.0, ans=0.2552 +2024-08-25 04:20:32,376 INFO [train.py:1114] (3/4) Epoch 1, batch 700, loss[loss=0.7132, simple_loss=0.5258, pruned_loss=0.3529, ctc_loss=0.6647, over 19732.00 frames. ], tot_loss[loss=0.9466, simple_loss=0.6682, pruned_loss=0.5882, ctc_loss=0.9051, over 3748893.67 frames. ], batch size: 51, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:20:32,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3733.3333333333335, ans=0.325 +2024-08-25 04:20:35,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 2.600e+02 3.309e+02 4.487e+02 1.180e+03, threshold=6.619e+02, percent-clipped=3.0 +2024-08-25 04:20:40,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.32 vs. 
limit=5.933333333333334 +2024-08-25 04:20:40,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3733.3333333333335, ans=0.033333333333333326 +2024-08-25 04:20:41,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.30 vs. limit=8.9 +2024-08-25 04:20:45,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=3786.6666666666665, ans=0.3225 +2024-08-25 04:20:51,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.03 vs. limit=5.946666666666666 +2024-08-25 04:20:55,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.19 vs. limit=6.92 +2024-08-25 04:20:59,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=8.94 +2024-08-25 04:21:06,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.81 vs. limit=6.946666666666667 +2024-08-25 04:21:08,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3893.3333333333335, ans=0.26106666666666667 +2024-08-25 04:21:14,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=3893.3333333333335, ans=0.7637333333333334 +2024-08-25 04:21:19,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=3946.6666666666665, ans=0.26053333333333334 +2024-08-25 04:21:23,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=3946.6666666666665, ans=0.315 +2024-08-25 04:21:26,582 INFO [train.py:1114] (3/4) Epoch 1, batch 750, loss[loss=0.6701, simple_loss=0.5169, pruned_loss=0.3015, ctc_loss=0.5909, over 19497.00 frames. ], tot_loss[loss=0.8862, simple_loss=0.6326, pruned_loss=0.5279, ctc_loss=0.8411, over 3776162.02 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 8.0 +2024-08-25 04:21:26,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=4000.0, ans=0.3125 +2024-08-25 04:21:50,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.24 vs. limit=10.58 +2024-08-25 04:21:55,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.85 vs. 
limit=10.58 +2024-08-25 04:22:04,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=4160.0, ans=0.025 +2024-08-25 04:22:06,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=4160.0, ans=0.04933333333333333 +2024-08-25 04:22:27,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=4160.0, ans=0.037000000000000005 +2024-08-25 04:22:34,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=4213.333333333333, ans=10.66 +2024-08-25 04:22:36,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.33 vs. limit=6.053333333333333 +2024-08-25 04:22:40,639 INFO [train.py:1114] (3/4) Epoch 1, batch 800, loss[loss=0.5806, simple_loss=0.4515, pruned_loss=0.2559, ctc_loss=0.5086, over 19823.00 frames. ], tot_loss[loss=0.8308, simple_loss=0.6007, pruned_loss=0.4745, ctc_loss=0.7796, over 3797711.47 frames. ], batch size: 49, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 04:22:40,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=4266.666666666667, ans=0.7506666666666667 +2024-08-25 04:22:43,871 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.484e+02 3.479e+02 4.307e+02 9.603e+02, threshold=6.957e+02, percent-clipped=4.0 +2024-08-25 04:22:45,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=4266.666666666667, ans=7.666666666666667 +2024-08-25 04:22:51,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.49 vs. limit=10.74 +2024-08-25 04:22:52,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.02 vs. limit=9.120000000000001 +2024-08-25 04:22:56,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4320.0, ans=0.2568 +2024-08-25 04:23:11,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=4373.333333333333, ans=0.29500000000000004 +2024-08-25 04:23:16,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=4426.666666666667, ans=0.04822222222222222 +2024-08-25 04:23:17,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4426.666666666667, ans=0.2557333333333333 +2024-08-25 04:23:18,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=4426.666666666667, ans=0.035 +2024-08-25 04:23:34,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.96 vs. limit=10.86 +2024-08-25 04:23:34,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.42 vs. 
limit=9.18 +2024-08-25 04:23:42,673 INFO [train.py:1114] (3/4) Epoch 1, batch 850, loss[loss=0.6173, simple_loss=0.4879, pruned_loss=0.2674, ctc_loss=0.5202, over 19649.00 frames. ], tot_loss[loss=0.7804, simple_loss=0.5723, pruned_loss=0.4276, ctc_loss=0.7218, over 3815877.14 frames. ], batch size: 59, lr: 4.49e-02, grad_scale: 16.0 +2024-08-25 04:23:45,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=4533.333333333333, ans=0.2875 +2024-08-25 04:23:46,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=4533.333333333333, ans=6.133333333333333 +2024-08-25 04:23:50,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4533.333333333333, ans=0.25466666666666665 +2024-08-25 04:23:53,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.29 vs. limit=9.22 +2024-08-25 04:23:56,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4586.666666666667, ans=0.28500000000000003 +2024-08-25 04:24:02,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=4586.666666666667, ans=0.04755555555555556 +2024-08-25 04:24:21,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=4693.333333333333, ans=0.009849275362318841 +2024-08-25 04:24:31,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.66 vs. limit=11.06 +2024-08-25 04:24:34,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=4746.666666666667, ans=0.27749999999999997 +2024-08-25 04:24:36,277 INFO [train.py:1114] (3/4) Epoch 1, batch 900, loss[loss=0.5298, simple_loss=0.4242, pruned_loss=0.2272, ctc_loss=0.4317, over 19807.00 frames. ], tot_loss[loss=0.739, simple_loss=0.5492, pruned_loss=0.3901, ctc_loss=0.6732, over 3820209.74 frames. ], batch size: 49, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:24:39,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.433e+02 3.203e+02 4.513e+02 7.559e+02, threshold=6.406e+02, percent-clipped=2.0 +2024-08-25 04:24:50,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=4853.333333333333, ans=0.00981449275362319 +2024-08-25 04:25:17,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=4960.0, ans=0.26749999999999996 +2024-08-25 04:25:19,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=4960.0, ans=8.1 +2024-08-25 04:25:32,739 INFO [train.py:1114] (3/4) Epoch 1, batch 950, loss[loss=0.5095, simple_loss=0.416, pruned_loss=0.2052, ctc_loss=0.4199, over 19512.00 frames. ], tot_loss[loss=0.7016, simple_loss=0.5286, pruned_loss=0.3571, ctc_loss=0.6292, over 3821989.98 frames. 
], batch size: 49, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:25:37,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=5066.666666666667, ans=0.2625 +2024-08-25 04:25:40,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.03 vs. limit=6.266666666666667 +2024-08-25 04:25:50,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5120.0, ans=0.26 +2024-08-25 04:26:16,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=5226.666666666667, ans=0.7170666666666667 +2024-08-25 04:26:26,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=5280.0, ans=0.009721739130434783 +2024-08-25 04:26:32,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.18 vs. limit=5.0 +2024-08-25 04:26:33,421 INFO [train.py:1114] (3/4) Epoch 1, batch 1000, loss[loss=0.5013, simple_loss=0.4178, pruned_loss=0.1953, ctc_loss=0.4027, over 19852.00 frames. ], tot_loss[loss=0.6711, simple_loss=0.5122, pruned_loss=0.3305, ctc_loss=0.5921, over 3818261.56 frames. ], batch size: 52, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:26:36,694 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.705e+02 2.226e+02 2.758e+02 3.479e+02 9.619e+02, threshold=5.516e+02, percent-clipped=3.0 +2024-08-25 04:26:54,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5440.0, ans=0.24559999999999998 +2024-08-25 04:26:58,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=5440.0, ans=0.7096 +2024-08-25 04:27:04,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=9.56 +2024-08-25 04:27:09,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5493.333333333333, ans=0.2425 +2024-08-25 04:27:21,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5546.666666666667, ans=0.24 +2024-08-25 04:27:25,679 INFO [train.py:1114] (3/4) Epoch 1, batch 1050, loss[loss=0.5558, simple_loss=0.464, pruned_loss=0.2216, ctc_loss=0.4334, over 19830.00 frames. ], tot_loss[loss=0.6378, simple_loss=0.4942, pruned_loss=0.3035, ctc_loss=0.553, over 3824917.30 frames. ], batch size: 57, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:27:35,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=5653.333333333333, ans=0.7021333333333334 +2024-08-25 04:28:07,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.56 vs. limit=6.453333333333333 +2024-08-25 04:28:20,176 INFO [train.py:1114] (3/4) Epoch 1, batch 1100, loss[loss=0.4597, simple_loss=0.3909, pruned_loss=0.1744, ctc_loss=0.3624, over 19577.00 frames. ], tot_loss[loss=0.6097, simple_loss=0.4793, pruned_loss=0.2813, ctc_loss=0.5195, over 3832258.35 frames. 
], batch size: 52, lr: 4.48e-02, grad_scale: 16.0 +2024-08-25 04:28:23,253 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.625e+02 2.143e+02 2.593e+02 3.421e+02 4.407e+02, threshold=5.186e+02, percent-clipped=0.0 +2024-08-25 04:28:24,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=5866.666666666667, ans=0.22499999999999998 +2024-08-25 04:28:37,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=5920.0, ans=0.009582608695652174 +2024-08-25 04:28:44,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=5973.333333333333, ans=0.04177777777777778 +2024-08-25 04:28:50,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=5973.333333333333, ans=0.6909333333333334 +2024-08-25 04:28:50,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=5973.333333333333, ans=0.19026666666666667 +2024-08-25 04:29:02,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=6080.0, ans=0.21500000000000002 +2024-08-25 04:29:13,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.77 vs. limit=8.04 +2024-08-25 04:29:15,900 INFO [train.py:1114] (3/4) Epoch 1, batch 1150, loss[loss=0.4715, simple_loss=0.4058, pruned_loss=0.1792, ctc_loss=0.3588, over 19574.00 frames. ], tot_loss[loss=0.588, simple_loss=0.4681, pruned_loss=0.2642, ctc_loss=0.4929, over 3829761.48 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 16.0 +2024-08-25 04:29:21,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=9.8 +2024-08-25 04:29:25,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.48 vs. limit=12.1 +2024-08-25 04:29:30,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.45 vs. limit=6.546666666666667 +2024-08-25 04:29:51,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=6240.0, ans=0.6816 +2024-08-25 04:32:27,984 INFO [train.py:1114] (3/4) Epoch 1, batch 1200, loss[loss=0.5099, simple_loss=0.4397, pruned_loss=0.193, ctc_loss=0.3933, over 19835.00 frames. ], tot_loss[loss=0.57, simple_loss=0.4593, pruned_loss=0.2499, ctc_loss=0.4705, over 3825141.66 frames. ], batch size: 57, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:32:31,069 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 2.077e+02 2.797e+02 3.799e+02 8.339e+02, threshold=5.594e+02, percent-clipped=11.0 +2024-08-25 04:32:49,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=9.94 +2024-08-25 04:32:53,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. 
limit=9.94 +2024-08-25 04:32:56,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.96 vs. limit=8.253333333333334 +2024-08-25 04:33:01,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=6560.0, ans=0.1925 +2024-08-25 04:33:06,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=6560.0, ans=0.1925 +2024-08-25 04:33:08,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=6613.333333333333, ans=0.6685333333333334 +2024-08-25 04:33:11,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=6613.333333333333, ans=0.19 +2024-08-25 04:33:11,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=6613.333333333333, ans=0.03911111111111111 +2024-08-25 04:33:17,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.27 vs. limit=12.46 +2024-08-25 04:33:19,312 INFO [train.py:1114] (3/4) Epoch 1, batch 1250, loss[loss=0.5128, simple_loss=0.4377, pruned_loss=0.2007, ctc_loss=0.3948, over 19546.00 frames. ], tot_loss[loss=0.55, simple_loss=0.4496, pruned_loss=0.2352, ctc_loss=0.4465, over 3843098.91 frames. ], batch size: 61, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:33:44,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=6773.333333333333, ans=0.009397101449275363 +2024-08-25 04:33:49,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=6773.333333333333, ans=0.1825 +2024-08-25 04:33:51,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6826.666666666667, ans=0.23173333333333335 +2024-08-25 04:33:52,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=6826.666666666667, ans=0.009385507246376813 +2024-08-25 04:33:59,005 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.66 vs. limit=10.06 +2024-08-25 04:34:10,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=6880.0, ans=0.1775 +2024-08-25 04:34:10,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=6880.0, ans=9.3 +2024-08-25 04:34:12,497 INFO [train.py:1114] (3/4) Epoch 1, batch 1300, loss[loss=0.5159, simple_loss=0.4397, pruned_loss=0.2059, ctc_loss=0.39, over 18922.00 frames. ], tot_loss[loss=0.5331, simple_loss=0.441, pruned_loss=0.2234, ctc_loss=0.4262, over 3847791.71 frames. 
], batch size: 76, lr: 4.47e-02, grad_scale: 32.0 +2024-08-25 04:34:15,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 2.007e+02 2.492e+02 3.309e+02 5.533e+02, threshold=4.985e+02, percent-clipped=0.0 +2024-08-25 04:34:16,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=6933.333333333333, ans=0.175 +2024-08-25 04:34:26,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6986.666666666667, ans=0.23013333333333333 +2024-08-25 04:34:32,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=6986.666666666667, ans=0.1725 +2024-08-25 04:34:39,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.33 vs. limit=12.780000000000001 +2024-08-25 04:34:58,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=7093.333333333333, ans=0.16749999999999998 +2024-08-25 04:35:05,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=7146.666666666667, ans=0.16499999999999998 +2024-08-25 04:35:11,275 INFO [train.py:1114] (3/4) Epoch 1, batch 1350, loss[loss=0.412, simple_loss=0.3799, pruned_loss=0.1437, ctc_loss=0.2934, over 19754.00 frames. ], tot_loss[loss=0.5169, simple_loss=0.4332, pruned_loss=0.2124, ctc_loss=0.407, over 3858533.61 frames. ], batch size: 54, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:35:11,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=7200.0, ans=0.03666666666666667 +2024-08-25 04:35:17,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7200.0, ans=0.22799999999999998 +2024-08-25 04:35:25,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7253.333333333333, ans=0.15999999999999998 +2024-08-25 04:35:46,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=7360.0, ans=0.036000000000000004 +2024-08-25 04:36:02,099 INFO [train.py:1114] (3/4) Epoch 1, batch 1400, loss[loss=0.3892, simple_loss=0.3531, pruned_loss=0.1417, ctc_loss=0.281, over 19661.00 frames. ], tot_loss[loss=0.5029, simple_loss=0.4262, pruned_loss=0.2034, ctc_loss=0.3908, over 3865756.22 frames. ], batch size: 46, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:36:02,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.12 vs. limit=8.733333333333334 +2024-08-25 04:36:05,091 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.594e+02 1.980e+02 2.233e+02 2.820e+02 5.701e+02, threshold=4.466e+02, percent-clipped=2.0 +2024-08-25 04:36:19,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.43 vs. 
limit=13.14 +2024-08-25 04:36:25,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=7573.333333333333, ans=0.14500000000000002 +2024-08-25 04:36:30,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=7573.333333333333, ans=0.14500000000000002 +2024-08-25 04:36:48,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=7680.0, ans=0.052000000000000005 +2024-08-25 04:36:51,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7680.0, ans=0.2232 +2024-08-25 04:36:54,784 INFO [train.py:1114] (3/4) Epoch 1, batch 1450, loss[loss=0.5256, simple_loss=0.4513, pruned_loss=0.2125, ctc_loss=0.3899, over 19644.00 frames. ], tot_loss[loss=0.4917, simple_loss=0.4211, pruned_loss=0.1963, ctc_loss=0.3775, over 3863304.18 frames. ], batch size: 63, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:37:11,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=7786.666666666667, ans=0.0 +2024-08-25 04:37:31,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=7893.333333333333, ans=0.13 +2024-08-25 04:37:41,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=7946.666666666667, ans=0.1275 +2024-08-25 04:37:47,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8000.0, ans=0.0 +2024-08-25 04:37:48,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.00 vs. limit=13.5 +2024-08-25 04:37:48,637 INFO [train.py:1114] (3/4) Epoch 1, batch 1500, loss[loss=0.506, simple_loss=0.4425, pruned_loss=0.1991, ctc_loss=0.3761, over 19599.00 frames. ], tot_loss[loss=0.4828, simple_loss=0.4177, pruned_loss=0.1904, ctc_loss=0.3664, over 3863668.54 frames. ], batch size: 57, lr: 4.46e-02, grad_scale: 32.0 +2024-08-25 04:37:54,382 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.582e+02 1.987e+02 2.351e+02 3.240e+02 5.717e+02, threshold=4.702e+02, percent-clipped=4.0 +2024-08-25 04:37:59,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=10.5 +2024-08-25 04:38:01,856 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=10.52 +2024-08-25 04:38:04,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.09 vs. 
limit=13.54 +2024-08-25 04:38:10,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=8106.666666666667, ans=0.125 +2024-08-25 04:38:36,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=8160.0, ans=0.125 +2024-08-25 04:38:50,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8213.333333333334, ans=0.125 +2024-08-25 04:38:52,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.57 vs. limit=10.58 +2024-08-25 04:38:56,144 INFO [train.py:1114] (3/4) Epoch 1, batch 1550, loss[loss=0.4848, simple_loss=0.4358, pruned_loss=0.1861, ctc_loss=0.3473, over 19615.00 frames. ], tot_loss[loss=0.4729, simple_loss=0.4132, pruned_loss=0.1846, ctc_loss=0.3553, over 3847809.76 frames. ], batch size: 60, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:39:05,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8320.0, ans=0.2168 +2024-08-25 04:39:24,185 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:39:28,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=8426.666666666666, ans=0.03155555555555556 +2024-08-25 04:39:47,208 INFO [train.py:1114] (3/4) Epoch 1, batch 1600, loss[loss=0.4384, simple_loss=0.4059, pruned_loss=0.1619, ctc_loss=0.3109, over 19840.00 frames. ], tot_loss[loss=0.4642, simple_loss=0.4091, pruned_loss=0.1799, ctc_loss=0.3456, over 3837198.14 frames. ], batch size: 57, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:39:48,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.13 vs. limit=4.28 +2024-08-25 04:39:52,848 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.585e+02 2.044e+02 2.368e+02 2.950e+02 6.795e+02, threshold=4.737e+02, percent-clipped=6.0 +2024-08-25 04:40:03,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8586.666666666666, ans=0.0 +2024-08-25 04:40:08,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=8586.666666666666, ans=0.030888888888888893 +2024-08-25 04:40:15,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=4.296 +2024-08-25 04:40:17,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=8640.0, ans=0.025 +2024-08-25 04:40:31,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.27 vs. limit=10.76 +2024-08-25 04:40:42,979 INFO [train.py:1114] (3/4) Epoch 1, batch 1650, loss[loss=0.454, simple_loss=0.42, pruned_loss=0.1683, ctc_loss=0.3272, over 19670.00 frames. ], tot_loss[loss=0.4545, simple_loss=0.405, pruned_loss=0.1743, ctc_loss=0.3358, over 3834030.75 frames. 
], batch size: 59, lr: 4.45e-02, grad_scale: 32.0 +2024-08-25 04:42:03,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8853.333333333334, ans=0.21146666666666664 +2024-08-25 04:43:06,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=8906.666666666666, ans=0.125 +2024-08-25 04:43:23,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9013.333333333334, ans=0.125 +2024-08-25 04:43:28,615 INFO [train.py:1114] (3/4) Epoch 1, batch 1700, loss[loss=0.3536, simple_loss=0.3393, pruned_loss=0.1259, ctc_loss=0.2481, over 19677.00 frames. ], tot_loss[loss=0.4454, simple_loss=0.4013, pruned_loss=0.1692, ctc_loss=0.3262, over 3848340.67 frames. ], batch size: 46, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:43:31,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.509e+02 1.986e+02 2.386e+02 2.791e+02 4.935e+02, threshold=4.772e+02, percent-clipped=1.0 +2024-08-25 04:43:57,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 04:44:04,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=9226.666666666666, ans=0.125 +2024-08-25 04:45:26,300 INFO [train.py:1114] (3/4) Epoch 1, batch 1750, loss[loss=0.3672, simple_loss=0.3488, pruned_loss=0.1351, ctc_loss=0.2553, over 19672.00 frames. ], tot_loss[loss=0.4368, simple_loss=0.3973, pruned_loss=0.1649, ctc_loss=0.3179, over 3853438.43 frames. ], batch size: 45, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:45:28,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=9333.333333333334, ans=0.125 +2024-08-25 04:45:32,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9333.333333333334, ans=0.20666666666666667 +2024-08-25 04:45:58,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9493.333333333334, ans=0.20506666666666665 +2024-08-25 04:46:02,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=9493.333333333334, ans=0.125 +2024-08-25 04:46:10,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.34 vs. limit=11.08 +2024-08-25 04:46:13,427 INFO [train.py:1114] (3/4) Epoch 1, batch 1800, loss[loss=0.4291, simple_loss=0.4106, pruned_loss=0.1584, ctc_loss=0.2955, over 19613.00 frames. ], tot_loss[loss=0.4303, simple_loss=0.3948, pruned_loss=0.1616, ctc_loss=0.3112, over 3854221.20 frames. ], batch size: 55, lr: 4.44e-02, grad_scale: 32.0 +2024-08-25 04:46:16,175 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 2.025e+02 2.321e+02 2.784e+02 4.120e+02, threshold=4.643e+02, percent-clipped=0.0 +2024-08-25 04:46:21,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.76 vs. 
limit=7.4 +2024-08-25 04:46:44,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=9760.0, ans=0.04949747468305833 +2024-08-25 04:48:28,770 INFO [train.py:1114] (3/4) Epoch 1, batch 1850, loss[loss=0.4141, simple_loss=0.3927, pruned_loss=0.1543, ctc_loss=0.2957, over 19592.00 frames. ], tot_loss[loss=0.4217, simple_loss=0.3912, pruned_loss=0.1573, ctc_loss=0.3028, over 3859030.19 frames. ], batch size: 57, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:48:32,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=9866.666666666666, ans=0.125 +2024-08-25 04:48:40,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.97 vs. limit=7.48 +2024-08-25 04:48:53,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.99 vs. limit=11.24 +2024-08-25 04:49:08,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=10080.0, ans=0.125 +2024-08-25 04:49:15,870 INFO [train.py:1114] (3/4) Epoch 1, batch 1900, loss[loss=0.4109, simple_loss=0.3971, pruned_loss=0.1506, ctc_loss=0.2932, over 19655.00 frames. ], tot_loss[loss=0.4163, simple_loss=0.3897, pruned_loss=0.1548, ctc_loss=0.2975, over 3863302.01 frames. ], batch size: 59, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:49:18,611 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.464e+02 2.031e+02 2.370e+02 2.878e+02 5.610e+02, threshold=4.739e+02, percent-clipped=2.0 +2024-08-25 04:49:22,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=10133.333333333334, ans=0.125 +2024-08-25 04:49:53,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=10186.666666666666, ans=0.008655072463768116 +2024-08-25 04:50:03,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=10240.0, ans=0.125 +2024-08-25 04:50:05,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.61 vs. limit=15.18 +2024-08-25 04:50:07,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=10240.0, ans=0.125 +2024-08-25 04:50:15,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=10293.333333333334, ans=0.023777777777777773 +2024-08-25 04:50:18,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.78 vs. limit=8.117333333333335 +2024-08-25 04:50:19,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=10293.333333333334, ans=0.125 +2024-08-25 04:50:22,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.72 vs. limit=15.26 +2024-08-25 04:50:31,839 INFO [train.py:1114] (3/4) Epoch 1, batch 1950, loss[loss=0.3722, simple_loss=0.3695, pruned_loss=0.1348, ctc_loss=0.2557, over 19608.00 frames. 
], tot_loss[loss=0.4117, simple_loss=0.389, pruned_loss=0.1525, ctc_loss=0.293, over 3871559.95 frames. ], batch size: 52, lr: 4.43e-02, grad_scale: 32.0 +2024-08-25 04:50:49,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10506.666666666666, ans=0.19493333333333335 +2024-08-25 04:50:52,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=10506.666666666666, ans=0.04949747468305833 +2024-08-25 04:50:56,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=11.44 +2024-08-25 04:50:56,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=10506.666666666666, ans=0.025 +2024-08-25 04:51:06,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10560.0, ans=0.1944 +2024-08-25 04:51:20,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=15.42 +2024-08-25 04:51:21,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.10 vs. limit=15.42 +2024-08-25 04:52:05,789 INFO [train.py:1114] (3/4) Epoch 1, batch 2000, loss[loss=0.3341, simple_loss=0.3361, pruned_loss=0.1202, ctc_loss=0.2296, over 19619.00 frames. ], tot_loss[loss=0.4082, simple_loss=0.3884, pruned_loss=0.1511, ctc_loss=0.29, over 3857002.57 frames. ], batch size: 45, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:52:09,669 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.538e+02 1.861e+02 2.137e+02 2.685e+02 4.799e+02, threshold=4.274e+02, percent-clipped=1.0 +2024-08-25 04:52:14,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=10666.666666666666, ans=0.022222222222222227 +2024-08-25 04:53:21,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=10666.666666666666, ans=0.0 +2024-08-25 04:53:39,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10720.0, ans=0.19279999999999997 +2024-08-25 04:53:41,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=10720.0, ans=0.125 +2024-08-25 04:54:13,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10826.666666666666, ans=0.19173333333333334 +2024-08-25 04:54:28,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.89 vs. limit=15.66 +2024-08-25 04:54:32,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.10 vs. limit=4.64 +2024-08-25 04:54:32,954 INFO [train.py:1114] (3/4) Epoch 1, batch 2050, loss[loss=0.3395, simple_loss=0.3342, pruned_loss=0.1253, ctc_loss=0.2358, over 19731.00 frames. ], tot_loss[loss=0.4017, simple_loss=0.3853, pruned_loss=0.1483, ctc_loss=0.2844, over 3852270.95 frames. 
], batch size: 47, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:54:33,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=10933.333333333334, ans=0.125 +2024-08-25 04:54:42,072 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.86 vs. limit=11.6 +2024-08-25 04:54:48,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.38 vs. limit=11.620000000000001 +2024-08-25 04:54:49,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10986.666666666666, ans=0.19013333333333332 +2024-08-25 04:55:20,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11093.333333333334, ans=0.18906666666666666 +2024-08-25 04:55:38,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=11146.666666666666, ans=0.5098666666666667 +2024-08-25 04:55:41,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=11200.0, ans=0.125 +2024-08-25 04:55:42,291 INFO [train.py:1114] (3/4) Epoch 1, batch 2100, loss[loss=0.3761, simple_loss=0.3774, pruned_loss=0.1339, ctc_loss=0.2672, over 19783.00 frames. ], tot_loss[loss=0.3949, simple_loss=0.3821, pruned_loss=0.1452, ctc_loss=0.2787, over 3858651.34 frames. ], batch size: 54, lr: 4.42e-02, grad_scale: 32.0 +2024-08-25 04:56:33,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11200.0, ans=0.188 +2024-08-25 04:56:36,111 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 1.936e+02 2.214e+02 2.535e+02 3.885e+02, threshold=4.428e+02, percent-clipped=0.0 +2024-08-25 04:56:38,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.20 vs. 
limit=11.7 +2024-08-25 04:56:43,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=11253.333333333334, ans=0.125 +2024-08-25 04:57:03,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11253.333333333334, ans=0.18746666666666667 +2024-08-25 04:57:16,708 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 04:57:18,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=11360.0, ans=0.05 +2024-08-25 04:57:18,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=11360.0, ans=0.125 +2024-08-25 04:57:22,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11360.0, ans=0.18639999999999998 +2024-08-25 04:57:28,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=11413.333333333334, ans=0.125 +2024-08-25 04:57:35,988 INFO [train.py:1114] (3/4) Epoch 1, batch 2150, loss[loss=0.3606, simple_loss=0.3719, pruned_loss=0.1253, ctc_loss=0.247, over 19848.00 frames. ], tot_loss[loss=0.3893, simple_loss=0.3799, pruned_loss=0.1424, ctc_loss=0.2731, over 3870799.03 frames. ], batch size: 52, lr: 4.41e-02, grad_scale: 32.0 +2024-08-25 04:58:49,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=11466.666666666666, ans=0.125 +2024-08-25 04:58:55,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=4.728 +2024-08-25 04:58:57,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=11520.0, ans=0.125 +2024-08-25 04:59:32,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11680.0, ans=0.125 +2024-08-25 04:59:34,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=11680.0, ans=0.125 +2024-08-25 04:59:36,674 INFO [train.py:1114] (3/4) Epoch 1, batch 2200, loss[loss=0.4068, simple_loss=0.4019, pruned_loss=0.15, ctc_loss=0.2796, over 19589.00 frames. ], tot_loss[loss=0.3859, simple_loss=0.3786, pruned_loss=0.1408, ctc_loss=0.2702, over 3868780.41 frames. 
], batch size: 57, lr: 4.41e-02, grad_scale: 32.0 +2024-08-25 04:59:40,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.512e+02 1.884e+02 2.153e+02 2.810e+02 4.673e+02, threshold=4.307e+02, percent-clipped=1.0 +2024-08-25 05:00:04,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=11893.333333333334, ans=0.48373333333333335 +2024-08-25 05:00:06,530 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:00:32,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=11946.666666666666, ans=16.46 +2024-08-25 05:00:34,228 INFO [train.py:1114] (3/4) Epoch 1, batch 2250, loss[loss=0.407, simple_loss=0.4059, pruned_loss=0.1478, ctc_loss=0.281, over 19623.00 frames. ], tot_loss[loss=0.383, simple_loss=0.378, pruned_loss=0.1393, ctc_loss=0.2669, over 3868154.38 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:00:40,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=12000.0, ans=0.0 +2024-08-25 05:01:20,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=12106.666666666666, ans=0.125 +2024-08-25 05:01:36,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12213.333333333334, ans=0.125 +2024-08-25 05:01:39,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12213.333333333334, ans=0.17786666666666667 +2024-08-25 05:01:44,088 INFO [train.py:1114] (3/4) Epoch 1, batch 2300, loss[loss=0.3385, simple_loss=0.348, pruned_loss=0.1182, ctc_loss=0.2314, over 19494.00 frames. ], tot_loss[loss=0.3794, simple_loss=0.3756, pruned_loss=0.1378, ctc_loss=0.2636, over 3861800.96 frames. ], batch size: 49, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:01:47,655 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.534e+02 1.926e+02 2.114e+02 2.507e+02 4.625e+02, threshold=4.228e+02, percent-clipped=3.0 +2024-08-25 05:02:19,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=12426.666666666666, ans=0.025 +2024-08-25 05:02:25,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12480.0, ans=0.1752 +2024-08-25 05:02:31,685 INFO [train.py:1114] (3/4) Epoch 1, batch 2350, loss[loss=0.4219, simple_loss=0.4085, pruned_loss=0.1582, ctc_loss=0.2968, over 19678.00 frames. ], tot_loss[loss=0.3772, simple_loss=0.3748, pruned_loss=0.1367, ctc_loss=0.2613, over 3863961.48 frames. 
], batch size: 63, lr: 4.40e-02, grad_scale: 32.0 +2024-08-25 05:04:25,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=12586.666666666666, ans=0.125 +2024-08-25 05:04:25,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=12586.666666666666, ans=0.125 +2024-08-25 05:04:34,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=12640.0, ans=0.3896 +2024-08-25 05:04:37,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=12693.333333333334, ans=0.125 +2024-08-25 05:04:42,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=12693.333333333334, ans=0.025 +2024-08-25 05:04:43,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12693.333333333334, ans=0.17306666666666667 +2024-08-25 05:04:54,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=12693.333333333334, ans=0.125 +2024-08-25 05:04:59,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12746.666666666666, ans=0.125 +2024-08-25 05:05:00,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12746.666666666666, ans=0.17253333333333334 +2024-08-25 05:05:05,283 INFO [train.py:1114] (3/4) Epoch 1, batch 2400, loss[loss=0.4001, simple_loss=0.3968, pruned_loss=0.1457, ctc_loss=0.2803, over 19283.00 frames. ], tot_loss[loss=0.3785, simple_loss=0.377, pruned_loss=0.137, ctc_loss=0.2618, over 3858589.54 frames. ], batch size: 71, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:05:08,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 1.948e+02 2.252e+02 2.666e+02 4.870e+02, threshold=4.504e+02, percent-clipped=4.0 +2024-08-25 05:05:13,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=12800.0, ans=0.013333333333333336 +2024-08-25 05:05:14,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.11 vs. limit=8.2 +2024-08-25 05:05:16,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=12853.333333333334, ans=0.07 +2024-08-25 05:05:29,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-25 05:05:33,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=12960.0, ans=0.125 +2024-08-25 05:05:41,856 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.22 vs. 
limit=12.36 +2024-08-25 05:05:49,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=13013.333333333334, ans=0.025 +2024-08-25 05:05:51,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=13066.666666666666, ans=0.125 +2024-08-25 05:05:52,664 INFO [train.py:1114] (3/4) Epoch 1, batch 2450, loss[loss=0.4692, simple_loss=0.4166, pruned_loss=0.1899, ctc_loss=0.355, over 13283.00 frames. ], tot_loss[loss=0.3874, simple_loss=0.382, pruned_loss=0.1418, ctc_loss=0.2701, over 3730669.53 frames. ], batch size: 141, lr: 4.39e-02, grad_scale: 32.0 +2024-08-25 05:06:26,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13120.0, ans=0.16879999999999998 +2024-08-25 05:06:29,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=13120.0, ans=0.125 +2024-08-25 05:06:31,302 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.20 vs. limit=12.440000000000001 +2024-08-25 05:07:49,749 INFO [train.py:1114] (3/4) Epoch 2, batch 0, loss[loss=0.3595, simple_loss=0.3603, pruned_loss=0.1297, ctc_loss=0.2484, over 19819.00 frames. ], tot_loss[loss=0.3595, simple_loss=0.3603, pruned_loss=0.1297, ctc_loss=0.2484, over 19819.00 frames. ], batch size: 49, lr: 4.30e-02, grad_scale: 32.0 +2024-08-25 05:07:49,750 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 05:09:14,200 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8633, 5.1646, 5.4777, 5.3215], device='cuda:3') +2024-08-25 05:09:16,707 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.2886, simple_loss=0.3508, pruned_loss=0.0823, ctc_loss=0.1542, over 944034.00 frames. +2024-08-25 05:09:16,708 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13361MB +2024-08-25 05:09:35,637 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 1.938e+02 2.191e+02 2.677e+02 6.592e+02, threshold=4.382e+02, percent-clipped=7.0 +2024-08-25 05:09:37,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=13333.333333333334, ans=0.43333333333333335 +2024-08-25 05:09:44,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13386.666666666666, ans=0.16613333333333333 +2024-08-25 05:09:44,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 05:09:48,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13386.666666666666, ans=0.125 +2024-08-25 05:09:53,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=13440.0, ans=0.010666666666666672 +2024-08-25 05:10:10,726 INFO [train.py:1114] (3/4) Epoch 2, batch 50, loss[loss=0.3325, simple_loss=0.3427, pruned_loss=0.118, ctc_loss=0.2158, over 19694.00 frames. ], tot_loss[loss=0.3724, simple_loss=0.3755, pruned_loss=0.1336, ctc_loss=0.2548, over 844976.60 frames. 
], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:10:34,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13546.666666666666, ans=0.125 +2024-08-25 05:10:36,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=12.58 +2024-08-25 05:11:14,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=13653.333333333334, ans=0.00790144927536232 +2024-08-25 05:11:16,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=13706.666666666666, ans=0.125 +2024-08-25 05:11:42,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=9.504 +2024-08-25 05:11:47,147 INFO [train.py:1114] (3/4) Epoch 2, batch 100, loss[loss=0.3359, simple_loss=0.3499, pruned_loss=0.1183, ctc_loss=0.2136, over 19724.00 frames. ], tot_loss[loss=0.37, simple_loss=0.3751, pruned_loss=0.1324, ctc_loss=0.2505, over 1499358.22 frames. ], batch size: 51, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:11:50,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13813.333333333334, ans=0.125 +2024-08-25 05:11:53,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.35 vs. limit=12.68 +2024-08-25 05:11:53,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=12.68 +2024-08-25 05:11:56,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13866.666666666666, ans=0.125 +2024-08-25 05:12:00,812 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 1.907e+02 2.167e+02 2.481e+02 4.957e+02, threshold=4.333e+02, percent-clipped=1.0 +2024-08-25 05:12:15,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=13973.333333333334, ans=0.125 +2024-08-25 05:12:17,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=13973.333333333334, ans=0.007831884057971014 +2024-08-25 05:12:28,852 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:12:30,895 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:12:50,512 INFO [train.py:1114] (3/4) Epoch 2, batch 150, loss[loss=0.3317, simple_loss=0.338, pruned_loss=0.1182, ctc_loss=0.2226, over 19711.00 frames. ], tot_loss[loss=0.3632, simple_loss=0.3706, pruned_loss=0.129, ctc_loss=0.2446, over 2027441.50 frames. ], batch size: 47, lr: 4.29e-02, grad_scale: 32.0 +2024-08-25 05:12:54,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. 
limit=12.780000000000001 +2024-08-25 05:12:57,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=14080.0, ans=0.008 +2024-08-25 05:14:13,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=14240.0, ans=0.125 +2024-08-25 05:14:37,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=14240.0, ans=0.4016 +2024-08-25 05:14:50,524 INFO [train.py:1114] (3/4) Epoch 2, batch 200, loss[loss=0.3849, simple_loss=0.3851, pruned_loss=0.1399, ctc_loss=0.2625, over 18255.00 frames. ], tot_loss[loss=0.3579, simple_loss=0.3668, pruned_loss=0.1264, ctc_loss=0.2402, over 2435304.42 frames. ], batch size: 85, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:14:52,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=14346.666666666666, ans=0.125 +2024-08-25 05:15:14,932 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.506e+02 1.847e+02 2.110e+02 2.499e+02 4.235e+02, threshold=4.220e+02, percent-clipped=0.0 +2024-08-25 05:15:27,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=14453.333333333334, ans=0.007727536231884058 +2024-08-25 05:15:44,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=14560.0, ans=0.125 +2024-08-25 05:15:49,795 INFO [train.py:1114] (3/4) Epoch 2, batch 250, loss[loss=0.3681, simple_loss=0.3802, pruned_loss=0.1289, ctc_loss=0.2453, over 19406.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3656, pruned_loss=0.1251, ctc_loss=0.2376, over 2754696.94 frames. ], batch size: 67, lr: 4.28e-02, grad_scale: 32.0 +2024-08-25 05:16:15,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=14613.333333333334, ans=0.125 +2024-08-25 05:16:36,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=14773.333333333334, ans=0.005111111111111108 +2024-08-25 05:16:38,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=14773.333333333334, ans=0.125 +2024-08-25 05:16:41,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=14773.333333333334, ans=0.125 +2024-08-25 05:19:35,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14826.666666666666, ans=0.15173333333333333 +2024-08-25 05:19:37,926 INFO [train.py:1114] (3/4) Epoch 2, batch 300, loss[loss=0.3512, simple_loss=0.3683, pruned_loss=0.1201, ctc_loss=0.2345, over 19500.00 frames. ], tot_loss[loss=0.3525, simple_loss=0.3639, pruned_loss=0.1235, ctc_loss=0.2349, over 2999500.08 frames. ], batch size: 61, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:19:51,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=14933.333333333334, ans=0.125 +2024-08-25 05:19:51,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.96 vs. 
limit=13.1 +2024-08-25 05:19:56,630 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.593e+02 1.858e+02 2.099e+02 2.398e+02 3.801e+02, threshold=4.198e+02, percent-clipped=0.0 +2024-08-25 05:20:09,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=14986.666666666666, ans=0.004222222222222224 +2024-08-25 05:20:37,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=15093.333333333334, ans=0.37173333333333336 +2024-08-25 05:20:54,632 INFO [train.py:1114] (3/4) Epoch 2, batch 350, loss[loss=0.2961, simple_loss=0.3254, pruned_loss=0.09564, ctc_loss=0.189, over 19780.00 frames. ], tot_loss[loss=0.3515, simple_loss=0.3637, pruned_loss=0.123, ctc_loss=0.2334, over 3190079.29 frames. ], batch size: 48, lr: 4.27e-02, grad_scale: 32.0 +2024-08-25 05:21:01,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=15146.666666666666, ans=0.09853333333333333 +2024-08-25 05:21:05,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.58 vs. limit=10.058666666666667 +2024-08-25 05:21:05,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=15200.0, ans=0.003333333333333334 +2024-08-25 05:21:08,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.33 vs. limit=13.2 +2024-08-25 05:21:37,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=15306.666666666666, ans=0.125 +2024-08-25 05:21:40,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=13.24 +2024-08-25 05:21:42,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. limit=5.295999999999999 +2024-08-25 05:21:50,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=15360.0, ans=0.125 +2024-08-25 05:22:06,934 INFO [train.py:1114] (3/4) Epoch 2, batch 400, loss[loss=0.3416, simple_loss=0.3729, pruned_loss=0.1126, ctc_loss=0.2124, over 19496.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3626, pruned_loss=0.1221, ctc_loss=0.2319, over 3342521.02 frames. 
], batch size: 54, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:22:16,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15466.666666666666, ans=0.14533333333333334 +2024-08-25 05:22:18,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15466.666666666666, ans=0.125 +2024-08-25 05:22:20,585 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.895e+02 2.189e+02 2.528e+02 4.758e+02, threshold=4.379e+02, percent-clipped=2.0 +2024-08-25 05:22:23,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15466.666666666666, ans=0.14533333333333334 +2024-08-25 05:22:36,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=15520.0, ans=0.05 +2024-08-25 05:22:39,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=15573.333333333334, ans=0.125 +2024-08-25 05:23:46,271 INFO [train.py:1114] (3/4) Epoch 2, batch 450, loss[loss=0.3208, simple_loss=0.3474, pruned_loss=0.106, ctc_loss=0.2051, over 19623.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3624, pruned_loss=0.1217, ctc_loss=0.2311, over 3449587.75 frames. ], batch size: 55, lr: 4.26e-02, grad_scale: 32.0 +2024-08-25 05:23:49,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=15680.0, ans=0.09899494936611666 +2024-08-25 05:23:57,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.89 vs. limit=13.4 +2024-08-25 05:23:59,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=15733.333333333334, ans=0.007449275362318841 +2024-08-25 05:24:00,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=15733.333333333334, ans=0.025 +2024-08-25 05:24:02,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.69 vs. limit=13.4 +2024-08-25 05:24:07,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=15786.666666666666, ans=0.00743768115942029 +2024-08-25 05:24:14,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.19 vs. limit=8.96 +2024-08-25 05:24:23,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=15840.0, ans=0.025 +2024-08-25 05:24:27,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15893.333333333334, ans=0.14106666666666667 +2024-08-25 05:24:32,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=15893.333333333334, ans=0.035 +2024-08-25 05:24:37,906 INFO [train.py:1114] (3/4) Epoch 2, batch 500, loss[loss=0.3454, simple_loss=0.3657, pruned_loss=0.1177, ctc_loss=0.2243, over 19682.00 frames. 
], tot_loss[loss=0.346, simple_loss=0.3602, pruned_loss=0.1202, ctc_loss=0.2285, over 3545523.35 frames. ], batch size: 63, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:25:49,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.19 vs. limit=12.973333333333333 +2024-08-25 05:25:56,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=15946.666666666666, ans=0.125 +2024-08-25 05:26:05,504 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.383e+02 1.778e+02 2.035e+02 2.349e+02 4.286e+02, threshold=4.071e+02, percent-clipped=0.0 +2024-08-25 05:26:07,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=16000.0, ans=0.44 +2024-08-25 05:26:26,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=16106.666666666666, ans=19.58 +2024-08-25 05:26:30,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=16160.0, ans=0.025 +2024-08-25 05:26:33,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16160.0, ans=0.125 +2024-08-25 05:26:53,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.81 vs. limit=19.66 +2024-08-25 05:26:53,741 INFO [train.py:1114] (3/4) Epoch 2, batch 550, loss[loss=0.3834, simple_loss=0.3954, pruned_loss=0.1349, ctc_loss=0.2542, over 19287.00 frames. ], tot_loss[loss=0.346, simple_loss=0.3602, pruned_loss=0.1202, ctc_loss=0.2284, over 3607067.87 frames. ], batch size: 71, lr: 4.25e-02, grad_scale: 32.0 +2024-08-25 05:26:53,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=16213.333333333334, ans=0.125 +2024-08-25 05:27:29,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=16320.0, ans=0.4448 +2024-08-25 05:28:12,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 05:28:12,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=16426.666666666668, ans=0.125 +2024-08-25 05:28:19,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=10.591999999999999 +2024-08-25 05:28:19,453 INFO [train.py:1114] (3/4) Epoch 2, batch 600, loss[loss=0.3863, simple_loss=0.3903, pruned_loss=0.1387, ctc_loss=0.262, over 19377.00 frames. ], tot_loss[loss=0.3456, simple_loss=0.36, pruned_loss=0.1201, ctc_loss=0.2275, over 3665159.69 frames. ], batch size: 67, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:28:29,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.99 vs. 
limit=13.68 +2024-08-25 05:28:34,468 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.917e+02 2.183e+02 2.770e+02 8.189e+02, threshold=4.366e+02, percent-clipped=5.0 +2024-08-25 05:28:36,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16533.333333333332, ans=0.13466666666666668 +2024-08-25 05:28:36,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.08 vs. limit=19.9 +2024-08-25 05:28:55,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=16640.0, ans=0.0 +2024-08-25 05:29:00,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=13.74 +2024-08-25 05:29:02,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.35 vs. limit=19.98 +2024-08-25 05:29:13,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=16746.666666666668, ans=0.007228985507246377 +2024-08-25 05:29:14,161 INFO [train.py:1114] (3/4) Epoch 2, batch 650, loss[loss=0.3065, simple_loss=0.3374, pruned_loss=0.1004, ctc_loss=0.187, over 19757.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3581, pruned_loss=0.1186, ctc_loss=0.2246, over 3715297.44 frames. ], batch size: 54, lr: 4.24e-02, grad_scale: 32.0 +2024-08-25 05:31:20,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16800.0, ans=0.132 +2024-08-25 05:31:25,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16800.0, ans=0.0 +2024-08-25 05:31:25,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=16800.0, ans=0.125 +2024-08-25 05:32:01,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=16960.0, ans=0.3064 +2024-08-25 05:32:06,461 INFO [train.py:1114] (3/4) Epoch 2, batch 700, loss[loss=0.3402, simple_loss=0.3554, pruned_loss=0.1182, ctc_loss=0.2214, over 19719.00 frames. ], tot_loss[loss=0.3422, simple_loss=0.3582, pruned_loss=0.1183, ctc_loss=0.2237, over 3747694.77 frames. ], batch size: 51, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:32:34,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=17013.333333333332, ans=0.125 +2024-08-25 05:32:39,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=17013.333333333332, ans=0.125 +2024-08-25 05:32:39,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.51 vs. 
limit=13.879999999999999 +2024-08-25 05:32:43,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=17066.666666666668, ans=0.125 +2024-08-25 05:32:45,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=17066.666666666668, ans=0.0 +2024-08-25 05:32:47,852 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.375e+02 1.759e+02 2.005e+02 2.359e+02 5.033e+02, threshold=4.011e+02, percent-clipped=2.0 +2024-08-25 05:32:59,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.41 vs. limit=20.34 +2024-08-25 05:33:00,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=17120.0, ans=0.09899494936611666 +2024-08-25 05:33:03,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=17120.0, ans=0.007147826086956522 +2024-08-25 05:33:18,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17226.666666666668, ans=0.1277333333333333 +2024-08-25 05:33:25,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17226.666666666668, ans=0.1277333333333333 +2024-08-25 05:33:28,048 INFO [train.py:1114] (3/4) Epoch 2, batch 750, loss[loss=0.3149, simple_loss=0.3499, pruned_loss=0.1019, ctc_loss=0.1904, over 19518.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.357, pruned_loss=0.1173, ctc_loss=0.222, over 3772835.56 frames. ], batch size: 54, lr: 4.23e-02, grad_scale: 32.0 +2024-08-25 05:33:28,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=17280.0, ans=0.125 +2024-08-25 05:36:53,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17440.0, ans=0.125 +2024-08-25 05:37:24,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=17440.0, ans=0.28959999999999997 +2024-08-25 05:37:34,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=17493.333333333332, ans=0.0381866666666667 +2024-08-25 05:37:40,887 INFO [train.py:1114] (3/4) Epoch 2, batch 800, loss[loss=0.3255, simple_loss=0.3392, pruned_loss=0.1155, ctc_loss=0.2024, over 19833.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3567, pruned_loss=0.1168, ctc_loss=0.2209, over 3794352.44 frames. ], batch size: 49, lr: 4.22e-02, grad_scale: 32.0 +2024-08-25 05:37:44,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=17546.666666666668, ans=0.007055072463768117 +2024-08-25 05:38:06,531 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.845e+02 2.130e+02 2.517e+02 4.310e+02, threshold=4.259e+02, percent-clipped=1.0 +2024-08-25 05:38:15,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.01 vs. 
limit=20.740000000000002 +2024-08-25 05:38:35,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=17706.666666666668, ans=0.125 +2024-08-25 05:38:48,679 INFO [train.py:1114] (3/4) Epoch 2, batch 850, loss[loss=0.3642, simple_loss=0.3807, pruned_loss=0.1255, ctc_loss=0.2415, over 19660.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3553, pruned_loss=0.116, ctc_loss=0.2188, over 3814929.35 frames. ], batch size: 59, lr: 4.22e-02, grad_scale: 16.0 +2024-08-25 05:38:58,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=17813.333333333332, ans=0.2765333333333334 +2024-08-25 05:39:12,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.99 vs. limit=11.146666666666668 +2024-08-25 05:39:15,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=17920.0, ans=0.006973913043478261 +2024-08-25 05:39:34,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.75 vs. limit=9.506666666666668 +2024-08-25 05:39:58,492 INFO [train.py:1114] (3/4) Epoch 2, batch 900, loss[loss=0.2933, simple_loss=0.3286, pruned_loss=0.09385, ctc_loss=0.1759, over 19419.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3557, pruned_loss=0.1163, ctc_loss=0.2191, over 3818536.94 frames. ], batch size: 48, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:40:14,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=18133.333333333332, ans=0.006927536231884059 +2024-08-25 05:40:19,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.852e+02 2.189e+02 2.703e+02 9.878e+02, threshold=4.378e+02, percent-clipped=3.0 +2024-08-25 05:40:48,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.88 vs. limit=11.296 +2024-08-25 05:41:01,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=14.36 +2024-08-25 05:41:08,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18293.333333333332, ans=0.11706666666666668 +2024-08-25 05:41:14,142 INFO [train.py:1114] (3/4) Epoch 2, batch 950, loss[loss=0.3032, simple_loss=0.3333, pruned_loss=0.09873, ctc_loss=0.1894, over 19495.00 frames. ], tot_loss[loss=0.3374, simple_loss=0.3553, pruned_loss=0.116, ctc_loss=0.2183, over 3819408.56 frames. ], batch size: 49, lr: 4.21e-02, grad_scale: 8.0 +2024-08-25 05:41:18,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18346.666666666668, ans=0.11653333333333332 +2024-08-25 05:41:53,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=18506.666666666668, ans=0.07 +2024-08-25 05:42:01,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18560.0, ans=0.1144 +2024-08-25 05:42:06,454 INFO [train.py:1114] (3/4) Epoch 2, batch 1000, loss[loss=0.3371, simple_loss=0.3537, pruned_loss=0.1149, ctc_loss=0.2269, over 19839.00 frames. 
], tot_loss[loss=0.3374, simple_loss=0.3556, pruned_loss=0.116, ctc_loss=0.218, over 3815288.34 frames. ], batch size: 52, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:42:21,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=18613.333333333332, ans=0.025 +2024-08-25 05:42:37,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=11.466666666666667 +2024-08-25 05:42:41,285 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.839e+02 2.030e+02 2.416e+02 3.488e+02, threshold=4.061e+02, percent-clipped=0.0 +2024-08-25 05:43:00,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.65 vs. limit=21.58 +2024-08-25 05:43:02,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=18773.333333333332, ans=0.0 +2024-08-25 05:43:04,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.73 vs. limit=21.58 +2024-08-25 05:43:07,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=18826.666666666668, ans=0.125 +2024-08-25 05:43:09,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=18826.666666666668, ans=0.07 +2024-08-25 05:43:11,297 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.23 vs. limit=14.559999999999999 +2024-08-25 05:43:16,624 INFO [train.py:1114] (3/4) Epoch 2, batch 1050, loss[loss=0.3712, simple_loss=0.3911, pruned_loss=0.1277, ctc_loss=0.2399, over 19854.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3547, pruned_loss=0.1154, ctc_loss=0.2169, over 3821712.04 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0 +2024-08-25 05:43:18,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=18880.0, ans=0.23919999999999997 +2024-08-25 05:43:30,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=11.573333333333334 +2024-08-25 05:43:51,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18986.666666666668, ans=0.0 +2024-08-25 05:43:54,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=18986.666666666668, ans=0.025 +2024-08-25 05:44:07,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=19040.0, ans=0.23360000000000003 +2024-08-25 05:44:13,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.24 vs. limit=14.66 +2024-08-25 05:44:23,176 INFO [train.py:1114] (3/4) Epoch 2, batch 1100, loss[loss=0.3169, simple_loss=0.3415, pruned_loss=0.1049, ctc_loss=0.2059, over 19594.00 frames. ], tot_loss[loss=0.335, simple_loss=0.3541, pruned_loss=0.1148, ctc_loss=0.2158, over 3829183.81 frames. 
], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:44:43,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19200.0, ans=0.125 +2024-08-25 05:44:46,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=19200.0, ans=0.05 +2024-08-25 05:44:48,513 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.463e+02 1.777e+02 2.009e+02 2.448e+02 3.967e+02, threshold=4.019e+02, percent-clipped=0.0 +2024-08-25 05:44:53,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=19253.333333333332, ans=0.125 +2024-08-25 05:44:54,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=14.719999999999999 +2024-08-25 05:45:19,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19360.0, ans=0.10640000000000002 +2024-08-25 05:45:31,299 INFO [train.py:1114] (3/4) Epoch 2, batch 1150, loss[loss=0.3074, simple_loss=0.3424, pruned_loss=0.09781, ctc_loss=0.1923, over 19585.00 frames. ], tot_loss[loss=0.3355, simple_loss=0.3543, pruned_loss=0.1152, ctc_loss=0.2163, over 3829453.92 frames. ], batch size: 52, lr: 4.19e-02, grad_scale: 8.0 +2024-08-25 05:45:32,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=19413.333333333332, ans=14.780000000000001 +2024-08-25 05:45:42,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.63 vs. limit=14.8 +2024-08-25 05:45:46,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.64 vs. limit=14.8 +2024-08-25 05:47:20,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=19573.333333333332, ans=0.05 +2024-08-25 05:47:34,849 INFO [train.py:1114] (3/4) Epoch 2, batch 1200, loss[loss=0.3411, simple_loss=0.3685, pruned_loss=0.1133, ctc_loss=0.218, over 19835.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3549, pruned_loss=0.1152, ctc_loss=0.2162, over 3825699.81 frames. 
], batch size: 57, lr: 4.18e-02, grad_scale: 16.0 +2024-08-25 05:47:40,963 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:47:50,324 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 1.798e+02 2.208e+02 2.852e+02 1.698e+03, threshold=4.415e+02, percent-clipped=3.0 +2024-08-25 05:48:08,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19786.666666666668, ans=0.125 +2024-08-25 05:48:38,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19893.333333333332, ans=0.125 +2024-08-25 05:48:38,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=19893.333333333332, ans=0.20373333333333343 +2024-08-25 05:48:40,348 INFO [train.py:1114] (3/4) Epoch 2, batch 1250, loss[loss=0.3566, simple_loss=0.3779, pruned_loss=0.1217, ctc_loss=0.2298, over 19560.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3544, pruned_loss=0.114, ctc_loss=0.2139, over 3843546.43 frames. ], batch size: 61, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:48:59,612 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 05:49:03,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=20000.0, ans=0.025 +2024-08-25 05:49:05,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20000.0, ans=0.125 +2024-08-25 05:49:08,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=20053.333333333332, ans=0.2 +2024-08-25 05:49:10,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=20053.333333333332, ans=12.0 +2024-08-25 05:49:10,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=20053.333333333332, ans=0.125 +2024-08-25 05:49:15,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=20053.333333333332, ans=0.2 +2024-08-25 05:49:17,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20106.666666666668, ans=0.1 +2024-08-25 05:49:23,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=20106.666666666668, ans=0.0 +2024-08-25 05:49:37,280 INFO [train.py:1114] (3/4) Epoch 2, batch 1300, loss[loss=0.3729, simple_loss=0.3821, pruned_loss=0.131, ctc_loss=0.2543, over 18948.00 frames. ], tot_loss[loss=0.332, simple_loss=0.353, pruned_loss=0.113, ctc_loss=0.2122, over 3847154.83 frames. 
], batch size: 76, lr: 4.17e-02, grad_scale: 16.0 +2024-08-25 05:49:42,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=20213.333333333332, ans=0.125 +2024-08-25 05:49:52,771 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 1.771e+02 1.898e+02 2.175e+02 3.765e+02, threshold=3.796e+02, percent-clipped=0.0 +2024-08-25 05:50:21,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=20426.666666666668, ans=0.125 +2024-08-25 05:50:25,291 INFO [train.py:1114] (3/4) Epoch 2, batch 1350, loss[loss=0.3019, simple_loss=0.3358, pruned_loss=0.0973, ctc_loss=0.1838, over 19775.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3516, pruned_loss=0.1119, ctc_loss=0.2101, over 3858684.74 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:50:47,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=20533.333333333332, ans=0.125 +2024-08-25 05:50:54,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=20586.666666666668, ans=0.025 +2024-08-25 05:51:00,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=20640.0, ans=0.125 +2024-08-25 05:51:01,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=20640.0, ans=0.125 +2024-08-25 05:51:04,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.91 vs. limit=15.0 +2024-08-25 05:51:17,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=20693.333333333332, ans=0.125 +2024-08-25 05:51:19,085 INFO [train.py:1114] (3/4) Epoch 2, batch 1400, loss[loss=0.2716, simple_loss=0.3025, pruned_loss=0.08613, ctc_loss=0.171, over 19670.00 frames. ], tot_loss[loss=0.3281, simple_loss=0.3506, pruned_loss=0.111, ctc_loss=0.2085, over 3865506.63 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-25 05:51:34,337 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.473e+02 1.933e+02 2.205e+02 2.519e+02 3.569e+02, threshold=4.410e+02, percent-clipped=0.0 +2024-08-25 05:51:38,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=20853.333333333332, ans=0.0 +2024-08-25 05:51:39,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=20853.333333333332, ans=0.0 +2024-08-25 05:51:47,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=20906.666666666668, ans=0.0 +2024-08-25 05:51:47,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.60 vs. 
limit=22.5 +2024-08-25 05:51:54,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20906.666666666668, ans=0.125 +2024-08-25 05:52:07,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20960.0, ans=0.1 +2024-08-25 05:52:09,369 INFO [train.py:1114] (3/4) Epoch 2, batch 1450, loss[loss=0.3598, simple_loss=0.3762, pruned_loss=0.1239, ctc_loss=0.2388, over 19681.00 frames. ], tot_loss[loss=0.3287, simple_loss=0.3513, pruned_loss=0.1113, ctc_loss=0.2089, over 3862745.91 frames. ], batch size: 63, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:52:14,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=21013.333333333332, ans=0.025 +2024-08-25 05:52:15,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=21013.333333333332, ans=0.0 +2024-08-25 05:52:16,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21013.333333333332, ans=0.125 +2024-08-25 05:52:16,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.33 vs. limit=15.0 +2024-08-25 05:52:18,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=21066.666666666668, ans=0.0 +2024-08-25 05:52:20,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=21066.666666666668, ans=0.2 +2024-08-25 05:52:21,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=21066.666666666668, ans=0.0 +2024-08-25 05:52:27,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=21120.0, ans=0.0 +2024-08-25 05:52:40,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=21173.333333333332, ans=0.2 +2024-08-25 05:52:41,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=21173.333333333332, ans=0.09899494936611666 +2024-08-25 05:52:45,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=21173.333333333332, ans=0.0 +2024-08-25 05:52:47,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=21226.666666666668, ans=0.95 +2024-08-25 05:52:50,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-08-25 05:52:56,993 INFO [train.py:1114] (3/4) Epoch 2, batch 1500, loss[loss=0.3341, simple_loss=0.3664, pruned_loss=0.1102, ctc_loss=0.2037, over 19574.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3512, pruned_loss=0.111, ctc_loss=0.2082, over 3862326.39 frames. 
], batch size: 57, lr: 4.15e-02, grad_scale: 16.0 +2024-08-25 05:53:06,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=21333.333333333332, ans=0.0 +2024-08-25 05:53:06,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=21333.333333333332, ans=0.125 +2024-08-25 05:53:12,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21333.333333333332, ans=0.1 +2024-08-25 05:53:14,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=21333.333333333332, ans=0.125 +2024-08-25 05:53:17,237 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 1.832e+02 2.087e+02 2.558e+02 5.212e+02, threshold=4.175e+02, percent-clipped=3.0 +2024-08-25 05:53:25,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21386.666666666668, ans=0.1 +2024-08-25 05:53:34,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21386.666666666668, ans=0.1 +2024-08-25 05:53:40,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21440.0, ans=0.1 +2024-08-25 05:53:45,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=21493.333333333332, ans=0.2 +2024-08-25 05:54:05,974 INFO [train.py:1114] (3/4) Epoch 2, batch 1550, loss[loss=0.3586, simple_loss=0.3755, pruned_loss=0.1247, ctc_loss=0.2306, over 19604.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3515, pruned_loss=0.1113, ctc_loss=0.2088, over 3847724.41 frames. ], batch size: 60, lr: 4.14e-02, grad_scale: 16.0 +2024-08-25 05:54:11,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=21546.666666666668, ans=0.025 +2024-08-25 05:54:18,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=21546.666666666668, ans=0.2 +2024-08-25 05:54:22,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21600.0, ans=0.1 +2024-08-25 05:54:23,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=21600.0, ans=0.025 +2024-08-25 05:54:44,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=21653.333333333332, ans=0.125 +2024-08-25 05:54:52,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=21706.666666666668, ans=0.2 +2024-08-25 05:55:05,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=21760.0, ans=0.125 +2024-08-25 05:55:07,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=21760.0, ans=0.2 +2024-08-25 05:55:11,749 INFO [train.py:1114] (3/4) Epoch 2, batch 1600, loss[loss=0.3511, simple_loss=0.3693, pruned_loss=0.1222, ctc_loss=0.2211, over 19836.00 frames. ], tot_loss[loss=0.3284, simple_loss=0.3511, pruned_loss=0.1111, ctc_loss=0.2085, over 3835939.44 frames. 
], batch size: 57, lr: 4.13e-02, grad_scale: 32.0 +2024-08-25 05:55:17,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=21813.333333333332, ans=0.0 +2024-08-25 05:55:32,432 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.487e+02 1.812e+02 2.122e+02 2.604e+02 4.336e+02, threshold=4.244e+02, percent-clipped=2.0 +2024-08-25 05:55:42,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=21920.0, ans=0.0 +2024-08-25 05:55:54,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21973.333333333332, ans=0.125 +2024-08-25 05:56:06,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22026.666666666668, ans=0.1 +2024-08-25 05:56:12,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.18 vs. limit=22.5 +2024-08-25 05:56:13,292 INFO [train.py:1114] (3/4) Epoch 2, batch 1650, loss[loss=0.3566, simple_loss=0.3742, pruned_loss=0.1236, ctc_loss=0.2295, over 19647.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3505, pruned_loss=0.1111, ctc_loss=0.208, over 3832389.91 frames. ], batch size: 59, lr: 4.13e-02, grad_scale: 16.0 +2024-08-25 05:56:18,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.65 vs. limit=22.5 +2024-08-25 05:57:07,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=22293.333333333332, ans=0.006023188405797101 +2024-08-25 05:57:12,877 INFO [train.py:1114] (3/4) Epoch 2, batch 1700, loss[loss=0.2588, simple_loss=0.2994, pruned_loss=0.07805, ctc_loss=0.1555, over 19685.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3491, pruned_loss=0.1095, ctc_loss=0.2053, over 3846786.50 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 16.0 +2024-08-25 05:57:23,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22400.0, ans=0.0 +2024-08-25 05:57:29,329 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.387e+02 1.791e+02 2.005e+02 2.338e+02 3.555e+02, threshold=4.010e+02, percent-clipped=0.0 +2024-08-25 05:57:47,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.21 vs. limit=22.5 +2024-08-25 05:58:03,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0 +2024-08-25 05:58:34,078 INFO [train.py:1114] (3/4) Epoch 2, batch 1750, loss[loss=0.2856, simple_loss=0.3168, pruned_loss=0.09171, ctc_loss=0.1773, over 19653.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3483, pruned_loss=0.109, ctc_loss=0.2042, over 3852038.36 frames. ], batch size: 45, lr: 4.12e-02, grad_scale: 16.0 +2024-08-25 05:58:37,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.70 vs. 
limit=10.0 +2024-08-25 05:58:41,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=22613.333333333332, ans=0.005953623188405798 +2024-08-25 05:58:49,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=22666.666666666668, ans=0.125 +2024-08-25 05:58:55,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=22720.0, ans=0.0 +2024-08-25 05:59:03,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-25 05:59:07,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=22773.333333333332, ans=0.025 +2024-08-25 05:59:17,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=22826.666666666668, ans=0.05 +2024-08-25 05:59:24,716 INFO [train.py:1114] (3/4) Epoch 2, batch 1800, loss[loss=0.3253, simple_loss=0.3546, pruned_loss=0.1062, ctc_loss=0.2089, over 19625.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3482, pruned_loss=0.1089, ctc_loss=0.2045, over 3853550.70 frames. ], batch size: 55, lr: 4.11e-02, grad_scale: 16.0 +2024-08-25 05:59:28,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=22880.0, ans=0.0 +2024-08-25 05:59:29,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=22880.0, ans=0.025 +2024-08-25 05:59:39,816 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 1.812e+02 2.002e+02 2.312e+02 3.839e+02, threshold=4.004e+02, percent-clipped=0.0 +2024-08-25 05:59:47,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=22986.666666666668, ans=0.025 +2024-08-25 05:59:58,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=23040.0, ans=0.125 +2024-08-25 06:00:12,420 INFO [train.py:1114] (3/4) Epoch 2, batch 1850, loss[loss=0.3372, simple_loss=0.3619, pruned_loss=0.1138, ctc_loss=0.2125, over 19602.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3477, pruned_loss=0.1082, ctc_loss=0.2029, over 3856750.56 frames. ], batch size: 57, lr: 4.11e-02, grad_scale: 16.0 +2024-08-25 06:00:20,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=23200.0, ans=0.125 +2024-08-25 06:00:41,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=23306.666666666668, ans=0.125 +2024-08-25 06:00:51,907 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 06:00:53,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=23360.0, ans=0.125 +2024-08-25 06:00:59,794 INFO [train.py:1114] (3/4) Epoch 2, batch 1900, loss[loss=0.3312, simple_loss=0.3629, pruned_loss=0.1093, ctc_loss=0.2023, over 19659.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3477, pruned_loss=0.1077, ctc_loss=0.2019, over 3861920.01 frames. 
], batch size: 59, lr: 4.10e-02, grad_scale: 16.0 +2024-08-25 06:01:18,891 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.426e+02 1.814e+02 2.067e+02 2.451e+02 4.716e+02, threshold=4.135e+02, percent-clipped=1.0 +2024-08-25 06:01:23,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=23466.666666666668, ans=0.025 +2024-08-25 06:01:52,060 INFO [train.py:1114] (3/4) Epoch 2, batch 1950, loss[loss=0.2986, simple_loss=0.3346, pruned_loss=0.09613, ctc_loss=0.1759, over 19583.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3486, pruned_loss=0.1077, ctc_loss=0.2017, over 3870854.51 frames. ], batch size: 52, lr: 4.09e-02, grad_scale: 16.0 +2024-08-25 06:02:10,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=15.0 +2024-08-25 06:02:13,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=23786.666666666668, ans=0.125 +2024-08-25 06:02:27,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0 +2024-08-25 06:02:37,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=23893.333333333332, ans=0.125 +2024-08-25 06:02:40,712 INFO [train.py:1114] (3/4) Epoch 2, batch 2000, loss[loss=0.2849, simple_loss=0.3087, pruned_loss=0.09574, ctc_loss=0.1743, over 19653.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3491, pruned_loss=0.1083, ctc_loss=0.2029, over 3855664.61 frames. ], batch size: 45, lr: 4.09e-02, grad_scale: 32.0 +2024-08-25 06:02:47,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23946.666666666668, ans=0.125 +2024-08-25 06:02:53,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=24000.0, ans=10.0 +2024-08-25 06:02:57,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.475e+02 1.781e+02 1.996e+02 2.377e+02 5.355e+02, threshold=3.992e+02, percent-clipped=1.0 +2024-08-25 06:02:58,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=24000.0, ans=0.125 +2024-08-25 06:03:01,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24053.333333333332, ans=0.0 +2024-08-25 06:03:06,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=24053.333333333332, ans=0.125 +2024-08-25 06:03:15,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.90 vs. limit=15.0 +2024-08-25 06:03:21,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=24160.0, ans=10.0 +2024-08-25 06:03:29,345 INFO [train.py:1114] (3/4) Epoch 2, batch 2050, loss[loss=0.2822, simple_loss=0.3108, pruned_loss=0.09158, ctc_loss=0.176, over 19710.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3476, pruned_loss=0.1077, ctc_loss=0.2016, over 3851469.00 frames. 
], batch size: 47, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 06:03:34,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=20.10 vs. limit=22.5 +2024-08-25 06:04:17,639 INFO [train.py:1114] (3/4) Epoch 2, batch 2100, loss[loss=0.3173, simple_loss=0.3505, pruned_loss=0.1036, ctc_loss=0.1923, over 19771.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3461, pruned_loss=0.1067, ctc_loss=0.1997, over 3858121.59 frames. ], batch size: 54, lr: 4.08e-02, grad_scale: 32.0 +2024-08-25 06:04:20,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=24480.0, ans=0.125 +2024-08-25 06:04:26,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=24533.333333333332, ans=0.005536231884057972 +2024-08-25 06:04:30,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=24533.333333333332, ans=0.125 +2024-08-25 06:04:33,042 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.823e+02 2.012e+02 2.259e+02 3.531e+02, threshold=4.024e+02, percent-clipped=0.0 +2024-08-25 06:04:36,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24586.666666666668, ans=0.125 +2024-08-25 06:05:01,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=24746.666666666668, ans=0.2 +2024-08-25 06:05:02,139 INFO [train.py:1114] (3/4) Epoch 2, batch 2150, loss[loss=0.2963, simple_loss=0.3297, pruned_loss=0.09566, ctc_loss=0.1791, over 19847.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3449, pruned_loss=0.106, ctc_loss=0.1984, over 3870257.90 frames. ], batch size: 52, lr: 4.07e-02, grad_scale: 32.0 +2024-08-25 06:05:02,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=24746.666666666668, ans=0.2 +2024-08-25 06:05:12,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=24746.666666666668, ans=0.025 +2024-08-25 06:05:12,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.16 vs. limit=15.0 +2024-08-25 06:05:24,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=24853.333333333332, ans=0.0 +2024-08-25 06:05:33,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=24906.666666666668, ans=0.0 +2024-08-25 06:05:36,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24906.666666666668, ans=0.1 +2024-08-25 06:05:53,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=24960.0, ans=0.125 +2024-08-25 06:05:58,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.64 vs. 
limit=12.0 +2024-08-25 06:06:00,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=25013.333333333332, ans=0.035 +2024-08-25 06:06:01,436 INFO [train.py:1114] (3/4) Epoch 2, batch 2200, loss[loss=0.3251, simple_loss=0.3531, pruned_loss=0.1072, ctc_loss=0.207, over 19587.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3446, pruned_loss=0.1055, ctc_loss=0.1976, over 3869038.25 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 06:06:07,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=25013.333333333332, ans=0.0 +2024-08-25 06:06:07,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.60 vs. limit=10.0 +2024-08-25 06:06:08,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=25013.333333333332, ans=0.025 +2024-08-25 06:06:25,293 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.516e+02 1.924e+02 2.286e+02 2.709e+02 6.222e+02, threshold=4.573e+02, percent-clipped=4.0 +2024-08-25 06:06:26,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=25066.666666666668, ans=0.125 +2024-08-25 06:06:30,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=25120.0, ans=0.125 +2024-08-25 06:06:32,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=25120.0, ans=0.0 +2024-08-25 06:06:37,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=25173.333333333332, ans=0.125 +2024-08-25 06:06:48,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=25226.666666666668, ans=0.125 +2024-08-25 06:06:54,574 INFO [train.py:1114] (3/4) Epoch 2, batch 2250, loss[loss=0.3454, simple_loss=0.3701, pruned_loss=0.1159, ctc_loss=0.2225, over 19606.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3444, pruned_loss=0.1051, ctc_loss=0.1973, over 3868519.86 frames. ], batch size: 55, lr: 4.06e-02, grad_scale: 32.0 +2024-08-25 06:07:12,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=25386.666666666668, ans=0.125 +2024-08-25 06:07:19,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=25386.666666666668, ans=0.125 +2024-08-25 06:07:36,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=25493.333333333332, ans=0.125 +2024-08-25 06:07:41,120 INFO [train.py:1114] (3/4) Epoch 2, batch 2300, loss[loss=0.2759, simple_loss=0.3141, pruned_loss=0.08516, ctc_loss=0.1686, over 19513.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3434, pruned_loss=0.1051, ctc_loss=0.1969, over 3862000.82 frames. 
], batch size: 49, lr: 4.05e-02, grad_scale: 32.0 +2024-08-25 06:07:44,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=25546.666666666668, ans=0.0 +2024-08-25 06:07:53,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.87 vs. limit=15.0 +2024-08-25 06:07:58,723 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.382e+02 1.775e+02 2.049e+02 2.504e+02 6.120e+02, threshold=4.097e+02, percent-clipped=1.0 +2024-08-25 06:08:10,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25653.333333333332, ans=0.125 +2024-08-25 06:08:21,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25760.0, ans=0.0 +2024-08-25 06:08:24,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=25760.0, ans=0.125 +2024-08-25 06:08:25,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=25760.0, ans=0.025 +2024-08-25 06:08:29,084 INFO [train.py:1114] (3/4) Epoch 2, batch 2350, loss[loss=0.3097, simple_loss=0.3466, pruned_loss=0.09879, ctc_loss=0.1881, over 19655.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3432, pruned_loss=0.1051, ctc_loss=0.1966, over 3864149.94 frames. ], batch size: 63, lr: 4.04e-02, grad_scale: 32.0 +2024-08-25 06:08:34,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25813.333333333332, ans=0.1 +2024-08-25 06:08:34,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=25813.333333333332, ans=0.005257971014492754 +2024-08-25 06:08:36,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=25813.333333333332, ans=0.125 +2024-08-25 06:08:40,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=25866.666666666668, ans=0.125 +2024-08-25 06:08:58,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.44 vs. limit=22.5 +2024-08-25 06:09:11,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=25973.333333333332, ans=15.0 +2024-08-25 06:09:26,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=26026.666666666668, ans=0.0 +2024-08-25 06:09:28,483 INFO [train.py:1114] (3/4) Epoch 2, batch 2400, loss[loss=0.3349, simple_loss=0.3552, pruned_loss=0.1142, ctc_loss=0.2152, over 19243.00 frames. ], tot_loss[loss=0.3191, simple_loss=0.3461, pruned_loss=0.1063, ctc_loss=0.1987, over 3858949.39 frames. 
], batch size: 71, lr: 4.04e-02, grad_scale: 32.0 +2024-08-25 06:09:36,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=26133.333333333332, ans=0.125 +2024-08-25 06:09:43,432 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.803e+02 2.129e+02 2.459e+02 5.388e+02, threshold=4.257e+02, percent-clipped=1.0 +2024-08-25 06:09:59,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=26240.0, ans=0.035 +2024-08-25 06:10:08,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=26293.333333333332, ans=0.2 +2024-08-25 06:10:14,651 INFO [train.py:1114] (3/4) Epoch 2, batch 2450, loss[loss=0.4284, simple_loss=0.4002, pruned_loss=0.1676, ctc_loss=0.3036, over 13311.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.3517, pruned_loss=0.1116, ctc_loss=0.2085, over 3728997.81 frames. ], batch size: 140, lr: 4.03e-02, grad_scale: 32.0 +2024-08-25 06:10:25,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.72 vs. limit=22.5 +2024-08-25 06:10:27,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=26400.0, ans=0.125 +2024-08-25 06:10:36,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.02 vs. limit=22.5 +2024-08-25 06:10:46,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=26453.333333333332, ans=0.05 +2024-08-25 06:10:47,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.80 vs. limit=22.5 +2024-08-25 06:10:49,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26506.666666666668, ans=0.125 +2024-08-25 06:10:55,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=26506.666666666668, ans=0.125 +2024-08-25 06:11:37,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=26554.666666666668, ans=0.0 +2024-08-25 06:11:53,144 INFO [train.py:1114] (3/4) Epoch 3, batch 0, loss[loss=0.3271, simple_loss=0.3387, pruned_loss=0.1157, ctc_loss=0.2103, over 19796.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3387, pruned_loss=0.1157, ctc_loss=0.2103, over 19796.00 frames. ], batch size: 49, lr: 3.83e-02, grad_scale: 32.0 +2024-08-25 06:11:53,508 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-25 06:12:07,821 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2565, simple_loss=0.3309, pruned_loss=0.06653, ctc_loss=0.1228, over 944034.00 frames. 
+2024-08-25 06:12:07,822 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 13361MB +2024-08-25 06:12:20,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=26554.666666666668, ans=22.5 +2024-08-25 06:13:57,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=26608.0, ans=0.0 +2024-08-25 06:13:58,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0 +2024-08-25 06:14:54,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26661.333333333332, ans=0.1 +2024-08-25 06:15:23,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.93 vs. limit=22.5 +2024-08-25 06:16:00,085 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 1.983e+02 2.286e+02 2.644e+02 3.774e+02, threshold=4.572e+02, percent-clipped=0.0 +2024-08-25 06:17:19,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26714.666666666668, ans=0.1 +2024-08-25 06:17:28,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26714.666666666668, ans=0.1 +2024-08-25 06:17:49,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=26714.666666666668, ans=0.025 +2024-08-25 06:20:23,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.58 vs. limit=10.0 +2024-08-25 06:22:59,724 INFO [train.py:1114] (3/4) Epoch 3, batch 50, loss[loss=0.2713, simple_loss=0.316, pruned_loss=0.08206, ctc_loss=0.1564, over 19705.00 frames. ], tot_loss[loss=0.328, simple_loss=0.3518, pruned_loss=0.1106, ctc_loss=0.2078, over 844545.25 frames. ], batch size: 47, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 06:26:49,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.53 vs. limit=6.0 +2024-08-25 06:29:16,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=15.0 +2024-08-25 06:29:45,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26874.666666666668, ans=0.07 +2024-08-25 06:31:29,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26874.666666666668, ans=0.1 +2024-08-25 06:34:57,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=26874.666666666668, ans=0.005027246376811594 +2024-08-25 06:38:56,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=26981.333333333332, ans=0.125 +2024-08-25 06:44:24,688 INFO [train.py:1114] (3/4) Epoch 3, batch 100, loss[loss=0.277, simple_loss=0.3175, pruned_loss=0.08615, ctc_loss=0.1607, over 19696.00 frames. 
], tot_loss[loss=0.3245, simple_loss=0.3509, pruned_loss=0.1084, ctc_loss=0.2035, over 1498666.18 frames. ], batch size: 51, lr: 3.82e-02, grad_scale: 16.0 +2024-08-25 06:45:45,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.45 vs. limit=5.0 +2024-08-25 06:47:04,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27141.333333333332, ans=0.125 +2024-08-25 06:47:59,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=27194.666666666668, ans=0.125 +2024-08-25 06:48:15,477 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.744e+02 2.032e+02 2.291e+02 1.205e+03, threshold=4.063e+02, percent-clipped=1.0 +2024-08-25 06:50:12,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=27301.333333333332, ans=0.125 +2024-08-25 06:50:43,564 INFO [train.py:1114] (3/4) Epoch 3, batch 150, loss[loss=0.2829, simple_loss=0.3154, pruned_loss=0.09021, ctc_loss=0.1751, over 19710.00 frames. ], tot_loss[loss=0.3175, simple_loss=0.3453, pruned_loss=0.1053, ctc_loss=0.1975, over 2028092.68 frames. ], batch size: 47, lr: 3.81e-02, grad_scale: 16.0 +2024-08-25 06:51:34,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27354.666666666668, ans=0.125 +2024-08-25 06:51:51,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27354.666666666668, ans=0.125 +2024-08-25 06:52:42,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=27461.333333333332, ans=0.0 +2024-08-25 06:53:55,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=27514.666666666668, ans=0.0 +2024-08-25 06:54:14,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=27568.0, ans=0.125 +2024-08-25 06:54:31,967 INFO [train.py:1114] (3/4) Epoch 3, batch 200, loss[loss=0.3488, simple_loss=0.3682, pruned_loss=0.1203, ctc_loss=0.2218, over 18470.00 frames. ], tot_loss[loss=0.3138, simple_loss=0.3425, pruned_loss=0.1036, ctc_loss=0.1943, over 2435922.79 frames. ], batch size: 85, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 06:55:11,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.30 vs. 
limit=22.5 +2024-08-25 06:55:24,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27728.0, ans=0.1 +2024-08-25 06:56:00,391 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.731e+02 1.977e+02 2.205e+02 3.305e+02, threshold=3.953e+02, percent-clipped=0.0 +2024-08-25 06:56:04,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=27781.333333333332, ans=0.125 +2024-08-25 06:56:09,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=27781.333333333332, ans=0.125 +2024-08-25 06:56:21,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.31 vs. limit=22.5 +2024-08-25 06:56:25,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=27834.666666666668, ans=0.2 +2024-08-25 06:56:34,804 INFO [train.py:1114] (3/4) Epoch 3, batch 250, loss[loss=0.3595, simple_loss=0.3766, pruned_loss=0.1266, ctc_loss=0.223, over 19425.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3418, pruned_loss=0.1028, ctc_loss=0.1924, over 2756123.64 frames. ], batch size: 67, lr: 3.80e-02, grad_scale: 16.0 +2024-08-25 06:56:55,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=27888.0, ans=0.0 +2024-08-25 06:57:28,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=27941.333333333332, ans=0.0 +2024-08-25 06:57:29,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=27941.333333333332, ans=0.125 +2024-08-25 06:57:30,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27941.333333333332, ans=0.125 +2024-08-25 07:00:42,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=28048.0, ans=0.125 +2024-08-25 07:02:51,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=28101.333333333332, ans=0.125 +2024-08-25 07:02:51,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=28101.333333333332, ans=0.125 +2024-08-25 07:03:11,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.09 vs. limit=22.5 +2024-08-25 07:03:29,194 INFO [train.py:1114] (3/4) Epoch 3, batch 300, loss[loss=0.3515, simple_loss=0.3768, pruned_loss=0.1203, ctc_loss=0.2138, over 19505.00 frames. ], tot_loss[loss=0.3104, simple_loss=0.3405, pruned_loss=0.102, ctc_loss=0.1906, over 3001487.37 frames. 
], batch size: 61, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 07:03:34,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=28154.666666666668, ans=0.09899494936611666 +2024-08-25 07:03:35,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=28154.666666666668, ans=0.2 +2024-08-25 07:03:53,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.48 vs. limit=15.0 +2024-08-25 07:04:06,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=28208.0, ans=22.5 +2024-08-25 07:04:28,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=28261.333333333332, ans=0.0 +2024-08-25 07:04:42,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=28261.333333333332, ans=0.125 +2024-08-25 07:04:44,393 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 1.724e+02 1.968e+02 2.265e+02 3.417e+02, threshold=3.936e+02, percent-clipped=0.0 +2024-08-25 07:05:02,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=28314.666666666668, ans=0.004714202898550725 +2024-08-25 07:05:02,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=28314.666666666668, ans=0.125 +2024-08-25 07:05:16,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.18 vs. limit=15.0 +2024-08-25 07:05:49,893 INFO [train.py:1114] (3/4) Epoch 3, batch 350, loss[loss=0.2523, simple_loss=0.2951, pruned_loss=0.07581, ctc_loss=0.1446, over 19746.00 frames. ], tot_loss[loss=0.3101, simple_loss=0.3405, pruned_loss=0.1018, ctc_loss=0.1904, over 3191503.78 frames. ], batch size: 48, lr: 3.79e-02, grad_scale: 16.0 +2024-08-25 07:05:52,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=28421.333333333332, ans=0.004691014492753623 +2024-08-25 07:05:56,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=28421.333333333332, ans=22.5 +2024-08-25 07:07:24,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=21.86 vs. limit=22.5 +2024-08-25 07:07:45,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=28581.333333333332, ans=0.125 +2024-08-25 07:07:58,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=12.0 +2024-08-25 07:08:01,172 INFO [train.py:1114] (3/4) Epoch 3, batch 400, loss[loss=0.3056, simple_loss=0.3455, pruned_loss=0.09512, ctc_loss=0.1889, over 19500.00 frames. ], tot_loss[loss=0.3093, simple_loss=0.3401, pruned_loss=0.1013, ctc_loss=0.1897, over 3341862.26 frames. 
], batch size: 54, lr: 3.78e-02, grad_scale: 32.0 +2024-08-25 07:08:18,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=28741.333333333332, ans=0.025 +2024-08-25 07:08:27,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=28794.666666666668, ans=0.125 +2024-08-25 07:08:27,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=28794.666666666668, ans=0.2 +2024-08-25 07:08:42,253 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 1.762e+02 1.982e+02 2.336e+02 5.420e+02, threshold=3.963e+02, percent-clipped=2.0 +2024-08-25 07:08:54,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=28901.333333333332, ans=0.0 +2024-08-25 07:09:04,136 INFO [train.py:1114] (3/4) Epoch 3, batch 450, loss[loss=0.2703, simple_loss=0.3249, pruned_loss=0.07689, ctc_loss=0.1546, over 19607.00 frames. ], tot_loss[loss=0.3087, simple_loss=0.34, pruned_loss=0.1009, ctc_loss=0.1891, over 3449635.25 frames. ], batch size: 55, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 07:09:09,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.08 vs. limit=15.0 +2024-08-25 07:09:09,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-08-25 07:09:29,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=29061.333333333332, ans=0.2 +2024-08-25 07:09:37,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=29114.666666666668, ans=0.2 +2024-08-25 07:09:56,837 INFO [train.py:1114] (3/4) Epoch 3, batch 500, loss[loss=0.3119, simple_loss=0.3495, pruned_loss=0.1008, ctc_loss=0.1815, over 19696.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3385, pruned_loss=0.1002, ctc_loss=0.1876, over 3545514.73 frames. ], batch size: 63, lr: 3.77e-02, grad_scale: 32.0 +2024-08-25 07:10:43,382 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.753e+02 1.966e+02 2.327e+02 4.047e+02, threshold=3.932e+02, percent-clipped=2.0 +2024-08-25 07:11:10,802 INFO [train.py:1114] (3/4) Epoch 3, batch 550, loss[loss=0.3526, simple_loss=0.3709, pruned_loss=0.1203, ctc_loss=0.2341, over 19297.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.3388, pruned_loss=0.1007, ctc_loss=0.1882, over 3608395.42 frames. ], batch size: 71, lr: 3.76e-02, grad_scale: 16.0 +2024-08-25 07:11:37,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.94 vs. limit=6.0 +2024-08-25 07:11:41,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=29488.0, ans=0.125 +2024-08-25 07:12:10,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.34 vs. 
limit=22.5 +2024-08-25 07:12:15,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=29648.0, ans=0.125 +2024-08-25 07:12:41,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=29701.333333333332, ans=0.2 +2024-08-25 07:12:42,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=29701.333333333332, ans=0.125 +2024-08-25 07:12:53,160 INFO [train.py:1114] (3/4) Epoch 3, batch 600, loss[loss=0.3363, simple_loss=0.3654, pruned_loss=0.1111, ctc_loss=0.2123, over 19344.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3384, pruned_loss=0.1004, ctc_loss=0.1877, over 3665281.45 frames. ], batch size: 67, lr: 3.76e-02, grad_scale: 16.0 +2024-08-25 07:12:54,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29754.666666666668, ans=0.125 +2024-08-25 07:13:31,838 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.560e-03 +2024-08-25 07:13:37,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.450e+02 1.812e+02 2.009e+02 2.360e+02 5.731e+02, threshold=4.017e+02, percent-clipped=3.0 +2024-08-25 07:13:38,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=29914.666666666668, ans=0.004366376811594202 +2024-08-25 07:13:44,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=29914.666666666668, ans=0.2 +2024-08-25 07:13:54,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=29968.0, ans=0.125 +2024-08-25 07:14:02,713 INFO [train.py:1114] (3/4) Epoch 3, batch 650, loss[loss=0.3354, simple_loss=0.3537, pruned_loss=0.1156, ctc_loss=0.2144, over 19747.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3373, pruned_loss=0.0998, ctc_loss=0.1863, over 3715143.58 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 16.0 +2024-08-25 07:14:03,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=30021.333333333332, ans=0.125 +2024-08-25 07:14:06,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. limit=6.0 +2024-08-25 07:14:15,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=30074.666666666668, ans=0.0 +2024-08-25 07:14:15,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=30074.666666666668, ans=0.025 +2024-08-25 07:14:30,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=30128.0, ans=0.0 +2024-08-25 07:14:40,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=30181.333333333332, ans=0.2 +2024-08-25 07:14:55,123 INFO [train.py:1114] (3/4) Epoch 3, batch 700, loss[loss=0.2922, simple_loss=0.3294, pruned_loss=0.09384, ctc_loss=0.1682, over 19722.00 frames. 
], tot_loss[loss=0.3059, simple_loss=0.3377, pruned_loss=0.09979, ctc_loss=0.1862, over 3747185.94 frames. ], batch size: 51, lr: 3.74e-02, grad_scale: 16.0 +2024-08-25 07:14:59,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=30288.0, ans=0.05 +2024-08-25 07:15:03,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=30288.0, ans=0.125 +2024-08-25 07:15:07,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-08-25 07:15:22,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=30394.666666666668, ans=0.004262028985507247 +2024-08-25 07:15:28,451 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.846e+02 1.998e+02 2.505e+02 9.071e+02, threshold=3.995e+02, percent-clipped=5.0 +2024-08-25 07:15:32,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=30448.0, ans=0.125 +2024-08-25 07:15:33,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=30448.0, ans=0.0 +2024-08-25 07:15:38,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=30448.0, ans=0.2 +2024-08-25 07:15:44,190 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 07:15:48,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=30501.333333333332, ans=0.125 +2024-08-25 07:15:58,573 INFO [train.py:1114] (3/4) Epoch 3, batch 750, loss[loss=0.3039, simple_loss=0.3439, pruned_loss=0.09625, ctc_loss=0.1786, over 19504.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3365, pruned_loss=0.09903, ctc_loss=0.1847, over 3774363.66 frames. ], batch size: 54, lr: 3.74e-02, grad_scale: 16.0 +2024-08-25 07:16:00,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=30554.666666666668, ans=0.025 +2024-08-25 07:16:22,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30608.0, ans=0.125 +2024-08-25 07:16:29,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=30608.0, ans=0.0 +2024-08-25 07:16:35,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=30661.333333333332, ans=0.0 +2024-08-25 07:16:43,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.10 vs. 
limit=15.0 +2024-08-25 07:16:46,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=30661.333333333332, ans=0.025 +2024-08-25 07:18:51,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=30714.666666666668, ans=0.0 +2024-08-25 07:29:29,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=30768.0, ans=0.1 +2024-08-25 07:34:42,530 INFO [train.py:1114] (3/4) Epoch 3, batch 800, loss[loss=0.2629, simple_loss=0.3045, pruned_loss=0.08059, ctc_loss=0.1501, over 19416.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3364, pruned_loss=0.09883, ctc_loss=0.1845, over 3794555.81 frames. ], batch size: 48, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 07:37:24,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30821.333333333332, ans=0.1 +2024-08-25 08:02:40,818 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.761e+02 1.928e+02 2.233e+02 3.899e+02, threshold=3.856e+02, percent-clipped=0.0 +2024-08-25 08:03:02,322 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.20 vs. limit=22.5 +2024-08-25 08:12:59,503 INFO [train.py:1114] (3/4) Epoch 3, batch 850, loss[loss=0.3156, simple_loss=0.3509, pruned_loss=0.1017, ctc_loss=0.1922, over 19669.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3364, pruned_loss=0.09846, ctc_loss=0.1841, over 3814526.82 frames. ], batch size: 59, lr: 3.73e-02, grad_scale: 32.0 +2024-08-25 08:17:57,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=31088.0, ans=0.125 +2024-08-25 08:20:22,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=31141.333333333332, ans=0.025 +2024-08-25 08:27:20,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.64 vs. limit=22.5 +2024-08-25 08:32:09,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=31248.0, ans=0.035 +2024-08-25 08:35:54,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.29 vs. limit=5.0 +2024-08-25 08:44:41,589 INFO [train.py:1114] (3/4) Epoch 3, batch 900, loss[loss=0.2574, simple_loss=0.3001, pruned_loss=0.07837, ctc_loss=0.1451, over 19806.00 frames. ], tot_loss[loss=0.3041, simple_loss=0.3366, pruned_loss=0.09882, ctc_loss=0.1846, over 3817708.33 frames. ], batch size: 49, lr: 3.72e-02, grad_scale: 32.0 +2024-08-25 08:47:05,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.98 vs. 
limit=15.0 +2024-08-25 08:48:06,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=31354.666666666668, ans=0.125 +2024-08-25 08:48:53,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31408.0, ans=0.1 +2024-08-25 08:55:51,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=31461.333333333332, ans=0.004030144927536232 +2024-08-25 08:57:54,734 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.390e+02 1.748e+02 1.945e+02 2.250e+02 3.446e+02, threshold=3.889e+02, percent-clipped=0.0 +2024-08-25 09:00:55,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=31514.666666666668, ans=0.125 +2024-08-25 09:03:41,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=31568.0, ans=0.025 +2024-08-25 09:05:03,362 INFO [train.py:1114] (3/4) Epoch 3, batch 950, loss[loss=0.2584, simple_loss=0.298, pruned_loss=0.07992, ctc_loss=0.1474, over 19519.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3366, pruned_loss=0.09869, ctc_loss=0.1847, over 3820935.01 frames. ], batch size: 49, lr: 3.71e-02, grad_scale: 32.0 +2024-08-25 09:12:03,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=31674.666666666668, ans=0.0 +2024-08-25 09:13:22,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=31728.0, ans=0.2 +2024-08-25 09:13:22,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.55 vs. limit=15.0 +2024-08-25 09:16:00,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=31728.0, ans=0.04949747468305833 +2024-08-25 09:17:05,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=31781.333333333332, ans=0.125 +2024-08-25 09:22:18,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.12 vs. limit=12.0 +2024-08-25 09:23:03,902 INFO [train.py:1114] (3/4) Epoch 3, batch 1000, loss[loss=0.2742, simple_loss=0.3139, pruned_loss=0.08543, ctc_loss=0.1593, over 19866.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3376, pruned_loss=0.09945, ctc_loss=0.1861, over 3817290.58 frames. ], batch size: 52, lr: 3.71e-02, grad_scale: 16.0 +2024-08-25 09:26:43,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=31994.666666666668, ans=0.125 +2024-08-25 09:28:26,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=31994.666666666668, ans=0.125 +2024-08-25 09:29:07,849 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.398e+02 1.873e+02 2.237e+02 2.628e+02 7.664e+02, threshold=4.475e+02, percent-clipped=6.0 +2024-08-25 09:29:15,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.41 vs. 
limit=22.5 +2024-08-25 09:32:05,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32101.333333333332, ans=0.125 +2024-08-25 09:32:27,624 INFO [train.py:1114] (3/4) Epoch 3, batch 1050, loss[loss=0.3064, simple_loss=0.3395, pruned_loss=0.09938, ctc_loss=0.1865, over 19833.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3369, pruned_loss=0.09905, ctc_loss=0.1855, over 3823057.75 frames. ], batch size: 57, lr: 3.70e-02, grad_scale: 16.0 +2024-08-25 09:32:36,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=32154.666666666668, ans=0.0 +2024-08-25 09:35:12,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32261.333333333332, ans=0.0 +2024-08-25 09:35:13,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.32 vs. limit=22.5 +2024-08-25 09:35:19,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=32261.333333333332, ans=0.125 +2024-08-25 09:36:22,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.24 vs. limit=15.0 +2024-08-25 09:39:16,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.45 vs. limit=10.0 +2024-08-25 09:41:10,324 INFO [train.py:1114] (3/4) Epoch 3, batch 1100, loss[loss=0.2903, simple_loss=0.3231, pruned_loss=0.09291, ctc_loss=0.1793, over 19594.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3362, pruned_loss=0.0984, ctc_loss=0.1844, over 3830355.15 frames. ], batch size: 52, lr: 3.70e-02, grad_scale: 16.0 +2024-08-25 09:41:46,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=32474.666666666668, ans=0.025 +2024-08-25 09:43:23,038 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.681e+02 1.943e+02 2.357e+02 4.515e+02, threshold=3.887e+02, percent-clipped=1.0 +2024-08-25 09:43:30,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=32581.333333333332, ans=0.0037866666666666665 +2024-08-25 09:45:15,744 INFO [train.py:1114] (3/4) Epoch 3, batch 1150, loss[loss=0.2951, simple_loss=0.3286, pruned_loss=0.09564, ctc_loss=0.176, over 19570.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3359, pruned_loss=0.09833, ctc_loss=0.184, over 3829999.35 frames. 
], batch size: 52, lr: 3.69e-02, grad_scale: 16.0 +2024-08-25 09:51:56,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=32688.0, ans=0.125 +2024-08-25 09:51:56,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=32688.0, ans=0.2 +2024-08-25 09:54:00,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32741.333333333332, ans=0.1 +2024-08-25 09:55:26,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=32901.333333333336, ans=0.0 +2024-08-25 09:55:29,636 INFO [train.py:1114] (3/4) Epoch 3, batch 1200, loss[loss=0.3184, simple_loss=0.3577, pruned_loss=0.1018, ctc_loss=0.1889, over 19848.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.3364, pruned_loss=0.09844, ctc_loss=0.1847, over 3825593.25 frames. ], batch size: 57, lr: 3.68e-02, grad_scale: 32.0 +2024-08-25 09:56:08,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=33061.333333333336, ans=0.0 +2024-08-25 09:56:31,119 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.454e+02 1.705e+02 1.941e+02 2.201e+02 4.168e+02, threshold=3.882e+02, percent-clipped=1.0 +2024-08-25 09:56:48,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=33114.666666666664, ans=0.05 +2024-08-25 09:57:41,496 INFO [train.py:1114] (3/4) Epoch 3, batch 1250, loss[loss=0.3168, simple_loss=0.3518, pruned_loss=0.1032, ctc_loss=0.1884, over 19540.00 frames. ], tot_loss[loss=0.3037, simple_loss=0.3371, pruned_loss=0.0983, ctc_loss=0.1843, over 3843355.26 frames. ], batch size: 61, lr: 3.68e-02, grad_scale: 32.0 +2024-08-25 09:57:48,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33221.333333333336, ans=0.1 +2024-08-25 09:58:16,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=33274.666666666664, ans=0.0036359420289855072 +2024-08-25 09:58:17,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33274.666666666664, ans=0.1 +2024-08-25 09:58:39,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=33381.333333333336, ans=0.2 +2024-08-25 09:58:48,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=33434.666666666664, ans=0.025 +2024-08-25 09:59:04,151 INFO [train.py:1114] (3/4) Epoch 3, batch 1300, loss[loss=0.3455, simple_loss=0.3691, pruned_loss=0.1182, ctc_loss=0.2138, over 18785.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3355, pruned_loss=0.09729, ctc_loss=0.1824, over 3846466.03 frames. 
], batch size: 76, lr: 3.67e-02, grad_scale: 32.0 +2024-08-25 09:59:18,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33488.0, ans=0.0 +2024-08-25 09:59:43,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33594.666666666664, ans=0.0 +2024-08-25 09:59:48,217 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.674e+02 1.887e+02 2.172e+02 3.368e+02, threshold=3.774e+02, percent-clipped=0.0 +2024-08-25 10:00:00,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=33648.0, ans=0.125 +2024-08-25 10:00:02,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=33701.333333333336, ans=0.125 +2024-08-25 10:00:22,514 INFO [train.py:1114] (3/4) Epoch 3, batch 1350, loss[loss=0.3111, simple_loss=0.3387, pruned_loss=0.1021, ctc_loss=0.1982, over 19785.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3342, pruned_loss=0.09649, ctc_loss=0.1808, over 3858148.22 frames. ], batch size: 54, lr: 3.67e-02, grad_scale: 32.0 +2024-08-25 10:00:33,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=33754.666666666664, ans=0.0 +2024-08-25 10:00:33,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=33754.666666666664, ans=0.0 +2024-08-25 10:01:14,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.09 vs. limit=15.0 +2024-08-25 10:01:16,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=33861.333333333336, ans=0.003508405797101449 +2024-08-25 10:01:50,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=33968.0, ans=0.003485217391304348 +2024-08-25 10:02:01,546 INFO [train.py:1114] (3/4) Epoch 3, batch 1400, loss[loss=0.2752, simple_loss=0.3091, pruned_loss=0.08777, ctc_loss=0.1646, over 19685.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3339, pruned_loss=0.09615, ctc_loss=0.18, over 3865726.13 frames. 
], batch size: 46, lr: 3.66e-02, grad_scale: 32.0 +2024-08-25 10:02:10,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=34021.333333333336, ans=0.125 +2024-08-25 10:02:12,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=34021.333333333336, ans=0.125 +2024-08-25 10:02:13,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=34021.333333333336, ans=0.125 +2024-08-25 10:02:21,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=34074.666666666664, ans=0.04949747468305833 +2024-08-25 10:02:22,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=34074.666666666664, ans=0.125 +2024-08-25 10:02:33,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=34128.0, ans=0.125 +2024-08-25 10:02:42,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=34128.0, ans=15.0 +2024-08-25 10:02:45,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.503e+02 1.896e+02 2.159e+02 2.528e+02 3.857e+02, threshold=4.318e+02, percent-clipped=1.0 +2024-08-25 10:02:55,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-08-25 10:02:55,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=34181.333333333336, ans=0.125 +2024-08-25 10:02:56,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=34181.333333333336, ans=0.125 +2024-08-25 10:03:12,602 INFO [train.py:1114] (3/4) Epoch 3, batch 1450, loss[loss=0.3312, simple_loss=0.357, pruned_loss=0.1116, ctc_loss=0.2055, over 19664.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3353, pruned_loss=0.09711, ctc_loss=0.1817, over 3863914.65 frames. ], batch size: 63, lr: 3.65e-02, grad_scale: 32.0 +2024-08-25 10:03:15,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34288.0, ans=0.1 +2024-08-25 10:03:22,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0 +2024-08-25 10:03:39,164 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.71 vs. limit=22.5 +2024-08-25 10:03:47,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=34394.666666666664, ans=0.025 +2024-08-25 10:03:57,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.08 vs. limit=12.0 +2024-08-25 10:04:16,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.54 vs. 
limit=22.5 +2024-08-25 10:04:19,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=34501.333333333336, ans=0.95 +2024-08-25 10:04:20,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=34501.333333333336, ans=0.025 +2024-08-25 10:04:20,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=34554.666666666664, ans=0.125 +2024-08-25 10:04:21,642 INFO [train.py:1114] (3/4) Epoch 3, batch 1500, loss[loss=0.3069, simple_loss=0.3416, pruned_loss=0.09847, ctc_loss=0.1883, over 19574.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3356, pruned_loss=0.09689, ctc_loss=0.1816, over 3864371.48 frames. ], batch size: 57, lr: 3.65e-02, grad_scale: 32.0 +2024-08-25 10:04:36,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=34608.0, ans=0.0033460869565217393 +2024-08-25 10:04:44,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34661.333333333336, ans=0.1 +2024-08-25 10:04:45,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=34661.333333333336, ans=0.2 +2024-08-25 10:05:09,918 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.778e+02 1.971e+02 2.353e+02 5.678e+02, threshold=3.941e+02, percent-clipped=1.0 +2024-08-25 10:05:29,629 INFO [train.py:1114] (3/4) Epoch 3, batch 1550, loss[loss=0.3223, simple_loss=0.3499, pruned_loss=0.1092, ctc_loss=0.1904, over 19610.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3357, pruned_loss=0.09751, ctc_loss=0.1823, over 3849402.66 frames. ], batch size: 60, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 10:05:53,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=34928.0, ans=0.125 +2024-08-25 10:05:56,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=34928.0, ans=0.2 +2024-08-25 10:06:19,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=35034.666666666664, ans=0.125 +2024-08-25 10:06:40,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=35034.666666666664, ans=0.0032533333333333346 +2024-08-25 10:06:42,364 INFO [train.py:1114] (3/4) Epoch 3, batch 1600, loss[loss=0.3054, simple_loss=0.3421, pruned_loss=0.09786, ctc_loss=0.1823, over 19838.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3356, pruned_loss=0.09793, ctc_loss=0.1829, over 3838381.03 frames. ], batch size: 57, lr: 3.64e-02, grad_scale: 32.0 +2024-08-25 10:07:10,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.48 vs. 
limit=15.0 +2024-08-25 10:07:28,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=35194.666666666664, ans=0.125 +2024-08-25 10:07:28,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=35248.0, ans=0.0032069565217391306 +2024-08-25 10:07:47,935 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.483e+02 1.752e+02 2.032e+02 2.338e+02 4.104e+02, threshold=4.064e+02, percent-clipped=1.0 +2024-08-25 10:07:50,904 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:08:00,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=35301.333333333336, ans=0.2 +2024-08-25 10:08:06,824 INFO [train.py:1114] (3/4) Epoch 3, batch 1650, loss[loss=0.2938, simple_loss=0.3321, pruned_loss=0.09223, ctc_loss=0.1779, over 19671.00 frames. ], tot_loss[loss=0.3021, simple_loss=0.3355, pruned_loss=0.09786, ctc_loss=0.1827, over 3833842.25 frames. ], batch size: 59, lr: 3.63e-02, grad_scale: 32.0 +2024-08-25 10:08:12,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=35354.666666666664, ans=0.125 +2024-08-25 10:08:15,632 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:08:20,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=35408.0, ans=0.025 +2024-08-25 10:08:45,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=35514.666666666664, ans=0.0 +2024-08-25 10:08:50,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=35514.666666666664, ans=0.0 +2024-08-25 10:08:57,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.45 vs. limit=10.0 +2024-08-25 10:09:04,871 INFO [train.py:1114] (3/4) Epoch 3, batch 1700, loss[loss=0.2679, simple_loss=0.3001, pruned_loss=0.08629, ctc_loss=0.1581, over 19677.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3349, pruned_loss=0.09723, ctc_loss=0.1815, over 3847148.22 frames. ], batch size: 46, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 10:09:11,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0 +2024-08-25 10:09:17,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.42 vs. limit=15.0 +2024-08-25 10:09:26,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=35728.0, ans=0.125 +2024-08-25 10:09:52,821 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 1.835e+02 2.022e+02 2.484e+02 3.793e+02, threshold=4.043e+02, percent-clipped=0.0 +2024-08-25 10:10:02,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=3.70 vs. 
limit=15.0 +2024-08-25 10:10:09,475 INFO [train.py:1114] (3/4) Epoch 3, batch 1750, loss[loss=0.2681, simple_loss=0.3015, pruned_loss=0.08571, ctc_loss=0.1581, over 19625.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3346, pruned_loss=0.09681, ctc_loss=0.1807, over 3852016.43 frames. ], batch size: 45, lr: 3.62e-02, grad_scale: 32.0 +2024-08-25 10:10:19,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=35941.333333333336, ans=0.125 +2024-08-25 10:11:09,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=36101.333333333336, ans=0.2 +2024-08-25 10:11:14,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36101.333333333336, ans=0.125 +2024-08-25 10:11:20,665 INFO [train.py:1114] (3/4) Epoch 3, batch 1800, loss[loss=0.3033, simple_loss=0.3455, pruned_loss=0.09477, ctc_loss=0.1789, over 19605.00 frames. ], tot_loss[loss=0.301, simple_loss=0.335, pruned_loss=0.09718, ctc_loss=0.1815, over 3853205.65 frames. ], batch size: 55, lr: 3.61e-02, grad_scale: 32.0 +2024-08-25 10:11:52,946 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.758e+02 2.042e+02 2.396e+02 4.902e+02, threshold=4.083e+02, percent-clipped=1.0 +2024-08-25 10:11:53,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=36314.666666666664, ans=10.0 +2024-08-25 10:11:58,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=36314.666666666664, ans=0.125 +2024-08-25 10:11:59,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=36314.666666666664, ans=0.125 +2024-08-25 10:12:33,948 INFO [train.py:1114] (3/4) Epoch 3, batch 1850, loss[loss=0.2931, simple_loss=0.3344, pruned_loss=0.08979, ctc_loss=0.1806, over 19592.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3338, pruned_loss=0.09655, ctc_loss=0.1803, over 3857174.83 frames. ], batch size: 57, lr: 3.61e-02, grad_scale: 32.0 +2024-08-25 10:12:43,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=36474.666666666664, ans=0.2 +2024-08-25 10:12:52,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36474.666666666664, ans=0.1 +2024-08-25 10:12:52,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=36474.666666666664, ans=0.0 +2024-08-25 10:12:57,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=36528.0, ans=0.015 +2024-08-25 10:13:09,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=36528.0, ans=0.125 +2024-08-25 10:13:31,586 INFO [train.py:1114] (3/4) Epoch 3, batch 1900, loss[loss=0.3141, simple_loss=0.3507, pruned_loss=0.1018, ctc_loss=0.1845, over 19621.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3341, pruned_loss=0.09634, ctc_loss=0.1801, over 3861414.14 frames. 
], batch size: 59, lr: 3.60e-02, grad_scale: 32.0 +2024-08-25 10:13:49,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36688.0, ans=0.1 +2024-08-25 10:13:52,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=36688.0, ans=0.125 +2024-08-25 10:14:13,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=36741.333333333336, ans=0.1 +2024-08-25 10:14:16,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.69 vs. limit=12.0 +2024-08-25 10:14:24,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 10:14:24,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=36794.666666666664, ans=0.125 +2024-08-25 10:14:29,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.725e+02 1.920e+02 2.285e+02 4.448e+02, threshold=3.841e+02, percent-clipped=1.0 +2024-08-25 10:14:35,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=36848.0, ans=0.125 +2024-08-25 10:14:54,777 INFO [train.py:1114] (3/4) Epoch 3, batch 1950, loss[loss=0.27, simple_loss=0.3135, pruned_loss=0.08046, ctc_loss=0.1641, over 19586.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3355, pruned_loss=0.09658, ctc_loss=0.1804, over 3869991.40 frames. ], batch size: 52, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:14:54,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=36954.666666666664, ans=0.0 +2024-08-25 10:15:12,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=37008.0, ans=0.125 +2024-08-25 10:15:16,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37008.0, ans=0.125 +2024-08-25 10:15:21,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.55 vs. limit=15.0 +2024-08-25 10:15:23,112 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.51 vs. limit=22.5 +2024-08-25 10:15:51,822 INFO [train.py:1114] (3/4) Epoch 3, batch 2000, loss[loss=0.2461, simple_loss=0.2835, pruned_loss=0.07648, ctc_loss=0.1395, over 19631.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3362, pruned_loss=0.09731, ctc_loss=0.1816, over 3854736.13 frames. 
], batch size: 45, lr: 3.59e-02, grad_scale: 32.0 +2024-08-25 10:15:59,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=37221.333333333336, ans=0.015 +2024-08-25 10:16:00,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37274.666666666664, ans=0.1 +2024-08-25 10:16:04,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37274.666666666664, ans=0.1 +2024-08-25 10:16:19,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.521e+02 1.904e+02 2.146e+02 2.566e+02 5.347e+02, threshold=4.293e+02, percent-clipped=2.0 +2024-08-25 10:16:19,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.04 vs. limit=15.0 +2024-08-25 10:16:20,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=37381.333333333336, ans=0.125 +2024-08-25 10:16:34,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=37434.666666666664, ans=0.2 +2024-08-25 10:16:44,026 INFO [train.py:1114] (3/4) Epoch 3, batch 2050, loss[loss=0.2665, simple_loss=0.2997, pruned_loss=0.08476, ctc_loss=0.1597, over 19692.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3345, pruned_loss=0.09675, ctc_loss=0.1806, over 3850078.92 frames. ], batch size: 47, lr: 3.58e-02, grad_scale: 32.0 +2024-08-25 10:16:45,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-08-25 10:17:00,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.05 vs. limit=15.0 +2024-08-25 10:17:11,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.61 vs. limit=15.0 +2024-08-25 10:17:12,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=37541.333333333336, ans=0.025 +2024-08-25 10:17:15,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=37541.333333333336, ans=0.0 +2024-08-25 10:17:24,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.02 vs. 
limit=12.0 +2024-08-25 10:17:27,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37648.0, ans=0.1 +2024-08-25 10:17:44,191 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:17:48,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=37701.333333333336, ans=0.5 +2024-08-25 10:17:50,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37701.333333333336, ans=0.1 +2024-08-25 10:17:56,064 INFO [train.py:1114] (3/4) Epoch 3, batch 2100, loss[loss=0.2973, simple_loss=0.3352, pruned_loss=0.09447, ctc_loss=0.1762, over 19758.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3337, pruned_loss=0.096, ctc_loss=0.1792, over 3857945.32 frames. ], batch size: 54, lr: 3.58e-02, grad_scale: 16.0 +2024-08-25 10:18:18,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=37754.666666666664, ans=0.2 +2024-08-25 10:18:40,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=37808.0, ans=0.0 +2024-08-25 10:18:40,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.92 vs. limit=15.0 +2024-08-25 10:18:51,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.40 vs. limit=22.5 +2024-08-25 10:19:06,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37861.333333333336, ans=0.125 +2024-08-25 10:19:13,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=37914.666666666664, ans=0.125 +2024-08-25 10:19:20,762 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.776e+02 1.971e+02 2.246e+02 3.814e+02, threshold=3.941e+02, percent-clipped=0.0 +2024-08-25 10:19:39,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=37914.666666666664, ans=0.2 +2024-08-25 10:19:47,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37968.0, ans=0.1 +2024-08-25 10:20:09,517 INFO [train.py:1114] (3/4) Epoch 3, batch 2150, loss[loss=0.2774, simple_loss=0.3173, pruned_loss=0.08601, ctc_loss=0.1638, over 19869.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.333, pruned_loss=0.09564, ctc_loss=0.1783, over 3869275.29 frames. ], batch size: 52, lr: 3.57e-02, grad_scale: 16.0 +2024-08-25 10:20:30,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=38074.666666666664, ans=0.125 +2024-08-25 10:20:31,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=12.0 +2024-08-25 10:20:34,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.59 vs. 
limit=15.0 +2024-08-25 10:20:35,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=38074.666666666664, ans=0.025 +2024-08-25 10:20:38,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38128.0, ans=0.125 +2024-08-25 10:20:42,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=38128.0, ans=0.0 +2024-08-25 10:20:52,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=12.0 +2024-08-25 10:21:02,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-08-25 10:21:03,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0 +2024-08-25 10:21:11,136 INFO [train.py:1114] (3/4) Epoch 3, batch 2200, loss[loss=0.3129, simple_loss=0.3499, pruned_loss=0.09978, ctc_loss=0.1909, over 19596.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3329, pruned_loss=0.09544, ctc_loss=0.1779, over 3867911.36 frames. ], batch size: 57, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:21:45,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=38394.666666666664, ans=10.0 +2024-08-25 10:21:48,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=38394.666666666664, ans=0.2 +2024-08-25 10:21:56,467 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.750e+02 1.922e+02 2.212e+02 3.187e+02, threshold=3.844e+02, percent-clipped=0.0 +2024-08-25 10:22:12,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=38448.0, ans=0.002511304347826087 +2024-08-25 10:22:16,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=38501.333333333336, ans=6.0 +2024-08-25 10:22:28,991 INFO [train.py:1114] (3/4) Epoch 3, batch 2250, loss[loss=0.2763, simple_loss=0.3224, pruned_loss=0.08354, ctc_loss=0.1577, over 19614.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3323, pruned_loss=0.09503, ctc_loss=0.1774, over 3868294.23 frames. ], batch size: 55, lr: 3.56e-02, grad_scale: 16.0 +2024-08-25 10:22:31,647 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-25 10:22:34,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.08 vs. 
limit=15.0 +2024-08-25 10:22:45,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=38608.0, ans=0.125 +2024-08-25 10:22:53,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38661.333333333336, ans=0.1 +2024-08-25 10:23:17,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38714.666666666664, ans=0.125 +2024-08-25 10:23:40,909 INFO [train.py:1114] (3/4) Epoch 3, batch 2300, loss[loss=0.2485, simple_loss=0.2956, pruned_loss=0.07312, ctc_loss=0.138, over 19512.00 frames. ], tot_loss[loss=0.2959, simple_loss=0.3313, pruned_loss=0.09482, ctc_loss=0.1773, over 3860984.35 frames. ], batch size: 49, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:23:41,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.35 vs. limit=15.0 +2024-08-25 10:23:51,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.46 vs. limit=22.5 +2024-08-25 10:24:13,769 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.820e+02 2.030e+02 2.354e+02 3.970e+02, threshold=4.059e+02, percent-clipped=1.0 +2024-08-25 10:24:14,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38981.333333333336, ans=0.125 +2024-08-25 10:24:45,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=39034.666666666664, ans=0.025 +2024-08-25 10:24:48,807 INFO [train.py:1114] (3/4) Epoch 3, batch 2350, loss[loss=0.3087, simple_loss=0.3467, pruned_loss=0.09959, ctc_loss=0.1789, over 19691.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3314, pruned_loss=0.0951, ctc_loss=0.1776, over 3863350.47 frames. ], batch size: 63, lr: 3.55e-02, grad_scale: 16.0 +2024-08-25 10:24:57,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39088.0, ans=0.0 +2024-08-25 10:25:02,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39088.0, ans=0.125 +2024-08-25 10:25:09,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=39141.333333333336, ans=0.125 +2024-08-25 10:25:10,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=39141.333333333336, ans=0.125 +2024-08-25 10:25:43,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=39301.333333333336, ans=0.0 +2024-08-25 10:25:45,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.69 vs. limit=22.5 +2024-08-25 10:25:52,380 INFO [train.py:1114] (3/4) Epoch 3, batch 2400, loss[loss=0.312, simple_loss=0.3503, pruned_loss=0.09974, ctc_loss=0.1856, over 19154.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3336, pruned_loss=0.09615, ctc_loss=0.1793, over 3857217.27 frames. 
], batch size: 71, lr: 3.54e-02, grad_scale: 32.0 +2024-08-25 10:25:57,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=39354.666666666664, ans=0.125 +2024-08-25 10:26:04,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0 +2024-08-25 10:26:11,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.01 vs. limit=22.5 +2024-08-25 10:26:41,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.413e+02 1.777e+02 2.047e+02 2.383e+02 4.291e+02, threshold=4.094e+02, percent-clipped=1.0 +2024-08-25 10:27:07,770 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.46 vs. limit=15.0 +2024-08-25 10:27:14,153 INFO [train.py:1114] (3/4) Epoch 3, batch 2450, loss[loss=0.3985, simple_loss=0.3828, pruned_loss=0.1516, ctc_loss=0.2773, over 13418.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.3393, pruned_loss=0.1012, ctc_loss=0.1887, over 3731394.84 frames. ], batch size: 141, lr: 3.53e-02, grad_scale: 16.0 +2024-08-25 10:27:41,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39728.0, ans=0.125 +2024-08-25 10:27:51,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=39781.333333333336, ans=0.2 +2024-08-25 10:27:54,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=15.0 +2024-08-25 10:39:24,826 INFO [train.py:1050] (3/4) Caught exception: [Rank 3] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=89707, OpType=ALLREDUCE, NumelIn=745, NumelOut=745, Timeout(ms)=600000) ran for 600002 milliseconds before timing out.. 
+2024-08-25 10:39:24,827 INFO [checkpoint.py:75] (3/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/bad-model-3.pt +2024-08-25 10:40:06,840 INFO [train.py:1413] (3/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/batch-41f60be0-7cef-6aa3-6aed-cf4a4599a084.pt +2024-08-25 10:40:07,355 INFO [train.py:1419] (3/4) features shape: torch.Size([48, 1633, 80]) +2024-08-25 10:40:07,357 INFO [train.py:1423] (3/4) num tokens: 3855 diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-02-0 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-02-0 new file mode 100644 index 0000000000000000000000000000000000000000..47586076d51c86f9373837853e27d8910b9269b2 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-02-0 @@ -0,0 +1,5503 @@ +2024-08-26 14:14:06,477 INFO [train.py:1182] (0/4) Training started +2024-08-26 14:14:12,389 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-26 14:14:12,392 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2652.int.cedar.computecanada.ca', 'IP address': '172.16.146.89'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 4, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 
'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-26 14:14:12,392 INFO [train.py:1212] (0/4) About to create model +2024-08-26 14:14:13,058 INFO [train.py:1216] (0/4) Number of model parameters: 65805511 +2024-08-26 14:14:13,604 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-3.pt +2024-08-26 14:14:19,856 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-26 14:14:20,225 INFO [train.py:1231] (0/4) Using DDP +2024-08-26 14:14:24,078 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-26 14:14:25,323 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-26 14:14:25,324 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-26 14:14:28,346 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-26 14:14:28,346 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-26 14:14:28,346 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-26 14:14:28,346 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-26 14:14:28,347 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-26 14:14:28,347 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-26 14:14:29,882 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-26 14:14:29,883 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-26 14:14:32,051 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-26 14:14:32,052 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-26 14:14:32,362 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-26 14:14:32,362 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-26 14:18:38,883 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.13 vs. limit=7.5 +2024-08-26 14:18:40,628 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12155MB +2024-08-26 14:18:41,877 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12155MB +2024-08-26 14:18:49,649 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12209MB +2024-08-26 14:18:50,842 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12220MB +2024-08-26 14:19:04,869 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12220MB +2024-08-26 14:19:05,859 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.61 vs. limit=7.5 +2024-08-26 14:19:06,158 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12220MB +2024-08-26 14:19:06,175 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-26 14:19:49,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. 
limit=15.0 +2024-08-26 14:19:52,355 INFO [train.py:1114] (0/4) Epoch 4, batch 0, loss[loss=0.2792, simple_loss=0.3179, pruned_loss=0.08588, ctc_loss=0.1717, over 19412.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3179, pruned_loss=0.08588, ctc_loss=0.1717, over 19412.00 frames. ], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:19:52,356 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 14:20:25,790 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2421, simple_loss=0.3218, pruned_loss=0.05945, ctc_loss=0.1086, over 944034.00 frames. +2024-08-26 14:20:25,791 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12220MB +2024-08-26 14:20:28,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=8.49 vs. limit=15.0 +2024-08-26 14:21:40,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=39936.0, ans=0.125 +2024-08-26 14:22:00,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=39936.0, ans=0.0021878260869565213 +2024-08-26 14:22:42,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=39989.333333333336, ans=0.00217623188405797 +2024-08-26 14:23:04,536 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.845e+02 2.126e+02 2.642e+02 4.004e+02, threshold=4.252e+02, percent-clipped=0.0 +2024-08-26 14:23:26,402 INFO [train.py:1114] (0/4) Epoch 4, batch 50, loss[loss=0.2685, simple_loss=0.3056, pruned_loss=0.08346, ctc_loss=0.1611, over 19690.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3367, pruned_loss=0.09798, ctc_loss=0.1847, over 844645.13 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:23:55,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=40149.333333333336, ans=0.0 +2024-08-26 14:24:01,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=40149.333333333336, ans=0.125 +2024-08-26 14:24:35,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=40202.666666666664, ans=0.125 +2024-08-26 14:24:41,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=40202.666666666664, ans=0.125 +2024-08-26 14:24:54,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=40256.0, ans=0.125 +2024-08-26 14:25:08,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=40256.0, ans=0.125 +2024-08-26 14:25:33,125 INFO [train.py:1114] (0/4) Epoch 4, batch 100, loss[loss=0.2924, simple_loss=0.3329, pruned_loss=0.09114, ctc_loss=0.1744, over 19708.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3365, pruned_loss=0.09699, ctc_loss=0.1818, over 1498983.17 frames. 
], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-26 14:25:36,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=40362.666666666664, ans=0.125 +2024-08-26 14:26:02,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=40416.0, ans=0.0 +2024-08-26 14:26:13,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=40469.333333333336, ans=0.1 +2024-08-26 14:26:34,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=40522.666666666664, ans=0.0 +2024-08-26 14:26:40,725 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.662e+02 1.906e+02 2.226e+02 3.245e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-26 14:26:48,534 INFO [train.py:1114] (0/4) Epoch 4, batch 150, loss[loss=0.2788, simple_loss=0.3065, pruned_loss=0.09234, ctc_loss=0.1661, over 19733.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3319, pruned_loss=0.0942, ctc_loss=0.1766, over 2028351.17 frames. ], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:26:49,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=40629.333333333336, ans=10.0 +2024-08-26 14:27:32,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=40736.0, ans=0.07 +2024-08-26 14:27:38,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=40736.0, ans=0.00201391304347826 +2024-08-26 14:27:54,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=40842.666666666664, ans=0.035 +2024-08-26 14:28:04,923 INFO [train.py:1114] (0/4) Epoch 4, batch 200, loss[loss=0.3166, simple_loss=0.3503, pruned_loss=0.1027, ctc_loss=0.1937, over 18390.00 frames. ], tot_loss[loss=0.2934, simple_loss=0.3305, pruned_loss=0.09321, ctc_loss=0.1749, over 2436007.39 frames. 
], batch size: 85, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:28:06,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=40896.0, ans=0.0 +2024-08-26 14:28:07,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=40896.0, ans=0.025 +2024-08-26 14:28:21,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=40949.333333333336, ans=0.1 +2024-08-26 14:28:28,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=41002.666666666664, ans=0.0 +2024-08-26 14:28:30,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=41002.666666666664, ans=0.2 +2024-08-26 14:28:37,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41056.0, ans=0.1 +2024-08-26 14:28:49,777 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.824e+02 2.102e+02 2.533e+02 3.992e+02, threshold=4.203e+02, percent-clipped=3.0 +2024-08-26 14:28:55,760 INFO [train.py:1114] (0/4) Epoch 4, batch 250, loss[loss=0.3065, simple_loss=0.3533, pruned_loss=0.09436, ctc_loss=0.1775, over 19399.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.3295, pruned_loss=0.09203, ctc_loss=0.1724, over 2755963.59 frames. ], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:29:19,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.79 vs. limit=8.0 +2024-08-26 14:29:36,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=41376.0, ans=0.125 +2024-08-26 14:29:42,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41376.0, ans=0.125 +2024-08-26 14:29:44,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41376.0, ans=0.1 +2024-08-26 14:29:46,774 INFO [train.py:1114] (0/4) Epoch 4, batch 300, loss[loss=0.2956, simple_loss=0.3424, pruned_loss=0.09124, ctc_loss=0.1656, over 19505.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3288, pruned_loss=0.0914, ctc_loss=0.1713, over 3001044.60 frames. 
], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:30:07,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41536.0, ans=0.1 +2024-08-26 14:30:21,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=41589.333333333336, ans=0.001828405797101449 +2024-08-26 14:30:31,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41642.666666666664, ans=0.125 +2024-08-26 14:30:32,084 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.674e+02 1.880e+02 2.161e+02 3.950e+02, threshold=3.761e+02, percent-clipped=0.0 +2024-08-26 14:30:32,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=41642.666666666664, ans=0.2 +2024-08-26 14:30:37,847 INFO [train.py:1114] (0/4) Epoch 4, batch 350, loss[loss=0.2861, simple_loss=0.3132, pruned_loss=0.09452, ctc_loss=0.1748, over 19766.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3293, pruned_loss=0.09185, ctc_loss=0.1717, over 3191455.99 frames. ], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:30:49,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=41749.333333333336, ans=0.0 +2024-08-26 14:31:12,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.44 vs. limit=15.0 +2024-08-26 14:31:35,382 INFO [train.py:1114] (0/4) Epoch 4, batch 400, loss[loss=0.2758, simple_loss=0.3259, pruned_loss=0.0821, ctc_loss=0.154, over 19502.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.328, pruned_loss=0.09098, ctc_loss=0.1698, over 3343576.12 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:32:04,286 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.42 vs. limit=10.0 +2024-08-26 14:32:09,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=42122.666666666664, ans=0.0017124637681159433 +2024-08-26 14:32:19,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.438e+02 1.828e+02 2.157e+02 2.598e+02 8.551e+02, threshold=4.314e+02, percent-clipped=2.0 +2024-08-26 14:32:23,166 INFO [train.py:1114] (0/4) Epoch 4, batch 450, loss[loss=0.2808, simple_loss=0.3321, pruned_loss=0.08319, ctc_loss=0.1575, over 19616.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.328, pruned_loss=0.09122, ctc_loss=0.17, over 3452205.27 frames. 
], batch size: 55, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:32:33,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42229.333333333336, ans=0.1 +2024-08-26 14:32:37,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=42282.666666666664, ans=0.2 +2024-08-26 14:32:39,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-26 14:32:50,410 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:32:50,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42336.0, ans=0.125 +2024-08-26 14:32:56,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42389.333333333336, ans=0.1 +2024-08-26 14:32:57,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=42389.333333333336, ans=0.125 +2024-08-26 14:33:01,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=42389.333333333336, ans=0.2 +2024-08-26 14:33:03,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=42442.666666666664, ans=0.5 +2024-08-26 14:33:14,189 INFO [train.py:1114] (0/4) Epoch 4, batch 500, loss[loss=0.2618, simple_loss=0.3148, pruned_loss=0.07652, ctc_loss=0.1393, over 19707.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3261, pruned_loss=0.08987, ctc_loss=0.1679, over 3547717.56 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:33:14,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.98 vs. limit=6.0 +2024-08-26 14:33:50,048 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-8000.pt +2024-08-26 14:33:56,932 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.25 vs. limit=22.5 +2024-08-26 14:34:05,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=42709.333333333336, ans=0.125 +2024-08-26 14:34:07,927 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.676e+02 1.857e+02 2.171e+02 5.331e+02, threshold=3.714e+02, percent-clipped=2.0 +2024-08-26 14:34:11,747 INFO [train.py:1114] (0/4) Epoch 4, batch 550, loss[loss=0.3004, simple_loss=0.338, pruned_loss=0.09611, ctc_loss=0.1764, over 19276.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3271, pruned_loss=0.09048, ctc_loss=0.1688, over 3609344.55 frames. 
], batch size: 71, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:34:17,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42762.666666666664, ans=0.125 +2024-08-26 14:34:18,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=42762.666666666664, ans=0.05 +2024-08-26 14:34:20,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42816.0, ans=0.0 +2024-08-26 14:34:35,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=42869.333333333336, ans=0.0 +2024-08-26 14:34:45,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=42922.666666666664, ans=0.125 +2024-08-26 14:34:47,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=42922.666666666664, ans=0.125 +2024-08-26 14:35:00,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=42976.0, ans=0.125 +2024-08-26 14:35:03,326 INFO [train.py:1114] (0/4) Epoch 4, batch 600, loss[loss=0.2845, simple_loss=0.3268, pruned_loss=0.08814, ctc_loss=0.1647, over 19390.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3264, pruned_loss=0.08992, ctc_loss=0.1679, over 3666017.55 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:35:03,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.35 vs. limit=22.5 +2024-08-26 14:35:22,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=43136.0, ans=0.0 +2024-08-26 14:35:40,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=43189.333333333336, ans=0.125 +2024-08-26 14:35:50,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 1.699e+02 1.953e+02 2.270e+02 5.390e+02, threshold=3.906e+02, percent-clipped=1.0 +2024-08-26 14:35:52,095 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.75 vs. limit=15.0 +2024-08-26 14:35:54,189 INFO [train.py:1114] (0/4) Epoch 4, batch 650, loss[loss=0.286, simple_loss=0.3301, pruned_loss=0.08782, ctc_loss=0.1657, over 19764.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3242, pruned_loss=0.08863, ctc_loss=0.1654, over 3716027.46 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 8.0 +2024-08-26 14:36:24,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43402.666666666664, ans=0.125 +2024-08-26 14:36:48,327 INFO [train.py:1114] (0/4) Epoch 4, batch 700, loss[loss=0.256, simple_loss=0.3017, pruned_loss=0.07619, ctc_loss=0.1449, over 19712.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3253, pruned_loss=0.08911, ctc_loss=0.1664, over 3748433.07 frames. 
], batch size: 51, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:36:56,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43562.666666666664, ans=0.0 +2024-08-26 14:37:14,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=43669.333333333336, ans=0.125 +2024-08-26 14:37:15,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.26 vs. limit=22.5 +2024-08-26 14:37:29,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43776.0, ans=0.0 +2024-08-26 14:37:33,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=43776.0, ans=0.00135304347826087 +2024-08-26 14:37:33,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43776.0, ans=0.0 +2024-08-26 14:37:36,044 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.717e+02 1.974e+02 2.287e+02 3.794e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 14:37:39,952 INFO [train.py:1114] (0/4) Epoch 4, batch 750, loss[loss=0.3118, simple_loss=0.3556, pruned_loss=0.09827, ctc_loss=0.1785, over 19497.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3254, pruned_loss=0.08912, ctc_loss=0.1663, over 3775360.00 frames. ], batch size: 54, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:37:47,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.59 vs. limit=22.5 +2024-08-26 14:37:49,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43882.666666666664, ans=0.1 +2024-08-26 14:37:55,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0 +2024-08-26 14:37:58,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43936.0, ans=0.0 +2024-08-26 14:38:09,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=43989.333333333336, ans=0.5 +2024-08-26 14:38:10,061 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.11 vs. limit=15.0 +2024-08-26 14:38:22,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=44042.666666666664, ans=0.025 +2024-08-26 14:38:31,812 INFO [train.py:1114] (0/4) Epoch 4, batch 800, loss[loss=0.2788, simple_loss=0.313, pruned_loss=0.08899, ctc_loss=0.1665, over 19439.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3252, pruned_loss=0.08907, ctc_loss=0.1662, over 3795552.55 frames. 
], batch size: 48, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:38:46,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44149.333333333336, ans=0.1 +2024-08-26 14:39:05,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=44256.0, ans=0.015 +2024-08-26 14:39:13,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=44309.333333333336, ans=0.0 +2024-08-26 14:39:14,483 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:39:16,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.706e+02 1.876e+02 2.197e+02 5.470e+02, threshold=3.751e+02, percent-clipped=2.0 +2024-08-26 14:39:22,934 INFO [train.py:1114] (0/4) Epoch 4, batch 850, loss[loss=0.3378, simple_loss=0.368, pruned_loss=0.112, ctc_loss=0.209, over 19643.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3245, pruned_loss=0.08858, ctc_loss=0.1652, over 3815313.62 frames. ], batch size: 59, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:39:34,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=44416.0, ans=0.125 +2024-08-26 14:39:45,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=44469.333333333336, ans=0.125 +2024-08-26 14:39:50,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44469.333333333336, ans=0.1 +2024-08-26 14:39:52,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=44522.666666666664, ans=0.125 +2024-08-26 14:39:52,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=44522.666666666664, ans=0.125 +2024-08-26 14:40:00,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=44576.0, ans=0.2 +2024-08-26 14:40:01,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.95 vs. limit=5.0 +2024-08-26 14:40:02,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=44576.0, ans=0.125 +2024-08-26 14:40:11,461 INFO [train.py:1114] (0/4) Epoch 4, batch 900, loss[loss=0.237, simple_loss=0.2886, pruned_loss=0.06622, ctc_loss=0.1323, over 19384.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3253, pruned_loss=0.08933, ctc_loss=0.1665, over 3818849.62 frames. 
], batch size: 48, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:40:14,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=44629.333333333336, ans=0.125 +2024-08-26 14:40:27,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=44682.666666666664, ans=0.2 +2024-08-26 14:40:28,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=44682.666666666664, ans=0.025 +2024-08-26 14:40:45,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.25 vs. limit=15.0 +2024-08-26 14:40:59,413 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.686e+02 1.871e+02 2.157e+02 4.639e+02, threshold=3.742e+02, percent-clipped=1.0 +2024-08-26 14:41:03,411 INFO [train.py:1114] (0/4) Epoch 4, batch 950, loss[loss=0.2412, simple_loss=0.2912, pruned_loss=0.06964, ctc_loss=0.1299, over 19495.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3255, pruned_loss=0.0897, ctc_loss=0.1673, over 3820486.04 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:41:04,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.01 vs. limit=6.0 +2024-08-26 14:41:34,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=45056.0, ans=0.125 +2024-08-26 14:41:34,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=45056.0, ans=0.04949747468305833 +2024-08-26 14:41:52,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.99 vs. limit=22.5 +2024-08-26 14:41:53,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45109.333333333336, ans=0.125 +2024-08-26 14:41:53,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-08-26 14:41:54,738 INFO [train.py:1114] (0/4) Epoch 4, batch 1000, loss[loss=0.2648, simple_loss=0.3047, pruned_loss=0.0824, ctc_loss=0.1502, over 19848.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3264, pruned_loss=0.09037, ctc_loss=0.1684, over 3816355.45 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-26 14:42:36,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.81 vs. limit=22.5 +2024-08-26 14:42:42,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.702e+02 1.844e+02 2.187e+02 3.225e+02, threshold=3.689e+02, percent-clipped=0.0 +2024-08-26 14:42:44,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45376.0, ans=0.1 +2024-08-26 14:42:46,508 INFO [train.py:1114] (0/4) Epoch 4, batch 1050, loss[loss=0.3346, simple_loss=0.3647, pruned_loss=0.1113, ctc_loss=0.2048, over 19838.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3253, pruned_loss=0.08963, ctc_loss=0.1673, over 3823183.60 frames. 
+2024-08-26 14:43:20,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.23 vs. limit=15.0
+2024-08-26 14:43:26,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=45589.333333333336, ans=0.125
+2024-08-26 14:43:27,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.28 vs. limit=15.0
+2024-08-26 14:43:35,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45642.666666666664, ans=0.125
+2024-08-26 14:43:38,151 INFO [train.py:1114] (0/4) Epoch 4, batch 1100, loss[loss=0.2623, simple_loss=0.3118, pruned_loss=0.07708, ctc_loss=0.1464, over 19598.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3248, pruned_loss=0.08912, ctc_loss=0.1662, over 3830731.93 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-26 14:43:39,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45696.0, ans=0.125
+2024-08-26 14:43:45,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.95 vs. limit=15.0
+2024-08-26 14:43:48,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=45749.333333333336, ans=0.09899494936611666
+2024-08-26 14:43:51,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45749.333333333336, ans=0.1
+2024-08-26 14:43:57,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.06 vs. limit=15.0
+2024-08-26 14:44:21,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45909.333333333336, ans=0.125
+2024-08-26 14:44:25,688 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.748e+02 1.997e+02 2.350e+02 6.199e+02, threshold=3.995e+02, percent-clipped=5.0
+2024-08-26 14:44:29,534 INFO [train.py:1114] (0/4) Epoch 4, batch 1150, loss[loss=0.266, simple_loss=0.3089, pruned_loss=0.08215, ctc_loss=0.1472, over 19591.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3243, pruned_loss=0.08894, ctc_loss=0.1661, over 3829434.88 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-26 14:44:30,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=45962.666666666664, ans=0.125
+2024-08-26 14:44:35,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=45962.666666666664, ans=0.125
+2024-08-26 14:44:35,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45962.666666666664, ans=0.1
+2024-08-26 14:46:03,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.84 vs. limit=15.0
+2024-08-26 14:46:04,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=46069.333333333336, ans=12.0
+2024-08-26 14:46:31,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46176.0, ans=0.1
+2024-08-26 14:46:38,928 INFO [train.py:1114] (0/4) Epoch 4, batch 1200, loss[loss=0.2901, simple_loss=0.3282, pruned_loss=0.09065, ctc_loss=0.1766, over 19839.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3252, pruned_loss=0.08942, ctc_loss=0.1671, over 3825559.34 frames. ], batch size: 57, lr: 3.17e-02, grad_scale: 32.0
+2024-08-26 14:46:44,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=46229.333333333336, ans=0.125
+2024-08-26 14:46:50,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=46282.666666666664, ans=0.0
+2024-08-26 14:47:00,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46336.0, ans=0.1
+2024-08-26 14:47:04,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=46336.0, ans=0.0
+2024-08-26 14:47:06,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46389.333333333336, ans=0.125
+2024-08-26 14:47:14,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=46389.333333333336, ans=0.0
+2024-08-26 14:47:14,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=46389.333333333336, ans=0.0
+2024-08-26 14:47:17,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.73 vs. limit=22.5
+2024-08-26 14:47:23,214 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.767e+02 1.944e+02 2.283e+02 5.479e+02, threshold=3.889e+02, percent-clipped=1.0
+2024-08-26 14:47:29,952 INFO [train.py:1114] (0/4) Epoch 4, batch 1250, loss[loss=0.3091, simple_loss=0.3433, pruned_loss=0.1008, ctc_loss=0.1832, over 19526.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3253, pruned_loss=0.08908, ctc_loss=0.1663, over 3843759.16 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 32.0
+2024-08-26 14:47:30,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=46496.0, ans=0.125
+2024-08-26 14:47:43,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=46549.333333333336, ans=0.125
+2024-08-26 14:47:46,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=46549.333333333336, ans=0.125
+2024-08-26 14:47:51,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=46602.666666666664, ans=0.025
+2024-08-26 14:47:51,777 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:47:59,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46656.0, ans=0.1
+2024-08-26 14:48:21,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=46762.666666666664, ans=0.025
+2024-08-26 14:48:22,031 INFO [train.py:1114] (0/4) Epoch 4, batch 1300, loss[loss=0.3296, simple_loss=0.3619, pruned_loss=0.1088, ctc_loss=0.1988, over 19010.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3243, pruned_loss=0.08863, ctc_loss=0.1656, over 3847381.00 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0
+2024-08-26 14:48:36,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.01 vs. limit=6.0
+2024-08-26 14:48:41,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=46869.333333333336, ans=0.0006805797101449261
+2024-08-26 14:49:06,437 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.633e+02 1.793e+02 2.136e+02 4.035e+02, threshold=3.586e+02, percent-clipped=1.0
+2024-08-26 14:49:07,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=46976.0, ans=0.125
+2024-08-26 14:49:09,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47029.333333333336, ans=0.1
+2024-08-26 14:49:10,195 INFO [train.py:1114] (0/4) Epoch 4, batch 1350, loss[loss=0.2731, simple_loss=0.3202, pruned_loss=0.08132, ctc_loss=0.1585, over 19764.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3238, pruned_loss=0.08808, ctc_loss=0.1646, over 3856211.67 frames. ], batch size: 54, lr: 3.16e-02, grad_scale: 32.0
+2024-08-26 14:49:22,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=47082.666666666664, ans=0.125
+2024-08-26 14:49:27,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47082.666666666664, ans=0.125
+2024-08-26 14:49:28,315 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.93 vs. limit=22.5
+2024-08-26 14:49:33,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47136.0, ans=0.1
+2024-08-26 14:49:59,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=47242.666666666664, ans=0.0
+2024-08-26 14:50:01,623 INFO [train.py:1114] (0/4) Epoch 4, batch 1400, loss[loss=0.2442, simple_loss=0.29, pruned_loss=0.07191, ctc_loss=0.1367, over 19662.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3238, pruned_loss=0.08804, ctc_loss=0.1648, over 3862713.70 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0
+2024-08-26 14:50:23,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0
+2024-08-26 14:50:31,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=22.5
+2024-08-26 14:50:42,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47509.333333333336, ans=0.125
+2024-08-26 14:50:49,028 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.701e+02 1.930e+02 2.137e+02 5.469e+02, threshold=3.859e+02, percent-clipped=2.0
+2024-08-26 14:50:53,069 INFO [train.py:1114] (0/4) Epoch 4, batch 1450, loss[loss=0.291, simple_loss=0.3313, pruned_loss=0.09212, ctc_loss=0.1662, over 19662.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.325, pruned_loss=0.0886, ctc_loss=0.166, over 3860652.50 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0
+2024-08-26 14:51:14,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0
+2024-08-26 14:51:43,456 INFO [train.py:1114] (0/4) Epoch 4, batch 1500, loss[loss=0.3009, simple_loss=0.3447, pruned_loss=0.09391, ctc_loss=0.1733, over 19581.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3248, pruned_loss=0.08806, ctc_loss=0.165, over 3859580.36 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 32.0
+2024-08-26 14:51:46,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47829.333333333336, ans=0.125
+2024-08-26 14:51:53,402 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:52:11,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=47936.0, ans=0.0004486956521739128
+2024-08-26 14:52:20,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=47989.333333333336, ans=0.025
+2024-08-26 14:52:34,707 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.743e+02 1.956e+02 2.243e+02 3.928e+02, threshold=3.912e+02, percent-clipped=1.0
+2024-08-26 14:52:38,434 INFO [train.py:1114] (0/4) Epoch 4, batch 1550, loss[loss=0.3004, simple_loss=0.3406, pruned_loss=0.09646, ctc_loss=0.1679, over 19593.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3251, pruned_loss=0.08865, ctc_loss=0.1657, over 3846078.25 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 32.0
+2024-08-26 14:53:20,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48309.333333333336, ans=0.125
+2024-08-26 14:53:23,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=48309.333333333336, ans=0.07
+2024-08-26 14:53:23,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48309.333333333336, ans=0.125
+2024-08-26 14:53:24,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=48309.333333333336, ans=0.125
+2024-08-26 14:53:29,859 INFO [train.py:1114] (0/4) Epoch 4, batch 1600, loss[loss=0.2955, simple_loss=0.3369, pruned_loss=0.09166, ctc_loss=0.177, over 19838.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3248, pruned_loss=0.08879, ctc_loss=0.166, over 3836386.44 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0
+2024-08-26 14:53:39,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=48416.0, ans=0.2
+2024-08-26 14:53:41,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=48416.0, ans=0.125
+2024-08-26 14:53:46,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=48416.0, ans=0.125
+2024-08-26 14:54:18,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.701e+02 1.882e+02 2.341e+02 4.982e+02, threshold=3.764e+02, percent-clipped=3.0
+2024-08-26 14:54:21,809 INFO [train.py:1114] (0/4) Epoch 4, batch 1650, loss[loss=0.322, simple_loss=0.3514, pruned_loss=0.1053, ctc_loss=0.2048, over 19659.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3245, pruned_loss=0.08876, ctc_loss=0.1657, over 3833121.08 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0
+2024-08-26 14:54:24,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48629.333333333336, ans=0.125
+2024-08-26 14:54:30,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=15.0
+2024-08-26 14:54:37,795 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0
+2024-08-26 14:54:38,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=48682.666666666664, ans=0.1
+2024-08-26 14:54:42,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=48736.0, ans=0.2
+2024-08-26 14:55:17,031 INFO [train.py:1114] (0/4) Epoch 4, batch 1700, loss[loss=0.2436, simple_loss=0.2857, pruned_loss=0.07395, ctc_loss=0.1339, over 19658.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3237, pruned_loss=0.08806, ctc_loss=0.1639, over 3847142.73 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0
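The optim.py WARNING lines report quartiles of recent gradient norms and a clipping threshold; in every instance above the threshold equals Clipping_scale (2.0) times the median quartile (e.g. 3.742e+02 = 2.0 x 1.871e+02), so a plausible reading is threshold = clipping_scale * median(recent grad norms). A hedged Python sketch of that logic (the real code is icefall's optim.py; the window size and update rule here are assumptions):

    # Sketch: clip when the total grad norm exceeds 2x the median of a
    # rolling window of recent grad norms, as the quartile warnings suggest.
    from collections import deque
    import torch

    class GradNormClipper:
        def __init__(self, clipping_scale: float = 2.0, window: int = 128):
            self.clipping_scale = clipping_scale
            self.norms = deque(maxlen=window)

        def clip_(self, params) -> float:
            params = [p for p in params if p.grad is not None]
            total = torch.norm(torch.stack([p.grad.norm() for p in params]))
            self.norms.append(total.item())
            history = torch.tensor(list(self.norms))
            q = torch.quantile(history, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
            threshold = self.clipping_scale * q[2].item()  # 2.0 x median
            if total.item() > threshold:
                # scale all gradients down so the total norm equals threshold
                for p in params:
                    p.grad.mul_(threshold / total.item())
            return threshold

The five quartile values in each warning would then correspond to the min/25%/median/75%/max of the rolling window, and percent-clipped to how often the threshold was hit.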
+2024-08-26 14:55:24,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=48896.0, ans=0.0002399999999999989
+2024-08-26 14:55:26,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0
+2024-08-26 14:55:29,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.33 vs. limit=15.0
+2024-08-26 14:55:36,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=49002.666666666664, ans=0.2
+2024-08-26 14:55:40,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=49002.666666666664, ans=0.125
+2024-08-26 14:55:43,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=49002.666666666664, ans=0.1
+2024-08-26 14:55:45,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=49056.0, ans=0.2
+2024-08-26 14:55:46,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.63 vs. limit=15.0
+2024-08-26 14:55:53,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=49109.333333333336, ans=10.0
+2024-08-26 14:55:55,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0
+2024-08-26 14:55:59,535 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.770e+02 1.975e+02 2.193e+02 4.882e+02, threshold=3.950e+02, percent-clipped=1.0
+2024-08-26 14:55:59,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=49109.333333333336, ans=0.125
+2024-08-26 14:56:03,237 INFO [train.py:1114] (0/4) Epoch 4, batch 1750, loss[loss=0.2441, simple_loss=0.2883, pruned_loss=0.07304, ctc_loss=0.1347, over 19657.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.323, pruned_loss=0.08762, ctc_loss=0.1632, over 3852663.18 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0
+2024-08-26 14:56:15,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.51 vs. limit=15.0
+2024-08-26 14:56:16,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.31 vs. limit=15.0
+2024-08-26 14:56:30,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.55 vs. limit=15.0
+2024-08-26 14:56:48,556 INFO [train.py:1114] (0/4) Epoch 4, batch 1800, loss[loss=0.2778, simple_loss=0.3307, pruned_loss=0.08192, ctc_loss=0.1526, over 19615.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3231, pruned_loss=0.08775, ctc_loss=0.1633, over 3852526.50 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0
+2024-08-26 14:56:56,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49429.333333333336, ans=0.125
+2024-08-26 14:56:57,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=49482.666666666664, ans=0.125
+2024-08-26 14:57:02,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=49482.666666666664, ans=0.00011246376811594253
+2024-08-26 14:57:26,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=49642.666666666664, ans=0.0
+2024-08-26 14:57:30,226 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.664e+02 1.898e+02 2.172e+02 3.982e+02, threshold=3.795e+02, percent-clipped=1.0
+2024-08-26 14:57:33,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49696.0, ans=0.125
+2024-08-26 14:57:33,984 INFO [train.py:1114] (0/4) Epoch 4, batch 1850, loss[loss=0.2761, simple_loss=0.3279, pruned_loss=0.08135, ctc_loss=0.1541, over 19599.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3224, pruned_loss=0.08725, ctc_loss=0.1624, over 3856120.50 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0
+2024-08-26 14:57:53,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.94 vs. limit=15.0
+2024-08-26 14:58:18,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49909.333333333336, ans=0.125
+2024-08-26 14:58:20,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=49962.666666666664, ans=0.025
+2024-08-26 14:58:21,318 INFO [train.py:1114] (0/4) Epoch 4, batch 1900, loss[loss=0.2984, simple_loss=0.3424, pruned_loss=0.09349, ctc_loss=0.1687, over 19647.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3228, pruned_loss=0.08714, ctc_loss=0.1622, over 3860972.37 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 16.0
+2024-08-26 14:58:41,341 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.28 vs. limit=15.0
+2024-08-26 14:58:44,607 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:59:03,247 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.687e+02 1.820e+02 2.228e+02 3.741e+02, threshold=3.639e+02, percent-clipped=0.0
+2024-08-26 14:59:06,123 INFO [train.py:1114] (0/4) Epoch 4, batch 1950, loss[loss=0.282, simple_loss=0.3218, pruned_loss=0.08784, ctc_loss=0.1663, over 19588.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3236, pruned_loss=0.08709, ctc_loss=0.1622, over 3869830.13 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 16.0
+2024-08-26 14:59:15,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=50282.666666666664, ans=0.125
+2024-08-26 14:59:31,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=50336.0, ans=0.0
+2024-08-26 14:59:42,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.44 vs. limit=15.0
+2024-08-26 14:59:49,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=50442.666666666664, ans=0.125
+2024-08-26 14:59:53,436 INFO [train.py:1114] (0/4) Epoch 4, batch 2000, loss[loss=0.2675, simple_loss=0.2975, pruned_loss=0.08646, ctc_loss=0.1613, over 19654.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3243, pruned_loss=0.08776, ctc_loss=0.1636, over 3855683.96 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0
+2024-08-26 15:00:06,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=50549.333333333336, ans=0.025
+2024-08-26 15:00:30,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=50709.333333333336, ans=0.2
+2024-08-26 15:00:35,434 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.722e+02 2.023e+02 2.377e+02 8.657e+02, threshold=4.047e+02, percent-clipped=4.0
+2024-08-26 15:00:37,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=50762.666666666664, ans=0.125
+2024-08-26 15:00:38,082 INFO [train.py:1114] (0/4) Epoch 4, batch 2050, loss[loss=0.2354, simple_loss=0.2763, pruned_loss=0.06957, ctc_loss=0.1383, over 19719.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3228, pruned_loss=0.08726, ctc_loss=0.1628, over 3851342.85 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 32.0
+2024-08-26 15:00:38,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=50762.666666666664, ans=0.025
+2024-08-26 15:00:41,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=50762.666666666664, ans=0.125
+2024-08-26 15:00:55,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=50869.333333333336, ans=0.05
+2024-08-26 15:00:59,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.38 vs. limit=15.0
+2024-08-26 15:01:03,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=50922.666666666664, ans=0.125
+2024-08-26 15:01:11,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=50922.666666666664, ans=10.0
+2024-08-26 15:01:18,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50976.0, ans=0.1
+2024-08-26 15:01:21,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=15.0
+2024-08-26 15:01:22,461 INFO [train.py:1114] (0/4) Epoch 4, batch 2100, loss[loss=0.277, simple_loss=0.3218, pruned_loss=0.08491, ctc_loss=0.1561, over 19770.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3217, pruned_loss=0.08623, ctc_loss=0.1608, over 3857773.24 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0
+2024-08-26 15:01:32,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51082.666666666664, ans=0.1
+2024-08-26 15:01:35,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=51082.666666666664, ans=0.0
+2024-08-26 15:01:36,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=51082.666666666664, ans=0.125
+2024-08-26 15:01:41,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=51136.0, ans=0.125
+2024-08-26 15:01:53,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=51189.333333333336, ans=0.125
+2024-08-26 15:01:59,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=51242.666666666664, ans=0.125
+2024-08-26 15:02:04,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.626e+02 1.780e+02 1.895e+02 2.709e+02, threshold=3.561e+02, percent-clipped=0.0
+2024-08-26 15:02:07,164 INFO [train.py:1114] (0/4) Epoch 4, batch 2150, loss[loss=0.2481, simple_loss=0.3013, pruned_loss=0.0699, ctc_loss=0.1375, over 19590.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3206, pruned_loss=0.08567, ctc_loss=0.1599, over 3868264.41 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0
+2024-08-26 15:02:08,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51296.0, ans=0.0
+2024-08-26 15:02:10,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=51296.0, ans=0.2
+2024-08-26 15:02:19,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51349.333333333336, ans=0.1
+2024-08-26 15:02:26,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.92 vs. limit=15.0
+2024-08-26 15:02:26,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.54 vs. limit=15.0
+2024-08-26 15:02:29,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.79 vs. limit=15.0
+2024-08-26 15:02:38,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0
+2024-08-26 15:02:41,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=51456.0, ans=0.0
+2024-08-26 15:02:48,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.43 vs. limit=22.5
+2024-08-26 15:02:50,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.49 vs. limit=22.5
+2024-08-26 15:02:54,327 INFO [train.py:1114] (0/4) Epoch 4, batch 2200, loss[loss=0.2601, simple_loss=0.3169, pruned_loss=0.07376, ctc_loss=0.1397, over 19592.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3203, pruned_loss=0.0856, ctc_loss=0.1596, over 3867008.68 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0
+2024-08-26 15:02:57,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=51562.666666666664, ans=0.125
+2024-08-26 15:03:01,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=51562.666666666664, ans=0.015
+2024-08-26 15:03:05,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0
+2024-08-26 15:03:10,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=51616.0, ans=0.1
+2024-08-26 15:03:22,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=51722.666666666664, ans=0.07
+2024-08-26 15:03:23,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.41 vs. limit=10.0
+2024-08-26 15:03:24,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.66 vs. limit=22.5
+2024-08-26 15:03:33,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51776.0, ans=0.0
+2024-08-26 15:03:34,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=22.5
+2024-08-26 15:03:36,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.424e+02 1.687e+02 1.993e+02 2.251e+02 9.209e+02, threshold=3.987e+02, percent-clipped=2.0
+2024-08-26 15:03:39,207 INFO [train.py:1114] (0/4) Epoch 4, batch 2250, loss[loss=0.2819, simple_loss=0.3297, pruned_loss=0.08407, ctc_loss=0.1649, over 19623.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3208, pruned_loss=0.08572, ctc_loss=0.1601, over 3867747.12 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0
+2024-08-26 15:03:40,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.94 vs. limit=15.0
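The Whitening lines compare a per-module "metric" against a limit; the metric is at least 1.0 and equals 1.0 for perfectly white (decorrelated, equal-variance) features, which is consistent with one standard whiteness measure: metric = d * sum(C**2) / trace(C)**2 for a d x d feature covariance C. Whether icefall's scaling.py computes exactly this is an assumption; a sketch:

    # Sketch of a whiteness metric consistent with the values logged above
    # (>= 1.0, equal to 1.0 for a white covariance). The exact formula used
    # by icefall's Whiten module is an assumption here.
    import torch

    def whitening_metric(x: torch.Tensor) -> float:
        # x: (num_frames, num_channels) activations for one whitening group
        x = x - x.mean(dim=0, keepdim=True)
        cov = (x.t() @ x) / x.shape[0]          # (d, d) covariance estimate
        d = cov.shape[0]
        metric = d * (cov * cov).sum() / cov.diag().sum() ** 2
        return metric.item()

    white = torch.randn(10000, 64)
    assert abs(whitening_metric(white) - 1.0) < 0.1

On this reading, entries such as "metric=22.73 vs. limit=22.5" mark the moments a module's activations drifted far enough from white to exceed the limit and trigger the corrective penalty.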
+2024-08-26 15:03:50,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51882.666666666664, ans=0.1
+2024-08-26 15:03:52,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=51882.666666666664, ans=10.0
+2024-08-26 15:03:52,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=51882.666666666664, ans=0.0
+2024-08-26 15:03:59,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.44 vs. limit=15.0
+2024-08-26 15:04:11,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=51989.333333333336, ans=0.0
+2024-08-26 15:04:15,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=52042.666666666664, ans=0.025
+2024-08-26 15:04:21,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52042.666666666664, ans=0.1
+2024-08-26 15:04:21,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52042.666666666664, ans=0.1
+2024-08-26 15:04:23,339 INFO [train.py:1114] (0/4) Epoch 4, batch 2300, loss[loss=0.253, simple_loss=0.3044, pruned_loss=0.07346, ctc_loss=0.1366, over 19504.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3196, pruned_loss=0.08535, ctc_loss=0.1595, over 3860995.81 frames. ], batch size: 49, lr: 3.06e-02, grad_scale: 32.0
+2024-08-26 15:04:35,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52149.333333333336, ans=0.125
+2024-08-26 15:04:35,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52149.333333333336, ans=0.125
+2024-08-26 15:04:37,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52149.333333333336, ans=0.1
+2024-08-26 15:04:47,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=52202.666666666664, ans=0.0
+2024-08-26 15:04:49,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52256.0, ans=0.1
+2024-08-26 15:05:06,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.800e+02 1.978e+02 2.376e+02 5.904e+02, threshold=3.955e+02, percent-clipped=2.0
+2024-08-26 15:05:09,374 INFO [train.py:1114] (0/4) Epoch 4, batch 2350, loss[loss=0.3077, simple_loss=0.3466, pruned_loss=0.09966, ctc_loss=0.1737, over 19660.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3199, pruned_loss=0.08554, ctc_loss=0.1596, over 3864022.99 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0
+2024-08-26 15:05:13,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=52362.666666666664, ans=0.0
+2024-08-26 15:05:23,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52416.0, ans=0.1
+2024-08-26 15:05:25,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=52469.333333333336, ans=0.125
+2024-08-26 15:05:29,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=52469.333333333336, ans=0.0
+2024-08-26 15:05:36,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=52522.666666666664, ans=6.0
+2024-08-26 15:05:38,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=52522.666666666664, ans=0.0
+2024-08-26 15:05:43,478 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:05:47,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=52576.0, ans=0.1
+2024-08-26 15:05:51,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=52576.0, ans=0.2
+2024-08-26 15:06:03,789 INFO [train.py:1114] (0/4) Epoch 4, batch 2400, loss[loss=0.3055, simple_loss=0.345, pruned_loss=0.09725, ctc_loss=0.1788, over 19401.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3221, pruned_loss=0.0863, ctc_loss=0.1608, over 3858940.05 frames. ], batch size: 67, lr: 3.05e-02, grad_scale: 32.0
+2024-08-26 15:06:06,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.09 vs. limit=6.0
+2024-08-26 15:06:08,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52629.333333333336, ans=0.1
+2024-08-26 15:06:13,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=52682.666666666664, ans=0.05
+2024-08-26 15:06:21,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=52682.666666666664, ans=0.0
+2024-08-26 15:06:21,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52682.666666666664, ans=0.125
+2024-08-26 15:06:24,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=52682.666666666664, ans=22.5
+2024-08-26 15:06:45,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.83 vs. limit=15.0
+2024-08-26 15:06:53,233 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.824e+02 2.127e+02 2.398e+02 5.215e+02, threshold=4.254e+02, percent-clipped=1.0
+2024-08-26 15:06:54,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=52896.0, ans=0.0
+2024-08-26 15:06:55,099 INFO [train.py:1114] (0/4) Epoch 4, batch 2450, loss[loss=0.3918, simple_loss=0.374, pruned_loss=0.1473, ctc_loss=0.2878, over 13118.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3278, pruned_loss=0.09113, ctc_loss=0.17, over 3734912.68 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0
+2024-08-26 15:06:59,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=52896.0, ans=0.05
+2024-08-26 15:07:04,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=52949.333333333336, ans=0.0
+2024-08-26 15:07:04,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52949.333333333336, ans=0.0
+2024-08-26 15:07:04,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52949.333333333336, ans=0.125
+2024-08-26 15:07:08,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=52949.333333333336, ans=0.2
+2024-08-26 15:07:12,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=53002.666666666664, ans=0.2
+2024-08-26 15:07:13,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53002.666666666664, ans=0.0
+2024-08-26 15:07:18,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=53002.666666666664, ans=0.0
+2024-08-26 15:07:29,590 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-4.pt
+2024-08-26 15:09:12,282 INFO [train.py:1114] (0/4) Epoch 5, batch 0, loss[loss=0.2637, simple_loss=0.302, pruned_loss=0.08211, ctc_loss=0.1531, over 19423.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.302, pruned_loss=0.08211, ctc_loss=0.1531, over 19423.00 frames. ], batch size: 48, lr: 2.83e-02, grad_scale: 32.0
+2024-08-26 15:09:12,284 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-26 15:09:22,083 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.2289, simple_loss=0.3118, pruned_loss=0.05352, ctc_loss=0.09739, over 944034.00 frames.
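The checkpoint.py and train.py entries above show the end-of-epoch pattern: save exp/epoch-N.pt, then compute a frame-weighted validation loss before resuming training. A minimal sketch of that loop; the helper names and signatures are illustrative, not icefall's actual API:

    # Sketch of the end-of-epoch save-then-validate step logged above.
    # compute_loss is a hypothetical helper standing in for the real one.
    import torch

    def compute_loss(model, batch):
        # hypothetical: returns (loss tensor, number of frames in the batch)
        feats, feat_lens, targets = batch
        loss = model(feats, feat_lens, targets)
        return loss, float(feat_lens.sum())

    def end_of_epoch(model, optimizer, valid_loader, epoch, exp_dir):
        torch.save(
            {"model": model.state_dict(),
             "optimizer": optimizer.state_dict(),
             "epoch": epoch},
            f"{exp_dir}/epoch-{epoch}.pt",
        )
        model.eval()
        tot_loss, tot_frames = 0.0, 0.0
        with torch.no_grad():
            for batch in valid_loader:
                loss, num_frames = compute_loss(model, batch)
                tot_loss += loss.item() * num_frames
                tot_frames += num_frames
        model.train()
        # frame-weighted average, like "validation: loss=0.2289 ... over 944034.00 frames"
        return tot_loss / tot_frames

Weighting by frames rather than by batches matches the log's "over N frames" bookkeeping, so long and short utterances contribute proportionally.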
+2024-08-26 15:09:22,763 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12775MB
+2024-08-26 15:09:30,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=53104.0, ans=0.2
+2024-08-26 15:09:37,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=53157.333333333336, ans=0.0
+2024-08-26 15:09:41,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53210.666666666664, ans=0.1
+2024-08-26 15:09:50,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=53264.0, ans=0.125
+2024-08-26 15:09:54,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=53264.0, ans=0.0
+2024-08-26 15:10:10,885 INFO [train.py:1114] (0/4) Epoch 5, batch 50, loss[loss=0.2436, simple_loss=0.2952, pruned_loss=0.06917, ctc_loss=0.1338, over 19753.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3261, pruned_loss=0.08796, ctc_loss=0.1656, over 844155.08 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0
+2024-08-26 15:10:11,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=53370.666666666664, ans=0.0
+2024-08-26 15:10:22,327 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.804e+02 2.028e+02 2.297e+02 4.038e+02, threshold=4.056e+02, percent-clipped=0.0
+2024-08-26 15:10:45,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-26 15:10:45,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53530.666666666664, ans=0.125
+2024-08-26 15:10:49,545 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.14 vs. limit=15.0
+2024-08-26 15:11:01,253 INFO [train.py:1114] (0/4) Epoch 5, batch 100, loss[loss=0.2461, simple_loss=0.2941, pruned_loss=0.07249, ctc_loss=0.1326, over 19724.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3252, pruned_loss=0.0873, ctc_loss=0.1636, over 1498403.14 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0
+2024-08-26 15:11:13,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=53690.666666666664, ans=0.125
+2024-08-26 15:11:16,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0
+2024-08-26 15:11:19,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0
+2024-08-26 15:11:29,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.89 vs. limit=22.5
+2024-08-26 15:11:30,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=53744.0, ans=0.2
+2024-08-26 15:11:34,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=53797.333333333336, ans=0.0
+2024-08-26 15:11:59,572 INFO [train.py:1114] (0/4) Epoch 5, batch 150, loss[loss=0.267, simple_loss=0.2947, pruned_loss=0.08802, ctc_loss=0.158, over 19741.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3205, pruned_loss=0.08447, ctc_loss=0.1584, over 2028143.16 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0
+2024-08-26 15:12:07,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=53904.0, ans=0.0
+2024-08-26 15:12:10,027 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.696e+02 1.862e+02 2.172e+02 3.492e+02, threshold=3.724e+02, percent-clipped=0.0
+2024-08-26 15:12:14,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=53957.333333333336, ans=0.125
+2024-08-26 15:12:19,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.46 vs. limit=22.5
+2024-08-26 15:12:29,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=54064.0, ans=0.2
+2024-08-26 15:12:30,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=54064.0, ans=0.125
+2024-08-26 15:12:44,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=54117.333333333336, ans=0.0
+2024-08-26 15:12:48,444 INFO [train.py:1114] (0/4) Epoch 5, batch 200, loss[loss=0.3062, simple_loss=0.339, pruned_loss=0.1003, ctc_loss=0.1818, over 18194.00 frames. ], tot_loss[loss=0.2737, simple_loss=0.3183, pruned_loss=0.08334, ctc_loss=0.1559, over 2436360.97 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:12:51,926 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.66 vs. limit=15.0
+2024-08-26 15:12:54,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.07 vs. limit=22.5
+2024-08-26 15:12:58,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=54224.0, ans=0.125
+2024-08-26 15:13:01,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54224.0, ans=0.1
+2024-08-26 15:13:16,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.32 vs. limit=10.0
+2024-08-26 15:13:20,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0
+2024-08-26 15:13:29,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=54330.666666666664, ans=0.2
+2024-08-26 15:13:37,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.15 vs. limit=12.0
+2024-08-26 15:13:41,977 INFO [train.py:1114] (0/4) Epoch 5, batch 250, loss[loss=0.279, simple_loss=0.3269, pruned_loss=0.08501, ctc_loss=0.1528, over 19428.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3169, pruned_loss=0.08228, ctc_loss=0.1543, over 2757087.79 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:13:50,513 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.685e+02 1.803e+02 2.078e+02 3.456e+02, threshold=3.607e+02, percent-clipped=0.0
+2024-08-26 15:13:59,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.18 vs. limit=22.5
+2024-08-26 15:14:03,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=54544.0, ans=0.0
+2024-08-26 15:14:16,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=54597.333333333336, ans=0.125
+2024-08-26 15:14:18,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=54597.333333333336, ans=0.125
+2024-08-26 15:14:32,302 INFO [train.py:1114] (0/4) Epoch 5, batch 300, loss[loss=0.3051, simple_loss=0.3436, pruned_loss=0.09675, ctc_loss=0.1828, over 19541.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3167, pruned_loss=0.08216, ctc_loss=0.1541, over 3002184.15 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:15:01,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=6.0
+2024-08-26 15:15:16,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=54917.333333333336, ans=0.0
+2024-08-26 15:15:22,164 INFO [train.py:1114] (0/4) Epoch 5, batch 350, loss[loss=0.2666, simple_loss=0.3101, pruned_loss=0.08178, ctc_loss=0.1492, over 19769.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3168, pruned_loss=0.08204, ctc_loss=0.1538, over 3191174.97 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 32.0
+2024-08-26 15:15:23,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=54970.666666666664, ans=0.1
+2024-08-26 15:15:27,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54970.666666666664, ans=0.1
+2024-08-26 15:15:31,764 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.717e+02 1.933e+02 2.233e+02 3.797e+02, threshold=3.865e+02, percent-clipped=1.0
+2024-08-26 15:15:47,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.53 vs. limit=12.0
+2024-08-26 15:15:48,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=55077.333333333336, ans=0.1
+2024-08-26 15:15:48,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55077.333333333336, ans=0.125
+2024-08-26 15:16:15,655 INFO [train.py:1114] (0/4) Epoch 5, batch 400, loss[loss=0.2796, simple_loss=0.329, pruned_loss=0.08303, ctc_loss=0.1605, over 19499.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3164, pruned_loss=0.08172, ctc_loss=0.1532, over 3343119.31 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0
+2024-08-26 15:16:17,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55237.333333333336, ans=0.1
+2024-08-26 15:16:24,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.47 vs. limit=15.0
+2024-08-26 15:16:25,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.72 vs. limit=10.0
+2024-08-26 15:16:44,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=55397.333333333336, ans=0.125
+2024-08-26 15:16:51,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55397.333333333336, ans=0.1
+2024-08-26 15:17:07,113 INFO [train.py:1114] (0/4) Epoch 5, batch 450, loss[loss=0.2415, simple_loss=0.3013, pruned_loss=0.06573, ctc_loss=0.1258, over 19599.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3161, pruned_loss=0.0817, ctc_loss=0.1532, over 3450619.45 frames. ], batch size: 55, lr: 2.79e-02, grad_scale: 16.0
+2024-08-26 15:17:11,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=55504.0, ans=0.125
+2024-08-26 15:17:17,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.642e+02 1.899e+02 2.179e+02 3.523e+02, threshold=3.798e+02, percent-clipped=0.0
+2024-08-26 15:17:17,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=55557.333333333336, ans=0.0
+2024-08-26 15:17:20,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=55557.333333333336, ans=0.125
+2024-08-26 15:17:25,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55610.666666666664, ans=0.1
+2024-08-26 15:17:26,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=55610.666666666664, ans=0.0
+2024-08-26 15:17:35,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=55664.0, ans=0.125
+2024-08-26 15:17:37,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.90 vs. limit=15.0
+2024-08-26 15:17:48,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=55717.333333333336, ans=0.0
+2024-08-26 15:18:04,472 INFO [train.py:1114] (0/4) Epoch 5, batch 500, loss[loss=0.288, simple_loss=0.3297, pruned_loss=0.09088, ctc_loss=0.1614, over 19665.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3155, pruned_loss=0.08122, ctc_loss=0.1522, over 3546264.51 frames. ], batch size: 63, lr: 2.79e-02, grad_scale: 16.0
+2024-08-26 15:18:13,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55770.666666666664, ans=0.1
+2024-08-26 15:18:13,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=55770.666666666664, ans=0.0
+2024-08-26 15:18:20,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.47 vs. limit=15.0
+2024-08-26 15:18:41,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=55877.333333333336, ans=0.125
+2024-08-26 15:18:43,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55877.333333333336, ans=0.125
+2024-08-26 15:18:51,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55877.333333333336, ans=0.0
+2024-08-26 15:18:54,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=55930.666666666664, ans=0.2
+2024-08-26 15:19:06,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55930.666666666664, ans=0.125
+2024-08-26 15:19:14,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=55930.666666666664, ans=0.5
+2024-08-26 15:19:46,989 INFO [train.py:1114] (0/4) Epoch 5, batch 550, loss[loss=0.3083, simple_loss=0.3462, pruned_loss=0.09894, ctc_loss=0.1815, over 19337.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3154, pruned_loss=0.0814, ctc_loss=0.1522, over 3609162.31 frames. ], batch size: 71, lr: 2.78e-02, grad_scale: 16.0
+2024-08-26 15:20:03,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=56090.666666666664, ans=0.125
+2024-08-26 15:20:04,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.339e+02 1.676e+02 1.860e+02 2.053e+02 4.118e+02, threshold=3.720e+02, percent-clipped=1.0
+2024-08-26 15:20:43,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=56197.333333333336, ans=0.09899494936611666
+2024-08-26 15:20:56,217 INFO [train.py:1114] (0/4) Epoch 5, batch 600, loss[loss=0.3025, simple_loss=0.3443, pruned_loss=0.09394, ctc_loss=0.182, over 19446.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3159, pruned_loss=0.08155, ctc_loss=0.1526, over 3667103.29 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 16.0
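Each train.py progress entry above reports both a per-batch loss ("loss[... over 19665.00 frames.]") and a running, frame-weighted aggregate ("tot_loss[... over 3546264.51 frames.]") whose frame count grows through the epoch and then levels off. A sketch of one accumulator with that behavior; the exponential-forgetting rule and decay constant are guesses, not icefall's confirmed implementation:

    # Sketch of a frame-weighted running loss like tot_loss in the log.
    class RunningLoss:
        def __init__(self, decay: float = 0.995):
            self.decay = decay       # illustrative; the real constant is unknown
            self.loss_sum = 0.0
            self.frames = 0.0

        def update(self, batch_loss: float, batch_frames: float) -> float:
            # exponentially forget old batches, then fold in the new one
            self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
            self.frames = self.frames * self.decay + batch_frames
            return self.loss_sum / self.frames  # frame-weighted average

Early in an epoch the reported frame total grows with each batch; once the window saturates, it hovers near a steady-state value, matching the plateau around 3.8M frames seen in epoch 4 above.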
+2024-08-26 15:20:58,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=56304.0, ans=0.125
+2024-08-26 15:21:00,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56304.0, ans=0.1
+2024-08-26 15:21:05,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=56304.0, ans=0.07
+2024-08-26 15:21:09,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56357.333333333336, ans=0.1
+2024-08-26 15:21:15,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=56357.333333333336, ans=0.2
+2024-08-26 15:21:16,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.70 vs. limit=15.0
+2024-08-26 15:21:29,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=56464.0, ans=0.125
+2024-08-26 15:21:40,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=56517.333333333336, ans=22.5
+2024-08-26 15:21:49,363 INFO [train.py:1114] (0/4) Epoch 5, batch 650, loss[loss=0.2688, simple_loss=0.322, pruned_loss=0.07961, ctc_loss=0.1409, over 19760.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3146, pruned_loss=0.08066, ctc_loss=0.1509, over 3717918.01 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:21:59,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56624.0, ans=0.1
+2024-08-26 15:21:59,896 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.659e+02 1.803e+02 2.095e+02 3.596e+02, threshold=3.607e+02, percent-clipped=0.0
+2024-08-26 15:22:22,336 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.06 vs. limit=15.0
+2024-08-26 15:22:33,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=56784.0, ans=0.025
+2024-08-26 15:22:35,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=56784.0, ans=0.0
+2024-08-26 15:22:39,296 INFO [train.py:1114] (0/4) Epoch 5, batch 700, loss[loss=0.233, simple_loss=0.2882, pruned_loss=0.06522, ctc_loss=0.1183, over 19720.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3151, pruned_loss=0.08103, ctc_loss=0.1514, over 3749766.63 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:22:58,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=56944.0, ans=0.125
+2024-08-26 15:23:12,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=56997.333333333336, ans=0.125
+2024-08-26 15:23:26,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57050.666666666664, ans=0.125
+2024-08-26 15:23:27,622 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:23:28,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57104.0, ans=0.125
+2024-08-26 15:23:29,295 INFO [train.py:1114] (0/4) Epoch 5, batch 750, loss[loss=0.2643, simple_loss=0.3238, pruned_loss=0.07339, ctc_loss=0.1453, over 19841.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3149, pruned_loss=0.08111, ctc_loss=0.1519, over 3776345.09 frames. ], batch size: 55, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:23:39,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.732e+02 1.957e+02 2.375e+02 6.184e+02, threshold=3.914e+02, percent-clipped=3.0
+2024-08-26 15:23:45,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=57157.333333333336, ans=0.125
+2024-08-26 15:24:19,566 INFO [train.py:1114] (0/4) Epoch 5, batch 800, loss[loss=0.2195, simple_loss=0.2853, pruned_loss=0.05465, ctc_loss=0.1107, over 19430.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3149, pruned_loss=0.08098, ctc_loss=0.1515, over 3797007.90 frames. ], batch size: 48, lr: 2.76e-02, grad_scale: 32.0
+2024-08-26 15:24:21,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=57370.666666666664, ans=0.125
+2024-08-26 15:24:44,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57477.333333333336, ans=0.0
+2024-08-26 15:25:02,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.21 vs. limit=22.5
+2024-08-26 15:25:10,633 INFO [train.py:1114] (0/4) Epoch 5, batch 850, loss[loss=0.3022, simple_loss=0.3462, pruned_loss=0.09443, ctc_loss=0.1733, over 19660.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3142, pruned_loss=0.08081, ctc_loss=0.151, over 3815725.18 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0
+2024-08-26 15:25:13,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=57637.333333333336, ans=0.0
+2024-08-26 15:25:24,574 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.744e+02 1.971e+02 2.331e+02 4.591e+02, threshold=3.942e+02, percent-clipped=1.0
+2024-08-26 15:25:30,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57690.666666666664, ans=0.1
+2024-08-26 15:26:03,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=57850.666666666664, ans=0.05
+2024-08-26 15:26:06,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=57904.0, ans=0.07
+2024-08-26 15:26:07,575 INFO [train.py:1114] (0/4) Epoch 5, batch 900, loss[loss=0.2372, simple_loss=0.2901, pruned_loss=0.06729, ctc_loss=0.1241, over 19403.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3145, pruned_loss=0.08113, ctc_loss=0.1514, over 3818513.59 frames. ], batch size: 48, lr: 2.75e-02, grad_scale: 32.0
+2024-08-26 15:26:12,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=57904.0, ans=0.0
+2024-08-26 15:26:25,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=57957.333333333336, ans=0.2
+2024-08-26 15:26:49,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.10 vs. limit=15.0
+2024-08-26 15:26:53,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=12.0
+2024-08-26 15:26:58,228 INFO [train.py:1114] (0/4) Epoch 5, batch 950, loss[loss=0.2325, simple_loss=0.2851, pruned_loss=0.06515, ctc_loss=0.1238, over 19477.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3151, pruned_loss=0.08173, ctc_loss=0.1523, over 3820323.62 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0
+2024-08-26 15:27:01,309 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:27:03,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=58170.666666666664, ans=0.5
+2024-08-26 15:27:11,439 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.648e+02 1.859e+02 2.135e+02 3.098e+02, threshold=3.718e+02, percent-clipped=0.0
+2024-08-26 15:27:14,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=58224.0, ans=0.05
+2024-08-26 15:27:15,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.06 vs.
limit=10.0 +2024-08-26 15:27:16,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=58224.0, ans=0.125 +2024-08-26 15:27:19,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=58277.333333333336, ans=0.025 +2024-08-26 15:27:43,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58384.0, ans=0.0 +2024-08-26 15:27:43,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=58384.0, ans=0.0 +2024-08-26 15:27:49,896 INFO [train.py:1114] (0/4) Epoch 5, batch 1000, loss[loss=0.282, simple_loss=0.3214, pruned_loss=0.08812, ctc_loss=0.1656, over 19842.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3157, pruned_loss=0.08198, ctc_loss=0.1529, over 3816207.50 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0 +2024-08-26 15:27:53,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58437.333333333336, ans=0.125 +2024-08-26 15:27:54,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=58437.333333333336, ans=0.0 +2024-08-26 15:27:54,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.32 vs. limit=10.0 +2024-08-26 15:27:59,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58490.666666666664, ans=0.1 +2024-08-26 15:28:40,058 INFO [train.py:1114] (0/4) Epoch 5, batch 1050, loss[loss=0.2749, simple_loss=0.3239, pruned_loss=0.08152, ctc_loss=0.1573, over 19842.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.315, pruned_loss=0.08157, ctc_loss=0.1525, over 3823196.06 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0 +2024-08-26 15:28:50,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=58757.333333333336, ans=0.125 +2024-08-26 15:28:50,853 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.680e+02 1.893e+02 2.161e+02 3.731e+02, threshold=3.786e+02, percent-clipped=1.0 +2024-08-26 15:28:54,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=58757.333333333336, ans=0.125 +2024-08-26 15:29:01,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=15.0 +2024-08-26 15:29:02,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-08-26 15:29:03,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=58810.666666666664, ans=0.0 +2024-08-26 15:29:24,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58917.333333333336, ans=0.0 +2024-08-26 15:29:33,663 INFO [train.py:1114] (0/4) Epoch 5, batch 1100, loss[loss=0.2413, simple_loss=0.291, pruned_loss=0.06948, ctc_loss=0.1315, over 19566.00 frames. 
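The WARNING lines from optim.py report the spread of recent gradient norms (the five values read as min / 25% / 50% / 75% / max), the clipping threshold currently in effect, and how often clipping has fired. Below is a hedged sketch of median-based clipping with those diagnostics; the factor 2.0 echoes Clipping_scale=2.0 in the log, but the bookkeeping is simplified relative to what icefall's optimizer actually does.

```python
# Hedged sketch of gradient clipping with quantile diagnostics, mirroring the
# "grad-norm quartiles ... threshold=... percent-clipped=..." warnings above.
# Simplified relative to icefall's optim.py; the logged quantities are the same in spirit.
import torch

class GradNormClipper:
    def __init__(self, clipping_scale=2.0, history=128):
        self.clipping_scale = clipping_scale
        self.history = history
        self.norms = []            # recent per-batch gradient norms
        self.num_clipped = 0
        self.num_batches = 0

    def clip_(self, parameters):
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms = (self.norms + [norm])[-self.history:]
        self.num_batches += 1
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()   # scale times the median
        if norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        pct = 100.0 * self.num_clipped / self.num_batches
        print(f"grad-norm quartiles {[f'{v:.3e}' for v in q.tolist()]}, "
              f"threshold={threshold:.3e}, percent-clipped={pct:.1f}")
```

In use, something like `clipper.clip_(model.parameters())` would sit between `loss.backward()` and `optimizer.step()`.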
], tot_loss[loss=0.2682, simple_loss=0.3141, pruned_loss=0.08088, ctc_loss=0.1514, over 3831654.08 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 16.0 +2024-08-26 15:29:35,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.42 vs. limit=15.0 +2024-08-26 15:29:51,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=59077.333333333336, ans=0.2 +2024-08-26 15:29:55,664 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:30:08,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=59130.666666666664, ans=0.0 +2024-08-26 15:30:18,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59184.0, ans=0.1 +2024-08-26 15:30:21,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.70 vs. limit=22.5 +2024-08-26 15:30:24,311 INFO [train.py:1114] (0/4) Epoch 5, batch 1150, loss[loss=0.2516, simple_loss=0.2958, pruned_loss=0.07563, ctc_loss=0.1406, over 19606.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3143, pruned_loss=0.08098, ctc_loss=0.1513, over 3829563.53 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 16.0 +2024-08-26 15:30:24,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=59237.333333333336, ans=0.025 +2024-08-26 15:30:34,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=59290.666666666664, ans=0.1 +2024-08-26 15:30:35,926 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.591e+02 1.744e+02 2.042e+02 4.394e+02, threshold=3.489e+02, percent-clipped=2.0 +2024-08-26 15:30:37,158 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:30:38,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59290.666666666664, ans=0.1 +2024-08-26 15:30:45,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.35 vs. limit=12.0 +2024-08-26 15:30:53,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=59397.333333333336, ans=0.0 +2024-08-26 15:31:06,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.06 vs. limit=15.0 +2024-08-26 15:31:06,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=59450.666666666664, ans=0.0 +2024-08-26 15:31:15,286 INFO [train.py:1114] (0/4) Epoch 5, batch 1200, loss[loss=0.2621, simple_loss=0.3153, pruned_loss=0.07565, ctc_loss=0.1438, over 19839.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3153, pruned_loss=0.08142, ctc_loss=0.1522, over 3824947.14 frames. 
], batch size: 57, lr: 2.73e-02, grad_scale: 32.0 +2024-08-26 15:31:24,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.89 vs. limit=15.0 +2024-08-26 15:31:36,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.59 vs. limit=15.0 +2024-08-26 15:31:43,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59664.0, ans=0.125 +2024-08-26 15:31:53,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=59717.333333333336, ans=0.125 +2024-08-26 15:31:53,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59717.333333333336, ans=0.1 +2024-08-26 15:32:06,262 INFO [train.py:1114] (0/4) Epoch 5, batch 1250, loss[loss=0.2984, simple_loss=0.3385, pruned_loss=0.09625, ctc_loss=0.1643, over 19557.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3152, pruned_loss=0.08105, ctc_loss=0.1514, over 3842931.87 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0 +2024-08-26 15:32:18,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.635e+02 1.798e+02 2.001e+02 4.301e+02, threshold=3.596e+02, percent-clipped=1.0 +2024-08-26 15:32:48,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=15.0 +2024-08-26 15:32:55,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-08-26 15:32:56,445 INFO [train.py:1114] (0/4) Epoch 5, batch 1300, loss[loss=0.3065, simple_loss=0.3396, pruned_loss=0.09962, ctc_loss=0.1855, over 18839.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.314, pruned_loss=0.08029, ctc_loss=0.1499, over 3846267.59 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0 +2024-08-26 15:32:56,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=60037.333333333336, ans=0.0 +2024-08-26 15:33:05,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=60090.666666666664, ans=15.0 +2024-08-26 15:33:06,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=60090.666666666664, ans=0.025 +2024-08-26 15:33:11,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60090.666666666664, ans=0.125 +2024-08-26 15:33:16,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=60144.0, ans=0.04949747468305833 +2024-08-26 15:33:28,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=60197.333333333336, ans=0.0 +2024-08-26 15:33:43,911 INFO [train.py:1114] (0/4) Epoch 5, batch 1350, loss[loss=0.245, simple_loss=0.3031, pruned_loss=0.06812, ctc_loss=0.1269, over 19751.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3135, pruned_loss=0.07963, ctc_loss=0.1486, over 3857861.13 frames. 
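The Whitening lines ("metric=X vs. limit=Y") track how far a layer's channel covariance is from a multiple of the identity; a penalty only engages once the metric exceeds the limit. Here is a sketch of one such metric, normalized so that perfectly white activations score 1.0 — the exact formulation in zipformer's scaling.py may differ in detail.

```python
# Sketch of a whitening metric like the "Whitening: ... metric=X vs. limit=Y"
# entries above: 1.0 when the channel covariance is a multiple of the identity,
# growing as channels become correlated or unequal in scale.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels) activations."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]                  # channel covariance
    n = cov.shape[0]
    return (n * (cov ** 2).sum() / cov.trace() ** 2).item()

white = torch.randn(10000, 512)
collapsed = white[:, :1].expand(-1, 512)          # every channel identical
print(whitening_metric(white))      # close to 1.0, well under a limit like 15.0
print(whitening_metric(collapsed))  # about 512: maximally non-white
```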
], batch size: 54, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:33:47,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=60304.0, ans=0.125 +2024-08-26 15:33:54,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.70 vs. limit=22.5 +2024-08-26 15:33:55,391 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.610e+02 1.752e+02 1.989e+02 4.527e+02, threshold=3.503e+02, percent-clipped=1.0 +2024-08-26 15:34:04,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.94 vs. limit=10.0 +2024-08-26 15:34:09,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.93 vs. limit=22.5 +2024-08-26 15:34:13,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=60410.666666666664, ans=0.09899494936611666 +2024-08-26 15:34:15,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60464.0, ans=0.1 +2024-08-26 15:34:15,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=60464.0, ans=0.2 +2024-08-26 15:34:18,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.39 vs. limit=10.0 +2024-08-26 15:34:20,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=60464.0, ans=0.125 +2024-08-26 15:34:29,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=60517.333333333336, ans=0.125 +2024-08-26 15:34:30,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=60517.333333333336, ans=0.025 +2024-08-26 15:34:30,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60517.333333333336, ans=0.1 +2024-08-26 15:34:32,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=60517.333333333336, ans=0.125 +2024-08-26 15:34:34,576 INFO [train.py:1114] (0/4) Epoch 5, batch 1400, loss[loss=0.2404, simple_loss=0.2826, pruned_loss=0.07286, ctc_loss=0.131, over 19661.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3127, pruned_loss=0.0794, ctc_loss=0.148, over 3864258.34 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:34:37,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=60570.666666666664, ans=0.2 +2024-08-26 15:34:47,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.22 vs. limit=10.0 +2024-08-26 15:34:55,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.29 vs. 
limit=15.0 +2024-08-26 15:35:17,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.55 vs. limit=15.0 +2024-08-26 15:35:21,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.54 vs. limit=10.0 +2024-08-26 15:35:27,728 INFO [train.py:1114] (0/4) Epoch 5, batch 1450, loss[loss=0.2757, simple_loss=0.3256, pruned_loss=0.08131, ctc_loss=0.1579, over 19665.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.314, pruned_loss=0.07995, ctc_loss=0.1491, over 3862292.07 frames. ], batch size: 63, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:35:42,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.680e+02 1.820e+02 2.123e+02 3.172e+02, threshold=3.639e+02, percent-clipped=0.0 +2024-08-26 15:35:51,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60944.0, ans=0.125 +2024-08-26 15:35:56,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=60944.0, ans=0.125 +2024-08-26 15:35:56,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=60944.0, ans=0.125 +2024-08-26 15:35:56,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60944.0, ans=0.0 +2024-08-26 15:36:00,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=60997.333333333336, ans=0.125 +2024-08-26 15:36:19,831 INFO [train.py:1114] (0/4) Epoch 5, batch 1500, loss[loss=0.2468, simple_loss=0.3122, pruned_loss=0.06618, ctc_loss=0.1229, over 19582.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3137, pruned_loss=0.07957, ctc_loss=0.1486, over 3862535.21 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0 +2024-08-26 15:36:24,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=61104.0, ans=0.2 +2024-08-26 15:36:25,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=61104.0, ans=0.0 +2024-08-26 15:36:43,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=61210.666666666664, ans=0.2 +2024-08-26 15:36:45,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=61210.666666666664, ans=0.125 +2024-08-26 15:36:59,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=61317.333333333336, ans=0.125 +2024-08-26 15:37:09,970 INFO [train.py:1114] (0/4) Epoch 5, batch 1550, loss[loss=0.2885, simple_loss=0.3278, pruned_loss=0.0911, ctc_loss=0.1677, over 19603.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3141, pruned_loss=0.08003, ctc_loss=0.1495, over 3847063.11 frames. 
], batch size: 60, lr: 2.70e-02, grad_scale: 16.0 +2024-08-26 15:37:16,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=61370.666666666664, ans=0.2 +2024-08-26 15:37:22,481 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.752e+02 1.975e+02 2.269e+02 3.644e+02, threshold=3.951e+02, percent-clipped=1.0 +2024-08-26 15:37:25,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61424.0, ans=0.125 +2024-08-26 15:37:38,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=61530.666666666664, ans=0.125 +2024-08-26 15:38:03,723 INFO [train.py:1114] (0/4) Epoch 5, batch 1600, loss[loss=0.2667, simple_loss=0.3193, pruned_loss=0.07789, ctc_loss=0.1457, over 19839.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3144, pruned_loss=0.08064, ctc_loss=0.1508, over 3836525.65 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:38:07,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61637.333333333336, ans=0.125 +2024-08-26 15:38:26,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=19.53 vs. limit=15.0 +2024-08-26 15:38:44,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61744.0, ans=0.125 +2024-08-26 15:38:47,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=12.0 +2024-08-26 15:38:54,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61744.0, ans=0.1 +2024-08-26 15:39:09,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=61850.666666666664, ans=0.125 +2024-08-26 15:39:19,221 INFO [train.py:1114] (0/4) Epoch 5, batch 1650, loss[loss=0.2682, simple_loss=0.3233, pruned_loss=0.07635, ctc_loss=0.1509, over 19640.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3146, pruned_loss=0.08076, ctc_loss=0.151, over 3833447.05 frames. 
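Each train.py line above pairs the current batch's loss breakdown (loss, simple_loss, pruned_loss, ctc_loss — what looks like transducer plus CTC components in this recipe) with a running tot_loss "over N frames": the totals are frame-weighted averages over everything seen so far. A small sketch of that bookkeeping, seeded with the batch-500 figures from this log plus a made-up second batch:

```python
# Sketch of the frame-weighted running averages behind the "tot_loss[...]"
# figures above; the second update uses made-up numbers for illustration.
class LossTracker:
    def __init__(self):
        self.sums = {}        # component name -> frame-weighted sum
        self.frames = 0.0

    def update(self, batch_losses: dict, num_frames: float):
        for k, v in batch_losses.items():
            self.sums[k] = self.sums.get(k, 0.0) + v * num_frames
        self.frames += num_frames

    def averages(self) -> dict:
        return {k: s / self.frames for k, s in self.sums.items()}

tracker = LossTracker()
tracker.update({"loss": 0.288, "simple_loss": 0.3297,
                "pruned_loss": 0.09088, "ctc_loss": 0.1614}, num_frames=19665)
tracker.update({"loss": 0.250, "simple_loss": 0.3000,     # made-up batch
                "pruned_loss": 0.07500, "ctc_loss": 0.1400}, num_frames=21000)
print(tracker.averages())   # frame-weighted, like tot_loss[... over N frames]
```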
], batch size: 59, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:39:20,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=61904.0, ans=15.0 +2024-08-26 15:39:24,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=61904.0, ans=0.125 +2024-08-26 15:39:27,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=61904.0, ans=0.125 +2024-08-26 15:39:29,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=61957.333333333336, ans=0.125 +2024-08-26 15:39:30,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=61957.333333333336, ans=0.1 +2024-08-26 15:39:31,760 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.578e+02 1.738e+02 2.103e+02 3.628e+02, threshold=3.475e+02, percent-clipped=0.0 +2024-08-26 15:39:33,876 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:39:33,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-08-26 15:39:34,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.29 vs. limit=22.5 +2024-08-26 15:39:50,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.47 vs. limit=22.5 +2024-08-26 15:39:56,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.07 vs. limit=15.0 +2024-08-26 15:39:58,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62117.333333333336, ans=0.1 +2024-08-26 15:40:03,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=62117.333333333336, ans=0.2 +2024-08-26 15:40:08,740 INFO [train.py:1114] (0/4) Epoch 5, batch 1700, loss[loss=0.2179, simple_loss=0.2682, pruned_loss=0.06001, ctc_loss=0.1187, over 19665.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3135, pruned_loss=0.07979, ctc_loss=0.1493, over 3847846.32 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:40:13,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. 
limit=6.0 +2024-08-26 15:40:13,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=62170.666666666664, ans=0.125 +2024-08-26 15:40:41,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=62330.666666666664, ans=0.125 +2024-08-26 15:40:41,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=62330.666666666664, ans=0.0 +2024-08-26 15:40:44,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=62384.0, ans=0.025 +2024-08-26 15:40:54,052 INFO [train.py:1114] (0/4) Epoch 5, batch 1750, loss[loss=0.2234, simple_loss=0.2713, pruned_loss=0.06411, ctc_loss=0.1184, over 19655.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3125, pruned_loss=0.07913, ctc_loss=0.148, over 3852781.90 frames. ], batch size: 45, lr: 2.68e-02, grad_scale: 32.0 +2024-08-26 15:41:00,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=62437.333333333336, ans=0.125 +2024-08-26 15:41:05,733 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.599e+02 1.842e+02 2.097e+02 3.191e+02, threshold=3.683e+02, percent-clipped=0.0 +2024-08-26 15:41:15,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.44 vs. limit=15.0 +2024-08-26 15:41:19,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.34 vs. limit=10.0 +2024-08-26 15:41:36,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-08-26 15:41:39,328 INFO [train.py:1114] (0/4) Epoch 5, batch 1800, loss[loss=0.2563, simple_loss=0.3069, pruned_loss=0.07446, ctc_loss=0.1422, over 19613.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.313, pruned_loss=0.07938, ctc_loss=0.1485, over 3853809.76 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0 +2024-08-26 15:41:45,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62704.0, ans=0.1 +2024-08-26 15:41:48,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.48 vs. limit=22.5 +2024-08-26 15:41:51,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=62757.333333333336, ans=0.0 +2024-08-26 15:42:07,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5 +2024-08-26 15:42:09,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-08-26 15:42:19,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=62917.333333333336, ans=10.0 +2024-08-26 15:42:20,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.32 vs. 
limit=15.0 +2024-08-26 15:42:24,245 INFO [train.py:1114] (0/4) Epoch 5, batch 1850, loss[loss=0.2635, simple_loss=0.315, pruned_loss=0.07704, ctc_loss=0.1449, over 19569.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3126, pruned_loss=0.07893, ctc_loss=0.1476, over 3856524.31 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 32.0 +2024-08-26 15:42:27,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62970.666666666664, ans=0.125 +2024-08-26 15:42:30,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62970.666666666664, ans=0.1 +2024-08-26 15:42:31,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=62970.666666666664, ans=0.0 +2024-08-26 15:42:35,859 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.605e+02 1.818e+02 2.016e+02 3.945e+02, threshold=3.637e+02, percent-clipped=1.0 +2024-08-26 15:42:44,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63077.333333333336, ans=0.1 +2024-08-26 15:42:53,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=63077.333333333336, ans=0.125 +2024-08-26 15:42:53,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.54 vs. limit=15.0 +2024-08-26 15:42:59,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63130.666666666664, ans=0.125 +2024-08-26 15:43:16,393 INFO [train.py:1114] (0/4) Epoch 5, batch 1900, loss[loss=0.269, simple_loss=0.3232, pruned_loss=0.07824, ctc_loss=0.1459, over 19652.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.313, pruned_loss=0.07903, ctc_loss=0.1479, over 3860781.12 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 16.0 +2024-08-26 15:43:17,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=63237.333333333336, ans=0.125 +2024-08-26 15:43:24,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=63237.333333333336, ans=0.0 +2024-08-26 15:43:37,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63290.666666666664, ans=0.1 +2024-08-26 15:43:39,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=63344.0, ans=0.125 +2024-08-26 15:43:48,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=63397.333333333336, ans=0.125 +2024-08-26 15:44:05,667 INFO [train.py:1114] (0/4) Epoch 5, batch 1950, loss[loss=0.252, simple_loss=0.3051, pruned_loss=0.07317, ctc_loss=0.1314, over 19569.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.314, pruned_loss=0.07918, ctc_loss=0.148, over 3869409.36 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 16.0 +2024-08-26 15:44:07,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63504.0, ans=0.1 +2024-08-26 15:44:12,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63504.0, ans=0.1 +2024-08-26 15:44:20,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.677e+02 1.824e+02 1.963e+02 3.212e+02, threshold=3.647e+02, percent-clipped=0.0 +2024-08-26 15:44:21,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0 +2024-08-26 15:44:23,886 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:44:24,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=63610.666666666664, ans=0.0 +2024-08-26 15:44:26,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63610.666666666664, ans=0.1 +2024-08-26 15:44:27,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.98 vs. limit=22.5 +2024-08-26 15:44:33,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63664.0, ans=0.1 +2024-08-26 15:44:35,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=63664.0, ans=0.2 +2024-08-26 15:44:44,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=63717.333333333336, ans=0.125 +2024-08-26 15:44:46,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.68 vs. limit=22.5 +2024-08-26 15:44:52,301 INFO [train.py:1114] (0/4) Epoch 5, batch 2000, loss[loss=0.2104, simple_loss=0.2605, pruned_loss=0.05903, ctc_loss=0.1057, over 19636.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3149, pruned_loss=0.08006, ctc_loss=0.1495, over 3853618.52 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0 +2024-08-26 15:44:57,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0 +2024-08-26 15:45:04,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63824.0, ans=0.1 +2024-08-26 15:45:17,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=63877.333333333336, ans=0.0 +2024-08-26 15:45:20,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.83 vs. limit=15.0 +2024-08-26 15:45:29,791 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-12000.pt +2024-08-26 15:45:36,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.01 vs. 
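The checkpoint.py line above saves a batch-indexed checkpoint (checkpoint-12000.pt) in addition to the per-epoch checkpoints that appear later in this log. A generic sketch of that pattern follows; the save_every value and payload layout are assumptions, not read from the recipe.

```python
# Sketch of periodic, batch-count-keyed checkpointing as in
# "Saving checkpoint to .../checkpoint-12000.pt" above.
# save_every and the payload layout are illustrative assumptions.
import torch

def maybe_save_checkpoint(model, optimizer, global_batch, exp_dir, save_every=4000):
    if global_batch > 0 and global_batch % save_every == 0:
        torch.save(
            {
                "model": model.state_dict(),
                "optimizer": optimizer.state_dict(),
                "batch": global_batch,
            },
            f"{exp_dir}/checkpoint-{global_batch}.pt",
        )
```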
limit=15.0 +2024-08-26 15:45:38,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=63984.0, ans=0.0 +2024-08-26 15:45:42,246 INFO [train.py:1114] (0/4) Epoch 5, batch 2050, loss[loss=0.2261, simple_loss=0.275, pruned_loss=0.06343, ctc_loss=0.1256, over 19706.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3134, pruned_loss=0.07966, ctc_loss=0.149, over 3850977.62 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 32.0 +2024-08-26 15:45:54,616 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.624e+02 1.773e+02 2.077e+02 3.322e+02, threshold=3.546e+02, percent-clipped=0.0 +2024-08-26 15:45:56,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=64090.666666666664, ans=0.125 +2024-08-26 15:46:10,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=64197.333333333336, ans=0.5 +2024-08-26 15:46:11,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64197.333333333336, ans=0.1 +2024-08-26 15:46:26,371 INFO [train.py:1114] (0/4) Epoch 5, batch 2100, loss[loss=0.2664, simple_loss=0.3154, pruned_loss=0.07882, ctc_loss=0.1498, over 19789.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3123, pruned_loss=0.07878, ctc_loss=0.1471, over 3858618.66 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0 +2024-08-26 15:46:37,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=64357.333333333336, ans=0.125 +2024-08-26 15:46:45,880 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-08-26 15:46:49,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64410.666666666664, ans=0.1 +2024-08-26 15:46:55,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=64464.0, ans=0.125 +2024-08-26 15:46:56,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=64464.0, ans=0.0 +2024-08-26 15:47:18,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64517.333333333336, ans=0.1 +2024-08-26 15:47:23,494 INFO [train.py:1114] (0/4) Epoch 5, batch 2150, loss[loss=0.2435, simple_loss=0.301, pruned_loss=0.06674, ctc_loss=0.1314, over 19597.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3114, pruned_loss=0.07824, ctc_loss=0.1461, over 3869218.50 frames. ], batch size: 52, lr: 2.65e-02, grad_scale: 32.0 +2024-08-26 15:47:32,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.81 vs. 
limit=15.0 +2024-08-26 15:47:35,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.599e+02 1.757e+02 2.074e+02 2.995e+02, threshold=3.513e+02, percent-clipped=0.0 +2024-08-26 15:47:46,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=64677.333333333336, ans=0.125 +2024-08-26 15:47:49,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64730.666666666664, ans=0.1 +2024-08-26 15:47:59,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=64784.0, ans=0.0 +2024-08-26 15:48:07,237 INFO [train.py:1114] (0/4) Epoch 5, batch 2200, loss[loss=0.2865, simple_loss=0.3308, pruned_loss=0.08808, ctc_loss=0.165, over 19592.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3117, pruned_loss=0.07845, ctc_loss=0.1465, over 3867821.49 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0 +2024-08-26 15:48:12,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-08-26 15:48:21,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=64890.666666666664, ans=0.125 +2024-08-26 15:48:26,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=64944.0, ans=0.1 +2024-08-26 15:48:37,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64997.333333333336, ans=0.1 +2024-08-26 15:48:37,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=64997.333333333336, ans=0.07 +2024-08-26 15:48:52,521 INFO [train.py:1114] (0/4) Epoch 5, batch 2250, loss[loss=0.2658, simple_loss=0.3158, pruned_loss=0.07672, ctc_loss=0.1558, over 19606.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3118, pruned_loss=0.07842, ctc_loss=0.1466, over 3867541.43 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 16.0 +2024-08-26 15:48:58,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=65104.0, ans=0.0 +2024-08-26 15:49:01,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=65157.333333333336, ans=0.035 +2024-08-26 15:49:02,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.01 vs. limit=10.0 +2024-08-26 15:49:03,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.50 vs. limit=22.5 +2024-08-26 15:49:05,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.721e+02 2.056e+02 2.448e+02 6.138e+02, threshold=4.112e+02, percent-clipped=3.0 +2024-08-26 15:49:08,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.32 vs. 
limit=22.5 +2024-08-26 15:49:09,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=65210.666666666664, ans=0.0 +2024-08-26 15:49:15,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.85 vs. limit=15.0 +2024-08-26 15:49:25,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.66 vs. limit=8.0 +2024-08-26 15:49:28,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65317.333333333336, ans=0.0 +2024-08-26 15:49:34,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=65317.333333333336, ans=0.2 +2024-08-26 15:49:36,468 INFO [train.py:1114] (0/4) Epoch 5, batch 2300, loss[loss=0.2419, simple_loss=0.2974, pruned_loss=0.06802, ctc_loss=0.1258, over 19504.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3106, pruned_loss=0.0784, ctc_loss=0.1463, over 3861826.33 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 16.0 +2024-08-26 15:49:57,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=65477.333333333336, ans=0.125 +2024-08-26 15:50:01,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=65477.333333333336, ans=0.2 +2024-08-26 15:50:07,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=65530.666666666664, ans=0.1 +2024-08-26 15:50:10,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-08-26 15:50:23,059 INFO [train.py:1114] (0/4) Epoch 5, batch 2350, loss[loss=0.3045, simple_loss=0.3399, pruned_loss=0.09893, ctc_loss=0.1782, over 19684.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.311, pruned_loss=0.07859, ctc_loss=0.1465, over 3864672.00 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 16.0 +2024-08-26 15:50:23,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65637.33333333333, ans=0.1 +2024-08-26 15:50:32,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=65690.66666666667, ans=0.125 +2024-08-26 15:50:36,069 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.568e+02 1.781e+02 2.033e+02 3.218e+02, threshold=3.561e+02, percent-clipped=0.0 +2024-08-26 15:50:42,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=65744.0, ans=0.1 +2024-08-26 15:50:44,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=65744.0, ans=0.09899494936611666 +2024-08-26 15:50:49,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.39 vs. 
limit=22.5 +2024-08-26 15:50:57,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=65850.66666666667, ans=0.0 +2024-08-26 15:51:02,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=65850.66666666667, ans=0.125 +2024-08-26 15:51:07,079 INFO [train.py:1114] (0/4) Epoch 5, batch 2400, loss[loss=0.274, simple_loss=0.3329, pruned_loss=0.07686, ctc_loss=0.1536, over 19429.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3135, pruned_loss=0.07928, ctc_loss=0.1481, over 3858442.69 frames. ], batch size: 67, lr: 2.63e-02, grad_scale: 32.0 +2024-08-26 15:51:36,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.73 vs. limit=15.0 +2024-08-26 15:51:48,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.85 vs. limit=12.0 +2024-08-26 15:51:52,367 INFO [train.py:1114] (0/4) Epoch 5, batch 2450, loss[loss=0.3699, simple_loss=0.3717, pruned_loss=0.135, ctc_loss=0.2452, over 13486.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3186, pruned_loss=0.0837, ctc_loss=0.1561, over 3734260.93 frames. ], batch size: 141, lr: 2.63e-02, grad_scale: 16.0 +2024-08-26 15:51:54,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=66170.66666666667, ans=0.125 +2024-08-26 15:52:03,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=66224.0, ans=0.2 +2024-08-26 15:52:07,306 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.716e+02 1.912e+02 2.213e+02 5.978e+02, threshold=3.825e+02, percent-clipped=3.0 +2024-08-26 15:52:07,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=66224.0, ans=0.125 +2024-08-26 15:52:18,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=66277.33333333333, ans=0.125 +2024-08-26 15:52:27,976 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-5.pt +2024-08-26 15:53:42,790 INFO [train.py:1114] (0/4) Epoch 6, batch 0, loss[loss=0.2388, simple_loss=0.2865, pruned_loss=0.07046, ctc_loss=0.1255, over 19827.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.2865, pruned_loss=0.07046, ctc_loss=0.1255, over 19827.00 frames. ], batch size: 49, lr: 2.45e-02, grad_scale: 32.0 +2024-08-26 15:53:42,791 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 15:54:26,135 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.2162, simple_loss=0.3022, pruned_loss=0.04785, ctc_loss=0.08613, over 944034.00 frames. +2024-08-26 15:54:26,136 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12795MB +2024-08-26 15:54:44,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.08 vs. limit=10.0 +2024-08-26 15:54:45,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.05 vs. 
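At "Epoch 6, batch 0" above, the script pauses training, runs a full no-grad pass over the validation set, and reports a frame-weighted validation loss along with peak CUDA memory. A hedged sketch of that step; compute_loss is a hypothetical stand-in for the recipe's own loss helper.

```python
# Sketch of the "Computing validation loss" step logged above: a no-grad,
# frame-weighted sweep over the dev set, then the peak-memory report.
# compute_loss is a hypothetical stand-in for the recipe's loss function.
import torch

def validate(model, valid_loader, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch, device)  # hypothetical
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    print(f"validation: loss={tot_loss / tot_frames:.4f}, over {tot_frames:.2f} frames.")
    if torch.cuda.is_available():
        peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        print(f"Maximum memory allocated so far is {peak_mb}MB")
```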
limit=22.5 +2024-08-26 15:54:50,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=66485.33333333333, ans=0.125 +2024-08-26 15:54:55,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66538.66666666667, ans=0.1 +2024-08-26 15:55:02,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=66538.66666666667, ans=0.125 +2024-08-26 15:55:03,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=66592.0, ans=0.125 +2024-08-26 15:55:04,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=66592.0, ans=0.125 +2024-08-26 15:55:05,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=66592.0, ans=0.125 +2024-08-26 15:55:07,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=66592.0, ans=0.0 +2024-08-26 15:55:13,592 INFO [train.py:1114] (0/4) Epoch 6, batch 50, loss[loss=0.2227, simple_loss=0.2729, pruned_loss=0.06215, ctc_loss=0.1204, over 19728.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3141, pruned_loss=0.0804, ctc_loss=0.152, over 844928.81 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-26 15:55:13,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=66645.33333333333, ans=0.125 +2024-08-26 15:55:19,338 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:55:19,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.43 vs. limit=22.5 +2024-08-26 15:55:29,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0 +2024-08-26 15:55:29,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=66698.66666666667, ans=0.0 +2024-08-26 15:55:29,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=66698.66666666667, ans=0.0 +2024-08-26 15:55:37,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. 
limit=15.0 +2024-08-26 15:55:39,180 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.607e+02 1.759e+02 1.997e+02 3.496e+02, threshold=3.518e+02, percent-clipped=0.0 +2024-08-26 15:55:41,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=66805.33333333333, ans=0.2 +2024-08-26 15:55:47,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66805.33333333333, ans=0.125 +2024-08-26 15:55:50,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=66858.66666666667, ans=0.125 +2024-08-26 15:56:02,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.99 vs. limit=22.5 +2024-08-26 15:56:03,171 INFO [train.py:1114] (0/4) Epoch 6, batch 100, loss[loss=0.2226, simple_loss=0.2851, pruned_loss=0.0588, ctc_loss=0.1062, over 19711.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3152, pruned_loss=0.07909, ctc_loss=0.1487, over 1499585.12 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0 +2024-08-26 15:56:14,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-08-26 15:56:41,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=67072.0, ans=0.125 +2024-08-26 15:56:43,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=67072.0, ans=0.2 +2024-08-26 15:56:53,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67125.33333333333, ans=0.1 +2024-08-26 15:56:57,279 INFO [train.py:1114] (0/4) Epoch 6, batch 150, loss[loss=0.2303, simple_loss=0.2806, pruned_loss=0.06622, ctc_loss=0.1189, over 19714.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3104, pruned_loss=0.0769, ctc_loss=0.1441, over 2027782.93 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-26 15:57:19,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=67285.33333333333, ans=0.125 +2024-08-26 15:57:22,721 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.333e+02 1.584e+02 1.709e+02 1.986e+02 2.973e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-26 15:57:28,645 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:57:36,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=67392.0, ans=0.125 +2024-08-26 15:57:40,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=67392.0, ans=0.125 +2024-08-26 15:57:44,428 INFO [train.py:1114] (0/4) Epoch 6, batch 200, loss[loss=0.2702, simple_loss=0.3151, pruned_loss=0.0828, ctc_loss=0.1494, over 18434.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3082, pruned_loss=0.07579, ctc_loss=0.1417, over 2435777.26 frames. 
], batch size: 85, lr: 2.43e-02, grad_scale: 32.0 +2024-08-26 15:57:47,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=67445.33333333333, ans=0.05 +2024-08-26 15:57:50,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.97 vs. limit=10.0 +2024-08-26 15:58:03,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=67552.0, ans=0.0 +2024-08-26 15:58:04,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=67552.0, ans=0.2 +2024-08-26 15:58:10,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=67552.0, ans=0.05 +2024-08-26 15:58:11,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=67552.0, ans=0.0 +2024-08-26 15:58:16,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=67605.33333333333, ans=0.2 +2024-08-26 15:58:19,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.98 vs. limit=10.0 +2024-08-26 15:58:26,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67658.66666666667, ans=0.125 +2024-08-26 15:58:27,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=67658.66666666667, ans=0.125 +2024-08-26 15:58:36,038 INFO [train.py:1114] (0/4) Epoch 6, batch 250, loss[loss=0.2651, simple_loss=0.3197, pruned_loss=0.07616, ctc_loss=0.1452, over 19444.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3083, pruned_loss=0.07518, ctc_loss=0.1408, over 2756020.36 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0 +2024-08-26 15:59:01,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=67765.33333333333, ans=0.0 +2024-08-26 15:59:06,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=67818.66666666667, ans=0.0 +2024-08-26 15:59:10,413 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.703e+02 1.915e+02 3.590e+02, threshold=3.407e+02, percent-clipped=1.0 +2024-08-26 15:59:11,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=67818.66666666667, ans=0.5 +2024-08-26 15:59:16,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=67872.0, ans=0.125 +2024-08-26 15:59:29,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67925.33333333333, ans=0.125 +2024-08-26 15:59:32,669 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:59:35,255 INFO [train.py:1114] (0/4) Epoch 6, batch 300, loss[loss=0.2952, simple_loss=0.3378, pruned_loss=0.09449, ctc_loss=0.1593, over 19503.00 frames. 
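Each train.py:1114 record prints a combined loss next to its simple_loss, pruned_loss and ctc_loss components. The logged values are consistent with a fixed weighted sum using a scale of 0.5 on the simple loss and 0.2 on the CTC loss: for the Epoch 6 batch 200 record above, 0.5·0.3151 + 0.0828 + 0.2·0.1494 ≈ 0.2702, and other records check out the same way. A sketch under that inferred weighting (the scales are read off the log, not taken from the training script):

```python
def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float,
                  simple_loss_scale: float = 0.5,
                  ctc_loss_scale: float = 0.2) -> float:
    # Scales inferred from the logged values; icefall exposes similar
    # --simple-loss-scale / --ctc-loss-scale options.
    return simple_loss_scale * simple_loss + pruned_loss + ctc_loss_scale * ctc_loss


# The Epoch 6 batch 200 record: loss=0.2702
assert abs(combined_loss(0.3151, 0.0828, 0.1494) - 0.2702) < 1e-3
```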
], tot_loss[loss=0.2571, simple_loss=0.3077, pruned_loss=0.07514, ctc_loss=0.1405, over 3000636.50 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0 +2024-08-26 15:59:35,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67978.66666666667, ans=0.125 +2024-08-26 15:59:44,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=67978.66666666667, ans=0.125 +2024-08-26 16:00:10,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=68138.66666666667, ans=0.125 +2024-08-26 16:00:24,076 INFO [train.py:1114] (0/4) Epoch 6, batch 350, loss[loss=0.2413, simple_loss=0.2877, pruned_loss=0.07199, ctc_loss=0.1272, over 19759.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3082, pruned_loss=0.07515, ctc_loss=0.1404, over 3190927.67 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0 +2024-08-26 16:00:24,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=68245.33333333333, ans=0.125 +2024-08-26 16:00:31,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=68245.33333333333, ans=0.1 +2024-08-26 16:00:49,640 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.625e+02 1.872e+02 2.224e+02 3.924e+02, threshold=3.744e+02, percent-clipped=2.0 +2024-08-26 16:00:58,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=68405.33333333333, ans=0.035 +2024-08-26 16:01:02,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=68458.66666666667, ans=0.025 +2024-08-26 16:01:04,828 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:01:10,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=68512.0, ans=0.2 +2024-08-26 16:01:11,319 INFO [train.py:1114] (0/4) Epoch 6, batch 400, loss[loss=0.2264, simple_loss=0.2868, pruned_loss=0.0603, ctc_loss=0.1134, over 19521.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.307, pruned_loss=0.0744, ctc_loss=0.1391, over 3343391.92 frames. ], batch size: 54, lr: 2.42e-02, grad_scale: 32.0 +2024-08-26 16:01:20,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=68565.33333333333, ans=0.125 +2024-08-26 16:01:33,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.24 vs. limit=6.0 +2024-08-26 16:01:34,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=68618.66666666667, ans=0.5 +2024-08-26 16:01:36,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.41 vs. 
limit=22.5 +2024-08-26 16:01:56,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=68725.33333333333, ans=0.0 +2024-08-26 16:02:07,092 INFO [train.py:1114] (0/4) Epoch 6, batch 450, loss[loss=0.271, simple_loss=0.3205, pruned_loss=0.08128, ctc_loss=0.1475, over 19629.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3072, pruned_loss=0.07461, ctc_loss=0.1395, over 3451979.55 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0 +2024-08-26 16:02:14,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.64 vs. limit=15.0 +2024-08-26 16:02:26,912 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0 +2024-08-26 16:02:26,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-08-26 16:02:34,034 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.611e+02 1.799e+02 2.140e+02 4.925e+02, threshold=3.597e+02, percent-clipped=1.0 +2024-08-26 16:02:37,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=68938.66666666667, ans=0.125 +2024-08-26 16:02:45,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=68992.0, ans=0.125 +2024-08-26 16:02:55,674 INFO [train.py:1114] (0/4) Epoch 6, batch 500, loss[loss=0.2859, simple_loss=0.3346, pruned_loss=0.08699, ctc_loss=0.1581, over 19635.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.306, pruned_loss=0.07414, ctc_loss=0.1388, over 3547779.06 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0 +2024-08-26 16:02:55,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=69045.33333333333, ans=0.0 +2024-08-26 16:03:27,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69205.33333333333, ans=0.1 +2024-08-26 16:03:32,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=69205.33333333333, ans=0.125 +2024-08-26 16:03:33,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=69258.66666666667, ans=0.2 +2024-08-26 16:03:35,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.10 vs. limit=6.0 +2024-08-26 16:03:43,059 INFO [train.py:1114] (0/4) Epoch 6, batch 550, loss[loss=0.2901, simple_loss=0.3301, pruned_loss=0.09161, ctc_loss=0.1673, over 19282.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3065, pruned_loss=0.07446, ctc_loss=0.1394, over 3609600.28 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0 +2024-08-26 16:03:54,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=69365.33333333333, ans=0.125 +2024-08-26 16:03:57,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.49 vs. 
limit=15.0 +2024-08-26 16:04:08,890 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.633e+02 1.875e+02 2.080e+02 6.681e+02, threshold=3.749e+02, percent-clipped=3.0 +2024-08-26 16:04:09,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69418.66666666667, ans=0.1 +2024-08-26 16:04:13,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=69472.0, ans=0.125 +2024-08-26 16:04:19,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=69472.0, ans=0.125 +2024-08-26 16:04:30,180 INFO [train.py:1114] (0/4) Epoch 6, batch 600, loss[loss=0.275, simple_loss=0.3291, pruned_loss=0.0805, ctc_loss=0.1498, over 19348.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3063, pruned_loss=0.07406, ctc_loss=0.1387, over 3666363.11 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0 +2024-08-26 16:04:37,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=69578.66666666667, ans=0.07 +2024-08-26 16:04:38,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=69578.66666666667, ans=0.0 +2024-08-26 16:04:39,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=69578.66666666667, ans=0.125 +2024-08-26 16:04:49,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.46 vs. limit=22.5 +2024-08-26 16:04:54,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69632.0, ans=0.1 +2024-08-26 16:05:05,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69738.66666666667, ans=0.1 +2024-08-26 16:05:07,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=69738.66666666667, ans=0.0 +2024-08-26 16:05:07,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=69738.66666666667, ans=0.0 +2024-08-26 16:05:12,626 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:05:25,783 INFO [train.py:1114] (0/4) Epoch 6, batch 650, loss[loss=0.2289, simple_loss=0.2947, pruned_loss=0.05904, ctc_loss=0.1127, over 19766.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3058, pruned_loss=0.07363, ctc_loss=0.1379, over 3716488.73 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-26 16:05:53,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.596e+02 1.734e+02 1.974e+02 3.978e+02, threshold=3.467e+02, percent-clipped=1.0 +2024-08-26 16:05:57,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.89 vs. 
limit=22.5 +2024-08-26 16:06:00,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70005.33333333333, ans=0.0 +2024-08-26 16:06:07,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=70058.66666666667, ans=0.025 +2024-08-26 16:06:15,361 INFO [train.py:1114] (0/4) Epoch 6, batch 700, loss[loss=0.2117, simple_loss=0.276, pruned_loss=0.05412, ctc_loss=0.09767, over 19724.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3065, pruned_loss=0.07407, ctc_loss=0.1383, over 3748518.01 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 16.0 +2024-08-26 16:06:16,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.42 vs. limit=15.0 +2024-08-26 16:06:27,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=70165.33333333333, ans=0.2 +2024-08-26 16:06:40,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=70218.66666666667, ans=0.0 +2024-08-26 16:07:02,317 INFO [train.py:1114] (0/4) Epoch 6, batch 750, loss[loss=0.2504, simple_loss=0.3116, pruned_loss=0.06734, ctc_loss=0.1362, over 19502.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3059, pruned_loss=0.07372, ctc_loss=0.1377, over 3774484.37 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 16.0 +2024-08-26 16:07:13,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.02 vs. limit=15.0 +2024-08-26 16:07:27,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70485.33333333333, ans=0.1 +2024-08-26 16:07:33,082 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.662e+02 1.845e+02 2.236e+02 2.956e+02, threshold=3.689e+02, percent-clipped=0.0 +2024-08-26 16:07:43,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.71 vs. limit=15.0 +2024-08-26 16:08:24,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=70592.0, ans=0.125 +2024-08-26 16:08:25,917 INFO [train.py:1114] (0/4) Epoch 6, batch 800, loss[loss=0.205, simple_loss=0.2682, pruned_loss=0.0506, ctc_loss=0.1016, over 19810.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3055, pruned_loss=0.07371, ctc_loss=0.1377, over 3794875.81 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 32.0 +2024-08-26 16:08:26,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=70645.33333333333, ans=0.2 +2024-08-26 16:08:41,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70698.66666666667, ans=0.125 +2024-08-26 16:09:29,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.61 vs. limit=6.0 +2024-08-26 16:09:32,223 INFO [train.py:1114] (0/4) Epoch 6, batch 850, loss[loss=0.2563, simple_loss=0.3154, pruned_loss=0.07148, ctc_loss=0.1355, over 19681.00 frames. 
], tot_loss[loss=0.254, simple_loss=0.3053, pruned_loss=0.07381, ctc_loss=0.1378, over 3813603.32 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0 +2024-08-26 16:09:34,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=70912.0, ans=0.125 +2024-08-26 16:09:48,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70965.33333333333, ans=0.125 +2024-08-26 16:09:58,792 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.315e+02 1.558e+02 1.696e+02 1.888e+02 5.151e+02, threshold=3.391e+02, percent-clipped=1.0 +2024-08-26 16:10:04,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=71072.0, ans=0.09899494936611666 +2024-08-26 16:10:22,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=71072.0, ans=0.125 +2024-08-26 16:10:24,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=71072.0, ans=0.025 +2024-08-26 16:10:29,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71125.33333333333, ans=0.0 +2024-08-26 16:10:35,829 INFO [train.py:1114] (0/4) Epoch 6, batch 900, loss[loss=0.2265, simple_loss=0.279, pruned_loss=0.06327, ctc_loss=0.1188, over 19439.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3056, pruned_loss=0.07421, ctc_loss=0.1384, over 3817682.59 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 32.0 +2024-08-26 16:10:47,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=71232.0, ans=0.125 +2024-08-26 16:10:51,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71232.0, ans=0.125 +2024-08-26 16:10:55,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=71285.33333333333, ans=10.0 +2024-08-26 16:11:05,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.11 vs. limit=22.5 +2024-08-26 16:11:11,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=71338.66666666667, ans=0.125 +2024-08-26 16:11:14,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71392.0, ans=0.0 +2024-08-26 16:11:16,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=71392.0, ans=0.125 +2024-08-26 16:11:23,813 INFO [train.py:1114] (0/4) Epoch 6, batch 950, loss[loss=0.2394, simple_loss=0.2944, pruned_loss=0.06637, ctc_loss=0.1295, over 19522.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.306, pruned_loss=0.07455, ctc_loss=0.1391, over 3819604.40 frames. 
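The scaling.py:214 lines each sample a ScheduledFloat: a module hyperparameter (balancer probability, skip rate, dropout p, bypass scale, ...) whose current value `ans` is a function of the global `batch_count`. A minimal sketch of a piecewise-linear schedule with that interface; the breakpoints below are illustrative, not the ones used in this run, and icefall's scaling.py version is richer (defaults, arithmetic on schedules, etc.):

```python
class ScheduledFloat:
    """Float-like value interpolated piecewise-linearly in batch_count."""

    def __init__(self, *points):
        # points: (batch_count, value) pairs; kept sorted by batch_count.
        self.points = sorted(points)
        self.batch_count = 0.0

    def __float__(self) -> float:
        x = self.batch_count
        (x0, y0), *rest = self.points
        if x <= x0:
            return y0
        for x1, y1 in rest:
            if x <= x1:  # interpolate between the bracketing breakpoints
                return y0 + (y1 - y0) * (x - x0) / (x1 - x0)
            x0, y0 = x1, y1
        return y0  # past the last breakpoint: hold the final value


# e.g. a dropout_p annealed from 0.3 to 0.1 over the first 20k batches:
p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
p.batch_count = 71500.0
print(float(p))  # -> 0.1, as in the logged "dropout_p ... ans=0.1" entries
```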
], batch size: 49, lr: 2.38e-02, grad_scale: 16.0 +2024-08-26 16:11:45,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=71498.66666666667, ans=0.125 +2024-08-26 16:11:47,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71498.66666666667, ans=0.1 +2024-08-26 16:11:50,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=71552.0, ans=0.125 +2024-08-26 16:11:59,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.602e+02 1.780e+02 2.099e+02 5.215e+02, threshold=3.559e+02, percent-clipped=4.0 +2024-08-26 16:12:03,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.whiten.whitening_limit, batch_count=71605.33333333333, ans=15.0 +2024-08-26 16:12:04,586 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:12:21,521 INFO [train.py:1114] (0/4) Epoch 6, batch 1000, loss[loss=0.2451, simple_loss=0.2975, pruned_loss=0.07056, ctc_loss=0.129, over 19845.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3067, pruned_loss=0.07466, ctc_loss=0.1393, over 3815694.02 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0 +2024-08-26 16:12:30,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71765.33333333333, ans=0.125 +2024-08-26 16:12:31,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0 +2024-08-26 16:12:59,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71818.66666666667, ans=0.0 +2024-08-26 16:13:04,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=71872.0, ans=0.025 +2024-08-26 16:13:11,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=71872.0, ans=0.09899494936611666 +2024-08-26 16:13:12,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71925.33333333333, ans=0.1 +2024-08-26 16:13:16,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71925.33333333333, ans=0.125 +2024-08-26 16:13:19,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=71925.33333333333, ans=0.125 +2024-08-26 16:13:20,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=71925.33333333333, ans=0.125 +2024-08-26 16:13:22,633 INFO [train.py:1114] (0/4) Epoch 6, batch 1050, loss[loss=0.2442, simple_loss=0.3038, pruned_loss=0.06744, ctc_loss=0.1244, over 19845.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3059, pruned_loss=0.0743, ctc_loss=0.1387, over 3821802.79 frames. 
], batch size: 57, lr: 2.37e-02, grad_scale: 16.0 +2024-08-26 16:13:29,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71978.66666666667, ans=0.1 +2024-08-26 16:13:50,116 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.587e+02 1.763e+02 2.081e+02 5.001e+02, threshold=3.526e+02, percent-clipped=1.0 +2024-08-26 16:13:55,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=72138.66666666667, ans=0.0 +2024-08-26 16:14:09,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=72192.0, ans=0.125 +2024-08-26 16:14:10,566 INFO [train.py:1114] (0/4) Epoch 6, batch 1100, loss[loss=0.2333, simple_loss=0.2998, pruned_loss=0.06101, ctc_loss=0.1122, over 19586.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3055, pruned_loss=0.07376, ctc_loss=0.1377, over 3829564.03 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-26 16:14:11,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=72245.33333333333, ans=0.2 +2024-08-26 16:14:27,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=72298.66666666667, ans=0.125 +2024-08-26 16:14:36,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72352.0, ans=0.125 +2024-08-26 16:14:42,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72352.0, ans=0.1 +2024-08-26 16:14:50,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=72405.33333333333, ans=0.0 +2024-08-26 16:15:25,945 INFO [train.py:1114] (0/4) Epoch 6, batch 1150, loss[loss=0.2321, simple_loss=0.2884, pruned_loss=0.06255, ctc_loss=0.1269, over 19581.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3049, pruned_loss=0.07353, ctc_loss=0.1373, over 3828050.86 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-26 16:15:52,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.06 vs. limit=15.0 +2024-08-26 16:15:55,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=72565.33333333333, ans=0.07 +2024-08-26 16:15:59,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=72565.33333333333, ans=0.025 +2024-08-26 16:16:51,594 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.628e+02 1.822e+02 2.077e+02 5.117e+02, threshold=3.645e+02, percent-clipped=2.0 +2024-08-26 16:16:53,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0 +2024-08-26 16:17:05,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=72725.33333333333, ans=0.07 +2024-08-26 16:17:15,527 INFO [train.py:1114] (0/4) Epoch 6, batch 1200, loss[loss=0.2879, simple_loss=0.3349, pruned_loss=0.08792, ctc_loss=0.1627, over 19845.00 frames. 
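The scaling.py:1024 lines report a per-module whitening diagnostic, metric vs. limit, with tighter limits (6.0) on grouped attention-key whitening and looser ones (10.0, 15.0, 22.5) elsewhere. A metric of this kind can be built to equal 1.0 when a group's feature covariance is a multiple of the identity and to grow as channels become correlated or unevenly scaled; the module then penalizes activations whose metric exceeds the limit. The following is a hedged sketch with those properties, not icefall's exact formula:

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels). Returns 1.0 iff each group's
    covariance is a multiple of the identity; larger means 'less white'."""
    n, c = x.shape
    g = c // num_groups
    metrics = []
    for i in range(num_groups):
        xg = x[:, i * g:(i + 1) * g]
        cov = xg.T @ xg / n                        # (g, g) group covariance
        num = (cov ** 2).mean() * g                # mean squared entry, scaled
        den = cov.diagonal().mean() ** 2 + 1e-20   # squared mean diagonal
        metrics.append((num / den).item())         # == 1.0 for cov = c * I
    return max(metrics)


print(whitening_metric(torch.randn(10000, 384)))  # ~1.0 for white noise
```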
], tot_loss[loss=0.2549, simple_loss=0.3062, pruned_loss=0.07413, ctc_loss=0.1384, over 3823637.56 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:17:17,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.37 vs. limit=15.0 +2024-08-26 16:17:26,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72832.0, ans=0.125 +2024-08-26 16:17:30,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.92 vs. limit=15.0 +2024-08-26 16:17:31,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=72832.0, ans=0.2 +2024-08-26 16:17:32,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=72832.0, ans=0.2 +2024-08-26 16:17:37,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.58 vs. limit=22.5 +2024-08-26 16:17:38,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.66 vs. limit=10.0 +2024-08-26 16:17:40,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72885.33333333333, ans=0.125 +2024-08-26 16:17:45,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.64 vs. limit=15.0 +2024-08-26 16:17:48,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72938.66666666667, ans=0.1 +2024-08-26 16:18:04,588 INFO [train.py:1114] (0/4) Epoch 6, batch 1250, loss[loss=0.2821, simple_loss=0.3216, pruned_loss=0.09039, ctc_loss=0.1545, over 19526.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3062, pruned_loss=0.07377, ctc_loss=0.1377, over 3842300.87 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:18:15,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.01 vs. 
limit=15.0 +2024-08-26 16:18:16,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=73098.66666666667, ans=0.125 +2024-08-26 16:18:17,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=73098.66666666667, ans=0.125 +2024-08-26 16:18:24,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=73152.0, ans=0.125 +2024-08-26 16:18:31,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=73152.0, ans=0.0 +2024-08-26 16:18:31,866 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.534e+02 1.709e+02 2.004e+02 3.682e+02, threshold=3.418e+02, percent-clipped=1.0 +2024-08-26 16:18:56,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=73258.66666666667, ans=0.125 +2024-08-26 16:18:59,507 INFO [train.py:1114] (0/4) Epoch 6, batch 1300, loss[loss=0.2961, simple_loss=0.3443, pruned_loss=0.08942, ctc_loss=0.1727, over 18835.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3055, pruned_loss=0.07349, ctc_loss=0.1371, over 3846607.15 frames. ], batch size: 76, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:19:18,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=73312.0, ans=0.2 +2024-08-26 16:19:22,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=73365.33333333333, ans=0.125 +2024-08-26 16:19:53,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=73365.33333333333, ans=0.2 +2024-08-26 16:19:56,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=73365.33333333333, ans=15.0 +2024-08-26 16:20:00,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=73418.66666666667, ans=0.125 +2024-08-26 16:20:10,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=73472.0, ans=0.0 +2024-08-26 16:20:16,244 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:20:27,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73525.33333333333, ans=0.125 +2024-08-26 16:20:32,211 INFO [train.py:1114] (0/4) Epoch 6, batch 1350, loss[loss=0.2353, simple_loss=0.3021, pruned_loss=0.06189, ctc_loss=0.112, over 19765.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3045, pruned_loss=0.07273, ctc_loss=0.1357, over 3857129.69 frames. 
], batch size: 54, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:20:33,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=73578.66666666667, ans=0.0 +2024-08-26 16:20:38,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=73578.66666666667, ans=0.0 +2024-08-26 16:20:42,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=73632.0, ans=0.2 +2024-08-26 16:20:55,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=73685.33333333333, ans=0.0 +2024-08-26 16:20:57,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=73685.33333333333, ans=0.0 +2024-08-26 16:21:00,540 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.536e+02 1.657e+02 1.960e+02 3.055e+02, threshold=3.315e+02, percent-clipped=0.0 +2024-08-26 16:21:20,641 INFO [train.py:1114] (0/4) Epoch 6, batch 1400, loss[loss=0.2398, simple_loss=0.2859, pruned_loss=0.07056, ctc_loss=0.1314, over 19670.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3046, pruned_loss=0.07291, ctc_loss=0.1363, over 3864354.35 frames. ], batch size: 46, lr: 2.35e-02, grad_scale: 32.0 +2024-08-26 16:21:42,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-08-26 16:21:52,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=74005.33333333333, ans=0.125 +2024-08-26 16:21:57,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=74005.33333333333, ans=0.125 +2024-08-26 16:21:57,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=74005.33333333333, ans=0.2 +2024-08-26 16:22:07,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=74112.0, ans=0.0 +2024-08-26 16:22:08,768 INFO [train.py:1114] (0/4) Epoch 6, batch 1450, loss[loss=0.2909, simple_loss=0.3311, pruned_loss=0.09206, ctc_loss=0.1662, over 19641.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.305, pruned_loss=0.07306, ctc_loss=0.1366, over 3863037.37 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 16.0 +2024-08-26 16:23:32,945 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.612e+02 1.863e+02 2.093e+02 4.374e+02, threshold=3.727e+02, percent-clipped=2.0 +2024-08-26 16:23:40,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=74272.0, ans=0.0 +2024-08-26 16:23:47,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=74325.33333333333, ans=0.125 +2024-08-26 16:23:49,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=74325.33333333333, ans=0.04949747468305833 +2024-08-26 16:23:57,147 INFO [train.py:1114] (0/4) Epoch 6, batch 1500, loss[loss=0.2557, simple_loss=0.3073, pruned_loss=0.07504, ctc_loss=0.135, over 19578.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3055, pruned_loss=0.0732, ctc_loss=0.1367, over 3862765.27 frames. 
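tot_loss in these train.py records is not a plain mean over batches: its "over N frames" count climbs from ~845k at batch 50 to ~3.86M and then saturates, which matches an exponentially decayed, frame-weighted accumulator with a decay of roughly 1 - 1/200 per batch (200 × ~19.5k frames ≈ 3.9M at steady state). A sketch with that constant assumed, not read from the code:

```python
class TotLoss:
    """Exponentially decayed, frame-weighted loss accumulator."""

    def __init__(self, decay: float = 1.0 - 1.0 / 200):
        self.decay = decay
        self.frames = 0.0
        self.loss_sum = 0.0

    def update(self, frames: float, loss: float):
        # Decay the history, then add this batch weighted by its frames.
        self.frames = self.frames * self.decay + frames
        self.loss_sum = self.loss_sum * self.decay + loss * frames

    @property
    def loss(self) -> float:
        return self.loss_sum / self.frames


tot = TotLoss()
for _ in range(50):
    tot.update(frames=19500.0, loss=0.27)
print(f"{tot.frames:.0f} frames")  # ~864k, cf. 844928.81 logged at batch 50
```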
], batch size: 57, lr: 2.35e-02, grad_scale: 16.0 +2024-08-26 16:23:59,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-08-26 16:24:31,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.32 vs. limit=12.0 +2024-08-26 16:24:38,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.06 vs. limit=15.0 +2024-08-26 16:24:40,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=74485.33333333333, ans=0.0 +2024-08-26 16:24:41,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74485.33333333333, ans=0.1 +2024-08-26 16:24:51,910 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-08-26 16:25:10,583 INFO [train.py:1114] (0/4) Epoch 6, batch 1550, loss[loss=0.2527, simple_loss=0.3079, pruned_loss=0.07176, ctc_loss=0.1351, over 19610.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3054, pruned_loss=0.07341, ctc_loss=0.1371, over 3846999.82 frames. ], batch size: 60, lr: 2.34e-02, grad_scale: 16.0 +2024-08-26 16:25:12,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.70 vs. limit=15.0 +2024-08-26 16:26:20,821 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.577e+02 1.696e+02 1.957e+02 2.811e+02, threshold=3.391e+02, percent-clipped=0.0 +2024-08-26 16:26:22,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=74805.33333333333, ans=0.0 +2024-08-26 16:26:30,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=74858.66666666667, ans=0.125 +2024-08-26 16:26:33,967 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:26:37,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=74858.66666666667, ans=0.125 +2024-08-26 16:26:38,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=74858.66666666667, ans=0.04949747468305833 +2024-08-26 16:26:40,312 INFO [train.py:1114] (0/4) Epoch 6, batch 1600, loss[loss=0.2651, simple_loss=0.3194, pruned_loss=0.07668, ctc_loss=0.1437, over 19841.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3057, pruned_loss=0.07376, ctc_loss=0.1376, over 3836462.91 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0 +2024-08-26 16:27:12,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=75018.66666666667, ans=0.125 +2024-08-26 16:27:22,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. 
limit=6.0 +2024-08-26 16:27:35,213 INFO [train.py:1114] (0/4) Epoch 6, batch 1650, loss[loss=0.2634, simple_loss=0.3218, pruned_loss=0.07341, ctc_loss=0.1454, over 19643.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3055, pruned_loss=0.07354, ctc_loss=0.1374, over 3832786.98 frames. ], batch size: 59, lr: 2.34e-02, grad_scale: 32.0 +2024-08-26 16:27:35,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75178.66666666667, ans=0.125 +2024-08-26 16:27:58,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=75285.33333333333, ans=10.0 +2024-08-26 16:28:43,074 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.584e+02 1.799e+02 2.082e+02 3.549e+02, threshold=3.597e+02, percent-clipped=1.0 +2024-08-26 16:29:32,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=75392.0, ans=0.1 +2024-08-26 16:29:36,192 INFO [train.py:1114] (0/4) Epoch 6, batch 1700, loss[loss=0.2135, simple_loss=0.2664, pruned_loss=0.05972, ctc_loss=0.1029, over 19668.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3046, pruned_loss=0.07274, ctc_loss=0.136, over 3846878.92 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:29:38,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75445.33333333333, ans=0.125 +2024-08-26 16:29:55,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75552.0, ans=0.1 +2024-08-26 16:29:58,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=75552.0, ans=0.0 +2024-08-26 16:30:00,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=75552.0, ans=0.0 +2024-08-26 16:30:01,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=75552.0, ans=0.125 +2024-08-26 16:30:07,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=75605.33333333333, ans=0.5 +2024-08-26 16:30:20,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=75658.66666666667, ans=0.025 +2024-08-26 16:30:22,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.41 vs. limit=10.0 +2024-08-26 16:30:24,076 INFO [train.py:1114] (0/4) Epoch 6, batch 1750, loss[loss=0.219, simple_loss=0.2704, pruned_loss=0.06066, ctc_loss=0.1154, over 19645.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.304, pruned_loss=0.07244, ctc_loss=0.1354, over 3852018.78 frames. 
], batch size: 45, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:31:04,531 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.524e+02 1.697e+02 1.959e+02 3.052e+02, threshold=3.394e+02, percent-clipped=0.0 +2024-08-26 16:31:11,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=75872.0, ans=0.2 +2024-08-26 16:31:16,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75925.33333333333, ans=0.125 +2024-08-26 16:31:25,741 INFO [train.py:1114] (0/4) Epoch 6, batch 1800, loss[loss=0.2595, simple_loss=0.3175, pruned_loss=0.07314, ctc_loss=0.1382, over 19623.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3043, pruned_loss=0.07264, ctc_loss=0.136, over 3853824.76 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:32:52,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=76192.0, ans=0.025 +2024-08-26 16:33:01,929 INFO [train.py:1114] (0/4) Epoch 6, batch 1850, loss[loss=0.2769, simple_loss=0.3287, pruned_loss=0.08189, ctc_loss=0.1531, over 19579.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3039, pruned_loss=0.07233, ctc_loss=0.1354, over 3858141.23 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:33:26,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=76245.33333333333, ans=0.2 +2024-08-26 16:33:57,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=76352.0, ans=0.125 +2024-08-26 16:34:05,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=76405.33333333333, ans=0.0 +2024-08-26 16:34:05,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=76405.33333333333, ans=0.0 +2024-08-26 16:34:05,740 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.545e+02 1.701e+02 1.893e+02 2.907e+02, threshold=3.402e+02, percent-clipped=0.0 +2024-08-26 16:34:23,395 INFO [train.py:1114] (0/4) Epoch 6, batch 1900, loss[loss=0.2541, simple_loss=0.3187, pruned_loss=0.06997, ctc_loss=0.124, over 19656.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3044, pruned_loss=0.07223, ctc_loss=0.135, over 3863486.84 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:34:29,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=76512.0, ans=0.0 +2024-08-26 16:34:49,280 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=15.0 +2024-08-26 16:35:11,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76672.0, ans=0.1 +2024-08-26 16:35:13,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.29 vs. 
limit=15.0 +2024-08-26 16:35:18,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=76725.33333333333, ans=0.2 +2024-08-26 16:35:25,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=76725.33333333333, ans=0.2 +2024-08-26 16:35:26,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=76725.33333333333, ans=0.025 +2024-08-26 16:35:26,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=76725.33333333333, ans=0.2 +2024-08-26 16:35:27,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=76778.66666666667, ans=0.125 +2024-08-26 16:35:27,762 INFO [train.py:1114] (0/4) Epoch 6, batch 1950, loss[loss=0.2267, simple_loss=0.2906, pruned_loss=0.05816, ctc_loss=0.1162, over 19571.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3058, pruned_loss=0.07262, ctc_loss=0.1356, over 3871849.41 frames. ], batch size: 52, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:36:23,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=76885.33333333333, ans=0.125 +2024-08-26 16:36:32,182 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.646e+02 1.808e+02 2.059e+02 4.885e+02, threshold=3.617e+02, percent-clipped=2.0 +2024-08-26 16:36:40,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=76938.66666666667, ans=0.125 +2024-08-26 16:36:53,611 INFO [train.py:1114] (0/4) Epoch 6, batch 2000, loss[loss=0.2277, simple_loss=0.2787, pruned_loss=0.06344, ctc_loss=0.1244, over 19691.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.307, pruned_loss=0.07351, ctc_loss=0.1373, over 3855417.73 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:37:07,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=77098.66666666667, ans=0.0 +2024-08-26 16:37:16,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=77152.0, ans=0.0 +2024-08-26 16:37:22,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=77205.33333333333, ans=0.125 +2024-08-26 16:37:24,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.65 vs. limit=12.0 +2024-08-26 16:37:32,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0 +2024-08-26 16:37:38,199 INFO [train.py:1114] (0/4) Epoch 6, batch 2050, loss[loss=0.1985, simple_loss=0.2597, pruned_loss=0.04934, ctc_loss=0.09688, over 19725.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3051, pruned_loss=0.07288, ctc_loss=0.1361, over 3852827.87 frames. 
], batch size: 47, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:37:38,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=77312.0, ans=0.0 +2024-08-26 16:37:38,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=77312.0, ans=0.125 +2024-08-26 16:37:40,147 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:37:44,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.22 vs. limit=22.5 +2024-08-26 16:38:04,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.566e+02 1.748e+02 2.075e+02 4.290e+02, threshold=3.497e+02, percent-clipped=1.0 +2024-08-26 16:38:10,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=77472.0, ans=0.025 +2024-08-26 16:38:12,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=12.0 +2024-08-26 16:38:34,161 INFO [train.py:1114] (0/4) Epoch 6, batch 2100, loss[loss=0.2441, simple_loss=0.3067, pruned_loss=0.06495, ctc_loss=0.129, over 19770.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3041, pruned_loss=0.07204, ctc_loss=0.1348, over 3859819.95 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:38:35,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=77578.66666666667, ans=0.2 +2024-08-26 16:39:07,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=77578.66666666667, ans=0.125 +2024-08-26 16:39:10,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=77578.66666666667, ans=0.125 +2024-08-26 16:39:30,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.32 vs. limit=15.0 +2024-08-26 16:39:42,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.09 vs. limit=15.0 +2024-08-26 16:39:42,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=77792.0, ans=15.0 +2024-08-26 16:39:46,031 INFO [train.py:1114] (0/4) Epoch 6, batch 2150, loss[loss=0.241, simple_loss=0.2986, pruned_loss=0.06677, ctc_loss=0.1249, over 19586.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3036, pruned_loss=0.0718, ctc_loss=0.1342, over 3870116.27 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:39:52,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.89 vs. 
limit=15.0 +2024-08-26 16:39:55,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77898.66666666667, ans=0.0 +2024-08-26 16:40:04,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77952.0, ans=0.125 +2024-08-26 16:40:13,759 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.590e+02 1.744e+02 2.019e+02 3.989e+02, threshold=3.489e+02, percent-clipped=1.0 +2024-08-26 16:40:14,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=78005.33333333333, ans=0.125 +2024-08-26 16:40:24,500 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:40:31,353 INFO [train.py:1114] (0/4) Epoch 6, batch 2200, loss[loss=0.2384, simple_loss=0.3033, pruned_loss=0.06232, ctc_loss=0.1223, over 19602.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3038, pruned_loss=0.07196, ctc_loss=0.1344, over 3868804.12 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:40:33,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78112.0, ans=0.1 +2024-08-26 16:40:51,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=78218.66666666667, ans=0.125 +2024-08-26 16:40:56,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=78218.66666666667, ans=0.025 +2024-08-26 16:40:56,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78218.66666666667, ans=0.125 +2024-08-26 16:41:01,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=78272.0, ans=0.2 +2024-08-26 16:41:03,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=78272.0, ans=0.05 +2024-08-26 16:41:57,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.10 vs. limit=15.0 +2024-08-26 16:42:02,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.49 vs. limit=15.0 +2024-08-26 16:42:04,357 INFO [train.py:1114] (0/4) Epoch 6, batch 2250, loss[loss=0.2337, simple_loss=0.2954, pruned_loss=0.06267, ctc_loss=0.1166, over 19609.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3037, pruned_loss=0.07206, ctc_loss=0.1343, over 3868380.80 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:42:30,472 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.366e+02 1.631e+02 1.850e+02 2.118e+02 4.912e+02, threshold=3.701e+02, percent-clipped=4.0 +2024-08-26 16:42:57,024 INFO [train.py:1114] (0/4) Epoch 6, batch 2300, loss[loss=0.2239, simple_loss=0.2808, pruned_loss=0.06044, ctc_loss=0.1152, over 19514.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3028, pruned_loss=0.07183, ctc_loss=0.1339, over 3861729.87 frames. 
], batch size: 49, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:17,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=78752.0, ans=0.125 +2024-08-26 16:43:25,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.52 vs. limit=22.5 +2024-08-26 16:43:28,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=78805.33333333333, ans=0.04949747468305833 +2024-08-26 16:43:36,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=78858.66666666667, ans=0.125 +2024-08-26 16:43:41,597 INFO [train.py:1114] (0/4) Epoch 6, batch 2350, loss[loss=0.2648, simple_loss=0.3144, pruned_loss=0.07872, ctc_loss=0.1445, over 19666.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3031, pruned_loss=0.0721, ctc_loss=0.1345, over 3864765.58 frames. ], batch size: 63, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:56,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78965.33333333333, ans=0.1 +2024-08-26 16:43:56,899 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.03 vs. limit=15.0 +2024-08-26 16:44:09,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.319e+02 1.571e+02 1.792e+02 2.053e+02 3.529e+02, threshold=3.585e+02, percent-clipped=0.0 +2024-08-26 16:44:10,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=79072.0, ans=0.0 +2024-08-26 16:44:27,088 INFO [train.py:1114] (0/4) Epoch 6, batch 2400, loss[loss=0.2553, simple_loss=0.31, pruned_loss=0.07228, ctc_loss=0.1401, over 19346.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3049, pruned_loss=0.07285, ctc_loss=0.1357, over 3859314.25 frames. ], batch size: 67, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:44:42,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=79232.0, ans=0.2 +2024-08-26 16:44:44,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=79285.33333333333, ans=0.2 +2024-08-26 16:45:06,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79392.0, ans=0.125 +2024-08-26 16:45:12,831 INFO [train.py:1114] (0/4) Epoch 6, batch 2450, loss[loss=0.3439, simple_loss=0.3484, pruned_loss=0.1258, ctc_loss=0.2196, over 13494.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3099, pruned_loss=0.07687, ctc_loss=0.1432, over 3735937.20 frames. ], batch size: 140, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:45:13,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=79445.33333333333, ans=15.0 +2024-08-26 16:45:17,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.37 vs. limit=15.0 +2024-08-26 16:45:22,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.95 vs. 
limit=15.0 +2024-08-26 16:45:23,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=79498.66666666667, ans=0.0 +2024-08-26 16:45:26,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79498.66666666667, ans=0.125 +2024-08-26 16:45:26,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79498.66666666667, ans=0.125 +2024-08-26 16:45:34,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79552.0, ans=0.125 +2024-08-26 16:45:35,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=79552.0, ans=0.0 +2024-08-26 16:45:40,096 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.744e+02 1.902e+02 2.066e+02 3.652e+02, threshold=3.804e+02, percent-clipped=1.0 +2024-08-26 16:45:41,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=79605.33333333333, ans=0.125 +2024-08-26 16:45:44,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=79605.33333333333, ans=0.125 +2024-08-26 16:45:48,561 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-6.pt +2024-08-26 16:48:16,413 INFO [train.py:1114] (0/4) Epoch 7, batch 0, loss[loss=0.2426, simple_loss=0.2927, pruned_loss=0.07011, ctc_loss=0.1307, over 19812.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2927, pruned_loss=0.07011, ctc_loss=0.1307, over 19812.00 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:48:16,415 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 16:48:29,568 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.2068, simple_loss=0.2958, pruned_loss=0.04327, ctc_loss=0.07811, over 944034.00 frames. +2024-08-26 16:48:29,570 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-26 16:48:35,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.20 vs. limit=22.5 +2024-08-26 16:48:45,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.60 vs. limit=6.0 +2024-08-26 16:49:02,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=79818.66666666667, ans=0.0 +2024-08-26 16:49:08,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0 +2024-08-26 16:49:19,279 INFO [train.py:1114] (0/4) Epoch 7, batch 50, loss[loss=0.213, simple_loss=0.2767, pruned_loss=0.05369, ctc_loss=0.1046, over 19723.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3061, pruned_loss=0.07314, ctc_loss=0.1374, over 844189.99 frames. 
], batch size: 47, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:49:24,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79925.33333333333, ans=0.125 +2024-08-26 16:49:30,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.69 vs. limit=12.0 +2024-08-26 16:49:42,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.54 vs. limit=10.0 +2024-08-26 16:49:50,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=80085.33333333333, ans=0.025 +2024-08-26 16:49:57,479 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.282e+02 1.584e+02 1.822e+02 2.089e+02 3.575e+02, threshold=3.645e+02, percent-clipped=0.0 +2024-08-26 16:50:07,020 INFO [train.py:1114] (0/4) Epoch 7, batch 100, loss[loss=0.2194, simple_loss=0.2846, pruned_loss=0.05637, ctc_loss=0.1035, over 19727.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3063, pruned_loss=0.07236, ctc_loss=0.136, over 1500395.15 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:50:11,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=80192.0, ans=0.2 +2024-08-26 16:50:34,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80298.66666666667, ans=0.125 +2024-08-26 16:50:52,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=80405.33333333333, ans=0.1 +2024-08-26 16:50:58,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.04 vs. limit=22.5 +2024-08-26 16:51:01,442 INFO [train.py:1114] (0/4) Epoch 7, batch 150, loss[loss=0.2096, simple_loss=0.2623, pruned_loss=0.05717, ctc_loss=0.1065, over 19726.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3032, pruned_loss=0.07065, ctc_loss=0.1325, over 2029556.49 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:32,194 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.97 vs. limit=15.0 +2024-08-26 16:51:33,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.20 vs. limit=12.0 +2024-08-26 16:51:39,008 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.525e+02 1.667e+02 1.863e+02 2.878e+02, threshold=3.334e+02, percent-clipped=0.0 +2024-08-26 16:51:41,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=80672.0, ans=0.0 +2024-08-26 16:51:46,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=80672.0, ans=0.125 +2024-08-26 16:51:47,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.37 vs. limit=22.5 +2024-08-26 16:51:48,551 INFO [train.py:1114] (0/4) Epoch 7, batch 200, loss[loss=0.2648, simple_loss=0.3214, pruned_loss=0.07601, ctc_loss=0.1404, over 18351.00 frames. 
], tot_loss[loss=0.2475, simple_loss=0.3016, pruned_loss=0.07031, ctc_loss=0.1319, over 2436499.74 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:49,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80725.33333333333, ans=0.125 +2024-08-26 16:51:49,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=80725.33333333333, ans=0.025 +2024-08-26 16:51:51,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.69 vs. limit=22.5 +2024-08-26 16:52:11,299 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.78 vs. limit=12.0 +2024-08-26 16:52:11,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80832.0, ans=0.0 +2024-08-26 16:52:35,146 INFO [train.py:1114] (0/4) Epoch 7, batch 250, loss[loss=0.2396, simple_loss=0.3074, pruned_loss=0.06224, ctc_loss=0.1181, over 19400.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3003, pruned_loss=0.06921, ctc_loss=0.1297, over 2756000.58 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:52:41,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=80992.0, ans=0.125 +2024-08-26 16:52:50,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=81045.33333333333, ans=0.125 +2024-08-26 16:53:01,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81098.66666666667, ans=0.125 +2024-08-26 16:53:03,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=81152.0, ans=0.0 +2024-08-26 16:53:05,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81152.0, ans=0.1 +2024-08-26 16:53:16,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.591e+02 1.729e+02 1.900e+02 5.825e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 16:53:25,919 INFO [train.py:1114] (0/4) Epoch 7, batch 300, loss[loss=0.2698, simple_loss=0.3268, pruned_loss=0.07686, ctc_loss=0.1477, over 19538.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.2999, pruned_loss=0.0689, ctc_loss=0.1294, over 2999935.01 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:53:35,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=81258.66666666667, ans=0.1 +2024-08-26 16:54:06,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81418.66666666667, ans=0.1 +2024-08-26 16:54:10,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=81472.0, ans=0.0 +2024-08-26 16:54:18,317 INFO [train.py:1114] (0/4) Epoch 7, batch 350, loss[loss=0.2226, simple_loss=0.2715, pruned_loss=0.06341, ctc_loss=0.1172, over 19755.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.2998, pruned_loss=0.06892, ctc_loss=0.1292, over 3189143.82 frames. 
], batch size: 48, lr: 2.12e-02, grad_scale: 16.0 +2024-08-26 16:54:20,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81525.33333333333, ans=0.0 +2024-08-26 16:54:31,494 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:54:51,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=81685.33333333333, ans=0.0 +2024-08-26 16:54:56,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.574e+02 1.753e+02 2.022e+02 2.928e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-26 16:54:59,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=81738.66666666667, ans=0.0 +2024-08-26 16:55:04,706 INFO [train.py:1114] (0/4) Epoch 7, batch 400, loss[loss=0.2486, simple_loss=0.3059, pruned_loss=0.06984, ctc_loss=0.1291, over 19482.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.2997, pruned_loss=0.06887, ctc_loss=0.1288, over 3340491.50 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:55:05,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=81792.0, ans=0.05 +2024-08-26 16:55:09,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81792.0, ans=0.1 +2024-08-26 16:55:17,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=81845.33333333333, ans=0.0 +2024-08-26 16:55:20,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81845.33333333333, ans=0.1 +2024-08-26 16:55:21,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=81845.33333333333, ans=0.025 +2024-08-26 16:55:36,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.74 vs. limit=22.5 +2024-08-26 16:55:43,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=82005.33333333333, ans=0.2 +2024-08-26 16:55:47,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=82005.33333333333, ans=0.0 +2024-08-26 16:55:49,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=82005.33333333333, ans=0.07 +2024-08-26 16:55:51,750 INFO [train.py:1114] (0/4) Epoch 7, batch 450, loss[loss=0.2448, simple_loss=0.3053, pruned_loss=0.06501, ctc_loss=0.1358, over 19601.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.2999, pruned_loss=0.069, ctc_loss=0.1291, over 3447901.07 frames. 
], batch size: 55, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:11,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=82112.0, ans=0.2 +2024-08-26 16:56:11,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=82112.0, ans=0.0 +2024-08-26 16:56:34,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.47 vs. limit=15.0 +2024-08-26 16:56:41,733 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.250e+02 1.485e+02 1.753e+02 2.038e+02 3.855e+02, threshold=3.505e+02, percent-clipped=1.0 +2024-08-26 16:56:43,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82272.0, ans=0.1 +2024-08-26 16:56:44,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.72 vs. limit=15.0 +2024-08-26 16:56:48,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=82325.33333333333, ans=0.025 +2024-08-26 16:56:49,043 INFO [train.py:1114] (0/4) Epoch 7, batch 500, loss[loss=0.2535, simple_loss=0.3125, pruned_loss=0.07118, ctc_loss=0.1303, over 19655.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2982, pruned_loss=0.06803, ctc_loss=0.1273, over 3543511.16 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:57:07,788 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:57:27,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=82538.66666666667, ans=6.0 +2024-08-26 16:57:29,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82538.66666666667, ans=0.1 +2024-08-26 16:57:34,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=82538.66666666667, ans=0.2 +2024-08-26 16:57:35,769 INFO [train.py:1114] (0/4) Epoch 7, batch 550, loss[loss=0.2527, simple_loss=0.3097, pruned_loss=0.07025, ctc_loss=0.1378, over 19167.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2987, pruned_loss=0.06835, ctc_loss=0.1278, over 3604774.38 frames. ], batch size: 71, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:57:48,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=82645.33333333333, ans=0.125 +2024-08-26 16:57:58,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=82698.66666666667, ans=0.0 +2024-08-26 16:58:00,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0 +2024-08-26 16:58:16,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.530e+02 1.701e+02 1.927e+02 4.407e+02, threshold=3.402e+02, percent-clipped=1.0 +2024-08-26 16:58:30,198 INFO [train.py:1114] (0/4) Epoch 7, batch 600, loss[loss=0.2554, simple_loss=0.3162, pruned_loss=0.0718, ctc_loss=0.1276, over 19322.00 frames. 
], tot_loss[loss=0.2436, simple_loss=0.2993, pruned_loss=0.06838, ctc_loss=0.128, over 3663939.57 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 17:00:42,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82912.0, ans=0.1 +2024-08-26 17:04:51,937 INFO [train.py:1114] (0/4) Epoch 7, batch 650, loss[loss=0.2662, simple_loss=0.3208, pruned_loss=0.07728, ctc_loss=0.1429, over 19746.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2982, pruned_loss=0.06783, ctc_loss=0.127, over 3714209.88 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:05:23,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=83232.0, ans=0.125 +2024-08-26 17:05:41,844 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.502e+02 1.666e+02 1.880e+02 3.682e+02, threshold=3.331e+02, percent-clipped=2.0 +2024-08-26 17:06:19,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=83392.0, ans=0.125 +2024-08-26 17:06:20,351 INFO [train.py:1114] (0/4) Epoch 7, batch 700, loss[loss=0.2125, simple_loss=0.2748, pruned_loss=0.05463, ctc_loss=0.102, over 19715.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2988, pruned_loss=0.0682, ctc_loss=0.1276, over 3746801.58 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:06:46,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=83498.66666666667, ans=0.2 +2024-08-26 17:07:06,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=83605.33333333333, ans=0.0 +2024-08-26 17:07:08,428 INFO [train.py:1114] (0/4) Epoch 7, batch 750, loss[loss=0.2147, simple_loss=0.2876, pruned_loss=0.05137, ctc_loss=0.09747, over 19505.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2984, pruned_loss=0.06805, ctc_loss=0.1274, over 3773207.45 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:07:10,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=83658.66666666667, ans=0.2 +2024-08-26 17:07:20,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.35 vs. limit=10.0 +2024-08-26 17:07:41,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-26 17:07:48,231 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.533e+02 1.678e+02 1.875e+02 3.166e+02, threshold=3.356e+02, percent-clipped=0.0 +2024-08-26 17:07:50,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=83872.0, ans=0.0 +2024-08-26 17:07:56,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0 +2024-08-26 17:07:58,364 INFO [train.py:1114] (0/4) Epoch 7, batch 800, loss[loss=0.2274, simple_loss=0.2828, pruned_loss=0.06265, ctc_loss=0.1169, over 19801.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2988, pruned_loss=0.06833, ctc_loss=0.1276, over 3794941.03 frames. 
], batch size: 49, lr: 2.10e-02, grad_scale: 32.0 +2024-08-26 17:08:11,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=83978.66666666667, ans=0.125 +2024-08-26 17:08:13,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=22.5 +2024-08-26 17:08:14,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83978.66666666667, ans=0.1 +2024-08-26 17:08:20,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=84032.0, ans=0.2 +2024-08-26 17:08:27,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=84032.0, ans=0.0 +2024-08-26 17:08:36,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=84085.33333333333, ans=0.0 +2024-08-26 17:08:36,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=84085.33333333333, ans=0.125 +2024-08-26 17:08:56,347 INFO [train.py:1114] (0/4) Epoch 7, batch 850, loss[loss=0.2589, simple_loss=0.3179, pruned_loss=0.0728, ctc_loss=0.1356, over 19655.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2982, pruned_loss=0.06799, ctc_loss=0.1272, over 3814006.77 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:08:56,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.76 vs. limit=6.0 +2024-08-26 17:09:04,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=84192.0, ans=0.2 +2024-08-26 17:09:06,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=84245.33333333333, ans=0.1 +2024-08-26 17:09:19,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84298.66666666667, ans=0.1 +2024-08-26 17:09:33,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84298.66666666667, ans=0.125 +2024-08-26 17:09:34,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.05 vs. limit=22.5 +2024-08-26 17:09:48,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84298.66666666667, ans=0.1 +2024-08-26 17:10:01,952 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.310e+02 1.545e+02 1.673e+02 1.909e+02 3.259e+02, threshold=3.346e+02, percent-clipped=0.0 +2024-08-26 17:10:09,591 INFO [train.py:1114] (0/4) Epoch 7, batch 900, loss[loss=0.2096, simple_loss=0.2687, pruned_loss=0.05443, ctc_loss=0.1044, over 19440.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2987, pruned_loss=0.06855, ctc_loss=0.1279, over 3817543.60 frames. 
], batch size: 48, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:10:11,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=84458.66666666667, ans=0.0 +2024-08-26 17:10:26,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=84512.0, ans=0.5 +2024-08-26 17:10:36,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=84565.33333333333, ans=0.05 +2024-08-26 17:10:58,453 INFO [train.py:1114] (0/4) Epoch 7, batch 950, loss[loss=0.2258, simple_loss=0.2831, pruned_loss=0.06146, ctc_loss=0.1139, over 19510.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2989, pruned_loss=0.06864, ctc_loss=0.1282, over 3819914.51 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:11:03,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=84725.33333333333, ans=0.0 +2024-08-26 17:11:14,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.76 vs. limit=15.0 +2024-08-26 17:11:19,705 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.22 vs. limit=15.0 +2024-08-26 17:11:21,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=15.0 +2024-08-26 17:11:22,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=12.0 +2024-08-26 17:11:39,653 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:11:39,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=84885.33333333333, ans=0.0 +2024-08-26 17:11:48,307 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.566e+02 1.708e+02 1.976e+02 3.572e+02, threshold=3.415e+02, percent-clipped=1.0 +2024-08-26 17:12:17,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=84992.0, ans=0.95 +2024-08-26 17:12:18,436 INFO [train.py:1114] (0/4) Epoch 7, batch 1000, loss[loss=0.212, simple_loss=0.2755, pruned_loss=0.05384, ctc_loss=0.1021, over 19858.00 frames. ], tot_loss[loss=0.245, simple_loss=0.2999, pruned_loss=0.06918, ctc_loss=0.1292, over 3816644.91 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:13:31,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=85098.66666666667, ans=0.0 +2024-08-26 17:13:38,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=85098.66666666667, ans=0.0 +2024-08-26 17:13:57,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85205.33333333333, ans=0.1 +2024-08-26 17:13:59,708 INFO [train.py:1114] (0/4) Epoch 7, batch 1050, loss[loss=0.2513, simple_loss=0.312, pruned_loss=0.07016, ctc_loss=0.1256, over 19864.00 frames. 
], tot_loss[loss=0.2437, simple_loss=0.299, pruned_loss=0.0686, ctc_loss=0.1279, over 3823808.53 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:14:10,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85312.0, ans=0.125 +2024-08-26 17:14:12,366 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-16000.pt +2024-08-26 17:14:39,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85312.0, ans=0.0 +2024-08-26 17:14:40,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85312.0, ans=0.125 +2024-08-26 17:16:32,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=85418.66666666667, ans=0.125 +2024-08-26 17:16:37,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=85418.66666666667, ans=0.125 +2024-08-26 17:16:40,670 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.449e+02 1.584e+02 1.768e+02 2.861e+02, threshold=3.169e+02, percent-clipped=0.0 +2024-08-26 17:16:40,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=85472.0, ans=0.125 +2024-08-26 17:16:43,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=85472.0, ans=0.025 +2024-08-26 17:16:44,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85472.0, ans=0.1 +2024-08-26 17:16:44,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=85472.0, ans=0.125 +2024-08-26 17:16:45,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.88 vs. limit=12.0 +2024-08-26 17:16:46,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=85472.0, ans=0.125 +2024-08-26 17:16:47,590 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:16:48,398 INFO [train.py:1114] (0/4) Epoch 7, batch 1100, loss[loss=0.2326, simple_loss=0.2912, pruned_loss=0.06286, ctc_loss=0.1207, over 19601.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.298, pruned_loss=0.06787, ctc_loss=0.127, over 3831458.50 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:16:48,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=85525.33333333333, ans=0.125 +2024-08-26 17:17:07,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.63 vs. limit=12.0 +2024-08-26 17:17:09,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.21 vs. 
limit=22.5 +2024-08-26 17:17:12,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=85632.0, ans=0.04949747468305833 +2024-08-26 17:17:35,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=85738.66666666667, ans=0.125 +2024-08-26 17:17:45,027 INFO [train.py:1114] (0/4) Epoch 7, batch 1150, loss[loss=0.2128, simple_loss=0.2796, pruned_loss=0.05249, ctc_loss=0.1026, over 19588.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2983, pruned_loss=0.06831, ctc_loss=0.1279, over 3830327.90 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-26 17:17:45,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=85792.0, ans=0.125 +2024-08-26 17:17:56,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=85845.33333333333, ans=0.0 +2024-08-26 17:18:01,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=85845.33333333333, ans=0.125 +2024-08-26 17:18:05,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85845.33333333333, ans=0.125 +2024-08-26 17:18:08,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=85898.66666666667, ans=0.2 +2024-08-26 17:18:16,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85898.66666666667, ans=0.0 +2024-08-26 17:18:24,551 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-08-26 17:18:41,958 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.522e+02 1.667e+02 1.891e+02 3.736e+02, threshold=3.335e+02, percent-clipped=2.0 +2024-08-26 17:18:45,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=86005.33333333333, ans=0.025 +2024-08-26 17:18:48,636 INFO [train.py:1114] (0/4) Epoch 7, batch 1200, loss[loss=0.2522, simple_loss=0.3106, pruned_loss=0.07121, ctc_loss=0.1283, over 19850.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2993, pruned_loss=0.06861, ctc_loss=0.1283, over 3826209.33 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:19:14,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=86165.33333333333, ans=15.0 +2024-08-26 17:19:26,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=86218.66666666667, ans=0.125 +2024-08-26 17:19:54,879 INFO [train.py:1114] (0/4) Epoch 7, batch 1250, loss[loss=0.2555, simple_loss=0.3106, pruned_loss=0.07243, ctc_loss=0.1387, over 19523.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2994, pruned_loss=0.06828, ctc_loss=0.1278, over 3844367.75 frames. ], batch size: 61, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:19:56,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.35 vs. 
limit=15.0 +2024-08-26 17:20:00,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=86325.33333333333, ans=0.125 +2024-08-26 17:20:02,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0 +2024-08-26 17:20:08,770 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:20:17,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=86432.0, ans=10.0 +2024-08-26 17:20:21,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=86432.0, ans=0.125 +2024-08-26 17:20:21,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=86432.0, ans=0.125 +2024-08-26 17:20:30,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=86485.33333333333, ans=0.125 +2024-08-26 17:20:31,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=15.0 +2024-08-26 17:20:35,654 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.207e+02 1.476e+02 1.609e+02 1.857e+02 3.245e+02, threshold=3.218e+02, percent-clipped=0.0 +2024-08-26 17:20:44,740 INFO [train.py:1114] (0/4) Epoch 7, batch 1300, loss[loss=0.2722, simple_loss=0.3193, pruned_loss=0.08363, ctc_loss=0.1446, over 18932.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2981, pruned_loss=0.06756, ctc_loss=0.1265, over 3847421.45 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:20:49,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=86592.0, ans=0.0 +2024-08-26 17:21:23,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=86752.0, ans=0.0 +2024-08-26 17:21:26,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=86752.0, ans=0.125 +2024-08-26 17:21:31,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=86805.33333333333, ans=0.125 +2024-08-26 17:21:37,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.28 vs. limit=15.0 +2024-08-26 17:21:38,929 INFO [train.py:1114] (0/4) Epoch 7, batch 1350, loss[loss=0.2372, simple_loss=0.2975, pruned_loss=0.06492, ctc_loss=0.1176, over 19771.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2969, pruned_loss=0.06679, ctc_loss=0.1251, over 3858190.88 frames. 
], batch size: 54, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:21:44,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86858.66666666667, ans=0.0 +2024-08-26 17:21:48,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=86912.0, ans=0.0 +2024-08-26 17:21:52,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=86912.0, ans=0.125 +2024-08-26 17:21:57,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86965.33333333333, ans=0.1 +2024-08-26 17:22:08,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=87018.66666666667, ans=0.025 +2024-08-26 17:22:15,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87018.66666666667, ans=0.1 +2024-08-26 17:22:19,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.495e+02 1.726e+02 1.992e+02 3.104e+02, threshold=3.452e+02, percent-clipped=0.0 +2024-08-26 17:22:26,113 INFO [train.py:1114] (0/4) Epoch 7, batch 1400, loss[loss=0.1916, simple_loss=0.2518, pruned_loss=0.04749, ctc_loss=0.09083, over 19698.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.2963, pruned_loss=0.06677, ctc_loss=0.1247, over 3866044.91 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:22:28,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=87125.33333333333, ans=0.0 +2024-08-26 17:22:28,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.95 vs. limit=22.5 +2024-08-26 17:23:02,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=15.0 +2024-08-26 17:23:24,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=87285.33333333333, ans=0.125 +2024-08-26 17:23:35,695 INFO [train.py:1114] (0/4) Epoch 7, batch 1450, loss[loss=0.2593, simple_loss=0.3118, pruned_loss=0.07483, ctc_loss=0.1427, over 19693.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2976, pruned_loss=0.06746, ctc_loss=0.126, over 3862692.13 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:23:49,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.62 vs. 
limit=22.5 +2024-08-26 17:23:50,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=87445.33333333333, ans=0.125 +2024-08-26 17:24:07,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=87552.0, ans=0.125 +2024-08-26 17:24:13,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=87552.0, ans=0.0 +2024-08-26 17:24:20,640 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.540e+02 1.669e+02 1.894e+02 3.453e+02, threshold=3.338e+02, percent-clipped=1.0 +2024-08-26 17:24:23,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.67 vs. limit=15.0 +2024-08-26 17:24:29,674 INFO [train.py:1114] (0/4) Epoch 7, batch 1500, loss[loss=0.2585, simple_loss=0.3116, pruned_loss=0.07483, ctc_loss=0.1395, over 19570.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.298, pruned_loss=0.06755, ctc_loss=0.1264, over 3861815.85 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:24:35,484 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:24:39,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=87658.66666666667, ans=0.125 +2024-08-26 17:25:01,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87818.66666666667, ans=0.1 +2024-08-26 17:25:08,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=87818.66666666667, ans=0.125 +2024-08-26 17:25:08,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=87818.66666666667, ans=0.0 +2024-08-26 17:25:19,519 INFO [train.py:1114] (0/4) Epoch 7, batch 1550, loss[loss=0.2569, simple_loss=0.3173, pruned_loss=0.07051, ctc_loss=0.139, over 19600.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2981, pruned_loss=0.06787, ctc_loss=0.127, over 3847484.92 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:25:21,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87925.33333333333, ans=0.125 +2024-08-26 17:25:37,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88032.0, ans=0.1 +2024-08-26 17:25:38,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88032.0, ans=0.125 +2024-08-26 17:25:47,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.18 vs. limit=15.0 +2024-08-26 17:25:57,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=88085.33333333333, ans=0.0 +2024-08-26 17:26:00,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.84 vs. 
limit=10.0 +2024-08-26 17:26:03,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=88138.66666666667, ans=0.09899494936611666 +2024-08-26 17:26:04,271 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.559e+02 1.788e+02 2.182e+02 5.116e+02, threshold=3.576e+02, percent-clipped=3.0 +2024-08-26 17:26:06,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=88138.66666666667, ans=0.125 +2024-08-26 17:26:10,964 INFO [train.py:1114] (0/4) Epoch 7, batch 1600, loss[loss=0.2403, simple_loss=0.3043, pruned_loss=0.06305, ctc_loss=0.1252, over 19840.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2983, pruned_loss=0.06801, ctc_loss=0.1272, over 3836479.69 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:26:12,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=88192.0, ans=0.2 +2024-08-26 17:26:34,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=88298.66666666667, ans=0.0 +2024-08-26 17:26:39,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=88298.66666666667, ans=0.0 +2024-08-26 17:26:42,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=88352.0, ans=0.0 +2024-08-26 17:26:46,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=88352.0, ans=0.1 +2024-08-26 17:26:53,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88405.33333333333, ans=0.125 +2024-08-26 17:27:01,934 INFO [train.py:1114] (0/4) Epoch 7, batch 1650, loss[loss=0.2374, simple_loss=0.2998, pruned_loss=0.06294, ctc_loss=0.123, over 19659.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2979, pruned_loss=0.06789, ctc_loss=0.1271, over 3832847.28 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:27:30,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=88458.66666666667, ans=0.0 +2024-08-26 17:27:32,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88458.66666666667, ans=0.125 +2024-08-26 17:27:47,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0 +2024-08-26 17:28:05,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=88565.33333333333, ans=0.0 +2024-08-26 17:28:12,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.93 vs. 
limit=10.0 +2024-08-26 17:28:17,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=88565.33333333333, ans=0.125 +2024-08-26 17:28:44,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88618.66666666667, ans=0.1 +2024-08-26 17:29:23,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=88672.0, ans=0.125 +2024-08-26 17:29:25,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.503e+02 1.653e+02 1.809e+02 2.992e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-26 17:29:26,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.33 vs. limit=22.5 +2024-08-26 17:29:30,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=88672.0, ans=0.5 +2024-08-26 17:29:40,063 INFO [train.py:1114] (0/4) Epoch 7, batch 1700, loss[loss=0.2161, simple_loss=0.2677, pruned_loss=0.06006, ctc_loss=0.1113, over 19668.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2974, pruned_loss=0.0671, ctc_loss=0.1258, over 3847638.34 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:30:08,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88778.66666666667, ans=0.1 +2024-08-26 17:30:12,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=88778.66666666667, ans=0.2 +2024-08-26 17:30:29,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88885.33333333333, ans=0.1 +2024-08-26 17:30:44,517 INFO [train.py:1114] (0/4) Epoch 7, batch 1750, loss[loss=0.1987, simple_loss=0.2523, pruned_loss=0.05267, ctc_loss=0.09946, over 19664.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2969, pruned_loss=0.06696, ctc_loss=0.1254, over 3851910.98 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:31:13,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=89152.0, ans=0.125 +2024-08-26 17:31:15,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89152.0, ans=0.125 +2024-08-26 17:31:17,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=89152.0, ans=0.0 +2024-08-26 17:31:23,276 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.487e+02 1.622e+02 1.808e+02 3.869e+02, threshold=3.245e+02, percent-clipped=1.0 +2024-08-26 17:31:28,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=89258.66666666667, ans=0.0 +2024-08-26 17:31:29,437 INFO [train.py:1114] (0/4) Epoch 7, batch 1800, loss[loss=0.2397, simple_loss=0.3036, pruned_loss=0.06332, ctc_loss=0.1232, over 19609.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.2969, pruned_loss=0.06688, ctc_loss=0.1253, over 3853707.10 frames. 
], batch size: 55, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:31:29,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=89258.66666666667, ans=0.025 +2024-08-26 17:31:31,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=89258.66666666667, ans=0.025 +2024-08-26 17:31:39,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=89312.0, ans=0.0 +2024-08-26 17:31:55,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89418.66666666667, ans=0.1 +2024-08-26 17:31:57,091 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:32:14,104 INFO [train.py:1114] (0/4) Epoch 7, batch 1850, loss[loss=0.2514, simple_loss=0.3128, pruned_loss=0.0687, ctc_loss=0.1316, over 19566.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.2964, pruned_loss=0.06647, ctc_loss=0.1243, over 3856339.26 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:32:16,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=89525.33333333333, ans=0.2 +2024-08-26 17:32:19,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=89525.33333333333, ans=0.05 +2024-08-26 17:32:20,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=89525.33333333333, ans=0.0 +2024-08-26 17:32:49,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89685.33333333333, ans=0.0 +2024-08-26 17:32:54,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.90 vs. limit=10.0 +2024-08-26 17:32:55,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.590e+02 1.759e+02 2.003e+02 3.443e+02, threshold=3.517e+02, percent-clipped=1.0 +2024-08-26 17:33:01,825 INFO [train.py:1114] (0/4) Epoch 7, batch 1900, loss[loss=0.2351, simple_loss=0.2992, pruned_loss=0.0614, ctc_loss=0.1204, over 19679.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2971, pruned_loss=0.06669, ctc_loss=0.1247, over 3861764.73 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:35:00,640 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.09 vs. limit=15.0 +2024-08-26 17:35:13,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90005.33333333333, ans=0.1 +2024-08-26 17:35:17,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90005.33333333333, ans=0.1 +2024-08-26 17:35:20,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=90005.33333333333, ans=0.07 +2024-08-26 17:35:23,453 INFO [train.py:1114] (0/4) Epoch 7, batch 1950, loss[loss=0.2478, simple_loss=0.3069, pruned_loss=0.06723, ctc_loss=0.1359, over 19594.00 frames. 
], tot_loss[loss=0.2406, simple_loss=0.2979, pruned_loss=0.06665, ctc_loss=0.1249, over 3871132.26 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:35:23,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.13 vs. limit=22.5 +2024-08-26 17:35:45,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=90165.33333333333, ans=0.0 +2024-08-26 17:35:45,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=90165.33333333333, ans=0.0 +2024-08-26 17:35:53,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=90218.66666666667, ans=0.0 +2024-08-26 17:35:57,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90218.66666666667, ans=0.1 +2024-08-26 17:36:03,243 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.531e+02 1.657e+02 1.854e+02 3.915e+02, threshold=3.314e+02, percent-clipped=1.0 +2024-08-26 17:36:05,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90272.0, ans=0.1 +2024-08-26 17:36:09,475 INFO [train.py:1114] (0/4) Epoch 7, batch 2000, loss[loss=0.2074, simple_loss=0.2626, pruned_loss=0.05567, ctc_loss=0.102, over 19665.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2986, pruned_loss=0.06743, ctc_loss=0.1262, over 3856701.82 frames. ], batch size: 45, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:16,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=90325.33333333333, ans=0.0 +2024-08-26 17:36:18,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=90378.66666666667, ans=0.0 +2024-08-26 17:36:19,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=90378.66666666667, ans=0.0 +2024-08-26 17:36:22,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:22,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:26,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.20 vs. 
limit=10.0 +2024-08-26 17:36:27,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90432.0, ans=0.1 +2024-08-26 17:36:33,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90432.0, ans=0.125 +2024-08-26 17:36:42,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90485.33333333333, ans=0.1 +2024-08-26 17:36:44,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=90538.66666666667, ans=0.125 +2024-08-26 17:36:51,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=90538.66666666667, ans=0.025 +2024-08-26 17:36:53,970 INFO [train.py:1114] (0/4) Epoch 7, batch 2050, loss[loss=0.2141, simple_loss=0.2736, pruned_loss=0.05704, ctc_loss=0.1014, over 19720.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2971, pruned_loss=0.06693, ctc_loss=0.1252, over 3852774.23 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:54,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=90592.0, ans=0.2 +2024-08-26 17:36:56,789 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:36:56,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=90592.0, ans=0.125 +2024-08-26 17:37:02,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=90645.33333333333, ans=0.125 +2024-08-26 17:37:15,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=90698.66666666667, ans=0.0 +2024-08-26 17:37:16,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=90698.66666666667, ans=0.0 +2024-08-26 17:37:21,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=90752.0, ans=0.0 +2024-08-26 17:37:26,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0 +2024-08-26 17:37:27,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=12.0 +2024-08-26 17:37:28,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90752.0, ans=0.1 +2024-08-26 17:37:29,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=90805.33333333333, ans=0.125 +2024-08-26 17:37:32,308 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.477e+02 1.642e+02 1.962e+02 4.346e+02, threshold=3.284e+02, percent-clipped=3.0 +2024-08-26 17:37:38,468 INFO [train.py:1114] (0/4) Epoch 7, batch 2100, loss[loss=0.2338, simple_loss=0.2969, pruned_loss=0.06311, ctc_loss=0.1111, over 19765.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.2967, pruned_loss=0.0665, ctc_loss=0.1244, over 3859772.11 frames. 
+2024-08-26 17:37:52,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=90912.0, ans=0.09899494936611666
+2024-08-26 17:37:55,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=15.0
+2024-08-26 17:38:00,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=90965.33333333333, ans=0.2
+2024-08-26 17:38:08,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91018.66666666667, ans=0.1
+2024-08-26 17:38:09,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=91018.66666666667, ans=0.0
+2024-08-26 17:38:11,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=91018.66666666667, ans=0.125
+2024-08-26 17:38:13,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=91018.66666666667, ans=0.125
+2024-08-26 17:38:25,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=91125.33333333333, ans=0.0
+2024-08-26 17:38:25,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0
+2024-08-26 17:38:26,419 INFO [train.py:1114] (0/4) Epoch 7, batch 2150, loss[loss=0.2261, simple_loss=0.2851, pruned_loss=0.06036, ctc_loss=0.1156, over 19586.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2955, pruned_loss=0.06587, ctc_loss=0.123, over 3870090.82 frames. ], batch size: 52, lr: 2.03e-02, grad_scale: 32.0
+2024-08-26 17:38:37,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=91178.66666666667, ans=0.125
+2024-08-26 17:38:38,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91178.66666666667, ans=0.1
+2024-08-26 17:38:51,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=91232.0, ans=0.0
+2024-08-26 17:38:52,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91285.33333333333, ans=0.1
+2024-08-26 17:39:04,135 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.485e+02 1.702e+02 1.931e+02 2.999e+02, threshold=3.403e+02, percent-clipped=0.0
+2024-08-26 17:39:10,353 INFO [train.py:1114] (0/4) Epoch 7, batch 2200, loss[loss=0.2246, simple_loss=0.2904, pruned_loss=0.05788, ctc_loss=0.1078, over 19566.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.2955, pruned_loss=0.06575, ctc_loss=0.1229, over 3867872.96 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0
+2024-08-26 17:39:14,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=91392.0, ans=0.125
+2024-08-26 17:39:18,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.89 vs. limit=22.5
+2024-08-26 17:39:23,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=91445.33333333333, ans=0.025
+2024-08-26 17:39:25,644 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 17:39:33,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=91498.66666666667, ans=0.0
+2024-08-26 17:39:40,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91552.0, ans=0.125
+2024-08-26 17:39:47,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=91605.33333333333, ans=0.125
+2024-08-26 17:39:54,464 INFO [train.py:1114] (0/4) Epoch 7, batch 2250, loss[loss=0.2515, simple_loss=0.3058, pruned_loss=0.07216, ctc_loss=0.1324, over 19606.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.2956, pruned_loss=0.06587, ctc_loss=0.1233, over 3867908.29 frames. ], batch size: 55, lr: 2.02e-02, grad_scale: 32.0
+2024-08-26 17:39:57,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=91658.66666666667, ans=0.125
+2024-08-26 17:40:21,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.79 vs. limit=22.5
+2024-08-26 17:40:23,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=91818.66666666667, ans=0.125
+2024-08-26 17:40:32,444 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.538e+02 1.708e+02 1.997e+02 3.315e+02, threshold=3.416e+02, percent-clipped=0.0
+2024-08-26 17:40:38,568 INFO [train.py:1114] (0/4) Epoch 7, batch 2300, loss[loss=0.2082, simple_loss=0.2687, pruned_loss=0.05355, ctc_loss=0.1014, over 19477.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.2946, pruned_loss=0.06563, ctc_loss=0.1228, over 3861624.74 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 32.0
+2024-08-26 17:40:45,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0
+2024-08-26 17:40:48,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=91978.66666666667, ans=0.125
+2024-08-26 17:40:57,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=92032.0, ans=0.2
+2024-08-26 17:41:02,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.40 vs. limit=22.5
+2024-08-26 17:41:06,365 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 17:41:08,499 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0
+2024-08-26 17:41:11,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92085.33333333333, ans=0.125
+2024-08-26 17:41:22,879 INFO [train.py:1114] (0/4) Epoch 7, batch 2350, loss[loss=0.258, simple_loss=0.3096, pruned_loss=0.07606, ctc_loss=0.1356, over 19667.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.2949, pruned_loss=0.06601, ctc_loss=0.1233, over 3864539.95 frames. ], batch size: 63, lr: 2.02e-02, grad_scale: 32.0
+2024-08-26 17:41:40,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92298.66666666667, ans=0.125
+2024-08-26 17:41:42,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=92298.66666666667, ans=0.125
+2024-08-26 17:41:43,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92298.66666666667, ans=0.0
+2024-08-26 17:41:45,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=92298.66666666667, ans=0.125
+2024-08-26 17:42:01,691 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.515e+02 1.664e+02 1.862e+02 3.479e+02, threshold=3.327e+02, percent-clipped=1.0
+2024-08-26 17:42:06,884 INFO [train.py:1114] (0/4) Epoch 7, batch 2400, loss[loss=0.2392, simple_loss=0.3025, pruned_loss=0.06449, ctc_loss=0.1174, over 19393.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2971, pruned_loss=0.06677, ctc_loss=0.1245, over 3858854.77 frames. ], batch size: 67, lr: 2.01e-02, grad_scale: 32.0
+2024-08-26 17:42:09,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92458.66666666667, ans=0.125
+2024-08-26 17:42:09,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=92458.66666666667, ans=0.125
+2024-08-26 17:42:13,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0
+2024-08-26 17:42:16,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=92512.0, ans=22.5
+2024-08-26 17:42:19,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=92512.0, ans=0.0
+2024-08-26 17:42:44,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=92672.0, ans=0.125
+2024-08-26 17:42:48,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=92672.0, ans=0.95
+2024-08-26 17:42:50,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=92672.0, ans=0.0
+2024-08-26 17:42:56,044 INFO [train.py:1114] (0/4) Epoch 7, batch 2450, loss[loss=0.3343, simple_loss=0.348, pruned_loss=0.1158, ctc_loss=0.2228, over 14230.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3026, pruned_loss=0.07097, ctc_loss=0.1327, over 3733452.56 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 16.0
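The recurring `optim.py` warnings report five quantiles (min, 25%, median, 75%, max) of recently observed gradient norms, a clipping threshold derived from them, and the fraction of recent batches that were clipped. Below is a hedged sketch of that bookkeeping; it is an illustration, not the optimizer in `optim.py` (the window size, and using `Clipping_scale` times the median as the threshold, are assumptions made to match the logged fields):

```python
# Hedged sketch: track recent gradient norms, log their quantiles, and clip
# when the current norm exceeds clipping_scale times the running median.
import collections
import torch

recent_norms = collections.deque(maxlen=1024)  # assumed window size

def clip_and_report(model: torch.nn.Module, clipping_scale: float = 2.0) -> None:
    grads = [p.grad for p in model.parameters() if p.grad is not None]
    total = torch.sqrt(sum((g.detach() ** 2).sum() for g in grads))  # global grad norm
    recent_norms.append(total.item())
    q = torch.quantile(torch.tensor(list(recent_norms)),
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # 2x the median norm (assumed rule)
    if total.item() > threshold:
        for g in grads:
            g.mul_(threshold / total)  # scale gradients down onto the threshold
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          f"{' '.join(f'{v:.3e}' for v in q.tolist())}, threshold={threshold:.3e}")
```

Under this reading, `percent-clipped=1.0` in the lines above means roughly 1% of recent batches had their gradients scaled down.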
+2024-08-26 17:43:10,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=92778.66666666667, ans=0.0
+2024-08-26 17:43:19,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=92832.0, ans=0.125
+2024-08-26 17:43:24,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=92885.33333333333, ans=0.0
+2024-08-26 17:43:31,083 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-7.pt
+2024-08-26 17:44:23,145 INFO [train.py:1114] (0/4) Epoch 8, batch 0, loss[loss=0.2292, simple_loss=0.2851, pruned_loss=0.06487, ctc_loss=0.1089, over 19412.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2851, pruned_loss=0.06487, ctc_loss=0.1089, over 19412.00 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 32.0
+2024-08-26 17:44:23,146 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-26 17:44:49,256 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.2003, simple_loss=0.2903, pruned_loss=0.04062, ctc_loss=0.07268, over 944034.00 frames.
+2024-08-26 17:44:49,257 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB
+2024-08-26 17:44:55,033 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.675e+02 1.918e+02 2.084e+02 4.365e+02, threshold=3.836e+02, percent-clipped=1.0
+2024-08-26 17:45:17,325 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.06 vs. limit=12.0
+2024-08-26 17:45:37,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=93093.33333333333, ans=0.2
+2024-08-26 17:45:38,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.52 vs. limit=15.0
+2024-08-26 17:45:51,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=93146.66666666667, ans=0.0
+2024-08-26 17:45:54,276 INFO [train.py:1114] (0/4) Epoch 8, batch 50, loss[loss=0.2166, simple_loss=0.2756, pruned_loss=0.05781, ctc_loss=0.105, over 19734.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.301, pruned_loss=0.06807, ctc_loss=0.128, over 844483.73 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0
+2024-08-26 17:45:58,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=93200.0, ans=0.125
+2024-08-26 17:46:04,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.81 vs. limit=22.5
+2024-08-26 17:46:16,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.32 vs. limit=22.5
+2024-08-26 17:46:23,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=93360.0, ans=0.2
+2024-08-26 17:46:29,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.89 vs. limit=12.0
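The entries just above trace the end-of-epoch sequence: `checkpoint.py` saves `exp/epoch-7.pt`, then `train.py` computes a validation loss at epoch 8, batch 0 and reports peak GPU memory before training resumes. A schematic version of that sequence (not the actual `train.py`; `compute_loss` and the checkpoint payload are placeholders) might look like:

```python
# Schematic end-of-epoch bookkeeping; compute_loss is an assumed helper that
# returns (summed loss over frames, number of frames) for one batch.
import torch

def end_of_epoch(model, optimizer, epoch, valid_loader, exp_dir="exp"):
    torch.save({"model": model.state_dict(),
                "optimizer": optimizer.state_dict(),
                "epoch": epoch},
               f"{exp_dir}/epoch-{epoch}.pt")   # "Saving checkpoint to .../epoch-7.pt"
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)  # placeholder helper
            tot_loss += loss.item()
            tot_frames += num_frames
    print(f"validation: loss={tot_loss / tot_frames:.4f}, over {tot_frames:.2f} frames")
    if torch.cuda.is_available():
        mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
        print(f"Maximum memory allocated so far is {mb}MB")
    model.train()
```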
+2024-08-26 17:46:42,910 INFO [train.py:1114] (0/4) Epoch 8, batch 100, loss[loss=0.2104, simple_loss=0.2772, pruned_loss=0.05147, ctc_loss=0.1017, over 19716.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2993, pruned_loss=0.06722, ctc_loss=0.1264, over 1499106.47 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0
+2024-08-26 17:46:48,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.574e+02 1.749e+02 2.053e+02 3.512e+02, threshold=3.498e+02, percent-clipped=0.0
+2024-08-26 17:47:03,880 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.00 vs. limit=12.0
+2024-08-26 17:47:04,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=93573.33333333333, ans=0.0
+2024-08-26 17:47:26,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=93680.0, ans=0.125
+2024-08-26 17:47:32,162 INFO [train.py:1114] (0/4) Epoch 8, batch 150, loss[loss=0.2096, simple_loss=0.2654, pruned_loss=0.0567, ctc_loss=0.101, over 19697.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2963, pruned_loss=0.06603, ctc_loss=0.1237, over 2026666.89 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0
+2024-08-26 17:47:36,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93733.33333333333, ans=0.125
+2024-08-26 17:47:41,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93786.66666666667, ans=0.125
+2024-08-26 17:47:50,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=93786.66666666667, ans=0.125
+2024-08-26 17:47:50,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=93840.0, ans=0.0
+2024-08-26 17:48:11,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=93946.66666666667, ans=0.125
+2024-08-26 17:48:20,174 INFO [train.py:1114] (0/4) Epoch 8, batch 200, loss[loss=0.2606, simple_loss=0.3157, pruned_loss=0.07429, ctc_loss=0.1424, over 18453.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2939, pruned_loss=0.06484, ctc_loss=0.1218, over 2434139.91 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0
+2024-08-26 17:48:24,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=94000.0, ans=0.0
+2024-08-26 17:48:25,116 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0
+2024-08-26 17:48:25,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.434e+02 1.574e+02 1.787e+02 2.973e+02, threshold=3.148e+02, percent-clipped=0.0
+2024-08-26 17:48:31,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=94053.33333333333, ans=0.1
+2024-08-26 17:48:32,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=94053.33333333333, ans=0.0
+2024-08-26 17:48:38,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94053.33333333333, ans=0.1
+2024-08-26 17:48:43,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94106.66666666667, ans=0.125
+2024-08-26 17:48:46,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=94106.66666666667, ans=0.125
+2024-08-26 17:48:49,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=94106.66666666667, ans=0.1
+2024-08-26 17:48:49,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94106.66666666667, ans=0.125
+2024-08-26 17:48:52,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0
+2024-08-26 17:48:53,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=94160.0, ans=0.025
+2024-08-26 17:49:12,242 INFO [train.py:1114] (0/4) Epoch 8, batch 250, loss[loss=0.2511, simple_loss=0.3075, pruned_loss=0.07046, ctc_loss=0.1346, over 19396.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2935, pruned_loss=0.06448, ctc_loss=0.1211, over 2754429.15 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0
+2024-08-26 17:49:26,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=94320.0, ans=0.125
+2024-08-26 17:49:44,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=94426.66666666667, ans=0.2
+2024-08-26 17:49:47,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=15.0
+2024-08-26 17:49:48,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=94426.66666666667, ans=0.0
+2024-08-26 17:49:54,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94480.0, ans=0.1
+2024-08-26 17:49:59,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=94480.0, ans=0.05
+2024-08-26 17:50:03,379 INFO [train.py:1114] (0/4) Epoch 8, batch 300, loss[loss=0.2624, simple_loss=0.3169, pruned_loss=0.0752, ctc_loss=0.1439, over 19547.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2929, pruned_loss=0.06406, ctc_loss=0.1202, over 2999392.35 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 32.0
+2024-08-26 17:50:09,200 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.482e+02 1.652e+02 1.879e+02 4.693e+02, threshold=3.305e+02, percent-clipped=1.0
+2024-08-26 17:50:19,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94586.66666666667, ans=0.125
+2024-08-26 17:50:23,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=94640.0, ans=0.07
+2024-08-26 17:50:39,278 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 17:50:50,298 INFO [train.py:1114] (0/4) Epoch 8, batch 350, loss[loss=0.2147, simple_loss=0.2673, pruned_loss=0.06011, ctc_loss=0.1046, over 19784.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.2933, pruned_loss=0.06406, ctc_loss=0.1199, over 3189369.02 frames. ], batch size: 48, lr: 1.88e-02, grad_scale: 32.0
+2024-08-26 17:51:02,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=94853.33333333333, ans=0.125
+2024-08-26 17:51:15,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=94906.66666666667, ans=0.125
+2024-08-26 17:51:31,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0
+2024-08-26 17:51:34,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95013.33333333333, ans=0.125
+2024-08-26 17:51:59,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=95013.33333333333, ans=0.025
+2024-08-26 17:52:04,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=95013.33333333333, ans=0.025
+2024-08-26 17:52:19,173 INFO [train.py:1114] (0/4) Epoch 8, batch 400, loss[loss=0.2201, simple_loss=0.2921, pruned_loss=0.05373, ctc_loss=0.1014, over 19501.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2924, pruned_loss=0.06356, ctc_loss=0.1187, over 3341730.71 frames. ], batch size: 54, lr: 1.87e-02, grad_scale: 32.0
+2024-08-26 17:52:23,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=95066.66666666667, ans=0.125
+2024-08-26 17:52:24,638 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.326e+02 1.574e+02 1.829e+02 2.059e+02 4.627e+02, threshold=3.659e+02, percent-clipped=2.0
+2024-08-26 17:52:25,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=95066.66666666667, ans=0.125
+2024-08-26 17:53:02,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.97 vs. limit=22.5
+2024-08-26 17:53:04,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=95280.0, ans=0.0
+2024-08-26 17:53:08,502 INFO [train.py:1114] (0/4) Epoch 8, batch 450, loss[loss=0.2217, simple_loss=0.2896, pruned_loss=0.05655, ctc_loss=0.1018, over 19589.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2929, pruned_loss=0.06383, ctc_loss=0.1195, over 3449127.76 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 32.0
+2024-08-26 17:53:11,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=95333.33333333333, ans=0.125
+2024-08-26 17:53:11,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=95333.33333333333, ans=0.0
+2024-08-26 17:53:14,689 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.06 vs. limit=22.5
+2024-08-26 17:53:21,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=95386.66666666667, ans=0.2
+2024-08-26 17:53:27,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95386.66666666667, ans=0.1
+2024-08-26 17:53:42,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=95493.33333333333, ans=0.025
+2024-08-26 17:53:50,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95546.66666666667, ans=0.125
+2024-08-26 17:53:56,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95546.66666666667, ans=0.125
+2024-08-26 17:53:58,070 INFO [train.py:1114] (0/4) Epoch 8, batch 500, loss[loss=0.2472, simple_loss=0.3119, pruned_loss=0.06644, ctc_loss=0.1243, over 19623.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2918, pruned_loss=0.06312, ctc_loss=0.1183, over 3545194.71 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 32.0
+2024-08-26 17:54:03,655 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.468e+02 1.609e+02 1.778e+02 4.606e+02, threshold=3.218e+02, percent-clipped=1.0
+2024-08-26 17:54:07,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=95653.33333333333, ans=0.0
+2024-08-26 17:54:42,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95760.0, ans=0.125
+2024-08-26 17:55:44,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=95760.0, ans=0.5
+2024-08-26 17:55:49,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=15.0
+2024-08-26 17:56:20,608 INFO [train.py:1114] (0/4) Epoch 8, batch 550, loss[loss=0.2465, simple_loss=0.3037, pruned_loss=0.06903, ctc_loss=0.1282, over 19305.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2917, pruned_loss=0.0634, ctc_loss=0.1186, over 3607776.32 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0
+2024-08-26 17:56:25,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=95866.66666666667, ans=0.125
+2024-08-26 17:57:14,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=95866.66666666667, ans=0.025
+2024-08-26 17:57:22,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=95920.0, ans=0.07
+2024-08-26 17:57:32,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.06 vs. limit=15.0
+2024-08-26 17:57:58,504 INFO [train.py:1114] (0/4) Epoch 8, batch 600, loss[loss=0.235, simple_loss=0.3025, pruned_loss=0.06099, ctc_loss=0.1135, over 19373.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.2926, pruned_loss=0.06378, ctc_loss=0.1193, over 3665938.20 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0
+2024-08-26 17:58:05,961 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.508e+02 1.654e+02 1.896e+02 3.415e+02, threshold=3.309e+02, percent-clipped=1.0
+2024-08-26 17:58:42,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96346.66666666667, ans=0.1
+2024-08-26 17:58:49,408 INFO [train.py:1114] (0/4) Epoch 8, batch 650, loss[loss=0.2238, simple_loss=0.2851, pruned_loss=0.05913, ctc_loss=0.1106, over 19765.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2916, pruned_loss=0.06327, ctc_loss=0.1185, over 3715955.42 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0
+2024-08-26 17:58:51,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96400.0, ans=0.1
+2024-08-26 17:59:03,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=96453.33333333333, ans=0.0
+2024-08-26 17:59:04,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=96453.33333333333, ans=0.125
+2024-08-26 17:59:33,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=96613.33333333333, ans=15.0
+2024-08-26 17:59:36,106 INFO [train.py:1114] (0/4) Epoch 8, batch 700, loss[loss=0.2169, simple_loss=0.281, pruned_loss=0.05562, ctc_loss=0.1041, over 19717.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2918, pruned_loss=0.06324, ctc_loss=0.1184, over 3747733.37 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 32.0
+2024-08-26 17:59:41,810 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.481e+02 1.644e+02 1.817e+02 3.294e+02, threshold=3.287e+02, percent-clipped=0.0
+2024-08-26 17:59:46,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=96720.0, ans=0.125
+2024-08-26 18:00:20,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=96880.0, ans=0.0
+2024-08-26 18:00:27,685 INFO [train.py:1114] (0/4) Epoch 8, batch 750, loss[loss=0.2217, simple_loss=0.2924, pruned_loss=0.05496, ctc_loss=0.1029, over 19505.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.2917, pruned_loss=0.06306, ctc_loss=0.1181, over 3773504.01 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0
+2024-08-26 18:00:38,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=96986.66666666667, ans=0.125
+2024-08-26 18:00:44,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=96986.66666666667, ans=0.025
+2024-08-26 18:00:57,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=97093.33333333333, ans=0.125
+2024-08-26 18:01:10,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97146.66666666667, ans=0.125
+2024-08-26 18:01:19,066 INFO [train.py:1114] (0/4) Epoch 8, batch 800, loss[loss=0.2112, simple_loss=0.2648, pruned_loss=0.0577, ctc_loss=0.1053, over 19402.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2912, pruned_loss=0.06269, ctc_loss=0.1173, over 3795054.21 frames. ], batch size: 48, lr: 1.86e-02, grad_scale: 32.0
+2024-08-26 18:01:24,567 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.524e+02 1.729e+02 2.039e+02 3.596e+02, threshold=3.457e+02, percent-clipped=1.0
+2024-08-26 18:01:34,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=97253.33333333333, ans=0.07
+2024-08-26 18:01:36,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=97306.66666666667, ans=0.125
+2024-08-26 18:01:37,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.66 vs. limit=6.0
+2024-08-26 18:01:56,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=97413.33333333333, ans=0.125
+2024-08-26 18:01:58,848 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:01:59,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=97413.33333333333, ans=0.0
+2024-08-26 18:02:00,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=97413.33333333333, ans=0.0
+2024-08-26 18:02:02,692 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:02:06,324 INFO [train.py:1114] (0/4) Epoch 8, batch 850, loss[loss=0.2471, simple_loss=0.3088, pruned_loss=0.06769, ctc_loss=0.1252, over 19658.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.2909, pruned_loss=0.0627, ctc_loss=0.1173, over 3814413.34 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0
+2024-08-26 18:02:08,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.24 vs. limit=15.0
+2024-08-26 18:02:22,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=97520.0, ans=0.125
+2024-08-26 18:02:24,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=97573.33333333333, ans=0.5
+2024-08-26 18:02:27,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97573.33333333333, ans=0.0
+2024-08-26 18:02:34,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.76 vs. limit=15.0
+2024-08-26 18:02:47,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=97680.0, ans=0.125
+2024-08-26 18:02:54,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97680.0, ans=0.125
+2024-08-26 18:02:58,340 INFO [train.py:1114] (0/4) Epoch 8, batch 900, loss[loss=0.1981, simple_loss=0.2645, pruned_loss=0.04831, ctc_loss=0.08783, over 19397.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2917, pruned_loss=0.06328, ctc_loss=0.1184, over 3817837.83 frames. ], batch size: 48, lr: 1.85e-02, grad_scale: 32.0
+2024-08-26 18:03:03,997 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.578e+02 1.704e+02 2.106e+02 3.434e+02, threshold=3.409e+02, percent-clipped=0.0
+2024-08-26 18:03:09,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=97786.66666666667, ans=10.0
+2024-08-26 18:03:13,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97786.66666666667, ans=0.125
+2024-08-26 18:03:16,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97840.0, ans=0.125
+2024-08-26 18:03:19,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=97840.0, ans=0.125
+2024-08-26 18:03:45,502 INFO [train.py:1114] (0/4) Epoch 8, batch 950, loss[loss=0.2247, simple_loss=0.2864, pruned_loss=0.05996, ctc_loss=0.1079, over 19485.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.292, pruned_loss=0.06355, ctc_loss=0.119, over 3818995.90 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 16.0
+2024-08-26 18:03:50,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=98000.0, ans=0.125
+2024-08-26 18:04:07,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=98053.33333333333, ans=0.0
+2024-08-26 18:04:08,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=98106.66666666667, ans=0.125
+2024-08-26 18:04:17,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=98106.66666666667, ans=0.0
+2024-08-26 18:04:37,651 INFO [train.py:1114] (0/4) Epoch 8, batch 1000, loss[loss=0.2051, simple_loss=0.2694, pruned_loss=0.05104, ctc_loss=0.09669, over 19854.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2927, pruned_loss=0.06382, ctc_loss=0.1196, over 3815625.27 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 16.0
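The `Whitening` entries compare a per-module "whiteness" statistic of activations against a (possibly scheduled) limit, with a penalty applied only when the metric exceeds the limit. One plausible statistic with the right behaviour is sketched below: it equals 1.0 when the channel covariance is a multiple of the identity and grows as the eigenvalue spectrum becomes uneven. This is an illustration under those assumptions; the exact formula in `scaling.py` may differ:

```python
# Hedged sketch of a whitening metric: d * sum(C**2) / trace(C)**2 per channel
# group, which is 1.0 for a perfectly "white" (identity-like) covariance C.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels); channels are split into num_groups groups
    metrics = []
    for g in x.chunk(num_groups, dim=1):
        g = g - g.mean(dim=0)
        cov = (g.T @ g) / g.shape[0]       # (d, d) covariance within the group
        d = cov.shape[0]
        metrics.append(d * (cov ** 2).sum() / cov.trace() ** 2)
    return torch.stack(metrics).mean().item()

x = torch.randn(2000, 384)                     # near-white random features
print(whitening_metric(x), "vs. limit=22.5")   # close to 1.0, well under the limit
```

Log lines such as `metric=21.89 vs. limit=22.5` above read directly as this comparison.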
+2024-08-26 18:04:44,380 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.497e+02 1.652e+02 1.874e+02 4.992e+02, threshold=3.305e+02, percent-clipped=2.0
+2024-08-26 18:04:51,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.07 vs. limit=6.0
+2024-08-26 18:04:54,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=98320.0, ans=0.025
+2024-08-26 18:05:12,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=98426.66666666667, ans=0.125
+2024-08-26 18:05:14,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=98480.0, ans=0.125
+2024-08-26 18:05:22,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.51 vs. limit=22.5
+2024-08-26 18:05:24,681 INFO [train.py:1114] (0/4) Epoch 8, batch 1050, loss[loss=0.221, simple_loss=0.2895, pruned_loss=0.05493, ctc_loss=0.1065, over 19837.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2915, pruned_loss=0.06328, ctc_loss=0.1185, over 3820525.10 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 16.0
+2024-08-26 18:05:28,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=98533.33333333333, ans=0.125
+2024-08-26 18:05:54,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=98640.0, ans=0.0
+2024-08-26 18:06:03,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98693.33333333333, ans=0.125
+2024-08-26 18:06:09,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98746.66666666667, ans=0.125
+2024-08-26 18:06:18,164 INFO [train.py:1114] (0/4) Epoch 8, batch 1100, loss[loss=0.2454, simple_loss=0.3018, pruned_loss=0.06859, ctc_loss=0.1296, over 19597.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2912, pruned_loss=0.06304, ctc_loss=0.118, over 3829314.28 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0
+2024-08-26 18:06:24,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.469e+02 1.560e+02 1.744e+02 3.443e+02, threshold=3.121e+02, percent-clipped=2.0
+2024-08-26 18:06:27,661 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:07:10,093 INFO [train.py:1114] (0/4) Epoch 8, batch 1150, loss[loss=0.2146, simple_loss=0.2796, pruned_loss=0.05449, ctc_loss=0.1014, over 19589.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2912, pruned_loss=0.06314, ctc_loss=0.1183, over 3827681.85 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0
+2024-08-26 18:07:14,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=99066.66666666667, ans=0.0
+2024-08-26 18:07:23,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0
+2024-08-26 18:07:45,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=99226.66666666667, ans=0.0
+2024-08-26 18:07:57,677 INFO [train.py:1114] (0/4) Epoch 8, batch 1200, loss[loss=0.2346, simple_loss=0.2996, pruned_loss=0.06167, ctc_loss=0.1154, over 19848.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.2924, pruned_loss=0.06362, ctc_loss=0.1193, over 3824393.63 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0
+2024-08-26 18:07:57,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=99333.33333333333, ans=0.0
+2024-08-26 18:07:58,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.65 vs. limit=15.0
+2024-08-26 18:08:04,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.491e+02 1.608e+02 2.003e+02 2.840e+02, threshold=3.216e+02, percent-clipped=0.0
+2024-08-26 18:08:10,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=99386.66666666667, ans=0.125
+2024-08-26 18:08:23,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=99440.0, ans=0.125
+2024-08-26 18:08:24,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=99493.33333333333, ans=0.025
+2024-08-26 18:08:34,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99493.33333333333, ans=0.1
+2024-08-26 18:08:49,185 INFO [train.py:1114] (0/4) Epoch 8, batch 1250, loss[loss=0.2788, simple_loss=0.3285, pruned_loss=0.08354, ctc_loss=0.1552, over 19533.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2931, pruned_loss=0.06379, ctc_loss=0.1193, over 3842788.18 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0
+2024-08-26 18:09:35,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.88 vs. limit=15.0
+2024-08-26 18:09:40,616 INFO [train.py:1114] (0/4) Epoch 8, batch 1300, loss[loss=0.2744, simple_loss=0.3232, pruned_loss=0.0826, ctc_loss=0.1511, over 18859.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2923, pruned_loss=0.06321, ctc_loss=0.1182, over 3845901.22 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:09:47,138 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.481e+02 1.661e+02 1.866e+02 3.142e+02, threshold=3.323e+02, percent-clipped=0.0
+2024-08-26 18:09:59,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.58 vs. limit=22.5
+2024-08-26 18:10:13,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=100026.66666666667, ans=0.015
+2024-08-26 18:10:14,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=100026.66666666667, ans=0.0
+2024-08-26 18:10:15,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=100026.66666666667, ans=0.125
+2024-08-26 18:10:27,285 INFO [train.py:1114] (0/4) Epoch 8, batch 1350, loss[loss=0.2237, simple_loss=0.2856, pruned_loss=0.0588, ctc_loss=0.1102, over 19769.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.2913, pruned_loss=0.06234, ctc_loss=0.1166, over 3857801.95 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:10:29,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100133.33333333333, ans=0.1
+2024-08-26 18:10:30,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.03 vs. limit=22.5
+2024-08-26 18:10:31,167 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:10:39,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=100186.66666666667, ans=0.125
+2024-08-26 18:10:48,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100240.0, ans=0.0
+2024-08-26 18:10:53,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100240.0, ans=0.1
+2024-08-26 18:10:57,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=22.5
+2024-08-26 18:11:11,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=100346.66666666667, ans=0.95
+2024-08-26 18:11:14,663 INFO [train.py:1114] (0/4) Epoch 8, batch 1400, loss[loss=0.2128, simple_loss=0.2652, pruned_loss=0.05859, ctc_loss=0.1081, over 19669.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2909, pruned_loss=0.06233, ctc_loss=0.1165, over 3864552.82 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:11:15,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=100400.0, ans=10.0
+2024-08-26 18:11:23,745 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.577e+02 1.859e+02 2.331e+02 3.237e+02, threshold=3.718e+02, percent-clipped=0.0
+2024-08-26 18:11:33,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=100453.33333333333, ans=0.2
+2024-08-26 18:11:39,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100506.66666666667, ans=0.1
+2024-08-26 18:11:39,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=100506.66666666667, ans=0.2
+2024-08-26 18:11:47,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100506.66666666667, ans=0.125
+2024-08-26 18:11:48,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100506.66666666667, ans=0.125
+2024-08-26 18:11:55,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=100560.0, ans=0.0
+2024-08-26 18:12:00,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=100613.33333333333, ans=0.95
+2024-08-26 18:12:01,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=100613.33333333333, ans=0.125
+2024-08-26 18:12:09,366 INFO [train.py:1114] (0/4) Epoch 8, batch 1450, loss[loss=0.2491, simple_loss=0.3086, pruned_loss=0.06987, ctc_loss=0.1244, over 19703.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.2915, pruned_loss=0.06265, ctc_loss=0.117, over 3862989.12 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:12:20,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100720.0, ans=0.125
+2024-08-26 18:12:42,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100826.66666666667, ans=0.125
+2024-08-26 18:12:53,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=100880.0, ans=0.04949747468305833
+2024-08-26 18:12:54,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-08-26 18:13:00,705 INFO [train.py:1114] (0/4) Epoch 8, batch 1500, loss[loss=0.238, simple_loss=0.2999, pruned_loss=0.06451, ctc_loss=0.1175, over 19578.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.292, pruned_loss=0.06278, ctc_loss=0.1173, over 3862350.18 frames. ], batch size: 57, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:13:05,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100933.33333333333, ans=0.125
+2024-08-26 18:13:07,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.594e+02 1.806e+02 5.150e+02, threshold=3.189e+02, percent-clipped=1.0
+2024-08-26 18:13:34,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101093.33333333333, ans=0.125
+2024-08-26 18:13:37,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=22.5
+2024-08-26 18:13:41,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=101146.66666666667, ans=0.125
+2024-08-26 18:13:48,291 INFO [train.py:1114] (0/4) Epoch 8, batch 1550, loss[loss=0.2496, simple_loss=0.3038, pruned_loss=0.07135, ctc_loss=0.1318, over 19607.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.2919, pruned_loss=0.06301, ctc_loss=0.118, over 3846891.21 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:13:59,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=101253.33333333333, ans=0.025
+2024-08-26 18:14:04,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101253.33333333333, ans=0.1
+2024-08-26 18:14:14,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101306.66666666667, ans=0.1
+2024-08-26 18:14:22,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101360.0, ans=0.125
+2024-08-26 18:14:30,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=101413.33333333333, ans=0.025
+2024-08-26 18:14:34,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=101413.33333333333, ans=0.125
+2024-08-26 18:14:35,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101413.33333333333, ans=0.125
+2024-08-26 18:14:40,875 INFO [train.py:1114] (0/4) Epoch 8, batch 1600, loss[loss=0.2287, simple_loss=0.2942, pruned_loss=0.05954, ctc_loss=0.1102, over 19829.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2919, pruned_loss=0.06313, ctc_loss=0.1182, over 3836792.40 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:14:47,311 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.562e+02 1.716e+02 2.059e+02 3.797e+02, threshold=3.431e+02, percent-clipped=2.0
+2024-08-26 18:14:55,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=101520.0, ans=0.2
+2024-08-26 18:15:20,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101626.66666666667, ans=0.125
+2024-08-26 18:15:26,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=101680.0, ans=0.025
+2024-08-26 18:15:26,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=101680.0, ans=0.125
+2024-08-26 18:15:32,093 INFO [train.py:1114] (0/4) Epoch 8, batch 1650, loss[loss=0.2362, simple_loss=0.2991, pruned_loss=0.06271, ctc_loss=0.1196, over 19644.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2918, pruned_loss=0.06322, ctc_loss=0.1183, over 3834185.61 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:15:42,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101786.66666666667, ans=0.125
+2024-08-26 18:15:50,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0
+2024-08-26 18:15:57,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.95 vs. limit=15.0
+2024-08-26 18:15:58,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0
+2024-08-26 18:16:01,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=101893.33333333333, ans=22.5
+2024-08-26 18:16:05,207 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:16:13,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=22.5
+2024-08-26 18:16:14,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101946.66666666667, ans=0.0
+2024-08-26 18:16:16,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=101946.66666666667, ans=0.125
+2024-08-26 18:16:18,712 INFO [train.py:1114] (0/4) Epoch 8, batch 1700, loss[loss=0.1996, simple_loss=0.2579, pruned_loss=0.05172, ctc_loss=0.09463, over 19678.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.2911, pruned_loss=0.06265, ctc_loss=0.1172, over 3848314.27 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:16:25,307 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.495e+02 1.737e+02 2.089e+02 3.401e+02, threshold=3.475e+02, percent-clipped=0.0
+2024-08-26 18:17:03,791 INFO [train.py:1114] (0/4) Epoch 8, batch 1750, loss[loss=0.2016, simple_loss=0.2611, pruned_loss=0.05278, ctc_loss=0.09118, over 19684.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2903, pruned_loss=0.06231, ctc_loss=0.1165, over 3851911.08 frames. ], batch size: 45, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:17:05,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=102266.66666666667, ans=0.125
+2024-08-26 18:17:10,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=102266.66666666667, ans=0.95
+2024-08-26 18:17:13,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102320.0, ans=0.125
+2024-08-26 18:17:19,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.62 vs. limit=15.0
+2024-08-26 18:17:27,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102373.33333333333, ans=0.1
+2024-08-26 18:17:35,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=102426.66666666667, ans=0.2
+2024-08-26 18:17:48,529 INFO [train.py:1114] (0/4) Epoch 8, batch 1800, loss[loss=0.2378, simple_loss=0.3049, pruned_loss=0.06223, ctc_loss=0.1153, over 19612.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2905, pruned_loss=0.06222, ctc_loss=0.1161, over 3854379.38 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 32.0
+2024-08-26 18:17:56,848 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.517e+02 1.665e+02 1.949e+02 3.105e+02, threshold=3.330e+02, percent-clipped=0.0
+2024-08-26 18:18:00,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102586.66666666667, ans=0.125
+2024-08-26 18:18:15,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102640.0, ans=0.1
+2024-08-26 18:18:18,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102693.33333333333, ans=0.125
+2024-08-26 18:18:36,732 INFO [train.py:1114] (0/4) Epoch 8, batch 1850, loss[loss=0.2607, simple_loss=0.3131, pruned_loss=0.07619, ctc_loss=0.1397, over 19594.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2907, pruned_loss=0.06243, ctc_loss=0.1167, over 3857135.20 frames. ], batch size: 57, lr: 1.81e-02, grad_scale: 32.0
+2024-08-26 18:19:01,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=102906.66666666667, ans=0.125
+2024-08-26 18:19:18,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=103013.33333333333, ans=0.5
+2024-08-26 18:19:21,226 INFO [train.py:1114] (0/4) Epoch 8, batch 1900, loss[loss=0.2423, simple_loss=0.3125, pruned_loss=0.06328, ctc_loss=0.1137, over 19649.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.291, pruned_loss=0.0624, ctc_loss=0.1166, over 3861782.54 frames. ], batch size: 59, lr: 1.81e-02, grad_scale: 16.0
+2024-08-26 18:19:27,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=103066.66666666667, ans=0.025
+2024-08-26 18:19:28,165 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.533e+02 1.714e+02 2.014e+02 3.062e+02, threshold=3.427e+02, percent-clipped=0.0
+2024-08-26 18:19:29,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=103120.0, ans=0.025
+2024-08-26 18:19:32,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=103120.0, ans=0.125
+2024-08-26 18:19:39,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.95 vs. limit=15.0
+2024-08-26 18:19:49,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=103226.66666666667, ans=0.0
+2024-08-26 18:19:54,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=103226.66666666667, ans=0.2
+2024-08-26 18:20:04,889 INFO [train.py:1114] (0/4) Epoch 8, batch 1950, loss[loss=0.2213, simple_loss=0.2831, pruned_loss=0.05867, ctc_loss=0.1054, over 19589.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2921, pruned_loss=0.06249, ctc_loss=0.1166, over 3870478.34 frames. ], batch size: 52, lr: 1.81e-02, grad_scale: 16.0
+2024-08-26 18:20:12,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=103333.33333333333, ans=0.2
+2024-08-26 18:20:13,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103386.66666666667, ans=0.125
+2024-08-26 18:20:36,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=103493.33333333333, ans=0.2
+2024-08-26 18:20:37,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103493.33333333333, ans=0.125
+2024-08-26 18:20:44,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=103546.66666666667, ans=0.0
+2024-08-26 18:20:51,122 INFO [train.py:1114] (0/4) Epoch 8, batch 2000, loss[loss=0.2035, simple_loss=0.2584, pruned_loss=0.05414, ctc_loss=0.101, over 19643.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2931, pruned_loss=0.0633, ctc_loss=0.118, over 3855829.59 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 16.0
+2024-08-26 18:21:00,304 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.619e+02 1.835e+02 2.136e+02 5.632e+02, threshold=3.670e+02, percent-clipped=2.0
+2024-08-26 18:21:09,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=103706.66666666667, ans=0.0
+2024-08-26 18:21:21,587 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.30 vs. limit=15.0
+2024-08-26 18:21:26,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=103813.33333333333, ans=0.125
+2024-08-26 18:21:36,072 INFO [train.py:1114] (0/4) Epoch 8, batch 2050, loss[loss=0.2165, simple_loss=0.2719, pruned_loss=0.05924, ctc_loss=0.1065, over 19688.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2922, pruned_loss=0.06313, ctc_loss=0.1175, over 3851400.97 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:21:37,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=103866.66666666667, ans=0.2
+2024-08-26 18:21:42,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=103866.66666666667, ans=0.025
+2024-08-26 18:21:48,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=103920.0, ans=0.0
+2024-08-26 18:21:52,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=103920.0, ans=0.125
+2024-08-26 18:21:57,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=103973.33333333333, ans=0.125
+2024-08-26 18:22:00,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0
+2024-08-26 18:22:07,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104026.66666666667, ans=0.1
+2024-08-26 18:22:10,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104080.0, ans=0.125
+2024-08-26 18:22:12,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104080.0, ans=0.125
+2024-08-26 18:22:19,592 INFO [train.py:1114] (0/4) Epoch 8, batch 2100, loss[loss=0.2367, simple_loss=0.2933, pruned_loss=0.0651, ctc_loss=0.1249, over 19772.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2907, pruned_loss=0.06211, ctc_loss=0.116, over 3858466.81 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:22:27,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.502e+02 1.673e+02 2.007e+02 2.886e+02, threshold=3.346e+02, percent-clipped=0.0
+2024-08-26 18:22:37,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=104240.0, ans=0.125
+2024-08-26 18:22:39,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=104240.0, ans=0.125
+2024-08-26 18:22:47,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=104293.33333333333, ans=0.2
+2024-08-26 18:22:49,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.05 vs. limit=22.5
+2024-08-26 18:22:52,926 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.55 vs.
limit=15.0 +2024-08-26 18:23:02,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=104400.0, ans=0.2 +2024-08-26 18:23:03,063 INFO [train.py:1114] (0/4) Epoch 8, batch 2150, loss[loss=0.2215, simple_loss=0.2876, pruned_loss=0.05669, ctc_loss=0.1048, over 19585.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.29, pruned_loss=0.06171, ctc_loss=0.1153, over 3869125.10 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:23:46,687 INFO [train.py:1114] (0/4) Epoch 8, batch 2200, loss[loss=0.2468, simple_loss=0.3026, pruned_loss=0.07042, ctc_loss=0.1253, over 19594.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2902, pruned_loss=0.06173, ctc_loss=0.1154, over 3867395.04 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:23:54,540 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.596e+02 1.839e+02 2.214e+02 3.376e+02, threshold=3.678e+02, percent-clipped=1.0 +2024-08-26 18:24:00,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=104720.0, ans=0.09899494936611666 +2024-08-26 18:24:16,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.43 vs. limit=10.0 +2024-08-26 18:24:21,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104880.0, ans=0.125 +2024-08-26 18:24:25,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.57 vs. limit=15.0 +2024-08-26 18:24:30,564 INFO [train.py:1114] (0/4) Epoch 8, batch 2250, loss[loss=0.2304, simple_loss=0.2986, pruned_loss=0.05865, ctc_loss=0.1123, over 19628.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2903, pruned_loss=0.06179, ctc_loss=0.1156, over 3867405.77 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:24:42,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0 +2024-08-26 18:24:48,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=105040.0, ans=0.07 +2024-08-26 18:24:55,208 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=15.0 +2024-08-26 18:24:57,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=105093.33333333333, ans=0.125 +2024-08-26 18:25:02,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=105093.33333333333, ans=0.0 +2024-08-26 18:25:03,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=105093.33333333333, ans=0.0 +2024-08-26 18:25:08,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105146.66666666667, ans=0.125 +2024-08-26 18:25:14,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. 
limit=6.0 +2024-08-26 18:25:16,092 INFO [train.py:1114] (0/4) Epoch 8, batch 2300, loss[loss=0.1967, simple_loss=0.2624, pruned_loss=0.04811, ctc_loss=0.08696, over 19513.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2892, pruned_loss=0.06186, ctc_loss=0.1156, over 3860937.02 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 16.0 +2024-08-26 18:25:17,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=105200.0, ans=0.2 +2024-08-26 18:25:23,766 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.553e+02 1.767e+02 2.002e+02 4.280e+02, threshold=3.534e+02, percent-clipped=3.0 +2024-08-26 18:25:24,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105253.33333333333, ans=0.125 +2024-08-26 18:25:27,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=105253.33333333333, ans=0.125 +2024-08-26 18:25:35,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105306.66666666667, ans=0.1 +2024-08-26 18:25:37,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=105306.66666666667, ans=0.0 +2024-08-26 18:25:52,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=105413.33333333333, ans=0.2 +2024-08-26 18:25:58,624 INFO [train.py:1114] (0/4) Epoch 8, batch 2350, loss[loss=0.2613, simple_loss=0.3197, pruned_loss=0.07499, ctc_loss=0.1324, over 19669.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2891, pruned_loss=0.06181, ctc_loss=0.1154, over 3863603.50 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 16.0 +2024-08-26 18:25:59,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0 +2024-08-26 18:26:18,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=105573.33333333333, ans=0.125 +2024-08-26 18:26:24,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.11 vs. limit=22.5 +2024-08-26 18:26:33,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=105680.0, ans=0.125 +2024-08-26 18:26:42,919 INFO [train.py:1114] (0/4) Epoch 8, batch 2400, loss[loss=0.2614, simple_loss=0.3146, pruned_loss=0.07596, ctc_loss=0.1409, over 19477.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2916, pruned_loss=0.06294, ctc_loss=0.1173, over 3858382.89 frames. ], batch size: 67, lr: 1.79e-02, grad_scale: 32.0 +2024-08-26 18:26:50,599 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.526e+02 1.733e+02 1.998e+02 3.354e+02, threshold=3.467e+02, percent-clipped=0.0 +2024-08-26 18:27:10,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=105893.33333333333, ans=0.125 +2024-08-26 18:27:18,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.76 vs. 
limit=15.0 +2024-08-26 18:27:27,047 INFO [train.py:1114] (0/4) Epoch 8, batch 2450, loss[loss=0.3059, simple_loss=0.3309, pruned_loss=0.1038, ctc_loss=0.1831, over 13958.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2966, pruned_loss=0.067, ctc_loss=0.1247, over 3736317.57 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 16.0 +2024-08-26 18:27:28,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=106000.0, ans=0.125 +2024-08-26 18:27:29,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=106000.0, ans=0.125 +2024-08-26 18:27:32,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=106000.0, ans=0.125 +2024-08-26 18:27:36,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106053.33333333333, ans=0.125 +2024-08-26 18:27:36,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106053.33333333333, ans=0.1 +2024-08-26 18:27:45,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=106106.66666666667, ans=0.2 +2024-08-26 18:27:51,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=106106.66666666667, ans=0.2 +2024-08-26 18:27:55,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.13 vs. limit=10.0 +2024-08-26 18:27:58,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=106160.0, ans=0.125 +2024-08-26 18:27:59,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=106160.0, ans=0.5 +2024-08-26 18:28:01,684 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-8.pt +2024-08-26 18:28:46,566 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:28:47,198 INFO [train.py:1114] (0/4) Epoch 9, batch 0, loss[loss=0.2132, simple_loss=0.2692, pruned_loss=0.05737, ctc_loss=0.1062, over 19818.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2692, pruned_loss=0.05737, ctc_loss=0.1062, over 19818.00 frames. ], batch size: 49, lr: 1.69e-02, grad_scale: 32.0 +2024-08-26 18:28:47,199 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 18:28:56,819 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.1927, simple_loss=0.2844, pruned_loss=0.03737, ctc_loss=0.06585, over 944034.00 frames. +2024-08-26 18:28:56,819 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-26 18:29:13,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=106261.33333333333, ans=0.125 +2024-08-26 18:29:16,435 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 1.688e+02 1.849e+02 2.025e+02 3.204e+02, threshold=3.698e+02, percent-clipped=0.0 +2024-08-26 18:29:43,038 INFO [train.py:1114] (0/4) Epoch 9, batch 50, loss[loss=0.1846, simple_loss=0.2546, pruned_loss=0.0414, ctc_loss=0.07968, over 19734.00 frames. 
], tot_loss[loss=0.2319, simple_loss=0.2922, pruned_loss=0.06228, ctc_loss=0.1179, over 844043.89 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0 +2024-08-26 18:29:51,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106474.66666666667, ans=0.1 +2024-08-26 18:29:52,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.89 vs. limit=15.0 +2024-08-26 18:30:12,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=106634.66666666667, ans=0.07 +2024-08-26 18:30:17,650 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-20000.pt +2024-08-26 18:30:33,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=106688.0, ans=0.2 +2024-08-26 18:30:39,522 INFO [train.py:1114] (0/4) Epoch 9, batch 100, loss[loss=0.2083, simple_loss=0.274, pruned_loss=0.05117, ctc_loss=0.1008, over 19724.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2934, pruned_loss=0.06257, ctc_loss=0.1187, over 1497202.38 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 32.0 +2024-08-26 18:30:44,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106741.33333333333, ans=0.125 +2024-08-26 18:30:57,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=106794.66666666667, ans=0.2 +2024-08-26 18:31:02,331 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.554e+02 1.735e+02 2.126e+02 3.416e+02, threshold=3.470e+02, percent-clipped=0.0 +2024-08-26 18:31:03,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106848.0, ans=0.0 +2024-08-26 18:31:07,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.84 vs. limit=15.0 +2024-08-26 18:31:07,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.25 vs. limit=15.0 +2024-08-26 18:31:09,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106901.33333333333, ans=0.125 +2024-08-26 18:31:21,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=106954.66666666667, ans=0.125 +2024-08-26 18:31:21,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.94 vs. limit=22.5 +2024-08-26 18:31:25,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-08-26 18:31:28,284 INFO [train.py:1114] (0/4) Epoch 9, batch 150, loss[loss=0.1952, simple_loss=0.2607, pruned_loss=0.04782, ctc_loss=0.08532, over 19693.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2903, pruned_loss=0.06141, ctc_loss=0.1158, over 2027503.00 frames. 
], batch size: 47, lr: 1.69e-02, grad_scale: 16.0 +2024-08-26 18:31:35,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=107008.0, ans=22.5 +2024-08-26 18:32:14,112 INFO [train.py:1114] (0/4) Epoch 9, batch 200, loss[loss=0.2469, simple_loss=0.3017, pruned_loss=0.0696, ctc_loss=0.1323, over 18274.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2887, pruned_loss=0.06064, ctc_loss=0.1145, over 2435063.29 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:32:14,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=107274.66666666667, ans=0.0 +2024-08-26 18:32:16,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.35 vs. limit=22.5 +2024-08-26 18:32:22,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=107328.0, ans=0.2 +2024-08-26 18:32:36,049 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.442e+02 1.571e+02 1.787e+02 2.800e+02, threshold=3.143e+02, percent-clipped=0.0 +2024-08-26 18:32:40,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107381.33333333333, ans=0.125 +2024-08-26 18:32:45,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=107434.66666666667, ans=0.125 +2024-08-26 18:32:54,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.02 vs. limit=15.0 +2024-08-26 18:33:01,987 INFO [train.py:1114] (0/4) Epoch 9, batch 250, loss[loss=0.2612, simple_loss=0.3183, pruned_loss=0.07498, ctc_loss=0.1355, over 19360.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.2885, pruned_loss=0.06026, ctc_loss=0.1135, over 2754598.94 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:33:06,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=107541.33333333333, ans=0.0 +2024-08-26 18:33:26,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=107594.66666666667, ans=0.125 +2024-08-26 18:33:40,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107701.33333333333, ans=0.1 +2024-08-26 18:33:43,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=107701.33333333333, ans=0.2 +2024-08-26 18:33:53,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=107754.66666666667, ans=0.125 +2024-08-26 18:33:54,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=107754.66666666667, ans=0.2 +2024-08-26 18:34:01,016 INFO [train.py:1114] (0/4) Epoch 9, batch 300, loss[loss=0.2371, simple_loss=0.2972, pruned_loss=0.06411, ctc_loss=0.1217, over 19553.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2875, pruned_loss=0.05977, ctc_loss=0.1121, over 2999865.94 frames. 
], batch size: 61, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:34:12,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.37 vs. limit=22.5 +2024-08-26 18:34:12,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=107861.33333333333, ans=0.0 +2024-08-26 18:34:24,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.264e+02 1.498e+02 1.681e+02 1.999e+02 2.633e+02, threshold=3.363e+02, percent-clipped=0.0 +2024-08-26 18:34:29,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.95 vs. limit=15.0 +2024-08-26 18:34:30,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=107914.66666666667, ans=22.5 +2024-08-26 18:34:35,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=107968.0, ans=0.0 +2024-08-26 18:34:36,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=107968.0, ans=0.0 +2024-08-26 18:34:36,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=107968.0, ans=0.0 +2024-08-26 18:34:50,531 INFO [train.py:1114] (0/4) Epoch 9, batch 350, loss[loss=0.2072, simple_loss=0.266, pruned_loss=0.05353, ctc_loss=0.1034, over 19747.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2876, pruned_loss=0.05972, ctc_loss=0.1117, over 3189846.95 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:34:58,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0 +2024-08-26 18:35:02,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108128.0, ans=0.125 +2024-08-26 18:35:15,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108181.33333333333, ans=0.125 +2024-08-26 18:35:25,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=108234.66666666667, ans=0.0 +2024-08-26 18:35:29,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.21 vs. limit=10.0 +2024-08-26 18:35:40,759 INFO [train.py:1114] (0/4) Epoch 9, batch 400, loss[loss=0.2062, simple_loss=0.2881, pruned_loss=0.04398, ctc_loss=0.09103, over 19514.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2871, pruned_loss=0.05933, ctc_loss=0.1111, over 3342190.73 frames. ], batch size: 54, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:35:54,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=108394.66666666667, ans=0.125 +2024-08-26 18:35:55,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. 
limit=15.0 +2024-08-26 18:36:02,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.232e+02 1.489e+02 1.712e+02 1.995e+02 4.778e+02, threshold=3.424e+02, percent-clipped=1.0 +2024-08-26 18:36:02,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.04 vs. limit=15.0 +2024-08-26 18:36:05,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.98 vs. limit=22.5 +2024-08-26 18:36:23,485 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:36:26,321 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:36:27,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=108554.66666666667, ans=0.0 +2024-08-26 18:36:32,710 INFO [train.py:1114] (0/4) Epoch 9, batch 450, loss[loss=0.1995, simple_loss=0.2791, pruned_loss=0.04258, ctc_loss=0.08658, over 19624.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.287, pruned_loss=0.05912, ctc_loss=0.1106, over 3450818.45 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:36:36,683 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:37:09,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108768.0, ans=0.1 +2024-08-26 18:37:19,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=108821.33333333333, ans=0.025 +2024-08-26 18:37:19,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108821.33333333333, ans=0.125 +2024-08-26 18:37:21,536 INFO [train.py:1114] (0/4) Epoch 9, batch 500, loss[loss=0.2416, simple_loss=0.3059, pruned_loss=0.06526, ctc_loss=0.1169, over 19701.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2861, pruned_loss=0.05876, ctc_loss=0.1101, over 3545802.40 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:37:24,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108874.66666666667, ans=0.125 +2024-08-26 18:37:31,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=108928.0, ans=0.0 +2024-08-26 18:37:35,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=108928.0, ans=0.05 +2024-08-26 18:37:42,874 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.480e+02 1.660e+02 1.957e+02 3.087e+02, threshold=3.320e+02, percent-clipped=0.0 +2024-08-26 18:37:46,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.58 vs. 
limit=12.0 +2024-08-26 18:37:59,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109088.0, ans=0.125 +2024-08-26 18:38:05,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=109088.0, ans=0.0 +2024-08-26 18:38:07,950 INFO [train.py:1114] (0/4) Epoch 9, batch 550, loss[loss=0.2229, simple_loss=0.2877, pruned_loss=0.05799, ctc_loss=0.1051, over 19392.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2868, pruned_loss=0.05921, ctc_loss=0.1111, over 3607826.60 frames. ], batch size: 71, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:38:09,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=109141.33333333333, ans=0.0 +2024-08-26 18:38:25,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=109194.66666666667, ans=0.09899494936611666 +2024-08-26 18:38:38,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109301.33333333333, ans=0.125 +2024-08-26 18:38:50,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=109354.66666666667, ans=0.2 +2024-08-26 18:38:55,941 INFO [train.py:1114] (0/4) Epoch 9, batch 600, loss[loss=0.2389, simple_loss=0.2979, pruned_loss=0.06521, ctc_loss=0.1238, over 19429.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2876, pruned_loss=0.0595, ctc_loss=0.1115, over 3665188.56 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:38:58,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=109408.0, ans=0.125 +2024-08-26 18:39:21,961 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.496e+02 1.658e+02 1.980e+02 4.382e+02, threshold=3.316e+02, percent-clipped=1.0 +2024-08-26 18:39:32,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=109568.0, ans=0.125 +2024-08-26 18:39:34,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=109568.0, ans=0.2 +2024-08-26 18:39:35,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109568.0, ans=0.1 +2024-08-26 18:39:49,347 INFO [train.py:1114] (0/4) Epoch 9, batch 650, loss[loss=0.2102, simple_loss=0.2788, pruned_loss=0.05193, ctc_loss=0.09442, over 19758.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2867, pruned_loss=0.05893, ctc_loss=0.1105, over 3715812.23 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:39:51,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=109674.66666666667, ans=0.025 +2024-08-26 18:39:58,325 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.70 vs. 
limit=15.0 +2024-08-26 18:40:29,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109834.66666666667, ans=0.1 +2024-08-26 18:40:36,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0 +2024-08-26 18:40:40,276 INFO [train.py:1114] (0/4) Epoch 9, batch 700, loss[loss=0.2349, simple_loss=0.2891, pruned_loss=0.06613, ctc_loss=0.1214, over 19723.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2869, pruned_loss=0.05922, ctc_loss=0.111, over 3748214.42 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:40:42,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109941.33333333333, ans=0.1 +2024-08-26 18:41:01,793 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.271e+02 1.503e+02 1.748e+02 2.321e+02 3.813e+02, threshold=3.497e+02, percent-clipped=1.0 +2024-08-26 18:41:04,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110048.0, ans=0.125 +2024-08-26 18:41:22,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=110154.66666666667, ans=0.0 +2024-08-26 18:41:28,640 INFO [train.py:1114] (0/4) Epoch 9, batch 750, loss[loss=0.2169, simple_loss=0.287, pruned_loss=0.05304, ctc_loss=0.1019, over 19483.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2867, pruned_loss=0.05913, ctc_loss=0.1106, over 3774614.02 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 16.0 +2024-08-26 18:41:38,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=110261.33333333333, ans=0.025 +2024-08-26 18:41:39,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110261.33333333333, ans=0.125 +2024-08-26 18:41:53,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=110314.66666666667, ans=0.0 +2024-08-26 18:42:09,833 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:42:22,143 INFO [train.py:1114] (0/4) Epoch 9, batch 800, loss[loss=0.1923, simple_loss=0.2477, pruned_loss=0.04961, ctc_loss=0.09423, over 19841.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2868, pruned_loss=0.05924, ctc_loss=0.1107, over 3796448.51 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:42:24,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=15.09 vs. limit=15.0 +2024-08-26 18:42:33,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=110528.0, ans=0.125 +2024-08-26 18:42:35,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.67 vs. 
limit=15.0 +2024-08-26 18:42:38,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=110528.0, ans=0.2 +2024-08-26 18:42:43,921 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 1.427e+02 1.539e+02 1.792e+02 3.382e+02, threshold=3.078e+02, percent-clipped=0.0 +2024-08-26 18:43:09,192 INFO [train.py:1114] (0/4) Epoch 9, batch 850, loss[loss=0.24, simple_loss=0.3005, pruned_loss=0.06452, ctc_loss=0.1263, over 19657.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2863, pruned_loss=0.05899, ctc_loss=0.1102, over 3815637.27 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:43:10,719 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=15.0 +2024-08-26 18:43:21,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.33 vs. limit=15.0 +2024-08-26 18:43:30,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=110848.0, ans=0.0 +2024-08-26 18:43:41,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=110901.33333333333, ans=0.125 +2024-08-26 18:43:52,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110954.66666666667, ans=0.125 +2024-08-26 18:43:55,669 INFO [train.py:1114] (0/4) Epoch 9, batch 900, loss[loss=0.1884, simple_loss=0.257, pruned_loss=0.04258, ctc_loss=0.08659, over 19805.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2866, pruned_loss=0.05927, ctc_loss=0.1108, over 3820448.07 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:44:05,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.14 vs. limit=15.0 +2024-08-26 18:45:38,160 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.519e+02 1.752e+02 2.077e+02 5.433e+02, threshold=3.505e+02, percent-clipped=5.0 +2024-08-26 18:45:39,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=111114.66666666667, ans=0.0 +2024-08-26 18:45:58,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=111221.33333333333, ans=0.0 +2024-08-26 18:46:05,599 INFO [train.py:1114] (0/4) Epoch 9, batch 950, loss[loss=0.189, simple_loss=0.2586, pruned_loss=0.04362, ctc_loss=0.08024, over 19520.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2873, pruned_loss=0.05965, ctc_loss=0.1115, over 3822695.00 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:46:16,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0 +2024-08-26 18:46:20,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.16 vs. 
limit=15.0 +2024-08-26 18:46:27,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=111328.0, ans=10.0 +2024-08-26 18:46:38,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111434.66666666667, ans=0.1 +2024-08-26 18:46:39,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=111434.66666666667, ans=0.125 +2024-08-26 18:46:50,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=111488.0, ans=0.0 +2024-08-26 18:46:57,416 INFO [train.py:1114] (0/4) Epoch 9, batch 1000, loss[loss=0.1983, simple_loss=0.2683, pruned_loss=0.04634, ctc_loss=0.08893, over 19829.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.2881, pruned_loss=0.06002, ctc_loss=0.1122, over 3819280.03 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:47:09,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=111594.66666666667, ans=0.0 +2024-08-26 18:47:12,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111594.66666666667, ans=0.125 +2024-08-26 18:47:19,853 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.461e+02 1.756e+02 2.077e+02 6.803e+02, threshold=3.513e+02, percent-clipped=1.0 +2024-08-26 18:47:43,906 INFO [train.py:1114] (0/4) Epoch 9, batch 1050, loss[loss=0.2242, simple_loss=0.2909, pruned_loss=0.05801, ctc_loss=0.1036, over 19859.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2872, pruned_loss=0.05963, ctc_loss=0.1116, over 3823747.95 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:47:45,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=111808.0, ans=0.0 +2024-08-26 18:48:00,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111861.33333333333, ans=0.1 +2024-08-26 18:48:31,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112074.66666666667, ans=0.1 +2024-08-26 18:48:32,538 INFO [train.py:1114] (0/4) Epoch 9, batch 1100, loss[loss=0.2207, simple_loss=0.2855, pruned_loss=0.05667, ctc_loss=0.1062, over 19592.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2865, pruned_loss=0.05921, ctc_loss=0.1108, over 3831698.90 frames. 
], batch size: 52, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:48:32,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=112074.66666666667, ans=0.125 +2024-08-26 18:48:41,128 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:48:43,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112128.0, ans=0.125 +2024-08-26 18:48:59,878 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.444e+02 1.690e+02 2.009e+02 4.396e+02, threshold=3.380e+02, percent-clipped=1.0 +2024-08-26 18:49:05,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112181.33333333333, ans=0.0 +2024-08-26 18:49:41,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.75 vs. limit=22.5 +2024-08-26 18:49:51,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112288.0, ans=0.1 +2024-08-26 18:49:53,113 INFO [train.py:1114] (0/4) Epoch 9, batch 1150, loss[loss=0.1977, simple_loss=0.2705, pruned_loss=0.04602, ctc_loss=0.08183, over 19600.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2859, pruned_loss=0.05911, ctc_loss=0.1105, over 3830700.83 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:50:00,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112341.33333333333, ans=0.125 +2024-08-26 18:50:24,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112448.0, ans=0.125 +2024-08-26 18:50:32,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=112448.0, ans=0.125 +2024-08-26 18:50:39,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112501.33333333333, ans=0.1 +2024-08-26 18:50:40,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=15.0 +2024-08-26 18:50:54,160 INFO [train.py:1114] (0/4) Epoch 9, batch 1200, loss[loss=0.2151, simple_loss=0.2879, pruned_loss=0.05152, ctc_loss=0.09815, over 19834.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2864, pruned_loss=0.05905, ctc_loss=0.1105, over 3825832.07 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0 +2024-08-26 18:51:04,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.27 vs. 
limit=15.0 +2024-08-26 18:51:09,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112661.33333333333, ans=0.1 +2024-08-26 18:51:15,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=112714.66666666667, ans=0.05 +2024-08-26 18:51:16,811 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.431e+02 1.600e+02 1.807e+02 3.201e+02, threshold=3.201e+02, percent-clipped=0.0 +2024-08-26 18:51:18,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112714.66666666667, ans=0.0 +2024-08-26 18:51:18,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112714.66666666667, ans=0.1 +2024-08-26 18:51:33,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112821.33333333333, ans=0.1 +2024-08-26 18:51:42,791 INFO [train.py:1114] (0/4) Epoch 9, batch 1250, loss[loss=0.2535, simple_loss=0.3079, pruned_loss=0.07346, ctc_loss=0.1303, over 19548.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2868, pruned_loss=0.05905, ctc_loss=0.1103, over 3844137.32 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0 +2024-08-26 18:51:51,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=112928.0, ans=0.0 +2024-08-26 18:51:58,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=112928.0, ans=0.125 +2024-08-26 18:52:01,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0 +2024-08-26 18:52:26,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=113088.0, ans=0.2 +2024-08-26 18:52:31,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113088.0, ans=0.1 +2024-08-26 18:52:36,294 INFO [train.py:1114] (0/4) Epoch 9, batch 1300, loss[loss=0.2433, simple_loss=0.3045, pruned_loss=0.06583, ctc_loss=0.1262, over 18806.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.286, pruned_loss=0.05872, ctc_loss=0.1097, over 3847673.27 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:52:50,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113194.66666666667, ans=0.0 +2024-08-26 18:52:56,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.46 vs. 
limit=15.0 +2024-08-26 18:52:58,752 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.498e+02 1.743e+02 2.034e+02 3.430e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-26 18:53:16,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=113354.66666666667, ans=0.09899494936611666 +2024-08-26 18:53:19,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113354.66666666667, ans=0.1 +2024-08-26 18:53:22,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=113408.0, ans=0.025 +2024-08-26 18:53:23,262 INFO [train.py:1114] (0/4) Epoch 9, batch 1350, loss[loss=0.1992, simple_loss=0.2718, pruned_loss=0.0451, ctc_loss=0.09085, over 19737.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2854, pruned_loss=0.05831, ctc_loss=0.1089, over 3859306.16 frames. ], batch size: 54, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:53:33,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=113461.33333333333, ans=0.125 +2024-08-26 18:53:34,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.78 vs. limit=15.0 +2024-08-26 18:53:46,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113514.66666666667, ans=0.125 +2024-08-26 18:53:49,825 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:53:53,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=113568.0, ans=0.125 +2024-08-26 18:53:55,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=113568.0, ans=0.125 +2024-08-26 18:54:06,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=113621.33333333333, ans=0.125 +2024-08-26 18:54:09,868 INFO [train.py:1114] (0/4) Epoch 9, batch 1400, loss[loss=0.2051, simple_loss=0.263, pruned_loss=0.05334, ctc_loss=0.1012, over 19662.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2855, pruned_loss=0.05827, ctc_loss=0.1088, over 3866130.34 frames. 
], batch size: 46, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:54:20,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=113728.0, ans=0.2 +2024-08-26 18:54:33,079 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.492e+02 1.644e+02 1.948e+02 2.802e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-26 18:54:33,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113781.33333333333, ans=0.125 +2024-08-26 18:54:37,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113781.33333333333, ans=0.1 +2024-08-26 18:54:38,907 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:54:54,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113888.0, ans=0.125 +2024-08-26 18:54:59,233 INFO [train.py:1114] (0/4) Epoch 9, batch 1450, loss[loss=0.2486, simple_loss=0.3004, pruned_loss=0.07228, ctc_loss=0.1308, over 19679.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2859, pruned_loss=0.05847, ctc_loss=0.1093, over 3864108.67 frames. ], batch size: 63, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:55:04,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.85 vs. limit=15.0 +2024-08-26 18:55:09,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=113941.33333333333, ans=0.025 +2024-08-26 18:55:20,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=113994.66666666667, ans=0.0 +2024-08-26 18:55:25,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=114048.0, ans=10.0 +2024-08-26 18:55:28,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114048.0, ans=0.125 +2024-08-26 18:55:29,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0 +2024-08-26 18:55:41,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114101.33333333333, ans=0.125 +2024-08-26 18:55:54,510 INFO [train.py:1114] (0/4) Epoch 9, batch 1500, loss[loss=0.239, simple_loss=0.3014, pruned_loss=0.06477, ctc_loss=0.1177, over 19600.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2863, pruned_loss=0.05856, ctc_loss=0.1095, over 3863198.78 frames. 
], batch size: 57, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:55:55,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=114208.0, ans=0.125 +2024-08-26 18:56:01,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=114208.0, ans=0.125 +2024-08-26 18:56:16,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=114314.66666666667, ans=0.0 +2024-08-26 18:56:18,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.542e+02 1.688e+02 1.884e+02 2.711e+02, threshold=3.377e+02, percent-clipped=0.0 +2024-08-26 18:56:32,447 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:56:41,347 INFO [train.py:1114] (0/4) Epoch 9, batch 1550, loss[loss=0.2519, simple_loss=0.311, pruned_loss=0.06993, ctc_loss=0.1323, over 19597.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2872, pruned_loss=0.05927, ctc_loss=0.1109, over 3847363.39 frames. ], batch size: 60, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:56:42,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=114474.66666666667, ans=0.07 +2024-08-26 18:56:43,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=114474.66666666667, ans=0.125 +2024-08-26 18:56:47,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114474.66666666667, ans=0.1 +2024-08-26 18:56:55,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=114528.0, ans=0.0 +2024-08-26 18:56:59,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-08-26 18:57:08,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=15.0 +2024-08-26 18:57:11,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=114634.66666666667, ans=0.0 +2024-08-26 18:57:12,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-26 18:57:16,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-26 18:57:17,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-26 18:57:29,653 INFO [train.py:1114] (0/4) Epoch 9, batch 1600, loss[loss=0.2364, simple_loss=0.2947, pruned_loss=0.06485, ctc_loss=0.1211, over 19838.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2875, pruned_loss=0.05971, ctc_loss=0.1116, over 3835919.64 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0 +2024-08-26 18:57:35,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.25 vs. 
limit=12.0 +2024-08-26 18:57:42,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=114794.66666666667, ans=0.125 +2024-08-26 18:57:44,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=114794.66666666667, ans=0.125 +2024-08-26 18:57:56,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0 +2024-08-26 18:57:57,607 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.291e+02 1.549e+02 1.720e+02 1.979e+02 3.573e+02, threshold=3.441e+02, percent-clipped=1.0 +2024-08-26 18:58:11,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=114901.33333333333, ans=0.0 +2024-08-26 18:58:36,384 INFO [train.py:1114] (0/4) Epoch 9, batch 1650, loss[loss=0.2455, simple_loss=0.3067, pruned_loss=0.06755, ctc_loss=0.1228, over 19639.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2873, pruned_loss=0.05955, ctc_loss=0.1111, over 3833183.94 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 32.0 +2024-08-26 18:59:46,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=115114.66666666667, ans=0.2 +2024-08-26 18:59:46,775 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.00 vs. limit=10.0 +2024-08-26 18:59:57,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=115168.0, ans=0.2 +2024-08-26 19:00:07,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=115221.33333333333, ans=0.125 +2024-08-26 19:00:11,416 INFO [train.py:1114] (0/4) Epoch 9, batch 1700, loss[loss=0.194, simple_loss=0.2575, pruned_loss=0.04751, ctc_loss=0.08855, over 19661.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2867, pruned_loss=0.0587, ctc_loss=0.1098, over 3847890.26 frames. ], batch size: 46, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:00:11,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=115274.66666666667, ans=0.125 +2024-08-26 19:00:19,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=115328.0, ans=0.0 +2024-08-26 19:00:34,656 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.433e+02 1.619e+02 1.844e+02 2.581e+02, threshold=3.239e+02, percent-clipped=0.0 +2024-08-26 19:00:35,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115381.33333333333, ans=0.1 +2024-08-26 19:00:36,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=115381.33333333333, ans=0.2 +2024-08-26 19:00:39,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115434.66666666667, ans=0.125 +2024-08-26 19:00:53,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.72 vs. 
limit=22.5 +2024-08-26 19:00:56,875 INFO [train.py:1114] (0/4) Epoch 9, batch 1750, loss[loss=0.2125, simple_loss=0.2659, pruned_loss=0.0578, ctc_loss=0.1088, over 19683.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2857, pruned_loss=0.0583, ctc_loss=0.109, over 3852696.95 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:01:05,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=115594.66666666667, ans=0.07 +2024-08-26 19:01:05,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=115594.66666666667, ans=0.0 +2024-08-26 19:01:10,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115594.66666666667, ans=0.125 +2024-08-26 19:01:30,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=115701.33333333333, ans=0.2 +2024-08-26 19:01:33,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=115754.66666666667, ans=0.125 +2024-08-26 19:01:43,101 INFO [train.py:1114] (0/4) Epoch 9, batch 1800, loss[loss=0.2098, simple_loss=0.2886, pruned_loss=0.04745, ctc_loss=0.09012, over 19616.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2859, pruned_loss=0.05839, ctc_loss=0.1091, over 3852814.33 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:01:43,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.03 vs. limit=15.0 +2024-08-26 19:01:51,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=115861.33333333333, ans=0.95 +2024-08-26 19:01:59,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.42 vs. limit=15.0 +2024-08-26 19:02:02,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=115914.66666666667, ans=0.2 +2024-08-26 19:02:06,016 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.500e+02 1.645e+02 1.953e+02 3.789e+02, threshold=3.290e+02, percent-clipped=1.0 +2024-08-26 19:02:06,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115914.66666666667, ans=0.125 +2024-08-26 19:02:27,274 INFO [train.py:1114] (0/4) Epoch 9, batch 1850, loss[loss=0.238, simple_loss=0.3074, pruned_loss=0.06102, ctc_loss=0.1163, over 19591.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2857, pruned_loss=0.05851, ctc_loss=0.109, over 3856726.08 frames. 
], batch size: 57, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:02:49,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=116181.33333333333, ans=0.95 +2024-08-26 19:03:01,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=116234.66666666667, ans=0.2 +2024-08-26 19:03:08,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=116288.0, ans=0.0 +2024-08-26 19:03:13,217 INFO [train.py:1114] (0/4) Epoch 9, batch 1900, loss[loss=0.2309, simple_loss=0.3, pruned_loss=0.05942, ctc_loss=0.1076, over 19640.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2862, pruned_loss=0.0586, ctc_loss=0.1092, over 3862410.33 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:03:17,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=116341.33333333333, ans=0.0 +2024-08-26 19:03:18,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.80 vs. limit=12.0 +2024-08-26 19:03:33,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=116448.0, ans=0.125 +2024-08-26 19:03:35,874 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.270e+02 1.509e+02 1.695e+02 1.935e+02 3.320e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-26 19:03:49,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=116554.66666666667, ans=0.125 +2024-08-26 19:03:49,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=116554.66666666667, ans=0.125 +2024-08-26 19:03:56,674 INFO [train.py:1114] (0/4) Epoch 9, batch 1950, loss[loss=0.2239, simple_loss=0.2904, pruned_loss=0.05713, ctc_loss=0.1078, over 19575.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2872, pruned_loss=0.05869, ctc_loss=0.1095, over 3871997.96 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:03:57,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=116608.0, ans=0.0 +2024-08-26 19:03:59,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116608.0, ans=0.1 +2024-08-26 19:04:08,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-08-26 19:04:09,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=116661.33333333333, ans=0.125 +2024-08-26 19:04:13,463 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:04:17,896 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.04 vs. 
limit=15.0 +2024-08-26 19:04:26,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=116768.0, ans=0.125 +2024-08-26 19:04:30,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=116768.0, ans=0.125 +2024-08-26 19:04:34,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.35 vs. limit=22.5 +2024-08-26 19:04:41,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116821.33333333333, ans=0.125 +2024-08-26 19:04:45,336 INFO [train.py:1114] (0/4) Epoch 9, batch 2000, loss[loss=0.1812, simple_loss=0.2426, pruned_loss=0.04352, ctc_loss=0.08177, over 19696.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2873, pruned_loss=0.05896, ctc_loss=0.11, over 3856507.86 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0 +2024-08-26 19:05:09,040 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.518e+02 1.711e+02 1.998e+02 4.316e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-26 19:05:11,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=117034.66666666667, ans=0.0 +2024-08-26 19:05:12,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117034.66666666667, ans=0.1 +2024-08-26 19:05:14,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=117034.66666666667, ans=0.125 +2024-08-26 19:05:16,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=117034.66666666667, ans=0.125 +2024-08-26 19:05:29,284 INFO [train.py:1114] (0/4) Epoch 9, batch 2050, loss[loss=0.1871, simple_loss=0.2554, pruned_loss=0.04269, ctc_loss=0.08361, over 19686.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2863, pruned_loss=0.05863, ctc_loss=0.1093, over 3852956.16 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:05:40,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=117194.66666666667, ans=0.125 +2024-08-26 19:05:45,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117194.66666666667, ans=0.125 +2024-08-26 19:05:45,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.24 vs. limit=15.0 +2024-08-26 19:05:51,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=117248.0, ans=0.125 +2024-08-26 19:05:51,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=117248.0, ans=0.125 +2024-08-26 19:06:08,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-08-26 19:06:12,997 INFO [train.py:1114] (0/4) Epoch 9, batch 2100, loss[loss=0.2273, simple_loss=0.2904, pruned_loss=0.06034, ctc_loss=0.1087, over 19767.00 frames. 
], tot_loss[loss=0.2229, simple_loss=0.2857, pruned_loss=0.05834, ctc_loss=0.1087, over 3859079.64 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:06:19,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117408.0, ans=0.125 +2024-08-26 19:06:36,666 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.488e+02 1.695e+02 1.945e+02 3.088e+02, threshold=3.391e+02, percent-clipped=0.0 +2024-08-26 19:06:46,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=117621.33333333333, ans=0.0 +2024-08-26 19:06:55,554 INFO [train.py:1114] (0/4) Epoch 9, batch 2150, loss[loss=0.1918, simple_loss=0.258, pruned_loss=0.04564, ctc_loss=0.08569, over 19589.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2849, pruned_loss=0.05803, ctc_loss=0.1081, over 3869879.56 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2024-08-26 19:07:02,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.01 vs. limit=15.0 +2024-08-26 19:07:03,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=15.0 +2024-08-26 19:07:05,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=117728.0, ans=0.0 +2024-08-26 19:07:06,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=117728.0, ans=0.125 +2024-08-26 19:07:15,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=117781.33333333333, ans=0.2 +2024-08-26 19:07:15,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=117781.33333333333, ans=0.0 +2024-08-26 19:07:15,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117781.33333333333, ans=0.0 +2024-08-26 19:07:23,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=117834.66666666667, ans=0.125 +2024-08-26 19:07:25,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=117834.66666666667, ans=0.0 +2024-08-26 19:07:38,969 INFO [train.py:1114] (0/4) Epoch 9, batch 2200, loss[loss=0.226, simple_loss=0.2902, pruned_loss=0.05903, ctc_loss=0.1093, over 19579.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2854, pruned_loss=0.05839, ctc_loss=0.1089, over 3869284.31 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:07:40,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=117941.33333333333, ans=0.025 +2024-08-26 19:07:48,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.14 vs. 
limit=22.5 +2024-08-26 19:07:56,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=118048.0, ans=0.0 +2024-08-26 19:07:56,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=118048.0, ans=0.125 +2024-08-26 19:08:03,128 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.528e+02 1.792e+02 2.132e+02 3.306e+02, threshold=3.583e+02, percent-clipped=0.0 +2024-08-26 19:08:19,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=118154.66666666667, ans=0.2 +2024-08-26 19:08:34,323 INFO [train.py:1114] (0/4) Epoch 9, batch 2250, loss[loss=0.2057, simple_loss=0.278, pruned_loss=0.04791, ctc_loss=0.09361, over 19627.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2854, pruned_loss=0.05806, ctc_loss=0.1085, over 3868578.45 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:08:56,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118314.66666666667, ans=0.125 +2024-08-26 19:09:02,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. limit=6.0 +2024-08-26 19:09:17,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=12.0 +2024-08-26 19:09:17,826 INFO [train.py:1114] (0/4) Epoch 9, batch 2300, loss[loss=0.1863, simple_loss=0.2521, pruned_loss=0.04392, ctc_loss=0.08149, over 19510.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2838, pruned_loss=0.05753, ctc_loss=0.1076, over 3861868.07 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:09:33,795 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=12.0 +2024-08-26 19:09:42,035 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.479e+02 1.669e+02 2.317e+02 3.988e+02, threshold=3.338e+02, percent-clipped=3.0 +2024-08-26 19:09:51,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118634.66666666667, ans=0.1 +2024-08-26 19:09:54,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=118688.0, ans=0.125 +2024-08-26 19:10:01,389 INFO [train.py:1114] (0/4) Epoch 9, batch 2350, loss[loss=0.2364, simple_loss=0.2997, pruned_loss=0.0623, ctc_loss=0.1215, over 19635.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2837, pruned_loss=0.05742, ctc_loss=0.1075, over 3864113.15 frames. 
], batch size: 63, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:10:05,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=118741.33333333333, ans=0.125 +2024-08-26 19:10:06,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=118741.33333333333, ans=0.2 +2024-08-26 19:10:10,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118794.66666666667, ans=0.125 +2024-08-26 19:10:10,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.23 vs. limit=10.0 +2024-08-26 19:11:00,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=118848.0, ans=0.125 +2024-08-26 19:11:01,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118848.0, ans=0.1 +2024-08-26 19:11:01,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=118848.0, ans=0.125 +2024-08-26 19:11:06,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=118848.0, ans=0.04949747468305833 +2024-08-26 19:11:10,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=118901.33333333333, ans=15.0 +2024-08-26 19:11:18,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118954.66666666667, ans=0.1 +2024-08-26 19:11:23,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118954.66666666667, ans=0.125 +2024-08-26 19:11:29,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=118954.66666666667, ans=0.125 +2024-08-26 19:11:31,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=118954.66666666667, ans=0.125 +2024-08-26 19:11:32,749 INFO [train.py:1114] (0/4) Epoch 9, batch 2400, loss[loss=0.2491, simple_loss=0.3036, pruned_loss=0.07143, ctc_loss=0.1294, over 19405.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.286, pruned_loss=0.05855, ctc_loss=0.1093, over 3857848.50 frames. ], batch size: 67, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:11:36,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=119008.0, ans=0.125 +2024-08-26 19:11:44,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=119008.0, ans=0.07 +2024-08-26 19:11:52,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. 
limit=15.0 +2024-08-26 19:12:03,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=119114.66666666667, ans=0.0 +2024-08-26 19:12:04,696 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.526e+02 1.714e+02 1.892e+02 3.175e+02, threshold=3.427e+02, percent-clipped=0.0 +2024-08-26 19:12:23,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.75 vs. limit=15.0 +2024-08-26 19:12:24,795 INFO [train.py:1114] (0/4) Epoch 9, batch 2450, loss[loss=0.309, simple_loss=0.3321, pruned_loss=0.102, ctc_loss=0.205, over 13679.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2905, pruned_loss=0.06217, ctc_loss=0.1166, over 3728798.97 frames. ], batch size: 140, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:12:32,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=119274.66666666667, ans=0.125 +2024-08-26 19:13:11,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119434.66666666667, ans=0.125 +2024-08-26 19:13:15,623 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-9.pt +2024-08-26 19:14:15,882 INFO [train.py:1114] (0/4) Epoch 10, batch 0, loss[loss=0.2146, simple_loss=0.2727, pruned_loss=0.05705, ctc_loss=0.1061, over 19806.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2727, pruned_loss=0.05705, ctc_loss=0.1061, over 19806.00 frames. ], batch size: 49, lr: 1.53e-02, grad_scale: 16.0 +2024-08-26 19:14:15,883 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 19:14:48,059 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.1896, simple_loss=0.2813, pruned_loss=0.03622, ctc_loss=0.0637, over 944034.00 frames. +2024-08-26 19:14:48,061 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-26 19:14:52,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=119482.66666666667, ans=0.125 +2024-08-26 19:15:03,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119536.0, ans=0.1 +2024-08-26 19:15:25,083 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.696e+02 1.867e+02 2.057e+02 3.331e+02, threshold=3.733e+02, percent-clipped=0.0 +2024-08-26 19:15:25,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=119696.0, ans=0.0 +2024-08-26 19:15:34,236 INFO [train.py:1114] (0/4) Epoch 10, batch 50, loss[loss=0.1856, simple_loss=0.2487, pruned_loss=0.04434, ctc_loss=0.08443, over 19711.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2884, pruned_loss=0.05847, ctc_loss=0.1104, over 843213.08 frames. 
], batch size: 47, lr: 1.52e-02, grad_scale: 16.0 +2024-08-26 19:15:48,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=119802.66666666667, ans=0.125 +2024-08-26 19:15:48,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=119802.66666666667, ans=0.2 +2024-08-26 19:16:03,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119909.33333333333, ans=0.125 +2024-08-26 19:16:14,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=119962.66666666667, ans=0.125 +2024-08-26 19:16:20,477 INFO [train.py:1114] (0/4) Epoch 10, batch 100, loss[loss=0.2038, simple_loss=0.273, pruned_loss=0.0482, ctc_loss=0.0954, over 19710.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2889, pruned_loss=0.0585, ctc_loss=0.1099, over 1497783.82 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:16:21,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=120016.0, ans=0.125 +2024-08-26 19:16:52,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.07 vs. limit=6.0 +2024-08-26 19:16:53,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=120176.0, ans=0.0 +2024-08-26 19:16:55,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120176.0, ans=0.0 +2024-08-26 19:17:03,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.471e+02 1.633e+02 1.792e+02 2.780e+02, threshold=3.265e+02, percent-clipped=0.0 +2024-08-26 19:17:09,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120229.33333333333, ans=0.125 +2024-08-26 19:17:11,629 INFO [train.py:1114] (0/4) Epoch 10, batch 150, loss[loss=0.1958, simple_loss=0.2671, pruned_loss=0.04555, ctc_loss=0.08362, over 19725.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.287, pruned_loss=0.05807, ctc_loss=0.1085, over 2026659.98 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:17:20,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-26 19:17:20,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-26 19:17:48,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=120442.66666666667, ans=0.0 +2024-08-26 19:18:04,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=120496.0, ans=0.125 +2024-08-26 19:18:04,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=120496.0, ans=0.5 +2024-08-26 19:18:07,157 INFO [train.py:1114] (0/4) Epoch 10, batch 200, loss[loss=0.2401, simple_loss=0.2925, pruned_loss=0.06806, ctc_loss=0.1288, over 18272.00 frames. 
], tot_loss[loss=0.2218, simple_loss=0.2851, pruned_loss=0.05768, ctc_loss=0.1079, over 2434216.39 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:18:30,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120549.33333333333, ans=0.125 +2024-08-26 19:18:45,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120656.0, ans=0.125 +2024-08-26 19:18:47,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-26 19:18:52,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120709.33333333333, ans=0.1 +2024-08-26 19:19:08,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=120709.33333333333, ans=0.125 +2024-08-26 19:19:12,212 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.459e+02 1.596e+02 1.815e+02 3.041e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-26 19:19:44,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=120762.66666666667, ans=0.2 +2024-08-26 19:19:48,326 INFO [train.py:1114] (0/4) Epoch 10, batch 250, loss[loss=0.2287, simple_loss=0.2993, pruned_loss=0.05737, ctc_loss=0.1085, over 19445.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2844, pruned_loss=0.05689, ctc_loss=0.1067, over 2755282.56 frames. ], batch size: 67, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:19:49,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=120816.0, ans=0.0 +2024-08-26 19:19:51,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=120816.0, ans=0.0 +2024-08-26 19:19:51,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.80 vs. limit=10.0 +2024-08-26 19:19:57,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.00 vs. limit=15.0 +2024-08-26 19:20:05,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120869.33333333333, ans=0.125 +2024-08-26 19:20:15,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0 +2024-08-26 19:20:17,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=120922.66666666667, ans=0.125 +2024-08-26 19:20:28,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=120976.0, ans=0.0 +2024-08-26 19:20:45,510 INFO [train.py:1114] (0/4) Epoch 10, batch 300, loss[loss=0.2353, simple_loss=0.3013, pruned_loss=0.06156, ctc_loss=0.1152, over 19529.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2833, pruned_loss=0.05643, ctc_loss=0.1058, over 3000652.65 frames. 
], batch size: 61, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:21:03,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=15.0 +2024-08-26 19:21:14,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=121189.33333333333, ans=0.035 +2024-08-26 19:21:15,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=121189.33333333333, ans=0.0 +2024-08-26 19:21:22,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=121242.66666666667, ans=0.125 +2024-08-26 19:21:29,978 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.480e+02 1.641e+02 1.981e+02 3.456e+02, threshold=3.281e+02, percent-clipped=2.0 +2024-08-26 19:21:33,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=121296.0, ans=0.0 +2024-08-26 19:21:34,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=121296.0, ans=0.0 +2024-08-26 19:21:38,273 INFO [train.py:1114] (0/4) Epoch 10, batch 350, loss[loss=0.208, simple_loss=0.2641, pruned_loss=0.05642, ctc_loss=0.09755, over 19747.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2837, pruned_loss=0.05676, ctc_loss=0.1066, over 3191619.59 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:21:41,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=121349.33333333333, ans=0.125 +2024-08-26 19:21:45,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=121349.33333333333, ans=0.125 +2024-08-26 19:21:52,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=121402.66666666667, ans=0.05 +2024-08-26 19:21:58,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=121456.0, ans=0.125 +2024-08-26 19:22:24,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=121616.0, ans=0.09899494936611666 +2024-08-26 19:22:24,844 INFO [train.py:1114] (0/4) Epoch 10, batch 400, loss[loss=0.2177, simple_loss=0.2849, pruned_loss=0.05535, ctc_loss=0.09919, over 19862.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2827, pruned_loss=0.05617, ctc_loss=0.1054, over 3342903.28 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:22:27,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. 
limit=6.0 +2024-08-26 19:22:28,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121616.0, ans=0.0 +2024-08-26 19:22:30,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=121616.0, ans=0.2 +2024-08-26 19:22:34,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=121616.0, ans=0.125 +2024-08-26 19:22:35,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121669.33333333333, ans=0.1 +2024-08-26 19:22:40,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=121669.33333333333, ans=0.125 +2024-08-26 19:23:09,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121776.0, ans=0.1 +2024-08-26 19:23:18,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.471e+02 1.735e+02 2.020e+02 3.245e+02, threshold=3.470e+02, percent-clipped=0.0 +2024-08-26 19:23:23,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.04 vs. limit=22.5 +2024-08-26 19:23:26,370 INFO [train.py:1114] (0/4) Epoch 10, batch 450, loss[loss=0.1946, simple_loss=0.2747, pruned_loss=0.04033, ctc_loss=0.08458, over 19603.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2831, pruned_loss=0.05656, ctc_loss=0.1058, over 3450466.04 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:23:49,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=121989.33333333333, ans=0.0 +2024-08-26 19:23:56,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=122042.66666666667, ans=0.125 +2024-08-26 19:24:06,418 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:24:19,331 INFO [train.py:1114] (0/4) Epoch 10, batch 500, loss[loss=0.2184, simple_loss=0.2846, pruned_loss=0.05528, ctc_loss=0.1043, over 19671.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2819, pruned_loss=0.0561, ctc_loss=0.1049, over 3546117.80 frames. ], batch size: 63, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:24:26,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=122149.33333333333, ans=0.0 +2024-08-26 19:24:26,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.18 vs. 
limit=15.0 +2024-08-26 19:24:28,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=122149.33333333333, ans=0.05 +2024-08-26 19:24:33,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122149.33333333333, ans=0.1 +2024-08-26 19:25:07,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=122309.33333333333, ans=0.125 +2024-08-26 19:25:10,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-26 19:25:10,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-26 19:25:11,346 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.449e+02 1.637e+02 1.959e+02 3.375e+02, threshold=3.275e+02, percent-clipped=0.0 +2024-08-26 19:25:17,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-26 19:25:19,722 INFO [train.py:1114] (0/4) Epoch 10, batch 550, loss[loss=0.2392, simple_loss=0.3049, pruned_loss=0.06326, ctc_loss=0.1173, over 19236.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2821, pruned_loss=0.0563, ctc_loss=0.1052, over 3606988.23 frames. ], batch size: 71, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:25:20,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=122416.0, ans=0.125 +2024-08-26 19:25:33,161 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:25:34,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=122469.33333333333, ans=0.0 +2024-08-26 19:25:43,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.35 vs. limit=15.0 +2024-08-26 19:26:00,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=122629.33333333333, ans=0.1 +2024-08-26 19:26:10,298 INFO [train.py:1114] (0/4) Epoch 10, batch 600, loss[loss=0.2492, simple_loss=0.3069, pruned_loss=0.06866, ctc_loss=0.1353, over 19406.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2824, pruned_loss=0.05637, ctc_loss=0.1054, over 3665624.41 frames. ], batch size: 67, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:26:50,249 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.480e+02 1.661e+02 1.846e+02 3.271e+02, threshold=3.322e+02, percent-clipped=0.0 +2024-08-26 19:26:50,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=122896.0, ans=0.0 +2024-08-26 19:26:58,408 INFO [train.py:1114] (0/4) Epoch 10, batch 650, loss[loss=0.213, simple_loss=0.2789, pruned_loss=0.05328, ctc_loss=0.1014, over 19767.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2817, pruned_loss=0.05589, ctc_loss=0.1047, over 3716191.05 frames. 
], batch size: 54, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:27:43,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=123162.66666666667, ans=0.04949747468305833 +2024-08-26 19:27:49,792 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:27:51,538 INFO [train.py:1114] (0/4) Epoch 10, batch 700, loss[loss=0.205, simple_loss=0.2765, pruned_loss=0.04828, ctc_loss=0.0922, over 19721.00 frames. ], tot_loss[loss=0.218, simple_loss=0.282, pruned_loss=0.05603, ctc_loss=0.1048, over 3747909.39 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:27:54,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=123216.0, ans=0.125 +2024-08-26 19:28:04,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=123269.33333333333, ans=0.125 +2024-08-26 19:28:08,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=123269.33333333333, ans=0.125 +2024-08-26 19:28:20,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123376.0, ans=0.125 +2024-08-26 19:28:29,133 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.526e+02 1.912e+02 2.394e+02 4.336e+02, threshold=3.825e+02, percent-clipped=8.0 +2024-08-26 19:28:36,473 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0 +2024-08-26 19:28:38,776 INFO [train.py:1114] (0/4) Epoch 10, batch 750, loss[loss=0.2271, simple_loss=0.2954, pruned_loss=0.05779, ctc_loss=0.1081, over 19860.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2822, pruned_loss=0.05615, ctc_loss=0.105, over 3774351.17 frames. ], batch size: 55, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:28:57,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.36 vs. limit=15.0 +2024-08-26 19:29:23,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123696.0, ans=0.1 +2024-08-26 19:29:27,355 INFO [train.py:1114] (0/4) Epoch 10, batch 800, loss[loss=0.1818, simple_loss=0.2556, pruned_loss=0.03836, ctc_loss=0.07814, over 19816.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2822, pruned_loss=0.05615, ctc_loss=0.1049, over 3795624.09 frames. 
], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:29:32,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=123749.33333333333, ans=0.0 +2024-08-26 19:29:47,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123856.0, ans=0.1 +2024-08-26 19:29:54,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=123856.0, ans=0.025 +2024-08-26 19:29:58,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=123909.33333333333, ans=0.125 +2024-08-26 19:30:07,521 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.505e+02 1.745e+02 2.038e+02 4.368e+02, threshold=3.490e+02, percent-clipped=1.0 +2024-08-26 19:30:08,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=123962.66666666667, ans=0.125 +2024-08-26 19:30:17,637 INFO [train.py:1114] (0/4) Epoch 10, batch 850, loss[loss=0.2168, simple_loss=0.2857, pruned_loss=0.05335, ctc_loss=0.1028, over 19633.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2817, pruned_loss=0.05592, ctc_loss=0.1044, over 3815807.51 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:30:30,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124069.33333333333, ans=0.125 +2024-08-26 19:30:32,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=124069.33333333333, ans=0.125 +2024-08-26 19:30:34,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=124069.33333333333, ans=0.125 +2024-08-26 19:30:38,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=124122.66666666667, ans=0.125 +2024-08-26 19:30:42,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=124122.66666666667, ans=0.02 +2024-08-26 19:30:50,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=124176.0, ans=0.0 +2024-08-26 19:31:08,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.27 vs. limit=6.0 +2024-08-26 19:31:14,610 INFO [train.py:1114] (0/4) Epoch 10, batch 900, loss[loss=0.209, simple_loss=0.2612, pruned_loss=0.05724, ctc_loss=0.1055, over 19398.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2818, pruned_loss=0.05648, ctc_loss=0.1052, over 3820191.06 frames. 
], batch size: 48, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:31:17,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=124282.66666666667, ans=0.0 +2024-08-26 19:31:18,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=124282.66666666667, ans=0.05 +2024-08-26 19:31:23,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124282.66666666667, ans=0.125 +2024-08-26 19:31:23,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=124282.66666666667, ans=0.0 +2024-08-26 19:32:20,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=124389.33333333333, ans=0.125 +2024-08-26 19:32:23,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.28 vs. limit=15.0 +2024-08-26 19:32:29,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=124442.66666666667, ans=0.0 +2024-08-26 19:32:32,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=124496.0, ans=0.0 +2024-08-26 19:32:35,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.525e+02 1.733e+02 2.036e+02 4.140e+02, threshold=3.466e+02, percent-clipped=3.0 +2024-08-26 19:32:42,447 INFO [train.py:1114] (0/4) Epoch 10, batch 950, loss[loss=0.2078, simple_loss=0.2738, pruned_loss=0.05093, ctc_loss=0.09959, over 19493.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2823, pruned_loss=0.05665, ctc_loss=0.1055, over 3821394.20 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:32:43,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=124549.33333333333, ans=0.0 +2024-08-26 19:32:48,509 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.46 vs. limit=22.5 +2024-08-26 19:33:17,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.11 vs. limit=15.0 +2024-08-26 19:33:18,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124709.33333333333, ans=0.1 +2024-08-26 19:33:18,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=124709.33333333333, ans=0.2 +2024-08-26 19:33:33,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124762.66666666667, ans=0.125 +2024-08-26 19:33:33,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.41 vs. 
limit=10.0
+2024-08-26 19:33:35,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124762.66666666667, ans=0.1
+2024-08-26 19:33:36,604 INFO [train.py:1114] (0/4) Epoch 10, batch 1000, loss[loss=0.2046, simple_loss=0.2751, pruned_loss=0.04792, ctc_loss=0.09569, over 19839.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2827, pruned_loss=0.05681, ctc_loss=0.1058, over 3818105.96 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 16.0
+2024-08-26 19:33:40,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124816.0, ans=0.1
+2024-08-26 19:33:51,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=124869.33333333333, ans=0.0
+2024-08-26 19:33:51,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=124869.33333333333, ans=0.2
+2024-08-26 19:33:56,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=124869.33333333333, ans=0.2
+2024-08-26 19:34:18,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=125029.33333333333, ans=0.0
+2024-08-26 19:34:19,956 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.433e+02 1.580e+02 1.832e+02 3.141e+02, threshold=3.159e+02, percent-clipped=0.0
+2024-08-26 19:34:24,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=125029.33333333333, ans=0.125
+2024-08-26 19:34:27,369 INFO [train.py:1114] (0/4) Epoch 10, batch 1050, loss[loss=0.2098, simple_loss=0.2878, pruned_loss=0.04756, ctc_loss=0.09201, over 19856.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2821, pruned_loss=0.05643, ctc_loss=0.1053, over 3822872.28 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 16.0
+2024-08-26 19:34:51,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=125082.66666666667, ans=0.07
+2024-08-26 19:35:21,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=125242.66666666667, ans=0.125
+2024-08-26 19:35:26,107 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:35:28,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=125296.0, ans=0.1
+2024-08-26 19:35:36,345 INFO [train.py:1114] (0/4) Epoch 10, batch 1100, loss[loss=0.2071, simple_loss=0.2752, pruned_loss=0.05049, ctc_loss=0.0952, over 19586.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2819, pruned_loss=0.05619, ctc_loss=0.1048, over 3830875.15 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0
+2024-08-26 19:35:50,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=125402.66666666667, ans=0.025
+2024-08-26 19:35:55,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.64 vs. limit=15.0
+2024-08-26 19:36:03,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.91 vs. limit=6.0
+2024-08-26 19:36:06,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.04 vs. limit=15.0
+2024-08-26 19:36:07,304 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.51 vs. limit=22.5
+2024-08-26 19:36:16,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.43 vs. limit=15.0
+2024-08-26 19:36:18,875 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.433e+02 1.605e+02 1.841e+02 2.779e+02, threshold=3.211e+02, percent-clipped=0.0
+2024-08-26 19:36:25,415 INFO [train.py:1114] (0/4) Epoch 10, batch 1150, loss[loss=0.1883, simple_loss=0.2623, pruned_loss=0.04146, ctc_loss=0.07853, over 19602.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2819, pruned_loss=0.05616, ctc_loss=0.1048, over 3828605.14 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0
+2024-08-26 19:36:28,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=125616.0, ans=0.0
+2024-08-26 19:36:30,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=125616.0, ans=0.0
+2024-08-26 19:36:48,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=125722.66666666667, ans=10.0
+2024-08-26 19:37:11,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=125829.33333333333, ans=0.125
+2024-08-26 19:37:17,645 INFO [train.py:1114] (0/4) Epoch 10, batch 1200, loss[loss=0.2189, simple_loss=0.2897, pruned_loss=0.05231, ctc_loss=0.1085, over 19844.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2825, pruned_loss=0.05627, ctc_loss=0.1054, over 3823900.47 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:37:34,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=125936.0, ans=0.2
+2024-08-26 19:37:48,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.89 vs. limit=22.5
+2024-08-26 19:37:56,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=126096.0, ans=0.0
+2024-08-26 19:37:57,392 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.466e+02 1.608e+02 1.824e+02 2.979e+02, threshold=3.216e+02, percent-clipped=0.0
+2024-08-26 19:37:57,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=126096.0, ans=0.0
+2024-08-26 19:37:57,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0
+2024-08-26 19:38:04,053 INFO [train.py:1114] (0/4) Epoch 10, batch 1250, loss[loss=0.2253, simple_loss=0.2902, pruned_loss=0.05759, ctc_loss=0.1131, over 19528.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2825, pruned_loss=0.05593, ctc_loss=0.1047, over 3842182.81 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:38:33,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=126256.0, ans=0.125
+2024-08-26 19:38:39,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=126256.0, ans=0.125
+2024-08-26 19:38:46,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.32 vs. limit=22.5
+2024-08-26 19:39:04,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=126309.33333333333, ans=0.05
+2024-08-26 19:40:04,495 INFO [train.py:1114] (0/4) Epoch 10, batch 1300, loss[loss=0.2237, simple_loss=0.2919, pruned_loss=0.05593, ctc_loss=0.1088, over 18915.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2816, pruned_loss=0.05554, ctc_loss=0.1039, over 3846635.47 frames. ], batch size: 76, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:40:12,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=126416.0, ans=0.0
+2024-08-26 19:40:18,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=126469.33333333333, ans=0.0
+2024-08-26 19:40:24,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126469.33333333333, ans=0.1
+2024-08-26 19:40:25,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126469.33333333333, ans=0.125
+2024-08-26 19:40:41,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=126576.0, ans=0.025
+2024-08-26 19:40:54,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.480e+02 1.716e+02 1.981e+02 3.061e+02, threshold=3.432e+02, percent-clipped=0.0
+2024-08-26 19:41:00,864 INFO [train.py:1114] (0/4) Epoch 10, batch 1350, loss[loss=0.209, simple_loss=0.2739, pruned_loss=0.05248, ctc_loss=0.09775, over 19767.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2814, pruned_loss=0.05532, ctc_loss=0.1034, over 3858173.24 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:41:02,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=126682.66666666667, ans=0.0
+2024-08-26 19:41:12,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126736.0, ans=0.1
+2024-08-26 19:41:16,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=126736.0, ans=0.125
+2024-08-26 19:41:19,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126736.0, ans=0.125
+2024-08-26 19:41:20,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.02 vs. limit=15.0
+2024-08-26 19:41:24,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=126789.33333333333, ans=0.2
+2024-08-26 19:41:24,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=126789.33333333333, ans=0.015
+2024-08-26 19:41:30,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126789.33333333333, ans=0.1
+2024-08-26 19:41:33,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=126842.66666666667, ans=0.125
+2024-08-26 19:41:39,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=126842.66666666667, ans=0.07
+2024-08-26 19:41:39,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=126842.66666666667, ans=0.125
+2024-08-26 19:41:52,414 INFO [train.py:1114] (0/4) Epoch 10, batch 1400, loss[loss=0.1955, simple_loss=0.2528, pruned_loss=0.05004, ctc_loss=0.09512, over 19682.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2808, pruned_loss=0.05494, ctc_loss=0.1029, over 3865225.95 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:42:25,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=127056.0, ans=0.09899494936611666
+2024-08-26 19:42:26,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-08-26 19:42:34,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=127109.33333333333, ans=0.125
+2024-08-26 19:42:38,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=12.0
+2024-08-26 19:42:41,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=127162.66666666667, ans=0.0
+2024-08-26 19:42:41,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=127162.66666666667, ans=0.0
+2024-08-26 19:42:43,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.452e+02 1.585e+02 1.952e+02 4.788e+02, threshold=3.170e+02, percent-clipped=2.0
+2024-08-26 19:42:47,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.28 vs. limit=8.0
+2024-08-26 19:42:49,762 INFO [train.py:1114] (0/4) Epoch 10, batch 1450, loss[loss=0.2366, simple_loss=0.3058, pruned_loss=0.061, ctc_loss=0.1134, over 19636.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.282, pruned_loss=0.05547, ctc_loss=0.1037, over 3863599.03 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 16.0
+2024-08-26 19:43:07,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=127322.66666666667, ans=0.125
+2024-08-26 19:43:14,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=127322.66666666667, ans=0.0
+2024-08-26 19:43:15,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=127322.66666666667, ans=0.0
+2024-08-26 19:43:40,835 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:43:44,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127429.33333333333, ans=0.125
+2024-08-26 19:43:48,225 INFO [train.py:1114] (0/4) Epoch 10, batch 1500, loss[loss=0.2431, simple_loss=0.3034, pruned_loss=0.06687, ctc_loss=0.1227, over 19587.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2824, pruned_loss=0.05551, ctc_loss=0.1038, over 3862952.85 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 16.0
+2024-08-26 19:43:48,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.78 vs. limit=12.0
+2024-08-26 19:43:58,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=127482.66666666667, ans=0.05
+2024-08-26 19:44:03,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127536.0, ans=0.125
+2024-08-26 19:44:25,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127642.66666666667, ans=0.1
+2024-08-26 19:44:28,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127642.66666666667, ans=0.1
+2024-08-26 19:44:37,667 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.427e+02 1.587e+02 1.794e+02 3.285e+02, threshold=3.174e+02, percent-clipped=1.0
+2024-08-26 19:44:51,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=127749.33333333333, ans=0.0
+2024-08-26 19:44:52,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0
+2024-08-26 19:44:52,457 INFO [train.py:1114] (0/4) Epoch 10, batch 1550, loss[loss=0.2252, simple_loss=0.2835, pruned_loss=0.06098, ctc_loss=0.1124, over 19610.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2821, pruned_loss=0.05549, ctc_loss=0.1041, over 3847527.24 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0
+2024-08-26 19:44:57,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=127749.33333333333, ans=0.125
+2024-08-26 19:45:18,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127856.0, ans=0.125
+2024-08-26 19:45:35,905 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-24000.pt
+2024-08-26 19:45:43,642 INFO [train.py:1114] (0/4) Epoch 10, batch 1600, loss[loss=0.2236, simple_loss=0.2875, pruned_loss=0.05772, ctc_loss=0.1108, over 19839.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.282, pruned_loss=0.0555, ctc_loss=0.1041, over 3836742.72 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:45:49,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=128016.0, ans=0.125
+2024-08-26 19:45:51,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=128016.0, ans=0.0
+2024-08-26 19:46:02,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=128122.66666666667, ans=0.125
+2024-08-26 19:46:04,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=128122.66666666667, ans=0.125
+2024-08-26 19:46:06,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=128122.66666666667, ans=0.95
+2024-08-26 19:46:18,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=128176.0, ans=0.04949747468305833
+2024-08-26 19:46:26,520 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.460e+02 1.671e+02 2.068e+02 2.984e+02, threshold=3.342e+02, percent-clipped=0.0
+2024-08-26 19:46:33,068 INFO [train.py:1114] (0/4) Epoch 10, batch 1650, loss[loss=0.2198, simple_loss=0.2864, pruned_loss=0.05611, ctc_loss=0.1026, over 19641.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2819, pruned_loss=0.05566, ctc_loss=0.1043, over 3834161.07 frames. ], batch size: 59, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:46:35,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=128282.66666666667, ans=0.0
+2024-08-26 19:46:59,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=128389.33333333333, ans=0.5
+2024-08-26 19:47:03,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128389.33333333333, ans=0.1
+2024-08-26 19:47:05,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=128442.66666666667, ans=0.125
+2024-08-26 19:47:28,686 INFO [train.py:1114] (0/4) Epoch 10, batch 1700, loss[loss=0.1978, simple_loss=0.2581, pruned_loss=0.05019, ctc_loss=0.09276, over 19696.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2816, pruned_loss=0.0553, ctc_loss=0.1034, over 3847940.54 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:47:29,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=128549.33333333333, ans=0.2
+2024-08-26 19:47:39,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=128602.66666666667, ans=0.2
+2024-08-26 19:47:40,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=128602.66666666667, ans=0.125
+2024-08-26 19:47:48,635 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.02 vs. limit=22.5
+2024-08-26 19:47:51,047 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:48:10,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=15.0
+2024-08-26 19:48:14,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=128709.33333333333, ans=0.0
+2024-08-26 19:48:16,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.81 vs. limit=10.0
+2024-08-26 19:48:17,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=128762.66666666667, ans=0.0
+2024-08-26 19:48:18,885 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.440e+02 1.568e+02 1.897e+02 2.765e+02, threshold=3.136e+02, percent-clipped=0.0
+2024-08-26 19:48:25,121 INFO [train.py:1114] (0/4) Epoch 10, batch 1750, loss[loss=0.1924, simple_loss=0.2534, pruned_loss=0.04704, ctc_loss=0.09322, over 19649.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.281, pruned_loss=0.05514, ctc_loss=0.1032, over 3852797.68 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:48:32,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=128816.0, ans=0.2
+2024-08-26 19:48:43,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.65 vs. limit=8.0
+2024-08-26 19:48:49,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128922.66666666667, ans=0.1
+2024-08-26 19:48:50,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=128976.0, ans=0.125
+2024-08-26 19:48:57,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.36 vs. limit=12.0
+2024-08-26 19:49:03,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0
+2024-08-26 19:49:08,981 INFO [train.py:1114] (0/4) Epoch 10, batch 1800, loss[loss=0.2041, simple_loss=0.28, pruned_loss=0.04691, ctc_loss=0.0857, over 19616.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2812, pruned_loss=0.05523, ctc_loss=0.1034, over 3854022.31 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:49:13,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=129082.66666666667, ans=0.0
+2024-08-26 19:49:47,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=129296.0, ans=0.0
+2024-08-26 19:49:49,354 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.466e+02 1.715e+02 2.130e+02 3.505e+02, threshold=3.430e+02, percent-clipped=4.0
+2024-08-26 19:49:54,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=129349.33333333333, ans=0.125
+2024-08-26 19:49:55,605 INFO [train.py:1114] (0/4) Epoch 10, batch 1850, loss[loss=0.2382, simple_loss=0.303, pruned_loss=0.06253, ctc_loss=0.1209, over 19577.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2805, pruned_loss=0.05479, ctc_loss=0.1024, over 3858493.05 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:50:00,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=129349.33333333333, ans=0.025
+2024-08-26 19:50:03,880 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.12 vs. limit=10.0
+2024-08-26 19:50:05,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=129402.66666666667, ans=0.025
+2024-08-26 19:50:15,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129456.0, ans=0.125
+2024-08-26 19:50:41,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=129562.66666666667, ans=0.125
+2024-08-26 19:50:42,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=129562.66666666667, ans=0.2
+2024-08-26 19:50:50,337 INFO [train.py:1114] (0/4) Epoch 10, batch 1900, loss[loss=0.218, simple_loss=0.2864, pruned_loss=0.05337, ctc_loss=0.1071, over 19653.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2813, pruned_loss=0.05503, ctc_loss=0.1027, over 3864230.46 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:50:53,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=129616.0, ans=0.125
+2024-08-26 19:50:56,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=129616.0, ans=0.125
+2024-08-26 19:51:07,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=129722.66666666667, ans=0.125
+2024-08-26 19:51:14,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129722.66666666667, ans=0.1
+2024-08-26 19:51:27,696 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.498e+02 1.655e+02 1.944e+02 4.101e+02, threshold=3.311e+02, percent-clipped=1.0
+2024-08-26 19:51:33,750 INFO [train.py:1114] (0/4) Epoch 10, batch 1950, loss[loss=0.1924, simple_loss=0.2667, pruned_loss=0.04241, ctc_loss=0.08322, over 19591.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2819, pruned_loss=0.05497, ctc_loss=0.1026, over 3872510.94 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:51:37,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=129882.66666666667, ans=0.05
+2024-08-26 19:51:45,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=129936.0, ans=0.2
+2024-08-26 19:51:47,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=129936.0, ans=0.0
+2024-08-26 19:51:53,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=129989.33333333333, ans=0.0
+2024-08-26 19:52:51,606 INFO [train.py:1114] (0/4) Epoch 10, batch 2000, loss[loss=0.2174, simple_loss=0.2676, pruned_loss=0.06198, ctc_loss=0.1082, over 19679.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2828, pruned_loss=0.05562, ctc_loss=0.104, over 3857015.78 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:53:02,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=130202.66666666667, ans=0.125
+2024-08-26 19:53:18,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130309.33333333333, ans=0.125
+2024-08-26 19:53:29,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.467e+02 1.617e+02 1.850e+02 3.299e+02, threshold=3.233e+02, percent-clipped=0.0
+2024-08-26 19:53:31,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=130362.66666666667, ans=0.125
+2024-08-26 19:53:33,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=130362.66666666667, ans=0.5
+2024-08-26 19:53:35,211 INFO [train.py:1114] (0/4) Epoch 10, batch 2050, loss[loss=0.1808, simple_loss=0.2429, pruned_loss=0.04265, ctc_loss=0.08354, over 19736.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2815, pruned_loss=0.05531, ctc_loss=0.1034, over 3851578.47 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:53:45,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=130469.33333333333, ans=0.2
+2024-08-26 19:53:46,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=15.0
+2024-08-26 19:54:18,664 INFO [train.py:1114] (0/4) Epoch 10, batch 2100, loss[loss=0.205, simple_loss=0.2735, pruned_loss=0.04899, ctc_loss=0.09645, over 19749.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2808, pruned_loss=0.05456, ctc_loss=0.1018, over 3858479.51 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:54:19,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=130682.66666666667, ans=0.125
+2024-08-26 19:54:22,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=130682.66666666667, ans=0.125
+2024-08-26 19:54:26,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=130736.0, ans=0.2
+2024-08-26 19:54:35,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=130789.33333333333, ans=0.0
+2024-08-26 19:54:40,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=130789.33333333333, ans=0.09899494936611666
+2024-08-26 19:54:45,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=130842.66666666667, ans=0.125
+2024-08-26 19:54:53,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130896.0, ans=0.0
+2024-08-26 19:54:56,941 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.404e+02 1.614e+02 1.979e+02 3.349e+02, threshold=3.228e+02, percent-clipped=1.0
+2024-08-26 19:55:03,212 INFO [train.py:1114] (0/4) Epoch 10, batch 2150, loss[loss=0.2003, simple_loss=0.2662, pruned_loss=0.0488, ctc_loss=0.09212, over 19579.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2803, pruned_loss=0.0545, ctc_loss=0.1018, over 3869417.32 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:55:23,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=131056.0, ans=0.125
+2024-08-26 19:55:28,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.24 vs. limit=15.0
+2024-08-26 19:55:28,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=131109.33333333334, ans=0.125
+2024-08-26 19:55:43,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=131162.66666666666, ans=0.0
+2024-08-26 19:55:50,198 INFO [train.py:1114] (0/4) Epoch 10, batch 2200, loss[loss=0.2124, simple_loss=0.2891, pruned_loss=0.05036, ctc_loss=0.08731, over 19596.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2802, pruned_loss=0.05434, ctc_loss=0.1015, over 3867611.73 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:56:22,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0
+2024-08-26 19:56:22,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.77 vs. limit=10.0
+2024-08-26 19:56:22,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=131322.66666666666, ans=0.05
+2024-08-26 19:56:26,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131376.0, ans=0.125
+2024-08-26 19:56:33,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=131376.0, ans=0.125
+2024-08-26 19:56:38,538 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.505e+02 1.694e+02 1.989e+02 3.015e+02, threshold=3.388e+02, percent-clipped=0.0
+2024-08-26 19:56:43,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=131482.66666666666, ans=0.0
+2024-08-26 19:56:44,637 INFO [train.py:1114] (0/4) Epoch 10, batch 2250, loss[loss=0.2275, simple_loss=0.2891, pruned_loss=0.0601, ctc_loss=0.1141, over 19624.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2806, pruned_loss=0.05464, ctc_loss=0.102, over 3867698.01 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:56:57,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=22.5
+2024-08-26 19:57:09,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=131589.33333333334, ans=0.05
+2024-08-26 19:57:13,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131642.66666666666, ans=0.1
+2024-08-26 19:57:17,667 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:57:20,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=131696.0, ans=0.0
+2024-08-26 19:57:27,780 INFO [train.py:1114] (0/4) Epoch 10, batch 2300, loss[loss=0.1995, simple_loss=0.266, pruned_loss=0.04889, ctc_loss=0.08791, over 19495.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2796, pruned_loss=0.0546, ctc_loss=0.1017, over 3861259.43 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:57:42,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=131802.66666666666, ans=0.125
+2024-08-26 19:57:45,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=131856.0, ans=0.125
+2024-08-26 19:57:51,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131856.0, ans=0.125
+2024-08-26 19:58:05,769 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.499e+02 1.709e+02 2.092e+02 3.241e+02, threshold=3.418e+02, percent-clipped=0.0
+2024-08-26 19:58:43,763 INFO [train.py:1114] (0/4) Epoch 10, batch 2350, loss[loss=0.2433, simple_loss=0.3077, pruned_loss=0.0657, ctc_loss=0.1189, over 19695.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.28, pruned_loss=0.05489, ctc_loss=0.1021, over 3864159.33 frames. ], batch size: 63, lr: 1.46e-02, grad_scale: 16.0
+2024-08-26 19:58:43,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132016.0, ans=0.1
+2024-08-26 19:58:50,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.15 vs. limit=22.5
+2024-08-26 19:58:51,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=132016.0, ans=0.125
+2024-08-26 19:58:54,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132069.33333333334, ans=0.0
+2024-08-26 19:59:22,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=132176.0, ans=0.0
+2024-08-26 19:59:26,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.87 vs. limit=22.5
+2024-08-26 19:59:31,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132229.33333333334, ans=0.1
+2024-08-26 19:59:32,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=132282.66666666666, ans=0.0
+2024-08-26 19:59:32,687 INFO [train.py:1114] (0/4) Epoch 10, batch 2400, loss[loss=0.2321, simple_loss=0.2924, pruned_loss=0.06303, ctc_loss=0.1147, over 19387.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.282, pruned_loss=0.05579, ctc_loss=0.1037, over 3858298.54 frames. ], batch size: 67, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:59:54,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=132389.33333333334, ans=0.0
+2024-08-26 20:00:08,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=132389.33333333334, ans=0.05
+2024-08-26 20:00:09,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=132389.33333333334, ans=0.0
+2024-08-26 20:00:24,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.05 vs. limit=10.0
+2024-08-26 20:00:25,694 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:00:36,869 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.532e+02 1.694e+02 1.900e+02 3.260e+02, threshold=3.387e+02, percent-clipped=0.0
+2024-08-26 20:00:38,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=132496.0, ans=0.0
+2024-08-26 20:00:42,855 INFO [train.py:1114] (0/4) Epoch 10, batch 2450, loss[loss=0.311, simple_loss=0.3348, pruned_loss=0.1061, ctc_loss=0.1873, over 14117.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2866, pruned_loss=0.05915, ctc_loss=0.11, over 3732667.83 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 20:00:51,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=132549.33333333334, ans=0.05
+2024-08-26 20:00:53,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=132549.33333333334, ans=0.0
+2024-08-26 20:01:02,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=132602.66666666666, ans=0.0
+2024-08-26 20:01:09,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=11.02 vs. limit=12.0
+2024-08-26 20:01:19,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.67 vs. limit=10.0
+2024-08-26 20:01:24,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=132709.33333333334, ans=0.125
+2024-08-26 20:01:31,733 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-10.pt
+2024-08-26 20:03:28,163 INFO [train.py:1114] (0/4) Epoch 11, batch 0, loss[loss=0.2145, simple_loss=0.2768, pruned_loss=0.05604, ctc_loss=0.1003, over 19429.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2768, pruned_loss=0.05604, ctc_loss=0.1003, over 19429.00 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:03:28,164 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-26 20:03:42,225 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1858, simple_loss=0.2776, pruned_loss=0.03491, ctc_loss=0.06042, over 944034.00 frames.
+2024-08-26 20:03:42,226 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB
+2024-08-26 20:04:01,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=132864.0, ans=0.125
+2024-08-26 20:04:09,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=132864.0, ans=0.125
+2024-08-26 20:04:11,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=132864.0, ans=0.5
+2024-08-26 20:04:14,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0
+2024-08-26 20:04:16,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132917.33333333334, ans=0.1
+2024-08-26 20:04:32,366 INFO [train.py:1114] (0/4) Epoch 11, batch 50, loss[loss=0.1953, simple_loss=0.2584, pruned_loss=0.04806, ctc_loss=0.08989, over 19703.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2855, pruned_loss=0.05715, ctc_loss=0.1079, over 844145.34 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:04:37,949 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.624e+02 1.801e+02 2.017e+02 3.320e+02, threshold=3.603e+02, percent-clipped=0.0
+2024-08-26 20:04:41,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133077.33333333334, ans=0.125
+2024-08-26 20:04:50,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133077.33333333334, ans=0.125
+2024-08-26 20:05:00,326 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:05:00,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=133130.66666666666, ans=0.0
+2024-08-26 20:05:10,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=133184.0, ans=0.2
+2024-08-26 20:05:10,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=133184.0, ans=0.0
+2024-08-26 20:05:21,266 INFO [train.py:1114] (0/4) Epoch 11, batch 100, loss[loss=0.1973, simple_loss=0.2723, pruned_loss=0.04428, ctc_loss=0.08413, over 19719.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.286, pruned_loss=0.05626, ctc_loss=0.106, over 1498581.10 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:05:29,996 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:05:34,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=133344.0, ans=0.0
+2024-08-26 20:05:49,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.07 vs. limit=10.0
+2024-08-26 20:05:53,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133450.66666666666, ans=0.1
+2024-08-26 20:05:55,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=133450.66666666666, ans=0.0
+2024-08-26 20:06:01,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133504.0, ans=0.125
+2024-08-26 20:06:08,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=133504.0, ans=0.125
+2024-08-26 20:06:10,908 INFO [train.py:1114] (0/4) Epoch 11, batch 150, loss[loss=0.1848, simple_loss=0.2486, pruned_loss=0.04471, ctc_loss=0.07895, over 19738.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2826, pruned_loss=0.05529, ctc_loss=0.1036, over 2028235.97 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:06:16,408 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.457e+02 1.584e+02 1.841e+02 2.561e+02, threshold=3.167e+02, percent-clipped=0.0
+2024-08-26 20:06:21,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133610.66666666666, ans=0.125
+2024-08-26 20:06:34,900 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:06:34,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=133664.0, ans=0.025
+2024-08-26 20:06:35,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=133664.0, ans=0.0
+2024-08-26 20:06:40,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=133717.33333333334, ans=0.0
+2024-08-26 20:06:51,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=133717.33333333334, ans=0.125
+2024-08-26 20:06:54,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=133770.66666666666, ans=0.5
+2024-08-26 20:06:56,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133770.66666666666, ans=0.1
+2024-08-26 20:08:06,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.97 vs. limit=15.0
+2024-08-26 20:08:08,065 INFO [train.py:1114] (0/4) Epoch 11, batch 200, loss[loss=0.2477, simple_loss=0.3093, pruned_loss=0.06725, ctc_loss=0.1291, over 18128.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2811, pruned_loss=0.05453, ctc_loss=0.1021, over 2435787.07 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:08:32,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=133930.66666666666, ans=0.125
+2024-08-26 20:08:38,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=133984.0, ans=0.1
+2024-08-26 20:09:00,078 INFO [train.py:1114] (0/4) Epoch 11, batch 250, loss[loss=0.2099, simple_loss=0.2804, pruned_loss=0.0508, ctc_loss=0.0948, over 19397.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2797, pruned_loss=0.0537, ctc_loss=0.1005, over 2755511.01 frames. ], batch size: 67, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:09:05,644 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.397e+02 1.518e+02 1.749e+02 2.921e+02, threshold=3.037e+02, percent-clipped=0.0
+2024-08-26 20:09:06,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=15.0
+2024-08-26 20:09:11,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=134144.0, ans=0.125
+2024-08-26 20:09:13,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=134144.0, ans=0.0
+2024-08-26 20:09:17,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134144.0, ans=0.125
+2024-08-26 20:09:20,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=134197.33333333334, ans=0.07
+2024-08-26 20:09:24,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=134197.33333333334, ans=0.125
+2024-08-26 20:09:40,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.58 vs. limit=15.0
+2024-08-26 20:09:43,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=134304.0, ans=0.125
+2024-08-26 20:09:43,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134304.0, ans=0.1
+2024-08-26 20:09:51,430 INFO [train.py:1114] (0/4) Epoch 11, batch 300, loss[loss=0.2428, simple_loss=0.2996, pruned_loss=0.06782, ctc_loss=0.1258, over 19497.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2789, pruned_loss=0.05357, ctc_loss=0.1002, over 2999613.45 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:10:05,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=134410.66666666666, ans=0.125
+2024-08-26 20:10:18,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.57 vs. limit=15.0
+2024-08-26 20:10:35,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.11 vs. limit=15.0
+2024-08-26 20:10:41,585 INFO [train.py:1114] (0/4) Epoch 11, batch 350, loss[loss=0.1908, simple_loss=0.256, pruned_loss=0.04686, ctc_loss=0.07982, over 19775.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2793, pruned_loss=0.05349, ctc_loss=0.09984, over 3190014.24 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:10:43,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=134624.0, ans=0.025
+2024-08-26 20:10:45,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=134624.0, ans=0.125
+2024-08-26 20:10:47,198 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.479e+02 1.637e+02 2.052e+02 3.441e+02, threshold=3.275e+02, percent-clipped=1.0
+2024-08-26 20:11:08,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134730.66666666666, ans=0.125
+2024-08-26 20:11:20,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=134784.0, ans=0.0
+2024-08-26 20:11:29,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=134837.33333333334, ans=0.2
+2024-08-26 20:11:31,284 INFO [train.py:1114] (0/4) Epoch 11, batch 400, loss[loss=0.2012, simple_loss=0.2811, pruned_loss=0.04386, ctc_loss=0.08407, over 19835.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2793, pruned_loss=0.05342, ctc_loss=0.09981, over 3342096.11 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:11:31,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134890.66666666666, ans=0.1
+2024-08-26 20:11:37,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134890.66666666666, ans=0.0
+2024-08-26 20:11:43,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.25 vs. limit=22.5
+2024-08-26 20:11:54,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=134997.33333333334, ans=0.0
+2024-08-26 20:11:58,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134997.33333333334, ans=0.125
+2024-08-26 20:12:02,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=135050.66666666666, ans=0.2
+2024-08-26 20:12:18,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0
+2024-08-26 20:12:20,804 INFO [train.py:1114] (0/4) Epoch 11, batch 450, loss[loss=0.1891, simple_loss=0.271, pruned_loss=0.03866, ctc_loss=0.07484, over 19612.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2791, pruned_loss=0.0533, ctc_loss=0.09961, over 3450879.35 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:12:29,027 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.489e+02 1.652e+02 2.008e+02 3.634e+02, threshold=3.305e+02, percent-clipped=1.0
+2024-08-26 20:12:32,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=135210.66666666666, ans=0.125
+2024-08-26 20:12:42,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=135264.0, ans=0.2
+2024-08-26 20:13:11,615 INFO [train.py:1114] (0/4) Epoch 11, batch 500, loss[loss=0.2131, simple_loss=0.2838, pruned_loss=0.05163, ctc_loss=0.09778, over 19700.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2781, pruned_loss=0.05302, ctc_loss=0.09929, over 3546311.09 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:13:20,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135477.33333333334, ans=0.1
+2024-08-26 20:13:20,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.55 vs. limit=15.0
+2024-08-26 20:13:46,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135584.0, ans=0.1
+2024-08-26 20:13:49,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=135637.33333333334, ans=0.125
+2024-08-26 20:13:58,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=135690.66666666666, ans=0.0
+2024-08-26 20:13:58,587 INFO [train.py:1114] (0/4) Epoch 11, batch 550, loss[loss=0.2136, simple_loss=0.2793, pruned_loss=0.05467, ctc_loss=0.09627, over 19187.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2786, pruned_loss=0.05332, ctc_loss=0.09973, over 3609092.84 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:13:58,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135690.66666666666, ans=0.125
+2024-08-26 20:14:04,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=135690.66666666666, ans=0.2
+2024-08-26 20:14:04,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=135690.66666666666, ans=0.2
+2024-08-26 20:14:06,857 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.449e+02 1.695e+02 2.078e+02 4.377e+02, threshold=3.390e+02, percent-clipped=1.0
+2024-08-26 20:14:08,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.07 vs. limit=15.0
+2024-08-26 20:14:09,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=135690.66666666666, ans=0.125
+2024-08-26 20:14:09,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=135690.66666666666, ans=0.125
+2024-08-26 20:14:19,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.82 vs. limit=12.0
+2024-08-26 20:14:24,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=135797.33333333334, ans=0.125
+2024-08-26 20:14:50,549 INFO [train.py:1114] (0/4) Epoch 11, batch 600, loss[loss=0.2108, simple_loss=0.2875, pruned_loss=0.04841, ctc_loss=0.09331, over 19392.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2786, pruned_loss=0.05317, ctc_loss=0.09962, over 3667260.93 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:15:04,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=136010.66666666666, ans=0.0
+2024-08-26 20:15:07,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=136010.66666666666, ans=0.0
+2024-08-26 20:15:24,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=136117.33333333334, ans=0.1
+2024-08-26 20:15:41,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=15.0
+2024-08-26 20:15:41,572 INFO [train.py:1114] (0/4) Epoch 11, batch 650, loss[loss=0.2004, simple_loss=0.2771, pruned_loss=0.04506, ctc_loss=0.0841, over 19769.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2778, pruned_loss=0.05301, ctc_loss=0.09915, over 3717034.93 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:15:44,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=136224.0, ans=0.95
+2024-08-26 20:15:47,098 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.457e+02 1.627e+02 2.058e+02 3.143e+02, threshold=3.253e+02, percent-clipped=0.0
+2024-08-26 20:16:01,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=136330.66666666666, ans=0.2
+2024-08-26 20:16:05,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=136330.66666666666, ans=0.05
+2024-08-26 20:16:27,822 INFO [train.py:1114] (0/4) Epoch 11, batch 700, loss[loss=0.2036, simple_loss=0.2747, pruned_loss=0.0483, ctc_loss=0.08998, over 19722.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2783, pruned_loss=0.05298, ctc_loss=0.09913, over 3748776.55 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:16:42,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=136544.0, ans=0.2
+2024-08-26 20:17:05,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=136650.66666666666, ans=0.125
+2024-08-26 20:17:16,604 INFO [train.py:1114] (0/4) Epoch 11, batch 750, loss[loss=0.2002, simple_loss=0.2743, pruned_loss=0.04658, ctc_loss=0.08225, over 19492.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2776, pruned_loss=0.05265, ctc_loss=0.09834, over 3775041.64 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:17:24,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.496e+02 1.727e+02 2.151e+02 3.286e+02, threshold=3.455e+02, percent-clipped=1.0
+2024-08-26 20:17:26,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.34 vs. limit=12.0
+2024-08-26 20:17:32,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=136810.66666666666, ans=0.0
+2024-08-26 20:17:40,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=136864.0, ans=0.025
+2024-08-26 20:17:51,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=136917.33333333334, ans=0.0
+2024-08-26 20:17:53,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=136917.33333333334, ans=0.125
+2024-08-26 20:18:08,144 INFO [train.py:1114] (0/4) Epoch 11, batch 800, loss[loss=0.1915, simple_loss=0.2512, pruned_loss=0.04856, ctc_loss=0.08692, over 19826.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2778, pruned_loss=0.05278, ctc_loss=0.0986, over 3797332.69 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:18:09,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=137024.0, ans=0.125
+2024-08-26 20:18:19,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=15.0
+2024-08-26 20:18:19,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.37 vs. limit=10.0
+2024-08-26 20:18:32,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=15.0
+2024-08-26 20:18:50,325 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=15.0
+2024-08-26 20:19:00,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.09 vs. limit=22.5
+2024-08-26 20:19:09,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=137237.33333333334, ans=0.0
+2024-08-26 20:19:27,349 INFO [train.py:1114] (0/4) Epoch 11, batch 850, loss[loss=0.2255, simple_loss=0.293, pruned_loss=0.05762, ctc_loss=0.1069, over 19636.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2779, pruned_loss=0.05289, ctc_loss=0.09906, over 3815886.78 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:19:35,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137290.66666666666, ans=0.1
+2024-08-26 20:19:39,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=137290.66666666666, ans=0.95
+2024-08-26 20:19:39,839 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.453e+02 1.601e+02 1.920e+02 5.497e+02, threshold=3.202e+02, percent-clipped=1.0
+2024-08-26 20:19:47,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=137344.0, ans=0.125
+2024-08-26 20:19:50,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=137344.0, ans=0.0
+2024-08-26 20:19:51,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=137344.0, ans=0.125
+2024-08-26 20:19:58,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=137344.0, ans=0.0
+2024-08-26 20:20:05,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=137397.33333333334, ans=0.125
+2024-08-26 20:20:25,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=137450.66666666666, ans=0.025
+2024-08-26 20:20:44,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=137504.0, ans=0.025
+2024-08-26 20:20:45,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=137504.0, ans=0.125
+2024-08-26 20:20:53,077 INFO [train.py:1114] (0/4) Epoch 11, batch 900, loss[loss=0.1995, simple_loss=0.2605, pruned_loss=0.04966, ctc_loss=0.09763, over 19416.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2787, pruned_loss=0.05357, ctc_loss=0.1002, over 3820477.17 frames. ], batch size: 48, lr: 1.37e-02, grad_scale: 16.0
+2024-08-26 20:21:07,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=137610.66666666666, ans=10.0
+2024-08-26 20:21:26,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=137664.0, ans=0.025
+2024-08-26 20:21:48,815 INFO [train.py:1114] (0/4) Epoch 11, batch 950, loss[loss=0.2078, simple_loss=0.2712, pruned_loss=0.05265, ctc_loss=0.09788, over 19507.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2787, pruned_loss=0.0536, ctc_loss=0.1001, over 3820471.57 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 16.0
+2024-08-26 20:21:50,885 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:21:52,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=137824.0, ans=0.0
+2024-08-26 20:21:52,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=137824.0, ans=0.5
+2024-08-26 20:21:55,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.468e+02 1.744e+02 2.017e+02 3.816e+02, threshold=3.488e+02, percent-clipped=2.0
+2024-08-26 20:22:14,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137930.66666666666, ans=0.1
+2024-08-26 20:22:16,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.38 vs. limit=22.5
+2024-08-26 20:22:27,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=137984.0, ans=0.2
+2024-08-26 20:22:40,793 INFO [train.py:1114] (0/4) Epoch 11, batch 1000, loss[loss=0.1903, simple_loss=0.2622, pruned_loss=0.0428, ctc_loss=0.08225, over 19840.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.279, pruned_loss=0.05373, ctc_loss=0.1003, over 3815736.60 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0
+2024-08-26 20:22:51,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=138144.0, ans=0.0
+2024-08-26 20:23:01,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138197.33333333334, ans=0.1
+2024-08-26 20:23:03,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.04 vs. limit=15.0
+2024-08-26 20:23:22,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=138304.0, ans=0.125
+2024-08-26 20:23:28,374 INFO [train.py:1114] (0/4) Epoch 11, batch 1050, loss[loss=0.2241, simple_loss=0.2919, pruned_loss=0.05677, ctc_loss=0.1067, over 19837.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2781, pruned_loss=0.05323, ctc_loss=0.09934, over 3821539.10 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 16.0
+2024-08-26 20:23:29,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.98 vs. limit=22.5
+2024-08-26 20:23:34,918 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.363e+02 1.534e+02 1.839e+02 4.578e+02, threshold=3.069e+02, percent-clipped=1.0
+2024-08-26 20:24:07,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=138357.33333333334, ans=0.0
+2024-08-26 20:24:09,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=138410.66666666666, ans=0.2
+2024-08-26 20:24:33,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.52 vs.
limit=15.0 +2024-08-26 20:25:00,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=138570.66666666666, ans=0.2 +2024-08-26 20:25:03,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.01 vs. limit=10.0 +2024-08-26 20:25:07,611 INFO [train.py:1114] (0/4) Epoch 11, batch 1100, loss[loss=0.1984, simple_loss=0.2671, pruned_loss=0.04693, ctc_loss=0.08996, over 19566.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2779, pruned_loss=0.05276, ctc_loss=0.09858, over 3828733.98 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:25:13,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138624.0, ans=0.0 +2024-08-26 20:25:14,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=12.0 +2024-08-26 20:25:25,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=138677.33333333334, ans=0.125 +2024-08-26 20:25:26,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=138730.66666666666, ans=0.05 +2024-08-26 20:25:30,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=138730.66666666666, ans=0.2 +2024-08-26 20:25:35,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138784.0, ans=0.1 +2024-08-26 20:25:56,940 INFO [train.py:1114] (0/4) Epoch 11, batch 1150, loss[loss=0.1878, simple_loss=0.2645, pruned_loss=0.03983, ctc_loss=0.07875, over 19585.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2777, pruned_loss=0.05271, ctc_loss=0.09839, over 3827621.95 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:26:03,591 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.470e+02 1.661e+02 1.952e+02 3.516e+02, threshold=3.323e+02, percent-clipped=2.0 +2024-08-26 20:26:03,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138890.66666666666, ans=0.125 +2024-08-26 20:26:08,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=138944.0, ans=0.0 +2024-08-26 20:26:30,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=139050.66666666666, ans=0.07 +2024-08-26 20:26:31,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=139050.66666666666, ans=0.0 +2024-08-26 20:26:33,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.73 vs. limit=10.0 +2024-08-26 20:26:45,573 INFO [train.py:1114] (0/4) Epoch 11, batch 1200, loss[loss=0.2326, simple_loss=0.3064, pruned_loss=0.05794, ctc_loss=0.1073, over 19840.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2787, pruned_loss=0.05294, ctc_loss=0.09878, over 3823615.73 frames. 
], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:26:49,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139157.33333333334, ans=0.1 +2024-08-26 20:26:56,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=15.0 +2024-08-26 20:27:00,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=139210.66666666666, ans=0.125 +2024-08-26 20:27:03,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=139264.0, ans=0.125 +2024-08-26 20:27:31,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=139370.66666666666, ans=0.1 +2024-08-26 20:27:43,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-08-26 20:27:49,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.49 vs. limit=15.0 +2024-08-26 20:28:18,699 INFO [train.py:1114] (0/4) Epoch 11, batch 1250, loss[loss=0.215, simple_loss=0.2792, pruned_loss=0.05355, ctc_loss=0.1089, over 19520.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2786, pruned_loss=0.05272, ctc_loss=0.09841, over 3842516.75 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:28:21,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=139424.0, ans=0.0 +2024-08-26 20:28:27,597 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.425e+02 1.545e+02 1.729e+02 3.064e+02, threshold=3.089e+02, percent-clipped=0.0 +2024-08-26 20:28:45,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.19 vs. limit=15.0 +2024-08-26 20:28:53,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.38 vs. limit=12.0 +2024-08-26 20:29:01,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=139637.33333333334, ans=0.2 +2024-08-26 20:29:12,955 INFO [train.py:1114] (0/4) Epoch 11, batch 1300, loss[loss=0.2193, simple_loss=0.2883, pruned_loss=0.0548, ctc_loss=0.1016, over 18823.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2779, pruned_loss=0.05244, ctc_loss=0.09801, over 3846394.09 frames. 
], batch size: 76, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:29:18,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=139690.66666666666, ans=0.125 +2024-08-26 20:29:29,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=139744.0, ans=0.125 +2024-08-26 20:29:32,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=139744.0, ans=0.0 +2024-08-26 20:33:36,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=139797.33333333334, ans=0.0 +2024-08-26 20:35:35,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=139850.66666666666, ans=0.0 +2024-08-26 20:35:43,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=139904.0, ans=0.125 +2024-08-26 20:35:46,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139904.0, ans=0.1 +2024-08-26 20:35:47,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139904.0, ans=0.1 +2024-08-26 20:35:52,056 INFO [train.py:1114] (0/4) Epoch 11, batch 1350, loss[loss=0.2055, simple_loss=0.279, pruned_loss=0.04754, ctc_loss=0.09236, over 19769.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2775, pruned_loss=0.05219, ctc_loss=0.09755, over 3857051.96 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:35:58,559 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.441e+02 1.644e+02 1.919e+02 3.174e+02, threshold=3.287e+02, percent-clipped=1.0 +2024-08-26 20:36:00,939 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.43 vs. limit=15.0 +2024-08-26 20:36:25,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140117.33333333334, ans=0.125 +2024-08-26 20:36:29,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=140117.33333333334, ans=0.125 +2024-08-26 20:36:29,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0 +2024-08-26 20:36:41,172 INFO [train.py:1114] (0/4) Epoch 11, batch 1400, loss[loss=0.1921, simple_loss=0.2547, pruned_loss=0.04691, ctc_loss=0.0893, over 19656.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2774, pruned_loss=0.05216, ctc_loss=0.09727, over 3863704.71 frames. 
], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:37:27,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140330.66666666666, ans=0.0 +2024-08-26 20:37:32,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=140330.66666666666, ans=0.025 +2024-08-26 20:37:50,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140437.33333333334, ans=0.125 +2024-08-26 20:38:01,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140490.66666666666, ans=0.1 +2024-08-26 20:38:01,757 INFO [train.py:1114] (0/4) Epoch 11, batch 1450, loss[loss=0.2385, simple_loss=0.2931, pruned_loss=0.06642, ctc_loss=0.1277, over 19695.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2783, pruned_loss=0.0528, ctc_loss=0.09849, over 3861968.91 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:38:04,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=15.0 +2024-08-26 20:38:08,097 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.486e+02 1.636e+02 1.926e+02 3.321e+02, threshold=3.272e+02, percent-clipped=1.0 +2024-08-26 20:38:10,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=140544.0, ans=0.125 +2024-08-26 20:38:16,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=140544.0, ans=0.125 +2024-08-26 20:38:29,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140597.33333333334, ans=0.125 +2024-08-26 20:38:32,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=12.0 +2024-08-26 20:38:33,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=140650.66666666666, ans=0.05 +2024-08-26 20:38:35,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.46 vs. limit=15.0 +2024-08-26 20:38:40,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=140704.0, ans=0.0 +2024-08-26 20:38:43,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140704.0, ans=0.0 +2024-08-26 20:38:50,495 INFO [train.py:1114] (0/4) Epoch 11, batch 1500, loss[loss=0.2159, simple_loss=0.2928, pruned_loss=0.04984, ctc_loss=0.09854, over 19582.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2787, pruned_loss=0.05291, ctc_loss=0.09882, over 3860681.04 frames. 
], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:03,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140810.66666666666, ans=0.125 +2024-08-26 20:39:10,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=140864.0, ans=0.2 +2024-08-26 20:39:10,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-08-26 20:39:18,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=15.0 +2024-08-26 20:39:39,002 INFO [train.py:1114] (0/4) Epoch 11, batch 1550, loss[loss=0.2275, simple_loss=0.2888, pruned_loss=0.06045, ctc_loss=0.1131, over 19578.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2789, pruned_loss=0.05332, ctc_loss=0.09957, over 3845422.91 frames. ], batch size: 60, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:41,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141024.0, ans=0.125 +2024-08-26 20:39:45,238 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.401e+02 1.612e+02 1.919e+02 3.103e+02, threshold=3.225e+02, percent-clipped=0.0 +2024-08-26 20:39:49,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=141077.33333333334, ans=0.2 +2024-08-26 20:40:15,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.98 vs. limit=15.0 +2024-08-26 20:40:21,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141237.33333333334, ans=0.1 +2024-08-26 20:40:29,937 INFO [train.py:1114] (0/4) Epoch 11, batch 1600, loss[loss=0.2199, simple_loss=0.2926, pruned_loss=0.05331, ctc_loss=0.1015, over 19843.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2787, pruned_loss=0.05345, ctc_loss=0.0998, over 3834287.04 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:40:40,698 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:40:49,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=141397.33333333334, ans=0.1 +2024-08-26 20:41:18,740 INFO [train.py:1114] (0/4) Epoch 11, batch 1650, loss[loss=0.2245, simple_loss=0.2897, pruned_loss=0.05903, ctc_loss=0.1033, over 19656.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2789, pruned_loss=0.05378, ctc_loss=0.1003, over 3831344.31 frames. 
], batch size: 59, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:41:25,307 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.523e+02 1.726e+02 1.964e+02 3.202e+02, threshold=3.451e+02, percent-clipped=0.0 +2024-08-26 20:41:27,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141610.66666666666, ans=0.125 +2024-08-26 20:41:29,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=141610.66666666666, ans=0.0 +2024-08-26 20:41:36,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.15 vs. limit=10.0 +2024-08-26 20:41:36,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-08-26 20:41:43,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.54 vs. limit=6.0 +2024-08-26 20:41:44,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0 +2024-08-26 20:42:00,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=141770.66666666666, ans=15.0 +2024-08-26 20:42:07,201 INFO [train.py:1114] (0/4) Epoch 11, batch 1700, loss[loss=0.1848, simple_loss=0.2442, pruned_loss=0.04561, ctc_loss=0.0854, over 19650.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2779, pruned_loss=0.05315, ctc_loss=0.09915, over 3845513.70 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:42:16,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.52 vs. limit=15.0 +2024-08-26 20:42:16,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=141824.0, ans=0.125 +2024-08-26 20:42:18,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=141824.0, ans=0.125 +2024-08-26 20:42:24,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.95 vs. limit=15.0 +2024-08-26 20:42:36,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0 +2024-08-26 20:42:37,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=141930.66666666666, ans=0.125 +2024-08-26 20:42:46,172 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:43:00,060 INFO [train.py:1114] (0/4) Epoch 11, batch 1750, loss[loss=0.1747, simple_loss=0.2421, pruned_loss=0.03829, ctc_loss=0.07687, over 19641.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2777, pruned_loss=0.05309, ctc_loss=0.09898, over 3851280.81 frames. 
], batch size: 45, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:01,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=142090.66666666666, ans=0.0 +2024-08-26 20:43:06,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.441e+02 1.591e+02 1.781e+02 2.526e+02, threshold=3.183e+02, percent-clipped=0.0 +2024-08-26 20:43:06,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=142090.66666666666, ans=0.1 +2024-08-26 20:43:17,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142197.33333333334, ans=0.1 +2024-08-26 20:43:22,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=142197.33333333334, ans=0.125 +2024-08-26 20:43:35,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142250.66666666666, ans=0.125 +2024-08-26 20:43:46,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.36 vs. limit=22.5 +2024-08-26 20:43:50,387 INFO [train.py:1114] (0/4) Epoch 11, batch 1800, loss[loss=0.209, simple_loss=0.2846, pruned_loss=0.04885, ctc_loss=0.08933, over 19620.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2777, pruned_loss=0.05288, ctc_loss=0.09882, over 3852829.33 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:54,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=142357.33333333334, ans=0.0 +2024-08-26 20:43:55,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=142357.33333333334, ans=0.09899494936611666 +2024-08-26 20:43:56,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0 +2024-08-26 20:44:03,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=142410.66666666666, ans=0.125 +2024-08-26 20:44:07,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142410.66666666666, ans=0.125 +2024-08-26 20:44:09,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142464.0, ans=0.0 +2024-08-26 20:44:17,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=142464.0, ans=0.125 +2024-08-26 20:44:31,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=142570.66666666666, ans=0.125 +2024-08-26 20:44:40,950 INFO [train.py:1114] (0/4) Epoch 11, batch 1850, loss[loss=0.2044, simple_loss=0.278, pruned_loss=0.04701, ctc_loss=0.09219, over 19558.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2774, pruned_loss=0.05261, ctc_loss=0.09844, over 3855883.92 frames. 
], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:44:47,995 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.442e+02 1.639e+02 2.043e+02 4.343e+02, threshold=3.277e+02, percent-clipped=6.0 +2024-08-26 20:44:49,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.66 vs. limit=10.0 +2024-08-26 20:44:51,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.65 vs. limit=15.0 +2024-08-26 20:44:58,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=142677.33333333334, ans=0.0 +2024-08-26 20:45:29,161 INFO [train.py:1114] (0/4) Epoch 11, batch 1900, loss[loss=0.2232, simple_loss=0.2938, pruned_loss=0.05629, ctc_loss=0.1003, over 19643.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2777, pruned_loss=0.05262, ctc_loss=0.09839, over 3860584.81 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:45:44,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=142944.0, ans=0.125 +2024-08-26 20:45:44,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=142944.0, ans=0.125 +2024-08-26 20:45:49,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=142997.33333333334, ans=0.07 +2024-08-26 20:45:55,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=142997.33333333334, ans=0.025 +2024-08-26 20:46:23,316 INFO [train.py:1114] (0/4) Epoch 11, batch 1950, loss[loss=0.2068, simple_loss=0.2775, pruned_loss=0.04949, ctc_loss=0.09283, over 19582.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2792, pruned_loss=0.05287, ctc_loss=0.09869, over 3869804.74 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:47:24,646 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.500e+02 1.631e+02 1.894e+02 3.317e+02, threshold=3.262e+02, percent-clipped=1.0 +2024-08-26 20:48:00,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.40 vs. limit=15.0 +2024-08-26 20:48:08,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143317.33333333334, ans=0.125 +2024-08-26 20:48:22,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=143317.33333333334, ans=0.0 +2024-08-26 20:48:24,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=143370.66666666666, ans=0.0 +2024-08-26 20:48:33,084 INFO [train.py:1114] (0/4) Epoch 11, batch 2000, loss[loss=0.1976, simple_loss=0.2544, pruned_loss=0.0512, ctc_loss=0.09612, over 19676.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2793, pruned_loss=0.05321, ctc_loss=0.09934, over 3855510.50 frames. 
], batch size: 45, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:48:40,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=143424.0, ans=0.04949747468305833 +2024-08-26 20:48:51,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143530.66666666666, ans=0.0 +2024-08-26 20:48:52,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=143530.66666666666, ans=0.125 +2024-08-26 20:49:00,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=143584.0, ans=0.125 +2024-08-26 20:49:02,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=143584.0, ans=0.0 +2024-08-26 20:49:03,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=143584.0, ans=0.125 +2024-08-26 20:49:39,121 INFO [train.py:1114] (0/4) Epoch 11, batch 2050, loss[loss=0.208, simple_loss=0.2694, pruned_loss=0.054, ctc_loss=0.0965, over 19730.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2788, pruned_loss=0.05324, ctc_loss=0.09929, over 3852352.43 frames. ], batch size: 47, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:49:47,245 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.448e+02 1.585e+02 1.933e+02 3.153e+02, threshold=3.170e+02, percent-clipped=0.0 +2024-08-26 20:49:58,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=143744.0, ans=0.0 +2024-08-26 20:50:00,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.03 vs. limit=6.0 +2024-08-26 20:50:01,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=143744.0, ans=0.2 +2024-08-26 20:50:20,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=143850.66666666666, ans=0.0 +2024-08-26 20:50:22,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=143850.66666666666, ans=0.0 +2024-08-26 20:50:33,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=143904.0, ans=22.5 +2024-08-26 20:50:37,969 INFO [train.py:1114] (0/4) Epoch 11, batch 2100, loss[loss=0.2076, simple_loss=0.2704, pruned_loss=0.05271, ctc_loss=0.09822, over 19789.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2784, pruned_loss=0.05306, ctc_loss=0.09897, over 3859782.18 frames. 
], batch size: 54, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:50:38,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143957.33333333334, ans=0.125 +2024-08-26 20:50:39,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143957.33333333334, ans=0.125 +2024-08-26 20:50:45,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=143957.33333333334, ans=0.2 +2024-08-26 20:50:47,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=144010.66666666666, ans=0.0 +2024-08-26 20:50:54,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.34 vs. limit=10.0 +2024-08-26 20:50:55,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144064.0, ans=0.125 +2024-08-26 20:50:56,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.66 vs. limit=15.0 +2024-08-26 20:51:00,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.27 vs. limit=15.0 +2024-08-26 20:51:02,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=144064.0, ans=0.125 +2024-08-26 20:51:06,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=144117.33333333334, ans=0.125 +2024-08-26 20:51:09,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144117.33333333334, ans=0.1 +2024-08-26 20:51:11,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=144117.33333333334, ans=0.0 +2024-08-26 20:51:12,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=144170.66666666666, ans=0.0 +2024-08-26 20:51:22,999 INFO [train.py:1114] (0/4) Epoch 11, batch 2150, loss[loss=0.2042, simple_loss=0.2716, pruned_loss=0.05033, ctc_loss=0.09029, over 19591.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2779, pruned_loss=0.05267, ctc_loss=0.09824, over 3869376.40 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:51:30,822 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.485e+02 1.672e+02 2.037e+02 4.338e+02, threshold=3.345e+02, percent-clipped=7.0 +2024-08-26 20:51:47,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=144330.66666666666, ans=0.0 +2024-08-26 20:51:55,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=144384.0, ans=0.125 +2024-08-26 20:51:58,401 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.11 vs. 
limit=15.0 +2024-08-26 20:52:02,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=144437.33333333334, ans=0.0 +2024-08-26 20:52:03,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=144437.33333333334, ans=0.07 +2024-08-26 20:52:03,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.55 vs. limit=15.0 +2024-08-26 20:52:04,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=144437.33333333334, ans=0.0 +2024-08-26 20:52:06,899 INFO [train.py:1114] (0/4) Epoch 11, batch 2200, loss[loss=0.2243, simple_loss=0.292, pruned_loss=0.05731, ctc_loss=0.105, over 19588.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2774, pruned_loss=0.05233, ctc_loss=0.09767, over 3869041.31 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:52:10,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=144490.66666666666, ans=0.125 +2024-08-26 20:52:19,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-08-26 20:52:22,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=144544.0, ans=0.125 +2024-08-26 20:52:23,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=144597.33333333334, ans=0.5 +2024-08-26 20:52:29,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.62 vs. limit=10.0 +2024-08-26 20:52:39,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=144650.66666666666, ans=0.0 +2024-08-26 20:52:50,853 INFO [train.py:1114] (0/4) Epoch 11, batch 2250, loss[loss=0.2121, simple_loss=0.2806, pruned_loss=0.05209, ctc_loss=0.09851, over 19619.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2777, pruned_loss=0.05245, ctc_loss=0.09779, over 3868584.66 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:52:58,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.461e+02 1.628e+02 1.934e+02 8.673e+02, threshold=3.256e+02, percent-clipped=2.0 +2024-08-26 20:53:02,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.45 vs. limit=22.5 +2024-08-26 20:53:03,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=144810.66666666666, ans=0.125 +2024-08-26 20:53:07,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144864.0, ans=0.125 +2024-08-26 20:53:12,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. 
limit=6.0 +2024-08-26 20:53:14,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=144864.0, ans=0.07 +2024-08-26 20:53:22,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144917.33333333334, ans=0.1 +2024-08-26 20:53:30,705 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-08-26 20:53:35,365 INFO [train.py:1114] (0/4) Epoch 11, batch 2300, loss[loss=0.1876, simple_loss=0.2569, pruned_loss=0.04336, ctc_loss=0.07903, over 19508.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2768, pruned_loss=0.05243, ctc_loss=0.09779, over 3861329.49 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:53:42,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145024.0, ans=0.1 +2024-08-26 20:53:57,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=145130.66666666666, ans=0.0 +2024-08-26 20:54:05,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=145184.0, ans=0.125 +2024-08-26 20:54:17,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. limit=10.0 +2024-08-26 20:54:20,122 INFO [train.py:1114] (0/4) Epoch 11, batch 2350, loss[loss=0.2356, simple_loss=0.3023, pruned_loss=0.06281, ctc_loss=0.1083, over 19678.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2765, pruned_loss=0.05242, ctc_loss=0.09765, over 3863696.93 frames. ], batch size: 63, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:54:22,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=145290.66666666666, ans=0.0 +2024-08-26 20:54:24,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=145290.66666666666, ans=0.125 +2024-08-26 20:54:24,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=145290.66666666666, ans=0.025 +2024-08-26 20:54:28,773 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.482e+02 1.673e+02 1.901e+02 2.829e+02, threshold=3.345e+02, percent-clipped=0.0 +2024-08-26 20:54:31,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.19 vs. 
limit=15.0 +2024-08-26 20:54:34,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=145344.0, ans=0.95 +2024-08-26 20:54:45,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=145397.33333333334, ans=0.0 +2024-08-26 20:54:46,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=145450.66666666666, ans=0.1 +2024-08-26 20:55:01,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145504.0, ans=0.1 +2024-08-26 20:55:04,264 INFO [train.py:1114] (0/4) Epoch 11, batch 2400, loss[loss=0.2675, simple_loss=0.3267, pruned_loss=0.0774, ctc_loss=0.1339, over 19327.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2789, pruned_loss=0.05349, ctc_loss=0.09946, over 3858441.92 frames. ], batch size: 67, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:06,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145557.33333333334, ans=0.1 +2024-08-26 20:55:09,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=145557.33333333334, ans=0.5 +2024-08-26 20:55:25,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145664.0, ans=0.125 +2024-08-26 20:55:27,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=145664.0, ans=0.125 +2024-08-26 20:55:27,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=145664.0, ans=0.2 +2024-08-26 20:55:49,239 INFO [train.py:1114] (0/4) Epoch 11, batch 2450, loss[loss=0.3206, simple_loss=0.3447, pruned_loss=0.1078, ctc_loss=0.2024, over 13466.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.283, pruned_loss=0.05649, ctc_loss=0.1053, over 3734133.43 frames. ], batch size: 141, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:56,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=145824.0, ans=0.0 +2024-08-26 20:55:58,190 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.577e+02 1.748e+02 1.957e+02 3.323e+02, threshold=3.496e+02, percent-clipped=0.0 +2024-08-26 20:56:22,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=145984.0, ans=0.125 +2024-08-26 20:56:24,354 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-11.pt +2024-08-26 21:01:32,163 INFO [train.py:1114] (0/4) Epoch 12, batch 0, loss[loss=0.1915, simple_loss=0.2585, pruned_loss=0.0453, ctc_loss=0.08483, over 19444.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2585, pruned_loss=0.0453, ctc_loss=0.08483, over 19444.00 frames. 
], batch size: 48, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:01:32,164 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 21:01:49,208 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8352, 5.0595, 5.6308, 5.3060], device='cuda:0') +2024-08-26 21:01:52,246 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.1812, simple_loss=0.274, pruned_loss=0.03284, ctc_loss=0.05683, over 944034.00 frames. +2024-08-26 21:01:52,247 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-26 21:01:52,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=146032.0, ans=0.09899494936611666 +2024-08-26 21:02:26,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=146138.66666666666, ans=0.2 +2024-08-26 21:02:37,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=146192.0, ans=0.125 +2024-08-26 21:02:42,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=146245.33333333334, ans=0.0 +2024-08-26 21:02:50,474 INFO [train.py:1114] (0/4) Epoch 12, batch 50, loss[loss=0.1897, simple_loss=0.2481, pruned_loss=0.04839, ctc_loss=0.08598, over 19701.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2809, pruned_loss=0.05423, ctc_loss=0.1023, over 843793.79 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:03:02,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146352.0, ans=0.0 +2024-08-26 21:03:06,336 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=15.0 +2024-08-26 21:03:11,152 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.556e+02 1.742e+02 1.990e+02 3.045e+02, threshold=3.484e+02, percent-clipped=0.0 +2024-08-26 21:03:30,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=146512.0, ans=0.0 +2024-08-26 21:04:07,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=146512.0, ans=0.125 +2024-08-26 21:04:10,876 INFO [train.py:1114] (0/4) Epoch 12, batch 100, loss[loss=0.1905, simple_loss=0.2609, pruned_loss=0.04402, ctc_loss=0.08002, over 19716.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2812, pruned_loss=0.05331, ctc_loss=0.1001, over 1498987.01 frames. 
], batch size: 51, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:04:11,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=146565.33333333334, ans=0.0 +2024-08-26 21:04:15,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=146565.33333333334, ans=0.125 +2024-08-26 21:04:42,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=146672.0, ans=0.0 +2024-08-26 21:04:53,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146725.33333333334, ans=0.1 +2024-08-26 21:04:54,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=146725.33333333334, ans=0.5 +2024-08-26 21:05:04,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-08-26 21:05:05,123 INFO [train.py:1114] (0/4) Epoch 12, batch 150, loss[loss=0.1803, simple_loss=0.2523, pruned_loss=0.03975, ctc_loss=0.07207, over 19734.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2781, pruned_loss=0.05185, ctc_loss=0.09722, over 2027803.58 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:25,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.421e+02 1.535e+02 1.745e+02 2.429e+02, threshold=3.070e+02, percent-clipped=0.0 +2024-08-26 21:05:29,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=146938.66666666666, ans=0.2 +2024-08-26 21:05:34,263 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:05:34,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=146992.0, ans=0.0 +2024-08-26 21:05:36,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=146992.0, ans=0.125 +2024-08-26 21:05:38,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.91 vs. limit=10.0 +2024-08-26 21:05:52,006 INFO [train.py:1114] (0/4) Epoch 12, batch 200, loss[loss=0.2261, simple_loss=0.2913, pruned_loss=0.05862, ctc_loss=0.1093, over 18565.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2765, pruned_loss=0.05127, ctc_loss=0.09627, over 2435789.11 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:54,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=147098.66666666666, ans=0.125 +2024-08-26 21:06:02,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147152.0, ans=0.1 +2024-08-26 21:06:22,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.87 vs. 
limit=6.0
+2024-08-26 21:06:33,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=147312.0, ans=0.0
+2024-08-26 21:06:35,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=147312.0, ans=0.125
+2024-08-26 21:06:35,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=147312.0, ans=0.015
+2024-08-26 21:06:38,645 INFO [train.py:1114] (0/4) Epoch 12, batch 250, loss[loss=0.2059, simple_loss=0.285, pruned_loss=0.04626, ctc_loss=0.08554, over 19348.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2765, pruned_loss=0.05069, ctc_loss=0.09543, over 2755129.31 frames. ], batch size: 67, lr: 1.27e-02, grad_scale: 16.0
+2024-08-26 21:06:59,406 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.414e+02 1.495e+02 1.680e+02 4.024e+02, threshold=2.991e+02, percent-clipped=1.0
+2024-08-26 21:06:59,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147472.0, ans=0.1
+2024-08-26 21:07:00,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=147472.0, ans=0.2
+2024-08-26 21:07:07,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147525.33333333334, ans=0.125
+2024-08-26 21:07:20,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.78 vs. limit=15.0
+2024-08-26 21:07:26,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=147578.66666666666, ans=0.2
+2024-08-26 21:07:34,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.39 vs. limit=15.0
+2024-08-26 21:07:35,050 INFO [train.py:1114] (0/4) Epoch 12, batch 300, loss[loss=0.2107, simple_loss=0.282, pruned_loss=0.05093, ctc_loss=0.09364, over 19533.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2758, pruned_loss=0.05049, ctc_loss=0.09483, over 3000376.94 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 16.0
+2024-08-26 21:07:38,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147632.0, ans=0.125
+2024-08-26 21:08:30,184 INFO [train.py:1114] (0/4) Epoch 12, batch 350, loss[loss=0.1884, simple_loss=0.2508, pruned_loss=0.04744, ctc_loss=0.0778, over 19738.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.276, pruned_loss=0.05051, ctc_loss=0.09472, over 3190558.33 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 16.0
+2024-08-26 21:08:36,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=15.0
+2024-08-26 21:12:05,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.26 vs. limit=15.0
+2024-08-26 21:12:10,843 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.537e+02 1.863e+02 2.287e+02 4.040e+02, threshold=3.725e+02, percent-clipped=5.0
+2024-08-26 21:12:15,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten.whitening_limit, batch_count=148005.33333333334, ans=15.0
+2024-08-26 21:12:18,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148058.66666666666, ans=0.1
+2024-08-26 21:12:25,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=148058.66666666666, ans=0.0
+2024-08-26 21:12:26,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=148112.0, ans=0.125
+2024-08-26 21:13:47,513 INFO [train.py:1114] (0/4) Epoch 12, batch 400, loss[loss=0.2198, simple_loss=0.2896, pruned_loss=0.05441, ctc_loss=0.1028, over 19522.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2756, pruned_loss=0.0505, ctc_loss=0.09465, over 3341150.43 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0
+2024-08-26 21:13:47,739 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:14:15,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148325.33333333334, ans=0.125
+2024-08-26 21:14:15,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=148325.33333333334, ans=0.025
+2024-08-26 21:14:22,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=148325.33333333334, ans=0.0
+2024-08-26 21:14:34,560 INFO [train.py:1114] (0/4) Epoch 12, batch 450, loss[loss=0.212, simple_loss=0.2852, pruned_loss=0.04891, ctc_loss=0.1023, over 19609.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2761, pruned_loss=0.0509, ctc_loss=0.09549, over 3450543.39 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:14:53,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148485.33333333334, ans=0.125
+2024-08-26 21:15:05,746 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.502e+02 1.695e+02 2.071e+02 2.894e+02, threshold=3.390e+02, percent-clipped=0.0
+2024-08-26 21:15:16,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.48 vs. limit=22.5
+2024-08-26 21:15:21,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.94 vs. limit=15.0
+2024-08-26 21:15:24,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148645.33333333334, ans=0.0
+2024-08-26 21:15:28,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=148645.33333333334, ans=0.0
+2024-08-26 21:15:31,500 INFO [train.py:1114] (0/4) Epoch 12, batch 500, loss[loss=0.22, simple_loss=0.2921, pruned_loss=0.05471, ctc_loss=0.09605, over 19667.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.275, pruned_loss=0.05042, ctc_loss=0.09446, over 3546574.60 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:15:34,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=148698.66666666666, ans=0.0
+2024-08-26 21:15:54,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=148805.33333333334, ans=0.125
+2024-08-26 21:16:03,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.94 vs. limit=15.0
+2024-08-26 21:16:06,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=12.0
+2024-08-26 21:16:10,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=148912.0, ans=0.2
+2024-08-26 21:16:19,315 INFO [train.py:1114] (0/4) Epoch 12, batch 550, loss[loss=0.2243, simple_loss=0.294, pruned_loss=0.05534, ctc_loss=0.1097, over 19334.00 frames. ], tot_loss[loss=0.207, simple_loss=0.275, pruned_loss=0.05054, ctc_loss=0.09481, over 3608185.56 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:16:29,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149018.66666666666, ans=0.125
+2024-08-26 21:16:39,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=149072.0, ans=0.0
+2024-08-26 21:16:39,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.448e+02 1.617e+02 1.906e+02 3.977e+02, threshold=3.234e+02, percent-clipped=1.0
+2024-08-26 21:16:40,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149072.0, ans=0.125
+2024-08-26 21:16:56,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.25 vs. limit=10.0
+2024-08-26 21:17:36,374 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0
+2024-08-26 21:17:47,329 INFO [train.py:1114] (0/4) Epoch 12, batch 600, loss[loss=0.2267, simple_loss=0.2966, pruned_loss=0.05804, ctc_loss=0.102, over 19414.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2754, pruned_loss=0.0509, ctc_loss=0.09549, over 3665590.69 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:17:53,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.48 vs. limit=15.0
+2024-08-26 21:17:54,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.95 vs. limit=15.0
+2024-08-26 21:18:01,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=149285.33333333334, ans=0.025
+2024-08-26 21:18:04,234 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-28000.pt
+2024-08-26 21:18:34,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=149392.0, ans=0.2
+2024-08-26 21:18:40,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.64 vs. limit=15.0
+2024-08-26 21:18:41,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149445.33333333334, ans=0.1
+2024-08-26 21:18:43,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0
+2024-08-26 21:18:44,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=149445.33333333334, ans=0.0
+2024-08-26 21:18:46,466 INFO [train.py:1114] (0/4) Epoch 12, batch 650, loss[loss=0.201, simple_loss=0.2773, pruned_loss=0.04567, ctc_loss=0.08356, over 19757.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2745, pruned_loss=0.05042, ctc_loss=0.09451, over 3715431.52 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:18:58,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149498.66666666666, ans=0.1
+2024-08-26 21:19:01,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=149498.66666666666, ans=0.125
+2024-08-26 21:19:06,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149552.0, ans=0.125
+2024-08-26 21:19:16,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.436e+02 1.583e+02 1.844e+02 2.674e+02, threshold=3.165e+02, percent-clipped=0.0
+2024-08-26 21:19:29,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149658.66666666666, ans=0.1
+2024-08-26 21:19:36,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=149712.0, ans=0.2
+2024-08-26 21:19:39,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0
+2024-08-26 21:19:41,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=149712.0, ans=0.2
+2024-08-26 21:19:44,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=149765.33333333334, ans=0.02
+2024-08-26 21:19:45,029 INFO [train.py:1114] (0/4) Epoch 12, batch 700, loss[loss=0.1929, simple_loss=0.2629, pruned_loss=0.04494, ctc_loss=0.08278, over 19738.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.275, pruned_loss=0.05064, ctc_loss=0.09488, over 3747405.65 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0
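
A note on reading the `train.py` lines above: each `loss[...]` block reports the current batch and each `tot_loss[...]` block a frame-weighted running average over the epoch so far. The logged totals are consistent with a weighted pruned-transducer + CTC objective; assuming scales of 0.5 on the simple loss and 0.2 on the CTC loss (inferred from the numbers here, not read from the log itself), the components reproduce the totals:

```python
# Check the decomposition against "Epoch 12, batch 250" above:
# loss=0.2059, simple_loss=0.285, pruned_loss=0.04626, ctc_loss=0.08554.
simple_loss, pruned_loss, ctc_loss = 0.285, 0.04626, 0.08554
loss = 0.5 * simple_loss + pruned_loss + 0.2 * ctc_loss  # assumed scales
print(round(loss, 4))  # 0.2059 -> matches the logged total
```

The same arithmetic holds for the other batches (e.g. batch 300: 0.5*0.282 + 0.05093 + 0.2*0.09364 = 0.2107), so the assumed scales appear stable throughout this run.
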
+2024-08-26 21:19:45,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=149765.33333333334, ans=0.025
+2024-08-26 21:19:54,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.23 vs. limit=15.0
+2024-08-26 21:20:00,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=149818.66666666666, ans=0.09899494936611666
+2024-08-26 21:20:12,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149925.33333333334, ans=0.1
+2024-08-26 21:20:26,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=149978.66666666666, ans=0.025
+2024-08-26 21:20:27,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=149978.66666666666, ans=0.025
+2024-08-26 21:20:31,162 INFO [train.py:1114] (0/4) Epoch 12, batch 750, loss[loss=0.2047, simple_loss=0.2756, pruned_loss=0.04936, ctc_loss=0.08774, over 19491.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2749, pruned_loss=0.05074, ctc_loss=0.09487, over 3774030.89 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:20:37,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=150032.0, ans=0.09899494936611666
+2024-08-26 21:20:43,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=150085.33333333334, ans=0.0
+2024-08-26 21:20:51,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=150138.66666666666, ans=0.0
+2024-08-26 21:20:51,913 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.592e+02 1.843e+02 2.247e+02 3.979e+02, threshold=3.686e+02, percent-clipped=6.0
+2024-08-26 21:20:57,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=150138.66666666666, ans=0.125
+2024-08-26 21:21:02,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=150192.0, ans=0.125
+2024-08-26 21:21:22,385 INFO [train.py:1114] (0/4) Epoch 12, batch 800, loss[loss=0.2094, simple_loss=0.2722, pruned_loss=0.05367, ctc_loss=0.09821, over 19782.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2751, pruned_loss=0.05109, ctc_loss=0.0956, over 3796126.48 frames. ], batch size: 49, lr: 1.26e-02, grad_scale: 32.0
+2024-08-26 21:21:35,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150352.0, ans=0.0
+2024-08-26 21:21:36,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=150352.0, ans=0.125
+2024-08-26 21:21:39,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150352.0, ans=0.1
+2024-08-26 21:21:48,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=150405.33333333334, ans=0.125
+2024-08-26 21:21:49,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=150405.33333333334, ans=0.125
+2024-08-26 21:22:12,971 INFO [train.py:1114] (0/4) Epoch 12, batch 850, loss[loss=0.2129, simple_loss=0.2811, pruned_loss=0.05203, ctc_loss=0.1015, over 19663.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2744, pruned_loss=0.05074, ctc_loss=0.09471, over 3814370.38 frames. ], batch size: 59, lr: 1.26e-02, grad_scale: 16.0
+2024-08-26 21:22:17,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=150565.33333333334, ans=0.025
+2024-08-26 21:22:21,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=150618.66666666666, ans=0.0
+2024-08-26 21:22:24,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0
+2024-08-26 21:22:34,296 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.451e+02 1.599e+02 1.811e+02 2.698e+02, threshold=3.198e+02, percent-clipped=0.0
+2024-08-26 21:22:35,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=12.0
+2024-08-26 21:22:42,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=150725.33333333334, ans=0.0
+2024-08-26 21:22:46,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=150725.33333333334, ans=0.07
+2024-08-26 21:22:49,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.11 vs. limit=22.5
+2024-08-26 21:22:51,067 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:22:53,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=150778.66666666666, ans=0.0
+2024-08-26 21:22:53,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=150778.66666666666, ans=0.0
+2024-08-26 21:22:56,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=150778.66666666666, ans=0.035
+2024-08-26 21:23:00,255 INFO [train.py:1114] (0/4) Epoch 12, batch 900, loss[loss=0.1825, simple_loss=0.252, pruned_loss=0.04114, ctc_loss=0.07651, over 19826.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2744, pruned_loss=0.05079, ctc_loss=0.09509, over 3818421.65 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 16.0
+2024-08-26 21:23:07,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=150832.0, ans=0.125
+2024-08-26 21:23:15,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=150885.33333333334, ans=0.025
+2024-08-26 21:23:42,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.85 vs. limit=15.0
+2024-08-26 21:23:47,031 INFO [train.py:1114] (0/4) Epoch 12, batch 950, loss[loss=0.1815, simple_loss=0.251, pruned_loss=0.04075, ctc_loss=0.07609, over 19519.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2748, pruned_loss=0.05095, ctc_loss=0.09553, over 3821068.99 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 16.0
+2024-08-26 21:23:48,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0
+2024-08-26 21:23:49,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=151098.66666666666, ans=0.0
+2024-08-26 21:24:08,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=151098.66666666666, ans=0.025
+2024-08-26 21:24:10,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=151152.0, ans=0.125
+2024-08-26 21:24:12,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=151152.0, ans=0.95
+2024-08-26 21:24:20,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.52 vs. limit=10.0
+2024-08-26 21:24:29,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.446e+02 1.609e+02 1.941e+02 6.709e+02, threshold=3.217e+02, percent-clipped=2.0
+2024-08-26 21:24:30,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=151205.33333333334, ans=0.125
+2024-08-26 21:24:37,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.33 vs. limit=15.0
+2024-08-26 21:24:56,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.90 vs. limit=15.0
+2024-08-26 21:24:57,588 INFO [train.py:1114] (0/4) Epoch 12, batch 1000, loss[loss=0.1883, simple_loss=0.2617, pruned_loss=0.04127, ctc_loss=0.08092, over 19850.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2752, pruned_loss=0.05101, ctc_loss=0.09556, over 3816496.46 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0
+2024-08-26 21:24:59,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=15.0
+2024-08-26 21:25:24,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151418.66666666666, ans=0.125
+2024-08-26 21:25:34,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=151472.0, ans=0.125
+2024-08-26 21:25:59,418 INFO [train.py:1114] (0/4) Epoch 12, batch 1050, loss[loss=0.1921, simple_loss=0.2699, pruned_loss=0.04147, ctc_loss=0.07825, over 19841.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2742, pruned_loss=0.0505, ctc_loss=0.09446, over 3823111.14 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 16.0
+2024-08-26 21:26:05,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=151632.0, ans=0.125
+2024-08-26 21:26:11,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=151685.33333333334, ans=0.0
+2024-08-26 21:26:13,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.36 vs. limit=15.0
+2024-08-26 21:26:20,568 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.384e+02 1.517e+02 1.769e+02 3.938e+02, threshold=3.034e+02, percent-clipped=1.0
+2024-08-26 21:26:21,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=151738.66666666666, ans=0.0
+2024-08-26 21:26:27,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=151792.0, ans=0.0
+2024-08-26 21:26:38,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=151845.33333333334, ans=0.0
+2024-08-26 21:26:43,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=151845.33333333334, ans=0.125
+2024-08-26 21:26:45,785 INFO [train.py:1114] (0/4) Epoch 12, batch 1100, loss[loss=0.1901, simple_loss=0.2593, pruned_loss=0.04412, ctc_loss=0.08146, over 19597.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2739, pruned_loss=0.05029, ctc_loss=0.09383, over 3830721.63 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0
+2024-08-26 21:26:48,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=151898.66666666666, ans=0.0
+2024-08-26 21:26:57,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=151952.0, ans=0.2
+2024-08-26 21:27:04,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.41 vs. limit=15.0
+2024-08-26 21:27:05,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152005.33333333334, ans=0.1
+2024-08-26 21:27:17,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=152058.66666666666, ans=0.125
+2024-08-26 21:27:25,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.45 vs. limit=15.0
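
The frequent `ScheduledFloat: name=..., batch_count=..., ans=...` lines come from icefall's scaling.py: many regularization knobs (dropout rates, skip rates, balancer probabilities) are schedules evaluated against the global batch count, and the log samples their current value as `ans`. A minimal sketch of that behavior, assuming piecewise-linear interpolation between breakpoints (names and details here are illustrative, not the exact icefall API):

```python
class ScheduledFloat:
    """A float-valued hyperparameter interpolated against the batch count."""

    def __init__(self, *points):
        self.points = sorted(points)  # (batch_count, value) pairs
        self.batch_count = 0.0        # advanced by the training loop

    def __float__(self):
        pts = self.points
        if self.batch_count <= pts[0][0]:
            return float(pts[0][1])
        if self.batch_count >= pts[-1][0]:
            return float(pts[-1][1])
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= self.batch_count <= x1:
                t = (self.batch_count - x0) / (x1 - x0)
                return float(y0 + t * (y1 - y0))

schedule = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
schedule.batch_count = 151418.0  # well past the last breakpoint
print(float(schedule))           # 0.1, i.e. the "ans=..." field above
```

This also explains why most `ans` values in this stretch of the log are constant: by batch_count ~150k the schedules have long since reached their final breakpoint.
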
+2024-08-26 21:27:27,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=152112.0, ans=0.125
+2024-08-26 21:27:27,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=152112.0, ans=0.125
+2024-08-26 21:27:34,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=152112.0, ans=0.125
+2024-08-26 21:27:38,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152112.0, ans=0.125
+2024-08-26 21:27:38,862 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.30 vs. limit=15.0
+2024-08-26 21:27:39,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=152112.0, ans=0.0
+2024-08-26 21:27:41,050 INFO [train.py:1114] (0/4) Epoch 12, batch 1150, loss[loss=0.1888, simple_loss=0.2637, pruned_loss=0.04146, ctc_loss=0.07759, over 19581.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.274, pruned_loss=0.05021, ctc_loss=0.09366, over 3829672.44 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0
+2024-08-26 21:27:44,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=152165.33333333334, ans=0.125
+2024-08-26 21:27:45,197 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:28:02,687 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.513e+02 1.822e+02 2.260e+02 3.131e+02, threshold=3.643e+02, percent-clipped=1.0
+2024-08-26 21:28:02,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=152272.0, ans=0.125
+2024-08-26 21:28:03,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=152272.0, ans=10.0
+2024-08-26 21:28:07,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=152272.0, ans=0.2
+2024-08-26 21:28:10,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=152325.33333333334, ans=0.125
+2024-08-26 21:28:28,107 INFO [train.py:1114] (0/4) Epoch 12, batch 1200, loss[loss=0.2089, simple_loss=0.2854, pruned_loss=0.04787, ctc_loss=0.09184, over 19837.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2749, pruned_loss=0.05039, ctc_loss=0.09419, over 3826395.84 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0
+2024-08-26 21:28:31,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=152432.0, ans=0.2
+2024-08-26 21:28:32,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=152432.0, ans=0.2
+2024-08-26 21:28:40,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=152485.33333333334, ans=0.125
+2024-08-26 21:28:58,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152592.0, ans=0.1
+2024-08-26 21:29:15,262 INFO [train.py:1114] (0/4) Epoch 12, batch 1250, loss[loss=0.2253, simple_loss=0.294, pruned_loss=0.05759, ctc_loss=0.1033, over 19529.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2754, pruned_loss=0.05036, ctc_loss=0.09404, over 3844268.22 frames. ], batch size: 61, lr: 1.25e-02, grad_scale: 32.0
+2024-08-26 21:29:27,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152752.0, ans=0.1
+2024-08-26 21:29:36,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.442e+02 1.596e+02 2.011e+02 3.434e+02, threshold=3.192e+02, percent-clipped=0.0
+2024-08-26 21:29:49,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.29 vs. limit=15.0
+2024-08-26 21:30:07,695 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.53 vs. limit=15.0
+2024-08-26 21:30:08,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=152912.0, ans=0.125
+2024-08-26 21:30:17,223 INFO [train.py:1114] (0/4) Epoch 12, batch 1300, loss[loss=0.22, simple_loss=0.2839, pruned_loss=0.05664, ctc_loss=0.1074, over 18869.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2747, pruned_loss=0.05025, ctc_loss=0.09399, over 3847329.36 frames. ], batch size: 76, lr: 1.25e-02, grad_scale: 32.0
+2024-08-26 21:30:44,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153072.0, ans=0.125
+2024-08-26 21:31:03,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=153178.66666666666, ans=0.07
+2024-08-26 21:31:06,497 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:31:08,962 INFO [train.py:1114] (0/4) Epoch 12, batch 1350, loss[loss=0.2059, simple_loss=0.2814, pruned_loss=0.04698, ctc_loss=0.09098, over 19756.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2746, pruned_loss=0.05022, ctc_loss=0.09389, over 3858031.15 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0
+2024-08-26 21:31:10,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=153232.0, ans=0.125
+2024-08-26 21:31:29,983 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.467e+02 1.650e+02 2.044e+02 3.234e+02, threshold=3.299e+02, percent-clipped=1.0
+2024-08-26 21:31:34,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153338.66666666666, ans=0.1
+2024-08-26 21:31:34,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0
+2024-08-26 21:31:36,101 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:31:38,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=153392.0, ans=0.025
+2024-08-26 21:31:45,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=153445.33333333334, ans=0.0
+2024-08-26 21:31:55,315 INFO [train.py:1114] (0/4) Epoch 12, batch 1400, loss[loss=0.18, simple_loss=0.2466, pruned_loss=0.04147, ctc_loss=0.07632, over 19661.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2743, pruned_loss=0.05006, ctc_loss=0.0935, over 3864670.14 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0
+2024-08-26 21:32:02,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153498.66666666666, ans=0.125
+2024-08-26 21:32:03,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=153552.0, ans=0.09899494936611666
+2024-08-26 21:32:14,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153605.33333333334, ans=0.1
+2024-08-26 21:32:44,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. limit=10.0
+2024-08-26 21:32:47,925 INFO [train.py:1114] (0/4) Epoch 12, batch 1450, loss[loss=0.2282, simple_loss=0.2925, pruned_loss=0.06059, ctc_loss=0.107, over 19681.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2749, pruned_loss=0.05047, ctc_loss=0.09424, over 3862329.37 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0
+2024-08-26 21:33:08,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0
+2024-08-26 21:33:12,065 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.443e+02 1.618e+02 1.909e+02 2.759e+02, threshold=3.236e+02, percent-clipped=0.0
+2024-08-26 21:33:16,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.76 vs. limit=22.5
+2024-08-26 21:33:33,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=153978.66666666666, ans=0.125
+2024-08-26 21:33:37,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=153978.66666666666, ans=0.125
+2024-08-26 21:33:42,291 INFO [train.py:1114] (0/4) Epoch 12, batch 1500, loss[loss=0.2115, simple_loss=0.2808, pruned_loss=0.05161, ctc_loss=0.09731, over 19579.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2755, pruned_loss=0.05085, ctc_loss=0.09494, over 3861668.37 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0
+2024-08-26 21:33:55,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=154085.33333333334, ans=0.0
+2024-08-26 21:34:02,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154138.66666666666, ans=0.125
+2024-08-26 21:34:03,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=154138.66666666666, ans=0.125
+2024-08-26 21:34:06,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=154138.66666666666, ans=0.125
+2024-08-26 21:34:06,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154138.66666666666, ans=0.1
+2024-08-26 21:34:06,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=154138.66666666666, ans=0.125
+2024-08-26 21:34:17,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=154192.0, ans=0.125
+2024-08-26 21:34:29,573 INFO [train.py:1114] (0/4) Epoch 12, batch 1550, loss[loss=0.2235, simple_loss=0.2932, pruned_loss=0.05718, ctc_loss=0.09853, over 19607.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2757, pruned_loss=0.05112, ctc_loss=0.09546, over 3846894.07 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0
+2024-08-26 21:34:42,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=12.0
+2024-08-26 21:34:51,386 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.431e+02 1.666e+02 1.890e+02 5.087e+02, threshold=3.332e+02, percent-clipped=2.0
+2024-08-26 21:34:54,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=154405.33333333334, ans=0.0
+2024-08-26 21:34:56,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=154405.33333333334, ans=0.015
+2024-08-26 21:35:15,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154512.0, ans=0.1
+2024-08-26 21:35:15,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=154512.0, ans=0.125
+2024-08-26 21:35:17,029 INFO [train.py:1114] (0/4) Epoch 12, batch 1600, loss[loss=0.2216, simple_loss=0.2915, pruned_loss=0.05523, ctc_loss=0.1029, over 19819.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2757, pruned_loss=0.05118, ctc_loss=0.09561, over 3835404.97 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0
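
The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...` lines summarize adaptive gradient clipping: the optimizer tracks recent gradient norms, reports their quartiles (min, 25%, median, 75%, max), and derives the clipping threshold from those statistics, here roughly `clipping_scale` times a mid-quartile. The sketch below is an assumed reconstruction of that bookkeeping, not icefall's actual ScaledAdam code:

```python
import torch

def clip_gradients(params, norm_history, clipping_scale=2.0, window=128):
    """Clip by a threshold derived from the median of recent gradient norms."""
    grads = [p.grad for p in params if p.grad is not None]
    norm = torch.norm(torch.stack([g.norm() for g in grads]))
    norm_history.append(norm.item())
    recent = torch.tensor(norm_history[-window:])
    # The five numbers logged after "grad-norm quartiles":
    quartiles = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()  # 2x the median norm
    clipped = norm.item() > threshold
    if clipped:  # scale all gradients down to the threshold
        for g in grads:
            g.mul_(threshold / norm.item())
    return quartiles, threshold, clipped
```

Note how the warnings above behave consistently with this: `percent-clipped` is nonzero exactly when the max quartile exceeds the reported threshold.
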
+2024-08-26 21:35:26,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=154618.66666666666, ans=10.0
+2024-08-26 21:35:38,144 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=22.5
+2024-08-26 21:35:39,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.67 vs. limit=15.0
+2024-08-26 21:35:49,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=154725.33333333334, ans=0.07
+2024-08-26 21:36:03,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154778.66666666666, ans=0.0
+2024-08-26 21:36:08,914 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=22.5
+2024-08-26 21:36:11,255 INFO [train.py:1114] (0/4) Epoch 12, batch 1650, loss[loss=0.2153, simple_loss=0.2925, pruned_loss=0.05036, ctc_loss=0.09375, over 19658.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2757, pruned_loss=0.05128, ctc_loss=0.0958, over 3832233.24 frames. ], batch size: 59, lr: 1.24e-02, grad_scale: 32.0
+2024-08-26 21:36:21,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=154885.33333333334, ans=0.125
+2024-08-26 21:36:23,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=154885.33333333334, ans=0.125
+2024-08-26 21:36:34,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.420e+02 1.592e+02 1.938e+02 3.625e+02, threshold=3.184e+02, percent-clipped=1.0
+2024-08-26 21:36:36,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=154938.66666666666, ans=0.125
+2024-08-26 21:36:45,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=154992.0, ans=0.125
+2024-08-26 21:36:51,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=155045.33333333334, ans=0.1
+2024-08-26 21:37:00,216 INFO [train.py:1114] (0/4) Epoch 12, batch 1700, loss[loss=0.1877, simple_loss=0.2435, pruned_loss=0.0489, ctc_loss=0.08536, over 19678.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2752, pruned_loss=0.05084, ctc_loss=0.09477, over 3846671.64 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0
+2024-08-26 21:37:12,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.07 vs. limit=15.0
+2024-08-26 21:37:21,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=155205.33333333334, ans=0.1
+2024-08-26 21:37:41,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=155312.0, ans=0.125
+2024-08-26 21:37:44,941 INFO [train.py:1114] (0/4) Epoch 12, batch 1750, loss[loss=0.1904, simple_loss=0.2514, pruned_loss=0.04702, ctc_loss=0.08841, over 19690.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2743, pruned_loss=0.05019, ctc_loss=0.09385, over 3852665.87 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 16.0
+2024-08-26 21:38:06,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.409e+02 1.600e+02 1.878e+02 3.182e+02, threshold=3.201e+02, percent-clipped=0.0
+2024-08-26 21:38:07,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=155472.0, ans=0.125
+2024-08-26 21:38:09,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.13 vs. limit=10.0
+2024-08-26 21:38:21,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=155578.66666666666, ans=0.125
+2024-08-26 21:38:24,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=15.0
+2024-08-26 21:38:28,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=155632.0, ans=0.2
+2024-08-26 21:38:28,996 INFO [train.py:1114] (0/4) Epoch 12, batch 1800, loss[loss=0.2204, simple_loss=0.2924, pruned_loss=0.05418, ctc_loss=0.1003, over 19602.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2749, pruned_loss=0.05045, ctc_loss=0.09438, over 3853341.67 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 16.0
+2024-08-26 21:38:49,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=155738.66666666666, ans=0.125
+2024-08-26 21:38:57,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=155792.0, ans=0.2
+2024-08-26 21:38:59,876 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:39:03,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=155845.33333333334, ans=0.2
+2024-08-26 21:39:04,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=155845.33333333334, ans=0.125
+2024-08-26 21:39:12,706 INFO [train.py:1114] (0/4) Epoch 12, batch 1850, loss[loss=0.2151, simple_loss=0.2825, pruned_loss=0.05375, ctc_loss=0.1005, over 19579.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2741, pruned_loss=0.04998, ctc_loss=0.09337, over 3854769.03 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 16.0
+2024-08-26 21:39:34,658 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.542e+02 1.764e+02 2.176e+02 3.980e+02, threshold=3.528e+02, percent-clipped=3.0
+2024-08-26 21:39:36,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=156005.33333333334, ans=0.2
+2024-08-26 21:39:42,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=156058.66666666666, ans=0.2
+2024-08-26 21:39:48,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=156112.0, ans=0.0
+2024-08-26 21:39:56,684 INFO [train.py:1114] (0/4) Epoch 12, batch 1900, loss[loss=0.2052, simple_loss=0.279, pruned_loss=0.04719, ctc_loss=0.09233, over 19655.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2749, pruned_loss=0.05024, ctc_loss=0.09376, over 3859826.76 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 8.0
+2024-08-26 21:40:24,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=156325.33333333334, ans=0.125
+2024-08-26 21:40:27,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=156325.33333333334, ans=0.0
+2024-08-26 21:40:40,037 INFO [train.py:1114] (0/4) Epoch 12, batch 1950, loss[loss=0.1935, simple_loss=0.266, pruned_loss=0.04403, ctc_loss=0.08231, over 19587.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2758, pruned_loss=0.05021, ctc_loss=0.09371, over 3869081.57 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0
+2024-08-26 21:40:40,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=15.0
+2024-08-26 21:40:47,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.23 vs. limit=15.0
+2024-08-26 21:40:49,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156485.33333333334, ans=0.125
+2024-08-26 21:40:50,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156485.33333333334, ans=0.1
+2024-08-26 21:40:56,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156538.66666666666, ans=0.125
+2024-08-26 21:41:01,728 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.443e+02 1.619e+02 1.881e+02 3.638e+02, threshold=3.238e+02, percent-clipped=1.0
+2024-08-26 21:41:05,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=156592.0, ans=0.125
+2024-08-26 21:41:17,401 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.41 vs. limit=15.0
+2024-08-26 21:41:25,580 INFO [train.py:1114] (0/4) Epoch 12, batch 2000, loss[loss=0.1897, simple_loss=0.2452, pruned_loss=0.04993, ctc_loss=0.0859, over 19697.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2763, pruned_loss=0.05061, ctc_loss=0.09445, over 3855269.57 frames. ], batch size: 45, lr: 1.23e-02, grad_scale: 16.0
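
The `Whitening: name=..., metric=X vs. limit=Y` lines report a whiteness statistic of a layer's activations: how far their covariance is from a scaled identity. Perfectly "white" features score about 1.0, and the module only intervenes when the metric exceeds its limit (the 6.0/10.0/15.0/22.5 limits above are per-module settings). A hedged approximation of the metric, not icefall's exact implementation:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns ~1.0 for white features;
    larger values mean the covariance deviates more from identity."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    # Ratio of mean squared eigenvalue to squared mean eigenvalue:
    return float((eigs ** 2).mean() / (eigs.mean() ** 2 + 1e-20))

print(whitening_metric(torch.randn(2000, 512)))  # close to 1.0
```

Relatedly, the `grad_scale: 32.0 -> 16.0 -> 8.0` field in the `train.py` lines looks like a dynamic mixed-precision loss scale, halved after overflowing steps and grown back after a run of good ones; this reading is an inference from the values, not stated in the log.
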
+2024-08-26 21:41:38,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=156752.0, ans=0.125
+2024-08-26 21:42:10,401 INFO [train.py:1114] (0/4) Epoch 12, batch 2050, loss[loss=0.1843, simple_loss=0.2466, pruned_loss=0.04442, ctc_loss=0.08275, over 19728.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2753, pruned_loss=0.05045, ctc_loss=0.09416, over 3851162.19 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:42:12,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.63 vs. limit=22.5
+2024-08-26 21:42:24,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=157018.66666666666, ans=0.2
+2024-08-26 21:42:33,045 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.456e+02 1.628e+02 1.934e+02 3.317e+02, threshold=3.256e+02, percent-clipped=1.0
+2024-08-26 21:42:47,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0
+2024-08-26 21:42:51,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.18 vs. limit=15.0
+2024-08-26 21:42:51,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=157178.66666666666, ans=0.2
+2024-08-26 21:42:54,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=157178.66666666666, ans=0.0
+2024-08-26 21:42:54,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=157178.66666666666, ans=0.0
+2024-08-26 21:42:55,850 INFO [train.py:1114] (0/4) Epoch 12, batch 2100, loss[loss=0.2091, simple_loss=0.2783, pruned_loss=0.05115, ctc_loss=0.09406, over 19782.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2745, pruned_loss=0.04991, ctc_loss=0.09321, over 3859765.02 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:43:11,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157285.33333333334, ans=0.125
+2024-08-26 21:43:22,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157392.0, ans=0.1
+2024-08-26 21:44:02,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.97 vs. limit=15.0
+2024-08-26 21:44:03,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=157445.33333333334, ans=0.0
+2024-08-26 21:44:06,214 INFO [train.py:1114] (0/4) Epoch 12, batch 2150, loss[loss=0.2013, simple_loss=0.268, pruned_loss=0.04918, ctc_loss=0.09067, over 19584.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.274, pruned_loss=0.04971, ctc_loss=0.09265, over 3870387.59 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:44:14,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157552.0, ans=0.1
+2024-08-26 21:44:21,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=157552.0, ans=0.125
+2024-08-26 21:44:24,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=157605.33333333334, ans=0.125
+2024-08-26 21:44:27,883 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.483e+02 1.683e+02 2.213e+02 4.687e+02, threshold=3.365e+02, percent-clipped=1.0
+2024-08-26 21:44:50,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=157712.0, ans=0.125
+2024-08-26 21:45:26,353 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 21:45:26,962 INFO [train.py:1114] (0/4) Epoch 12, batch 2200, loss[loss=0.2148, simple_loss=0.2917, pruned_loss=0.04996, ctc_loss=0.09489, over 19582.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.274, pruned_loss=0.04976, ctc_loss=0.0927, over 3868343.65 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:45:49,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=157872.0, ans=0.0
+2024-08-26 21:46:00,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=157925.33333333334, ans=0.125
+2024-08-26 21:46:01,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0
+2024-08-26 21:46:01,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=157978.66666666666, ans=0.125
+2024-08-26 21:46:08,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157978.66666666666, ans=0.1
+2024-08-26 21:46:10,351 INFO [train.py:1114] (0/4) Epoch 12, batch 2250, loss[loss=0.1934, simple_loss=0.2771, pruned_loss=0.03904, ctc_loss=0.07888, over 19636.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2738, pruned_loss=0.04964, ctc_loss=0.09267, over 3868134.48 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:46:15,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=158032.0, ans=0.025
+2024-08-26 21:46:31,822 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.542e+02 1.805e+02 2.126e+02 6.638e+02, threshold=3.611e+02, percent-clipped=1.0
+2024-08-26 21:46:41,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=158192.0, ans=0.0
+2024-08-26 21:46:53,558 INFO [train.py:1114] (0/4) Epoch 12, batch 2300, loss[loss=0.185, simple_loss=0.2562, pruned_loss=0.04055, ctc_loss=0.08169, over 19490.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2733, pruned_loss=0.04982, ctc_loss=0.09302, over 3862323.12 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:46:58,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=158298.66666666666, ans=0.025
+2024-08-26 21:47:09,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=158352.0, ans=0.125
+2024-08-26 21:47:15,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.68 vs. limit=22.5
+2024-08-26 21:47:25,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=158458.66666666666, ans=0.1
+2024-08-26 21:47:36,484 INFO [train.py:1114] (0/4) Epoch 12, batch 2350, loss[loss=0.226, simple_loss=0.2944, pruned_loss=0.05835, ctc_loss=0.1023, over 19671.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2735, pruned_loss=0.05002, ctc_loss=0.09347, over 3864821.42 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0
+2024-08-26 21:47:44,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=158565.33333333334, ans=0.125
+2024-08-26 21:47:58,859 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.457e+02 1.679e+02 1.938e+02 3.188e+02, threshold=3.358e+02, percent-clipped=0.0
+2024-08-26 21:48:10,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158725.33333333334, ans=0.1
+2024-08-26 21:48:14,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=158778.66666666666, ans=0.2
+2024-08-26 21:48:20,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158832.0, ans=0.1
+2024-08-26 21:48:21,587 INFO [train.py:1114] (0/4) Epoch 12, batch 2400, loss[loss=0.2269, simple_loss=0.2922, pruned_loss=0.05828, ctc_loss=0.1125, over 19390.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2754, pruned_loss=0.05091, ctc_loss=0.09501, over 3859457.85 frames. ], batch size: 67, lr: 1.22e-02, grad_scale: 32.0
+2024-08-26 21:48:32,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=158832.0, ans=0.0
+2024-08-26 21:48:32,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=158885.33333333334, ans=0.025
+2024-08-26 21:49:25,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.10 vs. limit=22.5
+2024-08-26 21:49:28,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=158992.0, ans=0.0
+2024-08-26 21:49:29,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158992.0, ans=0.1
+2024-08-26 21:49:33,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.32 vs. limit=15.0
+2024-08-26 21:49:36,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159045.33333333334, ans=0.125
+2024-08-26 21:49:42,231 INFO [train.py:1114] (0/4) Epoch 12, batch 2450, loss[loss=0.2766, simple_loss=0.3147, pruned_loss=0.08497, ctc_loss=0.1712, over 13431.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2794, pruned_loss=0.05382, ctc_loss=0.1005, over 3736242.85 frames. ], batch size: 141, lr: 1.22e-02, grad_scale: 32.0
+2024-08-26 21:49:43,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.56 vs. limit=22.5
+2024-08-26 21:49:52,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159152.0, ans=0.1
+2024-08-26 21:49:54,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=159152.0, ans=0.2
+2024-08-26 21:49:57,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=159152.0, ans=0.2
+2024-08-26 21:50:05,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=159205.33333333334, ans=0.0
+2024-08-26 21:50:05,689 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.611e+02 1.857e+02 2.069e+02 3.042e+02, threshold=3.714e+02, percent-clipped=0.0
+2024-08-26 21:50:08,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.20 vs. limit=12.0
+2024-08-26 21:50:18,211 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-12.pt
+2024-08-26 21:51:14,812 INFO [train.py:1114] (0/4) Epoch 13, batch 0, loss[loss=0.1753, simple_loss=0.2462, pruned_loss=0.0377, ctc_loss=0.07276, over 19800.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2462, pruned_loss=0.0377, ctc_loss=0.07276, over 19800.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 16.0
+2024-08-26 21:51:14,813 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-26 21:51:27,606 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.8999, 2.4765, 3.9194, 5.2323], device='cuda:0')
+2024-08-26 21:51:28,894 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1795, simple_loss=0.2723, pruned_loss=0.03226, ctc_loss=0.05568, over 944034.00 frames.
+2024-08-26 21:51:28,894 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB
+2024-08-26 21:51:30,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=159306.66666666666, ans=0.0
+2024-08-26 21:51:31,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=159306.66666666666, ans=0.0
+2024-08-26 21:51:33,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0
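
The `[checkpoint.py:75] Saving checkpoint to ...` lines show two cadences: a rolling batch-interval file (`checkpoint-28000.pt` earlier) and an end-of-epoch file (`epoch-12.pt` just above), after which the log rolls over to epoch 13 with a fresh validation pass. In spirit, the save step looks like the sketch below; the exact fields written are an assumption, not copied from icefall:

```python
import torch

def save_checkpoint(path, model, optimizer, scheduler, epoch, batch_idx_train):
    # Typically only rank 0 of the 4 DDP workers writes the file.
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "epoch": epoch,
            "batch_idx_train": batch_idx_train,
        },
        path,
    )
```

Also visible at the epoch boundary: the learning rate steps from 1.22e-02 down to 1.18e-02, consistent with an epoch-aware LR schedule.
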
limit=15.0 +2024-08-26 21:51:41,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=159360.0, ans=0.0 +2024-08-26 21:51:50,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.59 vs. limit=15.0 +2024-08-26 21:52:03,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159466.66666666666, ans=0.125 +2024-08-26 21:52:06,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=159466.66666666666, ans=0.125 +2024-08-26 21:52:18,719 INFO [train.py:1114] (0/4) Epoch 13, batch 50, loss[loss=0.1633, simple_loss=0.2369, pruned_loss=0.03251, ctc_loss=0.0618, over 19681.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2754, pruned_loss=0.05073, ctc_loss=0.09541, over 844325.49 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:52:19,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=159573.33333333334, ans=0.2 +2024-08-26 21:52:37,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159626.66666666666, ans=0.1 +2024-08-26 21:52:38,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159626.66666666666, ans=0.125 +2024-08-26 21:52:42,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=159680.0, ans=0.0 +2024-08-26 21:52:50,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=159733.33333333334, ans=0.2 +2024-08-26 21:52:56,397 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.509e+02 1.748e+02 2.087e+02 2.763e+02, threshold=3.495e+02, percent-clipped=0.0 +2024-08-26 21:53:03,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=159786.66666666666, ans=0.125 +2024-08-26 21:53:07,757 INFO [train.py:1114] (0/4) Epoch 13, batch 100, loss[loss=0.1815, simple_loss=0.2515, pruned_loss=0.04101, ctc_loss=0.07357, over 19711.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2764, pruned_loss=0.05039, ctc_loss=0.09494, over 1498893.67 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:53:08,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=159840.0, ans=0.0 +2024-08-26 21:53:15,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=159840.0, ans=0.125 +2024-08-26 21:53:23,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=159893.33333333334, ans=0.0 +2024-08-26 21:53:27,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=159893.33333333334, ans=0.125 +2024-08-26 21:53:39,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.00 vs. 
limit=15.0 +2024-08-26 21:53:42,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160000.0, ans=0.1 +2024-08-26 21:53:46,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=160000.0, ans=0.125 +2024-08-26 21:54:19,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 21:54:20,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 21:54:20,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=160053.33333333334, ans=0.0 +2024-08-26 21:54:22,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=160106.66666666666, ans=0.125 +2024-08-26 21:54:23,454 INFO [train.py:1114] (0/4) Epoch 13, batch 150, loss[loss=0.1914, simple_loss=0.2582, pruned_loss=0.04659, ctc_loss=0.07863, over 19717.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2734, pruned_loss=0.04927, ctc_loss=0.09255, over 2027521.62 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:54:23,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160106.66666666666, ans=0.1 +2024-08-26 21:54:50,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=160213.33333333334, ans=0.125 +2024-08-26 21:54:59,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=160266.66666666666, ans=0.0 +2024-08-26 21:55:02,354 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.397e+02 1.535e+02 1.726e+02 2.735e+02, threshold=3.069e+02, percent-clipped=0.0 +2024-08-26 21:55:13,385 INFO [train.py:1114] (0/4) Epoch 13, batch 200, loss[loss=0.2409, simple_loss=0.3019, pruned_loss=0.06578, ctc_loss=0.1206, over 18324.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2727, pruned_loss=0.04898, ctc_loss=0.09186, over 2435886.34 frames. ], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:55:15,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160373.33333333334, ans=0.125 +2024-08-26 21:55:20,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=160373.33333333334, ans=0.125 +2024-08-26 21:55:26,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=160426.66666666666, ans=0.125 +2024-08-26 21:55:33,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=160480.0, ans=0.5 +2024-08-26 21:55:35,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=160480.0, ans=0.0 +2024-08-26 21:55:47,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.39 vs. 
limit=22.5 +2024-08-26 21:55:50,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-08-26 21:55:55,657 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.28 vs. limit=15.0 +2024-08-26 21:56:19,213 INFO [train.py:1114] (0/4) Epoch 13, batch 250, loss[loss=0.2023, simple_loss=0.2732, pruned_loss=0.04827, ctc_loss=0.08715, over 19389.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2722, pruned_loss=0.0488, ctc_loss=0.0913, over 2755928.06 frames. ], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:56:31,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=160693.33333333334, ans=0.0 +2024-08-26 21:56:57,701 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.442e+02 1.721e+02 2.190e+02 3.294e+02, threshold=3.441e+02, percent-clipped=2.0 +2024-08-26 21:56:59,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.81 vs. limit=15.0 +2024-08-26 21:56:59,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=160853.33333333334, ans=0.05 +2024-08-26 21:57:07,799 INFO [train.py:1114] (0/4) Epoch 13, batch 300, loss[loss=0.2129, simple_loss=0.2883, pruned_loss=0.05069, ctc_loss=0.09013, over 19506.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2725, pruned_loss=0.04893, ctc_loss=0.09128, over 3000371.21 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 8.0 +2024-08-26 21:57:08,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=160906.66666666666, ans=0.95 +2024-08-26 21:57:13,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=160906.66666666666, ans=0.125 +2024-08-26 21:57:14,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=160906.66666666666, ans=0.07 +2024-08-26 21:57:25,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=161013.33333333334, ans=0.125 +2024-08-26 21:57:50,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161120.0, ans=0.125 +2024-08-26 21:57:55,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-08-26 21:57:55,510 INFO [train.py:1114] (0/4) Epoch 13, batch 350, loss[loss=0.1883, simple_loss=0.2443, pruned_loss=0.04864, ctc_loss=0.08739, over 19781.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2728, pruned_loss=0.04916, ctc_loss=0.0916, over 3190650.97 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 8.0 +2024-08-26 21:58:10,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.68 vs. 
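The `train.py` entries above report two loss tuples per logging step: `loss[...]` averaged over the frames of the current batch, and `tot_loss[...]` averaged over all frames seen so far (the `over N frames` fields). A minimal sketch of such a frame-weighted running average follows; the class and field names are illustrative, not icefall's actual implementation:

```python
# Hypothetical sketch of the frame-weighted running averages behind the
# "loss[... over N frames]" / "tot_loss[... over M frames]" log lines.
# Names are illustrative only.

class RunningLoss:
    def __init__(self):
        self.sums = {}      # metric name -> frame-weighted sum
        self.frames = 0.0   # total frames accumulated so far

    def update(self, batch_metrics: dict, num_frames: float):
        # Each batch contributes loss * num_frames, so batches of
        # different sizes are weighted by how much audio they contain.
        for name, value in batch_metrics.items():
            self.sums[name] = self.sums.get(name, 0.0) + value * num_frames
        self.frames += num_frames

    def averages(self) -> dict:
        return {name: s / self.frames for name, s in self.sums.items()}

tot = RunningLoss()
tot.update({"loss": 0.1999, "ctc_loss": 0.08182}, num_frames=19616)
print(tot.averages())  # per-frame averages, as in the tot_loss[...] fields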
limit=15.0 +2024-08-26 21:58:11,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=161226.66666666666, ans=0.0 +2024-08-26 21:58:33,157 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.400e+02 1.583e+02 1.867e+02 2.908e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-26 21:58:33,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=161386.66666666666, ans=10.0 +2024-08-26 21:58:37,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0 +2024-08-26 21:58:39,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=161386.66666666666, ans=0.125 +2024-08-26 21:58:43,183 INFO [train.py:1114] (0/4) Epoch 13, batch 400, loss[loss=0.2133, simple_loss=0.2819, pruned_loss=0.05353, ctc_loss=0.09392, over 19512.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2723, pruned_loss=0.04893, ctc_loss=0.0912, over 3342951.10 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:58:55,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.68 vs. limit=15.0 +2024-08-26 21:58:58,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161493.33333333334, ans=0.125 +2024-08-26 21:58:59,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=161493.33333333334, ans=0.95 +2024-08-26 21:59:01,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=161493.33333333334, ans=0.025 +2024-08-26 21:59:05,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.02 vs. limit=22.5 +2024-08-26 21:59:18,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=161600.0, ans=0.2 +2024-08-26 21:59:19,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=161600.0, ans=0.1 +2024-08-26 21:59:31,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=161706.66666666666, ans=0.0 +2024-08-26 21:59:32,232 INFO [train.py:1114] (0/4) Epoch 13, batch 450, loss[loss=0.1999, simple_loss=0.2813, pruned_loss=0.04285, ctc_loss=0.08182, over 19616.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2732, pruned_loss=0.04928, ctc_loss=0.09169, over 3451082.16 frames. 
], batch size: 55, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:59:32,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=161706.66666666666, ans=0.0 +2024-08-26 21:59:51,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=161760.0, ans=0.125 +2024-08-26 21:59:52,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161813.33333333334, ans=0.1 +2024-08-26 21:59:54,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=161813.33333333334, ans=0.125 +2024-08-26 21:59:56,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.35 vs. limit=10.0 +2024-08-26 22:00:01,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161866.66666666666, ans=0.1 +2024-08-26 22:00:10,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.449e+02 1.659e+02 1.894e+02 3.083e+02, threshold=3.319e+02, percent-clipped=0.0 +2024-08-26 22:00:20,539 INFO [train.py:1114] (0/4) Epoch 13, batch 500, loss[loss=0.218, simple_loss=0.2862, pruned_loss=0.05476, ctc_loss=0.1008, over 19664.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2723, pruned_loss=0.0489, ctc_loss=0.09107, over 3546593.79 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 22:00:32,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162026.66666666666, ans=0.1 +2024-08-26 22:00:38,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=162026.66666666666, ans=0.2 +2024-08-26 22:00:51,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=162133.33333333334, ans=0.2 +2024-08-26 22:01:03,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=162186.66666666666, ans=0.0 +2024-08-26 22:01:07,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=162186.66666666666, ans=0.125 +2024-08-26 22:01:07,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162186.66666666666, ans=0.1 +2024-08-26 22:01:10,495 INFO [train.py:1114] (0/4) Epoch 13, batch 550, loss[loss=0.2224, simple_loss=0.2922, pruned_loss=0.05588, ctc_loss=0.102, over 19314.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2725, pruned_loss=0.04911, ctc_loss=0.09153, over 3608138.28 frames. 
], batch size: 71, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 22:01:17,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=162240.0, ans=0.125 +2024-08-26 22:01:32,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=162346.66666666666, ans=0.2 +2024-08-26 22:01:44,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=162400.0, ans=0.0 +2024-08-26 22:01:59,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.555e+02 1.782e+02 2.360e+02 4.088e+02, threshold=3.564e+02, percent-clipped=3.0 +2024-08-26 22:02:00,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162453.33333333334, ans=0.0 +2024-08-26 22:02:10,198 INFO [train.py:1114] (0/4) Epoch 13, batch 600, loss[loss=0.2093, simple_loss=0.2878, pruned_loss=0.04718, ctc_loss=0.09094, over 19428.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2725, pruned_loss=0.04889, ctc_loss=0.09118, over 3665328.45 frames. ], batch size: 67, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:02:14,169 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:02:15,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=162506.66666666666, ans=0.025 +2024-08-26 22:02:16,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162506.66666666666, ans=0.1 +2024-08-26 22:02:42,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=162666.66666666666, ans=0.1 +2024-08-26 22:02:43,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162666.66666666666, ans=0.1 +2024-08-26 22:02:45,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162666.66666666666, ans=0.125 +2024-08-26 22:02:48,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=162720.0, ans=0.125 +2024-08-26 22:02:52,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162720.0, ans=0.125 +2024-08-26 22:02:57,644 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:02:58,290 INFO [train.py:1114] (0/4) Epoch 13, batch 650, loss[loss=0.1921, simple_loss=0.27, pruned_loss=0.04131, ctc_loss=0.07878, over 19748.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2719, pruned_loss=0.0485, ctc_loss=0.09041, over 3715452.02 frames. 
], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:03:01,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=162773.33333333334, ans=0.125 +2024-08-26 22:03:27,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=162826.66666666666, ans=0.125 +2024-08-26 22:03:47,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=162933.33333333334, ans=0.025 +2024-08-26 22:03:47,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=162933.33333333334, ans=0.5 +2024-08-26 22:03:57,399 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.372e+02 1.512e+02 1.802e+02 3.637e+02, threshold=3.024e+02, percent-clipped=1.0 +2024-08-26 22:04:09,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163040.0, ans=0.125 +2024-08-26 22:04:09,866 INFO [train.py:1114] (0/4) Epoch 13, batch 700, loss[loss=0.1803, simple_loss=0.2598, pruned_loss=0.03671, ctc_loss=0.0688, over 19719.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2723, pruned_loss=0.04851, ctc_loss=0.09051, over 3748105.30 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:04:42,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=163146.66666666666, ans=0.025 +2024-08-26 22:04:45,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=163200.0, ans=10.0 +2024-08-26 22:04:58,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163253.33333333334, ans=0.125 +2024-08-26 22:05:04,307 INFO [train.py:1114] (0/4) Epoch 13, batch 750, loss[loss=0.2036, simple_loss=0.2851, pruned_loss=0.04326, ctc_loss=0.08909, over 19514.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2721, pruned_loss=0.04853, ctc_loss=0.09056, over 3775227.81 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:05:09,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=163306.66666666666, ans=0.125 +2024-08-26 22:05:12,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.98 vs. limit=12.0 +2024-08-26 22:05:15,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163360.0, ans=0.125 +2024-08-26 22:05:23,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. 
limit=15.0 +2024-08-26 22:05:33,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 22:05:36,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=163466.66666666666, ans=0.0 +2024-08-26 22:05:41,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.233e+02 1.560e+02 1.959e+02 2.402e+02 3.823e+02, threshold=3.919e+02, percent-clipped=10.0 +2024-08-26 22:05:43,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163520.0, ans=0.125 +2024-08-26 22:05:48,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163520.0, ans=0.125 +2024-08-26 22:05:50,688 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:05:51,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=163520.0, ans=0.2 +2024-08-26 22:05:56,843 INFO [train.py:1114] (0/4) Epoch 13, batch 800, loss[loss=0.1906, simple_loss=0.2571, pruned_loss=0.04501, ctc_loss=0.08501, over 19405.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2718, pruned_loss=0.04847, ctc_loss=0.09058, over 3796428.28 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 22:05:59,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.87 vs. limit=15.0 +2024-08-26 22:06:33,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163733.33333333334, ans=0.125 +2024-08-26 22:06:51,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=163786.66666666666, ans=0.0 +2024-08-26 22:06:54,790 INFO [train.py:1114] (0/4) Epoch 13, batch 850, loss[loss=0.2132, simple_loss=0.2856, pruned_loss=0.05134, ctc_loss=0.09528, over 19647.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2713, pruned_loss=0.04823, ctc_loss=0.08998, over 3815611.85 frames. ], batch size: 59, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:07:10,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=163893.33333333334, ans=0.0 +2024-08-26 22:07:21,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163946.66666666666, ans=0.1 +2024-08-26 22:07:28,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=164000.0, ans=0.125 +2024-08-26 22:07:36,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=164000.0, ans=0.125 +2024-08-26 22:07:37,694 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.442e+02 1.756e+02 2.038e+02 3.459e+02, threshold=3.512e+02, percent-clipped=0.0 +2024-08-26 22:07:50,315 INFO [train.py:1114] (0/4) Epoch 13, batch 900, loss[loss=0.1726, simple_loss=0.242, pruned_loss=0.03755, ctc_loss=0.07002, over 19817.00 frames. 
], tot_loss[loss=0.2023, simple_loss=0.2717, pruned_loss=0.04846, ctc_loss=0.09008, over 3819191.02 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:07:57,994 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:07:59,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.25 vs. limit=15.0 +2024-08-26 22:08:14,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-08-26 22:08:20,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=164266.66666666666, ans=0.0 +2024-08-26 22:08:25,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164266.66666666666, ans=0.1 +2024-08-26 22:08:32,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=164320.0, ans=0.025 +2024-08-26 22:08:38,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=164320.0, ans=0.125 +2024-08-26 22:08:40,767 INFO [train.py:1114] (0/4) Epoch 13, batch 950, loss[loss=0.1848, simple_loss=0.2555, pruned_loss=0.04142, ctc_loss=0.07819, over 19491.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2724, pruned_loss=0.04897, ctc_loss=0.0912, over 3821463.59 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:08:50,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=164426.66666666666, ans=0.025 +2024-08-26 22:09:11,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=164533.33333333334, ans=0.125 +2024-08-26 22:09:12,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=164533.33333333334, ans=0.125 +2024-08-26 22:09:17,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=164533.33333333334, ans=0.2 +2024-08-26 22:09:20,502 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.559e+02 1.935e+02 2.172e+02 5.830e+02, threshold=3.869e+02, percent-clipped=1.0 +2024-08-26 22:09:29,545 INFO [train.py:1114] (0/4) Epoch 13, batch 1000, loss[loss=0.176, simple_loss=0.2477, pruned_loss=0.0378, ctc_loss=0.07172, over 19854.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2725, pruned_loss=0.04896, ctc_loss=0.09134, over 3817729.86 frames. 
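The `scaling.py:1024` Whitening lines compare a per-module `metric` against a `limit`; the metric summarizes how far each channel group's feature covariance is from white (isotropic), and a corrective gradient is only applied when it exceeds the limit. One way such a metric could be computed, shown as a hedged sketch rather than the exact zipformer formula:

```python
import torch

# Hedged sketch of a whitening diagnostic like the logged
# "Whitening: ... num_groups=G, num_channels=C, metric=X vs. limit=Y".
# Assumption: the metric measures eigenvalue spread of the per-group
# covariance; it is 1.0 when the features are already white.

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    # x: (num_frames, num_channels); channels split into equal groups
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n          # per-group covariance
    eigs = torch.linalg.eigvalsh(cov)        # real eigenvalues, ascending
    # mean squared eigenvalue over squared mean eigenvalue: equals 1.0
    # iff all eigenvalues are equal (perfectly white features).
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

feats = torch.randn(10000, 64)
print(whitening_metric(feats, num_groups=1))  # close to 1.0 for white noise
```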
], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:09:32,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=164640.0, ans=0.2 +2024-08-26 22:09:34,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=164640.0, ans=0.2 +2024-08-26 22:09:42,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164693.33333333334, ans=0.1 +2024-08-26 22:09:44,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=164693.33333333334, ans=0.125 +2024-08-26 22:10:09,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=164853.33333333334, ans=0.0 +2024-08-26 22:10:19,110 INFO [train.py:1114] (0/4) Epoch 13, batch 1050, loss[loss=0.2017, simple_loss=0.2773, pruned_loss=0.04559, ctc_loss=0.08737, over 19837.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2718, pruned_loss=0.04878, ctc_loss=0.09107, over 3823736.33 frames. ], batch size: 57, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:10:20,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164906.66666666666, ans=0.1 +2024-08-26 22:10:22,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=164906.66666666666, ans=0.125 +2024-08-26 22:10:39,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=165013.33333333334, ans=0.2 +2024-08-26 22:10:39,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165013.33333333334, ans=0.1 +2024-08-26 22:10:40,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=165013.33333333334, ans=0.025 +2024-08-26 22:10:55,509 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.353e+02 1.566e+02 1.889e+02 2.686e+02, threshold=3.131e+02, percent-clipped=0.0 +2024-08-26 22:11:01,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=165120.0, ans=0.125 +2024-08-26 22:11:06,687 INFO [train.py:1114] (0/4) Epoch 13, batch 1100, loss[loss=0.1984, simple_loss=0.2721, pruned_loss=0.04468, ctc_loss=0.0883, over 19566.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2716, pruned_loss=0.04848, ctc_loss=0.09053, over 3830771.65 frames. 
], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:11:06,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=165173.33333333334, ans=0.0 +2024-08-26 22:11:22,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=165226.66666666666, ans=0.125 +2024-08-26 22:11:34,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=165333.33333333334, ans=0.5 +2024-08-26 22:11:49,344 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:11:55,417 INFO [train.py:1114] (0/4) Epoch 13, batch 1150, loss[loss=0.1933, simple_loss=0.2649, pruned_loss=0.04468, ctc_loss=0.08099, over 19597.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2714, pruned_loss=0.04852, ctc_loss=0.09053, over 3830498.68 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 16.0 +2024-08-26 22:11:55,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165440.0, ans=0.125 +2024-08-26 22:11:56,912 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-08-26 22:12:27,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165600.0, ans=0.1 +2024-08-26 22:12:34,675 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.454e+02 1.639e+02 1.902e+02 3.180e+02, threshold=3.277e+02, percent-clipped=1.0 +2024-08-26 22:12:37,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=165653.33333333334, ans=0.2 +2024-08-26 22:12:43,815 INFO [train.py:1114] (0/4) Epoch 13, batch 1200, loss[loss=0.2101, simple_loss=0.2808, pruned_loss=0.05105, ctc_loss=0.09334, over 19841.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2724, pruned_loss=0.04879, ctc_loss=0.09116, over 3825729.86 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:12:45,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0 +2024-08-26 22:12:49,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165706.66666666666, ans=0.125 +2024-08-26 22:12:53,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=165706.66666666666, ans=0.125 +2024-08-26 22:13:00,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.46 vs. limit=22.5 +2024-08-26 22:13:09,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=165813.33333333334, ans=0.125 +2024-08-26 22:13:32,332 INFO [train.py:1114] (0/4) Epoch 13, batch 1250, loss[loss=0.227, simple_loss=0.2885, pruned_loss=0.06124, ctc_loss=0.1078, over 19518.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2723, pruned_loss=0.04869, ctc_loss=0.09075, over 3843655.39 frames. 
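The `grad_scale` field in the `train.py` lines (moving between 16.0 and 32.0 above) is the dynamic loss scale used for mixed-precision training: it grows while steps succeed and is cut back when inf/nan gradients appear. A sketch using PyTorch's stock `GradScaler`; icefall uses its own variant, and `model`, `optimizer`, and `batch` here are placeholders:

```python
import torch

# Sketch of the dynamic loss scaling behind the "grad_scale: 16.0/32.0"
# log fields, using the standard torch.cuda.amp API.

scaler = torch.cuda.amp.GradScaler(init_scale=16.0, growth_interval=2000)

def train_step(model, optimizer, batch):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():        # fp16 forward pass
        loss = model(batch)                # placeholder: model returns loss
    scaler.scale(loss).backward()          # backward on the scaled loss
    scaler.step(optimizer)                 # skips the step on inf/nan grads
    scaler.update()                        # grow or shrink the scale
    return scaler.get_scale()              # the value that gets logged
```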
], batch size: 61, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:13:32,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=165973.33333333334, ans=0.125 +2024-08-26 22:13:34,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=165973.33333333334, ans=0.125 +2024-08-26 22:14:01,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166133.33333333334, ans=0.1 +2024-08-26 22:14:11,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166186.66666666666, ans=0.1 +2024-08-26 22:14:11,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166186.66666666666, ans=0.125 +2024-08-26 22:14:11,807 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.418e+02 1.637e+02 2.002e+02 4.206e+02, threshold=3.274e+02, percent-clipped=1.0 +2024-08-26 22:14:23,437 INFO [train.py:1114] (0/4) Epoch 13, batch 1300, loss[loss=0.2251, simple_loss=0.2874, pruned_loss=0.05958, ctc_loss=0.1089, over 18789.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2715, pruned_loss=0.04831, ctc_loss=0.08991, over 3847040.83 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:14:40,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166293.33333333334, ans=0.0 +2024-08-26 22:14:42,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0 +2024-08-26 22:15:02,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.56 vs. limit=12.0 +2024-08-26 22:15:09,553 INFO [train.py:1114] (0/4) Epoch 13, batch 1350, loss[loss=0.1723, simple_loss=0.2553, pruned_loss=0.032, ctc_loss=0.06338, over 19774.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2708, pruned_loss=0.04777, ctc_loss=0.08902, over 3858652.72 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:15:22,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=166560.0, ans=0.05 +2024-08-26 22:15:31,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=166613.33333333334, ans=0.025 +2024-08-26 22:15:47,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=12.0 +2024-08-26 22:15:50,052 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.605e+02 1.958e+02 2.747e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-26 22:15:59,142 INFO [train.py:1114] (0/4) Epoch 13, batch 1400, loss[loss=0.1825, simple_loss=0.2412, pruned_loss=0.04485, ctc_loss=0.08533, over 19692.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2705, pruned_loss=0.04772, ctc_loss=0.08897, over 3864667.05 frames. 
], batch size: 46, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:16:18,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=166880.0, ans=0.125 +2024-08-26 22:16:28,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166933.33333333334, ans=0.1 +2024-08-26 22:16:34,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166933.33333333334, ans=0.125 +2024-08-26 22:16:47,825 INFO [train.py:1114] (0/4) Epoch 13, batch 1450, loss[loss=0.2049, simple_loss=0.2726, pruned_loss=0.05026, ctc_loss=0.09158, over 19679.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2714, pruned_loss=0.04816, ctc_loss=0.08964, over 3862526.47 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:16:59,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=167093.33333333334, ans=0.025 +2024-08-26 22:17:19,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.50 vs. limit=22.5 +2024-08-26 22:17:22,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=167200.0, ans=0.125 +2024-08-26 22:17:27,080 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.434e+02 1.640e+02 1.966e+02 4.010e+02, threshold=3.281e+02, percent-clipped=1.0 +2024-08-26 22:17:34,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=167253.33333333334, ans=0.0 +2024-08-26 22:17:36,397 INFO [train.py:1114] (0/4) Epoch 13, batch 1500, loss[loss=0.214, simple_loss=0.2874, pruned_loss=0.05098, ctc_loss=0.09685, over 19580.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.272, pruned_loss=0.0482, ctc_loss=0.08968, over 3861682.56 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:18:09,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=167466.66666666666, ans=0.125 +2024-08-26 22:18:14,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167520.0, ans=0.125 +2024-08-26 22:18:15,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=167520.0, ans=0.125 +2024-08-26 22:18:26,617 INFO [train.py:1114] (0/4) Epoch 13, batch 1550, loss[loss=0.2223, simple_loss=0.2884, pruned_loss=0.05708, ctc_loss=0.1051, over 19588.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2723, pruned_loss=0.04878, ctc_loss=0.09079, over 3846022.74 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:18:35,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=167626.66666666666, ans=0.2 +2024-08-26 22:18:42,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=167626.66666666666, ans=0.0 +2024-08-26 22:18:55,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.20 vs. 
limit=22.5 +2024-08-26 22:19:03,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=167786.66666666666, ans=0.04949747468305833 +2024-08-26 22:19:04,502 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.500e+02 1.731e+02 2.118e+02 3.338e+02, threshold=3.463e+02, percent-clipped=1.0 +2024-08-26 22:19:11,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=167786.66666666666, ans=0.0 +2024-08-26 22:19:12,912 INFO [train.py:1114] (0/4) Epoch 13, batch 1600, loss[loss=0.2135, simple_loss=0.2863, pruned_loss=0.0517, ctc_loss=0.09325, over 19856.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2723, pruned_loss=0.0488, ctc_loss=0.09078, over 3836438.34 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:19:30,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=167893.33333333334, ans=0.125 +2024-08-26 22:19:32,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=167946.66666666666, ans=0.125 +2024-08-26 22:19:33,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=22.5 +2024-08-26 22:19:59,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=167946.66666666666, ans=0.0 +2024-08-26 22:19:59,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=167946.66666666666, ans=0.0 +2024-08-26 22:20:08,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=168000.0, ans=0.0 +2024-08-26 22:20:13,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=168053.33333333334, ans=0.125 +2024-08-26 22:20:19,721 INFO [train.py:1114] (0/4) Epoch 13, batch 1650, loss[loss=0.1998, simple_loss=0.2745, pruned_loss=0.04403, ctc_loss=0.09277, over 19662.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.272, pruned_loss=0.04863, ctc_loss=0.09071, over 3833199.88 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:20:32,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=168160.0, ans=0.1 +2024-08-26 22:20:43,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168213.33333333334, ans=0.125 +2024-08-26 22:20:46,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-08-26 22:20:57,545 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.381e+02 1.542e+02 1.780e+02 2.683e+02, threshold=3.084e+02, percent-clipped=0.0 +2024-08-26 22:21:01,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=168320.0, ans=0.0 +2024-08-26 22:21:07,572 INFO [train.py:1114] (0/4) Epoch 13, batch 1700, loss[loss=0.1868, simple_loss=0.2492, pruned_loss=0.04408, ctc_loss=0.09073, over 19674.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2716, pruned_loss=0.04817, ctc_loss=0.08991, over 3847041.83 frames. 
], batch size: 46, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:21:27,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.56 vs. limit=12.0 +2024-08-26 22:21:39,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=168533.33333333334, ans=0.125 +2024-08-26 22:21:48,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=12.0 +2024-08-26 22:21:52,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=168586.66666666666, ans=0.025 +2024-08-26 22:21:53,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=168640.0, ans=0.05 +2024-08-26 22:21:53,902 INFO [train.py:1114] (0/4) Epoch 13, batch 1750, loss[loss=0.1778, simple_loss=0.2449, pruned_loss=0.04031, ctc_loss=0.07492, over 19627.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2707, pruned_loss=0.0478, ctc_loss=0.08911, over 3852460.98 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:23:42,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=168800.0, ans=0.0 +2024-08-26 22:23:43,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=168800.0, ans=0.125 +2024-08-26 22:23:50,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=15.0 +2024-08-26 22:23:51,722 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.438e+02 1.563e+02 1.924e+02 3.851e+02, threshold=3.126e+02, percent-clipped=3.0 +2024-08-26 22:24:01,012 INFO [train.py:1114] (0/4) Epoch 13, batch 1800, loss[loss=0.2176, simple_loss=0.2944, pruned_loss=0.05169, ctc_loss=0.09358, over 19597.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2711, pruned_loss=0.04797, ctc_loss=0.08949, over 3854137.29 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:24:06,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=168906.66666666666, ans=0.025 +2024-08-26 22:24:09,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.65 vs. limit=15.0 +2024-08-26 22:24:25,702 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:24:25,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.85 vs. limit=15.0 +2024-08-26 22:24:44,742 INFO [train.py:1114] (0/4) Epoch 13, batch 1850, loss[loss=0.2041, simple_loss=0.2786, pruned_loss=0.04628, ctc_loss=0.09279, over 19579.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2705, pruned_loss=0.0478, ctc_loss=0.08906, over 3856853.35 frames. 
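Alongside the `WithLoss` lines (auxiliary loss sums attached to attention-weight modules), the log occasionally prints `attn_weights_entropy` tensors as a diagnostic of how diffuse each head's attention is. A hedged sketch of computing per-head attention entropy; the averaging convention is an assumption:

```python
import torch

# Sketch of a per-head attention-entropy diagnostic like the logged
# "attn_weights_entropy = tensor([...])" lines. Assumption: entropy of
# each head's attention distribution, averaged over batch and queries.

def attn_entropy(attn: torch.Tensor) -> torch.Tensor:
    # attn: (batch, heads, queries, keys); each row sums to 1
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # (batch, heads, q)
    return ent.mean(dim=(0, 2))                       # one value per head

w = torch.softmax(torch.randn(2, 4, 10, 50), dim=-1)
print(attn_entropy(w))  # 4 per-head entropies (4 heads for illustration)
```

Higher entropy means a head spreads its attention broadly; values near zero mean it attends to a single key.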
], batch size: 57, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:24:49,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=169173.33333333334, ans=0.2 +2024-08-26 22:24:53,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=169226.66666666666, ans=0.125 +2024-08-26 22:24:58,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=169226.66666666666, ans=0.0 +2024-08-26 22:24:59,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.84 vs. limit=10.0 +2024-08-26 22:25:01,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.47 vs. limit=22.5 +2024-08-26 22:25:09,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=169280.0, ans=0.125 +2024-08-26 22:25:12,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=169333.33333333334, ans=0.125 +2024-08-26 22:25:21,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.546e+02 1.793e+02 2.323e+02 4.609e+02, threshold=3.586e+02, percent-clipped=7.0 +2024-08-26 22:25:29,826 INFO [train.py:1114] (0/4) Epoch 13, batch 1900, loss[loss=0.1988, simple_loss=0.2768, pruned_loss=0.04347, ctc_loss=0.08437, over 19653.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.271, pruned_loss=0.048, ctc_loss=0.0893, over 3861046.47 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:25:59,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=169600.0, ans=0.0 +2024-08-26 22:26:13,639 INFO [train.py:1114] (0/4) Epoch 13, batch 1950, loss[loss=0.1809, simple_loss=0.2527, pruned_loss=0.03978, ctc_loss=0.07372, over 19587.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2717, pruned_loss=0.04798, ctc_loss=0.08929, over 3870178.17 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:26:31,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169813.33333333334, ans=0.1 +2024-08-26 22:26:31,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=169813.33333333334, ans=0.125 +2024-08-26 22:26:43,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169866.66666666666, ans=0.1 +2024-08-26 22:26:52,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=169920.0, ans=0.1 +2024-08-26 22:26:53,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.527e+02 1.786e+02 2.093e+02 2.857e+02, threshold=3.573e+02, percent-clipped=0.0 +2024-08-26 22:26:58,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169920.0, ans=0.1 +2024-08-26 22:27:00,498 INFO [train.py:1114] (0/4) Epoch 13, batch 2000, loss[loss=0.1829, simple_loss=0.2528, pruned_loss=0.04129, ctc_loss=0.07569, over 19613.00 frames. 
], tot_loss[loss=0.203, simple_loss=0.2727, pruned_loss=0.04861, ctc_loss=0.09044, over 3853235.11 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:27:15,476 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:27:35,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.07 vs. limit=22.5 +2024-08-26 22:27:41,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=170186.66666666666, ans=0.125 +2024-08-26 22:27:43,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=170240.0, ans=0.0 +2024-08-26 22:27:44,095 INFO [train.py:1114] (0/4) Epoch 13, batch 2050, loss[loss=0.1872, simple_loss=0.2591, pruned_loss=0.0422, ctc_loss=0.07719, over 19708.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2717, pruned_loss=0.04834, ctc_loss=0.08986, over 3851156.12 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:27:46,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=170240.0, ans=0.2 +2024-08-26 22:27:51,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=170240.0, ans=0.025 +2024-08-26 22:28:15,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170400.0, ans=0.1 +2024-08-26 22:28:20,405 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.436e+02 1.652e+02 1.928e+02 2.658e+02, threshold=3.303e+02, percent-clipped=0.0 +2024-08-26 22:28:27,531 INFO [train.py:1114] (0/4) Epoch 13, batch 2100, loss[loss=0.1946, simple_loss=0.2637, pruned_loss=0.04586, ctc_loss=0.08462, over 19762.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2711, pruned_loss=0.0476, ctc_loss=0.08872, over 3858379.74 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:28:42,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=170560.0, ans=0.0 +2024-08-26 22:28:45,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.99 vs. limit=15.0 +2024-08-26 22:28:53,131 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-32000.pt +2024-08-26 22:29:06,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=170666.66666666666, ans=0.0 +2024-08-26 22:29:17,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=170720.0, ans=0.125 +2024-08-26 22:29:17,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=170773.33333333334, ans=0.0 +2024-08-26 22:29:18,563 INFO [train.py:1114] (0/4) Epoch 13, batch 2150, loss[loss=0.1784, simple_loss=0.2568, pruned_loss=0.03659, ctc_loss=0.06699, over 19575.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2703, pruned_loss=0.04725, ctc_loss=0.08813, over 3869607.32 frames. 
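The `checkpoint.py:75` lines show two checkpoint flavors: `epoch-N.pt` at each epoch boundary and `checkpoint-STEP.pt` at fixed batch intervals (here `checkpoint-32000.pt`). A hedged sketch; the saved fields, paths, and interval are illustrative, not necessarily icefall's exact set:

```python
import torch

# Sketch of the two checkpoint flavors seen in the log:
# epoch-N.pt per epoch, checkpoint-STEP.pt every `save_every_n` batches.

def save_checkpoint(path, model, optimizer, scheduler, epoch, batch_idx):
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "epoch": epoch,
            "batch_idx_train": batch_idx,
        },
        path,
    )

save_every_n = 32000  # hypothetical; matches the checkpoint-32000.pt name
# inside the training loop (sketch):
#   if batch_idx % save_every_n == 0:
#       save_checkpoint(f"{exp_dir}/checkpoint-{batch_idx}.pt", ...)
# and at each epoch boundary:
#       save_checkpoint(f"{exp_dir}/epoch-{epoch}.pt", ...)
```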
], batch size: 52, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:29:37,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170880.0, ans=0.125
+2024-08-26 22:29:38,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170880.0, ans=0.1
+2024-08-26 22:29:39,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=22.5
+2024-08-26 22:29:39,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=170880.0, ans=0.2
+2024-08-26 22:29:44,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=170933.33333333334, ans=0.2
+2024-08-26 22:29:55,023 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.462e+02 1.698e+02 2.269e+02 4.218e+02, threshold=3.397e+02, percent-clipped=7.0
+2024-08-26 22:29:56,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=170986.66666666666, ans=0.0
+2024-08-26 22:30:02,085 INFO [train.py:1114] (0/4) Epoch 13, batch 2200, loss[loss=0.2214, simple_loss=0.296, pruned_loss=0.05366, ctc_loss=0.09863, over 19592.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2704, pruned_loss=0.04732, ctc_loss=0.08813, over 3867172.51 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:30:05,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=171040.0, ans=0.125
+2024-08-26 22:30:28,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.95 vs. limit=10.0
+2024-08-26 22:30:36,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=171253.33333333334, ans=0.5
+2024-08-26 22:30:43,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=171253.33333333334, ans=0.0
+2024-08-26 22:30:46,568 INFO [train.py:1114] (0/4) Epoch 13, batch 2250, loss[loss=0.2306, simple_loss=0.3013, pruned_loss=0.05876, ctc_loss=0.1057, over 19623.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2709, pruned_loss=0.04747, ctc_loss=0.0886, over 3867954.74 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:30:50,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.85 vs. limit=22.5
+2024-08-26 22:31:07,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171413.33333333334, ans=0.1
+2024-08-26 22:31:07,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=171413.33333333334, ans=0.0
+2024-08-26 22:31:22,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.444e+02 1.610e+02 1.869e+02 3.635e+02, threshold=3.220e+02, percent-clipped=1.0
+2024-08-26 22:31:27,047 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:31:29,429 INFO [train.py:1114] (0/4) Epoch 13, batch 2300, loss[loss=0.179, simple_loss=0.2497, pruned_loss=0.03925, ctc_loss=0.07428, over 19511.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2703, pruned_loss=0.04758, ctc_loss=0.08894, over 3862602.56 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:31:31,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=171573.33333333334, ans=0.125
+2024-08-26 22:31:32,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=171573.33333333334, ans=0.2
+2024-08-26 22:31:55,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=171733.33333333334, ans=0.2
+2024-08-26 22:31:58,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=171733.33333333334, ans=0.0
+2024-08-26 22:32:08,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=171786.66666666666, ans=0.04949747468305833
+2024-08-26 22:32:10,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=171786.66666666666, ans=0.125
+2024-08-26 22:32:13,415 INFO [train.py:1114] (0/4) Epoch 13, batch 2350, loss[loss=0.2145, simple_loss=0.2786, pruned_loss=0.05559, ctc_loss=0.0979, over 19693.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2705, pruned_loss=0.0477, ctc_loss=0.089, over 3865403.74 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 16.0
+2024-08-26 22:32:27,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=171893.33333333334, ans=0.2
+2024-08-26 22:32:50,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=171946.66666666666, ans=0.0
+2024-08-26 22:32:58,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.11 vs. limit=15.0
+2024-08-26 22:33:04,852 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.652e+02 1.956e+02 2.487e+02 4.828e+02, threshold=3.913e+02, percent-clipped=4.0
+2024-08-26 22:33:10,708 INFO [train.py:1114] (0/4) Epoch 13, batch 2400, loss[loss=0.2309, simple_loss=0.3025, pruned_loss=0.0579, ctc_loss=0.1087, over 19379.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2725, pruned_loss=0.04844, ctc_loss=0.09039, over 3859690.14 frames. ], batch size: 67, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:33:10,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=172106.66666666666, ans=0.125
+2024-08-26 22:33:14,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172106.66666666666, ans=0.125
+2024-08-26 22:33:21,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=172160.0, ans=0.2
+2024-08-26 22:33:34,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=172213.33333333334, ans=0.125
+2024-08-26 22:33:52,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=172320.0, ans=0.2
+2024-08-26 22:33:54,748 INFO [train.py:1114] (0/4) Epoch 13, batch 2450, loss[loss=0.2742, simple_loss=0.3102, pruned_loss=0.08615, ctc_loss=0.1648, over 12724.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2766, pruned_loss=0.05146, ctc_loss=0.0963, over 3732461.70 frames. ], batch size: 140, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:33:59,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.49 vs. limit=22.5
+2024-08-26 22:34:10,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.52 vs. limit=15.0
+2024-08-26 22:34:13,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172480.0, ans=0.0
+2024-08-26 22:34:29,339 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-13.pt
+2024-08-26 22:38:18,460 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.1888, simple_loss=0.2606, pruned_loss=0.04301, ctc_loss=0.07734, over 19413.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2606, pruned_loss=0.04301, ctc_loss=0.07734, over 19413.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:38:18,461 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-26 22:38:36,019 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1777, simple_loss=0.2705, pruned_loss=0.03149, ctc_loss=0.05468, over 944034.00 frames.
+2024-08-26 22:38:36,020 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB
+2024-08-26 22:38:39,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.631e+02 1.782e+02 1.968e+02 3.125e+02, threshold=3.565e+02, percent-clipped=0.0
+2024-08-26 22:38:44,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=172634.66666666666, ans=0.0
+2024-08-26 22:39:04,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.80 vs. limit=22.5
+2024-08-26 22:39:09,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.11 vs. limit=15.0
+2024-08-26 22:39:21,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172794.66666666666, ans=0.1
+2024-08-26 22:40:03,206 INFO [train.py:1114] (0/4) Epoch 14, batch 50, loss[loss=0.1904, simple_loss=0.2575, pruned_loss=0.04475, ctc_loss=0.08443, over 19713.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2734, pruned_loss=0.04821, ctc_loss=0.09071, over 844800.35 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:40:08,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.18 vs. limit=22.5
+2024-08-26 22:40:16,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172848.0, ans=0.1
+2024-08-26 22:41:56,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=172901.33333333334, ans=0.025
+2024-08-26 22:42:01,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.79 vs. limit=15.0
+2024-08-26 22:42:41,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=172901.33333333334, ans=0.125
+2024-08-26 22:42:42,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172901.33333333334, ans=0.125
+2024-08-26 22:42:42,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.28 vs. limit=15.0
+2024-08-26 22:42:50,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=172954.66666666666, ans=0.025
+2024-08-26 22:43:09,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173008.0, ans=0.1
+2024-08-26 22:43:12,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=173061.33333333334, ans=0.2
+2024-08-26 22:43:20,288 INFO [train.py:1114] (0/4) Epoch 14, batch 100, loss[loss=0.1827, simple_loss=0.2603, pruned_loss=0.03777, ctc_loss=0.07377, over 19723.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2744, pruned_loss=0.04836, ctc_loss=0.09053, over 1499143.56 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:43:23,801 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.427e+02 1.577e+02 1.836e+02 2.542e+02, threshold=3.153e+02, percent-clipped=0.0
+2024-08-26 22:43:32,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173168.0, ans=0.125
+2024-08-26 22:44:03,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173328.0, ans=0.1
+2024-08-26 22:44:10,505 INFO [train.py:1114] (0/4) Epoch 14, batch 150, loss[loss=0.1749, simple_loss=0.2436, pruned_loss=0.03821, ctc_loss=0.07451, over 19719.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2723, pruned_loss=0.04759, ctc_loss=0.08894, over 2027452.09 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:44:11,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173381.33333333334, ans=0.125
+2024-08-26 22:44:11,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.97 vs. limit=15.0
+2024-08-26 22:44:12,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=173381.33333333334, ans=0.0
+2024-08-26 22:44:17,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0
+2024-08-26 22:44:25,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=173434.66666666666, ans=0.025
+2024-08-26 22:44:29,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=173434.66666666666, ans=0.0
+2024-08-26 22:45:00,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=173594.66666666666, ans=0.09899494936611666
+2024-08-26 22:45:10,942 INFO [train.py:1114] (0/4) Epoch 14, batch 200, loss[loss=0.2346, simple_loss=0.2988, pruned_loss=0.06165, ctc_loss=0.1178, over 18353.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2698, pruned_loss=0.04694, ctc_loss=0.08769, over 2435099.22 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:45:14,585 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.393e+02 1.624e+02 1.885e+02 3.247e+02, threshold=3.247e+02, percent-clipped=1.0
+2024-08-26 22:45:17,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.12 vs. limit=15.0
+2024-08-26 22:45:18,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173648.0, ans=0.125
+2024-08-26 22:45:23,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=173701.33333333334, ans=0.0
+2024-08-26 22:45:24,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173701.33333333334, ans=0.125
+2024-08-26 22:45:48,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=173808.0, ans=10.0
+2024-08-26 22:46:04,220 INFO [train.py:1114] (0/4) Epoch 14, batch 250, loss[loss=0.1994, simple_loss=0.272, pruned_loss=0.04652, ctc_loss=0.08452, over 19394.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2697, pruned_loss=0.04673, ctc_loss=0.0873, over 2755806.45 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:46:04,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=173914.66666666666, ans=0.125
+2024-08-26 22:46:11,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=173914.66666666666, ans=0.025
+2024-08-26 22:46:12,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.71 vs. limit=10.0
+2024-08-26 22:46:25,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=174021.33333333334, ans=0.125
+2024-08-26 22:46:25,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.33 vs. limit=15.0
+2024-08-26 22:46:32,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=174074.66666666666, ans=0.125
+2024-08-26 22:46:34,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=174074.66666666666, ans=0.125
+2024-08-26 22:46:42,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=174074.66666666666, ans=0.025
+2024-08-26 22:46:46,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=174128.0, ans=0.04949747468305833
+2024-08-26 22:46:51,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174128.0, ans=0.0
+2024-08-26 22:46:51,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174128.0, ans=0.125
+2024-08-26 22:46:52,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.17 vs. limit=22.5
+2024-08-26 22:46:54,992 INFO [train.py:1114] (0/4) Epoch 14, batch 300, loss[loss=0.2, simple_loss=0.2709, pruned_loss=0.04745, ctc_loss=0.08583, over 19521.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2691, pruned_loss=0.04652, ctc_loss=0.08681, over 3001481.83 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 16.0
+2024-08-26 22:46:56,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174181.33333333334, ans=0.125
+2024-08-26 22:46:58,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=174181.33333333334, ans=0.0
+2024-08-26 22:46:59,573 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.470e+02 1.728e+02 2.225e+02 3.956e+02, threshold=3.457e+02, percent-clipped=2.0
+2024-08-26 22:47:09,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.76 vs. limit=22.5
+2024-08-26 22:47:15,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.57 vs. limit=15.0
+2024-08-26 22:47:19,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=174288.0, ans=0.0
+2024-08-26 22:47:29,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=174341.33333333334, ans=0.0
+2024-08-26 22:47:35,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=174394.66666666666, ans=0.125
+2024-08-26 22:47:38,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=174394.66666666666, ans=0.125
+2024-08-26 22:47:43,369 INFO [train.py:1114] (0/4) Epoch 14, batch 350, loss[loss=0.1752, simple_loss=0.2409, pruned_loss=0.04022, ctc_loss=0.07276, over 19759.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2689, pruned_loss=0.04636, ctc_loss=0.08643, over 3191850.72 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 16.0
+2024-08-26 22:47:48,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=174448.0, ans=0.025
+2024-08-26 22:47:51,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=174448.0, ans=0.125
+2024-08-26 22:48:21,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174554.66666666666, ans=0.1
+2024-08-26 22:48:26,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=174554.66666666666, ans=0.2
+2024-08-26 22:48:32,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=174608.0, ans=0.2
+2024-08-26 22:48:37,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174608.0, ans=0.1
+2024-08-26 22:48:48,264 INFO [train.py:1114] (0/4) Epoch 14, batch 400, loss[loss=0.2109, simple_loss=0.2821, pruned_loss=0.05049, ctc_loss=0.09677, over 19496.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2687, pruned_loss=0.04643, ctc_loss=0.0866, over 3342545.93 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:48:52,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.491e+02 1.630e+02 1.842e+02 3.705e+02, threshold=3.261e+02, percent-clipped=1.0
+2024-08-26 22:49:10,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=174821.33333333334, ans=0.025
+2024-08-26 22:49:11,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=174821.33333333334, ans=0.125
+2024-08-26 22:49:39,154 INFO [train.py:1114] (0/4) Epoch 14, batch 450, loss[loss=0.1974, simple_loss=0.2767, pruned_loss=0.0429, ctc_loss=0.08052, over 19609.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2692, pruned_loss=0.0465, ctc_loss=0.08682, over 3450915.96 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:49:40,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=174981.33333333334, ans=0.2
+2024-08-26 22:49:54,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175034.66666666666, ans=0.125
+2024-08-26 22:50:17,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=175141.33333333334, ans=0.125
+2024-08-26 22:50:20,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175194.66666666666, ans=0.1
+2024-08-26 22:50:22,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=175194.66666666666, ans=0.07
+2024-08-26 22:50:23,613 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:50:27,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=175248.0, ans=0.0
+2024-08-26 22:50:27,954 INFO [train.py:1114] (0/4) Epoch 14, batch 500, loss[loss=0.2251, simple_loss=0.3023, pruned_loss=0.05404, ctc_loss=0.09978, over 19681.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2685, pruned_loss=0.0462, ctc_loss=0.08633, over 3546652.26 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:50:32,529 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.438e+02 1.690e+02 1.988e+02 3.244e+02, threshold=3.379e+02, percent-clipped=0.0
+2024-08-26 22:50:55,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=175408.0, ans=0.0
+2024-08-26 22:51:03,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=175408.0, ans=0.125
+2024-08-26 22:51:03,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.85 vs. limit=15.0
+2024-08-26 22:51:08,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=175461.33333333334, ans=0.0
+2024-08-26 22:51:08,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=175461.33333333334, ans=0.025
+2024-08-26 22:51:11,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=12.0
+2024-08-26 22:51:15,968 INFO [train.py:1114] (0/4) Epoch 14, batch 550, loss[loss=0.2151, simple_loss=0.2882, pruned_loss=0.05226, ctc_loss=0.09378, over 19273.00 frames. ], tot_loss[loss=0.198, simple_loss=0.269, pruned_loss=0.04624, ctc_loss=0.08655, over 3608532.80 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:51:26,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=175568.0, ans=12.0
+2024-08-26 22:51:27,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=175568.0, ans=0.0
+2024-08-26 22:51:56,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=175728.0, ans=10.0
+2024-08-26 22:52:05,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=175728.0, ans=0.125
+2024-08-26 22:52:15,480 INFO [train.py:1114] (0/4) Epoch 14, batch 600, loss[loss=0.2013, simple_loss=0.2832, pruned_loss=0.0435, ctc_loss=0.08111, over 19370.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2694, pruned_loss=0.04623, ctc_loss=0.08666, over 3666425.54 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:52:16,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=175781.33333333334, ans=0.125
+2024-08-26 22:52:20,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.434e+02 1.658e+02 1.951e+02 2.764e+02, threshold=3.317e+02, percent-clipped=0.0
+2024-08-26 22:52:23,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=175781.33333333334, ans=0.04949747468305833
+2024-08-26 22:52:25,880 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.14 vs. limit=10.0
+2024-08-26 22:52:28,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=175834.66666666666, ans=0.1
+2024-08-26 22:53:17,362 INFO [train.py:1114] (0/4) Epoch 14, batch 650, loss[loss=0.2037, simple_loss=0.2803, pruned_loss=0.04707, ctc_loss=0.08235, over 19762.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.269, pruned_loss=0.046, ctc_loss=0.08629, over 3716649.43 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:53:20,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.92 vs. limit=15.0
+2024-08-26 22:53:27,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.88 vs. limit=15.0
+2024-08-26 22:53:34,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.70 vs. limit=15.0
+2024-08-26 22:53:47,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=176208.0, ans=0.125
+2024-08-26 22:54:20,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=176261.33333333334, ans=0.0
+2024-08-26 22:54:24,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=176261.33333333334, ans=0.0
+2024-08-26 22:54:29,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=176261.33333333334, ans=0.025
+2024-08-26 22:54:41,331 INFO [train.py:1114] (0/4) Epoch 14, batch 700, loss[loss=0.1876, simple_loss=0.2601, pruned_loss=0.04183, ctc_loss=0.07887, over 19729.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2696, pruned_loss=0.0463, ctc_loss=0.08681, over 3748596.56 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:54:41,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=176314.66666666666, ans=0.125
+2024-08-26 22:54:43,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=176314.66666666666, ans=15.0
+2024-08-26 22:54:52,781 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.446e+02 1.597e+02 2.123e+02 3.826e+02, threshold=3.195e+02, percent-clipped=1.0
+2024-08-26 22:55:10,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=176368.0, ans=0.125
+2024-08-26 22:55:18,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=176368.0, ans=0.2
+2024-08-26 22:55:23,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=176421.33333333334, ans=0.125
+2024-08-26 22:55:30,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176421.33333333334, ans=0.1
+2024-08-26 22:57:53,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.18 vs. limit=10.0
+2024-08-26 22:59:23,425 INFO [train.py:1114] (0/4) Epoch 14, batch 750, loss[loss=0.19, simple_loss=0.2664, pruned_loss=0.04161, ctc_loss=0.07573, over 19505.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.269, pruned_loss=0.0461, ctc_loss=0.08641, over 3775051.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 16.0
+2024-08-26 23:00:10,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=176581.33333333334, ans=0.2
+2024-08-26 23:00:10,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176581.33333333334, ans=0.125
+2024-08-26 23:00:36,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=176634.66666666666, ans=0.125
+2024-08-26 23:00:42,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=176634.66666666666, ans=0.125
+2024-08-26 23:01:09,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176688.0, ans=0.125
+2024-08-26 23:01:14,076 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.61 vs. limit=15.0
+2024-08-26 23:01:23,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.22 vs. limit=22.5
+2024-08-26 23:01:43,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=176794.66666666666, ans=0.125
+2024-08-26 23:01:45,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.83 vs. limit=15.0
+2024-08-26 23:01:58,980 INFO [train.py:1114] (0/4) Epoch 14, batch 800, loss[loss=0.1796, simple_loss=0.25, pruned_loss=0.03955, ctc_loss=0.07543, over 19409.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2689, pruned_loss=0.04633, ctc_loss=0.08658, over 3795596.12 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:02:00,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176848.0, ans=0.125
+2024-08-26 23:02:11,891 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.464e+02 1.718e+02 2.120e+02 3.590e+02, threshold=3.437e+02, percent-clipped=3.0
+2024-08-26 23:05:15,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=177008.0, ans=0.125
+2024-08-26 23:05:52,830 INFO [train.py:1114] (0/4) Epoch 14, batch 850, loss[loss=0.2166, simple_loss=0.2915, pruned_loss=0.05118, ctc_loss=0.09808, over 19642.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2685, pruned_loss=0.04622, ctc_loss=0.08627, over 3814865.22 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:05:54,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.93 vs. limit=22.5
+2024-08-26 23:05:54,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.26 vs. limit=22.5
+2024-08-26 23:05:58,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=177114.66666666666, ans=0.0
+2024-08-26 23:06:22,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=177274.66666666666, ans=0.025
+2024-08-26 23:06:44,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177328.0, ans=0.1
+2024-08-26 23:06:46,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177381.33333333334, ans=0.1
+2024-08-26 23:06:46,761 INFO [train.py:1114] (0/4) Epoch 14, batch 900, loss[loss=0.1824, simple_loss=0.2537, pruned_loss=0.04053, ctc_loss=0.07493, over 19784.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.269, pruned_loss=0.04654, ctc_loss=0.08687, over 3818440.25 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:06:52,135 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.429e+02 1.657e+02 1.986e+02 3.410e+02, threshold=3.315e+02, percent-clipped=0.0
+2024-08-26 23:06:59,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.40 vs. limit=15.0
+2024-08-26 23:07:19,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.18 vs. limit=15.0
+2024-08-26 23:07:19,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=177541.33333333334, ans=6.0
+2024-08-26 23:07:26,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=177594.66666666666, ans=0.125
+2024-08-26 23:07:30,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177594.66666666666, ans=0.1
+2024-08-26 23:07:36,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=177594.66666666666, ans=0.2
+2024-08-26 23:07:38,548 INFO [train.py:1114] (0/4) Epoch 14, batch 950, loss[loss=0.1773, simple_loss=0.2506, pruned_loss=0.03777, ctc_loss=0.071, over 19503.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2692, pruned_loss=0.04662, ctc_loss=0.08697, over 3820636.09 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:07:48,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=177701.33333333334, ans=0.0
+2024-08-26 23:07:51,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=177701.33333333334, ans=0.0
+2024-08-26 23:08:00,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1
+2024-08-26 23:08:08,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=177808.0, ans=0.0
+2024-08-26 23:08:08,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=177808.0, ans=0.0
+2024-08-26 23:08:10,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=177808.0, ans=0.125
+2024-08-26 23:08:12,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=177808.0, ans=0.125
+2024-08-26 23:08:18,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177861.33333333334, ans=0.125
+2024-08-26 23:08:28,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=177914.66666666666, ans=0.1
+2024-08-26 23:08:35,560 INFO [train.py:1114] (0/4) Epoch 14, batch 1000, loss[loss=0.1804, simple_loss=0.2494, pruned_loss=0.04031, ctc_loss=0.07686, over 19854.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2698, pruned_loss=0.04698, ctc_loss=0.08778, over 3816509.39 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:08:41,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.435e+02 1.639e+02 1.944e+02 3.185e+02, threshold=3.279e+02, percent-clipped=0.0
+2024-08-26 23:09:07,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=178021.33333333334, ans=0.125
+2024-08-26 23:09:10,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=178074.66666666666, ans=0.2
+2024-08-26 23:09:18,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=178128.0, ans=0.0
+2024-08-26 23:09:20,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=178128.0, ans=0.09899494936611666
+2024-08-26 23:09:20,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=178128.0, ans=0.0
+2024-08-26 23:09:29,120 INFO [train.py:1114] (0/4) Epoch 14, batch 1050, loss[loss=0.199, simple_loss=0.2743, pruned_loss=0.04511, ctc_loss=0.0839, over 19845.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2691, pruned_loss=0.0468, ctc_loss=0.08753, over 3823872.80 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:09:41,061 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.86 vs. limit=22.5
+2024-08-26 23:09:49,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=178288.0, ans=0.2
+2024-08-26 23:09:54,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.58 vs. limit=15.0
+2024-08-26 23:10:09,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178341.33333333334, ans=0.125
+2024-08-26 23:10:11,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=178341.33333333334, ans=0.125
+2024-08-26 23:10:15,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178394.66666666666, ans=0.1
+2024-08-26 23:10:15,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=178394.66666666666, ans=0.125
+2024-08-26 23:10:20,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=178394.66666666666, ans=0.2
+2024-08-26 23:10:47,588 INFO [train.py:1114] (0/4) Epoch 14, batch 1100, loss[loss=0.1744, simple_loss=0.2511, pruned_loss=0.03523, ctc_loss=0.06831, over 19612.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2685, pruned_loss=0.0464, ctc_loss=0.08688, over 3831458.89 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:10:53,001 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.389e+02 1.598e+02 1.774e+02 3.032e+02, threshold=3.197e+02, percent-clipped=0.0
+2024-08-26 23:11:08,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=178554.66666666666, ans=0.0
+2024-08-26 23:11:32,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=178661.33333333334, ans=0.0
+2024-08-26 23:11:37,944 INFO [train.py:1114] (0/4) Epoch 14, batch 1150, loss[loss=0.1779, simple_loss=0.255, pruned_loss=0.03585, ctc_loss=0.07306, over 19593.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2685, pruned_loss=0.04657, ctc_loss=0.08712, over 3828830.16 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:11:47,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.56 vs. limit=15.0
+2024-08-26 23:11:47,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=178768.0, ans=0.0
+2024-08-26 23:11:58,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.94 vs. limit=22.5
+2024-08-26 23:11:59,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=15.0
+2024-08-26 23:12:03,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=178821.33333333334, ans=0.0
+2024-08-26 23:12:04,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178821.33333333334, ans=0.125
+2024-08-26 23:12:31,155 INFO [train.py:1114] (0/4) Epoch 14, batch 1200, loss[loss=0.2212, simple_loss=0.29, pruned_loss=0.05521, ctc_loss=0.105, over 19839.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2696, pruned_loss=0.04664, ctc_loss=0.08752, over 3825820.21 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:12:36,809 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.458e+02 1.687e+02 2.139e+02 4.936e+02, threshold=3.375e+02, percent-clipped=2.0
+2024-08-26 23:12:41,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179034.66666666666, ans=0.0
+2024-08-26 23:12:46,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=179034.66666666666, ans=0.125
+2024-08-26 23:13:06,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=179141.33333333334, ans=0.1
+2024-08-26 23:13:20,040 INFO [train.py:1114] (0/4) Epoch 14, batch 1250, loss[loss=0.2145, simple_loss=0.2854, pruned_loss=0.05228, ctc_loss=0.0975, over 19531.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2698, pruned_loss=0.04649, ctc_loss=0.08711, over 3843334.00 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:13:22,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=179248.0, ans=10.0
+2024-08-26 23:13:31,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179301.33333333334, ans=0.1
+2024-08-26 23:13:54,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=179408.0, ans=0.015
+2024-08-26 23:13:56,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=179408.0, ans=0.04949747468305833
+2024-08-26 23:14:01,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179461.33333333334, ans=0.125
+2024-08-26 23:14:04,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.95 vs. limit=12.0
+2024-08-26 23:14:06,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=179461.33333333334, ans=0.0
+2024-08-26 23:14:12,513 INFO [train.py:1114] (0/4) Epoch 14, batch 1300, loss[loss=0.2306, simple_loss=0.2948, pruned_loss=0.06118, ctc_loss=0.1101, over 18833.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.269, pruned_loss=0.04617, ctc_loss=0.08628, over 3847330.15 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:14:13,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=179514.66666666666, ans=0.0
+2024-08-26 23:14:16,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=179514.66666666666, ans=10.0
+2024-08-26 23:14:19,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.402e+02 1.628e+02 1.914e+02 2.926e+02, threshold=3.256e+02, percent-clipped=0.0
+2024-08-26 23:14:21,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179568.0, ans=0.125
+2024-08-26 23:14:21,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=179568.0, ans=0.125
+2024-08-26 23:14:30,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179621.33333333334, ans=0.1
+2024-08-26 23:14:33,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.34 vs. limit=10.0
+2024-08-26 23:14:35,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179621.33333333334, ans=0.125
+2024-08-26 23:14:44,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.86 vs. limit=15.0
+2024-08-26 23:14:47,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=179674.66666666666, ans=0.125
+2024-08-26 23:14:48,315 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.24 vs. limit=15.0
+2024-08-26 23:14:52,392 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:14:54,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=179728.0, ans=0.0
+2024-08-26 23:14:58,662 INFO [train.py:1114] (0/4) Epoch 14, batch 1350, loss[loss=0.1834, simple_loss=0.2614, pruned_loss=0.03719, ctc_loss=0.07722, over 19770.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2688, pruned_loss=0.04611, ctc_loss=0.086, over 3858687.41 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:15:01,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179781.33333333334, ans=0.125
+2024-08-26 23:15:01,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=179781.33333333334, ans=0.025
+2024-08-26 23:16:30,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179834.66666666666, ans=0.1
+2024-08-26 23:16:38,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=179888.0, ans=0.125
+2024-08-26 23:16:41,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=179941.33333333334, ans=12.0
+2024-08-26 23:16:59,385 INFO [train.py:1114] (0/4) Epoch 14, batch 1400, loss[loss=0.1794, simple_loss=0.2505, pruned_loss=0.03925, ctc_loss=0.07436, over 19674.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2689, pruned_loss=0.04605, ctc_loss=0.08606, over 3864940.55 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:17:07,629 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.482e+02 1.624e+02 2.003e+02 3.142e+02, threshold=3.248e+02, percent-clipped=0.0
+2024-08-26 23:17:16,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=180101.33333333334, ans=0.0
+2024-08-26 23:17:18,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=180101.33333333334, ans=0.025
+2024-08-26 23:17:38,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.34 vs. limit=6.0
+2024-08-26 23:17:41,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=180261.33333333334, ans=0.125
+2024-08-26 23:17:50,483 INFO [train.py:1114] (0/4) Epoch 14, batch 1450, loss[loss=0.2175, simple_loss=0.2829, pruned_loss=0.05503, ctc_loss=0.1049, over 19668.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2695, pruned_loss=0.04629, ctc_loss=0.08675, over 3862868.32 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:17:54,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180314.66666666666, ans=0.125
+2024-08-26 23:18:43,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=180528.0, ans=0.2
+2024-08-26 23:18:46,184 INFO [train.py:1114] (0/4) Epoch 14, batch 1500, loss[loss=0.2046, simple_loss=0.275, pruned_loss=0.04856, ctc_loss=0.09291, over 19588.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2696, pruned_loss=0.04624, ctc_loss=0.08677, over 3861664.46 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:18:49,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=180581.33333333334, ans=0.125
+2024-08-26 23:18:51,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=180581.33333333334, ans=0.025
+2024-08-26 23:18:52,950 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.461e+02 1.607e+02 1.928e+02 3.862e+02, threshold=3.214e+02, percent-clipped=2.0
+2024-08-26 23:18:59,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180634.66666666666, ans=0.125
+2024-08-26 23:19:19,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=180741.33333333334, ans=0.1
+2024-08-26 23:20:21,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=180794.66666666666, ans=0.125
+2024-08-26 23:20:27,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=180794.66666666666, ans=0.125
+2024-08-26 23:20:30,222 INFO [train.py:1114] (0/4) Epoch 14, batch 1550, loss[loss=0.2084, simple_loss=0.2817, pruned_loss=0.04974, ctc_loss=0.0887, over 19602.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2693, pruned_loss=0.04643, ctc_loss=0.08696, over 3847541.67 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:20:36,905 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:20:47,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180901.33333333334, ans=0.0
+2024-08-26 23:20:54,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180954.66666666666, ans=0.1
+2024-08-26 23:21:05,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=181008.0, ans=0.1
+2024-08-26 23:21:08,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=181061.33333333334, ans=0.0
+2024-08-26 23:21:12,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=181061.33333333334, ans=22.5
+2024-08-26 23:21:12,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=181061.33333333334, ans=0.09899494936611666
+2024-08-26 23:21:20,677 INFO [train.py:1114] (0/4) Epoch 14, batch 1600, loss[loss=0.1971, simple_loss=0.2702, pruned_loss=0.0449, ctc_loss=0.08529, over 19828.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2688, pruned_loss=0.04625, ctc_loss=0.0864, over 3837473.80 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:21:23,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.50 vs. limit=15.0
+2024-08-26 23:21:25,498 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:21:27,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.461e+02 1.627e+02 1.971e+02 3.033e+02, threshold=3.255e+02, percent-clipped=0.0
+2024-08-26 23:21:32,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=181168.0, ans=0.2
+2024-08-26 23:21:32,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.78 vs. limit=15.0
+2024-08-26 23:21:35,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181168.0, ans=0.0
+2024-08-26 23:21:38,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=181168.0, ans=0.2
+2024-08-26 23:21:38,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=181168.0, ans=0.125
+2024-08-26 23:23:54,622 INFO [train.py:1114] (0/4) Epoch 14, batch 1650, loss[loss=0.21, simple_loss=0.2857, pruned_loss=0.04878, ctc_loss=0.09204, over 19654.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2688, pruned_loss=0.04636, ctc_loss=0.08655, over 3833134.13 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:24:05,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=181434.66666666666, ans=0.125
+2024-08-26 23:24:17,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=181488.0, ans=0.0
+2024-08-26 23:24:20,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181488.0, ans=0.1
+2024-08-26 23:24:35,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=181594.66666666666, ans=0.0
+2024-08-26 23:24:38,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=181594.66666666666, ans=0.0
+2024-08-26 23:24:40,741 INFO [train.py:1114] (0/4) Epoch 14, batch 1700, loss[loss=0.1667, simple_loss=0.2348, pruned_loss=0.03544, ctc_loss=0.06915, over 19703.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2686, pruned_loss=0.04613, ctc_loss=0.08606, over 3847514.77 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:24:47,146 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.441e+02 1.691e+02 2.079e+02 3.382e+02, threshold=3.381e+02, percent-clipped=3.0
+2024-08-26 23:24:50,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=181701.33333333334, ans=0.025
+2024-08-26 23:24:53,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=181701.33333333334, ans=0.0
+2024-08-26 23:25:00,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=181754.66666666666, ans=0.2
+2024-08-26 23:25:06,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.16 vs. limit=22.5
+2024-08-26 23:25:07,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.77 vs. limit=22.5
+2024-08-26 23:25:08,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181808.0, ans=0.1
+2024-08-26 23:25:17,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=181861.33333333334, ans=0.2
+2024-08-26 23:25:19,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=181861.33333333334, ans=0.0
+2024-08-26 23:25:19,935 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:25:24,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=181914.66666666666, ans=0.125
+2024-08-26 23:25:25,111 INFO [train.py:1114] (0/4) Epoch 14, batch 1750, loss[loss=0.1709, simple_loss=0.2306, pruned_loss=0.04075, ctc_loss=0.07453, over 19617.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2683, pruned_loss=0.04604, ctc_loss=0.08583, over 3852353.23 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:25:48,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182021.33333333334, ans=0.1
+2024-08-26 23:25:48,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0
+2024-08-26 23:25:55,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=182074.66666666666, ans=0.125
+2024-08-26 23:26:08,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.68 vs. limit=15.0
+2024-08-26 23:26:09,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=182074.66666666666, ans=0.2
+2024-08-26 23:26:19,464 INFO [train.py:1114] (0/4) Epoch 14, batch 1800, loss[loss=0.1731, simple_loss=0.2519, pruned_loss=0.03449, ctc_loss=0.06336, over 19592.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2686, pruned_loss=0.04599, ctc_loss=0.08589, over 3852353.41 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:26:26,559 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.533e+02 1.884e+02 2.505e+02 4.097e+02, threshold=3.767e+02, percent-clipped=5.0
+2024-08-26 23:26:48,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=182341.33333333334, ans=0.0
+2024-08-26 23:26:53,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.22 vs. limit=15.0
+2024-08-26 23:26:58,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182394.66666666666, ans=0.125
+2024-08-26 23:27:05,183 INFO [train.py:1114] (0/4) Epoch 14, batch 1850, loss[loss=0.2013, simple_loss=0.2781, pruned_loss=0.04486, ctc_loss=0.08688, over 19596.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2685, pruned_loss=0.04574, ctc_loss=0.0855, over 3854748.92 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:27:05,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=182448.0, ans=0.07
+2024-08-26 23:27:09,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=182448.0, ans=0.0
+2024-08-26 23:28:45,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=182501.33333333334, ans=0.0
+2024-08-26 23:28:56,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=182608.0, ans=0.0
+2024-08-26 23:29:14,708 INFO [train.py:1114] (0/4) Epoch 14, batch 1900, loss[loss=0.2033, simple_loss=0.2866, pruned_loss=0.04396, ctc_loss=0.08026, over 19628.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2692, pruned_loss=0.0462, ctc_loss=0.08607, over 3861097.39 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:29:21,593 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.217e+02 1.441e+02 1.690e+02 2.071e+02 3.452e+02, threshold=3.379e+02, percent-clipped=0.0
+2024-08-26 23:29:21,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=182714.66666666666, ans=0.125
+2024-08-26 23:29:22,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=182768.0, ans=0.0
+2024-08-26 23:29:27,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.88 vs. limit=15.0
+2024-08-26 23:29:33,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=15.0
+2024-08-26 23:29:57,802 INFO [train.py:1114] (0/4) Epoch 14, batch 1950, loss[loss=0.1791, simple_loss=0.2533, pruned_loss=0.03814, ctc_loss=0.07167, over 19583.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2702, pruned_loss=0.04632, ctc_loss=0.08628, over 3870300.04 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:30:04,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=182981.33333333334, ans=0.0
+2024-08-26 23:30:25,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183141.33333333334, ans=0.1
+2024-08-26 23:30:25,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183141.33333333334, ans=0.125
+2024-08-26 23:30:44,845 INFO [train.py:1114] (0/4) Epoch 14, batch 2000, loss[loss=0.1716, simple_loss=0.2422, pruned_loss=0.03659, ctc_loss=0.06964, over 19643.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2706, pruned_loss=0.04661, ctc_loss=0.08682, over 3854479.09 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:30:50,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=183248.0, ans=0.0
+2024-08-26 23:30:52,059 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.411e+02 1.571e+02 1.845e+02 2.838e+02, threshold=3.143e+02, percent-clipped=0.0
+2024-08-26 23:32:08,134 INFO [train.py:1114] (0/4) Epoch 14, batch 2050, loss[loss=0.1835, simple_loss=0.245, pruned_loss=0.04475, ctc_loss=0.08145, over 19725.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2699, pruned_loss=0.04672, ctc_loss=0.08695, over 3850346.95 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:32:15,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=183514.66666666666, ans=0.025
+2024-08-26 23:32:25,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183621.33333333334, ans=0.1
+2024-08-26 23:32:26,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=183621.33333333334, ans=0.2
+2024-08-26 23:32:27,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0
+2024-08-26 23:32:51,514 INFO [train.py:1114] (0/4) Epoch 14, batch 2100, loss[loss=0.187, simple_loss=0.2598, pruned_loss=0.04142, ctc_loss=0.07828, over 19758.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2686, pruned_loss=0.04595, ctc_loss=0.08579, over 3857367.71 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:32:58,375 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.491e+02 1.652e+02 1.860e+02 2.729e+02, threshold=3.304e+02, percent-clipped=0.0
+2024-08-26 23:34:03,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183941.33333333334, ans=0.1
+2024-08-26 23:34:16,393 INFO [train.py:1114] (0/4) Epoch 14, batch 2150, loss[loss=0.2013, simple_loss=0.2736, pruned_loss=0.04667, ctc_loss=0.08918, over 19570.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2683, pruned_loss=0.04581, ctc_loss=0.08546, over 3868244.08 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:34:42,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=184208.0, ans=0.0
+2024-08-26 23:34:47,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=184208.0, ans=0.125
+2024-08-26 23:34:59,941 INFO [train.py:1114] (0/4) Epoch 14, batch 2200, loss[loss=0.197, simple_loss=0.2741, pruned_loss=0.04371, ctc_loss=0.08128, over 19567.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2685, pruned_loss=0.04616, ctc_loss=0.08597, over 3866837.54 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:35:01,844 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:35:01,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184314.66666666666, ans=0.1
+2024-08-26 23:35:04,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184314.66666666666, ans=0.125
+2024-08-26 23:35:06,952 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.447e+02 1.750e+02 2.552e+02 4.295e+02, threshold=3.499e+02, percent-clipped=8.0
+2024-08-26 23:35:11,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.08 vs. limit=8.0
+2024-08-26 23:35:14,126 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:35:22,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=184421.33333333334, ans=0.125
+2024-08-26 23:35:24,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=12.0
+2024-08-26 23:35:43,880 INFO [train.py:1114] (0/4) Epoch 14, batch 2250, loss[loss=0.1938, simple_loss=0.2744, pruned_loss=0.04161, ctc_loss=0.07507, over 19595.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2688, pruned_loss=0.04611, ctc_loss=0.08584, over 3866495.61 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:35:46,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=184581.33333333334, ans=0.125
+2024-08-26 23:35:47,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=184581.33333333334, ans=0.0
+2024-08-26 23:35:50,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.59 vs.
limit=15.0 +2024-08-26 23:35:51,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=184634.66666666666, ans=0.0 +2024-08-26 23:36:01,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184688.0, ans=0.125 +2024-08-26 23:36:10,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184741.33333333334, ans=0.1 +2024-08-26 23:36:11,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=184741.33333333334, ans=0.025 +2024-08-26 23:36:24,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.39 vs. limit=22.5 +2024-08-26 23:36:27,374 INFO [train.py:1114] (0/4) Epoch 14, batch 2300, loss[loss=0.1859, simple_loss=0.2514, pruned_loss=0.043, ctc_loss=0.08609, over 19508.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.268, pruned_loss=0.04608, ctc_loss=0.08583, over 3860452.97 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:36:34,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184848.0, ans=0.125 +2024-08-26 23:36:35,134 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.662e+02 2.114e+02 3.033e+02, threshold=3.324e+02, percent-clipped=0.0 +2024-08-26 23:36:44,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.88 vs. limit=12.0 +2024-08-26 23:36:52,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184954.66666666666, ans=0.1 +2024-08-26 23:37:04,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=185061.33333333334, ans=0.0 +2024-08-26 23:37:10,950 INFO [train.py:1114] (0/4) Epoch 14, batch 2350, loss[loss=0.2183, simple_loss=0.2851, pruned_loss=0.05581, ctc_loss=0.0996, over 19697.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2679, pruned_loss=0.04623, ctc_loss=0.08598, over 3864029.08 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:37:15,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=185114.66666666666, ans=0.0 +2024-08-26 23:37:16,754 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:37:22,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=185168.0, ans=0.0 +2024-08-26 23:37:28,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=185221.33333333334, ans=0.125 +2024-08-26 23:37:34,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.47 vs. 
limit=15.0 +2024-08-26 23:37:42,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=185274.66666666666, ans=0.125 +2024-08-26 23:37:55,033 INFO [train.py:1114] (0/4) Epoch 14, batch 2400, loss[loss=0.2109, simple_loss=0.2807, pruned_loss=0.05151, ctc_loss=0.09517, over 19439.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2702, pruned_loss=0.04721, ctc_loss=0.08762, over 3858659.76 frames. ], batch size: 67, lr: 1.05e-02, grad_scale: 32.0 +2024-08-26 23:38:02,848 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.569e+02 1.843e+02 2.357e+02 3.475e+02, threshold=3.685e+02, percent-clipped=2.0 +2024-08-26 23:38:10,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=185434.66666666666, ans=0.0 +2024-08-26 23:38:11,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=185488.0, ans=0.0 +2024-08-26 23:38:28,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0 +2024-08-26 23:38:39,316 INFO [train.py:1114] (0/4) Epoch 14, batch 2450, loss[loss=0.2632, simple_loss=0.3066, pruned_loss=0.0791, ctc_loss=0.1543, over 13414.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2736, pruned_loss=0.04949, ctc_loss=0.09229, over 3733724.63 frames. ], batch size: 143, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:38:43,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185648.0, ans=0.0 +2024-08-26 23:38:45,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=185648.0, ans=0.125 +2024-08-26 23:38:46,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.01 vs. limit=22.5 +2024-08-26 23:38:47,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=185701.33333333334, ans=0.07 +2024-08-26 23:38:54,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=185701.33333333334, ans=0.0 +2024-08-26 23:39:13,272 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-14.pt +2024-08-26 23:40:44,468 INFO [train.py:1114] (0/4) Epoch 15, batch 0, loss[loss=0.196, simple_loss=0.2639, pruned_loss=0.04552, ctc_loss=0.09263, over 19411.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2639, pruned_loss=0.04552, ctc_loss=0.09263, over 19411.00 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 32.0 +2024-08-26 23:40:46,075 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-26 23:41:25,227 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1751, simple_loss=0.2686, pruned_loss=0.03035, ctc_loss=0.05216, over 944034.00 frames. 
+2024-08-26 23:41:25,227 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-26 23:41:30,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=185856.0, ans=0.2 +2024-08-26 23:41:31,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=185856.0, ans=0.125 +2024-08-26 23:41:46,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185856.0, ans=0.125 +2024-08-26 23:42:30,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=185962.66666666666, ans=0.09899494936611666 +2024-08-26 23:42:34,498 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.661e+02 1.811e+02 2.041e+02 3.400e+02, threshold=3.623e+02, percent-clipped=0.0 +2024-08-26 23:42:55,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-08-26 23:45:26,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.66 vs. limit=12.0 +2024-08-26 23:49:22,216 INFO [train.py:1114] (0/4) Epoch 15, batch 50, loss[loss=0.1927, simple_loss=0.2565, pruned_loss=0.04794, ctc_loss=0.08278, over 19710.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2715, pruned_loss=0.04669, ctc_loss=0.08744, over 843490.78 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 16.0 +2024-08-26 23:51:28,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=186122.66666666666, ans=0.07 +2024-08-26 23:51:49,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=186122.66666666666, ans=0.125 +2024-08-26 23:53:59,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=186282.66666666666, ans=0.2 +2024-08-26 23:54:00,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=186282.66666666666, ans=0.125 +2024-08-26 23:54:48,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.82 vs. limit=15.0 +2024-08-26 23:58:27,660 INFO [train.py:1114] (0/4) Epoch 15, batch 100, loss[loss=0.1851, simple_loss=0.2553, pruned_loss=0.04139, ctc_loss=0.08035, over 19716.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.272, pruned_loss=0.0464, ctc_loss=0.0865, over 1497089.71 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 16.0 +2024-08-26 23:58:28,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.96 vs. limit=15.0 +2024-08-27 00:06:52,641 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.493e+02 1.771e+02 2.166e+02 3.428e+02, threshold=3.543e+02, percent-clipped=0.0 +2024-08-27 00:09:13,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. 
limit=15.0 +2024-08-27 00:10:02,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-08-27 00:12:03,631 INFO [train.py:1114] (0/4) Epoch 15, batch 150, loss[loss=0.1768, simple_loss=0.2447, pruned_loss=0.03921, ctc_loss=0.07611, over 19727.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2694, pruned_loss=0.0456, ctc_loss=0.08488, over 2025845.62 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:12:15,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186656.0, ans=0.125 +2024-08-27 00:12:29,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=186709.33333333334, ans=0.2 +2024-08-27 00:14:20,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=186816.0, ans=0.125 +2024-08-27 00:17:05,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186869.33333333334, ans=0.125 +2024-08-27 00:17:10,133 INFO [train.py:1114] (0/4) Epoch 15, batch 200, loss[loss=0.2088, simple_loss=0.2834, pruned_loss=0.04876, ctc_loss=0.09197, over 18056.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2676, pruned_loss=0.04537, ctc_loss=0.08462, over 2434205.28 frames. ], batch size: 85, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:17:44,750 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.68 vs. limit=10.0 +2024-08-27 00:17:52,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=15.0 +2024-08-27 00:17:54,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=187029.33333333334, ans=0.125 +2024-08-27 00:17:59,322 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.435e+02 1.602e+02 1.959e+02 3.588e+02, threshold=3.205e+02, percent-clipped=1.0 +2024-08-27 00:18:47,705 INFO [train.py:1114] (0/4) Epoch 15, batch 250, loss[loss=0.2055, simple_loss=0.2837, pruned_loss=0.0469, ctc_loss=0.08378, over 19445.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.268, pruned_loss=0.04526, ctc_loss=0.08446, over 2754227.35 frames. 
], batch size: 67, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:19:50,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=187189.33333333334, ans=0.05 +2024-08-27 00:20:05,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=187242.66666666666, ans=0.0 +2024-08-27 00:20:13,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=187296.0, ans=0.125 +2024-08-27 00:20:18,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=187349.33333333334, ans=0.5 +2024-08-27 00:20:27,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=187349.33333333334, ans=15.0 +2024-08-27 00:20:34,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=187402.66666666666, ans=0.125 +2024-08-27 00:21:10,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=187402.66666666666, ans=0.125 +2024-08-27 00:21:12,352 INFO [train.py:1114] (0/4) Epoch 15, batch 300, loss[loss=0.2071, simple_loss=0.2839, pruned_loss=0.04776, ctc_loss=0.08696, over 19512.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2672, pruned_loss=0.04498, ctc_loss=0.084, over 2999797.41 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:21:17,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187456.0, ans=0.1 +2024-08-27 00:21:25,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=187509.33333333334, ans=0.125 +2024-08-27 00:22:00,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.02 vs. limit=10.0 +2024-08-27 00:22:03,851 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.482e+02 1.757e+02 2.250e+02 4.561e+02, threshold=3.514e+02, percent-clipped=7.0 +2024-08-27 00:22:10,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=187562.66666666666, ans=0.025 +2024-08-27 00:22:31,076 INFO [train.py:1114] (0/4) Epoch 15, batch 350, loss[loss=0.1699, simple_loss=0.2431, pruned_loss=0.03491, ctc_loss=0.06708, over 19746.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2677, pruned_loss=0.04503, ctc_loss=0.08425, over 3189316.06 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:24:54,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=187776.0, ans=0.125 +2024-08-27 00:24:59,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=187829.33333333334, ans=0.95 +2024-08-27 00:25:03,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. 
limit=6.0 +2024-08-27 00:25:09,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187882.66666666666, ans=0.1 +2024-08-27 00:25:25,424 INFO [train.py:1114] (0/4) Epoch 15, batch 400, loss[loss=0.1937, simple_loss=0.2722, pruned_loss=0.04201, ctc_loss=0.07773, over 19497.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2672, pruned_loss=0.04494, ctc_loss=0.08406, over 3342205.60 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:25:27,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187989.33333333334, ans=0.1 +2024-08-27 00:25:31,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=187989.33333333334, ans=0.05 +2024-08-27 00:25:46,844 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.414e+02 1.733e+02 2.120e+02 3.671e+02, threshold=3.466e+02, percent-clipped=1.0 +2024-08-27 00:26:13,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=188149.33333333334, ans=0.2 +2024-08-27 00:26:33,886 INFO [train.py:1114] (0/4) Epoch 15, batch 450, loss[loss=0.1923, simple_loss=0.2748, pruned_loss=0.03979, ctc_loss=0.07554, over 19609.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2679, pruned_loss=0.04525, ctc_loss=0.08483, over 3449783.93 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:26:34,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188256.0, ans=0.125 +2024-08-27 00:26:52,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=188309.33333333334, ans=0.2 +2024-08-27 00:27:33,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.65 vs. limit=15.0 +2024-08-27 00:27:54,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=188469.33333333334, ans=0.125 +2024-08-27 00:27:58,671 INFO [train.py:1114] (0/4) Epoch 15, batch 500, loss[loss=0.1981, simple_loss=0.2808, pruned_loss=0.04277, ctc_loss=0.07459, over 19712.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2671, pruned_loss=0.04488, ctc_loss=0.08399, over 3544980.18 frames. 
], batch size: 63, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:28:15,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188576.0, ans=0.1 +2024-08-27 00:28:25,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.423e+02 1.716e+02 2.052e+02 3.766e+02, threshold=3.431e+02, percent-clipped=1.0 +2024-08-27 00:28:25,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=188629.33333333334, ans=0.0 +2024-08-27 00:28:25,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188629.33333333334, ans=0.125 +2024-08-27 00:28:44,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=188682.66666666666, ans=0.125 +2024-08-27 00:28:45,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=188682.66666666666, ans=0.125 +2024-08-27 00:28:47,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=188736.0, ans=0.025 +2024-08-27 00:28:49,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188736.0, ans=0.1 +2024-08-27 00:28:50,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=188736.0, ans=0.125 +2024-08-27 00:28:56,803 INFO [train.py:1114] (0/4) Epoch 15, batch 550, loss[loss=0.208, simple_loss=0.282, pruned_loss=0.04897, ctc_loss=0.08994, over 19249.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2675, pruned_loss=0.04532, ctc_loss=0.08468, over 3605052.13 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:29:27,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=188789.33333333334, ans=0.0 +2024-08-27 00:30:15,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=189002.66666666666, ans=0.2 +2024-08-27 00:30:17,657 INFO [train.py:1114] (0/4) Epoch 15, batch 600, loss[loss=0.2181, simple_loss=0.2937, pruned_loss=0.05182, ctc_loss=0.09719, over 19440.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2681, pruned_loss=0.04558, ctc_loss=0.08514, over 3663301.89 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:31:07,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.25 vs. 
limit=22.5 +2024-08-27 00:31:13,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189109.33333333334, ans=0.125 +2024-08-27 00:31:17,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=189162.66666666666, ans=0.125 +2024-08-27 00:31:17,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=189162.66666666666, ans=0.125 +2024-08-27 00:31:18,196 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.468e+02 1.719e+02 2.297e+02 4.329e+02, threshold=3.438e+02, percent-clipped=2.0 +2024-08-27 00:31:20,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189162.66666666666, ans=0.125 +2024-08-27 00:31:52,649 INFO [train.py:1114] (0/4) Epoch 15, batch 650, loss[loss=0.1844, simple_loss=0.2629, pruned_loss=0.0385, ctc_loss=0.07236, over 19768.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2673, pruned_loss=0.04517, ctc_loss=0.08448, over 3714569.97 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:31:54,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189322.66666666666, ans=0.0 +2024-08-27 00:31:59,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=189322.66666666666, ans=0.125 +2024-08-27 00:32:19,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=189429.33333333334, ans=0.2 +2024-08-27 00:32:21,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=189482.66666666666, ans=0.125 +2024-08-27 00:33:04,270 INFO [train.py:1114] (0/4) Epoch 15, batch 700, loss[loss=0.1749, simple_loss=0.2461, pruned_loss=0.037, ctc_loss=0.07441, over 19719.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2676, pruned_loss=0.0451, ctc_loss=0.08439, over 3746563.06 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:33:08,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.86 vs. 
limit=10.0 +2024-08-27 00:33:13,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=189642.66666666666, ans=0.125 +2024-08-27 00:33:14,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189642.66666666666, ans=0.125 +2024-08-27 00:33:59,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=189642.66666666666, ans=0.0 +2024-08-27 00:34:02,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189696.0, ans=0.125 +2024-08-27 00:34:03,643 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.548e+02 1.878e+02 2.334e+02 4.066e+02, threshold=3.756e+02, percent-clipped=4.0 +2024-08-27 00:34:07,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=189696.0, ans=0.0 +2024-08-27 00:34:08,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=189696.0, ans=0.5 +2024-08-27 00:34:09,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=189696.0, ans=0.0 +2024-08-27 00:34:12,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189749.33333333334, ans=0.0 +2024-08-27 00:34:12,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.12 vs. limit=22.5 +2024-08-27 00:35:17,144 INFO [train.py:1114] (0/4) Epoch 15, batch 750, loss[loss=0.204, simple_loss=0.2828, pruned_loss=0.04483, ctc_loss=0.08891, over 19502.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2672, pruned_loss=0.04503, ctc_loss=0.08404, over 3773103.81 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:35:17,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=189856.0, ans=0.0 +2024-08-27 00:35:18,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189856.0, ans=0.1 +2024-08-27 00:35:19,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.89 vs. limit=22.5 +2024-08-27 00:35:25,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189856.0, ans=0.125 +2024-08-27 00:35:34,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=189909.33333333334, ans=0.025 +2024-08-27 00:35:44,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190016.0, ans=0.125 +2024-08-27 00:35:45,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=190016.0, ans=0.125 +2024-08-27 00:36:06,217 INFO [train.py:1114] (0/4) Epoch 15, batch 800, loss[loss=0.176, simple_loss=0.2451, pruned_loss=0.0387, ctc_loss=0.07349, over 19806.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2666, pruned_loss=0.04477, ctc_loss=0.08359, over 3796038.15 frames. 
], batch size: 49, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:36:06,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=15.0 +2024-08-27 00:36:07,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.12 vs. limit=15.0 +2024-08-27 00:36:09,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=190122.66666666666, ans=0.025 +2024-08-27 00:36:15,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.89 vs. limit=15.0 +2024-08-27 00:36:27,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=190229.33333333334, ans=10.0 +2024-08-27 00:36:29,652 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.516e+02 1.778e+02 2.217e+02 3.654e+02, threshold=3.555e+02, percent-clipped=0.0 +2024-08-27 00:36:29,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190229.33333333334, ans=0.0 +2024-08-27 00:36:37,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=190282.66666666666, ans=0.0 +2024-08-27 00:36:54,853 INFO [train.py:1114] (0/4) Epoch 15, batch 850, loss[loss=0.2152, simple_loss=0.2901, pruned_loss=0.05132, ctc_loss=0.09399, over 19670.00 frames. ], tot_loss[loss=0.194, simple_loss=0.266, pruned_loss=0.04439, ctc_loss=0.08285, over 3816028.85 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:36:58,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190389.33333333334, ans=0.1 +2024-08-27 00:37:05,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=190442.66666666666, ans=0.0 +2024-08-27 00:37:07,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190442.66666666666, ans=0.125 +2024-08-27 00:37:13,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190442.66666666666, ans=0.125 +2024-08-27 00:37:13,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190442.66666666666, ans=0.1 +2024-08-27 00:37:23,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=190549.33333333334, ans=0.0 +2024-08-27 00:37:24,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0 +2024-08-27 00:37:30,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.26 vs. 
limit=22.5 +2024-08-27 00:37:34,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190602.66666666666, ans=0.1 +2024-08-27 00:37:42,021 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:37:46,589 INFO [train.py:1114] (0/4) Epoch 15, batch 900, loss[loss=0.1767, simple_loss=0.2484, pruned_loss=0.03854, ctc_loss=0.06981, over 19402.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2662, pruned_loss=0.0446, ctc_loss=0.08323, over 3818918.45 frames. ], batch size: 48, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:37:51,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=190656.0, ans=0.0 +2024-08-27 00:38:12,641 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.396e+02 1.546e+02 1.855e+02 3.193e+02, threshold=3.091e+02, percent-clipped=0.0 +2024-08-27 00:38:19,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=190816.0, ans=0.2 +2024-08-27 00:38:42,122 INFO [train.py:1114] (0/4) Epoch 15, batch 950, loss[loss=0.1962, simple_loss=0.2658, pruned_loss=0.04649, ctc_loss=0.08392, over 19500.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2665, pruned_loss=0.0448, ctc_loss=0.08342, over 3820506.49 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:38:42,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=190922.66666666666, ans=0.125 +2024-08-27 00:38:42,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=190922.66666666666, ans=0.0 +2024-08-27 00:38:44,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=190922.66666666666, ans=0.07 +2024-08-27 00:38:51,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=190922.66666666666, ans=0.0 +2024-08-27 00:38:55,377 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=12.0 +2024-08-27 00:39:09,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=191029.33333333334, ans=0.025 +2024-08-27 00:39:12,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=191029.33333333334, ans=0.125 +2024-08-27 00:39:13,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. 
limit=15.0 +2024-08-27 00:39:16,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=191029.33333333334, ans=0.125 +2024-08-27 00:39:28,038 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:39:31,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=191136.0, ans=0.025 +2024-08-27 00:39:37,066 INFO [train.py:1114] (0/4) Epoch 15, batch 1000, loss[loss=0.177, simple_loss=0.2502, pruned_loss=0.03801, ctc_loss=0.06973, over 19842.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2667, pruned_loss=0.04478, ctc_loss=0.08344, over 3817654.75 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:39:41,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=191189.33333333334, ans=0.125 +2024-08-27 00:39:44,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=191189.33333333334, ans=0.025 +2024-08-27 00:39:49,769 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=22.5 +2024-08-27 00:39:56,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.87 vs. limit=15.0 +2024-08-27 00:40:00,299 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.403e+02 1.586e+02 1.924e+02 3.101e+02, threshold=3.172e+02, percent-clipped=1.0 +2024-08-27 00:40:09,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191349.33333333334, ans=0.1 +2024-08-27 00:40:17,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-08-27 00:40:24,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191456.0, ans=0.125 +2024-08-27 00:40:25,468 INFO [train.py:1114] (0/4) Epoch 15, batch 1050, loss[loss=0.1811, simple_loss=0.2605, pruned_loss=0.03635, ctc_loss=0.07246, over 19840.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2659, pruned_loss=0.0446, ctc_loss=0.08319, over 3823923.35 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:40:40,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.45 vs. limit=15.0 +2024-08-27 00:40:53,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191562.66666666666, ans=0.125 +2024-08-27 00:40:53,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. 
limit=15.0 +2024-08-27 00:40:56,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=191616.0, ans=0.2 +2024-08-27 00:41:12,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191669.33333333334, ans=0.1 +2024-08-27 00:41:14,670 INFO [train.py:1114] (0/4) Epoch 15, batch 1100, loss[loss=0.1911, simple_loss=0.263, pruned_loss=0.04403, ctc_loss=0.07767, over 19589.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2658, pruned_loss=0.04449, ctc_loss=0.08291, over 3831082.54 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:41:18,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.64 vs. limit=22.5 +2024-08-27 00:41:21,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=191722.66666666666, ans=10.0 +2024-08-27 00:41:28,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191776.0, ans=0.125 +2024-08-27 00:41:29,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=191776.0, ans=0.2 +2024-08-27 00:41:36,199 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.518e+02 1.811e+02 2.066e+02 3.149e+02, threshold=3.622e+02, percent-clipped=0.0 +2024-08-27 00:41:42,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=191882.66666666666, ans=0.125 +2024-08-27 00:42:01,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=191936.0, ans=0.0 +2024-08-27 00:42:07,799 INFO [train.py:1114] (0/4) Epoch 15, batch 1150, loss[loss=0.1952, simple_loss=0.2706, pruned_loss=0.04299, ctc_loss=0.08477, over 19572.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2665, pruned_loss=0.0448, ctc_loss=0.08357, over 3828740.94 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:42:07,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=191989.33333333334, ans=0.07 +2024-08-27 00:42:08,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=191989.33333333334, ans=0.02 +2024-08-27 00:42:08,968 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-36000.pt +2024-08-27 00:42:21,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=191989.33333333334, ans=0.125 +2024-08-27 00:42:21,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=8.52 vs. 
limit=12.0 +2024-08-27 00:42:26,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192042.66666666666, ans=0.125 +2024-08-27 00:42:31,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192042.66666666666, ans=0.1 +2024-08-27 00:43:02,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=192202.66666666666, ans=0.125 +2024-08-27 00:43:04,218 INFO [train.py:1114] (0/4) Epoch 15, batch 1200, loss[loss=0.1954, simple_loss=0.2716, pruned_loss=0.04345, ctc_loss=0.08062, over 19835.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2671, pruned_loss=0.0449, ctc_loss=0.08395, over 3822545.46 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-27 00:44:32,769 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.478e+02 1.729e+02 2.216e+02 4.347e+02, threshold=3.458e+02, percent-clipped=1.0 +2024-08-27 00:45:40,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=192416.0, ans=0.125 +2024-08-27 00:45:43,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=192416.0, ans=0.125 +2024-08-27 00:45:53,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192469.33333333334, ans=0.125 +2024-08-27 00:46:05,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=192469.33333333334, ans=0.2 +2024-08-27 00:46:12,657 INFO [train.py:1114] (0/4) Epoch 15, batch 1250, loss[loss=0.2154, simple_loss=0.2857, pruned_loss=0.05331, ctc_loss=0.0959, over 19506.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2676, pruned_loss=0.04494, ctc_loss=0.08374, over 3841818.15 frames. ], batch size: 61, lr: 1.00e-02, grad_scale: 32.0 +2024-08-27 00:46:34,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=192522.66666666666, ans=0.125 +2024-08-27 00:46:34,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=192522.66666666666, ans=0.125 +2024-08-27 00:46:50,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.95 vs. limit=10.0 +2024-08-27 00:47:08,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. 
limit=15.0 +2024-08-27 00:47:22,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=192629.33333333334, ans=0.125 +2024-08-27 00:47:28,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192629.33333333334, ans=0.125 +2024-08-27 00:47:35,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192629.33333333334, ans=0.1 +2024-08-27 00:47:35,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.40 vs. limit=15.0 +2024-08-27 00:48:26,339 INFO [train.py:1114] (0/4) Epoch 15, batch 1300, loss[loss=0.215, simple_loss=0.2871, pruned_loss=0.05115, ctc_loss=0.1014, over 18896.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2669, pruned_loss=0.04452, ctc_loss=0.08313, over 3846791.24 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0 +2024-08-27 00:48:30,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=192789.33333333334, ans=0.05 +2024-08-27 00:48:42,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=192789.33333333334, ans=0.05 +2024-08-27 00:49:04,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=192842.66666666666, ans=0.0 +2024-08-27 00:49:08,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=192842.66666666666, ans=0.125 +2024-08-27 00:49:50,649 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.421e+02 1.669e+02 2.080e+02 3.869e+02, threshold=3.339e+02, percent-clipped=2.0 +2024-08-27 00:50:39,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=193002.66666666666, ans=0.025 +2024-08-27 00:50:43,609 INFO [train.py:1114] (0/4) Epoch 15, batch 1350, loss[loss=0.172, simple_loss=0.247, pruned_loss=0.03578, ctc_loss=0.06367, over 19767.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2663, pruned_loss=0.04433, ctc_loss=0.08273, over 3857434.60 frames. ], batch size: 54, lr: 9.98e-03, grad_scale: 32.0 +2024-08-27 00:51:41,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193056.0, ans=0.0 +2024-08-27 00:51:57,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=15.0 +2024-08-27 00:52:04,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=193162.66666666666, ans=0.125 +2024-08-27 00:52:04,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0 +2024-08-27 00:52:38,712 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.41 vs. 
limit=6.0 +2024-08-27 00:52:41,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.89 vs. limit=10.0 +2024-08-27 00:53:24,225 INFO [train.py:1114] (0/4) Epoch 15, batch 1400, loss[loss=0.1803, simple_loss=0.2385, pruned_loss=0.04392, ctc_loss=0.08546, over 19663.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2662, pruned_loss=0.04454, ctc_loss=0.08301, over 3864045.08 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0 +2024-08-27 00:53:24,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=193322.66666666666, ans=0.5 +2024-08-27 00:53:27,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193322.66666666666, ans=0.1 +2024-08-27 00:53:29,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=193322.66666666666, ans=0.07 +2024-08-27 00:53:36,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=193376.0, ans=0.0 +2024-08-27 00:53:57,406 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.449e+02 1.647e+02 2.125e+02 3.032e+02, threshold=3.293e+02, percent-clipped=0.0 +2024-08-27 00:54:19,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=193429.33333333334, ans=0.0 +2024-08-27 00:54:22,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=193482.66666666666, ans=0.0 +2024-08-27 00:54:48,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193482.66666666666, ans=0.1 +2024-08-27 00:54:54,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=193482.66666666666, ans=0.2 +2024-08-27 00:54:55,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193536.0, ans=0.125 +2024-08-27 00:55:08,671 INFO [train.py:1114] (0/4) Epoch 15, batch 1450, loss[loss=0.212, simple_loss=0.2842, pruned_loss=0.05113, ctc_loss=0.09392, over 19645.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.267, pruned_loss=0.04469, ctc_loss=0.08336, over 3861696.22 frames. ], batch size: 63, lr: 9.97e-03, grad_scale: 32.0 +2024-08-27 00:55:21,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=193589.33333333334, ans=0.0 +2024-08-27 00:56:01,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0 +2024-08-27 00:56:23,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5 +2024-08-27 00:56:35,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=193802.66666666666, ans=0.125 +2024-08-27 00:56:39,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.62 vs. 
limit=8.0 +2024-08-27 00:56:39,792 INFO [train.py:1114] (0/4) Epoch 15, batch 1500, loss[loss=0.2034, simple_loss=0.2821, pruned_loss=0.04524, ctc_loss=0.08538, over 19575.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2675, pruned_loss=0.04474, ctc_loss=0.08353, over 3861285.39 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 32.0 +2024-08-27 00:56:57,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=193856.0, ans=0.125 +2024-08-27 00:57:53,499 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.81 vs. limit=15.0 +2024-08-27 00:57:55,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-27 00:58:20,410 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.504e+02 1.720e+02 2.138e+02 3.076e+02, threshold=3.439e+02, percent-clipped=0.0 +2024-08-27 00:58:22,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=193962.66666666666, ans=0.5 +2024-08-27 00:58:50,637 INFO [train.py:1114] (0/4) Epoch 15, batch 1550, loss[loss=0.2191, simple_loss=0.2838, pruned_loss=0.05694, ctc_loss=0.1012, over 19610.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2672, pruned_loss=0.04481, ctc_loss=0.08353, over 3847267.13 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0 +2024-08-27 00:59:07,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=194176.0, ans=0.125 +2024-08-27 00:59:11,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0 +2024-08-27 00:59:23,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-27 00:59:25,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194282.66666666666, ans=0.1 +2024-08-27 00:59:26,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=19.09 vs. limit=22.5 +2024-08-27 00:59:36,418 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.11 vs. limit=15.0 +2024-08-27 00:59:37,707 INFO [train.py:1114] (0/4) Epoch 15, batch 1600, loss[loss=0.203, simple_loss=0.2796, pruned_loss=0.04627, ctc_loss=0.08456, over 19833.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2676, pruned_loss=0.04525, ctc_loss=0.08445, over 3836125.80 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0 +2024-08-27 00:59:52,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.91 vs. 
limit=15.0 +2024-08-27 00:59:56,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=194496.0, ans=0.025 +2024-08-27 00:59:57,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=194496.0, ans=0.125 +2024-08-27 01:00:17,423 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.455e+02 1.710e+02 2.060e+02 3.831e+02, threshold=3.419e+02, percent-clipped=3.0 +2024-08-27 01:00:17,751 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:00:26,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=194496.0, ans=0.125 +2024-08-27 01:00:40,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=194602.66666666666, ans=0.0 +2024-08-27 01:00:48,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-08-27 01:00:50,779 INFO [train.py:1114] (0/4) Epoch 15, batch 1650, loss[loss=0.1901, simple_loss=0.2719, pruned_loss=0.03903, ctc_loss=0.07549, over 19643.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2672, pruned_loss=0.04499, ctc_loss=0.08392, over 3832052.74 frames. ], batch size: 59, lr: 9.94e-03, grad_scale: 16.0 +2024-08-27 01:01:08,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=15.0 +2024-08-27 01:01:14,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194709.33333333334, ans=0.125 +2024-08-27 01:01:41,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=194816.0, ans=0.0 +2024-08-27 01:01:49,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=194869.33333333334, ans=0.125 +2024-08-27 01:02:11,998 INFO [train.py:1114] (0/4) Epoch 15, batch 1700, loss[loss=0.1682, simple_loss=0.2347, pruned_loss=0.03721, ctc_loss=0.0682, over 19682.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.267, pruned_loss=0.04451, ctc_loss=0.08307, over 3846484.66 frames. ], batch size: 46, lr: 9.94e-03, grad_scale: 16.0 +2024-08-27 01:02:16,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.42 vs. limit=15.0 +2024-08-27 01:02:26,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.19 vs. 
limit=15.0 +2024-08-27 01:02:27,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=194976.0, ans=0.2 +2024-08-27 01:02:30,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=194976.0, ans=0.2 +2024-08-27 01:02:31,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195029.33333333334, ans=0.0 +2024-08-27 01:02:33,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=195029.33333333334, ans=0.125 +2024-08-27 01:02:36,950 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.414e+02 1.817e+02 2.372e+02 3.799e+02, threshold=3.634e+02, percent-clipped=1.0 +2024-08-27 01:02:44,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=195082.66666666666, ans=0.025 +2024-08-27 01:02:45,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.57 vs. limit=22.5 +2024-08-27 01:03:00,189 INFO [train.py:1114] (0/4) Epoch 15, batch 1750, loss[loss=0.1775, simple_loss=0.2452, pruned_loss=0.03993, ctc_loss=0.07478, over 19659.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2666, pruned_loss=0.04437, ctc_loss=0.08284, over 3851315.28 frames. ], batch size: 45, lr: 9.93e-03, grad_scale: 16.0 +2024-08-27 01:03:00,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=195189.33333333334, ans=0.09899494936611666 +2024-08-27 01:03:01,712 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.27 vs. limit=15.0 +2024-08-27 01:03:05,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=195189.33333333334, ans=0.125 +2024-08-27 01:03:19,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195296.0, ans=0.1 +2024-08-27 01:03:22,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.64 vs. limit=15.0 +2024-08-27 01:03:30,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=195349.33333333334, ans=0.125 +2024-08-27 01:03:34,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=195349.33333333334, ans=0.0 +2024-08-27 01:03:45,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=195402.66666666666, ans=0.0 +2024-08-27 01:03:46,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=195402.66666666666, ans=0.0 +2024-08-27 01:03:49,206 INFO [train.py:1114] (0/4) Epoch 15, batch 1800, loss[loss=0.1946, simple_loss=0.2743, pruned_loss=0.04137, ctc_loss=0.08012, over 19617.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2665, pruned_loss=0.04422, ctc_loss=0.08256, over 3852338.26 frames. 
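The recurring scaling.py:214 entries above record ScheduledFloat values: regularizer hyperparameters (dropout probabilities, skip rates, balancer bounds) that are functions of batch_count rather than fixed constants. icefall's actual ScheduledFloat implementation is not reproduced in this log, so the following is only a minimal sketch of the idea, assuming a piecewise-linear schedule keyed on batch count; the class name and knot values are illustrative.

```python
# Hypothetical sketch of a batch-count-keyed hyperparameter, in the spirit of
# the "ScheduledFloat: name=..., batch_count=..., ans=..." lines above.
# The real icefall ScheduledFloat may differ; this only illustrates the idea.
from bisect import bisect_right

class PiecewiseLinearSchedule:
    """Linearly interpolates a float between (batch_count, value) knots."""
    def __init__(self, *knots):  # e.g. (0.0, 0.3), (20000.0, 0.1)
        self.xs = [x for x, _ in knots]
        self.ys = [y for _, y in knots]

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)

dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(193322.67))  # past the final knot -> 0.1, matching "ans=0.1"
```

This late in training (batch_count around 193k to 206k) most schedules have long since reached their final knot, which is why the logged ans= values sit at steady endpoints such as 0.0, 0.1, and 0.125.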
], batch size: 55, lr: 9.92e-03, grad_scale: 16.0 +2024-08-27 01:03:55,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.77 vs. limit=15.0 +2024-08-27 01:03:57,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=195509.33333333334, ans=0.025 +2024-08-27 01:04:34,452 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.516e+02 1.927e+02 2.557e+02 3.874e+02, threshold=3.854e+02, percent-clipped=2.0 +2024-08-27 01:04:36,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195562.66666666666, ans=0.125 +2024-08-27 01:05:43,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=195669.33333333334, ans=0.0 +2024-08-27 01:05:44,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=195669.33333333334, ans=0.0 +2024-08-27 01:05:54,925 INFO [train.py:1114] (0/4) Epoch 15, batch 1850, loss[loss=0.2031, simple_loss=0.2809, pruned_loss=0.04589, ctc_loss=0.0841, over 19592.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2664, pruned_loss=0.04428, ctc_loss=0.08265, over 3856614.83 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 16.0 +2024-08-27 01:06:10,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=195776.0, ans=0.125 +2024-08-27 01:06:22,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=195829.33333333334, ans=0.125 +2024-08-27 01:06:28,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=195829.33333333334, ans=6.0 +2024-08-27 01:06:33,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=195882.66666666666, ans=0.125 +2024-08-27 01:06:39,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=195882.66666666666, ans=0.2 +2024-08-27 01:06:41,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=195936.0, ans=0.125 +2024-08-27 01:06:49,449 INFO [train.py:1114] (0/4) Epoch 15, batch 1900, loss[loss=0.204, simple_loss=0.2794, pruned_loss=0.04626, ctc_loss=0.09039, over 19668.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2672, pruned_loss=0.04464, ctc_loss=0.08317, over 3862380.04 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 16.0 +2024-08-27 01:06:58,889 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.60 vs. 
limit=22.5 +2024-08-27 01:07:07,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196042.66666666666, ans=0.0 +2024-08-27 01:07:09,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196096.0, ans=0.1 +2024-08-27 01:07:43,087 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.422e+02 1.649e+02 2.231e+02 4.535e+02, threshold=3.297e+02, percent-clipped=1.0 +2024-08-27 01:08:04,607 INFO [train.py:1114] (0/4) Epoch 15, batch 1950, loss[loss=0.1836, simple_loss=0.2574, pruned_loss=0.03974, ctc_loss=0.07609, over 19589.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.268, pruned_loss=0.04472, ctc_loss=0.08327, over 3870699.90 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 16.0 +2024-08-27 01:08:13,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0 +2024-08-27 01:08:45,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.71 vs. limit=15.0 +2024-08-27 01:08:49,896 INFO [train.py:1114] (0/4) Epoch 15, batch 2000, loss[loss=0.1693, simple_loss=0.235, pruned_loss=0.03802, ctc_loss=0.06882, over 19619.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2685, pruned_loss=0.0451, ctc_loss=0.08396, over 3855755.56 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0 +2024-08-27 01:08:54,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=196522.66666666666, ans=0.125 +2024-08-27 01:09:34,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=196576.0, ans=0.09899494936611666 +2024-08-27 01:09:39,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=196576.0, ans=0.0 +2024-08-27 01:09:46,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.403e+02 1.640e+02 2.044e+02 3.050e+02, threshold=3.279e+02, percent-clipped=0.0 +2024-08-27 01:10:10,625 INFO [train.py:1114] (0/4) Epoch 15, batch 2050, loss[loss=0.1672, simple_loss=0.2386, pruned_loss=0.03454, ctc_loss=0.06685, over 19726.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2675, pruned_loss=0.04486, ctc_loss=0.08355, over 3851699.63 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0 +2024-08-27 01:10:20,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=196842.66666666666, ans=0.125 +2024-08-27 01:10:36,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=196896.0, ans=0.2 +2024-08-27 01:10:54,828 INFO [train.py:1114] (0/4) Epoch 15, batch 2100, loss[loss=0.1998, simple_loss=0.2731, pruned_loss=0.04548, ctc_loss=0.089, over 19763.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2666, pruned_loss=0.04446, ctc_loss=0.08286, over 3858793.46 frames. ], batch size: 54, lr: 9.88e-03, grad_scale: 32.0 +2024-08-27 01:11:10,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.07 vs. 
limit=22.5 +2024-08-27 01:11:19,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=197109.33333333334, ans=0.125 +2024-08-27 01:11:20,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197109.33333333334, ans=0.125 +2024-08-27 01:11:26,630 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.442e+02 1.703e+02 2.065e+02 4.080e+02, threshold=3.406e+02, percent-clipped=2.0 +2024-08-27 01:11:29,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=22.5 +2024-08-27 01:11:48,549 INFO [train.py:1114] (0/4) Epoch 15, batch 2150, loss[loss=0.1973, simple_loss=0.2683, pruned_loss=0.04632, ctc_loss=0.08416, over 19595.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2661, pruned_loss=0.04432, ctc_loss=0.0826, over 3869908.97 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0 +2024-08-27 01:11:54,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=197322.66666666666, ans=0.07 +2024-08-27 01:11:56,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=197376.0, ans=0.2 +2024-08-27 01:12:00,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=197376.0, ans=0.0 +2024-08-27 01:12:02,523 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:12:06,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=197429.33333333334, ans=0.0 +2024-08-27 01:12:07,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=197429.33333333334, ans=0.125 +2024-08-27 01:12:13,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197482.66666666666, ans=0.125 +2024-08-27 01:12:15,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.66 vs. limit=22.5 +2024-08-27 01:12:24,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197536.0, ans=0.1 +2024-08-27 01:12:25,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-08-27 01:12:26,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=197536.0, ans=0.0 +2024-08-27 01:12:29,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.58 vs. limit=15.0 +2024-08-27 01:12:31,732 INFO [train.py:1114] (0/4) Epoch 15, batch 2200, loss[loss=0.2053, simple_loss=0.2825, pruned_loss=0.04695, ctc_loss=0.08553, over 19603.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2665, pruned_loss=0.04435, ctc_loss=0.08264, over 3866967.17 frames. 
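The scaling.py:1024 Whitening: lines compare a per-module statistic (metric=) against a limit=; in the Zipformer recipe this belongs to a regularizer that nudges activation covariance toward a multiple of the identity, penalizing a module only when its metric exceeds the limit. The exact formula is not shown in the log, so the proxy below (mean squared covariance eigenvalue over squared mean eigenvalue, per channel group) is an assumption with the right qualitative behavior, not icefall's code.

```python
# Hypothetical proxy for the "metric=X vs. limit=Y" whitening measurement.
# Assumption: the metric is ~1 when covariance is a multiple of the identity
# and grows with eigenvalue spread; the actual icefall formula may differ.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels); channels split into num_groups groups."""
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngi,ngj->gij", x, x) / n   # per-group covariance
    eigs = torch.linalg.eigvalsh(cov)              # (num_groups, group_size)
    metric = (eigs ** 2).mean(dim=1) / eigs.mean(dim=1).clamp(min=1e-20) ** 2
    return metric.mean().item()

x = torch.randn(1000, 288)
print(whitening_metric(x, num_groups=1))  # close to 1 for white noise
```

Read this way, the values logged here (e.g. metric=9.19 vs. limit=15.0, metric=21.57 vs. limit=22.5) all sit under their limits, so the whitening penalty is apparently inactive at these measurement points.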
], batch size: 57, lr: 9.87e-03, grad_scale: 16.0 +2024-08-27 01:12:34,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=197589.33333333334, ans=0.0 +2024-08-27 01:12:40,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=197642.66666666666, ans=0.125 +2024-08-27 01:12:43,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=197642.66666666666, ans=0.125 +2024-08-27 01:12:54,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.477e+02 1.816e+02 2.262e+02 3.833e+02, threshold=3.631e+02, percent-clipped=4.0 +2024-08-27 01:12:58,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197749.33333333334, ans=0.1 +2024-08-27 01:12:59,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=197749.33333333334, ans=0.125 +2024-08-27 01:13:00,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197749.33333333334, ans=0.1 +2024-08-27 01:13:01,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.77 vs. limit=15.0 +2024-08-27 01:13:03,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197749.33333333334, ans=0.1 +2024-08-27 01:13:15,775 INFO [train.py:1114] (0/4) Epoch 15, batch 2250, loss[loss=0.2114, simple_loss=0.29, pruned_loss=0.0488, ctc_loss=0.08783, over 19614.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.267, pruned_loss=0.0446, ctc_loss=0.08303, over 3866993.22 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 16.0 +2024-08-27 01:13:18,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.76 vs. limit=15.0 +2024-08-27 01:13:23,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=197909.33333333334, ans=0.0 +2024-08-27 01:13:24,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197909.33333333334, ans=0.1 +2024-08-27 01:13:30,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=197909.33333333334, ans=0.125 +2024-08-27 01:13:45,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=198016.0, ans=0.025 +2024-08-27 01:13:49,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0 +2024-08-27 01:13:52,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=198069.33333333334, ans=0.0 +2024-08-27 01:13:58,193 INFO [train.py:1114] (0/4) Epoch 15, batch 2300, loss[loss=0.1637, simple_loss=0.2376, pruned_loss=0.03235, ctc_loss=0.06268, over 19503.00 frames. 
], tot_loss[loss=0.1942, simple_loss=0.2657, pruned_loss=0.04466, ctc_loss=0.08316, over 3861395.04 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 16.0 +2024-08-27 01:14:04,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0 +2024-08-27 01:14:13,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=198176.0, ans=0.09899494936611666 +2024-08-27 01:14:57,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=198229.33333333334, ans=0.0 +2024-08-27 01:15:02,208 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.441e+02 1.617e+02 1.954e+02 3.129e+02, threshold=3.235e+02, percent-clipped=0.0 +2024-08-27 01:15:13,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198336.0, ans=0.0 +2024-08-27 01:15:23,096 INFO [train.py:1114] (0/4) Epoch 15, batch 2350, loss[loss=0.2059, simple_loss=0.2841, pruned_loss=0.04615, ctc_loss=0.08841, over 19705.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2659, pruned_loss=0.04503, ctc_loss=0.08351, over 3864267.56 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 16.0 +2024-08-27 01:15:24,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198389.33333333334, ans=0.125 +2024-08-27 01:15:37,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=198442.66666666666, ans=0.125 +2024-08-27 01:15:43,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198496.0, ans=0.0 +2024-08-27 01:15:49,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=198549.33333333334, ans=0.125 +2024-08-27 01:15:57,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198549.33333333334, ans=0.125 +2024-08-27 01:16:31,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=198656.0, ans=0.2 +2024-08-27 01:16:31,888 INFO [train.py:1114] (0/4) Epoch 15, batch 2400, loss[loss=0.2086, simple_loss=0.2825, pruned_loss=0.04866, ctc_loss=0.09331, over 19363.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2679, pruned_loss=0.04558, ctc_loss=0.08455, over 3858534.45 frames. 
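Each optim.py:487 warning summarizes gradient clipping over recent batches: the grad-norm quartiles (min, 25%, median, 75%, max), the active threshold, and the percentage of batches clipped. Notice the logged thresholds track roughly Clipping_scale (2.0) times the median quartile, e.g. threshold=3.235e+02 against a median of 1.617e+02 just above. A hedged sketch of such a scheme follows; the window size, parameter grouping, and all names here are illustrative, not the recipe's exact bookkeeping.

```python
# Hedged sketch of median-scaled gradient clipping, matching the spirit of
# "Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...".
from collections import deque
import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent global grad norms
        self.clipped = 0
        self.total = 0

    def __call__(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()  # clipping_scale x median
        self.total += 1
        if norm > threshold:
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)
        print(f"Clipping_scale={self.scale}, grad-norm quartiles "
              f"{' '.join(f'{v:.3e}' for v in q.tolist())}, "
              f"threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.clipped / self.total:.1f}")
        return norm
```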
], batch size: 67, lr: 9.85e-03, grad_scale: 32.0 +2024-08-27 01:17:19,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=198656.0, ans=0.125 +2024-08-27 01:17:28,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=198709.33333333334, ans=0.025 +2024-08-27 01:17:32,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=198762.66666666666, ans=0.0 +2024-08-27 01:17:35,478 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.452e+02 1.605e+02 2.004e+02 3.213e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-27 01:17:39,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.67 vs. limit=15.0 +2024-08-27 01:17:41,695 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. limit=6.0 +2024-08-27 01:17:57,656 INFO [train.py:1114] (0/4) Epoch 15, batch 2450, loss[loss=0.2385, simple_loss=0.2893, pruned_loss=0.06893, ctc_loss=0.1243, over 13381.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2712, pruned_loss=0.04799, ctc_loss=0.08942, over 3731305.61 frames. ], batch size: 141, lr: 9.84e-03, grad_scale: 32.0 +2024-08-27 01:18:22,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=199029.33333333334, ans=0.09899494936611666 +2024-08-27 01:18:39,445 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-15.pt +2024-08-27 01:20:20,966 INFO [train.py:1114] (0/4) Epoch 16, batch 0, loss[loss=0.182, simple_loss=0.2472, pruned_loss=0.04261, ctc_loss=0.07891, over 19833.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2472, pruned_loss=0.04261, ctc_loss=0.07891, over 19833.00 frames. ], batch size: 49, lr: 9.52e-03, grad_scale: 32.0 +2024-08-27 01:20:20,967 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-27 01:21:17,376 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1744, simple_loss=0.2673, pruned_loss=0.03034, ctc_loss=0.05204, over 944034.00 frames. +2024-08-27 01:21:17,378 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-27 01:21:17,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=199130.66666666666, ans=0.0 +2024-08-27 01:21:24,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=199130.66666666666, ans=0.125 +2024-08-27 01:21:39,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=199237.33333333334, ans=0.125 +2024-08-27 01:21:48,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=15.0 +2024-08-27 01:21:54,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.674e+02 1.811e+02 2.106e+02 3.737e+02, threshold=3.622e+02, percent-clipped=2.0 +2024-08-27 01:21:55,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.38 vs. 
limit=22.5 +2024-08-27 01:21:57,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0 +2024-08-27 01:22:02,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=199344.0, ans=0.0 +2024-08-27 01:22:03,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=199344.0, ans=0.0 +2024-08-27 01:22:07,225 INFO [train.py:1114] (0/4) Epoch 16, batch 50, loss[loss=0.1601, simple_loss=0.2367, pruned_loss=0.02991, ctc_loss=0.05931, over 19702.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2667, pruned_loss=0.04399, ctc_loss=0.08312, over 845293.61 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0 +2024-08-27 01:22:07,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=199397.33333333334, ans=0.025 +2024-08-27 01:22:31,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=199504.0, ans=0.125 +2024-08-27 01:22:32,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0 +2024-08-27 01:22:45,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=199610.66666666666, ans=0.0 +2024-08-27 01:22:53,635 INFO [train.py:1114] (0/4) Epoch 16, batch 100, loss[loss=0.1738, simple_loss=0.2524, pruned_loss=0.03444, ctc_loss=0.06588, over 19711.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2695, pruned_loss=0.04487, ctc_loss=0.08438, over 1499669.28 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0 +2024-08-27 01:23:15,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=199770.66666666666, ans=0.0 +2024-08-27 01:23:33,428 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.434e+02 1.536e+02 1.885e+02 3.287e+02, threshold=3.072e+02, percent-clipped=0.0 +2024-08-27 01:23:45,318 INFO [train.py:1114] (0/4) Epoch 16, batch 150, loss[loss=0.1781, simple_loss=0.2383, pruned_loss=0.04246, ctc_loss=0.0824, over 19725.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2665, pruned_loss=0.04424, ctc_loss=0.08296, over 2028484.55 frames. 
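A few entries back, at the epoch-15/16 boundary, the trainer saves exp/epoch-15.pt, computes a validation loss over 944034 frames, and reports peak GPU memory (13201MB) before resuming with epoch 16, batch 0. A minimal sketch of that bookkeeping follows, assuming ordinary PyTorch objects; compute_loss is caller-supplied and the checkpoint layout is illustrative, not icefall's actual API.

```python
# Minimal sketch of the per-epoch bookkeeping visible in the log:
# save a checkpoint, run a validation pass, report peak CUDA memory.
import torch

def end_of_epoch(model, optimizer, epoch, valid_loader, compute_loss):
    torch.save({"model": model.state_dict(),
                "optimizer": optimizer.state_dict(),
                "epoch": epoch},
               f"exp/epoch-{epoch}.pt")
    model.eval()
    tot_loss = tot_frames = 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)  # caller-supplied
            tot_loss += loss.item() * num_frames            # frame-weighted
            tot_frames += num_frames
    model.train()
    print(f"Epoch {epoch + 1}, validation: loss={tot_loss / tot_frames:.4f}, "
          f"over {tot_frames:.2f} frames.")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // 2**20}MB")
```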
], batch size: 47, lr: 9.50e-03, grad_scale: 32.0 +2024-08-27 01:23:45,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=199930.66666666666, ans=0.2 +2024-08-27 01:23:56,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=199984.0, ans=0.2 +2024-08-27 01:24:03,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200037.33333333334, ans=0.0 +2024-08-27 01:24:09,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=200037.33333333334, ans=0.125 +2024-08-27 01:24:26,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=200144.0, ans=0.0 +2024-08-27 01:24:35,669 INFO [train.py:1114] (0/4) Epoch 16, batch 200, loss[loss=0.2032, simple_loss=0.2721, pruned_loss=0.04911, ctc_loss=0.08996, over 18100.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2658, pruned_loss=0.0441, ctc_loss=0.08227, over 2435806.75 frames. ], batch size: 85, lr: 9.49e-03, grad_scale: 32.0 +2024-08-27 01:24:59,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0 +2024-08-27 01:25:01,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=200304.0, ans=0.0 +2024-08-27 01:25:08,304 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0 +2024-08-27 01:25:12,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=200357.33333333334, ans=0.0 +2024-08-27 01:25:14,234 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.526e+02 1.826e+02 2.235e+02 3.925e+02, threshold=3.652e+02, percent-clipped=6.0 +2024-08-27 01:25:46,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=200410.66666666666, ans=0.125 +2024-08-27 01:25:52,467 INFO [train.py:1114] (0/4) Epoch 16, batch 250, loss[loss=0.1881, simple_loss=0.2681, pruned_loss=0.03902, ctc_loss=0.07499, over 19425.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2655, pruned_loss=0.04379, ctc_loss=0.08176, over 2756064.49 frames. 
], batch size: 67, lr: 9.49e-03, grad_scale: 32.0 +2024-08-27 01:26:04,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=200464.0, ans=0.0 +2024-08-27 01:26:07,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=200517.33333333334, ans=0.125 +2024-08-27 01:26:08,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=200517.33333333334, ans=0.125 +2024-08-27 01:26:13,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=200517.33333333334, ans=0.025 +2024-08-27 01:26:19,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=200570.66666666666, ans=0.0 +2024-08-27 01:26:19,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.09 vs. limit=15.0 +2024-08-27 01:26:20,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200570.66666666666, ans=0.1 +2024-08-27 01:26:35,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=200624.0, ans=0.125 +2024-08-27 01:26:42,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=200677.33333333334, ans=0.2 +2024-08-27 01:26:42,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=200677.33333333334, ans=0.125 +2024-08-27 01:26:44,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200677.33333333334, ans=0.125 +2024-08-27 01:26:46,538 INFO [train.py:1114] (0/4) Epoch 16, batch 300, loss[loss=0.2133, simple_loss=0.2802, pruned_loss=0.05398, ctc_loss=0.09578, over 19542.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2651, pruned_loss=0.04365, ctc_loss=0.08145, over 3000525.35 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0 +2024-08-27 01:27:22,584 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.450e+02 1.677e+02 2.025e+02 3.129e+02, threshold=3.354e+02, percent-clipped=0.0 +2024-08-27 01:27:25,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=200944.0, ans=0.5 +2024-08-27 01:27:25,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0 +2024-08-27 01:27:28,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200944.0, ans=0.0 +2024-08-27 01:27:33,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=200944.0, ans=0.0 +2024-08-27 01:27:36,632 INFO [train.py:1114] (0/4) Epoch 16, batch 350, loss[loss=0.1687, simple_loss=0.2397, pruned_loss=0.03608, ctc_loss=0.06369, over 19764.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2663, pruned_loss=0.04408, ctc_loss=0.08215, over 3189776.14 frames. 
], batch size: 48, lr: 9.48e-03, grad_scale: 32.0 +2024-08-27 01:27:37,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.56 vs. limit=12.0 +2024-08-27 01:27:48,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=201050.66666666666, ans=0.05 +2024-08-27 01:28:24,286 INFO [train.py:1114] (0/4) Epoch 16, batch 400, loss[loss=0.1812, simple_loss=0.2608, pruned_loss=0.03663, ctc_loss=0.07069, over 19493.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2656, pruned_loss=0.0437, ctc_loss=0.08163, over 3341541.24 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0 +2024-08-27 01:28:46,184 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.89 vs. limit=22.5 +2024-08-27 01:28:49,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=201370.66666666666, ans=0.0 +2024-08-27 01:28:52,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=201424.0, ans=0.0 +2024-08-27 01:28:54,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=201424.0, ans=0.95 +2024-08-27 01:28:57,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=201424.0, ans=0.025 +2024-08-27 01:28:58,540 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.444e+02 1.663e+02 2.108e+02 3.293e+02, threshold=3.326e+02, percent-clipped=0.0 +2024-08-27 01:29:06,234 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:29:07,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=201477.33333333334, ans=0.125 +2024-08-27 01:29:10,807 INFO [train.py:1114] (0/4) Epoch 16, batch 450, loss[loss=0.1918, simple_loss=0.2747, pruned_loss=0.03946, ctc_loss=0.07521, over 19623.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2653, pruned_loss=0.04344, ctc_loss=0.08129, over 3449745.03 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0 +2024-08-27 01:29:10,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=201530.66666666666, ans=0.125 +2024-08-27 01:29:11,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=201530.66666666666, ans=0.0 +2024-08-27 01:29:53,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=201744.0, ans=0.125 +2024-08-27 01:30:01,618 INFO [train.py:1114] (0/4) Epoch 16, batch 500, loss[loss=0.188, simple_loss=0.2675, pruned_loss=0.03903, ctc_loss=0.07602, over 19663.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2641, pruned_loss=0.04311, ctc_loss=0.08061, over 3545183.97 frames. 
], batch size: 63, lr: 9.46e-03, grad_scale: 32.0 +2024-08-27 01:30:24,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=201904.0, ans=0.0 +2024-08-27 01:30:30,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.67 vs. limit=6.0 +2024-08-27 01:30:34,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=201957.33333333334, ans=0.125 +2024-08-27 01:30:35,562 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.65 vs. limit=10.0 +2024-08-27 01:30:39,486 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.484e+02 1.746e+02 2.096e+02 4.072e+02, threshold=3.492e+02, percent-clipped=1.0 +2024-08-27 01:30:46,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202010.66666666666, ans=0.1 +2024-08-27 01:30:51,382 INFO [train.py:1114] (0/4) Epoch 16, batch 550, loss[loss=0.2026, simple_loss=0.2763, pruned_loss=0.04693, ctc_loss=0.08764, over 19241.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.264, pruned_loss=0.04317, ctc_loss=0.08056, over 3606918.73 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0 +2024-08-27 01:30:56,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=202064.0, ans=0.1 +2024-08-27 01:31:08,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=15.0 +2024-08-27 01:31:10,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=202170.66666666666, ans=0.125 +2024-08-27 01:31:12,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=202170.66666666666, ans=0.125 +2024-08-27 01:31:16,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202170.66666666666, ans=0.1 +2024-08-27 01:31:19,282 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:31:19,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=202224.0, ans=0.0 +2024-08-27 01:31:25,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=202224.0, ans=0.025 +2024-08-27 01:31:34,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=202277.33333333334, ans=0.125 +2024-08-27 01:31:36,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=202277.33333333334, ans=0.0 +2024-08-27 01:31:37,734 INFO [train.py:1114] (0/4) Epoch 16, batch 600, loss[loss=0.2014, simple_loss=0.2811, pruned_loss=0.04425, ctc_loss=0.08282, over 19441.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2644, pruned_loss=0.04325, ctc_loss=0.08078, over 3664117.11 frames. 
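Every train.py:1114 summary pairs the current batch's loss with a tot_loss ... over N frames figure. Note how N rebuilds from 845293.61 frames at epoch 16, batch 50 and then saturates near 3.8M rather than growing without bound; that behavior is consistent with a decayed, frame-weighted running average rather than a cumulative mean. The sketch below is one way to produce such a statistic; the decay constant and class are assumptions, not necessarily how icefall aggregates it.

```python
# Hedged sketch of the "tot_loss ... over N frames" running statistic.
# Assumption: both the loss sum and the frame count decay geometrically,
# which keeps N roughly constant once training is well underway.
class RunningFrameLoss:
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```

With decay=0.995 the steady-state frame count is roughly batch_frames / (1 - decay), i.e. about 200 batches of ~19k frames, which is on the order of the ~3.8M totals reported above.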
], batch size: 67, lr: 9.45e-03, grad_scale: 32.0 +2024-08-27 01:31:45,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=202330.66666666666, ans=0.07 +2024-08-27 01:31:50,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0 +2024-08-27 01:32:09,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.30 vs. limit=15.0 +2024-08-27 01:32:14,245 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.474e+02 1.879e+02 2.462e+02 5.922e+02, threshold=3.759e+02, percent-clipped=13.0 +2024-08-27 01:32:19,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=202544.0, ans=0.05 +2024-08-27 01:32:26,170 INFO [train.py:1114] (0/4) Epoch 16, batch 650, loss[loss=0.1879, simple_loss=0.2595, pruned_loss=0.0423, ctc_loss=0.07951, over 19769.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2636, pruned_loss=0.04285, ctc_loss=0.08027, over 3715349.48 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0 +2024-08-27 01:32:30,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=202597.33333333334, ans=0.125 +2024-08-27 01:32:33,814 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:32:38,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202650.66666666666, ans=0.1 +2024-08-27 01:32:39,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=202650.66666666666, ans=0.035 +2024-08-27 01:32:47,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=202704.0, ans=0.025 +2024-08-27 01:33:03,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=202757.33333333334, ans=0.125 +2024-08-27 01:33:03,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202757.33333333334, ans=0.0 +2024-08-27 01:33:18,144 INFO [train.py:1114] (0/4) Epoch 16, batch 700, loss[loss=0.1751, simple_loss=0.2471, pruned_loss=0.03707, ctc_loss=0.07261, over 19717.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2642, pruned_loss=0.04301, ctc_loss=0.08072, over 3747773.79 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0 +2024-08-27 01:33:24,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=202864.0, ans=0.0 +2024-08-27 01:33:25,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202864.0, ans=0.1 +2024-08-27 01:33:27,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.83 vs. 
limit=22.5 +2024-08-27 01:33:36,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=202970.66666666666, ans=0.2 +2024-08-27 01:33:41,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=202970.66666666666, ans=0.125 +2024-08-27 01:33:44,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.22 vs. limit=15.0 +2024-08-27 01:33:50,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=203024.0, ans=0.125 +2024-08-27 01:33:52,578 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.460e+02 1.707e+02 2.152e+02 4.812e+02, threshold=3.413e+02, percent-clipped=3.0 +2024-08-27 01:33:56,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=203077.33333333334, ans=0.0 +2024-08-27 01:33:59,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=203077.33333333334, ans=0.0 +2024-08-27 01:34:03,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.43 vs. limit=10.0 +2024-08-27 01:34:03,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.08 vs. limit=15.0 +2024-08-27 01:34:04,705 INFO [train.py:1114] (0/4) Epoch 16, batch 750, loss[loss=0.1891, simple_loss=0.2716, pruned_loss=0.03837, ctc_loss=0.07467, over 19493.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2637, pruned_loss=0.04277, ctc_loss=0.08016, over 3772968.21 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 32.0 +2024-08-27 01:34:09,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.68 vs. limit=15.0 +2024-08-27 01:34:12,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=203130.66666666666, ans=0.125 +2024-08-27 01:34:12,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.97 vs. limit=15.0 +2024-08-27 01:34:16,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203184.0, ans=0.1 +2024-08-27 01:34:28,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=203237.33333333334, ans=0.05 +2024-08-27 01:34:57,324 INFO [train.py:1114] (0/4) Epoch 16, batch 800, loss[loss=0.1826, simple_loss=0.2526, pruned_loss=0.04065, ctc_loss=0.078, over 19828.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2637, pruned_loss=0.04286, ctc_loss=0.08026, over 3794945.14 frames. ], batch size: 49, lr: 9.42e-03, grad_scale: 32.0 +2024-08-27 01:35:49,637 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.508e+02 1.846e+02 2.334e+02 3.502e+02, threshold=3.692e+02, percent-clipped=1.0 +2024-08-27 01:36:01,627 INFO [train.py:1114] (0/4) Epoch 16, batch 850, loss[loss=0.1922, simple_loss=0.2723, pruned_loss=0.04118, ctc_loss=0.07444, over 19643.00 frames. 
], tot_loss[loss=0.1906, simple_loss=0.2636, pruned_loss=0.04278, ctc_loss=0.08002, over 3814588.77 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0 +2024-08-27 01:36:19,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=203717.33333333334, ans=0.125 +2024-08-27 01:36:45,506 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:36:46,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=203877.33333333334, ans=0.2 +2024-08-27 01:36:51,744 INFO [train.py:1114] (0/4) Epoch 16, batch 900, loss[loss=0.1789, simple_loss=0.2569, pruned_loss=0.03662, ctc_loss=0.06928, over 19403.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2644, pruned_loss=0.04328, ctc_loss=0.08095, over 3818681.23 frames. ], batch size: 48, lr: 9.41e-03, grad_scale: 32.0 +2024-08-27 01:37:15,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=204037.33333333334, ans=0.0 +2024-08-27 01:37:16,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=204037.33333333334, ans=0.2 +2024-08-27 01:37:26,150 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.398e+02 1.563e+02 1.898e+02 3.698e+02, threshold=3.126e+02, percent-clipped=1.0 +2024-08-27 01:37:35,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=204144.0, ans=0.125 +2024-08-27 01:37:38,110 INFO [train.py:1114] (0/4) Epoch 16, batch 950, loss[loss=0.1774, simple_loss=0.2497, pruned_loss=0.03818, ctc_loss=0.07204, over 19500.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.265, pruned_loss=0.04362, ctc_loss=0.08151, over 3821331.88 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0 +2024-08-27 01:37:42,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=204197.33333333334, ans=0.125 +2024-08-27 01:37:44,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=204197.33333333334, ans=0.125 +2024-08-27 01:37:56,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=204250.66666666666, ans=0.125 +2024-08-27 01:37:58,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=204304.0, ans=0.1 +2024-08-27 01:38:08,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204357.33333333334, ans=0.1 +2024-08-27 01:38:15,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=204357.33333333334, ans=0.0 +2024-08-27 01:38:20,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=204410.66666666666, ans=0.125 +2024-08-27 01:38:29,234 INFO [train.py:1114] (0/4) Epoch 16, batch 1000, loss[loss=0.1733, simple_loss=0.2497, pruned_loss=0.03544, ctc_loss=0.06502, over 19863.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2655, pruned_loss=0.04377, ctc_loss=0.08182, over 3817099.88 frames. 
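The occasional scaling.py:1120 WithLoss: lines report the summed auxiliary loss attached to a module's attention weights; loss-sum=0.000e+00 means that penalty contributed nothing over the reporting window. The sketch below shows only the general pattern (an identity wrapper that accumulates an extra loss term and periodically logs its sum); the penalty function and logging cadence are assumptions, not icefall's mechanism.

```python
# Sketch of a "WithLoss"-style diagnostic: forward is the identity, but an
# auxiliary penalty is accumulated and its running sum logged periodically.
# penalty_fn and the cadence are illustrative, not icefall's actual code.
import torch
import torch.nn as nn

class WithLossDiagnostic(nn.Module):
    def __init__(self, name: str, penalty_fn, log_every: int = 1000):
        super().__init__()
        self.name = name
        self.penalty_fn = penalty_fn
        self.log_every = log_every
        self.loss_sum = 0.0
        self.count = 0

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        if self.training:
            aux = self.penalty_fn(x)        # scalar extra loss, often zero
            self.loss_sum += float(aux.detach())
            self.count += 1
            if self.count % self.log_every == 0:
                print(f"WithLoss: name={self.name}, "
                      f"loss-sum={self.loss_sum:.3e}")
                self.loss_sum = 0.0
            x = x + (aux - aux.detach())    # same value, injects gradient
        return x
```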
], batch size: 52, lr: 9.40e-03, grad_scale: 32.0 +2024-08-27 01:38:33,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.61 vs. limit=22.5 +2024-08-27 01:38:47,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204570.66666666666, ans=0.1 +2024-08-27 01:38:48,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=204570.66666666666, ans=0.0 +2024-08-27 01:38:54,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=204570.66666666666, ans=0.125 +2024-08-27 01:39:01,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=204624.0, ans=0.1 +2024-08-27 01:39:07,621 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.409e+02 1.616e+02 2.034e+02 3.159e+02, threshold=3.231e+02, percent-clipped=1.0 +2024-08-27 01:39:08,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=204624.0, ans=0.125 +2024-08-27 01:39:19,854 INFO [train.py:1114] (0/4) Epoch 16, batch 1050, loss[loss=0.1898, simple_loss=0.2694, pruned_loss=0.04064, ctc_loss=0.07231, over 19848.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.265, pruned_loss=0.04364, ctc_loss=0.08177, over 3823437.21 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 32.0 +2024-08-27 01:39:27,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=204730.66666666666, ans=0.5 +2024-08-27 01:39:27,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=204730.66666666666, ans=0.0 +2024-08-27 01:39:28,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=204784.0, ans=0.125 +2024-08-27 01:39:29,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204784.0, ans=0.125 +2024-08-27 01:39:37,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-27 01:39:37,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=204784.0, ans=0.125 +2024-08-27 01:39:39,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-08-27 01:39:40,299 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=12.0 +2024-08-27 01:39:43,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=204837.33333333334, ans=0.0 +2024-08-27 01:39:57,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204944.0, ans=0.1 +2024-08-27 01:40:07,054 INFO [train.py:1114] (0/4) Epoch 16, batch 1100, loss[loss=0.183, simple_loss=0.258, pruned_loss=0.03941, ctc_loss=0.07289, over 19578.00 frames. 
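Across this stretch of the log the lr: field decays gently within an epoch (9.98e-03 at epoch 15, batch 1400 down to 9.84e-03 by batch 2450) and then steps down when epoch 16 begins (9.52e-03). That shape, a smooth batch-indexed decay plus a discrete per-epoch drop, is consistent with icefall's Eden scheduler; the sketch below follows the commonly cited Eden form, with the constants to be treated as assumptions rather than this recipe's actual settings.

```python
# Eden-style learning-rate rule: two power-law factors, one driven by the
# batch index (slow within-epoch decay) and one by the epoch index (the
# discrete drop at each epoch boundary). Constants here are illustrative.
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(eden_lr(0.045, batch=200000, epoch=16))  # shape only; not tuned to this log
```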
], tot_loss[loss=0.1918, simple_loss=0.2646, pruned_loss=0.04328, ctc_loss=0.08118, over 3830357.68 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 32.0 +2024-08-27 01:40:14,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.79 vs. limit=15.0 +2024-08-27 01:40:25,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=205104.0, ans=0.125 +2024-08-27 01:40:26,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.39 vs. limit=15.0 +2024-08-27 01:40:38,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=22.5 +2024-08-27 01:40:38,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=205157.33333333334, ans=0.125 +2024-08-27 01:40:39,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205157.33333333334, ans=0.125 +2024-08-27 01:40:43,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=205157.33333333334, ans=0.025 +2024-08-27 01:40:44,426 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.474e+02 1.664e+02 2.002e+02 3.685e+02, threshold=3.328e+02, percent-clipped=2.0 +2024-08-27 01:40:55,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=205210.66666666666, ans=0.125 +2024-08-27 01:40:55,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=12.0 +2024-08-27 01:40:57,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=205210.66666666666, ans=0.0 +2024-08-27 01:40:58,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.29 vs. limit=6.0 +2024-08-27 01:40:59,536 INFO [train.py:1114] (0/4) Epoch 16, batch 1150, loss[loss=0.1812, simple_loss=0.2529, pruned_loss=0.03998, ctc_loss=0.07391, over 19586.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2643, pruned_loss=0.04323, ctc_loss=0.08104, over 3828742.07 frames. 
], batch size: 52, lr: 9.38e-03, grad_scale: 32.0 +2024-08-27 01:42:51,165 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:43:16,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205370.66666666666, ans=0.125 +2024-08-27 01:43:18,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=205424.0, ans=0.07 +2024-08-27 01:43:32,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205477.33333333334, ans=0.125 +2024-08-27 01:43:39,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=205530.66666666666, ans=0.0 +2024-08-27 01:43:40,068 INFO [train.py:1114] (0/4) Epoch 16, batch 1200, loss[loss=0.1881, simple_loss=0.2645, pruned_loss=0.04054, ctc_loss=0.07653, over 19826.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2651, pruned_loss=0.04369, ctc_loss=0.08189, over 3825157.00 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0 +2024-08-27 01:43:49,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=205530.66666666666, ans=0.125 +2024-08-27 01:44:03,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=205637.33333333334, ans=0.125 +2024-08-27 01:44:16,050 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.520e+02 1.803e+02 2.158e+02 3.897e+02, threshold=3.606e+02, percent-clipped=2.0 +2024-08-27 01:44:27,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=205797.33333333334, ans=0.0 +2024-08-27 01:44:28,169 INFO [train.py:1114] (0/4) Epoch 16, batch 1250, loss[loss=0.2045, simple_loss=0.2813, pruned_loss=0.04697, ctc_loss=0.08458, over 19537.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2656, pruned_loss=0.04355, ctc_loss=0.08166, over 3843181.77 frames. ], batch size: 61, lr: 9.37e-03, grad_scale: 32.0 +2024-08-27 01:44:32,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=205797.33333333334, ans=0.0 +2024-08-27 01:44:37,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.67 vs. limit=15.0 +2024-08-27 01:45:08,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=206010.66666666666, ans=0.0 +2024-08-27 01:45:14,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=206010.66666666666, ans=0.025 +2024-08-27 01:45:15,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206010.66666666666, ans=0.125 +2024-08-27 01:45:17,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=206064.0, ans=0.125 +2024-08-27 01:45:17,715 INFO [train.py:1114] (0/4) Epoch 16, batch 1300, loss[loss=0.1951, simple_loss=0.271, pruned_loss=0.04377, ctc_loss=0.07904, over 18946.00 frames. 
], tot_loss[loss=0.1915, simple_loss=0.2645, pruned_loss=0.04307, ctc_loss=0.08087, over 3847064.72 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0 +2024-08-27 01:45:20,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=206064.0, ans=0.125 +2024-08-27 01:45:27,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206117.33333333334, ans=0.0 +2024-08-27 01:45:37,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.14 vs. limit=6.0 +2024-08-27 01:45:38,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=206170.66666666666, ans=0.125 +2024-08-27 01:45:52,819 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.516e+02 1.773e+02 2.282e+02 3.618e+02, threshold=3.546e+02, percent-clipped=1.0 +2024-08-27 01:45:56,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206277.33333333334, ans=0.1 +2024-08-27 01:45:58,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206277.33333333334, ans=0.125 +2024-08-27 01:46:06,818 INFO [train.py:1114] (0/4) Epoch 16, batch 1350, loss[loss=0.182, simple_loss=0.2603, pruned_loss=0.0374, ctc_loss=0.07227, over 19769.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2645, pruned_loss=0.04303, ctc_loss=0.08061, over 3859016.68 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 32.0 +2024-08-27 01:46:14,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.29 vs. limit=15.0 +2024-08-27 01:46:38,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206490.66666666666, ans=0.1 +2024-08-27 01:46:53,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=206544.0, ans=0.0 +2024-08-27 01:46:56,696 INFO [train.py:1114] (0/4) Epoch 16, batch 1400, loss[loss=0.1671, simple_loss=0.2269, pruned_loss=0.03938, ctc_loss=0.07147, over 19644.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.264, pruned_loss=0.04283, ctc_loss=0.08029, over 3865651.80 frames. 
], batch size: 46, lr: 9.35e-03, grad_scale: 32.0 +2024-08-27 01:47:01,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=206597.33333333334, ans=0.125 +2024-08-27 01:47:11,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=206650.66666666666, ans=0.0 +2024-08-27 01:47:27,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206757.33333333334, ans=0.1 +2024-08-27 01:48:25,294 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.410e+02 1.569e+02 1.892e+02 4.037e+02, threshold=3.138e+02, percent-clipped=1.0 +2024-08-27 01:48:27,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=206810.66666666666, ans=0.2 +2024-08-27 01:48:37,431 INFO [train.py:1114] (0/4) Epoch 16, batch 1450, loss[loss=0.2154, simple_loss=0.2801, pruned_loss=0.05529, ctc_loss=0.1006, over 19666.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2643, pruned_loss=0.04285, ctc_loss=0.08049, over 3862662.88 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0 +2024-08-27 01:49:12,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=207024.0, ans=0.04949747468305833 +2024-08-27 01:49:20,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=207077.33333333334, ans=0.025 +2024-08-27 01:49:23,210 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:49:25,843 INFO [train.py:1114] (0/4) Epoch 16, batch 1500, loss[loss=0.2127, simple_loss=0.2885, pruned_loss=0.04951, ctc_loss=0.09468, over 19605.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2652, pruned_loss=0.04305, ctc_loss=0.08101, over 3862701.02 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0 +2024-08-27 01:49:52,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207237.33333333334, ans=0.1 +2024-08-27 01:49:53,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=207237.33333333334, ans=0.5 +2024-08-27 01:50:03,763 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.422e+02 1.666e+02 2.042e+02 4.208e+02, threshold=3.332e+02, percent-clipped=3.0 +2024-08-27 01:50:18,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.26 vs. limit=22.5 +2024-08-27 01:50:22,133 INFO [train.py:1114] (0/4) Epoch 16, batch 1550, loss[loss=0.2124, simple_loss=0.2823, pruned_loss=0.05141, ctc_loss=0.09915, over 19591.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2652, pruned_loss=0.04319, ctc_loss=0.0812, over 3846710.12 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0 +2024-08-27 01:50:37,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=207450.66666666666, ans=0.025 +2024-08-27 01:51:04,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.02 vs. 
limit=22.5 +2024-08-27 01:51:10,018 INFO [train.py:1114] (0/4) Epoch 16, batch 1600, loss[loss=0.1712, simple_loss=0.2528, pruned_loss=0.0332, ctc_loss=0.0583, over 19829.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2646, pruned_loss=0.04306, ctc_loss=0.08086, over 3835215.71 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0 +2024-08-27 01:51:26,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=207717.33333333334, ans=0.04949747468305833 +2024-08-27 01:51:26,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207717.33333333334, ans=0.1 +2024-08-27 01:51:32,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207770.66666666666, ans=0.125 +2024-08-27 01:51:50,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=15.0 +2024-08-27 01:51:55,662 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.401e+02 1.606e+02 1.975e+02 3.175e+02, threshold=3.213e+02, percent-clipped=0.0 +2024-08-27 01:51:57,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207877.33333333334, ans=0.125 +2024-08-27 01:52:14,354 INFO [train.py:1114] (0/4) Epoch 16, batch 1650, loss[loss=0.1959, simple_loss=0.2745, pruned_loss=0.0425, ctc_loss=0.08096, over 19647.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2646, pruned_loss=0.04325, ctc_loss=0.08098, over 3832569.16 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0 +2024-08-27 01:52:16,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=207930.66666666666, ans=0.2 +2024-08-27 01:52:31,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=207984.0, ans=0.2 +2024-08-27 01:52:41,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=208037.33333333334, ans=0.125 +2024-08-27 01:52:46,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-08-27 01:53:04,346 INFO [train.py:1114] (0/4) Epoch 16, batch 1700, loss[loss=0.1591, simple_loss=0.2274, pruned_loss=0.03332, ctc_loss=0.06052, over 19662.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2642, pruned_loss=0.04287, ctc_loss=0.08024, over 3846213.36 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 64.0 +2024-08-27 01:53:06,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0 +2024-08-27 01:53:14,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.79 vs. 
limit=22.5 +2024-08-27 01:53:21,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208304.0, ans=0.125 +2024-08-27 01:53:42,381 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.468e+02 1.742e+02 2.214e+02 3.607e+02, threshold=3.484e+02, percent-clipped=2.0 +2024-08-27 01:53:53,050 INFO [train.py:1114] (0/4) Epoch 16, batch 1750, loss[loss=0.172, simple_loss=0.237, pruned_loss=0.03927, ctc_loss=0.07118, over 19699.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2638, pruned_loss=0.04284, ctc_loss=0.08008, over 3850569.47 frames. ], batch size: 45, lr: 9.31e-03, grad_scale: 32.0 +2024-08-27 01:53:58,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208464.0, ans=0.1 +2024-08-27 01:54:00,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.83 vs. limit=15.0 +2024-08-27 01:54:01,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208517.33333333334, ans=0.125 +2024-08-27 01:54:11,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208570.66666666666, ans=0.125 +2024-08-27 01:54:26,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=208624.0, ans=0.0 +2024-08-27 01:54:28,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208677.33333333334, ans=0.1 +2024-08-27 01:54:32,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=208677.33333333334, ans=0.1 +2024-08-27 01:54:37,047 INFO [train.py:1114] (0/4) Epoch 16, batch 1800, loss[loss=0.1879, simple_loss=0.2638, pruned_loss=0.04097, ctc_loss=0.0753, over 19609.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.264, pruned_loss=0.04294, ctc_loss=0.08003, over 3853165.70 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0 +2024-08-27 01:54:39,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.09 vs. limit=22.5 +2024-08-27 01:54:54,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208837.33333333334, ans=0.1 +2024-08-27 01:55:04,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.39 vs. limit=15.0 +2024-08-27 01:55:10,169 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.563e+02 1.995e+02 2.578e+02 4.186e+02, threshold=3.991e+02, percent-clipped=7.0 +2024-08-27 01:55:16,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208944.0, ans=0.1 +2024-08-27 01:55:16,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=208944.0, ans=0.025 +2024-08-27 01:55:20,656 INFO [train.py:1114] (0/4) Epoch 16, batch 1850, loss[loss=0.2044, simple_loss=0.2837, pruned_loss=0.04605, ctc_loss=0.08266, over 19587.00 frames. 
], tot_loss[loss=0.1905, simple_loss=0.2635, pruned_loss=0.04285, ctc_loss=0.07966, over 3856066.70 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0 +2024-08-27 01:55:20,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208997.33333333334, ans=0.125 +2024-08-27 01:55:25,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208997.33333333334, ans=0.125 +2024-08-27 01:55:29,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.22 vs. limit=15.0 +2024-08-27 01:55:35,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=209050.66666666666, ans=0.1 +2024-08-27 01:55:41,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.38 vs. limit=15.0 +2024-08-27 01:55:44,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=209104.0, ans=0.0 +2024-08-27 01:56:02,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=209210.66666666666, ans=0.1 +2024-08-27 01:56:04,479 INFO [train.py:1114] (0/4) Epoch 16, batch 1900, loss[loss=0.2, simple_loss=0.2804, pruned_loss=0.04301, ctc_loss=0.08389, over 19654.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2644, pruned_loss=0.04311, ctc_loss=0.08002, over 3861856.26 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0 +2024-08-27 01:56:12,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=209317.33333333334, ans=0.0 +2024-08-27 01:56:14,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=209317.33333333334, ans=0.0 +2024-08-27 01:56:20,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=209317.33333333334, ans=0.125 +2024-08-27 01:56:29,967 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:56:37,674 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.418e+02 1.626e+02 2.079e+02 4.675e+02, threshold=3.252e+02, percent-clipped=2.0 +2024-08-27 01:56:43,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=209477.33333333334, ans=0.2 +2024-08-27 01:56:48,334 INFO [train.py:1114] (0/4) Epoch 16, batch 1950, loss[loss=0.1775, simple_loss=0.2506, pruned_loss=0.03846, ctc_loss=0.06875, over 19579.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2655, pruned_loss=0.04327, ctc_loss=0.08042, over 3870276.46 frames. 
], batch size: 52, lr: 9.29e-03, grad_scale: 32.0 +2024-08-27 01:56:48,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=209530.66666666666, ans=0.0 +2024-08-27 01:56:56,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=209584.0, ans=0.125 +2024-08-27 01:57:04,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=209584.0, ans=0.2 +2024-08-27 01:57:08,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=209637.33333333334, ans=0.95 +2024-08-27 01:57:10,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.44 vs. limit=15.0 +2024-08-27 01:57:22,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=209690.66666666666, ans=0.125 +2024-08-27 01:57:25,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209744.0, ans=0.1 +2024-08-27 01:57:27,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=209744.0, ans=0.0 +2024-08-27 01:57:29,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209744.0, ans=0.125 +2024-08-27 01:57:34,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.80 vs. limit=15.0 +2024-08-27 01:57:35,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:35,866 INFO [train.py:1114] (0/4) Epoch 16, batch 2000, loss[loss=0.152, simple_loss=0.2227, pruned_loss=0.0294, ctc_loss=0.05601, over 19680.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2664, pruned_loss=0.04385, ctc_loss=0.08161, over 3856251.01 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0 +2024-08-27 01:57:38,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:39,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:55,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.90 vs. 
limit=15.0 +2024-08-27 01:58:07,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=209957.33333333334, ans=0.025 +2024-08-27 01:58:07,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=209957.33333333334, ans=0.05 +2024-08-27 01:58:09,432 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.401e+02 1.655e+02 2.254e+02 4.011e+02, threshold=3.310e+02, percent-clipped=6.0 +2024-08-27 01:58:11,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=210010.66666666666, ans=0.125 +2024-08-27 01:58:13,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=210010.66666666666, ans=0.0 +2024-08-27 01:58:14,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=210010.66666666666, ans=0.0 +2024-08-27 01:58:14,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=210010.66666666666, ans=0.125 +2024-08-27 01:58:15,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.09 vs. limit=15.0 +2024-08-27 01:58:20,006 INFO [train.py:1114] (0/4) Epoch 16, batch 2050, loss[loss=0.164, simple_loss=0.2329, pruned_loss=0.03515, ctc_loss=0.06226, over 19724.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2651, pruned_loss=0.04358, ctc_loss=0.08115, over 3852418.76 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0 +2024-08-27 01:58:41,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=210170.66666666666, ans=0.125 +2024-08-27 01:58:44,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=210170.66666666666, ans=0.125 +2024-08-27 01:58:52,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210224.0, ans=0.1 +2024-08-27 01:59:03,126 INFO [train.py:1114] (0/4) Epoch 16, batch 2100, loss[loss=0.1883, simple_loss=0.2676, pruned_loss=0.03991, ctc_loss=0.07308, over 19785.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2645, pruned_loss=0.04313, ctc_loss=0.08036, over 3859205.20 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 32.0 +2024-08-27 01:59:08,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=15.0 +2024-08-27 01:59:17,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=210384.0, ans=0.125 +2024-08-27 01:59:23,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=210437.33333333334, ans=0.025 +2024-08-27 01:59:26,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.70 vs. limit=22.5 +2024-08-27 01:59:26,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.26 vs. 
limit=10.0 +2024-08-27 01:59:34,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=210490.66666666666, ans=0.0 +2024-08-27 01:59:35,724 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.547e+02 1.892e+02 2.472e+02 4.594e+02, threshold=3.784e+02, percent-clipped=3.0 +2024-08-27 01:59:41,216 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:59:42,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=210544.0, ans=0.125 +2024-08-27 01:59:43,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=22.5 +2024-08-27 01:59:47,026 INFO [train.py:1114] (0/4) Epoch 16, batch 2150, loss[loss=0.1746, simple_loss=0.2515, pruned_loss=0.03507, ctc_loss=0.06905, over 19604.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2636, pruned_loss=0.04276, ctc_loss=0.0797, over 3869138.34 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0 +2024-08-27 01:59:59,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=210650.66666666666, ans=6.0 +2024-08-27 02:00:02,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=210650.66666666666, ans=0.125 +2024-08-27 02:00:13,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=210757.33333333334, ans=0.125 +2024-08-27 02:00:30,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.96 vs. limit=5.0 +2024-08-27 02:00:30,373 INFO [train.py:1114] (0/4) Epoch 16, batch 2200, loss[loss=0.1957, simple_loss=0.268, pruned_loss=0.04547, ctc_loss=0.08118, over 19590.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2634, pruned_loss=0.04246, ctc_loss=0.07936, over 3866681.74 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0 +2024-08-27 02:01:06,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.493e+02 1.671e+02 2.113e+02 4.070e+02, threshold=3.342e+02, percent-clipped=1.0 +2024-08-27 02:01:11,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=211077.33333333334, ans=0.125 +2024-08-27 02:01:17,554 INFO [train.py:1114] (0/4) Epoch 16, batch 2250, loss[loss=0.1968, simple_loss=0.2752, pruned_loss=0.043, ctc_loss=0.08093, over 19629.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2637, pruned_loss=0.04244, ctc_loss=0.0794, over 3866935.30 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:01:31,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=15.0 +2024-08-27 02:01:39,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211237.33333333334, ans=0.125 +2024-08-27 02:01:39,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.43 vs. 
limit=6.0 +2024-08-27 02:01:44,297 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:01:49,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211290.66666666666, ans=0.125 +2024-08-27 02:01:58,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=211344.0, ans=0.0 +2024-08-27 02:02:00,434 INFO [train.py:1114] (0/4) Epoch 16, batch 2300, loss[loss=0.1693, simple_loss=0.2442, pruned_loss=0.03427, ctc_loss=0.06488, over 19502.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2628, pruned_loss=0.04242, ctc_loss=0.07917, over 3861191.67 frames. ], batch size: 49, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:02:00,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=211397.33333333334, ans=0.0 +2024-08-27 02:02:12,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=211450.66666666666, ans=0.04949747468305833 +2024-08-27 02:02:16,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=211450.66666666666, ans=0.0 +2024-08-27 02:02:25,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211557.33333333334, ans=0.1 +2024-08-27 02:02:28,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211557.33333333334, ans=0.1 +2024-08-27 02:02:33,257 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.480e+02 1.722e+02 2.096e+02 3.640e+02, threshold=3.444e+02, percent-clipped=3.0 +2024-08-27 02:02:41,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0 +2024-08-27 02:02:44,148 INFO [train.py:1114] (0/4) Epoch 16, batch 2350, loss[loss=0.2232, simple_loss=0.2886, pruned_loss=0.05842, ctc_loss=0.1023, over 19649.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2636, pruned_loss=0.04302, ctc_loss=0.08004, over 3862969.39 frames. 
], batch size: 63, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:02:56,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=211717.33333333334, ans=0.0 +2024-08-27 02:03:22,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=211824.0, ans=0.0 +2024-08-27 02:03:25,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=211877.33333333334, ans=0.125 +2024-08-27 02:03:27,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211877.33333333334, ans=0.125 +2024-08-27 02:03:30,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211877.33333333334, ans=0.1 +2024-08-27 02:03:33,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211930.66666666666, ans=0.1 +2024-08-27 02:03:34,519 INFO [train.py:1114] (0/4) Epoch 16, batch 2400, loss[loss=0.225, simple_loss=0.294, pruned_loss=0.05828, ctc_loss=0.09858, over 19453.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2659, pruned_loss=0.0439, ctc_loss=0.08143, over 3857514.17 frames. ], batch size: 67, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:03:39,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=211930.66666666666, ans=0.125 +2024-08-27 02:03:43,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=211984.0, ans=0.025 +2024-08-27 02:03:44,189 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:03:48,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=211984.0, ans=0.0 +2024-08-27 02:03:53,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=212037.33333333334, ans=0.125 +2024-08-27 02:03:57,109 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:04:01,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=212090.66666666666, ans=0.0 +2024-08-27 02:04:07,966 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.442e+02 1.653e+02 2.239e+02 3.362e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-27 02:04:08,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=212090.66666666666, ans=0.0 +2024-08-27 02:04:13,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=212144.0, ans=0.2 +2024-08-27 02:04:17,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=212144.0, ans=0.0 +2024-08-27 02:04:18,796 INFO [train.py:1114] (0/4) Epoch 16, batch 2450, loss[loss=0.2435, simple_loss=0.2889, pruned_loss=0.07095, ctc_loss=0.1406, over 13465.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2692, pruned_loss=0.04618, ctc_loss=0.08601, over 3733785.23 frames. 
], batch size: 140, lr: 9.23e-03, grad_scale: 32.0 +2024-08-27 02:04:23,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.89 vs. limit=15.0 +2024-08-27 02:04:30,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=212250.66666666666, ans=0.025 +2024-08-27 02:04:39,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.60 vs. limit=22.5 +2024-08-27 02:04:42,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=212304.0, ans=0.0 +2024-08-27 02:04:52,941 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-16.pt +2024-08-27 02:05:43,525 INFO [train.py:1114] (0/4) Epoch 17, batch 0, loss[loss=0.185, simple_loss=0.2569, pruned_loss=0.04078, ctc_loss=0.07875, over 19800.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2569, pruned_loss=0.04078, ctc_loss=0.07875, over 19800.00 frames. ], batch size: 49, lr: 8.95e-03, grad_scale: 32.0 +2024-08-27 02:05:43,526 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-27 02:05:50,949 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.3685, 4.7511, 5.3221, 5.1868], device='cuda:0') +2024-08-27 02:05:53,290 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.172, simple_loss=0.265, pruned_loss=0.02949, ctc_loss=0.04976, over 944034.00 frames. +2024-08-27 02:05:53,290 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-27 02:06:02,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212458.66666666666, ans=0.1 +2024-08-27 02:06:05,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=212458.66666666666, ans=0.02 +2024-08-27 02:06:12,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=212512.0, ans=0.125 +2024-08-27 02:06:12,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=212512.0, ans=0.0 +2024-08-27 02:06:40,302 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.629e+02 1.801e+02 2.001e+02 3.255e+02, threshold=3.602e+02, percent-clipped=0.0 +2024-08-27 02:06:40,336 INFO [train.py:1114] (0/4) Epoch 17, batch 50, loss[loss=0.17, simple_loss=0.2384, pruned_loss=0.03751, ctc_loss=0.06631, over 19711.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2649, pruned_loss=0.0421, ctc_loss=0.07957, over 844772.04 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:06:48,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212725.33333333334, ans=0.1 +2024-08-27 02:06:51,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.13 vs. 
limit=15.0 +2024-08-27 02:07:04,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212778.66666666666, ans=0.125 +2024-08-27 02:07:04,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=212778.66666666666, ans=0.05 +2024-08-27 02:07:13,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=12.0 +2024-08-27 02:07:21,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=212885.33333333334, ans=10.0 +2024-08-27 02:07:22,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212885.33333333334, ans=0.1 +2024-08-27 02:07:29,652 INFO [train.py:1114] (0/4) Epoch 17, batch 100, loss[loss=0.1758, simple_loss=0.2502, pruned_loss=0.03671, ctc_loss=0.06976, over 19727.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2665, pruned_loss=0.043, ctc_loss=0.08084, over 1498273.11 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:08:09,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213098.66666666666, ans=0.125 +2024-08-27 02:08:16,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213152.0, ans=0.125 +2024-08-27 02:08:20,135 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.665e+02 2.006e+02 3.256e+02, threshold=3.330e+02, percent-clipped=0.0 +2024-08-27 02:08:20,169 INFO [train.py:1114] (0/4) Epoch 17, batch 150, loss[loss=0.1762, simple_loss=0.2435, pruned_loss=0.03967, ctc_loss=0.07384, over 19685.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2645, pruned_loss=0.04269, ctc_loss=0.08014, over 2027311.53 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:08:23,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=213205.33333333334, ans=0.2 +2024-08-27 02:08:41,271 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-40000.pt +2024-08-27 02:08:57,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213312.0, ans=0.125 +2024-08-27 02:10:35,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=213418.66666666666, ans=10.0 +2024-08-27 02:10:53,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=213418.66666666666, ans=0.2 +2024-08-27 02:10:55,458 INFO [train.py:1114] (0/4) Epoch 17, batch 200, loss[loss=0.216, simple_loss=0.2819, pruned_loss=0.05503, ctc_loss=0.1001, over 18082.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2636, pruned_loss=0.04255, ctc_loss=0.07958, over 2434813.02 frames. 
], batch size: 85, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:10:56,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=213472.0, ans=0.0 +2024-08-27 02:11:03,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213472.0, ans=0.1 +2024-08-27 02:11:04,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=213472.0, ans=0.0 +2024-08-27 02:11:04,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=213472.0, ans=0.95 +2024-08-27 02:11:07,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=213525.33333333334, ans=0.125 +2024-08-27 02:11:19,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=213578.66666666666, ans=0.125 +2024-08-27 02:11:23,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=12.0 +2024-08-27 02:11:41,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=213685.33333333334, ans=0.1 +2024-08-27 02:11:48,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213738.66666666666, ans=0.1 +2024-08-27 02:11:49,177 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.468e+02 1.730e+02 2.457e+02 4.645e+02, threshold=3.460e+02, percent-clipped=6.0 +2024-08-27 02:11:49,210 INFO [train.py:1114] (0/4) Epoch 17, batch 250, loss[loss=0.192, simple_loss=0.2647, pruned_loss=0.04364, ctc_loss=0.0801, over 19421.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2626, pruned_loss=0.04198, ctc_loss=0.07852, over 2755282.98 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:12:14,665 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:12:20,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.31 vs. limit=15.0 +2024-08-27 02:12:25,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=12.0 +2024-08-27 02:12:39,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.12 vs. limit=22.5 +2024-08-27 02:14:34,926 INFO [train.py:1114] (0/4) Epoch 17, batch 300, loss[loss=0.1925, simple_loss=0.2716, pruned_loss=0.04132, ctc_loss=0.07691, over 19530.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2621, pruned_loss=0.04181, ctc_loss=0.07808, over 3000773.43 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:14:36,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=214005.33333333334, ans=0.125 +2024-08-27 02:14:40,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. 
limit=15.0 +2024-08-27 02:14:46,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=214058.66666666666, ans=0.0 +2024-08-27 02:14:48,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=214058.66666666666, ans=0.04949747468305833 +2024-08-27 02:14:59,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=214112.0, ans=0.0 +2024-08-27 02:16:23,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=15.0 +2024-08-27 02:16:48,693 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.705e+02 2.074e+02 4.169e+02, threshold=3.410e+02, percent-clipped=2.0 +2024-08-27 02:16:48,727 INFO [train.py:1114] (0/4) Epoch 17, batch 350, loss[loss=0.1651, simple_loss=0.2397, pruned_loss=0.03291, ctc_loss=0.06195, over 19745.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.263, pruned_loss=0.0421, ctc_loss=0.07876, over 3190658.25 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 16.0 +2024-08-27 02:16:49,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214272.0, ans=0.1 +2024-08-27 02:16:57,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=214325.33333333334, ans=0.125 +2024-08-27 02:17:03,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214325.33333333334, ans=0.125 +2024-08-27 02:17:17,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=214432.0, ans=0.2 +2024-08-27 02:17:25,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=214432.0, ans=0.125 +2024-08-27 02:17:31,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=214485.33333333334, ans=0.05 +2024-08-27 02:17:31,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214485.33333333334, ans=0.1 +2024-08-27 02:17:36,092 INFO [train.py:1114] (0/4) Epoch 17, batch 400, loss[loss=0.1921, simple_loss=0.2719, pruned_loss=0.0401, ctc_loss=0.08008, over 19506.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2628, pruned_loss=0.04204, ctc_loss=0.0786, over 3343295.33 frames. 
], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-27 02:17:39,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=214538.66666666666, ans=0.05 +2024-08-27 02:17:42,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214538.66666666666, ans=0.1 +2024-08-27 02:18:09,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=214698.66666666666, ans=0.125 +2024-08-27 02:18:15,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=214752.0, ans=0.125 +2024-08-27 02:18:25,568 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.479e+02 1.707e+02 2.031e+02 4.496e+02, threshold=3.413e+02, percent-clipped=2.0 +2024-08-27 02:18:25,601 INFO [train.py:1114] (0/4) Epoch 17, batch 450, loss[loss=0.1788, simple_loss=0.2674, pruned_loss=0.03229, ctc_loss=0.06397, over 19619.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2632, pruned_loss=0.04223, ctc_loss=0.07889, over 3449981.31 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:18:29,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=214805.33333333334, ans=0.015 +2024-08-27 02:18:29,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214805.33333333334, ans=0.1 +2024-08-27 02:18:49,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.12 vs. limit=22.5 +2024-08-27 02:18:52,685 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:18:53,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.00 vs. limit=12.0 +2024-08-27 02:19:11,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=215018.66666666666, ans=0.0 +2024-08-27 02:19:18,887 INFO [train.py:1114] (0/4) Epoch 17, batch 500, loss[loss=0.2009, simple_loss=0.2799, pruned_loss=0.04453, ctc_loss=0.08197, over 19663.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2625, pruned_loss=0.04171, ctc_loss=0.0781, over 3545189.32 frames. 
], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:19:19,023 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:19:20,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=215072.0, ans=0.0 +2024-08-27 02:19:24,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=215072.0, ans=0.125 +2024-08-27 02:19:25,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=215072.0, ans=0.07 +2024-08-27 02:19:28,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=215125.33333333334, ans=0.0 +2024-08-27 02:19:44,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=215178.66666666666, ans=0.0 +2024-08-27 02:19:52,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=215232.0, ans=0.5 +2024-08-27 02:20:25,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=215285.33333333334, ans=0.2 +2024-08-27 02:20:44,559 INFO [train.py:1114] (0/4) Epoch 17, batch 550, loss[loss=0.2107, simple_loss=0.2823, pruned_loss=0.05038, ctc_loss=0.09573, over 19206.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2627, pruned_loss=0.04193, ctc_loss=0.07835, over 3607486.88 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 16.0 +2024-08-27 02:20:45,394 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.446e+02 1.711e+02 2.254e+02 3.980e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-27 02:20:45,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=215338.66666666666, ans=0.125 +2024-08-27 02:20:47,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=215338.66666666666, ans=0.0 +2024-08-27 02:21:09,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215445.33333333334, ans=0.125 +2024-08-27 02:21:19,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=215498.66666666666, ans=0.025 +2024-08-27 02:21:32,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=215605.33333333334, ans=0.0 +2024-08-27 02:21:42,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=215605.33333333334, ans=0.02 +2024-08-27 02:21:43,257 INFO [train.py:1114] (0/4) Epoch 17, batch 600, loss[loss=0.2051, simple_loss=0.2783, pruned_loss=0.04752, ctc_loss=0.09202, over 19446.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.263, pruned_loss=0.04196, ctc_loss=0.07839, over 3664553.91 frames. 
], batch size: 67, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:21:56,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=215658.66666666666, ans=0.0 +2024-08-27 02:21:57,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=215658.66666666666, ans=0.0 +2024-08-27 02:22:27,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-27 02:22:35,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-08-27 02:22:35,807 INFO [train.py:1114] (0/4) Epoch 17, batch 650, loss[loss=0.1712, simple_loss=0.2479, pruned_loss=0.03456, ctc_loss=0.06352, over 19762.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2621, pruned_loss=0.04154, ctc_loss=0.07772, over 3715413.88 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:22:36,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.454e+02 1.765e+02 2.281e+02 4.784e+02, threshold=3.530e+02, percent-clipped=4.0 +2024-08-27 02:23:10,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216032.0, ans=0.1 +2024-08-27 02:23:13,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216032.0, ans=0.125 +2024-08-27 02:23:24,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0 +2024-08-27 02:23:25,370 INFO [train.py:1114] (0/4) Epoch 17, batch 700, loss[loss=0.1732, simple_loss=0.2573, pruned_loss=0.03241, ctc_loss=0.06087, over 19728.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2631, pruned_loss=0.04178, ctc_loss=0.07808, over 3747076.79 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:23:28,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=216138.66666666666, ans=0.125 +2024-08-27 02:23:46,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.40 vs. limit=15.0 +2024-08-27 02:23:47,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=216245.33333333334, ans=0.0 +2024-08-27 02:23:50,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=216245.33333333334, ans=0.125 +2024-08-27 02:23:53,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.79 vs. limit=15.0 +2024-08-27 02:27:37,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216352.0, ans=0.1 +2024-08-27 02:28:51,343 INFO [train.py:1114] (0/4) Epoch 17, batch 750, loss[loss=0.1876, simple_loss=0.2713, pruned_loss=0.03735, ctc_loss=0.073, over 19495.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2632, pruned_loss=0.04194, ctc_loss=0.0783, over 3773742.85 frames. 
], batch size: 54, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:29:21,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.483e+02 1.820e+02 2.509e+02 4.091e+02, threshold=3.640e+02, percent-clipped=8.0 +2024-08-27 02:32:03,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216405.33333333334, ans=0.1 +2024-08-27 02:38:07,121 INFO [train.py:1114] (0/4) Epoch 17, batch 800, loss[loss=0.1737, simple_loss=0.2405, pruned_loss=0.03873, ctc_loss=0.07362, over 19403.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2631, pruned_loss=0.04198, ctc_loss=0.07858, over 3795895.10 frames. ], batch size: 48, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:39:28,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216725.33333333334, ans=0.1 +2024-08-27 02:39:36,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=216725.33333333334, ans=0.025 +2024-08-27 02:40:06,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=216778.66666666666, ans=0.025 +2024-08-27 02:40:10,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-08-27 02:40:19,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=216832.0, ans=0.125 +2024-08-27 02:40:43,402 INFO [train.py:1114] (0/4) Epoch 17, batch 850, loss[loss=0.1971, simple_loss=0.2769, pruned_loss=0.04329, ctc_loss=0.07703, over 19634.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2629, pruned_loss=0.04183, ctc_loss=0.07816, over 3815942.73 frames. ], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:40:44,265 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.490e+02 1.788e+02 2.181e+02 3.218e+02, threshold=3.576e+02, percent-clipped=0.0 +2024-08-27 02:40:46,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=216938.66666666666, ans=0.0 +2024-08-27 02:40:56,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=216992.0, ans=0.125 +2024-08-27 02:40:57,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216992.0, ans=0.1 +2024-08-27 02:41:18,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217045.33333333334, ans=0.1 +2024-08-27 02:41:23,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.63 vs. 
limit=6.0 +2024-08-27 02:41:28,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=217098.66666666666, ans=0.025 +2024-08-27 02:41:31,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=217098.66666666666, ans=0.025 +2024-08-27 02:41:34,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=217098.66666666666, ans=0.07 +2024-08-27 02:41:34,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217098.66666666666, ans=0.1 +2024-08-27 02:41:37,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.60 vs. limit=10.0 +2024-08-27 02:41:48,113 INFO [train.py:1114] (0/4) Epoch 17, batch 900, loss[loss=0.1622, simple_loss=0.2363, pruned_loss=0.03176, ctc_loss=0.06143, over 19407.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2633, pruned_loss=0.04218, ctc_loss=0.07889, over 3820019.15 frames. ], batch size: 48, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:41:49,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.92 vs. limit=22.5 +2024-08-27 02:41:52,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0 +2024-08-27 02:42:01,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217258.66666666666, ans=0.125 +2024-08-27 02:42:03,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=217258.66666666666, ans=0.2 +2024-08-27 02:42:03,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=217258.66666666666, ans=0.125 +2024-08-27 02:42:23,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=217365.33333333334, ans=0.2 +2024-08-27 02:42:37,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=217418.66666666666, ans=0.07 +2024-08-27 02:42:42,348 INFO [train.py:1114] (0/4) Epoch 17, batch 950, loss[loss=0.1934, simple_loss=0.2605, pruned_loss=0.04579, ctc_loss=0.08676, over 19522.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2637, pruned_loss=0.04243, ctc_loss=0.0794, over 3821118.76 frames. 
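The scaling.py:1024 Whitening entries compare a whiteness metric of some activation's per-group channel covariance against a limit ("metric=M vs. limit=L"), with a corrective penalty presumably applied only while the metric exceeds the limit. One metric with the right behavior, and consistent with the logged values, is the mean squared eigenvalue of the covariance divided by its squared mean eigenvalue: 1.0 for a perfectly white (isotropic) covariance, growing as variance concentrates in few directions. This is an inference from the logs, not a quote of icefall's formula.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """mean(eig(C)^2) / mean(eig(C))^2 for per-group channel covariances C."""
    n, c = x.shape
    k = c // num_groups
    x = x.reshape(n, num_groups, k).transpose(0, 1)   # (groups, frames, k)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n                   # (groups, k, k)
    mean_eig_sq = (cov * cov).sum(dim=(1, 2)) / k     # trace(C^2) / k
    mean_eig = torch.diagonal(cov, dim1=1, dim2=2).mean(dim=1)  # trace(C) / k
    return (mean_eig_sq / mean_eig ** 2).mean().item()

x = torch.randn(200, 128)                  # nearly white activations
print(whitening_metric(x, num_groups=4))   # close to 1.0, far below limit=6.0
```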
], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:42:43,217 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.442e+02 1.596e+02 1.963e+02 3.277e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-27 02:43:04,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217525.33333333334, ans=0.1 +2024-08-27 02:44:44,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217632.0, ans=0.1 +2024-08-27 02:45:06,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=217685.33333333334, ans=0.2 +2024-08-27 02:45:22,505 INFO [train.py:1114] (0/4) Epoch 17, batch 1000, loss[loss=0.1624, simple_loss=0.2442, pruned_loss=0.02932, ctc_loss=0.05473, over 19843.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2644, pruned_loss=0.04272, ctc_loss=0.07987, over 3816399.91 frames. ], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:45:22,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=217738.66666666666, ans=0.0 +2024-08-27 02:45:22,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217738.66666666666, ans=0.125 +2024-08-27 02:45:24,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=217738.66666666666, ans=0.1 +2024-08-27 02:45:44,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.76 vs. limit=15.0 +2024-08-27 02:45:46,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217792.0, ans=0.125 +2024-08-27 02:45:48,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217845.33333333334, ans=0.1 +2024-08-27 02:45:51,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=217845.33333333334, ans=0.5 +2024-08-27 02:46:05,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=217845.33333333334, ans=0.2 +2024-08-27 02:46:21,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.65 vs. limit=22.5 +2024-08-27 02:46:27,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218005.33333333334, ans=0.125 +2024-08-27 02:46:28,504 INFO [train.py:1114] (0/4) Epoch 17, batch 1050, loss[loss=0.1996, simple_loss=0.2735, pruned_loss=0.04417, ctc_loss=0.09344, over 19845.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2637, pruned_loss=0.04245, ctc_loss=0.07927, over 3821538.54 frames. 
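Each train.py:1114 line pairs the current batch's loss (loss[..., over N frames]) with a running tot_loss[...] aggregated over the frames seen so far in the epoch. The sketch below assumes a plain frame-weighted running average; icefall may additionally decay older batches, so treat this as the idea rather than the exact formula.

```python
class RunningLoss:
    """Frame-weighted running average, like the tot_loss[...] fields."""

    def __init__(self):
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.weighted_sum += batch_loss * batch_frames
        self.frames += batch_frames
        return self.weighted_sum / self.frames

tot = RunningLoss()
print(tot.update(0.2107, 19206.0))  # equals the first batch's loss
print(tot.update(0.1712, 19762.0))  # drifts toward the newer batch
```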
], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:46:29,428 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.403e+02 1.586e+02 2.025e+02 2.959e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-27 02:46:29,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=218005.33333333334, ans=0.125 +2024-08-27 02:47:08,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218112.0, ans=0.125 +2024-08-27 02:47:11,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0 +2024-08-27 02:47:25,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=218218.66666666666, ans=0.125 +2024-08-27 02:47:38,618 INFO [train.py:1114] (0/4) Epoch 17, batch 1100, loss[loss=0.1778, simple_loss=0.2591, pruned_loss=0.03525, ctc_loss=0.06508, over 19593.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2631, pruned_loss=0.04199, ctc_loss=0.0785, over 3829762.59 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:47:39,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=218272.0, ans=0.2 +2024-08-27 02:47:43,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=218272.0, ans=0.125 +2024-08-27 02:47:49,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-27 02:48:24,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.10 vs. limit=22.5 +2024-08-27 02:48:26,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.56 vs. 
limit=15.0 +2024-08-27 02:48:29,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-27 02:48:55,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218378.66666666666, ans=0.1 +2024-08-27 02:49:08,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218432.0, ans=0.1 +2024-08-27 02:49:09,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=218432.0, ans=0.2 +2024-08-27 02:49:09,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=218432.0, ans=0.025 +2024-08-27 02:49:17,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=218485.33333333334, ans=0.125 +2024-08-27 02:49:22,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=218485.33333333334, ans=0.025 +2024-08-27 02:49:27,456 INFO [train.py:1114] (0/4) Epoch 17, batch 1150, loss[loss=0.1788, simple_loss=0.2551, pruned_loss=0.03743, ctc_loss=0.06901, over 19563.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2635, pruned_loss=0.04229, ctc_loss=0.07894, over 3829113.45 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:49:28,308 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.459e+02 1.619e+02 1.965e+02 3.390e+02, threshold=3.239e+02, percent-clipped=1.0 +2024-08-27 02:49:44,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=15.0 +2024-08-27 02:49:45,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=218645.33333333334, ans=0.2 +2024-08-27 02:49:51,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-08-27 02:49:54,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.64 vs. limit=22.5 +2024-08-27 02:49:59,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.99 vs. limit=10.0 +2024-08-27 02:49:59,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=218698.66666666666, ans=0.125 +2024-08-27 02:50:09,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=218752.0, ans=0.0 +2024-08-27 02:50:14,204 INFO [train.py:1114] (0/4) Epoch 17, batch 1200, loss[loss=0.1969, simple_loss=0.2754, pruned_loss=0.04338, ctc_loss=0.07903, over 19840.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.264, pruned_loss=0.04245, ctc_loss=0.07916, over 3824969.65 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:50:23,076 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.52 vs. 
limit=10.0 +2024-08-27 02:50:52,251 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:51:02,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0 +2024-08-27 02:51:28,547 INFO [train.py:1114] (0/4) Epoch 17, batch 1250, loss[loss=0.1974, simple_loss=0.2749, pruned_loss=0.04342, ctc_loss=0.08289, over 19509.00 frames. ], tot_loss[loss=0.19, simple_loss=0.264, pruned_loss=0.04226, ctc_loss=0.07859, over 3843027.80 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:51:29,443 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.488e+02 1.826e+02 2.228e+02 3.440e+02, threshold=3.652e+02, percent-clipped=1.0 +2024-08-27 02:52:40,142 INFO [train.py:1114] (0/4) Epoch 17, batch 1300, loss[loss=0.208, simple_loss=0.2826, pruned_loss=0.0485, ctc_loss=0.09084, over 18867.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2632, pruned_loss=0.04203, ctc_loss=0.07835, over 3845692.99 frames. ], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:52:45,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-08-27 02:52:52,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-08-27 02:52:54,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=219392.0, ans=0.1 +2024-08-27 02:52:59,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=219445.33333333334, ans=0.125 +2024-08-27 02:53:01,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=219445.33333333334, ans=15.0 +2024-08-27 02:53:49,249 INFO [train.py:1114] (0/4) Epoch 17, batch 1350, loss[loss=0.1768, simple_loss=0.2517, pruned_loss=0.03681, ctc_loss=0.07077, over 19764.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2624, pruned_loss=0.04171, ctc_loss=0.07757, over 3857308.50 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:53:50,128 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.487e+02 1.709e+02 2.118e+02 3.687e+02, threshold=3.418e+02, percent-clipped=1.0 +2024-08-27 02:53:51,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219605.33333333334, ans=0.125 +2024-08-27 02:53:53,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=219605.33333333334, ans=0.0 +2024-08-27 02:54:01,705 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=15.0 +2024-08-27 02:54:04,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=219658.66666666666, ans=0.125 +2024-08-27 02:54:15,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.99 vs. 
limit=15.0 +2024-08-27 02:54:42,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.14 vs. limit=15.0 +2024-08-27 02:54:47,053 INFO [train.py:1114] (0/4) Epoch 17, batch 1400, loss[loss=0.1823, simple_loss=0.2458, pruned_loss=0.04298, ctc_loss=0.08201, over 19668.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2622, pruned_loss=0.04178, ctc_loss=0.07771, over 3864290.68 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:54:51,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=219872.0, ans=0.125 +2024-08-27 02:54:53,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=219872.0, ans=0.0 +2024-08-27 02:54:57,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219925.33333333334, ans=0.125 +2024-08-27 02:55:19,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=220032.0, ans=0.0 +2024-08-27 02:55:28,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=220032.0, ans=0.0 +2024-08-27 02:55:39,016 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:55:41,670 INFO [train.py:1114] (0/4) Epoch 17, batch 1450, loss[loss=0.2155, simple_loss=0.2858, pruned_loss=0.05326, ctc_loss=0.09695, over 19666.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.263, pruned_loss=0.04199, ctc_loss=0.07837, over 3863106.64 frames. ], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:55:42,532 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.445e+02 1.654e+02 2.032e+02 3.496e+02, threshold=3.307e+02, percent-clipped=1.0 +2024-08-27 02:55:52,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=220192.0, ans=0.125 +2024-08-27 02:56:06,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220245.33333333334, ans=0.125 +2024-08-27 02:56:21,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=220298.66666666666, ans=0.035 +2024-08-27 02:56:24,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=220352.0, ans=0.0 +2024-08-27 02:56:31,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=220352.0, ans=0.2 +2024-08-27 02:56:35,339 INFO [train.py:1114] (0/4) Epoch 17, batch 1500, loss[loss=0.203, simple_loss=0.2731, pruned_loss=0.04898, ctc_loss=0.08713, over 19599.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.263, pruned_loss=0.04195, ctc_loss=0.07826, over 3863722.97 frames. 
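The many balancer entries (min_positive, min_abs, max_abs, and their scheduled prob values) belong to modules that keep per-channel activation statistics inside a target range by injecting a small corrective term during backward; prob is how often the constraint is applied on a given batch. The sketch below illustrates the idea for the mean-absolute-value constraint only and is heavily hedged: it is not icefall's Balancer, which also constrains the fraction of positive values.

```python
import random

import torch

class MeanAbsBalancer(torch.autograd.Function):
    """Identity in forward; backward nudges mean |x| into [min_abs, max_abs]."""

    @staticmethod
    def forward(ctx, x, min_abs, max_abs, grad_scale):
        ctx.save_for_backward(x)
        ctx.cfg = (min_abs, max_abs, grad_scale)
        return x.clone()

    @staticmethod
    def backward(ctx, grad_out):
        (x,) = ctx.saved_tensors
        min_abs, max_abs, grad_scale = ctx.cfg
        mean_abs = x.abs().mean(dim=0, keepdim=True)       # per-channel stat
        # +1 where |x| should grow, -1 where it should shrink, else 0
        direction = (mean_abs < min_abs).float() - (mean_abs > max_abs).float()
        extra = -direction * torch.sign(x) * grad_scale * grad_out.abs().mean()
        return grad_out + extra, None, None, None

def balance(x, min_abs=0.5, max_abs=10.0, prob=0.125, grad_scale=0.02):
    if torch.is_grad_enabled() and random.random() < prob:
        return MeanAbsBalancer.apply(x, min_abs, max_abs, grad_scale)
    return x

x = torch.randn(50, 16, requires_grad=True)
balance(x, prob=1.0).sum().backward()   # prob=1.0 so the demo always applies
```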
], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:56:40,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=220405.33333333334, ans=0.025 +2024-08-27 02:56:48,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220458.66666666666, ans=0.1 +2024-08-27 02:57:00,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=220512.0, ans=0.125 +2024-08-27 02:57:10,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-27 02:57:18,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=220618.66666666666, ans=0.0 +2024-08-27 02:57:22,838 INFO [train.py:1114] (0/4) Epoch 17, batch 1550, loss[loss=0.2095, simple_loss=0.2766, pruned_loss=0.05222, ctc_loss=0.09488, over 19627.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2632, pruned_loss=0.04227, ctc_loss=0.07894, over 3848490.77 frames. ], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:57:23,800 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.433e+02 1.700e+02 2.311e+02 3.923e+02, threshold=3.401e+02, percent-clipped=1.0 +2024-08-27 02:57:51,118 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:57:51,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=220725.33333333334, ans=0.125 +2024-08-27 02:57:52,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220725.33333333334, ans=0.125 +2024-08-27 02:58:00,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=220778.66666666666, ans=0.0 +2024-08-27 02:58:00,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220778.66666666666, ans=0.1 +2024-08-27 02:58:05,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=220778.66666666666, ans=0.0 +2024-08-27 02:58:14,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=220832.0, ans=0.0 +2024-08-27 02:58:14,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220832.0, ans=0.1 +2024-08-27 02:58:16,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=220832.0, ans=0.2 +2024-08-27 02:58:16,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=220832.0, ans=0.2 +2024-08-27 02:58:18,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=220885.33333333334, ans=0.0 +2024-08-27 02:58:27,685 INFO [train.py:1114] (0/4) Epoch 17, batch 1600, loss[loss=0.1898, simple_loss=0.2635, pruned_loss=0.04207, ctc_loss=0.07972, over 19833.00 frames. 
], tot_loss[loss=0.1896, simple_loss=0.2632, pruned_loss=0.04223, ctc_loss=0.07886, over 3837459.82 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-27 02:59:25,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220992.0, ans=0.125 +2024-08-27 03:00:24,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=221045.33333333334, ans=0.125 +2024-08-27 03:00:38,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.06 vs. limit=15.0 +2024-08-27 03:00:50,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=221152.0, ans=0.5 +2024-08-27 03:00:55,413 INFO [train.py:1114] (0/4) Epoch 17, batch 1650, loss[loss=0.1928, simple_loss=0.2767, pruned_loss=0.03943, ctc_loss=0.07504, over 19655.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2624, pruned_loss=0.0418, ctc_loss=0.07824, over 3833286.80 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:00:55,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.02 vs. limit=15.0 +2024-08-27 03:00:58,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.431e+02 1.952e+02 2.452e+02 3.980e+02, threshold=3.905e+02, percent-clipped=5.0 +2024-08-27 03:01:05,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=221205.33333333334, ans=0.125 +2024-08-27 03:01:12,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=221258.66666666666, ans=0.0 +2024-08-27 03:01:25,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=221365.33333333334, ans=0.125 +2024-08-27 03:01:49,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=221418.66666666666, ans=0.0 +2024-08-27 03:01:55,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221418.66666666666, ans=0.1 +2024-08-27 03:01:57,109 INFO [train.py:1114] (0/4) Epoch 17, batch 1700, loss[loss=0.1683, simple_loss=0.2352, pruned_loss=0.03726, ctc_loss=0.06737, over 19662.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2623, pruned_loss=0.04164, ctc_loss=0.0778, over 3846857.71 frames. 
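The grad_scale column is mixed-precision loss scaling: the scale is halved when a step overflows and grown back after a stretch of stable steps, which is why it moves between 32.0 and 16.0 through this log. icefall manages this inside its own training loop; the same mechanics can be seen with PyTorch's stock scaler (values here are illustrative).

```python
import torch

# On CPU-only builds GradScaler disables itself and get_scale() returns 1.0.
scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000,
                                   enabled=torch.cuda.is_available())
print(scaler.get_scale())
# Schematic training step:
#   with torch.cuda.amp.autocast():
#       loss = compute_loss(model, batch)   # hypothetical helper
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)
#   scaler.update()  # halves the scale on inf/nan, doubles it when stable
```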
], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:02:01,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=221472.0, ans=0.125 +2024-08-27 03:02:05,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=221525.33333333334, ans=0.0 +2024-08-27 03:02:07,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-27 03:02:43,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=221685.33333333334, ans=0.0 +2024-08-27 03:02:48,262 INFO [train.py:1114] (0/4) Epoch 17, batch 1750, loss[loss=0.1787, simple_loss=0.246, pruned_loss=0.04037, ctc_loss=0.07653, over 19628.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2621, pruned_loss=0.04156, ctc_loss=0.0776, over 3850512.50 frames. ], batch size: 45, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:02:49,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.526e+02 1.896e+02 2.459e+02 4.889e+02, threshold=3.791e+02, percent-clipped=1.0 +2024-08-27 03:03:07,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=221738.66666666666, ans=0.0 +2024-08-27 03:03:07,846 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.80 vs. limit=22.5 +2024-08-27 03:03:11,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=221792.0, ans=0.0 +2024-08-27 03:03:13,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.64 vs. limit=10.0 +2024-08-27 03:03:18,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=221792.0, ans=0.0 +2024-08-27 03:03:20,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=12.0 +2024-08-27 03:03:27,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221845.33333333334, ans=0.1 +2024-08-27 03:03:40,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=221952.0, ans=0.125 +2024-08-27 03:03:45,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221952.0, ans=0.125 +2024-08-27 03:03:46,718 INFO [train.py:1114] (0/4) Epoch 17, batch 1800, loss[loss=0.1835, simple_loss=0.2583, pruned_loss=0.04026, ctc_loss=0.07033, over 19609.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2624, pruned_loss=0.04167, ctc_loss=0.07771, over 3852141.97 frames. 
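The scheduled *_skip_rate values (conv_skip_rate, attention_skip_rate, ff2_skip_rate, bypass.skip_rate, ...) suggest stochastic-depth-style regularization: during training a submodule's contribution is dropped with the current scheduled probability. Note that many of them read ans=0.0 by this point, i.e. the schedule has decayed to never skipping. A minimal sketch of the mechanism, with invented names:

```python
import torch

def maybe_skip(module: torch.nn.Module, x: torch.Tensor,
               skip_rate: float, training: bool) -> torch.Tensor:
    """Return x unchanged with probability skip_rate, else the residual path."""
    if training and float(torch.rand(())) < skip_rate:
        return x                 # submodule skipped for this batch
    return x + module(x)         # normal residual computation

ff = torch.nn.Linear(16, 16)
x = torch.randn(4, 16)
y = maybe_skip(ff, x, skip_rate=0.0, training=True)  # ans=0.0: never skipped
```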
], batch size: 55, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:03:49,490 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:03:50,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=222005.33333333334, ans=0.2 +2024-08-27 03:03:53,811 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:04:18,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222165.33333333334, ans=0.125 +2024-08-27 03:04:24,958 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:04:30,759 INFO [train.py:1114] (0/4) Epoch 17, batch 1850, loss[loss=0.2226, simple_loss=0.2937, pruned_loss=0.05569, ctc_loss=0.1005, over 19585.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2624, pruned_loss=0.04171, ctc_loss=0.07775, over 3855966.23 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0 +2024-08-27 03:04:32,492 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.484e+02 1.846e+02 2.436e+02 4.218e+02, threshold=3.691e+02, percent-clipped=2.0 +2024-08-27 03:04:58,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=222432.0, ans=0.025 +2024-08-27 03:05:08,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.87 vs. limit=15.0 +2024-08-27 03:05:11,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=222485.33333333334, ans=0.125 +2024-08-27 03:05:14,606 INFO [train.py:1114] (0/4) Epoch 17, batch 1900, loss[loss=0.2084, simple_loss=0.2821, pruned_loss=0.04915, ctc_loss=0.09111, over 19659.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2631, pruned_loss=0.04186, ctc_loss=0.0781, over 3861031.91 frames. ], batch size: 59, lr: 8.75e-03, grad_scale: 16.0 +2024-08-27 03:05:19,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222538.66666666666, ans=0.125 +2024-08-27 03:05:43,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=222698.66666666666, ans=0.5 +2024-08-27 03:06:00,581 INFO [train.py:1114] (0/4) Epoch 17, batch 1950, loss[loss=0.188, simple_loss=0.2653, pruned_loss=0.04025, ctc_loss=0.07579, over 19559.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2639, pruned_loss=0.04187, ctc_loss=0.07821, over 3869686.70 frames. 
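The scaling.py:1120 WithLoss entries attach an auxiliary penalty to a named tensor (here various self_attn_weights) and report the accumulated penalty; loss-sum=0.000e+00 means the penalized condition never fired. A speculative sketch of the attach-a-loss mechanism: the tensor passes through unchanged in forward, while backward also descends the auxiliary loss. The penalty in the demo is invented and deliberately zero.

```python
import torch

class AttachLoss(torch.autograd.Function):
    """Identity on x in forward; backward also backpropagates aux_loss."""

    @staticmethod
    def forward(ctx, x, aux_loss):
        ctx.save_for_backward(aux_loss)
        return x.clone()

    @staticmethod
    def backward(ctx, grad_out):
        (aux_loss,) = ctx.saved_tensors
        # gradient 1.0 w.r.t. aux_loss sends backprop into its graph
        return grad_out, torch.ones_like(aux_loss)

w = torch.randn(4, 4, requires_grad=True)
attn = torch.softmax(w, dim=-1)
aux = (attn.max(dim=-1).values - 1.0).clamp(min=0.0).sum()  # invented, == 0
out = AttachLoss.apply(attn, aux)
out.sum().backward()
print(f"loss-sum={float(aux):.3e}")   # loss-sum=0.000e+00, like the log
```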
], batch size: 52, lr: 8.74e-03, grad_scale: 16.0 +2024-08-27 03:06:02,421 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.462e+02 1.715e+02 2.122e+02 4.504e+02, threshold=3.430e+02, percent-clipped=1.0 +2024-08-27 03:06:09,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222858.66666666666, ans=0.125 +2024-08-27 03:06:17,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=222912.0, ans=0.125 +2024-08-27 03:06:21,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222912.0, ans=0.125 +2024-08-27 03:06:24,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=222912.0, ans=0.125 +2024-08-27 03:06:31,808 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:06:47,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=223072.0, ans=0.0 +2024-08-27 03:06:48,016 INFO [train.py:1114] (0/4) Epoch 17, batch 2000, loss[loss=0.1714, simple_loss=0.2371, pruned_loss=0.03882, ctc_loss=0.07016, over 19628.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2646, pruned_loss=0.04224, ctc_loss=0.07902, over 3854277.82 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0 +2024-08-27 03:08:01,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223125.33333333334, ans=0.1 +2024-08-27 03:09:25,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=223232.0, ans=0.125 +2024-08-27 03:09:27,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=223232.0, ans=0.125 +2024-08-27 03:09:30,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223285.33333333334, ans=0.1 +2024-08-27 03:09:39,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223285.33333333334, ans=0.1 +2024-08-27 03:09:41,594 INFO [train.py:1114] (0/4) Epoch 17, batch 2050, loss[loss=0.1703, simple_loss=0.2399, pruned_loss=0.03715, ctc_loss=0.06598, over 19707.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2632, pruned_loss=0.04199, ctc_loss=0.07848, over 3850647.74 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0 +2024-08-27 03:09:42,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.52 vs. 
limit=15.0 +2024-08-27 03:09:43,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.433e+02 1.718e+02 2.194e+02 3.489e+02, threshold=3.436e+02, percent-clipped=1.0 +2024-08-27 03:09:51,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=223338.66666666666, ans=0.125 +2024-08-27 03:09:51,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=223338.66666666666, ans=0.0 +2024-08-27 03:10:20,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=223392.0, ans=0.125 +2024-08-27 03:10:33,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223445.33333333334, ans=0.125 +2024-08-27 03:10:34,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223445.33333333334, ans=0.125 +2024-08-27 03:10:35,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.49 vs. limit=12.0 +2024-08-27 03:10:46,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.70 vs. limit=22.5 +2024-08-27 03:13:17,699 INFO [train.py:1114] (0/4) Epoch 17, batch 2100, loss[loss=0.1946, simple_loss=0.2713, pruned_loss=0.0424, ctc_loss=0.08284, over 19773.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2625, pruned_loss=0.04158, ctc_loss=0.07758, over 3857785.87 frames. ], batch size: 54, lr: 8.73e-03, grad_scale: 32.0 +2024-08-27 03:13:42,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=223658.66666666666, ans=0.125 +2024-08-27 03:24:04,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=223818.66666666666, ans=0.2 +2024-08-27 03:24:40,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=223818.66666666666, ans=0.125 +2024-08-27 03:24:47,082 INFO [train.py:1114] (0/4) Epoch 17, batch 2150, loss[loss=0.1827, simple_loss=0.2563, pruned_loss=0.04018, ctc_loss=0.07176, over 19594.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2619, pruned_loss=0.04147, ctc_loss=0.07715, over 3868200.70 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0 +2024-08-27 03:24:49,693 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.181e+02 1.464e+02 1.691e+02 2.317e+02 5.931e+02, threshold=3.382e+02, percent-clipped=6.0 +2024-08-27 03:24:56,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=223872.0, ans=0.125 +2024-08-27 03:24:57,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=223872.0, ans=0.2 +2024-08-27 03:24:57,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.68 vs. limit=22.5 +2024-08-27 03:26:31,804 INFO [train.py:1114] (0/4) Epoch 17, batch 2200, loss[loss=0.2055, simple_loss=0.2824, pruned_loss=0.04749, ctc_loss=0.08373, over 19586.00 frames. 
], tot_loss[loss=0.188, simple_loss=0.2621, pruned_loss=0.04148, ctc_loss=0.07725, over 3867184.45 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0 +2024-08-27 03:26:35,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224138.66666666666, ans=0.125 +2024-08-27 03:26:35,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=224138.66666666666, ans=0.0 +2024-08-27 03:27:06,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224298.66666666666, ans=0.125 +2024-08-27 03:27:07,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=6.0 +2024-08-27 03:27:26,233 INFO [train.py:1114] (0/4) Epoch 17, batch 2250, loss[loss=0.1978, simple_loss=0.2737, pruned_loss=0.04411, ctc_loss=0.08443, over 19615.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2622, pruned_loss=0.04142, ctc_loss=0.07716, over 3867576.77 frames. ], batch size: 55, lr: 8.71e-03, grad_scale: 32.0 +2024-08-27 03:27:29,877 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.517e+02 1.774e+02 2.256e+02 3.791e+02, threshold=3.548e+02, percent-clipped=1.0 +2024-08-27 03:27:42,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=224458.66666666666, ans=0.025 +2024-08-27 03:27:44,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.75 vs. limit=15.0 +2024-08-27 03:28:04,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=224565.33333333334, ans=0.125 +2024-08-27 03:29:04,923 INFO [train.py:1114] (0/4) Epoch 17, batch 2300, loss[loss=0.1692, simple_loss=0.2439, pruned_loss=0.03442, ctc_loss=0.06424, over 19495.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2617, pruned_loss=0.04161, ctc_loss=0.07739, over 3861679.45 frames. ], batch size: 49, lr: 8.71e-03, grad_scale: 16.0 +2024-08-27 03:29:07,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224672.0, ans=0.125 +2024-08-27 03:29:08,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=224672.0, ans=15.0 +2024-08-27 03:29:11,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.73 vs. limit=12.0 +2024-08-27 03:29:39,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=224725.33333333334, ans=0.125 +2024-08-27 03:33:09,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=224778.66666666666, ans=0.0 +2024-08-27 03:35:45,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.11 vs. 
limit=22.5 +2024-08-27 03:36:02,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224832.0, ans=0.1 +2024-08-27 03:36:06,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=224832.0, ans=0.125 +2024-08-27 03:36:15,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=224885.33333333334, ans=0.2 +2024-08-27 03:36:33,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.74 vs. limit=22.5 +2024-08-27 03:36:41,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.42 vs. limit=22.5 +2024-08-27 03:36:49,584 INFO [train.py:1114] (0/4) Epoch 17, batch 2350, loss[loss=0.2077, simple_loss=0.2788, pruned_loss=0.05002, ctc_loss=0.0914, over 19653.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2615, pruned_loss=0.04189, ctc_loss=0.07767, over 3864539.45 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 16.0 +2024-08-27 03:37:01,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.439e+02 1.647e+02 2.102e+02 4.091e+02, threshold=3.295e+02, percent-clipped=1.0 +2024-08-27 03:37:22,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224992.0, ans=0.125 +2024-08-27 03:37:43,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=225045.33333333334, ans=0.125 +2024-08-27 03:37:53,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225045.33333333334, ans=0.125 +2024-08-27 03:37:57,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=225045.33333333334, ans=0.125 +2024-08-27 03:38:16,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=225045.33333333334, ans=0.2 +2024-08-27 03:39:26,219 INFO [train.py:1114] (0/4) Epoch 17, batch 2400, loss[loss=0.2137, simple_loss=0.2848, pruned_loss=0.05229, ctc_loss=0.0948, over 19364.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2638, pruned_loss=0.04269, ctc_loss=0.07907, over 3858657.20 frames. ], batch size: 67, lr: 8.70e-03, grad_scale: 32.0 +2024-08-27 03:44:22,611 INFO [train.py:1114] (0/4) Epoch 17, batch 2450, loss[loss=0.2636, simple_loss=0.3075, pruned_loss=0.08025, ctc_loss=0.1481, over 13661.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2673, pruned_loss=0.04502, ctc_loss=0.08368, over 3734008.74 frames. 
], batch size: 140, lr: 8.69e-03, grad_scale: 32.0 +2024-08-27 03:44:30,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.519e+02 1.805e+02 2.064e+02 2.900e+02, threshold=3.609e+02, percent-clipped=0.0 +2024-08-27 03:44:41,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=225525.33333333334, ans=0.0 +2024-08-27 03:44:44,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=225525.33333333334, ans=0.0 +2024-08-27 03:45:41,460 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:46:21,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=225578.66666666666, ans=0.2 +2024-08-27 03:47:32,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=225632.0, ans=10.0 +2024-08-27 03:47:34,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=15.0 +2024-08-27 03:47:37,196 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-17.pt +2024-08-27 03:50:08,490 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.1718, simple_loss=0.2426, pruned_loss=0.03656, ctc_loss=0.06963, over 19813.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2426, pruned_loss=0.03656, ctc_loss=0.06963, over 19813.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-27 03:50:08,490 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-27 03:56:59,325 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1731, simple_loss=0.2653, pruned_loss=0.0303, ctc_loss=0.05087, over 944034.00 frames. +2024-08-27 03:56:59,325 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-27 03:58:10,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.68 vs. limit=15.0 +2024-08-27 03:58:29,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.52 vs. limit=12.0 +2024-08-27 03:59:06,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=225786.66666666666, ans=0.0 +2024-08-27 03:59:19,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.41 vs. limit=15.0 +2024-08-27 03:59:20,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.80 vs. 
limit=15.0 +2024-08-27 03:59:24,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225840.0, ans=0.125 +2024-08-27 03:59:36,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=225893.33333333334, ans=0.2 +2024-08-27 03:59:37,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=225893.33333333334, ans=0.125 +2024-08-27 03:59:40,036 INFO [train.py:1114] (0/4) Epoch 18, batch 50, loss[loss=0.1738, simple_loss=0.2453, pruned_loss=0.03652, ctc_loss=0.07347, over 19716.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.265, pruned_loss=0.04201, ctc_loss=0.07871, over 845746.19 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-27 03:59:52,917 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.620e+02 1.870e+02 2.127e+02 3.474e+02, threshold=3.740e+02, percent-clipped=0.0 +2024-08-27 04:00:02,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=226053.33333333334, ans=0.125 +2024-08-27 04:00:07,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=226053.33333333334, ans=0.125 +2024-08-27 04:00:11,326 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:00:24,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=226160.0, ans=0.125 +2024-08-27 04:00:34,099 INFO [train.py:1114] (0/4) Epoch 18, batch 100, loss[loss=0.177, simple_loss=0.2513, pruned_loss=0.03785, ctc_loss=0.06743, over 19718.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2663, pruned_loss=0.04265, ctc_loss=0.0798, over 1499054.83 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-27 04:00:34,391 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:00:38,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.29 vs. limit=15.0 +2024-08-27 04:05:28,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=226320.0, ans=0.125 +2024-08-27 04:05:39,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=226320.0, ans=0.2 +2024-08-27 04:05:55,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.74 vs. limit=5.0 +2024-08-27 04:06:00,496 INFO [train.py:1114] (0/4) Epoch 18, batch 150, loss[loss=0.1743, simple_loss=0.2407, pruned_loss=0.03984, ctc_loss=0.07086, over 19715.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2628, pruned_loss=0.04135, ctc_loss=0.07703, over 2028052.35 frames. 
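The epoch boundary a little above shows the recipe's end-of-epoch routine: checkpoint.py saves epoch-17.pt, then the first step of epoch 18 computes a full validation pass ("Computing validation loss" / "validation: loss=...") before training resumes. A schematic of that flow; compute_loss here is a hypothetical stand-in for the recipe's CTC loss.

```python
import torch

def compute_loss(model, batch):
    # hypothetical stand-in for the recipe's CTC loss computation
    feats, n_frames = batch
    return model(feats).pow(2).mean(), n_frames

def end_of_epoch(model, optimizer, epoch, valid_loader, exp_dir="."):
    torch.save({"model": model.state_dict(),
                "optimizer": optimizer.state_dict(),
                "epoch": epoch}, f"{exp_dir}/epoch-{epoch}.pt")
    model.eval()
    loss_sum, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, n = compute_loss(model, batch)
            loss_sum += float(loss) * n
            frames += n
    model.train()
    return loss_sum / frames   # the "validation: loss=..." figure

model = torch.nn.Linear(8, 8)
opt = torch.optim.Adam(model.parameters())
loader = [(torch.randn(4, 8), 400.0) for _ in range(3)]
print(end_of_epoch(model, opt, 17, loader))
```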
], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-27 04:06:16,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.462e+02 1.764e+02 2.186e+02 3.977e+02, threshold=3.529e+02, percent-clipped=1.0 +2024-08-27 04:06:16,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-27 04:06:17,424 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:06:28,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226586.66666666666, ans=0.125 +2024-08-27 04:06:29,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=226586.66666666666, ans=0.04949747468305833 +2024-08-27 04:06:35,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226640.0, ans=0.125 +2024-08-27 04:06:38,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=226640.0, ans=0.025 +2024-08-27 04:06:44,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=226693.33333333334, ans=0.125 +2024-08-27 04:06:49,806 INFO [train.py:1114] (0/4) Epoch 18, batch 200, loss[loss=0.194, simple_loss=0.2707, pruned_loss=0.04355, ctc_loss=0.07536, over 18248.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2619, pruned_loss=0.04105, ctc_loss=0.07649, over 2435754.48 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-27 04:06:56,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.96 vs. limit=22.5 +2024-08-27 04:07:07,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.60 vs. limit=15.0 +2024-08-27 04:07:24,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=226906.66666666666, ans=0.125 +2024-08-27 04:07:24,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226906.66666666666, ans=0.125 +2024-08-27 04:07:35,902 INFO [train.py:1114] (0/4) Epoch 18, batch 250, loss[loss=0.2006, simple_loss=0.2715, pruned_loss=0.048, ctc_loss=0.08435, over 19404.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2621, pruned_loss=0.04113, ctc_loss=0.07669, over 2755802.13 frames. 
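The lr column decays smoothly within an epoch (8.90e-03 down to 8.69e-03 across epoch 17) and steps down at the epoch boundary (8.44e-03 at the start of epoch 18), the shape of an Eden-style schedule with both a step factor and an epoch factor. The sketch below is from memory and its constants (base_lr, lr_batches, lr_epochs) are assumptions, not values read from this log.

```python
def eden_lr(base_lr: float, step: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    """Eden-style schedule: lr shrinks with both batch count and epoch."""
    batch_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.5
    return base_lr * batch_factor * epoch_factor

# Same qualitative shape as the log: slow in-epoch decay, a step at epoch turn.
for epoch, step in [(17, 215072), (17, 225632), (18, 225786)]:
    print(epoch, f"{eden_lr(0.045, step, epoch):.2e}")
```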
], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-27 04:07:46,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=227066.66666666666, ans=0.0 +2024-08-27 04:07:48,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227066.66666666666, ans=0.125 +2024-08-27 04:07:50,820 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.244e+02 1.521e+02 1.873e+02 2.606e+02 4.367e+02, threshold=3.746e+02, percent-clipped=8.0 +2024-08-27 04:07:52,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=227066.66666666666, ans=0.0 +2024-08-27 04:07:58,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227120.0, ans=0.1 +2024-08-27 04:08:18,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=227173.33333333334, ans=6.0 +2024-08-27 04:08:31,214 INFO [train.py:1114] (0/4) Epoch 18, batch 300, loss[loss=0.1841, simple_loss=0.2643, pruned_loss=0.03834, ctc_loss=0.06837, over 19499.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2617, pruned_loss=0.04108, ctc_loss=0.07666, over 3000417.31 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-27 04:08:44,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=227333.33333333334, ans=0.0 +2024-08-27 04:08:53,036 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:09:05,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.02 vs. limit=22.5 +2024-08-27 04:09:14,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=227493.33333333334, ans=0.2 +2024-08-27 04:09:15,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0 +2024-08-27 04:09:17,357 INFO [train.py:1114] (0/4) Epoch 18, batch 350, loss[loss=0.1608, simple_loss=0.2306, pruned_loss=0.03272, ctc_loss=0.06409, over 19741.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2616, pruned_loss=0.04096, ctc_loss=0.0767, over 3191008.27 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-27 04:09:17,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227546.66666666666, ans=0.125 +2024-08-27 04:09:26,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. 
+2024-08-27 04:09:28,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227600.0, ans=0.0
+2024-08-27 04:09:30,331 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.460e+02 1.643e+02 1.956e+02 3.165e+02, threshold=3.287e+02, percent-clipped=0.0
+2024-08-27 04:10:14,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227706.66666666666, ans=0.0
+2024-08-27 04:10:17,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=227706.66666666666, ans=0.125
+2024-08-27 04:10:17,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227706.66666666666, ans=0.0
+2024-08-27 04:10:40,375 INFO [train.py:1114] (0/4) Epoch 18, batch 400, loss[loss=0.1871, simple_loss=0.2651, pruned_loss=0.03948, ctc_loss=0.07522, over 19506.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2616, pruned_loss=0.04072, ctc_loss=0.07638, over 3343005.44 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0
+2024-08-27 04:10:49,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=227866.66666666666, ans=0.125
+2024-08-27 04:11:58,076 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0
+2024-08-27 04:12:21,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227973.33333333334, ans=0.1
+2024-08-27 04:12:32,837 INFO [train.py:1114] (0/4) Epoch 18, batch 450, loss[loss=0.1736, simple_loss=0.2559, pruned_loss=0.03283, ctc_loss=0.06406, over 19611.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2611, pruned_loss=0.04042, ctc_loss=0.0759, over 3451708.44 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0
+2024-08-27 04:12:34,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=228080.0, ans=0.125
+2024-08-27 04:12:38,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0
+2024-08-27 04:12:57,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.62 vs. limit=15.0
+2024-08-27 04:12:57,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.10 vs. limit=22.5
+2024-08-27 04:12:59,749 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.395e+02 1.673e+02 2.305e+02 3.910e+02, threshold=3.347e+02, percent-clipped=3.0
+2024-08-27 04:13:14,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=228240.0, ans=0.0
+2024-08-27 04:13:18,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=228240.0, ans=0.125
+2024-08-27 04:13:32,852 INFO [train.py:1114] (0/4) Epoch 18, batch 500, loss[loss=0.1991, simple_loss=0.2671, pruned_loss=0.04699, ctc_loss=0.09291, over 19634.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2604, pruned_loss=0.04017, ctc_loss=0.07541, over 3546079.07 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0
+2024-08-27 04:13:45,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228400.0, ans=0.1
+2024-08-27 04:13:46,763 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:13:46,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228400.0, ans=0.1
+2024-08-27 04:13:50,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=228400.0, ans=0.0
+2024-08-27 04:14:02,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=228506.66666666666, ans=0.0
+2024-08-27 04:14:20,879 INFO [train.py:1114] (0/4) Epoch 18, batch 550, loss[loss=0.1954, simple_loss=0.2718, pruned_loss=0.04365, ctc_loss=0.07923, over 19273.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2609, pruned_loss=0.04048, ctc_loss=0.07588, over 3608445.94 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0
+2024-08-27 04:14:28,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=228613.33333333334, ans=0.0
+2024-08-27 04:14:34,514 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.436e+02 1.681e+02 2.031e+02 3.505e+02, threshold=3.363e+02, percent-clipped=1.0
+2024-08-27 04:14:46,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228720.0, ans=0.1
+2024-08-27 04:14:47,885 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:15:12,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=228826.66666666666, ans=0.125
+2024-08-27 04:15:14,800 INFO [train.py:1114] (0/4) Epoch 18, batch 600, loss[loss=0.1967, simple_loss=0.2695, pruned_loss=0.04536, ctc_loss=0.08315, over 19393.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.261, pruned_loss=0.04056, ctc_loss=0.0758, over 3666186.54 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0
+2024-08-27 04:15:19,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=228880.0, ans=0.2
+2024-08-27 04:15:33,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=228933.33333333334, ans=0.125
+2024-08-27 04:15:43,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=228986.66666666666, ans=0.125
+2024-08-27 04:15:54,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229040.0, ans=0.125
+2024-08-27 04:16:56,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=229093.33333333334, ans=0.0
+2024-08-27 04:17:05,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.71 vs. limit=10.0
+2024-08-27 04:17:06,968 INFO [train.py:1114] (0/4) Epoch 18, batch 650, loss[loss=0.1875, simple_loss=0.2581, pruned_loss=0.04256, ctc_loss=0.07935, over 19762.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2599, pruned_loss=0.04003, ctc_loss=0.07484, over 3716392.84 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0
+2024-08-27 04:17:08,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=229146.66666666666, ans=0.07
+2024-08-27 04:17:13,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=229146.66666666666, ans=0.025
+2024-08-27 04:17:15,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229200.0, ans=0.125
+2024-08-27 04:17:20,097 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.567e+02 1.955e+02 2.726e+02 4.189e+02, threshold=3.909e+02, percent-clipped=6.0
+2024-08-27 04:17:20,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0
+2024-08-27 04:17:23,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=229200.0, ans=0.0
+2024-08-27 04:17:23,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=229200.0, ans=0.025
+2024-08-27 04:17:32,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=229253.33333333334, ans=0.125
+2024-08-27 04:17:34,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229306.66666666666, ans=0.1
+2024-08-27 04:17:39,710 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:17:40,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=229306.66666666666, ans=0.125
+2024-08-27 04:18:45,803 INFO [train.py:1114] (0/4) Epoch 18, batch 700, loss[loss=0.1787, simple_loss=0.251, pruned_loss=0.03921, ctc_loss=0.0699, over 19714.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2604, pruned_loss=0.04023, ctc_loss=0.07512, over 3749192.59 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0
+2024-08-27 04:18:54,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229413.33333333334, ans=0.125
+2024-08-27 04:18:57,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229466.66666666666, ans=0.0
+2024-08-27 04:18:58,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229466.66666666666, ans=0.1
+2024-08-27 04:19:02,846 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0
+2024-08-27 04:19:08,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229520.0, ans=0.1
+2024-08-27 04:19:13,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=229520.0, ans=0.125
+2024-08-27 04:19:22,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0
+2024-08-27 04:19:33,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0
+2024-08-27 04:19:34,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0
+2024-08-27 04:19:35,533 INFO [train.py:1114] (0/4) Epoch 18, batch 750, loss[loss=0.1828, simple_loss=0.2666, pruned_loss=0.03506, ctc_loss=0.07188, over 19502.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2605, pruned_loss=0.04032, ctc_loss=0.07538, over 3774959.33 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 16.0
+2024-08-27 04:19:36,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=229680.0, ans=0.2
+2024-08-27 04:19:37,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=229680.0, ans=0.125
+2024-08-27 04:19:44,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.16 vs. limit=6.0
+2024-08-27 04:19:44,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229733.33333333334, ans=0.1
+2024-08-27 04:19:47,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229733.33333333334, ans=0.1
+2024-08-27 04:19:49,143 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.399e+02 1.632e+02 2.193e+02 3.721e+02, threshold=3.263e+02, percent-clipped=0.0
+2024-08-27 04:19:56,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.20 vs. limit=12.0
+2024-08-27 04:19:57,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229786.66666666666, ans=0.1
+2024-08-27 04:19:57,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=229786.66666666666, ans=0.0
+2024-08-27 04:20:10,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=22.5
+2024-08-27 04:20:25,620 INFO [train.py:1114] (0/4) Epoch 18, batch 800, loss[loss=0.1671, simple_loss=0.2372, pruned_loss=0.03559, ctc_loss=0.06459, over 19807.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2603, pruned_loss=0.04029, ctc_loss=0.07536, over 3796787.06 frames. ], batch size: 49, lr: 8.37e-03, grad_scale: 32.0
+2024-08-27 04:20:53,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=12.0
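
The scaling.py:214 ScheduledFloat lines above record regularization hyperparameters (dropout probabilities, skip rates, balancer targets) whose values are piecewise-linear functions of the global batch count rather than constants, so regularization can be strong early in training and relax toward a floor later. A rough standalone sketch of the idea (an assumed simplification for illustration, not icefall's ScheduledFloat class):

class ScheduledFloat:
    # Interpolates linearly between (batch_count, value) breakpoints and
    # stays constant outside them.
    def __init__(self, *points):
        self.points = sorted(points)  # e.g. ((0.0, 0.3), (20000.0, 0.1))

    def value(self, batch_count):
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(229520.0))  # 0.1: the floor, as in the dropout_p lines above
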
+2024-08-27 04:21:02,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=230000.0, ans=0.025
+2024-08-27 04:21:04,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=230053.33333333334, ans=0.125
+2024-08-27 04:21:06,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.40 vs. limit=15.0
+2024-08-27 04:21:11,348 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:21:31,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=230160.0, ans=0.125
+2024-08-27 04:21:31,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=230160.0, ans=0.0
+2024-08-27 04:21:33,150 INFO [train.py:1114] (0/4) Epoch 18, batch 850, loss[loss=0.2081, simple_loss=0.2808, pruned_loss=0.04898, ctc_loss=0.09358, over 19633.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2604, pruned_loss=0.04029, ctc_loss=0.07521, over 3815142.45 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0
+2024-08-27 04:21:34,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.05 vs. limit=22.5
+2024-08-27 04:21:42,590 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:21:57,957 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.452e+02 1.736e+02 2.395e+02 3.551e+02, threshold=3.472e+02, percent-clipped=2.0
+2024-08-27 04:22:06,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=230320.0, ans=0.04949747468305833
+2024-08-27 04:22:13,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=230373.33333333334, ans=0.125
+2024-08-27 04:22:14,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230373.33333333334, ans=0.125
+2024-08-27 04:22:18,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=230373.33333333334, ans=0.1
+2024-08-27 04:22:26,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=230426.66666666666, ans=0.2
+2024-08-27 04:22:30,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=230480.0, ans=0.2
+2024-08-27 04:22:31,213 INFO [train.py:1114] (0/4) Epoch 18, batch 900, loss[loss=0.1716, simple_loss=0.2454, pruned_loss=0.03511, ctc_loss=0.06896, over 19425.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2608, pruned_loss=0.04058, ctc_loss=0.076, over 3819486.86 frames. ], batch size: 48, lr: 8.36e-03, grad_scale: 32.0
+2024-08-27 04:22:37,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230480.0, ans=0.1
+2024-08-27 04:22:39,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=230533.33333333334, ans=0.0
+2024-08-27 04:22:42,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. limit=6.0
+2024-08-27 04:22:56,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=230586.66666666666, ans=0.125
+2024-08-27 04:22:57,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230586.66666666666, ans=0.125
+2024-08-27 04:23:17,846 INFO [train.py:1114] (0/4) Epoch 18, batch 950, loss[loss=0.1654, simple_loss=0.244, pruned_loss=0.03092, ctc_loss=0.06262, over 19484.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2615, pruned_loss=0.04098, ctc_loss=0.07669, over 3820910.45 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0
+2024-08-27 04:23:20,241 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.90 vs. limit=22.5
+2024-08-27 04:23:36,277 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.393e+02 1.674e+02 2.227e+02 4.492e+02, threshold=3.349e+02, percent-clipped=5.0
+2024-08-27 04:23:48,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.39 vs. limit=15.0
+2024-08-27 04:23:56,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.40 vs. limit=6.0
+2024-08-27 04:24:09,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=230960.0, ans=0.2
+2024-08-27 04:24:15,454 INFO [train.py:1114] (0/4) Epoch 18, batch 1000, loss[loss=0.1718, simple_loss=0.2499, pruned_loss=0.03334, ctc_loss=0.06736, over 19851.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2622, pruned_loss=0.04125, ctc_loss=0.07712, over 3815875.19 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0
+2024-08-27 04:24:28,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0
+2024-08-27 04:24:35,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=22.5
+2024-08-27 04:24:39,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=231120.0, ans=0.0
+2024-08-27 04:24:43,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.54 vs. limit=15.0
+2024-08-27 04:24:57,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=231226.66666666666, ans=0.0
+2024-08-27 04:24:57,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=231226.66666666666, ans=0.0
+2024-08-27 04:25:01,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=231226.66666666666, ans=0.0
+2024-08-27 04:25:01,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=231226.66666666666, ans=0.125
+2024-08-27 04:25:11,435 INFO [train.py:1114] (0/4) Epoch 18, batch 1050, loss[loss=0.194, simple_loss=0.2761, pruned_loss=0.04009, ctc_loss=0.07941, over 19852.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2615, pruned_loss=0.04116, ctc_loss=0.0769, over 3822177.37 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0
+2024-08-27 04:25:12,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=231280.0, ans=10.0
+2024-08-27 04:25:18,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231280.0, ans=0.0
+2024-08-27 04:25:25,220 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.375e+02 1.549e+02 1.865e+02 3.480e+02, threshold=3.097e+02, percent-clipped=1.0
+2024-08-27 04:25:34,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0
+2024-08-27 04:25:38,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=231440.0, ans=0.0
+2024-08-27 04:25:49,285 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:25:56,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=231546.66666666666, ans=0.0
+2024-08-27 04:25:57,488 INFO [train.py:1114] (0/4) Epoch 18, batch 1100, loss[loss=0.192, simple_loss=0.2657, pruned_loss=0.04432, ctc_loss=0.07421, over 19596.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2611, pruned_loss=0.04098, ctc_loss=0.07656, over 3829672.02 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 16.0
+2024-08-27 04:26:01,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=231546.66666666666, ans=0.0
+2024-08-27 04:26:36,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=231653.33333333334, ans=0.125
+2024-08-27 04:26:54,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=231706.66666666666, ans=0.0
+2024-08-27 04:27:00,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.45 vs. limit=15.0
+2024-08-27 04:27:31,637 INFO [train.py:1114] (0/4) Epoch 18, batch 1150, loss[loss=0.183, simple_loss=0.2541, pruned_loss=0.04014, ctc_loss=0.07903, over 19608.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2614, pruned_loss=0.04113, ctc_loss=0.07701, over 3827298.87 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 16.0
+2024-08-27 04:27:44,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=231866.66666666666, ans=0.025
+2024-08-27 04:27:47,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=231866.66666666666, ans=0.0
+2024-08-27 04:27:50,641 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.426e+02 1.640e+02 2.078e+02 3.185e+02, threshold=3.280e+02, percent-clipped=3.0
+2024-08-27 04:28:03,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=231920.0, ans=0.125
+2024-08-27 04:28:06,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=231920.0, ans=0.0
+2024-08-27 04:28:26,369 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.60 vs. limit=22.5
+2024-08-27 04:28:27,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=232026.66666666666, ans=0.2
+2024-08-27 04:28:32,341 INFO [train.py:1114] (0/4) Epoch 18, batch 1200, loss[loss=0.1998, simple_loss=0.2789, pruned_loss=0.04406, ctc_loss=0.08161, over 19837.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2626, pruned_loss=0.04135, ctc_loss=0.07752, over 3824043.11 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0
+2024-08-27 04:28:37,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=232080.0, ans=0.125
+2024-08-27 04:28:46,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=232133.33333333334, ans=0.125
+2024-08-27 04:28:57,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=232186.66666666666, ans=0.0
+2024-08-27 04:28:58,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=232186.66666666666, ans=0.2
+2024-08-27 04:29:05,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=232240.0, ans=0.125
+2024-08-27 04:29:09,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232240.0, ans=0.125
+2024-08-27 04:29:19,792 INFO [train.py:1114] (0/4) Epoch 18, batch 1250, loss[loss=0.213, simple_loss=0.2771, pruned_loss=0.05452, ctc_loss=0.09965, over 19507.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2631, pruned_loss=0.04135, ctc_loss=0.07741, over 3842402.06 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0
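
The scaling.py:1024 Whitening lines compare a per-module whitening metric against a limit; a penalty is applied only when the metric exceeds the limit, nudging each module's features toward a covariance proportional to the identity. A simplified version of such a metric, which equals 1.0 for perfectly whitened features and grows with correlation or unequal channel scaling (the exact formula here is an assumption; icefall's Whiten module differs in details such as grouping):

import torch

def whitening_metric(x):
    # x: (..., num_channels); returns ~1.0 when cov is proportional to I.
    x = x.reshape(-1, x.shape[-1])
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    d = cov.shape[0]
    return d * (cov ** 2).mean() / (cov.diagonal().mean() ** 2 + 1e-20)

white = torch.randn(4000, 512)
mixed = white @ torch.randn(512, 512)  # correlated channels
print(whitening_metric(white))         # close to 1
print(whitening_metric(mixed))         # far above a limit such as 15.0
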
+2024-08-27 04:29:19,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=232346.66666666666, ans=0.125
+2024-08-27 04:29:29,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232400.0, ans=0.1
+2024-08-27 04:29:34,585 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.452e+02 1.815e+02 2.295e+02 4.200e+02, threshold=3.630e+02, percent-clipped=5.0
+2024-08-27 04:29:35,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=232400.0, ans=0.125
+2024-08-27 04:29:41,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=232453.33333333334, ans=0.2
+2024-08-27 04:29:43,062 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:30:25,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232506.66666666666, ans=0.1
+2024-08-27 04:30:39,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232560.0, ans=0.1
+2024-08-27 04:30:43,396 INFO [train.py:1114] (0/4) Epoch 18, batch 1300, loss[loss=0.1934, simple_loss=0.2691, pruned_loss=0.04232, ctc_loss=0.08291, over 18824.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2617, pruned_loss=0.04093, ctc_loss=0.07669, over 3845983.72 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 16.0
+2024-08-27 04:30:54,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0
+2024-08-27 04:30:56,907 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=12.0
+2024-08-27 04:31:05,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232720.0, ans=0.1
+2024-08-27 04:31:18,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=232773.33333333334, ans=0.0
+2024-08-27 04:31:29,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=232826.66666666666, ans=0.125
+2024-08-27 04:31:33,100 INFO [train.py:1114] (0/4) Epoch 18, batch 1350, loss[loss=0.1789, simple_loss=0.2482, pruned_loss=0.03907, ctc_loss=0.0789, over 19761.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2612, pruned_loss=0.04084, ctc_loss=0.07624, over 3857349.73 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 16.0
+2024-08-27 04:31:45,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=232933.33333333334, ans=0.125
+2024-08-27 04:31:48,898 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.387e+02 1.655e+02 2.106e+02 4.022e+02, threshold=3.310e+02, percent-clipped=4.0
+2024-08-27 04:31:49,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=15.0
+2024-08-27 04:32:19,566 INFO [train.py:1114] (0/4) Epoch 18, batch 1400, loss[loss=0.1616, simple_loss=0.2317, pruned_loss=0.03361, ctc_loss=0.06053, over 19657.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2613, pruned_loss=0.0409, ctc_loss=0.07643, over 3864164.22 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 16.0
+2024-08-27 04:32:19,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.50 vs. limit=15.0
+2024-08-27 04:32:27,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=233146.66666666666, ans=0.125
+2024-08-27 04:32:45,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=233200.0, ans=0.0
+2024-08-27 04:33:00,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=233253.33333333334, ans=0.0
+2024-08-27 04:33:40,121 INFO [train.py:1114] (0/4) Epoch 18, batch 1450, loss[loss=0.2024, simple_loss=0.2789, pruned_loss=0.04546, ctc_loss=0.08767, over 19668.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2618, pruned_loss=0.04116, ctc_loss=0.07687, over 3861721.28 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 16.0
+2024-08-27 04:34:21,007 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.457e+02 1.713e+02 1.981e+02 3.848e+02, threshold=3.426e+02, percent-clipped=1.0
+2024-08-27 04:34:26,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=15.0
+2024-08-27 04:35:50,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=233626.66666666666, ans=0.125
+2024-08-27 04:35:51,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=22.5
+2024-08-27 04:36:46,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=233680.0, ans=0.125
+2024-08-27 04:36:59,898 INFO [train.py:1114] (0/4) Epoch 18, batch 1500, loss[loss=0.1779, simple_loss=0.2584, pruned_loss=0.03578, ctc_loss=0.06442, over 19584.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2621, pruned_loss=0.04111, ctc_loss=0.07691, over 3861790.55 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 16.0
+2024-08-27 04:37:01,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=233680.0, ans=0.1
+2024-08-27 04:37:01,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.91 vs. limit=10.0
+2024-08-27 04:37:04,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=233680.0, ans=0.0
+2024-08-27 04:37:34,923 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:37:34,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233680.0, ans=0.1
+2024-08-27 04:37:45,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=233733.33333333334, ans=0.0
+2024-08-27 04:37:46,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233733.33333333334, ans=0.1
+2024-08-27 04:38:31,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=233840.0, ans=0.125
+2024-08-27 04:39:00,335 INFO [train.py:1114] (0/4) Epoch 18, batch 1550, loss[loss=0.208, simple_loss=0.2825, pruned_loss=0.0489, ctc_loss=0.08943, over 19603.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2617, pruned_loss=0.04099, ctc_loss=0.07671, over 3845633.94 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 16.0
+2024-08-27 04:39:00,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=233946.66666666666, ans=0.125
+2024-08-27 04:39:02,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=233946.66666666666, ans=0.125
+2024-08-27 04:39:35,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.24 vs. limit=22.5
+2024-08-27 04:39:37,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=234000.0, ans=0.05
+2024-08-27 04:39:38,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:39:50,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=234000.0, ans=0.5
+2024-08-27 04:39:51,778 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.454e+02 1.713e+02 2.109e+02 3.815e+02, threshold=3.426e+02, percent-clipped=1.0
+2024-08-27 04:39:51,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:40:09,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=234106.66666666666, ans=0.0
+2024-08-27 04:40:34,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234213.33333333334, ans=0.125
+2024-08-27 04:40:35,442 INFO [train.py:1114] (0/4) Epoch 18, batch 1600, loss[loss=0.198, simple_loss=0.279, pruned_loss=0.04252, ctc_loss=0.07987, over 19848.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2615, pruned_loss=0.0409, ctc_loss=0.07657, over 3835104.16 frames. ], batch size: 57, lr: 8.29e-03, grad_scale: 32.0
+2024-08-27 04:40:36,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=234213.33333333334, ans=0.125
+2024-08-27 04:40:40,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234213.33333333334, ans=0.1
+2024-08-27 04:40:50,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234266.66666666666, ans=0.125
+2024-08-27 04:40:58,857 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:41:01,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=234320.0, ans=0.04949747468305833
+2024-08-27 04:41:01,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=234320.0, ans=0.2
+2024-08-27 04:41:24,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=234373.33333333334, ans=0.0
+2024-08-27 04:41:26,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=234373.33333333334, ans=0.125
+2024-08-27 04:41:42,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=234426.66666666666, ans=0.2
+2024-08-27 04:41:44,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=234426.66666666666, ans=0.125
+2024-08-27 04:41:48,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=234426.66666666666, ans=0.125
+2024-08-27 04:41:53,471 INFO [train.py:1114] (0/4) Epoch 18, batch 1650, loss[loss=0.1956, simple_loss=0.2767, pruned_loss=0.04101, ctc_loss=0.08102, over 19673.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2612, pruned_loss=0.04081, ctc_loss=0.07637, over 3832693.46 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0
+2024-08-27 04:42:16,286 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.559e+02 1.894e+02 2.296e+02 3.896e+02, threshold=3.788e+02, percent-clipped=3.0
+2024-08-27 04:42:25,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=234586.66666666666, ans=0.125
+2024-08-27 04:42:35,873 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-44000.pt
+2024-08-27 04:42:56,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.84 vs. limit=15.0
+2024-08-27 04:43:00,728 INFO [train.py:1114] (0/4) Epoch 18, batch 1700, loss[loss=0.1772, simple_loss=0.2399, pruned_loss=0.04169, ctc_loss=0.07774, over 19685.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2608, pruned_loss=0.04043, ctc_loss=0.07573, over 3846869.25 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 32.0
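
Two kinds of checkpoints appear in this log: batch-count checkpoints such as checkpoint-44000.pt in the checkpoint.py:75 line above, written every fixed number of global batches, and end-of-epoch checkpoints such as epoch-18.pt a little further on. A hedged sketch of that cadence (the helper name and the 4000-batch interval are assumptions for illustration, not values read from the training script):

from pathlib import Path
import torch

def maybe_save_batch_checkpoint(model, optimizer, exp_dir, batch_idx_train,
                                save_every_n=4000):
    # Writes exp_dir/checkpoint-<N>.pt, e.g. checkpoint-44000.pt above.
    if batch_idx_train == 0 or batch_idx_train % save_every_n != 0:
        return
    state = {
        "model": model.state_dict(),
        "optimizer": optimizer.state_dict(),
        "batch_idx_train": batch_idx_train,
    }
    torch.save(state, Path(exp_dir) / f"checkpoint-{batch_idx_train}.pt")

# At an epoch boundary the same state would be written to epoch-<E>.pt instead.
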
+2024-08-27 04:43:15,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=234800.0, ans=0.125
+2024-08-27 04:43:17,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=234800.0, ans=0.95
+2024-08-27 04:43:18,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=234800.0, ans=0.125
+2024-08-27 04:43:28,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=234853.33333333334, ans=0.035
+2024-08-27 04:43:32,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234906.66666666666, ans=0.125
+2024-08-27 04:43:46,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234960.0, ans=0.1
+2024-08-27 04:43:46,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=234960.0, ans=0.2
+2024-08-27 04:43:53,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=234960.0, ans=0.2
+2024-08-27 04:43:59,944 INFO [train.py:1114] (0/4) Epoch 18, batch 1750, loss[loss=0.1725, simple_loss=0.2391, pruned_loss=0.0389, ctc_loss=0.07031, over 19659.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2602, pruned_loss=0.04019, ctc_loss=0.07528, over 3850963.92 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0
+2024-08-27 04:44:05,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=235013.33333333334, ans=0.125
+2024-08-27 04:44:14,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235066.66666666666, ans=0.125
+2024-08-27 04:44:16,974 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.479e+02 1.670e+02 2.161e+02 3.908e+02, threshold=3.340e+02, percent-clipped=1.0
+2024-08-27 04:44:18,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=235120.0, ans=0.0
+2024-08-27 04:44:20,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235120.0, ans=0.125
+2024-08-27 04:44:21,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0
+2024-08-27 04:44:28,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=235173.33333333334, ans=0.025
+2024-08-27 04:44:33,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=235173.33333333334, ans=0.125
+2024-08-27 04:44:50,515 INFO [train.py:1114] (0/4) Epoch 18, batch 1800, loss[loss=0.1775, simple_loss=0.2637, pruned_loss=0.03388, ctc_loss=0.05899, over 19610.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2604, pruned_loss=0.04006, ctc_loss=0.07491, over 3852616.49 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 16.0
+2024-08-27 04:44:56,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=235280.0, ans=0.0
+2024-08-27 04:44:56,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=235280.0, ans=0.125
+2024-08-27 04:45:06,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=235333.33333333334, ans=0.2
+2024-08-27 04:45:10,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=235386.66666666666, ans=0.025
+2024-08-27 04:45:23,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=235386.66666666666, ans=0.025
+2024-08-27 04:45:27,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=235440.0, ans=0.09899494936611666
+2024-08-27 04:45:30,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=235440.0, ans=0.0
+2024-08-27 04:45:35,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235440.0, ans=0.1
+2024-08-27 04:45:36,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=235440.0, ans=0.0
+2024-08-27 04:45:48,623 INFO [train.py:1114] (0/4) Epoch 18, batch 1850, loss[loss=0.2133, simple_loss=0.2846, pruned_loss=0.05188, ctc_loss=0.09554, over 19590.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2605, pruned_loss=0.04005, ctc_loss=0.07488, over 3855800.83 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 8.0
+2024-08-27 04:45:52,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=235546.66666666666, ans=0.125
+2024-08-27 04:46:34,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.73 vs. limit=22.5
+2024-08-27 04:46:38,525 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.500e+02 1.800e+02 2.247e+02 4.177e+02, threshold=3.601e+02, percent-clipped=3.0
+2024-08-27 04:46:40,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0
+2024-08-27 04:46:44,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235653.33333333334, ans=0.125
+2024-08-27 04:46:52,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=235706.66666666666, ans=0.025
+2024-08-27 04:46:56,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=235760.0, ans=0.125
+2024-08-27 04:47:04,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235760.0, ans=0.125
+2024-08-27 04:47:06,326 INFO [train.py:1114] (0/4) Epoch 18, batch 1900, loss[loss=0.2, simple_loss=0.2776, pruned_loss=0.04488, ctc_loss=0.08143, over 19649.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2611, pruned_loss=0.04029, ctc_loss=0.07527, over 3860635.93 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 8.0
+2024-08-27 04:47:07,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=235813.33333333334, ans=0.035
+2024-08-27 04:47:09,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=235813.33333333334, ans=0.0
+2024-08-27 04:47:22,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=235920.0, ans=0.125
+2024-08-27 04:47:29,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235920.0, ans=0.125
+2024-08-27 04:47:44,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.04 vs. limit=15.0
+2024-08-27 04:47:46,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=236026.66666666666, ans=0.0
+2024-08-27 04:47:46,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236026.66666666666, ans=0.125
+2024-08-27 04:47:51,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=236026.66666666666, ans=0.125
+2024-08-27 04:47:53,590 INFO [train.py:1114] (0/4) Epoch 18, batch 1950, loss[loss=0.1751, simple_loss=0.2563, pruned_loss=0.03407, ctc_loss=0.0642, over 19579.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.262, pruned_loss=0.04032, ctc_loss=0.07546, over 3870189.79 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 8.0
+2024-08-27 04:47:54,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=236080.0, ans=0.04949747468305833
+2024-08-27 04:48:06,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=236133.33333333334, ans=0.125
+2024-08-27 04:48:07,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=236133.33333333334, ans=0.07
+2024-08-27 04:48:09,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236133.33333333334, ans=0.1
+2024-08-27 04:48:12,688 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.481e+02 1.697e+02 2.159e+02 5.555e+02, threshold=3.394e+02, percent-clipped=1.0
+2024-08-27 04:48:13,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.31 vs. limit=15.0
+2024-08-27 04:48:20,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=236186.66666666666, ans=0.025
+2024-08-27 04:48:35,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=236293.33333333334, ans=0.2
+2024-08-27 04:48:50,294 INFO [train.py:1114] (0/4) Epoch 18, batch 2000, loss[loss=0.1634, simple_loss=0.2308, pruned_loss=0.03579, ctc_loss=0.06111, over 19655.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2625, pruned_loss=0.04063, ctc_loss=0.07603, over 3855253.43 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:48:58,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236346.66666666666, ans=0.125
+2024-08-27 04:49:06,877 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:49:46,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236453.33333333334, ans=0.1
+2024-08-27 04:49:47,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.87 vs. limit=12.0
+2024-08-27 04:50:16,429 INFO [train.py:1114] (0/4) Epoch 18, batch 2050, loss[loss=0.1706, simple_loss=0.2381, pruned_loss=0.03808, ctc_loss=0.0674, over 19732.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2616, pruned_loss=0.04063, ctc_loss=0.0761, over 3850587.51 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:50:46,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236720.0, ans=0.0
+2024-08-27 04:50:47,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.468e+02 1.842e+02 2.423e+02 4.039e+02, threshold=3.684e+02, percent-clipped=4.0
+2024-08-27 04:50:53,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.99 vs. limit=22.5
+2024-08-27 04:51:07,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=236826.66666666666, ans=0.0
+2024-08-27 04:51:13,548 INFO [train.py:1114] (0/4) Epoch 18, batch 2100, loss[loss=0.1773, simple_loss=0.2563, pruned_loss=0.03593, ctc_loss=0.06647, over 19774.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2614, pruned_loss=0.0405, ctc_loss=0.07575, over 3858150.81 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:51:32,876 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.23 vs. limit=15.0
+2024-08-27 04:51:34,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236933.33333333334, ans=0.1
+2024-08-27 04:52:03,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=237040.0, ans=0.2
+2024-08-27 04:52:03,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237040.0, ans=0.1
+2024-08-27 04:52:11,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237093.33333333334, ans=0.125
+2024-08-27 04:52:13,279 INFO [train.py:1114] (0/4) Epoch 18, batch 2150, loss[loss=0.1783, simple_loss=0.2565, pruned_loss=0.03616, ctc_loss=0.06959, over 19594.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2605, pruned_loss=0.04021, ctc_loss=0.07501, over 3868534.95 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 8.0
+2024-08-27 04:52:14,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=237146.66666666666, ans=0.0
+2024-08-27 04:52:26,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=237200.0, ans=0.125
+2024-08-27 04:52:30,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237253.33333333334, ans=0.125
+2024-08-27 04:52:30,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=237253.33333333334, ans=0.125
+2024-08-27 04:52:31,101 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.418e+02 1.667e+02 2.145e+02 4.483e+02, threshold=3.333e+02, percent-clipped=3.0
+2024-08-27 04:52:33,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=237253.33333333334, ans=0.125
+2024-08-27 04:52:33,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237253.33333333334, ans=0.1
+2024-08-27 04:52:39,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237306.66666666666, ans=0.125
+2024-08-27 04:52:56,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=237413.33333333334, ans=0.09899494936611666
+2024-08-27 04:52:57,215 INFO [train.py:1114] (0/4) Epoch 18, batch 2200, loss[loss=0.1778, simple_loss=0.2534, pruned_loss=0.03787, ctc_loss=0.06581, over 19586.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2603, pruned_loss=0.04011, ctc_loss=0.07486, over 3867468.35 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 8.0
+2024-08-27 04:53:45,576 INFO [train.py:1114] (0/4) Epoch 18, batch 2250, loss[loss=0.1942, simple_loss=0.276, pruned_loss=0.04037, ctc_loss=0.0793, over 19603.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2609, pruned_loss=0.0402, ctc_loss=0.07514, over 3867383.66 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 8.0
+2024-08-27 04:58:07,202 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.445e+02 1.673e+02 2.181e+02 3.635e+02, threshold=3.347e+02, percent-clipped=1.0
+2024-08-27 04:59:44,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=237893.33333333334, ans=0.125
+2024-08-27 05:00:05,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.93 vs. limit=15.0
+2024-08-27 05:00:07,276 INFO [train.py:1114] (0/4) Epoch 18, batch 2300, loss[loss=0.1878, simple_loss=0.2588, pruned_loss=0.04297, ctc_loss=0.07712, over 19496.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2602, pruned_loss=0.04024, ctc_loss=0.07536, over 3860900.49 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 8.0
+2024-08-27 05:04:40,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.55 vs. limit=15.0
+2024-08-27 05:04:54,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.06 vs. limit=10.0
+2024-08-27 05:05:47,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=238106.66666666666, ans=0.025
+2024-08-27 05:05:52,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.25 vs. limit=22.5
+2024-08-27 05:06:01,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=238160.0, ans=0.025
+2024-08-27 05:06:14,066 INFO [train.py:1114] (0/4) Epoch 18, batch 2350, loss[loss=0.2076, simple_loss=0.2815, pruned_loss=0.04885, ctc_loss=0.0899, over 19647.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2605, pruned_loss=0.04063, ctc_loss=0.07596, over 3863844.14 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 8.0
+2024-08-27 05:09:12,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=238266.66666666666, ans=0.025
+2024-08-27 05:09:45,141 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.379e+02 1.605e+02 2.102e+02 3.614e+02, threshold=3.209e+02, percent-clipped=2.0
+2024-08-27 05:10:12,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238320.0, ans=0.1
+2024-08-27 05:10:20,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. limit=10.0
+2024-08-27 05:11:02,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=238373.33333333334, ans=0.0
+2024-08-27 05:11:47,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238426.66666666666, ans=0.0
+2024-08-27 05:11:47,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=238426.66666666666, ans=0.125
+2024-08-27 05:11:49,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=238426.66666666666, ans=0.025
+2024-08-27 05:11:54,344 INFO [train.py:1114] (0/4) Epoch 18, batch 2400, loss[loss=0.2085, simple_loss=0.2752, pruned_loss=0.05248, ctc_loss=0.0918, over 19382.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2625, pruned_loss=0.04129, ctc_loss=0.07711, over 3858008.55 frames. ], batch size: 67, lr: 8.22e-03, grad_scale: 16.0
+2024-08-27 05:13:54,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=238640.0, ans=0.025
+2024-08-27 05:14:34,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.77 vs. limit=10.0
+2024-08-27 05:14:36,108 INFO [train.py:1114] (0/4) Epoch 18, batch 2450, loss[loss=0.2496, simple_loss=0.2951, pruned_loss=0.07462, ctc_loss=0.1374, over 13211.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2661, pruned_loss=0.04375, ctc_loss=0.08175, over 3733992.69 frames. ], batch size: 140, lr: 8.21e-03, grad_scale: 16.0
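
The grad_scale value printed at the end of every train.py summary line is the dynamic loss scale used for mixed-precision training: it is halved when fp16 gradients overflow and grown again after a stretch of stable steps, which is why it moves between 32.0, 16.0 and 8.0 over the batches above and returns to 32.0 at the start of the next epoch below. A minimal sketch of one training step with this mechanism, using the standard torch.cuda.amp API (generic PyTorch usage, not the exact train.py code):

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

def train_step(model, optimizer, criterion, inputs, targets):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():        # forward in fp16 where safe
        loss = criterion(model(inputs), targets)
    scaler.scale(loss).backward()          # backward on the scaled loss
    scaler.step(optimizer)                 # unscales grads; skips step on overflow
    scaler.update()                        # halves scale on overflow, else grows it
    return loss.detach(), scaler.get_scale()  # the value logged as grad_scale
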
], batch size: 140, lr: 8.21e-03, grad_scale: 16.0 +2024-08-27 05:14:39,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=238746.66666666666, ans=0.0 +2024-08-27 05:14:45,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.02 vs. limit=22.5 +2024-08-27 05:15:10,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=238800.0, ans=0.2 +2024-08-27 05:15:19,820 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.338e+02 1.631e+02 1.872e+02 2.220e+02 3.951e+02, threshold=3.743e+02, percent-clipped=5.0 +2024-08-27 05:15:29,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=238853.33333333334, ans=0.0 +2024-08-27 05:15:33,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=238853.33333333334, ans=0.2 +2024-08-27 05:15:46,063 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-27 05:19:02,739 INFO [train.py:1114] (0/4) Epoch 19, batch 0, loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.04146, ctc_loss=0.0731, over 19421.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.04146, ctc_loss=0.0731, over 19421.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-27 05:19:02,740 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-27 05:20:01,889 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.2820, 3.6340, 4.1929, 4.1283], device='cuda:0') +2024-08-27 05:20:05,932 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-27 05:20:05,933 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 13201MB +2024-08-27 05:20:07,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=238954.66666666666, ans=0.125 +2024-08-27 05:20:09,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=238954.66666666666, ans=0.0 +2024-08-27 05:21:28,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-27 05:21:35,407 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:22:54,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=239221.33333333334, ans=0.125 +2024-08-27 05:22:55,134 INFO [train.py:1114] (0/4) Epoch 19, batch 50, loss[loss=0.1762, simple_loss=0.2477, pruned_loss=0.03737, ctc_loss=0.07481, over 19679.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2619, pruned_loss=0.03972, ctc_loss=0.07474, over 845033.20 frames. 
], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:23:20,778 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:23:21,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=239221.33333333334, ans=0.07 +2024-08-27 05:23:28,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.91 vs. limit=15.0 +2024-08-27 05:23:46,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=239328.0, ans=0.0 +2024-08-27 05:23:58,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-27 05:23:59,559 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.492e+02 1.734e+02 2.135e+02 3.431e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-27 05:24:15,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.67 vs. limit=22.5 +2024-08-27 05:24:18,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=239488.0, ans=15.0 +2024-08-27 05:24:19,424 INFO [train.py:1114] (0/4) Epoch 19, batch 100, loss[loss=0.1682, simple_loss=0.2427, pruned_loss=0.03464, ctc_loss=0.06132, over 19728.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2638, pruned_loss=0.04052, ctc_loss=0.07626, over 1498716.65 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:24:26,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=239488.0, ans=0.2 +2024-08-27 05:24:34,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=239541.33333333334, ans=0.0 +2024-08-27 05:24:38,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-27 05:24:39,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=239594.66666666666, ans=0.0 +2024-08-27 05:25:42,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.32 vs. limit=12.0 +2024-08-27 05:25:46,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=239648.0, ans=0.0 +2024-08-27 05:25:52,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239648.0, ans=0.0 +2024-08-27 05:25:55,913 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.14 vs. limit=15.0 +2024-08-27 05:26:05,121 INFO [train.py:1114] (0/4) Epoch 19, batch 150, loss[loss=0.167, simple_loss=0.235, pruned_loss=0.03683, ctc_loss=0.06332, over 19754.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2616, pruned_loss=0.0403, ctc_loss=0.07568, over 2027546.02 frames. 
], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:27:05,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=239861.33333333334, ans=0.0 +2024-08-27 05:27:11,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239861.33333333334, ans=0.125 +2024-08-27 05:27:18,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239914.66666666666, ans=0.1 +2024-08-27 05:27:20,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-27 05:27:20,691 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.500e+02 1.966e+02 2.497e+02 3.604e+02, threshold=3.932e+02, percent-clipped=3.0 +2024-08-27 05:27:27,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-27 05:27:47,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-27 05:27:47,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-27 05:27:59,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-27 05:28:10,142 INFO [train.py:1114] (0/4) Epoch 19, batch 200, loss[loss=0.2011, simple_loss=0.27, pruned_loss=0.04855, ctc_loss=0.0874, over 18444.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2598, pruned_loss=0.03992, ctc_loss=0.07469, over 2435779.19 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-27 05:28:10,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=240021.33333333334, ans=0.0 +2024-08-27 05:28:13,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-27 05:28:21,226 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:28:28,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240128.0, ans=0.125 +2024-08-27 05:29:09,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=240128.0, ans=0.02 +2024-08-27 05:29:18,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-27 05:29:27,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=240234.66666666666, ans=0.0 +2024-08-27 05:29:32,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=15.0 +2024-08-27 05:29:34,217 INFO [train.py:1114] (0/4) Epoch 19, batch 250, loss[loss=0.1965, simple_loss=0.2702, pruned_loss=0.04488, ctc_loss=0.08258, over 19356.00 frames. 
], tot_loss[loss=0.1835, simple_loss=0.2588, pruned_loss=0.03931, ctc_loss=0.07378, over 2756817.04 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-27 05:29:39,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=240288.0, ans=0.0 +2024-08-27 05:29:47,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.66 vs. limit=15.0 +2024-08-27 05:29:58,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=240394.66666666666, ans=0.0 +2024-08-27 05:30:02,558 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.446e+02 1.683e+02 2.499e+02 4.574e+02, threshold=3.367e+02, percent-clipped=7.0 +2024-08-27 05:30:05,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-27 05:30:09,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=240448.0, ans=0.2 +2024-08-27 05:30:10,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1 +2024-08-27 05:30:14,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=240501.33333333334, ans=0.025 +2024-08-27 05:30:16,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.68 vs. limit=15.0 +2024-08-27 05:30:22,802 INFO [train.py:1114] (0/4) Epoch 19, batch 300, loss[loss=0.203, simple_loss=0.2805, pruned_loss=0.04571, ctc_loss=0.08512, over 19482.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2586, pruned_loss=0.03905, ctc_loss=0.07332, over 3000995.93 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-27 05:30:43,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=240661.33333333334, ans=0.0 +2024-08-27 05:30:55,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-27 05:31:00,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=240768.0, ans=0.0 +2024-08-27 05:31:07,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=240768.0, ans=0.0 +2024-08-27 05:31:09,976 INFO [train.py:1114] (0/4) Epoch 19, batch 350, loss[loss=0.1755, simple_loss=0.2445, pruned_loss=0.03808, ctc_loss=0.07572, over 19748.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2595, pruned_loss=0.03934, ctc_loss=0.07366, over 3190586.01 frames. 
], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-27 05:31:25,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240874.66666666666, ans=0.1 +2024-08-27 05:31:26,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240874.66666666666, ans=0.125 +2024-08-27 05:31:39,922 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.453e+02 1.753e+02 2.405e+02 3.677e+02, threshold=3.507e+02, percent-clipped=2.0 +2024-08-27 05:31:51,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=241034.66666666666, ans=0.0 +2024-08-27 05:31:53,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-27 05:31:56,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=241088.0, ans=0.125 +2024-08-27 05:31:57,288 INFO [train.py:1114] (0/4) Epoch 19, batch 400, loss[loss=0.1851, simple_loss=0.265, pruned_loss=0.03779, ctc_loss=0.07396, over 19494.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2592, pruned_loss=0.03924, ctc_loss=0.07358, over 3342052.19 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:32:09,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=241088.0, ans=0.2 +2024-08-27 05:32:35,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-27 05:32:46,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241248.0, ans=0.125 +2024-08-27 05:32:53,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=241301.33333333334, ans=0.0 +2024-08-27 05:32:56,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241301.33333333334, ans=0.125 +2024-08-27 05:32:59,289 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:33:00,037 INFO [train.py:1114] (0/4) Epoch 19, batch 450, loss[loss=0.1805, simple_loss=0.2678, pruned_loss=0.03303, ctc_loss=0.06791, over 19626.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2591, pruned_loss=0.03919, ctc_loss=0.07341, over 3450414.33 frames. 
], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:33:10,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=241354.66666666666, ans=0.0 +2024-08-27 05:33:10,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241408.0, ans=0.1 +2024-08-27 05:33:11,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=241408.0, ans=0.07 +2024-08-27 05:33:12,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=241408.0, ans=0.025 +2024-08-27 05:33:20,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-27 05:33:20,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0 +2024-08-27 05:33:25,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-27 05:33:26,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-27 05:33:30,920 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.397e+02 1.631e+02 2.046e+02 3.175e+02, threshold=3.262e+02, percent-clipped=0.0 +2024-08-27 05:33:49,302 INFO [train.py:1114] (0/4) Epoch 19, batch 500, loss[loss=0.1959, simple_loss=0.2738, pruned_loss=0.04316, ctc_loss=0.07913, over 19656.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2585, pruned_loss=0.03903, ctc_loss=0.07307, over 3545263.34 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:34:12,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241728.0, ans=0.125 +2024-08-27 05:34:20,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-27 05:34:20,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-27 05:34:23,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=241781.33333333334, ans=0.2 +2024-08-27 05:34:24,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.83 vs. limit=15.0 +2024-08-27 05:34:33,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-27 05:34:34,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241834.66666666666, ans=0.1 +2024-08-27 05:34:39,019 INFO [train.py:1114] (0/4) Epoch 19, batch 550, loss[loss=0.1888, simple_loss=0.2617, pruned_loss=0.04219, ctc_loss=0.07897, over 19368.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2592, pruned_loss=0.03938, ctc_loss=0.07376, over 3607835.04 frames. 
], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-27 05:34:40,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241888.0, ans=0.2 +2024-08-27 05:35:07,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=242048.0, ans=0.0 +2024-08-27 05:35:09,357 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.385e+02 1.667e+02 1.980e+02 3.512e+02, threshold=3.334e+02, percent-clipped=2.0 +2024-08-27 05:35:18,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-27 05:35:24,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242101.33333333334, ans=0.1 +2024-08-27 05:35:27,094 INFO [train.py:1114] (0/4) Epoch 19, batch 600, loss[loss=0.1877, simple_loss=0.269, pruned_loss=0.0388, ctc_loss=0.07183, over 19400.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2591, pruned_loss=0.03922, ctc_loss=0.07339, over 3666587.04 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-27 05:35:42,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=242208.0, ans=0.125 +2024-08-27 05:35:43,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242208.0, ans=0.1 +2024-08-27 05:36:18,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=242368.0, ans=0.025 +2024-08-27 05:36:19,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=242368.0, ans=0.0 +2024-08-27 05:36:23,049 INFO [train.py:1114] (0/4) Epoch 19, batch 650, loss[loss=0.1642, simple_loss=0.239, pruned_loss=0.03217, ctc_loss=0.06266, over 19773.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2583, pruned_loss=0.03905, ctc_loss=0.0731, over 3716546.96 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-27 05:36:23,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=242421.33333333334, ans=0.04949747468305833 +2024-08-27 05:36:39,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.49 vs. limit=22.5 +2024-08-27 05:36:53,251 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.470e+02 1.907e+02 2.471e+02 4.129e+02, threshold=3.814e+02, percent-clipped=9.0 +2024-08-27 05:36:57,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-08-27 05:37:00,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-27 05:37:05,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.02 vs. 
limit=12.0 +2024-08-27 05:37:30,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=242634.66666666666, ans=0.2 +2024-08-27 05:37:33,297 INFO [train.py:1114] (0/4) Epoch 19, batch 700, loss[loss=0.1823, simple_loss=0.2555, pruned_loss=0.04009, ctc_loss=0.07244, over 19733.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2586, pruned_loss=0.03933, ctc_loss=0.07347, over 3748806.62 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-27 05:37:35,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242688.0, ans=0.125 +2024-08-27 05:37:44,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=242741.33333333334, ans=0.95 +2024-08-27 05:37:49,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=242741.33333333334, ans=0.125 +2024-08-27 05:37:55,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0 +2024-08-27 05:37:59,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=242794.66666666666, ans=0.125 +2024-08-27 05:38:08,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242848.0, ans=0.0 +2024-08-27 05:38:11,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242901.33333333334, ans=0.0 +2024-08-27 05:38:23,006 INFO [train.py:1114] (0/4) Epoch 19, batch 750, loss[loss=0.1847, simple_loss=0.2737, pruned_loss=0.03533, ctc_loss=0.06268, over 19496.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2588, pruned_loss=0.03949, ctc_loss=0.07379, over 3774571.12 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:38:25,913 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:38:27,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0 +2024-08-27 05:38:31,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=15.0 +2024-08-27 05:38:34,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.89 vs. limit=15.0 +2024-08-27 05:38:37,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243008.0, ans=0.1 +2024-08-27 05:38:51,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.489e+02 1.823e+02 2.314e+02 3.772e+02, threshold=3.647e+02, percent-clipped=0.0 +2024-08-27 05:38:52,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=243114.66666666666, ans=0.0 +2024-08-27 05:38:55,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.63 vs. 
limit=10.0 +2024-08-27 05:39:04,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=243168.0, ans=0.125 +2024-08-27 05:39:11,744 INFO [train.py:1114] (0/4) Epoch 19, batch 800, loss[loss=0.1659, simple_loss=0.2387, pruned_loss=0.03408, ctc_loss=0.06263, over 19802.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2591, pruned_loss=0.03975, ctc_loss=0.07419, over 3795407.26 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:39:23,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=243274.66666666666, ans=0.05 +2024-08-27 05:39:40,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=243381.33333333334, ans=0.2 +2024-08-27 05:39:58,025 INFO [train.py:1114] (0/4) Epoch 19, batch 850, loss[loss=0.1802, simple_loss=0.2673, pruned_loss=0.03375, ctc_loss=0.064, over 19670.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2589, pruned_loss=0.03959, ctc_loss=0.07383, over 3815217.60 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:40:07,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=243488.0, ans=0.2 +2024-08-27 05:40:12,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=243541.33333333334, ans=0.125 +2024-08-27 05:40:28,699 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.388e+02 1.609e+02 2.074e+02 4.897e+02, threshold=3.218e+02, percent-clipped=1.0 +2024-08-27 05:40:33,896 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0 +2024-08-27 05:40:42,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-27 05:40:49,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0 +2024-08-27 05:40:51,593 INFO [train.py:1114] (0/4) Epoch 19, batch 900, loss[loss=0.1698, simple_loss=0.2452, pruned_loss=0.0344, ctc_loss=0.06385, over 19806.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2591, pruned_loss=0.03971, ctc_loss=0.07401, over 3819233.74 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-27 05:41:48,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=243861.33333333334, ans=0.125 +2024-08-27 05:45:20,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-27 05:45:28,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-27 05:45:29,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-27 05:46:16,458 INFO [train.py:1114] (0/4) Epoch 19, batch 950, loss[loss=0.1629, simple_loss=0.2371, pruned_loss=0.03277, ctc_loss=0.058, over 19501.00 frames. 
], tot_loss[loss=0.1845, simple_loss=0.2595, pruned_loss=0.0399, ctc_loss=0.07434, over 3820559.24 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-27 05:46:40,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=244074.66666666666, ans=0.125 +2024-08-27 05:46:49,949 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:46:54,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=244128.0, ans=0.0 +2024-08-27 05:47:00,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=12.0 +2024-08-27 05:47:02,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244128.0, ans=0.125 +2024-08-27 05:47:05,282 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.465e+02 1.729e+02 2.037e+02 3.385e+02, threshold=3.459e+02, percent-clipped=1.0 +2024-08-27 05:47:07,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=244181.33333333334, ans=0.07 +2024-08-27 05:47:23,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.10 vs. limit=15.0 +2024-08-27 05:47:24,467 INFO [train.py:1114] (0/4) Epoch 19, batch 1000, loss[loss=0.1667, simple_loss=0.2533, pruned_loss=0.0293, ctc_loss=0.05386, over 19856.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2606, pruned_loss=0.04036, ctc_loss=0.07521, over 3815653.69 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-27 05:47:36,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=244341.33333333334, ans=0.125 +2024-08-27 05:47:38,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.91 vs. limit=22.5 +2024-08-27 05:47:44,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=244394.66666666666, ans=0.09899494936611666 +2024-08-27 05:47:44,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.47 vs. limit=6.0 +2024-08-27 05:48:05,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=244501.33333333334, ans=0.5 +2024-08-27 05:48:10,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=244501.33333333334, ans=0.125 +2024-08-27 05:48:12,792 INFO [train.py:1114] (0/4) Epoch 19, batch 1050, loss[loss=0.1901, simple_loss=0.2712, pruned_loss=0.03948, ctc_loss=0.07512, over 19834.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2597, pruned_loss=0.04007, ctc_loss=0.07483, over 3822306.47 frames. 
], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-27 05:48:18,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-27 05:48:27,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=244608.0, ans=0.95 +2024-08-27 05:48:35,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-27 05:48:36,266 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.28 vs. limit=15.0 +2024-08-27 05:48:42,874 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.407e+02 1.559e+02 1.901e+02 2.565e+02, threshold=3.118e+02, percent-clipped=0.0 +2024-08-27 05:48:47,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=244714.66666666666, ans=0.0 +2024-08-27 05:49:02,407 INFO [train.py:1114] (0/4) Epoch 19, batch 1100, loss[loss=0.1714, simple_loss=0.2462, pruned_loss=0.03549, ctc_loss=0.0642, over 19574.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2593, pruned_loss=0.03969, ctc_loss=0.0743, over 3830280.28 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:49:12,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244874.66666666666, ans=0.0 +2024-08-27 05:49:34,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=244981.33333333334, ans=0.125 +2024-08-27 05:49:51,755 INFO [train.py:1114] (0/4) Epoch 19, batch 1150, loss[loss=0.1547, simple_loss=0.2325, pruned_loss=0.02783, ctc_loss=0.05308, over 19596.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2594, pruned_loss=0.03982, ctc_loss=0.07464, over 3828089.38 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:49:58,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=245088.0, ans=0.125 +2024-08-27 05:51:13,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=245141.33333333334, ans=0.2 +2024-08-27 05:51:15,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=245141.33333333334, ans=0.0 +2024-08-27 05:51:16,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.02 vs. limit=15.0 +2024-08-27 05:51:19,939 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.51 vs. 
limit=15.0 +2024-08-27 05:51:34,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245194.66666666666, ans=0.125 +2024-08-27 05:52:27,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.437e+02 1.648e+02 2.100e+02 3.411e+02, threshold=3.296e+02, percent-clipped=3.0 +2024-08-27 05:52:31,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=245248.0, ans=0.05 +2024-08-27 05:52:42,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=245301.33333333334, ans=0.125 +2024-08-27 05:52:44,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=245301.33333333334, ans=0.125 +2024-08-27 05:52:46,998 INFO [train.py:1114] (0/4) Epoch 19, batch 1200, loss[loss=0.1918, simple_loss=0.2639, pruned_loss=0.04264, ctc_loss=0.08591, over 19828.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03966, ctc_loss=0.07437, over 3823389.80 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:52:52,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.11 vs. limit=22.5 +2024-08-27 05:53:13,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-08-27 05:53:20,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245514.66666666666, ans=0.1 +2024-08-27 05:53:24,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245514.66666666666, ans=0.1 +2024-08-27 05:53:26,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245568.0, ans=0.0 +2024-08-27 05:53:35,242 INFO [train.py:1114] (0/4) Epoch 19, batch 1250, loss[loss=0.1928, simple_loss=0.2655, pruned_loss=0.04439, ctc_loss=0.0784, over 19532.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2608, pruned_loss=0.04011, ctc_loss=0.07497, over 3841958.57 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-27 05:54:02,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245728.0, ans=0.0 +2024-08-27 05:54:04,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=245781.33333333334, ans=0.125 +2024-08-27 05:54:05,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=245781.33333333334, ans=0.0 +2024-08-27 05:54:05,854 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.471e+02 1.735e+02 2.173e+02 3.319e+02, threshold=3.470e+02, percent-clipped=1.0 +2024-08-27 05:54:06,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=245781.33333333334, ans=0.04949747468305833 +2024-08-27 05:54:26,192 INFO [train.py:1114] (0/4) Epoch 19, batch 1300, loss[loss=0.1884, simple_loss=0.2646, pruned_loss=0.04173, ctc_loss=0.07173, over 18889.00 frames. 
], tot_loss[loss=0.1848, simple_loss=0.2601, pruned_loss=0.03989, ctc_loss=0.0745, over 3846145.41 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-27 05:54:30,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=245888.0, ans=0.07 +2024-08-27 05:54:37,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-27 05:54:46,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245994.66666666666, ans=0.0 +2024-08-27 05:54:55,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246048.0, ans=0.1 +2024-08-27 05:55:10,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246101.33333333334, ans=0.0 +2024-08-27 05:55:13,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246154.66666666666, ans=0.1 +2024-08-27 05:55:13,889 INFO [train.py:1114] (0/4) Epoch 19, batch 1350, loss[loss=0.1675, simple_loss=0.2512, pruned_loss=0.03072, ctc_loss=0.05604, over 19769.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.0395, ctc_loss=0.07374, over 3856948.23 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:55:14,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-27 05:55:17,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-27 05:55:26,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.40 vs. limit=22.5 +2024-08-27 05:55:27,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246208.0, ans=0.125 +2024-08-27 05:55:28,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=22.5 +2024-08-27 05:55:32,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=246208.0, ans=0.125 +2024-08-27 05:55:45,525 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.414e+02 1.634e+02 2.144e+02 3.359e+02, threshold=3.268e+02, percent-clipped=0.0 +2024-08-27 05:56:03,857 INFO [train.py:1114] (0/4) Epoch 19, batch 1400, loss[loss=0.1426, simple_loss=0.2195, pruned_loss=0.02395, ctc_loss=0.04458, over 19657.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2593, pruned_loss=0.03957, ctc_loss=0.07378, over 3863914.06 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:56:14,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.76 vs. 
limit=12.0 +2024-08-27 05:56:15,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246474.66666666666, ans=0.0 +2024-08-27 05:56:18,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=246474.66666666666, ans=0.125 +2024-08-27 05:56:23,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.14 vs. limit=6.0 +2024-08-27 05:56:24,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246528.0, ans=0.0 +2024-08-27 05:56:34,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.76 vs. limit=15.0 +2024-08-27 05:56:37,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=246581.33333333334, ans=0.09899494936611666 +2024-08-27 05:56:42,401 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.26 vs. limit=6.0 +2024-08-27 05:56:44,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=246634.66666666666, ans=0.2 +2024-08-27 05:56:45,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246634.66666666666, ans=0.1 +2024-08-27 05:56:49,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-27 05:56:53,085 INFO [train.py:1114] (0/4) Epoch 19, batch 1450, loss[loss=0.1965, simple_loss=0.2697, pruned_loss=0.04469, ctc_loss=0.08484, over 19667.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2596, pruned_loss=0.03984, ctc_loss=0.07412, over 3862670.75 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:57:23,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=246848.0, ans=0.2 +2024-08-27 05:57:25,417 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.422e+02 1.608e+02 1.963e+02 3.546e+02, threshold=3.216e+02, percent-clipped=4.0 +2024-08-27 05:57:32,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246901.33333333334, ans=0.1 +2024-08-27 05:57:34,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=246901.33333333334, ans=0.125 +2024-08-27 05:57:35,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=246901.33333333334, ans=0.0 +2024-08-27 05:57:37,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246901.33333333334, ans=0.1 +2024-08-27 05:57:42,296 INFO [train.py:1114] (0/4) Epoch 19, batch 1500, loss[loss=0.1876, simple_loss=0.2658, pruned_loss=0.03902, ctc_loss=0.07827, over 19578.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2596, pruned_loss=0.03955, ctc_loss=0.07378, over 3862431.58 frames. 
], batch size: 57, lr: 7.86e-03, grad_scale: 16.0 +2024-08-27 05:57:53,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247008.0, ans=0.1 +2024-08-27 05:58:44,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0 +2024-08-27 05:58:59,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=247168.0, ans=0.5 +2024-08-27 05:59:01,744 INFO [train.py:1114] (0/4) Epoch 19, batch 1550, loss[loss=0.2011, simple_loss=0.2785, pruned_loss=0.04441, ctc_loss=0.08739, over 19613.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2595, pruned_loss=0.03958, ctc_loss=0.07402, over 3847641.78 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 16.0 +2024-08-27 05:59:16,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247274.66666666666, ans=0.0 +2024-08-27 05:59:32,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247328.0, ans=0.1 +2024-08-27 05:59:38,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=247328.0, ans=0.125 +2024-08-27 05:59:43,861 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.416e+02 1.634e+02 2.007e+02 4.215e+02, threshold=3.267e+02, percent-clipped=2.0 +2024-08-27 05:59:45,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=247381.33333333334, ans=0.0 +2024-08-27 05:59:46,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=15.0 +2024-08-27 05:59:51,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-08-27 05:59:59,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.77 vs. limit=15.0 +2024-08-27 06:00:02,731 INFO [train.py:1114] (0/4) Epoch 19, batch 1600, loss[loss=0.1812, simple_loss=0.2652, pruned_loss=0.03435, ctc_loss=0.0712, over 19846.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2596, pruned_loss=0.03979, ctc_loss=0.07431, over 3836418.62 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-27 06:00:06,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=247488.0, ans=22.5 +2024-08-27 06:00:15,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247541.33333333334, ans=0.1 +2024-08-27 06:00:16,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247541.33333333334, ans=0.1 +2024-08-27 06:00:16,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.41 vs. 
limit=15.0 +2024-08-27 06:00:21,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=247594.66666666666, ans=0.0 +2024-08-27 06:00:51,748 INFO [train.py:1114] (0/4) Epoch 19, batch 1650, loss[loss=0.2031, simple_loss=0.2822, pruned_loss=0.04464, ctc_loss=0.087, over 19628.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2597, pruned_loss=0.03999, ctc_loss=0.07454, over 3833472.39 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-27 06:01:00,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=247808.0, ans=0.125 +2024-08-27 06:01:21,512 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.539e+02 1.985e+02 2.467e+02 4.637e+02, threshold=3.969e+02, percent-clipped=10.0 +2024-08-27 06:01:35,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247968.0, ans=0.1 +2024-08-27 06:01:39,973 INFO [train.py:1114] (0/4) Epoch 19, batch 1700, loss[loss=0.1847, simple_loss=0.2463, pruned_loss=0.04505, ctc_loss=0.08268, over 19657.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2594, pruned_loss=0.03966, ctc_loss=0.07406, over 3847350.29 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-27 06:01:58,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=248128.0, ans=0.125 +2024-08-27 06:02:16,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=248234.66666666666, ans=0.025 +2024-08-27 06:02:19,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=15.0 +2024-08-27 06:02:23,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.53 vs. limit=15.0 +2024-08-27 06:02:23,934 INFO [train.py:1114] (0/4) Epoch 19, batch 1750, loss[loss=0.1827, simple_loss=0.2444, pruned_loss=0.04414, ctc_loss=0.08169, over 19654.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2591, pruned_loss=0.03954, ctc_loss=0.07395, over 3852186.62 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-27 06:02:31,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=248288.0, ans=0.0 +2024-08-27 06:02:57,022 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.179e+02 1.492e+02 1.808e+02 2.313e+02 3.735e+02, threshold=3.616e+02, percent-clipped=0.0 +2024-08-27 06:02:59,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=248448.0, ans=0.125 +2024-08-27 06:03:00,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.86 vs. limit=22.5 +2024-08-27 06:03:01,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248448.0, ans=0.1 +2024-08-27 06:03:18,745 INFO [train.py:1114] (0/4) Epoch 19, batch 1800, loss[loss=0.18, simple_loss=0.2624, pruned_loss=0.036, ctc_loss=0.06371, over 19604.00 frames. 
], tot_loss[loss=0.1838, simple_loss=0.2594, pruned_loss=0.03932, ctc_loss=0.07377, over 3854361.58 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-27 06:03:22,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248554.66666666666, ans=0.125 +2024-08-27 06:03:23,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=248554.66666666666, ans=0.2 +2024-08-27 06:03:30,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.72 vs. limit=15.0 +2024-08-27 06:03:55,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.55 vs. limit=8.0 +2024-08-27 06:03:56,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=248768.0, ans=0.0 +2024-08-27 06:04:02,716 INFO [train.py:1114] (0/4) Epoch 19, batch 1850, loss[loss=0.1949, simple_loss=0.2738, pruned_loss=0.04257, ctc_loss=0.07726, over 19595.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2595, pruned_loss=0.03943, ctc_loss=0.07381, over 3857567.59 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:32,741 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.498e+02 2.037e+02 3.063e+02 6.275e+02, threshold=4.074e+02, percent-clipped=13.0 +2024-08-27 06:04:37,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248981.33333333334, ans=0.1 +2024-08-27 06:04:47,719 INFO [train.py:1114] (0/4) Epoch 19, batch 1900, loss[loss=0.1938, simple_loss=0.2807, pruned_loss=0.0391, ctc_loss=0.07161, over 19668.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2603, pruned_loss=0.03948, ctc_loss=0.07386, over 3862216.50 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:47,832 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 06:04:51,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249088.0, ans=0.1 +2024-08-27 06:05:03,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=249141.33333333334, ans=0.0 +2024-08-27 06:05:06,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=249194.66666666666, ans=0.125 +2024-08-27 06:05:13,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=249194.66666666666, ans=0.125 +2024-08-27 06:05:13,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=249194.66666666666, ans=0.025 +2024-08-27 06:05:16,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=249248.0, ans=10.0 +2024-08-27 06:05:47,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249248.0, ans=0.125 +2024-08-27 06:05:58,944 INFO [train.py:1114] (0/4) Epoch 19, batch 1950, loss[loss=0.1674, simple_loss=0.2453, pruned_loss=0.03269, ctc_loss=0.06049, over 19586.00 frames. 
], tot_loss[loss=0.1852, simple_loss=0.261, pruned_loss=0.03981, ctc_loss=0.07441, over 3871303.88 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-1 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-1
new file mode 100644
index 0000000000000000000000000000000000000000..25a77e5c2feca414713fb3d228ea48ff9f0776eb
--- /dev/null
+++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-1
@@ -0,0 +1,5424 @@
+2024-08-26 14:14:06,055 INFO [train.py:1182] (1/4) Training started
+2024-08-26 14:14:09,228 INFO [train.py:1192] (1/4) Device: cuda:1
+2024-08-26 14:14:11,784 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2652.int.cedar.computecanada.ca', 'IP address': '172.16.146.89'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 4, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True,
'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-26 14:14:11,784 INFO [train.py:1212] (1/4) About to create model +2024-08-26 14:14:12,458 INFO [train.py:1216] (1/4) Number of model parameters: 65805511 +2024-08-26 14:14:12,459 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-3.pt +2024-08-26 14:14:20,052 INFO [train.py:1231] (1/4) Using DDP +2024-08-26 14:14:24,078 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-26 14:14:24,266 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-26 14:14:24,266 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-26 14:14:27,297 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-26 14:14:27,297 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-26 14:14:27,297 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-26 14:14:28,822 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-26 14:14:28,829 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-26 14:14:31,125 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-26 14:14:32,027 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-26 14:14:32,333 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-26 14:14:32,333 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-26 14:18:38,801 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=4.99 vs. limit=3.0 +2024-08-26 14:18:40,629 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:18:41,873 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:18:49,645 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:18:50,847 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:19:04,362 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=23.10 vs. limit=7.5 +2024-08-26 14:19:04,871 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:19:05,853 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=256, metric=4.08 vs. limit=7.5 +2024-08-26 14:19:06,160 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:19:06,178 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-26 14:19:52,361 INFO [train.py:1114] (1/4) Epoch 4, batch 0, loss[loss=0.2966, simple_loss=0.3244, pruned_loss=0.09742, ctc_loss=0.1849, over 19425.00 frames. ], tot_loss[loss=0.2966, simple_loss=0.3244, pruned_loss=0.09742, ctc_loss=0.1849, over 19425.00 frames. 
], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:19:52,361 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 14:20:25,789 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2421, simple_loss=0.3218, pruned_loss=0.05945, ctc_loss=0.1086, over 944034.00 frames. +2024-08-26 14:20:25,790 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12881MB +2024-08-26 14:20:26,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.26 vs. limit=6.0 +2024-08-26 14:22:31,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=39989.333333333336, ans=0.00217623188405797 +2024-08-26 14:22:36,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=39989.333333333336, ans=0.2 +2024-08-26 14:23:04,536 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.845e+02 2.126e+02 2.642e+02 4.004e+02, threshold=4.252e+02, percent-clipped=0.0 +2024-08-26 14:23:26,401 INFO [train.py:1114] (1/4) Epoch 4, batch 50, loss[loss=0.2348, simple_loss=0.2851, pruned_loss=0.06712, ctc_loss=0.1255, over 19702.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.3361, pruned_loss=0.09664, ctc_loss=0.1827, over 845181.07 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:24:00,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=40149.333333333336, ans=0.125 +2024-08-26 14:24:23,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40202.666666666664, ans=0.1 +2024-08-26 14:25:32,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40362.666666666664, ans=0.125 +2024-08-26 14:25:33,107 INFO [train.py:1114] (1/4) Epoch 4, batch 100, loss[loss=0.2742, simple_loss=0.3215, pruned_loss=0.08177, ctc_loss=0.1584, over 19702.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3349, pruned_loss=0.09513, ctc_loss=0.1788, over 1499126.13 frames. ], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-26 14:25:39,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=40362.666666666664, ans=0.0 +2024-08-26 14:25:44,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=40362.666666666664, ans=0.0 +2024-08-26 14:26:17,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40469.333333333336, ans=0.1 +2024-08-26 14:26:40,719 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.662e+02 1.906e+02 2.226e+02 3.245e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-26 14:26:48,538 INFO [train.py:1114] (1/4) Epoch 4, batch 150, loss[loss=0.238, simple_loss=0.2837, pruned_loss=0.07037, ctc_loss=0.1291, over 19700.00 frames. ], tot_loss[loss=0.2927, simple_loss=0.3302, pruned_loss=0.0928, ctc_loss=0.1741, over 2026958.37 frames. 
], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:26:48,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40629.333333333336, ans=0.1 +2024-08-26 14:26:57,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=40629.333333333336, ans=0.04949747468305833 +2024-08-26 14:27:53,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=40842.666666666664, ans=0.1 +2024-08-26 14:28:03,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40842.666666666664, ans=0.125 +2024-08-26 14:28:04,924 INFO [train.py:1114] (1/4) Epoch 4, batch 200, loss[loss=0.3169, simple_loss=0.3491, pruned_loss=0.1033, ctc_loss=0.1952, over 18345.00 frames. ], tot_loss[loss=0.2912, simple_loss=0.3289, pruned_loss=0.09221, ctc_loss=0.1726, over 2434725.20 frames. ], batch size: 85, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:28:07,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40896.0, ans=0.1 +2024-08-26 14:28:16,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.89 vs. limit=15.0 +2024-08-26 14:28:21,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.25 vs. limit=22.5 +2024-08-26 14:28:29,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=41002.666666666664, ans=0.09899494936611666 +2024-08-26 14:28:31,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=41002.666666666664, ans=0.025 +2024-08-26 14:28:43,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.75 vs. limit=22.5 +2024-08-26 14:28:49,770 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.824e+02 2.102e+02 2.533e+02 3.992e+02, threshold=4.203e+02, percent-clipped=3.0 +2024-08-26 14:28:55,763 INFO [train.py:1114] (1/4) Epoch 4, batch 250, loss[loss=0.2998, simple_loss=0.3381, pruned_loss=0.09599, ctc_loss=0.1738, over 19353.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3287, pruned_loss=0.09152, ctc_loss=0.1713, over 2754787.26 frames. ], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:29:02,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.53 vs. limit=22.5 +2024-08-26 14:29:08,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=41216.0, ans=0.125 +2024-08-26 14:29:08,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=41216.0, ans=0.125 +2024-08-26 14:29:19,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.30 vs. 
limit=15.0 +2024-08-26 14:29:22,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41269.333333333336, ans=0.1 +2024-08-26 14:29:45,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=41376.0, ans=0.2 +2024-08-26 14:29:46,780 INFO [train.py:1114] (1/4) Epoch 4, batch 300, loss[loss=0.2771, simple_loss=0.3248, pruned_loss=0.08269, ctc_loss=0.1602, over 19508.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3281, pruned_loss=0.09133, ctc_loss=0.1711, over 3000076.96 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:30:10,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-26 14:30:16,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-26 14:30:20,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.58 vs. limit=15.0 +2024-08-26 14:30:32,082 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.674e+02 1.880e+02 2.161e+02 3.950e+02, threshold=3.761e+02, percent-clipped=0.0 +2024-08-26 14:30:37,853 INFO [train.py:1114] (1/4) Epoch 4, batch 350, loss[loss=0.2618, simple_loss=0.2982, pruned_loss=0.08233, ctc_loss=0.1519, over 19757.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3282, pruned_loss=0.09139, ctc_loss=0.1711, over 3188636.47 frames. ], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:30:43,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=41696.0, ans=0.04949747468305833 +2024-08-26 14:30:44,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=41696.0, ans=10.0 +2024-08-26 14:30:53,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=41749.333333333336, ans=0.025 +2024-08-26 14:31:13,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=41856.0, ans=0.0 +2024-08-26 14:31:35,389 INFO [train.py:1114] (1/4) Epoch 4, batch 400, loss[loss=0.3054, simple_loss=0.3384, pruned_loss=0.09967, ctc_loss=0.1827, over 19504.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3278, pruned_loss=0.09071, ctc_loss=0.1699, over 3340991.85 frames. 
], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:31:51,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=42016.0, ans=0.0 +2024-08-26 14:32:05,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=42122.666666666664, ans=0.125 +2024-08-26 14:32:14,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42176.0, ans=0.1 +2024-08-26 14:32:19,258 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.438e+02 1.828e+02 2.157e+02 2.598e+02 8.551e+02, threshold=4.314e+02, percent-clipped=2.0 +2024-08-26 14:32:21,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.60 vs. limit=15.0 +2024-08-26 14:32:23,142 INFO [train.py:1114] (1/4) Epoch 4, batch 450, loss[loss=0.3272, simple_loss=0.3642, pruned_loss=0.1059, ctc_loss=0.1961, over 19623.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3285, pruned_loss=0.09159, ctc_loss=0.1709, over 3449441.54 frames. ], batch size: 55, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:32:33,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.12 vs. limit=15.0 +2024-08-26 14:32:40,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-26 14:33:01,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=42389.333333333336, ans=0.0016544927536231869 +2024-08-26 14:33:12,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=42442.666666666664, ans=0.125 +2024-08-26 14:33:14,191 INFO [train.py:1114] (1/4) Epoch 4, batch 500, loss[loss=0.3097, simple_loss=0.3475, pruned_loss=0.09887, ctc_loss=0.1854, over 19692.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3268, pruned_loss=0.09066, ctc_loss=0.1695, over 3545718.72 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:33:43,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=42602.666666666664, ans=0.125 +2024-08-26 14:34:07,924 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.676e+02 1.857e+02 2.171e+02 5.331e+02, threshold=3.714e+02, percent-clipped=2.0 +2024-08-26 14:34:11,744 INFO [train.py:1114] (1/4) Epoch 4, batch 550, loss[loss=0.3025, simple_loss=0.3411, pruned_loss=0.09523, ctc_loss=0.1838, over 19214.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3269, pruned_loss=0.09079, ctc_loss=0.1698, over 3606988.10 frames. 
], batch size: 71, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:34:26,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=42816.0, ans=0.125 +2024-08-26 14:34:28,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=42816.0, ans=0.125 +2024-08-26 14:34:29,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=42816.0, ans=0.0 +2024-08-26 14:34:37,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=42869.333333333336, ans=0.125 +2024-08-26 14:34:41,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=42869.333333333336, ans=0.125 +2024-08-26 14:34:42,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.19 vs. limit=10.0 +2024-08-26 14:34:52,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=42976.0, ans=0.0015269565217391305 +2024-08-26 14:35:03,318 INFO [train.py:1114] (1/4) Epoch 4, batch 600, loss[loss=0.3208, simple_loss=0.3571, pruned_loss=0.1041, ctc_loss=0.1909, over 19407.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3269, pruned_loss=0.09073, ctc_loss=0.1698, over 3664793.90 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:35:04,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43029.333333333336, ans=0.1 +2024-08-26 14:35:05,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=43029.333333333336, ans=0.125 +2024-08-26 14:35:11,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=43029.333333333336, ans=0.00151536231884058 +2024-08-26 14:35:15,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=43082.666666666664, ans=0.125 +2024-08-26 14:35:27,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=43136.0, ans=0.125 +2024-08-26 14:35:45,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=43242.666666666664, ans=0.125 +2024-08-26 14:35:45,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=43242.666666666664, ans=0.1 +2024-08-26 14:35:47,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=43242.666666666664, ans=0.0014689855072463776 +2024-08-26 14:35:48,738 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:35:50,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.whiten.whitening_limit, batch_count=43242.666666666664, ans=12.0 +2024-08-26 14:35:50,404 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 1.699e+02 1.953e+02 2.270e+02 5.390e+02, threshold=3.906e+02, percent-clipped=1.0 +2024-08-26 14:35:54,190 INFO [train.py:1114] (1/4) 
Epoch 4, batch 650, loss[loss=0.2951, simple_loss=0.335, pruned_loss=0.09274, ctc_loss=0.1743, over 19790.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3255, pruned_loss=0.08973, ctc_loss=0.1681, over 3715390.33 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 8.0 +2024-08-26 14:36:29,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43456.0, ans=0.125 +2024-08-26 14:36:41,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=43509.333333333336, ans=0.0 +2024-08-26 14:36:48,327 INFO [train.py:1114] (1/4) Epoch 4, batch 700, loss[loss=0.2722, simple_loss=0.3143, pruned_loss=0.08368, ctc_loss=0.157, over 19727.00 frames. ], tot_loss[loss=0.287, simple_loss=0.3264, pruned_loss=0.09007, ctc_loss=0.1688, over 3747440.16 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:37:00,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43616.0, ans=0.125 +2024-08-26 14:37:17,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=43722.666666666664, ans=0.0013646376811594207 +2024-08-26 14:37:18,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43722.666666666664, ans=0.0 +2024-08-26 14:37:32,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43776.0, ans=0.0 +2024-08-26 14:37:33,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=43776.0, ans=0.0 +2024-08-26 14:37:36,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.717e+02 1.974e+02 2.287e+02 3.794e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 14:37:37,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43776.0, ans=0.125 +2024-08-26 14:37:39,965 INFO [train.py:1114] (1/4) Epoch 4, batch 750, loss[loss=0.2841, simple_loss=0.3351, pruned_loss=0.08525, ctc_loss=0.1566, over 19483.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3255, pruned_loss=0.08965, ctc_loss=0.1681, over 3772264.34 frames. ], batch size: 54, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:37:54,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=43882.666666666664, ans=0.125 +2024-08-26 14:38:01,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=43936.0, ans=0.125 +2024-08-26 14:38:05,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=43936.0, ans=0.0 +2024-08-26 14:38:06,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.24 vs. limit=15.0 +2024-08-26 14:38:15,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.87 vs. limit=10.0 +2024-08-26 14:38:15,914 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.90 vs. 
limit=15.0 +2024-08-26 14:38:25,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=44042.666666666664, ans=0.2 +2024-08-26 14:38:31,794 INFO [train.py:1114] (1/4) Epoch 4, batch 800, loss[loss=0.2753, simple_loss=0.3089, pruned_loss=0.08905, ctc_loss=0.159, over 19810.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3249, pruned_loss=0.08924, ctc_loss=0.1674, over 3794731.81 frames. ], batch size: 49, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:38:43,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.97 vs. limit=5.0 +2024-08-26 14:38:49,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=44149.333333333336, ans=0.05 +2024-08-26 14:38:54,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=44202.666666666664, ans=0.125 +2024-08-26 14:39:16,257 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.706e+02 1.876e+02 2.197e+02 5.470e+02, threshold=3.751e+02, percent-clipped=2.0 +2024-08-26 14:39:19,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=44309.333333333336, ans=0.125 +2024-08-26 14:39:22,913 INFO [train.py:1114] (1/4) Epoch 4, batch 850, loss[loss=0.2964, simple_loss=0.3423, pruned_loss=0.09203, ctc_loss=0.1663, over 19670.00 frames. ], tot_loss[loss=0.284, simple_loss=0.324, pruned_loss=0.08871, ctc_loss=0.1665, over 3815241.96 frames. ], batch size: 59, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:39:23,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.16 vs. limit=15.0 +2024-08-26 14:39:52,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=44522.666666666664, ans=0.125 +2024-08-26 14:39:53,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=44522.666666666664, ans=0.125 +2024-08-26 14:40:11,458 INFO [train.py:1114] (1/4) Epoch 4, batch 900, loss[loss=0.2626, simple_loss=0.3021, pruned_loss=0.08108, ctc_loss=0.1523, over 19799.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3248, pruned_loss=0.08926, ctc_loss=0.1672, over 3819422.89 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:40:16,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.02 vs. 
limit=6.0 +2024-08-26 14:40:16,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=44629.333333333336, ans=0.025 +2024-08-26 14:40:28,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=44682.666666666664, ans=0.025 +2024-08-26 14:40:28,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44682.666666666664, ans=0.1 +2024-08-26 14:40:37,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=44736.0, ans=0.0011443478260869562 +2024-08-26 14:40:37,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=44736.0, ans=0.0 +2024-08-26 14:40:44,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.90 vs. limit=15.0 +2024-08-26 14:40:59,413 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.686e+02 1.871e+02 2.157e+02 4.639e+02, threshold=3.742e+02, percent-clipped=1.0 +2024-08-26 14:41:03,418 INFO [train.py:1114] (1/4) Epoch 4, batch 950, loss[loss=0.2498, simple_loss=0.2969, pruned_loss=0.07345, ctc_loss=0.1396, over 19496.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3255, pruned_loss=0.08965, ctc_loss=0.1679, over 3821051.77 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:41:20,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0 +2024-08-26 14:41:35,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=45056.0, ans=0.125 +2024-08-26 14:41:35,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=45056.0, ans=0.2 +2024-08-26 14:41:54,729 INFO [train.py:1114] (1/4) Epoch 4, batch 1000, loss[loss=0.2509, simple_loss=0.3063, pruned_loss=0.07159, ctc_loss=0.1308, over 19857.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3266, pruned_loss=0.09035, ctc_loss=0.1688, over 3817432.80 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-26 14:42:00,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=45162.666666666664, ans=0.125 +2024-08-26 14:42:38,237 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.78 vs. limit=22.5 +2024-08-26 14:42:42,497 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.702e+02 1.844e+02 2.187e+02 3.225e+02, threshold=3.689e+02, percent-clipped=0.0 +2024-08-26 14:42:46,501 INFO [train.py:1114] (1/4) Epoch 4, batch 1050, loss[loss=0.2833, simple_loss=0.3327, pruned_loss=0.08504, ctc_loss=0.1596, over 19835.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3252, pruned_loss=0.08942, ctc_loss=0.1671, over 3823387.51 frames. 
], batch size: 57, lr: 3.19e-02, grad_scale: 16.0 +2024-08-26 14:43:01,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45482.666666666664, ans=0.125 +2024-08-26 14:43:07,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=45536.0, ans=0.0 +2024-08-26 14:43:38,138 INFO [train.py:1114] (1/4) Epoch 4, batch 1100, loss[loss=0.2729, simple_loss=0.3239, pruned_loss=0.08106, ctc_loss=0.1492, over 19585.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3242, pruned_loss=0.08888, ctc_loss=0.1662, over 3829412.18 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-26 14:43:38,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=45696.0, ans=0.2 +2024-08-26 14:43:40,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=45696.0, ans=0.05 +2024-08-26 14:43:47,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45749.333333333336, ans=0.1 +2024-08-26 14:43:52,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.94 vs. limit=22.5 +2024-08-26 14:43:53,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=45749.333333333336, ans=0.125 +2024-08-26 14:43:59,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=45802.666666666664, ans=0.2 +2024-08-26 14:44:15,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=45856.0, ans=0.125 +2024-08-26 14:44:23,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=45909.333333333336, ans=0.09899494936611666 +2024-08-26 14:44:25,680 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.748e+02 1.997e+02 2.350e+02 6.199e+02, threshold=3.995e+02, percent-clipped=5.0 +2024-08-26 14:44:29,534 INFO [train.py:1114] (1/4) Epoch 4, batch 1150, loss[loss=0.2663, simple_loss=0.3141, pruned_loss=0.0802, ctc_loss=0.145, over 19587.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3247, pruned_loss=0.08922, ctc_loss=0.1666, over 3827714.63 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-26 14:44:34,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.71 vs. 
limit=15.0 +2024-08-26 14:44:37,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=45962.666666666664, ans=0.125 +2024-08-26 14:45:30,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45962.666666666664, ans=0.1 +2024-08-26 14:45:35,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46016.0, ans=0.125 +2024-08-26 14:46:05,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=46069.333333333336, ans=0.0008544927536231883 +2024-08-26 14:46:20,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.81 vs. limit=22.5 +2024-08-26 14:46:32,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46176.0, ans=0.1 +2024-08-26 14:46:38,928 INFO [train.py:1114] (1/4) Epoch 4, batch 1200, loss[loss=0.2996, simple_loss=0.3416, pruned_loss=0.09333, ctc_loss=0.1774, over 19854.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3258, pruned_loss=0.08954, ctc_loss=0.1671, over 3824320.42 frames. ], batch size: 57, lr: 3.17e-02, grad_scale: 32.0 +2024-08-26 14:46:45,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=46229.333333333336, ans=0.0008197101449275365 +2024-08-26 14:46:59,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46336.0, ans=0.1 +2024-08-26 14:47:02,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=46336.0, ans=0.0007965217391304354 +2024-08-26 14:47:05,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=46336.0, ans=0.0 +2024-08-26 14:47:16,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=46442.666666666664, ans=0.2 +2024-08-26 14:47:23,216 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.767e+02 1.944e+02 2.283e+02 5.479e+02, threshold=3.889e+02, percent-clipped=1.0 +2024-08-26 14:47:29,952 INFO [train.py:1114] (1/4) Epoch 4, batch 1250, loss[loss=0.3007, simple_loss=0.3422, pruned_loss=0.09498, ctc_loss=0.1729, over 19492.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3255, pruned_loss=0.08884, ctc_loss=0.1657, over 3842481.00 frames. 
], batch size: 61, lr: 3.17e-02, grad_scale: 32.0 +2024-08-26 14:47:48,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=46602.666666666664, ans=0.035 +2024-08-26 14:47:51,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=46602.666666666664, ans=0.125 +2024-08-26 14:47:56,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=46602.666666666664, ans=0.0007385507246376825 +2024-08-26 14:48:21,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=46762.666666666664, ans=0.125 +2024-08-26 14:48:22,038 INFO [train.py:1114] (1/4) Epoch 4, batch 1300, loss[loss=0.316, simple_loss=0.3506, pruned_loss=0.1029, ctc_loss=0.1891, over 18872.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3238, pruned_loss=0.0877, ctc_loss=0.1639, over 3847173.41 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0 +2024-08-26 14:48:22,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=46762.666666666664, ans=0.2 +2024-08-26 14:48:23,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=46762.666666666664, ans=0.125 +2024-08-26 14:49:06,440 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.633e+02 1.793e+02 2.136e+02 4.035e+02, threshold=3.586e+02, percent-clipped=1.0 +2024-08-26 14:49:08,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46976.0, ans=0.125 +2024-08-26 14:49:09,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.04 vs. limit=15.0 +2024-08-26 14:49:10,201 INFO [train.py:1114] (1/4) Epoch 4, batch 1350, loss[loss=0.2515, simple_loss=0.3032, pruned_loss=0.0744, ctc_loss=0.1275, over 19767.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3234, pruned_loss=0.08721, ctc_loss=0.1629, over 3856426.63 frames. 
], batch size: 54, lr: 3.16e-02, grad_scale: 32.0 +2024-08-26 14:49:10,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47029.333333333336, ans=0.1 +2024-08-26 14:49:19,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=47029.333333333336, ans=0.125 +2024-08-26 14:49:23,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47082.666666666664, ans=0.1 +2024-08-26 14:49:30,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=47082.666666666664, ans=0.09899494936611666 +2024-08-26 14:49:34,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=47136.0, ans=0.125 +2024-08-26 14:49:35,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=47136.0, ans=0.2 +2024-08-26 14:49:43,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=47189.333333333336, ans=10.0 +2024-08-26 14:50:01,602 INFO [train.py:1114] (1/4) Epoch 4, batch 1400, loss[loss=0.2417, simple_loss=0.2781, pruned_loss=0.07456, ctc_loss=0.1404, over 19673.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3228, pruned_loss=0.08707, ctc_loss=0.1627, over 3863554.18 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0 +2024-08-26 14:50:01,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47296.0, ans=0.1 +2024-08-26 14:50:02,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.99 vs. limit=15.0 +2024-08-26 14:50:49,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.701e+02 1.930e+02 2.137e+02 5.469e+02, threshold=3.859e+02, percent-clipped=2.0 +2024-08-26 14:50:53,061 INFO [train.py:1114] (1/4) Epoch 4, batch 1450, loss[loss=0.3054, simple_loss=0.3469, pruned_loss=0.09723, ctc_loss=0.1734, over 19649.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3234, pruned_loss=0.08718, ctc_loss=0.1629, over 3859954.86 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0 +2024-08-26 14:51:28,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-08-26 14:51:30,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.18 vs. limit=15.0 +2024-08-26 14:51:31,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=47722.666666666664, ans=0.1 +2024-08-26 14:51:32,316 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:51:43,460 INFO [train.py:1114] (1/4) Epoch 4, batch 1500, loss[loss=0.3142, simple_loss=0.3495, pruned_loss=0.1018, ctc_loss=0.1883, over 19579.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3238, pruned_loss=0.08726, ctc_loss=0.1633, over 3860060.18 frames. 
], batch size: 57, lr: 3.14e-02, grad_scale: 32.0 +2024-08-26 14:51:47,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47829.333333333336, ans=0.0 +2024-08-26 14:52:11,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=47936.0, ans=0.2 +2024-08-26 14:52:18,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47989.333333333336, ans=0.125 +2024-08-26 14:52:34,701 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.743e+02 1.956e+02 2.243e+02 3.928e+02, threshold=3.912e+02, percent-clipped=1.0 +2024-08-26 14:52:38,429 INFO [train.py:1114] (1/4) Epoch 4, batch 1550, loss[loss=0.2944, simple_loss=0.3339, pruned_loss=0.09281, ctc_loss=0.173, over 19613.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3241, pruned_loss=0.08776, ctc_loss=0.1642, over 3844874.58 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 32.0 +2024-08-26 14:52:42,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=48096.0, ans=10.0 +2024-08-26 14:52:43,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=48096.0, ans=0.125 +2024-08-26 14:53:21,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=48309.333333333336, ans=0.125 +2024-08-26 14:53:23,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=48309.333333333336, ans=0.125 +2024-08-26 14:53:25,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=48309.333333333336, ans=0.125 +2024-08-26 14:53:29,865 INFO [train.py:1114] (1/4) Epoch 4, batch 1600, loss[loss=0.278, simple_loss=0.3292, pruned_loss=0.08255, ctc_loss=0.154, over 19836.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3236, pruned_loss=0.08776, ctc_loss=0.1639, over 3834288.08 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0 +2024-08-26 14:53:31,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=48362.666666666664, ans=0.125 +2024-08-26 14:53:42,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=48416.0, ans=12.0 +2024-08-26 14:53:42,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=48416.0, ans=0.125 +2024-08-26 14:53:48,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=48469.333333333336, ans=0.2 +2024-08-26 14:54:15,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=48576.0, ans=0.2 +2024-08-26 14:54:18,015 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.701e+02 1.882e+02 2.341e+02 4.982e+02, threshold=3.764e+02, percent-clipped=3.0 +2024-08-26 14:54:21,786 INFO [train.py:1114] (1/4) Epoch 4, batch 1650, loss[loss=0.2992, simple_loss=0.3368, pruned_loss=0.09492, ctc_loss=0.1795, over 19651.00 frames. 
], tot_loss[loss=0.2813, simple_loss=0.3227, pruned_loss=0.08731, ctc_loss=0.1632, over 3829513.74 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0 +2024-08-26 14:54:25,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=48629.333333333336, ans=0.07 +2024-08-26 14:54:40,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=48736.0, ans=0.5 +2024-08-26 14:54:43,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48736.0, ans=0.1 +2024-08-26 14:55:03,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=48789.333333333336, ans=0.0 +2024-08-26 14:55:17,034 INFO [train.py:1114] (1/4) Epoch 4, batch 1700, loss[loss=0.2409, simple_loss=0.2838, pruned_loss=0.07336, ctc_loss=0.1281, over 19694.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3224, pruned_loss=0.08687, ctc_loss=0.1624, over 3843496.50 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0 +2024-08-26 14:55:22,117 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:55:30,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.19 vs. limit=15.0 +2024-08-26 14:55:36,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49002.666666666664, ans=0.125 +2024-08-26 14:55:36,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=49002.666666666664, ans=0.125 +2024-08-26 14:55:45,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49056.0, ans=0.125 +2024-08-26 14:55:52,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=49056.0, ans=0.125 +2024-08-26 14:55:55,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=49109.333333333336, ans=0.00019362318840579658 +2024-08-26 14:55:55,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.91 vs. limit=22.5 +2024-08-26 14:55:56,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.56 vs. 
limit=22.5 +2024-08-26 14:55:57,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49109.333333333336, ans=0.125 +2024-08-26 14:55:59,539 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.770e+02 1.975e+02 2.193e+02 4.882e+02, threshold=3.950e+02, percent-clipped=1.0 +2024-08-26 14:55:59,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=49109.333333333336, ans=0.125 +2024-08-26 14:56:00,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49109.333333333336, ans=0.125 +2024-08-26 14:56:03,227 INFO [train.py:1114] (1/4) Epoch 4, batch 1750, loss[loss=0.2361, simple_loss=0.2814, pruned_loss=0.06967, ctc_loss=0.1286, over 19641.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3219, pruned_loss=0.08659, ctc_loss=0.162, over 3848065.76 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0 +2024-08-26 14:56:19,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=49216.0, ans=0.2 +2024-08-26 14:56:35,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49322.666666666664, ans=0.125 +2024-08-26 14:56:42,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.43 vs. limit=15.0 +2024-08-26 14:56:48,547 INFO [train.py:1114] (1/4) Epoch 4, batch 1800, loss[loss=0.2779, simple_loss=0.326, pruned_loss=0.08356, ctc_loss=0.1565, over 19620.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3221, pruned_loss=0.08685, ctc_loss=0.1623, over 3849760.61 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0 +2024-08-26 14:56:56,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49429.333333333336, ans=0.1 +2024-08-26 14:56:58,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-08-26 14:57:03,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-08-26 14:57:28,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=49642.666666666664, ans=0.125 +2024-08-26 14:57:30,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.664e+02 1.898e+02 2.172e+02 3.982e+02, threshold=3.795e+02, percent-clipped=1.0 +2024-08-26 14:57:31,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=49642.666666666664, ans=0.0 +2024-08-26 14:57:33,984 INFO [train.py:1114] (1/4) Epoch 4, batch 1850, loss[loss=0.3059, simple_loss=0.3405, pruned_loss=0.09873, ctc_loss=0.1845, over 19583.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3218, pruned_loss=0.08668, ctc_loss=0.1619, over 3853486.93 frames. 
], batch size: 57, lr: 3.10e-02, grad_scale: 32.0 +2024-08-26 14:57:52,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.19 vs. limit=15.0 +2024-08-26 14:58:11,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49909.333333333336, ans=0.1 +2024-08-26 14:58:19,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=49909.333333333336, ans=0.125 +2024-08-26 14:58:20,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=49962.666666666664, ans=0.0 +2024-08-26 14:58:21,303 INFO [train.py:1114] (1/4) Epoch 4, batch 1900, loss[loss=0.2698, simple_loss=0.317, pruned_loss=0.07952, ctc_loss=0.1591, over 19639.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3224, pruned_loss=0.08651, ctc_loss=0.1614, over 3858891.13 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 16.0 +2024-08-26 14:58:22,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=49962.666666666664, ans=0.025 +2024-08-26 14:58:42,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=50069.333333333336, ans=0.125 +2024-08-26 14:58:48,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=50122.666666666664, ans=0.0 +2024-08-26 14:58:59,795 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:59:03,248 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.687e+02 1.820e+02 2.228e+02 3.741e+02, threshold=3.639e+02, percent-clipped=0.0 +2024-08-26 14:59:05,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50229.333333333336, ans=0.125 +2024-08-26 14:59:06,127 INFO [train.py:1114] (1/4) Epoch 4, batch 1950, loss[loss=0.2398, simple_loss=0.2959, pruned_loss=0.06618, ctc_loss=0.1285, over 19581.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3236, pruned_loss=0.08664, ctc_loss=0.1618, over 3868493.40 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 16.0 +2024-08-26 14:59:37,449 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:59:53,431 INFO [train.py:1114] (1/4) Epoch 4, batch 2000, loss[loss=0.2458, simple_loss=0.2834, pruned_loss=0.07606, ctc_loss=0.1401, over 19608.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.324, pruned_loss=0.08714, ctc_loss=0.1627, over 3853087.84 frames. 
], batch size: 45, lr: 3.09e-02, grad_scale: 32.0 +2024-08-26 15:00:08,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=50549.333333333336, ans=0.2 +2024-08-26 15:00:12,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=50602.666666666664, ans=0.2 +2024-08-26 15:00:16,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=50602.666666666664, ans=0.07 +2024-08-26 15:00:32,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=50709.333333333336, ans=0.125 +2024-08-26 15:00:33,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=50709.333333333336, ans=0.025 +2024-08-26 15:00:35,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.722e+02 2.023e+02 2.377e+02 8.657e+02, threshold=4.047e+02, percent-clipped=4.0 +2024-08-26 15:00:38,082 INFO [train.py:1114] (1/4) Epoch 4, batch 2050, loss[loss=0.2286, simple_loss=0.2818, pruned_loss=0.06365, ctc_loss=0.1205, over 19738.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3228, pruned_loss=0.0869, ctc_loss=0.1622, over 3850142.93 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 32.0 +2024-08-26 15:00:39,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=50762.666666666664, ans=0.025 +2024-08-26 15:00:40,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=50762.666666666664, ans=12.0 +2024-08-26 15:00:45,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=50762.666666666664, ans=0.2 +2024-08-26 15:00:50,789 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:01:05,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=50922.666666666664, ans=0.0 +2024-08-26 15:01:21,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=51029.333333333336, ans=0.07 +2024-08-26 15:01:22,459 INFO [train.py:1114] (1/4) Epoch 4, batch 2100, loss[loss=0.2692, simple_loss=0.3129, pruned_loss=0.08336, ctc_loss=0.147, over 19799.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3219, pruned_loss=0.08611, ctc_loss=0.1609, over 3857068.53 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0 +2024-08-26 15:01:34,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.50 vs. 
limit=15.0 +2024-08-26 15:01:35,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=51082.666666666664, ans=0.0 +2024-08-26 15:01:38,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=51082.666666666664, ans=0.125 +2024-08-26 15:01:39,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=51136.0, ans=0.0 +2024-08-26 15:01:49,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=51189.333333333336, ans=0.025 +2024-08-26 15:01:55,501 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:02:04,152 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.626e+02 1.780e+02 1.895e+02 2.709e+02, threshold=3.561e+02, percent-clipped=0.0 +2024-08-26 15:02:07,170 INFO [train.py:1114] (1/4) Epoch 4, batch 2150, loss[loss=0.2678, simple_loss=0.3126, pruned_loss=0.0819, ctc_loss=0.1479, over 19577.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3204, pruned_loss=0.08531, ctc_loss=0.1595, over 3867589.37 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0 +2024-08-26 15:02:08,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=51296.0, ans=0.125 +2024-08-26 15:02:10,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51296.0, ans=0.1 +2024-08-26 15:02:13,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=51296.0, ans=0.125 +2024-08-26 15:02:28,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.91 vs. limit=22.5 +2024-08-26 15:02:31,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.35 vs. limit=10.0 +2024-08-26 15:02:41,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=51456.0, ans=0.125 +2024-08-26 15:02:44,046 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:02:44,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=51509.333333333336, ans=0.0 +2024-08-26 15:02:54,358 INFO [train.py:1114] (1/4) Epoch 4, batch 2200, loss[loss=0.2911, simple_loss=0.341, pruned_loss=0.08751, ctc_loss=0.1652, over 19581.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3206, pruned_loss=0.08564, ctc_loss=0.1598, over 3866382.53 frames. 
], batch size: 57, lr: 3.07e-02, grad_scale: 32.0 +2024-08-26 15:03:00,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51562.666666666664, ans=0.125 +2024-08-26 15:03:03,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=51616.0, ans=0.0 +2024-08-26 15:03:04,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=51616.0, ans=0.125 +2024-08-26 15:03:21,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=51722.666666666664, ans=0.125 +2024-08-26 15:03:32,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=51776.0, ans=0.0 +2024-08-26 15:03:36,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.54 vs. limit=22.5 +2024-08-26 15:03:36,542 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.424e+02 1.687e+02 1.993e+02 2.251e+02 9.209e+02, threshold=3.987e+02, percent-clipped=2.0 +2024-08-26 15:03:39,208 INFO [train.py:1114] (1/4) Epoch 4, batch 2250, loss[loss=0.2827, simple_loss=0.3329, pruned_loss=0.08425, ctc_loss=0.16, over 19607.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3207, pruned_loss=0.08589, ctc_loss=0.1603, over 3866626.85 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0 +2024-08-26 15:03:54,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51882.666666666664, ans=0.1 +2024-08-26 15:03:55,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51936.0, ans=0.0 +2024-08-26 15:03:56,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.39 vs. limit=15.0 +2024-08-26 15:04:02,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51936.0, ans=0.125 +2024-08-26 15:04:13,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.70 vs. limit=6.0 +2024-08-26 15:04:18,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=52042.666666666664, ans=0.125 +2024-08-26 15:04:23,334 INFO [train.py:1114] (1/4) Epoch 4, batch 2300, loss[loss=0.2687, simple_loss=0.3106, pruned_loss=0.08111, ctc_loss=0.1617, over 19509.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3203, pruned_loss=0.08606, ctc_loss=0.1607, over 3861051.19 frames. 
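The `WARNING [optim.py:487]` lines print five grad-norm statistics (min, 25%, 50%, 75%, max over recent batches) together with a clipping threshold and the percentage of recently clipped batches. A hedged sketch of that bookkeeping, assuming a sliding window of norms and a threshold of `Clipping_scale` times the median; icefall's `ScaledAdam` applies a more elaborate per-parameter rule:

```python
# Illustrative only: window size and the median-based threshold are
# assumptions that mirror the shape of the logged warnings.
from collections import deque

import torch


class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 100):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)

    def clip_(self, params):
        params = [p for p in params if p.grad is not None]
        norm = torch.sqrt(sum((p.grad ** 2).sum() for p in params))
        self.norms.append(float(norm))
        history = torch.tensor(list(self.norms))
        # the five numbers printed in the WARNING lines above
        quartiles = torch.quantile(history, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * quartiles[2]  # 2.0 x median
        clipped = norm > threshold
        if clipped:
            for p in params:
                p.grad.mul_(threshold / norm)
        return quartiles, float(threshold), bool(clipped)


# usage after loss.backward():
#   stats, thr, was_clipped = clipper.clip_(model.parameters())
```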
], batch size: 49, lr: 3.06e-02, grad_scale: 32.0 +2024-08-26 15:04:24,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=52096.0, ans=0.0 +2024-08-26 15:04:36,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=52149.333333333336, ans=0.125 +2024-08-26 15:04:37,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52149.333333333336, ans=0.0 +2024-08-26 15:04:37,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=52149.333333333336, ans=0.2 +2024-08-26 15:04:39,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=52149.333333333336, ans=10.0 +2024-08-26 15:04:39,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52149.333333333336, ans=0.1 +2024-08-26 15:04:41,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=52202.666666666664, ans=0.125 +2024-08-26 15:04:43,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.08 vs. limit=15.0 +2024-08-26 15:04:49,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=52202.666666666664, ans=0.0 +2024-08-26 15:05:06,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.800e+02 1.978e+02 2.376e+02 5.904e+02, threshold=3.955e+02, percent-clipped=2.0 +2024-08-26 15:05:09,381 INFO [train.py:1114] (1/4) Epoch 4, batch 2350, loss[loss=0.3076, simple_loss=0.3397, pruned_loss=0.1014, ctc_loss=0.182, over 19643.00 frames. ], tot_loss[loss=0.278, simple_loss=0.32, pruned_loss=0.08596, ctc_loss=0.1604, over 3864614.61 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0 +2024-08-26 15:05:10,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.32 vs. 
limit=10.0 +2024-08-26 15:05:16,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=52362.666666666664, ans=0.2 +2024-08-26 15:05:21,703 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:05:25,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=52469.333333333336, ans=0.125 +2024-08-26 15:05:26,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=52469.333333333336, ans=0.125 +2024-08-26 15:05:32,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=52469.333333333336, ans=0.125 +2024-08-26 15:05:41,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=52522.666666666664, ans=0.0 +2024-08-26 15:05:45,188 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:05:50,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.49 vs. limit=22.5 +2024-08-26 15:06:03,786 INFO [train.py:1114] (1/4) Epoch 4, batch 2400, loss[loss=0.3068, simple_loss=0.3446, pruned_loss=0.09733, ctc_loss=0.1861, over 19266.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3225, pruned_loss=0.08696, ctc_loss=0.162, over 3858684.65 frames. ], batch size: 71, lr: 3.05e-02, grad_scale: 32.0 +2024-08-26 15:06:12,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=52682.666666666664, ans=0.125 +2024-08-26 15:06:21,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=52682.666666666664, ans=0.125 +2024-08-26 15:06:24,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=52682.666666666664, ans=0.025 +2024-08-26 15:06:24,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=52682.666666666664, ans=0.0 +2024-08-26 15:06:53,227 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.824e+02 2.127e+02 2.398e+02 5.215e+02, threshold=4.254e+02, percent-clipped=1.0 +2024-08-26 15:06:55,090 INFO [train.py:1114] (1/4) Epoch 4, batch 2450, loss[loss=0.3596, simple_loss=0.361, pruned_loss=0.129, ctc_loss=0.2503, over 13884.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.328, pruned_loss=0.09151, ctc_loss=0.1708, over 3734050.30 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0 +2024-08-26 15:06:56,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.80 vs. 
limit=15.0 +2024-08-26 15:06:57,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=52896.0, ans=0.09899494936611666 +2024-08-26 15:06:58,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=52896.0, ans=10.0 +2024-08-26 15:07:07,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=52949.333333333336, ans=0.125 +2024-08-26 15:07:14,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=53002.666666666664, ans=0.125 +2024-08-26 15:07:19,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53002.666666666664, ans=0.125 +2024-08-26 15:07:22,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=53056.0, ans=0.0 +2024-08-26 15:07:26,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.59 vs. limit=6.0 +2024-08-26 15:09:12,285 INFO [train.py:1114] (1/4) Epoch 5, batch 0, loss[loss=0.277, simple_loss=0.3033, pruned_loss=0.09095, ctc_loss=0.172, over 19837.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3033, pruned_loss=0.09095, ctc_loss=0.172, over 19837.00 frames. ], batch size: 49, lr: 2.83e-02, grad_scale: 32.0 +2024-08-26 15:09:12,286 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 15:09:22,083 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.2289, simple_loss=0.3118, pruned_loss=0.05352, ctc_loss=0.09739, over 944034.00 frames. +2024-08-26 15:09:22,763 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12882MB +2024-08-26 15:09:31,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=53157.333333333336, ans=0.0 +2024-08-26 15:09:38,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=53157.333333333336, ans=0.125 +2024-08-26 15:09:40,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0 +2024-08-26 15:09:54,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=53264.0, ans=0.0 +2024-08-26 15:10:10,888 INFO [train.py:1114] (1/4) Epoch 5, batch 50, loss[loss=0.2388, simple_loss=0.2885, pruned_loss=0.06925, ctc_loss=0.1264, over 19700.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3238, pruned_loss=0.087, ctc_loss=0.1638, over 844585.17 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0 +2024-08-26 15:10:13,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.22 vs. 
limit=10.0 +2024-08-26 15:10:22,332 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.804e+02 2.028e+02 2.297e+02 4.038e+02, threshold=4.056e+02, percent-clipped=0.0 +2024-08-26 15:10:49,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=53530.666666666664, ans=0.125 +2024-08-26 15:10:51,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-08-26 15:10:53,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.50 vs. limit=15.0 +2024-08-26 15:11:00,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=53637.333333333336, ans=0.0 +2024-08-26 15:11:01,263 INFO [train.py:1114] (1/4) Epoch 5, batch 100, loss[loss=0.2432, simple_loss=0.2962, pruned_loss=0.06876, ctc_loss=0.1313, over 19728.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3243, pruned_loss=0.08662, ctc_loss=0.1629, over 1499416.85 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0 +2024-08-26 15:11:17,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=53690.666666666664, ans=0.05 +2024-08-26 15:11:29,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.51 vs. limit=15.0 +2024-08-26 15:11:59,575 INFO [train.py:1114] (1/4) Epoch 5, batch 150, loss[loss=0.2547, simple_loss=0.2939, pruned_loss=0.07962, ctc_loss=0.1408, over 19717.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3212, pruned_loss=0.08505, ctc_loss=0.1594, over 2027995.75 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0 +2024-08-26 15:12:10,015 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.696e+02 1.862e+02 2.172e+02 3.492e+02, threshold=3.724e+02, percent-clipped=0.0 +2024-08-26 15:12:13,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=53957.333333333336, ans=0.2 +2024-08-26 15:12:30,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.04 vs. limit=15.0 +2024-08-26 15:12:44,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54117.333333333336, ans=0.125 +2024-08-26 15:12:48,450 INFO [train.py:1114] (1/4) Epoch 5, batch 200, loss[loss=0.3021, simple_loss=0.3408, pruned_loss=0.09488, ctc_loss=0.1841, over 18206.00 frames. ], tot_loss[loss=0.2748, simple_loss=0.3188, pruned_loss=0.08391, ctc_loss=0.1572, over 2435865.93 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0 +2024-08-26 15:12:53,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54170.666666666664, ans=0.1 +2024-08-26 15:12:58,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.61 vs. limit=22.5 +2024-08-26 15:13:32,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.87 vs. 
limit=22.5 +2024-08-26 15:13:41,970 INFO [train.py:1114] (1/4) Epoch 5, batch 250, loss[loss=0.287, simple_loss=0.3314, pruned_loss=0.08746, ctc_loss=0.1693, over 19452.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3182, pruned_loss=0.08348, ctc_loss=0.156, over 2756215.66 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0 +2024-08-26 15:13:50,504 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.685e+02 1.803e+02 2.078e+02 3.456e+02, threshold=3.607e+02, percent-clipped=0.0 +2024-08-26 15:14:04,008 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:14:18,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54597.333333333336, ans=0.1 +2024-08-26 15:14:32,304 INFO [train.py:1114] (1/4) Epoch 5, batch 300, loss[loss=0.277, simple_loss=0.3292, pruned_loss=0.0819, ctc_loss=0.1525, over 19520.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3171, pruned_loss=0.08318, ctc_loss=0.1553, over 3000581.82 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0 +2024-08-26 15:15:14,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=54917.333333333336, ans=0.025 +2024-08-26 15:15:22,169 INFO [train.py:1114] (1/4) Epoch 5, batch 350, loss[loss=0.2783, simple_loss=0.3109, pruned_loss=0.08899, ctc_loss=0.1693, over 19774.00 frames. ], tot_loss[loss=0.2731, simple_loss=0.3178, pruned_loss=0.0831, ctc_loss=0.1554, over 3190486.70 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 32.0 +2024-08-26 15:15:23,496 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:15:25,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=54970.666666666664, ans=0.125 +2024-08-26 15:15:25,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=54970.666666666664, ans=0.05 +2024-08-26 15:15:29,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=54970.666666666664, ans=0.0 +2024-08-26 15:15:31,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=55024.0, ans=0.025 +2024-08-26 15:15:31,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=55024.0, ans=0.04949747468305833 +2024-08-26 15:15:31,760 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.717e+02 1.933e+02 2.233e+02 3.797e+02, threshold=3.865e+02, percent-clipped=1.0 +2024-08-26 15:15:37,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=55024.0, ans=6.0 +2024-08-26 15:15:38,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.06 vs. 
limit=15.0 +2024-08-26 15:15:45,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=55077.333333333336, ans=0.125 +2024-08-26 15:16:15,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55237.333333333336, ans=0.1 +2024-08-26 15:16:15,652 INFO [train.py:1114] (1/4) Epoch 5, batch 400, loss[loss=0.261, simple_loss=0.3146, pruned_loss=0.07541, ctc_loss=0.1415, over 19505.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3171, pruned_loss=0.08238, ctc_loss=0.1543, over 3341696.15 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0 +2024-08-26 15:16:26,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=55290.666666666664, ans=0.125 +2024-08-26 15:16:37,736 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.30 vs. limit=6.0 +2024-08-26 15:16:39,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-08-26 15:16:41,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=55344.0, ans=0.125 +2024-08-26 15:16:48,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55397.333333333336, ans=0.0 +2024-08-26 15:16:55,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=55450.666666666664, ans=0.125 +2024-08-26 15:17:07,110 INFO [train.py:1114] (1/4) Epoch 5, batch 450, loss[loss=0.2699, simple_loss=0.3213, pruned_loss=0.07934, ctc_loss=0.1497, over 19626.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.317, pruned_loss=0.08243, ctc_loss=0.1542, over 3450532.18 frames. ], batch size: 55, lr: 2.79e-02, grad_scale: 16.0 +2024-08-26 15:17:08,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=55504.0, ans=0.025 +2024-08-26 15:17:17,440 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.642e+02 1.899e+02 2.179e+02 3.523e+02, threshold=3.798e+02, percent-clipped=0.0 +2024-08-26 15:17:28,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55610.666666666664, ans=0.1 +2024-08-26 15:17:36,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=55664.0, ans=0.125 +2024-08-26 15:17:45,506 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:18:04,472 INFO [train.py:1114] (1/4) Epoch 5, batch 500, loss[loss=0.2877, simple_loss=0.3379, pruned_loss=0.08704, ctc_loss=0.1584, over 19671.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3148, pruned_loss=0.08118, ctc_loss=0.1516, over 3545615.28 frames. 
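The `Whitening: ... metric=X vs. limit=Y` entries from scaling.py compare how far a module's activations are from having a white (isotropic) covariance against a per-module limit; a corrective penalty applies only when the metric exceeds the limit, which is why only over-limit cases are logged with "vs." A rough sketch of one such metric, assuming an eigenvalue-ratio form that equals 1.0 for perfectly white features; this approximates the idea, not icefall's exact formula:

```python
# Hedged sketch of a whitening metric: ratio of the mean squared eigenvalue
# of the per-group feature covariance to the squared mean eigenvalue. It is
# 1.0 for white features and grows when a few directions dominate.
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels); channels are split into groups
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n      # (groups, d, d)
    eigs = torch.linalg.eigvalsh(cov)                 # symmetric, real eigenvalues
    metric = (eigs ** 2).mean(dim=1) / eigs.mean(dim=1) ** 2
    return float(metric.mean())


feats = torch.randn(1000, 192)     # white features
print(whitening_metric(feats))     # close to 1.0, far below limits like 15.0
```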
], batch size: 63, lr: 2.79e-02, grad_scale: 16.0 +2024-08-26 15:18:12,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=55770.666666666664, ans=0.0 +2024-08-26 15:18:42,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0 +2024-08-26 15:18:50,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55877.333333333336, ans=0.1 +2024-08-26 15:18:52,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55930.666666666664, ans=0.0 +2024-08-26 15:19:03,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=55930.666666666664, ans=0.5 +2024-08-26 15:19:47,001 INFO [train.py:1114] (1/4) Epoch 5, batch 550, loss[loss=0.2988, simple_loss=0.3436, pruned_loss=0.09273, ctc_loss=0.171, over 19310.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3153, pruned_loss=0.08151, ctc_loss=0.1523, over 3607774.45 frames. ], batch size: 71, lr: 2.78e-02, grad_scale: 16.0 +2024-08-26 15:19:47,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=56037.333333333336, ans=0.035 +2024-08-26 15:20:01,260 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:20:02,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56090.666666666664, ans=0.125 +2024-08-26 15:20:04,964 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.339e+02 1.676e+02 1.860e+02 2.053e+02 4.118e+02, threshold=3.720e+02, percent-clipped=1.0 +2024-08-26 15:20:41,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=56197.333333333336, ans=0.07 +2024-08-26 15:20:47,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.10 vs. limit=15.0 +2024-08-26 15:20:54,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56250.666666666664, ans=0.125 +2024-08-26 15:20:56,217 INFO [train.py:1114] (1/4) Epoch 5, batch 600, loss[loss=0.2743, simple_loss=0.3241, pruned_loss=0.08206, ctc_loss=0.1509, over 19452.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3156, pruned_loss=0.08134, ctc_loss=0.1521, over 3664908.07 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 16.0 +2024-08-26 15:21:01,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.32 vs. 
limit=15.0 +2024-08-26 15:21:14,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56357.333333333336, ans=0.1 +2024-08-26 15:21:25,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56410.666666666664, ans=0.125 +2024-08-26 15:21:27,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=56410.666666666664, ans=0.125 +2024-08-26 15:21:44,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=56517.333333333336, ans=0.125 +2024-08-26 15:21:49,364 INFO [train.py:1114] (1/4) Epoch 5, batch 650, loss[loss=0.2351, simple_loss=0.2949, pruned_loss=0.06268, ctc_loss=0.1251, over 19773.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3145, pruned_loss=0.08045, ctc_loss=0.1505, over 3716151.69 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0 +2024-08-26 15:21:59,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56624.0, ans=0.1 +2024-08-26 15:21:59,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.97 vs. limit=12.0 +2024-08-26 15:21:59,892 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.659e+02 1.803e+02 2.095e+02 3.596e+02, threshold=3.607e+02, percent-clipped=0.0 +2024-08-26 15:22:05,066 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.93 vs. limit=22.5 +2024-08-26 15:22:06,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=56624.0, ans=0.0 +2024-08-26 15:22:19,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=56730.666666666664, ans=0.2 +2024-08-26 15:22:30,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=56784.0, ans=0.125 +2024-08-26 15:22:33,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=56784.0, ans=0.0 +2024-08-26 15:22:39,299 INFO [train.py:1114] (1/4) Epoch 5, batch 700, loss[loss=0.2616, simple_loss=0.3071, pruned_loss=0.07848, ctc_loss=0.1482, over 19703.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3149, pruned_loss=0.08085, ctc_loss=0.1512, over 3748123.76 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 16.0 +2024-08-26 15:22:52,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=56890.666666666664, ans=0.025 +2024-08-26 15:23:03,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56944.0, ans=0.125 +2024-08-26 15:23:09,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=56997.333333333336, ans=0.125 +2024-08-26 15:23:11,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.94 vs. 
limit=22.5 +2024-08-26 15:23:24,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=57050.666666666664, ans=0.0 +2024-08-26 15:23:29,297 INFO [train.py:1114] (1/4) Epoch 5, batch 750, loss[loss=0.2647, simple_loss=0.318, pruned_loss=0.07758, ctc_loss=0.1405, over 19491.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3145, pruned_loss=0.08073, ctc_loss=0.151, over 3774347.55 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0 +2024-08-26 15:23:39,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.74 vs. limit=15.0 +2024-08-26 15:23:39,770 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.732e+02 1.957e+02 2.375e+02 6.184e+02, threshold=3.914e+02, percent-clipped=3.0 +2024-08-26 15:23:41,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=57157.333333333336, ans=22.5 +2024-08-26 15:23:55,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=57210.666666666664, ans=0.125 +2024-08-26 15:24:01,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=57264.0, ans=0.125 +2024-08-26 15:24:19,570 INFO [train.py:1114] (1/4) Epoch 5, batch 800, loss[loss=0.2561, simple_loss=0.2986, pruned_loss=0.07754, ctc_loss=0.146, over 19818.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3142, pruned_loss=0.08061, ctc_loss=0.1506, over 3795939.85 frames. ], batch size: 49, lr: 2.76e-02, grad_scale: 32.0 +2024-08-26 15:24:46,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=57477.333333333336, ans=0.0 +2024-08-26 15:25:10,623 INFO [train.py:1114] (1/4) Epoch 5, batch 850, loss[loss=0.2949, simple_loss=0.3394, pruned_loss=0.09125, ctc_loss=0.1699, over 19645.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3138, pruned_loss=0.08029, ctc_loss=0.15, over 3814806.93 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0 +2024-08-26 15:25:16,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. limit=10.0 +2024-08-26 15:25:24,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.744e+02 1.971e+02 2.331e+02 4.591e+02, threshold=3.942e+02, percent-clipped=1.0 +2024-08-26 15:25:29,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-08-26 15:25:45,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=57797.333333333336, ans=0.125 +2024-08-26 15:25:55,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.68 vs. 
limit=10.0 +2024-08-26 15:25:56,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=57850.666666666664, ans=0.0 +2024-08-26 15:26:01,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57850.666666666664, ans=0.1 +2024-08-26 15:26:07,580 INFO [train.py:1114] (1/4) Epoch 5, batch 900, loss[loss=0.25, simple_loss=0.2967, pruned_loss=0.07383, ctc_loss=0.139, over 19813.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3143, pruned_loss=0.08076, ctc_loss=0.1507, over 3819785.67 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0 +2024-08-26 15:26:12,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57904.0, ans=0.125 +2024-08-26 15:26:18,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.95 vs. limit=12.0 +2024-08-26 15:26:58,239 INFO [train.py:1114] (1/4) Epoch 5, batch 950, loss[loss=0.2583, simple_loss=0.3068, pruned_loss=0.07725, ctc_loss=0.1381, over 19489.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3146, pruned_loss=0.08111, ctc_loss=0.1515, over 3821498.10 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0 +2024-08-26 15:27:02,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58170.666666666664, ans=0.1 +2024-08-26 15:27:05,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=58170.666666666664, ans=0.05 +2024-08-26 15:27:05,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.44 vs. limit=22.5 +2024-08-26 15:27:11,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.648e+02 1.859e+02 2.135e+02 3.098e+02, threshold=3.718e+02, percent-clipped=0.0 +2024-08-26 15:27:13,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.00 vs. limit=15.0 +2024-08-26 15:27:24,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=58277.333333333336, ans=0.025 +2024-08-26 15:27:25,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58277.333333333336, ans=0.125 +2024-08-26 15:27:36,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=58330.666666666664, ans=0.0 +2024-08-26 15:27:46,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=58384.0, ans=0.0 +2024-08-26 15:27:49,899 INFO [train.py:1114] (1/4) Epoch 5, batch 1000, loss[loss=0.2466, simple_loss=0.2918, pruned_loss=0.07306, ctc_loss=0.1381, over 19869.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3152, pruned_loss=0.08137, ctc_loss=0.1522, over 3818493.34 frames. 
], batch size: 52, lr: 2.74e-02, grad_scale: 32.0 +2024-08-26 15:27:58,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=58437.333333333336, ans=0.125 +2024-08-26 15:28:05,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=6.0 +2024-08-26 15:28:40,055 INFO [train.py:1114] (1/4) Epoch 5, batch 1050, loss[loss=0.2594, simple_loss=0.3199, pruned_loss=0.07188, ctc_loss=0.1381, over 19830.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3146, pruned_loss=0.08105, ctc_loss=0.1516, over 3824036.28 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0 +2024-08-26 15:28:42,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=58704.0, ans=0.0 +2024-08-26 15:28:46,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=58704.0, ans=0.125 +2024-08-26 15:28:48,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58704.0, ans=0.125 +2024-08-26 15:28:49,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58757.333333333336, ans=0.1 +2024-08-26 15:28:50,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.680e+02 1.893e+02 2.161e+02 3.731e+02, threshold=3.786e+02, percent-clipped=1.0 +2024-08-26 15:29:22,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-08-26 15:29:29,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=58917.333333333336, ans=0.125 +2024-08-26 15:29:32,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=58917.333333333336, ans=0.125 +2024-08-26 15:29:33,655 INFO [train.py:1114] (1/4) Epoch 5, batch 1100, loss[loss=0.236, simple_loss=0.2906, pruned_loss=0.06478, ctc_loss=0.1293, over 19580.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3141, pruned_loss=0.08055, ctc_loss=0.1509, over 3831028.62 frames. 
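Each `train.py:1114` entry pairs the current batch's loss and frame count with a cumulative `tot_loss[... over N frames]`. The aggregate reads as a frame-weighted running average, so large batches move it more than small ones. A minimal sketch under that reading; train.py's actual `MetricsTracker` adds distributed reduction and more fields:

```python
# Hedged reading of "tot_loss[... over N frames]": a running average of
# batch losses weighted by frame counts. The simplest interpretation only.


class RunningLoss:
    def __init__(self) -> None:
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum += batch_loss * batch_frames
        self.frames += batch_frames

    @property
    def tot_loss(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)


tracker = RunningLoss()
tracker.update(0.2466, 19869.0)  # per-batch numbers in the style of the log
tracker.update(0.2694, 18000.0)
print(f"tot_loss={tracker.tot_loss:.4f} over {tracker.frames:.0f} frames")
```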
], batch size: 52, lr: 2.74e-02, grad_scale: 16.0 +2024-08-26 15:29:43,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=59024.0, ans=0.0 +2024-08-26 15:29:49,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=59024.0, ans=0.0 +2024-08-26 15:30:00,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=59077.333333333336, ans=0.125 +2024-08-26 15:30:11,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=59130.666666666664, ans=0.125 +2024-08-26 15:30:12,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=59130.666666666664, ans=0.125 +2024-08-26 15:30:20,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=59184.0, ans=0.025 +2024-08-26 15:30:24,312 INFO [train.py:1114] (1/4) Epoch 5, batch 1150, loss[loss=0.2502, simple_loss=0.3057, pruned_loss=0.07116, ctc_loss=0.1311, over 19596.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3138, pruned_loss=0.08038, ctc_loss=0.1505, over 3830538.20 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 16.0 +2024-08-26 15:30:26,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=59237.333333333336, ans=0.125 +2024-08-26 15:30:35,929 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.591e+02 1.744e+02 2.042e+02 4.394e+02, threshold=3.489e+02, percent-clipped=2.0 +2024-08-26 15:30:42,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=59290.666666666664, ans=0.0 +2024-08-26 15:30:46,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=59344.0, ans=0.0 +2024-08-26 15:30:58,451 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.50 vs. limit=15.0 +2024-08-26 15:31:15,272 INFO [train.py:1114] (1/4) Epoch 5, batch 1200, loss[loss=0.2737, simple_loss=0.322, pruned_loss=0.08236, ctc_loss=0.1519, over 19832.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3149, pruned_loss=0.08094, ctc_loss=0.1517, over 3824495.26 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0 +2024-08-26 15:31:18,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=59504.0, ans=0.025 +2024-08-26 15:31:19,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.76 vs. limit=15.0 +2024-08-26 15:31:22,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=59504.0, ans=0.025 +2024-08-26 15:31:31,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=59557.333333333336, ans=0.125 +2024-08-26 15:31:33,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.48 vs. 
limit=15.0 +2024-08-26 15:31:36,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=59610.666666666664, ans=0.125 +2024-08-26 15:31:40,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=59610.666666666664, ans=0.0 +2024-08-26 15:31:46,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59664.0, ans=0.125 +2024-08-26 15:32:06,263 INFO [train.py:1114] (1/4) Epoch 5, batch 1250, loss[loss=0.2694, simple_loss=0.3228, pruned_loss=0.07993, ctc_loss=0.1405, over 19512.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3151, pruned_loss=0.0806, ctc_loss=0.1506, over 3842699.00 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0 +2024-08-26 15:32:13,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=59770.666666666664, ans=0.125 +2024-08-26 15:32:18,027 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.635e+02 1.798e+02 2.001e+02 4.301e+02, threshold=3.596e+02, percent-clipped=1.0 +2024-08-26 15:32:26,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.00 vs. limit=15.0 +2024-08-26 15:32:30,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=59877.333333333336, ans=0.0 +2024-08-26 15:32:42,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0 +2024-08-26 15:32:56,451 INFO [train.py:1114] (1/4) Epoch 5, batch 1300, loss[loss=0.2799, simple_loss=0.3231, pruned_loss=0.08508, ctc_loss=0.1662, over 18800.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3135, pruned_loss=0.07957, ctc_loss=0.149, over 3846418.41 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0 +2024-08-26 15:33:00,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=60037.333333333336, ans=0.0 +2024-08-26 15:33:02,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=60037.333333333336, ans=0.025 +2024-08-26 15:33:06,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=60090.666666666664, ans=0.0 +2024-08-26 15:33:07,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=60090.666666666664, ans=0.95 +2024-08-26 15:33:42,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60250.666666666664, ans=0.1 +2024-08-26 15:33:43,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.12 vs. limit=15.0 +2024-08-26 15:33:43,914 INFO [train.py:1114] (1/4) Epoch 5, batch 1350, loss[loss=0.264, simple_loss=0.32, pruned_loss=0.07543, ctc_loss=0.1429, over 19761.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3126, pruned_loss=0.07892, ctc_loss=0.1476, over 3857363.12 frames. 
], batch size: 54, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:33:49,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60304.0, ans=0.0 +2024-08-26 15:33:50,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.33 vs. limit=15.0 +2024-08-26 15:33:54,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.59 vs. limit=22.5 +2024-08-26 15:33:55,382 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.610e+02 1.752e+02 1.989e+02 4.527e+02, threshold=3.503e+02, percent-clipped=1.0 +2024-08-26 15:33:55,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=60357.333333333336, ans=0.125 +2024-08-26 15:34:07,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=60410.666666666664, ans=0.0 +2024-08-26 15:34:20,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.27 vs. limit=22.5 +2024-08-26 15:34:25,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=60517.333333333336, ans=0.0 +2024-08-26 15:34:28,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.59 vs. limit=22.5 +2024-08-26 15:34:32,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=60517.333333333336, ans=0.2 +2024-08-26 15:34:34,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.33 vs. limit=15.0 +2024-08-26 15:34:34,572 INFO [train.py:1114] (1/4) Epoch 5, batch 1400, loss[loss=0.2394, simple_loss=0.2777, pruned_loss=0.07386, ctc_loss=0.1335, over 19679.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3127, pruned_loss=0.07935, ctc_loss=0.1482, over 3863772.33 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:34:53,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60677.333333333336, ans=0.0 +2024-08-26 15:34:54,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60677.333333333336, ans=0.0 +2024-08-26 15:35:20,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=60784.0, ans=0.2 +2024-08-26 15:35:26,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60784.0, ans=0.1 +2024-08-26 15:35:27,703 INFO [train.py:1114] (1/4) Epoch 5, batch 1450, loss[loss=0.2883, simple_loss=0.3342, pruned_loss=0.08802, ctc_loss=0.1657, over 19674.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3138, pruned_loss=0.07992, ctc_loss=0.1493, over 3862302.65 frames. 
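The loss columns decompose into `simple_loss` (full-sum transducer), `pruned_loss` (pruned RNN-T) and `ctc_loss`, as in k2's pruned-transducer recipes. The logged totals are consistent with `loss = 0.5 * simple_loss + pruned_loss + 0.2 * ctc_loss`; the 0.5 and 0.2 weights below are inferred by checking the printed numbers, not read from the recipe's configuration:

```python
# Weights inferred from the logged totals above; treat them as a reading of
# this log, not as the recipe's documented configuration.
def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float,
                  simple_scale: float = 0.5, ctc_scale: float = 0.2) -> float:
    return simple_scale * simple_loss + pruned_loss + ctc_scale * ctc_loss


# tot_loss from "Epoch 5, batch 1450" above, which logs loss=0.2667:
print(combined_loss(simple_loss=0.3138, pruned_loss=0.07992, ctc_loss=0.1493))
# -> 0.26668, matching the logged total to rounding
```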
], batch size: 63, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:35:42,506 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.680e+02 1.820e+02 2.123e+02 3.172e+02, threshold=3.639e+02, percent-clipped=0.0 +2024-08-26 15:35:51,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=60944.0, ans=0.2 +2024-08-26 15:35:51,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.15 vs. limit=15.0 +2024-08-26 15:35:54,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=60944.0, ans=0.125 +2024-08-26 15:36:11,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=61050.666666666664, ans=0.0 +2024-08-26 15:36:15,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=61050.666666666664, ans=0.025 +2024-08-26 15:36:19,829 INFO [train.py:1114] (1/4) Epoch 5, batch 1500, loss[loss=0.286, simple_loss=0.3381, pruned_loss=0.0854, ctc_loss=0.1579, over 19569.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3142, pruned_loss=0.07998, ctc_loss=0.1497, over 3862689.19 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0 +2024-08-26 15:36:41,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=61210.666666666664, ans=0.125 +2024-08-26 15:36:42,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. limit=15.0 +2024-08-26 15:37:01,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.65 vs. limit=15.0 +2024-08-26 15:37:09,981 INFO [train.py:1114] (1/4) Epoch 5, batch 1550, loss[loss=0.2842, simple_loss=0.3311, pruned_loss=0.08785, ctc_loss=0.1539, over 19639.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3144, pruned_loss=0.08029, ctc_loss=0.1503, over 3847007.99 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0 +2024-08-26 15:37:20,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=61424.0, ans=0.07 +2024-08-26 15:37:22,482 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.752e+02 1.975e+02 2.269e+02 3.644e+02, threshold=3.951e+02, percent-clipped=1.0 +2024-08-26 15:37:24,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61424.0, ans=0.1 +2024-08-26 15:37:33,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=61477.333333333336, ans=0.125 +2024-08-26 15:37:53,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=61584.0, ans=0.09899494936611666 +2024-08-26 15:38:03,692 INFO [train.py:1114] (1/4) Epoch 5, batch 1600, loss[loss=0.2896, simple_loss=0.3394, pruned_loss=0.08814, ctc_loss=0.159, over 19845.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3145, pruned_loss=0.08058, ctc_loss=0.1506, over 3836328.59 frames. 
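Within an epoch the learning rate decays smoothly with batch count (3.09e-02 down to 3.05e-02 across epoch 4), and it also steps down at the epoch boundary (2.83e-02 at the start of epoch 5), the signature of a scheduler driven by both step and epoch, such as icefall's Eden. A sketch of that functional form; `base_lr`, `lr_batches` and `lr_epochs` below are placeholder constants, not values recovered from this run:

```python
# Hedged sketch of an Eden-style schedule: one factor shrinks with the step
# count, another with the epoch index, so the lr both decays within an epoch
# and drops at epoch boundaries, as seen in the log above.
def eden_lr(base_lr: float, step: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    step_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * step_factor * epoch_factor


print(eden_lr(base_lr=0.045, step=12000, epoch=4))
print(eden_lr(base_lr=0.045, step=13000, epoch=5))  # smaller: both factors shrink
```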
], batch size: 57, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:38:07,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.74 vs. limit=10.0 +2024-08-26 15:38:35,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=15.0 +2024-08-26 15:39:04,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.49 vs. limit=15.0 +2024-08-26 15:39:19,228 INFO [train.py:1114] (1/4) Epoch 5, batch 1650, loss[loss=0.2817, simple_loss=0.324, pruned_loss=0.08806, ctc_loss=0.1581, over 19665.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3141, pruned_loss=0.08042, ctc_loss=0.1505, over 3833090.56 frames. ], batch size: 59, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:39:19,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61904.0, ans=0.1 +2024-08-26 15:39:21,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61904.0, ans=0.1 +2024-08-26 15:39:31,759 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.578e+02 1.738e+02 2.103e+02 3.628e+02, threshold=3.475e+02, percent-clipped=0.0 +2024-08-26 15:39:56,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62117.333333333336, ans=0.125 +2024-08-26 15:39:58,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=62117.333333333336, ans=0.2 +2024-08-26 15:40:08,739 INFO [train.py:1114] (1/4) Epoch 5, batch 1700, loss[loss=0.2264, simple_loss=0.2649, pruned_loss=0.06818, ctc_loss=0.1288, over 19689.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3134, pruned_loss=0.07993, ctc_loss=0.1494, over 3847837.05 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:40:09,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62170.666666666664, ans=0.0 +2024-08-26 15:40:21,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.77 vs. limit=22.5 +2024-08-26 15:40:37,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=62330.666666666664, ans=10.0 +2024-08-26 15:40:37,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=62330.666666666664, ans=0.05 +2024-08-26 15:40:40,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62330.666666666664, ans=0.1 +2024-08-26 15:40:46,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=62384.0, ans=0.125 +2024-08-26 15:40:54,055 INFO [train.py:1114] (1/4) Epoch 5, batch 1750, loss[loss=0.2406, simple_loss=0.281, pruned_loss=0.07332, ctc_loss=0.1338, over 19702.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3128, pruned_loss=0.07949, ctc_loss=0.1487, over 3852457.68 frames. 
], batch size: 45, lr: 2.68e-02, grad_scale: 32.0
+2024-08-26 15:40:55,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=62437.333333333336, ans=0.09899494936611666
+2024-08-26 15:41:04,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=62490.666666666664, ans=0.95
+2024-08-26 15:41:05,739 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.599e+02 1.842e+02 2.097e+02 3.191e+02, threshold=3.683e+02, percent-clipped=0.0
+2024-08-26 15:41:08,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=62490.666666666664, ans=0.05
+2024-08-26 15:41:23,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62597.333333333336, ans=0.125
+2024-08-26 15:41:23,722 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.85 vs. limit=22.5
+2024-08-26 15:41:24,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=62597.333333333336, ans=0.0
+2024-08-26 15:41:39,324 INFO [train.py:1114] (1/4) Epoch 5, batch 1800, loss[loss=0.2743, simple_loss=0.3276, pruned_loss=0.08046, ctc_loss=0.1501, over 19609.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3131, pruned_loss=0.0798, ctc_loss=0.1493, over 3853524.62 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0
+2024-08-26 15:41:42,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=62704.0, ans=0.07
+2024-08-26 15:41:46,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.28 vs. limit=12.0
+2024-08-26 15:41:48,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=62757.333333333336, ans=0.0
+2024-08-26 15:41:52,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=62757.333333333336, ans=0.125
+2024-08-26 15:42:18,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=62917.333333333336, ans=0.2
+2024-08-26 15:42:24,239 INFO [train.py:1114] (1/4) Epoch 5, batch 1850, loss[loss=0.286, simple_loss=0.3323, pruned_loss=0.08844, ctc_loss=0.157, over 19587.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3125, pruned_loss=0.07911, ctc_loss=0.1479, over 3856995.29 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 32.0
+2024-08-26 15:42:25,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=62970.666666666664, ans=0.2
+2024-08-26 15:42:29,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=62970.666666666664, ans=0.0
+2024-08-26 15:42:29,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62970.666666666664, ans=0.125
+2024-08-26 15:42:30,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=62970.666666666664, ans=0.025
+2024-08-26 15:42:35,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=63024.0, ans=0.2
+2024-08-26 15:42:35,850 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.605e+02 1.818e+02 2.016e+02 3.945e+02, threshold=3.637e+02, percent-clipped=1.0
+2024-08-26 15:42:42,021 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:42:51,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=63077.333333333336, ans=0.0
+2024-08-26 15:42:51,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.18 vs. limit=6.0
+2024-08-26 15:42:52,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=63077.333333333336, ans=0.125
+2024-08-26 15:42:52,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.88 vs. limit=15.0
+2024-08-26 15:43:03,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.94 vs. limit=22.5
+2024-08-26 15:43:16,395 INFO [train.py:1114] (1/4) Epoch 5, batch 1900, loss[loss=0.2599, simple_loss=0.317, pruned_loss=0.07247, ctc_loss=0.1447, over 19659.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3126, pruned_loss=0.07898, ctc_loss=0.1478, over 3862274.50 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 16.0
+2024-08-26 15:43:16,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63237.333333333336, ans=0.1
+2024-08-26 15:43:20,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63237.333333333336, ans=0.1
+2024-08-26 15:43:34,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=63290.666666666664, ans=0.125
+2024-08-26 15:43:37,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63290.666666666664, ans=0.125
+2024-08-26 15:43:37,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=63290.666666666664, ans=0.125
+2024-08-26 15:43:37,216 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:43:46,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63397.333333333336, ans=0.125
+2024-08-26 15:43:53,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=63397.333333333336, ans=0.125
+2024-08-26 15:44:05,666 INFO [train.py:1114] (1/4) Epoch 5, batch 1950, loss[loss=0.2485, simple_loss=0.3031, pruned_loss=0.06944, ctc_loss=0.1374, over 19601.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3142, pruned_loss=0.07914, ctc_loss=0.1482, over 3870831.22 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 16.0
+2024-08-26 15:44:05,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63504.0, ans=0.1
+2024-08-26 15:44:05,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=63504.0, ans=0.125
+2024-08-26 15:44:06,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0
+2024-08-26 15:44:08,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=63504.0, ans=0.125
+2024-08-26 15:44:11,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63504.0, ans=0.1
+2024-08-26 15:44:20,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.677e+02 1.824e+02 1.963e+02 3.212e+02, threshold=3.647e+02, percent-clipped=0.0
+2024-08-26 15:44:21,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.90 vs. limit=10.0
+2024-08-26 15:44:32,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=63610.666666666664, ans=0.125
+2024-08-26 15:44:32,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=63610.666666666664, ans=0.125
+2024-08-26 15:44:34,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=63664.0, ans=0.0
+2024-08-26 15:44:43,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63717.333333333336, ans=0.1
+2024-08-26 15:44:46,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=63717.333333333336, ans=0.125
+2024-08-26 15:44:51,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.54 vs. limit=15.0
+2024-08-26 15:44:52,291 INFO [train.py:1114] (1/4) Epoch 5, batch 2000, loss[loss=0.2454, simple_loss=0.2817, pruned_loss=0.07688, ctc_loss=0.1385, over 19666.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3152, pruned_loss=0.08, ctc_loss=0.1494, over 3854746.18 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0
+2024-08-26 15:45:09,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=63877.333333333336, ans=0.125
+2024-08-26 15:45:13,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=63877.333333333336, ans=0.2
+2024-08-26 15:45:21,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=63930.666666666664, ans=0.125
+2024-08-26 15:45:27,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=63984.0, ans=0.0
+2024-08-26 15:45:39,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=63984.0, ans=0.0
+2024-08-26 15:45:40,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=63984.0, ans=0.0
+2024-08-26 15:45:42,246 INFO [train.py:1114] (1/4) Epoch 5, batch 2050, loss[loss=0.2193, simple_loss=0.2748, pruned_loss=0.05943, ctc_loss=0.1124, over 19741.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3141, pruned_loss=0.07979, ctc_loss=0.1489, over 3850981.22 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 32.0
+2024-08-26 15:45:54,616 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.624e+02 1.773e+02 2.077e+02 3.322e+02, threshold=3.546e+02, percent-clipped=0.0
+2024-08-26 15:45:56,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.74 vs. limit=15.0
+2024-08-26 15:45:58,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=64090.666666666664, ans=0.0
+2024-08-26 15:46:08,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0
+2024-08-26 15:46:15,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=64197.333333333336, ans=0.95
+2024-08-26 15:46:18,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=64250.666666666664, ans=0.0
+2024-08-26 15:46:26,362 INFO [train.py:1114] (1/4) Epoch 5, batch 2100, loss[loss=0.2586, simple_loss=0.3092, pruned_loss=0.07582, ctc_loss=0.1411, over 19764.00 frames. ], tot_loss[loss=0.265, simple_loss=0.313, pruned_loss=0.07901, ctc_loss=0.1475, over 3858018.39 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0
+2024-08-26 15:46:41,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64357.333333333336, ans=0.1
+2024-08-26 15:46:43,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=64410.666666666664, ans=0.125
+2024-08-26 15:46:58,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=64464.0, ans=0.2
+2024-08-26 15:47:13,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=64517.333333333336, ans=0.0
+2024-08-26 15:47:17,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=64517.333333333336, ans=0.2
+2024-08-26 15:47:23,494 INFO [train.py:1114] (1/4) Epoch 5, batch 2150, loss[loss=0.2387, simple_loss=0.2898, pruned_loss=0.06833, ctc_loss=0.1273, over 19602.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3125, pruned_loss=0.07902, ctc_loss=0.1475, over 3868961.66 frames. ], batch size: 52, lr: 2.65e-02, grad_scale: 32.0
+2024-08-26 15:47:35,823 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.599e+02 1.757e+02 2.074e+02 2.995e+02, threshold=3.513e+02, percent-clipped=0.0
+2024-08-26 15:47:44,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=64677.333333333336, ans=0.125
+2024-08-26 15:47:45,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=64677.333333333336, ans=0.0
+2024-08-26 15:47:51,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64730.666666666664, ans=0.1
+2024-08-26 15:48:04,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=64784.0, ans=0.125
+2024-08-26 15:48:07,237 INFO [train.py:1114] (1/4) Epoch 5, batch 2200, loss[loss=0.2734, simple_loss=0.3178, pruned_loss=0.08288, ctc_loss=0.158, over 19577.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3122, pruned_loss=0.07873, ctc_loss=0.1473, over 3867474.71 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0
+2024-08-26 15:48:33,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0
+2024-08-26 15:48:52,500 INFO [train.py:1114] (1/4) Epoch 5, batch 2250, loss[loss=0.2778, simple_loss=0.3361, pruned_loss=0.08002, ctc_loss=0.1484, over 19617.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3118, pruned_loss=0.07826, ctc_loss=0.1465, over 3868277.11 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 16.0
+2024-08-26 15:48:59,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=65104.0, ans=0.0
+2024-08-26 15:49:05,744 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.721e+02 2.056e+02 2.448e+02 6.138e+02, threshold=4.112e+02, percent-clipped=3.0
+2024-08-26 15:49:19,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=65264.0, ans=0.2
+2024-08-26 15:49:22,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=65264.0, ans=0.2
+2024-08-26 15:49:36,470 INFO [train.py:1114] (1/4) Epoch 5, batch 2300, loss[loss=0.2401, simple_loss=0.296, pruned_loss=0.06724, ctc_loss=0.1245, over 19508.00 frames. ], tot_loss[loss=0.263, simple_loss=0.311, pruned_loss=0.07823, ctc_loss=0.1465, over 3861687.49 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 16.0
+2024-08-26 15:49:39,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=65370.666666666664, ans=0.07
+2024-08-26 15:49:52,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.47 vs. limit=22.5
+2024-08-26 15:50:03,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=65477.333333333336, ans=0.0
+2024-08-26 15:50:22,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65637.33333333333, ans=0.125
+2024-08-26 15:50:23,049 INFO [train.py:1114] (1/4) Epoch 5, batch 2350, loss[loss=0.2974, simple_loss=0.3414, pruned_loss=0.09279, ctc_loss=0.1697, over 19686.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3107, pruned_loss=0.07819, ctc_loss=0.1464, over 3864002.50 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 16.0
+2024-08-26 15:50:26,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=65637.33333333333, ans=0.025
+2024-08-26 15:50:26,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.02 vs. limit=15.0
+2024-08-26 15:50:36,067 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.568e+02 1.781e+02 2.033e+02 3.218e+02, threshold=3.561e+02, percent-clipped=0.0
+2024-08-26 15:50:39,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=65690.66666666667, ans=0.2
+2024-08-26 15:50:50,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=65797.33333333333, ans=0.125
+2024-08-26 15:50:56,841 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=12.0
+2024-08-26 15:51:02,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=65850.66666666667, ans=0.0
+2024-08-26 15:51:04,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.69 vs. limit=22.5
+2024-08-26 15:51:07,085 INFO [train.py:1114] (1/4) Epoch 5, batch 2400, loss[loss=0.277, simple_loss=0.3242, pruned_loss=0.08332, ctc_loss=0.1577, over 19270.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3137, pruned_loss=0.07978, ctc_loss=0.1489, over 3858281.04 frames. ], batch size: 71, lr: 2.63e-02, grad_scale: 32.0
+2024-08-26 15:51:08,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=65904.0, ans=0.125
+2024-08-26 15:51:24,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0
+2024-08-26 15:51:28,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66010.66666666667, ans=0.125
+2024-08-26 15:51:52,370 INFO [train.py:1114] (1/4) Epoch 5, batch 2450, loss[loss=0.3543, simple_loss=0.3519, pruned_loss=0.1296, ctc_loss=0.2435, over 13139.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.3188, pruned_loss=0.0843, ctc_loss=0.1572, over 3727883.52 frames. ], batch size: 140, lr: 2.63e-02, grad_scale: 16.0
+2024-08-26 15:52:03,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=66224.0, ans=0.125
+2024-08-26 15:52:07,310 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.716e+02 1.912e+02 2.213e+02 5.978e+02, threshold=3.825e+02, percent-clipped=3.0
+2024-08-26 15:52:09,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66224.0, ans=0.1
+2024-08-26 15:52:14,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=66277.33333333333, ans=0.0
+2024-08-26 15:53:42,766 INFO [train.py:1114] (1/4) Epoch 6, batch 0, loss[loss=0.2699, simple_loss=0.3135, pruned_loss=0.08284, ctc_loss=0.1514, over 19399.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3135, pruned_loss=0.08284, ctc_loss=0.1514, over 19399.00 frames. ], batch size: 48, lr: 2.45e-02, grad_scale: 32.0
+2024-08-26 15:53:42,767 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-26 15:54:26,133 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.2162, simple_loss=0.3022, pruned_loss=0.04785, ctc_loss=0.08613, over 944034.00 frames.
+2024-08-26 15:54:26,134 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12882MB
+2024-08-26 15:54:29,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=66378.66666666667, ans=0.125
+2024-08-26 15:54:42,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=66432.0, ans=15.0
+2024-08-26 15:54:45,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.10 vs. limit=22.5
+2024-08-26 15:54:50,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66485.33333333333, ans=0.1
+2024-08-26 15:54:50,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66485.33333333333, ans=0.125
+2024-08-26 15:54:56,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=66538.66666666667, ans=0.1
+2024-08-26 15:55:03,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66592.0, ans=0.1
+2024-08-26 15:55:04,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=66592.0, ans=0.125
+2024-08-26 15:55:06,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=66592.0, ans=0.125
+2024-08-26 15:55:07,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=66592.0, ans=0.2
+2024-08-26 15:55:13,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.15 vs. limit=15.0
+2024-08-26 15:55:13,585 INFO [train.py:1114] (1/4) Epoch 6, batch 50, loss[loss=0.2243, simple_loss=0.2766, pruned_loss=0.06262, ctc_loss=0.1168, over 19715.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3164, pruned_loss=0.08069, ctc_loss=0.1521, over 844837.63 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0
+2024-08-26 15:55:31,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66752.0, ans=0.1
+2024-08-26 15:55:31,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=66752.0, ans=0.04949747468305833
+2024-08-26 15:55:39,174 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.607e+02 1.759e+02 1.997e+02 3.496e+02, threshold=3.518e+02, percent-clipped=0.0
+2024-08-26 15:55:39,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66752.0, ans=0.0
+2024-08-26 15:55:43,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66805.33333333333, ans=0.125
+2024-08-26 15:55:49,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=66805.33333333333, ans=0.125
+2024-08-26 15:55:50,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.57 vs. limit=15.0
+2024-08-26 15:55:57,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.37 vs. limit=15.0
+2024-08-26 15:55:57,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66858.66666666667, ans=0.1
+2024-08-26 15:56:03,173 INFO [train.py:1114] (1/4) Epoch 6, batch 100, loss[loss=0.2585, simple_loss=0.3054, pruned_loss=0.07696, ctc_loss=0.1442, over 19702.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3152, pruned_loss=0.07942, ctc_loss=0.1491, over 1498587.70 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0
+2024-08-26 15:56:04,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.49 vs. limit=22.5
+2024-08-26 15:56:30,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0
+2024-08-26 15:56:53,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0
+2024-08-26 15:56:55,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=67125.33333333333, ans=0.09899494936611666
+2024-08-26 15:56:56,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=67178.66666666667, ans=0.125
+2024-08-26 15:56:57,270 INFO [train.py:1114] (1/4) Epoch 6, batch 150, loss[loss=0.244, simple_loss=0.283, pruned_loss=0.07526, ctc_loss=0.1365, over 19679.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3116, pruned_loss=0.07736, ctc_loss=0.145, over 2027875.39 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0
+2024-08-26 15:57:01,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67178.66666666667, ans=0.1
+2024-08-26 15:57:20,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=15.0
+2024-08-26 15:57:22,726 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.333e+02 1.584e+02 1.709e+02 1.986e+02 2.973e+02, threshold=3.418e+02, percent-clipped=0.0
+2024-08-26 15:57:39,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67392.0, ans=0.125
+2024-08-26 15:57:44,431 INFO [train.py:1114] (1/4) Epoch 6, batch 200, loss[loss=0.2494, simple_loss=0.302, pruned_loss=0.07056, ctc_loss=0.1392, over 18471.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3097, pruned_loss=0.07661, ctc_loss=0.1436, over 2436571.14 frames. ], batch size: 86, lr: 2.43e-02, grad_scale: 32.0
+2024-08-26 15:57:45,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.16 vs. limit=10.0
+2024-08-26 15:57:45,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67445.33333333333, ans=0.1
+2024-08-26 15:57:51,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. limit=15.0
+2024-08-26 15:58:07,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=67552.0, ans=0.0
+2024-08-26 15:58:08,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=67552.0, ans=0.0
+2024-08-26 15:58:09,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.15 vs. limit=22.5
+2024-08-26 15:58:24,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=67605.33333333333, ans=0.125
+2024-08-26 15:58:26,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0
+2024-08-26 15:58:31,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=67658.66666666667, ans=0.025
+2024-08-26 15:58:33,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=67658.66666666667, ans=0.125
+2024-08-26 15:58:36,021 INFO [train.py:1114] (1/4) Epoch 6, batch 250, loss[loss=0.2665, simple_loss=0.3155, pruned_loss=0.07845, ctc_loss=0.1512, over 19431.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3088, pruned_loss=0.07617, ctc_loss=0.1426, over 2756537.75 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0
+2024-08-26 15:58:41,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=67712.0, ans=0.0
+2024-08-26 15:59:05,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0
+2024-08-26 15:59:09,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0
+2024-08-26 15:59:10,415 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.703e+02 1.915e+02 3.590e+02, threshold=3.407e+02, percent-clipped=1.0
+2024-08-26 15:59:10,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.49 vs. limit=15.0
+2024-08-26 15:59:16,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=67872.0, ans=0.125
+2024-08-26 15:59:18,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=67872.0, ans=0.5
+2024-08-26 15:59:20,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67872.0, ans=0.1
+2024-08-26 15:59:35,248 INFO [train.py:1114] (1/4) Epoch 6, batch 300, loss[loss=0.2653, simple_loss=0.3153, pruned_loss=0.07827, ctc_loss=0.147, over 19497.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3078, pruned_loss=0.07554, ctc_loss=0.1411, over 3001602.54 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0
+2024-08-26 15:59:40,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67978.66666666667, ans=0.125
+2024-08-26 15:59:48,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=68032.0, ans=0.125
+2024-08-26 16:00:08,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=68138.66666666667, ans=0.2
+2024-08-26 16:00:13,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=68138.66666666667, ans=0.5
+2024-08-26 16:00:21,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=68192.0, ans=0.0
+2024-08-26 16:00:24,075 INFO [train.py:1114] (1/4) Epoch 6, batch 350, loss[loss=0.229, simple_loss=0.2749, pruned_loss=0.06749, ctc_loss=0.1207, over 19761.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3087, pruned_loss=0.07577, ctc_loss=0.1418, over 3191085.11 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0
+2024-08-26 16:00:25,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=68245.33333333333, ans=0.125
+2024-08-26 16:00:27,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.62 vs. limit=10.0
+2024-08-26 16:00:29,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=68245.33333333333, ans=0.125
+2024-08-26 16:00:42,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=68352.0, ans=0.125
+2024-08-26 16:00:49,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.625e+02 1.872e+02 2.224e+02 3.924e+02, threshold=3.744e+02, percent-clipped=2.0
+2024-08-26 16:00:53,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68405.33333333333, ans=0.125
+2024-08-26 16:00:54,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=15.0
+2024-08-26 16:00:59,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=68405.33333333333, ans=0.125
+2024-08-26 16:01:00,268 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:01:11,323 INFO [train.py:1114] (1/4) Epoch 6, batch 400, loss[loss=0.2611, simple_loss=0.3171, pruned_loss=0.0739, ctc_loss=0.1432, over 19485.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.308, pruned_loss=0.07531, ctc_loss=0.141, over 3342801.21 frames. ], batch size: 54, lr: 2.42e-02, grad_scale: 32.0
+2024-08-26 16:01:14,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=68512.0, ans=0.125
+2024-08-26 16:01:24,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=68565.33333333333, ans=0.0
+2024-08-26 16:01:35,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=68618.66666666667, ans=0.125
+2024-08-26 16:01:54,298 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:01:59,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=68725.33333333333, ans=0.0
+2024-08-26 16:02:07,097 INFO [train.py:1114] (1/4) Epoch 6, batch 450, loss[loss=0.2436, simple_loss=0.2999, pruned_loss=0.06764, ctc_loss=0.1298, over 19624.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.308, pruned_loss=0.07526, ctc_loss=0.1408, over 3451744.00 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0
+2024-08-26 16:02:15,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=68832.0, ans=0.2
+2024-08-26 16:02:34,034 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.611e+02 1.799e+02 2.140e+02 4.925e+02, threshold=3.597e+02, percent-clipped=1.0
+2024-08-26 16:02:48,519 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:02:49,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=15.0
+2024-08-26 16:02:55,665 INFO [train.py:1114] (1/4) Epoch 6, batch 500, loss[loss=0.2778, simple_loss=0.3224, pruned_loss=0.08476, ctc_loss=0.1592, over 19656.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3073, pruned_loss=0.07475, ctc_loss=0.14, over 3546592.17 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0
+2024-08-26 16:02:57,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0
+2024-08-26 16:02:58,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=69045.33333333333, ans=0.125
+2024-08-26 16:02:59,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=69045.33333333333, ans=0.5
+2024-08-26 16:03:09,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69098.66666666667, ans=0.1
+2024-08-26 16:03:12,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=69098.66666666667, ans=0.125
+2024-08-26 16:03:25,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.31 vs. limit=15.0
+2024-08-26 16:03:43,064 INFO [train.py:1114] (1/4) Epoch 6, batch 550, loss[loss=0.2799, simple_loss=0.3275, pruned_loss=0.08357, ctc_loss=0.1632, over 19238.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3072, pruned_loss=0.07482, ctc_loss=0.1401, over 3609361.26 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0
+2024-08-26 16:03:56,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=69365.33333333333, ans=0.95
+2024-08-26 16:04:06,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=15.0
+2024-08-26 16:04:08,881 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.633e+02 1.875e+02 2.080e+02 6.681e+02, threshold=3.749e+02, percent-clipped=3.0
+2024-08-26 16:04:21,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=69525.33333333333, ans=0.0
+2024-08-26 16:04:30,171 INFO [train.py:1114] (1/4) Epoch 6, batch 600, loss[loss=0.2531, simple_loss=0.3146, pruned_loss=0.06978, ctc_loss=0.13, over 19413.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3069, pruned_loss=0.07432, ctc_loss=0.1389, over 3666238.52 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0
+2024-08-26 16:04:33,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0
+2024-08-26 16:04:39,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=69578.66666666667, ans=0.2
+2024-08-26 16:04:55,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=69632.0, ans=0.2
+2024-08-26 16:04:56,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=69685.33333333333, ans=0.0
+2024-08-26 16:05:07,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=69738.66666666667, ans=0.125
+2024-08-26 16:05:09,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=69738.66666666667, ans=0.125
+2024-08-26 16:05:09,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=69738.66666666667, ans=0.5
+2024-08-26 16:05:24,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=69792.0, ans=0.125
+2024-08-26 16:05:25,780 INFO [train.py:1114] (1/4) Epoch 6, batch 650, loss[loss=0.2474, simple_loss=0.2992, pruned_loss=0.07133, ctc_loss=0.1323, over 19769.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3051, pruned_loss=0.0732, ctc_loss=0.1371, over 3717015.54 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0
+2024-08-26 16:05:26,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=69845.33333333333, ans=0.125
+2024-08-26 16:05:49,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69952.0, ans=0.125
+2024-08-26 16:05:50,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=69952.0, ans=0.0
+2024-08-26 16:05:53,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.596e+02 1.734e+02 1.974e+02 3.978e+02, threshold=3.467e+02, percent-clipped=1.0
+2024-08-26 16:06:02,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=70005.33333333333, ans=0.5
+2024-08-26 16:06:08,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=70058.66666666667, ans=0.0
+2024-08-26 16:06:15,338 INFO [train.py:1114] (1/4) Epoch 6, batch 700, loss[loss=0.2458, simple_loss=0.2959, pruned_loss=0.07, ctc_loss=0.139, over 19712.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3053, pruned_loss=0.0734, ctc_loss=0.1374, over 3749297.31 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 16.0
+2024-08-26 16:06:33,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.10 vs. limit=12.0
+2024-08-26 16:06:41,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=70218.66666666667, ans=0.2
+2024-08-26 16:06:54,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=70325.33333333333, ans=0.125
+2024-08-26 16:06:55,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70325.33333333333, ans=0.125
+2024-08-26 16:07:02,325 INFO [train.py:1114] (1/4) Epoch 6, batch 750, loss[loss=0.2681, simple_loss=0.3183, pruned_loss=0.07958, ctc_loss=0.1468, over 19507.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3056, pruned_loss=0.07353, ctc_loss=0.1377, over 3775115.39 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 16.0
+2024-08-26 16:07:03,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70378.66666666667, ans=0.1
+2024-08-26 16:07:24,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=15.0
+2024-08-26 16:07:33,074 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.662e+02 1.845e+02 2.236e+02 2.956e+02, threshold=3.689e+02, percent-clipped=0.0
+2024-08-26 16:07:36,150 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:08:08,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=70592.0, ans=0.0
+2024-08-26 16:08:25,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=70645.33333333333, ans=0.0
+2024-08-26 16:08:25,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=70645.33333333333, ans=0.0
+2024-08-26 16:08:25,914 INFO [train.py:1114] (1/4) Epoch 6, batch 800, loss[loss=0.2244, simple_loss=0.2773, pruned_loss=0.06275, ctc_loss=0.1152, over 19422.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3057, pruned_loss=0.07379, ctc_loss=0.1379, over 3795598.18 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 32.0
+2024-08-26 16:08:26,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=70645.33333333333, ans=0.025
+2024-08-26 16:08:39,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=70698.66666666667, ans=0.09899494936611666
+2024-08-26 16:09:25,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.65 vs. limit=15.0
+2024-08-26 16:09:32,230 INFO [train.py:1114] (1/4) Epoch 6, batch 850, loss[loss=0.2796, simple_loss=0.3389, pruned_loss=0.0789, ctc_loss=0.1561, over 19650.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3056, pruned_loss=0.07378, ctc_loss=0.1378, over 3815002.64 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0
+2024-08-26 16:09:33,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=70912.0, ans=0.0
+2024-08-26 16:09:58,790 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.315e+02 1.558e+02 1.696e+02 1.888e+02 5.151e+02, threshold=3.391e+02, percent-clipped=1.0
+2024-08-26 16:10:00,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=71072.0, ans=0.0
+2024-08-26 16:10:03,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=71072.0, ans=0.2
+2024-08-26 16:10:05,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=71072.0, ans=0.0
+2024-08-26 16:10:26,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.05 vs. limit=10.0
+2024-08-26 16:10:28,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=71125.33333333333, ans=0.05
+2024-08-26 16:10:35,827 INFO [train.py:1114] (1/4) Epoch 6, batch 900, loss[loss=0.2402, simple_loss=0.2956, pruned_loss=0.06816, ctc_loss=0.121, over 19409.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3064, pruned_loss=0.07441, ctc_loss=0.1389, over 3819354.15 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 32.0
+2024-08-26 16:10:45,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71232.0, ans=0.1
+2024-08-26 16:10:50,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.75 vs. limit=12.0
+2024-08-26 16:10:52,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.60 vs. limit=15.0
+2024-08-26 16:10:55,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=71285.33333333333, ans=15.0
+2024-08-26 16:11:01,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=71285.33333333333, ans=0.125
+2024-08-26 16:11:05,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.01 vs. limit=15.0
+2024-08-26 16:11:10,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.10 vs. limit=15.0
+2024-08-26 16:11:13,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71392.0, ans=0.1
+2024-08-26 16:11:23,803 INFO [train.py:1114] (1/4) Epoch 6, batch 950, loss[loss=0.2313, simple_loss=0.2822, pruned_loss=0.06614, ctc_loss=0.1203, over 19493.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3069, pruned_loss=0.07486, ctc_loss=0.1401, over 3821090.00 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 16.0
+2024-08-26 16:11:35,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=71445.33333333333, ans=0.05
+2024-08-26 16:11:42,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=71498.66666666667, ans=0.0
+2024-08-26 16:11:44,186 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:11:53,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.43 vs. limit=15.0
+2024-08-26 16:11:57,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=71552.0, ans=0.125
+2024-08-26 16:11:58,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71552.0, ans=0.125
+2024-08-26 16:11:59,456 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.602e+02 1.780e+02 2.099e+02 5.215e+02, threshold=3.559e+02, percent-clipped=4.0
+2024-08-26 16:12:08,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=71605.33333333333, ans=0.025
+2024-08-26 16:12:18,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71658.66666666667, ans=0.1
+2024-08-26 16:12:21,527 INFO [train.py:1114] (1/4) Epoch 6, batch 1000, loss[loss=0.2193, simple_loss=0.2816, pruned_loss=0.05674, ctc_loss=0.1087, over 19838.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3076, pruned_loss=0.07515, ctc_loss=0.1403, over 3815904.45 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0
+2024-08-26 16:12:29,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=71712.0, ans=0.0
+2024-08-26 16:12:34,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=71765.33333333333, ans=0.1
+2024-08-26 16:12:52,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=71765.33333333333, ans=0.02
+2024-08-26 16:12:59,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71818.66666666667, ans=0.1
+2024-08-26 16:13:06,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=71872.0, ans=0.2
+2024-08-26 16:13:08,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.76 vs. limit=15.0
+2024-08-26 16:13:08,173 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.13 vs. limit=12.0
+2024-08-26 16:13:11,551 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:13:17,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=71925.33333333333, ans=0.125
+2024-08-26 16:13:18,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=71925.33333333333, ans=0.0
+2024-08-26 16:13:22,622 INFO [train.py:1114] (1/4) Epoch 6, batch 1050, loss[loss=0.2625, simple_loss=0.3169, pruned_loss=0.07562, ctc_loss=0.1422, over 19835.00 frames. ], tot_loss[loss=0.255, simple_loss=0.306, pruned_loss=0.07425, ctc_loss=0.1389, over 3822457.66 frames. ], batch size: 57, lr: 2.37e-02, grad_scale: 16.0
+2024-08-26 16:13:24,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=71978.66666666667, ans=0.125
+2024-08-26 16:13:26,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.33 vs. limit=15.0
+2024-08-26 16:13:38,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72032.0, ans=0.0
+2024-08-26 16:13:42,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. limit=10.0
+2024-08-26 16:13:50,114 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.587e+02 1.763e+02 2.081e+02 5.001e+02, threshold=3.526e+02, percent-clipped=1.0
+2024-08-26 16:13:55,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=72138.66666666667, ans=0.2
+2024-08-26 16:14:10,561 INFO [train.py:1114] (1/4) Epoch 6, batch 1100, loss[loss=0.2478, simple_loss=0.2977, pruned_loss=0.07286, ctc_loss=0.1306, over 19591.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3052, pruned_loss=0.07343, ctc_loss=0.1372, over 3829725.50 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0
+2024-08-26 16:14:35,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.76 vs. limit=15.0
+2024-08-26 16:14:38,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72352.0, ans=0.125
+2024-08-26 16:14:41,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=72352.0, ans=0.125
+2024-08-26 16:14:43,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=72352.0, ans=0.125
+2024-08-26 16:14:45,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.09 vs. limit=15.0
+2024-08-26 16:15:15,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=72458.66666666667, ans=0.125
+2024-08-26 16:15:25,918 INFO [train.py:1114] (1/4) Epoch 6, batch 1150, loss[loss=0.2377, simple_loss=0.2981, pruned_loss=0.06463, ctc_loss=0.1198, over 19583.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3049, pruned_loss=0.07348, ctc_loss=0.1371, over 3827941.44 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0
+2024-08-26 16:15:45,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.93 vs. limit=15.0
+2024-08-26 16:15:48,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=72512.0, ans=0.07
+2024-08-26 16:15:50,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=72512.0, ans=0.025
+2024-08-26 16:16:51,599 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.628e+02 1.822e+02 2.077e+02 5.117e+02, threshold=3.645e+02, percent-clipped=2.0
+2024-08-26 16:16:52,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0
+2024-08-26 16:16:52,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72672.0, ans=0.125
+2024-08-26 16:16:57,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=72672.0, ans=0.09899494936611666
+2024-08-26 16:17:15,533 INFO [train.py:1114] (1/4) Epoch 6, batch 1200, loss[loss=0.2724, simple_loss=0.3307, pruned_loss=0.07797, ctc_loss=0.1452, over 19837.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3063, pruned_loss=0.07413, ctc_loss=0.1385, over 3823993.67 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:17:23,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72778.66666666667, ans=0.0
+2024-08-26 16:17:25,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=72832.0, ans=0.0
+2024-08-26 16:17:28,241 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:17:49,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=72938.66666666667, ans=0.125
+2024-08-26 16:18:02,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.63 vs. limit=15.0
+2024-08-26 16:18:04,581 INFO [train.py:1114] (1/4) Epoch 6, batch 1250, loss[loss=0.2822, simple_loss=0.3308, pruned_loss=0.08586, ctc_loss=0.1548, over 19520.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3062, pruned_loss=0.0737, ctc_loss=0.1374, over 3841866.82 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:18:13,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=73098.66666666667, ans=0.125
+2024-08-26 16:18:28,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=73152.0, ans=0.125
+2024-08-26 16:18:28,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=15.0
+2024-08-26 16:18:31,859 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.534e+02 1.709e+02 2.004e+02 3.682e+02, threshold=3.418e+02, percent-clipped=1.0
+2024-08-26 16:18:38,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=73205.33333333333, ans=0.0
+2024-08-26 16:18:59,493 INFO [train.py:1114] (1/4) Epoch 6, batch 1300, loss[loss=0.2516, simple_loss=0.3145, pruned_loss=0.06812, ctc_loss=0.1312, over 18887.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3054, pruned_loss=0.07339, ctc_loss=0.1367, over 3845848.26 frames. ], batch size: 76, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:19:16,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=73312.0, ans=0.2
+2024-08-26 16:19:52,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=73365.33333333333, ans=0.025
+2024-08-26 16:19:57,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=73365.33333333333, ans=0.0
+2024-08-26 16:19:59,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=73365.33333333333, ans=0.125
+2024-08-26 16:20:01,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73418.66666666667, ans=0.1
+2024-08-26 16:20:09,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=73418.66666666667, ans=0.125
+2024-08-26 16:20:19,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=73472.0, ans=0.125
+2024-08-26 16:20:26,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.79 vs. limit=22.5
+2024-08-26 16:20:32,211 INFO [train.py:1114] (1/4) Epoch 6, batch 1350, loss[loss=0.2236, simple_loss=0.292, pruned_loss=0.05509, ctc_loss=0.1125, over 19757.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3041, pruned_loss=0.07253, ctc_loss=0.1352, over 3856087.46 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:20:34,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=73578.66666666667, ans=0.2
+2024-08-26 16:20:35,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=73578.66666666667, ans=0.015
+2024-08-26 16:20:36,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73578.66666666667, ans=0.1
+2024-08-26 16:20:47,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=73632.0, ans=0.0
+2024-08-26 16:20:49,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=73632.0, ans=0.0
+2024-08-26 16:20:51,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=73685.33333333333, ans=0.0
+2024-08-26 16:21:00,545 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.536e+02 1.657e+02 1.960e+02 3.055e+02, threshold=3.315e+02, percent-clipped=0.0
+2024-08-26 16:21:02,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.17 vs. limit=22.5
+2024-08-26 16:21:19,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73845.33333333333, ans=0.125
+2024-08-26 16:21:20,641 INFO [train.py:1114] (1/4) Epoch 6, batch 1400, loss[loss=0.2287, simple_loss=0.2778, pruned_loss=0.06613, ctc_loss=0.1183, over 19654.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3038, pruned_loss=0.07239, ctc_loss=0.1351, over 3863972.67 frames. ], batch size: 46, lr: 2.35e-02, grad_scale: 32.0
+2024-08-26 16:21:26,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73845.33333333333, ans=0.125
+2024-08-26 16:21:34,285 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:21:37,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73898.66666666667, ans=0.1
+2024-08-26 16:21:43,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=73952.0, ans=0.035
+2024-08-26 16:22:08,747 INFO [train.py:1114] (1/4) Epoch 6, batch 1450, loss[loss=0.2838, simple_loss=0.3287, pruned_loss=0.08756, ctc_loss=0.1597, over 19687.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3048, pruned_loss=0.07292, ctc_loss=0.1362, over 3862438.35 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 16.0
+2024-08-26 16:22:24,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=74165.33333333333, ans=0.05
+2024-08-26 16:23:09,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=74165.33333333333, ans=0.0
+2024-08-26 16:23:32,934 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.612e+02 1.863e+02 2.093e+02 4.374e+02, threshold=3.727e+02, percent-clipped=2.0
+2024-08-26 16:23:34,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=74272.0, ans=0.125
+2024-08-26 16:23:49,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=74325.33333333333, ans=0.025
+2024-08-26 16:23:57,146 INFO [train.py:1114] (1/4) Epoch 6, batch 1500, loss[loss=0.2402, simple_loss=0.298, pruned_loss=0.06692, ctc_loss=0.1213, over 19592.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3051, pruned_loss=0.07309, ctc_loss=0.1365, over 3862805.82 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 16.0
+2024-08-26 16:24:01,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=74378.66666666667, ans=0.0
+2024-08-26 16:24:04,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=14.50 vs. limit=15.0
+2024-08-26 16:24:29,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=74432.0, ans=0.0
+2024-08-26 16:24:29,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74432.0, ans=0.1
+2024-08-26 16:24:35,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=74432.0, ans=0.0
+2024-08-26 16:24:38,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=74432.0, ans=0.125
+2024-08-26 16:25:09,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=74645.33333333333, ans=0.125
+2024-08-26 16:25:10,564 INFO [train.py:1114] (1/4) Epoch 6, batch 1550, loss[loss=0.2654, simple_loss=0.3204, pruned_loss=0.07696, ctc_loss=0.1409, over 19611.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3055, pruned_loss=0.07363, ctc_loss=0.1375, over 3848623.69 frames. ], batch size: 60, lr: 2.34e-02, grad_scale: 16.0
+2024-08-26 16:25:13,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74645.33333333333, ans=0.1
+2024-08-26 16:26:02,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.93 vs. limit=15.0
+2024-08-26 16:26:19,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=74752.0, ans=0.125
+2024-08-26 16:26:19,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=74752.0, ans=0.125
+2024-08-26 16:26:20,821 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.577e+02 1.696e+02 1.957e+02 2.811e+02, threshold=3.391e+02, percent-clipped=0.0
+2024-08-26 16:26:30,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0
+2024-08-26 16:26:40,315 INFO [train.py:1114] (1/4) Epoch 6, batch 1600, loss[loss=0.2584, simple_loss=0.3182, pruned_loss=0.07182, ctc_loss=0.1375, over 19862.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3047, pruned_loss=0.07333, ctc_loss=0.1371, over 3838342.69 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0
+2024-08-26 16:26:53,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=74965.33333333333, ans=0.125
+2024-08-26 16:27:35,215 INFO [train.py:1114] (1/4) Epoch 6, batch 1650, loss[loss=0.2712, simple_loss=0.3241, pruned_loss=0.07985, ctc_loss=0.1464, over 19633.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3047, pruned_loss=0.0733, ctc_loss=0.137, over 3835386.23 frames. ], batch size: 59, lr: 2.34e-02, grad_scale: 32.0
+2024-08-26 16:27:39,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=75178.66666666667, ans=0.0
+2024-08-26 16:27:48,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=75232.0, ans=0.125
+2024-08-26 16:28:04,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.82 vs. limit=22.5
+2024-08-26 16:28:43,067 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.584e+02 1.799e+02 2.082e+02 3.549e+02, threshold=3.597e+02, percent-clipped=1.0
+2024-08-26 16:29:36,198 INFO [train.py:1114] (1/4) Epoch 6, batch 1700, loss[loss=0.2069, simple_loss=0.2609, pruned_loss=0.05582, ctc_loss=0.1032, over 19660.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3041, pruned_loss=0.07241, ctc_loss=0.1355, over 3848577.47 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 32.0
+2024-08-26 16:29:43,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75445.33333333333, ans=0.125
+2024-08-26 16:29:44,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.57 vs.
limit=15.0 +2024-08-26 16:29:45,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75498.66666666667, ans=0.1 +2024-08-26 16:29:48,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=75498.66666666667, ans=0.125 +2024-08-26 16:29:49,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=75498.66666666667, ans=0.04949747468305833 +2024-08-26 16:29:49,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.53 vs. limit=6.0 +2024-08-26 16:29:51,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=75498.66666666667, ans=0.0 +2024-08-26 16:29:54,124 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-26 16:29:57,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=75552.0, ans=10.0 +2024-08-26 16:30:01,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=75552.0, ans=0.04949747468305833 +2024-08-26 16:30:24,056 INFO [train.py:1114] (1/4) Epoch 6, batch 1750, loss[loss=0.2154, simple_loss=0.2725, pruned_loss=0.057, ctc_loss=0.1109, over 19642.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3035, pruned_loss=0.07218, ctc_loss=0.135, over 3853401.56 frames. ], batch size: 45, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:30:28,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=75712.0, ans=0.0 +2024-08-26 16:31:04,523 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.524e+02 1.697e+02 1.959e+02 3.052e+02, threshold=3.394e+02, percent-clipped=0.0 +2024-08-26 16:31:05,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75872.0, ans=0.0 +2024-08-26 16:31:25,741 INFO [train.py:1114] (1/4) Epoch 6, batch 1800, loss[loss=0.2476, simple_loss=0.3118, pruned_loss=0.06688, ctc_loss=0.1242, over 19621.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3042, pruned_loss=0.07251, ctc_loss=0.1355, over 3854754.97 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:31:30,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.15 vs. limit=10.0 +2024-08-26 16:32:01,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=75978.66666666667, ans=0.125 +2024-08-26 16:32:02,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.19 vs. limit=12.0 +2024-08-26 16:32:16,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.90 vs. 
limit=15.0 +2024-08-26 16:32:19,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=76085.33333333333, ans=0.0 +2024-08-26 16:32:20,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.62 vs. limit=15.0 +2024-08-26 16:32:26,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76138.66666666667, ans=0.1 +2024-08-26 16:33:01,906 INFO [train.py:1114] (1/4) Epoch 6, batch 1850, loss[loss=0.2244, simple_loss=0.2893, pruned_loss=0.05776, ctc_loss=0.1103, over 19601.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3034, pruned_loss=0.07196, ctc_loss=0.1345, over 3857229.03 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:33:22,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76245.33333333333, ans=0.125 +2024-08-26 16:33:27,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76245.33333333333, ans=0.1 +2024-08-26 16:33:52,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.13 vs. limit=10.0 +2024-08-26 16:33:54,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.31 vs. limit=15.0 +2024-08-26 16:33:56,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=76352.0, ans=0.125 +2024-08-26 16:33:57,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=76352.0, ans=0.2 +2024-08-26 16:33:58,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=76352.0, ans=0.07 +2024-08-26 16:34:05,742 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.545e+02 1.701e+02 1.893e+02 2.907e+02, threshold=3.402e+02, percent-clipped=0.0 +2024-08-26 16:34:06,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=76405.33333333333, ans=0.025 +2024-08-26 16:34:20,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=76458.66666666667, ans=0.0 +2024-08-26 16:34:23,393 INFO [train.py:1114] (1/4) Epoch 6, batch 1900, loss[loss=0.2551, simple_loss=0.3093, pruned_loss=0.07187, ctc_loss=0.1431, over 19651.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3042, pruned_loss=0.07226, ctc_loss=0.135, over 3861251.94 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:34:48,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=76672.0, ans=0.2 +2024-08-26 16:35:11,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.65 vs. 
limit=15.0 +2024-08-26 16:35:12,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76672.0, ans=0.0 +2024-08-26 16:35:12,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=76672.0, ans=0.2 +2024-08-26 16:35:14,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76672.0, ans=0.0 +2024-08-26 16:35:14,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76672.0, ans=0.1 +2024-08-26 16:35:14,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76672.0, ans=0.1 +2024-08-26 16:35:14,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76672.0, ans=0.0 +2024-08-26 16:35:15,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=76672.0, ans=0.125 +2024-08-26 16:35:18,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=76725.33333333333, ans=0.2 +2024-08-26 16:35:27,766 INFO [train.py:1114] (1/4) Epoch 6, batch 1950, loss[loss=0.2262, simple_loss=0.2838, pruned_loss=0.06157, ctc_loss=0.114, over 19585.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3048, pruned_loss=0.0722, ctc_loss=0.1347, over 3870447.06 frames. ], batch size: 52, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:35:29,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76778.66666666667, ans=0.125 +2024-08-26 16:35:32,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0 +2024-08-26 16:36:20,178 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.19 vs. limit=22.5 +2024-08-26 16:36:27,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76885.33333333333, ans=0.0 +2024-08-26 16:36:32,170 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.646e+02 1.808e+02 2.059e+02 4.885e+02, threshold=3.617e+02, percent-clipped=2.0 +2024-08-26 16:36:38,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=76938.66666666667, ans=0.2 +2024-08-26 16:36:46,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=76992.0, ans=0.0 +2024-08-26 16:36:53,607 INFO [train.py:1114] (1/4) Epoch 6, batch 2000, loss[loss=0.2302, simple_loss=0.2805, pruned_loss=0.06567, ctc_loss=0.1215, over 19686.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3057, pruned_loss=0.0729, ctc_loss=0.1362, over 3854966.03 frames. 
], batch size: 45, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:36:56,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=77045.33333333333, ans=0.0 +2024-08-26 16:37:10,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=77152.0, ans=0.125 +2024-08-26 16:37:22,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=5.84 vs. limit=15.0 +2024-08-26 16:37:23,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=15.0 +2024-08-26 16:37:26,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=77205.33333333333, ans=0.125 +2024-08-26 16:37:38,187 INFO [train.py:1114] (1/4) Epoch 6, batch 2050, loss[loss=0.2079, simple_loss=0.2587, pruned_loss=0.0567, ctc_loss=0.109, over 19698.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3045, pruned_loss=0.07245, ctc_loss=0.1354, over 3851842.29 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:37:50,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=77365.33333333333, ans=0.125 +2024-08-26 16:37:57,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=77418.66666666667, ans=0.125 +2024-08-26 16:38:04,756 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.566e+02 1.748e+02 2.075e+02 4.290e+02, threshold=3.497e+02, percent-clipped=1.0 +2024-08-26 16:38:06,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=77472.0, ans=0.125 +2024-08-26 16:38:09,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.58 vs. limit=15.0 +2024-08-26 16:38:11,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=77472.0, ans=0.0 +2024-08-26 16:38:14,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=77525.33333333333, ans=0.125 +2024-08-26 16:38:34,154 INFO [train.py:1114] (1/4) Epoch 6, batch 2100, loss[loss=0.2554, simple_loss=0.3129, pruned_loss=0.0717, ctc_loss=0.1364, over 19786.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3037, pruned_loss=0.07198, ctc_loss=0.1345, over 3857919.47 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:38:35,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=77578.66666666667, ans=0.125 +2024-08-26 16:38:37,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.64 vs. 
limit=12.0 +2024-08-26 16:39:08,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77578.66666666667, ans=0.125 +2024-08-26 16:39:15,872 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:39:19,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=77685.33333333333, ans=0.025 +2024-08-26 16:39:34,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77738.66666666667, ans=0.125 +2024-08-26 16:39:36,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77792.0, ans=0.1 +2024-08-26 16:39:38,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.63 vs. limit=15.0 +2024-08-26 16:39:40,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-08-26 16:39:46,024 INFO [train.py:1114] (1/4) Epoch 6, batch 2150, loss[loss=0.2356, simple_loss=0.2919, pruned_loss=0.06595, ctc_loss=0.1184, over 19583.00 frames. ], tot_loss[loss=0.25, simple_loss=0.303, pruned_loss=0.07168, ctc_loss=0.1339, over 3869040.43 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:39:50,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=77845.33333333333, ans=0.09899494936611666 +2024-08-26 16:39:56,259 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:39:58,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77898.66666666667, ans=0.1 +2024-08-26 16:40:04,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=12.0 +2024-08-26 16:40:05,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=77952.0, ans=0.125 +2024-08-26 16:40:07,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77952.0, ans=0.1 +2024-08-26 16:40:11,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=77952.0, ans=0.0 +2024-08-26 16:40:13,760 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.590e+02 1.744e+02 2.019e+02 3.989e+02, threshold=3.489e+02, percent-clipped=1.0 +2024-08-26 16:40:19,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=78005.33333333333, ans=0.125 +2024-08-26 16:40:24,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.11 vs. limit=15.0 +2024-08-26 16:40:31,345 INFO [train.py:1114] (1/4) Epoch 6, batch 2200, loss[loss=0.2579, simple_loss=0.3139, pruned_loss=0.07267, ctc_loss=0.1416, over 19604.00 frames. 
], tot_loss[loss=0.2501, simple_loss=0.303, pruned_loss=0.07177, ctc_loss=0.134, over 3868027.54 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:40:34,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78112.0, ans=0.125 +2024-08-26 16:40:38,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=78112.0, ans=0.125 +2024-08-26 16:40:43,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78165.33333333333, ans=0.125 +2024-08-26 16:40:43,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78165.33333333333, ans=0.125 +2024-08-26 16:40:49,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=78218.66666666667, ans=0.0 +2024-08-26 16:40:53,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0 +2024-08-26 16:40:58,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=78272.0, ans=0.2 +2024-08-26 16:41:58,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=78325.33333333333, ans=0.1 +2024-08-26 16:41:58,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=78325.33333333333, ans=0.0 +2024-08-26 16:42:03,100 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.99 vs. limit=10.0 +2024-08-26 16:42:04,353 INFO [train.py:1114] (1/4) Epoch 6, batch 2250, loss[loss=0.2485, simple_loss=0.3137, pruned_loss=0.06568, ctc_loss=0.13, over 19604.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3033, pruned_loss=0.07171, ctc_loss=0.1339, over 3868605.32 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:42:07,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-08-26 16:42:30,471 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.366e+02 1.631e+02 1.850e+02 2.118e+02 4.912e+02, threshold=3.701e+02, percent-clipped=4.0 +2024-08-26 16:42:37,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.22 vs. limit=15.0 +2024-08-26 16:42:41,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78592.0, ans=0.1 +2024-08-26 16:42:57,019 INFO [train.py:1114] (1/4) Epoch 6, batch 2300, loss[loss=0.2393, simple_loss=0.2879, pruned_loss=0.07101, ctc_loss=0.122, over 19509.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3022, pruned_loss=0.07161, ctc_loss=0.1337, over 3862070.77 frames. 
], batch size: 49, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:05,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=78698.66666666667, ans=0.0 +2024-08-26 16:43:10,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=78698.66666666667, ans=0.2 +2024-08-26 16:43:11,200 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:43:16,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78752.0, ans=0.125 +2024-08-26 16:43:19,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=78752.0, ans=0.125 +2024-08-26 16:43:24,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=78805.33333333333, ans=0.035 +2024-08-26 16:43:25,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78805.33333333333, ans=0.125 +2024-08-26 16:43:41,594 INFO [train.py:1114] (1/4) Epoch 6, batch 2350, loss[loss=0.2551, simple_loss=0.3107, pruned_loss=0.07235, ctc_loss=0.1368, over 19696.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3024, pruned_loss=0.07186, ctc_loss=0.1341, over 3864413.14 frames. ], batch size: 63, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:42,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=78912.0, ans=0.0 +2024-08-26 16:43:58,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=78965.33333333333, ans=0.0 +2024-08-26 16:44:06,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=79018.66666666667, ans=0.025 +2024-08-26 16:44:09,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.319e+02 1.571e+02 1.792e+02 2.053e+02 3.529e+02, threshold=3.585e+02, percent-clipped=0.0 +2024-08-26 16:44:12,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79072.0, ans=0.1 +2024-08-26 16:44:21,400 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:44:27,085 INFO [train.py:1114] (1/4) Epoch 6, batch 2400, loss[loss=0.2553, simple_loss=0.3108, pruned_loss=0.07188, ctc_loss=0.1403, over 19418.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3048, pruned_loss=0.07281, ctc_loss=0.1359, over 3858376.52 frames. ], batch size: 67, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:44:30,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=79178.66666666667, ans=0.0 +2024-08-26 16:44:42,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=79232.0, ans=0.125 +2024-08-26 16:44:53,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79338.66666666667, ans=0.125 +2024-08-26 16:45:12,832 INFO [train.py:1114] (1/4) Epoch 6, batch 2450, loss[loss=0.3704, simple_loss=0.3663, pruned_loss=0.1382, ctc_loss=0.2449, over 13027.00 frames. 
], tot_loss[loss=0.2605, simple_loss=0.3097, pruned_loss=0.07694, ctc_loss=0.1434, over 3730840.69 frames. ], batch size: 140, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:45:13,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79445.33333333333, ans=0.1 +2024-08-26 16:45:14,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=79445.33333333333, ans=0.125 +2024-08-26 16:45:20,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79445.33333333333, ans=0.125 +2024-08-26 16:45:23,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=79498.66666666667, ans=0.0 +2024-08-26 16:45:28,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=79498.66666666667, ans=0.0 +2024-08-26 16:45:29,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.98 vs. limit=15.0 +2024-08-26 16:45:40,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.744e+02 1.902e+02 2.066e+02 3.652e+02, threshold=3.804e+02, percent-clipped=1.0 +2024-08-26 16:48:16,404 INFO [train.py:1114] (1/4) Epoch 7, batch 0, loss[loss=0.2319, simple_loss=0.2871, pruned_loss=0.06371, ctc_loss=0.1233, over 19404.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2871, pruned_loss=0.06371, ctc_loss=0.1233, over 19404.00 frames. ], batch size: 48, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:48:16,405 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 16:48:29,565 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.2068, simple_loss=0.2958, pruned_loss=0.04327, ctc_loss=0.07811, over 944034.00 frames. +2024-08-26 16:48:29,566 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12882MB +2024-08-26 16:48:41,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79712.0, ans=0.125 +2024-08-26 16:49:14,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=79872.0, ans=0.125 +2024-08-26 16:49:19,280 INFO [train.py:1114] (1/4) Epoch 7, batch 50, loss[loss=0.2222, simple_loss=0.2772, pruned_loss=0.06038, ctc_loss=0.1163, over 19704.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3093, pruned_loss=0.07617, ctc_loss=0.1428, over 845399.71 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:49:19,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=79925.33333333333, ans=0.125 +2024-08-26 16:49:42,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.01 vs. limit=5.0 +2024-08-26 16:49:51,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.76 vs. 
limit=6.0 +2024-08-26 16:49:57,473 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.282e+02 1.584e+02 1.822e+02 2.089e+02 3.575e+02, threshold=3.645e+02, percent-clipped=0.0 +2024-08-26 16:50:07,015 INFO [train.py:1114] (1/4) Epoch 7, batch 100, loss[loss=0.2217, simple_loss=0.2852, pruned_loss=0.05732, ctc_loss=0.1091, over 19722.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3079, pruned_loss=0.07387, ctc_loss=0.1387, over 1499399.80 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:50:10,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=80192.0, ans=0.0 +2024-08-26 16:50:38,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80352.0, ans=0.1 +2024-08-26 16:50:52,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=80405.33333333333, ans=0.125 +2024-08-26 16:51:01,445 INFO [train.py:1114] (1/4) Epoch 7, batch 150, loss[loss=0.2158, simple_loss=0.2765, pruned_loss=0.0555, ctc_loss=0.1103, over 19722.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3038, pruned_loss=0.07157, ctc_loss=0.1345, over 2027591.77 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:07,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80458.66666666667, ans=0.1 +2024-08-26 16:51:08,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-08-26 16:51:14,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=80512.0, ans=0.2 +2024-08-26 16:51:25,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.93 vs. limit=10.0 +2024-08-26 16:51:33,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.68 vs. limit=15.0 +2024-08-26 16:51:38,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=80672.0, ans=0.125 +2024-08-26 16:51:39,013 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.525e+02 1.667e+02 1.863e+02 2.878e+02, threshold=3.334e+02, percent-clipped=0.0 +2024-08-26 16:51:48,551 INFO [train.py:1114] (1/4) Epoch 7, batch 200, loss[loss=0.2692, simple_loss=0.325, pruned_loss=0.07792, ctc_loss=0.1439, over 18201.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3016, pruned_loss=0.07044, ctc_loss=0.1319, over 2435660.66 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:48,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=80725.33333333333, ans=0.025 +2024-08-26 16:52:17,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=80885.33333333333, ans=0.0 +2024-08-26 16:52:17,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.02 vs. 
limit=15.0 +2024-08-26 16:52:22,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80885.33333333333, ans=0.1 +2024-08-26 16:52:35,140 INFO [train.py:1114] (1/4) Epoch 7, batch 250, loss[loss=0.2693, simple_loss=0.3193, pruned_loss=0.08021, ctc_loss=0.147, over 19374.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3003, pruned_loss=0.06949, ctc_loss=0.1304, over 2757003.56 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:52:35,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.07 vs. limit=15.0 +2024-08-26 16:52:39,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=80992.0, ans=0.125 +2024-08-26 16:53:01,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=81098.66666666667, ans=0.125 +2024-08-26 16:53:05,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=81152.0, ans=0.125 +2024-08-26 16:53:16,587 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.591e+02 1.729e+02 1.900e+02 5.825e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 16:53:25,915 INFO [train.py:1114] (1/4) Epoch 7, batch 300, loss[loss=0.2584, simple_loss=0.31, pruned_loss=0.07499, ctc_loss=0.1422, over 19528.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.2998, pruned_loss=0.06936, ctc_loss=0.13, over 3002026.24 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:53:30,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=81258.66666666667, ans=0.125 +2024-08-26 16:54:05,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=81418.66666666667, ans=0.125 +2024-08-26 16:54:18,323 INFO [train.py:1114] (1/4) Epoch 7, batch 350, loss[loss=0.2016, simple_loss=0.2645, pruned_loss=0.05033, ctc_loss=0.09515, over 19747.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3003, pruned_loss=0.06949, ctc_loss=0.1301, over 3191705.25 frames. ], batch size: 48, lr: 2.12e-02, grad_scale: 16.0 +2024-08-26 16:54:56,441 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.574e+02 1.753e+02 2.022e+02 2.928e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-26 16:55:04,699 INFO [train.py:1114] (1/4) Epoch 7, batch 400, loss[loss=0.257, simple_loss=0.3159, pruned_loss=0.07271, ctc_loss=0.1319, over 19508.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3001, pruned_loss=0.06942, ctc_loss=0.1297, over 3343929.88 frames. 
], batch size: 54, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:55:05,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=81792.0, ans=0.125 +2024-08-26 16:55:16,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=81845.33333333333, ans=0.125 +2024-08-26 16:55:21,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=81845.33333333333, ans=0.125 +2024-08-26 16:55:33,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-08-26 16:55:39,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=81952.0, ans=0.95 +2024-08-26 16:55:43,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=82005.33333333333, ans=0.125 +2024-08-26 16:55:47,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=82005.33333333333, ans=0.0 +2024-08-26 16:55:51,735 INFO [train.py:1114] (1/4) Epoch 7, batch 450, loss[loss=0.2449, simple_loss=0.3047, pruned_loss=0.06851, ctc_loss=0.1202, over 19623.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3001, pruned_loss=0.06957, ctc_loss=0.13, over 3450438.20 frames. ], batch size: 55, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:05,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-26 16:56:10,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=82112.0, ans=0.125 +2024-08-26 16:56:11,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=82112.0, ans=0.0 +2024-08-26 16:56:21,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=82165.33333333333, ans=0.0 +2024-08-26 16:56:40,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=82272.0, ans=0.2 +2024-08-26 16:56:41,732 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.250e+02 1.485e+02 1.753e+02 2.038e+02 3.855e+02, threshold=3.505e+02, percent-clipped=1.0 +2024-08-26 16:56:43,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82272.0, ans=0.125 +2024-08-26 16:56:44,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=82272.0, ans=0.025 +2024-08-26 16:56:49,048 INFO [train.py:1114] (1/4) Epoch 7, batch 500, loss[loss=0.2523, simple_loss=0.3171, pruned_loss=0.06875, ctc_loss=0.1253, over 19678.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.2989, pruned_loss=0.06863, ctc_loss=0.1283, over 3547137.89 frames. 
], batch size: 63, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:53,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=82325.33333333333, ans=0.125 +2024-08-26 16:57:23,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=82485.33333333333, ans=0.0 +2024-08-26 16:57:29,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=82538.66666666667, ans=0.125 +2024-08-26 16:57:31,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=82538.66666666667, ans=0.1 +2024-08-26 16:57:35,771 INFO [train.py:1114] (1/4) Epoch 7, batch 550, loss[loss=0.26, simple_loss=0.3187, pruned_loss=0.07244, ctc_loss=0.141, over 19309.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2989, pruned_loss=0.06877, ctc_loss=0.1282, over 3609288.66 frames. ], batch size: 71, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:57:35,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=82592.0, ans=0.125 +2024-08-26 16:57:42,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=82592.0, ans=0.125 +2024-08-26 16:57:52,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=82645.33333333333, ans=0.125 +2024-08-26 16:57:59,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82698.66666666667, ans=0.1 +2024-08-26 16:58:13,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=82805.33333333333, ans=0.125 +2024-08-26 16:58:16,886 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.530e+02 1.701e+02 1.927e+02 4.407e+02, threshold=3.402e+02, percent-clipped=1.0 +2024-08-26 16:58:21,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=82805.33333333333, ans=0.0 +2024-08-26 16:58:30,194 INFO [train.py:1114] (1/4) Epoch 7, batch 600, loss[loss=0.2415, simple_loss=0.3046, pruned_loss=0.06423, ctc_loss=0.1248, over 19404.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2989, pruned_loss=0.06858, ctc_loss=0.1279, over 3666929.36 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:59:01,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=82858.66666666667, ans=0.0 +2024-08-26 17:00:43,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82912.0, ans=0.1 +2024-08-26 17:01:16,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=83018.66666666667, ans=0.025 +2024-08-26 17:04:50,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=83072.0, ans=0.07 +2024-08-26 17:04:51,935 INFO [train.py:1114] (1/4) Epoch 7, batch 650, loss[loss=0.2307, simple_loss=0.2929, pruned_loss=0.06108, ctc_loss=0.1158, over 19777.00 frames. 
], tot_loss[loss=0.242, simple_loss=0.2976, pruned_loss=0.06782, ctc_loss=0.1268, over 3716872.42 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:05:15,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=83178.66666666667, ans=0.125 +2024-08-26 17:05:41,844 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.502e+02 1.666e+02 1.880e+02 3.682e+02, threshold=3.331e+02, percent-clipped=2.0 +2024-08-26 17:06:20,359 INFO [train.py:1114] (1/4) Epoch 7, batch 700, loss[loss=0.2264, simple_loss=0.2883, pruned_loss=0.05964, ctc_loss=0.113, over 19715.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2981, pruned_loss=0.068, ctc_loss=0.127, over 3748590.20 frames. ], batch size: 51, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:06:26,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=83392.0, ans=0.125 +2024-08-26 17:06:48,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=83552.0, ans=0.0 +2024-08-26 17:07:06,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=83605.33333333333, ans=0.125 +2024-08-26 17:07:08,431 INFO [train.py:1114] (1/4) Epoch 7, batch 750, loss[loss=0.2188, simple_loss=0.2889, pruned_loss=0.05435, ctc_loss=0.1003, over 19489.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2979, pruned_loss=0.06789, ctc_loss=0.1268, over 3773990.72 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:07:08,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=83658.66666666667, ans=0.0 +2024-08-26 17:07:33,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=83765.33333333333, ans=0.0 +2024-08-26 17:07:37,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83818.66666666667, ans=0.1 +2024-08-26 17:07:42,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=83818.66666666667, ans=0.125 +2024-08-26 17:07:45,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=83872.0, ans=0.125 +2024-08-26 17:07:48,230 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.533e+02 1.678e+02 1.875e+02 3.166e+02, threshold=3.356e+02, percent-clipped=0.0 +2024-08-26 17:07:58,364 INFO [train.py:1114] (1/4) Epoch 7, batch 800, loss[loss=0.2192, simple_loss=0.2746, pruned_loss=0.05926, ctc_loss=0.1132, over 19412.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2979, pruned_loss=0.06808, ctc_loss=0.127, over 3794781.94 frames. 
], batch size: 48, lr: 2.10e-02, grad_scale: 32.0 +2024-08-26 17:07:58,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=83925.33333333333, ans=0.2 +2024-08-26 17:08:06,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=83978.66666666667, ans=0.2 +2024-08-26 17:08:14,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=83978.66666666667, ans=0.0 +2024-08-26 17:08:17,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=83978.66666666667, ans=0.125 +2024-08-26 17:08:19,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.66 vs. limit=22.5 +2024-08-26 17:08:30,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84032.0, ans=0.125 +2024-08-26 17:08:36,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=84085.33333333333, ans=0.2 +2024-08-26 17:08:37,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=84085.33333333333, ans=0.0 +2024-08-26 17:08:38,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=84085.33333333333, ans=0.04949747468305833 +2024-08-26 17:08:39,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-08-26 17:08:46,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=84138.66666666667, ans=0.2 +2024-08-26 17:08:53,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=84138.66666666667, ans=0.2 +2024-08-26 17:08:56,338 INFO [train.py:1114] (1/4) Epoch 7, batch 850, loss[loss=0.259, simple_loss=0.314, pruned_loss=0.07432, ctc_loss=0.1386, over 19653.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2973, pruned_loss=0.0677, ctc_loss=0.1264, over 3814384.82 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:09:32,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=84298.66666666667, ans=0.125 +2024-08-26 17:09:48,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=84298.66666666667, ans=0.0 +2024-08-26 17:09:49,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=84352.0, ans=0.2 +2024-08-26 17:09:59,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=84405.33333333333, ans=0.125 +2024-08-26 17:10:01,941 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.310e+02 1.545e+02 1.673e+02 1.909e+02 3.259e+02, threshold=3.346e+02, percent-clipped=0.0 +2024-08-26 17:10:09,590 INFO [train.py:1114] (1/4) Epoch 7, batch 900, loss[loss=0.2174, simple_loss=0.2757, pruned_loss=0.05797, ctc_loss=0.1078, over 19808.00 frames. 
], tot_loss[loss=0.2426, simple_loss=0.298, pruned_loss=0.06812, ctc_loss=0.1272, over 3818924.23 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:10:09,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=84458.66666666667, ans=0.0 +2024-08-26 17:10:10,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0 +2024-08-26 17:10:14,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=84458.66666666667, ans=0.0 +2024-08-26 17:10:24,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.98 vs. limit=12.0 +2024-08-26 17:10:29,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=84565.33333333333, ans=0.0 +2024-08-26 17:10:29,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=84565.33333333333, ans=0.125 +2024-08-26 17:10:39,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84618.66666666667, ans=0.0 +2024-08-26 17:10:58,452 INFO [train.py:1114] (1/4) Epoch 7, batch 950, loss[loss=0.2107, simple_loss=0.2676, pruned_loss=0.05638, ctc_loss=0.1025, over 19496.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2986, pruned_loss=0.06859, ctc_loss=0.1281, over 3820132.42 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:10:58,697 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:11:21,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0 +2024-08-26 17:11:22,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=84832.0, ans=0.0 +2024-08-26 17:11:43,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=84938.66666666667, ans=0.125 +2024-08-26 17:11:48,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.566e+02 1.708e+02 1.976e+02 3.572e+02, threshold=3.415e+02, percent-clipped=1.0 +2024-08-26 17:12:18,434 INFO [train.py:1114] (1/4) Epoch 7, batch 1000, loss[loss=0.225, simple_loss=0.2897, pruned_loss=0.0578, ctc_loss=0.1116, over 19875.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.2998, pruned_loss=0.06909, ctc_loss=0.129, over 3815457.84 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:12:18,824 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:12:22,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84992.0, ans=0.1 +2024-08-26 17:12:26,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=84992.0, ans=0.125 +2024-08-26 17:12:28,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. 
limit=15.0 +2024-08-26 17:13:44,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.20 vs. limit=15.0 +2024-08-26 17:13:45,048 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.91 vs. limit=15.0 +2024-08-26 17:13:59,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=85258.66666666667, ans=0.2 +2024-08-26 17:13:59,712 INFO [train.py:1114] (1/4) Epoch 7, batch 1050, loss[loss=0.2532, simple_loss=0.3122, pruned_loss=0.07017, ctc_loss=0.1346, over 19844.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2987, pruned_loss=0.06847, ctc_loss=0.1278, over 3823090.38 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:13:59,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=85258.66666666667, ans=0.0 +2024-08-26 17:14:08,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=15.0 +2024-08-26 17:14:10,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85312.0, ans=0.125 +2024-08-26 17:14:39,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=85312.0, ans=0.125 +2024-08-26 17:14:41,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=85312.0, ans=0.125 +2024-08-26 17:16:19,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=85365.33333333333, ans=0.0 +2024-08-26 17:16:26,854 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:16:29,540 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:16:34,233 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:16:35,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=85418.66666666667, ans=0.125 +2024-08-26 17:16:40,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=85472.0, ans=0.125 +2024-08-26 17:16:40,668 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.449e+02 1.584e+02 1.768e+02 2.861e+02, threshold=3.169e+02, percent-clipped=0.0 +2024-08-26 17:16:42,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=85472.0, ans=0.125 +2024-08-26 17:16:46,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85472.0, ans=0.125 +2024-08-26 17:16:47,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85525.33333333333, ans=0.125 +2024-08-26 17:16:48,374 INFO [train.py:1114] (1/4) Epoch 7, batch 1100, loss[loss=0.2177, simple_loss=0.2859, pruned_loss=0.0543, ctc_loss=0.1024, over 19591.00 frames. 
], tot_loss[loss=0.2432, simple_loss=0.2988, pruned_loss=0.06823, ctc_loss=0.1277, over 3830140.20 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:16:49,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85525.33333333333, ans=0.1 +2024-08-26 17:16:50,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=85525.33333333333, ans=0.2 +2024-08-26 17:17:14,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=85632.0, ans=0.025 +2024-08-26 17:17:32,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=85738.66666666667, ans=0.0 +2024-08-26 17:17:45,020 INFO [train.py:1114] (1/4) Epoch 7, batch 1150, loss[loss=0.2214, simple_loss=0.286, pruned_loss=0.05678, ctc_loss=0.1081, over 19582.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2986, pruned_loss=0.06834, ctc_loss=0.1281, over 3828421.18 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-26 17:17:54,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.56 vs. limit=6.0 +2024-08-26 17:18:02,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=85845.33333333333, ans=0.125 +2024-08-26 17:18:11,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=85898.66666666667, ans=0.0 +2024-08-26 17:18:16,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=85898.66666666667, ans=0.125 +2024-08-26 17:18:38,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=86005.33333333333, ans=0.0 +2024-08-26 17:18:41,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=86005.33333333333, ans=0.0 +2024-08-26 17:18:41,960 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.522e+02 1.667e+02 1.891e+02 3.736e+02, threshold=3.335e+02, percent-clipped=2.0 +2024-08-26 17:18:48,629 INFO [train.py:1114] (1/4) Epoch 7, batch 1200, loss[loss=0.2387, simple_loss=0.3054, pruned_loss=0.06213, ctc_loss=0.1193, over 19830.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.299, pruned_loss=0.06853, ctc_loss=0.1284, over 3823710.99 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:19:00,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.89 vs. 
limit=15.0 +2024-08-26 17:19:00,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=86112.0, ans=0.125 +2024-08-26 17:19:11,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=86165.33333333333, ans=0.125 +2024-08-26 17:19:21,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86165.33333333333, ans=0.1 +2024-08-26 17:19:28,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0 +2024-08-26 17:19:43,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=86218.66666666667, ans=0.125 +2024-08-26 17:19:54,875 INFO [train.py:1114] (1/4) Epoch 7, batch 1250, loss[loss=0.2731, simple_loss=0.3292, pruned_loss=0.07806, ctc_loss=0.1522, over 19523.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2991, pruned_loss=0.06817, ctc_loss=0.1277, over 3842663.98 frames. ], batch size: 61, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:20:08,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.56 vs. limit=15.0 +2024-08-26 17:20:16,286 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-08-26 17:20:22,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=86485.33333333333, ans=0.125 +2024-08-26 17:20:25,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=86485.33333333333, ans=0.2 +2024-08-26 17:20:26,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=86485.33333333333, ans=0.125 +2024-08-26 17:20:35,652 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.207e+02 1.476e+02 1.609e+02 1.857e+02 3.245e+02, threshold=3.218e+02, percent-clipped=0.0 +2024-08-26 17:20:35,894 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:20:42,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=86538.66666666667, ans=0.125 +2024-08-26 17:20:44,747 INFO [train.py:1114] (1/4) Epoch 7, batch 1300, loss[loss=0.2526, simple_loss=0.3103, pruned_loss=0.06965, ctc_loss=0.1389, over 18921.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2977, pruned_loss=0.06728, ctc_loss=0.126, over 3846659.20 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:20:47,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.87 vs. 
limit=12.0 +2024-08-26 17:20:55,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=86645.33333333333, ans=0.125 +2024-08-26 17:21:08,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=86698.66666666667, ans=0.0 +2024-08-26 17:21:30,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86805.33333333333, ans=0.0 +2024-08-26 17:21:31,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.12 vs. limit=12.0 +2024-08-26 17:21:31,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0 +2024-08-26 17:21:38,922 INFO [train.py:1114] (1/4) Epoch 7, batch 1350, loss[loss=0.2209, simple_loss=0.2869, pruned_loss=0.05547, ctc_loss=0.1098, over 19786.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2974, pruned_loss=0.06719, ctc_loss=0.1255, over 3858723.75 frames. ], batch size: 54, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:21:41,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=86858.66666666667, ans=0.015 +2024-08-26 17:21:58,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.73 vs. limit=15.0 +2024-08-26 17:22:03,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=86965.33333333333, ans=0.02 +2024-08-26 17:22:05,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=86965.33333333333, ans=0.125 +2024-08-26 17:22:08,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=6.0 +2024-08-26 17:22:09,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.66 vs. limit=22.5 +2024-08-26 17:22:19,561 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.495e+02 1.726e+02 1.992e+02 3.104e+02, threshold=3.452e+02, percent-clipped=0.0 +2024-08-26 17:22:26,108 INFO [train.py:1114] (1/4) Epoch 7, batch 1400, loss[loss=0.2054, simple_loss=0.2559, pruned_loss=0.05638, ctc_loss=0.1052, over 19649.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2965, pruned_loss=0.06684, ctc_loss=0.1249, over 3865468.57 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:22:26,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=87125.33333333333, ans=0.025 +2024-08-26 17:22:49,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.57 vs. 
limit=10.0 +2024-08-26 17:23:06,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=87285.33333333333, ans=0.125 +2024-08-26 17:23:23,769 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:23:28,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=87338.66666666667, ans=0.0 +2024-08-26 17:23:35,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=87392.0, ans=0.2 +2024-08-26 17:23:35,693 INFO [train.py:1114] (1/4) Epoch 7, batch 1450, loss[loss=0.2566, simple_loss=0.3152, pruned_loss=0.07232, ctc_loss=0.1334, over 19663.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2973, pruned_loss=0.06736, ctc_loss=0.1257, over 3863342.27 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:24:09,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87552.0, ans=0.1 +2024-08-26 17:24:20,638 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.540e+02 1.669e+02 1.894e+02 3.453e+02, threshold=3.338e+02, percent-clipped=1.0 +2024-08-26 17:24:20,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=87605.33333333333, ans=0.0 +2024-08-26 17:24:29,672 INFO [train.py:1114] (1/4) Epoch 7, batch 1500, loss[loss=0.2441, simple_loss=0.2993, pruned_loss=0.06884, ctc_loss=0.1279, over 19582.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2979, pruned_loss=0.06768, ctc_loss=0.1263, over 3862746.88 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:24:40,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=87658.66666666667, ans=0.0 +2024-08-26 17:24:44,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=87712.0, ans=0.125 +2024-08-26 17:24:51,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=87765.33333333333, ans=0.125 +2024-08-26 17:24:52,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=87765.33333333333, ans=0.05 +2024-08-26 17:24:54,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=87765.33333333333, ans=0.125 +2024-08-26 17:24:58,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.80 vs. limit=10.0 +2024-08-26 17:25:04,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=87818.66666666667, ans=10.0 +2024-08-26 17:25:19,511 INFO [train.py:1114] (1/4) Epoch 7, batch 1550, loss[loss=0.2592, simple_loss=0.3145, pruned_loss=0.07457, ctc_loss=0.1368, over 19607.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2983, pruned_loss=0.06812, ctc_loss=0.1274, over 3848878.45 frames. 
], batch size: 60, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:25:22,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=87925.33333333333, ans=0.0 +2024-08-26 17:25:27,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87925.33333333333, ans=0.125 +2024-08-26 17:25:44,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88032.0, ans=0.125 +2024-08-26 17:25:46,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.60 vs. limit=15.0 +2024-08-26 17:26:04,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.559e+02 1.788e+02 2.182e+02 5.116e+02, threshold=3.576e+02, percent-clipped=3.0 +2024-08-26 17:26:05,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=88138.66666666667, ans=0.0 +2024-08-26 17:26:10,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88192.0, ans=0.1 +2024-08-26 17:26:10,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. limit=10.0 +2024-08-26 17:26:10,939 INFO [train.py:1114] (1/4) Epoch 7, batch 1600, loss[loss=0.2461, simple_loss=0.3067, pruned_loss=0.06796, ctc_loss=0.1241, over 19853.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2982, pruned_loss=0.06813, ctc_loss=0.1272, over 3837900.68 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:26:12,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=88192.0, ans=0.0 +2024-08-26 17:26:15,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88192.0, ans=0.125 +2024-08-26 17:26:53,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=88405.33333333333, ans=0.125 +2024-08-26 17:26:59,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=88405.33333333333, ans=0.125 +2024-08-26 17:27:01,933 INFO [train.py:1114] (1/4) Epoch 7, batch 1650, loss[loss=0.2596, simple_loss=0.3072, pruned_loss=0.07809, ctc_loss=0.1393, over 19670.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2979, pruned_loss=0.06819, ctc_loss=0.1274, over 3834353.21 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:28:12,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=88565.33333333333, ans=0.125 +2024-08-26 17:28:16,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=88565.33333333333, ans=0.2 +2024-08-26 17:28:23,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.42 vs. 
limit=12.0 +2024-08-26 17:28:59,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=88618.66666666667, ans=0.125 +2024-08-26 17:29:25,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=88672.0, ans=0.125 +2024-08-26 17:29:25,562 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.503e+02 1.653e+02 1.809e+02 2.992e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-26 17:29:40,039 INFO [train.py:1114] (1/4) Epoch 7, batch 1700, loss[loss=0.2164, simple_loss=0.2693, pruned_loss=0.05867, ctc_loss=0.1155, over 19698.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2973, pruned_loss=0.06753, ctc_loss=0.1262, over 3848276.44 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:29:58,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=88725.33333333333, ans=0.0 +2024-08-26 17:30:14,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=88778.66666666667, ans=0.125 +2024-08-26 17:30:17,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=88832.0, ans=0.0 +2024-08-26 17:30:21,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88832.0, ans=0.1 +2024-08-26 17:30:33,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=88885.33333333333, ans=0.0 +2024-08-26 17:30:34,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=88938.66666666667, ans=0.125 +2024-08-26 17:30:39,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.30 vs. limit=22.5 +2024-08-26 17:30:44,520 INFO [train.py:1114] (1/4) Epoch 7, batch 1750, loss[loss=0.1973, simple_loss=0.2572, pruned_loss=0.05041, ctc_loss=0.09139, over 19625.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2966, pruned_loss=0.06719, ctc_loss=0.1256, over 3852421.63 frames. 
], batch size: 45, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:31:08,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89098.66666666667, ans=0.1 +2024-08-26 17:31:18,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=89152.0, ans=0.0 +2024-08-26 17:31:18,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=89152.0, ans=0.125 +2024-08-26 17:31:22,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=89205.33333333333, ans=0.0 +2024-08-26 17:31:23,267 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.487e+02 1.622e+02 1.808e+02 3.869e+02, threshold=3.245e+02, percent-clipped=1.0 +2024-08-26 17:31:24,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=89205.33333333333, ans=0.125 +2024-08-26 17:31:29,441 INFO [train.py:1114] (1/4) Epoch 7, batch 1800, loss[loss=0.2506, simple_loss=0.3115, pruned_loss=0.06988, ctc_loss=0.1251, over 19609.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2969, pruned_loss=0.06723, ctc_loss=0.1256, over 3853282.55 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:31:30,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.26 vs. limit=15.0 +2024-08-26 17:31:34,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5 +2024-08-26 17:31:47,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=89365.33333333333, ans=0.125 +2024-08-26 17:31:57,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=89418.66666666667, ans=0.025 +2024-08-26 17:31:58,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=89418.66666666667, ans=0.125 +2024-08-26 17:32:14,096 INFO [train.py:1114] (1/4) Epoch 7, batch 1850, loss[loss=0.24, simple_loss=0.3069, pruned_loss=0.06274, ctc_loss=0.1192, over 19570.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2968, pruned_loss=0.06717, ctc_loss=0.1254, over 3857561.30 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:32:19,596 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:32:22,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89578.66666666667, ans=0.1 +2024-08-26 17:32:23,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.27 vs. limit=10.0 +2024-08-26 17:32:52,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.39 vs. 
limit=10.0 +2024-08-26 17:32:55,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.590e+02 1.759e+02 2.003e+02 3.443e+02, threshold=3.517e+02, percent-clipped=1.0 +2024-08-26 17:33:01,829 INFO [train.py:1114] (1/4) Epoch 7, batch 1900, loss[loss=0.2343, simple_loss=0.2985, pruned_loss=0.06239, ctc_loss=0.1135, over 19656.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2977, pruned_loss=0.06736, ctc_loss=0.1256, over 3862690.98 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:33:02,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. limit=15.0 +2024-08-26 17:33:03,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=89792.0, ans=0.2 +2024-08-26 17:35:03,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=89898.66666666667, ans=0.0 +2024-08-26 17:35:15,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=90005.33333333333, ans=0.0 +2024-08-26 17:35:19,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90005.33333333333, ans=0.125 +2024-08-26 17:35:21,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=90005.33333333333, ans=0.035 +2024-08-26 17:35:23,459 INFO [train.py:1114] (1/4) Epoch 7, batch 1950, loss[loss=0.2099, simple_loss=0.2762, pruned_loss=0.0524, ctc_loss=0.09679, over 19577.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2979, pruned_loss=0.0668, ctc_loss=0.1248, over 3871580.56 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:35:39,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=90112.0, ans=0.07 +2024-08-26 17:35:53,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=90218.66666666667, ans=0.125 +2024-08-26 17:35:57,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=90218.66666666667, ans=0.125 +2024-08-26 17:36:03,244 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.531e+02 1.657e+02 1.854e+02 3.915e+02, threshold=3.314e+02, percent-clipped=1.0 +2024-08-26 17:36:09,473 INFO [train.py:1114] (1/4) Epoch 7, batch 2000, loss[loss=0.2114, simple_loss=0.2717, pruned_loss=0.05604, ctc_loss=0.09772, over 19656.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2989, pruned_loss=0.0676, ctc_loss=0.1263, over 3855939.06 frames. 
], batch size: 45, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:13,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90325.33333333333, ans=0.125 +2024-08-26 17:36:19,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=90378.66666666667, ans=0.07 +2024-08-26 17:36:21,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:22,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=90378.66666666667, ans=0.0 +2024-08-26 17:36:36,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90485.33333333333, ans=0.125 +2024-08-26 17:36:43,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=90485.33333333333, ans=0.2 +2024-08-26 17:36:51,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=90538.66666666667, ans=0.0 +2024-08-26 17:36:53,972 INFO [train.py:1114] (1/4) Epoch 7, batch 2050, loss[loss=0.226, simple_loss=0.2808, pruned_loss=0.06147, ctc_loss=0.1207, over 19726.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2973, pruned_loss=0.06703, ctc_loss=0.1254, over 3852162.00 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:54,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=90592.0, ans=0.05 +2024-08-26 17:36:58,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90592.0, ans=0.1 +2024-08-26 17:37:04,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90645.33333333333, ans=0.1 +2024-08-26 17:37:18,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=90698.66666666667, ans=0.0 +2024-08-26 17:37:19,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=90752.0, ans=0.0 +2024-08-26 17:37:22,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=90752.0, ans=0.0 +2024-08-26 17:37:25,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.61 vs. limit=15.0 +2024-08-26 17:37:29,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90805.33333333333, ans=0.1 +2024-08-26 17:37:29,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.05 vs. 
limit=15.0 +2024-08-26 17:37:30,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=90805.33333333333, ans=0.025 +2024-08-26 17:37:32,300 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.477e+02 1.642e+02 1.962e+02 4.346e+02, threshold=3.284e+02, percent-clipped=3.0 +2024-08-26 17:37:38,476 INFO [train.py:1114] (1/4) Epoch 7, batch 2100, loss[loss=0.212, simple_loss=0.2777, pruned_loss=0.05222, ctc_loss=0.1049, over 19754.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2963, pruned_loss=0.06605, ctc_loss=0.1236, over 3858882.17 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:37:41,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=90858.66666666667, ans=0.0 +2024-08-26 17:37:55,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=90912.0, ans=0.0 +2024-08-26 17:38:08,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=91018.66666666667, ans=0.09899494936611666 +2024-08-26 17:38:10,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91018.66666666667, ans=0.1 +2024-08-26 17:38:13,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=91018.66666666667, ans=0.125 +2024-08-26 17:38:13,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=91018.66666666667, ans=0.2 +2024-08-26 17:38:13,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91018.66666666667, ans=0.125 +2024-08-26 17:38:16,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91018.66666666667, ans=0.1 +2024-08-26 17:38:24,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=91072.0, ans=0.2 +2024-08-26 17:38:26,427 INFO [train.py:1114] (1/4) Epoch 7, batch 2150, loss[loss=0.214, simple_loss=0.2788, pruned_loss=0.05428, ctc_loss=0.1015, over 19584.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.2956, pruned_loss=0.06594, ctc_loss=0.1233, over 3869609.55 frames. 
], batch size: 52, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:38:28,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=91125.33333333333, ans=0.125 +2024-08-26 17:38:34,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-26 17:38:40,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-26 17:38:42,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-26 17:38:42,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-26 17:38:50,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=91232.0, ans=0.125 +2024-08-26 17:38:50,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. limit=6.0 +2024-08-26 17:39:04,139 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.485e+02 1.702e+02 1.931e+02 2.999e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-26 17:39:07,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=15.0 +2024-08-26 17:39:08,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.68 vs. limit=15.0 +2024-08-26 17:39:10,355 INFO [train.py:1114] (1/4) Epoch 7, batch 2200, loss[loss=0.2706, simple_loss=0.3207, pruned_loss=0.07984, ctc_loss=0.1521, over 19580.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2958, pruned_loss=0.06601, ctc_loss=0.1232, over 3868263.55 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:39:19,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.65 vs. limit=12.0 +2024-08-26 17:39:27,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91498.66666666667, ans=0.125 +2024-08-26 17:39:27,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=91498.66666666667, ans=0.0 +2024-08-26 17:39:32,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.94 vs. limit=15.0 +2024-08-26 17:39:51,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91605.33333333333, ans=0.1 +2024-08-26 17:39:54,462 INFO [train.py:1114] (1/4) Epoch 7, batch 2250, loss[loss=0.2437, simple_loss=0.3024, pruned_loss=0.06727, ctc_loss=0.126, over 19603.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.2959, pruned_loss=0.06601, ctc_loss=0.1234, over 3867162.72 frames. 
], batch size: 55, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:40:05,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.15 vs. limit=15.0 +2024-08-26 17:40:21,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.17 vs. limit=15.0 +2024-08-26 17:40:27,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91818.66666666667, ans=0.125 +2024-08-26 17:40:30,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=91872.0, ans=0.025 +2024-08-26 17:40:32,441 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.538e+02 1.708e+02 1.997e+02 3.315e+02, threshold=3.416e+02, percent-clipped=0.0 +2024-08-26 17:40:33,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=91872.0, ans=0.125 +2024-08-26 17:40:36,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=91872.0, ans=0.125 +2024-08-26 17:40:37,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=91925.33333333333, ans=10.0 +2024-08-26 17:40:38,577 INFO [train.py:1114] (1/4) Epoch 7, batch 2300, loss[loss=0.2236, simple_loss=0.2803, pruned_loss=0.06041, ctc_loss=0.1152, over 19505.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2955, pruned_loss=0.06604, ctc_loss=0.1234, over 3861012.41 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:40:40,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=91925.33333333333, ans=0.0 +2024-08-26 17:40:40,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=91925.33333333333, ans=0.2 +2024-08-26 17:40:46,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91978.66666666667, ans=0.125 +2024-08-26 17:40:55,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=12.0 +2024-08-26 17:41:04,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.11 vs. limit=15.0 +2024-08-26 17:41:15,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=92138.66666666667, ans=0.125 +2024-08-26 17:41:16,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=92138.66666666667, ans=0.0 +2024-08-26 17:41:22,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.36 vs. limit=15.0 +2024-08-26 17:41:22,874 INFO [train.py:1114] (1/4) Epoch 7, batch 2350, loss[loss=0.2596, simple_loss=0.3183, pruned_loss=0.07255, ctc_loss=0.1396, over 19666.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.2956, pruned_loss=0.06626, ctc_loss=0.1237, over 3863615.91 frames. 
], batch size: 63, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:41:29,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=92192.0, ans=0.0 +2024-08-26 17:41:32,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=15.0 +2024-08-26 17:41:43,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=92298.66666666667, ans=0.0 +2024-08-26 17:41:47,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=92298.66666666667, ans=0.0 +2024-08-26 17:41:58,435 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:42:01,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.515e+02 1.664e+02 1.862e+02 3.479e+02, threshold=3.327e+02, percent-clipped=1.0 +2024-08-26 17:42:06,888 INFO [train.py:1114] (1/4) Epoch 7, batch 2400, loss[loss=0.2728, simple_loss=0.3281, pruned_loss=0.07864, ctc_loss=0.1505, over 19295.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2973, pruned_loss=0.06707, ctc_loss=0.125, over 3857314.72 frames. ], batch size: 71, lr: 2.01e-02, grad_scale: 32.0 +2024-08-26 17:42:13,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=92458.66666666667, ans=0.125 +2024-08-26 17:42:13,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92458.66666666667, ans=0.1 +2024-08-26 17:42:22,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=92512.0, ans=0.025 +2024-08-26 17:42:53,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92672.0, ans=0.1 +2024-08-26 17:42:53,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92672.0, ans=0.125 +2024-08-26 17:42:55,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=92725.33333333333, ans=0.125 +2024-08-26 17:42:56,042 INFO [train.py:1114] (1/4) Epoch 7, batch 2450, loss[loss=0.3526, simple_loss=0.359, pruned_loss=0.1259, ctc_loss=0.2362, over 13294.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3022, pruned_loss=0.07103, ctc_loss=0.1327, over 3729139.51 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 16.0 +2024-08-26 17:43:07,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92778.66666666667, ans=0.1 +2024-08-26 17:43:22,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=92885.33333333333, ans=0.0 +2024-08-26 17:43:27,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=92885.33333333333, ans=0.5 +2024-08-26 17:44:23,155 INFO [train.py:1114] (1/4) Epoch 8, batch 0, loss[loss=0.2237, simple_loss=0.2826, pruned_loss=0.05971, ctc_loss=0.1135, over 19413.00 frames. 
], tot_loss[loss=0.2237, simple_loss=0.2826, pruned_loss=0.05971, ctc_loss=0.1135, over 19413.00 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:44:23,155 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 17:44:49,260 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.2003, simple_loss=0.2903, pruned_loss=0.04062, ctc_loss=0.07268, over 944034.00 frames. +2024-08-26 17:44:49,261 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12882MB +2024-08-26 17:44:55,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.675e+02 1.918e+02 2.084e+02 4.365e+02, threshold=3.836e+02, percent-clipped=1.0 +2024-08-26 17:45:17,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.38 vs. limit=12.0 +2024-08-26 17:45:28,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93040.0, ans=0.1 +2024-08-26 17:45:38,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93146.66666666667, ans=0.125 +2024-08-26 17:45:51,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=93146.66666666667, ans=0.025 +2024-08-26 17:45:54,269 INFO [train.py:1114] (1/4) Epoch 8, batch 50, loss[loss=0.2205, simple_loss=0.2716, pruned_loss=0.06251, ctc_loss=0.1112, over 19725.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3016, pruned_loss=0.06955, ctc_loss=0.131, over 845284.01 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:46:23,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=93360.0, ans=0.0 +2024-08-26 17:46:23,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-08-26 17:46:25,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.75 vs. limit=22.5 +2024-08-26 17:46:42,909 INFO [train.py:1114] (1/4) Epoch 8, batch 100, loss[loss=0.2296, simple_loss=0.287, pruned_loss=0.06211, ctc_loss=0.1198, over 19716.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3013, pruned_loss=0.06867, ctc_loss=0.1293, over 1500494.30 frames. 
], batch size: 51, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:46:47,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93466.66666666667, ans=0.1 +2024-08-26 17:46:48,505 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.574e+02 1.749e+02 2.053e+02 3.512e+02, threshold=3.498e+02, percent-clipped=0.0 +2024-08-26 17:47:02,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=93520.0, ans=0.0 +2024-08-26 17:47:04,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=93573.33333333333, ans=0.2 +2024-08-26 17:47:20,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93626.66666666667, ans=0.125 +2024-08-26 17:47:21,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=93626.66666666667, ans=0.125 +2024-08-26 17:47:26,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=93680.0, ans=0.0 +2024-08-26 17:47:32,166 INFO [train.py:1114] (1/4) Epoch 8, batch 150, loss[loss=0.2076, simple_loss=0.2653, pruned_loss=0.05499, ctc_loss=0.09963, over 19719.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2968, pruned_loss=0.06667, ctc_loss=0.125, over 2028940.94 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:47:41,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93786.66666666667, ans=0.125 +2024-08-26 17:47:50,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=93840.0, ans=0.125 +2024-08-26 17:47:51,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=93840.0, ans=0.0 +2024-08-26 17:47:51,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93840.0, ans=0.125 +2024-08-26 17:48:02,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.80 vs. limit=15.0 +2024-08-26 17:48:04,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.31 vs. limit=22.5 +2024-08-26 17:48:20,173 INFO [train.py:1114] (1/4) Epoch 8, batch 200, loss[loss=0.2403, simple_loss=0.298, pruned_loss=0.0662, ctc_loss=0.1257, over 18206.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.2944, pruned_loss=0.06531, ctc_loss=0.1221, over 2436792.66 frames. 
], batch size: 85, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:48:25,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.434e+02 1.574e+02 1.787e+02 2.973e+02, threshold=3.148e+02, percent-clipped=0.0 +2024-08-26 17:48:32,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94053.33333333333, ans=0.125 +2024-08-26 17:48:49,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=94106.66666666667, ans=0.025 +2024-08-26 17:49:12,241 INFO [train.py:1114] (1/4) Epoch 8, batch 250, loss[loss=0.2489, simple_loss=0.3051, pruned_loss=0.06943, ctc_loss=0.1344, over 19411.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2934, pruned_loss=0.06451, ctc_loss=0.1205, over 2756344.82 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:49:17,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=94266.66666666667, ans=0.125 +2024-08-26 17:49:24,837 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:49:27,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=94320.0, ans=0.0 +2024-08-26 17:49:36,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0 +2024-08-26 17:49:38,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=94373.33333333333, ans=0.2 +2024-08-26 17:49:38,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.52 vs. limit=15.0 +2024-08-26 17:50:02,615 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:50:03,371 INFO [train.py:1114] (1/4) Epoch 8, batch 300, loss[loss=0.2337, simple_loss=0.2984, pruned_loss=0.06138, ctc_loss=0.1158, over 19536.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2918, pruned_loss=0.06351, ctc_loss=0.1187, over 3000549.85 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:50:09,198 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.482e+02 1.652e+02 1.879e+02 4.693e+02, threshold=3.305e+02, percent-clipped=1.0 +2024-08-26 17:50:17,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=94586.66666666667, ans=0.0 +2024-08-26 17:50:28,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=94640.0, ans=0.125 +2024-08-26 17:50:30,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=94640.0, ans=0.025 +2024-08-26 17:50:31,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=94693.33333333333, ans=0.0 +2024-08-26 17:50:50,288 INFO [train.py:1114] (1/4) Epoch 8, batch 350, loss[loss=0.2153, simple_loss=0.2696, pruned_loss=0.05933, ctc_loss=0.1059, over 19761.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.293, pruned_loss=0.06379, ctc_loss=0.1191, over 3191442.03 frames. 
], batch size: 48, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:51:04,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94853.33333333333, ans=0.125 +2024-08-26 17:51:04,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=94853.33333333333, ans=0.5 +2024-08-26 17:51:10,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94906.66666666667, ans=0.1 +2024-08-26 17:51:16,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=94906.66666666667, ans=0.0 +2024-08-26 17:51:59,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=95013.33333333333, ans=0.0 +2024-08-26 17:52:17,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=95013.33333333333, ans=0.05 +2024-08-26 17:52:19,168 INFO [train.py:1114] (1/4) Epoch 8, batch 400, loss[loss=0.2349, simple_loss=0.2929, pruned_loss=0.06506, ctc_loss=0.117, over 19491.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2924, pruned_loss=0.06342, ctc_loss=0.1187, over 3343449.44 frames. ], batch size: 54, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:52:24,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=95066.66666666667, ans=0.2 +2024-08-26 17:52:24,625 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.326e+02 1.574e+02 1.829e+02 2.059e+02 4.627e+02, threshold=3.659e+02, percent-clipped=2.0 +2024-08-26 17:52:29,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.86 vs. limit=6.0 +2024-08-26 17:52:38,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=95173.33333333333, ans=0.2 +2024-08-26 17:52:39,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95173.33333333333, ans=0.125 +2024-08-26 17:52:44,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=95173.33333333333, ans=0.0 +2024-08-26 17:52:56,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.93 vs. limit=15.0 +2024-08-26 17:53:05,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=95280.0, ans=0.0 +2024-08-26 17:53:08,493 INFO [train.py:1114] (1/4) Epoch 8, batch 450, loss[loss=0.241, simple_loss=0.3128, pruned_loss=0.06092, ctc_loss=0.1183, over 19623.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2923, pruned_loss=0.06351, ctc_loss=0.1188, over 3451718.36 frames. 
], batch size: 55, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:53:12,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95333.33333333333, ans=0.125 +2024-08-26 17:53:13,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95333.33333333333, ans=0.1 +2024-08-26 17:53:13,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95333.33333333333, ans=0.0 +2024-08-26 17:53:14,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95333.33333333333, ans=0.1 +2024-08-26 17:53:38,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95493.33333333333, ans=0.125 +2024-08-26 17:53:44,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=95493.33333333333, ans=22.5 +2024-08-26 17:53:45,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=95493.33333333333, ans=0.0 +2024-08-26 17:53:53,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=15.0 +2024-08-26 17:53:58,072 INFO [train.py:1114] (1/4) Epoch 8, batch 500, loss[loss=0.2504, simple_loss=0.3091, pruned_loss=0.06897, ctc_loss=0.1343, over 19638.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.291, pruned_loss=0.06309, ctc_loss=0.1183, over 3547664.10 frames. ], batch size: 63, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:54:03,653 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.468e+02 1.609e+02 1.778e+02 4.606e+02, threshold=3.218e+02, percent-clipped=1.0 +2024-08-26 17:54:07,242 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.49 vs. limit=15.0 +2024-08-26 17:54:07,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=95653.33333333333, ans=0.125 +2024-08-26 17:54:08,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=95653.33333333333, ans=0.0 +2024-08-26 17:54:26,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0 +2024-08-26 17:54:42,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=95760.0, ans=0.2 +2024-08-26 17:56:20,603 INFO [train.py:1114] (1/4) Epoch 8, batch 550, loss[loss=0.2529, simple_loss=0.3077, pruned_loss=0.07144, ctc_loss=0.1378, over 19289.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.292, pruned_loss=0.06367, ctc_loss=0.1192, over 3609336.19 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:56:21,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.32 vs. 
limit=15.0 +2024-08-26 17:57:15,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=95866.66666666667, ans=0.125 +2024-08-26 17:57:15,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=95866.66666666667, ans=0.125 +2024-08-26 17:57:30,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=15.0 +2024-08-26 17:57:35,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=95973.33333333333, ans=0.125 +2024-08-26 17:57:58,502 INFO [train.py:1114] (1/4) Epoch 8, batch 600, loss[loss=0.2479, simple_loss=0.3126, pruned_loss=0.06656, ctc_loss=0.1252, over 19378.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2923, pruned_loss=0.06378, ctc_loss=0.1192, over 3666215.50 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:57:59,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=96133.33333333333, ans=0.125 +2024-08-26 17:58:05,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.508e+02 1.654e+02 1.896e+02 3.415e+02, threshold=3.309e+02, percent-clipped=1.0 +2024-08-26 17:58:24,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=96240.0, ans=0.0 +2024-08-26 17:58:49,400 INFO [train.py:1114] (1/4) Epoch 8, batch 650, loss[loss=0.2298, simple_loss=0.2902, pruned_loss=0.05949, ctc_loss=0.126, over 19759.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2915, pruned_loss=0.06335, ctc_loss=0.1186, over 3716209.91 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 17:58:55,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96400.0, ans=0.1 +2024-08-26 17:59:01,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96453.33333333333, ans=0.1 +2024-08-26 17:59:07,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=96506.66666666667, ans=0.0 +2024-08-26 17:59:13,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=96506.66666666667, ans=0.0 +2024-08-26 17:59:16,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=96560.0, ans=0.025 +2024-08-26 17:59:28,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=96613.33333333333, ans=0.0 +2024-08-26 17:59:33,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=96613.33333333333, ans=0.125 +2024-08-26 17:59:34,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.63 vs. limit=15.0 +2024-08-26 17:59:36,090 INFO [train.py:1114] (1/4) Epoch 8, batch 700, loss[loss=0.2159, simple_loss=0.2761, pruned_loss=0.05636, ctc_loss=0.1074, over 19719.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2921, pruned_loss=0.06358, ctc_loss=0.1191, over 3749112.39 frames. 
], batch size: 51, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 17:59:39,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=96666.66666666667, ans=0.5 +2024-08-26 17:59:41,809 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.481e+02 1.644e+02 1.817e+02 3.294e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-26 17:59:47,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.27 vs. limit=12.0 +2024-08-26 18:00:27,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=96933.33333333333, ans=0.04949747468305833 +2024-08-26 18:00:27,685 INFO [train.py:1114] (1/4) Epoch 8, batch 750, loss[loss=0.2438, simple_loss=0.3061, pruned_loss=0.06629, ctc_loss=0.122, over 19493.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2919, pruned_loss=0.06364, ctc_loss=0.1191, over 3774857.95 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 18:00:29,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.37 vs. limit=15.0 +2024-08-26 18:00:31,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.70 vs. limit=10.0 +2024-08-26 18:00:41,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=96986.66666666667, ans=0.125 +2024-08-26 18:00:44,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.whiten.whitening_limit, batch_count=96986.66666666667, ans=15.0 +2024-08-26 18:00:45,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.77 vs. limit=15.0 +2024-08-26 18:00:55,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. limit=6.0 +2024-08-26 18:01:19,063 INFO [train.py:1114] (1/4) Epoch 8, batch 800, loss[loss=0.205, simple_loss=0.2641, pruned_loss=0.0537, ctc_loss=0.09616, over 19804.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.292, pruned_loss=0.06365, ctc_loss=0.119, over 3796328.98 frames. ], batch size: 49, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 18:01:24,569 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.524e+02 1.729e+02 2.039e+02 3.596e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 18:01:24,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=97200.0, ans=0.0 +2024-08-26 18:01:26,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. 
limit=6.0 +2024-08-26 18:01:37,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=97306.66666666667, ans=0.2 +2024-08-26 18:01:42,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97306.66666666667, ans=0.1 +2024-08-26 18:01:45,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97306.66666666667, ans=0.125 +2024-08-26 18:01:53,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=97360.0, ans=0.025 +2024-08-26 18:02:06,303 INFO [train.py:1114] (1/4) Epoch 8, batch 850, loss[loss=0.2635, simple_loss=0.3145, pruned_loss=0.07613, ctc_loss=0.1507, over 19646.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.2918, pruned_loss=0.06347, ctc_loss=0.1185, over 3815639.42 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0 +2024-08-26 18:02:06,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=97466.66666666667, ans=0.2 +2024-08-26 18:02:10,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=97466.66666666667, ans=0.125 +2024-08-26 18:02:11,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0 +2024-08-26 18:02:11,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97466.66666666667, ans=0.125 +2024-08-26 18:02:21,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0 +2024-08-26 18:02:22,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97520.0, ans=0.125 +2024-08-26 18:02:23,105 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:02:33,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97626.66666666667, ans=0.0 +2024-08-26 18:02:33,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=97626.66666666667, ans=0.5 +2024-08-26 18:02:39,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=97626.66666666667, ans=0.125 +2024-08-26 18:02:53,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97680.0, ans=0.125 +2024-08-26 18:02:57,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=97733.33333333333, ans=0.0 +2024-08-26 18:02:58,337 INFO [train.py:1114] (1/4) Epoch 8, batch 900, loss[loss=0.2188, simple_loss=0.2759, pruned_loss=0.05863, ctc_loss=0.1108, over 19428.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2923, pruned_loss=0.06379, ctc_loss=0.119, over 3820034.18 frames. 
], batch size: 48, lr: 1.85e-02, grad_scale: 32.0 +2024-08-26 18:03:02,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=97733.33333333333, ans=0.125 +2024-08-26 18:03:03,993 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.578e+02 1.704e+02 2.106e+02 3.434e+02, threshold=3.409e+02, percent-clipped=0.0 +2024-08-26 18:03:06,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=97733.33333333333, ans=0.125 +2024-08-26 18:03:42,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=97946.66666666667, ans=0.125 +2024-08-26 18:03:45,507 INFO [train.py:1114] (1/4) Epoch 8, batch 950, loss[loss=0.2052, simple_loss=0.2638, pruned_loss=0.05438, ctc_loss=0.09453, over 19506.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.2927, pruned_loss=0.06407, ctc_loss=0.1197, over 3821119.37 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 16.0 +2024-08-26 18:03:51,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98000.0, ans=0.1 +2024-08-26 18:04:07,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.05 vs. limit=15.0 +2024-08-26 18:04:26,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.97 vs. limit=15.0 +2024-08-26 18:04:37,644 INFO [train.py:1114] (1/4) Epoch 8, batch 1000, loss[loss=0.2121, simple_loss=0.2737, pruned_loss=0.05451, ctc_loss=0.1036, over 19838.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.2936, pruned_loss=0.06428, ctc_loss=0.1203, over 3817448.78 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 16.0 +2024-08-26 18:04:44,376 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.497e+02 1.652e+02 1.874e+02 4.992e+02, threshold=3.305e+02, percent-clipped=2.0 +2024-08-26 18:04:45,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=98266.66666666667, ans=0.125 +2024-08-26 18:04:49,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=98320.0, ans=0.0 +2024-08-26 18:04:50,431 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:04:54,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98320.0, ans=0.1 +2024-08-26 18:04:58,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.17 vs. limit=22.5 +2024-08-26 18:05:01,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=98373.33333333333, ans=0.125 +2024-08-26 18:05:09,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=98426.66666666667, ans=0.125 +2024-08-26 18:05:10,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.21 vs. 
limit=22.5 +2024-08-26 18:05:15,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.62 vs. limit=22.5 +2024-08-26 18:05:20,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=98480.0, ans=0.0 +2024-08-26 18:05:22,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=98480.0, ans=0.0 +2024-08-26 18:05:24,678 INFO [train.py:1114] (1/4) Epoch 8, batch 1050, loss[loss=0.2367, simple_loss=0.299, pruned_loss=0.06408, ctc_loss=0.1155, over 19833.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.2928, pruned_loss=0.06406, ctc_loss=0.1198, over 3823813.01 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 16.0 +2024-08-26 18:05:36,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=98586.66666666667, ans=0.5 +2024-08-26 18:05:48,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98586.66666666667, ans=0.1 +2024-08-26 18:06:02,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98693.33333333333, ans=0.1 +2024-08-26 18:06:03,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=98693.33333333333, ans=0.0 +2024-08-26 18:06:05,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=98693.33333333333, ans=0.025 +2024-08-26 18:06:07,675 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:06:18,145 INFO [train.py:1114] (1/4) Epoch 8, batch 1100, loss[loss=0.2166, simple_loss=0.2776, pruned_loss=0.05648, ctc_loss=0.1067, over 19576.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2922, pruned_loss=0.0635, ctc_loss=0.1188, over 3831686.92 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0 +2024-08-26 18:06:24,661 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.469e+02 1.560e+02 1.744e+02 3.443e+02, threshold=3.121e+02, percent-clipped=2.0 +2024-08-26 18:06:28,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=98853.33333333333, ans=0.125 +2024-08-26 18:06:36,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.21 vs. limit=10.0 +2024-08-26 18:06:37,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98906.66666666667, ans=0.1 +2024-08-26 18:06:44,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.05 vs. 
limit=15.0 +2024-08-26 18:06:46,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=98906.66666666667, ans=0.125 +2024-08-26 18:06:49,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=98960.0, ans=0.125 +2024-08-26 18:07:00,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99013.33333333333, ans=0.1 +2024-08-26 18:07:10,068 INFO [train.py:1114] (1/4) Epoch 8, batch 1150, loss[loss=0.2302, simple_loss=0.2865, pruned_loss=0.06339, ctc_loss=0.1178, over 19597.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2926, pruned_loss=0.06368, ctc_loss=0.1192, over 3830909.21 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0 +2024-08-26 18:07:12,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=99066.66666666667, ans=0.125 +2024-08-26 18:07:17,406 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0 +2024-08-26 18:07:32,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=99173.33333333333, ans=0.125 +2024-08-26 18:07:35,406 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.75 vs. limit=15.0 +2024-08-26 18:07:39,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.23 vs. limit=22.5 +2024-08-26 18:07:49,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99280.0, ans=0.1 +2024-08-26 18:07:49,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.92 vs. limit=12.0 +2024-08-26 18:07:51,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=99280.0, ans=0.025 +2024-08-26 18:07:51,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.84 vs. limit=15.0 +2024-08-26 18:07:57,682 INFO [train.py:1114] (1/4) Epoch 8, batch 1200, loss[loss=0.2306, simple_loss=0.2989, pruned_loss=0.05912, ctc_loss=0.1098, over 19842.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2933, pruned_loss=0.06395, ctc_loss=0.1197, over 3824781.15 frames. 
], batch size: 57, lr: 1.84e-02, grad_scale: 32.0 +2024-08-26 18:08:04,249 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.491e+02 1.608e+02 2.003e+02 2.840e+02, threshold=3.216e+02, percent-clipped=0.0 +2024-08-26 18:08:08,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=99386.66666666667, ans=0.025 +2024-08-26 18:08:39,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=99546.66666666667, ans=0.0 +2024-08-26 18:08:41,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=99546.66666666667, ans=0.05 +2024-08-26 18:08:47,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=99546.66666666667, ans=0.0 +2024-08-26 18:08:49,186 INFO [train.py:1114] (1/4) Epoch 8, batch 1250, loss[loss=0.2432, simple_loss=0.3071, pruned_loss=0.06589, ctc_loss=0.1187, over 19526.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2935, pruned_loss=0.06351, ctc_loss=0.1189, over 3843129.06 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0 +2024-08-26 18:08:49,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.81 vs. limit=22.5 +2024-08-26 18:08:56,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=99600.0, ans=0.0 +2024-08-26 18:09:01,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=99653.33333333333, ans=0.025 +2024-08-26 18:09:12,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-08-26 18:09:14,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=99706.66666666667, ans=0.0 +2024-08-26 18:09:40,598 INFO [train.py:1114] (1/4) Epoch 8, batch 1300, loss[loss=0.2537, simple_loss=0.3031, pruned_loss=0.07361, ctc_loss=0.1428, over 18890.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2917, pruned_loss=0.06259, ctc_loss=0.1174, over 3846658.49 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:09:42,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99866.66666666667, ans=0.125 +2024-08-26 18:09:47,133 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.481e+02 1.661e+02 1.866e+02 3.142e+02, threshold=3.323e+02, percent-clipped=0.0 +2024-08-26 18:10:03,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=99973.33333333333, ans=0.0 +2024-08-26 18:10:26,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=100133.33333333333, ans=0.2 +2024-08-26 18:10:27,282 INFO [train.py:1114] (1/4) Epoch 8, batch 1350, loss[loss=0.2239, simple_loss=0.2939, pruned_loss=0.05581, ctc_loss=0.106, over 19792.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2916, pruned_loss=0.06269, ctc_loss=0.1173, over 3858031.17 frames. 
], batch size: 54, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:10:28,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=100133.33333333333, ans=0.04949747468305833 +2024-08-26 18:10:47,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.34 vs. limit=22.5 +2024-08-26 18:10:53,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100240.0, ans=0.125 +2024-08-26 18:10:54,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100293.33333333333, ans=0.125 +2024-08-26 18:11:02,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100293.33333333333, ans=0.1 +2024-08-26 18:11:03,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=100293.33333333333, ans=0.95 +2024-08-26 18:11:07,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=100346.66666666667, ans=0.2 +2024-08-26 18:11:14,665 INFO [train.py:1114] (1/4) Epoch 8, batch 1400, loss[loss=0.2046, simple_loss=0.2624, pruned_loss=0.05417, ctc_loss=0.09603, over 19658.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.2918, pruned_loss=0.06292, ctc_loss=0.1175, over 3865207.46 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:11:23,741 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.577e+02 1.859e+02 2.331e+02 3.237e+02, threshold=3.718e+02, percent-clipped=0.0 +2024-08-26 18:11:32,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=100453.33333333333, ans=0.5 +2024-08-26 18:11:36,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=100453.33333333333, ans=0.2 +2024-08-26 18:11:51,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=100560.0, ans=0.125 +2024-08-26 18:11:55,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100560.0, ans=0.125 +2024-08-26 18:12:09,362 INFO [train.py:1114] (1/4) Epoch 8, batch 1450, loss[loss=0.2619, simple_loss=0.3127, pruned_loss=0.07759, ctc_loss=0.1398, over 19693.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2927, pruned_loss=0.0634, ctc_loss=0.1184, over 3863108.04 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:12:35,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=100773.33333333333, ans=0.0 +2024-08-26 18:12:39,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100773.33333333333, ans=0.125 +2024-08-26 18:13:00,711 INFO [train.py:1114] (1/4) Epoch 8, batch 1500, loss[loss=0.2478, simple_loss=0.305, pruned_loss=0.06925, ctc_loss=0.1304, over 19590.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2927, pruned_loss=0.06327, ctc_loss=0.1183, over 3862434.42 frames. 
], batch size: 57, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:13:07,545 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.594e+02 1.806e+02 5.150e+02, threshold=3.189e+02, percent-clipped=1.0 +2024-08-26 18:13:08,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100933.33333333333, ans=0.125 +2024-08-26 18:13:23,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=101040.0, ans=0.025 +2024-08-26 18:13:48,297 INFO [train.py:1114] (1/4) Epoch 8, batch 1550, loss[loss=0.251, simple_loss=0.3083, pruned_loss=0.07087, ctc_loss=0.1297, over 19627.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2931, pruned_loss=0.06379, ctc_loss=0.119, over 3846907.76 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:13:55,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=101200.0, ans=0.09899494936611666 +2024-08-26 18:14:03,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=101253.33333333333, ans=0.2 +2024-08-26 18:14:40,869 INFO [train.py:1114] (1/4) Epoch 8, batch 1600, loss[loss=0.2466, simple_loss=0.3076, pruned_loss=0.06721, ctc_loss=0.1277, over 19828.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2929, pruned_loss=0.06376, ctc_loss=0.1192, over 3836701.43 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:14:41,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=101466.66666666667, ans=0.0 +2024-08-26 18:14:42,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=15.0 +2024-08-26 18:14:47,302 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.562e+02 1.716e+02 2.059e+02 3.797e+02, threshold=3.431e+02, percent-clipped=2.0 +2024-08-26 18:14:53,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=101520.0, ans=0.2 +2024-08-26 18:15:05,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=101573.33333333333, ans=0.0 +2024-08-26 18:15:19,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101626.66666666667, ans=0.125 +2024-08-26 18:15:32,087 INFO [train.py:1114] (1/4) Epoch 8, batch 1650, loss[loss=0.2296, simple_loss=0.2988, pruned_loss=0.05835, ctc_loss=0.1094, over 19665.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.2925, pruned_loss=0.06365, ctc_loss=0.119, over 3832380.28 frames. 
], batch size: 59, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:15:36,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=101733.33333333333, ans=0.0 +2024-08-26 18:15:47,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=101786.66666666667, ans=0.0 +2024-08-26 18:16:01,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=101893.33333333333, ans=0.125 +2024-08-26 18:16:17,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=101946.66666666667, ans=0.0 +2024-08-26 18:16:18,705 INFO [train.py:1114] (1/4) Epoch 8, batch 1700, loss[loss=0.1964, simple_loss=0.2496, pruned_loss=0.05175, ctc_loss=0.09923, over 19678.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2918, pruned_loss=0.06298, ctc_loss=0.1176, over 3846502.58 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:16:21,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=102000.0, ans=0.125 +2024-08-26 18:16:24,732 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:16:25,301 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.495e+02 1.737e+02 2.089e+02 3.401e+02, threshold=3.475e+02, percent-clipped=0.0 +2024-08-26 18:16:31,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.50 vs. limit=22.5 +2024-08-26 18:16:34,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=102053.33333333333, ans=0.125 +2024-08-26 18:16:45,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=102160.0, ans=0.0 +2024-08-26 18:17:03,797 INFO [train.py:1114] (1/4) Epoch 8, batch 1750, loss[loss=0.1834, simple_loss=0.2442, pruned_loss=0.04535, ctc_loss=0.07969, over 19665.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.2912, pruned_loss=0.06258, ctc_loss=0.117, over 3850806.49 frames. ], batch size: 45, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:17:25,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=102373.33333333333, ans=0.125 +2024-08-26 18:17:39,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=102480.0, ans=0.125 +2024-08-26 18:17:46,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102480.0, ans=0.125 +2024-08-26 18:17:48,504 INFO [train.py:1114] (1/4) Epoch 8, batch 1800, loss[loss=0.2334, simple_loss=0.2995, pruned_loss=0.06049, ctc_loss=0.1158, over 19613.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2913, pruned_loss=0.06245, ctc_loss=0.1167, over 3852697.43 frames. 
], batch size: 55, lr: 1.81e-02, grad_scale: 32.0 +2024-08-26 18:17:56,850 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.517e+02 1.665e+02 1.949e+02 3.105e+02, threshold=3.330e+02, percent-clipped=0.0 +2024-08-26 18:18:16,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102640.0, ans=0.1 +2024-08-26 18:18:25,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=102693.33333333333, ans=0.125 +2024-08-26 18:18:36,732 INFO [train.py:1114] (1/4) Epoch 8, batch 1850, loss[loss=0.2392, simple_loss=0.3057, pruned_loss=0.0628, ctc_loss=0.118, over 19586.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.2911, pruned_loss=0.06237, ctc_loss=0.1165, over 3856138.23 frames. ], batch size: 57, lr: 1.81e-02, grad_scale: 32.0 +2024-08-26 18:18:36,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102800.0, ans=0.125 +2024-08-26 18:18:39,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102800.0, ans=0.1 +2024-08-26 18:18:57,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=102906.66666666667, ans=0.125 +2024-08-26 18:18:59,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=102906.66666666667, ans=0.0 +2024-08-26 18:19:00,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=102906.66666666667, ans=0.2 +2024-08-26 18:19:01,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.58 vs. limit=15.0 +2024-08-26 18:19:14,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=103013.33333333333, ans=0.025 +2024-08-26 18:19:21,232 INFO [train.py:1114] (1/4) Epoch 8, batch 1900, loss[loss=0.2349, simple_loss=0.3052, pruned_loss=0.06033, ctc_loss=0.1101, over 19658.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2914, pruned_loss=0.0624, ctc_loss=0.1166, over 3860755.12 frames. 
], batch size: 59, lr: 1.81e-02, grad_scale: 16.0 +2024-08-26 18:19:22,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103066.66666666667, ans=0.0 +2024-08-26 18:19:28,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.533e+02 1.714e+02 2.014e+02 3.062e+02, threshold=3.427e+02, percent-clipped=0.0 +2024-08-26 18:19:36,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103120.0, ans=0.125 +2024-08-26 18:19:40,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103173.33333333333, ans=0.125 +2024-08-26 18:19:42,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103173.33333333333, ans=0.125 +2024-08-26 18:19:48,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=103226.66666666667, ans=0.0 +2024-08-26 18:19:49,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=103226.66666666667, ans=0.125 +2024-08-26 18:19:52,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103226.66666666667, ans=0.125 +2024-08-26 18:20:01,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103280.0, ans=0.125 +2024-08-26 18:20:04,890 INFO [train.py:1114] (1/4) Epoch 8, batch 1950, loss[loss=0.2295, simple_loss=0.2859, pruned_loss=0.06356, ctc_loss=0.1149, over 19599.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.2929, pruned_loss=0.0629, ctc_loss=0.1175, over 3870260.74 frames. ], batch size: 52, lr: 1.81e-02, grad_scale: 16.0 +2024-08-26 18:20:10,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=103333.33333333333, ans=0.125 +2024-08-26 18:20:15,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=103386.66666666667, ans=0.0 +2024-08-26 18:20:16,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.44 vs. limit=15.0 +2024-08-26 18:20:29,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=103440.0, ans=0.125 +2024-08-26 18:20:32,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=103493.33333333333, ans=0.2 +2024-08-26 18:20:33,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=103493.33333333333, ans=0.025 +2024-08-26 18:20:44,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103546.66666666667, ans=0.125 +2024-08-26 18:20:51,118 INFO [train.py:1114] (1/4) Epoch 8, batch 2000, loss[loss=0.207, simple_loss=0.2587, pruned_loss=0.05605, ctc_loss=0.1077, over 19638.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2935, pruned_loss=0.06349, ctc_loss=0.1185, over 3855069.17 frames. 
], batch size: 45, lr: 1.81e-02, grad_scale: 16.0 +2024-08-26 18:20:54,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=103600.0, ans=0.125 +2024-08-26 18:21:00,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.619e+02 1.835e+02 2.136e+02 5.632e+02, threshold=3.670e+02, percent-clipped=2.0 +2024-08-26 18:21:05,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=103653.33333333333, ans=0.0 +2024-08-26 18:21:13,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.93 vs. limit=22.5 +2024-08-26 18:21:24,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103760.0, ans=0.1 +2024-08-26 18:21:28,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.36 vs. limit=22.5 +2024-08-26 18:21:32,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=103813.33333333333, ans=0.0 +2024-08-26 18:21:36,066 INFO [train.py:1114] (1/4) Epoch 8, batch 2050, loss[loss=0.2161, simple_loss=0.2769, pruned_loss=0.05736, ctc_loss=0.1015, over 19708.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2923, pruned_loss=0.06308, ctc_loss=0.1176, over 3850588.02 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:21:37,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=103866.66666666667, ans=0.0 +2024-08-26 18:21:38,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103866.66666666667, ans=0.1 +2024-08-26 18:21:46,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=103920.0, ans=0.2 +2024-08-26 18:21:57,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=103973.33333333333, ans=10.0 +2024-08-26 18:22:04,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=104026.66666666667, ans=0.125 +2024-08-26 18:22:06,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.14 vs. limit=15.0 +2024-08-26 18:22:07,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=104026.66666666667, ans=0.0 +2024-08-26 18:22:09,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=104026.66666666667, ans=0.0 +2024-08-26 18:22:10,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=104080.0, ans=0.2 +2024-08-26 18:22:13,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0 +2024-08-26 18:22:19,591 INFO [train.py:1114] (1/4) Epoch 8, batch 2100, loss[loss=0.2204, simple_loss=0.2832, pruned_loss=0.057, ctc_loss=0.1089, over 19768.00 frames. 
], tot_loss[loss=0.2312, simple_loss=0.2912, pruned_loss=0.06238, ctc_loss=0.1163, over 3856941.52 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:22:24,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=104133.33333333333, ans=0.0 +2024-08-26 18:22:27,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.502e+02 1.673e+02 2.007e+02 2.886e+02, threshold=3.346e+02, percent-clipped=0.0 +2024-08-26 18:22:28,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=104186.66666666667, ans=0.07 +2024-08-26 18:22:31,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.36 vs. limit=15.0 +2024-08-26 18:22:35,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=104186.66666666667, ans=0.07 +2024-08-26 18:22:42,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. limit=10.0 +2024-08-26 18:22:45,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. limit=10.0 +2024-08-26 18:22:47,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.99 vs. limit=22.5 +2024-08-26 18:22:49,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=104293.33333333333, ans=0.0 +2024-08-26 18:22:55,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.44 vs. limit=10.0 +2024-08-26 18:22:57,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104346.66666666667, ans=0.1 +2024-08-26 18:22:58,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=104346.66666666667, ans=0.125 +2024-08-26 18:23:02,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104400.0, ans=0.0 +2024-08-26 18:23:03,052 INFO [train.py:1114] (1/4) Epoch 8, batch 2150, loss[loss=0.2203, simple_loss=0.2826, pruned_loss=0.05685, ctc_loss=0.1106, over 19573.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2903, pruned_loss=0.06188, ctc_loss=0.1155, over 3867606.77 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:23:11,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104453.33333333333, ans=0.125 +2024-08-26 18:23:30,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-26 18:23:37,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=104613.33333333333, ans=0.02 +2024-08-26 18:23:43,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.94 vs. 
limit=15.0 +2024-08-26 18:23:46,683 INFO [train.py:1114] (1/4) Epoch 8, batch 2200, loss[loss=0.2395, simple_loss=0.2999, pruned_loss=0.06553, ctc_loss=0.12, over 19584.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.29, pruned_loss=0.06171, ctc_loss=0.1154, over 3866375.89 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:23:54,542 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.596e+02 1.839e+02 2.214e+02 3.376e+02, threshold=3.678e+02, percent-clipped=1.0 +2024-08-26 18:24:10,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=12.0 +2024-08-26 18:24:12,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=15.0 +2024-08-26 18:24:30,567 INFO [train.py:1114] (1/4) Epoch 8, batch 2250, loss[loss=0.2268, simple_loss=0.287, pruned_loss=0.06069, ctc_loss=0.1129, over 19622.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.2905, pruned_loss=0.06199, ctc_loss=0.1157, over 3866484.83 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:24:40,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104986.66666666667, ans=0.125 +2024-08-26 18:25:02,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=105093.33333333333, ans=0.025 +2024-08-26 18:25:16,099 INFO [train.py:1114] (1/4) Epoch 8, batch 2300, loss[loss=0.2176, simple_loss=0.2751, pruned_loss=0.05904, ctc_loss=0.1053, over 19506.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2894, pruned_loss=0.06174, ctc_loss=0.1153, over 3861007.67 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 16.0
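The `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ...` lines above list the min/25%/median/75%/max of recently observed gradient norms, the clipping threshold, and the share of recent batches that were clipped. Throughout this log the threshold equals Clipping_scale times the median, e.g. 2.0 * 1.839e+02 = 3.678e+02 in the batch-2200 warning. A minimal sketch of that mechanism under those assumptions (the class and its names are illustrative, not icefall's actual optim.py):

```python
# Illustrative sketch only: how a "threshold=3.678e+02" can follow from
# recent grad-norm statistics when threshold = clipping_scale * median.
from collections import deque

import torch


class GradNormClipper:
    """Keeps a window of recent per-batch gradient norms and clips the
    current gradient against clipping_scale times their median."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent total grad norms

    def clip_(self, params) -> float:
        grads = [p.grad.detach().flatten() for p in params if p.grad is not None]
        if not grads:
            return 0.0
        norm = torch.cat(grads).norm().item()
        self.norms.append(norm)
        # min / 25% / 50% / 75% / max, as printed in the WARNING lines
        quartiles = torch.quantile(
            torch.tensor(list(self.norms)),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * quartiles[2].item()
        if norm > threshold:  # scale every gradient down onto the threshold
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return threshold
```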
+2024-08-26 18:25:21,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=105200.0, ans=0.125 +2024-08-26 18:25:23,759 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.553e+02 1.767e+02 2.002e+02 4.280e+02, threshold=3.534e+02, percent-clipped=3.0 +2024-08-26 18:25:24,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=105253.33333333333, ans=0.125 +2024-08-26 18:25:27,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=105253.33333333333, ans=0.0 +2024-08-26 18:25:32,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=105306.66666666667, ans=0.125 +2024-08-26 18:25:38,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105306.66666666667, ans=0.125 +2024-08-26 18:25:41,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=105360.0, ans=0.2 +2024-08-26 18:25:49,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=105413.33333333333, ans=0.2 +2024-08-26 18:25:49,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=105413.33333333333, ans=0.125 +2024-08-26 18:25:58,622 INFO [train.py:1114] (1/4) Epoch 8, batch 2350, loss[loss=0.2517, simple_loss=0.3086, pruned_loss=0.07014, ctc_loss=0.1364, over 19670.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2895, pruned_loss=0.06188, ctc_loss=0.1153, over 3864515.36 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 16.0 +2024-08-26 18:25:59,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=105466.66666666667, ans=15.0 +2024-08-26 18:26:15,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=105573.33333333333, ans=0.125 +2024-08-26 18:26:39,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.69 vs. limit=15.0 +2024-08-26 18:26:42,917 INFO [train.py:1114] (1/4) Epoch 8, batch 2400, loss[loss=0.2418, simple_loss=0.2941, pruned_loss=0.06899, ctc_loss=0.1287, over 19309.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2917, pruned_loss=0.06273, ctc_loss=0.1166, over 3859355.61 frames. 
], batch size: 71, lr: 1.79e-02, grad_scale: 32.0 +2024-08-26 18:26:43,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=105733.33333333333, ans=0.09899494936611666 +2024-08-26 18:26:49,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=105733.33333333333, ans=0.125 +2024-08-26 18:26:50,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.526e+02 1.733e+02 1.998e+02 3.354e+02, threshold=3.467e+02, percent-clipped=0.0 +2024-08-26 18:26:56,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=105786.66666666667, ans=0.2 +2024-08-26 18:26:56,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.28 vs. limit=15.0 +2024-08-26 18:27:01,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105840.0, ans=0.125 +2024-08-26 18:27:09,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105893.33333333333, ans=0.1 +2024-08-26 18:27:27,051 INFO [train.py:1114] (1/4) Epoch 8, batch 2450, loss[loss=0.312, simple_loss=0.3348, pruned_loss=0.105, ctc_loss=0.1977, over 13681.00 frames. ], tot_loss[loss=0.239, simple_loss=0.2963, pruned_loss=0.06619, ctc_loss=0.1234, over 3729889.19 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 16.0 +2024-08-26 18:27:31,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=106000.0, ans=0.0 +2024-08-26 18:27:34,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106000.0, ans=0.1 +2024-08-26 18:27:52,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=106106.66666666667, ans=0.0 +2024-08-26 18:27:56,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=106160.0, ans=0.125 +2024-08-26 18:28:00,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106160.0, ans=0.1 +2024-08-26 18:28:47,198 INFO [train.py:1114] (1/4) Epoch 9, batch 0, loss[loss=0.2431, simple_loss=0.2925, pruned_loss=0.06991, ctc_loss=0.1346, over 19801.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2925, pruned_loss=0.06991, ctc_loss=0.1346, over 19801.00 frames. ], batch size: 49, lr: 1.69e-02, grad_scale: 32.0 +2024-08-26 18:28:47,198 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 18:28:54,385 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.3972, 4.0880, 3.9306, 3.8133], device='cuda:1') +2024-08-26 18:28:56,818 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.1927, simple_loss=0.2844, pruned_loss=0.03737, ctc_loss=0.06585, over 944034.00 frames. 
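Most entries in this log are `INFO [scaling.py:214] ScheduledFloat: name=..., batch_count=..., ans=...` lines: they print the current value (`ans`) of a hyperparameter (dropout probabilities, balancer probs, skip rates, bypass scales) that is scheduled on the global `batch_count`. A plausible reading is a piecewise-linear schedule clamped at both ends; the sketch below is a guess at that behaviour, with invented breakpoints, not the real scaling.py implementation:

```python
# A minimal sketch, assuming ScheduledFloat is a piecewise-linear schedule
# over batch_count; breakpoints here are invented for illustration.
import bisect


def scheduled_float(schedule: list[tuple[float, float]], batch_count: float) -> float:
    """schedule: sorted (batch_count, value) breakpoints; the value is held
    constant outside the range and linearly interpolated inside it."""
    xs = [x for x, _ in schedule]
    ys = [y for _, y in schedule]
    if batch_count <= xs[0]:
        return ys[0]
    if batch_count >= xs[-1]:
        return ys[-1]
    i = bisect.bisect_right(xs, batch_count)
    x0, x1, y0, y1 = xs[i - 1], xs[i], ys[i - 1], ys[i]
    return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)


# e.g. a dropout_p decaying from 0.3 to 0.1 over the first 20k batches
# would read ans=0.1 by batch_count=105200.0, as in the entries above:
print(scheduled_float([(0.0, 0.3), (20000.0, 0.1)], 105200.0))  # -> 0.1
```

The constant late-training values seen above (e.g. `ans=0.1` for every `feed_forward1.out_proj.dropout_p` entry) are what such end-clamping would produce once `batch_count` passes the last breakpoint.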
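The `INFO [scaling.py:1024] Whitening: name=..., metric=... vs. limit=...` lines compare a measured statistic of a module's output against a limit (e.g. `metric=13.69 vs. limit=15.0` a few entries up), presumably to decide whether to push the activations back toward a whiter covariance. The logged values behave like the eigenvalue-dispersion ratio sketched below; this formula is an assumption for illustration, not the actual scaling.py code:

```python
# A hedged sketch of a covariance-dispersion metric consistent with the
# logged values: about 1.0 when a layer's output channels are already
# white, rising toward num_channels when one direction dominates.
import torch


def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels) activations from one module."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.t() @ x) / x.shape[0]  # (C, C) channel covariance
    num_channels = cov.shape[0]
    metric = num_channels * torch.trace(cov @ cov) / torch.trace(cov) ** 2
    return metric.item()


white = torch.randn(10000, 192)
print(whitening_metric(white))  # ~1.0: nothing to penalize
print(whitening_metric(white * torch.linspace(0.1, 3.0, 192)))  # > 1.0
```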
+2024-08-26 18:28:56,819 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12882MB +2024-08-26 18:28:59,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=106208.0, ans=0.2 +2024-08-26 18:29:11,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=106261.33333333333, ans=0.0 +2024-08-26 18:29:14,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106261.33333333333, ans=0.0 +2024-08-26 18:29:16,433 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 1.688e+02 1.849e+02 2.025e+02 3.204e+02, threshold=3.698e+02, percent-clipped=0.0 +2024-08-26 18:29:22,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=106314.66666666667, ans=0.0 +2024-08-26 18:29:22,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=106314.66666666667, ans=0.0 +2024-08-26 18:29:23,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106368.0, ans=0.125 +2024-08-26 18:29:26,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.92 vs. limit=15.0 +2024-08-26 18:29:33,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=106421.33333333333, ans=0.2 +2024-08-26 18:29:40,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=106421.33333333333, ans=0.125 +2024-08-26 18:29:43,039 INFO [train.py:1114] (1/4) Epoch 9, batch 50, loss[loss=0.1972, simple_loss=0.2603, pruned_loss=0.04966, ctc_loss=0.08676, over 19686.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2946, pruned_loss=0.06416, ctc_loss=0.1193, over 845285.85 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0 +2024-08-26 18:30:06,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=106581.33333333333, ans=0.025 +2024-08-26 18:30:10,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=106581.33333333333, ans=0.5 +2024-08-26 18:30:12,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=106634.66666666667, ans=0.125 +2024-08-26 18:30:23,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=106634.66666666667, ans=0.125 +2024-08-26 18:30:23,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=106634.66666666667, ans=0.07 +2024-08-26 18:30:39,523 INFO [train.py:1114] (1/4) Epoch 9, batch 100, loss[loss=0.2027, simple_loss=0.2678, pruned_loss=0.0497, ctc_loss=0.09549, over 19705.00 frames. ], tot_loss[loss=0.234, simple_loss=0.294, pruned_loss=0.06336, ctc_loss=0.1179, over 1499557.75 frames. 
], batch size: 51, lr: 1.69e-02, grad_scale: 32.0 +2024-08-26 18:30:47,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=106741.33333333333, ans=0.0 +2024-08-26 18:30:58,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=106794.66666666667, ans=0.125 +2024-08-26 18:31:02,332 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.554e+02 1.735e+02 2.126e+02 3.416e+02, threshold=3.470e+02, percent-clipped=0.0 +2024-08-26 18:31:06,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=106848.0, ans=0.125 +2024-08-26 18:31:07,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106848.0, ans=0.125 +2024-08-26 18:31:28,286 INFO [train.py:1114] (1/4) Epoch 9, batch 150, loss[loss=0.2078, simple_loss=0.2683, pruned_loss=0.05295, ctc_loss=0.1032, over 19740.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2908, pruned_loss=0.06171, ctc_loss=0.115, over 2028994.73 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 16.0 +2024-08-26 18:31:30,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=107008.0, ans=0.025 +2024-08-26 18:31:35,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=107008.0, ans=0.5 +2024-08-26 18:31:45,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=107114.66666666667, ans=0.95 +2024-08-26 18:32:07,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107221.33333333333, ans=0.0 +2024-08-26 18:32:14,109 INFO [train.py:1114] (1/4) Epoch 9, batch 200, loss[loss=0.2568, simple_loss=0.3086, pruned_loss=0.07495, ctc_loss=0.1376, over 18126.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.2896, pruned_loss=0.06117, ctc_loss=0.1141, over 2436455.84 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:32:20,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=107274.66666666667, ans=0.125 +2024-08-26 18:32:24,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=107328.0, ans=0.025 +2024-08-26 18:32:25,088 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:32:30,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107328.0, ans=0.125 +2024-08-26 18:32:36,043 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.442e+02 1.571e+02 1.787e+02 2.800e+02, threshold=3.143e+02, percent-clipped=0.0 +2024-08-26 18:32:37,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=107381.33333333333, ans=0.125 +2024-08-26 18:32:42,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.55 vs. 
limit=12.0 +2024-08-26 18:32:42,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=107434.66666666667, ans=0.0 +2024-08-26 18:32:46,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=107434.66666666667, ans=0.125 +2024-08-26 18:33:01,989 INFO [train.py:1114] (1/4) Epoch 9, batch 250, loss[loss=0.2434, simple_loss=0.2959, pruned_loss=0.07039, ctc_loss=0.1253, over 19416.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.2889, pruned_loss=0.06047, ctc_loss=0.1129, over 2756445.89 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:33:03,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=107541.33333333333, ans=0.0 +2024-08-26 18:33:07,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107541.33333333333, ans=0.1 +2024-08-26 18:33:09,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=107541.33333333333, ans=0.125 +2024-08-26 18:33:14,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107594.66666666667, ans=0.0 +2024-08-26 18:33:33,124 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0 +2024-08-26 18:33:44,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=107701.33333333333, ans=0.0 +2024-08-26 18:33:57,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=107754.66666666667, ans=0.0 +2024-08-26 18:33:59,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=107754.66666666667, ans=0.07 +2024-08-26 18:34:01,006 INFO [train.py:1114] (1/4) Epoch 9, batch 300, loss[loss=0.2489, simple_loss=0.3111, pruned_loss=0.06816, ctc_loss=0.1258, over 19512.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.289, pruned_loss=0.06041, ctc_loss=0.1129, over 3000901.66 frames. 
], batch size: 61, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:34:05,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=107808.0, ans=0.0 +2024-08-26 18:34:23,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=107914.66666666667, ans=0.125 +2024-08-26 18:34:24,464 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.264e+02 1.498e+02 1.681e+02 1.999e+02 2.633e+02, threshold=3.363e+02, percent-clipped=0.0 +2024-08-26 18:34:25,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107914.66666666667, ans=0.125 +2024-08-26 18:34:33,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=107968.0, ans=0.0 +2024-08-26 18:34:38,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-08-26 18:34:38,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=107968.0, ans=0.125 +2024-08-26 18:34:39,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107968.0, ans=0.1 +2024-08-26 18:34:40,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=108021.33333333333, ans=0.025 +2024-08-26 18:34:40,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108021.33333333333, ans=0.1 +2024-08-26 18:34:50,540 INFO [train.py:1114] (1/4) Epoch 9, batch 350, loss[loss=0.2138, simple_loss=0.2647, pruned_loss=0.0581, ctc_loss=0.117, over 19739.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2892, pruned_loss=0.06045, ctc_loss=0.1131, over 3191307.58 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:34:52,683 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:35:01,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=108128.0, ans=0.0 +2024-08-26 18:35:18,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108181.33333333333, ans=0.125 +2024-08-26 18:35:40,767 INFO [train.py:1114] (1/4) Epoch 9, batch 400, loss[loss=0.2146, simple_loss=0.2869, pruned_loss=0.05096, ctc_loss=0.1007, over 19488.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.2888, pruned_loss=0.06032, ctc_loss=0.1129, over 3343094.27 frames. 
], batch size: 54, lr: 1.68e-02, grad_scale: 16.0 +2024-08-26 18:35:41,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=108341.33333333333, ans=6.0 +2024-08-26 18:35:53,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=108394.66666666667, ans=0.025 +2024-08-26 18:36:02,023 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.232e+02 1.489e+02 1.712e+02 1.995e+02 4.778e+02, threshold=3.424e+02, percent-clipped=1.0 +2024-08-26 18:36:24,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=108554.66666666667, ans=0.0 +2024-08-26 18:36:30,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=108554.66666666667, ans=0.05 +2024-08-26 18:36:32,713 INFO [train.py:1114] (1/4) Epoch 9, batch 450, loss[loss=0.2103, simple_loss=0.2834, pruned_loss=0.0496, ctc_loss=0.09519, over 19607.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.2889, pruned_loss=0.06039, ctc_loss=0.113, over 3449872.69 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:37:01,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108714.66666666667, ans=0.1 +2024-08-26 18:37:21,531 INFO [train.py:1114] (1/4) Epoch 9, batch 500, loss[loss=0.2367, simple_loss=0.3031, pruned_loss=0.06229, ctc_loss=0.1141, over 19651.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2877, pruned_loss=0.05973, ctc_loss=0.1116, over 3546527.31 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:37:22,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=108874.66666666667, ans=0.125 +2024-08-26 18:37:23,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=108874.66666666667, ans=0.125 +2024-08-26 18:37:24,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108874.66666666667, ans=0.0 +2024-08-26 18:37:42,868 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.480e+02 1.660e+02 1.957e+02 3.087e+02, threshold=3.320e+02, percent-clipped=0.0 +2024-08-26 18:38:02,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109088.0, ans=0.125 +2024-08-26 18:38:07,948 INFO [train.py:1114] (1/4) Epoch 9, batch 550, loss[loss=0.2584, simple_loss=0.3182, pruned_loss=0.07214, ctc_loss=0.1356, over 19302.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2877, pruned_loss=0.05958, ctc_loss=0.1114, over 3609589.52 frames. 
], batch size: 71, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:38:09,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=109141.33333333333, ans=0.0 +2024-08-26 18:38:11,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=109141.33333333333, ans=0.125 +2024-08-26 18:38:12,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=109141.33333333333, ans=0.2 +2024-08-26 18:38:44,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=109301.33333333333, ans=0.125 +2024-08-26 18:38:54,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109354.66666666667, ans=0.1 +2024-08-26 18:38:55,937 INFO [train.py:1114] (1/4) Epoch 9, batch 600, loss[loss=0.2215, simple_loss=0.2887, pruned_loss=0.05606, ctc_loss=0.1055, over 19431.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2876, pruned_loss=0.05955, ctc_loss=0.1113, over 3666994.55 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:39:06,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=109408.0, ans=0.025 +2024-08-26 18:39:18,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=109514.66666666667, ans=0.2 +2024-08-26 18:39:21,964 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.496e+02 1.658e+02 1.980e+02 4.382e+02, threshold=3.316e+02, percent-clipped=1.0 +2024-08-26 18:39:31,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=109568.0, ans=0.0 +2024-08-26 18:39:32,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.24 vs. limit=12.0 +2024-08-26 18:39:36,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=109568.0, ans=0.0 +2024-08-26 18:39:38,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-08-26 18:39:40,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=109621.33333333333, ans=0.0 +2024-08-26 18:39:45,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109621.33333333333, ans=0.125 +2024-08-26 18:39:46,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.00 vs. limit=15.0 +2024-08-26 18:39:49,350 INFO [train.py:1114] (1/4) Epoch 9, batch 650, loss[loss=0.215, simple_loss=0.2826, pruned_loss=0.05408, ctc_loss=0.09806, over 19762.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2873, pruned_loss=0.05953, ctc_loss=0.1112, over 3717055.50 frames. 
], batch size: 54, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:39:56,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109674.66666666667, ans=0.125 +2024-08-26 18:39:57,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.96 vs. limit=22.5 +2024-08-26 18:40:32,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109888.0, ans=0.1 +2024-08-26 18:40:40,260 INFO [train.py:1114] (1/4) Epoch 9, batch 700, loss[loss=0.2203, simple_loss=0.2844, pruned_loss=0.05627, ctc_loss=0.1088, over 19718.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.288, pruned_loss=0.05989, ctc_loss=0.1118, over 3748739.61 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 16.0 +2024-08-26 18:41:01,808 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.271e+02 1.503e+02 1.748e+02 2.321e+02 3.813e+02, threshold=3.497e+02, percent-clipped=1.0 +2024-08-26 18:41:04,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110048.0, ans=0.125 +2024-08-26 18:41:11,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=110101.33333333333, ans=0.125 +2024-08-26 18:41:25,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110154.66666666667, ans=0.0 +2024-08-26 18:41:28,644 INFO [train.py:1114] (1/4) Epoch 9, batch 750, loss[loss=0.2182, simple_loss=0.2876, pruned_loss=0.05394, ctc_loss=0.1022, over 19491.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2871, pruned_loss=0.0596, ctc_loss=0.1111, over 3774634.43 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 16.0 +2024-08-26 18:41:32,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=110208.0, ans=0.0 +2024-08-26 18:41:33,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=110208.0, ans=0.125 +2024-08-26 18:41:41,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110261.33333333333, ans=0.1 +2024-08-26 18:41:43,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=110261.33333333333, ans=0.07 +2024-08-26 18:41:46,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. limit=15.0 +2024-08-26 18:41:48,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=110314.66666666667, ans=0.0 +2024-08-26 18:41:54,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.41 vs. limit=15.0 +2024-08-26 18:42:12,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110421.33333333333, ans=0.125 +2024-08-26 18:42:21,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.30 vs. 
limit=15.0 +2024-08-26 18:42:22,143 INFO [train.py:1114] (1/4) Epoch 9, batch 800, loss[loss=0.2052, simple_loss=0.2634, pruned_loss=0.05426, ctc_loss=0.09596, over 19420.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2866, pruned_loss=0.05937, ctc_loss=0.1107, over 3795830.93 frames. ], batch size: 48, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:42:35,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110528.0, ans=0.125 +2024-08-26 18:42:41,783 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-26 18:42:43,918 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 1.427e+02 1.539e+02 1.792e+02 3.382e+02, threshold=3.078e+02, percent-clipped=0.0 +2024-08-26 18:42:45,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.72 vs. limit=22.5 +2024-08-26 18:43:09,193 INFO [train.py:1114] (1/4) Epoch 9, batch 850, loss[loss=0.2267, simple_loss=0.2925, pruned_loss=0.0586, ctc_loss=0.1093, over 19651.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2868, pruned_loss=0.05951, ctc_loss=0.1109, over 3815143.33 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:43:11,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=110741.33333333333, ans=0.025 +2024-08-26 18:43:32,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110848.0, ans=0.1 +2024-08-26 18:43:48,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=110954.66666666667, ans=0.05 +2024-08-26 18:43:55,669 INFO [train.py:1114] (1/4) Epoch 9, batch 900, loss[loss=0.189, simple_loss=0.2548, pruned_loss=0.04512, ctc_loss=0.08256, over 19821.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2867, pruned_loss=0.05951, ctc_loss=0.1109, over 3820006.16 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:45:38,147 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.519e+02 1.752e+02 2.077e+02 5.433e+02, threshold=3.505e+02, percent-clipped=5.0 +2024-08-26 18:45:39,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=111114.66666666667, ans=0.0 +2024-08-26 18:45:40,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=111114.66666666667, ans=0.2 +2024-08-26 18:45:59,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=111221.33333333333, ans=0.0 +2024-08-26 18:46:05,595 INFO [train.py:1114] (1/4) Epoch 9, batch 950, loss[loss=0.2165, simple_loss=0.2799, pruned_loss=0.05573, ctc_loss=0.1041, over 19494.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2864, pruned_loss=0.05948, ctc_loss=0.1109, over 3822352.50 frames. 
], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:46:19,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111328.0, ans=0.125 +2024-08-26 18:46:21,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=111328.0, ans=0.2 +2024-08-26 18:46:27,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111328.0, ans=0.1 +2024-08-26 18:46:41,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.32 vs. limit=15.0 +2024-08-26 18:46:48,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=111488.0, ans=0.125 +2024-08-26 18:46:57,418 INFO [train.py:1114] (1/4) Epoch 9, batch 1000, loss[loss=0.2205, simple_loss=0.2845, pruned_loss=0.05695, ctc_loss=0.1066, over 19843.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2876, pruned_loss=0.06004, ctc_loss=0.1118, over 3816887.10 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:47:08,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=111594.66666666667, ans=0.0 +2024-08-26 18:47:19,848 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.461e+02 1.756e+02 2.077e+02 6.803e+02, threshold=3.513e+02, percent-clipped=1.0 +2024-08-26 18:47:29,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.26 vs. limit=15.0 +2024-08-26 18:47:43,914 INFO [train.py:1114] (1/4) Epoch 9, batch 1050, loss[loss=0.2207, simple_loss=0.2918, pruned_loss=0.05402, ctc_loss=0.1042, over 19826.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2868, pruned_loss=0.05975, ctc_loss=0.1114, over 3823253.42 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:47:49,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.34 vs. limit=10.0 +2024-08-26 18:47:58,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111861.33333333333, ans=0.1 +2024-08-26 18:48:00,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=111861.33333333333, ans=0.2 +2024-08-26 18:48:04,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=111914.66666666667, ans=0.125 +2024-08-26 18:48:07,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=111914.66666666667, ans=0.2 +2024-08-26 18:48:32,530 INFO [train.py:1114] (1/4) Epoch 9, batch 1100, loss[loss=0.208, simple_loss=0.273, pruned_loss=0.05195, ctc_loss=0.09766, over 19600.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2861, pruned_loss=0.05921, ctc_loss=0.1105, over 3830916.43 frames. 
], batch size: 52, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:48:34,486 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:48:39,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112074.66666666667, ans=0.125 +2024-08-26 18:48:39,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112074.66666666667, ans=0.125 +2024-08-26 18:48:59,870 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.444e+02 1.690e+02 2.009e+02 4.396e+02, threshold=3.380e+02, percent-clipped=1.0 +2024-08-26 18:49:01,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112181.33333333333, ans=0.125 +2024-08-26 18:49:47,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112288.0, ans=0.1 +2024-08-26 18:49:51,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=112288.0, ans=0.0 +2024-08-26 18:49:51,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.45 vs. limit=15.0 +2024-08-26 18:49:53,120 INFO [train.py:1114] (1/4) Epoch 9, batch 1150, loss[loss=0.202, simple_loss=0.2732, pruned_loss=0.04833, ctc_loss=0.08503, over 19561.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2861, pruned_loss=0.05931, ctc_loss=0.1106, over 3829625.20 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:50:34,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=112501.33333333333, ans=0.0 +2024-08-26 18:50:54,143 INFO [train.py:1114] (1/4) Epoch 9, batch 1200, loss[loss=0.2441, simple_loss=0.3059, pruned_loss=0.06588, ctc_loss=0.1265, over 19844.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2868, pruned_loss=0.05957, ctc_loss=0.1112, over 3825238.18 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0 +2024-08-26 18:50:58,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112608.0, ans=0.125 +2024-08-26 18:51:04,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112661.33333333333, ans=0.1 +2024-08-26 18:51:10,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=112661.33333333333, ans=0.0 +2024-08-26 18:51:15,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=112714.66666666667, ans=0.0 +2024-08-26 18:51:15,428 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.83 vs. limit=15.0 +2024-08-26 18:51:16,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.431e+02 1.600e+02 1.807e+02 3.201e+02, threshold=3.201e+02, percent-clipped=0.0 +2024-08-26 18:51:36,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.57 vs. 
limit=15.0 +2024-08-26 18:51:37,570 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:51:38,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112821.33333333333, ans=0.125 +2024-08-26 18:51:42,796 INFO [train.py:1114] (1/4) Epoch 9, batch 1250, loss[loss=0.2455, simple_loss=0.3041, pruned_loss=0.06798, ctc_loss=0.1271, over 19502.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2871, pruned_loss=0.05941, ctc_loss=0.1109, over 3844066.73 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0 +2024-08-26 18:51:46,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112874.66666666667, ans=0.1 +2024-08-26 18:51:54,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=112928.0, ans=0.0 +2024-08-26 18:52:10,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.41 vs. limit=22.5 +2024-08-26 18:52:21,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=113034.66666666667, ans=0.125 +2024-08-26 18:52:23,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=113034.66666666667, ans=0.2 +2024-08-26 18:52:24,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=113088.0, ans=0.125 +2024-08-26 18:52:26,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=15.0 +2024-08-26 18:52:29,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=113088.0, ans=0.125 +2024-08-26 18:52:36,293 INFO [train.py:1114] (1/4) Epoch 9, batch 1300, loss[loss=0.2493, simple_loss=0.31, pruned_loss=0.06921, ctc_loss=0.1254, over 18755.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2862, pruned_loss=0.05898, ctc_loss=0.1103, over 3847285.77 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:52:48,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113194.66666666667, ans=0.125 +2024-08-26 18:52:58,751 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.498e+02 1.743e+02 2.034e+02 3.430e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-26 18:52:59,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113248.0, ans=0.125 +2024-08-26 18:53:10,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113301.33333333333, ans=0.1 +2024-08-26 18:53:23,259 INFO [train.py:1114] (1/4) Epoch 9, batch 1350, loss[loss=0.2303, simple_loss=0.2968, pruned_loss=0.05927, ctc_loss=0.1129, over 19776.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.286, pruned_loss=0.05877, ctc_loss=0.1097, over 3858500.78 frames. 
], batch size: 54, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:53:26,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113408.0, ans=0.1 +2024-08-26 18:53:33,742 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:53:38,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=113461.33333333333, ans=0.025 +2024-08-26 18:53:43,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113514.66666666667, ans=0.0 +2024-08-26 18:53:58,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=113568.0, ans=0.125 +2024-08-26 18:53:59,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=113621.33333333333, ans=0.125 +2024-08-26 18:54:09,874 INFO [train.py:1114] (1/4) Epoch 9, batch 1400, loss[loss=0.2039, simple_loss=0.2571, pruned_loss=0.05586, ctc_loss=0.09738, over 19683.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2857, pruned_loss=0.05885, ctc_loss=0.1098, over 3865413.39 frames. ], batch size: 46, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:54:11,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.54 vs. limit=15.0 +2024-08-26 18:54:20,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.65 vs. limit=15.0 +2024-08-26 18:54:28,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=113781.33333333333, ans=0.1 +2024-08-26 18:54:33,074 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.492e+02 1.644e+02 1.948e+02 2.802e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-26 18:54:33,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.19 vs. limit=10.0 +2024-08-26 18:54:46,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.19 vs. limit=12.0 +2024-08-26 18:54:59,233 INFO [train.py:1114] (1/4) Epoch 9, batch 1450, loss[loss=0.2484, simple_loss=0.3131, pruned_loss=0.06751, ctc_loss=0.1216, over 19683.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2865, pruned_loss=0.05916, ctc_loss=0.1101, over 3862231.39 frames. 
], batch size: 63, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:55:14,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=113994.66666666667, ans=0.2 +2024-08-26 18:55:17,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=113994.66666666667, ans=0.0 +2024-08-26 18:55:20,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=113994.66666666667, ans=22.5 +2024-08-26 18:55:21,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=113994.66666666667, ans=0.0 +2024-08-26 18:55:32,306 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.38 vs. limit=15.0 +2024-08-26 18:55:35,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114101.33333333333, ans=0.125 +2024-08-26 18:55:42,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.40 vs. limit=15.0 +2024-08-26 18:55:48,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=114154.66666666667, ans=0.125 +2024-08-26 18:55:54,507 INFO [train.py:1114] (1/4) Epoch 9, batch 1500, loss[loss=0.2297, simple_loss=0.2968, pruned_loss=0.06034, ctc_loss=0.1047, over 19577.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2876, pruned_loss=0.0595, ctc_loss=0.1107, over 3862086.48 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:56:00,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=114208.0, ans=10.0 +2024-08-26 18:56:03,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=114261.33333333333, ans=0.0 +2024-08-26 18:56:07,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114261.33333333333, ans=0.125 +2024-08-26 18:56:12,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.34 vs. limit=15.0 +2024-08-26 18:56:18,311 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.542e+02 1.688e+02 1.884e+02 2.711e+02, threshold=3.377e+02, percent-clipped=0.0 +2024-08-26 18:56:37,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=114421.33333333333, ans=0.125 +2024-08-26 18:56:41,348 INFO [train.py:1114] (1/4) Epoch 9, batch 1550, loss[loss=0.2318, simple_loss=0.298, pruned_loss=0.06037, ctc_loss=0.1123, over 19601.00 frames. ], tot_loss[loss=0.226, simple_loss=0.288, pruned_loss=0.05975, ctc_loss=0.1114, over 3845598.20 frames. 
], batch size: 60, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:57:02,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=114581.33333333333, ans=0.125 +2024-08-26 18:57:03,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=114581.33333333333, ans=0.125 +2024-08-26 18:57:08,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-26 18:57:10,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=114634.66666666667, ans=0.2 +2024-08-26 18:57:26,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114688.0, ans=0.1 +2024-08-26 18:57:29,651 INFO [train.py:1114] (1/4) Epoch 9, batch 1600, loss[loss=0.2061, simple_loss=0.2816, pruned_loss=0.0473, ctc_loss=0.09005, over 19845.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.288, pruned_loss=0.05983, ctc_loss=0.1114, over 3835446.47 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0 +2024-08-26 18:57:35,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114741.33333333333, ans=0.125 +2024-08-26 18:57:57,609 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.291e+02 1.549e+02 1.720e+02 1.979e+02 3.573e+02, threshold=3.441e+02, percent-clipped=1.0 +2024-08-26 18:57:58,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.62 vs. limit=15.0 +2024-08-26 18:58:10,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114901.33333333333, ans=0.1 +2024-08-26 18:58:36,375 INFO [train.py:1114] (1/4) Epoch 9, batch 1650, loss[loss=0.2184, simple_loss=0.285, pruned_loss=0.05485, ctc_loss=0.1052, over 19634.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2881, pruned_loss=0.05989, ctc_loss=0.1116, over 3831847.39 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 32.0 +2024-08-26 18:59:41,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.85 vs. limit=15.0 +2024-08-26 18:59:54,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=115168.0, ans=0.025 +2024-08-26 18:59:59,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=115168.0, ans=0.125 +2024-08-26 19:00:11,415 INFO [train.py:1114] (1/4) Epoch 9, batch 1700, loss[loss=0.2002, simple_loss=0.2619, pruned_loss=0.05086, ctc_loss=0.09189, over 19663.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2873, pruned_loss=0.05924, ctc_loss=0.1103, over 3846475.32 frames. 
], batch size: 46, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:00:12,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=115274.66666666667, ans=0.0 +2024-08-26 19:00:20,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=115328.0, ans=0.2 +2024-08-26 19:00:26,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=115328.0, ans=0.0 +2024-08-26 19:00:27,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115328.0, ans=0.1 +2024-08-26 19:00:32,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115381.33333333333, ans=0.125 +2024-08-26 19:00:32,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=115381.33333333333, ans=0.125 +2024-08-26 19:00:34,661 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.433e+02 1.619e+02 1.844e+02 2.581e+02, threshold=3.239e+02, percent-clipped=0.0 +2024-08-26 19:00:37,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=115434.66666666667, ans=0.2 +2024-08-26 19:00:37,836 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.39 vs. limit=22.5 +2024-08-26 19:00:56,872 INFO [train.py:1114] (1/4) Epoch 9, batch 1750, loss[loss=0.1802, simple_loss=0.2415, pruned_loss=0.04388, ctc_loss=0.07784, over 19655.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2866, pruned_loss=0.05918, ctc_loss=0.1103, over 3851275.88 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:00:57,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115541.33333333333, ans=0.125 +2024-08-26 19:01:02,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115541.33333333333, ans=0.125 +2024-08-26 19:01:03,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=115541.33333333333, ans=0.125 +2024-08-26 19:01:18,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=115648.0, ans=0.125 +2024-08-26 19:01:20,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=115648.0, ans=0.2 +2024-08-26 19:01:24,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=115701.33333333333, ans=10.0 +2024-08-26 19:01:31,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=115701.33333333333, ans=0.125 +2024-08-26 19:01:43,102 INFO [train.py:1114] (1/4) Epoch 9, batch 1800, loss[loss=0.2207, simple_loss=0.2938, pruned_loss=0.05389, ctc_loss=0.09964, over 19616.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2864, pruned_loss=0.05887, ctc_loss=0.1098, over 3855165.41 frames. 
], batch size: 55, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:01:54,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=115861.33333333333, ans=0.125 +2024-08-26 19:01:57,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=115861.33333333333, ans=0.0 +2024-08-26 19:02:06,016 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.500e+02 1.645e+02 1.953e+02 3.789e+02, threshold=3.290e+02, percent-clipped=1.0 +2024-08-26 19:02:09,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115968.0, ans=0.1 +2024-08-26 19:02:11,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.38 vs. limit=15.0 +2024-08-26 19:02:19,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=116021.33333333333, ans=0.0 +2024-08-26 19:02:27,281 INFO [train.py:1114] (1/4) Epoch 9, batch 1850, loss[loss=0.2429, simple_loss=0.3023, pruned_loss=0.06663, ctc_loss=0.1258, over 19587.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2858, pruned_loss=0.05857, ctc_loss=0.1093, over 3857559.76 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:02:36,358 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:02:40,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=116128.0, ans=0.125 +2024-08-26 19:02:45,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.11 vs. limit=22.5 +2024-08-26 19:02:50,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=116181.33333333333, ans=0.125 +2024-08-26 19:02:51,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=116181.33333333333, ans=0.025 +2024-08-26 19:03:13,218 INFO [train.py:1114] (1/4) Epoch 9, batch 1900, loss[loss=0.2251, simple_loss=0.2921, pruned_loss=0.05766, ctc_loss=0.1069, over 19648.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2867, pruned_loss=0.05886, ctc_loss=0.1099, over 3862589.14 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:03:19,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116341.33333333333, ans=0.1 +2024-08-26 19:03:34,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.93 vs. 
limit=15.0 +2024-08-26 19:03:35,875 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.270e+02 1.509e+02 1.695e+02 1.935e+02 3.320e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-26 19:03:49,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116554.66666666667, ans=0.125 +2024-08-26 19:03:50,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=116554.66666666667, ans=0.125 +2024-08-26 19:03:56,678 INFO [train.py:1114] (1/4) Epoch 9, batch 1950, loss[loss=0.1934, simple_loss=0.2649, pruned_loss=0.04359, ctc_loss=0.08676, over 19604.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2876, pruned_loss=0.05896, ctc_loss=0.1101, over 3871366.78 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:04:08,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=116661.33333333333, ans=0.125 +2024-08-26 19:04:19,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=116714.66666666667, ans=0.2 +2024-08-26 19:04:20,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=116714.66666666667, ans=0.0 +2024-08-26 19:04:30,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116768.0, ans=0.125 +2024-08-26 19:04:39,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116821.33333333333, ans=0.125 +2024-08-26 19:04:45,338 INFO [train.py:1114] (1/4) Epoch 9, batch 2000, loss[loss=0.1996, simple_loss=0.2603, pruned_loss=0.05108, ctc_loss=0.09158, over 19628.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.288, pruned_loss=0.05913, ctc_loss=0.1107, over 3855299.30 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0 +2024-08-26 19:04:47,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=116874.66666666667, ans=0.125 +2024-08-26 19:04:53,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.37 vs. limit=6.0 +2024-08-26 19:04:54,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-08-26 19:04:56,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. 
limit=15.0 +2024-08-26 19:05:03,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=116981.33333333333, ans=0.0 +2024-08-26 19:05:05,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=116981.33333333333, ans=0.05 +2024-08-26 19:05:09,036 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.518e+02 1.711e+02 1.998e+02 4.316e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-26 19:05:11,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=117034.66666666667, ans=0.125 +2024-08-26 19:05:17,959 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:05:26,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.18 vs. limit=22.5 +2024-08-26 19:05:27,631 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:05:29,278 INFO [train.py:1114] (1/4) Epoch 9, batch 2050, loss[loss=0.1858, simple_loss=0.2481, pruned_loss=0.04445, ctc_loss=0.08661, over 19729.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2872, pruned_loss=0.0589, ctc_loss=0.1103, over 3851579.32 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:05:34,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=117141.33333333333, ans=0.0 +2024-08-26 19:05:41,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=117194.66666666667, ans=0.1 +2024-08-26 19:06:06,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.99 vs. limit=22.5 +2024-08-26 19:06:12,993 INFO [train.py:1114] (1/4) Epoch 9, batch 2100, loss[loss=0.2154, simple_loss=0.2901, pruned_loss=0.05179, ctc_loss=0.0928, over 19762.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2866, pruned_loss=0.05844, ctc_loss=0.1094, over 3857799.30 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:06:30,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117514.66666666667, ans=0.125 +2024-08-26 19:06:36,666 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.488e+02 1.695e+02 1.945e+02 3.088e+02, threshold=3.391e+02, percent-clipped=0.0 +2024-08-26 19:06:44,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. 
limit=15.0 +2024-08-26 19:06:51,416 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:06:52,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=117621.33333333333, ans=15.0 +2024-08-26 19:06:53,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=117621.33333333333, ans=0.025 +2024-08-26 19:06:53,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=117621.33333333333, ans=0.0 +2024-08-26 19:06:55,548 INFO [train.py:1114] (1/4) Epoch 9, batch 2150, loss[loss=0.1996, simple_loss=0.2662, pruned_loss=0.04814, ctc_loss=0.09211, over 19567.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2854, pruned_loss=0.05785, ctc_loss=0.1083, over 3867762.36 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2024-08-26 19:06:55,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117674.66666666667, ans=0.1 +2024-08-26 19:06:57,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117674.66666666667, ans=0.125 +2024-08-26 19:07:02,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=117674.66666666667, ans=0.0 +2024-08-26 19:07:11,322 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:07:13,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117781.33333333333, ans=0.1 +2024-08-26 19:07:13,342 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.56 vs. limit=15.0 +2024-08-26 19:07:18,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=117781.33333333333, ans=0.04949747468305833 +2024-08-26 19:07:18,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.29 vs. limit=15.0 +2024-08-26 19:07:29,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117888.0, ans=0.1 +2024-08-26 19:07:36,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-08-26 19:07:38,963 INFO [train.py:1114] (1/4) Epoch 9, batch 2200, loss[loss=0.2376, simple_loss=0.3064, pruned_loss=0.06202, ctc_loss=0.1118, over 19601.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2853, pruned_loss=0.05785, ctc_loss=0.1083, over 3866602.78 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:07:42,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. 
limit=6.0 +2024-08-26 19:07:45,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=117941.33333333333, ans=0.125 +2024-08-26 19:07:45,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=117941.33333333333, ans=0.125 +2024-08-26 19:08:03,124 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.528e+02 1.792e+02 2.132e+02 3.306e+02, threshold=3.583e+02, percent-clipped=0.0 +2024-08-26 19:08:14,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=118154.66666666667, ans=0.125 +2024-08-26 19:08:20,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=118154.66666666667, ans=0.0 +2024-08-26 19:08:21,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-08-26 19:08:34,327 INFO [train.py:1114] (1/4) Epoch 9, batch 2250, loss[loss=0.216, simple_loss=0.2886, pruned_loss=0.05131, ctc_loss=0.1018, over 19614.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2852, pruned_loss=0.05781, ctc_loss=0.1083, over 3867023.90 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:09:09,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.68 vs. limit=15.0 +2024-08-26 19:09:17,821 INFO [train.py:1114] (1/4) Epoch 9, batch 2300, loss[loss=0.2093, simple_loss=0.2695, pruned_loss=0.05483, ctc_loss=0.0986, over 19496.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2841, pruned_loss=0.0578, ctc_loss=0.1081, over 3860668.95 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:09:20,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=118474.66666666667, ans=0.125 +2024-08-26 19:09:42,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.479e+02 1.669e+02 2.317e+02 3.988e+02, threshold=3.338e+02, percent-clipped=3.0 +2024-08-26 19:09:43,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=118634.66666666667, ans=0.1 +2024-08-26 19:09:48,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=118634.66666666667, ans=0.125 +2024-08-26 19:10:01,375 INFO [train.py:1114] (1/4) Epoch 9, batch 2350, loss[loss=0.2345, simple_loss=0.2989, pruned_loss=0.06194, ctc_loss=0.1153, over 19669.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2844, pruned_loss=0.05814, ctc_loss=0.1087, over 3863748.78 frames. 
], batch size: 63, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:10:06,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=118741.33333333333, ans=0.125 +2024-08-26 19:10:13,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=118794.66666666667, ans=0.125 +2024-08-26 19:11:06,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=118848.0, ans=0.125 +2024-08-26 19:11:06,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=118848.0, ans=0.1 +2024-08-26 19:11:06,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=118848.0, ans=0.0 +2024-08-26 19:11:11,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118901.33333333333, ans=0.125 +2024-08-26 19:11:14,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=118901.33333333333, ans=0.125 +2024-08-26 19:11:19,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118954.66666666667, ans=0.125 +2024-08-26 19:11:32,744 INFO [train.py:1114] (1/4) Epoch 9, batch 2400, loss[loss=0.2652, simple_loss=0.3194, pruned_loss=0.07627, ctc_loss=0.1465, over 19240.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2868, pruned_loss=0.05907, ctc_loss=0.1103, over 3857864.72 frames. ], batch size: 71, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:11:50,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=119061.33333333333, ans=0.125 +2024-08-26 19:12:04,703 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.526e+02 1.714e+02 1.892e+02 3.175e+02, threshold=3.427e+02, percent-clipped=0.0 +2024-08-26 19:12:13,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.66 vs. limit=15.0 +2024-08-26 19:12:22,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=119221.33333333333, ans=0.125 +2024-08-26 19:12:22,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.14 vs. limit=12.0 +2024-08-26 19:12:24,772 INFO [train.py:1114] (1/4) Epoch 9, batch 2450, loss[loss=0.2879, simple_loss=0.3178, pruned_loss=0.09466, ctc_loss=0.172, over 13701.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2912, pruned_loss=0.0624, ctc_loss=0.1169, over 3731728.38 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:14:15,880 INFO [train.py:1114] (1/4) Epoch 10, batch 0, loss[loss=0.2042, simple_loss=0.2648, pruned_loss=0.05236, ctc_loss=0.09713, over 19818.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2648, pruned_loss=0.05236, ctc_loss=0.09713, over 19818.00 frames. 
], batch size: 49, lr: 1.53e-02, grad_scale: 16.0 +2024-08-26 19:14:15,881 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 19:14:48,065 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.1896, simple_loss=0.2813, pruned_loss=0.03622, ctc_loss=0.0637, over 944034.00 frames. +2024-08-26 19:14:48,066 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12882MB +2024-08-26 19:15:04,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=119536.0, ans=0.125 +2024-08-26 19:15:14,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=119589.33333333333, ans=0.125 +2024-08-26 19:15:25,080 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.696e+02 1.867e+02 2.057e+02 3.331e+02, threshold=3.733e+02, percent-clipped=0.0 +2024-08-26 19:15:25,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=119696.0, ans=0.125 +2024-08-26 19:15:26,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=119696.0, ans=0.125 +2024-08-26 19:15:34,236 INFO [train.py:1114] (1/4) Epoch 10, batch 50, loss[loss=0.1967, simple_loss=0.2659, pruned_loss=0.04623, ctc_loss=0.08732, over 19716.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2874, pruned_loss=0.05884, ctc_loss=0.1113, over 843671.78 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 16.0 +2024-08-26 19:15:42,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=119802.66666666667, ans=0.125 +2024-08-26 19:15:51,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=119802.66666666667, ans=0.0 +2024-08-26 19:15:53,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=119856.0, ans=0.0 +2024-08-26 19:16:02,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=119909.33333333333, ans=0.0 +2024-08-26 19:16:10,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.08 vs. limit=15.0 +2024-08-26 19:16:14,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=119962.66666666667, ans=0.0 +2024-08-26 19:16:20,471 INFO [train.py:1114] (1/4) Epoch 10, batch 100, loss[loss=0.2057, simple_loss=0.2706, pruned_loss=0.05089, ctc_loss=0.09777, over 19715.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2886, pruned_loss=0.05879, ctc_loss=0.1104, over 1499199.10 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:16:27,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=120016.0, ans=0.125 +2024-08-26 19:16:28,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120069.33333333333, ans=0.125 +2024-08-26 19:16:37,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.10 vs. 
limit=15.0 +2024-08-26 19:16:38,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=120069.33333333333, ans=0.125 +2024-08-26 19:16:53,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=120176.0, ans=0.0 +2024-08-26 19:16:53,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.87 vs. limit=15.0 +2024-08-26 19:17:03,443 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.471e+02 1.633e+02 1.792e+02 2.780e+02, threshold=3.265e+02, percent-clipped=0.0 +2024-08-26 19:17:09,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=120229.33333333333, ans=0.05 +2024-08-26 19:17:11,616 INFO [train.py:1114] (1/4) Epoch 10, batch 150, loss[loss=0.1951, simple_loss=0.2534, pruned_loss=0.05005, ctc_loss=0.09158, over 19685.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2858, pruned_loss=0.05801, ctc_loss=0.1083, over 2028038.53 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:17:16,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-26 19:17:19,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=120282.66666666667, ans=0.0 +2024-08-26 19:17:19,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=120282.66666666667, ans=0.0 +2024-08-26 19:17:20,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-26 19:17:38,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.83 vs. limit=15.0 +2024-08-26 19:17:52,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.98 vs. limit=22.5 +2024-08-26 19:18:02,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=120496.0, ans=0.2 +2024-08-26 19:18:07,131 INFO [train.py:1114] (1/4) Epoch 10, batch 200, loss[loss=0.241, simple_loss=0.3066, pruned_loss=0.06353, ctc_loss=0.1209, over 18236.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2849, pruned_loss=0.05768, ctc_loss=0.1075, over 2435602.70 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:18:29,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120549.33333333333, ans=0.125 +2024-08-26 19:18:30,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.95 vs. 
limit=22.5 +2024-08-26 19:18:30,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=120549.33333333333, ans=0.125 +2024-08-26 19:18:33,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=120549.33333333333, ans=0.0 +2024-08-26 19:18:34,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=120602.66666666667, ans=0.0 +2024-08-26 19:18:40,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.48 vs. limit=22.5 +2024-08-26 19:18:51,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-26 19:19:10,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=120762.66666666667, ans=0.0 +2024-08-26 19:19:12,214 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.459e+02 1.596e+02 1.815e+02 3.041e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-26 19:19:43,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120762.66666666667, ans=0.1 +2024-08-26 19:19:47,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120816.0, ans=0.125 +2024-08-26 19:19:48,330 INFO [train.py:1114] (1/4) Epoch 10, batch 250, loss[loss=0.2149, simple_loss=0.2863, pruned_loss=0.05233, ctc_loss=0.09711, over 19382.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2845, pruned_loss=0.0572, ctc_loss=0.1065, over 2755365.69 frames. ], batch size: 67, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:19:50,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.63 vs. limit=10.0 +2024-08-26 19:20:03,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=120869.33333333333, ans=0.07 +2024-08-26 19:20:06,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=120869.33333333333, ans=0.0 +2024-08-26 19:20:11,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=120869.33333333333, ans=0.125 +2024-08-26 19:20:11,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.05 vs. limit=22.5 +2024-08-26 19:20:14,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=120922.66666666667, ans=0.025 +2024-08-26 19:20:25,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=120976.0, ans=0.0 +2024-08-26 19:20:37,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121029.33333333333, ans=0.125 +2024-08-26 19:20:45,507 INFO [train.py:1114] (1/4) Epoch 10, batch 300, loss[loss=0.2265, simple_loss=0.2941, pruned_loss=0.05848, ctc_loss=0.1045, over 19524.00 frames. 
], tot_loss[loss=0.2196, simple_loss=0.2835, pruned_loss=0.05671, ctc_loss=0.1056, over 3000066.51 frames. ], batch size: 61, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:20:53,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=121082.66666666667, ans=0.025 +2024-08-26 19:21:01,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121136.0, ans=0.125 +2024-08-26 19:21:25,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=121242.66666666667, ans=0.0 +2024-08-26 19:21:28,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.56 vs. limit=6.0 +2024-08-26 19:21:29,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.480e+02 1.641e+02 1.981e+02 3.456e+02, threshold=3.281e+02, percent-clipped=2.0 +2024-08-26 19:21:30,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=121296.0, ans=0.125 +2024-08-26 19:21:30,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.69 vs. limit=12.0 +2024-08-26 19:21:31,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=121296.0, ans=0.09899494936611666 +2024-08-26 19:21:37,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=121349.33333333333, ans=0.125 +2024-08-26 19:21:38,275 INFO [train.py:1114] (1/4) Epoch 10, batch 350, loss[loss=0.1943, simple_loss=0.2546, pruned_loss=0.04931, ctc_loss=0.0882, over 19801.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2835, pruned_loss=0.05664, ctc_loss=0.1056, over 3189437.63 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:21:40,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=121349.33333333333, ans=0.125 +2024-08-26 19:21:53,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0 +2024-08-26 19:21:56,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=121456.0, ans=0.125 +2024-08-26 19:21:56,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.02 vs. limit=15.0 +2024-08-26 19:21:57,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.00 vs. limit=12.0 +2024-08-26 19:22:09,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121509.33333333333, ans=0.1 +2024-08-26 19:22:18,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.05 vs. limit=6.0 +2024-08-26 19:22:24,882 INFO [train.py:1114] (1/4) Epoch 10, batch 400, loss[loss=0.2127, simple_loss=0.283, pruned_loss=0.05112, ctc_loss=0.1005, over 19497.00 frames. 
], tot_loss[loss=0.2191, simple_loss=0.2829, pruned_loss=0.05654, ctc_loss=0.1054, over 3341910.85 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:22:26,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=121616.0, ans=0.0 +2024-08-26 19:22:37,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121669.33333333333, ans=0.1 +2024-08-26 19:23:08,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121776.0, ans=0.125 +2024-08-26 19:23:16,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121829.33333333333, ans=0.125 +2024-08-26 19:23:18,027 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.471e+02 1.735e+02 2.020e+02 3.245e+02, threshold=3.470e+02, percent-clipped=0.0 +2024-08-26 19:23:26,370 INFO [train.py:1114] (1/4) Epoch 10, batch 450, loss[loss=0.2056, simple_loss=0.2805, pruned_loss=0.04778, ctc_loss=0.0877, over 19622.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.283, pruned_loss=0.05637, ctc_loss=0.1053, over 3450128.20 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:23:41,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=121936.0, ans=0.0 +2024-08-26 19:23:46,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=121989.33333333333, ans=0.125 +2024-08-26 19:23:53,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=121989.33333333333, ans=0.125 +2024-08-26 19:24:05,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=122042.66666666667, ans=0.125 +2024-08-26 19:24:09,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=122096.0, ans=0.125 +2024-08-26 19:24:16,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=122149.33333333333, ans=0.125 +2024-08-26 19:24:19,329 INFO [train.py:1114] (1/4) Epoch 10, batch 500, loss[loss=0.2391, simple_loss=0.3065, pruned_loss=0.06325, ctc_loss=0.113, over 19674.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2824, pruned_loss=0.05586, ctc_loss=0.1044, over 3545386.11 frames. 
], batch size: 63, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:24:25,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=122149.33333333333, ans=0.125 +2024-08-26 19:24:28,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=122149.33333333333, ans=0.025 +2024-08-26 19:24:28,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=122149.33333333333, ans=0.2 +2024-08-26 19:24:42,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=122202.66666666667, ans=0.125 +2024-08-26 19:24:58,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=122256.0, ans=0.0 +2024-08-26 19:25:05,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=122309.33333333333, ans=0.125 +2024-08-26 19:25:11,344 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.449e+02 1.637e+02 1.959e+02 3.375e+02, threshold=3.275e+02, percent-clipped=0.0 +2024-08-26 19:25:11,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=122362.66666666667, ans=0.0 +2024-08-26 19:25:14,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-26 19:25:14,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-26 19:25:19,721 INFO [train.py:1114] (1/4) Epoch 10, batch 550, loss[loss=0.2297, simple_loss=0.2883, pruned_loss=0.06212, ctc_loss=0.1171, over 19232.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2826, pruned_loss=0.05604, ctc_loss=0.1047, over 3607483.00 frames. ], batch size: 71, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:25:21,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122416.0, ans=0.125 +2024-08-26 19:25:22,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=122416.0, ans=0.125 +2024-08-26 19:25:30,373 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:25:31,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=122469.33333333333, ans=0.0 +2024-08-26 19:26:01,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=122629.33333333333, ans=0.07 +2024-08-26 19:26:03,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=122629.33333333333, ans=0.0 +2024-08-26 19:26:10,276 INFO [train.py:1114] (1/4) Epoch 10, batch 600, loss[loss=0.2162, simple_loss=0.2867, pruned_loss=0.05261, ctc_loss=0.1013, over 19386.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2831, pruned_loss=0.05647, ctc_loss=0.1053, over 3665325.29 frames. 
], batch size: 67, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:26:24,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=122736.0, ans=0.0 +2024-08-26 19:26:44,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=122842.66666666667, ans=0.0 +2024-08-26 19:26:50,243 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.480e+02 1.661e+02 1.846e+02 3.271e+02, threshold=3.322e+02, percent-clipped=0.0 +2024-08-26 19:26:56,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=122896.0, ans=0.125 +2024-08-26 19:26:58,395 INFO [train.py:1114] (1/4) Epoch 10, batch 650, loss[loss=0.2134, simple_loss=0.2805, pruned_loss=0.05251, ctc_loss=0.1034, over 19773.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2827, pruned_loss=0.05619, ctc_loss=0.105, over 3716018.99 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:27:07,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-08-26 19:27:12,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123002.66666666667, ans=0.0 +2024-08-26 19:27:36,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=123109.33333333333, ans=0.125 +2024-08-26 19:27:37,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=123109.33333333333, ans=0.125 +2024-08-26 19:27:48,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=123162.66666666667, ans=0.0 +2024-08-26 19:27:51,539 INFO [train.py:1114] (1/4) Epoch 10, batch 700, loss[loss=0.1988, simple_loss=0.2688, pruned_loss=0.04764, ctc_loss=0.08378, over 19726.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2831, pruned_loss=0.05623, ctc_loss=0.105, over 3747424.30 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:27:58,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=123216.0, ans=0.125 +2024-08-26 19:28:01,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123269.33333333333, ans=0.0 +2024-08-26 19:28:10,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=123322.66666666667, ans=0.125 +2024-08-26 19:28:11,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=123322.66666666667, ans=0.0 +2024-08-26 19:28:13,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=123322.66666666667, ans=0.125 +2024-08-26 19:28:21,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.43 vs. 
limit=15.0 +2024-08-26 19:28:29,135 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.526e+02 1.912e+02 2.394e+02 4.336e+02, threshold=3.825e+02, percent-clipped=8.0 +2024-08-26 19:28:35,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=123429.33333333333, ans=0.025 +2024-08-26 19:28:38,774 INFO [train.py:1114] (1/4) Epoch 10, batch 750, loss[loss=0.2041, simple_loss=0.2718, pruned_loss=0.04932, ctc_loss=0.09441, over 19519.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2824, pruned_loss=0.05589, ctc_loss=0.1042, over 3773650.40 frames. ], batch size: 54, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:29:19,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.15 vs. limit=15.0 +2024-08-26 19:29:27,332 INFO [train.py:1114] (1/4) Epoch 10, batch 800, loss[loss=0.1912, simple_loss=0.2497, pruned_loss=0.04775, ctc_loss=0.0933, over 19818.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2816, pruned_loss=0.05563, ctc_loss=0.1037, over 3795006.20 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:29:35,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=123802.66666666667, ans=0.05 +2024-08-26 19:29:36,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=123802.66666666667, ans=0.125 +2024-08-26 19:29:44,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=123802.66666666667, ans=0.05 +2024-08-26 19:29:46,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.96 vs. limit=22.5 +2024-08-26 19:29:47,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123856.0, ans=0.125 +2024-08-26 19:29:59,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.81 vs. limit=22.5 +2024-08-26 19:30:07,513 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.505e+02 1.745e+02 2.038e+02 4.368e+02, threshold=3.490e+02, percent-clipped=1.0 +2024-08-26 19:30:17,629 INFO [train.py:1114] (1/4) Epoch 10, batch 850, loss[loss=0.2195, simple_loss=0.2881, pruned_loss=0.05537, ctc_loss=0.1002, over 19645.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2817, pruned_loss=0.05554, ctc_loss=0.1036, over 3814424.14 frames. 
], batch size: 59, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:30:25,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=124016.0, ans=0.0 +2024-08-26 19:30:36,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=124069.33333333333, ans=0.0 +2024-08-26 19:30:42,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=124122.66666666667, ans=0.125 +2024-08-26 19:30:56,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=124229.33333333333, ans=0.125 +2024-08-26 19:31:00,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=124229.33333333333, ans=0.125 +2024-08-26 19:31:04,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=124229.33333333333, ans=0.125 +2024-08-26 19:31:08,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=124229.33333333333, ans=0.125 +2024-08-26 19:31:13,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=124229.33333333333, ans=0.125 +2024-08-26 19:31:14,603 INFO [train.py:1114] (1/4) Epoch 10, batch 900, loss[loss=0.1877, simple_loss=0.2563, pruned_loss=0.04365, ctc_loss=0.07952, over 19420.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2819, pruned_loss=0.056, ctc_loss=0.1043, over 3819291.77 frames. ], batch size: 48, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:32:19,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=124389.33333333333, ans=0.0 +2024-08-26 19:32:22,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=124389.33333333333, ans=0.0 +2024-08-26 19:32:35,080 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.525e+02 1.733e+02 2.036e+02 4.140e+02, threshold=3.466e+02, percent-clipped=3.0 +2024-08-26 19:32:42,441 INFO [train.py:1114] (1/4) Epoch 10, batch 950, loss[loss=0.2108, simple_loss=0.2715, pruned_loss=0.05513, ctc_loss=0.09944, over 19481.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2825, pruned_loss=0.05649, ctc_loss=0.1053, over 3822031.47 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:33:07,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=124656.0, ans=0.0 +2024-08-26 19:33:20,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124709.33333333333, ans=0.125 +2024-08-26 19:33:20,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.37 vs. limit=10.0 +2024-08-26 19:33:22,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124709.33333333333, ans=0.1 +2024-08-26 19:33:36,585 INFO [train.py:1114] (1/4) Epoch 10, batch 1000, loss[loss=0.1934, simple_loss=0.2621, pruned_loss=0.0452, ctc_loss=0.08566, over 19854.00 frames. 
], tot_loss[loss=0.2192, simple_loss=0.2828, pruned_loss=0.05659, ctc_loss=0.1057, over 3817081.22 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:33:39,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=124816.0, ans=12.0 +2024-08-26 19:34:05,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124922.66666666667, ans=0.125 +2024-08-26 19:34:07,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=124922.66666666667, ans=15.0 +2024-08-26 19:34:11,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=124976.0, ans=0.125 +2024-08-26 19:34:19,953 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.433e+02 1.580e+02 1.832e+02 3.141e+02, threshold=3.159e+02, percent-clipped=0.0 +2024-08-26 19:34:22,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=125029.33333333333, ans=0.125 +2024-08-26 19:34:27,366 INFO [train.py:1114] (1/4) Epoch 10, batch 1050, loss[loss=0.2293, simple_loss=0.3078, pruned_loss=0.05535, ctc_loss=0.1003, over 19837.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2821, pruned_loss=0.0562, ctc_loss=0.105, over 3823053.40 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:35:09,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.74 vs. limit=15.0 +2024-08-26 19:35:15,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=125189.33333333333, ans=0.2 +2024-08-26 19:35:23,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125242.66666666667, ans=0.1 +2024-08-26 19:35:29,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.93 vs. limit=22.5 +2024-08-26 19:35:31,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=125296.0, ans=0.125 +2024-08-26 19:35:36,344 INFO [train.py:1114] (1/4) Epoch 10, batch 1100, loss[loss=0.2098, simple_loss=0.2781, pruned_loss=0.05214, ctc_loss=0.09303, over 19580.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2813, pruned_loss=0.0557, ctc_loss=0.1041, over 3829407.05 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0 +2024-08-26 19:35:57,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=125456.0, ans=0.125 +2024-08-26 19:36:15,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.85 vs. limit=15.0 +2024-08-26 19:36:18,873 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.433e+02 1.605e+02 1.841e+02 2.779e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-26 19:36:25,416 INFO [train.py:1114] (1/4) Epoch 10, batch 1150, loss[loss=0.2024, simple_loss=0.2711, pruned_loss=0.04898, ctc_loss=0.08939, over 19601.00 frames. 
], tot_loss[loss=0.2177, simple_loss=0.2818, pruned_loss=0.05594, ctc_loss=0.1045, over 3829021.94 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0 +2024-08-26 19:36:32,761 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.46 vs. limit=15.0 +2024-08-26 19:36:54,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=125722.66666666667, ans=0.125 +2024-08-26 19:36:55,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=125776.0, ans=0.025 +2024-08-26 19:37:16,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=125882.66666666667, ans=0.2 +2024-08-26 19:37:17,649 INFO [train.py:1114] (1/4) Epoch 10, batch 1200, loss[loss=0.2208, simple_loss=0.2908, pruned_loss=0.05514, ctc_loss=0.1015, over 19845.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2826, pruned_loss=0.05631, ctc_loss=0.1052, over 3823979.26 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:37:26,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.67 vs. limit=10.0 +2024-08-26 19:37:30,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=125936.0, ans=0.0 +2024-08-26 19:37:33,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0 +2024-08-26 19:37:43,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125989.33333333333, ans=0.1 +2024-08-26 19:37:44,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.65 vs. limit=22.5 +2024-08-26 19:37:57,386 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.466e+02 1.608e+02 1.824e+02 2.979e+02, threshold=3.216e+02, percent-clipped=0.0 +2024-08-26 19:38:04,046 INFO [train.py:1114] (1/4) Epoch 10, batch 1250, loss[loss=0.2385, simple_loss=0.2998, pruned_loss=0.06595, ctc_loss=0.1131, over 19506.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.283, pruned_loss=0.05631, ctc_loss=0.1049, over 3841312.74 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:38:04,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.29 vs. limit=15.0 +2024-08-26 19:38:05,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.71 vs. limit=10.0 +2024-08-26 19:38:08,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.49 vs. limit=22.5 +2024-08-26 19:38:19,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126202.66666666667, ans=0.125 +2024-08-26 19:38:46,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.37 vs. 
limit=15.0 +2024-08-26 19:39:57,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=126362.66666666667, ans=0.04949747468305833 +2024-08-26 19:40:04,476 INFO [train.py:1114] (1/4) Epoch 10, batch 1300, loss[loss=0.239, simple_loss=0.2985, pruned_loss=0.06457, ctc_loss=0.1258, over 18932.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.282, pruned_loss=0.0558, ctc_loss=0.104, over 3844627.38 frames. ], batch size: 76, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:40:04,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=126416.0, ans=0.0 +2024-08-26 19:40:34,761 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0 +2024-08-26 19:40:54,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.480e+02 1.716e+02 1.981e+02 3.061e+02, threshold=3.432e+02, percent-clipped=0.0 +2024-08-26 19:41:00,867 INFO [train.py:1114] (1/4) Epoch 10, batch 1350, loss[loss=0.2363, simple_loss=0.3012, pruned_loss=0.06209, ctc_loss=0.1178, over 19782.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2817, pruned_loss=0.05565, ctc_loss=0.1036, over 3855221.60 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:41:01,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126682.66666666667, ans=0.125 +2024-08-26 19:41:06,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126682.66666666667, ans=0.1 +2024-08-26 19:41:06,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=126682.66666666667, ans=0.1 +2024-08-26 19:41:16,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126736.0, ans=0.1 +2024-08-26 19:41:25,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=126789.33333333333, ans=0.2 +2024-08-26 19:41:29,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=126789.33333333333, ans=0.0 +2024-08-26 19:41:42,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0 +2024-08-26 19:41:45,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.10 vs. limit=22.5 +2024-08-26 19:41:45,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=126896.0, ans=0.125 +2024-08-26 19:41:48,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=126896.0, ans=0.125 +2024-08-26 19:41:52,399 INFO [train.py:1114] (1/4) Epoch 10, batch 1400, loss[loss=0.181, simple_loss=0.2413, pruned_loss=0.04419, ctc_loss=0.08095, over 19658.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2812, pruned_loss=0.05536, ctc_loss=0.1032, over 3861749.60 frames. 
], batch size: 46, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:42:17,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127002.66666666667, ans=0.125 +2024-08-26 19:42:20,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127002.66666666667, ans=0.1 +2024-08-26 19:42:27,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=127056.0, ans=0.0 +2024-08-26 19:42:43,187 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.452e+02 1.585e+02 1.952e+02 4.788e+02, threshold=3.170e+02, percent-clipped=2.0 +2024-08-26 19:42:49,759 INFO [train.py:1114] (1/4) Epoch 10, batch 1450, loss[loss=0.2318, simple_loss=0.3025, pruned_loss=0.05907, ctc_loss=0.1073, over 19707.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2822, pruned_loss=0.05576, ctc_loss=0.1041, over 3860715.75 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:42:57,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127216.0, ans=0.125 +2024-08-26 19:43:06,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127269.33333333333, ans=0.1 +2024-08-26 19:43:11,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.62 vs. limit=12.0 +2024-08-26 19:43:15,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=127322.66666666667, ans=0.0 +2024-08-26 19:43:26,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=127376.0, ans=0.04949747468305833 +2024-08-26 19:43:37,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=127429.33333333333, ans=0.025 +2024-08-26 19:43:40,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=127429.33333333333, ans=0.025 +2024-08-26 19:43:44,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=127429.33333333333, ans=0.125 +2024-08-26 19:43:48,227 INFO [train.py:1114] (1/4) Epoch 10, batch 1500, loss[loss=0.2338, simple_loss=0.3014, pruned_loss=0.06066, ctc_loss=0.1123, over 19599.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2827, pruned_loss=0.05589, ctc_loss=0.1043, over 3861030.36 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:43:49,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.46 vs. limit=15.0 +2024-08-26 19:43:49,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.46 vs. limit=12.0 +2024-08-26 19:43:54,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=127482.66666666667, ans=0.0 +2024-08-26 19:43:59,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.74 vs. 
limit=15.0 +2024-08-26 19:44:01,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=127536.0, ans=0.0 +2024-08-26 19:44:08,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127589.33333333333, ans=0.1 +2024-08-26 19:44:34,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.37 vs. limit=15.0 +2024-08-26 19:44:36,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127696.0, ans=0.125 +2024-08-26 19:44:37,665 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.427e+02 1.587e+02 1.794e+02 3.285e+02, threshold=3.174e+02, percent-clipped=1.0 +2024-08-26 19:44:52,461 INFO [train.py:1114] (1/4) Epoch 10, batch 1550, loss[loss=0.228, simple_loss=0.2965, pruned_loss=0.05812, ctc_loss=0.1084, over 19591.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2828, pruned_loss=0.05611, ctc_loss=0.1048, over 3846715.28 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:45:06,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127802.66666666667, ans=0.1 +2024-08-26 19:45:12,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127856.0, ans=0.1 +2024-08-26 19:45:30,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=127962.66666666667, ans=0.0 +2024-08-26 19:45:43,648 INFO [train.py:1114] (1/4) Epoch 10, batch 1600, loss[loss=0.2161, simple_loss=0.2872, pruned_loss=0.05203, ctc_loss=0.1025, over 19843.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2832, pruned_loss=0.05647, ctc_loss=0.1054, over 3837124.92 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:45:43,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128016.0, ans=0.125 +2024-08-26 19:45:46,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128016.0, ans=0.1 +2024-08-26 19:46:13,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128176.0, ans=0.125 +2024-08-26 19:46:18,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=128176.0, ans=0.0 +2024-08-26 19:46:26,524 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.460e+02 1.671e+02 2.068e+02 2.984e+02, threshold=3.342e+02, percent-clipped=0.0 +2024-08-26 19:46:26,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=128229.33333333333, ans=0.125 +2024-08-26 19:46:33,074 INFO [train.py:1114] (1/4) Epoch 10, batch 1650, loss[loss=0.2129, simple_loss=0.2896, pruned_loss=0.0498, ctc_loss=0.09144, over 19672.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2827, pruned_loss=0.05605, ctc_loss=0.1047, over 3833661.31 frames. 
], batch size: 59, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:46:46,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=128336.0, ans=0.0 +2024-08-26 19:47:00,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=128389.33333333333, ans=0.125 +2024-08-26 19:47:14,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=128496.0, ans=0.125 +2024-08-26 19:47:23,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=128496.0, ans=0.125 +2024-08-26 19:47:24,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=128496.0, ans=0.125 +2024-08-26 19:47:28,672 INFO [train.py:1114] (1/4) Epoch 10, batch 1700, loss[loss=0.202, simple_loss=0.2581, pruned_loss=0.05377, ctc_loss=0.09605, over 19655.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2821, pruned_loss=0.05564, ctc_loss=0.1039, over 3847897.17 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:47:32,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128549.33333333333, ans=0.1 +2024-08-26 19:47:40,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=128602.66666666667, ans=0.125 +2024-08-26 19:47:47,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=128656.0, ans=0.125 +2024-08-26 19:47:50,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128656.0, ans=0.1 +2024-08-26 19:48:15,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.44 vs. limit=22.5 +2024-08-26 19:48:18,882 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.440e+02 1.568e+02 1.897e+02 2.765e+02, threshold=3.136e+02, percent-clipped=0.0 +2024-08-26 19:48:20,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=128762.66666666667, ans=0.125 +2024-08-26 19:48:25,123 INFO [train.py:1114] (1/4) Epoch 10, batch 1750, loss[loss=0.1821, simple_loss=0.2423, pruned_loss=0.04453, ctc_loss=0.08224, over 19670.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2813, pruned_loss=0.05518, ctc_loss=0.1029, over 3851825.52 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:48:26,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.98 vs. limit=15.0 +2024-08-26 19:48:34,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0 +2024-08-26 19:48:42,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.88 vs. limit=15.0 +2024-08-26 19:49:08,980 INFO [train.py:1114] (1/4) Epoch 10, batch 1800, loss[loss=0.2218, simple_loss=0.2893, pruned_loss=0.05574, ctc_loss=0.1073, over 19616.00 frames. 
], tot_loss[loss=0.2166, simple_loss=0.2815, pruned_loss=0.05522, ctc_loss=0.1029, over 3852281.16 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:49:33,179 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=12.0 +2024-08-26 19:49:38,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=129242.66666666667, ans=0.125 +2024-08-26 19:49:43,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129242.66666666667, ans=0.1 +2024-08-26 19:49:49,354 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.466e+02 1.715e+02 2.130e+02 3.505e+02, threshold=3.430e+02, percent-clipped=4.0 +2024-08-26 19:49:55,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0 +2024-08-26 19:49:55,615 INFO [train.py:1114] (1/4) Epoch 10, batch 1850, loss[loss=0.2136, simple_loss=0.2817, pruned_loss=0.05196, ctc_loss=0.104, over 19591.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2808, pruned_loss=0.05496, ctc_loss=0.1024, over 3854580.27 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:49:55,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=129349.33333333333, ans=0.2 +2024-08-26 19:50:05,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.73 vs. limit=22.5 +2024-08-26 19:50:14,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=129456.0, ans=0.125 +2024-08-26 19:50:26,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=129456.0, ans=0.125 +2024-08-26 19:50:35,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129509.33333333333, ans=0.1 +2024-08-26 19:50:39,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=129509.33333333333, ans=0.125 +2024-08-26 19:50:50,324 INFO [train.py:1114] (1/4) Epoch 10, batch 1900, loss[loss=0.2231, simple_loss=0.2969, pruned_loss=0.05406, ctc_loss=0.1031, over 19661.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2813, pruned_loss=0.05495, ctc_loss=0.1025, over 3860649.34 frames. 
], batch size: 59, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:51:13,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=129722.66666666667, ans=0.125 +2024-08-26 19:51:20,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=129776.0, ans=0.0 +2024-08-26 19:51:27,690 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.498e+02 1.655e+02 1.944e+02 4.101e+02, threshold=3.311e+02, percent-clipped=1.0 +2024-08-26 19:51:30,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=129829.33333333333, ans=0.025 +2024-08-26 19:51:33,752 INFO [train.py:1114] (1/4) Epoch 10, batch 1950, loss[loss=0.1979, simple_loss=0.272, pruned_loss=0.04514, ctc_loss=0.08379, over 19601.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2825, pruned_loss=0.05522, ctc_loss=0.103, over 3869545.62 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:51:35,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=129882.66666666667, ans=0.1 +2024-08-26 19:52:42,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130096.0, ans=0.1 +2024-08-26 19:52:47,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.14 vs. limit=15.0 +2024-08-26 19:52:51,600 INFO [train.py:1114] (1/4) Epoch 10, batch 2000, loss[loss=0.2061, simple_loss=0.263, pruned_loss=0.05448, ctc_loss=0.1004, over 19652.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2836, pruned_loss=0.05582, ctc_loss=0.1042, over 3854942.46 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:52:52,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0 +2024-08-26 19:53:13,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=130256.0, ans=0.125 +2024-08-26 19:53:23,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=130309.33333333333, ans=0.125 +2024-08-26 19:53:28,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=130362.66666666667, ans=0.0 +2024-08-26 19:53:29,077 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.467e+02 1.617e+02 1.850e+02 3.299e+02, threshold=3.233e+02, percent-clipped=0.0 +2024-08-26 19:53:31,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130362.66666666667, ans=0.1 +2024-08-26 19:53:35,207 INFO [train.py:1114] (1/4) Epoch 10, batch 2050, loss[loss=0.186, simple_loss=0.2568, pruned_loss=0.04191, ctc_loss=0.07831, over 19689.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2824, pruned_loss=0.05552, ctc_loss=0.1036, over 3851297.82 frames. 
], batch size: 47, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:53:38,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130416.0, ans=0.1 +2024-08-26 19:53:56,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.04 vs. limit=6.0 +2024-08-26 19:54:09,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=130629.33333333333, ans=0.025 +2024-08-26 19:54:09,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=130629.33333333333, ans=0.04949747468305833 +2024-08-26 19:54:14,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.94 vs. limit=22.5 +2024-08-26 19:54:18,657 INFO [train.py:1114] (1/4) Epoch 10, batch 2100, loss[loss=0.2174, simple_loss=0.2833, pruned_loss=0.05549, ctc_loss=0.1013, over 19773.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2815, pruned_loss=0.05483, ctc_loss=0.1024, over 3857717.85 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:54:24,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=130682.66666666667, ans=0.2 +2024-08-26 19:54:26,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130736.0, ans=0.1 +2024-08-26 19:54:37,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130789.33333333333, ans=0.125 +2024-08-26 19:54:41,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.41 vs. limit=22.5 +2024-08-26 19:54:42,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.53 vs. limit=6.0 +2024-08-26 19:54:48,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=130842.66666666667, ans=0.125 +2024-08-26 19:54:51,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130842.66666666667, ans=0.125 +2024-08-26 19:54:56,946 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.404e+02 1.614e+02 1.979e+02 3.349e+02, threshold=3.228e+02, percent-clipped=1.0 +2024-08-26 19:54:59,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130896.0, ans=0.1 +2024-08-26 19:55:03,178 INFO [train.py:1114] (1/4) Epoch 10, batch 2150, loss[loss=0.1959, simple_loss=0.2633, pruned_loss=0.04634, ctc_loss=0.08956, over 19584.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.281, pruned_loss=0.05471, ctc_loss=0.102, over 3868234.63 frames. 
], batch size: 52, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:55:11,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=131002.66666666667, ans=0.025 +2024-08-26 19:55:12,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=131002.66666666667, ans=0.025 +2024-08-26 19:55:13,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131002.66666666667, ans=0.1 +2024-08-26 19:55:15,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=131002.66666666667, ans=0.125 +2024-08-26 19:55:33,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.71 vs. limit=10.0 +2024-08-26 19:55:43,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=131162.66666666666, ans=0.05 +2024-08-26 19:55:49,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.00 vs. limit=15.0 +2024-08-26 19:55:50,183 INFO [train.py:1114] (1/4) Epoch 10, batch 2200, loss[loss=0.2291, simple_loss=0.2918, pruned_loss=0.06228, ctc_loss=0.1047, over 19586.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2805, pruned_loss=0.05434, ctc_loss=0.1012, over 3867086.24 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:55:50,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=131216.0, ans=0.0 +2024-08-26 19:55:57,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=131216.0, ans=0.035 +2024-08-26 19:55:57,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=131216.0, ans=0.125 +2024-08-26 19:56:00,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131269.33333333334, ans=0.1 +2024-08-26 19:56:14,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=131269.33333333334, ans=0.0 +2024-08-26 19:56:20,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131322.66666666666, ans=0.125 +2024-08-26 19:56:38,538 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.505e+02 1.694e+02 1.989e+02 3.015e+02, threshold=3.388e+02, percent-clipped=0.0 +2024-08-26 19:56:39,766 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.10 vs. limit=22.5 +2024-08-26 19:56:44,629 INFO [train.py:1114] (1/4) Epoch 10, batch 2250, loss[loss=0.2182, simple_loss=0.2908, pruned_loss=0.05305, ctc_loss=0.09894, over 19612.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2811, pruned_loss=0.05464, ctc_loss=0.1017, over 3866927.65 frames. 
], batch size: 55, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:56:51,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=131482.66666666666, ans=0.125 +2024-08-26 19:56:55,386 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:56:58,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=131536.0, ans=0.0 +2024-08-26 19:57:05,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.07 vs. limit=10.0 +2024-08-26 19:57:27,777 INFO [train.py:1114] (1/4) Epoch 10, batch 2300, loss[loss=0.1966, simple_loss=0.2612, pruned_loss=0.04826, ctc_loss=0.0888, over 19491.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2805, pruned_loss=0.05451, ctc_loss=0.1016, over 3861842.14 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:57:27,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131749.33333333334, ans=0.1 +2024-08-26 19:57:30,100 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-08-26 19:57:34,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=131749.33333333334, ans=0.125 +2024-08-26 19:57:50,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=131856.0, ans=0.125 +2024-08-26 19:57:55,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.30 vs. limit=22.5 +2024-08-26 19:58:02,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=131962.66666666666, ans=0.2 +2024-08-26 19:58:05,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.499e+02 1.709e+02 2.092e+02 3.241e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-26 19:58:43,743 INFO [train.py:1114] (1/4) Epoch 10, batch 2350, loss[loss=0.2294, simple_loss=0.2875, pruned_loss=0.06291, ctc_loss=0.1136, over 19659.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2805, pruned_loss=0.05475, ctc_loss=0.1021, over 3863871.65 frames. ], batch size: 63, lr: 1.46e-02, grad_scale: 16.0 +2024-08-26 19:59:06,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=132122.66666666666, ans=0.125 +2024-08-26 19:59:14,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132176.0, ans=0.1 +2024-08-26 19:59:15,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=132176.0, ans=0.2 +2024-08-26 19:59:24,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.91 vs. limit=10.0 +2024-08-26 19:59:26,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.83 vs. 
limit=15.0 +2024-08-26 19:59:32,682 INFO [train.py:1114] (1/4) Epoch 10, batch 2400, loss[loss=0.2214, simple_loss=0.2866, pruned_loss=0.05714, ctc_loss=0.1044, over 19163.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2824, pruned_loss=0.05553, ctc_loss=0.1036, over 3857574.56 frames. ], batch size: 71, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:59:36,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=132282.66666666666, ans=0.125 +2024-08-26 19:59:37,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132282.66666666666, ans=0.125 +2024-08-26 19:59:37,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132282.66666666666, ans=0.125 +2024-08-26 20:00:10,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=132389.33333333334, ans=0.025 +2024-08-26 20:00:18,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=132442.66666666666, ans=0.125 +2024-08-26 20:00:19,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=132442.66666666666, ans=0.125 +2024-08-26 20:00:20,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132442.66666666666, ans=0.1 +2024-08-26 20:00:25,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=132496.0, ans=0.125 +2024-08-26 20:00:36,862 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.532e+02 1.694e+02 1.900e+02 3.260e+02, threshold=3.387e+02, percent-clipped=0.0 +2024-08-26 20:00:42,847 INFO [train.py:1114] (1/4) Epoch 10, batch 2450, loss[loss=0.2914, simple_loss=0.3192, pruned_loss=0.0966, ctc_loss=0.1763, over 13381.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2865, pruned_loss=0.05885, ctc_loss=0.1101, over 3729699.44 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 20:00:59,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=132602.66666666666, ans=0.0 +2024-08-26 20:03:28,140 INFO [train.py:1114] (1/4) Epoch 11, batch 0, loss[loss=0.2016, simple_loss=0.2673, pruned_loss=0.04905, ctc_loss=0.09449, over 19408.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2673, pruned_loss=0.04905, ctc_loss=0.09449, over 19408.00 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:03:28,141 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 20:03:42,230 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1858, simple_loss=0.2776, pruned_loss=0.03491, ctc_loss=0.06042, over 944034.00 frames. 
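The ScheduledFloat entries that dominate this log report hyperparameters (dropout rates, skip rates, balancer probabilities) whose value is recomputed from the global batch count; lines such as "ScheduledFloat: name=...dropout_p, batch_count=130416.0, ans=0.1" show the resolved value ("ans") at that point in training. A minimal sketch of the idea, assuming piecewise-linear interpolation over batch count (the class name and breakpoints here are hypothetical, not the real scaling.py API):

import bisect

class PiecewiseScheduledFloat:
    # Hypothetical sketch, not the real scaling.py ScheduledFloat:
    # a float hyperparameter interpolated piecewise-linearly
    # against the global batch count.
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count.
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def value(self, batch_count: float) -> float:
        # Clamp outside the schedule, interpolate linearly inside it.
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)

# Example: a dropout that anneals from 0.3 to 0.1 over the first 20k
# batches; by batch_count=130416 it has settled at 0.1, matching the
# "ans=0.1" readings in the surrounding log lines.
dropout_p = PiecewiseScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(130416.0))  # -> 0.1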
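The periodic optim.py warnings ("Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...") summarize gradient-norm statistics over a recent window together with the clipping threshold derived from them. A rough sketch of that diagnostic, assuming the threshold is the window median scaled by clipping_scale (the ClipStats class is hypothetical; the real optimizer's bookkeeping may differ):

from collections import deque
import torch

class ClipStats:
    # Hypothetical sketch of median-based gradient clipping plus the
    # quartile report seen in the log; not the actual optim.py code.
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)   # recent grad norms
        self.seen = 0
        self.clipped = 0

    def clip_(self, params: list) -> None:
        # Measure the total grad norm without clipping (max_norm=inf).
        norm = float(torch.nn.utils.clip_grad_norm_(params, float("inf")))
        self.norms.append(norm)
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.clipping_scale * median
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:  # rescale gradients down to the threshold
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)

    def report(self) -> str:
        qs = sorted(self.norms)
        q = lambda f: qs[int(f * (len(qs) - 1))]
        pct = 100.0 * self.clipped / max(1, self.seen)
        return (f"grad-norm quartiles {q(0.0):.3e} {q(0.25):.3e} "
                f"{q(0.5):.3e} {q(0.75):.3e} {q(1.0):.3e}, "
                f"threshold={self.clipping_scale * q(0.5):.3e}, "
                f"percent-clipped={pct:.1f}")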
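Note also that the learning rate steps down at each epoch boundary (1.46e-02 late in epoch 10, 1.39e-02 at the start of epoch 11) while decaying slowly with batch count in between. A hedged sketch of an Eden-style schedule that produces this shape; the constants lr_batches and lr_epochs below are illustrative assumptions, not values read from this run:

def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Hypothetical Eden-style schedule: decays smoothly in both batch
    # count and epoch; early in training both factors are ~1, so the
    # learning rate starts near base_lr.
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor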
+2024-08-26 20:03:42,231 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12907MB +2024-08-26 20:03:46,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=132757.33333333334, ans=10.0 +2024-08-26 20:03:54,659 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:04:01,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.27 vs. limit=10.0 +2024-08-26 20:04:11,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=132864.0, ans=10.0 +2024-08-26 20:04:15,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132917.33333333334, ans=0.1 +2024-08-26 20:04:17,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132917.33333333334, ans=0.0 +2024-08-26 20:04:21,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=132970.66666666666, ans=0.05 +2024-08-26 20:04:32,361 INFO [train.py:1114] (1/4) Epoch 11, batch 50, loss[loss=0.1783, simple_loss=0.2471, pruned_loss=0.04014, ctc_loss=0.07329, over 19716.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2809, pruned_loss=0.05488, ctc_loss=0.1029, over 845449.42 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:04:37,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=133024.0, ans=0.09899494936611666 +2024-08-26 20:04:37,948 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.624e+02 1.801e+02 2.017e+02 3.320e+02, threshold=3.603e+02, percent-clipped=0.0 +2024-08-26 20:04:40,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133024.0, ans=0.125 +2024-08-26 20:04:59,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133130.66666666666, ans=0.1 +2024-08-26 20:05:07,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.44 vs. limit=10.0 +2024-08-26 20:05:09,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=133184.0, ans=0.035 +2024-08-26 20:05:18,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.90 vs. limit=15.0 +2024-08-26 20:05:20,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=133290.66666666666, ans=0.125 +2024-08-26 20:05:21,253 INFO [train.py:1114] (1/4) Epoch 11, batch 100, loss[loss=0.2106, simple_loss=0.2776, pruned_loss=0.05257, ctc_loss=0.09598, over 19715.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2832, pruned_loss=0.05573, ctc_loss=0.1041, over 1498424.97 frames. 
], batch size: 51, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:05:32,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=133344.0, ans=0.125 +2024-08-26 20:06:00,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=133450.66666666666, ans=0.0 +2024-08-26 20:06:10,882 INFO [train.py:1114] (1/4) Epoch 11, batch 150, loss[loss=0.1932, simple_loss=0.2569, pruned_loss=0.0479, ctc_loss=0.0842, over 19708.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2808, pruned_loss=0.05454, ctc_loss=0.1019, over 2026854.05 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:06:15,821 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:06:16,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.457e+02 1.584e+02 1.841e+02 2.561e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-26 20:06:29,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=133664.0, ans=0.0 +2024-08-26 20:06:33,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=133664.0, ans=0.125 +2024-08-26 20:06:37,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=133664.0, ans=0.2 +2024-08-26 20:06:39,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133717.33333333334, ans=0.1 +2024-08-26 20:06:40,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.28 vs. limit=15.0 +2024-08-26 20:06:43,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133717.33333333334, ans=0.125 +2024-08-26 20:06:53,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=133717.33333333334, ans=0.0 +2024-08-26 20:06:56,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133770.66666666666, ans=0.1 +2024-08-26 20:08:08,067 INFO [train.py:1114] (1/4) Epoch 11, batch 200, loss[loss=0.2192, simple_loss=0.2869, pruned_loss=0.0554, ctc_loss=0.1016, over 18510.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2793, pruned_loss=0.05414, ctc_loss=0.101, over 2434621.63 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:08:15,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.24 vs. limit=10.0 +2024-08-26 20:08:30,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=133930.66666666666, ans=0.0 +2024-08-26 20:08:31,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.24 vs. 
limit=15.0 +2024-08-26 20:08:37,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=133984.0, ans=0.025 +2024-08-26 20:09:00,080 INFO [train.py:1114] (1/4) Epoch 11, batch 250, loss[loss=0.2327, simple_loss=0.3004, pruned_loss=0.06038, ctc_loss=0.1107, over 19382.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2794, pruned_loss=0.05392, ctc_loss=0.1004, over 2754253.18 frames. ], batch size: 67, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:09:01,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=134090.66666666666, ans=0.0 +2024-08-26 20:09:05,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.397e+02 1.518e+02 1.749e+02 2.921e+02, threshold=3.037e+02, percent-clipped=0.0 +2024-08-26 20:09:10,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134090.66666666666, ans=0.0 +2024-08-26 20:09:12,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=134144.0, ans=0.125 +2024-08-26 20:09:15,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=134144.0, ans=0.2 +2024-08-26 20:09:18,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=134144.0, ans=0.0 +2024-08-26 20:09:22,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134197.33333333334, ans=0.0 +2024-08-26 20:09:22,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=134197.33333333334, ans=0.0 +2024-08-26 20:09:34,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134250.66666666666, ans=0.1 +2024-08-26 20:09:41,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=134304.0, ans=0.025 +2024-08-26 20:09:42,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=134304.0, ans=0.0 +2024-08-26 20:09:44,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=134304.0, ans=0.0 +2024-08-26 20:09:51,421 INFO [train.py:1114] (1/4) Epoch 11, batch 300, loss[loss=0.2103, simple_loss=0.2802, pruned_loss=0.05136, ctc_loss=0.09434, over 19510.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.279, pruned_loss=0.05366, ctc_loss=0.09982, over 3000172.55 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:10:11,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=134410.66666666666, ans=0.125 +2024-08-26 20:10:25,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-08-26 20:10:41,575 INFO [train.py:1114] (1/4) Epoch 11, batch 350, loss[loss=0.1983, simple_loss=0.2562, pruned_loss=0.05032, ctc_loss=0.09964, over 19724.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2795, pruned_loss=0.05375, ctc_loss=0.1001, over 3190539.03 frames. 
], batch size: 48, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:10:45,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=134624.0, ans=0.125 +2024-08-26 20:10:47,199 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.479e+02 1.637e+02 2.052e+02 3.441e+02, threshold=3.275e+02, percent-clipped=1.0 +2024-08-26 20:10:56,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=134677.33333333334, ans=0.0 +2024-08-26 20:11:08,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134730.66666666666, ans=0.1 +2024-08-26 20:11:26,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=134837.33333333334, ans=0.125 +2024-08-26 20:11:29,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.22 vs. limit=15.0 +2024-08-26 20:11:30,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134890.66666666666, ans=0.0 +2024-08-26 20:11:31,275 INFO [train.py:1114] (1/4) Epoch 11, batch 400, loss[loss=0.2113, simple_loss=0.2789, pruned_loss=0.05235, ctc_loss=0.0975, over 19513.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.279, pruned_loss=0.05331, ctc_loss=0.09946, over 3342563.84 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:11:40,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=134890.66666666666, ans=0.1 +2024-08-26 20:11:55,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=134997.33333333334, ans=0.2 +2024-08-26 20:11:57,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.22 vs. limit=22.5 +2024-08-26 20:11:58,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=134997.33333333334, ans=0.0 +2024-08-26 20:12:04,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=135050.66666666666, ans=0.015 +2024-08-26 20:12:19,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135104.0, ans=0.125 +2024-08-26 20:12:20,807 INFO [train.py:1114] (1/4) Epoch 11, batch 450, loss[loss=0.2009, simple_loss=0.2765, pruned_loss=0.04535, ctc_loss=0.08625, over 19618.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2792, pruned_loss=0.05351, ctc_loss=0.0998, over 3451323.42 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:12:22,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.03 vs. 
limit=22.5 +2024-08-26 20:12:29,025 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.489e+02 1.652e+02 2.008e+02 3.634e+02, threshold=3.305e+02, percent-clipped=1.0 +2024-08-26 20:12:32,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=135210.66666666666, ans=0.2 +2024-08-26 20:12:33,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.28 vs. limit=15.0 +2024-08-26 20:12:42,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0 +2024-08-26 20:12:42,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=135264.0, ans=0.125 +2024-08-26 20:13:11,612 INFO [train.py:1114] (1/4) Epoch 11, batch 500, loss[loss=0.2245, simple_loss=0.2895, pruned_loss=0.05867, ctc_loss=0.1054, over 19714.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2782, pruned_loss=0.05304, ctc_loss=0.09888, over 3547378.58 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:13:16,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=6.0 +2024-08-26 20:13:20,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=135477.33333333334, ans=0.0 +2024-08-26 20:13:50,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=135637.33333333334, ans=10.0 +2024-08-26 20:13:51,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135637.33333333334, ans=0.1 +2024-08-26 20:13:58,579 INFO [train.py:1114] (1/4) Epoch 11, batch 550, loss[loss=0.2433, simple_loss=0.3049, pruned_loss=0.06564, ctc_loss=0.126, over 19374.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2783, pruned_loss=0.05306, ctc_loss=0.09905, over 3609558.62 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:13:59,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135690.66666666666, ans=0.125 +2024-08-26 20:14:06,853 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.449e+02 1.695e+02 2.078e+02 4.377e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-26 20:14:07,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=135690.66666666666, ans=0.0 +2024-08-26 20:14:10,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=135744.0, ans=0.125 +2024-08-26 20:14:21,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=135797.33333333334, ans=0.025 +2024-08-26 20:14:23,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.31 vs. 
limit=22.5 +2024-08-26 20:14:28,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=135797.33333333334, ans=0.0 +2024-08-26 20:14:37,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135850.66666666666, ans=0.1 +2024-08-26 20:14:50,535 INFO [train.py:1114] (1/4) Epoch 11, batch 600, loss[loss=0.2387, simple_loss=0.306, pruned_loss=0.06309, ctc_loss=0.1128, over 19334.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2789, pruned_loss=0.0534, ctc_loss=0.09978, over 3665906.05 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:14:53,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=135957.33333333334, ans=0.125 +2024-08-26 20:15:05,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136010.66666666666, ans=0.1 +2024-08-26 20:15:08,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=136010.66666666666, ans=0.0 +2024-08-26 20:15:14,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-08-26 20:15:24,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=136117.33333333334, ans=0.125 +2024-08-26 20:15:41,567 INFO [train.py:1114] (1/4) Epoch 11, batch 650, loss[loss=0.1967, simple_loss=0.28, pruned_loss=0.03988, ctc_loss=0.08425, over 19775.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2782, pruned_loss=0.05299, ctc_loss=0.09892, over 3715925.17 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:15:44,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=136224.0, ans=0.0 +2024-08-26 20:15:47,093 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.457e+02 1.627e+02 2.058e+02 3.143e+02, threshold=3.253e+02, percent-clipped=0.0 +2024-08-26 20:16:27,820 INFO [train.py:1114] (1/4) Epoch 11, batch 700, loss[loss=0.2026, simple_loss=0.2658, pruned_loss=0.05047, ctc_loss=0.09612, over 19718.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2785, pruned_loss=0.05316, ctc_loss=0.09928, over 3747623.78 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:16:30,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=136490.66666666666, ans=0.0 +2024-08-26 20:17:05,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136650.66666666666, ans=0.125 +2024-08-26 20:17:16,586 INFO [train.py:1114] (1/4) Epoch 11, batch 750, loss[loss=0.2021, simple_loss=0.2807, pruned_loss=0.0451, ctc_loss=0.08297, over 19503.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.278, pruned_loss=0.05301, ctc_loss=0.09885, over 3774889.90 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:17:24,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. 
limit=15.0 +2024-08-26 20:17:24,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.496e+02 1.727e+02 2.151e+02 3.286e+02, threshold=3.455e+02, percent-clipped=1.0 +2024-08-26 20:17:28,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.34 vs. limit=15.0 +2024-08-26 20:17:32,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=136810.66666666666, ans=0.125 +2024-08-26 20:17:38,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=136864.0, ans=0.2 +2024-08-26 20:17:40,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.25 vs. limit=15.0 +2024-08-26 20:17:53,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=136917.33333333334, ans=0.0 +2024-08-26 20:17:57,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=136970.66666666666, ans=0.0 +2024-08-26 20:18:02,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=136970.66666666666, ans=0.07 +2024-08-26 20:18:07,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137024.0, ans=0.1 +2024-08-26 20:18:08,124 INFO [train.py:1114] (1/4) Epoch 11, batch 800, loss[loss=0.1835, simple_loss=0.2562, pruned_loss=0.04088, ctc_loss=0.07279, over 19782.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2784, pruned_loss=0.05338, ctc_loss=0.0996, over 3797096.71 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:18:15,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=137024.0, ans=0.025 +2024-08-26 20:18:26,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137130.66666666666, ans=0.125 +2024-08-26 20:18:29,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=137130.66666666666, ans=0.125 +2024-08-26 20:19:13,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=137237.33333333334, ans=0.0 +2024-08-26 20:19:27,349 INFO [train.py:1114] (1/4) Epoch 11, batch 850, loss[loss=0.2218, simple_loss=0.2904, pruned_loss=0.05568, ctc_loss=0.1046, over 19685.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2781, pruned_loss=0.05324, ctc_loss=0.09944, over 3816208.68 frames. 
], batch size: 59, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:19:39,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.453e+02 1.601e+02 1.920e+02 5.497e+02, threshold=3.202e+02, percent-clipped=1.0 +2024-08-26 20:19:40,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137290.66666666666, ans=0.125 +2024-08-26 20:19:49,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137344.0, ans=0.125 +2024-08-26 20:19:54,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=137344.0, ans=0.0 +2024-08-26 20:20:17,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=137450.66666666666, ans=0.5 +2024-08-26 20:20:29,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=137450.66666666666, ans=0.0 +2024-08-26 20:20:44,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=137504.0, ans=0.2 +2024-08-26 20:20:53,074 INFO [train.py:1114] (1/4) Epoch 11, batch 900, loss[loss=0.1843, simple_loss=0.2511, pruned_loss=0.04336, ctc_loss=0.07701, over 19413.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2781, pruned_loss=0.05339, ctc_loss=0.09966, over 3819692.85 frames. ], batch size: 48, lr: 1.37e-02, grad_scale: 16.0 +2024-08-26 20:21:10,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=137610.66666666666, ans=10.0 +2024-08-26 20:21:18,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=137664.0, ans=0.0 +2024-08-26 20:21:29,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137717.33333333334, ans=0.1 +2024-08-26 20:21:34,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=137717.33333333334, ans=0.2 +2024-08-26 20:21:48,794 INFO [train.py:1114] (1/4) Epoch 11, batch 950, loss[loss=0.1897, simple_loss=0.2609, pruned_loss=0.04328, ctc_loss=0.07963, over 19495.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2789, pruned_loss=0.05378, ctc_loss=0.1004, over 3821715.22 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 16.0 +2024-08-26 20:21:55,400 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.468e+02 1.744e+02 2.017e+02 3.816e+02, threshold=3.488e+02, percent-clipped=2.0 +2024-08-26 20:21:55,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=137824.0, ans=0.125 +2024-08-26 20:21:56,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=137824.0, ans=0.2 +2024-08-26 20:22:33,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=138037.33333333334, ans=0.0 +2024-08-26 20:22:40,794 INFO [train.py:1114] (1/4) Epoch 11, batch 1000, loss[loss=0.2127, simple_loss=0.2809, pruned_loss=0.05262, ctc_loss=0.09792, over 19861.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2795, pruned_loss=0.05394, ctc_loss=0.1007, over 3818232.30 frames. 
], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:22:46,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=138090.66666666666, ans=0.0 +2024-08-26 20:22:49,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=138144.0, ans=0.0 +2024-08-26 20:22:56,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=138144.0, ans=0.0 +2024-08-26 20:23:04,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=138197.33333333334, ans=0.125 +2024-08-26 20:23:10,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=138250.66666666666, ans=0.125 +2024-08-26 20:23:11,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-08-26 20:23:25,728 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:23:28,368 INFO [train.py:1114] (1/4) Epoch 11, batch 1050, loss[loss=0.1912, simple_loss=0.2699, pruned_loss=0.04045, ctc_loss=0.07886, over 19836.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2784, pruned_loss=0.05352, ctc_loss=0.0999, over 3825246.23 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:23:34,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=138357.33333333334, ans=0.0 +2024-08-26 20:23:34,927 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.363e+02 1.534e+02 1.839e+02 4.578e+02, threshold=3.069e+02, percent-clipped=1.0 +2024-08-26 20:24:08,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0 +2024-08-26 20:24:10,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=138410.66666666666, ans=0.2 +2024-08-26 20:24:10,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=138410.66666666666, ans=0.5 +2024-08-26 20:24:12,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=138410.66666666666, ans=0.0 +2024-08-26 20:25:07,603 INFO [train.py:1114] (1/4) Epoch 11, batch 1100, loss[loss=0.1899, simple_loss=0.2643, pruned_loss=0.04319, ctc_loss=0.073, over 19595.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2781, pruned_loss=0.05295, ctc_loss=0.09896, over 3832824.15 frames. 
], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:25:08,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=138624.0, ans=0.025 +2024-08-26 20:25:19,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=138677.33333333334, ans=0.0 +2024-08-26 20:25:30,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=138730.66666666666, ans=0.05 +2024-08-26 20:25:32,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=138730.66666666666, ans=0.2 +2024-08-26 20:25:36,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=138784.0, ans=0.125 +2024-08-26 20:25:36,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=138784.0, ans=0.125 +2024-08-26 20:25:37,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138784.0, ans=0.125 +2024-08-26 20:25:43,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=138784.0, ans=0.0 +2024-08-26 20:25:56,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138890.66666666666, ans=0.125 +2024-08-26 20:25:56,914 INFO [train.py:1114] (1/4) Epoch 11, batch 1150, loss[loss=0.1931, simple_loss=0.2656, pruned_loss=0.04438, ctc_loss=0.07998, over 19607.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2779, pruned_loss=0.05304, ctc_loss=0.09909, over 3832353.99 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:26:03,585 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.470e+02 1.661e+02 1.952e+02 3.516e+02, threshold=3.323e+02, percent-clipped=2.0 +2024-08-26 20:26:12,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=138944.0, ans=0.125 +2024-08-26 20:26:15,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-08-26 20:26:15,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.77 vs. limit=15.0 +2024-08-26 20:26:40,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=139104.0, ans=0.125 +2024-08-26 20:26:45,579 INFO [train.py:1114] (1/4) Epoch 11, batch 1200, loss[loss=0.2154, simple_loss=0.2832, pruned_loss=0.05346, ctc_loss=0.1014, over 19837.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2788, pruned_loss=0.05321, ctc_loss=0.09966, over 3827484.35 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:26:46,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.66 vs. 
limit=12.0 +2024-08-26 20:26:52,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=139157.33333333334, ans=0.0 +2024-08-26 20:26:54,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=139210.66666666666, ans=0.0 +2024-08-26 20:26:58,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=139210.66666666666, ans=0.2 +2024-08-26 20:27:06,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.75 vs. limit=22.5 +2024-08-26 20:27:10,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=139264.0, ans=0.0 +2024-08-26 20:27:13,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=139317.33333333334, ans=0.2 +2024-08-26 20:27:30,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=139317.33333333334, ans=10.0 +2024-08-26 20:27:30,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.88 vs. limit=22.5 +2024-08-26 20:28:18,701 INFO [train.py:1114] (1/4) Epoch 11, batch 1250, loss[loss=0.2153, simple_loss=0.2872, pruned_loss=0.0526, ctc_loss=0.0958, over 19537.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2789, pruned_loss=0.05297, ctc_loss=0.09922, over 3845387.56 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:28:27,604 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.425e+02 1.545e+02 1.729e+02 3.064e+02, threshold=3.089e+02, percent-clipped=0.0 +2024-08-26 20:28:34,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=139477.33333333334, ans=0.0 +2024-08-26 20:28:58,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139584.0, ans=0.125 +2024-08-26 20:29:06,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. limit=6.0 +2024-08-26 20:29:12,944 INFO [train.py:1114] (1/4) Epoch 11, batch 1300, loss[loss=0.2491, simple_loss=0.3111, pruned_loss=0.06868, ctc_loss=0.1247, over 18837.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2783, pruned_loss=0.05259, ctc_loss=0.09859, over 3847789.11 frames. 
], batch size: 76, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:35:20,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139797.33333333334, ans=0.125 +2024-08-26 20:35:28,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=139797.33333333334, ans=0.125 +2024-08-26 20:35:31,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139850.66666666666, ans=0.1 +2024-08-26 20:35:49,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=139904.0, ans=0.0 +2024-08-26 20:35:49,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.22 vs. limit=22.5 +2024-08-26 20:35:52,052 INFO [train.py:1114] (1/4) Epoch 11, batch 1350, loss[loss=0.1969, simple_loss=0.275, pruned_loss=0.04333, ctc_loss=0.08032, over 19754.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2781, pruned_loss=0.0525, ctc_loss=0.09831, over 3858244.04 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:35:58,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.441e+02 1.644e+02 1.919e+02 3.174e+02, threshold=3.287e+02, percent-clipped=1.0 +2024-08-26 20:36:00,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140010.66666666666, ans=0.1 +2024-08-26 20:36:37,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-26 20:36:37,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=140170.66666666666, ans=0.0 +2024-08-26 20:36:38,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=140170.66666666666, ans=0.025 +2024-08-26 20:36:39,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-26 20:36:40,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.79 vs. limit=15.0 +2024-08-26 20:36:41,170 INFO [train.py:1114] (1/4) Epoch 11, batch 1400, loss[loss=0.1895, simple_loss=0.249, pruned_loss=0.04689, ctc_loss=0.09069, over 19659.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.278, pruned_loss=0.05251, ctc_loss=0.09822, over 3865075.90 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:36:53,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140277.33333333334, ans=0.125 +2024-08-26 20:37:20,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0 +2024-08-26 20:37:40,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.47 vs. 
limit=15.0 +2024-08-26 20:37:42,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140384.0, ans=0.0 +2024-08-26 20:37:50,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0 +2024-08-26 20:38:01,750 INFO [train.py:1114] (1/4) Epoch 11, batch 1450, loss[loss=0.2275, simple_loss=0.29, pruned_loss=0.06021, ctc_loss=0.1116, over 19704.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.279, pruned_loss=0.05292, ctc_loss=0.09888, over 3862923.73 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:38:08,100 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.486e+02 1.636e+02 1.926e+02 3.321e+02, threshold=3.272e+02, percent-clipped=1.0 +2024-08-26 20:38:14,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-26 20:38:33,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=140650.66666666666, ans=0.125 +2024-08-26 20:38:42,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=140704.0, ans=0.0 +2024-08-26 20:38:50,489 INFO [train.py:1114] (1/4) Epoch 11, batch 1500, loss[loss=0.2016, simple_loss=0.2776, pruned_loss=0.04584, ctc_loss=0.08467, over 19592.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2789, pruned_loss=0.05263, ctc_loss=0.09854, over 3862884.47 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:00,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=140757.33333333334, ans=0.0 +2024-08-26 20:39:03,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0 +2024-08-26 20:39:12,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-08-26 20:39:19,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140864.0, ans=0.1 +2024-08-26 20:39:37,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=140970.66666666666, ans=0.0 +2024-08-26 20:39:38,999 INFO [train.py:1114] (1/4) Epoch 11, batch 1550, loss[loss=0.2158, simple_loss=0.2809, pruned_loss=0.05564, ctc_loss=0.0986, over 19614.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2785, pruned_loss=0.05276, ctc_loss=0.09869, over 3847926.47 frames. 
], batch size: 60, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:39,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=141024.0, ans=0.125 +2024-08-26 20:39:43,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=141024.0, ans=0.0 +2024-08-26 20:39:45,241 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.401e+02 1.612e+02 1.919e+02 3.103e+02, threshold=3.225e+02, percent-clipped=0.0 +2024-08-26 20:39:59,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=141130.66666666666, ans=0.125 +2024-08-26 20:40:28,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=141237.33333333334, ans=0.2 +2024-08-26 20:40:29,942 INFO [train.py:1114] (1/4) Epoch 11, batch 1600, loss[loss=0.2426, simple_loss=0.3065, pruned_loss=0.06495, ctc_loss=0.1222, over 19840.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2784, pruned_loss=0.05284, ctc_loss=0.09879, over 3837004.12 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:40:35,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=141290.66666666666, ans=0.125 +2024-08-26 20:40:38,178 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.29 vs. limit=15.0 +2024-08-26 20:40:53,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=141397.33333333334, ans=0.125 +2024-08-26 20:41:07,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=141450.66666666666, ans=0.0 +2024-08-26 20:41:16,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=141504.0, ans=0.125 +2024-08-26 20:41:18,741 INFO [train.py:1114] (1/4) Epoch 11, batch 1650, loss[loss=0.2067, simple_loss=0.2801, pruned_loss=0.04884, ctc_loss=0.0889, over 19656.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2779, pruned_loss=0.05278, ctc_loss=0.09863, over 3833068.39 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:41:22,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.59 vs. limit=15.0 +2024-08-26 20:41:25,303 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.523e+02 1.726e+02 1.964e+02 3.202e+02, threshold=3.451e+02, percent-clipped=0.0 +2024-08-26 20:41:25,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=141557.33333333334, ans=0.025 +2024-08-26 20:41:33,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.39 vs. 
limit=10.0 +2024-08-26 20:41:51,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=141717.33333333334, ans=0.025 +2024-08-26 20:42:07,203 INFO [train.py:1114] (1/4) Epoch 11, batch 1700, loss[loss=0.1868, simple_loss=0.2505, pruned_loss=0.04546, ctc_loss=0.08022, over 19652.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2781, pruned_loss=0.05271, ctc_loss=0.09842, over 3847348.79 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:42:20,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=141824.0, ans=0.0 +2024-08-26 20:42:33,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=141930.66666666666, ans=0.2 +2024-08-26 20:42:52,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=142037.33333333334, ans=0.125 +2024-08-26 20:42:59,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. limit=6.0 +2024-08-26 20:43:00,056 INFO [train.py:1114] (1/4) Epoch 11, batch 1750, loss[loss=0.1785, simple_loss=0.2394, pruned_loss=0.04314, ctc_loss=0.07822, over 19616.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2776, pruned_loss=0.05243, ctc_loss=0.09789, over 3851715.47 frames. ], batch size: 45, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:06,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.441e+02 1.591e+02 1.781e+02 2.526e+02, threshold=3.183e+02, percent-clipped=0.0 +2024-08-26 20:43:09,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142144.0, ans=0.0 +2024-08-26 20:43:09,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=142144.0, ans=0.025 +2024-08-26 20:43:18,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142197.33333333334, ans=0.125 +2024-08-26 20:43:26,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=142197.33333333334, ans=0.0 +2024-08-26 20:43:40,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142304.0, ans=0.1 +2024-08-26 20:43:50,376 INFO [train.py:1114] (1/4) Epoch 11, batch 1800, loss[loss=0.2147, simple_loss=0.2869, pruned_loss=0.05188, ctc_loss=0.09683, over 19611.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2773, pruned_loss=0.0522, ctc_loss=0.09747, over 3854080.64 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:56,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.30 vs. 
limit=15.0 +2024-08-26 20:44:05,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=142410.66666666666, ans=0.0 +2024-08-26 20:44:17,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142464.0, ans=0.1 +2024-08-26 20:44:18,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=142464.0, ans=0.04949747468305833 +2024-08-26 20:44:26,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=142517.33333333334, ans=0.125 +2024-08-26 20:44:28,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=142517.33333333334, ans=0.0 +2024-08-26 20:44:32,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142570.66666666666, ans=0.125 +2024-08-26 20:44:36,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=12.0 +2024-08-26 20:44:40,937 INFO [train.py:1114] (1/4) Epoch 11, batch 1850, loss[loss=0.2129, simple_loss=0.287, pruned_loss=0.0504, ctc_loss=0.09498, over 19581.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2773, pruned_loss=0.05232, ctc_loss=0.09748, over 3856468.98 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:44:45,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=12.0 +2024-08-26 20:44:47,991 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.442e+02 1.639e+02 2.043e+02 4.343e+02, threshold=3.277e+02, percent-clipped=6.0 +2024-08-26 20:45:21,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=142837.33333333334, ans=0.05 +2024-08-26 20:45:29,164 INFO [train.py:1114] (1/4) Epoch 11, batch 1900, loss[loss=0.2071, simple_loss=0.2839, pruned_loss=0.04795, ctc_loss=0.08621, over 19649.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2777, pruned_loss=0.05234, ctc_loss=0.09758, over 3861115.76 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:45:49,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=142997.33333333334, ans=0.2 +2024-08-26 20:46:00,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.73 vs. limit=6.0 +2024-08-26 20:46:21,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.01 vs. limit=15.0 +2024-08-26 20:46:23,313 INFO [train.py:1114] (1/4) Epoch 11, batch 1950, loss[loss=0.1939, simple_loss=0.2601, pruned_loss=0.04612, ctc_loss=0.08856, over 19586.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.279, pruned_loss=0.05253, ctc_loss=0.09779, over 3870338.24 frames. 
], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:46:26,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=143157.33333333334, ans=0.025 +2024-08-26 20:46:32,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=143157.33333333334, ans=0.0 +2024-08-26 20:47:24,652 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.500e+02 1.631e+02 1.894e+02 3.317e+02, threshold=3.262e+02, percent-clipped=1.0 +2024-08-26 20:48:00,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0 +2024-08-26 20:48:33,068 INFO [train.py:1114] (1/4) Epoch 11, batch 2000, loss[loss=0.1869, simple_loss=0.2457, pruned_loss=0.04639, ctc_loss=0.0885, over 19632.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2799, pruned_loss=0.05306, ctc_loss=0.09895, over 3855225.69 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:48:38,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.74 vs. limit=15.0 +2024-08-26 20:48:40,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143424.0, ans=0.1 +2024-08-26 20:48:40,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0 +2024-08-26 20:48:46,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=143477.33333333334, ans=0.0 +2024-08-26 20:48:48,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=143477.33333333334, ans=0.0 +2024-08-26 20:49:02,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.77 vs. limit=15.0 +2024-08-26 20:49:37,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=143637.33333333334, ans=0.0 +2024-08-26 20:49:39,121 INFO [train.py:1114] (1/4) Epoch 11, batch 2050, loss[loss=0.1949, simple_loss=0.2562, pruned_loss=0.04778, ctc_loss=0.09502, over 19721.00 frames. ], tot_loss[loss=0.212, simple_loss=0.279, pruned_loss=0.05287, ctc_loss=0.09843, over 3850917.53 frames. 
], batch size: 47, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:49:47,236 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.448e+02 1.585e+02 1.933e+02 3.153e+02, threshold=3.170e+02, percent-clipped=0.0 +2024-08-26 20:49:47,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=143690.66666666666, ans=0.0 +2024-08-26 20:49:49,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=143744.0, ans=0.0 +2024-08-26 20:49:50,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143744.0, ans=0.1 +2024-08-26 20:49:50,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=143744.0, ans=0.035 +2024-08-26 20:49:51,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=143744.0, ans=0.125 +2024-08-26 20:50:24,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143850.66666666666, ans=0.1 +2024-08-26 20:50:37,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=143957.33333333334, ans=0.2 +2024-08-26 20:50:37,966 INFO [train.py:1114] (1/4) Epoch 11, batch 2100, loss[loss=0.2066, simple_loss=0.2779, pruned_loss=0.0493, ctc_loss=0.09163, over 19785.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2779, pruned_loss=0.05231, ctc_loss=0.09753, over 3857044.90 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:50:59,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.52 vs. limit=15.0 +2024-08-26 20:51:06,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=144117.33333333334, ans=0.07 +2024-08-26 20:51:11,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=144117.33333333334, ans=0.0 +2024-08-26 20:51:13,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=144170.66666666666, ans=0.0 +2024-08-26 20:51:13,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=144170.66666666666, ans=0.125 +2024-08-26 20:51:14,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=144170.66666666666, ans=0.025 +2024-08-26 20:51:23,031 INFO [train.py:1114] (1/4) Epoch 11, batch 2150, loss[loss=0.1934, simple_loss=0.2586, pruned_loss=0.04722, ctc_loss=0.0847, over 19581.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2768, pruned_loss=0.05199, ctc_loss=0.0968, over 3867527.11 frames. 
], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:51:23,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144224.0, ans=0.125 +2024-08-26 20:51:30,827 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.485e+02 1.672e+02 2.037e+02 4.338e+02, threshold=3.345e+02, percent-clipped=7.0 +2024-08-26 20:51:39,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=144277.33333333334, ans=0.125 +2024-08-26 20:51:43,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-08-26 20:52:06,906 INFO [train.py:1114] (1/4) Epoch 11, batch 2200, loss[loss=0.2287, simple_loss=0.2991, pruned_loss=0.05796, ctc_loss=0.106, over 19593.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2764, pruned_loss=0.05186, ctc_loss=0.09667, over 3867221.50 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:52:14,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=144490.66666666666, ans=0.125 +2024-08-26 20:52:50,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=144757.33333333334, ans=0.125 +2024-08-26 20:52:50,880 INFO [train.py:1114] (1/4) Epoch 11, batch 2250, loss[loss=0.2354, simple_loss=0.3041, pruned_loss=0.06106, ctc_loss=0.1114, over 19604.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2767, pruned_loss=0.05202, ctc_loss=0.09704, over 3866737.60 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:52:58,753 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.461e+02 1.628e+02 1.934e+02 8.673e+02, threshold=3.256e+02, percent-clipped=2.0 +2024-08-26 20:53:06,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=144810.66666666666, ans=0.04949747468305833 +2024-08-26 20:53:10,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=144864.0, ans=0.125 +2024-08-26 20:53:16,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=144917.33333333334, ans=0.0 +2024-08-26 20:53:35,353 INFO [train.py:1114] (1/4) Epoch 11, batch 2300, loss[loss=0.2032, simple_loss=0.2691, pruned_loss=0.04947, ctc_loss=0.09591, over 19489.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2757, pruned_loss=0.05192, ctc_loss=0.0966, over 3861277.75 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:53:37,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=145024.0, ans=0.1 +2024-08-26 20:53:41,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.31 vs. 
limit=22.5 +2024-08-26 20:53:43,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=145077.33333333334, ans=0.125 +2024-08-26 20:54:03,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=145184.0, ans=0.0 +2024-08-26 20:54:20,116 INFO [train.py:1114] (1/4) Epoch 11, batch 2350, loss[loss=0.2473, simple_loss=0.3042, pruned_loss=0.07, ctc_loss=0.1258, over 19640.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2761, pruned_loss=0.05213, ctc_loss=0.09685, over 3863502.55 frames. ], batch size: 63, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:54:22,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145290.66666666666, ans=0.1 +2024-08-26 20:54:27,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145290.66666666666, ans=0.125 +2024-08-26 20:54:28,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.482e+02 1.673e+02 1.901e+02 2.829e+02, threshold=3.345e+02, percent-clipped=0.0 +2024-08-26 20:54:35,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=145344.0, ans=0.0 +2024-08-26 20:54:48,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=145450.66666666666, ans=0.2 +2024-08-26 20:54:50,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145450.66666666666, ans=0.125 +2024-08-26 20:54:53,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=145450.66666666666, ans=0.025 +2024-08-26 20:55:01,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145504.0, ans=0.1 +2024-08-26 20:55:04,263 INFO [train.py:1114] (1/4) Epoch 11, batch 2400, loss[loss=0.2176, simple_loss=0.2863, pruned_loss=0.05455, ctc_loss=0.09977, over 19286.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2783, pruned_loss=0.05294, ctc_loss=0.09824, over 3858062.37 frames. ], batch size: 71, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:13,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=145610.66666666666, ans=0.09899494936611666 +2024-08-26 20:55:17,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.80 vs. limit=22.5 +2024-08-26 20:55:20,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.61 vs. 
limit=15.0 +2024-08-26 20:55:23,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=145664.0, ans=0.125 +2024-08-26 20:55:29,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=145717.33333333334, ans=0.125 +2024-08-26 20:55:29,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=145717.33333333334, ans=0.07 +2024-08-26 20:55:33,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=15.0 +2024-08-26 20:55:35,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=145717.33333333334, ans=0.125 +2024-08-26 20:55:45,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145770.66666666666, ans=0.1 +2024-08-26 20:55:49,241 INFO [train.py:1114] (1/4) Epoch 11, batch 2450, loss[loss=0.2889, simple_loss=0.3185, pruned_loss=0.09499, ctc_loss=0.1733, over 13684.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2829, pruned_loss=0.0562, ctc_loss=0.1046, over 3727836.25 frames. ], batch size: 141, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:51,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145824.0, ans=0.1 +2024-08-26 20:55:56,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=145824.0, ans=0.0 +2024-08-26 20:55:57,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=145877.33333333334, ans=6.0 +2024-08-26 20:55:58,187 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.577e+02 1.748e+02 1.957e+02 3.323e+02, threshold=3.496e+02, percent-clipped=0.0 +2024-08-26 20:56:21,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=145984.0, ans=0.025 +2024-08-26 21:01:32,170 INFO [train.py:1114] (1/4) Epoch 12, batch 0, loss[loss=0.2164, simple_loss=0.2726, pruned_loss=0.05853, ctc_loss=0.1081, over 19403.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2726, pruned_loss=0.05853, ctc_loss=0.1081, over 19403.00 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:01:32,170 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 21:01:47,788 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8304, 5.0517, 5.6239, 5.2800], device='cuda:1') +2024-08-26 21:01:52,249 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.1812, simple_loss=0.274, pruned_loss=0.03284, ctc_loss=0.05683, over 944034.00 frames. +2024-08-26 21:01:52,249 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-26 21:01:57,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.73 vs. limit=22.5 +2024-08-26 21:02:14,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.11 vs. 
limit=10.0 +2024-08-26 21:02:25,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=146138.66666666666, ans=10.0 +2024-08-26 21:02:26,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146138.66666666666, ans=0.1 +2024-08-26 21:02:43,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=146245.33333333334, ans=0.1 +2024-08-26 21:02:50,485 INFO [train.py:1114] (1/4) Epoch 12, batch 50, loss[loss=0.1771, simple_loss=0.2491, pruned_loss=0.03725, ctc_loss=0.07631, over 19725.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2788, pruned_loss=0.0529, ctc_loss=0.09983, over 845355.73 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:02:55,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=146298.66666666666, ans=0.125 +2024-08-26 21:02:58,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=146298.66666666666, ans=0.0 +2024-08-26 21:03:00,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=146352.0, ans=0.125 +2024-08-26 21:03:02,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=146352.0, ans=0.0 +2024-08-26 21:03:06,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=146352.0, ans=0.125 +2024-08-26 21:03:11,152 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.556e+02 1.742e+02 1.990e+02 3.045e+02, threshold=3.484e+02, percent-clipped=0.0 +2024-08-26 21:03:19,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=146458.66666666666, ans=0.125 +2024-08-26 21:03:24,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.31 vs. limit=15.0 +2024-08-26 21:04:10,874 INFO [train.py:1114] (1/4) Epoch 12, batch 100, loss[loss=0.1835, simple_loss=0.2587, pruned_loss=0.03845, ctc_loss=0.07841, over 19713.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2805, pruned_loss=0.05299, ctc_loss=0.09991, over 1498997.21 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:04:21,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.58 vs. limit=12.0 +2024-08-26 21:04:25,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0 +2024-08-26 21:04:46,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146725.33333333334, ans=0.1 +2024-08-26 21:04:47,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.29 vs. 
limit=10.0 +2024-08-26 21:04:57,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=146778.66666666666, ans=0.125 +2024-08-26 21:04:57,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=146778.66666666666, ans=0.125 +2024-08-26 21:05:01,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.75 vs. limit=15.0 +2024-08-26 21:05:05,128 INFO [train.py:1114] (1/4) Epoch 12, batch 150, loss[loss=0.1798, simple_loss=0.2435, pruned_loss=0.042, ctc_loss=0.08019, over 19728.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2775, pruned_loss=0.05196, ctc_loss=0.09749, over 2027965.43 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:12,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.24 vs. limit=22.5 +2024-08-26 21:05:25,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.421e+02 1.535e+02 1.745e+02 2.429e+02, threshold=3.070e+02, percent-clipped=0.0 +2024-08-26 21:05:33,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=146992.0, ans=0.125 +2024-08-26 21:05:38,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=146992.0, ans=0.0 +2024-08-26 21:05:39,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=146992.0, ans=0.0 +2024-08-26 21:05:42,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.91 vs. limit=15.0 +2024-08-26 21:05:44,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.66 vs. limit=10.0 +2024-08-26 21:05:50,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.42 vs. limit=15.0 +2024-08-26 21:05:52,017 INFO [train.py:1114] (1/4) Epoch 12, batch 200, loss[loss=0.2412, simple_loss=0.3027, pruned_loss=0.06474, ctc_loss=0.1257, over 18078.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2764, pruned_loss=0.05159, ctc_loss=0.09672, over 2435354.71 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:58,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=147098.66666666666, ans=0.125 +2024-08-26 21:06:00,067 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.71 vs. limit=15.0 +2024-08-26 21:06:02,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=147152.0, ans=0.0 +2024-08-26 21:06:26,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.88 vs. 
limit=6.0 +2024-08-26 21:06:38,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=147365.33333333334, ans=0.05 +2024-08-26 21:06:38,658 INFO [train.py:1114] (1/4) Epoch 12, batch 250, loss[loss=0.2167, simple_loss=0.2859, pruned_loss=0.05392, ctc_loss=0.09903, over 19469.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2757, pruned_loss=0.05128, ctc_loss=0.09602, over 2757226.13 frames. ], batch size: 67, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:06:40,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=147365.33333333334, ans=0.125 +2024-08-26 21:06:59,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.414e+02 1.495e+02 1.680e+02 4.024e+02, threshold=2.991e+02, percent-clipped=1.0 +2024-08-26 21:07:05,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147472.0, ans=0.125 +2024-08-26 21:07:32,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=147578.66666666666, ans=0.0 +2024-08-26 21:07:35,052 INFO [train.py:1114] (1/4) Epoch 12, batch 300, loss[loss=0.2337, simple_loss=0.3013, pruned_loss=0.0618, ctc_loss=0.1062, over 19513.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.275, pruned_loss=0.05099, ctc_loss=0.09538, over 3000839.16 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:07:36,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=147632.0, ans=0.125 +2024-08-26 21:07:43,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=147685.33333333334, ans=0.125 +2024-08-26 21:08:05,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=147792.0, ans=0.5 +2024-08-26 21:08:11,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=147845.33333333334, ans=0.1 +2024-08-26 21:08:13,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=147845.33333333334, ans=0.125 +2024-08-26 21:08:15,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=147845.33333333334, ans=0.125 +2024-08-26 21:08:30,180 INFO [train.py:1114] (1/4) Epoch 12, batch 350, loss[loss=0.2006, simple_loss=0.2645, pruned_loss=0.0506, ctc_loss=0.08894, over 19757.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2753, pruned_loss=0.05102, ctc_loss=0.0954, over 3190720.75 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:11:59,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=147952.0, ans=0.125 +2024-08-26 21:12:05,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.16 vs. 
limit=15.0 +2024-08-26 21:12:10,851 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.537e+02 1.863e+02 2.287e+02 4.040e+02, threshold=3.725e+02, percent-clipped=5.0 +2024-08-26 21:12:15,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.21 vs. limit=8.0 +2024-08-26 21:12:23,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-26 21:12:25,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-26 21:12:31,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148112.0, ans=0.1 +2024-08-26 21:13:43,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=148112.0, ans=0.09899494936611666 +2024-08-26 21:13:43,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=148112.0, ans=0.0 +2024-08-26 21:13:47,512 INFO [train.py:1114] (1/4) Epoch 12, batch 400, loss[loss=0.1903, simple_loss=0.2675, pruned_loss=0.04122, ctc_loss=0.07677, over 19497.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.275, pruned_loss=0.05078, ctc_loss=0.09488, over 3342696.82 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0 +2024-08-26 21:14:01,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.15 vs. limit=10.0 +2024-08-26 21:14:22,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=15.0 +2024-08-26 21:14:24,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=148378.66666666666, ans=0.125 +2024-08-26 21:14:30,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148378.66666666666, ans=0.125 +2024-08-26 21:14:34,559 INFO [train.py:1114] (1/4) Epoch 12, batch 450, loss[loss=0.2083, simple_loss=0.2868, pruned_loss=0.04769, ctc_loss=0.08622, over 19609.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.275, pruned_loss=0.05084, ctc_loss=0.09499, over 3451399.95 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:14:44,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148432.0, ans=0.125 +2024-08-26 21:14:52,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.03 vs. limit=15.0 +2024-08-26 21:15:03,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=148538.66666666666, ans=0.125 +2024-08-26 21:15:05,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.502e+02 1.695e+02 2.071e+02 2.894e+02, threshold=3.390e+02, percent-clipped=0.0 +2024-08-26 21:15:14,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.72 vs. 
limit=15.0 +2024-08-26 21:15:31,495 INFO [train.py:1114] (1/4) Epoch 12, batch 500, loss[loss=0.2025, simple_loss=0.2778, pruned_loss=0.047, ctc_loss=0.08317, over 19695.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2738, pruned_loss=0.05022, ctc_loss=0.09365, over 3547732.00 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:15:37,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=148698.66666666666, ans=0.0 +2024-08-26 21:15:37,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148698.66666666666, ans=0.1 +2024-08-26 21:15:47,786 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:16:02,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=148858.66666666666, ans=0.04949747468305833 +2024-08-26 21:16:03,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=6.0 +2024-08-26 21:16:12,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148912.0, ans=0.125 +2024-08-26 21:16:19,338 INFO [train.py:1114] (1/4) Epoch 12, batch 550, loss[loss=0.2502, simple_loss=0.3088, pruned_loss=0.07145, ctc_loss=0.122, over 19247.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2743, pruned_loss=0.05044, ctc_loss=0.09394, over 3608680.84 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:16:19,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=148965.33333333334, ans=0.1 +2024-08-26 21:16:29,924 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.67 vs. limit=15.0 +2024-08-26 21:16:38,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=149072.0, ans=0.125 +2024-08-26 21:16:39,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149072.0, ans=0.125 +2024-08-26 21:16:39,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.448e+02 1.617e+02 1.906e+02 3.977e+02, threshold=3.234e+02, percent-clipped=1.0 +2024-08-26 21:16:48,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=149125.33333333334, ans=0.0 +2024-08-26 21:17:47,333 INFO [train.py:1114] (1/4) Epoch 12, batch 600, loss[loss=0.2227, simple_loss=0.2969, pruned_loss=0.05416, ctc_loss=0.1002, over 19360.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2747, pruned_loss=0.05044, ctc_loss=0.09392, over 3665956.24 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:17:49,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.25 vs. 
limit=22.5 +2024-08-26 21:17:53,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=149232.0, ans=0.2 +2024-08-26 21:18:12,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149338.66666666666, ans=0.1 +2024-08-26 21:18:24,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.15 vs. limit=15.0 +2024-08-26 21:18:24,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.40 vs. limit=15.0 +2024-08-26 21:18:37,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.91 vs. limit=15.0 +2024-08-26 21:18:44,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.60 vs. limit=12.0 +2024-08-26 21:18:46,486 INFO [train.py:1114] (1/4) Epoch 12, batch 650, loss[loss=0.2091, simple_loss=0.2852, pruned_loss=0.04832, ctc_loss=0.09105, over 19754.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2742, pruned_loss=0.05026, ctc_loss=0.09368, over 3716283.73 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:19:01,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149498.66666666666, ans=0.0 +2024-08-26 21:19:09,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=149552.0, ans=0.025 +2024-08-26 21:19:11,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149552.0, ans=0.125 +2024-08-26 21:19:12,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149552.0, ans=0.125 +2024-08-26 21:19:16,432 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.436e+02 1.583e+02 1.844e+02 2.674e+02, threshold=3.165e+02, percent-clipped=0.0 +2024-08-26 21:19:28,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=149658.66666666666, ans=0.125 +2024-08-26 21:19:33,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=149658.66666666666, ans=0.2 +2024-08-26 21:19:35,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149712.0, ans=0.125 +2024-08-26 21:19:41,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=149712.0, ans=0.025 +2024-08-26 21:19:45,046 INFO [train.py:1114] (1/4) Epoch 12, batch 700, loss[loss=0.1898, simple_loss=0.2566, pruned_loss=0.045, ctc_loss=0.08243, over 19713.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2749, pruned_loss=0.05067, ctc_loss=0.09446, over 3746477.32 frames. 
], batch size: 51, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:19:48,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=149765.33333333334, ans=0.0 +2024-08-26 21:19:49,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=149765.33333333334, ans=0.07 +2024-08-26 21:19:50,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=149765.33333333334, ans=0.015 +2024-08-26 21:20:31,168 INFO [train.py:1114] (1/4) Epoch 12, batch 750, loss[loss=0.2195, simple_loss=0.2902, pruned_loss=0.05472, ctc_loss=0.09848, over 19512.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2746, pruned_loss=0.05061, ctc_loss=0.09438, over 3773749.03 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:20:33,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150032.0, ans=0.125 +2024-08-26 21:20:34,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150032.0, ans=0.1 +2024-08-26 21:20:38,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.27 vs. limit=12.0 +2024-08-26 21:20:46,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=150085.33333333334, ans=0.125 +2024-08-26 21:20:49,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=150138.66666666666, ans=0.0 +2024-08-26 21:20:51,920 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.592e+02 1.843e+02 2.247e+02 3.979e+02, threshold=3.686e+02, percent-clipped=6.0 +2024-08-26 21:20:55,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.63 vs. limit=12.0 +2024-08-26 21:20:56,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150138.66666666666, ans=0.125 +2024-08-26 21:21:04,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.34 vs. limit=22.5 +2024-08-26 21:21:08,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=150245.33333333334, ans=0.025 +2024-08-26 21:21:22,389 INFO [train.py:1114] (1/4) Epoch 12, batch 800, loss[loss=0.1866, simple_loss=0.248, pruned_loss=0.04528, ctc_loss=0.08657, over 19417.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2747, pruned_loss=0.05092, ctc_loss=0.09488, over 3795520.06 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:21:52,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=150405.33333333334, ans=0.125 +2024-08-26 21:21:53,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=150458.66666666666, ans=0.125 +2024-08-26 21:22:12,987 INFO [train.py:1114] (1/4) Epoch 12, batch 850, loss[loss=0.2224, simple_loss=0.2896, pruned_loss=0.05643, ctc_loss=0.1057, over 19644.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2749, pruned_loss=0.05089, ctc_loss=0.09491, over 3815292.38 frames. ], batch size: 59, lr: 1.26e-02, grad_scale: 16.0 +2024-08-26 21:22:19,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-08-26 21:22:25,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150618.66666666666, ans=0.0 +2024-08-26 21:22:34,295 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.451e+02 1.599e+02 1.811e+02 2.698e+02, threshold=3.198e+02, percent-clipped=0.0 +2024-08-26 21:22:43,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=150725.33333333334, ans=0.0 +2024-08-26 21:22:50,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=150778.66666666666, ans=0.0 +2024-08-26 21:23:00,257 INFO [train.py:1114] (1/4) Epoch 12, batch 900, loss[loss=0.1754, simple_loss=0.2431, pruned_loss=0.03895, ctc_loss=0.07463, over 19398.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2752, pruned_loss=0.05119, ctc_loss=0.09539, over 3818318.72 frames. ], batch size: 48, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:23:19,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=150938.66666666666, ans=0.05 +2024-08-26 21:23:47,028 INFO [train.py:1114] (1/4) Epoch 12, batch 950, loss[loss=0.182, simple_loss=0.2568, pruned_loss=0.03829, ctc_loss=0.07671, over 19481.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2758, pruned_loss=0.05138, ctc_loss=0.09583, over 3818384.29 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:24:06,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=151098.66666666666, ans=0.0 +2024-08-26 21:24:17,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=151152.0, ans=0.05 +2024-08-26 21:24:20,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.36 vs. limit=6.0 +2024-08-26 21:24:29,934 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.446e+02 1.609e+02 1.941e+02 6.709e+02, threshold=3.217e+02, percent-clipped=2.0 +2024-08-26 21:24:35,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151205.33333333334, ans=0.0 +2024-08-26 21:24:50,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=151312.0, ans=0.2 +2024-08-26 21:24:57,622 INFO [train.py:1114] (1/4) Epoch 12, batch 1000, loss[loss=0.1877, simple_loss=0.2605, pruned_loss=0.04264, ctc_loss=0.074, over 19855.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2761, pruned_loss=0.05134, ctc_loss=0.09582, over 3815332.89 frames. 
], batch size: 52, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:25:00,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151365.33333333334, ans=0.125 +2024-08-26 21:25:22,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=15.0 +2024-08-26 21:25:27,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=151418.66666666666, ans=0.125 +2024-08-26 21:25:39,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=151472.0, ans=0.025 +2024-08-26 21:25:59,413 INFO [train.py:1114] (1/4) Epoch 12, batch 1050, loss[loss=0.1971, simple_loss=0.2757, pruned_loss=0.04258, ctc_loss=0.08345, over 19847.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2753, pruned_loss=0.05093, ctc_loss=0.09522, over 3821509.76 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:26:06,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=151632.0, ans=0.125 +2024-08-26 21:26:20,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.384e+02 1.517e+02 1.769e+02 3.938e+02, threshold=3.034e+02, percent-clipped=1.0 +2024-08-26 21:26:26,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=151792.0, ans=0.125 +2024-08-26 21:26:43,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0 +2024-08-26 21:26:45,801 INFO [train.py:1114] (1/4) Epoch 12, batch 1100, loss[loss=0.2025, simple_loss=0.2703, pruned_loss=0.04921, ctc_loss=0.09053, over 19578.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2749, pruned_loss=0.05089, ctc_loss=0.09501, over 3828704.68 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:26:48,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=151898.66666666666, ans=0.07 +2024-08-26 21:26:52,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.43 vs. limit=15.0 +2024-08-26 21:26:52,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=151898.66666666666, ans=0.1 +2024-08-26 21:26:59,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=151952.0, ans=0.125 +2024-08-26 21:27:02,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=151952.0, ans=0.0 +2024-08-26 21:27:08,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=152005.33333333334, ans=0.0 +2024-08-26 21:27:08,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.83 vs. 
limit=22.5 +2024-08-26 21:27:20,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=152058.66666666666, ans=0.0 +2024-08-26 21:27:31,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=152112.0, ans=0.125 +2024-08-26 21:27:32,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.57 vs. limit=15.0 +2024-08-26 21:27:36,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=152112.0, ans=0.125 +2024-08-26 21:27:37,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=152112.0, ans=0.2 +2024-08-26 21:27:41,053 INFO [train.py:1114] (1/4) Epoch 12, batch 1150, loss[loss=0.1874, simple_loss=0.2543, pruned_loss=0.04385, ctc_loss=0.08206, over 19595.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2745, pruned_loss=0.05055, ctc_loss=0.09461, over 3828532.61 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:27:41,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=152165.33333333334, ans=0.07 +2024-08-26 21:27:44,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=152165.33333333334, ans=0.125 +2024-08-26 21:27:47,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=152165.33333333334, ans=0.2 +2024-08-26 21:27:48,792 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:27:57,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152218.66666666666, ans=0.1 +2024-08-26 21:28:02,690 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.513e+02 1.822e+02 2.260e+02 3.131e+02, threshold=3.643e+02, percent-clipped=1.0 +2024-08-26 21:28:06,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152272.0, ans=0.1 +2024-08-26 21:28:06,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152272.0, ans=0.125 +2024-08-26 21:28:08,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=152325.33333333334, ans=0.125 +2024-08-26 21:28:10,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=152325.33333333334, ans=0.125 +2024-08-26 21:28:10,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.51 vs. 
limit=22.5 +2024-08-26 21:28:12,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=152325.33333333334, ans=0.0 +2024-08-26 21:28:13,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152325.33333333334, ans=0.125 +2024-08-26 21:28:28,136 INFO [train.py:1114] (1/4) Epoch 12, batch 1200, loss[loss=0.2135, simple_loss=0.282, pruned_loss=0.05244, ctc_loss=0.1003, over 19828.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2751, pruned_loss=0.05063, ctc_loss=0.09483, over 3824097.13 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:28:44,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=152485.33333333334, ans=0.025 +2024-08-26 21:29:15,284 INFO [train.py:1114] (1/4) Epoch 12, batch 1250, loss[loss=0.2187, simple_loss=0.2849, pruned_loss=0.05646, ctc_loss=0.09877, over 19525.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2759, pruned_loss=0.0509, ctc_loss=0.09514, over 3842652.98 frames. ], batch size: 61, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:29:30,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152752.0, ans=0.1 +2024-08-26 21:29:36,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.442e+02 1.596e+02 2.011e+02 3.434e+02, threshold=3.192e+02, percent-clipped=0.0 +2024-08-26 21:30:17,254 INFO [train.py:1114] (1/4) Epoch 12, batch 1300, loss[loss=0.2171, simple_loss=0.2874, pruned_loss=0.05311, ctc_loss=0.1012, over 18795.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.275, pruned_loss=0.05043, ctc_loss=0.09435, over 3846441.29 frames. ], batch size: 76, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:31:05,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=153178.66666666666, ans=0.125 +2024-08-26 21:31:06,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0 +2024-08-26 21:31:08,966 INFO [train.py:1114] (1/4) Epoch 12, batch 1350, loss[loss=0.2044, simple_loss=0.267, pruned_loss=0.05144, ctc_loss=0.09719, over 19780.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2744, pruned_loss=0.05008, ctc_loss=0.09365, over 3856928.48 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:31:29,983 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.467e+02 1.650e+02 2.044e+02 3.234e+02, threshold=3.299e+02, percent-clipped=1.0 +2024-08-26 21:31:32,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=153338.66666666666, ans=0.025 +2024-08-26 21:31:34,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.29 vs. 
limit=22.5 +2024-08-26 21:31:35,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153338.66666666666, ans=0.1 +2024-08-26 21:31:39,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=153392.0, ans=0.125 +2024-08-26 21:31:42,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=153392.0, ans=0.0 +2024-08-26 21:31:54,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=153498.66666666666, ans=0.0 +2024-08-26 21:31:55,342 INFO [train.py:1114] (1/4) Epoch 12, batch 1400, loss[loss=0.201, simple_loss=0.2564, pruned_loss=0.05347, ctc_loss=0.09676, over 19678.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2743, pruned_loss=0.05015, ctc_loss=0.0938, over 3864355.01 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:31:59,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-08-26 21:32:03,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-08-26 21:32:42,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.91 vs. limit=15.0 +2024-08-26 21:32:47,925 INFO [train.py:1114] (1/4) Epoch 12, batch 1450, loss[loss=0.2162, simple_loss=0.2854, pruned_loss=0.05299, ctc_loss=0.1024, over 19673.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2751, pruned_loss=0.0506, ctc_loss=0.09454, over 3862587.11 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:33:12,065 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.443e+02 1.618e+02 1.909e+02 2.759e+02, threshold=3.236e+02, percent-clipped=0.0 +2024-08-26 21:33:33,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=153978.66666666666, ans=0.0 +2024-08-26 21:33:37,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=153978.66666666666, ans=0.025 +2024-08-26 21:33:38,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=153978.66666666666, ans=0.0 +2024-08-26 21:33:42,303 INFO [train.py:1114] (1/4) Epoch 12, batch 1500, loss[loss=0.1992, simple_loss=0.2708, pruned_loss=0.04684, ctc_loss=0.085, over 19598.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.275, pruned_loss=0.05015, ctc_loss=0.09389, over 3861806.77 frames. 
], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:34:03,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154138.66666666666, ans=0.1 +2024-08-26 21:34:06,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=154138.66666666666, ans=0.025 +2024-08-26 21:34:07,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154138.66666666666, ans=0.1 +2024-08-26 21:34:26,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=154245.33333333334, ans=0.0 +2024-08-26 21:34:29,573 INFO [train.py:1114] (1/4) Epoch 12, batch 1550, loss[loss=0.2289, simple_loss=0.2957, pruned_loss=0.05848, ctc_loss=0.1132, over 19594.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2751, pruned_loss=0.05055, ctc_loss=0.09459, over 3845995.35 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:34:35,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=154298.66666666666, ans=0.025 +2024-08-26 21:34:35,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.38 vs. limit=15.0 +2024-08-26 21:34:51,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.431e+02 1.666e+02 1.890e+02 5.087e+02, threshold=3.332e+02, percent-clipped=2.0 +2024-08-26 21:35:17,028 INFO [train.py:1114] (1/4) Epoch 12, batch 1600, loss[loss=0.2017, simple_loss=0.2778, pruned_loss=0.04533, ctc_loss=0.0871, over 19849.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2746, pruned_loss=0.05051, ctc_loss=0.09446, over 3835436.76 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:35:34,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=154618.66666666666, ans=0.0 +2024-08-26 21:35:48,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=154725.33333333334, ans=0.0 +2024-08-26 21:35:59,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=154778.66666666666, ans=0.125 +2024-08-26 21:36:01,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=154778.66666666666, ans=0.07 +2024-08-26 21:36:05,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=154778.66666666666, ans=0.125 +2024-08-26 21:36:11,264 INFO [train.py:1114] (1/4) Epoch 12, batch 1650, loss[loss=0.2146, simple_loss=0.2834, pruned_loss=0.05286, ctc_loss=0.1002, over 19656.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2745, pruned_loss=0.05067, ctc_loss=0.09466, over 3831667.61 frames. ], batch size: 59, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:36:16,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.12 vs. 
limit=22.5 +2024-08-26 21:36:22,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=154885.33333333334, ans=0.0 +2024-08-26 21:36:34,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.420e+02 1.592e+02 1.938e+02 3.625e+02, threshold=3.184e+02, percent-clipped=1.0 +2024-08-26 21:36:35,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=154938.66666666666, ans=0.2 +2024-08-26 21:36:37,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=154938.66666666666, ans=0.09899494936611666 +2024-08-26 21:36:43,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=154992.0, ans=0.125 +2024-08-26 21:36:50,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=155045.33333333334, ans=0.0 +2024-08-26 21:36:50,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=155045.33333333334, ans=0.125 +2024-08-26 21:36:50,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=155045.33333333334, ans=0.0 +2024-08-26 21:36:56,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=155045.33333333334, ans=0.04949747468305833 +2024-08-26 21:37:00,210 INFO [train.py:1114] (1/4) Epoch 12, batch 1700, loss[loss=0.1865, simple_loss=0.2524, pruned_loss=0.04431, ctc_loss=0.08021, over 19671.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2742, pruned_loss=0.05022, ctc_loss=0.09389, over 3846379.08 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:37:07,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=155098.66666666666, ans=0.1 +2024-08-26 21:37:07,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=155098.66666666666, ans=0.0 +2024-08-26 21:37:34,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=155258.66666666666, ans=0.0 +2024-08-26 21:37:42,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.09 vs. limit=15.0 +2024-08-26 21:37:44,946 INFO [train.py:1114] (1/4) Epoch 12, batch 1750, loss[loss=0.1899, simple_loss=0.2527, pruned_loss=0.04588, ctc_loss=0.08858, over 19637.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2742, pruned_loss=0.0501, ctc_loss=0.09369, over 3851033.26 frames. 
], batch size: 45, lr: 1.24e-02, grad_scale: 16.0 +2024-08-26 21:37:45,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=155365.33333333334, ans=0.0 +2024-08-26 21:37:53,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=155418.66666666666, ans=0.125 +2024-08-26 21:38:05,532 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:38:06,162 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.409e+02 1.600e+02 1.878e+02 3.182e+02, threshold=3.201e+02, percent-clipped=0.0 +2024-08-26 21:38:06,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=155472.0, ans=0.125 +2024-08-26 21:38:07,245 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:38:17,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=12.0 +2024-08-26 21:38:20,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=155578.66666666666, ans=0.0 +2024-08-26 21:38:26,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=155578.66666666666, ans=15.0 +2024-08-26 21:38:28,996 INFO [train.py:1114] (1/4) Epoch 12, batch 1800, loss[loss=0.1961, simple_loss=0.275, pruned_loss=0.04244, ctc_loss=0.08085, over 19599.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2744, pruned_loss=0.05005, ctc_loss=0.09357, over 3853096.14 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 16.0 +2024-08-26 21:38:30,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=155632.0, ans=0.025 +2024-08-26 21:38:41,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=155685.33333333334, ans=0.125 +2024-08-26 21:38:46,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.00 vs. limit=6.0 +2024-08-26 21:38:55,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=155792.0, ans=0.125 +2024-08-26 21:39:03,555 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=12.0 +2024-08-26 21:39:12,710 INFO [train.py:1114] (1/4) Epoch 12, batch 1850, loss[loss=0.2479, simple_loss=0.3121, pruned_loss=0.06713, ctc_loss=0.1236, over 19597.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2739, pruned_loss=0.0497, ctc_loss=0.09286, over 3855362.83 frames. 
], batch size: 57, lr: 1.24e-02, grad_scale: 16.0 +2024-08-26 21:39:12,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155898.66666666666, ans=0.125 +2024-08-26 21:39:23,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=155952.0, ans=0.125 +2024-08-26 21:39:34,666 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.542e+02 1.764e+02 2.176e+02 3.980e+02, threshold=3.528e+02, percent-clipped=3.0 +2024-08-26 21:39:43,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.23 vs. limit=15.0 +2024-08-26 21:39:56,683 INFO [train.py:1114] (1/4) Epoch 12, batch 1900, loss[loss=0.2118, simple_loss=0.2853, pruned_loss=0.05066, ctc_loss=0.09258, over 19655.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2749, pruned_loss=0.05015, ctc_loss=0.0937, over 3861072.89 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 8.0 +2024-08-26 21:39:59,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156165.33333333334, ans=0.1 +2024-08-26 21:40:09,002 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:40:16,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=156272.0, ans=0.025 +2024-08-26 21:40:20,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156272.0, ans=0.1 +2024-08-26 21:40:22,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=156325.33333333334, ans=0.09899494936611666 +2024-08-26 21:40:29,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=156325.33333333334, ans=10.0 +2024-08-26 21:40:40,047 INFO [train.py:1114] (1/4) Epoch 12, batch 1950, loss[loss=0.1829, simple_loss=0.255, pruned_loss=0.04043, ctc_loss=0.07509, over 19593.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2757, pruned_loss=0.05027, ctc_loss=0.09381, over 3869858.39 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2024-08-26 21:40:47,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=156432.0, ans=0.0 +2024-08-26 21:40:54,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156485.33333333334, ans=0.0 +2024-08-26 21:40:55,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156485.33333333334, ans=0.1 +2024-08-26 21:41:01,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.443e+02 1.619e+02 1.881e+02 3.638e+02, threshold=3.238e+02, percent-clipped=1.0 +2024-08-26 21:41:25,589 INFO [train.py:1114] (1/4) Epoch 12, batch 2000, loss[loss=0.187, simple_loss=0.251, pruned_loss=0.04444, ctc_loss=0.08532, over 19656.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2765, pruned_loss=0.05085, ctc_loss=0.09474, over 3853730.37 frames. 
], batch size: 45, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:41:38,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156752.0, ans=0.1 +2024-08-26 21:42:10,404 INFO [train.py:1114] (1/4) Epoch 12, batch 2050, loss[loss=0.1878, simple_loss=0.2514, pruned_loss=0.0448, ctc_loss=0.08647, over 19725.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2748, pruned_loss=0.05042, ctc_loss=0.09399, over 3850393.15 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:42:13,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156965.33333333334, ans=0.0 +2024-08-26 21:42:17,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.87 vs. limit=15.0 +2024-08-26 21:42:24,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=157018.66666666666, ans=0.125 +2024-08-26 21:42:33,052 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.456e+02 1.628e+02 1.934e+02 3.317e+02, threshold=3.256e+02, percent-clipped=1.0 +2024-08-26 21:42:43,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=157125.33333333334, ans=0.125 +2024-08-26 21:42:48,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=157178.66666666666, ans=0.0 +2024-08-26 21:42:48,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=157178.66666666666, ans=0.0 +2024-08-26 21:42:50,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157178.66666666666, ans=0.125 +2024-08-26 21:42:55,875 INFO [train.py:1114] (1/4) Epoch 12, batch 2100, loss[loss=0.1974, simple_loss=0.2677, pruned_loss=0.04625, ctc_loss=0.08659, over 19764.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2741, pruned_loss=0.04965, ctc_loss=0.09293, over 3858278.38 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:43:01,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=157232.0, ans=0.0 +2024-08-26 21:43:08,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157285.33333333334, ans=0.125 +2024-08-26 21:43:19,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=157338.66666666666, ans=0.0 +2024-08-26 21:43:56,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.40 vs. limit=22.5 +2024-08-26 21:44:00,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=157445.33333333334, ans=0.125 +2024-08-26 21:44:06,211 INFO [train.py:1114] (1/4) Epoch 12, batch 2150, loss[loss=0.1913, simple_loss=0.265, pruned_loss=0.04242, ctc_loss=0.08179, over 19563.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2737, pruned_loss=0.04968, ctc_loss=0.09294, over 3869358.89 frames. 
], batch size: 52, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:44:18,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=157552.0, ans=0.0 +2024-08-26 21:44:27,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.35 vs. limit=10.0 +2024-08-26 21:44:27,884 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.483e+02 1.683e+02 2.213e+02 4.687e+02, threshold=3.365e+02, percent-clipped=1.0 +2024-08-26 21:44:48,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.68 vs. limit=15.0 +2024-08-26 21:45:26,973 INFO [train.py:1114] (1/4) Epoch 12, batch 2200, loss[loss=0.2318, simple_loss=0.2984, pruned_loss=0.05973, ctc_loss=0.1142, over 19595.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2734, pruned_loss=0.04964, ctc_loss=0.09272, over 3867203.55 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:45:40,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=157818.66666666666, ans=0.2 +2024-08-26 21:45:45,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=157872.0, ans=0.0 +2024-08-26 21:45:55,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=157925.33333333334, ans=0.0 +2024-08-26 21:45:55,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=157925.33333333334, ans=0.0 +2024-08-26 21:46:04,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157978.66666666666, ans=0.1 +2024-08-26 21:46:10,383 INFO [train.py:1114] (1/4) Epoch 12, batch 2250, loss[loss=0.2124, simple_loss=0.2822, pruned_loss=0.05226, ctc_loss=0.09495, over 19608.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2738, pruned_loss=0.04983, ctc_loss=0.09295, over 3866797.67 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:46:12,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=158032.0, ans=0.125 +2024-08-26 21:46:13,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158032.0, ans=0.1 +2024-08-26 21:46:14,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-26 21:46:18,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=158085.33333333334, ans=0.2 +2024-08-26 21:46:31,824 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.542e+02 1.805e+02 2.126e+02 6.638e+02, threshold=3.611e+02, percent-clipped=1.0 +2024-08-26 21:46:38,179 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.08 vs. 
limit=22.5 +2024-08-26 21:46:39,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=158192.0, ans=0.125 +2024-08-26 21:46:53,570 INFO [train.py:1114] (1/4) Epoch 12, batch 2300, loss[loss=0.1851, simple_loss=0.2567, pruned_loss=0.04138, ctc_loss=0.07694, over 19502.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2732, pruned_loss=0.05002, ctc_loss=0.09323, over 3860138.16 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:47:03,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158352.0, ans=0.1 +2024-08-26 21:47:20,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=158458.66666666666, ans=0.0 +2024-08-26 21:47:21,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=158458.66666666666, ans=0.0 +2024-08-26 21:47:36,481 INFO [train.py:1114] (1/4) Epoch 12, batch 2350, loss[loss=0.2156, simple_loss=0.2853, pruned_loss=0.05369, ctc_loss=0.09612, over 19669.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2728, pruned_loss=0.04995, ctc_loss=0.09296, over 3863839.87 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:47:39,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=158565.33333333334, ans=0.04949747468305833 +2024-08-26 21:47:39,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158565.33333333334, ans=0.125 +2024-08-26 21:47:41,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.44 vs. limit=15.0 +2024-08-26 21:47:51,689 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-08-26 21:47:58,863 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.457e+02 1.679e+02 1.938e+02 3.188e+02, threshold=3.358e+02, percent-clipped=0.0 +2024-08-26 21:48:02,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=158725.33333333334, ans=0.125 +2024-08-26 21:48:05,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=158725.33333333334, ans=0.0 +2024-08-26 21:48:12,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=158778.66666666666, ans=0.125 +2024-08-26 21:48:14,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=158778.66666666666, ans=0.125 +2024-08-26 21:48:21,589 INFO [train.py:1114] (1/4) Epoch 12, batch 2400, loss[loss=0.2286, simple_loss=0.2996, pruned_loss=0.05743, ctc_loss=0.1069, over 19313.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.275, pruned_loss=0.05057, ctc_loss=0.09398, over 3858473.22 frames. 
], batch size: 71, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 21:48:21,736 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:48:24,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=158832.0, ans=0.025 +2024-08-26 21:49:07,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158885.33333333334, ans=0.1 +2024-08-26 21:49:24,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=158992.0, ans=0.5 +2024-08-26 21:49:33,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.73 vs. limit=15.0 +2024-08-26 21:49:42,229 INFO [train.py:1114] (1/4) Epoch 12, batch 2450, loss[loss=0.2792, simple_loss=0.3202, pruned_loss=0.08681, ctc_loss=0.1614, over 13343.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2788, pruned_loss=0.05339, ctc_loss=0.09947, over 3731368.21 frames. ], batch size: 140, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 21:49:43,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=159098.66666666666, ans=0.125 +2024-08-26 21:49:45,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=159098.66666666666, ans=0.125 +2024-08-26 21:49:49,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=159098.66666666666, ans=0.125 +2024-08-26 21:49:53,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=159152.0, ans=0.025 +2024-08-26 21:49:57,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159152.0, ans=0.125 +2024-08-26 21:50:05,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=159205.33333333334, ans=0.2 +2024-08-26 21:50:05,692 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.611e+02 1.857e+02 2.069e+02 3.042e+02, threshold=3.714e+02, percent-clipped=0.0 +2024-08-26 21:50:12,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=159258.66666666666, ans=0.0 +2024-08-26 21:51:14,831 INFO [train.py:1114] (1/4) Epoch 13, batch 0, loss[loss=0.1816, simple_loss=0.2506, pruned_loss=0.04097, ctc_loss=0.07703, over 19821.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2506, pruned_loss=0.04097, ctc_loss=0.07703, over 19821.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 16.0 +2024-08-26 21:51:14,832 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 21:51:26,163 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.6628, 4.8349, 5.4402, 5.1924], device='cuda:1') +2024-08-26 21:51:28,903 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1795, simple_loss=0.2723, pruned_loss=0.03226, ctc_loss=0.05568, over 944034.00 frames. 
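For context on the `attn_weights_entropy` diagnostic that `zipformer.py` prints during the validation pass above: it summarizes how diffuse each head's attention distributions are, one value per head. The sketch below is a minimal illustration of that kind of diagnostic, not the actual icefall/zipformer code; the function name, the assumed tensor shape `(num_heads, query_len, key_len)`, and the example sizes are all assumptions.

```python
import torch

def attn_weights_entropy(attn_weights: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    """Mean entropy of the attention distributions, one value per head.

    Assumes `attn_weights` has shape (num_heads, query_len, key_len) and that
    each row along the last dim is a post-softmax probability distribution.
    """
    p = attn_weights.clamp(min=eps)          # avoid log(0)
    entropy = -(p * p.log()).sum(dim=-1)     # (num_heads, query_len)
    return entropy.mean(dim=-1)              # (num_heads,)

# Perfectly uniform attention over 100 keys would give entropy log(100) ≈ 4.61,
# the same order of magnitude as the per-head values logged above
# (tensor([5.6628, 4.8349, 5.4402, 5.1924])).
weights = torch.softmax(torch.randn(4, 10, 100), dim=-1)
print(attn_weights_entropy(weights))
```

Higher values mean a head spreads its attention broadly; values collapsing toward 0 would indicate a head locking onto a single key, which is why this is a useful health check to log alongside the validation loss.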
+2024-08-26 21:51:28,903 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-26 21:51:30,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=159306.66666666666, ans=0.0 +2024-08-26 21:51:32,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=159306.66666666666, ans=0.0 +2024-08-26 21:51:41,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159360.0, ans=0.125 +2024-08-26 21:51:48,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=159360.0, ans=0.0 +2024-08-26 21:51:55,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=159413.33333333334, ans=0.125 +2024-08-26 21:52:00,966 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.48 vs. limit=22.5 +2024-08-26 21:52:02,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=159466.66666666666, ans=0.025 +2024-08-26 21:52:04,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=159466.66666666666, ans=0.125 +2024-08-26 21:52:08,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=159520.0, ans=0.025 +2024-08-26 21:52:10,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.45 vs. limit=12.0 +2024-08-26 21:52:18,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159573.33333333334, ans=0.125 +2024-08-26 21:52:18,716 INFO [train.py:1114] (1/4) Epoch 13, batch 50, loss[loss=0.1651, simple_loss=0.2345, pruned_loss=0.03486, ctc_loss=0.06467, over 19716.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2745, pruned_loss=0.04971, ctc_loss=0.09304, over 844748.83 frames. 
], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:52:32,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159626.66666666666, ans=0.1 +2024-08-26 21:52:36,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=159626.66666666666, ans=0.0 +2024-08-26 21:52:39,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=159680.0, ans=0.07 +2024-08-26 21:52:41,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=159680.0, ans=0.2 +2024-08-26 21:52:48,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=159733.33333333334, ans=0.2 +2024-08-26 21:52:56,397 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.509e+02 1.748e+02 2.087e+02 2.763e+02, threshold=3.495e+02, percent-clipped=0.0 +2024-08-26 21:52:56,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=159733.33333333334, ans=0.04949747468305833 +2024-08-26 21:53:01,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159786.66666666666, ans=0.1 +2024-08-26 21:53:07,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=159840.0, ans=0.125 +2024-08-26 21:53:07,768 INFO [train.py:1114] (1/4) Epoch 13, batch 100, loss[loss=0.2003, simple_loss=0.2667, pruned_loss=0.04902, ctc_loss=0.08947, over 19708.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2762, pruned_loss=0.05, ctc_loss=0.09349, over 1499477.02 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:53:10,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=159840.0, ans=0.2 +2024-08-26 21:53:15,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159840.0, ans=0.125 +2024-08-26 21:53:15,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.67 vs. limit=12.0 +2024-08-26 21:53:19,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=159893.33333333334, ans=0.0 +2024-08-26 21:53:30,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.32 vs. limit=10.0 +2024-08-26 21:53:36,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=159946.66666666666, ans=0.2 +2024-08-26 21:53:39,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.22 vs. 
limit=22.5 +2024-08-26 21:53:42,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=160000.0, ans=0.125 +2024-08-26 21:53:43,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160000.0, ans=0.1 +2024-08-26 21:53:45,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160000.0, ans=0.0 +2024-08-26 21:54:16,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 21:54:21,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=22.5 +2024-08-26 21:54:21,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=160053.33333333334, ans=0.0 +2024-08-26 21:54:22,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=160106.66666666666, ans=0.09899494936611666 +2024-08-26 21:54:23,451 INFO [train.py:1114] (1/4) Epoch 13, batch 150, loss[loss=0.1982, simple_loss=0.2565, pruned_loss=0.05106, ctc_loss=0.09449, over 19698.00 frames. ], tot_loss[loss=0.205, simple_loss=0.274, pruned_loss=0.04956, ctc_loss=0.09231, over 2027175.31 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:54:24,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160106.66666666666, ans=0.1 +2024-08-26 21:54:27,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=160106.66666666666, ans=0.09899494936611666 +2024-08-26 21:54:47,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0 +2024-08-26 21:54:56,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=160266.66666666666, ans=0.0 +2024-08-26 21:55:02,363 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.397e+02 1.535e+02 1.726e+02 2.735e+02, threshold=3.069e+02, percent-clipped=0.0 +2024-08-26 21:55:13,398 INFO [train.py:1114] (1/4) Epoch 13, batch 200, loss[loss=0.2255, simple_loss=0.292, pruned_loss=0.05733, ctc_loss=0.1109, over 18394.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2729, pruned_loss=0.04937, ctc_loss=0.09192, over 2434816.66 frames. ], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:55:22,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.71 vs. 
limit=15.0 +2024-08-26 21:55:29,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160426.66666666666, ans=0.125 +2024-08-26 21:55:31,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=160480.0, ans=0.0 +2024-08-26 21:55:41,133 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:56:19,220 INFO [train.py:1114] (1/4) Epoch 13, batch 250, loss[loss=0.2119, simple_loss=0.28, pruned_loss=0.05325, ctc_loss=0.09327, over 19336.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2725, pruned_loss=0.04885, ctc_loss=0.09125, over 2754771.31 frames. ], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:56:24,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160640.0, ans=0.1 +2024-08-26 21:56:25,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160640.0, ans=0.1 +2024-08-26 21:56:45,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=160746.66666666666, ans=0.125 +2024-08-26 21:56:50,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=160800.0, ans=0.125 +2024-08-26 21:56:56,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=160800.0, ans=0.0 +2024-08-26 21:56:57,710 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.442e+02 1.721e+02 2.190e+02 3.294e+02, threshold=3.441e+02, percent-clipped=2.0 +2024-08-26 21:57:01,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.73 vs. limit=6.0 +2024-08-26 21:57:04,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160853.33333333334, ans=0.125 +2024-08-26 21:57:07,833 INFO [train.py:1114] (1/4) Epoch 13, batch 300, loss[loss=0.2084, simple_loss=0.2749, pruned_loss=0.05156, ctc_loss=0.09716, over 19518.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2718, pruned_loss=0.04869, ctc_loss=0.09119, over 3000163.21 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 8.0 +2024-08-26 21:57:22,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=160960.0, ans=0.2 +2024-08-26 21:57:46,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=161120.0, ans=0.125 +2024-08-26 21:57:49,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=161120.0, ans=0.125 +2024-08-26 21:57:52,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161120.0, ans=0.125 +2024-08-26 21:57:55,518 INFO [train.py:1114] (1/4) Epoch 13, batch 350, loss[loss=0.1699, simple_loss=0.2404, pruned_loss=0.03611, ctc_loss=0.06769, over 19765.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2724, pruned_loss=0.0488, ctc_loss=0.09119, over 3189239.62 frames. 
], batch size: 48, lr: 1.17e-02, grad_scale: 8.0 +2024-08-26 21:58:26,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=161333.33333333334, ans=0.125 +2024-08-26 21:58:29,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161333.33333333334, ans=0.0 +2024-08-26 21:58:32,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=161333.33333333334, ans=0.2 +2024-08-26 21:58:33,157 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.400e+02 1.583e+02 1.867e+02 2.908e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-26 21:58:43,184 INFO [train.py:1114] (1/4) Epoch 13, batch 400, loss[loss=0.2051, simple_loss=0.2766, pruned_loss=0.04908, ctc_loss=0.08872, over 19481.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2718, pruned_loss=0.04846, ctc_loss=0.09071, over 3341325.68 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:58:46,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161440.0, ans=0.125 +2024-08-26 21:58:51,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 21:58:55,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161493.33333333334, ans=0.1 +2024-08-26 21:58:56,798 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.64 vs. limit=15.0 +2024-08-26 21:58:59,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 21:59:15,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161600.0, ans=0.0 +2024-08-26 21:59:16,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161600.0, ans=0.1 +2024-08-26 21:59:30,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.97 vs. limit=10.0 +2024-08-26 21:59:32,239 INFO [train.py:1114] (1/4) Epoch 13, batch 450, loss[loss=0.191, simple_loss=0.2732, pruned_loss=0.03875, ctc_loss=0.07834, over 19624.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2722, pruned_loss=0.04873, ctc_loss=0.09146, over 3450443.00 frames. 
], batch size: 55, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:59:48,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=161760.0, ans=0.125 +2024-08-26 21:59:51,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=161760.0, ans=0.0 +2024-08-26 22:00:10,470 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.449e+02 1.659e+02 1.894e+02 3.083e+02, threshold=3.319e+02, percent-clipped=0.0 +2024-08-26 22:00:10,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=161920.0, ans=0.2 +2024-08-26 22:00:20,541 INFO [train.py:1114] (1/4) Epoch 13, batch 500, loss[loss=0.2132, simple_loss=0.2815, pruned_loss=0.05267, ctc_loss=0.0992, over 19660.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2711, pruned_loss=0.04829, ctc_loss=0.0905, over 3546503.37 frames. ], batch size: 63, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 22:00:46,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=162080.0, ans=0.1 +2024-08-26 22:00:52,036 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:00:56,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-08-26 22:01:02,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=162186.66666666666, ans=0.125 +2024-08-26 22:01:10,522 INFO [train.py:1114] (1/4) Epoch 13, batch 550, loss[loss=0.2136, simple_loss=0.2845, pruned_loss=0.05151, ctc_loss=0.0992, over 19314.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2708, pruned_loss=0.04817, ctc_loss=0.0903, over 3608218.83 frames. ], batch size: 71, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 22:01:36,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=162346.66666666666, ans=0.0 +2024-08-26 22:01:40,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=162400.0, ans=0.025 +2024-08-26 22:01:43,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=162400.0, ans=0.0 +2024-08-26 22:01:59,759 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.555e+02 1.782e+02 2.360e+02 4.088e+02, threshold=3.564e+02, percent-clipped=3.0 +2024-08-26 22:02:10,210 INFO [train.py:1114] (1/4) Epoch 13, batch 600, loss[loss=0.1985, simple_loss=0.2769, pruned_loss=0.04334, ctc_loss=0.08358, over 19384.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2715, pruned_loss=0.04828, ctc_loss=0.09044, over 3667030.45 frames. 
], batch size: 67, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:02:11,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=162506.66666666666, ans=0.125 +2024-08-26 22:02:12,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=162506.66666666666, ans=0.07 +2024-08-26 22:02:20,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=162560.0, ans=0.0 +2024-08-26 22:02:30,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.18 vs. limit=15.0 +2024-08-26 22:02:38,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162613.33333333334, ans=0.0 +2024-08-26 22:02:43,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. limit=15.0 +2024-08-26 22:02:47,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=162666.66666666666, ans=0.125 +2024-08-26 22:02:48,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=162720.0, ans=0.125 +2024-08-26 22:02:58,290 INFO [train.py:1114] (1/4) Epoch 13, batch 650, loss[loss=0.1898, simple_loss=0.2671, pruned_loss=0.04083, ctc_loss=0.07727, over 19763.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.271, pruned_loss=0.04801, ctc_loss=0.08979, over 3717515.22 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:02:59,708 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.76 vs. limit=6.0 +2024-08-26 22:03:01,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-08-26 22:03:02,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=162773.33333333334, ans=15.0 +2024-08-26 22:03:43,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=162880.0, ans=0.05 +2024-08-26 22:03:44,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162880.0, ans=0.0 +2024-08-26 22:03:51,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.79 vs. limit=10.0 +2024-08-26 22:03:56,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.22 vs. limit=10.0 +2024-08-26 22:03:57,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.372e+02 1.512e+02 1.802e+02 3.637e+02, threshold=3.024e+02, percent-clipped=1.0 +2024-08-26 22:03:58,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.04 vs. 
limit=22.5 +2024-08-26 22:04:09,863 INFO [train.py:1114] (1/4) Epoch 13, batch 700, loss[loss=0.1796, simple_loss=0.2511, pruned_loss=0.03926, ctc_loss=0.07382, over 19725.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2716, pruned_loss=0.0483, ctc_loss=0.09013, over 3749066.62 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:04:19,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.97 vs. limit=15.0 +2024-08-26 22:04:23,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0 +2024-08-26 22:04:28,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=163093.33333333334, ans=0.025 +2024-08-26 22:04:36,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163146.66666666666, ans=0.1 +2024-08-26 22:04:38,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=163146.66666666666, ans=0.125 +2024-08-26 22:04:38,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.22 vs. limit=15.0 +2024-08-26 22:04:42,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0 +2024-08-26 22:04:43,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163146.66666666666, ans=0.125 +2024-08-26 22:04:50,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.69 vs. limit=15.0 +2024-08-26 22:04:53,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=163200.0, ans=0.0 +2024-08-26 22:05:04,326 INFO [train.py:1114] (1/4) Epoch 13, batch 750, loss[loss=0.1925, simple_loss=0.2683, pruned_loss=0.04242, ctc_loss=0.07955, over 19474.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2713, pruned_loss=0.04818, ctc_loss=0.09002, over 3774963.78 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:05:04,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.73 vs. 
limit=10.0 +2024-08-26 22:05:29,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 22:05:31,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=163413.33333333334, ans=12.0 +2024-08-26 22:05:38,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163466.66666666666, ans=0.0 +2024-08-26 22:05:41,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 22:05:41,879 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.233e+02 1.560e+02 1.959e+02 2.402e+02 3.823e+02, threshold=3.919e+02, percent-clipped=10.0 +2024-08-26 22:05:56,853 INFO [train.py:1114] (1/4) Epoch 13, batch 800, loss[loss=0.1854, simple_loss=0.253, pruned_loss=0.04246, ctc_loss=0.0824, over 19820.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2714, pruned_loss=0.04818, ctc_loss=0.08998, over 3796423.05 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 22:05:58,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163573.33333333334, ans=0.1 +2024-08-26 22:06:02,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=163573.33333333334, ans=0.0 +2024-08-26 22:06:12,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-08-26 22:06:18,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=163680.0, ans=0.025 +2024-08-26 22:06:23,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=163733.33333333334, ans=0.2 +2024-08-26 22:06:54,853 INFO [train.py:1114] (1/4) Epoch 13, batch 850, loss[loss=0.1988, simple_loss=0.2785, pruned_loss=0.04333, ctc_loss=0.0812, over 19658.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2708, pruned_loss=0.048, ctc_loss=0.08956, over 3814761.56 frames. 
], batch size: 59, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:06:58,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163840.0, ans=0.125 +2024-08-26 22:06:59,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=163840.0, ans=0.125 +2024-08-26 22:07:12,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163893.33333333334, ans=0.125 +2024-08-26 22:07:25,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=163946.66666666666, ans=0.0 +2024-08-26 22:07:29,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164000.0, ans=0.125 +2024-08-26 22:07:37,696 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.442e+02 1.756e+02 2.038e+02 3.459e+02, threshold=3.512e+02, percent-clipped=0.0 +2024-08-26 22:07:44,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=164053.33333333334, ans=0.125 +2024-08-26 22:07:44,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=164053.33333333334, ans=0.2 +2024-08-26 22:07:50,321 INFO [train.py:1114] (1/4) Epoch 13, batch 900, loss[loss=0.1801, simple_loss=0.2464, pruned_loss=0.04199, ctc_loss=0.07443, over 19831.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2713, pruned_loss=0.04839, ctc_loss=0.09031, over 3819190.32 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:07:57,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=164106.66666666666, ans=0.0 +2024-08-26 22:08:12,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164213.33333333334, ans=0.125 +2024-08-26 22:08:19,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.45 vs. limit=8.0 +2024-08-26 22:08:24,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=164266.66666666666, ans=0.125 +2024-08-26 22:08:40,772 INFO [train.py:1114] (1/4) Epoch 13, batch 950, loss[loss=0.1833, simple_loss=0.2561, pruned_loss=0.03981, ctc_loss=0.07725, over 19500.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2719, pruned_loss=0.04881, ctc_loss=0.09112, over 3820526.74 frames. 
], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:08:42,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=164373.33333333334, ans=0.0 +2024-08-26 22:08:45,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=164373.33333333334, ans=0.2 +2024-08-26 22:08:57,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=164426.66666666666, ans=0.025 +2024-08-26 22:09:00,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164426.66666666666, ans=0.1 +2024-08-26 22:09:03,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=164480.0, ans=0.0 +2024-08-26 22:09:04,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=164480.0, ans=0.125 +2024-08-26 22:09:18,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.90 vs. limit=22.5 +2024-08-26 22:09:18,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=164533.33333333334, ans=0.2 +2024-08-26 22:09:20,508 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.559e+02 1.935e+02 2.172e+02 5.830e+02, threshold=3.869e+02, percent-clipped=1.0 +2024-08-26 22:09:21,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=164586.66666666666, ans=0.125 +2024-08-26 22:09:24,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=164586.66666666666, ans=0.125 +2024-08-26 22:09:25,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164586.66666666666, ans=0.125 +2024-08-26 22:09:29,555 INFO [train.py:1114] (1/4) Epoch 13, batch 1000, loss[loss=0.1968, simple_loss=0.2698, pruned_loss=0.04564, ctc_loss=0.08121, over 19861.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2727, pruned_loss=0.04925, ctc_loss=0.09186, over 3816217.51 frames. 
], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:09:31,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164640.0, ans=0.125 +2024-08-26 22:09:34,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=164640.0, ans=0.125 +2024-08-26 22:09:46,325 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:09:49,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164746.66666666666, ans=0.125 +2024-08-26 22:10:05,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=164800.0, ans=0.07 +2024-08-26 22:10:06,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=164800.0, ans=0.0 +2024-08-26 22:10:11,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=164853.33333333334, ans=0.125 +2024-08-26 22:10:19,116 INFO [train.py:1114] (1/4) Epoch 13, batch 1050, loss[loss=0.1941, simple_loss=0.2686, pruned_loss=0.04311, ctc_loss=0.08372, over 19840.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2719, pruned_loss=0.049, ctc_loss=0.09138, over 3821903.62 frames. ], batch size: 57, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:10:20,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.85 vs. limit=6.0 +2024-08-26 22:10:29,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.78 vs. limit=15.0 +2024-08-26 22:10:30,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164960.0, ans=0.1 +2024-08-26 22:10:33,286 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.50 vs. limit=15.0 +2024-08-26 22:10:36,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.40 vs. limit=15.0 +2024-08-26 22:10:38,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=165013.33333333334, ans=0.025 +2024-08-26 22:10:55,514 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.353e+02 1.566e+02 1.889e+02 2.686e+02, threshold=3.131e+02, percent-clipped=0.0 +2024-08-26 22:11:06,693 INFO [train.py:1114] (1/4) Epoch 13, batch 1100, loss[loss=0.1865, simple_loss=0.2588, pruned_loss=0.04177, ctc_loss=0.07683, over 19579.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2722, pruned_loss=0.04893, ctc_loss=0.09128, over 3829803.11 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:11:29,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.65 vs. 
+2024-08-26 22:11:34,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=165333.33333333334, ans=0.0 +2024-08-26 22:11:39,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=165333.33333333334, ans=0.5 +2024-08-26 22:11:44,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=15.0 +2024-08-26 22:11:44,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.70 vs. limit=22.5 +2024-08-26 22:11:47,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=165386.66666666666, ans=0.125 +2024-08-26 22:11:55,423 INFO [train.py:1114] (1/4) Epoch 13, batch 1150, loss[loss=0.1978, simple_loss=0.2691, pruned_loss=0.04608, ctc_loss=0.08571, over 19582.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2726, pruned_loss=0.04915, ctc_loss=0.09157, over 3829936.96 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 16.0 +2024-08-26 22:12:01,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=165440.0, ans=0.0 +2024-08-26 22:12:16,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=165546.66666666666, ans=0.1 +2024-08-26 22:12:24,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=165600.0, ans=0.125 +2024-08-26 22:12:34,672 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.454e+02 1.639e+02 1.902e+02 3.180e+02, threshold=3.277e+02, percent-clipped=1.0 +2024-08-26 22:12:34,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165653.33333333334, ans=0.1 +2024-08-26 22:12:36,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=165653.33333333334, ans=10.0 +2024-08-26 22:12:39,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=165653.33333333334, ans=0.2 +2024-08-26 22:12:43,816 INFO [train.py:1114] (1/4) Epoch 13, batch 1200, loss[loss=0.214, simple_loss=0.2874, pruned_loss=0.05037, ctc_loss=0.0995, over 19832.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2736, pruned_loss=0.04949, ctc_loss=0.09234, over 3824766.47 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:13:10,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.51 vs. limit=22.5 +2024-08-26 22:13:23,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165920.0, ans=0.125 +2024-08-26 22:13:32,356 INFO [train.py:1114] (1/4) Epoch 13, batch 1250, loss[loss=0.2275, simple_loss=0.2958, pruned_loss=0.05884, ctc_loss=0.1039, over 19512.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2738, pruned_loss=0.04932, ctc_loss=0.09198, over 3842801.59 frames. 
], batch size: 61, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:13:35,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.33 vs. limit=10.0 +2024-08-26 22:13:49,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=166026.66666666666, ans=0.025 +2024-08-26 22:13:59,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166080.0, ans=0.1 +2024-08-26 22:13:59,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.93 vs. limit=12.0 +2024-08-26 22:14:04,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=166133.33333333334, ans=0.02 +2024-08-26 22:14:11,818 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.418e+02 1.637e+02 2.002e+02 4.206e+02, threshold=3.274e+02, percent-clipped=1.0 +2024-08-26 22:14:23,455 INFO [train.py:1114] (1/4) Epoch 13, batch 1300, loss[loss=0.2113, simple_loss=0.2786, pruned_loss=0.05183, ctc_loss=0.1008, over 18906.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2725, pruned_loss=0.0486, ctc_loss=0.09103, over 3847225.03 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:14:27,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=166240.0, ans=0.0 +2024-08-26 22:14:27,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=166240.0, ans=0.125 +2024-08-26 22:14:29,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166240.0, ans=0.0 +2024-08-26 22:14:30,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.84 vs. limit=8.0 +2024-08-26 22:14:34,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.01 vs. limit=15.0 +2024-08-26 22:14:38,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.84 vs. limit=6.0 +2024-08-26 22:14:44,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=166346.66666666666, ans=0.125 +2024-08-26 22:14:49,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.86 vs. limit=15.0 +2024-08-26 22:14:59,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=166453.33333333334, ans=0.0 +2024-08-26 22:15:09,563 INFO [train.py:1114] (1/4) Epoch 13, batch 1350, loss[loss=0.1934, simple_loss=0.2689, pruned_loss=0.04302, ctc_loss=0.07989, over 19751.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2722, pruned_loss=0.04847, ctc_loss=0.0907, over 3858651.13 frames. 
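], batch size: 54, lr: 1.15e-02, grad_scale: 32.0

The WARNING [optim.py:487] entries report the quartiles of recent gradient norms, the clipping threshold in force, and the fraction of recently clipped batches. A sketch of quartile-based clipping follows, under the assumption that the threshold is Clipping_scale times a running median of total gradient norms; the actual rule in optim.py is not visible in this log, and QuartileClipper is an illustrative name.

```python
# Sketch of quartile-based gradient clipping. Assumption: threshold =
# clipping_scale * running median of recent total grad norms; the real
# rule in optim.py is not shown in this log.
from collections import deque
import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
        self.clipping_scale = clipping_scale
        self.history = deque(maxlen=window)

    def clip_(self, parameters):
        grads = [p.grad for p in parameters if p.grad is not None]
        total_norm = torch.stack([g.detach().norm(2) for g in grads]).norm(2)
        self.history.append(float(total_norm))
        hist = torch.tensor(list(self.history))
        q1, q2, q3 = (float(torch.quantile(hist, q)) for q in (0.25, 0.5, 0.75))
        threshold = self.clipping_scale * q2
        if float(total_norm) > threshold:          # scale grads down in place
            for g in grads:
                g.mul_(threshold / (float(total_norm) + 1e-6))
        # q1/q2/q3 and threshold mirror the logged
        # "grad-norm quartiles ... threshold=..." message
        return q1, q2, q3, threshold
```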
+2024-08-26 22:15:12,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=166506.66666666666, ans=0.125 +2024-08-26 22:15:19,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0 +2024-08-26 22:15:28,351 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:15:41,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.56 vs. limit=22.5 +2024-08-26 22:15:49,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=166720.0, ans=0.0 +2024-08-26 22:15:50,041 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.605e+02 1.958e+02 2.747e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-26 22:15:51,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166720.0, ans=0.1 +2024-08-26 22:15:59,140 INFO [train.py:1114] (1/4) Epoch 13, batch 1400, loss[loss=0.1692, simple_loss=0.2363, pruned_loss=0.0367, ctc_loss=0.0714, over 19677.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.272, pruned_loss=0.04849, ctc_loss=0.09051, over 3864928.92 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:16:01,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.68 vs. limit=10.0 +2024-08-26 22:16:06,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=166773.33333333334, ans=0.125 +2024-08-26 22:16:08,014 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-08-26 22:16:21,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=166880.0, ans=10.0 +2024-08-26 22:16:22,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=166880.0, ans=0.0 +2024-08-26 22:16:31,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=15.0 +2024-08-26 22:16:32,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=166933.33333333334, ans=0.2 +2024-08-26 22:16:33,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166933.33333333334, ans=0.125 +2024-08-26 22:16:37,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=166986.66666666666, ans=0.125 +2024-08-26 22:16:47,845 INFO [train.py:1114] (1/4) Epoch 13, batch 1450, loss[loss=0.2167, simple_loss=0.28, pruned_loss=0.05665, ctc_loss=0.1004, over 19651.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2725, pruned_loss=0.04877, ctc_loss=0.09104, over 3863219.48 frames. 
], batch size: 63, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:16:49,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.81 vs. limit=22.5 +2024-08-26 22:17:10,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=167146.66666666666, ans=0.0 +2024-08-26 22:17:14,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167146.66666666666, ans=0.125 +2024-08-26 22:17:27,086 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.434e+02 1.640e+02 1.966e+02 4.010e+02, threshold=3.281e+02, percent-clipped=1.0 +2024-08-26 22:17:31,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=167253.33333333334, ans=0.025 +2024-08-26 22:17:36,408 INFO [train.py:1114] (1/4) Epoch 13, batch 1500, loss[loss=0.2013, simple_loss=0.2693, pruned_loss=0.04809, ctc_loss=0.09277, over 19591.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2725, pruned_loss=0.04849, ctc_loss=0.09062, over 3862550.12 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:18:02,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=167413.33333333334, ans=0.2 +2024-08-26 22:18:12,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=167466.66666666666, ans=0.0 +2024-08-26 22:18:14,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.70 vs. limit=15.0 +2024-08-26 22:18:16,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.91 vs. limit=15.0 +2024-08-26 22:18:26,618 INFO [train.py:1114] (1/4) Epoch 13, batch 1550, loss[loss=0.2192, simple_loss=0.2919, pruned_loss=0.05408, ctc_loss=0.09588, over 19627.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2729, pruned_loss=0.04895, ctc_loss=0.09133, over 3846724.73 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:18:28,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=167573.33333333334, ans=0.0 +2024-08-26 22:18:33,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.66 vs. limit=15.0 +2024-08-26 22:18:43,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=167626.66666666666, ans=0.09899494936611666 +2024-08-26 22:18:57,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=167733.33333333334, ans=0.0 +2024-08-26 22:19:04,513 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.500e+02 1.731e+02 2.118e+02 3.338e+02, threshold=3.463e+02, percent-clipped=1.0 +2024-08-26 22:19:12,941 INFO [train.py:1114] (1/4) Epoch 13, batch 1600, loss[loss=0.1816, simple_loss=0.26, pruned_loss=0.03752, ctc_loss=0.07053, over 19840.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2722, pruned_loss=0.04868, ctc_loss=0.09098, over 3834490.98 frames. 
], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:19:16,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=167840.0, ans=0.125 +2024-08-26 22:19:17,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.09 vs. limit=15.0 +2024-08-26 22:19:27,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=167893.33333333334, ans=0.2 +2024-08-26 22:19:31,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.12 vs. limit=22.5 +2024-08-26 22:19:59,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=167946.66666666666, ans=0.125 +2024-08-26 22:20:06,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.47 vs. limit=10.0 +2024-08-26 22:20:19,756 INFO [train.py:1114] (1/4) Epoch 13, batch 1650, loss[loss=0.2041, simple_loss=0.281, pruned_loss=0.04603, ctc_loss=0.08781, over 19655.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2727, pruned_loss=0.04898, ctc_loss=0.0913, over 3832591.27 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:20:20,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=168106.66666666666, ans=0.95 +2024-08-26 22:20:29,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=168160.0, ans=0.0 +2024-08-26 22:20:51,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.91 vs. limit=12.0 +2024-08-26 22:20:57,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.381e+02 1.542e+02 1.780e+02 2.683e+02, threshold=3.084e+02, percent-clipped=0.0 +2024-08-26 22:21:07,581 INFO [train.py:1114] (1/4) Epoch 13, batch 1700, loss[loss=0.1773, simple_loss=0.2425, pruned_loss=0.04023, ctc_loss=0.07907, over 19702.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2718, pruned_loss=0.04838, ctc_loss=0.09033, over 3847218.47 frames. ], batch size: 46, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:21:28,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168480.0, ans=0.125 +2024-08-26 22:21:53,907 INFO [train.py:1114] (1/4) Epoch 13, batch 1750, loss[loss=0.178, simple_loss=0.2398, pruned_loss=0.04179, ctc_loss=0.08129, over 19684.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2714, pruned_loss=0.04816, ctc_loss=0.09003, over 3851805.56 frames. 
], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:22:00,112 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:22:07,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=168693.33333333334, ans=22.5 +2024-08-26 22:22:08,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=168693.33333333334, ans=0.0 +2024-08-26 22:22:09,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=168693.33333333334, ans=0.0 +2024-08-26 22:23:51,730 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.438e+02 1.563e+02 1.924e+02 3.851e+02, threshold=3.126e+02, percent-clipped=3.0 +2024-08-26 22:23:55,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168853.33333333334, ans=0.1 +2024-08-26 22:24:01,036 INFO [train.py:1114] (1/4) Epoch 13, batch 1800, loss[loss=0.2025, simple_loss=0.2796, pruned_loss=0.04459, ctc_loss=0.09059, over 19620.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2714, pruned_loss=0.04802, ctc_loss=0.08977, over 3853381.72 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:24:02,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168906.66666666666, ans=0.125 +2024-08-26 22:24:11,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.41 vs. limit=22.5 +2024-08-26 22:24:31,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.56 vs. limit=6.0 +2024-08-26 22:24:36,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=169120.0, ans=0.2 +2024-08-26 22:24:44,741 INFO [train.py:1114] (1/4) Epoch 13, batch 1850, loss[loss=0.2018, simple_loss=0.2737, pruned_loss=0.04687, ctc_loss=0.09052, over 19571.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.271, pruned_loss=0.04795, ctc_loss=0.08946, over 3856130.43 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:24:51,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0 +2024-08-26 22:25:05,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.09 vs. 
limit=22.5 +2024-08-26 22:25:08,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169280.0, ans=0.125 +2024-08-26 22:25:21,911 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.546e+02 1.793e+02 2.323e+02 4.609e+02, threshold=3.586e+02, percent-clipped=7.0 +2024-08-26 22:25:25,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=169386.66666666666, ans=0.125 +2024-08-26 22:25:29,827 INFO [train.py:1114] (1/4) Epoch 13, batch 1900, loss[loss=0.1857, simple_loss=0.2673, pruned_loss=0.03853, ctc_loss=0.06734, over 19628.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2711, pruned_loss=0.04775, ctc_loss=0.08904, over 3861621.93 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:25:45,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=169493.33333333334, ans=0.125 +2024-08-26 22:25:56,390 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:25:58,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169600.0, ans=0.1 +2024-08-26 22:26:01,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=169600.0, ans=0.2 +2024-08-26 22:26:05,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=169653.33333333334, ans=0.125 +2024-08-26 22:26:09,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=169653.33333333334, ans=0.0 +2024-08-26 22:26:13,641 INFO [train.py:1114] (1/4) Epoch 13, batch 1950, loss[loss=0.1993, simple_loss=0.2706, pruned_loss=0.04661, ctc_loss=0.08692, over 19591.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2723, pruned_loss=0.04793, ctc_loss=0.08947, over 3870432.45 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:26:16,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=169706.66666666666, ans=0.125 +2024-08-26 22:26:17,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=169706.66666666666, ans=0.0 +2024-08-26 22:26:28,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169760.0, ans=0.125 +2024-08-26 22:26:40,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=169866.66666666666, ans=0.2 +2024-08-26 22:26:42,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=169866.66666666666, ans=0.0 +2024-08-26 22:26:45,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=169866.66666666666, ans=0.025 +2024-08-26 22:26:53,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. 
+2024-08-26 22:26:53,523 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.527e+02 1.786e+02 2.093e+02 2.857e+02, threshold=3.573e+02, percent-clipped=0.0 +2024-08-26 22:27:00,505 INFO [train.py:1114] (1/4) Epoch 13, batch 2000, loss[loss=0.1805, simple_loss=0.2381, pruned_loss=0.04519, ctc_loss=0.08111, over 19627.00 frames. ], tot_loss[loss=0.203, simple_loss=0.273, pruned_loss=0.04837, ctc_loss=0.09039, over 3856227.76 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:27:01,943 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.18 vs. limit=15.0 +2024-08-26 22:27:02,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=169973.33333333334, ans=0.125 +2024-08-26 22:27:35,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.85 vs. limit=10.0 +2024-08-26 22:27:35,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=170186.66666666666, ans=0.025 +2024-08-26 22:27:38,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-08-26 22:27:44,116 INFO [train.py:1114] (1/4) Epoch 13, batch 2050, loss[loss=0.1765, simple_loss=0.2427, pruned_loss=0.04025, ctc_loss=0.07457, over 19719.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.272, pruned_loss=0.04817, ctc_loss=0.08999, over 3851755.38 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:27:58,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=170293.33333333334, ans=0.2 +2024-08-26 22:28:20,406 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.436e+02 1.652e+02 1.928e+02 2.658e+02, threshold=3.303e+02, percent-clipped=0.0 +2024-08-26 22:28:24,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=170453.33333333334, ans=0.035 +2024-08-26 22:28:27,560 INFO [train.py:1114] (1/4) Epoch 13, batch 2100, loss[loss=0.1916, simple_loss=0.2673, pruned_loss=0.04247, ctc_loss=0.07709, over 19769.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2715, pruned_loss=0.04778, ctc_loss=0.08917, over 3858448.85 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:28:40,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=170560.0, ans=0.0 +2024-08-26 22:28:44,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=170613.33333333334, ans=0.95 +2024-08-26 22:28:45,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.06 vs. 
limit=22.5 +2024-08-26 22:28:52,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=170613.33333333334, ans=0.2 +2024-08-26 22:28:52,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=170613.33333333334, ans=0.125 +2024-08-26 22:28:52,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.28 vs. limit=15.0 +2024-08-26 22:29:18,564 INFO [train.py:1114] (1/4) Epoch 13, batch 2150, loss[loss=0.1872, simple_loss=0.2616, pruned_loss=0.04094, ctc_loss=0.07732, over 19608.00 frames. ], tot_loss[loss=0.201, simple_loss=0.271, pruned_loss=0.0477, ctc_loss=0.08887, over 3869651.90 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:29:18,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170773.33333333334, ans=0.125 +2024-08-26 22:29:22,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170773.33333333334, ans=0.1 +2024-08-26 22:29:24,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.66 vs. limit=15.0 +2024-08-26 22:29:38,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=170880.0, ans=0.125 +2024-08-26 22:29:48,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=170933.33333333334, ans=0.2 +2024-08-26 22:29:55,024 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.462e+02 1.698e+02 2.269e+02 4.218e+02, threshold=3.397e+02, percent-clipped=7.0 +2024-08-26 22:29:55,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0 +2024-08-26 22:30:02,059 INFO [train.py:1114] (1/4) Epoch 13, batch 2200, loss[loss=0.2147, simple_loss=0.2835, pruned_loss=0.05264, ctc_loss=0.1014, over 19596.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2711, pruned_loss=0.04761, ctc_loss=0.08881, over 3868024.11 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:30:09,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=171040.0, ans=0.025 +2024-08-26 22:30:11,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=171093.33333333334, ans=0.125 +2024-08-26 22:30:15,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=171093.33333333334, ans=0.125 +2024-08-26 22:30:18,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=171093.33333333334, ans=10.0 +2024-08-26 22:30:46,564 INFO [train.py:1114] (1/4) Epoch 13, batch 2250, loss[loss=0.1976, simple_loss=0.2698, pruned_loss=0.04541, ctc_loss=0.0862, over 19613.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2709, pruned_loss=0.04762, ctc_loss=0.08912, over 3867521.25 frames. 
], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:30:50,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=171306.66666666666, ans=0.125 +2024-08-26 22:30:50,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=171306.66666666666, ans=0.09899494936611666 +2024-08-26 22:31:13,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=171466.66666666666, ans=0.125 +2024-08-26 22:31:20,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=171520.0, ans=0.04949747468305833 +2024-08-26 22:31:22,562 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.444e+02 1.610e+02 1.869e+02 3.635e+02, threshold=3.220e+02, percent-clipped=1.0 +2024-08-26 22:31:24,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.05 vs. limit=6.0 +2024-08-26 22:31:29,438 INFO [train.py:1114] (1/4) Epoch 13, batch 2300, loss[loss=0.1859, simple_loss=0.2565, pruned_loss=0.04205, ctc_loss=0.07814, over 19513.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.27, pruned_loss=0.04756, ctc_loss=0.08893, over 3860632.66 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 22:31:35,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=171573.33333333334, ans=0.125 +2024-08-26 22:31:53,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=171680.0, ans=0.125 +2024-08-26 22:32:10,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171786.66666666666, ans=0.0 +2024-08-26 22:32:13,426 INFO [train.py:1114] (1/4) Epoch 13, batch 2350, loss[loss=0.2123, simple_loss=0.2886, pruned_loss=0.049, ctc_loss=0.0947, over 19661.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2705, pruned_loss=0.04788, ctc_loss=0.08946, over 3863505.81 frames. 
], batch size: 63, lr: 1.13e-02, grad_scale: 16.0 +2024-08-26 22:32:15,218 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:32:19,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=171840.0, ans=0.0 +2024-08-26 22:32:20,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=171840.0, ans=0.0 +2024-08-26 22:32:53,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172000.0, ans=0.125 +2024-08-26 22:32:56,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172000.0, ans=0.1 +2024-08-26 22:33:04,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=172053.33333333334, ans=0.0 +2024-08-26 22:33:04,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.652e+02 1.956e+02 2.487e+02 4.828e+02, threshold=3.913e+02, percent-clipped=4.0 +2024-08-26 22:33:07,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=172053.33333333334, ans=0.0 +2024-08-26 22:33:08,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=172053.33333333334, ans=0.125 +2024-08-26 22:33:10,713 INFO [train.py:1114] (1/4) Epoch 13, batch 2400, loss[loss=0.2012, simple_loss=0.2765, pruned_loss=0.04484, ctc_loss=0.09061, over 19235.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2726, pruned_loss=0.04871, ctc_loss=0.09081, over 3858875.86 frames. ], batch size: 71, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 22:33:12,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172106.66666666666, ans=0.125 +2024-08-26 22:33:18,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172160.0, ans=0.125 +2024-08-26 22:33:22,967 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:33:54,746 INFO [train.py:1114] (1/4) Epoch 13, batch 2450, loss[loss=0.3028, simple_loss=0.3266, pruned_loss=0.1021, ctc_loss=0.187, over 13687.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2767, pruned_loss=0.05162, ctc_loss=0.09655, over 3730503.06 frames. ], batch size: 142, lr: 1.13e-02, grad_scale: 32.0 +2024-08-26 22:33:55,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172373.33333333334, ans=0.1 +2024-08-26 22:33:56,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=12.59 vs. limit=15.0 +2024-08-26 22:33:56,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.68 vs. limit=15.0 +2024-08-26 22:33:57,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.59 vs. 
limit=10.0 +2024-08-26 22:34:03,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=172426.66666666666, ans=0.5 +2024-08-26 22:34:15,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.73 vs. limit=15.0 +2024-08-26 22:34:23,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172533.33333333334, ans=0.1 +2024-08-26 22:34:25,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172533.33333333334, ans=0.125 +2024-08-26 22:38:18,480 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.1833, simple_loss=0.2562, pruned_loss=0.03987, ctc_loss=0.07686, over 19403.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2562, pruned_loss=0.03987, ctc_loss=0.07686, over 19403.00 frames. ], batch size: 48, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 22:38:18,481 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 22:38:36,025 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1777, simple_loss=0.2705, pruned_loss=0.03149, ctc_loss=0.05468, over 944034.00 frames. +2024-08-26 22:38:36,168 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-26 22:38:39,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.631e+02 1.782e+02 1.968e+02 3.125e+02, threshold=3.565e+02, percent-clipped=0.0 +2024-08-26 22:38:43,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172581.33333333334, ans=0.1 +2024-08-26 22:38:44,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172634.66666666666, ans=0.1 +2024-08-26 22:39:11,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.18 vs. limit=10.0 +2024-08-26 22:40:01,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=6.0 +2024-08-26 22:40:03,201 INFO [train.py:1114] (1/4) Epoch 14, batch 50, loss[loss=0.1982, simple_loss=0.2565, pruned_loss=0.05142, ctc_loss=0.09256, over 19699.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2732, pruned_loss=0.04853, ctc_loss=0.09122, over 844835.93 frames. 
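], batch size: 47, lr: 1.09e-02, grad_scale: 32.0

At the start of epoch 14 the trainer logs "Computing validation loss" and then a per-frame validation summary. A minimal sketch of that step follows, assuming a loss function that returns the summed loss together with a frame count; compute_loss and the batch layout are placeholders rather than the actual train.py interface.

```python
# Minimal sketch of the validation pass. compute_loss and the batch layout
# are placeholders, not the actual train.py interface.
import torch

def compute_validation_loss(model, valid_loader, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():                       # no gradients during eval
        for batch in valid_loader:
            feats = batch["features"].to(device)
            # assumed to return (summed loss, number of frames in batch)
            loss, num_frames = model.compute_loss(feats, batch)
            tot_loss += float(loss)
            tot_frames += num_frames
    model.train()                               # restore training mode
    return tot_loss / max(tot_frames, 1.0)      # per-frame validation loss
```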
+2024-08-26 22:40:16,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172848.0, ans=0.125 +2024-08-26 22:41:55,150 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:42:41,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=172901.33333333334, ans=0.0 +2024-08-26 22:42:48,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=172954.66666666666, ans=0.2 +2024-08-26 22:42:48,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=172954.66666666666, ans=0.125 +2024-08-26 22:43:09,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=12.0 +2024-08-26 22:43:15,385 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.81 vs. limit=22.5 +2024-08-26 22:43:20,293 INFO [train.py:1114] (1/4) Epoch 14, batch 100, loss[loss=0.1818, simple_loss=0.2565, pruned_loss=0.03903, ctc_loss=0.07268, over 19712.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2739, pruned_loss=0.04848, ctc_loss=0.09067, over 1499995.38 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 22:43:23,803 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.427e+02 1.577e+02 1.836e+02 2.542e+02, threshold=3.153e+02, percent-clipped=0.0 +2024-08-26 22:43:31,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=173168.0, ans=0.025 +2024-08-26 22:43:54,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=173274.66666666666, ans=0.0 +2024-08-26 22:44:08,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=173328.0, ans=0.125 +2024-08-26 22:44:08,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=173328.0, ans=0.02 +2024-08-26 22:44:09,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=173381.33333333334, ans=0.0 +2024-08-26 22:44:10,532 INFO [train.py:1114] (1/4) Epoch 14, batch 150, loss[loss=0.1826, simple_loss=0.2501, pruned_loss=0.0424, ctc_loss=0.07556, over 19717.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2718, pruned_loss=0.04793, ctc_loss=0.08942, over 2027349.01 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 22:44:15,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.41 vs. 
limit=22.5 +2024-08-26 22:44:21,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=173434.66666666666, ans=0.07 +2024-08-26 22:44:22,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=173434.66666666666, ans=0.125 +2024-08-26 22:44:29,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.68 vs. limit=22.5 +2024-08-26 22:44:42,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173541.33333333334, ans=0.125 +2024-08-26 22:44:43,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.19 vs. limit=12.0 +2024-08-26 22:44:43,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=173541.33333333334, ans=0.125 +2024-08-26 22:44:47,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=173541.33333333334, ans=0.125 +2024-08-26 22:45:08,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.88 vs. limit=15.0 +2024-08-26 22:45:10,943 INFO [train.py:1114] (1/4) Epoch 14, batch 200, loss[loss=0.2253, simple_loss=0.2846, pruned_loss=0.06056, ctc_loss=0.1122, over 18213.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2701, pruned_loss=0.04735, ctc_loss=0.08838, over 2434363.68 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 22:45:12,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=173648.0, ans=0.0 +2024-08-26 22:45:14,585 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.393e+02 1.624e+02 1.885e+02 3.247e+02, threshold=3.247e+02, percent-clipped=1.0 +2024-08-26 22:45:18,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=173648.0, ans=0.0 +2024-08-26 22:45:19,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173648.0, ans=0.1 +2024-08-26 22:45:40,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.06 vs. limit=15.0 +2024-08-26 22:45:44,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=173808.0, ans=0.0 +2024-08-26 22:46:04,239 INFO [train.py:1114] (1/4) Epoch 14, batch 250, loss[loss=0.1935, simple_loss=0.2741, pruned_loss=0.04202, ctc_loss=0.07227, over 19420.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.27, pruned_loss=0.04696, ctc_loss=0.08786, over 2753707.57 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0 +2024-08-26 22:46:16,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.82 vs. 
limit=22.5 +2024-08-26 22:46:25,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=174021.33333333334, ans=22.5 +2024-08-26 22:46:25,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.15 vs. limit=22.5 +2024-08-26 22:46:32,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=174074.66666666666, ans=0.025 +2024-08-26 22:46:37,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174074.66666666666, ans=0.125 +2024-08-26 22:46:47,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.96 vs. limit=15.0 +2024-08-26 22:46:49,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-08-26 22:46:50,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=174128.0, ans=0.125 +2024-08-26 22:46:51,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=174128.0, ans=0.2 +2024-08-26 22:46:54,993 INFO [train.py:1114] (1/4) Epoch 14, batch 300, loss[loss=0.2164, simple_loss=0.2843, pruned_loss=0.05406, ctc_loss=0.1009, over 19548.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2697, pruned_loss=0.04676, ctc_loss=0.08768, over 2999566.01 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 16.0 +2024-08-26 22:46:59,571 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.470e+02 1.728e+02 2.225e+02 3.956e+02, threshold=3.457e+02, percent-clipped=2.0 +2024-08-26 22:47:23,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=174288.0, ans=0.0 +2024-08-26 22:47:29,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.19 vs. limit=15.0 +2024-08-26 22:47:32,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=174341.33333333334, ans=0.0 +2024-08-26 22:47:32,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.49 vs. limit=15.0 +2024-08-26 22:47:37,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.05 vs. limit=15.0 +2024-08-26 22:47:43,371 INFO [train.py:1114] (1/4) Epoch 14, batch 350, loss[loss=0.1741, simple_loss=0.2396, pruned_loss=0.03879, ctc_loss=0.0774, over 19763.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2704, pruned_loss=0.04688, ctc_loss=0.088, over 3190345.80 frames. 
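], batch size: 48, lr: 1.08e-02, grad_scale: 16.0

Each tot_loss entry also records grad_scale (16.0 and 32.0 above), the mixed-precision loss-scaling factor: it is doubled after a run of successful steps and halved when gradients overflow, which is why it oscillates between the two values. The training script maintains its own scaler, but PyTorch's standard torch.cuda.amp.GradScaler behaves analogously; the model and batches below are toy placeholders, and a CUDA device is assumed.

```python
# Loss scaling analogous to the logged grad_scale, using PyTorch's real
# GradScaler API; the linear model and random batches are toy placeholders.
import torch

device = "cuda"  # assumption: a CUDA device is available
model = torch.nn.Linear(80, 512).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
scaler = torch.cuda.amp.GradScaler(init_scale=16.0, growth_interval=2000)

for _ in range(3):
    feats = torch.randn(8, 80, device=device)
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(feats).pow(2).mean()
    scaler.scale(loss).backward()  # backward on the scaled loss
    scaler.step(optimizer)         # unscales grads, skips step on inf/nan
    scaler.update()                # doubles after growth_interval good steps,
                                   # halves on overflow
    print(scaler.get_scale())      # counterpart of the logged grad_scale
```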
+2024-08-26 22:48:14,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=174501.33333333334, ans=0.125 +2024-08-26 22:48:14,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.61 vs. limit=15.0 +2024-08-26 22:48:20,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-08-26 22:48:20,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-26 22:48:23,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=174554.66666666666, ans=0.07 +2024-08-26 22:48:24,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-26 22:48:26,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174554.66666666666, ans=0.125 +2024-08-26 22:48:32,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=174608.0, ans=0.0 +2024-08-26 22:48:35,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.38 vs. limit=15.0 +2024-08-26 22:48:45,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.43 vs. limit=12.0 +2024-08-26 22:48:47,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=174714.66666666666, ans=0.125 +2024-08-26 22:48:48,273 INFO [train.py:1114] (1/4) Epoch 14, batch 400, loss[loss=0.1962, simple_loss=0.2731, pruned_loss=0.04306, ctc_loss=0.08297, over 19500.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2699, pruned_loss=0.04662, ctc_loss=0.08752, over 3341930.78 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:48:52,781 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.491e+02 1.630e+02 1.842e+02 3.705e+02, threshold=3.261e+02, percent-clipped=1.0 +2024-08-26 22:48:57,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=174714.66666666666, ans=0.0 +2024-08-26 22:49:08,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=174821.33333333334, ans=0.125 +2024-08-26 22:49:11,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.57 vs. 
limit=10.0 +2024-08-26 22:49:22,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=174874.66666666666, ans=0.2 +2024-08-26 22:49:29,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=174928.0, ans=0.125 +2024-08-26 22:49:29,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=174928.0, ans=0.2 +2024-08-26 22:49:33,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=174928.0, ans=0.025 +2024-08-26 22:49:39,144 INFO [train.py:1114] (1/4) Epoch 14, batch 450, loss[loss=0.1956, simple_loss=0.2727, pruned_loss=0.04287, ctc_loss=0.08197, over 19622.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2702, pruned_loss=0.04695, ctc_loss=0.08814, over 3450368.11 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:50:15,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=175141.33333333334, ans=0.125 +2024-08-26 22:50:16,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=175141.33333333334, ans=0.0 +2024-08-26 22:50:27,953 INFO [train.py:1114] (1/4) Epoch 14, batch 500, loss[loss=0.2043, simple_loss=0.2772, pruned_loss=0.04804, ctc_loss=0.08808, over 19650.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2691, pruned_loss=0.04636, ctc_loss=0.08713, over 3545951.97 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:50:30,941 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:50:30,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=175248.0, ans=0.09899494936611666 +2024-08-26 22:50:32,540 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.438e+02 1.690e+02 1.988e+02 3.244e+02, threshold=3.379e+02, percent-clipped=0.0 +2024-08-26 22:50:48,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=175354.66666666666, ans=0.0 +2024-08-26 22:51:00,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175408.0, ans=0.125 +2024-08-26 22:51:01,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=175408.0, ans=0.125 +2024-08-26 22:51:10,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=175461.33333333334, ans=0.015 +2024-08-26 22:51:15,972 INFO [train.py:1114] (1/4) Epoch 14, batch 550, loss[loss=0.2016, simple_loss=0.2761, pruned_loss=0.04671, ctc_loss=0.08404, over 19266.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2694, pruned_loss=0.04659, ctc_loss=0.08747, over 3608026.80 frames. 
], batch size: 71, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:51:51,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=175674.66666666666, ans=0.2 +2024-08-26 22:52:06,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=175728.0, ans=0.0 +2024-08-26 22:52:15,516 INFO [train.py:1114] (1/4) Epoch 14, batch 600, loss[loss=0.22, simple_loss=0.2904, pruned_loss=0.05395, ctc_loss=0.1044, over 19321.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2698, pruned_loss=0.04683, ctc_loss=0.08787, over 3664666.00 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:52:20,038 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.434e+02 1.658e+02 1.951e+02 2.764e+02, threshold=3.317e+02, percent-clipped=0.0 +2024-08-26 22:52:22,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175781.33333333334, ans=0.125 +2024-08-26 22:52:22,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=175781.33333333334, ans=0.125 +2024-08-26 22:52:29,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-08-26 22:52:31,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-26 22:52:32,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=175834.66666666666, ans=0.125 +2024-08-26 22:52:37,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=175888.0, ans=0.125 +2024-08-26 22:52:37,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.55 vs. limit=15.0 +2024-08-26 22:52:40,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0 +2024-08-26 22:52:49,915 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:52:51,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=175941.33333333334, ans=0.025 +2024-08-26 22:52:56,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0 +2024-08-26 22:53:17,371 INFO [train.py:1114] (1/4) Epoch 14, batch 650, loss[loss=0.1971, simple_loss=0.2737, pruned_loss=0.04367, ctc_loss=0.08269, over 19752.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2691, pruned_loss=0.04653, ctc_loss=0.08702, over 3715941.04 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:53:44,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.39 vs. 
limit=12.0 +2024-08-26 22:53:50,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176208.0, ans=0.1 +2024-08-26 22:53:52,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176208.0, ans=0.1 +2024-08-26 22:54:13,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176208.0, ans=0.1 +2024-08-26 22:54:13,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176208.0, ans=0.125 +2024-08-26 22:54:15,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=176261.33333333334, ans=10.0 +2024-08-26 22:54:29,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=176261.33333333334, ans=0.0 +2024-08-26 22:54:41,337 INFO [train.py:1114] (1/4) Epoch 14, batch 700, loss[loss=0.1925, simple_loss=0.2596, pruned_loss=0.04659, ctc_loss=0.08022, over 19721.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2699, pruned_loss=0.04694, ctc_loss=0.08756, over 3748643.73 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 22:54:52,792 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.446e+02 1.597e+02 2.123e+02 3.826e+02, threshold=3.195e+02, percent-clipped=1.0 +2024-08-26 22:55:06,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176368.0, ans=0.1 +2024-08-26 22:55:13,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=176368.0, ans=0.0 +2024-08-26 22:55:18,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=176368.0, ans=0.125 +2024-08-26 22:58:22,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-08-26 22:58:22,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=176528.0, ans=0.125 +2024-08-26 22:58:30,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176528.0, ans=0.125 +2024-08-26 22:59:23,413 INFO [train.py:1114] (1/4) Epoch 14, batch 750, loss[loss=0.1848, simple_loss=0.2654, pruned_loss=0.03727, ctc_loss=0.07415, over 19855.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2695, pruned_loss=0.04672, ctc_loss=0.08728, over 3776206.67 frames. 
], batch size: 55, lr: 1.08e-02, grad_scale: 16.0 +2024-08-26 22:59:23,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=176581.33333333334, ans=0.125 +2024-08-26 22:59:56,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=176581.33333333334, ans=0.1 +2024-08-26 23:00:26,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=176581.33333333334, ans=0.125 +2024-08-26 23:00:59,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=176634.66666666666, ans=0.125 +2024-08-26 23:01:37,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.07 vs. limit=15.0 +2024-08-26 23:01:58,992 INFO [train.py:1114] (1/4) Epoch 14, batch 800, loss[loss=0.1771, simple_loss=0.2462, pruned_loss=0.03949, ctc_loss=0.07239, over 19804.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2698, pruned_loss=0.04703, ctc_loss=0.08783, over 3797126.98 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 23:02:11,900 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.464e+02 1.718e+02 2.120e+02 3.590e+02, threshold=3.437e+02, percent-clipped=3.0 +2024-08-26 23:05:03,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176954.66666666666, ans=0.1 +2024-08-26 23:05:52,828 INFO [train.py:1114] (1/4) Epoch 14, batch 850, loss[loss=0.2045, simple_loss=0.2771, pruned_loss=0.04754, ctc_loss=0.09177, over 19658.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2692, pruned_loss=0.04653, ctc_loss=0.08707, over 3815224.61 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 23:06:08,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=177168.0, ans=0.0 +2024-08-26 23:06:16,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=177221.33333333334, ans=0.125 +2024-08-26 23:06:34,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=177328.0, ans=0.0 +2024-08-26 23:06:46,754 INFO [train.py:1114] (1/4) Epoch 14, batch 900, loss[loss=0.1826, simple_loss=0.2532, pruned_loss=0.04082, ctc_loss=0.076, over 19433.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2695, pruned_loss=0.0468, ctc_loss=0.08754, over 3818547.13 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 23:06:48,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177381.33333333334, ans=0.0 +2024-08-26 23:06:52,132 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.429e+02 1.657e+02 1.986e+02 3.410e+02, threshold=3.315e+02, percent-clipped=0.0 +2024-08-26 23:07:06,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.64 vs. 
limit=22.5 +2024-08-26 23:07:14,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177488.0, ans=0.125 +2024-08-26 23:07:19,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=177541.33333333334, ans=0.0 +2024-08-26 23:07:20,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-26 23:07:21,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=177541.33333333334, ans=0.0 +2024-08-26 23:07:23,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177541.33333333334, ans=0.125 +2024-08-26 23:07:29,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177594.66666666666, ans=0.1 +2024-08-26 23:07:38,547 INFO [train.py:1114] (1/4) Epoch 14, batch 950, loss[loss=0.1764, simple_loss=0.246, pruned_loss=0.03883, ctc_loss=0.07281, over 19512.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2697, pruned_loss=0.04697, ctc_loss=0.08789, over 3821519.80 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0 +2024-08-26 23:07:39,067 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.73 vs. limit=22.5 +2024-08-26 23:07:51,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=177701.33333333334, ans=0.0 +2024-08-26 23:07:55,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.34 vs. limit=22.5 +2024-08-26 23:08:00,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=177754.66666666666, ans=0.125 +2024-08-26 23:08:01,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=177754.66666666666, ans=0.2 +2024-08-26 23:08:01,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=177754.66666666666, ans=0.04949747468305833 +2024-08-26 23:08:03,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=177754.66666666666, ans=0.07 +2024-08-26 23:08:04,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177754.66666666666, ans=0.125 +2024-08-26 23:08:11,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177808.0, ans=0.125 +2024-08-26 23:08:21,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=177861.33333333334, ans=0.125 +2024-08-26 23:08:26,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177861.33333333334, ans=0.125 +2024-08-26 23:08:28,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. 
limit=15.0 +2024-08-26 23:08:35,564 INFO [train.py:1114] (1/4) Epoch 14, batch 1000, loss[loss=0.1924, simple_loss=0.2608, pruned_loss=0.04522, ctc_loss=0.08417, over 19863.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2703, pruned_loss=0.04715, ctc_loss=0.08816, over 3818921.51 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:08:41,166 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.435e+02 1.639e+02 1.944e+02 3.185e+02, threshold=3.279e+02, percent-clipped=0.0 +2024-08-26 23:09:00,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=178021.33333333334, ans=0.0 +2024-08-26 23:09:03,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=178021.33333333334, ans=0.2 +2024-08-26 23:09:11,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=178074.66666666666, ans=0.125 +2024-08-26 23:09:12,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=178074.66666666666, ans=0.0 +2024-08-26 23:09:29,122 INFO [train.py:1114] (1/4) Epoch 14, batch 1050, loss[loss=0.1997, simple_loss=0.2747, pruned_loss=0.04489, ctc_loss=0.08741, over 19861.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2696, pruned_loss=0.04709, ctc_loss=0.08795, over 3824616.59 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:09:31,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-08-26 23:09:34,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0 +2024-08-26 23:09:40,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=178234.66666666666, ans=0.025 +2024-08-26 23:10:03,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=178288.0, ans=0.0 +2024-08-26 23:10:07,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178341.33333333334, ans=0.0 +2024-08-26 23:10:47,603 INFO [train.py:1114] (1/4) Epoch 14, batch 1100, loss[loss=0.2232, simple_loss=0.2834, pruned_loss=0.0605, ctc_loss=0.105, over 19567.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2692, pruned_loss=0.0468, ctc_loss=0.08751, over 3832045.48 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:10:53,001 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.389e+02 1.598e+02 1.774e+02 3.032e+02, threshold=3.197e+02, percent-clipped=0.0 +2024-08-26 23:11:01,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178501.33333333334, ans=0.1 +2024-08-26 23:11:10,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.15 vs. 
limit=15.0 +2024-08-26 23:11:35,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178661.33333333334, ans=0.125 +2024-08-26 23:11:37,950 INFO [train.py:1114] (1/4) Epoch 14, batch 1150, loss[loss=0.1744, simple_loss=0.2459, pruned_loss=0.03779, ctc_loss=0.06807, over 19594.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2685, pruned_loss=0.04637, ctc_loss=0.08683, over 3830601.15 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:11:38,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178714.66666666666, ans=0.125 +2024-08-26 23:11:40,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178714.66666666666, ans=0.125 +2024-08-26 23:11:42,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178714.66666666666, ans=0.125 +2024-08-26 23:11:45,852 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:11:48,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=178768.0, ans=0.2 +2024-08-26 23:12:00,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.62 vs. limit=12.0 +2024-08-26 23:12:13,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.15 vs. limit=15.0 +2024-08-26 23:12:31,156 INFO [train.py:1114] (1/4) Epoch 14, batch 1200, loss[loss=0.2085, simple_loss=0.2842, pruned_loss=0.04781, ctc_loss=0.09325, over 19831.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2696, pruned_loss=0.04666, ctc_loss=0.08729, over 3826118.36 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:12:34,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=178981.33333333334, ans=0.015 +2024-08-26 23:12:36,804 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.458e+02 1.687e+02 2.139e+02 4.936e+02, threshold=3.375e+02, percent-clipped=2.0 +2024-08-26 23:12:38,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=178981.33333333334, ans=0.125 +2024-08-26 23:12:44,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.91 vs. 
limit=15.0 +2024-08-26 23:12:47,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=179034.66666666666, ans=0.125 +2024-08-26 23:12:58,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179088.0, ans=0.125 +2024-08-26 23:13:05,966 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:13:14,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=179194.66666666666, ans=0.0 +2024-08-26 23:13:17,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.19 vs. limit=15.0 +2024-08-26 23:13:17,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.59 vs. limit=10.0 +2024-08-26 23:13:20,037 INFO [train.py:1114] (1/4) Epoch 14, batch 1250, loss[loss=0.2213, simple_loss=0.2908, pruned_loss=0.05558, ctc_loss=0.1018, over 19505.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.27, pruned_loss=0.04655, ctc_loss=0.08682, over 3843744.84 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:13:22,338 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.52 vs. limit=10.0 +2024-08-26 23:13:25,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=179248.0, ans=0.0 +2024-08-26 23:13:30,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=179301.33333333334, ans=0.2 +2024-08-26 23:13:45,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179354.66666666666, ans=0.125 +2024-08-26 23:13:49,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=179354.66666666666, ans=0.125 +2024-08-26 23:13:49,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=179354.66666666666, ans=0.09899494936611666 +2024-08-26 23:13:55,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.32 vs. limit=10.0 +2024-08-26 23:13:57,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=179408.0, ans=0.0 +2024-08-26 23:14:01,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179461.33333333334, ans=0.0 +2024-08-26 23:14:12,534 INFO [train.py:1114] (1/4) Epoch 14, batch 1300, loss[loss=0.2336, simple_loss=0.2956, pruned_loss=0.063, ctc_loss=0.114, over 18980.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2692, pruned_loss=0.04618, ctc_loss=0.0863, over 3845894.64 frames. 
], batch size: 76, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:14:15,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=179514.66666666666, ans=0.125 +2024-08-26 23:14:19,137 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.402e+02 1.628e+02 1.914e+02 2.926e+02, threshold=3.256e+02, percent-clipped=0.0 +2024-08-26 23:14:24,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179568.0, ans=0.125 +2024-08-26 23:14:29,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179568.0, ans=0.125 +2024-08-26 23:14:46,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=179674.66666666666, ans=0.0 +2024-08-26 23:14:47,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=179674.66666666666, ans=0.0 +2024-08-26 23:14:55,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=179728.0, ans=0.125 +2024-08-26 23:14:58,668 INFO [train.py:1114] (1/4) Epoch 14, batch 1350, loss[loss=0.1856, simple_loss=0.2651, pruned_loss=0.03838, ctc_loss=0.07338, over 19772.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.269, pruned_loss=0.0462, ctc_loss=0.086, over 3856351.84 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:16:00,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179781.33333333334, ans=0.125 +2024-08-26 23:16:06,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=179834.66666666666, ans=0.0 +2024-08-26 23:16:34,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179888.0, ans=0.125 +2024-08-26 23:16:49,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=179994.66666666666, ans=0.2 +2024-08-26 23:16:59,391 INFO [train.py:1114] (1/4) Epoch 14, batch 1400, loss[loss=0.1703, simple_loss=0.2354, pruned_loss=0.03861, ctc_loss=0.06984, over 19666.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2687, pruned_loss=0.04619, ctc_loss=0.08591, over 3863430.61 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:17:07,631 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.482e+02 1.624e+02 2.003e+02 3.142e+02, threshold=3.248e+02, percent-clipped=0.0 +2024-08-26 23:17:08,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180048.0, ans=0.125 +2024-08-26 23:17:11,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=180101.33333333334, ans=0.125 +2024-08-26 23:17:24,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=180154.66666666666, ans=0.125 +2024-08-26 23:17:30,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.03 vs. 
limit=15.0 +2024-08-26 23:17:34,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180208.0, ans=0.125 +2024-08-26 23:17:34,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=180208.0, ans=0.0 +2024-08-26 23:17:35,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.20 vs. limit=10.0 +2024-08-26 23:17:37,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=180208.0, ans=0.0 +2024-08-26 23:17:45,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=180261.33333333334, ans=0.125 +2024-08-26 23:17:50,487 INFO [train.py:1114] (1/4) Epoch 14, batch 1450, loss[loss=0.2233, simple_loss=0.2894, pruned_loss=0.05783, ctc_loss=0.1039, over 19663.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2692, pruned_loss=0.04623, ctc_loss=0.08624, over 3861905.87 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:18:25,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=180421.33333333334, ans=0.0 +2024-08-26 23:18:36,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180528.0, ans=0.125 +2024-08-26 23:18:37,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180528.0, ans=0.125 +2024-08-26 23:18:40,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=180528.0, ans=0.0 +2024-08-26 23:18:46,184 INFO [train.py:1114] (1/4) Epoch 14, batch 1500, loss[loss=0.2126, simple_loss=0.2832, pruned_loss=0.05226, ctc_loss=0.0936, over 19586.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2698, pruned_loss=0.04645, ctc_loss=0.08659, over 3861529.31 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:18:52,954 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.461e+02 1.607e+02 1.928e+02 3.862e+02, threshold=3.214e+02, percent-clipped=2.0 +2024-08-26 23:18:55,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=180634.66666666666, ans=0.125 +2024-08-26 23:19:14,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180741.33333333334, ans=0.125 +2024-08-26 23:20:19,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180741.33333333334, ans=0.0 +2024-08-26 23:20:20,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.93 vs. limit=22.5 +2024-08-26 23:20:30,224 INFO [train.py:1114] (1/4) Epoch 14, batch 1550, loss[loss=0.2114, simple_loss=0.2897, pruned_loss=0.04883, ctc_loss=0.08849, over 19621.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2701, pruned_loss=0.04685, ctc_loss=0.08736, over 3846645.55 frames. 
], batch size: 60, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:20:32,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180848.0, ans=0.125 +2024-08-26 23:21:01,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=181008.0, ans=0.0 +2024-08-26 23:21:20,684 INFO [train.py:1114] (1/4) Epoch 14, batch 1600, loss[loss=0.1982, simple_loss=0.2714, pruned_loss=0.04506, ctc_loss=0.08718, over 19843.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2699, pruned_loss=0.04697, ctc_loss=0.08779, over 3835823.69 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:21:27,134 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.461e+02 1.627e+02 1.971e+02 3.033e+02, threshold=3.255e+02, percent-clipped=0.0 +2024-08-26 23:21:29,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=181168.0, ans=0.0 +2024-08-26 23:21:34,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=181168.0, ans=0.0 +2024-08-26 23:21:39,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0 +2024-08-26 23:23:41,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=181274.66666666666, ans=0.0 +2024-08-26 23:23:43,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.49 vs. limit=15.0 +2024-08-26 23:23:53,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=181381.33333333334, ans=0.0 +2024-08-26 23:23:54,652 INFO [train.py:1114] (1/4) Epoch 14, batch 1650, loss[loss=0.1944, simple_loss=0.2734, pruned_loss=0.04171, ctc_loss=0.07986, over 19621.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2694, pruned_loss=0.04669, ctc_loss=0.08736, over 3832200.11 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:23:57,064 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.37 vs. limit=22.5 +2024-08-26 23:23:58,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=181381.33333333334, ans=0.025 +2024-08-26 23:24:07,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=181434.66666666666, ans=0.125 +2024-08-26 23:24:10,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181434.66666666666, ans=0.1 +2024-08-26 23:24:21,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.53 vs. limit=22.5 +2024-08-26 23:24:40,741 INFO [train.py:1114] (1/4) Epoch 14, batch 1700, loss[loss=0.1831, simple_loss=0.2475, pruned_loss=0.04285, ctc_loss=0.0825, over 19669.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2691, pruned_loss=0.04631, ctc_loss=0.08671, over 3846575.53 frames. 
], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:24:47,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.441e+02 1.691e+02 2.079e+02 3.382e+02, threshold=3.381e+02, percent-clipped=3.0 +2024-08-26 23:24:50,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=181701.33333333334, ans=0.2 +2024-08-26 23:25:02,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181754.66666666666, ans=0.1 +2024-08-26 23:25:11,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181808.0, ans=0.1 +2024-08-26 23:25:13,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181808.0, ans=0.1 +2024-08-26 23:25:13,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=181808.0, ans=0.0 +2024-08-26 23:25:18,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181861.33333333334, ans=0.1 +2024-08-26 23:25:25,113 INFO [train.py:1114] (1/4) Epoch 14, batch 1750, loss[loss=0.1958, simple_loss=0.2563, pruned_loss=0.04991, ctc_loss=0.08861, over 19673.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2689, pruned_loss=0.04608, ctc_loss=0.08628, over 3852118.27 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:25:42,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182021.33333333334, ans=0.1 +2024-08-26 23:25:46,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=12.0 +2024-08-26 23:25:52,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=182074.66666666666, ans=0.125 +2024-08-26 23:26:19,465 INFO [train.py:1114] (1/4) Epoch 14, batch 1800, loss[loss=0.1974, simple_loss=0.2744, pruned_loss=0.04312, ctc_loss=0.08539, over 19615.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2693, pruned_loss=0.04622, ctc_loss=0.08651, over 3853355.10 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:26:20,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=182181.33333333334, ans=0.125 +2024-08-26 23:26:24,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=182181.33333333334, ans=0.125 +2024-08-26 23:26:26,552 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.533e+02 1.884e+02 2.505e+02 4.097e+02, threshold=3.767e+02, percent-clipped=5.0 +2024-08-26 23:26:32,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=182234.66666666666, ans=0.09899494936611666 +2024-08-26 23:26:47,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.45 vs. 
limit=15.0 +2024-08-26 23:26:52,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182341.33333333334, ans=0.125 +2024-08-26 23:26:59,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182394.66666666666, ans=0.125 +2024-08-26 23:27:05,187 INFO [train.py:1114] (1/4) Epoch 14, batch 1850, loss[loss=0.1983, simple_loss=0.2749, pruned_loss=0.04382, ctc_loss=0.08533, over 19584.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2691, pruned_loss=0.04627, ctc_loss=0.08654, over 3856493.44 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:27:07,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-08-26 23:27:08,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=182448.0, ans=0.2 +2024-08-26 23:28:46,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.21 vs. limit=12.0 +2024-08-26 23:28:55,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.81 vs. limit=15.0 +2024-08-26 23:29:14,713 INFO [train.py:1114] (1/4) Epoch 14, batch 1900, loss[loss=0.1943, simple_loss=0.2731, pruned_loss=0.04191, ctc_loss=0.07916, over 19663.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2696, pruned_loss=0.04628, ctc_loss=0.08664, over 3860795.35 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:29:16,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=182714.66666666666, ans=0.2 +2024-08-26 23:29:21,598 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.217e+02 1.441e+02 1.690e+02 2.071e+02 3.452e+02, threshold=3.379e+02, percent-clipped=0.0 +2024-08-26 23:29:33,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=182821.33333333334, ans=0.0 +2024-08-26 23:29:35,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.74 vs. limit=15.0 +2024-08-26 23:29:47,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182874.66666666666, ans=0.125 +2024-08-26 23:29:55,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=182928.0, ans=0.125 +2024-08-26 23:29:57,807 INFO [train.py:1114] (1/4) Epoch 14, batch 1950, loss[loss=0.1749, simple_loss=0.2519, pruned_loss=0.03525, ctc_loss=0.06837, over 19584.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2701, pruned_loss=0.04613, ctc_loss=0.08635, over 3869924.75 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:30:07,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=183034.66666666666, ans=0.0 +2024-08-26 23:30:08,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.23 vs. 
limit=15.0 +2024-08-26 23:30:10,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=183034.66666666666, ans=0.125 +2024-08-26 23:30:16,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.52 vs. limit=22.5 +2024-08-26 23:30:16,808 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:30:30,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=183141.33333333334, ans=0.0 +2024-08-26 23:30:33,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. limit=15.0 +2024-08-26 23:30:44,846 INFO [train.py:1114] (1/4) Epoch 14, batch 2000, loss[loss=0.1704, simple_loss=0.2377, pruned_loss=0.0383, ctc_loss=0.06639, over 19667.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2708, pruned_loss=0.04694, ctc_loss=0.08763, over 3854683.60 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:30:52,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.411e+02 1.571e+02 1.845e+02 2.838e+02, threshold=3.143e+02, percent-clipped=0.0 +2024-08-26 23:30:54,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=183301.33333333334, ans=0.2 +2024-08-26 23:31:14,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.81 vs. limit=15.0 +2024-08-26 23:31:14,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=22.5 +2024-08-26 23:32:08,143 INFO [train.py:1114] (1/4) Epoch 14, batch 2050, loss[loss=0.1704, simple_loss=0.2402, pruned_loss=0.0362, ctc_loss=0.07049, over 19696.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2696, pruned_loss=0.0467, ctc_loss=0.08712, over 3850423.23 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:32:09,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183514.66666666666, ans=0.1 +2024-08-26 23:32:09,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=183514.66666666666, ans=0.2 +2024-08-26 23:32:20,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=183568.0, ans=0.0 +2024-08-26 23:32:28,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.33 vs. limit=10.0 +2024-08-26 23:32:35,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.13 vs. limit=22.5 +2024-08-26 23:32:51,519 INFO [train.py:1114] (1/4) Epoch 14, batch 2100, loss[loss=0.1947, simple_loss=0.2725, pruned_loss=0.04232, ctc_loss=0.08047, over 19783.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2687, pruned_loss=0.046, ctc_loss=0.08583, over 3857559.04 frames. 
], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:32:57,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=183781.33333333334, ans=0.125 +2024-08-26 23:32:58,374 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.491e+02 1.652e+02 1.860e+02 2.729e+02, threshold=3.304e+02, percent-clipped=0.0 +2024-08-26 23:34:16,403 INFO [train.py:1114] (1/4) Epoch 14, batch 2150, loss[loss=0.1995, simple_loss=0.2718, pruned_loss=0.04662, ctc_loss=0.08499, over 19581.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2683, pruned_loss=0.04605, ctc_loss=0.0858, over 3868055.93 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:34:37,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.80 vs. limit=6.0 +2024-08-26 23:34:53,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184261.33333333334, ans=0.125 +2024-08-26 23:34:56,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=184261.33333333334, ans=0.0 +2024-08-26 23:34:57,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=184261.33333333334, ans=0.2 +2024-08-26 23:34:58,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.66 vs. limit=15.0 +2024-08-26 23:34:59,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184314.66666666666, ans=0.0 +2024-08-26 23:34:59,941 INFO [train.py:1114] (1/4) Epoch 14, batch 2200, loss[loss=0.1944, simple_loss=0.2689, pruned_loss=0.04333, ctc_loss=0.08319, over 19577.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.268, pruned_loss=0.04578, ctc_loss=0.08545, over 3867342.29 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:35:02,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=184314.66666666666, ans=0.125 +2024-08-26 23:35:06,950 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.447e+02 1.750e+02 2.552e+02 4.295e+02, threshold=3.499e+02, percent-clipped=8.0 +2024-08-26 23:35:17,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184421.33333333334, ans=0.0 +2024-08-26 23:35:30,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.80 vs. limit=12.0 +2024-08-26 23:35:39,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184528.0, ans=0.125 +2024-08-26 23:35:41,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=184528.0, ans=0.125 +2024-08-26 23:35:43,863 INFO [train.py:1114] (1/4) Epoch 14, batch 2250, loss[loss=0.2049, simple_loss=0.2787, pruned_loss=0.04828, ctc_loss=0.08628, over 19609.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2679, pruned_loss=0.0457, ctc_loss=0.08529, over 3867767.48 frames. 
], batch size: 55, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:35:56,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184634.66666666666, ans=0.125 +2024-08-26 23:36:05,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184688.0, ans=0.125 +2024-08-26 23:36:14,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184741.33333333334, ans=0.1 +2024-08-26 23:36:27,381 INFO [train.py:1114] (1/4) Epoch 14, batch 2300, loss[loss=0.1767, simple_loss=0.2469, pruned_loss=0.03868, ctc_loss=0.07262, over 19503.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2669, pruned_loss=0.04551, ctc_loss=0.08491, over 3861437.78 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:36:28,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-08-26 23:36:35,134 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.662e+02 2.114e+02 3.033e+02, threshold=3.324e+02, percent-clipped=0.0 +2024-08-26 23:36:39,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184901.33333333334, ans=0.0 +2024-08-26 23:36:55,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=185008.0, ans=0.05 +2024-08-26 23:37:00,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=185008.0, ans=0.07 +2024-08-26 23:37:01,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=185061.33333333334, ans=0.0 +2024-08-26 23:37:10,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=185114.66666666666, ans=10.0 +2024-08-26 23:37:10,969 INFO [train.py:1114] (1/4) Epoch 14, batch 2350, loss[loss=0.2007, simple_loss=0.2717, pruned_loss=0.04773, ctc_loss=0.08549, over 19691.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2673, pruned_loss=0.04591, ctc_loss=0.08549, over 3864115.47 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:37:19,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185114.66666666666, ans=0.1 +2024-08-26 23:37:24,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=185168.0, ans=0.125 +2024-08-26 23:37:42,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-08-26 23:37:44,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.49 vs. 
limit=15.0 +2024-08-26 23:37:50,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=185328.0, ans=0.0 +2024-08-26 23:37:55,070 INFO [train.py:1114] (1/4) Epoch 14, batch 2400, loss[loss=0.2141, simple_loss=0.2786, pruned_loss=0.05432, ctc_loss=0.1023, over 19263.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2697, pruned_loss=0.04684, ctc_loss=0.08704, over 3858838.15 frames. ], batch size: 71, lr: 1.05e-02, grad_scale: 32.0 +2024-08-26 23:37:55,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185381.33333333334, ans=0.0 +2024-08-26 23:38:00,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=185381.33333333334, ans=0.125 +2024-08-26 23:38:02,857 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.569e+02 1.843e+02 2.357e+02 3.475e+02, threshold=3.685e+02, percent-clipped=2.0 +2024-08-26 23:38:06,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=185434.66666666666, ans=0.0 +2024-08-26 23:38:07,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=185434.66666666666, ans=0.0 +2024-08-26 23:38:15,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=185488.0, ans=0.2 +2024-08-26 23:38:39,327 INFO [train.py:1114] (1/4) Epoch 14, batch 2450, loss[loss=0.2837, simple_loss=0.3195, pruned_loss=0.08939, ctc_loss=0.1728, over 13328.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2738, pruned_loss=0.04964, ctc_loss=0.09255, over 3731819.87 frames. ], batch size: 140, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:38:41,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=185648.0, ans=0.125 +2024-08-26 23:38:50,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185701.33333333334, ans=0.125 +2024-08-26 23:38:59,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.03 vs. limit=15.0 +2024-08-26 23:39:00,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.13 vs. limit=15.0 +2024-08-26 23:40:44,471 INFO [train.py:1114] (1/4) Epoch 15, batch 0, loss[loss=0.1895, simple_loss=0.255, pruned_loss=0.04425, ctc_loss=0.08872, over 19792.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.255, pruned_loss=0.04425, ctc_loss=0.08872, over 19792.00 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 32.0 +2024-08-26 23:40:46,074 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-26 23:41:25,236 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1751, simple_loss=0.2686, pruned_loss=0.03035, ctc_loss=0.05216, over 944034.00 frames. 
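The `ScheduledFloat` entries that dominate this log are emitted from scaling.py:214 each time a scheduled hyperparameter is read: `name` identifies the parameter, `batch_count` the global training progress, and `ans` the value currently in effect. A minimal sketch of such a piecewise-linear schedule follows, assuming icefall-style `(batch_count, value)` breakpoints; the class name and the example breakpoints are illustrative, not the library's exact API:

```python
# Minimal sketch of a piecewise-linear scheduled value, assuming
# icefall-style (batch_count, value) breakpoints; the "ans=" field in
# the ScheduledFloat log lines above is the value in effect at the
# logged batch_count. Class name and breakpoints here are illustrative.

class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs; kept sorted by batch_count
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        x0, y0 = self.points[0]
        if batch_count <= x0:
            return y0                      # before the first breakpoint
        for x1, y1 in self.points[1:]:
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)  # linear interpolation
            x0, y0 = x1, y1
        return y0                          # past the last breakpoint: hold

# Hypothetical skip-rate decaying from 0.5 to 0.025 over 4000 batches:
skip_rate = PiecewiseLinearSchedule((0.0, 0.5), (4000.0, 0.025))
print(skip_rate.value(175248.0))  # 0.025: the schedule has flattened out
```

Values before the first breakpoint hold the initial value and values past the last hold the final one, which is why at batch counts this deep into training the logged `ans` fields stay constant from entry to entry.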
+2024-08-26 23:41:29,769 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-26 23:41:30,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=185856.0, ans=0.0 +2024-08-26 23:41:32,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=185856.0, ans=0.125 +2024-08-26 23:41:32,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=185856.0, ans=0.2 +2024-08-26 23:42:34,508 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.661e+02 1.811e+02 2.041e+02 3.400e+02, threshold=3.623e+02, percent-clipped=0.0 +2024-08-26 23:42:54,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=185962.66666666666, ans=0.2 +2024-08-26 23:47:49,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=186069.33333333334, ans=0.025 +2024-08-26 23:49:22,219 INFO [train.py:1114] (1/4) Epoch 15, batch 50, loss[loss=0.1653, simple_loss=0.2339, pruned_loss=0.03466, ctc_loss=0.06842, over 19715.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2687, pruned_loss=0.04665, ctc_loss=0.08806, over 844130.59 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 16.0 +2024-08-26 23:49:22,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=186122.66666666666, ans=0.0 +2024-08-26 23:51:28,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186122.66666666666, ans=0.125 +2024-08-26 23:52:09,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=186229.33333333334, ans=0.2 +2024-08-26 23:53:58,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.98 vs. limit=15.0 +2024-08-26 23:57:10,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=186336.0, ans=0.0 +2024-08-26 23:58:27,655 INFO [train.py:1114] (1/4) Epoch 15, batch 100, loss[loss=0.1844, simple_loss=0.2564, pruned_loss=0.0417, ctc_loss=0.07263, over 19731.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2708, pruned_loss=0.04635, ctc_loss=0.08704, over 1499265.19 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 16.0 +2024-08-27 00:06:36,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=186442.66666666666, ans=0.2 +2024-08-27 00:06:52,657 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.493e+02 1.771e+02 2.166e+02 3.428e+02, threshold=3.543e+02, percent-clipped=0.0 +2024-08-27 00:09:12,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=186496.0, ans=0.2 +2024-08-27 00:12:03,622 INFO [train.py:1114] (1/4) Epoch 15, batch 150, loss[loss=0.183, simple_loss=0.2431, pruned_loss=0.04453, ctc_loss=0.08473, over 19736.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2685, pruned_loss=0.04576, ctc_loss=0.08584, over 2027143.42 frames. 
], batch size: 47, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:12:28,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186709.33333333334, ans=0.1 +2024-08-27 00:12:43,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=12.0 +2024-08-27 00:14:08,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186762.66666666666, ans=0.1 +2024-08-27 00:14:15,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186816.0, ans=0.1 +2024-08-27 00:14:42,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=186816.0, ans=0.0 +2024-08-27 00:17:10,134 INFO [train.py:1114] (1/4) Epoch 15, batch 200, loss[loss=0.2127, simple_loss=0.287, pruned_loss=0.05025, ctc_loss=0.09467, over 18371.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2668, pruned_loss=0.04493, ctc_loss=0.08443, over 2433712.99 frames. ], batch size: 85, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:17:42,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=186976.0, ans=0.125 +2024-08-27 00:17:55,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=187029.33333333334, ans=0.125 +2024-08-27 00:17:59,318 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.435e+02 1.602e+02 1.959e+02 3.588e+02, threshold=3.205e+02, percent-clipped=1.0 +2024-08-27 00:18:07,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0 +2024-08-27 00:18:16,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187082.66666666666, ans=0.1 +2024-08-27 00:18:47,710 INFO [train.py:1114] (1/4) Epoch 15, batch 250, loss[loss=0.2093, simple_loss=0.2893, pruned_loss=0.04677, ctc_loss=0.08957, over 19426.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2669, pruned_loss=0.04467, ctc_loss=0.08361, over 2754856.41 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:19:47,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=187189.33333333334, ans=0.2 +2024-08-27 00:19:50,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. 
limit=6.0 +2024-08-27 00:20:13,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=187296.0, ans=0.0 +2024-08-27 00:20:18,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=187349.33333333334, ans=0.125 +2024-08-27 00:20:31,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=187349.33333333334, ans=0.0 +2024-08-27 00:21:09,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=187402.66666666666, ans=10.0 +2024-08-27 00:21:12,355 INFO [train.py:1114] (1/4) Epoch 15, batch 300, loss[loss=0.1994, simple_loss=0.2727, pruned_loss=0.0469, ctc_loss=0.08062, over 19546.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2661, pruned_loss=0.04443, ctc_loss=0.08299, over 2999580.57 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:21:17,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187456.0, ans=0.125 +2024-08-27 00:21:17,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-08-27 00:22:03,852 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.482e+02 1.757e+02 2.250e+02 4.561e+02, threshold=3.514e+02, percent-clipped=7.0 +2024-08-27 00:22:31,075 INFO [train.py:1114] (1/4) Epoch 15, batch 350, loss[loss=0.173, simple_loss=0.2404, pruned_loss=0.03828, ctc_loss=0.0727, over 19736.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2671, pruned_loss=0.04464, ctc_loss=0.08334, over 3189607.36 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:24:46,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0 +2024-08-27 00:24:57,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=187829.33333333334, ans=0.125 +2024-08-27 00:25:01,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=187829.33333333334, ans=0.125 +2024-08-27 00:25:01,968 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-08-27 00:25:05,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=187829.33333333334, ans=0.125 +2024-08-27 00:25:07,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=187882.66666666666, ans=0.125 +2024-08-27 00:25:25,431 INFO [train.py:1114] (1/4) Epoch 15, batch 400, loss[loss=0.203, simple_loss=0.2783, pruned_loss=0.04685, ctc_loss=0.08517, over 19491.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2664, pruned_loss=0.04423, ctc_loss=0.08272, over 3341688.53 frames. 
], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:25:26,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187989.33333333334, ans=0.1 +2024-08-27 00:25:28,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=187989.33333333334, ans=0.05 +2024-08-27 00:25:46,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.414e+02 1.733e+02 2.120e+02 3.671e+02, threshold=3.466e+02, percent-clipped=1.0 +2024-08-27 00:25:50,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=188096.0, ans=0.2 +2024-08-27 00:26:30,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=188202.66666666666, ans=0.2 +2024-08-27 00:26:33,890 INFO [train.py:1114] (1/4) Epoch 15, batch 450, loss[loss=0.1822, simple_loss=0.2633, pruned_loss=0.03631, ctc_loss=0.07122, over 19602.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2665, pruned_loss=0.0446, ctc_loss=0.08326, over 3448438.69 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:26:40,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=188256.0, ans=0.2 +2024-08-27 00:27:29,690 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:27:51,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=188469.33333333334, ans=0.125 +2024-08-27 00:27:58,673 INFO [train.py:1114] (1/4) Epoch 15, batch 500, loss[loss=0.1995, simple_loss=0.2787, pruned_loss=0.0444, ctc_loss=0.07869, over 19663.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2657, pruned_loss=0.04419, ctc_loss=0.08263, over 3544209.62 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:28:09,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.89 vs. 
limit=15.0 +2024-08-27 00:28:12,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=188576.0, ans=0.125 +2024-08-27 00:28:18,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=188576.0, ans=0.125 +2024-08-27 00:28:18,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188576.0, ans=0.125 +2024-08-27 00:28:19,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=188629.33333333334, ans=0.5 +2024-08-27 00:28:25,434 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.423e+02 1.716e+02 2.052e+02 3.766e+02, threshold=3.431e+02, percent-clipped=1.0 +2024-08-27 00:28:40,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=188682.66666666666, ans=0.125 +2024-08-27 00:28:42,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188682.66666666666, ans=0.0 +2024-08-27 00:28:44,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188682.66666666666, ans=0.1 +2024-08-27 00:28:45,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=188682.66666666666, ans=0.0 +2024-08-27 00:28:47,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=188736.0, ans=0.07 +2024-08-27 00:28:50,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=188736.0, ans=0.04949747468305833 +2024-08-27 00:28:56,799 INFO [train.py:1114] (1/4) Epoch 15, batch 550, loss[loss=0.2295, simple_loss=0.2905, pruned_loss=0.06243, ctc_loss=0.109, over 19161.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2663, pruned_loss=0.04451, ctc_loss=0.08327, over 3606403.89 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:29:52,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188896.0, ans=0.125 +2024-08-27 00:29:56,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=188896.0, ans=0.2 +2024-08-27 00:29:59,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=188949.33333333334, ans=0.125 +2024-08-27 00:30:17,658 INFO [train.py:1114] (1/4) Epoch 15, batch 600, loss[loss=0.2147, simple_loss=0.2955, pruned_loss=0.04818, ctc_loss=0.09373, over 19405.00 frames. ], tot_loss[loss=0.195, simple_loss=0.267, pruned_loss=0.04472, ctc_loss=0.08367, over 3665565.83 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:30:23,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. 
limit=15.0 +2024-08-27 00:31:06,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=189109.33333333334, ans=0.125 +2024-08-27 00:31:12,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=189109.33333333334, ans=0.125 +2024-08-27 00:31:15,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189162.66666666666, ans=0.125 +2024-08-27 00:31:18,192 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.468e+02 1.719e+02 2.297e+02 4.329e+02, threshold=3.438e+02, percent-clipped=2.0 +2024-08-27 00:31:52,656 INFO [train.py:1114] (1/4) Epoch 15, batch 650, loss[loss=0.1784, simple_loss=0.2599, pruned_loss=0.0346, ctc_loss=0.0692, over 19759.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2662, pruned_loss=0.04451, ctc_loss=0.08316, over 3715728.51 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:31:58,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189322.66666666666, ans=0.125 +2024-08-27 00:31:59,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=12.0 +2024-08-27 00:31:59,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.69 vs. limit=15.0 +2024-08-27 00:32:24,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=189482.66666666666, ans=0.125 +2024-08-27 00:32:31,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=189536.0, ans=0.125 +2024-08-27 00:33:04,286 INFO [train.py:1114] (1/4) Epoch 15, batch 700, loss[loss=0.1649, simple_loss=0.2393, pruned_loss=0.03314, ctc_loss=0.06051, over 19748.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2664, pruned_loss=0.04483, ctc_loss=0.08376, over 3748378.29 frames. 
], batch size: 51, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:33:07,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=189589.33333333334, ans=0.125 +2024-08-27 00:33:09,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=189589.33333333334, ans=0.025 +2024-08-27 00:33:13,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189642.66666666666, ans=0.1 +2024-08-27 00:33:58,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=189642.66666666666, ans=0.5 +2024-08-27 00:34:01,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=189696.0, ans=0.5 +2024-08-27 00:34:01,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=189696.0, ans=0.0 +2024-08-27 00:34:03,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.548e+02 1.878e+02 2.334e+02 4.066e+02, threshold=3.756e+02, percent-clipped=4.0 +2024-08-27 00:34:11,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=189749.33333333334, ans=0.125 +2024-08-27 00:34:11,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=22.5 +2024-08-27 00:35:10,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189802.66666666666, ans=0.1 +2024-08-27 00:35:17,154 INFO [train.py:1114] (1/4) Epoch 15, batch 750, loss[loss=0.1772, simple_loss=0.2573, pruned_loss=0.03454, ctc_loss=0.07018, over 19478.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2661, pruned_loss=0.04463, ctc_loss=0.08356, over 3775378.62 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:35:24,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=189856.0, ans=0.0 +2024-08-27 00:35:24,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=189856.0, ans=10.0 +2024-08-27 00:35:27,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=189909.33333333334, ans=0.5 +2024-08-27 00:35:36,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=189962.66666666666, ans=0.0 +2024-08-27 00:35:38,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=189962.66666666666, ans=0.0 +2024-08-27 00:35:48,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.11 vs. limit=22.5 +2024-08-27 00:36:02,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190122.66666666666, ans=0.125 +2024-08-27 00:36:06,228 INFO [train.py:1114] (1/4) Epoch 15, batch 800, loss[loss=0.1664, simple_loss=0.2392, pruned_loss=0.03354, ctc_loss=0.06657, over 19404.00 frames. 
], tot_loss[loss=0.1948, simple_loss=0.2666, pruned_loss=0.04475, ctc_loss=0.08373, over 3797168.46 frames. ], batch size: 48, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:36:22,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=190176.0, ans=0.125 +2024-08-27 00:36:29,662 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.516e+02 1.778e+02 2.217e+02 3.654e+02, threshold=3.555e+02, percent-clipped=0.0 +2024-08-27 00:36:29,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=190229.33333333334, ans=10.0 +2024-08-27 00:36:41,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=190282.66666666666, ans=0.125 +2024-08-27 00:36:54,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.20 vs. limit=15.0 +2024-08-27 00:36:54,883 INFO [train.py:1114] (1/4) Epoch 15, batch 850, loss[loss=0.2108, simple_loss=0.2831, pruned_loss=0.05007, ctc_loss=0.09585, over 19649.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2665, pruned_loss=0.04468, ctc_loss=0.08364, over 3816716.82 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:36:55,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=190389.33333333334, ans=0.125 +2024-08-27 00:36:58,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=190389.33333333334, ans=0.125 +2024-08-27 00:37:04,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=190389.33333333334, ans=0.2 +2024-08-27 00:37:17,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=190496.0, ans=0.125 +2024-08-27 00:37:39,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190602.66666666666, ans=0.125 +2024-08-27 00:37:40,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=190602.66666666666, ans=0.07 +2024-08-27 00:37:46,588 INFO [train.py:1114] (1/4) Epoch 15, batch 900, loss[loss=0.1682, simple_loss=0.2403, pruned_loss=0.03487, ctc_loss=0.06585, over 19801.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.267, pruned_loss=0.04504, ctc_loss=0.08396, over 3819125.90 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:38:08,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=190762.66666666666, ans=0.2 +2024-08-27 00:38:12,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.396e+02 1.546e+02 1.855e+02 3.193e+02, threshold=3.091e+02, percent-clipped=0.0 +2024-08-27 00:38:32,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190869.33333333334, ans=0.0 +2024-08-27 00:38:42,121 INFO [train.py:1114] (1/4) Epoch 15, batch 950, loss[loss=0.1849, simple_loss=0.2589, pruned_loss=0.04046, ctc_loss=0.07486, over 19487.00 frames. 
], tot_loss[loss=0.1955, simple_loss=0.2672, pruned_loss=0.04507, ctc_loss=0.08418, over 3820889.42 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:38:49,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=190922.66666666666, ans=0.2 +2024-08-27 00:39:04,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.92 vs. limit=15.0 +2024-08-27 00:39:11,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-08-27 00:39:13,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=191029.33333333334, ans=0.125 +2024-08-27 00:39:22,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191082.66666666666, ans=0.125 +2024-08-27 00:39:22,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.09 vs. limit=12.0 +2024-08-27 00:39:36,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191189.33333333334, ans=0.125 +2024-08-27 00:39:37,092 INFO [train.py:1114] (1/4) Epoch 15, batch 1000, loss[loss=0.1683, simple_loss=0.2403, pruned_loss=0.0355, ctc_loss=0.06316, over 19861.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2677, pruned_loss=0.04529, ctc_loss=0.08456, over 3815966.68 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:39:54,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=191242.66666666666, ans=0.125 +2024-08-27 00:40:00,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.403e+02 1.586e+02 1.924e+02 3.101e+02, threshold=3.172e+02, percent-clipped=1.0 +2024-08-27 00:40:01,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.81 vs. limit=22.5 +2024-08-27 00:40:01,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=191296.0, ans=0.025 +2024-08-27 00:40:16,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=191402.66666666666, ans=0.0 +2024-08-27 00:40:25,497 INFO [train.py:1114] (1/4) Epoch 15, batch 1050, loss[loss=0.1849, simple_loss=0.2669, pruned_loss=0.03686, ctc_loss=0.07286, over 19833.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2673, pruned_loss=0.04512, ctc_loss=0.08431, over 3823073.04 frames. 
], batch size: 57, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:40:41,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=191509.33333333334, ans=0.2 +2024-08-27 00:40:42,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=191509.33333333334, ans=0.05 +2024-08-27 00:40:46,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=191562.66666666666, ans=0.125 +2024-08-27 00:41:12,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=191669.33333333334, ans=0.0 +2024-08-27 00:41:14,678 INFO [train.py:1114] (1/4) Epoch 15, batch 1100, loss[loss=0.1829, simple_loss=0.2649, pruned_loss=0.03613, ctc_loss=0.07167, over 19583.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2669, pruned_loss=0.04495, ctc_loss=0.08402, over 3830141.95 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:41:17,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=191722.66666666666, ans=0.2 +2024-08-27 00:41:20,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=191722.66666666666, ans=0.0 +2024-08-27 00:41:23,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191776.0, ans=0.0 +2024-08-27 00:41:36,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.518e+02 1.811e+02 2.066e+02 3.149e+02, threshold=3.622e+02, percent-clipped=0.0 +2024-08-27 00:41:57,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=191936.0, ans=0.09899494936611666 +2024-08-27 00:42:07,801 INFO [train.py:1114] (1/4) Epoch 15, batch 1150, loss[loss=0.1834, simple_loss=0.2632, pruned_loss=0.03721, ctc_loss=0.07311, over 19589.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2666, pruned_loss=0.04485, ctc_loss=0.08393, over 3829266.93 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:42:08,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191989.33333333334, ans=0.0 +2024-08-27 00:42:23,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=191989.33333333334, ans=0.125 +2024-08-27 00:42:42,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=192096.0, ans=0.0 +2024-08-27 00:42:55,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=192202.66666666666, ans=0.0 +2024-08-27 00:43:04,223 INFO [train.py:1114] (1/4) Epoch 15, batch 1200, loss[loss=0.1859, simple_loss=0.2721, pruned_loss=0.03625, ctc_loss=0.06805, over 19846.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2674, pruned_loss=0.04509, ctc_loss=0.08444, over 3825173.40 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-27 00:43:06,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.52 vs. 
limit=6.0 +2024-08-27 00:43:09,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=192256.0, ans=0.0 +2024-08-27 00:44:32,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.478e+02 1.729e+02 2.216e+02 4.347e+02, threshold=3.458e+02, percent-clipped=1.0 +2024-08-27 00:45:40,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192416.0, ans=0.1 +2024-08-27 00:46:05,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=192469.33333333334, ans=0.0 +2024-08-27 00:46:05,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=192469.33333333334, ans=0.125 +2024-08-27 00:46:12,630 INFO [train.py:1114] (1/4) Epoch 15, batch 1250, loss[loss=0.2014, simple_loss=0.2689, pruned_loss=0.04918, ctc_loss=0.08879, over 19519.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2677, pruned_loss=0.04508, ctc_loss=0.08435, over 3843000.09 frames. ], batch size: 61, lr: 1.00e-02, grad_scale: 32.0 +2024-08-27 00:47:15,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=192576.0, ans=0.125 +2024-08-27 00:48:15,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.19 vs. limit=22.5 +2024-08-27 00:48:21,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=192736.0, ans=0.125 +2024-08-27 00:48:26,358 INFO [train.py:1114] (1/4) Epoch 15, batch 1300, loss[loss=0.2117, simple_loss=0.2834, pruned_loss=0.05026, ctc_loss=0.09879, over 18934.00 frames. ], tot_loss[loss=0.195, simple_loss=0.267, pruned_loss=0.04475, ctc_loss=0.08359, over 3846476.85 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0 +2024-08-27 00:49:50,655 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.421e+02 1.669e+02 2.080e+02 3.869e+02, threshold=3.339e+02, percent-clipped=2.0 +2024-08-27 00:50:09,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=192949.33333333334, ans=0.125 +2024-08-27 00:50:40,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193002.66666666666, ans=0.125 +2024-08-27 00:50:43,622 INFO [train.py:1114] (1/4) Epoch 15, batch 1350, loss[loss=0.1839, simple_loss=0.2645, pruned_loss=0.03744, ctc_loss=0.07106, over 19755.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2665, pruned_loss=0.0444, ctc_loss=0.08277, over 3858434.80 frames. 
], batch size: 54, lr: 9.98e-03, grad_scale: 32.0 +2024-08-27 00:50:43,780 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:50:48,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=193056.0, ans=0.025 +2024-08-27 00:51:56,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=193109.33333333334, ans=0.0 +2024-08-27 00:51:56,824 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:52:07,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=193162.66666666666, ans=0.0 +2024-08-27 00:52:08,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=193162.66666666666, ans=10.0 +2024-08-27 00:53:17,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=193269.33333333334, ans=0.125 +2024-08-27 00:53:24,207 INFO [train.py:1114] (1/4) Epoch 15, batch 1400, loss[loss=0.1766, simple_loss=0.236, pruned_loss=0.04286, ctc_loss=0.07899, over 19677.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.266, pruned_loss=0.04419, ctc_loss=0.08245, over 3864695.51 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0 +2024-08-27 00:53:34,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.69 vs. limit=15.0 +2024-08-27 00:53:56,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=193429.33333333334, ans=0.0 +2024-08-27 00:53:57,413 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.449e+02 1.647e+02 2.125e+02 3.032e+02, threshold=3.293e+02, percent-clipped=0.0 +2024-08-27 00:54:13,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=193429.33333333334, ans=0.2 +2024-08-27 00:54:22,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=193482.66666666666, ans=0.125 +2024-08-27 00:54:50,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193482.66666666666, ans=0.1 +2024-08-27 00:54:57,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-08-27 00:55:08,678 INFO [train.py:1114] (1/4) Epoch 15, batch 1450, loss[loss=0.2191, simple_loss=0.2879, pruned_loss=0.05465, ctc_loss=0.1027, over 19683.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2668, pruned_loss=0.04439, ctc_loss=0.08285, over 3862898.74 frames. 
], batch size: 63, lr: 9.97e-03, grad_scale: 32.0 +2024-08-27 00:55:34,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=193589.33333333334, ans=0.0 +2024-08-27 00:56:15,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=193749.33333333334, ans=0.2 +2024-08-27 00:56:20,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.71 vs. limit=10.0 +2024-08-27 00:56:36,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=193802.66666666666, ans=0.0 +2024-08-27 00:56:37,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=193802.66666666666, ans=0.125 +2024-08-27 00:56:39,804 INFO [train.py:1114] (1/4) Epoch 15, batch 1500, loss[loss=0.2041, simple_loss=0.277, pruned_loss=0.04792, ctc_loss=0.08831, over 19570.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.267, pruned_loss=0.04429, ctc_loss=0.08269, over 3862677.43 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 32.0 +2024-08-27 00:57:54,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-27 00:58:20,413 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.504e+02 1.720e+02 2.138e+02 3.076e+02, threshold=3.439e+02, percent-clipped=0.0 +2024-08-27 00:58:45,618 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:58:50,644 INFO [train.py:1114] (1/4) Epoch 15, batch 1550, loss[loss=0.2196, simple_loss=0.29, pruned_loss=0.05431, ctc_loss=0.1012, over 19586.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2672, pruned_loss=0.04466, ctc_loss=0.08362, over 3845845.14 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0 +2024-08-27 00:58:55,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.12 vs. limit=22.5 +2024-08-27 00:59:00,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=194176.0, ans=0.025 +2024-08-27 00:59:16,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=194229.33333333334, ans=0.125 +2024-08-27 00:59:16,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=194229.33333333334, ans=0.2 +2024-08-27 00:59:17,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.65 vs. limit=15.0 +2024-08-27 00:59:17,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=194282.66666666666, ans=0.0 +2024-08-27 00:59:18,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194282.66666666666, ans=0.1 +2024-08-27 00:59:37,711 INFO [train.py:1114] (1/4) Epoch 15, batch 1600, loss[loss=0.202, simple_loss=0.2802, pruned_loss=0.04468, ctc_loss=0.08603, over 19833.00 frames. 
], tot_loss[loss=0.1955, simple_loss=0.2674, pruned_loss=0.04494, ctc_loss=0.0841, over 3835219.53 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0 +2024-08-27 00:59:43,515 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:59:50,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=194442.66666666666, ans=0.1 +2024-08-27 00:59:57,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-08-27 00:59:57,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=194496.0, ans=0.125 +2024-08-27 01:00:17,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.455e+02 1.710e+02 2.060e+02 3.831e+02, threshold=3.419e+02, percent-clipped=3.0 +2024-08-27 01:00:41,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=194602.66666666666, ans=0.05 +2024-08-27 01:00:50,799 INFO [train.py:1114] (1/4) Epoch 15, batch 1650, loss[loss=0.2059, simple_loss=0.2801, pruned_loss=0.0476, ctc_loss=0.09133, over 19646.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2676, pruned_loss=0.04505, ctc_loss=0.08421, over 3831815.30 frames. ], batch size: 59, lr: 9.94e-03, grad_scale: 16.0 +2024-08-27 01:01:25,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194762.66666666666, ans=0.125 +2024-08-27 01:01:32,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=194762.66666666666, ans=0.07 +2024-08-27 01:01:42,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194816.0, ans=0.125 +2024-08-27 01:01:43,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=194816.0, ans=0.0 +2024-08-27 01:02:11,998 INFO [train.py:1114] (1/4) Epoch 15, batch 1700, loss[loss=0.1831, simple_loss=0.2453, pruned_loss=0.04425, ctc_loss=0.081, over 19675.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2672, pruned_loss=0.04468, ctc_loss=0.08362, over 3846198.71 frames. ], batch size: 46, lr: 9.94e-03, grad_scale: 16.0 +2024-08-27 01:02:23,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.72 vs. 
limit=22.5 +2024-08-27 01:02:25,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=194976.0, ans=0.0 +2024-08-27 01:02:27,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=194976.0, ans=0.2 +2024-08-27 01:02:27,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=194976.0, ans=0.0 +2024-08-27 01:02:36,963 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.414e+02 1.817e+02 2.372e+02 3.799e+02, threshold=3.634e+02, percent-clipped=1.0 +2024-08-27 01:02:39,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=195029.33333333334, ans=0.0 +2024-08-27 01:02:43,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195082.66666666666, ans=0.1 +2024-08-27 01:03:00,189 INFO [train.py:1114] (1/4) Epoch 15, batch 1750, loss[loss=0.1675, simple_loss=0.2364, pruned_loss=0.03547, ctc_loss=0.06936, over 19658.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2666, pruned_loss=0.04451, ctc_loss=0.08329, over 3851342.47 frames. ], batch size: 45, lr: 9.93e-03, grad_scale: 16.0 +2024-08-27 01:03:06,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.16 vs. limit=15.0 +2024-08-27 01:03:12,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=195242.66666666666, ans=0.125 +2024-08-27 01:03:16,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195242.66666666666, ans=0.125 +2024-08-27 01:03:22,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.32 vs. limit=22.5 +2024-08-27 01:03:32,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=195349.33333333334, ans=0.0 +2024-08-27 01:03:44,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195402.66666666666, ans=0.1 +2024-08-27 01:03:49,209 INFO [train.py:1114] (1/4) Epoch 15, batch 1800, loss[loss=0.1971, simple_loss=0.2737, pruned_loss=0.04399, ctc_loss=0.08131, over 19593.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2665, pruned_loss=0.0446, ctc_loss=0.08337, over 3852161.23 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 16.0 +2024-08-27 01:03:51,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=195456.0, ans=0.025 +2024-08-27 01:03:53,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.75 vs. 
limit=15.0 +2024-08-27 01:03:55,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=195456.0, ans=0.025 +2024-08-27 01:04:05,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=195509.33333333334, ans=0.2 +2024-08-27 01:04:34,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.99 vs. limit=15.0 +2024-08-27 01:04:34,457 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.516e+02 1.927e+02 2.557e+02 3.874e+02, threshold=3.854e+02, percent-clipped=2.0 +2024-08-27 01:04:36,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=195562.66666666666, ans=0.025 +2024-08-27 01:05:42,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=195669.33333333334, ans=0.09899494936611666 +2024-08-27 01:05:54,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195722.66666666666, ans=0.125 +2024-08-27 01:05:54,931 INFO [train.py:1114] (1/4) Epoch 15, batch 1850, loss[loss=0.1998, simple_loss=0.2837, pruned_loss=0.04142, ctc_loss=0.0827, over 19563.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2665, pruned_loss=0.04459, ctc_loss=0.08333, over 3854557.53 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 16.0 +2024-08-27 01:06:23,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195829.33333333334, ans=0.1 +2024-08-27 01:06:38,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=195882.66666666666, ans=0.125 +2024-08-27 01:06:40,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195936.0, ans=0.1 +2024-08-27 01:06:44,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.38 vs. limit=15.0 +2024-08-27 01:06:49,443 INFO [train.py:1114] (1/4) Epoch 15, batch 1900, loss[loss=0.1971, simple_loss=0.2749, pruned_loss=0.04327, ctc_loss=0.0818, over 19630.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2671, pruned_loss=0.04469, ctc_loss=0.08356, over 3859475.40 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 16.0 +2024-08-27 01:06:57,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196042.66666666666, ans=0.125 +2024-08-27 01:07:14,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=196096.0, ans=0.0 +2024-08-27 01:07:43,096 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.422e+02 1.649e+02 2.231e+02 4.535e+02, threshold=3.297e+02, percent-clipped=1.0 +2024-08-27 01:07:52,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196149.33333333334, ans=0.125 +2024-08-27 01:08:04,606 INFO [train.py:1114] (1/4) Epoch 15, batch 1950, loss[loss=0.1763, simple_loss=0.2519, pruned_loss=0.03652, ctc_loss=0.06949, over 19582.00 frames. 
], tot_loss[loss=0.1951, simple_loss=0.2677, pruned_loss=0.04458, ctc_loss=0.08333, over 3868676.38 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 16.0 +2024-08-27 01:08:26,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=196362.66666666666, ans=0.2 +2024-08-27 01:08:42,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=196469.33333333334, ans=0.035 +2024-08-27 01:08:49,900 INFO [train.py:1114] (1/4) Epoch 15, batch 2000, loss[loss=0.1768, simple_loss=0.2417, pruned_loss=0.04098, ctc_loss=0.07476, over 19662.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2687, pruned_loss=0.04514, ctc_loss=0.08444, over 3854088.50 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0 +2024-08-27 01:08:50,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=12.0 +2024-08-27 01:09:33,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=196576.0, ans=0.125 +2024-08-27 01:09:34,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=196576.0, ans=0.2 +2024-08-27 01:09:45,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=15.0 +2024-08-27 01:09:46,684 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.403e+02 1.640e+02 2.044e+02 3.050e+02, threshold=3.279e+02, percent-clipped=0.0 +2024-08-27 01:09:55,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196682.66666666666, ans=0.0 +2024-08-27 01:10:01,037 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:10:10,633 INFO [train.py:1114] (1/4) Epoch 15, batch 2050, loss[loss=0.1657, simple_loss=0.2326, pruned_loss=0.03669, ctc_loss=0.06346, over 19731.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2674, pruned_loss=0.04484, ctc_loss=0.08386, over 3851358.94 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0 +2024-08-27 01:10:14,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.26 vs. limit=22.5 +2024-08-27 01:10:27,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=196842.66666666666, ans=0.125 +2024-08-27 01:10:44,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196949.33333333334, ans=0.1 +2024-08-27 01:10:45,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=6.0 +2024-08-27 01:10:54,857 INFO [train.py:1114] (1/4) Epoch 15, batch 2100, loss[loss=0.1919, simple_loss=0.2645, pruned_loss=0.04237, ctc_loss=0.08653, over 19772.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2663, pruned_loss=0.0442, ctc_loss=0.08275, over 3859328.21 frames. 
], batch size: 54, lr: 9.88e-03, grad_scale: 32.0 +2024-08-27 01:11:11,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=197056.0, ans=0.125 +2024-08-27 01:11:20,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=197109.33333333334, ans=0.0 +2024-08-27 01:11:22,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=197162.66666666666, ans=0.125 +2024-08-27 01:11:26,635 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.442e+02 1.703e+02 2.065e+02 4.080e+02, threshold=3.406e+02, percent-clipped=2.0 +2024-08-27 01:11:39,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=197269.33333333334, ans=0.125 +2024-08-27 01:11:46,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197269.33333333334, ans=0.1 +2024-08-27 01:11:48,561 INFO [train.py:1114] (1/4) Epoch 15, batch 2150, loss[loss=0.1896, simple_loss=0.2676, pruned_loss=0.04115, ctc_loss=0.07331, over 19579.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.266, pruned_loss=0.04414, ctc_loss=0.08234, over 3869107.04 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0 +2024-08-27 01:11:51,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=197322.66666666666, ans=0.5 +2024-08-27 01:11:51,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=197322.66666666666, ans=0.1 +2024-08-27 01:11:58,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.32 vs. limit=15.0 +2024-08-27 01:11:59,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197376.0, ans=0.1 +2024-08-27 01:12:06,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=197429.33333333334, ans=0.025 +2024-08-27 01:12:16,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=197482.66666666666, ans=0.0 +2024-08-27 01:12:31,747 INFO [train.py:1114] (1/4) Epoch 15, batch 2200, loss[loss=0.2125, simple_loss=0.2891, pruned_loss=0.04924, ctc_loss=0.09371, over 19604.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2661, pruned_loss=0.04418, ctc_loss=0.08242, over 3868156.74 frames. 
], batch size: 57, lr: 9.87e-03, grad_scale: 16.0 +2024-08-27 01:12:42,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=197642.66666666666, ans=0.09899494936611666 +2024-08-27 01:12:44,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197642.66666666666, ans=0.1 +2024-08-27 01:12:50,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=197696.0, ans=0.0 +2024-08-27 01:12:52,455 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:12:54,923 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.477e+02 1.816e+02 2.262e+02 3.833e+02, threshold=3.631e+02, percent-clipped=4.0 +2024-08-27 01:13:01,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=197749.33333333334, ans=0.125 +2024-08-27 01:13:01,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=197749.33333333334, ans=0.125 +2024-08-27 01:13:02,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=197749.33333333334, ans=0.125 +2024-08-27 01:13:15,797 INFO [train.py:1114] (1/4) Epoch 15, batch 2250, loss[loss=0.2028, simple_loss=0.2852, pruned_loss=0.04299, ctc_loss=0.08587, over 19619.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2662, pruned_loss=0.04407, ctc_loss=0.08234, over 3868504.27 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 16.0 +2024-08-27 01:13:32,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197962.66666666666, ans=0.1 +2024-08-27 01:13:46,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=198016.0, ans=0.125 +2024-08-27 01:13:47,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198016.0, ans=0.0 +2024-08-27 01:13:58,193 INFO [train.py:1114] (1/4) Epoch 15, batch 2300, loss[loss=0.1863, simple_loss=0.2558, pruned_loss=0.04274, ctc_loss=0.07831, over 19482.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2652, pruned_loss=0.044, ctc_loss=0.0821, over 3861569.81 frames. 
], batch size: 49, lr: 9.86e-03, grad_scale: 16.0 +2024-08-27 01:14:03,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198122.66666666666, ans=0.125 +2024-08-27 01:14:05,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=198122.66666666666, ans=0.0 +2024-08-27 01:14:58,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=198229.33333333334, ans=0.05 +2024-08-27 01:14:59,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198229.33333333334, ans=0.0 +2024-08-27 01:15:02,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.441e+02 1.617e+02 1.954e+02 3.129e+02, threshold=3.235e+02, percent-clipped=0.0 +2024-08-27 01:15:03,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.06 vs. limit=15.0 +2024-08-27 01:15:07,117 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:15:07,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0 +2024-08-27 01:15:13,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0 +2024-08-27 01:15:16,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=198336.0, ans=0.2 +2024-08-27 01:15:23,091 INFO [train.py:1114] (1/4) Epoch 15, batch 2350, loss[loss=0.2061, simple_loss=0.2783, pruned_loss=0.04861, ctc_loss=0.09162, over 19666.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2657, pruned_loss=0.04443, ctc_loss=0.08275, over 3863692.55 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 16.0 +2024-08-27 01:15:35,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198442.66666666666, ans=0.1 +2024-08-27 01:15:40,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=198496.0, ans=0.0 +2024-08-27 01:15:41,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=198496.0, ans=0.0 +2024-08-27 01:15:44,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=198496.0, ans=0.125 +2024-08-27 01:16:00,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=198602.66666666666, ans=0.5 +2024-08-27 01:16:01,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=198602.66666666666, ans=0.125 +2024-08-27 01:16:31,894 INFO [train.py:1114] (1/4) Epoch 15, batch 2400, loss[loss=0.2074, simple_loss=0.2824, pruned_loss=0.04795, ctc_loss=0.09097, over 19295.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2677, pruned_loss=0.04523, ctc_loss=0.0843, over 3856804.39 frames. 
], batch size: 71, lr: 9.85e-03, grad_scale: 32.0 +2024-08-27 01:17:15,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=198656.0, ans=0.0 +2024-08-27 01:17:16,029 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:17:22,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198709.33333333334, ans=0.125 +2024-08-27 01:17:35,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.452e+02 1.605e+02 2.004e+02 3.213e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-27 01:17:35,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=198762.66666666666, ans=0.2 +2024-08-27 01:17:37,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.72 vs. limit=22.5 +2024-08-27 01:17:38,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=198816.0, ans=0.0 +2024-08-27 01:17:45,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.26 vs. limit=6.0 +2024-08-27 01:17:50,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=6.39 vs. limit=12.0 +2024-08-27 01:17:57,656 INFO [train.py:1114] (1/4) Epoch 15, batch 2450, loss[loss=0.2848, simple_loss=0.3193, pruned_loss=0.09325, ctc_loss=0.1596, over 12649.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2714, pruned_loss=0.04782, ctc_loss=0.08919, over 3727997.94 frames. ], batch size: 140, lr: 9.84e-03, grad_scale: 32.0 +2024-08-27 01:18:33,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=199082.66666666666, ans=0.125 +2024-08-27 01:20:20,975 INFO [train.py:1114] (1/4) Epoch 16, batch 0, loss[loss=0.1877, simple_loss=0.2555, pruned_loss=0.0445, ctc_loss=0.07718, over 19798.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2555, pruned_loss=0.0445, ctc_loss=0.07718, over 19798.00 frames. ], batch size: 49, lr: 9.52e-03, grad_scale: 32.0 +2024-08-27 01:20:20,976 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-27 01:21:17,371 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1744, simple_loss=0.2673, pruned_loss=0.03034, ctc_loss=0.05204, over 944034.00 frames. +2024-08-27 01:21:17,372 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-27 01:21:17,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=199130.66666666666, ans=0.0 +2024-08-27 01:21:24,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=199130.66666666666, ans=0.125 +2024-08-27 01:21:25,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=199130.66666666666, ans=0.0 +2024-08-27 01:21:32,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.70 vs. 
limit=22.5 +2024-08-27 01:21:40,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=199237.33333333334, ans=0.125 +2024-08-27 01:21:54,897 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.674e+02 1.811e+02 2.106e+02 3.737e+02, threshold=3.622e+02, percent-clipped=2.0 +2024-08-27 01:22:07,226 INFO [train.py:1114] (1/4) Epoch 16, batch 50, loss[loss=0.1713, simple_loss=0.243, pruned_loss=0.03632, ctc_loss=0.06736, over 19724.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.267, pruned_loss=0.04568, ctc_loss=0.08508, over 845315.06 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0 +2024-08-27 01:22:08,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199397.33333333334, ans=0.125 +2024-08-27 01:22:11,153 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:22:45,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=199610.66666666666, ans=0.0 +2024-08-27 01:22:53,661 INFO [train.py:1114] (1/4) Epoch 16, batch 100, loss[loss=0.1751, simple_loss=0.2482, pruned_loss=0.0371, ctc_loss=0.06941, over 19711.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2687, pruned_loss=0.04544, ctc_loss=0.08474, over 1500370.32 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0 +2024-08-27 01:23:11,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.41 vs. limit=12.0 +2024-08-27 01:23:15,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=199770.66666666666, ans=0.035 +2024-08-27 01:23:17,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=199770.66666666666, ans=0.07 +2024-08-27 01:23:29,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.40 vs. limit=22.5 +2024-08-27 01:23:33,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.434e+02 1.536e+02 1.885e+02 3.287e+02, threshold=3.072e+02, percent-clipped=0.0 +2024-08-27 01:23:45,324 INFO [train.py:1114] (1/4) Epoch 16, batch 150, loss[loss=0.161, simple_loss=0.2273, pruned_loss=0.0343, ctc_loss=0.06503, over 19726.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2669, pruned_loss=0.04466, ctc_loss=0.08313, over 2028336.53 frames. 
], batch size: 47, lr: 9.50e-03, grad_scale: 32.0
+2024-08-27 01:23:45,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=199930.66666666666, ans=0.0
+2024-08-27 01:23:56,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=199984.0, ans=0.125
+2024-08-27 01:23:57,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199984.0, ans=0.125
+2024-08-27 01:24:01,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199984.0, ans=0.1
+2024-08-27 01:24:06,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=200037.33333333334, ans=0.0
+2024-08-27 01:24:11,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=200037.33333333334, ans=0.125
+2024-08-27 01:24:22,433 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.30 vs. limit=22.5
+2024-08-27 01:24:34,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=200197.33333333334, ans=10.0
+2024-08-27 01:24:35,675 INFO [train.py:1114] (1/4) Epoch 16, batch 200, loss[loss=0.2173, simple_loss=0.2796, pruned_loss=0.05651, ctc_loss=0.1051, over 17985.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.265, pruned_loss=0.04391, ctc_loss=0.08185, over 2435728.98 frames. ], batch size: 85, lr: 9.49e-03, grad_scale: 32.0
+2024-08-27 01:24:42,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200197.33333333334, ans=0.1
+2024-08-27 01:24:58,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.23 vs. limit=15.0
+2024-08-27 01:25:14,233 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.526e+02 1.826e+02 2.235e+02 3.925e+02, threshold=3.652e+02, percent-clipped=6.0
+2024-08-27 01:25:52,471 INFO [train.py:1114] (1/4) Epoch 16, batch 250, loss[loss=0.182, simple_loss=0.2636, pruned_loss=0.03624, ctc_loss=0.06983, over 19429.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2651, pruned_loss=0.04374, ctc_loss=0.08172, over 2754843.08 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0
+2024-08-27 01:26:00,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=200464.0, ans=0.125
+2024-08-27 01:26:07,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.62 vs. limit=15.0
+2024-08-27 01:26:11,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=200517.33333333334, ans=0.125
+2024-08-27 01:26:11,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=200517.33333333334, ans=0.0
+2024-08-27 01:26:23,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=200570.66666666666, ans=0.125
+2024-08-27 01:26:33,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=200624.0, ans=0.125
+2024-08-27 01:26:34,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=200624.0, ans=0.0
+2024-08-27 01:26:44,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=200677.33333333334, ans=0.0
+2024-08-27 01:26:44,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=200677.33333333334, ans=0.05
+2024-08-27 01:26:45,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=200677.33333333334, ans=0.125
+2024-08-27 01:26:46,567 INFO [train.py:1114] (1/4) Epoch 16, batch 300, loss[loss=0.2118, simple_loss=0.2766, pruned_loss=0.05364, ctc_loss=0.0992, over 19518.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2644, pruned_loss=0.04337, ctc_loss=0.0811, over 3000764.20 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0
+2024-08-27 01:26:47,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=200730.66666666666, ans=0.2
+2024-08-27 01:27:03,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=200784.0, ans=0.125
+2024-08-27 01:27:22,589 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.450e+02 1.677e+02 2.025e+02 3.129e+02, threshold=3.354e+02, percent-clipped=0.0
+2024-08-27 01:27:29,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=200944.0, ans=10.0
+2024-08-27 01:27:36,611 INFO [train.py:1114] (1/4) Epoch 16, batch 350, loss[loss=0.1894, simple_loss=0.2546, pruned_loss=0.04471, ctc_loss=0.08673, over 19773.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2653, pruned_loss=0.04368, ctc_loss=0.08155, over 3190855.31 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0
+2024-08-27 01:27:44,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=200997.33333333334, ans=10.0
+2024-08-27 01:27:51,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201050.66666666666, ans=0.125
+2024-08-27 01:28:17,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=201210.66666666666, ans=0.2
+2024-08-27 01:28:24,285 INFO [train.py:1114] (1/4) Epoch 16, batch 400, loss[loss=0.1821, simple_loss=0.2586, pruned_loss=0.03893, ctc_loss=0.0694, over 19477.00 frames.
], tot_loss[loss=0.1924, simple_loss=0.2651, pruned_loss=0.04356, ctc_loss=0.08137, over 3342904.94 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0
+2024-08-27 01:28:28,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=201264.0, ans=0.07
+2024-08-27 01:28:30,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201264.0, ans=0.125
+2024-08-27 01:28:31,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=201264.0, ans=0.0
+2024-08-27 01:28:55,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=201424.0, ans=0.125
+2024-08-27 01:28:57,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=201424.0, ans=0.2
+2024-08-27 01:28:58,541 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.444e+02 1.663e+02 2.108e+02 3.293e+02, threshold=3.326e+02, percent-clipped=0.0
+2024-08-27 01:29:00,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201477.33333333334, ans=0.125
+2024-08-27 01:29:10,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=201530.66666666666, ans=0.125
+2024-08-27 01:29:10,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=201530.66666666666, ans=0.0
+2024-08-27 01:29:10,813 INFO [train.py:1114] (1/4) Epoch 16, batch 450, loss[loss=0.1965, simple_loss=0.2716, pruned_loss=0.04466, ctc_loss=0.07996, over 19611.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2657, pruned_loss=0.04395, ctc_loss=0.08207, over 3450336.79 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0
+2024-08-27 01:29:13,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=201530.66666666666, ans=0.1
+2024-08-27 01:29:40,722 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.07 vs. limit=15.0
+2024-08-27 01:29:48,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=201690.66666666666, ans=0.2
+2024-08-27 01:29:59,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=201744.0, ans=0.0
+2024-08-27 01:30:01,622 INFO [train.py:1114] (1/4) Epoch 16, batch 500, loss[loss=0.2138, simple_loss=0.2891, pruned_loss=0.05196, ctc_loss=0.08651, over 19649.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2648, pruned_loss=0.04358, ctc_loss=0.0815, over 3545536.02 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0
+2024-08-27 01:30:33,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=201957.33333333334, ans=0.125
+2024-08-27 01:30:39,494 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.484e+02 1.746e+02 2.096e+02 4.072e+02, threshold=3.492e+02, percent-clipped=1.0
+2024-08-27 01:30:40,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=201957.33333333334, ans=0.125
+2024-08-27 01:30:51,384 INFO [train.py:1114] (1/4) Epoch 16, batch 550, loss[loss=0.2214, simple_loss=0.2956, pruned_loss=0.05388, ctc_loss=0.0989, over 19295.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2651, pruned_loss=0.0437, ctc_loss=0.08173, over 3607433.72 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0
+2024-08-27 01:30:53,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=202064.0, ans=0.0
+2024-08-27 01:31:18,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202224.0, ans=0.1
+2024-08-27 01:31:27,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202277.33333333334, ans=0.125
+2024-08-27 01:31:37,738 INFO [train.py:1114] (1/4) Epoch 16, batch 600, loss[loss=0.2041, simple_loss=0.2821, pruned_loss=0.04643, ctc_loss=0.08311, over 19341.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2657, pruned_loss=0.04391, ctc_loss=0.08203, over 3665042.59 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0
+2024-08-27 01:31:45,224 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:31:46,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=202384.0, ans=0.2
+2024-08-27 01:31:55,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=202437.33333333334, ans=0.125
+2024-08-27 01:31:58,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.24 vs. limit=15.0
+2024-08-27 01:32:14,251 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.474e+02 1.879e+02 2.462e+02 5.922e+02, threshold=3.759e+02, percent-clipped=13.0
+2024-08-27 01:32:23,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=202544.0, ans=0.2
+2024-08-27 01:32:25,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=202597.33333333334, ans=0.125
+2024-08-27 01:32:26,188 INFO [train.py:1114] (1/4) Epoch 16, batch 650, loss[loss=0.1778, simple_loss=0.2559, pruned_loss=0.03581, ctc_loss=0.07024, over 19775.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2651, pruned_loss=0.04359, ctc_loss=0.08148, over 3716361.67 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0
+2024-08-27 01:32:30,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.90 vs.
limit=15.0
+2024-08-27 01:32:31,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=202597.33333333334, ans=0.125
+2024-08-27 01:32:35,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=202597.33333333334, ans=0.125
+2024-08-27 01:32:37,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=202650.66666666666, ans=0.0
+2024-08-27 01:32:41,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=202650.66666666666, ans=0.125
+2024-08-27 01:32:55,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=202757.33333333334, ans=0.0
+2024-08-27 01:32:57,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=202757.33333333334, ans=0.2
+2024-08-27 01:33:04,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0
+2024-08-27 01:33:05,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.09 vs. limit=15.0
+2024-08-27 01:33:18,144 INFO [train.py:1114] (1/4) Epoch 16, batch 700, loss[loss=0.1861, simple_loss=0.2645, pruned_loss=0.03945, ctc_loss=0.07205, over 19726.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2655, pruned_loss=0.04377, ctc_loss=0.08163, over 3748555.01 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0
+2024-08-27 01:33:26,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.96 vs. limit=22.5
+2024-08-27 01:33:38,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202970.66666666666, ans=0.1
+2024-08-27 01:33:40,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.53 vs. limit=22.5
+2024-08-27 01:33:52,574 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.460e+02 1.707e+02 2.152e+02 4.812e+02, threshold=3.413e+02, percent-clipped=3.0
+2024-08-27 01:34:04,697 INFO [train.py:1114] (1/4) Epoch 16, batch 750, loss[loss=0.178, simple_loss=0.2597, pruned_loss=0.03552, ctc_loss=0.06282, over 19527.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2646, pruned_loss=0.04337, ctc_loss=0.08095, over 3773192.21 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 32.0
+2024-08-27 01:34:11,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=203130.66666666666, ans=0.0
+2024-08-27 01:34:18,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=203184.0, ans=0.0
+2024-08-27 01:34:31,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=203237.33333333334, ans=0.125
+2024-08-27 01:34:31,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=203290.66666666666, ans=0.125
+2024-08-27 01:34:43,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=203344.0, ans=0.125
+2024-08-27 01:34:43,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.29 vs. limit=15.0
+2024-08-27 01:34:57,323 INFO [train.py:1114] (1/4) Epoch 16, batch 800, loss[loss=0.1687, simple_loss=0.2405, pruned_loss=0.03562, ctc_loss=0.06433, over 19808.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2646, pruned_loss=0.04337, ctc_loss=0.08091, over 3794738.18 frames. ], batch size: 49, lr: 9.42e-03, grad_scale: 32.0
+2024-08-27 01:35:29,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.03 vs. limit=22.5
+2024-08-27 01:35:32,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203504.0, ans=0.125
+2024-08-27 01:35:48,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=203557.33333333334, ans=0.125
+2024-08-27 01:35:49,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.508e+02 1.846e+02 2.334e+02 3.502e+02, threshold=3.692e+02, percent-clipped=1.0
+2024-08-27 01:35:56,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=203610.66666666666, ans=0.025
+2024-08-27 01:36:01,625 INFO [train.py:1114] (1/4) Epoch 16, batch 850, loss[loss=0.2128, simple_loss=0.2824, pruned_loss=0.05209, ctc_loss=0.09754, over 19655.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2644, pruned_loss=0.04341, ctc_loss=0.08111, over 3814579.97 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0
+2024-08-27 01:36:14,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.38 vs. limit=22.5
+2024-08-27 01:36:16,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0
+2024-08-27 01:36:23,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203770.66666666666, ans=0.125
+2024-08-27 01:36:38,100 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.67 vs.
limit=15.0
+2024-08-27 01:36:41,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203877.33333333334, ans=0.0
+2024-08-27 01:36:51,749 INFO [train.py:1114] (1/4) Epoch 16, batch 900, loss[loss=0.1673, simple_loss=0.2392, pruned_loss=0.03485, ctc_loss=0.06402, over 19423.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2647, pruned_loss=0.04366, ctc_loss=0.0816, over 3817507.33 frames. ], batch size: 48, lr: 9.41e-03, grad_scale: 32.0
+2024-08-27 01:36:55,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203930.66666666666, ans=0.1
+2024-08-27 01:36:56,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. limit=12.0
+2024-08-27 01:37:02,184 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:37:03,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=203984.0, ans=0.125
+2024-08-27 01:37:09,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=12.0
+2024-08-27 01:37:26,147 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.398e+02 1.563e+02 1.898e+02 3.698e+02, threshold=3.126e+02, percent-clipped=1.0
+2024-08-27 01:37:31,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0
+2024-08-27 01:37:32,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=204144.0, ans=0.125
+2024-08-27 01:37:38,123 INFO [train.py:1114] (1/4) Epoch 16, batch 950, loss[loss=0.1797, simple_loss=0.253, pruned_loss=0.03834, ctc_loss=0.07424, over 19489.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.265, pruned_loss=0.04374, ctc_loss=0.08184, over 3820500.71 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0
+2024-08-27 01:37:46,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0
+2024-08-27 01:37:54,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=204250.66666666666, ans=0.2
+2024-08-27 01:38:16,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=204357.33333333334, ans=0.125
+2024-08-27 01:38:27,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204410.66666666666, ans=0.1
+2024-08-27 01:38:29,230 INFO [train.py:1114] (1/4) Epoch 16, batch 1000, loss[loss=0.1765, simple_loss=0.2565, pruned_loss=0.03432, ctc_loss=0.06953, over 19869.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.266, pruned_loss=0.04403, ctc_loss=0.08238, over 3817813.39 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0
+2024-08-27 01:38:46,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.91 vs. limit=15.0
+2024-08-27 01:38:53,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.whiten.whitening_limit, batch_count=204570.66666666666, ans=15.0
+2024-08-27 01:39:07,622 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.409e+02 1.616e+02 2.034e+02 3.159e+02, threshold=3.231e+02, percent-clipped=1.0
+2024-08-27 01:39:19,857 INFO [train.py:1114] (1/4) Epoch 16, batch 1050, loss[loss=0.1852, simple_loss=0.2667, pruned_loss=0.03749, ctc_loss=0.07148, over 19829.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2651, pruned_loss=0.04373, ctc_loss=0.08166, over 3823563.85 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 32.0
+2024-08-27 01:39:23,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.36 vs. limit=15.0
+2024-08-27 01:39:26,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=204730.66666666666, ans=0.025
+2024-08-27 01:39:39,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=204837.33333333334, ans=0.2
+2024-08-27 01:39:47,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=204890.66666666666, ans=0.0
+2024-08-27 01:39:47,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=204890.66666666666, ans=0.125
+2024-08-27 01:39:49,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204890.66666666666, ans=0.0
+2024-08-27 01:39:56,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=204944.0, ans=0.125
+2024-08-27 01:39:59,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=204944.0, ans=0.2
+2024-08-27 01:40:03,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=204944.0, ans=0.0
+2024-08-27 01:40:07,055 INFO [train.py:1114] (1/4) Epoch 16, batch 1100, loss[loss=0.1806, simple_loss=0.2556, pruned_loss=0.03838, ctc_loss=0.07207, over 19592.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2647, pruned_loss=0.04334, ctc_loss=0.08119, over 3829492.88 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 32.0
+2024-08-27 01:40:09,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs.
limit=15.0
+2024-08-27 01:40:21,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205050.66666666666, ans=0.1
+2024-08-27 01:40:29,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=205104.0, ans=0.0
+2024-08-27 01:40:43,784 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:40:44,420 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.474e+02 1.664e+02 2.002e+02 3.685e+02, threshold=3.328e+02, percent-clipped=2.0
+2024-08-27 01:40:51,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.60 vs. limit=6.0
+2024-08-27 01:40:59,572 INFO [train.py:1114] (1/4) Epoch 16, batch 1150, loss[loss=0.1838, simple_loss=0.2555, pruned_loss=0.03989, ctc_loss=0.08074, over 19567.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2645, pruned_loss=0.04328, ctc_loss=0.08112, over 3828230.62 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 32.0
+2024-08-27 01:42:50,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0
+2024-08-27 01:42:51,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=205264.0, ans=0.125
+2024-08-27 01:42:52,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.75 vs. limit=15.0
+2024-08-27 01:43:07,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=205317.33333333334, ans=0.1
+2024-08-27 01:43:25,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=205424.0, ans=0.0
+2024-08-27 01:43:33,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205477.33333333334, ans=0.125
+2024-08-27 01:43:36,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=205477.33333333334, ans=0.025
+2024-08-27 01:43:37,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.04 vs. limit=15.0
+2024-08-27 01:43:37,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.37 vs. limit=22.5
+2024-08-27 01:43:40,100 INFO [train.py:1114] (1/4) Epoch 16, batch 1200, loss[loss=0.1948, simple_loss=0.2749, pruned_loss=0.04128, ctc_loss=0.08043, over 19850.00 frames. ], tot_loss[loss=0.193, simple_loss=0.266, pruned_loss=0.04366, ctc_loss=0.08173, over 3824031.93 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0
+2024-08-27 01:43:47,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=205530.66666666666, ans=0.0
+2024-08-27 01:43:49,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=205530.66666666666, ans=0.1
+2024-08-27 01:43:51,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.93 vs. limit=15.0
+2024-08-27 01:43:55,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=205584.0, ans=0.0
+2024-08-27 01:43:59,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=205637.33333333334, ans=0.0
+2024-08-27 01:44:05,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=205637.33333333334, ans=0.125
+2024-08-27 01:44:09,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=205690.66666666666, ans=0.125
+2024-08-27 01:44:09,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=205690.66666666666, ans=0.0
+2024-08-27 01:44:16,054 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.520e+02 1.803e+02 2.158e+02 3.897e+02, threshold=3.606e+02, percent-clipped=2.0
+2024-08-27 01:44:21,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=205744.0, ans=0.125
+2024-08-27 01:44:22,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205744.0, ans=0.125
+2024-08-27 01:44:28,170 INFO [train.py:1114] (1/4) Epoch 16, batch 1250, loss[loss=0.2107, simple_loss=0.2887, pruned_loss=0.04757, ctc_loss=0.09367, over 19502.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2659, pruned_loss=0.04347, ctc_loss=0.08145, over 3842506.11 frames. ], batch size: 61, lr: 9.37e-03, grad_scale: 32.0
+2024-08-27 01:44:29,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205797.33333333334, ans=0.0
+2024-08-27 01:44:45,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205850.66666666666, ans=0.1
+2024-08-27 01:44:50,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=205904.0, ans=0.125
+2024-08-27 01:45:17,709 INFO [train.py:1114] (1/4) Epoch 16, batch 1300, loss[loss=0.2049, simple_loss=0.2796, pruned_loss=0.04817, ctc_loss=0.08484, over 18933.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2654, pruned_loss=0.04346, ctc_loss=0.08112, over 3846574.33 frames.
], batch size: 76, lr: 9.36e-03, grad_scale: 32.0
+2024-08-27 01:45:19,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=206064.0, ans=0.2
+2024-08-27 01:45:26,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206117.33333333334, ans=0.125
+2024-08-27 01:45:36,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206170.66666666666, ans=0.125
+2024-08-27 01:45:38,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=206170.66666666666, ans=0.125
+2024-08-27 01:45:40,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=206170.66666666666, ans=0.125
+2024-08-27 01:45:42,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206170.66666666666, ans=0.1
+2024-08-27 01:45:52,826 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.516e+02 1.773e+02 2.282e+02 3.618e+02, threshold=3.546e+02, percent-clipped=1.0
+2024-08-27 01:45:56,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.75 vs. limit=15.0
+2024-08-27 01:45:58,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0
+2024-08-27 01:46:03,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206277.33333333334, ans=0.125
+2024-08-27 01:46:06,815 INFO [train.py:1114] (1/4) Epoch 16, batch 1350, loss[loss=0.1797, simple_loss=0.2591, pruned_loss=0.03645, ctc_loss=0.06831, over 19737.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2649, pruned_loss=0.04312, ctc_loss=0.08032, over 3857255.26 frames. ], batch size: 54, lr: 9.36e-03, grad_scale: 32.0
+2024-08-27 01:46:10,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=206330.66666666666, ans=0.0
+2024-08-27 01:46:19,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=206384.0, ans=0.125
+2024-08-27 01:46:22,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=206384.0, ans=0.0
+2024-08-27 01:46:25,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=206384.0, ans=0.0
+2024-08-27 01:46:27,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=206437.33333333334, ans=0.125
+2024-08-27 01:46:31,108 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:46:31,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206437.33333333334, ans=0.125
+2024-08-27 01:46:56,732 INFO [train.py:1114] (1/4) Epoch 16, batch 1400, loss[loss=0.1827, simple_loss=0.2494, pruned_loss=0.04183, ctc_loss=0.08076, over 19665.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2648, pruned_loss=0.04317, ctc_loss=0.08031, over 3863890.49 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 32.0
+2024-08-27 01:46:57,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0
+2024-08-27 01:47:02,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206597.33333333334, ans=0.125
+2024-08-27 01:47:15,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=206704.0, ans=0.125
+2024-08-27 01:47:19,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.94 vs. limit=15.0
+2024-08-27 01:48:25,293 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.410e+02 1.569e+02 1.892e+02 4.037e+02, threshold=3.138e+02, percent-clipped=1.0
+2024-08-27 01:48:26,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=206757.33333333334, ans=0.07
+2024-08-27 01:48:36,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.31 vs. limit=15.0
+2024-08-27 01:48:37,438 INFO [train.py:1114] (1/4) Epoch 16, batch 1450, loss[loss=0.2029, simple_loss=0.2722, pruned_loss=0.04841, ctc_loss=0.09191, over 19670.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2653, pruned_loss=0.0434, ctc_loss=0.08088, over 3862124.09 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0
+2024-08-27 01:48:39,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=206864.0, ans=15.0
+2024-08-27 01:48:44,825 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:48:51,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206917.33333333334, ans=0.125
+2024-08-27 01:49:00,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=206970.66666666666, ans=0.125
+2024-08-27 01:49:05,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206970.66666666666, ans=0.125
+2024-08-27 01:49:07,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=207024.0, ans=0.025
+2024-08-27 01:49:21,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=207077.33333333334, ans=0.05
+2024-08-27 01:49:25,843 INFO [train.py:1114] (1/4) Epoch 16, batch 1500, loss[loss=0.2057, simple_loss=0.2833, pruned_loss=0.04722, ctc_loss=0.08391, over 19580.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2655, pruned_loss=0.04328, ctc_loss=0.08071, over 3862803.67 frames.
], batch size: 57, lr: 9.34e-03, grad_scale: 32.0
+2024-08-27 01:49:34,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207130.66666666666, ans=0.0
+2024-08-27 01:49:42,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207184.0, ans=0.125
+2024-08-27 01:49:47,323 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:50:02,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. limit=6.0
+2024-08-27 01:50:03,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.422e+02 1.666e+02 2.042e+02 4.208e+02, threshold=3.332e+02, percent-clipped=3.0
+2024-08-27 01:50:19,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=207344.0, ans=0.0
+2024-08-27 01:50:22,137 INFO [train.py:1114] (1/4) Epoch 16, batch 1550, loss[loss=0.2043, simple_loss=0.2785, pruned_loss=0.04758, ctc_loss=0.08708, over 19604.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2661, pruned_loss=0.0436, ctc_loss=0.08156, over 3847412.94 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0
+2024-08-27 01:50:29,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.43 vs. limit=15.0
+2024-08-27 01:51:10,035 INFO [train.py:1114] (1/4) Epoch 16, batch 1600, loss[loss=0.1967, simple_loss=0.2766, pruned_loss=0.04252, ctc_loss=0.07921, over 19845.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2658, pruned_loss=0.04367, ctc_loss=0.08175, over 3835699.28 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0
+2024-08-27 01:51:29,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.94 vs. limit=22.5
+2024-08-27 01:51:55,658 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.401e+02 1.606e+02 1.975e+02 3.175e+02, threshold=3.213e+02, percent-clipped=0.0
+2024-08-27 01:51:59,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=207877.33333333334, ans=0.2
+2024-08-27 01:52:00,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=207877.33333333334, ans=0.125
+2024-08-27 01:52:06,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=207930.66666666666, ans=0.125
+2024-08-27 01:52:14,354 INFO [train.py:1114] (1/4) Epoch 16, batch 1650, loss[loss=0.2042, simple_loss=0.2844, pruned_loss=0.04518, ctc_loss=0.08399, over 19618.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2655, pruned_loss=0.04366, ctc_loss=0.08171, over 3831939.66 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0
+2024-08-27 01:52:15,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.19 vs. limit=15.0
+2024-08-27 01:52:41,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208037.33333333334, ans=0.1
+2024-08-27 01:52:56,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0
+2024-08-27 01:52:57,453 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:53:04,339 INFO [train.py:1114] (1/4) Epoch 16, batch 1700, loss[loss=0.1736, simple_loss=0.2436, pruned_loss=0.03693, ctc_loss=0.07451, over 19656.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2649, pruned_loss=0.04317, ctc_loss=0.08076, over 3846742.17 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 64.0
+2024-08-27 01:53:04,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208197.33333333334, ans=0.125
+2024-08-27 01:53:17,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=208250.66666666666, ans=0.0
+2024-08-27 01:53:19,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=208250.66666666666, ans=0.025
+2024-08-27 01:53:21,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=208304.0, ans=0.05
+2024-08-27 01:53:26,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.17 vs. limit=22.5
+2024-08-27 01:53:27,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=208304.0, ans=0.025
+2024-08-27 01:53:33,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=208357.33333333334, ans=0.125
+2024-08-27 01:53:39,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208357.33333333334, ans=0.125
+2024-08-27 01:53:42,381 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.468e+02 1.742e+02 2.214e+02 3.607e+02, threshold=3.484e+02, percent-clipped=2.0
+2024-08-27 01:53:46,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208410.66666666666, ans=0.125
+2024-08-27 01:53:53,048 INFO [train.py:1114] (1/4) Epoch 16, batch 1750, loss[loss=0.1697, simple_loss=0.2343, pruned_loss=0.038, ctc_loss=0.07301, over 19637.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.264, pruned_loss=0.04282, ctc_loss=0.08028, over 3850980.22 frames.
], batch size: 45, lr: 9.31e-03, grad_scale: 32.0
+2024-08-27 01:54:09,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=208517.33333333334, ans=0.2
+2024-08-27 01:54:19,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=208624.0, ans=0.015
+2024-08-27 01:54:32,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208677.33333333334, ans=0.1
+2024-08-27 01:54:34,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=208677.33333333334, ans=0.09899494936611666
+2024-08-27 01:54:37,048 INFO [train.py:1114] (1/4) Epoch 16, batch 1800, loss[loss=0.1869, simple_loss=0.2677, pruned_loss=0.03824, ctc_loss=0.07439, over 19619.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2642, pruned_loss=0.04296, ctc_loss=0.08044, over 3853296.31 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0
+2024-08-27 01:54:38,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=208730.66666666666, ans=0.2
+2024-08-27 01:54:45,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=208784.0, ans=0.025
+2024-08-27 01:54:46,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=208784.0, ans=0.0
+2024-08-27 01:54:48,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=208784.0, ans=0.2
+2024-08-27 01:54:52,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=208784.0, ans=0.0
+2024-08-27 01:55:10,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.563e+02 1.995e+02 2.578e+02 4.186e+02, threshold=3.991e+02, percent-clipped=7.0
+2024-08-27 01:55:16,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208944.0, ans=0.1
+2024-08-27 01:55:17,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.42 vs. limit=22.5
+2024-08-27 01:55:20,663 INFO [train.py:1114] (1/4) Epoch 16, batch 1850, loss[loss=0.211, simple_loss=0.285, pruned_loss=0.04946, ctc_loss=0.09487, over 19586.00 frames. ], tot_loss[loss=0.191, simple_loss=0.264, pruned_loss=0.04289, ctc_loss=0.08025, over 3856883.61 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0
+2024-08-27 01:55:21,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=208997.33333333334, ans=0.0
+2024-08-27 01:55:30,500 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:55:43,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=209104.0, ans=0.125
+2024-08-27 01:55:48,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=209157.33333333334, ans=0.025
+2024-08-27 01:55:55,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.43 vs. limit=15.0
+2024-08-27 01:56:04,472 INFO [train.py:1114] (1/4) Epoch 16, batch 1900, loss[loss=0.1941, simple_loss=0.2742, pruned_loss=0.04174, ctc_loss=0.07616, over 19647.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2648, pruned_loss=0.0432, ctc_loss=0.08059, over 3860956.87 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0
+2024-08-27 01:56:07,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=209264.0, ans=0.0
+2024-08-27 01:56:17,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209317.33333333334, ans=0.125
+2024-08-27 01:56:37,687 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.418e+02 1.626e+02 2.079e+02 4.675e+02, threshold=3.252e+02, percent-clipped=2.0
+2024-08-27 01:56:42,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=209477.33333333334, ans=0.07
+2024-08-27 01:56:48,329 INFO [train.py:1114] (1/4) Epoch 16, batch 1950, loss[loss=0.1777, simple_loss=0.2607, pruned_loss=0.0343, ctc_loss=0.06538, over 19605.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2657, pruned_loss=0.04336, ctc_loss=0.08082, over 3870048.19 frames. ], batch size: 52, lr: 9.29e-03, grad_scale: 32.0
+2024-08-27 01:56:54,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.60 vs. limit=10.0
+2024-08-27 01:57:08,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209637.33333333334, ans=0.1
+2024-08-27 01:57:13,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=209637.33333333334, ans=0.2
+2024-08-27 01:57:22,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209690.66666666666, ans=0.1
+2024-08-27 01:57:35,865 INFO [train.py:1114] (1/4) Epoch 16, batch 2000, loss[loss=0.1683, simple_loss=0.2366, pruned_loss=0.03622, ctc_loss=0.0688, over 19660.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2666, pruned_loss=0.04384, ctc_loss=0.08169, over 3855245.04 frames.
], batch size: 45, lr: 9.28e-03, grad_scale: 32.0
+2024-08-27 01:57:36,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=209797.33333333334, ans=15.0
+2024-08-27 01:58:00,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=209904.0, ans=0.0
+2024-08-27 01:58:09,440 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.401e+02 1.655e+02 2.254e+02 4.011e+02, threshold=3.310e+02, percent-clipped=6.0
+2024-08-27 01:58:13,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.83 vs. limit=22.5
+2024-08-27 01:58:20,009 INFO [train.py:1114] (1/4) Epoch 16, batch 2050, loss[loss=0.1625, simple_loss=0.2339, pruned_loss=0.03279, ctc_loss=0.06347, over 19737.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2655, pruned_loss=0.04367, ctc_loss=0.08133, over 3852853.59 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0
+2024-08-27 01:58:41,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=210170.66666666666, ans=0.0
+2024-08-27 01:58:41,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=210170.66666666666, ans=0.0
+2024-08-27 01:58:57,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=210277.33333333334, ans=0.125
+2024-08-27 01:59:02,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210330.66666666666, ans=0.0
+2024-08-27 01:59:03,129 INFO [train.py:1114] (1/4) Epoch 16, batch 2100, loss[loss=0.1817, simple_loss=0.2616, pruned_loss=0.03686, ctc_loss=0.0702, over 19766.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2646, pruned_loss=0.04298, ctc_loss=0.08018, over 3859400.07 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 32.0
+2024-08-27 01:59:08,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=210330.66666666666, ans=0.125
+2024-08-27 01:59:08,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=210330.66666666666, ans=0.125
+2024-08-27 01:59:11,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=210384.0, ans=0.125
+2024-08-27 01:59:11,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=210384.0, ans=0.125
+2024-08-27 01:59:16,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=210384.0, ans=22.5
+2024-08-27 01:59:20,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=210437.33333333334, ans=0.05
+2024-08-27 01:59:35,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.547e+02 1.892e+02 2.472e+02 4.594e+02, threshold=3.784e+02, percent-clipped=3.0
+2024-08-27 01:59:47,029 INFO [train.py:1114] (1/4) Epoch 16, batch 2150, loss[loss=0.1736, simple_loss=0.251, pruned_loss=0.03525, ctc_loss=0.06423, over 19576.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.264, pruned_loss=0.04273, ctc_loss=0.07982, over 3869308.08 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0
+2024-08-27 01:59:56,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=210650.66666666666, ans=0.05
+2024-08-27 01:59:57,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210650.66666666666, ans=0.1
+2024-08-27 02:00:09,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=210704.0, ans=0.0
+2024-08-27 02:00:30,378 INFO [train.py:1114] (1/4) Epoch 16, batch 2200, loss[loss=0.1891, simple_loss=0.2618, pruned_loss=0.04175, ctc_loss=0.08196, over 19591.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2636, pruned_loss=0.04239, ctc_loss=0.07913, over 3867258.39 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0
+2024-08-27 02:00:40,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.19 vs. limit=15.0
+2024-08-27 02:00:59,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=211024.0, ans=0.125
+2024-08-27 02:01:04,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=211024.0, ans=0.125
+2024-08-27 02:01:04,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=211024.0, ans=0.125
+2024-08-27 02:01:06,364 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.493e+02 1.671e+02 2.113e+02 4.070e+02, threshold=3.342e+02, percent-clipped=1.0
+2024-08-27 02:01:17,555 INFO [train.py:1114] (1/4) Epoch 16, batch 2250, loss[loss=0.1918, simple_loss=0.272, pruned_loss=0.04161, ctc_loss=0.07096, over 19622.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2637, pruned_loss=0.04255, ctc_loss=0.07936, over 3866351.87 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0
+2024-08-27 02:01:36,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=211237.33333333334, ans=0.125
+2024-08-27 02:01:38,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=211237.33333333334, ans=0.07
+2024-08-27 02:01:41,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211237.33333333334, ans=0.0
+2024-08-27 02:01:41,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.73 vs. limit=12.0
+2024-08-27 02:01:46,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=211290.66666666666, ans=0.2
+2024-08-27 02:01:48,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.79 vs. limit=22.5
+2024-08-27 02:02:00,445 INFO [train.py:1114] (1/4) Epoch 16, batch 2300, loss[loss=0.1781, simple_loss=0.2507, pruned_loss=0.03824, ctc_loss=0.07247, over 19515.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2628, pruned_loss=0.04249, ctc_loss=0.07931, over 3859699.18 frames.
], batch size: 49, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:02:18,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211504.0, ans=0.1 +2024-08-27 02:02:27,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=211557.33333333334, ans=0.125 +2024-08-27 02:02:33,264 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.480e+02 1.722e+02 2.096e+02 3.640e+02, threshold=3.444e+02, percent-clipped=3.0 +2024-08-27 02:02:36,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0 +2024-08-27 02:02:44,151 INFO [train.py:1114] (1/4) Epoch 16, batch 2350, loss[loss=0.2117, simple_loss=0.2832, pruned_loss=0.05129, ctc_loss=0.09434, over 19677.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2631, pruned_loss=0.04292, ctc_loss=0.08003, over 3862859.07 frames. ], batch size: 63, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:02:59,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=211717.33333333334, ans=10.0 +2024-08-27 02:03:00,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.67 vs. limit=15.0 +2024-08-27 02:03:12,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0 +2024-08-27 02:03:12,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.39 vs. limit=15.0 +2024-08-27 02:03:28,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=211877.33333333334, ans=0.0 +2024-08-27 02:03:29,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=211877.33333333334, ans=0.07 +2024-08-27 02:03:32,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211877.33333333334, ans=0.1 +2024-08-27 02:03:34,532 INFO [train.py:1114] (1/4) Epoch 16, batch 2400, loss[loss=0.21, simple_loss=0.2809, pruned_loss=0.05103, ctc_loss=0.09248, over 19323.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2654, pruned_loss=0.04387, ctc_loss=0.08184, over 3858282.58 frames. ], batch size: 71, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:03:36,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.36 vs. limit=22.5 +2024-08-27 02:03:41,795 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.98 vs. 
limit=15.0 +2024-08-27 02:03:43,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211984.0, ans=0.1 +2024-08-27 02:03:50,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=211984.0, ans=0.125 +2024-08-27 02:03:54,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=212037.33333333334, ans=0.125 +2024-08-27 02:04:01,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=212090.66666666666, ans=0.0 +2024-08-27 02:04:07,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.442e+02 1.653e+02 2.239e+02 3.362e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-27 02:04:11,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212144.0, ans=0.1 +2024-08-27 02:04:18,803 INFO [train.py:1114] (1/4) Epoch 16, batch 2450, loss[loss=0.2534, simple_loss=0.2987, pruned_loss=0.07543, ctc_loss=0.1432, over 13173.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2688, pruned_loss=0.04605, ctc_loss=0.08615, over 3731705.48 frames. ], batch size: 143, lr: 9.23e-03, grad_scale: 32.0 +2024-08-27 02:04:23,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=212197.33333333334, ans=0.125 +2024-08-27 02:04:24,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.94 vs. limit=5.0 +2024-08-27 02:04:42,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212304.0, ans=0.125 +2024-08-27 02:05:43,528 INFO [train.py:1114] (1/4) Epoch 17, batch 0, loss[loss=0.1764, simple_loss=0.2461, pruned_loss=0.03837, ctc_loss=0.07496, over 19820.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2461, pruned_loss=0.03837, ctc_loss=0.07496, over 19820.00 frames. ], batch size: 49, lr: 8.95e-03, grad_scale: 32.0 +2024-08-27 02:05:43,529 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-27 02:05:51,185 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.6209, 4.8802, 5.5564, 5.4054], device='cuda:1') +2024-08-27 02:05:53,282 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.172, simple_loss=0.265, pruned_loss=0.02949, ctc_loss=0.04976, over 944034.00 frames. +2024-08-27 02:05:53,283 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-27 02:06:02,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212458.66666666666, ans=0.125 +2024-08-27 02:06:05,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.60 vs. limit=15.0 +2024-08-27 02:06:13,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=212512.0, ans=0.125 +2024-08-27 02:06:13,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.90 vs. 
limit=15.0 +2024-08-27 02:06:28,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=212565.33333333334, ans=0.0 +2024-08-27 02:06:29,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=212618.66666666666, ans=0.125 +2024-08-27 02:06:40,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.629e+02 1.801e+02 2.001e+02 3.255e+02, threshold=3.602e+02, percent-clipped=0.0 +2024-08-27 02:06:40,342 INFO [train.py:1114] (1/4) Epoch 17, batch 50, loss[loss=0.1839, simple_loss=0.2532, pruned_loss=0.04171, ctc_loss=0.07791, over 19696.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2659, pruned_loss=0.04336, ctc_loss=0.0816, over 844645.99 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:06:43,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=212672.0, ans=0.2 +2024-08-27 02:06:55,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212725.33333333334, ans=0.1 +2024-08-27 02:07:00,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=212778.66666666666, ans=10.0 +2024-08-27 02:07:04,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212778.66666666666, ans=0.125 +2024-08-27 02:07:29,668 INFO [train.py:1114] (1/4) Epoch 17, batch 100, loss[loss=0.1831, simple_loss=0.2592, pruned_loss=0.03877, ctc_loss=0.0735, over 19711.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2673, pruned_loss=0.04376, ctc_loss=0.08169, over 1499050.01 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:07:40,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212992.0, ans=0.1 +2024-08-27 02:08:17,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=213152.0, ans=0.125 +2024-08-27 02:08:20,137 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.665e+02 2.006e+02 3.256e+02, threshold=3.330e+02, percent-clipped=0.0 +2024-08-27 02:08:20,171 INFO [train.py:1114] (1/4) Epoch 17, batch 150, loss[loss=0.163, simple_loss=0.2278, pruned_loss=0.0362, ctc_loss=0.06461, over 19703.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2652, pruned_loss=0.04315, ctc_loss=0.08039, over 2028977.84 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:09:18,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213365.33333333334, ans=0.1 +2024-08-27 02:10:25,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-27 02:10:30,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.61 vs. 
limit=15.0 +2024-08-27 02:10:50,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=213418.66666666666, ans=0.2 +2024-08-27 02:10:53,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=213418.66666666666, ans=0.025 +2024-08-27 02:10:55,453 INFO [train.py:1114] (1/4) Epoch 17, batch 200, loss[loss=0.1965, simple_loss=0.2734, pruned_loss=0.04333, ctc_loss=0.08217, over 18150.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2644, pruned_loss=0.04279, ctc_loss=0.07978, over 2437042.07 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:10:55,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=213472.0, ans=0.0 +2024-08-27 02:11:06,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213525.33333333334, ans=0.1 +2024-08-27 02:11:06,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213525.33333333334, ans=0.125 +2024-08-27 02:11:09,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.19 vs. limit=15.0 +2024-08-27 02:11:19,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=213578.66666666666, ans=0.125 +2024-08-27 02:11:30,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=213632.0, ans=0.125 +2024-08-27 02:11:41,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=213685.33333333334, ans=0.1 +2024-08-27 02:11:42,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.66 vs. limit=15.0 +2024-08-27 02:11:49,176 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.468e+02 1.730e+02 2.457e+02 4.645e+02, threshold=3.460e+02, percent-clipped=6.0 +2024-08-27 02:11:49,210 INFO [train.py:1114] (1/4) Epoch 17, batch 250, loss[loss=0.205, simple_loss=0.2787, pruned_loss=0.04874, ctc_loss=0.08484, over 19395.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2639, pruned_loss=0.04229, ctc_loss=0.07893, over 2757247.20 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:11:56,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=15.0 +2024-08-27 02:12:18,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=213845.33333333334, ans=0.02 +2024-08-27 02:12:39,243 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:14:34,927 INFO [train.py:1114] (1/4) Epoch 17, batch 300, loss[loss=0.2064, simple_loss=0.2821, pruned_loss=0.04741, ctc_loss=0.08968, over 19510.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2632, pruned_loss=0.04205, ctc_loss=0.07864, over 3001398.30 frames. 
], batch size: 61, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:14:48,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=214058.66666666666, ans=0.125 +2024-08-27 02:16:42,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=214218.66666666666, ans=10.0 +2024-08-27 02:16:48,690 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.705e+02 2.074e+02 4.169e+02, threshold=3.410e+02, percent-clipped=2.0 +2024-08-27 02:16:48,724 INFO [train.py:1114] (1/4) Epoch 17, batch 350, loss[loss=0.1737, simple_loss=0.2474, pruned_loss=0.03663, ctc_loss=0.06692, over 19756.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2639, pruned_loss=0.04237, ctc_loss=0.07898, over 3191743.97 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 16.0 +2024-08-27 02:16:48,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214272.0, ans=0.0 +2024-08-27 02:16:50,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=214272.0, ans=0.125 +2024-08-27 02:16:57,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=214325.33333333334, ans=0.125 +2024-08-27 02:17:25,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=214432.0, ans=0.125 +2024-08-27 02:17:30,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=214485.33333333334, ans=0.0 +2024-08-27 02:17:31,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=214485.33333333334, ans=0.125 +2024-08-27 02:17:36,075 INFO [train.py:1114] (1/4) Epoch 17, batch 400, loss[loss=0.1792, simple_loss=0.2641, pruned_loss=0.03435, ctc_loss=0.06392, over 19487.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2635, pruned_loss=0.04228, ctc_loss=0.07886, over 3343628.60 frames. 
], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-27 02:17:39,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=214538.66666666666, ans=0.02 +2024-08-27 02:17:42,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214538.66666666666, ans=0.1 +2024-08-27 02:17:55,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-27 02:18:01,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214645.33333333334, ans=0.1 +2024-08-27 02:18:07,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=214698.66666666666, ans=0.125 +2024-08-27 02:18:23,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214752.0, ans=0.125 +2024-08-27 02:18:25,574 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.479e+02 1.707e+02 2.031e+02 4.496e+02, threshold=3.413e+02, percent-clipped=2.0 +2024-08-27 02:18:25,608 INFO [train.py:1114] (1/4) Epoch 17, batch 450, loss[loss=0.2009, simple_loss=0.2815, pruned_loss=0.04467, ctc_loss=0.07735, over 19605.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2637, pruned_loss=0.04243, ctc_loss=0.07913, over 3450831.97 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:18:28,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214805.33333333334, ans=0.1 +2024-08-27 02:19:13,500 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:19:15,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=215018.66666666666, ans=0.125 +2024-08-27 02:19:18,862 INFO [train.py:1114] (1/4) Epoch 17, batch 500, loss[loss=0.195, simple_loss=0.274, pruned_loss=0.04208, ctc_loss=0.07983, over 19661.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2625, pruned_loss=0.04183, ctc_loss=0.07812, over 3546821.36 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:19:43,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-08-27 02:19:43,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.06 vs. limit=10.0 +2024-08-27 02:19:44,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=215178.66666666666, ans=0.125 +2024-08-27 02:20:25,034 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:20:26,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. limit=10.0 +2024-08-27 02:20:44,559 INFO [train.py:1114] (1/4) Epoch 17, batch 550, loss[loss=0.1933, simple_loss=0.2681, pruned_loss=0.04352, ctc_loss=0.07893, over 19299.00 frames. 
], tot_loss[loss=0.1886, simple_loss=0.2622, pruned_loss=0.04185, ctc_loss=0.07809, over 3608064.98 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 16.0 +2024-08-27 02:20:45,395 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.446e+02 1.711e+02 2.254e+02 3.980e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-27 02:20:47,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215338.66666666666, ans=0.125 +2024-08-27 02:20:50,274 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:20:53,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=215392.0, ans=0.0 +2024-08-27 02:20:53,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.84 vs. limit=10.0 +2024-08-27 02:20:55,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=215392.0, ans=0.125 +2024-08-27 02:21:09,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=22.5 +2024-08-27 02:21:10,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=215445.33333333334, ans=0.125 +2024-08-27 02:21:43,267 INFO [train.py:1114] (1/4) Epoch 17, batch 600, loss[loss=0.2073, simple_loss=0.2854, pruned_loss=0.04758, ctc_loss=0.08538, over 19430.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2622, pruned_loss=0.04163, ctc_loss=0.07763, over 3665994.82 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:21:43,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=215605.33333333334, ans=0.2 +2024-08-27 02:21:44,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215605.33333333334, ans=0.0 +2024-08-27 02:21:47,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=215605.33333333334, ans=0.125 +2024-08-27 02:21:57,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215658.66666666666, ans=0.1 +2024-08-27 02:22:34,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215818.66666666666, ans=0.1 +2024-08-27 02:22:35,837 INFO [train.py:1114] (1/4) Epoch 17, batch 650, loss[loss=0.1736, simple_loss=0.2501, pruned_loss=0.03527, ctc_loss=0.06642, over 19764.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2614, pruned_loss=0.04126, ctc_loss=0.07701, over 3716311.99 frames. 
], batch size: 54, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:22:36,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=215872.0, ans=0.025 +2024-08-27 02:22:36,655 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.454e+02 1.765e+02 2.281e+02 4.784e+02, threshold=3.530e+02, percent-clipped=4.0 +2024-08-27 02:22:51,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.89 vs. limit=15.0 +2024-08-27 02:23:09,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=216032.0, ans=0.05 +2024-08-27 02:23:11,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=216032.0, ans=0.125 +2024-08-27 02:23:14,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216032.0, ans=0.125 +2024-08-27 02:23:16,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.37 vs. limit=15.0 +2024-08-27 02:23:23,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=216085.33333333334, ans=0.125 +2024-08-27 02:23:25,366 INFO [train.py:1114] (1/4) Epoch 17, batch 700, loss[loss=0.169, simple_loss=0.2512, pruned_loss=0.03185, ctc_loss=0.05768, over 19726.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2624, pruned_loss=0.0416, ctc_loss=0.07762, over 3749338.22 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:23:34,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=216192.0, ans=0.125 +2024-08-27 02:23:50,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=216245.33333333334, ans=0.0 +2024-08-27 02:27:51,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=216352.0, ans=0.025 +2024-08-27 02:28:51,342 INFO [train.py:1114] (1/4) Epoch 17, batch 750, loss[loss=0.1814, simple_loss=0.2661, pruned_loss=0.03495, ctc_loss=0.06723, over 19479.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2619, pruned_loss=0.04149, ctc_loss=0.07744, over 3775726.61 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:29:21,550 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.483e+02 1.820e+02 2.509e+02 4.091e+02, threshold=3.640e+02, percent-clipped=8.0 +2024-08-27 02:32:03,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216405.33333333334, ans=0.1 +2024-08-27 02:32:31,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=216458.66666666666, ans=0.025 +2024-08-27 02:33:17,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.53 vs. 
limit=12.0 +2024-08-27 02:36:16,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216565.33333333334, ans=0.1 +2024-08-27 02:38:07,119 INFO [train.py:1114] (1/4) Epoch 17, batch 800, loss[loss=0.1785, simple_loss=0.237, pruned_loss=0.0438, ctc_loss=0.08114, over 19834.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.262, pruned_loss=0.0417, ctc_loss=0.07799, over 3795773.92 frames. ], batch size: 49, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:40:02,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=216778.66666666666, ans=0.125 +2024-08-27 02:40:07,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=216778.66666666666, ans=0.2 +2024-08-27 02:40:22,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216832.0, ans=0.1 +2024-08-27 02:40:43,403 INFO [train.py:1114] (1/4) Epoch 17, batch 850, loss[loss=0.213, simple_loss=0.2864, pruned_loss=0.0506, ctc_loss=0.09607, over 19643.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.262, pruned_loss=0.04171, ctc_loss=0.07798, over 3815745.90 frames. ], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:40:44,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.490e+02 1.788e+02 2.181e+02 3.218e+02, threshold=3.576e+02, percent-clipped=0.0 +2024-08-27 02:40:46,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216938.66666666666, ans=0.1 +2024-08-27 02:40:53,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=216992.0, ans=0.2 +2024-08-27 02:40:56,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=216992.0, ans=0.125 +2024-08-27 02:41:19,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=217045.33333333334, ans=0.0 +2024-08-27 02:41:23,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=217045.33333333334, ans=0.125 +2024-08-27 02:41:29,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217098.66666666666, ans=0.1 +2024-08-27 02:41:32,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217098.66666666666, ans=0.125 +2024-08-27 02:41:34,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=217098.66666666666, ans=0.125 +2024-08-27 02:41:35,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=217098.66666666666, ans=0.125 +2024-08-27 02:41:41,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217152.0, ans=0.1 +2024-08-27 02:41:48,135 INFO [train.py:1114] (1/4) Epoch 17, batch 900, loss[loss=0.1688, simple_loss=0.2415, pruned_loss=0.03507, ctc_loss=0.06492, over 19805.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2625, pruned_loss=0.04219, ctc_loss=0.07889, over 3819499.53 frames. 
], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:41:54,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=217205.33333333334, ans=0.0 +2024-08-27 02:42:02,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=217258.66666666666, ans=0.125 +2024-08-27 02:42:23,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=217365.33333333334, ans=0.125 +2024-08-27 02:42:24,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217365.33333333334, ans=0.125 +2024-08-27 02:42:42,362 INFO [train.py:1114] (1/4) Epoch 17, batch 950, loss[loss=0.1733, simple_loss=0.2381, pruned_loss=0.03921, ctc_loss=0.07513, over 19497.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2632, pruned_loss=0.0425, ctc_loss=0.07945, over 3821559.39 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:42:43,222 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.442e+02 1.596e+02 1.963e+02 3.277e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-27 02:44:41,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=217632.0, ans=0.0 +2024-08-27 02:44:46,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=217632.0, ans=0.125 +2024-08-27 02:45:22,498 INFO [train.py:1114] (1/4) Epoch 17, batch 1000, loss[loss=0.1736, simple_loss=0.2529, pruned_loss=0.03391, ctc_loss=0.06621, over 19883.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2638, pruned_loss=0.04258, ctc_loss=0.0797, over 3817867.61 frames. ], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:45:24,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=217738.66666666666, ans=0.0 +2024-08-27 02:45:24,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217738.66666666666, ans=0.125 +2024-08-27 02:45:27,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=217738.66666666666, ans=0.1 +2024-08-27 02:45:48,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=217845.33333333334, ans=0.025 +2024-08-27 02:46:02,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=217845.33333333334, ans=0.2 +2024-08-27 02:46:02,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217845.33333333334, ans=0.125 +2024-08-27 02:46:09,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=217898.66666666666, ans=0.125 +2024-08-27 02:46:23,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=217952.0, ans=0.125 +2024-08-27 02:46:28,538 INFO [train.py:1114] (1/4) Epoch 17, batch 1050, loss[loss=0.1878, simple_loss=0.2718, pruned_loss=0.0382, ctc_loss=0.0686, over 19821.00 frames. 
], tot_loss[loss=0.1893, simple_loss=0.2628, pruned_loss=0.04214, ctc_loss=0.07894, over 3824747.58 frames. ], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:46:29,437 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.403e+02 1.586e+02 2.025e+02 2.959e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-27 02:46:35,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0 +2024-08-27 02:47:09,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=218112.0, ans=0.125 +2024-08-27 02:47:38,611 INFO [train.py:1114] (1/4) Epoch 17, batch 1100, loss[loss=0.1841, simple_loss=0.2633, pruned_loss=0.03834, ctc_loss=0.07028, over 19584.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2628, pruned_loss=0.04197, ctc_loss=0.07849, over 3831537.03 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:47:38,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=218272.0, ans=0.0 +2024-08-27 02:47:43,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=218272.0, ans=0.0 +2024-08-27 02:47:45,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=218272.0, ans=0.125 +2024-08-27 02:48:25,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-27 02:48:47,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.12 vs. limit=15.0 +2024-08-27 02:48:57,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218378.66666666666, ans=0.1 +2024-08-27 02:49:11,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218432.0, ans=0.1 +2024-08-27 02:49:14,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.13 vs. limit=15.0 +2024-08-27 02:49:18,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=218485.33333333334, ans=0.125 +2024-08-27 02:49:27,451 INFO [train.py:1114] (1/4) Epoch 17, batch 1150, loss[loss=0.1793, simple_loss=0.2528, pruned_loss=0.03918, ctc_loss=0.06846, over 19583.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2629, pruned_loss=0.04213, ctc_loss=0.07881, over 3831079.30 frames. 
], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:49:28,306 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.459e+02 1.619e+02 1.965e+02 3.390e+02, threshold=3.239e+02, percent-clipped=1.0 +2024-08-27 02:49:29,562 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:49:46,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=218645.33333333334, ans=0.125 +2024-08-27 02:49:49,367 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:49:53,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=218645.33333333334, ans=0.125 +2024-08-27 02:49:59,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=218698.66666666666, ans=0.125 +2024-08-27 02:50:02,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=218698.66666666666, ans=0.0 +2024-08-27 02:50:03,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-08-27 02:50:14,225 INFO [train.py:1114] (1/4) Epoch 17, batch 1200, loss[loss=0.188, simple_loss=0.2691, pruned_loss=0.03891, ctc_loss=0.07259, over 19841.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2635, pruned_loss=0.04227, ctc_loss=0.0791, over 3825453.22 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:50:38,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218912.0, ans=0.0 +2024-08-27 02:51:04,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=219018.66666666666, ans=10.0 +2024-08-27 02:51:28,548 INFO [train.py:1114] (1/4) Epoch 17, batch 1250, loss[loss=0.2159, simple_loss=0.2799, pruned_loss=0.05595, ctc_loss=0.09992, over 19520.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2637, pruned_loss=0.04238, ctc_loss=0.07915, over 3843634.46 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:51:29,444 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.488e+02 1.826e+02 2.228e+02 3.440e+02, threshold=3.652e+02, percent-clipped=1.0 +2024-08-27 02:51:46,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0 +2024-08-27 02:52:01,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=219232.0, ans=0.125 +2024-08-27 02:52:40,149 INFO [train.py:1114] (1/4) Epoch 17, batch 1300, loss[loss=0.2098, simple_loss=0.2784, pruned_loss=0.05153, ctc_loss=0.09535, over 18867.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2627, pruned_loss=0.04199, ctc_loss=0.07846, over 3847381.66 frames. 
], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:52:41,266 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:52:48,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.00 vs. limit=15.0 +2024-08-27 02:53:40,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=219552.0, ans=0.2 +2024-08-27 02:53:49,257 INFO [train.py:1114] (1/4) Epoch 17, batch 1350, loss[loss=0.1913, simple_loss=0.2746, pruned_loss=0.03869, ctc_loss=0.0765, over 19769.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2621, pruned_loss=0.04161, ctc_loss=0.07779, over 3857863.66 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:53:50,132 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.487e+02 1.709e+02 2.118e+02 3.687e+02, threshold=3.418e+02, percent-clipped=1.0 +2024-08-27 02:53:53,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=219605.33333333334, ans=0.125 +2024-08-27 02:54:05,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=219658.66666666666, ans=0.0 +2024-08-27 02:54:06,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=219712.0, ans=0.125 +2024-08-27 02:54:14,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219712.0, ans=0.125 +2024-08-27 02:54:47,051 INFO [train.py:1114] (1/4) Epoch 17, batch 1400, loss[loss=0.189, simple_loss=0.2543, pruned_loss=0.04492, ctc_loss=0.08475, over 19679.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2617, pruned_loss=0.04153, ctc_loss=0.07754, over 3864231.98 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:54:49,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.35 vs. limit=6.0 +2024-08-27 02:54:52,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219872.0, ans=0.1 +2024-08-27 02:54:59,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=219925.33333333334, ans=0.125 +2024-08-27 02:55:18,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.79 vs. limit=15.0 +2024-08-27 02:55:30,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=220085.33333333334, ans=0.04949747468305833 +2024-08-27 02:55:40,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=220138.66666666666, ans=0.0 +2024-08-27 02:55:41,669 INFO [train.py:1114] (1/4) Epoch 17, batch 1450, loss[loss=0.2176, simple_loss=0.2908, pruned_loss=0.05324, ctc_loss=0.09474, over 19695.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2627, pruned_loss=0.04197, ctc_loss=0.0783, over 3862167.13 frames. 
], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:55:42,529 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.445e+02 1.654e+02 2.032e+02 3.496e+02, threshold=3.307e+02, percent-clipped=1.0 +2024-08-27 02:55:57,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220192.0, ans=0.1 +2024-08-27 02:56:22,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.12 vs. limit=15.0 +2024-08-27 02:56:24,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=220352.0, ans=0.0 +2024-08-27 02:56:27,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=220352.0, ans=22.5 +2024-08-27 02:56:33,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.whiten.whitening_limit, batch_count=220352.0, ans=12.0 +2024-08-27 02:56:35,344 INFO [train.py:1114] (1/4) Epoch 17, batch 1500, loss[loss=0.1867, simple_loss=0.2669, pruned_loss=0.03867, ctc_loss=0.07303, over 19565.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.263, pruned_loss=0.04177, ctc_loss=0.07783, over 3862445.51 frames. ], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:56:46,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=220458.66666666666, ans=0.025 +2024-08-27 02:56:51,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=220458.66666666666, ans=0.125 +2024-08-27 02:57:21,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=220618.66666666666, ans=0.0 +2024-08-27 02:57:22,839 INFO [train.py:1114] (1/4) Epoch 17, batch 1550, loss[loss=0.2014, simple_loss=0.2758, pruned_loss=0.04633, ctc_loss=0.08596, over 19604.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2634, pruned_loss=0.0421, ctc_loss=0.07865, over 3847588.66 frames. ], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:57:23,802 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.433e+02 1.700e+02 2.311e+02 3.923e+02, threshold=3.401e+02, percent-clipped=1.0 +2024-08-27 02:57:24,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=220672.0, ans=0.025 +2024-08-27 02:57:53,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220725.33333333334, ans=0.125 +2024-08-27 02:57:55,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220725.33333333334, ans=0.125 +2024-08-27 02:58:16,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=220832.0, ans=0.2 +2024-08-27 02:58:27,693 INFO [train.py:1114] (1/4) Epoch 17, batch 1600, loss[loss=0.1911, simple_loss=0.2706, pruned_loss=0.03965, ctc_loss=0.08044, over 19845.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2636, pruned_loss=0.04233, ctc_loss=0.07907, over 3836188.10 frames. 
], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-27 02:59:27,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.76 vs. limit=22.5 +2024-08-27 02:59:31,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=220992.0, ans=0.125 +2024-08-27 03:00:06,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=221045.33333333334, ans=0.125 +2024-08-27 03:00:29,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.88 vs. limit=22.5 +2024-08-27 03:00:53,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=221152.0, ans=0.2 +2024-08-27 03:00:55,470 INFO [train.py:1114] (1/4) Epoch 17, batch 1650, loss[loss=0.1881, simple_loss=0.2653, pruned_loss=0.04009, ctc_loss=0.07687, over 19679.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2634, pruned_loss=0.0422, ctc_loss=0.07877, over 3831818.43 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:00:56,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0 +2024-08-27 03:00:58,237 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.431e+02 1.952e+02 2.452e+02 3.980e+02, threshold=3.905e+02, percent-clipped=5.0 +2024-08-27 03:01:02,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=221205.33333333334, ans=0.0 +2024-08-27 03:01:07,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=221258.66666666666, ans=0.2 +2024-08-27 03:01:19,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.22 vs. limit=15.0 +2024-08-27 03:01:22,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.06 vs. limit=15.0 +2024-08-27 03:01:39,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=221365.33333333334, ans=0.125 +2024-08-27 03:01:51,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=221418.66666666666, ans=22.5 +2024-08-27 03:01:57,110 INFO [train.py:1114] (1/4) Epoch 17, batch 1700, loss[loss=0.1792, simple_loss=0.247, pruned_loss=0.04043, ctc_loss=0.0765, over 19675.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2632, pruned_loss=0.04213, ctc_loss=0.07863, over 3845734.92 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:02:05,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-27 03:02:07,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.30 vs. 
limit=6.0 +2024-08-27 03:02:08,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-27 03:02:16,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-27 03:02:34,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=221632.0, ans=0.0 +2024-08-27 03:02:39,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=221685.33333333334, ans=0.0 +2024-08-27 03:02:45,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=221685.33333333334, ans=0.04949747468305833 +2024-08-27 03:02:48,263 INFO [train.py:1114] (1/4) Epoch 17, batch 1750, loss[loss=0.1753, simple_loss=0.2377, pruned_loss=0.0417, ctc_loss=0.07389, over 19647.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2626, pruned_loss=0.04207, ctc_loss=0.07852, over 3850469.71 frames. ], batch size: 45, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:02:49,980 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.526e+02 1.896e+02 2.459e+02 4.889e+02, threshold=3.791e+02, percent-clipped=1.0 +2024-08-27 03:03:10,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=221792.0, ans=0.0 +2024-08-27 03:03:13,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=221792.0, ans=0.2 +2024-08-27 03:03:20,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.42 vs. limit=15.0 +2024-08-27 03:03:29,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=221898.66666666666, ans=0.125 +2024-08-27 03:03:33,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221898.66666666666, ans=0.1 +2024-08-27 03:03:42,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221952.0, ans=0.1 +2024-08-27 03:03:46,723 INFO [train.py:1114] (1/4) Epoch 17, batch 1800, loss[loss=0.1955, simple_loss=0.2737, pruned_loss=0.04183, ctc_loss=0.08431, over 19604.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2629, pruned_loss=0.04203, ctc_loss=0.0785, over 3851798.91 frames. ], batch size: 55, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:03:47,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222005.33333333334, ans=0.1 +2024-08-27 03:03:48,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222005.33333333334, ans=0.125 +2024-08-27 03:03:48,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.51 vs. 
limit=6.0
+2024-08-27 03:03:53,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222005.33333333334, ans=0.1
+2024-08-27 03:03:56,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=222058.66666666666, ans=0.125
+2024-08-27 03:03:56,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.65 vs. limit=15.0
+2024-08-27 03:04:10,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.55 vs. limit=22.5
+2024-08-27 03:04:30,761 INFO [train.py:1114] (1/4) Epoch 17, batch 1850, loss[loss=0.1973, simple_loss=0.2775, pruned_loss=0.04331, ctc_loss=0.07593, over 19592.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2627, pruned_loss=0.04175, ctc_loss=0.07781, over 3854949.06 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0
+2024-08-27 03:04:32,493 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.484e+02 1.846e+02 2.436e+02 4.218e+02, threshold=3.691e+02, percent-clipped=2.0
+2024-08-27 03:04:46,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=222325.33333333334, ans=0.125
+2024-08-27 03:04:50,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.56 vs. limit=22.5
+2024-08-27 03:05:13,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=222485.33333333334, ans=0.125
+2024-08-27 03:05:14,599 INFO [train.py:1114] (1/4) Epoch 17, batch 1900, loss[loss=0.2078, simple_loss=0.2871, pruned_loss=0.04752, ctc_loss=0.08366, over 19647.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2628, pruned_loss=0.04158, ctc_loss=0.07751, over 3860703.66 frames. ], batch size: 59, lr: 8.75e-03, grad_scale: 16.0
+2024-08-27 03:05:21,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0
+2024-08-27 03:05:45,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=222698.66666666666, ans=0.125
+2024-08-27 03:05:57,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=222752.0, ans=0.0
+2024-08-27 03:06:00,580 INFO [train.py:1114] (1/4) Epoch 17, batch 1950, loss[loss=0.1742, simple_loss=0.2538, pruned_loss=0.0338, ctc_loss=0.06768, over 19595.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2637, pruned_loss=0.04178, ctc_loss=0.07793, over 3870458.50 frames. ], batch size: 52, lr: 8.74e-03, grad_scale: 16.0
+2024-08-27 03:06:02,414 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.462e+02 1.715e+02 2.122e+02 4.504e+02, threshold=3.430e+02, percent-clipped=1.0
+2024-08-27 03:06:11,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=222858.66666666666, ans=0.0
+2024-08-27 03:06:27,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=222965.33333333334, ans=0.2
+2024-08-27 03:06:27,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=222965.33333333334, ans=0.125
+2024-08-27 03:06:38,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.60 vs. limit=22.5
+2024-08-27 03:06:48,020 INFO [train.py:1114] (1/4) Epoch 17, batch 2000, loss[loss=0.159, simple_loss=0.2304, pruned_loss=0.03214, ctc_loss=0.05836, over 19625.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2647, pruned_loss=0.04246, ctc_loss=0.0792, over 3856646.93 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0
+2024-08-27 03:06:49,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=223072.0, ans=0.0
+2024-08-27 03:06:50,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=15.0
+2024-08-27 03:09:30,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=223285.33333333334, ans=0.125
+2024-08-27 03:09:33,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=223285.33333333334, ans=0.035
+2024-08-27 03:09:41,585 INFO [train.py:1114] (1/4) Epoch 17, batch 2050, loss[loss=0.1648, simple_loss=0.2348, pruned_loss=0.03492, ctc_loss=0.06243, over 19729.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2634, pruned_loss=0.04212, ctc_loss=0.07865, over 3852531.93 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0
+2024-08-27 03:09:43,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.433e+02 1.718e+02 2.194e+02 3.489e+02, threshold=3.436e+02, percent-clipped=1.0
+2024-08-27 03:09:52,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=223338.66666666666, ans=0.2
+2024-08-27 03:10:35,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0
+2024-08-27 03:13:12,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=223552.0, ans=0.125
+2024-08-27 03:13:17,715 INFO [train.py:1114] (1/4) Epoch 17, batch 2100, loss[loss=0.1713, simple_loss=0.2498, pruned_loss=0.03353, ctc_loss=0.06419, over 19766.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2626, pruned_loss=0.04169, ctc_loss=0.0779, over 3859529.90 frames. ], batch size: 54, lr: 8.73e-03, grad_scale: 32.0
+2024-08-27 03:13:55,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=223658.66666666666, ans=0.125
+2024-08-27 03:23:47,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.61 vs. limit=10.0
+2024-08-27 03:24:00,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=223818.66666666666, ans=0.125
+2024-08-27 03:24:47,085 INFO [train.py:1114] (1/4) Epoch 17, batch 2150, loss[loss=0.1697, simple_loss=0.2489, pruned_loss=0.03314, ctc_loss=0.06055, over 19570.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2621, pruned_loss=0.04169, ctc_loss=0.07785, over 3870502.16 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0
+2024-08-27 03:24:47,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223872.0, ans=0.1
+2024-08-27 03:24:49,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.181e+02 1.464e+02 1.691e+02 2.317e+02 5.931e+02, threshold=3.382e+02, percent-clipped=6.0
+2024-08-27 03:24:54,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=223872.0, ans=0.125
+2024-08-27 03:25:03,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=223872.0, ans=0.0
+2024-08-27 03:25:30,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=12.0
+2024-08-27 03:26:13,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.61 vs. limit=10.0
+2024-08-27 03:26:31,804 INFO [train.py:1114] (1/4) Epoch 17, batch 2200, loss[loss=0.1934, simple_loss=0.2725, pruned_loss=0.0416, ctc_loss=0.07767, over 19579.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.262, pruned_loss=0.04165, ctc_loss=0.07763, over 3867850.87 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0
+2024-08-27 03:27:07,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=224298.66666666666, ans=0.2
+2024-08-27 03:27:07,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0
+2024-08-27 03:27:09,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224298.66666666666, ans=0.125
+2024-08-27 03:27:26,236 INFO [train.py:1114] (1/4) Epoch 17, batch 2250, loss[loss=0.1844, simple_loss=0.2647, pruned_loss=0.0377, ctc_loss=0.07199, over 19616.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2624, pruned_loss=0.04178, ctc_loss=0.07801, over 3867771.57 frames. ], batch size: 55, lr: 8.71e-03, grad_scale: 32.0
+2024-08-27 03:27:29,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.517e+02 1.774e+02 2.256e+02 3.791e+02, threshold=3.548e+02, percent-clipped=1.0
+2024-08-27 03:27:50,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=224458.66666666666, ans=0.2
+2024-08-27 03:27:52,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.88 vs. limit=15.0
+2024-08-27 03:29:04,920 INFO [train.py:1114] (1/4) Epoch 17, batch 2300, loss[loss=0.1631, simple_loss=0.2336, pruned_loss=0.03348, ctc_loss=0.06434, over 19506.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.262, pruned_loss=0.04194, ctc_loss=0.07841, over 3861446.76 frames. ], batch size: 49, lr: 8.71e-03, grad_scale: 16.0
+2024-08-27 03:29:10,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=224672.0, ans=0.2
+2024-08-27 03:29:16,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=224725.33333333334, ans=0.0
+2024-08-27 03:29:24,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=6.0
+2024-08-27 03:30:49,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224778.66666666666, ans=0.125
+2024-08-27 03:32:21,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0
+2024-08-27 03:36:20,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224885.33333333334, ans=0.1
+2024-08-27 03:36:29,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224885.33333333334, ans=0.1
+2024-08-27 03:36:49,581 INFO [train.py:1114] (1/4) Epoch 17, batch 2350, loss[loss=0.2005, simple_loss=0.2766, pruned_loss=0.04618, ctc_loss=0.08034, over 19670.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2621, pruned_loss=0.04206, ctc_loss=0.07835, over 3863977.51 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 16.0
+2024-08-27 03:37:01,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.439e+02 1.647e+02 2.102e+02 4.091e+02, threshold=3.295e+02, percent-clipped=1.0
+2024-08-27 03:37:22,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=224992.0, ans=0.125
+2024-08-27 03:37:30,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=224992.0, ans=0.125
+2024-08-27 03:37:37,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=224992.0, ans=0.0
+2024-08-27 03:37:40,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0
+2024-08-27 03:37:53,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0
+2024-08-27 03:38:20,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=225098.66666666666, ans=0.125
+2024-08-27 03:39:26,221 INFO [train.py:1114] (1/4) Epoch 17, batch 2400, loss[loss=0.2131, simple_loss=0.2857, pruned_loss=0.05097, ctc_loss=0.09628, over 19262.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2636, pruned_loss=0.04247, ctc_loss=0.07909, over 3858306.35 frames. ], batch size: 71, lr: 8.70e-03, grad_scale: 32.0
+2024-08-27 03:41:34,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=12.0
+2024-08-27 03:42:54,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=225365.33333333334, ans=0.125
+2024-08-27 03:43:47,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=225365.33333333334, ans=0.125
+2024-08-27 03:43:56,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.44 vs. limit=15.0
+2024-08-27 03:44:00,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=225418.66666666666, ans=0.125
+2024-08-27 03:44:22,632 INFO [train.py:1114] (1/4) Epoch 17, batch 2450, loss[loss=0.2586, simple_loss=0.3022, pruned_loss=0.07836, ctc_loss=0.1458, over 13116.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2674, pruned_loss=0.04508, ctc_loss=0.08415, over 3731084.46 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 32.0
+2024-08-27 03:44:30,564 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.519e+02 1.805e+02 2.064e+02 2.900e+02, threshold=3.609e+02, percent-clipped=0.0
+2024-08-27 03:44:39,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.95 vs. limit=22.5
+2024-08-27 03:45:47,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=225525.33333333334, ans=0.5
+2024-08-27 03:45:47,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225525.33333333334, ans=0.1
+2024-08-27 03:47:09,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=225578.66666666666, ans=0.04949747468305833
+2024-08-27 03:50:08,498 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1598, simple_loss=0.2341, pruned_loss=0.03049, ctc_loss=0.06103, over 19410.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2341, pruned_loss=0.03049, ctc_loss=0.06103, over 19410.00 frames. ], batch size: 48, lr: 8.44e-03, grad_scale: 32.0
+2024-08-27 03:50:08,499 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-27 03:56:59,334 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1731, simple_loss=0.2653, pruned_loss=0.0303, ctc_loss=0.05087, over 944034.00 frames.
+2024-08-27 03:56:59,336 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB
+2024-08-27 03:58:10,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=15.0
+2024-08-27 03:58:49,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=225786.66666666666, ans=0.0
+2024-08-27 03:59:36,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=225893.33333333334, ans=0.07
+2024-08-27 03:59:40,036 INFO [train.py:1114] (1/4) Epoch 18, batch 50, loss[loss=0.1642, simple_loss=0.2337, pruned_loss=0.03461, ctc_loss=0.06345, over 19735.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2632, pruned_loss=0.04249, ctc_loss=0.08033, over 843693.80 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0
+2024-08-27 03:59:51,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=226000.0, ans=0.0
+2024-08-27 03:59:52,928 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.620e+02 1.870e+02 2.127e+02 3.474e+02, threshold=3.740e+02, percent-clipped=0.0
+2024-08-27 04:00:01,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226053.33333333334, ans=0.1
+2024-08-27 04:00:09,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0
+2024-08-27 04:00:10,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=226106.66666666666, ans=0.125
+2024-08-27 04:00:23,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226160.0, ans=0.0
+2024-08-27 04:00:34,086 INFO [train.py:1114] (1/4) Epoch 18, batch 100, loss[loss=0.1819, simple_loss=0.2559, pruned_loss=0.03934, ctc_loss=0.07288, over 19719.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2648, pruned_loss=0.04222, ctc_loss=0.07928, over 1498452.85 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0
+2024-08-27 04:04:08,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=226266.66666666666, ans=0.125
+2024-08-27 04:05:20,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.53 vs. limit=22.5
+2024-08-27 04:05:27,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=226320.0, ans=0.1
+2024-08-27 04:05:34,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=226320.0, ans=0.125
+2024-08-27 04:05:38,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=226320.0, ans=0.025
+2024-08-27 04:05:56,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0
+2024-08-27 04:05:57,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=226426.66666666666, ans=0.125
+2024-08-27 04:05:58,783 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:06:00,492 INFO [train.py:1114] (1/4) Epoch 18, batch 150, loss[loss=0.1786, simple_loss=0.2444, pruned_loss=0.04118, ctc_loss=0.07631, over 19694.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2624, pruned_loss=0.04137, ctc_loss=0.07804, over 2027899.17 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0
+2024-08-27 04:06:14,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226533.33333333334, ans=0.125
+2024-08-27 04:06:16,208 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.462e+02 1.764e+02 2.186e+02 3.977e+02, threshold=3.529e+02, percent-clipped=1.0
+2024-08-27 04:06:16,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=226533.33333333334, ans=0.125
+2024-08-27 04:06:18,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226533.33333333334, ans=0.125
+2024-08-27 04:06:49,832 INFO [train.py:1114] (1/4) Epoch 18, batch 200, loss[loss=0.194, simple_loss=0.2699, pruned_loss=0.04249, ctc_loss=0.08252, over 18231.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2619, pruned_loss=0.0414, ctc_loss=0.0779, over 2435641.98 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0
+2024-08-27 04:06:58,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=226800.0, ans=0.125
+2024-08-27 04:07:11,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=226853.33333333334, ans=0.05
+2024-08-27 04:07:13,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=226853.33333333334, ans=0.2
+2024-08-27 04:07:22,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=226906.66666666666, ans=0.025
+2024-08-27 04:07:22,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=226906.66666666666, ans=0.125
+2024-08-27 04:07:22,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=226906.66666666666, ans=0.0
+2024-08-27 04:07:35,896 INFO [train.py:1114] (1/4) Epoch 18, batch 250, loss[loss=0.2155, simple_loss=0.2881, pruned_loss=0.05219, ctc_loss=0.09606, over 19375.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2615, pruned_loss=0.04104, ctc_loss=0.07697, over 2757155.58 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0
+2024-08-27 04:07:41,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0
+2024-08-27 04:07:44,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=227013.33333333334, ans=0.125
+2024-08-27 04:07:46,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=227066.66666666666, ans=0.125
+2024-08-27 04:07:50,816 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.244e+02 1.521e+02 1.873e+02 2.606e+02 4.367e+02, threshold=3.746e+02, percent-clipped=8.0
+2024-08-27 04:08:03,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227120.0, ans=0.1
+2024-08-27 04:08:16,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=227173.33333333334, ans=0.2
+2024-08-27 04:08:17,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=227173.33333333334, ans=0.1
+2024-08-27 04:08:31,219 INFO [train.py:1114] (1/4) Epoch 18, batch 300, loss[loss=0.1896, simple_loss=0.2678, pruned_loss=0.03941, ctc_loss=0.08146, over 19505.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2607, pruned_loss=0.04062, ctc_loss=0.07598, over 3002465.67 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0
+2024-08-27 04:08:44,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227333.33333333334, ans=0.1
+2024-08-27 04:08:54,808 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:09:04,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227440.0, ans=0.1
+2024-08-27 04:09:06,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.18 vs. limit=15.0
+2024-08-27 04:09:17,362 INFO [train.py:1114] (1/4) Epoch 18, batch 350, loss[loss=0.1519, simple_loss=0.2285, pruned_loss=0.02798, ctc_loss=0.04824, over 19745.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2612, pruned_loss=0.04093, ctc_loss=0.07636, over 3192448.38 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0
+2024-08-27 04:09:17,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227546.66666666666, ans=0.1
+2024-08-27 04:09:18,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=8.0
+2024-08-27 04:09:30,336 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.460e+02 1.643e+02 1.956e+02 3.165e+02, threshold=3.287e+02, percent-clipped=0.0
+2024-08-27 04:10:17,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227706.66666666666, ans=0.0
+2024-08-27 04:10:33,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227760.0, ans=0.125
+2024-08-27 04:10:35,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227760.0, ans=0.0
+2024-08-27 04:10:40,379 INFO [train.py:1114] (1/4) Epoch 18, batch 400, loss[loss=0.1711, simple_loss=0.2574, pruned_loss=0.03035, ctc_loss=0.0601, over 19499.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.261, pruned_loss=0.04068, ctc_loss=0.07573, over 3342545.79 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0
+2024-08-27 04:10:48,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=227813.33333333334, ans=0.025
+2024-08-27 04:12:28,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.08 vs. limit=15.0
+2024-08-27 04:12:32,842 INFO [train.py:1114] (1/4) Epoch 18, batch 450, loss[loss=0.2, simple_loss=0.2776, pruned_loss=0.0441, ctc_loss=0.08526, over 19601.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2612, pruned_loss=0.04081, ctc_loss=0.07615, over 3451509.51 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0
+2024-08-27 04:12:34,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=228080.0, ans=0.1
+2024-08-27 04:12:59,754 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.395e+02 1.673e+02 2.305e+02 3.910e+02, threshold=3.347e+02, percent-clipped=3.0
+2024-08-27 04:13:04,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=228186.66666666666, ans=0.0
+2024-08-27 04:13:15,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=228240.0, ans=0.0
+2024-08-27 04:13:17,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=228240.0, ans=0.04949747468305833
+2024-08-27 04:13:18,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=228240.0, ans=0.125
+2024-08-27 04:13:32,855 INFO [train.py:1114] (1/4) Epoch 18, batch 500, loss[loss=0.1954, simple_loss=0.2743, pruned_loss=0.0424, ctc_loss=0.07934, over 19696.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2604, pruned_loss=0.04038, ctc_loss=0.07559, over 3546801.94 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0
+2024-08-27 04:13:37,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228346.66666666666, ans=0.1
+2024-08-27 04:13:39,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.92 vs. limit=15.0
+2024-08-27 04:13:43,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=228400.0, ans=0.0
+2024-08-27 04:13:45,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=228400.0, ans=0.0
+2024-08-27 04:13:46,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=228400.0, ans=0.125
+2024-08-27 04:13:51,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228400.0, ans=0.125
+2024-08-27 04:14:18,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=228560.0, ans=0.2
+2024-08-27 04:14:20,883 INFO [train.py:1114] (1/4) Epoch 18, batch 550, loss[loss=0.2117, simple_loss=0.2833, pruned_loss=0.05063, ctc_loss=0.09699, over 19259.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2604, pruned_loss=0.04042, ctc_loss=0.07544, over 3609220.25 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0
+2024-08-27 04:14:28,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=228613.33333333334, ans=0.025
+2024-08-27 04:14:34,525 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.436e+02 1.681e+02 2.031e+02 3.505e+02, threshold=3.363e+02, percent-clipped=1.0
+2024-08-27 04:14:38,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.07 vs. limit=22.5
+2024-08-27 04:14:38,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228666.66666666666, ans=0.125
+2024-08-27 04:14:43,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.93 vs. limit=15.0
+2024-08-27 04:14:47,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=228720.0, ans=0.0
+2024-08-27 04:15:07,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0
+2024-08-27 04:15:08,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=228826.66666666666, ans=0.2
+2024-08-27 04:15:14,825 INFO [train.py:1114] (1/4) Epoch 18, batch 600, loss[loss=0.2116, simple_loss=0.2891, pruned_loss=0.04919, ctc_loss=0.08946, over 19439.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2609, pruned_loss=0.04061, ctc_loss=0.07597, over 3667203.93 frames. ], batch size: 67, lr: 8.38e-03, grad_scale: 32.0
+2024-08-27 04:15:20,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.57 vs. limit=15.0
+2024-08-27 04:15:20,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=228880.0, ans=0.125
+2024-08-27 04:15:46,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=228986.66666666666, ans=0.125
+2024-08-27 04:16:57,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=229093.33333333334, ans=0.5
+2024-08-27 04:17:06,962 INFO [train.py:1114] (1/4) Epoch 18, batch 650, loss[loss=0.1856, simple_loss=0.2645, pruned_loss=0.03798, ctc_loss=0.077, over 19779.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2601, pruned_loss=0.04027, ctc_loss=0.07543, over 3716775.23 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0
+2024-08-27 04:17:20,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.567e+02 1.955e+02 2.726e+02 4.189e+02, threshold=3.909e+02, percent-clipped=6.0
+2024-08-27 04:17:24,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=229200.0, ans=0.125
+2024-08-27 04:17:24,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229200.0, ans=0.1
+2024-08-27 04:17:43,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229306.66666666666, ans=0.125
+2024-08-27 04:17:54,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.33 vs. limit=15.0
+2024-08-27 04:18:45,811 INFO [train.py:1114] (1/4) Epoch 18, batch 700, loss[loss=0.1805, simple_loss=0.2536, pruned_loss=0.03803, ctc_loss=0.07829, over 19707.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2607, pruned_loss=0.04048, ctc_loss=0.07587, over 3748465.26 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0
+2024-08-27 04:18:54,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=229413.33333333334, ans=0.025
+2024-08-27 04:18:59,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=229466.66666666666, ans=10.0
+2024-08-27 04:19:10,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=229520.0, ans=0.035
+2024-08-27 04:19:15,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=229520.0, ans=0.125
+2024-08-27 04:19:19,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=229573.33333333334, ans=0.125
+2024-08-27 04:19:35,533 INFO [train.py:1114] (1/4) Epoch 18, batch 750, loss[loss=0.1908, simple_loss=0.2711, pruned_loss=0.04018, ctc_loss=0.0756, over 19498.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2604, pruned_loss=0.0405, ctc_loss=0.07585, over 3775010.57 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 16.0
+2024-08-27 04:19:38,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=229680.0, ans=0.125
+2024-08-27 04:19:45,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.99 vs. limit=6.0
+2024-08-27 04:19:49,144 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.399e+02 1.632e+02 2.193e+02 3.721e+02, threshold=3.263e+02, percent-clipped=0.0
+2024-08-27 04:19:58,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=229786.66666666666, ans=0.0
+2024-08-27 04:20:00,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=15.0
+2024-08-27 04:20:10,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229840.0, ans=0.125
+2024-08-27 04:20:16,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.26 vs. limit=15.0
+2024-08-27 04:20:24,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229893.33333333334, ans=0.125
+2024-08-27 04:20:27,826 INFO [train.py:1114] (1/4) Epoch 18, batch 800, loss[loss=0.181, simple_loss=0.2536, pruned_loss=0.03955, ctc_loss=0.07314, over 19409.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2605, pruned_loss=0.04061, ctc_loss=0.07588, over 3795416.46 frames. ], batch size: 48, lr: 8.37e-03, grad_scale: 32.0
+2024-08-27 04:20:56,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.97 vs. limit=10.0
+2024-08-27 04:21:05,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=230053.33333333334, ans=0.125
+2024-08-27 04:21:06,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.30 vs. limit=15.0
+2024-08-27 04:21:31,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=230160.0, ans=0.125
+2024-08-27 04:21:31,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=230160.0, ans=0.0
+2024-08-27 04:21:33,171 INFO [train.py:1114] (1/4) Epoch 18, batch 850, loss[loss=0.1857, simple_loss=0.2663, pruned_loss=0.03807, ctc_loss=0.07233, over 19648.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2606, pruned_loss=0.04058, ctc_loss=0.07596, over 3814604.01 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0
+2024-08-27 04:21:34,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.24 vs. limit=22.5
+2024-08-27 04:21:42,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=230266.66666666666, ans=0.07
+2024-08-27 04:21:57,960 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.452e+02 1.736e+02 2.395e+02 3.551e+02, threshold=3.472e+02, percent-clipped=2.0
+2024-08-27 04:22:05,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0
+2024-08-27 04:22:18,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=230373.33333333334, ans=0.1
+2024-08-27 04:22:28,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=230426.66666666666, ans=0.125
+2024-08-27 04:22:30,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.04 vs. limit=15.0
+2024-08-27 04:22:31,228 INFO [train.py:1114] (1/4) Epoch 18, batch 900, loss[loss=0.1752, simple_loss=0.2427, pruned_loss=0.03854, ctc_loss=0.07642, over 19809.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2611, pruned_loss=0.04112, ctc_loss=0.07684, over 3819625.95 frames. ], batch size: 49, lr: 8.36e-03, grad_scale: 32.0
+2024-08-27 04:22:38,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=230480.0, ans=0.125
+2024-08-27 04:23:01,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.45 vs. limit=15.0
+2024-08-27 04:23:08,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=230693.33333333334, ans=0.0
+2024-08-27 04:23:11,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230693.33333333334, ans=0.125
+2024-08-27 04:23:13,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=230693.33333333334, ans=0.2
+2024-08-27 04:23:17,852 INFO [train.py:1114] (1/4) Epoch 18, batch 950, loss[loss=0.1746, simple_loss=0.2474, pruned_loss=0.03717, ctc_loss=0.06862, over 19518.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2609, pruned_loss=0.04102, ctc_loss=0.07669, over 3819326.72 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0
+2024-08-27 04:23:18,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.83 vs. limit=15.0
+2024-08-27 04:23:21,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=230746.66666666666, ans=0.0
+2024-08-27 04:23:22,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230746.66666666666, ans=0.0
+2024-08-27 04:23:36,279 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.393e+02 1.674e+02 2.227e+02 4.492e+02, threshold=3.349e+02, percent-clipped=5.0
+2024-08-27 04:24:03,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=230906.66666666666, ans=10.0
+2024-08-27 04:24:04,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0
+2024-08-27 04:24:09,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=230960.0, ans=0.5
+2024-08-27 04:24:09,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.67 vs. limit=22.5
+2024-08-27 04:24:15,458 INFO [train.py:1114] (1/4) Epoch 18, batch 1000, loss[loss=0.1642, simple_loss=0.2425, pruned_loss=0.03138, ctc_loss=0.05785, over 19849.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2613, pruned_loss=0.04102, ctc_loss=0.07668, over 3815683.01 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0
+2024-08-27 04:24:27,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231066.66666666666, ans=0.1
+2024-08-27 04:24:30,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231066.66666666666, ans=0.1
+2024-08-27 04:24:33,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=231120.0, ans=0.0
+2024-08-27 04:24:39,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=231120.0, ans=0.125
+2024-08-27 04:24:51,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.55 vs. limit=12.0
+2024-08-27 04:24:57,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231226.66666666666, ans=0.1
+2024-08-27 04:25:01,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231226.66666666666, ans=0.125
+2024-08-27 04:25:11,444 INFO [train.py:1114] (1/4) Epoch 18, batch 1050, loss[loss=0.1873, simple_loss=0.2631, pruned_loss=0.04084, ctc_loss=0.07462, over 19852.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2606, pruned_loss=0.04085, ctc_loss=0.07631, over 3823270.16 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0
+2024-08-27 04:25:11,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=231280.0, ans=0.2
+2024-08-27 04:25:18,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231280.0, ans=0.125
+2024-08-27 04:25:25,222 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.375e+02 1.549e+02 1.865e+02 3.480e+02, threshold=3.097e+02, percent-clipped=1.0
+2024-08-27 04:25:35,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=231386.66666666666, ans=0.025
+2024-08-27 04:25:37,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=231386.66666666666, ans=0.0
+2024-08-27 04:25:57,488 INFO [train.py:1114] (1/4) Epoch 18, batch 1100, loss[loss=0.1846, simple_loss=0.2592, pruned_loss=0.03952, ctc_loss=0.07737, over 19594.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.26, pruned_loss=0.04049, ctc_loss=0.07566, over 3830526.83 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 16.0
+2024-08-27 04:26:01,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231546.66666666666, ans=0.0
+2024-08-27 04:26:03,761 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0
+2024-08-27 04:26:16,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=231600.0, ans=0.125
+2024-08-27 04:26:20,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0
+2024-08-27 04:26:23,793 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:26:48,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=231706.66666666666, ans=0.125
+2024-08-27 04:27:14,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0
+2024-08-27 04:27:28,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=12.0
+2024-08-27 04:27:31,647 INFO [train.py:1114] (1/4) Epoch 18, batch 1150, loss[loss=0.1796, simple_loss=0.2578, pruned_loss=0.03755, ctc_loss=0.06559, over 19588.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2602, pruned_loss=0.04064, ctc_loss=0.07588, over 3830167.85 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 16.0
+2024-08-27 04:27:44,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=231866.66666666666, ans=0.2
+2024-08-27 04:27:45,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=231866.66666666666, ans=0.0
+2024-08-27 04:27:50,646 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.426e+02 1.640e+02 2.078e+02 3.185e+02, threshold=3.280e+02, percent-clipped=3.0
+2024-08-27 04:28:05,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=231920.0, ans=0.2
+2024-08-27 04:28:27,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=232026.66666666666, ans=0.025
+2024-08-27 04:28:27,957 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:28:32,362 INFO [train.py:1114] (1/4) Epoch 18, batch 1200, loss[loss=0.1881, simple_loss=0.2722, pruned_loss=0.03766, ctc_loss=0.07181, over 19831.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2618, pruned_loss=0.04091, ctc_loss=0.07657, over 3825669.59 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0
+2024-08-27 04:28:37,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=232080.0, ans=0.0
+2024-08-27 04:28:38,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232080.0, ans=0.1
+2024-08-27 04:28:45,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=232133.33333333334, ans=0.125
+2024-08-27 04:29:04,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232240.0, ans=0.1
+2024-08-27 04:29:07,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=232240.0, ans=0.0
+2024-08-27 04:29:07,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=232240.0, ans=0.125
+2024-08-27 04:29:08,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=232240.0, ans=0.025
+2024-08-27 04:29:18,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=232293.33333333334, ans=0.0
+2024-08-27 04:29:19,801 INFO [train.py:1114] (1/4) Epoch 18, batch 1250, loss[loss=0.2066, simple_loss=0.2834, pruned_loss=0.04705, ctc_loss=0.08935, over 19525.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.262, pruned_loss=0.04101, ctc_loss=0.07675, over 3842781.75 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0
+2024-08-27 04:29:27,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232346.66666666666, ans=0.1
+2024-08-27 04:29:33,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232400.0, ans=0.1
+2024-08-27 04:29:34,586 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.452e+02 1.815e+02 2.295e+02 4.200e+02, threshold=3.630e+02, percent-clipped=5.0
+2024-08-27 04:29:39,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232453.33333333334, ans=0.125
+2024-08-27 04:29:51,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=232506.66666666666, ans=0.125
+2024-08-27 04:30:22,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=232506.66666666666, ans=0.05
+2024-08-27 04:30:43,404 INFO [train.py:1114] (1/4) Epoch 18, batch 1300, loss[loss=0.2056, simple_loss=0.28, pruned_loss=0.04803, ctc_loss=0.08797, over 18867.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2609, pruned_loss=0.0405, ctc_loss=0.07588, over 3846959.32 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 16.0
+2024-08-27 04:31:02,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=232666.66666666666, ans=0.2
+2024-08-27 04:31:09,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=232720.0, ans=0.0
+2024-08-27 04:31:13,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0
+2024-08-27 04:31:18,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232773.33333333334, ans=0.1
+2024-08-27 04:31:20,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=232773.33333333334, ans=0.0
+2024-08-27 04:31:26,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.75 vs. limit=15.0
+2024-08-27 04:31:29,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232826.66666666666, ans=0.125
+2024-08-27 04:31:33,098 INFO [train.py:1114] (1/4) Epoch 18, batch 1350, loss[loss=0.1838, simple_loss=0.2614, pruned_loss=0.03857, ctc_loss=0.07276, over 19771.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2608, pruned_loss=0.0405, ctc_loss=0.07566, over 3856082.84 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 16.0
+2024-08-27 04:31:48,899 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.387e+02 1.655e+02 2.106e+02 4.022e+02, threshold=3.310e+02, percent-clipped=4.0
+2024-08-27 04:32:17,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=233093.33333333334, ans=0.125
+2024-08-27 04:32:19,576 INFO [train.py:1114] (1/4) Epoch 18, batch 1400, loss[loss=0.1715, simple_loss=0.2319, pruned_loss=0.04048, ctc_loss=0.07535, over 19659.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2607, pruned_loss=0.04047, ctc_loss=0.07567, over 3863302.16 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 16.0
+2024-08-27 04:32:19,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=233146.66666666666, ans=0.025
+2024-08-27 04:32:58,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233253.33333333334, ans=0.1
+2024-08-27 04:33:20,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=233306.66666666666, ans=0.125
+2024-08-27 04:33:33,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=233360.0, ans=0.125
+2024-08-27 04:33:40,130 INFO [train.py:1114] (1/4) Epoch 18, batch 1450, loss[loss=0.1953, simple_loss=0.2737, pruned_loss=0.04223, ctc_loss=0.08092, over 19656.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2611, pruned_loss=0.04042, ctc_loss=0.07562, over 3861502.71 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 16.0
+2024-08-27 04:33:56,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=233413.33333333334, ans=0.0
+2024-08-27 04:34:13,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=233466.66666666666, ans=0.125
+2024-08-27 04:34:21,008 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.457e+02 1.713e+02 1.981e+02 3.848e+02, threshold=3.426e+02, percent-clipped=1.0
+2024-08-27 04:34:41,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.96 vs. limit=22.5
+2024-08-27 04:34:45,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=233573.33333333334, ans=0.125
+2024-08-27 04:35:28,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233626.66666666666, ans=0.1
+2024-08-27 04:36:44,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=233626.66666666666, ans=0.125
+2024-08-27 04:36:59,905 INFO [train.py:1114] (1/4) Epoch 18, batch 1500, loss[loss=0.2036, simple_loss=0.2781, pruned_loss=0.04691, ctc_loss=0.08834, over 19574.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2614, pruned_loss=0.04041, ctc_loss=0.07565, over 3860727.16 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 16.0
+2024-08-27 04:37:05,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=233680.0, ans=0.125
+2024-08-27 04:37:38,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0
+2024-08-27 04:37:45,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=233733.33333333334, ans=0.0
+2024-08-27 04:38:23,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0
+2024-08-27 04:38:29,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233840.0, ans=0.1
+2024-08-27 04:38:29,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233840.0, ans=0.1
+2024-08-27 04:38:58,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233893.33333333334, ans=0.1
+2024-08-27 04:39:00,338 INFO [train.py:1114] (1/4) Epoch 18, batch 1550, loss[loss=0.2094, simple_loss=0.2786, pruned_loss=0.05201, ctc_loss=0.09038, over 19621.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2614, pruned_loss=0.04077, ctc_loss=0.07619, over 3845155.05 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 16.0
+2024-08-27 04:39:35,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=234000.0, ans=0.025
+2024-08-27 04:39:36,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:39:36,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:39:39,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:39:39,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:39:49,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=234000.0, ans=0.125
+2024-08-27 04:39:51,778 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.454e+02 1.713e+02 2.109e+02 3.815e+02, threshold=3.426e+02, percent-clipped=1.0
+2024-08-27 04:39:58,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=234053.33333333334, ans=0.125
+2024-08-27 04:40:27,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=234160.0, ans=0.125
+2024-08-27 04:40:35,439 INFO [train.py:1114] (1/4) Epoch 18, batch 1600, loss[loss=0.1922, simple_loss=0.2717, pruned_loss=0.04127, ctc_loss=0.07536, over 19841.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2611, pruned_loss=0.04086, ctc_loss=0.07632, over 3834361.49 frames. ], batch size: 57, lr: 8.29e-03, grad_scale: 32.0
+2024-08-27 04:40:59,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234320.0, ans=0.1
+2024-08-27 04:41:02,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.59 vs. limit=22.5
+2024-08-27 04:41:21,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234373.33333333334, ans=0.1
+2024-08-27 04:41:23,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=234373.33333333334, ans=0.025
+2024-08-27 04:41:44,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=234426.66666666666, ans=0.04949747468305833
+2024-08-27 04:41:53,483 INFO [train.py:1114] (1/4) Epoch 18, batch 1650, loss[loss=0.1874, simple_loss=0.2671, pruned_loss=0.03934, ctc_loss=0.07225, over 19662.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2608, pruned_loss=0.04099, ctc_loss=0.07638, over 3831307.67 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0
+2024-08-27 04:41:59,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=234480.0, ans=0.07
+2024-08-27 04:42:16,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.559e+02 1.894e+02 2.296e+02 3.896e+02, threshold=3.788e+02, percent-clipped=3.0
+2024-08-27 04:42:20,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.11 vs. limit=22.5
+2024-08-27 04:42:47,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=234640.0, ans=0.125
+2024-08-27 04:42:52,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=234693.33333333334, ans=0.125
+2024-08-27 04:43:00,751 INFO [train.py:1114] (1/4) Epoch 18, batch 1700, loss[loss=0.1797, simple_loss=0.2412, pruned_loss=0.04357, ctc_loss=0.07747, over 19665.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2608, pruned_loss=0.04073, ctc_loss=0.07591, over 3846215.94 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 32.0
+2024-08-27 04:43:14,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234800.0, ans=0.125
+2024-08-27 04:43:14,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=234800.0, ans=0.0
+2024-08-27 04:43:15,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.35 vs. limit=10.0
+2024-08-27 04:43:21,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234853.33333333334, ans=0.125
+2024-08-27 04:43:24,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=234853.33333333334, ans=0.04949747468305833
+2024-08-27 04:43:26,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=234853.33333333334, ans=0.125
+2024-08-27 04:43:33,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=234906.66666666666, ans=0.0
+2024-08-27 04:43:59,954 INFO [train.py:1114] (1/4) Epoch 18, batch 1750, loss[loss=0.1649, simple_loss=0.2327, pruned_loss=0.03542, ctc_loss=0.06589, over 19628.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2605, pruned_loss=0.04058, ctc_loss=0.0757, over 3851893.90 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0
+2024-08-27 04:44:02,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=235013.33333333334, ans=0.0
+2024-08-27 04:44:10,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=235066.66666666666, ans=0.025
+2024-08-27 04:44:16,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.479e+02 1.670e+02 2.161e+02 3.908e+02, threshold=3.340e+02, percent-clipped=1.0
+2024-08-27 04:44:18,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235120.0, ans=0.1
+2024-08-27 04:44:31,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=235173.33333333334, ans=0.0
+2024-08-27 04:44:50,512 INFO [train.py:1114] (1/4) Epoch 18, batch 1800, loss[loss=0.1868, simple_loss=0.2673, pruned_loss=0.0379, ctc_loss=0.07633, over 19601.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2607, pruned_loss=0.04043, ctc_loss=0.07533, over 3852585.51 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 16.0
+2024-08-27 04:44:55,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=235280.0, ans=0.025
+2024-08-27 04:44:57,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=235280.0, ans=0.025
+2024-08-27 04:44:58,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.90 vs. limit=22.5
+2024-08-27 04:45:01,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235333.33333333334, ans=0.1
+2024-08-27 04:45:02,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=235333.33333333334, ans=0.2
+2024-08-27 04:45:09,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=235386.66666666666, ans=0.0
+2024-08-27 04:45:25,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235386.66666666666, ans=0.1
+2024-08-27 04:45:48,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.76 vs. limit=15.0
+2024-08-27 04:45:48,631 INFO [train.py:1114] (1/4) Epoch 18, batch 1850, loss[loss=0.1801, simple_loss=0.2627, pruned_loss=0.03511, ctc_loss=0.06815, over 19574.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2609, pruned_loss=0.04043, ctc_loss=0.07535, over 3855922.66 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 8.0
+2024-08-27 04:45:48,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235546.66666666666, ans=0.125
+2024-08-27 04:45:55,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0
+2024-08-27 04:46:31,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235600.0, ans=0.1
+2024-08-27 04:46:38,524 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.500e+02 1.800e+02 2.247e+02 4.177e+02, threshold=3.601e+02, percent-clipped=3.0
+2024-08-27 04:46:47,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=235653.33333333334, ans=0.125
+2024-08-27 04:47:01,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=235760.0, ans=0.0
+2024-08-27 04:47:03,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=235760.0, ans=0.07
+2024-08-27 04:47:05,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=235813.33333333334, ans=0.035
+2024-08-27 04:47:06,322 INFO [train.py:1114] (1/4) Epoch 18, batch 1900, loss[loss=0.1915, simple_loss=0.2715, pruned_loss=0.03961, ctc_loss=0.08052, over 19667.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2613, pruned_loss=0.04058, ctc_loss=0.07567, over 3860504.63 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 8.0
+2024-08-27 04:47:09,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.39 vs. limit=15.0
+2024-08-27 04:47:16,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.13 vs. limit=22.5
+2024-08-27 04:47:42,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=235973.33333333334, ans=0.125
+2024-08-27 04:47:42,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235973.33333333334, ans=0.1
+2024-08-27 04:47:53,603 INFO [train.py:1114] (1/4) Epoch 18, batch 1950, loss[loss=0.1778, simple_loss=0.2612, pruned_loss=0.03481, ctc_loss=0.0621, over 19581.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2624, pruned_loss=0.04086, ctc_loss=0.07612, over 3869594.56 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 8.0
+2024-08-27 04:48:12,685 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.481e+02 1.697e+02 2.159e+02 5.555e+02, threshold=3.394e+02, percent-clipped=1.0
+2024-08-27 04:48:15,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=236186.66666666666, ans=0.0
+2024-08-27 04:48:27,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=236240.0, ans=0.125
+2024-08-27 04:48:50,298 INFO [train.py:1114] (1/4) Epoch 18, batch 2000, loss[loss=0.1688, simple_loss=0.2354, pruned_loss=0.03744, ctc_loss=0.06822, over 19639.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2631, pruned_loss=0.04116, ctc_loss=0.07679, over 3853760.36 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:48:51,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236346.66666666666, ans=0.125
+2024-08-27 04:49:10,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236400.0, ans=0.125
+2024-08-27 04:49:53,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=236453.33333333334, ans=0.025
+2024-08-27 04:50:00,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236506.66666666666, ans=0.1
+2024-08-27 04:50:16,421 INFO [train.py:1114] (1/4) Epoch 18, batch 2050, loss[loss=0.1598, simple_loss=0.2318, pruned_loss=0.03164, ctc_loss=0.06135, over 19710.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2619, pruned_loss=0.04096, ctc_loss=0.07642, over 3849371.92 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:50:47,343 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.468e+02 1.842e+02 2.423e+02 4.039e+02, threshold=3.684e+02, percent-clipped=4.0
+2024-08-27 04:51:00,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=236773.33333333334, ans=0.0
+2024-08-27 04:51:03,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.49 vs.
limit=15.0 +2024-08-27 04:51:09,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=236826.66666666666, ans=0.0 +2024-08-27 04:51:11,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=236826.66666666666, ans=0.125 +2024-08-27 04:51:12,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=236826.66666666666, ans=0.0 +2024-08-27 04:51:12,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.29 vs. limit=15.0 +2024-08-27 04:51:12,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236880.0, ans=0.125 +2024-08-27 04:51:13,547 INFO [train.py:1114] (1/4) Epoch 18, batch 2100, loss[loss=0.192, simple_loss=0.2669, pruned_loss=0.04314, ctc_loss=0.07705, over 19787.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2608, pruned_loss=0.04026, ctc_loss=0.07524, over 3857332.35 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 8.0 +2024-08-27 04:51:30,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.93 vs. limit=15.0 +2024-08-27 04:51:31,587 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:51:40,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.05 vs. limit=15.0 +2024-08-27 04:51:50,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=236986.66666666666, ans=0.125 +2024-08-27 04:51:51,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=236986.66666666666, ans=0.0 +2024-08-27 04:51:58,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=237040.0, ans=0.125 +2024-08-27 04:51:59,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0 +2024-08-27 04:52:02,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=237040.0, ans=0.0 +2024-08-27 04:52:07,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=237093.33333333334, ans=0.2 +2024-08-27 04:52:09,085 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:52:11,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=237093.33333333334, ans=0.125 +2024-08-27 04:52:13,291 INFO [train.py:1114] (1/4) Epoch 18, batch 2150, loss[loss=0.1791, simple_loss=0.2578, pruned_loss=0.03655, ctc_loss=0.06826, over 19581.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2601, pruned_loss=0.04014, ctc_loss=0.07497, over 3867655.65 frames. 
], batch size: 52, lr: 8.24e-03, grad_scale: 8.0 +2024-08-27 04:52:14,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=237146.66666666666, ans=0.125 +2024-08-27 04:52:20,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=237146.66666666666, ans=0.07 +2024-08-27 04:52:23,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=237200.0, ans=0.0 +2024-08-27 04:52:31,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.418e+02 1.667e+02 2.145e+02 4.483e+02, threshold=3.333e+02, percent-clipped=3.0 +2024-08-27 04:52:31,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.30 vs. limit=22.5 +2024-08-27 04:52:34,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.75 vs. limit=15.0 +2024-08-27 04:52:41,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=237306.66666666666, ans=0.125 +2024-08-27 04:52:42,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237306.66666666666, ans=0.125 +2024-08-27 04:52:47,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=237360.0, ans=0.125 +2024-08-27 04:52:57,217 INFO [train.py:1114] (1/4) Epoch 18, batch 2200, loss[loss=0.1938, simple_loss=0.2727, pruned_loss=0.04177, ctc_loss=0.07861, over 19570.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2601, pruned_loss=0.04006, ctc_loss=0.07485, over 3867394.06 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 8.0 +2024-08-27 04:53:12,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=237466.66666666666, ans=0.0 +2024-08-27 04:53:16,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237520.0, ans=0.125 +2024-08-27 04:53:28,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.46 vs. limit=10.0 +2024-08-27 04:53:44,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=237680.0, ans=0.2 +2024-08-27 04:53:45,567 INFO [train.py:1114] (1/4) Epoch 18, batch 2250, loss[loss=0.1989, simple_loss=0.2707, pruned_loss=0.04627, ctc_loss=0.08627, over 19609.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2605, pruned_loss=0.04024, ctc_loss=0.07513, over 3866849.03 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 8.0 +2024-08-27 04:57:16,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.60 vs. 
limit=15.0 +2024-08-27 04:58:07,208 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.445e+02 1.673e+02 2.181e+02 3.635e+02, threshold=3.347e+02, percent-clipped=1.0 +2024-08-27 05:00:01,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237893.33333333334, ans=0.1 +2024-08-27 05:00:07,279 INFO [train.py:1114] (1/4) Epoch 18, batch 2300, loss[loss=0.1803, simple_loss=0.2571, pruned_loss=0.03774, ctc_loss=0.06986, over 19507.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2597, pruned_loss=0.0402, ctc_loss=0.07505, over 3861725.30 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 8.0 +2024-08-27 05:00:10,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=237946.66666666666, ans=0.025 +2024-08-27 05:05:01,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=238053.33333333334, ans=0.0 +2024-08-27 05:06:05,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=238160.0, ans=0.07 +2024-08-27 05:06:14,069 INFO [train.py:1114] (1/4) Epoch 18, batch 2350, loss[loss=0.2003, simple_loss=0.2779, pruned_loss=0.0447, ctc_loss=0.08317, over 19669.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2599, pruned_loss=0.04034, ctc_loss=0.07529, over 3864437.57 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 8.0 +2024-08-27 05:06:15,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=238213.33333333334, ans=10.0 +2024-08-27 05:08:17,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=22.5 +2024-08-27 05:09:45,142 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.379e+02 1.605e+02 2.102e+02 3.614e+02, threshold=3.209e+02, percent-clipped=2.0 +2024-08-27 05:10:04,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=238320.0, ans=0.07 +2024-08-27 05:10:23,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=238373.33333333334, ans=0.125 +2024-08-27 05:11:08,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=238426.66666666666, ans=0.95 +2024-08-27 05:11:24,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=238426.66666666666, ans=0.0 +2024-08-27 05:11:54,342 INFO [train.py:1114] (1/4) Epoch 18, batch 2400, loss[loss=0.1898, simple_loss=0.2641, pruned_loss=0.04213, ctc_loss=0.07792, over 19258.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2618, pruned_loss=0.04098, ctc_loss=0.07654, over 3859107.89 frames. ], batch size: 71, lr: 8.22e-03, grad_scale: 16.0 +2024-08-27 05:13:07,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.13 vs. 
limit=22.5 +2024-08-27 05:13:24,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=238586.66666666666, ans=0.0 +2024-08-27 05:13:43,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238586.66666666666, ans=0.125 +2024-08-27 05:14:36,116 INFO [train.py:1114] (1/4) Epoch 18, batch 2450, loss[loss=0.2318, simple_loss=0.2845, pruned_loss=0.06462, ctc_loss=0.1248, over 12966.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2657, pruned_loss=0.04347, ctc_loss=0.08155, over 3729718.85 frames. ], batch size: 141, lr: 8.21e-03, grad_scale: 16.0 +2024-08-27 05:14:44,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=238800.0, ans=0.0 +2024-08-27 05:14:44,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. limit=10.0 +2024-08-27 05:15:11,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0 +2024-08-27 05:15:19,821 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.338e+02 1.631e+02 1.872e+02 2.220e+02 3.951e+02, threshold=3.743e+02, percent-clipped=5.0 +2024-08-27 05:15:29,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=238853.33333333334, ans=0.0 +2024-08-27 05:15:31,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=238853.33333333334, ans=0.125 +2024-08-27 05:15:32,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=238853.33333333334, ans=0.1 +2024-08-27 05:19:02,747 INFO [train.py:1114] (1/4) Epoch 19, batch 0, loss[loss=0.1767, simple_loss=0.248, pruned_loss=0.03835, ctc_loss=0.07156, over 19417.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.248, pruned_loss=0.03835, ctc_loss=0.07156, over 19417.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-27 05:19:02,748 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-27 05:19:44,311 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9009, 3.3176, 3.8041, 3.7220], device='cuda:1') +2024-08-27 05:20:05,937 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-27 05:20:05,939 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12945MB +2024-08-27 05:20:07,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=238954.66666666666, ans=10.0 +2024-08-27 05:20:09,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=238954.66666666666, ans=0.0 +2024-08-27 05:20:23,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239061.33333333334, ans=0.1 +2024-08-27 05:20:27,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.63 vs. 
limit=15.0 +2024-08-27 05:21:15,032 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:21:28,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-27 05:21:28,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.21 vs. limit=22.5 +2024-08-27 05:21:32,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-27 05:22:55,134 INFO [train.py:1114] (1/4) Epoch 19, batch 50, loss[loss=0.1651, simple_loss=0.2366, pruned_loss=0.03335, ctc_loss=0.06743, over 19694.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2621, pruned_loss=0.04168, ctc_loss=0.07858, over 843816.80 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:22:55,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239221.33333333334, ans=0.1 +2024-08-27 05:23:20,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239221.33333333334, ans=0.0 +2024-08-27 05:23:20,851 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:23:21,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=22.5 +2024-08-27 05:23:24,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239221.33333333334, ans=0.1 +2024-08-27 05:23:24,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239221.33333333334, ans=0.1 +2024-08-27 05:23:33,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239274.66666666666, ans=0.125 +2024-08-27 05:23:56,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=239328.0, ans=0.0 +2024-08-27 05:23:59,567 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.492e+02 1.734e+02 2.135e+02 3.431e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-27 05:23:59,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=239381.33333333334, ans=0.125 +2024-08-27 05:24:00,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-27 05:24:08,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=239381.33333333334, ans=0.125 +2024-08-27 05:24:16,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=239434.66666666666, ans=0.2 +2024-08-27 05:24:19,427 INFO [train.py:1114] (1/4) Epoch 19, batch 100, loss[loss=0.1616, simple_loss=0.2359, pruned_loss=0.03153, ctc_loss=0.06082, over 19715.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2626, pruned_loss=0.04111, ctc_loss=0.07727, over 1498509.86 frames. 
], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:24:19,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239488.0, ans=0.125 +2024-08-27 05:24:24,269 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:24:28,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-27 05:24:28,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=239541.33333333334, ans=0.0 +2024-08-27 05:24:30,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.72 vs. limit=22.5 +2024-08-27 05:24:39,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=239594.66666666666, ans=0.07 +2024-08-27 05:25:47,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239648.0, ans=0.1 +2024-08-27 05:25:48,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239648.0, ans=0.125 +2024-08-27 05:26:04,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239754.66666666666, ans=0.1 +2024-08-27 05:26:05,121 INFO [train.py:1114] (1/4) Epoch 19, batch 150, loss[loss=0.1553, simple_loss=0.2303, pruned_loss=0.0297, ctc_loss=0.05245, over 19722.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2609, pruned_loss=0.04033, ctc_loss=0.0755, over 2027222.49 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:26:27,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.92 vs. 
limit=22.5 +2024-08-27 05:26:28,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=239754.66666666666, ans=0.125 +2024-08-27 05:26:38,970 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:27:19,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=239914.66666666666, ans=0.1 +2024-08-27 05:27:20,689 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.500e+02 1.966e+02 2.497e+02 3.604e+02, threshold=3.932e+02, percent-clipped=3.0 +2024-08-27 05:27:20,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-27 05:27:33,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-27 05:27:48,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-27 05:28:02,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-27 05:28:10,144 INFO [train.py:1114] (1/4) Epoch 19, batch 200, loss[loss=0.2, simple_loss=0.2754, pruned_loss=0.04517, ctc_loss=0.08544, over 18110.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2599, pruned_loss=0.03987, ctc_loss=0.07445, over 2435690.74 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-27 05:28:11,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=240021.33333333334, ans=0.2 +2024-08-27 05:28:15,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-27 05:28:30,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.07 vs. limit=10.0 +2024-08-27 05:29:18,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=240181.33333333334, ans=0.0 +2024-08-27 05:29:28,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240234.66666666666, ans=0.125 +2024-08-27 05:29:34,216 INFO [train.py:1114] (1/4) Epoch 19, batch 250, loss[loss=0.1992, simple_loss=0.2738, pruned_loss=0.04522, ctc_loss=0.0853, over 19370.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2596, pruned_loss=0.03955, ctc_loss=0.07407, over 2755982.86 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-27 05:29:40,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240288.0, ans=0.125 +2024-08-27 05:29:49,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.50 vs. 
limit=10.0 +2024-08-27 05:30:02,560 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.446e+02 1.683e+02 2.499e+02 4.574e+02, threshold=3.367e+02, percent-clipped=7.0 +2024-08-27 05:30:09,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-27 05:30:10,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=240448.0, ans=0.2 +2024-08-27 05:30:11,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1 +2024-08-27 05:30:13,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-27 05:30:15,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-27 05:30:22,806 INFO [train.py:1114] (1/4) Epoch 19, batch 300, loss[loss=0.2165, simple_loss=0.2865, pruned_loss=0.05358, ctc_loss=0.09816, over 19524.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2591, pruned_loss=0.03949, ctc_loss=0.0738, over 3000158.22 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-27 05:30:29,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-27 05:30:43,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240661.33333333334, ans=0.1 +2024-08-27 05:30:54,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240714.66666666666, ans=0.1 +2024-08-27 05:30:55,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-27 05:31:02,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.30 vs. limit=15.0 +2024-08-27 05:31:09,980 INFO [train.py:1114] (1/4) Epoch 19, batch 350, loss[loss=0.1633, simple_loss=0.2358, pruned_loss=0.03241, ctc_loss=0.06489, over 19750.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03967, ctc_loss=0.07412, over 3190941.52 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-27 05:31:19,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.04 vs. limit=15.0 +2024-08-27 05:31:36,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.91 vs. 
limit=22.5 +2024-08-27 05:31:39,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.453e+02 1.753e+02 2.405e+02 3.677e+02, threshold=3.507e+02, percent-clipped=2.0 +2024-08-27 05:31:40,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240981.33333333334, ans=0.125 +2024-08-27 05:31:41,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240981.33333333334, ans=0.125 +2024-08-27 05:31:50,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=241034.66666666666, ans=0.0 +2024-08-27 05:31:51,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-27 05:31:57,316 INFO [train.py:1114] (1/4) Epoch 19, batch 400, loss[loss=0.1771, simple_loss=0.2609, pruned_loss=0.03339, ctc_loss=0.06637, over 19516.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.03953, ctc_loss=0.07393, over 3342063.45 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:32:27,288 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:32:51,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=241301.33333333334, ans=0.0 +2024-08-27 05:33:00,037 INFO [train.py:1114] (1/4) Epoch 19, batch 450, loss[loss=0.2003, simple_loss=0.2845, pruned_loss=0.04158, ctc_loss=0.0821, over 19611.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2603, pruned_loss=0.04006, ctc_loss=0.07484, over 3450262.93 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:33:07,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=241354.66666666666, ans=0.125 +2024-08-27 05:33:09,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241354.66666666666, ans=0.125 +2024-08-27 05:33:15,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241408.0, ans=0.1 +2024-08-27 05:33:16,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=241408.0, ans=0.125 +2024-08-27 05:33:16,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=241408.0, ans=0.125 +2024-08-27 05:33:19,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.10 vs. 
limit=15.0 +2024-08-27 05:33:22,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-27 05:33:23,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-27 05:33:30,920 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.397e+02 1.631e+02 2.046e+02 3.175e+02, threshold=3.262e+02, percent-clipped=0.0 +2024-08-27 05:33:32,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241514.66666666666, ans=0.1 +2024-08-27 05:33:39,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=241568.0, ans=0.2 +2024-08-27 05:33:49,280 INFO [train.py:1114] (1/4) Epoch 19, batch 500, loss[loss=0.1956, simple_loss=0.2702, pruned_loss=0.04423, ctc_loss=0.08139, over 19638.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2595, pruned_loss=0.03966, ctc_loss=0.07414, over 3545831.45 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:33:57,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.66 vs. limit=15.0 +2024-08-27 05:34:04,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0 +2024-08-27 05:34:07,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=241674.66666666666, ans=0.05 +2024-08-27 05:34:10,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241728.0, ans=0.125 +2024-08-27 05:34:11,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.10 vs. limit=10.0 +2024-08-27 05:34:18,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=241781.33333333334, ans=0.0 +2024-08-27 05:34:21,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. 
limit=15.0 +2024-08-27 05:34:24,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-27 05:34:30,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-27 05:34:31,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-27 05:34:31,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-27 05:34:37,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-27 05:34:39,019 INFO [train.py:1114] (1/4) Epoch 19, batch 550, loss[loss=0.2263, simple_loss=0.2927, pruned_loss=0.05874, ctc_loss=0.1061, over 19283.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2599, pruned_loss=0.04014, ctc_loss=0.07482, over 3608551.17 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-27 05:34:59,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=241994.66666666666, ans=0.2 +2024-08-27 05:34:59,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-27 05:35:03,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-08-27 05:35:09,354 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.385e+02 1.667e+02 1.980e+02 3.512e+02, threshold=3.334e+02, percent-clipped=2.0 +2024-08-27 05:35:13,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=242048.0, ans=0.5 +2024-08-27 05:35:16,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=242048.0, ans=0.07 +2024-08-27 05:35:18,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-27 05:35:21,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242101.33333333334, ans=0.1 +2024-08-27 05:35:27,105 INFO [train.py:1114] (1/4) Epoch 19, batch 600, loss[loss=0.213, simple_loss=0.292, pruned_loss=0.04848, ctc_loss=0.09245, over 19427.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2601, pruned_loss=0.04014, ctc_loss=0.07469, over 3666336.13 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-27 05:35:33,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242154.66666666666, ans=0.0 +2024-08-27 05:35:39,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.58 vs. 
limit=15.0 +2024-08-27 05:35:40,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=242208.0, ans=0.125 +2024-08-27 05:35:54,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242261.33333333334, ans=0.125 +2024-08-27 05:35:57,775 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.36 vs. limit=15.0 +2024-08-27 05:36:09,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=242314.66666666666, ans=0.0 +2024-08-27 05:36:14,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=242368.0, ans=0.2 +2024-08-27 05:36:15,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=242368.0, ans=0.2 +2024-08-27 05:36:18,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.78 vs. limit=22.5 +2024-08-27 05:36:23,052 INFO [train.py:1114] (1/4) Epoch 19, batch 650, loss[loss=0.1912, simple_loss=0.2711, pruned_loss=0.04014, ctc_loss=0.07745, over 19773.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2595, pruned_loss=0.03994, ctc_loss=0.07437, over 3717066.29 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-27 05:36:28,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=242421.33333333334, ans=0.0 +2024-08-27 05:36:28,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242421.33333333334, ans=0.125 +2024-08-27 05:36:38,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=242474.66666666666, ans=0.025 +2024-08-27 05:36:46,075 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:36:53,254 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.470e+02 1.907e+02 2.471e+02 4.129e+02, threshold=3.814e+02, percent-clipped=9.0 +2024-08-27 05:36:55,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=242581.33333333334, ans=0.125 +2024-08-27 05:36:56,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=242581.33333333334, ans=0.125 +2024-08-27 05:37:01,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-27 05:37:03,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-27 05:37:31,526 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:37:33,298 INFO [train.py:1114] (1/4) Epoch 19, batch 700, loss[loss=0.1793, simple_loss=0.2503, pruned_loss=0.03897, ctc_loss=0.07591, over 19732.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2603, pruned_loss=0.04019, ctc_loss=0.07492, over 3749165.96 frames. 
], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-27 05:37:43,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=242741.33333333334, ans=0.07 +2024-08-27 05:37:56,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242794.66666666666, ans=0.125 +2024-08-27 05:38:02,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=242848.0, ans=0.0 +2024-08-27 05:38:17,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=242901.33333333334, ans=0.2 +2024-08-27 05:38:23,030 INFO [train.py:1114] (1/4) Epoch 19, batch 750, loss[loss=0.1849, simple_loss=0.2667, pruned_loss=0.038, ctc_loss=0.06795, over 19487.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2598, pruned_loss=0.03999, ctc_loss=0.0747, over 3774464.38 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:38:32,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.24 vs. limit=15.0 +2024-08-27 05:38:45,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=243061.33333333334, ans=0.0 +2024-08-27 05:38:51,402 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.489e+02 1.823e+02 2.314e+02 3.772e+02, threshold=3.647e+02, percent-clipped=0.0 +2024-08-27 05:38:58,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=243114.66666666666, ans=0.0 +2024-08-27 05:39:11,754 INFO [train.py:1114] (1/4) Epoch 19, batch 800, loss[loss=0.1569, simple_loss=0.2316, pruned_loss=0.0305, ctc_loss=0.05289, over 19841.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2595, pruned_loss=0.03995, ctc_loss=0.0746, over 3796520.43 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:39:19,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=243221.33333333334, ans=0.2 +2024-08-27 05:39:58,029 INFO [train.py:1114] (1/4) Epoch 19, batch 850, loss[loss=0.1994, simple_loss=0.2759, pruned_loss=0.045, ctc_loss=0.08212, over 19650.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2592, pruned_loss=0.03987, ctc_loss=0.07447, over 3814948.74 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:40:04,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=243488.0, ans=0.025 +2024-08-27 05:40:04,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. 
limit=15.0 +2024-08-27 05:40:10,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243541.33333333334, ans=0.0 +2024-08-27 05:40:17,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=243594.66666666666, ans=0.0 +2024-08-27 05:40:25,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=243594.66666666666, ans=0.2 +2024-08-27 05:40:28,701 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.388e+02 1.609e+02 2.074e+02 4.897e+02, threshold=3.218e+02, percent-clipped=1.0 +2024-08-27 05:40:34,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=243648.0, ans=0.125 +2024-08-27 05:40:35,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.73 vs. limit=15.0 +2024-08-27 05:40:51,626 INFO [train.py:1114] (1/4) Epoch 19, batch 900, loss[loss=0.162, simple_loss=0.2366, pruned_loss=0.0325, ctc_loss=0.05594, over 19823.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2593, pruned_loss=0.03994, ctc_loss=0.07445, over 3819183.23 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-27 05:41:21,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=243808.0, ans=0.025 +2024-08-27 05:41:47,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243861.33333333334, ans=0.1 +2024-08-27 05:45:50,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=243968.0, ans=0.125 +2024-08-27 05:46:16,460 INFO [train.py:1114] (1/4) Epoch 19, batch 950, loss[loss=0.1614, simple_loss=0.2411, pruned_loss=0.02919, ctc_loss=0.05808, over 19491.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2595, pruned_loss=0.04, ctc_loss=0.07454, over 3820498.17 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-27 05:46:35,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244074.66666666666, ans=0.1 +2024-08-27 05:46:39,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.whiten.whitening_limit, batch_count=244074.66666666666, ans=12.0 +2024-08-27 05:46:50,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=244074.66666666666, ans=0.2 +2024-08-27 05:46:57,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=244128.0, ans=0.125 +2024-08-27 05:47:05,276 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.465e+02 1.729e+02 2.037e+02 3.385e+02, threshold=3.459e+02, percent-clipped=1.0 +2024-08-27 05:47:06,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=244181.33333333334, ans=0.0 +2024-08-27 05:47:24,477 INFO [train.py:1114] (1/4) Epoch 19, batch 1000, loss[loss=0.1651, simple_loss=0.2413, pruned_loss=0.03151, ctc_loss=0.06477, over 19867.00 frames. 
], tot_loss[loss=0.1857, simple_loss=0.2604, pruned_loss=0.0404, ctc_loss=0.07532, over 3816829.39 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-27 05:47:26,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=244288.0, ans=0.0 +2024-08-27 05:47:33,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244341.33333333334, ans=0.1 +2024-08-27 05:47:33,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244341.33333333334, ans=0.125 +2024-08-27 05:47:55,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244448.0, ans=0.125 +2024-08-27 05:48:00,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=244448.0, ans=0.125 +2024-08-27 05:48:12,810 INFO [train.py:1114] (1/4) Epoch 19, batch 1050, loss[loss=0.1821, simple_loss=0.2633, pruned_loss=0.03661, ctc_loss=0.06956, over 19850.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2595, pruned_loss=0.04015, ctc_loss=0.07499, over 3822541.22 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-27 05:48:13,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244554.66666666666, ans=0.0 +2024-08-27 05:48:23,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.13 vs. limit=22.5 +2024-08-27 05:48:42,881 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.407e+02 1.559e+02 1.901e+02 2.565e+02, threshold=3.118e+02, percent-clipped=0.0 +2024-08-27 05:48:51,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=15.0 +2024-08-27 05:49:01,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=244821.33333333334, ans=0.2 +2024-08-27 05:49:02,416 INFO [train.py:1114] (1/4) Epoch 19, batch 1100, loss[loss=0.1792, simple_loss=0.2538, pruned_loss=0.03824, ctc_loss=0.07008, over 19592.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2593, pruned_loss=0.03989, ctc_loss=0.07443, over 3830857.76 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:49:04,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.97 vs. limit=12.0 +2024-08-27 05:49:39,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=244981.33333333334, ans=0.125 +2024-08-27 05:49:51,779 INFO [train.py:1114] (1/4) Epoch 19, batch 1150, loss[loss=0.1707, simple_loss=0.2541, pruned_loss=0.03195, ctc_loss=0.05838, over 19598.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2592, pruned_loss=0.03963, ctc_loss=0.07412, over 3830026.08 frames. 
], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:49:53,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=245088.0, ans=0.0 +2024-08-27 05:51:12,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245141.33333333334, ans=0.0 +2024-08-27 05:52:27,615 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.437e+02 1.648e+02 2.100e+02 3.411e+02, threshold=3.296e+02, percent-clipped=3.0 +2024-08-27 05:52:32,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245248.0, ans=0.1 +2024-08-27 05:52:47,006 INFO [train.py:1114] (1/4) Epoch 19, batch 1200, loss[loss=0.1987, simple_loss=0.2734, pruned_loss=0.04526, ctc_loss=0.08359, over 19840.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2601, pruned_loss=0.03985, ctc_loss=0.07457, over 3824926.60 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:53:09,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245461.33333333334, ans=0.0 +2024-08-27 05:53:14,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=245461.33333333334, ans=0.0 +2024-08-27 05:53:14,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=245461.33333333334, ans=10.0 +2024-08-27 05:53:18,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=245514.66666666666, ans=0.0 +2024-08-27 05:53:34,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.25 vs. limit=10.0 +2024-08-27 05:53:35,240 INFO [train.py:1114] (1/4) Epoch 19, batch 1250, loss[loss=0.1986, simple_loss=0.2734, pruned_loss=0.04488, ctc_loss=0.08499, over 19547.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2608, pruned_loss=0.03998, ctc_loss=0.07477, over 3843041.14 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-27 05:53:51,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245674.66666666666, ans=0.125 +2024-08-27 05:53:54,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245728.0, ans=0.1 +2024-08-27 05:54:05,855 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.471e+02 1.735e+02 2.173e+02 3.319e+02, threshold=3.470e+02, percent-clipped=1.0 +2024-08-27 05:54:19,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245834.66666666666, ans=0.125 +2024-08-27 05:54:26,195 INFO [train.py:1114] (1/4) Epoch 19, batch 1300, loss[loss=0.2091, simple_loss=0.2791, pruned_loss=0.05029, ctc_loss=0.09621, over 18776.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2601, pruned_loss=0.03957, ctc_loss=0.07399, over 3846494.21 frames. 
], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-27 05:54:26,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245888.0, ans=0.1 +2024-08-27 05:54:36,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-27 05:54:39,099 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.93 vs. limit=15.0 +2024-08-27 05:54:47,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.77 vs. limit=15.0 +2024-08-27 05:54:52,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=245994.66666666666, ans=0.125 +2024-08-27 05:55:05,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246101.33333333334, ans=0.1 +2024-08-27 05:55:13,893 INFO [train.py:1114] (1/4) Epoch 19, batch 1350, loss[loss=0.1647, simple_loss=0.245, pruned_loss=0.03108, ctc_loss=0.05566, over 19783.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2594, pruned_loss=0.03924, ctc_loss=0.07331, over 3856904.11 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:55:29,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.40 vs. limit=15.0 +2024-08-27 05:55:42,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=246314.66666666666, ans=0.2 +2024-08-27 05:55:45,497 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.414e+02 1.634e+02 2.144e+02 3.359e+02, threshold=3.268e+02, percent-clipped=0.0 +2024-08-27 05:56:02,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.75 vs. limit=12.0 +2024-08-27 05:56:03,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246421.33333333334, ans=0.0 +2024-08-27 05:56:03,864 INFO [train.py:1114] (1/4) Epoch 19, batch 1400, loss[loss=0.1435, simple_loss=0.2172, pruned_loss=0.0251, ctc_loss=0.0489, over 19689.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2589, pruned_loss=0.03911, ctc_loss=0.07298, over 3864018.00 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:56:11,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=246421.33333333334, ans=0.125 +2024-08-27 05:56:15,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246474.66666666666, ans=0.1 +2024-08-27 05:56:22,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.56 vs. 
limit=10.0 +2024-08-27 05:56:33,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-27 05:56:42,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=246634.66666666666, ans=0.2 +2024-08-27 05:56:53,086 INFO [train.py:1114] (1/4) Epoch 19, batch 1450, loss[loss=0.1951, simple_loss=0.2699, pruned_loss=0.0439, ctc_loss=0.08096, over 19664.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2595, pruned_loss=0.03924, ctc_loss=0.07339, over 3863145.24 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:56:55,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=246688.0, ans=0.2 +2024-08-27 05:57:07,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=246741.33333333334, ans=0.0 +2024-08-27 05:57:08,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-27 05:57:25,427 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.422e+02 1.608e+02 1.963e+02 3.546e+02, threshold=3.216e+02, percent-clipped=4.0 +2024-08-27 05:57:35,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.53 vs. limit=15.0 +2024-08-27 05:57:42,305 INFO [train.py:1114] (1/4) Epoch 19, batch 1500, loss[loss=0.1988, simple_loss=0.274, pruned_loss=0.0451, ctc_loss=0.08363, over 19575.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2596, pruned_loss=0.03909, ctc_loss=0.07321, over 3862569.89 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 16.0 +2024-08-27 05:57:42,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=246954.66666666666, ans=0.0 +2024-08-27 05:58:48,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=247114.66666666666, ans=0.5 +2024-08-27 05:58:52,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247168.0, ans=0.0 +2024-08-27 05:58:58,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247168.0, ans=0.1 +2024-08-27 05:59:00,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=247221.33333333334, ans=0.125 +2024-08-27 05:59:01,745 INFO [train.py:1114] (1/4) Epoch 19, batch 1550, loss[loss=0.2112, simple_loss=0.2783, pruned_loss=0.05219, ctc_loss=0.09906, over 19583.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2597, pruned_loss=0.03942, ctc_loss=0.07387, over 3847411.22 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 16.0 +2024-08-27 05:59:03,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=247221.33333333334, ans=0.0 +2024-08-27 05:59:06,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.03 vs. 
limit=15.0 +2024-08-27 05:59:39,712 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-08-27 05:59:43,877 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.416e+02 1.634e+02 2.007e+02 4.215e+02, threshold=3.267e+02, percent-clipped=2.0 +2024-08-27 05:59:52,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=247434.66666666666, ans=0.0 +2024-08-27 06:00:02,731 INFO [train.py:1114] (1/4) Epoch 19, batch 1600, loss[loss=0.1846, simple_loss=0.2701, pruned_loss=0.03613, ctc_loss=0.06736, over 19843.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2596, pruned_loss=0.03956, ctc_loss=0.07423, over 3836078.74 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-27 06:00:07,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247488.0, ans=0.1 +2024-08-27 06:00:20,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.94 vs. limit=22.5 +2024-08-27 06:00:26,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=247594.66666666666, ans=0.0 +2024-08-27 06:00:29,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-27 06:00:45,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=247701.33333333334, ans=0.125 +2024-08-27 06:00:47,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=247701.33333333334, ans=0.2 +2024-08-27 06:00:47,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=247701.33333333334, ans=0.0 +2024-08-27 06:00:51,744 INFO [train.py:1114] (1/4) Epoch 19, batch 1650, loss[loss=0.1831, simple_loss=0.2687, pruned_loss=0.03558, ctc_loss=0.06585, over 19660.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2594, pruned_loss=0.03959, ctc_loss=0.07415, over 3832558.49 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-27 06:01:21,526 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.539e+02 1.985e+02 2.467e+02 4.637e+02, threshold=3.969e+02, percent-clipped=10.0 +2024-08-27 06:01:21,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=247914.66666666666, ans=0.125 +2024-08-27 06:01:23,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=247914.66666666666, ans=0.025 +2024-08-27 06:01:39,978 INFO [train.py:1114] (1/4) Epoch 19, batch 1700, loss[loss=0.1644, simple_loss=0.2325, pruned_loss=0.03514, ctc_loss=0.06511, over 19679.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2591, pruned_loss=0.03913, ctc_loss=0.07347, over 3846785.53 frames. 
], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-27 06:01:49,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=248074.66666666666, ans=0.2 +2024-08-27 06:01:57,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248128.0, ans=0.1 +2024-08-27 06:02:23,933 INFO [train.py:1114] (1/4) Epoch 19, batch 1750, loss[loss=0.1738, simple_loss=0.2372, pruned_loss=0.03995, ctc_loss=0.07625, over 19659.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2589, pruned_loss=0.03911, ctc_loss=0.07339, over 3852102.13 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-27 06:02:31,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=248288.0, ans=0.125 +2024-08-27 06:02:38,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=248341.33333333334, ans=0.125 +2024-08-27 06:02:46,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248394.66666666666, ans=0.125 +2024-08-27 06:02:57,020 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.179e+02 1.492e+02 1.808e+02 2.313e+02 3.735e+02, threshold=3.616e+02, percent-clipped=0.0 +2024-08-27 06:02:58,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=248448.0, ans=0.0 +2024-08-27 06:03:09,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=248501.33333333334, ans=0.2 +2024-08-27 06:03:18,749 INFO [train.py:1114] (1/4) Epoch 19, batch 1800, loss[loss=0.184, simple_loss=0.2659, pruned_loss=0.03734, ctc_loss=0.06864, over 19617.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2589, pruned_loss=0.03915, ctc_loss=0.07344, over 3853513.81 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-27 06:03:25,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0 +2024-08-27 06:04:02,719 INFO [train.py:1114] (1/4) Epoch 19, batch 1850, loss[loss=0.2017, simple_loss=0.2797, pruned_loss=0.04516, ctc_loss=0.08326, over 19581.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2592, pruned_loss=0.03924, ctc_loss=0.07351, over 3856194.78 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:02,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=248821.33333333334, ans=0.2 +2024-08-27 06:04:25,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=248928.0, ans=0.09899494936611666 +2024-08-27 06:04:32,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.32 vs. 
limit=15.0 +2024-08-27 06:04:32,744 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.498e+02 2.037e+02 3.063e+02 6.275e+02, threshold=4.074e+02, percent-clipped=13.0 +2024-08-27 06:04:34,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248981.33333333334, ans=0.1 +2024-08-27 06:04:46,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=249034.66666666666, ans=0.125 +2024-08-27 06:04:47,723 INFO [train.py:1114] (1/4) Epoch 19, batch 1900, loss[loss=0.1751, simple_loss=0.2584, pruned_loss=0.03333, ctc_loss=0.06278, over 19643.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2599, pruned_loss=0.03951, ctc_loss=0.07383, over 3860254.22 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:49,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=249088.0, ans=0.025 +2024-08-27 06:05:00,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=249141.33333333334, ans=0.0 +2024-08-27 06:05:06,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=249194.66666666666, ans=0.0 +2024-08-27 06:05:53,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=249301.33333333334, ans=0.2 +2024-08-27 06:05:58,956 INFO [train.py:1114] (1/4) Epoch 19, batch 1950, loss[loss=0.1726, simple_loss=0.2519, pruned_loss=0.03352, ctc_loss=0.06567, over 19609.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2612, pruned_loss=0.0397, ctc_loss=0.07427, over 3869695.05 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-27 06:16:15,092 INFO [train.py:1050] (1/4) Caught exception: [Rank 1] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=471644, OpType=ALLREDUCE, NumelIn=745, NumelOut=745, Timeout(ms)=600000) ran for 600000 milliseconds before timing out.. 
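The entry above is the NCCL watchdog aborting rank 1 after an allreduce sat for 600000 ms (10 minutes) without completing; the lines that follow show the training script reacting by saving a `bad-model-1.pt` checkpoint plus the in-flight batch, so the failure can be inspected offline with `torch.load`. A minimal sketch of one common mitigation, assuming the stall is a transient straggler (slow shared filesystem, an oversized bucket) rather than a true deadlock, is to raise the collective timeout when the process group is created. The `setup_dist` helper and the 30-minute value below are illustrative assumptions, not the actual icefall code:

```python
# Sketch: lengthen the NCCL watchdog timeout for DDP training.
# Timeout(ms)=600000 in the log matches the 10-minute default watchdog;
# a longer value gives slow ranks room to catch up instead of aborting.
from datetime import timedelta

import torch.distributed as dist


def setup_dist(rank: int, world_size: int) -> None:
    # `timeout` bounds how long a collective (e.g. the ALLREDUCE above)
    # may run before the watchdog tears down the process group.
    dist.init_process_group(
        backend="nccl",
        rank=rank,
        world_size=world_size,
        timeout=timedelta(minutes=30),
    )
```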
+2024-08-27 06:16:15,094 INFO [checkpoint.py:75] (1/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/bad-model-1.pt
+2024-08-27 06:16:22,126 INFO [train.py:1413] (1/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/batch-277582f0-93f5-0c2c-488e-44f94ecc6c7f.pt
+2024-08-27 06:16:22,170 INFO [train.py:1419] (1/4) features shape: torch.Size([50, 1582, 80])
+2024-08-27 06:16:22,172 INFO [train.py:1423] (1/4) num tokens: 4046
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-2 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-2
new file mode 100644
index 0000000000000000000000000000000000000000..083b0b6bd960faebe4e487f492dcca369c0e07ab
--- /dev/null
+++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-2
@@ -0,0 +1,5473 @@
+2024-08-26 14:14:06,049 INFO [train.py:1182] (2/4) Training started
+2024-08-26 14:14:09,228 INFO [train.py:1192] (2/4) Device: cuda:2
+2024-08-26 14:14:11,783 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2652.int.cedar.computecanada.ca', 'IP address': '172.16.146.89'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 4, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False,
'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-26 14:14:11,784 INFO [train.py:1212] (2/4) About to create model +2024-08-26 14:14:12,458 INFO [train.py:1216] (2/4) Number of model parameters: 65805511 +2024-08-26 14:14:12,459 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-3.pt +2024-08-26 14:14:19,979 INFO [train.py:1231] (2/4) Using DDP +2024-08-26 14:14:24,082 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-26 14:14:24,276 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-26 14:14:24,276 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-26 14:14:28,855 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-26 14:14:28,856 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-26 14:14:31,125 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-26 14:14:32,027 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-26 14:14:32,332 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-26 14:14:32,333 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-26 14:18:38,883 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.11 vs. limit=7.5 +2024-08-26 14:18:40,630 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB +2024-08-26 14:18:41,872 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB +2024-08-26 14:18:49,642 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB +2024-08-26 14:18:50,842 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB +2024-08-26 14:19:04,366 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=15.55 vs. limit=7.5 +2024-08-26 14:19:04,870 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB +2024-08-26 14:19:05,660 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=384, metric=14.04 vs. limit=7.5 +2024-08-26 14:19:06,160 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB +2024-08-26 14:19:06,179 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-26 14:19:49,550 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.12 vs. 
limit=15.0 +2024-08-26 14:19:52,359 INFO [train.py:1114] (2/4) Epoch 4, batch 0, loss[loss=0.2774, simple_loss=0.3163, pruned_loss=0.08761, ctc_loss=0.1582, over 19424.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3163, pruned_loss=0.08761, ctc_loss=0.1582, over 19424.00 frames. ], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:19:52,359 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 14:20:25,782 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2421, simple_loss=0.3218, pruned_loss=0.05945, ctc_loss=0.1086, over 944034.00 frames. +2024-08-26 14:20:25,783 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12216MB +2024-08-26 14:22:00,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=39936.0, ans=0.125 +2024-08-26 14:22:01,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=39936.0, ans=0.125 +2024-08-26 14:22:21,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=39989.333333333336, ans=0.125 +2024-08-26 14:22:42,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=39989.333333333336, ans=0.125 +2024-08-26 14:22:48,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.53 vs. limit=22.5 +2024-08-26 14:23:04,537 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.845e+02 2.126e+02 2.642e+02 4.004e+02, threshold=4.252e+02, percent-clipped=0.0 +2024-08-26 14:23:20,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40042.666666666664, ans=0.1 +2024-08-26 14:23:26,406 INFO [train.py:1114] (2/4) Epoch 4, batch 50, loss[loss=0.2577, simple_loss=0.2973, pruned_loss=0.07975, ctc_loss=0.1468, over 19707.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.3364, pruned_loss=0.09795, ctc_loss=0.183, over 844557.52 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:23:41,696 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:23:59,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=40149.333333333336, ans=0.0 +2024-08-26 14:24:08,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40149.333333333336, ans=0.125 +2024-08-26 14:24:52,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=40256.0, ans=0.125 +2024-08-26 14:25:11,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=40256.0, ans=0.0 +2024-08-26 14:25:30,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=40309.333333333336, ans=0.125 +2024-08-26 14:25:33,108 INFO [train.py:1114] (2/4) Epoch 4, batch 100, loss[loss=0.2827, simple_loss=0.3253, pruned_loss=0.08744, ctc_loss=0.163, over 19712.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3356, pruned_loss=0.09612, ctc_loss=0.1802, over 1498880.38 frames. 
], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-26 14:25:35,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40362.666666666664, ans=0.125 +2024-08-26 14:25:37,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40362.666666666664, ans=0.1 +2024-08-26 14:25:58,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=40416.0, ans=0.125 +2024-08-26 14:26:03,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=40416.0, ans=0.0020834782608695653 +2024-08-26 14:26:31,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.52 vs. limit=15.0 +2024-08-26 14:26:35,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.77 vs. limit=22.5 +2024-08-26 14:26:40,724 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.662e+02 1.906e+02 2.226e+02 3.245e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-26 14:26:41,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=40576.0, ans=0.0 +2024-08-26 14:26:48,533 INFO [train.py:1114] (2/4) Epoch 4, batch 150, loss[loss=0.2754, simple_loss=0.3013, pruned_loss=0.09238, ctc_loss=0.1616, over 19732.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3316, pruned_loss=0.09405, ctc_loss=0.1759, over 2028245.15 frames. ], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:26:49,800 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:26:54,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=12.0 +2024-08-26 14:27:38,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=40736.0, ans=0.5 +2024-08-26 14:27:41,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=40736.0, ans=0.2 +2024-08-26 14:27:52,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=40789.333333333336, ans=0.0 +2024-08-26 14:28:04,924 INFO [train.py:1114] (2/4) Epoch 4, batch 200, loss[loss=0.3447, simple_loss=0.3668, pruned_loss=0.1183, ctc_loss=0.2149, over 18389.00 frames. ], tot_loss[loss=0.2924, simple_loss=0.3295, pruned_loss=0.0929, ctc_loss=0.1739, over 2436426.00 frames. ], batch size: 86, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:28:07,106 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:28:08,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=40896.0, ans=0.00197913043478261 +2024-08-26 14:28:10,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.61 vs. 
limit=15.0 +2024-08-26 14:28:16,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.14 vs. limit=22.5 +2024-08-26 14:28:39,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=41056.0, ans=0.125 +2024-08-26 14:28:49,778 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.824e+02 2.102e+02 2.533e+02 3.992e+02, threshold=4.203e+02, percent-clipped=3.0 +2024-08-26 14:28:55,766 INFO [train.py:1114] (2/4) Epoch 4, batch 250, loss[loss=0.2885, simple_loss=0.3306, pruned_loss=0.08993, ctc_loss=0.1663, over 19404.00 frames. ], tot_loss[loss=0.2904, simple_loss=0.3286, pruned_loss=0.09171, ctc_loss=0.1719, over 2756072.04 frames. ], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:29:07,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-08-26 14:29:07,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.70 vs. limit=22.5 +2024-08-26 14:29:09,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=41216.0, ans=0.0019095652173913048 +2024-08-26 14:29:11,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.67 vs. limit=15.0 +2024-08-26 14:29:22,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=41269.333333333336, ans=0.125 +2024-08-26 14:29:38,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=41376.0, ans=0.025 +2024-08-26 14:29:46,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=41429.333333333336, ans=0.125 +2024-08-26 14:29:46,779 INFO [train.py:1114] (2/4) Epoch 4, batch 300, loss[loss=0.3234, simple_loss=0.3615, pruned_loss=0.1047, ctc_loss=0.1899, over 19523.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3284, pruned_loss=0.09172, ctc_loss=0.1718, over 3001487.19 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:30:07,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-26 14:30:32,090 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.674e+02 1.880e+02 2.161e+02 3.950e+02, threshold=3.761e+02, percent-clipped=0.0 +2024-08-26 14:30:32,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=41642.666666666664, ans=0.125 +2024-08-26 14:30:37,854 INFO [train.py:1114] (2/4) Epoch 4, batch 350, loss[loss=0.2616, simple_loss=0.3019, pruned_loss=0.08109, ctc_loss=0.1478, over 19751.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3289, pruned_loss=0.09177, ctc_loss=0.1717, over 3191000.54 frames. 
], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:30:42,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=41696.0, ans=0.5 +2024-08-26 14:30:49,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=41749.333333333336, ans=0.0 +2024-08-26 14:31:04,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.07 vs. limit=22.5 +2024-08-26 14:31:10,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=41856.0, ans=0.125 +2024-08-26 14:31:34,672 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:31:35,386 INFO [train.py:1114] (2/4) Epoch 4, batch 400, loss[loss=0.2696, simple_loss=0.3227, pruned_loss=0.07871, ctc_loss=0.1473, over 19489.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3278, pruned_loss=0.09106, ctc_loss=0.1705, over 3342031.38 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:31:49,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42016.0, ans=0.1 +2024-08-26 14:31:49,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=42016.0, ans=0.125 +2024-08-26 14:31:55,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42069.333333333336, ans=0.125 +2024-08-26 14:32:03,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=42122.666666666664, ans=0.125 +2024-08-26 14:32:11,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.48 vs. limit=22.5 +2024-08-26 14:32:11,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=42122.666666666664, ans=0.0 +2024-08-26 14:32:19,256 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.438e+02 1.828e+02 2.157e+02 2.598e+02 8.551e+02, threshold=4.314e+02, percent-clipped=2.0 +2024-08-26 14:32:23,144 INFO [train.py:1114] (2/4) Epoch 4, batch 450, loss[loss=0.2845, simple_loss=0.3305, pruned_loss=0.08639, ctc_loss=0.1646, over 19613.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.3279, pruned_loss=0.09109, ctc_loss=0.1706, over 3449509.33 frames. 
], batch size: 55, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:32:37,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-26 14:32:37,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=42282.666666666664, ans=0.0 +2024-08-26 14:32:38,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=42282.666666666664, ans=0.0 +2024-08-26 14:32:40,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42282.666666666664, ans=0.1 +2024-08-26 14:32:51,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=42336.0, ans=0.07 +2024-08-26 14:32:58,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42389.333333333336, ans=0.1 +2024-08-26 14:33:00,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=15.0 +2024-08-26 14:33:05,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=42442.666666666664, ans=0.05 +2024-08-26 14:33:10,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=42442.666666666664, ans=0.025 +2024-08-26 14:33:14,184 INFO [train.py:1114] (2/4) Epoch 4, batch 500, loss[loss=0.32, simple_loss=0.3539, pruned_loss=0.1031, ctc_loss=0.1999, over 19651.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3261, pruned_loss=0.09002, ctc_loss=0.169, over 3545956.63 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:33:55,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=42656.0, ans=0.0 +2024-08-26 14:34:05,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.84 vs. limit=22.5 +2024-08-26 14:34:07,927 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.676e+02 1.857e+02 2.171e+02 5.331e+02, threshold=3.714e+02, percent-clipped=2.0 +2024-08-26 14:34:11,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=42762.666666666664, ans=0.07 +2024-08-26 14:34:11,746 INFO [train.py:1114] (2/4) Epoch 4, batch 550, loss[loss=0.3036, simple_loss=0.3439, pruned_loss=0.09598, ctc_loss=0.1785, over 19266.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3264, pruned_loss=0.09027, ctc_loss=0.1691, over 3607040.34 frames. 
], batch size: 71, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:34:20,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=42816.0, ans=0.015 +2024-08-26 14:34:22,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=42816.0, ans=0.125 +2024-08-26 14:34:26,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=42816.0, ans=0.0 +2024-08-26 14:34:34,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=42869.333333333336, ans=0.125 +2024-08-26 14:34:38,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.39 vs. limit=15.0 +2024-08-26 14:35:02,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=43029.333333333336, ans=0.00151536231884058 +2024-08-26 14:35:03,328 INFO [train.py:1114] (2/4) Epoch 4, batch 600, loss[loss=0.2917, simple_loss=0.3371, pruned_loss=0.08824, ctc_loss=0.1743, over 19372.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3266, pruned_loss=0.09026, ctc_loss=0.169, over 3664404.59 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:35:13,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=43082.666666666664, ans=0.125 +2024-08-26 14:35:24,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=43136.0, ans=0.125 +2024-08-26 14:35:31,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=43136.0, ans=0.125 +2024-08-26 14:35:41,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=43189.333333333336, ans=0.07 +2024-08-26 14:35:42,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43189.333333333336, ans=0.125 +2024-08-26 14:35:50,399 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 1.699e+02 1.953e+02 2.270e+02 5.390e+02, threshold=3.906e+02, percent-clipped=1.0 +2024-08-26 14:35:54,192 INFO [train.py:1114] (2/4) Epoch 4, batch 650, loss[loss=0.2682, simple_loss=0.3152, pruned_loss=0.08114, ctc_loss=0.1475, over 19765.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3257, pruned_loss=0.08992, ctc_loss=0.1681, over 3714588.22 frames. ], batch size: 54, lr: 3.23e-02, grad_scale: 8.0 +2024-08-26 14:36:07,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=43349.333333333336, ans=0.0 +2024-08-26 14:36:10,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=43349.333333333336, ans=0.0 +2024-08-26 14:36:12,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.25 vs. 
limit=6.0 +2024-08-26 14:36:37,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=43509.333333333336, ans=0.125 +2024-08-26 14:36:39,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.53 vs. limit=22.5 +2024-08-26 14:36:45,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=43509.333333333336, ans=0.1 +2024-08-26 14:36:47,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=43562.666666666664, ans=0.125 +2024-08-26 14:36:48,328 INFO [train.py:1114] (2/4) Epoch 4, batch 700, loss[loss=0.2722, simple_loss=0.3122, pruned_loss=0.08519, ctc_loss=0.1546, over 19721.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3264, pruned_loss=0.08996, ctc_loss=0.1682, over 3746358.54 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:37:16,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.50 vs. limit=22.5 +2024-08-26 14:37:17,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43722.666666666664, ans=0.125 +2024-08-26 14:37:32,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=43776.0, ans=0.04949747468305833 +2024-08-26 14:37:36,039 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.717e+02 1.974e+02 2.287e+02 3.794e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 14:37:36,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=43776.0, ans=0.00135304347826087 +2024-08-26 14:37:37,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=43776.0, ans=0.2 +2024-08-26 14:37:39,960 INFO [train.py:1114] (2/4) Epoch 4, batch 750, loss[loss=0.2522, simple_loss=0.3095, pruned_loss=0.07073, ctc_loss=0.1335, over 19518.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3256, pruned_loss=0.08945, ctc_loss=0.1669, over 3773031.37 frames. ], batch size: 54, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:37:53,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=43882.666666666664, ans=0.125 +2024-08-26 14:38:04,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=43936.0, ans=0.5 +2024-08-26 14:38:16,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=43989.333333333336, ans=0.05 +2024-08-26 14:38:24,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=44042.666666666664, ans=0.2 +2024-08-26 14:38:31,794 INFO [train.py:1114] (2/4) Epoch 4, batch 800, loss[loss=0.2249, simple_loss=0.278, pruned_loss=0.06311, ctc_loss=0.1139, over 19794.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3251, pruned_loss=0.0892, ctc_loss=0.1663, over 3794584.01 frames. 
], batch size: 49, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:38:42,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.04 vs. limit=12.0 +2024-08-26 14:38:48,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=44149.333333333336, ans=0.0012718840579710143 +2024-08-26 14:38:53,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=44202.666666666664, ans=0.0012602898550724637 +2024-08-26 14:38:56,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=44202.666666666664, ans=0.125 +2024-08-26 14:39:07,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=44256.0, ans=0.015 +2024-08-26 14:39:16,263 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.706e+02 1.876e+02 2.197e+02 5.470e+02, threshold=3.751e+02, percent-clipped=2.0 +2024-08-26 14:39:19,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=44309.333333333336, ans=0.125 +2024-08-26 14:39:21,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=44309.333333333336, ans=0.125 +2024-08-26 14:39:22,913 INFO [train.py:1114] (2/4) Epoch 4, batch 850, loss[loss=0.2984, simple_loss=0.3384, pruned_loss=0.09253, ctc_loss=0.183, over 19663.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3248, pruned_loss=0.08905, ctc_loss=0.1659, over 3814262.28 frames. ], batch size: 59, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:39:36,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=44416.0, ans=0.0012139130434782597 +2024-08-26 14:39:46,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.10 vs. limit=22.5 +2024-08-26 14:39:50,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=44469.333333333336, ans=0.0 +2024-08-26 14:39:53,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=44522.666666666664, ans=0.0011907246376811603 +2024-08-26 14:40:01,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=44576.0, ans=0.0011791304347826097 +2024-08-26 14:40:03,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44576.0, ans=0.125 +2024-08-26 14:40:11,454 INFO [train.py:1114] (2/4) Epoch 4, batch 900, loss[loss=0.2829, simple_loss=0.3121, pruned_loss=0.0929, ctc_loss=0.1699, over 19806.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3254, pruned_loss=0.08956, ctc_loss=0.167, over 3817793.98 frames. 
], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:40:15,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=44629.333333333336, ans=0.0011675362318840574 +2024-08-26 14:40:27,052 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:40:27,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=44682.666666666664, ans=0.025 +2024-08-26 14:40:28,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=44682.666666666664, ans=0.025 +2024-08-26 14:40:28,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=44682.666666666664, ans=0.125 +2024-08-26 14:40:28,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=44682.666666666664, ans=0.125 +2024-08-26 14:40:37,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44736.0, ans=0.1 +2024-08-26 14:40:59,419 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.686e+02 1.871e+02 2.157e+02 4.639e+02, threshold=3.742e+02, percent-clipped=1.0 +2024-08-26 14:41:03,414 INFO [train.py:1114] (2/4) Epoch 4, batch 950, loss[loss=0.2617, simple_loss=0.3051, pruned_loss=0.0801, ctc_loss=0.1453, over 19494.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3256, pruned_loss=0.08963, ctc_loss=0.1675, over 3819918.60 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:41:03,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.55 vs. limit=15.0 +2024-08-26 14:41:07,947 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=15.0 +2024-08-26 14:41:54,729 INFO [train.py:1114] (2/4) Epoch 4, batch 1000, loss[loss=0.2526, simple_loss=0.3039, pruned_loss=0.07229, ctc_loss=0.1419, over 19849.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3262, pruned_loss=0.08982, ctc_loss=0.1677, over 3814838.08 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-26 14:42:14,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=45216.0, ans=0.0 +2024-08-26 14:42:16,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.22 vs. limit=22.5 +2024-08-26 14:42:16,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=15.0 +2024-08-26 14:42:42,492 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.702e+02 1.844e+02 2.187e+02 3.225e+02, threshold=3.689e+02, percent-clipped=0.0 +2024-08-26 14:42:46,505 INFO [train.py:1114] (2/4) Epoch 4, batch 1050, loss[loss=0.2782, simple_loss=0.3321, pruned_loss=0.08121, ctc_loss=0.1548, over 19836.00 frames. ], tot_loss[loss=0.285, simple_loss=0.325, pruned_loss=0.08913, ctc_loss=0.1666, over 3822005.55 frames. 
], batch size: 57, lr: 3.19e-02, grad_scale: 16.0
+2024-08-26 14:42:55,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=45482.666666666664, ans=0.125
+2024-08-26 14:42:57,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=45482.666666666664, ans=0.025
+2024-08-26 14:43:10,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.66 vs. limit=10.0
+2024-08-26 14:43:10,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.93 vs. limit=15.0
+2024-08-26 14:43:18,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45589.333333333336, ans=0.1
+2024-08-26 14:43:38,133 INFO [train.py:1114] (2/4) Epoch 4, batch 1100, loss[loss=0.2661, simple_loss=0.3024, pruned_loss=0.08305, ctc_loss=0.1595, over 19606.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3242, pruned_loss=0.08854, ctc_loss=0.1656, over 3829079.95 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-26 14:43:44,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=45696.0, ans=0.1
+2024-08-26 14:43:46,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=45696.0, ans=0.2
+2024-08-26 14:43:53,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=45749.333333333336, ans=0.125
+2024-08-26 14:43:56,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=45802.666666666664, ans=0.0
+2024-08-26 14:44:12,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45856.0, ans=0.1
+2024-08-26 14:44:25,686 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.748e+02 1.997e+02 2.350e+02 6.199e+02, threshold=3.995e+02, percent-clipped=5.0
+2024-08-26 14:44:29,537 INFO [train.py:1114] (2/4) Epoch 4, batch 1150, loss[loss=0.2546, simple_loss=0.3053, pruned_loss=0.07517, ctc_loss=0.1339, over 19592.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3238, pruned_loss=0.08832, ctc_loss=0.1653, over 3830223.75 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0
+2024-08-26 14:44:33,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45962.666666666664, ans=0.125
+2024-08-26 14:44:33,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45962.666666666664, ans=0.1
+2024-08-26 14:45:33,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=46016.0, ans=0.5
+2024-08-26 14:46:30,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=46176.0, ans=0.0
+2024-08-26 14:46:33,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.97 vs. limit=10.0
+2024-08-26 14:46:38,936 INFO [train.py:1114] (2/4) Epoch 4, batch 1200, loss[loss=0.2851, simple_loss=0.3284, pruned_loss=0.08826, ctc_loss=0.1634, over 19845.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.325, pruned_loss=0.08892, ctc_loss=0.1663, over 3825950.41 frames. ], batch size: 57, lr: 3.17e-02, grad_scale: 32.0
+2024-08-26 14:46:42,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0
+2024-08-26 14:46:49,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=46282.666666666664, ans=0.0
+2024-08-26 14:46:53,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46282.666666666664, ans=0.1
+2024-08-26 14:46:55,318 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:46:59,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=46336.0, ans=0.125
+2024-08-26 14:47:13,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=46389.333333333336, ans=0.125
+2024-08-26 14:47:23,211 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.767e+02 1.944e+02 2.283e+02 5.479e+02, threshold=3.889e+02, percent-clipped=1.0
+2024-08-26 14:47:29,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=46496.0, ans=0.05
+2024-08-26 14:47:29,955 INFO [train.py:1114] (2/4) Epoch 4, batch 1250, loss[loss=0.293, simple_loss=0.3365, pruned_loss=0.09035, ctc_loss=0.1722, over 19543.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3258, pruned_loss=0.08916, ctc_loss=0.1666, over 3843771.03 frames. ], batch size: 61, lr: 3.17e-02, grad_scale: 32.0
+2024-08-26 14:47:42,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=46549.333333333336, ans=0.125
+2024-08-26 14:47:43,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.91 vs. limit=15.0
+2024-08-26 14:47:46,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46549.333333333336, ans=0.1
+2024-08-26 14:47:54,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=46602.666666666664, ans=0.0007385507246376825
+2024-08-26 14:48:19,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=46709.333333333336, ans=0.125
+2024-08-26 14:48:20,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=46709.333333333336, ans=0.125
+2024-08-26 14:48:20,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.19 vs. limit=15.0
+2024-08-26 14:48:22,039 INFO [train.py:1114] (2/4) Epoch 4, batch 1300, loss[loss=0.318, simple_loss=0.3423, pruned_loss=0.1068, ctc_loss=0.2, over 18907.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3242, pruned_loss=0.08838, ctc_loss=0.1651, over 3847068.18 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0
+2024-08-26 14:48:49,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46922.666666666664, ans=0.125
+2024-08-26 14:49:06,441 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.633e+02 1.793e+02 2.136e+02 4.035e+02, threshold=3.586e+02, percent-clipped=1.0
+2024-08-26 14:49:08,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=46976.0, ans=0.125
+2024-08-26 14:49:10,194 INFO [train.py:1114] (2/4) Epoch 4, batch 1350, loss[loss=0.2799, simple_loss=0.3279, pruned_loss=0.08458, ctc_loss=0.1566, over 19758.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3231, pruned_loss=0.08746, ctc_loss=0.1635, over 3858266.94 frames. ], batch size: 54, lr: 3.16e-02, grad_scale: 32.0
+2024-08-26 14:49:10,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47029.333333333336, ans=0.1
+2024-08-26 14:49:19,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=47029.333333333336, ans=0.05
+2024-08-26 14:49:23,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.91 vs. limit=22.5
+2024-08-26 14:49:23,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=47082.666666666664, ans=0.2
+2024-08-26 14:49:36,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=47136.0, ans=0.0
+2024-08-26 14:49:58,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0
+2024-08-26 14:50:01,599 INFO [train.py:1114] (2/4) Epoch 4, batch 1400, loss[loss=0.229, simple_loss=0.2765, pruned_loss=0.06533, ctc_loss=0.1273, over 19705.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3223, pruned_loss=0.08708, ctc_loss=0.1629, over 3865064.39 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0
+2024-08-26 14:50:05,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=47296.0, ans=0.0
+2024-08-26 14:50:43,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.07 vs. limit=15.0
+2024-08-26 14:50:45,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=47509.333333333336, ans=0.125
+2024-08-26 14:50:49,035 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.701e+02 1.930e+02 2.137e+02 5.469e+02, threshold=3.859e+02, percent-clipped=2.0
+2024-08-26 14:50:53,072 INFO [train.py:1114] (2/4) Epoch 4, batch 1450, loss[loss=0.2934, simple_loss=0.3366, pruned_loss=0.09111, ctc_loss=0.1699, over 19676.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.323, pruned_loss=0.08758, ctc_loss=0.1638, over 3863127.45 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0
+2024-08-26 14:51:11,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=47616.0, ans=0.125
+2024-08-26 14:51:35,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=47776.0, ans=0.00048347826086956626
+2024-08-26 14:51:43,458 INFO [train.py:1114] (2/4) Epoch 4, batch 1500, loss[loss=0.3127, simple_loss=0.3498, pruned_loss=0.1025, ctc_loss=0.1766, over 19574.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3239, pruned_loss=0.08792, ctc_loss=0.1643, over 3862632.16 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 32.0
+2024-08-26 14:52:16,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=47936.0, ans=0.0
+2024-08-26 14:52:19,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=47989.333333333336, ans=0.0
+2024-08-26 14:52:23,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=47989.333333333336, ans=0.125
+2024-08-26 14:52:34,699 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.743e+02 1.956e+02 2.243e+02 3.928e+02, threshold=3.912e+02, percent-clipped=1.0
+2024-08-26 14:52:38,440 INFO [train.py:1114] (2/4) Epoch 4, batch 1550, loss[loss=0.3179, simple_loss=0.358, pruned_loss=0.1016, ctc_loss=0.1864, over 19574.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3239, pruned_loss=0.08803, ctc_loss=0.1645, over 3847553.78 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 32.0
+2024-08-26 14:52:46,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=48096.0, ans=0.00041391304347826105
+2024-08-26 14:52:49,157 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:53:28,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=48309.333333333336, ans=0.09899494936611666
+2024-08-26 14:53:28,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48309.333333333336, ans=0.125
+2024-08-26 14:53:29,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=48362.666666666664, ans=0.125
+2024-08-26 14:53:29,869 INFO [train.py:1114] (2/4) Epoch 4, batch 1600, loss[loss=0.283, simple_loss=0.3355, pruned_loss=0.08445, ctc_loss=0.154, over 19829.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.324, pruned_loss=0.08839, ctc_loss=0.165, over 3837485.55 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0
+2024-08-26 14:53:44,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=48416.0, ans=0.2
+2024-08-26 14:53:46,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48416.0, ans=0.1
+2024-08-26 14:53:55,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=48469.333333333336, ans=0.125
+2024-08-26 14:54:07,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=48522.666666666664, ans=0.125
+2024-08-26 14:54:14,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.15 vs. limit=15.0
+2024-08-26 14:54:18,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.701e+02 1.882e+02 2.341e+02 4.982e+02, threshold=3.764e+02, percent-clipped=3.0
+2024-08-26 14:54:19,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=48576.0, ans=0.125
+2024-08-26 14:54:21,792 INFO [train.py:1114] (2/4) Epoch 4, batch 1650, loss[loss=0.3017, simple_loss=0.3417, pruned_loss=0.09456, ctc_loss=0.1814, over 19647.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3242, pruned_loss=0.08874, ctc_loss=0.1655, over 3833438.53 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0
+2024-08-26 14:55:17,034 INFO [train.py:1114] (2/4) Epoch 4, batch 1700, loss[loss=0.2531, simple_loss=0.2898, pruned_loss=0.07885, ctc_loss=0.1466, over 19645.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3238, pruned_loss=0.08827, ctc_loss=0.1645, over 3847276.12 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0
+2024-08-26 15:55:27,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.10 vs. limit=15.0
+2024-08-26 14:55:40,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.62 vs. limit=15.0
+2024-08-26 14:55:41,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=49002.666666666664, ans=0.125
+2024-08-26 14:55:42,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=49002.666666666664, ans=0.0
+2024-08-26 14:55:45,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=49056.0, ans=0.125
+2024-08-26 14:55:48,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=49056.0, ans=0.00020521739130434716
+2024-08-26 14:55:51,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.84 vs. limit=12.0
+2024-08-26 14:55:56,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.66 vs. limit=15.0
+2024-08-26 14:55:59,532 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.770e+02 1.975e+02 2.193e+02 4.882e+02, threshold=3.950e+02, percent-clipped=1.0
+2024-08-26 14:56:00,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=49109.333333333336, ans=15.0
+2024-08-26 14:56:03,229 INFO [train.py:1114] (2/4) Epoch 4, batch 1750, loss[loss=0.2606, simple_loss=0.2927, pruned_loss=0.08386, ctc_loss=0.1519, over 19645.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3225, pruned_loss=0.0877, ctc_loss=0.1636, over 3852181.27 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0
+2024-08-26 14:56:06,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=49162.666666666664, ans=0.0
+2024-08-26 14:56:23,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=15.0
+2024-08-26 14:56:24,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=49269.333333333336, ans=0.2
+2024-08-26 14:56:29,180 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:56:29,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=49322.666666666664, ans=0.125
+2024-08-26 14:56:39,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=49376.0, ans=0.125
+2024-08-26 14:56:48,547 INFO [train.py:1114] (2/4) Epoch 4, batch 1800, loss[loss=0.285, simple_loss=0.3317, pruned_loss=0.08678, ctc_loss=0.1618, over 19611.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3224, pruned_loss=0.08742, ctc_loss=0.1631, over 3852946.72 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0
+2024-08-26 14:56:55,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=49429.333333333336, ans=0.035
+2024-08-26 14:57:01,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49482.666666666664, ans=0.1
+2024-08-26 14:57:02,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0
+2024-08-26 14:57:30,228 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.664e+02 1.898e+02 2.172e+02 3.982e+02, threshold=3.795e+02, percent-clipped=1.0
+2024-08-26 14:57:33,986 INFO [train.py:1114] (2/4) Epoch 4, batch 1850, loss[loss=0.2927, simple_loss=0.3308, pruned_loss=0.09245, ctc_loss=0.174, over 19561.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.322, pruned_loss=0.08735, ctc_loss=0.1629, over 3856583.60 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0
+2024-08-26 14:57:39,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49696.0, ans=0.125
+2024-08-26 14:57:54,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.74 vs. limit=22.5
+2024-08-26 14:58:00,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=49802.666666666664, ans=0.2
+2024-08-26 14:58:16,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=49909.333333333336, ans=0.0
+2024-08-26 14:58:21,310 INFO [train.py:1114] (2/4) Epoch 4, batch 1900, loss[loss=0.2969, simple_loss=0.3445, pruned_loss=0.09021, ctc_loss=0.1724, over 19638.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3225, pruned_loss=0.08737, ctc_loss=0.1628, over 3861482.76 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 16.0
+2024-08-26 14:58:25,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=49962.666666666664, ans=0.125
+2024-08-26 14:58:25,988 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 14:58:26,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=49962.666666666664, ans=0.125
+2024-08-26 14:58:48,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=50122.666666666664, ans=0.125
+2024-08-26 14:58:59,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.77 vs. limit=10.0
+2024-08-26 14:59:02,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.09 vs. limit=10.0
+2024-08-26 14:59:03,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.687e+02 1.820e+02 2.228e+02 3.741e+02, threshold=3.639e+02, percent-clipped=0.0
+2024-08-26 14:59:06,128 INFO [train.py:1114] (2/4) Epoch 4, batch 1950, loss[loss=0.2723, simple_loss=0.3201, pruned_loss=0.08182, ctc_loss=0.152, over 19573.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3237, pruned_loss=0.08752, ctc_loss=0.1628, over 3870492.34 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 16.0
+2024-08-26 14:59:10,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50229.333333333336, ans=0.125
+2024-08-26 14:59:22,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=50282.666666666664, ans=0.125
+2024-08-26 14:59:34,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=50389.333333333336, ans=0.125
+2024-08-26 14:59:53,446 INFO [train.py:1114] (2/4) Epoch 4, batch 2000, loss[loss=0.2313, simple_loss=0.2772, pruned_loss=0.06785, ctc_loss=0.1243, over 19668.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3241, pruned_loss=0.088, ctc_loss=0.1636, over 3853499.94 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0
+2024-08-26 14:59:57,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=50496.0, ans=0.125
+2024-08-26 15:00:15,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=50602.666666666664, ans=0.125
+2024-08-26 15:00:18,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=50602.666666666664, ans=0.125
+2024-08-26 15:00:24,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=50656.0, ans=0.125
+2024-08-26 15:00:35,434 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.722e+02 2.023e+02 2.377e+02 8.657e+02, threshold=4.047e+02, percent-clipped=4.0
+2024-08-26 15:00:37,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=50762.666666666664, ans=0.125
+2024-08-26 15:00:38,085 INFO [train.py:1114] (2/4) Epoch 4, batch 2050, loss[loss=0.2374, simple_loss=0.2826, pruned_loss=0.06884, ctc_loss=0.1362, over 19717.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3233, pruned_loss=0.08758, ctc_loss=0.1632, over 3850611.27 frames. ], batch size: 47, lr: 3.08e-02, grad_scale: 32.0
+2024-08-26 15:00:39,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0
+2024-08-26 15:00:44,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=50762.666666666664, ans=0.0
+2024-08-26 15:00:46,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0
+2024-08-26 15:00:52,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0
+2024-08-26 15:01:00,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=50869.333333333336, ans=0.125
+2024-08-26 15:01:22,456 INFO [train.py:1114] (2/4) Epoch 4, batch 2100, loss[loss=0.2499, simple_loss=0.3066, pruned_loss=0.07062, ctc_loss=0.1296, over 19763.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3222, pruned_loss=0.08666, ctc_loss=0.1617, over 3858330.99 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0
+2024-08-26 15:01:27,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=51029.333333333336, ans=0.025
+2024-08-26 15:01:29,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=51029.333333333336, ans=0.025
+2024-08-26 15:01:42,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=51136.0, ans=0.125
+2024-08-26 15:01:49,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51189.333333333336, ans=0.125
+2024-08-26 15:01:51,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0
+2024-08-26 15:02:00,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=51242.666666666664, ans=0.95
+2024-08-26 15:02:04,155 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.626e+02 1.780e+02 1.895e+02 2.709e+02, threshold=3.561e+02, percent-clipped=0.0
+2024-08-26 15:02:07,169 INFO [train.py:1114] (2/4) Epoch 4, batch 2150, loss[loss=0.2625, simple_loss=0.3126, pruned_loss=0.07595, ctc_loss=0.1512, over 19858.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3211, pruned_loss=0.08602, ctc_loss=0.1605, over 3869328.31 frames. ], batch size: 52, lr: 3.07e-02, grad_scale: 32.0
+2024-08-26 15:02:08,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=51296.0, ans=0.0
+2024-08-26 15:02:13,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=51296.0, ans=0.0
+2024-08-26 15:02:15,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=51349.333333333336, ans=0.2
+2024-08-26 15:02:18,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=51349.333333333336, ans=0.0
+2024-08-26 15:02:22,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=51349.333333333336, ans=0.0
+2024-08-26 15:02:42,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.73 vs. limit=6.0
+2024-08-26 15:02:45,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0
+2024-08-26 15:02:45,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51509.333333333336, ans=0.125
+2024-08-26 15:02:54,329 INFO [train.py:1114] (2/4) Epoch 4, batch 2200, loss[loss=0.2637, simple_loss=0.3129, pruned_loss=0.07824, ctc_loss=0.1452, over 19574.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3213, pruned_loss=0.08624, ctc_loss=0.1606, over 3868226.85 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0
+2024-08-26 15:03:05,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=51616.0, ans=0.125
+2024-08-26 15:03:08,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51616.0, ans=0.125
+2024-08-26 15:03:08,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=51616.0, ans=0.07
+2024-08-26 15:03:28,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.61 vs. limit=15.0
+2024-08-26 15:03:30,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=51776.0, ans=0.0
+2024-08-26 15:03:36,544 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.424e+02 1.687e+02 1.993e+02 2.251e+02 9.209e+02, threshold=3.987e+02, percent-clipped=2.0
+2024-08-26 15:03:39,213 INFO [train.py:1114] (2/4) Epoch 4, batch 2250, loss[loss=0.2934, simple_loss=0.3335, pruned_loss=0.09131, ctc_loss=0.1768, over 19617.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3216, pruned_loss=0.08638, ctc_loss=0.1609, over 3867770.48 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0
+2024-08-26 15:03:41,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=51829.333333333336, ans=0.2
+2024-08-26 15:03:56,821 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:03:59,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=51936.0, ans=10.0
+2024-08-26 15:04:03,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=51936.0, ans=0.125
+2024-08-26 15:04:23,341 INFO [train.py:1114] (2/4) Epoch 4, batch 2300, loss[loss=0.2752, simple_loss=0.3185, pruned_loss=0.08417, ctc_loss=0.1587, over 19514.00 frames. ], tot_loss[loss=0.2788, simple_loss=0.3206, pruned_loss=0.08627, ctc_loss=0.1609, over 3861902.31 frames. ], batch size: 49, lr: 3.06e-02, grad_scale: 32.0
+2024-08-26 15:04:31,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=52096.0, ans=15.0
+2024-08-26 15:04:31,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=52149.333333333336, ans=0.125
+2024-08-26 15:04:42,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0
+2024-08-26 15:04:43,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52202.666666666664, ans=0.1
+2024-08-26 15:04:46,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=52202.666666666664, ans=0.125
+2024-08-26 15:04:46,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=52202.666666666664, ans=0.0
+2024-08-26 15:04:55,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=52256.0, ans=0.0
+2024-08-26 15:04:57,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52256.0, ans=0.1
+2024-08-26 15:05:06,727 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.800e+02 1.978e+02 2.376e+02 5.904e+02, threshold=3.955e+02, percent-clipped=2.0
+2024-08-26 15:05:09,373 INFO [train.py:1114] (2/4) Epoch 4, batch 2350, loss[loss=0.321, simple_loss=0.3537, pruned_loss=0.1063, ctc_loss=0.1893, over 19663.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3205, pruned_loss=0.08641, ctc_loss=0.1613, over 3863950.36 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0
+2024-08-26 15:05:10,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0
+2024-08-26 15:05:16,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=52362.666666666664, ans=0.05
+2024-08-26 15:05:30,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.66 vs. limit=10.0
+2024-08-26 15:05:33,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=52469.333333333336, ans=0.125
+2024-08-26 15:05:46,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=52576.0, ans=0.0
+2024-08-26 15:05:47,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52576.0, ans=0.1
+2024-08-26 15:06:03,791 INFO [train.py:1114] (2/4) Epoch 4, batch 2400, loss[loss=0.2952, simple_loss=0.3311, pruned_loss=0.09342, ctc_loss=0.1809, over 19289.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.323, pruned_loss=0.0875, ctc_loss=0.1631, over 3858454.32 frames. ], batch size: 71, lr: 3.05e-02, grad_scale: 32.0
+2024-08-26 15:06:10,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=52629.333333333336, ans=0.125
+2024-08-26 15:06:24,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=52682.666666666664, ans=0.0
+2024-08-26 15:06:30,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=52736.0, ans=0.125
+2024-08-26 15:06:53,223 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.824e+02 2.127e+02 2.398e+02 5.215e+02, threshold=4.254e+02, percent-clipped=1.0
+2024-08-26 15:06:55,098 INFO [train.py:1114] (2/4) Epoch 4, batch 2450, loss[loss=0.3269, simple_loss=0.3353, pruned_loss=0.1154, ctc_loss=0.2192, over 13795.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3278, pruned_loss=0.09163, ctc_loss=0.1709, over 3732216.21 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0
+2024-08-26 15:06:56,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=52896.0, ans=0.125
+2024-08-26 15:07:00,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.54 vs. limit=15.0
+2024-08-26 15:07:09,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=15.0
+2024-08-26 15:07:13,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53002.666666666664, ans=0.125
+2024-08-26 15:07:15,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=53002.666666666664, ans=0.2
+2024-08-26 15:07:17,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=53002.666666666664, ans=0.0
+2024-08-26 15:07:22,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53056.0, ans=0.1
+2024-08-26 15:07:25,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53056.0, ans=0.1
+2024-08-26 15:09:12,283 INFO [train.py:1114] (2/4) Epoch 5, batch 0, loss[loss=0.2531, simple_loss=0.3026, pruned_loss=0.07468, ctc_loss=0.1354, over 19811.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3026, pruned_loss=0.07468, ctc_loss=0.1354, over 19811.00 frames. ], batch size: 49, lr: 2.83e-02, grad_scale: 32.0
+2024-08-26 15:09:12,283 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-26 15:09:22,088 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.2289, simple_loss=0.3118, pruned_loss=0.05352, ctc_loss=0.09739, over 944034.00 frames.
+2024-08-26 15:09:22,763 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB
+2024-08-26 15:09:30,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=53104.0, ans=0.125
+2024-08-26 15:09:36,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=53157.333333333336, ans=0.0
+2024-08-26 15:09:41,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53210.666666666664, ans=0.1
+2024-08-26 15:09:49,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=53210.666666666664, ans=0.04949747468305833
+2024-08-26 15:09:59,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0
+2024-08-26 15:10:10,883 INFO [train.py:1114] (2/4) Epoch 5, batch 50, loss[loss=0.2379, simple_loss=0.2883, pruned_loss=0.0674, ctc_loss=0.1316, over 19692.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3224, pruned_loss=0.08604, ctc_loss=0.1627, over 843425.80 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0
+2024-08-26 15:10:13,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.45 vs. limit=15.0
+2024-08-26 15:10:22,335 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.804e+02 2.028e+02 2.297e+02 4.038e+02, threshold=4.056e+02, percent-clipped=0.0
+2024-08-26 15:10:47,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53530.666666666664, ans=0.125
+2024-08-26 15:10:52,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=53584.0, ans=0.125
+2024-08-26 15:11:01,252 INFO [train.py:1114] (2/4) Epoch 5, batch 100, loss[loss=0.2679, simple_loss=0.3075, pruned_loss=0.08265, ctc_loss=0.1575, over 19718.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3255, pruned_loss=0.08732, ctc_loss=0.1638, over 1498153.10 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0
+2024-08-26 15:11:08,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=53637.333333333336, ans=0.0
+2024-08-26 15:11:15,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53690.666666666664, ans=0.125
+2024-08-26 15:11:17,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5
+2024-08-26 15:11:17,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=53690.666666666664, ans=0.125
+2024-08-26 15:11:28,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=53744.0, ans=0.0
+2024-08-26 15:11:59,576 INFO [train.py:1114] (2/4) Epoch 5, batch 150, loss[loss=0.2652, simple_loss=0.3004, pruned_loss=0.08393, ctc_loss=0.1556, over 19685.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3214, pruned_loss=0.08516, ctc_loss=0.1598, over 2028120.81 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0
+2024-08-26 15:12:10,018 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.696e+02 1.862e+02 2.172e+02 3.492e+02, threshold=3.724e+02, percent-clipped=0.0
+2024-08-26 15:12:10,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53957.333333333336, ans=0.1
+2024-08-26 15:12:13,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=53957.333333333336, ans=0.2
+2024-08-26 15:12:18,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. limit=15.0
+2024-08-26 15:12:24,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=54010.666666666664, ans=0.0
+2024-08-26 15:12:27,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=54010.666666666664, ans=0.025
+2024-08-26 15:12:33,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.67 vs. limit=22.5
+2024-08-26 15:12:43,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54117.333333333336, ans=0.1
+2024-08-26 15:12:48,446 INFO [train.py:1114] (2/4) Epoch 5, batch 200, loss[loss=0.2998, simple_loss=0.3371, pruned_loss=0.09541, ctc_loss=0.1795, over 18092.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3183, pruned_loss=0.08353, ctc_loss=0.1565, over 2435948.10 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:12:50,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=54170.666666666664, ans=0.125
+2024-08-26 15:12:52,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=54170.666666666664, ans=0.2
+2024-08-26 15:12:56,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=54170.666666666664, ans=0.025
+2024-08-26 15:12:57,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54224.0, ans=0.125
+2024-08-26 15:13:12,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=54277.333333333336, ans=0.125
+2024-08-26 15:13:39,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.52 vs. limit=22.5
+2024-08-26 15:13:41,974 INFO [train.py:1114] (2/4) Epoch 5, batch 250, loss[loss=0.2819, simple_loss=0.3329, pruned_loss=0.08426, ctc_loss=0.1559, over 19349.00 frames. ], tot_loss[loss=0.2727, simple_loss=0.3177, pruned_loss=0.0828, ctc_loss=0.1552, over 2756553.01 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:13:50,499 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.685e+02 1.803e+02 2.078e+02 3.456e+02, threshold=3.607e+02, percent-clipped=0.0
+2024-08-26 15:14:01,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=54544.0, ans=0.0
+2024-08-26 15:14:14,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=54597.333333333336, ans=0.125
+2024-08-26 15:14:28,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54650.666666666664, ans=0.1
+2024-08-26 15:14:32,309 INFO [train.py:1114] (2/4) Epoch 5, batch 300, loss[loss=0.3053, simple_loss=0.3344, pruned_loss=0.1015, ctc_loss=0.1833, over 19510.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3163, pruned_loss=0.08202, ctc_loss=0.1537, over 3000980.14 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:15:13,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=54917.333333333336, ans=0.025
+2024-08-26 15:15:19,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=54917.333333333336, ans=0.0
+2024-08-26 15:15:22,166 INFO [train.py:1114] (2/4) Epoch 5, batch 350, loss[loss=0.2323, simple_loss=0.2911, pruned_loss=0.06314, ctc_loss=0.1183, over 19759.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3172, pruned_loss=0.08243, ctc_loss=0.154, over 3190823.52 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 32.0
+2024-08-26 15:15:24,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=54970.666666666664, ans=0.2
+2024-08-26 15:15:28,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54970.666666666664, ans=0.1
+2024-08-26 15:15:30,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=55024.0, ans=0.0
+2024-08-26 15:15:31,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55024.0, ans=0.125
+2024-08-26 15:15:31,768 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.717e+02 1.933e+02 2.233e+02 3.797e+02, threshold=3.865e+02, percent-clipped=1.0
+2024-08-26 15:15:36,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=55024.0, ans=0.025
+2024-08-26 15:15:44,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=55077.333333333336, ans=0.07
+2024-08-26 15:16:15,652 INFO [train.py:1114] (2/4) Epoch 5, batch 400, loss[loss=0.2609, simple_loss=0.3212, pruned_loss=0.07224, ctc_loss=0.1406, over 19486.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3172, pruned_loss=0.08242, ctc_loss=0.1539, over 3342865.79 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0
+2024-08-26 15:16:20,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=55237.333333333336, ans=0.0
+2024-08-26 15:16:21,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=55237.333333333336, ans=0.0
+2024-08-26 15:16:41,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=55344.0, ans=0.125
+2024-08-26 15:16:47,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55397.333333333336, ans=0.1
+2024-08-26 15:17:01,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=55450.666666666664, ans=0.125
+2024-08-26 15:17:07,108 INFO [train.py:1114] (2/4) Epoch 5, batch 450, loss[loss=0.27, simple_loss=0.3265, pruned_loss=0.07775, ctc_loss=0.1449, over 19619.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3171, pruned_loss=0.08229, ctc_loss=0.1536, over 3452048.21 frames. ], batch size: 55, lr: 2.79e-02, grad_scale: 16.0
+2024-08-26 15:17:07,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=55504.0, ans=0.0
+2024-08-26 15:17:17,440 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.642e+02 1.899e+02 2.179e+02 3.523e+02, threshold=3.798e+02, percent-clipped=0.0
+2024-08-26 15:17:17,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55557.333333333336, ans=0.0
+2024-08-26 15:17:23,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55557.333333333336, ans=0.1
+2024-08-26 15:17:27,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55610.666666666664, ans=0.125
+2024-08-26 15:17:45,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=55664.0, ans=0.125
+2024-08-26 15:17:46,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.40 vs. limit=15.0
+2024-08-26 15:18:04,475 INFO [train.py:1114] (2/4) Epoch 5, batch 500, loss[loss=0.2799, simple_loss=0.3299, pruned_loss=0.0849, ctc_loss=0.1504, over 19677.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3157, pruned_loss=0.0814, ctc_loss=0.152, over 3548439.66 frames. ], batch size: 63, lr: 2.79e-02, grad_scale: 16.0
+2024-08-26 15:18:10,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55770.666666666664, ans=0.0
+2024-08-26 15:18:31,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=15.0
+2024-08-26 15:18:37,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=55877.333333333336, ans=0.125
+2024-08-26 15:18:42,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.75 vs. limit=5.0
+2024-08-26 15:18:52,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0
+2024-08-26 15:18:55,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=55930.666666666664, ans=0.0
+2024-08-26 15:18:57,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=55930.666666666664, ans=0.125
+2024-08-26 15:19:22,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.81 vs. limit=15.0
+2024-08-26 15:19:46,992 INFO [train.py:1114] (2/4) Epoch 5, batch 550, loss[loss=0.2926, simple_loss=0.3322, pruned_loss=0.09178, ctc_loss=0.1738, over 19298.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3155, pruned_loss=0.08138, ctc_loss=0.1522, over 3609573.60 frames. ], batch size: 71, lr: 2.78e-02, grad_scale: 16.0
+2024-08-26 15:19:56,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=56037.333333333336, ans=0.0
+2024-08-26 15:20:04,959 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.339e+02 1.676e+02 1.860e+02 2.053e+02 4.118e+02, threshold=3.720e+02, percent-clipped=1.0
+2024-08-26 15:20:18,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=56144.0, ans=0.0
+2024-08-26 15:20:51,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=56250.666666666664, ans=0.0
+2024-08-26 15:20:56,219 INFO [train.py:1114] (2/4) Epoch 5, batch 600, loss[loss=0.2876, simple_loss=0.3417, pruned_loss=0.0847, ctc_loss=0.1603, over 19362.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3163, pruned_loss=0.08147, ctc_loss=0.1522, over 3667641.63 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 16.0
+2024-08-26 15:21:04,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=56304.0, ans=0.125
+2024-08-26 15:21:09,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=56357.333333333336, ans=0.125
+2024-08-26 15:21:21,642 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:21:26,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=56410.666666666664, ans=0.125
+2024-08-26 15:21:26,516 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:21:44,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=56517.333333333336, ans=0.2
+2024-08-26 15:21:45,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.50 vs. limit=10.0
+2024-08-26 15:21:48,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.85 vs. limit=10.0
+2024-08-26 15:21:49,360 INFO [train.py:1114] (2/4) Epoch 5, batch 650, loss[loss=0.272, simple_loss=0.3179, pruned_loss=0.08163, ctc_loss=0.1574, over 19763.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3154, pruned_loss=0.08119, ctc_loss=0.1519, over 3717753.44 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:21:53,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56570.666666666664, ans=0.1
+2024-08-26 15:21:59,894 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.659e+02 1.803e+02 2.095e+02 3.596e+02, threshold=3.607e+02, percent-clipped=0.0
+2024-08-26 15:22:02,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=56624.0, ans=0.0
+2024-08-26 15:22:26,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56730.666666666664, ans=0.125
+2024-08-26 15:22:39,297 INFO [train.py:1114] (2/4) Epoch 5, batch 700, loss[loss=0.2524, simple_loss=0.3014, pruned_loss=0.07272, ctc_loss=0.1449, over 19738.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3153, pruned_loss=0.08069, ctc_loss=0.151, over 3749019.41 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:22:51,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.49 vs. limit=12.0
+2024-08-26 15:22:51,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=56890.666666666664, ans=0.2
+2024-08-26 15:23:05,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=56944.0, ans=0.5
+2024-08-26 15:23:15,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=56997.333333333336, ans=0.2
+2024-08-26 15:23:17,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=57050.666666666664, ans=0.125
+2024-08-26 15:23:29,317 INFO [train.py:1114] (2/4) Epoch 5, batch 750, loss[loss=0.2694, simple_loss=0.3227, pruned_loss=0.07811, ctc_loss=0.1499, over 19513.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3152, pruned_loss=0.08107, ctc_loss=0.1515, over 3774483.61 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:23:33,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=57104.0, ans=0.0
+2024-08-26 15:23:37,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=57104.0, ans=0.125
+2024-08-26 15:23:39,770 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.732e+02 1.957e+02 2.375e+02 6.184e+02, threshold=3.914e+02, percent-clipped=3.0
+2024-08-26 15:23:42,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=57157.333333333336, ans=0.025
+2024-08-26 15:23:57,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=57264.0, ans=0.2
+2024-08-26 15:24:13,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.72 vs. limit=15.0
+2024-08-26 15:24:19,566 INFO [train.py:1114] (2/4) Epoch 5, batch 800, loss[loss=0.2452, simple_loss=0.2879, pruned_loss=0.07318, ctc_loss=0.1403, over 19784.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3149, pruned_loss=0.08106, ctc_loss=0.1515, over 3797225.22 frames. ], batch size: 49, lr: 2.76e-02, grad_scale: 32.0
+2024-08-26 15:24:42,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=57477.333333333336, ans=0.0
+2024-08-26 15:24:44,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=57477.333333333336, ans=0.125
+2024-08-26 15:24:51,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=12.0
+2024-08-26 15:24:56,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.63 vs. limit=22.5
+2024-08-26 15:25:10,622 INFO [train.py:1114] (2/4) Epoch 5, batch 850, loss[loss=0.2653, simple_loss=0.3196, pruned_loss=0.07573, ctc_loss=0.1489, over 19643.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.314, pruned_loss=0.08063, ctc_loss=0.1507, over 3816385.78 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0
+2024-08-26 15:25:16,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.32 vs. limit=22.5
+2024-08-26 15:25:24,569 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.744e+02 1.971e+02 2.331e+02 4.591e+02, threshold=3.942e+02, percent-clipped=1.0
+2024-08-26 15:25:45,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.00 vs. limit=15.0
+2024-08-26 15:25:51,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=57797.333333333336, ans=0.0
+2024-08-26 15:25:56,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=57850.666666666664, ans=0.0
+2024-08-26 15:25:56,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.05 vs. limit=15.0
+2024-08-26 15:26:07,577 INFO [train.py:1114] (2/4) Epoch 5, batch 900, loss[loss=0.2314, simple_loss=0.2747, pruned_loss=0.0684, ctc_loss=0.128, over 19431.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3144, pruned_loss=0.08117, ctc_loss=0.1516, over 3820078.46 frames. ], batch size: 48, lr: 2.75e-02, grad_scale: 32.0
+2024-08-26 15:26:07,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=57904.0, ans=0.0
+2024-08-26 15:26:12,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=57904.0, ans=10.0
+2024-08-26 15:26:16,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.63 vs. limit=15.0
+2024-08-26 15:26:26,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57957.333333333336, ans=0.125
+2024-08-26 15:26:33,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=58010.666666666664, ans=0.125
+2024-08-26 15:26:41,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=15.0
+2024-08-26 15:26:57,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=58170.666666666664, ans=0.125
+2024-08-26 15:26:58,232 INFO [train.py:1114] (2/4) Epoch 5, batch 950, loss[loss=0.2288, simple_loss=0.283, pruned_loss=0.06396, ctc_loss=0.1169, over 19505.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3142, pruned_loss=0.08103, ctc_loss=0.1515, over 3818664.89 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0
+2024-08-26 15:26:58,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=58170.666666666664, ans=0.0
+2024-08-26 15:27:02,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=58170.666666666664, ans=0.025
+2024-08-26 15:27:04,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=58170.666666666664, ans=0.125
+2024-08-26 15:27:04,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58170.666666666664, ans=0.1
+2024-08-26 15:27:11,441 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.648e+02 1.859e+02 2.135e+02 3.098e+02, threshold=3.718e+02, percent-clipped=0.0
+2024-08-26 15:27:20,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=58277.333333333336, ans=0.09899494936611666
+2024-08-26 15:27:30,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58330.666666666664, ans=0.0
+2024-08-26 15:27:33,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=58330.666666666664, ans=0.125
+2024-08-26 15:27:42,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=58384.0, ans=0.125
+2024-08-26 15:27:43,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=58384.0, ans=0.0
+2024-08-26 15:27:44,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=58384.0, ans=0.1
+2024-08-26 15:27:48,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=58384.0, ans=0.125
+2024-08-26 15:27:49,898 INFO [train.py:1114] (2/4) Epoch 5, batch 1000, loss[loss=0.2508, simple_loss=0.3059, pruned_loss=0.07063, ctc_loss=0.1363, over 19850.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3157, pruned_loss=0.08181, ctc_loss=0.1529, over 3815651.64 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0
+2024-08-26 15:28:05,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=58490.666666666664, ans=0.0
+2024-08-26 15:28:40,051 INFO [train.py:1114] (2/4) Epoch 5, batch 1050, loss[loss=0.2735, simple_loss=0.3256, pruned_loss=0.08117, ctc_loss=0.1474, over 19839.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3148, pruned_loss=0.08134, ctc_loss=0.1521, over 3822385.69 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0
+2024-08-26 15:28:43,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58704.0, ans=0.125
+2024-08-26 15:28:45,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=58704.0, ans=0.125
+2024-08-26 15:28:50,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.680e+02 1.893e+02 2.161e+02 3.731e+02, threshold=3.786e+02, percent-clipped=1.0
+2024-08-26 15:28:51,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=58757.333333333336, ans=0.2
+2024-08-26 15:29:08,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.22 vs. limit=15.0
+2024-08-26 15:29:16,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=58864.0, ans=10.0
+2024-08-26 15:29:19,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=58864.0, ans=0.125
+2024-08-26 15:29:28,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=58917.333333333336, ans=0.025
+2024-08-26 15:29:33,657 INFO [train.py:1114] (2/4) Epoch 5, batch 1100, loss[loss=0.2311, simple_loss=0.2905, pruned_loss=0.06292, ctc_loss=0.1147, over 19577.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3141, pruned_loss=0.08085, ctc_loss=0.1514, over 3830046.76 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 16.0
+2024-08-26 15:29:42,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=59024.0, ans=0.5
+2024-08-26 15:29:45,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=59024.0, ans=0.2
+2024-08-26 15:29:50,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0
+2024-08-26 15:29:56,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=59077.333333333336, ans=0.125
+2024-08-26 15:30:06,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=59130.666666666664, ans=15.0
+2024-08-26 15:30:13,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=59130.666666666664, ans=0.125
+2024-08-26 15:30:16,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=15.0
+2024-08-26 15:30:24,315 INFO [train.py:1114] (2/4) Epoch 5, batch 1150, loss[loss=0.2596, simple_loss=0.3108, pruned_loss=0.07567, ctc_loss=0.1428, over 19601.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3139, pruned_loss=0.08057, ctc_loss=0.1509, over 3829557.59 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 16.0
+2024-08-26 15:30:27,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=59237.333333333336, ans=0.0
+2024-08-26 15:30:35,930 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.591e+02 1.744e+02 2.042e+02 4.394e+02, threshold=3.489e+02, percent-clipped=2.0
+2024-08-26 15:30:37,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=59290.666666666664, ans=0.125
+2024-08-26 15:30:53,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59397.333333333336, ans=0.2
+2024-08-26 15:30:57,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.23 vs. limit=15.0
+2024-08-26 15:31:15,277 INFO [train.py:1114] (2/4) Epoch 5, batch 1200, loss[loss=0.2834, simple_loss=0.3324, pruned_loss=0.08593, ctc_loss=0.1566, over 19839.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3148, pruned_loss=0.08078, ctc_loss=0.1517, over 3825952.48 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0
+2024-08-26 15:31:25,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=59557.333333333336, ans=0.0
+2024-08-26 15:31:40,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=59610.666666666664, ans=0.2
+2024-08-26 15:31:41,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59610.666666666664, ans=0.125
+2024-08-26 15:32:06,258 INFO [train.py:1114] (2/4) Epoch 5, batch 1250, loss[loss=0.2748, simple_loss=0.3194, pruned_loss=0.08331, ctc_loss=0.1588, over 19528.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3152, pruned_loss=0.08073, ctc_loss=0.1515, over 3843642.12 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0
+2024-08-26 15:32:18,025 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.635e+02 1.798e+02 2.001e+02 4.301e+02, threshold=3.596e+02, percent-clipped=1.0
+2024-08-26 15:32:21,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.12 vs. limit=15.0
+2024-08-26 15:32:24,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=59877.333333333336, ans=0.07
+2024-08-26 15:32:26,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=59877.333333333336, ans=0.0
+2024-08-26 15:32:28,694 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.62 vs. limit=15.0
+2024-08-26 15:32:34,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.50 vs. limit=15.0
+2024-08-26 15:32:54,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=59984.0, ans=0.125
+2024-08-26 15:32:56,445 INFO [train.py:1114] (2/4) Epoch 5, batch 1300, loss[loss=0.2843, simple_loss=0.326, pruned_loss=0.08667, ctc_loss=0.1733, over 18890.00 frames. 
], tot_loss[loss=0.2679, simple_loss=0.3144, pruned_loss=0.08047, ctc_loss=0.151, over 3846186.35 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0 +2024-08-26 15:33:01,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=60037.333333333336, ans=0.07 +2024-08-26 15:33:01,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.51 vs. limit=8.0 +2024-08-26 15:33:43,912 INFO [train.py:1114] (2/4) Epoch 5, batch 1350, loss[loss=0.2573, simple_loss=0.3176, pruned_loss=0.07125, ctc_loss=0.1361, over 19786.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3135, pruned_loss=0.07976, ctc_loss=0.1495, over 3857362.45 frames. ], batch size: 54, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:33:46,967 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:33:55,384 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.610e+02 1.752e+02 1.989e+02 4.527e+02, threshold=3.503e+02, percent-clipped=1.0 +2024-08-26 15:34:00,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=60357.333333333336, ans=0.2 +2024-08-26 15:34:01,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.98 vs. limit=15.0 +2024-08-26 15:34:02,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-08-26 15:34:10,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=60410.666666666664, ans=0.125 +2024-08-26 15:34:17,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=60464.0, ans=0.125 +2024-08-26 15:34:19,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=60464.0, ans=0.125 +2024-08-26 15:34:21,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=60464.0, ans=0.125 +2024-08-26 15:34:34,576 INFO [train.py:1114] (2/4) Epoch 5, batch 1400, loss[loss=0.2319, simple_loss=0.2783, pruned_loss=0.06752, ctc_loss=0.1263, over 19671.00 frames. ], tot_loss[loss=0.266, simple_loss=0.313, pruned_loss=0.07963, ctc_loss=0.1492, over 3864150.13 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:34:47,082 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:34:53,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=60677.333333333336, ans=0.125 +2024-08-26 15:35:14,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-08-26 15:35:27,704 INFO [train.py:1114] (2/4) Epoch 5, batch 1450, loss[loss=0.2764, simple_loss=0.3277, pruned_loss=0.08245, ctc_loss=0.1504, over 19676.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3138, pruned_loss=0.07987, ctc_loss=0.1495, over 3862100.12 frames. 
], batch size: 63, lr: 2.71e-02, grad_scale: 32.0 +2024-08-26 15:35:42,509 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.680e+02 1.820e+02 2.123e+02 3.172e+02, threshold=3.639e+02, percent-clipped=0.0 +2024-08-26 15:35:43,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60890.666666666664, ans=0.1 +2024-08-26 15:35:45,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=60890.666666666664, ans=0.125 +2024-08-26 15:36:03,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=60997.333333333336, ans=0.0 +2024-08-26 15:36:06,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.32 vs. limit=15.0 +2024-08-26 15:36:19,844 INFO [train.py:1114] (2/4) Epoch 5, batch 1500, loss[loss=0.2893, simple_loss=0.331, pruned_loss=0.09055, ctc_loss=0.1664, over 19586.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3141, pruned_loss=0.08003, ctc_loss=0.1498, over 3861725.35 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0 +2024-08-26 15:36:25,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=61104.0, ans=0.2 +2024-08-26 15:36:30,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.54 vs. limit=15.0 +2024-08-26 15:36:31,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=61157.333333333336, ans=0.95 +2024-08-26 15:36:45,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61210.666666666664, ans=0.1 +2024-08-26 15:36:58,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-08-26 15:37:09,976 INFO [train.py:1114] (2/4) Epoch 5, batch 1550, loss[loss=0.2839, simple_loss=0.3312, pruned_loss=0.08654, ctc_loss=0.159, over 19584.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3142, pruned_loss=0.08029, ctc_loss=0.1501, over 3847400.37 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0 +2024-08-26 15:37:11,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.35 vs. limit=15.0 +2024-08-26 15:37:15,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.80 vs. 
limit=22.5 +2024-08-26 15:37:17,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61370.666666666664, ans=0.125 +2024-08-26 15:37:22,472 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.752e+02 1.975e+02 2.269e+02 3.644e+02, threshold=3.951e+02, percent-clipped=1.0 +2024-08-26 15:37:24,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=61424.0, ans=0.125 +2024-08-26 15:37:34,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=61477.333333333336, ans=0.2 +2024-08-26 15:37:55,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=12.0 +2024-08-26 15:38:03,694 INFO [train.py:1114] (2/4) Epoch 5, batch 1600, loss[loss=0.2626, simple_loss=0.3109, pruned_loss=0.07877, ctc_loss=0.1419, over 19851.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3135, pruned_loss=0.08002, ctc_loss=0.1494, over 3835621.73 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:38:22,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61637.333333333336, ans=0.125 +2024-08-26 15:38:25,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=12.0 +2024-08-26 15:38:26,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61690.666666666664, ans=0.1 +2024-08-26 15:39:04,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=61797.333333333336, ans=0.0 +2024-08-26 15:39:15,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=61850.666666666664, ans=0.125 +2024-08-26 15:39:18,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=61904.0, ans=0.125 +2024-08-26 15:39:19,225 INFO [train.py:1114] (2/4) Epoch 5, batch 1650, loss[loss=0.2689, simple_loss=0.3247, pruned_loss=0.07784, ctc_loss=0.1438, over 19673.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3136, pruned_loss=0.08003, ctc_loss=0.1497, over 3832831.11 frames. 
], batch size: 59, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:39:22,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=61904.0, ans=0.2 +2024-08-26 15:39:31,765 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.578e+02 1.738e+02 2.103e+02 3.628e+02, threshold=3.475e+02, percent-clipped=0.0 +2024-08-26 15:39:32,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61957.333333333336, ans=0.125 +2024-08-26 15:39:41,386 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:39:45,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=62010.666666666664, ans=0.125 +2024-08-26 15:39:47,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=62064.0, ans=0.125 +2024-08-26 15:39:49,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62064.0, ans=0.125 +2024-08-26 15:39:51,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=62064.0, ans=0.0 +2024-08-26 15:40:00,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=62117.333333333336, ans=0.0 +2024-08-26 15:40:08,741 INFO [train.py:1114] (2/4) Epoch 5, batch 1700, loss[loss=0.238, simple_loss=0.2825, pruned_loss=0.07042, ctc_loss=0.1313, over 19662.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3127, pruned_loss=0.0792, ctc_loss=0.1483, over 3847310.42 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 32.0 +2024-08-26 15:40:22,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.10 vs. limit=15.0 +2024-08-26 15:40:31,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=62277.333333333336, ans=0.125 +2024-08-26 15:40:50,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=62384.0, ans=0.025 +2024-08-26 15:40:53,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.18 vs. limit=12.0 +2024-08-26 15:40:54,053 INFO [train.py:1114] (2/4) Epoch 5, batch 1750, loss[loss=0.2251, simple_loss=0.2743, pruned_loss=0.06431, ctc_loss=0.1181, over 19603.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3119, pruned_loss=0.07869, ctc_loss=0.1474, over 3851911.58 frames. 
], batch size: 45, lr: 2.68e-02, grad_scale: 32.0 +2024-08-26 15:40:56,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=62437.333333333336, ans=0.0 +2024-08-26 15:41:01,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=62437.333333333336, ans=0.125 +2024-08-26 15:41:05,740 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.599e+02 1.842e+02 2.097e+02 3.191e+02, threshold=3.683e+02, percent-clipped=0.0 +2024-08-26 15:41:09,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.88 vs. limit=15.0 +2024-08-26 15:41:16,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=62544.0, ans=0.0 +2024-08-26 15:41:25,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.82 vs. limit=22.5 +2024-08-26 15:41:39,328 INFO [train.py:1114] (2/4) Epoch 5, batch 1800, loss[loss=0.2591, simple_loss=0.3178, pruned_loss=0.07313, ctc_loss=0.1356, over 19623.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3124, pruned_loss=0.07884, ctc_loss=0.1475, over 3853742.09 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0 +2024-08-26 15:41:40,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.67 vs. limit=10.0 +2024-08-26 15:41:44,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=62704.0, ans=0.125 +2024-08-26 15:41:56,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62810.666666666664, ans=0.125 +2024-08-26 15:41:57,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.31 vs. limit=10.0 +2024-08-26 15:42:13,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=62864.0, ans=0.125 +2024-08-26 15:42:19,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=62917.333333333336, ans=0.125 +2024-08-26 15:42:21,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.25 vs. limit=22.5 +2024-08-26 15:42:24,238 INFO [train.py:1114] (2/4) Epoch 5, batch 1850, loss[loss=0.2722, simple_loss=0.3299, pruned_loss=0.07757, ctc_loss=0.1483, over 19584.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3118, pruned_loss=0.07836, ctc_loss=0.1466, over 3856585.11 frames. 
], batch size: 57, lr: 2.67e-02, grad_scale: 32.0 +2024-08-26 15:42:30,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62970.666666666664, ans=0.1 +2024-08-26 15:42:35,855 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.605e+02 1.818e+02 2.016e+02 3.945e+02, threshold=3.637e+02, percent-clipped=1.0 +2024-08-26 15:42:36,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=15.0 +2024-08-26 15:42:38,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=63024.0, ans=0.0 +2024-08-26 15:43:16,394 INFO [train.py:1114] (2/4) Epoch 5, batch 1900, loss[loss=0.266, simple_loss=0.3167, pruned_loss=0.07749, ctc_loss=0.1508, over 19653.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3127, pruned_loss=0.07857, ctc_loss=0.1469, over 3860612.57 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 16.0 +2024-08-26 15:43:17,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=63237.333333333336, ans=0.2 +2024-08-26 15:43:20,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63237.333333333336, ans=0.1 +2024-08-26 15:43:26,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=63237.333333333336, ans=0.025 +2024-08-26 15:43:37,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=63344.0, ans=0.125 +2024-08-26 15:43:55,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=63450.666666666664, ans=0.125 +2024-08-26 15:43:55,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=63450.666666666664, ans=0.125 +2024-08-26 15:44:05,664 INFO [train.py:1114] (2/4) Epoch 5, batch 1950, loss[loss=0.2533, simple_loss=0.3059, pruned_loss=0.07344, ctc_loss=0.1344, over 19587.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3133, pruned_loss=0.07858, ctc_loss=0.147, over 3869917.03 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 16.0 +2024-08-26 15:44:11,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=63504.0, ans=0.0 +2024-08-26 15:44:12,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=63504.0, ans=0.2 +2024-08-26 15:44:20,098 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.677e+02 1.824e+02 1.963e+02 3.212e+02, threshold=3.647e+02, percent-clipped=0.0 +2024-08-26 15:44:22,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=63557.333333333336, ans=0.125 +2024-08-26 15:44:32,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=63610.666666666664, ans=0.125 +2024-08-26 15:44:32,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=63610.666666666664, ans=0.125 +2024-08-26 15:44:34,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=63664.0, ans=0.125 +2024-08-26 15:44:52,293 INFO [train.py:1114] (2/4) Epoch 5, batch 2000, loss[loss=0.2266, simple_loss=0.2736, pruned_loss=0.06484, ctc_loss=0.125, over 19684.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3143, pruned_loss=0.07944, ctc_loss=0.1484, over 3853024.26 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0 +2024-08-26 15:44:57,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63770.666666666664, ans=0.125 +2024-08-26 15:45:09,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.57 vs. limit=10.0 +2024-08-26 15:45:10,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=63877.333333333336, ans=0.125 +2024-08-26 15:45:16,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=63877.333333333336, ans=0.125 +2024-08-26 15:45:16,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=63877.333333333336, ans=0.0 +2024-08-26 15:45:23,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=63930.666666666664, ans=0.2 +2024-08-26 15:45:24,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=63930.666666666664, ans=0.0 +2024-08-26 15:45:42,245 INFO [train.py:1114] (2/4) Epoch 5, batch 2050, loss[loss=0.2409, simple_loss=0.2897, pruned_loss=0.07013, ctc_loss=0.1293, over 19738.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3126, pruned_loss=0.07878, ctc_loss=0.1471, over 3850417.08 frames. 
], batch size: 47, lr: 2.66e-02, grad_scale: 32.0 +2024-08-26 15:45:54,606 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.624e+02 1.773e+02 2.077e+02 3.322e+02, threshold=3.546e+02, percent-clipped=0.0 +2024-08-26 15:46:03,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=64144.0, ans=0.07 +2024-08-26 15:46:04,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=64144.0, ans=0.95 +2024-08-26 15:46:23,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=64250.666666666664, ans=0.95 +2024-08-26 15:46:26,367 INFO [train.py:1114] (2/4) Epoch 5, batch 2100, loss[loss=0.2524, simple_loss=0.3054, pruned_loss=0.07201, ctc_loss=0.1381, over 19770.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3118, pruned_loss=0.07816, ctc_loss=0.1459, over 3858122.29 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0 +2024-08-26 15:46:30,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=64304.0, ans=0.035 +2024-08-26 15:46:30,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=64304.0, ans=0.125 +2024-08-26 15:46:31,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=15.0 +2024-08-26 15:46:32,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=64304.0, ans=0.1 +2024-08-26 15:46:32,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.55 vs. limit=15.0 +2024-08-26 15:46:38,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=64357.333333333336, ans=0.0 +2024-08-26 15:46:40,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64357.333333333336, ans=0.125 +2024-08-26 15:46:52,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.83 vs. limit=15.0 +2024-08-26 15:47:19,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=64517.333333333336, ans=0.0 +2024-08-26 15:47:20,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=64517.333333333336, ans=0.0 +2024-08-26 15:47:23,493 INFO [train.py:1114] (2/4) Epoch 5, batch 2150, loss[loss=0.2602, simple_loss=0.312, pruned_loss=0.07612, ctc_loss=0.1405, over 19854.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.311, pruned_loss=0.07776, ctc_loss=0.1452, over 3869402.62 frames. 
], batch size: 52, lr: 2.65e-02, grad_scale: 32.0 +2024-08-26 15:47:27,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=64570.666666666664, ans=0.0 +2024-08-26 15:47:28,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=64570.666666666664, ans=0.125 +2024-08-26 15:47:35,825 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.599e+02 1.757e+02 2.074e+02 2.995e+02, threshold=3.513e+02, percent-clipped=0.0 +2024-08-26 15:47:41,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=64677.333333333336, ans=0.125 +2024-08-26 15:47:54,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64730.666666666664, ans=0.0 +2024-08-26 15:47:54,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=64730.666666666664, ans=0.2 +2024-08-26 15:48:07,247 INFO [train.py:1114] (2/4) Epoch 5, batch 2200, loss[loss=0.2861, simple_loss=0.3324, pruned_loss=0.08656, ctc_loss=0.1664, over 19582.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3109, pruned_loss=0.07758, ctc_loss=0.1449, over 3867300.66 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0 +2024-08-26 15:48:11,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=64837.333333333336, ans=0.0 +2024-08-26 15:48:11,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=64837.333333333336, ans=0.2 +2024-08-26 15:48:19,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=64890.666666666664, ans=0.125 +2024-08-26 15:48:30,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.82 vs. limit=10.0 +2024-08-26 15:48:52,498 INFO [train.py:1114] (2/4) Epoch 5, batch 2250, loss[loss=0.2699, simple_loss=0.3268, pruned_loss=0.07737, ctc_loss=0.1455, over 19599.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3109, pruned_loss=0.07741, ctc_loss=0.1446, over 3867046.89 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 16.0 +2024-08-26 15:48:55,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65104.0, ans=0.125 +2024-08-26 15:48:57,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65104.0, ans=0.1 +2024-08-26 15:49:05,747 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.721e+02 2.056e+02 2.448e+02 6.138e+02, threshold=4.112e+02, percent-clipped=3.0 +2024-08-26 15:49:10,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65210.666666666664, ans=0.125 +2024-08-26 15:49:26,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.74 vs. 
limit=15.0 +2024-08-26 15:49:33,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=65317.333333333336, ans=0.0 +2024-08-26 15:49:36,474 INFO [train.py:1114] (2/4) Epoch 5, batch 2300, loss[loss=0.2251, simple_loss=0.2826, pruned_loss=0.0613, ctc_loss=0.1124, over 19505.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3095, pruned_loss=0.07726, ctc_loss=0.1442, over 3860953.91 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 16.0 +2024-08-26 15:49:40,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0 +2024-08-26 15:49:55,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=65477.333333333336, ans=0.2 +2024-08-26 15:50:02,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=65477.333333333336, ans=0.125 +2024-08-26 15:50:07,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.11 vs. limit=10.0 +2024-08-26 15:50:11,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=65530.666666666664, ans=0.2 +2024-08-26 15:50:23,060 INFO [train.py:1114] (2/4) Epoch 5, batch 2350, loss[loss=0.292, simple_loss=0.3418, pruned_loss=0.08822, ctc_loss=0.1646, over 19668.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3099, pruned_loss=0.07764, ctc_loss=0.1447, over 3863446.28 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 16.0 +2024-08-26 15:50:27,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=65637.33333333333, ans=0.125 +2024-08-26 15:50:31,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=15.0 +2024-08-26 15:50:36,072 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.568e+02 1.781e+02 2.033e+02 3.218e+02, threshold=3.561e+02, percent-clipped=0.0 +2024-08-26 15:50:38,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65690.66666666667, ans=0.1 +2024-08-26 15:50:53,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65797.33333333333, ans=0.125 +2024-08-26 15:50:54,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65797.33333333333, ans=0.1 +2024-08-26 15:51:07,083 INFO [train.py:1114] (2/4) Epoch 5, batch 2400, loss[loss=0.2991, simple_loss=0.3423, pruned_loss=0.09362, ctc_loss=0.1714, over 19291.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3129, pruned_loss=0.07902, ctc_loss=0.147, over 3857528.14 frames. 
], batch size: 71, lr: 2.63e-02, grad_scale: 32.0 +2024-08-26 15:51:16,931 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:51:17,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=65957.33333333333, ans=0.0 +2024-08-26 15:51:25,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=66010.66666666667, ans=0.125 +2024-08-26 15:51:49,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=66117.33333333333, ans=0.2 +2024-08-26 15:51:52,367 INFO [train.py:1114] (2/4) Epoch 5, batch 2450, loss[loss=0.332, simple_loss=0.351, pruned_loss=0.1135, ctc_loss=0.215, over 14018.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3181, pruned_loss=0.08339, ctc_loss=0.1554, over 3729975.43 frames. ], batch size: 141, lr: 2.63e-02, grad_scale: 16.0 +2024-08-26 15:51:52,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66170.66666666667, ans=0.1 +2024-08-26 15:52:03,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66224.0, ans=0.1 +2024-08-26 15:52:05,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.26 vs. limit=10.0 +2024-08-26 15:52:07,305 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.716e+02 1.912e+02 2.213e+02 5.978e+02, threshold=3.825e+02, percent-clipped=3.0 +2024-08-26 15:52:18,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=66277.33333333333, ans=0.2 +2024-08-26 15:52:22,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=66330.66666666667, ans=0.0 +2024-08-26 15:52:25,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=66330.66666666667, ans=0.125 +2024-08-26 15:53:42,761 INFO [train.py:1114] (2/4) Epoch 6, batch 0, loss[loss=0.2499, simple_loss=0.2954, pruned_loss=0.07412, ctc_loss=0.1402, over 19792.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.2954, pruned_loss=0.07412, ctc_loss=0.1402, over 19792.00 frames. ], batch size: 49, lr: 2.45e-02, grad_scale: 32.0 +2024-08-26 15:53:42,762 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 15:54:26,133 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.2162, simple_loss=0.3022, pruned_loss=0.04785, ctc_loss=0.08613, over 944034.00 frames. +2024-08-26 15:54:26,134 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB +2024-08-26 15:54:29,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.75 vs. limit=10.0 +2024-08-26 15:54:33,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.97 vs. 
limit=6.0 +2024-08-26 15:54:51,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=66485.33333333333, ans=0.125 +2024-08-26 15:54:56,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=66538.66666666667, ans=0.125 +2024-08-26 15:55:13,594 INFO [train.py:1114] (2/4) Epoch 6, batch 50, loss[loss=0.2187, simple_loss=0.279, pruned_loss=0.05684, ctc_loss=0.1119, over 19723.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3127, pruned_loss=0.07816, ctc_loss=0.1461, over 845158.26 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-26 15:55:14,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=66645.33333333333, ans=0.0 +2024-08-26 15:55:31,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=66752.0, ans=0.125 +2024-08-26 15:55:39,181 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.607e+02 1.759e+02 1.997e+02 3.496e+02, threshold=3.518e+02, percent-clipped=0.0 +2024-08-26 15:55:47,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0 +2024-08-26 15:55:49,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.48 vs. limit=15.0 +2024-08-26 15:55:49,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66805.33333333333, ans=0.125 +2024-08-26 15:55:54,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=66858.66666666667, ans=0.125 +2024-08-26 15:56:03,169 INFO [train.py:1114] (2/4) Epoch 6, batch 100, loss[loss=0.2477, simple_loss=0.2973, pruned_loss=0.07142, ctc_loss=0.1381, over 19707.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3138, pruned_loss=0.0781, ctc_loss=0.147, over 1498722.63 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0 +2024-08-26 15:56:24,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-08-26 15:56:43,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67072.0, ans=0.125 +2024-08-26 15:56:45,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=67072.0, ans=0.125 +2024-08-26 15:56:54,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67125.33333333333, ans=0.125 +2024-08-26 15:56:57,272 INFO [train.py:1114] (2/4) Epoch 6, batch 150, loss[loss=0.2435, simple_loss=0.2843, pruned_loss=0.07518, ctc_loss=0.1307, over 19740.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3111, pruned_loss=0.07702, ctc_loss=0.144, over 2027884.99 frames. 
], batch size: 47, lr: 2.44e-02, grad_scale: 32.0 +2024-08-26 15:57:10,656 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 15:57:22,725 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.333e+02 1.584e+02 1.709e+02 1.986e+02 2.973e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-26 15:57:27,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=67338.66666666667, ans=0.125 +2024-08-26 15:57:28,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=12.0 +2024-08-26 15:57:37,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67392.0, ans=0.125 +2024-08-26 15:57:42,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67392.0, ans=0.1 +2024-08-26 15:57:44,424 INFO [train.py:1114] (2/4) Epoch 6, batch 200, loss[loss=0.2855, simple_loss=0.3304, pruned_loss=0.08694, ctc_loss=0.1666, over 18250.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3103, pruned_loss=0.07734, ctc_loss=0.1443, over 2435615.73 frames. ], batch size: 85, lr: 2.43e-02, grad_scale: 32.0 +2024-08-26 15:57:54,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0 +2024-08-26 15:58:04,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67552.0, ans=0.1 +2024-08-26 15:58:11,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=67552.0, ans=0.2 +2024-08-26 15:58:28,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67658.66666666667, ans=0.125 +2024-08-26 15:58:28,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67658.66666666667, ans=0.125 +2024-08-26 15:58:36,026 INFO [train.py:1114] (2/4) Epoch 6, batch 250, loss[loss=0.2711, simple_loss=0.325, pruned_loss=0.07893, ctc_loss=0.1481, over 19388.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3089, pruned_loss=0.07633, ctc_loss=0.1424, over 2755871.45 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0 +2024-08-26 15:59:07,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.12 vs. limit=15.0 +2024-08-26 15:59:10,418 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.703e+02 1.915e+02 3.590e+02, threshold=3.407e+02, percent-clipped=1.0 +2024-08-26 15:59:12,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67872.0, ans=0.125 +2024-08-26 15:59:17,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67872.0, ans=0.125 +2024-08-26 15:59:32,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.22 vs. 
limit=8.0 +2024-08-26 15:59:34,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67978.66666666667, ans=0.125 +2024-08-26 15:59:35,249 INFO [train.py:1114] (2/4) Epoch 6, batch 300, loss[loss=0.2547, simple_loss=0.3144, pruned_loss=0.07071, ctc_loss=0.1339, over 19499.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3078, pruned_loss=0.07547, ctc_loss=0.1408, over 3000609.96 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0 +2024-08-26 15:59:44,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=67978.66666666667, ans=0.125 +2024-08-26 15:59:51,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.36 vs. limit=15.0 +2024-08-26 16:00:08,532 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:00:14,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=68192.0, ans=0.125 +2024-08-26 16:00:20,894 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=12.0 +2024-08-26 16:00:23,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=68245.33333333333, ans=0.125 +2024-08-26 16:00:24,077 INFO [train.py:1114] (2/4) Epoch 6, batch 350, loss[loss=0.2133, simple_loss=0.2678, pruned_loss=0.05747, ctc_loss=0.1095, over 19758.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3078, pruned_loss=0.07539, ctc_loss=0.1407, over 3191275.62 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0 +2024-08-26 16:00:29,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=68245.33333333333, ans=10.0 +2024-08-26 16:00:35,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=68298.66666666667, ans=0.0 +2024-08-26 16:00:47,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=68352.0, ans=0.125 +2024-08-26 16:00:49,642 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.625e+02 1.872e+02 2.224e+02 3.924e+02, threshold=3.744e+02, percent-clipped=2.0 +2024-08-26 16:00:56,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=68405.33333333333, ans=0.125 +2024-08-26 16:01:05,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=68458.66666666667, ans=0.0 +2024-08-26 16:01:11,324 INFO [train.py:1114] (2/4) Epoch 6, batch 400, loss[loss=0.2484, simple_loss=0.3105, pruned_loss=0.06714, ctc_loss=0.1302, over 19497.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3075, pruned_loss=0.07517, ctc_loss=0.1402, over 3342789.81 frames. 
], batch size: 54, lr: 2.42e-02, grad_scale: 32.0 +2024-08-26 16:01:29,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=68618.66666666667, ans=0.0 +2024-08-26 16:01:31,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.78 vs. limit=15.0 +2024-08-26 16:01:34,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.72 vs. limit=15.0 +2024-08-26 16:01:55,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=68725.33333333333, ans=0.0 +2024-08-26 16:02:07,098 INFO [train.py:1114] (2/4) Epoch 6, batch 450, loss[loss=0.2401, simple_loss=0.3076, pruned_loss=0.06252, ctc_loss=0.1189, over 19615.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3075, pruned_loss=0.07496, ctc_loss=0.1399, over 3451041.50 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0 +2024-08-26 16:02:10,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68778.66666666667, ans=0.125 +2024-08-26 16:02:23,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=68832.0, ans=0.09899494936611666 +2024-08-26 16:02:34,031 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.611e+02 1.799e+02 2.140e+02 4.925e+02, threshold=3.597e+02, percent-clipped=1.0 +2024-08-26 16:02:44,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=68938.66666666667, ans=0.2 +2024-08-26 16:02:54,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=68992.0, ans=0.0 +2024-08-26 16:02:55,682 INFO [train.py:1114] (2/4) Epoch 6, batch 500, loss[loss=0.2617, simple_loss=0.3105, pruned_loss=0.0789, ctc_loss=0.1377, over 19687.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3063, pruned_loss=0.07452, ctc_loss=0.1389, over 3545450.38 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0 +2024-08-26 16:03:07,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69098.66666666667, ans=0.125 +2024-08-26 16:03:08,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.26 vs. limit=22.5 +2024-08-26 16:03:31,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=69205.33333333333, ans=0.125 +2024-08-26 16:03:32,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=69205.33333333333, ans=0.2 +2024-08-26 16:03:43,066 INFO [train.py:1114] (2/4) Epoch 6, batch 550, loss[loss=0.2793, simple_loss=0.3209, pruned_loss=0.08629, ctc_loss=0.1631, over 19302.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3062, pruned_loss=0.07444, ctc_loss=0.1389, over 3606897.15 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0 +2024-08-26 16:04:01,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.55 vs. 
limit=15.0 +2024-08-26 16:04:08,887 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.633e+02 1.875e+02 2.080e+02 6.681e+02, threshold=3.749e+02, percent-clipped=3.0 +2024-08-26 16:04:19,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.98 vs. limit=15.0 +2024-08-26 16:04:25,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=69525.33333333333, ans=0.2 +2024-08-26 16:04:30,180 INFO [train.py:1114] (2/4) Epoch 6, batch 600, loss[loss=0.272, simple_loss=0.3243, pruned_loss=0.07886, ctc_loss=0.1549, over 19395.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3065, pruned_loss=0.07453, ctc_loss=0.1395, over 3664060.99 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0 +2024-08-26 16:04:30,584 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:04:30,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.16 vs. limit=15.0 +2024-08-26 16:04:31,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69578.66666666667, ans=0.1 +2024-08-26 16:04:33,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=69578.66666666667, ans=0.0 +2024-08-26 16:04:37,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=69578.66666666667, ans=0.125 +2024-08-26 16:04:49,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69632.0, ans=0.1 +2024-08-26 16:05:04,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=69685.33333333333, ans=0.0 +2024-08-26 16:05:07,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=69738.66666666667, ans=0.125 +2024-08-26 16:05:12,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=69738.66666666667, ans=0.0 +2024-08-26 16:05:24,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69792.0, ans=0.125 +2024-08-26 16:05:25,773 INFO [train.py:1114] (2/4) Epoch 6, batch 650, loss[loss=0.245, simple_loss=0.3053, pruned_loss=0.06689, ctc_loss=0.1273, over 19766.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3057, pruned_loss=0.07391, ctc_loss=0.1381, over 3714679.70 frames. 
], batch size: 54, lr: 2.40e-02, grad_scale: 32.0 +2024-08-26 16:05:53,414 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.596e+02 1.734e+02 1.974e+02 3.978e+02, threshold=3.467e+02, percent-clipped=1.0 +2024-08-26 16:05:59,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=70005.33333333333, ans=0.2 +2024-08-26 16:06:06,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70058.66666666667, ans=0.1 +2024-08-26 16:06:15,337 INFO [train.py:1114] (2/4) Epoch 6, batch 700, loss[loss=0.2332, simple_loss=0.2839, pruned_loss=0.06565, ctc_loss=0.1281, over 19709.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3061, pruned_loss=0.07389, ctc_loss=0.1382, over 3746895.95 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 16.0 +2024-08-26 16:06:17,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0 +2024-08-26 16:06:26,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70165.33333333333, ans=0.125 +2024-08-26 16:06:29,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=70165.33333333333, ans=0.125 +2024-08-26 16:06:46,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=70272.0, ans=0.2 +2024-08-26 16:06:50,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70272.0, ans=0.125 +2024-08-26 16:06:56,822 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:06:59,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=70325.33333333333, ans=0.0 +2024-08-26 16:07:02,325 INFO [train.py:1114] (2/4) Epoch 6, batch 750, loss[loss=0.2513, simple_loss=0.3145, pruned_loss=0.06829, ctc_loss=0.1288, over 19511.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3059, pruned_loss=0.07394, ctc_loss=0.1381, over 3773056.43 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 16.0 +2024-08-26 16:07:21,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=70432.0, ans=0.125 +2024-08-26 16:07:23,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70432.0, ans=0.1 +2024-08-26 16:07:33,079 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.662e+02 1.845e+02 2.236e+02 2.956e+02, threshold=3.689e+02, percent-clipped=0.0 +2024-08-26 16:07:53,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=70538.66666666667, ans=0.0 +2024-08-26 16:08:06,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=70592.0, ans=0.0 +2024-08-26 16:08:25,917 INFO [train.py:1114] (2/4) Epoch 6, batch 800, loss[loss=0.2415, simple_loss=0.2809, pruned_loss=0.07371, ctc_loss=0.1366, over 19389.00 frames. 
], tot_loss[loss=0.2553, simple_loss=0.3063, pruned_loss=0.07439, ctc_loss=0.1386, over 3794047.98 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 32.0 +2024-08-26 16:08:35,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=70698.66666666667, ans=0.125 +2024-08-26 16:09:29,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=70858.66666666667, ans=0.125 +2024-08-26 16:09:32,229 INFO [train.py:1114] (2/4) Epoch 6, batch 850, loss[loss=0.2666, simple_loss=0.3135, pruned_loss=0.07938, ctc_loss=0.1523, over 19665.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3059, pruned_loss=0.07415, ctc_loss=0.1384, over 3813304.36 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0 +2024-08-26 16:09:42,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=70965.33333333333, ans=0.125 +2024-08-26 16:09:58,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=71018.66666666667, ans=0.125 +2024-08-26 16:09:58,790 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.315e+02 1.558e+02 1.696e+02 1.888e+02 5.151e+02, threshold=3.391e+02, percent-clipped=1.0 +2024-08-26 16:10:01,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71072.0, ans=0.125 +2024-08-26 16:10:03,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71072.0, ans=0.125 +2024-08-26 16:10:30,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-08-26 16:10:34,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=71125.33333333333, ans=0.2 +2024-08-26 16:10:35,830 INFO [train.py:1114] (2/4) Epoch 6, batch 900, loss[loss=0.2167, simple_loss=0.2764, pruned_loss=0.05662, ctc_loss=0.1097, over 19785.00 frames. ], tot_loss[loss=0.255, simple_loss=0.306, pruned_loss=0.07423, ctc_loss=0.1387, over 3818368.18 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 32.0 +2024-08-26 16:10:42,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=71178.66666666667, ans=0.125 +2024-08-26 16:10:45,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71232.0, ans=0.125 +2024-08-26 16:10:51,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71232.0, ans=0.1 +2024-08-26 16:11:08,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71338.66666666667, ans=0.0 +2024-08-26 16:11:10,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=71338.66666666667, ans=0.025 +2024-08-26 16:11:23,807 INFO [train.py:1114] (2/4) Epoch 6, batch 950, loss[loss=0.2403, simple_loss=0.2921, pruned_loss=0.06951, ctc_loss=0.1238, over 19507.00 frames. 
], tot_loss[loss=0.2561, simple_loss=0.3067, pruned_loss=0.07481, ctc_loss=0.1398, over 3820269.97 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 16.0 +2024-08-26 16:11:39,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=71445.33333333333, ans=0.125 +2024-08-26 16:11:40,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=71498.66666666667, ans=0.2 +2024-08-26 16:11:44,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=71498.66666666667, ans=0.0 +2024-08-26 16:11:51,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.02 vs. limit=22.5 +2024-08-26 16:11:53,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=71552.0, ans=0.2 +2024-08-26 16:11:53,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=71552.0, ans=0.0 +2024-08-26 16:11:56,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.40 vs. limit=15.0 +2024-08-26 16:11:57,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.39 vs. limit=22.5 +2024-08-26 16:11:57,823 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:11:59,452 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.602e+02 1.780e+02 2.099e+02 5.215e+02, threshold=3.559e+02, percent-clipped=4.0 +2024-08-26 16:12:05,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=71605.33333333333, ans=0.0 +2024-08-26 16:12:21,537 INFO [train.py:1114] (2/4) Epoch 6, batch 1000, loss[loss=0.2239, simple_loss=0.2785, pruned_loss=0.06219, ctc_loss=0.1126, over 19872.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3073, pruned_loss=0.07514, ctc_loss=0.1404, over 3816465.51 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0 +2024-08-26 16:12:24,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=71712.0, ans=0.125 +2024-08-26 16:12:30,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.06 vs. limit=15.0 +2024-08-26 16:12:33,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71765.33333333333, ans=0.125 +2024-08-26 16:12:34,498 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:12:52,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71765.33333333333, ans=0.0 +2024-08-26 16:13:03,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.85 vs. 
limit=15.0 +2024-08-26 16:13:04,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=71872.0, ans=0.2 +2024-08-26 16:13:05,996 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.49 vs. limit=10.0 +2024-08-26 16:13:06,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=71872.0, ans=0.125 +2024-08-26 16:13:09,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71872.0, ans=0.125 +2024-08-26 16:13:15,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=71925.33333333333, ans=0.125 +2024-08-26 16:13:22,626 INFO [train.py:1114] (2/4) Epoch 6, batch 1050, loss[loss=0.2779, simple_loss=0.3243, pruned_loss=0.08441, ctc_loss=0.1566, over 19849.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3063, pruned_loss=0.07472, ctc_loss=0.1397, over 3823461.72 frames. ], batch size: 57, lr: 2.37e-02, grad_scale: 16.0 +2024-08-26 16:13:23,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=71978.66666666667, ans=12.0 +2024-08-26 16:13:50,113 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.587e+02 1.763e+02 2.081e+02 5.001e+02, threshold=3.526e+02, percent-clipped=1.0 +2024-08-26 16:13:55,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=72138.66666666667, ans=0.2 +2024-08-26 16:14:03,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=72192.0, ans=0.125 +2024-08-26 16:14:05,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=72192.0, ans=0.2 +2024-08-26 16:14:10,561 INFO [train.py:1114] (2/4) Epoch 6, batch 1100, loss[loss=0.2356, simple_loss=0.2918, pruned_loss=0.06509, ctc_loss=0.1229, over 19575.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3058, pruned_loss=0.07454, ctc_loss=0.1394, over 3830010.05 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-26 16:14:22,372 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.05 vs. limit=15.0 +2024-08-26 16:14:30,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=72298.66666666667, ans=0.125 +2024-08-26 16:14:37,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=72352.0, ans=0.1 +2024-08-26 16:14:37,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=72352.0, ans=0.2 +2024-08-26 16:15:13,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=72405.33333333333, ans=0.0 +2024-08-26 16:15:13,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.38 vs. 
limit=12.0 +2024-08-26 16:15:25,920 INFO [train.py:1114] (2/4) Epoch 6, batch 1150, loss[loss=0.2485, simple_loss=0.3036, pruned_loss=0.06973, ctc_loss=0.1349, over 19575.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3061, pruned_loss=0.07465, ctc_loss=0.1395, over 3829261.88 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0 +2024-08-26 16:15:46,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=72512.0, ans=0.02 +2024-08-26 16:16:50,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=72618.66666666667, ans=0.125 +2024-08-26 16:16:51,599 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.628e+02 1.822e+02 2.077e+02 5.117e+02, threshold=3.645e+02, percent-clipped=2.0 +2024-08-26 16:16:55,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=72672.0, ans=0.125 +2024-08-26 16:17:03,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.57 vs. limit=22.5 +2024-08-26 16:17:15,528 INFO [train.py:1114] (2/4) Epoch 6, batch 1200, loss[loss=0.2618, simple_loss=0.3174, pruned_loss=0.07517, ctc_loss=0.1396, over 19839.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3068, pruned_loss=0.07498, ctc_loss=0.1401, over 3825281.55 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:17:22,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=72778.66666666667, ans=0.04949747468305833 +2024-08-26 16:17:28,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.89 vs. limit=15.0 +2024-08-26 16:17:31,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72832.0, ans=0.1 +2024-08-26 16:17:36,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-08-26 16:17:47,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=72938.66666666667, ans=0.2 +2024-08-26 16:18:04,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.55 vs. limit=6.0 +2024-08-26 16:18:04,585 INFO [train.py:1114] (2/4) Epoch 6, batch 1250, loss[loss=0.2701, simple_loss=0.3139, pruned_loss=0.08358, ctc_loss=0.1481, over 19541.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.307, pruned_loss=0.07454, ctc_loss=0.1392, over 3842971.79 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:18:04,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=73045.33333333333, ans=0.125 +2024-08-26 16:18:11,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.70 vs. 
limit=12.0 +2024-08-26 16:18:13,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=73098.66666666667, ans=0.0 +2024-08-26 16:18:17,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.57 vs. limit=22.5 +2024-08-26 16:18:31,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.534e+02 1.709e+02 2.004e+02 3.682e+02, threshold=3.418e+02, percent-clipped=1.0 +2024-08-26 16:18:45,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=73205.33333333333, ans=0.0 +2024-08-26 16:18:59,489 INFO [train.py:1114] (2/4) Epoch 6, batch 1300, loss[loss=0.2691, simple_loss=0.3263, pruned_loss=0.07685, ctc_loss=0.1455, over 18813.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3062, pruned_loss=0.07379, ctc_loss=0.1378, over 3845774.18 frames. ], batch size: 76, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:19:14,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=73312.0, ans=0.2 +2024-08-26 16:19:15,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=73312.0, ans=0.0 +2024-08-26 16:20:00,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=73418.66666666667, ans=0.125 +2024-08-26 16:20:10,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0 +2024-08-26 16:20:14,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=73472.0, ans=0.125 +2024-08-26 16:20:22,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=73525.33333333333, ans=0.0 +2024-08-26 16:20:25,874 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-08-26 16:20:32,211 INFO [train.py:1114] (2/4) Epoch 6, batch 1350, loss[loss=0.2761, simple_loss=0.3217, pruned_loss=0.08221, ctc_loss=0.1654, over 19767.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3056, pruned_loss=0.07331, ctc_loss=0.1367, over 3857695.64 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0 +2024-08-26 16:20:32,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=73578.66666666667, ans=0.125 +2024-08-26 16:20:39,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.27 vs. limit=15.0 +2024-08-26 16:21:00,538 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.536e+02 1.657e+02 1.960e+02 3.055e+02, threshold=3.315e+02, percent-clipped=0.0 +2024-08-26 16:21:20,642 INFO [train.py:1114] (2/4) Epoch 6, batch 1400, loss[loss=0.2293, simple_loss=0.2819, pruned_loss=0.06424, ctc_loss=0.1205, over 19662.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3053, pruned_loss=0.07327, ctc_loss=0.1365, over 3865002.34 frames. 
], batch size: 46, lr: 2.35e-02, grad_scale: 32.0 +2024-08-26 16:21:24,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.28 vs. limit=22.5 +2024-08-26 16:21:35,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=73898.66666666667, ans=0.04949747468305833 +2024-08-26 16:21:41,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=73952.0, ans=0.025 +2024-08-26 16:21:44,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=73952.0, ans=0.05 +2024-08-26 16:21:48,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=74005.33333333333, ans=0.09899494936611666 +2024-08-26 16:22:08,757 INFO [train.py:1114] (2/4) Epoch 6, batch 1450, loss[loss=0.2775, simple_loss=0.321, pruned_loss=0.08542, ctc_loss=0.1581, over 19682.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3062, pruned_loss=0.0741, ctc_loss=0.1381, over 3862462.85 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 16.0 +2024-08-26 16:22:20,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=74165.33333333333, ans=0.025 +2024-08-26 16:23:25,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74218.66666666667, ans=0.1 +2024-08-26 16:23:32,943 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.612e+02 1.863e+02 2.093e+02 4.374e+02, threshold=3.727e+02, percent-clipped=2.0 +2024-08-26 16:23:34,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=74272.0, ans=0.125 +2024-08-26 16:23:47,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=74325.33333333333, ans=0.125 +2024-08-26 16:23:57,154 INFO [train.py:1114] (2/4) Epoch 6, batch 1500, loss[loss=0.265, simple_loss=0.3168, pruned_loss=0.07776, ctc_loss=0.1443, over 19537.00 frames. ], tot_loss[loss=0.254, simple_loss=0.306, pruned_loss=0.07355, ctc_loss=0.1373, over 3861892.67 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 16.0 +2024-08-26 16:24:00,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=74378.66666666667, ans=0.0 +2024-08-26 16:24:25,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=74378.66666666667, ans=0.0 +2024-08-26 16:24:34,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=74432.0, ans=0.125 +2024-08-26 16:24:37,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=74432.0, ans=0.125 +2024-08-26 16:24:38,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.12 vs. 
limit=12.0 +2024-08-26 16:25:02,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74592.0, ans=0.1 +2024-08-26 16:25:02,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.12 vs. limit=15.0 +2024-08-26 16:25:10,566 INFO [train.py:1114] (2/4) Epoch 6, batch 1550, loss[loss=0.2804, simple_loss=0.328, pruned_loss=0.08553, ctc_loss=0.1546, over 19637.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3063, pruned_loss=0.074, ctc_loss=0.1382, over 3847052.20 frames. ], batch size: 60, lr: 2.34e-02, grad_scale: 16.0 +2024-08-26 16:25:55,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74752.0, ans=0.1 +2024-08-26 16:26:20,816 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.577e+02 1.696e+02 1.957e+02 2.811e+02, threshold=3.391e+02, percent-clipped=0.0 +2024-08-26 16:26:40,315 INFO [train.py:1114] (2/4) Epoch 6, batch 1600, loss[loss=0.2787, simple_loss=0.3308, pruned_loss=0.08099, ctc_loss=0.1615, over 19842.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3062, pruned_loss=0.07404, ctc_loss=0.1384, over 3836413.87 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0 +2024-08-26 16:26:44,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=74912.0, ans=0.125 +2024-08-26 16:26:54,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=74965.33333333333, ans=0.125 +2024-08-26 16:26:54,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=74965.33333333333, ans=0.0 +2024-08-26 16:26:58,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.49 vs. limit=15.0 +2024-08-26 16:27:22,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=75072.0, ans=0.125 +2024-08-26 16:27:22,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.25 vs. limit=15.0 +2024-08-26 16:27:35,217 INFO [train.py:1114] (2/4) Epoch 6, batch 1650, loss[loss=0.2598, simple_loss=0.3133, pruned_loss=0.07418, ctc_loss=0.1451, over 19652.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3054, pruned_loss=0.07367, ctc_loss=0.1377, over 3832415.40 frames. ], batch size: 59, lr: 2.34e-02, grad_scale: 32.0 +2024-08-26 16:28:43,071 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.584e+02 1.799e+02 2.082e+02 3.549e+02, threshold=3.597e+02, percent-clipped=1.0 +2024-08-26 16:28:44,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75338.66666666667, ans=0.1 +2024-08-26 16:29:36,193 INFO [train.py:1114] (2/4) Epoch 6, batch 1700, loss[loss=0.2118, simple_loss=0.2675, pruned_loss=0.05647, ctc_loss=0.1078, over 19646.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3046, pruned_loss=0.07269, ctc_loss=0.136, over 3847042.32 frames. 
], batch size: 46, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:29:43,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75445.33333333333, ans=0.125 +2024-08-26 16:29:45,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=75498.66666666667, ans=0.0 +2024-08-26 16:30:10,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75605.33333333333, ans=0.125 +2024-08-26 16:30:24,061 INFO [train.py:1114] (2/4) Epoch 6, batch 1750, loss[loss=0.2065, simple_loss=0.2588, pruned_loss=0.05548, ctc_loss=0.1079, over 19618.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3035, pruned_loss=0.07198, ctc_loss=0.1347, over 3852057.66 frames. ], batch size: 45, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:31:02,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75818.66666666667, ans=0.125 +2024-08-26 16:31:04,531 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.524e+02 1.697e+02 1.959e+02 3.052e+02, threshold=3.394e+02, percent-clipped=0.0 +2024-08-26 16:31:04,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=75872.0, ans=0.125 +2024-08-26 16:31:06,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=75872.0, ans=0.125 +2024-08-26 16:31:06,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.39 vs. limit=10.0 +2024-08-26 16:31:25,742 INFO [train.py:1114] (2/4) Epoch 6, batch 1800, loss[loss=0.2583, simple_loss=0.3147, pruned_loss=0.07399, ctc_loss=0.1349, over 19612.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.304, pruned_loss=0.07237, ctc_loss=0.1353, over 3854282.85 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:32:00,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75978.66666666667, ans=0.1 +2024-08-26 16:32:10,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=76032.0, ans=0.0 +2024-08-26 16:32:56,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76192.0, ans=0.125 +2024-08-26 16:33:01,906 INFO [train.py:1114] (2/4) Epoch 6, batch 1850, loss[loss=0.2589, simple_loss=0.3152, pruned_loss=0.07351, ctc_loss=0.139, over 19589.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3032, pruned_loss=0.0721, ctc_loss=0.1346, over 3857785.01 frames. 
], batch size: 57, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:33:22,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=76245.33333333333, ans=15.0 +2024-08-26 16:33:26,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=76245.33333333333, ans=0.1 +2024-08-26 16:33:43,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=76298.66666666667, ans=0.125 +2024-08-26 16:33:55,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=76298.66666666667, ans=0.2 +2024-08-26 16:34:05,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=76405.33333333333, ans=0.125 +2024-08-26 16:34:05,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.545e+02 1.701e+02 1.893e+02 2.907e+02, threshold=3.402e+02, percent-clipped=0.0 +2024-08-26 16:34:12,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.81 vs. limit=15.0 +2024-08-26 16:34:20,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=76458.66666666667, ans=0.0 +2024-08-26 16:34:23,397 INFO [train.py:1114] (2/4) Epoch 6, batch 1900, loss[loss=0.2564, simple_loss=0.3183, pruned_loss=0.07072, ctc_loss=0.1326, over 19662.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.304, pruned_loss=0.07242, ctc_loss=0.135, over 3862819.37 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:34:38,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.92 vs. limit=15.0 +2024-08-26 16:34:40,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=76618.66666666667, ans=0.0 +2024-08-26 16:34:48,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=76672.0, ans=0.0 +2024-08-26 16:35:14,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=76672.0, ans=0.125 +2024-08-26 16:35:14,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=76672.0, ans=0.125 +2024-08-26 16:35:14,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=76672.0, ans=0.025 +2024-08-26 16:35:27,760 INFO [train.py:1114] (2/4) Epoch 6, batch 1950, loss[loss=0.2176, simple_loss=0.2836, pruned_loss=0.0542, ctc_loss=0.1081, over 19603.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.305, pruned_loss=0.07231, ctc_loss=0.1352, over 3871571.98 frames. ], batch size: 52, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:36:32,172 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.646e+02 1.808e+02 2.059e+02 4.885e+02, threshold=3.617e+02, percent-clipped=2.0 +2024-08-26 16:36:38,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.28 vs. 
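limit=22.5

The Whitening lines from scaling.py compare a per-module whitening metric of the activations against a limit, as in the self_attn1.whiten entry this line completes (metric=15.28 vs. limit=22.5). The exact formula is not shown in the log; a plausible proxy, offered only as an assumption, is a trace ratio that equals 1.0 when the channel covariance is a multiple of the identity (perfectly white features) and grows as the covariance becomes more anisotropic:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """x: (num_frames, num_channels) activations; result is >= 1.0."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]  # channel covariance matrix
    num_channels = cov.shape[0]
    # num_channels * trace(C^2) / trace(C)^2 equals 1.0 iff all eigenvalues
    # of C are equal, and increases as the eigenvalue spread widens
    return num_channels * (cov @ cov).trace() / cov.trace() ** 2
```

The "vs. limit" phrasing suggests a soft constraint: a corrective penalty is presumably applied only when the metric exceeds the limit.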
+2024-08-26 16:36:44,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76992.0, ans=0.125 +2024-08-26 16:36:52,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77045.33333333333, ans=0.1 +2024-08-26 16:36:53,614 INFO [train.py:1114] (2/4) Epoch 6, batch 2000, loss[loss=0.2204, simple_loss=0.265, pruned_loss=0.06447, ctc_loss=0.1169, over 19653.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3052, pruned_loss=0.07267, ctc_loss=0.1357, over 3855460.36 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:37:11,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=77152.0, ans=0.125 +2024-08-26 16:37:11,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=77152.0, ans=0.0 +2024-08-26 16:37:38,182 INFO [train.py:1114] (2/4) Epoch 6, batch 2050, loss[loss=0.218, simple_loss=0.2729, pruned_loss=0.05833, ctc_loss=0.116, over 19722.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3041, pruned_loss=0.07249, ctc_loss=0.1353, over 3851218.18 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:37:41,957 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:37:51,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77365.33333333333, ans=0.1 +2024-08-26 16:38:04,756 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.566e+02 1.748e+02 2.075e+02 4.290e+02, threshold=3.497e+02, percent-clipped=1.0 +2024-08-26 16:38:07,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=77472.0, ans=0.125 +2024-08-26 16:38:11,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=77472.0, ans=0.0 +2024-08-26 16:38:12,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77525.33333333333, ans=0.0 +2024-08-26 16:38:14,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=77525.33333333333, ans=0.07 +2024-08-26 16:38:15,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.67 vs. limit=22.5 +2024-08-26 16:38:18,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=77525.33333333333, ans=0.07 +2024-08-26 16:38:34,155 INFO [train.py:1114] (2/4) Epoch 6, batch 2100, loss[loss=0.2437, simple_loss=0.298, pruned_loss=0.06834, ctc_loss=0.1317, over 19787.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3032, pruned_loss=0.07182, ctc_loss=0.1343, over 3858067.76 frames.
], batch size: 54, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:38:36,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77578.66666666667, ans=0.125 +2024-08-26 16:39:09,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=77578.66666666667, ans=0.5 +2024-08-26 16:39:35,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77738.66666666667, ans=0.1 +2024-08-26 16:39:38,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77792.0, ans=0.0 +2024-08-26 16:39:46,023 INFO [train.py:1114] (2/4) Epoch 6, batch 2150, loss[loss=0.2372, simple_loss=0.2945, pruned_loss=0.06473, ctc_loss=0.1261, over 19851.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3029, pruned_loss=0.07172, ctc_loss=0.134, over 3868909.89 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:39:52,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=77845.33333333333, ans=0.125 +2024-08-26 16:39:53,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.88 vs. limit=15.0 +2024-08-26 16:40:00,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=77898.66666666667, ans=0.125 +2024-08-26 16:40:03,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=77898.66666666667, ans=0.0 +2024-08-26 16:40:03,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.83 vs. limit=22.5 +2024-08-26 16:40:13,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=78005.33333333333, ans=0.2 +2024-08-26 16:40:13,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=78005.33333333333, ans=0.2 +2024-08-26 16:40:13,759 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.590e+02 1.744e+02 2.019e+02 3.989e+02, threshold=3.489e+02, percent-clipped=1.0 +2024-08-26 16:40:20,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=78005.33333333333, ans=0.125 +2024-08-26 16:40:22,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78058.66666666667, ans=0.125 +2024-08-26 16:40:29,804 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:40:31,354 INFO [train.py:1114] (2/4) Epoch 6, batch 2200, loss[loss=0.2583, simple_loss=0.3165, pruned_loss=0.07274, ctc_loss=0.1366, over 19581.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3028, pruned_loss=0.07167, ctc_loss=0.1339, over 3867615.28 frames. 
], batch size: 57, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:40:40,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78165.33333333333, ans=0.125 +2024-08-26 16:40:44,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=78165.33333333333, ans=0.1 +2024-08-26 16:40:59,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=78272.0, ans=0.0 +2024-08-26 16:42:04,355 INFO [train.py:1114] (2/4) Epoch 6, batch 2250, loss[loss=0.2358, simple_loss=0.2972, pruned_loss=0.0641, ctc_loss=0.1155, over 19622.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3026, pruned_loss=0.07131, ctc_loss=0.1334, over 3866433.00 frames. ], batch size: 55, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:42:09,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.16 vs. limit=10.0 +2024-08-26 16:42:30,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.366e+02 1.631e+02 1.850e+02 2.118e+02 4.912e+02, threshold=3.701e+02, percent-clipped=4.0 +2024-08-26 16:42:49,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=78592.0, ans=0.2 +2024-08-26 16:42:51,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=78592.0, ans=0.0 +2024-08-26 16:42:57,019 INFO [train.py:1114] (2/4) Epoch 6, batch 2300, loss[loss=0.2336, simple_loss=0.282, pruned_loss=0.06712, ctc_loss=0.1272, over 19498.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3016, pruned_loss=0.07101, ctc_loss=0.133, over 3860656.19 frames. ], batch size: 49, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:05,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=78698.66666666667, ans=0.125 +2024-08-26 16:43:09,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78698.66666666667, ans=0.0 +2024-08-26 16:43:17,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=78752.0, ans=0.125 +2024-08-26 16:43:20,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=78752.0, ans=0.2 +2024-08-26 16:43:25,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78805.33333333333, ans=0.125 +2024-08-26 16:43:39,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.14 vs. limit=15.0 +2024-08-26 16:43:41,596 INFO [train.py:1114] (2/4) Epoch 6, batch 2350, loss[loss=0.2781, simple_loss=0.3306, pruned_loss=0.08328, ctc_loss=0.1477, over 19700.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3017, pruned_loss=0.07109, ctc_loss=0.1329, over 3863792.78 frames. 
], batch size: 63, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:59,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=78965.33333333333, ans=0.0 +2024-08-26 16:44:09,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.319e+02 1.571e+02 1.792e+02 2.053e+02 3.529e+02, threshold=3.585e+02, percent-clipped=0.0 +2024-08-26 16:44:13,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=79072.0, ans=0.0 +2024-08-26 16:44:16,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79072.0, ans=0.125 +2024-08-26 16:44:24,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.48 vs. limit=10.0 +2024-08-26 16:44:27,085 INFO [train.py:1114] (2/4) Epoch 6, batch 2400, loss[loss=0.2726, simple_loss=0.3219, pruned_loss=0.08096, ctc_loss=0.1532, over 19254.00 frames. ], tot_loss[loss=0.251, simple_loss=0.304, pruned_loss=0.07217, ctc_loss=0.1343, over 3857246.83 frames. ], batch size: 71, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:44:29,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.52 vs. limit=22.5 +2024-08-26 16:44:33,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.28 vs. limit=15.0 +2024-08-26 16:44:40,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79232.0, ans=0.1 +2024-08-26 16:44:41,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=79232.0, ans=0.125 +2024-08-26 16:44:43,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79232.0, ans=0.125 +2024-08-26 16:44:54,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=79338.66666666667, ans=0.125 +2024-08-26 16:45:01,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=79338.66666666667, ans=0.0 +2024-08-26 16:45:12,827 INFO [train.py:1114] (2/4) Epoch 6, batch 2450, loss[loss=0.3195, simple_loss=0.3351, pruned_loss=0.1097, ctc_loss=0.2112, over 13151.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3096, pruned_loss=0.0768, ctc_loss=0.1432, over 3729667.10 frames. 
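], batch size: 140, lr: 2.29e-02, grad_scale: 32.0

The ScheduledFloat lines from scaling.py that dominate this log track hyperparameters (dropout probabilities, skip rates, bypass scale minimums) whose values change as a function of batch_count, with ans reporting the current value; successive entries advance batch_count in steps of 53.33..., so the counter is evidently not a plain integer batch index. Below is a minimal sketch of a piecewise-linear schedule of this kind; the breakpoints are invented for illustration and are not taken from the actual training configuration:

```python
def scheduled_float(batch_count: float, points: list[tuple[float, float]]) -> float:
    """Piecewise-linear interpolation over sorted (batch_count, value)
    breakpoints, clamped to the end values outside the covered range."""
    if batch_count <= points[0][0]:
        return points[0][1]
    for (x0, y0), (x1, y1) in zip(points, points[1:]):
        if batch_count <= x1:
            t = (batch_count - x0) / (x1 - x0)
            return y0 + t * (y1 - y0)
    return points[-1][1]

# Hypothetical example: a skip rate decaying from 0.1 to 0.0 over the
# first 20k batches, evaluated at a batch_count like those logged here.
print(scheduled_float(79445.33, [(0.0, 0.1), (20000.0, 0.0)]))  # -> 0.0
```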
+2024-08-26 16:45:14,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79445.33333333333, ans=0.1 +2024-08-26 16:45:22,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=79498.66666666667, ans=0.025 +2024-08-26 16:45:23,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=79498.66666666667, ans=0.125 +2024-08-26 16:45:29,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79498.66666666667, ans=0.125 +2024-08-26 16:45:29,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=79498.66666666667, ans=0.0 +2024-08-26 16:45:40,096 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.744e+02 1.902e+02 2.066e+02 3.652e+02, threshold=3.804e+02, percent-clipped=1.0 +2024-08-26 16:48:16,407 INFO [train.py:1114] (2/4) Epoch 7, batch 0, loss[loss=0.2543, simple_loss=0.2952, pruned_loss=0.07701, ctc_loss=0.1482, over 19811.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.2952, pruned_loss=0.07701, ctc_loss=0.1482, over 19811.00 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:48:16,408 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 16:48:29,568 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.2068, simple_loss=0.2958, pruned_loss=0.04327, ctc_loss=0.07811, over 944034.00 frames. +2024-08-26 16:48:29,569 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB +2024-08-26 16:49:04,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.90 vs. limit=15.0 +2024-08-26 16:49:08,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.87 vs. limit=15.0 +2024-08-26 16:49:19,278 INFO [train.py:1114] (2/4) Epoch 7, batch 50, loss[loss=0.226, simple_loss=0.2777, pruned_loss=0.06316, ctc_loss=0.12, over 19711.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3081, pruned_loss=0.07424, ctc_loss=0.1399, over 843578.70 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:49:25,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=79925.33333333333, ans=0.0 +2024-08-26 16:49:52,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=80085.33333333333, ans=0.125 +2024-08-26 16:49:57,477 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.282e+02 1.584e+02 1.822e+02 2.089e+02 3.575e+02, threshold=3.645e+02, percent-clipped=0.0 +2024-08-26 16:50:07,022 INFO [train.py:1114] (2/4) Epoch 7, batch 100, loss[loss=0.225, simple_loss=0.2845, pruned_loss=0.05976, ctc_loss=0.115, over 19722.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3073, pruned_loss=0.07421, ctc_loss=0.1385, over 1498623.17 frames.
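], batch size: 51, lr: 2.13e-02, grad_scale: 32.0

At the start of epoch 7 above, train.py computes a validation loss over 944034 frames and reports the peak GPU memory on this rank (12825MB). A hedged sketch of that bookkeeping follows; the helper names are illustrative, compute_loss is assumed to return a per-frame average loss together with the frame count, and whether train.py divides bytes by 1024*1024 to get MB is also an assumption:

```python
import torch

@torch.no_grad()
def validation_loss(model, val_loader, compute_loss) -> float:
    """Frame-weighted average loss over a held-out set, as in the
    'Computing validation loss' step logged above."""
    model.eval()
    total_loss, total_frames = 0.0, 0
    for batch in val_loader:
        loss, num_frames = compute_loss(model, batch)
        total_loss += loss.item() * num_frames
        total_frames += num_frames
    model.train()
    return total_loss / total_frames

# Peak memory as reported by the log (requires a CUDA device):
peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
print(f"Maximum memory allocated so far is {peak_mb}MB")
```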
+2024-08-26 16:50:13,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=80192.0, ans=0.0 +2024-08-26 16:50:13,753 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:50:25,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80245.33333333333, ans=0.1 +2024-08-26 16:50:36,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80298.66666666667, ans=0.125 +2024-08-26 16:50:40,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=80352.0, ans=0.04949747468305833 +2024-08-26 16:51:01,451 INFO [train.py:1114] (2/4) Epoch 7, batch 150, loss[loss=0.234, simple_loss=0.283, pruned_loss=0.06795, ctc_loss=0.1227, over 19710.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.304, pruned_loss=0.0722, ctc_loss=0.1347, over 2027456.19 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:04,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.10 vs. limit=15.0 +2024-08-26 16:51:15,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80512.0, ans=0.125 +2024-08-26 16:51:28,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80565.33333333333, ans=0.125 +2024-08-26 16:51:33,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=80618.66666666667, ans=0.0 +2024-08-26 16:51:39,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.525e+02 1.667e+02 1.863e+02 2.878e+02, threshold=3.334e+02, percent-clipped=0.0 +2024-08-26 16:51:41,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80672.0, ans=0.125 +2024-08-26 16:51:48,553 INFO [train.py:1114] (2/4) Epoch 7, batch 200, loss[loss=0.2952, simple_loss=0.3365, pruned_loss=0.09293, ctc_loss=0.1699, over 18253.00 frames.
], batch size: 85, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:52,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=80725.33333333333, ans=0.0 +2024-08-26 16:51:58,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=80778.66666666667, ans=0.0 +2024-08-26 16:52:04,427 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:52:14,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=80832.0, ans=0.5 +2024-08-26 16:52:21,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=80885.33333333333, ans=0.125 +2024-08-26 16:52:35,139 INFO [train.py:1114] (2/4) Epoch 7, batch 250, loss[loss=0.266, simple_loss=0.3226, pruned_loss=0.07577, ctc_loss=0.1448, over 19380.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3011, pruned_loss=0.07016, ctc_loss=0.1309, over 2756169.51 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:52:42,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=80992.0, ans=0.1 +2024-08-26 16:52:50,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=15.0 +2024-08-26 16:53:03,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=81152.0, ans=0.2 +2024-08-26 16:53:06,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=81152.0, ans=0.0 +2024-08-26 16:53:14,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81152.0, ans=0.1 +2024-08-26 16:53:14,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81152.0, ans=0.1 +2024-08-26 16:53:16,584 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.591e+02 1.729e+02 1.900e+02 5.825e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 16:53:21,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.84 vs. limit=15.0 +2024-08-26 16:53:25,916 INFO [train.py:1114] (2/4) Epoch 7, batch 300, loss[loss=0.2735, simple_loss=0.3272, pruned_loss=0.08011, ctc_loss=0.1491, over 19535.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.2996, pruned_loss=0.06913, ctc_loss=0.1292, over 3001114.53 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:53:49,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-26 16:53:49,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=81365.33333333333, ans=0.125 +2024-08-26 16:54:18,315 INFO [train.py:1114] (2/4) Epoch 7, batch 350, loss[loss=0.217, simple_loss=0.2724, pruned_loss=0.05888, ctc_loss=0.1099, over 19740.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.2995, pruned_loss=0.06893, ctc_loss=0.1288, over 3191443.24 frames. 
], batch size: 48, lr: 2.12e-02, grad_scale: 16.0 +2024-08-26 16:54:29,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.43 vs. limit=22.5 +2024-08-26 16:54:42,489 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:54:56,442 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.574e+02 1.753e+02 2.022e+02 2.928e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-26 16:55:04,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=81792.0, ans=0.125 +2024-08-26 16:55:04,697 INFO [train.py:1114] (2/4) Epoch 7, batch 400, loss[loss=0.2339, simple_loss=0.2991, pruned_loss=0.06134, ctc_loss=0.1149, over 19495.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2988, pruned_loss=0.06832, ctc_loss=0.128, over 3343247.64 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:55:14,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=81845.33333333333, ans=0.125 +2024-08-26 16:55:18,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=81845.33333333333, ans=0.035 +2024-08-26 16:55:20,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81845.33333333333, ans=0.1 +2024-08-26 16:55:22,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0 +2024-08-26 16:55:26,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=81898.66666666667, ans=15.0 +2024-08-26 16:55:34,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=81952.0, ans=0.0 +2024-08-26 16:55:39,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=81952.0, ans=0.0 +2024-08-26 16:55:41,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=82005.33333333333, ans=0.09899494936611666 +2024-08-26 16:55:45,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=82005.33333333333, ans=0.2 +2024-08-26 16:55:51,736 INFO [train.py:1114] (2/4) Epoch 7, batch 450, loss[loss=0.2202, simple_loss=0.2905, pruned_loss=0.05395, ctc_loss=0.1051, over 19607.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2991, pruned_loss=0.06846, ctc_loss=0.1285, over 3450420.32 frames. ], batch size: 55, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:03,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=82058.66666666667, ans=0.025 +2024-08-26 16:56:11,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=82112.0, ans=0.07 +2024-08-26 16:56:16,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.23 vs. 
limit=15.0 +2024-08-26 16:56:17,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=82165.33333333333, ans=0.0 +2024-08-26 16:56:18,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=82165.33333333333, ans=0.0 +2024-08-26 16:56:19,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=82165.33333333333, ans=0.2 +2024-08-26 16:56:36,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.23 vs. limit=15.0 +2024-08-26 16:56:41,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.250e+02 1.485e+02 1.753e+02 2.038e+02 3.855e+02, threshold=3.505e+02, percent-clipped=1.0 +2024-08-26 16:56:42,995 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:56:44,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=82272.0, ans=0.2 +2024-08-26 16:56:49,040 INFO [train.py:1114] (2/4) Epoch 7, batch 500, loss[loss=0.2508, simple_loss=0.3077, pruned_loss=0.07024, ctc_loss=0.1339, over 19666.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2989, pruned_loss=0.06848, ctc_loss=0.1283, over 3546182.01 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:50,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82325.33333333333, ans=0.1 +2024-08-26 16:56:55,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=82325.33333333333, ans=0.2 +2024-08-26 16:57:00,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=82378.66666666667, ans=10.0 +2024-08-26 16:57:01,205 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:57:03,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82378.66666666667, ans=0.1 +2024-08-26 16:57:12,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.35 vs. limit=22.5 +2024-08-26 16:57:25,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82538.66666666667, ans=0.1 +2024-08-26 16:57:30,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=82538.66666666667, ans=0.95 +2024-08-26 16:57:35,771 INFO [train.py:1114] (2/4) Epoch 7, batch 550, loss[loss=0.2807, simple_loss=0.3273, pruned_loss=0.08531, ctc_loss=0.1586, over 19277.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2989, pruned_loss=0.06858, ctc_loss=0.1283, over 3608003.59 frames. 
], batch size: 71, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:57:35,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=82592.0, ans=0.2 +2024-08-26 16:57:37,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=82592.0, ans=0.2 +2024-08-26 16:57:40,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=82592.0, ans=0.125 +2024-08-26 16:57:41,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82592.0, ans=0.1 +2024-08-26 16:57:49,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82645.33333333333, ans=0.1 +2024-08-26 16:58:16,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=82805.33333333333, ans=0.025 +2024-08-26 16:58:16,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.530e+02 1.701e+02 1.927e+02 4.407e+02, threshold=3.402e+02, percent-clipped=1.0 +2024-08-26 16:58:30,186 INFO [train.py:1114] (2/4) Epoch 7, batch 600, loss[loss=0.2561, simple_loss=0.3157, pruned_loss=0.07194, ctc_loss=0.1317, over 19349.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.2997, pruned_loss=0.06857, ctc_loss=0.1283, over 3666110.17 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:58:57,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=82858.66666666667, ans=0.0 +2024-08-26 16:58:57,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82858.66666666667, ans=0.1 +2024-08-26 16:58:57,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.49 vs. limit=12.0 +2024-08-26 17:04:51,937 INFO [train.py:1114] (2/4) Epoch 7, batch 650, loss[loss=0.2473, simple_loss=0.3096, pruned_loss=0.06666, ctc_loss=0.1292, over 19768.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2986, pruned_loss=0.06813, ctc_loss=0.1274, over 3716263.92 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:05:06,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=83125.33333333333, ans=0.125 +2024-08-26 17:05:23,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=83232.0, ans=0.2 +2024-08-26 17:05:34,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=83285.33333333333, ans=0.2 +2024-08-26 17:05:41,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.502e+02 1.666e+02 1.880e+02 3.682e+02, threshold=3.331e+02, percent-clipped=2.0 +2024-08-26 17:06:20,354 INFO [train.py:1114] (2/4) Epoch 7, batch 700, loss[loss=0.2192, simple_loss=0.28, pruned_loss=0.05835, ctc_loss=0.1043, over 19700.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2983, pruned_loss=0.06783, ctc_loss=0.1267, over 3748949.27 frames. 
], batch size: 51, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:06:28,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=83392.0, ans=0.125 +2024-08-26 17:07:08,431 INFO [train.py:1114] (2/4) Epoch 7, batch 750, loss[loss=0.2469, simple_loss=0.3104, pruned_loss=0.06532, ctc_loss=0.1317, over 19515.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.2977, pruned_loss=0.06751, ctc_loss=0.1264, over 3774137.13 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:07:11,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.27 vs. limit=6.0 +2024-08-26 17:07:13,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=83658.66666666667, ans=0.125 +2024-08-26 17:07:15,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=83658.66666666667, ans=0.125 +2024-08-26 17:07:18,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=83712.0, ans=0.125 +2024-08-26 17:07:27,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.20 vs. limit=15.0 +2024-08-26 17:07:43,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=83818.66666666667, ans=0.0 +2024-08-26 17:07:48,231 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.533e+02 1.678e+02 1.875e+02 3.166e+02, threshold=3.356e+02, percent-clipped=0.0 +2024-08-26 17:07:49,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=83872.0, ans=0.2 +2024-08-26 17:07:57,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=83925.33333333333, ans=0.125 +2024-08-26 17:07:58,364 INFO [train.py:1114] (2/4) Epoch 7, batch 800, loss[loss=0.2048, simple_loss=0.2656, pruned_loss=0.05269, ctc_loss=0.09683, over 19792.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2973, pruned_loss=0.06752, ctc_loss=0.1262, over 3796585.84 frames. ], batch size: 49, lr: 2.10e-02, grad_scale: 32.0 +2024-08-26 17:08:01,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=83925.33333333333, ans=0.0 +2024-08-26 17:08:13,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.51 vs. limit=15.0 +2024-08-26 17:08:14,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=83978.66666666667, ans=0.0 +2024-08-26 17:08:17,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=83978.66666666667, ans=0.025 +2024-08-26 17:08:25,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=84032.0, ans=0.125 +2024-08-26 17:08:36,301 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.95 vs. 
limit=15.0 +2024-08-26 17:08:39,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.53 vs. limit=15.0 +2024-08-26 17:08:43,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84085.33333333333, ans=0.125 +2024-08-26 17:08:45,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=84085.33333333333, ans=0.125 +2024-08-26 17:08:56,343 INFO [train.py:1114] (2/4) Epoch 7, batch 850, loss[loss=0.2527, simple_loss=0.3095, pruned_loss=0.07088, ctc_loss=0.1352, over 19644.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2971, pruned_loss=0.06745, ctc_loss=0.126, over 3815446.47 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:09:00,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=84192.0, ans=0.125 +2024-08-26 17:09:07,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=84245.33333333333, ans=0.1 +2024-08-26 17:09:11,020 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=15.0 +2024-08-26 17:09:13,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=84245.33333333333, ans=0.02 +2024-08-26 17:09:14,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.82 vs. limit=15.0 +2024-08-26 17:09:29,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=84298.66666666667, ans=0.0 +2024-08-26 17:09:36,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=84298.66666666667, ans=0.04949747468305833 +2024-08-26 17:09:55,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=84352.0, ans=0.0 +2024-08-26 17:09:58,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=84352.0, ans=0.0 +2024-08-26 17:10:01,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.310e+02 1.545e+02 1.673e+02 1.909e+02 3.259e+02, threshold=3.346e+02, percent-clipped=0.0 +2024-08-26 17:10:04,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.00 vs. limit=15.0 +2024-08-26 17:10:06,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=84405.33333333333, ans=0.025 +2024-08-26 17:10:09,597 INFO [train.py:1114] (2/4) Epoch 7, batch 900, loss[loss=0.2279, simple_loss=0.281, pruned_loss=0.06455, ctc_loss=0.1142, over 19388.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2975, pruned_loss=0.06788, ctc_loss=0.1267, over 3818886.99 frames. 
], batch size: 48, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:10:18,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=84458.66666666667, ans=0.025 +2024-08-26 17:10:22,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=84512.0, ans=0.035 +2024-08-26 17:10:47,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84618.66666666667, ans=0.0 +2024-08-26 17:10:58,444 INFO [train.py:1114] (2/4) Epoch 7, batch 950, loss[loss=0.2146, simple_loss=0.2692, pruned_loss=0.05867, ctc_loss=0.1065, over 19514.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.2977, pruned_loss=0.06794, ctc_loss=0.1268, over 3819380.73 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:11:01,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=84725.33333333333, ans=0.125 +2024-08-26 17:11:11,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0 +2024-08-26 17:11:14,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=84778.66666666667, ans=0.1 +2024-08-26 17:11:30,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84832.0, ans=0.0 +2024-08-26 17:11:32,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=84885.33333333333, ans=0.0 +2024-08-26 17:11:34,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=84885.33333333333, ans=0.0 +2024-08-26 17:11:48,304 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.566e+02 1.708e+02 1.976e+02 3.572e+02, threshold=3.415e+02, percent-clipped=1.0 +2024-08-26 17:12:17,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=84992.0, ans=0.0 +2024-08-26 17:12:18,428 INFO [train.py:1114] (2/4) Epoch 7, batch 1000, loss[loss=0.2214, simple_loss=0.2841, pruned_loss=0.05811, ctc_loss=0.1062, over 19845.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2986, pruned_loss=0.06838, ctc_loss=0.1277, over 3815233.65 frames. 
], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:12:27,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=85045.33333333333, ans=0.025 +2024-08-26 17:12:31,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85045.33333333333, ans=0.125 +2024-08-26 17:12:58,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=85045.33333333333, ans=0.025 +2024-08-26 17:13:01,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=85098.66666666667, ans=0.0 +2024-08-26 17:13:44,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=85152.0, ans=0.2 +2024-08-26 17:13:56,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=85205.33333333333, ans=0.0 +2024-08-26 17:13:59,712 INFO [train.py:1114] (2/4) Epoch 7, batch 1050, loss[loss=0.2369, simple_loss=0.2958, pruned_loss=0.06444, ctc_loss=0.123, over 19839.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2977, pruned_loss=0.06781, ctc_loss=0.1267, over 3822145.97 frames. ], batch size: 57, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:14:03,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85258.66666666667, ans=0.1 +2024-08-26 17:14:08,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=85312.0, ans=0.125 +2024-08-26 17:16:21,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=85365.33333333333, ans=0.025 +2024-08-26 17:16:24,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=85365.33333333333, ans=0.5 +2024-08-26 17:16:26,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=85365.33333333333, ans=0.2 +2024-08-26 17:16:33,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.88 vs. limit=15.0 +2024-08-26 17:16:35,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=85418.66666666667, ans=0.025 +2024-08-26 17:16:40,666 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.449e+02 1.584e+02 1.768e+02 2.861e+02, threshold=3.169e+02, percent-clipped=0.0 +2024-08-26 17:16:43,845 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:16:44,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=85472.0, ans=0.0 +2024-08-26 17:16:48,369 INFO [train.py:1114] (2/4) Epoch 7, batch 1100, loss[loss=0.2148, simple_loss=0.2748, pruned_loss=0.05715, ctc_loss=0.1012, over 19583.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2968, pruned_loss=0.06733, ctc_loss=0.1258, over 3830826.14 frames. 
], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:16:49,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85525.33333333333, ans=0.1 +2024-08-26 17:16:52,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=85525.33333333333, ans=0.125 +2024-08-26 17:16:56,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=85525.33333333333, ans=0.025 +2024-08-26 17:16:56,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.33 vs. limit=15.0 +2024-08-26 17:16:57,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-26 17:17:00,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-26 17:17:16,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85685.33333333333, ans=0.125 +2024-08-26 17:17:45,022 INFO [train.py:1114] (2/4) Epoch 7, batch 1150, loss[loss=0.2027, simple_loss=0.264, pruned_loss=0.05148, ctc_loss=0.09585, over 19580.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2971, pruned_loss=0.06749, ctc_loss=0.1259, over 3830840.94 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-26 17:17:50,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=85792.0, ans=0.125 +2024-08-26 17:18:03,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=85845.33333333333, ans=0.0 +2024-08-26 17:18:12,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=85898.66666666667, ans=0.1 +2024-08-26 17:18:15,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=85898.66666666667, ans=0.025 +2024-08-26 17:18:15,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=85898.66666666667, ans=0.0 +2024-08-26 17:18:24,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=85952.0, ans=0.125 +2024-08-26 17:18:38,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=86005.33333333333, ans=0.125 +2024-08-26 17:18:41,955 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.522e+02 1.667e+02 1.891e+02 3.736e+02, threshold=3.335e+02, percent-clipped=2.0 +2024-08-26 17:18:46,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=86005.33333333333, ans=0.2 +2024-08-26 17:18:48,634 INFO [train.py:1114] (2/4) Epoch 7, batch 1200, loss[loss=0.2516, simple_loss=0.3088, pruned_loss=0.07076, ctc_loss=0.1325, over 19829.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.298, pruned_loss=0.06773, ctc_loss=0.1267, over 3825576.42 frames. 
], batch size: 57, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:18:48,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86058.66666666667, ans=0.0 +2024-08-26 17:18:55,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=86058.66666666667, ans=0.025 +2024-08-26 17:19:12,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=86165.33333333333, ans=0.025 +2024-08-26 17:19:28,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=86218.66666666667, ans=0.2 +2024-08-26 17:19:54,874 INFO [train.py:1114] (2/4) Epoch 7, batch 1250, loss[loss=0.2462, simple_loss=0.3015, pruned_loss=0.07026, ctc_loss=0.1259, over 19526.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.2987, pruned_loss=0.06783, ctc_loss=0.1266, over 3843045.10 frames. ], batch size: 61, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:19:56,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.84 vs. limit=22.5 +2024-08-26 17:20:01,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86325.33333333333, ans=0.1 +2024-08-26 17:20:20,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.59 vs. limit=22.5 +2024-08-26 17:20:25,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.33 vs. limit=22.5 +2024-08-26 17:20:30,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=86485.33333333333, ans=0.5 +2024-08-26 17:20:35,653 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.207e+02 1.476e+02 1.609e+02 1.857e+02 3.245e+02, threshold=3.218e+02, percent-clipped=0.0 +2024-08-26 17:20:44,743 INFO [train.py:1114] (2/4) Epoch 7, batch 1300, loss[loss=0.274, simple_loss=0.3234, pruned_loss=0.08246, ctc_loss=0.1494, over 18934.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2977, pruned_loss=0.06733, ctc_loss=0.1257, over 3846312.24 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:20:44,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=86592.0, ans=0.0 +2024-08-26 17:20:59,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=86645.33333333333, ans=0.125 +2024-08-26 17:21:08,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0 +2024-08-26 17:21:38,925 INFO [train.py:1114] (2/4) Epoch 7, batch 1350, loss[loss=0.2179, simple_loss=0.2794, pruned_loss=0.05672, ctc_loss=0.1072, over 19768.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.297, pruned_loss=0.06689, ctc_loss=0.1252, over 3856738.72 frames. 
], batch size: 54, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:21:43,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=86858.66666666667, ans=0.125 +2024-08-26 17:21:49,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=15.0 +2024-08-26 17:21:56,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=86965.33333333333, ans=0.035 +2024-08-26 17:21:59,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.98 vs. limit=15.0 +2024-08-26 17:21:59,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-08-26 17:22:04,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=86965.33333333333, ans=0.2 +2024-08-26 17:22:06,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=87018.66666666667, ans=0.125 +2024-08-26 17:22:10,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=87018.66666666667, ans=0.0 +2024-08-26 17:22:19,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.495e+02 1.726e+02 1.992e+02 3.104e+02, threshold=3.452e+02, percent-clipped=0.0 +2024-08-26 17:22:20,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=87072.0, ans=0.125 +2024-08-26 17:22:26,110 INFO [train.py:1114] (2/4) Epoch 7, batch 1400, loss[loss=0.2202, simple_loss=0.2708, pruned_loss=0.0607, ctc_loss=0.1204, over 19666.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2971, pruned_loss=0.06704, ctc_loss=0.1255, over 3863622.39 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:22:28,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=87125.33333333333, ans=0.5 +2024-08-26 17:22:33,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=87125.33333333333, ans=0.05 +2024-08-26 17:23:02,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=87285.33333333333, ans=0.125 +2024-08-26 17:23:04,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.66 vs. limit=22.5 +2024-08-26 17:23:06,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=87285.33333333333, ans=0.125 +2024-08-26 17:23:28,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. 
limit=6.0 +2024-08-26 17:23:31,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=87338.66666666667, ans=0.125 +2024-08-26 17:23:35,097 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:23:35,695 INFO [train.py:1114] (2/4) Epoch 7, batch 1450, loss[loss=0.2466, simple_loss=0.3015, pruned_loss=0.06957, ctc_loss=0.1311, over 19690.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2981, pruned_loss=0.0676, ctc_loss=0.1264, over 3861288.08 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:23:35,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=87392.0, ans=0.025 +2024-08-26 17:23:41,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. limit=6.0 +2024-08-26 17:23:43,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87392.0, ans=0.1 +2024-08-26 17:23:44,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=87445.33333333333, ans=0.0 +2024-08-26 17:24:05,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87552.0, ans=0.0 +2024-08-26 17:24:18,015 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:24:19,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=87605.33333333333, ans=0.125 +2024-08-26 17:24:20,637 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.540e+02 1.669e+02 1.894e+02 3.453e+02, threshold=3.338e+02, percent-clipped=1.0 +2024-08-26 17:24:24,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.05 vs. limit=10.0 +2024-08-26 17:24:29,674 INFO [train.py:1114] (2/4) Epoch 7, batch 1500, loss[loss=0.238, simple_loss=0.3016, pruned_loss=0.06225, ctc_loss=0.1249, over 19598.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2979, pruned_loss=0.06728, ctc_loss=0.1259, over 3861141.13 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:24:40,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.44 vs. limit=15.0 +2024-08-26 17:24:44,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=87712.0, ans=0.125 +2024-08-26 17:24:56,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.15 vs. limit=10.0 +2024-08-26 17:25:17,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=87872.0, ans=0.09899494936611666 +2024-08-26 17:25:19,508 INFO [train.py:1114] (2/4) Epoch 7, batch 1550, loss[loss=0.2418, simple_loss=0.2963, pruned_loss=0.06815, ctc_loss=0.1276, over 19605.00 frames. 
], tot_loss[loss=0.2416, simple_loss=0.298, pruned_loss=0.06735, ctc_loss=0.1261, over 3846994.56 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:25:20,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=87925.33333333333, ans=0.125 +2024-08-26 17:25:20,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=87925.33333333333, ans=0.0 +2024-08-26 17:25:40,854 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-08-26 17:26:04,260 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.559e+02 1.788e+02 2.182e+02 5.116e+02, threshold=3.576e+02, percent-clipped=3.0 +2024-08-26 17:26:10,130 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:26:10,938 INFO [train.py:1114] (2/4) Epoch 7, batch 1600, loss[loss=0.2592, simple_loss=0.3094, pruned_loss=0.07649, ctc_loss=0.1403, over 19855.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2978, pruned_loss=0.0676, ctc_loss=0.1264, over 3836156.02 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:26:19,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=88192.0, ans=0.0 +2024-08-26 17:26:52,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88405.33333333333, ans=0.125 +2024-08-26 17:26:54,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88405.33333333333, ans=0.1 +2024-08-26 17:27:01,932 INFO [train.py:1114] (2/4) Epoch 7, batch 1650, loss[loss=0.2231, simple_loss=0.2899, pruned_loss=0.05611, ctc_loss=0.11, over 19662.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2975, pruned_loss=0.06744, ctc_loss=0.1262, over 3832636.10 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:27:33,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=88458.66666666667, ans=0.125 +2024-08-26 17:28:09,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.78 vs. limit=15.0 +2024-08-26 17:28:57,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.94 vs. limit=6.0 +2024-08-26 17:28:58,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=88618.66666666667, ans=0.0 +2024-08-26 17:29:07,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=88672.0, ans=0.0 +2024-08-26 17:29:25,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.503e+02 1.653e+02 1.809e+02 2.992e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-26 17:29:40,037 INFO [train.py:1114] (2/4) Epoch 7, batch 1700, loss[loss=0.217, simple_loss=0.2701, pruned_loss=0.05909, ctc_loss=0.1145, over 19706.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.297, pruned_loss=0.06692, ctc_loss=0.1255, over 3847235.14 frames. 
], batch size: 46, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:29:40,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=88725.33333333333, ans=0.125 +2024-08-26 17:30:02,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=88725.33333333333, ans=0.2 +2024-08-26 17:30:36,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=88938.66666666667, ans=0.5 +2024-08-26 17:30:42,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=88938.66666666667, ans=0.125 +2024-08-26 17:30:44,517 INFO [train.py:1114] (2/4) Epoch 7, batch 1750, loss[loss=0.2178, simple_loss=0.2707, pruned_loss=0.06024, ctc_loss=0.1113, over 19661.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2966, pruned_loss=0.06676, ctc_loss=0.1253, over 3851361.92 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:31:03,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=89098.66666666667, ans=0.2 +2024-08-26 17:31:23,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.487e+02 1.622e+02 1.808e+02 3.869e+02, threshold=3.245e+02, percent-clipped=1.0 +2024-08-26 17:31:29,439 INFO [train.py:1114] (2/4) Epoch 7, batch 1800, loss[loss=0.2475, simple_loss=0.3106, pruned_loss=0.06714, ctc_loss=0.1254, over 19615.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2969, pruned_loss=0.06671, ctc_loss=0.1252, over 3853077.02 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:31:30,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=89258.66666666667, ans=0.125 +2024-08-26 17:31:42,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=89312.0, ans=0.125 +2024-08-26 17:31:45,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=89312.0, ans=0.125 +2024-08-26 17:31:56,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0 +2024-08-26 17:32:02,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=89418.66666666667, ans=0.0 +2024-08-26 17:32:08,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=89472.0, ans=0.0 +2024-08-26 17:32:14,104 INFO [train.py:1114] (2/4) Epoch 7, batch 1850, loss[loss=0.2551, simple_loss=0.3181, pruned_loss=0.07032, ctc_loss=0.1285, over 19598.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2966, pruned_loss=0.06674, ctc_loss=0.1251, over 3856867.39 frames. 
], batch size: 57, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:32:16,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89525.33333333333, ans=0.1 +2024-08-26 17:32:33,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=89632.0, ans=0.0 +2024-08-26 17:32:34,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89632.0, ans=0.1 +2024-08-26 17:32:34,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.95 vs. limit=15.0 +2024-08-26 17:32:35,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.45 vs. limit=15.0 +2024-08-26 17:32:55,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.590e+02 1.759e+02 2.003e+02 3.443e+02, threshold=3.517e+02, percent-clipped=1.0 +2024-08-26 17:33:01,840 INFO [train.py:1114] (2/4) Epoch 7, batch 1900, loss[loss=0.2547, simple_loss=0.3124, pruned_loss=0.07072, ctc_loss=0.1389, over 19692.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.297, pruned_loss=0.06671, ctc_loss=0.125, over 3861026.33 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:33:03,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89792.0, ans=0.125 +2024-08-26 17:33:13,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=89845.33333333333, ans=15.0 +2024-08-26 17:35:14,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=90005.33333333333, ans=0.125 +2024-08-26 17:35:23,456 INFO [train.py:1114] (2/4) Epoch 7, batch 1950, loss[loss=0.2225, simple_loss=0.2837, pruned_loss=0.05908, ctc_loss=0.1077, over 19590.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2979, pruned_loss=0.06694, ctc_loss=0.1254, over 3870001.50 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:35:39,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.18 vs. limit=22.5 +2024-08-26 17:35:40,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=90165.33333333333, ans=0.2 +2024-08-26 17:35:42,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.42 vs. limit=15.0 +2024-08-26 17:35:58,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90218.66666666667, ans=0.1 +2024-08-26 17:35:59,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=90272.0, ans=0.0 +2024-08-26 17:36:00,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.96 vs. 
limit=10.0 +2024-08-26 17:36:03,249 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.531e+02 1.657e+02 1.854e+02 3.915e+02, threshold=3.314e+02, percent-clipped=1.0 +2024-08-26 17:36:08,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=90325.33333333333, ans=0.125 +2024-08-26 17:36:09,474 INFO [train.py:1114] (2/4) Epoch 7, batch 2000, loss[loss=0.206, simple_loss=0.2592, pruned_loss=0.05507, ctc_loss=0.1069, over 19660.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2984, pruned_loss=0.06741, ctc_loss=0.1262, over 3854536.84 frames. ], batch size: 45, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:10,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90325.33333333333, ans=0.1 +2024-08-26 17:36:19,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:20,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:33,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=90432.0, ans=0.125 +2024-08-26 17:36:36,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90485.33333333333, ans=0.1 +2024-08-26 17:36:53,977 INFO [train.py:1114] (2/4) Epoch 7, batch 2050, loss[loss=0.1952, simple_loss=0.2528, pruned_loss=0.0502, ctc_loss=0.09333, over 19706.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.297, pruned_loss=0.06709, ctc_loss=0.1255, over 3850158.93 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:54,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90592.0, ans=0.1 +2024-08-26 17:37:03,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=90645.33333333333, ans=0.2 +2024-08-26 17:37:06,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=90645.33333333333, ans=0.0 +2024-08-26 17:37:10,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=90698.66666666667, ans=0.2 +2024-08-26 17:37:10,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=90698.66666666667, ans=0.04949747468305833 +2024-08-26 17:37:32,300 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.477e+02 1.642e+02 1.962e+02 4.346e+02, threshold=3.284e+02, percent-clipped=3.0 +2024-08-26 17:37:32,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90805.33333333333, ans=0.1 +2024-08-26 17:37:33,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=90805.33333333333, ans=0.125 +2024-08-26 17:37:38,470 INFO [train.py:1114] (2/4) Epoch 7, batch 2100, loss[loss=0.2478, simple_loss=0.3051, pruned_loss=0.06956, ctc_loss=0.1283, over 19772.00 frames. 
], tot_loss[loss=0.2401, simple_loss=0.2966, pruned_loss=0.06682, ctc_loss=0.1249, over 3857497.22 frames. ], batch size: 54, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:37:48,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=90858.66666666667, ans=0.125 +2024-08-26 17:37:52,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=12.0 +2024-08-26 17:37:52,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.37 vs. limit=15.0 +2024-08-26 17:38:08,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=91018.66666666667, ans=0.04949747468305833 +2024-08-26 17:38:10,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=91018.66666666667, ans=0.0 +2024-08-26 17:38:23,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91072.0, ans=0.125 +2024-08-26 17:38:25,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91125.33333333333, ans=0.125 +2024-08-26 17:38:26,423 INFO [train.py:1114] (2/4) Epoch 7, batch 2150, loss[loss=0.2215, simple_loss=0.2797, pruned_loss=0.05844, ctc_loss=0.1161, over 19856.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2957, pruned_loss=0.06642, ctc_loss=0.124, over 3868550.33 frames. ], batch size: 52, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:38:26,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91125.33333333333, ans=0.125 +2024-08-26 17:38:29,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=91125.33333333333, ans=0.125 +2024-08-26 17:38:33,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91125.33333333333, ans=0.1 +2024-08-26 17:38:37,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-26 17:38:40,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=91178.66666666667, ans=0.2 +2024-08-26 17:38:54,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91285.33333333333, ans=0.125 +2024-08-26 17:39:02,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=91338.66666666667, ans=0.125 +2024-08-26 17:39:04,130 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.485e+02 1.702e+02 1.931e+02 2.999e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-26 17:39:07,113 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:39:10,354 INFO [train.py:1114] (2/4) Epoch 7, batch 2200, loss[loss=0.2577, simple_loss=0.3098, pruned_loss=0.07424, ctc_loss=0.1426, over 19579.00 frames. 
], tot_loss[loss=0.2394, simple_loss=0.2959, pruned_loss=0.06664, ctc_loss=0.1243, over 3867474.02 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:39:19,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=91445.33333333333, ans=0.125 +2024-08-26 17:39:22,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=91445.33333333333, ans=0.07 +2024-08-26 17:39:22,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=91445.33333333333, ans=0.035 +2024-08-26 17:39:30,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91498.66666666667, ans=0.125 +2024-08-26 17:39:38,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.54 vs. limit=15.0 +2024-08-26 17:39:48,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91605.33333333333, ans=0.1 +2024-08-26 17:39:54,477 INFO [train.py:1114] (2/4) Epoch 7, batch 2250, loss[loss=0.2444, simple_loss=0.3035, pruned_loss=0.0677, ctc_loss=0.1249, over 19606.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2958, pruned_loss=0.06628, ctc_loss=0.1236, over 3867046.13 frames. ], batch size: 55, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:39:56,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=91658.66666666667, ans=0.2 +2024-08-26 17:40:03,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91712.0, ans=0.125 +2024-08-26 17:40:11,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=91765.33333333333, ans=0.0 +2024-08-26 17:40:14,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91765.33333333333, ans=0.125 +2024-08-26 17:40:32,442 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.538e+02 1.708e+02 1.997e+02 3.315e+02, threshold=3.416e+02, percent-clipped=0.0 +2024-08-26 17:40:36,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=91872.0, ans=0.025 +2024-08-26 17:40:38,572 INFO [train.py:1114] (2/4) Epoch 7, batch 2300, loss[loss=0.2163, simple_loss=0.2773, pruned_loss=0.05578, ctc_loss=0.1092, over 19512.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2953, pruned_loss=0.06651, ctc_loss=0.1239, over 3860740.89 frames. 
], batch size: 49, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:40:42,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=91925.33333333333, ans=0.0 +2024-08-26 17:40:44,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=91925.33333333333, ans=0.0 +2024-08-26 17:40:48,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=91978.66666666667, ans=0.0 +2024-08-26 17:40:51,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91978.66666666667, ans=0.1 +2024-08-26 17:40:52,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91978.66666666667, ans=0.125 +2024-08-26 17:40:57,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=92032.0, ans=0.125 +2024-08-26 17:41:00,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.57 vs. limit=10.0 +2024-08-26 17:41:07,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-08-26 17:41:14,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=15.0 +2024-08-26 17:41:22,883 INFO [train.py:1114] (2/4) Epoch 7, batch 2350, loss[loss=0.2625, simple_loss=0.3184, pruned_loss=0.07482, ctc_loss=0.1421, over 19682.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.2956, pruned_loss=0.0669, ctc_loss=0.1248, over 3863305.38 frames. ], batch size: 63, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:41:25,569 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:41:29,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92192.0, ans=0.1 +2024-08-26 17:41:29,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=15.0 +2024-08-26 17:41:30,968 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.47 vs. limit=22.5 +2024-08-26 17:41:53,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.60 vs. limit=15.0 +2024-08-26 17:41:58,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=92405.33333333333, ans=0.0 +2024-08-26 17:42:01,678 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.515e+02 1.664e+02 1.862e+02 3.479e+02, threshold=3.327e+02, percent-clipped=1.0 +2024-08-26 17:42:06,893 INFO [train.py:1114] (2/4) Epoch 7, batch 2400, loss[loss=0.2396, simple_loss=0.3064, pruned_loss=0.06374, ctc_loss=0.1135, over 19256.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.298, pruned_loss=0.06776, ctc_loss=0.1258, over 3857578.70 frames. 
], batch size: 71, lr: 2.01e-02, grad_scale: 32.0 +2024-08-26 17:42:24,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=92565.33333333333, ans=0.0 +2024-08-26 17:42:25,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92565.33333333333, ans=0.1 +2024-08-26 17:42:56,035 INFO [train.py:1114] (2/4) Epoch 7, batch 2450, loss[loss=0.3316, simple_loss=0.3528, pruned_loss=0.1134, ctc_loss=0.2088, over 13041.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3025, pruned_loss=0.07137, ctc_loss=0.1329, over 3730281.85 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 16.0 +2024-08-26 17:43:01,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=92725.33333333333, ans=0.0 +2024-08-26 17:43:05,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0 +2024-08-26 17:43:07,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=92778.66666666667, ans=0.125 +2024-08-26 17:43:19,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92832.0, ans=0.125 +2024-08-26 17:43:29,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=15.0 +2024-08-26 17:44:23,152 INFO [train.py:1114] (2/4) Epoch 8, batch 0, loss[loss=0.2338, simple_loss=0.2856, pruned_loss=0.06568, ctc_loss=0.1264, over 19399.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2856, pruned_loss=0.06568, ctc_loss=0.1264, over 19399.00 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:44:23,152 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 17:44:49,269 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.2003, simple_loss=0.2903, pruned_loss=0.04062, ctc_loss=0.07268, over 944034.00 frames. +2024-08-26 17:44:49,270 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB +2024-08-26 17:44:55,028 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.675e+02 1.918e+02 2.084e+02 4.365e+02, threshold=3.836e+02, percent-clipped=1.0 +2024-08-26 17:45:17,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=92986.66666666667, ans=0.125 +2024-08-26 17:45:28,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93040.0, ans=0.1 +2024-08-26 17:45:43,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=93146.66666666667, ans=0.125 +2024-08-26 17:45:48,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=15.0 +2024-08-26 17:45:51,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=93146.66666666667, ans=0.0 +2024-08-26 17:45:54,269 INFO [train.py:1114] (2/4) Epoch 8, batch 50, loss[loss=0.2261, simple_loss=0.2782, pruned_loss=0.06415, ctc_loss=0.1145, over 19716.00 frames. 
], tot_loss[loss=0.2424, simple_loss=0.2986, pruned_loss=0.06771, ctc_loss=0.127, over 843278.58 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:46:18,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-08-26 17:46:26,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.66 vs. limit=6.0 +2024-08-26 17:46:38,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.00 vs. limit=10.0 +2024-08-26 17:46:42,913 INFO [train.py:1114] (2/4) Epoch 8, batch 100, loss[loss=0.2141, simple_loss=0.2857, pruned_loss=0.05192, ctc_loss=0.09654, over 19700.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.2996, pruned_loss=0.06708, ctc_loss=0.1262, over 1498192.96 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:46:48,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.13 vs. limit=15.0 +2024-08-26 17:46:48,504 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.574e+02 1.749e+02 2.053e+02 3.512e+02, threshold=3.498e+02, percent-clipped=0.0 +2024-08-26 17:47:04,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=93573.33333333333, ans=0.0 +2024-08-26 17:47:20,577 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.84 vs. limit=6.0 +2024-08-26 17:47:32,169 INFO [train.py:1114] (2/4) Epoch 8, batch 150, loss[loss=0.2243, simple_loss=0.2773, pruned_loss=0.06198, ctc_loss=0.1185, over 19729.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.2965, pruned_loss=0.06579, ctc_loss=0.1232, over 2026296.86 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:47:41,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=93786.66666666667, ans=0.2 +2024-08-26 17:47:50,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=93786.66666666667, ans=0.04949747468305833 +2024-08-26 17:47:59,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0 +2024-08-26 17:48:00,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=93893.33333333333, ans=0.0 +2024-08-26 17:48:06,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=93893.33333333333, ans=0.2 +2024-08-26 17:48:09,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93893.33333333333, ans=0.125 +2024-08-26 17:48:09,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0 +2024-08-26 17:48:14,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.80 vs. 
limit=12.0 +2024-08-26 17:48:20,169 INFO [train.py:1114] (2/4) Epoch 8, batch 200, loss[loss=0.2605, simple_loss=0.3166, pruned_loss=0.07396, ctc_loss=0.141, over 18115.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.2941, pruned_loss=0.06461, ctc_loss=0.121, over 2433444.82 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:48:25,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.434e+02 1.574e+02 1.787e+02 2.973e+02, threshold=3.148e+02, percent-clipped=0.0 +2024-08-26 17:48:30,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=94000.0, ans=0.1 +2024-08-26 17:48:31,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=94053.33333333333, ans=0.125 +2024-08-26 17:48:42,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=94053.33333333333, ans=0.125 +2024-08-26 17:48:45,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=94106.66666666667, ans=0.2 +2024-08-26 17:48:47,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=94106.66666666667, ans=0.0 +2024-08-26 17:48:47,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=94106.66666666667, ans=0.125 +2024-08-26 17:48:52,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=94160.0, ans=0.0 +2024-08-26 17:49:12,241 INFO [train.py:1114] (2/4) Epoch 8, batch 250, loss[loss=0.2493, simple_loss=0.3083, pruned_loss=0.06951, ctc_loss=0.1284, over 19420.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.2938, pruned_loss=0.06416, ctc_loss=0.1201, over 2753964.23 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:49:14,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=94266.66666666667, ans=0.2 +2024-08-26 17:49:35,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=94373.33333333333, ans=10.0 +2024-08-26 17:49:44,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94426.66666666667, ans=0.125 +2024-08-26 17:49:48,124 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-08-26 17:49:58,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=94480.0, ans=0.05 +2024-08-26 17:50:03,381 INFO [train.py:1114] (2/4) Epoch 8, batch 300, loss[loss=0.2246, simple_loss=0.2949, pruned_loss=0.05575, ctc_loss=0.1072, over 19526.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2939, pruned_loss=0.06447, ctc_loss=0.1204, over 2999223.72 frames. 
], batch size: 61, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:50:09,194 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.482e+02 1.652e+02 1.879e+02 4.693e+02, threshold=3.305e+02, percent-clipped=1.0 +2024-08-26 17:50:12,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=94586.66666666667, ans=0.2 +2024-08-26 17:50:16,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94586.66666666667, ans=0.125 +2024-08-26 17:50:21,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94640.0, ans=0.125 +2024-08-26 17:50:23,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=94640.0, ans=0.05 +2024-08-26 17:50:26,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=94640.0, ans=0.0 +2024-08-26 17:50:50,293 INFO [train.py:1114] (2/4) Epoch 8, batch 350, loss[loss=0.1903, simple_loss=0.254, pruned_loss=0.04449, ctc_loss=0.09418, over 19759.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2938, pruned_loss=0.06407, ctc_loss=0.1199, over 3189915.13 frames. ], batch size: 48, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:50:59,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=94853.33333333333, ans=0.125 +2024-08-26 17:51:04,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=94853.33333333333, ans=0.0 +2024-08-26 17:51:06,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=94853.33333333333, ans=0.035 +2024-08-26 17:51:30,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=94960.0, ans=0.0 +2024-08-26 17:51:31,571 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:52:19,167 INFO [train.py:1114] (2/4) Epoch 8, batch 400, loss[loss=0.2254, simple_loss=0.2916, pruned_loss=0.05803, ctc_loss=0.1078, over 19503.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.2931, pruned_loss=0.06377, ctc_loss=0.1194, over 3341611.21 frames. 
], batch size: 54, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:52:19,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95066.66666666667, ans=0.125 +2024-08-26 17:52:22,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=95066.66666666667, ans=0.025 +2024-08-26 17:52:22,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=95066.66666666667, ans=0.125 +2024-08-26 17:52:24,631 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.326e+02 1.574e+02 1.829e+02 2.059e+02 4.627e+02, threshold=3.659e+02, percent-clipped=2.0 +2024-08-26 17:52:35,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=95120.0, ans=0.125 +2024-08-26 17:52:41,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=95173.33333333333, ans=0.0 +2024-08-26 17:53:04,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=95280.0, ans=0.0 +2024-08-26 17:53:07,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=95333.33333333333, ans=0.125 +2024-08-26 17:53:08,491 INFO [train.py:1114] (2/4) Epoch 8, batch 450, loss[loss=0.2214, simple_loss=0.2882, pruned_loss=0.05627, ctc_loss=0.105, over 19616.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.2931, pruned_loss=0.06381, ctc_loss=0.1197, over 3450604.61 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:53:17,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=95333.33333333333, ans=0.2 +2024-08-26 17:53:21,746 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:53:23,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=95386.66666666667, ans=0.0 +2024-08-26 17:53:26,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.36 vs. limit=22.5 +2024-08-26 17:53:29,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0 +2024-08-26 17:53:45,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=95493.33333333333, ans=0.125 +2024-08-26 17:53:50,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.60 vs. limit=10.0 +2024-08-26 17:53:58,075 INFO [train.py:1114] (2/4) Epoch 8, batch 500, loss[loss=0.2486, simple_loss=0.3093, pruned_loss=0.06969, ctc_loss=0.1214, over 19651.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2923, pruned_loss=0.06342, ctc_loss=0.1191, over 3546424.92 frames. 
], batch size: 63, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:54:02,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=95600.0, ans=0.2 +2024-08-26 17:54:03,647 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.468e+02 1.609e+02 1.778e+02 4.606e+02, threshold=3.218e+02, percent-clipped=1.0 +2024-08-26 17:54:37,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95706.66666666667, ans=0.125 +2024-08-26 17:55:43,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=95760.0, ans=0.125 +2024-08-26 17:55:55,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=95813.33333333333, ans=0.125 +2024-08-26 17:56:19,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95813.33333333333, ans=0.1 +2024-08-26 17:56:20,600 INFO [train.py:1114] (2/4) Epoch 8, batch 550, loss[loss=0.2498, simple_loss=0.3057, pruned_loss=0.07022, ctc_loss=0.1335, over 19348.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2922, pruned_loss=0.0634, ctc_loss=0.1191, over 3609011.82 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:57:17,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=95866.66666666667, ans=0.0 +2024-08-26 17:57:37,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95973.33333333333, ans=0.125 +2024-08-26 17:57:46,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-08-26 17:57:52,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96080.0, ans=0.1 +2024-08-26 17:57:58,513 INFO [train.py:1114] (2/4) Epoch 8, batch 600, loss[loss=0.2323, simple_loss=0.2974, pruned_loss=0.06145, ctc_loss=0.1107, over 19422.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2919, pruned_loss=0.06317, ctc_loss=0.1185, over 3667148.39 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:58:03,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.55 vs. limit=15.0 +2024-08-26 17:58:05,961 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.508e+02 1.654e+02 1.896e+02 3.415e+02, threshold=3.309e+02, percent-clipped=1.0 +2024-08-26 17:58:16,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=96186.66666666667, ans=0.0 +2024-08-26 17:58:39,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.02 vs. limit=10.0 +2024-08-26 17:58:47,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=96346.66666666667, ans=0.2 +2024-08-26 17:58:49,409 INFO [train.py:1114] (2/4) Epoch 8, batch 650, loss[loss=0.243, simple_loss=0.3005, pruned_loss=0.06732, ctc_loss=0.1271, over 19771.00 frames. 
], tot_loss[loss=0.2314, simple_loss=0.291, pruned_loss=0.06247, ctc_loss=0.1175, over 3717737.71 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 17:58:49,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.52 vs. limit=22.5 +2024-08-26 17:58:53,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96400.0, ans=0.125 +2024-08-26 17:58:59,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=96453.33333333333, ans=0.125 +2024-08-26 17:59:08,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96506.66666666667, ans=0.125 +2024-08-26 17:59:19,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=96560.0, ans=0.125 +2024-08-26 17:59:20,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96560.0, ans=0.1 +2024-08-26 17:59:24,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=96560.0, ans=0.2 +2024-08-26 17:59:36,098 INFO [train.py:1114] (2/4) Epoch 8, batch 700, loss[loss=0.2126, simple_loss=0.2774, pruned_loss=0.05347, ctc_loss=0.1021, over 19715.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.2921, pruned_loss=0.06291, ctc_loss=0.1183, over 3749215.44 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 17:59:41,809 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.481e+02 1.644e+02 1.817e+02 3.294e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-26 17:59:42,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=96666.66666666667, ans=0.2 +2024-08-26 18:00:07,861 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.12 vs. limit=15.0 +2024-08-26 18:00:22,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=96880.0, ans=0.125 +2024-08-26 18:00:23,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.53 vs. limit=15.0 +2024-08-26 18:00:27,683 INFO [train.py:1114] (2/4) Epoch 8, batch 750, loss[loss=0.2424, simple_loss=0.307, pruned_loss=0.06517, ctc_loss=0.1184, over 19518.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2922, pruned_loss=0.06304, ctc_loss=0.1182, over 3775297.67 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 18:00:36,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=15.0 +2024-08-26 18:00:41,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=96986.66666666667, ans=15.0 +2024-08-26 18:01:00,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.67 vs. 
limit=15.0 +2024-08-26 18:01:07,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=97146.66666666667, ans=0.0 +2024-08-26 18:01:19,053 INFO [train.py:1114] (2/4) Epoch 8, batch 800, loss[loss=0.2039, simple_loss=0.264, pruned_loss=0.05265, ctc_loss=0.09641, over 19412.00 frames. ], tot_loss[loss=0.233, simple_loss=0.292, pruned_loss=0.06326, ctc_loss=0.1186, over 3795967.75 frames. ], batch size: 48, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 18:01:24,568 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.524e+02 1.729e+02 2.039e+02 3.596e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 18:01:26,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=97200.0, ans=0.125 +2024-08-26 18:01:46,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-08-26 18:01:57,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=97413.33333333333, ans=0.0 +2024-08-26 18:01:58,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=97413.33333333333, ans=0.2 +2024-08-26 18:02:00,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97413.33333333333, ans=0.125 +2024-08-26 18:02:06,313 INFO [train.py:1114] (2/4) Epoch 8, batch 850, loss[loss=0.2671, simple_loss=0.3169, pruned_loss=0.07897, ctc_loss=0.1486, over 19658.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2918, pruned_loss=0.06314, ctc_loss=0.1184, over 3815049.30 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0 +2024-08-26 18:02:10,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.58 vs. limit=22.5 +2024-08-26 18:02:11,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=97466.66666666667, ans=0.125 +2024-08-26 18:02:19,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=97520.0, ans=10.0 +2024-08-26 18:02:20,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97520.0, ans=0.125 +2024-08-26 18:02:21,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=97520.0, ans=0.0 +2024-08-26 18:02:24,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.30 vs. limit=22.5 +2024-08-26 18:02:37,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=97626.66666666667, ans=0.0 +2024-08-26 18:02:38,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=97626.66666666667, ans=0.0 +2024-08-26 18:02:39,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.17 vs. 
limit=6.0 +2024-08-26 18:02:47,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.94 vs. limit=15.0 +2024-08-26 18:02:58,337 INFO [train.py:1114] (2/4) Epoch 8, batch 900, loss[loss=0.2115, simple_loss=0.2715, pruned_loss=0.05543, ctc_loss=0.1014, over 19796.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2918, pruned_loss=0.06344, ctc_loss=0.1185, over 3819189.68 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 32.0 +2024-08-26 18:03:04,003 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.578e+02 1.704e+02 2.106e+02 3.434e+02, threshold=3.409e+02, percent-clipped=0.0 +2024-08-26 18:03:04,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97733.33333333333, ans=0.125 +2024-08-26 18:03:05,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=97733.33333333333, ans=15.0 +2024-08-26 18:03:13,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=97786.66666666667, ans=0.0 +2024-08-26 18:03:13,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=97786.66666666667, ans=0.125 +2024-08-26 18:03:23,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0 +2024-08-26 18:03:29,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=97893.33333333333, ans=0.125 +2024-08-26 18:03:36,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=97946.66666666667, ans=0.0 +2024-08-26 18:03:41,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=97946.66666666667, ans=0.2 +2024-08-26 18:03:45,505 INFO [train.py:1114] (2/4) Epoch 8, batch 950, loss[loss=0.2056, simple_loss=0.2696, pruned_loss=0.05045, ctc_loss=0.1017, over 19477.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2925, pruned_loss=0.06395, ctc_loss=0.1195, over 3820856.06 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 16.0 +2024-08-26 18:03:59,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=98053.33333333333, ans=0.015 +2024-08-26 18:03:59,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98053.33333333333, ans=0.125 +2024-08-26 18:04:01,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.10 vs. limit=22.5 +2024-08-26 18:04:10,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=98106.66666666667, ans=0.0 +2024-08-26 18:04:31,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=98213.33333333333, ans=0.0 +2024-08-26 18:04:37,643 INFO [train.py:1114] (2/4) Epoch 8, batch 1000, loss[loss=0.1945, simple_loss=0.2638, pruned_loss=0.04504, ctc_loss=0.0878, over 19843.00 frames. 
], tot_loss[loss=0.2341, simple_loss=0.2927, pruned_loss=0.06384, ctc_loss=0.1195, over 3817543.07 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 16.0 +2024-08-26 18:04:44,377 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.497e+02 1.652e+02 1.874e+02 4.992e+02, threshold=3.305e+02, percent-clipped=2.0 +2024-08-26 18:04:51,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=98320.0, ans=0.125 +2024-08-26 18:05:16,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.15 vs. limit=15.0 +2024-08-26 18:05:24,685 INFO [train.py:1114] (2/4) Epoch 8, batch 1050, loss[loss=0.2205, simple_loss=0.2866, pruned_loss=0.05488, ctc_loss=0.1118, over 19822.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2921, pruned_loss=0.06358, ctc_loss=0.1188, over 3824746.11 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 16.0 +2024-08-26 18:05:28,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=98533.33333333333, ans=0.0 +2024-08-26 18:05:43,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=98586.66666666667, ans=0.125 +2024-08-26 18:05:47,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98586.66666666667, ans=0.125 +2024-08-26 18:05:58,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=98693.33333333333, ans=0.0 +2024-08-26 18:06:04,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=98693.33333333333, ans=0.025 +2024-08-26 18:06:14,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=98746.66666666667, ans=0.0 +2024-08-26 18:06:18,149 INFO [train.py:1114] (2/4) Epoch 8, batch 1100, loss[loss=0.2233, simple_loss=0.2866, pruned_loss=0.05849, ctc_loss=0.1073, over 19583.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.2918, pruned_loss=0.06345, ctc_loss=0.1185, over 3831219.00 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0 +2024-08-26 18:06:24,660 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.469e+02 1.560e+02 1.744e+02 3.443e+02, threshold=3.121e+02, percent-clipped=2.0 +2024-08-26 18:06:29,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=98853.33333333333, ans=0.125 +2024-08-26 18:06:56,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=98960.0, ans=0.125 +2024-08-26 18:07:10,073 INFO [train.py:1114] (2/4) Epoch 8, batch 1150, loss[loss=0.1908, simple_loss=0.2644, pruned_loss=0.0423, ctc_loss=0.08138, over 19609.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.2915, pruned_loss=0.0631, ctc_loss=0.1181, over 3830468.15 frames. 
], batch size: 52, lr: 1.84e-02, grad_scale: 16.0 +2024-08-26 18:07:17,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=99066.66666666667, ans=0.125 +2024-08-26 18:07:21,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=99120.0, ans=0.125 +2024-08-26 18:07:29,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.77 vs. limit=15.0 +2024-08-26 18:07:33,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99173.33333333333, ans=0.1 +2024-08-26 18:07:51,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=99280.0, ans=0.02 +2024-08-26 18:07:57,681 INFO [train.py:1114] (2/4) Epoch 8, batch 1200, loss[loss=0.2278, simple_loss=0.2991, pruned_loss=0.0562, ctc_loss=0.1104, over 19841.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2931, pruned_loss=0.06397, ctc_loss=0.1198, over 3825339.57 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0 +2024-08-26 18:08:03,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=99333.33333333333, ans=0.2 +2024-08-26 18:08:04,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.491e+02 1.608e+02 2.003e+02 2.840e+02, threshold=3.216e+02, percent-clipped=0.0 +2024-08-26 18:08:19,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=99440.0, ans=0.025 +2024-08-26 18:08:37,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-08-26 18:08:42,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=99546.66666666667, ans=0.125 +2024-08-26 18:08:49,183 INFO [train.py:1114] (2/4) Epoch 8, batch 1250, loss[loss=0.2395, simple_loss=0.3019, pruned_loss=0.06555, ctc_loss=0.115, over 19507.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2932, pruned_loss=0.06402, ctc_loss=0.1196, over 3843318.97 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0 +2024-08-26 18:08:56,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=99600.0, ans=0.125 +2024-08-26 18:09:40,597 INFO [train.py:1114] (2/4) Epoch 8, batch 1300, loss[loss=0.2684, simple_loss=0.3173, pruned_loss=0.07878, ctc_loss=0.1546, over 18875.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2925, pruned_loss=0.06361, ctc_loss=0.1188, over 3845959.95 frames. 
], batch size: 76, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:09:41,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99866.66666666667, ans=0.1 +2024-08-26 18:09:44,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=99866.66666666667, ans=0.2 +2024-08-26 18:09:47,132 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.481e+02 1.661e+02 1.866e+02 3.142e+02, threshold=3.323e+02, percent-clipped=0.0 +2024-08-26 18:10:05,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99973.33333333333, ans=0.125 +2024-08-26 18:10:06,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99973.33333333333, ans=0.1 +2024-08-26 18:10:07,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=99973.33333333333, ans=0.2 +2024-08-26 18:10:20,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100080.0, ans=0.125 +2024-08-26 18:10:20,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100080.0, ans=0.125 +2024-08-26 18:10:27,283 INFO [train.py:1114] (2/4) Epoch 8, batch 1350, loss[loss=0.2075, simple_loss=0.2767, pruned_loss=0.04957, ctc_loss=0.09805, over 19755.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2917, pruned_loss=0.06284, ctc_loss=0.1175, over 3857252.32 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:10:30,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.37 vs. limit=15.0 +2024-08-26 18:10:36,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=100186.66666666667, ans=0.0 +2024-08-26 18:10:40,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100186.66666666667, ans=0.1 +2024-08-26 18:10:44,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100186.66666666667, ans=0.1 +2024-08-26 18:11:07,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100346.66666666667, ans=0.1 +2024-08-26 18:11:08,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100346.66666666667, ans=0.125 +2024-08-26 18:11:14,664 INFO [train.py:1114] (2/4) Epoch 8, batch 1400, loss[loss=0.2057, simple_loss=0.2632, pruned_loss=0.0526, ctc_loss=0.1076, over 19645.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.291, pruned_loss=0.06246, ctc_loss=0.1167, over 3864103.57 frames. 
], batch size: 46, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:11:21,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=100400.0, ans=0.125 +2024-08-26 18:11:23,739 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.577e+02 1.859e+02 2.331e+02 3.237e+02, threshold=3.718e+02, percent-clipped=0.0 +2024-08-26 18:11:24,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.89 vs. limit=15.0 +2024-08-26 18:11:24,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100400.0, ans=0.0 +2024-08-26 18:11:30,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=100453.33333333333, ans=0.0 +2024-08-26 18:11:31,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=100453.33333333333, ans=0.0 +2024-08-26 18:11:32,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100453.33333333333, ans=0.1 +2024-08-26 18:11:38,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100453.33333333333, ans=0.125 +2024-08-26 18:11:40,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=100506.66666666667, ans=0.2 +2024-08-26 18:11:45,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.53 vs. limit=22.5 +2024-08-26 18:11:46,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=100506.66666666667, ans=0.125 +2024-08-26 18:11:52,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=100560.0, ans=0.5 +2024-08-26 18:12:09,360 INFO [train.py:1114] (2/4) Epoch 8, batch 1450, loss[loss=0.2264, simple_loss=0.2907, pruned_loss=0.0593, ctc_loss=0.1086, over 19681.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2918, pruned_loss=0.06279, ctc_loss=0.1174, over 3862316.08 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:12:21,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=100720.0, ans=0.2 +2024-08-26 18:12:22,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=100720.0, ans=0.2 +2024-08-26 18:12:33,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.68 vs. limit=15.0 +2024-08-26 18:12:34,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=100773.33333333333, ans=0.0 +2024-08-26 18:12:37,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. 
limit=15.0 +2024-08-26 18:12:44,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=100826.66666666667, ans=0.025 +2024-08-26 18:13:00,705 INFO [train.py:1114] (2/4) Epoch 8, batch 1500, loss[loss=0.223, simple_loss=0.2858, pruned_loss=0.05884, ctc_loss=0.1061, over 19585.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2922, pruned_loss=0.0628, ctc_loss=0.1173, over 3861611.60 frames. ], batch size: 57, lr: 1.83e-02, grad_scale: 32.0 +2024-08-26 18:13:07,544 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.594e+02 1.806e+02 5.150e+02, threshold=3.189e+02, percent-clipped=1.0 +2024-08-26 18:13:15,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=100986.66666666667, ans=0.0 +2024-08-26 18:13:16,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.95 vs. limit=15.0 +2024-08-26 18:13:28,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=101093.33333333333, ans=0.125 +2024-08-26 18:13:37,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101093.33333333333, ans=0.0 +2024-08-26 18:13:42,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.98 vs. limit=10.0 +2024-08-26 18:13:48,298 INFO [train.py:1114] (2/4) Epoch 8, batch 1550, loss[loss=0.2382, simple_loss=0.3015, pruned_loss=0.06425, ctc_loss=0.1161, over 19589.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2925, pruned_loss=0.06332, ctc_loss=0.1184, over 3846035.29 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:13:55,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=101200.0, ans=0.2 +2024-08-26 18:14:03,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101253.33333333333, ans=0.125 +2024-08-26 18:14:21,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=101360.0, ans=0.125 +2024-08-26 18:14:28,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=101360.0, ans=0.025 +2024-08-26 18:14:33,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101413.33333333333, ans=0.1 +2024-08-26 18:14:37,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101413.33333333333, ans=0.125 +2024-08-26 18:14:39,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101413.33333333333, ans=0.125 +2024-08-26 18:14:40,865 INFO [train.py:1114] (2/4) Epoch 8, batch 1600, loss[loss=0.25, simple_loss=0.3137, pruned_loss=0.0675, ctc_loss=0.1283, over 19851.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.2923, pruned_loss=0.0633, ctc_loss=0.1185, over 3835025.06 frames. 
], batch size: 57, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:14:47,307 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.562e+02 1.716e+02 2.059e+02 3.797e+02, threshold=3.431e+02, percent-clipped=2.0 +2024-08-26 18:14:48,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101466.66666666667, ans=0.1 +2024-08-26 18:14:50,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101520.0, ans=0.0 +2024-08-26 18:15:18,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=101626.66666666667, ans=0.0 +2024-08-26 18:15:31,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0 +2024-08-26 18:15:32,088 INFO [train.py:1114] (2/4) Epoch 8, batch 1650, loss[loss=0.2187, simple_loss=0.2894, pruned_loss=0.05333, ctc_loss=0.1033, over 19654.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.2917, pruned_loss=0.06303, ctc_loss=0.1181, over 3830497.38 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:15:33,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=101733.33333333333, ans=0.2 +2024-08-26 18:15:48,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101786.66666666667, ans=0.125 +2024-08-26 18:16:00,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.11 vs. limit=15.0 +2024-08-26 18:16:04,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101893.33333333333, ans=0.1 +2024-08-26 18:16:10,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.46 vs. limit=12.0 +2024-08-26 18:16:16,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101946.66666666667, ans=0.125 +2024-08-26 18:16:18,706 INFO [train.py:1114] (2/4) Epoch 8, batch 1700, loss[loss=0.2074, simple_loss=0.2589, pruned_loss=0.05686, ctc_loss=0.1055, over 19672.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2912, pruned_loss=0.06261, ctc_loss=0.1173, over 3845037.64 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:16:25,301 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.495e+02 1.737e+02 2.089e+02 3.401e+02, threshold=3.475e+02, percent-clipped=0.0 +2024-08-26 18:16:45,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=102160.0, ans=0.125 +2024-08-26 18:16:56,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=102213.33333333333, ans=0.5 +2024-08-26 18:17:03,796 INFO [train.py:1114] (2/4) Epoch 8, batch 1750, loss[loss=0.1959, simple_loss=0.2577, pruned_loss=0.04961, ctc_loss=0.08714, over 19663.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2905, pruned_loss=0.06236, ctc_loss=0.1166, over 3849345.66 frames. 
], batch size: 45, lr: 1.82e-02, grad_scale: 32.0 +2024-08-26 18:17:04,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102266.66666666667, ans=0.125 +2024-08-26 18:17:13,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=102320.0, ans=0.0 +2024-08-26 18:17:19,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102320.0, ans=0.125 +2024-08-26 18:17:31,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.42 vs. limit=10.0 +2024-08-26 18:17:32,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=102426.66666666667, ans=10.0 +2024-08-26 18:17:34,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.92 vs. limit=12.0 +2024-08-26 18:17:46,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=102480.0, ans=0.125 +2024-08-26 18:17:48,514 INFO [train.py:1114] (2/4) Epoch 8, batch 1800, loss[loss=0.2353, simple_loss=0.2983, pruned_loss=0.06202, ctc_loss=0.1206, over 19624.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2905, pruned_loss=0.06224, ctc_loss=0.1165, over 3851056.13 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 32.0 +2024-08-26 18:17:50,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102533.33333333333, ans=0.1 +2024-08-26 18:17:53,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=102533.33333333333, ans=0.025 +2024-08-26 18:17:56,842 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.517e+02 1.665e+02 1.949e+02 3.105e+02, threshold=3.330e+02, percent-clipped=0.0 +2024-08-26 18:18:06,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=102586.66666666667, ans=0.1 +2024-08-26 18:18:09,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102640.0, ans=0.1 +2024-08-26 18:18:33,242 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:18:36,735 INFO [train.py:1114] (2/4) Epoch 8, batch 1850, loss[loss=0.244, simple_loss=0.3063, pruned_loss=0.06603, ctc_loss=0.1238, over 19594.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2898, pruned_loss=0.06166, ctc_loss=0.1153, over 3854441.68 frames. 
], batch size: 57, lr: 1.81e-02, grad_scale: 32.0 +2024-08-26 18:18:37,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102800.0, ans=0.125 +2024-08-26 18:19:07,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=102960.0, ans=0.125 +2024-08-26 18:19:14,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103013.33333333333, ans=0.125 +2024-08-26 18:19:19,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103013.33333333333, ans=0.125 +2024-08-26 18:19:21,233 INFO [train.py:1114] (2/4) Epoch 8, batch 1900, loss[loss=0.2364, simple_loss=0.3026, pruned_loss=0.06166, ctc_loss=0.1173, over 19626.00 frames. ], tot_loss[loss=0.231, simple_loss=0.291, pruned_loss=0.06222, ctc_loss=0.1163, over 3859222.94 frames. ], batch size: 59, lr: 1.81e-02, grad_scale: 16.0 +2024-08-26 18:19:21,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=103066.66666666667, ans=0.09899494936611666 +2024-08-26 18:19:21,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-08-26 18:19:28,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.533e+02 1.714e+02 2.014e+02 3.062e+02, threshold=3.427e+02, percent-clipped=0.0 +2024-08-26 18:19:33,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=103120.0, ans=0.0 +2024-08-26 18:19:47,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103226.66666666667, ans=0.1 +2024-08-26 18:20:02,728 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0 +2024-08-26 18:20:04,902 INFO [train.py:1114] (2/4) Epoch 8, batch 1950, loss[loss=0.2222, simple_loss=0.285, pruned_loss=0.05789, ctc_loss=0.1092, over 19608.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2922, pruned_loss=0.06235, ctc_loss=0.1162, over 3868747.99 frames. ], batch size: 52, lr: 1.81e-02, grad_scale: 16.0 +2024-08-26 18:20:12,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103333.33333333333, ans=0.125 +2024-08-26 18:20:14,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=103386.66666666667, ans=0.0 +2024-08-26 18:20:23,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103440.0, ans=0.125 +2024-08-26 18:20:27,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=103440.0, ans=0.0 +2024-08-26 18:20:33,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=15.0 +2024-08-26 18:20:37,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.74 vs. 
limit=15.0 +2024-08-26 18:20:51,119 INFO [train.py:1114] (2/4) Epoch 8, batch 2000, loss[loss=0.1833, simple_loss=0.2461, pruned_loss=0.04316, ctc_loss=0.08549, over 19671.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2924, pruned_loss=0.0625, ctc_loss=0.1165, over 3852877.11 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 16.0 +2024-08-26 18:20:53,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103600.0, ans=0.125 +2024-08-26 18:20:58,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=103600.0, ans=0.025 +2024-08-26 18:20:59,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=103600.0, ans=0.0 +2024-08-26 18:21:00,308 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.619e+02 1.835e+02 2.136e+02 5.632e+02, threshold=3.670e+02, percent-clipped=2.0 +2024-08-26 18:21:03,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103653.33333333333, ans=0.125 +2024-08-26 18:21:07,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=103653.33333333333, ans=6.0 +2024-08-26 18:21:08,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=103653.33333333333, ans=0.0 +2024-08-26 18:21:20,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.59 vs. limit=15.0 +2024-08-26 18:21:27,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=103813.33333333333, ans=0.125 +2024-08-26 18:21:36,061 INFO [train.py:1114] (2/4) Epoch 8, batch 2050, loss[loss=0.2248, simple_loss=0.2813, pruned_loss=0.06136, ctc_loss=0.1141, over 19713.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.2914, pruned_loss=0.06232, ctc_loss=0.1162, over 3850629.72 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 16.0 +2024-08-26 18:21:42,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=103866.66666666667, ans=0.2 +2024-08-26 18:21:45,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=103920.0, ans=0.0 +2024-08-26 18:21:56,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103973.33333333333, ans=0.125 +2024-08-26 18:21:58,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103973.33333333333, ans=0.1 +2024-08-26 18:22:01,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104026.66666666667, ans=0.1 +2024-08-26 18:22:03,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104026.66666666667, ans=0.125 +2024-08-26 18:22:19,597 INFO [train.py:1114] (2/4) Epoch 8, batch 2100, loss[loss=0.2487, simple_loss=0.3035, pruned_loss=0.06993, ctc_loss=0.1352, over 19765.00 frames. 
], tot_loss[loss=0.2301, simple_loss=0.2905, pruned_loss=0.06175, ctc_loss=0.1155, over 3858009.65 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:22:25,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.76 vs. limit=22.5
+2024-08-26 18:22:26,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=104133.33333333333, ans=0.125
+2024-08-26 18:22:27,471 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.502e+02 1.673e+02 2.007e+02 2.886e+02, threshold=3.346e+02, percent-clipped=0.0
+2024-08-26 18:22:32,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.82 vs. limit=12.0
+2024-08-26 18:22:58,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.17 vs. limit=22.5
+2024-08-26 18:23:00,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=104346.66666666667, ans=0.0
+2024-08-26 18:23:03,052 INFO [train.py:1114] (2/4) Epoch 8, batch 2150, loss[loss=0.2167, simple_loss=0.2816, pruned_loss=0.05454, ctc_loss=0.1067, over 19862.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2893, pruned_loss=0.06116, ctc_loss=0.1144, over 3868816.83 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:23:19,840 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:23:43,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=104613.33333333333, ans=0.0
+2024-08-26 18:23:46,678 INFO [train.py:1114] (2/4) Epoch 8, batch 2200, loss[loss=0.2459, simple_loss=0.2971, pruned_loss=0.07055, ctc_loss=0.1342, over 19589.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2902, pruned_loss=0.06166, ctc_loss=0.1153, over 3866506.60 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:23:54,537 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.596e+02 1.839e+02 2.214e+02 3.376e+02, threshold=3.678e+02, percent-clipped=1.0
+2024-08-26 18:23:54,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104720.0, ans=0.0
+2024-08-26 18:23:57,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=15.0
+2024-08-26 18:24:09,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104773.33333333333, ans=0.125
+2024-08-26 18:24:21,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=104880.0, ans=0.125
+2024-08-26 18:24:30,584 INFO [train.py:1114] (2/4) Epoch 8, batch 2250, loss[loss=0.2169, simple_loss=0.2896, pruned_loss=0.05162, ctc_loss=0.1025, over 19607.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2905, pruned_loss=0.06173, ctc_loss=0.1156, over 3866394.67 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:24:35,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104933.33333333333, ans=0.125
+2024-08-26 18:24:37,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=104933.33333333333, ans=0.125
+2024-08-26 18:24:54,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=105040.0, ans=0.0
+2024-08-26 18:25:02,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=105093.33333333333, ans=0.2
+2024-08-26 18:25:04,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105093.33333333333, ans=0.1
+2024-08-26 18:25:14,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=105146.66666666667, ans=0.2
+2024-08-26 18:25:16,103 INFO [train.py:1114] (2/4) Epoch 8, batch 2300, loss[loss=0.2041, simple_loss=0.2685, pruned_loss=0.05116, ctc_loss=0.09373, over 19512.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2896, pruned_loss=0.06178, ctc_loss=0.1156, over 3859835.77 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 16.0
+2024-08-26 18:25:23,769 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.553e+02 1.767e+02 2.002e+02 4.280e+02, threshold=3.534e+02, percent-clipped=3.0
+2024-08-26 18:25:25,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.38 vs. limit=15.0
+2024-08-26 18:25:41,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=105360.0, ans=0.0
+2024-08-26 18:25:54,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.57 vs. limit=15.0
+2024-08-26 18:25:58,621 INFO [train.py:1114] (2/4) Epoch 8, batch 2350, loss[loss=0.262, simple_loss=0.3188, pruned_loss=0.07578, ctc_loss=0.1344, over 19671.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2897, pruned_loss=0.0621, ctc_loss=0.1158, over 3862756.37 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 16.0
+2024-08-26 18:26:12,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105520.0, ans=0.125
+2024-08-26 18:26:22,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105573.33333333333, ans=0.125
+2024-08-26 18:26:27,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105626.66666666667, ans=0.125
+2024-08-26 18:26:32,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105626.66666666667, ans=0.125
+2024-08-26 18:26:36,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105680.0, ans=0.0
+2024-08-26 18:26:42,922 INFO [train.py:1114] (2/4) Epoch 8, batch 2400, loss[loss=0.2484, simple_loss=0.3058, pruned_loss=0.07007, ctc_loss=0.127, over 19234.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2918, pruned_loss=0.063, ctc_loss=0.1173, over 3857949.06 frames. ], batch size: 71, lr: 1.79e-02, grad_scale: 32.0
+2024-08-26 18:26:50,599 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.526e+02 1.733e+02 1.998e+02 3.354e+02, threshold=3.467e+02, percent-clipped=0.0
+2024-08-26 18:26:53,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.61 vs. limit=10.0
+2024-08-26 18:26:59,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=105840.0, ans=0.125
+2024-08-26 18:27:25,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=105946.66666666667, ans=0.025
+2024-08-26 18:27:27,046 INFO [train.py:1114] (2/4) Epoch 8, batch 2450, loss[loss=0.3285, simple_loss=0.3416, pruned_loss=0.1147, ctc_loss=0.215, over 13439.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2968, pruned_loss=0.06686, ctc_loss=0.1248, over 3729396.28 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 16.0
+2024-08-26 18:27:30,060 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:27:39,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=106053.33333333333, ans=0.125
+2024-08-26 18:27:41,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=106053.33333333333, ans=10.0
+2024-08-26 18:27:44,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.19 vs. limit=15.0
+2024-08-26 18:27:47,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106106.66666666667, ans=0.1
+2024-08-26 18:27:48,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=106106.66666666667, ans=0.0
+2024-08-26 18:28:39,348 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:28:47,206 INFO [train.py:1114] (2/4) Epoch 9, batch 0, loss[loss=0.2161, simple_loss=0.2735, pruned_loss=0.05848, ctc_loss=0.1045, over 19818.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2735, pruned_loss=0.05848, ctc_loss=0.1045, over 19818.00 frames. ], batch size: 49, lr: 1.69e-02, grad_scale: 32.0
+2024-08-26 18:28:47,206 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-26 18:28:56,816 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.1927, simple_loss=0.2844, pruned_loss=0.03737, ctc_loss=0.06585, over 944034.00 frames.
+2024-08-26 18:28:56,817 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB
+2024-08-26 18:29:04,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.49 vs. limit=15.0
+2024-08-26 18:29:09,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0
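The `optim.py:487` WARNING lines above report five grad-norm percentiles (min, 25%, median, 75%, max) over recent batches; in each case the printed threshold equals Clipping_scale times the median (e.g. 2.0 × 1.673e+02 = 3.346e+02), and `percent-clipped` tracks how often a batch's norm exceeded it. A minimal sketch of that bookkeeping, assuming a simple history buffer; the `GradNormClipper` class and its details are hypothetical, not the actual optim.py implementation:

```python
# Hypothetical re-implementation for illustration; not the actual optim.py code.
from collections import deque
import torch

class GradNormClipper:
    """Clip gradients against a threshold derived from recent grad-norm history."""
    def __init__(self, clipping_scale=2.0, history=128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)

    def __call__(self, params):
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.linalg.vector_norm(
            torch.stack([torch.linalg.vector_norm(g) for g in grads]))
        self.norms.append(norm.item())
        hist = sorted(self.norms)
        # percentiles at 0/25/50/75/100%, as printed in the log
        q = [hist[int(p * (len(hist) - 1))] for p in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * q[2]  # scale * median, matching the log
        clipped = norm.item() > threshold
        if clipped:
            for g in grads:
                g.mul_(threshold / norm)      # rescale grads down to the threshold
        return q, threshold, clipped
```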
+2024-08-26 18:29:16,430 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 1.688e+02 1.849e+02 2.025e+02 3.204e+02, threshold=3.698e+02, percent-clipped=0.0
+2024-08-26 18:29:23,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=106314.66666666667, ans=0.125
+2024-08-26 18:29:25,712 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:29:29,695 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0
+2024-08-26 18:29:32,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106368.0, ans=0.125
+2024-08-26 18:29:43,037 INFO [train.py:1114] (2/4) Epoch 9, batch 50, loss[loss=0.203, simple_loss=0.2629, pruned_loss=0.05152, ctc_loss=0.1001, over 19720.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.294, pruned_loss=0.06258, ctc_loss=0.1183, over 844815.75 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-26 18:30:07,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=106581.33333333333, ans=0.0
+2024-08-26 18:30:11,932 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:30:13,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=106634.66666666667, ans=0.125
+2024-08-26 18:30:26,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106688.0, ans=0.125
+2024-08-26 18:30:33,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=106688.0, ans=0.04949747468305833
+2024-08-26 18:30:39,528 INFO [train.py:1114] (2/4) Epoch 9, batch 100, loss[loss=0.2188, simple_loss=0.2886, pruned_loss=0.05478, ctc_loss=0.09866, over 19704.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2948, pruned_loss=0.06262, ctc_loss=0.1179, over 1499610.03 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 32.0
+2024-08-26 18:30:44,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=106741.33333333333, ans=0.0
+2024-08-26 18:30:50,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.69 vs. limit=15.0
+2024-08-26 18:31:02,333 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.554e+02 1.735e+02 2.126e+02 3.416e+02, threshold=3.470e+02, percent-clipped=0.0
+2024-08-26 18:31:03,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106848.0, ans=0.125
+2024-08-26 18:31:05,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106848.0, ans=0.0
+2024-08-26 18:31:07,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.59 vs. limit=15.0
+2024-08-26 18:31:10,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=106901.33333333333, ans=0.125
+2024-08-26 18:31:22,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=106954.66666666667, ans=0.0
+2024-08-26 18:31:28,289 INFO [train.py:1114] (2/4) Epoch 9, batch 150, loss[loss=0.2109, simple_loss=0.2711, pruned_loss=0.0555, ctc_loss=0.09937, over 19714.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2918, pruned_loss=0.06137, ctc_loss=0.1151, over 2027682.09 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 16.0
+2024-08-26 18:31:37,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=107061.33333333333, ans=0.0
+2024-08-26 18:31:46,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107114.66666666667, ans=0.125
+2024-08-26 18:31:46,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0
+2024-08-26 18:31:49,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.21 vs. limit=10.0
+2024-08-26 18:32:06,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107221.33333333333, ans=0.125
+2024-08-26 18:32:11,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.62 vs. limit=12.0
+2024-08-26 18:32:14,113 INFO [train.py:1114] (2/4) Epoch 9, batch 200, loss[loss=0.2567, simple_loss=0.3137, pruned_loss=0.0725, ctc_loss=0.1368, over 18323.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2894, pruned_loss=0.06052, ctc_loss=0.1135, over 2435014.65 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:32:18,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.53 vs. limit=15.0
+2024-08-26 18:32:24,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.84 vs. limit=15.0
+2024-08-26 18:32:30,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=107328.0, ans=0.2
+2024-08-26 18:32:35,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=107381.33333333333, ans=0.125
+2024-08-26 18:32:36,039 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.442e+02 1.571e+02 1.787e+02 2.800e+02, threshold=3.143e+02, percent-clipped=0.0
+2024-08-26 18:32:36,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=107381.33333333333, ans=0.04949747468305833
+2024-08-26 18:32:42,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107434.66666666667, ans=0.125
+2024-08-26 18:32:45,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=107434.66666666667, ans=0.0
+2024-08-26 18:32:46,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107434.66666666667, ans=0.0
+2024-08-26 18:32:53,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.46 vs. limit=22.5
+2024-08-26 18:32:55,720 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:32:56,994 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.53 vs. limit=15.0
+2024-08-26 18:33:01,997 INFO [train.py:1114] (2/4) Epoch 9, batch 250, loss[loss=0.2245, simple_loss=0.2931, pruned_loss=0.05614, ctc_loss=0.1092, over 19373.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2883, pruned_loss=0.05996, ctc_loss=0.1125, over 2755550.65 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:33:08,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107541.33333333333, ans=0.125
+2024-08-26 18:33:57,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107754.66666666667, ans=0.0
+2024-08-26 18:34:01,005 INFO [train.py:1114] (2/4) Epoch 9, batch 300, loss[loss=0.2378, simple_loss=0.3005, pruned_loss=0.06398, ctc_loss=0.1176, over 19520.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2876, pruned_loss=0.05955, ctc_loss=0.1117, over 2999755.96 frames. ], batch size: 61, lr: 1.68e-02, grad_scale: 16.0
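The `scaling.py:214` ScheduledFloat lines each record a module hyperparameter (`ans`) as a function of `batch_count`: dropout rates, skip rates, and balancer probabilities are annealed as training progresses. A simplified stand-in, assuming piecewise-linear interpolation between `(batch_count, value)` breakpoints; the real scaling.py class carries more machinery, so treat this as a sketch:

```python
# Simplified stand-in for a ScheduledFloat-style schedule; illustrative only.
class ScheduledFloat:
    """Piecewise-linear schedule over batch_count, e.g. (0, 0.3) -> (20000, 0.1)."""
    def __init__(self, *points):
        self.points = sorted(points)   # (batch_count, value) breakpoints
        self.batch_count = 0.0         # updated by the training loop

    def __float__(self):
        x, pts = self.batch_count, self.points
        if x <= pts[0][0]:
            return float(pts[0][1])
        if x >= pts[-1][0]:
            return float(pts[-1][1])
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= x <= x1:          # linear interpolation inside the segment
                t = (x - x0) / (x1 - x0)
                return float(y0 + t * (y1 - y0))

dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
dropout_p.batch_count = 105093.33
print(float(dropout_p))  # -> 0.1 once past the last breakpoint, as in the log
```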
+2024-08-26 18:34:05,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=107808.0, ans=0.0
+2024-08-26 18:34:14,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=107861.33333333333, ans=10.0
+2024-08-26 18:34:22,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107914.66666666667, ans=0.1
+2024-08-26 18:34:24,466 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.264e+02 1.498e+02 1.681e+02 1.999e+02 2.633e+02, threshold=3.363e+02, percent-clipped=0.0
+2024-08-26 18:34:24,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107914.66666666667, ans=0.125
+2024-08-26 18:34:33,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.89 vs. limit=22.5
+2024-08-26 18:34:37,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107968.0, ans=0.125
+2024-08-26 18:34:38,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=107968.0, ans=0.0
+2024-08-26 18:34:39,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=107968.0, ans=0.125
+2024-08-26 18:34:42,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=108021.33333333333, ans=0.2
+2024-08-26 18:34:48,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108021.33333333333, ans=0.125
+2024-08-26 18:34:50,539 INFO [train.py:1114] (2/4) Epoch 9, batch 350, loss[loss=0.1929, simple_loss=0.2589, pruned_loss=0.04598, ctc_loss=0.08719, over 19763.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2884, pruned_loss=0.06005, ctc_loss=0.1123, over 3188993.13 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:35:03,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. limit=6.0
+2024-08-26 18:35:09,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=108128.0, ans=15.0
+2024-08-26 18:35:36,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=108288.0, ans=0.0
+2024-08-26 18:35:38,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=108288.0, ans=0.025
+2024-08-26 18:35:40,765 INFO [train.py:1114] (2/4) Epoch 9, batch 400, loss[loss=0.211, simple_loss=0.2843, pruned_loss=0.04966, ctc_loss=0.09563, over 19495.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2873, pruned_loss=0.05935, ctc_loss=0.1111, over 3340930.96 frames. ], batch size: 54, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:35:44,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108341.33333333333, ans=0.1
+2024-08-26 18:35:46,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108341.33333333333, ans=0.1
+2024-08-26 18:35:55,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=108394.66666666667, ans=0.0
+2024-08-26 18:36:02,028 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.232e+02 1.489e+02 1.712e+02 1.995e+02 4.778e+02, threshold=3.424e+02, percent-clipped=1.0
+2024-08-26 18:36:32,711 INFO [train.py:1114] (2/4) Epoch 9, batch 450, loss[loss=0.2306, simple_loss=0.2982, pruned_loss=0.05953, ctc_loss=0.11, over 19606.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.288, pruned_loss=0.05975, ctc_loss=0.1117, over 3448148.97 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:36:36,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=108608.0, ans=0.125
+2024-08-26 18:36:37,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=108608.0, ans=0.125
+2024-08-26 18:36:49,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.27 vs. limit=12.0
+2024-08-26 18:36:56,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=108714.66666666667, ans=0.125
+2024-08-26 18:36:58,648 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:37:01,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108714.66666666667, ans=0.125
+2024-08-26 18:37:19,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108821.33333333333, ans=0.1
+2024-08-26 18:37:20,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108821.33333333333, ans=0.125
+2024-08-26 18:37:20,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=108874.66666666667, ans=0.125
+2024-08-26 18:37:21,537 INFO [train.py:1114] (2/4) Epoch 9, batch 500, loss[loss=0.2439, simple_loss=0.3027, pruned_loss=0.06633, ctc_loss=0.1313, over 19674.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2867, pruned_loss=0.0592, ctc_loss=0.1107, over 3544506.49 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:37:24,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=108874.66666666667, ans=0.95
+2024-08-26 18:37:35,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108928.0, ans=0.0
+2024-08-26 18:37:42,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.480e+02 1.660e+02 1.957e+02 3.087e+02, threshold=3.320e+02, percent-clipped=0.0
+2024-08-26 18:37:45,171 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:37:48,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109034.66666666667, ans=0.1
+2024-08-26 18:37:54,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.66 vs. limit=15.0
+2024-08-26 18:37:59,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109088.0, ans=0.0
+2024-08-26 18:38:06,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109088.0, ans=0.125
+2024-08-26 18:38:07,952 INFO [train.py:1114] (2/4) Epoch 9, batch 550, loss[loss=0.2594, simple_loss=0.318, pruned_loss=0.07339, ctc_loss=0.1352, over 19227.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2867, pruned_loss=0.05919, ctc_loss=0.1108, over 3606222.95 frames. ], batch size: 71, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:38:24,853 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:38:25,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=12.0
+2024-08-26 18:38:43,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=109301.33333333333, ans=0.125
+2024-08-26 18:38:47,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=109354.66666666667, ans=10.0
+2024-08-26 18:38:51,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=109354.66666666667, ans=0.0
+2024-08-26 18:38:55,931 INFO [train.py:1114] (2/4) Epoch 9, batch 600, loss[loss=0.2494, simple_loss=0.3019, pruned_loss=0.07221, ctc_loss=0.1312, over 19389.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2872, pruned_loss=0.05932, ctc_loss=0.1109, over 3663539.65 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 16.0
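The `scaling.py:1024` Whitening lines compare a per-module whiteness metric against a limit (e.g. metric=5.82 vs. limit=12.0); the comparison only matters as the metric approaches the limit. One plausible proxy for such a metric, assumed here purely for illustration (the exact formula in scaling.py may differ), is the eigenvalue ratio E[λ²]/E[λ]² of the feature covariance, which equals 1.0 for perfectly white features and can be computed from traces without an eigendecomposition:

```python
# Illustrative whiteness proxy; not claimed to be the exact scaling.py formula.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels). Returns E[lambda^2] / E[lambda]^2 of the
    per-group feature covariance; 1.0 means the features are perfectly white."""
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)  # (g, n, d)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n                                # (g, d, d)
    d = cov.shape[-1]
    mean_eig = torch.diagonal(cov, dim1=1, dim2=2).mean(dim=1)     # trace(C)/d
    mean_sq_eig = (cov * cov).sum(dim=(1, 2)) / d                  # trace(C^2)/d
    return (mean_sq_eig / mean_eig.pow(2)).mean().item()
```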
+2024-08-26 18:39:15,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109461.33333333333, ans=0.0
+2024-08-26 18:39:21,961 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.496e+02 1.658e+02 1.980e+02 4.382e+02, threshold=3.316e+02, percent-clipped=1.0
+2024-08-26 18:39:28,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=109514.66666666667, ans=0.0
+2024-08-26 18:39:28,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.29 vs. limit=12.0
+2024-08-26 18:39:30,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0
+2024-08-26 18:39:32,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=109568.0, ans=0.125
+2024-08-26 18:39:34,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109568.0, ans=0.1
+2024-08-26 18:39:35,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109568.0, ans=0.0
+2024-08-26 18:39:43,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0
+2024-08-26 18:39:49,353 INFO [train.py:1114] (2/4) Epoch 9, batch 650, loss[loss=0.2191, simple_loss=0.2835, pruned_loss=0.05618, ctc_loss=0.1061, over 19766.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2861, pruned_loss=0.05888, ctc_loss=0.1099, over 3713816.89 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:39:51,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=109674.66666666667, ans=0.2
+2024-08-26 18:40:13,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=109781.33333333333, ans=0.0
+2024-08-26 18:40:30,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0
+2024-08-26 18:40:40,265 INFO [train.py:1114] (2/4) Epoch 9, batch 700, loss[loss=0.1934, simple_loss=0.2631, pruned_loss=0.04407, ctc_loss=0.08915, over 19724.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2862, pruned_loss=0.05873, ctc_loss=0.1098, over 3746667.85 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:40:41,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109941.33333333333, ans=0.125
+2024-08-26 18:40:51,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.66 vs. limit=15.0
+2024-08-26 18:41:01,801 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.271e+02 1.503e+02 1.748e+02 2.321e+02 3.813e+02, threshold=3.497e+02, percent-clipped=1.0
+2024-08-26 18:41:02,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110048.0, ans=0.125
+2024-08-26 18:41:05,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.74 vs. limit=22.5
+2024-08-26 18:41:21,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0
+2024-08-26 18:41:22,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=110154.66666666667, ans=0.0
+2024-08-26 18:41:27,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110208.0, ans=0.1
+2024-08-26 18:41:28,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.94 vs. limit=10.0
+2024-08-26 18:41:28,637 INFO [train.py:1114] (2/4) Epoch 9, batch 750, loss[loss=0.2263, simple_loss=0.2941, pruned_loss=0.05805, ctc_loss=0.106, over 19493.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2861, pruned_loss=0.05861, ctc_loss=0.1098, over 3772977.45 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 16.0
+2024-08-26 18:41:30,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110208.0, ans=0.1
+2024-08-26 18:41:37,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=110261.33333333333, ans=0.07
+2024-08-26 18:41:38,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=110261.33333333333, ans=0.0
+2024-08-26 18:41:41,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=110261.33333333333, ans=0.0
+2024-08-26 18:42:11,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=110421.33333333333, ans=0.125
+2024-08-26 18:42:22,145 INFO [train.py:1114] (2/4) Epoch 9, batch 800, loss[loss=0.209, simple_loss=0.2646, pruned_loss=0.05616, ctc_loss=0.1027, over 19792.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2862, pruned_loss=0.05891, ctc_loss=0.1104, over 3794489.35 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0
+2024-08-26 18:42:23,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=110474.66666666667, ans=0.2
+2024-08-26 18:42:43,910 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 1.427e+02 1.539e+02 1.792e+02 3.382e+02, threshold=3.078e+02, percent-clipped=0.0
+2024-08-26 18:42:45,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=110581.33333333333, ans=0.015
+2024-08-26 18:42:50,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.07 vs. limit=15.0
+2024-08-26 18:43:09,188 INFO [train.py:1114] (2/4) Epoch 9, batch 850, loss[loss=0.2293, simple_loss=0.2992, pruned_loss=0.05744, ctc_loss=0.1113, over 19666.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2858, pruned_loss=0.05879, ctc_loss=0.1101, over 3814034.66 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0
+2024-08-26 18:43:11,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=110741.33333333333, ans=0.125
+2024-08-26 18:43:23,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=110794.66666666667, ans=0.025
+2024-08-26 18:43:24,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110794.66666666667, ans=0.1
+2024-08-26 18:43:34,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110848.0, ans=0.125
+2024-08-26 18:43:42,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=110901.33333333333, ans=0.0
+2024-08-26 18:43:55,671 INFO [train.py:1114] (2/4) Epoch 9, batch 900, loss[loss=0.2082, simple_loss=0.2703, pruned_loss=0.05398, ctc_loss=0.09544, over 19414.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2867, pruned_loss=0.05956, ctc_loss=0.111, over 3818465.82 frames. ], batch size: 48, lr: 1.66e-02, grad_scale: 32.0
+2024-08-26 18:45:38,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.519e+02 1.752e+02 2.077e+02 5.433e+02, threshold=3.505e+02, percent-clipped=5.0
+2024-08-26 18:46:05,601 INFO [train.py:1114] (2/4) Epoch 9, batch 950, loss[loss=0.1896, simple_loss=0.2587, pruned_loss=0.04351, ctc_loss=0.08387, over 19493.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2867, pruned_loss=0.05954, ctc_loss=0.1111, over 3821199.31 frames. ], batch size: 49, lr: 1.66e-02, grad_scale: 32.0
+2024-08-26 18:46:19,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=111328.0, ans=0.0
+2024-08-26 18:46:20,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111328.0, ans=0.125
+2024-08-26 18:46:25,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111328.0, ans=0.1
+2024-08-26 18:46:36,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=111381.33333333333, ans=0.0
+2024-08-26 18:46:37,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=111381.33333333333, ans=0.05
+2024-08-26 18:46:54,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.91 vs. limit=8.0
+2024-08-26 18:46:57,419 INFO [train.py:1114] (2/4) Epoch 9, batch 1000, loss[loss=0.1981, simple_loss=0.2748, pruned_loss=0.04341, ctc_loss=0.08628, over 19842.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2877, pruned_loss=0.06002, ctc_loss=0.1119, over 3816943.94 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 32.0
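The `grad_scale` field in the train.py lines oscillates between 8.0 and 32.0, which is the signature of a dynamic loss scaler under Native AMP: the scale is halved when a step overflows in fp16 and grown back after a run of clean steps. A generic sketch of that loop with `torch.cuda.amp.GradScaler`, shown for illustration rather than as the actual train.py code:

```python
# Generic AMP training step; the GradScaler settings here are assumptions.
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=2000)

def train_step(model, batch, optimizer):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = model(batch)
    scaler.scale(loss).backward()  # scale loss so fp16 grads stay representable
    scaler.step(optimizer)         # unscales grads; skips the step on inf/nan
    scaler.update()                # halves scale on overflow, doubles periodically
    return loss.detach(), scaler.get_scale()  # the latter is the logged grad_scale
```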
+2024-08-26 18:47:19,853 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.461e+02 1.756e+02 2.077e+02 6.803e+02, threshold=3.513e+02, percent-clipped=1.0
+2024-08-26 18:47:25,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111701.33333333333, ans=0.125
+2024-08-26 18:47:43,912 INFO [train.py:1114] (2/4) Epoch 9, batch 1050, loss[loss=0.2112, simple_loss=0.2829, pruned_loss=0.04983, ctc_loss=0.09975, over 19836.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2867, pruned_loss=0.05976, ctc_loss=0.1113, over 3823969.67 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 16.0
+2024-08-26 18:48:01,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111914.66666666667, ans=0.0
+2024-08-26 18:48:02,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0
+2024-08-26 18:48:02,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.46 vs. limit=10.0
+2024-08-26 18:48:23,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112021.33333333333, ans=0.125
+2024-08-26 18:48:32,532 INFO [train.py:1114] (2/4) Epoch 9, batch 1100, loss[loss=0.218, simple_loss=0.2874, pruned_loss=0.05432, ctc_loss=0.1001, over 19581.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2865, pruned_loss=0.05948, ctc_loss=0.1109, over 3831415.43 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 16.0
+2024-08-26 18:48:40,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112074.66666666667, ans=0.0
+2024-08-26 18:48:41,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=112128.0, ans=0.0
+2024-08-26 18:48:59,878 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.444e+02 1.690e+02 2.009e+02 4.396e+02, threshold=3.380e+02, percent-clipped=1.0
+2024-08-26 18:49:03,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=112181.33333333333, ans=0.0
+2024-08-26 18:49:53,115 INFO [train.py:1114] (2/4) Epoch 9, batch 1150, loss[loss=0.2111, simple_loss=0.2819, pruned_loss=0.05116, ctc_loss=0.09516, over 19579.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2865, pruned_loss=0.05946, ctc_loss=0.1109, over 3830568.01 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 16.0
+2024-08-26 18:49:57,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112341.33333333333, ans=0.1
+2024-08-26 18:49:59,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=22.5
+2024-08-26 18:50:04,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112341.33333333333, ans=0.125
+2024-08-26 18:50:07,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.06 vs. limit=12.0
+2024-08-26 18:50:19,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112394.66666666667, ans=0.0
+2024-08-26 18:50:26,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.68 vs. limit=15.0
+2024-08-26 18:50:35,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=112501.33333333333, ans=0.125
+2024-08-26 18:50:46,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112554.66666666667, ans=0.1
+2024-08-26 18:50:54,149 INFO [train.py:1114] (2/4) Epoch 9, batch 1200, loss[loss=0.2335, simple_loss=0.2979, pruned_loss=0.0616, ctc_loss=0.1146, over 19858.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.2876, pruned_loss=0.05974, ctc_loss=0.1117, over 3825514.24 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0
+2024-08-26 18:50:58,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=112608.0, ans=22.5
+2024-08-26 18:51:05,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112661.33333333333, ans=0.1
+2024-08-26 18:51:11,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=112661.33333333333, ans=0.09899494936611666
+2024-08-26 18:51:15,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=112714.66666666667, ans=0.125
+2024-08-26 18:51:16,818 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.431e+02 1.600e+02 1.807e+02 3.201e+02, threshold=3.201e+02, percent-clipped=0.0
+2024-08-26 18:51:29,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=112768.0, ans=0.2
+2024-08-26 18:51:31,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112821.33333333333, ans=0.125
+2024-08-26 18:51:38,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0
+2024-08-26 18:51:39,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=112821.33333333333, ans=0.125
+2024-08-26 18:51:42,797 INFO [train.py:1114] (2/4) Epoch 9, batch 1250, loss[loss=0.2489, simple_loss=0.3129, pruned_loss=0.06661, ctc_loss=0.1293, over 19524.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2877, pruned_loss=0.05963, ctc_loss=0.1115, over 3842813.32 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0
+2024-08-26 18:51:43,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=112874.66666666667, ans=0.125
+2024-08-26 18:51:47,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=112874.66666666667, ans=0.0
+2024-08-26 18:52:23,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113088.0, ans=0.125
+2024-08-26 18:52:25,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=113088.0, ans=0.125
+2024-08-26 18:52:28,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.83 vs. limit=15.0
+2024-08-26 18:52:30,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=113088.0, ans=0.2
+2024-08-26 18:52:36,297 INFO [train.py:1114] (2/4) Epoch 9, batch 1300, loss[loss=0.2324, simple_loss=0.2998, pruned_loss=0.05994, ctc_loss=0.1128, over 18940.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2869, pruned_loss=0.05909, ctc_loss=0.1106, over 3845203.26 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0
+2024-08-26 18:52:43,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113141.33333333333, ans=0.1
+2024-08-26 18:52:58,748 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.498e+02 1.743e+02 2.034e+02 3.430e+02, threshold=3.487e+02, percent-clipped=2.0
+2024-08-26 18:53:09,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=113301.33333333333, ans=0.125
+2024-08-26 18:53:15,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113354.66666666667, ans=0.1
+2024-08-26 18:53:17,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.38 vs. limit=15.0
+2024-08-26 18:53:23,263 INFO [train.py:1114] (2/4) Epoch 9, batch 1350, loss[loss=0.2263, simple_loss=0.2842, pruned_loss=0.06196, ctc_loss=0.1113, over 19767.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2865, pruned_loss=0.05864, ctc_loss=0.1097, over 3856385.13 frames. ], batch size: 54, lr: 1.64e-02, grad_scale: 32.0
+2024-08-26 18:53:29,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=113408.0, ans=0.125
+2024-08-26 18:53:41,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=15.0
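Each train.py line prints a per-batch loss[... over N frames] next to a running tot_loss[... over M frames]; the totals are frame-weighted, so long utterances count for more, and the cumulative frame count decays rather than growing without bound. A hypothetical tracker with that shape (the class and its decay constant are assumptions for illustration, not icefall's actual MetricsTracker):

```python
# Hypothetical frame-weighted running-loss tracker; illustrative only.
class LossTracker:
    def __init__(self, decay: float = 0.999):
        self.decay = decay   # exponential forgetting applied once per batch
        self.frames = 0.0
        self.sums = {}       # e.g. {"loss": ..., "ctc_loss": ...}

    def update(self, batch_frames: float, **losses):
        self.frames = self.decay * self.frames + batch_frames
        for k, v in losses.items():
            prev = self.decay * self.sums.get(k, 0.0)
            self.sums[k] = prev + v * batch_frames   # frame-weighted sums

    def averages(self):
        return {k: s / self.frames for k, s in self.sums.items()}

tracker = LossTracker()
tracker.update(19607.0, loss=0.2169, ctc_loss=0.1025)
print(tracker.averages(), tracker.frames)  # per-frame averages and frame count
```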
+2024-08-26 18:53:42,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113514.66666666667, ans=0.125
+2024-08-26 18:53:45,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113514.66666666667, ans=0.125
+2024-08-26 18:53:48,798 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:53:48,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113514.66666666667, ans=0.125
+2024-08-26 18:54:04,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=113621.33333333333, ans=0.2
+2024-08-26 18:54:09,874 INFO [train.py:1114] (2/4) Epoch 9, batch 1400, loss[loss=0.1919, simple_loss=0.252, pruned_loss=0.04834, ctc_loss=0.08782, over 19642.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2861, pruned_loss=0.0587, ctc_loss=0.1099, over 3863319.77 frames. ], batch size: 46, lr: 1.64e-02, grad_scale: 32.0
+2024-08-26 18:54:22,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113728.0, ans=0.125
+2024-08-26 18:54:24,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.08 vs. limit=22.5
+2024-08-26 18:54:29,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113781.33333333333, ans=0.125
+2024-08-26 18:54:30,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=113781.33333333333, ans=0.2
+2024-08-26 18:54:33,075 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.492e+02 1.644e+02 1.948e+02 2.802e+02, threshold=3.287e+02, percent-clipped=0.0
+2024-08-26 18:54:50,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113888.0, ans=0.125
+2024-08-26 18:54:59,238 INFO [train.py:1114] (2/4) Epoch 9, batch 1450, loss[loss=0.2409, simple_loss=0.3072, pruned_loss=0.06377, ctc_loss=0.1179, over 19659.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2868, pruned_loss=0.05905, ctc_loss=0.1105, over 3861359.41 frames. ], batch size: 63, lr: 1.64e-02, grad_scale: 16.0
+2024-08-26 18:55:17,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=113994.66666666667, ans=0.0
+2024-08-26 18:55:20,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=113994.66666666667, ans=10.0
+2024-08-26 18:55:25,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114048.0, ans=0.125
+2024-08-26 18:55:35,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.28 vs. limit=15.0
+2024-08-26 18:55:38,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114101.33333333333, ans=0.125
+2024-08-26 18:55:50,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=114154.66666666667, ans=0.125
+2024-08-26 18:55:54,505 INFO [train.py:1114] (2/4) Epoch 9, batch 1500, loss[loss=0.2188, simple_loss=0.2918, pruned_loss=0.05235, ctc_loss=0.1027, over 19598.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2874, pruned_loss=0.05928, ctc_loss=0.111, over 3861063.45 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 16.0
+2024-08-26 18:56:06,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=114261.33333333333, ans=0.2
+2024-08-26 18:56:18,310 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.542e+02 1.688e+02 1.884e+02 2.711e+02, threshold=3.377e+02, percent-clipped=0.0
+2024-08-26 18:56:26,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=114368.0, ans=0.025
+2024-08-26 18:56:38,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=114421.33333333333, ans=0.125
+2024-08-26 18:56:41,343 INFO [train.py:1114] (2/4) Epoch 9, batch 1550, loss[loss=0.2447, simple_loss=0.2997, pruned_loss=0.06984, ctc_loss=0.1251, over 19612.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2876, pruned_loss=0.05965, ctc_loss=0.1118, over 3845360.99 frames. ], batch size: 60, lr: 1.64e-02, grad_scale: 16.0
+2024-08-26 18:56:43,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=114474.66666666667, ans=0.0
+2024-08-26 18:56:51,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0
+2024-08-26 18:56:51,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114528.0, ans=0.1
+2024-08-26 18:57:07,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=114581.33333333333, ans=0.1
+2024-08-26 18:57:29,654 INFO [train.py:1114] (2/4) Epoch 9, batch 1600, loss[loss=0.2431, simple_loss=0.306, pruned_loss=0.06678, ctc_loss=0.1164, over 19833.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2873, pruned_loss=0.0596, ctc_loss=0.1117, over 3835886.41 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0
+2024-08-26 18:57:39,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=114794.66666666667, ans=0.0
+2024-08-26 18:57:40,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=114794.66666666667, ans=0.125
+2024-08-26 18:57:57,610 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.291e+02 1.549e+02 1.720e+02 1.979e+02 3.573e+02, threshold=3.441e+02, percent-clipped=1.0
+2024-08-26 18:57:59,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=114848.0, ans=0.1
+2024-08-26 18:58:08,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=114901.33333333333, ans=0.125
+2024-08-26 18:58:08,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.21 vs. limit=15.0
+2024-08-26 18:58:14,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114954.66666666667, ans=0.1
+2024-08-26 18:58:36,368 INFO [train.py:1114] (2/4) Epoch 9, batch 1650, loss[loss=0.2242, simple_loss=0.2918, pruned_loss=0.05677, ctc_loss=0.1075, over 19659.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2872, pruned_loss=0.05952, ctc_loss=0.1114, over 3832639.29 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 32.0
+2024-08-26 18:58:42,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115008.0, ans=0.1
+2024-08-26 18:59:43,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=115114.66666666667, ans=0.2
+2024-08-26 18:59:54,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=115168.0, ans=0.2
+2024-08-26 19:00:00,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=115168.0, ans=0.025
+2024-08-26 19:00:06,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115221.33333333333, ans=0.1
+2024-08-26 19:00:11,416 INFO [train.py:1114] (2/4) Epoch 9, batch 1700, loss[loss=0.1862, simple_loss=0.2492, pruned_loss=0.04451, ctc_loss=0.08574, over 19674.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2865, pruned_loss=0.05888, ctc_loss=0.1103, over 3847087.37 frames. ], batch size: 46, lr: 1.63e-02, grad_scale: 16.0
+2024-08-26 19:00:34,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=115381.33333333333, ans=0.0
+2024-08-26 19:00:34,653 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.433e+02 1.619e+02 1.844e+02 2.581e+02, threshold=3.239e+02, percent-clipped=0.0
+2024-08-26 19:00:34,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=115381.33333333333, ans=0.95
+2024-08-26 19:00:45,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=115434.66666666667, ans=0.2
+2024-08-26 19:00:49,244 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:00:56,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=115541.33333333333, ans=0.125
+2024-08-26 19:00:56,878 INFO [train.py:1114] (2/4) Epoch 9, batch 1750, loss[loss=0.1891, simple_loss=0.2522, pruned_loss=0.04485, ctc_loss=0.09082, over 19680.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2855, pruned_loss=0.05824, ctc_loss=0.1092, over 3851166.61 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0
+2024-08-26 19:00:59,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=115541.33333333333, ans=0.125
+2024-08-26 19:01:10,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=115594.66666666667, ans=0.0
+2024-08-26 19:01:10,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0
+2024-08-26 19:01:30,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=115701.33333333333, ans=0.025
+2024-08-26 19:01:38,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=115754.66666666667, ans=0.125
+2024-08-26 19:01:41,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.99 vs. limit=22.5
+2024-08-26 19:01:43,102 INFO [train.py:1114] (2/4) Epoch 9, batch 1800, loss[loss=0.2454, simple_loss=0.3056, pruned_loss=0.06812, ctc_loss=0.1223, over 19621.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2858, pruned_loss=0.05849, ctc_loss=0.1096, over 3852683.02 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 16.0
+2024-08-26 19:01:44,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=115808.0, ans=0.0
+2024-08-26 19:02:02,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.45 vs. limit=15.0
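The lr field decays slowly within an epoch and steps down at epoch boundaries (1.80e-02 late in epoch 8, 1.69e-02 at the start of epoch 9, 1.62e-02 by batch 1900). That is the shape of an Eden-style schedule with both a batch factor and an epoch factor; the constants below are common defaults and are assumptions, since this run's actual settings are not recoverable from the log:

```python
# Eden-style learning-rate schedule; base_lr, lr_batches, lr_epochs are assumed.
def eden_lr(base_lr: float, step: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# With an assumed base_lr=0.125 this gives ~1.6e-02 at step 110000 of epoch 9,
# in the same ballpark as the logged values; only the qualitative shape
# (slow in-epoch decay, step change per epoch) is being illustrated here.
print(f"{eden_lr(0.125, 110000, 9):.2e}")
```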
+2024-08-26 19:02:06,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.500e+02 1.645e+02 1.953e+02 3.789e+02, threshold=3.290e+02, percent-clipped=1.0
+2024-08-26 19:02:06,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=115914.66666666667, ans=0.125
+2024-08-26 19:02:18,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.57 vs. limit=15.0
+2024-08-26 19:02:27,282 INFO [train.py:1114] (2/4) Epoch 9, batch 1850, loss[loss=0.2368, simple_loss=0.2954, pruned_loss=0.06405, ctc_loss=0.1252, over 19592.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.285, pruned_loss=0.05809, ctc_loss=0.1087, over 3857004.18 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 16.0
+2024-08-26 19:02:28,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=116074.66666666667, ans=0.125
+2024-08-26 19:02:31,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=12.0
+2024-08-26 19:03:09,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=116288.0, ans=0.0
+2024-08-26 19:03:13,220 INFO [train.py:1114] (2/4) Epoch 9, batch 1900, loss[loss=0.2207, simple_loss=0.2953, pruned_loss=0.053, ctc_loss=0.1003, over 19680.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2859, pruned_loss=0.05819, ctc_loss=0.1088, over 3861462.69 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0
+2024-08-26 19:03:16,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=116341.33333333333, ans=0.125
+2024-08-26 19:03:22,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=116394.66666666667, ans=0.0
+2024-08-26 19:03:25,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5
+2024-08-26 19:03:28,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=116394.66666666667, ans=12.0
+2024-08-26 19:03:33,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116448.0, ans=0.1
+2024-08-26 19:03:35,869 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.270e+02 1.509e+02 1.695e+02 1.935e+02 3.320e+02, threshold=3.390e+02, percent-clipped=1.0
+2024-08-26 19:03:49,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=116554.66666666667, ans=0.125
+2024-08-26 19:03:56,677 INFO [train.py:1114] (2/4) Epoch 9, batch 1950, loss[loss=0.2159, simple_loss=0.2821, pruned_loss=0.05453, ctc_loss=0.1015, over 19584.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.287, pruned_loss=0.05846, ctc_loss=0.1093, over 3869958.39 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0
+2024-08-26 19:04:09,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=116661.33333333333, ans=0.125
+2024-08-26 19:04:13,534 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:04:17,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.21 vs. limit=15.0
+2024-08-26 19:04:19,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116714.66666666667, ans=0.1
+2024-08-26 19:04:30,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116768.0, ans=0.1
+2024-08-26 19:04:31,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=116768.0, ans=0.0
+2024-08-26 19:04:37,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=116821.33333333333, ans=0.025
+2024-08-26 19:04:45,340 INFO [train.py:1114] (2/4) Epoch 9, batch 2000, loss[loss=0.1976, simple_loss=0.2566, pruned_loss=0.05136, ctc_loss=0.08973, over 19662.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2877, pruned_loss=0.05896, ctc_loss=0.1103, over 3854092.86 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0
+2024-08-26 19:04:48,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=116874.66666666667, ans=0.125
+2024-08-26 19:04:49,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=116874.66666666667, ans=0.125
+2024-08-26 19:04:57,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=116928.0, ans=0.125
+2024-08-26 19:05:02,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=116981.33333333333, ans=0.05
+2024-08-26 19:05:09,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.518e+02 1.711e+02 1.998e+02 4.316e+02, threshold=3.422e+02, percent-clipped=2.0
+2024-08-26 19:05:10,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=116981.33333333333, ans=0.0
+2024-08-26 19:05:10,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=117034.66666666667, ans=0.125
+2024-08-26 19:05:11,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117034.66666666667, ans=0.1
+2024-08-26 19:05:15,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=117034.66666666667, ans=0.5
+2024-08-26 19:05:20,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=117088.0, ans=0.2
+2024-08-26 19:05:29,284 INFO [train.py:1114] (2/4) Epoch 9, batch 2050, loss[loss=0.1922, simple_loss=0.2526, pruned_loss=0.04803, ctc_loss=0.08959, over 19713.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2864, pruned_loss=0.05861, ctc_loss=0.1096, over 3849627.61 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 16.0
+2024-08-26 19:05:32,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=117141.33333333333, ans=0.0
+2024-08-26 19:05:50,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=117248.0, ans=0.05
+2024-08-26 19:05:51,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117248.0, ans=0.1
+2024-08-26 19:05:51,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=117248.0, ans=0.0
+2024-08-26 19:05:53,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=117248.0, ans=0.125
+2024-08-26 19:06:10,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0
+2024-08-26 19:06:12,998 INFO [train.py:1114] (2/4) Epoch 9, batch 2100, loss[loss=0.1894, simple_loss=0.2591, pruned_loss=0.04277, ctc_loss=0.08539, over 19747.00 frames. ], tot_loss[loss=0.222, simple_loss=0.285, pruned_loss=0.05783, ctc_loss=0.1084, over 3858525.45 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 16.0
+2024-08-26 19:06:19,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117408.0, ans=0.125
+2024-08-26 19:06:36,662 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.488e+02 1.695e+02 1.945e+02 3.088e+02, threshold=3.391e+02, percent-clipped=0.0
+2024-08-26 19:06:40,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=117568.0, ans=0.0
+2024-08-26 19:06:46,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=117621.33333333333, ans=0.05
+2024-08-26 19:06:55,560 INFO [train.py:1114] (2/4) Epoch 9, batch 2150, loss[loss=0.204, simple_loss=0.2656, pruned_loss=0.05074, ctc_loss=0.1026, over 19882.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2841, pruned_loss=0.05749, ctc_loss=0.1075, over 3869334.64 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 8.0
+2024-08-26 19:07:02,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=117674.66666666667, ans=0.2
+2024-08-26 19:07:04,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0
+2024-08-26 19:07:06,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=117728.0, ans=0.09899494936611666
+2024-08-26 19:07:08,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117728.0, ans=0.1
+2024-08-26 19:07:15,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=117781.33333333333, ans=0.125
+2024-08-26 19:07:15,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=117781.33333333333, ans=0.02
+2024-08-26 19:07:26,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117834.66666666667, ans=0.125
+2024-08-26 19:07:38,969 INFO [train.py:1114] (2/4) Epoch 9, batch 2200, loss[loss=0.2252, simple_loss=0.2975, pruned_loss=0.05466, ctc_loss=0.109, over 19586.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2841, pruned_loss=0.05722, ctc_loss=0.1072, over 3867498.81 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 8.0
+2024-08-26 19:07:39,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=117941.33333333333, ans=0.2
+2024-08-26 19:07:41,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=117941.33333333333, ans=0.07
+2024-08-26 19:07:46,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=117941.33333333333, ans=0.2
+2024-08-26 19:07:58,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=118048.0, ans=0.0
+2024-08-26 19:07:58,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=118048.0, ans=0.125
+2024-08-26 19:08:01,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.78 vs. limit=10.0
+2024-08-26 19:08:03,129 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.528e+02 1.792e+02 2.132e+02 3.306e+02, threshold=3.583e+02, percent-clipped=0.0
+2024-08-26 19:08:34,323 INFO [train.py:1114] (2/4) Epoch 9, batch 2250, loss[loss=0.2374, simple_loss=0.2959, pruned_loss=0.06484, ctc_loss=0.1231, over 19599.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2846, pruned_loss=0.05773, ctc_loss=0.108, over 3866850.68 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 8.0
+2024-08-26 19:08:34,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=118208.0, ans=0.125
+2024-08-26 19:08:35,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0
+2024-08-26 19:08:57,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.36 vs.
limit=12.0 +2024-08-26 19:09:00,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=118368.0, ans=0.125 +2024-08-26 19:09:05,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=118368.0, ans=0.125 +2024-08-26 19:09:17,827 INFO [train.py:1114] (2/4) Epoch 9, batch 2300, loss[loss=0.2157, simple_loss=0.2694, pruned_loss=0.05961, ctc_loss=0.1069, over 19500.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2844, pruned_loss=0.05816, ctc_loss=0.1087, over 3860346.86 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:09:23,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=118474.66666666667, ans=0.125 +2024-08-26 19:09:42,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.479e+02 1.669e+02 2.317e+02 3.988e+02, threshold=3.338e+02, percent-clipped=3.0 +2024-08-26 19:09:48,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=12.0 +2024-08-26 19:09:50,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.59 vs. limit=15.0 +2024-08-26 19:10:01,368 INFO [train.py:1114] (2/4) Epoch 9, batch 2350, loss[loss=0.234, simple_loss=0.3016, pruned_loss=0.0611, ctc_loss=0.1103, over 19656.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2843, pruned_loss=0.05805, ctc_loss=0.1083, over 3863287.06 frames. ], batch size: 63, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:10:15,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118794.66666666667, ans=0.125 +2024-08-26 19:11:06,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=118848.0, ans=0.035 +2024-08-26 19:11:06,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=118848.0, ans=0.125 +2024-08-26 19:11:19,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=15.0 +2024-08-26 19:11:22,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=118954.66666666667, ans=0.0 +2024-08-26 19:11:23,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=118954.66666666667, ans=0.2 +2024-08-26 19:11:32,738 INFO [train.py:1114] (2/4) Epoch 9, batch 2400, loss[loss=0.2407, simple_loss=0.2991, pruned_loss=0.06577, ctc_loss=0.127, over 19295.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2872, pruned_loss=0.0592, ctc_loss=0.1103, over 3857793.22 frames. 
], batch size: 71, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:11:50,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=119061.33333333333, ans=0.04949747468305833 +2024-08-26 19:12:04,703 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.526e+02 1.714e+02 1.892e+02 3.175e+02, threshold=3.427e+02, percent-clipped=0.0 +2024-08-26 19:12:07,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=119168.0, ans=0.125 +2024-08-26 19:12:09,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=119168.0, ans=0.125 +2024-08-26 19:12:09,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=119168.0, ans=0.05 +2024-08-26 19:12:11,644 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.02 vs. limit=15.0 +2024-08-26 19:12:24,777 INFO [train.py:1114] (2/4) Epoch 9, batch 2450, loss[loss=0.2903, simple_loss=0.3193, pruned_loss=0.09439, ctc_loss=0.181, over 13407.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2918, pruned_loss=0.06282, ctc_loss=0.1173, over 3728202.43 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:12:37,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=119328.0, ans=0.2 +2024-08-26 19:12:40,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=119328.0, ans=0.0 +2024-08-26 19:12:49,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.46 vs. limit=15.0 +2024-08-26 19:13:12,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=119434.66666666667, ans=22.5 +2024-08-26 19:14:15,880 INFO [train.py:1114] (2/4) Epoch 10, batch 0, loss[loss=0.2198, simple_loss=0.2787, pruned_loss=0.05949, ctc_loss=0.105, over 19806.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2787, pruned_loss=0.05949, ctc_loss=0.105, over 19806.00 frames. ], batch size: 49, lr: 1.53e-02, grad_scale: 16.0 +2024-08-26 19:14:15,881 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 19:14:48,066 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.1896, simple_loss=0.2813, pruned_loss=0.03622, ctc_loss=0.0637, over 944034.00 frames. 
+2024-08-26 19:14:48,067 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB +2024-08-26 19:14:52,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=119482.66666666667, ans=0.125 +2024-08-26 19:14:58,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119536.0, ans=0.125 +2024-08-26 19:15:03,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=119536.0, ans=0.125 +2024-08-26 19:15:25,084 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.696e+02 1.867e+02 2.057e+02 3.331e+02, threshold=3.733e+02, percent-clipped=0.0 +2024-08-26 19:15:25,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=119696.0, ans=0.125 +2024-08-26 19:15:26,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=119696.0, ans=0.125 +2024-08-26 19:15:34,240 INFO [train.py:1114] (2/4) Epoch 10, batch 50, loss[loss=0.208, simple_loss=0.2611, pruned_loss=0.05587, ctc_loss=0.1082, over 19753.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.289, pruned_loss=0.05988, ctc_loss=0.1138, over 844913.23 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 16.0 +2024-08-26 19:15:43,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119802.66666666667, ans=0.1 +2024-08-26 19:15:49,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=119802.66666666667, ans=0.2 +2024-08-26 19:15:52,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=119856.0, ans=0.0 +2024-08-26 19:15:54,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=119856.0, ans=0.2 +2024-08-26 19:16:03,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=119909.33333333333, ans=0.1 +2024-08-26 19:16:03,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.96 vs. limit=15.0 +2024-08-26 19:16:06,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=119909.33333333333, ans=10.0 +2024-08-26 19:16:20,477 INFO [train.py:1114] (2/4) Epoch 10, batch 100, loss[loss=0.2132, simple_loss=0.2707, pruned_loss=0.05689, ctc_loss=0.1049, over 19728.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.2894, pruned_loss=0.05918, ctc_loss=0.1119, over 1498754.48 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:16:23,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=120016.0, ans=0.0 +2024-08-26 19:16:40,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.72 vs. 
limit=15.0 +2024-08-26 19:16:56,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=120176.0, ans=0.5 +2024-08-26 19:17:03,446 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.471e+02 1.633e+02 1.792e+02 2.780e+02, threshold=3.265e+02, percent-clipped=0.0 +2024-08-26 19:17:11,609 INFO [train.py:1114] (2/4) Epoch 10, batch 150, loss[loss=0.2027, simple_loss=0.2582, pruned_loss=0.05286, ctc_loss=0.1035, over 19717.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2871, pruned_loss=0.05849, ctc_loss=0.1102, over 2027457.93 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:17:11,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=120282.66666666667, ans=0.0 +2024-08-26 19:17:13,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.59 vs. limit=22.5 +2024-08-26 19:17:19,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=120282.66666666667, ans=0.125 +2024-08-26 19:17:22,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=120336.0, ans=0.125 +2024-08-26 19:17:24,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.58 vs. limit=22.5 +2024-08-26 19:17:47,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.63 vs. limit=15.0 +2024-08-26 19:17:51,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120442.66666666667, ans=0.1 +2024-08-26 19:18:07,134 INFO [train.py:1114] (2/4) Epoch 10, batch 200, loss[loss=0.2489, simple_loss=0.3067, pruned_loss=0.06902, ctc_loss=0.1328, over 18194.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2848, pruned_loss=0.05755, ctc_loss=0.1084, over 2435318.47 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:18:07,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=120549.33333333333, ans=0.5 +2024-08-26 19:18:38,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120602.66666666667, ans=0.125 +2024-08-26 19:18:48,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120656.0, ans=0.125 +2024-08-26 19:18:49,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-26 19:18:50,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.15 vs. 
limit=12.0 +2024-08-26 19:19:04,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=120709.33333333333, ans=0.2 +2024-08-26 19:19:11,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=120762.66666666667, ans=0.125 +2024-08-26 19:19:12,220 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.459e+02 1.596e+02 1.815e+02 3.041e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-26 19:19:44,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.83 vs. limit=15.0 +2024-08-26 19:19:48,326 INFO [train.py:1114] (2/4) Epoch 10, batch 250, loss[loss=0.2215, simple_loss=0.2935, pruned_loss=0.05396, ctc_loss=0.104, over 19422.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2839, pruned_loss=0.05692, ctc_loss=0.107, over 2755755.23 frames. ], batch size: 67, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:19:48,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=120816.0, ans=0.2 +2024-08-26 19:19:51,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=120816.0, ans=0.2 +2024-08-26 19:19:53,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=120816.0, ans=0.125 +2024-08-26 19:19:55,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=120816.0, ans=0.0 +2024-08-26 19:20:09,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120869.33333333333, ans=0.125 +2024-08-26 19:20:16,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=120922.66666666667, ans=0.125 +2024-08-26 19:20:36,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=121029.33333333333, ans=0.025 +2024-08-26 19:20:42,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121029.33333333333, ans=0.0 +2024-08-26 19:20:45,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.86 vs. limit=15.0 +2024-08-26 19:20:45,515 INFO [train.py:1114] (2/4) Epoch 10, batch 300, loss[loss=0.2541, simple_loss=0.3084, pruned_loss=0.07327, ctc_loss=0.1334, over 19522.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2833, pruned_loss=0.05684, ctc_loss=0.1063, over 2999790.65 frames. ], batch size: 61, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:20:56,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=121136.0, ans=0.0 +2024-08-26 19:21:07,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=121189.33333333333, ans=0.0 +2024-08-26 19:21:08,844 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.14 vs. 
limit=15.0 +2024-08-26 19:21:17,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=121242.66666666667, ans=0.125 +2024-08-26 19:21:19,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121242.66666666667, ans=0.125 +2024-08-26 19:21:25,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121242.66666666667, ans=0.1 +2024-08-26 19:21:26,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=15.0 +2024-08-26 19:21:29,985 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.480e+02 1.641e+02 1.981e+02 3.456e+02, threshold=3.281e+02, percent-clipped=2.0 +2024-08-26 19:21:37,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=121349.33333333333, ans=0.0 +2024-08-26 19:21:38,275 INFO [train.py:1114] (2/4) Epoch 10, batch 350, loss[loss=0.1839, simple_loss=0.251, pruned_loss=0.04248, ctc_loss=0.0797, over 19752.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2838, pruned_loss=0.05712, ctc_loss=0.1065, over 3189245.73 frames. ], batch size: 48, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:21:47,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.21 vs. limit=6.0 +2024-08-26 19:21:56,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=121456.0, ans=0.05 +2024-08-26 19:22:03,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.56 vs. limit=22.5 +2024-08-26 19:22:16,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121562.66666666667, ans=0.0 +2024-08-26 19:22:24,850 INFO [train.py:1114] (2/4) Epoch 10, batch 400, loss[loss=0.2114, simple_loss=0.2912, pruned_loss=0.04714, ctc_loss=0.09359, over 19490.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2836, pruned_loss=0.05707, ctc_loss=0.1064, over 3340173.43 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:22:34,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=121669.33333333333, ans=0.0 +2024-08-26 19:22:39,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121669.33333333333, ans=0.125 +2024-08-26 19:22:58,802 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:23:03,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. 
limit=15.0 +2024-08-26 19:23:15,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121776.0, ans=0.125 +2024-08-26 19:23:18,033 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.471e+02 1.735e+02 2.020e+02 3.245e+02, threshold=3.470e+02, percent-clipped=0.0 +2024-08-26 19:23:26,373 INFO [train.py:1114] (2/4) Epoch 10, batch 450, loss[loss=0.2173, simple_loss=0.2922, pruned_loss=0.05149, ctc_loss=0.09845, over 19622.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2841, pruned_loss=0.05718, ctc_loss=0.1067, over 3449928.55 frames. ], batch size: 55, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:23:27,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=121882.66666666667, ans=0.125 +2024-08-26 19:23:53,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121989.33333333333, ans=0.125 +2024-08-26 19:23:54,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=121989.33333333333, ans=0.07 +2024-08-26 19:23:55,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=122042.66666666667, ans=0.125 +2024-08-26 19:24:19,331 INFO [train.py:1114] (2/4) Epoch 10, batch 500, loss[loss=0.2291, simple_loss=0.298, pruned_loss=0.05801, ctc_loss=0.1104, over 19669.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.283, pruned_loss=0.05654, ctc_loss=0.1055, over 3545733.99 frames. ], batch size: 63, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:24:40,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=122202.66666666667, ans=0.025 +2024-08-26 19:24:41,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=122202.66666666667, ans=0.5 +2024-08-26 19:24:49,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122256.0, ans=0.1 +2024-08-26 19:25:07,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=122309.33333333333, ans=0.2 +2024-08-26 19:25:11,344 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.449e+02 1.637e+02 1.959e+02 3.375e+02, threshold=3.275e+02, percent-clipped=0.0 +2024-08-26 19:25:14,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=122362.66666666667, ans=10.0 +2024-08-26 19:25:17,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122362.66666666667, ans=0.125 +2024-08-26 19:25:19,718 INFO [train.py:1114] (2/4) Epoch 10, batch 550, loss[loss=0.2344, simple_loss=0.2988, pruned_loss=0.06153, ctc_loss=0.1177, over 19210.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.283, pruned_loss=0.05654, ctc_loss=0.1056, over 3607666.55 frames. 
], batch size: 71, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:25:23,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=122416.0, ans=0.0 +2024-08-26 19:25:41,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=122522.66666666667, ans=0.125 +2024-08-26 19:25:51,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0 +2024-08-26 19:25:53,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122576.0, ans=0.1 +2024-08-26 19:26:10,276 INFO [train.py:1114] (2/4) Epoch 10, batch 600, loss[loss=0.2367, simple_loss=0.3023, pruned_loss=0.06227, ctc_loss=0.1163, over 19421.00 frames. ], tot_loss[loss=0.219, simple_loss=0.283, pruned_loss=0.05644, ctc_loss=0.1054, over 3665635.06 frames. ], batch size: 67, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:26:13,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0 +2024-08-26 19:26:49,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=122896.0, ans=0.125 +2024-08-26 19:26:50,246 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.480e+02 1.661e+02 1.846e+02 3.271e+02, threshold=3.322e+02, percent-clipped=0.0 +2024-08-26 19:26:55,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=122896.0, ans=0.125 +2024-08-26 19:26:58,398 INFO [train.py:1114] (2/4) Epoch 10, batch 650, loss[loss=0.2118, simple_loss=0.2795, pruned_loss=0.05251, ctc_loss=0.09788, over 19758.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2824, pruned_loss=0.05616, ctc_loss=0.1049, over 3715687.27 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:27:09,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-08-26 19:27:18,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123056.0, ans=0.125 +2024-08-26 19:27:51,538 INFO [train.py:1114] (2/4) Epoch 10, batch 700, loss[loss=0.2017, simple_loss=0.266, pruned_loss=0.04966, ctc_loss=0.09507, over 19725.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.283, pruned_loss=0.05634, ctc_loss=0.1052, over 3748664.78 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:27:58,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.38 vs. limit=15.0 +2024-08-26 19:27:59,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=123269.33333333333, ans=0.09899494936611666 +2024-08-26 19:28:13,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123322.66666666667, ans=0.0 +2024-08-26 19:28:17,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.62 vs. 
limit=15.0 +2024-08-26 19:28:23,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=123376.0, ans=0.0 +2024-08-26 19:28:29,132 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.526e+02 1.912e+02 2.394e+02 4.336e+02, threshold=3.825e+02, percent-clipped=8.0 +2024-08-26 19:28:33,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.09 vs. limit=15.0 +2024-08-26 19:28:38,773 INFO [train.py:1114] (2/4) Epoch 10, batch 750, loss[loss=0.2116, simple_loss=0.2837, pruned_loss=0.05065, ctc_loss=0.09556, over 19517.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2824, pruned_loss=0.05604, ctc_loss=0.1047, over 3775255.51 frames. ], batch size: 54, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:29:02,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=123589.33333333333, ans=0.125 +2024-08-26 19:29:14,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123642.66666666667, ans=0.125 +2024-08-26 19:29:16,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0 +2024-08-26 19:29:17,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0 +2024-08-26 19:29:27,334 INFO [train.py:1114] (2/4) Epoch 10, batch 800, loss[loss=0.1958, simple_loss=0.2501, pruned_loss=0.05128, ctc_loss=0.09754, over 19798.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.282, pruned_loss=0.05582, ctc_loss=0.1045, over 3797522.83 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:29:40,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.10 vs. limit=15.0 +2024-08-26 19:29:43,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=12.0 +2024-08-26 19:29:49,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.62 vs. 
limit=10.0 +2024-08-26 19:29:59,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=123909.33333333333, ans=0.025 +2024-08-26 19:29:59,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=123909.33333333333, ans=0.125 +2024-08-26 19:30:03,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=123909.33333333333, ans=0.0 +2024-08-26 19:30:07,516 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.505e+02 1.745e+02 2.038e+02 4.368e+02, threshold=3.490e+02, percent-clipped=1.0 +2024-08-26 19:30:13,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=123962.66666666667, ans=0.125 +2024-08-26 19:30:17,639 INFO [train.py:1114] (2/4) Epoch 10, batch 850, loss[loss=0.2291, simple_loss=0.2964, pruned_loss=0.05863, ctc_loss=0.1114, over 19643.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2822, pruned_loss=0.05596, ctc_loss=0.1045, over 3814987.05 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:30:22,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=124016.0, ans=0.0 +2024-08-26 19:30:25,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=124016.0, ans=0.2 +2024-08-26 19:30:37,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124122.66666666667, ans=0.125 +2024-08-26 19:30:39,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=124122.66666666667, ans=0.0 +2024-08-26 19:30:51,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=124176.0, ans=0.125 +2024-08-26 19:31:14,605 INFO [train.py:1114] (2/4) Epoch 10, batch 900, loss[loss=0.1967, simple_loss=0.2552, pruned_loss=0.05002, ctc_loss=0.09535, over 19427.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2819, pruned_loss=0.05612, ctc_loss=0.1048, over 3818467.02 frames. ], batch size: 48, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:32:10,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=124336.0, ans=0.0 +2024-08-26 19:32:25,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=124442.66666666667, ans=0.015 +2024-08-26 19:32:35,082 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.525e+02 1.733e+02 2.036e+02 4.140e+02, threshold=3.466e+02, percent-clipped=3.0 +2024-08-26 19:32:35,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=124496.0, ans=0.125 +2024-08-26 19:32:42,445 INFO [train.py:1114] (2/4) Epoch 10, batch 950, loss[loss=0.2041, simple_loss=0.2723, pruned_loss=0.04921, ctc_loss=0.09364, over 19486.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2827, pruned_loss=0.05639, ctc_loss=0.1054, over 3819321.92 frames. 
], batch size: 49, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:32:46,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=124549.33333333333, ans=0.025 +2024-08-26 19:32:52,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124549.33333333333, ans=0.1 +2024-08-26 19:33:24,265 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:33:24,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=124762.66666666667, ans=0.0 +2024-08-26 19:33:36,592 INFO [train.py:1114] (2/4) Epoch 10, batch 1000, loss[loss=0.1975, simple_loss=0.2734, pruned_loss=0.04424, ctc_loss=0.08312, over 19840.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2835, pruned_loss=0.05667, ctc_loss=0.1057, over 3814320.55 frames. ], batch size: 52, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:33:39,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=124816.0, ans=0.125 +2024-08-26 19:33:47,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=124869.33333333333, ans=0.2 +2024-08-26 19:33:52,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=124869.33333333333, ans=0.2 +2024-08-26 19:34:02,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=124922.66666666667, ans=0.025 +2024-08-26 19:34:16,844 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.79 vs. limit=22.5 +2024-08-26 19:34:19,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=125029.33333333333, ans=0.0 +2024-08-26 19:34:19,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.433e+02 1.580e+02 1.832e+02 3.141e+02, threshold=3.159e+02, percent-clipped=0.0 +2024-08-26 19:34:27,365 INFO [train.py:1114] (2/4) Epoch 10, batch 1050, loss[loss=0.2336, simple_loss=0.3083, pruned_loss=0.05674, ctc_loss=0.1136, over 19855.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2825, pruned_loss=0.05615, ctc_loss=0.1049, over 3821281.08 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:34:54,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=125082.66666666667, ans=0.125 +2024-08-26 19:34:58,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=125136.0, ans=0.025 +2024-08-26 19:35:02,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=125136.0, ans=0.125 +2024-08-26 19:35:30,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=125296.0, ans=0.0 +2024-08-26 19:35:36,352 INFO [train.py:1114] (2/4) Epoch 10, batch 1100, loss[loss=0.2225, simple_loss=0.2878, pruned_loss=0.0569, ctc_loss=0.1083, over 19592.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2822, pruned_loss=0.056, ctc_loss=0.1047, over 3828470.94 frames. 
], batch size: 52, lr: 1.49e-02, grad_scale: 8.0 +2024-08-26 19:35:36,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125349.33333333333, ans=0.1 +2024-08-26 19:35:50,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=125402.66666666667, ans=0.125 +2024-08-26 19:35:57,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.68 vs. limit=10.0 +2024-08-26 19:36:03,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0 +2024-08-26 19:36:18,878 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.433e+02 1.605e+02 1.841e+02 2.779e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-26 19:36:24,625 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:36:25,423 INFO [train.py:1114] (2/4) Epoch 10, batch 1150, loss[loss=0.2011, simple_loss=0.2714, pruned_loss=0.04802, ctc_loss=0.08722, over 19580.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2823, pruned_loss=0.05613, ctc_loss=0.1049, over 3829001.93 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0 +2024-08-26 19:36:34,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=125669.33333333333, ans=0.0 +2024-08-26 19:36:47,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=125722.66666666667, ans=0.125 +2024-08-26 19:36:54,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125722.66666666667, ans=0.1 +2024-08-26 19:37:08,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.86 vs. limit=22.5 +2024-08-26 19:37:16,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=125829.33333333333, ans=0.125 +2024-08-26 19:37:17,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125882.66666666667, ans=0.125 +2024-08-26 19:37:17,640 INFO [train.py:1114] (2/4) Epoch 10, batch 1200, loss[loss=0.2315, simple_loss=0.2929, pruned_loss=0.06218, ctc_loss=0.1146, over 19841.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2828, pruned_loss=0.05616, ctc_loss=0.1051, over 3825625.47 frames. 
], batch size: 57, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:37:36,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125989.33333333333, ans=0.125 +2024-08-26 19:37:40,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=125989.33333333333, ans=0.0 +2024-08-26 19:37:57,386 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.466e+02 1.608e+02 1.824e+02 2.979e+02, threshold=3.216e+02, percent-clipped=0.0 +2024-08-26 19:38:02,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=126096.0, ans=0.2 +2024-08-26 19:38:03,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=126149.33333333333, ans=0.125 +2024-08-26 19:38:04,046 INFO [train.py:1114] (2/4) Epoch 10, batch 1250, loss[loss=0.2259, simple_loss=0.2928, pruned_loss=0.05699, ctc_loss=0.1125, over 19509.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2831, pruned_loss=0.056, ctc_loss=0.1049, over 3843280.40 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:38:04,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126149.33333333333, ans=0.125 +2024-08-26 19:38:04,307 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:39:40,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.66 vs. limit=22.5 +2024-08-26 19:39:40,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.19 vs. limit=15.0 +2024-08-26 19:40:04,477 INFO [train.py:1114] (2/4) Epoch 10, batch 1300, loss[loss=0.2279, simple_loss=0.2875, pruned_loss=0.06082, ctc_loss=0.1169, over 18891.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2825, pruned_loss=0.05567, ctc_loss=0.1042, over 3846876.52 frames. 
], batch size: 76, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:40:09,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=126416.0, ans=0.125 +2024-08-26 19:40:29,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=126522.66666666667, ans=0.125 +2024-08-26 19:40:30,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126522.66666666667, ans=0.125 +2024-08-26 19:40:31,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=126522.66666666667, ans=0.0 +2024-08-26 19:40:35,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=126522.66666666667, ans=0.125 +2024-08-26 19:40:52,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=126629.33333333333, ans=0.0 +2024-08-26 19:40:54,259 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.480e+02 1.716e+02 1.981e+02 3.061e+02, threshold=3.432e+02, percent-clipped=0.0 +2024-08-26 19:41:00,868 INFO [train.py:1114] (2/4) Epoch 10, batch 1350, loss[loss=0.2022, simple_loss=0.2751, pruned_loss=0.04745, ctc_loss=0.0862, over 19765.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2819, pruned_loss=0.05536, ctc_loss=0.1035, over 3857709.68 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:41:22,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=126736.0, ans=0.0 +2024-08-26 19:41:32,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=126842.66666666667, ans=0.1 +2024-08-26 19:41:44,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.00 vs. limit=6.0 +2024-08-26 19:41:46,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=126896.0, ans=0.125 +2024-08-26 19:41:52,401 INFO [train.py:1114] (2/4) Epoch 10, batch 1400, loss[loss=0.1991, simple_loss=0.2548, pruned_loss=0.05284, ctc_loss=0.09432, over 19682.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2816, pruned_loss=0.0552, ctc_loss=0.1031, over 3865118.71 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:42:01,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127002.66666666667, ans=0.1 +2024-08-26 19:42:19,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=127002.66666666667, ans=0.125 +2024-08-26 19:42:32,000 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.48 vs. 
limit=15.0 +2024-08-26 19:42:33,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=127109.33333333333, ans=0.0 +2024-08-26 19:42:39,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=127162.66666666667, ans=0.2 +2024-08-26 19:42:43,188 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.452e+02 1.585e+02 1.952e+02 4.788e+02, threshold=3.170e+02, percent-clipped=2.0 +2024-08-26 19:42:46,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-08-26 19:42:49,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=127216.0, ans=0.125 +2024-08-26 19:42:49,762 INFO [train.py:1114] (2/4) Epoch 10, batch 1450, loss[loss=0.2517, simple_loss=0.309, pruned_loss=0.07092, ctc_loss=0.1314, over 19658.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2823, pruned_loss=0.05563, ctc_loss=0.1038, over 3863069.26 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:42:52,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.91 vs. limit=10.0 +2024-08-26 19:42:54,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=127216.0, ans=0.2 +2024-08-26 19:42:55,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=127216.0, ans=0.0 +2024-08-26 19:43:01,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0 +2024-08-26 19:43:11,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=127322.66666666667, ans=0.0 +2024-08-26 19:43:24,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=127322.66666666667, ans=0.125 +2024-08-26 19:43:29,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127376.0, ans=0.125 +2024-08-26 19:43:30,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=127376.0, ans=0.07 +2024-08-26 19:43:40,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=127429.33333333333, ans=0.125 +2024-08-26 19:43:48,225 INFO [train.py:1114] (2/4) Epoch 10, batch 1500, loss[loss=0.2223, simple_loss=0.2874, pruned_loss=0.05611, ctc_loss=0.1124, over 19582.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2828, pruned_loss=0.05576, ctc_loss=0.104, over 3862044.66 frames. 
], batch size: 57, lr: 1.48e-02, grad_scale: 16.0
+2024-08-26 19:43:49,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=127482.66666666667, ans=0.0
+2024-08-26 19:43:53,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=127482.66666666667, ans=0.125
+2024-08-26 19:44:05,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=127536.0, ans=0.07
+2024-08-26 19:44:06,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=127536.0, ans=0.0
+2024-08-26 19:44:10,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127589.33333333333, ans=0.1
+2024-08-26 19:44:29,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=127642.66666666667, ans=0.0
+2024-08-26 19:44:34,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=127696.0, ans=0.0
+2024-08-26 19:44:37,661 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.427e+02 1.587e+02 1.794e+02 3.285e+02, threshold=3.174e+02, percent-clipped=1.0
+2024-08-26 19:44:38,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=127696.0, ans=0.5
+2024-08-26 19:44:52,497 INFO [train.py:1114] (2/4) Epoch 10, batch 1550, loss[loss=0.225, simple_loss=0.2905, pruned_loss=0.05822, ctc_loss=0.1075, over 19608.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.283, pruned_loss=0.05621, ctc_loss=0.105, over 3845947.00 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0
+2024-08-26 19:45:19,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=127909.33333333333, ans=0.125
+2024-08-26 19:45:22,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=127909.33333333333, ans=15.0
+2024-08-26 19:45:28,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=127909.33333333333, ans=0.125
+2024-08-26 19:45:31,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=127962.66666666667, ans=0.125
+2024-08-26 19:45:43,643 INFO [train.py:1114] (2/4) Epoch 10, batch 1600, loss[loss=0.2241, simple_loss=0.2945, pruned_loss=0.05524, ctc_loss=0.108, over 19828.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2825, pruned_loss=0.05614, ctc_loss=0.1049, over 3834494.88 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:45:51,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.05 vs. limit=15.0
+2024-08-26 19:45:58,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=128069.33333333333, ans=0.2
+2024-08-26 19:46:00,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128069.33333333333, ans=0.1
+2024-08-26 19:46:14,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=128176.0, ans=0.0
+2024-08-26 19:46:26,519 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.460e+02 1.671e+02 2.068e+02 2.984e+02, threshold=3.342e+02, percent-clipped=0.0
+2024-08-26 19:46:33,080 INFO [train.py:1114] (2/4) Epoch 10, batch 1650, loss[loss=0.2117, simple_loss=0.2802, pruned_loss=0.05218, ctc_loss=0.09687, over 19667.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2821, pruned_loss=0.05593, ctc_loss=0.1044, over 3831066.69 frames. ], batch size: 59, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:46:38,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=128282.66666666667, ans=0.0
+2024-08-26 19:46:46,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=128336.0, ans=0.2
+2024-08-26 19:46:57,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128389.33333333333, ans=0.1
+2024-08-26 19:46:58,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-08-26 19:46:58,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=128389.33333333333, ans=0.2
+2024-08-26 19:47:02,550 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:47:11,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128442.66666666667, ans=0.1
+2024-08-26 19:47:28,670 INFO [train.py:1114] (2/4) Epoch 10, batch 1700, loss[loss=0.189, simple_loss=0.2498, pruned_loss=0.04705, ctc_loss=0.08518, over 19668.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2814, pruned_loss=0.05518, ctc_loss=0.1034, over 3845478.45 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:47:29,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=128549.33333333333, ans=0.0
+2024-08-26 19:47:43,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=128602.66666666667, ans=0.0
+2024-08-26 19:47:45,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.41 vs. limit=12.0
+2024-08-26 19:47:56,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128709.33333333333, ans=0.1
+2024-08-26 19:48:11,123 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:48:18,887 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.440e+02 1.568e+02 1.897e+02 2.765e+02, threshold=3.136e+02, percent-clipped=0.0
+2024-08-26 19:48:23,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=128762.66666666667, ans=0.125
+2024-08-26 19:48:25,123 INFO [train.py:1114] (2/4) Epoch 10, batch 1750, loss[loss=0.1864, simple_loss=0.252, pruned_loss=0.04383, ctc_loss=0.08287, over 19644.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2808, pruned_loss=0.05501, ctc_loss=0.1028, over 3850000.57 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0
+2024-08-26 19:48:33,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=128869.33333333333, ans=0.0
+2024-08-26 19:48:41,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=128922.66666666667, ans=0.0
+2024-08-26 19:48:47,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=128922.66666666667, ans=0.125
+2024-08-26 19:49:08,985 INFO [train.py:1114] (2/4) Epoch 10, batch 1800, loss[loss=0.205, simple_loss=0.2797, pruned_loss=0.0477, ctc_loss=0.08736, over 19621.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2812, pruned_loss=0.05528, ctc_loss=0.1032, over 3852826.67 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:49:17,628 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0
+2024-08-26 19:49:37,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.93 vs. limit=15.0
+2024-08-26 19:49:45,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129296.0, ans=0.0
+2024-08-26 19:49:49,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.466e+02 1.715e+02 2.130e+02 3.505e+02, threshold=3.430e+02, percent-clipped=4.0
+2024-08-26 19:49:55,607 INFO [train.py:1114] (2/4) Epoch 10, batch 1850, loss[loss=0.2512, simple_loss=0.3101, pruned_loss=0.06978, ctc_loss=0.1316, over 19576.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2811, pruned_loss=0.05523, ctc_loss=0.1031, over 3855910.98 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:49:55,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=129349.33333333333, ans=0.125
+2024-08-26 19:50:08,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.73 vs. limit=15.0
+2024-08-26 19:50:14,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=129456.0, ans=0.025
+2024-08-26 19:50:30,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=129509.33333333333, ans=0.125
+2024-08-26 19:50:50,325 INFO [train.py:1114] (2/4) Epoch 10, batch 1900, loss[loss=0.2032, simple_loss=0.2781, pruned_loss=0.04619, ctc_loss=0.08958, over 19650.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2817, pruned_loss=0.0553, ctc_loss=0.103, over 3860739.14 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:50:50,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=129616.0, ans=0.125
+2024-08-26 19:50:51,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129616.0, ans=0.125
+2024-08-26 19:51:00,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=129669.33333333333, ans=0.2
+2024-08-26 19:51:04,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=129669.33333333333, ans=0.0
+2024-08-26 19:51:21,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=129776.0, ans=0.1
+2024-08-26 19:51:25,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=129829.33333333333, ans=0.2
+2024-08-26 19:51:27,692 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.498e+02 1.655e+02 1.944e+02 4.101e+02, threshold=3.311e+02, percent-clipped=1.0
+2024-08-26 19:51:33,748 INFO [train.py:1114] (2/4) Epoch 10, batch 1950, loss[loss=0.2024, simple_loss=0.2703, pruned_loss=0.04846, ctc_loss=0.09388, over 19575.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2826, pruned_loss=0.05551, ctc_loss=0.1033, over 3869785.17 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:51:35,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.11 vs. limit=12.0
+2024-08-26 19:51:45,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=129936.0, ans=0.0
+2024-08-26 19:51:46,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=129936.0, ans=0.125
+2024-08-26 19:51:53,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=129989.33333333333, ans=0.2
+2024-08-26 19:52:00,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=130042.66666666667, ans=0.125
+2024-08-26 19:52:33,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=130042.66666666667, ans=0.125
+2024-08-26 19:52:51,606 INFO [train.py:1114] (2/4) Epoch 10, batch 2000, loss[loss=0.2037, simple_loss=0.2573, pruned_loss=0.05449, ctc_loss=0.1029, over 19625.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2838, pruned_loss=0.05627, ctc_loss=0.1047, over 3855609.89 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:53:02,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=15.0
+2024-08-26 19:53:14,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.11 vs. limit=15.0
+2024-08-26 19:53:26,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=130362.66666666667, ans=0.125
+2024-08-26 19:53:27,858 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=15.0
+2024-08-26 19:53:29,079 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.467e+02 1.617e+02 1.850e+02 3.299e+02, threshold=3.233e+02, percent-clipped=0.0
+2024-08-26 19:53:35,210 INFO [train.py:1114] (2/4) Epoch 10, batch 2050, loss[loss=0.2133, simple_loss=0.2675, pruned_loss=0.05886, ctc_loss=0.1034, over 19751.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2825, pruned_loss=0.05605, ctc_loss=0.1042, over 3851886.73 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:53:53,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=130522.66666666667, ans=0.0
+2024-08-26 19:53:56,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=130522.66666666667, ans=0.125
+2024-08-26 19:54:04,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=130576.0, ans=0.0
+2024-08-26 19:54:18,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=130682.66666666667, ans=0.0
+2024-08-26 19:54:18,663 INFO [train.py:1114] (2/4) Epoch 10, batch 2100, loss[loss=0.1995, simple_loss=0.2683, pruned_loss=0.04704, ctc_loss=0.09182, over 19774.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2814, pruned_loss=0.05532, ctc_loss=0.1029, over 3858696.97 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 32.0
+2024-08-26 19:54:18,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=130682.66666666667, ans=0.2
+2024-08-26 19:54:27,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=130736.0, ans=0.125
+2024-08-26 19:54:27,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.68 vs. limit=15.0
+2024-08-26 19:54:30,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=130736.0, ans=0.0
+2024-08-26 19:54:35,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=130789.33333333333, ans=0.125
+2024-08-26 19:54:35,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=130789.33333333333, ans=0.025
+2024-08-26 19:54:49,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=130842.66666666667, ans=0.125
+2024-08-26 19:54:52,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=130842.66666666667, ans=0.0
+2024-08-26 19:54:54,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=130896.0, ans=0.125
+2024-08-26 19:54:56,945 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.404e+02 1.614e+02 1.979e+02 3.349e+02, threshold=3.228e+02, percent-clipped=1.0
+2024-08-26 19:55:03,183 INFO [train.py:1114] (2/4) Epoch 10, batch 2150, loss[loss=0.2078, simple_loss=0.2728, pruned_loss=0.05115, ctc_loss=0.1013, over 19872.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2807, pruned_loss=0.05497, ctc_loss=0.1022, over 3869140.22 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:55:05,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130949.33333333333, ans=0.125
+2024-08-26 19:55:08,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.16 vs. limit=22.5
+2024-08-26 19:55:08,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.77 vs. limit=6.0
+2024-08-26 19:55:14,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=131002.66666666667, ans=0.0
+2024-08-26 19:55:15,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=131002.66666666667, ans=0.125
+2024-08-26 19:55:16,273 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:55:26,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=131056.0, ans=0.0
+2024-08-26 19:55:32,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=131109.33333333334, ans=0.125
+2024-08-26 19:55:35,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.19 vs. limit=22.5
+2024-08-26 19:55:46,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=131162.66666666666, ans=0.125
+2024-08-26 19:55:50,185 INFO [train.py:1114] (2/4) Epoch 10, batch 2200, loss[loss=0.2371, simple_loss=0.3045, pruned_loss=0.06135, ctc_loss=0.1176, over 19591.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.281, pruned_loss=0.05506, ctc_loss=0.1024, over 3867343.91 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:56:11,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.90 vs. limit=15.0
+2024-08-26 19:56:22,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=131322.66666666666, ans=0.125
+2024-08-26 19:56:26,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=131376.0, ans=0.0
+2024-08-26 19:56:28,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.75 vs. limit=15.0
+2024-08-26 19:56:34,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=131376.0, ans=0.125
+2024-08-26 19:56:35,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131429.33333333334, ans=0.1
+2024-08-26 19:56:35,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=131429.33333333334, ans=0.05
+2024-08-26 19:56:38,534 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.505e+02 1.694e+02 1.989e+02 3.015e+02, threshold=3.388e+02, percent-clipped=0.0
+2024-08-26 19:56:41,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.26 vs. limit=10.0
+2024-08-26 19:56:43,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131429.33333333334, ans=0.1
+2024-08-26 19:56:44,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=131482.66666666666, ans=0.2
+2024-08-26 19:56:44,633 INFO [train.py:1114] (2/4) Epoch 10, batch 2250, loss[loss=0.2154, simple_loss=0.29, pruned_loss=0.05063, ctc_loss=0.0991, over 19614.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2808, pruned_loss=0.05476, ctc_loss=0.102, over 3867782.49 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:56:50,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131482.66666666666, ans=0.1
+2024-08-26 19:56:51,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.37 vs. limit=15.0
+2024-08-26 19:56:55,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=131536.0, ans=0.2
+2024-08-26 19:57:25,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=131696.0, ans=0.125
+2024-08-26 19:57:27,772 INFO [train.py:1114] (2/4) Epoch 10, batch 2300, loss[loss=0.19, simple_loss=0.2595, pruned_loss=0.04432, ctc_loss=0.07978, over 19489.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2803, pruned_loss=0.05496, ctc_loss=0.1023, over 3861314.54 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 19:57:53,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=131909.33333333334, ans=0.2
+2024-08-26 19:57:56,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=131909.33333333334, ans=0.2
+2024-08-26 19:57:56,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=131909.33333333334, ans=0.125
+2024-08-26 19:58:02,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=131962.66666666666, ans=0.125
+2024-08-26 19:58:05,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.499e+02 1.709e+02 2.092e+02 3.241e+02, threshold=3.418e+02, percent-clipped=0.0
+2024-08-26 19:58:43,744 INFO [train.py:1114] (2/4) Epoch 10, batch 2350, loss[loss=0.2253, simple_loss=0.2994, pruned_loss=0.05491, ctc_loss=0.1036, over 19675.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2805, pruned_loss=0.05519, ctc_loss=0.1028, over 3863755.80 frames. ], batch size: 63, lr: 1.46e-02, grad_scale: 16.0
+2024-08-26 19:58:46,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132016.0, ans=0.1
+2024-08-26 19:58:46,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.20 vs. limit=15.0
+2024-08-26 19:58:47,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132016.0, ans=0.1
+2024-08-26 19:59:01,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=132069.33333333334, ans=0.125
+2024-08-26 19:59:07,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=132122.66666666666, ans=0.125
+2024-08-26 19:59:08,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=132122.66666666666, ans=0.025
+2024-08-26 19:59:18,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=132176.0, ans=0.05
+2024-08-26 19:59:21,652 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.56 vs. limit=12.0
+2024-08-26 19:59:32,687 INFO [train.py:1114] (2/4) Epoch 10, batch 2400, loss[loss=0.2247, simple_loss=0.2886, pruned_loss=0.05831, ctc_loss=0.1103, over 19302.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2826, pruned_loss=0.05606, ctc_loss=0.1041, over 3858059.15 frames. ], batch size: 71, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 20:00:19,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=132442.66666666666, ans=15.0
+2024-08-26 20:00:20,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=132442.66666666666, ans=0.0
+2024-08-26 20:00:21,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=132442.66666666666, ans=0.09899494936611666
+2024-08-26 20:00:36,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.532e+02 1.694e+02 1.900e+02 3.260e+02, threshold=3.387e+02, percent-clipped=0.0
+2024-08-26 20:00:42,849 INFO [train.py:1114] (2/4) Epoch 10, batch 2450, loss[loss=0.2922, simple_loss=0.3155, pruned_loss=0.09829, ctc_loss=0.1808, over 13563.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2871, pruned_loss=0.05939, ctc_loss=0.1107, over 3731945.56 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 32.0
+2024-08-26 20:00:58,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=132549.33333333334, ans=0.025
+2024-08-26 20:01:06,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=132602.66666666666, ans=0.125
+2024-08-26 20:01:11,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=132656.0, ans=0.0
+2024-08-26 20:03:28,142 INFO [train.py:1114] (2/4) Epoch 11, batch 0, loss[loss=0.2159, simple_loss=0.2788, pruned_loss=0.05593, ctc_loss=0.1027, over 19782.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2788, pruned_loss=0.05593, ctc_loss=0.1027, over 19782.00 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:03:28,143 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-26 20:03:42,229 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1858, simple_loss=0.2776, pruned_loss=0.03491, ctc_loss=0.06042, over 944034.00 frames.
+2024-08-26 20:03:42,230 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB
+2024-08-26 20:03:47,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. limit=6.0
+2024-08-26 20:04:00,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=132864.0, ans=0.0
+2024-08-26 20:04:03,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.79 vs. limit=5.0
+2024-08-26 20:04:11,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=132864.0, ans=0.125
+2024-08-26 20:04:11,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.77 vs. limit=10.0
+2024-08-26 20:04:16,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=132917.33333333334, ans=0.0
+2024-08-26 20:04:17,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=132917.33333333334, ans=0.125
+2024-08-26 20:04:32,368 INFO [train.py:1114] (2/4) Epoch 11, batch 50, loss[loss=0.1877, simple_loss=0.2543, pruned_loss=0.04435, ctc_loss=0.08076, over 19726.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2852, pruned_loss=0.05679, ctc_loss=0.1066, over 844464.80 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:04:37,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=133024.0, ans=0.125
+2024-08-26 20:04:37,952 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.624e+02 1.801e+02 2.017e+02 3.320e+02, threshold=3.603e+02, percent-clipped=0.0
+2024-08-26 20:04:50,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=133077.33333333334, ans=0.2
+2024-08-26 20:05:21,260 INFO [train.py:1114] (2/4) Epoch 11, batch 100, loss[loss=0.2134, simple_loss=0.2769, pruned_loss=0.05528, ctc_loss=0.09853, over 19710.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2846, pruned_loss=0.05574, ctc_loss=0.1048, over 1499152.40 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:05:22,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=133290.66666666666, ans=0.2
+2024-08-26 20:05:35,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.96 vs. limit=15.0
+2024-08-26 20:05:57,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133450.66666666666, ans=0.0
+2024-08-26 20:06:05,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=15.0
+2024-08-26 20:06:10,884 INFO [train.py:1114] (2/4) Epoch 11, batch 150, loss[loss=0.2065, simple_loss=0.2601, pruned_loss=0.05498, ctc_loss=0.1076, over 19685.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2821, pruned_loss=0.05471, ctc_loss=0.1029, over 2028078.50 frames. ], batch size: 47, lr: 1.39e-02, grad_scale: 32.0
+2024-08-26 20:06:13,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.38 vs. limit=22.5
+2024-08-26 20:06:16,413 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.457e+02 1.584e+02 1.841e+02 2.561e+02, threshold=3.167e+02, percent-clipped=0.0
+2024-08-26 20:06:19,446 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:06:36,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=133664.0, ans=0.025
+2024-08-26 20:06:37,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=133664.0, ans=0.2
+2024-08-26 20:06:40,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133717.33333333334, ans=0.125
+2024-08-26 20:06:42,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=133717.33333333334, ans=0.125
+2024-08-26 20:06:53,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=133717.33333333334, ans=0.02
+2024-08-26 20:07:45,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=133770.66666666666, ans=0.125
+2024-08-26 20:08:02,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=133770.66666666666, ans=0.0
+2024-08-26 20:08:08,067 INFO [train.py:1114] (2/4) Epoch 11, batch 200, loss[loss=0.2377, simple_loss=0.2987, pruned_loss=0.06411, ctc_loss=0.1212, over 18112.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2815, pruned_loss=0.05472, ctc_loss=0.1027, over 2434418.27 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:08:13,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.59 vs. limit=15.0
+2024-08-26 20:08:32,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=133930.66666666666, ans=0.0
+2024-08-26 20:08:33,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133930.66666666666, ans=0.1
+2024-08-26 20:08:55,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=134037.33333333334, ans=0.125
+2024-08-26 20:09:00,074 INFO [train.py:1114] (2/4) Epoch 11, batch 250, loss[loss=0.2094, simple_loss=0.286, pruned_loss=0.04728, ctc_loss=0.09554, over 19405.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2805, pruned_loss=0.05391, ctc_loss=0.101, over 2754380.29 frames. ], batch size: 67, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:09:05,646 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.397e+02 1.518e+02 1.749e+02 2.921e+02, threshold=3.037e+02, percent-clipped=0.0
+2024-08-26 20:09:07,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.43 vs. limit=15.0
+2024-08-26 20:09:12,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=134144.0, ans=0.2
+2024-08-26 20:09:14,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=134144.0, ans=0.0
+2024-08-26 20:09:14,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=134144.0, ans=0.125
+2024-08-26 20:09:17,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=134144.0, ans=0.2
+2024-08-26 20:09:18,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=134144.0, ans=0.09899494936611666
+2024-08-26 20:09:25,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134197.33333333334, ans=0.1
+2024-08-26 20:09:30,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0
+2024-08-26 20:09:43,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=134304.0, ans=0.0
+2024-08-26 20:09:44,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=134304.0, ans=0.2
+2024-08-26 20:09:46,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=134304.0, ans=0.0
+2024-08-26 20:09:51,426 INFO [train.py:1114] (2/4) Epoch 11, batch 300, loss[loss=0.2049, simple_loss=0.2797, pruned_loss=0.04702, ctc_loss=0.09027, over 19517.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2798, pruned_loss=0.05371, ctc_loss=0.1008, over 2998972.64 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:10:02,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0
+2024-08-26 20:10:12,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134464.0, ans=0.125
+2024-08-26 20:10:41,580 INFO [train.py:1114] (2/4) Epoch 11, batch 350, loss[loss=0.1965, simple_loss=0.2575, pruned_loss=0.04981, ctc_loss=0.08995, over 19767.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2801, pruned_loss=0.05387, ctc_loss=0.1012, over 3189034.92 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:10:44,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=134624.0, ans=0.125
+2024-08-26 20:10:46,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=134624.0, ans=0.125
+2024-08-26 20:10:47,202 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.479e+02 1.637e+02 2.052e+02 3.441e+02, threshold=3.275e+02, percent-clipped=1.0
+2024-08-26 20:11:01,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=134730.66666666666, ans=0.2
+2024-08-26 20:11:20,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=134784.0, ans=0.125
+2024-08-26 20:11:25,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=134837.33333333334, ans=0.125
+2024-08-26 20:11:30,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.59 vs. limit=15.0
+2024-08-26 20:11:30,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134890.66666666666, ans=0.125
+2024-08-26 20:11:31,275 INFO [train.py:1114] (2/4) Epoch 11, batch 400, loss[loss=0.1903, simple_loss=0.2678, pruned_loss=0.04048, ctc_loss=0.07965, over 19512.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2791, pruned_loss=0.05335, ctc_loss=0.1002, over 3342037.91 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:11:32,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134890.66666666666, ans=0.1
+2024-08-26 20:11:40,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=134890.66666666666, ans=0.125
+2024-08-26 20:12:03,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=135050.66666666666, ans=0.2
+2024-08-26 20:12:18,537 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.11 vs. limit=15.0
+2024-08-26 20:12:20,811 INFO [train.py:1114] (2/4) Epoch 11, batch 450, loss[loss=0.2116, simple_loss=0.2895, pruned_loss=0.04822, ctc_loss=0.09308, over 19623.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2791, pruned_loss=0.05327, ctc_loss=0.0997, over 3450391.50 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:12:29,028 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.489e+02 1.652e+02 2.008e+02 3.634e+02, threshold=3.305e+02, percent-clipped=1.0
+2024-08-26 20:13:01,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.93 vs. limit=10.0
+2024-08-26 20:13:01,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=135370.66666666666, ans=0.2
+2024-08-26 20:13:05,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.50 vs. limit=22.5
+2024-08-26 20:13:11,617 INFO [train.py:1114] (2/4) Epoch 11, batch 500, loss[loss=0.2194, simple_loss=0.2892, pruned_loss=0.05543, ctc_loss=0.09706, over 19673.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2781, pruned_loss=0.05311, ctc_loss=0.09922, over 3545705.63 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:13:15,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.10 vs. limit=22.5
+2024-08-26 20:13:19,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=135424.0, ans=0.0
+2024-08-26 20:13:24,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0
+2024-08-26 20:13:24,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0
+2024-08-26 20:13:36,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.13 vs. limit=15.0
+2024-08-26 20:13:42,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=135584.0, ans=0.0
+2024-08-26 20:13:50,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=135637.33333333334, ans=0.035
+2024-08-26 20:13:58,587 INFO [train.py:1114] (2/4) Epoch 11, batch 550, loss[loss=0.2236, simple_loss=0.2874, pruned_loss=0.0585, ctc_loss=0.1071, over 19228.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2784, pruned_loss=0.05341, ctc_loss=0.09979, over 3609068.07 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0
+2024-08-26 20:13:58,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=135690.66666666666, ans=0.0
+2024-08-26 20:14:04,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135690.66666666666, ans=0.0
+2024-08-26 20:14:04,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=135690.66666666666, ans=0.125
+2024-08-26 20:14:06,854 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.449e+02 1.695e+02 2.078e+02 4.377e+02, threshold=3.390e+02, percent-clipped=1.0
+2024-08-26 20:14:10,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.93 vs. limit=15.0
+2024-08-26 20:14:14,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.45 vs. limit=12.0
+2024-08-26 20:14:19,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.75 vs. limit=12.0
+2024-08-26 20:14:25,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135797.33333333334, ans=0.125
+2024-08-26 20:14:37,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135850.66666666666, ans=0.1
+2024-08-26 20:14:50,537 INFO [train.py:1114] (2/4) Epoch 11, batch 600, loss[loss=0.22, simple_loss=0.2896, pruned_loss=0.05429, ctc_loss=0.1047, over 19405.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2788, pruned_loss=0.05336, ctc_loss=0.09975, over 3666460.41 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:14:52,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.85 vs. limit=22.5
+2024-08-26 20:15:05,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136010.66666666666, ans=0.125
+2024-08-26 20:15:07,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=136010.66666666666, ans=0.0
+2024-08-26 20:15:09,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=136064.0, ans=0.0
+2024-08-26 20:15:41,576 INFO [train.py:1114] (2/4) Epoch 11, batch 650, loss[loss=0.2024, simple_loss=0.2713, pruned_loss=0.04897, ctc_loss=0.08879, over 19769.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2781, pruned_loss=0.05305, ctc_loss=0.09908, over 3716733.32 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:15:44,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136224.0, ans=0.125
+2024-08-26 20:15:45,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=136224.0, ans=0.2
+2024-08-26 20:15:47,087 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.457e+02 1.627e+02 2.058e+02 3.143e+02, threshold=3.253e+02, percent-clipped=0.0
+2024-08-26 20:16:02,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136330.66666666666, ans=0.1
+2024-08-26 20:16:04,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.00 vs. limit=15.0
+2024-08-26 20:16:06,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=136330.66666666666, ans=0.125
+2024-08-26 20:16:07,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.72 vs. limit=15.0
+2024-08-26 20:16:13,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136384.0, ans=0.125
+2024-08-26 20:16:27,818 INFO [train.py:1114] (2/4) Epoch 11, batch 700, loss[loss=0.1832, simple_loss=0.2555, pruned_loss=0.03985, ctc_loss=0.07786, over 19736.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2778, pruned_loss=0.05268, ctc_loss=0.09861, over 3748573.45 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:16:30,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=136490.66666666666, ans=0.0
+2024-08-26 20:16:39,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=136544.0, ans=0.125
+2024-08-26 20:16:40,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=136544.0, ans=0.2
+2024-08-26 20:16:42,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=136544.0, ans=0.0
+2024-08-26 20:17:00,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=136650.66666666666, ans=0.05
+2024-08-26 20:17:05,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=136650.66666666666, ans=0.0
+2024-08-26 20:17:06,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=136704.0, ans=0.0
+2024-08-26 20:17:11,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.59 vs. limit=22.5
+2024-08-26 20:17:16,586 INFO [train.py:1114] (2/4) Epoch 11, batch 750, loss[loss=0.1941, simple_loss=0.262, pruned_loss=0.04579, ctc_loss=0.08651, over 19502.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2778, pruned_loss=0.0527, ctc_loss=0.09862, over 3773497.42 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:17:23,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=136757.33333333334, ans=0.125
+2024-08-26 20:17:24,641 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.496e+02 1.727e+02 2.151e+02 3.286e+02, threshold=3.455e+02, percent-clipped=1.0
+2024-08-26 20:17:26,848 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:17:32,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=136810.66666666666, ans=0.2
+2024-08-26 20:17:38,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=136864.0, ans=0.1
+2024-08-26 20:17:52,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=136917.33333333334, ans=0.0
+2024-08-26 20:18:08,122 INFO [train.py:1114] (2/4) Epoch 11, batch 800, loss[loss=0.1843, simple_loss=0.2447, pruned_loss=0.04561, ctc_loss=0.0817, over 19800.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2772, pruned_loss=0.05267, ctc_loss=0.09854, over 3795681.07 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:18:08,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137024.0, ans=0.1
+2024-08-26 20:18:16,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=137077.33333333334, ans=0.125
+2024-08-26 20:18:26,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=137130.66666666666, ans=0.025
+2024-08-26 20:18:32,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=137130.66666666666, ans=0.2
+2024-08-26 20:18:41,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.48 vs. limit=10.0
+2024-08-26 20:19:09,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.42 vs. limit=10.0
+2024-08-26 20:19:27,341 INFO [train.py:1114] (2/4) Epoch 11, batch 850, loss[loss=0.2393, simple_loss=0.3038, pruned_loss=0.06356, ctc_loss=0.119, over 19636.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2768, pruned_loss=0.05257, ctc_loss=0.09836, over 3814999.06 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0
+2024-08-26 20:19:39,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137290.66666666666, ans=0.1
+2024-08-26 20:19:39,850 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.453e+02 1.601e+02 1.920e+02 5.497e+02, threshold=3.202e+02, percent-clipped=1.0
+2024-08-26 20:19:42,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=137290.66666666666, ans=0.025
+2024-08-26 20:19:47,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137344.0, ans=0.125
+2024-08-26 20:19:52,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=137344.0, ans=0.125
+2024-08-26 20:19:54,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=137344.0, ans=0.125
+2024-08-26 20:20:01,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137397.33333333334, ans=0.125
+2024-08-26 20:20:46,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=137504.0, ans=0.125
+2024-08-26 20:20:53,069 INFO [train.py:1114] (2/4) Epoch 11, batch 900, loss[loss=0.192, simple_loss=0.2503, pruned_loss=0.0484, ctc_loss=0.09208, over 19422.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2777, pruned_loss=0.05316, ctc_loss=0.0993, over 3820246.27 frames. ], batch size: 48, lr: 1.37e-02, grad_scale: 16.0
+2024-08-26 20:21:09,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=137610.66666666666, ans=0.04949747468305833
+2024-08-26 20:21:17,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=137664.0, ans=0.125
+2024-08-26 20:21:18,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=137664.0, ans=0.125
+2024-08-26 20:21:19,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137664.0, ans=0.1
+2024-08-26 20:21:28,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=137717.33333333334, ans=0.5
+2024-08-26 20:21:28,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=137717.33333333334, ans=0.025
+2024-08-26 20:21:33,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=137717.33333333334, ans=0.2
+2024-08-26 20:21:37,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.19 vs. limit=10.0
+2024-08-26 20:21:48,794 INFO [train.py:1114] (2/4) Epoch 11, batch 950, loss[loss=0.1944, simple_loss=0.2623, pruned_loss=0.04506, ctc_loss=0.09117, over 19493.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2786, pruned_loss=0.05362, ctc_loss=0.1002, over 3820822.53 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 16.0
+2024-08-26 20:21:54,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=137824.0, ans=0.125
+2024-08-26 20:21:55,404 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.468e+02 1.744e+02 2.017e+02 3.816e+02, threshold=3.488e+02, percent-clipped=2.0
+2024-08-26 20:22:28,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=138037.33333333334, ans=0.05
+2024-08-26 20:22:40,823 INFO [train.py:1114] (2/4) Epoch 11, batch 1000, loss[loss=0.2078, simple_loss=0.2758, pruned_loss=0.05162, ctc_loss=0.09148, over 19845.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.279, pruned_loss=0.05381, ctc_loss=0.1003, over 3816113.47 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0
+2024-08-26 20:22:53,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=138144.0, ans=0.0
+2024-08-26 20:22:58,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.87 vs. limit=15.0
+2024-08-26 20:23:02,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138197.33333333334, ans=0.125
+2024-08-26 20:23:05,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.46 vs. limit=6.0
+2024-08-26 20:23:07,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=138197.33333333334, ans=0.125
+2024-08-26 20:23:22,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.75 vs. limit=15.0
+2024-08-26 20:23:28,371 INFO [train.py:1114] (2/4) Epoch 11, batch 1050, loss[loss=0.2246, simple_loss=0.2956, pruned_loss=0.05685, ctc_loss=0.1, over 19836.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2789, pruned_loss=0.05379, ctc_loss=0.1003, over 3822419.89 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 16.0
+2024-08-26 20:23:29,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=138357.33333333334, ans=0.0
+2024-08-26 20:23:32,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=138357.33333333334, ans=0.0
+2024-08-26 20:23:34,921 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.363e+02 1.534e+02 1.839e+02 4.578e+02, threshold=3.069e+02, percent-clipped=1.0
+2024-08-26 20:23:35,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. limit=6.0
+2024-08-26 20:24:12,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=138410.66666666666, ans=0.0
+2024-08-26 20:24:35,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.09 vs. limit=15.0
+2024-08-26 20:24:43,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=138464.0, ans=0.125
+2024-08-26 20:24:51,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=138517.33333333334, ans=0.125
+2024-08-26 20:25:04,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138570.66666666666, ans=0.1
+2024-08-26 20:25:06,041 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 20:25:07,607 INFO [train.py:1114] (2/4) Epoch 11, batch 1100, loss[loss=0.1991, simple_loss=0.2691, pruned_loss=0.04605, ctc_loss=0.09277, over 19600.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2782, pruned_loss=0.05332, ctc_loss=0.0995, over 3829270.27 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0
+2024-08-26 20:25:14,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=138624.0, ans=0.09899494936611666
+2024-08-26 20:25:15,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=138624.0, ans=0.025
+2024-08-26 20:25:27,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=138730.66666666666, ans=0.125
+2024-08-26 20:25:30,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138730.66666666666, ans=0.125
+2024-08-26 20:25:31,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=138730.66666666666, ans=0.0
+2024-08-26 20:25:52,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.67 vs. limit=15.0
+2024-08-26 20:25:56,914 INFO [train.py:1114] (2/4) Epoch 11, batch 1150, loss[loss=0.1928, simple_loss=0.2688, pruned_loss=0.04243, ctc_loss=0.0798, over 19572.00 frames. ], tot_loss[loss=0.212, simple_loss=0.278, pruned_loss=0.05315, ctc_loss=0.09915, over 3828256.24 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0
+2024-08-26 20:25:59,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.68 vs. limit=22.5
+2024-08-26 20:26:03,585 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.470e+02 1.661e+02 1.952e+02 3.516e+02, threshold=3.323e+02, percent-clipped=2.0
+2024-08-26 20:26:04,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.51 vs. limit=15.0
+2024-08-26 20:26:31,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=139050.66666666666, ans=0.04949747468305833
+2024-08-26 20:26:33,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=139050.66666666666, ans=0.125
+2024-08-26 20:26:45,576 INFO [train.py:1114] (2/4) Epoch 11, batch 1200, loss[loss=0.2212, simple_loss=0.2892, pruned_loss=0.05551, ctc_loss=0.1057, over 19840.00 frames. ], tot_loss[loss=0.213, simple_loss=0.279, pruned_loss=0.05349, ctc_loss=0.09982, over 3823999.86 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0
+2024-08-26 20:26:46,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=139157.33333333334, ans=0.0
+2024-08-26 20:26:49,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=139157.33333333334, ans=0.125
+2024-08-26 20:26:55,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139210.66666666666, ans=0.125
+2024-08-26 20:27:21,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.31 vs. limit=10.0
+2024-08-26 20:27:41,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=139370.66666666666, ans=0.0
+2024-08-26 20:27:43,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=139370.66666666666, ans=0.025
+2024-08-26 20:27:44,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.72 vs. limit=15.0
+2024-08-26 20:28:18,702 INFO [train.py:1114] (2/4) Epoch 11, batch 1250, loss[loss=0.2242, simple_loss=0.2939, pruned_loss=0.05683, ctc_loss=0.1023, over 19537.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2792, pruned_loss=0.05344, ctc_loss=0.09954, over 3842558.34 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0
+2024-08-26 20:28:20,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.48 vs. limit=10.0
+2024-08-26 20:28:27,599 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.425e+02 1.545e+02 1.729e+02 3.064e+02, threshold=3.089e+02, percent-clipped=0.0
+2024-08-26 20:28:29,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=139477.33333333334, ans=0.02
+2024-08-26 20:28:41,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0
+2024-08-26 20:28:46,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=139530.66666666666, ans=0.125
+2024-08-26 20:29:02,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=139637.33333333334, ans=0.0
+2024-08-26 20:29:12,949 INFO [train.py:1114] (2/4) Epoch 11, batch 1300, loss[loss=0.215, simple_loss=0.2894, pruned_loss=0.05029, ctc_loss=0.1002, over 18926.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2786, pruned_loss=0.05301, ctc_loss=0.0989, over 3846410.19 frames. ], batch size: 76, lr: 1.36e-02, grad_scale: 32.0
+2024-08-26 20:29:19,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=139690.66666666666, ans=0.0
+2024-08-26 20:29:28,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139744.0, ans=0.125
+2024-08-26 20:33:35,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=139744.0, ans=0.125
+2024-08-26 20:33:38,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.43 vs. limit=22.5
+2024-08-26 20:35:30,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=15.0
+2024-08-26 20:35:43,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=139904.0, ans=0.125
+2024-08-26 20:35:47,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=139904.0, ans=0.2
+2024-08-26 20:35:49,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.77 vs. limit=6.0
+2024-08-26 20:35:52,050 INFO [train.py:1114] (2/4) Epoch 11, batch 1350, loss[loss=0.2151, simple_loss=0.2815, pruned_loss=0.05439, ctc_loss=0.1, over 19745.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2776, pruned_loss=0.0523, ctc_loss=0.09775, over 3857898.02 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 32.0
+2024-08-26 20:35:54,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. limit=10.0
+2024-08-26 20:35:58,551 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.441e+02 1.644e+02 1.919e+02 3.174e+02, threshold=3.287e+02, percent-clipped=1.0
+2024-08-26 20:36:23,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.69 vs. limit=15.0
+2024-08-26 20:36:25,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140117.33333333334, ans=0.125
+2024-08-26 20:36:26,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=140117.33333333334, ans=10.0
+2024-08-26 20:36:32,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=140170.66666666666, ans=0.125
+2024-08-26 20:36:41,175 INFO [train.py:1114] (2/4) Epoch 11, batch 1400, loss[loss=0.1957, simple_loss=0.2539, pruned_loss=0.04979, ctc_loss=0.09497, over 19688.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2776, pruned_loss=0.05235, ctc_loss=0.09795, over 3865014.61 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 32.0
+2024-08-26 20:36:51,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=140224.0, ans=0.125
+2024-08-26 20:36:54,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=140277.33333333334, ans=0.125
+2024-08-26 20:37:51,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=140437.33333333334, ans=0.04949747468305833
+2024-08-26 20:38:01,749 INFO [train.py:1114] (2/4) Epoch 11, batch 1450, loss[loss=0.2138, simple_loss=0.2857, pruned_loss=0.0515, ctc_loss=0.09708, over 19682.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2783, pruned_loss=0.05259, ctc_loss=0.09844, over 3862777.01 frames.
], batch size: 63, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:38:05,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=140490.66666666666, ans=0.5 +2024-08-26 20:38:06,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=140490.66666666666, ans=0.125 +2024-08-26 20:38:08,102 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.486e+02 1.636e+02 1.926e+02 3.321e+02, threshold=3.272e+02, percent-clipped=1.0 +2024-08-26 20:38:28,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140597.33333333334, ans=0.125 +2024-08-26 20:38:42,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140704.0, ans=0.0 +2024-08-26 20:38:50,495 INFO [train.py:1114] (2/4) Epoch 11, batch 1500, loss[loss=0.2264, simple_loss=0.303, pruned_loss=0.05427, ctc_loss=0.1031, over 19580.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2787, pruned_loss=0.05259, ctc_loss=0.09854, over 3861628.27 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:02,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140810.66666666666, ans=0.0 +2024-08-26 20:39:20,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=140917.33333333334, ans=22.5 +2024-08-26 20:39:31,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140970.66666666666, ans=0.125 +2024-08-26 20:39:38,996 INFO [train.py:1114] (2/4) Epoch 11, batch 1550, loss[loss=0.2533, simple_loss=0.3095, pruned_loss=0.07281, ctc_loss=0.1288, over 19619.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2786, pruned_loss=0.05297, ctc_loss=0.0991, over 3845590.94 frames. ], batch size: 60, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:39,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=141024.0, ans=0.2 +2024-08-26 20:39:42,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=141024.0, ans=0.125 +2024-08-26 20:39:45,244 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.401e+02 1.612e+02 1.919e+02 3.103e+02, threshold=3.225e+02, percent-clipped=0.0 +2024-08-26 20:39:46,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.whiten.whitening_limit, batch_count=141024.0, ans=15.0 +2024-08-26 20:40:08,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=141184.0, ans=0.0 +2024-08-26 20:40:12,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.48 vs. limit=12.0 +2024-08-26 20:40:29,937 INFO [train.py:1114] (2/4) Epoch 11, batch 1600, loss[loss=0.2394, simple_loss=0.3095, pruned_loss=0.06219, ctc_loss=0.1124, over 19841.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2788, pruned_loss=0.05325, ctc_loss=0.09968, over 3834471.70 frames. 
], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:40:31,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141290.66666666666, ans=0.1 +2024-08-26 20:40:46,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.39 vs. limit=15.0 +2024-08-26 20:40:48,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=141397.33333333334, ans=0.125 +2024-08-26 20:41:18,737 INFO [train.py:1114] (2/4) Epoch 11, batch 1650, loss[loss=0.2138, simple_loss=0.2807, pruned_loss=0.05367, ctc_loss=0.09907, over 19663.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2781, pruned_loss=0.05291, ctc_loss=0.09899, over 3830731.41 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:41:25,304 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.523e+02 1.726e+02 1.964e+02 3.202e+02, threshold=3.451e+02, percent-clipped=0.0 +2024-08-26 20:41:34,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.86 vs. limit=15.0 +2024-08-26 20:41:52,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.22 vs. limit=22.5 +2024-08-26 20:41:53,563 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.49 vs. limit=15.0 +2024-08-26 20:42:04,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141770.66666666666, ans=0.1 +2024-08-26 20:42:07,194 INFO [train.py:1114] (2/4) Epoch 11, batch 1700, loss[loss=0.1997, simple_loss=0.256, pruned_loss=0.05263, ctc_loss=0.09544, over 19670.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2778, pruned_loss=0.05266, ctc_loss=0.09863, over 3845271.98 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:42:31,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141877.33333333334, ans=0.1 +2024-08-26 20:42:34,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=141930.66666666666, ans=0.0 +2024-08-26 20:42:35,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0 +2024-08-26 20:42:42,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=141984.0, ans=0.125 +2024-08-26 20:42:58,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=142037.33333333334, ans=0.125 +2024-08-26 20:43:00,057 INFO [train.py:1114] (2/4) Epoch 11, batch 1750, loss[loss=0.1739, simple_loss=0.2381, pruned_loss=0.04075, ctc_loss=0.07031, over 19655.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2775, pruned_loss=0.05245, ctc_loss=0.09806, over 3850929.04 frames. 
], batch size: 45, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:06,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.441e+02 1.591e+02 1.781e+02 2.526e+02, threshold=3.183e+02, percent-clipped=0.0 +2024-08-26 20:43:18,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=142197.33333333334, ans=0.125 +2024-08-26 20:43:20,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-08-26 20:43:41,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142304.0, ans=0.0 +2024-08-26 20:43:41,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=142304.0, ans=0.125 +2024-08-26 20:43:50,406 INFO [train.py:1114] (2/4) Epoch 11, batch 1800, loss[loss=0.2156, simple_loss=0.2852, pruned_loss=0.05317, ctc_loss=0.09931, over 19597.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2774, pruned_loss=0.05218, ctc_loss=0.09767, over 3852758.36 frames. ], batch size: 55, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:51,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=142357.33333333334, ans=0.0 +2024-08-26 20:43:52,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=142357.33333333334, ans=0.04949747468305833 +2024-08-26 20:43:57,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=142357.33333333334, ans=0.125 +2024-08-26 20:44:01,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=142410.66666666666, ans=0.125 +2024-08-26 20:44:14,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=142464.0, ans=0.125 +2024-08-26 20:44:26,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=142517.33333333334, ans=0.125 +2024-08-26 20:44:40,937 INFO [train.py:1114] (2/4) Epoch 11, batch 1850, loss[loss=0.2172, simple_loss=0.2911, pruned_loss=0.05146, ctc_loss=0.1012, over 19561.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2769, pruned_loss=0.0519, ctc_loss=0.09719, over 3856960.27 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:44:47,989 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.442e+02 1.639e+02 2.043e+02 4.343e+02, threshold=3.277e+02, percent-clipped=6.0 +2024-08-26 20:44:51,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.72 vs. 
limit=22.5 +2024-08-26 20:44:55,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=142677.33333333334, ans=0.2 +2024-08-26 20:45:01,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=142730.66666666666, ans=0.125 +2024-08-26 20:45:04,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.44 vs. limit=15.0 +2024-08-26 20:45:15,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.36 vs. limit=22.5 +2024-08-26 20:45:16,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-08-26 20:45:19,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=142837.33333333334, ans=0.0 +2024-08-26 20:45:29,153 INFO [train.py:1114] (2/4) Epoch 11, batch 1900, loss[loss=0.2323, simple_loss=0.3049, pruned_loss=0.05829, ctc_loss=0.1079, over 19655.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2777, pruned_loss=0.05214, ctc_loss=0.09745, over 3861403.51 frames. ], batch size: 59, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:46:23,319 INFO [train.py:1114] (2/4) Epoch 11, batch 1950, loss[loss=0.2195, simple_loss=0.2826, pruned_loss=0.05798, ctc_loss=0.1012, over 19592.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2788, pruned_loss=0.05263, ctc_loss=0.098, over 3870394.31 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:47:24,642 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.500e+02 1.631e+02 1.894e+02 3.317e+02, threshold=3.262e+02, percent-clipped=1.0 +2024-08-26 20:47:29,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=143210.66666666666, ans=0.125 +2024-08-26 20:47:58,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=143264.0, ans=0.025 +2024-08-26 20:47:59,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.94 vs. limit=15.0 +2024-08-26 20:48:03,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.86 vs. limit=15.0 +2024-08-26 20:48:04,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=143264.0, ans=0.125 +2024-08-26 20:48:18,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=143317.33333333334, ans=0.05 +2024-08-26 20:48:33,061 INFO [train.py:1114] (2/4) Epoch 11, batch 2000, loss[loss=0.1757, simple_loss=0.2392, pruned_loss=0.04077, ctc_loss=0.07655, over 19610.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2792, pruned_loss=0.05296, ctc_loss=0.09869, over 3855528.82 frames. 
], batch size: 45, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:48:36,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=143424.0, ans=0.04949747468305833 +2024-08-26 20:48:38,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.83 vs. limit=15.0 +2024-08-26 20:48:59,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=143584.0, ans=0.125 +2024-08-26 20:49:38,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=143690.66666666666, ans=0.125 +2024-08-26 20:49:39,119 INFO [train.py:1114] (2/4) Epoch 11, batch 2050, loss[loss=0.198, simple_loss=0.2567, pruned_loss=0.05113, ctc_loss=0.09249, over 19726.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2781, pruned_loss=0.05279, ctc_loss=0.09839, over 3851246.06 frames. ], batch size: 47, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:49:39,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.25 vs. limit=10.0 +2024-08-26 20:49:47,238 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.448e+02 1.585e+02 1.933e+02 3.153e+02, threshold=3.170e+02, percent-clipped=0.0 +2024-08-26 20:49:50,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143744.0, ans=0.125 +2024-08-26 20:49:52,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=143744.0, ans=0.125 +2024-08-26 20:50:09,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=143797.33333333334, ans=0.0 +2024-08-26 20:50:18,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=143797.33333333334, ans=0.025 +2024-08-26 20:50:35,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=143904.0, ans=0.125 +2024-08-26 20:50:37,970 INFO [train.py:1114] (2/4) Epoch 11, batch 2100, loss[loss=0.2214, simple_loss=0.285, pruned_loss=0.05797, ctc_loss=0.1048, over 19773.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2775, pruned_loss=0.05235, ctc_loss=0.0978, over 3858179.99 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:50:38,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=143957.33333333334, ans=0.125 +2024-08-26 20:50:42,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143957.33333333334, ans=0.125 +2024-08-26 20:50:42,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=143957.33333333334, ans=0.09899494936611666 +2024-08-26 20:50:43,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.01 vs. 
limit=22.5 +2024-08-26 20:50:45,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=143957.33333333334, ans=0.125 +2024-08-26 20:50:49,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0 +2024-08-26 20:50:52,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=144010.66666666666, ans=0.0 +2024-08-26 20:50:55,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144064.0, ans=0.0 +2024-08-26 20:51:01,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144064.0, ans=0.125 +2024-08-26 20:51:07,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=144117.33333333334, ans=0.0 +2024-08-26 20:51:08,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144117.33333333334, ans=0.1 +2024-08-26 20:51:22,999 INFO [train.py:1114] (2/4) Epoch 11, batch 2150, loss[loss=0.1974, simple_loss=0.2718, pruned_loss=0.04551, ctc_loss=0.07994, over 19856.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2767, pruned_loss=0.05202, ctc_loss=0.097, over 3869964.16 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:51:30,833 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.485e+02 1.672e+02 2.037e+02 4.338e+02, threshold=3.345e+02, percent-clipped=7.0 +2024-08-26 20:51:58,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144437.33333333334, ans=0.125 +2024-08-26 20:51:59,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=144437.33333333334, ans=0.125 +2024-08-26 20:52:00,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=144437.33333333334, ans=0.125 +2024-08-26 20:52:06,896 INFO [train.py:1114] (2/4) Epoch 11, batch 2200, loss[loss=0.2294, simple_loss=0.29, pruned_loss=0.06216, ctc_loss=0.1113, over 19595.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2767, pruned_loss=0.05198, ctc_loss=0.09697, over 3868361.65 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:52:07,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=144490.66666666666, ans=0.125 +2024-08-26 20:52:19,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=144544.0, ans=0.125 +2024-08-26 20:52:20,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=144544.0, ans=0.0 +2024-08-26 20:52:25,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.51 vs. 
limit=10.0 +2024-08-26 20:52:35,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=144650.66666666666, ans=0.0 +2024-08-26 20:52:39,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=144650.66666666666, ans=0.125 +2024-08-26 20:52:45,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=144704.0, ans=0.05 +2024-08-26 20:52:50,853 INFO [train.py:1114] (2/4) Epoch 11, batch 2250, loss[loss=0.2195, simple_loss=0.2894, pruned_loss=0.05346, ctc_loss=0.1068, over 19626.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.277, pruned_loss=0.05199, ctc_loss=0.09697, over 3867766.00 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:52:58,757 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.461e+02 1.628e+02 1.934e+02 8.673e+02, threshold=3.256e+02, percent-clipped=2.0 +2024-08-26 20:53:04,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=144810.66666666666, ans=0.125 +2024-08-26 20:53:11,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=144864.0, ans=0.125 +2024-08-26 20:53:18,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=144917.33333333334, ans=0.2 +2024-08-26 20:53:24,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=144917.33333333334, ans=0.125 +2024-08-26 20:53:35,351 INFO [train.py:1114] (2/4) Epoch 11, batch 2300, loss[loss=0.1907, simple_loss=0.263, pruned_loss=0.04333, ctc_loss=0.07938, over 19502.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2765, pruned_loss=0.05215, ctc_loss=0.09722, over 3860445.86 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:53:39,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=145024.0, ans=0.0 +2024-08-26 20:53:49,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145077.33333333334, ans=0.125 +2024-08-26 20:54:07,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-08-26 20:54:08,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.66 vs. limit=15.0 +2024-08-26 20:54:20,122 INFO [train.py:1114] (2/4) Epoch 11, batch 2350, loss[loss=0.2478, simple_loss=0.2999, pruned_loss=0.07204, ctc_loss=0.1292, over 19690.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2768, pruned_loss=0.05236, ctc_loss=0.09752, over 3862862.35 frames. 
], batch size: 63, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:54:20,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=145290.66666666666, ans=0.125 +2024-08-26 20:54:21,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=145290.66666666666, ans=0.125 +2024-08-26 20:54:28,777 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.482e+02 1.673e+02 1.901e+02 2.829e+02, threshold=3.345e+02, percent-clipped=0.0 +2024-08-26 20:54:35,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=12.0 +2024-08-26 20:54:42,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=145397.33333333334, ans=0.09899494936611666 +2024-08-26 20:55:04,263 INFO [train.py:1114] (2/4) Epoch 11, batch 2400, loss[loss=0.2394, simple_loss=0.2983, pruned_loss=0.06532, ctc_loss=0.1247, over 19284.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2796, pruned_loss=0.05334, ctc_loss=0.09937, over 3857542.92 frames. ], batch size: 71, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:13,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=145610.66666666666, ans=0.0 +2024-08-26 20:55:18,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=145610.66666666666, ans=0.2 +2024-08-26 20:55:24,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=145664.0, ans=0.125 +2024-08-26 20:55:25,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145664.0, ans=0.1 +2024-08-26 20:55:38,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=145717.33333333334, ans=0.2 +2024-08-26 20:55:49,234 INFO [train.py:1114] (2/4) Epoch 11, batch 2450, loss[loss=0.3079, simple_loss=0.3303, pruned_loss=0.1033, ctc_loss=0.1977, over 12991.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2837, pruned_loss=0.05653, ctc_loss=0.1053, over 3729308.10 frames. ], batch size: 142, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:49,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=15.0 +2024-08-26 20:55:55,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=11.58 vs. limit=12.0 +2024-08-26 20:55:58,187 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.577e+02 1.748e+02 1.957e+02 3.323e+02, threshold=3.496e+02, percent-clipped=0.0 +2024-08-26 20:55:58,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=145877.33333333334, ans=0.0 +2024-08-26 20:56:11,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.47 vs. limit=15.0 +2024-08-26 20:56:17,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.94 vs. 
limit=15.0 +2024-08-26 21:01:32,167 INFO [train.py:1114] (2/4) Epoch 12, batch 0, loss[loss=0.1935, simple_loss=0.2576, pruned_loss=0.04745, ctc_loss=0.086, over 19829.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2576, pruned_loss=0.04745, ctc_loss=0.086, over 19829.00 frames. ], batch size: 49, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:01:32,167 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 21:01:49,719 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8305, 5.0661, 5.6313, 5.3073], device='cuda:2') +2024-08-26 21:01:52,240 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.1812, simple_loss=0.274, pruned_loss=0.03284, ctc_loss=0.05683, over 944034.00 frames. +2024-08-26 21:01:52,241 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB +2024-08-26 21:02:26,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=146138.66666666666, ans=0.2 +2024-08-26 21:02:38,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=146192.0, ans=0.025 +2024-08-26 21:02:42,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=146245.33333333334, ans=0.125 +2024-08-26 21:02:49,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.57 vs. limit=22.5 +2024-08-26 21:02:49,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=146298.66666666666, ans=0.0 +2024-08-26 21:02:50,488 INFO [train.py:1114] (2/4) Epoch 12, batch 50, loss[loss=0.1753, simple_loss=0.2465, pruned_loss=0.03789, ctc_loss=0.07053, over 19714.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2823, pruned_loss=0.05501, ctc_loss=0.1029, over 844495.10 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:02:54,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.55 vs. limit=10.0 +2024-08-26 21:02:58,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=146352.0, ans=0.125 +2024-08-26 21:03:03,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=146352.0, ans=0.025 +2024-08-26 21:03:11,148 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.556e+02 1.742e+02 1.990e+02 3.045e+02, threshold=3.484e+02, percent-clipped=0.0 +2024-08-26 21:03:18,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=146458.66666666666, ans=0.125 +2024-08-26 21:03:31,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=146512.0, ans=0.05 +2024-08-26 21:04:09,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146512.0, ans=0.1 +2024-08-26 21:04:10,879 INFO [train.py:1114] (2/4) Epoch 12, batch 100, loss[loss=0.19, simple_loss=0.2629, pruned_loss=0.04257, ctc_loss=0.07992, over 19727.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2819, pruned_loss=0.05383, ctc_loss=0.101, over 1498946.70 frames. 
], batch size: 51, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:04:13,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0 +2024-08-26 21:04:43,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=146672.0, ans=0.1 +2024-08-26 21:04:53,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146725.33333333334, ans=0.1 +2024-08-26 21:04:54,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=146725.33333333334, ans=0.5 +2024-08-26 21:05:05,130 INFO [train.py:1114] (2/4) Epoch 12, batch 150, loss[loss=0.1912, simple_loss=0.2548, pruned_loss=0.04681, ctc_loss=0.0849, over 19733.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2783, pruned_loss=0.05235, ctc_loss=0.09817, over 2027009.71 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:05,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=15.0 +2024-08-26 21:05:25,625 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.421e+02 1.535e+02 1.745e+02 2.429e+02, threshold=3.070e+02, percent-clipped=0.0 +2024-08-26 21:05:29,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=146938.66666666666, ans=0.2 +2024-08-26 21:05:35,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146992.0, ans=0.125 +2024-08-26 21:05:43,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147045.33333333334, ans=0.1 +2024-08-26 21:05:52,008 INFO [train.py:1114] (2/4) Epoch 12, batch 200, loss[loss=0.2096, simple_loss=0.2792, pruned_loss=0.05065, ctc_loss=0.09693, over 18276.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2762, pruned_loss=0.0517, ctc_loss=0.09673, over 2434759.55 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:53,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=147098.66666666666, ans=15.0 +2024-08-26 21:06:07,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-08-26 21:06:20,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147258.66666666666, ans=0.1 +2024-08-26 21:06:33,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.41 vs. limit=15.0 +2024-08-26 21:06:38,642 INFO [train.py:1114] (2/4) Epoch 12, batch 250, loss[loss=0.2116, simple_loss=0.2838, pruned_loss=0.05129, ctc_loss=0.09196, over 19430.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2762, pruned_loss=0.05129, ctc_loss=0.09588, over 2755371.50 frames. 
], batch size: 67, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:06:58,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=147472.0, ans=0.05 +2024-08-26 21:06:59,406 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.414e+02 1.495e+02 1.680e+02 4.024e+02, threshold=2.991e+02, percent-clipped=1.0 +2024-08-26 21:07:06,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=147525.33333333334, ans=0.0 +2024-08-26 21:07:26,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=147578.66666666666, ans=0.125 +2024-08-26 21:07:28,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=147578.66666666666, ans=0.125 +2024-08-26 21:07:35,053 INFO [train.py:1114] (2/4) Epoch 12, batch 300, loss[loss=0.2086, simple_loss=0.2808, pruned_loss=0.04953, ctc_loss=0.09336, over 19497.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2751, pruned_loss=0.05076, ctc_loss=0.09476, over 3001501.23 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:07:37,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=147632.0, ans=0.125 +2024-08-26 21:07:58,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=147738.66666666666, ans=10.0 +2024-08-26 21:08:04,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=147792.0, ans=0.125 +2024-08-26 21:08:09,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.71 vs. limit=22.5 +2024-08-26 21:08:15,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=147845.33333333334, ans=0.025 +2024-08-26 21:08:30,175 INFO [train.py:1114] (2/4) Epoch 12, batch 350, loss[loss=0.1807, simple_loss=0.2548, pruned_loss=0.03824, ctc_loss=0.07544, over 19756.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2761, pruned_loss=0.05104, ctc_loss=0.09536, over 3191725.20 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:08:33,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=147898.66666666666, ans=0.025 +2024-08-26 21:12:10,844 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.537e+02 1.863e+02 2.287e+02 4.040e+02, threshold=3.725e+02, percent-clipped=5.0 +2024-08-26 21:12:18,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=148058.66666666666, ans=0.04949747468305833 +2024-08-26 21:12:23,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-26 21:13:47,513 INFO [train.py:1114] (2/4) Epoch 12, batch 400, loss[loss=0.2149, simple_loss=0.281, pruned_loss=0.05348, ctc_loss=0.1046, over 19493.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2756, pruned_loss=0.05081, ctc_loss=0.09511, over 3343684.26 frames. 
], batch size: 54, lr: 1.27e-02, grad_scale: 32.0 +2024-08-26 21:13:48,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.76 vs. limit=15.0 +2024-08-26 21:14:06,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.86 vs. limit=8.0 +2024-08-26 21:14:14,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148325.33333333334, ans=0.125 +2024-08-26 21:14:34,562 INFO [train.py:1114] (2/4) Epoch 12, batch 450, loss[loss=0.2038, simple_loss=0.2782, pruned_loss=0.04658, ctc_loss=0.09081, over 19628.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2754, pruned_loss=0.05078, ctc_loss=0.09529, over 3452496.09 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:15:05,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.502e+02 1.695e+02 2.071e+02 2.894e+02, threshold=3.390e+02, percent-clipped=0.0 +2024-08-26 21:15:19,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=148592.0, ans=0.0 +2024-08-26 21:15:28,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=148645.33333333334, ans=0.0 +2024-08-26 21:15:31,501 INFO [train.py:1114] (2/4) Epoch 12, batch 500, loss[loss=0.2189, simple_loss=0.2828, pruned_loss=0.05652, ctc_loss=0.105, over 19702.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2744, pruned_loss=0.05034, ctc_loss=0.09438, over 3547344.25 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:15:37,047 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:15:54,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=148805.33333333334, ans=0.125 +2024-08-26 21:16:10,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=148912.0, ans=0.125 +2024-08-26 21:16:18,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.75 vs. limit=15.0 +2024-08-26 21:16:19,316 INFO [train.py:1114] (2/4) Epoch 12, batch 550, loss[loss=0.2239, simple_loss=0.292, pruned_loss=0.05672, ctc_loss=0.1059, over 19297.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2748, pruned_loss=0.05042, ctc_loss=0.09461, over 3608911.18 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:16:25,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.18 vs. 
limit=15.0 +2024-08-26 21:16:27,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=149018.66666666666, ans=0.0 +2024-08-26 21:16:38,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=149072.0, ans=0.125 +2024-08-26 21:16:39,714 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.448e+02 1.617e+02 1.906e+02 3.977e+02, threshold=3.234e+02, percent-clipped=1.0 +2024-08-26 21:16:40,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149072.0, ans=0.125 +2024-08-26 21:17:34,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149178.66666666666, ans=0.1 +2024-08-26 21:17:45,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=149178.66666666666, ans=0.125 +2024-08-26 21:17:47,333 INFO [train.py:1114] (2/4) Epoch 12, batch 600, loss[loss=0.2052, simple_loss=0.2766, pruned_loss=0.04892, ctc_loss=0.08993, over 19397.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2752, pruned_loss=0.05046, ctc_loss=0.09466, over 3665652.38 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:18:09,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149338.66666666666, ans=0.1 +2024-08-26 21:18:14,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.65 vs. limit=22.5 +2024-08-26 21:18:42,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149445.33333333334, ans=0.0 +2024-08-26 21:18:46,464 INFO [train.py:1114] (2/4) Epoch 12, batch 650, loss[loss=0.2182, simple_loss=0.2865, pruned_loss=0.05579, ctc_loss=0.09599, over 19779.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2746, pruned_loss=0.05017, ctc_loss=0.09403, over 3716151.89 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:18:59,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=149498.66666666666, ans=0.025 +2024-08-26 21:19:15,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=12.0 +2024-08-26 21:19:16,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.436e+02 1.583e+02 1.844e+02 2.674e+02, threshold=3.165e+02, percent-clipped=0.0 +2024-08-26 21:19:31,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=149658.66666666666, ans=0.125 +2024-08-26 21:19:38,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=149712.0, ans=0.125 +2024-08-26 21:19:40,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=149712.0, ans=0.0 +2024-08-26 21:19:45,033 INFO [train.py:1114] (2/4) Epoch 12, batch 700, loss[loss=0.1916, simple_loss=0.2626, pruned_loss=0.04398, ctc_loss=0.08172, over 19718.00 frames. 
], tot_loss[loss=0.2065, simple_loss=0.2749, pruned_loss=0.05019, ctc_loss=0.09403, over 3749713.72 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:19:54,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=149818.66666666666, ans=0.0 +2024-08-26 21:20:06,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=149872.0, ans=0.0 +2024-08-26 21:20:06,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149872.0, ans=0.1 +2024-08-26 21:20:19,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149925.33333333334, ans=0.125 +2024-08-26 21:20:24,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=149978.66666666666, ans=0.2 +2024-08-26 21:20:26,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=149978.66666666666, ans=0.0 +2024-08-26 21:20:31,165 INFO [train.py:1114] (2/4) Epoch 12, batch 750, loss[loss=0.1952, simple_loss=0.2686, pruned_loss=0.04358, ctc_loss=0.08674, over 19501.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2747, pruned_loss=0.05027, ctc_loss=0.09413, over 3775081.97 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:20:31,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=150032.0, ans=0.07 +2024-08-26 21:20:34,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150032.0, ans=0.125 +2024-08-26 21:20:37,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=150032.0, ans=0.125 +2024-08-26 21:20:37,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=150032.0, ans=0.2 +2024-08-26 21:20:43,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150085.33333333334, ans=0.125 +2024-08-26 21:20:51,908 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.592e+02 1.843e+02 2.247e+02 3.979e+02, threshold=3.686e+02, percent-clipped=6.0 +2024-08-26 21:20:54,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150138.66666666666, ans=0.125 +2024-08-26 21:20:56,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=150138.66666666666, ans=0.09899494936611666 +2024-08-26 21:20:58,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150192.0, ans=0.125 +2024-08-26 21:21:10,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.48 vs. limit=22.5 +2024-08-26 21:21:22,388 INFO [train.py:1114] (2/4) Epoch 12, batch 800, loss[loss=0.1854, simple_loss=0.2504, pruned_loss=0.04391, ctc_loss=0.08153, over 19432.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2747, pruned_loss=0.05036, ctc_loss=0.09429, over 3796040.55 frames. 
[Training log (continued): zipformer train.py, rank 2/4, 2024-08-26 21:21–22:02.]

[Epoch 12, batches 850–2450 — lr 1.26e-02 → 1.22e-02, grad_scale 8.0–32.0: tot_loss holds a 0.2045–0.2087 band from batch 850 (0.2066) through batch 2400 (0.2076), then jumps to 0.2138 at batch 2450 after an outlier batch of 140 utterances (per-batch loss 0.2781 over 13609 frames).]

[Epoch 13 opens at 21:51:14 — batch 0, tot_loss 0.1984, lr 1.18e-02 — followed by a validation pass:]
+2024-08-26 21:51:28,894 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1795, simple_loss=0.2723, pruned_loss=0.03226, ctc_loss=0.05568, over 944034.00 frames.
+2024-08-26 21:51:28,895 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB

[Epoch 13, batches 50–600 — lr 1.17e-02: tot_loss eases from 0.2077 at batch 50 to roughly 0.202 over batches 400–600 (0.2028 at batch 600, where the excerpt breaks off).]

[Interleaved between the per-batch summaries, every few seconds: scaling.py:214 ScheduledFloat values (attention/conv/ff skip rates, balancer probabilities, dropout_p, bypass scales, all keyed by batch_count), scaling.py:1024 Whitening metrics checked against their limits, scaling.py:1120 WithLoss loss-sums (all 0.000e+00), one zipformer.py:1858 attention-weights-entropy dump during validation, and optim.py:487 Clipping_scale=2.0 warnings reporting grad-norm quartiles with thresholds between 3.034e+02 and 3.714e+02 and percent-clipped between 0.0 and 3.0.]
], batch size: 67, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:02:18,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=12.0 +2024-08-26 22:02:20,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=162560.0, ans=0.0 +2024-08-26 22:02:29,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=162613.33333333334, ans=0.125 +2024-08-26 22:02:46,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=162666.66666666666, ans=0.025 +2024-08-26 22:02:50,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=162720.0, ans=0.125 +2024-08-26 22:02:51,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.38 vs. limit=15.0 +2024-08-26 22:02:53,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162720.0, ans=0.1 +2024-08-26 22:02:58,297 INFO [train.py:1114] (2/4) Epoch 13, batch 650, loss[loss=0.1997, simple_loss=0.2726, pruned_loss=0.04594, ctc_loss=0.08709, over 19758.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2718, pruned_loss=0.04814, ctc_loss=0.09005, over 3715505.86 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:03:31,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162826.66666666666, ans=0.1 +2024-08-26 22:03:36,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=162880.0, ans=0.125 +2024-08-26 22:03:54,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=162933.33333333334, ans=0.05 +2024-08-26 22:03:55,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=162933.33333333334, ans=0.025 +2024-08-26 22:03:57,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.372e+02 1.512e+02 1.802e+02 3.637e+02, threshold=3.024e+02, percent-clipped=1.0 +2024-08-26 22:04:01,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.80 vs. limit=15.0 +2024-08-26 22:04:09,861 INFO [train.py:1114] (2/4) Epoch 13, batch 700, loss[loss=0.1875, simple_loss=0.2588, pruned_loss=0.04145, ctc_loss=0.08312, over 19719.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2719, pruned_loss=0.04813, ctc_loss=0.0898, over 3747011.77 frames. 
], batch size: 51, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:04:14,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=163040.0, ans=0.125 +2024-08-26 22:04:14,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=163040.0, ans=0.0 +2024-08-26 22:04:19,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163093.33333333334, ans=0.1 +2024-08-26 22:04:21,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=163093.33333333334, ans=0.125 +2024-08-26 22:04:21,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.59 vs. limit=15.0 +2024-08-26 22:04:38,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.25 vs. limit=22.5 +2024-08-26 22:04:48,292 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:04:50,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=163200.0, ans=0.0 +2024-08-26 22:04:50,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=163200.0, ans=0.2 +2024-08-26 22:05:02,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.74 vs. limit=15.0 +2024-08-26 22:05:04,310 INFO [train.py:1114] (2/4) Epoch 13, batch 750, loss[loss=0.2118, simple_loss=0.2838, pruned_loss=0.0509, ctc_loss=0.09492, over 19490.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2719, pruned_loss=0.04848, ctc_loss=0.09041, over 3773780.70 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:05:05,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=163306.66666666666, ans=0.025 +2024-08-26 22:05:11,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0 +2024-08-26 22:05:17,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163360.0, ans=0.125 +2024-08-26 22:05:20,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.80 vs. 
limit=5.0 +2024-08-26 22:05:31,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163413.33333333334, ans=0.125 +2024-08-26 22:05:41,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.233e+02 1.560e+02 1.959e+02 2.402e+02 3.823e+02, threshold=3.919e+02, percent-clipped=10.0 +2024-08-26 22:05:43,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=163520.0, ans=0.0 +2024-08-26 22:05:56,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=163573.33333333334, ans=0.125 +2024-08-26 22:05:56,845 INFO [train.py:1114] (2/4) Epoch 13, batch 800, loss[loss=0.2, simple_loss=0.261, pruned_loss=0.05029, ctc_loss=0.09623, over 19410.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2718, pruned_loss=0.04855, ctc_loss=0.09039, over 3794107.70 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 22:05:57,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.36 vs. limit=12.0 +2024-08-26 22:06:01,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=163573.33333333334, ans=0.125 +2024-08-26 22:06:12,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=163626.66666666666, ans=0.125 +2024-08-26 22:06:15,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163680.0, ans=0.1 +2024-08-26 22:06:37,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=163733.33333333334, ans=0.0 +2024-08-26 22:06:45,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.11 vs. limit=5.0 +2024-08-26 22:06:48,226 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.87 vs. limit=15.0 +2024-08-26 22:06:54,791 INFO [train.py:1114] (2/4) Epoch 13, batch 850, loss[loss=0.2325, simple_loss=0.3018, pruned_loss=0.05903, ctc_loss=0.1131, over 19639.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2718, pruned_loss=0.04856, ctc_loss=0.09056, over 3813447.23 frames. ], batch size: 59, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:06:57,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163840.0, ans=0.1 +2024-08-26 22:06:57,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.80 vs. limit=15.0 +2024-08-26 22:07:01,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=163840.0, ans=0.025 +2024-08-26 22:07:17,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=163946.66666666666, ans=10.0 +2024-08-26 22:07:36,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. 
limit=15.0 +2024-08-26 22:07:37,699 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.442e+02 1.756e+02 2.038e+02 3.459e+02, threshold=3.512e+02, percent-clipped=0.0 +2024-08-26 22:07:43,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=164053.33333333334, ans=0.125 +2024-08-26 22:07:50,319 INFO [train.py:1114] (2/4) Epoch 13, batch 900, loss[loss=0.1752, simple_loss=0.2476, pruned_loss=0.03771, ctc_loss=0.06852, over 19813.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.272, pruned_loss=0.0489, ctc_loss=0.09111, over 3818229.55 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:07:53,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.95 vs. limit=15.0 +2024-08-26 22:08:02,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=164160.0, ans=0.0 +2024-08-26 22:08:31,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0 +2024-08-26 22:08:33,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-08-26 22:08:34,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=164320.0, ans=0.0 +2024-08-26 22:08:40,773 INFO [train.py:1114] (2/4) Epoch 13, batch 950, loss[loss=0.1945, simple_loss=0.2576, pruned_loss=0.04838, ctc_loss=0.08677, over 19505.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2717, pruned_loss=0.04875, ctc_loss=0.09103, over 3818816.00 frames. 
], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:08:42,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=164373.33333333334, ans=22.5 +2024-08-26 22:08:42,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=164373.33333333334, ans=0.0 +2024-08-26 22:09:04,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164480.0, ans=0.125 +2024-08-26 22:09:05,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=164480.0, ans=0.125 +2024-08-26 22:09:17,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164533.33333333334, ans=0.1 +2024-08-26 22:09:20,509 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.559e+02 1.935e+02 2.172e+02 5.830e+02, threshold=3.869e+02, percent-clipped=1.0 +2024-08-26 22:09:22,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164586.66666666666, ans=0.1 +2024-08-26 22:09:24,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=164586.66666666666, ans=0.2 +2024-08-26 22:09:28,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=164640.0, ans=0.0 +2024-08-26 22:09:29,545 INFO [train.py:1114] (2/4) Epoch 13, batch 1000, loss[loss=0.1875, simple_loss=0.2635, pruned_loss=0.04058, ctc_loss=0.07584, over 19842.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2727, pruned_loss=0.04891, ctc_loss=0.09137, over 3814582.96 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:09:39,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=164693.33333333334, ans=0.04949747468305833 +2024-08-26 22:09:41,864 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:09:47,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=164746.66666666666, ans=0.125 +2024-08-26 22:09:49,267 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:09:55,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164746.66666666666, ans=0.0 +2024-08-26 22:09:58,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-08-26 22:10:00,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=164800.0, ans=0.125 +2024-08-26 22:10:19,109 INFO [train.py:1114] (2/4) Epoch 13, batch 1050, loss[loss=0.2027, simple_loss=0.2837, pruned_loss=0.04424, ctc_loss=0.08294, over 19838.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2719, pruned_loss=0.0488, ctc_loss=0.09105, over 3821495.00 frames. 
], batch size: 57, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:10:28,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=164960.0, ans=0.0 +2024-08-26 22:10:33,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.32 vs. limit=15.0 +2024-08-26 22:10:52,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=165066.66666666666, ans=0.125 +2024-08-26 22:10:53,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165066.66666666666, ans=0.1 +2024-08-26 22:10:55,507 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.353e+02 1.566e+02 1.889e+02 2.686e+02, threshold=3.131e+02, percent-clipped=0.0 +2024-08-26 22:11:02,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=165120.0, ans=0.125 +2024-08-26 22:11:06,699 INFO [train.py:1114] (2/4) Epoch 13, batch 1100, loss[loss=0.2017, simple_loss=0.2717, pruned_loss=0.04809, ctc_loss=0.08858, over 19589.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2715, pruned_loss=0.0484, ctc_loss=0.09053, over 3830301.92 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:11:08,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-08-26 22:11:19,654 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:11:20,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=165226.66666666666, ans=0.0 +2024-08-26 22:11:36,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=165333.33333333334, ans=0.125 +2024-08-26 22:11:55,418 INFO [train.py:1114] (2/4) Epoch 13, batch 1150, loss[loss=0.19, simple_loss=0.2612, pruned_loss=0.0433, ctc_loss=0.08072, over 19614.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2709, pruned_loss=0.04824, ctc_loss=0.09021, over 3830394.96 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 16.0 +2024-08-26 22:12:02,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=165440.0, ans=0.0 +2024-08-26 22:12:04,164 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:12:05,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=165493.33333333334, ans=0.2 +2024-08-26 22:12:09,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=165493.33333333334, ans=0.0 +2024-08-26 22:12:14,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.68 vs. 
limit=15.0 +2024-08-26 22:12:26,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=165600.0, ans=0.025 +2024-08-26 22:12:34,670 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.454e+02 1.639e+02 1.902e+02 3.180e+02, threshold=3.277e+02, percent-clipped=1.0 +2024-08-26 22:12:43,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165706.66666666666, ans=0.1 +2024-08-26 22:12:43,813 INFO [train.py:1114] (2/4) Epoch 13, batch 1200, loss[loss=0.1965, simple_loss=0.2691, pruned_loss=0.04463, ctc_loss=0.08642, over 19834.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2715, pruned_loss=0.04843, ctc_loss=0.09053, over 3826231.54 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:12:44,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=165706.66666666666, ans=0.125 +2024-08-26 22:12:54,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=165760.0, ans=0.125 +2024-08-26 22:13:01,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0 +2024-08-26 22:13:05,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=165813.33333333334, ans=0.07 +2024-08-26 22:13:08,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=165813.33333333334, ans=0.125 +2024-08-26 22:13:32,334 INFO [train.py:1114] (2/4) Epoch 13, batch 1250, loss[loss=0.2262, simple_loss=0.2886, pruned_loss=0.06029, ctc_loss=0.1082, over 19497.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2722, pruned_loss=0.04858, ctc_loss=0.09076, over 3844509.72 frames. ], batch size: 61, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:13:47,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=166026.66666666666, ans=0.125 +2024-08-26 22:13:49,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.73 vs. limit=10.0 +2024-08-26 22:14:11,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.418e+02 1.637e+02 2.002e+02 4.206e+02, threshold=3.274e+02, percent-clipped=1.0 +2024-08-26 22:14:22,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=166240.0, ans=0.2 +2024-08-26 22:14:23,439 INFO [train.py:1114] (2/4) Epoch 13, batch 1300, loss[loss=0.2187, simple_loss=0.2841, pruned_loss=0.05674, ctc_loss=0.09961, over 18884.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.271, pruned_loss=0.04813, ctc_loss=0.08993, over 3848264.69 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:14:31,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166240.0, ans=0.1 +2024-08-26 22:14:31,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.44 vs. 
limit=15.0 +2024-08-26 22:14:35,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=166293.33333333334, ans=0.125 +2024-08-26 22:14:36,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=166293.33333333334, ans=0.02 +2024-08-26 22:14:40,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=166293.33333333334, ans=0.2 +2024-08-26 22:14:50,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-08-26 22:14:57,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=166400.0, ans=0.125 +2024-08-26 22:14:57,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=166400.0, ans=0.125 +2024-08-26 22:15:09,554 INFO [train.py:1114] (2/4) Epoch 13, batch 1350, loss[loss=0.1941, simple_loss=0.2695, pruned_loss=0.04389, ctc_loss=0.07745, over 19774.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.271, pruned_loss=0.04804, ctc_loss=0.08961, over 3859343.46 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:15:10,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=166506.66666666666, ans=0.125 +2024-08-26 22:15:15,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=166506.66666666666, ans=0.125 +2024-08-26 22:15:45,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=166666.66666666666, ans=0.035 +2024-08-26 22:15:47,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=166666.66666666666, ans=0.2 +2024-08-26 22:15:50,043 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.605e+02 1.958e+02 2.747e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-26 22:15:52,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166720.0, ans=0.1 +2024-08-26 22:15:59,143 INFO [train.py:1114] (2/4) Epoch 13, batch 1400, loss[loss=0.196, simple_loss=0.2583, pruned_loss=0.04875, ctc_loss=0.09054, over 19667.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2709, pruned_loss=0.04803, ctc_loss=0.08946, over 3865710.98 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:16:23,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=166880.0, ans=0.125 +2024-08-26 22:16:31,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0 +2024-08-26 22:16:36,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. 
limit=15.0 +2024-08-26 22:16:39,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=166986.66666666666, ans=0.2 +2024-08-26 22:16:45,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166986.66666666666, ans=0.0 +2024-08-26 22:16:47,827 INFO [train.py:1114] (2/4) Epoch 13, batch 1450, loss[loss=0.2174, simple_loss=0.2839, pruned_loss=0.05513, ctc_loss=0.1015, over 19671.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2715, pruned_loss=0.04826, ctc_loss=0.08979, over 3863673.82 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:16:50,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=167040.0, ans=0.0 +2024-08-26 22:17:09,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=167146.66666666666, ans=0.2 +2024-08-26 22:17:10,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=167146.66666666666, ans=0.0 +2024-08-26 22:17:15,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167146.66666666666, ans=0.125 +2024-08-26 22:17:22,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=167200.0, ans=22.5 +2024-08-26 22:17:27,076 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.434e+02 1.640e+02 1.966e+02 4.010e+02, threshold=3.281e+02, percent-clipped=1.0 +2024-08-26 22:17:32,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-08-26 22:17:36,402 INFO [train.py:1114] (2/4) Epoch 13, batch 1500, loss[loss=0.2035, simple_loss=0.2782, pruned_loss=0.04716, ctc_loss=0.08646, over 19606.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2721, pruned_loss=0.04841, ctc_loss=0.09012, over 3862842.30 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:17:48,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167360.0, ans=0.1 +2024-08-26 22:17:55,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=167413.33333333334, ans=0.125 +2024-08-26 22:18:06,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=167466.66666666666, ans=0.025 +2024-08-26 22:18:14,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0 +2024-08-26 22:18:26,620 INFO [train.py:1114] (2/4) Epoch 13, batch 1550, loss[loss=0.2343, simple_loss=0.2996, pruned_loss=0.06298, ctc_loss=0.1077, over 19635.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2725, pruned_loss=0.04876, ctc_loss=0.09089, over 3848410.62 frames. 
], batch size: 60, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:18:40,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=167626.66666666666, ans=0.125 +2024-08-26 22:19:04,514 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.500e+02 1.731e+02 2.118e+02 3.338e+02, threshold=3.463e+02, percent-clipped=1.0 +2024-08-26 22:19:05,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=167786.66666666666, ans=0.2 +2024-08-26 22:19:08,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.27 vs. limit=10.0 +2024-08-26 22:19:12,910 INFO [train.py:1114] (2/4) Epoch 13, batch 1600, loss[loss=0.1988, simple_loss=0.2798, pruned_loss=0.04295, ctc_loss=0.07989, over 19844.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2724, pruned_loss=0.04873, ctc_loss=0.09076, over 3837793.79 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:19:16,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=167840.0, ans=0.5 +2024-08-26 22:19:35,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=167946.66666666666, ans=0.0 +2024-08-26 22:20:12,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=168053.33333333334, ans=0.125 +2024-08-26 22:20:19,729 INFO [train.py:1114] (2/4) Epoch 13, batch 1650, loss[loss=0.1987, simple_loss=0.2703, pruned_loss=0.04554, ctc_loss=0.09003, over 19668.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2721, pruned_loss=0.04878, ctc_loss=0.09086, over 3833703.32 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0 +2024-08-26 22:20:29,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=168160.0, ans=0.025 +2024-08-26 22:20:34,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=168160.0, ans=0.125 +2024-08-26 22:20:53,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=168266.66666666666, ans=0.125 +2024-08-26 22:20:57,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.381e+02 1.542e+02 1.780e+02 2.683e+02, threshold=3.084e+02, percent-clipped=0.0 +2024-08-26 22:21:06,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=168373.33333333334, ans=0.025 +2024-08-26 22:21:06,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=168373.33333333334, ans=0.0 +2024-08-26 22:21:07,572 INFO [train.py:1114] (2/4) Epoch 13, batch 1700, loss[loss=0.1758, simple_loss=0.245, pruned_loss=0.03855, ctc_loss=0.07375, over 19655.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2718, pruned_loss=0.04824, ctc_loss=0.09001, over 3847716.76 frames. ], batch size: 46, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:21:10,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.72 vs. 
limit=15.0 +2024-08-26 22:21:20,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.09 vs. limit=15.0 +2024-08-26 22:21:24,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=168480.0, ans=0.125 +2024-08-26 22:21:29,196 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.68 vs. limit=15.0 +2024-08-26 22:21:40,805 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:21:43,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=168533.33333333334, ans=0.125 +2024-08-26 22:21:53,900 INFO [train.py:1114] (2/4) Epoch 13, batch 1750, loss[loss=0.1654, simple_loss=0.2325, pruned_loss=0.03502, ctc_loss=0.07054, over 19679.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2713, pruned_loss=0.04799, ctc_loss=0.08946, over 3852034.96 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:22:00,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168640.0, ans=0.125 +2024-08-26 22:22:13,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=168746.66666666666, ans=0.2 +2024-08-26 22:23:46,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168800.0, ans=0.1 +2024-08-26 22:23:51,724 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.438e+02 1.563e+02 1.924e+02 3.851e+02, threshold=3.126e+02, percent-clipped=3.0 +2024-08-26 22:24:01,010 INFO [train.py:1114] (2/4) Epoch 13, batch 1800, loss[loss=0.1996, simple_loss=0.2728, pruned_loss=0.04526, ctc_loss=0.08972, over 19617.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2714, pruned_loss=0.04788, ctc_loss=0.08946, over 3853948.22 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:24:07,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168906.66666666666, ans=0.125 +2024-08-26 22:24:11,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.25 vs. limit=15.0 +2024-08-26 22:24:21,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=169013.33333333334, ans=0.2 +2024-08-26 22:24:28,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=169066.66666666666, ans=0.025 +2024-08-26 22:24:31,088 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.52 vs. 
limit=15.0 +2024-08-26 22:24:37,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169120.0, ans=0.0 +2024-08-26 22:24:44,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=169173.33333333334, ans=10.0 +2024-08-26 22:24:44,743 INFO [train.py:1114] (2/4) Epoch 13, batch 1850, loss[loss=0.207, simple_loss=0.2811, pruned_loss=0.04713, ctc_loss=0.09655, over 19599.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2713, pruned_loss=0.04779, ctc_loss=0.08921, over 3857857.64 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:24:48,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=169173.33333333334, ans=0.2 +2024-08-26 22:24:53,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=169226.66666666666, ans=0.0 +2024-08-26 22:25:09,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.30 vs. limit=22.5 +2024-08-26 22:25:11,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=169333.33333333334, ans=0.2 +2024-08-26 22:25:12,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=169333.33333333334, ans=0.125 +2024-08-26 22:25:13,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=169333.33333333334, ans=0.125 +2024-08-26 22:25:21,906 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.546e+02 1.793e+02 2.323e+02 4.609e+02, threshold=3.586e+02, percent-clipped=7.0 +2024-08-26 22:25:22,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.55 vs. limit=22.5 +2024-08-26 22:25:29,827 INFO [train.py:1114] (2/4) Epoch 13, batch 1900, loss[loss=0.2031, simple_loss=0.2764, pruned_loss=0.04742, ctc_loss=0.08741, over 19655.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2721, pruned_loss=0.04817, ctc_loss=0.08979, over 3862789.69 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:25:33,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169440.0, ans=0.1 +2024-08-26 22:25:35,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.71 vs. limit=15.0 +2024-08-26 22:25:44,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.33 vs. limit=12.0 +2024-08-26 22:25:47,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=169546.66666666666, ans=0.125 +2024-08-26 22:26:05,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=169653.33333333334, ans=0.0 +2024-08-26 22:26:13,637 INFO [train.py:1114] (2/4) Epoch 13, batch 1950, loss[loss=0.1965, simple_loss=0.2652, pruned_loss=0.04677, ctc_loss=0.0854, over 19586.00 frames. 
], tot_loss[loss=0.2032, simple_loss=0.2734, pruned_loss=0.04851, ctc_loss=0.09026, over 3871322.34 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0 +2024-08-26 22:26:20,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=169706.66666666666, ans=15.0 +2024-08-26 22:26:24,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=169760.0, ans=0.125 +2024-08-26 22:26:25,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=169760.0, ans=0.0 +2024-08-26 22:26:28,475 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:26:36,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=169813.33333333334, ans=0.0 +2024-08-26 22:26:53,524 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.527e+02 1.786e+02 2.093e+02 2.857e+02, threshold=3.573e+02, percent-clipped=0.0 +2024-08-26 22:26:56,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.81 vs. limit=10.0 +2024-08-26 22:26:57,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169920.0, ans=0.0 +2024-08-26 22:26:57,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=169920.0, ans=0.125 +2024-08-26 22:27:00,499 INFO [train.py:1114] (2/4) Epoch 13, batch 2000, loss[loss=0.1719, simple_loss=0.2364, pruned_loss=0.03983, ctc_loss=0.06903, over 19663.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.274, pruned_loss=0.049, ctc_loss=0.09113, over 3856689.37 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:27:11,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=170026.66666666666, ans=0.2 +2024-08-26 22:27:22,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=170080.0, ans=0.0 +2024-08-26 22:27:23,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=170080.0, ans=0.1 +2024-08-26 22:27:26,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170133.33333333334, ans=0.1 +2024-08-26 22:27:40,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. limit=6.0 +2024-08-26 22:27:42,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.05 vs. limit=10.0 +2024-08-26 22:27:44,102 INFO [train.py:1114] (2/4) Epoch 13, batch 2050, loss[loss=0.1692, simple_loss=0.2398, pruned_loss=0.03526, ctc_loss=0.06993, over 19705.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2729, pruned_loss=0.04884, ctc_loss=0.09115, over 3851357.49 frames. 
], batch size: 47, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:27:54,675 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:28:03,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170346.66666666666, ans=0.1 +2024-08-26 22:28:06,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170346.66666666666, ans=0.1 +2024-08-26 22:28:11,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.65 vs. limit=15.0 +2024-08-26 22:28:17,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=170400.0, ans=0.0 +2024-08-26 22:28:20,411 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.436e+02 1.652e+02 1.928e+02 2.658e+02, threshold=3.303e+02, percent-clipped=0.0 +2024-08-26 22:28:27,531 INFO [train.py:1114] (2/4) Epoch 13, batch 2100, loss[loss=0.2067, simple_loss=0.2781, pruned_loss=0.0492, ctc_loss=0.0919, over 19768.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2717, pruned_loss=0.04824, ctc_loss=0.09, over 3858507.93 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:28:28,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=170506.66666666666, ans=0.0 +2024-08-26 22:28:38,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=170560.0, ans=0.0 +2024-08-26 22:29:13,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170720.0, ans=0.1 +2024-08-26 22:29:18,565 INFO [train.py:1114] (2/4) Epoch 13, batch 2150, loss[loss=0.1978, simple_loss=0.2552, pruned_loss=0.0514, ctc_loss=0.09392, over 19851.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2712, pruned_loss=0.04802, ctc_loss=0.0896, over 3869917.21 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:29:18,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=170773.33333333334, ans=0.2 +2024-08-26 22:29:28,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=170826.66666666666, ans=0.125 +2024-08-26 22:29:30,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=170826.66666666666, ans=0.2 +2024-08-26 22:29:32,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=170826.66666666666, ans=0.0 +2024-08-26 22:29:32,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.00 vs. limit=15.0 +2024-08-26 22:29:38,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.78 vs. 
limit=15.0 +2024-08-26 22:29:55,027 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.462e+02 1.698e+02 2.269e+02 4.218e+02, threshold=3.397e+02, percent-clipped=7.0 +2024-08-26 22:30:02,065 INFO [train.py:1114] (2/4) Epoch 13, batch 2200, loss[loss=0.2139, simple_loss=0.2898, pruned_loss=0.05006, ctc_loss=0.09501, over 19581.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2712, pruned_loss=0.04788, ctc_loss=0.08928, over 3867915.10 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:30:03,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=171040.0, ans=0.125 +2024-08-26 22:30:22,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=171146.66666666666, ans=0.125 +2024-08-26 22:30:27,743 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:30:38,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=171253.33333333334, ans=0.125 +2024-08-26 22:30:46,562 INFO [train.py:1114] (2/4) Epoch 13, batch 2250, loss[loss=0.1854, simple_loss=0.267, pruned_loss=0.0376, ctc_loss=0.07119, over 19610.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2717, pruned_loss=0.04792, ctc_loss=0.08948, over 3867059.55 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0 +2024-08-26 22:30:51,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=171306.66666666666, ans=0.2 +2024-08-26 22:30:53,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.70 vs. limit=10.0 +2024-08-26 22:30:55,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=171360.0, ans=0.2 +2024-08-26 22:30:57,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=171360.0, ans=0.025 +2024-08-26 22:31:00,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=171360.0, ans=0.5 +2024-08-26 22:31:01,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=171360.0, ans=0.125 +2024-08-26 22:31:06,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=171413.33333333334, ans=0.025 +2024-08-26 22:31:22,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.444e+02 1.610e+02 1.869e+02 3.635e+02, threshold=3.220e+02, percent-clipped=1.0 +2024-08-26 22:31:29,434 INFO [train.py:1114] (2/4) Epoch 13, batch 2300, loss[loss=0.2072, simple_loss=0.2763, pruned_loss=0.05057, ctc_loss=0.09223, over 19507.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2708, pruned_loss=0.04789, ctc_loss=0.08929, over 3860537.43 frames. 
+2024-08-26 22:31:29,434 INFO [train.py:1114] (2/4) Epoch 13, batch 2300, loss[loss=0.2072, simple_loss=0.2763, pruned_loss=0.05057, ctc_loss=0.09223, over 19507.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2708, pruned_loss=0.04789, ctc_loss=0.08929, over 3860537.43 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:31:33,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171573.33333333334, ans=0.1
+2024-08-26 22:31:47,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0
+2024-08-26 22:31:59,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=171733.33333333334, ans=0.025
+2024-08-26 22:32:02,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.99 vs. limit=15.0
+2024-08-26 22:32:13,414 INFO [train.py:1114] (2/4) Epoch 13, batch 2350, loss[loss=0.2334, simple_loss=0.2976, pruned_loss=0.06267, ctc_loss=0.1095, over 19662.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2713, pruned_loss=0.04805, ctc_loss=0.08945, over 3863155.69 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 16.0
+2024-08-26 22:32:21,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171840.0, ans=0.125
+2024-08-26 22:32:22,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=171893.33333333334, ans=0.2
+2024-08-26 22:32:45,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=171946.66666666666, ans=0.07
+2024-08-26 22:32:49,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.68 vs. limit=12.0
+2024-08-26 22:33:04,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.652e+02 1.956e+02 2.487e+02 4.828e+02, threshold=3.913e+02, percent-clipped=4.0
+2024-08-26 22:33:07,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=172053.33333333334, ans=0.035
+2024-08-26 22:33:10,711 INFO [train.py:1114] (2/4) Epoch 13, batch 2400, loss[loss=0.2204, simple_loss=0.2877, pruned_loss=0.0556, ctc_loss=0.1051, over 19359.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2735, pruned_loss=0.04887, ctc_loss=0.09087, over 3857901.38 frames. ], batch size: 71, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:33:20,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=172160.0, ans=0.2
+2024-08-26 22:33:27,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.70 vs. limit=10.0
+2024-08-26 22:33:48,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=172320.0, ans=0.125
+2024-08-26 22:33:48,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0
+2024-08-26 22:33:54,744 INFO [train.py:1114] (2/4) Epoch 13, batch 2450, loss[loss=0.2624, simple_loss=0.3056, pruned_loss=0.07975, ctc_loss=0.149, over 13446.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.277, pruned_loss=0.05132, ctc_loss=0.09565, over 3732248.48 frames. ], batch size: 140, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:34:02,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=172373.33333333334, ans=0.05
+2024-08-26 22:34:20,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=172480.0, ans=0.025
+2024-08-26 22:38:18,462 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.1893, simple_loss=0.2484, pruned_loss=0.04848, ctc_loss=0.0831, over 19789.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2484, pruned_loss=0.04848, ctc_loss=0.0831, over 19789.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:38:18,463 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-26 22:38:36,019 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1777, simple_loss=0.2705, pruned_loss=0.03149, ctc_loss=0.05468, over 944034.00 frames.
+2024-08-26 22:38:36,019 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12825MB
+2024-08-26 22:38:39,677 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.631e+02 1.782e+02 1.968e+02 3.125e+02, threshold=3.565e+02, percent-clipped=0.0
+2024-08-26 22:38:44,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=172634.66666666666, ans=0.125
+2024-08-26 22:38:50,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=172634.66666666666, ans=0.0
+2024-08-26 22:39:09,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172741.33333333334, ans=0.1
+2024-08-26 22:39:21,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=172794.66666666666, ans=0.0
+2024-08-26 22:40:03,204 INFO [train.py:1114] (2/4) Epoch 14, batch 50, loss[loss=0.1836, simple_loss=0.2552, pruned_loss=0.04116, ctc_loss=0.0744, over 19751.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2742, pruned_loss=0.049, ctc_loss=0.09158, over 845084.53 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:41:55,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=172901.33333333334, ans=0.125
+2024-08-26 22:41:56,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172901.33333333334, ans=0.125
+2024-08-26 22:43:13,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173061.33333333334, ans=0.1
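
At batch 0 of each epoch the log shows a separate validation pass (`Computing validation loss`, then an `Epoch 14, validation: ...` summary over 944034.00 frames, then the peak-memory line). A minimal sketch of that step is below; the `model(batch)` interface returning a frame-averaged loss and a frame count is an assumption, not the actual `train.py` signature:

```python
import torch

def compute_validation_loss(model, valid_loader, device):
    """Sketch of the 'Computing validation loss' step logged at batch 0:
    a frame-weighted average of the loss over the whole validation set."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)  # assumed interface
            tot_loss += float(loss) * num_frames
            tot_frames += num_frames
    model.train()
    avg = tot_loss / max(tot_frames, 1.0)
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={avg:.4f}, over {tot_frames:.2f} frames.")
    print(f"Maximum memory allocated so far is {mb}MB")
    return avg
```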
+2024-08-26 22:43:20,291 INFO [train.py:1114] (2/4) Epoch 14, batch 100, loss[loss=0.1813, simple_loss=0.256, pruned_loss=0.03927, ctc_loss=0.07009, over 19711.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2742, pruned_loss=0.04816, ctc_loss=0.09087, over 1499046.71 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:43:23,803 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.427e+02 1.577e+02 1.836e+02 2.542e+02, threshold=3.153e+02, percent-clipped=0.0
+2024-08-26 22:43:25,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=173114.66666666666, ans=0.125
+2024-08-26 22:43:28,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=173168.0, ans=0.0
+2024-08-26 22:43:38,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=173168.0, ans=0.1
+2024-08-26 22:43:49,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173274.66666666666, ans=0.125
+2024-08-26 22:44:01,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=173328.0, ans=0.125
+2024-08-26 22:44:06,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=173328.0, ans=0.0
+2024-08-26 22:44:09,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173381.33333333334, ans=0.125
+2024-08-26 22:44:10,511 INFO [train.py:1114] (2/4) Epoch 14, batch 150, loss[loss=0.1821, simple_loss=0.2492, pruned_loss=0.04189, ctc_loss=0.07815, over 19692.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2721, pruned_loss=0.04768, ctc_loss=0.08953, over 2028604.36 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:44:13,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=173381.33333333334, ans=0.2
+2024-08-26 22:44:15,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.59 vs. limit=15.0
+2024-08-26 22:44:17,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0
+2024-08-26 22:44:31,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=173488.0, ans=0.025
+2024-08-26 22:44:43,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=173541.33333333334, ans=0.125
+2024-08-26 22:44:59,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=173594.66666666666, ans=0.025
+2024-08-26 22:45:00,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=173594.66666666666, ans=0.125
+2024-08-26 22:45:03,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=173594.66666666666, ans=0.0
+2024-08-26 22:45:09,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173594.66666666666, ans=0.1
+2024-08-26 22:45:10,938 INFO [train.py:1114] (2/4) Epoch 14, batch 200, loss[loss=0.2066, simple_loss=0.2784, pruned_loss=0.04868, ctc_loss=0.09374, over 18454.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2709, pruned_loss=0.0473, ctc_loss=0.08875, over 2436843.10 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:45:12,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0
+2024-08-26 22:45:14,587 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.393e+02 1.624e+02 1.885e+02 3.247e+02, threshold=3.247e+02, percent-clipped=1.0
+2024-08-26 22:45:15,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.28 vs. limit=8.0
+2024-08-26 22:45:27,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=173701.33333333334, ans=0.125
+2024-08-26 22:46:00,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173861.33333333334, ans=0.1
+2024-08-26 22:46:04,221 INFO [train.py:1114] (2/4) Epoch 14, batch 250, loss[loss=0.2175, simple_loss=0.2926, pruned_loss=0.05299, ctc_loss=0.09125, over 19384.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2708, pruned_loss=0.04709, ctc_loss=0.08854, over 2757067.39 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:46:14,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173968.0, ans=0.1
+2024-08-26 22:46:35,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.99 vs. limit=10.0
+2024-08-26 22:46:37,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=174074.66666666666, ans=0.125
+2024-08-26 22:46:44,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=174128.0, ans=0.025
+2024-08-26 22:46:49,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=174128.0, ans=0.0
+2024-08-26 22:46:54,998 INFO [train.py:1114] (2/4) Epoch 14, batch 300, loss[loss=0.2144, simple_loss=0.2877, pruned_loss=0.0511, ctc_loss=0.0975, over 19542.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2706, pruned_loss=0.04699, ctc_loss=0.08819, over 3001239.55 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 16.0
+2024-08-26 22:46:55,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=174181.33333333334, ans=0.035
+2024-08-26 22:46:59,576 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.470e+02 1.728e+02 2.225e+02 3.956e+02, threshold=3.457e+02, percent-clipped=2.0
+2024-08-26 22:46:59,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=174181.33333333334, ans=0.2
+2024-08-26 22:47:00,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174181.33333333334, ans=0.1
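
The very frequent `scaling.py:214` lines each print the resolved value (`ans`) of a named `ScheduledFloat` at the current `batch_count`: hyperparameters such as dropout rates, skip rates, and balancer probabilities that follow a schedule rather than staying fixed. A minimal sketch of such a schedule is below, assuming piecewise-linear interpolation between `(batch_count, value)` knots; the knot values and the class internals are illustrative, not the actual `scaling.py` implementation:

```python
class ScheduledFloat:
    """A float hyperparameter scheduled on batch_count, in the spirit of the
    'ScheduledFloat: name=..., batch_count=..., ans=...' log lines.
    Piecewise-linear between (batch_count, value) knots."""

    def __init__(self, *knots, name="unnamed"):
        self.knots = sorted(knots)  # e.g. (0.0, 0.3), (20000.0, 0.125)
        self.name = name

    def value(self, batch_count: float) -> float:
        (x0, y0), *rest = self.knots
        if batch_count <= x0:
            return y0
        for x1, y1 in rest:
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
            x0, y0 = x1, y1
        return y0  # past the last knot, hold the final value

prob = ScheduledFloat((0.0, 0.3), (20000.0, 0.125), name="balancer1.prob")
print(f"ScheduledFloat: name={prob.name}, batch_count=171040.0, "
      f"ans={prob.value(171040.0)}")  # ans=0.125, schedule fully decayed
```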
+2024-08-26 22:47:11,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.88 vs. limit=22.5
+2024-08-26 22:47:17,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.32 vs. limit=15.0
+2024-08-26 22:47:36,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174394.66666666666, ans=0.125
+2024-08-26 22:47:39,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=174394.66666666666, ans=0.125
+2024-08-26 22:47:43,363 INFO [train.py:1114] (2/4) Epoch 14, batch 350, loss[loss=0.1888, simple_loss=0.2544, pruned_loss=0.04446, ctc_loss=0.08542, over 19738.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2706, pruned_loss=0.04699, ctc_loss=0.08807, over 3191462.47 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 16.0
+2024-08-26 22:48:25,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=174554.66666666666, ans=0.125
+2024-08-26 22:48:33,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.47 vs. limit=22.5
+2024-08-26 22:48:33,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=174608.0, ans=0.2
+2024-08-26 22:48:42,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=174661.33333333334, ans=0.125
+2024-08-26 22:48:48,273 INFO [train.py:1114] (2/4) Epoch 14, batch 400, loss[loss=0.1821, simple_loss=0.2611, pruned_loss=0.0374, ctc_loss=0.07087, over 19482.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2696, pruned_loss=0.04649, ctc_loss=0.08697, over 3343716.94 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:48:52,776 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.491e+02 1.630e+02 1.842e+02 3.705e+02, threshold=3.261e+02, percent-clipped=1.0
+2024-08-26 22:49:04,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=174768.0, ans=0.125
+2024-08-26 22:49:07,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=174768.0, ans=0.0
+2024-08-26 22:49:11,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.93 vs. limit=10.0
+2024-08-26 22:49:16,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=174821.33333333334, ans=0.0
+2024-08-26 22:49:16,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=174821.33333333334, ans=0.05
+2024-08-26 22:49:17,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=174821.33333333334, ans=0.125
+2024-08-26 22:49:31,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174928.0, ans=0.1
+2024-08-26 22:49:39,153 INFO [train.py:1114] (2/4) Epoch 14, batch 450, loss[loss=0.1927, simple_loss=0.2667, pruned_loss=0.04308, ctc_loss=0.08104, over 19609.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.27, pruned_loss=0.04676, ctc_loss=0.08753, over 3449888.47 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:49:50,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=175034.66666666666, ans=0.125
+2024-08-26 22:49:57,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=175034.66666666666, ans=0.0
+2024-08-26 22:49:59,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.52 vs. limit=15.0
+2024-08-26 22:50:09,601 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:50:27,952 INFO [train.py:1114] (2/4) Epoch 14, batch 500, loss[loss=0.2279, simple_loss=0.2982, pruned_loss=0.05638, ctc_loss=0.112, over 19681.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2689, pruned_loss=0.04621, ctc_loss=0.08659, over 3545845.53 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:50:32,535 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.438e+02 1.690e+02 1.988e+02 3.244e+02, threshold=3.379e+02, percent-clipped=0.0
+2024-08-26 22:50:41,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=175301.33333333334, ans=0.2
+2024-08-26 22:50:58,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=175408.0, ans=0.125
+2024-08-26 22:51:00,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=175408.0, ans=0.125
+2024-08-26 22:51:09,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=175461.33333333334, ans=0.04949747468305833
+2024-08-26 22:51:14,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=175461.33333333334, ans=0.125
+2024-08-26 22:51:15,967 INFO [train.py:1114] (2/4) Epoch 14, batch 550, loss[loss=0.2123, simple_loss=0.2819, pruned_loss=0.05066, ctc_loss=0.1037, over 19311.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.269, pruned_loss=0.04637, ctc_loss=0.08706, over 3607179.29 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:51:21,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175514.66666666666, ans=0.0
+2024-08-26 22:51:32,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=175568.0, ans=0.0
+2024-08-26 22:51:32,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=175568.0, ans=0.2
+2024-08-26 22:51:33,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=175568.0, ans=0.0
+2024-08-26 22:52:13,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=175728.0, ans=0.2
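
The `scaling.py:1120` lines (`WithLoss: name=..., loss-sum=...`) report an auxiliary diagnostic loss attached to a module's output, here the self-attention weights; a logged `loss-sum=0.000e+00` means the penalty never fired over the reporting window. The exact penalty is not visible in the log, so the sketch below uses an illustrative out-of-range magnitude penalty and an assumed wrapper design; only the identity-forward-plus-accumulated-loss shape is taken from the log:

```python
import torch

class WithLoss(torch.nn.Module):
    """Identity wrapper that computes a diagnostic loss on the tensor passing
    through it and accumulates the sum for logging, in the spirit of the
    'WithLoss: name=..., loss-sum=...' lines."""

    def __init__(self, name: str, limit: float = 25.0):
        super().__init__()
        self.name = name
        self.limit = limit
        self.loss_sum = 0.0  # reset whenever the log line is emitted

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Illustrative penalty: mean overshoot of |x| beyond `limit`;
        # zero while activations stay in range (hence 0.000e+00 above).
        loss = (x.abs() - self.limit).clamp(min=0.0).mean()
        self.loss_sum += float(loss.detach())
        # Value is unchanged (adds exactly zero), but the penalty's
        # gradient still reaches the parameters upstream of x.
        return x + (loss - loss.detach())
```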
+2024-08-26 22:52:15,482 INFO [train.py:1114] (2/4) Epoch 14, batch 600, loss[loss=0.2233, simple_loss=0.2989, pruned_loss=0.05472, ctc_loss=0.09548, over 19341.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2689, pruned_loss=0.04624, ctc_loss=0.08672, over 3663666.03 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:52:20,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.434e+02 1.658e+02 1.951e+02 2.764e+02, threshold=3.317e+02, percent-clipped=0.0
+2024-08-26 22:52:22,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=175781.33333333334, ans=0.125
+2024-08-26 22:52:28,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175834.66666666666, ans=0.125
+2024-08-26 22:52:36,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=175888.0, ans=0.025
+2024-08-26 22:52:46,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=175888.0, ans=0.125
+2024-08-26 22:52:47,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=175941.33333333334, ans=0.0
+2024-08-26 22:52:50,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=175941.33333333334, ans=15.0
+2024-08-26 22:52:51,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175941.33333333334, ans=0.125
+2024-08-26 22:53:17,363 INFO [train.py:1114] (2/4) Epoch 14, batch 650, loss[loss=0.1767, simple_loss=0.25, pruned_loss=0.03836, ctc_loss=0.06674, over 19778.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2682, pruned_loss=0.04593, ctc_loss=0.08597, over 3714475.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:53:26,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.50 vs. limit=22.5
+2024-08-26 22:53:26,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=176101.33333333334, ans=0.04949747468305833
+2024-08-26 22:53:34,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0
+2024-08-26 22:53:37,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.56 vs. limit=22.5
+2024-08-26 22:53:46,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.19 vs. limit=15.0
+2024-08-26 22:54:16,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.99 vs. limit=15.0
+2024-08-26 22:54:22,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176261.33333333334, ans=0.1
+2024-08-26 22:54:29,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176261.33333333334, ans=0.125
+2024-08-26 22:54:40,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=176314.66666666666, ans=0.2
+2024-08-26 22:54:41,335 INFO [train.py:1114] (2/4) Epoch 14, batch 700, loss[loss=0.1887, simple_loss=0.2632, pruned_loss=0.04126, ctc_loss=0.07927, over 19746.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2686, pruned_loss=0.04624, ctc_loss=0.08657, over 3746176.45 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:54:47,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.78 vs. limit=10.0
+2024-08-26 22:54:49,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.24 vs. limit=15.0
+2024-08-26 22:54:52,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.446e+02 1.597e+02 2.123e+02 3.826e+02, threshold=3.195e+02, percent-clipped=1.0
+2024-08-26 22:54:55,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=176314.66666666666, ans=0.2
+2024-08-26 22:55:27,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=176421.33333333334, ans=10.0
+2024-08-26 22:55:29,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176421.33333333334, ans=0.125
+2024-08-26 22:55:30,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=176421.33333333334, ans=0.0
+2024-08-26 22:55:31,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=176474.66666666666, ans=0.125
+2024-08-26 22:58:11,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=176474.66666666666, ans=0.2
+2024-08-26 22:58:22,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=176528.0, ans=0.125
+2024-08-26 22:59:22,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=176581.33333333334, ans=0.125
+2024-08-26 22:59:23,406 INFO [train.py:1114] (2/4) Epoch 14, batch 750, loss[loss=0.1914, simple_loss=0.2732, pruned_loss=0.04015, ctc_loss=0.07334, over 19499.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2687, pruned_loss=0.04636, ctc_loss=0.08661, over 3773023.39 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 16.0
+2024-08-26 23:00:07,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=176581.33333333334, ans=0.0
+2024-08-26 23:00:49,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=176634.66666666666, ans=0.125
+2024-08-26 23:01:06,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=176688.0, ans=0.125
+2024-08-26 23:01:34,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0
+2024-08-26 23:01:53,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=176794.66666666666, ans=0.125
+2024-08-26 23:01:58,980 INFO [train.py:1114] (2/4) Epoch 14, batch 800, loss[loss=0.1963, simple_loss=0.2621, pruned_loss=0.04812, ctc_loss=0.08545, over 19393.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2694, pruned_loss=0.04676, ctc_loss=0.08716, over 3795191.57 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:02:11,891 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.464e+02 1.718e+02 2.120e+02 3.590e+02, threshold=3.437e+02, percent-clipped=3.0
+2024-08-26 23:05:19,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177008.0, ans=0.1
+2024-08-26 23:05:36,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=177008.0, ans=0.125
+2024-08-26 23:05:52,832 INFO [train.py:1114] (2/4) Epoch 14, batch 850, loss[loss=0.1946, simple_loss=0.272, pruned_loss=0.04294, ctc_loss=0.07814, over 19643.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.269, pruned_loss=0.04655, ctc_loss=0.08678, over 3814842.26 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:06:02,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=177168.0, ans=0.07
+2024-08-26 23:06:06,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=177168.0, ans=0.2
+2024-08-26 23:06:16,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=177221.33333333334, ans=0.025
+2024-08-26 23:06:21,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=177274.66666666666, ans=0.0
+2024-08-26 23:06:30,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=177328.0, ans=0.025
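
The `scaling.py:1024` lines (`Whitening: name=..., metric=M vs. limit=L`) are emitted when a whitening diagnostic approaches or exceeds its limit: the metric measures how far the channel covariance of a module's activations is from a multiple of the identity (1.0 would be perfectly white). The sketch below shows one plausible formula with that property, metric = d * tr(C^2) / tr(C)^2 per channel group; this normalization is an assumption, not necessarily the exact `scaling.py` computation:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """How far the channel covariance of x (frames x channels) is from a
    multiple of the identity; 1.0 = perfectly white. Assumed formula:
    metric = d * tr(C^2) / tr(C)^2 per group, reporting the worst group."""
    n, d = x.shape
    g = d // num_groups
    metrics = []
    for k in range(num_groups):
        xk = x[:, k * g:(k + 1) * g]
        c = (xk.T @ xk) / n                              # (g, g) covariance
        m = g * (c @ c).trace() / (c.trace() ** 2 + 1e-20)
        metrics.append(float(m))
    return max(metrics)

x = torch.randn(1000, 288)  # white noise stays well under the limit
print(f"metric={whitening_metric(x):.2f} vs. limit=10.0")  # roughly 1.3
```

When the metric drifts above the limit, a corrective penalty (or a schedule like the logged `whitening_limit` ScheduledFloat) can push the activations back toward a white covariance.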
+2024-08-26 23:06:46,763 INFO [train.py:1114] (2/4) Epoch 14, batch 900, loss[loss=0.1774, simple_loss=0.2417, pruned_loss=0.04169, ctc_loss=0.07431, over 19417.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2691, pruned_loss=0.0467, ctc_loss=0.08715, over 3817936.02 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:06:51,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=177381.33333333334, ans=0.0
+2024-08-26 23:06:52,129 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.429e+02 1.657e+02 1.986e+02 3.410e+02, threshold=3.315e+02, percent-clipped=0.0
+2024-08-26 23:06:54,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177381.33333333334, ans=0.125
+2024-08-26 23:07:01,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177434.66666666666, ans=0.1
+2024-08-26 23:07:21,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=177541.33333333334, ans=0.125
+2024-08-26 23:07:36,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=177594.66666666666, ans=0.2
+2024-08-26 23:07:36,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177594.66666666666, ans=0.125
+2024-08-26 23:07:38,549 INFO [train.py:1114] (2/4) Epoch 14, batch 950, loss[loss=0.1754, simple_loss=0.2477, pruned_loss=0.03798, ctc_loss=0.06797, over 19488.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2692, pruned_loss=0.04662, ctc_loss=0.08697, over 3819119.82 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:07:38,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=177648.0, ans=0.0
+2024-08-26 23:07:50,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=177701.33333333334, ans=0.125
+2024-08-26 23:08:01,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177754.66666666666, ans=0.125
+2024-08-26 23:08:07,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=177754.66666666666, ans=0.0
+2024-08-26 23:08:14,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177808.0, ans=0.1
+2024-08-26 23:08:15,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=177808.0, ans=0.125
+2024-08-26 23:08:15,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=177808.0, ans=0.07
+2024-08-26 23:08:35,563 INFO [train.py:1114] (2/4) Epoch 14, batch 1000, loss[loss=0.1664, simple_loss=0.2484, pruned_loss=0.03028, ctc_loss=0.05972, over 19852.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2698, pruned_loss=0.04678, ctc_loss=0.08746, over 3815772.92 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:08:41,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.435e+02 1.639e+02 1.944e+02 3.185e+02, threshold=3.279e+02, percent-clipped=0.0
+2024-08-26 23:08:52,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177968.0, ans=0.1
+2024-08-26 23:08:54,599 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:09:15,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178074.66666666666, ans=0.125
+2024-08-26 23:09:16,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178074.66666666666, ans=0.125
+2024-08-26 23:09:18,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178074.66666666666, ans=0.125
+2024-08-26 23:09:27,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178128.0, ans=0.125
+2024-08-26 23:09:29,121 INFO [train.py:1114] (2/4) Epoch 14, batch 1050, loss[loss=0.223, simple_loss=0.2927, pruned_loss=0.05557, ctc_loss=0.1054, over 19852.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2689, pruned_loss=0.04639, ctc_loss=0.08676, over 3822703.55 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:09:35,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.41 vs. limit=15.0
+2024-08-26 23:09:48,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=178234.66666666666, ans=0.0
+2024-08-26 23:10:02,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178288.0, ans=0.1
+2024-08-26 23:10:44,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=178394.66666666666, ans=0.125
+2024-08-26 23:10:47,586 INFO [train.py:1114] (2/4) Epoch 14, batch 1100, loss[loss=0.186, simple_loss=0.2551, pruned_loss=0.04287, ctc_loss=0.07804, over 19588.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2688, pruned_loss=0.04632, ctc_loss=0.08671, over 3830526.86 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:10:49,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=178448.0, ans=0.125
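
Each `train.py:1114` line pairs the current batch's loss components (`loss[...]`, averaged over that batch's frames) with `tot_loss[...]`, a frame-weighted running average over all frames seen so far in the epoch, which is why the `over N frames` count grows toward ~3.8M. A minimal sketch of such a tracker (the class name and interface are assumptions, not the actual `train.py` code):

```python
class MetricsTracker(dict):
    """Frame-weighted running average of loss components, printed in the
    same shape as the logged tot_loss[...] summaries."""

    def accumulate(self, batch_metrics: dict, num_frames: float) -> None:
        # Batch metrics are frame-averaged, so weight them by frame count.
        self["frames"] = self.get("frames", 0.0) + num_frames
        for key, value in batch_metrics.items():
            self[key] = self.get(key, 0.0) + value * num_frames

    def __str__(self) -> str:
        frames = self.get("frames", 1.0)
        body = ", ".join(f"{k}={self[k] / frames:.4g}"
                         for k in self if k != "frames")
        return f"tot_loss[{body}, over {frames:.2f} frames. ]"

tot = MetricsTracker()
tot.accumulate({"loss": 0.1664, "simple_loss": 0.2484,
                "ctc_loss": 0.05972}, 19852.0)
print(tot)  # tot_loss[loss=0.1664, simple_loss=0.2484, ..., over 19852.00 frames. ]
```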
+2024-08-26 23:10:52,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.81 vs. limit=15.0
+2024-08-26 23:10:53,004 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.389e+02 1.598e+02 1.774e+02 3.032e+02, threshold=3.197e+02, percent-clipped=0.0
+2024-08-26 23:10:59,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178501.33333333334, ans=0.125
+2024-08-26 23:11:16,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178608.0, ans=0.125
+2024-08-26 23:11:19,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.52 vs. limit=15.0
+2024-08-26 23:11:28,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0
+2024-08-26 23:11:31,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=178661.33333333334, ans=0.125
+2024-08-26 23:11:36,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=178661.33333333334, ans=0.1
+2024-08-26 23:11:37,947 INFO [train.py:1114] (2/4) Epoch 14, batch 1150, loss[loss=0.1823, simple_loss=0.2618, pruned_loss=0.03703, ctc_loss=0.0722, over 19586.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2689, pruned_loss=0.04619, ctc_loss=0.08648, over 3831181.97 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:11:42,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=178714.66666666666, ans=0.125
+2024-08-26 23:11:50,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178768.0, ans=0.125
+2024-08-26 23:11:54,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=178768.0, ans=0.125
+2024-08-26 23:11:55,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=178768.0, ans=0.0
+2024-08-26 23:12:05,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=178821.33333333334, ans=0.2
+2024-08-26 23:12:14,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=178874.66666666666, ans=0.0
+2024-08-26 23:12:14,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=178874.66666666666, ans=0.125
+2024-08-26 23:12:31,165 INFO [train.py:1114] (2/4) Epoch 14, batch 1200, loss[loss=0.2082, simple_loss=0.2798, pruned_loss=0.05053, ctc_loss=0.08916, over 19837.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2698, pruned_loss=0.04663, ctc_loss=0.08725, over 3826923.42 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:12:36,804 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.458e+02 1.687e+02 2.139e+02 4.936e+02, threshold=3.375e+02, percent-clipped=2.0
+2024-08-26 23:12:49,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=179088.0, ans=0.04949747468305833
+2024-08-26 23:13:03,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=179141.33333333334, ans=0.125
+2024-08-26 23:13:07,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=179141.33333333334, ans=0.125
+2024-08-26 23:13:09,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=179194.66666666666, ans=0.125
+2024-08-26 23:13:10,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179194.66666666666, ans=0.04949747468305833
+2024-08-26 23:13:11,014 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.47 vs. limit=15.0
+2024-08-26 23:13:20,036 INFO [train.py:1114] (2/4) Epoch 14, batch 1250, loss[loss=0.2106, simple_loss=0.2866, pruned_loss=0.04874, ctc_loss=0.09277, over 19523.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2703, pruned_loss=0.0467, ctc_loss=0.08719, over 3844708.95 frames. ], batch size: 61, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:13:26,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.31 vs. limit=15.0
+2024-08-26 23:13:36,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.73 vs. limit=6.0
+2024-08-26 23:13:36,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=179301.33333333334, ans=6.0
+2024-08-26 23:13:46,177 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 23:14:02,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=179461.33333333334, ans=0.0
+2024-08-26 23:14:05,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179461.33333333334, ans=0.125
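
The `grad_scale` value reported with each `train.py:1114` line fluctuates between 32.0 and 16.0, the signature of dynamic loss scaling for mixed-precision training: the scale is halved when a step produces non-finite gradients and grown back after a run of healthy steps. A minimal sketch of that policy is below; the growth interval, the factor of 2, and the class name are assumptions, not the actual optimizer code:

```python
class DynamicGradScaler:
    """Sketch of dynamic loss scaling consistent with the logged
    'grad_scale: 32.0' values dipping to 16.0 and recovering."""

    def __init__(self, scale=32.0, growth_interval=1000):
        self.scale = scale
        self.growth_interval = growth_interval
        self.good_steps = 0

    def update(self, grads_are_finite: bool) -> float:
        if not grads_are_finite:
            self.scale /= 2.0      # back off after inf/nan gradients
            self.good_steps = 0
        else:
            self.good_steps += 1
            if self.good_steps >= self.growth_interval:
                self.scale *= 2.0  # cautiously grow the scale again
                self.good_steps = 0
        return self.scale
```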
+2024-08-26 23:14:12,509 INFO [train.py:1114] (2/4) Epoch 14, batch 1300, loss[loss=0.221, simple_loss=0.295, pruned_loss=0.05322, ctc_loss=0.1015, over 18859.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2695, pruned_loss=0.04651, ctc_loss=0.08671, over 3848027.07 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:14:18,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=179514.66666666666, ans=0.1
+2024-08-26 23:14:19,145 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.402e+02 1.628e+02 1.914e+02 2.926e+02, threshold=3.256e+02, percent-clipped=0.0
+2024-08-26 23:14:23,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=179568.0, ans=0.2
+2024-08-26 23:14:29,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.80 vs. limit=22.5
+2024-08-26 23:14:38,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=179621.33333333334, ans=0.025
+2024-08-26 23:14:40,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179674.66666666666, ans=0.1
+2024-08-26 23:14:54,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.91 vs. limit=22.5
+2024-08-26 23:14:57,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=179728.0, ans=0.1
+2024-08-26 23:14:58,663 INFO [train.py:1114] (2/4) Epoch 14, batch 1350, loss[loss=0.2055, simple_loss=0.2697, pruned_loss=0.05102, ctc_loss=0.09805, over 19790.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2689, pruned_loss=0.04626, ctc_loss=0.08603, over 3857906.85 frames. ], batch size: 54, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:16:08,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=179834.66666666666, ans=0.125
+2024-08-26 23:16:26,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179834.66666666666, ans=0.125
+2024-08-26 23:16:33,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=179888.0, ans=0.95
+2024-08-26 23:16:48,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=179941.33333333334, ans=0.0
+2024-08-26 23:16:59,392 INFO [train.py:1114] (2/4) Epoch 14, batch 1400, loss[loss=0.1748, simple_loss=0.2448, pruned_loss=0.03811, ctc_loss=0.07163, over 19660.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2686, pruned_loss=0.04638, ctc_loss=0.08632, over 3865083.37 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:17:07,629 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.482e+02 1.624e+02 2.003e+02 3.142e+02, threshold=3.248e+02, percent-clipped=0.0
+2024-08-26 23:17:07,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180048.0, ans=0.125
+2024-08-26 23:17:11,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=180101.33333333334, ans=0.125
+2024-08-26 23:17:32,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180208.0, ans=0.125
+2024-08-26 23:17:50,485 INFO [train.py:1114] (2/4) Epoch 14, batch 1450, loss[loss=0.2193, simple_loss=0.2929, pruned_loss=0.05329, ctc_loss=0.09786, over 19663.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2697, pruned_loss=0.04678, ctc_loss=0.08715, over 3861354.24 frames. ], batch size: 63, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:18:02,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.93 vs. limit=15.0
+2024-08-26 23:18:03,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=180314.66666666666, ans=0.07
+2024-08-26 23:18:18,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.65 vs. limit=15.0
+2024-08-26 23:18:33,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180474.66666666666, ans=0.1
+2024-08-26 23:18:41,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=180528.0, ans=0.125
+2024-08-26 23:18:46,184 INFO [train.py:1114] (2/4) Epoch 14, batch 1500, loss[loss=0.2108, simple_loss=0.2808, pruned_loss=0.05059, ctc_loss=0.09911, over 19592.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2702, pruned_loss=0.04685, ctc_loss=0.08737, over 3861121.37 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:18:52,953 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.461e+02 1.607e+02 1.928e+02 3.862e+02, threshold=3.214e+02, percent-clipped=2.0
+2024-08-26 23:18:53,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180581.33333333334, ans=0.1
+2024-08-26 23:19:01,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=180634.66666666666, ans=0.125
+2024-08-26 23:19:02,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.07 vs. limit=15.0
+2024-08-26 23:19:03,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180634.66666666666, ans=0.125
+2024-08-26 23:19:10,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=180688.0, ans=0.125
+2024-08-26 23:19:18,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=180741.33333333334, ans=0.0
+2024-08-26 23:20:30,248 INFO [train.py:1114] (2/4) Epoch 14, batch 1550, loss[loss=0.2393, simple_loss=0.2996, pruned_loss=0.06693, ctc_loss=0.1131, over 19599.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2703, pruned_loss=0.04709, ctc_loss=0.0876, over 3844900.86 frames. ], batch size: 60, lr: 1.07e-02, grad_scale: 16.0
+2024-08-26 23:20:52,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180954.66666666666, ans=0.1
+2024-08-26 23:20:52,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=180954.66666666666, ans=0.025
+2024-08-26 23:21:20,679 INFO [train.py:1114] (2/4) Epoch 14, batch 1600, loss[loss=0.2053, simple_loss=0.2853, pruned_loss=0.04584, ctc_loss=0.08427, over 19846.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2703, pruned_loss=0.04727, ctc_loss=0.08791, over 3833861.75 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:21:24,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181114.66666666666, ans=0.125
+2024-08-26 23:21:27,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.461e+02 1.627e+02 1.971e+02 3.033e+02, threshold=3.255e+02, percent-clipped=0.0
+2024-08-26 23:21:46,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181221.33333333334, ans=0.0
+2024-08-26 23:23:54,622 INFO [train.py:1114] (2/4) Epoch 14, batch 1650, loss[loss=0.2064, simple_loss=0.2813, pruned_loss=0.04645, ctc_loss=0.09619, over 19648.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2699, pruned_loss=0.04712, ctc_loss=0.08787, over 3830692.64 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:24:05,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.37 vs. limit=22.5
+2024-08-26 23:24:07,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0
+2024-08-26 23:24:15,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=181488.0, ans=0.025
+2024-08-26 23:24:16,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=181488.0, ans=0.0
+2024-08-26 23:24:25,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181541.33333333334, ans=0.125
+2024-08-26 23:24:27,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.21 vs. limit=15.0
+2024-08-26 23:24:28,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=15.0
+2024-08-26 23:24:29,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=181541.33333333334, ans=0.0
+2024-08-26 23:24:31,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=181594.66666666666, ans=0.0
+2024-08-26 23:24:40,737 INFO [train.py:1114] (2/4) Epoch 14, batch 1700, loss[loss=0.1711, simple_loss=0.2322, pruned_loss=0.04055, ctc_loss=0.07252, over 19674.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2693, pruned_loss=0.04672, ctc_loss=0.08731, over 3844833.60 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:24:47,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.441e+02 1.691e+02 2.079e+02 3.382e+02, threshold=3.381e+02, percent-clipped=3.0
+2024-08-26 23:24:47,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181648.0, ans=0.1
+2024-08-26 23:24:49,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181701.33333333334, ans=0.1
+2024-08-26 23:24:52,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=15.0
+2024-08-26 23:24:53,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.78 vs. limit=22.5
+2024-08-26 23:24:58,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=15.0
+2024-08-26 23:25:03,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=181754.66666666666, ans=0.025
+2024-08-26 23:25:05,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=181754.66666666666, ans=0.0
+2024-08-26 23:25:13,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=181808.0, ans=0.125
+2024-08-26 23:25:21,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=181861.33333333334, ans=0.125
+2024-08-26 23:25:25,117 INFO [train.py:1114] (2/4) Epoch 14, batch 1750, loss[loss=0.1722, simple_loss=0.2367, pruned_loss=0.04002, ctc_loss=0.06917, over 19631.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2689, pruned_loss=0.04641, ctc_loss=0.08688, over 3850031.43 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:25:30,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181914.66666666666, ans=0.1
+2024-08-26 23:25:33,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=181968.0, ans=0.125
+2024-08-26 23:25:52,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=182074.66666666666, ans=0.04949747468305833
+2024-08-26 23:26:08,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=15.0
+2024-08-26 23:26:19,465 INFO [train.py:1114] (2/4) Epoch 14, batch 1800, loss[loss=0.2004, simple_loss=0.2781, pruned_loss=0.0446, ctc_loss=0.08392, over 19616.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.269, pruned_loss=0.04648, ctc_loss=0.08695, over 3851925.79 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:26:21,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=182181.33333333334, ans=0.2
+2024-08-26 23:26:22,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=182181.33333333334, ans=0.0
+2024-08-26 23:26:26,548 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.533e+02 1.884e+02 2.505e+02 4.097e+02, threshold=3.767e+02, percent-clipped=5.0
+2024-08-26 23:26:32,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.30 vs. limit=15.0
+2024-08-26 23:27:00,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=182394.66666666666, ans=0.125
+2024-08-26 23:27:05,187 INFO [train.py:1114] (2/4) Epoch 14, batch 1850, loss[loss=0.1907, simple_loss=0.2693, pruned_loss=0.04047, ctc_loss=0.07813, over 19581.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2686, pruned_loss=0.04614, ctc_loss=0.08636, over 3855220.69 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:28:42,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=182501.33333333334, ans=0.0
+2024-08-26 23:28:43,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=182501.33333333334, ans=0.025
+2024-08-26 23:28:47,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=182554.66666666666, ans=0.025
+2024-08-26 23:28:52,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=182554.66666666666, ans=0.125
+2024-08-26 23:28:55,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5
+2024-08-26 23:29:10,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=182661.33333333334, ans=0.2
+2024-08-26 23:29:14,711 INFO [train.py:1114] (2/4) Epoch 14, batch 1900, loss[loss=0.2121, simple_loss=0.2845, pruned_loss=0.05109, ctc_loss=0.09406, over 19652.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.269, pruned_loss=0.04618, ctc_loss=0.08631, over 3859926.01 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:29:21,588 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.217e+02 1.441e+02 1.690e+02 2.071e+02 3.452e+02, threshold=3.379e+02, percent-clipped=0.0
+2024-08-26 23:29:34,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182821.33333333334, ans=0.125
+2024-08-26 23:29:35,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182821.33333333334, ans=0.1
+2024-08-26 23:29:53,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=182928.0, ans=0.2
+2024-08-26 23:29:55,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182928.0, ans=0.125
+2024-08-26 23:29:57,807 INFO [train.py:1114] (2/4) Epoch 14, batch 1950, loss[loss=0.1947, simple_loss=0.2695, pruned_loss=0.04415, ctc_loss=0.07897, over 19597.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2706, pruned_loss=0.04663, ctc_loss=0.08704, over 3869013.35 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:29:59,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182981.33333333334, ans=0.1
+2024-08-26 23:30:08,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=183034.66666666666, ans=0.2
+2024-08-26 23:30:17,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0
+2024-08-26 23:30:40,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183194.66666666666, ans=0.1
+2024-08-26 23:30:41,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=183194.66666666666, ans=0.125
+2024-08-26 23:30:44,859 INFO [train.py:1114] (2/4) Epoch 14, batch 2000, loss[loss=0.1717, simple_loss=0.2372, pruned_loss=0.03849, ctc_loss=0.07309, over 19640.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2711, pruned_loss=0.04694, ctc_loss=0.08776, over 3853811.42 frames. ], batch size: 45, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:30:50,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=183248.0, ans=0.2
+2024-08-26 23:30:52,059 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.411e+02 1.571e+02 1.845e+02 2.838e+02, threshold=3.143e+02, percent-clipped=0.0
+2024-08-26 23:31:55,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=183408.0, ans=0.125
+2024-08-26 23:32:03,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=183461.33333333334, ans=0.0
+2024-08-26 23:32:07,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.05 vs. limit=22.5
+2024-08-26 23:32:08,140 INFO [train.py:1114] (2/4) Epoch 14, batch 2050, loss[loss=0.1876, simple_loss=0.252, pruned_loss=0.04539, ctc_loss=0.08079, over 19736.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2701, pruned_loss=0.04663, ctc_loss=0.08716, over 3849695.00 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:32:16,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=183568.0, ans=0.0
+2024-08-26 23:32:23,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=183568.0, ans=0.125
+2024-08-26 23:32:31,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=183621.33333333334, ans=0.0
+2024-08-26 23:32:42,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=183728.0, ans=0.0
+2024-08-26 23:32:51,515 INFO [train.py:1114] (2/4) Epoch 14, batch 2100, loss[loss=0.1887, simple_loss=0.2637, pruned_loss=0.04124, ctc_loss=0.0783, over 19785.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2692, pruned_loss=0.04622, ctc_loss=0.08644, over 3857710.45 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:32:58,369 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.491e+02 1.652e+02 1.860e+02 2.729e+02, threshold=3.304e+02, percent-clipped=0.0
+2024-08-26 23:32:58,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.97 vs. limit=15.0
+2024-08-26 23:33:50,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0
+2024-08-26 23:33:55,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=183888.0, ans=0.0
+2024-08-26 23:34:03,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.80 vs. limit=15.0
+2024-08-26 23:34:05,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.76 vs. limit=12.0
+2024-08-26 23:34:14,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183994.66666666666, ans=0.1
+2024-08-26 23:34:16,401 INFO [train.py:1114] (2/4) Epoch 14, batch 2150, loss[loss=0.1763, simple_loss=0.2514, pruned_loss=0.0367, ctc_loss=0.06976, over 19866.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2684, pruned_loss=0.04586, ctc_loss=0.08599, over 3868069.69 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:34:19,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=184048.0, ans=0.125
+2024-08-26 23:34:33,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=184154.66666666666, ans=0.0
+2024-08-26 23:34:37,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=184154.66666666666, ans=0.0
+2024-08-26 23:34:43,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184208.0, ans=0.1
+2024-08-26 23:34:46,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=184208.0, ans=0.125
+2024-08-26 23:34:53,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0
+2024-08-26 23:34:53,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.64 vs. limit=22.5
+2024-08-26 23:34:59,944 INFO [train.py:1114] (2/4) Epoch 14, batch 2200, loss[loss=0.1976, simple_loss=0.2782, pruned_loss=0.0423, ctc_loss=0.08132, over 19569.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2683, pruned_loss=0.04562, ctc_loss=0.08541, over 3866659.28 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0
+2024-08-26 23:35:06,946 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.447e+02 1.750e+02 2.552e+02 4.295e+02, threshold=3.499e+02, percent-clipped=8.0
+2024-08-26 23:35:07,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.85 vs. limit=22.5
+2024-08-26 23:35:17,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184421.33333333334, ans=0.1
+2024-08-26 23:35:19,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0
+2024-08-26 23:35:39,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=184528.0, ans=0.2
+2024-08-26 23:35:43,854 INFO [train.py:1114] (2/4) Epoch 14, batch 2250, loss[loss=0.1948, simple_loss=0.2679, pruned_loss=0.04488, ctc_loss=0.08023, over 19629.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2681, pruned_loss=0.04558, ctc_loss=0.08548, over 3866444.72 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0
+2024-08-26 23:35:45,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.63 vs. 
limit=15.0 +2024-08-26 23:36:00,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=184688.0, ans=0.2 +2024-08-26 23:36:02,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=184688.0, ans=0.0 +2024-08-26 23:36:24,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184794.66666666666, ans=0.125 +2024-08-26 23:36:26,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184848.0, ans=0.1 +2024-08-26 23:36:27,366 INFO [train.py:1114] (2/4) Epoch 14, batch 2300, loss[loss=0.19, simple_loss=0.2576, pruned_loss=0.04458, ctc_loss=0.08312, over 19501.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2674, pruned_loss=0.04567, ctc_loss=0.08551, over 3860560.42 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:36:30,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.48 vs. limit=10.0 +2024-08-26 23:36:35,131 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.662e+02 2.114e+02 3.033e+02, threshold=3.324e+02, percent-clipped=0.0 +2024-08-26 23:36:48,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=184954.66666666666, ans=0.025 +2024-08-26 23:36:49,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184954.66666666666, ans=0.1 +2024-08-26 23:37:10,950 INFO [train.py:1114] (2/4) Epoch 14, batch 2350, loss[loss=0.2105, simple_loss=0.286, pruned_loss=0.049, ctc_loss=0.09273, over 19632.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.267, pruned_loss=0.04562, ctc_loss=0.08536, over 3863177.60 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:37:15,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=185114.66666666666, ans=0.125 +2024-08-26 23:37:21,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185168.0, ans=0.0 +2024-08-26 23:37:29,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.36 vs. limit=15.0 +2024-08-26 23:37:55,041 INFO [train.py:1114] (2/4) Epoch 14, batch 2400, loss[loss=0.228, simple_loss=0.2902, pruned_loss=0.06131, ctc_loss=0.1079, over 19310.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2702, pruned_loss=0.04693, ctc_loss=0.08754, over 3857715.38 frames. ], batch size: 71, lr: 1.05e-02, grad_scale: 32.0 +2024-08-26 23:38:02,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.569e+02 1.843e+02 2.357e+02 3.475e+02, threshold=3.685e+02, percent-clipped=2.0 +2024-08-26 23:38:08,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=185434.66666666666, ans=0.125 +2024-08-26 23:38:21,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.03 vs. 
limit=15.0 +2024-08-26 23:38:26,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185541.33333333334, ans=0.125 +2024-08-26 23:38:35,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=185594.66666666666, ans=0.125 +2024-08-26 23:38:39,323 INFO [train.py:1114] (2/4) Epoch 14, batch 2450, loss[loss=0.2609, simple_loss=0.3029, pruned_loss=0.07851, ctc_loss=0.1548, over 14029.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.274, pruned_loss=0.04967, ctc_loss=0.09292, over 3730481.11 frames. ], batch size: 140, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:38:57,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185754.66666666666, ans=0.0 +2024-08-26 23:40:44,473 INFO [train.py:1114] (2/4) Epoch 15, batch 0, loss[loss=0.1797, simple_loss=0.2457, pruned_loss=0.042, ctc_loss=0.07437, over 19833.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2457, pruned_loss=0.042, ctc_loss=0.07437, over 19833.00 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 32.0 +2024-08-26 23:40:47,161 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-26 23:41:25,229 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1751, simple_loss=0.2686, pruned_loss=0.03035, ctc_loss=0.05216, over 944034.00 frames. +2024-08-26 23:41:25,230 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12932MB +2024-08-26 23:41:30,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=185856.0, ans=0.2 +2024-08-26 23:41:31,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185856.0, ans=0.1 +2024-08-26 23:41:47,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=185909.33333333334, ans=0.125 +2024-08-26 23:42:30,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=185962.66666666666, ans=0.125 +2024-08-26 23:42:34,504 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.661e+02 1.811e+02 2.041e+02 3.400e+02, threshold=3.623e+02, percent-clipped=0.0 +2024-08-26 23:42:54,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=185962.66666666666, ans=0.125 +2024-08-26 23:49:22,219 INFO [train.py:1114] (2/4) Epoch 15, batch 50, loss[loss=0.1616, simple_loss=0.237, pruned_loss=0.0307, ctc_loss=0.06186, over 19709.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2732, pruned_loss=0.04841, ctc_loss=0.09128, over 844718.33 frames. 
], batch size: 47, lr: 1.02e-02, grad_scale: 16.0 +2024-08-26 23:51:28,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=186122.66666666666, ans=0.2 +2024-08-26 23:52:25,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=186282.66666666666, ans=0.2 +2024-08-26 23:54:36,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=186282.66666666666, ans=0.0 +2024-08-26 23:57:10,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186336.0, ans=0.125 +2024-08-26 23:58:27,657 INFO [train.py:1114] (2/4) Epoch 15, batch 100, loss[loss=0.1865, simple_loss=0.264, pruned_loss=0.03968, ctc_loss=0.07434, over 19719.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.272, pruned_loss=0.04752, ctc_loss=0.08887, over 1498447.04 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 16.0 +2024-08-27 00:01:15,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=186389.33333333334, ans=0.125 +2024-08-27 00:01:16,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=186389.33333333334, ans=0.0 +2024-08-27 00:06:06,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186442.66666666666, ans=0.1 +2024-08-27 00:06:52,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.493e+02 1.771e+02 2.166e+02 3.428e+02, threshold=3.543e+02, percent-clipped=0.0 +2024-08-27 00:09:51,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.44 vs. limit=22.5 +2024-08-27 00:12:03,611 INFO [train.py:1114] (2/4) Epoch 15, batch 150, loss[loss=0.1686, simple_loss=0.2316, pruned_loss=0.0381, ctc_loss=0.07342, over 19674.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.269, pruned_loss=0.04622, ctc_loss=0.08659, over 2027404.41 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:12:25,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=186709.33333333334, ans=0.125 +2024-08-27 00:14:14,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.50 vs. limit=15.0 +2024-08-27 00:14:16,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=186816.0, ans=0.125 +2024-08-27 00:17:06,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=186869.33333333334, ans=0.125 +2024-08-27 00:17:10,140 INFO [train.py:1114] (2/4) Epoch 15, batch 200, loss[loss=0.2062, simple_loss=0.2775, pruned_loss=0.04897, ctc_loss=0.09251, over 18235.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2679, pruned_loss=0.04581, ctc_loss=0.08562, over 2435581.35 frames. ], batch size: 85, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:17:23,516 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.37 vs. 
limit=15.0 +2024-08-27 00:17:28,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=186922.66666666666, ans=0.125 +2024-08-27 00:17:44,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=186976.0, ans=0.025 +2024-08-27 00:17:48,515 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:17:55,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=187029.33333333334, ans=0.0 +2024-08-27 00:17:59,319 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.435e+02 1.602e+02 1.959e+02 3.588e+02, threshold=3.205e+02, percent-clipped=1.0 +2024-08-27 00:18:47,704 INFO [train.py:1114] (2/4) Epoch 15, batch 250, loss[loss=0.2121, simple_loss=0.2901, pruned_loss=0.04909, ctc_loss=0.08964, over 19383.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2675, pruned_loss=0.04512, ctc_loss=0.08446, over 2755509.28 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:19:57,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.56 vs. limit=15.0 +2024-08-27 00:20:00,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187242.66666666666, ans=0.125 +2024-08-27 00:20:04,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=187242.66666666666, ans=0.0 +2024-08-27 00:20:11,823 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:20:26,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=187349.33333333334, ans=0.125 +2024-08-27 00:20:34,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=187402.66666666666, ans=0.0 +2024-08-27 00:20:36,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=187402.66666666666, ans=0.0 +2024-08-27 00:21:12,350 INFO [train.py:1114] (2/4) Epoch 15, batch 300, loss[loss=0.1985, simple_loss=0.279, pruned_loss=0.04273, ctc_loss=0.08153, over 19522.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2671, pruned_loss=0.04479, ctc_loss=0.0839, over 3000361.20 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:22:03,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.482e+02 1.757e+02 2.250e+02 4.561e+02, threshold=3.514e+02, percent-clipped=7.0 +2024-08-27 00:22:12,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=187616.0, ans=0.0 +2024-08-27 00:22:18,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. limit=6.0 +2024-08-27 00:22:31,072 INFO [train.py:1114] (2/4) Epoch 15, batch 350, loss[loss=0.1825, simple_loss=0.2481, pruned_loss=0.04273, ctc_loss=0.0784, over 19738.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2679, pruned_loss=0.04528, ctc_loss=0.08458, over 3190095.41 frames. 
], batch size: 48, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:25:01,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.08 vs. limit=15.0 +2024-08-27 00:25:01,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=187829.33333333334, ans=10.0 +2024-08-27 00:25:05,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=187829.33333333334, ans=0.125 +2024-08-27 00:25:07,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-08-27 00:25:08,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=187882.66666666666, ans=0.0 +2024-08-27 00:25:08,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=187882.66666666666, ans=0.125 +2024-08-27 00:25:10,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=187882.66666666666, ans=0.2 +2024-08-27 00:25:12,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.80 vs. limit=6.0 +2024-08-27 00:25:21,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=15.0 +2024-08-27 00:25:24,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187989.33333333334, ans=0.125 +2024-08-27 00:25:25,426 INFO [train.py:1114] (2/4) Epoch 15, batch 400, loss[loss=0.2106, simple_loss=0.2834, pruned_loss=0.05005, ctc_loss=0.09414, over 19479.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2673, pruned_loss=0.04486, ctc_loss=0.08382, over 3342533.88 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:25:38,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=188042.66666666666, ans=0.025 +2024-08-27 00:25:46,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.414e+02 1.733e+02 2.120e+02 3.671e+02, threshold=3.466e+02, percent-clipped=1.0 +2024-08-27 00:26:33,894 INFO [train.py:1114] (2/4) Epoch 15, batch 450, loss[loss=0.2047, simple_loss=0.2858, pruned_loss=0.04515, ctc_loss=0.08293, over 19616.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2677, pruned_loss=0.04532, ctc_loss=0.08464, over 3451192.28 frames. 
], batch size: 55, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:26:34,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=188256.0, ans=0.0 +2024-08-27 00:26:35,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=188256.0, ans=0.125 +2024-08-27 00:27:56,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188469.33333333334, ans=0.125 +2024-08-27 00:27:58,672 INFO [train.py:1114] (2/4) Epoch 15, batch 500, loss[loss=0.2003, simple_loss=0.2813, pruned_loss=0.04281, ctc_loss=0.08435, over 19721.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2668, pruned_loss=0.04493, ctc_loss=0.08388, over 3546389.06 frames. ], batch size: 63, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:28:14,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188576.0, ans=0.1 +2024-08-27 00:28:25,430 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.423e+02 1.716e+02 2.052e+02 3.766e+02, threshold=3.431e+02, percent-clipped=1.0 +2024-08-27 00:28:27,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=188629.33333333334, ans=0.0 +2024-08-27 00:28:27,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=188629.33333333334, ans=0.035 +2024-08-27 00:28:27,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188629.33333333334, ans=0.125 +2024-08-27 00:28:50,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=188736.0, ans=0.125 +2024-08-27 00:28:53,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=188736.0, ans=0.125 +2024-08-27 00:28:56,803 INFO [train.py:1114] (2/4) Epoch 15, batch 550, loss[loss=0.2116, simple_loss=0.2823, pruned_loss=0.0516, ctc_loss=0.09418, over 19235.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.267, pruned_loss=0.04502, ctc_loss=0.08405, over 3607735.30 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:29:42,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-08-27 00:30:02,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188949.33333333334, ans=0.0 +2024-08-27 00:30:05,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=15.0 +2024-08-27 00:30:17,662 INFO [train.py:1114] (2/4) Epoch 15, batch 600, loss[loss=0.2191, simple_loss=0.2949, pruned_loss=0.05248, ctc_loss=0.09578, over 19372.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.267, pruned_loss=0.04492, ctc_loss=0.08386, over 3666449.23 frames. 
], batch size: 67, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:30:28,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189056.0, ans=0.1 +2024-08-27 00:31:03,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.37 vs. limit=15.0 +2024-08-27 00:31:05,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.77 vs. limit=15.0 +2024-08-27 00:31:08,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.86 vs. limit=15.0 +2024-08-27 00:31:14,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=189109.33333333334, ans=0.125 +2024-08-27 00:31:18,194 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.468e+02 1.719e+02 2.297e+02 4.329e+02, threshold=3.438e+02, percent-clipped=2.0 +2024-08-27 00:31:18,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189162.66666666666, ans=0.1 +2024-08-27 00:31:23,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=189162.66666666666, ans=0.125 +2024-08-27 00:31:52,653 INFO [train.py:1114] (2/4) Epoch 15, batch 650, loss[loss=0.1949, simple_loss=0.2691, pruned_loss=0.04384, ctc_loss=0.08271, over 19753.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2664, pruned_loss=0.04456, ctc_loss=0.08325, over 3717593.53 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:31:53,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0 +2024-08-27 00:32:01,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189376.0, ans=0.125 +2024-08-27 00:32:22,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189482.66666666666, ans=0.1 +2024-08-27 00:32:31,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189536.0, ans=0.0 +2024-08-27 00:33:03,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.35 vs. limit=15.0 +2024-08-27 00:33:04,263 INFO [train.py:1114] (2/4) Epoch 15, batch 700, loss[loss=0.1889, simple_loss=0.2659, pruned_loss=0.04062, ctc_loss=0.0766, over 19734.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2672, pruned_loss=0.04489, ctc_loss=0.08384, over 3749162.36 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:33:14,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.92 vs. 
limit=12.0 +2024-08-27 00:33:16,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189642.66666666666, ans=0.125 +2024-08-27 00:33:16,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189642.66666666666, ans=0.125 +2024-08-27 00:34:00,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189642.66666666666, ans=0.125 +2024-08-27 00:34:01,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189696.0, ans=0.1 +2024-08-27 00:34:03,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.548e+02 1.878e+02 2.334e+02 4.066e+02, threshold=3.756e+02, percent-clipped=4.0 +2024-08-27 00:35:01,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=189749.33333333334, ans=0.07 +2024-08-27 00:35:17,149 INFO [train.py:1114] (2/4) Epoch 15, batch 750, loss[loss=0.1991, simple_loss=0.2769, pruned_loss=0.04361, ctc_loss=0.08538, over 19494.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2664, pruned_loss=0.04469, ctc_loss=0.08354, over 3775088.89 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:35:19,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=189856.0, ans=0.0 +2024-08-27 00:35:20,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=189856.0, ans=0.0 +2024-08-27 00:35:21,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=189856.0, ans=0.2 +2024-08-27 00:35:27,818 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:35:32,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189909.33333333334, ans=0.0 +2024-08-27 00:35:32,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=189909.33333333334, ans=22.5 +2024-08-27 00:35:35,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=189962.66666666666, ans=0.125 +2024-08-27 00:36:06,220 INFO [train.py:1114] (2/4) Epoch 15, batch 800, loss[loss=0.1771, simple_loss=0.246, pruned_loss=0.03938, ctc_loss=0.07361, over 19409.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2665, pruned_loss=0.04474, ctc_loss=0.08365, over 3796237.68 frames. 
], batch size: 48, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:36:15,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=190122.66666666666, ans=0.2 +2024-08-27 00:36:29,657 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.516e+02 1.778e+02 2.217e+02 3.654e+02, threshold=3.555e+02, percent-clipped=0.0 +2024-08-27 00:36:31,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=190229.33333333334, ans=0.0 +2024-08-27 00:36:54,849 INFO [train.py:1114] (2/4) Epoch 15, batch 850, loss[loss=0.2046, simple_loss=0.2894, pruned_loss=0.04376, ctc_loss=0.08047, over 19641.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2663, pruned_loss=0.04453, ctc_loss=0.08312, over 3814513.23 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:37:07,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=190442.66666666666, ans=15.0 +2024-08-27 00:37:12,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190442.66666666666, ans=0.1 +2024-08-27 00:37:15,478 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.01 vs. limit=15.0 +2024-08-27 00:37:19,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=190496.0, ans=0.125 +2024-08-27 00:37:24,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=190549.33333333334, ans=6.0 +2024-08-27 00:37:46,585 INFO [train.py:1114] (2/4) Epoch 15, batch 900, loss[loss=0.1828, simple_loss=0.2481, pruned_loss=0.04265, ctc_loss=0.08069, over 19803.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.267, pruned_loss=0.04502, ctc_loss=0.08395, over 3818417.52 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:37:50,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=190656.0, ans=0.125 +2024-08-27 00:37:57,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.29 vs. limit=10.0 +2024-08-27 00:37:58,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.04 vs. limit=22.5 +2024-08-27 00:38:12,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=190762.66666666666, ans=0.125 +2024-08-27 00:38:12,637 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.396e+02 1.546e+02 1.855e+02 3.193e+02, threshold=3.091e+02, percent-clipped=0.0 +2024-08-27 00:38:41,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190922.66666666666, ans=0.125 +2024-08-27 00:38:42,123 INFO [train.py:1114] (2/4) Epoch 15, batch 950, loss[loss=0.1675, simple_loss=0.2396, pruned_loss=0.03442, ctc_loss=0.06637, over 19507.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2677, pruned_loss=0.04535, ctc_loss=0.08471, over 3821132.33 frames. 
], batch size: 49, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:38:49,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=190922.66666666666, ans=0.0 +2024-08-27 00:38:50,359 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.39 vs. limit=22.5 +2024-08-27 00:38:56,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=190976.0, ans=0.0 +2024-08-27 00:39:14,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=191029.33333333334, ans=0.5 +2024-08-27 00:39:15,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.53 vs. limit=12.0 +2024-08-27 00:39:17,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.34 vs. limit=15.0 +2024-08-27 00:39:37,075 INFO [train.py:1114] (2/4) Epoch 15, batch 1000, loss[loss=0.156, simple_loss=0.2425, pruned_loss=0.02482, ctc_loss=0.04963, over 19856.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.268, pruned_loss=0.04534, ctc_loss=0.08462, over 3818637.71 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:39:45,890 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:39:56,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=191296.0, ans=10.0 +2024-08-27 00:40:00,299 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.403e+02 1.586e+02 1.924e+02 3.101e+02, threshold=3.172e+02, percent-clipped=1.0 +2024-08-27 00:40:06,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-08-27 00:40:08,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=191349.33333333334, ans=0.0 +2024-08-27 00:40:25,464 INFO [train.py:1114] (2/4) Epoch 15, batch 1050, loss[loss=0.2039, simple_loss=0.2798, pruned_loss=0.04618, ctc_loss=0.08899, over 19838.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.267, pruned_loss=0.04512, ctc_loss=0.08442, over 3825181.74 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:40:47,894 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.62 vs. limit=15.0 +2024-08-27 00:40:52,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=191562.66666666666, ans=0.125 +2024-08-27 00:40:52,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=191562.66666666666, ans=0.125 +2024-08-27 00:41:07,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.56 vs. limit=22.5 +2024-08-27 00:41:14,675 INFO [train.py:1114] (2/4) Epoch 15, batch 1100, loss[loss=0.1709, simple_loss=0.2506, pruned_loss=0.0332, ctc_loss=0.0621, over 19591.00 frames. 
], tot_loss[loss=0.1951, simple_loss=0.2667, pruned_loss=0.04491, ctc_loss=0.08403, over 3832078.55 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:41:24,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=191776.0, ans=0.1 +2024-08-27 00:41:27,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=191776.0, ans=0.0 +2024-08-27 00:41:29,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=191776.0, ans=0.125 +2024-08-27 00:41:34,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191829.33333333334, ans=0.125 +2024-08-27 00:41:36,206 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.518e+02 1.811e+02 2.066e+02 3.149e+02, threshold=3.622e+02, percent-clipped=0.0 +2024-08-27 00:42:06,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=191936.0, ans=0.0 +2024-08-27 00:42:06,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=191936.0, ans=0.07 +2024-08-27 00:42:07,801 INFO [train.py:1114] (2/4) Epoch 15, batch 1150, loss[loss=0.194, simple_loss=0.2656, pruned_loss=0.04515, ctc_loss=0.08035, over 19580.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.267, pruned_loss=0.04513, ctc_loss=0.08434, over 3829412.36 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0 +2024-08-27 00:42:19,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=191989.33333333334, ans=0.125 +2024-08-27 00:42:22,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191989.33333333334, ans=0.0 +2024-08-27 00:42:24,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192042.66666666666, ans=0.1 +2024-08-27 00:42:34,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.10 vs. limit=15.0 +2024-08-27 00:42:48,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.70 vs. limit=15.0 +2024-08-27 00:42:53,516 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.32 vs. limit=22.5 +2024-08-27 00:43:04,220 INFO [train.py:1114] (2/4) Epoch 15, batch 1200, loss[loss=0.1878, simple_loss=0.2641, pruned_loss=0.0406, ctc_loss=0.07579, over 19831.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2676, pruned_loss=0.04518, ctc_loss=0.08463, over 3824886.04 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0 +2024-08-27 00:43:11,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.85 vs. 
limit=15.0 +2024-08-27 00:43:11,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=192256.0, ans=0.0 +2024-08-27 00:44:32,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.478e+02 1.729e+02 2.216e+02 4.347e+02, threshold=3.458e+02, percent-clipped=1.0 +2024-08-27 00:45:31,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192416.0, ans=0.1 +2024-08-27 00:45:40,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=192416.0, ans=0.07 +2024-08-27 00:45:57,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=192469.33333333334, ans=0.0 +2024-08-27 00:46:12,627 INFO [train.py:1114] (2/4) Epoch 15, batch 1250, loss[loss=0.2152, simple_loss=0.2836, pruned_loss=0.05329, ctc_loss=0.1008, over 19539.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2678, pruned_loss=0.04513, ctc_loss=0.08445, over 3842529.22 frames. ], batch size: 61, lr: 1.00e-02, grad_scale: 32.0 +2024-08-27 00:46:29,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=192522.66666666666, ans=0.0 +2024-08-27 00:46:29,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192522.66666666666, ans=0.1 +2024-08-27 00:46:44,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.91 vs. limit=10.0 +2024-08-27 00:47:19,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=192576.0, ans=0.125 +2024-08-27 00:47:25,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=192629.33333333334, ans=0.2 +2024-08-27 00:48:26,342 INFO [train.py:1114] (2/4) Epoch 15, batch 1300, loss[loss=0.2168, simple_loss=0.2867, pruned_loss=0.05411, ctc_loss=0.09668, over 18994.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2669, pruned_loss=0.04456, ctc_loss=0.08355, over 3846946.16 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0 +2024-08-27 00:48:28,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.05 vs. limit=15.0 +2024-08-27 00:48:34,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=192789.33333333334, ans=0.07 +2024-08-27 00:49:15,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-08-27 00:49:50,649 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.421e+02 1.669e+02 2.080e+02 3.869e+02, threshold=3.339e+02, percent-clipped=2.0 +2024-08-27 00:50:19,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=193002.66666666666, ans=0.2 +2024-08-27 00:50:43,611 INFO [train.py:1114] (2/4) Epoch 15, batch 1350, loss[loss=0.1905, simple_loss=0.2685, pruned_loss=0.04125, ctc_loss=0.07521, over 19791.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2661, pruned_loss=0.0442, ctc_loss=0.08271, over 3857593.22 frames. 
], batch size: 54, lr: 9.98e-03, grad_scale: 32.0 +2024-08-27 00:50:46,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=193056.0, ans=0.015 +2024-08-27 00:51:44,518 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:51:51,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=193109.33333333334, ans=0.025 +2024-08-27 00:52:37,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=193216.0, ans=0.125 +2024-08-27 00:52:44,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=193216.0, ans=0.2 +2024-08-27 00:53:11,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.49 vs. limit=22.5 +2024-08-27 00:53:19,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193269.33333333334, ans=0.125 +2024-08-27 00:53:23,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=193322.66666666666, ans=0.025 +2024-08-27 00:53:24,203 INFO [train.py:1114] (2/4) Epoch 15, batch 1400, loss[loss=0.1659, simple_loss=0.2386, pruned_loss=0.03422, ctc_loss=0.06189, over 19685.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2657, pruned_loss=0.04419, ctc_loss=0.08254, over 3864412.35 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0 +2024-08-27 00:53:28,240 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:53:28,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0 +2024-08-27 00:53:57,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.449e+02 1.647e+02 2.125e+02 3.032e+02, threshold=3.293e+02, percent-clipped=0.0 +2024-08-27 00:54:16,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=193429.33333333334, ans=0.0 +2024-08-27 00:54:18,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=193429.33333333334, ans=0.0 +2024-08-27 00:55:08,675 INFO [train.py:1114] (2/4) Epoch 15, batch 1450, loss[loss=0.2255, simple_loss=0.2912, pruned_loss=0.05828, ctc_loss=0.1081, over 19631.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2666, pruned_loss=0.04457, ctc_loss=0.08355, over 3862827.17 frames. ], batch size: 63, lr: 9.97e-03, grad_scale: 32.0 +2024-08-27 00:55:38,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193642.66666666666, ans=0.1 +2024-08-27 00:56:38,225 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:56:39,795 INFO [train.py:1114] (2/4) Epoch 15, batch 1500, loss[loss=0.219, simple_loss=0.2973, pruned_loss=0.05061, ctc_loss=0.09884, over 19600.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2671, pruned_loss=0.04471, ctc_loss=0.0837, over 3862635.72 frames. 
], batch size: 57, lr: 9.96e-03, grad_scale: 32.0 +2024-08-27 00:56:40,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=193856.0, ans=0.0 +2024-08-27 00:57:49,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.73 vs. limit=15.0 +2024-08-27 00:57:51,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=193909.33333333334, ans=0.125 +2024-08-27 00:57:54,062 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 00:57:57,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=193962.66666666666, ans=0.125 +2024-08-27 00:58:20,412 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.504e+02 1.720e+02 2.138e+02 3.076e+02, threshold=3.439e+02, percent-clipped=0.0 +2024-08-27 00:58:21,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=12.0 +2024-08-27 00:58:25,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.32 vs. limit=15.0 +2024-08-27 00:58:35,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=194016.0, ans=0.0 +2024-08-27 00:58:50,634 INFO [train.py:1114] (2/4) Epoch 15, batch 1550, loss[loss=0.2174, simple_loss=0.287, pruned_loss=0.05403, ctc_loss=0.09903, over 19585.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2674, pruned_loss=0.04495, ctc_loss=0.08418, over 3846467.81 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0 +2024-08-27 00:59:03,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=194176.0, ans=0.0 +2024-08-27 00:59:18,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-27 00:59:19,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-27 00:59:20,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=194282.66666666666, ans=0.125 +2024-08-27 00:59:37,710 INFO [train.py:1114] (2/4) Epoch 15, batch 1600, loss[loss=0.1929, simple_loss=0.2697, pruned_loss=0.04175, ctc_loss=0.08155, over 19824.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2674, pruned_loss=0.04495, ctc_loss=0.08424, over 3835679.45 frames. 
], batch size: 57, lr: 9.95e-03, grad_scale: 32.0 +2024-08-27 00:59:51,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=194442.66666666666, ans=0.025 +2024-08-27 00:59:52,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=194442.66666666666, ans=0.125 +2024-08-27 01:00:17,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.455e+02 1.710e+02 2.060e+02 3.831e+02, threshold=3.419e+02, percent-clipped=3.0 +2024-08-27 01:00:17,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=194496.0, ans=15.0 +2024-08-27 01:00:37,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194549.33333333334, ans=0.1 +2024-08-27 01:00:44,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=194602.66666666666, ans=0.09899494936611666 +2024-08-27 01:00:50,778 INFO [train.py:1114] (2/4) Epoch 15, batch 1650, loss[loss=0.2059, simple_loss=0.285, pruned_loss=0.04613, ctc_loss=0.08623, over 19655.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2668, pruned_loss=0.04483, ctc_loss=0.08404, over 3831237.10 frames. ], batch size: 59, lr: 9.94e-03, grad_scale: 16.0 +2024-08-27 01:01:23,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=194709.33333333334, ans=0.125 +2024-08-27 01:01:24,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=194709.33333333334, ans=0.125 +2024-08-27 01:01:29,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=194762.66666666666, ans=0.0 +2024-08-27 01:01:30,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194762.66666666666, ans=0.125 +2024-08-27 01:01:34,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=194762.66666666666, ans=0.2 +2024-08-27 01:01:46,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=194816.0, ans=0.015 +2024-08-27 01:02:11,996 INFO [train.py:1114] (2/4) Epoch 15, batch 1700, loss[loss=0.1659, simple_loss=0.2319, pruned_loss=0.03645, ctc_loss=0.06738, over 19696.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2664, pruned_loss=0.04443, ctc_loss=0.08321, over 3845903.82 frames. 
], batch size: 46, lr: 9.94e-03, grad_scale: 16.0 +2024-08-27 01:02:25,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=194976.0, ans=0.0 +2024-08-27 01:02:27,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=194976.0, ans=0.125 +2024-08-27 01:02:28,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=194976.0, ans=0.125 +2024-08-27 01:02:28,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=194976.0, ans=0.0 +2024-08-27 01:02:36,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.414e+02 1.817e+02 2.372e+02 3.799e+02, threshold=3.634e+02, percent-clipped=1.0 +2024-08-27 01:02:42,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=195082.66666666666, ans=0.0 +2024-08-27 01:02:51,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195136.0, ans=0.1 +2024-08-27 01:03:00,186 INFO [train.py:1114] (2/4) Epoch 15, batch 1750, loss[loss=0.1646, simple_loss=0.2364, pruned_loss=0.03429, ctc_loss=0.06041, over 19637.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2659, pruned_loss=0.04413, ctc_loss=0.08262, over 3851631.43 frames. ], batch size: 45, lr: 9.93e-03, grad_scale: 16.0 +2024-08-27 01:03:12,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=195242.66666666666, ans=0.125 +2024-08-27 01:03:12,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=195242.66666666666, ans=0.0 +2024-08-27 01:03:28,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=195349.33333333334, ans=0.07 +2024-08-27 01:03:32,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=195349.33333333334, ans=0.04949747468305833 +2024-08-27 01:03:34,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=195349.33333333334, ans=0.125 +2024-08-27 01:03:49,210 INFO [train.py:1114] (2/4) Epoch 15, batch 1800, loss[loss=0.1908, simple_loss=0.2687, pruned_loss=0.04044, ctc_loss=0.08037, over 19621.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2664, pruned_loss=0.04446, ctc_loss=0.08324, over 3853079.40 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 16.0 +2024-08-27 01:04:34,449 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.516e+02 1.927e+02 2.557e+02 3.874e+02, threshold=3.854e+02, percent-clipped=2.0 +2024-08-27 01:05:41,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=195669.33333333334, ans=0.0 +2024-08-27 01:05:54,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=195722.66666666666, ans=10.0 +2024-08-27 01:05:54,929 INFO [train.py:1114] (2/4) Epoch 15, batch 1850, loss[loss=0.2043, simple_loss=0.2827, pruned_loss=0.04621, ctc_loss=0.08362, over 19593.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.266, pruned_loss=0.04421, ctc_loss=0.08258, over 3856062.68 frames. 
], batch size: 57, lr: 9.92e-03, grad_scale: 16.0 +2024-08-27 01:06:09,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=195722.66666666666, ans=0.125 +2024-08-27 01:06:21,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.51 vs. limit=22.5 +2024-08-27 01:06:37,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=195882.66666666666, ans=0.0 +2024-08-27 01:06:39,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.30 vs. limit=22.5 +2024-08-27 01:06:49,446 INFO [train.py:1114] (2/4) Epoch 15, batch 1900, loss[loss=0.2057, simple_loss=0.2859, pruned_loss=0.0457, ctc_loss=0.08498, over 19673.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2664, pruned_loss=0.04426, ctc_loss=0.08279, over 3860767.44 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 16.0 +2024-08-27 01:07:05,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.80 vs. limit=22.5 +2024-08-27 01:07:07,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=196042.66666666666, ans=0.04949747468305833 +2024-08-27 01:07:43,092 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.422e+02 1.649e+02 2.231e+02 4.535e+02, threshold=3.297e+02, percent-clipped=1.0 +2024-08-27 01:07:50,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196149.33333333334, ans=0.125 +2024-08-27 01:08:04,612 INFO [train.py:1114] (2/4) Epoch 15, batch 1950, loss[loss=0.1742, simple_loss=0.248, pruned_loss=0.03684, ctc_loss=0.06704, over 19582.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2674, pruned_loss=0.04447, ctc_loss=0.08295, over 3870328.65 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 16.0 +2024-08-27 01:08:22,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196362.66666666666, ans=0.125 +2024-08-27 01:08:24,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=196362.66666666666, ans=0.0 +2024-08-27 01:08:25,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196362.66666666666, ans=0.1 +2024-08-27 01:08:49,902 INFO [train.py:1114] (2/4) Epoch 15, batch 2000, loss[loss=0.1687, simple_loss=0.2327, pruned_loss=0.03783, ctc_loss=0.07278, over 19658.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2682, pruned_loss=0.04513, ctc_loss=0.08424, over 3855587.52 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0 +2024-08-27 01:08:50,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.18 vs. 
limit=15.0 +2024-08-27 01:08:54,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196522.66666666666, ans=0.125 +2024-08-27 01:09:39,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=196576.0, ans=0.0 +2024-08-27 01:09:46,685 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.403e+02 1.640e+02 2.044e+02 3.050e+02, threshold=3.279e+02, percent-clipped=0.0 +2024-08-27 01:10:08,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=196736.0, ans=0.125 +2024-08-27 01:10:10,623 INFO [train.py:1114] (2/4) Epoch 15, batch 2050, loss[loss=0.1609, simple_loss=0.2327, pruned_loss=0.0326, ctc_loss=0.05962, over 19698.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2672, pruned_loss=0.04493, ctc_loss=0.08388, over 3851679.79 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0 +2024-08-27 01:10:10,853 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:10:14,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=196789.33333333334, ans=0.0 +2024-08-27 01:10:27,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=196842.66666666666, ans=0.0 +2024-08-27 01:10:43,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.32 vs. limit=15.0 +2024-08-27 01:10:54,825 INFO [train.py:1114] (2/4) Epoch 15, batch 2100, loss[loss=0.2208, simple_loss=0.2844, pruned_loss=0.05689, ctc_loss=0.1082, over 19786.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2667, pruned_loss=0.04479, ctc_loss=0.08347, over 3858826.06 frames. ], batch size: 54, lr: 9.88e-03, grad_scale: 32.0 +2024-08-27 01:11:09,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197056.0, ans=0.125 +2024-08-27 01:11:19,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=197109.33333333334, ans=0.035 +2024-08-27 01:11:20,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=197109.33333333334, ans=0.125 +2024-08-27 01:11:26,629 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.442e+02 1.703e+02 2.065e+02 4.080e+02, threshold=3.406e+02, percent-clipped=2.0 +2024-08-27 01:11:26,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=197162.66666666666, ans=0.2 +2024-08-27 01:11:30,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.44 vs. limit=15.0 +2024-08-27 01:11:43,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.40 vs. limit=12.0 +2024-08-27 01:11:48,545 INFO [train.py:1114] (2/4) Epoch 15, batch 2150, loss[loss=0.1782, simple_loss=0.2497, pruned_loss=0.03849, ctc_loss=0.07442, over 19846.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2661, pruned_loss=0.04464, ctc_loss=0.08312, over 3869447.86 frames. 
], batch size: 52, lr: 9.88e-03, grad_scale: 32.0 +2024-08-27 01:11:50,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=197322.66666666666, ans=0.125 +2024-08-27 01:11:50,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=197322.66666666666, ans=0.0 +2024-08-27 01:11:55,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197322.66666666666, ans=0.125 +2024-08-27 01:12:01,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=197376.0, ans=0.0 +2024-08-27 01:12:05,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=197429.33333333334, ans=0.125 +2024-08-27 01:12:07,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197429.33333333334, ans=0.1 +2024-08-27 01:12:14,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0 +2024-08-27 01:12:16,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.54 vs. limit=10.0 +2024-08-27 01:12:19,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=197482.66666666666, ans=0.125 +2024-08-27 01:12:31,731 INFO [train.py:1114] (2/4) Epoch 15, batch 2200, loss[loss=0.2044, simple_loss=0.28, pruned_loss=0.04698, ctc_loss=0.08724, over 19603.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2661, pruned_loss=0.04455, ctc_loss=0.08288, over 3868541.93 frames. ], batch size: 57, lr: 9.87e-03, grad_scale: 16.0 +2024-08-27 01:12:33,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=12.0 +2024-08-27 01:12:43,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197642.66666666666, ans=0.1 +2024-08-27 01:12:48,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=197696.0, ans=0.025 +2024-08-27 01:12:54,929 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.477e+02 1.816e+02 2.262e+02 3.833e+02, threshold=3.631e+02, percent-clipped=4.0 +2024-08-27 01:13:03,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=197749.33333333334, ans=0.125 +2024-08-27 01:13:03,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0 +2024-08-27 01:13:15,783 INFO [train.py:1114] (2/4) Epoch 15, batch 2250, loss[loss=0.1955, simple_loss=0.2712, pruned_loss=0.04449, ctc_loss=0.07691, over 19601.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2665, pruned_loss=0.04454, ctc_loss=0.08291, over 3867770.91 frames. 
], batch size: 55, lr: 9.87e-03, grad_scale: 16.0 +2024-08-27 01:13:22,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.01 vs. limit=15.0 +2024-08-27 01:13:23,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=197909.33333333334, ans=0.0 +2024-08-27 01:13:29,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=197909.33333333334, ans=0.025 +2024-08-27 01:13:58,197 INFO [train.py:1114] (2/4) Epoch 15, batch 2300, loss[loss=0.1908, simple_loss=0.2674, pruned_loss=0.0412, ctc_loss=0.07966, over 19491.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2657, pruned_loss=0.04444, ctc_loss=0.08283, over 3860747.25 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 16.0 +2024-08-27 01:13:59,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198122.66666666666, ans=0.125 +2024-08-27 01:14:13,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=198176.0, ans=0.0 +2024-08-27 01:15:02,208 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.441e+02 1.617e+02 1.954e+02 3.129e+02, threshold=3.235e+02, percent-clipped=0.0 +2024-08-27 01:15:03,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.71 vs. limit=22.5 +2024-08-27 01:15:12,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=198282.66666666666, ans=0.0 +2024-08-27 01:15:21,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-08-27 01:15:23,092 INFO [train.py:1114] (2/4) Epoch 15, batch 2350, loss[loss=0.1989, simple_loss=0.2705, pruned_loss=0.047, ctc_loss=0.08311, over 19656.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2654, pruned_loss=0.0443, ctc_loss=0.08252, over 3863738.90 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 16.0 +2024-08-27 01:15:36,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=198442.66666666666, ans=0.125 +2024-08-27 01:15:38,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=198442.66666666666, ans=0.025 +2024-08-27 01:15:47,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198496.0, ans=0.125 +2024-08-27 01:15:51,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=198549.33333333334, ans=0.0 +2024-08-27 01:15:55,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=198549.33333333334, ans=0.2 +2024-08-27 01:16:31,884 INFO [train.py:1114] (2/4) Epoch 15, batch 2400, loss[loss=0.2091, simple_loss=0.2842, pruned_loss=0.04956, ctc_loss=0.08706, over 19323.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2679, pruned_loss=0.04525, ctc_loss=0.08398, over 3857244.99 frames. 
], batch size: 71, lr: 9.85e-03, grad_scale: 32.0 +2024-08-27 01:17:16,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198656.0, ans=0.125 +2024-08-27 01:17:17,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198656.0, ans=0.125 +2024-08-27 01:17:25,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=198709.33333333334, ans=0.125 +2024-08-27 01:17:25,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198709.33333333334, ans=0.125 +2024-08-27 01:17:30,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=198762.66666666666, ans=0.0 +2024-08-27 01:17:33,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198762.66666666666, ans=0.125 +2024-08-27 01:17:35,483 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.452e+02 1.605e+02 2.004e+02 3.213e+02, threshold=3.211e+02, percent-clipped=0.0 +2024-08-27 01:17:38,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198816.0, ans=0.125 +2024-08-27 01:17:39,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=198816.0, ans=0.0 +2024-08-27 01:17:39,629 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=15.0 +2024-08-27 01:17:57,659 INFO [train.py:1114] (2/4) Epoch 15, batch 2450, loss[loss=0.267, simple_loss=0.312, pruned_loss=0.08095, ctc_loss=0.1502, over 13574.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2717, pruned_loss=0.04782, ctc_loss=0.08925, over 3730810.64 frames. ], batch size: 140, lr: 9.84e-03, grad_scale: 32.0 +2024-08-27 01:18:21,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=199029.33333333334, ans=0.04949747468305833 +2024-08-27 01:18:38,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=199082.66666666666, ans=0.2 +2024-08-27 01:20:20,964 INFO [train.py:1114] (2/4) Epoch 16, batch 0, loss[loss=0.1899, simple_loss=0.2617, pruned_loss=0.04318, ctc_loss=0.07949, over 19810.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2617, pruned_loss=0.04318, ctc_loss=0.07949, over 19810.00 frames. ], batch size: 49, lr: 9.52e-03, grad_scale: 32.0 +2024-08-27 01:20:20,965 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-27 01:21:17,367 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1744, simple_loss=0.2673, pruned_loss=0.03034, ctc_loss=0.05204, over 944034.00 frames. 
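(Editor's note on the `ScheduledFloat` entries that dominate this log: each one reports the current value, `ans`, of a hyperparameter that the recipe anneals as a function of the running `batch_count`, e.g. dropout probabilities, skip rates, and balancer limits. Below is a minimal sketch of such a piecewise-linear, batch-count-keyed schedule. The class name and `(batch_count, value)` breakpoint interface mirror what the log suggests, but this is an illustrative reimplementation, not the actual `scaling.py` code.)

```python
# Hedged sketch of a batch-count-keyed piecewise-linear schedule, as logged
# by lines like "ScheduledFloat: name=..., batch_count=199130.66, ans=0.125".
# Breakpoints and names here are illustrative assumptions.
import bisect

class ScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, assumed sorted by batch_count.
        self.xs = [p[0] for p in points]
        self.ys = [p[1] for p in points]
        self.batch_count = 0.0  # updated by the training loop each step

    def value(self) -> float:
        x = self.batch_count
        if x <= self.xs[0]:
            return self.ys[0]
        if x >= self.xs[-1]:
            return self.ys[-1]
        # Linear interpolation between the two surrounding breakpoints.
        i = bisect.bisect_right(self.xs, x)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (x - x0) / (x1 - x0)

# e.g. a skip rate annealed from 0.3 to 0.1 over the first 20k batches:
skip_rate = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
skip_rate.batch_count = 199130.67
print(skip_rate.value())  # -> 0.1 (past the last breakpoint)
```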
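(Editor's note on the recurring `optim.py` WARNINGs: each reports the min/25%/50%/75%/max quartiles of recent gradient norms, and in every instance the clipping threshold equals `Clipping_scale` times the reported median, e.g. `2.0 * 1.710e+02 = 3.419e+02` in the first warning above; `percent-clipped` is the share of recent batches whose norm exceeded that threshold. The sketch below reproduces that behaviour under those assumptions; the class and method names are hypothetical, and icefall's ScaledAdam implements the details differently.)

```python
# Hedged sketch of median-based gradient clipping matching the WARNING lines:
# threshold = clipping_scale * median(recent grad norms). Illustrative only.
from collections import deque

import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)    # recent total gradient norms
        self.clipped = deque(maxlen=history)  # whether each batch was clipped

    def clip_(self, params) -> float:
        params = [p for p in params if p.grad is not None]
        # Total L2 norm over all parameter gradients.
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.clipping_scale * median
        self.clipped.append(norm > threshold)
        if norm > threshold:
            for p in params:
                p.grad.mul_(threshold / norm)  # rescale grads in place
        # Return the "percent-clipped" statistic seen in the log.
        return 100.0 * sum(self.clipped) / len(self.clipped)
```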
+2024-08-27 01:21:17,368 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12932MB +2024-08-27 01:21:17,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=199130.66666666666, ans=0.0 +2024-08-27 01:21:24,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=199130.66666666666, ans=0.125 +2024-08-27 01:21:28,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=199184.0, ans=0.0 +2024-08-27 01:21:53,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199290.66666666666, ans=0.1 +2024-08-27 01:21:54,901 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.674e+02 1.811e+02 2.106e+02 3.737e+02, threshold=3.622e+02, percent-clipped=2.0 +2024-08-27 01:22:02,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=199344.0, ans=0.125 +2024-08-27 01:22:07,228 INFO [train.py:1114] (2/4) Epoch 16, batch 50, loss[loss=0.1703, simple_loss=0.2482, pruned_loss=0.0327, ctc_loss=0.06741, over 19693.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2695, pruned_loss=0.04555, ctc_loss=0.08531, over 845655.20 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0 +2024-08-27 01:22:07,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=199397.33333333334, ans=0.125 +2024-08-27 01:22:31,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=199504.0, ans=0.0 +2024-08-27 01:22:43,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.53 vs. limit=15.0 +2024-08-27 01:22:44,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=199610.66666666666, ans=0.0 +2024-08-27 01:22:53,632 INFO [train.py:1114] (2/4) Epoch 16, batch 100, loss[loss=0.1708, simple_loss=0.2448, pruned_loss=0.03496, ctc_loss=0.067, over 19733.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2697, pruned_loss=0.04524, ctc_loss=0.08499, over 1500622.48 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0 +2024-08-27 01:23:11,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=199717.33333333334, ans=0.125 +2024-08-27 01:23:14,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199717.33333333334, ans=0.1 +2024-08-27 01:23:19,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=199770.66666666666, ans=0.125 +2024-08-27 01:23:33,428 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.434e+02 1.536e+02 1.885e+02 3.287e+02, threshold=3.072e+02, percent-clipped=0.0 +2024-08-27 01:23:45,322 INFO [train.py:1114] (2/4) Epoch 16, batch 150, loss[loss=0.1681, simple_loss=0.2343, pruned_loss=0.0372, ctc_loss=0.06885, over 19735.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2674, pruned_loss=0.04444, ctc_loss=0.08331, over 2028424.02 frames. 
], batch size: 47, lr: 9.50e-03, grad_scale: 32.0 +2024-08-27 01:23:52,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=199930.66666666666, ans=0.2 +2024-08-27 01:24:06,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=200037.33333333334, ans=0.125 +2024-08-27 01:24:07,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.84 vs. limit=22.5 +2024-08-27 01:24:09,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=200037.33333333334, ans=0.2 +2024-08-27 01:24:28,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200144.0, ans=0.1 +2024-08-27 01:24:35,673 INFO [train.py:1114] (2/4) Epoch 16, batch 200, loss[loss=0.2084, simple_loss=0.2823, pruned_loss=0.04758, ctc_loss=0.09848, over 18244.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2667, pruned_loss=0.04421, ctc_loss=0.08261, over 2435460.16 frames. ], batch size: 85, lr: 9.49e-03, grad_scale: 32.0 +2024-08-27 01:24:40,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200197.33333333334, ans=0.1 +2024-08-27 01:25:14,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.526e+02 1.826e+02 2.235e+02 3.925e+02, threshold=3.652e+02, percent-clipped=6.0 +2024-08-27 01:25:14,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=200357.33333333334, ans=0.0 +2024-08-27 01:25:18,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=200410.66666666666, ans=0.07 +2024-08-27 01:25:52,467 INFO [train.py:1114] (2/4) Epoch 16, batch 250, loss[loss=0.2132, simple_loss=0.2931, pruned_loss=0.04945, ctc_loss=0.08618, over 19404.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.267, pruned_loss=0.04428, ctc_loss=0.08246, over 2755380.50 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0 +2024-08-27 01:26:02,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=200464.0, ans=0.0 +2024-08-27 01:26:07,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=200517.33333333334, ans=0.025 +2024-08-27 01:26:12,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.73 vs. limit=15.0 +2024-08-27 01:26:34,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=200624.0, ans=0.2 +2024-08-27 01:26:35,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=200624.0, ans=0.125 +2024-08-27 01:26:46,536 INFO [train.py:1114] (2/4) Epoch 16, batch 300, loss[loss=0.2154, simple_loss=0.2943, pruned_loss=0.04983, ctc_loss=0.09211, over 19539.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2653, pruned_loss=0.04346, ctc_loss=0.0812, over 3000463.33 frames. 
], batch size: 61, lr: 9.48e-03, grad_scale: 32.0 +2024-08-27 01:26:53,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200730.66666666666, ans=0.1 +2024-08-27 01:27:22,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.66 vs. limit=22.5 +2024-08-27 01:27:22,580 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.450e+02 1.677e+02 2.025e+02 3.129e+02, threshold=3.354e+02, percent-clipped=0.0 +2024-08-27 01:27:30,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.10 vs. limit=22.5 +2024-08-27 01:27:36,612 INFO [train.py:1114] (2/4) Epoch 16, batch 350, loss[loss=0.181, simple_loss=0.2478, pruned_loss=0.04058, ctc_loss=0.08246, over 19766.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2655, pruned_loss=0.04337, ctc_loss=0.08099, over 3190334.08 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0 +2024-08-27 01:27:50,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.33 vs. limit=22.5 +2024-08-27 01:27:57,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.28 vs. limit=15.0 +2024-08-27 01:28:12,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.68 vs. limit=8.0 +2024-08-27 01:28:16,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=201210.66666666666, ans=0.04949747468305833 +2024-08-27 01:28:24,280 INFO [train.py:1114] (2/4) Epoch 16, batch 400, loss[loss=0.1902, simple_loss=0.2721, pruned_loss=0.04022, ctc_loss=0.06958, over 19496.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2649, pruned_loss=0.04317, ctc_loss=0.0806, over 3342254.71 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0 +2024-08-27 01:28:40,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=201317.33333333334, ans=0.0 +2024-08-27 01:28:43,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=201370.66666666666, ans=0.0 +2024-08-27 01:28:44,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=201370.66666666666, ans=0.0 +2024-08-27 01:28:56,074 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:28:58,538 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.444e+02 1.663e+02 2.108e+02 3.293e+02, threshold=3.326e+02, percent-clipped=0.0 +2024-08-27 01:29:00,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201477.33333333334, ans=0.1 +2024-08-27 01:29:02,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.13 vs. limit=10.0 +2024-08-27 01:29:10,814 INFO [train.py:1114] (2/4) Epoch 16, batch 450, loss[loss=0.1802, simple_loss=0.2672, pruned_loss=0.03353, ctc_loss=0.0653, over 19604.00 frames. 
], tot_loss[loss=0.1918, simple_loss=0.2653, pruned_loss=0.04312, ctc_loss=0.08042, over 3451391.03 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0 +2024-08-27 01:29:42,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=201690.66666666666, ans=0.2 +2024-08-27 01:29:45,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=201690.66666666666, ans=0.2 +2024-08-27 01:30:00,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=201744.0, ans=0.0 +2024-08-27 01:30:01,622 INFO [train.py:1114] (2/4) Epoch 16, batch 500, loss[loss=0.1918, simple_loss=0.2684, pruned_loss=0.04165, ctc_loss=0.07979, over 19690.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2643, pruned_loss=0.04282, ctc_loss=0.08008, over 3546751.85 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0 +2024-08-27 01:30:02,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0 +2024-08-27 01:30:11,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=201850.66666666666, ans=0.0 +2024-08-27 01:30:20,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201850.66666666666, ans=0.125 +2024-08-27 01:30:35,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201957.33333333334, ans=0.125 +2024-08-27 01:30:39,488 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.484e+02 1.746e+02 2.096e+02 4.072e+02, threshold=3.492e+02, percent-clipped=1.0 +2024-08-27 01:30:49,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=202010.66666666666, ans=0.125 +2024-08-27 01:30:51,379 INFO [train.py:1114] (2/4) Epoch 16, batch 550, loss[loss=0.2148, simple_loss=0.2854, pruned_loss=0.05242, ctc_loss=0.09842, over 19263.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2652, pruned_loss=0.04329, ctc_loss=0.08099, over 3607782.23 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0 +2024-08-27 01:31:02,098 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0 +2024-08-27 01:31:07,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=202117.33333333334, ans=0.025 +2024-08-27 01:31:13,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=202170.66666666666, ans=0.0 +2024-08-27 01:31:14,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=202170.66666666666, ans=10.0 +2024-08-27 01:31:29,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.21 vs. 
limit=15.0 +2024-08-27 01:31:37,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=202330.66666666666, ans=0.0 +2024-08-27 01:31:37,731 INFO [train.py:1114] (2/4) Epoch 16, batch 600, loss[loss=0.1913, simple_loss=0.2656, pruned_loss=0.0419, ctc_loss=0.08319, over 19406.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2653, pruned_loss=0.04347, ctc_loss=0.08136, over 3666050.09 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0 +2024-08-27 01:31:42,858 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.31 vs. limit=22.5 +2024-08-27 01:31:49,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=202384.0, ans=0.125 +2024-08-27 01:31:59,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=202437.33333333334, ans=0.0 +2024-08-27 01:32:08,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=202490.66666666666, ans=0.2 +2024-08-27 01:32:12,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=202490.66666666666, ans=0.0 +2024-08-27 01:32:14,253 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.474e+02 1.879e+02 2.462e+02 5.922e+02, threshold=3.759e+02, percent-clipped=13.0 +2024-08-27 01:32:24,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=202544.0, ans=0.0 +2024-08-27 01:32:26,171 INFO [train.py:1114] (2/4) Epoch 16, batch 650, loss[loss=0.1889, simple_loss=0.2621, pruned_loss=0.04118, ctc_loss=0.08328, over 19768.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2645, pruned_loss=0.04323, ctc_loss=0.08079, over 3716557.92 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0 +2024-08-27 01:32:31,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=202597.33333333334, ans=0.0 +2024-08-27 01:32:33,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=202597.33333333334, ans=0.125 +2024-08-27 01:32:52,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.95 vs. limit=15.0 +2024-08-27 01:32:57,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.17 vs. limit=22.5 +2024-08-27 01:33:11,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=202810.66666666666, ans=0.125 +2024-08-27 01:33:18,145 INFO [train.py:1114] (2/4) Epoch 16, batch 700, loss[loss=0.1941, simple_loss=0.2649, pruned_loss=0.04413, ctc_loss=0.08747, over 19714.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2644, pruned_loss=0.0431, ctc_loss=0.08051, over 3747994.16 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0 +2024-08-27 01:33:20,384 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.94 vs. 
limit=22.5 +2024-08-27 01:33:22,912 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:33:28,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202917.33333333334, ans=0.125 +2024-08-27 01:33:43,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-08-27 01:33:46,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=203024.0, ans=0.0 +2024-08-27 01:33:50,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.46 vs. limit=10.0 +2024-08-27 01:33:50,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=15.0 +2024-08-27 01:33:52,576 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.460e+02 1.707e+02 2.152e+02 4.812e+02, threshold=3.413e+02, percent-clipped=3.0 +2024-08-27 01:33:56,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.18 vs. limit=15.0 +2024-08-27 01:33:59,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=203077.33333333334, ans=0.125 +2024-08-27 01:33:59,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.61 vs. limit=15.0 +2024-08-27 01:34:04,700 INFO [train.py:1114] (2/4) Epoch 16, batch 750, loss[loss=0.1931, simple_loss=0.2693, pruned_loss=0.0423, ctc_loss=0.08062, over 19509.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2638, pruned_loss=0.043, ctc_loss=0.0804, over 3774184.24 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 32.0 +2024-08-27 01:34:14,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=203184.0, ans=0.05 +2024-08-27 01:34:41,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=203344.0, ans=0.125 +2024-08-27 01:34:57,349 INFO [train.py:1114] (2/4) Epoch 16, batch 800, loss[loss=0.1702, simple_loss=0.2408, pruned_loss=0.0363, ctc_loss=0.06773, over 19413.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.264, pruned_loss=0.0432, ctc_loss=0.0809, over 3795165.12 frames. ], batch size: 48, lr: 9.42e-03, grad_scale: 32.0 +2024-08-27 01:35:13,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203397.33333333334, ans=0.0 +2024-08-27 01:35:16,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=203397.33333333334, ans=0.125 +2024-08-27 01:35:23,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.14 vs. 
limit=15.0 +2024-08-27 01:35:29,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=203450.66666666666, ans=0.125 +2024-08-27 01:35:31,502 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:35:49,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.508e+02 1.846e+02 2.334e+02 3.502e+02, threshold=3.692e+02, percent-clipped=1.0 +2024-08-27 01:36:01,635 INFO [train.py:1114] (2/4) Epoch 16, batch 850, loss[loss=0.1904, simple_loss=0.2693, pruned_loss=0.04079, ctc_loss=0.07502, over 19676.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2643, pruned_loss=0.04334, ctc_loss=0.08106, over 3814723.49 frames. ], batch size: 59, lr: 9.42e-03, grad_scale: 32.0 +2024-08-27 01:36:01,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=203664.0, ans=0.0 +2024-08-27 01:36:03,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=203664.0, ans=0.125 +2024-08-27 01:36:22,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203770.66666666666, ans=0.125 +2024-08-27 01:36:25,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203770.66666666666, ans=0.1 +2024-08-27 01:36:33,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0 +2024-08-27 01:36:51,745 INFO [train.py:1114] (2/4) Epoch 16, batch 900, loss[loss=0.1822, simple_loss=0.2496, pruned_loss=0.04167, ctc_loss=0.0785, over 19797.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.265, pruned_loss=0.04386, ctc_loss=0.08184, over 3819248.79 frames. ], batch size: 49, lr: 9.41e-03, grad_scale: 32.0 +2024-08-27 01:37:02,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=203984.0, ans=0.125 +2024-08-27 01:37:17,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0 +2024-08-27 01:37:19,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=204090.66666666666, ans=0.0 +2024-08-27 01:37:21,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.34 vs. limit=15.0 +2024-08-27 01:37:26,143 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.398e+02 1.563e+02 1.898e+02 3.698e+02, threshold=3.126e+02, percent-clipped=1.0 +2024-08-27 01:37:26,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.54 vs. limit=12.0 +2024-08-27 01:37:27,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=204090.66666666666, ans=0.125 +2024-08-27 01:37:38,112 INFO [train.py:1114] (2/4) Epoch 16, batch 950, loss[loss=0.1774, simple_loss=0.2482, pruned_loss=0.03843, ctc_loss=0.07424, over 19492.00 frames. 
], tot_loss[loss=0.1935, simple_loss=0.2655, pruned_loss=0.0442, ctc_loss=0.08244, over 3821051.82 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0 +2024-08-27 01:37:39,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=204197.33333333334, ans=0.125 +2024-08-27 01:37:40,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=204197.33333333334, ans=0.0 +2024-08-27 01:37:57,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=204250.66666666666, ans=0.125 +2024-08-27 01:38:04,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204304.0, ans=0.0 +2024-08-27 01:38:29,262 INFO [train.py:1114] (2/4) Epoch 16, batch 1000, loss[loss=0.1685, simple_loss=0.2504, pruned_loss=0.03168, ctc_loss=0.05812, over 19853.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2662, pruned_loss=0.04443, ctc_loss=0.08293, over 3817152.03 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0 +2024-08-27 01:38:32,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-08-27 01:38:49,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=204570.66666666666, ans=0.0 +2024-08-27 01:39:07,624 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.409e+02 1.616e+02 2.034e+02 3.159e+02, threshold=3.231e+02, percent-clipped=1.0 +2024-08-27 01:39:13,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=204677.33333333334, ans=0.125 +2024-08-27 01:39:19,858 INFO [train.py:1114] (2/4) Epoch 16, batch 1050, loss[loss=0.2074, simple_loss=0.2806, pruned_loss=0.04881, ctc_loss=0.0912, over 19846.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2657, pruned_loss=0.04423, ctc_loss=0.08255, over 3822679.86 frames. ], batch size: 57, lr: 9.39e-03, grad_scale: 32.0 +2024-08-27 01:39:35,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=204784.0, ans=10.0 +2024-08-27 01:39:39,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=204837.33333333334, ans=6.0 +2024-08-27 01:39:51,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=204890.66666666666, ans=0.025 +2024-08-27 01:39:56,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.07 vs. limit=15.0 +2024-08-27 01:40:07,061 INFO [train.py:1114] (2/4) Epoch 16, batch 1100, loss[loss=0.1693, simple_loss=0.2521, pruned_loss=0.03196, ctc_loss=0.05627, over 19591.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2651, pruned_loss=0.04376, ctc_loss=0.08178, over 3829363.67 frames. 
], batch size: 52, lr: 9.39e-03, grad_scale: 32.0 +2024-08-27 01:40:07,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204997.33333333334, ans=0.125 +2024-08-27 01:40:19,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=205050.66666666666, ans=0.125 +2024-08-27 01:40:21,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=205050.66666666666, ans=0.125 +2024-08-27 01:40:26,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.52 vs. limit=15.0 +2024-08-27 01:40:31,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=205104.0, ans=0.1 +2024-08-27 01:40:33,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=205104.0, ans=0.2 +2024-08-27 01:40:44,423 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.474e+02 1.664e+02 2.002e+02 3.685e+02, threshold=3.328e+02, percent-clipped=2.0 +2024-08-27 01:40:56,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205210.66666666666, ans=0.0 +2024-08-27 01:40:59,538 INFO [train.py:1114] (2/4) Epoch 16, batch 1150, loss[loss=0.1629, simple_loss=0.2419, pruned_loss=0.03034, ctc_loss=0.05804, over 19594.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2646, pruned_loss=0.04352, ctc_loss=0.08106, over 3828213.04 frames. ], batch size: 52, lr: 9.38e-03, grad_scale: 32.0 +2024-08-27 01:42:54,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205264.0, ans=0.0 +2024-08-27 01:42:57,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=205264.0, ans=0.0 +2024-08-27 01:42:59,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=205317.33333333334, ans=0.0 +2024-08-27 01:43:05,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205317.33333333334, ans=0.125 +2024-08-27 01:43:07,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=205317.33333333334, ans=0.1 +2024-08-27 01:43:12,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=205370.66666666666, ans=0.125 +2024-08-27 01:43:13,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=205370.66666666666, ans=0.0 +2024-08-27 01:43:17,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=205370.66666666666, ans=0.0 +2024-08-27 01:43:18,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=205424.0, ans=0.125 +2024-08-27 01:43:23,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205424.0, ans=0.1 +2024-08-27 01:43:26,939 INFO [scaling.py:214] (2/4) ScheduledFloat: 
name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=205424.0, ans=0.125 +2024-08-27 01:43:29,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=205477.33333333334, ans=0.2 +2024-08-27 01:43:40,063 INFO [train.py:1114] (2/4) Epoch 16, batch 1200, loss[loss=0.2177, simple_loss=0.289, pruned_loss=0.05381, ctc_loss=0.09722, over 19854.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2654, pruned_loss=0.04373, ctc_loss=0.08167, over 3824553.84 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0 +2024-08-27 01:43:41,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205530.66666666666, ans=0.1 +2024-08-27 01:43:51,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=205584.0, ans=0.125 +2024-08-27 01:43:52,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205584.0, ans=0.1 +2024-08-27 01:44:07,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=205637.33333333334, ans=0.0 +2024-08-27 01:44:11,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=205690.66666666666, ans=0.0 +2024-08-27 01:44:14,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.78 vs. limit=22.5 +2024-08-27 01:44:16,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.520e+02 1.803e+02 2.158e+02 3.897e+02, threshold=3.606e+02, percent-clipped=2.0 +2024-08-27 01:44:19,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=205744.0, ans=0.125 +2024-08-27 01:44:24,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.06 vs. limit=6.0 +2024-08-27 01:44:28,166 INFO [train.py:1114] (2/4) Epoch 16, batch 1250, loss[loss=0.2064, simple_loss=0.2777, pruned_loss=0.04951, ctc_loss=0.09029, over 19543.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2656, pruned_loss=0.04357, ctc_loss=0.08123, over 3843199.45 frames. 
], batch size: 61, lr: 9.37e-03, grad_scale: 32.0 +2024-08-27 01:44:34,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=205797.33333333334, ans=0.025 +2024-08-27 01:44:47,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205904.0, ans=0.125 +2024-08-27 01:44:50,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=205904.0, ans=0.2 +2024-08-27 01:44:50,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=205904.0, ans=0.125 +2024-08-27 01:44:53,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=205904.0, ans=0.2 +2024-08-27 01:44:55,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205957.33333333334, ans=0.125 +2024-08-27 01:44:57,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=205957.33333333334, ans=0.2 +2024-08-27 01:45:04,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=205957.33333333334, ans=0.125 +2024-08-27 01:45:10,840 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:45:17,708 INFO [train.py:1114] (2/4) Epoch 16, batch 1300, loss[loss=0.2064, simple_loss=0.2832, pruned_loss=0.04746, ctc_loss=0.08664, over 18896.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2647, pruned_loss=0.0433, ctc_loss=0.08073, over 3846464.53 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0 +2024-08-27 01:45:17,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=206064.0, ans=0.125 +2024-08-27 01:45:25,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=206064.0, ans=0.125 +2024-08-27 01:45:33,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=206117.33333333334, ans=0.125 +2024-08-27 01:45:37,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=206170.66666666666, ans=0.125 +2024-08-27 01:45:43,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=206170.66666666666, ans=0.2 +2024-08-27 01:45:49,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=206224.0, ans=0.125 +2024-08-27 01:45:52,815 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.516e+02 1.773e+02 2.282e+02 3.618e+02, threshold=3.546e+02, percent-clipped=1.0 +2024-08-27 01:46:06,819 INFO [train.py:1114] (2/4) Epoch 16, batch 1350, loss[loss=0.1975, simple_loss=0.2692, pruned_loss=0.04552, ctc_loss=0.08697, over 19779.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2646, pruned_loss=0.04319, ctc_loss=0.08052, over 3856788.89 frames. 
], batch size: 54, lr: 9.36e-03, grad_scale: 32.0 +2024-08-27 01:46:14,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206330.66666666666, ans=0.1 +2024-08-27 01:46:29,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206437.33333333334, ans=0.125 +2024-08-27 01:46:44,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=206490.66666666666, ans=0.125 +2024-08-27 01:46:48,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=206544.0, ans=0.125 +2024-08-27 01:46:51,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=206544.0, ans=0.0 +2024-08-27 01:46:53,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.19 vs. limit=15.0 +2024-08-27 01:46:56,705 INFO [train.py:1114] (2/4) Epoch 16, batch 1400, loss[loss=0.1627, simple_loss=0.2324, pruned_loss=0.03389, ctc_loss=0.06312, over 19676.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2642, pruned_loss=0.04291, ctc_loss=0.08, over 3863689.80 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 32.0 +2024-08-27 01:47:06,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=206650.66666666666, ans=0.05 +2024-08-27 01:47:18,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=206704.0, ans=0.0 +2024-08-27 01:48:25,292 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.410e+02 1.569e+02 1.892e+02 4.037e+02, threshold=3.138e+02, percent-clipped=1.0 +2024-08-27 01:48:33,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=206810.66666666666, ans=0.05 +2024-08-27 01:48:34,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.30 vs. limit=12.0 +2024-08-27 01:48:37,430 INFO [train.py:1114] (2/4) Epoch 16, batch 1450, loss[loss=0.1946, simple_loss=0.2703, pruned_loss=0.04272, ctc_loss=0.08367, over 19694.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2647, pruned_loss=0.04329, ctc_loss=0.08061, over 3862915.83 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0 +2024-08-27 01:48:42,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=206864.0, ans=0.125 +2024-08-27 01:48:47,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=206917.33333333334, ans=12.0 +2024-08-27 01:48:51,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.87 vs. limit=15.0 +2024-08-27 01:49:13,000 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:49:16,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.96 vs. 
limit=15.0 +2024-08-27 01:49:18,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=207077.33333333334, ans=0.0 +2024-08-27 01:49:23,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=207077.33333333334, ans=0.125 +2024-08-27 01:49:25,845 INFO [train.py:1114] (2/4) Epoch 16, batch 1500, loss[loss=0.1918, simple_loss=0.2721, pruned_loss=0.04061, ctc_loss=0.07585, over 19577.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2649, pruned_loss=0.04322, ctc_loss=0.08043, over 3862313.53 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0 +2024-08-27 01:50:03,756 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.422e+02 1.666e+02 2.042e+02 4.208e+02, threshold=3.332e+02, percent-clipped=3.0 +2024-08-27 01:50:22,136 INFO [train.py:1114] (2/4) Epoch 16, batch 1550, loss[loss=0.2165, simple_loss=0.2837, pruned_loss=0.05505, ctc_loss=0.09812, over 19614.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2653, pruned_loss=0.04351, ctc_loss=0.08122, over 3846657.29 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0 +2024-08-27 01:50:38,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-08-27 01:50:44,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=207504.0, ans=0.0 +2024-08-27 01:50:46,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.51 vs. limit=15.0 +2024-08-27 01:50:48,768 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.59 vs. limit=15.0 +2024-08-27 01:51:01,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0 +2024-08-27 01:51:01,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=207610.66666666666, ans=0.04949747468305833 +2024-08-27 01:51:08,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=207610.66666666666, ans=0.0 +2024-08-27 01:51:10,023 INFO [train.py:1114] (2/4) Epoch 16, batch 1600, loss[loss=0.1899, simple_loss=0.2731, pruned_loss=0.03911, ctc_loss=0.07097, over 19857.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2649, pruned_loss=0.04352, ctc_loss=0.0813, over 3836976.80 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0 +2024-08-27 01:51:22,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=207717.33333333334, ans=12.0 +2024-08-27 01:51:33,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.94 vs. 
limit=15.0 +2024-08-27 01:51:40,897 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:51:55,658 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.401e+02 1.606e+02 1.975e+02 3.175e+02, threshold=3.213e+02, percent-clipped=0.0 +2024-08-27 01:52:01,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=207877.33333333334, ans=0.0 +2024-08-27 01:52:14,355 INFO [train.py:1114] (2/4) Epoch 16, batch 1650, loss[loss=0.1881, simple_loss=0.2694, pruned_loss=0.03848, ctc_loss=0.07426, over 19662.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2652, pruned_loss=0.04361, ctc_loss=0.0814, over 3832569.65 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0 +2024-08-27 01:52:14,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=207930.66666666666, ans=10.0 +2024-08-27 01:52:23,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=207984.0, ans=10.0 +2024-08-27 01:52:26,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.48 vs. limit=22.5 +2024-08-27 01:52:29,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207984.0, ans=0.125 +2024-08-27 01:52:38,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=208037.33333333334, ans=0.125 +2024-08-27 01:52:49,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.61 vs. limit=15.0 +2024-08-27 01:52:50,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.01 vs. limit=10.0 +2024-08-27 01:53:04,343 INFO [train.py:1114] (2/4) Epoch 16, batch 1700, loss[loss=0.1713, simple_loss=0.2438, pruned_loss=0.03594, ctc_loss=0.06737, over 19681.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2648, pruned_loss=0.04325, ctc_loss=0.08087, over 3846790.89 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 64.0 +2024-08-27 01:53:15,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.06 vs. limit=15.0 +2024-08-27 01:53:31,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=208304.0, ans=0.125 +2024-08-27 01:53:42,383 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.468e+02 1.742e+02 2.214e+02 3.607e+02, threshold=3.484e+02, percent-clipped=2.0 +2024-08-27 01:53:42,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=208357.33333333334, ans=0.0 +2024-08-27 01:53:53,049 INFO [train.py:1114] (2/4) Epoch 16, batch 1750, loss[loss=0.1671, simple_loss=0.2353, pruned_loss=0.03555, ctc_loss=0.0695, over 19663.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2645, pruned_loss=0.0432, ctc_loss=0.08083, over 3850963.00 frames. 
], batch size: 45, lr: 9.31e-03, grad_scale: 32.0 +2024-08-27 01:53:58,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=208464.0, ans=0.07 +2024-08-27 01:53:59,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.31 vs. limit=15.0 +2024-08-27 01:54:17,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=208570.66666666666, ans=0.125 +2024-08-27 01:54:28,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208677.33333333334, ans=0.1 +2024-08-27 01:54:37,045 INFO [train.py:1114] (2/4) Epoch 16, batch 1800, loss[loss=0.1973, simple_loss=0.2762, pruned_loss=0.04272, ctc_loss=0.08226, over 19613.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2646, pruned_loss=0.0431, ctc_loss=0.0806, over 3852362.47 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0 +2024-08-27 01:54:50,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=208784.0, ans=0.125 +2024-08-27 01:54:50,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=208784.0, ans=0.0 +2024-08-27 01:55:00,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.49 vs. limit=6.0 +2024-08-27 01:55:06,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.50 vs. limit=15.0 +2024-08-27 01:55:10,170 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.563e+02 1.995e+02 2.578e+02 4.186e+02, threshold=3.991e+02, percent-clipped=7.0 +2024-08-27 01:55:18,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=208944.0, ans=0.0 +2024-08-27 01:55:20,658 INFO [train.py:1114] (2/4) Epoch 16, batch 1850, loss[loss=0.2078, simple_loss=0.2818, pruned_loss=0.04894, ctc_loss=0.08971, over 19584.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2647, pruned_loss=0.0433, ctc_loss=0.08075, over 3855814.81 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0 +2024-08-27 01:55:36,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=209050.66666666666, ans=0.125 +2024-08-27 01:55:49,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=209157.33333333334, ans=0.09899494936611666 +2024-08-27 01:55:52,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=209157.33333333334, ans=0.05 +2024-08-27 01:55:53,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=209157.33333333334, ans=0.0 +2024-08-27 01:56:04,476 INFO [train.py:1114] (2/4) Epoch 16, batch 1900, loss[loss=0.1869, simple_loss=0.2749, pruned_loss=0.03561, ctc_loss=0.06938, over 19668.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2653, pruned_loss=0.04353, ctc_loss=0.08098, over 3860371.95 frames. 
], batch size: 59, lr: 9.29e-03, grad_scale: 32.0 +2024-08-27 01:56:12,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209317.33333333334, ans=0.0 +2024-08-27 01:56:18,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.89 vs. limit=15.0 +2024-08-27 01:56:22,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=209370.66666666666, ans=0.0 +2024-08-27 01:56:29,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.13 vs. limit=22.5 +2024-08-27 01:56:29,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209424.0, ans=0.1 +2024-08-27 01:56:37,676 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.418e+02 1.626e+02 2.079e+02 4.675e+02, threshold=3.252e+02, percent-clipped=2.0 +2024-08-27 01:56:41,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=209477.33333333334, ans=0.2 +2024-08-27 01:56:48,333 INFO [train.py:1114] (2/4) Epoch 16, batch 1950, loss[loss=0.1953, simple_loss=0.2712, pruned_loss=0.04319, ctc_loss=0.08276, over 19581.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.266, pruned_loss=0.04342, ctc_loss=0.08079, over 3869552.69 frames. ], batch size: 52, lr: 9.29e-03, grad_scale: 32.0 +2024-08-27 01:56:51,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=209530.66666666666, ans=0.125 +2024-08-27 01:56:54,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=209530.66666666666, ans=0.125 +2024-08-27 01:57:02,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209584.0, ans=0.0 +2024-08-27 01:57:06,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209637.33333333334, ans=0.125 +2024-08-27 01:57:08,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209637.33333333334, ans=0.125 +2024-08-27 01:57:09,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=209637.33333333334, ans=0.125 +2024-08-27 01:57:10,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=209637.33333333334, ans=0.0 +2024-08-27 01:57:16,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.33 vs. limit=22.5 +2024-08-27 01:57:21,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209690.66666666666, ans=0.125 +2024-08-27 01:57:21,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.94 vs. 
limit=10.0 +2024-08-27 01:57:27,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209744.0, ans=0.125 +2024-08-27 01:57:35,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.17 vs. limit=15.0 +2024-08-27 01:57:35,864 INFO [train.py:1114] (2/4) Epoch 16, batch 2000, loss[loss=0.1612, simple_loss=0.2286, pruned_loss=0.03394, ctc_loss=0.065, over 19647.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2663, pruned_loss=0.04368, ctc_loss=0.08139, over 3853865.43 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0 +2024-08-27 01:57:42,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:42,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:46,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=209850.66666666666, ans=0.125 +2024-08-27 01:57:49,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209850.66666666666, ans=0.125 +2024-08-27 01:58:09,430 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.401e+02 1.655e+02 2.254e+02 4.011e+02, threshold=3.310e+02, percent-clipped=6.0 +2024-08-27 01:58:10,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=210010.66666666666, ans=0.0 +2024-08-27 01:58:18,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=210010.66666666666, ans=0.95 +2024-08-27 01:58:20,006 INFO [train.py:1114] (2/4) Epoch 16, batch 2050, loss[loss=0.1738, simple_loss=0.2422, pruned_loss=0.03779, ctc_loss=0.0744, over 19714.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2654, pruned_loss=0.04361, ctc_loss=0.08109, over 3850267.81 frames. ], batch size: 47, lr: 9.28e-03, grad_scale: 32.0 +2024-08-27 01:58:24,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=210064.0, ans=10.0 +2024-08-27 01:58:50,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.60 vs. limit=15.0 +2024-08-27 01:58:53,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=210224.0, ans=0.0 +2024-08-27 01:59:03,128 INFO [train.py:1114] (2/4) Epoch 16, batch 2100, loss[loss=0.1779, simple_loss=0.2606, pruned_loss=0.03435, ctc_loss=0.06635, over 19770.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2645, pruned_loss=0.04288, ctc_loss=0.08007, over 3857183.94 frames. 
], batch size: 54, lr: 9.27e-03, grad_scale: 32.0 +2024-08-27 01:59:04,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=210330.66666666666, ans=0.125 +2024-08-27 01:59:08,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=210330.66666666666, ans=0.0 +2024-08-27 01:59:25,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=210437.33333333334, ans=0.125 +2024-08-27 01:59:35,718 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.547e+02 1.892e+02 2.472e+02 4.594e+02, threshold=3.784e+02, percent-clipped=3.0 +2024-08-27 01:59:45,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=210544.0, ans=0.125 +2024-08-27 01:59:47,029 INFO [train.py:1114] (2/4) Epoch 16, batch 2150, loss[loss=0.1777, simple_loss=0.2512, pruned_loss=0.03814, ctc_loss=0.06959, over 19853.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2637, pruned_loss=0.04263, ctc_loss=0.07941, over 3867734.26 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0 +2024-08-27 01:59:49,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=210597.33333333334, ans=0.125 +2024-08-27 01:59:50,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=210597.33333333334, ans=0.0 +2024-08-27 02:00:01,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=210650.66666666666, ans=0.2 +2024-08-27 02:00:07,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=210704.0, ans=0.125 +2024-08-27 02:00:21,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=210810.66666666666, ans=0.07 +2024-08-27 02:00:26,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=210810.66666666666, ans=0.125 +2024-08-27 02:00:30,370 INFO [train.py:1114] (2/4) Epoch 16, batch 2200, loss[loss=0.1935, simple_loss=0.2757, pruned_loss=0.0407, ctc_loss=0.07453, over 19582.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2641, pruned_loss=0.04274, ctc_loss=0.07977, over 3865825.84 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0 +2024-08-27 02:00:41,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.35 vs. limit=15.0 +2024-08-27 02:00:43,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=210917.33333333334, ans=0.0 +2024-08-27 02:00:50,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=210970.66666666666, ans=0.0 +2024-08-27 02:00:55,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.32 vs. limit=22.5 +2024-08-27 02:01:03,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.47 vs. 
limit=12.0 +2024-08-27 02:01:06,355 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.493e+02 1.671e+02 2.113e+02 4.070e+02, threshold=3.342e+02, percent-clipped=1.0 +2024-08-27 02:01:17,559 INFO [train.py:1114] (2/4) Epoch 16, batch 2250, loss[loss=0.1981, simple_loss=0.2779, pruned_loss=0.04238, ctc_loss=0.08414, over 19607.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2645, pruned_loss=0.04278, ctc_loss=0.07985, over 3865772.78 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:01:21,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=6.0 +2024-08-27 02:01:22,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.01 vs. limit=15.0 +2024-08-27 02:01:25,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=211184.0, ans=0.125 +2024-08-27 02:01:25,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.30 vs. limit=6.0 +2024-08-27 02:01:29,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=211184.0, ans=0.125 +2024-08-27 02:01:31,748 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.37 vs. limit=15.0 +2024-08-27 02:01:35,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.46 vs. limit=22.5 +2024-08-27 02:01:36,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.11 vs. limit=15.0 +2024-08-27 02:01:43,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=211290.66666666666, ans=0.2 +2024-08-27 02:01:51,359 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-08-27 02:02:00,443 INFO [train.py:1114] (2/4) Epoch 16, batch 2300, loss[loss=0.1702, simple_loss=0.2415, pruned_loss=0.03627, ctc_loss=0.06619, over 19504.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2638, pruned_loss=0.04291, ctc_loss=0.08009, over 3860309.09 frames. 
], batch size: 49, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:02:01,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=211397.33333333334, ans=0.2 +2024-08-27 02:02:09,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211450.66666666666, ans=0.125 +2024-08-27 02:02:27,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211557.33333333334, ans=0.1 +2024-08-27 02:02:33,265 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.480e+02 1.722e+02 2.096e+02 3.640e+02, threshold=3.444e+02, percent-clipped=3.0 +2024-08-27 02:02:38,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=211610.66666666666, ans=0.07 +2024-08-27 02:02:44,154 INFO [train.py:1114] (2/4) Epoch 16, batch 2350, loss[loss=0.2068, simple_loss=0.2853, pruned_loss=0.04667, ctc_loss=0.08757, over 19678.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2639, pruned_loss=0.04302, ctc_loss=0.08012, over 3863352.78 frames. ], batch size: 63, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:02:46,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=211664.0, ans=0.0 +2024-08-27 02:02:50,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=211664.0, ans=0.0 +2024-08-27 02:02:53,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=211664.0, ans=0.2 +2024-08-27 02:03:05,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=211717.33333333334, ans=0.125 +2024-08-27 02:03:14,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211770.66666666666, ans=0.1 +2024-08-27 02:03:25,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=211877.33333333334, ans=0.125 +2024-08-27 02:03:32,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=211877.33333333334, ans=0.0 +2024-08-27 02:03:34,523 INFO [train.py:1114] (2/4) Epoch 16, batch 2400, loss[loss=0.2082, simple_loss=0.2794, pruned_loss=0.05008, ctc_loss=0.09202, over 19443.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.266, pruned_loss=0.04381, ctc_loss=0.08144, over 3857038.45 frames. 
], batch size: 71, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:03:43,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=211984.0, ans=0.04949747468305833 +2024-08-27 02:03:46,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=211984.0, ans=0.05 +2024-08-27 02:03:55,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=212037.33333333334, ans=0.125 +2024-08-27 02:03:59,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=212090.66666666666, ans=0.0 +2024-08-27 02:04:07,971 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.442e+02 1.653e+02 2.239e+02 3.362e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-27 02:04:09,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212144.0, ans=0.1 +2024-08-27 02:04:11,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=212144.0, ans=0.125 +2024-08-27 02:04:18,801 INFO [train.py:1114] (2/4) Epoch 16, batch 2450, loss[loss=0.2508, simple_loss=0.3007, pruned_loss=0.07333, ctc_loss=0.1354, over 12918.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2696, pruned_loss=0.04632, ctc_loss=0.08656, over 3728353.95 frames. ], batch size: 140, lr: 9.23e-03, grad_scale: 32.0 +2024-08-27 02:04:36,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212304.0, ans=0.125 +2024-08-27 02:04:45,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=212357.33333333334, ans=0.125 +2024-08-27 02:04:50,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=212357.33333333334, ans=0.025 +2024-08-27 02:05:43,527 INFO [train.py:1114] (2/4) Epoch 17, batch 0, loss[loss=0.1792, simple_loss=0.249, pruned_loss=0.04018, ctc_loss=0.07248, over 19416.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.249, pruned_loss=0.04018, ctc_loss=0.07248, over 19416.00 frames. ], batch size: 48, lr: 8.95e-03, grad_scale: 32.0 +2024-08-27 02:05:43,527 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-27 02:05:51,381 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.1522, 4.2818, 4.3547, 4.4697], device='cuda:2') +2024-08-27 02:05:53,287 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.172, simple_loss=0.265, pruned_loss=0.02949, ctc_loss=0.04976, over 944034.00 frames. 
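The validation figure logged just above ("validation: loss=0.172, ... over 944034.00 frames.") is a frame-weighted average, like the running tot_loss lines elsewhere in this log: each batch contributes its per-frame losses scaled by its frame count, and the printed value is the accumulated sum divided by the accumulated frames, so longer utterances weigh proportionally more. A minimal sketch of that bookkeeping, assuming a hypothetical FrameWeightedLoss helper (the training code's own tracker will differ in detail):

from collections import defaultdict

class FrameWeightedLoss:
    # Accumulates per-batch loss values weighted by frame count, so the
    # reported averages read like "loss=0.172, ..., over 944034.00 frames."
    def __init__(self):
        self.sums = defaultdict(float)  # per-metric sum of (value * frames)
        self.frames = 0.0               # total frames accumulated so far

    def update(self, num_frames, **losses):
        # Each incoming value is assumed to be a per-frame average for the
        # batch, so it is re-weighted by the batch's frame count.
        self.frames += num_frames
        for name, value in losses.items():
            self.sums[name] += value * num_frames

    def __str__(self):
        parts = [f"{name}={total / self.frames:.4g}"
                 for name, total in self.sums.items()]
        return ", ".join(parts) + f", over {self.frames:.2f} frames."

# Two example batches: the 1500-frame batch dominates the average.
tracker = FrameWeightedLoss()
tracker.update(500.0, loss=0.18, simple_loss=0.27, ctc_loss=0.05)
tracker.update(1500.0, loss=0.17, simple_loss=0.26, ctc_loss=0.049)
print(tracker)  # loss=0.1725, simple_loss=0.2625, ctc_loss=0.04925, over 2000.00 frames.
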
+2024-08-27 02:05:53,287 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12932MB +2024-08-27 02:06:01,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212458.66666666666, ans=0.1 +2024-08-27 02:06:12,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=212512.0, ans=0.5 +2024-08-27 02:06:12,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212512.0, ans=0.1 +2024-08-27 02:06:26,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=212565.33333333334, ans=0.125 +2024-08-27 02:06:27,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=212565.33333333334, ans=0.0 +2024-08-27 02:06:40,306 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.629e+02 1.801e+02 2.001e+02 3.255e+02, threshold=3.602e+02, percent-clipped=0.0 +2024-08-27 02:06:40,340 INFO [train.py:1114] (2/4) Epoch 17, batch 50, loss[loss=0.1697, simple_loss=0.2387, pruned_loss=0.03616, ctc_loss=0.07072, over 19722.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2654, pruned_loss=0.04371, ctc_loss=0.08228, over 845315.44 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:06:41,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212672.0, ans=0.1 +2024-08-27 02:06:54,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-08-27 02:07:11,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.37 vs. limit=22.5 +2024-08-27 02:07:13,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.11 vs. limit=8.0 +2024-08-27 02:07:13,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=212832.0, ans=0.125 +2024-08-27 02:07:20,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212885.33333333334, ans=0.125 +2024-08-27 02:07:22,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=212885.33333333334, ans=0.0 +2024-08-27 02:07:29,655 INFO [train.py:1114] (2/4) Epoch 17, batch 100, loss[loss=0.1834, simple_loss=0.2598, pruned_loss=0.03893, ctc_loss=0.07264, over 19753.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2655, pruned_loss=0.04373, ctc_loss=0.08163, over 1498877.26 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:07:42,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=15.0 +2024-08-27 02:07:56,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.19 vs. 
limit=15.0 +2024-08-27 02:08:07,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.60 vs. limit=15.0 +2024-08-27 02:08:09,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=213098.66666666666, ans=0.0 +2024-08-27 02:08:20,142 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.665e+02 2.006e+02 3.256e+02, threshold=3.330e+02, percent-clipped=0.0 +2024-08-27 02:08:20,176 INFO [train.py:1114] (2/4) Epoch 17, batch 150, loss[loss=0.1727, simple_loss=0.2441, pruned_loss=0.03714, ctc_loss=0.06759, over 19722.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2641, pruned_loss=0.04287, ctc_loss=0.07984, over 2027544.43 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:08:23,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213205.33333333334, ans=0.0 +2024-08-27 02:10:24,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=213365.33333333334, ans=0.0 +2024-08-27 02:10:41,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-27 02:10:52,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-27 02:10:55,458 INFO [train.py:1114] (2/4) Epoch 17, batch 200, loss[loss=0.2091, simple_loss=0.2769, pruned_loss=0.05126, ctc_loss=0.09714, over 18397.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2632, pruned_loss=0.04275, ctc_loss=0.07966, over 2435139.54 frames. 
], batch size: 85, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:11:02,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=213472.0, ans=0.2 +2024-08-27 02:11:03,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213472.0, ans=0.1 +2024-08-27 02:11:04,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213472.0, ans=0.1 +2024-08-27 02:11:14,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=213525.33333333334, ans=0.125 +2024-08-27 02:11:17,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=213525.33333333334, ans=0.025 +2024-08-27 02:11:18,900 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:11:29,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=213632.0, ans=0.04949747468305833 +2024-08-27 02:11:42,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=213685.33333333334, ans=0.0 +2024-08-27 02:11:47,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=213685.33333333334, ans=0.125 +2024-08-27 02:11:49,173 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.468e+02 1.730e+02 2.457e+02 4.645e+02, threshold=3.460e+02, percent-clipped=6.0 +2024-08-27 02:11:49,207 INFO [train.py:1114] (2/4) Epoch 17, batch 250, loss[loss=0.1932, simple_loss=0.2683, pruned_loss=0.04302, ctc_loss=0.0804, over 19307.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2628, pruned_loss=0.04231, ctc_loss=0.07889, over 2755756.17 frames. ], batch size: 67, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:12:31,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=213898.66666666666, ans=0.05 +2024-08-27 02:12:37,522 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:14:34,922 INFO [train.py:1114] (2/4) Epoch 17, batch 300, loss[loss=0.2042, simple_loss=0.2794, pruned_loss=0.04654, ctc_loss=0.08987, over 19521.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.263, pruned_loss=0.042, ctc_loss=0.07844, over 3000078.39 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:14:56,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=214112.0, ans=0.2 +2024-08-27 02:16:25,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=214165.33333333334, ans=0.125 +2024-08-27 02:16:34,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.74 vs. limit=15.0 +2024-08-27 02:16:35,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.80 vs. 
limit=15.0 +2024-08-27 02:16:39,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=214218.66666666666, ans=0.0 +2024-08-27 02:16:46,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214218.66666666666, ans=0.0 +2024-08-27 02:16:48,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=214272.0, ans=0.125 +2024-08-27 02:16:48,695 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.705e+02 2.074e+02 4.169e+02, threshold=3.410e+02, percent-clipped=2.0 +2024-08-27 02:16:48,729 INFO [train.py:1114] (2/4) Epoch 17, batch 350, loss[loss=0.1766, simple_loss=0.245, pruned_loss=0.03867, ctc_loss=0.07725, over 19751.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2634, pruned_loss=0.04205, ctc_loss=0.07841, over 3189055.41 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 16.0 +2024-08-27 02:16:54,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=214272.0, ans=0.125 +2024-08-27 02:17:14,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=214378.66666666666, ans=0.2 +2024-08-27 02:17:20,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214432.0, ans=0.125 +2024-08-27 02:17:27,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=214485.33333333334, ans=0.95 +2024-08-27 02:17:36,077 INFO [train.py:1114] (2/4) Epoch 17, batch 400, loss[loss=0.1812, simple_loss=0.2631, pruned_loss=0.03562, ctc_loss=0.07008, over 19496.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2627, pruned_loss=0.04184, ctc_loss=0.07806, over 3340401.63 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-27 02:17:38,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=214538.66666666666, ans=0.2 +2024-08-27 02:17:42,449 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.75 vs. limit=5.0 +2024-08-27 02:17:57,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=214645.33333333334, ans=0.0 +2024-08-27 02:18:02,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=214645.33333333334, ans=0.125 +2024-08-27 02:18:13,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.32 vs. 
limit=12.0 +2024-08-27 02:18:22,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=214752.0, ans=0.0 +2024-08-27 02:18:22,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=214752.0, ans=0.2 +2024-08-27 02:18:25,570 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.479e+02 1.707e+02 2.031e+02 4.496e+02, threshold=3.413e+02, percent-clipped=2.0 +2024-08-27 02:18:25,604 INFO [train.py:1114] (2/4) Epoch 17, batch 450, loss[loss=0.1757, simple_loss=0.2589, pruned_loss=0.03264, ctc_loss=0.06828, over 19617.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2634, pruned_loss=0.04207, ctc_loss=0.07868, over 3449884.10 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:18:37,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=214805.33333333334, ans=0.0 +2024-08-27 02:18:54,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.94 vs. limit=12.0 +2024-08-27 02:19:02,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=214965.33333333334, ans=0.0 +2024-08-27 02:19:05,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214965.33333333334, ans=0.1 +2024-08-27 02:19:13,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215018.66666666666, ans=0.1 +2024-08-27 02:19:18,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215072.0, ans=0.125 +2024-08-27 02:19:18,861 INFO [train.py:1114] (2/4) Epoch 17, batch 500, loss[loss=0.1983, simple_loss=0.2772, pruned_loss=0.04317, ctc_loss=0.08241, over 19708.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2626, pruned_loss=0.04173, ctc_loss=0.07812, over 3545931.45 frames. ], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:19:19,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215072.0, ans=0.125 +2024-08-27 02:19:21,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=215072.0, ans=0.2 +2024-08-27 02:19:28,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=15.0 +2024-08-27 02:19:40,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.06 vs. 
limit=10.0 +2024-08-27 02:19:47,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=215178.66666666666, ans=0.5 +2024-08-27 02:19:51,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215232.0, ans=0.125 +2024-08-27 02:19:56,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=215232.0, ans=0.0 +2024-08-27 02:20:24,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215285.33333333334, ans=0.125 +2024-08-27 02:20:26,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.82 vs. limit=6.0 +2024-08-27 02:20:44,566 INFO [train.py:1114] (2/4) Epoch 17, batch 550, loss[loss=0.2028, simple_loss=0.268, pruned_loss=0.05034, ctc_loss=0.0923, over 19279.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2625, pruned_loss=0.04192, ctc_loss=0.07844, over 3608240.69 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 16.0 +2024-08-27 02:20:45,384 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.446e+02 1.711e+02 2.254e+02 3.980e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-27 02:20:55,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.17 vs. limit=8.0 +2024-08-27 02:21:04,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=215445.33333333334, ans=0.125 +2024-08-27 02:21:10,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.35 vs. limit=6.0 +2024-08-27 02:21:26,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=215552.0, ans=0.0 +2024-08-27 02:21:26,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=215552.0, ans=0.0 +2024-08-27 02:21:27,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=215552.0, ans=0.0 +2024-08-27 02:21:43,262 INFO [train.py:1114] (2/4) Epoch 17, batch 600, loss[loss=0.2199, simple_loss=0.291, pruned_loss=0.05409, ctc_loss=0.1018, over 19456.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2629, pruned_loss=0.04199, ctc_loss=0.07841, over 3666029.34 frames. ], batch size: 67, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:21:50,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215605.33333333334, ans=0.125 +2024-08-27 02:21:51,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215605.33333333334, ans=0.1 +2024-08-27 02:22:28,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-27 02:22:35,804 INFO [train.py:1114] (2/4) Epoch 17, batch 650, loss[loss=0.1884, simple_loss=0.2663, pruned_loss=0.03998, ctc_loss=0.07599, over 19776.00 frames. 
], tot_loss[loss=0.1887, simple_loss=0.2626, pruned_loss=0.04183, ctc_loss=0.07801, over 3716225.09 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:22:36,659 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.454e+02 1.765e+02 2.281e+02 4.784e+02, threshold=3.530e+02, percent-clipped=4.0 +2024-08-27 02:22:46,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215925.33333333334, ans=0.125 +2024-08-27 02:23:00,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=12.0 +2024-08-27 02:23:07,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=216032.0, ans=0.125 +2024-08-27 02:23:25,364 INFO [train.py:1114] (2/4) Epoch 17, batch 700, loss[loss=0.1896, simple_loss=0.2617, pruned_loss=0.04226, ctc_loss=0.08258, over 19707.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.263, pruned_loss=0.042, ctc_loss=0.07835, over 3747768.85 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:23:27,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.91 vs. limit=12.0 +2024-08-27 02:23:38,414 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:23:42,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=216192.0, ans=0.125 +2024-08-27 02:23:44,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.65 vs. limit=15.0 +2024-08-27 02:23:49,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.29 vs. limit=22.5 +2024-08-27 02:28:37,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=216352.0, ans=0.125 +2024-08-27 02:28:51,344 INFO [train.py:1114] (2/4) Epoch 17, batch 750, loss[loss=0.1725, simple_loss=0.2503, pruned_loss=0.03555, ctc_loss=0.05896, over 19516.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2625, pruned_loss=0.04189, ctc_loss=0.07804, over 3774513.43 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:29:21,548 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.483e+02 1.820e+02 2.509e+02 4.091e+02, threshold=3.640e+02, percent-clipped=8.0 +2024-08-27 02:37:37,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0 +2024-08-27 02:38:07,122 INFO [train.py:1114] (2/4) Epoch 17, batch 800, loss[loss=0.1649, simple_loss=0.2366, pruned_loss=0.03365, ctc_loss=0.06472, over 19431.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2628, pruned_loss=0.04209, ctc_loss=0.0785, over 3795456.66 frames. 
], batch size: 48, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:39:23,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=216725.33333333334, ans=0.025 +2024-08-27 02:39:37,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.34 vs. limit=15.0 +2024-08-27 02:40:06,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.49 vs. limit=5.0 +2024-08-27 02:40:30,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.73 vs. limit=15.0 +2024-08-27 02:40:33,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=216885.33333333334, ans=0.0 +2024-08-27 02:40:40,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=216885.33333333334, ans=0.2 +2024-08-27 02:40:43,401 INFO [train.py:1114] (2/4) Epoch 17, batch 850, loss[loss=0.2026, simple_loss=0.2746, pruned_loss=0.0474, ctc_loss=0.08923, over 19651.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2623, pruned_loss=0.04199, ctc_loss=0.07824, over 3815335.46 frames. ], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:40:43,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216938.66666666666, ans=0.1 +2024-08-27 02:40:44,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.490e+02 1.788e+02 2.181e+02 3.218e+02, threshold=3.576e+02, percent-clipped=0.0 +2024-08-27 02:40:54,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=216992.0, ans=0.125 +2024-08-27 02:41:16,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.15 vs. limit=15.0 +2024-08-27 02:41:18,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.83 vs. limit=12.0 +2024-08-27 02:41:23,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=217045.33333333334, ans=0.0 +2024-08-27 02:41:34,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217098.66666666666, ans=0.125 +2024-08-27 02:41:38,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217152.0, ans=0.125 +2024-08-27 02:41:48,111 INFO [train.py:1114] (2/4) Epoch 17, batch 900, loss[loss=0.1724, simple_loss=0.2491, pruned_loss=0.03444, ctc_loss=0.0671, over 19394.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2629, pruned_loss=0.04236, ctc_loss=0.07894, over 3819723.78 frames. ], batch size: 48, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:41:52,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217205.33333333334, ans=0.125 +2024-08-27 02:42:02,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.74 vs. 
limit=6.0 +2024-08-27 02:42:06,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217312.0, ans=0.1 +2024-08-27 02:42:07,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217312.0, ans=0.125 +2024-08-27 02:42:25,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217365.33333333334, ans=0.1 +2024-08-27 02:42:42,350 INFO [train.py:1114] (2/4) Epoch 17, batch 950, loss[loss=0.1746, simple_loss=0.2516, pruned_loss=0.03511, ctc_loss=0.06848, over 19494.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2635, pruned_loss=0.04262, ctc_loss=0.07963, over 3820675.06 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:42:43,216 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.442e+02 1.596e+02 1.963e+02 3.277e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-27 02:43:12,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217525.33333333334, ans=0.125 +2024-08-27 02:43:27,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=217578.66666666666, ans=0.125 +2024-08-27 02:44:26,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=15.0 +2024-08-27 02:45:01,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=217685.33333333334, ans=0.125 +2024-08-27 02:45:22,491 INFO [train.py:1114] (2/4) Epoch 17, batch 1000, loss[loss=0.1725, simple_loss=0.2538, pruned_loss=0.03367, ctc_loss=0.05986, over 19852.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2641, pruned_loss=0.04274, ctc_loss=0.07977, over 3817731.00 frames. ], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:45:39,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=217792.0, ans=0.025 +2024-08-27 02:45:42,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=217792.0, ans=0.07 +2024-08-27 02:45:45,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=217792.0, ans=0.2 +2024-08-27 02:46:17,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=217898.66666666666, ans=0.0 +2024-08-27 02:46:28,511 INFO [train.py:1114] (2/4) Epoch 17, batch 1050, loss[loss=0.1793, simple_loss=0.2568, pruned_loss=0.03719, ctc_loss=0.06851, over 19848.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.263, pruned_loss=0.04242, ctc_loss=0.07909, over 3825551.01 frames. 
], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:46:29,426 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.403e+02 1.586e+02 2.025e+02 2.959e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-27 02:46:31,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=218005.33333333334, ans=0.0 +2024-08-27 02:47:12,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=218112.0, ans=0.125 +2024-08-27 02:47:21,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=218165.33333333334, ans=0.0 +2024-08-27 02:47:24,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218165.33333333334, ans=0.125 +2024-08-27 02:47:38,617 INFO [train.py:1114] (2/4) Epoch 17, batch 1100, loss[loss=0.1852, simple_loss=0.2626, pruned_loss=0.03896, ctc_loss=0.07486, over 19592.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2628, pruned_loss=0.04216, ctc_loss=0.07872, over 3832810.09 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:47:43,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218272.0, ans=0.1 +2024-08-27 02:47:44,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-08-27 02:48:25,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=218325.33333333334, ans=0.0 +2024-08-27 02:48:34,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=218325.33333333334, ans=0.125 +2024-08-27 02:48:35,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=218325.33333333334, ans=0.0 +2024-08-27 02:48:55,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218378.66666666666, ans=0.125 +2024-08-27 02:49:03,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=218432.0, ans=0.125 +2024-08-27 02:49:11,261 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:49:11,616 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.27 vs. limit=12.0 +2024-08-27 02:49:25,942 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:49:27,450 INFO [train.py:1114] (2/4) Epoch 17, batch 1150, loss[loss=0.1697, simple_loss=0.246, pruned_loss=0.03367, ctc_loss=0.06525, over 19591.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2629, pruned_loss=0.04219, ctc_loss=0.07888, over 3830356.08 frames. 
], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:49:28,307 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.459e+02 1.619e+02 1.965e+02 3.390e+02, threshold=3.239e+02, percent-clipped=1.0 +2024-08-27 02:49:43,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218592.0, ans=0.1 +2024-08-27 02:49:54,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=218645.33333333334, ans=0.025 +2024-08-27 02:49:56,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.52 vs. limit=15.0 +2024-08-27 02:50:14,206 INFO [train.py:1114] (2/4) Epoch 17, batch 1200, loss[loss=0.1851, simple_loss=0.2674, pruned_loss=0.03629, ctc_loss=0.07545, over 19842.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2641, pruned_loss=0.04252, ctc_loss=0.07951, over 3825167.37 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:50:17,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0 +2024-08-27 02:50:38,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.73 vs. limit=10.0 +2024-08-27 02:51:05,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=219072.0, ans=0.125 +2024-08-27 02:51:28,549 INFO [train.py:1114] (2/4) Epoch 17, batch 1250, loss[loss=0.1958, simple_loss=0.265, pruned_loss=0.047, ctc_loss=0.08142, over 19515.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2647, pruned_loss=0.04279, ctc_loss=0.07984, over 3843189.79 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:51:29,441 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.488e+02 1.826e+02 2.228e+02 3.440e+02, threshold=3.652e+02, percent-clipped=1.0 +2024-08-27 02:51:39,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219072.0, ans=0.0 +2024-08-27 02:51:58,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.72 vs. limit=6.0 +2024-08-27 02:52:19,658 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:52:29,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=219285.33333333334, ans=0.125 +2024-08-27 02:52:32,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219285.33333333334, ans=0.1 +2024-08-27 02:52:40,149 INFO [train.py:1114] (2/4) Epoch 17, batch 1300, loss[loss=0.2094, simple_loss=0.2803, pruned_loss=0.05108, ctc_loss=0.09084, over 18942.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2636, pruned_loss=0.04222, ctc_loss=0.07887, over 3847215.40 frames. ], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:52:40,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.67 vs. 
limit=15.0 +2024-08-27 02:52:46,994 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-08-27 02:53:10,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219498.66666666666, ans=0.125 +2024-08-27 02:53:12,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=219498.66666666666, ans=0.0 +2024-08-27 02:53:43,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=219552.0, ans=0.0 +2024-08-27 02:53:45,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219552.0, ans=0.1 +2024-08-27 02:53:47,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=219552.0, ans=15.0 +2024-08-27 02:53:47,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.40 vs. limit=22.5 +2024-08-27 02:53:49,257 INFO [train.py:1114] (2/4) Epoch 17, batch 1350, loss[loss=0.1694, simple_loss=0.2514, pruned_loss=0.0322, ctc_loss=0.05761, over 19785.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2629, pruned_loss=0.04185, ctc_loss=0.07814, over 3857406.84 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:53:50,131 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.487e+02 1.709e+02 2.118e+02 3.687e+02, threshold=3.418e+02, percent-clipped=1.0 +2024-08-27 02:54:19,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=219765.33333333334, ans=0.2 +2024-08-27 02:54:47,052 INFO [train.py:1114] (2/4) Epoch 17, batch 1400, loss[loss=0.1546, simple_loss=0.219, pruned_loss=0.03272, ctc_loss=0.06204, over 19655.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2623, pruned_loss=0.04162, ctc_loss=0.07782, over 3863333.21 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:54:55,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.14 vs. limit=6.0 +2024-08-27 02:55:00,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.25 vs. limit=22.5 +2024-08-27 02:55:09,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=219978.66666666666, ans=0.125 +2024-08-27 02:55:21,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=220032.0, ans=0.125 +2024-08-27 02:55:29,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=220085.33333333334, ans=0.125 +2024-08-27 02:55:40,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=220085.33333333334, ans=0.0 +2024-08-27 02:55:41,676 INFO [train.py:1114] (2/4) Epoch 17, batch 1450, loss[loss=0.225, simple_loss=0.2942, pruned_loss=0.05687, ctc_loss=0.1054, over 19634.00 frames. 
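Each ScheduledFloat record reports the current value (ans) of one tunable constant, such as a dropout probability, skip rate, or balancer probability, interpolated against the global batch_count. A minimal sketch of such a schedule, with illustrative breakpoints rather than the recipe's actual ones:

```python
class ScheduledFloat:
    """Piecewise-linear schedule keyed on the global batch count."""

    def __init__(self, *points):
        # points: (batch_count, value) pairs
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        (x0, y0), *rest = self.points
        if batch_count <= x0:
            return y0
        for x1, y1 in rest:
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
            x0, y0 = x1, y1
        return y0  # past the last breakpoint

# e.g. a conv_skip_rate decaying from 0.1 to 0.0 over the first 20k
# batches (breakpoints made up for illustration):
conv_skip_rate = ScheduledFloat((0.0, 0.1), (20000.0, 0.0))
print(conv_skip_rate.value(219498.67))  # -> 0.0, matching ans=0.0 above
```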
], tot_loss[loss=0.1888, simple_loss=0.2629, pruned_loss=0.04173, ctc_loss=0.07821, over 3862191.42 frames. ], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:55:42,534 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.445e+02 1.654e+02 2.032e+02 3.496e+02, threshold=3.307e+02, percent-clipped=1.0 +2024-08-27 02:55:59,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=220192.0, ans=0.05 +2024-08-27 02:56:07,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=220245.33333333334, ans=0.025 +2024-08-27 02:56:17,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=220298.66666666666, ans=0.0 +2024-08-27 02:56:24,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=220352.0, ans=0.1 +2024-08-27 02:56:35,348 INFO [train.py:1114] (2/4) Epoch 17, batch 1500, loss[loss=0.1918, simple_loss=0.2608, pruned_loss=0.04485, ctc_loss=0.08287, over 19586.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.263, pruned_loss=0.04174, ctc_loss=0.07832, over 3862121.78 frames. ], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:56:40,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=220405.33333333334, ans=0.0 +2024-08-27 02:56:58,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=220512.0, ans=0.0 +2024-08-27 02:57:09,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=220565.33333333334, ans=0.0 +2024-08-27 02:57:10,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-27 02:57:11,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-27 02:57:11,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=220565.33333333334, ans=0.025 +2024-08-27 02:57:15,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.47 vs. limit=10.0 +2024-08-27 02:57:22,837 INFO [train.py:1114] (2/4) Epoch 17, batch 1550, loss[loss=0.2246, simple_loss=0.2959, pruned_loss=0.05573, ctc_loss=0.1044, over 19616.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2633, pruned_loss=0.04203, ctc_loss=0.07886, over 3845614.46 frames. 
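The periodic optim.py WARNINGs summarise gradient clipping: the five numbers are the min/25%/median/75%/max of recent gradient norms, the threshold tracks Clipping_scale times the median (2.0 × 1.654e+02 ≈ 3.307e+02 in the record above), and percent-clipped reports how often a step exceeded it. A minimal sketch of that bookkeeping, assuming a simple sliding window rather than the optimizer's actual internals:

```python
from collections import deque

import torch


class QuartileGradClipper:
    def __init__(self, clipping_scale=2.0, window=128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total gradient norms
        self.num_steps = 0
        self.num_clipped = 0

    def __call__(self, parameters):
        params = [p for p in parameters if p.grad is not None]
        total_norm = torch.norm(
            torch.stack([p.grad.detach().norm(2) for p in params]), 2
        )
        self.norms.append(total_norm.item())
        ranked = sorted(self.norms)
        n = len(ranked)
        quartiles = [ranked[min(int(q * (n - 1)), n - 1)]
                     for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]  # scale * median
        self.num_steps += 1
        if total_norm > threshold:
            self.num_clipped += 1
            for p in params:  # rescale gradients down to the threshold
                p.grad.mul_(threshold / total_norm)
        percent = 100.0 * self.num_clipped / self.num_steps
        return quartiles, threshold, percent
```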
], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:57:23,797 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.433e+02 1.700e+02 2.311e+02 3.923e+02, threshold=3.401e+02, percent-clipped=1.0 +2024-08-27 02:57:32,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=220725.33333333334, ans=0.025 +2024-08-27 02:57:50,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220725.33333333334, ans=0.1 +2024-08-27 02:58:15,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=220832.0, ans=0.2 +2024-08-27 02:58:16,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=220832.0, ans=0.0 +2024-08-27 02:58:17,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=220885.33333333334, ans=0.0 +2024-08-27 02:58:27,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=220938.66666666666, ans=0.125 +2024-08-27 02:58:27,693 INFO [train.py:1114] (2/4) Epoch 17, batch 1600, loss[loss=0.1979, simple_loss=0.2772, pruned_loss=0.04347, ctc_loss=0.07915, over 19832.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2629, pruned_loss=0.04183, ctc_loss=0.07848, over 3835786.34 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-27 03:00:02,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.whiten.whitening_limit, batch_count=221045.33333333334, ans=15.0 +2024-08-27 03:00:33,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=221098.66666666666, ans=0.2 +2024-08-27 03:00:40,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=221098.66666666666, ans=0.2 +2024-08-27 03:00:46,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0 +2024-08-27 03:00:55,412 INFO [train.py:1114] (2/4) Epoch 17, batch 1650, loss[loss=0.2003, simple_loss=0.275, pruned_loss=0.04607, ctc_loss=0.0838, over 19655.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2625, pruned_loss=0.0417, ctc_loss=0.07824, over 3833058.26 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:00:58,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.431e+02 1.952e+02 2.452e+02 3.980e+02, threshold=3.905e+02, percent-clipped=5.0 +2024-08-27 03:01:07,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=221258.66666666666, ans=0.0 +2024-08-27 03:01:20,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=221312.0, ans=0.2 +2024-08-27 03:01:43,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=221365.33333333334, ans=0.125 +2024-08-27 03:01:49,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.19 vs. 
limit=15.0 +2024-08-27 03:01:57,110 INFO [train.py:1114] (2/4) Epoch 17, batch 1700, loss[loss=0.1662, simple_loss=0.2358, pruned_loss=0.03481, ctc_loss=0.06749, over 19677.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2628, pruned_loss=0.04172, ctc_loss=0.07819, over 3847660.59 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:02:20,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-27 03:02:25,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=221578.66666666666, ans=0.125 +2024-08-27 03:02:26,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221578.66666666666, ans=0.1 +2024-08-27 03:02:30,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221632.0, ans=0.125 +2024-08-27 03:02:33,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=221632.0, ans=0.125 +2024-08-27 03:02:39,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=221685.33333333334, ans=0.125 +2024-08-27 03:02:48,265 INFO [train.py:1114] (2/4) Epoch 17, batch 1750, loss[loss=0.1758, simple_loss=0.2429, pruned_loss=0.0397, ctc_loss=0.07319, over 19643.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2622, pruned_loss=0.04154, ctc_loss=0.07788, over 3852036.08 frames. ], batch size: 45, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:02:49,975 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.526e+02 1.896e+02 2.459e+02 4.889e+02, threshold=3.791e+02, percent-clipped=1.0 +2024-08-27 03:03:11,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.46 vs. limit=12.0 +2024-08-27 03:03:15,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=221792.0, ans=0.125 +2024-08-27 03:03:19,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=221845.33333333334, ans=0.0 +2024-08-27 03:03:20,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=221845.33333333334, ans=0.0 +2024-08-27 03:03:28,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=221898.66666666666, ans=0.125 +2024-08-27 03:03:29,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=221898.66666666666, ans=0.025 +2024-08-27 03:03:37,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=221952.0, ans=0.0 +2024-08-27 03:03:44,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.62 vs. limit=15.0 +2024-08-27 03:03:46,722 INFO [train.py:1114] (2/4) Epoch 17, batch 1800, loss[loss=0.1886, simple_loss=0.2655, pruned_loss=0.04101, ctc_loss=0.07447, over 19605.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2623, pruned_loss=0.04157, ctc_loss=0.07777, over 3853124.07 frames. 
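The Whitening records compare a per-module statistic against a limit (e.g. metric=4.14 vs. limit=6.0 for the attention keys above), and a corrective gradient is presumably applied only once the limit is crossed. Sketched below under the assumption that the metric measures how far the per-group feature covariance is from a scaled identity: it equals 1.0 for perfectly white features and grows as a few directions dominate.

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """Assumed metric: mean squared eigenvalue of the per-group feature
    covariance divided by the squared mean eigenvalue, computed via traces
    so no eigendecomposition is needed; 1.0 means fully white."""
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n              # (groups, d, d)
    d = cov.shape[-1]
    trace = cov.diagonal(dim1=-2, dim2=-1).sum(-1)
    trace_sq = (cov * cov).sum(dim=(-2, -1))     # trace(C @ C), C symmetric
    return (d * trace_sq / trace.clamp(min=1e-20) ** 2).mean()


x = torch.randn(1000, 384)
print(whitening_metric(x, num_groups=1))  # close to 1 for white noise
```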
], batch size: 55, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:03:50,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=15.0 +2024-08-27 03:03:53,816 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:03:59,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=222058.66666666666, ans=0.025 +2024-08-27 03:04:08,715 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:04:26,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.70 vs. limit=22.5 +2024-08-27 03:04:30,759 INFO [train.py:1114] (2/4) Epoch 17, batch 1850, loss[loss=0.1988, simple_loss=0.2723, pruned_loss=0.04585, ctc_loss=0.08395, over 19604.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2624, pruned_loss=0.04178, ctc_loss=0.07808, over 3856458.31 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0 +2024-08-27 03:04:32,492 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.484e+02 1.846e+02 2.436e+02 4.218e+02, threshold=3.691e+02, percent-clipped=2.0 +2024-08-27 03:04:43,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.69 vs. limit=15.0 +2024-08-27 03:04:46,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222325.33333333334, ans=0.125 +2024-08-27 03:04:46,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=222325.33333333334, ans=0.2 +2024-08-27 03:04:53,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=222378.66666666666, ans=0.125 +2024-08-27 03:04:55,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222378.66666666666, ans=0.125 +2024-08-27 03:05:14,602 INFO [train.py:1114] (2/4) Epoch 17, batch 1900, loss[loss=0.2004, simple_loss=0.2791, pruned_loss=0.04429, ctc_loss=0.08251, over 19617.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2629, pruned_loss=0.04185, ctc_loss=0.07808, over 3861433.48 frames. 
], batch size: 59, lr: 8.75e-03, grad_scale: 16.0 +2024-08-27 03:05:18,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=222538.66666666666, ans=0.2 +2024-08-27 03:05:26,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222592.0, ans=0.1 +2024-08-27 03:05:29,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=222592.0, ans=0.05 +2024-08-27 03:05:42,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222698.66666666666, ans=0.125 +2024-08-27 03:05:50,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=222752.0, ans=0.0 +2024-08-27 03:05:58,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=222752.0, ans=0.2 +2024-08-27 03:05:59,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=222752.0, ans=0.125 +2024-08-27 03:06:00,577 INFO [train.py:1114] (2/4) Epoch 17, batch 1950, loss[loss=0.1748, simple_loss=0.2495, pruned_loss=0.03743, ctc_loss=0.06276, over 19600.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2635, pruned_loss=0.04188, ctc_loss=0.07783, over 3870786.46 frames. ], batch size: 52, lr: 8.74e-03, grad_scale: 16.0 +2024-08-27 03:06:02,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.462e+02 1.715e+02 2.122e+02 4.504e+02, threshold=3.430e+02, percent-clipped=1.0 +2024-08-27 03:06:17,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.66 vs. limit=10.0 +2024-08-27 03:06:18,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222912.0, ans=0.125 +2024-08-27 03:06:21,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=222912.0, ans=0.125 +2024-08-27 03:06:31,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222965.33333333334, ans=0.125 +2024-08-27 03:06:48,014 INFO [train.py:1114] (2/4) Epoch 17, batch 2000, loss[loss=0.1619, simple_loss=0.229, pruned_loss=0.03535, ctc_loss=0.06019, over 19661.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2637, pruned_loss=0.04211, ctc_loss=0.07819, over 3856145.28 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0 +2024-08-27 03:06:48,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.25 vs. 
limit=22.5 +2024-08-27 03:06:54,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=223072.0, ans=0.125 +2024-08-27 03:07:58,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=223125.33333333334, ans=0.0 +2024-08-27 03:08:42,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=223178.66666666666, ans=0.0 +2024-08-27 03:08:58,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=223232.0, ans=0.2 +2024-08-27 03:09:36,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=223285.33333333334, ans=0.0 +2024-08-27 03:09:37,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=223285.33333333334, ans=0.0 +2024-08-27 03:09:38,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-27 03:09:40,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=223338.66666666666, ans=0.2 +2024-08-27 03:09:41,593 INFO [train.py:1114] (2/4) Epoch 17, batch 2050, loss[loss=0.1646, simple_loss=0.2327, pruned_loss=0.03531, ctc_loss=0.06442, over 19708.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2627, pruned_loss=0.04174, ctc_loss=0.0777, over 3852292.04 frames. ], batch size: 47, lr: 8.73e-03, grad_scale: 32.0 +2024-08-27 03:09:41,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=223338.66666666666, ans=0.0 +2024-08-27 03:09:43,289 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.433e+02 1.718e+02 2.194e+02 3.489e+02, threshold=3.436e+02, percent-clipped=1.0 +2024-08-27 03:09:58,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.01 vs. limit=22.5 +2024-08-27 03:10:23,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=223392.0, ans=0.2 +2024-08-27 03:10:31,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.77 vs. limit=15.0 +2024-08-27 03:10:37,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223498.66666666666, ans=0.1 +2024-08-27 03:10:42,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=223498.66666666666, ans=0.0 +2024-08-27 03:10:46,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.76 vs. limit=15.0 +2024-08-27 03:13:07,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=223552.0, ans=0.025 +2024-08-27 03:13:17,697 INFO [train.py:1114] (2/4) Epoch 17, batch 2100, loss[loss=0.174, simple_loss=0.2553, pruned_loss=0.03338, ctc_loss=0.06499, over 19765.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.262, pruned_loss=0.04136, ctc_loss=0.07707, over 3859849.16 frames. 
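grad_scale in the batch records is the Native AMP loss scale: it sits at 32.0 through most of epoch 17, halves to 16.0 at batch 1750 and is back at 32.0 by batch 2000, the signature of a GradScaler that halves on an inf/nan step and doubles again after a run of clean steps. A hedged sketch of that loop; model, optimizer and compute_loss are placeholders, not the recipe's names:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

def train_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # silently skipped if the gradients overflowed
    scaler.update()         # scale /= 2 on overflow, *= 2 after growth_interval
    return loss.detach(), scaler.get_scale()  # get_scale() -> the logged grad_scale
```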
], batch size: 54, lr: 8.73e-03, grad_scale: 32.0 +2024-08-27 03:13:42,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223658.66666666666, ans=0.125 +2024-08-27 03:13:48,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.19 vs. limit=15.0 +2024-08-27 03:17:59,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=223712.0, ans=0.0 +2024-08-27 03:24:41,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.88 vs. limit=12.0 +2024-08-27 03:24:47,084 INFO [train.py:1114] (2/4) Epoch 17, batch 2150, loss[loss=0.1596, simple_loss=0.2397, pruned_loss=0.02913, ctc_loss=0.05329, over 19847.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.262, pruned_loss=0.04137, ctc_loss=0.07724, over 3870273.52 frames. ], batch size: 52, lr: 8.72e-03, grad_scale: 32.0 +2024-08-27 03:24:47,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.65 vs. limit=15.0 +2024-08-27 03:24:49,696 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.181e+02 1.464e+02 1.691e+02 2.317e+02 5.931e+02, threshold=3.382e+02, percent-clipped=6.0 +2024-08-27 03:25:20,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.03 vs. limit=22.5 +2024-08-27 03:25:51,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=223978.66666666666, ans=0.015 +2024-08-27 03:25:51,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=223978.66666666666, ans=0.125 +2024-08-27 03:26:31,796 INFO [train.py:1114] (2/4) Epoch 17, batch 2200, loss[loss=0.2017, simple_loss=0.2753, pruned_loss=0.04725, ctc_loss=0.08387, over 19591.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.262, pruned_loss=0.04112, ctc_loss=0.07687, over 3869561.05 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0 +2024-08-27 03:26:47,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=224192.0, ans=0.125 +2024-08-27 03:26:57,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.52 vs. limit=15.0 +2024-08-27 03:27:05,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=224298.66666666666, ans=0.125 +2024-08-27 03:27:26,260 INFO [train.py:1114] (2/4) Epoch 17, batch 2250, loss[loss=0.1871, simple_loss=0.272, pruned_loss=0.03692, ctc_loss=0.07091, over 19606.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.262, pruned_loss=0.04125, ctc_loss=0.07706, over 3868300.94 frames. 
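The (2/4) prefix on every record marks this file as the log of rank 2 in a 4-GPU DDP run; each rank writes its own copy in the same format. A small sketch of a rank-tagged logger producing this layout, assuming an already-initialised process group:

```python
import logging

import torch.distributed as dist


def setup_rank_logging() -> None:
    rank, world = dist.get_rank(), dist.get_world_size()
    fmt = (f"%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] "
           f"({rank}/{world}) %(message)s")
    logging.basicConfig(level=logging.INFO, format=fmt)
```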
], batch size: 55, lr: 8.71e-03, grad_scale: 32.0 +2024-08-27 03:27:29,879 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.517e+02 1.774e+02 2.256e+02 3.791e+02, threshold=3.548e+02, percent-clipped=1.0 +2024-08-27 03:27:40,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=224405.33333333334, ans=0.125 +2024-08-27 03:27:46,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=224458.66666666666, ans=0.125 +2024-08-27 03:27:56,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=224512.0, ans=0.0 +2024-08-27 03:28:04,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.96 vs. limit=22.5 +2024-08-27 03:28:52,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224618.66666666666, ans=0.1 +2024-08-27 03:29:04,917 INFO [train.py:1114] (2/4) Epoch 17, batch 2300, loss[loss=0.1768, simple_loss=0.2513, pruned_loss=0.03747, ctc_loss=0.06845, over 19502.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2615, pruned_loss=0.0415, ctc_loss=0.07733, over 3862443.18 frames. ], batch size: 49, lr: 8.71e-03, grad_scale: 16.0 +2024-08-27 03:29:05,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224672.0, ans=0.0 +2024-08-27 03:29:17,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=224725.33333333334, ans=0.125 +2024-08-27 03:29:17,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-27 03:29:18,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=224725.33333333334, ans=0.2 +2024-08-27 03:29:19,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=224725.33333333334, ans=0.125 +2024-08-27 03:29:20,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224725.33333333334, ans=0.1 +2024-08-27 03:35:34,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=224832.0, ans=0.125 +2024-08-27 03:35:35,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=224832.0, ans=0.125 +2024-08-27 03:36:28,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=224885.33333333334, ans=0.2 +2024-08-27 03:36:49,581 INFO [train.py:1114] (2/4) Epoch 17, batch 2350, loss[loss=0.2027, simple_loss=0.2827, pruned_loss=0.04434, ctc_loss=0.08516, over 19675.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2617, pruned_loss=0.0417, ctc_loss=0.07758, over 3864783.52 frames. 
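Each batch record pairs the current batch's loss[... over N frames] with a running tot_loss[... over M frames]. The fractional running frame counts (3864783.52 above) suggest a decayed, frame-weighted average rather than a plain sum; a sketch under that assumption, with an illustrative decay constant:

```python
class RunningLoss:
    """Frame-weighted running average with exponential forgetting."""

    def __init__(self, decay=0.999):
        self.decay = decay
        self.loss_sum = 0.0
        self.frame_sum = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frame_sum = self.decay * self.frame_sum + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frame_sum, 1.0)
```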
], batch size: 63, lr: 8.70e-03, grad_scale: 16.0 +2024-08-27 03:36:49,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224938.66666666666, ans=0.1 +2024-08-27 03:37:01,819 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.439e+02 1.647e+02 2.102e+02 4.091e+02, threshold=3.295e+02, percent-clipped=1.0 +2024-08-27 03:38:19,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.14 vs. limit=12.0 +2024-08-27 03:39:26,220 INFO [train.py:1114] (2/4) Epoch 17, batch 2400, loss[loss=0.1877, simple_loss=0.2647, pruned_loss=0.04076, ctc_loss=0.07314, over 19315.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2641, pruned_loss=0.04263, ctc_loss=0.07903, over 3858493.92 frames. ], batch size: 71, lr: 8.70e-03, grad_scale: 32.0 +2024-08-27 03:39:44,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=225205.33333333334, ans=0.025 +2024-08-27 03:40:23,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=225258.66666666666, ans=0.025 +2024-08-27 03:42:31,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=225365.33333333334, ans=0.125 +2024-08-27 03:43:29,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=225365.33333333334, ans=0.025 +2024-08-27 03:43:29,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=225365.33333333334, ans=0.025 +2024-08-27 03:43:50,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225418.66666666666, ans=0.1 +2024-08-27 03:44:21,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=225418.66666666666, ans=0.0 +2024-08-27 03:44:22,611 INFO [train.py:1114] (2/4) Epoch 17, batch 2450, loss[loss=0.2482, simple_loss=0.2963, pruned_loss=0.07317, ctc_loss=0.1344, over 13434.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.268, pruned_loss=0.04514, ctc_loss=0.08398, over 3729261.65 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 32.0 +2024-08-27 03:44:30,558 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.519e+02 1.805e+02 2.064e+02 2.900e+02, threshold=3.609e+02, percent-clipped=0.0 +2024-08-27 03:44:32,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=225472.0, ans=0.0 +2024-08-27 03:47:31,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225632.0, ans=0.1 +2024-08-27 03:50:08,496 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.1701, simple_loss=0.2364, pruned_loss=0.03794, ctc_loss=0.06979, over 19432.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2364, pruned_loss=0.03794, ctc_loss=0.06979, over 19432.00 frames. 
], batch size: 48, lr: 8.44e-03, grad_scale: 32.0 +2024-08-27 03:50:08,496 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-27 03:56:59,328 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1731, simple_loss=0.2653, pruned_loss=0.0303, ctc_loss=0.05087, over 944034.00 frames. +2024-08-27 03:56:59,328 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12932MB +2024-08-27 03:58:14,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.62 vs. limit=15.0 +2024-08-27 03:58:21,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=225733.33333333334, ans=0.0 +2024-08-27 03:58:47,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=15.0 +2024-08-27 03:58:56,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=225786.66666666666, ans=0.125 +2024-08-27 03:59:27,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=225840.0, ans=0.125 +2024-08-27 03:59:36,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=225893.33333333334, ans=0.0 +2024-08-27 03:59:37,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=225893.33333333334, ans=0.2 +2024-08-27 03:59:40,031 INFO [train.py:1114] (2/4) Epoch 18, batch 50, loss[loss=0.1561, simple_loss=0.2267, pruned_loss=0.03134, ctc_loss=0.057, over 19736.00 frames. ], tot_loss[loss=0.19, simple_loss=0.264, pruned_loss=0.04221, ctc_loss=0.07889, over 844774.48 frames. ], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-27 03:59:40,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=225946.66666666666, ans=0.2 +2024-08-27 03:59:47,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.57 vs. limit=15.0 +2024-08-27 03:59:52,924 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.620e+02 1.870e+02 2.127e+02 3.474e+02, threshold=3.740e+02, percent-clipped=0.0 +2024-08-27 03:59:56,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=226000.0, ans=0.2 +2024-08-27 04:00:15,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226106.66666666666, ans=0.0 +2024-08-27 04:00:18,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226106.66666666666, ans=0.0 +2024-08-27 04:00:27,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=226160.0, ans=0.025 +2024-08-27 04:00:34,083 INFO [train.py:1114] (2/4) Epoch 18, batch 100, loss[loss=0.178, simple_loss=0.2525, pruned_loss=0.03816, ctc_loss=0.06778, over 19708.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2657, pruned_loss=0.04234, ctc_loss=0.07882, over 1499238.30 frames. 
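At the start of epoch 18 the trainer pauses for a full validation pass (loss=0.1731 over 944034.00 frames above) and reports peak GPU memory. A minimal sketch of such a pass; valid_loader and compute_loss stand in for the recipe's own objects:

```python
import torch


def run_validation(model, valid_loader, compute_loss, device) -> float:
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    max_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={tot_loss / tot_frames:.4f}, "
          f"over {tot_frames:.2f} frames; max memory {max_mb}MB")
    return tot_loss / tot_frames
```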
], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-27 04:00:38,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226213.33333333334, ans=0.125 +2024-08-27 04:05:33,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=226320.0, ans=0.125 +2024-08-27 04:05:40,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226373.33333333334, ans=0.0 +2024-08-27 04:06:00,492 INFO [train.py:1114] (2/4) Epoch 18, batch 150, loss[loss=0.1675, simple_loss=0.2407, pruned_loss=0.03431, ctc_loss=0.06414, over 19719.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2628, pruned_loss=0.04107, ctc_loss=0.07655, over 2028180.04 frames. ], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-27 04:06:02,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226480.0, ans=0.1 +2024-08-27 04:06:04,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=226480.0, ans=0.0 +2024-08-27 04:06:15,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-27 04:06:16,203 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.462e+02 1.764e+02 2.186e+02 3.977e+02, threshold=3.529e+02, percent-clipped=1.0 +2024-08-27 04:06:18,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=226533.33333333334, ans=0.125 +2024-08-27 04:06:21,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.80 vs. limit=10.0 +2024-08-27 04:06:21,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226586.66666666666, ans=0.125 +2024-08-27 04:06:29,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.81 vs. limit=15.0 +2024-08-27 04:06:41,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=226693.33333333334, ans=0.2 +2024-08-27 04:06:49,810 INFO [train.py:1114] (2/4) Epoch 18, batch 200, loss[loss=0.1842, simple_loss=0.2618, pruned_loss=0.03779, ctc_loss=0.07724, over 18272.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2617, pruned_loss=0.04091, ctc_loss=0.07648, over 2435566.49 frames. ], batch size: 85, lr: 8.42e-03, grad_scale: 32.0 +2024-08-27 04:07:02,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226800.0, ans=0.125 +2024-08-27 04:07:16,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.40 vs. 
limit=10.0 +2024-08-27 04:07:23,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=226906.66666666666, ans=0.125 +2024-08-27 04:07:28,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226960.0, ans=0.0 +2024-08-27 04:07:28,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226960.0, ans=0.1 +2024-08-27 04:07:35,900 INFO [train.py:1114] (2/4) Epoch 18, batch 250, loss[loss=0.2162, simple_loss=0.2952, pruned_loss=0.05023, ctc_loss=0.09207, over 19347.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2616, pruned_loss=0.04083, ctc_loss=0.07645, over 2754676.87 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-27 04:07:40,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.25 vs. limit=10.0 +2024-08-27 04:07:50,822 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.244e+02 1.521e+02 1.873e+02 2.606e+02 4.367e+02, threshold=3.746e+02, percent-clipped=8.0 +2024-08-27 04:07:51,097 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:07:52,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227066.66666666666, ans=0.0 +2024-08-27 04:08:04,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227120.0, ans=0.125 +2024-08-27 04:08:05,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0 +2024-08-27 04:08:11,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227173.33333333334, ans=0.125 +2024-08-27 04:08:24,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=227226.66666666666, ans=0.0 +2024-08-27 04:08:26,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227226.66666666666, ans=0.125 +2024-08-27 04:08:31,213 INFO [train.py:1114] (2/4) Epoch 18, batch 300, loss[loss=0.2134, simple_loss=0.277, pruned_loss=0.05464, ctc_loss=0.1013, over 19550.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2611, pruned_loss=0.04089, ctc_loss=0.07633, over 3000148.78 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-27 04:08:49,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227386.66666666666, ans=0.125 +2024-08-27 04:08:49,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=15.0 +2024-08-27 04:08:51,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-08-27 04:09:17,363 INFO [train.py:1114] (2/4) Epoch 18, batch 350, loss[loss=0.1582, simple_loss=0.2333, pruned_loss=0.02987, ctc_loss=0.05848, over 19783.00 frames. 
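The learning rate decays slowly within epoch 17 (8.84e-03 down to 8.69e-03) and then steps down to 8.44e-03 at the epoch-18 boundary, so the schedule depends on both batch count and epoch. One rule with exactly that shape is an Eden-style schedule, sketched below; base_lr, lr_batches and lr_epochs are illustrative guesses, not the run's settings:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # decays smoothly in the batch index and steps down across epochs
    return (base_lr
            * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
            * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25)
```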
], tot_loss[loss=0.1871, simple_loss=0.2616, pruned_loss=0.04095, ctc_loss=0.07652, over 3189797.23 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-27 04:09:19,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227546.66666666666, ans=0.125 +2024-08-27 04:09:22,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227546.66666666666, ans=0.125 +2024-08-27 04:09:30,339 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.460e+02 1.643e+02 1.956e+02 3.165e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-27 04:09:34,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=227600.0, ans=0.035 +2024-08-27 04:10:13,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=12.0 +2024-08-27 04:10:14,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227706.66666666666, ans=0.125 +2024-08-27 04:10:31,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227760.0, ans=0.1 +2024-08-27 04:10:33,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227760.0, ans=0.125 +2024-08-27 04:10:33,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=227760.0, ans=0.125 +2024-08-27 04:10:40,390 INFO [train.py:1114] (2/4) Epoch 18, batch 400, loss[loss=0.1926, simple_loss=0.2768, pruned_loss=0.039, ctc_loss=0.07602, over 19482.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2613, pruned_loss=0.04064, ctc_loss=0.07626, over 3341232.34 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-27 04:10:40,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=227813.33333333334, ans=0.0 +2024-08-27 04:11:57,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=227866.66666666666, ans=0.2 +2024-08-27 04:12:32,838 INFO [train.py:1114] (2/4) Epoch 18, batch 450, loss[loss=0.196, simple_loss=0.2769, pruned_loss=0.04228, ctc_loss=0.07623, over 19625.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.262, pruned_loss=0.04096, ctc_loss=0.07679, over 3450729.30 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-27 04:12:34,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=15.0 +2024-08-27 04:12:53,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.31 vs. limit=15.0 +2024-08-27 04:12:59,753 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.395e+02 1.673e+02 2.305e+02 3.910e+02, threshold=3.347e+02, percent-clipped=3.0 +2024-08-27 04:13:01,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.63 vs. 
limit=12.0 +2024-08-27 04:13:11,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=228186.66666666666, ans=0.0 +2024-08-27 04:13:15,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.20 vs. limit=15.0 +2024-08-27 04:13:22,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=228240.0, ans=0.0 +2024-08-27 04:13:22,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.34 vs. limit=15.0 +2024-08-27 04:13:24,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=228293.33333333334, ans=0.04949747468305833 +2024-08-27 04:13:32,851 INFO [train.py:1114] (2/4) Epoch 18, batch 500, loss[loss=0.2047, simple_loss=0.2826, pruned_loss=0.04608, ctc_loss=0.08673, over 19681.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.261, pruned_loss=0.04048, ctc_loss=0.07605, over 3546019.83 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-27 04:13:45,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228400.0, ans=0.1 +2024-08-27 04:13:50,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=228400.0, ans=0.035 +2024-08-27 04:13:54,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=228453.33333333334, ans=0.125 +2024-08-27 04:13:58,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=228453.33333333334, ans=0.125 +2024-08-27 04:14:10,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=228506.66666666666, ans=0.025 +2024-08-27 04:14:20,877 INFO [train.py:1114] (2/4) Epoch 18, batch 550, loss[loss=0.1982, simple_loss=0.2691, pruned_loss=0.04726, ctc_loss=0.08179, over 19242.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2612, pruned_loss=0.04081, ctc_loss=0.0765, over 3608988.72 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-27 04:14:34,522 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.436e+02 1.681e+02 2.031e+02 3.505e+02, threshold=3.363e+02, percent-clipped=1.0 +2024-08-27 04:14:37,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=228666.66666666666, ans=0.0 +2024-08-27 04:14:47,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=228720.0, ans=0.0 +2024-08-27 04:15:01,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=228773.33333333334, ans=0.125 +2024-08-27 04:15:14,801 INFO [train.py:1114] (2/4) Epoch 18, batch 600, loss[loss=0.1926, simple_loss=0.274, pruned_loss=0.04088, ctc_loss=0.07382, over 19360.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.261, pruned_loss=0.04062, ctc_loss=0.0762, over 3665746.22 frames. 
], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-27 04:15:21,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.33 vs. limit=15.0 +2024-08-27 04:15:56,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229040.0, ans=0.125 +2024-08-27 04:16:57,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=229093.33333333334, ans=0.2 +2024-08-27 04:17:05,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=229093.33333333334, ans=0.5 +2024-08-27 04:17:06,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.23 vs. limit=15.0 +2024-08-27 04:17:06,966 INFO [train.py:1114] (2/4) Epoch 18, batch 650, loss[loss=0.1893, simple_loss=0.2679, pruned_loss=0.04001, ctc_loss=0.07683, over 19756.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2604, pruned_loss=0.04044, ctc_loss=0.07571, over 3716208.52 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-27 04:17:17,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=12.0 +2024-08-27 04:17:20,105 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.567e+02 1.955e+02 2.726e+02 4.189e+02, threshold=3.909e+02, percent-clipped=6.0 +2024-08-27 04:17:24,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229253.33333333334, ans=0.1 +2024-08-27 04:17:33,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=229253.33333333334, ans=0.2 +2024-08-27 04:17:47,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.22 vs. limit=22.5 +2024-08-27 04:17:49,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=229360.0, ans=0.2 +2024-08-27 04:18:45,805 INFO [train.py:1114] (2/4) Epoch 18, batch 700, loss[loss=0.1634, simple_loss=0.2427, pruned_loss=0.03117, ctc_loss=0.05412, over 19722.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2607, pruned_loss=0.04053, ctc_loss=0.07588, over 3749014.97 frames. ], batch size: 51, lr: 8.37e-03, grad_scale: 32.0 +2024-08-27 04:19:07,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229520.0, ans=0.1 +2024-08-27 04:19:09,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229520.0, ans=0.1 +2024-08-27 04:19:30,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=229626.66666666666, ans=0.125 +2024-08-27 04:19:35,528 INFO [train.py:1114] (2/4) Epoch 18, batch 750, loss[loss=0.1837, simple_loss=0.2612, pruned_loss=0.0382, ctc_loss=0.07439, over 19859.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2602, pruned_loss=0.04027, ctc_loss=0.07535, over 3775543.53 frames. 
], batch size: 55, lr: 8.37e-03, grad_scale: 16.0 +2024-08-27 04:19:49,137 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.399e+02 1.632e+02 2.193e+02 3.721e+02, threshold=3.263e+02, percent-clipped=0.0 +2024-08-27 04:19:49,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229733.33333333334, ans=0.125 +2024-08-27 04:19:55,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229786.66666666666, ans=0.125 +2024-08-27 04:20:11,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229840.0, ans=0.125 +2024-08-27 04:20:25,627 INFO [train.py:1114] (2/4) Epoch 18, batch 800, loss[loss=0.1687, simple_loss=0.2389, pruned_loss=0.03565, ctc_loss=0.06789, over 19430.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2607, pruned_loss=0.04059, ctc_loss=0.07589, over 3796776.82 frames. ], batch size: 48, lr: 8.37e-03, grad_scale: 32.0 +2024-08-27 04:20:55,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=230000.0, ans=0.125 +2024-08-27 04:21:10,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=230053.33333333334, ans=0.125 +2024-08-27 04:21:17,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=230106.66666666666, ans=0.125 +2024-08-27 04:21:33,149 INFO [train.py:1114] (2/4) Epoch 18, batch 850, loss[loss=0.2033, simple_loss=0.2887, pruned_loss=0.04279, ctc_loss=0.0809, over 19630.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2605, pruned_loss=0.04061, ctc_loss=0.07597, over 3815628.64 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-27 04:21:35,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230213.33333333334, ans=0.1 +2024-08-27 04:21:42,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=230266.66666666666, ans=0.125 +2024-08-27 04:21:57,962 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.452e+02 1.736e+02 2.395e+02 3.551e+02, threshold=3.472e+02, percent-clipped=2.0 +2024-08-27 04:22:05,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=230320.0, ans=0.025 +2024-08-27 04:22:25,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230426.66666666666, ans=0.1 +2024-08-27 04:22:25,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=230426.66666666666, ans=0.07 +2024-08-27 04:22:31,214 INFO [train.py:1114] (2/4) Epoch 18, batch 900, loss[loss=0.168, simple_loss=0.2364, pruned_loss=0.03634, ctc_loss=0.06727, over 19406.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2608, pruned_loss=0.04094, ctc_loss=0.07633, over 3819241.07 frames. 
], batch size: 48, lr: 8.36e-03, grad_scale: 32.0 +2024-08-27 04:22:31,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=230480.0, ans=0.0 +2024-08-27 04:22:32,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=230480.0, ans=0.0 +2024-08-27 04:22:43,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=230533.33333333334, ans=0.0 +2024-08-27 04:22:52,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230586.66666666666, ans=0.125 +2024-08-27 04:23:10,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=230693.33333333334, ans=0.0 +2024-08-27 04:23:17,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.41 vs. limit=15.0 +2024-08-27 04:23:17,851 INFO [train.py:1114] (2/4) Epoch 18, batch 950, loss[loss=0.1657, simple_loss=0.2442, pruned_loss=0.03248, ctc_loss=0.05562, over 19504.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2609, pruned_loss=0.04099, ctc_loss=0.0764, over 3821614.48 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-27 04:23:26,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230746.66666666666, ans=0.0 +2024-08-27 04:23:31,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230800.0, ans=0.125 +2024-08-27 04:23:31,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.79 vs. limit=15.0 +2024-08-27 04:23:36,276 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.393e+02 1.674e+02 2.227e+02 4.492e+02, threshold=3.349e+02, percent-clipped=5.0 +2024-08-27 04:23:45,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230853.33333333334, ans=0.125 +2024-08-27 04:23:49,194 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.64 vs. limit=15.0 +2024-08-27 04:23:53,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=230853.33333333334, ans=0.0 +2024-08-27 04:24:12,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=230960.0, ans=0.2 +2024-08-27 04:24:13,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=12.0 +2024-08-27 04:24:15,459 INFO [train.py:1114] (2/4) Epoch 18, batch 1000, loss[loss=0.1793, simple_loss=0.2649, pruned_loss=0.0331, ctc_loss=0.06877, over 19859.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2618, pruned_loss=0.04139, ctc_loss=0.07733, over 3816720.59 frames. 
], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-27 04:24:19,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=231013.33333333334, ans=10.0 +2024-08-27 04:24:20,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0 +2024-08-27 04:24:29,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.07 vs. limit=15.0 +2024-08-27 04:24:29,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=231066.66666666666, ans=0.05 +2024-08-27 04:24:34,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=231120.0, ans=0.025 +2024-08-27 04:25:11,429 INFO [train.py:1114] (2/4) Epoch 18, batch 1050, loss[loss=0.1939, simple_loss=0.2799, pruned_loss=0.03897, ctc_loss=0.0751, over 19821.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2612, pruned_loss=0.04091, ctc_loss=0.07661, over 3823860.40 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-27 04:25:14,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.11 vs. limit=15.0 +2024-08-27 04:25:20,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231333.33333333334, ans=0.0 +2024-08-27 04:25:23,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=15.0 +2024-08-27 04:25:25,222 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.375e+02 1.549e+02 1.865e+02 3.480e+02, threshold=3.097e+02, percent-clipped=1.0 +2024-08-27 04:25:28,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.65 vs. limit=15.0 +2024-08-27 04:25:29,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231386.66666666666, ans=0.125 +2024-08-27 04:25:29,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=231386.66666666666, ans=0.125 +2024-08-27 04:25:57,490 INFO [train.py:1114] (2/4) Epoch 18, batch 1100, loss[loss=0.1734, simple_loss=0.2587, pruned_loss=0.03157, ctc_loss=0.06258, over 19597.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.261, pruned_loss=0.04072, ctc_loss=0.07628, over 3831830.36 frames. ], batch size: 52, lr: 8.34e-03, grad_scale: 16.0 +2024-08-27 04:26:33,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231653.33333333334, ans=0.125 +2024-08-27 04:26:58,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=231706.66666666666, ans=0.125 +2024-08-27 04:27:31,648 INFO [train.py:1114] (2/4) Epoch 18, batch 1150, loss[loss=0.1775, simple_loss=0.2549, pruned_loss=0.03516, ctc_loss=0.07432, over 19594.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2606, pruned_loss=0.04062, ctc_loss=0.07618, over 3830669.25 frames. 
], batch size: 52, lr: 8.33e-03, grad_scale: 16.0 +2024-08-27 04:27:36,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=231813.33333333334, ans=0.2 +2024-08-27 04:27:37,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=231813.33333333334, ans=0.2 +2024-08-27 04:27:50,640 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.426e+02 1.640e+02 2.078e+02 3.185e+02, threshold=3.280e+02, percent-clipped=3.0 +2024-08-27 04:28:15,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=231973.33333333334, ans=22.5 +2024-08-27 04:28:16,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=231973.33333333334, ans=0.025 +2024-08-27 04:28:27,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=232026.66666666666, ans=0.0 +2024-08-27 04:28:27,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.51 vs. limit=10.0 +2024-08-27 04:28:32,342 INFO [train.py:1114] (2/4) Epoch 18, batch 1200, loss[loss=0.1948, simple_loss=0.2689, pruned_loss=0.04349, ctc_loss=0.08424, over 19844.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2615, pruned_loss=0.0409, ctc_loss=0.07676, over 3825491.91 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-27 04:28:32,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.00 vs. limit=22.5 +2024-08-27 04:28:48,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=232133.33333333334, ans=0.125 +2024-08-27 04:28:49,077 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:29:01,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=232240.0, ans=0.125 +2024-08-27 04:29:06,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=232240.0, ans=0.125 +2024-08-27 04:29:17,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=232293.33333333334, ans=0.0 +2024-08-27 04:29:18,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=232293.33333333334, ans=0.0 +2024-08-27 04:29:19,798 INFO [train.py:1114] (2/4) Epoch 18, batch 1250, loss[loss=0.1976, simple_loss=0.2747, pruned_loss=0.04436, ctc_loss=0.07929, over 19524.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2624, pruned_loss=0.04115, ctc_loss=0.07716, over 3842929.75 frames. 
], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-27 04:29:30,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=232400.0, ans=0.09899494936611666 +2024-08-27 04:29:34,585 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.452e+02 1.815e+02 2.295e+02 4.200e+02, threshold=3.630e+02, percent-clipped=5.0 +2024-08-27 04:29:35,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0 +2024-08-27 04:29:41,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=232453.33333333334, ans=0.125 +2024-08-27 04:30:32,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=232560.0, ans=0.0 +2024-08-27 04:30:40,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=232560.0, ans=0.0 +2024-08-27 04:30:41,893 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:30:43,396 INFO [train.py:1114] (2/4) Epoch 18, batch 1300, loss[loss=0.2065, simple_loss=0.2798, pruned_loss=0.04846, ctc_loss=0.09065, over 18811.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2616, pruned_loss=0.04082, ctc_loss=0.07634, over 3845733.83 frames. ], batch size: 76, lr: 8.32e-03, grad_scale: 16.0 +2024-08-27 04:30:50,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=232613.33333333334, ans=0.025 +2024-08-27 04:30:52,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232613.33333333334, ans=0.1 +2024-08-27 04:31:02,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=232666.66666666666, ans=0.125 +2024-08-27 04:31:26,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=232826.66666666666, ans=0.125 +2024-08-27 04:31:33,091 INFO [train.py:1114] (2/4) Epoch 18, batch 1350, loss[loss=0.1817, simple_loss=0.2636, pruned_loss=0.03617, ctc_loss=0.06837, over 19781.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2608, pruned_loss=0.0404, ctc_loss=0.07554, over 3856639.95 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 16.0 +2024-08-27 04:31:39,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=232880.0, ans=0.125 +2024-08-27 04:31:42,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=232933.33333333334, ans=0.125 +2024-08-27 04:31:48,898 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.387e+02 1.655e+02 2.106e+02 4.022e+02, threshold=3.310e+02, percent-clipped=4.0 +2024-08-27 04:32:06,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=233040.0, ans=0.5 +2024-08-27 04:32:19,569 INFO [train.py:1114] (2/4) Epoch 18, batch 1400, loss[loss=0.176, simple_loss=0.2465, pruned_loss=0.03874, ctc_loss=0.0702, over 19707.00 frames. 
], tot_loss[loss=0.1855, simple_loss=0.2604, pruned_loss=0.04023, ctc_loss=0.07537, over 3863721.32 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 16.0 +2024-08-27 04:32:57,616 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.84 vs. limit=10.0 +2024-08-27 04:32:59,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=233253.33333333334, ans=0.025 +2024-08-27 04:33:21,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.44 vs. limit=15.0 +2024-08-27 04:33:40,121 INFO [train.py:1114] (2/4) Epoch 18, batch 1450, loss[loss=0.2022, simple_loss=0.2705, pruned_loss=0.04824, ctc_loss=0.09335, over 19658.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2611, pruned_loss=0.0404, ctc_loss=0.07575, over 3861602.59 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 16.0 +2024-08-27 04:34:13,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=233466.66666666666, ans=0.0 +2024-08-27 04:34:21,000 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.457e+02 1.713e+02 1.981e+02 3.848e+02, threshold=3.426e+02, percent-clipped=1.0 +2024-08-27 04:34:39,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233573.33333333334, ans=0.1 +2024-08-27 04:34:47,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=233573.33333333334, ans=0.125 +2024-08-27 04:35:48,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=233626.66666666666, ans=0.0 +2024-08-27 04:35:50,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.39 vs. limit=10.0 +2024-08-27 04:36:59,899 INFO [train.py:1114] (2/4) Epoch 18, batch 1500, loss[loss=0.1818, simple_loss=0.2686, pruned_loss=0.03409, ctc_loss=0.06688, over 19583.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2613, pruned_loss=0.04048, ctc_loss=0.07571, over 3861275.12 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 16.0 +2024-08-27 04:37:46,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233733.33333333334, ans=0.1 +2024-08-27 04:37:47,893 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:37:51,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=233786.66666666666, ans=0.125 +2024-08-27 04:38:01,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.37 vs. 
limit=15.0 +2024-08-27 04:38:02,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233786.66666666666, ans=0.125 +2024-08-27 04:38:16,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233786.66666666666, ans=0.1 +2024-08-27 04:38:20,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=233840.0, ans=0.0 +2024-08-27 04:38:23,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=233840.0, ans=0.1 +2024-08-27 04:38:28,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233840.0, ans=0.125 +2024-08-27 04:38:32,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=233893.33333333334, ans=0.125 +2024-08-27 04:38:34,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=233893.33333333334, ans=0.125 +2024-08-27 04:39:00,330 INFO [train.py:1114] (2/4) Epoch 18, batch 1550, loss[loss=0.2161, simple_loss=0.2775, pruned_loss=0.05772, ctc_loss=0.09789, over 19595.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2607, pruned_loss=0.04063, ctc_loss=0.07582, over 3845791.37 frames. ], batch size: 60, lr: 8.30e-03, grad_scale: 16.0 +2024-08-27 04:39:36,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=15.0 +2024-08-27 04:39:51,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=234000.0, ans=0.025 +2024-08-27 04:39:51,778 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.454e+02 1.713e+02 2.109e+02 3.815e+02, threshold=3.426e+02, percent-clipped=1.0 +2024-08-27 04:40:03,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.83 vs. limit=22.5 +2024-08-27 04:40:08,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=234053.33333333334, ans=0.0 +2024-08-27 04:40:24,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=234160.0, ans=10.0 +2024-08-27 04:40:24,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=234160.0, ans=0.0 +2024-08-27 04:40:25,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=234160.0, ans=0.125 +2024-08-27 04:40:35,445 INFO [train.py:1114] (2/4) Epoch 18, batch 1600, loss[loss=0.199, simple_loss=0.2745, pruned_loss=0.04487, ctc_loss=0.08449, over 19853.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2609, pruned_loss=0.04081, ctc_loss=0.07625, over 3836055.87 frames. ], batch size: 57, lr: 8.29e-03, grad_scale: 32.0 +2024-08-27 04:40:48,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.48 vs. 
limit=15.0 +2024-08-27 04:41:17,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=234320.0, ans=0.015 +2024-08-27 04:41:20,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.34 vs. limit=15.0 +2024-08-27 04:41:40,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=234426.66666666666, ans=0.125 +2024-08-27 04:41:53,474 INFO [train.py:1114] (2/4) Epoch 18, batch 1650, loss[loss=0.1907, simple_loss=0.2704, pruned_loss=0.03937, ctc_loss=0.08066, over 19643.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2609, pruned_loss=0.04082, ctc_loss=0.07626, over 3830462.78 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0 +2024-08-27 04:41:56,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234480.0, ans=0.1 +2024-08-27 04:42:02,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234480.0, ans=0.125 +2024-08-27 04:42:16,286 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.559e+02 1.894e+02 2.296e+02 3.896e+02, threshold=3.788e+02, percent-clipped=3.0 +2024-08-27 04:42:23,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234586.66666666666, ans=0.1 +2024-08-27 04:42:50,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=234640.0, ans=0.125 +2024-08-27 04:42:50,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=234640.0, ans=0.04949747468305833 +2024-08-27 04:43:00,722 INFO [train.py:1114] (2/4) Epoch 18, batch 1700, loss[loss=0.168, simple_loss=0.231, pruned_loss=0.03826, ctc_loss=0.07118, over 19671.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.261, pruned_loss=0.04067, ctc_loss=0.076, over 3845647.98 frames. ], batch size: 46, lr: 8.28e-03, grad_scale: 32.0 +2024-08-27 04:43:07,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234746.66666666666, ans=0.0 +2024-08-27 04:43:25,555 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=15.0 +2024-08-27 04:43:28,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=234853.33333333334, ans=0.125 +2024-08-27 04:43:37,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=234906.66666666666, ans=0.125 +2024-08-27 04:43:48,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. limit=6.0 +2024-08-27 04:43:57,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-08-27 04:43:59,949 INFO [train.py:1114] (2/4) Epoch 18, batch 1750, loss[loss=0.1602, simple_loss=0.2314, pruned_loss=0.03212, ctc_loss=0.06171, over 19674.00 frames. 
], tot_loss[loss=0.1861, simple_loss=0.2606, pruned_loss=0.04059, ctc_loss=0.07595, over 3850710.37 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0 +2024-08-27 04:44:00,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=235013.33333333334, ans=0.125 +2024-08-27 04:44:01,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=235013.33333333334, ans=0.125 +2024-08-27 04:44:01,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235013.33333333334, ans=0.1 +2024-08-27 04:44:07,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=235066.66666666666, ans=0.125 +2024-08-27 04:44:10,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=235066.66666666666, ans=0.125 +2024-08-27 04:44:14,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235066.66666666666, ans=0.125 +2024-08-27 04:44:16,972 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.479e+02 1.670e+02 2.161e+02 3.908e+02, threshold=3.340e+02, percent-clipped=1.0 +2024-08-27 04:44:17,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.91 vs. limit=15.0 +2024-08-27 04:44:24,889 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:44:50,511 INFO [train.py:1114] (2/4) Epoch 18, batch 1800, loss[loss=0.1827, simple_loss=0.2645, pruned_loss=0.0371, ctc_loss=0.06684, over 19612.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2607, pruned_loss=0.04052, ctc_loss=0.07584, over 3851559.86 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 16.0 +2024-08-27 04:44:52,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235280.0, ans=0.1 +2024-08-27 04:44:55,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=235280.0, ans=0.05 +2024-08-27 04:44:55,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235280.0, ans=0.125 +2024-08-27 04:45:04,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=235333.33333333334, ans=0.025 +2024-08-27 04:45:07,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=6.0 +2024-08-27 04:45:32,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.24 vs. limit=22.5 +2024-08-27 04:45:41,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235493.33333333334, ans=0.125 +2024-08-27 04:45:48,624 INFO [train.py:1114] (2/4) Epoch 18, batch 1850, loss[loss=0.1932, simple_loss=0.27, pruned_loss=0.04309, ctc_loss=0.07543, over 19593.00 frames. 
], tot_loss[loss=0.1853, simple_loss=0.2601, pruned_loss=0.04022, ctc_loss=0.07509, over 3855874.93 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 8.0 +2024-08-27 04:45:49,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=235546.66666666666, ans=0.0 +2024-08-27 04:45:51,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.74 vs. limit=15.0 +2024-08-27 04:45:53,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-08-27 04:46:32,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=235600.0, ans=0.125 +2024-08-27 04:46:36,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=235600.0, ans=0.07 +2024-08-27 04:46:38,527 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.500e+02 1.800e+02 2.247e+02 4.177e+02, threshold=3.601e+02, percent-clipped=3.0 +2024-08-27 04:46:43,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=235653.33333333334, ans=0.07 +2024-08-27 04:46:49,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.57 vs. limit=15.0 +2024-08-27 04:47:06,318 INFO [train.py:1114] (2/4) Epoch 18, batch 1900, loss[loss=0.1864, simple_loss=0.2693, pruned_loss=0.03671, ctc_loss=0.07548, over 19650.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.261, pruned_loss=0.04069, ctc_loss=0.0759, over 3861000.92 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 8.0 +2024-08-27 04:47:14,235 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:47:20,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.80 vs. 
limit=8.0 +2024-08-27 04:47:22,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=235866.66666666666, ans=0.0 +2024-08-27 04:47:24,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=235920.0, ans=0.125 +2024-08-27 04:47:26,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=235920.0, ans=0.125 +2024-08-27 04:47:32,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=235920.0, ans=10.0 +2024-08-27 04:47:32,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=235920.0, ans=0.0 +2024-08-27 04:47:42,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=235973.33333333334, ans=0.0 +2024-08-27 04:47:45,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236026.66666666666, ans=0.1 +2024-08-27 04:47:47,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=236026.66666666666, ans=0.0 +2024-08-27 04:47:53,595 INFO [train.py:1114] (2/4) Epoch 18, batch 1950, loss[loss=0.173, simple_loss=0.2514, pruned_loss=0.03406, ctc_loss=0.06594, over 19583.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2623, pruned_loss=0.04103, ctc_loss=0.07653, over 3869991.12 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 8.0 +2024-08-27 04:47:58,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.64 vs. limit=12.0 +2024-08-27 04:48:00,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=236080.0, ans=0.0 +2024-08-27 04:48:04,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236133.33333333334, ans=0.0 +2024-08-27 04:48:12,683 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.481e+02 1.697e+02 2.159e+02 5.555e+02, threshold=3.394e+02, percent-clipped=1.0 +2024-08-27 04:48:17,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=236186.66666666666, ans=0.125 +2024-08-27 04:48:25,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=236240.0, ans=0.0 +2024-08-27 04:48:25,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=236240.0, ans=0.125 +2024-08-27 04:48:31,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.54 vs. limit=15.0 +2024-08-27 04:48:32,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236240.0, ans=0.0 +2024-08-27 04:48:50,293 INFO [train.py:1114] (2/4) Epoch 18, batch 2000, loss[loss=0.1631, simple_loss=0.2362, pruned_loss=0.03235, ctc_loss=0.06318, over 19673.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2625, pruned_loss=0.04111, ctc_loss=0.07669, over 3854452.41 frames. 
], batch size: 45, lr: 8.25e-03, grad_scale: 8.0 +2024-08-27 04:48:56,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=236346.66666666666, ans=0.125 +2024-08-27 04:48:56,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236346.66666666666, ans=0.125 +2024-08-27 04:48:57,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.21 vs. limit=6.0 +2024-08-27 04:48:58,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236346.66666666666, ans=0.125 +2024-08-27 04:49:44,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=236400.0, ans=0.0 +2024-08-27 04:49:52,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=236453.33333333334, ans=0.125 +2024-08-27 04:49:56,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=236506.66666666666, ans=0.125 +2024-08-27 04:50:04,404 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:50:13,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=236560.0, ans=0.125 +2024-08-27 04:50:16,430 INFO [train.py:1114] (2/4) Epoch 18, batch 2050, loss[loss=0.1721, simple_loss=0.2417, pruned_loss=0.03792, ctc_loss=0.06696, over 19716.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2617, pruned_loss=0.04096, ctc_loss=0.07643, over 3851418.13 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 8.0 +2024-08-27 04:50:25,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236666.66666666666, ans=0.125 +2024-08-27 04:50:40,417 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:50:47,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.468e+02 1.842e+02 2.423e+02 4.039e+02, threshold=3.684e+02, percent-clipped=4.0 +2024-08-27 04:50:51,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236720.0, ans=0.1 +2024-08-27 04:50:52,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=236720.0, ans=0.05 +2024-08-27 04:51:01,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=236773.33333333334, ans=0.2 +2024-08-27 04:51:13,547 INFO [train.py:1114] (2/4) Epoch 18, batch 2100, loss[loss=0.1876, simple_loss=0.2637, pruned_loss=0.04013, ctc_loss=0.07806, over 19773.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2609, pruned_loss=0.04055, ctc_loss=0.07561, over 3858565.95 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 8.0 +2024-08-27 04:51:36,997 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.33 vs. 
limit=12.0 +2024-08-27 04:51:39,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.62 vs. limit=22.5 +2024-08-27 04:51:55,437 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:52:03,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=237040.0, ans=0.125 +2024-08-27 04:52:07,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=237093.33333333334, ans=0.025 +2024-08-27 04:52:10,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237093.33333333334, ans=0.125 +2024-08-27 04:52:13,282 INFO [train.py:1114] (2/4) Epoch 18, batch 2150, loss[loss=0.1837, simple_loss=0.2578, pruned_loss=0.03981, ctc_loss=0.0748, over 19870.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2601, pruned_loss=0.04034, ctc_loss=0.0751, over 3868180.37 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 8.0 +2024-08-27 04:52:14,246 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.39 vs. limit=22.5 +2024-08-27 04:52:16,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=237146.66666666666, ans=0.0 +2024-08-27 04:52:16,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237146.66666666666, ans=0.125 +2024-08-27 04:52:21,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=237200.0, ans=0.125 +2024-08-27 04:52:31,107 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.418e+02 1.667e+02 2.145e+02 4.483e+02, threshold=3.333e+02, percent-clipped=3.0 +2024-08-27 04:52:38,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=237253.33333333334, ans=0.2 +2024-08-27 04:52:38,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=237253.33333333334, ans=0.025 +2024-08-27 04:52:38,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=237253.33333333334, ans=0.0 +2024-08-27 04:52:45,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=237306.66666666666, ans=0.0 +2024-08-27 04:52:51,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=237360.0, ans=0.125 +2024-08-27 04:52:57,216 INFO [train.py:1114] (2/4) Epoch 18, batch 2200, loss[loss=0.1944, simple_loss=0.2706, pruned_loss=0.04256, ctc_loss=0.08274, over 19592.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.26, pruned_loss=0.04023, ctc_loss=0.07498, over 3866406.18 frames. 
], batch size: 57, lr: 8.24e-03, grad_scale: 8.0 +2024-08-27 04:52:58,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=237413.33333333334, ans=0.0 +2024-08-27 04:53:06,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=237413.33333333334, ans=0.09899494936611666 +2024-08-27 04:53:09,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237466.66666666666, ans=0.125 +2024-08-27 04:53:09,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=237466.66666666666, ans=0.125 +2024-08-27 04:53:15,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=237466.66666666666, ans=0.125 +2024-08-27 04:53:23,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=237520.0, ans=0.0 +2024-08-27 04:53:45,573 INFO [train.py:1114] (2/4) Epoch 18, batch 2250, loss[loss=0.1992, simple_loss=0.2801, pruned_loss=0.04287, ctc_loss=0.08134, over 19616.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2599, pruned_loss=0.04019, ctc_loss=0.07489, over 3865969.40 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 8.0 +2024-08-27 04:58:07,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.445e+02 1.673e+02 2.181e+02 3.635e+02, threshold=3.347e+02, percent-clipped=1.0 +2024-08-27 04:58:08,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=237786.66666666666, ans=0.125 +2024-08-27 05:00:07,281 INFO [train.py:1114] (2/4) Epoch 18, batch 2300, loss[loss=0.1865, simple_loss=0.2579, pruned_loss=0.04145, ctc_loss=0.08037, over 19510.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2595, pruned_loss=0.0404, ctc_loss=0.07531, over 3860086.18 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 8.0 +2024-08-27 05:03:45,142 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:05:59,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=238160.0, ans=0.025 +2024-08-27 05:06:14,064 INFO [train.py:1114] (2/4) Epoch 18, batch 2350, loss[loss=0.2015, simple_loss=0.2763, pruned_loss=0.04714, ctc_loss=0.08108, over 19687.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2597, pruned_loss=0.04057, ctc_loss=0.07561, over 3862480.50 frames. 
], batch size: 63, lr: 8.22e-03, grad_scale: 8.0 +2024-08-27 05:06:14,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=238213.33333333334, ans=0.125 +2024-08-27 05:08:50,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=238266.66666666666, ans=0.125 +2024-08-27 05:09:45,137 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.379e+02 1.605e+02 2.102e+02 3.614e+02, threshold=3.209e+02, percent-clipped=2.0 +2024-08-27 05:10:09,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238320.0, ans=0.125 +2024-08-27 05:10:12,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=238320.0, ans=0.125 +2024-08-27 05:11:01,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238373.33333333334, ans=0.125 +2024-08-27 05:11:54,343 INFO [train.py:1114] (2/4) Epoch 18, batch 2400, loss[loss=0.1998, simple_loss=0.2776, pruned_loss=0.04448, ctc_loss=0.08267, over 19247.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.262, pruned_loss=0.04102, ctc_loss=0.07643, over 3857430.91 frames. ], batch size: 71, lr: 8.22e-03, grad_scale: 16.0 +2024-08-27 05:12:03,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.67 vs. limit=15.0 +2024-08-27 05:12:49,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=238480.0, ans=0.0 +2024-08-27 05:12:54,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=238480.0, ans=0.0 +2024-08-27 05:13:25,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=238586.66666666666, ans=0.0 +2024-08-27 05:13:52,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=238640.0, ans=0.05 +2024-08-27 05:14:24,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=238693.33333333334, ans=0.0 +2024-08-27 05:14:35,791 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.14 vs. limit=15.0 +2024-08-27 05:14:36,106 INFO [train.py:1114] (2/4) Epoch 18, batch 2450, loss[loss=0.236, simple_loss=0.2888, pruned_loss=0.06753, ctc_loss=0.1201, over 14215.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2656, pruned_loss=0.04361, ctc_loss=0.08125, over 3733241.97 frames. ], batch size: 140, lr: 8.21e-03, grad_scale: 16.0 +2024-08-27 05:14:50,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. 
limit=15.0 +2024-08-27 05:15:19,823 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.338e+02 1.631e+02 1.872e+02 2.220e+02 3.951e+02, threshold=3.743e+02, percent-clipped=5.0 +2024-08-27 05:15:30,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=238853.33333333334, ans=0.025 +2024-08-27 05:15:39,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=238906.66666666666, ans=0.0 +2024-08-27 05:15:44,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=238906.66666666666, ans=0.05 +2024-08-27 05:19:02,746 INFO [train.py:1114] (2/4) Epoch 19, batch 0, loss[loss=0.1782, simple_loss=0.248, pruned_loss=0.04, ctc_loss=0.07085, over 19405.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.248, pruned_loss=0.04, ctc_loss=0.07085, over 19405.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-27 05:19:02,747 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-27 05:19:51,146 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.8989, 3.3077, 3.7998, 3.7149], device='cuda:2') +2024-08-27 05:20:05,937 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-27 05:20:05,937 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12932MB +2024-08-27 05:20:23,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=239008.0, ans=0.2 +2024-08-27 05:21:28,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=239114.66666666666, ans=0.0 +2024-08-27 05:21:32,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=239114.66666666666, ans=0.05 +2024-08-27 05:22:51,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239168.0, ans=0.125 +2024-08-27 05:22:54,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239221.33333333334, ans=0.125 +2024-08-27 05:22:55,155 INFO [train.py:1114] (2/4) Epoch 19, batch 50, loss[loss=0.1802, simple_loss=0.2444, pruned_loss=0.04257, ctc_loss=0.0773, over 19697.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2612, pruned_loss=0.04067, ctc_loss=0.07635, over 843613.11 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:23:16,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-08-27 05:23:18,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=239221.33333333334, ans=0.09899494936611666 +2024-08-27 05:23:21,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239221.33333333334, ans=0.1 +2024-08-27 05:23:27,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.88 vs. 
limit=15.0 +2024-08-27 05:23:32,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.31 vs. limit=15.0 +2024-08-27 05:23:45,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239328.0, ans=0.125 +2024-08-27 05:23:47,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.87 vs. limit=22.5 +2024-08-27 05:23:59,557 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.492e+02 1.734e+02 2.135e+02 3.431e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-27 05:23:59,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-27 05:24:17,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=239434.66666666666, ans=0.0 +2024-08-27 05:24:19,426 INFO [train.py:1114] (2/4) Epoch 19, batch 100, loss[loss=0.1588, simple_loss=0.24, pruned_loss=0.0273, ctc_loss=0.05759, over 19711.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.263, pruned_loss=0.04071, ctc_loss=0.0766, over 1498173.55 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:24:19,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239488.0, ans=0.0 +2024-08-27 05:24:22,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=239488.0, ans=0.125 +2024-08-27 05:24:28,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=239541.33333333334, ans=0.0 +2024-08-27 05:24:39,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239594.66666666666, ans=0.125 +2024-08-27 05:25:46,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239648.0, ans=0.0 +2024-08-27 05:25:49,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239648.0, ans=0.1 +2024-08-27 05:25:49,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=239648.0, ans=0.2 +2024-08-27 05:25:53,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239648.0, ans=0.125 +2024-08-27 05:26:05,118 INFO [train.py:1114] (2/4) Epoch 19, batch 150, loss[loss=0.1656, simple_loss=0.2345, pruned_loss=0.03532, ctc_loss=0.06506, over 19712.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2605, pruned_loss=0.04016, ctc_loss=0.07525, over 2027757.76 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-27 05:26:27,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.54 vs. 
limit=22.5 +2024-08-27 05:26:31,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239808.0, ans=0.125 +2024-08-27 05:27:05,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=239861.33333333334, ans=0.125 +2024-08-27 05:27:20,691 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.500e+02 1.966e+02 2.497e+02 3.604e+02, threshold=3.932e+02, percent-clipped=3.0 +2024-08-27 05:27:30,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-27 05:27:48,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=15.0 +2024-08-27 05:27:49,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=239968.0, ans=10.0 +2024-08-27 05:28:03,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-27 05:28:10,137 INFO [train.py:1114] (2/4) Epoch 19, batch 200, loss[loss=0.2172, simple_loss=0.287, pruned_loss=0.05316, ctc_loss=0.1029, over 18193.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2604, pruned_loss=0.04005, ctc_loss=0.07514, over 2435345.63 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-27 05:28:10,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=240021.33333333334, ans=0.0 +2024-08-27 05:28:12,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240021.33333333334, ans=0.1 +2024-08-27 05:28:16,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-27 05:29:07,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240128.0, ans=0.125 +2024-08-27 05:29:09,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.02 vs. limit=22.5 +2024-08-27 05:29:30,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=240234.66666666666, ans=0.125 +2024-08-27 05:29:34,217 INFO [train.py:1114] (2/4) Epoch 19, batch 250, loss[loss=0.1962, simple_loss=0.2689, pruned_loss=0.04492, ctc_loss=0.08413, over 19434.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2597, pruned_loss=0.03954, ctc_loss=0.07414, over 2756225.88 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-27 05:29:35,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=240288.0, ans=0.09899494936611666 +2024-08-27 05:29:35,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.43 vs. 
limit=10.0 +2024-08-27 05:30:02,560 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.446e+02 1.683e+02 2.499e+02 4.574e+02, threshold=3.367e+02, percent-clipped=7.0 +2024-08-27 05:30:04,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=240448.0, ans=0.07 +2024-08-27 05:30:11,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-27 05:30:12,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=240501.33333333334, ans=0.07 +2024-08-27 05:30:13,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=240501.33333333334, ans=0.0 +2024-08-27 05:30:17,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-27 05:30:22,810 INFO [train.py:1114] (2/4) Epoch 19, batch 300, loss[loss=0.2028, simple_loss=0.2745, pruned_loss=0.04857, ctc_loss=0.08503, over 19508.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.259, pruned_loss=0.03936, ctc_loss=0.07401, over 3000511.74 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-27 05:30:24,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=240554.66666666666, ans=0.2 +2024-08-27 05:30:31,174 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:30:35,959 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:30:42,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=240661.33333333334, ans=0.0 +2024-08-27 05:30:49,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=240714.66666666666, ans=0.125 +2024-08-27 05:31:09,971 INFO [train.py:1114] (2/4) Epoch 19, batch 350, loss[loss=0.1775, simple_loss=0.2457, pruned_loss=0.03937, ctc_loss=0.07626, over 19796.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2602, pruned_loss=0.03947, ctc_loss=0.07415, over 3190250.24 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-27 05:31:10,629 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.74 vs. 
limit=15.0 +2024-08-27 05:31:33,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240928.0, ans=0.1 +2024-08-27 05:31:35,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=240928.0, ans=0.2 +2024-08-27 05:31:39,926 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.453e+02 1.753e+02 2.405e+02 3.677e+02, threshold=3.507e+02, percent-clipped=2.0 +2024-08-27 05:31:47,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-27 05:31:47,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=241034.66666666666, ans=0.2 +2024-08-27 05:31:53,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=241034.66666666666, ans=0.2 +2024-08-27 05:31:57,291 INFO [train.py:1114] (2/4) Epoch 19, batch 400, loss[loss=0.1806, simple_loss=0.2625, pruned_loss=0.03593, ctc_loss=0.06712, over 19481.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2597, pruned_loss=0.03924, ctc_loss=0.07363, over 3342450.82 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:31:57,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=241088.0, ans=0.0 +2024-08-27 05:32:35,314 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:32:40,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-27 05:32:41,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-27 05:32:52,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=241301.33333333334, ans=0.0 +2024-08-27 05:33:00,033 INFO [train.py:1114] (2/4) Epoch 19, batch 450, loss[loss=0.1832, simple_loss=0.2648, pruned_loss=0.03754, ctc_loss=0.06622, over 19612.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2599, pruned_loss=0.03966, ctc_loss=0.07427, over 3450474.67 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:33:02,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=241354.66666666666, ans=0.0 +2024-08-27 05:33:09,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.75 vs. 
limit=15.0 +2024-08-27 05:33:17,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-27 05:33:20,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-27 05:33:26,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=241461.33333333334, ans=0.2 +2024-08-27 05:33:30,924 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.397e+02 1.631e+02 2.046e+02 3.175e+02, threshold=3.262e+02, percent-clipped=0.0 +2024-08-27 05:33:31,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-08-27 05:33:32,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=241514.66666666666, ans=0.125 +2024-08-27 05:33:49,282 INFO [train.py:1114] (2/4) Epoch 19, batch 500, loss[loss=0.2028, simple_loss=0.278, pruned_loss=0.04758, ctc_loss=0.08106, over 19693.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2593, pruned_loss=0.03951, ctc_loss=0.07389, over 3545980.66 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-27 05:33:56,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=241621.33333333334, ans=0.025 +2024-08-27 05:33:58,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.70 vs. limit=15.0 +2024-08-27 05:34:06,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.25 vs. limit=15.0 +2024-08-27 05:34:18,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-27 05:34:20,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-27 05:34:24,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-27 05:34:30,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.33 vs. limit=10.0 +2024-08-27 05:34:31,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-27 05:34:33,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=241834.66666666666, ans=0.0 +2024-08-27 05:34:39,020 INFO [train.py:1114] (2/4) Epoch 19, batch 550, loss[loss=0.2075, simple_loss=0.2773, pruned_loss=0.05047, ctc_loss=0.09204, over 19316.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.259, pruned_loss=0.03945, ctc_loss=0.07365, over 3608060.51 frames. 
], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-27 05:34:42,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=241888.0, ans=0.0 +2024-08-27 05:34:42,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241888.0, ans=0.125 +2024-08-27 05:35:09,357 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.385e+02 1.667e+02 1.980e+02 3.512e+02, threshold=3.334e+02, percent-clipped=2.0 +2024-08-27 05:35:12,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=242048.0, ans=0.0 +2024-08-27 05:35:25,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=242101.33333333334, ans=0.95 +2024-08-27 05:35:26,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.64 vs. limit=15.0 +2024-08-27 05:35:27,096 INFO [train.py:1114] (2/4) Epoch 19, batch 600, loss[loss=0.1933, simple_loss=0.2693, pruned_loss=0.043, ctc_loss=0.07842, over 19426.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.03934, ctc_loss=0.07344, over 3664988.97 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-27 05:35:28,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=242154.66666666666, ans=0.0 +2024-08-27 05:35:31,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=242154.66666666666, ans=0.0 +2024-08-27 05:35:45,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=242261.33333333334, ans=0.02 +2024-08-27 05:35:53,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242261.33333333334, ans=0.125 +2024-08-27 05:36:14,279 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:36:22,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=242421.33333333334, ans=0.125 +2024-08-27 05:36:23,052 INFO [train.py:1114] (2/4) Epoch 19, batch 650, loss[loss=0.1793, simple_loss=0.2573, pruned_loss=0.03651, ctc_loss=0.07079, over 19774.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2587, pruned_loss=0.03912, ctc_loss=0.07315, over 3715999.86 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-27 05:36:27,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=242421.33333333334, ans=0.0 +2024-08-27 05:36:32,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=242474.66666666666, ans=0.09899494936611666 +2024-08-27 05:36:53,248 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.470e+02 1.907e+02 2.471e+02 4.129e+02, threshold=3.814e+02, percent-clipped=9.0 +2024-08-27 05:37:33,289 INFO [train.py:1114] (2/4) Epoch 19, batch 700, loss[loss=0.1776, simple_loss=0.2523, pruned_loss=0.03654, ctc_loss=0.07447, over 19734.00 frames. 
], tot_loss[loss=0.1839, simple_loss=0.2595, pruned_loss=0.0394, ctc_loss=0.07376, over 3749459.92 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-27 05:37:33,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=242688.0, ans=0.0 +2024-08-27 05:37:36,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.80 vs. limit=15.0 +2024-08-27 05:37:38,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242688.0, ans=0.1 +2024-08-27 05:37:45,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242741.33333333334, ans=0.125 +2024-08-27 05:37:49,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.96 vs. limit=22.5 +2024-08-27 05:37:55,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=242794.66666666666, ans=15.0 +2024-08-27 05:37:59,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242794.66666666666, ans=0.1 +2024-08-27 05:38:15,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-08-27 05:38:19,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242901.33333333334, ans=0.125 +2024-08-27 05:38:23,010 INFO [train.py:1114] (2/4) Epoch 19, batch 750, loss[loss=0.1705, simple_loss=0.2495, pruned_loss=0.03331, ctc_loss=0.06196, over 19506.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2592, pruned_loss=0.03955, ctc_loss=0.07407, over 3774607.39 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:38:23,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=242954.66666666666, ans=0.07 +2024-08-27 05:38:41,555 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.33 vs. limit=15.0 +2024-08-27 05:38:47,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=243061.33333333334, ans=0.0 +2024-08-27 05:38:51,395 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.489e+02 1.823e+02 2.314e+02 3.772e+02, threshold=3.647e+02, percent-clipped=0.0 +2024-08-27 05:38:55,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.73 vs. limit=15.0 +2024-08-27 05:39:02,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=243168.0, ans=0.125 +2024-08-27 05:39:11,747 INFO [train.py:1114] (2/4) Epoch 19, batch 800, loss[loss=0.1619, simple_loss=0.2361, pruned_loss=0.03193, ctc_loss=0.0594, over 19799.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2587, pruned_loss=0.03934, ctc_loss=0.07359, over 3796075.64 frames. 
], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:39:17,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=243221.33333333334, ans=0.125 +2024-08-27 05:39:31,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.79 vs. limit=15.0 +2024-08-27 05:39:34,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=243328.0, ans=0.125 +2024-08-27 05:39:35,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=243328.0, ans=0.125 +2024-08-27 05:39:45,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.44 vs. limit=22.5 +2024-08-27 05:39:49,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=243434.66666666666, ans=0.125 +2024-08-27 05:39:58,034 INFO [train.py:1114] (2/4) Epoch 19, batch 850, loss[loss=0.1922, simple_loss=0.2768, pruned_loss=0.03975, ctc_loss=0.07019, over 19632.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.259, pruned_loss=0.03944, ctc_loss=0.07388, over 3814477.48 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-27 05:40:13,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=243541.33333333334, ans=0.125 +2024-08-27 05:40:20,388 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:40:28,699 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.388e+02 1.609e+02 2.074e+02 4.897e+02, threshold=3.218e+02, percent-clipped=1.0 +2024-08-27 05:40:51,595 INFO [train.py:1114] (2/4) Epoch 19, batch 900, loss[loss=0.1651, simple_loss=0.2347, pruned_loss=0.03459, ctc_loss=0.06597, over 19400.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2593, pruned_loss=0.03984, ctc_loss=0.07462, over 3817687.94 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-27 05:41:08,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=243754.66666666666, ans=0.0 +2024-08-27 05:41:10,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=243754.66666666666, ans=0.125 +2024-08-27 05:45:31,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243914.66666666666, ans=0.1 +2024-08-27 05:45:59,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=243968.0, ans=0.04949747468305833 +2024-08-27 05:46:00,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=243968.0, ans=0.125 +2024-08-27 05:46:16,462 INFO [train.py:1114] (2/4) Epoch 19, batch 950, loss[loss=0.1658, simple_loss=0.2359, pruned_loss=0.0351, ctc_loss=0.06358, over 19503.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2595, pruned_loss=0.04002, ctc_loss=0.07482, over 3817453.70 frames. 
], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-27 05:46:28,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244021.33333333334, ans=0.125 +2024-08-27 05:47:05,277 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.465e+02 1.729e+02 2.037e+02 3.385e+02, threshold=3.459e+02, percent-clipped=1.0 +2024-08-27 05:47:07,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=244181.33333333334, ans=0.125 +2024-08-27 05:47:21,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.40 vs. limit=15.0 +2024-08-27 05:47:24,473 INFO [train.py:1114] (2/4) Epoch 19, batch 1000, loss[loss=0.1628, simple_loss=0.2419, pruned_loss=0.03067, ctc_loss=0.056, over 19847.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2601, pruned_loss=0.04011, ctc_loss=0.0749, over 3813881.18 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-27 05:47:28,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.58 vs. limit=15.0 +2024-08-27 05:47:45,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.07 vs. limit=22.5 +2024-08-27 05:47:48,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244394.66666666666, ans=0.125 +2024-08-27 05:48:03,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.05 vs. limit=22.5 +2024-08-27 05:48:10,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-08-27 05:48:11,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.16 vs. limit=15.0 +2024-08-27 05:48:12,796 INFO [train.py:1114] (2/4) Epoch 19, batch 1050, loss[loss=0.1857, simple_loss=0.2624, pruned_loss=0.04007, ctc_loss=0.07215, over 19837.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2595, pruned_loss=0.03987, ctc_loss=0.07437, over 3821505.79 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-27 05:48:17,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-27 05:48:36,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-27 05:48:37,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-27 05:48:42,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.407e+02 1.559e+02 1.901e+02 2.565e+02, threshold=3.118e+02, percent-clipped=0.0 +2024-08-27 05:49:02,416 INFO [train.py:1114] (2/4) Epoch 19, batch 1100, loss[loss=0.1809, simple_loss=0.2593, pruned_loss=0.0376, ctc_loss=0.06844, over 19574.00 frames. 
], tot_loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.03973, ctc_loss=0.07411, over 3828692.34 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:49:07,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=244821.33333333334, ans=0.125 +2024-08-27 05:49:15,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=15.0 +2024-08-27 05:49:25,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=244928.0, ans=0.0 +2024-08-27 05:49:29,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.79 vs. limit=22.5 +2024-08-27 05:49:46,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0 +2024-08-27 05:49:48,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245034.66666666666, ans=0.1 +2024-08-27 05:49:49,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245034.66666666666, ans=0.1 +2024-08-27 05:49:51,756 INFO [train.py:1114] (2/4) Epoch 19, batch 1150, loss[loss=0.1608, simple_loss=0.2387, pruned_loss=0.02995, ctc_loss=0.05783, over 19586.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.03972, ctc_loss=0.07424, over 3826076.37 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:51:16,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=245141.33333333334, ans=0.025 +2024-08-27 05:51:43,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.34 vs. limit=6.0 +2024-08-27 05:52:27,623 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.437e+02 1.648e+02 2.100e+02 3.411e+02, threshold=3.296e+02, percent-clipped=3.0 +2024-08-27 05:52:32,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=245248.0, ans=15.0 +2024-08-27 05:52:45,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=245301.33333333334, ans=0.025 +2024-08-27 05:52:47,008 INFO [train.py:1114] (2/4) Epoch 19, batch 1200, loss[loss=0.197, simple_loss=0.2689, pruned_loss=0.04529, ctc_loss=0.08602, over 19843.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2603, pruned_loss=0.03985, ctc_loss=0.07452, over 3821922.92 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-27 05:52:51,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.81 vs. limit=10.0 +2024-08-27 05:52:58,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=245408.0, ans=0.125 +2024-08-27 05:53:17,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.33 vs. 
limit=12.0 +2024-08-27 05:53:35,241 INFO [train.py:1114] (2/4) Epoch 19, batch 1250, loss[loss=0.2039, simple_loss=0.2765, pruned_loss=0.04793, ctc_loss=0.08872, over 19548.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2612, pruned_loss=0.04023, ctc_loss=0.07513, over 3840961.06 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-27 05:53:40,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245621.33333333334, ans=0.0 +2024-08-27 05:53:42,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-08-27 05:53:46,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245674.66666666666, ans=0.1 +2024-08-27 05:54:05,856 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.471e+02 1.735e+02 2.173e+02 3.319e+02, threshold=3.470e+02, percent-clipped=1.0 +2024-08-27 05:54:18,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245834.66666666666, ans=0.125 +2024-08-27 05:54:24,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245834.66666666666, ans=0.1 +2024-08-27 05:54:26,192 INFO [train.py:1114] (2/4) Epoch 19, batch 1300, loss[loss=0.183, simple_loss=0.264, pruned_loss=0.03652, ctc_loss=0.07228, over 18921.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2602, pruned_loss=0.03997, ctc_loss=0.07449, over 3844775.59 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-27 05:54:30,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=245888.0, ans=0.125 +2024-08-27 05:54:36,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=12.0 +2024-08-27 05:54:48,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245994.66666666666, ans=0.1 +2024-08-27 05:54:53,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245994.66666666666, ans=0.1 +2024-08-27 05:54:55,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=246048.0, ans=0.025 +2024-08-27 05:55:03,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-27 05:55:03,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-27 05:55:13,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.15 vs. limit=15.0 +2024-08-27 05:55:13,893 INFO [train.py:1114] (2/4) Epoch 19, batch 1350, loss[loss=0.1686, simple_loss=0.2476, pruned_loss=0.03232, ctc_loss=0.06258, over 19770.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2597, pruned_loss=0.03968, ctc_loss=0.07388, over 3855306.68 frames. 
], batch size: 54, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:55:15,915 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:55:21,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=246154.66666666666, ans=0.2 +2024-08-27 05:55:29,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246208.0, ans=0.0 +2024-08-27 05:55:32,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246208.0, ans=0.1 +2024-08-27 05:55:34,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=246261.33333333334, ans=0.125 +2024-08-27 05:55:34,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.09 vs. limit=15.0 +2024-08-27 05:55:44,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246314.66666666666, ans=0.125 +2024-08-27 05:55:45,495 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.414e+02 1.634e+02 2.144e+02 3.359e+02, threshold=3.268e+02, percent-clipped=0.0 +2024-08-27 05:56:03,864 INFO [train.py:1114] (2/4) Epoch 19, batch 1400, loss[loss=0.1692, simple_loss=0.2388, pruned_loss=0.03696, ctc_loss=0.06418, over 19671.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2592, pruned_loss=0.03956, ctc_loss=0.07378, over 3863274.26 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:56:05,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.01 vs. limit=15.0 +2024-08-27 05:56:15,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=246474.66666666666, ans=0.125 +2024-08-27 05:56:28,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246528.0, ans=0.125 +2024-08-27 05:56:39,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-27 05:56:44,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=246634.66666666666, ans=0.0 +2024-08-27 05:56:46,057 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0 +2024-08-27 05:56:49,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=246634.66666666666, ans=0.0 +2024-08-27 05:56:53,086 INFO [train.py:1114] (2/4) Epoch 19, batch 1450, loss[loss=0.1984, simple_loss=0.2744, pruned_loss=0.04466, ctc_loss=0.08279, over 19636.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2602, pruned_loss=0.04005, ctc_loss=0.07472, over 3860184.88 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 16.0 +2024-08-27 05:56:55,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.48 vs. 
limit=12.0 +2024-08-27 05:56:56,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=246688.0, ans=0.125 +2024-08-27 05:57:05,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-27 05:57:08,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=246741.33333333334, ans=0.0 +2024-08-27 05:57:15,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=246794.66666666666, ans=0.125 +2024-08-27 05:57:25,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.422e+02 1.608e+02 1.963e+02 3.546e+02, threshold=3.216e+02, percent-clipped=4.0 +2024-08-27 05:57:28,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-27 05:57:35,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0 +2024-08-27 05:57:42,296 INFO [train.py:1114] (2/4) Epoch 19, batch 1500, loss[loss=0.1873, simple_loss=0.2677, pruned_loss=0.03883, ctc_loss=0.07322, over 19590.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2605, pruned_loss=0.04006, ctc_loss=0.07469, over 3861306.88 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 16.0 +2024-08-27 05:57:42,708 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 05:57:52,993 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.87 vs. limit=15.0 +2024-08-27 05:57:53,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=247008.0, ans=0.125 +2024-08-27 05:58:03,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=247008.0, ans=0.025 +2024-08-27 05:58:40,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.43 vs. limit=10.0 +2024-08-27 05:59:01,746 INFO [train.py:1114] (2/4) Epoch 19, batch 1550, loss[loss=0.196, simple_loss=0.2718, pruned_loss=0.04359, ctc_loss=0.08261, over 19613.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2605, pruned_loss=0.04015, ctc_loss=0.07494, over 3845734.36 frames. 
], batch size: 60, lr: 7.86e-03, grad_scale: 16.0 +2024-08-27 05:59:32,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247328.0, ans=0.125 +2024-08-27 05:59:37,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247328.0, ans=0.1 +2024-08-27 05:59:41,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=247381.33333333334, ans=0.2 +2024-08-27 05:59:43,865 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.416e+02 1.634e+02 2.007e+02 4.215e+02, threshold=3.267e+02, percent-clipped=2.0 +2024-08-27 05:59:46,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=247381.33333333334, ans=0.025 +2024-08-27 05:59:47,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247381.33333333334, ans=0.1 +2024-08-27 06:00:02,736 INFO [train.py:1114] (2/4) Epoch 19, batch 1600, loss[loss=0.1884, simple_loss=0.2738, pruned_loss=0.03692, ctc_loss=0.07277, over 19825.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2606, pruned_loss=0.0403, ctc_loss=0.07516, over 3835103.23 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-27 06:00:08,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=247488.0, ans=0.125 +2024-08-27 06:00:29,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=247594.66666666666, ans=0.0 +2024-08-27 06:00:43,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=247701.33333333334, ans=0.0 +2024-08-27 06:00:47,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0 +2024-08-27 06:00:51,750 INFO [train.py:1114] (2/4) Epoch 19, batch 1650, loss[loss=0.1916, simple_loss=0.269, pruned_loss=0.04223, ctc_loss=0.07457, over 19643.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2602, pruned_loss=0.0402, ctc_loss=0.07502, over 3830846.76 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-27 06:00:55,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.27 vs. limit=15.0 +2024-08-27 06:01:02,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=247808.0, ans=0.0 +2024-08-27 06:01:21,515 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.539e+02 1.985e+02 2.467e+02 4.637e+02, threshold=3.969e+02, percent-clipped=10.0 +2024-08-27 06:01:21,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247914.66666666666, ans=0.125 +2024-08-27 06:01:39,979 INFO [train.py:1114] (2/4) Epoch 19, batch 1700, loss[loss=0.1755, simple_loss=0.2423, pruned_loss=0.03958, ctc_loss=0.07371, over 19654.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2597, pruned_loss=0.03969, ctc_loss=0.07413, over 3846161.58 frames. 
], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-27 06:01:51,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248074.66666666666, ans=0.1 +2024-08-27 06:01:59,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-27 06:02:13,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.24 vs. limit=6.0 +2024-08-27 06:02:21,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248234.66666666666, ans=0.1 +2024-08-27 06:02:21,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=248234.66666666666, ans=15.0 +2024-08-27 06:02:23,939 INFO [train.py:1114] (2/4) Epoch 19, batch 1750, loss[loss=0.1612, simple_loss=0.2379, pruned_loss=0.03004, ctc_loss=0.06077, over 19643.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.03931, ctc_loss=0.07368, over 3852816.79 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-27 06:02:36,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.19 vs. limit=15.0 +2024-08-27 06:02:40,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=248341.33333333334, ans=0.0 +2024-08-27 06:02:55,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=248448.0, ans=0.2 +2024-08-27 06:02:56,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248448.0, ans=0.125 +2024-08-27 06:02:57,023 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.179e+02 1.492e+02 1.808e+02 2.313e+02 3.735e+02, threshold=3.616e+02, percent-clipped=0.0 +2024-08-27 06:03:14,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=248501.33333333334, ans=0.2 +2024-08-27 06:03:18,747 INFO [train.py:1114] (2/4) Epoch 19, batch 1800, loss[loss=0.1943, simple_loss=0.2726, pruned_loss=0.0427, ctc_loss=0.07642, over 19631.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2592, pruned_loss=0.03934, ctc_loss=0.0737, over 3852949.68 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-27 06:03:19,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0 +2024-08-27 06:03:25,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248554.66666666666, ans=0.1 +2024-08-27 06:03:30,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.86 vs. limit=22.5 +2024-08-27 06:03:31,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.11 vs. 
limit=15.0 +2024-08-27 06:03:41,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248661.33333333334, ans=0.125 +2024-08-27 06:03:59,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0 +2024-08-27 06:04:02,721 INFO [train.py:1114] (2/4) Epoch 19, batch 1850, loss[loss=0.1847, simple_loss=0.2658, pruned_loss=0.03756, ctc_loss=0.07103, over 19557.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2592, pruned_loss=0.03938, ctc_loss=0.07365, over 3855688.77 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:19,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=248874.66666666666, ans=0.125 +2024-08-27 06:04:32,742 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.498e+02 2.037e+02 3.063e+02 6.275e+02, threshold=4.074e+02, percent-clipped=13.0 +2024-08-27 06:04:47,718 INFO [train.py:1114] (2/4) Epoch 19, batch 1900, loss[loss=0.1885, simple_loss=0.2673, pruned_loss=0.03994, ctc_loss=0.07476, over 19664.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2598, pruned_loss=0.03948, ctc_loss=0.07379, over 3860408.64 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:05:01,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=249141.33333333334, ans=0.125 +2024-08-27 06:05:11,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=249194.66666666666, ans=0.0 +2024-08-27 06:05:46,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0 +2024-08-27 06:05:52,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=249301.33333333334, ans=0.125 +2024-08-27 06:05:58,953 INFO [train.py:1114] (2/4) Epoch 19, batch 1950, loss[loss=0.163, simple_loss=0.2415, pruned_loss=0.03009, ctc_loss=0.06095, over 19576.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2608, pruned_loss=0.03974, ctc_loss=0.07428, over 3869360.89 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-27 06:05:59,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249354.66666666666, ans=0.1 +2024-08-27 06:06:02,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=249354.66666666666, ans=0.0 +2024-08-27 06:16:15,092 INFO [train.py:1050] (2/4) Caught exception: [Rank 2] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=471644, OpType=ALLREDUCE, NumelIn=745, NumelOut=745, Timeout(ms)=600000) ran for 600009 milliseconds before timing out.. 
+2024-08-27 06:16:15,093 INFO [checkpoint.py:75] (2/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/bad-model-2.pt +2024-08-27 06:16:19,608 INFO [train.py:1413] (2/4) Saving batch to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/batch-277582f0-93f5-0c2c-488e-44f94ecc6c7f.pt +2024-08-27 06:16:21,676 INFO [train.py:1419] (2/4) features shape: torch.Size([50, 1582, 80]) +2024-08-27 06:16:21,678 INFO [train.py:1423] (2/4) num tokens: 3873 diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-3 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-3 new file mode 100644 index 0000000000000000000000000000000000000000..511a9bbb3956bdabe0103d8a5e9c5659588c6ca1 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-26-14-14-03-3 @@ -0,0 +1,5441 @@ +2024-08-26 14:14:05,588 INFO [train.py:1182] (3/4) Training started +2024-08-26 14:14:07,267 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-26 14:14:09,460 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2652.int.cedar.computecanada.ca', 'IP address': '172.16.146.89'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 4, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 
'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-26 14:14:09,460 INFO [train.py:1212] (3/4) About to create model +2024-08-26 14:14:10,681 INFO [train.py:1216] (3/4) Number of model parameters: 65805511 +2024-08-26 14:14:12,231 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-3.pt +2024-08-26 14:14:19,979 INFO [train.py:1231] (3/4) Using DDP +2024-08-26 14:14:24,077 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-26 14:14:24,267 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-26 14:14:24,267 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-26 14:14:27,295 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-26 14:14:27,295 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-26 14:14:27,296 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-26 14:14:28,835 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-26 14:14:28,836 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-26 14:14:31,125 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-26 14:14:32,027 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-26 14:14:32,340 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-26 14:14:32,340 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-26 14:18:38,887 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.24 vs. limit=7.5 +2024-08-26 14:18:40,633 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12127MB +2024-08-26 14:18:41,875 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12127MB +2024-08-26 14:18:49,650 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12213MB +2024-08-26 14:18:50,850 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12276MB +2024-08-26 14:19:04,876 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12276MB +2024-08-26 14:19:06,156 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12276MB +2024-08-26 14:19:06,174 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-26 14:19:52,372 INFO [train.py:1114] (3/4) Epoch 4, batch 0, loss[loss=0.2946, simple_loss=0.3282, pruned_loss=0.09504, ctc_loss=0.1777, over 19391.00 frames. ], tot_loss[loss=0.2946, simple_loss=0.3282, pruned_loss=0.09504, ctc_loss=0.1777, over 19391.00 frames. 
], batch size: 48, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:19:52,373 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 14:20:25,792 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2421, simple_loss=0.3218, pruned_loss=0.05945, ctc_loss=0.1086, over 944034.00 frames. +2024-08-26 14:20:25,794 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12276MB +2024-08-26 14:21:25,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.60 vs. limit=10.0 +2024-08-26 14:21:40,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=39936.0, ans=0.0 +2024-08-26 14:21:53,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39936.0, ans=0.0 +2024-08-26 14:22:11,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=39936.0, ans=0.2 +2024-08-26 14:22:31,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39989.333333333336, ans=0.125 +2024-08-26 14:22:38,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=39989.333333333336, ans=0.125 +2024-08-26 14:23:04,536 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.569e+02 1.845e+02 2.126e+02 2.642e+02 4.004e+02, threshold=4.252e+02, percent-clipped=0.0 +2024-08-26 14:23:10,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40042.666666666664, ans=0.1 +2024-08-26 14:23:26,418 INFO [train.py:1114] (3/4) Epoch 4, batch 50, loss[loss=0.259, simple_loss=0.2932, pruned_loss=0.0822, ctc_loss=0.1509, over 19726.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.3372, pruned_loss=0.09901, ctc_loss=0.1863, over 843670.46 frames. ], batch size: 47, lr: 3.30e-02, grad_scale: 32.0 +2024-08-26 14:24:13,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=15.0 +2024-08-26 14:24:39,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40202.666666666664, ans=0.125 +2024-08-26 14:25:08,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=40256.0, ans=0.125 +2024-08-26 14:25:17,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40309.333333333336, ans=0.1 +2024-08-26 14:25:23,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=40309.333333333336, ans=0.125 +2024-08-26 14:25:24,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.88 vs. limit=15.0 +2024-08-26 14:25:32,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=40362.666666666664, ans=0.125 +2024-08-26 14:25:33,111 INFO [train.py:1114] (3/4) Epoch 4, batch 100, loss[loss=0.2925, simple_loss=0.3302, pruned_loss=0.09423, ctc_loss=0.1658, over 19704.00 frames. 
], tot_loss[loss=0.3018, simple_loss=0.3369, pruned_loss=0.09699, ctc_loss=0.1818, over 1498193.89 frames. ], batch size: 51, lr: 3.29e-02, grad_scale: 32.0 +2024-08-26 14:25:51,545 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.36 vs. limit=15.0 +2024-08-26 14:26:04,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.11 vs. limit=15.0 +2024-08-26 14:26:13,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=40469.333333333336, ans=0.125 +2024-08-26 14:26:40,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.372e+02 1.662e+02 1.906e+02 2.226e+02 3.245e+02, threshold=3.812e+02, percent-clipped=0.0 +2024-08-26 14:26:48,542 INFO [train.py:1114] (3/4) Epoch 4, batch 150, loss[loss=0.2529, simple_loss=0.2896, pruned_loss=0.07938, ctc_loss=0.1437, over 19710.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.332, pruned_loss=0.09441, ctc_loss=0.1768, over 2026939.58 frames. ], batch size: 47, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:26:57,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=40629.333333333336, ans=0.125 +2024-08-26 14:26:57,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40629.333333333336, ans=0.125 +2024-08-26 14:27:02,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=40682.666666666664, ans=0.125 +2024-08-26 14:27:34,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40736.0, ans=0.1 +2024-08-26 14:27:42,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.49 vs. limit=22.5 +2024-08-26 14:27:53,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40842.666666666664, ans=0.125 +2024-08-26 14:27:54,097 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.57 vs. limit=10.0 +2024-08-26 14:28:02,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=40842.666666666664, ans=0.0 +2024-08-26 14:28:04,927 INFO [train.py:1114] (3/4) Epoch 4, batch 200, loss[loss=0.3605, simple_loss=0.3748, pruned_loss=0.125, ctc_loss=0.2404, over 18226.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3313, pruned_loss=0.09423, ctc_loss=0.1763, over 2434884.14 frames. 
], batch size: 85, lr: 3.28e-02, grad_scale: 32.0 +2024-08-26 14:28:06,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40896.0, ans=0.125 +2024-08-26 14:28:27,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=41002.666666666664, ans=0.09899494936611666 +2024-08-26 14:28:29,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=41002.666666666664, ans=0.0 +2024-08-26 14:28:36,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=41056.0, ans=0.125 +2024-08-26 14:28:49,772 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.824e+02 2.102e+02 2.533e+02 3.992e+02, threshold=4.203e+02, percent-clipped=3.0 +2024-08-26 14:28:55,769 INFO [train.py:1114] (3/4) Epoch 4, batch 250, loss[loss=0.3068, simple_loss=0.3462, pruned_loss=0.09791, ctc_loss=0.179, over 19431.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3293, pruned_loss=0.09242, ctc_loss=0.1731, over 2755249.50 frames. ], batch size: 67, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:29:06,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.84 vs. limit=10.0 +2024-08-26 14:29:06,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=41216.0, ans=0.125 +2024-08-26 14:29:12,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0 +2024-08-26 14:29:29,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.36 vs. limit=15.0 +2024-08-26 14:29:46,779 INFO [train.py:1114] (3/4) Epoch 4, batch 300, loss[loss=0.2956, simple_loss=0.3253, pruned_loss=0.09668, ctc_loss=0.1814, over 19531.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3277, pruned_loss=0.09174, ctc_loss=0.1718, over 3000480.96 frames. ], batch size: 61, lr: 3.27e-02, grad_scale: 32.0 +2024-08-26 14:30:08,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-26 14:30:11,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=41536.0, ans=0.0018399999999999996 +2024-08-26 14:30:12,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=41536.0, ans=0.125 +2024-08-26 14:30:23,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=41589.333333333336, ans=0.001828405797101449 +2024-08-26 14:30:32,090 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 1.674e+02 1.880e+02 2.161e+02 3.950e+02, threshold=3.761e+02, percent-clipped=0.0 +2024-08-26 14:30:37,868 INFO [train.py:1114] (3/4) Epoch 4, batch 350, loss[loss=0.2658, simple_loss=0.3006, pruned_loss=0.08462, ctc_loss=0.1543, over 19754.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3273, pruned_loss=0.09114, ctc_loss=0.1706, over 3189928.60 frames. 
], batch size: 48, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:30:42,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=41696.0, ans=0.0 +2024-08-26 14:30:42,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0 +2024-08-26 14:30:49,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0 +2024-08-26 14:30:50,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=41749.333333333336, ans=0.0017936231884057973 +2024-08-26 14:30:53,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41749.333333333336, ans=0.1 +2024-08-26 14:31:10,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=41856.0, ans=0.125 +2024-08-26 14:31:35,387 INFO [train.py:1114] (3/4) Epoch 4, batch 400, loss[loss=0.2843, simple_loss=0.3251, pruned_loss=0.08865, ctc_loss=0.1656, over 19504.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.327, pruned_loss=0.09047, ctc_loss=0.1695, over 3342217.39 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 32.0 +2024-08-26 14:31:49,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=42016.0, ans=0.2 +2024-08-26 14:31:49,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42016.0, ans=0.1 +2024-08-26 14:31:55,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=42069.333333333336, ans=0.2 +2024-08-26 14:32:04,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=42122.666666666664, ans=0.125 +2024-08-26 14:32:05,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=42122.666666666664, ans=0.0017124637681159433 +2024-08-26 14:32:14,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.38 vs. limit=15.0 +2024-08-26 14:32:19,254 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.438e+02 1.828e+02 2.157e+02 2.598e+02 8.551e+02, threshold=4.314e+02, percent-clipped=2.0 +2024-08-26 14:32:23,143 INFO [train.py:1114] (3/4) Epoch 4, batch 450, loss[loss=0.2878, simple_loss=0.3359, pruned_loss=0.08706, ctc_loss=0.164, over 19615.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3273, pruned_loss=0.09093, ctc_loss=0.1703, over 3450482.58 frames. ], batch size: 55, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:32:35,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.76 vs. 
limit=22.5 +2024-08-26 14:32:36,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-26 14:32:38,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=42282.666666666664, ans=0.1 +2024-08-26 14:32:39,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-26 14:32:41,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=42282.666666666664, ans=0.125 +2024-08-26 14:32:50,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.01 vs. limit=15.0 +2024-08-26 14:32:54,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42389.333333333336, ans=0.125 +2024-08-26 14:32:57,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0 +2024-08-26 14:32:59,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42389.333333333336, ans=0.125 +2024-08-26 14:33:03,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0 +2024-08-26 14:33:05,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=42442.666666666664, ans=0.125 +2024-08-26 14:33:14,196 INFO [train.py:1114] (3/4) Epoch 4, batch 500, loss[loss=0.3003, simple_loss=0.3486, pruned_loss=0.09087, ctc_loss=0.176, over 19663.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3259, pruned_loss=0.09012, ctc_loss=0.1685, over 3545452.44 frames. ], batch size: 63, lr: 3.25e-02, grad_scale: 8.0 +2024-08-26 14:33:34,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=12.0 +2024-08-26 14:33:40,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.04 vs. limit=22.5 +2024-08-26 14:33:45,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.13 vs. limit=15.0 +2024-08-26 14:33:59,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0 +2024-08-26 14:34:07,932 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.676e+02 1.857e+02 2.171e+02 5.331e+02, threshold=3.714e+02, percent-clipped=2.0 +2024-08-26 14:34:09,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42709.333333333336, ans=0.125 +2024-08-26 14:34:11,749 INFO [train.py:1114] (3/4) Epoch 4, batch 550, loss[loss=0.2825, simple_loss=0.3261, pruned_loss=0.08647, ctc_loss=0.165, over 19267.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3259, pruned_loss=0.0899, ctc_loss=0.1683, over 3608723.52 frames. 
], batch size: 71, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:34:11,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=42762.666666666664, ans=0.07 +2024-08-26 14:34:21,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=42816.0, ans=0.125 +2024-08-26 14:34:26,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42816.0, ans=0.125 +2024-08-26 14:34:28,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42816.0, ans=0.1 +2024-08-26 14:34:35,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=42869.333333333336, ans=0.07 +2024-08-26 14:34:49,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=42922.666666666664, ans=0.125 +2024-08-26 14:34:52,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=42976.0, ans=0.125 +2024-08-26 14:35:03,323 INFO [train.py:1114] (3/4) Epoch 4, batch 600, loss[loss=0.2983, simple_loss=0.3391, pruned_loss=0.09384, ctc_loss=0.1744, over 19423.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3256, pruned_loss=0.08933, ctc_loss=0.1674, over 3665691.44 frames. ], batch size: 67, lr: 3.24e-02, grad_scale: 8.0 +2024-08-26 14:35:03,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=43029.333333333336, ans=0.2 +2024-08-26 14:35:05,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=43029.333333333336, ans=0.125 +2024-08-26 14:35:10,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=43029.333333333336, ans=0.125 +2024-08-26 14:35:26,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43136.0, ans=0.125 +2024-08-26 14:35:37,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.35 vs. limit=15.0 +2024-08-26 14:35:45,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=43242.666666666664, ans=0.125 +2024-08-26 14:35:46,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=43242.666666666664, ans=0.125 +2024-08-26 14:35:49,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43242.666666666664, ans=0.1 +2024-08-26 14:35:50,398 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.428e+02 1.699e+02 1.953e+02 2.270e+02 5.390e+02, threshold=3.906e+02, percent-clipped=1.0 +2024-08-26 14:35:54,213 INFO [train.py:1114] (3/4) Epoch 4, batch 650, loss[loss=0.2732, simple_loss=0.3251, pruned_loss=0.07934, ctc_loss=0.1565, over 19783.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3245, pruned_loss=0.08887, ctc_loss=0.1664, over 3716000.01 frames. 
], batch size: 54, lr: 3.23e-02, grad_scale: 8.0 +2024-08-26 14:36:28,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=43456.0, ans=0.125 +2024-08-26 14:36:38,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43509.333333333336, ans=0.0 +2024-08-26 14:36:40,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=43509.333333333336, ans=0.125 +2024-08-26 14:36:48,342 INFO [train.py:1114] (3/4) Epoch 4, batch 700, loss[loss=0.2705, simple_loss=0.3129, pruned_loss=0.08353, ctc_loss=0.1526, over 19732.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3252, pruned_loss=0.08924, ctc_loss=0.1673, over 3748854.21 frames. ], batch size: 51, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:36:48,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43562.666666666664, ans=0.1 +2024-08-26 14:36:59,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=43616.0, ans=0.0 +2024-08-26 14:37:02,956 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:37:12,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-08-26 14:37:17,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=43722.666666666664, ans=0.125 +2024-08-26 14:37:17,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43722.666666666664, ans=0.1 +2024-08-26 14:37:32,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=43776.0, ans=0.07 +2024-08-26 14:37:36,034 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 1.717e+02 1.974e+02 2.287e+02 3.794e+02, threshold=3.948e+02, percent-clipped=0.0 +2024-08-26 14:37:36,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43776.0, ans=0.1 +2024-08-26 14:37:36,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=43776.0, ans=0.125 +2024-08-26 14:37:38,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=43776.0, ans=0.5 +2024-08-26 14:37:39,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43829.333333333336, ans=0.125 +2024-08-26 14:37:39,954 INFO [train.py:1114] (3/4) Epoch 4, batch 750, loss[loss=0.3045, simple_loss=0.3435, pruned_loss=0.0967, ctc_loss=0.18, over 19507.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3245, pruned_loss=0.08889, ctc_loss=0.1663, over 3775312.06 frames. 
], batch size: 54, lr: 3.22e-02, grad_scale: 8.0 +2024-08-26 14:37:52,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=43882.666666666664, ans=0.2 +2024-08-26 14:37:54,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=43882.666666666664, ans=0.025 +2024-08-26 14:38:08,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0 +2024-08-26 14:38:12,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=43989.333333333336, ans=0.0 +2024-08-26 14:38:13,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=43989.333333333336, ans=0.0 +2024-08-26 14:38:25,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=44042.666666666664, ans=0.025 +2024-08-26 14:38:31,794 INFO [train.py:1114] (3/4) Epoch 4, batch 800, loss[loss=0.2497, simple_loss=0.2875, pruned_loss=0.07554, ctc_loss=0.1522, over 19817.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3245, pruned_loss=0.08908, ctc_loss=0.1669, over 3796139.58 frames. ], batch size: 49, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:38:49,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44149.333333333336, ans=0.1 +2024-08-26 14:38:53,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44202.666666666664, ans=0.1 +2024-08-26 14:39:15,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.59 vs. limit=22.5 +2024-08-26 14:39:16,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 1.706e+02 1.876e+02 2.197e+02 5.470e+02, threshold=3.751e+02, percent-clipped=2.0 +2024-08-26 14:39:21,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=44309.333333333336, ans=0.125 +2024-08-26 14:39:22,910 INFO [train.py:1114] (3/4) Epoch 4, batch 850, loss[loss=0.2993, simple_loss=0.342, pruned_loss=0.09232, ctc_loss=0.1797, over 19650.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3236, pruned_loss=0.08841, ctc_loss=0.1657, over 3815674.58 frames. ], batch size: 59, lr: 3.21e-02, grad_scale: 16.0 +2024-08-26 14:39:40,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=44416.0, ans=0.0 +2024-08-26 14:39:48,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.47 vs. limit=15.0 +2024-08-26 14:39:49,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.38 vs. 
limit=22.5 +2024-08-26 14:39:50,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=44469.333333333336, ans=0.2 +2024-08-26 14:39:53,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=44522.666666666664, ans=0.0 +2024-08-26 14:39:54,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=44522.666666666664, ans=0.125 +2024-08-26 14:40:02,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44576.0, ans=0.1 +2024-08-26 14:40:04,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=44576.0, ans=0.05 +2024-08-26 14:40:11,456 INFO [train.py:1114] (3/4) Epoch 4, batch 900, loss[loss=0.2722, simple_loss=0.3109, pruned_loss=0.085, ctc_loss=0.1588, over 19805.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3241, pruned_loss=0.08887, ctc_loss=0.1663, over 3819998.93 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:40:15,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=44629.333333333336, ans=0.125 +2024-08-26 14:40:27,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=44682.666666666664, ans=0.2 +2024-08-26 14:40:27,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44682.666666666664, ans=0.125 +2024-08-26 14:40:28,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44682.666666666664, ans=0.0 +2024-08-26 14:40:35,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=44736.0, ans=0.0 +2024-08-26 14:40:56,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=44842.666666666664, ans=0.125 +2024-08-26 14:40:59,418 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.686e+02 1.871e+02 2.157e+02 4.639e+02, threshold=3.742e+02, percent-clipped=1.0 +2024-08-26 14:41:03,430 INFO [train.py:1114] (3/4) Epoch 4, batch 950, loss[loss=0.2671, simple_loss=0.3115, pruned_loss=0.08104, ctc_loss=0.1517, over 19522.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3246, pruned_loss=0.08917, ctc_loss=0.1668, over 3822507.68 frames. ], batch size: 49, lr: 3.20e-02, grad_scale: 16.0 +2024-08-26 14:41:19,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-08-26 14:41:36,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=45056.0, ans=0.125 +2024-08-26 14:41:41,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45056.0, ans=0.1 +2024-08-26 14:41:54,753 INFO [train.py:1114] (3/4) Epoch 4, batch 1000, loss[loss=0.2361, simple_loss=0.2923, pruned_loss=0.06581, ctc_loss=0.1205, over 19859.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3257, pruned_loss=0.08965, ctc_loss=0.1678, over 3818724.57 frames. 
], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-26 14:41:57,986 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:41:58,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=45162.666666666664, ans=0.025 +2024-08-26 14:42:14,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=45216.0, ans=0.0 +2024-08-26 14:42:32,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.99 vs. limit=15.0 +2024-08-26 14:42:36,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=45376.0, ans=0.2 +2024-08-26 14:42:42,493 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.702e+02 1.844e+02 2.187e+02 3.225e+02, threshold=3.689e+02, percent-clipped=0.0 +2024-08-26 14:42:44,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=45376.0, ans=0.125 +2024-08-26 14:42:46,499 INFO [train.py:1114] (3/4) Epoch 4, batch 1050, loss[loss=0.2992, simple_loss=0.3414, pruned_loss=0.09428, ctc_loss=0.171, over 19842.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3243, pruned_loss=0.08906, ctc_loss=0.1666, over 3824088.49 frames. ], batch size: 57, lr: 3.19e-02, grad_scale: 16.0 +2024-08-26 14:42:50,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0 +2024-08-26 14:42:57,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=45482.666666666664, ans=0.2 +2024-08-26 14:42:59,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=45482.666666666664, ans=0.0009820289855072464 +2024-08-26 14:43:08,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=45536.0, ans=0.0 +2024-08-26 14:43:18,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=45589.333333333336, ans=0.05 +2024-08-26 14:43:19,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.45 vs. limit=10.0 +2024-08-26 14:43:29,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45642.666666666664, ans=0.1 +2024-08-26 14:43:33,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=45642.666666666664, ans=0.0 +2024-08-26 14:43:37,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=45696.0, ans=0.125 +2024-08-26 14:43:38,134 INFO [train.py:1114] (3/4) Epoch 4, batch 1100, loss[loss=0.2532, simple_loss=0.3038, pruned_loss=0.07412, ctc_loss=0.1358, over 19586.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3241, pruned_loss=0.08869, ctc_loss=0.166, over 3832440.39 frames. 
], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-26 14:43:40,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.77 vs. limit=22.5 +2024-08-26 14:43:46,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=45696.0, ans=0.07 +2024-08-26 14:43:53,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=45749.333333333336, ans=0.0 +2024-08-26 14:44:01,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.30 vs. limit=15.0 +2024-08-26 14:44:16,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.04 vs. limit=22.5 +2024-08-26 14:44:19,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=45909.333333333336, ans=0.025 +2024-08-26 14:44:25,689 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.453e+02 1.748e+02 1.997e+02 2.350e+02 6.199e+02, threshold=3.995e+02, percent-clipped=5.0 +2024-08-26 14:44:29,550 INFO [train.py:1114] (3/4) Epoch 4, batch 1150, loss[loss=0.2708, simple_loss=0.312, pruned_loss=0.08318, ctc_loss=0.158, over 19569.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3238, pruned_loss=0.08867, ctc_loss=0.1658, over 3830740.64 frames. ], batch size: 52, lr: 3.18e-02, grad_scale: 16.0 +2024-08-26 14:44:33,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=45962.666666666664, ans=0.125 +2024-08-26 14:45:32,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=46016.0, ans=0.125 +2024-08-26 14:46:02,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=46016.0, ans=0.0 +2024-08-26 14:46:29,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46176.0, ans=0.125 +2024-08-26 14:46:38,939 INFO [train.py:1114] (3/4) Epoch 4, batch 1200, loss[loss=0.2845, simple_loss=0.3228, pruned_loss=0.08914, ctc_loss=0.1698, over 19841.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3251, pruned_loss=0.08926, ctc_loss=0.167, over 3825768.46 frames. 
], batch size: 57, lr: 3.17e-02, grad_scale: 32.0 +2024-08-26 14:46:43,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=46229.333333333336, ans=0.07 +2024-08-26 14:46:48,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=46282.666666666664, ans=0.2 +2024-08-26 14:46:48,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=46282.666666666664, ans=0.2 +2024-08-26 14:46:59,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=46336.0, ans=0.125 +2024-08-26 14:47:03,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=46336.0, ans=0.125 +2024-08-26 14:47:13,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46389.333333333336, ans=0.1 +2024-08-26 14:47:14,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=46389.333333333336, ans=0.125 +2024-08-26 14:47:14,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46389.333333333336, ans=0.1 +2024-08-26 14:47:23,211 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.478e+02 1.767e+02 1.944e+02 2.283e+02 5.479e+02, threshold=3.889e+02, percent-clipped=1.0 +2024-08-26 14:47:29,962 INFO [train.py:1114] (3/4) Epoch 4, batch 1250, loss[loss=0.3026, simple_loss=0.3436, pruned_loss=0.09553, ctc_loss=0.1762, over 19502.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3255, pruned_loss=0.08908, ctc_loss=0.1664, over 3844202.12 frames. 
], batch size: 61, lr: 3.17e-02, grad_scale: 32.0 +2024-08-26 14:47:30,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=46496.0, ans=0.0 +2024-08-26 14:47:44,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=46549.333333333336, ans=0.125 +2024-08-26 14:47:44,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=46549.333333333336, ans=0.2 +2024-08-26 14:47:47,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=46549.333333333336, ans=0.2 +2024-08-26 14:47:50,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=46602.666666666664, ans=0.125 +2024-08-26 14:47:52,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=46602.666666666664, ans=0.025 +2024-08-26 14:47:55,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46602.666666666664, ans=0.125 +2024-08-26 14:47:58,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=46656.0, ans=0.0 +2024-08-26 14:47:59,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=46656.0, ans=0.125 +2024-08-26 14:48:19,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=46709.333333333336, ans=0.2 +2024-08-26 14:48:20,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=46709.333333333336, ans=0.125 +2024-08-26 14:48:22,055 INFO [train.py:1114] (3/4) Epoch 4, batch 1300, loss[loss=0.3018, simple_loss=0.3411, pruned_loss=0.09521, ctc_loss=0.1803, over 18910.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3244, pruned_loss=0.08837, ctc_loss=0.1654, over 3847151.65 frames. ], batch size: 76, lr: 3.16e-02, grad_scale: 32.0 +2024-08-26 14:48:22,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=46762.666666666664, ans=0.125 +2024-08-26 14:48:41,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=46869.333333333336, ans=0.2 +2024-08-26 14:48:54,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.69 vs. limit=22.5 +2024-08-26 14:49:06,445 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.633e+02 1.793e+02 2.136e+02 4.035e+02, threshold=3.586e+02, percent-clipped=1.0 +2024-08-26 14:49:09,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=47029.333333333336, ans=0.0006457971014492744 +2024-08-26 14:49:10,214 INFO [train.py:1114] (3/4) Epoch 4, batch 1350, loss[loss=0.2774, simple_loss=0.3215, pruned_loss=0.08358, ctc_loss=0.1652, over 19767.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.324, pruned_loss=0.08766, ctc_loss=0.1643, over 3859051.87 frames. 
], batch size: 54, lr: 3.16e-02, grad_scale: 32.0 +2024-08-26 14:49:22,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=47082.666666666664, ans=0.125 +2024-08-26 14:49:29,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=47082.666666666664, ans=0.95 +2024-08-26 14:49:29,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=47082.666666666664, ans=0.0 +2024-08-26 14:49:43,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=47189.333333333336, ans=0.125 +2024-08-26 14:50:01,604 INFO [train.py:1114] (3/4) Epoch 4, batch 1400, loss[loss=0.2532, simple_loss=0.2901, pruned_loss=0.07976, ctc_loss=0.1419, over 19683.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3235, pruned_loss=0.08754, ctc_loss=0.1642, over 3865983.97 frames. ], batch size: 46, lr: 3.15e-02, grad_scale: 32.0 +2024-08-26 14:50:33,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=47456.0, ans=0.125 +2024-08-26 14:50:43,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47509.333333333336, ans=0.1 +2024-08-26 14:50:49,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.701e+02 1.930e+02 2.137e+02 5.469e+02, threshold=3.859e+02, percent-clipped=2.0 +2024-08-26 14:50:53,070 INFO [train.py:1114] (3/4) Epoch 4, batch 1450, loss[loss=0.2868, simple_loss=0.3351, pruned_loss=0.08656, ctc_loss=0.1634, over 19707.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3247, pruned_loss=0.08828, ctc_loss=0.1652, over 3862963.78 frames. ], batch size: 63, lr: 3.15e-02, grad_scale: 32.0 +2024-08-26 14:51:21,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. limit=15.0 +2024-08-26 14:51:43,452 INFO [train.py:1114] (3/4) Epoch 4, batch 1500, loss[loss=0.2716, simple_loss=0.3245, pruned_loss=0.0786, ctc_loss=0.1536, over 19585.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3245, pruned_loss=0.08803, ctc_loss=0.1649, over 3862305.15 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 32.0 +2024-08-26 14:51:47,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=47829.333333333336, ans=0.125 +2024-08-26 14:52:10,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=47936.0, ans=0.0 +2024-08-26 14:52:13,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47936.0, ans=0.125 +2024-08-26 14:52:18,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.38 vs. limit=15.0 +2024-08-26 14:52:34,698 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.344e+02 1.743e+02 1.956e+02 2.243e+02 3.928e+02, threshold=3.912e+02, percent-clipped=1.0 +2024-08-26 14:52:38,433 INFO [train.py:1114] (3/4) Epoch 4, batch 1550, loss[loss=0.3082, simple_loss=0.3518, pruned_loss=0.09753, ctc_loss=0.174, over 19628.00 frames. 
], tot_loss[loss=0.2842, simple_loss=0.325, pruned_loss=0.08856, ctc_loss=0.1658, over 3846711.23 frames. ], batch size: 60, lr: 3.14e-02, grad_scale: 32.0 +2024-08-26 14:52:40,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=48096.0, ans=0.2 +2024-08-26 14:52:41,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.65 vs. limit=15.0 +2024-08-26 14:53:02,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=48202.666666666664, ans=0.125 +2024-08-26 14:53:15,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=48256.0, ans=0.1 +2024-08-26 14:53:21,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=48309.333333333336, ans=0.125 +2024-08-26 14:53:22,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48309.333333333336, ans=0.1 +2024-08-26 14:53:25,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48309.333333333336, ans=0.1 +2024-08-26 14:53:29,867 INFO [train.py:1114] (3/4) Epoch 4, batch 1600, loss[loss=0.2846, simple_loss=0.3258, pruned_loss=0.08833, ctc_loss=0.1667, over 19850.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3249, pruned_loss=0.08882, ctc_loss=0.1661, over 3836199.50 frames. ], batch size: 57, lr: 3.13e-02, grad_scale: 32.0 +2024-08-26 14:53:34,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=15.0 +2024-08-26 14:53:36,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=48362.666666666664, ans=0.125 +2024-08-26 14:53:38,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=48362.666666666664, ans=0.2 +2024-08-26 14:53:44,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=48416.0, ans=0.2 +2024-08-26 14:54:08,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.41 vs. limit=15.0 +2024-08-26 14:54:11,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=48576.0, ans=0.0 +2024-08-26 14:54:18,023 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.502e+02 1.701e+02 1.882e+02 2.341e+02 4.982e+02, threshold=3.764e+02, percent-clipped=3.0 +2024-08-26 14:54:21,788 INFO [train.py:1114] (3/4) Epoch 4, batch 1650, loss[loss=0.2861, simple_loss=0.3297, pruned_loss=0.08779, ctc_loss=0.1673, over 19672.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3244, pruned_loss=0.08865, ctc_loss=0.1658, over 3832501.89 frames. ], batch size: 59, lr: 3.13e-02, grad_scale: 32.0 +2024-08-26 14:54:30,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.55 vs. 
limit=15.0 +2024-08-26 14:54:36,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.46 vs. limit=15.0 +2024-08-26 14:54:38,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=48682.666666666664, ans=0.0 +2024-08-26 14:55:17,035 INFO [train.py:1114] (3/4) Epoch 4, batch 1700, loss[loss=0.256, simple_loss=0.2948, pruned_loss=0.0784, ctc_loss=0.1513, over 19656.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3236, pruned_loss=0.08792, ctc_loss=0.1644, over 3846039.49 frames. ], batch size: 46, lr: 3.12e-02, grad_scale: 32.0 +2024-08-26 14:55:32,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48949.333333333336, ans=0.125 +2024-08-26 14:55:36,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=49002.666666666664, ans=0.125 +2024-08-26 14:55:39,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=49002.666666666664, ans=0.00021681159420289947 +2024-08-26 14:55:43,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-08-26 14:55:48,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=49056.0, ans=0.125 +2024-08-26 14:55:51,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49056.0, ans=0.125 +2024-08-26 14:55:53,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49109.333333333336, ans=0.1 +2024-08-26 14:55:56,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=49109.333333333336, ans=0.125 +2024-08-26 14:55:59,544 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.770e+02 1.975e+02 2.193e+02 4.882e+02, threshold=3.950e+02, percent-clipped=1.0 +2024-08-26 14:56:03,239 INFO [train.py:1114] (3/4) Epoch 4, batch 1750, loss[loss=0.2528, simple_loss=0.2915, pruned_loss=0.07746, ctc_loss=0.1476, over 19646.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3225, pruned_loss=0.08737, ctc_loss=0.1634, over 3852194.08 frames. ], batch size: 45, lr: 3.11e-02, grad_scale: 32.0 +2024-08-26 14:56:21,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49269.333333333336, ans=0.125 +2024-08-26 14:56:44,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=49376.0, ans=0.2 +2024-08-26 14:56:48,543 INFO [train.py:1114] (3/4) Epoch 4, batch 1800, loss[loss=0.2926, simple_loss=0.3324, pruned_loss=0.0935, ctc_loss=0.1646, over 19617.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3227, pruned_loss=0.08728, ctc_loss=0.1632, over 3854640.81 frames. ], batch size: 55, lr: 3.11e-02, grad_scale: 32.0 +2024-08-26 14:56:49,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.97 vs. 
limit=22.5 +2024-08-26 14:56:51,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.79 vs. limit=22.5 +2024-08-26 14:56:52,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49429.333333333336, ans=0.125 +2024-08-26 14:56:53,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=49429.333333333336, ans=0.0 +2024-08-26 14:57:17,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.03 vs. limit=10.0 +2024-08-26 14:57:29,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49642.666666666664, ans=0.125 +2024-08-26 14:57:30,234 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.411e+02 1.664e+02 1.898e+02 2.172e+02 3.982e+02, threshold=3.795e+02, percent-clipped=1.0 +2024-08-26 14:57:33,983 INFO [train.py:1114] (3/4) Epoch 4, batch 1850, loss[loss=0.3079, simple_loss=0.349, pruned_loss=0.09704, ctc_loss=0.1817, over 19608.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.322, pruned_loss=0.08675, ctc_loss=0.1621, over 3857733.12 frames. ], batch size: 57, lr: 3.10e-02, grad_scale: 32.0 +2024-08-26 14:57:44,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.60 vs. limit=10.0 +2024-08-26 14:57:51,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-08-26 14:58:03,876 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 14:58:15,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=49909.333333333336, ans=0.125 +2024-08-26 14:58:16,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=49909.333333333336, ans=0.125 +2024-08-26 14:58:18,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.46 vs. limit=22.5 +2024-08-26 14:58:21,304 INFO [train.py:1114] (3/4) Epoch 4, batch 1900, loss[loss=0.2994, simple_loss=0.3446, pruned_loss=0.09143, ctc_loss=0.1786, over 19650.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3227, pruned_loss=0.08689, ctc_loss=0.1621, over 3862698.23 frames. ], batch size: 59, lr: 3.10e-02, grad_scale: 16.0 +2024-08-26 14:58:41,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.77 vs. 
limit=15.0 +2024-08-26 14:58:43,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=50069.333333333336, ans=0.025 +2024-08-26 14:58:51,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=50122.666666666664, ans=0.2 +2024-08-26 14:58:57,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=50176.0, ans=0.125 +2024-08-26 14:58:57,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=15.0 +2024-08-26 14:59:03,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.687e+02 1.820e+02 2.228e+02 3.741e+02, threshold=3.639e+02, percent-clipped=0.0 +2024-08-26 14:59:06,158 INFO [train.py:1114] (3/4) Epoch 4, batch 1950, loss[loss=0.2691, simple_loss=0.3142, pruned_loss=0.08147, ctc_loss=0.1526, over 19594.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.324, pruned_loss=0.08698, ctc_loss=0.1621, over 3871808.73 frames. ], batch size: 52, lr: 3.09e-02, grad_scale: 16.0 +2024-08-26 14:59:28,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50336.0, ans=0.1 +2024-08-26 14:59:47,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=50442.666666666664, ans=0.04949747468305833 +2024-08-26 14:59:53,437 INFO [train.py:1114] (3/4) Epoch 4, batch 2000, loss[loss=0.2561, simple_loss=0.2915, pruned_loss=0.07932, ctc_loss=0.1553, over 19633.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3244, pruned_loss=0.08741, ctc_loss=0.163, over 3856302.96 frames. ], batch size: 45, lr: 3.09e-02, grad_scale: 32.0 +2024-08-26 15:00:04,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50549.333333333336, ans=0.125 +2024-08-26 15:00:07,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=50549.333333333336, ans=0.2 +2024-08-26 15:00:20,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0 +2024-08-26 15:00:28,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=50709.333333333336, ans=0.2 +2024-08-26 15:00:34,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.99 vs. limit=22.5 +2024-08-26 15:00:35,439 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 1.722e+02 2.023e+02 2.377e+02 8.657e+02, threshold=4.047e+02, percent-clipped=4.0 +2024-08-26 15:00:38,081 INFO [train.py:1114] (3/4) Epoch 4, batch 2050, loss[loss=0.2503, simple_loss=0.2999, pruned_loss=0.07253, ctc_loss=0.1389, over 19726.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3232, pruned_loss=0.08711, ctc_loss=0.1624, over 3852633.65 frames. 
], batch size: 47, lr: 3.08e-02, grad_scale: 32.0 +2024-08-26 15:00:40,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=50762.666666666664, ans=0.0 +2024-08-26 15:00:51,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=50816.0, ans=0.025 +2024-08-26 15:01:09,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=50922.666666666664, ans=0.125 +2024-08-26 15:01:14,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0 +2024-08-26 15:01:17,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=50976.0, ans=0.025 +2024-08-26 15:01:19,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50976.0, ans=0.1 +2024-08-26 15:01:22,470 INFO [train.py:1114] (3/4) Epoch 4, batch 2100, loss[loss=0.2749, simple_loss=0.3279, pruned_loss=0.08009, ctc_loss=0.1542, over 19752.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3215, pruned_loss=0.08575, ctc_loss=0.1601, over 3859963.60 frames. ], batch size: 54, lr: 3.08e-02, grad_scale: 32.0 +2024-08-26 15:01:24,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=51029.333333333336, ans=0.125 +2024-08-26 15:01:24,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=51029.333333333336, ans=0.2 +2024-08-26 15:01:30,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51082.666666666664, ans=0.125 +2024-08-26 15:01:35,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51082.666666666664, ans=0.125 +2024-08-26 15:01:43,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=51136.0, ans=0.07 +2024-08-26 15:01:49,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.54 vs. limit=22.5 +2024-08-26 15:01:55,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.81 vs. limit=15.0 +2024-08-26 15:01:58,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51242.666666666664, ans=0.125 +2024-08-26 15:02:04,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.626e+02 1.780e+02 1.895e+02 2.709e+02, threshold=3.561e+02, percent-clipped=0.0 +2024-08-26 15:02:07,173 INFO [train.py:1114] (3/4) Epoch 4, batch 2150, loss[loss=0.2644, simple_loss=0.3122, pruned_loss=0.07828, ctc_loss=0.1499, over 19850.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3208, pruned_loss=0.08571, ctc_loss=0.16, over 3870785.76 frames. 
], batch size: 52, lr: 3.07e-02, grad_scale: 32.0
+2024-08-26 15:02:09,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=51296.0, ans=0.0
+2024-08-26 15:02:12,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.61 vs. limit=5.0
+2024-08-26 15:02:14,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51296.0, ans=0.1
+2024-08-26 15:02:19,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51349.333333333336, ans=0.0
+2024-08-26 15:02:37,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51456.0, ans=0.125
+2024-08-26 15:02:54,342 INFO [train.py:1114] (3/4) Epoch 4, batch 2200, loss[loss=0.3265, simple_loss=0.3492, pruned_loss=0.1125, ctc_loss=0.1973, over 19595.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3208, pruned_loss=0.0856, ctc_loss=0.1598, over 3868547.89 frames. ], batch size: 57, lr: 3.07e-02, grad_scale: 32.0
+2024-08-26 15:02:56,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51562.666666666664, ans=0.125
+2024-08-26 15:02:57,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0
+2024-08-26 15:02:59,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=51562.666666666664, ans=0.0
+2024-08-26 15:03:00,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51562.666666666664, ans=0.0
+2024-08-26 15:03:01,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=51562.666666666664, ans=0.125
+2024-08-26 15:03:19,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=51669.333333333336, ans=0.125
+2024-08-26 15:03:28,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=51722.666666666664, ans=0.125
+2024-08-26 15:03:33,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=51776.0, ans=0.0
+2024-08-26 15:03:36,542 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.424e+02 1.687e+02 1.993e+02 2.251e+02 9.209e+02, threshold=3.987e+02, percent-clipped=2.0
+2024-08-26 15:03:39,210 INFO [train.py:1114] (3/4) Epoch 4, batch 2250, loss[loss=0.2829, simple_loss=0.3282, pruned_loss=0.08685, ctc_loss=0.1596, over 19616.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3209, pruned_loss=0.08565, ctc_loss=0.16, over 3868291.30 frames. ], batch size: 55, lr: 3.06e-02, grad_scale: 32.0
+2024-08-26 15:03:51,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=51882.666666666664, ans=0.125
+2024-08-26 15:03:54,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51882.666666666664, ans=0.1
+2024-08-26 15:03:56,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=51936.0, ans=0.025
+2024-08-26 15:03:59,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=51936.0, ans=0.0
+2024-08-26 15:04:11,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=51989.333333333336, ans=0.0
+2024-08-26 15:04:15,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=52042.666666666664, ans=0.0
+2024-08-26 15:04:23,355 INFO [train.py:1114] (3/4) Epoch 4, batch 2300, loss[loss=0.2503, simple_loss=0.2959, pruned_loss=0.07353, ctc_loss=0.1442, over 19494.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3202, pruned_loss=0.08567, ctc_loss=0.1599, over 3861732.83 frames. ], batch size: 49, lr: 3.06e-02, grad_scale: 32.0
+2024-08-26 15:04:23,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52096.0, ans=0.125
+2024-08-26 15:04:34,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=52149.333333333336, ans=0.0
+2024-08-26 15:04:35,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=52149.333333333336, ans=0.125
+2024-08-26 15:04:35,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=52149.333333333336, ans=0.125
+2024-08-26 15:04:37,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52149.333333333336, ans=0.1
+2024-08-26 15:04:40,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.10 vs. limit=15.0
+2024-08-26 15:05:06,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.78 vs. limit=12.0
+2024-08-26 15:05:06,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.435e+02 1.800e+02 1.978e+02 2.376e+02 5.904e+02, threshold=3.955e+02, percent-clipped=2.0
+2024-08-26 15:05:09,379 INFO [train.py:1114] (3/4) Epoch 4, batch 2350, loss[loss=0.3233, simple_loss=0.3588, pruned_loss=0.1056, ctc_loss=0.1916, over 19687.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3201, pruned_loss=0.08575, ctc_loss=0.1602, over 3864397.32 frames. ], batch size: 63, lr: 3.05e-02, grad_scale: 32.0
+2024-08-26 15:05:09,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52362.666666666664, ans=0.1
+2024-08-26 15:05:13,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=52362.666666666664, ans=0.2
+2024-08-26 15:05:22,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52416.0, ans=0.1
+2024-08-26 15:05:23,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=52416.0, ans=0.125
+2024-08-26 15:05:29,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52469.333333333336, ans=0.125
+2024-08-26 15:05:39,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=52522.666666666664, ans=0.0
+2024-08-26 15:05:40,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52522.666666666664, ans=0.125
+2024-08-26 15:05:42,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=52522.666666666664, ans=0.125
+2024-08-26 15:05:44,245 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:05:45,429 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0
+2024-08-26 15:05:48,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52576.0, ans=0.1
+2024-08-26 15:05:51,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=52576.0, ans=0.125
+2024-08-26 15:05:51,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=52576.0, ans=12.0
+2024-08-26 15:06:03,787 INFO [train.py:1114] (3/4) Epoch 4, batch 2400, loss[loss=0.2887, simple_loss=0.3305, pruned_loss=0.08882, ctc_loss=0.1733, over 19311.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3229, pruned_loss=0.08706, ctc_loss=0.1623, over 3858609.02 frames. ], batch size: 71, lr: 3.05e-02, grad_scale: 32.0
+2024-08-26 15:06:07,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.65 vs. limit=15.0
+2024-08-26 15:06:11,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=52629.333333333336, ans=0.0
+2024-08-26 15:06:25,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=52682.666666666664, ans=0.0
+2024-08-26 15:06:41,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=52789.333333333336, ans=0.0
+2024-08-26 15:06:48,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52842.666666666664, ans=0.125
+2024-08-26 15:06:51,976 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=12.44 vs. limit=12.0
+2024-08-26 15:06:53,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.448e+02 1.824e+02 2.127e+02 2.398e+02 5.215e+02, threshold=4.254e+02, percent-clipped=1.0
+2024-08-26 15:06:55,089 INFO [train.py:1114] (3/4) Epoch 4, batch 2450, loss[loss=0.3493, simple_loss=0.3541, pruned_loss=0.1255, ctc_loss=0.2335, over 13352.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3282, pruned_loss=0.09175, ctc_loss=0.1711, over 3729049.40 frames. ], batch size: 140, lr: 3.05e-02, grad_scale: 16.0
+2024-08-26 15:06:55,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=52896.0, ans=0.0
+2024-08-26 15:06:57,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52896.0, ans=0.1
+2024-08-26 15:07:05,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=52949.333333333336, ans=0.0
+2024-08-26 15:07:05,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52949.333333333336, ans=0.0
+2024-08-26 15:07:05,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52949.333333333336, ans=0.125
+2024-08-26 15:07:08,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=52949.333333333336, ans=0.0
+2024-08-26 15:07:09,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=52949.333333333336, ans=0.2
+2024-08-26 15:07:13,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=53002.666666666664, ans=0.05
+2024-08-26 15:07:15,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.23 vs. limit=12.0
+2024-08-26 15:09:12,295 INFO [train.py:1114] (3/4) Epoch 5, batch 0, loss[loss=0.257, simple_loss=0.2981, pruned_loss=0.07799, ctc_loss=0.1498, over 19433.00 frames. ], tot_loss[loss=0.257, simple_loss=0.2981, pruned_loss=0.07799, ctc_loss=0.1498, over 19433.00 frames. ], batch size: 48, lr: 2.83e-02, grad_scale: 32.0
+2024-08-26 15:09:12,295 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-26 15:09:22,080 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.2289, simple_loss=0.3118, pruned_loss=0.05352, ctc_loss=0.09739, over 944034.00 frames.
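The train.py lines above report two sets of figures per logging step: loss[...] for the current batch and tot_loss[...], a frame-weighted running aggregate (note how at Epoch 5, batch 0 the two coincide, since the running total has just been reset). A minimal sketch of that bookkeeping, with a decay factor chosen purely for illustration rather than taken from train.py:

```python
# Sketch of the bookkeeping behind the "loss[...] over N frames" /
# "tot_loss[...] over M frames" pairs: per-batch metrics are accumulated
# weighted by frame count, and old totals decay so tot_loss tracks a
# window of recent batches. The decay value is an illustrative assumption.
class RunningLoss:
    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.tot = {"frames": 0.0, "loss": 0.0, "simple_loss": 0.0,
                    "pruned_loss": 0.0, "ctc_loss": 0.0}

    def update(self, frames: float, **losses: float) -> None:
        # Decay old totals, then add this batch, frame-weighted.
        for k in self.tot:
            self.tot[k] *= self.decay
        self.tot["frames"] += frames
        for k, v in losses.items():
            self.tot[k] += v * frames

    def averages(self) -> dict:
        # Per-frame averages, i.e. the numbers printed in tot_loss[...].
        f = self.tot["frames"]
        return {k: v / f for k, v in self.tot.items() if k != "frames"}

tracker = RunningLoss()
tracker.update(19433.0, loss=0.257, simple_loss=0.2981,
               pruned_loss=0.07799, ctc_loss=0.1498)  # Epoch 5, batch 0
print(tracker.averages())  # equals the batch values after a reset
```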
+2024-08-26 15:09:22,763 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12756MB
+2024-08-26 15:09:49,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53210.666666666664, ans=0.125
+2024-08-26 15:09:53,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=53264.0, ans=0.2
+2024-08-26 15:10:10,886 INFO [train.py:1114] (3/4) Epoch 5, batch 50, loss[loss=0.2465, simple_loss=0.2878, pruned_loss=0.07533, ctc_loss=0.1363, over 19704.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3256, pruned_loss=0.08939, ctc_loss=0.1674, over 843823.62 frames. ], batch size: 47, lr: 2.83e-02, grad_scale: 32.0
+2024-08-26 15:10:11,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53370.666666666664, ans=0.125
+2024-08-26 15:10:22,337 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.804e+02 2.028e+02 2.297e+02 4.038e+02, threshold=4.056e+02, percent-clipped=0.0
+2024-08-26 15:10:42,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-26 15:10:44,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-26 15:10:45,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-26 15:10:46,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=53530.666666666664, ans=0.125
+2024-08-26 15:10:48,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-08-26 15:10:53,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=53584.0, ans=0.0
+2024-08-26 15:11:01,274 INFO [train.py:1114] (3/4) Epoch 5, batch 100, loss[loss=0.2688, simple_loss=0.3128, pruned_loss=0.08223, ctc_loss=0.1509, over 19698.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3252, pruned_loss=0.08756, ctc_loss=0.1647, over 1498517.54 frames. ], batch size: 51, lr: 2.82e-02, grad_scale: 32.0
+2024-08-26 15:11:12,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=53637.333333333336, ans=0.125
+2024-08-26 15:11:12,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=15.0
+2024-08-26 15:11:18,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=53690.666666666664, ans=0.0
+2024-08-26 15:11:23,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=53744.0, ans=0.125
+2024-08-26 15:11:29,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=53744.0, ans=0.07
+2024-08-26 15:11:59,603 INFO [train.py:1114] (3/4) Epoch 5, batch 150, loss[loss=0.2384, simple_loss=0.288, pruned_loss=0.0685, ctc_loss=0.1296, over 19709.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3212, pruned_loss=0.08549, ctc_loss=0.1604, over 2027402.71 frames. ], batch size: 47, lr: 2.82e-02, grad_scale: 32.0
+2024-08-26 15:12:00,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=53904.0, ans=0.125
+2024-08-26 15:12:06,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=53904.0, ans=0.0
+2024-08-26 15:12:10,018 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.442e+02 1.696e+02 1.862e+02 2.172e+02 3.492e+02, threshold=3.724e+02, percent-clipped=0.0
+2024-08-26 15:12:26,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=54010.666666666664, ans=0.125
+2024-08-26 15:12:29,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=54064.0, ans=0.05
+2024-08-26 15:12:30,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54064.0, ans=0.1
+2024-08-26 15:12:32,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=54064.0, ans=0.2
+2024-08-26 15:12:33,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0
+2024-08-26 15:12:35,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.52 vs. limit=15.0
+2024-08-26 15:12:48,445 INFO [train.py:1114] (3/4) Epoch 5, batch 200, loss[loss=0.3081, simple_loss=0.3479, pruned_loss=0.09814, ctc_loss=0.1801, over 18289.00 frames. ], tot_loss[loss=0.275, simple_loss=0.3192, pruned_loss=0.08404, ctc_loss=0.157, over 2435589.46 frames. ], batch size: 85, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:12:52,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=54170.666666666664, ans=0.0
+2024-08-26 15:12:53,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=54170.666666666664, ans=0.05
+2024-08-26 15:12:58,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54224.0, ans=0.1
+2024-08-26 15:12:58,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=54224.0, ans=0.125
+2024-08-26 15:13:20,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=54277.333333333336, ans=0.125
+2024-08-26 15:13:29,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=54330.666666666664, ans=0.125
+2024-08-26 15:13:41,973 INFO [train.py:1114] (3/4) Epoch 5, batch 250, loss[loss=0.2972, simple_loss=0.336, pruned_loss=0.09473, ctc_loss=0.1724, over 19386.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3179, pruned_loss=0.08314, ctc_loss=0.1555, over 2755462.74 frames. ], batch size: 67, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:13:50,500 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.685e+02 1.803e+02 2.078e+02 3.456e+02, threshold=3.607e+02, percent-clipped=0.0
+2024-08-26 15:14:17,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54597.333333333336, ans=0.1
+2024-08-26 15:14:32,302 INFO [train.py:1114] (3/4) Epoch 5, batch 300, loss[loss=0.3075, simple_loss=0.3443, pruned_loss=0.09962, ctc_loss=0.1784, over 19483.00 frames. ], tot_loss[loss=0.2727, simple_loss=0.3174, pruned_loss=0.08295, ctc_loss=0.1551, over 2998701.36 frames. ], batch size: 61, lr: 2.81e-02, grad_scale: 32.0
+2024-08-26 15:14:32,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=54704.0, ans=0.2
+2024-08-26 15:15:16,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=54917.333333333336, ans=0.125
+2024-08-26 15:15:22,173 INFO [train.py:1114] (3/4) Epoch 5, batch 350, loss[loss=0.269, simple_loss=0.3005, pruned_loss=0.08692, ctc_loss=0.1591, over 19765.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3173, pruned_loss=0.08279, ctc_loss=0.1546, over 3188431.96 frames. ], batch size: 48, lr: 2.80e-02, grad_scale: 32.0
+2024-08-26 15:15:25,543 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:15:31,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.410e+02 1.717e+02 1.933e+02 2.233e+02 3.797e+02, threshold=3.865e+02, percent-clipped=1.0
+2024-08-26 15:15:33,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=55024.0, ans=0.2
+2024-08-26 15:15:39,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=55024.0, ans=0.025
+2024-08-26 15:15:39,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=55024.0, ans=0.07
+2024-08-26 15:15:47,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=55077.333333333336, ans=0.125
+2024-08-26 15:16:15,656 INFO [train.py:1114] (3/4) Epoch 5, batch 400, loss[loss=0.285, simple_loss=0.3314, pruned_loss=0.08692, ctc_loss=0.162, over 19493.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3162, pruned_loss=0.08169, ctc_loss=0.1529, over 3340848.49 frames. ], batch size: 54, lr: 2.80e-02, grad_scale: 32.0
+2024-08-26 15:16:20,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.22 vs. limit=15.0
+2024-08-26 15:16:23,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=55237.333333333336, ans=0.125
+2024-08-26 15:16:44,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=55397.333333333336, ans=0.2
+2024-08-26 15:16:46,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.99 vs. limit=15.0
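The scaling.py:214 lines record ScheduledFloat values: regularization knobs such as dropout_p, skip rates and balancer probabilities that are not fixed constants but functions of the global batch_count (the ans= field is the schedule's current value). A sketch of the idea as piecewise-linear interpolation over (batch_count, value) breakpoints; the breakpoints below are illustrative, not the schedules this run used:

```python
# Sketch of a ScheduledFloat: a scalar hyperparameter defined by linear
# interpolation between (batch_count, value) points, clamped at the ends.
import bisect

class ScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count.
        self.xs = [p[0] for p in points]
        self.ys = [p[1] for p in points]

    def value(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        t = (batch_count - x0) / (x1 - x0)
        return y0 + t * (y1 - y0)

# e.g. a conv_skip_rate that anneals to zero over the first 20k batches
# (hypothetical breakpoints; by batch_count=51296 it has reached 0.0,
# matching the ans=0.0 readings above):
conv_skip_rate = ScheduledFloat((0.0, 0.2), (20000.0, 0.0))
print(conv_skip_rate.value(51296.0))  # -> 0.0
```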
+2024-08-26 15:19:51,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=56037.333333333336, ans=0.125
+2024-08-26 15:20:04,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=56090.666666666664, ans=0.2
+2024-08-26 15:20:04,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.339e+02 1.676e+02 1.860e+02 2.053e+02 4.118e+02, threshold=3.720e+02, percent-clipped=1.0
+2024-08-26 15:20:05,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.02 vs. limit=22.5
+2024-08-26 15:20:27,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=56144.0, ans=0.0
+2024-08-26 15:20:42,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.22 vs. limit=15.0
+2024-08-26 15:20:45,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=56250.666666666664, ans=0.07
+2024-08-26 15:20:56,218 INFO [train.py:1114] (3/4) Epoch 5, batch 600, loss[loss=0.2928, simple_loss=0.3364, pruned_loss=0.09017, ctc_loss=0.1718, over 19442.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3157, pruned_loss=0.08145, ctc_loss=0.1519, over 3666424.63 frames. ], batch size: 67, lr: 2.78e-02, grad_scale: 16.0
+2024-08-26 15:21:01,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=56304.0, ans=0.1
+2024-08-26 15:21:12,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56357.333333333336, ans=0.1
+2024-08-26 15:21:14,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.03 vs. limit=22.5
+2024-08-26 15:21:29,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=56464.0, ans=0.0
+2024-08-26 15:21:31,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56464.0, ans=0.1
+2024-08-26 15:21:41,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=56517.333333333336, ans=0.0
+2024-08-26 15:21:49,388 INFO [train.py:1114] (3/4) Epoch 5, batch 650, loss[loss=0.2535, simple_loss=0.3061, pruned_loss=0.07294, ctc_loss=0.1374, over 19771.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3145, pruned_loss=0.08073, ctc_loss=0.1508, over 3716413.52 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:21:52,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=56570.666666666664, ans=0.04949747468305833
+2024-08-26 15:21:59,899 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.327e+02 1.659e+02 1.803e+02 2.095e+02 3.596e+02, threshold=3.607e+02, percent-clipped=0.0
+2024-08-26 15:22:10,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.82 vs. limit=15.0
+2024-08-26 15:22:11,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=56677.333333333336, ans=0.125
+2024-08-26 15:22:23,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=56730.666666666664, ans=0.125
+2024-08-26 15:22:35,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56784.0, ans=0.125
+2024-08-26 15:22:37,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56784.0, ans=0.1
+2024-08-26 15:22:39,308 INFO [train.py:1114] (3/4) Epoch 5, batch 700, loss[loss=0.2515, simple_loss=0.299, pruned_loss=0.07464, ctc_loss=0.1369, over 19719.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3151, pruned_loss=0.08074, ctc_loss=0.1507, over 3748716.32 frames. ], batch size: 51, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:22:56,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=56890.666666666664, ans=0.0
+2024-08-26 15:23:07,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=56997.333333333336, ans=0.125
+2024-08-26 15:23:27,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57050.666666666664, ans=0.1
+2024-08-26 15:23:29,324 INFO [train.py:1114] (3/4) Epoch 5, batch 750, loss[loss=0.2679, simple_loss=0.318, pruned_loss=0.07967, ctc_loss=0.1459, over 19497.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3146, pruned_loss=0.08067, ctc_loss=0.1508, over 3774382.21 frames. ], batch size: 54, lr: 2.77e-02, grad_scale: 16.0
+2024-08-26 15:23:39,777 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.452e+02 1.732e+02 1.957e+02 2.375e+02 6.184e+02, threshold=3.914e+02, percent-clipped=3.0
+2024-08-26 15:23:42,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=57157.333333333336, ans=0.125
+2024-08-26 15:23:45,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=57157.333333333336, ans=0.2
+2024-08-26 15:24:00,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57264.0, ans=0.1
+2024-08-26 15:24:04,494 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.20 vs. limit=5.0
+2024-08-26 15:24:19,608 INFO [train.py:1114] (3/4) Epoch 5, batch 800, loss[loss=0.2464, simple_loss=0.2955, pruned_loss=0.0723, ctc_loss=0.1317, over 19418.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3147, pruned_loss=0.08072, ctc_loss=0.1509, over 3795967.69 frames. ], batch size: 48, lr: 2.76e-02, grad_scale: 32.0
+2024-08-26 15:24:43,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=15.0
+2024-08-26 15:24:53,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=57530.666666666664, ans=0.0
+2024-08-26 15:24:55,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=57530.666666666664, ans=0.0
+2024-08-26 15:25:10,626 INFO [train.py:1114] (3/4) Epoch 5, batch 850, loss[loss=0.2795, simple_loss=0.3212, pruned_loss=0.08679, ctc_loss=0.1607, over 19656.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3141, pruned_loss=0.08071, ctc_loss=0.1505, over 3814629.94 frames. ], batch size: 59, lr: 2.76e-02, grad_scale: 32.0
+2024-08-26 15:25:24,575 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.744e+02 1.971e+02 2.331e+02 4.591e+02, threshold=3.942e+02, percent-clipped=1.0
+2024-08-26 15:25:36,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57744.0, ans=0.1
+2024-08-26 15:25:42,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.13 vs. limit=15.0
+2024-08-26 15:26:05,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57850.666666666664, ans=0.1
+2024-08-26 15:26:07,614 INFO [train.py:1114] (3/4) Epoch 5, batch 900, loss[loss=0.2415, simple_loss=0.2864, pruned_loss=0.0716, ctc_loss=0.1334, over 19418.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3141, pruned_loss=0.0808, ctc_loss=0.1507, over 3818402.36 frames. ], batch size: 48, lr: 2.75e-02, grad_scale: 32.0
+2024-08-26 15:26:08,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57904.0, ans=0.125
+2024-08-26 15:26:15,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=57904.0, ans=0.0
+2024-08-26 15:26:20,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=57957.333333333336, ans=0.0
+2024-08-26 15:26:27,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57957.333333333336, ans=0.1
+2024-08-26 15:26:46,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=58064.0, ans=0.125
+2024-08-26 15:26:51,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0
+2024-08-26 15:26:55,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0
+2024-08-26 15:26:57,633 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:26:58,249 INFO [train.py:1114] (3/4) Epoch 5, batch 950, loss[loss=0.2486, simple_loss=0.2979, pruned_loss=0.07315, ctc_loss=0.1326, over 19513.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3146, pruned_loss=0.08127, ctc_loss=0.1517, over 3819311.00 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 32.0
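The recurring optim.py WARNINGs summarize gradient-norm statistics over a recent window: five quantiles (min, 25%, median, 75%, max), a clipping threshold, and the share of batches clipped. Each logged threshold is about twice the middle quartile, consistent with Clipping_scale=2.0, i.e. the threshold tracks a running median. A sketch of that scheme; the window size and exact quantile bookkeeping are assumptions, not optim.py's implementation:

```python
# Sketch: clip whole-model gradient norms against clipping_scale times
# the running median of recent norms, and track the statistics that the
# WARNING lines report (quantiles, threshold, percent clipped).
import torch

class GradNormClipper:
    def __init__(self, clipping_scale=2.0, window=128):
        self.scale = clipping_scale
        self.window = window
        self.norms = []
        self.clipped = 0
        self.seen = 0

    def clip_(self, params):
        params = [p for p in params if p.grad is not None]
        # Global norm = sqrt of the sum of squared per-parameter norms.
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms = (self.norms + [norm])[-self.window:]
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.scale * median  # e.g. 2.0 * 1.860e+02 = 3.720e+02
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        return threshold

    def quantiles(self):
        s = sorted(self.norms)
        return [s[int(q * (len(s) - 1))] for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
```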
+2024-08-26 15:27:04,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58170.666666666664, ans=0.1
+2024-08-26 15:27:10,681 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:27:11,440 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.396e+02 1.648e+02 1.859e+02 2.135e+02 3.098e+02, threshold=3.718e+02, percent-clipped=0.0
+2024-08-26 15:27:12,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58224.0, ans=0.1
+2024-08-26 15:27:17,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.62 vs. limit=22.5
+2024-08-26 15:27:18,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=58224.0, ans=0.125
+2024-08-26 15:27:42,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=58384.0, ans=0.0
+2024-08-26 15:27:49,893 INFO [train.py:1114] (3/4) Epoch 5, batch 1000, loss[loss=0.2527, simple_loss=0.3019, pruned_loss=0.07307, ctc_loss=0.1433, over 19854.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.315, pruned_loss=0.08131, ctc_loss=0.1518, over 3815031.46 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 32.0
+2024-08-26 15:27:54,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=58437.333333333336, ans=0.2
+2024-08-26 15:27:57,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0
+2024-08-26 15:28:20,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=58597.333333333336, ans=0.0
+2024-08-26 15:28:40,054 INFO [train.py:1114] (3/4) Epoch 5, batch 1050, loss[loss=0.2569, simple_loss=0.312, pruned_loss=0.07345, ctc_loss=0.1373, over 19846.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3142, pruned_loss=0.08087, ctc_loss=0.1511, over 3822360.93 frames. ], batch size: 57, lr: 2.74e-02, grad_scale: 32.0
+2024-08-26 15:28:46,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.25 vs. limit=15.0
+2024-08-26 15:28:50,852 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.379e+02 1.680e+02 1.893e+02 2.161e+02 3.731e+02, threshold=3.786e+02, percent-clipped=1.0
+2024-08-26 15:28:55,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=58757.333333333336, ans=0.125
+2024-08-26 15:28:57,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=58757.333333333336, ans=0.2
+2024-08-26 15:28:59,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58810.666666666664, ans=0.1
+2024-08-26 15:29:10,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=58864.0, ans=0.1
+2024-08-26 15:29:26,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58917.333333333336, ans=0.125
+2024-08-26 15:29:33,677 INFO [train.py:1114] (3/4) Epoch 5, batch 1100, loss[loss=0.2727, simple_loss=0.313, pruned_loss=0.08485, ctc_loss=0.1569, over 19583.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3135, pruned_loss=0.08026, ctc_loss=0.1503, over 3828880.11 frames. ], batch size: 52, lr: 2.74e-02, grad_scale: 16.0
+2024-08-26 15:29:34,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=58970.666666666664, ans=0.125
+2024-08-26 15:29:35,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=58970.666666666664, ans=0.125
+2024-08-26 15:29:39,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=58970.666666666664, ans=0.0
+2024-08-26 15:29:42,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=59024.0, ans=0.125
+2024-08-26 15:29:59,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=59077.333333333336, ans=0.5
+2024-08-26 15:30:11,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=59130.666666666664, ans=15.0
+2024-08-26 15:30:13,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.63 vs. limit=22.5
+2024-08-26 15:30:14,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=59184.0, ans=0.125
+2024-08-26 15:30:21,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.82 vs. limit=15.0
+2024-08-26 15:30:21,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=59184.0, ans=0.125
+2024-08-26 15:30:24,333 INFO [train.py:1114] (3/4) Epoch 5, batch 1150, loss[loss=0.2509, simple_loss=0.3023, pruned_loss=0.07301, ctc_loss=0.1337, over 19581.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3135, pruned_loss=0.08029, ctc_loss=0.1501, over 3827292.51 frames. ], batch size: 52, lr: 2.73e-02, grad_scale: 16.0
+2024-08-26 15:30:27,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59237.333333333336, ans=0.1
+2024-08-26 15:30:31,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59237.333333333336, ans=0.1
+2024-08-26 15:30:31,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=12.0
+2024-08-26 15:30:35,929 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.334e+02 1.591e+02 1.744e+02 2.042e+02 4.394e+02, threshold=3.489e+02, percent-clipped=2.0
+2024-08-26 15:30:37,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=59290.666666666664, ans=0.125
+2024-08-26 15:30:42,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=59290.666666666664, ans=0.0
+2024-08-26 15:30:51,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=59344.0, ans=0.05
+2024-08-26 15:31:15,278 INFO [train.py:1114] (3/4) Epoch 5, batch 1200, loss[loss=0.2921, simple_loss=0.3371, pruned_loss=0.08957, ctc_loss=0.1699, over 19834.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3147, pruned_loss=0.08092, ctc_loss=0.1514, over 3823627.46 frames. ], batch size: 57, lr: 2.73e-02, grad_scale: 32.0
+2024-08-26 15:31:21,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.75 vs. limit=22.5
+2024-08-26 15:31:23,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=59504.0, ans=0.025
+2024-08-26 15:31:32,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0
+2024-08-26 15:31:33,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=59557.333333333336, ans=0.025
+2024-08-26 15:31:34,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0
+2024-08-26 15:31:41,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=59610.666666666664, ans=0.0
+2024-08-26 15:31:47,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=59664.0, ans=0.05
+2024-08-26 15:31:51,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=59664.0, ans=0.07
+2024-08-26 15:31:57,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=59717.333333333336, ans=0.0
+2024-08-26 15:32:06,281 INFO [train.py:1114] (3/4) Epoch 5, batch 1250, loss[loss=0.3162, simple_loss=0.3519, pruned_loss=0.1037, ctc_loss=0.1825, over 19515.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3148, pruned_loss=0.08053, ctc_loss=0.1505, over 3842236.16 frames. ], batch size: 61, lr: 2.72e-02, grad_scale: 32.0
+2024-08-26 15:32:18,024 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 1.635e+02 1.798e+02 2.001e+02 4.301e+02, threshold=3.596e+02, percent-clipped=1.0
+2024-08-26 15:32:23,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=59824.0, ans=0.0
+2024-08-26 15:32:24,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=59877.333333333336, ans=0.125
+2024-08-26 15:32:37,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0
+2024-08-26 15:32:39,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59930.666666666664, ans=0.1
+2024-08-26 15:32:52,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.96 vs. limit=10.0
+2024-08-26 15:32:56,472 INFO [train.py:1114] (3/4) Epoch 5, batch 1300, loss[loss=0.2869, simple_loss=0.3258, pruned_loss=0.0913, ctc_loss=0.1635, over 18794.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.314, pruned_loss=0.08026, ctc_loss=0.1501, over 3846118.66 frames. ], batch size: 76, lr: 2.72e-02, grad_scale: 32.0
+2024-08-26 15:33:12,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=60090.666666666664, ans=0.0
+2024-08-26 15:33:17,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=60144.0, ans=0.025
+2024-08-26 15:33:20,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.51 vs. limit=22.5
+2024-08-26 15:33:20,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.43 vs. limit=15.0
+2024-08-26 15:33:43,912 INFO [train.py:1114] (3/4) Epoch 5, batch 1350, loss[loss=0.2454, simple_loss=0.2992, pruned_loss=0.06925, ctc_loss=0.1327, over 19778.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3128, pruned_loss=0.07963, ctc_loss=0.149, over 3857716.81 frames. ], batch size: 54, lr: 2.71e-02, grad_scale: 32.0
+2024-08-26 15:33:55,388 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.610e+02 1.752e+02 1.989e+02 4.527e+02, threshold=3.503e+02, percent-clipped=1.0
+2024-08-26 15:34:03,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=60357.333333333336, ans=0.125
+2024-08-26 15:34:17,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=60464.0, ans=0.2
+2024-08-26 15:34:33,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60570.666666666664, ans=0.1
+2024-08-26 15:34:34,572 INFO [train.py:1114] (3/4) Epoch 5, batch 1400, loss[loss=0.2365, simple_loss=0.2843, pruned_loss=0.06848, ctc_loss=0.1293, over 19634.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3128, pruned_loss=0.07958, ctc_loss=0.1488, over 3864927.74 frames. ], batch size: 46, lr: 2.71e-02, grad_scale: 32.0
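The scaling.py:1024 Whitening lines compare a per-module statistic against a limit (e.g. metric=9.96 vs. limit=10.0 just above). The statistic measures how far the covariance of a group of activations is from isotropic: with covariance eigenvalues lam_i, the ratio mean(lam_i^2) / mean(lam_i)^2 equals 1.0 for a perfectly white distribution and grows as variance concentrates in fewer directions. A sketch of the statistic using trace identities to avoid an explicit eigendecomposition; how the module then acts on the gradient when the limit is exceeded is assumed here, not copied from scaling.py:

```python
# Sketch of the whitening metric: E[lam^2] / E[lam]^2 over covariance
# eigenvalues, computed via mean diagonals of cov and cov @ cov.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels); channels are split into groups.
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n        # per-group covariance
    mean_eig = torch.diagonal(cov, dim1=1, dim2=2).mean()          # E[lam]
    mean_eig_sq = torch.diagonal(cov @ cov, dim1=1, dim2=2).mean() # E[lam^2]
    return (mean_eig_sq / mean_eig.pow(2)).item()

x = torch.randn(1000, 128)   # roughly white activations
print(whitening_metric(x))   # near 1.0, comfortably under a limit like 5.0
```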
+2024-08-26 15:34:34,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=60570.666666666664, ans=0.0
+2024-08-26 15:34:34,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=60570.666666666664, ans=0.125
+2024-08-26 15:34:36,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=60570.666666666664, ans=0.125
+2024-08-26 15:34:48,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0
+2024-08-26 15:35:09,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60730.666666666664, ans=0.1
+2024-08-26 15:35:16,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=60730.666666666664, ans=0.125
+2024-08-26 15:35:27,698 INFO [train.py:1114] (3/4) Epoch 5, batch 1450, loss[loss=0.3099, simple_loss=0.3509, pruned_loss=0.09827, ctc_loss=0.1809, over 19664.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3136, pruned_loss=0.08002, ctc_loss=0.1495, over 3862918.22 frames. ], batch size: 63, lr: 2.71e-02, grad_scale: 32.0
+2024-08-26 15:35:36,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60890.666666666664, ans=0.1
+2024-08-26 15:35:42,504 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.441e+02 1.680e+02 1.820e+02 2.123e+02 3.172e+02, threshold=3.639e+02, percent-clipped=0.0
+2024-08-26 15:35:49,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.35 vs. limit=15.0
+2024-08-26 15:36:01,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=60997.333333333336, ans=0.2
+2024-08-26 15:36:19,834 INFO [train.py:1114] (3/4) Epoch 5, batch 1500, loss[loss=0.2805, simple_loss=0.3282, pruned_loss=0.08571, ctc_loss=0.1535, over 19600.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3138, pruned_loss=0.07999, ctc_loss=0.1491, over 3862222.53 frames. ], batch size: 57, lr: 2.70e-02, grad_scale: 32.0
+2024-08-26 15:36:25,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=61104.0, ans=0.025
+2024-08-26 15:36:28,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0
+2024-08-26 15:36:29,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.98 vs. limit=6.0
+2024-08-26 15:36:33,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=15.0
+2024-08-26 15:36:47,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=61210.666666666664, ans=0.95
+2024-08-26 15:36:53,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=61264.0, ans=0.125
+2024-08-26 15:37:01,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.13 vs. limit=22.5
+2024-08-26 15:37:03,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=61317.333333333336, ans=0.0
+2024-08-26 15:37:09,980 INFO [train.py:1114] (3/4) Epoch 5, batch 1550, loss[loss=0.2686, simple_loss=0.3198, pruned_loss=0.07904, ctc_loss=0.1482, over 19602.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3139, pruned_loss=0.08029, ctc_loss=0.1501, over 3846393.16 frames. ], batch size: 60, lr: 2.70e-02, grad_scale: 16.0
+2024-08-26 15:37:18,323 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.73 vs. limit=6.0
+2024-08-26 15:37:20,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=61424.0, ans=0.5
+2024-08-26 15:37:22,484 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.359e+02 1.752e+02 1.975e+02 2.269e+02 3.644e+02, threshold=3.951e+02, percent-clipped=1.0
+2024-08-26 15:37:32,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=61477.333333333336, ans=0.125
+2024-08-26 15:37:35,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=61477.333333333336, ans=0.025
+2024-08-26 15:37:42,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=61530.666666666664, ans=0.0
+2024-08-26 15:37:57,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=61584.0, ans=0.5
+2024-08-26 15:38:02,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=61584.0, ans=0.2
+2024-08-26 15:38:03,699 INFO [train.py:1114] (3/4) Epoch 5, batch 1600, loss[loss=0.271, simple_loss=0.3264, pruned_loss=0.07905, ctc_loss=0.1438, over 19830.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3138, pruned_loss=0.08047, ctc_loss=0.1503, over 3834995.66 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 32.0
+2024-08-26 15:38:04,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=61637.333333333336, ans=0.125
+2024-08-26 15:38:22,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=61637.333333333336, ans=0.0
+2024-08-26 15:38:58,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61744.0, ans=0.1
+2024-08-26 15:39:10,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=61850.666666666664, ans=0.125
+2024-08-26 15:39:19,219 INFO [train.py:1114] (3/4) Epoch 5, batch 1650, loss[loss=0.2747, simple_loss=0.3259, pruned_loss=0.08197, ctc_loss=0.1488, over 19679.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3135, pruned_loss=0.0803, ctc_loss=0.1502, over 3830975.06 frames. ], batch size: 59, lr: 2.69e-02, grad_scale: 32.0
+2024-08-26 15:39:19,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=61904.0, ans=0.5
+2024-08-26 15:39:25,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0
+2024-08-26 15:39:30,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61957.333333333336, ans=0.1
+2024-08-26 15:39:31,766 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.578e+02 1.738e+02 2.103e+02 3.628e+02, threshold=3.475e+02, percent-clipped=0.0
+2024-08-26 15:39:36,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=15.0
+2024-08-26 15:39:46,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62010.666666666664, ans=0.125
+2024-08-26 15:39:51,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=62064.0, ans=0.125
+2024-08-26 15:40:01,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=62117.333333333336, ans=0.125
+2024-08-26 15:40:04,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=62117.333333333336, ans=0.0
+2024-08-26 15:40:08,748 INFO [train.py:1114] (3/4) Epoch 5, batch 1700, loss[loss=0.2362, simple_loss=0.2812, pruned_loss=0.06903, ctc_loss=0.1328, over 19650.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3131, pruned_loss=0.07973, ctc_loss=0.149, over 3845488.88 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 32.0
+2024-08-26 15:40:16,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=62170.666666666664, ans=0.0
+2024-08-26 15:40:26,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.00 vs. limit=15.0
+2024-08-26 15:40:32,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.32 vs. limit=22.5
+2024-08-26 15:40:36,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=62330.666666666664, ans=0.0
+2024-08-26 15:40:43,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=62330.666666666664, ans=0.05
+2024-08-26 15:40:54,048 INFO [train.py:1114] (3/4) Epoch 5, batch 1750, loss[loss=0.213, simple_loss=0.272, pruned_loss=0.05579, ctc_loss=0.1059, over 19660.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3126, pruned_loss=0.07936, ctc_loss=0.1486, over 3850086.81 frames. ], batch size: 45, lr: 2.68e-02, grad_scale: 32.0
+2024-08-26 15:41:03,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=62490.666666666664, ans=0.09899494936611666
+2024-08-26 15:41:05,749 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.599e+02 1.842e+02 2.097e+02 3.191e+02, threshold=3.683e+02, percent-clipped=0.0
+2024-08-26 15:41:30,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.36 vs. limit=22.5
+2024-08-26 15:41:39,327 INFO [train.py:1114] (3/4) Epoch 5, batch 1800, loss[loss=0.256, simple_loss=0.3143, pruned_loss=0.07255, ctc_loss=0.1315, over 19601.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3133, pruned_loss=0.07974, ctc_loss=0.149, over 3852345.49 frames. ], batch size: 55, lr: 2.68e-02, grad_scale: 32.0
+2024-08-26 15:41:48,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62757.333333333336, ans=0.125
+2024-08-26 15:41:53,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62757.333333333336, ans=0.1
+2024-08-26 15:41:53,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=62757.333333333336, ans=0.0
+2024-08-26 15:41:57,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.85 vs. limit=15.0
+2024-08-26 15:41:58,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=62810.666666666664, ans=0.95
+2024-08-26 15:42:10,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=62864.0, ans=0.0
+2024-08-26 15:42:12,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.48 vs. limit=22.5
+2024-08-26 15:42:14,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62917.333333333336, ans=0.125
+2024-08-26 15:42:24,250 INFO [train.py:1114] (3/4) Epoch 5, batch 1850, loss[loss=0.2896, simple_loss=0.3364, pruned_loss=0.08886, ctc_loss=0.1626, over 19604.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.313, pruned_loss=0.07949, ctc_loss=0.1485, over 3855212.65 frames. ], batch size: 57, lr: 2.67e-02, grad_scale: 32.0
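The lr column decays smoothly with batch count (3.07e-02 early in this stretch down to 2.67e-02 here) and also drops discretely at the epoch boundary (3.05e-02 at the end of Epoch 4, 2.83e-02 at Epoch 5, batch 0), i.e. the schedule is a function of both step and epoch. A sketch of an Eden-style rule with that shape; base_lr, lr_batches and lr_epochs below are illustrative guesses, not this run's configuration:

```python
# Sketch: learning rate as a product of two inverse-fourth-root factors,
# one in the batch count and one in the epoch, which yields both the
# smooth within-epoch decay and the step change at each new epoch.
def eden_lr(base_lr, step, epoch, lr_batches=5000.0, lr_epochs=3.5):
    batch_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Crossing an epoch boundary at a nearly constant step count reproduces
# the discrete drop seen in the log (ratios, not exact values):
for epoch, step in [(4, 9600), (5, 9700)]:
    print(epoch, f"{eden_lr(0.045, step, epoch):.2e}")
```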
+2024-08-26 15:42:28,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=62970.666666666664, ans=0.09899494936611666
+2024-08-26 15:42:29,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62970.666666666664, ans=0.125
+2024-08-26 15:42:34,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=63024.0, ans=0.125
+2024-08-26 15:42:34,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.53 vs. limit=15.0
+2024-08-26 15:42:35,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=63024.0, ans=0.0
+2024-08-26 15:42:35,859 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.605e+02 1.818e+02 2.016e+02 3.945e+02, threshold=3.637e+02, percent-clipped=1.0
+2024-08-26 15:42:45,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0
+2024-08-26 15:42:55,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=63130.666666666664, ans=0.2
+2024-08-26 15:42:57,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=63130.666666666664, ans=0.0
+2024-08-26 15:43:03,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=63130.666666666664, ans=0.0
+2024-08-26 15:43:07,284 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.74 vs. limit=15.0
+2024-08-26 15:43:08,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=63184.0, ans=0.0
+2024-08-26 15:43:16,409 INFO [train.py:1114] (3/4) Epoch 5, batch 1900, loss[loss=0.2687, simple_loss=0.3287, pruned_loss=0.07592, ctc_loss=0.1424, over 19617.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3133, pruned_loss=0.07936, ctc_loss=0.1483, over 3860616.91 frames. ], batch size: 59, lr: 2.67e-02, grad_scale: 16.0
+2024-08-26 15:43:36,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63290.666666666664, ans=0.1
+2024-08-26 15:43:39,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=63344.0, ans=0.0
+2024-08-26 15:43:49,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=63397.333333333336, ans=0.05
+2024-08-26 15:43:59,472 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:44:01,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.50 vs. limit=15.0
+2024-08-26 15:44:05,665 INFO [train.py:1114] (3/4) Epoch 5, batch 1950, loss[loss=0.2376, simple_loss=0.2919, pruned_loss=0.06678, ctc_loss=0.1243, over 19587.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3139, pruned_loss=0.07915, ctc_loss=0.148, over 3869650.84 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 16.0
+2024-08-26 15:44:09,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=63504.0, ans=0.0
+2024-08-26 15:44:09,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.33 vs. limit=22.5
+2024-08-26 15:44:14,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=63557.333333333336, ans=0.125
+2024-08-26 15:44:20,102 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.677e+02 1.824e+02 1.963e+02 3.212e+02, threshold=3.647e+02, percent-clipped=0.0
+2024-08-26 15:44:29,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63610.666666666664, ans=0.1
+2024-08-26 15:44:29,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.05 vs. limit=22.5
+2024-08-26 15:44:36,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=63664.0, ans=0.125
+2024-08-26 15:44:36,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63664.0, ans=0.1
+2024-08-26 15:44:47,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=63717.333333333336, ans=0.125
+2024-08-26 15:44:48,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=63717.333333333336, ans=0.125
+2024-08-26 15:44:52,294 INFO [train.py:1114] (3/4) Epoch 5, batch 2000, loss[loss=0.2632, simple_loss=0.297, pruned_loss=0.08497, ctc_loss=0.1487, over 19657.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3147, pruned_loss=0.07992, ctc_loss=0.1494, over 3856342.36 frames. ], batch size: 45, lr: 2.66e-02, grad_scale: 32.0
+2024-08-26 15:44:59,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.28 vs. limit=15.0
+2024-08-26 15:45:04,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.65 vs. limit=15.0
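Each optim.py warning above prints the (min, 25%, 50%, 75%, max) quartiles of recent gradient norms plus the clipping threshold; in every logged line the threshold is almost exactly 2.0 times the median (e.g. 3.683e+02 vs. 2 x 1.842e+02), matching Clipping_scale=2.0. A sketch of that bookkeeping, with the window of retained norms as an assumption since the log does not show it:

```python
import torch

def clipping_stats(grad_norms: list, clipping_scale: float = 2.0):
    # grad_norms: recent per-batch gradient norms (window size assumed).
    q = torch.quantile(torch.tensor(grad_norms),
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()   # 2.0 x median, as logged
    clipped = sum(1 for g in grad_norms if g > threshold)
    percent_clipped = 100.0 * clipped / len(grad_norms)
    return q.tolist(), threshold, percent_clipped
```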
+2024-08-26 15:45:07,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63824.0, ans=0.1
+2024-08-26 15:45:16,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=63877.333333333336, ans=0.2
+2024-08-26 15:45:23,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=63930.666666666664, ans=0.125
+2024-08-26 15:45:28,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=63984.0, ans=0.2
+2024-08-26 15:45:28,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=63984.0, ans=0.125
+2024-08-26 15:45:42,246 INFO [train.py:1114] (3/4) Epoch 5, batch 2050, loss[loss=0.2306, simple_loss=0.2772, pruned_loss=0.06714, ctc_loss=0.1243, over 19708.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3137, pruned_loss=0.07951, ctc_loss=0.1488, over 3853329.76 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 32.0
+2024-08-26 15:45:43,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64037.333333333336, ans=0.1
+2024-08-26 15:45:44,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=64037.333333333336, ans=0.0
+2024-08-26 15:45:54,616 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.354e+02 1.624e+02 1.773e+02 2.077e+02 3.322e+02, threshold=3.546e+02, percent-clipped=0.0
+2024-08-26 15:46:02,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0
+2024-08-26 15:46:03,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=64144.0, ans=0.125
+2024-08-26 15:46:20,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64250.666666666664, ans=0.125
+2024-08-26 15:46:26,370 INFO [train.py:1114] (3/4) Epoch 5, batch 2100, loss[loss=0.2432, simple_loss=0.2978, pruned_loss=0.06902, ctc_loss=0.1265, over 19774.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3123, pruned_loss=0.07858, ctc_loss=0.1469, over 3860031.85 frames. ], batch size: 54, lr: 2.65e-02, grad_scale: 32.0
+2024-08-26 15:46:38,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.33 vs. limit=22.5
+2024-08-26 15:46:42,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=64410.666666666664, ans=0.125
+2024-08-26 15:46:45,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.25 vs. limit=22.5
+2024-08-26 15:46:50,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=64410.666666666664, ans=0.125
+2024-08-26 15:46:52,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=64464.0, ans=0.2
+2024-08-26 15:46:54,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.37 vs. limit=22.5
+2024-08-26 15:47:13,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=64464.0, ans=0.2
+2024-08-26 15:47:13,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.05 vs. limit=22.5
+2024-08-26 15:47:23,510 INFO [train.py:1114] (3/4) Epoch 5, batch 2150, loss[loss=0.25, simple_loss=0.3013, pruned_loss=0.07226, ctc_loss=0.1352, over 19858.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.311, pruned_loss=0.07821, ctc_loss=0.1461, over 3871060.79 frames. ], batch size: 52, lr: 2.65e-02, grad_scale: 32.0
+2024-08-26 15:47:23,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=64570.666666666664, ans=0.2
+2024-08-26 15:47:30,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64570.666666666664, ans=0.0
+2024-08-26 15:47:31,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=64624.0, ans=0.025
+2024-08-26 15:47:35,828 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.599e+02 1.757e+02 2.074e+02 2.995e+02, threshold=3.513e+02, percent-clipped=0.0
+2024-08-26 15:47:39,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=64624.0, ans=22.5
+2024-08-26 15:47:40,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=64677.333333333336, ans=0.0
+2024-08-26 15:47:52,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=64730.666666666664, ans=0.125
+2024-08-26 15:48:07,245 INFO [train.py:1114] (3/4) Epoch 5, batch 2200, loss[loss=0.2567, simple_loss=0.3165, pruned_loss=0.07129, ctc_loss=0.1357, over 19586.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3116, pruned_loss=0.07844, ctc_loss=0.1464, over 3868287.77 frames. ], batch size: 57, lr: 2.65e-02, grad_scale: 32.0
+2024-08-26 15:48:23,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=64890.666666666664, ans=0.0
+2024-08-26 15:48:27,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0
+2024-08-26 15:48:40,990 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 15:48:42,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=65050.666666666664, ans=0.0
+2024-08-26 15:48:43,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=65050.666666666664, ans=0.125
+2024-08-26 15:48:52,505 INFO [train.py:1114] (3/4) Epoch 5, batch 2250, loss[loss=0.2556, simple_loss=0.3116, pruned_loss=0.07293, ctc_loss=0.1345, over 19612.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3114, pruned_loss=0.07822, ctc_loss=0.1459, over 3867721.41 frames. ], batch size: 55, lr: 2.64e-02, grad_scale: 16.0
+2024-08-26 15:48:59,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=65104.0, ans=0.04949747468305833
+2024-08-26 15:49:05,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.368e+02 1.721e+02 2.056e+02 2.448e+02 6.138e+02, threshold=4.112e+02, percent-clipped=3.0
+2024-08-26 15:49:07,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65157.333333333336, ans=0.125
+2024-08-26 15:49:08,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=65157.333333333336, ans=0.125
+2024-08-26 15:49:09,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=65210.666666666664, ans=0.2
+2024-08-26 15:49:22,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=65264.0, ans=0.0
+2024-08-26 15:49:24,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=65264.0, ans=0.025
+2024-08-26 15:49:35,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=65370.666666666664, ans=0.125
+2024-08-26 15:49:36,498 INFO [train.py:1114] (3/4) Epoch 5, batch 2300, loss[loss=0.2553, simple_loss=0.3056, pruned_loss=0.07455, ctc_loss=0.1395, over 19506.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3109, pruned_loss=0.07827, ctc_loss=0.1462, over 3862186.62 frames. ], batch size: 49, lr: 2.64e-02, grad_scale: 16.0
+2024-08-26 15:50:04,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=65530.666666666664, ans=0.125
+2024-08-26 15:50:09,445 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.98 vs. limit=15.0
+2024-08-26 15:50:10,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.97 vs. limit=22.5
+2024-08-26 15:50:15,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=65584.0, ans=0.0
+2024-08-26 15:50:16,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=65584.0, ans=0.07
+2024-08-26 15:50:18,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65584.0, ans=0.1
+2024-08-26 15:50:23,058 INFO [train.py:1114] (3/4) Epoch 5, batch 2350, loss[loss=0.2825, simple_loss=0.332, pruned_loss=0.08436, ctc_loss=0.1607, over 19668.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3106, pruned_loss=0.07843, ctc_loss=0.1464, over 3864782.88 frames. ], batch size: 63, lr: 2.63e-02, grad_scale: 16.0
+2024-08-26 15:50:25,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=65637.33333333333, ans=0.125
+2024-08-26 15:50:30,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=65637.33333333333, ans=0.2
+2024-08-26 15:50:36,080 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.568e+02 1.781e+02 2.033e+02 3.218e+02, threshold=3.561e+02, percent-clipped=0.0
+2024-08-26 15:50:51,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=65797.33333333333, ans=0.125
+2024-08-26 15:50:52,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=65797.33333333333, ans=0.125
+2024-08-26 15:50:56,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65797.33333333333, ans=0.125
+2024-08-26 15:51:04,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=65850.66666666667, ans=0.0
+2024-08-26 15:51:06,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=65904.0, ans=0.125
+2024-08-26 15:51:07,080 INFO [train.py:1114] (3/4) Epoch 5, batch 2400, loss[loss=0.2877, simple_loss=0.3351, pruned_loss=0.08894, ctc_loss=0.1557, over 19361.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3133, pruned_loss=0.07959, ctc_loss=0.1482, over 3859883.67 frames. ], batch size: 71, lr: 2.63e-02, grad_scale: 32.0
+2024-08-26 15:51:10,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=65904.0, ans=0.125
+2024-08-26 15:51:33,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.28 vs. limit=15.0
+2024-08-26 15:51:52,366 INFO [train.py:1114] (3/4) Epoch 5, batch 2450, loss[loss=0.3679, simple_loss=0.3654, pruned_loss=0.1348, ctc_loss=0.2524, over 14190.00 frames. ], tot_loss[loss=0.2744, simple_loss=0.3184, pruned_loss=0.0839, ctc_loss=0.1564, over 3732592.76 frames. ], batch size: 140, lr: 2.63e-02, grad_scale: 16.0
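At the epoch 5 -> 6 boundary below, train.py reports a batch-0 tot_loss equal to the batch loss, then a validation pass, then resumes with tot_loss re-accumulating from a small frame count (844717.56 frames at batch 50 vs. ~3.7M late in epoch 5). That is consistent with tot_loss being a frame-weighted running average that restarts each epoch; a minimal sketch (the exact smoothing in train.py may differ):

```python
# Frame-weighted running average behind the tot_loss[...] fields here.
class RunningLoss:
    def __init__(self):
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.weighted_sum += batch_loss * batch_frames
        self.frames += batch_frames
        return self.weighted_sum / self.frames  # printed as tot_loss
```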
+2024-08-26 15:52:04,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=66224.0, ans=0.125
+2024-08-26 15:52:07,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.477e+02 1.716e+02 1.912e+02 2.213e+02 5.978e+02, threshold=3.825e+02, percent-clipped=3.0
+2024-08-26 15:53:42,770 INFO [train.py:1114] (3/4) Epoch 6, batch 0, loss[loss=0.2725, simple_loss=0.3086, pruned_loss=0.08669, ctc_loss=0.1575, over 19443.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3086, pruned_loss=0.08669, ctc_loss=0.1575, over 19443.00 frames. ], batch size: 48, lr: 2.45e-02, grad_scale: 32.0
+2024-08-26 15:53:42,771 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-26 15:54:26,137 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.2162, simple_loss=0.3022, pruned_loss=0.04785, ctc_loss=0.08613, over 944034.00 frames.
+2024-08-26 15:54:26,138 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12756MB
+2024-08-26 15:54:38,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=66432.0, ans=0.125
+2024-08-26 15:54:41,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66432.0, ans=0.125
+2024-08-26 15:54:42,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.44 vs. limit=15.0
+2024-08-26 15:54:47,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.05 vs. limit=15.0
+2024-08-26 15:54:50,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=66485.33333333333, ans=0.2
+2024-08-26 15:54:55,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=66538.66666666667, ans=0.1
+2024-08-26 15:55:02,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=66538.66666666667, ans=0.0
+2024-08-26 15:55:06,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=66592.0, ans=0.125
+2024-08-26 15:55:13,609 INFO [train.py:1114] (3/4) Epoch 6, batch 50, loss[loss=0.2231, simple_loss=0.2761, pruned_loss=0.0614, ctc_loss=0.1182, over 19697.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3135, pruned_loss=0.07914, ctc_loss=0.1499, over 844717.56 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0
+2024-08-26 15:55:13,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=66645.33333333333, ans=0.125
+2024-08-26 15:55:30,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=66698.66666666667, ans=0.125
+2024-08-26 15:55:38,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66752.0, ans=0.1
+2024-08-26 15:55:39,181 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.306e+02 1.607e+02 1.759e+02 1.997e+02 3.496e+02, threshold=3.518e+02, percent-clipped=0.0
+2024-08-26 15:55:42,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=66805.33333333333, ans=0.125
+2024-08-26 15:55:55,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.10 vs. limit=12.0
+2024-08-26 15:56:03,203 INFO [train.py:1114] (3/4) Epoch 6, batch 100, loss[loss=0.2196, simple_loss=0.282, pruned_loss=0.05792, ctc_loss=0.1035, over 19740.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3157, pruned_loss=0.07953, ctc_loss=0.1491, over 1499352.90 frames. ], batch size: 51, lr: 2.44e-02, grad_scale: 32.0
+2024-08-26 15:56:38,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=67072.0, ans=0.0
+2024-08-26 15:56:42,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=67072.0, ans=0.125
+2024-08-26 15:56:57,271 INFO [train.py:1114] (3/4) Epoch 6, batch 150, loss[loss=0.2072, simple_loss=0.2648, pruned_loss=0.05398, ctc_loss=0.1041, over 19692.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3127, pruned_loss=0.07797, ctc_loss=0.1457, over 2028735.89 frames. ], batch size: 47, lr: 2.44e-02, grad_scale: 32.0
+2024-08-26 15:56:58,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67178.66666666667, ans=0.1
+2024-08-26 15:57:09,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.10 vs. limit=15.0
+2024-08-26 15:57:22,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.333e+02 1.584e+02 1.709e+02 1.986e+02 2.973e+02, threshold=3.418e+02, percent-clipped=0.0
+2024-08-26 15:57:27,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=67338.66666666667, ans=0.125
+2024-08-26 15:57:44,460 INFO [train.py:1114] (3/4) Epoch 6, batch 200, loss[loss=0.2771, simple_loss=0.328, pruned_loss=0.08178, ctc_loss=0.1566, over 18149.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3104, pruned_loss=0.07693, ctc_loss=0.1437, over 2435769.68 frames. ], batch size: 85, lr: 2.43e-02, grad_scale: 32.0
+2024-08-26 15:57:51,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=67445.33333333333, ans=0.0
+2024-08-26 15:57:56,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.37 vs. limit=15.0
+2024-08-26 15:58:03,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67552.0, ans=0.1
+2024-08-26 15:58:04,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=67552.0, ans=0.125
+2024-08-26 15:58:09,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.59 vs. limit=15.0
+2024-08-26 15:58:10,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67552.0, ans=0.125
+2024-08-26 15:58:11,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67552.0, ans=0.125
+2024-08-26 15:58:16,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=67605.33333333333, ans=0.125
+2024-08-26 15:58:25,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.67 vs. limit=15.0
+2024-08-26 15:58:27,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=67658.66666666667, ans=0.125
+2024-08-26 15:58:29,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=67658.66666666667, ans=10.0
+2024-08-26 15:58:36,020 INFO [train.py:1114] (3/4) Epoch 6, batch 250, loss[loss=0.2568, simple_loss=0.3118, pruned_loss=0.07341, ctc_loss=0.1375, over 19379.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3094, pruned_loss=0.07596, ctc_loss=0.1422, over 2756426.68 frames. ], batch size: 67, lr: 2.43e-02, grad_scale: 32.0
+2024-08-26 15:58:39,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=67712.0, ans=0.125
+2024-08-26 15:59:06,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=67818.66666666667, ans=0.07
+2024-08-26 15:59:10,420 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.703e+02 1.915e+02 3.590e+02, threshold=3.407e+02, percent-clipped=1.0
+2024-08-26 15:59:20,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=67872.0, ans=0.025
+2024-08-26 15:59:28,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=67925.33333333333, ans=0.0
+2024-08-26 15:59:33,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=67925.33333333333, ans=0.2
+2024-08-26 15:59:35,265 INFO [train.py:1114] (3/4) Epoch 6, batch 300, loss[loss=0.2781, simple_loss=0.3314, pruned_loss=0.08137, ctc_loss=0.155, over 19516.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3079, pruned_loss=0.07539, ctc_loss=0.141, over 3000834.48 frames. ], batch size: 61, lr: 2.43e-02, grad_scale: 32.0
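The Whitening lines compare a per-module statistic of the activation covariance against a scheduled limit (the limit itself shows up above as a ScheduledFloat named whitening_limit); modules whose metric exceeds the limit are nudged back toward a more isotropic covariance. A generic version of such a metric, assuming a ratio-of-eigenvalue-moments form that equals 1.0 for perfectly white features; the exact statistic in scaling.py may differ:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels) activations for one module.
    # Assumed form: E[eig^2] / E[eig]^2 over the per-group covariance
    # eigenvalues -- 1.0 for an isotropic covariance, large when energy
    # concentrates in a few directions. scaling.py may differ in detail.
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    metrics = []
    for g in range(num_groups):
        cov = x[:, g, :].t() @ x[:, g, :] / num_frames
        eigs = torch.linalg.eigvalsh(cov)
        metrics.append(((eigs ** 2).mean() / eigs.mean() ** 2).item())
    return sum(metrics) / num_groups
```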
+2024-08-26 15:59:43,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67978.66666666667, ans=0.125
+2024-08-26 15:59:48,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=68032.0, ans=0.125
+2024-08-26 16:00:02,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68085.33333333333, ans=0.0
+2024-08-26 16:00:07,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68138.66666666667, ans=0.0
+2024-08-26 16:00:20,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0
+2024-08-26 16:00:21,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=68192.0, ans=0.125
+2024-08-26 16:00:24,093 INFO [train.py:1114] (3/4) Epoch 6, batch 350, loss[loss=0.2203, simple_loss=0.2776, pruned_loss=0.05933, ctc_loss=0.1109, over 19773.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3077, pruned_loss=0.07512, ctc_loss=0.1403, over 3190322.04 frames. ], batch size: 48, lr: 2.42e-02, grad_scale: 32.0
+2024-08-26 16:00:34,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=68298.66666666667, ans=0.0
+2024-08-26 16:00:46,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=68352.0, ans=0.2
+2024-08-26 16:00:46,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=68352.0, ans=0.0
+2024-08-26 16:00:49,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.625e+02 1.872e+02 2.224e+02 3.924e+02, threshold=3.744e+02, percent-clipped=2.0
+2024-08-26 16:00:55,897 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.90 vs. limit=10.0
+2024-08-26 16:00:59,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=68405.33333333333, ans=0.05
+2024-08-26 16:01:04,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.02 vs. limit=15.0
+2024-08-26 16:01:06,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=68458.66666666667, ans=0.125
+2024-08-26 16:01:11,316 INFO [train.py:1114] (3/4) Epoch 6, batch 400, loss[loss=0.2531, simple_loss=0.3097, pruned_loss=0.07152, ctc_loss=0.1336, over 19500.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3068, pruned_loss=0.07465, ctc_loss=0.1396, over 3341848.39 frames. ], batch size: 54, lr: 2.42e-02, grad_scale: 32.0
+2024-08-26 16:01:31,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=68618.66666666667, ans=0.0
+2024-08-26 16:01:42,688 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:02:07,104 INFO [train.py:1114] (3/4) Epoch 6, batch 450, loss[loss=0.234, simple_loss=0.3014, pruned_loss=0.06028, ctc_loss=0.1151, over 19615.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3072, pruned_loss=0.07486, ctc_loss=0.1398, over 3450445.40 frames. ], batch size: 55, lr: 2.42e-02, grad_scale: 32.0
+2024-08-26 16:02:09,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=68778.66666666667, ans=0.0
+2024-08-26 16:02:34,040 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.611e+02 1.799e+02 2.140e+02 4.925e+02, threshold=3.597e+02, percent-clipped=1.0
+2024-08-26 16:02:40,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68938.66666666667, ans=0.125
+2024-08-26 16:02:52,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=68992.0, ans=0.5
+2024-08-26 16:02:55,682 INFO [train.py:1114] (3/4) Epoch 6, batch 500, loss[loss=0.2561, simple_loss=0.3155, pruned_loss=0.0718, ctc_loss=0.1328, over 19677.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3059, pruned_loss=0.07412, ctc_loss=0.1388, over 3545248.39 frames. ], batch size: 63, lr: 2.41e-02, grad_scale: 32.0
+2024-08-26 16:03:00,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=69045.33333333333, ans=0.07
+2024-08-26 16:03:27,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=69205.33333333333, ans=0.0
+2024-08-26 16:03:29,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=69205.33333333333, ans=0.0
+2024-08-26 16:03:43,070 INFO [train.py:1114] (3/4) Epoch 6, batch 550, loss[loss=0.2879, simple_loss=0.3272, pruned_loss=0.09026, ctc_loss=0.1701, over 19338.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3065, pruned_loss=0.07443, ctc_loss=0.1394, over 3607603.70 frames. ], batch size: 71, lr: 2.41e-02, grad_scale: 32.0
+2024-08-26 16:03:49,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=69312.0, ans=0.95
+2024-08-26 16:04:07,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=69418.66666666667, ans=0.125
+2024-08-26 16:04:08,885 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.381e+02 1.633e+02 1.875e+02 2.080e+02 6.681e+02, threshold=3.749e+02, percent-clipped=3.0
+2024-08-26 16:04:16,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0
+2024-08-26 16:04:25,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=69525.33333333333, ans=0.2
+2024-08-26 16:04:27,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=69525.33333333333, ans=0.0
+2024-08-26 16:04:30,175 INFO [train.py:1114] (3/4) Epoch 6, batch 600, loss[loss=0.2738, simple_loss=0.3263, pruned_loss=0.08166, ctc_loss=0.1449, over 19379.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3066, pruned_loss=0.0744, ctc_loss=0.1392, over 3666523.33 frames. ], batch size: 67, lr: 2.41e-02, grad_scale: 32.0
+2024-08-26 16:04:38,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.08 vs. limit=15.0
+2024-08-26 16:04:44,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69632.0, ans=0.125
+2024-08-26 16:04:59,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69685.33333333333, ans=0.125
+2024-08-26 16:05:00,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69685.33333333333, ans=0.1
+2024-08-26 16:05:18,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=69792.0, ans=0.0
+2024-08-26 16:05:25,774 INFO [train.py:1114] (3/4) Epoch 6, batch 650, loss[loss=0.2362, simple_loss=0.2959, pruned_loss=0.06405, ctc_loss=0.1212, over 19764.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3057, pruned_loss=0.0738, ctc_loss=0.1382, over 3716957.91 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 32.0
+2024-08-26 16:05:40,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=69898.66666666667, ans=0.125
+2024-08-26 16:05:49,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0
+2024-08-26 16:05:51,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.42 vs. limit=15.0
+2024-08-26 16:05:52,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69952.0, ans=0.125
+2024-08-26 16:05:53,419 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.596e+02 1.734e+02 1.974e+02 3.978e+02, threshold=3.467e+02, percent-clipped=1.0
+2024-08-26 16:05:59,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70005.33333333333, ans=0.1
+2024-08-26 16:06:02,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0
+2024-08-26 16:06:15,345 INFO [train.py:1114] (3/4) Epoch 6, batch 700, loss[loss=0.2216, simple_loss=0.2863, pruned_loss=0.05642, ctc_loss=0.1099, over 19726.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3064, pruned_loss=0.07416, ctc_loss=0.1387, over 3747934.63 frames. ], batch size: 51, lr: 2.40e-02, grad_scale: 16.0
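The grad_scale field that alternates between 32.0 and 16.0 in the train.py lines is the dynamic loss scale of mixed-precision training: the scale is halved when a step overflows in fp16 and grown back after a run of good steps. A standard PyTorch sketch of that mechanism (toy model and data; requires a CUDA device):

```python
import torch

model = torch.nn.Linear(10, 1).cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.cuda.amp.GradScaler()   # dynamic loss scale

for _ in range(100):
    x = torch.randn(8, 10, device="cuda")
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(x).pow(2).mean()
    scaler.scale(loss).backward()  # backward on the scaled loss
    scaler.step(optimizer)         # step is skipped if grads overflowed
    scaler.update()                # halve on overflow, grow after good steps
    print(scaler.get_scale())      # the value train.py logs as grad_scale
```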
+2024-08-26 16:06:21,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.38 vs. limit=15.0
+2024-08-26 16:06:26,556 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:06:27,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.69 vs. limit=15.0
+2024-08-26 16:06:38,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=70218.66666666667, ans=0.125
+2024-08-26 16:06:41,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0
+2024-08-26 16:06:43,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.70 vs. limit=10.0
+2024-08-26 16:06:45,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70272.0, ans=0.1
+2024-08-26 16:06:45,254 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 16:06:52,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=70325.33333333333, ans=0.125
+2024-08-26 16:07:02,325 INFO [train.py:1114] (3/4) Epoch 6, batch 750, loss[loss=0.2348, simple_loss=0.2971, pruned_loss=0.06298, ctc_loss=0.1163, over 19486.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3058, pruned_loss=0.07396, ctc_loss=0.1382, over 3774371.07 frames. ], batch size: 54, lr: 2.40e-02, grad_scale: 16.0
+2024-08-26 16:07:09,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=70378.66666666667, ans=0.2
+2024-08-26 16:07:33,077 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.662e+02 1.845e+02 2.236e+02 2.956e+02, threshold=3.689e+02, percent-clipped=0.0
+2024-08-26 16:07:54,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70538.66666666667, ans=0.1
+2024-08-26 16:08:25,931 INFO [train.py:1114] (3/4) Epoch 6, batch 800, loss[loss=0.2126, simple_loss=0.2711, pruned_loss=0.05614, ctc_loss=0.1043, over 19804.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3059, pruned_loss=0.07387, ctc_loss=0.1379, over 3796209.12 frames. ], batch size: 49, lr: 2.39e-02, grad_scale: 32.0
+2024-08-26 16:08:26,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.85 vs. limit=10.0
+2024-08-26 16:08:27,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=70645.33333333333, ans=0.125
+2024-08-26 16:08:30,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0
+2024-08-26 16:08:34,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=70698.66666666667, ans=0.0
+2024-08-26 16:09:11,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. limit=15.0
+2024-08-26 16:09:21,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70805.33333333333, ans=0.0
+2024-08-26 16:09:22,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.62 vs. limit=22.5
+2024-08-26 16:09:32,246 INFO [train.py:1114] (3/4) Epoch 6, batch 850, loss[loss=0.2757, simple_loss=0.3258, pruned_loss=0.08148, ctc_loss=0.1565, over 19654.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3047, pruned_loss=0.07331, ctc_loss=0.1371, over 3815529.01 frames. ], batch size: 59, lr: 2.39e-02, grad_scale: 32.0
+2024-08-26 16:09:46,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70965.33333333333, ans=0.1
+2024-08-26 16:09:49,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=70965.33333333333, ans=0.0
+2024-08-26 16:09:58,806 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.315e+02 1.558e+02 1.696e+02 1.888e+02 5.151e+02, threshold=3.391e+02, percent-clipped=1.0
+2024-08-26 16:10:25,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71125.33333333333, ans=0.0
+2024-08-26 16:10:32,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=71125.33333333333, ans=0.2
+2024-08-26 16:10:35,848 INFO [train.py:1114] (3/4) Epoch 6, batch 900, loss[loss=0.2271, simple_loss=0.2838, pruned_loss=0.06186, ctc_loss=0.1167, over 19424.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3054, pruned_loss=0.07372, ctc_loss=0.1377, over 3818432.12 frames. ], batch size: 48, lr: 2.39e-02, grad_scale: 32.0
+2024-08-26 16:10:39,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=71178.66666666667, ans=0.2
+2024-08-26 16:10:42,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=71178.66666666667, ans=0.125
+2024-08-26 16:10:56,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=71285.33333333333, ans=0.0
+2024-08-26 16:11:04,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.12 vs. limit=15.0
+2024-08-26 16:11:08,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71338.66666666667, ans=0.125
+2024-08-26 16:11:17,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=71392.0, ans=0.05
+2024-08-26 16:11:23,803 INFO [train.py:1114] (3/4) Epoch 6, batch 950, loss[loss=0.2381, simple_loss=0.2898, pruned_loss=0.06691, ctc_loss=0.1317, over 19512.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3059, pruned_loss=0.07405, ctc_loss=0.1384, over 3820133.53 frames. ], batch size: 49, lr: 2.38e-02, grad_scale: 16.0
+2024-08-26 16:11:24,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=71445.33333333333, ans=0.0
+2024-08-26 16:11:32,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=71445.33333333333, ans=0.125
+2024-08-26 16:11:40,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.89 vs. limit=22.5
+2024-08-26 16:11:44,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71498.66666666667, ans=0.0
+2024-08-26 16:11:47,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71498.66666666667, ans=0.125
+2024-08-26 16:11:59,457 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.602e+02 1.780e+02 2.099e+02 5.215e+02, threshold=3.559e+02, percent-clipped=4.0
+2024-08-26 16:12:16,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=71658.66666666667, ans=0.0
+2024-08-26 16:12:21,545 INFO [train.py:1114] (3/4) Epoch 6, batch 1000, loss[loss=0.2294, simple_loss=0.3006, pruned_loss=0.05791, ctc_loss=0.106, over 19863.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3062, pruned_loss=0.07422, ctc_loss=0.1386, over 3816607.31 frames. ], batch size: 52, lr: 2.38e-02, grad_scale: 16.0
+2024-08-26 16:12:21,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=71712.0, ans=0.0
+2024-08-26 16:12:26,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=71712.0, ans=0.125
+2024-08-26 16:12:27,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=71712.0, ans=0.2
+2024-08-26 16:12:33,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71765.33333333333, ans=0.1
+2024-08-26 16:12:38,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.66 vs. limit=22.5
+2024-08-26 16:12:53,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=71818.66666666667, ans=0.2
+2024-08-26 16:12:54,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.93 vs. limit=15.0
+2024-08-26 16:13:02,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71818.66666666667, ans=0.125
+2024-08-26 16:13:22,623 INFO [train.py:1114] (3/4) Epoch 6, batch 1050, loss[loss=0.245, simple_loss=0.3134, pruned_loss=0.06415, ctc_loss=0.1208, over 19840.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3054, pruned_loss=0.07382, ctc_loss=0.1381, over 3823402.83 frames. ], batch size: 57, lr: 2.37e-02, grad_scale: 16.0
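The lr field decays smoothly with batch count (2.68e-02 at Epoch 5, batch 1750 down to 2.37e-02 here) and also steps down at epoch boundaries (2.63e-02 at the end of epoch 5 vs. 2.45e-02 at Epoch 6, batch 0), so the schedule has both a per-batch and a per-epoch factor. A sketch assuming an Eden-style rule; the lr_batches and lr_epochs constants are illustrative and not recorded in this log:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    # Assumed Eden-style decay: smooth in batch count, stepped per epoch.
    batch_f = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_f = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_f * epoch_f
```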
+2024-08-26 16:13:24,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=71978.66666666667, ans=0.2
+2024-08-26 16:13:31,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0
+2024-08-26 16:13:50,116 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.587e+02 1.763e+02 2.081e+02 5.001e+02, threshold=3.526e+02, percent-clipped=1.0
+2024-08-26 16:13:51,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=72138.66666666667, ans=0.2
+2024-08-26 16:13:58,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72138.66666666667, ans=0.125
+2024-08-26 16:14:02,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72192.0, ans=0.125
+2024-08-26 16:14:10,560 INFO [train.py:1114] (3/4) Epoch 6, batch 1100, loss[loss=0.2695, simple_loss=0.311, pruned_loss=0.08424, ctc_loss=0.1486, over 19584.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3058, pruned_loss=0.07402, ctc_loss=0.1383, over 3831345.65 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0
+2024-08-26 16:14:12,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=72245.33333333333, ans=0.125
+2024-08-26 16:14:12,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0
+2024-08-26 16:14:20,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=72298.66666666667, ans=0.07
+2024-08-26 16:14:20,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72298.66666666667, ans=0.125
+2024-08-26 16:14:22,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=72298.66666666667, ans=0.5
+2024-08-26 16:14:38,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=72352.0, ans=0.125
+2024-08-26 16:14:46,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=12.0
+2024-08-26 16:15:16,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=72458.66666666667, ans=0.05
+2024-08-26 16:15:18,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72458.66666666667, ans=0.125
+2024-08-26 16:15:25,922 INFO [train.py:1114] (3/4) Epoch 6, batch 1150, loss[loss=0.25, simple_loss=0.3046, pruned_loss=0.07099, ctc_loss=0.1337, over 19595.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3056, pruned_loss=0.07395, ctc_loss=0.1382, over 3830517.04 frames. ], batch size: 52, lr: 2.37e-02, grad_scale: 16.0
+2024-08-26 16:15:52,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0
+2024-08-26 16:16:44,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=72618.66666666667, ans=0.07
+2024-08-26 16:16:51,599 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.628e+02 1.822e+02 2.077e+02 5.117e+02, threshold=3.645e+02, percent-clipped=2.0
+2024-08-26 16:16:53,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.38 vs. limit=15.0
+2024-08-26 16:16:59,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.00 vs. limit=15.0
+2024-08-26 16:17:05,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=72725.33333333333, ans=6.0
+2024-08-26 16:17:15,539 INFO [train.py:1114] (3/4) Epoch 6, batch 1200, loss[loss=0.2641, simple_loss=0.3234, pruned_loss=0.07443, ctc_loss=0.1398, over 19839.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3068, pruned_loss=0.07438, ctc_loss=0.1392, over 3825177.28 frames. ], batch size: 57, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:17:20,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.67 vs. limit=15.0
+2024-08-26 16:17:36,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72885.33333333333, ans=0.125
+2024-08-26 16:17:52,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.99 vs. limit=22.5
+2024-08-26 16:17:52,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72992.0, ans=0.125
+2024-08-26 16:17:55,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0
+2024-08-26 16:18:04,580 INFO [train.py:1114] (3/4) Epoch 6, batch 1250, loss[loss=0.2652, simple_loss=0.3229, pruned_loss=0.07682, ctc_loss=0.1345, over 19508.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3065, pruned_loss=0.07389, ctc_loss=0.1382, over 3843224.54 frames. ], batch size: 61, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:18:09,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=73045.33333333333, ans=0.0
+2024-08-26 16:18:14,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=73098.66666666667, ans=0.125
+2024-08-26 16:18:19,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0
+2024-08-26 16:18:31,859 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.534e+02 1.709e+02 2.004e+02 3.682e+02, threshold=3.418e+02, percent-clipped=1.0
+2024-08-26 16:18:34,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=73205.33333333333, ans=0.025
+2024-08-26 16:18:42,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=12.0
+2024-08-26 16:18:45,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=73205.33333333333, ans=0.125
+2024-08-26 16:18:54,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.96 vs. limit=22.5
+2024-08-26 16:18:55,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=73258.66666666667, ans=0.125
+2024-08-26 16:18:59,491 INFO [train.py:1114] (3/4) Epoch 6, batch 1300, loss[loss=0.2533, simple_loss=0.3029, pruned_loss=0.07468, ctc_loss=0.1357, over 18852.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3055, pruned_loss=0.0735, ctc_loss=0.1373, over 3846827.06 frames. ], batch size: 76, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:19:16,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=73312.0, ans=0.0
+2024-08-26 16:19:19,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=73312.0, ans=10.0
+2024-08-26 16:19:56,723 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0
+2024-08-26 16:20:01,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=73418.66666666667, ans=0.0
+2024-08-26 16:20:08,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=73418.66666666667, ans=0.0
+2024-08-26 16:20:17,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=73472.0, ans=0.0
+2024-08-26 16:20:19,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=73472.0, ans=0.2
+2024-08-26 16:20:31,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=73578.66666666667, ans=0.0
+2024-08-26 16:20:32,242 INFO [train.py:1114] (3/4) Epoch 6, batch 1350, loss[loss=0.2343, simple_loss=0.2994, pruned_loss=0.06118, ctc_loss=0.1169, over 19779.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3049, pruned_loss=0.07286, ctc_loss=0.1361, over 3858002.22 frames. ], batch size: 54, lr: 2.36e-02, grad_scale: 32.0
+2024-08-26 16:20:40,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.27 vs. limit=12.0
+2024-08-26 16:20:43,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=73632.0, ans=0.2
+2024-08-26 16:21:00,547 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.536e+02 1.657e+02 1.960e+02 3.055e+02, threshold=3.315e+02, percent-clipped=0.0
+2024-08-26 16:21:04,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=73738.66666666667, ans=0.0
+2024-08-26 16:21:14,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0
+2024-08-26 16:21:16,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0
+2024-08-26 16:21:20,649 INFO [train.py:1114] (3/4) Epoch 6, batch 1400, loss[loss=0.2171, simple_loss=0.2673, pruned_loss=0.06001, ctc_loss=0.117, over 19687.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.305, pruned_loss=0.07294, ctc_loss=0.1363, over 3864324.59 frames. ], batch size: 46, lr: 2.35e-02, grad_scale: 32.0
+2024-08-26 16:21:27,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=15.0
+2024-08-26 16:21:35,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=73898.66666666667, ans=0.2
+2024-08-26 16:21:52,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=74005.33333333333, ans=0.2
+2024-08-26 16:22:03,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=74058.66666666667, ans=0.0
+2024-08-26 16:22:06,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=74058.66666666667, ans=0.04949747468305833
+2024-08-26 16:22:08,751 INFO [train.py:1114] (3/4) Epoch 6, batch 1450, loss[loss=0.2935, simple_loss=0.3408, pruned_loss=0.0908, ctc_loss=0.1614, over 19683.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3059, pruned_loss=0.07351, ctc_loss=0.1375, over 3862527.25 frames. ], batch size: 63, lr: 2.35e-02, grad_scale: 16.0
+2024-08-26 16:22:17,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=74165.33333333333, ans=0.0
+2024-08-26 16:23:32,949 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.612e+02 1.863e+02 2.093e+02 4.374e+02, threshold=3.727e+02, percent-clipped=2.0
+2024-08-26 16:23:41,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=74272.0, ans=0.0
+2024-08-26 16:23:50,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=74325.33333333333, ans=0.125
+2024-08-26 16:23:57,151 INFO [train.py:1114] (3/4) Epoch 6, batch 1500, loss[loss=0.2573, simple_loss=0.3168, pruned_loss=0.07138, ctc_loss=0.1374, over 19602.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3058, pruned_loss=0.07341, ctc_loss=0.1372, over 3861644.56 frames. ], batch size: 57, lr: 2.35e-02, grad_scale: 16.0
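Each loss[...] entry splits the objective into simple_loss and pruned_loss (the two passes of a pruned-transducer loss) plus ctc_loss from an auxiliary CTC head. The logged totals are consistent with the weighted sum 0.5*simple + 1.0*pruned + 0.2*ctc (e.g. 0.5*0.3168 + 0.07138 + 0.2*0.1374 = 0.2573 for the batch-1500 entry just above); these weights are inferred from the logged numbers, not stated in the log:

```python
def combined_loss(simple_loss, pruned_loss, ctc_loss,
                  simple_scale=0.5, pruned_scale=1.0, ctc_scale=0.2):
    # Weights inferred from the logged totals above, not from the run's
    # config; treat them as an assumption when reusing this.
    return (simple_scale * simple_loss
            + pruned_scale * pruned_loss
            + ctc_scale * ctc_loss)
```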
], batch size: 57, lr: 2.35e-02, grad_scale: 16.0 +2024-08-26 16:23:57,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=74378.66666666667, ans=0.04949747468305833 +2024-08-26 16:24:46,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74485.33333333333, ans=0.125 +2024-08-26 16:25:10,561 INFO [train.py:1114] (3/4) Epoch 6, batch 1550, loss[loss=0.2607, simple_loss=0.3107, pruned_loss=0.07607, ctc_loss=0.1465, over 19622.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3057, pruned_loss=0.07354, ctc_loss=0.1372, over 3846569.08 frames. ], batch size: 60, lr: 2.34e-02, grad_scale: 16.0 +2024-08-26 16:25:18,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=74645.33333333333, ans=0.0 +2024-08-26 16:25:19,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.74 vs. limit=15.0 +2024-08-26 16:25:26,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=74698.66666666667, ans=0.025 +2024-08-26 16:25:55,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=74752.0, ans=0.025 +2024-08-26 16:26:02,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.96 vs. limit=15.0 +2024-08-26 16:26:20,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.577e+02 1.696e+02 1.957e+02 2.811e+02, threshold=3.391e+02, percent-clipped=0.0 +2024-08-26 16:26:28,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=74805.33333333333, ans=0.125 +2024-08-26 16:26:37,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=74858.66666666667, ans=0.0 +2024-08-26 16:26:39,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=74912.0, ans=0.0 +2024-08-26 16:26:40,333 INFO [train.py:1114] (3/4) Epoch 6, batch 1600, loss[loss=0.2616, simple_loss=0.3174, pruned_loss=0.0754, ctc_loss=0.1373, over 19836.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3056, pruned_loss=0.07376, ctc_loss=0.1376, over 3836820.87 frames. ], batch size: 57, lr: 2.34e-02, grad_scale: 32.0 +2024-08-26 16:26:41,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=6.0 +2024-08-26 16:27:14,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=75018.66666666667, ans=0.125 +2024-08-26 16:27:34,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.63 vs. limit=22.5 +2024-08-26 16:27:35,232 INFO [train.py:1114] (3/4) Epoch 6, batch 1650, loss[loss=0.2669, simple_loss=0.3208, pruned_loss=0.07696, ctc_loss=0.1478, over 19675.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.306, pruned_loss=0.07394, ctc_loss=0.138, over 3833812.59 frames. 
], batch size: 59, lr: 2.34e-02, grad_scale: 32.0 +2024-08-26 16:27:37,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.67 vs. limit=15.0 +2024-08-26 16:27:48,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75232.0, ans=0.1 +2024-08-26 16:28:43,071 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.584e+02 1.799e+02 2.082e+02 3.549e+02, threshold=3.597e+02, percent-clipped=1.0 +2024-08-26 16:28:49,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=75338.66666666667, ans=0.0 +2024-08-26 16:29:10,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75392.0, ans=0.125 +2024-08-26 16:29:13,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75392.0, ans=0.1 +2024-08-26 16:29:31,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.72 vs. limit=15.0 +2024-08-26 16:29:34,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=75392.0, ans=0.0 +2024-08-26 16:29:36,219 INFO [train.py:1114] (3/4) Epoch 6, batch 1700, loss[loss=0.2292, simple_loss=0.2768, pruned_loss=0.06614, ctc_loss=0.1232, over 19682.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3048, pruned_loss=0.07333, ctc_loss=0.137, over 3848443.30 frames. ], batch size: 46, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:29:37,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=75445.33333333333, ans=0.2 +2024-08-26 16:29:41,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=75445.33333333333, ans=0.5 +2024-08-26 16:29:45,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.85 vs. limit=15.0 +2024-08-26 16:30:09,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=75605.33333333333, ans=0.125 +2024-08-26 16:30:24,062 INFO [train.py:1114] (3/4) Epoch 6, batch 1750, loss[loss=0.228, simple_loss=0.2742, pruned_loss=0.06634, ctc_loss=0.1228, over 19670.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3037, pruned_loss=0.0725, ctc_loss=0.1354, over 3852158.83 frames. ], batch size: 45, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:30:33,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75765.33333333333, ans=0.125 +2024-08-26 16:30:38,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.74 vs. limit=15.0 +2024-08-26 16:30:48,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.15 vs. 
limit=15.0 +2024-08-26 16:31:04,526 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.524e+02 1.697e+02 1.959e+02 3.052e+02, threshold=3.394e+02, percent-clipped=0.0 +2024-08-26 16:31:16,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=75925.33333333333, ans=0.035 +2024-08-26 16:31:23,342 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 16:31:25,748 INFO [train.py:1114] (3/4) Epoch 6, batch 1800, loss[loss=0.2441, simple_loss=0.3098, pruned_loss=0.06429, ctc_loss=0.1247, over 19619.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3038, pruned_loss=0.07232, ctc_loss=0.1353, over 3853493.58 frames. ], batch size: 55, lr: 2.33e-02, grad_scale: 32.0 +2024-08-26 16:32:14,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=76085.33333333333, ans=0.125 +2024-08-26 16:32:17,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=76085.33333333333, ans=0.125 +2024-08-26 16:32:45,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76138.66666666667, ans=0.125 +2024-08-26 16:33:01,909 INFO [train.py:1114] (3/4) Epoch 6, batch 1850, loss[loss=0.2667, simple_loss=0.3215, pruned_loss=0.07609, ctc_loss=0.1495, over 19592.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3036, pruned_loss=0.07204, ctc_loss=0.1348, over 3857006.66 frames. ], batch size: 57, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:33:25,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=76245.33333333333, ans=0.025 +2024-08-26 16:33:25,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=15.0 +2024-08-26 16:33:50,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=76298.66666666667, ans=0.2 +2024-08-26 16:34:04,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76352.0, ans=0.1 +2024-08-26 16:34:05,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.545e+02 1.701e+02 1.893e+02 2.907e+02, threshold=3.402e+02, percent-clipped=0.0 +2024-08-26 16:34:23,396 INFO [train.py:1114] (3/4) Epoch 6, batch 1900, loss[loss=0.2452, simple_loss=0.3071, pruned_loss=0.06602, ctc_loss=0.1282, over 19647.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3039, pruned_loss=0.07198, ctc_loss=0.135, over 3861518.57 frames. ], batch size: 59, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:34:24,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76512.0, ans=0.1 +2024-08-26 16:34:26,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.15 vs. 
limit=15.0 +2024-08-26 16:34:31,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=76565.33333333333, ans=0.2 +2024-08-26 16:34:37,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.77 vs. limit=22.5 +2024-08-26 16:34:38,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=76565.33333333333, ans=0.125 +2024-08-26 16:34:39,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=76565.33333333333, ans=0.0 +2024-08-26 16:34:39,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=76565.33333333333, ans=0.0 +2024-08-26 16:34:39,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=76565.33333333333, ans=0.125 +2024-08-26 16:34:40,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76618.66666666667, ans=0.2 +2024-08-26 16:35:14,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=15.0 +2024-08-26 16:35:16,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-08-26 16:35:27,777 INFO [train.py:1114] (3/4) Epoch 6, batch 1950, loss[loss=0.2399, simple_loss=0.2946, pruned_loss=0.06753, ctc_loss=0.1252, over 19569.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3044, pruned_loss=0.07177, ctc_loss=0.1347, over 3870912.95 frames. ], batch size: 52, lr: 2.32e-02, grad_scale: 32.0 +2024-08-26 16:35:35,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=76778.66666666667, ans=0.125 +2024-08-26 16:36:32,166 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.394e+02 1.646e+02 1.808e+02 2.059e+02 4.885e+02, threshold=3.617e+02, percent-clipped=2.0 +2024-08-26 16:36:53,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=77045.33333333333, ans=15.0 +2024-08-26 16:36:53,614 INFO [train.py:1114] (3/4) Epoch 6, batch 2000, loss[loss=0.2292, simple_loss=0.2795, pruned_loss=0.06497, ctc_loss=0.1225, over 19647.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3048, pruned_loss=0.07228, ctc_loss=0.1355, over 3855858.15 frames. ], batch size: 45, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:36:54,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=77045.33333333333, ans=0.125 +2024-08-26 16:37:04,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.19 vs. limit=10.0 +2024-08-26 16:37:07,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.30 vs. 
limit=15.0 +2024-08-26 16:37:10,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=77098.66666666667, ans=0.2 +2024-08-26 16:37:10,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77152.0, ans=0.1 +2024-08-26 16:37:15,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-08-26 16:37:33,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77258.66666666667, ans=0.125 +2024-08-26 16:37:38,185 INFO [train.py:1114] (3/4) Epoch 6, batch 2050, loss[loss=0.226, simple_loss=0.2742, pruned_loss=0.06551, ctc_loss=0.1166, over 19719.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3035, pruned_loss=0.07164, ctc_loss=0.1346, over 3852479.07 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:37:41,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77312.0, ans=0.1 +2024-08-26 16:37:50,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=77365.33333333333, ans=0.125 +2024-08-26 16:37:58,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=77418.66666666667, ans=0.125 +2024-08-26 16:37:59,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=77418.66666666667, ans=0.125 +2024-08-26 16:38:02,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=77418.66666666667, ans=0.0 +2024-08-26 16:38:04,757 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.566e+02 1.748e+02 2.075e+02 4.290e+02, threshold=3.497e+02, percent-clipped=1.0 +2024-08-26 16:38:09,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.36 vs. limit=15.0 +2024-08-26 16:38:34,152 INFO [train.py:1114] (3/4) Epoch 6, batch 2100, loss[loss=0.2468, simple_loss=0.3018, pruned_loss=0.0684, ctc_loss=0.1375, over 19757.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3027, pruned_loss=0.07101, ctc_loss=0.1337, over 3859139.25 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 32.0 +2024-08-26 16:38:36,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77578.66666666667, ans=0.1 +2024-08-26 16:39:17,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=77632.0, ans=0.025 +2024-08-26 16:39:21,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77685.33333333333, ans=0.125 +2024-08-26 16:39:23,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.54 vs. 
limit=15.0 +2024-08-26 16:39:23,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77685.33333333333, ans=0.0 +2024-08-26 16:39:27,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=77738.66666666667, ans=0.1 +2024-08-26 16:39:45,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.27 vs. limit=15.0 +2024-08-26 16:39:46,034 INFO [train.py:1114] (3/4) Epoch 6, batch 2150, loss[loss=0.2314, simple_loss=0.2928, pruned_loss=0.0609, ctc_loss=0.1203, over 19868.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.302, pruned_loss=0.07097, ctc_loss=0.1334, over 3870860.64 frames. ], batch size: 52, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:39:58,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=77898.66666666667, ans=0.05 +2024-08-26 16:39:58,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=77898.66666666667, ans=0.02 +2024-08-26 16:40:13,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.590e+02 1.744e+02 2.019e+02 3.989e+02, threshold=3.489e+02, percent-clipped=1.0 +2024-08-26 16:40:25,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=78058.66666666667, ans=0.07 +2024-08-26 16:40:26,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78058.66666666667, ans=0.125 +2024-08-26 16:40:31,344 INFO [train.py:1114] (3/4) Epoch 6, batch 2200, loss[loss=0.273, simple_loss=0.3264, pruned_loss=0.08015, ctc_loss=0.148, over 19579.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3024, pruned_loss=0.07103, ctc_loss=0.1334, over 3868103.31 frames. ], batch size: 57, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:40:37,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=78112.0, ans=0.125 +2024-08-26 16:40:46,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=78165.33333333333, ans=0.2 +2024-08-26 16:40:55,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-08-26 16:40:57,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.14 vs. limit=5.0 +2024-08-26 16:41:00,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.49 vs. limit=15.0 +2024-08-26 16:42:04,354 INFO [train.py:1114] (3/4) Epoch 6, batch 2250, loss[loss=0.2693, simple_loss=0.3234, pruned_loss=0.07789, ctc_loss=0.1487, over 19627.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3025, pruned_loss=0.07117, ctc_loss=0.1335, over 3868286.74 frames. 
], batch size: 55, lr: 2.30e-02, grad_scale: 32.0 +2024-08-26 16:42:30,464 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.366e+02 1.631e+02 1.850e+02 2.118e+02 4.912e+02, threshold=3.701e+02, percent-clipped=4.0 +2024-08-26 16:42:35,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=78538.66666666667, ans=0.125 +2024-08-26 16:42:36,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=78538.66666666667, ans=0.125 +2024-08-26 16:42:50,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.31 vs. limit=15.0 +2024-08-26 16:42:53,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78592.0, ans=0.1 +2024-08-26 16:42:57,044 INFO [train.py:1114] (3/4) Epoch 6, batch 2300, loss[loss=0.2072, simple_loss=0.2702, pruned_loss=0.05222, ctc_loss=0.09929, over 19502.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3016, pruned_loss=0.07101, ctc_loss=0.133, over 3861804.80 frames. ], batch size: 49, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:05,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=78698.66666666667, ans=0.125 +2024-08-26 16:43:07,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78698.66666666667, ans=0.1 +2024-08-26 16:43:28,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.97 vs. limit=15.0 +2024-08-26 16:43:41,590 INFO [train.py:1114] (3/4) Epoch 6, batch 2350, loss[loss=0.2613, simple_loss=0.3227, pruned_loss=0.0742, ctc_loss=0.1286, over 19652.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3019, pruned_loss=0.07127, ctc_loss=0.1333, over 3863942.86 frames. ], batch size: 63, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:43:47,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=78912.0, ans=0.125 +2024-08-26 16:44:09,680 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.319e+02 1.571e+02 1.792e+02 2.053e+02 3.529e+02, threshold=3.585e+02, percent-clipped=0.0 +2024-08-26 16:44:27,080 INFO [train.py:1114] (3/4) Epoch 6, batch 2400, loss[loss=0.259, simple_loss=0.3097, pruned_loss=0.07533, ctc_loss=0.1443, over 19333.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.304, pruned_loss=0.07211, ctc_loss=0.1346, over 3859078.99 frames. ], batch size: 71, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:44:29,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=79178.66666666667, ans=0.125 +2024-08-26 16:44:32,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=79178.66666666667, ans=0.0 +2024-08-26 16:44:39,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. 
limit=6.0 +2024-08-26 16:44:41,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79232.0, ans=0.1 +2024-08-26 16:44:43,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79232.0, ans=0.125 +2024-08-26 16:44:44,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.81 vs. limit=10.0 +2024-08-26 16:44:49,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=79285.33333333333, ans=0.0 +2024-08-26 16:44:59,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.14 vs. limit=10.0 +2024-08-26 16:45:00,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.50 vs. limit=15.0 +2024-08-26 16:45:12,844 INFO [train.py:1114] (3/4) Epoch 6, batch 2450, loss[loss=0.3314, simple_loss=0.3462, pruned_loss=0.1152, ctc_loss=0.2154, over 13517.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3087, pruned_loss=0.07624, ctc_loss=0.1423, over 3733220.80 frames. ], batch size: 141, lr: 2.29e-02, grad_scale: 32.0 +2024-08-26 16:45:14,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=79445.33333333333, ans=0.025 +2024-08-26 16:45:17,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=79445.33333333333, ans=0.0 +2024-08-26 16:45:19,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=15.0 +2024-08-26 16:45:22,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=79498.66666666667, ans=0.1 +2024-08-26 16:45:29,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.62 vs. limit=10.0 +2024-08-26 16:45:40,100 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.433e+02 1.744e+02 1.902e+02 2.066e+02 3.652e+02, threshold=3.804e+02, percent-clipped=1.0 +2024-08-26 16:45:44,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=79605.33333333333, ans=0.0 +2024-08-26 16:48:16,406 INFO [train.py:1114] (3/4) Epoch 7, batch 0, loss[loss=0.2442, simple_loss=0.2993, pruned_loss=0.06887, ctc_loss=0.1283, over 19820.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.2993, pruned_loss=0.06887, ctc_loss=0.1283, over 19820.00 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:48:16,406 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 16:48:29,561 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.2068, simple_loss=0.2958, pruned_loss=0.04327, ctc_loss=0.07811, over 944034.00 frames. +2024-08-26 16:48:29,562 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12783MB +2024-08-26 16:49:19,293 INFO [train.py:1114] (3/4) Epoch 7, batch 50, loss[loss=0.2276, simple_loss=0.2822, pruned_loss=0.06322, ctc_loss=0.1164, over 19712.00 frames. 
], tot_loss[loss=0.2537, simple_loss=0.3064, pruned_loss=0.07302, ctc_loss=0.1374, over 843690.50 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 32.0 +2024-08-26 16:49:52,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=80085.33333333333, ans=0.125 +2024-08-26 16:49:57,480 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.282e+02 1.584e+02 1.822e+02 2.089e+02 3.575e+02, threshold=3.645e+02, percent-clipped=0.0 +2024-08-26 16:50:05,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=12.0 +2024-08-26 16:50:07,044 INFO [train.py:1114] (3/4) Epoch 7, batch 100, loss[loss=0.216, simple_loss=0.2748, pruned_loss=0.05728, ctc_loss=0.1067, over 19716.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3058, pruned_loss=0.07172, ctc_loss=0.135, over 1498479.81 frames. ], batch size: 51, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:50:23,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80245.33333333333, ans=0.1 +2024-08-26 16:50:25,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=80245.33333333333, ans=0.2 +2024-08-26 16:50:49,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=80352.0, ans=0.2 +2024-08-26 16:50:54,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80405.33333333333, ans=0.125 +2024-08-26 16:51:01,446 INFO [train.py:1114] (3/4) Epoch 7, batch 150, loss[loss=0.1984, simple_loss=0.2634, pruned_loss=0.04805, ctc_loss=0.09332, over 19754.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3031, pruned_loss=0.07071, ctc_loss=0.1327, over 2028113.79 frames. ], batch size: 47, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:15,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80512.0, ans=0.125 +2024-08-26 16:51:28,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=80565.33333333333, ans=0.0 +2024-08-26 16:51:39,016 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.525e+02 1.667e+02 1.863e+02 2.878e+02, threshold=3.334e+02, percent-clipped=0.0 +2024-08-26 16:51:48,559 INFO [train.py:1114] (3/4) Epoch 7, batch 200, loss[loss=0.2645, simple_loss=0.3185, pruned_loss=0.07585, ctc_loss=0.1473, over 18310.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3008, pruned_loss=0.06958, ctc_loss=0.1307, over 2434946.64 frames. ], batch size: 85, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:51:50,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=80725.33333333333, ans=0.125 +2024-08-26 16:51:57,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=80778.66666666667, ans=0.125 +2024-08-26 16:52:05,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.69 vs. 
limit=15.0 +2024-08-26 16:52:15,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=80885.33333333333, ans=10.0 +2024-08-26 16:52:30,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.88 vs. limit=15.0 +2024-08-26 16:52:35,141 INFO [train.py:1114] (3/4) Epoch 7, batch 250, loss[loss=0.2829, simple_loss=0.3291, pruned_loss=0.08649, ctc_loss=0.1595, over 19305.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3007, pruned_loss=0.06941, ctc_loss=0.13, over 2754437.13 frames. ], batch size: 67, lr: 2.13e-02, grad_scale: 32.0 +2024-08-26 16:53:08,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=81152.0, ans=0.125 +2024-08-26 16:53:11,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.01 vs. limit=22.5 +2024-08-26 16:53:13,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=81152.0, ans=0.125 +2024-08-26 16:53:15,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81205.33333333333, ans=0.125 +2024-08-26 16:53:16,593 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.591e+02 1.729e+02 1.900e+02 5.825e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 16:53:17,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=81205.33333333333, ans=0.125 +2024-08-26 16:53:25,916 INFO [train.py:1114] (3/4) Epoch 7, batch 300, loss[loss=0.2378, simple_loss=0.3034, pruned_loss=0.06376, ctc_loss=0.112, over 19536.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2999, pruned_loss=0.06869, ctc_loss=0.1286, over 3000366.08 frames. ], batch size: 61, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:53:28,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=81258.66666666667, ans=0.2 +2024-08-26 16:53:45,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=81312.0, ans=0.125 +2024-08-26 16:53:55,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=81365.33333333333, ans=0.125 +2024-08-26 16:54:05,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81418.66666666667, ans=0.0 +2024-08-26 16:54:05,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-08-26 16:54:13,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=81472.0, ans=0.125 +2024-08-26 16:54:18,315 INFO [train.py:1114] (3/4) Epoch 7, batch 350, loss[loss=0.2311, simple_loss=0.2809, pruned_loss=0.06512, ctc_loss=0.1274, over 19739.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3002, pruned_loss=0.06897, ctc_loss=0.1292, over 3189719.98 frames. 
], batch size: 48, lr: 2.12e-02, grad_scale: 16.0 +2024-08-26 16:54:29,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=81578.66666666667, ans=0.0 +2024-08-26 16:54:56,442 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.574e+02 1.753e+02 2.022e+02 2.928e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-26 16:54:59,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81738.66666666667, ans=0.125 +2024-08-26 16:55:04,698 INFO [train.py:1114] (3/4) Epoch 7, batch 400, loss[loss=0.2422, simple_loss=0.3067, pruned_loss=0.06512, ctc_loss=0.1186, over 19482.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2999, pruned_loss=0.0687, ctc_loss=0.1287, over 3341175.01 frames. ], batch size: 54, lr: 2.12e-02, grad_scale: 32.0 +2024-08-26 16:55:07,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=81792.0, ans=0.125 +2024-08-26 16:55:08,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=81792.0, ans=0.125 +2024-08-26 16:55:28,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=81898.66666666667, ans=0.0 +2024-08-26 16:55:37,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81952.0, ans=0.1 +2024-08-26 16:55:37,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81952.0, ans=0.0 +2024-08-26 16:55:45,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82005.33333333333, ans=0.125 +2024-08-26 16:55:49,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.01 vs. limit=15.0 +2024-08-26 16:55:51,737 INFO [train.py:1114] (3/4) Epoch 7, batch 450, loss[loss=0.2322, simple_loss=0.2984, pruned_loss=0.06011, ctc_loss=0.1143, over 19613.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3002, pruned_loss=0.06878, ctc_loss=0.1289, over 3449286.53 frames. 
], batch size: 55, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:01,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=82058.66666666667, ans=0.1 +2024-08-26 16:56:08,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82112.0, ans=0.1 +2024-08-26 16:56:15,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82165.33333333333, ans=0.125 +2024-08-26 16:56:20,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=82165.33333333333, ans=0.07 +2024-08-26 16:56:38,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=82218.66666666667, ans=0.95 +2024-08-26 16:56:41,731 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.250e+02 1.485e+02 1.753e+02 2.038e+02 3.855e+02, threshold=3.505e+02, percent-clipped=1.0 +2024-08-26 16:56:43,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.59 vs. limit=10.0 +2024-08-26 16:56:47,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=82272.0, ans=0.0 +2024-08-26 16:56:49,044 INFO [train.py:1114] (3/4) Epoch 7, batch 500, loss[loss=0.2493, simple_loss=0.3122, pruned_loss=0.06773, ctc_loss=0.1274, over 19659.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.2986, pruned_loss=0.06803, ctc_loss=0.1276, over 3543684.98 frames. ], batch size: 63, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:56:49,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=82325.33333333333, ans=0.2 +2024-08-26 16:56:52,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=82325.33333333333, ans=0.125 +2024-08-26 16:56:53,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=82325.33333333333, ans=0.125 +2024-08-26 16:56:58,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=82378.66666666667, ans=0.05 +2024-08-26 16:57:03,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82378.66666666667, ans=0.1 +2024-08-26 16:57:06,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=82378.66666666667, ans=0.0 +2024-08-26 16:57:15,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=22.5 +2024-08-26 16:57:33,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=82538.66666666667, ans=0.125 +2024-08-26 16:57:35,788 INFO [train.py:1114] (3/4) Epoch 7, batch 550, loss[loss=0.2545, simple_loss=0.3072, pruned_loss=0.0734, ctc_loss=0.1375, over 19276.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2989, pruned_loss=0.06828, ctc_loss=0.1281, over 3605939.25 frames. 
], batch size: 71, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:57:40,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82592.0, ans=0.125 +2024-08-26 16:57:41,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82592.0, ans=0.125 +2024-08-26 16:57:53,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=82698.66666666667, ans=0.0 +2024-08-26 16:58:00,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=82698.66666666667, ans=0.07 +2024-08-26 16:58:16,877 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.530e+02 1.701e+02 1.927e+02 4.407e+02, threshold=3.402e+02, percent-clipped=1.0 +2024-08-26 16:58:30,180 INFO [train.py:1114] (3/4) Epoch 7, batch 600, loss[loss=0.2604, simple_loss=0.3201, pruned_loss=0.07261, ctc_loss=0.1387, over 19414.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2984, pruned_loss=0.06793, ctc_loss=0.1277, over 3664664.55 frames. ], batch size: 67, lr: 2.11e-02, grad_scale: 16.0 +2024-08-26 16:58:58,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=82858.66666666667, ans=0.5 +2024-08-26 17:00:30,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=82858.66666666667, ans=0.125 +2024-08-26 17:00:31,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82912.0, ans=0.125 +2024-08-26 17:00:41,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.96 vs. limit=15.0 +2024-08-26 17:00:47,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=82965.33333333333, ans=0.2 +2024-08-26 17:04:51,935 INFO [train.py:1114] (3/4) Epoch 7, batch 650, loss[loss=0.2271, simple_loss=0.2921, pruned_loss=0.0582, ctc_loss=0.1142, over 19781.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2975, pruned_loss=0.06758, ctc_loss=0.1272, over 3715434.76 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:05:08,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=83178.66666666667, ans=0.125 +2024-08-26 17:05:12,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=83178.66666666667, ans=0.125 +2024-08-26 17:05:41,851 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.502e+02 1.666e+02 1.880e+02 3.682e+02, threshold=3.331e+02, percent-clipped=2.0 +2024-08-26 17:06:18,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.56 vs. limit=10.0 +2024-08-26 17:06:20,362 INFO [train.py:1114] (3/4) Epoch 7, batch 700, loss[loss=0.248, simple_loss=0.3064, pruned_loss=0.06939, ctc_loss=0.1271, over 19722.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.2987, pruned_loss=0.06818, ctc_loss=0.128, over 3747583.31 frames. 
], batch size: 51, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:06:32,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=83445.33333333333, ans=0.125 +2024-08-26 17:06:38,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=83445.33333333333, ans=0.125 +2024-08-26 17:06:41,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=83498.66666666667, ans=0.0 +2024-08-26 17:06:43,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=1.91 vs. limit=15.0 +2024-08-26 17:07:08,423 INFO [train.py:1114] (3/4) Epoch 7, batch 750, loss[loss=0.227, simple_loss=0.2898, pruned_loss=0.05969, ctc_loss=0.112, over 19486.00 frames. ], tot_loss[loss=0.242, simple_loss=0.298, pruned_loss=0.06763, ctc_loss=0.1269, over 3774145.79 frames. ], batch size: 54, lr: 2.10e-02, grad_scale: 16.0 +2024-08-26 17:07:17,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.06 vs. limit=22.5 +2024-08-26 17:07:20,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-08-26 17:07:22,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=83712.0, ans=0.2 +2024-08-26 17:07:44,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=83818.66666666667, ans=0.025 +2024-08-26 17:07:47,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=83872.0, ans=0.05 +2024-08-26 17:07:48,231 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.533e+02 1.678e+02 1.875e+02 3.166e+02, threshold=3.356e+02, percent-clipped=0.0 +2024-08-26 17:07:53,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=83872.0, ans=0.0 +2024-08-26 17:07:58,356 INFO [train.py:1114] (3/4) Epoch 7, batch 800, loss[loss=0.2464, simple_loss=0.2948, pruned_loss=0.07183, ctc_loss=0.136, over 19402.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2982, pruned_loss=0.06786, ctc_loss=0.1273, over 3794791.59 frames. ], batch size: 48, lr: 2.10e-02, grad_scale: 32.0 +2024-08-26 17:07:58,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-08-26 17:08:01,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=83925.33333333333, ans=0.125 +2024-08-26 17:08:05,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=83925.33333333333, ans=0.0 +2024-08-26 17:08:10,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.29 vs. 
limit=10.0 +2024-08-26 17:08:27,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=84032.0, ans=0.0 +2024-08-26 17:08:43,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84085.33333333333, ans=0.125 +2024-08-26 17:08:48,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=84138.66666666667, ans=0.125 +2024-08-26 17:08:50,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=84138.66666666667, ans=0.025 +2024-08-26 17:08:50,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=84138.66666666667, ans=0.2 +2024-08-26 17:08:56,346 INFO [train.py:1114] (3/4) Epoch 7, batch 850, loss[loss=0.2573, simple_loss=0.3158, pruned_loss=0.07275, ctc_loss=0.1333, over 19645.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2985, pruned_loss=0.06832, ctc_loss=0.128, over 3814345.83 frames. ], batch size: 59, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:08:57,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=84192.0, ans=0.125 +2024-08-26 17:09:51,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84352.0, ans=0.1 +2024-08-26 17:09:54,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0 +2024-08-26 17:09:59,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84405.33333333333, ans=0.125 +2024-08-26 17:10:00,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=84405.33333333333, ans=0.2 +2024-08-26 17:10:01,945 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.310e+02 1.545e+02 1.673e+02 1.909e+02 3.259e+02, threshold=3.346e+02, percent-clipped=0.0 +2024-08-26 17:10:02,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=84405.33333333333, ans=0.0 +2024-08-26 17:10:09,589 INFO [train.py:1114] (3/4) Epoch 7, batch 900, loss[loss=0.1955, simple_loss=0.2576, pruned_loss=0.04811, ctc_loss=0.09305, over 19424.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2989, pruned_loss=0.0684, ctc_loss=0.1279, over 3817862.89 frames. ], batch size: 48, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:10:09,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=84458.66666666667, ans=0.125 +2024-08-26 17:10:12,638 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:10:19,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.97 vs. 
limit=22.5 +2024-08-26 17:10:22,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=84512.0, ans=0.0 +2024-08-26 17:10:40,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=84618.66666666667, ans=0.0 +2024-08-26 17:10:40,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=84618.66666666667, ans=0.125 +2024-08-26 17:10:51,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=84672.0, ans=0.2 +2024-08-26 17:10:58,446 INFO [train.py:1114] (3/4) Epoch 7, batch 950, loss[loss=0.2177, simple_loss=0.2787, pruned_loss=0.05678, ctc_loss=0.1081, over 19497.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.2994, pruned_loss=0.06879, ctc_loss=0.1283, over 3819933.60 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 32.0 +2024-08-26 17:11:02,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.03 vs. limit=15.0 +2024-08-26 17:11:10,158 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:11:18,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=84778.66666666667, ans=0.0 +2024-08-26 17:11:19,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=84832.0, ans=0.0 +2024-08-26 17:11:35,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=6.0 +2024-08-26 17:11:48,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.566e+02 1.708e+02 1.976e+02 3.572e+02, threshold=3.415e+02, percent-clipped=1.0 +2024-08-26 17:12:18,435 INFO [train.py:1114] (3/4) Epoch 7, batch 1000, loss[loss=0.2185, simple_loss=0.281, pruned_loss=0.05643, ctc_loss=0.1081, over 19861.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.2999, pruned_loss=0.06895, ctc_loss=0.1289, over 3816057.85 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:12:31,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85045.33333333333, ans=0.125 +2024-08-26 17:12:33,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=85045.33333333333, ans=0.125 +2024-08-26 17:13:32,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.22 vs. limit=22.5 +2024-08-26 17:13:46,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=85152.0, ans=0.2 +2024-08-26 17:13:58,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=85205.33333333333, ans=0.0 +2024-08-26 17:13:59,705 INFO [train.py:1114] (3/4) Epoch 7, batch 1050, loss[loss=0.2411, simple_loss=0.3051, pruned_loss=0.06452, ctc_loss=0.1204, over 19825.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2986, pruned_loss=0.06836, ctc_loss=0.1276, over 3822662.85 frames. 
], batch size: 57, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:14:06,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=85258.66666666667, ans=0.2 +2024-08-26 17:14:07,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=85258.66666666667, ans=0.125 +2024-08-26 17:14:39,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=85312.0, ans=0.0 +2024-08-26 17:16:25,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85365.33333333333, ans=0.1 +2024-08-26 17:16:28,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=85418.66666666667, ans=0.125 +2024-08-26 17:16:40,677 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.449e+02 1.584e+02 1.768e+02 2.861e+02, threshold=3.169e+02, percent-clipped=0.0 +2024-08-26 17:16:48,372 INFO [train.py:1114] (3/4) Epoch 7, batch 1100, loss[loss=0.2497, simple_loss=0.3044, pruned_loss=0.07036, ctc_loss=0.1359, over 19586.00 frames. ], tot_loss[loss=0.243, simple_loss=0.2986, pruned_loss=0.06823, ctc_loss=0.1274, over 3829769.58 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 32.0 +2024-08-26 17:16:49,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=85525.33333333333, ans=0.125 +2024-08-26 17:16:54,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=85525.33333333333, ans=0.2 +2024-08-26 17:16:57,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-26 17:16:59,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-26 17:17:00,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=85578.66666666667, ans=0.0 +2024-08-26 17:17:01,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-26 17:17:02,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85578.66666666667, ans=0.0 +2024-08-26 17:17:03,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.63 vs. limit=22.5 +2024-08-26 17:17:03,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=85578.66666666667, ans=0.0 +2024-08-26 17:17:04,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=85578.66666666667, ans=0.125 +2024-08-26 17:17:07,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.27 vs. 
limit=15.0 +2024-08-26 17:17:09,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=85632.0, ans=0.125 +2024-08-26 17:17:32,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=85738.66666666667, ans=0.07 +2024-08-26 17:17:45,026 INFO [train.py:1114] (3/4) Epoch 7, batch 1150, loss[loss=0.2168, simple_loss=0.2771, pruned_loss=0.05651, ctc_loss=0.1088, over 19586.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.298, pruned_loss=0.06797, ctc_loss=0.127, over 3829012.43 frames. ], batch size: 52, lr: 2.08e-02, grad_scale: 16.0 +2024-08-26 17:17:47,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=12.0 +2024-08-26 17:18:01,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85845.33333333333, ans=0.125 +2024-08-26 17:18:17,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85952.0, ans=0.125 +2024-08-26 17:18:19,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=85952.0, ans=0.125 +2024-08-26 17:18:22,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=85952.0, ans=0.0 +2024-08-26 17:18:37,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=85952.0, ans=0.2 +2024-08-26 17:18:38,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.49 vs. limit=15.0 +2024-08-26 17:18:39,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.00 vs. limit=15.0 +2024-08-26 17:18:41,957 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.522e+02 1.667e+02 1.891e+02 3.736e+02, threshold=3.335e+02, percent-clipped=2.0 +2024-08-26 17:18:43,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86005.33333333333, ans=0.1 +2024-08-26 17:18:48,635 INFO [train.py:1114] (3/4) Epoch 7, batch 1200, loss[loss=0.2473, simple_loss=0.3077, pruned_loss=0.06793, ctc_loss=0.1274, over 19842.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.2989, pruned_loss=0.06846, ctc_loss=0.1279, over 3824988.64 frames. ], batch size: 57, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:18:57,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=86112.0, ans=0.125 +2024-08-26 17:19:31,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86218.66666666667, ans=0.1 +2024-08-26 17:19:44,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.90 vs. 
limit=15.0 +2024-08-26 17:19:48,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=86272.0, ans=0.125 +2024-08-26 17:19:54,900 INFO [train.py:1114] (3/4) Epoch 7, batch 1250, loss[loss=0.2657, simple_loss=0.3208, pruned_loss=0.07735, ctc_loss=0.1399, over 19533.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.299, pruned_loss=0.06816, ctc_loss=0.1271, over 3843369.71 frames. ], batch size: 61, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:20:02,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.15 vs. limit=15.0 +2024-08-26 17:20:35,650 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.207e+02 1.476e+02 1.609e+02 1.857e+02 3.245e+02, threshold=3.218e+02, percent-clipped=0.0 +2024-08-26 17:20:41,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=86538.66666666667, ans=0.125 +2024-08-26 17:20:44,737 INFO [train.py:1114] (3/4) Epoch 7, batch 1300, loss[loss=0.2486, simple_loss=0.2994, pruned_loss=0.07143, ctc_loss=0.1373, over 18798.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2982, pruned_loss=0.06797, ctc_loss=0.1268, over 3845458.99 frames. ], batch size: 76, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:20:48,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-08-26 17:20:54,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86645.33333333333, ans=0.1 +2024-08-26 17:20:56,425 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:20:58,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=86645.33333333333, ans=0.125 +2024-08-26 17:21:10,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=86698.66666666667, ans=0.025 +2024-08-26 17:21:16,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=86752.0, ans=0.0 +2024-08-26 17:21:38,925 INFO [train.py:1114] (3/4) Epoch 7, batch 1350, loss[loss=0.2536, simple_loss=0.3057, pruned_loss=0.07228, ctc_loss=0.1424, over 19761.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2975, pruned_loss=0.06737, ctc_loss=0.1257, over 3857306.00 frames. ], batch size: 54, lr: 2.07e-02, grad_scale: 32.0 +2024-08-26 17:21:46,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=86858.66666666667, ans=0.125 +2024-08-26 17:21:46,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86858.66666666667, ans=0.125 +2024-08-26 17:21:48,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.21 vs. 
limit=22.5 +2024-08-26 17:22:11,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=87018.66666666667, ans=0.125 +2024-08-26 17:22:11,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.54 vs. limit=10.0 +2024-08-26 17:22:19,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.495e+02 1.726e+02 1.992e+02 3.104e+02, threshold=3.452e+02, percent-clipped=0.0 +2024-08-26 17:22:21,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=87072.0, ans=0.0 +2024-08-26 17:22:22,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=87072.0, ans=10.0 +2024-08-26 17:22:26,105 INFO [train.py:1114] (3/4) Epoch 7, batch 1400, loss[loss=0.2172, simple_loss=0.2706, pruned_loss=0.0601, ctc_loss=0.109, over 19672.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2973, pruned_loss=0.06745, ctc_loss=0.126, over 3863716.77 frames. ], batch size: 46, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:22:30,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.60 vs. limit=10.0 +2024-08-26 17:22:31,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=87125.33333333333, ans=0.0 +2024-08-26 17:22:44,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=87232.0, ans=0.125 +2024-08-26 17:22:59,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=87232.0, ans=0.125 +2024-08-26 17:23:28,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=87338.66666666667, ans=0.125 +2024-08-26 17:23:35,694 INFO [train.py:1114] (3/4) Epoch 7, batch 1450, loss[loss=0.2403, simple_loss=0.3095, pruned_loss=0.06194, ctc_loss=0.118, over 19679.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2976, pruned_loss=0.06743, ctc_loss=0.126, over 3861825.90 frames. ], batch size: 63, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:23:48,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=87445.33333333333, ans=0.125 +2024-08-26 17:23:49,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87445.33333333333, ans=0.1 +2024-08-26 17:23:57,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=87498.66666666667, ans=0.125 +2024-08-26 17:24:10,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.70 vs. 
limit=15.0 +2024-08-26 17:24:18,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=87605.33333333333, ans=0.125 +2024-08-26 17:24:20,640 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.540e+02 1.669e+02 1.894e+02 3.453e+02, threshold=3.338e+02, percent-clipped=1.0 +2024-08-26 17:24:29,703 INFO [train.py:1114] (3/4) Epoch 7, batch 1500, loss[loss=0.2434, simple_loss=0.3046, pruned_loss=0.06535, ctc_loss=0.1288, over 19590.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2982, pruned_loss=0.06775, ctc_loss=0.1265, over 3860974.96 frames. ], batch size: 57, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:24:34,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=87658.66666666667, ans=0.125 +2024-08-26 17:24:40,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.11 vs. limit=15.0 +2024-08-26 17:24:43,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87712.0, ans=0.0 +2024-08-26 17:24:53,338 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:24:55,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87765.33333333333, ans=0.1 +2024-08-26 17:25:03,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=87818.66666666667, ans=0.125 +2024-08-26 17:25:11,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=87872.0, ans=0.05 +2024-08-26 17:25:19,515 INFO [train.py:1114] (3/4) Epoch 7, batch 1550, loss[loss=0.2898, simple_loss=0.3347, pruned_loss=0.08873, ctc_loss=0.1685, over 19590.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2984, pruned_loss=0.06799, ctc_loss=0.127, over 3846531.22 frames. ], batch size: 60, lr: 2.06e-02, grad_scale: 32.0 +2024-08-26 17:25:24,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87925.33333333333, ans=0.1 +2024-08-26 17:25:31,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=87978.66666666667, ans=0.125 +2024-08-26 17:25:31,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87978.66666666667, ans=0.1 +2024-08-26 17:25:45,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=88032.0, ans=0.125 +2024-08-26 17:25:46,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=88032.0, ans=0.125 +2024-08-26 17:26:04,265 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.330e+02 1.559e+02 1.788e+02 2.182e+02 5.116e+02, threshold=3.576e+02, percent-clipped=3.0 +2024-08-26 17:26:10,936 INFO [train.py:1114] (3/4) Epoch 7, batch 1600, loss[loss=0.2422, simple_loss=0.302, pruned_loss=0.06649, ctc_loss=0.1236, over 19851.00 frames. 
], tot_loss[loss=0.2433, simple_loss=0.2988, pruned_loss=0.06845, ctc_loss=0.1275, over 3835162.06 frames. ], batch size: 57, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:26:23,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=88245.33333333333, ans=0.0 +2024-08-26 17:26:23,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=88245.33333333333, ans=0.0 +2024-08-26 17:26:29,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=88298.66666666667, ans=0.0 +2024-08-26 17:26:32,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=88298.66666666667, ans=0.2 +2024-08-26 17:27:01,933 INFO [train.py:1114] (3/4) Epoch 7, batch 1650, loss[loss=0.2366, simple_loss=0.3007, pruned_loss=0.06257, ctc_loss=0.1185, over 19636.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.2983, pruned_loss=0.06821, ctc_loss=0.1274, over 3832284.41 frames. ], batch size: 59, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:27:07,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=88458.66666666667, ans=0.125 +2024-08-26 17:28:04,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=88512.0, ans=0.0 +2024-08-26 17:28:04,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=88512.0, ans=0.125 +2024-08-26 17:28:46,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=88618.66666666667, ans=0.0 +2024-08-26 17:28:58,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=88618.66666666667, ans=0.125 +2024-08-26 17:29:25,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.503e+02 1.653e+02 1.809e+02 2.992e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-26 17:29:26,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=88672.0, ans=0.125 +2024-08-26 17:29:40,037 INFO [train.py:1114] (3/4) Epoch 7, batch 1700, loss[loss=0.1992, simple_loss=0.2493, pruned_loss=0.05462, ctc_loss=0.09972, over 19685.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2977, pruned_loss=0.06762, ctc_loss=0.1265, over 3847011.65 frames. ], batch size: 46, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:30:09,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=88778.66666666667, ans=0.0 +2024-08-26 17:30:20,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=88832.0, ans=0.125 +2024-08-26 17:30:26,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.55 vs. limit=22.5 +2024-08-26 17:30:34,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=88938.66666666667, ans=0.125 +2024-08-26 17:30:44,524 INFO [train.py:1114] (3/4) Epoch 7, batch 1750, loss[loss=0.2134, simple_loss=0.2701, pruned_loss=0.05561, ctc_loss=0.1135, over 19671.00 frames. 
], tot_loss[loss=0.2403, simple_loss=0.2967, pruned_loss=0.06693, ctc_loss=0.1251, over 3852333.62 frames. ], batch size: 45, lr: 2.05e-02, grad_scale: 32.0 +2024-08-26 17:30:53,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=89045.33333333333, ans=0.0 +2024-08-26 17:31:23,272 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.487e+02 1.622e+02 1.808e+02 3.869e+02, threshold=3.245e+02, percent-clipped=1.0 +2024-08-26 17:31:29,433 INFO [train.py:1114] (3/4) Epoch 7, batch 1800, loss[loss=0.2272, simple_loss=0.2955, pruned_loss=0.05784, ctc_loss=0.1082, over 19612.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.2965, pruned_loss=0.06688, ctc_loss=0.1248, over 3854206.50 frames. ], batch size: 55, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:31:39,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=89312.0, ans=0.125 +2024-08-26 17:31:39,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=89312.0, ans=0.2 +2024-08-26 17:31:40,285 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:31:44,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=89312.0, ans=0.125 +2024-08-26 17:31:46,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=89365.33333333333, ans=0.125 +2024-08-26 17:31:52,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=89365.33333333333, ans=0.0 +2024-08-26 17:31:53,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=89365.33333333333, ans=0.09899494936611666 +2024-08-26 17:31:55,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=89418.66666666667, ans=0.0 +2024-08-26 17:32:08,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=89472.0, ans=0.0 +2024-08-26 17:32:14,097 INFO [train.py:1114] (3/4) Epoch 7, batch 1850, loss[loss=0.2532, simple_loss=0.3058, pruned_loss=0.07304, ctc_loss=0.1363, over 19581.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.296, pruned_loss=0.06641, ctc_loss=0.1239, over 3857402.30 frames. ], batch size: 57, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:32:18,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=89525.33333333333, ans=0.0 +2024-08-26 17:32:23,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=89578.66666666667, ans=0.025 +2024-08-26 17:32:35,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.98 vs. limit=15.0 +2024-08-26 17:32:40,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=89685.33333333333, ans=0.025 +2024-08-26 17:32:43,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.39 vs. 
limit=15.0 +2024-08-26 17:32:48,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=89685.33333333333, ans=0.2 +2024-08-26 17:32:55,554 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.590e+02 1.759e+02 2.003e+02 3.443e+02, threshold=3.517e+02, percent-clipped=1.0 +2024-08-26 17:33:01,860 INFO [train.py:1114] (3/4) Epoch 7, batch 1900, loss[loss=0.2545, simple_loss=0.3191, pruned_loss=0.06915, ctc_loss=0.1292, over 19648.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.2967, pruned_loss=0.06648, ctc_loss=0.1239, over 3861127.34 frames. ], batch size: 59, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:33:17,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=89845.33333333333, ans=0.0 +2024-08-26 17:35:05,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=89952.0, ans=0.125 +2024-08-26 17:35:19,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=90005.33333333333, ans=0.0 +2024-08-26 17:35:23,460 INFO [train.py:1114] (3/4) Epoch 7, batch 1950, loss[loss=0.244, simple_loss=0.3029, pruned_loss=0.06809, ctc_loss=0.1224, over 19592.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2982, pruned_loss=0.06685, ctc_loss=0.1247, over 3870000.94 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0 +2024-08-26 17:35:31,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.43 vs. limit=10.0 +2024-08-26 17:35:38,121 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.86 vs. limit=6.0 +2024-08-26 17:35:39,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90112.0, ans=0.125 +2024-08-26 17:35:42,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90165.33333333333, ans=0.125 +2024-08-26 17:35:50,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=90218.66666666667, ans=0.025 +2024-08-26 17:36:02,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=90272.0, ans=0.125 +2024-08-26 17:36:03,243 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.531e+02 1.657e+02 1.854e+02 3.915e+02, threshold=3.314e+02, percent-clipped=1.0 +2024-08-26 17:36:09,467 INFO [train.py:1114] (3/4) Epoch 7, batch 2000, loss[loss=0.2002, simple_loss=0.2563, pruned_loss=0.0528, ctc_loss=0.09612, over 19647.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2984, pruned_loss=0.06714, ctc_loss=0.1254, over 3853995.48 frames. 
], batch size: 45, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:10,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90325.33333333333, ans=0.125 +2024-08-26 17:36:10,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=90325.33333333333, ans=0.0 +2024-08-26 17:36:11,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0 +2024-08-26 17:36:18,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:20,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=90378.66666666667, ans=0.125 +2024-08-26 17:36:29,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=90432.0, ans=0.025 +2024-08-26 17:36:30,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=90432.0, ans=0.125 +2024-08-26 17:36:40,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=90485.33333333333, ans=0.2 +2024-08-26 17:36:46,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=90538.66666666667, ans=0.0 +2024-08-26 17:36:53,978 INFO [train.py:1114] (3/4) Epoch 7, batch 2050, loss[loss=0.215, simple_loss=0.2696, pruned_loss=0.05933, ctc_loss=0.1045, over 19728.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2972, pruned_loss=0.06672, ctc_loss=0.1246, over 3850036.56 frames. ], batch size: 47, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:36:56,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=90592.0, ans=0.125 +2024-08-26 17:37:05,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=90645.33333333333, ans=0.125 +2024-08-26 17:37:12,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=90698.66666666667, ans=0.0 +2024-08-26 17:37:18,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=90698.66666666667, ans=0.125 +2024-08-26 17:37:19,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90752.0, ans=0.1 +2024-08-26 17:37:32,312 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.477e+02 1.642e+02 1.962e+02 4.346e+02, threshold=3.284e+02, percent-clipped=3.0 +2024-08-26 17:37:38,498 INFO [train.py:1114] (3/4) Epoch 7, batch 2100, loss[loss=0.2467, simple_loss=0.3035, pruned_loss=0.06995, ctc_loss=0.1249, over 19769.00 frames. ], tot_loss[loss=0.239, simple_loss=0.2965, pruned_loss=0.06606, ctc_loss=0.1235, over 3858319.64 frames. 
], batch size: 54, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:37:41,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90858.66666666667, ans=0.125 +2024-08-26 17:37:41,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.57 vs. limit=22.5 +2024-08-26 17:37:46,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.98 vs. limit=15.0 +2024-08-26 17:37:52,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=90912.0, ans=0.2 +2024-08-26 17:37:54,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=90912.0, ans=0.125 +2024-08-26 17:37:55,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90912.0, ans=0.125 +2024-08-26 17:38:05,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=90965.33333333333, ans=0.125 +2024-08-26 17:38:13,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=91018.66666666667, ans=0.125 +2024-08-26 17:38:13,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=91018.66666666667, ans=0.2 +2024-08-26 17:38:16,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=91072.0, ans=0.125 +2024-08-26 17:38:26,451 INFO [train.py:1114] (3/4) Epoch 7, batch 2150, loss[loss=0.2356, simple_loss=0.2936, pruned_loss=0.06419, ctc_loss=0.1231, over 19860.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.2952, pruned_loss=0.06565, ctc_loss=0.1226, over 3869465.65 frames. 
], batch size: 52, lr: 2.03e-02, grad_scale: 32.0 +2024-08-26 17:38:31,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91125.33333333333, ans=0.125 +2024-08-26 17:38:35,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=91178.66666666667, ans=0.0 +2024-08-26 17:38:35,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=91178.66666666667, ans=0.2 +2024-08-26 17:38:35,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91178.66666666667, ans=0.125 +2024-08-26 17:38:38,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91178.66666666667, ans=0.1 +2024-08-26 17:38:46,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=91232.0, ans=0.07 +2024-08-26 17:38:50,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=91232.0, ans=0.125 +2024-08-26 17:38:56,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=91285.33333333333, ans=0.125 +2024-08-26 17:39:03,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=91338.66666666667, ans=0.125 +2024-08-26 17:39:04,131 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.485e+02 1.702e+02 1.931e+02 2.999e+02, threshold=3.403e+02, percent-clipped=0.0 +2024-08-26 17:39:10,380 INFO [train.py:1114] (3/4) Epoch 7, batch 2200, loss[loss=0.2355, simple_loss=0.299, pruned_loss=0.06229, ctc_loss=0.1182, over 19586.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.2955, pruned_loss=0.0659, ctc_loss=0.1232, over 3868252.74 frames. ], batch size: 57, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:39:16,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=91392.0, ans=0.0 +2024-08-26 17:39:27,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=91498.66666666667, ans=0.125 +2024-08-26 17:39:30,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91498.66666666667, ans=0.125 +2024-08-26 17:39:32,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0 +2024-08-26 17:39:37,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=91552.0, ans=0.125 +2024-08-26 17:39:47,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=91605.33333333333, ans=0.0 +2024-08-26 17:39:54,470 INFO [train.py:1114] (3/4) Epoch 7, batch 2250, loss[loss=0.2253, simple_loss=0.2925, pruned_loss=0.0564, ctc_loss=0.1133, over 19598.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.296, pruned_loss=0.06606, ctc_loss=0.1236, over 3868037.68 frames. 
], batch size: 55, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:39:57,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.89 vs. limit=15.0 +2024-08-26 17:40:11,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=91765.33333333333, ans=0.125 +2024-08-26 17:40:31,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-08-26 17:40:32,444 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.538e+02 1.708e+02 1.997e+02 3.315e+02, threshold=3.416e+02, percent-clipped=0.0 +2024-08-26 17:40:38,601 INFO [train.py:1114] (3/4) Epoch 7, batch 2300, loss[loss=0.22, simple_loss=0.283, pruned_loss=0.05685, ctc_loss=0.1082, over 19499.00 frames. ], tot_loss[loss=0.239, simple_loss=0.2955, pruned_loss=0.06638, ctc_loss=0.1242, over 3860645.93 frames. ], batch size: 49, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:40:51,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=91978.66666666667, ans=0.2 +2024-08-26 17:40:56,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.87 vs. limit=22.5 +2024-08-26 17:40:58,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92032.0, ans=0.1 +2024-08-26 17:40:59,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=92032.0, ans=0.0 +2024-08-26 17:41:01,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=92032.0, ans=0.125 +2024-08-26 17:41:01,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=92032.0, ans=0.025 +2024-08-26 17:41:01,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=92032.0, ans=0.0 +2024-08-26 17:41:11,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=92085.33333333333, ans=0.125 +2024-08-26 17:41:15,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=92138.66666666667, ans=0.0 +2024-08-26 17:41:22,880 INFO [train.py:1114] (3/4) Epoch 7, batch 2350, loss[loss=0.243, simple_loss=0.2989, pruned_loss=0.06907, ctc_loss=0.1222, over 19666.00 frames. ], tot_loss[loss=0.239, simple_loss=0.2954, pruned_loss=0.06648, ctc_loss=0.1241, over 3862855.98 frames. 
], batch size: 63, lr: 2.02e-02, grad_scale: 32.0 +2024-08-26 17:41:38,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92245.33333333333, ans=0.125 +2024-08-26 17:42:01,686 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.515e+02 1.664e+02 1.862e+02 3.479e+02, threshold=3.327e+02, percent-clipped=1.0 +2024-08-26 17:42:06,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=92458.66666666667, ans=0.125 +2024-08-26 17:42:06,885 INFO [train.py:1114] (3/4) Epoch 7, batch 2400, loss[loss=0.2453, simple_loss=0.3051, pruned_loss=0.06704, ctc_loss=0.1284, over 19292.00 frames. ], tot_loss[loss=0.242, simple_loss=0.2982, pruned_loss=0.06771, ctc_loss=0.126, over 3857788.50 frames. ], batch size: 71, lr: 2.01e-02, grad_scale: 32.0 +2024-08-26 17:42:10,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.70 vs. limit=15.0 +2024-08-26 17:42:13,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.22 vs. limit=15.0 +2024-08-26 17:42:34,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=92618.66666666667, ans=0.2 +2024-08-26 17:42:36,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=92618.66666666667, ans=0.025 +2024-08-26 17:42:38,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92618.66666666667, ans=0.1 +2024-08-26 17:42:47,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=92672.0, ans=0.2 +2024-08-26 17:42:56,036 INFO [train.py:1114] (3/4) Epoch 7, batch 2450, loss[loss=0.3454, simple_loss=0.3509, pruned_loss=0.1256, ctc_loss=0.2217, over 13682.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.303, pruned_loss=0.07168, ctc_loss=0.1336, over 3730784.06 frames. ], batch size: 140, lr: 2.01e-02, grad_scale: 16.0 +2024-08-26 17:43:11,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.96 vs. limit=15.0 +2024-08-26 17:43:15,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=92832.0, ans=0.0 +2024-08-26 17:43:17,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=92832.0, ans=0.125 +2024-08-26 17:43:29,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92885.33333333333, ans=0.125 +2024-08-26 17:44:23,169 INFO [train.py:1114] (3/4) Epoch 8, batch 0, loss[loss=0.2244, simple_loss=0.2785, pruned_loss=0.06227, ctc_loss=0.1141, over 19816.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2785, pruned_loss=0.06227, ctc_loss=0.1141, over 19816.00 frames. 
], batch size: 49, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:44:23,170 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 17:44:40,887 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0021, 4.6709, 4.5202, 4.5548], device='cuda:3') +2024-08-26 17:44:49,261 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.2003, simple_loss=0.2903, pruned_loss=0.04062, ctc_loss=0.07268, over 944034.00 frames. +2024-08-26 17:44:49,261 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12874MB +2024-08-26 17:44:55,035 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.675e+02 1.918e+02 2.084e+02 4.365e+02, threshold=3.836e+02, percent-clipped=1.0 +2024-08-26 17:44:56,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.04 vs. limit=15.0 +2024-08-26 17:45:13,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-08-26 17:45:18,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=92986.66666666667, ans=0.125 +2024-08-26 17:45:19,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=8.0 +2024-08-26 17:45:38,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=93146.66666666667, ans=0.0 +2024-08-26 17:45:52,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93146.66666666667, ans=0.1 +2024-08-26 17:45:54,274 INFO [train.py:1114] (3/4) Epoch 8, batch 50, loss[loss=0.2187, simple_loss=0.2773, pruned_loss=0.05894, ctc_loss=0.1055, over 19730.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.2991, pruned_loss=0.06759, ctc_loss=0.1278, over 845075.57 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:45:58,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=93200.0, ans=0.125 +2024-08-26 17:46:01,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-08-26 17:46:23,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93360.0, ans=0.1 +2024-08-26 17:46:29,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-08-26 17:46:42,914 INFO [train.py:1114] (3/4) Epoch 8, batch 100, loss[loss=0.2299, simple_loss=0.2851, pruned_loss=0.06328, ctc_loss=0.1204, over 19721.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2999, pruned_loss=0.06717, ctc_loss=0.1257, over 1500583.81 frames. ], batch size: 51, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:46:48,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.11 vs. 
limit=15.0 +2024-08-26 17:46:48,512 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.574e+02 1.749e+02 2.053e+02 3.512e+02, threshold=3.498e+02, percent-clipped=0.0 +2024-08-26 17:47:15,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=93626.66666666667, ans=0.2 +2024-08-26 17:47:20,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93626.66666666667, ans=0.1 +2024-08-26 17:47:32,171 INFO [train.py:1114] (3/4) Epoch 8, batch 150, loss[loss=0.211, simple_loss=0.2672, pruned_loss=0.05659, ctc_loss=0.1038, over 19701.00 frames. ], tot_loss[loss=0.238, simple_loss=0.2961, pruned_loss=0.0655, ctc_loss=0.1226, over 2027987.36 frames. ], batch size: 47, lr: 1.89e-02, grad_scale: 32.0 +2024-08-26 17:47:36,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93733.33333333333, ans=0.125 +2024-08-26 17:47:54,974 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:48:20,173 INFO [train.py:1114] (3/4) Epoch 8, batch 200, loss[loss=0.2408, simple_loss=0.3103, pruned_loss=0.06222, ctc_loss=0.1169, over 18240.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2944, pruned_loss=0.06478, ctc_loss=0.1212, over 2435470.75 frames. ], batch size: 85, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:48:25,559 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.434e+02 1.574e+02 1.787e+02 2.973e+02, threshold=3.148e+02, percent-clipped=0.0 +2024-08-26 17:48:36,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=94053.33333333333, ans=0.125 +2024-08-26 17:48:43,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=12.0 +2024-08-26 17:48:46,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=94106.66666666667, ans=0.125 +2024-08-26 17:48:52,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94160.0, ans=0.125 +2024-08-26 17:48:56,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94160.0, ans=0.1 +2024-08-26 17:49:12,238 INFO [train.py:1114] (3/4) Epoch 8, batch 250, loss[loss=0.2474, simple_loss=0.3117, pruned_loss=0.06703, ctc_loss=0.1227, over 19306.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2937, pruned_loss=0.06403, ctc_loss=0.12, over 2755765.24 frames. ], batch size: 67, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:49:13,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0 +2024-08-26 17:49:22,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=94266.66666666667, ans=0.125 +2024-08-26 17:49:22,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. 
limit=12.0 +2024-08-26 17:49:31,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=94320.0, ans=10.0 +2024-08-26 17:49:52,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=94426.66666666667, ans=0.125 +2024-08-26 17:49:59,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=94480.0, ans=0.125 +2024-08-26 17:50:03,382 INFO [train.py:1114] (3/4) Epoch 8, batch 300, loss[loss=0.2237, simple_loss=0.295, pruned_loss=0.05496, ctc_loss=0.1064, over 19546.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2935, pruned_loss=0.06387, ctc_loss=0.1199, over 3000913.20 frames. ], batch size: 61, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:50:04,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.15 vs. limit=22.5 +2024-08-26 17:50:04,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=94533.33333333333, ans=0.0 +2024-08-26 17:50:09,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.05 vs. limit=10.0 +2024-08-26 17:50:09,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.482e+02 1.652e+02 1.879e+02 4.693e+02, threshold=3.305e+02, percent-clipped=1.0 +2024-08-26 17:50:33,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=94693.33333333333, ans=0.125 +2024-08-26 17:50:34,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94693.33333333333, ans=0.1 +2024-08-26 17:50:44,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=94746.66666666667, ans=0.0 +2024-08-26 17:50:50,296 INFO [train.py:1114] (3/4) Epoch 8, batch 350, loss[loss=0.2147, simple_loss=0.2659, pruned_loss=0.05891, ctc_loss=0.1142, over 19750.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2936, pruned_loss=0.064, ctc_loss=0.1202, over 3189723.59 frames. ], batch size: 48, lr: 1.88e-02, grad_scale: 32.0 +2024-08-26 17:50:50,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.54 vs. limit=6.0 +2024-08-26 17:51:17,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=15.0 +2024-08-26 17:51:23,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94960.0, ans=0.1 +2024-08-26 17:52:03,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95013.33333333333, ans=0.125 +2024-08-26 17:52:03,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=95013.33333333333, ans=0.025 +2024-08-26 17:52:19,164 INFO [train.py:1114] (3/4) Epoch 8, batch 400, loss[loss=0.2269, simple_loss=0.2939, pruned_loss=0.05759, ctc_loss=0.112, over 19466.00 frames. 
], tot_loss[loss=0.234, simple_loss=0.293, pruned_loss=0.06362, ctc_loss=0.1194, over 3341207.10 frames. ], batch size: 54, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:52:21,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=95066.66666666667, ans=0.0 +2024-08-26 17:52:24,629 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.326e+02 1.574e+02 1.829e+02 2.059e+02 4.627e+02, threshold=3.659e+02, percent-clipped=2.0 +2024-08-26 17:52:27,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0 +2024-08-26 17:52:39,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.19 vs. limit=6.0 +2024-08-26 17:52:42,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95173.33333333333, ans=0.125 +2024-08-26 17:52:43,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=95173.33333333333, ans=15.0 +2024-08-26 17:53:08,495 INFO [train.py:1114] (3/4) Epoch 8, batch 450, loss[loss=0.2249, simple_loss=0.2971, pruned_loss=0.05475, ctc_loss=0.108, over 19616.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.293, pruned_loss=0.06353, ctc_loss=0.1194, over 3451064.08 frames. ], batch size: 55, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:53:18,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95333.33333333333, ans=0.125 +2024-08-26 17:53:18,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=95333.33333333333, ans=0.125 +2024-08-26 17:53:30,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=95440.0, ans=0.2 +2024-08-26 17:53:41,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=95493.33333333333, ans=0.0 +2024-08-26 17:53:51,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95546.66666666667, ans=0.125 +2024-08-26 17:53:52,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=95546.66666666667, ans=0.2 +2024-08-26 17:53:54,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.13 vs. limit=22.5 +2024-08-26 17:53:58,085 INFO [train.py:1114] (3/4) Epoch 8, batch 500, loss[loss=0.231, simple_loss=0.304, pruned_loss=0.05721, ctc_loss=0.109, over 19674.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.292, pruned_loss=0.06294, ctc_loss=0.1184, over 3546644.13 frames. 
], batch size: 63, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:53:58,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95600.0, ans=0.1 +2024-08-26 17:54:03,653 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.468e+02 1.609e+02 1.778e+02 4.606e+02, threshold=3.218e+02, percent-clipped=1.0 +2024-08-26 17:54:28,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95653.33333333333, ans=0.1 +2024-08-26 17:54:44,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95760.0, ans=0.125 +2024-08-26 17:55:50,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95813.33333333333, ans=0.1 +2024-08-26 17:56:20,603 INFO [train.py:1114] (3/4) Epoch 8, batch 550, loss[loss=0.2578, simple_loss=0.3102, pruned_loss=0.07377, ctc_loss=0.1446, over 19235.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.292, pruned_loss=0.06308, ctc_loss=0.1187, over 3609559.30 frames. ], batch size: 71, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:57:16,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=95866.66666666667, ans=0.0 +2024-08-26 17:57:16,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=95866.66666666667, ans=0.0 +2024-08-26 17:57:25,858 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 17:57:58,528 INFO [train.py:1114] (3/4) Epoch 8, batch 600, loss[loss=0.2208, simple_loss=0.2944, pruned_loss=0.05328, ctc_loss=0.1014, over 19404.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2926, pruned_loss=0.06322, ctc_loss=0.1187, over 3666516.95 frames. ], batch size: 67, lr: 1.87e-02, grad_scale: 32.0 +2024-08-26 17:58:00,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0 +2024-08-26 17:58:05,964 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.508e+02 1.654e+02 1.896e+02 3.415e+02, threshold=3.309e+02, percent-clipped=1.0 +2024-08-26 17:58:23,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.96 vs. limit=15.0 +2024-08-26 17:58:23,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.86 vs. limit=15.0 +2024-08-26 17:58:49,409 INFO [train.py:1114] (3/4) Epoch 8, batch 650, loss[loss=0.2159, simple_loss=0.2887, pruned_loss=0.05179, ctc_loss=0.09892, over 19766.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2915, pruned_loss=0.0627, ctc_loss=0.1177, over 3716400.18 frames. 
], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 17:59:00,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96453.33333333333, ans=0.1 +2024-08-26 17:59:11,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=96506.66666666667, ans=0.125 +2024-08-26 17:59:15,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=96506.66666666667, ans=0.2 +2024-08-26 17:59:26,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=96613.33333333333, ans=0.125 +2024-08-26 17:59:36,097 INFO [train.py:1114] (3/4) Epoch 8, batch 700, loss[loss=0.2404, simple_loss=0.2898, pruned_loss=0.06946, ctc_loss=0.1302, over 19724.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2916, pruned_loss=0.06261, ctc_loss=0.1176, over 3748173.97 frames. ], batch size: 51, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 17:59:36,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=96666.66666666667, ans=0.125 +2024-08-26 17:59:41,818 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.481e+02 1.644e+02 1.817e+02 3.294e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-26 17:59:49,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96720.0, ans=0.125 +2024-08-26 17:59:55,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-26 18:00:27,683 INFO [train.py:1114] (3/4) Epoch 8, batch 750, loss[loss=0.2363, simple_loss=0.3013, pruned_loss=0.06192, ctc_loss=0.1189, over 19509.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2914, pruned_loss=0.06266, ctc_loss=0.1176, over 3774076.83 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 18:00:38,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=96986.66666666667, ans=0.125 +2024-08-26 18:00:40,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=96986.66666666667, ans=0.125 +2024-08-26 18:01:00,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=97093.33333333333, ans=0.0 +2024-08-26 18:01:19,055 INFO [train.py:1114] (3/4) Epoch 8, batch 800, loss[loss=0.1946, simple_loss=0.2586, pruned_loss=0.04679, ctc_loss=0.09262, over 19806.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2914, pruned_loss=0.06278, ctc_loss=0.1176, over 3795285.75 frames. ], batch size: 49, lr: 1.86e-02, grad_scale: 32.0 +2024-08-26 18:01:24,570 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.524e+02 1.729e+02 2.039e+02 3.596e+02, threshold=3.457e+02, percent-clipped=1.0 +2024-08-26 18:01:28,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=97253.33333333333, ans=0.125 +2024-08-26 18:01:30,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.89 vs. 
+2024-08-26 18:02:01,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=97413.33333333333, ans=0.125
+2024-08-26 18:02:03,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97413.33333333333, ans=0.125
+2024-08-26 18:02:05,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97466.66666666667, ans=0.125
+2024-08-26 18:02:06,309 INFO [train.py:1114] (3/4) Epoch 8, batch 850, loss[loss=0.2329, simple_loss=0.2986, pruned_loss=0.06116, ctc_loss=0.1125, over 19658.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2911, pruned_loss=0.06265, ctc_loss=0.1175, over 3815373.82 frames. ], batch size: 59, lr: 1.85e-02, grad_scale: 32.0
+2024-08-26 18:02:15,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=97520.0, ans=0.125
+2024-08-26 18:02:15,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=97520.0, ans=0.0
+2024-08-26 18:02:24,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=97573.33333333333, ans=0.5
+2024-08-26 18:02:25,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=97573.33333333333, ans=0.2
+2024-08-26 18:02:31,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=97573.33333333333, ans=0.2
+2024-08-26 18:02:35,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.02 vs. limit=15.0
+2024-08-26 18:02:42,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=97626.66666666667, ans=0.125
+2024-08-26 18:02:44,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97680.0, ans=0.125
+2024-08-26 18:02:49,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=97680.0, ans=0.125
+2024-08-26 18:02:57,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=97733.33333333333, ans=0.0
+2024-08-26 18:02:58,356 INFO [train.py:1114] (3/4) Epoch 8, batch 900, loss[loss=0.2053, simple_loss=0.2669, pruned_loss=0.05151, ctc_loss=0.1015, over 19419.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2919, pruned_loss=0.06319, ctc_loss=0.1184, over 3817949.58 frames. ], batch size: 48, lr: 1.85e-02, grad_scale: 32.0
+2024-08-26 18:03:00,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97733.33333333333, ans=0.125
+2024-08-26 18:03:04,011 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.578e+02 1.704e+02 2.106e+02 3.434e+02, threshold=3.409e+02, percent-clipped=0.0
+2024-08-26 18:03:11,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=97786.66666666667, ans=0.125
+2024-08-26 18:03:17,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97840.0, ans=0.1
+2024-08-26 18:03:19,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=97840.0, ans=0.0
+2024-08-26 18:03:35,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=97946.66666666667, ans=0.025
+2024-08-26 18:03:42,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97946.66666666667, ans=0.1
+2024-08-26 18:03:45,538 INFO [train.py:1114] (3/4) Epoch 8, batch 950, loss[loss=0.2151, simple_loss=0.2752, pruned_loss=0.05645, ctc_loss=0.1054, over 19496.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.2919, pruned_loss=0.06329, ctc_loss=0.1184, over 3819282.94 frames. ], batch size: 49, lr: 1.85e-02, grad_scale: 16.0
+2024-08-26 18:03:50,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=98000.0, ans=0.2
+2024-08-26 18:04:06,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=98053.33333333333, ans=0.0
+2024-08-26 18:04:07,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=98053.33333333333, ans=0.125
+2024-08-26 18:04:17,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=98106.66666666667, ans=0.2
+2024-08-26 18:04:37,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=98266.66666666667, ans=0.0
+2024-08-26 18:04:37,646 INFO [train.py:1114] (3/4) Epoch 8, batch 1000, loss[loss=0.2052, simple_loss=0.2713, pruned_loss=0.05112, ctc_loss=0.09222, over 19860.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2929, pruned_loss=0.06378, ctc_loss=0.1193, over 3815697.00 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 16.0
+2024-08-26 18:04:37,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=98266.66666666667, ans=0.125
+2024-08-26 18:04:44,381 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.497e+02 1.652e+02 1.874e+02 4.992e+02, threshold=3.305e+02, percent-clipped=2.0
+2024-08-26 18:04:53,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=98320.0, ans=0.025
+2024-08-26 18:05:07,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.09 vs. limit=15.0
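Editor's note: the WARNING lines from optim.py report the distribution of recent gradient norms (min, 25%, median, 75%, max) plus the clipping threshold derived from them. Consistently throughout this log, threshold equals Clipping_scale times the median (e.g. 2.0 * 1.704e+02 = 3.409e+02 just above), and percent-clipped says how often the window exceeded it. A hedged sketch of that scheme follows; the window size and update cadence are assumptions, not the framework's exact settings.

import torch

# Quartile-based gradient clipping consistent with the logged numbers:
# track a window of recent grad norms, clip to clipping_scale * median.
class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.scale = clipping_scale
        self.window = window
        self.norms: list[float] = []

    def clip_(self, params) -> float:
        # max_norm=inf only measures the total norm, it does not clip
        norm = torch.nn.utils.clip_grad_norm_(params, float("inf"))
        self.norms.append(float(norm))
        self.norms = self.norms[-self.window:]
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()  # 2.0 * median, as in the log
        torch.nn.utils.clip_grad_norm_(params, threshold)
        return threshold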
+2024-08-26 18:05:09,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98426.66666666667, ans=0.125
+2024-08-26 18:05:24,683 INFO [train.py:1114] (3/4) Epoch 8, batch 1050, loss[loss=0.2233, simple_loss=0.2951, pruned_loss=0.05446, ctc_loss=0.1065, over 19846.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.2925, pruned_loss=0.06363, ctc_loss=0.1191, over 3822946.03 frames. ], batch size: 57, lr: 1.85e-02, grad_scale: 16.0
+2024-08-26 18:05:28,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98533.33333333333, ans=0.125
+2024-08-26 18:05:36,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=98586.66666666667, ans=0.125
+2024-08-26 18:05:49,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.93 vs. limit=15.0
+2024-08-26 18:05:53,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98640.0, ans=0.125
+2024-08-26 18:06:03,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=98693.33333333333, ans=0.125
+2024-08-26 18:06:04,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=98693.33333333333, ans=0.0
+2024-08-26 18:06:18,148 INFO [train.py:1114] (3/4) Epoch 8, batch 1100, loss[loss=0.2277, simple_loss=0.2971, pruned_loss=0.05749, ctc_loss=0.1084, over 19584.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.2925, pruned_loss=0.06351, ctc_loss=0.119, over 3831513.25 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0
+2024-08-26 18:06:19,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=98800.0, ans=0.125
+2024-08-26 18:06:24,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.51 vs. limit=15.0
+2024-08-26 18:06:24,655 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.469e+02 1.560e+02 1.744e+02 3.443e+02, threshold=3.121e+02, percent-clipped=2.0
+2024-08-26 18:06:27,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.21 vs. limit=22.5
+2024-08-26 18:06:48,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=98960.0, ans=0.2
+2024-08-26 18:07:03,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=99013.33333333333, ans=0.2
+2024-08-26 18:07:10,061 INFO [train.py:1114] (3/4) Epoch 8, batch 1150, loss[loss=0.2139, simple_loss=0.2832, pruned_loss=0.05395, ctc_loss=0.09175, over 19593.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2922, pruned_loss=0.06348, ctc_loss=0.1187, over 3830419.49 frames. ], batch size: 52, lr: 1.84e-02, grad_scale: 16.0
+2024-08-26 18:07:23,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=99120.0, ans=0.2
+2024-08-26 18:07:30,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.95 vs. limit=22.5
+2024-08-26 18:07:37,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=99173.33333333333, ans=0.125
+2024-08-26 18:07:39,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=99226.66666666667, ans=0.125
+2024-08-26 18:07:41,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=99226.66666666667, ans=0.125
+2024-08-26 18:07:57,702 INFO [train.py:1114] (3/4) Epoch 8, batch 1200, loss[loss=0.2434, simple_loss=0.2998, pruned_loss=0.06776, ctc_loss=0.1288, over 19836.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2932, pruned_loss=0.06364, ctc_loss=0.1192, over 3825608.64 frames. ], batch size: 57, lr: 1.84e-02, grad_scale: 32.0
+2024-08-26 18:07:58,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=99333.33333333333, ans=0.125
+2024-08-26 18:08:04,255 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.491e+02 1.608e+02 2.003e+02 2.840e+02, threshold=3.216e+02, percent-clipped=0.0
+2024-08-26 18:08:10,285 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:08:23,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. limit=6.0
+2024-08-26 18:08:34,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=99493.33333333333, ans=0.125
+2024-08-26 18:08:47,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=12.0
+2024-08-26 18:08:49,183 INFO [train.py:1114] (3/4) Epoch 8, batch 1250, loss[loss=0.2445, simple_loss=0.307, pruned_loss=0.06609, ctc_loss=0.1246, over 19545.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2934, pruned_loss=0.06358, ctc_loss=0.1188, over 3843494.08 frames. ], batch size: 61, lr: 1.84e-02, grad_scale: 32.0
+2024-08-26 18:08:49,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=99600.0, ans=0.125
+2024-08-26 18:08:53,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=99600.0, ans=0.125
+2024-08-26 18:08:55,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=99600.0, ans=0.125
+2024-08-26 18:09:02,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=99653.33333333333, ans=0.125
+2024-08-26 18:09:26,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=99813.33333333333, ans=0.2
+2024-08-26 18:09:40,600 INFO [train.py:1114] (3/4) Epoch 8, batch 1300, loss[loss=0.2408, simple_loss=0.2976, pruned_loss=0.06692, ctc_loss=0.1255, over 18841.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2923, pruned_loss=0.06312, ctc_loss=0.1179, over 3846002.07 frames. ], batch size: 76, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:09:46,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=12.0
+2024-08-26 18:09:47,144 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.481e+02 1.661e+02 1.866e+02 3.142e+02, threshold=3.323e+02, percent-clipped=0.0
+2024-08-26 18:09:48,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=99866.66666666667, ans=0.2
+2024-08-26 18:09:50,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=99920.0, ans=0.125
+2024-08-26 18:10:06,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=99973.33333333333, ans=0.125
+2024-08-26 18:10:13,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=100026.66666666667, ans=0.125
+2024-08-26 18:10:16,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=100026.66666666667, ans=0.2
+2024-08-26 18:10:26,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=100133.33333333333, ans=0.0
+2024-08-26 18:10:27,317 INFO [train.py:1114] (3/4) Epoch 8, batch 1350, loss[loss=0.2254, simple_loss=0.2874, pruned_loss=0.05881, ctc_loss=0.1146, over 19766.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2916, pruned_loss=0.06253, ctc_loss=0.1167, over 3857437.49 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 32.0
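Editor's note: each train.py:1114 line decomposes the objective into simple_loss, pruned_loss and ctc_loss, reporting both the current batch (loss[..., over N frames]) and a frame-weighted running average over the epoch so far (tot_loss[..., over N frames]). The combination weights are not visible in the log, so the 0.5/1.0/1.0 scales below are assumptions; the sketch only shows the shape of the bookkeeping.

# Frame-weighted running average of a composite loss, as in "tot_loss[...]".
# The scale values are illustrative; the run's true weights are not logged.
def combine(simple_loss, pruned_loss, ctc_loss,
            simple_scale=0.5, pruned_scale=1.0, ctc_scale=1.0):
    return (simple_scale * simple_loss
            + pruned_scale * pruned_loss
            + ctc_scale * ctc_loss)

class RunningLoss:
    def __init__(self):
        self.frames = 0.0
        self.sums = {"loss": 0.0, "simple_loss": 0.0,
                     "pruned_loss": 0.0, "ctc_loss": 0.0}

    def update(self, batch_losses: dict, num_frames: float):
        self.frames += num_frames
        for k, v in batch_losses.items():
            self.sums[k] += v * num_frames  # weight per-frame values by frames

    def averages(self) -> dict:
        return {k: s / self.frames for k, s in self.sums.items()}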
+2024-08-26 18:10:38,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=100186.66666666667, ans=0.2
+2024-08-26 18:10:50,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=100240.0, ans=0.125
+2024-08-26 18:10:51,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=100240.0, ans=0.2
+2024-08-26 18:11:10,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=100346.66666666667, ans=10.0
+2024-08-26 18:11:13,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.25 vs. limit=15.0
+2024-08-26 18:11:14,671 INFO [train.py:1114] (3/4) Epoch 8, batch 1400, loss[loss=0.1987, simple_loss=0.2574, pruned_loss=0.05119, ctc_loss=0.09413, over 19649.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2917, pruned_loss=0.06268, ctc_loss=0.1168, over 3863980.64 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:11:14,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=100400.0, ans=0.0
+2024-08-26 18:11:23,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.577e+02 1.859e+02 2.331e+02 3.237e+02, threshold=3.718e+02, percent-clipped=0.0
+2024-08-26 18:11:31,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.39 vs. limit=10.0
+2024-08-26 18:11:31,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=100453.33333333333, ans=0.2
+2024-08-26 18:11:38,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100453.33333333333, ans=0.0
+2024-08-26 18:11:46,267 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:11:53,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=100560.0, ans=0.5
+2024-08-26 18:11:56,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.07 vs. limit=22.5
+2024-08-26 18:11:57,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=100560.0, ans=0.5
+2024-08-26 18:11:58,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=100613.33333333333, ans=0.125
+2024-08-26 18:12:09,364 INFO [train.py:1114] (3/4) Epoch 8, batch 1450, loss[loss=0.2382, simple_loss=0.3048, pruned_loss=0.06134, ctc_loss=0.1225, over 19702.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.2921, pruned_loss=0.06281, ctc_loss=0.1173, over 3861968.41 frames. ], batch size: 63, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:12:15,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100666.66666666667, ans=0.125
+2024-08-26 18:12:17,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100720.0, ans=0.125
+2024-08-26 18:12:32,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100773.33333333333, ans=0.125
+2024-08-26 18:12:33,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100773.33333333333, ans=0.0
+2024-08-26 18:12:40,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=100773.33333333333, ans=0.0
+2024-08-26 18:12:50,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100880.0, ans=0.125
+2024-08-26 18:13:00,710 INFO [train.py:1114] (3/4) Epoch 8, batch 1500, loss[loss=0.2338, simple_loss=0.306, pruned_loss=0.05821, ctc_loss=0.1129, over 19581.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2923, pruned_loss=0.06269, ctc_loss=0.1171, over 3861705.14 frames. ], batch size: 57, lr: 1.83e-02, grad_scale: 32.0
+2024-08-26 18:13:05,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100933.33333333333, ans=0.1
+2024-08-26 18:13:07,543 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.594e+02 1.806e+02 5.150e+02, threshold=3.189e+02, percent-clipped=1.0
+2024-08-26 18:13:42,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=101146.66666666667, ans=0.2
+2024-08-26 18:13:48,331 INFO [train.py:1114] (3/4) Epoch 8, batch 1550, loss[loss=0.2841, simple_loss=0.3337, pruned_loss=0.08651, ctc_loss=0.1537, over 19613.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.2923, pruned_loss=0.06295, ctc_loss=0.1176, over 3846191.26 frames. ], batch size: 60, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:13:57,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101253.33333333333, ans=0.1
+2024-08-26 18:14:11,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=101306.66666666667, ans=0.0
+2024-08-26 18:14:12,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.73 vs. limit=22.5
+2024-08-26 18:14:27,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=101360.0, ans=0.0
+2024-08-26 18:14:32,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101413.33333333333, ans=0.1
+2024-08-26 18:14:34,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=101413.33333333333, ans=0.125
+2024-08-26 18:14:40,870 INFO [train.py:1114] (3/4) Epoch 8, batch 1600, loss[loss=0.2403, simple_loss=0.3029, pruned_loss=0.06447, ctc_loss=0.1218, over 19838.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2924, pruned_loss=0.06306, ctc_loss=0.1177, over 3835823.42 frames. ], batch size: 57, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:14:42,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=101466.66666666667, ans=0.125
+2024-08-26 18:14:44,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101466.66666666667, ans=0.0
+2024-08-26 18:14:47,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.562e+02 1.716e+02 2.059e+02 3.797e+02, threshold=3.431e+02, percent-clipped=2.0
+2024-08-26 18:14:56,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101520.0, ans=0.0
+2024-08-26 18:15:16,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.85 vs. limit=15.0
+2024-08-26 18:15:23,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101680.0, ans=0.125
+2024-08-26 18:15:32,091 INFO [train.py:1114] (3/4) Epoch 8, batch 1650, loss[loss=0.2643, simple_loss=0.3182, pruned_loss=0.07649, ctc_loss=0.1435, over 19651.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.2921, pruned_loss=0.0631, ctc_loss=0.118, over 3833814.27 frames. ], batch size: 59, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:15:38,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101733.33333333333, ans=0.1
+2024-08-26 18:15:39,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=101733.33333333333, ans=0.025
+2024-08-26 18:15:53,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=101840.0, ans=0.0
+2024-08-26 18:15:56,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=101840.0, ans=0.2
+2024-08-26 18:16:00,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=101893.33333333333, ans=0.05
+2024-08-26 18:16:09,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101946.66666666667, ans=0.1
+2024-08-26 18:16:18,697 INFO [train.py:1114] (3/4) Epoch 8, batch 1700, loss[loss=0.2088, simple_loss=0.2611, pruned_loss=0.05734, ctc_loss=0.1043, over 19700.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2917, pruned_loss=0.06267, ctc_loss=0.1173, over 3847576.27 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:16:25,302 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.495e+02 1.737e+02 2.089e+02 3.401e+02, threshold=3.475e+02, percent-clipped=0.0
+2024-08-26 18:17:01,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=102213.33333333333, ans=0.125
+2024-08-26 18:17:03,828 INFO [train.py:1114] (3/4) Epoch 8, batch 1750, loss[loss=0.2161, simple_loss=0.2747, pruned_loss=0.05769, ctc_loss=0.1054, over 19644.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.2913, pruned_loss=0.06247, ctc_loss=0.1168, over 3852621.28 frames. ], batch size: 45, lr: 1.82e-02, grad_scale: 32.0
+2024-08-26 18:17:05,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=102266.66666666667, ans=0.0
+2024-08-26 18:17:10,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=102266.66666666667, ans=0.2
+2024-08-26 18:17:12,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.01 vs. limit=10.0
+2024-08-26 18:17:15,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102320.0, ans=0.0
+2024-08-26 18:17:18,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=102320.0, ans=0.125
+2024-08-26 18:17:32,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=102426.66666666667, ans=0.0
+2024-08-26 18:17:33,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.75 vs. limit=15.0
+2024-08-26 18:17:36,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=15.0
+2024-08-26 18:17:48,510 INFO [train.py:1114] (3/4) Epoch 8, batch 1800, loss[loss=0.2326, simple_loss=0.2954, pruned_loss=0.06163, ctc_loss=0.1161, over 19618.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2909, pruned_loss=0.06228, ctc_loss=0.1163, over 3853804.88 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 32.0
+2024-08-26 18:17:51,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=102533.33333333333, ans=0.0
+2024-08-26 18:17:56,850 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.517e+02 1.665e+02 1.949e+02 3.105e+02, threshold=3.330e+02, percent-clipped=0.0
+2024-08-26 18:17:59,297 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0
+2024-08-26 18:18:00,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.63 vs. limit=15.0
+2024-08-26 18:18:02,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.31 vs. limit=15.0
+2024-08-26 18:18:13,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=102640.0, ans=0.2
+2024-08-26 18:18:14,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=102640.0, ans=0.025
+2024-08-26 18:18:36,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=102800.0, ans=0.0
+2024-08-26 18:18:36,760 INFO [train.py:1114] (3/4) Epoch 8, batch 1850, loss[loss=0.2348, simple_loss=0.2995, pruned_loss=0.06135, ctc_loss=0.1184, over 19596.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2909, pruned_loss=0.06236, ctc_loss=0.1163, over 3856677.92 frames. ], batch size: 57, lr: 1.81e-02, grad_scale: 32.0
+2024-08-26 18:18:42,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102800.0, ans=0.1
+2024-08-26 18:18:52,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.98 vs. limit=22.5
+2024-08-26 18:18:58,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=102906.66666666667, ans=0.09899494936611666
+2024-08-26 18:19:12,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103013.33333333333, ans=0.125
+2024-08-26 18:19:16,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103013.33333333333, ans=0.125
+2024-08-26 18:19:19,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=103013.33333333333, ans=0.0
+2024-08-26 18:19:21,232 INFO [train.py:1114] (3/4) Epoch 8, batch 1900, loss[loss=0.2278, simple_loss=0.3083, pruned_loss=0.05299, ctc_loss=0.1032, over 19665.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2915, pruned_loss=0.06244, ctc_loss=0.1162, over 3860546.74 frames. ], batch size: 59, lr: 1.81e-02, grad_scale: 16.0
+2024-08-26 18:19:25,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=103066.66666666667, ans=0.2
+2024-08-26 18:19:28,167 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.533e+02 1.714e+02 2.014e+02 3.062e+02, threshold=3.427e+02, percent-clipped=0.0
+2024-08-26 18:19:37,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103173.33333333333, ans=0.125
+2024-08-26 18:19:44,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.12 vs. limit=22.5
+2024-08-26 18:20:04,904 INFO [train.py:1114] (3/4) Epoch 8, batch 1950, loss[loss=0.2295, simple_loss=0.2851, pruned_loss=0.06375, ctc_loss=0.1159, over 19582.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.2923, pruned_loss=0.06225, ctc_loss=0.1161, over 3869516.51 frames. ], batch size: 52, lr: 1.81e-02, grad_scale: 16.0
+2024-08-26 18:20:18,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=103386.66666666667, ans=0.125
+2024-08-26 18:20:51,117 INFO [train.py:1114] (3/4) Epoch 8, batch 2000, loss[loss=0.2077, simple_loss=0.2625, pruned_loss=0.05541, ctc_loss=0.1054, over 19650.00 frames. ], tot_loss[loss=0.233, simple_loss=0.293, pruned_loss=0.06295, ctc_loss=0.1175, over 3854954.45 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 16.0
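Editor's note: the grad_scale field is the mixed-precision loss scale (the README's "Native AMP" setting); it alternates between 32.0 and 16.0 in this stretch because dynamic loss scaling halves the scale when an overflow is detected and grows it back after a run of clean steps. A minimal sketch using torch.cuda.amp.GradScaler follows; the growth and backoff settings are illustrative, not the run's actual values.

import torch

# Dynamic loss scaling as used with Native AMP; settings are assumptions.
scaler = torch.cuda.amp.GradScaler(init_scale=32.0,
                                   growth_factor=2.0,
                                   backoff_factor=0.5,
                                   growth_interval=2000)

def training_step(model, optimizer, batch, loss_fn):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = loss_fn(model, batch)
    scaler.scale(loss).backward()   # scale the loss before backward
    scaler.step(optimizer)          # unscale grads; skip step on inf/nan
    scaler.update()                 # halve scale on overflow, grow otherwise
    return scaler.get_scale()       # the grad_scale value seen in the log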
+2024-08-26 18:20:59,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103600.0, ans=0.0
+2024-08-26 18:21:00,307 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.277e+02 1.619e+02 1.835e+02 2.136e+02 5.632e+02, threshold=3.670e+02, percent-clipped=2.0
+2024-08-26 18:21:04,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=103653.33333333333, ans=0.125
+2024-08-26 18:21:05,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.33 vs. limit=15.0
+2024-08-26 18:21:12,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=103706.66666666667, ans=0.025
+2024-08-26 18:21:31,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=103813.33333333333, ans=0.2
+2024-08-26 18:21:36,058 INFO [train.py:1114] (3/4) Epoch 8, batch 2050, loss[loss=0.2341, simple_loss=0.2766, pruned_loss=0.07043, ctc_loss=0.1268, over 19689.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2916, pruned_loss=0.06265, ctc_loss=0.1171, over 3851533.26 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:21:38,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103866.66666666667, ans=0.125
+2024-08-26 18:21:43,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=103866.66666666667, ans=0.125
+2024-08-26 18:21:47,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=103920.0, ans=0.0
+2024-08-26 18:21:52,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=103973.33333333333, ans=0.125
+2024-08-26 18:22:02,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.16 vs. limit=22.5
+2024-08-26 18:22:03,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104026.66666666667, ans=0.0
+2024-08-26 18:22:12,121 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=22.5
+2024-08-26 18:22:19,595 INFO [train.py:1114] (3/4) Epoch 8, batch 2100, loss[loss=0.2322, simple_loss=0.2961, pruned_loss=0.06072, ctc_loss=0.1172, over 19763.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.291, pruned_loss=0.06209, ctc_loss=0.1162, over 3858443.25 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:22:21,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104133.33333333333, ans=0.125
+2024-08-26 18:22:27,467 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.502e+02 1.673e+02 2.007e+02 2.886e+02, threshold=3.346e+02, percent-clipped=0.0
+2024-08-26 18:22:31,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=104186.66666666667, ans=0.125
+2024-08-26 18:22:33,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104186.66666666667, ans=0.1
+2024-08-26 18:22:58,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=104346.66666666667, ans=0.125
+2024-08-26 18:22:59,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=104346.66666666667, ans=0.2
+2024-08-26 18:23:03,054 INFO [train.py:1114] (3/4) Epoch 8, batch 2150, loss[loss=0.2275, simple_loss=0.288, pruned_loss=0.0609, ctc_loss=0.1131, over 19847.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2897, pruned_loss=0.06138, ctc_loss=0.1149, over 3870537.57 frames. ], batch size: 52, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:23:10,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=104400.0, ans=0.02
+2024-08-26 18:23:46,677 INFO [train.py:1114] (3/4) Epoch 8, batch 2200, loss[loss=0.2273, simple_loss=0.2928, pruned_loss=0.05802, ctc_loss=0.1144, over 19586.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2896, pruned_loss=0.06132, ctc_loss=0.1149, over 3869428.46 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:23:50,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=104666.66666666667, ans=0.0
+2024-08-26 18:23:54,537 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.596e+02 1.839e+02 2.214e+02 3.376e+02, threshold=3.678e+02, percent-clipped=1.0
+2024-08-26 18:23:56,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=104720.0, ans=0.125
+2024-08-26 18:23:58,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=104720.0, ans=0.125
+2024-08-26 18:24:00,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104720.0, ans=0.125
+2024-08-26 18:24:10,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.03 vs. limit=15.0
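Editor's note: the Whitening lines compare a per-module statistic against a limit (e.g. metric=8.03 vs. limit=15.0 just above); when the metric exceeds its limit, the module pushes activations back toward a whiter, more isotropic covariance. The exact formula is internal to the framework, so the sketch below is an assumption: it uses the eigenvalue-dispersion ratio of the feature covariance, which is 1.0 for a perfectly isotropic signal and grows as energy concentrates in few directions.

import torch

# Assumed whitening metric: D * trace(C @ C) / trace(C)^2 per group, i.e.
# E[lambda^2] / E[lambda]^2 over the covariance eigenvalues lambda.
def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    n, d = x.shape                      # (frames, channels)
    x = x.reshape(n, num_groups, d // num_groups)
    metrics = []
    for g in range(num_groups):
        xg = x[:, g, :]
        cov = (xg.T @ xg) / n           # per-group feature covariance
        dim = cov.shape[0]
        tr = torch.diagonal(cov).sum()
        tr_sq = (cov * cov).sum()       # Frobenius norm^2 = sum of eigvals^2
        metrics.append((dim * tr_sq / tr.clamp(min=1e-20) ** 2).item())
    return max(metrics)

# For random noise this sits near 1 + d/n, far below typical limits like 15.0.
print(whitening_metric(torch.randn(1000, 384)))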
+2024-08-26 18:24:11,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=104773.33333333333, ans=0.025
+2024-08-26 18:24:16,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104826.66666666667, ans=0.125
+2024-08-26 18:24:28,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104880.0, ans=0.125
+2024-08-26 18:24:30,572 INFO [train.py:1114] (3/4) Epoch 8, batch 2250, loss[loss=0.2373, simple_loss=0.2977, pruned_loss=0.06443, ctc_loss=0.12, over 19610.00 frames. ], tot_loss[loss=0.229, simple_loss=0.2895, pruned_loss=0.06132, ctc_loss=0.115, over 3868784.71 frames. ], batch size: 55, lr: 1.80e-02, grad_scale: 16.0
+2024-08-26 18:24:33,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=104933.33333333333, ans=0.1
+2024-08-26 18:24:37,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.85 vs. limit=15.0
+2024-08-26 18:24:41,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104986.66666666667, ans=0.125
+2024-08-26 18:24:50,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105040.0, ans=0.125
+2024-08-26 18:24:53,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=105040.0, ans=0.125
+2024-08-26 18:24:59,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=105093.33333333333, ans=0.2
+2024-08-26 18:25:16,098 INFO [train.py:1114] (3/4) Epoch 8, batch 2300, loss[loss=0.2091, simple_loss=0.2707, pruned_loss=0.05421, ctc_loss=0.09788, over 19479.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2891, pruned_loss=0.0617, ctc_loss=0.1154, over 3862172.15 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 16.0
+2024-08-26 18:25:20,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=105200.0, ans=0.0
+2024-08-26 18:25:23,765 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.553e+02 1.767e+02 2.002e+02 4.280e+02, threshold=3.534e+02, percent-clipped=3.0
+2024-08-26 18:25:30,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=105253.33333333333, ans=0.07
+2024-08-26 18:25:48,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105360.0, ans=0.1
+2024-08-26 18:25:58,622 INFO [train.py:1114] (3/4) Epoch 8, batch 2350, loss[loss=0.2583, simple_loss=0.3119, pruned_loss=0.07569, ctc_loss=0.1333, over 19709.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2896, pruned_loss=0.06206, ctc_loss=0.1157, over 3863657.07 frames. ], batch size: 63, lr: 1.79e-02, grad_scale: 16.0
+2024-08-26 18:26:14,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=105520.0, ans=0.125
+2024-08-26 18:26:28,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=105626.66666666667, ans=0.2
+2024-08-26 18:26:33,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105680.0, ans=0.125
+2024-08-26 18:26:42,918 INFO [train.py:1114] (3/4) Epoch 8, batch 2400, loss[loss=0.2579, simple_loss=0.3195, pruned_loss=0.07207, ctc_loss=0.1303, over 19299.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.2922, pruned_loss=0.06302, ctc_loss=0.1175, over 3857803.31 frames. ], batch size: 71, lr: 1.79e-02, grad_scale: 32.0
+2024-08-26 18:26:45,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.81 vs. limit=10.0
+2024-08-26 18:26:50,603 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.526e+02 1.733e+02 1.998e+02 3.354e+02, threshold=3.467e+02, percent-clipped=0.0
+2024-08-26 18:27:06,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105840.0, ans=0.1
+2024-08-26 18:27:07,445 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.31 vs. limit=10.0
+2024-08-26 18:27:23,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105946.66666666667, ans=0.0
+2024-08-26 18:27:23,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.10 vs. limit=6.0
+2024-08-26 18:27:24,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105946.66666666667, ans=0.125
+2024-08-26 18:27:27,068 INFO [train.py:1114] (3/4) Epoch 8, batch 2450, loss[loss=0.3168, simple_loss=0.3327, pruned_loss=0.1118, ctc_loss=0.1931, over 13559.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2967, pruned_loss=0.06677, ctc_loss=0.1245, over 3729557.09 frames. ], batch size: 140, lr: 1.79e-02, grad_scale: 16.0
+2024-08-26 18:27:32,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=106000.0, ans=0.125
+2024-08-26 18:27:32,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=106000.0, ans=0.1
+2024-08-26 18:27:32,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.21 vs. limit=22.5
+2024-08-26 18:27:41,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106053.33333333333, ans=0.125
+2024-08-26 18:27:41,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0
+2024-08-26 18:27:44,314 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:27:48,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=106106.66666666667, ans=0.05
+2024-08-26 18:27:50,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=106106.66666666667, ans=0.2
+2024-08-26 18:27:54,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106160.0, ans=0.125
+2024-08-26 18:28:39,246 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:28:47,227 INFO [train.py:1114] (3/4) Epoch 9, batch 0, loss[loss=0.2192, simple_loss=0.2725, pruned_loss=0.06, ctc_loss=0.1147, over 19434.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2725, pruned_loss=0.06, ctc_loss=0.1147, over 19434.00 frames. ], batch size: 48, lr: 1.69e-02, grad_scale: 32.0
+2024-08-26 18:28:47,227 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-26 18:28:56,810 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.1927, simple_loss=0.2844, pruned_loss=0.03737, ctc_loss=0.06585, over 944034.00 frames.
+2024-08-26 18:28:56,811 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12874MB
+2024-08-26 18:28:59,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106208.0, ans=0.125
+2024-08-26 18:29:16,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.480e+02 1.688e+02 1.849e+02 2.025e+02 3.204e+02, threshold=3.698e+02, percent-clipped=0.0
+2024-08-26 18:29:19,856 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.23 vs. limit=22.5
+2024-08-26 18:29:22,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106314.66666666667, ans=0.125
+2024-08-26 18:29:23,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=106314.66666666667, ans=0.125
+2024-08-26 18:29:43,036 INFO [train.py:1114] (3/4) Epoch 9, batch 50, loss[loss=0.1982, simple_loss=0.2623, pruned_loss=0.04843, ctc_loss=0.09317, over 19699.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2939, pruned_loss=0.06378, ctc_loss=0.1193, over 844655.46 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 32.0
+2024-08-26 18:29:50,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=106474.66666666667, ans=0.05
+2024-08-26 18:29:51,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.20 vs. limit=10.0
+2024-08-26 18:29:52,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.65 vs. limit=12.0
+2024-08-26 18:30:00,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.45 vs. limit=15.0
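Editor's note: at each epoch boundary the trainer pauses to compute a validation loss over the held-out set (the train.py:1137/1146 lines above, e.g. "Epoch 9, validation: loss=0.1927 ... over 944034.00 frames") and reports the peak GPU memory. A sketch of that pass follows, using the same frame-weighted averaging as the training stats; the model/loss_fn names and the batch interface are placeholders, not the script's actual API.

import torch

# Per-epoch validation pass; assumes loss_fn returns a per-frame loss
# together with the number of frames in the batch.
@torch.no_grad()
def compute_validation_loss(model, valid_loader, loss_fn, device):
    model.eval()
    tot, frames = 0.0, 0.0
    for batch in valid_loader:
        loss, num_frames = loss_fn(model, batch, device)
        tot += loss.item() * num_frames
        frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    return tot / frames, peak_mb  # cf. "validation: loss=0.1927" and "12874MB"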
+2024-08-26 18:30:06,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106581.33333333333, ans=0.125
+2024-08-26 18:30:12,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=106634.66666666667, ans=0.0
+2024-08-26 18:30:22,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=106634.66666666667, ans=0.2
+2024-08-26 18:30:32,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106688.0, ans=0.125
+2024-08-26 18:30:39,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=15.0
+2024-08-26 18:30:39,516 INFO [train.py:1114] (3/4) Epoch 9, batch 100, loss[loss=0.1992, simple_loss=0.2704, pruned_loss=0.04631, ctc_loss=0.0887, over 19735.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2933, pruned_loss=0.06301, ctc_loss=0.118, over 1499016.47 frames. ], batch size: 51, lr: 1.69e-02, grad_scale: 32.0
+2024-08-26 18:31:01,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.63 vs. limit=22.5
+2024-08-26 18:31:02,334 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.554e+02 1.735e+02 2.126e+02 3.416e+02, threshold=3.470e+02, percent-clipped=0.0
+2024-08-26 18:31:02,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106848.0, ans=0.125
+2024-08-26 18:31:03,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=106848.0, ans=0.0
+2024-08-26 18:31:05,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106848.0, ans=0.1
+2024-08-26 18:31:07,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=106848.0, ans=0.0
+2024-08-26 18:31:19,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=106954.66666666667, ans=0.025
+2024-08-26 18:31:28,281 INFO [train.py:1114] (3/4) Epoch 9, batch 150, loss[loss=0.2158, simple_loss=0.2728, pruned_loss=0.05916, ctc_loss=0.1012, over 19680.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2902, pruned_loss=0.06131, ctc_loss=0.115, over 2026886.82 frames. ], batch size: 47, lr: 1.69e-02, grad_scale: 16.0
+2024-08-26 18:31:41,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0
+2024-08-26 18:31:42,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=107061.33333333333, ans=0.125
+2024-08-26 18:32:04,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=107221.33333333333, ans=0.2
+2024-08-26 18:32:12,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=107221.33333333333, ans=0.125
+2024-08-26 18:32:13,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=107274.66666666667, ans=0.2
+2024-08-26 18:32:14,125 INFO [train.py:1114] (3/4) Epoch 9, batch 200, loss[loss=0.2517, simple_loss=0.3085, pruned_loss=0.07114, ctc_loss=0.1313, over 18314.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.2891, pruned_loss=0.06079, ctc_loss=0.1139, over 2435917.01 frames. ], batch size: 85, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:32:16,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107274.66666666667, ans=0.125
+2024-08-26 18:32:20,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=107274.66666666667, ans=0.125
+2024-08-26 18:32:20,646 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:32:26,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=107328.0, ans=0.09899494936611666
+2024-08-26 18:32:28,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=107328.0, ans=0.0
+2024-08-26 18:32:29,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107328.0, ans=0.125
+2024-08-26 18:32:36,039 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.442e+02 1.571e+02 1.787e+02 2.800e+02, threshold=3.143e+02, percent-clipped=0.0
+2024-08-26 18:32:39,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=107381.33333333333, ans=0.0
+2024-08-26 18:32:41,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=107381.33333333333, ans=0.0
+2024-08-26 18:32:42,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107434.66666666667, ans=0.1
+2024-08-26 18:32:53,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=15.0
+2024-08-26 18:32:58,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=107488.0, ans=0.0
+2024-08-26 18:33:02,006 INFO [train.py:1114] (3/4) Epoch 9, batch 250, loss[loss=0.2582, simple_loss=0.3149, pruned_loss=0.07422, ctc_loss=0.1328, over 19343.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.2888, pruned_loss=0.06052, ctc_loss=0.1132, over 2755622.38 frames. ], batch size: 67, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:33:04,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107541.33333333333, ans=0.1
+2024-08-26 18:33:21,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=107594.66666666667, ans=0.1
+2024-08-26 18:33:25,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107594.66666666667, ans=0.125
+2024-08-26 18:33:38,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107701.33333333333, ans=0.125
+2024-08-26 18:33:39,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107701.33333333333, ans=0.125
+2024-08-26 18:33:40,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=107701.33333333333, ans=0.0
+2024-08-26 18:33:52,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=107754.66666666667, ans=0.2
+2024-08-26 18:33:53,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.88 vs. limit=15.0
+2024-08-26 18:34:01,001 INFO [train.py:1114] (3/4) Epoch 9, batch 300, loss[loss=0.2677, simple_loss=0.3211, pruned_loss=0.08017, ctc_loss=0.1351, over 19523.00 frames. ], tot_loss[loss=0.227, simple_loss=0.2884, pruned_loss=0.06032, ctc_loss=0.1125, over 3000418.68 frames. ], batch size: 61, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:34:06,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=107808.0, ans=10.0
+2024-08-26 18:34:16,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=107861.33333333333, ans=0.025
+2024-08-26 18:34:18,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107861.33333333333, ans=0.125
+2024-08-26 18:34:24,470 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.264e+02 1.498e+02 1.681e+02 1.999e+02 2.633e+02, threshold=3.363e+02, percent-clipped=0.0
+2024-08-26 18:34:27,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107914.66666666667, ans=0.125
+2024-08-26 18:34:32,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=107968.0, ans=0.025
+2024-08-26 18:34:33,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=107968.0, ans=0.0
+2024-08-26 18:34:34,105 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 18:34:34,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107968.0, ans=0.125
+2024-08-26 18:34:37,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107968.0, ans=0.125
+2024-08-26 18:34:50,533 INFO [train.py:1114] (3/4) Epoch 9, batch 350, loss[loss=0.2325, simple_loss=0.2891, pruned_loss=0.06438, ctc_loss=0.118, over 19744.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2892, pruned_loss=0.06053, ctc_loss=0.1128, over 3190835.51 frames. ], batch size: 48, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:35:11,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.37 vs. limit=22.5
+2024-08-26 18:35:22,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=108234.66666666667, ans=0.125
+2024-08-26 18:35:29,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=108288.0, ans=0.0
+2024-08-26 18:35:40,763 INFO [train.py:1114] (3/4) Epoch 9, batch 400, loss[loss=0.2148, simple_loss=0.2847, pruned_loss=0.05251, ctc_loss=0.09967, over 19519.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.2881, pruned_loss=0.05997, ctc_loss=0.112, over 3342303.24 frames. ], batch size: 54, lr: 1.68e-02, grad_scale: 16.0
+2024-08-26 18:35:41,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108341.33333333333, ans=0.125
+2024-08-26 18:35:50,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108394.66666666667, ans=0.125
+2024-08-26 18:36:02,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.232e+02 1.489e+02 1.712e+02 1.995e+02 4.778e+02, threshold=3.424e+02, percent-clipped=1.0
+2024-08-26 18:36:22,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=108554.66666666667, ans=0.2
+2024-08-26 18:36:30,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=108554.66666666667, ans=0.125
+2024-08-26 18:36:31,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108608.0, ans=0.0
+2024-08-26 18:36:32,741 INFO [train.py:1114] (3/4) Epoch 9, batch 450, loss[loss=0.2182, simple_loss=0.2874, pruned_loss=0.05329, ctc_loss=0.1063, over 19625.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2885, pruned_loss=0.06004, ctc_loss=0.1122, over 3451742.59 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:36:34,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=108608.0, ans=0.04949747468305833
+2024-08-26 18:36:54,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=108714.66666666667, ans=0.04949747468305833
+2024-08-26 18:37:02,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=108768.0, ans=0.2
+2024-08-26 18:37:14,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=108821.33333333333, ans=0.1
+2024-08-26 18:37:15,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.89 vs. limit=22.5
+2024-08-26 18:37:21,553 INFO [train.py:1114] (3/4) Epoch 9, batch 500, loss[loss=0.2524, simple_loss=0.3146, pruned_loss=0.07026, ctc_loss=0.1241, over 19699.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2876, pruned_loss=0.05952, ctc_loss=0.1114, over 3546623.14 frames. ], batch size: 63, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:37:42,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.242e+02 1.480e+02 1.660e+02 1.957e+02 3.087e+02, threshold=3.320e+02, percent-clipped=0.0
+2024-08-26 18:37:50,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109034.66666666667, ans=0.1
+2024-08-26 18:38:00,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109088.0, ans=0.125
+2024-08-26 18:38:07,945 INFO [train.py:1114] (3/4) Epoch 9, batch 550, loss[loss=0.251, simple_loss=0.304, pruned_loss=0.07325, ctc_loss=0.1291, over 19302.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2874, pruned_loss=0.05973, ctc_loss=0.1116, over 3608514.03 frames. ], batch size: 71, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:38:30,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109248.0, ans=0.125
+2024-08-26 18:38:34,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=109248.0, ans=0.025
+2024-08-26 18:38:43,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=109301.33333333333, ans=0.0
+2024-08-26 18:38:55,929 INFO [train.py:1114] (3/4) Epoch 9, batch 600, loss[loss=0.2472, simple_loss=0.306, pruned_loss=0.06878, ctc_loss=0.1272, over 19437.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.2879, pruned_loss=0.05976, ctc_loss=0.1118, over 3665876.29 frames. ], batch size: 67, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:39:06,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=109408.0, ans=0.0
+2024-08-26 18:39:16,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109461.33333333333, ans=0.125
+2024-08-26 18:39:21,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=109514.66666666667, ans=0.05
+2024-08-26 18:39:21,959 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.496e+02 1.658e+02 1.980e+02 4.382e+02, threshold=3.316e+02, percent-clipped=1.0
+2024-08-26 18:39:23,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109514.66666666667, ans=0.125
+2024-08-26 18:39:27,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=109514.66666666667, ans=0.0
+2024-08-26 18:39:33,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=109568.0, ans=0.125
+2024-08-26 18:39:35,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0
+2024-08-26 18:39:42,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109621.33333333333, ans=0.1
+2024-08-26 18:39:49,359 INFO [train.py:1114] (3/4) Epoch 9, batch 650, loss[loss=0.2247, simple_loss=0.2973, pruned_loss=0.0551, ctc_loss=0.1045, over 19767.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.2868, pruned_loss=0.05921, ctc_loss=0.1108, over 3716373.21 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:40:04,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=109728.0, ans=0.2
+2024-08-26 18:40:40,264 INFO [train.py:1114] (3/4) Epoch 9, batch 700, loss[loss=0.2081, simple_loss=0.2757, pruned_loss=0.05005, ctc_loss=0.1011, over 19716.00 frames. ], tot_loss[loss=0.225, simple_loss=0.287, pruned_loss=0.05934, ctc_loss=0.111, over 3747551.70 frames. ], batch size: 51, lr: 1.67e-02, grad_scale: 16.0
+2024-08-26 18:40:53,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109994.66666666667, ans=0.125
+2024-08-26 18:40:57,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=109994.66666666667, ans=0.125
+2024-08-26 18:41:01,803 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.271e+02 1.503e+02 1.748e+02 2.321e+02 3.813e+02, threshold=3.497e+02, percent-clipped=1.0
+2024-08-26 18:41:25,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=110154.66666666667, ans=0.125
+2024-08-26 18:41:26,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=110154.66666666667, ans=0.04949747468305833
+2024-08-26 18:41:28,636 INFO [train.py:1114] (3/4) Epoch 9, batch 750, loss[loss=0.2183, simple_loss=0.2868, pruned_loss=0.05449, ctc_loss=0.1022, over 19492.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.2871, pruned_loss=0.0596, ctc_loss=0.1112, over 3774251.77 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 16.0
+2024-08-26 18:41:28,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110208.0, ans=0.125
+2024-08-26 18:41:30,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.15 vs. limit=15.0
+2024-08-26 18:41:33,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=110208.0, ans=0.0
+2024-08-26 18:41:38,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110261.33333333333, ans=0.1
+2024-08-26 18:41:52,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110314.66666666667, ans=0.125
+2024-08-26 18:42:04,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.37 vs. limit=15.0
+2024-08-26 18:42:22,151 INFO [train.py:1114] (3/4) Epoch 9, batch 800, loss[loss=0.2229, simple_loss=0.2755, pruned_loss=0.06161, ctc_loss=0.1175, over 19402.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2869, pruned_loss=0.05976, ctc_loss=0.1115, over 3794706.26 frames. 
], batch size: 48, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:42:24,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=110474.66666666667, ans=0.2 +2024-08-26 18:42:43,913 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 1.427e+02 1.539e+02 1.792e+02 3.382e+02, threshold=3.078e+02, percent-clipped=0.0 +2024-08-26 18:42:50,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.38 vs. limit=15.0 +2024-08-26 18:42:55,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=110634.66666666667, ans=0.125 +2024-08-26 18:43:07,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=110688.0, ans=0.025 +2024-08-26 18:43:08,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=110741.33333333333, ans=0.125 +2024-08-26 18:43:09,191 INFO [train.py:1114] (3/4) Epoch 9, batch 850, loss[loss=0.2289, simple_loss=0.2933, pruned_loss=0.06055, ctc_loss=0.1088, over 19644.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2863, pruned_loss=0.05949, ctc_loss=0.111, over 3813664.29 frames. ], batch size: 59, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:43:19,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=110794.66666666667, ans=0.125 +2024-08-26 18:43:28,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=15.0 +2024-08-26 18:43:31,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=110848.0, ans=0.125 +2024-08-26 18:43:51,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.51 vs. limit=22.5 +2024-08-26 18:43:55,682 INFO [train.py:1114] (3/4) Epoch 9, batch 900, loss[loss=0.2059, simple_loss=0.2667, pruned_loss=0.0527, ctc_loss=0.0991, over 19412.00 frames. ], tot_loss[loss=0.225, simple_loss=0.2864, pruned_loss=0.05954, ctc_loss=0.1112, over 3817494.92 frames. ], batch size: 48, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:45:38,145 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.519e+02 1.752e+02 2.077e+02 5.433e+02, threshold=3.505e+02, percent-clipped=5.0 +2024-08-26 18:45:57,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=111221.33333333333, ans=0.025 +2024-08-26 18:45:58,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=111221.33333333333, ans=0.0 +2024-08-26 18:46:05,598 INFO [train.py:1114] (3/4) Epoch 9, batch 950, loss[loss=0.2097, simple_loss=0.2712, pruned_loss=0.05449, ctc_loss=0.09826, over 19494.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2868, pruned_loss=0.05975, ctc_loss=0.1118, over 3818012.97 frames. 
], batch size: 49, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:46:12,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111274.66666666667, ans=0.0 +2024-08-26 18:46:18,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=111328.0, ans=0.05 +2024-08-26 18:46:39,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=111434.66666666667, ans=0.035 +2024-08-26 18:46:51,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.57 vs. limit=10.0 +2024-08-26 18:46:54,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=111488.0, ans=0.2 +2024-08-26 18:46:57,433 INFO [train.py:1114] (3/4) Epoch 9, batch 1000, loss[loss=0.2079, simple_loss=0.2709, pruned_loss=0.05257, ctc_loss=0.09937, over 19873.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2871, pruned_loss=0.05977, ctc_loss=0.1118, over 3813686.65 frames. ], batch size: 52, lr: 1.66e-02, grad_scale: 32.0 +2024-08-26 18:47:19,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.63 vs. limit=15.0 +2024-08-26 18:47:19,859 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.461e+02 1.756e+02 2.077e+02 6.803e+02, threshold=3.513e+02, percent-clipped=1.0 +2024-08-26 18:47:22,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=111648.0, ans=0.0 +2024-08-26 18:47:25,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=111701.33333333333, ans=0.0 +2024-08-26 18:47:30,723 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.70 vs. limit=22.5 +2024-08-26 18:47:34,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-08-26 18:47:43,917 INFO [train.py:1114] (3/4) Epoch 9, batch 1050, loss[loss=0.2136, simple_loss=0.2845, pruned_loss=0.05234, ctc_loss=0.09511, over 19837.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2867, pruned_loss=0.05963, ctc_loss=0.1114, over 3819984.06 frames. 
], batch size: 57, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:47:44,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=111808.0, ans=0.2 +2024-08-26 18:47:47,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=111808.0, ans=0.125 +2024-08-26 18:48:03,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=111914.66666666667, ans=0.2 +2024-08-26 18:48:09,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=111914.66666666667, ans=0.0 +2024-08-26 18:48:11,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=111968.0, ans=0.0 +2024-08-26 18:48:32,534 INFO [train.py:1114] (3/4) Epoch 9, batch 1100, loss[loss=0.2096, simple_loss=0.2703, pruned_loss=0.05443, ctc_loss=0.09999, over 19591.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2861, pruned_loss=0.05925, ctc_loss=0.1108, over 3828029.26 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:48:59,882 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.444e+02 1.690e+02 2.009e+02 4.396e+02, threshold=3.380e+02, percent-clipped=1.0 +2024-08-26 18:49:04,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=112181.33333333333, ans=0.0 +2024-08-26 18:49:53,109 INFO [train.py:1114] (3/4) Epoch 9, batch 1150, loss[loss=0.1986, simple_loss=0.2688, pruned_loss=0.04614, ctc_loss=0.09037, over 19586.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2859, pruned_loss=0.059, ctc_loss=0.1102, over 3828256.56 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 16.0 +2024-08-26 18:50:03,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.33 vs. limit=22.5 +2024-08-26 18:50:13,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=112394.66666666667, ans=0.2 +2024-08-26 18:50:20,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=15.0 +2024-08-26 18:50:39,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=112501.33333333333, ans=0.2 +2024-08-26 18:50:46,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112554.66666666667, ans=0.1 +2024-08-26 18:50:54,140 INFO [train.py:1114] (3/4) Epoch 9, batch 1200, loss[loss=0.2132, simple_loss=0.2904, pruned_loss=0.04896, ctc_loss=0.09488, over 19818.00 frames. ], tot_loss[loss=0.225, simple_loss=0.287, pruned_loss=0.05935, ctc_loss=0.1109, over 3824123.47 frames. ], batch size: 57, lr: 1.65e-02, grad_scale: 32.0 +2024-08-26 18:50:56,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=112608.0, ans=6.0 +2024-08-26 18:50:56,723 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. 
limit=6.0 +2024-08-26 18:51:09,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112661.33333333333, ans=0.125 +2024-08-26 18:51:11,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112661.33333333333, ans=0.125 +2024-08-26 18:51:13,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.55 vs. limit=15.0 +2024-08-26 18:51:16,821 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.431e+02 1.600e+02 1.807e+02 3.201e+02, threshold=3.201e+02, percent-clipped=0.0 +2024-08-26 18:51:26,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0 +2024-08-26 18:51:42,799 INFO [train.py:1114] (3/4) Epoch 9, batch 1250, loss[loss=0.2412, simple_loss=0.2992, pruned_loss=0.067, ctc_loss=0.123, over 19537.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2873, pruned_loss=0.05942, ctc_loss=0.111, over 3842786.22 frames. ], batch size: 61, lr: 1.65e-02, grad_scale: 32.0 +2024-08-26 18:52:26,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=113088.0, ans=0.025 +2024-08-26 18:52:36,314 INFO [train.py:1114] (3/4) Epoch 9, batch 1300, loss[loss=0.223, simple_loss=0.288, pruned_loss=0.05818, ctc_loss=0.1041, over 18758.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2863, pruned_loss=0.05893, ctc_loss=0.11, over 3846324.30 frames. ], batch size: 76, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:52:39,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=15.0 +2024-08-26 18:52:51,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=113194.66666666667, ans=0.125 +2024-08-26 18:52:54,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113248.0, ans=0.125 +2024-08-26 18:52:57,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=113248.0, ans=0.025 +2024-08-26 18:52:58,752 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.498e+02 1.743e+02 2.034e+02 3.430e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-26 18:53:07,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=113301.33333333333, ans=0.025 +2024-08-26 18:53:19,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=113354.66666666667, ans=0.04949747468305833 +2024-08-26 18:53:23,267 INFO [train.py:1114] (3/4) Epoch 9, batch 1350, loss[loss=0.2446, simple_loss=0.3012, pruned_loss=0.06809, ctc_loss=0.1295, over 19765.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.2859, pruned_loss=0.05857, ctc_loss=0.1094, over 3857696.96 frames. 
], batch size: 54, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:53:30,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=113408.0, ans=0.125 +2024-08-26 18:53:38,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=113461.33333333333, ans=0.125 +2024-08-26 18:53:38,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=113461.33333333333, ans=0.125 +2024-08-26 18:53:41,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=113514.66666666667, ans=0.125 +2024-08-26 18:53:52,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=15.0 +2024-08-26 18:53:54,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=113568.0, ans=0.0 +2024-08-26 18:53:56,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=113568.0, ans=0.0 +2024-08-26 18:54:03,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0 +2024-08-26 18:54:08,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=113621.33333333333, ans=0.2 +2024-08-26 18:54:09,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113674.66666666667, ans=0.125 +2024-08-26 18:54:09,875 INFO [train.py:1114] (3/4) Epoch 9, batch 1400, loss[loss=0.2006, simple_loss=0.26, pruned_loss=0.05181, ctc_loss=0.09415, over 19689.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2856, pruned_loss=0.05859, ctc_loss=0.1093, over 3864354.05 frames. ], batch size: 46, lr: 1.64e-02, grad_scale: 32.0 +2024-08-26 18:54:10,966 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 18:54:25,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113728.0, ans=0.1 +2024-08-26 18:54:33,080 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.492e+02 1.644e+02 1.948e+02 2.802e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-26 18:54:44,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.33 vs. limit=10.0 +2024-08-26 18:54:48,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=113834.66666666667, ans=0.0 +2024-08-26 18:54:48,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.00 vs. limit=12.0 +2024-08-26 18:54:54,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=113888.0, ans=0.0 +2024-08-26 18:54:59,255 INFO [train.py:1114] (3/4) Epoch 9, batch 1450, loss[loss=0.2311, simple_loss=0.2951, pruned_loss=0.06116, ctc_loss=0.1119, over 19658.00 frames. 
], tot_loss[loss=0.2239, simple_loss=0.2862, pruned_loss=0.05887, ctc_loss=0.11, over 3861727.81 frames. ], batch size: 63, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:55:10,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.57 vs. limit=15.0 +2024-08-26 18:55:28,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=114048.0, ans=0.95 +2024-08-26 18:55:29,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=114048.0, ans=0.0 +2024-08-26 18:55:40,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=114101.33333333333, ans=0.125 +2024-08-26 18:55:40,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.20 vs. limit=15.0 +2024-08-26 18:55:43,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=114101.33333333333, ans=0.125 +2024-08-26 18:55:47,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.12 vs. limit=15.0 +2024-08-26 18:55:54,541 INFO [train.py:1114] (3/4) Epoch 9, batch 1500, loss[loss=0.2229, simple_loss=0.2872, pruned_loss=0.0574, ctc_loss=0.1094, over 19577.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2865, pruned_loss=0.05904, ctc_loss=0.1101, over 3861680.15 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:56:03,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=114261.33333333333, ans=0.025 +2024-08-26 18:56:07,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.16 vs. limit=22.5 +2024-08-26 18:56:17,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=114314.66666666667, ans=0.125 +2024-08-26 18:56:18,317 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.542e+02 1.688e+02 1.884e+02 2.711e+02, threshold=3.377e+02, percent-clipped=0.0 +2024-08-26 18:56:18,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114314.66666666667, ans=0.1 +2024-08-26 18:56:25,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.54 vs. limit=6.0 +2024-08-26 18:56:29,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=114368.0, ans=0.2 +2024-08-26 18:56:41,344 INFO [train.py:1114] (3/4) Epoch 9, batch 1550, loss[loss=0.2503, simple_loss=0.3083, pruned_loss=0.06958, ctc_loss=0.1329, over 19610.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2869, pruned_loss=0.05948, ctc_loss=0.1111, over 3846864.95 frames. 
], batch size: 60, lr: 1.64e-02, grad_scale: 16.0 +2024-08-26 18:56:45,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114474.66666666667, ans=0.125 +2024-08-26 18:56:50,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=114528.0, ans=0.0 +2024-08-26 18:57:05,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=114581.33333333333, ans=0.125 +2024-08-26 18:57:19,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.29 vs. limit=6.0 +2024-08-26 18:57:29,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.72 vs. limit=15.0 +2024-08-26 18:57:29,647 INFO [train.py:1114] (3/4) Epoch 9, batch 1600, loss[loss=0.2222, simple_loss=0.2897, pruned_loss=0.05565, ctc_loss=0.1085, over 19832.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.287, pruned_loss=0.05971, ctc_loss=0.1115, over 3836243.84 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 32.0 +2024-08-26 18:57:57,613 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.291e+02 1.549e+02 1.720e+02 1.979e+02 3.573e+02, threshold=3.441e+02, percent-clipped=1.0 +2024-08-26 18:58:19,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=114954.66666666667, ans=0.0 +2024-08-26 18:58:36,372 INFO [train.py:1114] (3/4) Epoch 9, batch 1650, loss[loss=0.2399, simple_loss=0.3081, pruned_loss=0.06304, ctc_loss=0.1141, over 19664.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2874, pruned_loss=0.0596, ctc_loss=0.1112, over 3833329.32 frames. ], batch size: 59, lr: 1.63e-02, grad_scale: 32.0 +2024-08-26 18:59:32,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=115008.0, ans=0.125 +2024-08-26 18:59:37,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=115061.33333333333, ans=0.025 +2024-08-26 18:59:51,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115114.66666666667, ans=0.1 +2024-08-26 18:59:55,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=115168.0, ans=0.025 +2024-08-26 19:00:03,358 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:00:09,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-08-26 19:00:10,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115274.66666666667, ans=0.125 +2024-08-26 19:00:10,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115274.66666666667, ans=0.125 +2024-08-26 19:00:11,433 INFO [train.py:1114] (3/4) Epoch 9, batch 1700, loss[loss=0.193, simple_loss=0.2515, pruned_loss=0.04924, ctc_loss=0.08995, over 19672.00 frames. 
], tot_loss[loss=0.2249, simple_loss=0.287, pruned_loss=0.0593, ctc_loss=0.1107, over 3847405.19 frames. ], batch size: 46, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:00:16,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=115274.66666666667, ans=0.125 +2024-08-26 19:00:17,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=115274.66666666667, ans=0.0 +2024-08-26 19:00:34,661 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.254e+02 1.433e+02 1.619e+02 1.844e+02 2.581e+02, threshold=3.239e+02, percent-clipped=0.0 +2024-08-26 19:00:36,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.50 vs. limit=15.0 +2024-08-26 19:00:41,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=115434.66666666667, ans=0.2 +2024-08-26 19:00:47,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=115488.0, ans=0.125 +2024-08-26 19:00:56,895 INFO [train.py:1114] (3/4) Epoch 9, batch 1750, loss[loss=0.186, simple_loss=0.2506, pruned_loss=0.0437, ctc_loss=0.08488, over 19620.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2868, pruned_loss=0.05889, ctc_loss=0.11, over 3850976.40 frames. ], batch size: 45, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:01:02,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115541.33333333333, ans=0.125 +2024-08-26 19:01:09,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=115594.66666666667, ans=0.2 +2024-08-26 19:01:20,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=115648.0, ans=0.015 +2024-08-26 19:01:20,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=115648.0, ans=0.025 +2024-08-26 19:01:43,101 INFO [train.py:1114] (3/4) Epoch 9, batch 1800, loss[loss=0.2248, simple_loss=0.2895, pruned_loss=0.05798, ctc_loss=0.1107, over 19613.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2863, pruned_loss=0.05854, ctc_loss=0.1096, over 3851723.27 frames. ], batch size: 55, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:01:51,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=115861.33333333333, ans=0.0 +2024-08-26 19:01:54,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=115861.33333333333, ans=0.125 +2024-08-26 19:02:04,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=115914.66666666667, ans=0.025 +2024-08-26 19:02:04,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=115914.66666666667, ans=0.125 +2024-08-26 19:02:04,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.33 vs. 
limit=12.0 +2024-08-26 19:02:06,019 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.500e+02 1.645e+02 1.953e+02 3.789e+02, threshold=3.290e+02, percent-clipped=1.0 +2024-08-26 19:02:09,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=115968.0, ans=0.125 +2024-08-26 19:02:25,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116021.33333333333, ans=0.1 +2024-08-26 19:02:27,287 INFO [train.py:1114] (3/4) Epoch 9, batch 1850, loss[loss=0.244, simple_loss=0.3106, pruned_loss=0.06467, ctc_loss=0.1203, over 19589.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.286, pruned_loss=0.05853, ctc_loss=0.1093, over 3856604.38 frames. ], batch size: 57, lr: 1.63e-02, grad_scale: 16.0 +2024-08-26 19:02:30,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.74 vs. limit=22.5 +2024-08-26 19:02:31,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=116074.66666666667, ans=0.025 +2024-08-26 19:02:41,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116128.0, ans=0.125 +2024-08-26 19:03:13,221 INFO [train.py:1114] (3/4) Epoch 9, batch 1900, loss[loss=0.2329, simple_loss=0.2974, pruned_loss=0.06145, ctc_loss=0.1137, over 19651.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2867, pruned_loss=0.0588, ctc_loss=0.1098, over 3860965.01 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:03:16,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=116341.33333333333, ans=0.2 +2024-08-26 19:03:25,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=116394.66666666667, ans=0.0 +2024-08-26 19:03:26,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=116394.66666666667, ans=0.2 +2024-08-26 19:03:35,871 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.270e+02 1.509e+02 1.695e+02 1.935e+02 3.320e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-26 19:03:47,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=116554.66666666667, ans=0.125 +2024-08-26 19:03:53,436 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.89 vs. limit=15.0 +2024-08-26 19:03:56,686 INFO [train.py:1114] (3/4) Epoch 9, batch 1950, loss[loss=0.1893, simple_loss=0.2618, pruned_loss=0.04265, ctc_loss=0.07864, over 19588.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.2876, pruned_loss=0.05885, ctc_loss=0.1099, over 3870204.12 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:04:03,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=116608.0, ans=0.025 +2024-08-26 19:04:05,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.61 vs. 
limit=22.5 +2024-08-26 19:04:13,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=116714.66666666667, ans=0.0 +2024-08-26 19:04:22,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116714.66666666667, ans=0.0 +2024-08-26 19:04:24,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=116714.66666666667, ans=0.04949747468305833 +2024-08-26 19:04:39,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=116821.33333333333, ans=0.0 +2024-08-26 19:04:45,343 INFO [train.py:1114] (3/4) Epoch 9, batch 2000, loss[loss=0.1994, simple_loss=0.2631, pruned_loss=0.0493, ctc_loss=0.09273, over 19670.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2879, pruned_loss=0.05907, ctc_loss=0.1104, over 3855943.67 frames. ], batch size: 45, lr: 1.62e-02, grad_scale: 32.0 +2024-08-26 19:04:46,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116874.66666666667, ans=0.1 +2024-08-26 19:05:09,044 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.518e+02 1.711e+02 1.998e+02 4.316e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-26 19:05:16,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=12.0 +2024-08-26 19:05:24,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.67 vs. limit=15.0 +2024-08-26 19:05:26,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117088.0, ans=0.1 +2024-08-26 19:05:26,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=117088.0, ans=0.0 +2024-08-26 19:05:28,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0 +2024-08-26 19:05:29,285 INFO [train.py:1114] (3/4) Epoch 9, batch 2050, loss[loss=0.1995, simple_loss=0.2572, pruned_loss=0.05152, ctc_loss=0.09685, over 19728.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2865, pruned_loss=0.05889, ctc_loss=0.1099, over 3851663.93 frames. ], batch size: 47, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:05:29,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=117141.33333333333, ans=6.0 +2024-08-26 19:05:51,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.36 vs. limit=15.0 +2024-08-26 19:05:53,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=117248.0, ans=0.2 +2024-08-26 19:06:12,998 INFO [train.py:1114] (3/4) Epoch 9, batch 2100, loss[loss=0.2329, simple_loss=0.2972, pruned_loss=0.06196, ctc_loss=0.1115, over 19760.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2857, pruned_loss=0.05824, ctc_loss=0.1088, over 3857941.15 frames. 
], batch size: 54, lr: 1.62e-02, grad_scale: 16.0 +2024-08-26 19:06:14,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0 +2024-08-26 19:06:16,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=117408.0, ans=0.2 +2024-08-26 19:06:22,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=117461.33333333333, ans=0.125 +2024-08-26 19:06:36,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.10 vs. limit=15.0 +2024-08-26 19:06:36,670 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.488e+02 1.695e+02 1.945e+02 3.088e+02, threshold=3.391e+02, percent-clipped=0.0 +2024-08-26 19:06:39,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=117568.0, ans=0.125 +2024-08-26 19:06:40,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=117568.0, ans=0.0 +2024-08-26 19:06:41,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=117568.0, ans=0.0 +2024-08-26 19:06:43,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117568.0, ans=0.1 +2024-08-26 19:06:44,028 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=15.0 +2024-08-26 19:06:49,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=117621.33333333333, ans=0.0 +2024-08-26 19:06:49,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117621.33333333333, ans=0.125 +2024-08-26 19:06:50,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=117621.33333333333, ans=0.0 +2024-08-26 19:06:51,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117621.33333333333, ans=0.1 +2024-08-26 19:06:51,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117621.33333333333, ans=0.1 +2024-08-26 19:06:55,559 INFO [train.py:1114] (3/4) Epoch 9, batch 2150, loss[loss=0.2069, simple_loss=0.273, pruned_loss=0.05096, ctc_loss=0.09732, over 19849.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2852, pruned_loss=0.0579, ctc_loss=0.1081, over 3869521.36 frames. 
], batch size: 52, lr: 1.62e-02, grad_scale: 8.0 +2024-08-26 19:06:59,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117674.66666666667, ans=0.1 +2024-08-26 19:07:05,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=117728.0, ans=0.125 +2024-08-26 19:07:19,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117781.33333333333, ans=0.125 +2024-08-26 19:07:20,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=117781.33333333333, ans=0.125 +2024-08-26 19:07:38,965 INFO [train.py:1114] (3/4) Epoch 9, batch 2200, loss[loss=0.232, simple_loss=0.2993, pruned_loss=0.06007, ctc_loss=0.1116, over 19598.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2852, pruned_loss=0.05796, ctc_loss=0.1081, over 3867350.51 frames. ], batch size: 57, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:07:40,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-08-26 19:07:55,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=118048.0, ans=0.0 +2024-08-26 19:08:03,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.528e+02 1.792e+02 2.132e+02 3.306e+02, threshold=3.583e+02, percent-clipped=0.0 +2024-08-26 19:08:13,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.75 vs. limit=15.0 +2024-08-26 19:08:34,329 INFO [train.py:1114] (3/4) Epoch 9, batch 2250, loss[loss=0.2279, simple_loss=0.2988, pruned_loss=0.05614, ctc_loss=0.1116, over 19612.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2858, pruned_loss=0.05811, ctc_loss=0.1083, over 3867293.27 frames. ], batch size: 55, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:08:37,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=118208.0, ans=0.125 +2024-08-26 19:09:03,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.61 vs. limit=15.0 +2024-08-26 19:09:17,841 INFO [train.py:1114] (3/4) Epoch 9, batch 2300, loss[loss=0.2254, simple_loss=0.2806, pruned_loss=0.0622, ctc_loss=0.1143, over 19512.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2849, pruned_loss=0.05814, ctc_loss=0.1082, over 3860876.33 frames. ], batch size: 49, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:09:27,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=118528.0, ans=0.125 +2024-08-26 19:09:29,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.57 vs. 
limit=15.0 +2024-08-26 19:09:31,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118528.0, ans=0.125 +2024-08-26 19:09:42,038 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.479e+02 1.669e+02 2.317e+02 3.988e+02, threshold=3.338e+02, percent-clipped=3.0 +2024-08-26 19:09:43,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=118634.66666666667, ans=0.0 +2024-08-26 19:09:54,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=118688.0, ans=0.125 +2024-08-26 19:10:01,366 INFO [train.py:1114] (3/4) Epoch 9, batch 2350, loss[loss=0.23, simple_loss=0.2947, pruned_loss=0.06062, ctc_loss=0.11, over 19655.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.285, pruned_loss=0.05842, ctc_loss=0.1088, over 3863477.13 frames. ], batch size: 63, lr: 1.61e-02, grad_scale: 8.0 +2024-08-26 19:10:04,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=118741.33333333333, ans=0.0 +2024-08-26 19:10:11,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=118794.66666666667, ans=0.0 +2024-08-26 19:10:14,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=118794.66666666667, ans=0.2 +2024-08-26 19:11:00,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118794.66666666667, ans=0.125 +2024-08-26 19:11:03,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.31 vs. limit=15.0 +2024-08-26 19:11:08,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=118848.0, ans=0.0 +2024-08-26 19:11:15,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=118901.33333333333, ans=0.2 +2024-08-26 19:11:32,740 INFO [train.py:1114] (3/4) Epoch 9, batch 2400, loss[loss=0.2472, simple_loss=0.315, pruned_loss=0.06458, ctc_loss=0.1254, over 19275.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2875, pruned_loss=0.05935, ctc_loss=0.1104, over 3857594.78 frames. ], batch size: 71, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:11:50,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.48 vs. 
limit=10.0 +2024-08-26 19:12:04,707 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.526e+02 1.714e+02 1.892e+02 3.175e+02, threshold=3.427e+02, percent-clipped=0.0 +2024-08-26 19:12:08,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=119168.0, ans=0.125 +2024-08-26 19:12:12,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119168.0, ans=0.1 +2024-08-26 19:12:22,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119221.33333333333, ans=0.125 +2024-08-26 19:12:24,778 INFO [train.py:1114] (3/4) Epoch 9, batch 2450, loss[loss=0.3104, simple_loss=0.3291, pruned_loss=0.1072, ctc_loss=0.1935, over 13405.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.2919, pruned_loss=0.06298, ctc_loss=0.1172, over 3729870.34 frames. ], batch size: 141, lr: 1.61e-02, grad_scale: 16.0 +2024-08-26 19:12:28,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=119274.66666666667, ans=0.125 +2024-08-26 19:12:33,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=119328.0, ans=0.125 +2024-08-26 19:12:33,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=119328.0, ans=0.125 +2024-08-26 19:12:37,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=119328.0, ans=0.025 +2024-08-26 19:13:12,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.46 vs. limit=15.0 +2024-08-26 19:14:15,883 INFO [train.py:1114] (3/4) Epoch 10, batch 0, loss[loss=0.1992, simple_loss=0.2622, pruned_loss=0.04956, ctc_loss=0.09294, over 19409.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2622, pruned_loss=0.04956, ctc_loss=0.09294, over 19409.00 frames. ], batch size: 48, lr: 1.53e-02, grad_scale: 16.0 +2024-08-26 19:14:15,883 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 19:14:48,057 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.1896, simple_loss=0.2813, pruned_loss=0.03622, ctc_loss=0.0637, over 944034.00 frames. 
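+Note: throughout this log the train.py loss fields are consistent with one fixed linear combination, loss = 0.5*simple_loss + pruned_loss + 0.2*ctc_loss. For the validation line above, 0.5*0.2813 + 0.03622 + 0.2*0.0637 ≈ 0.1896, and the per-batch and tot_loss fields check out the same way (e.g. 0.5*0.2884 + 0.06032 + 0.2*0.1125 ≈ 0.2270 at Epoch 9, batch 300). A minimal sketch of that check follows; the 0.5 and 0.2 weights are inferred from the logged numbers, not read from train.py, so treat them as assumptions:
+```python
+# Sketch (assumption): loss = 0.5*simple_loss + 1.0*pruned_loss + 0.2*ctc_loss,
+# with the 0.5 and 0.2 weights inferred by fitting the values in this log.
+SIMPLE_LOSS_SCALE = 0.5
+CTC_LOSS_SCALE = 0.2
+
+def combined_loss(simple_loss: float, pruned_loss: float, ctc_loss: float) -> float:
+    return SIMPLE_LOSS_SCALE * simple_loss + pruned_loss + CTC_LOSS_SCALE * ctc_loss
+
+# Epoch 10 validation line: loss=0.1896, simple_loss=0.2813,
+# pruned_loss=0.03622, ctc_loss=0.0637
+assert abs(combined_loss(0.2813, 0.03622, 0.0637) - 0.1896) < 1e-3
+```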
+2024-08-26 19:14:48,058 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12874MB +2024-08-26 19:14:58,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=119536.0, ans=0.0 +2024-08-26 19:15:03,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119536.0, ans=0.1 +2024-08-26 19:15:10,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=119589.33333333333, ans=0.2 +2024-08-26 19:15:25,087 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.696e+02 1.867e+02 2.057e+02 3.331e+02, threshold=3.733e+02, percent-clipped=0.0 +2024-08-26 19:15:26,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=119696.0, ans=0.125 +2024-08-26 19:15:29,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0 +2024-08-26 19:15:34,255 INFO [train.py:1114] (3/4) Epoch 10, batch 50, loss[loss=0.1959, simple_loss=0.2588, pruned_loss=0.04826, ctc_loss=0.09129, over 19730.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2871, pruned_loss=0.05842, ctc_loss=0.111, over 846230.90 frames. ], batch size: 47, lr: 1.52e-02, grad_scale: 16.0 +2024-08-26 19:15:48,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=119802.66666666667, ans=0.125 +2024-08-26 19:15:48,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=119802.66666666667, ans=0.2 +2024-08-26 19:16:05,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=119909.33333333333, ans=10.0 +2024-08-26 19:16:20,467 INFO [train.py:1114] (3/4) Epoch 10, batch 100, loss[loss=0.2145, simple_loss=0.2807, pruned_loss=0.05308, ctc_loss=0.1053, over 19727.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2878, pruned_loss=0.05826, ctc_loss=0.1101, over 1499330.95 frames. ], batch size: 51, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:16:29,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.73 vs. limit=15.0 +2024-08-26 19:16:40,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.66 vs. limit=15.0 +2024-08-26 19:17:03,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.471e+02 1.633e+02 1.792e+02 2.780e+02, threshold=3.265e+02, percent-clipped=0.0 +2024-08-26 19:17:07,736 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.89 vs. limit=22.5 +2024-08-26 19:17:09,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=120229.33333333333, ans=0.05 +2024-08-26 19:17:11,611 INFO [train.py:1114] (3/4) Epoch 10, batch 150, loss[loss=0.2125, simple_loss=0.2671, pruned_loss=0.05728, ctc_loss=0.1082, over 19722.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2849, pruned_loss=0.05681, ctc_loss=0.1071, over 2027878.69 frames. 
], batch size: 47, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:17:16,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.96 vs. limit=15.0 +2024-08-26 19:17:48,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=120442.66666666667, ans=0.07 +2024-08-26 19:18:04,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=120496.0, ans=0.125 +2024-08-26 19:18:07,138 INFO [train.py:1114] (3/4) Epoch 10, batch 200, loss[loss=0.2373, simple_loss=0.2973, pruned_loss=0.06491, ctc_loss=0.1188, over 18295.00 frames. ], tot_loss[loss=0.219, simple_loss=0.283, pruned_loss=0.05633, ctc_loss=0.1059, over 2435690.75 frames. ], batch size: 85, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:18:31,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=120549.33333333333, ans=0.125 +2024-08-26 19:18:32,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=120549.33333333333, ans=0.0 +2024-08-26 19:18:36,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=120602.66666666667, ans=0.025 +2024-08-26 19:18:41,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.05 vs. limit=15.0 +2024-08-26 19:18:43,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-26 19:18:47,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=120656.0, ans=0.0 +2024-08-26 19:18:52,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120709.33333333333, ans=0.1 +2024-08-26 19:19:02,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.07 vs. limit=15.0 +2024-08-26 19:19:12,222 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.459e+02 1.596e+02 1.815e+02 3.041e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-26 19:19:48,320 INFO [train.py:1114] (3/4) Epoch 10, batch 250, loss[loss=0.2407, simple_loss=0.3009, pruned_loss=0.06649, ctc_loss=0.1189, over 19432.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2832, pruned_loss=0.0561, ctc_loss=0.1055, over 2755567.99 frames. 
], batch size: 67, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:19:48,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=120816.0, ans=0.125 +2024-08-26 19:19:51,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=120816.0, ans=0.125 +2024-08-26 19:20:04,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=120869.33333333333, ans=0.0 +2024-08-26 19:20:17,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=120922.66666666667, ans=0.015 +2024-08-26 19:20:21,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=120976.0, ans=0.025 +2024-08-26 19:20:31,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=120976.0, ans=0.0 +2024-08-26 19:20:45,509 INFO [train.py:1114] (3/4) Epoch 10, batch 300, loss[loss=0.2176, simple_loss=0.2871, pruned_loss=0.05369, ctc_loss=0.1016, over 19521.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2827, pruned_loss=0.05602, ctc_loss=0.1051, over 3001630.94 frames. ], batch size: 61, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:20:51,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=121082.66666666667, ans=0.0 +2024-08-26 19:20:59,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121136.0, ans=0.1 +2024-08-26 19:21:00,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121136.0, ans=0.125 +2024-08-26 19:21:05,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121136.0, ans=0.125 +2024-08-26 19:21:15,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121189.33333333333, ans=0.125 +2024-08-26 19:21:15,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=121189.33333333333, ans=0.125 +2024-08-26 19:21:23,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=121242.66666666667, ans=0.125 +2024-08-26 19:21:29,983 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.480e+02 1.641e+02 1.981e+02 3.456e+02, threshold=3.281e+02, percent-clipped=2.0 +2024-08-26 19:21:33,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=121296.0, ans=0.125 +2024-08-26 19:21:35,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121296.0, ans=0.125 +2024-08-26 19:21:38,274 INFO [train.py:1114] (3/4) Epoch 10, batch 350, loss[loss=0.2116, simple_loss=0.2691, pruned_loss=0.05593, ctc_loss=0.1055, over 19727.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2832, pruned_loss=0.05627, ctc_loss=0.1053, over 3191366.70 frames. 
], batch size: 48, lr: 1.52e-02, grad_scale: 8.0 +2024-08-26 19:21:44,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=121349.33333333333, ans=15.0 +2024-08-26 19:21:47,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121402.66666666667, ans=0.125 +2024-08-26 19:22:01,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.46 vs. limit=22.5 +2024-08-26 19:22:13,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=121509.33333333333, ans=0.125 +2024-08-26 19:22:24,851 INFO [train.py:1114] (3/4) Epoch 10, batch 400, loss[loss=0.2256, simple_loss=0.2855, pruned_loss=0.06061, ctc_loss=0.1109, over 19501.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2826, pruned_loss=0.05621, ctc_loss=0.1049, over 3343145.60 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:22:26,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=121616.0, ans=0.07 +2024-08-26 19:22:32,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121616.0, ans=0.1 +2024-08-26 19:22:32,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=121616.0, ans=0.0 +2024-08-26 19:22:36,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121669.33333333333, ans=0.125 +2024-08-26 19:22:37,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=121669.33333333333, ans=0.2 +2024-08-26 19:22:37,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121669.33333333333, ans=0.125 +2024-08-26 19:22:55,116 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:22:55,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=121669.33333333333, ans=0.025 +2024-08-26 19:23:11,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=121776.0, ans=0.125 +2024-08-26 19:23:13,629 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:23:18,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.268e+02 1.471e+02 1.735e+02 2.020e+02 3.245e+02, threshold=3.470e+02, percent-clipped=0.0 +2024-08-26 19:23:22,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.62 vs. limit=8.0 +2024-08-26 19:23:26,369 INFO [train.py:1114] (3/4) Epoch 10, batch 450, loss[loss=0.2305, simple_loss=0.2977, pruned_loss=0.05966, ctc_loss=0.11, over 19624.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2828, pruned_loss=0.05643, ctc_loss=0.1054, over 3451118.13 frames. 
], batch size: 55, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:23:35,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.95 vs. limit=15.0 +2024-08-26 19:23:44,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=121936.0, ans=0.0 +2024-08-26 19:23:50,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=121989.33333333333, ans=0.1 +2024-08-26 19:23:51,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=121989.33333333333, ans=10.0 +2024-08-26 19:24:05,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.66 vs. limit=15.0 +2024-08-26 19:24:09,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=122096.0, ans=0.125 +2024-08-26 19:24:11,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=12.0 +2024-08-26 19:24:19,355 INFO [train.py:1114] (3/4) Epoch 10, batch 500, loss[loss=0.2262, simple_loss=0.293, pruned_loss=0.05814, ctc_loss=0.1077, over 19624.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2819, pruned_loss=0.05609, ctc_loss=0.1046, over 3546995.54 frames. ], batch size: 63, lr: 1.51e-02, grad_scale: 16.0 +2024-08-26 19:24:28,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=122149.33333333333, ans=0.0 +2024-08-26 19:24:37,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=122202.66666666667, ans=0.07 +2024-08-26 19:24:37,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=122202.66666666667, ans=0.04949747468305833 +2024-08-26 19:24:45,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=122202.66666666667, ans=0.125 +2024-08-26 19:24:46,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.44 vs. limit=15.0 +2024-08-26 19:25:01,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=122309.33333333333, ans=0.125 +2024-08-26 19:25:11,337 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.449e+02 1.637e+02 1.959e+02 3.375e+02, threshold=3.275e+02, percent-clipped=0.0 +2024-08-26 19:25:19,742 INFO [train.py:1114] (3/4) Epoch 10, batch 550, loss[loss=0.2414, simple_loss=0.3047, pruned_loss=0.06486, ctc_loss=0.1206, over 19411.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.282, pruned_loss=0.05618, ctc_loss=0.1049, over 3607964.60 frames. 
], batch size: 71, lr: 1.51e-02, grad_scale: 16.0
+2024-08-26 19:25:21,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=122416.0, ans=0.025
+2024-08-26 19:25:25,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=122416.0, ans=0.0
+2024-08-26 19:25:29,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=122469.33333333333, ans=0.125
+2024-08-26 19:25:35,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122469.33333333333, ans=0.125
+2024-08-26 19:25:52,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0
+2024-08-26 19:25:55,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.13 vs. limit=15.0
+2024-08-26 19:26:02,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=122629.33333333333, ans=0.2
+2024-08-26 19:26:10,266 INFO [train.py:1114] (3/4) Epoch 10, batch 600, loss[loss=0.2259, simple_loss=0.2933, pruned_loss=0.05802, ctc_loss=0.1061, over 19394.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.282, pruned_loss=0.05599, ctc_loss=0.1046, over 3665406.62 frames. ], batch size: 67, lr: 1.51e-02, grad_scale: 16.0
+2024-08-26 19:26:50,242 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.480e+02 1.661e+02 1.846e+02 3.271e+02, threshold=3.322e+02, percent-clipped=0.0
+2024-08-26 19:26:50,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122896.0, ans=0.1
+2024-08-26 19:26:51,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122896.0, ans=0.125
+2024-08-26 19:26:58,394 INFO [train.py:1114] (3/4) Epoch 10, batch 650, loss[loss=0.2156, simple_loss=0.2869, pruned_loss=0.05216, ctc_loss=0.1001, over 19772.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2811, pruned_loss=0.0555, ctc_loss=0.1037, over 3716276.14 frames. ], batch size: 54, lr: 1.51e-02, grad_scale: 16.0
+2024-08-26 19:27:04,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.66 vs. limit=15.0
+2024-08-26 19:27:44,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=123162.66666666667, ans=0.0
+2024-08-26 19:27:48,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.48 vs. limit=15.0
+2024-08-26 19:27:51,539 INFO [train.py:1114] (3/4) Epoch 10, batch 700, loss[loss=0.2143, simple_loss=0.2801, pruned_loss=0.05476, ctc_loss=0.09753, over 19732.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2819, pruned_loss=0.05578, ctc_loss=0.1041, over 3748551.67 frames. ], batch size: 51, lr: 1.51e-02, grad_scale: 16.0
+2024-08-26 19:27:52,931 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.64 vs. limit=15.0
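The recurring WARNING lines from optim.py report a five-number summary of recent gradient norms (min, 25%, median, 75%, max), a clipping threshold derived from them, and the percentage of recent batches that were clipped. A minimal sketch of such quartile-based clipping, assuming a simplified rule threshold = Clipping_scale × median (the function name and the exact rule here are illustrative, not the repository's actual optim.py):

```python
import torch

def clip_grad_with_quartiles(params, norm_history, clipping_scale=2.0, window=128):
    """Hedged sketch: derive a clipping threshold from the recent history of
    gradient norms, mirroring the quantities in the optim.py WARNING lines
    (grad-norm quartiles, threshold, percent-clipped)."""
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
    norm_history.append(total_norm)
    del norm_history[:-window]  # keep a sliding window of recent norms

    # Five-number summary (min, 25%, median, 75%, max), as printed in the log.
    t = torch.tensor(sorted(norm_history))
    quartiles = [t[int(f * (len(t) - 1))].item() for f in (0.0, 0.25, 0.5, 0.75, 1.0)]

    # Assumed rule for illustration: threshold = clipping_scale * median.
    threshold = clipping_scale * quartiles[2]
    clipped = total_norm > threshold
    if clipped:
        for g in grads:
            g.mul_(threshold / total_norm)  # rescale gradients above the threshold
    return quartiles, threshold, clipped
```

Under this reading, percent-clipped would be the fraction of recent steps for which `clipped` was True, and a median-based threshold adapts automatically as gradient norms shrink over training.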
+2024-08-26 19:27:55,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=123216.0, ans=0.0
+2024-08-26 19:28:07,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.74 vs. limit=10.0
+2024-08-26 19:28:08,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=123269.33333333333, ans=0.125
+2024-08-26 19:28:15,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=123322.66666666667, ans=0.2
+2024-08-26 19:28:17,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=123322.66666666667, ans=0.0
+2024-08-26 19:28:24,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.85 vs. limit=15.0
+2024-08-26 19:28:24,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.74 vs. limit=22.5
+2024-08-26 19:28:27,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.11 vs. limit=10.0
+2024-08-26 19:28:29,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.526e+02 1.912e+02 2.394e+02 4.336e+02, threshold=3.825e+02, percent-clipped=8.0
+2024-08-26 19:28:38,796 INFO [train.py:1114] (3/4) Epoch 10, batch 750, loss[loss=0.2189, simple_loss=0.2873, pruned_loss=0.05491, ctc_loss=0.1018, over 19492.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2816, pruned_loss=0.05583, ctc_loss=0.104, over 3775306.47 frames. ], batch size: 54, lr: 1.50e-02, grad_scale: 16.0
+2024-08-26 19:28:46,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=123482.66666666667, ans=0.0
+2024-08-26 19:28:48,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123536.0, ans=0.1
+2024-08-26 19:29:27,331 INFO [train.py:1114] (3/4) Epoch 10, batch 800, loss[loss=0.1954, simple_loss=0.2608, pruned_loss=0.04621, ctc_loss=0.09405, over 19415.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2818, pruned_loss=0.05603, ctc_loss=0.1045, over 3796476.02 frames. 
], batch size: 48, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:29:32,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123749.33333333333, ans=0.1 +2024-08-26 19:29:33,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=123749.33333333333, ans=0.2 +2024-08-26 19:29:43,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=123802.66666666667, ans=0.125 +2024-08-26 19:29:44,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=123802.66666666667, ans=0.125 +2024-08-26 19:29:46,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123856.0, ans=0.1 +2024-08-26 19:29:50,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=123856.0, ans=0.125 +2024-08-26 19:29:51,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=123856.0, ans=0.025 +2024-08-26 19:29:57,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=123909.33333333333, ans=0.2 +2024-08-26 19:30:07,512 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.505e+02 1.745e+02 2.038e+02 4.368e+02, threshold=3.490e+02, percent-clipped=1.0 +2024-08-26 19:30:07,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=123962.66666666667, ans=0.025 +2024-08-26 19:30:12,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=123962.66666666667, ans=0.125 +2024-08-26 19:30:17,648 INFO [train.py:1114] (3/4) Epoch 10, batch 850, loss[loss=0.2335, simple_loss=0.303, pruned_loss=0.0592, ctc_loss=0.1139, over 19669.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2822, pruned_loss=0.05635, ctc_loss=0.1049, over 3814970.14 frames. ], batch size: 59, lr: 1.50e-02, grad_scale: 32.0 +2024-08-26 19:30:31,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.86 vs. limit=15.0 +2024-08-26 19:30:45,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124122.66666666667, ans=0.1 +2024-08-26 19:30:50,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=124176.0, ans=0.125 +2024-08-26 19:30:52,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.46 vs. limit=10.0 +2024-08-26 19:31:08,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=124229.33333333333, ans=0.125 +2024-08-26 19:31:14,630 INFO [train.py:1114] (3/4) Epoch 10, batch 900, loss[loss=0.1876, simple_loss=0.2513, pruned_loss=0.04507, ctc_loss=0.08415, over 19809.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2826, pruned_loss=0.05664, ctc_loss=0.1055, over 3819622.48 frames. 
], batch size: 49, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:31:17,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=124282.66666666667, ans=0.0 +2024-08-26 19:31:18,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=124282.66666666667, ans=0.2 +2024-08-26 19:31:23,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124282.66666666667, ans=0.0 +2024-08-26 19:32:35,082 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.525e+02 1.733e+02 2.036e+02 4.140e+02, threshold=3.466e+02, percent-clipped=3.0 +2024-08-26 19:32:42,440 INFO [train.py:1114] (3/4) Epoch 10, batch 950, loss[loss=0.1953, simple_loss=0.2661, pruned_loss=0.04556, ctc_loss=0.08352, over 19502.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2829, pruned_loss=0.05682, ctc_loss=0.1061, over 3821051.06 frames. ], batch size: 49, lr: 1.50e-02, grad_scale: 16.0 +2024-08-26 19:32:43,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=124549.33333333333, ans=0.0 +2024-08-26 19:33:19,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=124709.33333333333, ans=0.0 +2024-08-26 19:33:19,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=124709.33333333333, ans=0.025 +2024-08-26 19:33:29,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124762.66666666667, ans=0.1 +2024-08-26 19:33:35,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=124816.0, ans=0.125 +2024-08-26 19:33:35,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=124816.0, ans=0.125 +2024-08-26 19:33:36,587 INFO [train.py:1114] (3/4) Epoch 10, batch 1000, loss[loss=0.1967, simple_loss=0.2666, pruned_loss=0.04645, ctc_loss=0.08473, over 19847.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2835, pruned_loss=0.05695, ctc_loss=0.1066, over 3815840.92 frames. 
], batch size: 52, lr: 1.50e-02, grad_scale: 16.0
+2024-08-26 19:33:37,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=124816.0, ans=0.125
+2024-08-26 19:33:42,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=124816.0, ans=0.1
+2024-08-26 19:33:56,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=124869.33333333333, ans=0.125
+2024-08-26 19:33:57,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=124869.33333333333, ans=0.05
+2024-08-26 19:34:07,386 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 19:34:19,957 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.433e+02 1.580e+02 1.832e+02 3.141e+02, threshold=3.159e+02, percent-clipped=0.0
+2024-08-26 19:34:25,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0
+2024-08-26 19:34:27,361 INFO [train.py:1114] (3/4) Epoch 10, batch 1050, loss[loss=0.2131, simple_loss=0.2826, pruned_loss=0.05169, ctc_loss=0.1003, over 19850.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.282, pruned_loss=0.05623, ctc_loss=0.1053, over 3822334.63 frames. ], batch size: 57, lr: 1.50e-02, grad_scale: 16.0
+2024-08-26 19:34:50,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=125082.66666666667, ans=0.125
+2024-08-26 19:34:51,100 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=12.0
+2024-08-26 19:34:54,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=125082.66666666667, ans=0.0
+2024-08-26 19:34:58,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=125136.0, ans=0.0
+2024-08-26 19:35:08,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.82 vs. limit=22.5
+2024-08-26 19:35:31,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125296.0, ans=0.125
+2024-08-26 19:35:36,348 INFO [train.py:1114] (3/4) Epoch 10, batch 1100, loss[loss=0.1889, simple_loss=0.2603, pruned_loss=0.04271, ctc_loss=0.08007, over 19583.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2821, pruned_loss=0.05617, ctc_loss=0.1052, over 3829859.54 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0
+2024-08-26 19:35:39,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=125349.33333333333, ans=0.2
+2024-08-26 19:35:41,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=125349.33333333333, ans=0.125
+2024-08-26 19:35:42,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.62 vs. limit=15.0
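The ScheduledFloat lines track hyperparameters (skip rates, balancer probabilities, dropout) that are annealed as a function of batch_count; `ans` is the value in effect at that point. A minimal sketch of such a schedule, assuming a simple piecewise-linear rule (the class name matches the logs, but this simplified implementation and the example breakpoints are assumptions, not the project's scaling.py):

```python
class ScheduledFloat:
    """Hedged sketch: a float-valued hyperparameter interpolated
    piecewise-linearly over batch_count, as suggested by the
    (batch_count, ans) pairs in the ScheduledFloat log lines."""
    def __init__(self, *points):
        # points: (batch_count, value) pairs, e.g. (0.0, 0.2), (4000.0, 0.0)
        self.points = sorted(points)

    def value_at(self, batch_count: float) -> float:
        (x0, y0), *rest = self.points
        if batch_count <= x0:
            return y0
        for x1, y1 in rest:
            if batch_count <= x1:
                # linear interpolation between the surrounding breakpoints
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
            x0, y0 = x1, y1
        return y0  # past the last breakpoint: hold the final value

# e.g. a skip rate that decays from 0.2 to 0.0 over the first 4000 batches
skip_rate = ScheduledFloat((0.0, 0.2), (4000.0, 0.0))
print(skip_rate.value_at(125456.0))  # -> 0.0, fully decayed at this batch_count
```

Under this reading, successive log lines for the same name simply show the scheduled value moving along its breakpoints as batch_count grows.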
+2024-08-26 19:35:54,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=125456.0, ans=0.2
+2024-08-26 19:36:18,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.433e+02 1.605e+02 1.841e+02 2.779e+02, threshold=3.211e+02, percent-clipped=0.0
+2024-08-26 19:36:25,423 INFO [train.py:1114] (3/4) Epoch 10, batch 1150, loss[loss=0.2096, simple_loss=0.2808, pruned_loss=0.0501, ctc_loss=0.09545, over 19598.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2822, pruned_loss=0.05617, ctc_loss=0.1051, over 3829334.24 frames. ], batch size: 52, lr: 1.49e-02, grad_scale: 8.0
+2024-08-26 19:36:44,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=125669.33333333333, ans=0.0
+2024-08-26 19:37:15,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=125829.33333333333, ans=0.025
+2024-08-26 19:37:16,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=125829.33333333333, ans=0.0
+2024-08-26 19:37:17,639 INFO [train.py:1114] (3/4) Epoch 10, batch 1200, loss[loss=0.2155, simple_loss=0.2905, pruned_loss=0.05091, ctc_loss=0.09688, over 19833.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2828, pruned_loss=0.05633, ctc_loss=0.1055, over 3823765.45 frames. ], batch size: 57, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:37:44,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0
+2024-08-26 19:37:57,393 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.466e+02 1.608e+02 1.824e+02 2.979e+02, threshold=3.216e+02, percent-clipped=0.0
+2024-08-26 19:38:04,045 INFO [train.py:1114] (3/4) Epoch 10, batch 1250, loss[loss=0.2364, simple_loss=0.2976, pruned_loss=0.06548, ctc_loss=0.1105, over 19520.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2831, pruned_loss=0.05624, ctc_loss=0.1051, over 3842282.76 frames. ], batch size: 61, lr: 1.49e-02, grad_scale: 16.0
+2024-08-26 19:38:07,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=15.0
+2024-08-26 19:38:58,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=126309.33333333333, ans=0.125
+2024-08-26 19:39:36,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126309.33333333333, ans=0.125
+2024-08-26 19:40:04,475 INFO [train.py:1114] (3/4) Epoch 10, batch 1300, loss[loss=0.2319, simple_loss=0.2963, pruned_loss=0.06099, ctc_loss=0.1137, over 18801.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2822, pruned_loss=0.05583, ctc_loss=0.1043, over 3845848.70 frames. 
], batch size: 76, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:40:04,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126416.0, ans=0.125 +2024-08-26 19:40:10,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=126416.0, ans=0.025 +2024-08-26 19:40:29,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=126522.66666666667, ans=0.0 +2024-08-26 19:40:30,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126522.66666666667, ans=0.125 +2024-08-26 19:40:54,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.480e+02 1.716e+02 1.981e+02 3.061e+02, threshold=3.432e+02, percent-clipped=0.0 +2024-08-26 19:40:58,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.44 vs. limit=22.5 +2024-08-26 19:41:00,863 INFO [train.py:1114] (3/4) Epoch 10, batch 1350, loss[loss=0.2055, simple_loss=0.2792, pruned_loss=0.04771, ctc_loss=0.09107, over 19779.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2815, pruned_loss=0.0555, ctc_loss=0.1036, over 3856321.71 frames. ], batch size: 54, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:41:08,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126682.66666666667, ans=0.1 +2024-08-26 19:41:19,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=126736.0, ans=0.09899494936611666 +2024-08-26 19:41:30,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=126789.33333333333, ans=0.2 +2024-08-26 19:41:46,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.96 vs. limit=15.0 +2024-08-26 19:41:47,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=126896.0, ans=0.0 +2024-08-26 19:41:52,394 INFO [train.py:1114] (3/4) Epoch 10, batch 1400, loss[loss=0.1888, simple_loss=0.249, pruned_loss=0.04695, ctc_loss=0.08665, over 19703.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2813, pruned_loss=0.05555, ctc_loss=0.1037, over 3863368.53 frames. ], batch size: 46, lr: 1.49e-02, grad_scale: 16.0 +2024-08-26 19:42:19,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127002.66666666667, ans=0.125 +2024-08-26 19:42:37,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-08-26 19:42:43,188 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.452e+02 1.585e+02 1.952e+02 4.788e+02, threshold=3.170e+02, percent-clipped=2.0 +2024-08-26 19:42:47,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0 +2024-08-26 19:42:48,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.63 vs. 
limit=10.0 +2024-08-26 19:42:49,755 INFO [train.py:1114] (3/4) Epoch 10, batch 1450, loss[loss=0.2524, simple_loss=0.3159, pruned_loss=0.06891, ctc_loss=0.1279, over 19678.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2823, pruned_loss=0.05595, ctc_loss=0.1044, over 3862457.50 frames. ], batch size: 63, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:42:49,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=127216.0, ans=0.125 +2024-08-26 19:43:12,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=127322.66666666667, ans=0.2 +2024-08-26 19:43:16,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.59 vs. limit=15.0 +2024-08-26 19:43:24,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=127322.66666666667, ans=0.125 +2024-08-26 19:43:48,230 INFO [train.py:1114] (3/4) Epoch 10, batch 1500, loss[loss=0.2192, simple_loss=0.2812, pruned_loss=0.05769, ctc_loss=0.1047, over 19590.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2824, pruned_loss=0.05569, ctc_loss=0.1041, over 3861690.34 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:43:49,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=127482.66666666667, ans=0.125 +2024-08-26 19:43:54,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127482.66666666667, ans=0.125 +2024-08-26 19:44:07,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-08-26 19:44:28,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=127642.66666666667, ans=0.125 +2024-08-26 19:44:34,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=127696.0, ans=0.125 +2024-08-26 19:44:37,660 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.427e+02 1.587e+02 1.794e+02 3.285e+02, threshold=3.174e+02, percent-clipped=1.0 +2024-08-26 19:44:52,485 INFO [train.py:1114] (3/4) Epoch 10, batch 1550, loss[loss=0.2133, simple_loss=0.2858, pruned_loss=0.05107, ctc_loss=0.09661, over 19597.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2821, pruned_loss=0.05565, ctc_loss=0.1042, over 3846689.75 frames. ], batch size: 60, lr: 1.48e-02, grad_scale: 16.0 +2024-08-26 19:45:20,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=127909.33333333333, ans=0.0 +2024-08-26 19:45:23,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.70 vs. limit=15.0 +2024-08-26 19:45:29,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.25 vs. 
limit=22.5 +2024-08-26 19:45:31,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127962.66666666667, ans=0.125 +2024-08-26 19:45:41,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127962.66666666667, ans=0.1 +2024-08-26 19:45:43,659 INFO [train.py:1114] (3/4) Epoch 10, batch 1600, loss[loss=0.2402, simple_loss=0.3014, pruned_loss=0.06391, ctc_loss=0.1281, over 19835.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2822, pruned_loss=0.0559, ctc_loss=0.1048, over 3835877.42 frames. ], batch size: 57, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:45:48,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-08-26 19:45:59,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=128069.33333333333, ans=0.125 +2024-08-26 19:46:01,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=128069.33333333333, ans=6.0 +2024-08-26 19:46:15,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128176.0, ans=0.125 +2024-08-26 19:46:17,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128176.0, ans=0.1 +2024-08-26 19:46:26,525 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.460e+02 1.671e+02 2.068e+02 2.984e+02, threshold=3.342e+02, percent-clipped=0.0 +2024-08-26 19:46:29,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=128229.33333333333, ans=0.025 +2024-08-26 19:46:31,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=128229.33333333333, ans=0.025 +2024-08-26 19:46:33,074 INFO [train.py:1114] (3/4) Epoch 10, batch 1650, loss[loss=0.2376, simple_loss=0.299, pruned_loss=0.06411, ctc_loss=0.1201, over 19638.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2824, pruned_loss=0.05588, ctc_loss=0.1047, over 3832221.01 frames. 
], batch size: 59, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:46:42,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=128282.66666666667, ans=0.125 +2024-08-26 19:46:55,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=128336.0, ans=0.0 +2024-08-26 19:47:04,483 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:47:05,420 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:47:09,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=128442.66666666667, ans=0.1 +2024-08-26 19:47:10,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=128442.66666666667, ans=10.0 +2024-08-26 19:47:14,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=128496.0, ans=0.0 +2024-08-26 19:47:28,677 INFO [train.py:1114] (3/4) Epoch 10, batch 1700, loss[loss=0.1783, simple_loss=0.247, pruned_loss=0.03914, ctc_loss=0.07838, over 19679.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2822, pruned_loss=0.05569, ctc_loss=0.1044, over 3846404.40 frames. ], batch size: 46, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:47:33,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128549.33333333333, ans=0.125 +2024-08-26 19:47:40,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=128602.66666666667, ans=0.125 +2024-08-26 19:47:45,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=128656.0, ans=0.0 +2024-08-26 19:48:18,887 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.440e+02 1.568e+02 1.897e+02 2.765e+02, threshold=3.136e+02, percent-clipped=0.0 +2024-08-26 19:48:19,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=128762.66666666667, ans=0.04949747468305833 +2024-08-26 19:48:25,118 INFO [train.py:1114] (3/4) Epoch 10, batch 1750, loss[loss=0.189, simple_loss=0.2543, pruned_loss=0.04466, ctc_loss=0.08616, over 19678.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2818, pruned_loss=0.05531, ctc_loss=0.1037, over 3851655.37 frames. ], batch size: 45, lr: 1.48e-02, grad_scale: 32.0 +2024-08-26 19:48:35,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=128869.33333333333, ans=0.125 +2024-08-26 19:48:38,657 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. 
limit=15.0 +2024-08-26 19:48:43,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128922.66666666667, ans=0.125 +2024-08-26 19:48:47,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=128922.66666666667, ans=0.125 +2024-08-26 19:49:08,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129082.66666666667, ans=0.1 +2024-08-26 19:49:08,999 INFO [train.py:1114] (3/4) Epoch 10, batch 1800, loss[loss=0.2096, simple_loss=0.2866, pruned_loss=0.04809, ctc_loss=0.09116, over 19609.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2819, pruned_loss=0.05548, ctc_loss=0.1037, over 3853702.55 frames. ], batch size: 55, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:49:33,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.01 vs. limit=15.0 +2024-08-26 19:49:36,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=129189.33333333333, ans=0.07 +2024-08-26 19:49:45,086 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:49:49,356 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.466e+02 1.715e+02 2.130e+02 3.505e+02, threshold=3.430e+02, percent-clipped=4.0 +2024-08-26 19:49:50,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=129296.0, ans=0.0 +2024-08-26 19:49:55,613 INFO [train.py:1114] (3/4) Epoch 10, batch 1850, loss[loss=0.2244, simple_loss=0.2982, pruned_loss=0.05438, ctc_loss=0.1048, over 19591.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2816, pruned_loss=0.05515, ctc_loss=0.1031, over 3856696.98 frames. ], batch size: 57, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:50:06,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.34 vs. limit=15.0 +2024-08-26 19:50:07,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.78 vs. limit=6.0 +2024-08-26 19:50:12,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=129456.0, ans=0.1 +2024-08-26 19:50:24,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=129456.0, ans=0.2 +2024-08-26 19:50:26,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.50 vs. limit=22.5 +2024-08-26 19:50:27,922 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.78 vs. limit=15.0 +2024-08-26 19:50:31,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=129509.33333333333, ans=0.125 +2024-08-26 19:50:35,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.94 vs. 
limit=15.0 +2024-08-26 19:50:50,325 INFO [train.py:1114] (3/4) Epoch 10, batch 1900, loss[loss=0.2222, simple_loss=0.2967, pruned_loss=0.05398, ctc_loss=0.09927, over 19625.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2825, pruned_loss=0.05549, ctc_loss=0.1035, over 3861802.46 frames. ], batch size: 59, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:50:59,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=129669.33333333333, ans=0.125 +2024-08-26 19:51:01,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=129669.33333333333, ans=0.0 +2024-08-26 19:51:05,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=129669.33333333333, ans=0.125 +2024-08-26 19:51:06,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.24 vs. limit=22.5 +2024-08-26 19:51:07,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=129722.66666666667, ans=15.0 +2024-08-26 19:51:07,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=129722.66666666667, ans=0.015 +2024-08-26 19:51:25,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=129829.33333333333, ans=0.125 +2024-08-26 19:51:27,689 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.498e+02 1.655e+02 1.944e+02 4.101e+02, threshold=3.311e+02, percent-clipped=1.0 +2024-08-26 19:51:33,746 INFO [train.py:1114] (3/4) Epoch 10, batch 1950, loss[loss=0.2014, simple_loss=0.2641, pruned_loss=0.05066, ctc_loss=0.09324, over 19584.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2833, pruned_loss=0.05548, ctc_loss=0.1037, over 3870507.56 frames. ], batch size: 52, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:51:36,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=129882.66666666667, ans=0.125 +2024-08-26 19:51:50,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129989.33333333333, ans=0.125 +2024-08-26 19:52:32,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=130042.66666666667, ans=0.025 +2024-08-26 19:52:51,593 INFO [train.py:1114] (3/4) Epoch 10, batch 2000, loss[loss=0.1918, simple_loss=0.2513, pruned_loss=0.04851, ctc_loss=0.08786, over 19650.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2842, pruned_loss=0.05629, ctc_loss=0.1053, over 3854930.75 frames. ], batch size: 45, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:52:59,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=130202.66666666667, ans=0.125 +2024-08-26 19:53:09,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130256.0, ans=0.1 +2024-08-26 19:53:12,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.42 vs. 
limit=22.5 +2024-08-26 19:53:29,083 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.467e+02 1.617e+02 1.850e+02 3.299e+02, threshold=3.233e+02, percent-clipped=0.0 +2024-08-26 19:53:35,204 INFO [train.py:1114] (3/4) Epoch 10, batch 2050, loss[loss=0.1904, simple_loss=0.2584, pruned_loss=0.04426, ctc_loss=0.08446, over 19715.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2833, pruned_loss=0.05615, ctc_loss=0.105, over 3852752.19 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:53:53,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=130522.66666666667, ans=0.0 +2024-08-26 19:53:58,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=130522.66666666667, ans=0.2 +2024-08-26 19:54:00,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130576.0, ans=0.1 +2024-08-26 19:54:08,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130576.0, ans=0.1 +2024-08-26 19:54:13,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=130629.33333333333, ans=0.125 +2024-08-26 19:54:18,656 INFO [train.py:1114] (3/4) Epoch 10, batch 2100, loss[loss=0.2152, simple_loss=0.2875, pruned_loss=0.05176, ctc_loss=0.09878, over 19771.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2822, pruned_loss=0.0556, ctc_loss=0.1039, over 3859455.41 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 32.0 +2024-08-26 19:54:23,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130682.66666666667, ans=0.125 +2024-08-26 19:54:33,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=130736.0, ans=0.2 +2024-08-26 19:54:33,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=130736.0, ans=0.0 +2024-08-26 19:54:56,949 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.404e+02 1.614e+02 1.979e+02 3.349e+02, threshold=3.228e+02, percent-clipped=1.0 +2024-08-26 19:54:58,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130896.0, ans=0.0 +2024-08-26 19:54:59,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=130896.0, ans=0.125 +2024-08-26 19:55:03,175 INFO [train.py:1114] (3/4) Epoch 10, batch 2150, loss[loss=0.1963, simple_loss=0.2646, pruned_loss=0.04612, ctc_loss=0.08935, over 19864.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2812, pruned_loss=0.05514, ctc_loss=0.1028, over 3870385.41 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:55:06,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. 
limit=15.0 +2024-08-26 19:55:09,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=130949.33333333333, ans=0.07 +2024-08-26 19:55:12,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=131002.66666666667, ans=0.0 +2024-08-26 19:55:16,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0 +2024-08-26 19:55:21,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=131056.0, ans=0.125 +2024-08-26 19:55:25,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=131056.0, ans=0.09899494936611666 +2024-08-26 19:55:32,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=15.0 +2024-08-26 19:55:34,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=131109.33333333334, ans=0.0 +2024-08-26 19:55:40,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=131162.66666666666, ans=0.0 +2024-08-26 19:55:48,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.22 vs. limit=15.0 +2024-08-26 19:55:50,184 INFO [train.py:1114] (3/4) Epoch 10, batch 2200, loss[loss=0.2242, simple_loss=0.2921, pruned_loss=0.05673, ctc_loss=0.1071, over 19586.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2814, pruned_loss=0.05502, ctc_loss=0.1026, over 3868666.47 frames. ], batch size: 57, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:55:50,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131216.0, ans=0.125 +2024-08-26 19:55:52,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.14 vs. limit=10.0 +2024-08-26 19:55:54,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=131216.0, ans=0.0 +2024-08-26 19:56:33,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0 +2024-08-26 19:56:33,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.37 vs. limit=15.0 +2024-08-26 19:56:38,535 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.505e+02 1.694e+02 1.989e+02 3.015e+02, threshold=3.388e+02, percent-clipped=0.0 +2024-08-26 19:56:44,673 INFO [train.py:1114] (3/4) Epoch 10, batch 2250, loss[loss=0.2408, simple_loss=0.3066, pruned_loss=0.06369, ctc_loss=0.1191, over 19619.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2815, pruned_loss=0.05505, ctc_loss=0.1028, over 3868653.23 frames. ], batch size: 55, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:56:46,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.86 vs. 
limit=15.0 +2024-08-26 19:56:51,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=131482.66666666666, ans=0.0 +2024-08-26 19:56:59,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=15.0 +2024-08-26 19:56:59,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131536.0, ans=0.1 +2024-08-26 19:57:27,775 INFO [train.py:1114] (3/4) Epoch 10, batch 2300, loss[loss=0.2182, simple_loss=0.28, pruned_loss=0.0569, ctc_loss=0.1067, over 19503.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2807, pruned_loss=0.05485, ctc_loss=0.1025, over 3862127.26 frames. ], batch size: 49, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:57:34,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=131749.33333333334, ans=0.0 +2024-08-26 19:57:41,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=131802.66666666666, ans=0.125 +2024-08-26 19:57:48,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131856.0, ans=0.125 +2024-08-26 19:57:52,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.83 vs. limit=15.0 +2024-08-26 19:58:02,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=131962.66666666666, ans=0.2 +2024-08-26 19:58:05,764 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.499e+02 1.709e+02 2.092e+02 3.241e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-26 19:58:38,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131962.66666666666, ans=0.1 +2024-08-26 19:58:41,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0 +2024-08-26 19:58:43,746 INFO [train.py:1114] (3/4) Epoch 10, batch 2350, loss[loss=0.2397, simple_loss=0.3001, pruned_loss=0.06649, ctc_loss=0.1158, over 19679.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2809, pruned_loss=0.05527, ctc_loss=0.1033, over 3864104.51 frames. 
], batch size: 63, lr: 1.46e-02, grad_scale: 16.0 +2024-08-26 19:58:44,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=132016.0, ans=0.125 +2024-08-26 19:58:48,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132016.0, ans=0.125 +2024-08-26 19:58:51,716 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 19:59:02,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=132069.33333333334, ans=0.125 +2024-08-26 19:59:05,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=132069.33333333334, ans=0.125 +2024-08-26 19:59:16,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=132176.0, ans=0.125 +2024-08-26 19:59:18,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=132176.0, ans=0.125 +2024-08-26 19:59:19,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=132176.0, ans=0.0 +2024-08-26 19:59:31,502 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=12.0 +2024-08-26 19:59:32,683 INFO [train.py:1114] (3/4) Epoch 10, batch 2400, loss[loss=0.2525, simple_loss=0.3131, pruned_loss=0.0703, ctc_loss=0.1284, over 19248.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2833, pruned_loss=0.05632, ctc_loss=0.1052, over 3859369.59 frames. ], batch size: 71, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 19:59:34,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=132282.66666666666, ans=0.2 +2024-08-26 19:59:43,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132336.0, ans=0.0 +2024-08-26 19:59:43,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=132336.0, ans=0.0 +2024-08-26 19:59:51,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132389.33333333334, ans=0.1 +2024-08-26 19:59:52,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=132389.33333333334, ans=0.2 +2024-08-26 20:00:16,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.44 vs. limit=10.0 +2024-08-26 20:00:18,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.08 vs. limit=15.0 +2024-08-26 20:00:36,871 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.532e+02 1.694e+02 1.900e+02 3.260e+02, threshold=3.387e+02, percent-clipped=0.0 +2024-08-26 20:00:42,849 INFO [train.py:1114] (3/4) Epoch 10, batch 2450, loss[loss=0.2875, simple_loss=0.3167, pruned_loss=0.09261, ctc_loss=0.1829, over 13096.00 frames. 
], tot_loss[loss=0.2257, simple_loss=0.2875, pruned_loss=0.05964, ctc_loss=0.1116, over 3729036.20 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 32.0 +2024-08-26 20:00:51,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=12.0 +2024-08-26 20:01:24,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132709.33333333334, ans=0.0 +2024-08-26 20:01:30,318 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:03:28,136 INFO [train.py:1114] (3/4) Epoch 11, batch 0, loss[loss=0.2005, simple_loss=0.2609, pruned_loss=0.05138, ctc_loss=0.09346, over 19802.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2609, pruned_loss=0.05138, ctc_loss=0.09346, over 19802.00 frames. ], batch size: 49, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:03:28,136 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 20:03:42,228 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1858, simple_loss=0.2776, pruned_loss=0.03491, ctc_loss=0.06042, over 944034.00 frames. +2024-08-26 20:03:42,229 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB +2024-08-26 20:03:46,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=132757.33333333334, ans=0.125 +2024-08-26 20:04:01,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=132864.0, ans=0.125 +2024-08-26 20:04:02,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=132864.0, ans=0.0 +2024-08-26 20:04:12,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=132917.33333333334, ans=0.5 +2024-08-26 20:04:18,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132917.33333333334, ans=0.0 +2024-08-26 20:04:32,383 INFO [train.py:1114] (3/4) Epoch 11, batch 50, loss[loss=0.1976, simple_loss=0.2613, pruned_loss=0.04916, ctc_loss=0.08891, over 19733.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2848, pruned_loss=0.0558, ctc_loss=0.1052, over 844726.23 frames. 
], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:04:36,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133024.0, ans=0.125 +2024-08-26 20:04:37,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=133024.0, ans=0.05 +2024-08-26 20:04:37,952 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.624e+02 1.801e+02 2.017e+02 3.320e+02, threshold=3.603e+02, percent-clipped=0.0 +2024-08-26 20:04:38,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=133024.0, ans=0.0 +2024-08-26 20:04:39,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=133024.0, ans=0.035 +2024-08-26 20:04:41,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133077.33333333334, ans=0.125 +2024-08-26 20:04:50,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133077.33333333334, ans=0.125 +2024-08-26 20:04:52,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=133130.66666666666, ans=0.125 +2024-08-26 20:05:21,251 INFO [train.py:1114] (3/4) Epoch 11, batch 100, loss[loss=0.1793, simple_loss=0.254, pruned_loss=0.03825, ctc_loss=0.06998, over 19711.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2837, pruned_loss=0.05541, ctc_loss=0.1041, over 1499036.81 frames. ], batch size: 51, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:05:35,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=133344.0, ans=0.0 +2024-08-26 20:05:50,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.57 vs. limit=12.0 +2024-08-26 20:05:54,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=133450.66666666666, ans=0.0 +2024-08-26 20:05:59,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.36 vs. limit=15.0 +2024-08-26 20:06:02,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=133504.0, ans=0.0 +2024-08-26 20:06:10,885 INFO [train.py:1114] (3/4) Epoch 11, batch 150, loss[loss=0.1969, simple_loss=0.2517, pruned_loss=0.05185, ctc_loss=0.09612, over 19699.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.281, pruned_loss=0.05459, ctc_loss=0.1021, over 2027778.22 frames. 
], batch size: 47, lr: 1.39e-02, grad_scale: 32.0 +2024-08-26 20:06:16,414 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.457e+02 1.584e+02 1.841e+02 2.561e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-26 20:06:23,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=133610.66666666666, ans=0.035 +2024-08-26 20:06:37,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=133664.0, ans=0.04949747468305833 +2024-08-26 20:06:38,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133717.33333333334, ans=0.1 +2024-08-26 20:06:54,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133770.66666666666, ans=0.1 +2024-08-26 20:06:56,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.34 vs. limit=15.0 +2024-08-26 20:07:45,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-08-26 20:08:08,086 INFO [train.py:1114] (3/4) Epoch 11, batch 200, loss[loss=0.2368, simple_loss=0.3019, pruned_loss=0.06252, ctc_loss=0.1167, over 18237.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2793, pruned_loss=0.05383, ctc_loss=0.1007, over 2435452.68 frames. ], batch size: 85, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:08:23,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.91 vs. limit=22.5 +2024-08-26 20:08:33,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=133930.66666666666, ans=0.025 +2024-08-26 20:08:34,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=133930.66666666666, ans=0.125 +2024-08-26 20:08:42,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=133984.0, ans=0.2 +2024-08-26 20:08:50,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.86 vs. limit=22.5 +2024-08-26 20:09:00,093 INFO [train.py:1114] (3/4) Epoch 11, batch 250, loss[loss=0.2338, simple_loss=0.2989, pruned_loss=0.06204, ctc_loss=0.1117, over 19402.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.279, pruned_loss=0.05372, ctc_loss=0.1005, over 2756090.28 frames. 
], batch size: 67, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:09:05,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.397e+02 1.518e+02 1.749e+02 2.921e+02, threshold=3.037e+02, percent-clipped=0.0 +2024-08-26 20:09:15,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=134144.0, ans=0.125 +2024-08-26 20:09:17,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=134144.0, ans=0.0 +2024-08-26 20:09:42,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=134304.0, ans=0.04949747468305833 +2024-08-26 20:09:47,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=134304.0, ans=0.125 +2024-08-26 20:09:47,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=134304.0, ans=0.125 +2024-08-26 20:09:49,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.73 vs. limit=6.0 +2024-08-26 20:09:51,418 INFO [train.py:1114] (3/4) Epoch 11, batch 300, loss[loss=0.2177, simple_loss=0.2881, pruned_loss=0.05366, ctc_loss=0.1, over 19547.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2788, pruned_loss=0.0536, ctc_loss=0.1, over 3001824.53 frames. ], batch size: 61, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:10:35,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-08-26 20:10:41,581 INFO [train.py:1114] (3/4) Epoch 11, batch 350, loss[loss=0.2015, simple_loss=0.266, pruned_loss=0.04907, ctc_loss=0.09708, over 19756.00 frames. ], tot_loss[loss=0.213, simple_loss=0.279, pruned_loss=0.05356, ctc_loss=0.09981, over 3191252.45 frames. ], batch size: 48, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:10:47,193 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.479e+02 1.637e+02 2.052e+02 3.441e+02, threshold=3.275e+02, percent-clipped=1.0 +2024-08-26 20:10:48,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.97 vs. limit=15.0 +2024-08-26 20:10:49,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=134624.0, ans=0.125 +2024-08-26 20:10:57,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=134677.33333333334, ans=0.125 +2024-08-26 20:11:01,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.30 vs. 
limit=15.0 +2024-08-26 20:11:10,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134784.0, ans=0.125 +2024-08-26 20:11:16,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=134784.0, ans=0.0 +2024-08-26 20:11:25,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134837.33333333334, ans=0.1 +2024-08-26 20:11:31,270 INFO [train.py:1114] (3/4) Epoch 11, batch 400, loss[loss=0.2039, simple_loss=0.2757, pruned_loss=0.04774, ctc_loss=0.09171, over 19498.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2789, pruned_loss=0.05337, ctc_loss=0.09954, over 3343130.63 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:11:31,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=134890.66666666666, ans=0.125 +2024-08-26 20:11:35,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134890.66666666666, ans=0.0 +2024-08-26 20:11:45,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=134944.0, ans=0.1 +2024-08-26 20:12:00,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.10 vs. limit=15.0 +2024-08-26 20:12:00,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=134997.33333333334, ans=0.2 +2024-08-26 20:12:03,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=135050.66666666666, ans=0.0 +2024-08-26 20:12:08,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.73 vs. limit=15.0 +2024-08-26 20:12:20,805 INFO [train.py:1114] (3/4) Epoch 11, batch 450, loss[loss=0.194, simple_loss=0.277, pruned_loss=0.0402, ctc_loss=0.07642, over 19600.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2789, pruned_loss=0.05342, ctc_loss=0.09983, over 3451343.18 frames. ], batch size: 55, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:12:23,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=135157.33333333334, ans=0.05 +2024-08-26 20:12:29,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.489e+02 1.652e+02 2.008e+02 3.634e+02, threshold=3.305e+02, percent-clipped=1.0 +2024-08-26 20:12:44,780 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:12:49,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=135264.0, ans=0.0 +2024-08-26 20:12:49,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.93 vs. limit=22.5 +2024-08-26 20:13:11,634 INFO [train.py:1114] (3/4) Epoch 11, batch 500, loss[loss=0.217, simple_loss=0.2879, pruned_loss=0.0527, ctc_loss=0.1015, over 19630.00 frames. 
], tot_loss[loss=0.2118, simple_loss=0.2781, pruned_loss=0.05297, ctc_loss=0.09908, over 3546681.93 frames. ], batch size: 63, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:13:55,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.48 vs. limit=22.5 +2024-08-26 20:13:58,579 INFO [train.py:1114] (3/4) Epoch 11, batch 550, loss[loss=0.2422, simple_loss=0.3079, pruned_loss=0.06572, ctc_loss=0.1128, over 19359.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2781, pruned_loss=0.05298, ctc_loss=0.099, over 3609335.63 frames. ], batch size: 71, lr: 1.38e-02, grad_scale: 32.0 +2024-08-26 20:14:06,847 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.449e+02 1.695e+02 2.078e+02 4.377e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-26 20:14:07,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135690.66666666666, ans=0.125 +2024-08-26 20:14:32,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135850.66666666666, ans=0.125 +2024-08-26 20:14:42,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=135904.0, ans=0.0 +2024-08-26 20:14:42,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.18 vs. limit=10.0 +2024-08-26 20:14:44,506 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.09 vs. limit=22.5 +2024-08-26 20:14:50,535 INFO [train.py:1114] (3/4) Epoch 11, batch 600, loss[loss=0.2193, simple_loss=0.2896, pruned_loss=0.05376, ctc_loss=0.1037, over 19401.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2781, pruned_loss=0.05291, ctc_loss=0.09889, over 3666839.14 frames. ], batch size: 67, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:14:52,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.44 vs. limit=6.0 +2024-08-26 20:15:02,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-08-26 20:15:09,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136064.0, ans=0.125 +2024-08-26 20:15:12,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=136064.0, ans=0.025 +2024-08-26 20:15:12,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.04 vs. limit=15.0 +2024-08-26 20:15:13,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.27 vs. limit=10.0 +2024-08-26 20:15:21,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.88 vs. 
limit=12.0 +2024-08-26 20:15:25,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.52 vs. limit=22.5 +2024-08-26 20:15:28,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=136117.33333333334, ans=0.125 +2024-08-26 20:15:30,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=136170.66666666666, ans=0.0 +2024-08-26 20:15:37,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.76 vs. limit=15.0 +2024-08-26 20:15:41,596 INFO [train.py:1114] (3/4) Epoch 11, batch 650, loss[loss=0.2007, simple_loss=0.2784, pruned_loss=0.04417, ctc_loss=0.08687, over 19771.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2779, pruned_loss=0.05272, ctc_loss=0.09837, over 3716528.57 frames. ], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:15:47,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.457e+02 1.627e+02 2.058e+02 3.143e+02, threshold=3.253e+02, percent-clipped=0.0 +2024-08-26 20:15:49,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136224.0, ans=0.125 +2024-08-26 20:16:06,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136330.66666666666, ans=0.1 +2024-08-26 20:16:08,820 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.19 vs. limit=15.0 +2024-08-26 20:16:11,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=136384.0, ans=0.125 +2024-08-26 20:16:12,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.47 vs. limit=15.0 +2024-08-26 20:16:27,834 INFO [train.py:1114] (3/4) Epoch 11, batch 700, loss[loss=0.1986, simple_loss=0.2645, pruned_loss=0.04843, ctc_loss=0.08966, over 19728.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2787, pruned_loss=0.0531, ctc_loss=0.09936, over 3747550.11 frames. ], batch size: 51, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:16:57,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=136650.66666666666, ans=0.0 +2024-08-26 20:17:04,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=136650.66666666666, ans=0.0 +2024-08-26 20:17:09,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=136704.0, ans=0.1 +2024-08-26 20:17:13,522 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.90 vs. limit=15.0 +2024-08-26 20:17:16,580 INFO [train.py:1114] (3/4) Epoch 11, batch 750, loss[loss=0.1982, simple_loss=0.2722, pruned_loss=0.04492, ctc_loss=0.08619, over 19492.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2781, pruned_loss=0.05288, ctc_loss=0.09888, over 3772940.14 frames. 
], batch size: 54, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:17:24,640 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.496e+02 1.727e+02 2.151e+02 3.286e+02, threshold=3.455e+02, percent-clipped=1.0 +2024-08-26 20:17:45,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=136864.0, ans=0.125 +2024-08-26 20:18:00,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.06 vs. limit=15.0 +2024-08-26 20:18:08,124 INFO [train.py:1114] (3/4) Epoch 11, batch 800, loss[loss=0.1945, simple_loss=0.2625, pruned_loss=0.04572, ctc_loss=0.08741, over 19814.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2785, pruned_loss=0.05312, ctc_loss=0.09908, over 3794555.52 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:19:26,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=137290.66666666666, ans=0.0 +2024-08-26 20:19:27,338 INFO [train.py:1114] (3/4) Epoch 11, batch 850, loss[loss=0.2311, simple_loss=0.3009, pruned_loss=0.05753, ctc_loss=0.1156, over 19679.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2781, pruned_loss=0.05308, ctc_loss=0.09897, over 3813600.62 frames. ], batch size: 59, lr: 1.37e-02, grad_scale: 32.0 +2024-08-26 20:19:39,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.453e+02 1.601e+02 1.920e+02 5.497e+02, threshold=3.202e+02, percent-clipped=1.0 +2024-08-26 20:19:57,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=137344.0, ans=0.0 +2024-08-26 20:20:14,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=137397.33333333334, ans=0.0 +2024-08-26 20:20:53,097 INFO [train.py:1114] (3/4) Epoch 11, batch 900, loss[loss=0.189, simple_loss=0.2599, pruned_loss=0.04331, ctc_loss=0.07882, over 19418.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2785, pruned_loss=0.05333, ctc_loss=0.09953, over 3817804.25 frames. ], batch size: 48, lr: 1.37e-02, grad_scale: 16.0 +2024-08-26 20:20:53,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.12 vs. 
limit=15.0 +2024-08-26 20:20:54,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=137557.33333333334, ans=0.125 +2024-08-26 20:21:14,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=137664.0, ans=0.04949747468305833 +2024-08-26 20:21:26,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=137664.0, ans=0.125 +2024-08-26 20:21:27,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=137664.0, ans=0.025 +2024-08-26 20:21:28,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=137717.33333333334, ans=0.125 +2024-08-26 20:21:33,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137717.33333333334, ans=0.1 +2024-08-26 20:21:38,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=137770.66666666666, ans=0.0 +2024-08-26 20:21:48,794 INFO [train.py:1114] (3/4) Epoch 11, batch 950, loss[loss=0.206, simple_loss=0.2727, pruned_loss=0.05053, ctc_loss=0.0956, over 19512.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2788, pruned_loss=0.05341, ctc_loss=0.09956, over 3819341.80 frames. ], batch size: 49, lr: 1.37e-02, grad_scale: 16.0 +2024-08-26 20:21:55,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.468e+02 1.744e+02 2.017e+02 3.816e+02, threshold=3.488e+02, percent-clipped=2.0 +2024-08-26 20:22:37,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=138037.33333333334, ans=0.0 +2024-08-26 20:22:40,788 INFO [train.py:1114] (3/4) Epoch 11, batch 1000, loss[loss=0.1945, simple_loss=0.2687, pruned_loss=0.04469, ctc_loss=0.07726, over 19850.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.28, pruned_loss=0.05415, ctc_loss=0.1009, over 3815711.39 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:22:51,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=138144.0, ans=0.125 +2024-08-26 20:23:15,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138250.66666666666, ans=0.1 +2024-08-26 20:23:28,370 INFO [train.py:1114] (3/4) Epoch 11, batch 1050, loss[loss=0.2015, simple_loss=0.2741, pruned_loss=0.04668, ctc_loss=0.08868, over 19839.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2791, pruned_loss=0.05384, ctc_loss=0.1004, over 3822587.29 frames. 
], batch size: 57, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:23:29,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=138357.33333333334, ans=0.125 +2024-08-26 20:23:34,928 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.363e+02 1.534e+02 1.839e+02 4.578e+02, threshold=3.069e+02, percent-clipped=1.0 +2024-08-26 20:24:08,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=138357.33333333334, ans=0.0 +2024-08-26 20:24:12,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=138410.66666666666, ans=0.125 +2024-08-26 20:24:33,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=138410.66666666666, ans=0.0 +2024-08-26 20:24:50,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=138517.33333333334, ans=0.0 +2024-08-26 20:24:56,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.30 vs. limit=22.5 +2024-08-26 20:25:07,640 INFO [train.py:1114] (3/4) Epoch 11, batch 1100, loss[loss=0.2172, simple_loss=0.2797, pruned_loss=0.05702, ctc_loss=0.1017, over 19576.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2783, pruned_loss=0.05329, ctc_loss=0.0996, over 3830551.23 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:25:10,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138624.0, ans=0.125 +2024-08-26 20:25:11,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=138624.0, ans=0.125 +2024-08-26 20:25:22,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=138677.33333333334, ans=0.0 +2024-08-26 20:25:25,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.67 vs. limit=15.0 +2024-08-26 20:25:36,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=138784.0, ans=0.125 +2024-08-26 20:25:41,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138784.0, ans=0.125 +2024-08-26 20:25:42,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=138784.0, ans=0.0 +2024-08-26 20:25:56,914 INFO [train.py:1114] (3/4) Epoch 11, batch 1150, loss[loss=0.1876, simple_loss=0.2575, pruned_loss=0.04345, ctc_loss=0.07673, over 19582.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2782, pruned_loss=0.05335, ctc_loss=0.09966, over 3828472.64 frames. ], batch size: 52, lr: 1.36e-02, grad_scale: 16.0 +2024-08-26 20:26:00,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. 
limit=15.0 +2024-08-26 20:26:03,584 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.470e+02 1.661e+02 1.952e+02 3.516e+02, threshold=3.323e+02, percent-clipped=2.0 +2024-08-26 20:26:40,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139104.0, ans=0.125 +2024-08-26 20:26:42,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139104.0, ans=0.1 +2024-08-26 20:26:45,578 INFO [train.py:1114] (3/4) Epoch 11, batch 1200, loss[loss=0.2155, simple_loss=0.2831, pruned_loss=0.05371, ctc_loss=0.1011, over 19827.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2793, pruned_loss=0.05381, ctc_loss=0.1005, over 3824357.90 frames. ], batch size: 57, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:26:58,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=139210.66666666666, ans=0.0 +2024-08-26 20:26:59,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139210.66666666666, ans=0.125 +2024-08-26 20:27:04,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-08-26 20:27:13,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=139317.33333333334, ans=0.025 +2024-08-26 20:27:46,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.94 vs. limit=15.0 +2024-08-26 20:28:18,696 INFO [train.py:1114] (3/4) Epoch 11, batch 1250, loss[loss=0.2281, simple_loss=0.2939, pruned_loss=0.05863, ctc_loss=0.1123, over 19501.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2799, pruned_loss=0.0538, ctc_loss=0.1005, over 3842991.96 frames. ], batch size: 61, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:28:22,562 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:28:27,606 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.425e+02 1.545e+02 1.729e+02 3.064e+02, threshold=3.089e+02, percent-clipped=0.0 +2024-08-26 20:28:35,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=139477.33333333334, ans=0.07 +2024-08-26 20:28:40,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=139530.66666666666, ans=0.1 +2024-08-26 20:28:59,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=139584.0, ans=0.125 +2024-08-26 20:29:03,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=139637.33333333334, ans=0.125 +2024-08-26 20:29:12,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=139690.66666666666, ans=0.125 +2024-08-26 20:29:12,949 INFO [train.py:1114] (3/4) Epoch 11, batch 1300, loss[loss=0.2361, simple_loss=0.3044, pruned_loss=0.06118, ctc_loss=0.1136, over 18834.00 frames. 
], tot_loss[loss=0.2128, simple_loss=0.279, pruned_loss=0.05331, ctc_loss=0.09962, over 3847251.73 frames. ], batch size: 76, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:29:19,958 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-08-26 20:29:30,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=139744.0, ans=0.07 +2024-08-26 20:35:29,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139797.33333333334, ans=0.125 +2024-08-26 20:35:52,048 INFO [train.py:1114] (3/4) Epoch 11, batch 1350, loss[loss=0.2154, simple_loss=0.2825, pruned_loss=0.05511, ctc_loss=0.09494, over 19752.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2785, pruned_loss=0.05297, ctc_loss=0.09878, over 3858638.85 frames. ], batch size: 54, lr: 1.36e-02, grad_scale: 32.0 +2024-08-26 20:35:55,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=139957.33333333334, ans=0.0 +2024-08-26 20:35:55,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.86 vs. limit=10.0 +2024-08-26 20:35:58,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.441e+02 1.644e+02 1.919e+02 3.174e+02, threshold=3.287e+02, percent-clipped=1.0 +2024-08-26 20:35:58,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=139957.33333333334, ans=0.125 +2024-08-26 20:36:22,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=140117.33333333334, ans=0.125 +2024-08-26 20:36:24,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=140117.33333333334, ans=10.0 +2024-08-26 20:36:36,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-26 20:36:37,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=140170.66666666666, ans=10.0 +2024-08-26 20:36:38,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140170.66666666666, ans=0.125 +2024-08-26 20:36:41,165 INFO [train.py:1114] (3/4) Epoch 11, batch 1400, loss[loss=0.1917, simple_loss=0.2484, pruned_loss=0.04923, ctc_loss=0.09119, over 19677.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2782, pruned_loss=0.05279, ctc_loss=0.09843, over 3865655.93 frames. 
], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:36:43,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=140224.0, ans=0.125 +2024-08-26 20:37:20,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=140277.33333333334, ans=0.0 +2024-08-26 20:37:27,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140330.66666666666, ans=0.1 +2024-08-26 20:37:46,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=140384.0, ans=0.0 +2024-08-26 20:38:01,753 INFO [train.py:1114] (3/4) Epoch 11, batch 1450, loss[loss=0.2438, simple_loss=0.3041, pruned_loss=0.06843, ctc_loss=0.1169, over 19655.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2789, pruned_loss=0.05305, ctc_loss=0.09878, over 3863849.63 frames. ], batch size: 63, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:38:06,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140490.66666666666, ans=0.125 +2024-08-26 20:38:08,107 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.486e+02 1.636e+02 1.926e+02 3.321e+02, threshold=3.272e+02, percent-clipped=1.0 +2024-08-26 20:38:16,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-26 20:38:17,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=140544.0, ans=0.0 +2024-08-26 20:38:23,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=140597.33333333334, ans=0.0 +2024-08-26 20:38:40,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=140704.0, ans=0.2 +2024-08-26 20:38:50,528 INFO [train.py:1114] (3/4) Epoch 11, batch 1500, loss[loss=0.2225, simple_loss=0.2945, pruned_loss=0.05406, ctc_loss=0.1059, over 19575.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2787, pruned_loss=0.05282, ctc_loss=0.09851, over 3863318.59 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:38:51,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=140757.33333333334, ans=0.0 +2024-08-26 20:38:55,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=140757.33333333334, ans=0.015 +2024-08-26 20:39:21,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=140917.33333333334, ans=0.2 +2024-08-26 20:39:35,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=140970.66666666666, ans=0.125 +2024-08-26 20:39:38,998 INFO [train.py:1114] (3/4) Epoch 11, batch 1550, loss[loss=0.2396, simple_loss=0.301, pruned_loss=0.06532, ctc_loss=0.1187, over 19600.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2784, pruned_loss=0.05276, ctc_loss=0.09867, over 3848889.14 frames. 
], batch size: 60, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:39:43,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=12.0 +2024-08-26 20:39:45,236 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.401e+02 1.612e+02 1.919e+02 3.103e+02, threshold=3.225e+02, percent-clipped=0.0 +2024-08-26 20:39:51,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=141077.33333333334, ans=10.0 +2024-08-26 20:40:00,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141130.66666666666, ans=0.1 +2024-08-26 20:40:28,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.32 vs. limit=15.0 +2024-08-26 20:40:29,938 INFO [train.py:1114] (3/4) Epoch 11, batch 1600, loss[loss=0.23, simple_loss=0.2963, pruned_loss=0.05923, ctc_loss=0.113, over 19856.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2788, pruned_loss=0.05334, ctc_loss=0.09976, over 3836420.81 frames. ], batch size: 57, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:40:31,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=141290.66666666666, ans=15.0 +2024-08-26 20:40:34,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=141290.66666666666, ans=0.125 +2024-08-26 20:40:49,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=22.5 +2024-08-26 20:40:59,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=12.0 +2024-08-26 20:41:02,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141450.66666666666, ans=0.0 +2024-08-26 20:41:18,740 INFO [train.py:1114] (3/4) Epoch 11, batch 1650, loss[loss=0.2071, simple_loss=0.2782, pruned_loss=0.05006, ctc_loss=0.08972, over 19653.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2789, pruned_loss=0.05351, ctc_loss=0.1002, over 3833698.53 frames. 
], batch size: 59, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:41:21,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=141557.33333333334, ans=0.05 +2024-08-26 20:41:25,303 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.523e+02 1.726e+02 1.964e+02 3.202e+02, threshold=3.451e+02, percent-clipped=0.0 +2024-08-26 20:41:34,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=141610.66666666666, ans=0.07 +2024-08-26 20:41:38,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141664.0, ans=0.125 +2024-08-26 20:41:47,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=141717.33333333334, ans=0.025 +2024-08-26 20:42:07,230 INFO [train.py:1114] (3/4) Epoch 11, batch 1700, loss[loss=0.1837, simple_loss=0.2498, pruned_loss=0.04187, ctc_loss=0.08433, over 19690.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2781, pruned_loss=0.05281, ctc_loss=0.09902, over 3847872.20 frames. ], batch size: 46, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:42:20,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=141824.0, ans=15.0 +2024-08-26 20:42:26,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141877.33333333334, ans=0.0 +2024-08-26 20:42:48,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.46 vs. limit=15.0 +2024-08-26 20:42:55,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=142037.33333333334, ans=0.125 +2024-08-26 20:43:00,059 INFO [train.py:1114] (3/4) Epoch 11, batch 1750, loss[loss=0.1995, simple_loss=0.2582, pruned_loss=0.05253, ctc_loss=0.08939, over 19668.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2773, pruned_loss=0.05245, ctc_loss=0.09816, over 3852452.67 frames. ], batch size: 45, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:04,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142090.66666666666, ans=0.125 +2024-08-26 20:43:06,163 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.441e+02 1.591e+02 1.781e+02 2.526e+02, threshold=3.183e+02, percent-clipped=0.0 +2024-08-26 20:43:32,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.75 vs. limit=15.0 +2024-08-26 20:43:46,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142304.0, ans=0.125 +2024-08-26 20:43:50,372 INFO [train.py:1114] (3/4) Epoch 11, batch 1800, loss[loss=0.2079, simple_loss=0.2859, pruned_loss=0.04798, ctc_loss=0.0851, over 19622.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2779, pruned_loss=0.05264, ctc_loss=0.09845, over 3853593.72 frames. 
], batch size: 55, lr: 1.35e-02, grad_scale: 32.0 +2024-08-26 20:43:50,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-26 20:43:52,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-08-26 20:43:55,773 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:44:08,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=142410.66666666666, ans=0.2 +2024-08-26 20:44:20,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=142517.33333333334, ans=0.2 +2024-08-26 20:44:37,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.18 vs. limit=10.0 +2024-08-26 20:44:40,930 INFO [train.py:1114] (3/4) Epoch 11, batch 1850, loss[loss=0.2143, simple_loss=0.2795, pruned_loss=0.05384, ctc_loss=0.1034, over 19599.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2775, pruned_loss=0.05247, ctc_loss=0.09829, over 3856713.00 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:44:43,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=142624.0, ans=0.125 +2024-08-26 20:44:43,958 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.95 vs. limit=15.0 +2024-08-26 20:44:47,992 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.442e+02 1.639e+02 2.043e+02 4.343e+02, threshold=3.277e+02, percent-clipped=6.0 +2024-08-26 20:44:54,624 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:45:10,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142784.0, ans=0.1 +2024-08-26 20:45:11,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=142784.0, ans=0.2 +2024-08-26 20:45:29,151 INFO [train.py:1114] (3/4) Epoch 11, batch 1900, loss[loss=0.2074, simple_loss=0.2839, pruned_loss=0.04721, ctc_loss=0.0911, over 19657.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2779, pruned_loss=0.05257, ctc_loss=0.09835, over 3860517.49 frames. 
], batch size: 59, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:45:56,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=142997.33333333334, ans=0.125 +2024-08-26 20:45:56,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=142997.33333333334, ans=0.125 +2024-08-26 20:45:58,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=143050.66666666666, ans=0.0 +2024-08-26 20:46:18,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=143104.0, ans=0.125 +2024-08-26 20:46:23,327 INFO [train.py:1114] (3/4) Epoch 11, batch 1950, loss[loss=0.1906, simple_loss=0.2679, pruned_loss=0.04086, ctc_loss=0.07892, over 19600.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2791, pruned_loss=0.0526, ctc_loss=0.09838, over 3869579.71 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:47:24,649 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.500e+02 1.631e+02 1.894e+02 3.317e+02, threshold=3.262e+02, percent-clipped=1.0 +2024-08-26 20:47:55,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=143210.66666666666, ans=0.125 +2024-08-26 20:48:06,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=143317.33333333334, ans=0.0 +2024-08-26 20:48:27,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143370.66666666666, ans=0.125 +2024-08-26 20:48:32,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-08-26 20:48:33,065 INFO [train.py:1114] (3/4) Epoch 11, batch 2000, loss[loss=0.1803, simple_loss=0.2427, pruned_loss=0.04259, ctc_loss=0.08177, over 19639.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2797, pruned_loss=0.05331, ctc_loss=0.09965, over 3854321.89 frames. ], batch size: 45, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:48:44,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-08-26 20:48:52,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=143530.66666666666, ans=0.2 +2024-08-26 20:49:01,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=143584.0, ans=0.0 +2024-08-26 20:49:04,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=143584.0, ans=0.025 +2024-08-26 20:49:30,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=143637.33333333334, ans=0.025 +2024-08-26 20:49:33,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=143637.33333333334, ans=0.125 +2024-08-26 20:49:35,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.22 vs. 
limit=15.0 +2024-08-26 20:49:36,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=143637.33333333334, ans=0.125 +2024-08-26 20:49:38,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=143690.66666666666, ans=0.2 +2024-08-26 20:49:39,121 INFO [train.py:1114] (3/4) Epoch 11, batch 2050, loss[loss=0.1863, simple_loss=0.249, pruned_loss=0.04491, ctc_loss=0.08455, over 19703.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2788, pruned_loss=0.05294, ctc_loss=0.099, over 3851667.99 frames. ], batch size: 47, lr: 1.34e-02, grad_scale: 32.0 +2024-08-26 20:49:42,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.73 vs. limit=15.0 +2024-08-26 20:49:47,242 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.448e+02 1.585e+02 1.933e+02 3.153e+02, threshold=3.170e+02, percent-clipped=0.0 +2024-08-26 20:49:48,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0 +2024-08-26 20:49:59,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143744.0, ans=0.1 +2024-08-26 20:50:00,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=143744.0, ans=0.2 +2024-08-26 20:50:01,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143744.0, ans=0.1 +2024-08-26 20:50:21,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.12 vs. limit=22.5 +2024-08-26 20:50:21,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=143850.66666666666, ans=0.2 +2024-08-26 20:50:37,966 INFO [train.py:1114] (3/4) Epoch 11, batch 2100, loss[loss=0.2063, simple_loss=0.2839, pruned_loss=0.047, ctc_loss=0.0868, over 19761.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2783, pruned_loss=0.05234, ctc_loss=0.09793, over 3858196.46 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:50:42,465 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 20:50:46,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=144010.66666666666, ans=0.025 +2024-08-26 20:50:56,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=144064.0, ans=0.0 +2024-08-26 20:50:57,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.26 vs. 
limit=22.5 +2024-08-26 20:50:58,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144064.0, ans=0.0 +2024-08-26 20:50:58,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144064.0, ans=0.125 +2024-08-26 20:51:02,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.69 vs. limit=10.0 +2024-08-26 20:51:08,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=144117.33333333334, ans=0.1 +2024-08-26 20:51:12,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144170.66666666666, ans=0.125 +2024-08-26 20:51:12,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=144170.66666666666, ans=0.125 +2024-08-26 20:51:13,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.47 vs. limit=15.0 +2024-08-26 20:51:21,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144170.66666666666, ans=0.1 +2024-08-26 20:51:22,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.70 vs. limit=22.5 +2024-08-26 20:51:22,999 INFO [train.py:1114] (3/4) Epoch 11, batch 2150, loss[loss=0.1962, simple_loss=0.2655, pruned_loss=0.04614, ctc_loss=0.08655, over 19854.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2772, pruned_loss=0.05189, ctc_loss=0.09703, over 3869561.22 frames. ], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:51:24,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=144224.0, ans=0.025 +2024-08-26 20:51:25,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.02 vs. limit=10.0 +2024-08-26 20:51:30,826 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.485e+02 1.672e+02 2.037e+02 4.338e+02, threshold=3.345e+02, percent-clipped=7.0 +2024-08-26 20:51:58,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=144437.33333333334, ans=0.125 +2024-08-26 20:52:06,914 INFO [train.py:1114] (3/4) Epoch 11, batch 2200, loss[loss=0.2271, simple_loss=0.2927, pruned_loss=0.05839, ctc_loss=0.1117, over 19598.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2768, pruned_loss=0.05186, ctc_loss=0.09693, over 3868060.34 frames. 
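The recurring WARNING lines from optim.py above report grad-norm quartiles, a threshold, and a percent-clipped figure; in these records the threshold tracks roughly 2x the median norm, matching the logged Clipping_scale=2.0. Below is a minimal hedged sketch of that bookkeeping, assuming a sliding window of recent gradient norms; the class name, window size, and print-every-step cadence are illustrative guesses, not icefall's actual optim.py.

```python
import torch
from collections import deque

class GradNormClipper:
    """Sketch of quartile-based clipping suggested by the
    'Clipping_scale=2.0, grad-norm quartiles ... threshold=...' lines."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 1024):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)        # recent total grad norms
        self.was_clipped = deque(maxlen=window)  # 1.0 where a step was clipped

    def step(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        total = torch.stack([p.grad.detach().norm(2) for p in params]).norm(2).item()
        self.norms.append(total)
        if len(self.norms) < 16:                 # warm-up: no clipping yet
            self.was_clipped.append(0.0)
            return 1.0
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # 2.0 * median norm
        scale = min(1.0, threshold / (total + 1e-20))
        self.was_clipped.append(float(scale < 1.0))
        if scale < 1.0:
            for p in params:
                p.grad.mul_(scale)
        pct = 100.0 * sum(self.was_clipped) / len(self.was_clipped)
        # the real logger emits such a WARNING only periodically
        print(f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
              + " ".join(f"{v:.3e}" for v in q.tolist())
              + f", threshold={threshold:.3e}, percent-clipped={pct:.1f}")
        return scale

# usage on a throwaway model
model = torch.nn.Linear(80, 512)
clipper = GradNormClipper()
for _ in range(20):
    model.zero_grad()
    model(torch.randn(16, 80)).pow(2).mean().backward()
    clipper.step(model.parameters())
```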
], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2024-08-26 20:52:13,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=144490.66666666666, ans=0.025 +2024-08-26 20:52:15,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144544.0, ans=0.125 +2024-08-26 20:52:19,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=144544.0, ans=0.2 +2024-08-26 20:52:21,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=144544.0, ans=0.1 +2024-08-26 20:52:32,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.50 vs. limit=10.0 +2024-08-26 20:52:34,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144650.66666666666, ans=0.0 +2024-08-26 20:52:37,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.21 vs. limit=15.0 +2024-08-26 20:52:39,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=144650.66666666666, ans=0.2 +2024-08-26 20:52:50,844 INFO [train.py:1114] (3/4) Epoch 11, batch 2250, loss[loss=0.2277, simple_loss=0.2948, pruned_loss=0.05889, ctc_loss=0.1071, over 19619.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2772, pruned_loss=0.05206, ctc_loss=0.09703, over 3867730.12 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:52:58,759 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.461e+02 1.628e+02 1.934e+02 8.673e+02, threshold=3.256e+02, percent-clipped=2.0 +2024-08-26 20:53:19,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=144917.33333333334, ans=0.125 +2024-08-26 20:53:25,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=144970.66666666666, ans=0.125 +2024-08-26 20:53:31,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=144970.66666666666, ans=0.125 +2024-08-26 20:53:35,342 INFO [train.py:1114] (3/4) Epoch 11, batch 2300, loss[loss=0.2035, simple_loss=0.2731, pruned_loss=0.04924, ctc_loss=0.0886, over 19514.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2759, pruned_loss=0.05201, ctc_loss=0.09691, over 3860895.39 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:53:45,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=145077.33333333334, ans=0.125 +2024-08-26 20:53:54,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145130.66666666666, ans=0.125 +2024-08-26 20:54:20,146 INFO [train.py:1114] (3/4) Epoch 11, batch 2350, loss[loss=0.2263, simple_loss=0.2934, pruned_loss=0.05813, ctc_loss=0.1072, over 19652.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2759, pruned_loss=0.05225, ctc_loss=0.0973, over 3863648.94 frames. 
], batch size: 63, lr: 1.33e-02, grad_scale: 16.0 +2024-08-26 20:54:27,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.02 vs. limit=15.0 +2024-08-26 20:54:28,784 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.482e+02 1.673e+02 1.901e+02 2.829e+02, threshold=3.345e+02, percent-clipped=0.0 +2024-08-26 20:54:35,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=145344.0, ans=0.125 +2024-08-26 20:54:36,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=145344.0, ans=0.025 +2024-08-26 20:54:45,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=145397.33333333334, ans=0.125 +2024-08-26 20:55:04,261 INFO [train.py:1114] (3/4) Epoch 11, batch 2400, loss[loss=0.2283, simple_loss=0.294, pruned_loss=0.05884, ctc_loss=0.1125, over 19293.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2785, pruned_loss=0.05333, ctc_loss=0.09914, over 3857595.91 frames. ], batch size: 71, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:17,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=145610.66666666666, ans=15.0 +2024-08-26 20:55:21,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=145664.0, ans=0.5 +2024-08-26 20:55:34,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145717.33333333334, ans=0.125 +2024-08-26 20:55:38,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145717.33333333334, ans=0.1 +2024-08-26 20:55:40,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=145770.66666666666, ans=0.125 +2024-08-26 20:55:41,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145770.66666666666, ans=0.125 +2024-08-26 20:55:48,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.38 vs. limit=10.0 +2024-08-26 20:55:49,238 INFO [train.py:1114] (3/4) Epoch 11, batch 2450, loss[loss=0.3027, simple_loss=0.3234, pruned_loss=0.1025, ctc_loss=0.1928, over 13244.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2827, pruned_loss=0.05651, ctc_loss=0.1056, over 3729666.82 frames. 
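The grad_scale values in these records move between 16.0 and 32.0, which is the signature of dynamic loss scaling under mixed precision: the scale doubles after a stretch of overflow-free steps and is halved when gradients overflow. A minimal self-contained sketch using torch.cuda.amp.GradScaler; the model, data, and growth_interval here are stand-ins, not this run's configuration.

```python
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
model = torch.nn.Linear(80, 500).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=1.34e-2)
criterion = torch.nn.MSELoss()
scaler = torch.cuda.amp.GradScaler(
    init_scale=16.0,       # matches the grad_scale first seen in this log
    growth_factor=2.0,     # 16.0 -> 32.0 after enough clean steps
    backoff_factor=0.5,    # halved on inf/nan gradients
    growth_interval=2000,  # assumption; the actual interval may differ
    enabled=(device == "cuda"),
)

for step in range(3):  # synthetic stand-in for the real dataloader
    x = torch.randn(64, 80, device=device)
    y = torch.randn(64, 500, device=device)
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast(enabled=(device == "cuda")):
        loss = criterion(model(x), y)
    scaler.scale(loss).backward()  # backprop the scaled loss
    scaler.step(optimizer)         # unscales; skips the step on inf/nan
    scaler.update()                # grow or back off the scale
    print(f"grad_scale: {scaler.get_scale()}")
```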
], batch size: 140, lr: 1.33e-02, grad_scale: 32.0 +2024-08-26 20:55:58,195 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.577e+02 1.748e+02 1.957e+02 3.323e+02, threshold=3.496e+02, percent-clipped=0.0 +2024-08-26 20:56:04,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=145877.33333333334, ans=0.125 +2024-08-26 20:56:10,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=145930.66666666666, ans=0.0 +2024-08-26 21:01:32,172 INFO [train.py:1114] (3/4) Epoch 12, batch 0, loss[loss=0.2211, simple_loss=0.2775, pruned_loss=0.06092, ctc_loss=0.1072, over 19401.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2775, pruned_loss=0.06092, ctc_loss=0.1072, over 19401.00 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:01:32,172 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 21:01:48,319 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8218, 5.1093, 5.6484, 5.3364], device='cuda:3') +2024-08-26 21:01:52,248 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.1812, simple_loss=0.274, pruned_loss=0.03284, ctc_loss=0.05683, over 944034.00 frames. +2024-08-26 21:01:52,249 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB +2024-08-26 21:02:07,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=15.0 +2024-08-26 21:02:13,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=146138.66666666666, ans=10.0 +2024-08-26 21:02:38,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=146192.0, ans=0.025 +2024-08-26 21:02:42,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=146245.33333333334, ans=0.125 +2024-08-26 21:02:50,482 INFO [train.py:1114] (3/4) Epoch 12, batch 50, loss[loss=0.1735, simple_loss=0.2397, pruned_loss=0.03913, ctc_loss=0.07244, over 19703.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.278, pruned_loss=0.05281, ctc_loss=0.09975, over 845316.12 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:03:03,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=146352.0, ans=0.125 +2024-08-26 21:03:11,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.556e+02 1.742e+02 1.990e+02 3.045e+02, threshold=3.484e+02, percent-clipped=0.0 +2024-08-26 21:03:16,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146405.33333333334, ans=0.125 +2024-08-26 21:03:30,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=146512.0, ans=0.125 +2024-08-26 21:04:07,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=146512.0, ans=0.0 +2024-08-26 21:04:10,877 INFO [train.py:1114] (3/4) Epoch 12, batch 100, loss[loss=0.1958, simple_loss=0.2682, pruned_loss=0.04509, ctc_loss=0.08296, over 19712.00 frames. 
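At the Epoch 12 boundary above, train.py logs "Computing validation loss", zipformer.py prints an attn_weights_entropy tensor (presumably one value per attention head; that is an assumption), and the run reports its frame-weighted validation loss plus peak CUDA memory. A hedged sketch of those diagnostics with synthetic stand-ins; the real train.py plumbing is more involved.

```python
import torch

@torch.no_grad()
def compute_validation_loss(model, criterion, valid_loader, device):
    """Frame-weighted validation loss, as the 'over N frames' totals suggest."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for feats, targets in valid_loader:
        feats, targets = feats.to(device), targets.to(device)
        loss = criterion(model(feats), targets)
        n = feats.size(0) * feats.size(1)  # frames in this batch
        tot_loss, tot_frames = tot_loss + loss.item() * n, tot_frames + n
    model.train()
    return tot_loss / tot_frames

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    # attn: (num_heads, num_queries, num_keys), rows summing to 1
    ent = -(attn.clamp_min(1e-20).log() * attn).sum(dim=-1)  # (heads, queries)
    return ent.mean(dim=-1)  # per-head mean entropy in nats

device = "cpu"
model, criterion = torch.nn.Linear(80, 500), torch.nn.MSELoss()
loader = [(torch.randn(8, 100, 80), torch.randn(8, 100, 500)) for _ in range(3)]
print("validation: loss =", compute_validation_loss(model, criterion, loader, device))

attn = torch.softmax(torch.randn(4, 128, 128), dim=-1)  # 4 heads, stand-in
print("attn_weights_entropy =", attn_weights_entropy(attn))

if torch.cuda.is_available():  # peak-memory line, as train.py reports it
    mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mb}MB")
```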
], tot_loss[loss=0.215, simple_loss=0.281, pruned_loss=0.05416, ctc_loss=0.1016, over 1500217.78 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:04:11,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=146565.33333333334, ans=0.0 +2024-08-26 21:04:15,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=146565.33333333334, ans=0.04949747468305833 +2024-08-26 21:04:29,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=146618.66666666666, ans=0.0 +2024-08-26 21:04:43,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146672.0, ans=0.1 +2024-08-26 21:04:43,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.31 vs. limit=10.0 +2024-08-26 21:04:53,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=146725.33333333334, ans=0.125 +2024-08-26 21:04:53,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=146725.33333333334, ans=0.125 +2024-08-26 21:05:01,073 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0 +2024-08-26 21:05:04,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. limit=6.0 +2024-08-26 21:05:05,143 INFO [train.py:1114] (3/4) Epoch 12, batch 150, loss[loss=0.169, simple_loss=0.2385, pruned_loss=0.03656, ctc_loss=0.06582, over 19714.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2781, pruned_loss=0.05224, ctc_loss=0.09815, over 2028992.09 frames. ], batch size: 47, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:05:23,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=146938.66666666666, ans=0.95 +2024-08-26 21:05:25,630 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.421e+02 1.535e+02 1.745e+02 2.429e+02, threshold=3.070e+02, percent-clipped=0.0 +2024-08-26 21:05:29,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-08-26 21:05:30,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=146938.66666666666, ans=0.0 +2024-08-26 21:05:33,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=146992.0, ans=0.2 +2024-08-26 21:05:35,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=146992.0, ans=0.2 +2024-08-26 21:05:43,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=147045.33333333334, ans=0.125 +2024-08-26 21:05:45,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.27 vs. 
limit=15.0 +2024-08-26 21:05:52,005 INFO [train.py:1114] (3/4) Epoch 12, batch 200, loss[loss=0.2172, simple_loss=0.2848, pruned_loss=0.05407, ctc_loss=0.1038, over 18303.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2774, pruned_loss=0.05201, ctc_loss=0.09731, over 2435906.35 frames. ], batch size: 85, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:06:38,643 INFO [train.py:1114] (3/4) Epoch 12, batch 250, loss[loss=0.2308, simple_loss=0.2959, pruned_loss=0.06145, ctc_loss=0.1068, over 19363.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2767, pruned_loss=0.05167, ctc_loss=0.09674, over 2755304.30 frames. ], batch size: 67, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:06:49,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0 +2024-08-26 21:06:57,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=147472.0, ans=0.125 +2024-08-26 21:06:59,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.414e+02 1.495e+02 1.680e+02 4.024e+02, threshold=2.991e+02, percent-clipped=1.0 +2024-08-26 21:07:05,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=147472.0, ans=0.125 +2024-08-26 21:07:25,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147578.66666666666, ans=0.125 +2024-08-26 21:07:29,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.06 vs. limit=15.0 +2024-08-26 21:07:35,066 INFO [train.py:1114] (3/4) Epoch 12, batch 300, loss[loss=0.2136, simple_loss=0.2833, pruned_loss=0.05248, ctc_loss=0.09727, over 19550.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2758, pruned_loss=0.05128, ctc_loss=0.09591, over 2999102.31 frames. ], batch size: 61, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:08:04,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=147792.0, ans=15.0 +2024-08-26 21:08:12,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.05 vs. limit=15.0 +2024-08-26 21:08:15,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-08-26 21:08:30,185 INFO [train.py:1114] (3/4) Epoch 12, batch 350, loss[loss=0.1979, simple_loss=0.2604, pruned_loss=0.04901, ctc_loss=0.09374, over 19741.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2766, pruned_loss=0.05152, ctc_loss=0.09602, over 3190049.49 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 16.0 +2024-08-26 21:08:33,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=147898.66666666666, ans=10.0 +2024-08-26 21:12:05,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.16 vs. 
limit=22.5 +2024-08-26 21:12:10,849 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.537e+02 1.863e+02 2.287e+02 4.040e+02, threshold=3.725e+02, percent-clipped=5.0 +2024-08-26 21:12:12,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.07 vs. limit=15.0 +2024-08-26 21:12:17,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.97 vs. limit=15.0 +2024-08-26 21:12:18,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-26 21:12:23,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-26 21:12:24,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=148058.66666666666, ans=0.125 +2024-08-26 21:12:25,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=148058.66666666666, ans=0.04949747468305833 +2024-08-26 21:13:47,519 INFO [train.py:1114] (3/4) Epoch 12, batch 400, loss[loss=0.2158, simple_loss=0.2855, pruned_loss=0.05359, ctc_loss=0.09722, over 19493.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2755, pruned_loss=0.05095, ctc_loss=0.09494, over 3341862.98 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 32.0 +2024-08-26 21:13:55,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.04 vs. limit=10.0 +2024-08-26 21:14:07,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.95 vs. limit=15.0 +2024-08-26 21:14:34,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.15 vs. limit=6.0 +2024-08-26 21:14:34,569 INFO [train.py:1114] (3/4) Epoch 12, batch 450, loss[loss=0.21, simple_loss=0.2799, pruned_loss=0.05042, ctc_loss=0.09826, over 19604.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2759, pruned_loss=0.05108, ctc_loss=0.09542, over 3449575.95 frames. ], batch size: 55, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:14:49,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=148485.33333333334, ans=0.125 +2024-08-26 21:15:05,747 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.502e+02 1.695e+02 2.071e+02 2.894e+02, threshold=3.390e+02, percent-clipped=0.0 +2024-08-26 21:15:23,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=148645.33333333334, ans=0.125 +2024-08-26 21:15:25,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.34 vs. 
limit=10.0 +2024-08-26 21:15:28,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=148645.33333333334, ans=0.125 +2024-08-26 21:15:28,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=148645.33333333334, ans=0.0 +2024-08-26 21:15:31,512 INFO [train.py:1114] (3/4) Epoch 12, batch 500, loss[loss=0.2274, simple_loss=0.2987, pruned_loss=0.05701, ctc_loss=0.1054, over 19711.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2749, pruned_loss=0.05064, ctc_loss=0.09479, over 3546020.89 frames. ], batch size: 63, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:15:34,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148698.66666666666, ans=0.125 +2024-08-26 21:15:37,068 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:16:02,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=148858.66666666666, ans=0.0 +2024-08-26 21:16:14,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0 +2024-08-26 21:16:15,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=148912.0, ans=0.2 +2024-08-26 21:16:19,319 INFO [train.py:1114] (3/4) Epoch 12, batch 550, loss[loss=0.2213, simple_loss=0.2922, pruned_loss=0.05428, ctc_loss=0.1043, over 19283.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2748, pruned_loss=0.05063, ctc_loss=0.09475, over 3607276.06 frames. ], batch size: 71, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:16:22,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.63 vs. limit=15.0 +2024-08-26 21:16:23,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=148965.33333333334, ans=0.025 +2024-08-26 21:16:27,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=149018.66666666666, ans=0.125 +2024-08-26 21:16:38,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=149072.0, ans=0.125 +2024-08-26 21:16:39,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.448e+02 1.617e+02 1.906e+02 3.977e+02, threshold=3.234e+02, percent-clipped=1.0 +2024-08-26 21:16:39,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149072.0, ans=0.125 +2024-08-26 21:16:49,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.32 vs. limit=15.0 +2024-08-26 21:16:52,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.40 vs. 
limit=15.0 +2024-08-26 21:17:35,147 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:17:45,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149178.66666666666, ans=0.125 +2024-08-26 21:17:47,329 INFO [train.py:1114] (3/4) Epoch 12, batch 600, loss[loss=0.2047, simple_loss=0.2794, pruned_loss=0.04795, ctc_loss=0.08516, over 19326.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2749, pruned_loss=0.05073, ctc_loss=0.09503, over 3665697.83 frames. ], batch size: 67, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:17:58,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149285.33333333334, ans=0.125 +2024-08-26 21:18:09,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.71 vs. limit=10.0 +2024-08-26 21:18:09,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=149338.66666666666, ans=0.0 +2024-08-26 21:18:31,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149392.0, ans=0.125 +2024-08-26 21:18:37,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=149445.33333333334, ans=0.0 +2024-08-26 21:18:41,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149445.33333333334, ans=0.1 +2024-08-26 21:18:44,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=149445.33333333334, ans=0.1 +2024-08-26 21:18:46,469 INFO [train.py:1114] (3/4) Epoch 12, batch 650, loss[loss=0.1913, simple_loss=0.2652, pruned_loss=0.04224, ctc_loss=0.08225, over 19780.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2739, pruned_loss=0.05023, ctc_loss=0.09409, over 3716004.42 frames. ], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:18:56,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=149498.66666666666, ans=0.07 +2024-08-26 21:19:02,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=149498.66666666666, ans=0.0 +2024-08-26 21:19:07,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149552.0, ans=0.125 +2024-08-26 21:19:15,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.58 vs. 
limit=15.0 +2024-08-26 21:19:15,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=149605.33333333334, ans=0.125 +2024-08-26 21:19:16,422 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.436e+02 1.583e+02 1.844e+02 2.674e+02, threshold=3.165e+02, percent-clipped=0.0 +2024-08-26 21:19:21,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149605.33333333334, ans=0.1 +2024-08-26 21:19:23,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=149658.66666666666, ans=0.2 +2024-08-26 21:19:38,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=149712.0, ans=0.125 +2024-08-26 21:19:39,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149712.0, ans=0.125 +2024-08-26 21:19:45,024 INFO [train.py:1114] (3/4) Epoch 12, batch 700, loss[loss=0.1895, simple_loss=0.2601, pruned_loss=0.04351, ctc_loss=0.07945, over 19722.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2743, pruned_loss=0.05031, ctc_loss=0.09414, over 3748205.91 frames. ], batch size: 51, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:19:45,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.03 vs. limit=12.0 +2024-08-26 21:19:50,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=149765.33333333334, ans=0.125 +2024-08-26 21:20:05,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=149872.0, ans=0.0 +2024-08-26 21:20:05,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149872.0, ans=0.1 +2024-08-26 21:20:18,596 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:20:19,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149925.33333333334, ans=0.125 +2024-08-26 21:20:24,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149978.66666666666, ans=0.125 +2024-08-26 21:20:31,169 INFO [train.py:1114] (3/4) Epoch 12, batch 750, loss[loss=0.1989, simple_loss=0.2727, pruned_loss=0.04524, ctc_loss=0.08671, over 19494.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2743, pruned_loss=0.05026, ctc_loss=0.0942, over 3772565.94 frames. 
], batch size: 54, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:20:32,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=150032.0, ans=0.07 +2024-08-26 21:20:35,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150032.0, ans=0.1 +2024-08-26 21:20:37,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150032.0, ans=0.1 +2024-08-26 21:20:43,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.71 vs. limit=22.5 +2024-08-26 21:20:49,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=150138.66666666666, ans=0.125 +2024-08-26 21:20:51,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=150138.66666666666, ans=0.0 +2024-08-26 21:20:51,908 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.592e+02 1.843e+02 2.247e+02 3.979e+02, threshold=3.686e+02, percent-clipped=6.0 +2024-08-26 21:20:55,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150138.66666666666, ans=0.125 +2024-08-26 21:20:56,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=150138.66666666666, ans=0.09899494936611666 +2024-08-26 21:20:59,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150192.0, ans=0.125 +2024-08-26 21:21:22,408 INFO [train.py:1114] (3/4) Epoch 12, batch 800, loss[loss=0.1984, simple_loss=0.2614, pruned_loss=0.05046, ctc_loss=0.08614, over 19402.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2747, pruned_loss=0.05051, ctc_loss=0.09443, over 3793435.95 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 32.0 +2024-08-26 21:21:30,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=150298.66666666666, ans=0.125 +2024-08-26 21:21:42,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=150352.0, ans=0.0 +2024-08-26 21:22:08,636 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:22:11,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=150512.0, ans=0.0 +2024-08-26 21:22:12,974 INFO [train.py:1114] (3/4) Epoch 12, batch 850, loss[loss=0.2212, simple_loss=0.2879, pruned_loss=0.05514, ctc_loss=0.1107, over 19661.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2743, pruned_loss=0.05039, ctc_loss=0.09413, over 3813485.28 frames. 
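Most INFO lines in this log are scaling.py reporting a ScheduledFloat: a module hyperparameter (a dropout p, skip rate, or balancer prob) whose value "ans" is a function of the global batch_count. A minimal sketch of such a piecewise-linear schedule; the breakpoints below are invented for illustration and do not correspond to any particular module in this run.

```python
class ScheduledFloat:
    """Float hyperparameter interpolated piecewise-linearly in batch count."""

    def __init__(self, *points):  # points: (batch_count, value) pairs
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:  # linear interpolation on this leg
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# e.g. a dropout that anneals from 0.3 to 0.1 over the first 20k batches,
# then stays flat; by batch_count=150032 it reads 0.1, like the lines above
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(f"ScheduledFloat: name=encoder_embed.dropout.p, "
      f"batch_count=150032.0, ans={dropout_p.value(150032.0)}")
```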
], batch size: 59, lr: 1.26e-02, grad_scale: 16.0 +2024-08-26 21:22:14,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=150565.33333333334, ans=0.125 +2024-08-26 21:22:17,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=150565.33333333334, ans=0.125 +2024-08-26 21:22:29,099 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:22:34,292 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.451e+02 1.599e+02 1.811e+02 2.698e+02, threshold=3.198e+02, percent-clipped=0.0 +2024-08-26 21:22:34,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150672.0, ans=0.125 +2024-08-26 21:22:36,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=150672.0, ans=0.125 +2024-08-26 21:22:38,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=150672.0, ans=0.0 +2024-08-26 21:22:39,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150672.0, ans=0.0 +2024-08-26 21:22:56,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=150778.66666666666, ans=0.95 +2024-08-26 21:23:00,257 INFO [train.py:1114] (3/4) Epoch 12, batch 900, loss[loss=0.1865, simple_loss=0.2552, pruned_loss=0.04278, ctc_loss=0.08046, over 19783.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2748, pruned_loss=0.0507, ctc_loss=0.09466, over 3817612.49 frames. ], batch size: 49, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:23:07,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=150832.0, ans=0.025 +2024-08-26 21:23:11,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.08 vs. limit=10.0 +2024-08-26 21:23:20,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150938.66666666666, ans=0.0 +2024-08-26 21:23:40,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.63 vs. limit=10.0 +2024-08-26 21:23:40,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=151045.33333333334, ans=0.025 +2024-08-26 21:23:44,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=151045.33333333334, ans=6.0 +2024-08-26 21:23:45,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=151045.33333333334, ans=0.2 +2024-08-26 21:23:47,039 INFO [train.py:1114] (3/4) Epoch 12, batch 950, loss[loss=0.1929, simple_loss=0.2549, pruned_loss=0.04837, ctc_loss=0.08547, over 19499.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2756, pruned_loss=0.0511, ctc_loss=0.09545, over 3819388.05 frames. 
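The "Whitening: name=..., metric=... vs. limit=..." lines compare a whiteness statistic of some activation's channel covariance against a limit beyond which the module intervenes. The sketch below uses an assumed metric, d * ||C||_F^2 / trace(C)^2, which equals 1.0 for a perfectly white (scalar-times-identity) covariance and grows with anisotropy; this is a plausible proxy for what is being logged, not necessarily scaling.py's exact formula.

```python
import torch

def whiteness_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns an assumed whiteness proxy:
    1.0 when the channel covariance is a multiple of the identity,
    larger the more anisotropic the activations are."""
    x = x - x.mean(dim=0, keepdim=True)
    c = (x.T @ x) / x.size(0)  # channel covariance, (d, d)
    d = c.size(0)
    return (d * (c * c).sum() / c.trace() ** 2).item()

# anisotropic synthetic activations: per-channel scales from 0.5 to 2.0
feats = torch.randn(1024, 256) * torch.linspace(0.5, 2.0, 256)
metric, limit = whiteness_metric(feats), 6.0
print(f"Whitening: num_channels=256, metric={metric:.2f} vs. limit={limit}")
```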
], batch size: 49, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:23:47,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151098.66666666666, ans=0.1 +2024-08-26 21:23:48,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=151098.66666666666, ans=0.2 +2024-08-26 21:24:10,054 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.68 vs. limit=6.0 +2024-08-26 21:24:17,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=151152.0, ans=0.125 +2024-08-26 21:24:26,849 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.78 vs. limit=6.0 +2024-08-26 21:24:29,928 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.446e+02 1.609e+02 1.941e+02 6.709e+02, threshold=3.217e+02, percent-clipped=2.0 +2024-08-26 21:24:48,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151312.0, ans=0.125 +2024-08-26 21:24:57,591 INFO [train.py:1114] (3/4) Epoch 12, batch 1000, loss[loss=0.1908, simple_loss=0.2666, pruned_loss=0.04236, ctc_loss=0.07568, over 19860.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2761, pruned_loss=0.05141, ctc_loss=0.09612, over 3815895.59 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:25:00,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151365.33333333334, ans=0.125 +2024-08-26 21:25:21,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=151418.66666666666, ans=0.07 +2024-08-26 21:25:55,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=151578.66666666666, ans=0.125 +2024-08-26 21:25:59,410 INFO [train.py:1114] (3/4) Epoch 12, batch 1050, loss[loss=0.214, simple_loss=0.2793, pruned_loss=0.05449, ctc_loss=0.09926, over 19840.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2752, pruned_loss=0.05099, ctc_loss=0.09527, over 3822208.51 frames. 
], batch size: 57, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:26:01,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=151632.0, ans=0.125 +2024-08-26 21:26:20,569 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.384e+02 1.517e+02 1.769e+02 3.938e+02, threshold=3.034e+02, percent-clipped=1.0 +2024-08-26 21:26:28,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=151792.0, ans=0.2 +2024-08-26 21:26:29,165 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:26:32,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=151792.0, ans=0.125 +2024-08-26 21:26:36,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151845.33333333334, ans=0.1 +2024-08-26 21:26:39,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.99 vs. limit=15.0 +2024-08-26 21:26:45,790 INFO [train.py:1114] (3/4) Epoch 12, batch 1100, loss[loss=0.2155, simple_loss=0.28, pruned_loss=0.05531, ctc_loss=0.1012, over 19596.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2742, pruned_loss=0.0504, ctc_loss=0.09432, over 3831298.08 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:26:48,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=151898.66666666666, ans=0.0 +2024-08-26 21:26:54,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=151952.0, ans=0.125 +2024-08-26 21:26:55,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.51 vs. limit=22.5 +2024-08-26 21:27:07,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=152005.33333333334, ans=0.05 +2024-08-26 21:27:18,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.57 vs. limit=15.0 +2024-08-26 21:27:20,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=152058.66666666666, ans=0.125 +2024-08-26 21:27:25,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=152058.66666666666, ans=0.0 +2024-08-26 21:27:32,176 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:27:39,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=152112.0, ans=0.025 +2024-08-26 21:27:41,051 INFO [train.py:1114] (3/4) Epoch 12, batch 1150, loss[loss=0.1967, simple_loss=0.2702, pruned_loss=0.04503, ctc_loss=0.08264, over 19574.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2745, pruned_loss=0.05068, ctc_loss=0.09468, over 3830196.69 frames. 
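The learning rate in these records decays smoothly with both batch count (1.34e-02 earlier in epoch 11) and epoch (1.25e-02 by this point in epoch 12), the shape of icefall's Eden-style schedule. A sketch under an assumed inverse fourth-root form with assumed base_lr and time constants; these only roughly reproduce the magnitudes logged here and are not this run's actual configuration.

```python
def eden_style_lr(base_lr: float, batch: int, epoch: int,
                  lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    # inverse fourth-root decay in both global batch count and epoch;
    # lr_batches / lr_epochs set how quickly each factor starts to bite
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# with an assumed base_lr of 0.1 this lands near the ~1.2e-02 to 1.3e-02
# values seen around these batch counts
for epoch, batch in [(11, 143000), (12, 146000), (12, 152000)]:
    print(f"epoch {epoch}, batch {batch}: "
          f"lr: {eden_style_lr(0.1, batch, epoch):.2e}")
```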
], batch size: 52, lr: 1.25e-02, grad_scale: 16.0 +2024-08-26 21:27:42,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=152165.33333333334, ans=0.125 +2024-08-26 21:27:44,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.30 vs. limit=22.5 +2024-08-26 21:27:52,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=152218.66666666666, ans=0.025 +2024-08-26 21:27:52,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=152218.66666666666, ans=0.2 +2024-08-26 21:27:54,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=152218.66666666666, ans=0.025 +2024-08-26 21:27:56,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=152218.66666666666, ans=12.0 +2024-08-26 21:27:59,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=152272.0, ans=0.0 +2024-08-26 21:28:02,691 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.513e+02 1.822e+02 2.260e+02 3.131e+02, threshold=3.643e+02, percent-clipped=1.0 +2024-08-26 21:28:16,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.65 vs. limit=22.5 +2024-08-26 21:28:28,100 INFO [train.py:1114] (3/4) Epoch 12, batch 1200, loss[loss=0.2201, simple_loss=0.2808, pruned_loss=0.05837, ctc_loss=0.1066, over 19844.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2753, pruned_loss=0.0509, ctc_loss=0.09502, over 3825521.01 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:28:46,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152538.66666666666, ans=0.1 +2024-08-26 21:28:54,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=152538.66666666666, ans=0.125 +2024-08-26 21:29:07,835 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:29:14,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=152698.66666666666, ans=0.2 +2024-08-26 21:29:15,269 INFO [train.py:1114] (3/4) Epoch 12, batch 1250, loss[loss=0.2132, simple_loss=0.2764, pruned_loss=0.05465, ctc_loss=0.1019, over 19521.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2754, pruned_loss=0.05095, ctc_loss=0.09503, over 3843627.66 frames. ], batch size: 61, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:29:26,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=152752.0, ans=0.125 +2024-08-26 21:29:36,827 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.442e+02 1.596e+02 2.011e+02 3.434e+02, threshold=3.192e+02, percent-clipped=0.0 +2024-08-26 21:29:54,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.95 vs. 
limit=15.0 +2024-08-26 21:30:17,223 INFO [train.py:1114] (3/4) Epoch 12, batch 1300, loss[loss=0.2092, simple_loss=0.2828, pruned_loss=0.04902, ctc_loss=0.09365, over 18845.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2745, pruned_loss=0.05049, ctc_loss=0.09416, over 3848769.64 frames. ], batch size: 76, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:30:21,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=152965.33333333334, ans=0.1 +2024-08-26 21:30:25,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152965.33333333334, ans=0.125 +2024-08-26 21:30:44,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.80 vs. limit=22.5 +2024-08-26 21:30:50,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=15.0 +2024-08-26 21:30:57,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.87 vs. limit=22.5 +2024-08-26 21:31:06,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=153178.66666666666, ans=0.125 +2024-08-26 21:31:08,985 INFO [train.py:1114] (3/4) Epoch 12, batch 1350, loss[loss=0.2333, simple_loss=0.2888, pruned_loss=0.06524, ctc_loss=0.1183, over 19765.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2743, pruned_loss=0.05036, ctc_loss=0.0937, over 3858914.98 frames. ], batch size: 54, lr: 1.25e-02, grad_scale: 32.0 +2024-08-26 21:31:29,984 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.467e+02 1.650e+02 2.044e+02 3.234e+02, threshold=3.299e+02, percent-clipped=1.0 +2024-08-26 21:31:39,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=153392.0, ans=0.0 +2024-08-26 21:31:50,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=153445.33333333334, ans=0.0 +2024-08-26 21:31:55,310 INFO [train.py:1114] (3/4) Epoch 12, batch 1400, loss[loss=0.1856, simple_loss=0.2446, pruned_loss=0.04516, ctc_loss=0.09067, over 19697.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2744, pruned_loss=0.05054, ctc_loss=0.09412, over 3865222.20 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:32:11,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.59 vs. limit=15.0 +2024-08-26 21:32:47,924 INFO [train.py:1114] (3/4) Epoch 12, batch 1450, loss[loss=0.2348, simple_loss=0.3068, pruned_loss=0.0594, ctc_loss=0.11, over 19683.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.275, pruned_loss=0.05056, ctc_loss=0.09434, over 3862585.95 frames. ], batch size: 63, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:33:12,066 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.443e+02 1.618e+02 1.909e+02 2.759e+02, threshold=3.236e+02, percent-clipped=0.0 +2024-08-26 21:33:17,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.39 vs. 
limit=6.0 +2024-08-26 21:33:42,297 INFO [train.py:1114] (3/4) Epoch 12, batch 1500, loss[loss=0.2029, simple_loss=0.278, pruned_loss=0.04675, ctc_loss=0.08596, over 19599.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.275, pruned_loss=0.05041, ctc_loss=0.09399, over 3863490.21 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:33:50,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154032.0, ans=0.0 +2024-08-26 21:33:50,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=154032.0, ans=0.0 +2024-08-26 21:34:04,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=154138.66666666666, ans=0.0 +2024-08-26 21:34:10,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154192.0, ans=0.125 +2024-08-26 21:34:19,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=154245.33333333334, ans=0.0 +2024-08-26 21:34:20,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=154245.33333333334, ans=0.125 +2024-08-26 21:34:29,576 INFO [train.py:1114] (3/4) Epoch 12, batch 1550, loss[loss=0.2192, simple_loss=0.2836, pruned_loss=0.05662, ctc_loss=0.1037, over 19622.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2752, pruned_loss=0.05059, ctc_loss=0.09433, over 3847926.80 frames. ], batch size: 60, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:34:38,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=154352.0, ans=0.07 +2024-08-26 21:34:39,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154352.0, ans=0.0 +2024-08-26 21:34:51,380 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.431e+02 1.666e+02 1.890e+02 5.087e+02, threshold=3.332e+02, percent-clipped=2.0 +2024-08-26 21:34:58,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154458.66666666666, ans=0.125 +2024-08-26 21:34:58,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=154458.66666666666, ans=0.125 +2024-08-26 21:35:16,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=154565.33333333334, ans=0.0 +2024-08-26 21:35:17,030 INFO [train.py:1114] (3/4) Epoch 12, batch 1600, loss[loss=0.2023, simple_loss=0.2753, pruned_loss=0.04641, ctc_loss=0.0913, over 19843.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2751, pruned_loss=0.05067, ctc_loss=0.09448, over 3836593.08 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:35:26,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.48 vs. 
limit=15.0 +2024-08-26 21:35:28,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=154618.66666666666, ans=0.125 +2024-08-26 21:35:49,538 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.98 vs. limit=22.5 +2024-08-26 21:35:59,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154778.66666666666, ans=0.1 +2024-08-26 21:36:02,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=154778.66666666666, ans=0.125 +2024-08-26 21:36:03,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.69 vs. limit=15.0 +2024-08-26 21:36:05,477 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.56 vs. limit=15.0 +2024-08-26 21:36:09,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.52 vs. limit=15.0 +2024-08-26 21:36:11,264 INFO [train.py:1114] (3/4) Epoch 12, batch 1650, loss[loss=0.2149, simple_loss=0.2889, pruned_loss=0.05135, ctc_loss=0.09535, over 19664.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2748, pruned_loss=0.05072, ctc_loss=0.09437, over 3832756.55 frames. ], batch size: 59, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:36:19,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=154832.0, ans=0.125 +2024-08-26 21:36:27,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154885.33333333334, ans=0.1 +2024-08-26 21:36:34,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=154938.66666666666, ans=0.0 +2024-08-26 21:36:34,778 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.420e+02 1.592e+02 1.938e+02 3.625e+02, threshold=3.184e+02, percent-clipped=1.0 +2024-08-26 21:36:41,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=154992.0, ans=0.125 +2024-08-26 21:36:44,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0 +2024-08-26 21:37:00,210 INFO [train.py:1114] (3/4) Epoch 12, batch 1700, loss[loss=0.1666, simple_loss=0.2325, pruned_loss=0.03678, ctc_loss=0.06785, over 19666.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2747, pruned_loss=0.05052, ctc_loss=0.09408, over 3847160.88 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 32.0 +2024-08-26 21:37:05,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=155098.66666666666, ans=0.2 +2024-08-26 21:37:25,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=155205.33333333334, ans=0.1 +2024-08-26 21:37:37,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.46 vs. 
limit=22.5 +2024-08-26 21:37:38,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-08-26 21:37:38,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=155312.0, ans=0.0 +2024-08-26 21:37:44,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=155365.33333333334, ans=0.125 +2024-08-26 21:37:44,945 INFO [train.py:1114] (3/4) Epoch 12, batch 1750, loss[loss=0.1834, simple_loss=0.2472, pruned_loss=0.04324, ctc_loss=0.08278, over 19645.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2745, pruned_loss=0.05048, ctc_loss=0.09399, over 3851893.16 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 16.0 +2024-08-26 21:37:51,259 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:37:57,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.82 vs. limit=10.0 +2024-08-26 21:38:06,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.409e+02 1.600e+02 1.878e+02 3.182e+02, threshold=3.201e+02, percent-clipped=0.0 +2024-08-26 21:38:06,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=155472.0, ans=0.2 +2024-08-26 21:38:12,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=155525.33333333334, ans=0.2 +2024-08-26 21:38:13,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155525.33333333334, ans=0.125 +2024-08-26 21:38:26,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.76 vs. limit=22.5 +2024-08-26 21:38:27,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=155578.66666666666, ans=0.125 +2024-08-26 21:38:28,987 INFO [train.py:1114] (3/4) Epoch 12, batch 1800, loss[loss=0.1914, simple_loss=0.2681, pruned_loss=0.04171, ctc_loss=0.07824, over 19599.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2744, pruned_loss=0.05041, ctc_loss=0.0938, over 3853702.72 frames. ], batch size: 55, lr: 1.24e-02, grad_scale: 16.0 +2024-08-26 21:38:33,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=155632.0, ans=0.125 +2024-08-26 21:38:41,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155685.33333333334, ans=0.125 +2024-08-26 21:38:48,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.85 vs. limit=15.0 +2024-08-26 21:38:48,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-08-26 21:38:51,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.96 vs. 
limit=22.5 +2024-08-26 21:39:00,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=155792.0, ans=0.5 +2024-08-26 21:39:09,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=155845.33333333334, ans=0.0 +2024-08-26 21:39:12,710 INFO [train.py:1114] (3/4) Epoch 12, batch 1850, loss[loss=0.2031, simple_loss=0.2793, pruned_loss=0.04673, ctc_loss=0.08382, over 19581.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2742, pruned_loss=0.0501, ctc_loss=0.09328, over 3855951.22 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 16.0 +2024-08-26 21:39:12,956 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:39:25,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=155952.0, ans=0.0 +2024-08-26 21:39:31,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=156005.33333333334, ans=0.2 +2024-08-26 21:39:34,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.542e+02 1.764e+02 2.176e+02 3.980e+02, threshold=3.528e+02, percent-clipped=3.0 +2024-08-26 21:39:44,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156058.66666666666, ans=0.1 +2024-08-26 21:39:56,686 INFO [train.py:1114] (3/4) Epoch 12, batch 1900, loss[loss=0.2107, simple_loss=0.2887, pruned_loss=0.04804, ctc_loss=0.09165, over 19656.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2746, pruned_loss=0.05033, ctc_loss=0.09351, over 3860707.06 frames. ], batch size: 59, lr: 1.23e-02, grad_scale: 8.0 +2024-08-26 21:40:03,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.43 vs. limit=22.5 +2024-08-26 21:40:07,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=156218.66666666666, ans=0.0 +2024-08-26 21:40:15,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=156272.0, ans=0.125 +2024-08-26 21:40:32,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156378.66666666666, ans=0.1 +2024-08-26 21:40:33,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156378.66666666666, ans=0.1 +2024-08-26 21:40:35,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=156378.66666666666, ans=0.05 +2024-08-26 21:40:40,049 INFO [train.py:1114] (3/4) Epoch 12, batch 1950, loss[loss=0.2058, simple_loss=0.2774, pruned_loss=0.04867, ctc_loss=0.09233, over 19587.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2753, pruned_loss=0.05016, ctc_loss=0.09333, over 3869202.79 frames. 
], batch size: 52, lr: 1.23e-02, grad_scale: 8.0 +2024-08-26 21:40:47,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=156432.0, ans=0.2 +2024-08-26 21:41:01,732 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.443e+02 1.619e+02 1.881e+02 3.638e+02, threshold=3.238e+02, percent-clipped=1.0 +2024-08-26 21:41:03,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=156538.66666666666, ans=0.0 +2024-08-26 21:41:17,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156645.33333333334, ans=0.125 +2024-08-26 21:41:19,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=156645.33333333334, ans=0.125 +2024-08-26 21:41:22,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156645.33333333334, ans=0.125 +2024-08-26 21:41:25,588 INFO [train.py:1114] (3/4) Epoch 12, batch 2000, loss[loss=0.1835, simple_loss=0.2418, pruned_loss=0.0466, ctc_loss=0.08016, over 19645.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2761, pruned_loss=0.05067, ctc_loss=0.09424, over 3855475.62 frames. ], batch size: 45, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:41:57,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156858.66666666666, ans=0.125 +2024-08-26 21:42:04,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=156912.0, ans=0.015 +2024-08-26 21:42:08,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=156912.0, ans=0.125 +2024-08-26 21:42:10,408 INFO [train.py:1114] (3/4) Epoch 12, batch 2050, loss[loss=0.1745, simple_loss=0.2471, pruned_loss=0.0377, ctc_loss=0.06604, over 19724.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2753, pruned_loss=0.05035, ctc_loss=0.09377, over 3851501.54 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:42:31,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=157072.0, ans=0.0 +2024-08-26 21:42:33,037 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.456e+02 1.628e+02 1.934e+02 3.317e+02, threshold=3.256e+02, percent-clipped=1.0 +2024-08-26 21:42:34,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157072.0, ans=0.125 +2024-08-26 21:42:34,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=157072.0, ans=0.025 +2024-08-26 21:42:37,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.25 vs. 
limit=12.0 +2024-08-26 21:42:44,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=157125.33333333334, ans=0.125 +2024-08-26 21:42:53,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157178.66666666666, ans=0.125 +2024-08-26 21:42:55,849 INFO [train.py:1114] (3/4) Epoch 12, batch 2100, loss[loss=0.2068, simple_loss=0.2763, pruned_loss=0.04915, ctc_loss=0.09715, over 19761.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2744, pruned_loss=0.04976, ctc_loss=0.09277, over 3858296.91 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:43:18,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=157338.66666666666, ans=0.04949747468305833 +2024-08-26 21:44:05,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=157498.66666666666, ans=0.2 +2024-08-26 21:44:06,219 INFO [train.py:1114] (3/4) Epoch 12, batch 2150, loss[loss=0.1907, simple_loss=0.2576, pruned_loss=0.04495, ctc_loss=0.08476, over 19848.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2737, pruned_loss=0.04958, ctc_loss=0.09235, over 3869541.67 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:44:13,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=157498.66666666666, ans=0.125 +2024-08-26 21:44:24,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=15.0 +2024-08-26 21:44:27,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.483e+02 1.683e+02 2.213e+02 4.687e+02, threshold=3.365e+02, percent-clipped=1.0 +2024-08-26 21:44:28,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=157605.33333333334, ans=0.125 +2024-08-26 21:45:25,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=157712.0, ans=0.125 +2024-08-26 21:45:26,961 INFO [train.py:1114] (3/4) Epoch 12, batch 2200, loss[loss=0.2068, simple_loss=0.2833, pruned_loss=0.04672, ctc_loss=0.0921, over 19575.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2734, pruned_loss=0.04927, ctc_loss=0.09176, over 3867635.59 frames. ], batch size: 57, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:45:41,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=157818.66666666666, ans=0.125 +2024-08-26 21:45:41,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=157818.66666666666, ans=0.0 +2024-08-26 21:45:45,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=157872.0, ans=0.0 +2024-08-26 21:45:49,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. 
limit=15.0 +2024-08-26 21:46:03,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=157978.66666666666, ans=0.2 +2024-08-26 21:46:05,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=157978.66666666666, ans=0.2 +2024-08-26 21:46:10,345 INFO [train.py:1114] (3/4) Epoch 12, batch 2250, loss[loss=0.1873, simple_loss=0.2687, pruned_loss=0.03842, ctc_loss=0.07265, over 19635.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2733, pruned_loss=0.04905, ctc_loss=0.09152, over 3868137.32 frames. ], batch size: 55, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:46:17,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=158032.0, ans=0.125 +2024-08-26 21:46:31,823 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.542e+02 1.805e+02 2.126e+02 6.638e+02, threshold=3.611e+02, percent-clipped=1.0 +2024-08-26 21:46:48,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158245.33333333334, ans=0.125 +2024-08-26 21:46:53,550 INFO [train.py:1114] (3/4) Epoch 12, batch 2300, loss[loss=0.1917, simple_loss=0.2599, pruned_loss=0.0452, ctc_loss=0.08272, over 19507.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2728, pruned_loss=0.04936, ctc_loss=0.09198, over 3861735.12 frames. ], batch size: 49, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:47:04,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=158352.0, ans=0.1 +2024-08-26 21:47:06,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=158352.0, ans=0.0 +2024-08-26 21:47:06,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.69 vs. limit=15.0 +2024-08-26 21:47:19,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158458.66666666666, ans=0.125 +2024-08-26 21:47:23,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=158458.66666666666, ans=0.0 +2024-08-26 21:47:25,772 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.90 vs. limit=22.5 +2024-08-26 21:47:31,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=158512.0, ans=0.0 +2024-08-26 21:47:36,513 INFO [train.py:1114] (3/4) Epoch 12, batch 2350, loss[loss=0.2363, simple_loss=0.299, pruned_loss=0.0636, ctc_loss=0.1161, over 19675.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2732, pruned_loss=0.05007, ctc_loss=0.09336, over 3864136.16 frames. ], batch size: 63, lr: 1.23e-02, grad_scale: 16.0 +2024-08-26 21:47:47,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.76 vs. 
limit=22.5 +2024-08-26 21:47:58,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.457e+02 1.679e+02 1.938e+02 3.188e+02, threshold=3.358e+02, percent-clipped=0.0 +2024-08-26 21:48:00,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. limit=15.0 +2024-08-26 21:48:21,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.80 vs. limit=22.5 +2024-08-26 21:48:21,594 INFO [train.py:1114] (3/4) Epoch 12, batch 2400, loss[loss=0.2164, simple_loss=0.2864, pruned_loss=0.05224, ctc_loss=0.1046, over 19333.00 frames. ], tot_loss[loss=0.208, simple_loss=0.276, pruned_loss=0.05098, ctc_loss=0.095, over 3857334.42 frames. ], batch size: 71, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 21:49:03,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=158885.33333333334, ans=0.125 +2024-08-26 21:49:03,847 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:49:22,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=158992.0, ans=0.125 +2024-08-26 21:49:26,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=158992.0, ans=0.125 +2024-08-26 21:49:29,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=158992.0, ans=0.125 +2024-08-26 21:49:33,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=159045.33333333334, ans=0.2 +2024-08-26 21:49:36,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=159045.33333333334, ans=0.2 +2024-08-26 21:49:41,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=159098.66666666666, ans=0.2 +2024-08-26 21:49:42,231 INFO [train.py:1114] (3/4) Epoch 12, batch 2450, loss[loss=0.2776, simple_loss=0.3196, pruned_loss=0.08399, ctc_loss=0.169, over 13367.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2804, pruned_loss=0.05421, ctc_loss=0.1011, over 3729493.54 frames. 
], batch size: 140, lr: 1.22e-02, grad_scale: 32.0 +2024-08-26 21:49:49,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=159098.66666666666, ans=0.09899494936611666 +2024-08-26 21:49:54,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=159152.0, ans=0.0 +2024-08-26 21:50:05,690 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.611e+02 1.857e+02 2.069e+02 3.042e+02, threshold=3.714e+02, percent-clipped=0.0 +2024-08-26 21:50:11,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159258.66666666666, ans=0.125 +2024-08-26 21:50:14,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=159258.66666666666, ans=0.125 +2024-08-26 21:50:15,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159258.66666666666, ans=0.125 +2024-08-26 21:51:14,815 INFO [train.py:1114] (3/4) Epoch 13, batch 0, loss[loss=0.1912, simple_loss=0.2546, pruned_loss=0.04717, ctc_loss=0.08365, over 19823.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2546, pruned_loss=0.04717, ctc_loss=0.08365, over 19823.00 frames. ], batch size: 49, lr: 1.18e-02, grad_scale: 16.0 +2024-08-26 21:51:14,815 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 21:51:26,515 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.4765, 4.6658, 5.2633, 4.9692], device='cuda:3') +2024-08-26 21:51:28,895 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1795, simple_loss=0.2723, pruned_loss=0.03226, ctc_loss=0.05568, over 944034.00 frames. +2024-08-26 21:51:28,896 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB +2024-08-26 21:51:30,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=159306.66666666666, ans=0.0 +2024-08-26 21:51:32,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=159306.66666666666, ans=0.0 +2024-08-26 21:51:41,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159360.0, ans=0.125 +2024-08-26 21:51:55,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=159413.33333333334, ans=0.125 +2024-08-26 21:52:00,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.11 vs. limit=22.5 +2024-08-26 21:52:02,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=159466.66666666666, ans=0.025 +2024-08-26 21:52:04,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=159466.66666666666, ans=0.125 +2024-08-26 21:52:08,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=159520.0, ans=0.025 +2024-08-26 21:52:10,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.52 vs. 
limit=12.0 +2024-08-26 21:52:18,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159573.33333333334, ans=0.125 +2024-08-26 21:52:18,732 INFO [train.py:1114] (3/4) Epoch 13, batch 50, loss[loss=0.1794, simple_loss=0.251, pruned_loss=0.03866, ctc_loss=0.07616, over 19731.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2769, pruned_loss=0.05194, ctc_loss=0.09785, over 844332.57 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:52:31,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=159626.66666666666, ans=0.2 +2024-08-26 21:52:39,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=159680.0, ans=0.125 +2024-08-26 21:52:47,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=159680.0, ans=0.2 +2024-08-26 21:52:56,412 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.509e+02 1.748e+02 2.087e+02 2.763e+02, threshold=3.495e+02, percent-clipped=0.0 +2024-08-26 21:53:00,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=159786.66666666666, ans=0.2 +2024-08-26 21:53:07,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=159840.0, ans=0.95 +2024-08-26 21:53:07,761 INFO [train.py:1114] (3/4) Epoch 13, batch 100, loss[loss=0.2043, simple_loss=0.2714, pruned_loss=0.05068, ctc_loss=0.08937, over 19736.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2775, pruned_loss=0.0511, ctc_loss=0.09575, over 1499336.38 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:53:12,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=159840.0, ans=0.0 +2024-08-26 21:53:14,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159840.0, ans=0.1 +2024-08-26 21:53:24,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=159893.33333333334, ans=0.1 +2024-08-26 21:53:32,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=15.0 +2024-08-26 21:53:34,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159946.66666666666, ans=0.125 +2024-08-26 21:53:39,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=160000.0, ans=0.0 +2024-08-26 21:53:40,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160000.0, ans=0.125 +2024-08-26 21:53:40,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.06 vs. 
limit=12.0 +2024-08-26 21:54:17,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=160053.33333333334, ans=0.0 +2024-08-26 21:54:17,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160053.33333333334, ans=0.125 +2024-08-26 21:54:18,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=160053.33333333334, ans=0.0 +2024-08-26 21:54:20,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=160053.33333333334, ans=0.95 +2024-08-26 21:54:23,448 INFO [train.py:1114] (3/4) Epoch 13, batch 150, loss[loss=0.2029, simple_loss=0.2592, pruned_loss=0.05335, ctc_loss=0.09974, over 19721.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2746, pruned_loss=0.04983, ctc_loss=0.09326, over 2028110.84 frames. ], batch size: 47, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:54:26,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=160106.66666666666, ans=0.015 +2024-08-26 21:54:46,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=160213.33333333334, ans=0.025 +2024-08-26 21:54:50,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0 +2024-08-26 21:54:52,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=160266.66666666666, ans=0.0 +2024-08-26 21:54:58,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-08-26 21:55:02,358 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.397e+02 1.535e+02 1.726e+02 2.735e+02, threshold=3.069e+02, percent-clipped=0.0 +2024-08-26 21:55:13,389 INFO [train.py:1114] (3/4) Epoch 13, batch 200, loss[loss=0.2457, simple_loss=0.3031, pruned_loss=0.06847, ctc_loss=0.1285, over 18464.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2733, pruned_loss=0.04951, ctc_loss=0.0925, over 2435881.61 frames. ], batch size: 85, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:55:21,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160426.66666666666, ans=0.125 +2024-08-26 21:55:26,315 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 21:55:27,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=160426.66666666666, ans=0.2 +2024-08-26 21:55:30,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-08-26 21:55:38,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.02 vs. limit=15.0 +2024-08-26 21:56:19,213 INFO [train.py:1114] (3/4) Epoch 13, batch 250, loss[loss=0.2285, simple_loss=0.2992, pruned_loss=0.05877, ctc_loss=0.1008, over 19430.00 frames. 
], tot_loss[loss=0.2039, simple_loss=0.2732, pruned_loss=0.04897, ctc_loss=0.09184, over 2756746.88 frames. ], batch size: 67, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:56:19,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160640.0, ans=0.125 +2024-08-26 21:56:24,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-08-26 21:56:32,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160693.33333333334, ans=0.1 +2024-08-26 21:56:36,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=160746.66666666666, ans=0.125 +2024-08-26 21:56:48,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=160800.0, ans=0.125 +2024-08-26 21:56:50,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=160800.0, ans=0.025 +2024-08-26 21:56:56,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=160800.0, ans=0.125 +2024-08-26 21:56:57,709 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.442e+02 1.721e+02 2.190e+02 3.294e+02, threshold=3.441e+02, percent-clipped=2.0 +2024-08-26 21:57:07,801 INFO [train.py:1114] (3/4) Epoch 13, batch 300, loss[loss=0.2037, simple_loss=0.2833, pruned_loss=0.04561, ctc_loss=0.0822, over 19508.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2727, pruned_loss=0.0488, ctc_loss=0.09141, over 3001398.51 frames. ], batch size: 61, lr: 1.17e-02, grad_scale: 8.0 +2024-08-26 21:57:10,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=160906.66666666666, ans=0.0 +2024-08-26 21:57:29,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.74 vs. limit=15.0 +2024-08-26 21:57:55,514 INFO [train.py:1114] (3/4) Epoch 13, batch 350, loss[loss=0.1795, simple_loss=0.2495, pruned_loss=0.03984, ctc_loss=0.07475, over 19777.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.273, pruned_loss=0.04864, ctc_loss=0.0911, over 3190647.47 frames. ], batch size: 48, lr: 1.17e-02, grad_scale: 8.0 +2024-08-26 21:58:05,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=161226.66666666666, ans=0.0 +2024-08-26 21:58:26,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=161333.33333333334, ans=15.0 +2024-08-26 21:58:30,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.35 vs. 
limit=22.5 +2024-08-26 21:58:32,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=161333.33333333334, ans=0.0 +2024-08-26 21:58:33,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.400e+02 1.583e+02 1.867e+02 2.908e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-26 21:58:43,177 INFO [train.py:1114] (3/4) Epoch 13, batch 400, loss[loss=0.1954, simple_loss=0.2758, pruned_loss=0.04153, ctc_loss=0.0799, over 19494.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2727, pruned_loss=0.04864, ctc_loss=0.09091, over 3343038.56 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:58:51,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161493.33333333334, ans=0.1 +2024-08-26 21:58:51,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=161493.33333333334, ans=0.125 +2024-08-26 21:58:51,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=161493.33333333334, ans=0.0 +2024-08-26 21:58:55,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=161493.33333333334, ans=0.125 +2024-08-26 21:59:13,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=161600.0, ans=0.5 +2024-08-26 21:59:21,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=161600.0, ans=0.04949747468305833 +2024-08-26 21:59:31,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.72 vs. limit=10.0 +2024-08-26 21:59:32,239 INFO [train.py:1114] (3/4) Epoch 13, batch 450, loss[loss=0.2063, simple_loss=0.2822, pruned_loss=0.04739, ctc_loss=0.08876, over 19622.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2729, pruned_loss=0.04869, ctc_loss=0.0911, over 3451375.65 frames. ], batch size: 55, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 21:59:33,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=22.5 +2024-08-26 21:59:58,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161813.33333333334, ans=0.125 +2024-08-26 22:00:10,469 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.449e+02 1.659e+02 1.894e+02 3.083e+02, threshold=3.319e+02, percent-clipped=0.0 +2024-08-26 22:00:11,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=161920.0, ans=0.125 +2024-08-26 22:00:13,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. limit=10.0 +2024-08-26 22:00:20,534 INFO [train.py:1114] (3/4) Epoch 13, batch 500, loss[loss=0.2071, simple_loss=0.2825, pruned_loss=0.04778, ctc_loss=0.09056, over 19673.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2716, pruned_loss=0.04811, ctc_loss=0.09015, over 3546446.50 frames. 
], batch size: 63, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 22:00:27,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=161973.33333333334, ans=0.1 +2024-08-26 22:01:10,504 INFO [train.py:1114] (3/4) Epoch 13, batch 550, loss[loss=0.2095, simple_loss=0.2772, pruned_loss=0.05059, ctc_loss=0.1013, over 19375.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2719, pruned_loss=0.04838, ctc_loss=0.09074, over 3609598.00 frames. ], batch size: 71, lr: 1.17e-02, grad_scale: 16.0 +2024-08-26 22:01:11,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162240.0, ans=0.0 +2024-08-26 22:01:43,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=162400.0, ans=0.05 +2024-08-26 22:01:59,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.555e+02 1.782e+02 2.360e+02 4.088e+02, threshold=3.564e+02, percent-clipped=3.0 +2024-08-26 22:02:10,203 INFO [train.py:1114] (3/4) Epoch 13, batch 600, loss[loss=0.2273, simple_loss=0.2968, pruned_loss=0.05816, ctc_loss=0.1038, over 19364.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2718, pruned_loss=0.04826, ctc_loss=0.09031, over 3666015.15 frames. ], batch size: 67, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:02:12,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162506.66666666666, ans=0.1 +2024-08-26 22:02:19,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=162560.0, ans=0.0 +2024-08-26 22:02:36,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=162613.33333333334, ans=0.025 +2024-08-26 22:02:39,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=162666.66666666666, ans=0.0 +2024-08-26 22:02:40,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162666.66666666666, ans=0.125 +2024-08-26 22:02:45,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=162666.66666666666, ans=0.0 +2024-08-26 22:02:55,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=162720.0, ans=0.04949747468305833 +2024-08-26 22:02:58,294 INFO [train.py:1114] (3/4) Epoch 13, batch 650, loss[loss=0.1913, simple_loss=0.2608, pruned_loss=0.0436, ctc_loss=0.08672, over 19780.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2712, pruned_loss=0.04797, ctc_loss=0.0897, over 3716724.57 frames. 
], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:03:05,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=162773.33333333334, ans=0.2 +2024-08-26 22:03:40,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=162880.0, ans=0.05 +2024-08-26 22:03:42,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162880.0, ans=0.1 +2024-08-26 22:03:45,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=162880.0, ans=0.125 +2024-08-26 22:03:47,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=15.0 +2024-08-26 22:03:57,397 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.208e+02 1.372e+02 1.512e+02 1.802e+02 3.637e+02, threshold=3.024e+02, percent-clipped=1.0 +2024-08-26 22:04:00,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=162986.66666666666, ans=0.1 +2024-08-26 22:04:00,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=162986.66666666666, ans=0.0 +2024-08-26 22:04:03,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=162986.66666666666, ans=0.1 +2024-08-26 22:04:05,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=162986.66666666666, ans=0.125 +2024-08-26 22:04:09,886 INFO [train.py:1114] (3/4) Epoch 13, batch 700, loss[loss=0.1901, simple_loss=0.257, pruned_loss=0.04476, ctc_loss=0.08399, over 19732.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2715, pruned_loss=0.04819, ctc_loss=0.08995, over 3748157.86 frames. ], batch size: 51, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:04:26,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=163093.33333333334, ans=0.2 +2024-08-26 22:04:33,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=163146.66666666666, ans=0.5 +2024-08-26 22:04:36,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=163146.66666666666, ans=0.125 +2024-08-26 22:04:51,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=163200.0, ans=0.125 +2024-08-26 22:04:52,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=163200.0, ans=0.125 +2024-08-26 22:05:04,311 INFO [train.py:1114] (3/4) Epoch 13, batch 750, loss[loss=0.2095, simple_loss=0.2783, pruned_loss=0.05078, ctc_loss=0.09816, over 19486.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.271, pruned_loss=0.04819, ctc_loss=0.08986, over 3774054.91 frames. 
], batch size: 54, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:05:17,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=163360.0, ans=10.0 +2024-08-26 22:05:27,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=163413.33333333334, ans=0.0 +2024-08-26 22:05:30,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=163413.33333333334, ans=10.0 +2024-08-26 22:05:39,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 22:05:40,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163466.66666666666, ans=0.125 +2024-08-26 22:05:41,870 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.233e+02 1.560e+02 1.959e+02 2.402e+02 3.823e+02, threshold=3.919e+02, percent-clipped=10.0 +2024-08-26 22:05:56,846 INFO [train.py:1114] (3/4) Epoch 13, batch 800, loss[loss=0.1871, simple_loss=0.2504, pruned_loss=0.04499, ctc_loss=0.08468, over 19823.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2708, pruned_loss=0.0481, ctc_loss=0.08984, over 3795086.49 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 32.0 +2024-08-26 22:06:20,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163680.0, ans=0.0 +2024-08-26 22:06:22,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=163680.0, ans=0.2 +2024-08-26 22:06:29,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163733.33333333334, ans=0.1 +2024-08-26 22:06:41,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=163786.66666666666, ans=0.0 +2024-08-26 22:06:47,024 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:06:50,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0 +2024-08-26 22:06:54,792 INFO [train.py:1114] (3/4) Epoch 13, batch 850, loss[loss=0.204, simple_loss=0.2848, pruned_loss=0.04484, ctc_loss=0.08388, over 19677.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2705, pruned_loss=0.04782, ctc_loss=0.08925, over 3813675.41 frames. 
], batch size: 59, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:06:59,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=163840.0, ans=0.0 +2024-08-26 22:07:07,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=163893.33333333334, ans=0.2 +2024-08-26 22:07:13,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163893.33333333334, ans=0.125 +2024-08-26 22:07:27,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=164000.0, ans=0.125 +2024-08-26 22:07:37,694 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.442e+02 1.756e+02 2.038e+02 3.459e+02, threshold=3.512e+02, percent-clipped=0.0 +2024-08-26 22:07:44,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=164053.33333333334, ans=0.0 +2024-08-26 22:07:50,316 INFO [train.py:1114] (3/4) Epoch 13, batch 900, loss[loss=0.195, simple_loss=0.2617, pruned_loss=0.04676, ctc_loss=0.08711, over 19407.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2713, pruned_loss=0.04819, ctc_loss=0.08984, over 3816310.23 frames. ], batch size: 48, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:07:57,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=164106.66666666666, ans=0.2 +2024-08-26 22:08:13,258 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:08:31,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.35 vs. limit=15.0 +2024-08-26 22:08:31,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=164320.0, ans=0.0 +2024-08-26 22:08:40,787 INFO [train.py:1114] (3/4) Epoch 13, batch 950, loss[loss=0.2045, simple_loss=0.2691, pruned_loss=0.05155, ctc_loss=0.09232, over 19485.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.272, pruned_loss=0.04886, ctc_loss=0.09116, over 3817781.80 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:08:43,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=164373.33333333334, ans=0.0 +2024-08-26 22:08:44,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=164373.33333333334, ans=0.125 +2024-08-26 22:08:59,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164426.66666666666, ans=0.1 +2024-08-26 22:09:01,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=164480.0, ans=0.0 +2024-08-26 22:09:06,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164480.0, ans=0.125 +2024-08-26 22:09:20,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.93 vs. 
limit=10.0 +2024-08-26 22:09:20,514 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.559e+02 1.935e+02 2.172e+02 5.830e+02, threshold=3.869e+02, percent-clipped=1.0 +2024-08-26 22:09:22,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.31 vs. limit=22.5 +2024-08-26 22:09:22,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=164586.66666666666, ans=15.0 +2024-08-26 22:09:27,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=164586.66666666666, ans=0.125 +2024-08-26 22:09:29,541 INFO [train.py:1114] (3/4) Epoch 13, batch 1000, loss[loss=0.1939, simple_loss=0.2633, pruned_loss=0.04585, ctc_loss=0.0819, over 19858.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2723, pruned_loss=0.04904, ctc_loss=0.09138, over 3813887.63 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:09:37,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.53 vs. limit=15.0 +2024-08-26 22:09:39,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=164693.33333333334, ans=0.125 +2024-08-26 22:09:47,308 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 22:10:08,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=164800.0, ans=0.125 +2024-08-26 22:10:16,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.54 vs. limit=12.0 +2024-08-26 22:10:19,130 INFO [train.py:1114] (3/4) Epoch 13, batch 1050, loss[loss=0.2059, simple_loss=0.282, pruned_loss=0.04733, ctc_loss=0.08768, over 19851.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2715, pruned_loss=0.0487, ctc_loss=0.09081, over 3821461.55 frames. ], batch size: 57, lr: 1.16e-02, grad_scale: 16.0 +2024-08-26 22:10:22,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.87 vs. limit=15.0 +2024-08-26 22:10:32,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=164960.0, ans=0.0 +2024-08-26 22:10:33,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=164960.0, ans=0.025 +2024-08-26 22:10:53,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=165066.66666666666, ans=0.125 +2024-08-26 22:10:55,508 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.353e+02 1.566e+02 1.889e+02 2.686e+02, threshold=3.131e+02, percent-clipped=0.0 +2024-08-26 22:10:57,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=165120.0, ans=0.0 +2024-08-26 22:11:02,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.72 vs. 
limit=15.0
+2024-08-26 22:11:06,698 INFO [train.py:1114] (3/4) Epoch 13, batch 1100, loss[loss=0.1853, simple_loss=0.2644, pruned_loss=0.03839, ctc_loss=0.07362, over 19582.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2717, pruned_loss=0.04846, ctc_loss=0.09057, over 3828495.58 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 16.0
+2024-08-26 22:11:09,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0
+2024-08-26 22:11:15,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=165226.66666666666, ans=0.125
+2024-08-26 22:11:20,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=165226.66666666666, ans=0.125
+2024-08-26 22:11:38,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=165333.33333333334, ans=0.0
+2024-08-26 22:11:42,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=165333.33333333334, ans=0.125
+2024-08-26 22:11:46,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=165386.66666666666, ans=0.07
+2024-08-26 22:11:55,435 INFO [train.py:1114] (3/4) Epoch 13, batch 1150, loss[loss=0.2175, simple_loss=0.2933, pruned_loss=0.05132, ctc_loss=0.09751, over 19573.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2718, pruned_loss=0.04849, ctc_loss=0.09062, over 3826488.39 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 16.0
+2024-08-26 22:11:57,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=165440.0, ans=0.125
+2024-08-26 22:12:00,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0
+2024-08-26 22:12:03,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165440.0, ans=0.1
+2024-08-26 22:12:08,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=165493.33333333334, ans=0.125
+2024-08-26 22:12:18,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0
+2024-08-26 22:12:34,675 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.454e+02 1.639e+02 1.902e+02 3.180e+02, threshold=3.277e+02, percent-clipped=1.0
+2024-08-26 22:12:41,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=165653.33333333334, ans=0.0
+2024-08-26 22:12:43,811 INFO [train.py:1114] (3/4) Epoch 13, batch 1200, loss[loss=0.2199, simple_loss=0.2906, pruned_loss=0.05443, ctc_loss=0.1009, over 19836.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.273, pruned_loss=0.04879, ctc_loss=0.09122, over 3822233.55 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:12:45,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.78 vs. limit=15.0
+2024-08-26 22:13:20,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=165866.66666666666, ans=0.125
+2024-08-26 22:13:32,333 INFO [train.py:1114] (3/4) Epoch 13, batch 1250, loss[loss=0.2303, simple_loss=0.2959, pruned_loss=0.0609, ctc_loss=0.107, over 19559.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2736, pruned_loss=0.049, ctc_loss=0.09135, over 3841072.53 frames. ], batch size: 61, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:13:39,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.07 vs. limit=6.0
+2024-08-26 22:13:53,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=166080.0, ans=0.2
+2024-08-26 22:14:07,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=166133.33333333334, ans=0.05
+2024-08-26 22:14:11,816 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.418e+02 1.637e+02 2.002e+02 4.206e+02, threshold=3.274e+02, percent-clipped=1.0
+2024-08-26 22:14:23,434 INFO [train.py:1114] (3/4) Epoch 13, batch 1300, loss[loss=0.2161, simple_loss=0.2838, pruned_loss=0.05302, ctc_loss=0.1058, over 18839.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2724, pruned_loss=0.04838, ctc_loss=0.09032, over 3844356.87 frames. ], batch size: 76, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:14:30,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=166240.0, ans=0.125
+2024-08-26 22:14:31,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=166293.33333333334, ans=0.125
+2024-08-26 22:14:46,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=166346.66666666666, ans=0.0
+2024-08-26 22:14:59,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.41 vs. limit=6.0
+2024-08-26 22:15:09,560 INFO [train.py:1114] (3/4) Epoch 13, batch 1350, loss[loss=0.1984, simple_loss=0.2754, pruned_loss=0.04376, ctc_loss=0.08465, over 19755.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2722, pruned_loss=0.04816, ctc_loss=0.08968, over 3856048.39 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:15:16,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166506.66666666666, ans=0.1
+2024-08-26 22:15:20,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=166560.0, ans=0.0
+2024-08-26 22:15:50,046 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.605e+02 1.958e+02 2.747e+02, threshold=3.211e+02, percent-clipped=0.0
+2024-08-26 22:15:58,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.69 vs. limit=15.0
+2024-08-26 22:15:59,148 INFO [train.py:1114] (3/4) Epoch 13, batch 1400, loss[loss=0.1893, simple_loss=0.2477, pruned_loss=0.04746, ctc_loss=0.0902, over 19652.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.271, pruned_loss=0.04761, ctc_loss=0.08878, over 3862951.88 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:16:08,573 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:16:26,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=166933.33333333334, ans=0.025
+2024-08-26 22:16:30,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0
+2024-08-26 22:16:35,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=166986.66666666666, ans=0.05
+2024-08-26 22:16:37,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=166986.66666666666, ans=0.0
+2024-08-26 22:16:43,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166986.66666666666, ans=0.125
+2024-08-26 22:16:44,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=166986.66666666666, ans=0.0
+2024-08-26 22:16:47,826 INFO [train.py:1114] (3/4) Epoch 13, batch 1450, loss[loss=0.2052, simple_loss=0.2846, pruned_loss=0.0458, ctc_loss=0.08553, over 19659.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2722, pruned_loss=0.04824, ctc_loss=0.08986, over 3861419.28 frames. ], batch size: 63, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:16:50,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=167040.0, ans=0.0
+2024-08-26 22:16:59,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.81 vs. limit=15.0
+2024-08-26 22:17:12,159 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.03 vs. limit=15.0
+2024-08-26 22:17:14,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=167146.66666666666, ans=0.0
+2024-08-26 22:17:19,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167200.0, ans=0.125
+2024-08-26 22:17:27,074 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.434e+02 1.640e+02 1.966e+02 4.010e+02, threshold=3.281e+02, percent-clipped=1.0
+2024-08-26 22:17:36,397 INFO [train.py:1114] (3/4) Epoch 13, batch 1500, loss[loss=0.2149, simple_loss=0.2891, pruned_loss=0.05049, ctc_loss=0.09931, over 19575.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2727, pruned_loss=0.04848, ctc_loss=0.09046, over 3861637.72 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:17:36,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=167306.66666666666, ans=0.125
+2024-08-26 22:18:12,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.57 vs. limit=15.0
+2024-08-26 22:18:13,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.96 vs. limit=22.5
+2024-08-26 22:18:26,616 INFO [train.py:1114] (3/4) Epoch 13, batch 1550, loss[loss=0.2178, simple_loss=0.2889, pruned_loss=0.05359, ctc_loss=0.09882, over 19596.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2727, pruned_loss=0.04877, ctc_loss=0.09107, over 3845387.19 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:18:27,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=167573.33333333334, ans=0.025
+2024-08-26 22:18:44,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=167680.0, ans=0.025
+2024-08-26 22:18:54,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=167733.33333333334, ans=0.125
+2024-08-26 22:19:02,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=167733.33333333334, ans=0.125
+2024-08-26 22:19:04,505 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.500e+02 1.731e+02 2.118e+02 3.338e+02, threshold=3.463e+02, percent-clipped=1.0
+2024-08-26 22:19:12,909 INFO [train.py:1114] (3/4) Epoch 13, batch 1600, loss[loss=0.1904, simple_loss=0.2716, pruned_loss=0.03929, ctc_loss=0.07629, over 19833.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2725, pruned_loss=0.04881, ctc_loss=0.09115, over 3834864.77 frames. ], batch size: 57, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:19:21,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.65 vs. limit=10.0
+2024-08-26 22:19:30,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=167893.33333333334, ans=0.0
+2024-08-26 22:19:30,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=167893.33333333334, ans=0.2
+2024-08-26 22:20:03,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=168000.0, ans=0.0
+2024-08-26 22:20:10,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168053.33333333334, ans=0.1
+2024-08-26 22:20:10,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0
+2024-08-26 22:20:19,728 INFO [train.py:1114] (3/4) Epoch 13, batch 1650, loss[loss=0.2243, simple_loss=0.289, pruned_loss=0.05755, ctc_loss=0.1111, over 19674.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2727, pruned_loss=0.04897, ctc_loss=0.09161, over 3831702.51 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 32.0
+2024-08-26 22:20:26,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0
+2024-08-26 22:20:32,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=168160.0, ans=0.025
+2024-08-26 22:20:33,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168160.0, ans=0.0
+2024-08-26 22:20:47,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168266.66666666666, ans=0.125
+2024-08-26 22:20:50,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=168266.66666666666, ans=0.2
+2024-08-26 22:20:57,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.381e+02 1.542e+02 1.780e+02 2.683e+02, threshold=3.084e+02, percent-clipped=0.0
+2024-08-26 22:21:07,576 INFO [train.py:1114] (3/4) Epoch 13, batch 1700, loss[loss=0.1754, simple_loss=0.2395, pruned_loss=0.04146, ctc_loss=0.07101, over 19690.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2723, pruned_loss=0.04869, ctc_loss=0.09107, over 3846185.05 frames. ], batch size: 46, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:21:13,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.71 vs. limit=15.0
+2024-08-26 22:21:27,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=168480.0, ans=0.125
+2024-08-26 22:21:34,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=168480.0, ans=0.125
+2024-08-26 22:21:41,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=168533.33333333334, ans=0.2
+2024-08-26 22:21:42,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=168533.33333333334, ans=0.2
+2024-08-26 22:21:53,896 INFO [train.py:1114] (3/4) Epoch 13, batch 1750, loss[loss=0.1829, simple_loss=0.2463, pruned_loss=0.04355, ctc_loss=0.08095, over 19656.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2718, pruned_loss=0.04834, ctc_loss=0.09039, over 3851308.13 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:22:12,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=168746.66666666666, ans=0.2
+2024-08-26 22:22:15,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=168746.66666666666, ans=0.125
+2024-08-26 22:23:37,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=168746.66666666666, ans=0.2
+2024-08-26 22:23:51,717 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.438e+02 1.563e+02 1.924e+02 3.851e+02, threshold=3.126e+02, percent-clipped=3.0
+2024-08-26 22:23:55,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.31 vs. limit=5.0
+2024-08-26 22:23:55,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=168853.33333333334, ans=0.125
+2024-08-26 22:24:01,005 INFO [train.py:1114] (3/4) Epoch 13, batch 1800, loss[loss=0.2022, simple_loss=0.2761, pruned_loss=0.04788, ctc_loss=0.08116, over 19609.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2719, pruned_loss=0.04817, ctc_loss=0.08998, over 3853420.17 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 16.0
+2024-08-26 22:24:04,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168906.66666666666, ans=0.125
+2024-08-26 22:24:10,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=168960.0, ans=0.5
+2024-08-26 22:24:15,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=168960.0, ans=0.0
+2024-08-26 22:24:21,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=169013.33333333334, ans=0.0
+2024-08-26 22:24:37,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=169120.0, ans=0.05
+2024-08-26 22:24:37,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=169120.0, ans=0.125
+2024-08-26 22:24:38,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0
+2024-08-26 22:24:38,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=169120.0, ans=0.0
+2024-08-26 22:24:39,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=169120.0, ans=0.2
+2024-08-26 22:24:44,745 INFO [train.py:1114] (3/4) Epoch 13, batch 1850, loss[loss=0.2159, simple_loss=0.2858, pruned_loss=0.05335, ctc_loss=0.0981, over 19604.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2712, pruned_loss=0.04783, ctc_loss=0.08928, over 3856241.64 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 16.0
+2024-08-26 22:25:12,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=169333.33333333334, ans=0.125
+2024-08-26 22:25:21,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.546e+02 1.793e+02 2.323e+02 4.609e+02, threshold=3.586e+02, percent-clipped=7.0
+2024-08-26 22:25:29,849 INFO [train.py:1114] (3/4) Epoch 13, batch 1900, loss[loss=0.2034, simple_loss=0.2855, pruned_loss=0.04434, ctc_loss=0.08177, over 19681.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2721, pruned_loss=0.04808, ctc_loss=0.08958, over 3860124.61 frames. ], batch size: 59, lr: 1.14e-02, grad_scale: 16.0
+2024-08-26 22:25:32,218 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.61 vs. limit=15.0
+2024-08-26 22:25:44,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169493.33333333334, ans=0.1
+2024-08-26 22:26:01,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.24 vs. limit=22.5
+2024-08-26 22:26:01,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=169600.0, ans=0.0
+2024-08-26 22:26:05,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=169653.33333333334, ans=0.125
+2024-08-26 22:26:13,636 INFO [train.py:1114] (3/4) Epoch 13, batch 1950, loss[loss=0.1833, simple_loss=0.2559, pruned_loss=0.04005, ctc_loss=0.07637, over 19598.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2728, pruned_loss=0.0482, ctc_loss=0.08987, over 3869711.64 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0
+2024-08-26 22:26:13,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=169706.66666666666, ans=0.0
+2024-08-26 22:26:32,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169813.33333333334, ans=0.125
+2024-08-26 22:26:47,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=169866.66666666666, ans=0.125
+2024-08-26 22:26:47,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169866.66666666666, ans=0.1
+2024-08-26 22:26:53,521 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.527e+02 1.786e+02 2.093e+02 2.857e+02, threshold=3.573e+02, percent-clipped=0.0
+2024-08-26 22:27:00,494 INFO [train.py:1114] (3/4) Epoch 13, batch 2000, loss[loss=0.1995, simple_loss=0.2559, pruned_loss=0.05225, ctc_loss=0.0967, over 19651.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2735, pruned_loss=0.04869, ctc_loss=0.09075, over 3853944.93 frames. ], batch size: 45, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:27:05,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169973.33333333334, ans=0.0
+2024-08-26 22:27:15,391 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:27:17,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.69 vs. limit=15.0
+2024-08-26 22:27:26,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=170133.33333333334, ans=0.125
+2024-08-26 22:27:28,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=170133.33333333334, ans=0.125
+2024-08-26 22:27:32,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=170133.33333333334, ans=0.125
+2024-08-26 22:27:36,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170186.66666666666, ans=0.125
+2024-08-26 22:27:42,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170186.66666666666, ans=0.1
+2024-08-26 22:27:44,092 INFO [train.py:1114] (3/4) Epoch 13, batch 2050, loss[loss=0.1842, simple_loss=0.2475, pruned_loss=0.04479, ctc_loss=0.07822, over 19702.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2723, pruned_loss=0.04825, ctc_loss=0.08998, over 3850415.50 frames. ], batch size: 47, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:27:46,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=170240.0, ans=0.0
+2024-08-26 22:27:49,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0
+2024-08-26 22:27:51,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.41 vs. limit=10.0
+2024-08-26 22:27:54,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=170293.33333333334, ans=0.0
+2024-08-26 22:28:04,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=170346.66666666666, ans=0.05
+2024-08-26 22:28:20,409 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.436e+02 1.652e+02 1.928e+02 2.658e+02, threshold=3.303e+02, percent-clipped=0.0
+2024-08-26 22:28:27,538 INFO [train.py:1114] (3/4) Epoch 13, batch 2100, loss[loss=0.1962, simple_loss=0.2671, pruned_loss=0.04569, ctc_loss=0.08494, over 19765.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2713, pruned_loss=0.04771, ctc_loss=0.08921, over 3857634.22 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:28:31,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=170506.66666666666, ans=0.07
+2024-08-26 22:28:42,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.08 vs. limit=15.0
+2024-08-26 22:28:45,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=170613.33333333334, ans=0.125
+2024-08-26 22:28:47,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=170613.33333333334, ans=0.0
+2024-08-26 22:28:50,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=170613.33333333334, ans=0.2
+2024-08-26 22:29:05,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=170666.66666666666, ans=0.125
+2024-08-26 22:29:18,564 INFO [train.py:1114] (3/4) Epoch 13, batch 2150, loss[loss=0.1841, simple_loss=0.2563, pruned_loss=0.03991, ctc_loss=0.07999, over 19866.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2703, pruned_loss=0.04736, ctc_loss=0.08865, over 3868862.44 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:29:29,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=170826.66666666666, ans=0.05
+2024-08-26 22:29:47,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170933.33333333334, ans=0.125
+2024-08-26 22:29:47,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=6.0
+2024-08-26 22:29:55,028 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.462e+02 1.698e+02 2.269e+02 4.218e+02, threshold=3.397e+02, percent-clipped=7.0
+2024-08-26 22:29:56,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=170986.66666666666, ans=0.0
+2024-08-26 22:30:02,058 INFO [train.py:1114] (3/4) Epoch 13, batch 2200, loss[loss=0.2123, simple_loss=0.283, pruned_loss=0.05117, ctc_loss=0.098, over 19591.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2708, pruned_loss=0.04747, ctc_loss=0.08889, over 3867637.94 frames. ], batch size: 57, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:30:03,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171040.0, ans=0.125
+2024-08-26 22:30:18,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=171093.33333333334, ans=0.125
+2024-08-26 22:30:20,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=171146.66666666666, ans=0.2
+2024-08-26 22:30:20,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.56 vs. limit=15.0
+2024-08-26 22:30:26,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=171146.66666666666, ans=0.125
+2024-08-26 22:30:46,573 INFO [train.py:1114] (3/4) Epoch 13, batch 2250, loss[loss=0.1975, simple_loss=0.2773, pruned_loss=0.04313, ctc_loss=0.07859, over 19632.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2708, pruned_loss=0.04744, ctc_loss=0.08882, over 3867400.59 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 32.0
+2024-08-26 22:30:51,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171306.66666666666, ans=0.1
+2024-08-26 22:31:17,470 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:31:22,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=171520.0, ans=0.125
+2024-08-26 22:31:22,561 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.444e+02 1.610e+02 1.869e+02 3.635e+02, threshold=3.220e+02, percent-clipped=1.0
+2024-08-26 22:31:23,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=171520.0, ans=0.2
+2024-08-26 22:31:27,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.62 vs. limit=15.0
+2024-08-26 22:31:29,430 INFO [train.py:1114] (3/4) Epoch 13, batch 2300, loss[loss=0.2044, simple_loss=0.2712, pruned_loss=0.05096, ctc_loss=0.08938, over 19500.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2704, pruned_loss=0.04742, ctc_loss=0.08861, over 3861275.80 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:31:39,168 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:31:48,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=171680.0, ans=0.0
+2024-08-26 22:31:59,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=171733.33333333334, ans=0.035
+2024-08-26 22:32:01,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=171733.33333333334, ans=0.125
+2024-08-26 22:32:13,418 INFO [train.py:1114] (3/4) Epoch 13, batch 2350, loss[loss=0.2107, simple_loss=0.2876, pruned_loss=0.04803, ctc_loss=0.09419, over 19703.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2702, pruned_loss=0.04754, ctc_loss=0.08886, over 3863262.31 frames. ], batch size: 63, lr: 1.13e-02, grad_scale: 16.0
+2024-08-26 22:32:18,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=171840.0, ans=0.125
+2024-08-26 22:32:38,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=171893.33333333334, ans=0.125
+2024-08-26 22:33:01,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172053.33333333334, ans=0.125
+2024-08-26 22:33:04,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172053.33333333334, ans=0.125
+2024-08-26 22:33:04,844 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.652e+02 1.956e+02 2.487e+02 4.828e+02, threshold=3.913e+02, percent-clipped=4.0
+2024-08-26 22:33:10,734 INFO [train.py:1114] (3/4) Epoch 13, batch 2400, loss[loss=0.2057, simple_loss=0.2782, pruned_loss=0.049, ctc_loss=0.08797, over 19317.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.273, pruned_loss=0.04854, ctc_loss=0.09061, over 3857386.77 frames. ], batch size: 71, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:33:16,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=172106.66666666666, ans=0.125
+2024-08-26 22:33:25,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=172160.0, ans=0.0
+2024-08-26 22:33:28,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=172213.33333333334, ans=0.125
+2024-08-26 22:33:30,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.79 vs. limit=10.0
+2024-08-26 22:33:31,543 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:33:40,480 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-26 22:33:43,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172266.66666666666, ans=0.125
+2024-08-26 22:33:54,743 INFO [train.py:1114] (3/4) Epoch 13, batch 2450, loss[loss=0.2575, simple_loss=0.3027, pruned_loss=0.07659, ctc_loss=0.1477, over 13453.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2769, pruned_loss=0.05148, ctc_loss=0.09622, over 3730288.78 frames. ], batch size: 140, lr: 1.13e-02, grad_scale: 32.0
+2024-08-26 22:33:55,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=172373.33333333334, ans=0.2
+2024-08-26 22:34:03,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=15.0
+2024-08-26 22:34:05,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172426.66666666666, ans=0.1
+2024-08-26 22:34:05,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=172426.66666666666, ans=0.125
+2024-08-26 22:38:18,465 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.1885, simple_loss=0.2529, pruned_loss=0.04568, ctc_loss=0.08179, over 19809.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2529, pruned_loss=0.04568, ctc_loss=0.08179, over 19809.00 frames. ], batch size: 49, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:38:18,466 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-26 22:38:36,018 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1777, simple_loss=0.2705, pruned_loss=0.03149, ctc_loss=0.05468, over 944034.00 frames.
+2024-08-26 22:38:36,019 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB
+2024-08-26 22:38:38,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.72 vs. limit=15.0
+2024-08-26 22:38:39,671 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.631e+02 1.782e+02 1.968e+02 3.125e+02, threshold=3.565e+02, percent-clipped=0.0
+2024-08-26 22:38:45,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172634.66666666666, ans=0.1
+2024-08-26 22:38:45,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=172634.66666666666, ans=0.0
+2024-08-26 22:38:50,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=172634.66666666666, ans=0.125
+2024-08-26 22:39:03,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=172688.0, ans=0.0
+2024-08-26 22:39:10,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172741.33333333334, ans=0.125
+2024-08-26 22:40:00,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=172794.66666666666, ans=0.0
+2024-08-26 22:40:03,229 INFO [train.py:1114] (3/4) Epoch 14, batch 50, loss[loss=0.1795, simple_loss=0.2492, pruned_loss=0.03963, ctc_loss=0.07603, over 19744.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2731, pruned_loss=0.04917, ctc_loss=0.09251, over 844617.17 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:40:14,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=172848.0, ans=0.04949747468305833
+2024-08-26 22:42:43,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=172954.66666666666, ans=0.125
+2024-08-26 22:42:51,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=172954.66666666666, ans=0.2
+2024-08-26 22:42:59,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=172954.66666666666, ans=0.125
+2024-08-26 22:43:11,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=173061.33333333334, ans=0.125
+2024-08-26 22:43:14,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=173061.33333333334, ans=0.2
+2024-08-26 22:43:19,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.42 vs. limit=22.5
+2024-08-26 22:43:20,287 INFO [train.py:1114] (3/4) Epoch 14, batch 100, loss[loss=0.2001, simple_loss=0.2702, pruned_loss=0.04652, ctc_loss=0.09238, over 19736.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2741, pruned_loss=0.0494, ctc_loss=0.09265, over 1499497.26 frames. ], batch size: 51, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:43:23,805 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.427e+02 1.577e+02 1.836e+02 2.542e+02, threshold=3.153e+02, percent-clipped=0.0
+2024-08-26 22:43:25,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=173114.66666666666, ans=0.125
+2024-08-26 22:43:28,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.77 vs. limit=22.5
+2024-08-26 22:43:30,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.97 vs. limit=15.0
+2024-08-26 22:44:05,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173328.0, ans=0.1
+2024-08-26 22:44:09,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=173381.33333333334, ans=0.0
+2024-08-26 22:44:10,504 INFO [train.py:1114] (3/4) Epoch 14, batch 150, loss[loss=0.168, simple_loss=0.2385, pruned_loss=0.03536, ctc_loss=0.06699, over 19682.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2721, pruned_loss=0.04835, ctc_loss=0.09052, over 2027234.41 frames. ], batch size: 47, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:44:12,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173381.33333333334, ans=0.125
+2024-08-26 22:44:13,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=173381.33333333334, ans=0.0
+2024-08-26 22:44:24,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=173434.66666666666, ans=0.0
+2024-08-26 22:44:28,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=173434.66666666666, ans=0.2
+2024-08-26 22:44:47,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=173541.33333333334, ans=0.07
+2024-08-26 22:44:59,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=173594.66666666666, ans=0.125
+2024-08-26 22:45:07,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=173594.66666666666, ans=0.0
+2024-08-26 22:45:10,948 INFO [train.py:1114] (3/4) Epoch 14, batch 200, loss[loss=0.2118, simple_loss=0.2801, pruned_loss=0.05273, ctc_loss=0.09506, over 18104.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2703, pruned_loss=0.04763, ctc_loss=0.08918, over 2434650.11 frames. ], batch size: 85, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:45:14,590 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.393e+02 1.624e+02 1.885e+02 3.247e+02, threshold=3.247e+02, percent-clipped=1.0
+2024-08-26 22:45:20,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=173701.33333333334, ans=0.0
+2024-08-26 22:45:23,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.74 vs. limit=15.0
+2024-08-26 22:45:23,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173701.33333333334, ans=0.1
+2024-08-26 22:45:48,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=173808.0, ans=10.0
+2024-08-26 22:45:48,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.47 vs. limit=15.0
+2024-08-26 22:45:59,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=173861.33333333334, ans=0.125
+2024-08-26 22:46:04,222 INFO [train.py:1114] (3/4) Epoch 14, batch 250, loss[loss=0.2094, simple_loss=0.2849, pruned_loss=0.04869, ctc_loss=0.0914, over 19411.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2697, pruned_loss=0.04706, ctc_loss=0.0879, over 2753935.14 frames. ], batch size: 67, lr: 1.09e-02, grad_scale: 32.0
+2024-08-26 22:46:11,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173914.66666666666, ans=0.125
+2024-08-26 22:46:14,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.50 vs. limit=5.0
+2024-08-26 22:46:25,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=174021.33333333334, ans=0.125
+2024-08-26 22:46:41,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=174074.66666666666, ans=0.025
+2024-08-26 22:46:54,981 INFO [train.py:1114] (3/4) Epoch 14, batch 300, loss[loss=0.2083, simple_loss=0.2761, pruned_loss=0.05154, ctc_loss=0.09332, over 19518.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2692, pruned_loss=0.04689, ctc_loss=0.08744, over 2999168.00 frames. ], batch size: 61, lr: 1.09e-02, grad_scale: 16.0
+2024-08-26 22:46:56,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=174181.33333333334, ans=0.0
+2024-08-26 22:46:58,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=174181.33333333334, ans=0.125
+2024-08-26 22:46:59,580 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.470e+02 1.728e+02 2.225e+02 3.956e+02, threshold=3.457e+02, percent-clipped=2.0
+2024-08-26 22:47:22,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=174288.0, ans=0.0
+2024-08-26 22:47:39,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=174394.66666666666, ans=0.125
+2024-08-26 22:47:40,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.22 vs. limit=6.0
+2024-08-26 22:47:43,366 INFO [train.py:1114] (3/4) Epoch 14, batch 350, loss[loss=0.1758, simple_loss=0.2451, pruned_loss=0.03861, ctc_loss=0.07298, over 19738.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2694, pruned_loss=0.04666, ctc_loss=0.0871, over 3190097.11 frames. ], batch size: 48, lr: 1.08e-02, grad_scale: 16.0
+2024-08-26 22:48:08,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=174448.0, ans=0.0
+2024-08-26 22:48:22,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=174554.66666666666, ans=0.125
+2024-08-26 22:48:27,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=174554.66666666666, ans=0.0
+2024-08-26 22:48:30,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=174608.0, ans=0.125
+2024-08-26 22:48:30,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=174608.0, ans=0.2
+2024-08-26 22:48:48,270 INFO [train.py:1114] (3/4) Epoch 14, batch 400, loss[loss=0.1971, simple_loss=0.274, pruned_loss=0.04332, ctc_loss=0.08407, over 19499.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.269, pruned_loss=0.04635, ctc_loss=0.08653, over 3342697.91 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:48:52,774 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.491e+02 1.630e+02 1.842e+02 3.705e+02, threshold=3.261e+02, percent-clipped=1.0
+2024-08-26 22:49:00,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174768.0, ans=0.125
+2024-08-26 22:49:03,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=174768.0, ans=0.0
+2024-08-26 22:49:27,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=174874.66666666666, ans=0.07
+2024-08-26 22:49:28,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=174928.0, ans=0.125
+2024-08-26 22:49:29,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.28 vs. limit=12.0
+2024-08-26 22:49:39,165 INFO [train.py:1114] (3/4) Epoch 14, batch 450, loss[loss=0.1872, simple_loss=0.2668, pruned_loss=0.0393, ctc_loss=0.07255, over 19617.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2691, pruned_loss=0.04656, ctc_loss=0.08704, over 3451257.46 frames. ], batch size: 55, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:49:50,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.80 vs. limit=15.0
+2024-08-26 22:49:53,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.45 vs. limit=6.0
+2024-08-26 22:50:18,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175194.66666666666, ans=0.125
+2024-08-26 22:50:27,984 INFO [train.py:1114] (3/4) Epoch 14, batch 500, loss[loss=0.215, simple_loss=0.2848, pruned_loss=0.05292, ctc_loss=0.09859, over 19678.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2685, pruned_loss=0.04635, ctc_loss=0.08666, over 3547006.36 frames. ], batch size: 63, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:50:32,533 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.438e+02 1.690e+02 1.988e+02 3.244e+02, threshold=3.379e+02, percent-clipped=0.0
+2024-08-26 22:50:55,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175408.0, ans=0.125
+2024-08-26 22:51:10,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=175461.33333333334, ans=0.025
+2024-08-26 22:51:15,965 INFO [train.py:1114] (3/4) Epoch 14, batch 550, loss[loss=0.2216, simple_loss=0.2858, pruned_loss=0.05716, ctc_loss=0.1075, over 19229.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2689, pruned_loss=0.04662, ctc_loss=0.0872, over 3608381.59 frames. ], batch size: 71, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:51:29,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=175568.0, ans=0.0
+2024-08-26 22:51:29,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=175568.0, ans=0.025
+2024-08-26 22:51:29,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=175568.0, ans=0.05
+2024-08-26 22:52:15,481 INFO [train.py:1114] (3/4) Epoch 14, batch 600, loss[loss=0.1979, simple_loss=0.2761, pruned_loss=0.04427, ctc_loss=0.07789, over 19367.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2691, pruned_loss=0.04664, ctc_loss=0.08708, over 3664729.30 frames. ], batch size: 67, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:52:18,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=175781.33333333334, ans=0.0
+2024-08-26 22:52:19,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175781.33333333334, ans=0.1
+2024-08-26 22:52:20,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.434e+02 1.658e+02 1.951e+02 2.764e+02, threshold=3.317e+02, percent-clipped=0.0
+2024-08-26 22:52:23,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=175781.33333333334, ans=0.0
+2024-08-26 22:52:32,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=175834.66666666666, ans=0.05
+2024-08-26 22:52:37,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.59 vs. limit=6.0
+2024-08-26 22:53:02,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=12.0
+2024-08-26 22:53:17,361 INFO [train.py:1114] (3/4) Epoch 14, batch 650, loss[loss=0.1977, simple_loss=0.2719, pruned_loss=0.04483, ctc_loss=0.08464, over 19768.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2682, pruned_loss=0.04623, ctc_loss=0.08634, over 3715303.48 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:53:23,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=176048.0, ans=0.2
+2024-08-26 22:53:26,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.30 vs. limit=10.0
+2024-08-26 22:53:35,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176101.33333333334, ans=0.1
+2024-08-26 22:53:36,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.51 vs. limit=15.0
+2024-08-26 22:53:53,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=12.0
+2024-08-26 22:54:14,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=176208.0, ans=10.0
+2024-08-26 22:54:29,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=176261.33333333334, ans=0.025
+2024-08-26 22:54:37,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=176261.33333333334, ans=0.1
+2024-08-26 22:54:40,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176314.66666666666, ans=0.0
+2024-08-26 22:54:40,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176314.66666666666, ans=0.1
+2024-08-26 22:54:40,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.01 vs. limit=15.0
+2024-08-26 22:54:41,328 INFO [train.py:1114] (3/4) Epoch 14, batch 700, loss[loss=0.1937, simple_loss=0.2619, pruned_loss=0.04552, ctc_loss=0.08621, over 19748.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2687, pruned_loss=0.04624, ctc_loss=0.08664, over 3747071.82 frames. ], batch size: 51, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 22:54:48,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=176314.66666666666, ans=15.0
+2024-08-26 22:54:52,785 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.446e+02 1.597e+02 2.123e+02 3.826e+02, threshold=3.195e+02, percent-clipped=1.0
+2024-08-26 22:54:54,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=176314.66666666666, ans=0.025
+2024-08-26 22:55:22,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=176421.33333333334, ans=0.0
+2024-08-26 22:55:28,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=176421.33333333334, ans=0.95
+2024-08-26 22:57:52,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176474.66666666666, ans=0.125
+2024-08-26 22:58:12,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=176474.66666666666, ans=0.035
+2024-08-26 22:58:15,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=176474.66666666666, ans=0.025
+2024-08-26 22:59:22,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=176581.33333333334, ans=0.125
+2024-08-26 22:59:23,427 INFO [train.py:1114] (3/4) Epoch 14, batch 750, loss[loss=0.1902, simple_loss=0.2627, pruned_loss=0.04269, ctc_loss=0.08105, over 19497.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2686, pruned_loss=0.0463, ctc_loss=0.08669, over 3774604.89 frames. ], batch size: 54, lr: 1.08e-02, grad_scale: 16.0
+2024-08-26 23:00:33,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=176634.66666666666, ans=0.0
+2024-08-26 23:00:33,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0
+2024-08-26 23:00:42,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=176634.66666666666, ans=0.125
+2024-08-26 23:00:43,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=176634.66666666666, ans=0.0
+2024-08-26 23:00:59,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=176634.66666666666, ans=0.125
+2024-08-26 23:00:59,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=176634.66666666666, ans=0.04949747468305833
+2024-08-26 23:01:21,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176688.0, ans=0.125
+2024-08-26 23:01:52,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=176794.66666666666, ans=0.125
+2024-08-26 23:01:58,981 INFO [train.py:1114] (3/4) Epoch 14, batch 800, loss[loss=0.1761, simple_loss=0.2408, pruned_loss=0.04081, ctc_loss=0.07459, over 19821.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2686, pruned_loss=0.04642, ctc_loss=0.08663, over 3796395.50 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:02:11,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.464e+02 1.718e+02 2.120e+02 3.590e+02, threshold=3.437e+02, percent-clipped=3.0
+2024-08-26 23:04:45,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0
+2024-08-26 23:05:31,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=177008.0, ans=0.2
+2024-08-26 23:05:48,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0
+2024-08-26 23:05:52,829 INFO [train.py:1114] (3/4) Epoch 14, batch 850, loss[loss=0.2202, simple_loss=0.298, pruned_loss=0.05153, ctc_loss=0.09845, over 19649.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2683, pruned_loss=0.04634, ctc_loss=0.08636, over 3815210.30 frames. ], batch size: 59, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:06:15,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177221.33333333334, ans=0.0
+2024-08-26 23:06:24,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.85 vs. limit=12.0
+2024-08-26 23:06:27,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=177274.66666666666, ans=0.0
+2024-08-26 23:06:46,777 INFO [train.py:1114] (3/4) Epoch 14, batch 900, loss[loss=0.1745, simple_loss=0.2464, pruned_loss=0.03673, ctc_loss=0.07275, over 19814.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2693, pruned_loss=0.04682, ctc_loss=0.08715, over 3820149.52 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:06:52,129 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.429e+02 1.657e+02 1.986e+02 3.410e+02, threshold=3.315e+02, percent-clipped=0.0
+2024-08-26 23:06:53,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177381.33333333334, ans=0.0
+2024-08-26 23:07:00,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.90 vs. limit=22.5
+2024-08-26 23:07:14,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0
+2024-08-26 23:07:18,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.15 vs. limit=10.0
+2024-08-26 23:07:21,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.08 vs. limit=6.0
+2024-08-26 23:07:30,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.62 vs. limit=15.0
+2024-08-26 23:07:35,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=177594.66666666666, ans=0.125
+2024-08-26 23:07:37,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177648.0, ans=0.125
+2024-08-26 23:07:38,552 INFO [train.py:1114] (3/4) Epoch 14, batch 950, loss[loss=0.1768, simple_loss=0.2521, pruned_loss=0.03671, ctc_loss=0.07005, over 19503.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2697, pruned_loss=0.04694, ctc_loss=0.08739, over 3821034.01 frames. ], batch size: 49, lr: 1.08e-02, grad_scale: 32.0
+2024-08-26 23:07:48,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=177701.33333333334, ans=0.125
+2024-08-26 23:08:06,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177754.66666666666, ans=0.1
+2024-08-26 23:08:12,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177808.0, ans=0.125
+2024-08-26 23:08:14,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=177808.0, ans=0.0
+2024-08-26 23:08:25,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=177861.33333333334, ans=0.125
+2024-08-26 23:08:35,564 INFO [train.py:1114] (3/4) Epoch 14, batch 1000, loss[loss=0.2043, simple_loss=0.2706, pruned_loss=0.05002, ctc_loss=0.09494, over 19846.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2711, pruned_loss=0.04759, ctc_loss=0.08871, over 3817118.64 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:08:41,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.435e+02 1.639e+02 1.944e+02 3.185e+02, threshold=3.279e+02, percent-clipped=0.0
+2024-08-26 23:08:51,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177968.0, ans=0.125
+2024-08-26 23:09:01,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=178021.33333333334, ans=0.2
+2024-08-26 23:09:01,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=178021.33333333334, ans=0.125
+2024-08-26 23:09:14,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=178074.66666666666, ans=15.0
+2024-08-26 23:09:24,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=178128.0, ans=0.125
+2024-08-26 23:09:25,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=178128.0, ans=0.2
+2024-08-26 23:09:29,127 INFO [train.py:1114] (3/4) Epoch 14, batch 1050, loss[loss=0.1945, simple_loss=0.2706, pruned_loss=0.04262, ctc_loss=0.08307, over 19823.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2703, pruned_loss=0.04735, ctc_loss=0.08827, over 3824209.19 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:09:37,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.19 vs. limit=12.0
+2024-08-26 23:09:42,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.69 vs. limit=10.0
+2024-08-26 23:09:47,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=178234.66666666666, ans=0.0
+2024-08-26 23:09:47,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.75 vs. limit=15.0
+2024-08-26 23:10:18,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=178394.66666666666, ans=0.125
+2024-08-26 23:10:44,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=178394.66666666666, ans=0.0
+2024-08-26 23:10:47,590 INFO [train.py:1114] (3/4) Epoch 14, batch 1100, loss[loss=0.186, simple_loss=0.267, pruned_loss=0.03841, ctc_loss=0.07036, over 19572.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2697, pruned_loss=0.04679, ctc_loss=0.0873, over 3832090.41 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:10:53,005 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.389e+02 1.598e+02 1.774e+02 3.032e+02, threshold=3.197e+02, percent-clipped=0.0
+2024-08-26 23:10:59,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=178501.33333333334, ans=0.0
+2024-08-26 23:11:17,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178608.0, ans=0.1
+2024-08-26 23:11:31,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178661.33333333334, ans=0.125
+2024-08-26 23:11:37,949 INFO [train.py:1114] (3/4) Epoch 14, batch 1150, loss[loss=0.188, simple_loss=0.2642, pruned_loss=0.03993, ctc_loss=0.08001, over 19592.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2691, pruned_loss=0.04664, ctc_loss=0.08703, over 3830063.13 frames. ], batch size: 52, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:11:40,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178714.66666666666, ans=0.1
+2024-08-26 23:11:42,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178714.66666666666, ans=0.1
+2024-08-26 23:11:51,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178768.0, ans=0.125
+2024-08-26 23:12:05,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=178821.33333333334, ans=0.2
+2024-08-26 23:12:05,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=178874.66666666666, ans=0.025
+2024-08-26 23:12:14,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=178874.66666666666, ans=0.2
+2024-08-26 23:12:28,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178928.0, ans=0.125
+2024-08-26 23:12:31,163 INFO [train.py:1114] (3/4) Epoch 14, batch 1200, loss[loss=0.1981, simple_loss=0.2758, pruned_loss=0.04339, ctc_loss=0.08396, over 19835.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2702, pruned_loss=0.04704, ctc_loss=0.08804, over 3826323.75 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0
+2024-08-26 23:12:36,813 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.458e+02 1.687e+02 2.139e+02 4.936e+02, threshold=3.375e+02, percent-clipped=2.0
+2024-08-26 23:12:54,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.09 vs. limit=15.0
+2024-08-26 23:12:54,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179088.0, ans=0.1
+2024-08-26 23:13:20,040 INFO [train.py:1114] (3/4) Epoch 14, batch 1250, loss[loss=0.2112, simple_loss=0.2846, pruned_loss=0.05053, ctc_loss=0.09163, over 19526.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2706, pruned_loss=0.04699, ctc_loss=0.08774, over 3844490.95 frames. 
], batch size: 61, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:13:45,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=179354.66666666666, ans=0.2 +2024-08-26 23:13:50,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=179408.0, ans=0.125 +2024-08-26 23:14:10,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.68 vs. limit=8.0 +2024-08-26 23:14:12,514 INFO [train.py:1114] (3/4) Epoch 14, batch 1300, loss[loss=0.217, simple_loss=0.2835, pruned_loss=0.05362, ctc_loss=0.108, over 18977.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2696, pruned_loss=0.04654, ctc_loss=0.08696, over 3849016.59 frames. ], batch size: 76, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:14:14,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=179514.66666666666, ans=0.2 +2024-08-26 23:14:16,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=179514.66666666666, ans=0.025 +2024-08-26 23:14:19,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.402e+02 1.628e+02 1.914e+02 2.926e+02, threshold=3.256e+02, percent-clipped=0.0 +2024-08-26 23:14:25,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179568.0, ans=0.125 +2024-08-26 23:14:28,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=179568.0, ans=0.0 +2024-08-26 23:14:33,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179621.33333333334, ans=0.125 +2024-08-26 23:14:33,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179621.33333333334, ans=0.125 +2024-08-26 23:14:38,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.33 vs. limit=15.0 +2024-08-26 23:14:43,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=179674.66666666666, ans=0.0 +2024-08-26 23:14:58,660 INFO [train.py:1114] (3/4) Epoch 14, batch 1350, loss[loss=0.1783, simple_loss=0.2582, pruned_loss=0.03616, ctc_loss=0.06533, over 19782.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2691, pruned_loss=0.0463, ctc_loss=0.08653, over 3860373.05 frames. 
], batch size: 54, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:14:58,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=179781.33333333334, ans=0.0 +2024-08-26 23:16:02,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=179781.33333333334, ans=0.0 +2024-08-26 23:16:04,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=179834.66666666666, ans=0.0 +2024-08-26 23:16:29,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179834.66666666666, ans=0.125 +2024-08-26 23:16:34,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179888.0, ans=0.1 +2024-08-26 23:16:35,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=179888.0, ans=0.02 +2024-08-26 23:16:51,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=179994.66666666666, ans=0.0 +2024-08-26 23:16:59,404 INFO [train.py:1114] (3/4) Epoch 14, batch 1400, loss[loss=0.1791, simple_loss=0.2379, pruned_loss=0.04359, ctc_loss=0.08294, over 19689.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2688, pruned_loss=0.04621, ctc_loss=0.08642, over 3866720.71 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:17:07,636 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.482e+02 1.624e+02 2.003e+02 3.142e+02, threshold=3.248e+02, percent-clipped=0.0 +2024-08-26 23:17:14,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180101.33333333334, ans=0.1 +2024-08-26 23:17:17,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.46 vs. limit=15.0 +2024-08-26 23:17:25,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=180154.66666666666, ans=0.025 +2024-08-26 23:17:50,511 INFO [train.py:1114] (3/4) Epoch 14, batch 1450, loss[loss=0.1999, simple_loss=0.2722, pruned_loss=0.04706, ctc_loss=0.08363, over 19664.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2697, pruned_loss=0.04646, ctc_loss=0.08691, over 3864940.50 frames. 
], batch size: 63, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:17:55,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=180314.66666666666, ans=0.125 +2024-08-26 23:18:03,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180314.66666666666, ans=0.0 +2024-08-26 23:18:12,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180368.0, ans=0.125 +2024-08-26 23:18:30,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=180474.66666666666, ans=0.125 +2024-08-26 23:18:36,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=180528.0, ans=0.125 +2024-08-26 23:18:39,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.41 vs. limit=15.0 +2024-08-26 23:18:43,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.58 vs. limit=10.0 +2024-08-26 23:18:44,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=180528.0, ans=0.2 +2024-08-26 23:18:46,192 INFO [train.py:1114] (3/4) Epoch 14, batch 1500, loss[loss=0.2123, simple_loss=0.2868, pruned_loss=0.05121, ctc_loss=0.0886, over 19584.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2695, pruned_loss=0.04631, ctc_loss=0.08637, over 3864728.47 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:18:52,949 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.461e+02 1.607e+02 1.928e+02 3.862e+02, threshold=3.214e+02, percent-clipped=2.0 +2024-08-26 23:19:01,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=180634.66666666666, ans=0.0 +2024-08-26 23:19:18,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=180741.33333333334, ans=0.125 +2024-08-26 23:20:20,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180794.66666666666, ans=0.125 +2024-08-26 23:20:24,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=180794.66666666666, ans=0.2 +2024-08-26 23:20:30,222 INFO [train.py:1114] (3/4) Epoch 14, batch 1550, loss[loss=0.213, simple_loss=0.2856, pruned_loss=0.05153, ctc_loss=0.09349, over 19609.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2697, pruned_loss=0.0467, ctc_loss=0.08706, over 3848764.53 frames. 
], batch size: 60, lr: 1.07e-02, grad_scale: 16.0 +2024-08-26 23:20:34,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=180848.0, ans=0.0 +2024-08-26 23:20:49,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=180954.66666666666, ans=0.125 +2024-08-26 23:21:02,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=181008.0, ans=0.0 +2024-08-26 23:21:07,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181008.0, ans=0.125 +2024-08-26 23:21:17,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.40 vs. limit=22.5 +2024-08-26 23:21:18,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.62 vs. limit=15.0 +2024-08-26 23:21:20,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.69 vs. limit=15.0 +2024-08-26 23:21:20,671 INFO [train.py:1114] (3/4) Epoch 14, batch 1600, loss[loss=0.1909, simple_loss=0.2649, pruned_loss=0.04189, ctc_loss=0.08304, over 19841.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2692, pruned_loss=0.04654, ctc_loss=0.08688, over 3837124.52 frames. ], batch size: 57, lr: 1.07e-02, grad_scale: 32.0 +2024-08-26 23:21:22,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=181114.66666666666, ans=0.125 +2024-08-26 23:21:27,130 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.461e+02 1.627e+02 1.971e+02 3.033e+02, threshold=3.255e+02, percent-clipped=0.0 +2024-08-26 23:21:38,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181168.0, ans=0.125 +2024-08-26 23:23:38,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=181274.66666666666, ans=0.2 +2024-08-26 23:23:42,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=181274.66666666666, ans=0.0 +2024-08-26 23:23:46,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.73 vs. limit=15.0 +2024-08-26 23:23:54,621 INFO [train.py:1114] (3/4) Epoch 14, batch 1650, loss[loss=0.1865, simple_loss=0.2643, pruned_loss=0.03871, ctc_loss=0.07817, over 19635.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2692, pruned_loss=0.04656, ctc_loss=0.0869, over 3834583.61 frames. 
], batch size: 59, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:24:02,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=181381.33333333334, ans=0.125 +2024-08-26 23:24:15,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=181488.0, ans=0.0 +2024-08-26 23:24:19,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181488.0, ans=0.1 +2024-08-26 23:24:19,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=181488.0, ans=0.2 +2024-08-26 23:24:31,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=181594.66666666666, ans=0.0 +2024-08-26 23:24:34,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=181594.66666666666, ans=0.125 +2024-08-26 23:24:39,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.78 vs. limit=6.0 +2024-08-26 23:24:40,743 INFO [train.py:1114] (3/4) Epoch 14, batch 1700, loss[loss=0.1779, simple_loss=0.2417, pruned_loss=0.04174, ctc_loss=0.07665, over 19673.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2689, pruned_loss=0.04629, ctc_loss=0.08623, over 3848471.89 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:24:47,149 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.441e+02 1.691e+02 2.079e+02 3.382e+02, threshold=3.381e+02, percent-clipped=3.0 +2024-08-26 23:24:49,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=181701.33333333334, ans=0.2 +2024-08-26 23:24:51,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=181701.33333333334, ans=0.0 +2024-08-26 23:25:04,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=181754.66666666666, ans=0.025 +2024-08-26 23:25:14,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=181808.0, ans=0.04949747468305833 +2024-08-26 23:25:23,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181861.33333333334, ans=0.1 +2024-08-26 23:25:25,115 INFO [train.py:1114] (3/4) Epoch 14, batch 1750, loss[loss=0.1828, simple_loss=0.2452, pruned_loss=0.04376, ctc_loss=0.08219, over 19696.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2685, pruned_loss=0.04591, ctc_loss=0.08579, over 3854184.62 frames. 
], batch size: 45, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:25:35,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=181968.0, ans=0.0 +2024-08-26 23:25:43,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=182021.33333333334, ans=0.0 +2024-08-26 23:25:54,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=182074.66666666666, ans=0.125 +2024-08-26 23:26:13,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=182128.0, ans=0.0 +2024-08-26 23:26:19,461 INFO [train.py:1114] (3/4) Epoch 14, batch 1800, loss[loss=0.1916, simple_loss=0.2719, pruned_loss=0.04037, ctc_loss=0.07641, over 19601.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2686, pruned_loss=0.04597, ctc_loss=0.08586, over 3855590.03 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:26:19,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=182181.33333333334, ans=0.125 +2024-08-26 23:26:23,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=182181.33333333334, ans=0.125 +2024-08-26 23:26:26,557 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.533e+02 1.884e+02 2.505e+02 4.097e+02, threshold=3.767e+02, percent-clipped=5.0 +2024-08-26 23:26:43,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.36 vs. limit=22.5 +2024-08-26 23:26:44,320 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:26:54,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=182341.33333333334, ans=0.0 +2024-08-26 23:27:05,184 INFO [train.py:1114] (3/4) Epoch 14, batch 1850, loss[loss=0.1927, simple_loss=0.2702, pruned_loss=0.04222, ctc_loss=0.07667, over 19599.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2687, pruned_loss=0.04592, ctc_loss=0.08566, over 3858221.10 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:27:05,669 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=12.0 +2024-08-26 23:28:45,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=182501.33333333334, ans=0.95 +2024-08-26 23:28:45,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=182501.33333333334, ans=0.0 +2024-08-26 23:28:46,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.00 vs. limit=22.5 +2024-08-26 23:28:53,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.91 vs. limit=15.0 +2024-08-26 23:29:05,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. 
limit=6.0 +2024-08-26 23:29:14,709 INFO [train.py:1114] (3/4) Epoch 14, batch 1900, loss[loss=0.2179, simple_loss=0.2894, pruned_loss=0.05253, ctc_loss=0.1031, over 19678.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2697, pruned_loss=0.04639, ctc_loss=0.08659, over 3862304.89 frames. ], batch size: 59, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:29:19,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.12 vs. limit=15.0 +2024-08-26 23:29:21,588 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.217e+02 1.441e+02 1.690e+02 2.071e+02 3.452e+02, threshold=3.379e+02, percent-clipped=0.0 +2024-08-26 23:29:36,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.05 vs. limit=22.5 +2024-08-26 23:29:37,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=182821.33333333334, ans=0.0 +2024-08-26 23:29:37,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=182821.33333333334, ans=0.2 +2024-08-26 23:29:38,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=182821.33333333334, ans=0.2 +2024-08-26 23:29:40,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=182874.66666666666, ans=0.125 +2024-08-26 23:29:46,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.46 vs. limit=22.5 +2024-08-26 23:29:56,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=182928.0, ans=0.125 +2024-08-26 23:29:57,797 INFO [train.py:1114] (3/4) Epoch 14, batch 1950, loss[loss=0.1709, simple_loss=0.2482, pruned_loss=0.03347, ctc_loss=0.06648, over 19580.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2708, pruned_loss=0.04666, ctc_loss=0.08717, over 3871186.13 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:30:28,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=183141.33333333334, ans=0.125 +2024-08-26 23:30:28,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=183141.33333333334, ans=0.0 +2024-08-26 23:30:43,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183194.66666666666, ans=0.1 +2024-08-26 23:30:44,845 INFO [train.py:1114] (3/4) Epoch 14, batch 2000, loss[loss=0.1687, simple_loss=0.2434, pruned_loss=0.0342, ctc_loss=0.0638, over 19655.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.271, pruned_loss=0.04688, ctc_loss=0.08764, over 3856216.08 frames. 
], batch size: 45, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:30:52,056 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.411e+02 1.571e+02 1.845e+02 2.838e+02, threshold=3.143e+02, percent-clipped=0.0 +2024-08-26 23:31:06,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=183354.66666666666, ans=0.0 +2024-08-26 23:31:10,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0 +2024-08-26 23:31:12,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=183408.0, ans=0.2 +2024-08-26 23:31:13,873 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:31:56,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183408.0, ans=0.1 +2024-08-26 23:31:57,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.04 vs. limit=12.0 +2024-08-26 23:32:03,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=183461.33333333334, ans=0.05 +2024-08-26 23:32:08,135 INFO [train.py:1114] (3/4) Epoch 14, batch 2050, loss[loss=0.184, simple_loss=0.2481, pruned_loss=0.04415, ctc_loss=0.07873, over 19725.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2702, pruned_loss=0.04681, ctc_loss=0.08741, over 3853148.84 frames. ], batch size: 47, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:32:16,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183568.0, ans=0.1 +2024-08-26 23:32:30,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=183621.33333333334, ans=0.05 +2024-08-26 23:32:31,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=183621.33333333334, ans=0.0 +2024-08-26 23:32:42,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183728.0, ans=0.125 +2024-08-26 23:32:42,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=183728.0, ans=0.2 +2024-08-26 23:32:51,508 INFO [train.py:1114] (3/4) Epoch 14, batch 2100, loss[loss=0.1779, simple_loss=0.2627, pruned_loss=0.03373, ctc_loss=0.06399, over 19763.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2691, pruned_loss=0.04614, ctc_loss=0.08621, over 3859523.11 frames. 
], batch size: 54, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:32:58,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.491e+02 1.652e+02 1.860e+02 2.729e+02, threshold=3.304e+02, percent-clipped=0.0 +2024-08-26 23:33:55,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=183888.0, ans=0.125 +2024-08-26 23:34:01,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=183941.33333333334, ans=0.125 +2024-08-26 23:34:15,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184048.0, ans=0.1 +2024-08-26 23:34:16,392 INFO [train.py:1114] (3/4) Epoch 14, batch 2150, loss[loss=0.1968, simple_loss=0.2693, pruned_loss=0.04556, ctc_loss=0.08308, over 19831.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2682, pruned_loss=0.04567, ctc_loss=0.08536, over 3870758.51 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:34:20,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=184048.0, ans=0.015 +2024-08-26 23:34:21,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.71 vs. limit=15.0 +2024-08-26 23:34:27,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=184101.33333333334, ans=0.0 +2024-08-26 23:34:59,961 INFO [train.py:1114] (3/4) Epoch 14, batch 2200, loss[loss=0.2045, simple_loss=0.2817, pruned_loss=0.04619, ctc_loss=0.08715, over 19593.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2685, pruned_loss=0.04582, ctc_loss=0.08564, over 3869156.33 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 32.0 +2024-08-26 23:35:05,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184314.66666666666, ans=0.125 +2024-08-26 23:35:06,950 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.447e+02 1.750e+02 2.552e+02 4.295e+02, threshold=3.499e+02, percent-clipped=8.0 +2024-08-26 23:35:08,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=184368.0, ans=0.0 +2024-08-26 23:35:15,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184368.0, ans=0.1 +2024-08-26 23:35:21,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=184421.33333333334, ans=0.0 +2024-08-26 23:35:25,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=184474.66666666666, ans=0.025 +2024-08-26 23:35:27,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.60 vs. 
limit=15.0 +2024-08-26 23:35:40,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=184528.0, ans=0.025 +2024-08-26 23:35:41,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=184528.0, ans=0.125 +2024-08-26 23:35:41,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=184528.0, ans=0.2 +2024-08-26 23:35:43,854 INFO [train.py:1114] (3/4) Epoch 14, batch 2250, loss[loss=0.2218, simple_loss=0.2871, pruned_loss=0.05731, ctc_loss=0.1045, over 19619.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2686, pruned_loss=0.04589, ctc_loss=0.08564, over 3868179.89 frames. ], batch size: 55, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:36:02,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=184688.0, ans=0.0 +2024-08-26 23:36:05,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=184688.0, ans=0.125 +2024-08-26 23:36:06,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=184688.0, ans=0.2 +2024-08-26 23:36:13,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.41 vs. limit=15.0 +2024-08-26 23:36:20,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184794.66666666666, ans=0.125 +2024-08-26 23:36:26,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184848.0, ans=0.125 +2024-08-26 23:36:27,365 INFO [train.py:1114] (3/4) Epoch 14, batch 2300, loss[loss=0.205, simple_loss=0.2699, pruned_loss=0.0508, ctc_loss=0.09617, over 19511.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.268, pruned_loss=0.04593, ctc_loss=0.08565, over 3862283.03 frames. ], batch size: 49, lr: 1.06e-02, grad_scale: 16.0 +2024-08-26 23:36:30,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184848.0, ans=0.125 +2024-08-26 23:36:31,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=184848.0, ans=0.07 +2024-08-26 23:36:35,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.662e+02 2.114e+02 3.033e+02, threshold=3.324e+02, percent-clipped=0.0 +2024-08-26 23:36:37,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=184901.33333333334, ans=0.2 +2024-08-26 23:36:38,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=184901.33333333334, ans=0.09899494936611666 +2024-08-26 23:36:51,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=184954.66666666666, ans=0.0 +2024-08-26 23:36:52,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.25 vs. 
limit=15.0 +2024-08-26 23:36:53,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185008.0, ans=0.125 +2024-08-26 23:37:01,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=185061.33333333334, ans=0.125 +2024-08-26 23:37:10,954 INFO [train.py:1114] (3/4) Epoch 14, batch 2350, loss[loss=0.217, simple_loss=0.2805, pruned_loss=0.05605, ctc_loss=0.1034, over 19685.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2682, pruned_loss=0.04624, ctc_loss=0.08626, over 3864424.63 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:37:18,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185114.66666666666, ans=0.1 +2024-08-26 23:37:32,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185221.33333333334, ans=0.125 +2024-08-26 23:37:33,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=185221.33333333334, ans=0.0 +2024-08-26 23:37:46,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=185328.0, ans=0.0 +2024-08-26 23:37:55,036 INFO [train.py:1114] (3/4) Epoch 14, batch 2400, loss[loss=0.2344, simple_loss=0.2934, pruned_loss=0.06431, ctc_loss=0.117, over 19265.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2707, pruned_loss=0.04741, ctc_loss=0.08833, over 3858364.47 frames. ], batch size: 71, lr: 1.05e-02, grad_scale: 32.0 +2024-08-26 23:38:00,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=185381.33333333334, ans=0.125 +2024-08-26 23:38:02,846 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.569e+02 1.843e+02 2.357e+02 3.475e+02, threshold=3.685e+02, percent-clipped=2.0 +2024-08-26 23:38:11,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=185488.0, ans=0.0 +2024-08-26 23:38:16,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185488.0, ans=0.125 +2024-08-26 23:38:20,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=185541.33333333334, ans=0.2 +2024-08-26 23:38:22,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=15.0 +2024-08-26 23:38:28,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185541.33333333334, ans=0.0 +2024-08-26 23:38:29,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=185594.66666666666, ans=0.2 +2024-08-26 23:38:30,552 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.76 vs. limit=22.5 +2024-08-26 23:38:39,320 INFO [train.py:1114] (3/4) Epoch 14, batch 2450, loss[loss=0.2599, simple_loss=0.3008, pruned_loss=0.08077, ctc_loss=0.1437, over 13755.00 frames. 
], tot_loss[loss=0.2063, simple_loss=0.2746, pruned_loss=0.05023, ctc_loss=0.09377, over 3730663.52 frames. ], batch size: 140, lr: 1.05e-02, grad_scale: 16.0 +2024-08-26 23:38:42,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.27 vs. limit=22.5 +2024-08-26 23:39:11,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=185808.0, ans=0.0 +2024-08-26 23:40:44,499 INFO [train.py:1114] (3/4) Epoch 15, batch 0, loss[loss=0.1839, simple_loss=0.2484, pruned_loss=0.04272, ctc_loss=0.08513, over 19803.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2484, pruned_loss=0.04272, ctc_loss=0.08513, over 19803.00 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 32.0 +2024-08-26 23:40:46,075 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-26 23:41:25,228 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1751, simple_loss=0.2686, pruned_loss=0.03035, ctc_loss=0.05216, over 944034.00 frames. +2024-08-26 23:41:25,229 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB +2024-08-26 23:41:32,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=185856.0, ans=0.125 +2024-08-26 23:41:46,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.06 vs. limit=10.0 +2024-08-26 23:41:47,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=185909.33333333334, ans=0.125 +2024-08-26 23:42:02,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.67 vs. limit=22.5 +2024-08-26 23:42:32,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=185962.66666666666, ans=0.07 +2024-08-26 23:42:34,503 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.661e+02 1.811e+02 2.041e+02 3.400e+02, threshold=3.623e+02, percent-clipped=0.0 +2024-08-26 23:43:35,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=185962.66666666666, ans=0.125 +2024-08-26 23:45:36,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=186069.33333333334, ans=0.07 +2024-08-26 23:46:22,009 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-26 23:46:24,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=186069.33333333334, ans=0.025 +2024-08-26 23:49:22,241 INFO [train.py:1114] (3/4) Epoch 15, batch 50, loss[loss=0.1802, simple_loss=0.2521, pruned_loss=0.03939, ctc_loss=0.07383, over 19728.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.271, pruned_loss=0.04664, ctc_loss=0.08764, over 845438.59 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 16.0 +2024-08-26 23:50:26,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. 
limit=6.0 +2024-08-26 23:52:29,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186282.66666666666, ans=0.125 +2024-08-26 23:58:27,677 INFO [train.py:1114] (3/4) Epoch 15, batch 100, loss[loss=0.1879, simple_loss=0.2639, pruned_loss=0.04023, ctc_loss=0.0786, over 19720.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2726, pruned_loss=0.04705, ctc_loss=0.08834, over 1500617.17 frames. ], batch size: 51, lr: 1.02e-02, grad_scale: 16.0 +2024-08-27 00:01:25,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=186389.33333333334, ans=0.2 +2024-08-27 00:06:07,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186442.66666666666, ans=0.1 +2024-08-27 00:06:52,637 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.493e+02 1.771e+02 2.166e+02 3.428e+02, threshold=3.543e+02, percent-clipped=0.0 +2024-08-27 00:06:53,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-08-27 00:09:14,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=186496.0, ans=0.2 +2024-08-27 00:12:03,605 INFO [train.py:1114] (3/4) Epoch 15, batch 150, loss[loss=0.1773, simple_loss=0.2501, pruned_loss=0.03806, ctc_loss=0.07098, over 19726.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2699, pruned_loss=0.0459, ctc_loss=0.08583, over 2029400.20 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:12:29,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=186709.33333333334, ans=0.025 +2024-08-27 00:12:29,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.23 vs. limit=22.5 +2024-08-27 00:13:54,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=186709.33333333334, ans=0.125 +2024-08-27 00:14:04,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=186762.66666666666, ans=0.125 +2024-08-27 00:14:12,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-27 00:17:05,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=186869.33333333334, ans=0.2 +2024-08-27 00:17:10,146 INFO [train.py:1114] (3/4) Epoch 15, batch 200, loss[loss=0.2019, simple_loss=0.2739, pruned_loss=0.04683, ctc_loss=0.09068, over 18291.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2683, pruned_loss=0.04577, ctc_loss=0.08544, over 2436424.74 frames. 
], batch size: 85, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:17:28,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=186922.66666666666, ans=0.0 +2024-08-27 00:17:54,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=187029.33333333334, ans=0.0 +2024-08-27 00:17:54,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=187029.33333333334, ans=0.2 +2024-08-27 00:17:54,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=187029.33333333334, ans=0.025 +2024-08-27 00:17:59,325 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.435e+02 1.602e+02 1.959e+02 3.588e+02, threshold=3.205e+02, percent-clipped=1.0 +2024-08-27 00:18:47,710 INFO [train.py:1114] (3/4) Epoch 15, batch 250, loss[loss=0.1917, simple_loss=0.2712, pruned_loss=0.04071, ctc_loss=0.0767, over 19362.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2687, pruned_loss=0.04569, ctc_loss=0.08554, over 2756645.61 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:20:04,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=187242.66666666666, ans=15.0 +2024-08-27 00:20:19,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=187349.33333333334, ans=0.125 +2024-08-27 00:20:34,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187402.66666666666, ans=0.125 +2024-08-27 00:21:11,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=187456.0, ans=0.125 +2024-08-27 00:21:12,350 INFO [train.py:1114] (3/4) Epoch 15, batch 300, loss[loss=0.2137, simple_loss=0.2888, pruned_loss=0.05056, ctc_loss=0.09344, over 19527.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.268, pruned_loss=0.04548, ctc_loss=0.08509, over 3001399.26 frames. ], batch size: 61, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:21:19,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=187456.0, ans=0.2 +2024-08-27 00:22:03,850 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.482e+02 1.757e+02 2.250e+02 4.561e+02, threshold=3.514e+02, percent-clipped=7.0 +2024-08-27 00:22:10,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=187562.66666666666, ans=0.025 +2024-08-27 00:22:31,079 INFO [train.py:1114] (3/4) Epoch 15, batch 350, loss[loss=0.1547, simple_loss=0.2287, pruned_loss=0.02913, ctc_loss=0.05621, over 19756.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.268, pruned_loss=0.04519, ctc_loss=0.0846, over 3191349.76 frames. 
], batch size: 48, lr: 1.01e-02, grad_scale: 16.0 +2024-08-27 00:23:22,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=187722.66666666666, ans=0.2 +2024-08-27 00:24:58,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=187829.33333333334, ans=0.125 +2024-08-27 00:24:59,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.96 vs. limit=10.0 +2024-08-27 00:25:03,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=187829.33333333334, ans=0.0 +2024-08-27 00:25:05,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=187829.33333333334, ans=0.5 +2024-08-27 00:25:06,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=187882.66666666666, ans=0.0 +2024-08-27 00:25:25,433 INFO [train.py:1114] (3/4) Epoch 15, batch 400, loss[loss=0.1983, simple_loss=0.2785, pruned_loss=0.04271, ctc_loss=0.08147, over 19495.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2677, pruned_loss=0.04503, ctc_loss=0.0844, over 3343169.69 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:25:26,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=187989.33333333334, ans=0.125 +2024-08-27 00:25:30,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=187989.33333333334, ans=0.0 +2024-08-27 00:25:36,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=188042.66666666666, ans=0.125 +2024-08-27 00:25:46,855 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.414e+02 1.733e+02 2.120e+02 3.671e+02, threshold=3.466e+02, percent-clipped=1.0 +2024-08-27 00:26:33,907 INFO [train.py:1114] (3/4) Epoch 15, batch 450, loss[loss=0.1989, simple_loss=0.2722, pruned_loss=0.04591, ctc_loss=0.08475, over 19615.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2678, pruned_loss=0.0451, ctc_loss=0.0844, over 3451307.41 frames. ], batch size: 55, lr: 1.01e-02, grad_scale: 32.0 +2024-08-27 00:26:34,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=188256.0, ans=0.0 +2024-08-27 00:26:46,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=188256.0, ans=0.2 +2024-08-27 00:27:55,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=188469.33333333334, ans=0.125 +2024-08-27 00:27:58,672 INFO [train.py:1114] (3/4) Epoch 15, batch 500, loss[loss=0.201, simple_loss=0.2767, pruned_loss=0.04595, ctc_loss=0.0838, over 19672.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2671, pruned_loss=0.04498, ctc_loss=0.08409, over 3546011.40 frames. 
], batch size: 63, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:28:08,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=188522.66666666666, ans=0.95
+2024-08-27 00:28:25,443 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.423e+02 1.716e+02 2.052e+02 3.766e+02, threshold=3.431e+02, percent-clipped=1.0
+2024-08-27 00:28:26,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=188629.33333333334, ans=0.2
+2024-08-27 00:28:26,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188629.33333333334, ans=0.1
+2024-08-27 00:28:26,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.52 vs. limit=22.5
+2024-08-27 00:28:38,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=188682.66666666666, ans=0.125
+2024-08-27 00:28:48,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.46 vs. limit=12.0
+2024-08-27 00:28:49,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=188736.0, ans=0.125
+2024-08-27 00:28:52,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188736.0, ans=0.1
+2024-08-27 00:28:56,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=188789.33333333334, ans=0.04949747468305833
+2024-08-27 00:28:56,819 INFO [train.py:1114] (3/4) Epoch 15, batch 550, loss[loss=0.2244, simple_loss=0.2904, pruned_loss=0.05715, ctc_loss=0.1103, over 19330.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.267, pruned_loss=0.04496, ctc_loss=0.08408, over 3607152.83 frames. ], batch size: 71, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:29:36,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=188789.33333333334, ans=0.0
+2024-08-27 00:29:41,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.97 vs. limit=15.0
+2024-08-27 00:29:52,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188896.0, ans=0.125
+2024-08-27 00:29:56,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188896.0, ans=0.1
+2024-08-27 00:30:01,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188949.33333333334, ans=0.125
+2024-08-27 00:30:05,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=188949.33333333334, ans=0.125
+2024-08-27 00:30:17,679 INFO [train.py:1114] (3/4) Epoch 15, batch 600, loss[loss=0.2099, simple_loss=0.2859, pruned_loss=0.04807, ctc_loss=0.0942, over 19413.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2676, pruned_loss=0.04515, ctc_loss=0.08412, over 3665156.89 frames. ], batch size: 67, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:30:24,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=189056.0, ans=0.0
+2024-08-27 00:31:14,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189109.33333333334, ans=0.125
+2024-08-27 00:31:18,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.468e+02 1.719e+02 2.297e+02 4.329e+02, threshold=3.438e+02, percent-clipped=2.0
+2024-08-27 00:31:19,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189162.66666666666, ans=0.1
+2024-08-27 00:31:21,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=189162.66666666666, ans=0.025
+2024-08-27 00:31:23,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=189162.66666666666, ans=0.125
+2024-08-27 00:31:45,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=15.0
+2024-08-27 00:31:52,675 INFO [train.py:1114] (3/4) Epoch 15, batch 650, loss[loss=0.1804, simple_loss=0.2569, pruned_loss=0.03706, ctc_loss=0.07449, over 19787.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2667, pruned_loss=0.04458, ctc_loss=0.0833, over 3715693.55 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:32:01,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=189376.0, ans=0.125
+2024-08-27 00:32:04,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=189376.0, ans=0.0
+2024-08-27 00:32:08,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=189376.0, ans=0.0
+2024-08-27 00:32:22,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189482.66666666666, ans=0.125
+2024-08-27 00:32:26,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.58 vs. limit=15.0
+2024-08-27 00:33:03,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=189589.33333333334, ans=0.0
+2024-08-27 00:33:04,265 INFO [train.py:1114] (3/4) Epoch 15, batch 700, loss[loss=0.1912, simple_loss=0.2588, pruned_loss=0.04528, ctc_loss=0.08248, over 19715.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2672, pruned_loss=0.04491, ctc_loss=0.08383, over 3748440.39 frames. ], batch size: 51, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:34:00,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=189642.66666666666, ans=0.04949747468305833
+2024-08-27 00:34:03,649 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.548e+02 1.878e+02 2.334e+02 4.066e+02, threshold=3.756e+02, percent-clipped=4.0
+2024-08-27 00:34:05,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=189696.0, ans=0.5
+2024-08-27 00:34:08,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189696.0, ans=0.1
+2024-08-27 00:34:09,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=189696.0, ans=0.5
+2024-08-27 00:35:14,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.76 vs. limit=15.0
+2024-08-27 00:35:17,170 INFO [train.py:1114] (3/4) Epoch 15, batch 750, loss[loss=0.1933, simple_loss=0.2689, pruned_loss=0.04239, ctc_loss=0.08248, over 19493.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2667, pruned_loss=0.0448, ctc_loss=0.08349, over 3775284.96 frames. ], batch size: 54, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:35:19,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=189856.0, ans=0.035
+2024-08-27 00:35:21,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189856.0, ans=0.125
+2024-08-27 00:35:32,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189909.33333333334, ans=0.1
+2024-08-27 00:35:45,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=190016.0, ans=0.07
+2024-08-27 00:36:05,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-08-27 00:36:06,221 INFO [train.py:1114] (3/4) Epoch 15, batch 800, loss[loss=0.1808, simple_loss=0.2461, pruned_loss=0.04227, ctc_loss=0.0772, over 19789.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2665, pruned_loss=0.04471, ctc_loss=0.08344, over 3796497.36 frames. ], batch size: 49, lr: 1.01e-02, grad_scale: 32.0
+2024-08-27 00:36:11,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=190122.66666666666, ans=0.0
+2024-08-27 00:36:11,257 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.51 vs. limit=6.0
+2024-08-27 00:36:27,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=15.0
+2024-08-27 00:36:29,667 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.516e+02 1.778e+02 2.217e+02 3.654e+02, threshold=3.555e+02, percent-clipped=0.0
+2024-08-27 00:36:29,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=190229.33333333334, ans=0.125
+2024-08-27 00:36:30,869 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 00:36:40,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.31 vs. limit=15.0
+2024-08-27 00:36:51,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=190336.0, ans=0.125
+2024-08-27 00:36:54,849 INFO [train.py:1114] (3/4) Epoch 15, batch 850, loss[loss=0.2034, simple_loss=0.2804, pruned_loss=0.04642, ctc_loss=0.08379, over 19675.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2662, pruned_loss=0.04466, ctc_loss=0.08333, over 3815182.91 frames. ], batch size: 59, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:37:00,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=190389.33333333334, ans=0.125
+2024-08-27 00:37:13,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190442.66666666666, ans=0.0
+2024-08-27 00:37:26,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=190549.33333333334, ans=0.025
+2024-08-27 00:37:34,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=190602.66666666666, ans=0.2
+2024-08-27 00:37:46,588 INFO [train.py:1114] (3/4) Epoch 15, batch 900, loss[loss=0.17, simple_loss=0.2478, pruned_loss=0.03337, ctc_loss=0.06343, over 19821.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2667, pruned_loss=0.04501, ctc_loss=0.08411, over 3818290.68 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:38:12,638 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.396e+02 1.546e+02 1.855e+02 3.193e+02, threshold=3.091e+02, percent-clipped=0.0
+2024-08-27 00:38:13,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=190762.66666666666, ans=0.2
+2024-08-27 00:38:23,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=190816.0, ans=0.0
+2024-08-27 00:38:42,119 INFO [train.py:1114] (3/4) Epoch 15, batch 950, loss[loss=0.178, simple_loss=0.2522, pruned_loss=0.0374, ctc_loss=0.07228, over 19516.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2668, pruned_loss=0.04502, ctc_loss=0.08421, over 3821023.95 frames. ], batch size: 49, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:38:43,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=190922.66666666666, ans=0.025
+2024-08-27 00:38:44,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=190922.66666666666, ans=0.09899494936611666
+2024-08-27 00:38:51,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=190922.66666666666, ans=0.125
+2024-08-27 00:39:12,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=191029.33333333334, ans=0.125
+2024-08-27 00:39:17,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=191082.66666666666, ans=10.0
+2024-08-27 00:39:32,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=191136.0, ans=0.125
+2024-08-27 00:39:37,074 INFO [train.py:1114] (3/4) Epoch 15, batch 1000, loss[loss=0.175, simple_loss=0.2506, pruned_loss=0.0363, ctc_loss=0.06692, over 19857.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2672, pruned_loss=0.04538, ctc_loss=0.08481, over 3817004.52 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:39:42,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191189.33333333334, ans=0.125
+2024-08-27 00:40:00,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.403e+02 1.586e+02 1.924e+02 3.101e+02, threshold=3.172e+02, percent-clipped=1.0
+2024-08-27 00:40:00,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=191296.0, ans=0.0
+2024-08-27 00:40:03,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=191296.0, ans=0.2
+2024-08-27 00:40:05,221 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 00:40:09,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191349.33333333334, ans=0.125
+2024-08-27 00:40:10,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0
+2024-08-27 00:40:12,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191349.33333333334, ans=0.1
+2024-08-27 00:40:14,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=191349.33333333334, ans=0.025
+2024-08-27 00:40:25,461 INFO [train.py:1114] (3/4) Epoch 15, batch 1050, loss[loss=0.1965, simple_loss=0.2787, pruned_loss=0.04127, ctc_loss=0.07943, over 19829.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2667, pruned_loss=0.04525, ctc_loss=0.0846, over 3822843.87 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:40:56,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191616.0, ans=0.125
+2024-08-27 00:40:58,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191616.0, ans=0.0
+2024-08-27 00:41:14,672 INFO [train.py:1114] (3/4) Epoch 15, batch 1100, loss[loss=0.1911, simple_loss=0.264, pruned_loss=0.04316, ctc_loss=0.07958, over 19589.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2667, pruned_loss=0.04496, ctc_loss=0.08398, over 3831076.80 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:41:16,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=191722.66666666666, ans=0.125
+2024-08-27 00:41:31,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=191776.0, ans=0.125
+2024-08-27 00:41:31,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191776.0, ans=0.0
+2024-08-27 00:41:33,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=191829.33333333334, ans=0.0
+2024-08-27 00:41:35,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0
+2024-08-27 00:41:36,206 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.518e+02 1.811e+02 2.066e+02 3.149e+02, threshold=3.622e+02, percent-clipped=0.0
+2024-08-27 00:41:39,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=191829.33333333334, ans=0.125
+2024-08-27 00:41:47,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=191882.66666666666, ans=0.125
+2024-08-27 00:42:07,824 INFO [train.py:1114] (3/4) Epoch 15, batch 1150, loss[loss=0.1839, simple_loss=0.2581, pruned_loss=0.04043, ctc_loss=0.07211, over 19581.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2668, pruned_loss=0.04486, ctc_loss=0.08393, over 3828649.12 frames. ], batch size: 52, lr: 1.00e-02, grad_scale: 16.0
+2024-08-27 00:42:26,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=192042.66666666666, ans=0.04949747468305833
+2024-08-27 00:42:29,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=192042.66666666666, ans=0.05
+2024-08-27 00:42:34,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=192096.0, ans=0.2
+2024-08-27 00:42:41,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.06 vs. limit=15.0
+2024-08-27 00:42:54,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=192202.66666666666, ans=0.0
+2024-08-27 00:43:04,238 INFO [train.py:1114] (3/4) Epoch 15, batch 1200, loss[loss=0.2078, simple_loss=0.283, pruned_loss=0.04763, ctc_loss=0.09335, over 19844.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2677, pruned_loss=0.04508, ctc_loss=0.08439, over 3825254.90 frames. ], batch size: 57, lr: 1.00e-02, grad_scale: 32.0
+2024-08-27 00:43:16,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=192309.33333333334, ans=0.0
+2024-08-27 00:43:25,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=192309.33333333334, ans=0.125
+2024-08-27 00:43:27,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=192309.33333333334, ans=0.04949747468305833
+2024-08-27 00:43:31,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=192362.66666666666, ans=0.125
+2024-08-27 00:44:32,769 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.478e+02 1.729e+02 2.216e+02 4.347e+02, threshold=3.458e+02, percent-clipped=1.0
+2024-08-27 00:44:34,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=192362.66666666666, ans=0.0
+2024-08-27 00:45:43,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=192416.0, ans=0.125
+2024-08-27 00:45:50,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=192469.33333333334, ans=0.125
+2024-08-27 00:45:57,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192469.33333333334, ans=0.125
+2024-08-27 00:46:12,620 INFO [train.py:1114] (3/4) Epoch 15, batch 1250, loss[loss=0.2173, simple_loss=0.2875, pruned_loss=0.05326, ctc_loss=0.1015, over 19533.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2681, pruned_loss=0.04502, ctc_loss=0.08426, over 3842670.75 frames. ], batch size: 61, lr: 1.00e-02, grad_scale: 32.0
+2024-08-27 00:46:45,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192576.0, ans=0.1
+2024-08-27 00:47:00,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.57 vs. limit=6.0
+2024-08-27 00:47:41,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=192629.33333333334, ans=0.2
+2024-08-27 00:48:26,341 INFO [train.py:1114] (3/4) Epoch 15, batch 1300, loss[loss=0.2061, simple_loss=0.277, pruned_loss=0.04969, ctc_loss=0.08965, over 18844.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2671, pruned_loss=0.04462, ctc_loss=0.08342, over 3846304.33 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 32.0
+2024-08-27 00:48:40,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=192789.33333333334, ans=0.0
+2024-08-27 00:48:42,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=192789.33333333334, ans=0.2
+2024-08-27 00:49:04,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=192842.66666666666, ans=0.0
+2024-08-27 00:49:15,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=192842.66666666666, ans=0.0
+2024-08-27 00:49:50,653 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.421e+02 1.669e+02 2.080e+02 3.869e+02, threshold=3.339e+02, percent-clipped=2.0
+2024-08-27 00:49:51,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=15.0
+2024-08-27 00:50:01,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192896.0, ans=0.1
+2024-08-27 00:50:40,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=193002.66666666666, ans=0.025
+2024-08-27 00:50:43,600 INFO [train.py:1114] (3/4) Epoch 15, batch 1350, loss[loss=0.1852, simple_loss=0.2694, pruned_loss=0.03651, ctc_loss=0.07012, over 19755.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2664, pruned_loss=0.04431, ctc_loss=0.08255, over 3857938.98 frames. ], batch size: 54, lr: 9.98e-03, grad_scale: 32.0
+2024-08-27 00:51:55,820 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 00:51:57,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=193109.33333333334, ans=0.2
+2024-08-27 00:52:01,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193162.66666666666, ans=0.125
+2024-08-27 00:53:13,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=193269.33333333334, ans=0.125
+2024-08-27 00:53:24,201 INFO [train.py:1114] (3/4) Epoch 15, batch 1400, loss[loss=0.1964, simple_loss=0.2514, pruned_loss=0.05233, ctc_loss=0.09178, over 19659.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2661, pruned_loss=0.04444, ctc_loss=0.08278, over 3864298.37 frames. ], batch size: 46, lr: 9.98e-03, grad_scale: 32.0
+2024-08-27 00:53:30,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=193322.66666666666, ans=0.125
+2024-08-27 00:53:32,869 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 00:53:33,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.02 vs. limit=15.0
+2024-08-27 00:53:34,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=193376.0, ans=0.125
+2024-08-27 00:53:57,415 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.449e+02 1.647e+02 2.125e+02 3.032e+02, threshold=3.293e+02, percent-clipped=0.0
+2024-08-27 00:54:16,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0
+2024-08-27 00:54:53,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=193482.66666666666, ans=0.0
+2024-08-27 00:54:54,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=193482.66666666666, ans=0.0
+2024-08-27 00:55:08,671 INFO [train.py:1114] (3/4) Epoch 15, batch 1450, loss[loss=0.2283, simple_loss=0.2961, pruned_loss=0.05835, ctc_loss=0.1095, over 19675.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2668, pruned_loss=0.04479, ctc_loss=0.08348, over 3862218.51 frames. ], batch size: 63, lr: 9.97e-03, grad_scale: 32.0
+2024-08-27 00:56:01,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.64 vs. limit=15.0
+2024-08-27 00:56:07,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=193696.0, ans=0.2
+2024-08-27 00:56:28,267 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 00:56:38,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=193802.66666666666, ans=0.025
+2024-08-27 00:56:39,795 INFO [train.py:1114] (3/4) Epoch 15, batch 1500, loss[loss=0.2005, simple_loss=0.2742, pruned_loss=0.04617, ctc_loss=0.0862, over 19575.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2671, pruned_loss=0.04489, ctc_loss=0.0836, over 3862151.23 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 32.0
+2024-08-27 00:57:52,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=193909.33333333334, ans=0.125
+2024-08-27 00:58:20,416 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.504e+02 1.720e+02 2.138e+02 3.076e+02, threshold=3.439e+02, percent-clipped=0.0
+2024-08-27 00:58:20,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=193962.66666666666, ans=0.125
+2024-08-27 00:58:31,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194016.0, ans=0.1
+2024-08-27 00:58:32,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=194016.0, ans=0.5
+2024-08-27 00:58:44,376 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 00:58:50,663 INFO [train.py:1114] (3/4) Epoch 15, batch 1550, loss[loss=0.2176, simple_loss=0.2884, pruned_loss=0.05293, ctc_loss=0.1021, over 19612.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2674, pruned_loss=0.04532, ctc_loss=0.08457, over 3846859.26 frames. ], batch size: 60, lr: 9.96e-03, grad_scale: 32.0
+2024-08-27 00:59:15,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=194229.33333333334, ans=10.0
+2024-08-27 00:59:15,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=194229.33333333334, ans=0.0
+2024-08-27 00:59:30,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194336.0, ans=0.1
+2024-08-27 00:59:32,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=194336.0, ans=0.2
+2024-08-27 00:59:37,714 INFO [train.py:1114] (3/4) Epoch 15, batch 1600, loss[loss=0.194, simple_loss=0.2757, pruned_loss=0.04089, ctc_loss=0.0766, over 19843.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.267, pruned_loss=0.04517, ctc_loss=0.08416, over 3836220.94 frames. ], batch size: 57, lr: 9.95e-03, grad_scale: 32.0
+2024-08-27 00:59:46,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194442.66666666666, ans=0.125
+2024-08-27 00:59:49,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.86 vs. limit=15.0
+2024-08-27 01:00:17,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.455e+02 1.710e+02 2.060e+02 3.831e+02, threshold=3.419e+02, percent-clipped=3.0
+2024-08-27 01:00:27,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=194549.33333333334, ans=0.0
+2024-08-27 01:00:28,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=194549.33333333334, ans=0.125
+2024-08-27 01:00:30,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.96 vs. limit=15.0
+2024-08-27 01:00:37,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194549.33333333334, ans=0.125
+2024-08-27 01:00:50,783 INFO [train.py:1114] (3/4) Epoch 15, batch 1650, loss[loss=0.1822, simple_loss=0.2584, pruned_loss=0.03885, ctc_loss=0.07038, over 19672.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2665, pruned_loss=0.04487, ctc_loss=0.08367, over 3833576.65 frames. ], batch size: 59, lr: 9.94e-03, grad_scale: 16.0
+2024-08-27 01:01:04,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=194656.0, ans=0.0
+2024-08-27 01:01:44,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=194816.0, ans=0.2
+2024-08-27 01:02:07,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194869.33333333334, ans=0.0
+2024-08-27 01:02:11,995 INFO [train.py:1114] (3/4) Epoch 15, batch 1700, loss[loss=0.1671, simple_loss=0.2391, pruned_loss=0.0348, ctc_loss=0.0637, over 19681.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2663, pruned_loss=0.04454, ctc_loss=0.08293, over 3847382.16 frames. ], batch size: 46, lr: 9.94e-03, grad_scale: 16.0
+2024-08-27 01:02:36,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.414e+02 1.817e+02 2.372e+02 3.799e+02, threshold=3.634e+02, percent-clipped=1.0
+2024-08-27 01:02:38,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195029.33333333334, ans=0.1
+2024-08-27 01:02:38,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=195029.33333333334, ans=0.125
+2024-08-27 01:02:41,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=195082.66666666666, ans=0.125
+2024-08-27 01:02:51,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195136.0, ans=0.0
+2024-08-27 01:02:53,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=195136.0, ans=0.0
+2024-08-27 01:02:54,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=15.0
+2024-08-27 01:03:00,188 INFO [train.py:1114] (3/4) Epoch 15, batch 1750, loss[loss=0.1827, simple_loss=0.2427, pruned_loss=0.04497, ctc_loss=0.08186, over 19631.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2661, pruned_loss=0.04433, ctc_loss=0.08256, over 3852360.31 frames. ], batch size: 45, lr: 9.93e-03, grad_scale: 16.0
+2024-08-27 01:03:09,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=195242.66666666666, ans=0.0
+2024-08-27 01:03:13,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=195242.66666666666, ans=0.125
+2024-08-27 01:03:23,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=195296.0, ans=0.0
+2024-08-27 01:03:35,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.93 vs. limit=15.0
+2024-08-27 01:03:36,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.37 vs. limit=12.0
+2024-08-27 01:03:44,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195402.66666666666, ans=0.125
+2024-08-27 01:03:47,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=195402.66666666666, ans=0.0
+2024-08-27 01:03:49,244 INFO [train.py:1114] (3/4) Epoch 15, batch 1800, loss[loss=0.1809, simple_loss=0.2608, pruned_loss=0.03683, ctc_loss=0.06832, over 19619.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2661, pruned_loss=0.0443, ctc_loss=0.08256, over 3854242.67 frames. ], batch size: 55, lr: 9.92e-03, grad_scale: 16.0
+2024-08-27 01:03:49,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.49 vs. limit=10.0
+2024-08-27 01:04:34,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.516e+02 1.927e+02 2.557e+02 3.874e+02, threshold=3.854e+02, percent-clipped=2.0
+2024-08-27 01:04:38,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.47 vs. limit=15.0
+2024-08-27 01:04:39,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=195616.0, ans=0.0
+2024-08-27 01:05:54,939 INFO [train.py:1114] (3/4) Epoch 15, batch 1850, loss[loss=0.2088, simple_loss=0.2883, pruned_loss=0.04647, ctc_loss=0.091, over 19596.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2665, pruned_loss=0.04453, ctc_loss=0.08306, over 3857016.34 frames. ], batch size: 57, lr: 9.92e-03, grad_scale: 16.0
+2024-08-27 01:05:58,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=195722.66666666666, ans=0.125
+2024-08-27 01:06:09,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.69 vs. limit=10.0
+2024-08-27 01:06:15,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=195776.0, ans=0.95
+2024-08-27 01:06:19,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195776.0, ans=0.1
+2024-08-27 01:06:22,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.19 vs. limit=22.5
+2024-08-27 01:06:23,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=195829.33333333334, ans=0.125
+2024-08-27 01:06:32,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=195882.66666666666, ans=0.125
+2024-08-27 01:06:37,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195882.66666666666, ans=0.125
+2024-08-27 01:06:41,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=195936.0, ans=0.07
+2024-08-27 01:06:49,466 INFO [train.py:1114] (3/4) Epoch 15, batch 1900, loss[loss=0.2053, simple_loss=0.2798, pruned_loss=0.04808, ctc_loss=0.08655, over 19674.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2669, pruned_loss=0.04474, ctc_loss=0.08342, over 3861337.71 frames. ], batch size: 59, lr: 9.91e-03, grad_scale: 16.0
+2024-08-27 01:06:49,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=195989.33333333334, ans=0.125
+2024-08-27 01:07:09,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=196096.0, ans=0.025
+2024-08-27 01:07:43,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.422e+02 1.649e+02 2.231e+02 4.535e+02, threshold=3.297e+02, percent-clipped=1.0
+2024-08-27 01:08:01,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196202.66666666666, ans=0.125
+2024-08-27 01:08:04,610 INFO [train.py:1114] (3/4) Epoch 15, batch 1950, loss[loss=0.1854, simple_loss=0.257, pruned_loss=0.04177, ctc_loss=0.0755, over 19588.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2677, pruned_loss=0.04463, ctc_loss=0.08318, over 3870605.46 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 16.0
+2024-08-27 01:08:49,927 INFO [train.py:1114] (3/4) Epoch 15, batch 2000, loss[loss=0.1598, simple_loss=0.233, pruned_loss=0.03122, ctc_loss=0.06028, over 19650.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2683, pruned_loss=0.04503, ctc_loss=0.08382, over 3854597.70 frames. ], batch size: 45, lr: 9.90e-03, grad_scale: 32.0
+2024-08-27 01:08:50,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=196522.66666666666, ans=0.025
+2024-08-27 01:09:41,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=196629.33333333334, ans=0.0
+2024-08-27 01:09:46,688 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.403e+02 1.640e+02 2.044e+02 3.050e+02, threshold=3.279e+02, percent-clipped=0.0
+2024-08-27 01:10:00,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.79 vs. limit=15.0
+2024-08-27 01:10:01,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=196736.0, ans=0.2
+2024-08-27 01:10:10,625 INFO [train.py:1114] (3/4) Epoch 15, batch 2050, loss[loss=0.1951, simple_loss=0.2524, pruned_loss=0.05072, ctc_loss=0.09075, over 19729.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2673, pruned_loss=0.04489, ctc_loss=0.0836, over 3850197.16 frames. ], batch size: 47, lr: 9.89e-03, grad_scale: 32.0
+2024-08-27 01:10:15,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=196789.33333333334, ans=0.125
+2024-08-27 01:10:15,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0
+2024-08-27 01:10:22,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.64 vs. limit=22.5
+2024-08-27 01:10:34,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=196896.0, ans=0.125
+2024-08-27 01:10:43,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=196949.33333333334, ans=0.025
+2024-08-27 01:10:54,825 INFO [train.py:1114] (3/4) Epoch 15, batch 2100, loss[loss=0.1957, simple_loss=0.2736, pruned_loss=0.04247, ctc_loss=0.08195, over 19757.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2665, pruned_loss=0.04429, ctc_loss=0.08251, over 3858054.45 frames. ], batch size: 54, lr: 9.88e-03, grad_scale: 32.0
+2024-08-27 01:11:26,627 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.442e+02 1.703e+02 2.065e+02 4.080e+02, threshold=3.406e+02, percent-clipped=2.0
+2024-08-27 01:11:28,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197162.66666666666, ans=0.1
+2024-08-27 01:11:31,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=197216.0, ans=0.0
+2024-08-27 01:11:48,551 INFO [train.py:1114] (3/4) Epoch 15, batch 2150, loss[loss=0.1865, simple_loss=0.262, pruned_loss=0.03975, ctc_loss=0.07846, over 19846.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2656, pruned_loss=0.0439, ctc_loss=0.08183, over 3870024.16 frames. ], batch size: 52, lr: 9.88e-03, grad_scale: 32.0
+2024-08-27 01:12:04,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.32 vs. limit=22.5
+2024-08-27 01:12:12,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197429.33333333334, ans=0.125
+2024-08-27 01:12:14,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=197482.66666666666, ans=0.0
+2024-08-27 01:12:15,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=197482.66666666666, ans=0.5
+2024-08-27 01:12:17,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=197482.66666666666, ans=0.125
+2024-08-27 01:12:21,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=197482.66666666666, ans=0.0
+2024-08-27 01:12:27,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=197536.0, ans=0.125
+2024-08-27 01:12:31,729 INFO [train.py:1114] (3/4) Epoch 15, batch 2200, loss[loss=0.1897, simple_loss=0.2694, pruned_loss=0.03942, ctc_loss=0.07788, over 19591.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2655, pruned_loss=0.04373, ctc_loss=0.08164, over 3868630.81 frames. ], batch size: 57, lr: 9.87e-03, grad_scale: 16.0
+2024-08-27 01:12:31,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=197589.33333333334, ans=0.125
+2024-08-27 01:12:34,740 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.52 vs. limit=12.0
+2024-08-27 01:12:50,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197696.0, ans=0.125
+2024-08-27 01:12:54,928 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.477e+02 1.816e+02 2.262e+02 3.833e+02, threshold=3.631e+02, percent-clipped=4.0
+2024-08-27 01:12:56,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=197696.0, ans=0.125
+2024-08-27 01:13:15,776 INFO [train.py:1114] (3/4) Epoch 15, batch 2250, loss[loss=0.198, simple_loss=0.276, pruned_loss=0.04288, ctc_loss=0.08556, over 19617.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2661, pruned_loss=0.04405, ctc_loss=0.08231, over 3868730.69 frames. ], batch size: 55, lr: 9.87e-03, grad_scale: 16.0
+2024-08-27 01:13:31,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.64 vs. limit=15.0
+2024-08-27 01:13:52,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=198069.33333333334, ans=0.125
+2024-08-27 01:13:52,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=198069.33333333334, ans=0.0
+2024-08-27 01:13:58,195 INFO [train.py:1114] (3/4) Epoch 15, batch 2300, loss[loss=0.1782, simple_loss=0.2521, pruned_loss=0.03778, ctc_loss=0.07164, over 19511.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2654, pruned_loss=0.0442, ctc_loss=0.08259, over 3862189.96 frames. ], batch size: 49, lr: 9.86e-03, grad_scale: 16.0
+2024-08-27 01:14:00,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198122.66666666666, ans=0.125
+2024-08-27 01:14:08,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=198176.0, ans=15.0
+2024-08-27 01:15:02,212 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.441e+02 1.617e+02 1.954e+02 3.129e+02, threshold=3.235e+02, percent-clipped=0.0
+2024-08-27 01:15:03,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198229.33333333334, ans=0.1
+2024-08-27 01:15:03,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.72 vs. limit=15.0
+2024-08-27 01:15:21,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=198336.0, ans=0.125
+2024-08-27 01:15:23,090 INFO [train.py:1114] (3/4) Epoch 15, batch 2350, loss[loss=0.2335, simple_loss=0.2973, pruned_loss=0.0623, ctc_loss=0.1128, over 19665.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2651, pruned_loss=0.04416, ctc_loss=0.08249, over 3864387.38 frames. ], batch size: 63, lr: 9.85e-03, grad_scale: 16.0
+2024-08-27 01:15:32,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=198442.66666666666, ans=0.0
+2024-08-27 01:15:33,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=198442.66666666666, ans=0.0
+2024-08-27 01:15:57,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=198549.33333333334, ans=0.125
+2024-08-27 01:15:59,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=198602.66666666666, ans=0.0
+2024-08-27 01:16:05,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=198602.66666666666, ans=0.0
+2024-08-27 01:16:31,883 INFO [train.py:1114] (3/4) Epoch 15, batch 2400, loss[loss=0.228, simple_loss=0.2934, pruned_loss=0.05898, ctc_loss=0.1116, over 19298.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2677, pruned_loss=0.04529, ctc_loss=0.08436, over 3858996.48 frames. ], batch size: 71, lr: 9.85e-03, grad_scale: 32.0
+2024-08-27 01:17:16,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.79 vs. limit=15.0
+2024-08-27 01:17:34,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198762.66666666666, ans=0.1
+2024-08-27 01:17:35,480 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.452e+02 1.605e+02 2.004e+02 3.213e+02, threshold=3.211e+02, percent-clipped=0.0
+2024-08-27 01:17:37,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.53 vs. limit=22.5
+2024-08-27 01:17:48,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=198869.33333333334, ans=0.125
+2024-08-27 01:17:54,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.30 vs. limit=12.0
+2024-08-27 01:17:57,659 INFO [train.py:1114] (3/4) Epoch 15, batch 2450, loss[loss=0.2575, simple_loss=0.3009, pruned_loss=0.07803, ctc_loss=0.145, over 13070.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2715, pruned_loss=0.04791, ctc_loss=0.08948, over 3732883.61 frames. ], batch size: 140, lr: 9.84e-03, grad_scale: 32.0
+2024-08-27 01:18:08,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=12.0
+2024-08-27 01:18:32,055 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 01:20:20,969 INFO [train.py:1114] (3/4) Epoch 16, batch 0, loss[loss=0.1693, simple_loss=0.2389, pruned_loss=0.03671, ctc_loss=0.06582, over 19393.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2389, pruned_loss=0.03671, ctc_loss=0.06582, over 19393.00 frames. ], batch size: 48, lr: 9.52e-03, grad_scale: 32.0
+2024-08-27 01:20:20,969 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-27 01:21:17,375 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1744, simple_loss=0.2673, pruned_loss=0.03034, ctc_loss=0.05204, over 944034.00 frames.
+2024-08-27 01:21:17,376 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB
+2024-08-27 01:21:17,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=199130.66666666666, ans=0.0
+2024-08-27 01:21:22,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=199130.66666666666, ans=0.125
+2024-08-27 01:21:27,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=199184.0, ans=0.0
+2024-08-27 01:21:31,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.80 vs. limit=15.0
+2024-08-27 01:21:54,895 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.674e+02 1.811e+02 2.106e+02 3.737e+02, threshold=3.622e+02, percent-clipped=2.0
+2024-08-27 01:22:02,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=199344.0, ans=0.125
+2024-08-27 01:22:07,231 INFO [train.py:1114] (3/4) Epoch 16, batch 50, loss[loss=0.1706, simple_loss=0.2413, pruned_loss=0.03591, ctc_loss=0.07029, over 19689.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2683, pruned_loss=0.0452, ctc_loss=0.08531, over 843817.09 frames. ], batch size: 47, lr: 9.51e-03, grad_scale: 32.0
+2024-08-27 01:22:07,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=199397.33333333334, ans=0.0
+2024-08-27 01:22:09,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199397.33333333334, ans=0.1
+2024-08-27 01:22:31,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=199504.0, ans=0.0
+2024-08-27 01:22:43,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199610.66666666666, ans=0.1
+2024-08-27 01:22:44,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.21 vs. limit=6.0
+2024-08-27 01:22:53,629 INFO [train.py:1114] (3/4) Epoch 16, batch 100, loss[loss=0.1795, simple_loss=0.2557, pruned_loss=0.03756, ctc_loss=0.07045, over 19715.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2698, pruned_loss=0.04503, ctc_loss=0.08501, over 1497155.71 frames. ], batch size: 51, lr: 9.51e-03, grad_scale: 32.0
+2024-08-27 01:23:13,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=199717.33333333334, ans=0.125
+2024-08-27 01:23:19,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199770.66666666666, ans=0.125
+2024-08-27 01:23:22,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=199770.66666666666, ans=0.125
+2024-08-27 01:23:33,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.434e+02 1.536e+02 1.885e+02 3.287e+02, threshold=3.072e+02, percent-clipped=0.0
+2024-08-27 01:23:42,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=199877.33333333334, ans=0.0
+2024-08-27 01:23:45,321 INFO [train.py:1114] (3/4) Epoch 16, batch 150, loss[loss=0.1743, simple_loss=0.2354, pruned_loss=0.04151, ctc_loss=0.07543, over 19693.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2662, pruned_loss=0.04392, ctc_loss=0.08227, over 2025780.12 frames. ], batch size: 47, lr: 9.50e-03, grad_scale: 32.0
+2024-08-27 01:23:47,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.63 vs. limit=15.0
+2024-08-27 01:23:53,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=199984.0, ans=0.125
+2024-08-27 01:23:57,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=199984.0, ans=0.125
+2024-08-27 01:24:10,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=200037.33333333334, ans=0.125
+2024-08-27 01:24:23,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=200090.66666666666, ans=0.125
+2024-08-27 01:24:29,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=200144.0, ans=0.125
+2024-08-27 01:24:35,702 INFO [train.py:1114] (3/4) Epoch 16, batch 200, loss[loss=0.2095, simple_loss=0.2837, pruned_loss=0.04895, ctc_loss=0.09344, over 18381.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2654, pruned_loss=0.04348, ctc_loss=0.08136, over 2433521.47 frames. ], batch size: 85, lr: 9.49e-03, grad_scale: 32.0
+2024-08-27 01:24:38,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200197.33333333334, ans=0.1
+2024-08-27 01:25:08,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=200357.33333333334, ans=0.0
+2024-08-27 01:25:14,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.526e+02 1.826e+02 2.235e+02 3.925e+02, threshold=3.652e+02, percent-clipped=6.0
+2024-08-27 01:25:16,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=200410.66666666666, ans=0.125
+2024-08-27 01:25:46,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=200410.66666666666, ans=0.0
+2024-08-27 01:25:52,464 INFO [train.py:1114] (3/4) Epoch 16, batch 250, loss[loss=0.2068, simple_loss=0.2836, pruned_loss=0.04708, ctc_loss=0.08956, over 19394.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2657, pruned_loss=0.04337, ctc_loss=0.08116, over 2754594.03 frames. ], batch size: 67, lr: 9.49e-03, grad_scale: 32.0
+2024-08-27 01:26:00,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=200464.0, ans=0.035
+2024-08-27 01:26:01,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=200464.0, ans=0.025
+2024-08-27 01:26:04,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=200464.0, ans=0.125
+2024-08-27 01:26:04,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.75 vs. limit=10.0
+2024-08-27 01:26:04,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=200464.0, ans=0.125
+2024-08-27 01:26:09,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=200517.33333333334, ans=0.0
+2024-08-27 01:26:15,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=200570.66666666666, ans=0.125
+2024-08-27 01:26:23,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.75 vs. limit=12.0
+2024-08-27 01:26:33,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=200624.0, ans=0.2
+2024-08-27 01:26:37,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=200677.33333333334, ans=0.05
+2024-08-27 01:26:44,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=200677.33333333334, ans=0.125
+2024-08-27 01:26:46,534 INFO [train.py:1114] (3/4) Epoch 16, batch 300, loss[loss=0.2126, simple_loss=0.2823, pruned_loss=0.0523, ctc_loss=0.09593, over 19561.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2652, pruned_loss=0.04329, ctc_loss=0.08101, over 3000245.92 frames. ], batch size: 61, lr: 9.48e-03, grad_scale: 32.0
+2024-08-27 01:26:55,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200730.66666666666, ans=0.0
+2024-08-27 01:27:21,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=200890.66666666666, ans=0.125
+2024-08-27 01:27:22,579 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.450e+02 1.677e+02 2.025e+02 3.129e+02, threshold=3.354e+02, percent-clipped=0.0
+2024-08-27 01:27:35,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=200944.0, ans=0.125
+2024-08-27 01:27:36,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0
+2024-08-27 01:27:36,615 INFO [train.py:1114] (3/4) Epoch 16, batch 350, loss[loss=0.1626, simple_loss=0.2382, pruned_loss=0.03153, ctc_loss=0.05986, over 19765.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2659, pruned_loss=0.04341, ctc_loss=0.08125, over 3190397.90 frames. ], batch size: 48, lr: 9.48e-03, grad_scale: 32.0
+2024-08-27 01:28:09,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=201157.33333333334, ans=0.0
+2024-08-27 01:28:24,283 INFO [train.py:1114] (3/4) Epoch 16, batch 400, loss[loss=0.1881, simple_loss=0.2674, pruned_loss=0.03932, ctc_loss=0.07551, over 19500.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2652, pruned_loss=0.04345, ctc_loss=0.08125, over 3343357.30 frames. ], batch size: 54, lr: 9.47e-03, grad_scale: 32.0
+2024-08-27 01:28:44,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=201370.66666666666, ans=0.5
+2024-08-27 01:28:53,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201424.0, ans=0.1
+2024-08-27 01:28:58,538 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.444e+02 1.663e+02 2.108e+02 3.293e+02, threshold=3.326e+02, percent-clipped=0.0
+2024-08-27 01:29:00,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=201477.33333333334, ans=0.0
+2024-08-27 01:29:03,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.66 vs. limit=15.0
+2024-08-27 01:29:09,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201477.33333333334, ans=0.1
+2024-08-27 01:29:10,810 INFO [train.py:1114] (3/4) Epoch 16, batch 450, loss[loss=0.1821, simple_loss=0.267, pruned_loss=0.03476, ctc_loss=0.06901, over 19621.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2654, pruned_loss=0.04344, ctc_loss=0.08129, over 3450908.43 frames. ], batch size: 55, lr: 9.46e-03, grad_scale: 32.0
+2024-08-27 01:29:50,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=201690.66666666666, ans=0.2
+2024-08-27 01:30:01,649 INFO [train.py:1114] (3/4) Epoch 16, batch 500, loss[loss=0.1948, simple_loss=0.2811, pruned_loss=0.03936, ctc_loss=0.07451, over 19654.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2649, pruned_loss=0.0433, ctc_loss=0.0809, over 3546959.70 frames. ], batch size: 63, lr: 9.46e-03, grad_scale: 32.0
+2024-08-27 01:30:05,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=201797.33333333334, ans=0.125
+2024-08-27 01:30:28,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=201904.0, ans=0.125
+2024-08-27 01:30:39,480 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.484e+02 1.746e+02 2.096e+02 4.072e+02, threshold=3.492e+02, percent-clipped=1.0
+2024-08-27 01:30:41,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202010.66666666666, ans=0.1
+2024-08-27 01:30:51,401 INFO [train.py:1114] (3/4) Epoch 16, batch 550, loss[loss=0.1977, simple_loss=0.274, pruned_loss=0.04441, ctc_loss=0.08165, over 19186.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2646, pruned_loss=0.04327, ctc_loss=0.08092, over 3608772.35 frames. ], batch size: 71, lr: 9.45e-03, grad_scale: 32.0
+2024-08-27 01:30:52,936 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.96 vs. limit=10.0
+2024-08-27 01:30:56,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202064.0, ans=0.1
+2024-08-27 01:31:03,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.10 vs. limit=15.0
+2024-08-27 01:31:05,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=202117.33333333334, ans=0.125
+2024-08-27 01:31:08,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=202117.33333333334, ans=0.125
+2024-08-27 01:31:12,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202170.66666666666, ans=0.1
+2024-08-27 01:31:18,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.07 vs. limit=15.0
+2024-08-27 01:31:20,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=202224.0, ans=0.0
+2024-08-27 01:31:29,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=202277.33333333334, ans=0.0
+2024-08-27 01:31:37,733 INFO [train.py:1114] (3/4) Epoch 16, batch 600, loss[loss=0.2037, simple_loss=0.2763, pruned_loss=0.04738, ctc_loss=0.091, over 19353.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2647, pruned_loss=0.0433, ctc_loss=0.08091, over 3666381.67 frames. ], batch size: 67, lr: 9.45e-03, grad_scale: 32.0
+2024-08-27 01:31:47,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.52 vs. limit=15.0
+2024-08-27 01:32:09,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=202490.66666666666, ans=0.125
+2024-08-27 01:32:14,250 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.474e+02 1.879e+02 2.462e+02 5.922e+02, threshold=3.759e+02, percent-clipped=13.0
+2024-08-27 01:32:19,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=202544.0, ans=0.125
+2024-08-27 01:32:21,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202544.0, ans=0.0
+2024-08-27 01:32:26,174 INFO [train.py:1114] (3/4) Epoch 16, batch 650, loss[loss=0.1756, simple_loss=0.255, pruned_loss=0.03429, ctc_loss=0.06885, over 19773.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2638, pruned_loss=0.04272, ctc_loss=0.0798, over 3716583.34 frames. ], batch size: 54, lr: 9.44e-03, grad_scale: 32.0
+2024-08-27 01:32:30,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.77 vs. limit=5.0
+2024-08-27 01:32:44,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=202650.66666666666, ans=0.125
+2024-08-27 01:32:54,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.37 vs. limit=6.0
+2024-08-27 01:33:07,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=202810.66666666666, ans=0.0
+2024-08-27 01:33:18,143 INFO [train.py:1114] (3/4) Epoch 16, batch 700, loss[loss=0.1721, simple_loss=0.2517, pruned_loss=0.03352, ctc_loss=0.06381, over 19721.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2642, pruned_loss=0.04292, ctc_loss=0.08022, over 3748262.13 frames. ], batch size: 51, lr: 9.43e-03, grad_scale: 32.0
+2024-08-27 01:33:22,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=202864.0, ans=0.125
+2024-08-27 01:33:32,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=202917.33333333334, ans=0.07
+2024-08-27 01:33:52,578 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.460e+02 1.707e+02 2.152e+02 4.812e+02, threshold=3.413e+02, percent-clipped=3.0
+2024-08-27 01:33:52,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203024.0, ans=0.125
+2024-08-27 01:33:55,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=203077.33333333334, ans=0.125
+2024-08-27 01:34:01,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=203077.33333333334, ans=15.0
+2024-08-27 01:34:04,716 INFO [train.py:1114] (3/4) Epoch 16, batch 750, loss[loss=0.1928, simple_loss=0.2694, pruned_loss=0.04205, ctc_loss=0.0802, over 19488.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2637, pruned_loss=0.04267, ctc_loss=0.07971, over 3773674.92 frames.
], batch size: 54, lr: 9.43e-03, grad_scale: 32.0 +2024-08-27 01:34:09,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=203130.66666666666, ans=0.125 +2024-08-27 01:34:09,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=203130.66666666666, ans=0.0 +2024-08-27 01:34:23,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.28 vs. limit=22.5 +2024-08-27 01:34:24,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=203237.33333333334, ans=0.05 +2024-08-27 01:34:29,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.19 vs. limit=10.0 +2024-08-27 01:34:33,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.76 vs. limit=15.0 +2024-08-27 01:34:36,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203290.66666666666, ans=0.125 +2024-08-27 01:34:46,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.14 vs. limit=15.0 +2024-08-27 01:34:52,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203344.0, ans=0.125 +2024-08-27 01:34:57,338 INFO [train.py:1114] (3/4) Epoch 16, batch 800, loss[loss=0.1875, simple_loss=0.26, pruned_loss=0.04163, ctc_loss=0.0792, over 19796.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2634, pruned_loss=0.04278, ctc_loss=0.07983, over 3795955.34 frames. ], batch size: 49, lr: 9.42e-03, grad_scale: 32.0 +2024-08-27 01:35:13,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=203397.33333333334, ans=0.125 +2024-08-27 01:35:14,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203397.33333333334, ans=0.125 +2024-08-27 01:35:23,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=203450.66666666666, ans=0.025 +2024-08-27 01:35:29,569 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:35:35,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203504.0, ans=0.125 +2024-08-27 01:35:49,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.508e+02 1.846e+02 2.334e+02 3.502e+02, threshold=3.692e+02, percent-clipped=1.0 +2024-08-27 01:36:01,631 INFO [train.py:1114] (3/4) Epoch 16, batch 850, loss[loss=0.1896, simple_loss=0.265, pruned_loss=0.0418, ctc_loss=0.07653, over 19678.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2637, pruned_loss=0.04289, ctc_loss=0.08018, over 3815245.16 frames. 
], batch size: 59, lr: 9.42e-03, grad_scale: 32.0 +2024-08-27 01:36:15,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=203717.33333333334, ans=0.2 +2024-08-27 01:36:17,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=203717.33333333334, ans=0.125 +2024-08-27 01:36:51,742 INFO [train.py:1114] (3/4) Epoch 16, batch 900, loss[loss=0.174, simple_loss=0.243, pruned_loss=0.03873, ctc_loss=0.06869, over 19440.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2641, pruned_loss=0.04327, ctc_loss=0.08074, over 3818838.11 frames. ], batch size: 48, lr: 9.41e-03, grad_scale: 32.0 +2024-08-27 01:36:55,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.35 vs. limit=15.0 +2024-08-27 01:37:26,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.398e+02 1.563e+02 1.898e+02 3.698e+02, threshold=3.126e+02, percent-clipped=1.0 +2024-08-27 01:37:29,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.09 vs. limit=15.0 +2024-08-27 01:37:34,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=204144.0, ans=0.0 +2024-08-27 01:37:34,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=204144.0, ans=0.0 +2024-08-27 01:37:35,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=204144.0, ans=0.025 +2024-08-27 01:37:35,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.96 vs. limit=22.5 +2024-08-27 01:37:38,109 INFO [train.py:1114] (3/4) Epoch 16, batch 950, loss[loss=0.1765, simple_loss=0.2487, pruned_loss=0.03817, ctc_loss=0.07008, over 19475.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2647, pruned_loss=0.04354, ctc_loss=0.08135, over 3819271.02 frames. ], batch size: 49, lr: 9.40e-03, grad_scale: 32.0 +2024-08-27 01:37:42,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=204197.33333333334, ans=0.0 +2024-08-27 01:37:43,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204197.33333333334, ans=0.1 +2024-08-27 01:37:46,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204250.66666666666, ans=0.125 +2024-08-27 01:37:56,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=204250.66666666666, ans=0.125 +2024-08-27 01:38:00,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=12.0 +2024-08-27 01:38:07,424 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.16 vs. 
limit=15.0 +2024-08-27 01:38:12,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=204357.33333333334, ans=10.0 +2024-08-27 01:38:13,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=204357.33333333334, ans=0.0 +2024-08-27 01:38:29,230 INFO [train.py:1114] (3/4) Epoch 16, batch 1000, loss[loss=0.1793, simple_loss=0.2507, pruned_loss=0.03888, ctc_loss=0.07536, over 19848.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2657, pruned_loss=0.04386, ctc_loss=0.08199, over 3815685.28 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 32.0 +2024-08-27 01:38:33,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=204464.0, ans=0.025 +2024-08-27 01:38:44,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.51 vs. limit=15.0 +2024-08-27 01:38:46,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=204517.33333333334, ans=0.125 +2024-08-27 01:38:47,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=204570.66666666666, ans=0.2 +2024-08-27 01:38:52,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.04 vs. limit=6.0 +2024-08-27 01:38:54,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.66 vs. limit=15.0 +2024-08-27 01:39:07,628 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.409e+02 1.616e+02 2.034e+02 3.159e+02, threshold=3.231e+02, percent-clipped=1.0 +2024-08-27 01:39:09,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=204677.33333333334, ans=0.0 +2024-08-27 01:39:13,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.63 vs. limit=6.0 +2024-08-27 01:39:19,877 INFO [train.py:1114] (3/4) Epoch 16, batch 1050, loss[loss=0.2058, simple_loss=0.2865, pruned_loss=0.04533, ctc_loss=0.0858, over 19848.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2651, pruned_loss=0.04367, ctc_loss=0.08158, over 3822643.35 frames. 
], batch size: 57, lr: 9.39e-03, grad_scale: 32.0 +2024-08-27 01:39:20,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204730.66666666666, ans=0.125 +2024-08-27 01:39:27,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=204730.66666666666, ans=0.0 +2024-08-27 01:39:27,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=204730.66666666666, ans=0.125 +2024-08-27 01:39:28,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-27 01:39:30,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-27 01:39:37,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=204784.0, ans=0.0 +2024-08-27 01:39:43,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=204837.33333333334, ans=0.2 +2024-08-27 01:39:52,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=204890.66666666666, ans=10.0 +2024-08-27 01:40:01,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=204944.0, ans=0.2 +2024-08-27 01:40:07,057 INFO [train.py:1114] (3/4) Epoch 16, batch 1100, loss[loss=0.1846, simple_loss=0.2655, pruned_loss=0.03766, ctc_loss=0.07083, over 19573.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2651, pruned_loss=0.0436, ctc_loss=0.08138, over 3830346.76 frames. ], batch size: 52, lr: 9.39e-03, grad_scale: 32.0 +2024-08-27 01:40:23,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=205050.66666666666, ans=0.0 +2024-08-27 01:40:25,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205104.0, ans=0.1 +2024-08-27 01:40:43,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=205157.33333333334, ans=0.04949747468305833 +2024-08-27 01:40:44,427 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.474e+02 1.664e+02 2.002e+02 3.685e+02, threshold=3.328e+02, percent-clipped=2.0 +2024-08-27 01:40:46,768 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:40:57,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=205210.66666666666, ans=0.0 +2024-08-27 01:40:58,943 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:40:59,559 INFO [train.py:1114] (3/4) Epoch 16, batch 1150, loss[loss=0.1718, simple_loss=0.2498, pruned_loss=0.03367, ctc_loss=0.06637, over 19595.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.265, pruned_loss=0.04369, ctc_loss=0.08164, over 3828951.50 frames. 
], batch size: 52, lr: 9.38e-03, grad_scale: 32.0 +2024-08-27 01:42:56,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=205264.0, ans=0.2 +2024-08-27 01:42:59,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.74 vs. limit=22.5 +2024-08-27 01:43:05,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205317.33333333334, ans=0.125 +2024-08-27 01:43:14,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.76 vs. limit=15.0 +2024-08-27 01:43:14,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205370.66666666666, ans=0.0 +2024-08-27 01:43:16,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205370.66666666666, ans=0.1 +2024-08-27 01:43:19,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=205424.0, ans=0.0 +2024-08-27 01:43:21,326 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:43:40,069 INFO [train.py:1114] (3/4) Epoch 16, batch 1200, loss[loss=0.2041, simple_loss=0.281, pruned_loss=0.04573, ctc_loss=0.08928, over 19837.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2661, pruned_loss=0.04395, ctc_loss=0.0821, over 3825450.87 frames. ], batch size: 57, lr: 9.38e-03, grad_scale: 32.0 +2024-08-27 01:43:50,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=205584.0, ans=0.0 +2024-08-27 01:43:55,121 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:44:02,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205637.33333333334, ans=0.0 +2024-08-27 01:44:09,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=205690.66666666666, ans=0.0 +2024-08-27 01:44:10,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=205690.66666666666, ans=0.025 +2024-08-27 01:44:16,057 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.520e+02 1.803e+02 2.158e+02 3.897e+02, threshold=3.606e+02, percent-clipped=2.0 +2024-08-27 01:44:28,188 INFO [train.py:1114] (3/4) Epoch 16, batch 1250, loss[loss=0.2117, simple_loss=0.2834, pruned_loss=0.05192, ctc_loss=0.09058, over 19533.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2661, pruned_loss=0.04366, ctc_loss=0.08155, over 3843629.75 frames. 
], batch size: 61, lr: 9.37e-03, grad_scale: 32.0 +2024-08-27 01:44:38,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=205850.66666666666, ans=0.125 +2024-08-27 01:44:55,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=205957.33333333334, ans=0.025 +2024-08-27 01:45:04,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-08-27 01:45:08,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=206010.66666666666, ans=0.0 +2024-08-27 01:45:13,426 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:45:16,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=206010.66666666666, ans=0.125 +2024-08-27 01:45:17,711 INFO [train.py:1114] (3/4) Epoch 16, batch 1300, loss[loss=0.1996, simple_loss=0.2726, pruned_loss=0.04622, ctc_loss=0.08515, over 18726.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.265, pruned_loss=0.04321, ctc_loss=0.0807, over 3847465.37 frames. ], batch size: 76, lr: 9.36e-03, grad_scale: 32.0 +2024-08-27 01:45:18,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=206064.0, ans=0.125 +2024-08-27 01:45:19,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=206064.0, ans=0.025 +2024-08-27 01:45:26,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206117.33333333334, ans=0.125 +2024-08-27 01:45:52,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.516e+02 1.773e+02 2.282e+02 3.618e+02, threshold=3.546e+02, percent-clipped=1.0 +2024-08-27 01:45:56,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206277.33333333334, ans=0.0 +2024-08-27 01:45:58,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=206277.33333333334, ans=0.0 +2024-08-27 01:46:06,822 INFO [train.py:1114] (3/4) Epoch 16, batch 1350, loss[loss=0.1904, simple_loss=0.2699, pruned_loss=0.04082, ctc_loss=0.07332, over 19771.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2647, pruned_loss=0.04291, ctc_loss=0.08014, over 3856232.56 frames. 
], batch size: 54, lr: 9.36e-03, grad_scale: 32.0 +2024-08-27 01:46:12,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=206330.66666666666, ans=0.2 +2024-08-27 01:46:20,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=206384.0, ans=0.0 +2024-08-27 01:46:38,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206490.66666666666, ans=0.1 +2024-08-27 01:46:51,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=206544.0, ans=0.0 +2024-08-27 01:46:53,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.50 vs. limit=10.0 +2024-08-27 01:46:56,700 INFO [train.py:1114] (3/4) Epoch 16, batch 1400, loss[loss=0.1664, simple_loss=0.2351, pruned_loss=0.03417, ctc_loss=0.07334, over 19667.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2647, pruned_loss=0.04303, ctc_loss=0.08037, over 3863166.97 frames. ], batch size: 46, lr: 9.35e-03, grad_scale: 32.0 +2024-08-27 01:47:05,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206650.66666666666, ans=0.0 +2024-08-27 01:47:10,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=206650.66666666666, ans=0.0 +2024-08-27 01:47:11,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206650.66666666666, ans=0.125 +2024-08-27 01:47:27,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=12.0 +2024-08-27 01:48:25,292 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.410e+02 1.569e+02 1.892e+02 4.037e+02, threshold=3.138e+02, percent-clipped=1.0 +2024-08-27 01:48:26,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=206757.33333333334, ans=0.125 +2024-08-27 01:48:37,424 INFO [train.py:1114] (3/4) Epoch 16, batch 1450, loss[loss=0.1933, simple_loss=0.2688, pruned_loss=0.04282, ctc_loss=0.08062, over 19669.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2651, pruned_loss=0.04325, ctc_loss=0.08064, over 3862211.86 frames. ], batch size: 63, lr: 9.35e-03, grad_scale: 32.0 +2024-08-27 01:48:56,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=206917.33333333334, ans=0.05 +2024-08-27 01:49:04,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=206970.66666666666, ans=0.125 +2024-08-27 01:49:07,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=207024.0, ans=0.025 +2024-08-27 01:49:18,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207077.33333333334, ans=0.0 +2024-08-27 01:49:25,855 INFO [train.py:1114] (3/4) Epoch 16, batch 1500, loss[loss=0.206, simple_loss=0.2804, pruned_loss=0.04781, ctc_loss=0.09012, over 19576.00 frames. 
], tot_loss[loss=0.1921, simple_loss=0.2654, pruned_loss=0.04325, ctc_loss=0.08081, over 3862602.86 frames. ], batch size: 57, lr: 9.34e-03, grad_scale: 32.0 +2024-08-27 01:49:45,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=207237.33333333334, ans=0.2 +2024-08-27 01:49:50,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0 +2024-08-27 01:50:03,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.422e+02 1.666e+02 2.042e+02 4.208e+02, threshold=3.332e+02, percent-clipped=3.0 +2024-08-27 01:50:19,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0 +2024-08-27 01:50:22,135 INFO [train.py:1114] (3/4) Epoch 16, batch 1550, loss[loss=0.2198, simple_loss=0.2855, pruned_loss=0.05691, ctc_loss=0.1008, over 19608.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2655, pruned_loss=0.0437, ctc_loss=0.0817, over 3847341.01 frames. ], batch size: 60, lr: 9.33e-03, grad_scale: 32.0 +2024-08-27 01:51:05,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=207610.66666666666, ans=0.1 +2024-08-27 01:51:10,025 INFO [train.py:1114] (3/4) Epoch 16, batch 1600, loss[loss=0.1928, simple_loss=0.2694, pruned_loss=0.04244, ctc_loss=0.07818, over 19828.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2651, pruned_loss=0.04359, ctc_loss=0.08133, over 3837493.35 frames. ], batch size: 57, lr: 9.33e-03, grad_scale: 32.0 +2024-08-27 01:51:23,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=207717.33333333334, ans=0.125 +2024-08-27 01:51:29,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=207770.66666666666, ans=0.2 +2024-08-27 01:51:32,033 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:51:54,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=207824.0, ans=0.07 +2024-08-27 01:51:54,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=15.0 +2024-08-27 01:51:55,656 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.401e+02 1.606e+02 1.975e+02 3.175e+02, threshold=3.213e+02, percent-clipped=0.0 +2024-08-27 01:52:06,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.93 vs. limit=15.0 +2024-08-27 01:52:14,370 INFO [train.py:1114] (3/4) Epoch 16, batch 1650, loss[loss=0.2003, simple_loss=0.2731, pruned_loss=0.04645, ctc_loss=0.08661, over 19646.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2648, pruned_loss=0.04333, ctc_loss=0.08099, over 3834750.07 frames. ], batch size: 59, lr: 9.32e-03, grad_scale: 32.0 +2024-08-27 01:52:43,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.51 vs. 
limit=15.0 +2024-08-27 01:52:47,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208090.66666666666, ans=0.1 +2024-08-27 01:52:48,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.21 vs. limit=6.0 +2024-08-27 01:52:49,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. limit=6.0 +2024-08-27 01:52:52,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=208144.0, ans=0.025 +2024-08-27 01:53:04,345 INFO [train.py:1114] (3/4) Epoch 16, batch 1700, loss[loss=0.1675, simple_loss=0.2383, pruned_loss=0.03515, ctc_loss=0.06575, over 19643.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2642, pruned_loss=0.04289, ctc_loss=0.08027, over 3848173.81 frames. ], batch size: 46, lr: 9.32e-03, grad_scale: 64.0 +2024-08-27 01:53:06,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=208197.33333333334, ans=0.0 +2024-08-27 01:53:10,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=208197.33333333334, ans=0.125 +2024-08-27 01:53:12,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=208197.33333333334, ans=0.0 +2024-08-27 01:53:16,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=208250.66666666666, ans=0.2 +2024-08-27 01:53:42,384 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.468e+02 1.742e+02 2.214e+02 3.607e+02, threshold=3.484e+02, percent-clipped=2.0 +2024-08-27 01:53:43,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=208410.66666666666, ans=0.0 +2024-08-27 01:53:43,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.11 vs. limit=22.5 +2024-08-27 01:53:53,061 INFO [train.py:1114] (3/4) Epoch 16, batch 1750, loss[loss=0.1673, simple_loss=0.2353, pruned_loss=0.03639, ctc_loss=0.06638, over 19636.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2642, pruned_loss=0.04291, ctc_loss=0.08043, over 3852413.95 frames. ], batch size: 45, lr: 9.31e-03, grad_scale: 32.0 +2024-08-27 01:53:54,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=208464.0, ans=0.125 +2024-08-27 01:53:55,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.48 vs. limit=15.0 +2024-08-27 01:54:00,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.15 vs. 
limit=15.0 +2024-08-27 01:54:03,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208517.33333333334, ans=0.125 +2024-08-27 01:54:06,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=208517.33333333334, ans=0.0 +2024-08-27 01:54:18,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=208570.66666666666, ans=0.125 +2024-08-27 01:54:19,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=208624.0, ans=0.09899494936611666 +2024-08-27 01:54:28,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-08-27 01:54:37,052 INFO [train.py:1114] (3/4) Epoch 16, batch 1800, loss[loss=0.1788, simple_loss=0.2611, pruned_loss=0.03531, ctc_loss=0.06459, over 19627.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2645, pruned_loss=0.04287, ctc_loss=0.08039, over 3853919.08 frames. ], batch size: 55, lr: 9.31e-03, grad_scale: 32.0 +2024-08-27 01:54:55,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.43 vs. limit=10.0 +2024-08-27 01:55:10,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.563e+02 1.995e+02 2.578e+02 4.186e+02, threshold=3.991e+02, percent-clipped=7.0 +2024-08-27 01:55:10,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=208890.66666666666, ans=0.125 +2024-08-27 01:55:10,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=208890.66666666666, ans=0.05 +2024-08-27 01:55:14,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208944.0, ans=0.125 +2024-08-27 01:55:19,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-27 01:55:20,655 INFO [train.py:1114] (3/4) Epoch 16, batch 1850, loss[loss=0.1965, simple_loss=0.2795, pruned_loss=0.04084, ctc_loss=0.07971, over 19584.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2646, pruned_loss=0.04301, ctc_loss=0.08057, over 3855947.60 frames. ], batch size: 57, lr: 9.30e-03, grad_scale: 32.0 +2024-08-27 01:55:34,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=209050.66666666666, ans=0.125 +2024-08-27 01:55:38,910 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.17 vs. limit=15.0 +2024-08-27 01:55:38,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.83 vs. 
limit=10.0 +2024-08-27 01:55:39,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209104.0, ans=0.125 +2024-08-27 01:55:48,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=209157.33333333334, ans=0.0 +2024-08-27 01:55:57,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209210.66666666666, ans=0.0 +2024-08-27 01:56:00,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. limit=6.0 +2024-08-27 01:56:04,484 INFO [train.py:1114] (3/4) Epoch 16, batch 1900, loss[loss=0.1901, simple_loss=0.2656, pruned_loss=0.04143, ctc_loss=0.07927, over 19658.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2647, pruned_loss=0.04317, ctc_loss=0.08079, over 3860573.92 frames. ], batch size: 59, lr: 9.29e-03, grad_scale: 32.0 +2024-08-27 01:56:05,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.76 vs. limit=15.0 +2024-08-27 01:56:09,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=209264.0, ans=0.07 +2024-08-27 01:56:14,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=209317.33333333334, ans=0.125 +2024-08-27 01:56:37,682 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.418e+02 1.626e+02 2.079e+02 4.675e+02, threshold=3.252e+02, percent-clipped=2.0 +2024-08-27 01:56:48,354 INFO [train.py:1114] (3/4) Epoch 16, batch 1950, loss[loss=0.1862, simple_loss=0.2614, pruned_loss=0.04139, ctc_loss=0.07054, over 19597.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2654, pruned_loss=0.04325, ctc_loss=0.08079, over 3869340.09 frames. 
], batch size: 52, lr: 9.29e-03, grad_scale: 32.0 +2024-08-27 01:56:53,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=209530.66666666666, ans=0.125 +2024-08-27 01:56:59,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=209584.0, ans=0.0 +2024-08-27 01:57:03,747 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:57:14,201 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 01:57:15,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=209690.66666666666, ans=0.125 +2024-08-27 01:57:20,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=209690.66666666666, ans=0.05 +2024-08-27 01:57:23,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209690.66666666666, ans=0.1 +2024-08-27 01:57:27,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=209744.0, ans=0.0 +2024-08-27 01:57:30,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209744.0, ans=0.0 +2024-08-27 01:57:34,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=209744.0, ans=0.2 +2024-08-27 01:57:35,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:35,884 INFO [train.py:1114] (3/4) Epoch 16, batch 2000, loss[loss=0.18, simple_loss=0.2462, pruned_loss=0.04176, ctc_loss=0.07575, over 19634.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2658, pruned_loss=0.04349, ctc_loss=0.08142, over 3853325.83 frames. ], batch size: 45, lr: 9.28e-03, grad_scale: 32.0 +2024-08-27 01:57:43,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209797.33333333334, ans=0.125 +2024-08-27 01:57:48,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.27 vs. limit=22.5 +2024-08-27 01:58:07,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209957.33333333334, ans=0.125 +2024-08-27 01:58:09,434 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.401e+02 1.655e+02 2.254e+02 4.011e+02, threshold=3.310e+02, percent-clipped=6.0 +2024-08-27 01:58:20,023 INFO [train.py:1114] (3/4) Epoch 16, batch 2050, loss[loss=0.1725, simple_loss=0.2408, pruned_loss=0.03746, ctc_loss=0.07306, over 19677.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2648, pruned_loss=0.04333, ctc_loss=0.08115, over 3850064.81 frames. 
], batch size: 47, lr: 9.28e-03, grad_scale: 32.0 +2024-08-27 01:58:25,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=210064.0, ans=0.125 +2024-08-27 01:58:31,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=210117.33333333334, ans=0.0 +2024-08-27 01:58:34,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0 +2024-08-27 01:58:37,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.18 vs. limit=22.5 +2024-08-27 01:58:47,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210224.0, ans=0.1 +2024-08-27 01:59:03,135 INFO [train.py:1114] (3/4) Epoch 16, batch 2100, loss[loss=0.1874, simple_loss=0.2593, pruned_loss=0.04212, ctc_loss=0.0781, over 19758.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.264, pruned_loss=0.04285, ctc_loss=0.08039, over 3858504.80 frames. ], batch size: 54, lr: 9.27e-03, grad_scale: 32.0 +2024-08-27 01:59:18,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=210384.0, ans=0.0 +2024-08-27 01:59:23,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=210437.33333333334, ans=0.025 +2024-08-27 01:59:30,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.80 vs. limit=15.0 +2024-08-27 01:59:35,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=210490.66666666666, ans=0.125 +2024-08-27 01:59:35,717 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.547e+02 1.892e+02 2.472e+02 4.594e+02, threshold=3.784e+02, percent-clipped=3.0 +2024-08-27 01:59:47,044 INFO [train.py:1114] (3/4) Epoch 16, batch 2150, loss[loss=0.163, simple_loss=0.2334, pruned_loss=0.03378, ctc_loss=0.06282, over 19857.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2637, pruned_loss=0.04288, ctc_loss=0.08028, over 3870464.23 frames. ], batch size: 52, lr: 9.27e-03, grad_scale: 32.0 +2024-08-27 01:59:55,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=210650.66666666666, ans=0.5 +2024-08-27 02:00:10,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=210704.0, ans=0.2 +2024-08-27 02:00:10,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.43 vs. limit=15.0 +2024-08-27 02:00:25,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=210810.66666666666, ans=0.125 +2024-08-27 02:00:25,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.19 vs. 
limit=12.0 +2024-08-27 02:00:27,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=210810.66666666666, ans=0.125 +2024-08-27 02:00:30,373 INFO [train.py:1114] (3/4) Epoch 16, batch 2200, loss[loss=0.1799, simple_loss=0.2551, pruned_loss=0.0375, ctc_loss=0.07409, over 19609.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2635, pruned_loss=0.04277, ctc_loss=0.07993, over 3868836.05 frames. ], batch size: 57, lr: 9.26e-03, grad_scale: 32.0 +2024-08-27 02:00:42,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.55 vs. limit=15.0 +2024-08-27 02:01:02,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=211024.0, ans=0.125 +2024-08-27 02:01:04,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=211024.0, ans=0.125 +2024-08-27 02:01:05,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0 +2024-08-27 02:01:06,357 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.493e+02 1.671e+02 2.113e+02 4.070e+02, threshold=3.342e+02, percent-clipped=1.0 +2024-08-27 02:01:08,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=211077.33333333334, ans=0.0 +2024-08-27 02:01:13,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=211077.33333333334, ans=0.125 +2024-08-27 02:01:17,551 INFO [train.py:1114] (3/4) Epoch 16, batch 2250, loss[loss=0.1995, simple_loss=0.2778, pruned_loss=0.04408, ctc_loss=0.08278, over 19617.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2638, pruned_loss=0.04291, ctc_loss=0.08001, over 3868508.86 frames. ], batch size: 55, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:01:47,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=211290.66666666666, ans=0.125 +2024-08-27 02:01:52,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=211344.0, ans=0.025 +2024-08-27 02:02:00,438 INFO [train.py:1114] (3/4) Epoch 16, batch 2300, loss[loss=0.1616, simple_loss=0.2324, pruned_loss=0.03309, ctc_loss=0.06157, over 19500.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2627, pruned_loss=0.04285, ctc_loss=0.07996, over 3861386.43 frames. 
], batch size: 49, lr: 9.25e-03, grad_scale: 32.0 +2024-08-27 02:02:08,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211450.66666666666, ans=0.125 +2024-08-27 02:02:18,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=211504.0, ans=0.5 +2024-08-27 02:02:21,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211504.0, ans=0.1 +2024-08-27 02:02:26,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211557.33333333334, ans=0.125 +2024-08-27 02:02:31,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=211557.33333333334, ans=0.125 +2024-08-27 02:02:33,269 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.480e+02 1.722e+02 2.096e+02 3.640e+02, threshold=3.444e+02, percent-clipped=3.0 +2024-08-27 02:02:39,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.26 vs. limit=6.0 +2024-08-27 02:02:44,154 INFO [train.py:1114] (3/4) Epoch 16, batch 2350, loss[loss=0.2137, simple_loss=0.2837, pruned_loss=0.05236, ctc_loss=0.09747, over 19654.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2629, pruned_loss=0.04295, ctc_loss=0.08004, over 3863092.72 frames. ], batch size: 63, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:02:47,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211664.0, ans=0.1 +2024-08-27 02:02:53,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=211664.0, ans=0.0 +2024-08-27 02:03:20,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211824.0, ans=0.125 +2024-08-27 02:03:27,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=211877.33333333334, ans=0.0 +2024-08-27 02:03:34,527 INFO [train.py:1114] (3/4) Epoch 16, batch 2400, loss[loss=0.2164, simple_loss=0.2784, pruned_loss=0.05737, ctc_loss=0.09919, over 19287.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2651, pruned_loss=0.04382, ctc_loss=0.08159, over 3857264.20 frames. ], batch size: 71, lr: 9.24e-03, grad_scale: 32.0 +2024-08-27 02:03:42,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211984.0, ans=0.125 +2024-08-27 02:03:47,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=211984.0, ans=0.0 +2024-08-27 02:03:55,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.40 vs. 
limit=15.0 +2024-08-27 02:04:02,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=212090.66666666666, ans=0.0 +2024-08-27 02:04:07,967 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.276e+02 1.442e+02 1.653e+02 2.239e+02 3.362e+02, threshold=3.307e+02, percent-clipped=0.0 +2024-08-27 02:04:11,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=212144.0, ans=0.0 +2024-08-27 02:04:18,823 INFO [train.py:1114] (3/4) Epoch 16, batch 2450, loss[loss=0.2385, simple_loss=0.2947, pruned_loss=0.06557, ctc_loss=0.1281, over 13699.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2688, pruned_loss=0.04623, ctc_loss=0.08643, over 3730318.33 frames. ], batch size: 140, lr: 9.23e-03, grad_scale: 32.0 +2024-08-27 02:04:25,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212197.33333333334, ans=0.125 +2024-08-27 02:04:30,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.80 vs. limit=22.5 +2024-08-27 02:04:36,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=212304.0, ans=0.125 +2024-08-27 02:04:37,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.44 vs. limit=15.0 +2024-08-27 02:05:43,557 INFO [train.py:1114] (3/4) Epoch 17, batch 0, loss[loss=0.1823, simple_loss=0.2516, pruned_loss=0.04115, ctc_loss=0.07697, over 19407.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2516, pruned_loss=0.04115, ctc_loss=0.07697, over 19407.00 frames. ], batch size: 48, lr: 8.95e-03, grad_scale: 32.0 +2024-08-27 02:05:43,558 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-27 02:05:51,501 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.1500, 4.2843, 4.3604, 4.4785], device='cuda:3') +2024-08-27 02:05:53,281 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.172, simple_loss=0.265, pruned_loss=0.02949, ctc_loss=0.04976, over 944034.00 frames. +2024-08-27 02:05:53,281 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB +2024-08-27 02:06:01,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212458.66666666666, ans=0.1 +2024-08-27 02:06:12,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=212512.0, ans=0.5 +2024-08-27 02:06:12,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212512.0, ans=0.1 +2024-08-27 02:06:24,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=212565.33333333334, ans=0.2 +2024-08-27 02:06:27,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=212565.33333333334, ans=0.125 +2024-08-27 02:06:39,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.85 vs. 
limit=10.0 +2024-08-27 02:06:40,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.629e+02 1.801e+02 2.001e+02 3.255e+02, threshold=3.602e+02, percent-clipped=0.0 +2024-08-27 02:06:40,349 INFO [train.py:1114] (3/4) Epoch 17, batch 50, loss[loss=0.1682, simple_loss=0.2358, pruned_loss=0.03658, ctc_loss=0.0688, over 19722.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2644, pruned_loss=0.04281, ctc_loss=0.08024, over 844761.44 frames. ], batch size: 47, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:06:41,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212672.0, ans=0.1 +2024-08-27 02:06:47,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212672.0, ans=0.1 +2024-08-27 02:06:54,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-08-27 02:06:57,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.06 vs. limit=15.0 +2024-08-27 02:07:02,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.71 vs. limit=15.0 +2024-08-27 02:07:03,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=212778.66666666666, ans=0.05 +2024-08-27 02:07:13,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=212832.0, ans=0.125 +2024-08-27 02:07:21,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=212885.33333333334, ans=0.5 +2024-08-27 02:07:29,658 INFO [train.py:1114] (3/4) Epoch 17, batch 100, loss[loss=0.1785, simple_loss=0.2537, pruned_loss=0.0378, ctc_loss=0.06934, over 19722.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2662, pruned_loss=0.04331, ctc_loss=0.08137, over 1498730.66 frames. ], batch size: 51, lr: 8.94e-03, grad_scale: 16.0 +2024-08-27 02:07:38,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=212992.0, ans=0.09899494936611666 +2024-08-27 02:07:40,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=212992.0, ans=0.125 +2024-08-27 02:08:08,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213098.66666666666, ans=0.125 +2024-08-27 02:08:20,134 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.458e+02 1.665e+02 2.006e+02 3.256e+02, threshold=3.330e+02, percent-clipped=0.0 +2024-08-27 02:08:20,168 INFO [train.py:1114] (3/4) Epoch 17, batch 150, loss[loss=0.1676, simple_loss=0.2365, pruned_loss=0.03596, ctc_loss=0.06706, over 19720.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2645, pruned_loss=0.04284, ctc_loss=0.08052, over 2026289.94 frames. 
], batch size: 47, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:08:42,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213312.0, ans=0.0 +2024-08-27 02:08:42,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=15.0 +2024-08-27 02:09:08,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=213365.33333333334, ans=0.0 +2024-08-27 02:09:50,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=213365.33333333334, ans=0.025 +2024-08-27 02:10:26,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213418.66666666666, ans=0.125 +2024-08-27 02:10:29,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.64 vs. limit=15.0 +2024-08-27 02:10:55,452 INFO [train.py:1114] (3/4) Epoch 17, batch 200, loss[loss=0.2195, simple_loss=0.285, pruned_loss=0.05602, ctc_loss=0.1049, over 18235.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2635, pruned_loss=0.04243, ctc_loss=0.07947, over 2433635.33 frames. ], batch size: 85, lr: 8.93e-03, grad_scale: 16.0 +2024-08-27 02:11:01,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=213472.0, ans=0.04949747468305833 +2024-08-27 02:11:04,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213472.0, ans=0.1 +2024-08-27 02:11:05,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213525.33333333334, ans=0.125 +2024-08-27 02:11:09,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=213525.33333333334, ans=0.125 +2024-08-27 02:11:17,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=213525.33333333334, ans=0.0 +2024-08-27 02:11:26,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=213578.66666666666, ans=0.0 +2024-08-27 02:11:39,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=213685.33333333334, ans=0.025 +2024-08-27 02:11:41,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=213685.33333333334, ans=0.0 +2024-08-27 02:11:49,178 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.468e+02 1.730e+02 2.457e+02 4.645e+02, threshold=3.460e+02, percent-clipped=6.0 +2024-08-27 02:11:49,211 INFO [train.py:1114] (3/4) Epoch 17, batch 250, loss[loss=0.1969, simple_loss=0.2756, pruned_loss=0.04323, ctc_loss=0.07936, over 19400.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2634, pruned_loss=0.04192, ctc_loss=0.07855, over 2755065.21 frames. 
], batch size: 67, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:12:16,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213792.0, ans=0.125 +2024-08-27 02:12:28,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=213898.66666666666, ans=0.05 +2024-08-27 02:14:34,922 INFO [train.py:1114] (3/4) Epoch 17, batch 300, loss[loss=0.1985, simple_loss=0.2764, pruned_loss=0.04421, ctc_loss=0.08036, over 19534.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.263, pruned_loss=0.04194, ctc_loss=0.07834, over 2999463.88 frames. ], batch size: 61, lr: 8.92e-03, grad_scale: 16.0 +2024-08-27 02:14:45,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-27 02:14:56,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.04 vs. limit=15.0 +2024-08-27 02:14:57,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214112.0, ans=0.125 +2024-08-27 02:16:18,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.53 vs. limit=15.0 +2024-08-27 02:16:34,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.09 vs. limit=15.0 +2024-08-27 02:16:40,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=214218.66666666666, ans=0.125 +2024-08-27 02:16:48,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=214272.0, ans=0.125 +2024-08-27 02:16:48,694 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.450e+02 1.705e+02 2.074e+02 4.169e+02, threshold=3.410e+02, percent-clipped=2.0 +2024-08-27 02:16:48,726 INFO [train.py:1114] (3/4) Epoch 17, batch 350, loss[loss=0.1758, simple_loss=0.2422, pruned_loss=0.03966, ctc_loss=0.07515, over 19779.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2637, pruned_loss=0.04232, ctc_loss=0.07918, over 3190999.16 frames. ], batch size: 48, lr: 8.91e-03, grad_scale: 16.0 +2024-08-27 02:16:53,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-08-27 02:16:53,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.99 vs. 
limit=22.5 +2024-08-27 02:16:54,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=214272.0, ans=0.125 +2024-08-27 02:17:01,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214325.33333333334, ans=0.0 +2024-08-27 02:17:21,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214432.0, ans=0.125 +2024-08-27 02:17:21,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=214432.0, ans=0.125 +2024-08-27 02:17:28,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=214485.33333333334, ans=0.125 +2024-08-27 02:17:36,102 INFO [train.py:1114] (3/4) Epoch 17, batch 400, loss[loss=0.1737, simple_loss=0.2578, pruned_loss=0.03208, ctc_loss=0.06364, over 19500.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.263, pruned_loss=0.04195, ctc_loss=0.0784, over 3343531.34 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 32.0 +2024-08-27 02:17:40,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214538.66666666666, ans=0.1 +2024-08-27 02:17:48,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0 +2024-08-27 02:18:05,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=214698.66666666666, ans=0.125 +2024-08-27 02:18:20,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=214752.0, ans=0.125 +2024-08-27 02:18:25,577 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.479e+02 1.707e+02 2.031e+02 4.496e+02, threshold=3.413e+02, percent-clipped=2.0 +2024-08-27 02:18:25,610 INFO [train.py:1114] (3/4) Epoch 17, batch 450, loss[loss=0.1988, simple_loss=0.2758, pruned_loss=0.04459, ctc_loss=0.08175, over 19614.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2638, pruned_loss=0.04205, ctc_loss=0.07873, over 3451227.36 frames. ], batch size: 55, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:19:05,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=214965.33333333334, ans=0.0 +2024-08-27 02:19:09,365 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:19:18,863 INFO [train.py:1114] (3/4) Epoch 17, batch 500, loss[loss=0.2025, simple_loss=0.28, pruned_loss=0.04628, ctc_loss=0.08113, over 19684.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2631, pruned_loss=0.04167, ctc_loss=0.0781, over 3546818.49 frames. 
], batch size: 63, lr: 8.90e-03, grad_scale: 32.0 +2024-08-27 02:19:20,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=215072.0, ans=0.0 +2024-08-27 02:19:20,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=215072.0, ans=0.07 +2024-08-27 02:19:22,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=215072.0, ans=0.2 +2024-08-27 02:19:48,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=215178.66666666666, ans=0.125 +2024-08-27 02:19:51,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.06 vs. limit=15.0 +2024-08-27 02:19:52,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=215232.0, ans=0.2 +2024-08-27 02:20:20,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=215232.0, ans=0.125 +2024-08-27 02:20:41,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=215285.33333333334, ans=0.035 +2024-08-27 02:20:44,557 INFO [train.py:1114] (3/4) Epoch 17, batch 550, loss[loss=0.1889, simple_loss=0.2701, pruned_loss=0.03933, ctc_loss=0.07288, over 19302.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2628, pruned_loss=0.0417, ctc_loss=0.07811, over 3608612.57 frames. ], batch size: 71, lr: 8.89e-03, grad_scale: 16.0 +2024-08-27 02:20:45,391 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.446e+02 1.711e+02 2.254e+02 3.980e+02, threshold=3.422e+02, percent-clipped=2.0 +2024-08-27 02:20:51,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215338.66666666666, ans=0.1 +2024-08-27 02:21:14,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=215498.66666666666, ans=0.025 +2024-08-27 02:21:32,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=215605.33333333334, ans=0.2 +2024-08-27 02:21:43,251 INFO [train.py:1114] (3/4) Epoch 17, batch 600, loss[loss=0.2072, simple_loss=0.2748, pruned_loss=0.05146, ctc_loss=0.09152, over 19313.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2628, pruned_loss=0.04167, ctc_loss=0.07801, over 3666177.61 frames. 
], batch size: 67, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:21:51,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=215605.33333333334, ans=0.025 +2024-08-27 02:22:12,713 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:22:28,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=215818.66666666666, ans=0.0 +2024-08-27 02:22:30,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215818.66666666666, ans=0.125 +2024-08-27 02:22:35,806 INFO [train.py:1114] (3/4) Epoch 17, batch 650, loss[loss=0.18, simple_loss=0.2575, pruned_loss=0.037, ctc_loss=0.07152, over 19764.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2624, pruned_loss=0.04166, ctc_loss=0.07803, over 3716226.04 frames. ], batch size: 54, lr: 8.88e-03, grad_scale: 16.0 +2024-08-27 02:22:36,659 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.454e+02 1.765e+02 2.281e+02 4.784e+02, threshold=3.530e+02, percent-clipped=4.0 +2024-08-27 02:23:05,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=215978.66666666666, ans=0.1 +2024-08-27 02:23:25,361 INFO [train.py:1114] (3/4) Epoch 17, batch 700, loss[loss=0.1785, simple_loss=0.254, pruned_loss=0.03704, ctc_loss=0.07231, over 19711.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2627, pruned_loss=0.04167, ctc_loss=0.07788, over 3749092.86 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:23:25,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216138.66666666666, ans=0.1 +2024-08-27 02:23:44,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216245.33333333334, ans=0.125 +2024-08-27 02:23:55,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216298.66666666666, ans=0.1 +2024-08-27 02:28:23,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=216352.0, ans=0.0 +2024-08-27 02:28:51,368 INFO [train.py:1114] (3/4) Epoch 17, batch 750, loss[loss=0.1974, simple_loss=0.2777, pruned_loss=0.04203, ctc_loss=0.08279, over 19502.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2626, pruned_loss=0.04164, ctc_loss=0.07803, over 3775151.58 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 16.0 +2024-08-27 02:29:21,547 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.483e+02 1.820e+02 2.509e+02 4.091e+02, threshold=3.640e+02, percent-clipped=8.0 +2024-08-27 02:30:53,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=216405.33333333334, ans=0.125 +2024-08-27 02:33:25,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.23 vs. 
limit=22.5 +2024-08-27 02:36:25,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=216565.33333333334, ans=0.025 +2024-08-27 02:37:59,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-08-27 02:38:07,119 INFO [train.py:1114] (3/4) Epoch 17, batch 800, loss[loss=0.1766, simple_loss=0.245, pruned_loss=0.04, ctc_loss=0.07039, over 19408.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.262, pruned_loss=0.04172, ctc_loss=0.07823, over 3796424.97 frames. ], batch size: 48, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:38:07,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0 +2024-08-27 02:39:00,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=216672.0, ans=0.2 +2024-08-27 02:39:03,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=216672.0, ans=0.0 +2024-08-27 02:39:11,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.36 vs. limit=10.0 +2024-08-27 02:39:11,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216725.33333333334, ans=0.1 +2024-08-27 02:39:24,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.43 vs. limit=15.0 +2024-08-27 02:39:28,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=216725.33333333334, ans=0.2 +2024-08-27 02:40:08,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=216778.66666666666, ans=6.0 +2024-08-27 02:40:33,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.87 vs. limit=15.0 +2024-08-27 02:40:35,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=216885.33333333334, ans=0.0 +2024-08-27 02:40:43,406 INFO [train.py:1114] (3/4) Epoch 17, batch 850, loss[loss=0.1817, simple_loss=0.2597, pruned_loss=0.03778, ctc_loss=0.0702, over 19644.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2619, pruned_loss=0.04173, ctc_loss=0.0781, over 3815832.83 frames. ], batch size: 59, lr: 8.86e-03, grad_scale: 32.0 +2024-08-27 02:40:43,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=216938.66666666666, ans=0.125 +2024-08-27 02:40:44,272 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.490e+02 1.788e+02 2.181e+02 3.218e+02, threshold=3.576e+02, percent-clipped=0.0 +2024-08-27 02:40:55,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. 
limit=6.0 +2024-08-27 02:40:57,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=216992.0, ans=0.0 +2024-08-27 02:41:04,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216992.0, ans=0.1 +2024-08-27 02:41:20,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217045.33333333334, ans=0.1 +2024-08-27 02:41:22,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217045.33333333334, ans=0.125 +2024-08-27 02:41:25,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=217045.33333333334, ans=0.125 +2024-08-27 02:41:26,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=217045.33333333334, ans=0.0 +2024-08-27 02:41:32,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=217098.66666666666, ans=0.0 +2024-08-27 02:41:38,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=217152.0, ans=0.2 +2024-08-27 02:41:40,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=217152.0, ans=0.2 +2024-08-27 02:41:46,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217152.0, ans=0.1 +2024-08-27 02:41:48,113 INFO [train.py:1114] (3/4) Epoch 17, batch 900, loss[loss=0.1732, simple_loss=0.243, pruned_loss=0.03777, ctc_loss=0.06984, over 19787.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2619, pruned_loss=0.04186, ctc_loss=0.07834, over 3820435.20 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:41:53,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=217205.33333333334, ans=0.2 +2024-08-27 02:41:55,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=217205.33333333334, ans=0.0 +2024-08-27 02:42:00,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217258.66666666666, ans=0.125 +2024-08-27 02:42:07,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=217312.0, ans=0.0 +2024-08-27 02:42:08,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.09 vs. 
limit=15.0 +2024-08-27 02:42:17,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217312.0, ans=0.125 +2024-08-27 02:42:22,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217365.33333333334, ans=0.1 +2024-08-27 02:42:28,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217365.33333333334, ans=0.1 +2024-08-27 02:42:42,352 INFO [train.py:1114] (3/4) Epoch 17, batch 950, loss[loss=0.1699, simple_loss=0.2393, pruned_loss=0.03691, ctc_loss=0.06657, over 19513.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2625, pruned_loss=0.0421, ctc_loss=0.07873, over 3821540.41 frames. ], batch size: 49, lr: 8.85e-03, grad_scale: 32.0 +2024-08-27 02:42:43,219 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.442e+02 1.596e+02 1.963e+02 3.277e+02, threshold=3.193e+02, percent-clipped=0.0 +2024-08-27 02:43:11,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=217525.33333333334, ans=0.025 +2024-08-27 02:44:08,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=217578.66666666666, ans=0.5 +2024-08-27 02:44:46,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=217632.0, ans=0.125 +2024-08-27 02:45:06,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217685.33333333334, ans=0.125 +2024-08-27 02:45:22,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.30 vs. limit=22.5 +2024-08-27 02:45:22,498 INFO [train.py:1114] (3/4) Epoch 17, batch 1000, loss[loss=0.1868, simple_loss=0.264, pruned_loss=0.03879, ctc_loss=0.07999, over 19848.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2629, pruned_loss=0.04238, ctc_loss=0.07923, over 3816977.63 frames. ], batch size: 52, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:45:33,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=217738.66666666666, ans=0.125 +2024-08-27 02:45:34,247 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:45:45,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217792.0, ans=0.125 +2024-08-27 02:45:45,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=217792.0, ans=0.125 +2024-08-27 02:45:48,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217792.0, ans=0.1 +2024-08-27 02:46:13,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-08-27 02:46:28,504 INFO [train.py:1114] (3/4) Epoch 17, batch 1050, loss[loss=0.1801, simple_loss=0.2644, pruned_loss=0.03483, ctc_loss=0.06514, over 19835.00 frames. 
], tot_loss[loss=0.1894, simple_loss=0.2626, pruned_loss=0.04228, ctc_loss=0.07888, over 3823093.81 frames. ], batch size: 57, lr: 8.84e-03, grad_scale: 32.0 +2024-08-27 02:46:29,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.403e+02 1.586e+02 2.025e+02 2.959e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-27 02:46:35,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218005.33333333334, ans=0.1 +2024-08-27 02:46:38,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218058.66666666666, ans=0.1 +2024-08-27 02:47:04,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.94 vs. limit=22.5 +2024-08-27 02:47:05,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=218058.66666666666, ans=0.125 +2024-08-27 02:47:15,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=218165.33333333334, ans=0.125 +2024-08-27 02:47:22,794 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.74 vs. limit=22.5 +2024-08-27 02:47:24,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218165.33333333334, ans=0.125 +2024-08-27 02:47:27,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=218218.66666666666, ans=0.2 +2024-08-27 02:47:37,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=218272.0, ans=0.125 +2024-08-27 02:47:38,615 INFO [train.py:1114] (3/4) Epoch 17, batch 1100, loss[loss=0.1777, simple_loss=0.2494, pruned_loss=0.03842, ctc_loss=0.07311, over 19602.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2623, pruned_loss=0.04216, ctc_loss=0.07863, over 3829569.24 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:47:44,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218272.0, ans=0.1 +2024-08-27 02:47:48,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=218272.0, ans=0.0 +2024-08-27 02:49:09,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=218432.0, ans=0.0 +2024-08-27 02:49:27,448 INFO [train.py:1114] (3/4) Epoch 17, batch 1150, loss[loss=0.1808, simple_loss=0.2574, pruned_loss=0.03856, ctc_loss=0.06786, over 19592.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2627, pruned_loss=0.04238, ctc_loss=0.0791, over 3829957.78 frames. ], batch size: 52, lr: 8.83e-03, grad_scale: 32.0 +2024-08-27 02:49:28,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.459e+02 1.619e+02 1.965e+02 3.390e+02, threshold=3.239e+02, percent-clipped=1.0 +2024-08-27 02:49:28,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.26 vs. 
limit=15.0 +2024-08-27 02:49:34,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=218538.66666666666, ans=0.025 +2024-08-27 02:49:47,010 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.74 vs. limit=15.0 +2024-08-27 02:49:48,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=218645.33333333334, ans=0.025 +2024-08-27 02:49:50,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=218645.33333333334, ans=0.0 +2024-08-27 02:49:50,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=218645.33333333334, ans=15.0 +2024-08-27 02:50:05,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.83 vs. limit=22.5 +2024-08-27 02:50:14,205 INFO [train.py:1114] (3/4) Epoch 17, batch 1200, loss[loss=0.1955, simple_loss=0.2735, pruned_loss=0.04281, ctc_loss=0.07955, over 19837.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.263, pruned_loss=0.0423, ctc_loss=0.07885, over 3825542.18 frames. ], batch size: 57, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:50:42,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=218965.33333333334, ans=0.125 +2024-08-27 02:50:44,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.14 vs. limit=12.0 +2024-08-27 02:50:50,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=218965.33333333334, ans=0.05 +2024-08-27 02:51:28,571 INFO [train.py:1114] (3/4) Epoch 17, batch 1250, loss[loss=0.207, simple_loss=0.2815, pruned_loss=0.04742, ctc_loss=0.09416, over 19542.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2633, pruned_loss=0.04211, ctc_loss=0.07854, over 3844081.90 frames. ], batch size: 61, lr: 8.82e-03, grad_scale: 32.0 +2024-08-27 02:51:29,451 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.488e+02 1.826e+02 2.228e+02 3.440e+02, threshold=3.652e+02, percent-clipped=1.0 +2024-08-27 02:51:34,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=219072.0, ans=0.2 +2024-08-27 02:51:46,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=219125.33333333334, ans=0.125 +2024-08-27 02:51:47,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219125.33333333334, ans=0.125 +2024-08-27 02:51:59,164 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0 +2024-08-27 02:52:36,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=219285.33333333334, ans=0.0 +2024-08-27 02:52:40,156 INFO [train.py:1114] (3/4) Epoch 17, batch 1300, loss[loss=0.215, simple_loss=0.2841, pruned_loss=0.05373, ctc_loss=0.09607, over 18817.00 frames. 
], tot_loss[loss=0.1885, simple_loss=0.2625, pruned_loss=0.04171, ctc_loss=0.07765, over 3846339.73 frames. ], batch size: 76, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:52:48,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=219392.0, ans=0.025 +2024-08-27 02:53:00,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.31 vs. limit=12.0 +2024-08-27 02:53:06,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=219445.33333333334, ans=0.2 +2024-08-27 02:53:17,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=12.0 +2024-08-27 02:53:39,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=219552.0, ans=0.125 +2024-08-27 02:53:41,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.90 vs. limit=22.5 +2024-08-27 02:53:43,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.25 vs. limit=22.5 +2024-08-27 02:53:49,284 INFO [train.py:1114] (3/4) Epoch 17, batch 1350, loss[loss=0.1728, simple_loss=0.2576, pruned_loss=0.03233, ctc_loss=0.05833, over 19783.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2624, pruned_loss=0.04179, ctc_loss=0.07791, over 3856219.38 frames. ], batch size: 54, lr: 8.81e-03, grad_scale: 32.0 +2024-08-27 02:53:50,137 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.487e+02 1.709e+02 2.118e+02 3.687e+02, threshold=3.418e+02, percent-clipped=1.0 +2024-08-27 02:54:19,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=15.0 +2024-08-27 02:54:22,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.23 vs. limit=22.5 +2024-08-27 02:54:24,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=219765.33333333334, ans=0.125 +2024-08-27 02:54:35,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=22.5 +2024-08-27 02:54:38,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=219818.66666666666, ans=0.125 +2024-08-27 02:54:41,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=219818.66666666666, ans=0.0 +2024-08-27 02:54:45,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=219818.66666666666, ans=0.125 +2024-08-27 02:54:47,067 INFO [train.py:1114] (3/4) Epoch 17, batch 1400, loss[loss=0.1879, simple_loss=0.2422, pruned_loss=0.04898, ctc_loss=0.08897, over 19664.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2622, pruned_loss=0.04189, ctc_loss=0.07806, over 3863442.84 frames. 
], batch size: 46, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:54:58,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219925.33333333334, ans=0.125 +2024-08-27 02:54:59,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.88 vs. limit=15.0 +2024-08-27 02:55:39,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=220085.33333333334, ans=0.025 +2024-08-27 02:55:39,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=220085.33333333334, ans=0.125 +2024-08-27 02:55:41,673 INFO [train.py:1114] (3/4) Epoch 17, batch 1450, loss[loss=0.2183, simple_loss=0.2821, pruned_loss=0.05765, ctc_loss=0.09812, over 19651.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2626, pruned_loss=0.04203, ctc_loss=0.07823, over 3860840.47 frames. ], batch size: 63, lr: 8.80e-03, grad_scale: 32.0 +2024-08-27 02:55:42,540 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.445e+02 1.654e+02 2.032e+02 3.496e+02, threshold=3.307e+02, percent-clipped=1.0 +2024-08-27 02:55:49,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220138.66666666666, ans=0.125 +2024-08-27 02:56:08,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=220245.33333333334, ans=0.125 +2024-08-27 02:56:10,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=220245.33333333334, ans=0.125 +2024-08-27 02:56:15,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=220298.66666666666, ans=0.0 +2024-08-27 02:56:29,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=220352.0, ans=0.125 +2024-08-27 02:56:35,353 INFO [train.py:1114] (3/4) Epoch 17, batch 1500, loss[loss=0.2049, simple_loss=0.2795, pruned_loss=0.04624, ctc_loss=0.09474, over 19580.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.263, pruned_loss=0.04216, ctc_loss=0.07855, over 3861253.07 frames. 
], batch size: 57, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:56:35,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=220405.33333333334, ans=0.125 +2024-08-27 02:56:47,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=220458.66666666666, ans=0.2 +2024-08-27 02:57:07,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=220565.33333333334, ans=0.125 +2024-08-27 02:57:09,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=220565.33333333334, ans=0.0 +2024-08-27 02:57:09,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=220565.33333333334, ans=0.2 +2024-08-27 02:57:20,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220618.66666666666, ans=0.125 +2024-08-27 02:57:22,872 INFO [train.py:1114] (3/4) Epoch 17, batch 1550, loss[loss=0.203, simple_loss=0.2743, pruned_loss=0.0483, ctc_loss=0.08761, over 19608.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2628, pruned_loss=0.04219, ctc_loss=0.07886, over 3845897.95 frames. ], batch size: 60, lr: 8.79e-03, grad_scale: 32.0 +2024-08-27 02:57:23,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=220672.0, ans=0.2 +2024-08-27 02:57:23,809 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.433e+02 1.700e+02 2.311e+02 3.923e+02, threshold=3.401e+02, percent-clipped=1.0 +2024-08-27 02:57:30,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=220672.0, ans=0.0 +2024-08-27 02:57:54,052 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:58:00,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=220778.66666666666, ans=0.2 +2024-08-27 02:58:01,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220778.66666666666, ans=0.1 +2024-08-27 02:58:05,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=220778.66666666666, ans=0.2 +2024-08-27 02:58:14,330 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.63 vs. limit=10.0 +2024-08-27 02:58:25,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=220885.33333333334, ans=0.2 +2024-08-27 02:58:27,695 INFO [train.py:1114] (3/4) Epoch 17, batch 1600, loss[loss=0.1809, simple_loss=0.2573, pruned_loss=0.03775, ctc_loss=0.07246, over 19834.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.263, pruned_loss=0.04231, ctc_loss=0.07926, over 3834852.77 frames. ], batch size: 57, lr: 8.78e-03, grad_scale: 32.0 +2024-08-27 02:58:31,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.32 vs. 
limit=12.0 +2024-08-27 02:59:02,466 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 02:59:09,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=220992.0, ans=0.0 +2024-08-27 02:59:26,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=220992.0, ans=0.025 +2024-08-27 03:00:33,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=221098.66666666666, ans=0.0 +2024-08-27 03:00:46,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=15.0 +2024-08-27 03:00:48,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=221152.0, ans=0.125 +2024-08-27 03:00:55,413 INFO [train.py:1114] (3/4) Epoch 17, batch 1650, loss[loss=0.182, simple_loss=0.2663, pruned_loss=0.03501, ctc_loss=0.06916, over 19642.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.263, pruned_loss=0.04232, ctc_loss=0.07931, over 3830114.97 frames. ], batch size: 59, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:00:55,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=221205.33333333334, ans=0.0 +2024-08-27 03:00:58,231 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.431e+02 1.952e+02 2.452e+02 3.980e+02, threshold=3.905e+02, percent-clipped=5.0 +2024-08-27 03:01:06,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.70 vs. limit=15.0 +2024-08-27 03:01:12,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=221258.66666666666, ans=0.2 +2024-08-27 03:01:52,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=221418.66666666666, ans=0.125 +2024-08-27 03:01:57,126 INFO [train.py:1114] (3/4) Epoch 17, batch 1700, loss[loss=0.1613, simple_loss=0.2315, pruned_loss=0.03318, ctc_loss=0.06205, over 19691.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2631, pruned_loss=0.04199, ctc_loss=0.07875, over 3844046.33 frames. ], batch size: 46, lr: 8.77e-03, grad_scale: 32.0 +2024-08-27 03:02:09,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-08-27 03:02:19,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221525.33333333334, ans=0.1 +2024-08-27 03:02:30,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.91 vs. 
limit=15.0 +2024-08-27 03:02:31,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=221632.0, ans=0.125 +2024-08-27 03:02:36,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=221632.0, ans=0.125 +2024-08-27 03:02:37,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=221632.0, ans=0.125 +2024-08-27 03:02:40,810 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0 +2024-08-27 03:02:41,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=221685.33333333334, ans=0.125 +2024-08-27 03:02:44,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=221685.33333333334, ans=0.2 +2024-08-27 03:02:48,261 INFO [train.py:1114] (3/4) Epoch 17, batch 1750, loss[loss=0.1709, simple_loss=0.2377, pruned_loss=0.03745, ctc_loss=0.0729, over 19662.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2625, pruned_loss=0.04173, ctc_loss=0.07821, over 3849771.43 frames. ], batch size: 45, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:02:49,970 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.526e+02 1.896e+02 2.459e+02 4.889e+02, threshold=3.791e+02, percent-clipped=1.0 +2024-08-27 03:03:12,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.61 vs. limit=10.0 +2024-08-27 03:03:13,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=221792.0, ans=0.0 +2024-08-27 03:03:17,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=221792.0, ans=0.025 +2024-08-27 03:03:27,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=221845.33333333334, ans=0.125 +2024-08-27 03:03:31,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=221898.66666666666, ans=0.0 +2024-08-27 03:03:32,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221898.66666666666, ans=0.1 +2024-08-27 03:03:40,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221952.0, ans=0.125 +2024-08-27 03:03:44,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.91 vs. limit=15.0 +2024-08-27 03:03:46,745 INFO [train.py:1114] (3/4) Epoch 17, batch 1800, loss[loss=0.2049, simple_loss=0.2791, pruned_loss=0.04845, ctc_loss=0.08447, over 19626.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2626, pruned_loss=0.04186, ctc_loss=0.07838, over 3852200.51 frames. 
], batch size: 55, lr: 8.76e-03, grad_scale: 16.0 +2024-08-27 03:04:30,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=222272.0, ans=0.125 +2024-08-27 03:04:30,755 INFO [train.py:1114] (3/4) Epoch 17, batch 1850, loss[loss=0.2018, simple_loss=0.2761, pruned_loss=0.04654, ctc_loss=0.08571, over 19583.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2623, pruned_loss=0.04176, ctc_loss=0.07816, over 3856199.12 frames. ], batch size: 57, lr: 8.75e-03, grad_scale: 16.0 +2024-08-27 03:04:32,491 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.484e+02 1.846e+02 2.436e+02 4.218e+02, threshold=3.691e+02, percent-clipped=2.0 +2024-08-27 03:04:44,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=222325.33333333334, ans=0.0 +2024-08-27 03:04:57,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222432.0, ans=0.1 +2024-08-27 03:05:00,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=222432.0, ans=0.2 +2024-08-27 03:05:08,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-08-27 03:05:14,606 INFO [train.py:1114] (3/4) Epoch 17, batch 1900, loss[loss=0.2064, simple_loss=0.2857, pruned_loss=0.04699, ctc_loss=0.08309, over 19666.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2629, pruned_loss=0.04186, ctc_loss=0.07843, over 3861529.04 frames. ], batch size: 59, lr: 8.75e-03, grad_scale: 16.0 +2024-08-27 03:05:21,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=222538.66666666666, ans=0.0 +2024-08-27 03:05:29,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=222592.0, ans=0.5 +2024-08-27 03:05:29,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=222592.0, ans=0.2 +2024-08-27 03:05:32,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=222645.33333333334, ans=0.125 +2024-08-27 03:05:39,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=222645.33333333334, ans=0.05 +2024-08-27 03:05:53,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222752.0, ans=0.1 +2024-08-27 03:05:55,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.61 vs. limit=22.5 +2024-08-27 03:06:00,577 INFO [train.py:1114] (3/4) Epoch 17, batch 1950, loss[loss=0.1778, simple_loss=0.2541, pruned_loss=0.036, ctc_loss=0.0735, over 19590.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2639, pruned_loss=0.042, ctc_loss=0.07873, over 3870508.85 frames. 
], batch size: 52, lr: 8.74e-03, grad_scale: 16.0 +2024-08-27 03:06:02,415 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.462e+02 1.715e+02 2.122e+02 4.504e+02, threshold=3.430e+02, percent-clipped=1.0 +2024-08-27 03:06:02,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222805.33333333334, ans=0.1 +2024-08-27 03:06:08,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.95 vs. limit=22.5 +2024-08-27 03:06:10,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222858.66666666666, ans=0.125 +2024-08-27 03:06:12,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.50 vs. limit=15.0 +2024-08-27 03:06:28,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.18 vs. limit=15.0 +2024-08-27 03:06:28,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.45 vs. limit=12.0 +2024-08-27 03:06:30,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=222965.33333333334, ans=0.025 +2024-08-27 03:06:48,038 INFO [train.py:1114] (3/4) Epoch 17, batch 2000, loss[loss=0.1705, simple_loss=0.239, pruned_loss=0.03697, ctc_loss=0.07042, over 19678.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2646, pruned_loss=0.04245, ctc_loss=0.0794, over 3855534.91 frames. ], batch size: 45, lr: 8.74e-03, grad_scale: 32.0 +2024-08-27 03:07:59,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=223125.33333333334, ans=0.125 +2024-08-27 03:08:04,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=223125.33333333334, ans=0.125 +2024-08-27 03:08:36,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=223125.33333333334, ans=0.0 +2024-08-27 03:08:38,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=223125.33333333334, ans=0.125 +2024-08-27 03:08:40,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=223178.66666666666, ans=0.0 +2024-08-27 03:08:42,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=223178.66666666666, ans=0.2 +2024-08-27 03:09:30,213 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:09:41,592 INFO [train.py:1114] (3/4) Epoch 17, batch 2050, loss[loss=0.1633, simple_loss=0.236, pruned_loss=0.03302, ctc_loss=0.0616, over 19740.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2637, pruned_loss=0.04239, ctc_loss=0.0793, over 3852671.33 frames. 
], batch size: 47, lr: 8.73e-03, grad_scale: 32.0 +2024-08-27 03:09:43,286 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.433e+02 1.718e+02 2.194e+02 3.489e+02, threshold=3.436e+02, percent-clipped=1.0 +2024-08-27 03:09:54,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=223338.66666666666, ans=0.2 +2024-08-27 03:10:33,120 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:10:33,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=223445.33333333334, ans=0.125 +2024-08-27 03:10:48,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223498.66666666666, ans=0.125 +2024-08-27 03:10:58,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.66 vs. limit=10.0 +2024-08-27 03:12:46,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=223552.0, ans=0.0 +2024-08-27 03:13:00,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223552.0, ans=0.1 +2024-08-27 03:13:17,692 INFO [train.py:1114] (3/4) Epoch 17, batch 2100, loss[loss=0.18, simple_loss=0.2585, pruned_loss=0.03698, ctc_loss=0.06857, over 19778.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2632, pruned_loss=0.04198, ctc_loss=0.07848, over 3859898.41 frames. ], batch size: 54, lr: 8.73e-03, grad_scale: 32.0 +2024-08-27 03:13:37,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.77 vs. limit=22.5 +2024-08-27 03:13:38,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=223605.33333333334, ans=0.125 +2024-08-27 03:16:13,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=223712.0, ans=0.2 +2024-08-27 03:17:15,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=223712.0, ans=0.125 +2024-08-27 03:19:11,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=223712.0, ans=0.125 +2024-08-27 03:22:44,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=223765.33333333334, ans=0.125 +2024-08-27 03:22:44,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.93 vs. limit=10.0 +2024-08-27 03:23:47,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=223818.66666666666, ans=10.0 +2024-08-27 03:24:47,096 INFO [train.py:1114] (3/4) Epoch 17, batch 2150, loss[loss=0.1744, simple_loss=0.2476, pruned_loss=0.0371, ctc_loss=0.06771, over 19843.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2625, pruned_loss=0.04168, ctc_loss=0.07794, over 3871519.46 frames. 
], batch size: 52, lr: 8.72e-03, grad_scale: 32.0 +2024-08-27 03:24:49,689 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.181e+02 1.464e+02 1.691e+02 2.317e+02 5.931e+02, threshold=3.382e+02, percent-clipped=6.0 +2024-08-27 03:25:53,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.98 vs. limit=15.0 +2024-08-27 03:26:29,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=224085.33333333334, ans=0.125 +2024-08-27 03:26:31,821 INFO [train.py:1114] (3/4) Epoch 17, batch 2200, loss[loss=0.1939, simple_loss=0.2757, pruned_loss=0.04044, ctc_loss=0.0778, over 19578.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2624, pruned_loss=0.04162, ctc_loss=0.07782, over 3870160.92 frames. ], batch size: 57, lr: 8.72e-03, grad_scale: 32.0 +2024-08-27 03:26:47,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=224192.0, ans=0.05 +2024-08-27 03:26:50,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224192.0, ans=0.125 +2024-08-27 03:26:51,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.64 vs. limit=6.0 +2024-08-27 03:27:09,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.37 vs. limit=12.0 +2024-08-27 03:27:26,232 INFO [train.py:1114] (3/4) Epoch 17, batch 2250, loss[loss=0.2062, simple_loss=0.2814, pruned_loss=0.04757, ctc_loss=0.08955, over 19621.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2628, pruned_loss=0.04187, ctc_loss=0.07819, over 3869960.68 frames. ], batch size: 55, lr: 8.71e-03, grad_scale: 32.0 +2024-08-27 03:27:29,879 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.517e+02 1.774e+02 2.256e+02 3.791e+02, threshold=3.548e+02, percent-clipped=1.0 +2024-08-27 03:27:36,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=224405.33333333334, ans=0.2 +2024-08-27 03:27:47,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0 +2024-08-27 03:27:52,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224512.0, ans=0.125 +2024-08-27 03:28:26,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224565.33333333334, ans=0.125 +2024-08-27 03:28:31,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=224565.33333333334, ans=0.0 +2024-08-27 03:29:04,917 INFO [train.py:1114] (3/4) Epoch 17, batch 2300, loss[loss=0.1763, simple_loss=0.2453, pruned_loss=0.03898, ctc_loss=0.07325, over 19502.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2621, pruned_loss=0.04196, ctc_loss=0.07835, over 3863663.04 frames. 
], batch size: 49, lr: 8.71e-03, grad_scale: 16.0 +2024-08-27 03:29:06,756 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:29:46,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=224725.33333333334, ans=0.0 +2024-08-27 03:36:15,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=224885.33333333334, ans=0.125 +2024-08-27 03:36:19,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=224885.33333333334, ans=0.125 +2024-08-27 03:36:49,581 INFO [train.py:1114] (3/4) Epoch 17, batch 2350, loss[loss=0.231, simple_loss=0.2908, pruned_loss=0.06391, ctc_loss=0.1083, over 19646.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.262, pruned_loss=0.04214, ctc_loss=0.07841, over 3866110.76 frames. ], batch size: 63, lr: 8.70e-03, grad_scale: 16.0 +2024-08-27 03:37:01,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.439e+02 1.647e+02 2.102e+02 4.091e+02, threshold=3.295e+02, percent-clipped=1.0 +2024-08-27 03:37:12,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=224938.66666666666, ans=0.05 +2024-08-27 03:37:12,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=224938.66666666666, ans=0.05 +2024-08-27 03:37:41,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.61 vs. limit=15.0 +2024-08-27 03:38:20,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.06 vs. limit=15.0 +2024-08-27 03:38:30,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.33 vs. limit=15.0 +2024-08-27 03:39:26,246 INFO [train.py:1114] (3/4) Epoch 17, batch 2400, loss[loss=0.2187, simple_loss=0.2909, pruned_loss=0.05357, ctc_loss=0.09855, over 19290.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2645, pruned_loss=0.04307, ctc_loss=0.07986, over 3860060.04 frames. ], batch size: 71, lr: 8.70e-03, grad_scale: 32.0 +2024-08-27 03:41:14,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=225258.66666666666, ans=0.125 +2024-08-27 03:42:55,059 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 03:43:29,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=225365.33333333334, ans=10.0 +2024-08-27 03:43:57,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=225418.66666666666, ans=0.025 +2024-08-27 03:43:57,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=225418.66666666666, ans=0.025 +2024-08-27 03:44:04,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=7.05 vs. 
limit=12.0 +2024-08-27 03:44:11,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.37 vs. limit=12.0 +2024-08-27 03:44:22,614 INFO [train.py:1114] (3/4) Epoch 17, batch 2450, loss[loss=0.2439, simple_loss=0.2939, pruned_loss=0.06978, ctc_loss=0.1356, over 13396.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2678, pruned_loss=0.04515, ctc_loss=0.08417, over 3730534.66 frames. ], batch size: 140, lr: 8.69e-03, grad_scale: 32.0 +2024-08-27 03:44:26,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=225472.0, ans=0.125 +2024-08-27 03:44:30,552 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.519e+02 1.805e+02 2.064e+02 2.900e+02, threshold=3.609e+02, percent-clipped=0.0 +2024-08-27 03:46:03,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=225578.66666666666, ans=0.0 +2024-08-27 03:47:09,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0 +2024-08-27 03:47:36,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=225632.0, ans=15.0 +2024-08-27 03:47:36,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.22 vs. limit=15.0 +2024-08-27 03:50:08,487 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.1768, simple_loss=0.244, pruned_loss=0.0392, ctc_loss=0.07765, over 19798.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.244, pruned_loss=0.0392, ctc_loss=0.07765, over 19798.00 frames. ], batch size: 49, lr: 8.44e-03, grad_scale: 32.0 +2024-08-27 03:50:08,488 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-27 03:56:59,324 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1731, simple_loss=0.2653, pruned_loss=0.0303, ctc_loss=0.05087, over 944034.00 frames. +2024-08-27 03:56:59,325 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB +2024-08-27 03:57:36,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=225680.0, ans=0.125 +2024-08-27 03:58:13,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=225733.33333333334, ans=0.125 +2024-08-27 03:59:04,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.28 vs. limit=10.0 +2024-08-27 03:59:32,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=225893.33333333334, ans=0.0 +2024-08-27 03:59:37,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225893.33333333334, ans=0.125 +2024-08-27 03:59:40,031 INFO [train.py:1114] (3/4) Epoch 18, batch 50, loss[loss=0.1735, simple_loss=0.2411, pruned_loss=0.03881, ctc_loss=0.07064, over 19711.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2635, pruned_loss=0.04163, ctc_loss=0.07743, over 844773.76 frames. 
], batch size: 47, lr: 8.44e-03, grad_scale: 32.0 +2024-08-27 03:59:52,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.52 vs. limit=22.5 +2024-08-27 03:59:52,917 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.620e+02 1.870e+02 2.127e+02 3.474e+02, threshold=3.740e+02, percent-clipped=0.0 +2024-08-27 03:59:53,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=226000.0, ans=0.125 +2024-08-27 04:00:08,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226106.66666666666, ans=0.125 +2024-08-27 04:00:11,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226106.66666666666, ans=0.125 +2024-08-27 04:00:11,458 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:00:16,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.07 vs. limit=15.0 +2024-08-27 04:00:27,440 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=12.0 +2024-08-27 04:00:34,081 INFO [train.py:1114] (3/4) Epoch 18, batch 100, loss[loss=0.1758, simple_loss=0.2528, pruned_loss=0.03569, ctc_loss=0.06846, over 19730.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2658, pruned_loss=0.04231, ctc_loss=0.07925, over 1499130.94 frames. ], batch size: 51, lr: 8.43e-03, grad_scale: 32.0 +2024-08-27 04:00:34,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=226213.33333333334, ans=0.5 +2024-08-27 04:05:38,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226320.0, ans=0.0 +2024-08-27 04:05:58,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226426.66666666666, ans=0.1 +2024-08-27 04:05:59,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=226480.0, ans=0.0 +2024-08-27 04:06:00,486 INFO [train.py:1114] (3/4) Epoch 18, batch 150, loss[loss=0.1672, simple_loss=0.2387, pruned_loss=0.03482, ctc_loss=0.06503, over 19710.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.263, pruned_loss=0.04138, ctc_loss=0.07747, over 2028556.29 frames. 
], batch size: 47, lr: 8.43e-03, grad_scale: 32.0 +2024-08-27 04:06:11,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-27 04:06:16,211 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.462e+02 1.764e+02 2.186e+02 3.977e+02, threshold=3.529e+02, percent-clipped=1.0 +2024-08-27 04:06:19,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226533.33333333334, ans=0.0 +2024-08-27 04:06:26,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226586.66666666666, ans=0.125 +2024-08-27 04:06:38,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=226640.0, ans=0.025 +2024-08-27 04:06:42,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=226693.33333333334, ans=0.0 +2024-08-27 04:06:49,810 INFO [train.py:1114] (3/4) Epoch 18, batch 200, loss[loss=0.2017, simple_loss=0.2796, pruned_loss=0.04425, ctc_loss=0.08821, over 18383.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2622, pruned_loss=0.04105, ctc_loss=0.07674, over 2435908.49 frames. ], batch size: 86, lr: 8.42e-03, grad_scale: 32.0 +2024-08-27 04:06:58,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226800.0, ans=0.1 +2024-08-27 04:07:05,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.67 vs. limit=15.0 +2024-08-27 04:07:35,896 INFO [train.py:1114] (3/4) Epoch 18, batch 250, loss[loss=0.1809, simple_loss=0.2673, pruned_loss=0.0342, ctc_loss=0.0653, over 19385.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2614, pruned_loss=0.04049, ctc_loss=0.07557, over 2755239.67 frames. ], batch size: 67, lr: 8.42e-03, grad_scale: 32.0 +2024-08-27 04:07:40,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.64 vs. 
limit=15.0 +2024-08-27 04:07:44,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=227013.33333333334, ans=0.0 +2024-08-27 04:07:46,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=227066.66666666666, ans=0.0 +2024-08-27 04:07:50,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=227066.66666666666, ans=0.2 +2024-08-27 04:07:50,823 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.244e+02 1.521e+02 1.873e+02 2.606e+02 4.367e+02, threshold=3.746e+02, percent-clipped=8.0 +2024-08-27 04:08:03,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=227120.0, ans=0.0 +2024-08-27 04:08:15,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227173.33333333334, ans=0.125 +2024-08-27 04:08:17,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227173.33333333334, ans=0.1 +2024-08-27 04:08:31,210 INFO [train.py:1114] (3/4) Epoch 18, batch 300, loss[loss=0.2037, simple_loss=0.2757, pruned_loss=0.04828, ctc_loss=0.08789, over 19510.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2609, pruned_loss=0.04038, ctc_loss=0.07549, over 3000722.02 frames. ], batch size: 61, lr: 8.41e-03, grad_scale: 32.0 +2024-08-27 04:08:34,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.97 vs. limit=15.0 +2024-08-27 04:08:42,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=227333.33333333334, ans=0.1 +2024-08-27 04:08:42,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=227333.33333333334, ans=0.2 +2024-08-27 04:08:50,169 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:09:16,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227546.66666666666, ans=0.1 +2024-08-27 04:09:17,357 INFO [train.py:1114] (3/4) Epoch 18, batch 350, loss[loss=0.1672, simple_loss=0.2338, pruned_loss=0.03626, ctc_loss=0.0705, over 19798.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2616, pruned_loss=0.04066, ctc_loss=0.07597, over 3190837.21 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-27 04:09:23,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=18.68 vs. 
limit=15.0 +2024-08-27 04:09:30,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.460e+02 1.643e+02 1.956e+02 3.165e+02, threshold=3.287e+02, percent-clipped=0.0 +2024-08-27 04:09:38,956 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:09:44,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=227706.66666666666, ans=0.125 +2024-08-27 04:10:14,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=227706.66666666666, ans=0.0 +2024-08-27 04:10:16,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=227706.66666666666, ans=0.2 +2024-08-27 04:10:16,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227706.66666666666, ans=0.125 +2024-08-27 04:10:33,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=227760.0, ans=0.125 +2024-08-27 04:10:35,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=227760.0, ans=0.125 +2024-08-27 04:10:40,378 INFO [train.py:1114] (3/4) Epoch 18, batch 400, loss[loss=0.188, simple_loss=0.2736, pruned_loss=0.03744, ctc_loss=0.06889, over 19478.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2614, pruned_loss=0.04057, ctc_loss=0.07581, over 3342300.72 frames. ], batch size: 54, lr: 8.40e-03, grad_scale: 32.0 +2024-08-27 04:12:21,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227973.33333333334, ans=0.125 +2024-08-27 04:12:31,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=228026.66666666666, ans=0.125 +2024-08-27 04:12:32,856 INFO [train.py:1114] (3/4) Epoch 18, batch 450, loss[loss=0.185, simple_loss=0.2635, pruned_loss=0.03864, ctc_loss=0.07327, over 19604.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2617, pruned_loss=0.04076, ctc_loss=0.0762, over 3450789.67 frames. ], batch size: 55, lr: 8.40e-03, grad_scale: 32.0 +2024-08-27 04:12:33,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=228080.0, ans=0.125 +2024-08-27 04:12:54,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.00 vs. 
limit=6.0 +2024-08-27 04:12:59,759 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.395e+02 1.673e+02 2.305e+02 3.910e+02, threshold=3.347e+02, percent-clipped=3.0 +2024-08-27 04:13:04,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=228186.66666666666, ans=0.125 +2024-08-27 04:13:17,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=228240.0, ans=0.125 +2024-08-27 04:13:28,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=228293.33333333334, ans=0.0 +2024-08-27 04:13:32,848 INFO [train.py:1114] (3/4) Epoch 18, batch 500, loss[loss=0.1884, simple_loss=0.2734, pruned_loss=0.03634, ctc_loss=0.07673, over 19664.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.261, pruned_loss=0.04049, ctc_loss=0.07583, over 3545366.65 frames. ], batch size: 63, lr: 8.39e-03, grad_scale: 32.0 +2024-08-27 04:13:39,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=228346.66666666666, ans=0.025 +2024-08-27 04:13:43,223 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:13:46,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=228400.0, ans=0.0 +2024-08-27 04:13:50,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=228400.0, ans=0.025 +2024-08-27 04:13:50,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.80 vs. limit=22.5 +2024-08-27 04:14:00,797 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:14:17,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=228560.0, ans=0.2 +2024-08-27 04:14:20,876 INFO [train.py:1114] (3/4) Epoch 18, batch 550, loss[loss=0.2108, simple_loss=0.2826, pruned_loss=0.05077, ctc_loss=0.09371, over 19350.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2611, pruned_loss=0.0406, ctc_loss=0.07603, over 3608466.68 frames. ], batch size: 71, lr: 8.39e-03, grad_scale: 32.0 +2024-08-27 04:14:34,517 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.436e+02 1.681e+02 2.031e+02 3.505e+02, threshold=3.363e+02, percent-clipped=1.0 +2024-08-27 04:14:37,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=228666.66666666666, ans=0.0 +2024-08-27 04:14:41,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228720.0, ans=0.1 +2024-08-27 04:14:42,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-08-27 04:15:14,806 INFO [train.py:1114] (3/4) Epoch 18, batch 600, loss[loss=0.184, simple_loss=0.2573, pruned_loss=0.04001, ctc_loss=0.07669, over 19320.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.261, pruned_loss=0.04051, ctc_loss=0.07575, over 3665320.01 frames. 
], batch size: 67, lr: 8.38e-03, grad_scale: 32.0 +2024-08-27 04:15:18,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=228880.0, ans=0.125 +2024-08-27 04:15:20,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-08-27 04:15:38,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.37 vs. limit=6.0 +2024-08-27 04:15:42,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=228986.66666666666, ans=0.125 +2024-08-27 04:15:54,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=229040.0, ans=0.0 +2024-08-27 04:16:55,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=229040.0, ans=0.125 +2024-08-27 04:17:06,969 INFO [train.py:1114] (3/4) Epoch 18, batch 650, loss[loss=0.1826, simple_loss=0.261, pruned_loss=0.03786, ctc_loss=0.07105, over 19775.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2607, pruned_loss=0.04039, ctc_loss=0.07544, over 3715876.17 frames. ], batch size: 54, lr: 8.38e-03, grad_scale: 32.0 +2024-08-27 04:17:12,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=229146.66666666666, ans=0.025 +2024-08-27 04:17:14,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=229146.66666666666, ans=0.0 +2024-08-27 04:17:20,100 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.567e+02 1.955e+02 2.726e+02 4.189e+02, threshold=3.909e+02, percent-clipped=6.0 +2024-08-27 04:17:22,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229200.0, ans=0.125 +2024-08-27 04:17:22,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=229200.0, ans=0.025 +2024-08-27 04:17:30,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=229253.33333333334, ans=0.125 +2024-08-27 04:17:39,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=229306.66666666666, ans=0.0 +2024-08-27 04:17:45,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=229360.0, ans=0.2 +2024-08-27 04:18:45,827 INFO [train.py:1114] (3/4) Epoch 18, batch 700, loss[loss=0.1621, simple_loss=0.2413, pruned_loss=0.03023, ctc_loss=0.05624, over 19735.00 frames. ], tot_loss[loss=0.186, simple_loss=0.261, pruned_loss=0.04038, ctc_loss=0.07551, over 3746552.88 frames. 
], batch size: 51, lr: 8.37e-03, grad_scale: 32.0 +2024-08-27 04:18:53,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=229413.33333333334, ans=0.125 +2024-08-27 04:18:56,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229413.33333333334, ans=0.0 +2024-08-27 04:19:35,548 INFO [train.py:1114] (3/4) Epoch 18, batch 750, loss[loss=0.1727, simple_loss=0.256, pruned_loss=0.0321, ctc_loss=0.06333, over 19502.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2606, pruned_loss=0.04056, ctc_loss=0.07593, over 3773339.17 frames. ], batch size: 54, lr: 8.37e-03, grad_scale: 16.0 +2024-08-27 04:19:40,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=229680.0, ans=0.2 +2024-08-27 04:19:49,144 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.399e+02 1.632e+02 2.193e+02 3.721e+02, threshold=3.263e+02, percent-clipped=0.0 +2024-08-27 04:19:54,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=229786.66666666666, ans=0.125 +2024-08-27 04:20:07,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229840.0, ans=0.125 +2024-08-27 04:20:22,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=229893.33333333334, ans=0.0 +2024-08-27 04:20:25,653 INFO [train.py:1114] (3/4) Epoch 18, batch 800, loss[loss=0.1693, simple_loss=0.2389, pruned_loss=0.03623, ctc_loss=0.06795, over 19800.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2609, pruned_loss=0.04075, ctc_loss=0.07625, over 3794534.88 frames. ], batch size: 49, lr: 8.37e-03, grad_scale: 32.0 +2024-08-27 04:20:59,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=230000.0, ans=0.0 +2024-08-27 04:21:00,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.95 vs. limit=15.0 +2024-08-27 04:21:27,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=230160.0, ans=0.125 +2024-08-27 04:21:28,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=230160.0, ans=0.0 +2024-08-27 04:21:31,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.24 vs. limit=22.5 +2024-08-27 04:21:33,142 INFO [train.py:1114] (3/4) Epoch 18, batch 850, loss[loss=0.1913, simple_loss=0.2681, pruned_loss=0.04198, ctc_loss=0.07657, over 19618.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.26, pruned_loss=0.04046, ctc_loss=0.07557, over 3812874.07 frames. ], batch size: 59, lr: 8.36e-03, grad_scale: 32.0 +2024-08-27 04:21:36,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.31 vs. 
limit=6.0 +2024-08-27 04:21:39,927 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:21:57,966 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.452e+02 1.736e+02 2.395e+02 3.551e+02, threshold=3.472e+02, percent-clipped=2.0 +2024-08-27 04:22:02,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230320.0, ans=0.1 +2024-08-27 04:22:09,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=230320.0, ans=0.09899494936611666 +2024-08-27 04:22:15,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230373.33333333334, ans=0.1 +2024-08-27 04:22:23,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=230426.66666666666, ans=0.125 +2024-08-27 04:22:24,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230426.66666666666, ans=0.125 +2024-08-27 04:22:31,217 INFO [train.py:1114] (3/4) Epoch 18, batch 900, loss[loss=0.1597, simple_loss=0.2325, pruned_loss=0.0312, ctc_loss=0.0614, over 19824.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2605, pruned_loss=0.04104, ctc_loss=0.07642, over 3817990.98 frames. ], batch size: 49, lr: 8.36e-03, grad_scale: 32.0 +2024-08-27 04:22:35,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=230480.0, ans=0.125 +2024-08-27 04:22:53,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=230586.66666666666, ans=0.2 +2024-08-27 04:22:53,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=230586.66666666666, ans=0.0 +2024-08-27 04:23:05,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230640.0, ans=0.1 +2024-08-27 04:23:08,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230693.33333333334, ans=0.125 +2024-08-27 04:23:17,864 INFO [train.py:1114] (3/4) Epoch 18, batch 950, loss[loss=0.1695, simple_loss=0.2442, pruned_loss=0.03395, ctc_loss=0.06738, over 19489.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2608, pruned_loss=0.04112, ctc_loss=0.0767, over 3820036.99 frames. ], batch size: 49, lr: 8.35e-03, grad_scale: 32.0 +2024-08-27 04:23:24,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=230746.66666666666, ans=0.0 +2024-08-27 04:23:36,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.393e+02 1.674e+02 2.227e+02 4.492e+02, threshold=3.349e+02, percent-clipped=5.0 +2024-08-27 04:23:44,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.93 vs. 
limit=10.0 +2024-08-27 04:23:54,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=230853.33333333334, ans=0.2 +2024-08-27 04:23:55,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=230853.33333333334, ans=0.2 +2024-08-27 04:23:59,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=230906.66666666666, ans=0.0 +2024-08-27 04:24:15,462 INFO [train.py:1114] (3/4) Epoch 18, batch 1000, loss[loss=0.1822, simple_loss=0.2584, pruned_loss=0.0385, ctc_loss=0.07226, over 19864.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2619, pruned_loss=0.04136, ctc_loss=0.07706, over 3816317.46 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 32.0 +2024-08-27 04:24:30,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=231066.66666666666, ans=0.015 +2024-08-27 04:24:39,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=231120.0, ans=0.125 +2024-08-27 04:24:54,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=231173.33333333334, ans=0.125 +2024-08-27 04:25:03,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=231226.66666666666, ans=0.2 +2024-08-27 04:25:11,435 INFO [train.py:1114] (3/4) Epoch 18, batch 1050, loss[loss=0.1934, simple_loss=0.2737, pruned_loss=0.04002, ctc_loss=0.08242, over 19827.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2613, pruned_loss=0.0411, ctc_loss=0.07678, over 3823782.84 frames. ], batch size: 57, lr: 8.34e-03, grad_scale: 32.0 +2024-08-27 04:25:12,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.44 vs. limit=6.0 +2024-08-27 04:25:22,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.50 vs. limit=15.0 +2024-08-27 04:25:25,225 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.375e+02 1.549e+02 1.865e+02 3.480e+02, threshold=3.097e+02, percent-clipped=1.0 +2024-08-27 04:25:32,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=231386.66666666666, ans=0.0 +2024-08-27 04:25:38,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-08-27 04:25:41,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-08-27 04:25:53,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231493.33333333334, ans=0.0 +2024-08-27 04:25:57,498 INFO [train.py:1114] (3/4) Epoch 18, batch 1100, loss[loss=0.1794, simple_loss=0.2567, pruned_loss=0.03726, ctc_loss=0.06875, over 19595.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2612, pruned_loss=0.04089, ctc_loss=0.07642, over 3831868.12 frames. 
], batch size: 52, lr: 8.34e-03, grad_scale: 16.0 +2024-08-27 04:25:58,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=231546.66666666666, ans=0.0 +2024-08-27 04:26:48,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231706.66666666666, ans=0.0 +2024-08-27 04:27:31,641 INFO [train.py:1114] (3/4) Epoch 18, batch 1150, loss[loss=0.1681, simple_loss=0.2521, pruned_loss=0.03039, ctc_loss=0.05824, over 19585.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2611, pruned_loss=0.04088, ctc_loss=0.07637, over 3828941.83 frames. ], batch size: 52, lr: 8.33e-03, grad_scale: 16.0 +2024-08-27 04:27:44,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=231866.66666666666, ans=0.125 +2024-08-27 04:27:50,644 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.426e+02 1.640e+02 2.078e+02 3.185e+02, threshold=3.280e+02, percent-clipped=3.0 +2024-08-27 04:27:56,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=231866.66666666666, ans=0.04949747468305833 +2024-08-27 04:28:32,345 INFO [train.py:1114] (3/4) Epoch 18, batch 1200, loss[loss=0.1905, simple_loss=0.2714, pruned_loss=0.03985, ctc_loss=0.07464, over 19846.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2619, pruned_loss=0.04109, ctc_loss=0.0767, over 3824642.10 frames. ], batch size: 57, lr: 8.33e-03, grad_scale: 32.0 +2024-08-27 04:28:45,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=232133.33333333334, ans=0.0 +2024-08-27 04:28:56,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=232186.66666666666, ans=0.0 +2024-08-27 04:29:07,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=232240.0, ans=0.0 +2024-08-27 04:29:18,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=232293.33333333334, ans=0.0 +2024-08-27 04:29:19,798 INFO [train.py:1114] (3/4) Epoch 18, batch 1250, loss[loss=0.2069, simple_loss=0.2812, pruned_loss=0.04867, ctc_loss=0.08805, over 19519.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2624, pruned_loss=0.04114, ctc_loss=0.07666, over 3842414.52 frames. ], batch size: 61, lr: 8.32e-03, grad_scale: 32.0 +2024-08-27 04:29:34,595 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.452e+02 1.815e+02 2.295e+02 4.200e+02, threshold=3.630e+02, percent-clipped=5.0 +2024-08-27 04:29:51,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=232506.66666666666, ans=0.0 +2024-08-27 04:30:22,931 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.45 vs. limit=15.0 +2024-08-27 04:30:43,396 INFO [train.py:1114] (3/4) Epoch 18, batch 1300, loss[loss=0.1878, simple_loss=0.2601, pruned_loss=0.04166, ctc_loss=0.0805, over 18885.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2619, pruned_loss=0.04105, ctc_loss=0.07628, over 3845881.93 frames. 
], batch size: 76, lr: 8.32e-03, grad_scale: 16.0 +2024-08-27 04:31:10,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=232720.0, ans=0.05 +2024-08-27 04:31:24,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=232826.66666666666, ans=0.125 +2024-08-27 04:31:28,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=232826.66666666666, ans=0.0 +2024-08-27 04:31:30,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=232826.66666666666, ans=0.0 +2024-08-27 04:31:33,098 INFO [train.py:1114] (3/4) Epoch 18, batch 1350, loss[loss=0.1851, simple_loss=0.2614, pruned_loss=0.03969, ctc_loss=0.07332, over 19779.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2613, pruned_loss=0.04063, ctc_loss=0.0756, over 3857636.80 frames. ], batch size: 54, lr: 8.31e-03, grad_scale: 16.0 +2024-08-27 04:31:43,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232933.33333333334, ans=0.1 +2024-08-27 04:31:46,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.30 vs. limit=22.5 +2024-08-27 04:31:48,898 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.387e+02 1.655e+02 2.106e+02 4.022e+02, threshold=3.310e+02, percent-clipped=4.0 +2024-08-27 04:32:19,573 INFO [train.py:1114] (3/4) Epoch 18, batch 1400, loss[loss=0.1874, simple_loss=0.2522, pruned_loss=0.04523, ctc_loss=0.08053, over 19661.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2613, pruned_loss=0.04069, ctc_loss=0.07569, over 3864000.41 frames. ], batch size: 46, lr: 8.31e-03, grad_scale: 16.0 +2024-08-27 04:32:19,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=233146.66666666666, ans=0.0 +2024-08-27 04:32:20,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0 +2024-08-27 04:32:32,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233146.66666666666, ans=0.1 +2024-08-27 04:32:43,427 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-27 04:32:58,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=233253.33333333334, ans=0.125 +2024-08-27 04:33:20,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=233306.66666666666, ans=0.125 +2024-08-27 04:33:20,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=233306.66666666666, ans=0.125 +2024-08-27 04:33:23,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.52 vs. limit=22.5 +2024-08-27 04:33:36,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.63 vs. 
limit=15.0 +2024-08-27 04:33:40,132 INFO [train.py:1114] (3/4) Epoch 18, batch 1450, loss[loss=0.1955, simple_loss=0.2777, pruned_loss=0.04116, ctc_loss=0.07772, over 19631.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2623, pruned_loss=0.04099, ctc_loss=0.07615, over 3862140.30 frames. ], batch size: 63, lr: 8.30e-03, grad_scale: 16.0 +2024-08-27 04:34:01,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.44 vs. limit=15.0 +2024-08-27 04:34:13,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=233466.66666666666, ans=0.125 +2024-08-27 04:34:14,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.14 vs. limit=15.0 +2024-08-27 04:34:15,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.17 vs. limit=15.0 +2024-08-27 04:34:21,009 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.457e+02 1.713e+02 1.981e+02 3.848e+02, threshold=3.426e+02, percent-clipped=1.0 +2024-08-27 04:34:34,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=233520.0, ans=0.125 +2024-08-27 04:34:47,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=233573.33333333334, ans=0.0 +2024-08-27 04:35:28,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=233626.66666666666, ans=0.025 +2024-08-27 04:35:52,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=233626.66666666666, ans=0.125 +2024-08-27 04:36:59,902 INFO [train.py:1114] (3/4) Epoch 18, batch 1500, loss[loss=0.1728, simple_loss=0.2476, pruned_loss=0.03557, ctc_loss=0.06729, over 19582.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2623, pruned_loss=0.04096, ctc_loss=0.07613, over 3862528.34 frames. ], batch size: 57, lr: 8.30e-03, grad_scale: 16.0 +2024-08-27 04:37:34,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=233680.0, ans=0.025 +2024-08-27 04:37:38,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=233733.33333333334, ans=0.2 +2024-08-27 04:38:49,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=233893.33333333334, ans=0.04949747468305833 +2024-08-27 04:39:00,370 INFO [train.py:1114] (3/4) Epoch 18, batch 1550, loss[loss=0.2135, simple_loss=0.2854, pruned_loss=0.05159, ctc_loss=0.09632, over 19583.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2625, pruned_loss=0.04117, ctc_loss=0.07683, over 3847134.59 frames. 
], batch size: 60, lr: 8.30e-03, grad_scale: 16.0 +2024-08-27 04:39:23,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=233946.66666666666, ans=0.025 +2024-08-27 04:39:35,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=234000.0, ans=0.125 +2024-08-27 04:39:35,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=234000.0, ans=0.125 +2024-08-27 04:39:39,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234000.0, ans=0.125 +2024-08-27 04:39:39,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=234000.0, ans=0.0 +2024-08-27 04:39:51,775 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.454e+02 1.713e+02 2.109e+02 3.815e+02, threshold=3.426e+02, percent-clipped=1.0 +2024-08-27 04:39:55,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234053.33333333334, ans=0.1 +2024-08-27 04:40:10,046 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.55 vs. limit=15.0 +2024-08-27 04:40:25,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=234160.0, ans=0.025 +2024-08-27 04:40:35,462 INFO [train.py:1114] (3/4) Epoch 18, batch 1600, loss[loss=0.196, simple_loss=0.2728, pruned_loss=0.04364, ctc_loss=0.07978, over 19822.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2621, pruned_loss=0.04109, ctc_loss=0.07675, over 3835614.43 frames. ], batch size: 57, lr: 8.29e-03, grad_scale: 32.0 +2024-08-27 04:40:36,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234213.33333333334, ans=0.125 +2024-08-27 04:40:36,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=234213.33333333334, ans=0.0 +2024-08-27 04:40:45,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234213.33333333334, ans=0.125 +2024-08-27 04:40:49,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.69 vs. limit=15.0 +2024-08-27 04:40:58,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=234266.66666666666, ans=0.025 +2024-08-27 04:40:59,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=234320.0, ans=0.125 +2024-08-27 04:41:10,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.63 vs. 
limit=15.0 +2024-08-27 04:41:31,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234373.33333333334, ans=0.0 +2024-08-27 04:41:40,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=234426.66666666666, ans=0.125 +2024-08-27 04:41:42,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=234426.66666666666, ans=0.0 +2024-08-27 04:41:53,473 INFO [train.py:1114] (3/4) Epoch 18, batch 1650, loss[loss=0.1938, simple_loss=0.2718, pruned_loss=0.04199, ctc_loss=0.07948, over 19641.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2618, pruned_loss=0.04089, ctc_loss=0.07637, over 3831937.21 frames. ], batch size: 59, lr: 8.29e-03, grad_scale: 32.0 +2024-08-27 04:41:59,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=234480.0, ans=0.125 +2024-08-27 04:42:14,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=15.0 +2024-08-27 04:42:16,290 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.559e+02 1.894e+02 2.296e+02 3.896e+02, threshold=3.788e+02, percent-clipped=3.0 +2024-08-27 04:42:23,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=234586.66666666666, ans=0.125 +2024-08-27 04:42:24,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=234586.66666666666, ans=0.1 +2024-08-27 04:42:48,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.68 vs. limit=10.0 +2024-08-27 04:42:53,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=234693.33333333334, ans=0.125 +2024-08-27 04:43:00,731 INFO [train.py:1114] (3/4) Epoch 18, batch 1700, loss[loss=0.1508, simple_loss=0.2233, pruned_loss=0.0283, ctc_loss=0.05421, over 19662.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2613, pruned_loss=0.04066, ctc_loss=0.07597, over 3845944.45 frames. 
], batch size: 46, lr: 8.28e-03, grad_scale: 32.0
+2024-08-27 04:43:01,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=234746.66666666666, ans=0.125
+2024-08-27 04:43:13,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234800.0, ans=0.125
+2024-08-27 04:43:17,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=234800.0, ans=0.04949747468305833
+2024-08-27 04:43:29,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=234853.33333333334, ans=0.1
+2024-08-27 04:43:45,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=234960.0, ans=0.05
+2024-08-27 04:43:48,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=234960.0, ans=0.025
+2024-08-27 04:43:59,945 INFO [train.py:1114] (3/4) Epoch 18, batch 1750, loss[loss=0.1754, simple_loss=0.2464, pruned_loss=0.03836, ctc_loss=0.06898, over 19645.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2611, pruned_loss=0.04062, ctc_loss=0.07592, over 3850764.40 frames. ], batch size: 45, lr: 8.28e-03, grad_scale: 32.0
+2024-08-27 04:44:01,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=235013.33333333334, ans=0.0
+2024-08-27 04:44:13,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=235066.66666666666, ans=0.0
+2024-08-27 04:44:16,975 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.479e+02 1.670e+02 2.161e+02 3.908e+02, threshold=3.340e+02, percent-clipped=1.0
+2024-08-27 04:44:17,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235066.66666666666, ans=0.1
+2024-08-27 04:44:19,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=235120.0, ans=0.05
+2024-08-27 04:44:19,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=235120.0, ans=0.125
+2024-08-27 04:44:27,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=235173.33333333334, ans=0.125
+2024-08-27 04:44:32,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235173.33333333334, ans=0.125
+2024-08-27 04:44:39,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.90 vs. limit=15.0
+2024-08-27 04:44:48,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=235226.66666666666, ans=0.125
+2024-08-27 04:44:50,518 INFO [train.py:1114] (3/4) Epoch 18, batch 1800, loss[loss=0.1861, simple_loss=0.2661, pruned_loss=0.0386, ctc_loss=0.07242, over 19616.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2615, pruned_loss=0.04064, ctc_loss=0.07602, over 3851681.83 frames. ], batch size: 55, lr: 8.27e-03, grad_scale: 16.0
+2024-08-27 04:44:55,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=235280.0, ans=0.07
+2024-08-27 04:44:59,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=235333.33333333334, ans=0.05
+2024-08-27 04:45:05,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=235333.33333333334, ans=0.1
+2024-08-27 04:45:27,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=235440.0, ans=0.0
+2024-08-27 04:45:30,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=235440.0, ans=0.125
+2024-08-27 04:45:30,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=235440.0, ans=0.2
+2024-08-27 04:45:36,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=235440.0, ans=0.2
+2024-08-27 04:45:43,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.78 vs. limit=15.0
+2024-08-27 04:45:47,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=15.0
+2024-08-27 04:45:48,625 INFO [train.py:1114] (3/4) Epoch 18, batch 1850, loss[loss=0.1964, simple_loss=0.273, pruned_loss=0.04336, ctc_loss=0.08274, over 19615.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2611, pruned_loss=0.04065, ctc_loss=0.07595, over 3855891.58 frames. ], batch size: 57, lr: 8.27e-03, grad_scale: 8.0
+2024-08-27 04:45:51,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.38 vs. limit=15.0
+2024-08-27 04:46:36,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=235600.0, ans=0.0
+2024-08-27 04:46:38,528 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.500e+02 1.800e+02 2.247e+02 4.177e+02, threshold=3.601e+02, percent-clipped=3.0
+2024-08-27 04:46:41,380 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:46:48,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235706.66666666666, ans=0.0
+2024-08-27 04:46:54,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.98 vs. limit=15.0
+2024-08-27 04:46:56,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=235706.66666666666, ans=0.125
+2024-08-27 04:46:56,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=235706.66666666666, ans=0.0
+2024-08-27 04:46:58,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.49 vs. limit=22.5
+2024-08-27 04:47:06,327 INFO [train.py:1114] (3/4) Epoch 18, batch 1900, loss[loss=0.1871, simple_loss=0.2722, pruned_loss=0.03749, ctc_loss=0.06786, over 19669.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2614, pruned_loss=0.04054, ctc_loss=0.07579, over 3860883.18 frames. ], batch size: 59, lr: 8.26e-03, grad_scale: 8.0
+2024-08-27 04:47:25,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=235920.0, ans=0.025
+2024-08-27 04:47:39,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=235973.33333333334, ans=0.125
+2024-08-27 04:47:50,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=236026.66666666666, ans=0.0
+2024-08-27 04:47:53,597 INFO [train.py:1114] (3/4) Epoch 18, batch 1950, loss[loss=0.1682, simple_loss=0.2478, pruned_loss=0.03231, ctc_loss=0.06005, over 19593.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2622, pruned_loss=0.04068, ctc_loss=0.07614, over 3869736.26 frames. ], batch size: 52, lr: 8.26e-03, grad_scale: 8.0
+2024-08-27 04:48:12,696 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.481e+02 1.697e+02 2.159e+02 5.555e+02, threshold=3.394e+02, percent-clipped=1.0
+2024-08-27 04:48:13,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=236186.66666666666, ans=0.125
+2024-08-27 04:48:19,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.59 vs. limit=22.5
+2024-08-27 04:48:35,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.18 vs. limit=15.0
+2024-08-27 04:48:50,307 INFO [train.py:1114] (3/4) Epoch 18, batch 2000, loss[loss=0.1648, simple_loss=0.2331, pruned_loss=0.03506, ctc_loss=0.06578, over 19652.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2625, pruned_loss=0.04099, ctc_loss=0.07666, over 3853632.70 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:49:11,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.47 vs. limit=10.0
+2024-08-27 04:49:48,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236453.33333333334, ans=0.1
+2024-08-27 04:49:49,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.55 vs. limit=15.0
+2024-08-27 04:49:57,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=236506.66666666666, ans=0.0
+2024-08-27 04:49:59,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=236506.66666666666, ans=0.05
+2024-08-27 04:50:08,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=236560.0, ans=0.0
+2024-08-27 04:50:16,440 INFO [train.py:1114] (3/4) Epoch 18, batch 2050, loss[loss=0.1524, simple_loss=0.2284, pruned_loss=0.02812, ctc_loss=0.05007, over 19737.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2615, pruned_loss=0.04082, ctc_loss=0.07624, over 3850559.28 frames. ], batch size: 47, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:50:40,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=15.0
+2024-08-27 04:50:47,344 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.468e+02 1.842e+02 2.423e+02 4.039e+02, threshold=3.684e+02, percent-clipped=4.0
+2024-08-27 04:51:09,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=236826.66666666666, ans=0.04949747468305833
+2024-08-27 04:51:13,564 INFO [train.py:1114] (3/4) Epoch 18, batch 2100, loss[loss=0.1858, simple_loss=0.2628, pruned_loss=0.03957, ctc_loss=0.07413, over 19756.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2613, pruned_loss=0.04049, ctc_loss=0.07569, over 3857749.41 frames. ], batch size: 54, lr: 8.25e-03, grad_scale: 8.0
+2024-08-27 04:51:16,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236880.0, ans=0.125
+2024-08-27 04:51:59,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237040.0, ans=0.125
+2024-08-27 04:52:00,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=237040.0, ans=0.0
+2024-08-27 04:52:13,281 INFO [train.py:1114] (3/4) Epoch 18, batch 2150, loss[loss=0.1867, simple_loss=0.2598, pruned_loss=0.04146, ctc_loss=0.0768, over 19829.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2607, pruned_loss=0.04035, ctc_loss=0.07539, over 3869587.38 frames. ], batch size: 52, lr: 8.24e-03, grad_scale: 8.0
+2024-08-27 04:52:16,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=15.0
+2024-08-27 04:52:22,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=237200.0, ans=0.125
+2024-08-27 04:52:26,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=237200.0, ans=0.125
+2024-08-27 04:52:28,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237200.0, ans=0.1
+2024-08-27 04:52:29,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0
+2024-08-27 04:52:31,105 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.418e+02 1.667e+02 2.145e+02 4.483e+02, threshold=3.333e+02, percent-clipped=3.0
+2024-08-27 04:52:36,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237253.33333333334, ans=0.125
+2024-08-27 04:52:57,218 INFO [train.py:1114] (3/4) Epoch 18, batch 2200, loss[loss=0.1964, simple_loss=0.2656, pruned_loss=0.04615, ctc_loss=0.08727, over 19580.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2605, pruned_loss=0.04019, ctc_loss=0.07503, over 3867603.63 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 8.0
+2024-08-27 04:53:04,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0
+2024-08-27 04:53:22,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237520.0, ans=0.1
+2024-08-27 04:53:34,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=15.0
+2024-08-27 04:53:45,573 INFO [train.py:1114] (3/4) Epoch 18, batch 2250, loss[loss=0.1828, simple_loss=0.2697, pruned_loss=0.03499, ctc_loss=0.06511, over 19620.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2607, pruned_loss=0.04039, ctc_loss=0.07539, over 3867278.58 frames. ], batch size: 55, lr: 8.23e-03, grad_scale: 8.0
+2024-08-27 04:53:54,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=237680.0, ans=0.125
+2024-08-27 04:57:16,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=237733.33333333334, ans=0.125
+2024-08-27 04:58:06,572 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 04:58:07,208 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.445e+02 1.673e+02 2.181e+02 3.635e+02, threshold=3.347e+02, percent-clipped=1.0
+2024-08-27 04:59:13,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237840.0, ans=0.125
+2024-08-27 04:59:39,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237840.0, ans=0.125
+2024-08-27 05:00:07,294 INFO [train.py:1114] (3/4) Epoch 18, batch 2300, loss[loss=0.1713, simple_loss=0.2469, pruned_loss=0.03476, ctc_loss=0.06564, over 19511.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.26, pruned_loss=0.04051, ctc_loss=0.07557, over 3861205.89 frames. ], batch size: 49, lr: 8.23e-03, grad_scale: 8.0
+2024-08-27 05:01:43,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=237946.66666666666, ans=0.025
+2024-08-27 05:02:46,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238000.0, ans=0.1
+2024-08-27 05:02:47,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.33 vs. limit=15.0
+2024-08-27 05:04:40,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=238000.0, ans=0.0
+2024-08-27 05:05:53,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=238106.66666666666, ans=0.125
+2024-08-27 05:05:57,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=238160.0, ans=0.125
+2024-08-27 05:06:14,076 INFO [train.py:1114] (3/4) Epoch 18, batch 2350, loss[loss=0.1915, simple_loss=0.2669, pruned_loss=0.04232, ctc_loss=0.07833, over 19631.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2602, pruned_loss=0.04061, ctc_loss=0.07578, over 3863894.69 frames. ], batch size: 63, lr: 8.22e-03, grad_scale: 8.0
+2024-08-27 05:08:27,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238266.66666666666, ans=0.125
+2024-08-27 05:09:45,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.379e+02 1.605e+02 2.102e+02 3.614e+02, threshold=3.209e+02, percent-clipped=2.0
+2024-08-27 05:09:56,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=238320.0, ans=0.025
+2024-08-27 05:10:15,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0
+2024-08-27 05:10:17,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=238373.33333333334, ans=0.0
+2024-08-27 05:11:54,345 INFO [train.py:1114] (3/4) Epoch 18, batch 2400, loss[loss=0.2142, simple_loss=0.282, pruned_loss=0.05338, ctc_loss=0.0993, over 19319.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2625, pruned_loss=0.04148, ctc_loss=0.07732, over 3857630.46 frames. ], batch size: 71, lr: 8.22e-03, grad_scale: 16.0
+2024-08-27 05:13:43,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=238586.66666666666, ans=12.0
+2024-08-27 05:14:36,110 INFO [train.py:1114] (3/4) Epoch 18, batch 2450, loss[loss=0.2396, simple_loss=0.2839, pruned_loss=0.0699, ctc_loss=0.1388, over 13216.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2658, pruned_loss=0.04364, ctc_loss=0.08169, over 3730768.58 frames. ], batch size: 140, lr: 8.21e-03, grad_scale: 16.0
+2024-08-27 05:14:36,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=238746.66666666666, ans=0.0
+2024-08-27 05:14:42,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.94 vs. limit=22.5
+2024-08-27 05:14:49,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238800.0, ans=0.125
+2024-08-27 05:15:19,829 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.338e+02 1.631e+02 1.872e+02 2.220e+02 3.951e+02, threshold=3.743e+02, percent-clipped=5.0
+2024-08-27 05:15:26,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-08-27 05:15:29,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238853.33333333334, ans=0.1
+2024-08-27 05:15:32,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.63 vs. limit=15.0
+2024-08-27 05:15:41,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=238906.66666666666, ans=0.035
+2024-08-27 05:19:02,754 INFO [train.py:1114] (3/4) Epoch 19, batch 0, loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.0466, ctc_loss=0.08413, over 19809.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.0466, ctc_loss=0.08413, over 19809.00 frames. ], batch size: 49, lr: 7.99e-03, grad_scale: 32.0
+2024-08-27 05:19:02,754 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-27 05:20:00,132 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5528, 3.7938, 4.4552, 4.4102], device='cuda:3')
+2024-08-27 05:20:05,936 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames.
+2024-08-27 05:20:05,937 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12941MB
+2024-08-27 05:20:07,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=238954.66666666666, ans=0.125
+2024-08-27 05:20:09,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=238954.66666666666, ans=0.025
+2024-08-27 05:20:15,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239008.0, ans=0.1
+2024-08-27 05:20:18,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.60 vs. limit=15.0
+2024-08-27 05:20:22,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=239008.0, ans=0.0
+2024-08-27 05:21:32,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=239114.66666666666, ans=0.125
+2024-08-27 05:22:51,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=239168.0, ans=0.0
+2024-08-27 05:22:53,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=239168.0, ans=0.0
+2024-08-27 05:22:55,127 INFO [train.py:1114] (3/4) Epoch 19, batch 50, loss[loss=0.1637, simple_loss=0.2399, pruned_loss=0.03134, ctc_loss=0.06177, over 19714.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2636, pruned_loss=0.04202, ctc_loss=0.07865, over 844167.97 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-27 05:23:18,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239221.33333333334, ans=0.0
+2024-08-27 05:23:19,038 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 05:23:19,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.39 vs. limit=22.5
+2024-08-27 05:23:21,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=239221.33333333334, ans=0.0
+2024-08-27 05:23:21,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239221.33333333334, ans=0.0
+2024-08-27 05:23:58,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239381.33333333334, ans=0.0
+2024-08-27 05:23:59,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.492e+02 1.734e+02 2.135e+02 3.431e+02, threshold=3.468e+02, percent-clipped=0.0
+2024-08-27 05:24:18,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=239434.66666666666, ans=15.0
+2024-08-27 05:24:19,419 INFO [train.py:1114] (3/4) Epoch 19, batch 100, loss[loss=0.1592, simple_loss=0.237, pruned_loss=0.02957, ctc_loss=0.05582, over 19723.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2652, pruned_loss=0.04201, ctc_loss=0.07832, over 1498340.35 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0
+2024-08-27 05:24:26,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239488.0, ans=0.1
+2024-08-27 05:24:27,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=239541.33333333334, ans=0.125
+2024-08-27 05:24:28,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=239541.33333333334, ans=0.2
+2024-08-27 05:24:38,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=239541.33333333334, ans=0.125
+2024-08-27 05:25:45,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=239648.0, ans=0.125
+2024-08-27 05:25:46,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239648.0, ans=0.125
+2024-08-27 05:25:48,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=239648.0, ans=0.125
+2024-08-27 05:25:58,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0
+2024-08-27 05:26:05,113 INFO [train.py:1114] (3/4) Epoch 19, batch 150, loss[loss=0.1776, simple_loss=0.2465, pruned_loss=0.0395, ctc_loss=0.07442, over 19736.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2625, pruned_loss=0.04109, ctc_loss=0.07667, over 2027311.34 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-27 05:27:17,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=239861.33333333334, ans=0.2
+2024-08-27 05:27:19,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125
+2024-08-27 05:27:20,684 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.500e+02 1.966e+02 2.497e+02 3.604e+02, threshold=3.932e+02, percent-clipped=3.0
+2024-08-27 05:27:46,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239968.0, ans=0.125
+2024-08-27 05:27:57,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=239968.0, ans=0.125
+2024-08-27 05:28:10,136 INFO [train.py:1114] (3/4) Epoch 19, batch 200, loss[loss=0.1991, simple_loss=0.2733, pruned_loss=0.04522, ctc_loss=0.08585, over 18332.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2611, pruned_loss=0.04042, ctc_loss=0.0753, over 2435699.92 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0
+2024-08-27 05:28:13,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=240021.33333333334, ans=0.0
+2024-08-27 05:28:13,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240021.33333333334, ans=0.1
+2024-08-27 05:28:27,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240128.0, ans=0.1
+2024-08-27 05:28:27,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240128.0, ans=0.125
+2024-08-27 05:29:10,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=240128.0, ans=0.5
+2024-08-27 05:29:11,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240128.0, ans=0.125
+2024-08-27 05:29:12,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.76 vs. limit=12.0
+2024-08-27 05:29:31,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240234.66666666666, ans=0.0
+2024-08-27 05:29:34,215 INFO [train.py:1114] (3/4) Epoch 19, batch 250, loss[loss=0.1976, simple_loss=0.2713, pruned_loss=0.04543, ctc_loss=0.08247, over 19409.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2604, pruned_loss=0.03985, ctc_loss=0.07409, over 2756001.92 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0
+2024-08-27 05:30:02,564 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.446e+02 1.683e+02 2.499e+02 4.574e+02, threshold=3.367e+02, percent-clipped=7.0
+2024-08-27 05:30:09,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240448.0, ans=0.125
+2024-08-27 05:30:17,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.68 vs. limit=22.5
+2024-08-27 05:30:22,807 INFO [train.py:1114] (3/4) Epoch 19, batch 300, loss[loss=0.2045, simple_loss=0.2831, pruned_loss=0.04607, ctc_loss=0.08444, over 19526.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03967, ctc_loss=0.07381, over 3001126.27 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0
+2024-08-27 05:30:28,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=240554.66666666666, ans=0.0
+2024-08-27 05:30:36,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=240608.0, ans=0.0
+2024-08-27 05:30:46,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=22.5
+2024-08-27 05:30:55,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=240714.66666666666, ans=0.125
+2024-08-27 05:31:00,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240768.0, ans=0.125
+2024-08-27 05:31:07,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=240768.0, ans=0.09899494936611666
+2024-08-27 05:31:09,993 INFO [train.py:1114] (3/4) Epoch 19, batch 350, loss[loss=0.1564, simple_loss=0.2308, pruned_loss=0.02961, ctc_loss=0.05672, over 19758.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2606, pruned_loss=0.03987, ctc_loss=0.07435, over 3191307.89 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0
+2024-08-27 05:31:25,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=240874.66666666666, ans=0.05
+2024-08-27 05:31:35,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0
+2024-08-27 05:31:39,930 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.453e+02 1.753e+02 2.405e+02 3.677e+02, threshold=3.507e+02, percent-clipped=2.0
+2024-08-27 05:31:41,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=240981.33333333334, ans=0.2
+2024-08-27 05:31:41,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=240981.33333333334, ans=0.2
+2024-08-27 05:31:41,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=240981.33333333334, ans=0.125
+2024-08-27 05:31:51,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=241034.66666666666, ans=0.0
+2024-08-27 05:31:57,289 INFO [train.py:1114] (3/4) Epoch 19, batch 400, loss[loss=0.1845, simple_loss=0.2648, pruned_loss=0.03817, ctc_loss=0.06979, over 19488.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2601, pruned_loss=0.03973, ctc_loss=0.07425, over 3343080.18 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0
+2024-08-27 05:32:34,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.89 vs. limit=5.0
+2024-08-27 05:32:34,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241194.66666666666, ans=0.125
+2024-08-27 05:32:44,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=241248.0, ans=0.0
+2024-08-27 05:32:53,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=241301.33333333334, ans=0.2
+2024-08-27 05:32:55,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=241301.33333333334, ans=0.0
+2024-08-27 05:33:00,039 INFO [train.py:1114] (3/4) Epoch 19, batch 450, loss[loss=0.1993, simple_loss=0.2799, pruned_loss=0.043, ctc_loss=0.08166, over 19594.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2601, pruned_loss=0.03988, ctc_loss=0.0744, over 3450659.63 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0
+2024-08-27 05:33:10,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=241354.66666666666, ans=0.0
+2024-08-27 05:33:14,761 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 05:33:17,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=241408.0, ans=0.0
+2024-08-27 05:33:18,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241408.0, ans=0.125
+2024-08-27 05:33:19,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=241408.0, ans=0.1
+2024-08-27 05:33:22,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=241461.33333333334, ans=0.125
+2024-08-27 05:33:30,917 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.397e+02 1.631e+02 2.046e+02 3.175e+02, threshold=3.262e+02, percent-clipped=0.0
+2024-08-27 05:33:49,276 INFO [train.py:1114] (3/4) Epoch 19, batch 500, loss[loss=0.1865, simple_loss=0.2623, pruned_loss=0.04074, ctc_loss=0.07323, over 19678.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2588, pruned_loss=0.03927, ctc_loss=0.07328, over 3546334.18 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0
+2024-08-27 05:34:10,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=241728.0, ans=15.0
+2024-08-27 05:34:10,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0
+2024-08-27 05:34:13,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=241728.0, ans=0.125
+2024-08-27 05:34:17,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=241728.0, ans=0.125
+2024-08-27 05:34:21,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=241781.33333333334, ans=0.025
+2024-08-27 05:34:28,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241834.66666666666, ans=0.1
+2024-08-27 05:34:34,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=241834.66666666666, ans=0.0
+2024-08-27 05:34:34,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241834.66666666666, ans=0.1
+2024-08-27 05:34:39,014 INFO [train.py:1114] (3/4) Epoch 19, batch 550, loss[loss=0.2115, simple_loss=0.2822, pruned_loss=0.05143, ctc_loss=0.09484, over 19290.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2588, pruned_loss=0.03949, ctc_loss=0.07369, over 3608127.09 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0
+2024-08-27 05:34:45,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=241888.0, ans=0.025
+2024-08-27 05:35:05,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=241994.66666666666, ans=0.2
+2024-08-27 05:35:08,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242048.0, ans=0.125
+2024-08-27 05:35:09,349 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.385e+02 1.667e+02 1.980e+02 3.512e+02, threshold=3.334e+02, percent-clipped=2.0
+2024-08-27 05:35:20,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=242101.33333333334, ans=0.125
+2024-08-27 05:35:22,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242101.33333333334, ans=0.0
+2024-08-27 05:35:27,101 INFO [train.py:1114] (3/4) Epoch 19, batch 600, loss[loss=0.2057, simple_loss=0.2848, pruned_loss=0.04651, ctc_loss=0.08411, over 19361.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03969, ctc_loss=0.07401, over 3665674.33 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0
+2024-08-27 05:35:31,178 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 05:35:43,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=242208.0, ans=0.0
+2024-08-27 05:35:43,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242208.0, ans=0.1
+2024-08-27 05:36:23,072 INFO [train.py:1114] (3/4) Epoch 19, batch 650, loss[loss=0.1785, simple_loss=0.2565, pruned_loss=0.03652, ctc_loss=0.06866, over 19775.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.03936, ctc_loss=0.07358, over 3716489.95 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0
+2024-08-27 05:36:53,247 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.470e+02 1.907e+02 2.471e+02 4.129e+02, threshold=3.814e+02, percent-clipped=9.0
+2024-08-27 05:36:59,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242581.33333333334, ans=0.1
+2024-08-27 05:37:05,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0
+2024-08-27 05:37:05,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=242634.66666666666, ans=0.025
+2024-08-27 05:37:29,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242634.66666666666, ans=0.125
+2024-08-27 05:37:33,323 INFO [train.py:1114] (3/4) Epoch 19, batch 700, loss[loss=0.1693, simple_loss=0.2461, pruned_loss=0.03357, ctc_loss=0.06351, over 19733.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2597, pruned_loss=0.03976, ctc_loss=0.07417, over 3749037.36 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0
+2024-08-27 05:37:35,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=242688.0, ans=0.05
+2024-08-27 05:37:49,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=242741.33333333334, ans=0.2
+2024-08-27 05:37:59,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=242794.66666666666, ans=0.0
+2024-08-27 05:38:23,011 INFO [train.py:1114] (3/4) Epoch 19, batch 750, loss[loss=0.1747, simple_loss=0.2563, pruned_loss=0.03375, ctc_loss=0.06425, over 19495.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2593, pruned_loss=0.03977, ctc_loss=0.07428, over 3775690.54 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0
+2024-08-27 05:38:51,402 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.489e+02 1.823e+02 2.314e+02 3.772e+02, threshold=3.647e+02, percent-clipped=0.0
+2024-08-27 05:38:51,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=243114.66666666666, ans=0.0
+2024-08-27 05:39:11,777 INFO [train.py:1114] (3/4) Epoch 19, batch 800, loss[loss=0.1726, simple_loss=0.2444, pruned_loss=0.03696, ctc_loss=0.06754, over 19406.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2594, pruned_loss=0.03992, ctc_loss=0.07445, over 3797056.26 frames. ], batch size: 48, lr: 7.92e-03, grad_scale: 32.0
+2024-08-27 05:39:19,300 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 05:39:19,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.37 vs. limit=15.0
+2024-08-27 05:39:24,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0
+2024-08-27 05:39:25,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243274.66666666666, ans=0.125
+2024-08-27 05:39:39,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243381.33333333334, ans=0.125
+2024-08-27 05:39:44,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-08-27 05:39:47,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=243381.33333333334, ans=0.125
+2024-08-27 05:39:58,046 INFO [train.py:1114] (3/4) Epoch 19, batch 850, loss[loss=0.1904, simple_loss=0.2733, pruned_loss=0.0395, ctc_loss=0.07134, over 19632.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2589, pruned_loss=0.03957, ctc_loss=0.07373, over 3816610.56 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0
+2024-08-27 05:40:03,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=243488.0, ans=0.125
+2024-08-27 05:40:05,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.09 vs. limit=22.5
+2024-08-27 05:40:08,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.50 vs. limit=15.0
+2024-08-27 05:40:28,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243648.0, ans=0.1
+2024-08-27 05:40:28,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.81 vs. limit=22.5
+2024-08-27 05:40:28,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.388e+02 1.609e+02 2.074e+02 4.897e+02, threshold=3.218e+02, percent-clipped=1.0
+2024-08-27 05:40:30,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=243648.0, ans=0.0
+2024-08-27 05:40:33,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=243648.0, ans=0.0
+2024-08-27 05:40:40,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=243701.33333333334, ans=0.0
+2024-08-27 05:40:51,594 INFO [train.py:1114] (3/4) Epoch 19, batch 900, loss[loss=0.1665, simple_loss=0.2406, pruned_loss=0.03391, ctc_loss=0.06157, over 19423.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2593, pruned_loss=0.03998, ctc_loss=0.07436, over 3820995.48 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0
+2024-08-27 05:41:48,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=243861.33333333334, ans=0.0
+2024-08-27 05:44:11,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=243914.66666666666, ans=0.0
+2024-08-27 05:45:22,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=243914.66666666666, ans=15.0
+2024-08-27 05:45:28,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=243914.66666666666, ans=0.125
+2024-08-27 05:45:36,686 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 05:46:16,464 INFO [train.py:1114] (3/4) Epoch 19, batch 950, loss[loss=0.1791, simple_loss=0.2505, pruned_loss=0.03935, ctc_loss=0.07233, over 19517.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2596, pruned_loss=0.04006, ctc_loss=0.07467, over 3820732.71 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0
+2024-08-27 05:46:27,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=244021.33333333334, ans=0.09899494936611666
+2024-08-27 05:47:00,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=244128.0, ans=0.0
+2024-08-27 05:47:02,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244128.0, ans=0.1
+2024-08-27 05:47:05,273 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.465e+02 1.729e+02 2.037e+02 3.385e+02, threshold=3.459e+02, percent-clipped=1.0
+2024-08-27 05:47:23,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.01 vs. limit=22.5
+2024-08-27 05:47:24,471 INFO [train.py:1114] (3/4) Epoch 19, batch 1000, loss[loss=0.1681, simple_loss=0.2451, pruned_loss=0.03326, ctc_loss=0.06137, over 19864.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2611, pruned_loss=0.04064, ctc_loss=0.07572, over 3816160.43 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0
+2024-08-27 05:47:27,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.58 vs. limit=22.5
+2024-08-27 05:47:34,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244341.33333333334, ans=0.1
+2024-08-27 05:47:37,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.20 vs. limit=6.0
+2024-08-27 05:48:03,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244501.33333333334, ans=0.0
+2024-08-27 05:48:12,792 INFO [train.py:1114] (3/4) Epoch 19, batch 1050, loss[loss=0.1834, simple_loss=0.2648, pruned_loss=0.03711, ctc_loss=0.06962, over 19837.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2601, pruned_loss=0.04034, ctc_loss=0.07524, over 3823259.35 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0
+2024-08-27 05:48:14,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0
+2024-08-27 05:48:24,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244608.0, ans=0.0
+2024-08-27 05:48:28,866 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.93 vs. limit=10.0
+2024-08-27 05:48:33,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0
+2024-08-27 05:48:42,881 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.407e+02 1.559e+02 1.901e+02 2.565e+02, threshold=3.118e+02, percent-clipped=0.0
+2024-08-27 05:48:46,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0
+2024-08-27 05:48:52,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=244768.0, ans=0.125
+2024-08-27 05:49:02,412 INFO [train.py:1114] (3/4) Epoch 19, batch 1100, loss[loss=0.1621, simple_loss=0.2438, pruned_loss=0.0288, ctc_loss=0.05706, over 19584.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2596, pruned_loss=0.03999, ctc_loss=0.07478, over 3831325.97 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0
+2024-08-27 05:49:50,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245034.66666666666, ans=0.125
+2024-08-27 05:49:51,754 INFO [train.py:1114] (3/4) Epoch 19, batch 1150, loss[loss=0.1691, simple_loss=0.2507, pruned_loss=0.03131, ctc_loss=0.0623, over 19576.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2594, pruned_loss=0.03983, ctc_loss=0.07455, over 3829831.16 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0
+2024-08-27 05:49:57,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=245088.0, ans=0.0
+2024-08-27 05:51:12,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.53 vs. limit=15.0
+2024-08-27 05:51:14,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=245141.33333333334, ans=0.2
+2024-08-27 05:51:17,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.51 vs. limit=15.0
+2024-08-27 05:52:27,627 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.437e+02 1.648e+02 2.100e+02 3.411e+02, threshold=3.296e+02, percent-clipped=3.0
+2024-08-27 05:52:47,004 INFO [train.py:1114] (3/4) Epoch 19, batch 1200, loss[loss=0.1831, simple_loss=0.263, pruned_loss=0.03759, ctc_loss=0.07036, over 19836.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2604, pruned_loss=0.04012, ctc_loss=0.07501, over 3824867.73 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0
+2024-08-27 05:52:56,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.20 vs. limit=6.0
+2024-08-27 05:53:04,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0
+2024-08-27 05:53:08,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.59 vs. limit=10.0
+2024-08-27 05:53:17,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=245514.66666666666, ans=0.0
+2024-08-27 05:53:25,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=245568.0, ans=0.125
+2024-08-27 05:53:35,247 INFO [train.py:1114] (3/4) Epoch 19, batch 1250, loss[loss=0.2057, simple_loss=0.2734, pruned_loss=0.05105, ctc_loss=0.08979, over 19549.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2607, pruned_loss=0.04015, ctc_loss=0.07489, over 3843427.90 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0
+2024-08-27 05:53:49,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.60 vs. limit=15.0
+2024-08-27 05:53:57,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.66 vs. limit=22.5
+2024-08-27 05:54:05,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.471e+02 1.735e+02 2.173e+02 3.319e+02, threshold=3.470e+02, percent-clipped=1.0
+2024-08-27 05:54:07,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=245781.33333333334, ans=0.2
+2024-08-27 05:54:10,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=245781.33333333334, ans=0.2
+2024-08-27 05:54:15,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=245834.66666666666, ans=0.05
+2024-08-27 05:54:26,202 INFO [train.py:1114] (3/4) Epoch 19, batch 1300, loss[loss=0.1975, simple_loss=0.272, pruned_loss=0.04551, ctc_loss=0.08021, over 18812.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2598, pruned_loss=0.03973, ctc_loss=0.07424, over 3845729.62 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0
+2024-08-27 05:54:35,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=245888.0, ans=0.0
+2024-08-27 05:54:36,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245941.33333333334, ans=0.1
+2024-08-27 05:54:40,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0
+2024-08-27 05:54:50,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.87 vs. limit=22.5
+2024-08-27 05:54:50,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=12.0
+2024-08-27 05:54:50,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245994.66666666666, ans=0.0
+2024-08-27 05:55:05,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.14 vs. limit=15.0
+2024-08-27 05:55:13,898 INFO [train.py:1114] (3/4) Epoch 19, batch 1350, loss[loss=0.1633, simple_loss=0.2432, pruned_loss=0.03024, ctc_loss=0.05735, over 19764.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2593, pruned_loss=0.0394, ctc_loss=0.07357, over 3857959.38 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 16.0
+2024-08-27 05:55:16,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246154.66666666666, ans=0.125
+2024-08-27 05:55:20,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246154.66666666666, ans=0.125
+2024-08-27 05:55:30,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=246208.0, ans=0.125
+2024-08-27 05:55:35,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=246261.33333333334, ans=0.125
+2024-08-27 05:55:45,502 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.414e+02 1.634e+02 2.144e+02 3.359e+02, threshold=3.268e+02, percent-clipped=0.0
+2024-08-27 05:55:57,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=246368.0, ans=0.95
+2024-08-27 05:56:03,878 INFO [train.py:1114] (3/4) Epoch 19, batch 1400, loss[loss=0.1454, simple_loss=0.2195, pruned_loss=0.02552, ctc_loss=0.05064, over 19666.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2588, pruned_loss=0.03922, ctc_loss=0.07324, over 3865333.02 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 16.0
+2024-08-27 05:56:06,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246421.33333333334, ans=0.1
+2024-08-27 05:56:18,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=246474.66666666666, ans=0.0
+2024-08-27 05:56:19,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0
+2024-08-27 05:56:21,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=246528.0, ans=0.07
+2024-08-27 05:56:24,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.88 vs. limit=15.0
+2024-08-27 05:56:27,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=246528.0, ans=0.0
+2024-08-27 05:56:29,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=246528.0, ans=0.125
+2024-08-27 05:56:30,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.23 vs. limit=15.0
+2024-08-27 05:56:40,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=246581.33333333334, ans=0.125
+2024-08-27 05:56:53,081 INFO [train.py:1114] (3/4) Epoch 19, batch 1450, loss[loss=0.2046, simple_loss=0.2742, pruned_loss=0.04993, ctc_loss=0.08758, over 19675.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2594, pruned_loss=0.03949, ctc_loss=0.07379, over 3863411.14 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 16.0
+2024-08-27 05:56:59,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246688.0, ans=0.1
+2024-08-27 05:57:14,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=246794.66666666666, ans=0.025
+2024-08-27 05:57:25,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.422e+02 1.608e+02 1.963e+02 3.546e+02, threshold=3.216e+02, percent-clipped=4.0
+2024-08-27 05:57:25,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246848.0, ans=0.125
+2024-08-27 05:57:26,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246848.0, ans=0.0
+2024-08-27 05:57:36,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=246901.33333333334, ans=0.125
+2024-08-27 05:57:40,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=246901.33333333334, ans=0.1
+2024-08-27 05:57:42,300 INFO [train.py:1114] (3/4) Epoch 19, batch 1500, loss[loss=0.2076, simple_loss=0.2872, pruned_loss=0.04704, ctc_loss=0.08491, over 19559.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2598, pruned_loss=0.03956, ctc_loss=0.07402, over 3862802.71 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 16.0
+2024-08-27 05:57:42,933 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.19 vs. limit=15.0
+2024-08-27 05:57:51,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=247008.0, ans=0.125
+2024-08-27 05:58:08,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247008.0, ans=0.1
+2024-08-27 05:58:42,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247114.66666666666, ans=0.1
+2024-08-27 05:58:56,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.61 vs. limit=15.0
+2024-08-27 05:59:01,761 INFO [train.py:1114] (3/4) Epoch 19, batch 1550, loss[loss=0.2185, simple_loss=0.2946, pruned_loss=0.05195, ctc_loss=0.09631, over 19587.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2601, pruned_loss=0.03971, ctc_loss=0.07429, over 3847731.08 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 16.0
+2024-08-27 05:59:07,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.42 vs. limit=22.5
+2024-08-27 05:59:09,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=247221.33333333334, ans=0.0
+2024-08-27 05:59:15,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=247274.66666666666, ans=0.025
+2024-08-27 05:59:15,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.07 vs. limit=15.0
+2024-08-27 05:59:19,043 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=22.5
+2024-08-27 05:59:31,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=247328.0, ans=0.025
+2024-08-27 05:59:37,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247328.0, ans=0.1
+2024-08-27 05:59:43,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247381.33333333334, ans=0.125
+2024-08-27 05:59:43,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.416e+02 1.634e+02 2.007e+02 4.215e+02, threshold=3.267e+02, percent-clipped=2.0
+2024-08-27 05:59:49,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=247381.33333333334, ans=0.0
+2024-08-27 06:00:02,753 INFO [train.py:1114] (3/4) Epoch 19, batch 1600, loss[loss=0.1851, simple_loss=0.2671, pruned_loss=0.03731, ctc_loss=0.07122, over 19839.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.26, pruned_loss=0.03981, ctc_loss=0.07441, over 3836938.66 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0
+2024-08-27 06:00:11,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247541.33333333334, ans=0.125
+2024-08-27 06:00:20,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=247541.33333333334, ans=0.0
+2024-08-27 06:00:26,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=247594.66666666666, ans=0.0
+2024-08-27 06:00:26,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=247594.66666666666, ans=0.125
+2024-08-27 06:00:29,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247594.66666666666, ans=0.1
+2024-08-27 06:00:41,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=247648.0, ans=0.0
+2024-08-27 06:00:51,745 INFO [train.py:1114] (3/4) Epoch 19, batch 1650, loss[loss=0.1811, simple_loss=0.2686, pruned_loss=0.0343, ctc_loss=0.06266, over 19648.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2595, pruned_loss=0.03981, ctc_loss=0.0744, over 3833338.69 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0
+2024-08-27 06:01:21,513 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.539e+02 1.985e+02 2.467e+02 4.637e+02, threshold=3.969e+02, percent-clipped=10.0
+2024-08-27 06:01:39,971 INFO [train.py:1114] (3/4) Epoch 19, batch 1700, loss[loss=0.1531, simple_loss=0.2276, pruned_loss=0.02872, ctc_loss=0.05306, over 19679.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2592, pruned_loss=0.03931, ctc_loss=0.07345, over 3847403.35 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0
+2024-08-27 06:01:42,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=248021.33333333334, ans=0.0
+2024-08-27 06:01:43,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.55 vs. limit=6.0
+2024-08-27 06:02:03,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=248128.0, ans=0.09899494936611666
+2024-08-27 06:02:05,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.39 vs. limit=10.0
+2024-08-27 06:02:06,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0
+2024-08-27 06:02:07,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=248181.33333333334, ans=0.09899494936611666
+2024-08-27 06:02:16,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248234.66666666666, ans=0.125
+2024-08-27 06:02:20,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=248234.66666666666, ans=0.125
+2024-08-27 06:02:22,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=248234.66666666666, ans=0.0
+2024-08-27 06:02:23,948 INFO [train.py:1114] (3/4) Epoch 19, batch 1750, loss[loss=0.1725, simple_loss=0.2411, pruned_loss=0.03849, ctc_loss=0.06727, over 19628.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.259, pruned_loss=0.03927, ctc_loss=0.0734, over 3851796.27 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0
+2024-08-27 06:02:24,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=248288.0, ans=0.0
+2024-08-27 06:02:29,397 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-27 06:02:39,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=248341.33333333334, ans=0.125
+2024-08-27 06:02:40,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=248341.33333333334, ans=15.0
+2024-08-27 06:02:57,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.179e+02 1.492e+02 1.808e+02 2.313e+02 3.735e+02, threshold=3.616e+02, percent-clipped=0.0
+2024-08-27 06:03:10,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248501.33333333334, ans=0.0
+2024-08-27 06:03:18,759 INFO [train.py:1114] (3/4) Epoch 19, batch 1800, loss[loss=0.1903, simple_loss=0.2731, pruned_loss=0.03963, ctc_loss=0.07066, over 19609.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2595, pruned_loss=0.0394, ctc_loss=0.07349, over 3853062.75 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0
+2024-08-27 06:03:27,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=248608.0, ans=0.125
+2024-08-27 06:03:28,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248608.0, ans=0.125
+2024-08-27 06:03:53,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.89 vs. limit=15.0
+2024-08-27 06:04:02,712 INFO [train.py:1114] (3/4) Epoch 19, batch 1850, loss[loss=0.2043, simple_loss=0.2814, pruned_loss=0.04566, ctc_loss=0.08962, over 19589.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.03933, ctc_loss=0.07329, over 3856845.91 frames. 
], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:05,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=248821.33333333334, ans=0.025 +2024-08-27 06:04:18,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=248874.66666666666, ans=0.025 +2024-08-27 06:04:32,747 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.498e+02 2.037e+02 3.063e+02 6.275e+02, threshold=4.074e+02, percent-clipped=13.0 +2024-08-27 06:04:47,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=249088.0, ans=0.125 +2024-08-27 06:04:47,712 INFO [train.py:1114] (3/4) Epoch 19, batch 1900, loss[loss=0.1708, simple_loss=0.2495, pruned_loss=0.03313, ctc_loss=0.06457, over 19656.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2597, pruned_loss=0.03961, ctc_loss=0.0737, over 3862137.32 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-27 06:04:51,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.83 vs. limit=10.0 +2024-08-27 06:04:55,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=249141.33333333334, ans=0.0 +2024-08-27 06:05:11,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=249194.66666666666, ans=0.125 +2024-08-27 06:05:12,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.58 vs. limit=15.0 +2024-08-27 06:05:45,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=249248.0, ans=0.0 +2024-08-27 06:05:47,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249248.0, ans=0.125 +2024-08-27 06:05:58,948 INFO [train.py:1114] (3/4) Epoch 19, batch 1950, loss[loss=0.1679, simple_loss=0.2365, pruned_loss=0.03626, ctc_loss=0.067, over 19588.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2604, pruned_loss=0.03978, ctc_loss=0.07413, over 3871034.46 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-27 06:16:15,092 INFO [train.py:1050] (3/4) Caught exception: [Rank 3] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=471644, OpType=ALLREDUCE, NumelIn=745, NumelOut=745, Timeout(ms)=600000) ran for 600006 milliseconds before timing out.. 
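
Editorial note: the line above records rank 3 catching a NCCL collective-operation timeout, and the next log line records `train.py` saving a `bad-model-3.pt` checkpoint before the run dies. Below is a minimal sketch of that catch-and-dump pattern in a generic PyTorch loop; `compute_loss` and the surrounding names are illustrative assumptions, not icefall's actual API.

```python
# Sketch of the crash-handling pattern recorded in this log: on a fatal
# error (e.g. a NCCL watchdog timeout), save the current model and
# optimizer state to "bad-model-<rank>.pt" for post-mortem inspection,
# then re-raise. `compute_loss` is a placeholder, not an icefall function.
import torch

def run_epoch(model, optimizer, train_dl, exp_dir: str, rank: int) -> None:
    try:
        for batch in train_dl:
            optimizer.zero_grad()
            loss = compute_loss(model, batch)  # placeholder helper
            loss.backward()
            optimizer.step()
    except Exception:
        # Mirrors "Saving checkpoint to .../bad-model-3.pt": persist state
        # so the failing weights can be inspected after the crash.
        torch.save(
            {"model": model.state_dict(), "optimizer": optimizer.state_dict()},
            f"{exp_dir}/bad-model-{rank}.pt",
        )
        raise
```
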
+2024-08-27 06:16:15,093 INFO [checkpoint.py:75] (3/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/bad-model-3.pt
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-0 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-0
new file mode 100644
index 0000000000000000000000000000000000000000..f49caa4035a9a4100864f2d335409ee75909b583
--- /dev/null
+++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-0
@@ -0,0 +1,725 @@
+2024-08-29 01:48:53,720 INFO [train.py:1182] (0/4) Training started +2024-08-29 01:48:54,366 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-29 01:48:56,897 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True,
'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 01:48:56,898 INFO [train.py:1212] (0/4) About to create model +2024-08-29 01:48:58,161 INFO [train.py:1216] (0/4) Number of model parameters: 65805511 +2024-08-29 01:48:58,702 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 01:49:53,249 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-29 01:49:57,724 INFO [train.py:1231] (0/4) Using DDP +2024-08-29 01:52:46,806 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-29 01:52:46,972 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-29 01:52:46,973 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-29 01:52:47,141 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-29 01:52:47,141 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-29 01:52:47,141 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-29 01:52:47,141 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-29 01:52:48,686 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-29 01:52:48,690 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-29 01:54:40,098 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-29 01:54:41,302 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-29 01:54:41,619 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-29 01:54:41,619 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:04:12,032 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.75 vs. limit=7.5 +2024-08-29 02:04:13,234 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12155MB +2024-08-29 02:04:14,352 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12155MB +2024-08-29 02:12:13,308 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 02:12:14,561 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 02:16:50,161 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 02:16:51,492 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 02:16:51,508 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-29 02:18:18,024 INFO [train.py:1114] (0/4) Epoch 19, batch 0, loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.04146, ctc_loss=0.07309, over 19421.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.04146, ctc_loss=0.07309, over 19421.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 02:18:18,025 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 02:19:56,210 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. 
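
Editorial note: the WARNING lines from `optim.py` that recur throughout this log ("Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...") summarize recent gradient norms as five quantiles and clip against a threshold scaled from the running median. The sketch below illustrates how those logged quantities could be computed; it is an assumption-laden illustration, not the code of icefall's actual optimizer.

```python
# Sketch of the quantities in the "grad-norm quartiles" WARNING lines:
# keep a window of recent total gradient norms, report min/25%/median/75%/max,
# and clip when the current norm exceeds clipping_scale x the running median.
from collections import deque
import torch

def clip_and_report(params: list, history: deque, clipping_scale: float = 2.0):
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.linalg.vector_norm(
        torch.stack([torch.linalg.vector_norm(g) for g in grads])
    )
    history.append(total_norm.item())
    # Five quantiles, matching the five numbers printed in each WARNING line.
    q = torch.quantile(
        torch.tensor(list(history)),
        torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
    )
    threshold = clipping_scale * q[2]  # e.g. 2x the running median grad-norm
    clipped = bool(total_norm > threshold)
    if clipped:
        for g in grads:
            g.mul_(threshold / total_norm)
    return q, threshold, clipped
```

Called once per step with something like `history = deque(maxlen=1000)`, this would produce quartile summaries and a clipped-fraction statistic comparable to the ones logged here.
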
+2024-08-29 02:19:56,210 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 02:19:56,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. limit=10.0 +2024-08-29 02:21:30,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238954.66666666666, ans=0.0 +2024-08-29 02:26:18,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239008.0, ans=0.0 +2024-08-29 02:37:32,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239008.0, ans=0.125 +2024-08-29 02:39:22,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=239008.0, ans=0.025 +2024-08-29 02:59:02,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=239114.66666666666, ans=0.05 +2024-08-29 03:04:12,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239168.0, ans=0.125 +2024-08-29 03:07:33,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=239168.0, ans=0.125 +2024-08-29 03:07:49,840 INFO [train.py:1114] (0/4) Epoch 19, batch 50, loss[loss=0.175, simple_loss=0.2467, pruned_loss=0.03705, ctc_loss=0.0732, over 19679.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2619, pruned_loss=0.03976, ctc_loss=0.07477, over 845033.20 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:11:05,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=239221.33333333334, ans=0.2 +2024-08-29 03:17:30,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.59 vs. limit=10.0 +2024-08-29 03:21:40,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239328.0, ans=0.1 +2024-08-29 03:22:07,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=239328.0, ans=0.125 +2024-08-29 03:22:07,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.95 vs. 
limit=15.0 +2024-08-29 03:22:22,817 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.482e+02 1.734e+02 2.141e+02 3.301e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-29 03:22:23,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=239381.33333333334, ans=0.05 +2024-08-29 03:22:34,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239381.33333333334, ans=0.125 +2024-08-29 03:28:43,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=239381.33333333334, ans=0.125 +2024-08-29 03:35:33,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=239434.66666666666, ans=0.2 +2024-08-29 03:37:40,967 INFO [train.py:1114] (0/4) Epoch 19, batch 100, loss[loss=0.1682, simple_loss=0.2426, pruned_loss=0.03488, ctc_loss=0.06041, over 19728.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2638, pruned_loss=0.04055, ctc_loss=0.07627, over 1498716.65 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:41:33,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239488.0, ans=0.125 +2024-08-29 03:43:21,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239541.33333333334, ans=0.1 +2024-08-29 03:46:34,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239594.66666666666, ans=0.1 +2024-08-29 03:52:12,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 03:52:12,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 03:52:12,764 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 03:56:40,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.64 vs. limit=15.0 +2024-08-29 03:56:43,842 INFO [train.py:1114] (0/4) Epoch 19, batch 150, loss[loss=0.1667, simple_loss=0.2345, pruned_loss=0.03688, ctc_loss=0.06301, over 19754.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2616, pruned_loss=0.04036, ctc_loss=0.07577, over 2027546.02 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:56:43,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239754.66666666666, ans=0.1 +2024-08-29 03:57:06,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=239754.66666666666, ans=0.2 +2024-08-29 03:57:14,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.79 vs. 
limit=15.0 +2024-08-29 03:59:54,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=239861.33333333334, ans=0.0 +2024-08-29 04:01:44,337 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.487e+02 1.911e+02 2.455e+02 3.758e+02, threshold=3.822e+02, percent-clipped=3.0 +2024-08-29 04:02:35,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 04:09:52,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-29 04:10:04,530 INFO [train.py:1114] (0/4) Epoch 19, batch 200, loss[loss=0.2016, simple_loss=0.2707, pruned_loss=0.04842, ctc_loss=0.08922, over 18444.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2598, pruned_loss=0.03996, ctc_loss=0.07485, over 2435779.19 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:10:15,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240021.33333333334, ans=0.0 +2024-08-29 04:11:43,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=240021.33333333334, ans=0.0 +2024-08-29 04:12:18,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240021.33333333334, ans=0.1 +2024-08-29 04:17:17,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240128.0, ans=0.1 +2024-08-29 04:17:30,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240128.0, ans=0.1 +2024-08-29 04:19:24,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=240128.0, ans=0.1 +2024-08-29 04:19:49,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=240181.33333333334, ans=0.2 +2024-08-29 04:20:09,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240181.33333333334, ans=0.1 +2024-08-29 04:23:26,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=240234.66666666666, ans=0.125 +2024-08-29 04:23:53,207 INFO [train.py:1114] (0/4) Epoch 19, batch 250, loss[loss=0.1939, simple_loss=0.2693, pruned_loss=0.0433, ctc_loss=0.07965, over 19356.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2587, pruned_loss=0.0393, ctc_loss=0.07374, over 2756817.04 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:23:53,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=240288.0, ans=0.125 +2024-08-29 04:24:32,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=240288.0, ans=0.0 +2024-08-29 04:27:52,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.76 vs. 
limit=15.0 +2024-08-29 04:28:53,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=240394.66666666666, ans=0.125 +2024-08-29 04:29:04,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.446e+02 1.716e+02 2.275e+02 4.235e+02, threshold=3.432e+02, percent-clipped=4.0 +2024-08-29 04:29:04,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-29 04:29:17,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=240448.0, ans=0.0 +2024-08-29 04:29:24,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=240448.0, ans=0.2 +2024-08-29 04:29:25,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240448.0, ans=0.125 +2024-08-29 04:30:03,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 04:31:34,646 INFO [train.py:1114] (0/4) Epoch 19, batch 300, loss[loss=0.2028, simple_loss=0.2805, pruned_loss=0.04547, ctc_loss=0.08527, over 19482.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2586, pruned_loss=0.03909, ctc_loss=0.07335, over 3000995.93 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:32:05,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-29 04:34:19,506 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:34:39,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=240661.33333333334, ans=0.125 +2024-08-29 04:35:43,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-29 04:38:10,582 INFO [train.py:1114] (0/4) Epoch 19, batch 350, loss[loss=0.175, simple_loss=0.244, pruned_loss=0.03823, ctc_loss=0.07365, over 19748.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2595, pruned_loss=0.0394, ctc_loss=0.07377, over 3190586.01 frames. 
], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:41:56,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240928.0, ans=0.0 +2024-08-29 04:44:05,471 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.489e+02 1.897e+02 2.425e+02 4.045e+02, threshold=3.795e+02, percent-clipped=4.0 +2024-08-29 04:44:07,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=240981.33333333334, ans=0.2 +2024-08-29 04:44:10,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=240981.33333333334, ans=0.2 +2024-08-29 04:44:16,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=241034.66666666666, ans=0.2 +2024-08-29 04:44:28,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=241088.0, ans=0.025 +2024-08-29 04:44:29,337 INFO [train.py:1114] (0/4) Epoch 19, batch 400, loss[loss=0.1865, simple_loss=0.2658, pruned_loss=0.03825, ctc_loss=0.07686, over 19494.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2593, pruned_loss=0.03932, ctc_loss=0.07374, over 3342052.19 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:44:48,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=241088.0, ans=0.0 +2024-08-29 04:45:09,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-29 04:45:17,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-29 04:45:31,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-29 04:45:36,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-29 04:46:01,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241301.33333333334, ans=0.125 +2024-08-29 04:46:24,128 INFO [train.py:1114] (0/4) Epoch 19, batch 450, loss[loss=0.1794, simple_loss=0.2669, pruned_loss=0.0326, ctc_loss=0.06664, over 19626.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2593, pruned_loss=0.03927, ctc_loss=0.07363, over 3450414.33 frames. 
], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:46:55,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=241354.66666666666, ans=0.125 +2024-08-29 04:46:55,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241408.0, ans=0.1 +2024-08-29 04:46:57,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=241408.0, ans=0.025 +2024-08-29 04:47:40,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=241408.0, ans=22.5 +2024-08-29 04:47:40,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-29 04:47:51,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=241461.33333333334, ans=0.0 +2024-08-29 04:47:54,986 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.394e+02 1.625e+02 2.143e+02 3.810e+02, threshold=3.251e+02, percent-clipped=1.0 +2024-08-29 04:49:55,365 INFO [train.py:1114] (0/4) Epoch 19, batch 500, loss[loss=0.1958, simple_loss=0.2718, pruned_loss=0.04355, ctc_loss=0.08176, over 19656.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2585, pruned_loss=0.03907, ctc_loss=0.07315, over 3545263.34 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:52:45,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=241674.66666666666, ans=0.2 +2024-08-29 04:52:58,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=241728.0, ans=0.025 +2024-08-29 04:53:10,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.whiten.whitening_limit, batch_count=241781.33333333334, ans=12.0 +2024-08-29 04:54:01,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-29 04:54:05,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0 +2024-08-29 04:54:05,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241834.66666666666, ans=0.1 +2024-08-29 04:54:08,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 04:54:08,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 04:54:10,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 04:54:15,395 INFO [train.py:1114] (0/4) Epoch 19, batch 550, loss[loss=0.1891, simple_loss=0.2618, pruned_loss=0.04203, ctc_loss=0.08075, over 19368.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03941, ctc_loss=0.07384, over 3607835.04 frames. 
], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:54:17,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.83 vs. limit=6.0 +2024-08-29 04:54:51,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=241888.0, ans=0.0 +2024-08-29 04:55:06,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-29 04:55:11,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-29 04:55:27,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=241994.66666666666, ans=0.1 +2024-08-29 04:55:29,722 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.703e+02 2.107e+02 3.697e+02, threshold=3.406e+02, percent-clipped=1.0 +2024-08-29 04:57:38,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-29 04:57:47,828 INFO [train.py:1114] (0/4) Epoch 19, batch 600, loss[loss=0.1899, simple_loss=0.27, pruned_loss=0.04021, ctc_loss=0.07359, over 19400.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.259, pruned_loss=0.03921, ctc_loss=0.07339, over 3666587.04 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:58:34,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=242208.0, ans=0.0 +2024-08-29 04:58:59,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.15 vs. limit=12.0 +2024-08-29 04:59:41,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.33 vs. limit=12.0 +2024-08-29 04:59:45,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=242368.0, ans=0.0 +2024-08-29 05:00:06,347 INFO [train.py:1114] (0/4) Epoch 19, batch 650, loss[loss=0.163, simple_loss=0.2382, pruned_loss=0.03156, ctc_loss=0.06153, over 19773.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2583, pruned_loss=0.03903, ctc_loss=0.07308, over 3716546.96 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:00:57,635 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.62 vs. limit=12.0 +2024-08-29 05:00:59,056 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.521e+02 1.842e+02 2.430e+02 3.637e+02, threshold=3.684e+02, percent-clipped=5.0 +2024-08-29 05:01:01,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-08-29 05:01:54,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=242581.33333333334, ans=0.0 +2024-08-29 05:01:58,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.10 vs. 
limit=12.0 +2024-08-29 05:02:25,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=242634.66666666666, ans=0.2 +2024-08-29 05:02:31,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.44 vs. limit=22.5 +2024-08-29 05:02:33,326 INFO [train.py:1114] (0/4) Epoch 19, batch 700, loss[loss=0.1813, simple_loss=0.2549, pruned_loss=0.03937, ctc_loss=0.0724, over 19733.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2586, pruned_loss=0.03928, ctc_loss=0.07346, over 3748806.62 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:02:38,559 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:02:44,945 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:03:36,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=242741.33333333334, ans=0.0 +2024-08-29 05:03:48,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=242848.0, ans=0.2 +2024-08-29 05:04:46,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242848.0, ans=0.1 +2024-08-29 05:06:34,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.79 vs. limit=15.0 +2024-08-29 05:06:34,959 INFO [train.py:1114] (0/4) Epoch 19, batch 750, loss[loss=0.1857, simple_loss=0.274, pruned_loss=0.03658, ctc_loss=0.06055, over 19496.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2588, pruned_loss=0.0395, ctc_loss=0.07389, over 3774571.12 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:06:58,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243008.0, ans=0.125 +2024-08-29 05:07:03,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243008.0, ans=0.125 +2024-08-29 05:07:10,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-08-29 05:07:19,363 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.493e+02 1.888e+02 2.375e+02 3.905e+02, threshold=3.776e+02, percent-clipped=3.0 +2024-08-29 05:07:19,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=243114.66666666666, ans=0.125 +2024-08-29 05:08:09,970 INFO [train.py:1114] (0/4) Epoch 19, batch 800, loss[loss=0.1678, simple_loss=0.2398, pruned_loss=0.0352, ctc_loss=0.06352, over 19802.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2592, pruned_loss=0.03974, ctc_loss=0.07418, over 3795407.26 frames. 
], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:08:42,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243221.33333333334, ans=0.1 +2024-08-29 05:09:14,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243274.66666666666, ans=0.0 +2024-08-29 05:09:17,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.67 vs. limit=15.0 +2024-08-29 05:09:18,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.54 vs. limit=15.0 +2024-08-29 05:09:29,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.74 vs. limit=15.0 +2024-08-29 05:09:49,253 INFO [train.py:1114] (0/4) Epoch 19, batch 850, loss[loss=0.1831, simple_loss=0.2681, pruned_loss=0.03559, ctc_loss=0.06733, over 19670.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2589, pruned_loss=0.03963, ctc_loss=0.07389, over 3815217.60 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:10:05,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=243488.0, ans=0.025 +2024-08-29 05:10:06,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.07 vs. limit=22.5 +2024-08-29 05:10:10,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.68 vs. limit=15.0 +2024-08-29 05:10:17,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0 +2024-08-29 05:10:27,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=243594.66666666666, ans=0.0 +2024-08-29 05:10:31,639 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.428e+02 1.590e+02 2.047e+02 2.882e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-29 05:10:31,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=243648.0, ans=0.09899494936611666 +2024-08-29 05:10:33,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=243648.0, ans=0.125 +2024-08-29 05:10:34,880 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.52 vs. limit=15.0 +2024-08-29 05:10:39,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=243701.33333333334, ans=0.025 +2024-08-29 05:10:43,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243701.33333333334, ans=0.1 +2024-08-29 05:10:49,653 INFO [train.py:1114] (0/4) Epoch 19, batch 900, loss[loss=0.1723, simple_loss=0.2462, pruned_loss=0.03566, ctc_loss=0.06772, over 19806.00 frames. 
], tot_loss[loss=0.1842, simple_loss=0.2591, pruned_loss=0.03976, ctc_loss=0.07423, over 3819233.74 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:12,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243861.33333333334, ans=0.1 +2024-08-29 05:11:20,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=243914.66666666666, ans=0.025 +2024-08-29 05:11:31,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=243968.0, ans=0.025 +2024-08-29 05:11:41,916 INFO [train.py:1114] (0/4) Epoch 19, batch 950, loss[loss=0.1612, simple_loss=0.236, pruned_loss=0.03176, ctc_loss=0.05693, over 19501.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2596, pruned_loss=0.04, ctc_loss=0.0747, over 3820559.24 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:59,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244074.66666666666, ans=0.1 +2024-08-29 05:12:08,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=244128.0, ans=0.07 +2024-08-29 05:12:12,400 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.442e+02 1.730e+02 2.025e+02 3.837e+02, threshold=3.461e+02, percent-clipped=4.0 +2024-08-29 05:12:30,082 INFO [train.py:1114] (0/4) Epoch 19, batch 1000, loss[loss=0.1677, simple_loss=0.254, pruned_loss=0.02976, ctc_loss=0.05475, over 19856.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2607, pruned_loss=0.04041, ctc_loss=0.07548, over 3815653.69 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:12:40,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244341.33333333334, ans=0.125 +2024-08-29 05:12:50,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=244341.33333333334, ans=0.125 +2024-08-29 05:13:12,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.68 vs. limit=15.0 +2024-08-29 05:13:19,528 INFO [train.py:1114] (0/4) Epoch 19, batch 1050, loss[loss=0.1936, simple_loss=0.2731, pruned_loss=0.04131, ctc_loss=0.07866, over 19834.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2598, pruned_loss=0.04008, ctc_loss=0.07496, over 3822306.47 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:13:34,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=244608.0, ans=0.04949747468305833 +2024-08-29 05:14:05,758 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.364e+02 1.577e+02 1.842e+02 2.540e+02, threshold=3.153e+02, percent-clipped=0.0 +2024-08-29 05:14:07,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=15.0 +2024-08-29 05:14:25,271 INFO [train.py:1114] (0/4) Epoch 19, batch 1100, loss[loss=0.1716, simple_loss=0.2468, pruned_loss=0.03523, ctc_loss=0.06479, over 19574.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2593, pruned_loss=0.03966, ctc_loss=0.07434, over 3830280.28 frames. 
], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:14:31,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=244821.33333333334, ans=0.0 +2024-08-29 05:14:57,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.42 vs. limit=15.0 +2024-08-29 05:16:46,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245034.66666666666, ans=0.1 +2024-08-29 05:16:48,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245034.66666666666, ans=0.0 +2024-08-29 05:16:51,075 INFO [train.py:1114] (0/4) Epoch 19, batch 1150, loss[loss=0.1559, simple_loss=0.2335, pruned_loss=0.02822, ctc_loss=0.05488, over 19596.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2595, pruned_loss=0.03976, ctc_loss=0.07464, over 3828089.38 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:17:10,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=15.0 +2024-08-29 05:17:12,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245088.0, ans=0.125 +2024-08-29 05:18:05,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=245248.0, ans=0.2 +2024-08-29 05:18:08,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.413e+02 1.588e+02 2.044e+02 3.492e+02, threshold=3.177e+02, percent-clipped=5.0 +2024-08-29 05:18:12,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.31 vs. limit=6.0 +2024-08-29 05:18:13,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=245248.0, ans=0.2 +2024-08-29 05:19:30,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=245301.33333333334, ans=0.0 +2024-08-29 05:19:41,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.47 vs. limit=6.0 +2024-08-29 05:19:41,711 INFO [train.py:1114] (0/4) Epoch 19, batch 1200, loss[loss=0.188, simple_loss=0.2609, pruned_loss=0.04161, ctc_loss=0.07966, over 19828.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2598, pruned_loss=0.03968, ctc_loss=0.07445, over 3823389.80 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:19:49,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-08-29 05:19:50,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. 
limit=15.0 +2024-08-29 05:20:08,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=245514.66666666666, ans=0.025 +2024-08-29 05:20:11,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245514.66666666666, ans=0.1 +2024-08-29 05:20:27,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245568.0, ans=0.125 +2024-08-29 05:20:27,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245568.0, ans=0.125 +2024-08-29 05:20:30,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245568.0, ans=0.1 +2024-08-29 05:20:54,847 INFO [train.py:1114] (0/4) Epoch 19, batch 1250, loss[loss=0.1956, simple_loss=0.2664, pruned_loss=0.04621, ctc_loss=0.08067, over 19532.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2609, pruned_loss=0.04015, ctc_loss=0.07507, over 3841958.57 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:21:43,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=245728.0, ans=0.125 +2024-08-29 05:21:47,051 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.448e+02 1.786e+02 2.342e+02 3.930e+02, threshold=3.573e+02, percent-clipped=1.0 +2024-08-29 05:21:57,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0 +2024-08-29 05:22:00,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=245781.33333333334, ans=0.025 +2024-08-29 05:22:27,718 INFO [train.py:1114] (0/4) Epoch 19, batch 1300, loss[loss=0.1844, simple_loss=0.2627, pruned_loss=0.03949, ctc_loss=0.06789, over 18889.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2601, pruned_loss=0.03991, ctc_loss=0.07463, over 3846145.41 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:22:29,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.50 vs. limit=15.0 +2024-08-29 05:22:39,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-29 05:23:08,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=246048.0, ans=0.125 +2024-08-29 05:23:14,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-08-29 05:23:26,138 INFO [train.py:1114] (0/4) Epoch 19, batch 1350, loss[loss=0.1686, simple_loss=0.2514, pruned_loss=0.03173, ctc_loss=0.05577, over 19769.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2595, pruned_loss=0.03957, ctc_loss=0.0739, over 3856948.23 frames. 
], batch size: 54, lr: 7.87e-03, grad_scale: 32.0
+2024-08-29 05:23:26,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=246154.66666666666, ans=0.125
+2024-08-29 05:23:30,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=246154.66666666666, ans=0.125
+2024-08-29 05:23:38,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0
+2024-08-29 05:23:41,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=246208.0, ans=0.07
+2024-08-29 05:23:54,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0
+2024-08-29 05:23:59,061 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.393e+02 1.600e+02 2.060e+02 3.630e+02, threshold=3.201e+02, percent-clipped=1.0
+2024-08-29 05:23:59,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.76 vs. limit=12.0
+2024-08-29 05:24:08,389 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 05:24:23,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246368.0, ans=0.1
+2024-08-29 05:24:31,010 INFO [train.py:1114] (0/4) Epoch 19, batch 1400, loss[loss=0.1418, simple_loss=0.2193, pruned_loss=0.02323, ctc_loss=0.04454, over 19657.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2593, pruned_loss=0.03962, ctc_loss=0.07398, over 3863914.06 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0
+2024-08-29 05:24:38,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.18 vs. limit=15.0
+2024-08-29 05:24:43,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=246474.66666666666, ans=0.0
+2024-08-29 05:24:53,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=246474.66666666666, ans=0.2
+2024-08-29 05:24:59,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=246528.0, ans=0.0
+2024-08-29 05:25:03,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=246528.0, ans=0.0
+2024-08-29 05:25:12,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=246581.33333333334, ans=0.0
+2024-08-29 05:25:19,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=246634.66666666666, ans=0.2
+2024-08-29 05:25:21,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=246634.66666666666, ans=0.2
+2024-08-29 05:25:22,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=246634.66666666666, ans=0.125
+2024-08-29 05:25:26,813 INFO [train.py:1114] (0/4) Epoch 19, batch 1450, loss[loss=0.1997, simple_loss=0.2719, pruned_loss=0.04608, ctc_loss=0.08838, over 19667.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2597, pruned_loss=0.0399, ctc_loss=0.07435, over 3862670.75 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 32.0
+2024-08-29 05:25:28,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246688.0, ans=0.0
+2024-08-29 05:25:40,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=246741.33333333334, ans=0.125
+2024-08-29 05:26:20,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=246848.0, ans=0.0
+2024-08-29 05:26:22,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.397e+02 1.549e+02 1.935e+02 4.281e+02, threshold=3.099e+02, percent-clipped=1.0
+2024-08-29 05:26:41,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=246901.33333333334, ans=0.5
+2024-08-29 05:26:49,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.20 vs. limit=6.0
+2024-08-29 05:26:50,097 INFO [train.py:1114] (0/4) Epoch 19, batch 1500, loss[loss=0.1876, simple_loss=0.2665, pruned_loss=0.0393, ctc_loss=0.07518, over 19578.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2598, pruned_loss=0.03972, ctc_loss=0.07412, over 3862431.58 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 32.0
+2024-08-29 05:26:56,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.25 vs. limit=6.0
+2024-08-29 05:27:24,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.26 vs. limit=15.0
+2024-08-29 05:27:26,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=247008.0, ans=0.0
+2024-08-29 05:27:41,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247114.66666666666, ans=0.125
+2024-08-29 05:27:51,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247168.0, ans=0.1
+2024-08-29 05:28:02,443 INFO [train.py:1114] (0/4) Epoch 19, batch 1550, loss[loss=0.2018, simple_loss=0.2794, pruned_loss=0.04439, ctc_loss=0.08827, over 19613.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2597, pruned_loss=0.03973, ctc_loss=0.07439, over 3847641.78 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0
+2024-08-29 05:28:05,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247221.33333333334, ans=0.125
+2024-08-29 05:28:05,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.24 vs. limit=22.5
+2024-08-29 05:28:09,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247221.33333333334, ans=0.1
+2024-08-29 05:28:28,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=247274.66666666666, ans=0.2
+2024-08-29 05:28:30,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=247328.0, ans=0.2
+2024-08-29 05:28:45,738 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.405e+02 1.646e+02 2.216e+02 3.789e+02, threshold=3.291e+02, percent-clipped=3.0
+2024-08-29 05:28:45,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=247381.33333333334, ans=0.0
+2024-08-29 05:29:02,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=247434.66666666666, ans=0.0
+2024-08-29 05:30:04,277 INFO [train.py:1114] (0/4) Epoch 19, batch 1600, loss[loss=0.1798, simple_loss=0.2643, pruned_loss=0.03376, ctc_loss=0.06954, over 19846.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2597, pruned_loss=0.03985, ctc_loss=0.07454, over 3836418.62 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0
+2024-08-29 05:30:49,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=247594.66666666666, ans=0.0
+2024-08-29 05:30:49,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247594.66666666666, ans=0.125
+2024-08-29 05:31:00,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.14 vs. limit=15.0
+2024-08-29 05:31:12,547 INFO [train.py:1114] (0/4) Epoch 19, batch 1650, loss[loss=0.2032, simple_loss=0.2809, pruned_loss=0.04526, ctc_loss=0.08757, over 19628.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2597, pruned_loss=0.03998, ctc_loss=0.07456, over 3833472.39 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0
+2024-08-29 05:31:40,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=247754.66666666666, ans=0.2
+2024-08-29 05:32:15,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=247808.0, ans=0.125
+2024-08-29 05:32:30,013 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.667e+02 2.011e+02 2.433e+02 4.037e+02, threshold=4.021e+02, percent-clipped=5.0
+2024-08-29 05:33:11,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=247968.0, ans=0.125
+2024-08-29 05:33:13,439 INFO [train.py:1114] (0/4) Epoch 19, batch 1700, loss[loss=0.1806, simple_loss=0.2442, pruned_loss=0.0427, ctc_loss=0.07899, over 19657.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.03969, ctc_loss=0.07422, over 3847350.29 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0
+2024-08-29 05:33:15,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248021.33333333334, ans=0.125
+2024-08-29 05:33:18,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=248021.33333333334, ans=0.2
+2024-08-29 05:33:31,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.31 vs. limit=22.5
+2024-08-29 05:33:38,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=248128.0, ans=0.025
+2024-08-29 05:34:36,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=248234.66666666666, ans=0.0
+2024-08-29 05:34:41,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=248234.66666666666, ans=0.125
+2024-08-29 05:34:42,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248234.66666666666, ans=0.1
+2024-08-29 05:34:43,949 INFO [train.py:1114] (0/4) Epoch 19, batch 1750, loss[loss=0.1816, simple_loss=0.2432, pruned_loss=0.04378, ctc_loss=0.08136, over 19654.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2591, pruned_loss=0.03955, ctc_loss=0.07392, over 3852186.62 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0
+2024-08-29 05:34:44,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=248288.0, ans=0.0
+2024-08-29 05:34:56,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=248341.33333333334, ans=0.07
+2024-08-29 05:35:08,269 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 05:35:11,461 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.518e+02 1.916e+02 2.294e+02 3.621e+02, threshold=3.832e+02, percent-clipped=0.0
+2024-08-29 05:35:13,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248448.0, ans=0.125
+2024-08-29 05:35:19,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248501.33333333334, ans=0.1
+2024-08-29 05:35:25,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=248501.33333333334, ans=0.125
+2024-08-29 05:35:27,418 INFO [train.py:1114] (0/4) Epoch 19, batch 1800, loss[loss=0.1829, simple_loss=0.2653, pruned_loss=0.03682, ctc_loss=0.06743, over 19604.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2594, pruned_loss=0.0393, ctc_loss=0.0737, over 3854361.58 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 32.0
+2024-08-29 05:35:36,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248608.0, ans=0.125
+2024-08-29 05:35:48,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.73 vs. limit=15.0
+2024-08-29 05:36:10,856 INFO [train.py:1114] (0/4) Epoch 19, batch 1850, loss[loss=0.2004, simple_loss=0.2769, pruned_loss=0.0449, ctc_loss=0.08501, over 19595.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2596, pruned_loss=0.03939, ctc_loss=0.07372, over 3857567.59 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0
+2024-08-29 05:36:10,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=248821.33333333334, ans=0.025
+2024-08-29 05:36:11,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=12.0
+2024-08-29 05:36:12,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=248821.33333333334, ans=0.125
+2024-08-29 05:37:51,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=248874.66666666666, ans=0.0
+2024-08-29 05:37:54,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.28 vs. limit=15.0
+2024-08-29 05:37:58,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.95 vs. limit=10.0
+2024-08-29 05:38:04,258 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.550e+02 2.027e+02 2.927e+02 4.792e+02, threshold=4.055e+02, percent-clipped=10.0
+2024-08-29 05:38:15,163 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.62 vs. limit=15.0
+2024-08-29 05:38:15,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249034.66666666666, ans=0.1
+2024-08-29 05:38:23,693 INFO [train.py:1114] (0/4) Epoch 19, batch 1900, loss[loss=0.1921, simple_loss=0.2807, pruned_loss=0.0378, ctc_loss=0.0697, over 19668.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2603, pruned_loss=0.03948, ctc_loss=0.07389, over 3862216.50 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 32.0
+2024-08-29 05:38:40,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=249141.33333333334, ans=0.0
+2024-08-29 05:39:15,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=249194.66666666666, ans=0.04949747468305833
+2024-08-29 05:39:28,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=15.0
+2024-08-29 05:39:36,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=249301.33333333334, ans=0.2
+2024-08-29 05:39:41,146 INFO [train.py:1114] (0/4) Epoch 19, batch 1950, loss[loss=0.164, simple_loss=0.243, pruned_loss=0.03115, ctc_loss=0.05649, over 19586.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.261, pruned_loss=0.03973, ctc_loss=0.07427, over 3871303.88 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0
+2024-08-29 05:40:13,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249408.0, ans=0.125
+2024-08-29 05:40:17,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.04 vs. limit=10.0
+2024-08-29 05:40:23,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=249461.33333333334, ans=0.035
+2024-08-29 05:40:36,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249461.33333333334, ans=0.125
+2024-08-29 05:40:43,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.54 vs. limit=10.0
+2024-08-29 05:40:45,432 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.357e+02 1.563e+02 1.867e+02 4.467e+02, threshold=3.126e+02, percent-clipped=1.0
+2024-08-29 05:40:45,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=249514.66666666666, ans=0.1
+2024-08-29 05:40:50,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249514.66666666666, ans=0.125
+2024-08-29 05:41:02,865 INFO [train.py:1114] (0/4) Epoch 19, batch 2000, loss[loss=0.1554, simple_loss=0.2263, pruned_loss=0.03024, ctc_loss=0.06025, over 19614.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2615, pruned_loss=0.04005, ctc_loss=0.07483, over 3856103.74 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0
+2024-08-29 05:41:21,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.88 vs. limit=15.0
+2024-08-29 05:41:23,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.26 vs. limit=15.0
+2024-08-29 05:42:21,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249728.0, ans=0.1
+2024-08-29 05:42:29,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=249781.33333333334, ans=0.2
+2024-08-29 05:42:32,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=249781.33333333334, ans=0.0
+2024-08-29 05:42:32,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=249781.33333333334, ans=6.0
+2024-08-29 05:42:35,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=249781.33333333334, ans=0.025
+2024-08-29 05:42:58,866 INFO [train.py:1114] (0/4) Epoch 19, batch 2050, loss[loss=0.1646, simple_loss=0.2338, pruned_loss=0.03522, ctc_loss=0.06242, over 19697.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.26, pruned_loss=0.03966, ctc_loss=0.07426, over 3852752.59 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0
+2024-08-29 05:43:00,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=249888.0, ans=0.2
+2024-08-29 05:43:09,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=249941.33333333334, ans=0.0
+2024-08-29 05:43:13,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=249941.33333333334, ans=0.02
+2024-08-29 05:43:20,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249994.66666666666, ans=0.1
+2024-08-29 05:43:26,426 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.469e+02 1.713e+02 2.068e+02 3.370e+02, threshold=3.427e+02, percent-clipped=2.0
+2024-08-29 05:43:26,910 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.59 vs. limit=15.0
+2024-08-29 05:43:27,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250048.0, ans=0.1
+2024-08-29 05:43:41,973 INFO [train.py:1114] (0/4) Epoch 19, batch 2100, loss[loss=0.1873, simple_loss=0.2664, pruned_loss=0.04057, ctc_loss=0.06786, over 19782.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2592, pruned_loss=0.03914, ctc_loss=0.07324, over 3859487.84 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0
+2024-08-29 05:44:55,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250314.66666666666, ans=0.1
+2024-08-29 05:44:58,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=250368.0, ans=0.0
+2024-08-29 05:45:09,918 INFO [train.py:1114] (0/4) Epoch 19, batch 2150, loss[loss=0.1692, simple_loss=0.2505, pruned_loss=0.03222, ctc_loss=0.05846, over 19568.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2587, pruned_loss=0.03915, ctc_loss=0.07296, over 3870372.76 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0
+2024-08-29 05:45:14,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=250421.33333333334, ans=0.0
+2024-08-29 05:45:17,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=250421.33333333334, ans=0.0
+2024-08-29 05:45:17,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250474.66666666666, ans=0.1
+2024-08-29 05:45:19,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.85 vs. limit=15.0
+2024-08-29 05:45:26,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.11 vs. limit=15.0
+2024-08-29 05:45:29,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=250528.0, ans=0.5
+2024-08-29 05:45:43,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250581.33333333334, ans=0.125
+2024-08-29 05:45:44,552 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.532e+02 1.812e+02 2.283e+02 4.768e+02, threshold=3.624e+02, percent-clipped=7.0
+2024-08-29 05:45:48,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250581.33333333334, ans=0.125
+2024-08-29 05:46:03,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=250634.66666666666, ans=0.125
+2024-08-29 05:46:24,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250688.0, ans=0.1
+2024-08-29 05:46:31,677 INFO [train.py:1114] (0/4) Epoch 19, batch 2200, loss[loss=0.1894, simple_loss=0.2678, pruned_loss=0.04066, ctc_loss=0.07422, over 19587.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2588, pruned_loss=0.03913, ctc_loss=0.07287, over 3869227.07 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0
+2024-08-29 05:46:38,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=15.0
+2024-08-29 05:47:30,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=250794.66666666666, ans=0.0
+2024-08-29 05:47:46,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=250794.66666666666, ans=0.125
+2024-08-29 05:47:46,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=250794.66666666666, ans=0.125
+2024-08-29 05:47:51,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0
+2024-08-29 05:48:08,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=250901.33333333334, ans=0.0
+2024-08-29 05:48:12,301 INFO [train.py:1114] (0/4) Epoch 19, batch 2250, loss[loss=0.1901, simple_loss=0.2722, pruned_loss=0.03861, ctc_loss=0.07667, over 19623.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2594, pruned_loss=0.03924, ctc_loss=0.07305, over 3869499.94 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0
+2024-08-29 05:48:23,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.16 vs. limit=22.5
+2024-08-29 05:48:30,140 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 05:48:35,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251061.33333333334, ans=0.0
+2024-08-29 05:48:35,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251061.33333333334, ans=0.0
+2024-08-29 05:48:39,383 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.496e+02 1.836e+02 2.405e+02 3.916e+02, threshold=3.673e+02, percent-clipped=1.0
+2024-08-29 05:48:47,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.48 vs. limit=15.0
+2024-08-29 05:48:48,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.90 vs. limit=15.0
+2024-08-29 05:48:58,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=251168.0, ans=0.0
+2024-08-29 05:49:01,359 INFO [train.py:1114] (0/4) Epoch 19, batch 2300, loss[loss=0.1698, simple_loss=0.2416, pruned_loss=0.03472, ctc_loss=0.07133, over 19501.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2584, pruned_loss=0.03919, ctc_loss=0.07307, over 3863587.28 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0
+2024-08-29 05:49:03,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=251221.33333333334, ans=0.07
+2024-08-29 05:50:30,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=251274.66666666666, ans=0.0
+2024-08-29 05:50:51,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.94 vs. limit=22.5
+2024-08-29 05:50:59,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. limit=6.0
+2024-08-29 05:51:03,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251434.66666666666, ans=0.0
+2024-08-29 05:51:12,204 INFO [train.py:1114] (0/4) Epoch 19, batch 2350, loss[loss=0.1921, simple_loss=0.2703, pruned_loss=0.04115, ctc_loss=0.07908, over 19649.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2585, pruned_loss=0.03947, ctc_loss=0.07356, over 3865582.44 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0
+2024-08-29 05:51:14,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=251488.0, ans=0.0
+2024-08-29 05:51:37,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251594.66666666666, ans=0.1
+2024-08-29 05:51:43,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=251594.66666666666, ans=0.0
+2024-08-29 05:51:52,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.480e+02 1.867e+02 2.502e+02 4.275e+02, threshold=3.733e+02, percent-clipped=4.0
+2024-08-29 05:51:53,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=251648.0, ans=0.0
+2024-08-29 05:52:09,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=251701.33333333334, ans=0.0
+2024-08-29 05:52:13,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=251701.33333333334, ans=0.0
+2024-08-29 05:52:17,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=251701.33333333334, ans=0.2
+2024-08-29 05:52:19,094 INFO [train.py:1114] (0/4) Epoch 19, batch 2400, loss[loss=0.2128, simple_loss=0.2757, pruned_loss=0.05477, ctc_loss=0.1009, over 19372.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2607, pruned_loss=0.04033, ctc_loss=0.07504, over 3861154.57 frames. ], batch size: 67, lr: 7.79e-03, grad_scale: 32.0
+2024-08-29 05:53:18,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=251861.33333333334, ans=0.0
+2024-08-29 05:53:32,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=251861.33333333334, ans=0.125
+2024-08-29 05:53:55,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=252021.33333333334, ans=15.0
+2024-08-29 05:53:55,382 INFO [train.py:1114] (0/4) Epoch 19, batch 2450, loss[loss=0.2351, simple_loss=0.2877, pruned_loss=0.06563, ctc_loss=0.128, over 12910.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2641, pruned_loss=0.04253, ctc_loss=0.07936, over 3737846.95 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0
+2024-08-29 05:54:18,916 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 05:54:23,263 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.449e+02 1.688e+02 1.808e+02 3.489e+02, threshold=3.376e+02, percent-clipped=0.0
+2024-08-29 05:54:29,840 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-19.pt
+2024-08-29 05:55:27,185 INFO [train.py:1114] (0/4) Epoch 20, batch 0, loss[loss=0.1803, simple_loss=0.244, pruned_loss=0.04232, ctc_loss=0.08016, over 19809.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.244, pruned_loss=0.04232, ctc_loss=0.08016, over 19809.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0
+2024-08-29 05:55:27,187 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-29 05:55:51,561 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0247, 3.7359, 3.5258, 3.5571], device='cuda:0')
+2024-08-29 05:55:55,978 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1713, simple_loss=0.2633, pruned_loss=0.0297, ctc_loss=0.04995, over 944034.00 frames.
+2024-08-29 05:55:55,987 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12778MB
+2024-08-29 05:56:07,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=252288.0, ans=0.025
+2024-08-29 05:56:20,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252288.0, ans=0.125
+2024-08-29 05:56:20,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.67 vs. limit=15.0
+2024-08-29 05:56:47,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252394.66666666666, ans=0.125
+2024-08-29 05:56:52,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252448.0, ans=0.125
+2024-08-29 05:57:32,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=252448.0, ans=0.0
+2024-08-29 05:57:36,251 INFO [train.py:1114] (0/4) Epoch 20, batch 50, loss[loss=0.1629, simple_loss=0.2376, pruned_loss=0.03171, ctc_loss=0.06181, over 19720.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2602, pruned_loss=0.03989, ctc_loss=0.07454, over 843606.59 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 16.0
+2024-08-29 05:57:50,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=252501.33333333334, ans=0.95
+2024-08-29 05:58:06,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252608.0, ans=0.125
+2024-08-29 05:58:32,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=252661.33333333334, ans=0.2
+2024-08-29 05:58:34,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=252714.66666666666, ans=0.125
+2024-08-29 05:58:38,106 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.450e+02 1.693e+02 1.984e+02 3.027e+02, threshold=3.386e+02, percent-clipped=0.0
+2024-08-29 05:58:48,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.99 vs. limit=15.0
+2024-08-29 05:58:48,712 INFO [train.py:1114] (0/4) Epoch 20, batch 100, loss[loss=0.1622, simple_loss=0.2393, pruned_loss=0.03078, ctc_loss=0.05864, over 19733.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2618, pruned_loss=0.04045, ctc_loss=0.07541, over 1498532.54 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 16.0
+2024-08-29 05:58:55,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.09 vs. limit=12.0
+2024-08-29 05:59:06,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=252821.33333333334, ans=0.2
+2024-08-29 05:59:22,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=252928.0, ans=0.125
+2024-08-29 05:59:28,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=252928.0, ans=0.0
+2024-08-29 05:59:29,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=252928.0, ans=0.125
+2024-08-29 05:59:33,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=252981.33333333334, ans=0.125
+2024-08-29 05:59:58,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=252981.33333333334, ans=0.0
+2024-08-29 06:00:02,036 INFO [train.py:1114] (0/4) Epoch 20, batch 150, loss[loss=0.1782, simple_loss=0.2423, pruned_loss=0.04239, ctc_loss=0.07347, over 19685.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2591, pruned_loss=0.03955, ctc_loss=0.07376, over 2026778.29 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 16.0
+2024-08-29 06:00:03,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=253034.66666666666, ans=0.0
+2024-08-29 06:00:43,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253088.0, ans=0.0
+2024-08-29 06:00:54,923 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.30 vs. limit=15.0
+2024-08-29 06:01:01,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253141.33333333334, ans=0.0
+2024-08-29 06:01:19,561 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.374e+02 1.536e+02 1.873e+02 3.368e+02, threshold=3.073e+02, percent-clipped=0.0
+2024-08-29 06:01:20,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=15.0
+2024-08-29 06:01:25,925 INFO [train.py:1114] (0/4) Epoch 20, batch 200, loss[loss=0.1953, simple_loss=0.2742, pruned_loss=0.04218, ctc_loss=0.08025, over 18195.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2579, pruned_loss=0.03926, ctc_loss=0.0732, over 2434933.67 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 16.0
+2024-08-29 06:01:56,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253408.0, ans=0.125
+2024-08-29 06:03:17,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=253461.33333333334, ans=0.125
+2024-08-29 06:03:19,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253461.33333333334, ans=0.1
+2024-08-29 06:03:23,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253461.33333333334, ans=0.125
+2024-08-29 06:03:27,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253514.66666666666, ans=0.0
+2024-08-29 06:03:51,877 INFO [train.py:1114] (0/4) Epoch 20, batch 250, loss[loss=0.1845, simple_loss=0.2675, pruned_loss=0.03696, ctc_loss=0.06902, over 19420.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2579, pruned_loss=0.03897, ctc_loss=0.07263, over 2755716.60 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 16.0
+2024-08-29 06:04:43,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=253674.66666666666, ans=0.025
+2024-08-29 06:04:47,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253674.66666666666, ans=0.1
+2024-08-29 06:04:48,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253674.66666666666, ans=0.125
+2024-08-29 06:05:13,797 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.460e+02 1.674e+02 2.079e+02 4.615e+02, threshold=3.347e+02, percent-clipped=6.0
+2024-08-29 06:06:01,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=253781.33333333334, ans=0.025
+2024-08-29 06:06:03,017 INFO [train.py:1114] (0/4) Epoch 20, batch 300, loss[loss=0.2119, simple_loss=0.2848, pruned_loss=0.05027, ctc_loss=0.09615, over 19549.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2578, pruned_loss=0.03889, ctc_loss=0.07247, over 3001088.90 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 16.0
+2024-08-29 06:06:18,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253888.0, ans=0.125
+2024-08-29 06:07:06,196 INFO [train.py:1114] (0/4) Epoch 20, batch 350, loss[loss=0.1832, simple_loss=0.2469, pruned_loss=0.04418, ctc_loss=0.07775, over 19726.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2586, pruned_loss=0.03907, ctc_loss=0.07277, over 3190959.39 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 16.0
+2024-08-29 06:07:06,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=254101.33333333334, ans=0.2
+2024-08-29 06:07:09,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254101.33333333334, ans=0.125
+2024-08-29 06:07:33,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=254261.33333333334, ans=0.0
+2024-08-29 06:07:33,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.62 vs. limit=15.0
+2024-08-29 06:07:34,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254261.33333333334, ans=0.125
+2024-08-29 06:07:36,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.12 vs. limit=22.5
+2024-08-29 06:07:43,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=254261.33333333334, ans=0.5
+2024-08-29 06:07:50,373 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.501e+02 1.796e+02 2.192e+02 4.069e+02, threshold=3.593e+02, percent-clipped=5.0
+2024-08-29 06:07:53,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.55 vs. limit=22.5
+2024-08-29 06:07:58,763 INFO [train.py:1114] (0/4) Epoch 20, batch 400, loss[loss=0.1855, simple_loss=0.2593, pruned_loss=0.04122, ctc_loss=0.07329, over 19510.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2581, pruned_loss=0.03886, ctc_loss=0.07222, over 3342257.31 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0
+2024-08-29 06:08:18,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=254474.66666666666, ans=0.2
+2024-08-29 06:08:51,105 INFO [train.py:1114] (0/4) Epoch 20, batch 450, loss[loss=0.1757, simple_loss=0.264, pruned_loss=0.03173, ctc_loss=0.05982, over 19600.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2582, pruned_loss=0.03883, ctc_loss=0.07219, over 3451494.20 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 16.0
+2024-08-29 06:10:30,942 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.420e+02 1.652e+02 2.202e+02 3.176e+02, threshold=3.303e+02, percent-clipped=0.0
+2024-08-29 06:10:51,252 INFO [train.py:1114] (0/4) Epoch 20, batch 500, loss[loss=0.1878, simple_loss=0.2625, pruned_loss=0.04109, ctc_loss=0.07735, over 19680.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2575, pruned_loss=0.03855, ctc_loss=0.07187, over 3545952.35 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 8.0
+2024-08-29 06:11:11,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255008.0, ans=0.1
+2024-08-29 06:11:14,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=255008.0, ans=0.0
+2024-08-29 06:11:26,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.33 vs. limit=15.0
+2024-08-29 06:11:57,463 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.75 vs. limit=15.0
+2024-08-29 06:11:58,824 INFO [train.py:1114] (0/4) Epoch 20, batch 550, loss[loss=0.1976, simple_loss=0.2743, pruned_loss=0.0435, ctc_loss=0.08495, over 19253.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2577, pruned_loss=0.03856, ctc_loss=0.07204, over 3608173.82 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 8.0
+2024-08-29 06:12:02,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=255168.0, ans=0.2
+2024-08-29 06:12:31,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=255328.0, ans=0.125
+2024-08-29 06:13:03,358 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.429e+02 1.650e+02 1.993e+02 3.679e+02, threshold=3.299e+02, percent-clipped=2.0
+2024-08-29 06:13:12,592 INFO [train.py:1114] (0/4) Epoch 20, batch 600, loss[loss=0.1908, simple_loss=0.2728, pruned_loss=0.04003, ctc_loss=0.07172, over 19331.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.258, pruned_loss=0.03844, ctc_loss=0.07201, over 3665746.35 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 8.0
+2024-08-29 06:13:13,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=255434.66666666666, ans=0.0
+2024-08-29 06:13:36,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.63 vs. limit=15.0
+2024-08-29 06:13:57,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=255488.0, ans=0.2
+2024-08-29 06:14:23,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=255541.33333333334, ans=0.0
+2024-08-29 06:14:29,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=15.0
+2024-08-29 06:14:35,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=255594.66666666666, ans=0.0
+2024-08-29 06:14:36,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=255594.66666666666, ans=0.125
+2024-08-29 06:15:00,640 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.34 vs. limit=12.0
+2024-08-29 06:15:09,593 INFO [train.py:1114] (0/4) Epoch 20, batch 650, loss[loss=0.1675, simple_loss=0.2528, pruned_loss=0.02957, ctc_loss=0.05757, over 19755.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2577, pruned_loss=0.03843, ctc_loss=0.07214, over 3716724.37 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 8.0
+2024-08-29 06:15:23,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=255754.66666666666, ans=0.2
+2024-08-29 06:15:30,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.20 vs. limit=6.0
+2024-08-29 06:15:59,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255808.0, ans=0.125
+2024-08-29 06:16:03,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.18 vs. limit=15.0
+2024-08-29 06:16:48,642 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.504e+02 1.911e+02 2.476e+02 5.788e+02, threshold=3.821e+02, percent-clipped=14.0
+2024-08-29 06:16:53,254 INFO [train.py:1114] (0/4) Epoch 20, batch 700, loss[loss=0.182, simple_loss=0.26, pruned_loss=0.03773, ctc_loss=0.07149, over 19713.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2586, pruned_loss=0.03867, ctc_loss=0.07245, over 3748489.04 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 8.0
+2024-08-29 06:17:00,602 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-48000.pt
+2024-08-29 06:17:02,800 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.30 vs. limit=6.0
+2024-08-29 06:17:04,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255968.0, ans=0.1
+2024-08-29 06:17:22,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=256074.66666666666, ans=0.2
+2024-08-29 06:17:24,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256128.0, ans=0.125
+2024-08-29 06:17:29,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=256128.0, ans=0.125
+2024-08-29 06:17:35,522 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 06:17:42,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=256181.33333333334, ans=0.125
+2024-08-29 06:17:45,331 INFO [train.py:1114] (0/4) Epoch 20, batch 750, loss[loss=0.1727, simple_loss=0.2576, pruned_loss=0.03189, ctc_loss=0.05982, over 19516.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.258, pruned_loss=0.03839, ctc_loss=0.07185, over 3775536.21 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 8.0
+2024-08-29 06:18:15,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256394.66666666666, ans=0.0
+2024-08-29 06:18:17,414 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 06:18:28,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256448.0, ans=0.125
+2024-08-29 06:18:31,359 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.481e+02 1.912e+02 2.487e+02 4.029e+02, threshold=3.825e+02, percent-clipped=2.0
+2024-08-29 06:18:37,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.64 vs. limit=10.0
+2024-08-29 06:18:37,697 INFO [train.py:1114] (0/4) Epoch 20, batch 800, loss[loss=0.1901, simple_loss=0.255, pruned_loss=0.04519, ctc_loss=0.08703, over 19816.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2582, pruned_loss=0.03848, ctc_loss=0.07209, over 3797324.35 frames. ], batch size: 49, lr: 7.52e-03, grad_scale: 16.0
+2024-08-29 06:18:37,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256501.33333333334, ans=0.125
+2024-08-29 06:19:01,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256608.0, ans=0.0
+2024-08-29 06:19:02,841 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 06:19:08,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256661.33333333334, ans=0.125
+2024-08-29 06:19:31,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=256661.33333333334, ans=0.025
+2024-08-29 06:19:36,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=256714.66666666666, ans=0.1
+2024-08-29 06:19:42,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.72 vs. limit=15.0
+2024-08-29 06:19:43,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-08-29 06:19:46,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=256714.66666666666, ans=0.125
+2024-08-29 06:19:48,530 INFO [train.py:1114] (0/4) Epoch 20, batch 850, loss[loss=0.1894, simple_loss=0.2667, pruned_loss=0.0414, ctc_loss=0.0735, over 19647.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2578, pruned_loss=0.03841, ctc_loss=0.07187, over 3816331.02 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 16.0
+2024-08-29 06:19:50,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=256768.0, ans=0.0
+2024-08-29 06:19:51,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=256768.0, ans=0.0
+2024-08-29 06:19:53,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256768.0, ans=0.0
+2024-08-29 06:20:12,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=256821.33333333334, ans=0.0
+2024-08-29 06:20:24,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.64 vs. limit=15.0
+2024-08-29 06:20:39,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=256981.33333333334, ans=0.125
+2024-08-29 06:20:41,632 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.422e+02 1.634e+02 1.945e+02 3.890e+02, threshold=3.267e+02, percent-clipped=1.0
+2024-08-29 06:20:49,508 INFO [train.py:1114] (0/4) Epoch 20, batch 900, loss[loss=0.1559, simple_loss=0.2261, pruned_loss=0.031, ctc_loss=0.05903, over 19410.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2581, pruned_loss=0.03884, ctc_loss=0.07271, over 3819704.95 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 16.0
+2024-08-29 06:22:18,958 INFO [train.py:1114] (0/4) Epoch 20, batch 950, loss[loss=0.168, simple_loss=0.2363, pruned_loss=0.03532, ctc_loss=0.07249, over 19494.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2583, pruned_loss=0.03895, ctc_loss=0.07298, over 3820524.61 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 16.0
+2024-08-29 06:22:19,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257301.33333333334, ans=0.0
+2024-08-29 06:22:48,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=257301.33333333334, ans=0.125
+2024-08-29 06:23:33,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=12.0
+2024-08-29 06:23:34,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=257461.33333333334, ans=0.125
+2024-08-29 06:23:34,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=257461.33333333334, ans=0.0
+2024-08-29 06:23:47,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=257514.66666666666, ans=0.125
+2024-08-29 06:23:48,046 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.398e+02 1.599e+02 1.937e+02 2.870e+02, threshold=3.197e+02, percent-clipped=0.0
+2024-08-29 06:23:50,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=257514.66666666666, ans=0.125
+2024-08-29 06:23:52,535 INFO [train.py:1114] (0/4) Epoch 20, batch 1000, loss[loss=0.1602, simple_loss=0.2388, pruned_loss=0.02957, ctc_loss=0.05603, over 19855.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2589, pruned_loss=0.03913, ctc_loss=0.07355, over 3817259.76 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0
+2024-08-29 06:23:58,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.72 vs. limit=15.0
+2024-08-29 06:24:45,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257728.0, ans=0.125
+2024-08-29 06:24:56,441 INFO [train.py:1114] (0/4) Epoch 20, batch 1050, loss[loss=0.1716, simple_loss=0.2546, pruned_loss=0.03183, ctc_loss=0.06247, over 19842.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2582, pruned_loss=0.03883, ctc_loss=0.07282, over 3823486.45 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 16.0
+2024-08-29 06:25:06,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257888.0, ans=0.125
+2024-08-29 06:25:09,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=257888.0, ans=0.2
+2024-08-29 06:25:37,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=257994.66666666666, ans=0.035
+2024-08-29 06:25:54,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.418e+02 1.590e+02 1.994e+02 3.641e+02, threshold=3.179e+02, percent-clipped=3.0
+2024-08-29 06:25:58,776 INFO [train.py:1114] (0/4) Epoch 20, batch 1100, loss[loss=0.184, simple_loss=0.2555, pruned_loss=0.04139, ctc_loss=0.074, over 19588.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2582, pruned_loss=0.03867, ctc_loss=0.07246, over 3831554.41 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0
+2024-08-29 06:26:33,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=258261.33333333334, ans=0.125
+2024-08-29 06:27:06,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=11.71 vs. limit=15.0
+2024-08-29 06:27:30,549 INFO [train.py:1114] (0/4) Epoch 20, batch 1150, loss[loss=0.1858, simple_loss=0.2631, pruned_loss=0.03881, ctc_loss=0.07701, over 19585.00 frames. ], tot_loss[loss=0.182, simple_loss=0.258, pruned_loss=0.03852, ctc_loss=0.07229, over 3828701.45 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 16.0
+2024-08-29 06:27:36,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258368.0, ans=0.1
+2024-08-29 06:27:45,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=258421.33333333334, ans=0.125
+2024-08-29 06:27:47,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=258421.33333333334, ans=0.0
+2024-08-29 06:28:01,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.84 vs. limit=15.0
+2024-08-29 06:28:03,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=258528.0, ans=0.125
+2024-08-29 06:28:08,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258581.33333333334, ans=0.125
+2024-08-29 06:28:13,433 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.421e+02 1.745e+02 2.163e+02 3.118e+02, threshold=3.490e+02, percent-clipped=0.0
+2024-08-29 06:28:18,037 INFO [train.py:1114] (0/4) Epoch 20, batch 1200, loss[loss=0.2004, simple_loss=0.2808, pruned_loss=0.04432, ctc_loss=0.07827, over 19840.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2588, pruned_loss=0.03884, ctc_loss=0.07278, over 3825325.48 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0
+2024-08-29 06:29:03,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0
+2024-08-29 06:29:05,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=258688.0, ans=0.0
+2024-08-29 06:30:22,528 INFO [train.py:1114] (0/4) Epoch 20, batch 1250, loss[loss=0.193, simple_loss=0.2703, pruned_loss=0.0417, ctc_loss=0.0805, over 19541.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2589, pruned_loss=0.03885, ctc_loss=0.07266, over 3843320.66 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 16.0
+2024-08-29 06:30:24,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.63 vs. limit=15.0
+2024-08-29 06:30:39,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0
+2024-08-29 06:30:44,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.41 vs. limit=15.0
+2024-08-29 06:30:48,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259008.0, ans=0.125
+2024-08-29 06:30:49,742 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 06:30:53,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259061.33333333334, ans=0.1
+2024-08-29 06:30:54,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.92 vs. limit=22.5
+2024-08-29 06:30:59,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.84 vs. limit=22.5
+2024-08-29 06:30:59,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259061.33333333334, ans=0.1
+2024-08-29 06:31:05,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=12.0
+2024-08-29 06:31:09,980 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.413e+02 1.610e+02 2.110e+02 3.599e+02, threshold=3.219e+02, percent-clipped=1.0
+2024-08-29 06:31:51,312 INFO [train.py:1114] (0/4) Epoch 20, batch 1300, loss[loss=0.1917, simple_loss=0.2701, pruned_loss=0.04029, ctc_loss=0.08187, over 18810.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2584, pruned_loss=0.03879, ctc_loss=0.07251, over 3844907.25 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 16.0
+2024-08-29 06:33:56,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=259168.0, ans=0.125
+2024-08-29 06:34:27,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=259221.33333333334, ans=0.2
+2024-08-29 06:34:31,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=259221.33333333334, ans=0.0
+2024-08-29 06:34:32,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.77 vs. limit=15.0
+2024-08-29 06:34:52,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259328.0, ans=0.125
+2024-08-29 06:35:58,299 INFO [train.py:1114] (0/4) Epoch 20, batch 1350, loss[loss=0.1734, simple_loss=0.2521, pruned_loss=0.03418, ctc_loss=0.06577, over 19761.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2578, pruned_loss=0.03841, ctc_loss=0.07179, over 3855646.42 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 16.0
+2024-08-29 06:36:13,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.67 vs. limit=15.0
+2024-08-29 06:36:16,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=259488.0, ans=0.04949747468305833
+2024-08-29 06:36:17,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=259488.0, ans=0.025
+2024-08-29 06:36:32,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259594.66666666666, ans=0.0
+2024-08-29 06:36:36,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259648.0, ans=0.1
+2024-08-29 06:36:43,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=259648.0, ans=0.125
+2024-08-29 06:36:45,725 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.394e+02 1.620e+02 1.985e+02 3.317e+02, threshold=3.241e+02, percent-clipped=2.0
+2024-08-29 06:36:49,862 INFO [train.py:1114] (0/4) Epoch 20, batch 1400, loss[loss=0.1485, simple_loss=0.2216, pruned_loss=0.02661, ctc_loss=0.05542, over 19682.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2578, pruned_loss=0.03847, ctc_loss=0.07175, over 3862772.72 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 16.0
+2024-08-29 06:36:51,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.96 vs. limit=10.0
+2024-08-29 06:36:55,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=259701.33333333334, ans=0.0
+2024-08-29 06:37:26,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=259808.0, ans=0.125
+2024-08-29 06:37:37,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=259808.0, ans=0.125
+2024-08-29 06:37:49,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259914.66666666666, ans=0.125
+2024-08-29 06:37:49,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=259914.66666666666, ans=0.125
+2024-08-29 06:38:00,928 INFO [train.py:1114] (0/4) Epoch 20, batch 1450, loss[loss=0.187, simple_loss=0.2641, pruned_loss=0.03924, ctc_loss=0.07837, over 19667.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2582, pruned_loss=0.03862, ctc_loss=0.0724, over 3861785.17 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 16.0
+2024-08-29 06:38:02,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259968.0, ans=0.125
+2024-08-29 06:38:07,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=259968.0, ans=0.125
+2024-08-29 06:38:08,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=259968.0, ans=0.125
+2024-08-29 06:38:11,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=260021.33333333334, ans=0.0
+2024-08-29 06:38:23,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=260074.66666666666, ans=0.0
+2024-08-29 06:38:24,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=260074.66666666666, ans=0.125
+2024-08-29 06:38:31,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260074.66666666666, ans=0.125
+2024-08-29 06:38:34,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.68 vs. limit=15.0
+2024-08-29 06:38:46,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=260181.33333333334, ans=0.2
+2024-08-29 06:38:48,135 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.378e+02 1.589e+02 1.878e+02 3.405e+02, threshold=3.177e+02, percent-clipped=1.0
+2024-08-29 06:38:49,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.36 vs. limit=22.5
+2024-08-29 06:38:51,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.61 vs. limit=15.0
+2024-08-29 06:38:51,864 INFO [train.py:1114] (0/4) Epoch 20, batch 1500, loss[loss=0.2016, simple_loss=0.2754, pruned_loss=0.04677, ctc_loss=0.08557, over 19584.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2586, pruned_loss=0.0387, ctc_loss=0.07247, over 3861372.31 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 16.0
+2024-08-29 06:38:58,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=260234.66666666666, ans=0.125
+2024-08-29 06:39:10,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=260341.33333333334, ans=0.0
+2024-08-29 06:39:21,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260394.66666666666, ans=0.0
+2024-08-29 06:39:35,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=260394.66666666666, ans=0.0
+2024-08-29 06:39:36,835 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 06:39:44,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. 
limit=6.0 +2024-08-29 06:40:23,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260501.33333333334, ans=0.125 +2024-08-29 06:40:24,666 INFO [train.py:1114] (0/4) Epoch 20, batch 1550, loss[loss=0.2026, simple_loss=0.2799, pruned_loss=0.04652, ctc_loss=0.08036, over 19610.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2586, pruned_loss=0.03893, ctc_loss=0.07284, over 3846392.59 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 06:40:24,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=260501.33333333334, ans=0.0 +2024-08-29 06:40:33,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=260554.66666666666, ans=0.2 +2024-08-29 06:40:40,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=260554.66666666666, ans=0.025 +2024-08-29 06:41:17,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=260661.33333333334, ans=10.0 +2024-08-29 06:41:37,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.44 vs. limit=15.0 +2024-08-29 06:41:37,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=260661.33333333334, ans=0.5 +2024-08-29 06:41:38,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260661.33333333334, ans=0.125 +2024-08-29 06:41:43,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=260714.66666666666, ans=0.025 +2024-08-29 06:41:48,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260714.66666666666, ans=0.0 +2024-08-29 06:41:55,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260714.66666666666, ans=0.1 +2024-08-29 06:41:55,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=260714.66666666666, ans=0.0 +2024-08-29 06:41:57,061 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.471e+02 1.781e+02 2.261e+02 3.819e+02, threshold=3.562e+02, percent-clipped=6.0 +2024-08-29 06:41:59,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=260714.66666666666, ans=0.125 +2024-08-29 06:42:30,548 INFO [train.py:1114] (0/4) Epoch 20, batch 1600, loss[loss=0.1877, simple_loss=0.2708, pruned_loss=0.03758, ctc_loss=0.07366, over 19843.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2583, pruned_loss=0.03885, ctc_loss=0.0726, over 3834899.02 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 06:42:34,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.56 vs. 
limit=15.0 +2024-08-29 06:42:44,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=260821.33333333334, ans=0.125 +2024-08-29 06:43:12,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260981.33333333334, ans=0.1 +2024-08-29 06:43:17,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=260981.33333333334, ans=0.0 +2024-08-29 06:43:21,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=261034.66666666666, ans=0.0 +2024-08-29 06:43:21,813 INFO [train.py:1114] (0/4) Epoch 20, batch 1650, loss[loss=0.1896, simple_loss=0.2688, pruned_loss=0.04016, ctc_loss=0.07511, over 19656.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2584, pruned_loss=0.03894, ctc_loss=0.07269, over 3830835.84 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:43:27,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261034.66666666666, ans=0.125 +2024-08-29 06:44:10,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=261194.66666666666, ans=0.125 +2024-08-29 06:44:13,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=261194.66666666666, ans=0.2 +2024-08-29 06:44:25,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=261248.0, ans=0.025 +2024-08-29 06:44:29,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=261248.0, ans=0.125 +2024-08-29 06:44:30,262 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.539e+02 1.831e+02 2.496e+02 4.278e+02, threshold=3.663e+02, percent-clipped=6.0 +2024-08-29 06:44:30,447 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:44:33,863 INFO [train.py:1114] (0/4) Epoch 20, batch 1700, loss[loss=0.1505, simple_loss=0.2232, pruned_loss=0.02793, ctc_loss=0.05481, over 19688.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2582, pruned_loss=0.0385, ctc_loss=0.0719, over 3845161.14 frames. 
], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:44:38,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=261301.33333333334, ans=15.0 +2024-08-29 06:44:41,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261301.33333333334, ans=0.0 +2024-08-29 06:44:49,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261301.33333333334, ans=0.125 +2024-08-29 06:44:49,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=261354.66666666666, ans=0.0 +2024-08-29 06:44:52,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=261354.66666666666, ans=0.0 +2024-08-29 06:45:08,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261461.33333333334, ans=0.125 +2024-08-29 06:45:12,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261461.33333333334, ans=0.1 +2024-08-29 06:45:18,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=261461.33333333334, ans=0.2 +2024-08-29 06:45:21,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261461.33333333334, ans=0.125 +2024-08-29 06:45:23,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261514.66666666666, ans=0.125 +2024-08-29 06:45:33,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=261514.66666666666, ans=0.0 +2024-08-29 06:45:38,445 INFO [train.py:1114] (0/4) Epoch 20, batch 1750, loss[loss=0.155, simple_loss=0.2301, pruned_loss=0.0297, ctc_loss=0.05153, over 19683.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2576, pruned_loss=0.03857, ctc_loss=0.07201, over 3849342.05 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:45:48,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261568.0, ans=0.125 +2024-08-29 06:46:16,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261728.0, ans=0.125 +2024-08-29 06:46:26,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261781.33333333334, ans=0.0 +2024-08-29 06:46:31,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=261781.33333333334, ans=0.125 +2024-08-29 06:46:33,068 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.489e+02 1.816e+02 2.382e+02 3.653e+02, threshold=3.632e+02, percent-clipped=0.0 +2024-08-29 06:46:36,573 INFO [train.py:1114] (0/4) Epoch 20, batch 1800, loss[loss=0.1829, simple_loss=0.2625, pruned_loss=0.03732, ctc_loss=0.07167, over 19608.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2577, pruned_loss=0.03857, ctc_loss=0.07208, over 3852540.42 frames. 
], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:46:45,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-29 06:46:48,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-29 06:46:49,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=12.0 +2024-08-29 06:47:19,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=261994.66666666666, ans=0.0 +2024-08-29 06:47:21,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=261994.66666666666, ans=0.125 +2024-08-29 06:47:22,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=261994.66666666666, ans=0.05 +2024-08-29 06:47:25,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.27 vs. limit=22.5 +2024-08-29 06:47:43,607 INFO [train.py:1114] (0/4) Epoch 20, batch 1850, loss[loss=0.1861, simple_loss=0.2704, pruned_loss=0.03741, ctc_loss=0.06779, over 19577.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2577, pruned_loss=0.03859, ctc_loss=0.07194, over 3856244.40 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:47:49,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=262101.33333333334, ans=0.0 +2024-08-29 06:47:54,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.32 vs. limit=6.0 +2024-08-29 06:47:59,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=262154.6666666667, ans=0.125 +2024-08-29 06:48:15,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=262208.0, ans=0.125 +2024-08-29 06:48:18,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=262208.0, ans=0.0 +2024-08-29 06:48:40,480 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.496e+02 1.744e+02 2.237e+02 4.849e+02, threshold=3.488e+02, percent-clipped=3.0 +2024-08-29 06:48:40,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=262314.6666666667, ans=0.5 +2024-08-29 06:48:46,713 INFO [train.py:1114] (0/4) Epoch 20, batch 1900, loss[loss=0.1905, simple_loss=0.2676, pruned_loss=0.04083, ctc_loss=0.07927, over 19631.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2581, pruned_loss=0.03857, ctc_loss=0.07178, over 3861887.47 frames. 
], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:48:49,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=262368.0, ans=0.025 +2024-08-29 06:48:56,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=262368.0, ans=0.035 +2024-08-29 06:48:59,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262421.3333333333, ans=0.1 +2024-08-29 06:49:19,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=262421.3333333333, ans=0.2 +2024-08-29 06:50:07,327 INFO [train.py:1114] (0/4) Epoch 20, batch 1950, loss[loss=0.1738, simple_loss=0.2513, pruned_loss=0.03495, ctc_loss=0.0663, over 19595.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2588, pruned_loss=0.03858, ctc_loss=0.07176, over 3870448.19 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:50:10,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.35 vs. limit=15.0 +2024-08-29 06:50:47,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262741.3333333333, ans=0.1 +2024-08-29 06:50:48,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.16 vs. limit=15.0 +2024-08-29 06:50:51,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262794.6666666667, ans=0.125 +2024-08-29 06:51:04,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=262794.6666666667, ans=0.2 +2024-08-29 06:51:07,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262848.0, ans=0.125 +2024-08-29 06:51:08,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=262848.0, ans=0.125 +2024-08-29 06:51:12,400 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.401e+02 1.548e+02 2.025e+02 3.566e+02, threshold=3.095e+02, percent-clipped=1.0 +2024-08-29 06:51:15,982 INFO [train.py:1114] (0/4) Epoch 20, batch 2000, loss[loss=0.1711, simple_loss=0.2407, pruned_loss=0.03691, ctc_loss=0.0689, over 19654.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2594, pruned_loss=0.03887, ctc_loss=0.07258, over 3855021.75 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:51:44,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=263008.0, ans=0.125 +2024-08-29 06:51:47,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263061.3333333333, ans=0.1 +2024-08-29 06:51:49,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=263061.3333333333, ans=0.2 +2024-08-29 06:52:04,147 INFO [train.py:1114] (0/4) Epoch 20, batch 2050, loss[loss=0.1584, simple_loss=0.231, pruned_loss=0.03139, ctc_loss=0.05763, over 19708.00 frames. 
], tot_loss[loss=0.1822, simple_loss=0.2583, pruned_loss=0.03861, ctc_loss=0.07207, over 3851251.97 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:52:37,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263274.6666666667, ans=0.1 +2024-08-29 06:52:53,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.42 vs. limit=22.5 +2024-08-29 06:53:03,400 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.460e+02 1.739e+02 2.291e+02 5.164e+02, threshold=3.479e+02, percent-clipped=12.0 +2024-08-29 06:53:06,948 INFO [train.py:1114] (0/4) Epoch 20, batch 2100, loss[loss=0.1724, simple_loss=0.2478, pruned_loss=0.03538, ctc_loss=0.06572, over 19771.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2576, pruned_loss=0.03821, ctc_loss=0.07142, over 3858598.14 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:53:32,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=263541.3333333333, ans=0.2 +2024-08-29 06:53:42,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=263594.6666666667, ans=0.125 +2024-08-29 06:53:51,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263648.0, ans=0.125 +2024-08-29 06:54:01,625 INFO [train.py:1114] (0/4) Epoch 20, batch 2150, loss[loss=0.1768, simple_loss=0.2575, pruned_loss=0.03456, ctc_loss=0.06756, over 19570.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2569, pruned_loss=0.038, ctc_loss=0.07106, over 3868795.35 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:54:05,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=263701.3333333333, ans=0.07 +2024-08-29 06:54:42,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=263914.6666666667, ans=0.2 +2024-08-29 06:54:43,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263914.6666666667, ans=0.125 +2024-08-29 06:54:44,296 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.492e+02 1.849e+02 2.492e+02 5.041e+02, threshold=3.698e+02, percent-clipped=4.0 +2024-08-29 06:55:33,939 INFO [train.py:1114] (0/4) Epoch 20, batch 2200, loss[loss=0.1748, simple_loss=0.2548, pruned_loss=0.03459, ctc_loss=0.06386, over 19581.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2569, pruned_loss=0.03811, ctc_loss=0.07133, over 3867840.37 frames. 
], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:55:39,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263968.0, ans=0.0 +2024-08-29 06:55:52,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264021.3333333333, ans=0.125 +2024-08-29 06:56:26,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=264181.3333333333, ans=0.0 +2024-08-29 06:56:37,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0 +2024-08-29 06:56:38,509 INFO [train.py:1114] (0/4) Epoch 20, batch 2250, loss[loss=0.179, simple_loss=0.2618, pruned_loss=0.0346, ctc_loss=0.06755, over 19618.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2572, pruned_loss=0.03821, ctc_loss=0.07149, over 3866799.94 frames. ], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:56:54,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=264288.0, ans=0.2 +2024-08-29 06:56:54,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=264288.0, ans=0.1 +2024-08-29 06:56:56,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=264341.3333333333, ans=0.0 +2024-08-29 06:56:59,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264341.3333333333, ans=0.1 +2024-08-29 06:57:53,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264448.0, ans=0.125 +2024-08-29 06:57:58,411 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.422e+02 1.843e+02 2.549e+02 5.039e+02, threshold=3.686e+02, percent-clipped=5.0 +2024-08-29 06:58:01,868 INFO [train.py:1114] (0/4) Epoch 20, batch 2300, loss[loss=0.1845, simple_loss=0.2583, pruned_loss=0.04113, ctc_loss=0.07125, over 19496.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2567, pruned_loss=0.03813, ctc_loss=0.0712, over 3859390.45 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:59:10,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=264608.0, ans=0.125 +2024-08-29 07:00:02,775 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0 +2024-08-29 07:00:03,988 INFO [train.py:1114] (0/4) Epoch 20, batch 2350, loss[loss=0.2042, simple_loss=0.2751, pruned_loss=0.04895, ctc_loss=0.08823, over 19669.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2568, pruned_loss=0.03838, ctc_loss=0.07157, over 3862188.44 frames. 
], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:10:25,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=264821.3333333333, ans=0.5 +2024-08-29 07:13:38,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264928.0, ans=0.1 +2024-08-29 07:15:24,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264928.0, ans=0.125 +2024-08-29 07:16:19,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264981.3333333333, ans=0.125 +2024-08-29 07:20:35,443 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.441e+02 1.702e+02 2.205e+02 4.204e+02, threshold=3.404e+02, percent-clipped=1.0 +2024-08-29 07:22:42,547 INFO [train.py:1114] (0/4) Epoch 20, batch 2400, loss[loss=0.198, simple_loss=0.276, pruned_loss=0.04485, ctc_loss=0.07549, over 19336.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2589, pruned_loss=0.03899, ctc_loss=0.0725, over 3857312.51 frames. ], batch size: 67, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:23:40,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265034.6666666667, ans=0.1 +2024-08-29 07:24:37,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-08-29 07:34:27,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=265088.0, ans=0.125 +2024-08-29 07:36:56,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-29 07:41:05,274 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 07:41:06,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=22.5 +2024-08-29 07:50:39,245 INFO [train.py:1114] (0/4) Epoch 20, batch 2450, loss[loss=0.2549, simple_loss=0.3021, pruned_loss=0.0747, ctc_loss=0.1457, over 13541.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2624, pruned_loss=0.04121, ctc_loss=0.07694, over 3731858.51 frames. ], batch size: 141, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:54:23,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=265354.6666666667, ans=0.125 +2024-08-29 07:55:33,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.26 vs. 
limit=15.0 +2024-08-29 08:03:21,900 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 08:03:21,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=265461.3333333333, ans=0.125 +2024-08-29 08:04:08,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265461.3333333333, ans=0.125 +2024-08-29 08:06:23,735 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 08:08:51,800 INFO [train.py:1387] (0/4) Done! diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-1 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-1 new file mode 100644 index 0000000000000000000000000000000000000000..90fe45298a094712d54b222aa0d858d06ff84b7a --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-1 @@ -0,0 +1,704 @@ +2024-08-29 01:48:53,987 INFO [train.py:1182] (1/4) Training started +2024-08-29 01:48:53,988 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-29 01:48:56,897 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 
'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 01:48:56,897 INFO [train.py:1212] (1/4) About to create model +2024-08-29 01:48:58,148 INFO [train.py:1216] (1/4) Number of model parameters: 65805511 +2024-08-29 01:48:58,379 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 01:49:57,726 INFO [train.py:1231] (1/4) Using DDP +2024-08-29 01:52:46,799 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-29 01:52:46,994 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-29 01:52:46,994 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-29 01:52:48,662 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-29 01:52:48,668 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-29 01:54:40,097 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-29 01:54:41,302 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-29 01:54:41,611 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-29 01:54:41,611 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:04:11,942 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.40 vs. limit=3.0 +2024-08-29 02:04:13,238 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:04:14,353 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:12:13,308 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:12:14,567 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:16:49,712 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=14.07 vs. limit=7.5 +2024-08-29 02:16:50,161 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:16:51,192 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=19.97 vs. 
limit=7.5 +2024-08-29 02:16:51,488 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:16:51,508 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-29 02:18:18,024 INFO [train.py:1114] (1/4) Epoch 19, batch 0, loss[loss=0.1766, simple_loss=0.248, pruned_loss=0.03834, ctc_loss=0.07154, over 19417.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.248, pruned_loss=0.03834, ctc_loss=0.07154, over 19417.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 02:18:18,024 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 02:19:56,211 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-29 02:19:56,212 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 02:19:56,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=12.0 +2024-08-29 02:20:50,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238954.66666666666, ans=0.0 +2024-08-29 02:22:54,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=238954.66666666666, ans=0.2 +2024-08-29 02:48:22,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.96 vs. limit=15.0 +2024-08-29 02:54:49,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-29 03:07:49,853 INFO [train.py:1114] (1/4) Epoch 19, batch 50, loss[loss=0.1649, simple_loss=0.2366, pruned_loss=0.03318, ctc_loss=0.06698, over 19694.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2621, pruned_loss=0.04168, ctc_loss=0.0786, over 843816.80 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:11:05,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239221.33333333334, ans=0.0 +2024-08-29 03:11:05,297 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 03:11:05,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.28 vs. limit=22.5 +2024-08-29 03:20:39,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239328.0, ans=0.125 +2024-08-29 03:22:22,816 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.482e+02 1.734e+02 2.141e+02 3.301e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-29 03:22:23,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-29 03:28:44,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.79 vs. 
limit=15.0 +2024-08-29 03:37:40,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=239488.0, ans=0.05 +2024-08-29 03:37:40,964 INFO [train.py:1114] (1/4) Epoch 19, batch 100, loss[loss=0.163, simple_loss=0.2366, pruned_loss=0.03214, ctc_loss=0.0626, over 19715.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2627, pruned_loss=0.04115, ctc_loss=0.07742, over 1498509.86 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:41:46,350 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 03:45:20,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=239541.33333333334, ans=0.035 +2024-08-29 03:46:16,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=239541.33333333334, ans=0.0 +2024-08-29 03:52:11,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=239648.0, ans=0.0 +2024-08-29 03:56:43,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239754.66666666666, ans=0.1 +2024-08-29 03:56:43,861 INFO [train.py:1114] (1/4) Epoch 19, batch 150, loss[loss=0.1543, simple_loss=0.2297, pruned_loss=0.02929, ctc_loss=0.05109, over 19722.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2608, pruned_loss=0.04033, ctc_loss=0.07553, over 2027222.49 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:59:52,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=239861.33333333334, ans=0.0 +2024-08-29 04:01:43,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=239914.66666666666, ans=0.05 +2024-08-29 04:01:44,340 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.487e+02 1.911e+02 2.455e+02 3.758e+02, threshold=3.822e+02, percent-clipped=3.0 +2024-08-29 04:02:34,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 04:04:26,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-29 04:05:46,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=239968.0, ans=0.035 +2024-08-29 04:06:28,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239968.0, ans=0.125 +2024-08-29 04:09:33,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-29 04:10:04,528 INFO [train.py:1114] (1/4) Epoch 19, batch 200, loss[loss=0.2002, simple_loss=0.2755, pruned_loss=0.04511, ctc_loss=0.08689, over 18110.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.26, pruned_loss=0.03993, ctc_loss=0.07462, over 2435690.74 frames. 
], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:18:33,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-29 04:19:45,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240181.33333333334, ans=0.1 +2024-08-29 04:20:10,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-29 04:22:17,234 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:23:12,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-08-29 04:23:53,215 INFO [train.py:1114] (1/4) Epoch 19, batch 250, loss[loss=0.198, simple_loss=0.273, pruned_loss=0.0449, ctc_loss=0.08306, over 19370.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2596, pruned_loss=0.03958, ctc_loss=0.07417, over 2755982.86 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:24:21,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=240288.0, ans=0.2 +2024-08-29 04:24:29,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=240288.0, ans=0.1 +2024-08-29 04:27:30,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0 +2024-08-29 04:29:03,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1 +2024-08-29 04:29:04,209 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.446e+02 1.716e+02 2.275e+02 4.235e+02, threshold=3.432e+02, percent-clipped=4.0 +2024-08-29 04:29:16,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1 +2024-08-29 04:30:04,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 04:30:07,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=240501.33333333334, ans=0.2 +2024-08-29 04:30:08,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 04:31:34,654 INFO [train.py:1114] (1/4) Epoch 19, batch 300, loss[loss=0.217, simple_loss=0.2866, pruned_loss=0.05422, ctc_loss=0.09713, over 19524.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2592, pruned_loss=0.03955, ctc_loss=0.07398, over 3000158.22 frames. 
], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:33:08,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=240554.66666666666, ans=0.2 +2024-08-29 04:34:33,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=240661.33333333334, ans=0.0 +2024-08-29 04:34:57,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=6.0 +2024-08-29 04:35:50,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240714.66666666666, ans=0.125 +2024-08-29 04:35:51,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=240768.0, ans=0.125 +2024-08-29 04:36:21,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240768.0, ans=0.125 +2024-08-29 04:38:10,601 INFO [train.py:1114] (1/4) Epoch 19, batch 350, loss[loss=0.1663, simple_loss=0.2371, pruned_loss=0.03436, ctc_loss=0.06717, over 19750.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2598, pruned_loss=0.03974, ctc_loss=0.07426, over 3190941.52 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:38:13,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=240821.33333333334, ans=0.2 +2024-08-29 04:42:00,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=240928.0, ans=0.025 +2024-08-29 04:42:09,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.82 vs. limit=22.5 +2024-08-29 04:44:05,474 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.489e+02 1.897e+02 2.425e+02 4.045e+02, threshold=3.795e+02, percent-clipped=4.0 +2024-08-29 04:44:14,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=240981.33333333334, ans=0.2 +2024-08-29 04:44:28,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=241088.0, ans=0.0 +2024-08-29 04:44:29,329 INFO [train.py:1114] (1/4) Epoch 19, batch 400, loss[loss=0.1758, simple_loss=0.2599, pruned_loss=0.03276, ctc_loss=0.06538, over 19516.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.03955, ctc_loss=0.07397, over 3342063.45 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:44:47,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=241088.0, ans=0.0 +2024-08-29 04:44:53,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=6.0 +2024-08-29 04:45:20,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.52 vs. 
limit=22.5 +2024-08-29 04:45:20,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-29 04:45:35,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-29 04:46:24,132 INFO [train.py:1114] (1/4) Epoch 19, batch 450, loss[loss=0.1992, simple_loss=0.2841, pruned_loss=0.04114, ctc_loss=0.08007, over 19611.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2603, pruned_loss=0.04008, ctc_loss=0.07494, over 3450262.93 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:46:46,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=241354.66666666666, ans=0.0 +2024-08-29 04:47:00,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.40 vs. limit=15.0 +2024-08-29 04:47:33,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241408.0, ans=0.1 +2024-08-29 04:47:37,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-29 04:47:54,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241514.66666666666, ans=0.125 +2024-08-29 04:47:54,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.394e+02 1.625e+02 2.143e+02 3.810e+02, threshold=3.251e+02, percent-clipped=1.0 +2024-08-29 04:48:27,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=241621.33333333334, ans=0.125 +2024-08-29 04:49:55,367 INFO [train.py:1114] (1/4) Epoch 19, batch 500, loss[loss=0.1951, simple_loss=0.2705, pruned_loss=0.04363, ctc_loss=0.08119, over 19638.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2594, pruned_loss=0.03966, ctc_loss=0.07414, over 3545831.45 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:50:01,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241621.33333333334, ans=0.125 +2024-08-29 04:50:07,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0 +2024-08-29 04:53:04,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=241728.0, ans=0.2 +2024-08-29 04:54:13,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=241834.66666666666, ans=0.1 +2024-08-29 04:54:14,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=241888.0, ans=0.125 +2024-08-29 04:54:15,415 INFO [train.py:1114] (1/4) Epoch 19, batch 550, loss[loss=0.2219, simple_loss=0.2906, pruned_loss=0.0563, ctc_loss=0.1018, over 19283.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2598, pruned_loss=0.0401, ctc_loss=0.07479, over 3608551.17 frames. 
], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:54:16,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=241888.0, ans=0.0 +2024-08-29 04:55:07,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.20 vs. limit=15.0 +2024-08-29 04:55:24,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-29 04:55:29,727 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.703e+02 2.107e+02 3.697e+02, threshold=3.406e+02, percent-clipped=1.0 +2024-08-29 04:55:29,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242048.0, ans=0.1 +2024-08-29 04:57:33,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=242048.0, ans=0.0 +2024-08-29 04:57:33,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=242048.0, ans=0.0 +2024-08-29 04:57:41,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=242101.33333333334, ans=0.2 +2024-08-29 04:57:47,836 INFO [train.py:1114] (1/4) Epoch 19, batch 600, loss[loss=0.2135, simple_loss=0.2925, pruned_loss=0.04872, ctc_loss=0.09266, over 19427.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2601, pruned_loss=0.04017, ctc_loss=0.07475, over 3666336.13 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:57:53,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=242154.66666666666, ans=0.025 +2024-08-29 04:57:56,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242154.66666666666, ans=0.1 +2024-08-29 04:58:02,098 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:58:40,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242261.33333333334, ans=0.125 +2024-08-29 04:59:40,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.11 vs. limit=15.0 +2024-08-29 05:00:05,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=242421.33333333334, ans=0.0 +2024-08-29 05:00:06,344 INFO [train.py:1114] (1/4) Epoch 19, batch 650, loss[loss=0.1891, simple_loss=0.2701, pruned_loss=0.03905, ctc_loss=0.07487, over 19773.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2595, pruned_loss=0.03991, ctc_loss=0.07424, over 3717066.29 frames. 
], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:00:06,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=242421.33333333334, ans=0.0 +2024-08-29 05:00:59,058 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.521e+02 1.842e+02 2.430e+02 3.637e+02, threshold=3.684e+02, percent-clipped=5.0 +2024-08-29 05:01:54,928 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:02:25,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-29 05:02:27,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-29 05:02:27,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-29 05:02:33,324 INFO [train.py:1114] (1/4) Epoch 19, batch 700, loss[loss=0.1752, simple_loss=0.2486, pruned_loss=0.0367, ctc_loss=0.07134, over 19732.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2602, pruned_loss=0.0402, ctc_loss=0.07485, over 3749165.96 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:04:44,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=242848.0, ans=0.0 +2024-08-29 05:05:37,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242901.33333333334, ans=0.1 +2024-08-29 05:05:42,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242901.33333333334, ans=0.0 +2024-08-29 05:06:34,964 INFO [train.py:1114] (1/4) Epoch 19, batch 750, loss[loss=0.1838, simple_loss=0.2661, pruned_loss=0.03737, ctc_loss=0.06698, over 19487.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2598, pruned_loss=0.03996, ctc_loss=0.07459, over 3774464.38 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:06:37,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=242954.66666666666, ans=0.2 +2024-08-29 05:06:41,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=242954.66666666666, ans=0.0 +2024-08-29 05:07:07,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243061.33333333334, ans=0.0 +2024-08-29 05:07:07,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.78 vs. limit=15.0 +2024-08-29 05:07:14,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.50 vs. 
limit=15.0 +2024-08-29 05:07:19,365 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.493e+02 1.888e+02 2.375e+02 3.905e+02, threshold=3.776e+02, percent-clipped=3.0 +2024-08-29 05:07:24,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243114.66666666666, ans=0.125 +2024-08-29 05:08:09,975 INFO [train.py:1114] (1/4) Epoch 19, batch 800, loss[loss=0.1558, simple_loss=0.2312, pruned_loss=0.02971, ctc_loss=0.05224, over 19841.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2595, pruned_loss=0.03993, ctc_loss=0.07448, over 3796520.43 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:08:10,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243221.33333333334, ans=0.1 +2024-08-29 05:09:13,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=243274.66666666666, ans=0.09899494936611666 +2024-08-29 05:09:17,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=243274.66666666666, ans=0.05 +2024-08-29 05:09:41,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243434.66666666666, ans=0.125 +2024-08-29 05:09:49,244 INFO [train.py:1114] (1/4) Epoch 19, batch 850, loss[loss=0.1996, simple_loss=0.2761, pruned_loss=0.04483, ctc_loss=0.08373, over 19650.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2592, pruned_loss=0.03984, ctc_loss=0.07435, over 3814948.74 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:10:07,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=243488.0, ans=0.0 +2024-08-29 05:10:10,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=243488.0, ans=0.125 +2024-08-29 05:10:14,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=243541.33333333334, ans=0.2 +2024-08-29 05:10:19,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=243541.33333333334, ans=0.0 +2024-08-29 05:10:31,634 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.428e+02 1.590e+02 2.047e+02 2.882e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-29 05:10:39,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 05:10:45,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=243701.33333333334, ans=0.2 +2024-08-29 05:10:48,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=243754.66666666666, ans=0.2 +2024-08-29 05:10:49,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=243754.66666666666, ans=0.125 +2024-08-29 05:10:49,659 INFO [train.py:1114] (1/4) Epoch 19, batch 900, loss[loss=0.1646, simple_loss=0.238, pruned_loss=0.03424, ctc_loss=0.057, over 19823.00 frames. 
], tot_loss[loss=0.1845, simple_loss=0.2593, pruned_loss=0.03995, ctc_loss=0.07446, over 3819183.23 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:10:57,330 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.06 vs. limit=12.0 +2024-08-29 05:10:59,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=15.0 +2024-08-29 05:11:17,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=243861.33333333334, ans=0.0 +2024-08-29 05:11:26,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=243914.66666666666, ans=0.0 +2024-08-29 05:11:28,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=243914.66666666666, ans=15.0 +2024-08-29 05:11:31,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 05:11:41,940 INFO [train.py:1114] (1/4) Epoch 19, batch 950, loss[loss=0.1637, simple_loss=0.242, pruned_loss=0.03076, ctc_loss=0.05982, over 19491.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2595, pruned_loss=0.04003, ctc_loss=0.07457, over 3820498.17 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:48,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=244021.33333333334, ans=0.04949747468305833 +2024-08-29 05:11:51,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0 +2024-08-29 05:12:04,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244128.0, ans=0.125 +2024-08-29 05:12:09,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=244128.0, ans=0.0 +2024-08-29 05:12:12,407 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.442e+02 1.730e+02 2.025e+02 3.837e+02, threshold=3.461e+02, percent-clipped=4.0 +2024-08-29 05:12:17,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244181.33333333334, ans=0.125 +2024-08-29 05:12:30,076 INFO [train.py:1114] (1/4) Epoch 19, batch 1000, loss[loss=0.1655, simple_loss=0.2413, pruned_loss=0.03194, ctc_loss=0.06462, over 19867.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2603, pruned_loss=0.04034, ctc_loss=0.0751, over 3816829.39 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:12:45,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=244341.33333333334, ans=0.0 +2024-08-29 05:13:18,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=244554.66666666666, ans=0.125 +2024-08-29 05:13:19,549 INFO [train.py:1114] (1/4) Epoch 19, batch 1050, loss[loss=0.1827, simple_loss=0.2636, pruned_loss=0.0368, ctc_loss=0.07045, over 19850.00 frames. 
], tot_loss[loss=0.1848, simple_loss=0.2595, pruned_loss=0.04008, ctc_loss=0.07473, over 3822541.22 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:13:24,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-29 05:13:47,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=244608.0, ans=0.5 +2024-08-29 05:13:49,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=244608.0, ans=0.125 +2024-08-29 05:13:57,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.63 vs. limit=12.0 +2024-08-29 05:13:58,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=244661.33333333334, ans=0.0 +2024-08-29 05:14:05,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.364e+02 1.577e+02 1.842e+02 2.540e+02, threshold=3.153e+02, percent-clipped=0.0 +2024-08-29 05:14:25,268 INFO [train.py:1114] (1/4) Epoch 19, batch 1100, loss[loss=0.1779, simple_loss=0.2533, pruned_loss=0.03733, ctc_loss=0.06986, over 19592.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2593, pruned_loss=0.03985, ctc_loss=0.07431, over 3830857.76 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:14:38,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.17 vs. limit=15.0 +2024-08-29 05:15:29,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244981.33333333334, ans=0.1 +2024-08-29 05:16:51,095 INFO [train.py:1114] (1/4) Epoch 19, batch 1150, loss[loss=0.1704, simple_loss=0.2531, pruned_loss=0.03217, ctc_loss=0.05823, over 19598.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2593, pruned_loss=0.0396, ctc_loss=0.07407, over 3830026.08 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:17:09,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=245088.0, ans=0.025 +2024-08-29 05:17:11,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.42 vs. 
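limit=22.5

The recurring `WARNING [optim.py:487]` lines summarize recent per-batch gradient norms: the five numbers read naturally as the min/25%/50%/75%/max quantiles, the reported `threshold` equals `Clipping_scale` times the median up to rounding (e.g. 2.0 × 1.577e+02 ≈ 3.153e+02 in the warning just above), and `percent-clipped` is the share of recent batches whose gradient norm exceeded that threshold. A sketch of that diagnostic under those assumptions; icefall's actual ScaledAdam clipping code differs in detail:

```python
import numpy as np

# Assumed reconstruction of the optim.py:487 diagnostic: quantiles of recent
# gradient norms, a threshold of clipping_scale * median, and the percentage
# of batches whose norm exceeded it. A sketch, not the optimizer's real code.
def clipping_report(grad_norms: np.ndarray, clipping_scale: float = 2.0) -> None:
    q = np.quantile(grad_norms, [0.0, 0.25, 0.5, 0.75, 1.0])
    threshold = clipping_scale * q[2]  # 2.0 x median, as seen in the log
    percent_clipped = 100.0 * float(np.mean(grad_norms > threshold))
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          + " ".join(f"{v:.3e}" for v in q)
          + f", threshold={threshold:.3e}, percent-clipped={percent_clipped}")
```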
+2024-08-29 05:17:37,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-29 05:17:55,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=245194.66666666666, ans=0.0 +2024-08-29 05:18:08,415 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.413e+02 1.588e+02 2.044e+02 3.492e+02, threshold=3.177e+02, percent-clipped=5.0 +2024-08-29 05:19:30,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245301.33333333334, ans=0.1 +2024-08-29 05:19:40,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=245301.33333333334, ans=0.0 +2024-08-29 05:19:41,764 INFO [train.py:1114] (1/4) Epoch 19, batch 1200, loss[loss=0.1997, simple_loss=0.2744, pruned_loss=0.04543, ctc_loss=0.08554, over 19840.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2602, pruned_loss=0.0399, ctc_loss=0.07454, over 3824926.60 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:19:41,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=245354.66666666666, ans=0.025 +2024-08-29 05:19:49,305 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:20:10,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=245514.66666666666, ans=0.0 +2024-08-29 05:20:13,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245514.66666666666, ans=0.0 +2024-08-29 05:20:29,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=245568.0, ans=0.07 +2024-08-29 05:20:50,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=245568.0, ans=0.125 +2024-08-29 05:20:54,864 INFO [train.py:1114] (1/4) Epoch 19, batch 1250, loss[loss=0.1999, simple_loss=0.2741, pruned_loss=0.04581, ctc_loss=0.08507, over 19547.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.261, pruned_loss=0.0401, ctc_loss=0.07483, over 3843041.14 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:21:23,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-29 05:21:47,053 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.448e+02 1.786e+02 2.342e+02 3.930e+02, threshold=3.573e+02, percent-clipped=1.0 +2024-08-29 05:22:00,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0 +2024-08-29 05:22:27,719 INFO [train.py:1114] (1/4) Epoch 19, batch 1300, loss[loss=0.2078, simple_loss=0.2765, pruned_loss=0.05037, ctc_loss=0.09589, over 18776.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2602, pruned_loss=0.03959, ctc_loss=0.07395, over 3846494.21 frames. 
], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:22:28,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.89 vs. limit=6.0 +2024-08-29 05:22:34,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0 +2024-08-29 05:22:40,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245941.33333333334, ans=0.1 +2024-08-29 05:22:53,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-29 05:23:00,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245994.66666666666, ans=0.1 +2024-08-29 05:23:03,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=22.5 +2024-08-29 05:23:15,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.62 vs. limit=10.0 +2024-08-29 05:23:16,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-29 05:23:20,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.85 vs. limit=15.0 +2024-08-29 05:23:26,151 INFO [train.py:1114] (1/4) Epoch 19, batch 1350, loss[loss=0.1633, simple_loss=0.2448, pruned_loss=0.03033, ctc_loss=0.05284, over 19783.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2595, pruned_loss=0.0393, ctc_loss=0.07331, over 3856904.11 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:23:26,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-29 05:23:30,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.13 vs. limit=15.0 +2024-08-29 05:23:32,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-29 05:23:59,064 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.393e+02 1.600e+02 2.060e+02 3.630e+02, threshold=3.201e+02, percent-clipped=1.0 +2024-08-29 05:24:00,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.21 vs. limit=15.0 +2024-08-29 05:24:31,018 INFO [train.py:1114] (1/4) Epoch 19, batch 1400, loss[loss=0.1478, simple_loss=0.2181, pruned_loss=0.02777, ctc_loss=0.05474, over 19689.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.259, pruned_loss=0.03918, ctc_loss=0.07306, over 3864018.00 frames. 
], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:24:53,719 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:24:54,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=246474.66666666666, ans=0.2 +2024-08-29 05:24:55,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0 +2024-08-29 05:24:59,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-08-29 05:25:01,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=246528.0, ans=0.2 +2024-08-29 05:25:10,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.71 vs. limit=15.0 +2024-08-29 05:25:12,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-29 05:25:18,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-29 05:25:21,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=246634.66666666666, ans=0.05 +2024-08-29 05:25:24,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=246634.66666666666, ans=0.0 +2024-08-29 05:25:26,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246688.0, ans=0.0 +2024-08-29 05:25:26,811 INFO [train.py:1114] (1/4) Epoch 19, batch 1450, loss[loss=0.1968, simple_loss=0.2707, pruned_loss=0.0453, ctc_loss=0.08051, over 19664.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2594, pruned_loss=0.03919, ctc_loss=0.0733, over 3863145.24 frames. 
], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:25:28,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246688.0, ans=0.1 +2024-08-29 05:25:39,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246741.33333333334, ans=0.1 +2024-08-29 05:25:45,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=246741.33333333334, ans=0.2 +2024-08-29 05:26:19,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-29 05:26:22,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.397e+02 1.549e+02 1.935e+02 4.281e+02, threshold=3.099e+02, percent-clipped=1.0 +2024-08-29 05:26:29,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=246901.33333333334, ans=0.2 +2024-08-29 05:26:38,985 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:26:50,095 INFO [train.py:1114] (1/4) Epoch 19, batch 1500, loss[loss=0.2025, simple_loss=0.276, pruned_loss=0.04733, ctc_loss=0.08594, over 19575.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2596, pruned_loss=0.03906, ctc_loss=0.07315, over 3862569.89 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 05:26:55,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=246954.66666666666, ans=0.2 +2024-08-29 05:28:02,450 INFO [train.py:1114] (1/4) Epoch 19, batch 1550, loss[loss=0.2127, simple_loss=0.2792, pruned_loss=0.05319, ctc_loss=0.09957, over 19583.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2596, pruned_loss=0.03934, ctc_loss=0.07371, over 3847411.22 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 05:28:02,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.69 vs. limit=15.0 +2024-08-29 05:28:06,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.52 vs. limit=6.0 +2024-08-29 05:28:10,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247221.33333333334, ans=0.1 +2024-08-29 05:28:16,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=247274.66666666666, ans=0.2 +2024-08-29 05:28:18,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.35 vs. 
limit=15.0 +2024-08-29 05:28:31,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=247328.0, ans=0.125 +2024-08-29 05:28:45,729 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.405e+02 1.646e+02 2.216e+02 3.789e+02, threshold=3.291e+02, percent-clipped=3.0 +2024-08-29 05:29:01,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247381.33333333334, ans=0.125 +2024-08-29 05:30:04,277 INFO [train.py:1114] (1/4) Epoch 19, batch 1600, loss[loss=0.1844, simple_loss=0.2708, pruned_loss=0.0355, ctc_loss=0.06755, over 19843.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2596, pruned_loss=0.03949, ctc_loss=0.0741, over 3836078.74 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 05:30:06,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.59 vs. limit=15.0 +2024-08-29 05:30:27,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:30:29,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:31:12,564 INFO [train.py:1114] (1/4) Epoch 19, batch 1650, loss[loss=0.1827, simple_loss=0.2688, pruned_loss=0.03535, ctc_loss=0.06452, over 19660.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.03952, ctc_loss=0.07409, over 3832558.49 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 05:31:44,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=247808.0, ans=0.125 +2024-08-29 05:31:44,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=247808.0, ans=0.0 +2024-08-29 05:32:30,019 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.667e+02 2.011e+02 2.433e+02 4.037e+02, threshold=4.021e+02, percent-clipped=5.0 +2024-08-29 05:33:12,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=248021.33333333334, ans=0.1 +2024-08-29 05:33:13,441 INFO [train.py:1114] (1/4) Epoch 19, batch 1700, loss[loss=0.1657, simple_loss=0.2323, pruned_loss=0.036, ctc_loss=0.06779, over 19679.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2591, pruned_loss=0.03912, ctc_loss=0.07351, over 3846785.53 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:33:33,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=248128.0, ans=0.0 +2024-08-29 05:33:35,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=248128.0, ans=0.025 +2024-08-29 05:33:44,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248181.33333333334, ans=0.125 +2024-08-29 05:34:43,945 INFO [train.py:1114] (1/4) Epoch 19, batch 1750, loss[loss=0.1766, simple_loss=0.2394, pruned_loss=0.04096, ctc_loss=0.07984, over 19659.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2589, pruned_loss=0.03909, ctc_loss=0.07344, over 3852102.13 frames. 
], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:34:50,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=248288.0, ans=0.07 +2024-08-29 05:34:51,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248288.0, ans=0.125 +2024-08-29 05:35:11,457 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.518e+02 1.916e+02 2.294e+02 3.621e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-29 05:35:16,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=248448.0, ans=0.125 +2024-08-29 05:35:27,428 INFO [train.py:1114] (1/4) Epoch 19, batch 1800, loss[loss=0.1799, simple_loss=0.2631, pruned_loss=0.03572, ctc_loss=0.06318, over 19617.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2589, pruned_loss=0.03911, ctc_loss=0.07347, over 3853513.81 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:35:37,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.83 vs. limit=15.0 +2024-08-29 05:35:39,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.72 vs. limit=12.0 +2024-08-29 05:35:46,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=12.0 +2024-08-29 05:36:10,857 INFO [train.py:1114] (1/4) Epoch 19, batch 1850, loss[loss=0.2019, simple_loss=0.2801, pruned_loss=0.04519, ctc_loss=0.08355, over 19581.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2592, pruned_loss=0.03926, ctc_loss=0.07362, over 3856194.78 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-29 05:36:12,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.67 vs. limit=22.5 +2024-08-29 05:37:02,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0 +2024-08-29 05:37:05,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=248874.66666666666, ans=0.125 +2024-08-29 05:38:04,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.550e+02 2.027e+02 2.927e+02 4.792e+02, threshold=4.055e+02, percent-clipped=10.0 +2024-08-29 05:38:04,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0 +2024-08-29 05:38:08,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=248981.33333333334, ans=0.0 +2024-08-29 05:38:23,694 INFO [train.py:1114] (1/4) Epoch 19, batch 1900, loss[loss=0.1762, simple_loss=0.2586, pruned_loss=0.03424, ctc_loss=0.06335, over 19643.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2599, pruned_loss=0.03957, ctc_loss=0.074, over 3860254.22 frames. 
], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-29 05:39:08,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=249194.66666666666, ans=0.07 +2024-08-29 05:39:19,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249248.0, ans=0.125 +2024-08-29 05:39:30,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249248.0, ans=0.1 +2024-08-29 05:39:40,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-29 05:39:41,149 INFO [train.py:1114] (1/4) Epoch 19, batch 1950, loss[loss=0.1754, simple_loss=0.2536, pruned_loss=0.03502, ctc_loss=0.06776, over 19609.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2613, pruned_loss=0.03982, ctc_loss=0.07451, over 3869695.05 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:39:53,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-29 05:40:15,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=249408.0, ans=0.2 +2024-08-29 05:40:17,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=249408.0, ans=0.035 +2024-08-29 05:40:18,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249408.0, ans=0.1 +2024-08-29 05:40:38,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249461.33333333334, ans=0.1 +2024-08-29 05:40:39,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.91 vs. limit=12.0 +2024-08-29 05:40:45,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.357e+02 1.563e+02 1.867e+02 4.467e+02, threshold=3.126e+02, percent-clipped=1.0 +2024-08-29 05:40:54,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=12.0 +2024-08-29 05:41:02,877 INFO [train.py:1114] (1/4) Epoch 19, batch 2000, loss[loss=0.1579, simple_loss=0.2274, pruned_loss=0.03198, ctc_loss=0.06084, over 19645.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2618, pruned_loss=0.04003, ctc_loss=0.07487, over 3855107.31 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:41:22,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.50 vs. 
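limit=22.5

The `ScheduledFloat` lines that dominate this log are batch-count-dependent hyperparameters: each named quantity (dropout probabilities, skip rates, balancer limits, and so on) follows a piecewise-linear schedule in `batch_count`, and `ans` is the value currently in effect. A minimal sketch of such a schedule, with illustrative breakpoints that are assumptions, not the recipe's actual settings:

```python
# Piecewise-linear schedule over batch_count, in the spirit of the
# ScheduledFloat values logged by scaling.py:214. Breakpoints are illustrative.
class PiecewiseLinearSchedule:
    def __init__(self, *points: tuple[float, float]) -> None:
        self.points = sorted(points)  # (batch_count, value) pairs

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:  # interpolate within this segment
                return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
        return pts[-1][1]  # schedule is flat past the last breakpoint

# e.g. a dropout rate annealed from 0.3 to 0.1 over the first 20k batches has
# long since flattened out by batch_count ~2.5e5, matching the 'ans=0.1' lines:
dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(249781.33333333334))  # -> 0.1
```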
+2024-08-29 05:41:28,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=249674.66666666666, ans=10.0 +2024-08-29 05:41:39,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249728.0, ans=0.125 +2024-08-29 05:42:26,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249781.33333333334, ans=0.1 +2024-08-29 05:42:30,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=249781.33333333334, ans=0.2 +2024-08-29 05:42:42,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=249834.66666666666, ans=0.0 +2024-08-29 05:42:45,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249834.66666666666, ans=0.125 +2024-08-29 05:42:58,874 INFO [train.py:1114] (1/4) Epoch 19, batch 2050, loss[loss=0.1727, simple_loss=0.2499, pruned_loss=0.03478, ctc_loss=0.0649, over 19722.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2607, pruned_loss=0.03995, ctc_loss=0.07465, over 3851539.82 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:43:12,030 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:43:13,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-29 05:43:15,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=12.0 +2024-08-29 05:43:23,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=249994.66666666666, ans=0.2 +2024-08-29 05:43:26,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.469e+02 1.713e+02 2.068e+02 3.370e+02, threshold=3.427e+02, percent-clipped=2.0 +2024-08-29 05:43:40,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=250101.33333333334, ans=0.125 +2024-08-29 05:43:41,973 INFO [train.py:1114] (1/4) Epoch 19, batch 2100, loss[loss=0.186, simple_loss=0.2648, pruned_loss=0.03872, ctc_loss=0.07441, over 19776.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2601, pruned_loss=0.03957, ctc_loss=0.07412, over 3858510.10 frames. 
], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 05:43:47,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=250154.66666666666, ans=0.2 +2024-08-29 05:43:57,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250208.0, ans=0.125 +2024-08-29 05:44:51,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250314.66666666666, ans=0.125 +2024-08-29 05:45:01,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=250368.0, ans=0.0 +2024-08-29 05:45:09,916 INFO [train.py:1114] (1/4) Epoch 19, batch 2150, loss[loss=0.1707, simple_loss=0.2515, pruned_loss=0.03297, ctc_loss=0.05982, over 19581.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2593, pruned_loss=0.03931, ctc_loss=0.07342, over 3868401.60 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 05:45:12,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=12.0 +2024-08-29 05:45:14,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-29 05:45:28,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-08-29 05:45:33,014 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:45:36,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250528.0, ans=0.125 +2024-08-29 05:45:44,561 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.532e+02 1.812e+02 2.283e+02 4.768e+02, threshold=3.624e+02, percent-clipped=7.0 +2024-08-29 05:45:48,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=250581.33333333334, ans=0.0 +2024-08-29 05:46:18,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250634.66666666666, ans=0.125 +2024-08-29 05:46:31,693 INFO [train.py:1114] (1/4) Epoch 19, batch 2200, loss[loss=0.187, simple_loss=0.2649, pruned_loss=0.03886, ctc_loss=0.07877, over 19583.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.259, pruned_loss=0.03918, ctc_loss=0.07326, over 3866828.23 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:46:32,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.08 vs. 
limit=15.0 +2024-08-29 05:47:19,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=250741.33333333334, ans=0.0 +2024-08-29 05:48:00,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250848.0, ans=0.125 +2024-08-29 05:48:00,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=250848.0, ans=0.2 +2024-08-29 05:48:02,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.92 vs. limit=22.5 +2024-08-29 05:48:05,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.13 vs. limit=15.0 +2024-08-29 05:48:12,303 INFO [train.py:1114] (1/4) Epoch 19, batch 2250, loss[loss=0.1919, simple_loss=0.2708, pruned_loss=0.04179, ctc_loss=0.07377, over 19610.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.259, pruned_loss=0.03932, ctc_loss=0.07349, over 3867414.95 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:48:28,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251061.33333333334, ans=0.125 +2024-08-29 05:48:39,391 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.496e+02 1.836e+02 2.405e+02 3.916e+02, threshold=3.673e+02, percent-clipped=1.0 +2024-08-29 05:48:47,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.52 vs. limit=22.5 +2024-08-29 05:48:50,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=251168.0, ans=0.025 +2024-08-29 05:49:01,353 INFO [train.py:1114] (1/4) Epoch 19, batch 2300, loss[loss=0.1785, simple_loss=0.247, pruned_loss=0.04096, ctc_loss=0.07048, over 19536.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2586, pruned_loss=0.03955, ctc_loss=0.07386, over 3861612.16 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:49:04,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251221.33333333334, ans=0.1 +2024-08-29 05:49:04,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.98 vs. limit=15.0 +2024-08-29 05:50:51,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0 +2024-08-29 05:50:53,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251381.33333333334, ans=0.125 +2024-08-29 05:51:11,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251488.0, ans=0.125 +2024-08-29 05:51:12,199 INFO [train.py:1114] (1/4) Epoch 19, batch 2350, loss[loss=0.2049, simple_loss=0.2737, pruned_loss=0.05022, ctc_loss=0.08909, over 19657.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.259, pruned_loss=0.04004, ctc_loss=0.07468, over 3863892.88 frames. 
], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 05:51:40,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=251594.66666666666, ans=0.2 +2024-08-29 05:51:44,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=251594.66666666666, ans=0.125 +2024-08-29 05:51:52,850 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.480e+02 1.867e+02 2.502e+02 4.275e+02, threshold=3.733e+02, percent-clipped=4.0 +2024-08-29 05:52:16,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251701.33333333334, ans=0.1 +2024-08-29 05:52:18,775 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.84 vs. limit=15.0 +2024-08-29 05:52:19,097 INFO [train.py:1114] (1/4) Epoch 19, batch 2400, loss[loss=0.2077, simple_loss=0.2847, pruned_loss=0.04875, ctc_loss=0.08296, over 19414.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2611, pruned_loss=0.04079, ctc_loss=0.07587, over 3858113.05 frames. ], batch size: 67, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 05:52:59,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=251754.66666666666, ans=0.0 +2024-08-29 05:53:15,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=251808.0, ans=0.125 +2024-08-29 05:53:20,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=251861.33333333334, ans=0.2 +2024-08-29 05:53:32,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0 +2024-08-29 05:53:33,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251861.33333333334, ans=0.1 +2024-08-29 05:53:43,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.05 vs. limit=15.0 +2024-08-29 05:53:55,379 INFO [train.py:1114] (1/4) Epoch 19, batch 2450, loss[loss=0.2444, simple_loss=0.2867, pruned_loss=0.07445, ctc_loss=0.1331, over 13285.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.264, pruned_loss=0.04272, ctc_loss=0.07981, over 3731183.79 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 05:54:19,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=252128.0, ans=0.2 +2024-08-29 05:54:23,268 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.449e+02 1.688e+02 1.808e+02 3.489e+02, threshold=3.376e+02, percent-clipped=0.0 +2024-08-29 05:55:27,186 INFO [train.py:1114] (1/4) Epoch 20, batch 0, loss[loss=0.1825, simple_loss=0.2559, pruned_loss=0.03944, ctc_loss=0.07543, over 19805.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2559, pruned_loss=0.03944, ctc_loss=0.07543, over 19805.00 frames. 
], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 05:55:27,187 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 05:55:50,378 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0067, 3.7232, 3.5114, 3.5470], device='cuda:1') +2024-08-29 05:55:55,980 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1713, simple_loss=0.2633, pruned_loss=0.0297, ctc_loss=0.04995, over 944034.00 frames. +2024-08-29 05:55:55,988 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12881MB +2024-08-29 05:56:07,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=252288.0, ans=0.125 +2024-08-29 05:56:51,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252448.0, ans=0.125 +2024-08-29 05:57:32,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=252448.0, ans=0.125 +2024-08-29 05:57:36,257 INFO [train.py:1114] (1/4) Epoch 20, batch 50, loss[loss=0.1594, simple_loss=0.2315, pruned_loss=0.03171, ctc_loss=0.05965, over 19711.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2606, pruned_loss=0.03977, ctc_loss=0.07432, over 845201.66 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 16.0 +2024-08-29 05:57:55,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=252554.66666666666, ans=0.2 +2024-08-29 05:58:38,106 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.450e+02 1.693e+02 1.984e+02 3.027e+02, threshold=3.386e+02, percent-clipped=0.0 +2024-08-29 05:58:48,715 INFO [train.py:1114] (1/4) Epoch 20, batch 100, loss[loss=0.161, simple_loss=0.2349, pruned_loss=0.0322, ctc_loss=0.05654, over 19724.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2617, pruned_loss=0.04009, ctc_loss=0.07505, over 1498926.28 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 16.0 +2024-08-29 05:58:51,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252768.0, ans=0.1 +2024-08-29 05:59:18,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=252928.0, ans=0.2 +2024-08-29 05:59:33,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=252981.33333333334, ans=0.035 +2024-08-29 05:59:39,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=252981.33333333334, ans=0.0 +2024-08-29 06:00:02,037 INFO [train.py:1114] (1/4) Epoch 20, batch 150, loss[loss=0.1707, simple_loss=0.2411, pruned_loss=0.03671, ctc_loss=0.06722, over 19725.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2586, pruned_loss=0.03901, ctc_loss=0.07303, over 2028652.43 frames. 
], batch size: 47, lr: 7.57e-03, grad_scale: 16.0 +2024-08-29 06:00:09,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=253034.66666666666, ans=0.1 +2024-08-29 06:00:10,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=253034.66666666666, ans=0.0 +2024-08-29 06:00:12,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.16 vs. limit=15.0 +2024-08-29 06:00:13,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.09 vs. limit=22.5 +2024-08-29 06:00:58,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=253141.33333333334, ans=0.125 +2024-08-29 06:01:19,570 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.374e+02 1.536e+02 1.873e+02 3.368e+02, threshold=3.073e+02, percent-clipped=0.0 +2024-08-29 06:01:23,466 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:01:25,920 INFO [train.py:1114] (1/4) Epoch 20, batch 200, loss[loss=0.1999, simple_loss=0.2754, pruned_loss=0.0446, ctc_loss=0.08814, over 18278.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.258, pruned_loss=0.039, ctc_loss=0.07314, over 2436406.66 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:02:00,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253408.0, ans=0.125 +2024-08-29 06:02:36,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.65 vs. limit=15.0 +2024-08-29 06:03:15,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-29 06:03:17,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-29 06:03:26,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=253514.66666666666, ans=0.2 +2024-08-29 06:03:28,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=253514.66666666666, ans=0.0 +2024-08-29 06:03:30,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=253514.66666666666, ans=0.0 +2024-08-29 06:03:51,882 INFO [train.py:1114] (1/4) Epoch 20, batch 250, loss[loss=0.1892, simple_loss=0.266, pruned_loss=0.04064, ctc_loss=0.07774, over 19412.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2583, pruned_loss=0.03874, ctc_loss=0.07271, over 2755617.90 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:03:58,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=253568.0, ans=0.2 +2024-08-29 06:04:33,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. 
limit=15.0 +2024-08-29 06:04:39,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=253621.33333333334, ans=0.125 +2024-08-29 06:04:49,894 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:05:04,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=253728.0, ans=0.025 +2024-08-29 06:05:13,797 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.460e+02 1.674e+02 2.079e+02 4.615e+02, threshold=3.347e+02, percent-clipped=6.0 +2024-08-29 06:06:03,018 INFO [train.py:1114] (1/4) Epoch 20, batch 300, loss[loss=0.1849, simple_loss=0.2658, pruned_loss=0.0378, ctc_loss=0.07095, over 19501.00 frames. ], tot_loss[loss=0.182, simple_loss=0.258, pruned_loss=0.03852, ctc_loss=0.07227, over 3001492.53 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:06:04,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253834.66666666666, ans=0.1 +2024-08-29 06:07:06,211 INFO [train.py:1114] (1/4) Epoch 20, batch 350, loss[loss=0.1636, simple_loss=0.2391, pruned_loss=0.03224, ctc_loss=0.05916, over 19766.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2577, pruned_loss=0.0385, ctc_loss=0.07207, over 3190578.29 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 06:07:14,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254101.33333333334, ans=0.0 +2024-08-29 06:07:16,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=254154.66666666666, ans=0.125 +2024-08-29 06:07:28,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.30 vs. limit=15.0 +2024-08-29 06:07:37,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254261.33333333334, ans=0.125 +2024-08-29 06:07:40,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=254261.33333333334, ans=0.0 +2024-08-29 06:07:40,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.25 vs. limit=6.0 +2024-08-29 06:07:50,375 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.501e+02 1.796e+02 2.192e+02 4.069e+02, threshold=3.593e+02, percent-clipped=5.0 +2024-08-29 06:07:58,761 INFO [train.py:1114] (1/4) Epoch 20, batch 400, loss[loss=0.1698, simple_loss=0.2556, pruned_loss=0.03037, ctc_loss=0.05814, over 19857.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2575, pruned_loss=0.03845, ctc_loss=0.07188, over 3344185.38 frames. 
], batch size: 55, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 06:07:59,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254368.0, ans=0.1 +2024-08-29 06:08:16,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=254421.33333333334, ans=0.0 +2024-08-29 06:08:16,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=254421.33333333334, ans=0.2 +2024-08-29 06:08:22,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=254474.66666666666, ans=0.0 +2024-08-29 06:08:29,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=254474.66666666666, ans=0.125 +2024-08-29 06:08:31,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-08-29 06:08:51,104 INFO [train.py:1114] (1/4) Epoch 20, batch 450, loss[loss=0.184, simple_loss=0.2639, pruned_loss=0.03803, ctc_loss=0.06978, over 19601.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2581, pruned_loss=0.03882, ctc_loss=0.07253, over 3453379.41 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 06:08:53,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=254634.66666666666, ans=0.125 +2024-08-29 06:09:14,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=254634.66666666666, ans=0.0 +2024-08-29 06:09:37,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=254688.0, ans=0.125 +2024-08-29 06:09:55,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=254741.33333333334, ans=0.125 +2024-08-29 06:10:23,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.80 vs. limit=15.0 +2024-08-29 06:10:30,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.420e+02 1.652e+02 2.202e+02 3.176e+02, threshold=3.303e+02, percent-clipped=0.0 +2024-08-29 06:10:48,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.87 vs. limit=10.0 +2024-08-29 06:10:51,251 INFO [train.py:1114] (1/4) Epoch 20, batch 500, loss[loss=0.1989, simple_loss=0.2737, pruned_loss=0.04568, ctc_loss=0.08195, over 19698.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2576, pruned_loss=0.0385, ctc_loss=0.07202, over 3548352.56 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 8.0 +2024-08-29 06:10:56,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-08-29 06:10:59,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. 
limit=6.0 +2024-08-29 06:11:29,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255114.66666666666, ans=0.125 +2024-08-29 06:11:58,823 INFO [train.py:1114] (1/4) Epoch 20, batch 550, loss[loss=0.1905, simple_loss=0.2647, pruned_loss=0.04159, ctc_loss=0.08289, over 19308.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2576, pruned_loss=0.03873, ctc_loss=0.07238, over 3609596.37 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 8.0 +2024-08-29 06:12:07,449 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:12:10,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=255221.33333333334, ans=0.125 +2024-08-29 06:12:38,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=255328.0, ans=0.2 +2024-08-29 06:12:58,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255381.33333333334, ans=0.125 +2024-08-29 06:13:03,363 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.429e+02 1.650e+02 1.993e+02 3.679e+02, threshold=3.299e+02, percent-clipped=2.0 +2024-08-29 06:13:12,591 INFO [train.py:1114] (1/4) Epoch 20, batch 600, loss[loss=0.1914, simple_loss=0.2749, pruned_loss=0.03967, ctc_loss=0.0717, over 19349.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2578, pruned_loss=0.03873, ctc_loss=0.07232, over 3666314.13 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:13:31,597 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:13:35,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-29 06:14:05,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=255541.33333333334, ans=0.125 +2024-08-29 06:14:56,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=255648.0, ans=0.0 +2024-08-29 06:14:56,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=255648.0, ans=0.125 +2024-08-29 06:15:09,602 INFO [train.py:1114] (1/4) Epoch 20, batch 650, loss[loss=0.183, simple_loss=0.2682, pruned_loss=0.03614, ctc_loss=0.06381, over 19776.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2571, pruned_loss=0.03814, ctc_loss=0.07112, over 3716846.63 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:15:55,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255808.0, ans=0.1 +2024-08-29 06:16:20,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=255861.33333333334, ans=0.125 +2024-08-29 06:16:25,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.83 vs. 
limit=6.0 +2024-08-29 06:16:48,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.504e+02 1.911e+02 2.476e+02 5.788e+02, threshold=3.821e+02, percent-clipped=14.0 +2024-08-29 06:16:53,275 INFO [train.py:1114] (1/4) Epoch 20, batch 700, loss[loss=0.1631, simple_loss=0.2417, pruned_loss=0.0311, ctc_loss=0.05589, over 19719.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2577, pruned_loss=0.03845, ctc_loss=0.0718, over 3749179.31 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:16:57,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255968.0, ans=0.1 +2024-08-29 06:17:03,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.35 vs. limit=6.0 +2024-08-29 06:17:10,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=256021.33333333334, ans=0.0 +2024-08-29 06:17:10,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=256021.33333333334, ans=0.125 +2024-08-29 06:17:11,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=256021.33333333334, ans=0.1 +2024-08-29 06:17:19,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=256074.66666666666, ans=0.025 +2024-08-29 06:17:25,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.69 vs. limit=15.0 +2024-08-29 06:17:25,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-08-29 06:17:26,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256128.0, ans=0.1 +2024-08-29 06:17:28,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.72 vs. limit=22.5 +2024-08-29 06:17:31,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256128.0, ans=0.0 +2024-08-29 06:17:36,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256181.33333333334, ans=0.125 +2024-08-29 06:17:45,341 INFO [train.py:1114] (1/4) Epoch 20, batch 750, loss[loss=0.197, simple_loss=0.2758, pruned_loss=0.0427, ctc_loss=0.08216, over 19487.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2575, pruned_loss=0.0385, ctc_loss=0.07188, over 3775736.67 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 8.0 +2024-08-29 06:17:50,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. 
limit=12.0 +2024-08-29 06:17:54,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256234.66666666666, ans=0.125 +2024-08-29 06:17:57,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=15.0 +2024-08-29 06:17:57,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.62 vs. limit=15.0 +2024-08-29 06:18:03,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=256288.0, ans=0.125 +2024-08-29 06:18:09,123 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:18:11,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=256341.33333333334, ans=0.2 +2024-08-29 06:18:21,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256394.66666666666, ans=0.125 +2024-08-29 06:18:31,370 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.481e+02 1.912e+02 2.487e+02 4.029e+02, threshold=3.825e+02, percent-clipped=2.0 +2024-08-29 06:18:37,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=256501.33333333334, ans=0.0 +2024-08-29 06:18:37,702 INFO [train.py:1114] (1/4) Epoch 20, batch 800, loss[loss=0.1728, simple_loss=0.2463, pruned_loss=0.0357, ctc_loss=0.0696, over 19810.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2577, pruned_loss=0.03868, ctc_loss=0.07217, over 3797557.31 frames. ], batch size: 49, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 06:18:37,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=256501.33333333334, ans=0.04949747468305833 +2024-08-29 06:18:42,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=256501.33333333334, ans=0.125 +2024-08-29 06:19:00,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256608.0, ans=0.125 +2024-08-29 06:19:33,205 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:19:39,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256714.66666666666, ans=0.1 +2024-08-29 06:19:48,535 INFO [train.py:1114] (1/4) Epoch 20, batch 850, loss[loss=0.1849, simple_loss=0.2675, pruned_loss=0.03794, ctc_loss=0.06607, over 19661.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2575, pruned_loss=0.0387, ctc_loss=0.07201, over 3816420.74 frames. 
], batch size: 59, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:19:49,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=256768.0, ans=0.125 +2024-08-29 06:20:05,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=256821.33333333334, ans=0.125 +2024-08-29 06:20:07,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=256821.33333333334, ans=0.125 +2024-08-29 06:20:41,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.422e+02 1.634e+02 1.945e+02 3.890e+02, threshold=3.267e+02, percent-clipped=1.0 +2024-08-29 06:20:42,833 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:20:48,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257034.66666666666, ans=0.0 +2024-08-29 06:20:49,511 INFO [train.py:1114] (1/4) Epoch 20, batch 900, loss[loss=0.159, simple_loss=0.2361, pruned_loss=0.0294, ctc_loss=0.05773, over 19408.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2576, pruned_loss=0.03901, ctc_loss=0.07259, over 3819263.87 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:20:57,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257034.66666666666, ans=0.125 +2024-08-29 06:21:01,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=257088.0, ans=0.125 +2024-08-29 06:21:38,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.90 vs. limit=10.0 +2024-08-29 06:22:18,956 INFO [train.py:1114] (1/4) Epoch 20, batch 950, loss[loss=0.1662, simple_loss=0.2437, pruned_loss=0.03204, ctc_loss=0.06142, over 19481.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2588, pruned_loss=0.03955, ctc_loss=0.07368, over 3819608.55 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:22:24,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257301.33333333334, ans=0.1 +2024-08-29 06:22:56,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257354.66666666666, ans=0.125 +2024-08-29 06:22:59,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=257354.66666666666, ans=0.125 +2024-08-29 06:23:38,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=257461.33333333334, ans=0.1 +2024-08-29 06:23:40,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257461.33333333334, ans=0.125 +2024-08-29 06:23:48,042 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.398e+02 1.599e+02 1.937e+02 2.870e+02, threshold=3.197e+02, percent-clipped=0.0 +2024-08-29 06:23:52,529 INFO [train.py:1114] (1/4) Epoch 20, batch 1000, loss[loss=0.1608, simple_loss=0.2394, pruned_loss=0.03003, ctc_loss=0.05555, over 19840.00 frames. 
], tot_loss[loss=0.1837, simple_loss=0.2588, pruned_loss=0.03957, ctc_loss=0.07379, over 3816334.17 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:23:52,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257568.0, ans=0.125 +2024-08-29 06:24:49,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=257781.33333333334, ans=0.025 +2024-08-29 06:24:51,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257781.33333333334, ans=0.125 +2024-08-29 06:24:53,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=257781.33333333334, ans=0.2 +2024-08-29 06:24:56,438 INFO [train.py:1114] (1/4) Epoch 20, batch 1050, loss[loss=0.1856, simple_loss=0.264, pruned_loss=0.03931, ctc_loss=0.07128, over 19825.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2576, pruned_loss=0.03916, ctc_loss=0.073, over 3821748.40 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:25:07,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0 +2024-08-29 06:25:13,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.whiten.whitening_limit, batch_count=257888.0, ans=12.0 +2024-08-29 06:25:27,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.71 vs. limit=15.0 +2024-08-29 06:25:28,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=257941.33333333334, ans=0.2 +2024-08-29 06:25:54,256 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.418e+02 1.590e+02 1.994e+02 3.641e+02, threshold=3.179e+02, percent-clipped=3.0 +2024-08-29 06:25:58,770 INFO [train.py:1114] (1/4) Epoch 20, batch 1100, loss[loss=0.1756, simple_loss=0.2572, pruned_loss=0.03408, ctc_loss=0.06448, over 19589.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2575, pruned_loss=0.0388, ctc_loss=0.07246, over 3829696.72 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:25:58,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=258101.33333333334, ans=0.125 +2024-08-29 06:26:20,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=22.5 +2024-08-29 06:27:08,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=258314.66666666666, ans=0.1 +2024-08-29 06:27:30,537 INFO [train.py:1114] (1/4) Epoch 20, batch 1150, loss[loss=0.1744, simple_loss=0.2543, pruned_loss=0.03443, ctc_loss=0.06426, over 19589.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2573, pruned_loss=0.03862, ctc_loss=0.07225, over 3829160.70 frames. 
], batch size: 52, lr: 7.49e-03, grad_scale: 16.0 +2024-08-29 06:27:43,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258421.33333333334, ans=0.125 +2024-08-29 06:27:47,367 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:27:52,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=258474.66666666666, ans=0.0 +2024-08-29 06:27:53,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.47 vs. limit=22.5 +2024-08-29 06:27:54,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=258474.66666666666, ans=0.0 +2024-08-29 06:28:12,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=258581.33333333334, ans=0.125 +2024-08-29 06:28:13,432 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.421e+02 1.745e+02 2.163e+02 3.118e+02, threshold=3.490e+02, percent-clipped=0.0 +2024-08-29 06:28:16,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=258581.33333333334, ans=0.125 +2024-08-29 06:28:18,025 INFO [train.py:1114] (1/4) Epoch 20, batch 1200, loss[loss=0.178, simple_loss=0.2626, pruned_loss=0.03343, ctc_loss=0.06635, over 19848.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2577, pruned_loss=0.03866, ctc_loss=0.0723, over 3824397.58 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 06:28:20,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=258634.66666666666, ans=0.0 +2024-08-29 06:29:12,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0 +2024-08-29 06:29:59,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258741.33333333334, ans=0.125 +2024-08-29 06:30:11,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0 +2024-08-29 06:30:22,549 INFO [train.py:1114] (1/4) Epoch 20, batch 1250, loss[loss=0.1987, simple_loss=0.271, pruned_loss=0.04608, ctc_loss=0.08571, over 19518.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.258, pruned_loss=0.03853, ctc_loss=0.072, over 3842508.95 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:30:32,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=258901.33333333334, ans=0.0 +2024-08-29 06:30:36,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258954.66666666666, ans=0.1 +2024-08-29 06:30:39,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.65 vs. 
limit=5.0 +2024-08-29 06:30:40,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=258954.66666666666, ans=0.07 +2024-08-29 06:30:54,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.03 vs. limit=22.5 +2024-08-29 06:31:02,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=259114.66666666666, ans=10.0 +2024-08-29 06:31:09,988 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.413e+02 1.610e+02 2.110e+02 3.599e+02, threshold=3.219e+02, percent-clipped=1.0 +2024-08-29 06:31:47,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=259114.66666666666, ans=0.125 +2024-08-29 06:31:51,320 INFO [train.py:1114] (1/4) Epoch 20, batch 1300, loss[loss=0.2092, simple_loss=0.2814, pruned_loss=0.04986, ctc_loss=0.09301, over 18844.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2577, pruned_loss=0.03851, ctc_loss=0.07188, over 3846249.38 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:34:30,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259221.33333333334, ans=0.0 +2024-08-29 06:34:32,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.44 vs. limit=10.0 +2024-08-29 06:34:35,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=259221.33333333334, ans=0.025 +2024-08-29 06:34:36,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=259274.66666666666, ans=0.0 +2024-08-29 06:35:58,318 INFO [train.py:1114] (1/4) Epoch 20, batch 1350, loss[loss=0.1698, simple_loss=0.2502, pruned_loss=0.03329, ctc_loss=0.0571, over 19790.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2582, pruned_loss=0.03867, ctc_loss=0.072, over 3857281.69 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:35:59,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.82 vs. limit=15.0 +2024-08-29 06:36:03,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=259434.66666666666, ans=0.025 +2024-08-29 06:36:19,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=259541.33333333334, ans=0.025 +2024-08-29 06:36:22,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.07 vs. limit=22.5 +2024-08-29 06:36:26,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=259541.33333333334, ans=15.0 +2024-08-29 06:36:26,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.28 vs. 
limit=22.5 +2024-08-29 06:36:27,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=259594.66666666666, ans=0.0 +2024-08-29 06:36:45,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=259648.0, ans=0.2 +2024-08-29 06:36:45,734 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.394e+02 1.620e+02 1.985e+02 3.317e+02, threshold=3.241e+02, percent-clipped=2.0 +2024-08-29 06:36:45,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=259648.0, ans=0.125 +2024-08-29 06:36:49,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=259701.33333333334, ans=0.2 +2024-08-29 06:36:49,873 INFO [train.py:1114] (1/4) Epoch 20, batch 1400, loss[loss=0.162, simple_loss=0.2303, pruned_loss=0.03511, ctc_loss=0.05863, over 19662.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.258, pruned_loss=0.0388, ctc_loss=0.0723, over 3864220.63 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:36:51,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259701.33333333334, ans=0.125 +2024-08-29 06:36:54,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=259701.33333333334, ans=0.0 +2024-08-29 06:36:58,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=259754.66666666666, ans=0.0 +2024-08-29 06:37:17,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=259754.66666666666, ans=0.2 +2024-08-29 06:37:17,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=259754.66666666666, ans=0.125 +2024-08-29 06:37:46,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=259861.33333333334, ans=0.2 +2024-08-29 06:38:00,930 INFO [train.py:1114] (1/4) Epoch 20, batch 1450, loss[loss=0.1858, simple_loss=0.2668, pruned_loss=0.03735, ctc_loss=0.07506, over 19660.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2587, pruned_loss=0.03895, ctc_loss=0.07273, over 3862364.72 frames. 
], batch size: 63, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:38:02,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=259968.0, ans=0.0 +2024-08-29 06:38:16,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-29 06:38:24,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=260074.66666666666, ans=0.0 +2024-08-29 06:38:28,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 06:38:40,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=260128.0, ans=0.0 +2024-08-29 06:38:48,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.378e+02 1.589e+02 1.878e+02 3.405e+02, threshold=3.177e+02, percent-clipped=1.0 +2024-08-29 06:38:51,881 INFO [train.py:1114] (1/4) Epoch 20, batch 1500, loss[loss=0.1796, simple_loss=0.2626, pruned_loss=0.03389, ctc_loss=0.07207, over 19567.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2587, pruned_loss=0.03867, ctc_loss=0.07246, over 3862278.54 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:38:58,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260234.66666666666, ans=0.125 +2024-08-29 06:38:59,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=260234.66666666666, ans=0.0 +2024-08-29 06:39:05,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-08-29 06:40:24,682 INFO [train.py:1114] (1/4) Epoch 20, batch 1550, loss[loss=0.1977, simple_loss=0.2718, pruned_loss=0.04487, ctc_loss=0.08474, over 19615.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2585, pruned_loss=0.03889, ctc_loss=0.07289, over 3845948.64 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 06:40:25,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=260501.33333333334, ans=0.0 +2024-08-29 06:41:08,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=260608.0, ans=0.125 +2024-08-29 06:41:57,055 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.471e+02 1.781e+02 2.261e+02 3.819e+02, threshold=3.562e+02, percent-clipped=6.0 +2024-08-29 06:41:58,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=260714.66666666666, ans=0.5 +2024-08-29 06:42:00,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=260768.0, ans=0.95 +2024-08-29 06:42:30,554 INFO [train.py:1114] (1/4) Epoch 20, batch 1600, loss[loss=0.174, simple_loss=0.2588, pruned_loss=0.03271, ctc_loss=0.05982, over 19846.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2582, pruned_loss=0.03882, ctc_loss=0.0727, over 3836160.04 frames. 
], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 06:42:38,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=260768.0, ans=0.0 +2024-08-29 06:42:42,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=260821.33333333334, ans=0.125 +2024-08-29 06:42:48,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=260821.33333333334, ans=0.125 +2024-08-29 06:43:03,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=260928.0, ans=0.125 +2024-08-29 06:43:09,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=260928.0, ans=0.0 +2024-08-29 06:43:11,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.17 vs. limit=22.5 +2024-08-29 06:43:21,838 INFO [train.py:1114] (1/4) Epoch 20, batch 1650, loss[loss=0.1699, simple_loss=0.2562, pruned_loss=0.03034, ctc_loss=0.05718, over 19659.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2582, pruned_loss=0.03886, ctc_loss=0.07284, over 3834174.80 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:43:24,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261034.66666666666, ans=0.0 +2024-08-29 06:43:44,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=261088.0, ans=0.125 +2024-08-29 06:43:53,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0 +2024-08-29 06:44:04,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0 +2024-08-29 06:44:24,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.80 vs. limit=15.0 +2024-08-29 06:44:30,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.539e+02 1.831e+02 2.496e+02 4.278e+02, threshold=3.663e+02, percent-clipped=6.0 +2024-08-29 06:44:33,859 INFO [train.py:1114] (1/4) Epoch 20, batch 1700, loss[loss=0.1644, simple_loss=0.2319, pruned_loss=0.03551, ctc_loss=0.06488, over 19673.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2579, pruned_loss=0.03844, ctc_loss=0.07217, over 3848230.73 frames. 
], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:44:35,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261301.33333333334, ans=0.0 +2024-08-29 06:44:37,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=261301.33333333334, ans=0.07 +2024-08-29 06:44:39,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=261301.33333333334, ans=0.125 +2024-08-29 06:44:49,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261354.66666666666, ans=0.1 +2024-08-29 06:44:49,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=261354.66666666666, ans=0.025 +2024-08-29 06:44:58,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=261354.66666666666, ans=0.95 +2024-08-29 06:45:01,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261408.0, ans=0.125 +2024-08-29 06:45:08,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=261461.33333333334, ans=0.2 +2024-08-29 06:45:16,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=261461.33333333334, ans=10.0 +2024-08-29 06:45:27,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=261514.66666666666, ans=0.2 +2024-08-29 06:45:36,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=261514.66666666666, ans=0.1 +2024-08-29 06:45:38,462 INFO [train.py:1114] (1/4) Epoch 20, batch 1750, loss[loss=0.1571, simple_loss=0.2297, pruned_loss=0.03087, ctc_loss=0.05718, over 19648.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2577, pruned_loss=0.03832, ctc_loss=0.07188, over 3852510.28 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:45:39,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=261568.0, ans=0.0 +2024-08-29 06:46:04,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.19 vs. limit=15.0 +2024-08-29 06:46:19,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=261728.0, ans=0.125 +2024-08-29 06:46:27,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=261781.33333333334, ans=0.125 +2024-08-29 06:46:29,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=261781.33333333334, ans=0.0 +2024-08-29 06:46:33,065 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.489e+02 1.816e+02 2.382e+02 3.653e+02, threshold=3.632e+02, percent-clipped=0.0 +2024-08-29 06:46:36,572 INFO [train.py:1114] (1/4) Epoch 20, batch 1800, loss[loss=0.172, simple_loss=0.256, pruned_loss=0.03121, ctc_loss=0.06395, over 19620.00 frames. 
], tot_loss[loss=0.1818, simple_loss=0.2582, pruned_loss=0.03837, ctc_loss=0.07199, over 3854196.06 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:46:57,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.62 vs. limit=15.0 +2024-08-29 06:47:07,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=261941.33333333334, ans=0.0 +2024-08-29 06:47:08,866 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:47:32,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262048.0, ans=0.1 +2024-08-29 06:47:36,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262048.0, ans=0.125 +2024-08-29 06:47:36,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=262048.0, ans=0.025 +2024-08-29 06:47:43,607 INFO [train.py:1114] (1/4) Epoch 20, batch 1850, loss[loss=0.1962, simple_loss=0.2774, pruned_loss=0.04268, ctc_loss=0.07394, over 19588.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2577, pruned_loss=0.03834, ctc_loss=0.07181, over 3857282.87 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:47:50,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=262101.33333333334, ans=0.07 +2024-08-29 06:48:07,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0 +2024-08-29 06:48:23,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=15.0 +2024-08-29 06:48:29,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262261.3333333333, ans=0.1 +2024-08-29 06:48:29,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=262261.3333333333, ans=0.2 +2024-08-29 06:48:39,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262314.6666666667, ans=0.1 +2024-08-29 06:48:40,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.496e+02 1.744e+02 2.237e+02 4.849e+02, threshold=3.488e+02, percent-clipped=3.0 +2024-08-29 06:48:46,717 INFO [train.py:1114] (1/4) Epoch 20, batch 1900, loss[loss=0.1923, simple_loss=0.2727, pruned_loss=0.04072, ctc_loss=0.07629, over 19660.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2583, pruned_loss=0.0385, ctc_loss=0.07211, over 3861469.85 frames. 
], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:48:53,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262368.0, ans=0.125 +2024-08-29 06:48:56,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262368.0, ans=0.125 +2024-08-29 06:49:20,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262421.3333333333, ans=0.125 +2024-08-29 06:49:25,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=262421.3333333333, ans=0.125 +2024-08-29 06:49:37,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=262474.6666666667, ans=0.2 +2024-08-29 06:49:39,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=262474.6666666667, ans=0.125 +2024-08-29 06:49:43,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.70 vs. limit=22.5 +2024-08-29 06:49:57,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262581.3333333333, ans=0.125 +2024-08-29 06:50:04,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262581.3333333333, ans=0.125 +2024-08-29 06:50:07,326 INFO [train.py:1114] (1/4) Epoch 20, batch 1950, loss[loss=0.1595, simple_loss=0.2398, pruned_loss=0.02874, ctc_loss=0.0543, over 19589.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2592, pruned_loss=0.03874, ctc_loss=0.07266, over 3870567.74 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:50:14,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.42 vs. limit=15.0 +2024-08-29 06:50:55,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=262794.6666666667, ans=6.0 +2024-08-29 06:51:05,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.35 vs. limit=10.0 +2024-08-29 06:51:06,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262848.0, ans=0.0 +2024-08-29 06:51:10,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=262848.0, ans=0.125 +2024-08-29 06:51:11,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=262848.0, ans=0.5 +2024-08-29 06:51:12,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=7.09 vs. 
limit=12.0 +2024-08-29 06:51:12,406 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.401e+02 1.548e+02 2.025e+02 3.566e+02, threshold=3.095e+02, percent-clipped=1.0 +2024-08-29 06:51:15,993 INFO [train.py:1114] (1/4) Epoch 20, batch 2000, loss[loss=0.1575, simple_loss=0.2288, pruned_loss=0.03091, ctc_loss=0.06068, over 19661.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2597, pruned_loss=0.03898, ctc_loss=0.07306, over 3854852.83 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:51:35,142 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:51:41,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263008.0, ans=0.1 +2024-08-29 06:51:55,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263114.6666666667, ans=0.1 +2024-08-29 06:51:59,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263114.6666666667, ans=0.0 +2024-08-29 06:52:02,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=263114.6666666667, ans=0.2 +2024-08-29 06:52:04,161 INFO [train.py:1114] (1/4) Epoch 20, batch 2050, loss[loss=0.1631, simple_loss=0.2371, pruned_loss=0.03256, ctc_loss=0.06009, over 19690.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2589, pruned_loss=0.03873, ctc_loss=0.07272, over 3849839.36 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:52:11,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=263168.0, ans=0.025 +2024-08-29 06:52:15,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.74 vs. limit=15.0 +2024-08-29 06:52:28,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.35 vs. limit=6.0 +2024-08-29 06:52:54,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-08-29 06:52:55,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=263328.0, ans=0.125 +2024-08-29 06:53:03,406 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.460e+02 1.739e+02 2.291e+02 5.164e+02, threshold=3.479e+02, percent-clipped=12.0 +2024-08-29 06:53:06,948 INFO [train.py:1114] (1/4) Epoch 20, batch 2100, loss[loss=0.184, simple_loss=0.2633, pruned_loss=0.03842, ctc_loss=0.06977, over 19757.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2584, pruned_loss=0.03836, ctc_loss=0.07209, over 3857655.54 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:53:33,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.50 vs. 
limit=15.0 +2024-08-29 06:53:42,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=263594.6666666667, ans=0.09899494936611666 +2024-08-29 06:53:50,045 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.39 vs. limit=15.0 +2024-08-29 06:54:01,640 INFO [train.py:1114] (1/4) Epoch 20, batch 2150, loss[loss=0.1803, simple_loss=0.2539, pruned_loss=0.0387, ctc_loss=0.07308, over 19590.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2575, pruned_loss=0.03815, ctc_loss=0.07144, over 3868423.44 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:54:06,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=263701.3333333333, ans=0.125 +2024-08-29 06:54:11,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.80 vs. limit=15.0 +2024-08-29 06:54:19,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0 +2024-08-29 06:54:29,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263861.3333333333, ans=0.0 +2024-08-29 06:54:33,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263861.3333333333, ans=0.1 +2024-08-29 06:54:44,304 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.492e+02 1.849e+02 2.492e+02 5.041e+02, threshold=3.698e+02, percent-clipped=4.0 +2024-08-29 06:55:33,944 INFO [train.py:1114] (1/4) Epoch 20, batch 2200, loss[loss=0.1917, simple_loss=0.2651, pruned_loss=0.04273, ctc_loss=0.08223, over 19583.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2572, pruned_loss=0.03809, ctc_loss=0.07122, over 3866680.19 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:55:36,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=263968.0, ans=0.2 +2024-08-29 06:55:38,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=263968.0, ans=0.125 +2024-08-29 06:55:38,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=263968.0, ans=0.125 +2024-08-29 06:55:49,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.50 vs. limit=15.0 +2024-08-29 06:55:51,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264021.3333333333, ans=0.125 +2024-08-29 06:56:38,503 INFO [train.py:1114] (1/4) Epoch 20, batch 2250, loss[loss=0.1926, simple_loss=0.2712, pruned_loss=0.04168, ctc_loss=0.07666, over 19619.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2576, pruned_loss=0.0382, ctc_loss=0.07134, over 3866736.95 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:57:02,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264341.3333333333, ans=0.1 +2024-08-29 06:57:38,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264394.6666666667, ans=0.125 +2024-08-29 06:57:47,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.07 vs. limit=12.0 +2024-08-29 06:57:58,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.422e+02 1.843e+02 2.549e+02 5.039e+02, threshold=3.686e+02, percent-clipped=5.0 +2024-08-29 06:58:01,870 INFO [train.py:1114] (1/4) Epoch 20, batch 2300, loss[loss=0.1754, simple_loss=0.2414, pruned_loss=0.03932, ctc_loss=0.07689, over 19515.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2569, pruned_loss=0.03823, ctc_loss=0.07146, over 3860575.75 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:58:03,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-29 06:59:08,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264608.0, ans=0.125 +2024-08-29 06:59:17,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=264608.0, ans=0.025 +2024-08-29 06:59:20,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=264661.3333333333, ans=0.2 +2024-08-29 06:59:31,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=264714.6666666667, ans=0.125 +2024-08-29 06:59:34,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264714.6666666667, ans=0.1 +2024-08-29 07:00:03,998 INFO [train.py:1114] (1/4) Epoch 20, batch 2350, loss[loss=0.2015, simple_loss=0.273, pruned_loss=0.04752, ctc_loss=0.08715, over 19677.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.257, pruned_loss=0.03856, ctc_loss=0.07197, over 3863388.31 frames. 
], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:00:04,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=264768.0, ans=0.125 +2024-08-29 07:11:46,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=264874.6666666667, ans=0.125 +2024-08-29 07:14:14,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=264928.0, ans=0.0 +2024-08-29 07:15:56,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=264981.3333333333, ans=0.09899494936611666 +2024-08-29 07:19:05,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264981.3333333333, ans=0.125 +2024-08-29 07:20:35,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.441e+02 1.702e+02 2.205e+02 4.204e+02, threshold=3.404e+02, percent-clipped=1.0 +2024-08-29 07:22:42,549 INFO [train.py:1114] (1/4) Epoch 20, batch 2400, loss[loss=0.205, simple_loss=0.2729, pruned_loss=0.0496, ctc_loss=0.09479, over 19227.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.259, pruned_loss=0.03926, ctc_loss=0.07313, over 3857894.04 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:36:18,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.42 vs. limit=15.0 +2024-08-29 07:41:06,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=265194.6666666667, ans=0.025 +2024-08-29 07:42:31,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265194.6666666667, ans=0.125 +2024-08-29 07:44:35,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=265248.0, ans=0.025 +2024-08-29 07:50:39,246 INFO [train.py:1114] (1/4) Epoch 20, batch 2450, loss[loss=0.2289, simple_loss=0.2855, pruned_loss=0.062, ctc_loss=0.1207, over 13318.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2619, pruned_loss=0.041, ctc_loss=0.07685, over 3731381.34 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:56:05,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=265354.6666666667, ans=0.0 +2024-08-29 07:56:51,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=265354.6666666667, ans=0.05 +2024-08-29 07:57:07,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=265354.6666666667, ans=0.2 +2024-08-29 07:57:08,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=22.5 +2024-08-29 07:57:26,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=265408.0, ans=0.125 +2024-08-29 08:06:23,741 INFO [train.py:1387] (1/4) Done! 
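The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...` lines in the log above summarize the optimizer's gradient-norm clipping: the five numbers are the minimum, 25th percentile, median, 75th percentile, and maximum of recently observed total gradient norms, and in every warning the reported threshold equals `clipping_scale` times the median (e.g. 2.0 × 1.674e+02 ≈ 3.347e+02 in the first warning of this excerpt). The sketch below reproduces that bookkeeping in isolation; it is a minimal illustration of the rule implied by the log fields, with hypothetical names (`GradNormClipper`, `window`), not the actual `ScaledAdam` implementation in icefall's `optim.py`.

```python
import torch
from collections import deque


class GradNormClipper:
    """Minimal sketch of quartile-based gradient clipping, assuming the rule
    implied by the WARNING lines: threshold = clipping_scale * median of
    recently observed gradient norms. Names and the windowing scheme are
    hypothetical, not icefall's actual optim.py internals."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total grad norms
        self.num_clipped = 0               # simplified: cumulative, never reset
        self.num_seen = 0

    def step(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        # Total L2 norm over all parameter gradients for this batch.
        norm = torch.linalg.vector_norm(
            torch.stack([torch.linalg.vector_norm(g) for g in grads])
        ).item()
        self.norms.append(norm)
        self.num_seen += 1

        # Quartiles (min, 25%, 50%, 75%, max) of the recent-norm window,
        # matching the five numbers printed in the WARNING lines.
        s = sorted(self.norms)
        n = len(s)
        quartiles = [s[int(q * (n - 1))] for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]  # scale * median

        if norm > threshold:
            self.num_clipped += 1
            for g in grads:
                g.mul_(threshold / norm)  # rescale grads down to the threshold

        print(
            f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
            + " ".join(f"{q:.3e}" for q in quartiles)
            + f", threshold={threshold:.3e}, "
            + f"percent-clipped={100.0 * self.num_clipped / self.num_seen:.1f}"
        )
        return norm
```

Calling `clipper.step(model.parameters())` after `loss.backward()` and before the optimizer update would emit lines shaped like the warnings above; in the real run the statistics live inside the optimizer itself, and `percent-clipped` is reported per logging interval rather than cumulatively as in this sketch.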
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-2 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-2 new file mode 100644 index 0000000000000000000000000000000000000000..b65992db8a0030261dfd897ea8aa18de6b85209e --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-2 @@ -0,0 +1,716 @@ +2024-08-29 01:48:53,989 INFO [train.py:1182] (2/4) Training started +2024-08-29 01:48:53,990 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-29 01:48:56,897 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 
'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 01:48:56,897 INFO [train.py:1212] (2/4) About to create model +2024-08-29 01:48:58,136 INFO [train.py:1216] (2/4) Number of model parameters: 65805511 +2024-08-29 01:48:58,379 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 01:49:57,718 INFO [train.py:1231] (2/4) Using DDP +2024-08-29 01:52:46,800 INFO [train.py:1243] (2/4) Loading optimizer state dict +2024-08-29 01:52:46,998 INFO [train.py:1251] (2/4) Loading scheduler state dict +2024-08-29 01:52:46,999 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-08-29 01:52:48,683 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-08-29 01:52:48,684 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts +2024-08-29 01:54:40,097 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts +2024-08-29 01:54:41,302 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset +2024-08-29 01:54:41,615 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader +2024-08-29 01:54:41,616 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:04:12,037 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.62 vs. limit=7.5 +2024-08-29 02:04:13,233 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12111MB +2024-08-29 02:04:14,349 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12111MB +2024-08-29 02:12:13,316 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12111MB +2024-08-29 02:12:14,564 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12135MB +2024-08-29 02:16:50,157 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12135MB +2024-08-29 02:16:51,493 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12135MB +2024-08-29 02:16:51,511 INFO [train.py:1344] (2/4) Loading grad scaler state dict +2024-08-29 02:18:18,023 INFO [train.py:1114] (2/4) Epoch 19, batch 0, loss[loss=0.1782, simple_loss=0.248, pruned_loss=0.04, ctc_loss=0.07085, over 19405.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.248, pruned_loss=0.04, ctc_loss=0.07085, over 19405.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 02:18:18,024 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 02:19:56,210 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-29 02:19:56,210 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12135MB +2024-08-29 02:19:56,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.70 vs. 
limit=12.0 +2024-08-29 02:21:53,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.43 vs. limit=15.0 +2024-08-29 02:22:52,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238954.66666666666, ans=0.125 +2024-08-29 02:26:58,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239008.0, ans=0.1 +2024-08-29 02:58:02,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=239114.66666666666, ans=0.025 +2024-08-29 02:59:11,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-29 03:07:49,835 INFO [train.py:1114] (2/4) Epoch 19, batch 50, loss[loss=0.1805, simple_loss=0.2446, pruned_loss=0.04261, ctc_loss=0.07782, over 19697.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2612, pruned_loss=0.04071, ctc_loss=0.07638, over 843613.11 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:14:53,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239221.33333333334, ans=0.125 +2024-08-29 03:17:24,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=239274.66666666666, ans=0.125 +2024-08-29 03:21:38,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=239328.0, ans=0.2 +2024-08-29 03:22:07,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239328.0, ans=0.1 +2024-08-29 03:22:22,815 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.482e+02 1.734e+02 2.141e+02 3.301e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-29 03:37:40,989 INFO [train.py:1114] (2/4) Epoch 19, batch 100, loss[loss=0.157, simple_loss=0.239, pruned_loss=0.02638, ctc_loss=0.05594, over 19711.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.263, pruned_loss=0.04078, ctc_loss=0.0767, over 1498173.55 frames. 
], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:42:11,166 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 03:43:29,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-29 03:45:51,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-29 03:46:25,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239594.66666666666, ans=0.125 +2024-08-29 03:54:23,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=239648.0, ans=0.2 +2024-08-29 03:54:39,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=239701.33333333334, ans=0.0 +2024-08-29 03:55:56,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.55 vs. limit=15.0 +2024-08-29 03:56:43,841 INFO [train.py:1114] (2/4) Epoch 19, batch 150, loss[loss=0.167, simple_loss=0.2352, pruned_loss=0.03609, ctc_loss=0.06636, over 19712.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2605, pruned_loss=0.04024, ctc_loss=0.07533, over 2027757.76 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:56:44,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239754.66666666666, ans=0.1 +2024-08-29 03:57:07,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239808.0, ans=0.125 +2024-08-29 03:58:56,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=239861.33333333334, ans=0.04949747468305833 +2024-08-29 04:01:43,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 04:01:44,334 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.487e+02 1.911e+02 2.455e+02 3.758e+02, threshold=3.822e+02, percent-clipped=3.0 +2024-08-29 04:01:44,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239914.66666666666, ans=0.1 +2024-08-29 04:02:35,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-29 04:05:46,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-29 04:09:49,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-29 04:10:04,524 INFO [train.py:1114] (2/4) Epoch 19, batch 200, loss[loss=0.2193, simple_loss=0.2879, pruned_loss=0.0542, ctc_loss=0.106, over 18193.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2605, pruned_loss=0.04013, ctc_loss=0.0752, over 2435345.63 frames. 
], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:10:15,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-29 04:11:36,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.48 vs. limit=15.0 +2024-08-29 04:12:22,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.79 vs. limit=15.0 +2024-08-29 04:17:17,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240128.0, ans=0.1 +2024-08-29 04:19:24,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-29 04:19:45,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-29 04:23:46,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=240288.0, ans=0.04949747468305833 +2024-08-29 04:23:53,225 INFO [train.py:1114] (2/4) Epoch 19, batch 250, loss[loss=0.1984, simple_loss=0.2701, pruned_loss=0.04645, ctc_loss=0.0848, over 19434.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03963, ctc_loss=0.07424, over 2756225.88 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:24:21,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=240288.0, ans=0.125 +2024-08-29 04:24:28,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240288.0, ans=0.125 +2024-08-29 04:26:53,932 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-08-29 04:27:48,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0 +2024-08-29 04:27:49,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.40 vs. limit=22.5 +2024-08-29 04:28:26,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.98 vs. 
limit=15.0 +2024-08-29 04:28:42,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=240394.66666666666, ans=0.0 +2024-08-29 04:29:03,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=240448.0, ans=0.0 +2024-08-29 04:29:04,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.446e+02 1.716e+02 2.275e+02 4.235e+02, threshold=3.432e+02, percent-clipped=4.0 +2024-08-29 04:29:23,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=240448.0, ans=0.2 +2024-08-29 04:29:24,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240448.0, ans=0.125 +2024-08-29 04:30:02,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=240501.33333333334, ans=0.09899494936611666 +2024-08-29 04:31:33,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=240501.33333333334, ans=10.0 +2024-08-29 04:31:34,646 INFO [train.py:1114] (2/4) Epoch 19, batch 300, loss[loss=0.199, simple_loss=0.2719, pruned_loss=0.04704, ctc_loss=0.08012, over 19508.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.259, pruned_loss=0.03943, ctc_loss=0.07406, over 3000511.74 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:34:10,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-29 04:34:19,593 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:34:39,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=240661.33333333334, ans=0.125 +2024-08-29 04:35:42,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-29 04:38:09,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240768.0, ans=0.125 +2024-08-29 04:38:10,587 INFO [train.py:1114] (2/4) Epoch 19, batch 350, loss[loss=0.1787, simple_loss=0.2465, pruned_loss=0.03987, ctc_loss=0.07794, over 19796.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2602, pruned_loss=0.03954, ctc_loss=0.07423, over 3190250.24 frames. 
], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:41:53,772 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:41:56,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240928.0, ans=0.125 +2024-08-29 04:44:05,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.489e+02 1.897e+02 2.425e+02 4.045e+02, threshold=3.795e+02, percent-clipped=4.0 +2024-08-29 04:44:07,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240981.33333333334, ans=0.1 +2024-08-29 04:44:07,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240981.33333333334, ans=0.0 +2024-08-29 04:44:14,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.22 vs. limit=22.5 +2024-08-29 04:44:18,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-29 04:44:27,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=241034.66666666666, ans=0.025 +2024-08-29 04:44:29,327 INFO [train.py:1114] (2/4) Epoch 19, batch 400, loss[loss=0.1808, simple_loss=0.2627, pruned_loss=0.03589, ctc_loss=0.06772, over 19481.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2597, pruned_loss=0.03931, ctc_loss=0.07371, over 3342450.82 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:44:47,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=241088.0, ans=0.2 +2024-08-29 04:45:06,898 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:45:20,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-29 04:46:24,124 INFO [train.py:1114] (2/4) Epoch 19, batch 450, loss[loss=0.1816, simple_loss=0.2642, pruned_loss=0.03649, ctc_loss=0.06522, over 19612.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.26, pruned_loss=0.03973, ctc_loss=0.07442, over 3450474.67 frames. 
], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:46:51,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=241354.66666666666, ans=0.0 +2024-08-29 04:46:57,891 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:47:33,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=241408.0, ans=0.125 +2024-08-29 04:47:34,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-29 04:47:41,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-29 04:47:47,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-29 04:47:54,990 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.394e+02 1.625e+02 2.143e+02 3.810e+02, threshold=3.251e+02, percent-clipped=1.0 +2024-08-29 04:48:12,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=241514.66666666666, ans=0.04949747468305833 +2024-08-29 04:48:25,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=241568.0, ans=0.2 +2024-08-29 04:48:25,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0 +2024-08-29 04:49:55,395 INFO [train.py:1114] (2/4) Epoch 19, batch 500, loss[loss=0.2026, simple_loss=0.278, pruned_loss=0.0477, ctc_loss=0.07953, over 19693.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2592, pruned_loss=0.03952, ctc_loss=0.07394, over 3545980.66 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:49:57,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.74 vs. limit=12.0 +2024-08-29 04:52:53,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=241728.0, ans=0.02 +2024-08-29 04:52:53,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=15.0 +2024-08-29 04:52:59,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241728.0, ans=0.1 +2024-08-29 04:53:09,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-29 04:53:10,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=241781.33333333334, ans=0.025 +2024-08-29 04:54:06,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.10 vs. 
limit=15.0 +2024-08-29 04:54:08,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 04:54:14,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241888.0, ans=0.1 +2024-08-29 04:54:15,398 INFO [train.py:1114] (2/4) Epoch 19, batch 550, loss[loss=0.2074, simple_loss=0.2773, pruned_loss=0.05026, ctc_loss=0.09235, over 19316.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.259, pruned_loss=0.03944, ctc_loss=0.07372, over 3608060.51 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:54:50,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241888.0, ans=0.1 +2024-08-29 04:55:11,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=241994.66666666666, ans=0.2 +2024-08-29 04:55:11,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-29 04:55:27,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-29 04:55:29,722 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.703e+02 2.107e+02 3.697e+02, threshold=3.406e+02, percent-clipped=1.0 +2024-08-29 04:57:39,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=242101.33333333334, ans=0.07 +2024-08-29 04:57:44,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0 +2024-08-29 04:57:45,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=242101.33333333334, ans=0.05 +2024-08-29 04:57:47,834 INFO [train.py:1114] (2/4) Epoch 19, batch 600, loss[loss=0.1945, simple_loss=0.2695, pruned_loss=0.04374, ctc_loss=0.08006, over 19426.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.03936, ctc_loss=0.07358, over 3664988.97 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:58:02,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=242208.0, ans=0.02 +2024-08-29 04:58:23,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=242208.0, ans=0.125 +2024-08-29 04:58:37,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=15.0 +2024-08-29 05:00:05,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=242421.33333333334, ans=0.1 +2024-08-29 05:00:06,351 INFO [train.py:1114] (2/4) Epoch 19, batch 650, loss[loss=0.1791, simple_loss=0.2575, pruned_loss=0.03624, ctc_loss=0.07075, over 19774.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2586, pruned_loss=0.03912, ctc_loss=0.07326, over 3715999.86 frames. 
], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:00:14,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=242474.66666666666, ans=0.125 +2024-08-29 05:00:15,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242474.66666666666, ans=0.1 +2024-08-29 05:00:32,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.19 vs. limit=22.5 +2024-08-29 05:00:59,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.521e+02 1.842e+02 2.430e+02 3.637e+02, threshold=3.684e+02, percent-clipped=5.0 +2024-08-29 05:01:53,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242581.33333333334, ans=0.1 +2024-08-29 05:01:53,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0 +2024-08-29 05:01:54,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=242581.33333333334, ans=0.125 +2024-08-29 05:02:27,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=242634.66666666666, ans=0.2 +2024-08-29 05:02:33,349 INFO [train.py:1114] (2/4) Epoch 19, batch 700, loss[loss=0.1769, simple_loss=0.2519, pruned_loss=0.03646, ctc_loss=0.0727, over 19734.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2594, pruned_loss=0.03938, ctc_loss=0.07375, over 3749459.92 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:02:34,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=242688.0, ans=0.125 +2024-08-29 05:03:22,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.75 vs. limit=15.0 +2024-08-29 05:03:36,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=242741.33333333334, ans=0.0 +2024-08-29 05:03:45,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=242794.66666666666, ans=0.0 +2024-08-29 05:05:35,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=242901.33333333334, ans=0.2 +2024-08-29 05:06:34,957 INFO [train.py:1114] (2/4) Epoch 19, batch 750, loss[loss=0.1683, simple_loss=0.2487, pruned_loss=0.0321, ctc_loss=0.05902, over 19506.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2591, pruned_loss=0.03949, ctc_loss=0.07395, over 3774607.39 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:06:37,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=242954.66666666666, ans=0.0 +2024-08-29 05:06:59,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.62 vs. 
limit=15.0 +2024-08-29 05:07:19,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.493e+02 1.888e+02 2.375e+02 3.905e+02, threshold=3.776e+02, percent-clipped=3.0 +2024-08-29 05:07:20,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=243114.66666666666, ans=0.0 +2024-08-29 05:07:22,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0 +2024-08-29 05:08:09,968 INFO [train.py:1114] (2/4) Epoch 19, batch 800, loss[loss=0.1635, simple_loss=0.2367, pruned_loss=0.033, ctc_loss=0.06087, over 19799.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2585, pruned_loss=0.0393, ctc_loss=0.07349, over 3796075.64 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:09:31,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.48 vs. limit=15.0 +2024-08-29 05:09:38,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=243434.66666666666, ans=0.025 +2024-08-29 05:09:49,270 INFO [train.py:1114] (2/4) Epoch 19, batch 850, loss[loss=0.1918, simple_loss=0.2767, pruned_loss=0.03964, ctc_loss=0.0691, over 19632.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2588, pruned_loss=0.0394, ctc_loss=0.07374, over 3814477.48 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:10:11,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=243541.33333333334, ans=0.2 +2024-08-29 05:10:21,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243594.66666666666, ans=0.1 +2024-08-29 05:10:29,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.53 vs. limit=10.0 +2024-08-29 05:10:31,638 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.428e+02 1.590e+02 2.047e+02 2.882e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-29 05:10:32,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=243648.0, ans=0.025 +2024-08-29 05:10:46,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=243701.33333333334, ans=0.0 +2024-08-29 05:10:46,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 05:10:46,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 05:10:47,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=243701.33333333334, ans=0.2 +2024-08-29 05:10:49,648 INFO [train.py:1114] (2/4) Epoch 19, batch 900, loss[loss=0.1674, simple_loss=0.2358, pruned_loss=0.03568, ctc_loss=0.06916, over 19400.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2593, pruned_loss=0.03987, ctc_loss=0.07472, over 3817687.94 frames. 
], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:16,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=243861.33333333334, ans=0.0 +2024-08-29 05:11:22,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 05:11:35,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=243968.0, ans=0.125 +2024-08-29 05:11:41,927 INFO [train.py:1114] (2/4) Epoch 19, batch 950, loss[loss=0.1654, simple_loss=0.2364, pruned_loss=0.03451, ctc_loss=0.06364, over 19503.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2595, pruned_loss=0.04005, ctc_loss=0.07489, over 3817453.70 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:54,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=244074.66666666666, ans=0.035 +2024-08-29 05:12:03,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=244128.0, ans=0.125 +2024-08-29 05:12:09,031 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:12:12,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.442e+02 1.730e+02 2.025e+02 3.837e+02, threshold=3.461e+02, percent-clipped=4.0 +2024-08-29 05:12:16,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=244181.33333333334, ans=0.125 +2024-08-29 05:12:30,112 INFO [train.py:1114] (2/4) Epoch 19, batch 1000, loss[loss=0.1628, simple_loss=0.2419, pruned_loss=0.03078, ctc_loss=0.05551, over 19847.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2601, pruned_loss=0.0401, ctc_loss=0.07502, over 3813881.18 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:12:42,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=244341.33333333334, ans=0.2 +2024-08-29 05:13:07,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=15.0 +2024-08-29 05:13:07,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.38 vs. limit=10.0 +2024-08-29 05:13:08,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.83 vs. limit=15.0 +2024-08-29 05:13:16,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=244501.33333333334, ans=0.025 +2024-08-29 05:13:19,526 INFO [train.py:1114] (2/4) Epoch 19, batch 1050, loss[loss=0.1862, simple_loss=0.2626, pruned_loss=0.04021, ctc_loss=0.07337, over 19837.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2594, pruned_loss=0.03982, ctc_loss=0.07438, over 3821505.79 frames. 
], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:13:23,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-29 05:13:47,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244608.0, ans=0.125 +2024-08-29 05:13:48,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=244608.0, ans=0.125 +2024-08-29 05:13:50,133 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.37 vs. limit=15.0 +2024-08-29 05:13:57,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244661.33333333334, ans=0.125 +2024-08-29 05:14:05,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.364e+02 1.577e+02 1.842e+02 2.540e+02, threshold=3.153e+02, percent-clipped=0.0 +2024-08-29 05:14:12,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=244714.66666666666, ans=0.125 +2024-08-29 05:14:25,268 INFO [train.py:1114] (2/4) Epoch 19, batch 1100, loss[loss=0.1797, simple_loss=0.258, pruned_loss=0.03645, ctc_loss=0.07122, over 19574.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.03974, ctc_loss=0.07414, over 3828692.34 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:15:06,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-08-29 05:15:29,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=244981.33333333334, ans=0.2 +2024-08-29 05:16:51,076 INFO [train.py:1114] (2/4) Epoch 19, batch 1150, loss[loss=0.1597, simple_loss=0.2379, pruned_loss=0.02964, ctc_loss=0.05553, over 19586.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.03973, ctc_loss=0.07418, over 3826076.37 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:17:13,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.46 vs. 
limit=10.0 +2024-08-29 05:17:40,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-29 05:17:42,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245141.33333333334, ans=0.1 +2024-08-29 05:17:55,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=245194.66666666666, ans=0.2 +2024-08-29 05:18:08,430 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.413e+02 1.588e+02 2.044e+02 3.492e+02, threshold=3.177e+02, percent-clipped=5.0 +2024-08-29 05:19:03,920 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:19:40,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=245301.33333333334, ans=0.2 +2024-08-29 05:19:41,714 INFO [train.py:1114] (2/4) Epoch 19, batch 1200, loss[loss=0.1959, simple_loss=0.268, pruned_loss=0.04555, ctc_loss=0.08182, over 19843.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2602, pruned_loss=0.03981, ctc_loss=0.07441, over 3821922.92 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:19:42,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=245354.66666666666, ans=0.125 +2024-08-29 05:20:30,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=245568.0, ans=0.0 +2024-08-29 05:20:30,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245568.0, ans=0.125 +2024-08-29 05:20:51,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245568.0, ans=0.1 +2024-08-29 05:20:54,845 INFO [train.py:1114] (2/4) Epoch 19, batch 1250, loss[loss=0.2028, simple_loss=0.2759, pruned_loss=0.04724, ctc_loss=0.0881, over 19548.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2611, pruned_loss=0.04014, ctc_loss=0.0749, over 3840961.06 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:21:31,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=245674.66666666666, ans=0.0 +2024-08-29 05:21:47,050 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.448e+02 1.786e+02 2.342e+02 3.930e+02, threshold=3.573e+02, percent-clipped=1.0 +2024-08-29 05:21:56,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=245781.33333333334, ans=0.0 +2024-08-29 05:21:58,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245781.33333333334, ans=0.0 +2024-08-29 05:21:59,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=245781.33333333334, ans=0.125 +2024-08-29 05:22:27,729 INFO [train.py:1114] (2/4) Epoch 19, batch 1300, loss[loss=0.185, simple_loss=0.2648, pruned_loss=0.03827, ctc_loss=0.07185, over 18921.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2601, pruned_loss=0.03992, ctc_loss=0.0743, over 3844775.59 frames. 
], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:22:38,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-08-29 05:22:55,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-29 05:22:56,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=245941.33333333334, ans=0.2 +2024-08-29 05:23:01,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.34 vs. limit=15.0 +2024-08-29 05:23:03,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=245994.66666666666, ans=0.95 +2024-08-29 05:23:11,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=246048.0, ans=0.125 +2024-08-29 05:23:21,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.35 vs. limit=15.0 +2024-08-29 05:23:23,038 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0 +2024-08-29 05:23:26,125 INFO [train.py:1114] (2/4) Epoch 19, batch 1350, loss[loss=0.171, simple_loss=0.2487, pruned_loss=0.03377, ctc_loss=0.06414, over 19770.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2596, pruned_loss=0.03964, ctc_loss=0.07373, over 3855306.68 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:23:29,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-29 05:23:44,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246208.0, ans=0.1 +2024-08-29 05:23:50,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=246261.33333333334, ans=0.2 +2024-08-29 05:23:59,057 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.393e+02 1.600e+02 2.060e+02 3.630e+02, threshold=3.201e+02, percent-clipped=1.0 +2024-08-29 05:24:26,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=246368.0, ans=0.125 +2024-08-29 05:24:31,012 INFO [train.py:1114] (2/4) Epoch 19, batch 1400, loss[loss=0.1713, simple_loss=0.2403, pruned_loss=0.03782, ctc_loss=0.0667, over 19671.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2591, pruned_loss=0.03947, ctc_loss=0.07359, over 3863274.26 frames. 
], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:24:33,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246421.33333333334, ans=0.1 +2024-08-29 05:24:53,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=246474.66666666666, ans=0.0 +2024-08-29 05:24:56,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=246528.0, ans=0.0 +2024-08-29 05:24:56,713 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.91 vs. limit=15.0 +2024-08-29 05:24:57,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.43 vs. limit=15.0 +2024-08-29 05:24:58,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.51 vs. limit=15.0 +2024-08-29 05:25:01,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246528.0, ans=0.1 +2024-08-29 05:25:14,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-29 05:25:21,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=246634.66666666666, ans=0.1 +2024-08-29 05:25:24,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=246634.66666666666, ans=0.025 +2024-08-29 05:25:26,820 INFO [train.py:1114] (2/4) Epoch 19, batch 1450, loss[loss=0.1967, simple_loss=0.2745, pruned_loss=0.04375, ctc_loss=0.07867, over 19636.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2601, pruned_loss=0.04, ctc_loss=0.07466, over 3860184.88 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:25:46,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246741.33333333334, ans=0.1 +2024-08-29 05:26:22,440 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.397e+02 1.549e+02 1.935e+02 4.281e+02, threshold=3.099e+02, percent-clipped=1.0 +2024-08-29 05:26:23,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=246848.0, ans=0.0 +2024-08-29 05:26:23,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-29 05:26:39,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=246901.33333333334, ans=0.125 +2024-08-29 05:26:46,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246901.33333333334, ans=0.1 +2024-08-29 05:26:50,110 INFO [train.py:1114] (2/4) Epoch 19, batch 1500, loss[loss=0.193, simple_loss=0.2704, pruned_loss=0.04229, ctc_loss=0.07779, over 19590.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2605, pruned_loss=0.04005, ctc_loss=0.07463, over 3861306.88 frames. 
], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 05:26:51,430 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:26:52,531 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0 +2024-08-29 05:27:21,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0 +2024-08-29 05:27:26,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0 +2024-08-29 05:28:02,447 INFO [train.py:1114] (2/4) Epoch 19, batch 1550, loss[loss=0.193, simple_loss=0.2704, pruned_loss=0.04188, ctc_loss=0.07954, over 19613.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2604, pruned_loss=0.04013, ctc_loss=0.07491, over 3845734.36 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 05:28:04,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.10 vs. limit=22.5 +2024-08-29 05:28:05,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=247221.33333333334, ans=10.0 +2024-08-29 05:28:14,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=247274.66666666666, ans=0.0 +2024-08-29 05:28:30,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=247328.0, ans=0.2 +2024-08-29 05:28:32,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.16 vs. limit=15.0 +2024-08-29 05:28:37,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247328.0, ans=0.1 +2024-08-29 05:28:45,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.405e+02 1.646e+02 2.216e+02 3.789e+02, threshold=3.291e+02, percent-clipped=3.0 +2024-08-29 05:29:02,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247434.66666666666, ans=0.1 +2024-08-29 05:29:48,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.55 vs. limit=15.0 +2024-08-29 05:30:04,282 INFO [train.py:1114] (2/4) Epoch 19, batch 1600, loss[loss=0.1877, simple_loss=0.274, pruned_loss=0.03619, ctc_loss=0.07273, over 19825.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2607, pruned_loss=0.04037, ctc_loss=0.07533, over 3835103.23 frames. 
], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 05:30:13,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=247541.33333333334, ans=0.09899494936611666 +2024-08-29 05:30:27,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:30:49,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:31:05,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247701.33333333334, ans=0.1 +2024-08-29 05:31:08,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=247701.33333333334, ans=0.2 +2024-08-29 05:31:12,550 INFO [train.py:1114] (2/4) Epoch 19, batch 1650, loss[loss=0.195, simple_loss=0.2701, pruned_loss=0.0441, ctc_loss=0.07906, over 19643.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2602, pruned_loss=0.04022, ctc_loss=0.075, over 3830846.76 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 05:31:12,972 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0 +2024-08-29 05:31:39,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=247754.66666666666, ans=0.125 +2024-08-29 05:31:47,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=247808.0, ans=0.125 +2024-08-29 05:32:30,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.667e+02 2.011e+02 2.433e+02 4.037e+02, threshold=4.021e+02, percent-clipped=5.0 +2024-08-29 05:32:54,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247968.0, ans=0.1 +2024-08-29 05:33:13,446 INFO [train.py:1114] (2/4) Epoch 19, batch 1700, loss[loss=0.1743, simple_loss=0.2416, pruned_loss=0.03925, ctc_loss=0.07133, over 19654.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2598, pruned_loss=0.03975, ctc_loss=0.07421, over 3846161.58 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:33:14,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. limit=15.0 +2024-08-29 05:33:15,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248021.33333333334, ans=0.1 +2024-08-29 05:33:18,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=248021.33333333334, ans=0.125 +2024-08-29 05:33:37,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=248128.0, ans=0.0 +2024-08-29 05:33:41,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.98 vs. 
limit=15.0 +2024-08-29 05:33:42,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=248181.33333333334, ans=0.125 +2024-08-29 05:33:42,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=248181.33333333334, ans=0.0 +2024-08-29 05:34:36,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=248234.66666666666, ans=0.0 +2024-08-29 05:34:40,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=248234.66666666666, ans=0.125 +2024-08-29 05:34:41,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=248234.66666666666, ans=0.2 +2024-08-29 05:34:43,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=248288.0, ans=0.0 +2024-08-29 05:34:43,949 INFO [train.py:1114] (2/4) Epoch 19, batch 1750, loss[loss=0.1617, simple_loss=0.2384, pruned_loss=0.0303, ctc_loss=0.06103, over 19643.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.03934, ctc_loss=0.07367, over 3852816.79 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:34:44,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=248288.0, ans=0.04949747468305833 +2024-08-29 05:34:56,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=248341.33333333334, ans=0.125 +2024-08-29 05:35:11,463 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.518e+02 1.916e+02 2.294e+02 3.621e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-29 05:35:13,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248448.0, ans=0.125 +2024-08-29 05:35:19,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=248501.33333333334, ans=0.125 +2024-08-29 05:35:22,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=248501.33333333334, ans=0.2 +2024-08-29 05:35:27,416 INFO [train.py:1114] (2/4) Epoch 19, batch 1800, loss[loss=0.1968, simple_loss=0.2738, pruned_loss=0.04408, ctc_loss=0.079, over 19631.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03939, ctc_loss=0.0737, over 3852949.68 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:35:37,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-08-29 05:36:10,874 INFO [train.py:1114] (2/4) Epoch 19, batch 1850, loss[loss=0.1854, simple_loss=0.2659, pruned_loss=0.03819, ctc_loss=0.07132, over 19557.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2593, pruned_loss=0.03944, ctc_loss=0.07376, over 3855688.77 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-29 05:37:05,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.64 vs. 
limit=15.0 +2024-08-29 05:37:56,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.69 vs. limit=22.5 +2024-08-29 05:38:01,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=12.0 +2024-08-29 05:38:04,268 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.550e+02 2.027e+02 2.927e+02 4.792e+02, threshold=4.055e+02, percent-clipped=10.0 +2024-08-29 05:38:23,696 INFO [train.py:1114] (2/4) Epoch 19, batch 1900, loss[loss=0.1887, simple_loss=0.2673, pruned_loss=0.04003, ctc_loss=0.07499, over 19664.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2599, pruned_loss=0.03948, ctc_loss=0.0739, over 3860408.64 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-29 05:38:23,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=249088.0, ans=0.125 +2024-08-29 05:38:36,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.88 vs. limit=10.0 +2024-08-29 05:38:40,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=249141.33333333334, ans=0.125 +2024-08-29 05:39:28,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249248.0, ans=0.125 +2024-08-29 05:39:36,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=249301.33333333334, ans=0.05 +2024-08-29 05:39:41,147 INFO [train.py:1114] (2/4) Epoch 19, batch 1950, loss[loss=0.1656, simple_loss=0.2437, pruned_loss=0.03082, ctc_loss=0.0649, over 19576.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2609, pruned_loss=0.03976, ctc_loss=0.07441, over 3869360.89 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:40:02,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249354.66666666666, ans=0.1 +2024-08-29 05:40:22,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249461.33333333334, ans=0.125 +2024-08-29 05:40:44,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=249514.66666666666, ans=0.125 +2024-08-29 05:40:45,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.51 vs. limit=6.0 +2024-08-29 05:40:45,437 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.357e+02 1.563e+02 1.867e+02 4.467e+02, threshold=3.126e+02, percent-clipped=1.0 +2024-08-29 05:40:48,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249514.66666666666, ans=0.125 +2024-08-29 05:41:02,857 INFO [train.py:1114] (2/4) Epoch 19, batch 2000, loss[loss=0.1648, simple_loss=0.231, pruned_loss=0.03588, ctc_loss=0.06724, over 19698.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2616, pruned_loss=0.04007, ctc_loss=0.07501, over 3854395.22 frames. 
], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:41:37,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=249728.0, ans=0.07 +2024-08-29 05:41:39,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249728.0, ans=0.0 +2024-08-29 05:42:25,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249728.0, ans=0.0 +2024-08-29 05:42:35,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 05:42:45,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.82 vs. limit=15.0 +2024-08-29 05:42:47,478 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.03 vs. limit=15.0 +2024-08-29 05:42:48,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=249834.66666666666, ans=0.2 +2024-08-29 05:42:58,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249888.0, ans=0.1 +2024-08-29 05:42:58,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=249888.0, ans=0.125 +2024-08-29 05:42:58,871 INFO [train.py:1114] (2/4) Epoch 19, batch 2050, loss[loss=0.1499, simple_loss=0.2241, pruned_loss=0.02723, ctc_loss=0.05279, over 19725.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2604, pruned_loss=0.03974, ctc_loss=0.07432, over 3851573.45 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:43:06,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=249941.33333333334, ans=0.2 +2024-08-29 05:43:11,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=249941.33333333334, ans=0.0 +2024-08-29 05:43:24,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=250048.0, ans=0.125 +2024-08-29 05:43:26,433 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.469e+02 1.713e+02 2.068e+02 3.370e+02, threshold=3.427e+02, percent-clipped=2.0 +2024-08-29 05:43:27,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250048.0, ans=0.1 +2024-08-29 05:43:39,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.63 vs. limit=5.0 +2024-08-29 05:43:41,989 INFO [train.py:1114] (2/4) Epoch 19, batch 2100, loss[loss=0.1693, simple_loss=0.2565, pruned_loss=0.0292, ctc_loss=0.05904, over 19766.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2594, pruned_loss=0.03916, ctc_loss=0.07336, over 3858392.60 frames. 
], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 05:43:57,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=250208.0, ans=0.0 +2024-08-29 05:44:01,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250208.0, ans=0.125 +2024-08-29 05:44:19,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250261.33333333334, ans=0.1 +2024-08-29 05:44:49,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=250261.33333333334, ans=0.0 +2024-08-29 05:44:51,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=250314.66666666666, ans=0.125 +2024-08-29 05:44:53,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=250314.66666666666, ans=0.125 +2024-08-29 05:44:58,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=250368.0, ans=0.0 +2024-08-29 05:45:09,916 INFO [train.py:1114] (2/4) Epoch 19, batch 2150, loss[loss=0.1778, simple_loss=0.2498, pruned_loss=0.03958, ctc_loss=0.06675, over 19837.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2592, pruned_loss=0.03927, ctc_loss=0.07327, over 3869130.50 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 05:45:10,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=250421.33333333334, ans=0.04949747468305833 +2024-08-29 05:45:10,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-29 05:45:14,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-29 05:45:17,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-29 05:45:23,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-29 05:45:42,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=250581.33333333334, ans=0.2 +2024-08-29 05:45:44,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.532e+02 1.812e+02 2.283e+02 4.768e+02, threshold=3.624e+02, percent-clipped=7.0 +2024-08-29 05:45:45,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250581.33333333334, ans=0.125 +2024-08-29 05:46:21,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250634.66666666666, ans=0.1 +2024-08-29 05:46:31,672 INFO [train.py:1114] (2/4) Epoch 19, batch 2200, loss[loss=0.189, simple_loss=0.2662, pruned_loss=0.04007, ctc_loss=0.07921, over 19582.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2593, pruned_loss=0.03927, ctc_loss=0.07339, over 3867627.29 frames. 
], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:46:36,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250688.0, ans=0.125 +2024-08-29 05:47:27,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=250794.66666666666, ans=0.125 +2024-08-29 05:48:12,298 INFO [train.py:1114] (2/4) Epoch 19, batch 2250, loss[loss=0.1881, simple_loss=0.2637, pruned_loss=0.04112, ctc_loss=0.07584, over 19610.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2592, pruned_loss=0.03919, ctc_loss=0.07314, over 3867632.10 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:48:15,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250954.66666666666, ans=0.1 +2024-08-29 05:48:29,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251061.33333333334, ans=0.1 +2024-08-29 05:48:30,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=251061.33333333334, ans=0.125 +2024-08-29 05:48:30,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.68 vs. limit=22.5 +2024-08-29 05:48:39,386 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.496e+02 1.836e+02 2.405e+02 3.916e+02, threshold=3.673e+02, percent-clipped=1.0 +2024-08-29 05:49:01,369 INFO [train.py:1114] (2/4) Epoch 19, batch 2300, loss[loss=0.1661, simple_loss=0.2419, pruned_loss=0.03277, ctc_loss=0.06192, over 19515.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2586, pruned_loss=0.03928, ctc_loss=0.07333, over 3861449.56 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:49:04,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=251221.33333333334, ans=0.125 +2024-08-29 05:50:47,616 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:51:09,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=251434.66666666666, ans=0.04949747468305833 +2024-08-29 05:51:10,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251434.66666666666, ans=0.1 +2024-08-29 05:51:12,209 INFO [train.py:1114] (2/4) Epoch 19, batch 2350, loss[loss=0.1993, simple_loss=0.2726, pruned_loss=0.0462, ctc_loss=0.08376, over 19701.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2587, pruned_loss=0.03965, ctc_loss=0.07372, over 3864096.56 frames. 
], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 05:51:26,312 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:51:33,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=251541.33333333334, ans=0.0 +2024-08-29 05:51:38,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251594.66666666666, ans=0.125 +2024-08-29 05:51:42,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251594.66666666666, ans=0.1 +2024-08-29 05:51:52,855 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.480e+02 1.867e+02 2.502e+02 4.275e+02, threshold=3.733e+02, percent-clipped=4.0 +2024-08-29 05:51:56,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.30 vs. limit=6.0 +2024-08-29 05:52:05,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=251648.0, ans=0.125 +2024-08-29 05:52:08,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-29 05:52:18,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-08-29 05:52:19,090 INFO [train.py:1114] (2/4) Epoch 19, batch 2400, loss[loss=0.1738, simple_loss=0.2507, pruned_loss=0.03488, ctc_loss=0.06772, over 19333.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2602, pruned_loss=0.04022, ctc_loss=0.07478, over 3857815.16 frames. ], batch size: 71, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 05:52:55,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.31 vs. limit=15.0 +2024-08-29 05:53:05,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=251808.0, ans=0.125 +2024-08-29 05:53:13,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=251808.0, ans=0.2 +2024-08-29 05:53:16,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.44 vs. limit=15.0 +2024-08-29 05:53:37,910 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:53:49,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251968.0, ans=0.1 +2024-08-29 05:53:55,387 INFO [train.py:1114] (2/4) Epoch 19, batch 2450, loss[loss=0.2621, simple_loss=0.3037, pruned_loss=0.08012, ctc_loss=0.1506, over 13397.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2641, pruned_loss=0.04268, ctc_loss=0.07969, over 3731230.38 frames. 
], batch size: 140, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 05:54:00,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=252021.33333333334, ans=0.2 +2024-08-29 05:54:02,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=252021.33333333334, ans=0.0 +2024-08-29 05:54:23,272 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.449e+02 1.688e+02 1.808e+02 3.489e+02, threshold=3.376e+02, percent-clipped=0.0 +2024-08-29 05:54:27,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=252181.33333333334, ans=0.125 +2024-08-29 05:55:27,193 INFO [train.py:1114] (2/4) Epoch 20, batch 0, loss[loss=0.1817, simple_loss=0.254, pruned_loss=0.03948, ctc_loss=0.07596, over 19812.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.254, pruned_loss=0.03948, ctc_loss=0.07596, over 19812.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 05:55:27,193 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 05:55:51,133 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9998, 3.7153, 3.5060, 3.5385], device='cuda:2') +2024-08-29 05:55:55,986 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1713, simple_loss=0.2633, pruned_loss=0.0297, ctc_loss=0.04995, over 944034.00 frames. +2024-08-29 05:55:55,988 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12835MB +2024-08-29 05:56:03,902 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.85 vs. limit=15.0 +2024-08-29 05:56:06,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252288.0, ans=0.125 +2024-08-29 05:56:20,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252288.0, ans=0.1 +2024-08-29 05:56:42,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=252394.66666666666, ans=0.125 +2024-08-29 05:56:51,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=252448.0, ans=0.125 +2024-08-29 05:56:52,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=252448.0, ans=0.0 +2024-08-29 05:56:53,901 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:57:36,252 INFO [train.py:1114] (2/4) Epoch 20, batch 50, loss[loss=0.1612, simple_loss=0.2324, pruned_loss=0.03256, ctc_loss=0.06205, over 19724.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2621, pruned_loss=0.04009, ctc_loss=0.07585, over 844974.64 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 16.0 +2024-08-29 05:57:54,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252554.66666666666, ans=0.1 +2024-08-29 05:57:54,301 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.65 vs. 
limit=15.0 +2024-08-29 05:58:06,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0 +2024-08-29 05:58:33,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=252661.33333333334, ans=0.07 +2024-08-29 05:58:38,097 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.450e+02 1.693e+02 1.984e+02 3.027e+02, threshold=3.386e+02, percent-clipped=0.0 +2024-08-29 05:58:48,740 INFO [train.py:1114] (2/4) Epoch 20, batch 100, loss[loss=0.1637, simple_loss=0.2433, pruned_loss=0.03086, ctc_loss=0.05594, over 19701.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2629, pruned_loss=0.04052, ctc_loss=0.07567, over 1499556.84 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 16.0 +2024-08-29 05:58:58,208 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:59:39,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=252981.33333333334, ans=0.025 +2024-08-29 06:00:02,043 INFO [train.py:1114] (2/4) Epoch 20, batch 150, loss[loss=0.168, simple_loss=0.2406, pruned_loss=0.03419, ctc_loss=0.06735, over 19725.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2604, pruned_loss=0.03958, ctc_loss=0.07412, over 2027658.68 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 16.0 +2024-08-29 06:00:02,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=253034.66666666666, ans=0.125 +2024-08-29 06:00:08,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.49 vs. limit=22.5 +2024-08-29 06:00:41,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=253088.0, ans=0.0 +2024-08-29 06:00:52,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253088.0, ans=0.125 +2024-08-29 06:01:00,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=253141.33333333334, ans=0.125 +2024-08-29 06:01:19,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.374e+02 1.536e+02 1.873e+02 3.368e+02, threshold=3.073e+02, percent-clipped=0.0 +2024-08-29 06:01:25,925 INFO [train.py:1114] (2/4) Epoch 20, batch 200, loss[loss=0.2045, simple_loss=0.2825, pruned_loss=0.04499, ctc_loss=0.09147, over 18199.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2594, pruned_loss=0.03967, ctc_loss=0.07409, over 2435022.78 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:01:45,729 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. 
limit=15.0 +2024-08-29 06:01:54,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=253354.66666666666, ans=0.07 +2024-08-29 06:02:01,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=253408.0, ans=0.2 +2024-08-29 06:02:04,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253408.0, ans=0.1 +2024-08-29 06:02:34,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=253408.0, ans=0.2 +2024-08-29 06:03:25,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253514.66666666666, ans=0.125 +2024-08-29 06:03:51,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253568.0, ans=0.1 +2024-08-29 06:03:51,879 INFO [train.py:1114] (2/4) Epoch 20, batch 250, loss[loss=0.1943, simple_loss=0.2712, pruned_loss=0.04388, ctc_loss=0.07395, over 19424.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2592, pruned_loss=0.03915, ctc_loss=0.07312, over 2755868.39 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:04:39,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=253621.33333333334, ans=0.0 +2024-08-29 06:04:43,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 06:04:45,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253674.66666666666, ans=0.0 +2024-08-29 06:04:47,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=253674.66666666666, ans=0.0 +2024-08-29 06:05:13,801 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.460e+02 1.674e+02 2.079e+02 4.615e+02, threshold=3.347e+02, percent-clipped=6.0 +2024-08-29 06:05:57,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0 +2024-08-29 06:06:03,009 INFO [train.py:1114] (2/4) Epoch 20, batch 300, loss[loss=0.1919, simple_loss=0.2701, pruned_loss=0.04128, ctc_loss=0.07772, over 19523.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2591, pruned_loss=0.03907, ctc_loss=0.07289, over 3002011.17 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:06:13,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=253888.0, ans=0.2 +2024-08-29 06:06:34,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253994.66666666666, ans=0.0 +2024-08-29 06:06:43,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.64 vs. 
limit=10.0 +2024-08-29 06:07:04,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=254048.0, ans=0.04949747468305833 +2024-08-29 06:07:06,193 INFO [train.py:1114] (2/4) Epoch 20, batch 350, loss[loss=0.174, simple_loss=0.2401, pruned_loss=0.03883, ctc_loss=0.0757, over 19767.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2588, pruned_loss=0.03892, ctc_loss=0.07267, over 3190861.06 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 06:07:08,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254101.33333333334, ans=0.1 +2024-08-29 06:07:20,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.69 vs. limit=15.0 +2024-08-29 06:07:23,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=254154.66666666666, ans=0.2 +2024-08-29 06:07:27,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=254208.0, ans=0.125 +2024-08-29 06:07:29,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=254208.0, ans=0.125 +2024-08-29 06:07:49,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=254314.66666666666, ans=0.0 +2024-08-29 06:07:50,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.501e+02 1.796e+02 2.192e+02 4.069e+02, threshold=3.593e+02, percent-clipped=5.0 +2024-08-29 06:07:58,757 INFO [train.py:1114] (2/4) Epoch 20, batch 400, loss[loss=0.1868, simple_loss=0.2722, pruned_loss=0.03754, ctc_loss=0.06569, over 19488.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2585, pruned_loss=0.03872, ctc_loss=0.07222, over 3341925.11 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 06:08:08,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=254421.33333333334, ans=0.0 +2024-08-29 06:08:43,930 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-29 06:08:47,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254581.33333333334, ans=0.125 +2024-08-29 06:08:51,104 INFO [train.py:1114] (2/4) Epoch 20, batch 450, loss[loss=0.1612, simple_loss=0.2481, pruned_loss=0.02649, ctc_loss=0.0532, over 19610.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2586, pruned_loss=0.03876, ctc_loss=0.07232, over 3450704.69 frames. 
], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 06:08:52,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=254634.66666666666, ans=0.0 +2024-08-29 06:09:14,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=254634.66666666666, ans=0.035 +2024-08-29 06:09:14,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254634.66666666666, ans=0.125 +2024-08-29 06:10:12,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=254741.33333333334, ans=0.125 +2024-08-29 06:10:30,943 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.420e+02 1.652e+02 2.202e+02 3.176e+02, threshold=3.303e+02, percent-clipped=0.0 +2024-08-29 06:10:51,259 INFO [train.py:1114] (2/4) Epoch 20, batch 500, loss[loss=0.198, simple_loss=0.2796, pruned_loss=0.04262, ctc_loss=0.07795, over 19677.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2574, pruned_loss=0.03826, ctc_loss=0.07145, over 3546812.11 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 8.0 +2024-08-29 06:11:09,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255008.0, ans=0.1 +2024-08-29 06:11:58,827 INFO [train.py:1114] (2/4) Epoch 20, batch 550, loss[loss=0.2042, simple_loss=0.274, pruned_loss=0.04936, ctc_loss=0.08939, over 19343.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2574, pruned_loss=0.03851, ctc_loss=0.07193, over 3609053.13 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 8.0 +2024-08-29 06:12:00,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=255168.0, ans=0.0 +2024-08-29 06:12:27,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=255274.66666666666, ans=0.125 +2024-08-29 06:12:38,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.36 vs. limit=15.0 +2024-08-29 06:12:58,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=15.0 +2024-08-29 06:13:03,364 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.429e+02 1.650e+02 1.993e+02 3.679e+02, threshold=3.299e+02, percent-clipped=2.0 +2024-08-29 06:13:08,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=255381.33333333334, ans=0.2 +2024-08-29 06:13:11,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=255434.66666666666, ans=0.0 +2024-08-29 06:13:12,590 INFO [train.py:1114] (2/4) Epoch 20, batch 600, loss[loss=0.1997, simple_loss=0.2837, pruned_loss=0.04259, ctc_loss=0.07623, over 19390.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2578, pruned_loss=0.0386, ctc_loss=0.0721, over 3665874.64 frames. 
], batch size: 67, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:13:56,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=255488.0, ans=0.0 +2024-08-29 06:13:56,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.53 vs. limit=15.0 +2024-08-29 06:14:23,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255541.33333333334, ans=0.1 +2024-08-29 06:14:36,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255594.66666666666, ans=0.1 +2024-08-29 06:14:37,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.50 vs. limit=15.0 +2024-08-29 06:15:09,599 INFO [train.py:1114] (2/4) Epoch 20, batch 650, loss[loss=0.1688, simple_loss=0.2431, pruned_loss=0.0345, ctc_loss=0.06398, over 19779.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2576, pruned_loss=0.03839, ctc_loss=0.07173, over 3715968.33 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:15:29,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=255808.0, ans=22.5 +2024-08-29 06:16:19,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255861.33333333334, ans=0.1 +2024-08-29 06:16:47,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=255914.66666666666, ans=0.125 +2024-08-29 06:16:48,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.504e+02 1.911e+02 2.476e+02 5.788e+02, threshold=3.821e+02, percent-clipped=14.0 +2024-08-29 06:16:53,253 INFO [train.py:1114] (2/4) Epoch 20, batch 700, loss[loss=0.1702, simple_loss=0.2459, pruned_loss=0.03388, ctc_loss=0.06673, over 19692.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2583, pruned_loss=0.0387, ctc_loss=0.07244, over 3747565.94 frames. 
], batch size: 51, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:16:57,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=255968.0, ans=0.125 +2024-08-29 06:17:00,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=255968.0, ans=0.2 +2024-08-29 06:17:09,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256021.33333333334, ans=0.1 +2024-08-29 06:17:15,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=256074.66666666666, ans=0.125 +2024-08-29 06:17:18,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256074.66666666666, ans=0.1 +2024-08-29 06:17:42,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-29 06:17:44,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256234.66666666666, ans=0.0 +2024-08-29 06:17:45,327 INFO [train.py:1114] (2/4) Epoch 20, batch 750, loss[loss=0.1739, simple_loss=0.2619, pruned_loss=0.03096, ctc_loss=0.05987, over 19505.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2583, pruned_loss=0.03873, ctc_loss=0.07239, over 3773491.09 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 8.0 +2024-08-29 06:17:50,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=256234.66666666666, ans=0.2 +2024-08-29 06:18:10,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-29 06:18:14,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=256394.66666666666, ans=0.2 +2024-08-29 06:18:28,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=256448.0, ans=0.0 +2024-08-29 06:18:31,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.481e+02 1.912e+02 2.487e+02 4.029e+02, threshold=3.825e+02, percent-clipped=2.0 +2024-08-29 06:18:37,694 INFO [train.py:1114] (2/4) Epoch 20, batch 800, loss[loss=0.1696, simple_loss=0.2408, pruned_loss=0.03641, ctc_loss=0.06415, over 19396.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2585, pruned_loss=0.03902, ctc_loss=0.07279, over 3795249.99 frames. 
], batch size: 48, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 06:18:52,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256554.66666666666, ans=0.125 +2024-08-29 06:19:08,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256661.33333333334, ans=0.1 +2024-08-29 06:19:39,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-29 06:19:44,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=256714.66666666666, ans=0.2 +2024-08-29 06:19:48,524 INFO [train.py:1114] (2/4) Epoch 20, batch 850, loss[loss=0.186, simple_loss=0.2706, pruned_loss=0.03604, ctc_loss=0.07357, over 19678.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2581, pruned_loss=0.03867, ctc_loss=0.07228, over 3813468.42 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:20:10,795 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:20:41,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.422e+02 1.634e+02 1.945e+02 3.890e+02, threshold=3.267e+02, percent-clipped=1.0 +2024-08-29 06:20:49,527 INFO [train.py:1114] (2/4) Epoch 20, batch 900, loss[loss=0.1699, simple_loss=0.2384, pruned_loss=0.03733, ctc_loss=0.06682, over 19823.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2584, pruned_loss=0.03903, ctc_loss=0.07281, over 3818082.07 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:20:49,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257034.66666666666, ans=0.1 +2024-08-29 06:20:54,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=257034.66666666666, ans=0.2 +2024-08-29 06:20:58,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=257088.0, ans=0.125 +2024-08-29 06:22:18,954 INFO [train.py:1114] (2/4) Epoch 20, batch 950, loss[loss=0.1532, simple_loss=0.229, pruned_loss=0.02783, ctc_loss=0.05459, over 19523.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2585, pruned_loss=0.03919, ctc_loss=0.073, over 3819962.07 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:22:25,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.30 vs. 
limit=22.5 +2024-08-29 06:22:47,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-29 06:22:48,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-29 06:23:09,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=257408.0, ans=0.125 +2024-08-29 06:23:32,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257408.0, ans=0.125 +2024-08-29 06:23:48,050 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.398e+02 1.599e+02 1.937e+02 2.870e+02, threshold=3.197e+02, percent-clipped=0.0 +2024-08-29 06:23:50,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=257514.66666666666, ans=0.125 +2024-08-29 06:23:52,547 INFO [train.py:1114] (2/4) Epoch 20, batch 1000, loss[loss=0.1666, simple_loss=0.2445, pruned_loss=0.03206, ctc_loss=0.06128, over 19868.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2592, pruned_loss=0.03932, ctc_loss=0.07331, over 3817270.55 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:24:18,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257621.33333333334, ans=0.125 +2024-08-29 06:24:20,442 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:24:38,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.62 vs. limit=10.0 +2024-08-29 06:24:45,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=257728.0, ans=0.0 +2024-08-29 06:24:52,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.42 vs. limit=15.0 +2024-08-29 06:24:56,441 INFO [train.py:1114] (2/4) Epoch 20, batch 1050, loss[loss=0.1766, simple_loss=0.2658, pruned_loss=0.03166, ctc_loss=0.06046, over 19844.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2584, pruned_loss=0.03897, ctc_loss=0.07271, over 3823601.14 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:24:56,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.84 vs. limit=22.5 +2024-08-29 06:25:08,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=257888.0, ans=0.125 +2024-08-29 06:25:12,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. 
limit=15.0 +2024-08-29 06:25:13,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=257888.0, ans=0.2 +2024-08-29 06:25:54,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.418e+02 1.590e+02 1.994e+02 3.641e+02, threshold=3.179e+02, percent-clipped=3.0 +2024-08-29 06:25:58,793 INFO [train.py:1114] (2/4) Epoch 20, batch 1100, loss[loss=0.168, simple_loss=0.2463, pruned_loss=0.03298, ctc_loss=0.05938, over 19588.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2581, pruned_loss=0.03854, ctc_loss=0.07208, over 3830386.52 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:26:25,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-08-29 06:26:29,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.83 vs. limit=15.0 +2024-08-29 06:27:30,561 INFO [train.py:1114] (2/4) Epoch 20, batch 1150, loss[loss=0.1752, simple_loss=0.2544, pruned_loss=0.03547, ctc_loss=0.06281, over 19588.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.258, pruned_loss=0.03868, ctc_loss=0.07228, over 3830406.24 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 16.0 +2024-08-29 06:27:41,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.31 vs. limit=22.5 +2024-08-29 06:27:50,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=258474.66666666666, ans=0.2 +2024-08-29 06:28:04,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=258528.0, ans=0.2 +2024-08-29 06:28:07,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=258528.0, ans=0.125 +2024-08-29 06:28:13,436 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.421e+02 1.745e+02 2.163e+02 3.118e+02, threshold=3.490e+02, percent-clipped=0.0 +2024-08-29 06:28:16,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258581.33333333334, ans=0.125 +2024-08-29 06:28:18,019 INFO [train.py:1114] (2/4) Epoch 20, batch 1200, loss[loss=0.1877, simple_loss=0.2699, pruned_loss=0.03846, ctc_loss=0.07147, over 19834.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2588, pruned_loss=0.03882, ctc_loss=0.07265, over 3826157.79 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 06:29:05,182 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0 +2024-08-29 06:29:11,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=258741.33333333334, ans=0.0 +2024-08-29 06:30:22,532 INFO [train.py:1114] (2/4) Epoch 20, batch 1250, loss[loss=0.2037, simple_loss=0.2763, pruned_loss=0.04792, ctc_loss=0.08808, over 19528.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2594, pruned_loss=0.03899, ctc_loss=0.07268, over 3844326.38 frames. 
], batch size: 61, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:30:27,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=258901.33333333334, ans=0.125 +2024-08-29 06:30:33,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258954.66666666666, ans=0.0 +2024-08-29 06:30:38,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258954.66666666666, ans=0.125 +2024-08-29 06:30:59,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=259061.33333333334, ans=0.125 +2024-08-29 06:31:06,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259114.66666666666, ans=0.1 +2024-08-29 06:31:10,042 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.413e+02 1.610e+02 2.110e+02 3.599e+02, threshold=3.219e+02, percent-clipped=1.0 +2024-08-29 06:31:51,311 INFO [train.py:1114] (2/4) Epoch 20, batch 1300, loss[loss=0.2061, simple_loss=0.2844, pruned_loss=0.04673, ctc_loss=0.08567, over 18879.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2585, pruned_loss=0.0386, ctc_loss=0.07206, over 3848500.15 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:34:27,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259221.33333333334, ans=0.0 +2024-08-29 06:34:32,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259221.33333333334, ans=0.125 +2024-08-29 06:34:37,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=259274.66666666666, ans=0.125 +2024-08-29 06:34:47,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=259328.0, ans=0.025 +2024-08-29 06:35:50,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0 +2024-08-29 06:35:51,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259381.33333333334, ans=0.125 +2024-08-29 06:35:58,298 INFO [train.py:1114] (2/4) Epoch 20, batch 1350, loss[loss=0.1666, simple_loss=0.2542, pruned_loss=0.02873, ctc_loss=0.05385, over 19750.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2586, pruned_loss=0.03855, ctc_loss=0.07198, over 3859791.35 frames. 
], batch size: 54, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:36:14,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259488.0, ans=0.125 +2024-08-29 06:36:21,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259541.33333333334, ans=0.125 +2024-08-29 06:36:24,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=259541.33333333334, ans=0.0 +2024-08-29 06:36:24,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=259541.33333333334, ans=0.125 +2024-08-29 06:36:30,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=259594.66666666666, ans=0.2 +2024-08-29 06:36:37,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259648.0, ans=0.125 +2024-08-29 06:36:40,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259648.0, ans=0.125 +2024-08-29 06:36:41,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=259648.0, ans=0.0 +2024-08-29 06:36:45,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=259648.0, ans=0.07 +2024-08-29 06:36:45,723 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.394e+02 1.620e+02 1.985e+02 3.317e+02, threshold=3.241e+02, percent-clipped=2.0 +2024-08-29 06:36:48,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.31 vs. limit=15.0 +2024-08-29 06:36:49,859 INFO [train.py:1114] (2/4) Epoch 20, batch 1400, loss[loss=0.1482, simple_loss=0.2198, pruned_loss=0.0275, ctc_loss=0.05418, over 19673.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2586, pruned_loss=0.03868, ctc_loss=0.07212, over 3866412.50 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:36:52,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.97 vs. limit=10.0 +2024-08-29 06:36:54,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.95 vs. limit=22.5 +2024-08-29 06:36:59,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=259754.66666666666, ans=0.125 +2024-08-29 06:37:12,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=259754.66666666666, ans=0.04949747468305833 +2024-08-29 06:37:21,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=259808.0, ans=0.125 +2024-08-29 06:37:37,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.15 vs. 
limit=15.0 +2024-08-29 06:37:55,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=259914.66666666666, ans=0.5 +2024-08-29 06:38:00,929 INFO [train.py:1114] (2/4) Epoch 20, batch 1450, loss[loss=0.201, simple_loss=0.2758, pruned_loss=0.04567, ctc_loss=0.08744, over 19651.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2593, pruned_loss=0.03915, ctc_loss=0.07298, over 3863949.13 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:38:07,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=259968.0, ans=0.025 +2024-08-29 06:38:12,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260021.33333333334, ans=0.1 +2024-08-29 06:38:21,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.08 vs. limit=15.0 +2024-08-29 06:38:29,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 06:38:32,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.80 vs. limit=15.0 +2024-08-29 06:38:48,142 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.378e+02 1.589e+02 1.878e+02 3.405e+02, threshold=3.177e+02, percent-clipped=1.0 +2024-08-29 06:38:51,862 INFO [train.py:1114] (2/4) Epoch 20, batch 1500, loss[loss=0.176, simple_loss=0.2601, pruned_loss=0.03306, ctc_loss=0.0645, over 19614.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2594, pruned_loss=0.03902, ctc_loss=0.07278, over 3863395.95 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:38:52,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=260234.66666666666, ans=0.125 +2024-08-29 06:38:52,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260234.66666666666, ans=0.1 +2024-08-29 06:39:04,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=260288.0, ans=0.0 +2024-08-29 06:39:16,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=260341.33333333334, ans=0.0 +2024-08-29 06:39:42,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=260448.0, ans=0.0 +2024-08-29 06:40:24,665 INFO [train.py:1114] (2/4) Epoch 20, batch 1550, loss[loss=0.2042, simple_loss=0.2733, pruned_loss=0.0498, ctc_loss=0.08893, over 19609.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2589, pruned_loss=0.039, ctc_loss=0.07275, over 3847375.28 frames. 
], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 06:40:32,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260501.33333333334, ans=0.1 +2024-08-29 06:40:34,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=260554.66666666666, ans=10.0 +2024-08-29 06:40:40,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=260554.66666666666, ans=0.09899494936611666 +2024-08-29 06:41:09,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=260608.0, ans=0.09899494936611666 +2024-08-29 06:41:45,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260714.66666666666, ans=0.125 +2024-08-29 06:41:48,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=260714.66666666666, ans=0.95 +2024-08-29 06:41:57,056 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.471e+02 1.781e+02 2.261e+02 3.819e+02, threshold=3.562e+02, percent-clipped=6.0 +2024-08-29 06:42:30,552 INFO [train.py:1114] (2/4) Epoch 20, batch 1600, loss[loss=0.1891, simple_loss=0.2616, pruned_loss=0.04281, ctc_loss=0.07755, over 19838.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2584, pruned_loss=0.03891, ctc_loss=0.07259, over 3837212.03 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 06:42:32,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260768.0, ans=0.1 +2024-08-29 06:42:38,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.27 vs. limit=6.0 +2024-08-29 06:42:41,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=260821.33333333334, ans=0.2 +2024-08-29 06:42:52,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=260874.66666666666, ans=0.1 +2024-08-29 06:43:03,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260928.0, ans=0.125 +2024-08-29 06:43:20,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260981.33333333334, ans=0.125 +2024-08-29 06:43:21,817 INFO [train.py:1114] (2/4) Epoch 20, batch 1650, loss[loss=0.1829, simple_loss=0.2651, pruned_loss=0.03659, ctc_loss=0.0687, over 19667.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2582, pruned_loss=0.03903, ctc_loss=0.0727, over 3834348.45 frames. 
], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:43:40,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=261034.66666666666, ans=0.5 +2024-08-29 06:43:46,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=261088.0, ans=0.2 +2024-08-29 06:44:30,257 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.539e+02 1.831e+02 2.496e+02 4.278e+02, threshold=3.663e+02, percent-clipped=6.0 +2024-08-29 06:44:30,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261248.0, ans=0.0 +2024-08-29 06:44:32,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=261248.0, ans=0.0 +2024-08-29 06:44:33,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=261301.33333333334, ans=0.0 +2024-08-29 06:44:33,856 INFO [train.py:1114] (2/4) Epoch 20, batch 1700, loss[loss=0.1529, simple_loss=0.2224, pruned_loss=0.03024, ctc_loss=0.05721, over 19659.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2578, pruned_loss=0.03863, ctc_loss=0.07207, over 3848218.58 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:44:37,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261301.33333333334, ans=0.1 +2024-08-29 06:44:38,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261301.33333333334, ans=0.1 +2024-08-29 06:44:56,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=261354.66666666666, ans=0.125 +2024-08-29 06:45:00,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.17 vs. limit=22.5 +2024-08-29 06:45:08,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=261461.33333333334, ans=0.5 +2024-08-29 06:45:16,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261461.33333333334, ans=0.1 +2024-08-29 06:45:21,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=261461.33333333334, ans=0.125 +2024-08-29 06:45:36,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=261514.66666666666, ans=0.07 +2024-08-29 06:45:38,439 INFO [train.py:1114] (2/4) Epoch 20, batch 1750, loss[loss=0.1639, simple_loss=0.2342, pruned_loss=0.03402, ctc_loss=0.06409, over 19694.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2577, pruned_loss=0.03841, ctc_loss=0.0717, over 3853221.62 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:45:46,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.52 vs. 
limit=15.0 +2024-08-29 06:45:55,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261621.33333333334, ans=0.125 +2024-08-29 06:46:16,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=261728.0, ans=0.2 +2024-08-29 06:46:16,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261728.0, ans=0.125 +2024-08-29 06:46:33,064 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.489e+02 1.816e+02 2.382e+02 3.653e+02, threshold=3.632e+02, percent-clipped=0.0 +2024-08-29 06:46:36,581 INFO [train.py:1114] (2/4) Epoch 20, batch 1800, loss[loss=0.2023, simple_loss=0.2765, pruned_loss=0.0467, ctc_loss=0.0866, over 19607.00 frames. ], tot_loss[loss=0.182, simple_loss=0.258, pruned_loss=0.03862, ctc_loss=0.07208, over 3854716.77 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:46:41,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-29 06:46:43,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0 +2024-08-29 06:46:44,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=261834.66666666666, ans=0.0 +2024-08-29 06:46:56,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=261888.0, ans=0.0 +2024-08-29 06:46:56,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261888.0, ans=0.1 +2024-08-29 06:47:02,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=261941.33333333334, ans=0.05 +2024-08-29 06:47:21,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=261994.66666666666, ans=0.0 +2024-08-29 06:47:24,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=261994.66666666666, ans=0.0 +2024-08-29 06:47:36,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=262048.0, ans=0.125 +2024-08-29 06:47:36,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=262048.0, ans=0.1 +2024-08-29 06:47:36,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=262048.0, ans=0.2 +2024-08-29 06:47:43,624 INFO [train.py:1114] (2/4) Epoch 20, batch 1850, loss[loss=0.182, simple_loss=0.2647, pruned_loss=0.03578, ctc_loss=0.0695, over 19587.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2577, pruned_loss=0.03861, ctc_loss=0.07178, over 3858477.29 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:48:25,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.54 vs. 
limit=5.0 +2024-08-29 06:48:29,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262261.3333333333, ans=0.1 +2024-08-29 06:48:29,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262261.3333333333, ans=0.1 +2024-08-29 06:48:40,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.496e+02 1.744e+02 2.237e+02 4.849e+02, threshold=3.488e+02, percent-clipped=3.0 +2024-08-29 06:48:46,713 INFO [train.py:1114] (2/4) Epoch 20, batch 1900, loss[loss=0.1931, simple_loss=0.2714, pruned_loss=0.04228, ctc_loss=0.0755, over 19647.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2578, pruned_loss=0.03855, ctc_loss=0.07174, over 3863146.02 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:48:53,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=262368.0, ans=0.0 +2024-08-29 06:48:56,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=262368.0, ans=0.5 +2024-08-29 06:49:20,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=262421.3333333333, ans=0.025 +2024-08-29 06:49:35,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=262474.6666666667, ans=0.0 +2024-08-29 06:49:39,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=262474.6666666667, ans=0.125 +2024-08-29 06:50:00,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=262581.3333333333, ans=0.125 +2024-08-29 06:50:02,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=262581.3333333333, ans=0.0 +2024-08-29 06:50:04,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=262581.3333333333, ans=0.2 +2024-08-29 06:50:07,328 INFO [train.py:1114] (2/4) Epoch 20, batch 1950, loss[loss=0.1704, simple_loss=0.2439, pruned_loss=0.03545, ctc_loss=0.06486, over 19589.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2586, pruned_loss=0.03858, ctc_loss=0.07169, over 3871529.26 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:50:18,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=262688.0, ans=0.09899494936611666 +2024-08-29 06:50:50,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262741.3333333333, ans=0.125 +2024-08-29 06:50:52,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.21 vs. 
limit=12.0 +2024-08-29 06:50:58,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=262794.6666666667, ans=0.0 +2024-08-29 06:51:07,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=262848.0, ans=0.125 +2024-08-29 06:51:12,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.401e+02 1.548e+02 2.025e+02 3.566e+02, threshold=3.095e+02, percent-clipped=1.0 +2024-08-29 06:51:12,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262848.0, ans=0.1 +2024-08-29 06:51:14,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262848.0, ans=0.125 +2024-08-29 06:51:15,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=262901.3333333333, ans=0.025 +2024-08-29 06:51:15,973 INFO [train.py:1114] (2/4) Epoch 20, batch 2000, loss[loss=0.1493, simple_loss=0.2227, pruned_loss=0.02784, ctc_loss=0.05021, over 19617.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2592, pruned_loss=0.03908, ctc_loss=0.0726, over 3855804.95 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:52:00,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=263114.6666666667, ans=0.125 +2024-08-29 06:52:04,138 INFO [train.py:1114] (2/4) Epoch 20, batch 2050, loss[loss=0.184, simple_loss=0.2502, pruned_loss=0.0437, ctc_loss=0.07587, over 19726.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2584, pruned_loss=0.03901, ctc_loss=0.07253, over 3851100.32 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:52:16,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=263168.0, ans=0.0 +2024-08-29 06:52:21,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.01 vs. limit=10.0 +2024-08-29 06:53:03,400 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.460e+02 1.739e+02 2.291e+02 5.164e+02, threshold=3.479e+02, percent-clipped=12.0 +2024-08-29 06:53:03,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=12.0 +2024-08-29 06:53:06,964 INFO [train.py:1114] (2/4) Epoch 20, batch 2100, loss[loss=0.1836, simple_loss=0.2559, pruned_loss=0.04109, ctc_loss=0.07265, over 19766.00 frames. ], tot_loss[loss=0.182, simple_loss=0.258, pruned_loss=0.03864, ctc_loss=0.07187, over 3857313.01 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:53:07,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.52 vs. limit=10.0 +2024-08-29 06:53:10,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=263434.6666666667, ans=0.125 +2024-08-29 06:54:01,636 INFO [train.py:1114] (2/4) Epoch 20, batch 2150, loss[loss=0.1789, simple_loss=0.2564, pruned_loss=0.03714, ctc_loss=0.06789, over 19843.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2573, pruned_loss=0.03832, ctc_loss=0.07135, over 3867882.17 frames. 
], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:54:11,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263754.6666666667, ans=0.125 +2024-08-29 06:54:15,249 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:54:18,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=263754.6666666667, ans=0.125 +2024-08-29 06:54:33,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263861.3333333333, ans=0.1 +2024-08-29 06:54:33,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-08-29 06:54:44,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.492e+02 1.849e+02 2.492e+02 5.041e+02, threshold=3.698e+02, percent-clipped=4.0 +2024-08-29 06:55:33,942 INFO [train.py:1114] (2/4) Epoch 20, batch 2200, loss[loss=0.1851, simple_loss=0.261, pruned_loss=0.03956, ctc_loss=0.07502, over 19596.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2574, pruned_loss=0.03829, ctc_loss=0.0713, over 3866356.65 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:55:39,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=263968.0, ans=0.0 +2024-08-29 06:55:41,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=263968.0, ans=0.07 +2024-08-29 06:55:56,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=264021.3333333333, ans=0.125 +2024-08-29 06:55:58,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.16 vs. limit=15.0 +2024-08-29 06:56:00,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=264021.3333333333, ans=0.125 +2024-08-29 06:56:07,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=264074.6666666667, ans=0.1 +2024-08-29 06:56:29,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.08 vs. limit=15.0 +2024-08-29 06:56:38,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.71 vs. limit=22.5 +2024-08-29 06:56:38,506 INFO [train.py:1114] (2/4) Epoch 20, batch 2250, loss[loss=0.185, simple_loss=0.2728, pruned_loss=0.03473, ctc_loss=0.06959, over 19614.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2585, pruned_loss=0.03858, ctc_loss=0.07192, over 3866128.11 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:56:41,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=264234.6666666667, ans=0.0 +2024-08-29 06:57:35,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=264394.6666666667, ans=0.0 +2024-08-29 06:57:58,416 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.422e+02 1.843e+02 2.549e+02 5.039e+02, threshold=3.686e+02, percent-clipped=5.0 +2024-08-29 06:57:58,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264448.0, ans=0.1 +2024-08-29 06:58:01,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.10 vs. limit=6.0 +2024-08-29 06:58:01,858 INFO [train.py:1114] (2/4) Epoch 20, batch 2300, loss[loss=0.1656, simple_loss=0.2475, pruned_loss=0.031, ctc_loss=0.05423, over 19497.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2582, pruned_loss=0.03885, ctc_loss=0.07236, over 3860959.48 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:58:02,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-29 06:59:12,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.28 vs. limit=15.0 +2024-08-29 06:59:15,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=264608.0, ans=0.0 +2024-08-29 06:59:26,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=264661.3333333333, ans=0.125 +2024-08-29 06:59:28,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=264661.3333333333, ans=0.0 +2024-08-29 06:59:29,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.40 vs. limit=22.5 +2024-08-29 06:59:36,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264714.6666666667, ans=0.1 +2024-08-29 07:00:03,987 INFO [train.py:1114] (2/4) Epoch 20, batch 2350, loss[loss=0.1952, simple_loss=0.271, pruned_loss=0.04382, ctc_loss=0.07972, over 19642.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2586, pruned_loss=0.03907, ctc_loss=0.07266, over 3863166.37 frames. 
], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:00:04,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=264768.0, ans=0.125 +2024-08-29 07:04:00,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=264768.0, ans=0.125 +2024-08-29 07:13:35,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=264928.0, ans=0.025 +2024-08-29 07:14:01,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=264928.0, ans=0.125 +2024-08-29 07:14:14,768 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.93 vs. limit=22.5 +2024-08-29 07:16:19,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264981.3333333333, ans=0.1 +2024-08-29 07:20:35,436 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.441e+02 1.702e+02 2.205e+02 4.204e+02, threshold=3.404e+02, percent-clipped=1.0 +2024-08-29 07:22:42,552 INFO [train.py:1114] (2/4) Epoch 20, batch 2400, loss[loss=0.2122, simple_loss=0.2817, pruned_loss=0.05284, ctc_loss=0.09234, over 19315.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2606, pruned_loss=0.04001, ctc_loss=0.07416, over 3857478.20 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:23:06,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265034.6666666667, ans=0.0 +2024-08-29 07:34:27,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=265088.0, ans=22.5 +2024-08-29 07:34:40,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265141.3333333333, ans=0.1 +2024-08-29 07:34:56,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-29 07:36:31,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265141.3333333333, ans=0.1 +2024-08-29 07:39:52,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=265194.6666666667, ans=0.0 +2024-08-29 07:50:39,240 INFO [train.py:1114] (2/4) Epoch 20, batch 2450, loss[loss=0.24, simple_loss=0.294, pruned_loss=0.06833, ctc_loss=0.1234, over 13610.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2637, pruned_loss=0.04215, ctc_loss=0.07868, over 3726544.93 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:51:15,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.78 vs. 
limit=15.0 +2024-08-29 07:57:27,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265408.0, ans=0.0 +2024-08-29 07:58:58,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265408.0, ans=0.125 +2024-08-29 08:06:23,787 INFO [train.py:1387] (2/4) Done! diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-3 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-3 new file mode 100644 index 0000000000000000000000000000000000000000..5ae113a0c1472c60c79e3debbda0939ba69ff5e5 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-01-48-53-3 @@ -0,0 +1,688 @@ +2024-08-29 01:48:53,993 INFO [train.py:1182] (3/4) Training started +2024-08-29 01:49:01,742 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-29 01:49:01,745 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2538.int.cedar.computecanada.ca', 'IP address': '172.16.145.231'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': 
PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 01:49:01,745 INFO [train.py:1212] (3/4) About to create model +2024-08-29 01:49:02,438 INFO [train.py:1216] (3/4) Number of model parameters: 65805511 +2024-08-29 01:49:02,438 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 01:49:57,692 INFO [train.py:1231] (3/4) Using DDP +2024-08-29 01:52:46,802 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-29 01:52:46,965 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-29 01:52:46,965 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 01:52:47,139 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-29 01:52:47,140 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-29 01:52:48,707 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-29 01:52:48,708 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-29 01:54:40,097 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-29 01:54:41,302 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-29 01:54:41,619 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-29 01:54:41,620 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 02:04:12,033 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.73 vs. limit=7.5 +2024-08-29 02:04:13,238 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12127MB +2024-08-29 02:04:14,348 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12127MB +2024-08-29 02:12:13,307 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 02:12:14,561 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 02:16:50,164 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 02:16:51,486 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 02:16:51,507 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-29 02:18:18,027 INFO [train.py:1114] (3/4) Epoch 19, batch 0, loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.04659, ctc_loss=0.08412, over 19809.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.04659, ctc_loss=0.08412, over 19809.00 frames. 
], batch size: 49, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 02:18:18,028 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 02:19:56,203 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-29 02:19:56,204 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 02:21:47,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.96 vs. limit=22.5 +2024-08-29 02:37:10,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239008.0, ans=0.125 +2024-08-29 02:38:47,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=239008.0, ans=0.015 +2024-08-29 02:58:01,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-29 02:58:01,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239114.66666666666, ans=0.1 +2024-08-29 02:59:00,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-08-29 03:07:33,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239168.0, ans=0.125 +2024-08-29 03:07:49,838 INFO [train.py:1114] (3/4) Epoch 19, batch 50, loss[loss=0.1626, simple_loss=0.2395, pruned_loss=0.03074, ctc_loss=0.06068, over 19714.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2635, pruned_loss=0.04204, ctc_loss=0.07864, over 844167.97 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:20:39,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=239328.0, ans=0.2 +2024-08-29 03:22:06,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239328.0, ans=0.1 +2024-08-29 03:22:18,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=239381.33333333334, ans=0.05 +2024-08-29 03:22:22,816 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.482e+02 1.734e+02 2.141e+02 3.301e+02, threshold=3.468e+02, percent-clipped=0.0 +2024-08-29 03:23:24,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=239381.33333333334, ans=10.0 +2024-08-29 03:37:40,965 INFO [train.py:1114] (3/4) Epoch 19, batch 100, loss[loss=0.1589, simple_loss=0.2369, pruned_loss=0.0295, ctc_loss=0.0545, over 19723.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2652, pruned_loss=0.04201, ctc_loss=0.07833, over 1498340.35 frames. 
], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:39:45,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239488.0, ans=0.125 +2024-08-29 03:43:18,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=239488.0, ans=0.025 +2024-08-29 03:43:26,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-29 03:45:20,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=239541.33333333334, ans=0.0 +2024-08-29 03:46:33,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239594.66666666666, ans=0.1 +2024-08-29 03:46:41,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=239594.66666666666, ans=0.125 +2024-08-29 03:49:21,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.92 vs. limit=10.0 +2024-08-29 03:52:11,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 03:54:35,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.50 vs. limit=15.0 +2024-08-29 03:56:43,841 INFO [train.py:1114] (3/4) Epoch 19, batch 150, loss[loss=0.178, simple_loss=0.2469, pruned_loss=0.0396, ctc_loss=0.07482, over 19736.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2626, pruned_loss=0.04117, ctc_loss=0.07691, over 2027311.34 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 03:57:01,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0 +2024-08-29 03:57:38,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.55 vs. limit=12.0 +2024-08-29 03:57:47,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.64 vs. 
limit=15.0 +2024-08-29 03:58:57,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239861.33333333334, ans=0.125 +2024-08-29 04:01:44,341 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.487e+02 1.911e+02 2.455e+02 3.758e+02, threshold=3.822e+02, percent-clipped=3.0 +2024-08-29 04:02:35,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-29 04:02:41,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 04:05:47,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-29 04:05:47,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239968.0, ans=0.125 +2024-08-29 04:08:44,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=239968.0, ans=0.125 +2024-08-29 04:10:04,544 INFO [train.py:1114] (3/4) Epoch 19, batch 200, loss[loss=0.1972, simple_loss=0.2718, pruned_loss=0.04447, ctc_loss=0.0841, over 18332.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2611, pruned_loss=0.04046, ctc_loss=0.07547, over 2435699.92 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:11:35,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-29 04:13:03,097 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.15 vs. limit=15.0 +2024-08-29 04:20:19,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=240181.33333333334, ans=0.5 +2024-08-29 04:20:32,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240181.33333333334, ans=0.1 +2024-08-29 04:23:53,212 INFO [train.py:1114] (3/4) Epoch 19, batch 250, loss[loss=0.201, simple_loss=0.2728, pruned_loss=0.04716, ctc_loss=0.08722, over 19409.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2604, pruned_loss=0.03992, ctc_loss=0.07426, over 2756001.92 frames. 
], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 04:24:21,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240288.0, ans=0.1 +2024-08-29 04:24:25,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=240288.0, ans=0.2 +2024-08-29 04:26:20,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240341.33333333334, ans=0.125 +2024-08-29 04:29:03,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1 +2024-08-29 04:29:04,212 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.446e+02 1.716e+02 2.275e+02 4.235e+02, threshold=3.432e+02, percent-clipped=4.0 +2024-08-29 04:29:16,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=240448.0, ans=0.2 +2024-08-29 04:29:22,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1 +2024-08-29 04:29:28,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 04:30:03,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240501.33333333334, ans=0.0 +2024-08-29 04:30:04,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.08 vs. limit=15.0 +2024-08-29 04:30:04,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=240501.33333333334, ans=10.0 +2024-08-29 04:31:34,651 INFO [train.py:1114] (3/4) Epoch 19, batch 300, loss[loss=0.2064, simple_loss=0.2838, pruned_loss=0.0471, ctc_loss=0.08704, over 19526.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03972, ctc_loss=0.07387, over 3001126.27 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:32:05,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-29 04:33:09,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.36 vs. limit=15.0 +2024-08-29 04:34:32,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=240661.33333333334, ans=0.0 +2024-08-29 04:34:41,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=240661.33333333334, ans=0.0 +2024-08-29 04:35:45,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-29 04:38:10,578 INFO [train.py:1114] (3/4) Epoch 19, batch 350, loss[loss=0.156, simple_loss=0.2315, pruned_loss=0.0292, ctc_loss=0.05523, over 19758.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2606, pruned_loss=0.03995, ctc_loss=0.07447, over 3191307.89 frames. 
], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 04:38:11,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240821.33333333334, ans=0.125 +2024-08-29 04:41:58,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=240928.0, ans=0.015 +2024-08-29 04:44:05,474 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.489e+02 1.897e+02 2.425e+02 4.045e+02, threshold=3.795e+02, percent-clipped=4.0 +2024-08-29 04:44:07,763 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.36 vs. limit=15.0 +2024-08-29 04:44:12,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240981.33333333334, ans=0.125 +2024-08-29 04:44:12,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240981.33333333334, ans=0.1 +2024-08-29 04:44:18,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-29 04:44:25,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-29 04:44:26,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=241034.66666666666, ans=0.07 +2024-08-29 04:44:29,338 INFO [train.py:1114] (3/4) Epoch 19, batch 400, loss[loss=0.1794, simple_loss=0.2616, pruned_loss=0.03605, ctc_loss=0.06291, over 19488.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2601, pruned_loss=0.03978, ctc_loss=0.07426, over 3343080.18 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:44:31,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=241088.0, ans=0.125 +2024-08-29 04:45:18,079 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:45:33,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=241248.0, ans=0.125 +2024-08-29 04:46:24,123 INFO [train.py:1114] (3/4) Epoch 19, batch 450, loss[loss=0.1999, simple_loss=0.2802, pruned_loss=0.04343, ctc_loss=0.08176, over 19594.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2602, pruned_loss=0.03998, ctc_loss=0.07451, over 3450659.63 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:46:57,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-29 04:46:59,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.91 vs. limit=22.5 +2024-08-29 04:47:41,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.65 vs. 
limit=12.0 +2024-08-29 04:47:41,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=241461.33333333334, ans=0.0 +2024-08-29 04:47:45,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241461.33333333334, ans=0.125 +2024-08-29 04:47:46,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=241461.33333333334, ans=0.0 +2024-08-29 04:47:49,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241461.33333333334, ans=0.1 +2024-08-29 04:47:52,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=241461.33333333334, ans=0.0 +2024-08-29 04:47:54,992 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.394e+02 1.625e+02 2.143e+02 3.810e+02, threshold=3.251e+02, percent-clipped=1.0 +2024-08-29 04:48:25,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=241568.0, ans=0.0 +2024-08-29 04:49:55,367 INFO [train.py:1114] (3/4) Epoch 19, batch 500, loss[loss=0.1865, simple_loss=0.2625, pruned_loss=0.04038, ctc_loss=0.07402, over 19678.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2589, pruned_loss=0.03934, ctc_loss=0.07341, over 3546334.18 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 04:52:58,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=241728.0, ans=0.2 +2024-08-29 04:53:06,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=241728.0, ans=0.125 +2024-08-29 04:53:07,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241728.0, ans=0.125 +2024-08-29 04:53:10,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=241781.33333333334, ans=0.0 +2024-08-29 04:54:05,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 04:54:11,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=241834.66666666666, ans=0.1 +2024-08-29 04:54:11,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 04:54:13,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=241834.66666666666, ans=0.0 +2024-08-29 04:54:15,400 INFO [train.py:1114] (3/4) Epoch 19, batch 550, loss[loss=0.2096, simple_loss=0.2812, pruned_loss=0.05052, ctc_loss=0.09268, over 19290.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2588, pruned_loss=0.03949, ctc_loss=0.07374, over 3608127.09 frames. 
], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:55:27,178 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 04:55:29,726 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.412e+02 1.703e+02 2.107e+02 3.697e+02, threshold=3.406e+02, percent-clipped=1.0 +2024-08-29 04:57:30,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.37 vs. limit=15.0 +2024-08-29 04:57:30,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=242048.0, ans=0.5 +2024-08-29 04:57:30,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=242048.0, ans=0.0 +2024-08-29 04:57:42,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.32 vs. limit=6.0 +2024-08-29 04:57:47,839 INFO [train.py:1114] (3/4) Epoch 19, batch 600, loss[loss=0.2074, simple_loss=0.2852, pruned_loss=0.04759, ctc_loss=0.08583, over 19361.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03968, ctc_loss=0.07401, over 3665674.33 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 04:58:36,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=242261.33333333334, ans=0.0 +2024-08-29 04:58:38,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242261.33333333334, ans=0.125 +2024-08-29 04:58:52,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=242314.66666666666, ans=0.125 +2024-08-29 04:58:52,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.98 vs. limit=6.0 +2024-08-29 04:58:59,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.23 vs. limit=22.5 +2024-08-29 04:59:44,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=242368.0, ans=0.125 +2024-08-29 05:00:06,344 INFO [train.py:1114] (3/4) Epoch 19, batch 650, loss[loss=0.1775, simple_loss=0.2561, pruned_loss=0.03595, ctc_loss=0.06749, over 19775.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.259, pruned_loss=0.03934, ctc_loss=0.07356, over 3716489.95 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:00:27,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.52 vs. 
limit=22.5 +2024-08-29 05:00:59,057 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.521e+02 1.842e+02 2.430e+02 3.637e+02, threshold=3.684e+02, percent-clipped=5.0 +2024-08-29 05:01:54,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=242581.33333333334, ans=0.025 +2024-08-29 05:01:56,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-29 05:02:33,318 INFO [train.py:1114] (3/4) Epoch 19, batch 700, loss[loss=0.1699, simple_loss=0.2462, pruned_loss=0.03421, ctc_loss=0.06319, over 19733.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2597, pruned_loss=0.0398, ctc_loss=0.07433, over 3749037.36 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 05:03:37,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242741.33333333334, ans=0.1 +2024-08-29 05:04:40,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=242848.0, ans=0.0 +2024-08-29 05:04:48,555 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:05:35,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=242901.33333333334, ans=0.125 +2024-08-29 05:05:46,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=242901.33333333334, ans=0.025 +2024-08-29 05:06:34,964 INFO [train.py:1114] (3/4) Epoch 19, batch 750, loss[loss=0.1748, simple_loss=0.2565, pruned_loss=0.03384, ctc_loss=0.06361, over 19495.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2593, pruned_loss=0.03976, ctc_loss=0.07439, over 3775690.54 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:06:36,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=242954.66666666666, ans=0.5 +2024-08-29 05:07:02,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243008.0, ans=0.1 +2024-08-29 05:07:18,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243114.66666666666, ans=0.125 +2024-08-29 05:07:19,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.493e+02 1.888e+02 2.375e+02 3.905e+02, threshold=3.776e+02, percent-clipped=3.0 +2024-08-29 05:07:30,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=243168.0, ans=0.125 +2024-08-29 05:08:09,973 INFO [train.py:1114] (3/4) Epoch 19, batch 800, loss[loss=0.1711, simple_loss=0.2435, pruned_loss=0.03594, ctc_loss=0.06716, over 19406.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2595, pruned_loss=0.03991, ctc_loss=0.07455, over 3797056.26 frames. 
], batch size: 48, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:08:15,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243221.33333333334, ans=0.1 +2024-08-29 05:08:41,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=243221.33333333334, ans=0.0 +2024-08-29 05:08:44,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=243274.66666666666, ans=0.0 +2024-08-29 05:09:13,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=243274.66666666666, ans=0.2 +2024-08-29 05:09:17,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.64 vs. limit=15.0 +2024-08-29 05:09:26,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=15.0 +2024-08-29 05:09:34,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=243381.33333333334, ans=0.2 +2024-08-29 05:09:37,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.18 vs. limit=6.0 +2024-08-29 05:09:46,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.24 vs. limit=6.0 +2024-08-29 05:09:49,244 INFO [train.py:1114] (3/4) Epoch 19, batch 850, loss[loss=0.1892, simple_loss=0.2733, pruned_loss=0.03852, ctc_loss=0.07012, over 19632.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.259, pruned_loss=0.03959, ctc_loss=0.07392, over 3816610.56 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 05:09:51,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=243488.0, ans=0.125 +2024-08-29 05:10:12,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243541.33333333334, ans=0.1 +2024-08-29 05:10:30,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=243648.0, ans=0.025 +2024-08-29 05:10:31,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.31 vs. 
limit=6.0 +2024-08-29 05:10:31,642 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.428e+02 1.590e+02 2.047e+02 2.882e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-29 05:10:31,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=243648.0, ans=0.125 +2024-08-29 05:10:39,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=243701.33333333334, ans=0.0 +2024-08-29 05:10:43,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 05:10:43,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 05:10:43,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=243701.33333333334, ans=0.0 +2024-08-29 05:10:49,652 INFO [train.py:1114] (3/4) Epoch 19, batch 900, loss[loss=0.1652, simple_loss=0.2401, pruned_loss=0.03295, ctc_loss=0.06077, over 19423.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2594, pruned_loss=0.04004, ctc_loss=0.07463, over 3820995.48 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:19,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=243914.66666666666, ans=0.07 +2024-08-29 05:11:30,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 05:11:41,915 INFO [train.py:1114] (3/4) Epoch 19, batch 950, loss[loss=0.1796, simple_loss=0.2517, pruned_loss=0.03944, ctc_loss=0.07187, over 19517.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2598, pruned_loss=0.04015, ctc_loss=0.07495, over 3820732.71 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 05:11:43,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=244021.33333333334, ans=0.125 +2024-08-29 05:11:58,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=244074.66666666666, ans=0.2 +2024-08-29 05:12:00,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244074.66666666666, ans=0.125 +2024-08-29 05:12:12,399 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.442e+02 1.730e+02 2.025e+02 3.837e+02, threshold=3.461e+02, percent-clipped=4.0 +2024-08-29 05:12:29,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.30 vs. limit=15.0 +2024-08-29 05:12:30,075 INFO [train.py:1114] (3/4) Epoch 19, batch 1000, loss[loss=0.1664, simple_loss=0.2444, pruned_loss=0.03233, ctc_loss=0.05961, over 19864.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2611, pruned_loss=0.04064, ctc_loss=0.07585, over 3816160.43 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:13:02,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. 
limit=15.0 +2024-08-29 05:13:03,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.48 vs. limit=15.0 +2024-08-29 05:13:07,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244448.0, ans=0.0 +2024-08-29 05:13:18,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-08-29 05:13:19,529 INFO [train.py:1114] (3/4) Epoch 19, batch 1050, loss[loss=0.1824, simple_loss=0.2642, pruned_loss=0.03691, ctc_loss=0.0671, over 19837.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2601, pruned_loss=0.04039, ctc_loss=0.07535, over 3823259.35 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 32.0 +2024-08-29 05:13:34,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=244608.0, ans=0.125 +2024-08-29 05:14:04,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.63 vs. limit=15.0 +2024-08-29 05:14:05,763 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.364e+02 1.577e+02 1.842e+02 2.540e+02, threshold=3.153e+02, percent-clipped=0.0 +2024-08-29 05:14:15,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=244768.0, ans=0.0 +2024-08-29 05:14:25,329 INFO [train.py:1114] (3/4) Epoch 19, batch 1100, loss[loss=0.1636, simple_loss=0.2445, pruned_loss=0.02975, ctc_loss=0.05812, over 19584.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2597, pruned_loss=0.04004, ctc_loss=0.07488, over 3831325.97 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:15:03,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.81 vs. limit=22.5 +2024-08-29 05:15:07,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=244981.33333333334, ans=0.0 +2024-08-29 05:15:33,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.74 vs. limit=15.0 +2024-08-29 05:16:51,069 INFO [train.py:1114] (3/4) Epoch 19, batch 1150, loss[loss=0.171, simple_loss=0.252, pruned_loss=0.03248, ctc_loss=0.06273, over 19576.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2594, pruned_loss=0.03985, ctc_loss=0.07449, over 3829831.16 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:17:13,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=245088.0, ans=0.2 +2024-08-29 05:18:08,424 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.413e+02 1.588e+02 2.044e+02 3.492e+02, threshold=3.177e+02, percent-clipped=5.0 +2024-08-29 05:19:06,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.13 vs. 
limit=15.0 +2024-08-29 05:19:08,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=245301.33333333334, ans=0.2 +2024-08-29 05:19:30,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=245301.33333333334, ans=0.0 +2024-08-29 05:19:41,715 INFO [train.py:1114] (3/4) Epoch 19, batch 1200, loss[loss=0.1819, simple_loss=0.2629, pruned_loss=0.03681, ctc_loss=0.06818, over 19836.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2604, pruned_loss=0.0402, ctc_loss=0.07507, over 3824867.73 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 05:20:08,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=245514.66666666666, ans=0.1 +2024-08-29 05:20:16,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=245514.66666666666, ans=0.04949747468305833 +2024-08-29 05:20:28,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245568.0, ans=0.0 +2024-08-29 05:20:29,522 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.23 vs. limit=15.0 +2024-08-29 05:20:31,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=245568.0, ans=0.2 +2024-08-29 05:20:54,845 INFO [train.py:1114] (3/4) Epoch 19, batch 1250, loss[loss=0.207, simple_loss=0.2744, pruned_loss=0.05126, ctc_loss=0.09265, over 19549.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2608, pruned_loss=0.04021, ctc_loss=0.07493, over 3843427.90 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:21:33,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.60 vs. limit=10.0 +2024-08-29 05:21:47,056 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.448e+02 1.786e+02 2.342e+02 3.930e+02, threshold=3.573e+02, percent-clipped=1.0 +2024-08-29 05:22:02,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.10 vs. limit=15.0 +2024-08-29 05:22:27,719 INFO [train.py:1114] (3/4) Epoch 19, batch 1300, loss[loss=0.1969, simple_loss=0.2717, pruned_loss=0.04483, ctc_loss=0.08138, over 18812.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2597, pruned_loss=0.03975, ctc_loss=0.07417, over 3845729.62 frames. 
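Note on the recurring WARNING [optim.py] lines: they report statistics used for adaptive gradient clipping. The five numbers after "grad-norm quartiles" appear to be the minimum, 25th percentile, median, 75th percentile, and maximum of recent gradient norms, "threshold" is the clipping cutoff derived from them, and "percent-clipped" is the fraction of recent steps whose gradients exceeded it. A minimal sketch of this kind of statistics-driven clipping, assuming a median-based rule; the class and the exact rule are illustrative, not the actual optim.py implementation:

    import torch
    from collections import deque

    class QuartileClipper:
        """Clip gradients to clipping_scale times the median of recent norms."""
        def __init__(self, clipping_scale=2.0, window=128):
            self.clipping_scale = clipping_scale
            self.norms = deque(maxlen=window)   # recent total grad norms

        def clip_(self, parameters):
            params = [p for p in parameters if p.grad is not None]
            total_norm = torch.norm(
                torch.stack([p.grad.detach().norm() for p in params])
            )
            self.norms.append(total_norm.item())
            history = sorted(self.norms)
            median = history[len(history) // 2]
            threshold = self.clipping_scale * median   # cf. Clipping_scale=2.0
            if total_norm > threshold:                 # counted as "clipped"
                for p in params:
                    p.grad.mul_(threshold / total_norm)
            return total_norm.item(), threshold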
], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 05:22:35,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=245888.0, ans=0.04949747468305833 +2024-08-29 05:22:40,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-29 05:23:00,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=245994.66666666666, ans=0.5 +2024-08-29 05:23:09,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=246048.0, ans=0.125 +2024-08-29 05:23:17,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246101.33333333334, ans=0.1 +2024-08-29 05:23:26,134 INFO [train.py:1114] (3/4) Epoch 19, batch 1350, loss[loss=0.162, simple_loss=0.243, pruned_loss=0.02922, ctc_loss=0.05647, over 19764.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2594, pruned_loss=0.03945, ctc_loss=0.07355, over 3857959.38 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:23:26,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-29 05:23:31,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246154.66666666666, ans=0.1 +2024-08-29 05:23:41,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=246208.0, ans=0.125 +2024-08-29 05:23:59,064 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.393e+02 1.600e+02 2.060e+02 3.630e+02, threshold=3.201e+02, percent-clipped=1.0 +2024-08-29 05:24:31,013 INFO [train.py:1114] (3/4) Epoch 19, batch 1400, loss[loss=0.1462, simple_loss=0.2202, pruned_loss=0.02583, ctc_loss=0.05111, over 19666.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2588, pruned_loss=0.03926, ctc_loss=0.07318, over 3865333.02 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:25:00,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=246528.0, ans=0.0 +2024-08-29 05:25:07,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246581.33333333334, ans=0.0 +2024-08-29 05:25:13,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=246581.33333333334, ans=0.0 +2024-08-29 05:25:23,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-29 05:25:26,812 INFO [train.py:1114] (3/4) Epoch 19, batch 1450, loss[loss=0.1998, simple_loss=0.2726, pruned_loss=0.04664, ctc_loss=0.08413, over 19675.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2595, pruned_loss=0.03957, ctc_loss=0.07386, over 3863411.14 frames. 
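Note on the INFO [scaling.py:214] ScheduledFloat lines: they print hyperparameters (dropout probabilities, skip rates, bypass scale minima) whose values are scheduled as a function of the cumulative batch_count shown in each entry. A hedged sketch of such a schedule, written as plain piecewise-linear interpolation with made-up breakpoints; the real scaling.py class carries more machinery:

    def scheduled_float(batch_count, points):
        """points: sorted (batch_count, value) pairs; linear interpolation."""
        x0, y0 = points[0]
        if batch_count <= x0:
            return y0
        for x1, y1 in points[1:]:
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
            x0, y0 = x1, y1
        return y0

    # e.g. a dropout prob annealed from 0.3 to 0.125 over the first 20k batches:
    # scheduled_float(243648.0, [(0, 0.3), (20000, 0.125)])  -> 0.125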
], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 05:25:30,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=246688.0, ans=0.025 +2024-08-29 05:25:45,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-29 05:25:57,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246741.33333333334, ans=0.125 +2024-08-29 05:26:21,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-29 05:26:22,443 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.397e+02 1.549e+02 1.935e+02 4.281e+02, threshold=3.099e+02, percent-clipped=1.0 +2024-08-29 05:26:22,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=246848.0, ans=0.2 +2024-08-29 05:26:48,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246901.33333333334, ans=0.125 +2024-08-29 05:26:50,099 INFO [train.py:1114] (3/4) Epoch 19, batch 1500, loss[loss=0.2088, simple_loss=0.2885, pruned_loss=0.04715, ctc_loss=0.08726, over 19559.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2599, pruned_loss=0.0396, ctc_loss=0.07408, over 3862802.71 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 05:27:10,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246954.66666666666, ans=0.125 +2024-08-29 05:27:49,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=247114.66666666666, ans=0.07 +2024-08-29 05:28:02,444 INFO [train.py:1114] (3/4) Epoch 19, batch 1550, loss[loss=0.2179, simple_loss=0.2938, pruned_loss=0.05114, ctc_loss=0.09899, over 19587.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.26, pruned_loss=0.03966, ctc_loss=0.07428, over 3847731.08 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 05:28:03,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247221.33333333334, ans=0.125 +2024-08-29 05:28:28,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.38 vs. limit=22.5 +2024-08-29 05:28:35,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=247328.0, ans=0.0 +2024-08-29 05:28:40,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=247381.33333333334, ans=0.2 +2024-08-29 05:28:45,732 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.405e+02 1.646e+02 2.216e+02 3.789e+02, threshold=3.291e+02, percent-clipped=3.0 +2024-08-29 05:28:49,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.46 vs. limit=15.0 +2024-08-29 05:30:04,277 INFO [train.py:1114] (3/4) Epoch 19, batch 1600, loss[loss=0.186, simple_loss=0.268, pruned_loss=0.03811, ctc_loss=0.06956, over 19839.00 frames. 
], tot_loss[loss=0.1847, simple_loss=0.26, pruned_loss=0.03981, ctc_loss=0.07451, over 3836938.66 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 05:30:05,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0 +2024-08-29 05:30:11,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.91 vs. limit=15.0 +2024-08-29 05:30:11,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=247488.0, ans=0.0 +2024-08-29 05:30:25,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=247541.33333333334, ans=0.125 +2024-08-29 05:30:31,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:30:46,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:30:47,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 05:30:47,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=247594.66666666666, ans=0.0 +2024-08-29 05:31:04,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=247701.33333333334, ans=0.5 +2024-08-29 05:31:08,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0 +2024-08-29 05:31:12,550 INFO [train.py:1114] (3/4) Epoch 19, batch 1650, loss[loss=0.1829, simple_loss=0.2693, pruned_loss=0.03532, ctc_loss=0.06466, over 19648.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2595, pruned_loss=0.03976, ctc_loss=0.07432, over 3833338.69 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 05:31:48,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=247808.0, ans=0.125 +2024-08-29 05:32:30,012 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.667e+02 2.011e+02 2.433e+02 4.037e+02, threshold=4.021e+02, percent-clipped=5.0 +2024-08-29 05:32:37,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=247914.66666666666, ans=0.09899494936611666 +2024-08-29 05:33:02,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0 +2024-08-29 05:33:13,437 INFO [train.py:1114] (3/4) Epoch 19, batch 1700, loss[loss=0.1526, simple_loss=0.2277, pruned_loss=0.02773, ctc_loss=0.05494, over 19679.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2593, pruned_loss=0.03936, ctc_loss=0.07356, over 3847403.35 frames. 
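Note on the INFO [scaling.py:1024] Whitening lines: each compares a per-module "whiteness" statistic of the activations against a limit (e.g. metric=11.02 vs. limit=15.0); when the metric exceeds the limit, the module nudges the feature covariance back toward isotropy. The log does not show the exact statistic; the sketch below uses one assumed formulation, the ratio mean(λ²)/mean(λ)² of the covariance eigenvalues, which equals 1.0 for perfectly white features and grows as the spectrum becomes lopsided:

    import torch

    def whiteness_metric(x):              # x: (num_frames, num_channels)
        x = x - x.mean(dim=0)
        cov = (x.T @ x) / x.shape[0]      # (C, C) feature covariance
        eigs = torch.linalg.eigvalsh(cov).clamp(min=1e-20)
        return ((eigs ** 2).mean() / eigs.mean() ** 2).item()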
], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:33:13,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=248021.33333333334, ans=0.2 +2024-08-29 05:33:14,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248021.33333333334, ans=0.1 +2024-08-29 05:33:21,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-08-29 05:33:24,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0 +2024-08-29 05:33:36,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=248128.0, ans=0.125 +2024-08-29 05:33:37,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=248128.0, ans=0.2 +2024-08-29 05:33:40,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248181.33333333334, ans=0.0 +2024-08-29 05:33:50,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248234.66666666666, ans=0.125 +2024-08-29 05:34:40,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=248234.66666666666, ans=0.125 +2024-08-29 05:34:40,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248234.66666666666, ans=0.1 +2024-08-29 05:34:41,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.83 vs. limit=15.0 +2024-08-29 05:34:43,943 INFO [train.py:1114] (3/4) Epoch 19, batch 1750, loss[loss=0.1729, simple_loss=0.2421, pruned_loss=0.03809, ctc_loss=0.06887, over 19628.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.259, pruned_loss=0.0393, ctc_loss=0.07348, over 3851796.27 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:34:57,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248341.33333333334, ans=0.1 +2024-08-29 05:35:09,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=248448.0, ans=0.125 +2024-08-29 05:35:11,460 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.518e+02 1.916e+02 2.294e+02 3.621e+02, threshold=3.832e+02, percent-clipped=0.0 +2024-08-29 05:35:25,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=248501.33333333334, ans=0.125 +2024-08-29 05:35:27,423 INFO [train.py:1114] (3/4) Epoch 19, batch 1800, loss[loss=0.1924, simple_loss=0.2743, pruned_loss=0.04075, ctc_loss=0.07244, over 19609.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2596, pruned_loss=0.03945, ctc_loss=0.07362, over 3853062.75 frames. 
], batch size: 55, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 05:35:31,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=248554.66666666666, ans=0.0 +2024-08-29 05:35:37,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248608.0, ans=0.125 +2024-08-29 05:35:39,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.81 vs. limit=22.5 +2024-08-29 05:35:45,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.41 vs. limit=15.0 +2024-08-29 05:36:10,856 INFO [train.py:1114] (3/4) Epoch 19, batch 1850, loss[loss=0.2055, simple_loss=0.2823, pruned_loss=0.0462, ctc_loss=0.09102, over 19589.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2592, pruned_loss=0.03936, ctc_loss=0.0734, over 3856845.91 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 32.0 +2024-08-29 05:36:14,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.99 vs. limit=15.0 +2024-08-29 05:37:01,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-08-29 05:37:02,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.46 vs. limit=22.5 +2024-08-29 05:37:10,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.97 vs. limit=22.5 +2024-08-29 05:37:51,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248874.66666666666, ans=0.0 +2024-08-29 05:37:51,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=248874.66666666666, ans=0.0 +2024-08-29 05:38:04,257 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.550e+02 2.027e+02 2.927e+02 4.792e+02, threshold=4.055e+02, percent-clipped=10.0 +2024-08-29 05:38:16,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=6.0 +2024-08-29 05:38:23,700 INFO [train.py:1114] (3/4) Epoch 19, batch 1900, loss[loss=0.1725, simple_loss=0.2507, pruned_loss=0.03359, ctc_loss=0.06796, over 19656.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2598, pruned_loss=0.03965, ctc_loss=0.0739, over 3862137.32 frames. 
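Note on the per-batch loss lines: the objective is broken into a pruned-transducer part (simple_loss for the linear "simple" joiner term, pruned_loss for the pruned-lattice term) plus an auxiliary ctc_loss. The logged totals are consistent with a fixed weighting of roughly 0.5 * simple_loss + pruned_loss + 0.2 * ctc_loss (e.g. 0.5*0.2594 + 0.04004 + 0.2*0.07463 ≈ 0.1847, matching the tot_loss above). A sketch of that combination, with the scales inferred from the logged numbers rather than read out of the recipe:

    def combined_loss(simple_loss, pruned_loss, ctc_loss,
                      simple_scale=0.5, ctc_scale=0.2):
        # Weights inferred by fitting the logged loss/simple/pruned/ctc values.
        return simple_scale * simple_loss + pruned_loss + ctc_scale * ctc_loss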
], batch size: 59, lr: 7.83e-03, grad_scale: 32.0 +2024-08-29 05:38:32,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=249088.0, ans=0.125 +2024-08-29 05:39:16,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=249194.66666666666, ans=0.0 +2024-08-29 05:39:36,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=249301.33333333334, ans=0.125 +2024-08-29 05:39:41,146 INFO [train.py:1114] (3/4) Epoch 19, batch 1950, loss[loss=0.1688, simple_loss=0.2375, pruned_loss=0.03647, ctc_loss=0.06818, over 19588.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2606, pruned_loss=0.03988, ctc_loss=0.07445, over 3871034.46 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:39:57,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-29 05:40:02,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-29 05:40:23,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=249461.33333333334, ans=0.2 +2024-08-29 05:40:37,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=249461.33333333334, ans=0.0 +2024-08-29 05:40:45,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.04 vs. limit=15.0 +2024-08-29 05:40:45,437 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.357e+02 1.563e+02 1.867e+02 4.467e+02, threshold=3.126e+02, percent-clipped=1.0 +2024-08-29 05:41:02,864 INFO [train.py:1114] (3/4) Epoch 19, batch 2000, loss[loss=0.1545, simple_loss=0.2249, pruned_loss=0.03115, ctc_loss=0.05449, over 19668.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2612, pruned_loss=0.04025, ctc_loss=0.07519, over 3856443.69 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:41:22,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=249621.33333333334, ans=0.125 +2024-08-29 05:41:39,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=249728.0, ans=0.125 +2024-08-29 05:42:20,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249728.0, ans=0.1 +2024-08-29 05:42:21,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=249728.0, ans=0.125 +2024-08-29 05:42:32,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 05:42:36,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.60 vs. 
limit=15.0 +2024-08-29 05:42:37,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=249834.66666666666, ans=0.125 +2024-08-29 05:42:46,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.04 vs. limit=10.0 +2024-08-29 05:42:58,866 INFO [train.py:1114] (3/4) Epoch 19, batch 2050, loss[loss=0.1744, simple_loss=0.2449, pruned_loss=0.03789, ctc_loss=0.07041, over 19732.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2604, pruned_loss=0.04026, ctc_loss=0.07518, over 3853661.78 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 05:43:00,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=249888.0, ans=0.05 +2024-08-29 05:43:19,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=249994.66666666666, ans=0.0 +2024-08-29 05:43:21,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-29 05:43:26,427 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.469e+02 1.713e+02 2.068e+02 3.370e+02, threshold=3.427e+02, percent-clipped=2.0 +2024-08-29 05:43:41,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.61 vs. limit=15.0 +2024-08-29 05:43:41,976 INFO [train.py:1114] (3/4) Epoch 19, batch 2100, loss[loss=0.2, simple_loss=0.27, pruned_loss=0.04717, ctc_loss=0.08932, over 19771.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2597, pruned_loss=0.03972, ctc_loss=0.07435, over 3860442.32 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 05:44:12,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250208.0, ans=0.1 +2024-08-29 05:44:52,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250314.66666666666, ans=0.1 +2024-08-29 05:44:57,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=250314.66666666666, ans=0.0 +2024-08-29 05:45:08,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=250368.0, ans=0.125 +2024-08-29 05:45:09,913 INFO [train.py:1114] (3/4) Epoch 19, batch 2150, loss[loss=0.1759, simple_loss=0.2515, pruned_loss=0.03701, ctc_loss=0.06559, over 19850.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2591, pruned_loss=0.0396, ctc_loss=0.07395, over 3871247.27 frames. 
], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 05:45:15,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=250421.33333333334, ans=0.0 +2024-08-29 05:45:17,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=250474.66666666666, ans=0.0 +2024-08-29 05:45:18,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=250474.66666666666, ans=0.0 +2024-08-29 05:45:28,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=250528.0, ans=0.125 +2024-08-29 05:45:31,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=250528.0, ans=0.5 +2024-08-29 05:45:44,556 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.532e+02 1.812e+02 2.283e+02 4.768e+02, threshold=3.624e+02, percent-clipped=7.0 +2024-08-29 05:46:18,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250634.66666666666, ans=0.1 +2024-08-29 05:46:31,676 INFO [train.py:1114] (3/4) Epoch 19, batch 2200, loss[loss=0.1821, simple_loss=0.2585, pruned_loss=0.03818, ctc_loss=0.07315, over 19589.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2589, pruned_loss=0.03936, ctc_loss=0.07337, over 3869966.80 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:47:31,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=250794.66666666666, ans=0.0 +2024-08-29 05:48:02,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.44 vs. limit=15.0 +2024-08-29 05:48:12,305 INFO [train.py:1114] (3/4) Epoch 19, batch 2250, loss[loss=0.2096, simple_loss=0.2884, pruned_loss=0.04773, ctc_loss=0.08848, over 19610.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.03951, ctc_loss=0.0736, over 3869114.50 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:48:34,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251061.33333333334, ans=0.1 +2024-08-29 05:48:35,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=251061.33333333334, ans=0.125 +2024-08-29 05:48:35,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.09 vs. limit=22.5 +2024-08-29 05:48:39,383 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.496e+02 1.836e+02 2.405e+02 3.916e+02, threshold=3.673e+02, percent-clipped=1.0 +2024-08-29 05:49:01,361 INFO [train.py:1114] (3/4) Epoch 19, batch 2300, loss[loss=0.1745, simple_loss=0.2469, pruned_loss=0.03743, ctc_loss=0.06816, over 19483.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2585, pruned_loss=0.03942, ctc_loss=0.07347, over 3863046.08 frames. 
], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 05:49:05,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=251221.33333333334, ans=0.125 +2024-08-29 05:50:31,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=251274.66666666666, ans=0.0 +2024-08-29 05:51:12,208 INFO [train.py:1114] (3/4) Epoch 19, batch 2350, loss[loss=0.2006, simple_loss=0.2756, pruned_loss=0.04513, ctc_loss=0.08838, over 19682.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.259, pruned_loss=0.03959, ctc_loss=0.07382, over 3865447.04 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 05:51:16,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=251488.0, ans=0.0 +2024-08-29 05:51:38,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=251594.66666666666, ans=0.125 +2024-08-29 05:51:44,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251648.0, ans=0.125 +2024-08-29 05:51:50,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251648.0, ans=0.1 +2024-08-29 05:51:51,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251648.0, ans=0.0 +2024-08-29 05:51:52,857 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.480e+02 1.867e+02 2.502e+02 4.275e+02, threshold=3.733e+02, percent-clipped=4.0 +2024-08-29 05:51:54,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251648.0, ans=0.1 +2024-08-29 05:52:13,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-29 05:52:15,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-29 05:52:19,091 INFO [train.py:1114] (3/4) Epoch 19, batch 2400, loss[loss=0.2072, simple_loss=0.2783, pruned_loss=0.05012, ctc_loss=0.0898, over 19361.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2613, pruned_loss=0.04053, ctc_loss=0.07538, over 3858986.01 frames. ], batch size: 71, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 05:52:20,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.88 vs. limit=15.0 +2024-08-29 05:53:04,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=251808.0, ans=0.125 +2024-08-29 05:53:20,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251861.33333333334, ans=0.125 +2024-08-29 05:53:35,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251861.33333333334, ans=0.125 +2024-08-29 05:53:55,385 INFO [train.py:1114] (3/4) Epoch 19, batch 2450, loss[loss=0.2481, simple_loss=0.2931, pruned_loss=0.07208, ctc_loss=0.1475, over 12773.00 frames. 
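Note on the tot_loss[...] summaries: they are not plain epoch averages. The fractional frame counts (e.g. "over 3730665.26 frames.") suggest a decayed, frame-weighted running average of the per-batch metrics. A minimal sketch of that bookkeeping, with an assumed decay constant:

    class RunningLoss:
        """Decayed frame-weighted average, as the fractional counts suggest."""
        def __init__(self, decay=0.999):
            self.decay = decay
            self.frames = 0.0
            self.weighted_sum = 0.0

        def update(self, loss, num_frames):
            self.frames = self.decay * self.frames + num_frames
            self.weighted_sum = self.decay * self.weighted_sum + loss * num_frames

        @property
        def value(self):
            return self.weighted_sum / max(self.frames, 1.0)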
], tot_loss[loss=0.1911, simple_loss=0.2647, pruned_loss=0.04281, ctc_loss=0.0798, over 3730665.26 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 05:54:09,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252074.66666666666, ans=0.1 +2024-08-29 05:54:23,269 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.449e+02 1.688e+02 1.808e+02 3.489e+02, threshold=3.376e+02, percent-clipped=0.0 +2024-08-29 05:55:27,186 INFO [train.py:1114] (3/4) Epoch 20, batch 0, loss[loss=0.1712, simple_loss=0.2478, pruned_loss=0.03408, ctc_loss=0.06615, over 19388.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2478, pruned_loss=0.03408, ctc_loss=0.06615, over 19388.00 frames. ], batch size: 48, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 05:55:27,186 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 05:55:50,379 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9830, 3.6963, 3.5014, 3.5343], device='cuda:3') +2024-08-29 05:55:55,984 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1713, simple_loss=0.2633, pruned_loss=0.0297, ctc_loss=0.04995, over 944034.00 frames. +2024-08-29 05:55:55,987 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12786MB +2024-08-29 05:56:06,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252288.0, ans=0.1 +2024-08-29 05:56:20,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=252288.0, ans=0.2 +2024-08-29 05:56:26,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.21 vs. limit=15.0 +2024-08-29 05:56:42,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252394.66666666666, ans=0.1 +2024-08-29 05:56:51,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252448.0, ans=0.125 +2024-08-29 05:56:53,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=252448.0, ans=0.1 +2024-08-29 05:57:36,253 INFO [train.py:1114] (3/4) Epoch 20, batch 50, loss[loss=0.1664, simple_loss=0.2415, pruned_loss=0.03333, ctc_loss=0.06186, over 19735.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2608, pruned_loss=0.03964, ctc_loss=0.07543, over 843375.14 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 16.0 +2024-08-29 05:58:11,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=252608.0, ans=10.0 +2024-08-29 05:58:20,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.77 vs. limit=22.5 +2024-08-29 05:58:33,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. 
limit=10.0 +2024-08-29 05:58:33,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252661.33333333334, ans=0.125 +2024-08-29 05:58:38,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.450e+02 1.693e+02 1.984e+02 3.027e+02, threshold=3.386e+02, percent-clipped=0.0 +2024-08-29 05:58:48,713 INFO [train.py:1114] (3/4) Epoch 20, batch 100, loss[loss=0.1648, simple_loss=0.2461, pruned_loss=0.02939, ctc_loss=0.06179, over 19726.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2626, pruned_loss=0.04007, ctc_loss=0.0754, over 1497870.57 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 16.0 +2024-08-29 05:58:48,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=252768.0, ans=0.125 +2024-08-29 05:58:58,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=252821.33333333334, ans=0.09899494936611666 +2024-08-29 05:59:04,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=252821.33333333334, ans=0.125 +2024-08-29 05:59:19,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=252928.0, ans=0.2 +2024-08-29 05:59:23,001 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 05:59:28,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252928.0, ans=0.1 +2024-08-29 05:59:31,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=252981.33333333334, ans=0.09899494936611666 +2024-08-29 05:59:59,551 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:00:02,039 INFO [train.py:1114] (3/4) Epoch 20, batch 150, loss[loss=0.1578, simple_loss=0.2284, pruned_loss=0.03135, ctc_loss=0.06137, over 19717.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2602, pruned_loss=0.03926, ctc_loss=0.07401, over 2027723.87 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 16.0 +2024-08-29 06:00:02,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=253034.66666666666, ans=0.0 +2024-08-29 06:00:43,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253088.0, ans=0.125 +2024-08-29 06:00:55,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.60 vs. limit=12.0 +2024-08-29 06:01:10,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=253194.66666666666, ans=0.0 +2024-08-29 06:01:14,703 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:01:19,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.374e+02 1.536e+02 1.873e+02 3.368e+02, threshold=3.073e+02, percent-clipped=0.0 +2024-08-29 06:01:25,918 INFO [train.py:1114] (3/4) Epoch 20, batch 200, loss[loss=0.1902, simple_loss=0.2659, pruned_loss=0.04153, ctc_loss=0.07842, over 18525.00 frames. 
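Note on the epoch boundary above (Epoch 20, batch 0): the script pauses to compute a validation loss over a fixed held-out set (944034.00 frames) and reports the peak GPU memory. The memory line corresponds to querying PyTorch's CUDA allocator:

    import torch

    if torch.cuda.is_available():
        mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
        print(f"Maximum memory allocated so far is {mb}MB")   # cf. 12786MB above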
], tot_loss[loss=0.1828, simple_loss=0.2589, pruned_loss=0.03883, ctc_loss=0.07282, over 2435784.70 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:02:02,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253408.0, ans=0.0 +2024-08-29 06:02:35,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253408.0, ans=0.1 +2024-08-29 06:03:12,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=253408.0, ans=0.125 +2024-08-29 06:03:15,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-29 06:03:22,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=253461.33333333334, ans=0.0 +2024-08-29 06:03:25,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253514.66666666666, ans=0.1 +2024-08-29 06:03:48,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.62 vs. limit=10.0 +2024-08-29 06:03:51,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=253568.0, ans=0.0 +2024-08-29 06:03:51,883 INFO [train.py:1114] (3/4) Epoch 20, batch 250, loss[loss=0.1977, simple_loss=0.2767, pruned_loss=0.04326, ctc_loss=0.08055, over 19355.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.258, pruned_loss=0.03842, ctc_loss=0.0719, over 2756034.48 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:04:43,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 06:04:46,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253674.66666666666, ans=0.1 +2024-08-29 06:04:46,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.69 vs. limit=15.0 +2024-08-29 06:04:50,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 06:05:13,803 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.460e+02 1.674e+02 2.079e+02 4.615e+02, threshold=3.347e+02, percent-clipped=6.0 +2024-08-29 06:05:58,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.89 vs. limit=15.0 +2024-08-29 06:06:03,009 INFO [train.py:1114] (3/4) Epoch 20, batch 300, loss[loss=0.199, simple_loss=0.2789, pruned_loss=0.04324, ctc_loss=0.08157, over 19518.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2582, pruned_loss=0.0384, ctc_loss=0.07192, over 3000612.69 frames. 
], batch size: 61, lr: 7.56e-03, grad_scale: 16.0 +2024-08-29 06:06:14,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253888.0, ans=0.125 +2024-08-29 06:06:34,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=253994.66666666666, ans=0.0 +2024-08-29 06:07:06,198 INFO [train.py:1114] (3/4) Epoch 20, batch 350, loss[loss=0.1707, simple_loss=0.2366, pruned_loss=0.03846, ctc_loss=0.06974, over 19756.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2588, pruned_loss=0.03869, ctc_loss=0.07252, over 3189836.52 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 06:07:19,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=254154.66666666666, ans=0.125 +2024-08-29 06:07:23,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=254154.66666666666, ans=0.125 +2024-08-29 06:07:27,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=254208.0, ans=0.125 +2024-08-29 06:07:30,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254208.0, ans=0.125 +2024-08-29 06:07:31,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.42 vs. limit=6.0 +2024-08-29 06:07:50,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.501e+02 1.796e+02 2.192e+02 4.069e+02, threshold=3.593e+02, percent-clipped=5.0 +2024-08-29 06:07:58,761 INFO [train.py:1114] (3/4) Epoch 20, batch 400, loss[loss=0.1686, simple_loss=0.2503, pruned_loss=0.031, ctc_loss=0.06197, over 19496.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2582, pruned_loss=0.03841, ctc_loss=0.07205, over 3341479.33 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 06:08:09,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=254421.33333333334, ans=0.2 +2024-08-29 06:08:09,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254421.33333333334, ans=0.125 +2024-08-29 06:08:18,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=254474.66666666666, ans=0.05 +2024-08-29 06:08:48,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254581.33333333334, ans=0.125 +2024-08-29 06:08:50,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.87 vs. limit=15.0 +2024-08-29 06:08:51,107 INFO [train.py:1114] (3/4) Epoch 20, batch 450, loss[loss=0.1814, simple_loss=0.2649, pruned_loss=0.0352, ctc_loss=0.06888, over 19610.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2581, pruned_loss=0.0384, ctc_loss=0.07206, over 3448990.92 frames. 
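Note on the grad_scale value at the end of each loss line: it is the dynamic loss-scaling factor for mixed-precision training, and it moves between 32.0, 16.0, and 8.0 across nearby batches as the scaler backs off after overflowing steps and later grows again. Generic PyTorch AMP usage that produces such a scale (the standard API, not the training script itself):

    import torch

    scaler = torch.cuda.amp.GradScaler()   # maintains the dynamic loss scale

    def train_step(model, batch, optimizer):
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():    # forward/loss in mixed precision
            loss = model(batch)
        scaler.scale(loss).backward()      # scale the loss to protect fp16 grads
        scaler.step(optimizer)             # unscales; skips the step on inf/nan
        scaler.update()                    # halve on overflow, grow when stable
        return scaler.get_scale()          # the logged "grad_scale"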
], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 06:09:15,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=254634.66666666666, ans=0.125 +2024-08-29 06:09:15,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=254634.66666666666, ans=0.0 +2024-08-29 06:09:16,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=254634.66666666666, ans=0.125 +2024-08-29 06:09:33,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=254688.0, ans=0.2 +2024-08-29 06:09:59,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=254741.33333333334, ans=0.125 +2024-08-29 06:10:12,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=254741.33333333334, ans=0.1 +2024-08-29 06:10:19,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=254794.66666666666, ans=0.2 +2024-08-29 06:10:30,937 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.420e+02 1.652e+02 2.202e+02 3.176e+02, threshold=3.303e+02, percent-clipped=0.0 +2024-08-29 06:10:51,257 INFO [train.py:1114] (3/4) Epoch 20, batch 500, loss[loss=0.2013, simple_loss=0.285, pruned_loss=0.04278, ctc_loss=0.07997, over 19642.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.257, pruned_loss=0.03798, ctc_loss=0.07127, over 3545247.99 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 8.0 +2024-08-29 06:11:11,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=255008.0, ans=0.125 +2024-08-29 06:11:52,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.97 vs. limit=15.0 +2024-08-29 06:11:58,825 INFO [train.py:1114] (3/4) Epoch 20, batch 550, loss[loss=0.2108, simple_loss=0.2813, pruned_loss=0.05162, ctc_loss=0.09278, over 19211.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2571, pruned_loss=0.03822, ctc_loss=0.07164, over 3606801.94 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 8.0 +2024-08-29 06:12:30,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255274.66666666666, ans=0.1 +2024-08-29 06:12:30,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=255274.66666666666, ans=0.125 +2024-08-29 06:12:32,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=255328.0, ans=0.125 +2024-08-29 06:13:03,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.429e+02 1.650e+02 1.993e+02 3.679e+02, threshold=3.299e+02, percent-clipped=2.0 +2024-08-29 06:13:09,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=255381.33333333334, ans=10.0 +2024-08-29 06:13:12,591 INFO [train.py:1114] (3/4) Epoch 20, batch 600, loss[loss=0.1987, simple_loss=0.2769, pruned_loss=0.04343, ctc_loss=0.08403, over 19330.00 frames. 
], tot_loss[loss=0.1824, simple_loss=0.2582, pruned_loss=0.03877, ctc_loss=0.07272, over 3663846.54 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:13:12,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-29 06:13:56,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=255488.0, ans=0.125 +2024-08-29 06:15:09,597 INFO [train.py:1114] (3/4) Epoch 20, batch 650, loss[loss=0.1781, simple_loss=0.2621, pruned_loss=0.03391, ctc_loss=0.06571, over 19774.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2578, pruned_loss=0.03854, ctc_loss=0.07206, over 3714515.65 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:15:19,048 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-08-29 06:16:45,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.72 vs. limit=22.5 +2024-08-29 06:16:48,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.504e+02 1.911e+02 2.476e+02 5.788e+02, threshold=3.821e+02, percent-clipped=14.0 +2024-08-29 06:16:53,293 INFO [train.py:1114] (3/4) Epoch 20, batch 700, loss[loss=0.1723, simple_loss=0.2467, pruned_loss=0.03512, ctc_loss=0.069, over 19716.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2583, pruned_loss=0.03862, ctc_loss=0.07224, over 3746641.94 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 8.0 +2024-08-29 06:16:54,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.88 vs. limit=15.0 +2024-08-29 06:17:03,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=255968.0, ans=0.125 +2024-08-29 06:17:37,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-29 06:17:41,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-29 06:17:42,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-29 06:17:45,326 INFO [train.py:1114] (3/4) Epoch 20, batch 750, loss[loss=0.1859, simple_loss=0.2686, pruned_loss=0.03715, ctc_loss=0.07236, over 19493.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2583, pruned_loss=0.03877, ctc_loss=0.07246, over 3772097.76 frames. 
], batch size: 54, lr: 7.52e-03, grad_scale: 8.0 +2024-08-29 06:17:51,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=256234.66666666666, ans=0.125 +2024-08-29 06:18:04,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=256341.33333333334, ans=0.125 +2024-08-29 06:18:09,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-29 06:18:15,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=256394.66666666666, ans=0.0 +2024-08-29 06:18:28,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=256448.0, ans=0.0 +2024-08-29 06:18:28,940 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:18:28,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=256448.0, ans=0.2 +2024-08-29 06:18:31,358 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.481e+02 1.912e+02 2.487e+02 4.029e+02, threshold=3.825e+02, percent-clipped=2.0 +2024-08-29 06:18:37,696 INFO [train.py:1114] (3/4) Epoch 20, batch 800, loss[loss=0.1823, simple_loss=0.2535, pruned_loss=0.04054, ctc_loss=0.07493, over 19413.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2583, pruned_loss=0.03875, ctc_loss=0.0723, over 3794382.28 frames. ], batch size: 48, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 06:18:38,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=256501.33333333334, ans=0.125 +2024-08-29 06:18:42,403 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:18:52,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.22 vs. limit=15.0 +2024-08-29 06:18:53,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=256554.66666666666, ans=0.125 +2024-08-29 06:19:38,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=256714.66666666666, ans=0.1 +2024-08-29 06:19:41,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-29 06:19:44,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.63 vs. limit=10.0 +2024-08-29 06:19:46,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=256714.66666666666, ans=0.125 +2024-08-29 06:19:48,530 INFO [train.py:1114] (3/4) Epoch 20, batch 850, loss[loss=0.1801, simple_loss=0.2606, pruned_loss=0.03484, ctc_loss=0.07479, over 19650.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2576, pruned_loss=0.03858, ctc_loss=0.07213, over 3814968.13 frames. 
], batch size: 59, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:19:52,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=256768.0, ans=0.125 +2024-08-29 06:19:53,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=256768.0, ans=0.125 +2024-08-29 06:20:00,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=256768.0, ans=0.125 +2024-08-29 06:20:16,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0 +2024-08-29 06:20:41,634 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.422e+02 1.634e+02 1.945e+02 3.890e+02, threshold=3.267e+02, percent-clipped=1.0 +2024-08-29 06:20:49,506 INFO [train.py:1114] (3/4) Epoch 20, batch 900, loss[loss=0.1704, simple_loss=0.242, pruned_loss=0.03568, ctc_loss=0.06872, over 19400.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2581, pruned_loss=0.03896, ctc_loss=0.07294, over 3818685.56 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:20:49,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=257034.66666666666, ans=15.0 +2024-08-29 06:20:58,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=257088.0, ans=0.025 +2024-08-29 06:21:03,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.06 vs. limit=15.0 +2024-08-29 06:21:10,499 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:21:16,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.87 vs. limit=15.0 +2024-08-29 06:21:30,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=257194.66666666666, ans=0.0 +2024-08-29 06:21:49,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-29 06:22:18,954 INFO [train.py:1114] (3/4) Epoch 20, batch 950, loss[loss=0.1686, simple_loss=0.2472, pruned_loss=0.03278, ctc_loss=0.06089, over 19498.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2587, pruned_loss=0.03924, ctc_loss=0.07334, over 3819873.11 frames. 
], batch size: 49, lr: 7.51e-03, grad_scale: 16.0 +2024-08-29 06:22:47,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-29 06:22:49,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=257354.66666666666, ans=0.125 +2024-08-29 06:22:51,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=257354.66666666666, ans=0.2 +2024-08-29 06:23:02,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=257408.0, ans=0.04949747468305833 +2024-08-29 06:23:09,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257408.0, ans=0.0 +2024-08-29 06:23:30,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257408.0, ans=0.1 +2024-08-29 06:23:31,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=257408.0, ans=0.2 +2024-08-29 06:23:34,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257461.33333333334, ans=0.1 +2024-08-29 06:23:34,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=257461.33333333334, ans=0.2 +2024-08-29 06:23:35,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-08-29 06:23:48,039 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.398e+02 1.599e+02 1.937e+02 2.870e+02, threshold=3.197e+02, percent-clipped=0.0 +2024-08-29 06:23:49,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=257514.66666666666, ans=0.125 +2024-08-29 06:23:52,530 INFO [train.py:1114] (3/4) Epoch 20, batch 1000, loss[loss=0.1655, simple_loss=0.2414, pruned_loss=0.03265, ctc_loss=0.06099, over 19858.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2594, pruned_loss=0.03955, ctc_loss=0.07402, over 3816153.61 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:24:46,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=257781.33333333334, ans=0.0 +2024-08-29 06:24:56,440 INFO [train.py:1114] (3/4) Epoch 20, batch 1050, loss[loss=0.1852, simple_loss=0.2696, pruned_loss=0.03612, ctc_loss=0.07169, over 19836.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2584, pruned_loss=0.03925, ctc_loss=0.07351, over 3821136.31 frames. 
], batch size: 57, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:24:58,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=257834.66666666666, ans=0.125 +2024-08-29 06:25:09,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=257888.0, ans=0.2 +2024-08-29 06:25:15,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=257888.0, ans=0.125 +2024-08-29 06:25:37,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0 +2024-08-29 06:25:37,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=257994.66666666666, ans=0.0 +2024-08-29 06:25:54,250 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.418e+02 1.590e+02 1.994e+02 3.641e+02, threshold=3.179e+02, percent-clipped=3.0 +2024-08-29 06:25:58,772 INFO [train.py:1114] (3/4) Epoch 20, batch 1100, loss[loss=0.1706, simple_loss=0.2567, pruned_loss=0.03041, ctc_loss=0.0593, over 19579.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2583, pruned_loss=0.03894, ctc_loss=0.07285, over 3828453.10 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 16.0 +2024-08-29 06:27:02,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=258261.33333333334, ans=0.0 +2024-08-29 06:27:30,537 INFO [train.py:1114] (3/4) Epoch 20, batch 1150, loss[loss=0.1799, simple_loss=0.2598, pruned_loss=0.03671, ctc_loss=0.06616, over 19584.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2584, pruned_loss=0.039, ctc_loss=0.07296, over 3827007.66 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 16.0 +2024-08-29 06:27:37,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258368.0, ans=0.1 +2024-08-29 06:27:39,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.75 vs. limit=15.0 +2024-08-29 06:27:45,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=258421.33333333334, ans=0.125 +2024-08-29 06:27:47,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258421.33333333334, ans=0.125 +2024-08-29 06:27:47,472 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:27:48,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=258474.66666666666, ans=0.0 +2024-08-29 06:27:51,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. 
limit=15.0 +2024-08-29 06:28:13,432 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.421e+02 1.745e+02 2.163e+02 3.118e+02, threshold=3.490e+02, percent-clipped=0.0 +2024-08-29 06:28:13,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258581.33333333334, ans=0.1 +2024-08-29 06:28:18,012 INFO [train.py:1114] (3/4) Epoch 20, batch 1200, loss[loss=0.1947, simple_loss=0.27, pruned_loss=0.04358, ctc_loss=0.08092, over 19843.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.259, pruned_loss=0.03908, ctc_loss=0.07313, over 3823359.18 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 06:29:06,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258741.33333333334, ans=0.125 +2024-08-29 06:29:11,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.51 vs. limit=22.5 +2024-08-29 06:30:07,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-08-29 06:30:22,557 INFO [train.py:1114] (3/4) Epoch 20, batch 1250, loss[loss=0.203, simple_loss=0.2765, pruned_loss=0.04638, ctc_loss=0.09192, over 19512.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2595, pruned_loss=0.03913, ctc_loss=0.07316, over 3841902.61 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:30:24,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.05 vs. limit=22.5 +2024-08-29 06:30:35,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=258954.66666666666, ans=0.2 +2024-08-29 06:30:49,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=259008.0, ans=0.125 +2024-08-29 06:30:54,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259061.33333333334, ans=0.1 +2024-08-29 06:31:09,979 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.413e+02 1.610e+02 2.110e+02 3.599e+02, threshold=3.219e+02, percent-clipped=1.0 +2024-08-29 06:31:51,308 INFO [train.py:1114] (3/4) Epoch 20, batch 1300, loss[loss=0.2191, simple_loss=0.2884, pruned_loss=0.05387, ctc_loss=0.1053, over 18920.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2589, pruned_loss=0.03888, ctc_loss=0.07259, over 3845313.70 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:33:58,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259168.0, ans=0.125 +2024-08-29 06:34:00,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=259168.0, ans=0.125 +2024-08-29 06:34:29,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=259221.33333333334, ans=0.02 +2024-08-29 06:34:50,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. 
limit=15.0 +2024-08-29 06:35:58,295 INFO [train.py:1114] (3/4) Epoch 20, batch 1350, loss[loss=0.1716, simple_loss=0.2498, pruned_loss=0.03459, ctc_loss=0.06064, over 19766.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2588, pruned_loss=0.03875, ctc_loss=0.07219, over 3856715.66 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 16.0 +2024-08-29 06:36:18,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=259541.33333333334, ans=0.125 +2024-08-29 06:36:19,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=259541.33333333334, ans=0.2 +2024-08-29 06:36:19,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259541.33333333334, ans=0.125 +2024-08-29 06:36:32,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259594.66666666666, ans=0.125 +2024-08-29 06:36:33,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=259594.66666666666, ans=0.0 +2024-08-29 06:36:33,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=259594.66666666666, ans=0.0 +2024-08-29 06:36:44,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=259648.0, ans=0.0 +2024-08-29 06:36:45,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.394e+02 1.620e+02 1.985e+02 3.317e+02, threshold=3.241e+02, percent-clipped=2.0 +2024-08-29 06:36:49,862 INFO [train.py:1114] (3/4) Epoch 20, batch 1400, loss[loss=0.1697, simple_loss=0.2305, pruned_loss=0.04037, ctc_loss=0.07053, over 19657.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2586, pruned_loss=0.03873, ctc_loss=0.07228, over 3863644.08 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:36:55,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=259701.33333333334, ans=0.025 +2024-08-29 06:37:17,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=259754.66666666666, ans=0.025 +2024-08-29 06:37:31,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=259808.0, ans=0.0 +2024-08-29 06:37:50,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=259914.66666666666, ans=0.2 +2024-08-29 06:37:51,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=259914.66666666666, ans=0.0 +2024-08-29 06:38:00,929 INFO [train.py:1114] (3/4) Epoch 20, batch 1450, loss[loss=0.207, simple_loss=0.286, pruned_loss=0.04629, ctc_loss=0.08882, over 19646.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2592, pruned_loss=0.03891, ctc_loss=0.0727, over 3862602.45 frames. 
], batch size: 63, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:38:08,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=259968.0, ans=0.0 +2024-08-29 06:38:25,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 06:38:30,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 06:38:34,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=260128.0, ans=0.025 +2024-08-29 06:38:35,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=260128.0, ans=0.125 +2024-08-29 06:38:47,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=12.0 +2024-08-29 06:38:48,134 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.378e+02 1.589e+02 1.878e+02 3.405e+02, threshold=3.177e+02, percent-clipped=1.0 +2024-08-29 06:38:51,862 INFO [train.py:1114] (3/4) Epoch 20, batch 1500, loss[loss=0.1888, simple_loss=0.2725, pruned_loss=0.03826, ctc_loss=0.07152, over 19585.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2596, pruned_loss=0.03891, ctc_loss=0.07268, over 3862617.62 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 16.0 +2024-08-29 06:38:55,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-08-29 06:39:11,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=260341.33333333334, ans=0.025 +2024-08-29 06:39:27,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=260394.66666666666, ans=0.125 +2024-08-29 06:39:36,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260448.0, ans=0.1 +2024-08-29 06:40:24,661 INFO [train.py:1114] (3/4) Epoch 20, batch 1550, loss[loss=0.2101, simple_loss=0.2805, pruned_loss=0.05137, ctc_loss=0.09246, over 19635.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2597, pruned_loss=0.03909, ctc_loss=0.07316, over 3846805.53 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 06:40:26,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=260501.33333333334, ans=0.125 +2024-08-29 06:40:29,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=260501.33333333334, ans=10.0 +2024-08-29 06:41:39,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.43 vs. 
limit=5.0 +2024-08-29 06:41:41,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=260661.33333333334, ans=0.2 +2024-08-29 06:41:56,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260714.66666666666, ans=0.0 +2024-08-29 06:41:57,054 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.471e+02 1.781e+02 2.261e+02 3.819e+02, threshold=3.562e+02, percent-clipped=6.0 +2024-08-29 06:41:59,062 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:42:30,547 INFO [train.py:1114] (3/4) Epoch 20, batch 1600, loss[loss=0.1812, simple_loss=0.2652, pruned_loss=0.03558, ctc_loss=0.06514, over 19854.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2593, pruned_loss=0.03886, ctc_loss=0.07269, over 3836004.15 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 06:42:30,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=260768.0, ans=0.125 +2024-08-29 06:42:33,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=260768.0, ans=0.0 +2024-08-29 06:42:46,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=260821.33333333334, ans=0.125 +2024-08-29 06:42:52,587 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:43:05,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.70 vs. limit=10.0 +2024-08-29 06:43:07,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260928.0, ans=0.125 +2024-08-29 06:43:13,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=260981.33333333334, ans=0.025 +2024-08-29 06:43:21,814 INFO [train.py:1114] (3/4) Epoch 20, batch 1650, loss[loss=0.1937, simple_loss=0.2731, pruned_loss=0.04087, ctc_loss=0.08159, over 19670.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2587, pruned_loss=0.03877, ctc_loss=0.07261, over 3832121.53 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:43:23,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261034.66666666666, ans=0.125 +2024-08-29 06:43:40,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=261034.66666666666, ans=0.2 +2024-08-29 06:44:11,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=261194.66666666666, ans=0.125 +2024-08-29 06:44:21,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.46 vs. 
limit=22.5 +2024-08-29 06:44:30,263 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.539e+02 1.831e+02 2.496e+02 4.278e+02, threshold=3.663e+02, percent-clipped=6.0 +2024-08-29 06:44:30,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=261248.0, ans=0.2 +2024-08-29 06:44:33,860 INFO [train.py:1114] (3/4) Epoch 20, batch 1700, loss[loss=0.1654, simple_loss=0.2305, pruned_loss=0.03658, ctc_loss=0.0678, over 19682.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2585, pruned_loss=0.03838, ctc_loss=0.07208, over 3846224.64 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:44:34,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.18 vs. limit=6.0 +2024-08-29 06:45:00,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=261408.0, ans=0.125 +2024-08-29 06:45:18,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=261461.33333333334, ans=0.2 +2024-08-29 06:45:38,441 INFO [train.py:1114] (3/4) Epoch 20, batch 1750, loss[loss=0.163, simple_loss=0.2331, pruned_loss=0.03437, ctc_loss=0.06031, over 19644.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2582, pruned_loss=0.03822, ctc_loss=0.07187, over 3851220.45 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 06:46:06,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=261674.66666666666, ans=0.125 +2024-08-29 06:46:07,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.24 vs. limit=15.0 +2024-08-29 06:46:24,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261781.33333333334, ans=0.0 +2024-08-29 06:46:33,066 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.489e+02 1.816e+02 2.382e+02 3.653e+02, threshold=3.632e+02, percent-clipped=0.0 +2024-08-29 06:46:36,576 INFO [train.py:1114] (3/4) Epoch 20, batch 1800, loss[loss=0.1847, simple_loss=0.2611, pruned_loss=0.03934, ctc_loss=0.07425, over 19626.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2579, pruned_loss=0.03834, ctc_loss=0.07193, over 3852909.19 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:46:51,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=261834.66666666666, ans=0.05 +2024-08-29 06:46:55,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.85 vs. limit=12.0 +2024-08-29 06:47:15,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.31 vs. 
limit=15.0 +2024-08-29 06:47:19,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261994.66666666666, ans=0.0 +2024-08-29 06:47:19,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=261994.66666666666, ans=0.04949747468305833 +2024-08-29 06:47:34,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=262048.0, ans=0.0 +2024-08-29 06:47:43,603 INFO [train.py:1114] (3/4) Epoch 20, batch 1850, loss[loss=0.1958, simple_loss=0.2742, pruned_loss=0.04216, ctc_loss=0.08296, over 19584.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2577, pruned_loss=0.03826, ctc_loss=0.07158, over 3857665.79 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:47:46,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=262101.33333333334, ans=0.125 +2024-08-29 06:48:09,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262208.0, ans=0.125 +2024-08-29 06:48:29,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262261.3333333333, ans=0.125 +2024-08-29 06:48:39,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.22 vs. limit=6.0 +2024-08-29 06:48:40,475 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.496e+02 1.744e+02 2.237e+02 4.849e+02, threshold=3.488e+02, percent-clipped=3.0 +2024-08-29 06:48:42,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=262314.6666666667, ans=0.125 +2024-08-29 06:48:46,718 INFO [train.py:1114] (3/4) Epoch 20, batch 1900, loss[loss=0.1774, simple_loss=0.2606, pruned_loss=0.03433, ctc_loss=0.06371, over 19647.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.258, pruned_loss=0.0383, ctc_loss=0.07166, over 3862953.43 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 06:49:44,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=262528.0, ans=0.125 +2024-08-29 06:49:47,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262528.0, ans=0.0 +2024-08-29 06:49:54,088 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:50:01,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262581.3333333333, ans=0.125 +2024-08-29 06:50:07,323 INFO [train.py:1114] (3/4) Epoch 20, batch 1950, loss[loss=0.1758, simple_loss=0.2474, pruned_loss=0.03795, ctc_loss=0.071, over 19603.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.259, pruned_loss=0.03851, ctc_loss=0.07188, over 3871290.78 frames. 
], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:50:20,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262688.0, ans=0.1 +2024-08-29 06:50:32,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262741.3333333333, ans=0.0 +2024-08-29 06:50:36,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.81 vs. limit=15.0 +2024-08-29 06:50:48,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262741.3333333333, ans=0.125 +2024-08-29 06:50:51,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262794.6666666667, ans=0.0 +2024-08-29 06:50:53,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=262794.6666666667, ans=0.95 +2024-08-29 06:51:12,402 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.401e+02 1.548e+02 2.025e+02 3.566e+02, threshold=3.095e+02, percent-clipped=1.0 +2024-08-29 06:51:13,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=262848.0, ans=0.125 +2024-08-29 06:51:15,980 INFO [train.py:1114] (3/4) Epoch 20, batch 2000, loss[loss=0.1581, simple_loss=0.2295, pruned_loss=0.03153, ctc_loss=0.05896, over 19675.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2597, pruned_loss=0.03893, ctc_loss=0.07275, over 3854870.91 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 06:51:41,273 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:52:04,138 INFO [train.py:1114] (3/4) Epoch 20, batch 2050, loss[loss=0.1556, simple_loss=0.232, pruned_loss=0.02855, ctc_loss=0.05521, over 19699.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2587, pruned_loss=0.03892, ctc_loss=0.07281, over 3850390.55 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:52:16,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.34 vs. limit=15.0 +2024-08-29 06:52:36,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=263274.6666666667, ans=0.125 +2024-08-29 06:52:42,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.63 vs. limit=15.0 +2024-08-29 06:53:03,394 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.460e+02 1.739e+02 2.291e+02 5.164e+02, threshold=3.479e+02, percent-clipped=12.0 +2024-08-29 06:53:06,942 INFO [train.py:1114] (3/4) Epoch 20, batch 2100, loss[loss=0.1736, simple_loss=0.2502, pruned_loss=0.03498, ctc_loss=0.06759, over 19774.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2583, pruned_loss=0.03859, ctc_loss=0.07225, over 3858025.09 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:53:13,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.95 vs. 
limit=15.0 +2024-08-29 06:53:21,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.79 vs. limit=15.0 +2024-08-29 06:53:25,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=263541.3333333333, ans=15.0 +2024-08-29 06:53:57,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=263701.3333333333, ans=0.0 +2024-08-29 06:54:01,617 INFO [train.py:1114] (3/4) Epoch 20, batch 2150, loss[loss=0.1728, simple_loss=0.2505, pruned_loss=0.03494, ctc_loss=0.06323, over 19861.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2574, pruned_loss=0.03836, ctc_loss=0.07181, over 3870347.82 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 06:54:20,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=263808.0, ans=0.125 +2024-08-29 06:54:35,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=263861.3333333333, ans=0.125 +2024-08-29 06:54:37,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-08-29 06:54:38,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=263914.6666666667, ans=0.07 +2024-08-29 06:54:44,297 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.492e+02 1.849e+02 2.492e+02 5.041e+02, threshold=3.698e+02, percent-clipped=4.0 +2024-08-29 06:55:32,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=263914.6666666667, ans=0.125 +2024-08-29 06:55:33,934 INFO [train.py:1114] (3/4) Epoch 20, batch 2200, loss[loss=0.1889, simple_loss=0.2759, pruned_loss=0.03748, ctc_loss=0.06764, over 19581.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2572, pruned_loss=0.03823, ctc_loss=0.07156, over 3868929.77 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:55:34,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263968.0, ans=0.0 +2024-08-29 06:55:40,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.65 vs. limit=10.0 +2024-08-29 06:55:45,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=263968.0, ans=0.0 +2024-08-29 06:56:11,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.92 vs. limit=15.0 +2024-08-29 06:56:38,509 INFO [train.py:1114] (3/4) Epoch 20, batch 2250, loss[loss=0.1729, simple_loss=0.2523, pruned_loss=0.03368, ctc_loss=0.06568, over 19609.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2577, pruned_loss=0.03833, ctc_loss=0.07179, over 3867828.09 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:56:42,428 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 06:56:50,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=264288.0, ans=0.125 +2024-08-29 06:57:35,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264394.6666666667, ans=0.1 +2024-08-29 06:57:50,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264448.0, ans=0.125 +2024-08-29 06:57:58,404 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.422e+02 1.843e+02 2.549e+02 5.039e+02, threshold=3.686e+02, percent-clipped=5.0 +2024-08-29 06:58:00,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264448.0, ans=0.125 +2024-08-29 06:58:01,856 INFO [train.py:1114] (3/4) Epoch 20, batch 2300, loss[loss=0.1734, simple_loss=0.2458, pruned_loss=0.03724, ctc_loss=0.06645, over 19514.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2572, pruned_loss=0.03861, ctc_loss=0.07218, over 3862372.82 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 06:58:55,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-29 06:59:17,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264608.0, ans=0.1 +2024-08-29 06:59:29,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=264661.3333333333, ans=0.0 +2024-08-29 06:59:31,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.12 vs. limit=15.0 +2024-08-29 07:00:03,991 INFO [train.py:1114] (3/4) Epoch 20, batch 2350, loss[loss=0.2074, simple_loss=0.2785, pruned_loss=0.05093, ctc_loss=0.08627, over 19659.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.257, pruned_loss=0.03855, ctc_loss=0.0721, over 3865153.71 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:05:14,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=264821.3333333333, ans=0.025 +2024-08-29 07:11:14,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=264874.6666666667, ans=0.125 +2024-08-29 07:11:47,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264874.6666666667, ans=0.1 +2024-08-29 07:13:08,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.98 vs. 
limit=15.0 +2024-08-29 07:14:01,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264928.0, ans=0.125 +2024-08-29 07:14:23,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=264928.0, ans=0.0 +2024-08-29 07:20:35,433 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.441e+02 1.702e+02 2.205e+02 4.204e+02, threshold=3.404e+02, percent-clipped=1.0 +2024-08-29 07:22:42,544 INFO [train.py:1114] (3/4) Epoch 20, batch 2400, loss[loss=0.1917, simple_loss=0.2714, pruned_loss=0.04056, ctc_loss=0.07722, over 19338.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2592, pruned_loss=0.03916, ctc_loss=0.07308, over 3860083.88 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:22:42,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=265034.6666666667, ans=0.025 +2024-08-29 07:23:31,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265034.6666666667, ans=0.125 +2024-08-29 07:23:39,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265034.6666666667, ans=0.125 +2024-08-29 07:31:37,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265088.0, ans=0.1 +2024-08-29 07:40:39,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.25 vs. limit=12.0 +2024-08-29 07:50:39,238 INFO [train.py:1114] (3/4) Epoch 20, batch 2450, loss[loss=0.234, simple_loss=0.2831, pruned_loss=0.06667, ctc_loss=0.1291, over 13237.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2627, pruned_loss=0.04141, ctc_loss=0.07773, over 3732985.70 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 07:51:43,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=265301.3333333333, ans=0.125 +2024-08-29 07:53:18,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=265301.3333333333, ans=0.04949747468305833 +2024-08-29 07:53:31,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=265301.3333333333, ans=0.0 +2024-08-29 07:53:44,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=265301.3333333333, ans=0.0 +2024-08-29 07:59:00,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=265408.0, ans=0.0 +2024-08-29 08:00:25,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-08-29 08:06:23,735 INFO [train.py:1387] (3/4) Done! 
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-0 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-0 new file mode 100644 index 0000000000000000000000000000000000000000..417b84cb6c0fd105dd21267280b3b8377455098d --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-0 @@ -0,0 +1,727 @@ +2024-08-29 10:47:00,602 INFO [train.py:1182] (0/4) Training started +2024-08-29 10:47:00,606 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-29 10:47:00,825 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 
1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:47:00,825 INFO [train.py:1212] (0/4) About to create model +2024-08-29 10:50:07,618 INFO [train.py:1216] (0/4) Number of model parameters: 65805511 +2024-08-29 10:50:08,153 INFO [checkpoint.py:112] (0/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 10:54:10,813 INFO [checkpoint.py:131] (0/4) Loading averaged model +2024-08-29 10:54:13,657 INFO [train.py:1231] (0/4) Using DDP +2024-08-29 10:57:38,613 INFO [train.py:1243] (0/4) Loading optimizer state dict +2024-08-29 10:57:38,818 INFO [train.py:1251] (0/4) Loading scheduler state dict +2024-08-29 10:57:39,426 INFO [asr_datamodule.py:894] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. +2024-08-29 10:57:41,252 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-08-29 10:57:41,253 INFO [asr_datamodule.py:911] (0/4) About to get dev-clean cuts +2024-08-29 10:58:07,692 INFO [asr_datamodule.py:918] (0/4) About to get dev-other cuts +2024-08-29 10:58:09,678 INFO [asr_datamodule.py:814] (0/4) About to create dev dataset +2024-08-29 10:58:09,998 INFO [asr_datamodule.py:831] (0/4) About to create dev dataloader +2024-08-29 10:58:09,998 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:07:36,366 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.75 vs. limit=7.5 +2024-08-29 11:08:02,421 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12155MB +2024-08-29 11:08:03,540 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12155MB +2024-08-29 11:11:47,148 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 11:11:48,381 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 11:21:31,522 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 11:21:32,818 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 11:21:32,837 INFO [train.py:1344] (0/4) Loading grad scaler state dict +2024-08-29 11:27:35,422 INFO [train.py:1114] (0/4) Epoch 19, batch 0, loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.04146, ctc_loss=0.07309, over 19421.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2584, pruned_loss=0.04146, ctc_loss=0.07309, over 19421.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 11:27:35,423 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 11:31:58,883 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-29 11:31:58,884 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12196MB +2024-08-29 11:31:59,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. 
limit=10.0 +2024-08-29 11:33:44,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238954.66666666666, ans=0.0 +2024-08-29 11:50:47,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239008.0, ans=0.0 +2024-08-29 12:03:09,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239008.0, ans=0.125 +2024-08-29 12:06:53,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=239008.0, ans=0.025 +2024-08-29 12:33:07,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=239114.66666666666, ans=0.05 +2024-08-29 12:44:34,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239168.0, ans=0.125 +2024-08-29 12:45:48,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=239168.0, ans=0.125 +2024-08-29 12:46:38,394 INFO [train.py:1114] (0/4) Epoch 19, batch 50, loss[loss=0.175, simple_loss=0.2468, pruned_loss=0.0369, ctc_loss=0.07334, over 19679.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2619, pruned_loss=0.03972, ctc_loss=0.0747, over 845033.20 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 12:48:50,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=239221.33333333334, ans=0.2 +2024-08-29 12:54:09,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.62 vs. limit=10.0 +2024-08-29 12:57:40,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239328.0, ans=0.1 +2024-08-29 12:57:42,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=239328.0, ans=0.125 +2024-08-29 12:57:42,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.94 vs. limit=15.0 +2024-08-29 12:57:49,123 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.479e+02 1.753e+02 2.191e+02 3.244e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-29 12:57:56,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=239381.33333333334, ans=0.05 +2024-08-29 12:57:57,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239381.33333333334, ans=0.125 +2024-08-29 12:58:24,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=239381.33333333334, ans=0.125 +2024-08-29 13:03:54,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=239434.66666666666, ans=0.2 +2024-08-29 13:04:10,873 INFO [train.py:1114] (0/4) Epoch 19, batch 100, loss[loss=0.1674, simple_loss=0.2423, pruned_loss=0.03427, ctc_loss=0.05993, over 19728.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2638, pruned_loss=0.04056, ctc_loss=0.07627, over 1498716.65 frames. 
], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 13:05:00,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239488.0, ans=0.125 +2024-08-29 13:07:44,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239541.33333333334, ans=0.1 +2024-08-29 13:09:37,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239594.66666666666, ans=0.1 +2024-08-29 13:14:33,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 13:14:33,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 13:14:33,309 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:21:06,828 INFO [train.py:1114] (0/4) Epoch 19, batch 150, loss[loss=0.1662, simple_loss=0.2344, pruned_loss=0.03651, ctc_loss=0.06245, over 19754.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2616, pruned_loss=0.04035, ctc_loss=0.07578, over 2027546.02 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 13:21:07,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=239754.66666666666, ans=0.2 +2024-08-29 13:21:28,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239754.66666666666, ans=0.125 +2024-08-29 13:24:18,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=239861.33333333334, ans=0.125 +2024-08-29 13:24:22,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239914.66666666666, ans=0.1 +2024-08-29 13:24:24,038 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.511e+02 1.939e+02 2.474e+02 3.688e+02, threshold=3.878e+02, percent-clipped=4.0 +2024-08-29 13:25:00,299 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.67 vs. limit=15.0 +2024-08-29 13:26:26,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:27:00,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:27:06,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:27:34,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239968.0, ans=0.0 +2024-08-29 13:31:38,317 INFO [train.py:1114] (0/4) Epoch 19, batch 200, loss[loss=0.2016, simple_loss=0.2709, pruned_loss=0.04842, ctc_loss=0.08843, over 18444.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2598, pruned_loss=0.03992, ctc_loss=0.07478, over 2435779.19 frames. 
], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 13:31:44,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240021.33333333334, ans=0.1 +2024-08-29 13:31:55,504 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.66 vs. limit=22.5 +2024-08-29 13:32:02,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240128.0, ans=0.125 +2024-08-29 13:32:03,208 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.93 vs. limit=12.0 +2024-08-29 13:32:11,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=240128.0, ans=10.0 +2024-08-29 13:32:15,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-29 13:32:17,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=240181.33333333334, ans=0.04949747468305833 +2024-08-29 13:32:19,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-29 13:32:28,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240234.66666666666, ans=0.125 +2024-08-29 13:32:33,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240234.66666666666, ans=0.1 +2024-08-29 13:32:34,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.65 vs. limit=10.0 +2024-08-29 13:32:34,889 INFO [train.py:1114] (0/4) Epoch 19, batch 250, loss[loss=0.194, simple_loss=0.2693, pruned_loss=0.04352, ctc_loss=0.079, over 19356.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2588, pruned_loss=0.03929, ctc_loss=0.07385, over 2756817.04 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 13:33:15,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=240288.0, ans=0.1 +2024-08-29 13:33:26,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=12.0 +2024-08-29 13:33:39,436 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.435e+02 1.779e+02 2.329e+02 4.143e+02, threshold=3.559e+02, percent-clipped=3.0 +2024-08-29 13:33:43,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. 
limit=12.0 +2024-08-29 13:37:11,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 13:37:12,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 13:37:19,134 INFO [train.py:1114] (0/4) Epoch 19, batch 300, loss[loss=0.2042, simple_loss=0.2816, pruned_loss=0.04581, ctc_loss=0.08786, over 19482.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2586, pruned_loss=0.03907, ctc_loss=0.07336, over 3000995.93 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 13:37:39,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-29 13:37:46,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=240608.0, ans=0.025 +2024-08-29 13:38:49,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=240661.33333333334, ans=0.125 +2024-08-29 13:40:03,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-29 13:40:04,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-29 13:40:42,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=240768.0, ans=0.125 +2024-08-29 13:40:51,471 INFO [train.py:1114] (0/4) Epoch 19, batch 350, loss[loss=0.1733, simple_loss=0.2433, pruned_loss=0.03731, ctc_loss=0.07159, over 19748.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2596, pruned_loss=0.03938, ctc_loss=0.07376, over 3190586.01 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 13:41:22,795 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.98 vs. limit=15.0 +2024-08-29 13:41:25,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=240928.0, ans=0.0 +2024-08-29 13:41:25,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.15 vs. 
+2024-08-29 13:41:41,719 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.478e+02 1.769e+02 2.422e+02 3.784e+02, threshold=3.538e+02, percent-clipped=2.0 +2024-08-29 13:42:15,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=240981.33333333334, ans=0.125 +2024-08-29 13:42:15,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240981.33333333334, ans=0.1 +2024-08-29 13:43:49,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=241034.66666666666, ans=0.0 +2024-08-29 13:44:27,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=241088.0, ans=0.0 +2024-08-29 13:44:27,834 INFO [train.py:1114] (0/4) Epoch 19, batch 400, loss[loss=0.1872, simple_loss=0.2656, pruned_loss=0.03917, ctc_loss=0.07628, over 19494.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2593, pruned_loss=0.03929, ctc_loss=0.07369, over 3342052.19 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:45:45,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-29 13:47:03,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.10 vs. limit=15.0 +2024-08-29 13:47:05,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=241301.33333333334, ans=0.0 +2024-08-29 13:47:19,624 INFO [train.py:1114] (0/4) Epoch 19, batch 450, loss[loss=0.1798, simple_loss=0.2675, pruned_loss=0.03259, ctc_loss=0.06745, over 19626.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2592, pruned_loss=0.03921, ctc_loss=0.07357, over 3450414.33 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:47:37,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241408.0, ans=0.125 +2024-08-29 13:48:03,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=241408.0, ans=0.2 +2024-08-29 13:48:45,730 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.422e+02 1.638e+02 2.007e+02 3.524e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 13:48:56,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=241568.0, ans=0.0 +2024-08-29 13:50:05,036 INFO [train.py:1114] (0/4) Epoch 19, batch 500, loss[loss=0.1949, simple_loss=0.2714, pruned_loss=0.04326, ctc_loss=0.07996, over 19656.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2585, pruned_loss=0.03905, ctc_loss=0.07318, over 3545263.34 frames.
], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:50:29,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=241728.0, ans=0.035 +2024-08-29 13:50:32,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241728.0, ans=0.1 +2024-08-29 13:51:30,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=241728.0, ans=0.0 +2024-08-29 13:51:34,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-29 13:51:37,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-29 13:52:44,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=241834.66666666666, ans=0.2 +2024-08-29 13:52:46,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=241834.66666666666, ans=6.0 +2024-08-29 13:54:05,854 INFO [train.py:1114] (0/4) Epoch 19, batch 550, loss[loss=0.1883, simple_loss=0.2611, pruned_loss=0.04196, ctc_loss=0.07899, over 19368.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03942, ctc_loss=0.07392, over 3607835.04 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:54:30,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=241994.66666666666, ans=0.025 +2024-08-29 13:54:30,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241994.66666666666, ans=0.125 +2024-08-29 13:54:34,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=242048.0, ans=0.2 +2024-08-29 13:54:34,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=242048.0, ans=0.025 +2024-08-29 13:54:37,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.414e+02 1.681e+02 2.031e+02 3.681e+02, threshold=3.361e+02, percent-clipped=2.0 +2024-08-29 13:54:39,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.18 vs. limit=15.0 +2024-08-29 13:54:42,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=242048.0, ans=0.95 +2024-08-29 13:54:51,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-29 13:54:53,941 INFO [train.py:1114] (0/4) Epoch 19, batch 600, loss[loss=0.1907, simple_loss=0.2707, pruned_loss=0.04041, ctc_loss=0.07438, over 19400.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2591, pruned_loss=0.03925, ctc_loss=0.07346, over 3666587.04 frames. 
], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:55:09,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=242208.0, ans=0.125 +2024-08-29 13:55:24,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=242314.66666666666, ans=0.2 +2024-08-29 13:55:24,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.46 vs. limit=10.0 +2024-08-29 13:55:42,115 INFO [train.py:1114] (0/4) Epoch 19, batch 650, loss[loss=0.1616, simple_loss=0.2376, pruned_loss=0.03086, ctc_loss=0.05961, over 19773.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2583, pruned_loss=0.03903, ctc_loss=0.07311, over 3716546.96 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:56:02,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=242528.0, ans=0.125 +2024-08-29 13:56:14,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-08-29 13:56:14,923 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.536e+02 1.948e+02 2.425e+02 3.839e+02, threshold=3.897e+02, percent-clipped=7.0 +2024-08-29 13:56:26,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=242581.33333333334, ans=0.0 +2024-08-29 13:56:28,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-29 13:56:28,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-29 13:56:36,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-29 13:56:40,144 INFO [train.py:1114] (0/4) Epoch 19, batch 700, loss[loss=0.1803, simple_loss=0.2544, pruned_loss=0.03883, ctc_loss=0.07153, over 19733.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2586, pruned_loss=0.0393, ctc_loss=0.07348, over 3748806.62 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:56:49,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=242688.0, ans=0.125 +2024-08-29 13:56:50,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=242688.0, ans=0.2 +2024-08-29 13:57:25,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242848.0, ans=0.1 +2024-08-29 13:57:40,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-08-29 13:57:41,163 INFO [train.py:1114] (0/4) Epoch 19, batch 750, loss[loss=0.1859, simple_loss=0.2747, pruned_loss=0.03588, ctc_loss=0.06315, over 19496.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2588, pruned_loss=0.03947, ctc_loss=0.07378, over 3774571.12 frames. 
], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:57:42,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242954.66666666666, ans=0.1 +2024-08-29 13:58:43,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=243114.66666666666, ans=0.015 +2024-08-29 13:58:45,184 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.478e+02 1.857e+02 2.278e+02 3.837e+02, threshold=3.713e+02, percent-clipped=0.0 +2024-08-29 13:58:55,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=243168.0, ans=0.0 +2024-08-29 13:59:02,319 INFO [train.py:1114] (0/4) Epoch 19, batch 800, loss[loss=0.1681, simple_loss=0.2398, pruned_loss=0.03537, ctc_loss=0.06406, over 19802.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2592, pruned_loss=0.03972, ctc_loss=0.07414, over 3795407.26 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:59:10,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=243221.33333333334, ans=0.0 +2024-08-29 13:59:13,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=243274.66666666666, ans=0.05 +2024-08-29 13:59:16,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=243274.66666666666, ans=0.0 +2024-08-29 13:59:29,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243328.0, ans=0.125 +2024-08-29 13:59:48,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.04 vs. limit=22.5 +2024-08-29 13:59:53,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=243381.33333333334, ans=0.125 +2024-08-29 14:04:25,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=243434.66666666666, ans=0.0 +2024-08-29 14:04:32,059 INFO [train.py:1114] (0/4) Epoch 19, batch 850, loss[loss=0.1822, simple_loss=0.2682, pruned_loss=0.03504, ctc_loss=0.06538, over 19670.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2589, pruned_loss=0.03964, ctc_loss=0.07389, over 3815217.60 frames. 
], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 14:04:41,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=243488.0, ans=0.125 +2024-08-29 14:04:43,257 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:04:45,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243541.33333333334, ans=0.1 +2024-08-29 14:05:00,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=243594.66666666666, ans=0.0 +2024-08-29 14:05:01,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=243594.66666666666, ans=0.025 +2024-08-29 14:05:03,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=243648.0, ans=0.0 +2024-08-29 14:05:05,884 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.422e+02 1.643e+02 2.108e+02 3.301e+02, threshold=3.285e+02, percent-clipped=0.0 +2024-08-29 14:05:10,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=243648.0, ans=0.125 +2024-08-29 14:05:13,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 14:05:14,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 14:05:14,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.96 vs. limit=15.0 +2024-08-29 14:08:15,256 INFO [train.py:1114] (0/4) Epoch 19, batch 900, loss[loss=0.1742, simple_loss=0.2483, pruned_loss=0.03652, ctc_loss=0.06766, over 19806.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2591, pruned_loss=0.03974, ctc_loss=0.07408, over 3819233.74 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 14:10:21,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=243861.33333333334, ans=0.025 +2024-08-29 14:10:30,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 14:10:31,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 14:10:36,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 14:10:53,186 INFO [train.py:1114] (0/4) Epoch 19, batch 950, loss[loss=0.1617, simple_loss=0.2361, pruned_loss=0.0321, ctc_loss=0.05771, over 19501.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2595, pruned_loss=0.03989, ctc_loss=0.07437, over 3820559.24 frames. 
], batch size: 49, lr: 7.91e-03, grad_scale: 16.0 +2024-08-29 14:11:00,839 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:11:11,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244074.66666666666, ans=0.0 +2024-08-29 14:11:12,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.70 vs. limit=22.5 +2024-08-29 14:11:15,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=244128.0, ans=0.1 +2024-08-29 14:11:15,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.04 vs. limit=15.0 +2024-08-29 14:11:23,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=244128.0, ans=0.125 +2024-08-29 14:11:27,361 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.452e+02 1.728e+02 2.089e+02 3.728e+02, threshold=3.456e+02, percent-clipped=1.0 +2024-08-29 14:11:43,397 INFO [train.py:1114] (0/4) Epoch 19, batch 1000, loss[loss=0.1697, simple_loss=0.2548, pruned_loss=0.03082, ctc_loss=0.05757, over 19856.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2606, pruned_loss=0.04033, ctc_loss=0.07517, over 3815653.69 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:11:48,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.92 vs. limit=22.5 +2024-08-29 14:12:39,304 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.10 vs. limit=15.0 +2024-08-29 14:12:49,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=244448.0, ans=0.95 +2024-08-29 14:13:29,684 INFO [train.py:1114] (0/4) Epoch 19, batch 1050, loss[loss=0.1946, simple_loss=0.2743, pruned_loss=0.04176, ctc_loss=0.07839, over 19834.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2597, pruned_loss=0.04, ctc_loss=0.07467, over 3822306.47 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:13:30,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-29 14:13:40,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.15 vs. limit=22.5 +2024-08-29 14:13:46,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.19 vs. limit=22.5 +2024-08-29 14:14:01,822 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.357e+02 1.587e+02 1.996e+02 3.012e+02, threshold=3.173e+02, percent-clipped=0.0 +2024-08-29 14:14:17,622 INFO [train.py:1114] (0/4) Epoch 19, batch 1100, loss[loss=0.1692, simple_loss=0.2456, pruned_loss=0.03409, ctc_loss=0.06164, over 19574.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2592, pruned_loss=0.03954, ctc_loss=0.07404, over 3830280.28 frames. 
], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:14:26,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=244874.66666666666, ans=0.125 +2024-08-29 14:14:29,779 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:14:47,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=244981.33333333334, ans=0.0 +2024-08-29 14:15:05,486 INFO [train.py:1114] (0/4) Epoch 19, batch 1150, loss[loss=0.1576, simple_loss=0.2338, pruned_loss=0.02919, ctc_loss=0.05762, over 19596.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2593, pruned_loss=0.03968, ctc_loss=0.07437, over 3828089.38 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:15:06,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245088.0, ans=0.125 +2024-08-29 14:15:13,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=245088.0, ans=0.0 +2024-08-29 14:15:19,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=245141.33333333334, ans=0.0 +2024-08-29 14:15:23,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-29 14:15:42,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.485e+02 1.714e+02 2.095e+02 3.780e+02, threshold=3.428e+02, percent-clipped=1.0 +2024-08-29 14:15:44,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=245248.0, ans=0.0 +2024-08-29 14:16:19,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=245301.33333333334, ans=0.125 +2024-08-29 14:17:46,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245301.33333333334, ans=0.1 +2024-08-29 14:17:48,250 INFO [train.py:1114] (0/4) Epoch 19, batch 1200, loss[loss=0.1876, simple_loss=0.2612, pruned_loss=0.04104, ctc_loss=0.07963, over 19828.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2598, pruned_loss=0.03961, ctc_loss=0.07422, over 3823389.80 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 14:17:49,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245354.66666666666, ans=0.125 +2024-08-29 14:17:50,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.88 vs. limit=15.0 +2024-08-29 14:18:23,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=245514.66666666666, ans=0.0 +2024-08-29 14:18:31,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.05 vs. 
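limit=22.5

Each `Whitening` record compares a per-module statistic against a limit, as in the `metric=10.05 vs. limit=22.5` entry just above. One natural reading, sketched below under that assumption, is an eigenvalue-dispersion measure of the feature covariance within each channel group: exactly 1.0 when the features are perfectly "white", and growing as variance concentrates in a few directions. The formula here is illustrative; the exact metric, and whatever penalty applies when it exceeds the limit, live in the recipe's `scaling.py`.

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels). Returns 1.0 for perfectly 'white'
    features; larger values mean the covariance within each channel group
    is far from a multiple of the identity."""
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)  # (g, n, d)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n                  # (g, d, d) covariances
    eigs = torch.linalg.eigvalsh(cov)                # real eigenvalues, (g, d)
    # Dispersion of the eigenvalues: mean of squares over square of the mean.
    return (eigs.pow(2).mean() / eigs.mean().pow(2)).item()


feats = torch.randn(10000, 256)  # already white, so the metric is near 1.0
print(whitening_metric(feats, num_groups=1))
```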
+2024-08-29 14:18:35,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245621.33333333334, ans=0.1 +2024-08-29 14:18:36,310 INFO [train.py:1114] (0/4) Epoch 19, batch 1250, loss[loss=0.1912, simple_loss=0.264, pruned_loss=0.04391, ctc_loss=0.07655, over 19532.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2607, pruned_loss=0.04002, ctc_loss=0.07479, over 3841958.57 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:18:36,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=245621.33333333334, ans=0.025 +2024-08-29 14:18:48,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.07 vs. limit=12.0 +2024-08-29 14:18:54,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=245728.0, ans=0.2 +2024-08-29 14:19:06,855 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.390e+02 1.709e+02 2.116e+02 3.450e+02, threshold=3.419e+02, percent-clipped=1.0 +2024-08-29 14:19:17,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=245834.66666666666, ans=0.125 +2024-08-29 14:19:22,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=245834.66666666666, ans=0.025 +2024-08-29 14:19:27,923 INFO [train.py:1114] (0/4) Epoch 19, batch 1300, loss[loss=0.187, simple_loss=0.2647, pruned_loss=0.04073, ctc_loss=0.06982, over 18889.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.26, pruned_loss=0.03981, ctc_loss=0.07441, over 3846145.41 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:19:46,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0 +2024-08-29 14:19:50,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-08-29 14:19:50,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.19 vs. limit=15.0 +2024-08-29 14:19:54,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=246048.0, ans=0.0 +2024-08-29 14:19:54,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=246048.0, ans=0.125 +2024-08-29 14:20:34,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-29 14:20:35,712 INFO [train.py:1114] (0/4) Epoch 19, batch 1350, loss[loss=0.1674, simple_loss=0.2507, pruned_loss=0.03115, ctc_loss=0.05453, over 19769.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2594, pruned_loss=0.03949, ctc_loss=0.07374, over 3856948.23 frames.
], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:21:25,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=246261.33333333334, ans=0.025 +2024-08-29 14:21:35,082 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.448e+02 1.632e+02 2.120e+02 3.289e+02, threshold=3.263e+02, percent-clipped=0.0 +2024-08-29 14:21:51,122 INFO [train.py:1114] (0/4) Epoch 19, batch 1400, loss[loss=0.1447, simple_loss=0.2208, pruned_loss=0.02495, ctc_loss=0.0468, over 19657.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03951, ctc_loss=0.07367, over 3863914.06 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:21:59,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=246474.66666666666, ans=0.125 +2024-08-29 14:22:17,551 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:22:24,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-29 14:22:27,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=246581.33333333334, ans=0.2 +2024-08-29 14:22:34,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246634.66666666666, ans=0.0 +2024-08-29 14:22:45,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0 +2024-08-29 14:22:49,900 INFO [train.py:1114] (0/4) Epoch 19, batch 1450, loss[loss=0.1985, simple_loss=0.2709, pruned_loss=0.04576, ctc_loss=0.08663, over 19667.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.03978, ctc_loss=0.07395, over 3862670.75 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:22:51,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.71 vs. limit=15.0 +2024-08-29 14:22:54,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246688.0, ans=0.125 +2024-08-29 14:23:01,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=246741.33333333334, ans=0.025 +2024-08-29 14:24:01,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.408e+02 1.590e+02 1.931e+02 3.612e+02, threshold=3.180e+02, percent-clipped=1.0 +2024-08-29 14:24:03,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.29 vs. limit=15.0 +2024-08-29 14:24:11,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-08-29 14:24:17,517 INFO [train.py:1114] (0/4) Epoch 19, batch 1500, loss[loss=0.1866, simple_loss=0.266, pruned_loss=0.03821, ctc_loss=0.07721, over 19578.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2596, pruned_loss=0.03956, ctc_loss=0.0737, over 3862431.58 frames. 
], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:24:19,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.78 vs. limit=12.0 +2024-08-29 14:24:20,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=246954.66666666666, ans=0.2 +2024-08-29 14:24:24,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=246954.66666666666, ans=0.125 +2024-08-29 14:24:51,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=247008.0, ans=0.05 +2024-08-29 14:26:15,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=247114.66666666666, ans=0.2 +2024-08-29 14:26:16,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=6.0 +2024-08-29 14:26:25,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247168.0, ans=0.0 +2024-08-29 14:26:27,025 INFO [train.py:1114] (0/4) Epoch 19, batch 1550, loss[loss=0.2027, simple_loss=0.2799, pruned_loss=0.04535, ctc_loss=0.08726, over 19613.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2595, pruned_loss=0.03968, ctc_loss=0.0741, over 3847641.78 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:26:28,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.91 vs. limit=22.5 +2024-08-29 14:27:11,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=247328.0, ans=0.125 +2024-08-29 14:27:14,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=247328.0, ans=0.2 +2024-08-29 14:27:15,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=247328.0, ans=0.2 +2024-08-29 14:27:20,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.466e+02 1.727e+02 2.253e+02 4.003e+02, threshold=3.453e+02, percent-clipped=2.0 +2024-08-29 14:27:37,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=247488.0, ans=0.0 +2024-08-29 14:29:40,639 INFO [train.py:1114] (0/4) Epoch 19, batch 1600, loss[loss=0.1835, simple_loss=0.2666, pruned_loss=0.03535, ctc_loss=0.07421, over 19846.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2596, pruned_loss=0.03988, ctc_loss=0.07439, over 3836418.62 frames. 
], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:31:35,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=247541.33333333334, ans=0.0 +2024-08-29 14:31:44,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247594.66666666666, ans=0.1 +2024-08-29 14:31:52,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=247648.0, ans=0.125 +2024-08-29 14:31:52,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=247648.0, ans=0.2 +2024-08-29 14:31:53,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=247648.0, ans=0.2 +2024-08-29 14:32:04,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=247701.33333333334, ans=0.0 +2024-08-29 14:32:06,878 INFO [train.py:1114] (0/4) Epoch 19, batch 1650, loss[loss=0.2005, simple_loss=0.2797, pruned_loss=0.04417, ctc_loss=0.08215, over 19628.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2597, pruned_loss=0.04009, ctc_loss=0.07459, over 3833472.39 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:32:08,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=247754.66666666666, ans=0.0 +2024-08-29 14:32:10,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247754.66666666666, ans=0.1 +2024-08-29 14:32:19,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.48 vs. limit=22.5 +2024-08-29 14:32:21,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.80 vs. limit=15.0 +2024-08-29 14:32:29,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247861.33333333334, ans=0.125 +2024-08-29 14:32:37,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.670e+02 2.010e+02 2.374e+02 4.027e+02, threshold=4.020e+02, percent-clipped=3.0 +2024-08-29 14:32:56,803 INFO [train.py:1114] (0/4) Epoch 19, batch 1700, loss[loss=0.1772, simple_loss=0.2425, pruned_loss=0.04077, ctc_loss=0.07559, over 19657.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2594, pruned_loss=0.03977, ctc_loss=0.0741, over 3847350.29 frames. 
], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:33:02,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=248021.33333333334, ans=0.09899494936611666 +2024-08-29 14:33:51,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248181.33333333334, ans=0.1 +2024-08-29 14:33:52,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=248181.33333333334, ans=0.0 +2024-08-29 14:33:56,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248234.66666666666, ans=0.1 +2024-08-29 14:34:04,342 INFO [train.py:1114] (0/4) Epoch 19, batch 1750, loss[loss=0.1767, simple_loss=0.2414, pruned_loss=0.04103, ctc_loss=0.07469, over 19654.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2592, pruned_loss=0.0397, ctc_loss=0.07404, over 3852186.62 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:34:38,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=248288.0, ans=0.125 +2024-08-29 14:34:42,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=248341.33333333334, ans=0.125 +2024-08-29 14:34:43,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.98 vs. limit=22.5 +2024-08-29 14:35:02,762 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.489e+02 1.824e+02 2.243e+02 3.708e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-08-29 14:35:10,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248501.33333333334, ans=0.125 +2024-08-29 14:35:16,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248554.66666666666, ans=0.125 +2024-08-29 14:35:17,401 INFO [train.py:1114] (0/4) Epoch 19, batch 1800, loss[loss=0.1805, simple_loss=0.2632, pruned_loss=0.03574, ctc_loss=0.06581, over 19604.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2594, pruned_loss=0.03943, ctc_loss=0.07384, over 3854361.58 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-29 14:35:19,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248554.66666666666, ans=0.125 +2024-08-29 14:35:33,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248608.0, ans=0.1 +2024-08-29 14:35:34,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=248661.33333333334, ans=0.04949747468305833 +2024-08-29 14:35:53,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.44 vs. limit=15.0 +2024-08-29 14:36:01,492 INFO [train.py:1114] (0/4) Epoch 19, batch 1850, loss[loss=0.1985, simple_loss=0.2757, pruned_loss=0.04404, ctc_loss=0.08284, over 19595.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2596, pruned_loss=0.03952, ctc_loss=0.07391, over 3857567.59 frames. 
], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:37:39,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248821.33333333334, ans=0.1 +2024-08-29 14:40:01,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=248981.33333333334, ans=0.125 +2024-08-29 14:40:02,621 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.549e+02 1.911e+02 2.362e+02 1.156e+03, threshold=3.822e+02, percent-clipped=7.0 +2024-08-29 14:40:13,545 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=15.0 +2024-08-29 14:40:18,263 INFO [train.py:1114] (0/4) Epoch 19, batch 1900, loss[loss=0.1935, simple_loss=0.2812, pruned_loss=0.03852, ctc_loss=0.07163, over 19668.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2603, pruned_loss=0.03962, ctc_loss=0.07404, over 3862216.50 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:40:24,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249088.0, ans=0.1 +2024-08-29 14:40:29,890 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:40:44,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=249248.0, ans=0.0 +2024-08-29 14:40:48,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=249248.0, ans=0.0 +2024-08-29 14:40:50,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-08-29 14:40:55,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249301.33333333334, ans=0.1 +2024-08-29 14:40:58,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249301.33333333334, ans=0.125 +2024-08-29 14:41:02,133 INFO [train.py:1114] (0/4) Epoch 19, batch 1950, loss[loss=0.1636, simple_loss=0.2433, pruned_loss=0.03073, ctc_loss=0.05603, over 19586.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.261, pruned_loss=0.03989, ctc_loss=0.07442, over 3871303.88 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-29 14:41:04,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-29 14:41:11,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.66 vs. 
limit=22.5 +2024-08-29 14:41:16,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=249408.0, ans=0.125 +2024-08-29 14:41:25,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=249461.33333333334, ans=0.0 +2024-08-29 14:41:31,730 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.363e+02 1.491e+02 1.694e+02 3.301e+02, threshold=2.983e+02, percent-clipped=0.0 +2024-08-29 14:41:46,957 INFO [train.py:1114] (0/4) Epoch 19, batch 2000, loss[loss=0.1561, simple_loss=0.2267, pruned_loss=0.03058, ctc_loss=0.06082, over 19614.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2615, pruned_loss=0.0402, ctc_loss=0.07495, over 3856103.74 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:42:11,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=249728.0, ans=0.125 +2024-08-29 14:42:12,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249781.33333333334, ans=0.1 +2024-08-29 14:42:13,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 14:42:17,290 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:42:21,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=249834.66666666666, ans=0.0 +2024-08-29 14:42:21,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=249834.66666666666, ans=0.2 +2024-08-29 14:42:31,042 INFO [train.py:1114] (0/4) Epoch 19, batch 2050, loss[loss=0.1642, simple_loss=0.2339, pruned_loss=0.03478, ctc_loss=0.06248, over 19697.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2601, pruned_loss=0.0398, ctc_loss=0.07449, over 3852752.59 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:42:32,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-08-29 14:42:37,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=249888.0, ans=0.125 +2024-08-29 14:42:46,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.61 vs. limit=15.0 +2024-08-29 14:42:50,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. 
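limit=15.0

In the `train.py:1114` records, `loss[..., over N frames.]` describes the current batch (for example loss 0.1697 over 19568 frames at batch 2150 below), while `tot_loss[..., over M frames.]` is a running figure whose frame count grows through the epoch. That shape is consistent with a frames-weighted running average, sketched below; the real training script keeps these statistics in a richer tracker object, so treat this as a reading of the log format rather than its implementation.

```python
class RunningLoss:
    """Frames-weighted running averages shaped like the tot_loss records."""

    def __init__(self):
        self.sums = {}     # metric name -> frames-weighted sum
        self.frames = 0.0  # total frames seen so far this epoch

    def update(self, batch_losses: dict, batch_frames: float) -> dict:
        self.frames += batch_frames
        for name, value in batch_losses.items():
            self.sums[name] = self.sums.get(name, 0.0) + value * batch_frames
        # The running average weights each batch by its number of frames.
        return {n: s / self.frames for n, s in self.sums.items()}


tracker = RunningLoss()
# First batch: the running averages simply equal the batch losses.
print(tracker.update({"loss": 0.1697, "simple_loss": 0.2511,
                      "ctc_loss": 0.05817}, batch_frames=19568.0))
```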
+2024-08-29 14:42:57,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=250048.0, ans=0.0 +2024-08-29 14:43:00,927 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.385e+02 1.662e+02 2.291e+02 4.175e+02, threshold=3.324e+02, percent-clipped=7.0 +2024-08-29 14:44:00,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=250101.33333333334, ans=0.0 +2024-08-29 14:44:08,822 INFO [train.py:1114] (0/4) Epoch 19, batch 2100, loss[loss=0.1852, simple_loss=0.2653, pruned_loss=0.03899, ctc_loss=0.06793, over 19782.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2592, pruned_loss=0.03922, ctc_loss=0.0734, over 3859487.84 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:13,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0 +2024-08-29 14:45:13,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250208.0, ans=0.0 +2024-08-29 14:45:15,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250208.0, ans=0.1 +2024-08-29 14:45:24,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=250261.33333333334, ans=0.07 +2024-08-29 14:45:49,974 INFO [train.py:1114] (0/4) Epoch 19, batch 2150, loss[loss=0.1697, simple_loss=0.2511, pruned_loss=0.0325, ctc_loss=0.05817, over 19568.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2587, pruned_loss=0.03918, ctc_loss=0.073, over 3870372.76 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:58,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-29 14:46:02,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-29 14:46:08,910 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.44 vs. limit=5.0 +2024-08-29 14:46:20,185 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.504e+02 1.955e+02 2.570e+02 4.900e+02, threshold=3.910e+02, percent-clipped=8.0 +2024-08-29 14:46:25,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=250634.66666666666, ans=0.2 +2024-08-29 14:46:25,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.79 vs. limit=10.0 +2024-08-29 14:46:28,974 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:46:33,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.14 vs. limit=15.0 +2024-08-29 14:46:35,220 INFO [train.py:1114] (0/4) Epoch 19, batch 2200, loss[loss=0.1902, simple_loss=0.2686, pruned_loss=0.04098, ctc_loss=0.07442, over 19587.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2588, pruned_loss=0.03917, ctc_loss=0.07292, over 3869227.07 frames.
], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:46:38,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=250688.0, ans=0.0 +2024-08-29 14:46:47,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=250741.33333333334, ans=0.2 +2024-08-29 14:46:58,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.17 vs. limit=15.0 +2024-08-29 14:47:35,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=250794.66666666666, ans=0.0 +2024-08-29 14:47:36,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250848.0, ans=0.125 +2024-08-29 14:47:36,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=250848.0, ans=0.125 +2024-08-29 14:47:53,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=250901.33333333334, ans=0.125 +2024-08-29 14:47:53,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250901.33333333334, ans=0.0 +2024-08-29 14:47:54,986 INFO [train.py:1114] (0/4) Epoch 19, batch 2250, loss[loss=0.1901, simple_loss=0.2713, pruned_loss=0.03899, ctc_loss=0.07742, over 19623.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2593, pruned_loss=0.03926, ctc_loss=0.07305, over 3869499.94 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:48:05,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=251008.0, ans=0.125 +2024-08-29 14:48:28,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=251114.66666666666, ans=0.0 +2024-08-29 14:48:29,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=251114.66666666666, ans=0.2 +2024-08-29 14:48:32,691 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.465e+02 1.864e+02 2.416e+02 3.726e+02, threshold=3.728e+02, percent-clipped=0.0 +2024-08-29 14:48:36,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0 +2024-08-29 14:48:47,065 INFO [train.py:1114] (0/4) Epoch 19, batch 2300, loss[loss=0.1685, simple_loss=0.2403, pruned_loss=0.03455, ctc_loss=0.06866, over 19501.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2584, pruned_loss=0.03918, ctc_loss=0.07312, over 3863587.28 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:48:55,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.30 vs. limit=12.0 +2024-08-29 14:48:57,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=251274.66666666666, ans=0.125 +2024-08-29 14:49:22,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.39 vs. 
limit=15.0 +2024-08-29 14:49:22,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=251434.66666666666, ans=0.125 +2024-08-29 14:49:30,459 INFO [train.py:1114] (0/4) Epoch 19, batch 2350, loss[loss=0.1954, simple_loss=0.2715, pruned_loss=0.0427, ctc_loss=0.08496, over 19649.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2585, pruned_loss=0.03938, ctc_loss=0.07349, over 3865582.44 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:49:30,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-08-29 14:49:31,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.96 vs. limit=12.0 +2024-08-29 14:49:34,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251488.0, ans=0.1 +2024-08-29 14:49:56,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=251648.0, ans=0.09899494936611666 +2024-08-29 14:49:59,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251648.0, ans=0.1 +2024-08-29 14:49:59,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=251648.0, ans=0.125 +2024-08-29 14:50:00,294 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.463e+02 1.784e+02 2.534e+02 4.062e+02, threshold=3.568e+02, percent-clipped=2.0 +2024-08-29 14:50:20,237 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:50:23,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=251754.66666666666, ans=0.125 +2024-08-29 14:50:24,249 INFO [train.py:1114] (0/4) Epoch 19, batch 2400, loss[loss=0.2113, simple_loss=0.2746, pruned_loss=0.05397, ctc_loss=0.1002, over 19372.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2607, pruned_loss=0.04026, ctc_loss=0.07499, over 3861154.57 frames. ], batch size: 67, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:50:25,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=251754.66666666666, ans=10.0 +2024-08-29 14:50:38,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.67 vs. limit=15.0 +2024-08-29 14:50:42,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=251861.33333333334, ans=0.125 +2024-08-29 14:50:46,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=251861.33333333334, ans=0.125 +2024-08-29 14:50:47,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251861.33333333334, ans=0.1 +2024-08-29 14:51:26,015 INFO [train.py:1114] (0/4) Epoch 19, batch 2450, loss[loss=0.2361, simple_loss=0.2875, pruned_loss=0.06648, ctc_loss=0.1291, over 12910.00 frames. 
], tot_loss[loss=0.1903, simple_loss=0.2641, pruned_loss=0.04242, ctc_loss=0.07915, over 3737846.95 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 14:51:31,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.22 vs. limit=15.0 +2024-08-29 14:51:35,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=252074.66666666666, ans=0.0 +2024-08-29 14:51:37,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=252074.66666666666, ans=0.025 +2024-08-29 14:51:56,009 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.531e+02 1.709e+02 1.904e+02 2.805e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-29 14:51:59,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.23 vs. limit=15.0 +2024-08-29 14:52:02,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5 +2024-08-29 14:52:03,531 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-19.pt +2024-08-29 14:53:10,849 INFO [train.py:1114] (0/4) Epoch 20, batch 0, loss[loss=0.1779, simple_loss=0.2426, pruned_loss=0.04128, ctc_loss=0.0767, over 19809.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2426, pruned_loss=0.04128, ctc_loss=0.0767, over 19809.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 14:53:10,850 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-29 14:56:28,494 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0206, 3.7356, 3.5276, 3.5667], device='cuda:0') +2024-08-29 14:59:30,915 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1707, simple_loss=0.2632, pruned_loss=0.02916, ctc_loss=0.04979, over 944034.00 frames. +2024-08-29 14:59:30,916 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 12890MB +2024-08-29 14:59:33,068 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:59:43,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252288.0, ans=0.1 +2024-08-29 14:59:50,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.17 vs. limit=10.0 +2024-08-29 15:00:12,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=252448.0, ans=0.05 +2024-08-29 15:00:13,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=252448.0, ans=0.0 +2024-08-29 15:00:18,071 INFO [train.py:1114] (0/4) Epoch 20, batch 50, loss[loss=0.1626, simple_loss=0.2378, pruned_loss=0.03168, ctc_loss=0.05993, over 19720.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2601, pruned_loss=0.03979, ctc_loss=0.07431, over 843606.59 frames. 
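], batch size: 47, lr: 7.58e-03, grad_scale: 32.0

The records above capture the epoch boundary: `checkpoint.py` saves `epoch-19.pt` under the experiment directory, and at "Epoch 20, batch 0" the script pauses training to compute a validation loss (0.1707 over 944034 frames) and report the peak CUDA memory (12890MB). Below is a self-contained sketch of that end-of-epoch sequence, with a toy linear model and MSE loss standing in for the real Zipformer CTC setup; everything in it is illustrative.

```python
import logging

import torch
from torch import nn
from torch.nn.functional import mse_loss  # toy criterion, not the CTC loss

logging.basicConfig(level=logging.INFO)

model = nn.Linear(4, 1)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
train_batches = [(torch.randn(8, 4), torch.randn(8, 1)) for _ in range(10)]
valid_batches = [(torch.randn(8, 4), torch.randn(8, 1)) for _ in range(2)]

for epoch in range(19, 21):
    for x, y in train_batches:  # one "epoch" of updates
        optimizer.zero_grad()
        mse_loss(model(x), y).backward()
        optimizer.step()

    path = f"exp/epoch-{epoch}.pt"  # hypothetical experiment directory
    logging.info(f"Saving checkpoint to {path}")
    # torch.save({"model": model.state_dict(), "epoch": epoch}, path)

    # Validation at the epoch boundary, mirroring the "Computing validation
    # loss" record above: no gradients, model in eval mode.
    model.eval()
    with torch.no_grad():
        val = sum(mse_loss(model(x), y).item()
                  for x, y in valid_batches) / len(valid_batches)
    model.train()
    logging.info(f"validation: loss={val:.4f}")
```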
], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 15:00:40,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.25 vs. limit=15.0 +2024-08-29 15:01:11,672 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.410e+02 1.638e+02 1.971e+02 2.993e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 15:01:16,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=252768.0, ans=0.0 +2024-08-29 15:01:17,296 INFO [train.py:1114] (0/4) Epoch 20, batch 100, loss[loss=0.1638, simple_loss=0.24, pruned_loss=0.03175, ctc_loss=0.06035, over 19733.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2618, pruned_loss=0.0404, ctc_loss=0.07511, over 1498532.54 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:01:33,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252821.33333333334, ans=0.125 +2024-08-29 15:01:38,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=252874.66666666666, ans=0.2 +2024-08-29 15:01:48,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=252928.0, ans=0.0 +2024-08-29 15:01:52,566 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:01:53,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252928.0, ans=0.1 +2024-08-29 15:01:56,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=252981.33333333334, ans=0.04949747468305833 +2024-08-29 15:02:05,299 INFO [train.py:1114] (0/4) Epoch 20, batch 150, loss[loss=0.1758, simple_loss=0.2414, pruned_loss=0.04052, ctc_loss=0.07289, over 19685.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2592, pruned_loss=0.03963, ctc_loss=0.07383, over 2026778.29 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:02:05,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253034.66666666666, ans=0.0 +2024-08-29 15:02:22,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=253088.0, ans=0.0 +2024-08-29 15:02:25,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253141.33333333334, ans=0.125 +2024-08-29 15:02:29,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.94 vs. limit=12.0 +2024-08-29 15:02:42,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=253194.66666666666, ans=0.0 +2024-08-29 15:02:47,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.385e+02 1.535e+02 1.856e+02 3.405e+02, threshold=3.069e+02, percent-clipped=1.0 +2024-08-29 15:03:24,275 INFO [train.py:1114] (0/4) Epoch 20, batch 200, loss[loss=0.1955, simple_loss=0.2748, pruned_loss=0.04239, ctc_loss=0.07838, over 18195.00 frames. 
], tot_loss[loss=0.1829, simple_loss=0.2579, pruned_loss=0.03931, ctc_loss=0.07324, over 2434933.67 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:03:31,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=253301.33333333334, ans=0.125 +2024-08-29 15:03:44,750 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:03:53,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=253461.33333333334, ans=0.0 +2024-08-29 15:03:55,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253461.33333333334, ans=0.1 +2024-08-29 15:03:58,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=253461.33333333334, ans=0.0 +2024-08-29 15:04:00,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-29 15:04:02,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253514.66666666666, ans=0.1 +2024-08-29 15:04:14,359 INFO [train.py:1114] (0/4) Epoch 20, batch 250, loss[loss=0.1873, simple_loss=0.2692, pruned_loss=0.03829, ctc_loss=0.07194, over 19420.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2579, pruned_loss=0.03898, ctc_loss=0.07255, over 2755716.60 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:04:14,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=253568.0, ans=0.125 +2024-08-29 15:04:24,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.64 vs. limit=22.5 +2024-08-29 15:04:34,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 15:05:14,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253674.66666666666, ans=0.1 +2024-08-29 15:05:19,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=253728.0, ans=0.2 +2024-08-29 15:06:16,395 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.436e+02 1.637e+02 2.276e+02 3.998e+02, threshold=3.274e+02, percent-clipped=8.0 +2024-08-29 15:06:18,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253781.33333333334, ans=0.1 +2024-08-29 15:06:21,872 INFO [train.py:1114] (0/4) Epoch 20, batch 300, loss[loss=0.2097, simple_loss=0.2837, pruned_loss=0.04939, ctc_loss=0.09219, over 19549.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2578, pruned_loss=0.03889, ctc_loss=0.07232, over 3001088.90 frames. 
], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:06:55,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253994.66666666666, ans=0.125 +2024-08-29 15:07:08,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=254048.0, ans=0.125 +2024-08-29 15:07:35,035 INFO [train.py:1114] (0/4) Epoch 20, batch 350, loss[loss=0.1857, simple_loss=0.2486, pruned_loss=0.04537, ctc_loss=0.08024, over 19726.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2585, pruned_loss=0.0391, ctc_loss=0.07278, over 3190959.39 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:07:36,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254101.33333333334, ans=0.1 +2024-08-29 15:07:49,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254154.66666666666, ans=0.125 +2024-08-29 15:07:53,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=254208.0, ans=0.05 +2024-08-29 15:08:30,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=254208.0, ans=0.125 +2024-08-29 15:08:33,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254208.0, ans=0.125 +2024-08-29 15:09:24,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=254261.33333333334, ans=0.125 +2024-08-29 15:09:32,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. limit=10.0 +2024-08-29 15:09:32,440 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.477e+02 1.781e+02 2.345e+02 4.390e+02, threshold=3.562e+02, percent-clipped=4.0 +2024-08-29 15:09:40,128 INFO [train.py:1114] (0/4) Epoch 20, batch 400, loss[loss=0.1838, simple_loss=0.2581, pruned_loss=0.04033, ctc_loss=0.07195, over 19510.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2581, pruned_loss=0.0389, ctc_loss=0.07227, over 3342257.31 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:09:48,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=254368.0, ans=0.125 +2024-08-29 15:10:09,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=254474.66666666666, ans=0.125 +2024-08-29 15:10:26,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=254581.33333333334, ans=0.0 +2024-08-29 15:10:29,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.18 vs. limit=10.0 +2024-08-29 15:10:31,166 INFO [train.py:1114] (0/4) Epoch 20, batch 450, loss[loss=0.1746, simple_loss=0.2648, pruned_loss=0.03082, ctc_loss=0.05707, over 19600.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2581, pruned_loss=0.03881, ctc_loss=0.07209, over 3451494.20 frames. 
], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 15:10:39,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254688.0, ans=0.125 +2024-08-29 15:10:42,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.94 vs. limit=5.0 +2024-08-29 15:10:52,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.36 vs. limit=22.5 +2024-08-29 15:11:12,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.415e+02 1.633e+02 1.920e+02 3.508e+02, threshold=3.267e+02, percent-clipped=0.0 +2024-08-29 15:11:16,892 INFO [train.py:1114] (0/4) Epoch 20, batch 500, loss[loss=0.1883, simple_loss=0.2629, pruned_loss=0.04103, ctc_loss=0.07909, over 19680.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2574, pruned_loss=0.03853, ctc_loss=0.07173, over 3545952.35 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:11:26,750 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0 +2024-08-29 15:11:41,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255008.0, ans=0.1 +2024-08-29 15:11:48,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255061.33333333334, ans=0.1 +2024-08-29 15:12:06,361 INFO [train.py:1114] (0/4) Epoch 20, batch 550, loss[loss=0.1972, simple_loss=0.2736, pruned_loss=0.04373, ctc_loss=0.08339, over 19253.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2576, pruned_loss=0.03854, ctc_loss=0.07186, over 3608173.82 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:12:30,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.90 vs. limit=15.0 +2024-08-29 15:12:33,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255274.66666666666, ans=0.1 +2024-08-29 15:13:44,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255328.0, ans=0.125 +2024-08-29 15:13:45,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=255328.0, ans=0.035 +2024-08-29 15:14:00,952 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.409e+02 1.685e+02 2.053e+02 3.324e+02, threshold=3.369e+02, percent-clipped=1.0 +2024-08-29 15:14:17,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-29 15:14:18,263 INFO [train.py:1114] (0/4) Epoch 20, batch 600, loss[loss=0.1913, simple_loss=0.2732, pruned_loss=0.04039, ctc_loss=0.07158, over 19331.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2579, pruned_loss=0.03845, ctc_loss=0.07189, over 3665746.35 frames. 
], batch size: 67, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:14:41,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=255541.33333333334, ans=0.0 +2024-08-29 15:14:49,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255594.66666666666, ans=0.1 +2024-08-29 15:14:49,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=255594.66666666666, ans=0.0 +2024-08-29 15:14:51,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0 +2024-08-29 15:15:06,092 INFO [train.py:1114] (0/4) Epoch 20, batch 650, loss[loss=0.1689, simple_loss=0.2534, pruned_loss=0.03025, ctc_loss=0.05959, over 19755.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2576, pruned_loss=0.03842, ctc_loss=0.07203, over 3716724.37 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:15:14,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-08-29 15:15:21,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255754.66666666666, ans=0.125 +2024-08-29 15:15:47,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=255914.66666666666, ans=0.0 +2024-08-29 15:15:49,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.451e+02 1.804e+02 2.620e+02 6.000e+02, threshold=3.609e+02, percent-clipped=12.0 +2024-08-29 15:15:54,172 INFO [train.py:1114] (0/4) Epoch 20, batch 700, loss[loss=0.182, simple_loss=0.2596, pruned_loss=0.03771, ctc_loss=0.07227, over 19713.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2585, pruned_loss=0.03868, ctc_loss=0.07248, over 3748489.04 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:15:58,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=255968.0, ans=0.2 +2024-08-29 15:15:59,059 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/checkpoint-48000.pt +2024-08-29 15:16:07,717 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:16:20,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=256074.66666666666, ans=0.125 +2024-08-29 15:16:38,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=256181.33333333334, ans=0.025 +2024-08-29 15:16:39,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0 +2024-08-29 15:16:40,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-29 15:16:44,247 INFO [train.py:1114] (0/4) Epoch 20, batch 750, loss[loss=0.1743, simple_loss=0.2579, pruned_loss=0.03276, ctc_loss=0.06278, over 19516.00 frames. 
], tot_loss[loss=0.1817, simple_loss=0.258, pruned_loss=0.03839, ctc_loss=0.0718, over 3775536.21 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 15:16:45,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=256234.66666666666, ans=0.125 +2024-08-29 15:17:00,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256288.0, ans=0.1 +2024-08-29 15:17:11,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-29 15:17:20,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=256394.66666666666, ans=22.5 +2024-08-29 15:17:27,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=256448.0, ans=0.125 +2024-08-29 15:17:29,502 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.446e+02 1.840e+02 2.370e+02 3.601e+02, threshold=3.680e+02, percent-clipped=0.0 +2024-08-29 15:17:34,172 INFO [train.py:1114] (0/4) Epoch 20, batch 800, loss[loss=0.1896, simple_loss=0.2548, pruned_loss=0.04495, ctc_loss=0.08615, over 19816.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2581, pruned_loss=0.03846, ctc_loss=0.07202, over 3797324.35 frames. ], batch size: 49, lr: 7.52e-03, grad_scale: 32.0 +2024-08-29 15:17:55,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=256608.0, ans=0.125 +2024-08-29 15:17:57,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=256608.0, ans=0.125 +2024-08-29 15:18:03,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=256661.33333333334, ans=0.0 +2024-08-29 15:18:17,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-08-29 15:18:19,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=256768.0, ans=0.2 +2024-08-29 15:18:20,533 INFO [train.py:1114] (0/4) Epoch 20, batch 850, loss[loss=0.1876, simple_loss=0.2651, pruned_loss=0.04032, ctc_loss=0.07333, over 19647.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2577, pruned_loss=0.03842, ctc_loss=0.07181, over 3816331.02 frames. 
], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:18:23,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=256768.0, ans=15.0 +2024-08-29 15:18:36,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=256821.33333333334, ans=0.2 +2024-08-29 15:18:58,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=256928.0, ans=0.125 +2024-08-29 15:19:04,730 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.415e+02 1.606e+02 2.010e+02 3.804e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-29 15:19:09,480 INFO [train.py:1114] (0/4) Epoch 20, batch 900, loss[loss=0.1587, simple_loss=0.2277, pruned_loss=0.03263, ctc_loss=0.06116, over 19410.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.258, pruned_loss=0.03888, ctc_loss=0.0726, over 3819704.95 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:19:10,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.02 vs. limit=6.0 +2024-08-29 15:19:12,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257034.66666666666, ans=0.0 +2024-08-29 15:19:29,642 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:19:59,864 INFO [train.py:1114] (0/4) Epoch 20, batch 950, loss[loss=0.1673, simple_loss=0.2359, pruned_loss=0.03547, ctc_loss=0.06908, over 19494.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2582, pruned_loss=0.03898, ctc_loss=0.0728, over 3820524.61 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:20:03,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=257301.33333333334, ans=0.125 +2024-08-29 15:20:26,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=257408.0, ans=22.5 +2024-08-29 15:20:39,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.21 vs. limit=12.0 +2024-08-29 15:20:42,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257514.66666666666, ans=0.125 +2024-08-29 15:20:43,844 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.399e+02 1.695e+02 2.094e+02 3.389e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-29 15:20:48,480 INFO [train.py:1114] (0/4) Epoch 20, batch 1000, loss[loss=0.1614, simple_loss=0.239, pruned_loss=0.03074, ctc_loss=0.05609, over 19855.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2589, pruned_loss=0.03914, ctc_loss=0.07331, over 3817259.76 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:20:53,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=257568.0, ans=0.0 +2024-08-29 15:21:02,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.13 vs. 
limit=6.0 +2024-08-29 15:21:08,638 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.56 vs. limit=10.0 +2024-08-29 15:21:21,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=257728.0, ans=0.025 +2024-08-29 15:21:23,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257728.0, ans=0.125 +2024-08-29 15:21:24,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=257728.0, ans=0.2 +2024-08-29 15:21:36,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=257834.66666666666, ans=0.125 +2024-08-29 15:21:36,907 INFO [train.py:1114] (0/4) Epoch 20, batch 1050, loss[loss=0.1712, simple_loss=0.2537, pruned_loss=0.03211, ctc_loss=0.06103, over 19842.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2582, pruned_loss=0.03886, ctc_loss=0.07272, over 3823486.45 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:21:44,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=257834.66666666666, ans=0.2 +2024-08-29 15:21:45,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=257888.0, ans=0.0 +2024-08-29 15:21:47,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=257888.0, ans=0.04949747468305833 +2024-08-29 15:21:51,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=257888.0, ans=0.125 +2024-08-29 15:22:04,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=257941.33333333334, ans=0.0 +2024-08-29 15:22:20,122 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.405e+02 1.673e+02 2.019e+02 3.302e+02, threshold=3.347e+02, percent-clipped=0.0 +2024-08-29 15:22:24,706 INFO [train.py:1114] (0/4) Epoch 20, batch 1100, loss[loss=0.1839, simple_loss=0.2553, pruned_loss=0.04126, ctc_loss=0.07488, over 19588.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2582, pruned_loss=0.03868, ctc_loss=0.07232, over 3831554.41 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:22:57,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=258261.33333333334, ans=0.125 +2024-08-29 15:23:14,525 INFO [train.py:1114] (0/4) Epoch 20, batch 1150, loss[loss=0.1818, simple_loss=0.2613, pruned_loss=0.03655, ctc_loss=0.07319, over 19585.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2579, pruned_loss=0.03856, ctc_loss=0.07215, over 3828701.45 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:23:18,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=258368.0, ans=0.025 +2024-08-29 15:23:20,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.46 vs. 
limit=15.0 +2024-08-29 15:23:27,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=258421.33333333334, ans=0.0 +2024-08-29 15:23:29,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=258421.33333333334, ans=0.2 +2024-08-29 15:23:35,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258474.66666666666, ans=0.0 +2024-08-29 15:23:58,286 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.386e+02 1.703e+02 2.133e+02 3.069e+02, threshold=3.407e+02, percent-clipped=0.0 +2024-08-29 15:24:02,914 INFO [train.py:1114] (0/4) Epoch 20, batch 1200, loss[loss=0.2053, simple_loss=0.2835, pruned_loss=0.04697, ctc_loss=0.08297, over 19840.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2588, pruned_loss=0.03889, ctc_loss=0.07271, over 3825325.48 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:24:18,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=258688.0, ans=10.0 +2024-08-29 15:24:49,723 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:24:50,406 INFO [train.py:1114] (0/4) Epoch 20, batch 1250, loss[loss=0.1994, simple_loss=0.2742, pruned_loss=0.04458, ctc_loss=0.08859, over 19541.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2589, pruned_loss=0.0389, ctc_loss=0.07252, over 3843320.66 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:24:50,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=258901.33333333334, ans=0.125 +2024-08-29 15:24:58,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=258901.33333333334, ans=0.2 +2024-08-29 15:25:15,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259008.0, ans=0.125 +2024-08-29 15:25:20,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259061.33333333334, ans=0.125 +2024-08-29 15:25:25,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259061.33333333334, ans=0.125 +2024-08-29 15:25:35,741 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.412e+02 1.655e+02 2.039e+02 3.415e+02, threshold=3.310e+02, percent-clipped=2.0 +2024-08-29 15:25:40,335 INFO [train.py:1114] (0/4) Epoch 20, batch 1300, loss[loss=0.1937, simple_loss=0.2707, pruned_loss=0.04221, ctc_loss=0.08047, over 18810.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2584, pruned_loss=0.03879, ctc_loss=0.07232, over 3844907.25 frames. 
], batch size: 76, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:25:42,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=259168.0, ans=0.125 +2024-08-29 15:25:44,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259168.0, ans=0.125 +2024-08-29 15:25:51,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=259221.33333333334, ans=0.0 +2024-08-29 15:26:01,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=259274.66666666666, ans=0.025 +2024-08-29 15:26:04,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.98 vs. limit=15.0 +2024-08-29 15:26:20,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=259381.33333333334, ans=0.0 +2024-08-29 15:26:26,568 INFO [train.py:1114] (0/4) Epoch 20, batch 1350, loss[loss=0.171, simple_loss=0.2511, pruned_loss=0.0327, ctc_loss=0.06385, over 19761.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2578, pruned_loss=0.03844, ctc_loss=0.07171, over 3855646.42 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:27:21,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=259488.0, ans=0.0 +2024-08-29 15:27:22,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259488.0, ans=0.125 +2024-08-29 15:27:24,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259541.33333333334, ans=0.125 +2024-08-29 15:27:31,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.15 vs. limit=15.0 +2024-08-29 15:27:35,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=259594.66666666666, ans=0.2 +2024-08-29 15:27:36,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=259594.66666666666, ans=0.125 +2024-08-29 15:27:42,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259648.0, ans=0.125 +2024-08-29 15:27:47,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.431e+02 1.643e+02 2.121e+02 3.858e+02, threshold=3.286e+02, percent-clipped=2.0 +2024-08-29 15:27:51,757 INFO [train.py:1114] (0/4) Epoch 20, batch 1400, loss[loss=0.151, simple_loss=0.2236, pruned_loss=0.02771, ctc_loss=0.05742, over 19682.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2578, pruned_loss=0.03851, ctc_loss=0.07164, over 3862772.72 frames. 
], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:27:54,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259701.33333333334, ans=0.1 +2024-08-29 15:27:54,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=259701.33333333334, ans=0.025 +2024-08-29 15:28:14,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. limit=10.0 +2024-08-29 15:28:18,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259808.0, ans=0.1 +2024-08-29 15:30:05,109 INFO [train.py:1114] (0/4) Epoch 20, batch 1450, loss[loss=0.1881, simple_loss=0.2646, pruned_loss=0.04006, ctc_loss=0.07851, over 19667.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2583, pruned_loss=0.03874, ctc_loss=0.07236, over 3861785.17 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:30:10,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=259968.0, ans=0.2 +2024-08-29 15:30:11,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=259968.0, ans=0.07 +2024-08-29 15:30:11,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=259968.0, ans=0.125 +2024-08-29 15:30:16,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-29 15:30:23,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=260021.33333333334, ans=0.0 +2024-08-29 15:30:24,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=260021.33333333334, ans=0.0 +2024-08-29 15:30:28,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 15:30:34,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260128.0, ans=0.1 +2024-08-29 15:30:41,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0 +2024-08-29 15:30:47,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=260181.33333333334, ans=0.0 +2024-08-29 15:30:48,908 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.404e+02 1.559e+02 1.995e+02 3.603e+02, threshold=3.118e+02, percent-clipped=1.0 +2024-08-29 15:30:53,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0 +2024-08-29 15:30:53,757 INFO [train.py:1114] (0/4) Epoch 20, batch 1500, loss[loss=0.2007, simple_loss=0.2754, pruned_loss=0.04547, ctc_loss=0.0877, over 19584.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2587, pruned_loss=0.0388, ctc_loss=0.07261, over 3861372.31 frames. 
], batch size: 57, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:30:54,463 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. limit=15.0 +2024-08-29 15:30:59,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=260234.66666666666, ans=0.125 +2024-08-29 15:31:08,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=15.0 +2024-08-29 15:31:09,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=15.0 +2024-08-29 15:31:14,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-08-29 15:31:19,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260341.33333333334, ans=0.125 +2024-08-29 15:31:24,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.65 vs. limit=15.0 +2024-08-29 15:31:30,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=260394.66666666666, ans=0.125 +2024-08-29 15:31:40,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260448.0, ans=0.1 +2024-08-29 15:31:42,534 INFO [train.py:1114] (0/4) Epoch 20, batch 1550, loss[loss=0.2033, simple_loss=0.281, pruned_loss=0.04687, ctc_loss=0.07998, over 19610.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2586, pruned_loss=0.03894, ctc_loss=0.07284, over 3846392.59 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 15:31:43,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=260501.33333333334, ans=0.2 +2024-08-29 15:31:50,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=260501.33333333334, ans=0.125 +2024-08-29 15:31:56,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260554.66666666666, ans=0.125 +2024-08-29 15:32:10,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=260661.33333333334, ans=0.125 +2024-08-29 15:32:18,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=260661.33333333334, ans=0.125 +2024-08-29 15:32:26,821 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.439e+02 1.834e+02 2.160e+02 3.604e+02, threshold=3.667e+02, percent-clipped=4.0 +2024-08-29 15:32:30,539 INFO [train.py:1114] (0/4) Epoch 20, batch 1600, loss[loss=0.1904, simple_loss=0.2717, pruned_loss=0.039, ctc_loss=0.07749, over 19843.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2583, pruned_loss=0.03886, ctc_loss=0.07261, over 3834899.02 frames. 
], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 15:32:31,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=260768.0, ans=0.0 +2024-08-29 15:32:36,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.10 vs. limit=22.5 +2024-08-29 15:32:44,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=260821.33333333334, ans=0.125 +2024-08-29 15:33:08,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=260928.0, ans=0.09899494936611666 +2024-08-29 15:33:20,593 INFO [train.py:1114] (0/4) Epoch 20, batch 1650, loss[loss=0.1917, simple_loss=0.2697, pruned_loss=0.04109, ctc_loss=0.07882, over 19656.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2585, pruned_loss=0.03897, ctc_loss=0.07281, over 3830835.84 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:33:35,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.04 vs. limit=15.0 +2024-08-29 15:33:49,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=261194.66666666666, ans=0.125 +2024-08-29 15:33:50,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.08 vs. limit=22.5 +2024-08-29 15:33:56,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261194.66666666666, ans=0.125 +2024-08-29 15:33:58,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261248.0, ans=0.125 +2024-08-29 15:33:58,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=261248.0, ans=0.05 +2024-08-29 15:34:03,133 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.529e+02 1.762e+02 2.426e+02 4.170e+02, threshold=3.524e+02, percent-clipped=3.0 +2024-08-29 15:34:06,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=261301.33333333334, ans=0.09899494936611666 +2024-08-29 15:34:06,781 INFO [train.py:1114] (0/4) Epoch 20, batch 1700, loss[loss=0.1527, simple_loss=0.2245, pruned_loss=0.0288, ctc_loss=0.05812, over 19688.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2582, pruned_loss=0.03854, ctc_loss=0.072, over 3845161.14 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:34:10,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=261301.33333333334, ans=0.2 +2024-08-29 15:34:12,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=261301.33333333334, ans=0.125 +2024-08-29 15:34:12,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.11 vs. 
limit=22.5 +2024-08-29 15:34:15,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=261301.33333333334, ans=0.2 +2024-08-29 15:34:23,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=261354.66666666666, ans=0.5 +2024-08-29 15:34:30,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=261408.0, ans=0.125 +2024-08-29 15:34:33,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=261408.0, ans=0.2 +2024-08-29 15:34:37,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261461.33333333334, ans=0.1 +2024-08-29 15:34:39,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261461.33333333334, ans=0.125 +2024-08-29 15:34:53,279 INFO [train.py:1114] (0/4) Epoch 20, batch 1750, loss[loss=0.1526, simple_loss=0.2289, pruned_loss=0.02795, ctc_loss=0.05082, over 19683.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2576, pruned_loss=0.03853, ctc_loss=0.072, over 3849342.05 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:34:53,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=261568.0, ans=0.125 +2024-08-29 15:34:56,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.52 vs. limit=15.0 +2024-08-29 15:34:58,796 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:35:07,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=261621.33333333334, ans=0.125 +2024-08-29 15:35:23,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=261728.0, ans=0.125 +2024-08-29 15:35:26,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261728.0, ans=0.0 +2024-08-29 15:35:26,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261728.0, ans=0.125 +2024-08-29 15:35:33,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.486e+02 1.910e+02 2.389e+02 3.898e+02, threshold=3.819e+02, percent-clipped=3.0 +2024-08-29 15:35:36,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=261834.66666666666, ans=0.0 +2024-08-29 15:35:37,067 INFO [train.py:1114] (0/4) Epoch 20, batch 1800, loss[loss=0.182, simple_loss=0.2632, pruned_loss=0.03641, ctc_loss=0.07023, over 19608.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2578, pruned_loss=0.03853, ctc_loss=0.07212, over 3852540.42 frames. 
], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:35:37,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=261834.66666666666, ans=0.015 +2024-08-29 15:35:41,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=261834.66666666666, ans=0.05 +2024-08-29 15:35:52,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.53 vs. limit=15.0 +2024-08-29 15:35:53,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261888.0, ans=0.1 +2024-08-29 15:35:55,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=261941.33333333334, ans=0.125 +2024-08-29 15:35:59,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=261941.33333333334, ans=0.0 +2024-08-29 15:36:02,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=261941.33333333334, ans=0.1 +2024-08-29 15:36:03,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261994.66666666666, ans=0.1 +2024-08-29 15:36:21,934 INFO [train.py:1114] (0/4) Epoch 20, batch 1850, loss[loss=0.187, simple_loss=0.2704, pruned_loss=0.03812, ctc_loss=0.06853, over 19577.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2579, pruned_loss=0.03862, ctc_loss=0.07213, over 3856244.40 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:36:28,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.92 vs. limit=15.0 +2024-08-29 15:36:32,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.37 vs. 
limit=15.0 +2024-08-29 15:36:40,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262208.0, ans=0.125 +2024-08-29 15:36:49,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262261.3333333333, ans=0.1 +2024-08-29 15:36:57,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262314.6666666667, ans=0.1 +2024-08-29 15:36:58,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=262314.6666666667, ans=10.0 +2024-08-29 15:37:02,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262314.6666666667, ans=0.1 +2024-08-29 15:37:03,781 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.463e+02 1.737e+02 2.223e+02 4.343e+02, threshold=3.475e+02, percent-clipped=3.0 +2024-08-29 15:37:03,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=262314.6666666667, ans=0.2 +2024-08-29 15:37:07,295 INFO [train.py:1114] (0/4) Epoch 20, batch 1900, loss[loss=0.1902, simple_loss=0.2675, pruned_loss=0.04049, ctc_loss=0.07985, over 19631.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2582, pruned_loss=0.03863, ctc_loss=0.07207, over 3861887.47 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:37:08,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=262368.0, ans=0.07 +2024-08-29 15:38:39,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=262474.6666666667, ans=0.125 +2024-08-29 15:38:54,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=262581.3333333333, ans=0.1 +2024-08-29 15:38:55,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=262581.3333333333, ans=0.125 +2024-08-29 15:39:01,402 INFO [train.py:1114] (0/4) Epoch 20, batch 1950, loss[loss=0.1724, simple_loss=0.2494, pruned_loss=0.03444, ctc_loss=0.06624, over 19595.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2588, pruned_loss=0.03856, ctc_loss=0.07189, over 3870448.19 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 15:39:21,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=262741.3333333333, ans=0.0 +2024-08-29 15:39:31,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=262794.6666666667, ans=0.0 +2024-08-29 15:39:42,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262848.0, ans=0.1 +2024-08-29 15:39:43,480 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.359e+02 1.503e+02 2.197e+02 3.515e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-29 15:39:45,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.69 vs. 
limit=10.0 +2024-08-29 15:39:47,125 INFO [train.py:1114] (0/4) Epoch 20, batch 2000, loss[loss=0.1677, simple_loss=0.2397, pruned_loss=0.03469, ctc_loss=0.0657, over 19654.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2594, pruned_loss=0.03883, ctc_loss=0.07265, over 3855021.75 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 15:40:05,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=263008.0, ans=0.0 +2024-08-29 15:40:05,838 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:40:07,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=263008.0, ans=0.0 +2024-08-29 15:40:21,675 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:40:31,088 INFO [train.py:1114] (0/4) Epoch 20, batch 2050, loss[loss=0.1578, simple_loss=0.2308, pruned_loss=0.03099, ctc_loss=0.0572, over 19708.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2583, pruned_loss=0.03858, ctc_loss=0.07219, over 3851251.97 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:40:33,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.56 vs. limit=10.0 +2024-08-29 15:40:44,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=12.0 +2024-08-29 15:40:46,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-29 15:40:49,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263274.6666666667, ans=0.1 +2024-08-29 15:40:50,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.60 vs. limit=10.0 +2024-08-29 15:40:53,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=263274.6666666667, ans=0.125 +2024-08-29 15:41:03,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.25 vs. limit=15.0 +2024-08-29 15:41:10,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.438e+02 1.706e+02 2.328e+02 5.097e+02, threshold=3.413e+02, percent-clipped=11.0 +2024-08-29 15:41:14,531 INFO [train.py:1114] (0/4) Epoch 20, batch 2100, loss[loss=0.1686, simple_loss=0.246, pruned_loss=0.03338, ctc_loss=0.06114, over 19771.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2575, pruned_loss=0.03813, ctc_loss=0.07136, over 3858598.14 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:44,928 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:41:46,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. 
limit=6.0 +2024-08-29 15:41:49,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=263648.0, ans=0.125 +2024-08-29 15:41:55,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=263648.0, ans=0.0 +2024-08-29 15:41:57,778 INFO [train.py:1114] (0/4) Epoch 20, batch 2150, loss[loss=0.1773, simple_loss=0.2577, pruned_loss=0.03516, ctc_loss=0.06658, over 19570.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2569, pruned_loss=0.03796, ctc_loss=0.07102, over 3868795.35 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:42:11,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.48 vs. limit=15.0 +2024-08-29 15:42:14,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=263808.0, ans=0.125 +2024-08-29 15:42:22,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.34 vs. limit=15.0 +2024-08-29 15:42:27,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=263861.3333333333, ans=0.2 +2024-08-29 15:42:30,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263861.3333333333, ans=0.1 +2024-08-29 15:42:31,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=263914.6666666667, ans=0.2 +2024-08-29 15:42:32,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.50 vs. limit=15.0 +2024-08-29 15:42:33,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=263914.6666666667, ans=0.125 +2024-08-29 15:42:37,628 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.475e+02 1.770e+02 2.541e+02 4.904e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-29 15:42:41,077 INFO [train.py:1114] (0/4) Epoch 20, batch 2200, loss[loss=0.174, simple_loss=0.2544, pruned_loss=0.03413, ctc_loss=0.06314, over 19581.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2569, pruned_loss=0.03814, ctc_loss=0.07129, over 3867840.37 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:42:45,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=263968.0, ans=0.125 +2024-08-29 15:43:11,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=264128.0, ans=0.125 +2024-08-29 15:43:25,174 INFO [train.py:1114] (0/4) Epoch 20, batch 2250, loss[loss=0.1807, simple_loss=0.2604, pruned_loss=0.03661, ctc_loss=0.06976, over 19618.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2571, pruned_loss=0.03819, ctc_loss=0.07137, over 3866799.94 frames. 
], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:43:35,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=264288.0, ans=0.125 +2024-08-29 15:43:35,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.65 vs. limit=22.5 +2024-08-29 15:43:42,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264341.3333333333, ans=0.125 +2024-08-29 15:44:04,417 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.490e+02 1.909e+02 2.542e+02 3.813e+02, threshold=3.818e+02, percent-clipped=1.0 +2024-08-29 15:44:07,869 INFO [train.py:1114] (0/4) Epoch 20, batch 2300, loss[loss=0.1857, simple_loss=0.2582, pruned_loss=0.04195, ctc_loss=0.07315, over 19496.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2567, pruned_loss=0.03811, ctc_loss=0.07116, over 3859390.45 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:44:12,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-29 15:44:24,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=264554.6666666667, ans=0.0 +2024-08-29 15:44:26,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.25 vs. limit=15.0 +2024-08-29 15:44:32,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264608.0, ans=0.125 +2024-08-29 15:44:35,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=10.55 vs. limit=12.0 +2024-08-29 15:44:41,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=264661.3333333333, ans=0.05 +2024-08-29 15:44:50,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.69 vs. limit=15.0 +2024-08-29 15:44:52,849 INFO [train.py:1114] (0/4) Epoch 20, batch 2350, loss[loss=0.2043, simple_loss=0.2758, pruned_loss=0.04851, ctc_loss=0.0892, over 19669.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2567, pruned_loss=0.03831, ctc_loss=0.07157, over 3862188.44 frames. 
], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:44:59,078 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:45:04,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=264821.3333333333, ans=15.0 +2024-08-29 15:45:05,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=264821.3333333333, ans=0.0 +2024-08-29 15:45:32,196 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.408e+02 1.642e+02 2.194e+02 4.028e+02, threshold=3.284e+02, percent-clipped=1.0 +2024-08-29 15:45:35,646 INFO [train.py:1114] (0/4) Epoch 20, batch 2400, loss[loss=0.1987, simple_loss=0.2766, pruned_loss=0.04491, ctc_loss=0.07733, over 19336.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2589, pruned_loss=0.03897, ctc_loss=0.07252, over 3857312.51 frames. ], batch size: 67, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:45:37,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=265034.6666666667, ans=0.0 +2024-08-29 15:45:38,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265034.6666666667, ans=0.1 +2024-08-29 15:45:41,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=265034.6666666667, ans=0.125 +2024-08-29 15:45:46,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.98 vs. limit=15.0 +2024-08-29 15:45:54,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=265141.3333333333, ans=0.0 +2024-08-29 15:45:58,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-29 15:45:59,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-29 15:46:02,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=15.0 +2024-08-29 15:46:07,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=265194.6666666667, ans=0.0 +2024-08-29 15:46:14,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=15.0 +2024-08-29 15:46:19,417 INFO [train.py:1114] (0/4) Epoch 20, batch 2450, loss[loss=0.2465, simple_loss=0.2974, pruned_loss=0.0712, ctc_loss=0.1329, over 13541.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2624, pruned_loss=0.0412, ctc_loss=0.07703, over 3731858.51 frames. ], batch size: 141, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:46:27,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=265354.6666666667, ans=0.0 +2024-08-29 15:46:29,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.70 vs. 
limit=15.0 +2024-08-29 15:46:30,271 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:46:43,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=265408.0, ans=0.5 +2024-08-29 15:46:44,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265408.0, ans=0.1 +2024-08-29 15:46:47,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=265461.3333333333, ans=0.125 +2024-08-29 15:48:24,404 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-20.pt +2024-08-29 15:49:35,406 INFO [train.py:1387] (0/4) Done! diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-1 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-1 new file mode 100644 index 0000000000000000000000000000000000000000..c1ab15ca4a56b37a9d236972ec1c3050b4cd6e13 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-1 @@ -0,0 +1,727 @@ +2024-08-29 10:47:00,757 INFO [train.py:1182] (1/4) Training started +2024-08-29 10:47:00,763 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-08-29 10:47:00,825 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': 
'31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:47:00,825 INFO [train.py:1212] (1/4) About to create model +2024-08-29 10:50:07,603 INFO [train.py:1216] (1/4) Number of model parameters: 65805511 +2024-08-29 10:50:07,811 INFO [checkpoint.py:112] (1/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 10:54:13,630 INFO [train.py:1231] (1/4) Using DDP +2024-08-29 10:57:38,607 INFO [train.py:1243] (1/4) Loading optimizer state dict +2024-08-29 10:57:38,761 INFO [train.py:1251] (1/4) Loading scheduler state dict +2024-08-29 10:57:39,426 INFO [asr_datamodule.py:894] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-08-29 10:57:41,246 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-08-29 10:57:41,253 INFO [asr_datamodule.py:911] (1/4) About to get dev-clean cuts +2024-08-29 10:58:07,692 INFO [asr_datamodule.py:918] (1/4) About to get dev-other cuts +2024-08-29 10:58:09,678 INFO [asr_datamodule.py:814] (1/4) About to create dev dataset +2024-08-29 10:58:09,999 INFO [asr_datamodule.py:831] (1/4) About to create dev dataloader +2024-08-29 10:58:09,999 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:07:35,995 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=4, num_channels=128, metric=3.40 vs. limit=3.0 +2024-08-29 11:08:02,423 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:08:03,531 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:11:47,150 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:11:48,389 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:21:31,078 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=512, metric=14.07 vs. 
limit=7.5 +2024-08-29 11:21:31,523 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:21:32,486 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=19.97 vs. limit=7.5 +2024-08-29 11:21:32,826 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:21:32,848 INFO [train.py:1344] (1/4) Loading grad scaler state dict +2024-08-29 11:27:35,424 INFO [train.py:1114] (1/4) Epoch 19, batch 0, loss[loss=0.1766, simple_loss=0.248, pruned_loss=0.03834, ctc_loss=0.07154, over 19417.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.248, pruned_loss=0.03834, ctc_loss=0.07154, over 19417.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 11:27:35,425 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 11:31:58,881 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-29 11:31:58,882 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 11:31:59,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=12.0 +2024-08-29 11:32:00,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238954.66666666666, ans=0.0 +2024-08-29 11:38:58,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=238954.66666666666, ans=0.2 +2024-08-29 11:50:47,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239008.0, ans=0.125 +2024-08-29 12:03:09,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239008.0, ans=0.0 +2024-08-29 12:32:12,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239114.66666666666, ans=0.0 +2024-08-29 12:39:58,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239168.0, ans=0.125 +2024-08-29 12:45:44,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=239168.0, ans=0.125 +2024-08-29 12:46:38,396 INFO [train.py:1114] (1/4) Epoch 19, batch 50, loss[loss=0.1675, simple_loss=0.238, pruned_loss=0.03436, ctc_loss=0.07073, over 19694.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2622, pruned_loss=0.04172, ctc_loss=0.07862, over 843816.80 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 12:47:42,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239221.33333333334, ans=0.0 +2024-08-29 12:47:43,088 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 12:47:43,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.88 vs. 
limit=22.5 +2024-08-29 12:53:37,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=239274.66666666666, ans=0.0 +2024-08-29 12:53:38,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239274.66666666666, ans=0.1 +2024-08-29 12:56:27,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=239328.0, ans=0.2 +2024-08-29 12:56:32,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=239328.0, ans=0.2 +2024-08-29 12:57:43,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=239381.33333333334, ans=0.0 +2024-08-29 12:57:49,122 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.479e+02 1.753e+02 2.191e+02 3.244e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-29 13:01:16,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=239434.66666666666, ans=0.0 +2024-08-29 13:03:54,703 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:04:10,868 INFO [train.py:1114] (1/4) Epoch 19, batch 100, loss[loss=0.1616, simple_loss=0.2362, pruned_loss=0.03121, ctc_loss=0.06157, over 19715.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2628, pruned_loss=0.04121, ctc_loss=0.07755, over 1498509.86 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 13:05:30,206 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:09:11,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-08-29 13:09:17,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=239541.33333333334, ans=0.2 +2024-08-29 13:14:30,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 13:14:31,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=239648.0, ans=0.0 +2024-08-29 13:14:34,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=239648.0, ans=0.0 +2024-08-29 13:14:48,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=239648.0, ans=0.2 +2024-08-29 13:19:08,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.41 vs. limit=22.5 +2024-08-29 13:21:06,825 INFO [train.py:1114] (1/4) Epoch 19, batch 150, loss[loss=0.1543, simple_loss=0.2299, pruned_loss=0.02901, ctc_loss=0.05177, over 19722.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2608, pruned_loss=0.0403, ctc_loss=0.07549, over 2027222.49 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 13:21:10,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.91 vs. 
limit=15.0 +2024-08-29 13:24:23,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 13:24:24,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.511e+02 1.939e+02 2.474e+02 3.688e+02, threshold=3.878e+02, percent-clipped=4.0 +2024-08-29 13:24:29,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-29 13:24:43,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 13:27:34,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:31:38,284 INFO [train.py:1114] (1/4) Epoch 19, batch 200, loss[loss=0.1995, simple_loss=0.2755, pruned_loss=0.04452, ctc_loss=0.08622, over 18110.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.26, pruned_loss=0.0399, ctc_loss=0.07461, over 2435690.74 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 13:31:48,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.80 vs. limit=15.0 +2024-08-29 13:32:02,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240128.0, ans=0.125 +2024-08-29 13:32:02,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-29 13:32:09,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-29 13:32:11,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240128.0, ans=0.1 +2024-08-29 13:32:29,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240234.66666666666, ans=0.0 +2024-08-29 13:32:34,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240288.0, ans=0.125 +2024-08-29 13:32:34,889 INFO [train.py:1114] (1/4) Epoch 19, batch 250, loss[loss=0.2005, simple_loss=0.2742, pruned_loss=0.04597, ctc_loss=0.08719, over 19370.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2597, pruned_loss=0.0396, ctc_loss=0.07428, over 2755982.86 frames. 
], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 13:33:39,435 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.435e+02 1.779e+02 2.329e+02 4.143e+02, threshold=3.559e+02, percent-clipped=3.0 +2024-08-29 13:36:54,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=240448.0, ans=0.0 +2024-08-29 13:37:08,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-29 13:37:11,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=240501.33333333334, ans=22.5 +2024-08-29 13:37:12,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=240501.33333333334, ans=0.025 +2024-08-29 13:37:13,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 13:37:19,130 INFO [train.py:1114] (1/4) Epoch 19, batch 300, loss[loss=0.2165, simple_loss=0.2862, pruned_loss=0.05391, ctc_loss=0.09754, over 19524.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2591, pruned_loss=0.0395, ctc_loss=0.07392, over 3000158.22 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 13:37:19,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-29 13:37:39,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240554.66666666666, ans=0.125 +2024-08-29 13:37:46,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=240608.0, ans=0.07 +2024-08-29 13:38:51,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0 +2024-08-29 13:40:04,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240714.66666666666, ans=0.1 +2024-08-29 13:40:05,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=240714.66666666666, ans=0.0 +2024-08-29 13:40:51,465 INFO [train.py:1114] (1/4) Epoch 19, batch 350, loss[loss=0.164, simple_loss=0.2362, pruned_loss=0.03293, ctc_loss=0.06517, over 19750.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03968, ctc_loss=0.07416, over 3190941.52 frames. 
], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 13:41:25,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=240928.0, ans=0.125 +2024-08-29 13:41:41,721 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.478e+02 1.769e+02 2.422e+02 3.784e+02, threshold=3.538e+02, percent-clipped=2.0 +2024-08-29 13:43:44,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=241034.66666666666, ans=0.2 +2024-08-29 13:43:51,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-29 13:44:27,819 INFO [train.py:1114] (1/4) Epoch 19, batch 400, loss[loss=0.1765, simple_loss=0.2605, pruned_loss=0.03294, ctc_loss=0.06629, over 19516.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.03954, ctc_loss=0.07385, over 3342063.45 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:44:28,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=241088.0, ans=0.0 +2024-08-29 13:44:44,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241088.0, ans=0.125 +2024-08-29 13:45:41,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.47 vs. limit=15.0 +2024-08-29 13:45:45,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241194.66666666666, ans=0.125 +2024-08-29 13:45:52,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=241248.0, ans=0.0 +2024-08-29 13:46:22,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.11 vs. limit=15.0 +2024-08-29 13:47:04,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=15.0 +2024-08-29 13:47:17,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=241301.33333333334, ans=0.0 +2024-08-29 13:47:19,614 INFO [train.py:1114] (1/4) Epoch 19, batch 450, loss[loss=0.1988, simple_loss=0.2834, pruned_loss=0.04125, ctc_loss=0.07925, over 19611.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2603, pruned_loss=0.04008, ctc_loss=0.07478, over 3450262.93 frames. 
], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:47:38,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241408.0, ans=0.1 +2024-08-29 13:47:38,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=241408.0, ans=0.125 +2024-08-29 13:48:06,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=241461.33333333334, ans=0.0 +2024-08-29 13:48:06,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241461.33333333334, ans=0.1 +2024-08-29 13:48:07,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241461.33333333334, ans=0.1 +2024-08-29 13:48:44,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=241514.66666666666, ans=0.125 +2024-08-29 13:48:45,734 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.422e+02 1.638e+02 2.007e+02 3.524e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 13:48:53,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=241568.0, ans=0.07 +2024-08-29 13:50:05,037 INFO [train.py:1114] (1/4) Epoch 19, batch 500, loss[loss=0.1927, simple_loss=0.269, pruned_loss=0.04255, ctc_loss=0.07812, over 19638.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2594, pruned_loss=0.03963, ctc_loss=0.07404, over 3545831.45 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:50:13,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.62 vs. limit=15.0 +2024-08-29 13:51:43,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-29 13:53:05,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=241834.66666666666, ans=0.2 +2024-08-29 13:53:07,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=241834.66666666666, ans=0.04949747468305833 +2024-08-29 13:54:05,849 INFO [train.py:1114] (1/4) Epoch 19, batch 550, loss[loss=0.2227, simple_loss=0.2915, pruned_loss=0.0563, ctc_loss=0.1032, over 19283.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2598, pruned_loss=0.04009, ctc_loss=0.07483, over 3608551.17 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:54:10,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=241888.0, ans=0.0 +2024-08-29 13:54:18,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.24 vs. 
limit=15.0 +2024-08-29 13:54:29,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-29 13:54:37,009 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.414e+02 1.681e+02 2.031e+02 3.681e+02, threshold=3.361e+02, percent-clipped=2.0 +2024-08-29 13:54:37,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242048.0, ans=0.125 +2024-08-29 13:54:45,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=242101.33333333334, ans=0.5 +2024-08-29 13:54:49,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-29 13:54:53,921 INFO [train.py:1114] (1/4) Epoch 19, batch 600, loss[loss=0.2129, simple_loss=0.2919, pruned_loss=0.04846, ctc_loss=0.09237, over 19427.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2601, pruned_loss=0.04014, ctc_loss=0.07478, over 3666336.13 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:54:55,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=242154.66666666666, ans=0.0 +2024-08-29 13:55:07,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=242208.0, ans=0.125 +2024-08-29 13:55:17,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.49 vs. limit=15.0 +2024-08-29 13:55:27,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=242314.66666666666, ans=0.02 +2024-08-29 13:55:39,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242368.0, ans=0.1 +2024-08-29 13:55:40,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242368.0, ans=0.1 +2024-08-29 13:55:42,122 INFO [train.py:1114] (1/4) Epoch 19, batch 650, loss[loss=0.1895, simple_loss=0.2705, pruned_loss=0.0393, ctc_loss=0.07463, over 19773.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2596, pruned_loss=0.03993, ctc_loss=0.07441, over 3717066.29 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:56:05,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=242528.0, ans=0.125 +2024-08-29 13:56:14,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.536e+02 1.948e+02 2.425e+02 3.839e+02, threshold=3.897e+02, percent-clipped=7.0 +2024-08-29 13:56:38,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-29 13:56:40,140 INFO [train.py:1114] (1/4) Epoch 19, batch 700, loss[loss=0.1765, simple_loss=0.2491, pruned_loss=0.03774, ctc_loss=0.07095, over 19732.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2603, pruned_loss=0.04022, ctc_loss=0.07501, over 3749165.96 frames. 
], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:57:10,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242794.66666666666, ans=0.0 +2024-08-29 13:57:41,164 INFO [train.py:1114] (1/4) Epoch 19, batch 750, loss[loss=0.1839, simple_loss=0.2664, pruned_loss=0.03749, ctc_loss=0.06601, over 19487.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2599, pruned_loss=0.04002, ctc_loss=0.07465, over 3774464.38 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:57:42,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=242954.66666666666, ans=0.125 +2024-08-29 13:57:57,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243008.0, ans=0.0 +2024-08-29 13:57:57,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.08 vs. limit=15.0 +2024-08-29 13:58:06,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=15.0 +2024-08-29 13:58:45,184 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.478e+02 1.857e+02 2.278e+02 3.837e+02, threshold=3.713e+02, percent-clipped=0.0 +2024-08-29 13:58:47,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=243114.66666666666, ans=0.125 +2024-08-29 13:59:02,302 INFO [train.py:1114] (1/4) Epoch 19, batch 800, loss[loss=0.1575, simple_loss=0.2322, pruned_loss=0.03095, ctc_loss=0.05216, over 19841.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2595, pruned_loss=0.03998, ctc_loss=0.07458, over 3796520.43 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:59:15,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=243274.66666666666, ans=0.0 +2024-08-29 13:59:18,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243274.66666666666, ans=0.0 +2024-08-29 13:59:19,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.48 vs. limit=5.0 +2024-08-29 14:04:32,058 INFO [train.py:1114] (1/4) Epoch 19, batch 850, loss[loss=0.1957, simple_loss=0.2744, pruned_loss=0.04299, ctc_loss=0.07766, over 19650.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2592, pruned_loss=0.03987, ctc_loss=0.07446, over 3814948.74 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 14:04:45,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=243541.33333333334, ans=0.125 +2024-08-29 14:04:49,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=12.0 +2024-08-29 14:05:01,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.78 vs. 
limit=15.0 +2024-08-29 14:05:03,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=243648.0, ans=0.0 +2024-08-29 14:05:05,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.422e+02 1.643e+02 2.108e+02 3.301e+02, threshold=3.285e+02, percent-clipped=0.0 +2024-08-29 14:05:06,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=243648.0, ans=0.2 +2024-08-29 14:05:11,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=243648.0, ans=0.125 +2024-08-29 14:05:17,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=243701.33333333334, ans=0.2 +2024-08-29 14:05:20,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 14:05:20,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=243701.33333333334, ans=0.125 +2024-08-29 14:08:15,264 INFO [train.py:1114] (1/4) Epoch 19, batch 900, loss[loss=0.1642, simple_loss=0.2375, pruned_loss=0.03379, ctc_loss=0.05826, over 19823.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2593, pruned_loss=0.03997, ctc_loss=0.07443, over 3819183.23 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 14:10:27,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=243861.33333333334, ans=0.125 +2024-08-29 14:10:36,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=243914.66666666666, ans=0.04949747468305833 +2024-08-29 14:10:47,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=243968.0, ans=0.07 +2024-08-29 14:10:53,190 INFO [train.py:1114] (1/4) Epoch 19, batch 950, loss[loss=0.1644, simple_loss=0.2424, pruned_loss=0.0311, ctc_loss=0.06065, over 19491.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2594, pruned_loss=0.03999, ctc_loss=0.07455, over 3820498.17 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 16.0 +2024-08-29 14:10:58,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=244021.33333333334, ans=0.125 +2024-08-29 14:11:02,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.28 vs. 
limit=15.0 +2024-08-29 14:11:07,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=244074.66666666666, ans=0.0 +2024-08-29 14:11:23,041 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:11:25,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=244181.33333333334, ans=0.025 +2024-08-29 14:11:27,355 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.452e+02 1.728e+02 2.089e+02 3.728e+02, threshold=3.456e+02, percent-clipped=1.0 +2024-08-29 14:11:30,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244181.33333333334, ans=0.125 +2024-08-29 14:11:38,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=244234.66666666666, ans=0.025 +2024-08-29 14:11:43,396 INFO [train.py:1114] (1/4) Epoch 19, batch 1000, loss[loss=0.166, simple_loss=0.2416, pruned_loss=0.03196, ctc_loss=0.06646, over 19867.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2603, pruned_loss=0.04033, ctc_loss=0.07516, over 3816829.39 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:12:55,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=244501.33333333334, ans=0.125 +2024-08-29 14:13:27,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-29 14:13:29,691 INFO [train.py:1114] (1/4) Epoch 19, batch 1050, loss[loss=0.181, simple_loss=0.2625, pruned_loss=0.03587, ctc_loss=0.06921, over 19850.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2595, pruned_loss=0.04014, ctc_loss=0.07492, over 3822541.22 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:13:43,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=22.5 +2024-08-29 14:13:44,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244608.0, ans=0.0 +2024-08-29 14:14:00,314 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:14:01,822 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.357e+02 1.587e+02 1.996e+02 3.012e+02, threshold=3.173e+02, percent-clipped=0.0 +2024-08-29 14:14:08,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=15.0 +2024-08-29 14:14:17,627 INFO [train.py:1114] (1/4) Epoch 19, batch 1100, loss[loss=0.1789, simple_loss=0.2538, pruned_loss=0.03818, ctc_loss=0.06909, over 19592.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2594, pruned_loss=0.03987, ctc_loss=0.07436, over 3830857.76 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:14:42,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. 
limit=6.0 +2024-08-29 14:15:05,498 INFO [train.py:1114] (1/4) Epoch 19, batch 1150, loss[loss=0.1733, simple_loss=0.2546, pruned_loss=0.03372, ctc_loss=0.06153, over 19598.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2592, pruned_loss=0.03962, ctc_loss=0.0741, over 3830026.08 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:15:08,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245088.0, ans=0.0 +2024-08-29 14:15:10,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.96 vs. limit=15.0 +2024-08-29 14:15:18,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=245141.33333333334, ans=0.0 +2024-08-29 14:15:31,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=245194.66666666666, ans=0.0 +2024-08-29 14:15:42,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.485e+02 1.714e+02 2.095e+02 3.780e+02, threshold=3.428e+02, percent-clipped=1.0 +2024-08-29 14:15:43,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.04 vs. limit=10.0 +2024-08-29 14:15:44,809 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:15:47,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.02 vs. limit=15.0 +2024-08-29 14:16:23,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245301.33333333334, ans=0.1 +2024-08-29 14:17:45,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=245301.33333333334, ans=0.0 +2024-08-29 14:17:48,228 INFO [train.py:1114] (1/4) Epoch 19, batch 1200, loss[loss=0.2002, simple_loss=0.2743, pruned_loss=0.04612, ctc_loss=0.08479, over 19840.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2602, pruned_loss=0.03988, ctc_loss=0.07464, over 3824926.60 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 14:18:17,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=245514.66666666666, ans=0.05 +2024-08-29 14:18:20,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=245514.66666666666, ans=0.125 +2024-08-29 14:18:22,277 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.05 vs. limit=6.0 +2024-08-29 14:18:25,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245568.0, ans=0.1 +2024-08-29 14:18:27,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245568.0, ans=0.0 +2024-08-29 14:18:36,281 INFO [train.py:1114] (1/4) Epoch 19, batch 1250, loss[loss=0.1977, simple_loss=0.2733, pruned_loss=0.04457, ctc_loss=0.08269, over 19547.00 frames. 
], tot_loss[loss=0.1854, simple_loss=0.2609, pruned_loss=0.03998, ctc_loss=0.07476, over 3843041.14 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:19:03,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=245781.33333333334, ans=0.0 +2024-08-29 14:19:06,848 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.390e+02 1.709e+02 2.116e+02 3.450e+02, threshold=3.419e+02, percent-clipped=1.0 +2024-08-29 14:19:27,900 INFO [train.py:1114] (1/4) Epoch 19, batch 1300, loss[loss=0.2087, simple_loss=0.2776, pruned_loss=0.05054, ctc_loss=0.09672, over 18776.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2601, pruned_loss=0.03953, ctc_loss=0.07387, over 3846494.21 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:19:42,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=245941.33333333334, ans=0.125 +2024-08-29 14:20:20,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246048.0, ans=0.1 +2024-08-29 14:20:27,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-29 14:20:35,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246154.66666666666, ans=0.0 +2024-08-29 14:20:35,709 INFO [train.py:1114] (1/4) Epoch 19, batch 1350, loss[loss=0.1658, simple_loss=0.2457, pruned_loss=0.03133, ctc_loss=0.05838, over 19783.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2593, pruned_loss=0.03915, ctc_loss=0.07306, over 3856904.11 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:20:37,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246154.66666666666, ans=0.1 +2024-08-29 14:20:39,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-29 14:20:40,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.99 vs. limit=15.0 +2024-08-29 14:20:45,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=246154.66666666666, ans=0.125 +2024-08-29 14:21:35,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.448e+02 1.632e+02 2.120e+02 3.289e+02, threshold=3.263e+02, percent-clipped=0.0 +2024-08-29 14:21:51,129 INFO [train.py:1114] (1/4) Epoch 19, batch 1400, loss[loss=0.1499, simple_loss=0.2198, pruned_loss=0.02854, ctc_loss=0.05718, over 19689.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2588, pruned_loss=0.03902, ctc_loss=0.07281, over 3864018.00 frames. 
], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:21:53,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=246421.33333333334, ans=0.125 +2024-08-29 14:22:05,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=246474.66666666666, ans=0.0 +2024-08-29 14:22:08,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=246528.0, ans=0.0 +2024-08-29 14:22:08,924 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=15.0 +2024-08-29 14:22:10,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.43 vs. limit=15.0 +2024-08-29 14:22:11,014 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.46 vs. limit=15.0 +2024-08-29 14:22:14,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246528.0, ans=0.1 +2024-08-29 14:22:26,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.35 vs. limit=8.0 +2024-08-29 14:22:34,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246634.66666666666, ans=0.1 +2024-08-29 14:22:47,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-29 14:22:47,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=246634.66666666666, ans=0.2 +2024-08-29 14:22:49,905 INFO [train.py:1114] (1/4) Epoch 19, batch 1450, loss[loss=0.196, simple_loss=0.2706, pruned_loss=0.04462, ctc_loss=0.08045, over 19664.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2593, pruned_loss=0.03914, ctc_loss=0.07313, over 3863145.24 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:22:50,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=246688.0, ans=0.125 +2024-08-29 14:22:54,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246688.0, ans=0.1 +2024-08-29 14:24:00,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246848.0, ans=0.125 +2024-08-29 14:24:01,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.408e+02 1.590e+02 1.931e+02 3.612e+02, threshold=3.180e+02, percent-clipped=1.0 +2024-08-29 14:24:12,256 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:24:13,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=246901.33333333334, ans=0.1 +2024-08-29 14:24:17,513 INFO [train.py:1114] (1/4) Epoch 19, batch 1500, loss[loss=0.1976, simple_loss=0.2739, pruned_loss=0.04432, ctc_loss=0.08137, over 19575.00 frames. 
], tot_loss[loss=0.1834, simple_loss=0.2595, pruned_loss=0.039, ctc_loss=0.07302, over 3862569.89 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:24:49,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247008.0, ans=0.125 +2024-08-29 14:26:26,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.53 vs. limit=15.0 +2024-08-29 14:26:27,026 INFO [train.py:1114] (1/4) Epoch 19, batch 1550, loss[loss=0.2115, simple_loss=0.278, pruned_loss=0.05253, ctc_loss=0.09981, over 19583.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2596, pruned_loss=0.03927, ctc_loss=0.07362, over 3847411.22 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:26:31,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247221.33333333334, ans=0.125 +2024-08-29 14:26:36,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.95 vs. limit=12.0 +2024-08-29 14:26:38,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=247274.66666666666, ans=0.2 +2024-08-29 14:26:43,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0 +2024-08-29 14:26:44,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247274.66666666666, ans=0.1 +2024-08-29 14:27:02,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247328.0, ans=0.1 +2024-08-29 14:27:15,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=247328.0, ans=0.0 +2024-08-29 14:27:16,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-08-29 14:27:20,314 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.466e+02 1.727e+02 2.253e+02 4.003e+02, threshold=3.453e+02, percent-clipped=2.0 +2024-08-29 14:27:22,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247381.33333333334, ans=0.1 +2024-08-29 14:29:40,638 INFO [train.py:1114] (1/4) Epoch 19, batch 1600, loss[loss=0.1847, simple_loss=0.271, pruned_loss=0.03565, ctc_loss=0.06744, over 19843.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2595, pruned_loss=0.03941, ctc_loss=0.07393, over 3836078.74 frames. 
], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:29:46,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=247488.0, ans=0.0 +2024-08-29 14:31:37,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=247594.66666666666, ans=10.0 +2024-08-29 14:31:38,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247594.66666666666, ans=0.0 +2024-08-29 14:31:38,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=247594.66666666666, ans=0.2 +2024-08-29 14:31:56,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=247648.0, ans=0.5 +2024-08-29 14:31:57,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=247701.33333333334, ans=0.0 +2024-08-29 14:32:06,873 INFO [train.py:1114] (1/4) Epoch 19, batch 1650, loss[loss=0.1842, simple_loss=0.2694, pruned_loss=0.036, ctc_loss=0.06753, over 19660.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03944, ctc_loss=0.07389, over 3832558.49 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:32:08,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.02 vs. limit=15.0 +2024-08-29 14:32:37,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.670e+02 2.010e+02 2.374e+02 4.027e+02, threshold=4.020e+02, percent-clipped=3.0 +2024-08-29 14:32:38,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=247914.66666666666, ans=0.0 +2024-08-29 14:32:53,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=247968.0, ans=0.0 +2024-08-29 14:32:54,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=247968.0, ans=0.125 +2024-08-29 14:32:56,782 INFO [train.py:1114] (1/4) Epoch 19, batch 1700, loss[loss=0.1624, simple_loss=0.2311, pruned_loss=0.03409, ctc_loss=0.064, over 19679.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2589, pruned_loss=0.03898, ctc_loss=0.07318, over 3846785.53 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:33:15,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=248128.0, ans=0.125 +2024-08-29 14:33:51,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=248181.33333333334, ans=0.0 +2024-08-29 14:33:57,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=248234.66666666666, ans=0.125 +2024-08-29 14:33:59,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=248234.66666666666, ans=0.2 +2024-08-29 14:34:04,337 INFO [train.py:1114] (1/4) Epoch 19, batch 1750, loss[loss=0.1758, simple_loss=0.2389, pruned_loss=0.04058, ctc_loss=0.07908, over 19659.00 frames. 
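
The optim.py WARNING records summarize the distribution of recent gradient norms; with Clipping_scale=2.0 the reported threshold is consistently twice the running median (2 x 1.590e+02 = 3.180e+02 in the first warning above, 2 x 2.010e+02 = 4.020e+02 in the latest one). A sketch of that bookkeeping, assuming a simple sliding window rather than whatever state the real optimizer keeps:

```python
# Sketch of median-based gradient clipping consistent with the WARNING
# lines above: clip at clipping_scale * median of recent gradient norms
# and report quartiles plus the fraction of clipped batches.
# Illustrative only; not the actual icefall optim.py implementation.
from collections import deque
import torch

class MedianGradClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.batches = 0
        self.clipped = 0

    def clip_(self, parameters) -> None:
        grads = [p.grad for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(norm)
        self.batches += 1
        hist = torch.tensor(list(self.norms))
        threshold = self.scale * hist.median().item()
        if len(self.norms) > 1 and norm > threshold:
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)  # rescale gradients in place
        q = torch.quantile(hist, torch.linspace(0, 1, 5))
        print(f"grad-norm quartiles {q.tolist()}, threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.clipped / self.batches:.1f}")
```
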
], tot_loss[loss=0.1831, simple_loss=0.2588, pruned_loss=0.03904, ctc_loss=0.07318, over 3852102.13 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:34:39,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=248288.0, ans=0.125 +2024-08-29 14:34:40,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0 +2024-08-29 14:34:51,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=248394.66666666666, ans=0.2 +2024-08-29 14:34:56,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248394.66666666666, ans=0.1 +2024-08-29 14:35:02,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=248448.0, ans=0.125 +2024-08-29 14:35:02,763 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.489e+02 1.824e+02 2.243e+02 3.708e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-08-29 14:35:11,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.97 vs. limit=12.0 +2024-08-29 14:35:17,403 INFO [train.py:1114] (1/4) Epoch 19, batch 1800, loss[loss=0.1808, simple_loss=0.2636, pruned_loss=0.03613, ctc_loss=0.064, over 19617.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2588, pruned_loss=0.03907, ctc_loss=0.0732, over 3853513.81 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-29 14:35:19,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=248554.66666666666, ans=0.125 +2024-08-29 14:35:55,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=248768.0, ans=0.0 +2024-08-29 14:36:01,485 INFO [train.py:1114] (1/4) Epoch 19, batch 1850, loss[loss=0.2003, simple_loss=0.2789, pruned_loss=0.04438, ctc_loss=0.08238, over 19581.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.259, pruned_loss=0.03915, ctc_loss=0.07331, over 3856194.78 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:40:02,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.549e+02 1.911e+02 2.362e+02 1.156e+03, threshold=3.822e+02, percent-clipped=7.0 +2024-08-29 14:40:06,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-08-29 14:40:10,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=249034.66666666666, ans=0.125 +2024-08-29 14:40:18,260 INFO [train.py:1114] (1/4) Epoch 19, batch 1900, loss[loss=0.1729, simple_loss=0.2569, pruned_loss=0.03227, ctc_loss=0.06105, over 19643.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2597, pruned_loss=0.03945, ctc_loss=0.07367, over 3860254.22 frames. 
], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:40:48,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=249248.0, ans=0.125 +2024-08-29 14:40:50,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.00 vs. limit=22.5 +2024-08-29 14:41:00,402 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:41:02,132 INFO [train.py:1114] (1/4) Epoch 19, batch 1950, loss[loss=0.1727, simple_loss=0.2511, pruned_loss=0.03404, ctc_loss=0.06547, over 19609.00 frames. ], tot_loss[loss=0.185, simple_loss=0.261, pruned_loss=0.03968, ctc_loss=0.0741, over 3869695.05 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-29 14:41:10,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249408.0, ans=0.1 +2024-08-29 14:41:20,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=249461.33333333334, ans=0.0 +2024-08-29 14:41:25,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0 +2024-08-29 14:41:31,727 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.363e+02 1.491e+02 1.694e+02 3.301e+02, threshold=2.983e+02, percent-clipped=0.0 +2024-08-29 14:41:46,949 INFO [train.py:1114] (1/4) Epoch 19, batch 2000, loss[loss=0.159, simple_loss=0.2279, pruned_loss=0.03279, ctc_loss=0.06118, over 19645.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2616, pruned_loss=0.03993, ctc_loss=0.07448, over 3855107.31 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:41:56,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=249674.66666666666, ans=0.125 +2024-08-29 14:42:03,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249728.0, ans=0.125 +2024-08-29 14:42:06,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=249728.0, ans=0.125 +2024-08-29 14:42:10,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=249728.0, ans=0.2 +2024-08-29 14:42:11,850 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:42:12,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 14:42:23,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=249834.66666666666, ans=0.2 +2024-08-29 14:42:23,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=249834.66666666666, ans=0.02 +2024-08-29 14:42:31,041 INFO [train.py:1114] (1/4) Epoch 19, batch 2050, loss[loss=0.1719, simple_loss=0.25, pruned_loss=0.03441, ctc_loss=0.06258, over 19722.00 frames. 
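
Each train.py:1114 record pairs the current batch's losses with tot_loss, which behaves like a frame-weighted running average over recent batches (the real tracker also down-weights older batches, so this is a simplification). A minimal sketch of that bookkeeping:

```python
# Frame-weighted running average in the shape of the tot_loss[...] fields
# above. Simplified: the actual tracker also decays contributions from
# older batches.
class RunningLoss:
    def __init__(self):
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: float) -> None:
        self.weighted_sum += loss * num_frames
        self.frames += num_frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.frames, 1.0)

tot = RunningLoss()
tot.update(0.1727, 19609.0)  # per-batch values taken from batch 1950 above
print(f"tot_loss[loss={tot.value:.4f}, over {tot.frames:.2f} frames.]")
```
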
], tot_loss[loss=0.185, simple_loss=0.2605, pruned_loss=0.03987, ctc_loss=0.0743, over 3851539.82 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:42:32,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=249888.0, ans=0.2 +2024-08-29 14:42:36,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=249888.0, ans=0.025 +2024-08-29 14:42:38,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. limit=12.0 +2024-08-29 14:42:53,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=249994.66666666666, ans=0.125 +2024-08-29 14:43:00,935 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.385e+02 1.662e+02 2.291e+02 4.175e+02, threshold=3.324e+02, percent-clipped=7.0 +2024-08-29 14:44:05,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=250101.33333333334, ans=0.125 +2024-08-29 14:44:08,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=250154.66666666666, ans=0.0 +2024-08-29 14:44:08,826 INFO [train.py:1114] (1/4) Epoch 19, batch 2100, loss[loss=0.1875, simple_loss=0.2653, pruned_loss=0.03945, ctc_loss=0.0768, over 19776.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2599, pruned_loss=0.03948, ctc_loss=0.07381, over 3858510.10 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:09,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=250154.66666666666, ans=0.125 +2024-08-29 14:45:26,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=250261.33333333334, ans=0.2 +2024-08-29 14:45:28,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=250261.33333333334, ans=0.025 +2024-08-29 14:45:44,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.46 vs. limit=6.0 +2024-08-29 14:45:47,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=250368.0, ans=0.0 +2024-08-29 14:45:49,961 INFO [train.py:1114] (1/4) Epoch 19, batch 2150, loss[loss=0.1736, simple_loss=0.2529, pruned_loss=0.03433, ctc_loss=0.06399, over 19581.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2591, pruned_loss=0.03923, ctc_loss=0.07317, over 3868401.60 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:50,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=250421.33333333334, ans=0.0 +2024-08-29 14:45:51,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250421.33333333334, ans=0.1 +2024-08-29 14:45:53,003 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.05 vs. 
limit=15.0 +2024-08-29 14:45:53,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=250421.33333333334, ans=0.0 +2024-08-29 14:45:58,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=250474.66666666666, ans=0.0 +2024-08-29 14:46:01,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-29 14:46:10,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=250528.0, ans=0.0 +2024-08-29 14:46:12,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=250528.0, ans=0.125 +2024-08-29 14:46:13,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=22.5 +2024-08-29 14:46:16,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=250581.33333333334, ans=0.125 +2024-08-29 14:46:20,179 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.504e+02 1.955e+02 2.570e+02 4.900e+02, threshold=3.910e+02, percent-clipped=8.0 +2024-08-29 14:46:23,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=250581.33333333334, ans=0.0 +2024-08-29 14:46:35,223 INFO [train.py:1114] (1/4) Epoch 19, batch 2200, loss[loss=0.1868, simple_loss=0.2649, pruned_loss=0.03863, ctc_loss=0.07884, over 19583.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.259, pruned_loss=0.03919, ctc_loss=0.07317, over 3866828.23 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:47:41,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=250848.0, ans=0.07 +2024-08-29 14:47:54,988 INFO [train.py:1114] (1/4) Epoch 19, batch 2250, loss[loss=0.1902, simple_loss=0.2712, pruned_loss=0.04025, ctc_loss=0.07186, over 19610.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2589, pruned_loss=0.03919, ctc_loss=0.07316, over 3867414.95 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:48:17,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0 +2024-08-29 14:48:32,688 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.465e+02 1.864e+02 2.416e+02 3.726e+02, threshold=3.728e+02, percent-clipped=0.0 +2024-08-29 14:48:36,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=251114.66666666666, ans=0.0 +2024-08-29 14:48:41,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=251168.0, ans=0.0 +2024-08-29 14:48:47,059 INFO [train.py:1114] (1/4) Epoch 19, batch 2300, loss[loss=0.1757, simple_loss=0.2456, pruned_loss=0.03923, ctc_loss=0.0682, over 19536.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2585, pruned_loss=0.03945, ctc_loss=0.07365, over 3861612.16 frames. 
], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:48:48,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=251221.33333333334, ans=0.0 +2024-08-29 14:48:50,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.63 vs. limit=22.5 +2024-08-29 14:49:09,547 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:49:14,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251381.33333333334, ans=0.125 +2024-08-29 14:49:25,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251434.66666666666, ans=0.1 +2024-08-29 14:49:30,460 INFO [train.py:1114] (1/4) Epoch 19, batch 2350, loss[loss=0.2036, simple_loss=0.2729, pruned_loss=0.04922, ctc_loss=0.08978, over 19657.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.259, pruned_loss=0.03997, ctc_loss=0.07452, over 3863892.88 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:49:34,063 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:49:40,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=251541.33333333334, ans=0.025 +2024-08-29 14:49:46,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=251541.33333333334, ans=0.2 +2024-08-29 14:49:46,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=251541.33333333334, ans=0.125 +2024-08-29 14:49:47,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=251594.66666666666, ans=0.125 +2024-08-29 14:49:51,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251594.66666666666, ans=0.1 +2024-08-29 14:50:00,303 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.463e+02 1.784e+02 2.534e+02 4.062e+02, threshold=3.568e+02, percent-clipped=2.0 +2024-08-29 14:50:01,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=251648.0, ans=0.2 +2024-08-29 14:50:07,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-29 14:50:17,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=251701.33333333334, ans=10.0 +2024-08-29 14:50:24,247 INFO [train.py:1114] (1/4) Epoch 19, batch 2400, loss[loss=0.2075, simple_loss=0.2845, pruned_loss=0.04902, ctc_loss=0.08121, over 19414.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2611, pruned_loss=0.04077, ctc_loss=0.07584, over 3858113.05 frames. ], batch size: 67, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:50:29,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.61 vs. 
limit=15.0 +2024-08-29 14:50:34,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=251808.0, ans=0.2 +2024-08-29 14:50:39,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=251808.0, ans=0.09899494936611666 +2024-08-29 14:50:40,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=251808.0, ans=0.2 +2024-08-29 14:50:48,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.83 vs. limit=15.0 +2024-08-29 14:51:18,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.46 vs. limit=12.0 +2024-08-29 14:51:26,009 INFO [train.py:1114] (1/4) Epoch 19, batch 2450, loss[loss=0.2433, simple_loss=0.2852, pruned_loss=0.07424, ctc_loss=0.1324, over 13285.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2641, pruned_loss=0.0427, ctc_loss=0.07978, over 3731183.79 frames. ], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 14:51:29,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.16 vs. limit=10.0 +2024-08-29 14:51:30,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=12.0 +2024-08-29 14:51:31,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=252021.33333333334, ans=0.125 +2024-08-29 14:51:51,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.01 vs. limit=6.0 +2024-08-29 14:51:56,013 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.531e+02 1.709e+02 1.904e+02 2.805e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-29 14:53:10,855 INFO [train.py:1114] (1/4) Epoch 20, batch 0, loss[loss=0.185, simple_loss=0.2566, pruned_loss=0.04103, ctc_loss=0.07823, over 19805.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2566, pruned_loss=0.04103, ctc_loss=0.07823, over 19805.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 14:53:10,856 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-29 14:53:50,291 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0001, 3.7243, 3.5198, 3.5611], device='cuda:1') +2024-08-29 14:59:30,905 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1707, simple_loss=0.2632, pruned_loss=0.02916, ctc_loss=0.04979, over 944034.00 frames. +2024-08-29 14:59:30,906 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 12880MB +2024-08-29 14:59:41,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=252288.0, ans=0.0 +2024-08-29 14:59:43,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252288.0, ans=0.1 +2024-08-29 14:59:50,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.31 vs. 
limit=22.5 +2024-08-29 14:59:54,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=15.0 +2024-08-29 14:59:57,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.82 vs. limit=15.0 +2024-08-29 15:00:03,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=252394.66666666666, ans=0.125 +2024-08-29 15:00:07,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=252448.0, ans=0.2 +2024-08-29 15:00:14,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=12.0 +2024-08-29 15:00:15,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-08-29 15:00:18,046 INFO [train.py:1114] (1/4) Epoch 20, batch 50, loss[loss=0.1631, simple_loss=0.233, pruned_loss=0.03426, ctc_loss=0.06157, over 19711.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2606, pruned_loss=0.0399, ctc_loss=0.07413, over 845201.66 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 15:00:38,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=252554.66666666666, ans=0.0 +2024-08-29 15:01:09,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=22.5 +2024-08-29 15:01:11,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=252714.66666666666, ans=0.0 +2024-08-29 15:01:11,678 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.410e+02 1.638e+02 1.971e+02 2.993e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 15:01:17,274 INFO [train.py:1114] (1/4) Epoch 20, batch 100, loss[loss=0.1594, simple_loss=0.2346, pruned_loss=0.0312, ctc_loss=0.05463, over 19724.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2619, pruned_loss=0.04042, ctc_loss=0.07535, over 1498926.28 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:01:34,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=252821.33333333334, ans=0.2 +2024-08-29 15:01:49,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.02 vs. limit=10.0 +2024-08-29 15:01:57,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=252981.33333333334, ans=0.0 +2024-08-29 15:02:05,279 INFO [train.py:1114] (1/4) Epoch 20, batch 150, loss[loss=0.1716, simple_loss=0.2408, pruned_loss=0.03745, ctc_loss=0.06863, over 19725.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2588, pruned_loss=0.03923, ctc_loss=0.0732, over 2028652.43 frames. 
], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:02:11,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=253034.66666666666, ans=0.125 +2024-08-29 15:02:16,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=253088.0, ans=0.0 +2024-08-29 15:02:17,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=253088.0, ans=0.0 +2024-08-29 15:02:36,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0 +2024-08-29 15:02:47,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.385e+02 1.535e+02 1.856e+02 3.405e+02, threshold=3.069e+02, percent-clipped=1.0 +2024-08-29 15:03:21,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=253248.0, ans=0.0 +2024-08-29 15:03:24,279 INFO [train.py:1114] (1/4) Epoch 20, batch 200, loss[loss=0.2052, simple_loss=0.2801, pruned_loss=0.0462, ctc_loss=0.09495, over 18278.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2581, pruned_loss=0.03917, ctc_loss=0.07337, over 2436406.66 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:03:41,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=253408.0, ans=0.125 +2024-08-29 15:03:53,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-29 15:04:00,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=253461.33333333334, ans=0.0 +2024-08-29 15:04:02,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253514.66666666666, ans=0.1 +2024-08-29 15:04:02,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=253514.66666666666, ans=0.125 +2024-08-29 15:04:02,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.00 vs. limit=15.0 +2024-08-29 15:04:05,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253514.66666666666, ans=0.1 +2024-08-29 15:04:12,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.13 vs. limit=15.0 +2024-08-29 15:04:14,361 INFO [train.py:1114] (1/4) Epoch 20, batch 250, loss[loss=0.1894, simple_loss=0.2666, pruned_loss=0.04089, ctc_loss=0.07613, over 19412.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2583, pruned_loss=0.03881, ctc_loss=0.07269, over 2755617.90 frames. 
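
The Whitening records compare a per-module metric against a fixed limit (for example metric=15.00 vs. limit=15.0 just above); the metric reads as a measure of how far the activation covariance within each channel group is from a multiple of the identity, equal to 1.0 when the features are perfectly white, with the corrective gradient applied only when the limit is exceeded. One plausible formulation, sketched under that assumption rather than taken from scaling.py:

```python
# One plausible whitening metric: mean(eigenvalue^2) / mean(eigenvalue)^2
# of the per-group covariance, which is 1.0 iff the covariance is a
# multiple of the identity. An assumption-based sketch, not the exact
# scaling.py formula.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels), channels split into num_groups groups
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n        # (groups, d, d)
    eigs = torch.linalg.eigvalsh(cov)                   # (groups, d)
    metric = (eigs.pow(2).mean(dim=1) / eigs.mean(dim=1).pow(2)).mean()
    return metric.item()

x = torch.randn(4000, 256)
print(whitening_metric(x))                           # near 1.0: roughly white
print(whitening_metric(x @ torch.randn(256, 256)))   # much larger: correlated
```
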
], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:04:14,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=253568.0, ans=0.025 +2024-08-29 15:04:16,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=253568.0, ans=0.125 +2024-08-29 15:04:26,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.04 vs. limit=15.0 +2024-08-29 15:04:28,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.99 vs. limit=15.0 +2024-08-29 15:04:29,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=12.0 +2024-08-29 15:04:29,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.84 vs. limit=12.0 +2024-08-29 15:04:36,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=253674.66666666666, ans=0.1 +2024-08-29 15:04:37,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=22.5 +2024-08-29 15:06:08,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253728.0, ans=0.125 +2024-08-29 15:06:10,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253728.0, ans=0.1 +2024-08-29 15:06:12,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=253781.33333333334, ans=0.0 +2024-08-29 15:06:16,399 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.436e+02 1.637e+02 2.276e+02 3.998e+02, threshold=3.274e+02, percent-clipped=8.0 +2024-08-29 15:06:21,874 INFO [train.py:1114] (1/4) Epoch 20, batch 300, loss[loss=0.185, simple_loss=0.2649, pruned_loss=0.03853, ctc_loss=0.06992, over 19501.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2581, pruned_loss=0.03864, ctc_loss=0.07233, over 3001492.53 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:06:30,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253888.0, ans=0.1 +2024-08-29 15:06:40,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.91 vs. limit=22.5 +2024-08-29 15:06:46,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=253941.33333333334, ans=0.0 +2024-08-29 15:06:50,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-08-29 15:07:35,043 INFO [train.py:1114] (1/4) Epoch 20, batch 350, loss[loss=0.1661, simple_loss=0.2398, pruned_loss=0.03409, ctc_loss=0.06073, over 19766.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2577, pruned_loss=0.03857, ctc_loss=0.07212, over 3190578.29 frames. 
], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:07:48,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=254154.66666666666, ans=0.04949747468305833 +2024-08-29 15:08:36,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=254261.33333333334, ans=0.125 +2024-08-29 15:09:24,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=254261.33333333334, ans=0.125 +2024-08-29 15:09:30,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254314.66666666666, ans=0.125 +2024-08-29 15:09:32,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.477e+02 1.781e+02 2.345e+02 4.390e+02, threshold=3.562e+02, percent-clipped=4.0 +2024-08-29 15:09:38,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.95 vs. limit=22.5 +2024-08-29 15:09:40,121 INFO [train.py:1114] (1/4) Epoch 20, batch 400, loss[loss=0.1676, simple_loss=0.2542, pruned_loss=0.02919, ctc_loss=0.05648, over 19857.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2574, pruned_loss=0.03843, ctc_loss=0.07171, over 3344185.38 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:09:49,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.91 vs. limit=22.5 +2024-08-29 15:10:03,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254474.66666666666, ans=0.1 +2024-08-29 15:10:06,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254474.66666666666, ans=0.1 +2024-08-29 15:10:12,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254528.0, ans=0.1 +2024-08-29 15:10:15,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=254528.0, ans=0.05 +2024-08-29 15:10:31,168 INFO [train.py:1114] (1/4) Epoch 20, batch 450, loss[loss=0.1813, simple_loss=0.2629, pruned_loss=0.0364, ctc_loss=0.06754, over 19601.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2581, pruned_loss=0.03877, ctc_loss=0.07239, over 3453379.41 frames. 
], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 15:10:43,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=254688.0, ans=0.0 +2024-08-29 15:10:47,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=254688.0, ans=0.015 +2024-08-29 15:10:48,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=254688.0, ans=0.0 +2024-08-29 15:10:52,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=254741.33333333334, ans=0.0 +2024-08-29 15:10:53,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=254741.33333333334, ans=0.125 +2024-08-29 15:10:56,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=254741.33333333334, ans=0.2 +2024-08-29 15:11:12,414 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.415e+02 1.633e+02 1.920e+02 3.508e+02, threshold=3.267e+02, percent-clipped=0.0 +2024-08-29 15:11:16,889 INFO [train.py:1114] (1/4) Epoch 20, batch 500, loss[loss=0.1989, simple_loss=0.2731, pruned_loss=0.04593, ctc_loss=0.08182, over 19698.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2575, pruned_loss=0.03844, ctc_loss=0.07177, over 3548352.56 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:11:18,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=254901.33333333334, ans=0.0 +2024-08-29 15:11:27,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.86 vs. limit=10.0 +2024-08-29 15:11:40,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.67 vs. limit=15.0 +2024-08-29 15:11:54,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=255061.33333333334, ans=0.125 +2024-08-29 15:12:03,683 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:12:06,337 INFO [train.py:1114] (1/4) Epoch 20, batch 550, loss[loss=0.192, simple_loss=0.2655, pruned_loss=0.04224, ctc_loss=0.08497, over 19308.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2575, pruned_loss=0.03869, ctc_loss=0.07228, over 3609596.37 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:12:10,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.46 vs. 
limit=22.5 +2024-08-29 15:12:27,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=255274.66666666666, ans=0.2 +2024-08-29 15:13:59,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=255381.33333333334, ans=0.125 +2024-08-29 15:14:00,950 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.409e+02 1.685e+02 2.053e+02 3.324e+02, threshold=3.369e+02, percent-clipped=1.0 +2024-08-29 15:14:15,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255381.33333333334, ans=0.125 +2024-08-29 15:14:18,266 INFO [train.py:1114] (1/4) Epoch 20, batch 600, loss[loss=0.1906, simple_loss=0.2746, pruned_loss=0.03888, ctc_loss=0.07201, over 19349.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2578, pruned_loss=0.03871, ctc_loss=0.0723, over 3666314.13 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:14:18,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=255434.66666666666, ans=0.125 +2024-08-29 15:14:31,380 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:14:35,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=255488.0, ans=0.125 +2024-08-29 15:14:40,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=255541.33333333334, ans=0.125 +2024-08-29 15:15:00,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=255648.0, ans=0.0 +2024-08-29 15:15:04,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.32 vs. limit=15.0 +2024-08-29 15:15:06,098 INFO [train.py:1114] (1/4) Epoch 20, batch 650, loss[loss=0.1821, simple_loss=0.2684, pruned_loss=0.03543, ctc_loss=0.06238, over 19776.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.257, pruned_loss=0.03804, ctc_loss=0.07091, over 3716846.63 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:15:09,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=255701.33333333334, ans=0.0 +2024-08-29 15:15:20,134 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:15:28,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.28 vs. 
limit=6.0 +2024-08-29 15:15:45,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=255914.66666666666, ans=0.1 +2024-08-29 15:15:49,547 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.451e+02 1.804e+02 2.620e+02 6.000e+02, threshold=3.609e+02, percent-clipped=12.0 +2024-08-29 15:15:49,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=255914.66666666666, ans=0.0 +2024-08-29 15:15:54,173 INFO [train.py:1114] (1/4) Epoch 20, batch 700, loss[loss=0.1652, simple_loss=0.2427, pruned_loss=0.03226, ctc_loss=0.05801, over 19719.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2576, pruned_loss=0.03837, ctc_loss=0.07164, over 3749179.31 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:15:56,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255968.0, ans=0.125 +2024-08-29 15:16:22,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=256074.66666666666, ans=0.2 +2024-08-29 15:16:23,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=256128.0, ans=0.2 +2024-08-29 15:16:27,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256128.0, ans=0.125 +2024-08-29 15:16:28,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.30 vs. limit=22.5 +2024-08-29 15:16:29,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.01 vs. limit=15.0 +2024-08-29 15:16:32,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=256181.33333333334, ans=0.0 +2024-08-29 15:16:39,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=256181.33333333334, ans=12.0 +2024-08-29 15:16:44,245 INFO [train.py:1114] (1/4) Epoch 20, batch 750, loss[loss=0.1982, simple_loss=0.2767, pruned_loss=0.0437, ctc_loss=0.08073, over 19487.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2574, pruned_loss=0.03845, ctc_loss=0.07175, over 3775736.67 frames. 
], batch size: 54, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 15:16:46,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=256234.66666666666, ans=0.025 +2024-08-29 15:17:03,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-29 15:17:06,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-29 15:17:07,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=256341.33333333334, ans=0.0 +2024-08-29 15:17:14,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256394.66666666666, ans=0.125 +2024-08-29 15:17:18,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=256394.66666666666, ans=0.125 +2024-08-29 15:17:28,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=256448.0, ans=0.125 +2024-08-29 15:17:29,505 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.446e+02 1.840e+02 2.370e+02 3.601e+02, threshold=3.680e+02, percent-clipped=0.0 +2024-08-29 15:17:34,165 INFO [train.py:1114] (1/4) Epoch 20, batch 800, loss[loss=0.1745, simple_loss=0.2473, pruned_loss=0.0364, ctc_loss=0.0725, over 19810.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2577, pruned_loss=0.03868, ctc_loss=0.07219, over 3797557.31 frames. ], batch size: 49, lr: 7.52e-03, grad_scale: 32.0 +2024-08-29 15:17:35,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256501.33333333334, ans=0.0 +2024-08-29 15:17:43,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=12.0 +2024-08-29 15:17:49,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=256554.66666666666, ans=0.0 +2024-08-29 15:17:50,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256554.66666666666, ans=0.0 +2024-08-29 15:17:54,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=256608.0, ans=0.125 +2024-08-29 15:18:20,535 INFO [train.py:1114] (1/4) Epoch 20, batch 850, loss[loss=0.186, simple_loss=0.2685, pruned_loss=0.03831, ctc_loss=0.06739, over 19661.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2575, pruned_loss=0.03875, ctc_loss=0.07209, over 3816420.74 frames. 
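
The grad_scale field is the Native AMP loss scale: it halves when a step produces inf/nan gradients (32.0 at batch 400 above, 16.0 by batch 450) and grows back after a run of clean steps (32.0 again by batch 800). A standard PyTorch usage sketch; the model, optimizer, and growth_interval below are stand-ins for illustration, not the training script's actual values:

```python
import torch

# Dynamic loss scaling with Native AMP (assumes a CUDA device; the model
# and optimizer are placeholders for illustration).
model = torch.nn.Linear(80, 512).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=7.55e-3)
scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=100)

for step in range(200):
    x = torch.randn(16, 80, device="cuda")
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(x).pow(2).mean()
    scaler.scale(loss).backward()
    scaler.step(optimizer)   # skipped when inf/nan gradients are detected
    scaler.update()          # halves the scale on overflow, regrows it later

print("grad_scale:", scaler.get_scale())
```
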
], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:18:25,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256768.0, ans=0.1 +2024-08-29 15:18:29,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=256768.0, ans=0.125 +2024-08-29 15:18:36,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=256821.33333333334, ans=0.1 +2024-08-29 15:18:38,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256821.33333333334, ans=0.125 +2024-08-29 15:18:41,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.60 vs. limit=10.0 +2024-08-29 15:18:42,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=256874.66666666666, ans=0.125 +2024-08-29 15:18:47,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=256874.66666666666, ans=0.125 +2024-08-29 15:18:49,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=256874.66666666666, ans=0.125 +2024-08-29 15:19:04,726 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.415e+02 1.606e+02 2.010e+02 3.804e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-29 15:19:09,482 INFO [train.py:1114] (1/4) Epoch 20, batch 900, loss[loss=0.1601, simple_loss=0.2366, pruned_loss=0.03027, ctc_loss=0.05757, over 19408.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2577, pruned_loss=0.03905, ctc_loss=0.07264, over 3819263.87 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:19:26,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=257088.0, ans=0.125 +2024-08-29 15:19:27,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.55 vs. limit=15.0 +2024-08-29 15:19:35,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=257141.33333333334, ans=0.025 +2024-08-29 15:19:40,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.28 vs. limit=15.0 +2024-08-29 15:19:52,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.52 vs. limit=15.0 +2024-08-29 15:19:55,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.46 vs. limit=22.5 +2024-08-29 15:19:59,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-08-29 15:19:59,860 INFO [train.py:1114] (1/4) Epoch 20, batch 950, loss[loss=0.1692, simple_loss=0.246, pruned_loss=0.03357, ctc_loss=0.06299, over 19481.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2589, pruned_loss=0.03959, ctc_loss=0.07369, over 3819608.55 frames. 
], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:20:02,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0 +2024-08-29 15:20:08,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.98 vs. limit=12.0 +2024-08-29 15:20:25,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257408.0, ans=0.125 +2024-08-29 15:20:32,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=257461.33333333334, ans=0.125 +2024-08-29 15:20:32,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=257461.33333333334, ans=0.0 +2024-08-29 15:20:35,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=257461.33333333334, ans=0.0 +2024-08-29 15:20:43,846 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.399e+02 1.695e+02 2.094e+02 3.389e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-29 15:20:48,476 INFO [train.py:1114] (1/4) Epoch 20, batch 1000, loss[loss=0.1594, simple_loss=0.2383, pruned_loss=0.02919, ctc_loss=0.05519, over 19840.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2588, pruned_loss=0.03964, ctc_loss=0.0738, over 3816334.17 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:20:52,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=257568.0, ans=0.2 +2024-08-29 15:21:24,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=257728.0, ans=0.025 +2024-08-29 15:21:26,779 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:21:36,905 INFO [train.py:1114] (1/4) Epoch 20, batch 1050, loss[loss=0.1905, simple_loss=0.2663, pruned_loss=0.04245, ctc_loss=0.0747, over 19825.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2577, pruned_loss=0.03923, ctc_loss=0.07308, over 3821748.40 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:21:49,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=257888.0, ans=0.2 +2024-08-29 15:21:51,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257888.0, ans=0.1 +2024-08-29 15:21:53,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257888.0, ans=0.0 +2024-08-29 15:22:00,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.77 vs. 
limit=22.5 +2024-08-29 15:22:06,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257994.66666666666, ans=0.125 +2024-08-29 15:22:10,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=257994.66666666666, ans=0.125 +2024-08-29 15:22:20,122 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.405e+02 1.673e+02 2.019e+02 3.302e+02, threshold=3.347e+02, percent-clipped=0.0 +2024-08-29 15:22:22,472 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=15.0 +2024-08-29 15:22:24,715 INFO [train.py:1114] (1/4) Epoch 20, batch 1100, loss[loss=0.179, simple_loss=0.2588, pruned_loss=0.03577, ctc_loss=0.06924, over 19589.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2575, pruned_loss=0.03888, ctc_loss=0.07258, over 3829696.72 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:22:37,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=12.0 +2024-08-29 15:23:14,520 INFO [train.py:1114] (1/4) Epoch 20, batch 1150, loss[loss=0.1758, simple_loss=0.2556, pruned_loss=0.0351, ctc_loss=0.06461, over 19589.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2573, pruned_loss=0.03866, ctc_loss=0.07227, over 3829160.70 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:23:19,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0 +2024-08-29 15:23:44,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.31 vs. limit=22.5 +2024-08-29 15:23:50,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258528.0, ans=0.125 +2024-08-29 15:23:58,293 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.386e+02 1.703e+02 2.133e+02 3.069e+02, threshold=3.407e+02, percent-clipped=0.0 +2024-08-29 15:24:02,922 INFO [train.py:1114] (1/4) Epoch 20, batch 1200, loss[loss=0.1756, simple_loss=0.2614, pruned_loss=0.03228, ctc_loss=0.06296, over 19848.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2577, pruned_loss=0.03861, ctc_loss=0.07221, over 3824397.58 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:24:17,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=258688.0, ans=0.0 +2024-08-29 15:24:19,683 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:24:28,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.36 vs. limit=15.0 +2024-08-29 15:24:38,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.32 vs. limit=15.0 +2024-08-29 15:24:50,405 INFO [train.py:1114] (1/4) Epoch 20, batch 1250, loss[loss=0.1962, simple_loss=0.2701, pruned_loss=0.04458, ctc_loss=0.08296, over 19518.00 frames. 
+2024-08-29 15:25:02,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=258954.66666666666, ans=0.0
+2024-08-29 15:25:16,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0
+2024-08-29 15:25:21,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=259061.33333333334, ans=0.2
+2024-08-29 15:25:33,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259114.66666666666, ans=0.125
+2024-08-29 15:25:35,748 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.412e+02 1.655e+02 2.039e+02 3.415e+02, threshold=3.310e+02, percent-clipped=2.0
+2024-08-29 15:25:40,341 INFO [train.py:1114] (1/4) Epoch 20, batch 1300, loss[loss=0.2072, simple_loss=0.2798, pruned_loss=0.04957, ctc_loss=0.08834, over 18844.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2577, pruned_loss=0.03846, ctc_loss=0.07175, over 3846249.38 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 32.0
+2024-08-29 15:25:48,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259168.0, ans=0.125
+2024-08-29 15:25:58,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.21 vs. limit=22.5
+2024-08-29 15:26:16,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=259381.33333333334, ans=0.0
+2024-08-29 15:26:16,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.78 vs. limit=22.5
+2024-08-29 15:26:20,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=259381.33333333334, ans=0.0
+2024-08-29 15:26:20,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=259381.33333333334, ans=0.0
+2024-08-29 15:26:22,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.37 vs. limit=10.0
+2024-08-29 15:26:26,567 INFO [train.py:1114] (1/4) Epoch 20, batch 1350, loss[loss=0.1734, simple_loss=0.252, pruned_loss=0.03497, ctc_loss=0.06234, over 19790.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2581, pruned_loss=0.03856, ctc_loss=0.07182, over 3857281.69 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0
+2024-08-29 15:27:31,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259541.33333333334, ans=0.1
+2024-08-29 15:27:47,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.431e+02 1.643e+02 2.121e+02 3.858e+02, threshold=3.286e+02, percent-clipped=2.0
+2024-08-29 15:27:50,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.14 vs. limit=6.0
+2024-08-29 15:27:51,730 INFO [train.py:1114] (1/4) Epoch 20, batch 1400, loss[loss=0.1637, simple_loss=0.2321, pruned_loss=0.03549, ctc_loss=0.06069, over 19662.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2579, pruned_loss=0.03866, ctc_loss=0.07209, over 3864220.63 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0
+2024-08-29 15:27:54,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=259701.33333333334, ans=0.0
+2024-08-29 15:27:56,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=259701.33333333334, ans=0.2
+2024-08-29 15:28:07,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=259754.66666666666, ans=0.125
+2024-08-29 15:28:08,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=259754.66666666666, ans=0.125
+2024-08-29 15:28:13,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259808.0, ans=0.1
+2024-08-29 15:28:14,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.43 vs. limit=10.0
+2024-08-29 15:28:16,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=259808.0, ans=0.2
+2024-08-29 15:28:22,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=259861.33333333334, ans=0.2
+2024-08-29 15:29:17,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=259861.33333333334, ans=0.125
+2024-08-29 15:29:32,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=259914.66666666666, ans=0.125
+2024-08-29 15:30:03,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=259914.66666666666, ans=0.09899494936611666
+2024-08-29 15:30:05,117 INFO [train.py:1114] (1/4) Epoch 20, batch 1450, loss[loss=0.1861, simple_loss=0.267, pruned_loss=0.03804, ctc_loss=0.07263, over 19660.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2586, pruned_loss=0.03889, ctc_loss=0.07266, over 3862364.72 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0
+2024-08-29 15:30:17,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260021.33333333334, ans=0.1
+2024-08-29 15:30:28,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=260074.66666666666, ans=0.2
+2024-08-29 15:30:39,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=260128.0, ans=0.125
+2024-08-29 15:30:43,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=260181.33333333334, ans=0.025
+2024-08-29 15:30:44,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260181.33333333334, ans=0.1
+2024-08-29 15:30:45,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=260181.33333333334, ans=0.07
+2024-08-29 15:30:45,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=260181.33333333334, ans=0.125
+2024-08-29 15:30:48,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.404e+02 1.559e+02 1.995e+02 3.603e+02, threshold=3.118e+02, percent-clipped=1.0
+2024-08-29 15:30:53,753 INFO [train.py:1114] (1/4) Epoch 20, batch 1500, loss[loss=0.1843, simple_loss=0.2649, pruned_loss=0.03693, ctc_loss=0.07478, over 19567.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2587, pruned_loss=0.03867, ctc_loss=0.07242, over 3862278.54 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 32.0
+2024-08-29 15:30:55,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=260234.66666666666, ans=0.0
+2024-08-29 15:30:55,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=260234.66666666666, ans=0.0
+2024-08-29 15:31:04,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=260288.0, ans=0.015
+2024-08-29 15:31:05,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.37 vs. limit=15.0
+2024-08-29 15:31:06,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260288.0, ans=0.0
+2024-08-29 15:31:07,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=260288.0, ans=0.125
+2024-08-29 15:31:32,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260448.0, ans=0.1
+2024-08-29 15:31:42,543 INFO [train.py:1114] (1/4) Epoch 20, batch 1550, loss[loss=0.1946, simple_loss=0.2708, pruned_loss=0.04284, ctc_loss=0.08189, over 19615.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2585, pruned_loss=0.03884, ctc_loss=0.07282, over 3845948.64 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 16.0
+2024-08-29 15:31:45,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=260501.33333333334, ans=0.125
+2024-08-29 15:31:54,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=260554.66666666666, ans=0.125
+2024-08-29 15:32:01,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.39 vs. limit=15.0
+2024-08-29 15:32:02,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=260608.0, ans=0.125
+2024-08-29 15:32:16,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260661.33333333334, ans=0.1
+2024-08-29 15:32:17,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260661.33333333334, ans=0.125
+2024-08-29 15:32:23,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.14 vs. limit=15.0
+2024-08-29 15:32:26,816 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.439e+02 1.834e+02 2.160e+02 3.604e+02, threshold=3.667e+02, percent-clipped=4.0
+2024-08-29 15:32:30,538 INFO [train.py:1114] (1/4) Epoch 20, batch 1600, loss[loss=0.1805, simple_loss=0.2619, pruned_loss=0.03616, ctc_loss=0.06725, over 19846.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2582, pruned_loss=0.03882, ctc_loss=0.07281, over 3836160.04 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0
+2024-08-29 15:32:32,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=260768.0, ans=0.0
+2024-08-29 15:32:48,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260821.33333333334, ans=0.125
+2024-08-29 15:32:52,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=260874.66666666666, ans=0.0
+2024-08-29 15:32:53,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=260874.66666666666, ans=0.125
+2024-08-29 15:32:57,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260874.66666666666, ans=0.0
+2024-08-29 15:33:00,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=260874.66666666666, ans=0.0
+2024-08-29 15:33:05,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=260928.0, ans=0.0
+2024-08-29 15:33:07,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=260928.0, ans=0.0
+2024-08-29 15:33:20,597 INFO [train.py:1114] (1/4) Epoch 20, batch 1650, loss[loss=0.1696, simple_loss=0.2559, pruned_loss=0.0305, ctc_loss=0.05563, over 19659.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2581, pruned_loss=0.03883, ctc_loss=0.07281, over 3834174.80 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0
+2024-08-29 15:33:20,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261034.66666666666, ans=0.0
+2024-08-29 15:33:21,934 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:33:26,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.06 vs. limit=22.5
+2024-08-29 15:33:42,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261141.33333333334, ans=0.1
+2024-08-29 15:33:52,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=261194.66666666666, ans=0.95
+2024-08-29 15:33:57,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.35 vs. limit=15.0
+2024-08-29 15:33:58,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261248.0, ans=0.1
+2024-08-29 15:33:58,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. limit=10.0
+2024-08-29 15:34:03,139 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.529e+02 1.762e+02 2.426e+02 4.170e+02, threshold=3.524e+02, percent-clipped=3.0
+2024-08-29 15:34:06,784 INFO [train.py:1114] (1/4) Epoch 20, batch 1700, loss[loss=0.167, simple_loss=0.2335, pruned_loss=0.03684, ctc_loss=0.06689, over 19673.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2578, pruned_loss=0.03843, ctc_loss=0.07217, over 3848230.73 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0
+2024-08-29 15:34:32,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261408.0, ans=0.0
+2024-08-29 15:34:36,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=12.0
+2024-08-29 15:34:46,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=261514.66666666666, ans=0.125
+2024-08-29 15:34:49,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=261514.66666666666, ans=0.2
+2024-08-29 15:34:53,252 INFO [train.py:1114] (1/4) Epoch 20, batch 1750, loss[loss=0.1557, simple_loss=0.2293, pruned_loss=0.02992, ctc_loss=0.05578, over 19648.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2578, pruned_loss=0.03835, ctc_loss=0.07191, over 3852510.28 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0
+2024-08-29 15:34:57,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=261568.0, ans=0.2
+2024-08-29 15:35:10,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261674.66666666666, ans=0.125
+2024-08-29 15:35:12,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=261674.66666666666, ans=0.2
+2024-08-29 15:35:14,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=261674.66666666666, ans=0.125
+2024-08-29 15:35:18,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=261728.0, ans=0.2
+2024-08-29 15:35:33,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.486e+02 1.910e+02 2.389e+02 3.898e+02, threshold=3.819e+02, percent-clipped=3.0
+2024-08-29 15:35:37,073 INFO [train.py:1114] (1/4) Epoch 20, batch 1800, loss[loss=0.1737, simple_loss=0.2572, pruned_loss=0.03171, ctc_loss=0.06698, over 19620.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2581, pruned_loss=0.03837, ctc_loss=0.07195, over 3854196.06 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0
+2024-08-29 15:35:40,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.65 vs. limit=10.0
+2024-08-29 15:35:41,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=261834.66666666666, ans=0.125
+2024-08-29 15:35:50,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261888.0, ans=0.125
+2024-08-29 15:35:58,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=261941.33333333334, ans=0.0
+2024-08-29 15:36:02,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=261941.33333333334, ans=0.0
+2024-08-29 15:36:11,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261994.66666666666, ans=0.125
+2024-08-29 15:36:11,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261994.66666666666, ans=0.125
+2024-08-29 15:36:15,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.65 vs. limit=10.0
+2024-08-29 15:36:16,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.58 vs. limit=22.5
+2024-08-29 15:36:21,926 INFO [train.py:1114] (1/4) Epoch 20, batch 1850, loss[loss=0.1946, simple_loss=0.2767, pruned_loss=0.04186, ctc_loss=0.07209, over 19588.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2576, pruned_loss=0.03826, ctc_loss=0.07162, over 3857282.87 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0
+2024-08-29 15:36:28,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=262101.33333333334, ans=0.0
+2024-08-29 15:36:29,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.70 vs. limit=15.0
+2024-08-29 15:36:35,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=262154.6666666667, ans=0.2
+2024-08-29 15:36:38,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=262154.6666666667, ans=0.0
+2024-08-29 15:36:38,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=262154.6666666667, ans=0.07
+2024-08-29 15:36:48,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=262208.0, ans=0.0
+2024-08-29 15:36:56,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262261.3333333333, ans=0.0
+2024-08-29 15:37:03,781 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.463e+02 1.737e+02 2.223e+02 4.343e+02, threshold=3.475e+02, percent-clipped=3.0
+2024-08-29 15:37:07,300 INFO [train.py:1114] (1/4) Epoch 20, batch 1900, loss[loss=0.1907, simple_loss=0.273, pruned_loss=0.03935, ctc_loss=0.07443, over 19660.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2582, pruned_loss=0.03847, ctc_loss=0.07207, over 3861469.85 frames. ], batch size: 59, lr: 7.44e-03, grad_scale: 32.0
+2024-08-29 15:37:13,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262368.0, ans=0.1
+2024-08-29 15:37:35,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=262421.3333333333, ans=0.025
+2024-08-29 15:38:32,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.18 vs. limit=8.0
+2024-08-29 15:38:44,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=262528.0, ans=0.0
+2024-08-29 15:38:49,689 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.60 vs. limit=15.0
+2024-08-29 15:38:52,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=262581.3333333333, ans=0.07
+2024-08-29 15:38:56,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262581.3333333333, ans=0.0
+2024-08-29 15:39:01,407 INFO [train.py:1114] (1/4) Epoch 20, batch 1950, loss[loss=0.1584, simple_loss=0.2394, pruned_loss=0.02811, ctc_loss=0.05302, over 19589.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2591, pruned_loss=0.03868, ctc_loss=0.07252, over 3870567.74 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0
+2024-08-29 15:39:43,487 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.359e+02 1.503e+02 2.197e+02 3.515e+02, threshold=3.007e+02, percent-clipped=1.0
+2024-08-29 15:39:47,122 INFO [train.py:1114] (1/4) Epoch 20, batch 2000, loss[loss=0.1559, simple_loss=0.2282, pruned_loss=0.02983, ctc_loss=0.05965, over 19661.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2597, pruned_loss=0.039, ctc_loss=0.073, over 3854852.83 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0
+2024-08-29 15:40:00,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262954.6666666667, ans=0.125
+2024-08-29 15:40:05,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=263008.0, ans=0.125
+2024-08-29 15:40:07,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=263008.0, ans=0.125
+2024-08-29 15:40:16,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263061.3333333333, ans=0.1
+2024-08-29 15:40:31,088 INFO [train.py:1114] (1/4) Epoch 20, batch 2050, loss[loss=0.1658, simple_loss=0.2381, pruned_loss=0.03397, ctc_loss=0.06376, over 19690.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2588, pruned_loss=0.03867, ctc_loss=0.07244, over 3849839.36 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0
+2024-08-29 15:40:39,120 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:40:40,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=263221.3333333333, ans=0.0
+2024-08-29 15:40:43,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263221.3333333333, ans=0.1
+2024-08-29 15:41:11,009 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.438e+02 1.706e+02 2.328e+02 5.097e+02, threshold=3.413e+02, percent-clipped=11.0
+2024-08-29 15:41:14,537 INFO [train.py:1114] (1/4) Epoch 20, batch 2100, loss[loss=0.1825, simple_loss=0.2631, pruned_loss=0.03761, ctc_loss=0.06698, over 19757.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2584, pruned_loss=0.03836, ctc_loss=0.07189, over 3857655.54 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0
+2024-08-29 15:41:21,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263434.6666666667, ans=0.1
+2024-08-29 15:41:24,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=263488.0, ans=0.0
+2024-08-29 15:41:31,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.38 vs. limit=10.0
+2024-08-29 15:41:57,775 INFO [train.py:1114] (1/4) Epoch 20, batch 2150, loss[loss=0.1762, simple_loss=0.2512, pruned_loss=0.03645, ctc_loss=0.07061, over 19590.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2575, pruned_loss=0.03815, ctc_loss=0.07135, over 3868423.44 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0
+2024-08-29 15:42:16,294 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:42:20,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263808.0, ans=0.1
+2024-08-29 15:42:26,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=263861.3333333333, ans=0.0
+2024-08-29 15:42:37,627 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.475e+02 1.770e+02 2.541e+02 4.904e+02, threshold=3.539e+02, percent-clipped=6.0
+2024-08-29 15:42:41,077 INFO [train.py:1114] (1/4) Epoch 20, batch 2200, loss[loss=0.1943, simple_loss=0.2662, pruned_loss=0.04432, ctc_loss=0.08427, over 19583.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2571, pruned_loss=0.03799, ctc_loss=0.071, over 3866680.19 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0
+2024-08-29 15:42:56,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0
+2024-08-29 15:42:57,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0
+2024-08-29 15:42:58,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264074.6666666667, ans=0.125
+2024-08-29 15:42:58,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264074.6666666667, ans=0.125
+2024-08-29 15:43:00,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=264074.6666666667, ans=15.0
+2024-08-29 15:43:02,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=264074.6666666667, ans=0.125
+2024-08-29 15:43:08,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=264128.0, ans=0.125
+2024-08-29 15:43:17,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264181.3333333333, ans=0.125
+2024-08-29 15:43:20,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5
+2024-08-29 15:43:25,164 INFO [train.py:1114] (1/4) Epoch 20, batch 2250, loss[loss=0.1885, simple_loss=0.2691, pruned_loss=0.03941, ctc_loss=0.07289, over 19619.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2575, pruned_loss=0.03812, ctc_loss=0.07111, over 3866736.95 frames. ], batch size: 55, lr: 7.41e-03, grad_scale: 32.0
+2024-08-29 15:43:41,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=264341.3333333333, ans=0.0
+2024-08-29 15:43:57,043 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:44:04,420 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.490e+02 1.909e+02 2.542e+02 3.813e+02, threshold=3.818e+02, percent-clipped=1.0
+2024-08-29 15:44:04,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=264448.0, ans=0.0
+2024-08-29 15:44:05,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264448.0, ans=0.1
+2024-08-29 15:44:06,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=264448.0, ans=0.09899494936611666
+2024-08-29 15:44:07,875 INFO [train.py:1114] (1/4) Epoch 20, batch 2300, loss[loss=0.1744, simple_loss=0.2417, pruned_loss=0.03866, ctc_loss=0.07457, over 19515.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2569, pruned_loss=0.03821, ctc_loss=0.07136, over 3860575.75 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0
+2024-08-29 15:44:32,906 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:44:52,850 INFO [train.py:1114] (1/4) Epoch 20, batch 2350, loss[loss=0.2, simple_loss=0.2714, pruned_loss=0.04678, ctc_loss=0.08783, over 19677.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.257, pruned_loss=0.03855, ctc_loss=0.07191, over 3863388.31 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0
+2024-08-29 15:44:53,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264768.0, ans=0.125
+2024-08-29 15:44:54,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264768.0, ans=0.1
+2024-08-29 15:45:01,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=264821.3333333333, ans=0.0
+2024-08-29 15:45:04,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264821.3333333333, ans=0.0
+2024-08-29 15:45:06,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.96 vs. limit=15.0
+2024-08-29 15:45:10,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=264874.6666666667, ans=0.0
+2024-08-29 15:45:26,607 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:45:31,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=264981.3333333333, ans=0.125
+2024-08-29 15:45:32,198 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.408e+02 1.642e+02 2.194e+02 4.028e+02, threshold=3.284e+02, percent-clipped=1.0
+2024-08-29 15:45:32,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=264981.3333333333, ans=0.0
+2024-08-29 15:45:35,650 INFO [train.py:1114] (1/4) Epoch 20, batch 2400, loss[loss=0.2042, simple_loss=0.2728, pruned_loss=0.0487, ctc_loss=0.09524, over 19227.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.259, pruned_loss=0.03936, ctc_loss=0.07321, over 3857894.04 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0
+2024-08-29 15:45:38,691 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0
+2024-08-29 15:45:48,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265088.0, ans=0.125
+2024-08-29 15:45:51,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=265088.0, ans=0.125
+2024-08-29 15:46:03,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=265194.6666666667, ans=0.125
+2024-08-29 15:46:04,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=265194.6666666667, ans=0.125
+2024-08-29 15:46:07,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=265194.6666666667, ans=0.0
+2024-08-29 15:46:08,894 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 15:46:08,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=265194.6666666667, ans=0.2
+2024-08-29 15:46:19,426 INFO [train.py:1114] (1/4) Epoch 20, batch 2450, loss[loss=0.2305, simple_loss=0.2863, pruned_loss=0.06296, ctc_loss=0.1218, over 13318.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2619, pruned_loss=0.04104, ctc_loss=0.07676, over 3731381.34 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0
+2024-08-29 15:46:19,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265301.3333333333, ans=0.125
+2024-08-29 15:46:22,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=265301.3333333333, ans=0.0
+2024-08-29 15:46:49,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=265461.3333333333, ans=0.2
+2024-08-29 15:46:51,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=265461.3333333333, ans=0.125
+2024-08-29 15:48:24,414 INFO [train.py:1387] (1/4) Done!
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-2 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-2
new file mode 100644
index 0000000000000000000000000000000000000000..e749de453dd3dbc1395a08a63dc7a17fcdc8db35
--- /dev/null
+++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-2
@@ -0,0 +1,675 @@
+2024-08-29 10:47:00,759 INFO [train.py:1182] (2/4) Training started
+2024-08-29 10:47:00,761 INFO [train.py:1192] (2/4) Device: cuda:2
+2024-08-29 10:47:00,825 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500}
+2024-08-29 10:47:00,825 INFO [train.py:1212] (2/4) About to create model
+2024-08-29 10:50:07,614 INFO [train.py:1216] (2/4) Number of model parameters: 65805511
+2024-08-29 10:50:07,811 INFO [checkpoint.py:112] (2/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt
+2024-08-29 10:54:13,640 INFO [train.py:1231] (2/4) Using DDP
+2024-08-29 10:57:38,605 INFO [train.py:1243] (2/4) Loading optimizer state dict
+2024-08-29 10:57:38,761 INFO [train.py:1251] (2/4) Loading scheduler state dict
+2024-08-29 10:57:39,612 INFO [asr_datamodule.py:894] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts
+2024-08-29 10:57:39,669 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN
+2024-08-29 10:57:39,669 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment
+2024-08-29 10:57:39,669 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80
+2024-08-29 10:57:39,669 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10
+2024-08-29 10:57:39,669 INFO [asr_datamodule.py:738] (2/4) About to create train dataset
+2024-08-29 10:57:39,669 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler.
+2024-08-29 10:57:41,246 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader
+2024-08-29 10:57:41,253 INFO [asr_datamodule.py:911] (2/4) About to get dev-clean cuts
+2024-08-29 10:58:07,692 INFO [asr_datamodule.py:918] (2/4) About to get dev-other cuts
+2024-08-29 10:58:09,678 INFO [asr_datamodule.py:814] (2/4) About to create dev dataset
+2024-08-29 10:58:09,992 INFO [asr_datamodule.py:831] (2/4) About to create dev dataloader
+2024-08-29 10:58:09,992 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-29 11:07:36,362 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.62 vs. limit=7.5
+2024-08-29 11:08:02,421 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB
+2024-08-29 11:08:03,529 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB
+2024-08-29 11:11:47,150 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12109MB
+2024-08-29 11:11:48,389 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12140MB
+2024-08-29 11:21:31,520 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12140MB
+2024-08-29 11:21:32,822 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 12140MB
+2024-08-29 11:21:32,843 INFO [train.py:1344] (2/4) Loading grad scaler state dict
+2024-08-29 11:27:35,429 INFO [train.py:1114] (2/4) Epoch 19, batch 0, loss[loss=0.1782, simple_loss=0.248, pruned_loss=0.04, ctc_loss=0.07085, over 19405.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.248, pruned_loss=0.04, ctc_loss=0.07085, over 19405.00 frames. ], batch size: 48, lr: 7.99e-03, grad_scale: 32.0
+2024-08-29 11:27:35,429 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-29 11:31:58,883 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames.
+2024-08-29 11:31:58,884 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12140MB
+2024-08-29 11:31:59,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=12.0
+2024-08-29 11:33:51,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.43 vs. limit=15.0
+2024-08-29 11:38:48,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238954.66666666666, ans=0.125
+2024-08-29 11:54:21,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239008.0, ans=0.1
+2024-08-29 12:32:18,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=239114.66666666666, ans=0.025
+2024-08-29 12:33:11,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=239114.66666666666, ans=0.125
+2024-08-29 12:46:38,395 INFO [train.py:1114] (2/4) Epoch 19, batch 50, loss[loss=0.1813, simple_loss=0.245, pruned_loss=0.04307, ctc_loss=0.0787, over 19697.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2611, pruned_loss=0.04071, ctc_loss=0.07635, over 843613.11 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-29 12:48:51,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239221.33333333334, ans=0.125
+2024-08-29 12:53:57,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=239274.66666666666, ans=0.125
+2024-08-29 12:56:29,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=239328.0, ans=0.2
+2024-08-29 12:57:42,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239328.0, ans=0.1
+2024-08-29 12:57:49,123 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.479e+02 1.753e+02 2.191e+02 3.244e+02, threshold=3.506e+02, percent-clipped=0.0
+2024-08-29 13:04:10,867 INFO [train.py:1114] (2/4) Epoch 19, batch 100, loss[loss=0.157, simple_loss=0.2391, pruned_loss=0.02629, ctc_loss=0.05577, over 19711.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.263, pruned_loss=0.04077, ctc_loss=0.07664, over 1498173.55 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 32.0
+2024-08-29 13:05:41,248 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-29 13:07:49,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=239541.33333333334, ans=0.125
+2024-08-29 13:08:30,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239541.33333333334, ans=0.125
+2024-08-29 13:09:37,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239594.66666666666, ans=0.125
+2024-08-29 13:17:23,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=239648.0, ans=0.2
+2024-08-29 13:17:29,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=239701.33333333334, ans=0.0
+2024-08-29 13:18:05,187 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.70 vs. limit=15.0
+2024-08-29 13:21:06,825 INFO [train.py:1114] (2/4) Epoch 19, batch 150, loss[loss=0.1662, simple_loss=0.2348, pruned_loss=0.03576, ctc_loss=0.06506, over 19712.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2605, pruned_loss=0.04022, ctc_loss=0.07532, over 2027757.76 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0
+2024-08-29 13:21:08,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239754.66666666666, ans=0.1
+2024-08-29 13:21:33,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239808.0, ans=0.125
+2024-08-29 13:21:52,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=239861.33333333334, ans=0.04949747468305833
+2024-08-29 13:24:23,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239914.66666666666, ans=0.0
+2024-08-29 13:24:24,035 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.511e+02 1.939e+02 2.474e+02 3.688e+02, threshold=3.878e+02, percent-clipped=4.0
+2024-08-29 13:24:25,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=239914.66666666666, ans=0.125
+2024-08-29 13:25:00,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=239914.66666666666, ans=0.125
+2024-08-29 13:27:00,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239968.0, ans=0.125
+2024-08-29 13:27:31,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239968.0, ans=0.1
+2024-08-29 13:31:37,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=240021.33333333334, ans=0.125
+2024-08-29 13:31:38,287 INFO [train.py:1114] (2/4) Epoch 19, batch 200, loss[loss=0.2202, simple_loss=0.2881, pruned_loss=0.05467, ctc_loss=0.1073, over 18193.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2604, pruned_loss=0.04009, ctc_loss=0.07524, over 2435345.63 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0
+2024-08-29 13:31:44,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240021.33333333334, ans=0.1
+2024-08-29 13:31:55,449 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.05 vs. limit=12.0
+2024-08-29 13:31:58,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.82 vs. limit=22.5
+2024-08-29 13:32:08,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=240128.0, ans=0.02
+2024-08-29 13:32:14,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=240128.0, ans=10.0
+2024-08-29 13:32:17,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240181.33333333334, ans=0.125
+2024-08-29 13:32:18,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=240181.33333333334, ans=0.0
+2024-08-29 13:32:28,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=240234.66666666666, ans=0.2
+2024-08-29 13:32:34,903 INFO [train.py:1114] (2/4) Epoch 19, batch 250, loss[loss=0.1973, simple_loss=0.2698, pruned_loss=0.04565, ctc_loss=0.08345, over 19434.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2598, pruned_loss=0.03957, ctc_loss=0.07416, over 2756225.88 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0
+2024-08-29 13:32:47,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=240288.0, ans=0.2
+2024-08-29 13:33:14,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=240288.0, ans=0.0
+2024-08-29 13:33:35,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=240394.66666666666, ans=0.125
+2024-08-29 13:33:39,438 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.435e+02 1.779e+02 2.329e+02 4.143e+02, threshold=3.559e+02, percent-clipped=3.0
+2024-08-29 13:33:40,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=240448.0, ans=0.1
+2024-08-29 13:33:42,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=240448.0, ans=0.2
+2024-08-29 13:37:09,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240501.33333333334, ans=0.0
+2024-08-29 13:37:09,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.03 vs. limit=15.0
+2024-08-29 13:37:09,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=240501.33333333334, ans=0.125
+2024-08-29 13:37:10,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=240501.33333333334, ans=0.025
+2024-08-29 13:37:19,125 INFO [train.py:1114] (2/4) Epoch 19, batch 300, loss[loss=0.2023, simple_loss=0.2753, pruned_loss=0.04807, ctc_loss=0.08275, over 19508.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.259, pruned_loss=0.03934, ctc_loss=0.07391, over 3000511.74 frames. ], batch size: 61, lr: 7.96e-03, grad_scale: 32.0
+2024-08-29 13:38:45,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=240661.33333333334, ans=0.0
+2024-08-29 13:39:58,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=240714.66666666666, ans=0.125
+2024-08-29 13:40:07,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240768.0, ans=0.1
+2024-08-29 13:40:51,490 INFO [train.py:1114] (2/4) Epoch 19, batch 350, loss[loss=0.179, simple_loss=0.2466, pruned_loss=0.04, ctc_loss=0.07875, over 19796.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2602, pruned_loss=0.03947, ctc_loss=0.07408, over 3190250.24 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0
+2024-08-29 13:41:21,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=240874.66666666666, ans=0.125
+2024-08-29 13:41:41,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=240981.33333333334, ans=15.0
+2024-08-29 13:41:41,714 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.478e+02 1.769e+02 2.422e+02 3.784e+02, threshold=3.538e+02, percent-clipped=2.0
+2024-08-29 13:41:41,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=240981.33333333334, ans=0.125
+2024-08-29 13:42:15,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=22.5
+2024-08-29 13:43:44,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=241034.66666666666, ans=0.125
+2024-08-29 13:44:27,812 INFO [train.py:1114] (2/4) Epoch 19, batch 400, loss[loss=0.1832, simple_loss=0.2646, pruned_loss=0.03678, ctc_loss=0.07086, over 19481.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2598, pruned_loss=0.03929, ctc_loss=0.07373, over 3342450.82 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0
+2024-08-29 13:45:41,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241141.33333333334, ans=0.125
+2024-08-29 13:45:43,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=241194.66666666666, ans=0.125
+2024-08-29 13:45:48,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=241194.66666666666, ans=0.125
+2024-08-29 13:45:53,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241248.0, ans=0.0
+2024-08-29 13:46:20,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=241248.0, ans=0.09899494936611666
+2024-08-29 13:46:21,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0
+2024-08-29 13:46:40,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=241301.33333333334, ans=0.025
+2024-08-29 13:47:19,618 INFO [train.py:1114] (2/4) Epoch 19, batch 450, loss[loss=0.1837, simple_loss=0.2668, pruned_loss=0.03699, ctc_loss=0.06645, over 19612.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.26, pruned_loss=0.03968, ctc_loss=0.07433, over 3450474.67 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0
+2024-08-29 13:47:31,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=241354.66666666666, ans=0.07
+2024-08-29 13:47:41,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=241408.0, ans=0.0
+2024-08-29 13:47:42,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=241408.0, ans=0.0
+2024-08-29 13:47:42,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241408.0, ans=0.125
+2024-08-29 13:48:02,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241408.0, ans=0.1
+2024-08-29 13:48:31,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0
+2024-08-29 13:48:32,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241461.33333333334, ans=0.125
+2024-08-29 13:48:45,734 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.422e+02 1.638e+02 2.007e+02 3.524e+02, threshold=3.276e+02, percent-clipped=0.0
+2024-08-29 13:48:49,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
+2024-08-29 13:48:56,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=241568.0, ans=0.0
+2024-08-29 13:50:03,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241568.0, ans=0.125
+2024-08-29 13:50:05,026 INFO [train.py:1114] (2/4) Epoch 19, batch 500, loss[loss=0.2018, simple_loss=0.2777, pruned_loss=0.04714, ctc_loss=0.07887, over 19693.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2593, pruned_loss=0.03949, ctc_loss=0.07388, over 3545980.66 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0
+2024-08-29 13:50:21,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=241674.66666666666, ans=0.2
+2024-08-29 13:50:30,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=241728.0, ans=0.025
+2024-08-29 13:51:44,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0
limit=15.0 +2024-08-29 13:52:43,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=241834.66666666666, ans=0.025 +2024-08-29 13:52:47,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=241834.66666666666, ans=0.04949747468305833 +2024-08-29 13:54:05,865 INFO [train.py:1114] (2/4) Epoch 19, batch 550, loss[loss=0.2098, simple_loss=0.2783, pruned_loss=0.05207, ctc_loss=0.09278, over 19316.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2591, pruned_loss=0.03947, ctc_loss=0.07379, over 3608060.51 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:54:35,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.95 vs. limit=22.5 +2024-08-29 13:54:37,010 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.414e+02 1.681e+02 2.031e+02 3.681e+02, threshold=3.361e+02, percent-clipped=2.0 +2024-08-29 13:54:48,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=242101.33333333334, ans=0.2 +2024-08-29 13:54:53,923 INFO [train.py:1114] (2/4) Epoch 19, batch 600, loss[loss=0.1946, simple_loss=0.2702, pruned_loss=0.04338, ctc_loss=0.0807, over 19426.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2592, pruned_loss=0.03939, ctc_loss=0.07363, over 3664988.97 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:55:06,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=242208.0, ans=0.0 +2024-08-29 13:55:11,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.61 vs. limit=10.0 +2024-08-29 13:55:12,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=242261.33333333334, ans=0.025 +2024-08-29 13:55:13,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=242261.33333333334, ans=0.2 +2024-08-29 13:55:27,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=242314.66666666666, ans=0.05 +2024-08-29 13:55:35,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=242368.0, ans=0.125 +2024-08-29 13:55:42,136 INFO [train.py:1114] (2/4) Epoch 19, batch 650, loss[loss=0.1795, simple_loss=0.2579, pruned_loss=0.03642, ctc_loss=0.07094, over 19774.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2587, pruned_loss=0.03911, ctc_loss=0.07317, over 3715999.86 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:55:46,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=242421.33333333334, ans=0.0 +2024-08-29 13:56:02,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.15 vs. 
limit=15.0 +2024-08-29 13:56:14,922 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.536e+02 1.948e+02 2.425e+02 3.839e+02, threshold=3.897e+02, percent-clipped=7.0 +2024-08-29 13:56:33,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242634.66666666666, ans=0.1 +2024-08-29 13:56:40,134 INFO [train.py:1114] (2/4) Epoch 19, batch 700, loss[loss=0.1786, simple_loss=0.2529, pruned_loss=0.03728, ctc_loss=0.07452, over 19734.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2594, pruned_loss=0.03934, ctc_loss=0.07366, over 3749459.92 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:56:40,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.54 vs. limit=6.0 +2024-08-29 13:56:55,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242688.0, ans=0.1 +2024-08-29 13:57:09,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=242741.33333333334, ans=0.95 +2024-08-29 13:57:14,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=242794.66666666666, ans=0.035 +2024-08-29 13:57:24,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=242848.0, ans=0.05 +2024-08-29 13:57:30,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=242901.33333333334, ans=0.2 +2024-08-29 13:57:37,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242901.33333333334, ans=0.1 +2024-08-29 13:57:41,161 INFO [train.py:1114] (2/4) Epoch 19, batch 750, loss[loss=0.1695, simple_loss=0.2487, pruned_loss=0.03297, ctc_loss=0.06074, over 19506.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2591, pruned_loss=0.03941, ctc_loss=0.0738, over 3774607.39 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:58:02,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243061.33333333334, ans=0.125 +2024-08-29 13:58:45,192 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.478e+02 1.857e+02 2.278e+02 3.837e+02, threshold=3.713e+02, percent-clipped=0.0 +2024-08-29 13:59:02,304 INFO [train.py:1114] (2/4) Epoch 19, batch 800, loss[loss=0.1655, simple_loss=0.2377, pruned_loss=0.03426, ctc_loss=0.06202, over 19799.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2586, pruned_loss=0.03926, ctc_loss=0.07347, over 3796075.64 frames. ], batch size: 49, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:59:07,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=243221.33333333334, ans=0.125 +2024-08-29 14:00:02,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=243434.66666666666, ans=0.2 +2024-08-29 14:04:32,056 INFO [train.py:1114] (2/4) Epoch 19, batch 850, loss[loss=0.1922, simple_loss=0.2771, pruned_loss=0.03977, ctc_loss=0.0691, over 19632.00 frames. 
], tot_loss[loss=0.1836, simple_loss=0.2589, pruned_loss=0.0394, ctc_loss=0.07376, over 3814477.48 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 14:04:51,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=243541.33333333334, ans=0.125 +2024-08-29 14:04:53,182 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:04:55,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243594.66666666666, ans=0.1 +2024-08-29 14:05:05,888 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.422e+02 1.643e+02 2.108e+02 3.301e+02, threshold=3.285e+02, percent-clipped=0.0 +2024-08-29 14:05:11,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=243648.0, ans=10.0 +2024-08-29 14:08:14,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=243754.66666666666, ans=0.125 +2024-08-29 14:08:15,263 INFO [train.py:1114] (2/4) Epoch 19, batch 900, loss[loss=0.1657, simple_loss=0.2351, pruned_loss=0.03501, ctc_loss=0.06578, over 19400.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2592, pruned_loss=0.03984, ctc_loss=0.07465, over 3817687.94 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 14:08:26,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=12.0 +2024-08-29 14:10:16,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.65 vs. limit=15.0 +2024-08-29 14:10:46,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=243968.0, ans=0.125 +2024-08-29 14:10:46,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=243968.0, ans=0.025 +2024-08-29 14:10:53,195 INFO [train.py:1114] (2/4) Epoch 19, batch 950, loss[loss=0.1658, simple_loss=0.2361, pruned_loss=0.03481, ctc_loss=0.06457, over 19503.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2595, pruned_loss=0.04001, ctc_loss=0.07476, over 3817453.70 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 16.0 +2024-08-29 14:11:05,129 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.31 vs. 
limit=12.0 +2024-08-29 14:11:14,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244128.0, ans=0.125 +2024-08-29 14:11:22,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244128.0, ans=0.125 +2024-08-29 14:11:27,359 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.452e+02 1.728e+02 2.089e+02 3.728e+02, threshold=3.456e+02, percent-clipped=1.0 +2024-08-29 14:11:28,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=244181.33333333334, ans=0.1 +2024-08-29 14:11:30,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=244181.33333333334, ans=0.09899494936611666 +2024-08-29 14:11:41,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.90 vs. limit=15.0 +2024-08-29 14:11:43,407 INFO [train.py:1114] (2/4) Epoch 19, batch 1000, loss[loss=0.1625, simple_loss=0.2421, pruned_loss=0.03033, ctc_loss=0.05571, over 19847.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2601, pruned_loss=0.04017, ctc_loss=0.07497, over 3813881.18 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:12:26,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244394.66666666666, ans=0.0 +2024-08-29 14:12:39,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. limit=15.0 +2024-08-29 14:12:50,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.83 vs. limit=15.0 +2024-08-29 14:13:17,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244501.33333333334, ans=0.125 +2024-08-29 14:13:29,726 INFO [train.py:1114] (2/4) Epoch 19, batch 1050, loss[loss=0.1854, simple_loss=0.2623, pruned_loss=0.0401, ctc_loss=0.07068, over 19837.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2594, pruned_loss=0.03984, ctc_loss=0.07433, over 3821505.79 frames. ], batch size: 57, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:13:39,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=244608.0, ans=0.1 +2024-08-29 14:13:46,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=244608.0, ans=0.0 +2024-08-29 14:13:48,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=244661.33333333334, ans=0.09899494936611666 +2024-08-29 14:13:56,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244714.66666666666, ans=0.125 +2024-08-29 14:13:56,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.09 vs. 
limit=15.0 +2024-08-29 14:14:01,820 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.357e+02 1.587e+02 1.996e+02 3.012e+02, threshold=3.173e+02, percent-clipped=0.0 +2024-08-29 14:14:06,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=15.0 +2024-08-29 14:14:17,626 INFO [train.py:1114] (2/4) Epoch 19, batch 1100, loss[loss=0.1824, simple_loss=0.2591, pruned_loss=0.03816, ctc_loss=0.07356, over 19574.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2595, pruned_loss=0.03978, ctc_loss=0.07424, over 3828692.34 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:14:17,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=244821.33333333334, ans=0.0 +2024-08-29 14:14:55,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245034.66666666666, ans=0.125 +2024-08-29 14:14:56,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=245034.66666666666, ans=0.025 +2024-08-29 14:15:05,497 INFO [train.py:1114] (2/4) Epoch 19, batch 1150, loss[loss=0.1603, simple_loss=0.238, pruned_loss=0.02996, ctc_loss=0.05704, over 19586.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2596, pruned_loss=0.03981, ctc_loss=0.07445, over 3826076.37 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:15:21,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.41 vs. limit=15.0 +2024-08-29 14:15:24,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-29 14:15:38,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=245194.66666666666, ans=0.09899494936611666 +2024-08-29 14:15:42,718 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.485e+02 1.714e+02 2.095e+02 3.780e+02, threshold=3.428e+02, percent-clipped=1.0 +2024-08-29 14:16:19,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=245301.33333333334, ans=0.0 +2024-08-29 14:17:48,230 INFO [train.py:1114] (2/4) Epoch 19, batch 1200, loss[loss=0.1936, simple_loss=0.2672, pruned_loss=0.04372, ctc_loss=0.08145, over 19843.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2604, pruned_loss=0.03987, ctc_loss=0.07467, over 3821922.92 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 14:17:50,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.85 vs. 
limit=15.0 +2024-08-29 14:17:52,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245354.66666666666, ans=0.1 +2024-08-29 14:17:55,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=245354.66666666666, ans=0.0 +2024-08-29 14:18:25,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=245568.0, ans=0.0 +2024-08-29 14:18:36,278 INFO [train.py:1114] (2/4) Epoch 19, batch 1250, loss[loss=0.2017, simple_loss=0.2752, pruned_loss=0.04674, ctc_loss=0.08666, over 19548.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2612, pruned_loss=0.04016, ctc_loss=0.07505, over 3840961.06 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:18:36,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=245621.33333333334, ans=0.07 +2024-08-29 14:18:37,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=245621.33333333334, ans=0.025 +2024-08-29 14:18:46,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.99 vs. limit=10.0 +2024-08-29 14:18:50,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=22.5 +2024-08-29 14:18:57,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=245728.0, ans=0.0 +2024-08-29 14:19:06,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.390e+02 1.709e+02 2.116e+02 3.450e+02, threshold=3.419e+02, percent-clipped=1.0 +2024-08-29 14:19:15,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=245781.33333333334, ans=0.125 +2024-08-29 14:19:21,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-08-29 14:19:27,899 INFO [train.py:1114] (2/4) Epoch 19, batch 1300, loss[loss=0.1845, simple_loss=0.2651, pruned_loss=0.03692, ctc_loss=0.07506, over 18921.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2602, pruned_loss=0.03992, ctc_loss=0.0744, over 3844775.59 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:19:43,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=245941.33333333334, ans=0.07 +2024-08-29 14:19:49,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245994.66666666666, ans=0.125 +2024-08-29 14:20:21,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246048.0, ans=0.1 +2024-08-29 14:20:32,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.38 vs. limit=15.0 +2024-08-29 14:20:35,736 INFO [train.py:1114] (2/4) Epoch 19, batch 1350, loss[loss=0.1706, simple_loss=0.2487, pruned_loss=0.0336, ctc_loss=0.06338, over 19770.00 frames. 
], tot_loss[loss=0.1842, simple_loss=0.2596, pruned_loss=0.03961, ctc_loss=0.0738, over 3855306.68 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:20:52,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246208.0, ans=0.125 +2024-08-29 14:21:16,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=246208.0, ans=0.125 +2024-08-29 14:21:20,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=246208.0, ans=0.0 +2024-08-29 14:21:35,079 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.448e+02 1.632e+02 2.120e+02 3.289e+02, threshold=3.263e+02, percent-clipped=0.0 +2024-08-29 14:21:44,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.45 vs. limit=10.0 +2024-08-29 14:21:51,129 INFO [train.py:1114] (2/4) Epoch 19, batch 1400, loss[loss=0.17, simple_loss=0.2413, pruned_loss=0.03678, ctc_loss=0.06304, over 19671.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03949, ctc_loss=0.07369, over 3863274.26 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:22:01,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=246474.66666666666, ans=0.0 +2024-08-29 14:22:13,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=246528.0, ans=0.0 +2024-08-29 14:22:18,680 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:22:25,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-29 14:22:29,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-29 14:22:45,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-29 14:22:49,903 INFO [train.py:1114] (2/4) Epoch 19, batch 1450, loss[loss=0.2004, simple_loss=0.2754, pruned_loss=0.04584, ctc_loss=0.08437, over 19636.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2601, pruned_loss=0.04005, ctc_loss=0.0747, over 3860184.88 frames. 
], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:22:51,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=246688.0, ans=0.2 +2024-08-29 14:22:54,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=246688.0, ans=0.05 +2024-08-29 14:22:57,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=246688.0, ans=0.125 +2024-08-29 14:23:01,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246741.33333333334, ans=0.1 +2024-08-29 14:24:01,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.408e+02 1.590e+02 1.931e+02 3.612e+02, threshold=3.180e+02, percent-clipped=1.0 +2024-08-29 14:24:07,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246901.33333333334, ans=0.125 +2024-08-29 14:24:17,516 INFO [train.py:1114] (2/4) Epoch 19, batch 1500, loss[loss=0.1903, simple_loss=0.2691, pruned_loss=0.04063, ctc_loss=0.0756, over 19590.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2605, pruned_loss=0.04006, ctc_loss=0.07468, over 3861306.88 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:24:19,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=12.0 +2024-08-29 14:24:20,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.95 vs. limit=15.0 +2024-08-29 14:24:20,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=246954.66666666666, ans=0.2 +2024-08-29 14:24:22,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246954.66666666666, ans=0.1 +2024-08-29 14:26:03,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.24 vs. limit=15.0 +2024-08-29 14:26:12,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=247114.66666666666, ans=0.2 +2024-08-29 14:26:27,037 INFO [train.py:1114] (2/4) Epoch 19, batch 1550, loss[loss=0.1952, simple_loss=0.2711, pruned_loss=0.0435, ctc_loss=0.0808, over 19613.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2604, pruned_loss=0.04013, ctc_loss=0.07489, over 3845734.36 frames. ], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:26:40,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.37 vs. 
limit=22.5 +2024-08-29 14:26:41,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=247274.66666666666, ans=0.125 +2024-08-29 14:26:59,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=247328.0, ans=0.0 +2024-08-29 14:27:02,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=247328.0, ans=0.0 +2024-08-29 14:27:11,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-08-29 14:27:13,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247328.0, ans=0.0 +2024-08-29 14:27:18,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247381.33333333334, ans=0.0 +2024-08-29 14:27:20,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.466e+02 1.727e+02 2.253e+02 4.003e+02, threshold=3.453e+02, percent-clipped=2.0 +2024-08-29 14:27:23,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=247381.33333333334, ans=0.0 +2024-08-29 14:27:24,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.07 vs. limit=15.0 +2024-08-29 14:29:40,640 INFO [train.py:1114] (2/4) Epoch 19, batch 1600, loss[loss=0.1927, simple_loss=0.2763, pruned_loss=0.03902, ctc_loss=0.07792, over 19825.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2606, pruned_loss=0.04037, ctc_loss=0.07524, over 3835103.23 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:31:45,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=247648.0, ans=0.0 +2024-08-29 14:31:51,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0 +2024-08-29 14:32:05,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=247701.33333333334, ans=0.125 +2024-08-29 14:32:06,913 INFO [train.py:1114] (2/4) Epoch 19, batch 1650, loss[loss=0.1924, simple_loss=0.2688, pruned_loss=0.0425, ctc_loss=0.07761, over 19643.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2603, pruned_loss=0.04021, ctc_loss=0.07509, over 3830846.76 frames. ], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:32:37,724 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.670e+02 2.010e+02 2.374e+02 4.027e+02, threshold=4.020e+02, percent-clipped=3.0 +2024-08-29 14:32:56,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0 +2024-08-29 14:32:56,784 INFO [train.py:1114] (2/4) Epoch 19, batch 1700, loss[loss=0.1731, simple_loss=0.2418, pruned_loss=0.03795, ctc_loss=0.07117, over 19654.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2598, pruned_loss=0.03972, ctc_loss=0.07427, over 3846161.58 frames. 
], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:32:57,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.72 vs. limit=12.0 +2024-08-29 14:33:01,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248021.33333333334, ans=0.1 +2024-08-29 14:33:07,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.54 vs. limit=15.0 +2024-08-29 14:33:16,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0 +2024-08-29 14:33:50,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248181.33333333334, ans=0.1 +2024-08-29 14:33:58,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248234.66666666666, ans=0.1 +2024-08-29 14:34:04,347 INFO [train.py:1114] (2/4) Epoch 19, batch 1750, loss[loss=0.1604, simple_loss=0.2375, pruned_loss=0.02948, ctc_loss=0.06078, over 19643.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2592, pruned_loss=0.03933, ctc_loss=0.07377, over 3852816.79 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:34:33,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=248288.0, ans=0.2 +2024-08-29 14:34:34,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248288.0, ans=0.125 +2024-08-29 14:34:48,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248341.33333333334, ans=0.1 +2024-08-29 14:34:48,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=248341.33333333334, ans=0.125 +2024-08-29 14:35:02,764 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.489e+02 1.824e+02 2.243e+02 3.708e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-08-29 14:35:04,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248448.0, ans=0.125 +2024-08-29 14:35:17,406 INFO [train.py:1114] (2/4) Epoch 19, batch 1800, loss[loss=0.194, simple_loss=0.2723, pruned_loss=0.04247, ctc_loss=0.07671, over 19631.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2592, pruned_loss=0.03928, ctc_loss=0.07365, over 3852949.68 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-29 14:36:01,489 INFO [train.py:1114] (2/4) Epoch 19, batch 1850, loss[loss=0.1852, simple_loss=0.2659, pruned_loss=0.03799, ctc_loss=0.07154, over 19557.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2592, pruned_loss=0.0393, ctc_loss=0.07371, over 3855688.77 frames. 
], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:39:54,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=248928.0, ans=0.125 +2024-08-29 14:40:02,624 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.549e+02 1.911e+02 2.362e+02 1.156e+03, threshold=3.822e+02, percent-clipped=7.0 +2024-08-29 14:40:09,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-08-29 14:40:10,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.67 vs. limit=22.5 +2024-08-29 14:40:18,275 INFO [train.py:1114] (2/4) Epoch 19, batch 1900, loss[loss=0.1917, simple_loss=0.2689, pruned_loss=0.04119, ctc_loss=0.08015, over 19664.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2598, pruned_loss=0.03944, ctc_loss=0.07393, over 3860408.64 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:40:18,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=249088.0, ans=0.125 +2024-08-29 14:40:27,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=249141.33333333334, ans=0.0 +2024-08-29 14:40:28,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.14 vs. limit=22.5 +2024-08-29 14:40:48,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249248.0, ans=0.125 +2024-08-29 14:40:48,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.91 vs. limit=22.5 +2024-08-29 14:40:56,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249301.33333333334, ans=0.125 +2024-08-29 14:41:02,129 INFO [train.py:1114] (2/4) Epoch 19, batch 1950, loss[loss=0.1619, simple_loss=0.241, pruned_loss=0.0292, ctc_loss=0.06105, over 19576.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2609, pruned_loss=0.03977, ctc_loss=0.07455, over 3869360.89 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-29 14:41:04,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.68 vs. limit=15.0 +2024-08-29 14:41:09,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=249354.66666666666, ans=0.125 +2024-08-29 14:41:27,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=249514.66666666666, ans=0.0 +2024-08-29 14:41:27,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=15.0 +2024-08-29 14:41:30,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.39 vs. 
limit=12.0 +2024-08-29 14:41:31,735 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.363e+02 1.491e+02 1.694e+02 3.301e+02, threshold=2.983e+02, percent-clipped=0.0 +2024-08-29 14:41:45,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=12.0 +2024-08-29 14:41:46,942 INFO [train.py:1114] (2/4) Epoch 19, batch 2000, loss[loss=0.1667, simple_loss=0.2323, pruned_loss=0.03667, ctc_loss=0.06925, over 19698.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2615, pruned_loss=0.04001, ctc_loss=0.07491, over 3854395.22 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:41:52,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=249621.33333333334, ans=0.125 +2024-08-29 14:42:04,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=249728.0, ans=0.125 +2024-08-29 14:42:11,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249728.0, ans=0.125 +2024-08-29 14:42:18,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 14:42:20,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=249781.33333333334, ans=0.2 +2024-08-29 14:42:31,046 INFO [train.py:1114] (2/4) Epoch 19, batch 2050, loss[loss=0.1549, simple_loss=0.2268, pruned_loss=0.03006, ctc_loss=0.05748, over 19725.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2606, pruned_loss=0.03986, ctc_loss=0.07458, over 3851573.45 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:42:39,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-29 14:42:40,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=249941.33333333334, ans=0.0 +2024-08-29 14:42:42,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=249941.33333333334, ans=0.025 +2024-08-29 14:42:45,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=12.0 +2024-08-29 14:43:00,939 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.385e+02 1.662e+02 2.291e+02 4.175e+02, threshold=3.324e+02, percent-clipped=7.0 +2024-08-29 14:44:08,823 INFO [train.py:1114] (2/4) Epoch 19, batch 2100, loss[loss=0.1734, simple_loss=0.2584, pruned_loss=0.03119, ctc_loss=0.06488, over 19766.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2596, pruned_loss=0.0393, ctc_loss=0.07372, over 3858392.60 frames. 
], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:44:12,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=250154.66666666666, ans=0.0 +2024-08-29 14:45:13,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=250208.0, ans=0.125 +2024-08-29 14:45:29,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.73 vs. limit=10.0 +2024-08-29 14:45:30,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=250261.33333333334, ans=0.2 +2024-08-29 14:45:32,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=250314.66666666666, ans=0.125 +2024-08-29 14:45:49,954 INFO [train.py:1114] (2/4) Epoch 19, batch 2150, loss[loss=0.1756, simple_loss=0.2488, pruned_loss=0.03818, ctc_loss=0.06508, over 19837.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2593, pruned_loss=0.03935, ctc_loss=0.07348, over 3869130.50 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:50,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-29 14:45:50,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=250421.33333333334, ans=0.0 +2024-08-29 14:45:56,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=250421.33333333334, ans=0.125 +2024-08-29 14:46:01,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=250474.66666666666, ans=0.0 +2024-08-29 14:46:03,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-29 14:46:11,779 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:46:16,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=250581.33333333334, ans=22.5 +2024-08-29 14:46:20,179 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.504e+02 1.955e+02 2.570e+02 4.900e+02, threshold=3.910e+02, percent-clipped=8.0 +2024-08-29 14:46:23,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=250581.33333333334, ans=0.125 +2024-08-29 14:46:35,223 INFO [train.py:1114] (2/4) Epoch 19, batch 2200, loss[loss=0.1883, simple_loss=0.2664, pruned_loss=0.03989, ctc_loss=0.0763, over 19582.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2594, pruned_loss=0.03938, ctc_loss=0.07363, over 3867627.29 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:46:36,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. 
limit=15.0 +2024-08-29 14:46:50,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=250741.33333333334, ans=0.0 +2024-08-29 14:47:43,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=250848.0, ans=0.125 +2024-08-29 14:47:44,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.60 vs. limit=22.5 +2024-08-29 14:47:54,984 INFO [train.py:1114] (2/4) Epoch 19, batch 2250, loss[loss=0.1861, simple_loss=0.2629, pruned_loss=0.03983, ctc_loss=0.07439, over 19610.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2592, pruned_loss=0.03925, ctc_loss=0.0733, over 3867632.10 frames. ], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:47:56,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=250954.66666666666, ans=0.2 +2024-08-29 14:48:02,802 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:48:10,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=251008.0, ans=0.2 +2024-08-29 14:48:32,687 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.465e+02 1.864e+02 2.416e+02 3.726e+02, threshold=3.728e+02, percent-clipped=0.0 +2024-08-29 14:48:41,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=251168.0, ans=0.2 +2024-08-29 14:48:42,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=251168.0, ans=0.2 +2024-08-29 14:48:44,894 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.89 vs. limit=22.5 +2024-08-29 14:48:47,056 INFO [train.py:1114] (2/4) Epoch 19, batch 2300, loss[loss=0.1646, simple_loss=0.2416, pruned_loss=0.03172, ctc_loss=0.06026, over 19515.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2586, pruned_loss=0.03934, ctc_loss=0.07351, over 3861449.56 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:49:15,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=251381.33333333334, ans=0.0 +2024-08-29 14:49:23,031 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:49:27,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=251434.66666666666, ans=0.0 +2024-08-29 14:49:30,477 INFO [train.py:1114] (2/4) Epoch 19, batch 2350, loss[loss=0.1979, simple_loss=0.2721, pruned_loss=0.04525, ctc_loss=0.08301, over 19701.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2588, pruned_loss=0.03968, ctc_loss=0.07393, over 3864096.56 frames. 
], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:49:47,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=251594.66666666666, ans=0.125 +2024-08-29 14:49:47,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251594.66666666666, ans=0.1 +2024-08-29 14:49:48,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=251594.66666666666, ans=0.07 +2024-08-29 14:49:51,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251594.66666666666, ans=0.125 +2024-08-29 14:50:00,300 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.463e+02 1.784e+02 2.534e+02 4.062e+02, threshold=3.568e+02, percent-clipped=2.0 +2024-08-29 14:50:01,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=251648.0, ans=0.125 +2024-08-29 14:50:02,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=251648.0, ans=0.125 +2024-08-29 14:50:17,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=251701.33333333334, ans=0.025 +2024-08-29 14:50:24,257 INFO [train.py:1114] (2/4) Epoch 19, batch 2400, loss[loss=0.1795, simple_loss=0.2537, pruned_loss=0.03759, ctc_loss=0.07524, over 19333.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2603, pruned_loss=0.04022, ctc_loss=0.0749, over 3857815.16 frames. ], batch size: 71, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:50:26,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=251754.66666666666, ans=0.0 +2024-08-29 14:50:34,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251808.0, ans=0.125 +2024-08-29 14:50:35,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=251808.0, ans=0.2 +2024-08-29 14:50:37,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.59 vs. limit=10.0 +2024-08-29 14:50:39,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=251808.0, ans=0.07 +2024-08-29 14:50:40,010 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:51:18,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251968.0, ans=0.1 +2024-08-29 14:51:26,014 INFO [train.py:1114] (2/4) Epoch 19, batch 2450, loss[loss=0.2718, simple_loss=0.3074, pruned_loss=0.08563, ctc_loss=0.1626, over 13397.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2641, pruned_loss=0.04264, ctc_loss=0.07969, over 3731230.38 frames. 
], batch size: 140, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 14:51:27,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=252021.33333333334, ans=0.0 +2024-08-29 14:51:42,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.67 vs. limit=6.0 +2024-08-29 14:51:56,011 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.531e+02 1.709e+02 1.904e+02 2.805e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-29 14:51:57,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=252181.33333333334, ans=0.125 +2024-08-29 14:53:10,857 INFO [train.py:1114] (2/4) Epoch 20, batch 0, loss[loss=0.184, simple_loss=0.2557, pruned_loss=0.04021, ctc_loss=0.07982, over 19812.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2557, pruned_loss=0.04021, ctc_loss=0.07982, over 19812.00 frames. ], batch size: 49, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 14:53:10,857 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-29 14:57:03,407 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9948, 3.7141, 3.5092, 3.5550], device='cuda:2') +2024-08-29 14:59:30,914 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1707, simple_loss=0.2632, pruned_loss=0.02916, ctc_loss=0.04979, over 944034.00 frames. +2024-08-29 14:59:30,915 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 12750MB +2024-08-29 14:59:36,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=252234.66666666666, ans=0.125 +2024-08-29 14:59:41,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=252288.0, ans=0.0 +2024-08-29 14:59:48,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.09 vs. limit=12.0 +2024-08-29 15:00:06,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=252394.66666666666, ans=0.0 +2024-08-29 15:00:11,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=252448.0, ans=0.0 +2024-08-29 15:00:12,647 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:00:18,047 INFO [train.py:1114] (2/4) Epoch 20, batch 50, loss[loss=0.1584, simple_loss=0.2312, pruned_loss=0.03057, ctc_loss=0.0608, over 19724.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2621, pruned_loss=0.0399, ctc_loss=0.07584, over 844974.64 frames. ], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 15:00:25,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252501.33333333334, ans=0.1 +2024-08-29 15:00:53,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=252608.0, ans=10.0 +2024-08-29 15:01:04,712 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.82 vs. 
limit=22.5 +2024-08-29 15:01:11,674 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.410e+02 1.638e+02 1.971e+02 2.993e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 15:01:14,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.42 vs. limit=15.0 +2024-08-29 15:01:17,277 INFO [train.py:1114] (2/4) Epoch 20, batch 100, loss[loss=0.1675, simple_loss=0.2448, pruned_loss=0.03288, ctc_loss=0.06102, over 19701.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.263, pruned_loss=0.04051, ctc_loss=0.07586, over 1499556.84 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:01:33,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252821.33333333334, ans=0.125 +2024-08-29 15:01:38,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.11 vs. limit=22.5 +2024-08-29 15:01:55,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=252981.33333333334, ans=0.025 +2024-08-29 15:02:00,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252981.33333333334, ans=0.125 +2024-08-29 15:02:05,278 INFO [train.py:1114] (2/4) Epoch 20, batch 150, loss[loss=0.1659, simple_loss=0.2404, pruned_loss=0.03281, ctc_loss=0.06457, over 19725.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2603, pruned_loss=0.03944, ctc_loss=0.07368, over 2027658.68 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:02:40,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=253194.66666666666, ans=0.0 +2024-08-29 15:02:47,831 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.385e+02 1.535e+02 1.856e+02 3.405e+02, threshold=3.069e+02, percent-clipped=1.0 +2024-08-29 15:03:24,274 INFO [train.py:1114] (2/4) Epoch 20, batch 200, loss[loss=0.2021, simple_loss=0.2806, pruned_loss=0.04406, ctc_loss=0.08896, over 18199.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2594, pruned_loss=0.03955, ctc_loss=0.07396, over 2435022.78 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:03:45,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.04 vs. 
limit=15.0 +2024-08-29 15:03:49,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=253408.0, ans=0.2 +2024-08-29 15:03:51,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253408.0, ans=0.0 +2024-08-29 15:03:54,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=253461.33333333334, ans=0.1 +2024-08-29 15:03:58,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=253461.33333333334, ans=10.0 +2024-08-29 15:04:12,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=253514.66666666666, ans=0.0 +2024-08-29 15:04:14,365 INFO [train.py:1114] (2/4) Epoch 20, batch 250, loss[loss=0.195, simple_loss=0.2716, pruned_loss=0.04407, ctc_loss=0.07544, over 19424.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2592, pruned_loss=0.03914, ctc_loss=0.07308, over 2755868.39 frames. ], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:04:35,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 15:05:14,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 15:06:15,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253781.33333333334, ans=0.125 +2024-08-29 15:06:16,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.436e+02 1.637e+02 2.276e+02 3.998e+02, threshold=3.274e+02, percent-clipped=8.0 +2024-08-29 15:06:21,879 INFO [train.py:1114] (2/4) Epoch 20, batch 300, loss[loss=0.1957, simple_loss=0.2731, pruned_loss=0.04283, ctc_loss=0.08177, over 19523.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.259, pruned_loss=0.039, ctc_loss=0.0729, over 3002011.17 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:06:31,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=253888.0, ans=0.0 +2024-08-29 15:07:02,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.85 vs. limit=10.0 +2024-08-29 15:07:35,051 INFO [train.py:1114] (2/4) Epoch 20, batch 350, loss[loss=0.1732, simple_loss=0.2393, pruned_loss=0.03841, ctc_loss=0.0757, over 19767.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2588, pruned_loss=0.03883, ctc_loss=0.07269, over 3190861.06 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:07:38,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=254101.33333333334, ans=0.05 +2024-08-29 15:07:51,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.73 vs. 
limit=22.5 +2024-08-29 15:07:52,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254208.0, ans=0.1 +2024-08-29 15:07:56,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254208.0, ans=0.0 +2024-08-29 15:09:32,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.477e+02 1.781e+02 2.345e+02 4.390e+02, threshold=3.562e+02, percent-clipped=4.0 +2024-08-29 15:09:40,135 INFO [train.py:1114] (2/4) Epoch 20, batch 400, loss[loss=0.183, simple_loss=0.2706, pruned_loss=0.03517, ctc_loss=0.0626, over 19488.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2585, pruned_loss=0.03865, ctc_loss=0.07232, over 3341925.11 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:09:46,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254368.0, ans=0.1 +2024-08-29 15:09:50,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=254421.33333333334, ans=0.0 +2024-08-29 15:09:55,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=254421.33333333334, ans=0.025 +2024-08-29 15:10:21,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=15.0 +2024-08-29 15:10:31,196 INFO [train.py:1114] (2/4) Epoch 20, batch 450, loss[loss=0.1604, simple_loss=0.2488, pruned_loss=0.02571, ctc_loss=0.05143, over 19610.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2585, pruned_loss=0.03864, ctc_loss=0.07232, over 3450704.69 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 15:10:31,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.05 vs. limit=10.0 +2024-08-29 15:10:31,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0 +2024-08-29 15:10:38,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=254634.66666666666, ans=0.0 +2024-08-29 15:10:39,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=254688.0, ans=0.125 +2024-08-29 15:10:42,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=254688.0, ans=0.5 +2024-08-29 15:10:54,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=254741.33333333334, ans=0.0 +2024-08-29 15:10:58,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=254794.66666666666, ans=6.0 +2024-08-29 15:11:12,411 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.415e+02 1.633e+02 1.920e+02 3.508e+02, threshold=3.267e+02, percent-clipped=0.0 +2024-08-29 15:11:16,891 INFO [train.py:1114] (2/4) Epoch 20, batch 500, loss[loss=0.1976, simple_loss=0.2787, pruned_loss=0.04285, ctc_loss=0.07699, over 19677.00 frames. 
], tot_loss[loss=0.1811, simple_loss=0.2573, pruned_loss=0.0382, ctc_loss=0.07141, over 3546812.11 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:11:50,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255061.33333333334, ans=0.125 +2024-08-29 15:12:06,330 INFO [train.py:1114] (2/4) Epoch 20, batch 550, loss[loss=0.2013, simple_loss=0.2727, pruned_loss=0.04766, ctc_loss=0.0866, over 19343.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2573, pruned_loss=0.03843, ctc_loss=0.07177, over 3609053.13 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:12:10,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=255168.0, ans=0.125 +2024-08-29 15:13:45,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=255328.0, ans=0.125 +2024-08-29 15:13:47,698 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.09 vs. limit=12.0 +2024-08-29 15:13:48,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255328.0, ans=0.125 +2024-08-29 15:14:00,947 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.409e+02 1.685e+02 2.053e+02 3.324e+02, threshold=3.369e+02, percent-clipped=1.0 +2024-08-29 15:14:16,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=255381.33333333334, ans=0.125 +2024-08-29 15:14:18,271 INFO [train.py:1114] (2/4) Epoch 20, batch 600, loss[loss=0.1987, simple_loss=0.283, pruned_loss=0.0416, ctc_loss=0.07776, over 19390.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2578, pruned_loss=0.0385, ctc_loss=0.07191, over 3665874.64 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:14:20,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255434.66666666666, ans=0.1 +2024-08-29 15:14:22,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255434.66666666666, ans=0.1 +2024-08-29 15:14:35,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=255488.0, ans=0.125 +2024-08-29 15:14:36,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255488.0, ans=0.1 +2024-08-29 15:14:38,935 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:14:42,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.84 vs. 
limit=22.5 +2024-08-29 15:14:45,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255541.33333333334, ans=0.1 +2024-08-29 15:14:48,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255594.66666666666, ans=0.125 +2024-08-29 15:14:52,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255594.66666666666, ans=0.125 +2024-08-29 15:14:56,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.69 vs. limit=15.0 +2024-08-29 15:15:06,120 INFO [train.py:1114] (2/4) Epoch 20, batch 650, loss[loss=0.1659, simple_loss=0.2415, pruned_loss=0.03271, ctc_loss=0.06184, over 19779.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2575, pruned_loss=0.03835, ctc_loss=0.07167, over 3715968.33 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:15:40,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=255861.33333333334, ans=0.2 +2024-08-29 15:15:49,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.451e+02 1.804e+02 2.620e+02 6.000e+02, threshold=3.609e+02, percent-clipped=12.0 +2024-08-29 15:15:54,171 INFO [train.py:1114] (2/4) Epoch 20, batch 700, loss[loss=0.1659, simple_loss=0.2447, pruned_loss=0.03126, ctc_loss=0.06152, over 19692.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2582, pruned_loss=0.03861, ctc_loss=0.07225, over 3747565.94 frames. ], batch size: 51, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:16:04,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=256021.33333333334, ans=0.2 +2024-08-29 15:16:14,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=256074.66666666666, ans=0.05 +2024-08-29 15:16:24,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=256128.0, ans=0.025 +2024-08-29 15:16:42,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=256181.33333333334, ans=0.025 +2024-08-29 15:16:44,253 INFO [train.py:1114] (2/4) Epoch 20, batch 750, loss[loss=0.1758, simple_loss=0.262, pruned_loss=0.03267, ctc_loss=0.06055, over 19505.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2582, pruned_loss=0.03864, ctc_loss=0.0722, over 3773491.09 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 15:16:57,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=256288.0, ans=0.125 +2024-08-29 15:17:07,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=256341.33333333334, ans=0.125 +2024-08-29 15:17:19,874 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. 
limit=10.0 +2024-08-29 15:17:29,509 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.446e+02 1.840e+02 2.370e+02 3.601e+02, threshold=3.680e+02, percent-clipped=0.0 +2024-08-29 15:17:30,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256448.0, ans=0.0 +2024-08-29 15:17:31,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=12.0 +2024-08-29 15:17:34,193 INFO [train.py:1114] (2/4) Epoch 20, batch 800, loss[loss=0.1668, simple_loss=0.241, pruned_loss=0.03436, ctc_loss=0.0595, over 19396.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2585, pruned_loss=0.039, ctc_loss=0.07272, over 3795249.99 frames. ], batch size: 48, lr: 7.52e-03, grad_scale: 32.0 +2024-08-29 15:17:35,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=256501.33333333334, ans=0.0 +2024-08-29 15:17:38,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=256501.33333333334, ans=0.125 +2024-08-29 15:17:41,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.60 vs. limit=15.0 +2024-08-29 15:18:08,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256661.33333333334, ans=0.1 +2024-08-29 15:18:09,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=256661.33333333334, ans=0.0 +2024-08-29 15:18:20,536 INFO [train.py:1114] (2/4) Epoch 20, batch 850, loss[loss=0.1887, simple_loss=0.2718, pruned_loss=0.03731, ctc_loss=0.07743, over 19678.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2581, pruned_loss=0.0387, ctc_loss=0.07225, over 3813468.42 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:18:23,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=256768.0, ans=10.0 +2024-08-29 15:18:29,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=256768.0, ans=0.025 +2024-08-29 15:18:43,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256874.66666666666, ans=0.0 +2024-08-29 15:18:50,903 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:18:54,996 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=12.0 +2024-08-29 15:18:55,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.46 vs. 
limit=15.0 +2024-08-29 15:19:04,721 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.415e+02 1.606e+02 2.010e+02 3.804e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-29 15:19:05,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=256981.33333333334, ans=0.125 +2024-08-29 15:19:06,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.42 vs. limit=15.0 +2024-08-29 15:19:09,503 INFO [train.py:1114] (2/4) Epoch 20, batch 900, loss[loss=0.1693, simple_loss=0.2382, pruned_loss=0.03709, ctc_loss=0.06579, over 19823.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2584, pruned_loss=0.03906, ctc_loss=0.07284, over 3818082.07 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:19:13,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=257034.66666666666, ans=15.0 +2024-08-29 15:19:18,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=257088.0, ans=0.2 +2024-08-29 15:19:23,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257088.0, ans=0.125 +2024-08-29 15:19:50,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257248.0, ans=0.1 +2024-08-29 15:19:59,860 INFO [train.py:1114] (2/4) Epoch 20, batch 950, loss[loss=0.1542, simple_loss=0.2296, pruned_loss=0.02844, ctc_loss=0.05499, over 19523.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2585, pruned_loss=0.03914, ctc_loss=0.0729, over 3819962.07 frames. ], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:20:15,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=257354.66666666666, ans=0.0 +2024-08-29 15:20:17,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=257354.66666666666, ans=0.125 +2024-08-29 15:20:27,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257408.0, ans=0.1 +2024-08-29 15:20:33,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=257461.33333333334, ans=0.07 +2024-08-29 15:20:43,846 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.399e+02 1.695e+02 2.094e+02 3.389e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-29 15:20:48,470 INFO [train.py:1114] (2/4) Epoch 20, batch 1000, loss[loss=0.166, simple_loss=0.2451, pruned_loss=0.03118, ctc_loss=0.06163, over 19868.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2592, pruned_loss=0.0393, ctc_loss=0.07322, over 3817270.55 frames. 
], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:21:03,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=257621.33333333334, ans=0.5 +2024-08-29 15:21:30,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257781.33333333334, ans=0.125 +2024-08-29 15:21:34,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=257781.33333333334, ans=0.0 +2024-08-29 15:21:36,938 INFO [train.py:1114] (2/4) Epoch 20, batch 1050, loss[loss=0.1811, simple_loss=0.2686, pruned_loss=0.03378, ctc_loss=0.06496, over 19844.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2584, pruned_loss=0.03902, ctc_loss=0.07282, over 3823601.14 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:21:46,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=257888.0, ans=0.125 +2024-08-29 15:21:55,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=257941.33333333334, ans=0.125 +2024-08-29 15:21:57,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257941.33333333334, ans=0.1 +2024-08-29 15:22:00,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257941.33333333334, ans=0.125 +2024-08-29 15:22:20,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.405e+02 1.673e+02 2.019e+02 3.302e+02, threshold=3.347e+02, percent-clipped=0.0 +2024-08-29 15:22:24,719 INFO [train.py:1114] (2/4) Epoch 20, batch 1100, loss[loss=0.1653, simple_loss=0.2449, pruned_loss=0.03169, ctc_loss=0.05586, over 19588.00 frames. ], tot_loss[loss=0.182, simple_loss=0.258, pruned_loss=0.03856, ctc_loss=0.07206, over 3830386.52 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:23:09,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=258314.66666666666, ans=0.125 +2024-08-29 15:23:14,552 INFO [train.py:1114] (2/4) Epoch 20, batch 1150, loss[loss=0.1749, simple_loss=0.2538, pruned_loss=0.03545, ctc_loss=0.06297, over 19588.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2578, pruned_loss=0.03867, ctc_loss=0.07227, over 3830406.24 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:23:15,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.62 vs. 
limit=15.0 +2024-08-29 15:23:37,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=258474.66666666666, ans=0.125 +2024-08-29 15:23:46,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258528.0, ans=0.125 +2024-08-29 15:23:55,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258581.33333333334, ans=0.1 +2024-08-29 15:23:58,294 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.386e+02 1.703e+02 2.133e+02 3.069e+02, threshold=3.407e+02, percent-clipped=0.0 +2024-08-29 15:23:58,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=258581.33333333334, ans=0.1 +2024-08-29 15:24:00,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258581.33333333334, ans=0.125 +2024-08-29 15:24:02,924 INFO [train.py:1114] (2/4) Epoch 20, batch 1200, loss[loss=0.1878, simple_loss=0.2695, pruned_loss=0.03885, ctc_loss=0.07079, over 19834.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2588, pruned_loss=0.03886, ctc_loss=0.07279, over 3826157.79 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:24:04,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258634.66666666666, ans=0.125 +2024-08-29 15:24:16,979 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:24:42,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=258848.0, ans=0.2 +2024-08-29 15:24:50,407 INFO [train.py:1114] (2/4) Epoch 20, batch 1250, loss[loss=0.2049, simple_loss=0.2774, pruned_loss=0.04868, ctc_loss=0.08737, over 19528.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2594, pruned_loss=0.03904, ctc_loss=0.07283, over 3844326.38 frames. ], batch size: 61, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:25:13,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=259008.0, ans=0.125 +2024-08-29 15:25:15,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-08-29 15:25:31,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259114.66666666666, ans=0.125 +2024-08-29 15:25:35,748 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.412e+02 1.655e+02 2.039e+02 3.415e+02, threshold=3.310e+02, percent-clipped=2.0 +2024-08-29 15:25:36,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=259114.66666666666, ans=10.0 +2024-08-29 15:25:40,364 INFO [train.py:1114] (2/4) Epoch 20, batch 1300, loss[loss=0.2053, simple_loss=0.2838, pruned_loss=0.04642, ctc_loss=0.08466, over 18879.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2585, pruned_loss=0.03862, ctc_loss=0.07223, over 3848500.15 frames. 
], batch size: 76, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:25:40,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=15.0 +2024-08-29 15:25:41,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259168.0, ans=0.125 +2024-08-29 15:25:57,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=259221.33333333334, ans=0.125 +2024-08-29 15:26:02,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=259274.66666666666, ans=0.0 +2024-08-29 15:26:16,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=259381.33333333334, ans=0.0 +2024-08-29 15:26:26,562 INFO [train.py:1114] (2/4) Epoch 20, batch 1350, loss[loss=0.1664, simple_loss=0.2542, pruned_loss=0.0285, ctc_loss=0.0538, over 19750.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2586, pruned_loss=0.03855, ctc_loss=0.0721, over 3859791.35 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:27:05,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=259434.66666666666, ans=0.0 +2024-08-29 15:27:12,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259488.0, ans=0.125 +2024-08-29 15:27:13,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=259488.0, ans=0.0 +2024-08-29 15:27:15,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=259488.0, ans=0.125 +2024-08-29 15:27:37,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=259594.66666666666, ans=0.025 +2024-08-29 15:27:38,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.81 vs. limit=15.0 +2024-08-29 15:27:39,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=259594.66666666666, ans=0.2 +2024-08-29 15:27:47,063 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.431e+02 1.643e+02 2.121e+02 3.858e+02, threshold=3.286e+02, percent-clipped=2.0 +2024-08-29 15:27:51,735 INFO [train.py:1114] (2/4) Epoch 20, batch 1400, loss[loss=0.1513, simple_loss=0.2217, pruned_loss=0.02856, ctc_loss=0.05926, over 19673.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2586, pruned_loss=0.03864, ctc_loss=0.07219, over 3866412.50 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:27:51,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=259701.33333333334, ans=0.125 +2024-08-29 15:27:55,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=259701.33333333334, ans=10.0 +2024-08-29 15:28:00,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.73 vs. 
limit=12.0 +2024-08-29 15:28:11,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=259808.0, ans=0.125 +2024-08-29 15:28:12,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259808.0, ans=0.125 +2024-08-29 15:28:15,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-08-29 15:28:23,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.99 vs. limit=15.0 +2024-08-29 15:30:05,119 INFO [train.py:1114] (2/4) Epoch 20, batch 1450, loss[loss=0.2016, simple_loss=0.2752, pruned_loss=0.04665, ctc_loss=0.08654, over 19651.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2593, pruned_loss=0.03908, ctc_loss=0.07298, over 3863949.13 frames. ], batch size: 63, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:30:10,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=259968.0, ans=0.0 +2024-08-29 15:30:39,691 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:30:44,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=260181.33333333334, ans=0.125 +2024-08-29 15:30:48,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.58 vs. limit=22.5 +2024-08-29 15:30:48,913 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.404e+02 1.559e+02 1.995e+02 3.603e+02, threshold=3.118e+02, percent-clipped=1.0 +2024-08-29 15:30:53,761 INFO [train.py:1114] (2/4) Epoch 20, batch 1500, loss[loss=0.1782, simple_loss=0.2608, pruned_loss=0.03476, ctc_loss=0.06517, over 19614.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2593, pruned_loss=0.03893, ctc_loss=0.0727, over 3863395.95 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:31:02,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=260288.0, ans=0.0 +2024-08-29 15:31:02,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=260288.0, ans=0.0 +2024-08-29 15:31:14,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.22 vs. limit=15.0 +2024-08-29 15:31:14,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=260341.33333333334, ans=0.125 +2024-08-29 15:31:18,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.15 vs. limit=15.0 +2024-08-29 15:31:29,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=260394.66666666666, ans=0.0 +2024-08-29 15:31:36,644 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.16 vs. 
limit=12.0 +2024-08-29 15:31:38,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260448.0, ans=0.125 +2024-08-29 15:31:42,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.99 vs. limit=22.5 +2024-08-29 15:31:42,542 INFO [train.py:1114] (2/4) Epoch 20, batch 1550, loss[loss=0.2037, simple_loss=0.2731, pruned_loss=0.04962, ctc_loss=0.08762, over 19609.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2589, pruned_loss=0.03896, ctc_loss=0.07281, over 3847375.28 frames. ], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 15:31:46,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=260501.33333333334, ans=0.025 +2024-08-29 15:31:58,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=260554.66666666666, ans=0.125 +2024-08-29 15:32:00,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=260608.0, ans=0.2 +2024-08-29 15:32:08,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=260608.0, ans=0.025 +2024-08-29 15:32:15,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=260661.33333333334, ans=15.0 +2024-08-29 15:32:20,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-08-29 15:32:26,817 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.439e+02 1.834e+02 2.160e+02 3.604e+02, threshold=3.667e+02, percent-clipped=4.0 +2024-08-29 15:32:30,539 INFO [train.py:1114] (2/4) Epoch 20, batch 1600, loss[loss=0.1849, simple_loss=0.2596, pruned_loss=0.04048, ctc_loss=0.07294, over 19838.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2585, pruned_loss=0.03894, ctc_loss=0.07283, over 3837212.03 frames. 
], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 15:32:31,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=260768.0, ans=0.2 +2024-08-29 15:32:32,645 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:32:36,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260768.0, ans=0.1 +2024-08-29 15:32:41,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260821.33333333334, ans=0.0 +2024-08-29 15:32:42,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260821.33333333334, ans=0.1 +2024-08-29 15:32:42,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=260821.33333333334, ans=0.0 +2024-08-29 15:32:45,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=260821.33333333334, ans=0.125 +2024-08-29 15:32:53,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=260874.66666666666, ans=0.0 +2024-08-29 15:33:03,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=260928.0, ans=0.125 +2024-08-29 15:33:20,606 INFO [train.py:1114] (2/4) Epoch 20, batch 1650, loss[loss=0.1805, simple_loss=0.2645, pruned_loss=0.03467, ctc_loss=0.06802, over 19667.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2582, pruned_loss=0.03891, ctc_loss=0.0727, over 3834348.45 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:33:26,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=261034.66666666666, ans=0.07 +2024-08-29 15:33:32,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261088.0, ans=0.125 +2024-08-29 15:33:43,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261141.33333333334, ans=0.1 +2024-08-29 15:34:03,140 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.529e+02 1.762e+02 2.426e+02 4.170e+02, threshold=3.524e+02, percent-clipped=3.0 +2024-08-29 15:34:06,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.94 vs. limit=15.0 +2024-08-29 15:34:06,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.92 vs. limit=22.5 +2024-08-29 15:34:06,790 INFO [train.py:1114] (2/4) Epoch 20, batch 1700, loss[loss=0.1537, simple_loss=0.2224, pruned_loss=0.03077, ctc_loss=0.05866, over 19659.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2578, pruned_loss=0.03859, ctc_loss=0.07205, over 3848218.58 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:34:10,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.51 vs. 
limit=22.5 +2024-08-29 15:34:16,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=261301.33333333334, ans=0.0 +2024-08-29 15:34:17,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. limit=15.0 +2024-08-29 15:34:19,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=261354.66666666666, ans=0.125 +2024-08-29 15:34:19,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=261354.66666666666, ans=0.05 +2024-08-29 15:34:26,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.25 vs. limit=15.0 +2024-08-29 15:34:27,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=261408.0, ans=15.0 +2024-08-29 15:34:30,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=261408.0, ans=0.0 +2024-08-29 15:34:30,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261408.0, ans=0.0 +2024-08-29 15:34:33,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=12.0 +2024-08-29 15:34:42,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=261461.33333333334, ans=0.2 +2024-08-29 15:34:49,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=261514.66666666666, ans=0.125 +2024-08-29 15:34:51,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=261514.66666666666, ans=0.2 +2024-08-29 15:34:53,261 INFO [train.py:1114] (2/4) Epoch 20, batch 1750, loss[loss=0.1626, simple_loss=0.234, pruned_loss=0.03343, ctc_loss=0.0608, over 19694.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2577, pruned_loss=0.03837, ctc_loss=0.07165, over 3853221.62 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:34:54,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261568.0, ans=0.0 +2024-08-29 15:34:56,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=261568.0, ans=0.125 +2024-08-29 15:34:58,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=261568.0, ans=15.0 +2024-08-29 15:35:03,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=261621.33333333334, ans=0.125 +2024-08-29 15:35:33,452 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.486e+02 1.910e+02 2.389e+02 3.898e+02, threshold=3.819e+02, percent-clipped=3.0 +2024-08-29 15:35:37,073 INFO [train.py:1114] (2/4) Epoch 20, batch 1800, loss[loss=0.2028, simple_loss=0.2777, pruned_loss=0.04621, ctc_loss=0.08852, over 19607.00 frames. 
], tot_loss[loss=0.1821, simple_loss=0.258, pruned_loss=0.03864, ctc_loss=0.07206, over 3854716.77 frames. ], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:35:45,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=261888.0, ans=0.125 +2024-08-29 15:35:46,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=261888.0, ans=0.1 +2024-08-29 15:35:52,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.92 vs. limit=10.0 +2024-08-29 15:35:55,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=261941.33333333334, ans=0.125 +2024-08-29 15:35:55,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=261941.33333333334, ans=0.04949747468305833 +2024-08-29 15:35:59,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=261941.33333333334, ans=0.125 +2024-08-29 15:36:00,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=261941.33333333334, ans=0.025 +2024-08-29 15:36:12,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262048.0, ans=0.125 +2024-08-29 15:36:21,933 INFO [train.py:1114] (2/4) Epoch 20, batch 1850, loss[loss=0.18, simple_loss=0.2634, pruned_loss=0.03515, ctc_loss=0.06581, over 19587.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2578, pruned_loss=0.03867, ctc_loss=0.07186, over 3858477.29 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:36:22,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=262101.33333333334, ans=0.025 +2024-08-29 15:36:22,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=262101.33333333334, ans=0.1 +2024-08-29 15:36:22,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=262101.33333333334, ans=0.0 +2024-08-29 15:37:03,781 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.463e+02 1.737e+02 2.223e+02 4.343e+02, threshold=3.475e+02, percent-clipped=3.0 +2024-08-29 15:37:07,313 INFO [train.py:1114] (2/4) Epoch 20, batch 1900, loss[loss=0.1878, simple_loss=0.2691, pruned_loss=0.03906, ctc_loss=0.07114, over 19647.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2579, pruned_loss=0.03857, ctc_loss=0.07168, over 3863146.02 frames. 
], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:37:17,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=262421.3333333333, ans=0.125 +2024-08-29 15:37:35,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262421.3333333333, ans=0.125 +2024-08-29 15:38:32,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=262421.3333333333, ans=0.025 +2024-08-29 15:38:39,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=262474.6666666667, ans=0.2 +2024-08-29 15:39:01,431 INFO [train.py:1114] (2/4) Epoch 20, batch 1950, loss[loss=0.1671, simple_loss=0.243, pruned_loss=0.03345, ctc_loss=0.06057, over 19589.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2586, pruned_loss=0.0386, ctc_loss=0.07154, over 3871529.26 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 15:39:04,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=262634.6666666667, ans=0.2 +2024-08-29 15:39:16,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=262688.0, ans=0.125 +2024-08-29 15:39:29,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=262794.6666666667, ans=0.0 +2024-08-29 15:39:36,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=262794.6666666667, ans=6.0 +2024-08-29 15:39:40,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.48 vs. limit=15.0 +2024-08-29 15:39:41,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=262848.0, ans=0.025 +2024-08-29 15:39:43,489 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.359e+02 1.503e+02 2.197e+02 3.515e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-29 15:39:47,142 INFO [train.py:1114] (2/4) Epoch 20, batch 2000, loss[loss=0.1463, simple_loss=0.2213, pruned_loss=0.02625, ctc_loss=0.04698, over 19617.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2591, pruned_loss=0.03898, ctc_loss=0.0723, over 3855804.95 frames. ], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 15:39:47,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=262901.3333333333, ans=0.0 +2024-08-29 15:39:49,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=262901.3333333333, ans=0.125 +2024-08-29 15:39:53,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=262901.3333333333, ans=0.025 +2024-08-29 15:39:59,995 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.92 vs. 
limit=15.0 +2024-08-29 15:40:02,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=262954.6666666667, ans=0.125 +2024-08-29 15:40:26,166 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:40:27,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=263114.6666666667, ans=0.125 +2024-08-29 15:40:31,095 INFO [train.py:1114] (2/4) Epoch 20, batch 2050, loss[loss=0.1816, simple_loss=0.2491, pruned_loss=0.04216, ctc_loss=0.07451, over 19726.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2583, pruned_loss=0.0389, ctc_loss=0.07229, over 3851100.32 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:10,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.438e+02 1.706e+02 2.328e+02 5.097e+02, threshold=3.413e+02, percent-clipped=11.0 +2024-08-29 15:41:11,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=263381.3333333333, ans=0.2 +2024-08-29 15:41:14,529 INFO [train.py:1114] (2/4) Epoch 20, batch 2100, loss[loss=0.1804, simple_loss=0.2547, pruned_loss=0.03923, ctc_loss=0.06892, over 19766.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2579, pruned_loss=0.03859, ctc_loss=0.07181, over 3857313.01 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:20,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263434.6666666667, ans=0.125 +2024-08-29 15:41:24,470 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:41:39,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=263594.6666666667, ans=0.2 +2024-08-29 15:41:50,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=263648.0, ans=15.0 +2024-08-29 15:41:57,797 INFO [train.py:1114] (2/4) Epoch 20, batch 2150, loss[loss=0.1808, simple_loss=0.2578, pruned_loss=0.03788, ctc_loss=0.07028, over 19843.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2572, pruned_loss=0.03825, ctc_loss=0.07126, over 3867882.17 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:58,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=263701.3333333333, ans=0.2 +2024-08-29 15:42:08,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263754.6666666667, ans=0.125 +2024-08-29 15:42:35,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=263914.6666666667, ans=0.125 +2024-08-29 15:42:37,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.475e+02 1.770e+02 2.541e+02 4.904e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-29 15:42:41,076 INFO [train.py:1114] (2/4) Epoch 20, batch 2200, loss[loss=0.1859, simple_loss=0.2613, pruned_loss=0.0398, ctc_loss=0.07739, over 19596.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2573, pruned_loss=0.03815, ctc_loss=0.07113, over 3866356.65 frames. 
], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:42:50,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=264021.3333333333, ans=0.2 +2024-08-29 15:42:52,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.00 vs. limit=15.0 +2024-08-29 15:42:57,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=264074.6666666667, ans=0.125 +2024-08-29 15:43:00,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=264074.6666666667, ans=0.0 +2024-08-29 15:43:25,154 INFO [train.py:1114] (2/4) Epoch 20, batch 2250, loss[loss=0.1844, simple_loss=0.2722, pruned_loss=0.03498, ctc_loss=0.06665, over 19614.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2584, pruned_loss=0.03846, ctc_loss=0.07171, over 3866128.11 frames. ], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:43:32,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=264234.6666666667, ans=0.0 +2024-08-29 15:43:57,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264394.6666666667, ans=0.1 +2024-08-29 15:44:01,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264448.0, ans=0.125 +2024-08-29 15:44:04,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.490e+02 1.909e+02 2.542e+02 3.813e+02, threshold=3.818e+02, percent-clipped=1.0 +2024-08-29 15:44:07,876 INFO [train.py:1114] (2/4) Epoch 20, batch 2300, loss[loss=0.1692, simple_loss=0.2483, pruned_loss=0.03362, ctc_loss=0.05725, over 19497.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2581, pruned_loss=0.03875, ctc_loss=0.07216, over 3860959.48 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:44:08,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=264501.3333333333, ans=0.09899494936611666 +2024-08-29 15:44:10,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-29 15:44:11,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=264501.3333333333, ans=0.07 +2024-08-29 15:44:52,852 INFO [train.py:1114] (2/4) Epoch 20, batch 2350, loss[loss=0.1933, simple_loss=0.2704, pruned_loss=0.04205, ctc_loss=0.08038, over 19642.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2585, pruned_loss=0.03899, ctc_loss=0.07249, over 3863166.37 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:44:55,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=264768.0, ans=0.125 +2024-08-29 15:45:32,200 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.408e+02 1.642e+02 2.194e+02 4.028e+02, threshold=3.284e+02, percent-clipped=1.0 +2024-08-29 15:45:35,653 INFO [train.py:1114] (2/4) Epoch 20, batch 2400, loss[loss=0.2109, simple_loss=0.282, pruned_loss=0.05216, ctc_loss=0.08896, over 19315.00 frames. 
], tot_loss[loss=0.1851, simple_loss=0.2605, pruned_loss=0.04, ctc_loss=0.07405, over 3857478.20 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:45:55,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-08-29 15:45:57,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-29 15:45:59,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=265141.3333333333, ans=0.125 +2024-08-29 15:46:07,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=265194.6666666667, ans=0.025 +2024-08-29 15:46:13,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=265248.0, ans=0.125 +2024-08-29 15:46:13,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.69 vs. limit=6.0 +2024-08-29 15:46:19,431 INFO [train.py:1114] (2/4) Epoch 20, batch 2450, loss[loss=0.2388, simple_loss=0.2926, pruned_loss=0.06827, ctc_loss=0.1212, over 13610.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2637, pruned_loss=0.0422, ctc_loss=0.07856, over 3726544.93 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:46:40,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=265408.0, ans=0.0 +2024-08-29 15:46:42,824 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:46:43,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265408.0, ans=0.125 +2024-08-29 15:46:45,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.54 vs. limit=5.0 +2024-08-29 15:48:24,404 INFO [train.py:1387] (2/4) Done! 
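Two patterns in the log above are worth decoding before the next log file begins. Each WARNING from optim.py prints grad-norm quartiles (min, 25%, median, 75%, max over a window of recent batches) next to the clipping threshold, and in every line shown the threshold equals Clipping_scale (2.0) times the reported median, e.g. 2.0 × 1.638e+02 = 3.276e+02 and 2.0 × 1.781e+02 = 3.562e+02; this suggests percent-clipped is the share of recent gradient norms that exceeded twice the running median (an inference from the printed numbers, not a claim about icefall's exact implementation). Separately, the per-batch tot_loss[...] summaries from train.py are the only compact record of the training curve. The sketch below is a hypothetical helper, not part of icefall; its regex is written against the exact line format visible above (`Epoch N, batch M, loss[...], tot_loss[...], batch size: B, lr: L`) and may need adjusting for other icefall versions. Because it uses re.search, it also tolerates the leading `+` of diff-context lines.

```python
import re

# Matches the per-batch summary lines emitted by icefall's train.py, e.g.
#   ... Epoch 20, batch 100, loss[...], tot_loss[loss=0.1872, simple_loss=0.263,
#   pruned_loss=0.04051, ctc_loss=0.07586, over 1499556.84 frames. ],
#   batch size: 51, lr: 7.57e-03, grad_scale: 32.0
LINE_RE = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
    r"loss\[.*?\], tot_loss\[loss=(?P<loss>[\d.]+), "
    r"simple_loss=(?P<simple>[\d.]+), pruned_loss=(?P<pruned>[\d.]+), "
    r"ctc_loss=(?P<ctc>[\d.]+), over [\d.]+ frames\. \], "
    r"batch size: (?P<bs>\d+), lr: (?P<lr>[\d.eE+-]+)"
)

def parse_train_log(path):
    """Yield one record per 'Epoch N, batch M' summary line in an icefall log."""
    with open(path) as f:
        for line in f:
            m = LINE_RE.search(line)
            if m is None:
                continue  # skip scaling.py / optim.py / validation lines
            yield {
                "epoch": int(m["epoch"]),
                "batch": int(m["batch"]),
                "tot_loss": float(m["loss"]),
                "ctc_loss": float(m["ctc"]),
                "batch_size": int(m["bs"]),
                "lr": float(m["lr"]),
            }

if __name__ == "__main__":
    for rec in parse_train_log("log-train-2024-08-29-10-47-00-3"):
        print(rec)
```

Run over the log above, this recovers tot_loss falling from 0.1872 (epoch 20, batch 100) to about 0.181 by batch 2200 while the learning rate decays from 7.57e-03 to 7.41e-03, a curve otherwise buried in the scaling.py chatter.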
diff --git a/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-3 b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-3 new file mode 100644 index 0000000000000000000000000000000000000000..305240b2f3685f6be9a80d7abee8a58a013d7170 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/log/log-train-2024-08-29-10-47-00-3 @@ -0,0 +1,711 @@ +2024-08-29 10:47:00,758 INFO [train.py:1182] (3/4) Training started +2024-08-29 10:47:00,758 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-29 10:47:00,825 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': '201257e-dirty', 'icefall-git-date': 'Tue Aug 20 00:02:11 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2563.int.cedar.computecanada.ca', 'IP address': '172.16.146.0'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 19, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': True, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 
1, 'eos_id': 1, 'vocab_size': 500} +2024-08-29 10:47:00,825 INFO [train.py:1212] (3/4) About to create model +2024-08-29 10:50:07,636 INFO [train.py:1216] (3/4) Number of model parameters: 65805511 +2024-08-29 10:50:07,811 INFO [checkpoint.py:112] (3/4) Loading checkpoint from /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/ctc/exp/epoch-18.pt +2024-08-29 10:54:13,655 INFO [train.py:1231] (3/4) Using DDP +2024-08-29 10:57:38,606 INFO [train.py:1243] (3/4) Loading optimizer state dict +2024-08-29 10:57:38,793 INFO [train.py:1251] (3/4) Loading scheduler state dict +2024-08-29 10:57:39,426 INFO [asr_datamodule.py:894] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80 +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10 +2024-08-29 10:57:39,669 INFO [asr_datamodule.py:738] (3/4) About to create train dataset +2024-08-29 10:57:39,670 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler. +2024-08-29 10:57:41,266 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader +2024-08-29 10:57:41,267 INFO [asr_datamodule.py:911] (3/4) About to get dev-clean cuts +2024-08-29 10:58:07,692 INFO [asr_datamodule.py:918] (3/4) About to get dev-other cuts +2024-08-29 10:58:09,678 INFO [asr_datamodule.py:814] (3/4) About to create dev dataset +2024-08-29 10:58:10,002 INFO [asr_datamodule.py:831] (3/4) About to create dev dataloader +2024-08-29 10:58:10,002 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-29 11:07:36,362 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=3.73 vs. limit=7.5 +2024-08-29 11:08:02,421 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12127MB +2024-08-29 11:08:03,528 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12127MB +2024-08-29 11:11:47,154 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 11:11:48,380 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 11:21:31,517 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 11:21:32,822 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 11:21:32,842 INFO [train.py:1344] (3/4) Loading grad scaler state dict +2024-08-29 11:27:35,423 INFO [train.py:1114] (3/4) Epoch 19, batch 0, loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.04659, ctc_loss=0.08412, over 19809.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.04659, ctc_loss=0.08412, over 19809.00 frames. ], batch size: 49, lr: 7.99e-03, grad_scale: 32.0 +2024-08-29 11:27:35,424 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 11:31:58,876 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1709, simple_loss=0.2636, pruned_loss=0.02933, ctc_loss=0.04896, over 944034.00 frames. +2024-08-29 11:31:58,877 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12201MB +2024-08-29 11:33:33,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.96 vs. 
limit=22.5 +2024-08-29 12:03:08,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239008.0, ans=0.125 +2024-08-29 12:07:09,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=239008.0, ans=0.015 +2024-08-29 12:29:28,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239114.66666666666, ans=0.125 +2024-08-29 12:29:41,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239114.66666666666, ans=0.1 +2024-08-29 12:32:20,122 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.58 vs. limit=10.0 +2024-08-29 12:45:48,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239168.0, ans=0.125 +2024-08-29 12:46:38,399 INFO [train.py:1114] (3/4) Epoch 19, batch 50, loss[loss=0.1627, simple_loss=0.2395, pruned_loss=0.03086, ctc_loss=0.06046, over 19714.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2634, pruned_loss=0.042, ctc_loss=0.0785, over 844167.97 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 12:56:27,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=239328.0, ans=0.2 +2024-08-29 12:57:41,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239328.0, ans=0.1 +2024-08-29 12:57:47,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=239381.33333333334, ans=0.05 +2024-08-29 12:57:49,123 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.479e+02 1.753e+02 2.191e+02 3.244e+02, threshold=3.506e+02, percent-clipped=0.0 +2024-08-29 12:58:22,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=239381.33333333334, ans=10.0 +2024-08-29 13:04:10,876 INFO [train.py:1114] (3/4) Epoch 19, batch 100, loss[loss=0.1593, simple_loss=0.2371, pruned_loss=0.02967, ctc_loss=0.05517, over 19723.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2651, pruned_loss=0.04195, ctc_loss=0.0782, over 1498340.35 frames. 
], batch size: 51, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 13:04:23,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239488.0, ans=0.125 +2024-08-29 13:07:38,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=239488.0, ans=0.025 +2024-08-29 13:07:44,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239541.33333333334, ans=0.125 +2024-08-29 13:08:30,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=239541.33333333334, ans=0.0 +2024-08-29 13:09:37,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239594.66666666666, ans=0.1 +2024-08-29 13:09:38,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=239594.66666666666, ans=0.125 +2024-08-29 13:13:55,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0 +2024-08-29 13:14:31,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239648.0, ans=0.125 +2024-08-29 13:17:25,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.32 vs. limit=15.0 +2024-08-29 13:21:06,828 INFO [train.py:1114] (3/4) Epoch 19, batch 150, loss[loss=0.1784, simple_loss=0.247, pruned_loss=0.0399, ctc_loss=0.07514, over 19736.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2626, pruned_loss=0.04114, ctc_loss=0.07682, over 2027311.34 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 32.0 +2024-08-29 13:21:18,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0 +2024-08-29 13:21:44,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.55 vs. 
limit=12.0 +2024-08-29 13:21:54,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=239861.33333333334, ans=0.0 +2024-08-29 13:21:54,957 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:24:20,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239861.33333333334, ans=0.125 +2024-08-29 13:24:24,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.511e+02 1.939e+02 2.474e+02 3.688e+02, threshold=3.878e+02, percent-clipped=4.0 +2024-08-29 13:25:00,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239914.66666666666, ans=0.125 +2024-08-29 13:25:01,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=239914.66666666666, ans=0.0 +2024-08-29 13:27:00,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:27:01,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:27:32,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=239968.0, ans=0.125 +2024-08-29 13:27:34,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.58 vs. limit=15.0 +2024-08-29 13:31:38,285 INFO [train.py:1114] (3/4) Epoch 19, batch 200, loss[loss=0.1983, simple_loss=0.2726, pruned_loss=0.04492, ctc_loss=0.08553, over 18332.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.261, pruned_loss=0.04045, ctc_loss=0.07545, over 2435699.92 frames. ], batch size: 85, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 13:31:38,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=240021.33333333334, ans=0.0 +2024-08-29 13:31:45,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-29 13:31:46,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240021.33333333334, ans=0.125 +2024-08-29 13:31:56,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.77 vs. 
limit=15.0 +2024-08-29 13:32:04,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240128.0, ans=0.125 +2024-08-29 13:32:04,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240128.0, ans=0.0 +2024-08-29 13:32:16,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=240181.33333333334, ans=0.125 +2024-08-29 13:32:17,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=240181.33333333334, ans=0.5 +2024-08-29 13:32:19,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0 +2024-08-29 13:32:21,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240181.33333333334, ans=0.1 +2024-08-29 13:32:24,074 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:32:34,883 INFO [train.py:1114] (3/4) Epoch 19, batch 250, loss[loss=0.199, simple_loss=0.2719, pruned_loss=0.04627, ctc_loss=0.08383, over 19409.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2604, pruned_loss=0.0399, ctc_loss=0.07426, over 2756001.92 frames. ], batch size: 67, lr: 7.97e-03, grad_scale: 32.0 +2024-08-29 13:33:16,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240341.33333333334, ans=0.125 +2024-08-29 13:33:37,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=240448.0, ans=0.2 +2024-08-29 13:33:39,437 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.435e+02 1.779e+02 2.329e+02 4.143e+02, threshold=3.559e+02, percent-clipped=3.0 +2024-08-29 13:37:04,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=240448.0, ans=0.125 +2024-08-29 13:37:08,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=240448.0, ans=0.2 +2024-08-29 13:37:09,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 13:37:12,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 13:37:14,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=240501.33333333334, ans=0.125 +2024-08-29 13:37:19,129 INFO [train.py:1114] (3/4) Epoch 19, batch 300, loss[loss=0.2069, simple_loss=0.2841, pruned_loss=0.04738, ctc_loss=0.08737, over 19526.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03974, ctc_loss=0.07391, over 3001126.27 frames. 
], batch size: 61, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 13:37:40,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=240554.66666666666, ans=0.0 +2024-08-29 13:37:47,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=240608.0, ans=0.125 +2024-08-29 13:37:52,138 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:38:50,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=240661.33333333334, ans=0.125 +2024-08-29 13:40:04,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=240714.66666666666, ans=0.125 +2024-08-29 13:40:04,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=240714.66666666666, ans=0.125 +2024-08-29 13:40:51,471 INFO [train.py:1114] (3/4) Epoch 19, batch 350, loss[loss=0.1554, simple_loss=0.231, pruned_loss=0.02896, ctc_loss=0.05474, over 19758.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2605, pruned_loss=0.03992, ctc_loss=0.07438, over 3191307.89 frames. ], batch size: 48, lr: 7.96e-03, grad_scale: 32.0 +2024-08-29 13:41:25,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=240928.0, ans=0.025 +2024-08-29 13:41:26,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240928.0, ans=0.0 +2024-08-29 13:41:41,710 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.478e+02 1.769e+02 2.422e+02 3.784e+02, threshold=3.538e+02, percent-clipped=2.0 +2024-08-29 13:42:16,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240981.33333333334, ans=0.1 +2024-08-29 13:42:16,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240981.33333333334, ans=0.0 +2024-08-29 13:43:43,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.33 vs. limit=22.5 +2024-08-29 13:43:50,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=241034.66666666666, ans=0.125 +2024-08-29 13:44:27,816 INFO [train.py:1114] (3/4) Epoch 19, batch 400, loss[loss=0.1799, simple_loss=0.2616, pruned_loss=0.03652, ctc_loss=0.06271, over 19488.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2601, pruned_loss=0.03978, ctc_loss=0.07419, over 3343080.18 frames. ], batch size: 54, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:44:27,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=241088.0, ans=0.025 +2024-08-29 13:45:34,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.90 vs. 
limit=22.5 +2024-08-29 13:45:52,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=241248.0, ans=0.2 +2024-08-29 13:47:19,617 INFO [train.py:1114] (3/4) Epoch 19, batch 450, loss[loss=0.1972, simple_loss=0.2792, pruned_loss=0.04183, ctc_loss=0.07919, over 19594.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2601, pruned_loss=0.03994, ctc_loss=0.07446, over 3450659.63 frames. ], batch size: 55, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:47:37,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=241408.0, ans=0.0 +2024-08-29 13:47:39,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=241408.0, ans=0.125 +2024-08-29 13:48:45,732 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.422e+02 1.638e+02 2.007e+02 3.524e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 13:48:52,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=241568.0, ans=0.04949747468305833 +2024-08-29 13:50:05,024 INFO [train.py:1114] (3/4) Epoch 19, batch 500, loss[loss=0.1875, simple_loss=0.2631, pruned_loss=0.04072, ctc_loss=0.07624, over 19678.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2589, pruned_loss=0.03935, ctc_loss=0.07342, over 3546334.18 frames. ], batch size: 63, lr: 7.95e-03, grad_scale: 32.0 +2024-08-29 13:50:33,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.14 vs. limit=22.5 +2024-08-29 13:51:33,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.73 vs. limit=15.0 +2024-08-29 13:51:44,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=241781.33333333334, ans=0.125 +2024-08-29 13:53:05,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 13:53:06,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 13:53:07,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 13:53:07,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=241834.66666666666, ans=0.125 +2024-08-29 13:54:05,852 INFO [train.py:1114] (3/4) Epoch 19, batch 550, loss[loss=0.2082, simple_loss=0.2814, pruned_loss=0.04911, ctc_loss=0.09214, over 19290.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2589, pruned_loss=0.0395, ctc_loss=0.07384, over 3608127.09 frames. ], batch size: 71, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:54:16,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.43 vs. 
limit=12.0 +2024-08-29 13:54:29,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=241994.66666666666, ans=0.0 +2024-08-29 13:54:34,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242048.0, ans=0.125 +2024-08-29 13:54:34,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242048.0, ans=0.1 +2024-08-29 13:54:37,008 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.414e+02 1.681e+02 2.031e+02 3.681e+02, threshold=3.361e+02, percent-clipped=2.0 +2024-08-29 13:54:38,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242048.0, ans=0.0 +2024-08-29 13:54:44,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.44 vs. limit=15.0 +2024-08-29 13:54:49,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=242101.33333333334, ans=0.0 +2024-08-29 13:54:53,920 INFO [train.py:1114] (3/4) Epoch 19, batch 600, loss[loss=0.205, simple_loss=0.2841, pruned_loss=0.04621, ctc_loss=0.08386, over 19361.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2598, pruned_loss=0.03966, ctc_loss=0.07404, over 3665674.33 frames. ], batch size: 67, lr: 7.94e-03, grad_scale: 32.0 +2024-08-29 13:54:55,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242154.66666666666, ans=0.1 +2024-08-29 13:55:00,705 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:55:36,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0 +2024-08-29 13:55:40,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=242368.0, ans=0.0 +2024-08-29 13:55:42,119 INFO [train.py:1114] (3/4) Epoch 19, batch 650, loss[loss=0.1774, simple_loss=0.2561, pruned_loss=0.03589, ctc_loss=0.06725, over 19775.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.259, pruned_loss=0.03929, ctc_loss=0.07349, over 3716489.95 frames. ], batch size: 54, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:55:45,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=242421.33333333334, ans=0.125 +2024-08-29 13:55:53,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.18 vs. 
limit=15.0 +2024-08-29 13:55:54,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242474.66666666666, ans=0.125 +2024-08-29 13:56:05,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=242528.0, ans=0.125 +2024-08-29 13:56:14,926 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.536e+02 1.948e+02 2.425e+02 3.839e+02, threshold=3.897e+02, percent-clipped=7.0 +2024-08-29 13:56:31,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=242634.66666666666, ans=0.125 +2024-08-29 13:56:38,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=242634.66666666666, ans=0.0 +2024-08-29 13:56:39,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242688.0, ans=0.1 +2024-08-29 13:56:40,132 INFO [train.py:1114] (3/4) Epoch 19, batch 700, loss[loss=0.1714, simple_loss=0.246, pruned_loss=0.03517, ctc_loss=0.06619, over 19733.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2596, pruned_loss=0.03971, ctc_loss=0.07409, over 3749037.36 frames. ], batch size: 51, lr: 7.93e-03, grad_scale: 32.0 +2024-08-29 13:56:52,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=242688.0, ans=0.1 +2024-08-29 13:57:10,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=242794.66666666666, ans=0.125 +2024-08-29 13:57:20,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=242848.0, ans=0.0 +2024-08-29 13:57:21,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=242848.0, ans=0.0 +2024-08-29 13:57:41,164 INFO [train.py:1114] (3/4) Epoch 19, batch 750, loss[loss=0.1767, simple_loss=0.2576, pruned_loss=0.03447, ctc_loss=0.06736, over 19495.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2592, pruned_loss=0.0397, ctc_loss=0.0742, over 3775690.54 frames. ], batch size: 54, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:57:45,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=242954.66666666666, ans=0.0 +2024-08-29 13:57:47,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-08-29 13:58:45,181 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.478e+02 1.857e+02 2.278e+02 3.837e+02, threshold=3.713e+02, percent-clipped=0.0 +2024-08-29 13:58:58,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=12.0 +2024-08-29 13:58:59,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243168.0, ans=0.125 +2024-08-29 13:59:02,305 INFO [train.py:1114] (3/4) Epoch 19, batch 800, loss[loss=0.17, simple_loss=0.2431, pruned_loss=0.03545, ctc_loss=0.06516, over 19406.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2594, pruned_loss=0.03989, ctc_loss=0.07434, over 3797056.26 frames. 
], batch size: 48, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 13:59:10,013 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 13:59:17,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=243274.66666666666, ans=0.05 +2024-08-29 13:59:48,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=243381.33333333334, ans=0.125 +2024-08-29 13:59:49,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.06 vs. limit=15.0 +2024-08-29 14:04:32,051 INFO [train.py:1114] (3/4) Epoch 19, batch 850, loss[loss=0.1872, simple_loss=0.2716, pruned_loss=0.0379, ctc_loss=0.06739, over 19632.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.259, pruned_loss=0.0396, ctc_loss=0.07379, over 3816610.56 frames. ], batch size: 59, lr: 7.92e-03, grad_scale: 32.0 +2024-08-29 14:04:38,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243488.0, ans=0.0 +2024-08-29 14:04:40,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=243488.0, ans=22.5 +2024-08-29 14:04:45,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=243541.33333333334, ans=0.2 +2024-08-29 14:05:04,243 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:05:05,882 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.422e+02 1.643e+02 2.108e+02 3.301e+02, threshold=3.285e+02, percent-clipped=0.0 +2024-08-29 14:05:06,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=243648.0, ans=0.2 +2024-08-29 14:05:10,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.83 vs. limit=10.0 +2024-08-29 14:05:19,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243701.33333333334, ans=0.1 +2024-08-29 14:08:12,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=243701.33333333334, ans=0.2 +2024-08-29 14:08:15,258 INFO [train.py:1114] (3/4) Epoch 19, batch 900, loss[loss=0.164, simple_loss=0.2403, pruned_loss=0.03204, ctc_loss=0.05915, over 19423.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2594, pruned_loss=0.04005, ctc_loss=0.07451, over 3820995.48 frames. ], batch size: 48, lr: 7.91e-03, grad_scale: 32.0 +2024-08-29 14:10:35,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 14:10:36,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243914.66666666666, ans=0.125 +2024-08-29 14:10:37,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.33 vs. 
limit=15.0 +2024-08-29 14:10:45,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.67 vs. limit=15.0 +2024-08-29 14:10:50,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.25 vs. limit=15.0 +2024-08-29 14:10:53,187 INFO [train.py:1114] (3/4) Epoch 19, batch 950, loss[loss=0.1762, simple_loss=0.249, pruned_loss=0.03815, ctc_loss=0.06792, over 19517.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2597, pruned_loss=0.04015, ctc_loss=0.07475, over 3820732.71 frames. ], batch size: 49, lr: 7.91e-03, grad_scale: 16.0 +2024-08-29 14:10:57,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.83 vs. limit=15.0 +2024-08-29 14:11:04,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=244074.66666666666, ans=0.0 +2024-08-29 14:11:15,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244128.0, ans=0.125 +2024-08-29 14:11:19,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-08-29 14:11:21,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244128.0, ans=0.1 +2024-08-29 14:11:24,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-08-29 14:11:27,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.452e+02 1.728e+02 2.089e+02 3.728e+02, threshold=3.456e+02, percent-clipped=1.0 +2024-08-29 14:11:33,013 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:11:35,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=244234.66666666666, ans=0.125 +2024-08-29 14:11:43,393 INFO [train.py:1114] (3/4) Epoch 19, batch 1000, loss[loss=0.1679, simple_loss=0.2463, pruned_loss=0.03281, ctc_loss=0.06002, over 19864.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.261, pruned_loss=0.04066, ctc_loss=0.07565, over 3816160.43 frames. ], batch size: 52, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:12:07,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=244341.33333333334, ans=0.035 +2024-08-29 14:13:29,690 INFO [train.py:1114] (3/4) Epoch 19, batch 1050, loss[loss=0.1829, simple_loss=0.2645, pruned_loss=0.03694, ctc_loss=0.06866, over 19837.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2601, pruned_loss=0.04041, ctc_loss=0.07515, over 3823259.35 frames. 
], batch size: 57, lr: 7.90e-03, grad_scale: 16.0 +2024-08-29 14:13:31,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244554.66666666666, ans=0.1 +2024-08-29 14:13:39,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=244608.0, ans=0.5 +2024-08-29 14:13:40,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=244608.0, ans=0.125 +2024-08-29 14:14:01,815 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.357e+02 1.587e+02 1.996e+02 3.012e+02, threshold=3.173e+02, percent-clipped=0.0 +2024-08-29 14:14:03,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=244714.66666666666, ans=0.125 +2024-08-29 14:14:09,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=244768.0, ans=0.0 +2024-08-29 14:14:17,626 INFO [train.py:1114] (3/4) Epoch 19, batch 1100, loss[loss=0.162, simple_loss=0.2437, pruned_loss=0.02886, ctc_loss=0.05648, over 19584.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2597, pruned_loss=0.0401, ctc_loss=0.07478, over 3831325.97 frames. ], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:14:26,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=244874.66666666666, ans=0.0 +2024-08-29 14:14:29,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=10.0 +2024-08-29 14:14:46,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=244981.33333333334, ans=0.125 +2024-08-29 14:14:47,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244981.33333333334, ans=0.1 +2024-08-29 14:15:04,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=245088.0, ans=0.2 +2024-08-29 14:15:05,493 INFO [train.py:1114] (3/4) Epoch 19, batch 1150, loss[loss=0.1686, simple_loss=0.2501, pruned_loss=0.0314, ctc_loss=0.06066, over 19576.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2593, pruned_loss=0.03987, ctc_loss=0.07436, over 3829831.16 frames. 
], batch size: 52, lr: 7.89e-03, grad_scale: 16.0 +2024-08-29 14:15:12,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=245088.0, ans=0.125 +2024-08-29 14:15:18,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-29 14:15:20,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=245141.33333333334, ans=0.125 +2024-08-29 14:15:42,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=245248.0, ans=0.2 +2024-08-29 14:15:42,721 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.485e+02 1.714e+02 2.095e+02 3.780e+02, threshold=3.428e+02, percent-clipped=1.0 +2024-08-29 14:17:48,228 INFO [train.py:1114] (3/4) Epoch 19, batch 1200, loss[loss=0.1808, simple_loss=0.2626, pruned_loss=0.0357, ctc_loss=0.06929, over 19836.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2604, pruned_loss=0.04022, ctc_loss=0.07496, over 3824867.73 frames. ], batch size: 57, lr: 7.89e-03, grad_scale: 32.0 +2024-08-29 14:18:15,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=245514.66666666666, ans=0.05 +2024-08-29 14:18:23,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245514.66666666666, ans=0.1 +2024-08-29 14:18:25,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.26 vs. limit=8.0 +2024-08-29 14:18:28,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=245568.0, ans=0.0 +2024-08-29 14:18:32,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.59 vs. limit=15.0 +2024-08-29 14:18:36,277 INFO [train.py:1114] (3/4) Epoch 19, batch 1250, loss[loss=0.2061, simple_loss=0.2737, pruned_loss=0.05112, ctc_loss=0.09087, over 19549.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2608, pruned_loss=0.04024, ctc_loss=0.07491, over 3843427.90 frames. ], batch size: 61, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:18:46,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=245674.66666666666, ans=0.125 +2024-08-29 14:18:47,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.65 vs. limit=15.0 +2024-08-29 14:19:02,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245728.0, ans=0.0 +2024-08-29 14:19:05,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=245781.33333333334, ans=0.125 +2024-08-29 14:19:06,846 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.390e+02 1.709e+02 2.116e+02 3.450e+02, threshold=3.419e+02, percent-clipped=1.0 +2024-08-29 14:19:27,905 INFO [train.py:1114] (3/4) Epoch 19, batch 1300, loss[loss=0.1972, simple_loss=0.2719, pruned_loss=0.04482, ctc_loss=0.08219, over 18812.00 frames. 
], tot_loss[loss=0.1847, simple_loss=0.2599, pruned_loss=0.03989, ctc_loss=0.07437, over 3845729.62 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 32.0 +2024-08-29 14:19:31,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245888.0, ans=0.1 +2024-08-29 14:19:45,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245994.66666666666, ans=0.125 +2024-08-29 14:19:45,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245994.66666666666, ans=0.1 +2024-08-29 14:20:21,522 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.79 vs. limit=15.0 +2024-08-29 14:20:23,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=246048.0, ans=0.125 +2024-08-29 14:20:30,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=246101.33333333334, ans=0.0 +2024-08-29 14:20:33,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-29 14:20:34,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246101.33333333334, ans=0.125 +2024-08-29 14:20:35,711 INFO [train.py:1114] (3/4) Epoch 19, batch 1350, loss[loss=0.1589, simple_loss=0.2413, pruned_loss=0.02779, ctc_loss=0.05232, over 19764.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2594, pruned_loss=0.0395, ctc_loss=0.07367, over 3857959.38 frames. ], batch size: 54, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:21:35,081 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.448e+02 1.632e+02 2.120e+02 3.289e+02, threshold=3.263e+02, percent-clipped=0.0 +2024-08-29 14:21:40,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=246368.0, ans=0.0 +2024-08-29 14:21:47,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=246368.0, ans=0.125 +2024-08-29 14:21:51,128 INFO [train.py:1114] (3/4) Epoch 19, batch 1400, loss[loss=0.1465, simple_loss=0.2202, pruned_loss=0.02602, ctc_loss=0.05198, over 19666.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2588, pruned_loss=0.03922, ctc_loss=0.07316, over 3865333.02 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:21:57,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.48 vs. limit=15.0 +2024-08-29 14:22:08,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=246528.0, ans=0.125 +2024-08-29 14:22:21,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=246581.33333333334, ans=0.0 +2024-08-29 14:22:26,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.64 vs. 
limit=22.5 +2024-08-29 14:22:28,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=246581.33333333334, ans=0.125 +2024-08-29 14:22:30,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=246634.66666666666, ans=0.05 +2024-08-29 14:22:33,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=246634.66666666666, ans=0.125 +2024-08-29 14:22:48,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=246634.66666666666, ans=0.2 +2024-08-29 14:22:49,898 INFO [train.py:1114] (3/4) Epoch 19, batch 1450, loss[loss=0.2054, simple_loss=0.2754, pruned_loss=0.04967, ctc_loss=0.0899, over 19675.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2594, pruned_loss=0.0395, ctc_loss=0.07369, over 3863411.14 frames. ], batch size: 63, lr: 7.87e-03, grad_scale: 32.0 +2024-08-29 14:22:58,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=246741.33333333334, ans=0.2 +2024-08-29 14:23:27,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=246794.66666666666, ans=0.2 +2024-08-29 14:23:28,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=246794.66666666666, ans=0.125 +2024-08-29 14:24:01,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.408e+02 1.590e+02 1.931e+02 3.612e+02, threshold=3.180e+02, percent-clipped=1.0 +2024-08-29 14:24:08,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=246901.33333333334, ans=0.035 +2024-08-29 14:24:09,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=246901.33333333334, ans=0.07 +2024-08-29 14:24:17,517 INFO [train.py:1114] (3/4) Epoch 19, batch 1500, loss[loss=0.207, simple_loss=0.2872, pruned_loss=0.0465, ctc_loss=0.08447, over 19559.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2599, pruned_loss=0.0396, ctc_loss=0.074, over 3862802.71 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:24:19,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=246954.66666666666, ans=0.125 +2024-08-29 14:24:20,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.82 vs. limit=15.0 +2024-08-29 14:24:27,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247008.0, ans=0.1 +2024-08-29 14:26:23,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247168.0, ans=0.125 +2024-08-29 14:26:27,027 INFO [train.py:1114] (3/4) Epoch 19, batch 1550, loss[loss=0.2154, simple_loss=0.2927, pruned_loss=0.05042, ctc_loss=0.09316, over 19587.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2601, pruned_loss=0.03971, ctc_loss=0.07416, over 3847731.08 frames. 
], batch size: 60, lr: 7.86e-03, grad_scale: 32.0 +2024-08-29 14:26:28,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=247221.33333333334, ans=0.0 +2024-08-29 14:26:33,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=247221.33333333334, ans=0.125 +2024-08-29 14:26:40,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.63 vs. limit=10.0 +2024-08-29 14:27:20,313 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.466e+02 1.727e+02 2.253e+02 4.003e+02, threshold=3.453e+02, percent-clipped=2.0 +2024-08-29 14:27:20,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=247381.33333333334, ans=0.125 +2024-08-29 14:29:40,641 INFO [train.py:1114] (3/4) Epoch 19, batch 1600, loss[loss=0.1846, simple_loss=0.267, pruned_loss=0.03745, ctc_loss=0.06812, over 19839.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.26, pruned_loss=0.03981, ctc_loss=0.07431, over 3836938.66 frames. ], batch size: 57, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:29:41,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=15.0 +2024-08-29 14:29:42,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=247488.0, ans=0.0 +2024-08-29 14:29:57,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=247541.33333333334, ans=0.07 +2024-08-29 14:30:39,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247541.33333333334, ans=0.1 +2024-08-29 14:31:36,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247594.66666666666, ans=0.125 +2024-08-29 14:31:49,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=247648.0, ans=0.0 +2024-08-29 14:31:53,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=247648.0, ans=0.0 +2024-08-29 14:31:56,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=247701.33333333334, ans=0.125 +2024-08-29 14:32:00,711 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:32:06,879 INFO [train.py:1114] (3/4) Epoch 19, batch 1650, loss[loss=0.1829, simple_loss=0.2692, pruned_loss=0.03517, ctc_loss=0.06582, over 19648.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2595, pruned_loss=0.03982, ctc_loss=0.07437, over 3833338.69 frames. 
], batch size: 59, lr: 7.85e-03, grad_scale: 32.0 +2024-08-29 14:32:21,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247808.0, ans=0.125 +2024-08-29 14:32:37,725 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.670e+02 2.010e+02 2.374e+02 4.027e+02, threshold=4.020e+02, percent-clipped=3.0 +2024-08-29 14:32:37,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=247914.66666666666, ans=0.035 +2024-08-29 14:32:53,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=247968.0, ans=0.0 +2024-08-29 14:32:56,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=248021.33333333334, ans=0.025 +2024-08-29 14:32:56,788 INFO [train.py:1114] (3/4) Epoch 19, batch 1700, loss[loss=0.1522, simple_loss=0.2271, pruned_loss=0.02786, ctc_loss=0.05382, over 19679.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2593, pruned_loss=0.03938, ctc_loss=0.07365, over 3847403.35 frames. ], batch size: 46, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:33:15,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248128.0, ans=0.125 +2024-08-29 14:33:16,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248128.0, ans=0.0 +2024-08-29 14:33:20,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=248128.0, ans=0.125 +2024-08-29 14:33:20,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248128.0, ans=0.125 +2024-08-29 14:33:21,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0 +2024-08-29 14:33:25,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-08-29 14:33:52,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248181.33333333334, ans=0.1 +2024-08-29 14:33:55,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0 +2024-08-29 14:33:58,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=248234.66666666666, ans=0.0 +2024-08-29 14:34:04,336 INFO [train.py:1114] (3/4) Epoch 19, batch 1750, loss[loss=0.1706, simple_loss=0.2407, pruned_loss=0.03673, ctc_loss=0.06773, over 19628.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.259, pruned_loss=0.03935, ctc_loss=0.07361, over 3851796.27 frames. ], batch size: 45, lr: 7.84e-03, grad_scale: 32.0 +2024-08-29 14:34:42,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248341.33333333334, ans=0.1 +2024-08-29 14:34:51,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.56 vs. 
limit=15.0 +2024-08-29 14:34:54,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=248394.66666666666, ans=0.2 +2024-08-29 14:34:59,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248448.0, ans=0.125 +2024-08-29 14:35:02,761 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.489e+02 1.824e+02 2.243e+02 3.708e+02, threshold=3.648e+02, percent-clipped=0.0 +2024-08-29 14:35:05,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248448.0, ans=0.125 +2024-08-29 14:35:08,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248501.33333333334, ans=0.125 +2024-08-29 14:35:11,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=248501.33333333334, ans=0.125 +2024-08-29 14:35:17,401 INFO [train.py:1114] (3/4) Epoch 19, batch 1800, loss[loss=0.1921, simple_loss=0.2741, pruned_loss=0.04066, ctc_loss=0.07166, over 19609.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2596, pruned_loss=0.03949, ctc_loss=0.07371, over 3853062.75 frames. ], batch size: 55, lr: 7.84e-03, grad_scale: 16.0 +2024-08-29 14:35:18,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=248554.66666666666, ans=0.0 +2024-08-29 14:35:23,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248554.66666666666, ans=0.1 +2024-08-29 14:35:41,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.57 vs. limit=15.0 +2024-08-29 14:35:50,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=12.0 +2024-08-29 14:36:01,483 INFO [train.py:1114] (3/4) Epoch 19, batch 1850, loss[loss=0.2066, simple_loss=0.2821, pruned_loss=0.04733, ctc_loss=0.09133, over 19589.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.03947, ctc_loss=0.07364, over 3856845.91 frames. ], batch size: 57, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:37:45,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=248874.66666666666, ans=0.0 +2024-08-29 14:38:09,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=248874.66666666666, ans=0.0 +2024-08-29 14:38:15,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=248928.0, ans=0.07 +2024-08-29 14:38:17,669 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=6.0 +2024-08-29 14:40:00,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.98 vs. 
limit=15.0 +2024-08-29 14:40:02,619 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.549e+02 1.911e+02 2.362e+02 1.156e+03, threshold=3.822e+02, percent-clipped=7.0 +2024-08-29 14:40:10,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249034.66666666666, ans=0.1 +2024-08-29 14:40:18,263 INFO [train.py:1114] (3/4) Epoch 19, batch 1900, loss[loss=0.1722, simple_loss=0.25, pruned_loss=0.03384, ctc_loss=0.06689, over 19656.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2598, pruned_loss=0.03973, ctc_loss=0.07401, over 3862137.32 frames. ], batch size: 59, lr: 7.83e-03, grad_scale: 16.0 +2024-08-29 14:40:34,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0 +2024-08-29 14:40:40,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=249194.66666666666, ans=0.125 +2024-08-29 14:41:02,129 INFO [train.py:1114] (3/4) Epoch 19, batch 1950, loss[loss=0.1649, simple_loss=0.236, pruned_loss=0.03397, ctc_loss=0.06468, over 19588.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2605, pruned_loss=0.0399, ctc_loss=0.07439, over 3871034.46 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 16.0 +2024-08-29 14:41:18,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249461.33333333334, ans=0.125 +2024-08-29 14:41:19,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=249461.33333333334, ans=0.0 +2024-08-29 14:41:20,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=249461.33333333334, ans=0.0 +2024-08-29 14:41:31,728 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.363e+02 1.491e+02 1.694e+02 3.301e+02, threshold=2.983e+02, percent-clipped=0.0 +2024-08-29 14:41:32,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=249514.66666666666, ans=0.2 +2024-08-29 14:41:46,943 INFO [train.py:1114] (3/4) Epoch 19, batch 2000, loss[loss=0.1535, simple_loss=0.2239, pruned_loss=0.03097, ctc_loss=0.053, over 19668.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2611, pruned_loss=0.04026, ctc_loss=0.07509, over 3856443.69 frames. ], batch size: 45, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:41:58,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. 
limit=15.0 +2024-08-29 14:42:08,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249728.0, ans=0.125 +2024-08-29 14:42:16,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 14:42:17,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249781.33333333334, ans=0.125 +2024-08-29 14:42:20,536 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 14:42:23,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=249834.66666666666, ans=0.0 +2024-08-29 14:42:27,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.32 vs. limit=15.0 +2024-08-29 14:42:31,050 INFO [train.py:1114] (3/4) Epoch 19, batch 2050, loss[loss=0.174, simple_loss=0.2444, pruned_loss=0.03806, ctc_loss=0.06871, over 19732.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2604, pruned_loss=0.04029, ctc_loss=0.07512, over 3853661.78 frames. ], batch size: 47, lr: 7.82e-03, grad_scale: 32.0 +2024-08-29 14:42:33,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=249888.0, ans=0.125 +2024-08-29 14:42:33,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249888.0, ans=0.1 +2024-08-29 14:42:41,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-29 14:42:42,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=249941.33333333334, ans=0.125 +2024-08-29 14:43:00,935 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.385e+02 1.662e+02 2.291e+02 4.175e+02, threshold=3.324e+02, percent-clipped=7.0 +2024-08-29 14:44:08,822 INFO [train.py:1114] (3/4) Epoch 19, batch 2100, loss[loss=0.1993, simple_loss=0.27, pruned_loss=0.04698, ctc_loss=0.08659, over 19771.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2597, pruned_loss=0.03974, ctc_loss=0.07414, over 3860442.32 frames. ], batch size: 54, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:12,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=250154.66666666666, ans=0.125 +2024-08-29 14:45:17,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250208.0, ans=0.125 +2024-08-29 14:45:20,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=250208.0, ans=0.0 +2024-08-29 14:45:30,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.61 vs. 
limit=10.0 +2024-08-29 14:45:32,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=250314.66666666666, ans=0.0 +2024-08-29 14:45:44,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=250368.0, ans=0.125 +2024-08-29 14:45:47,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=250368.0, ans=0.0 +2024-08-29 14:45:49,956 INFO [train.py:1114] (3/4) Epoch 19, batch 2150, loss[loss=0.1762, simple_loss=0.2515, pruned_loss=0.03748, ctc_loss=0.06491, over 19850.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2591, pruned_loss=0.03964, ctc_loss=0.07374, over 3871247.27 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 32.0 +2024-08-29 14:45:55,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=250421.33333333334, ans=0.0 +2024-08-29 14:46:01,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=250474.66666666666, ans=0.125 +2024-08-29 14:46:19,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=250581.33333333334, ans=0.0 +2024-08-29 14:46:20,186 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.504e+02 1.955e+02 2.570e+02 4.900e+02, threshold=3.910e+02, percent-clipped=8.0 +2024-08-29 14:46:23,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-08-29 14:46:35,226 INFO [train.py:1114] (3/4) Epoch 19, batch 2200, loss[loss=0.1828, simple_loss=0.2592, pruned_loss=0.03863, ctc_loss=0.073, over 19589.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2588, pruned_loss=0.0394, ctc_loss=0.07321, over 3869966.80 frames. ], batch size: 57, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:46:35,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.01 vs. limit=22.5 +2024-08-29 14:46:54,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.75 vs. limit=15.0 +2024-08-29 14:46:57,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=250794.66666666666, ans=0.025 +2024-08-29 14:47:35,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=250794.66666666666, ans=0.0 +2024-08-29 14:47:41,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0 +2024-08-29 14:47:54,982 INFO [train.py:1114] (3/4) Epoch 19, batch 2250, loss[loss=0.2137, simple_loss=0.2903, pruned_loss=0.05007, ctc_loss=0.09238, over 19610.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2594, pruned_loss=0.03956, ctc_loss=0.07337, over 3869114.50 frames. 
], batch size: 55, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:48:03,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=251008.0, ans=0.05 +2024-08-29 14:48:27,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251061.33333333334, ans=0.1 +2024-08-29 14:48:32,694 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.465e+02 1.864e+02 2.416e+02 3.726e+02, threshold=3.728e+02, percent-clipped=0.0 +2024-08-29 14:48:34,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.86 vs. limit=15.0 +2024-08-29 14:48:47,063 INFO [train.py:1114] (3/4) Epoch 19, batch 2300, loss[loss=0.1753, simple_loss=0.2475, pruned_loss=0.03779, ctc_loss=0.06887, over 19483.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2584, pruned_loss=0.0395, ctc_loss=0.07336, over 3863046.08 frames. ], batch size: 49, lr: 7.80e-03, grad_scale: 32.0 +2024-08-29 14:48:49,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=251221.33333333334, ans=0.025 +2024-08-29 14:48:50,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=251221.33333333334, ans=0.125 +2024-08-29 14:48:56,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251274.66666666666, ans=0.1 +2024-08-29 14:49:19,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.71 vs. limit=15.0 +2024-08-29 14:49:30,457 INFO [train.py:1114] (3/4) Epoch 19, batch 2350, loss[loss=0.1969, simple_loss=0.2736, pruned_loss=0.04338, ctc_loss=0.08387, over 19682.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.259, pruned_loss=0.03965, ctc_loss=0.07377, over 3865447.04 frames. ], batch size: 63, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:49:34,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251488.0, ans=0.1 +2024-08-29 14:49:45,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.72 vs. 
limit=10.0 +2024-08-29 14:49:49,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=251594.66666666666, ans=0.025 +2024-08-29 14:49:54,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=251594.66666666666, ans=0.0 +2024-08-29 14:49:55,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=251594.66666666666, ans=0.04949747468305833 +2024-08-29 14:49:59,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=251648.0, ans=0.025 +2024-08-29 14:50:00,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.463e+02 1.784e+02 2.534e+02 4.062e+02, threshold=3.568e+02, percent-clipped=2.0 +2024-08-29 14:50:18,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-29 14:50:18,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=251701.33333333334, ans=0.125 +2024-08-29 14:50:24,247 INFO [train.py:1114] (3/4) Epoch 19, batch 2400, loss[loss=0.2079, simple_loss=0.2789, pruned_loss=0.04999, ctc_loss=0.09216, over 19361.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2613, pruned_loss=0.04057, ctc_loss=0.07526, over 3858986.01 frames. ], batch size: 71, lr: 7.79e-03, grad_scale: 32.0 +2024-08-29 14:50:25,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=251754.66666666666, ans=0.025 +2024-08-29 14:50:29,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-29 14:50:42,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251861.33333333334, ans=0.125 +2024-08-29 14:50:46,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251861.33333333334, ans=0.0 +2024-08-29 14:50:47,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=15.0 +2024-08-29 14:50:47,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251861.33333333334, ans=0.125 +2024-08-29 14:51:26,018 INFO [train.py:1114] (3/4) Epoch 19, batch 2450, loss[loss=0.2479, simple_loss=0.2919, pruned_loss=0.07212, ctc_loss=0.1489, over 12773.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2647, pruned_loss=0.0428, ctc_loss=0.07966, over 3730665.26 frames. 
], batch size: 141, lr: 7.78e-03, grad_scale: 32.0 +2024-08-29 14:51:35,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=252074.66666666666, ans=0.0 +2024-08-29 14:51:36,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=252074.66666666666, ans=0.125 +2024-08-29 14:51:39,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=252074.66666666666, ans=0.125 +2024-08-29 14:51:50,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.75 vs. limit=15.0 +2024-08-29 14:51:56,009 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.531e+02 1.709e+02 1.904e+02 2.805e+02, threshold=3.418e+02, percent-clipped=0.0 +2024-08-29 14:53:10,846 INFO [train.py:1114] (3/4) Epoch 20, batch 0, loss[loss=0.1697, simple_loss=0.2475, pruned_loss=0.03315, ctc_loss=0.06376, over 19388.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2475, pruned_loss=0.03315, ctc_loss=0.06376, over 19388.00 frames. ], batch size: 48, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 14:53:10,847 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-29 14:57:04,662 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9779, 3.6963, 3.5117, 3.5499], device='cuda:3') +2024-08-29 14:59:30,905 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1707, simple_loss=0.2632, pruned_loss=0.02916, ctc_loss=0.04979, over 944034.00 frames. +2024-08-29 14:59:30,906 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 12709MB +2024-08-29 14:59:42,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=252288.0, ans=0.025 +2024-08-29 15:00:07,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.33 vs. limit=22.5 +2024-08-29 15:00:13,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=252448.0, ans=0.05 +2024-08-29 15:00:18,045 INFO [train.py:1114] (3/4) Epoch 20, batch 50, loss[loss=0.1659, simple_loss=0.2416, pruned_loss=0.03298, ctc_loss=0.06071, over 19735.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2606, pruned_loss=0.03933, ctc_loss=0.07496, over 843375.14 frames. 
], batch size: 47, lr: 7.58e-03, grad_scale: 32.0 +2024-08-29 15:00:36,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=252501.33333333334, ans=15.0 +2024-08-29 15:01:06,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=252661.33333333334, ans=0.0 +2024-08-29 15:01:08,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=252714.66666666666, ans=0.07 +2024-08-29 15:01:11,669 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.410e+02 1.638e+02 1.971e+02 2.993e+02, threshold=3.276e+02, percent-clipped=0.0 +2024-08-29 15:01:17,272 INFO [train.py:1114] (3/4) Epoch 20, batch 100, loss[loss=0.1658, simple_loss=0.246, pruned_loss=0.03016, ctc_loss=0.06302, over 19726.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2625, pruned_loss=0.03988, ctc_loss=0.07522, over 1497870.57 frames. ], batch size: 51, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:01:17,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=22.5 +2024-08-29 15:01:18,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=252768.0, ans=0.0 +2024-08-29 15:01:35,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=252821.33333333334, ans=0.125 +2024-08-29 15:01:37,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=252874.66666666666, ans=0.125 +2024-08-29 15:01:40,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=252874.66666666666, ans=0.2 +2024-08-29 15:01:54,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=252928.0, ans=0.125 +2024-08-29 15:02:02,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=252981.33333333334, ans=0.2 +2024-08-29 15:02:05,283 INFO [train.py:1114] (3/4) Epoch 20, batch 150, loss[loss=0.1598, simple_loss=0.2289, pruned_loss=0.03244, ctc_loss=0.06457, over 19717.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2601, pruned_loss=0.03913, ctc_loss=0.07392, over 2027723.87 frames. 
], batch size: 47, lr: 7.57e-03, grad_scale: 32.0 +2024-08-29 15:02:06,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=253034.66666666666, ans=0.2 +2024-08-29 15:02:07,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=253034.66666666666, ans=0.025 +2024-08-29 15:02:08,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=253034.66666666666, ans=0.0 +2024-08-29 15:02:24,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253088.0, ans=0.1 +2024-08-29 15:02:34,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=253194.66666666666, ans=0.09899494936611666 +2024-08-29 15:02:43,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0 +2024-08-29 15:02:47,826 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.385e+02 1.535e+02 1.856e+02 3.405e+02, threshold=3.069e+02, percent-clipped=1.0 +2024-08-29 15:03:24,278 INFO [train.py:1114] (3/4) Epoch 20, batch 200, loss[loss=0.1906, simple_loss=0.2657, pruned_loss=0.0421, ctc_loss=0.07848, over 18525.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2589, pruned_loss=0.03887, ctc_loss=0.07311, over 2435784.70 frames. ], batch size: 85, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:03:33,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.59 vs. limit=22.5 +2024-08-29 15:03:53,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253461.33333333334, ans=0.125 +2024-08-29 15:03:54,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=253461.33333333334, ans=0.025 +2024-08-29 15:03:54,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=253461.33333333334, ans=0.035 +2024-08-29 15:04:02,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.51 vs. limit=15.0 +2024-08-29 15:04:04,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253514.66666666666, ans=0.1 +2024-08-29 15:04:14,360 INFO [train.py:1114] (3/4) Epoch 20, batch 250, loss[loss=0.1965, simple_loss=0.276, pruned_loss=0.04251, ctc_loss=0.08004, over 19355.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2581, pruned_loss=0.0384, ctc_loss=0.07211, over 2756034.48 frames. 
], batch size: 67, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:04:15,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253568.0, ans=0.125 +2024-08-29 15:04:33,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=253674.66666666666, ans=0.025 +2024-08-29 15:04:35,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=253674.66666666666, ans=0.2 +2024-08-29 15:04:37,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253674.66666666666, ans=0.125 +2024-08-29 15:06:03,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=253728.0, ans=0.125 +2024-08-29 15:06:14,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.53 vs. limit=10.0 +2024-08-29 15:06:16,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.436e+02 1.637e+02 2.276e+02 3.998e+02, threshold=3.274e+02, percent-clipped=8.0 +2024-08-29 15:06:21,877 INFO [train.py:1114] (3/4) Epoch 20, batch 300, loss[loss=0.2004, simple_loss=0.2787, pruned_loss=0.04395, ctc_loss=0.08529, over 19518.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2581, pruned_loss=0.03837, ctc_loss=0.07208, over 3000612.69 frames. ], batch size: 61, lr: 7.56e-03, grad_scale: 32.0 +2024-08-29 15:06:35,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=253888.0, ans=0.0 +2024-08-29 15:07:35,031 INFO [train.py:1114] (3/4) Epoch 20, batch 350, loss[loss=0.1667, simple_loss=0.2341, pruned_loss=0.03665, ctc_loss=0.0652, over 19756.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2587, pruned_loss=0.03866, ctc_loss=0.07258, over 3189836.52 frames. ], batch size: 48, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:07:35,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254101.33333333334, ans=0.1 +2024-08-29 15:07:40,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=254101.33333333334, ans=0.0 +2024-08-29 15:07:46,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.98 vs. limit=22.5 +2024-08-29 15:07:56,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=254208.0, ans=0.0 +2024-08-29 15:08:34,365 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.80 vs. limit=10.0 +2024-08-29 15:08:35,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=254261.33333333334, ans=0.025 +2024-08-29 15:09:24,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.39 vs. 
limit=15.0 +2024-08-29 15:09:27,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=254261.33333333334, ans=0.125 +2024-08-29 15:09:32,440 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.477e+02 1.781e+02 2.345e+02 4.390e+02, threshold=3.562e+02, percent-clipped=4.0 +2024-08-29 15:09:40,139 INFO [train.py:1114] (3/4) Epoch 20, batch 400, loss[loss=0.168, simple_loss=0.2499, pruned_loss=0.03107, ctc_loss=0.06013, over 19496.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2582, pruned_loss=0.03838, ctc_loss=0.07205, over 3341479.33 frames. ], batch size: 54, lr: 7.55e-03, grad_scale: 32.0 +2024-08-29 15:09:52,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254421.33333333334, ans=0.1 +2024-08-29 15:10:31,162 INFO [train.py:1114] (3/4) Epoch 20, batch 450, loss[loss=0.1833, simple_loss=0.2667, pruned_loss=0.03566, ctc_loss=0.07172, over 19610.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2581, pruned_loss=0.03846, ctc_loss=0.07216, over 3448990.92 frames. ], batch size: 55, lr: 7.55e-03, grad_scale: 16.0 +2024-08-29 15:10:36,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0 +2024-08-29 15:10:39,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254688.0, ans=0.1 +2024-08-29 15:10:44,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254688.0, ans=0.125 +2024-08-29 15:10:47,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254688.0, ans=0.125 +2024-08-29 15:11:02,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=254794.66666666666, ans=0.025 +2024-08-29 15:11:12,406 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.415e+02 1.633e+02 1.920e+02 3.508e+02, threshold=3.267e+02, percent-clipped=0.0 +2024-08-29 15:11:16,893 INFO [train.py:1114] (3/4) Epoch 20, batch 500, loss[loss=0.1984, simple_loss=0.2844, pruned_loss=0.04077, ctc_loss=0.07714, over 19642.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2571, pruned_loss=0.03812, ctc_loss=0.07156, over 3545247.99 frames. ], batch size: 63, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:11:25,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. limit=6.0 +2024-08-29 15:11:44,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=255008.0, ans=0.125 +2024-08-29 15:12:06,331 INFO [train.py:1114] (3/4) Epoch 20, batch 550, loss[loss=0.2059, simple_loss=0.2801, pruned_loss=0.0487, ctc_loss=0.08586, over 19211.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2571, pruned_loss=0.03831, ctc_loss=0.0719, over 3606801.94 frames. ], batch size: 71, lr: 7.54e-03, grad_scale: 16.0 +2024-08-29 15:12:12,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=3.69 vs. 
limit=15.0 +2024-08-29 15:12:14,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=255168.0, ans=0.0 +2024-08-29 15:13:45,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=255328.0, ans=0.125 +2024-08-29 15:13:51,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=255328.0, ans=0.0 +2024-08-29 15:14:00,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.409e+02 1.685e+02 2.053e+02 3.324e+02, threshold=3.369e+02, percent-clipped=1.0 +2024-08-29 15:14:18,264 INFO [train.py:1114] (3/4) Epoch 20, batch 600, loss[loss=0.1995, simple_loss=0.2765, pruned_loss=0.04408, ctc_loss=0.08575, over 19330.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2582, pruned_loss=0.03879, ctc_loss=0.0728, over 3663846.54 frames. ], batch size: 67, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:14:38,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=255541.33333333334, ans=0.0 +2024-08-29 15:14:38,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0 +2024-08-29 15:14:41,733 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:14:48,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255594.66666666666, ans=0.125 +2024-08-29 15:14:55,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=255594.66666666666, ans=0.125 +2024-08-29 15:14:56,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=255648.0, ans=0.0 +2024-08-29 15:14:58,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=255648.0, ans=0.025 +2024-08-29 15:15:06,089 INFO [train.py:1114] (3/4) Epoch 20, batch 650, loss[loss=0.1797, simple_loss=0.2625, pruned_loss=0.03496, ctc_loss=0.06774, over 19774.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2578, pruned_loss=0.03861, ctc_loss=0.07223, over 3714515.65 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:15:35,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255861.33333333334, ans=0.125 +2024-08-29 15:15:36,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255861.33333333334, ans=0.125 +2024-08-29 15:15:41,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=255861.33333333334, ans=10.0 +2024-08-29 15:15:49,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.451e+02 1.804e+02 2.620e+02 6.000e+02, threshold=3.609e+02, percent-clipped=12.0 +2024-08-29 15:15:54,173 INFO [train.py:1114] (3/4) Epoch 20, batch 700, loss[loss=0.1733, simple_loss=0.2479, pruned_loss=0.03502, ctc_loss=0.07167, over 19716.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2584, pruned_loss=0.03871, ctc_loss=0.07251, over 3746641.94 frames. 
], batch size: 51, lr: 7.53e-03, grad_scale: 16.0 +2024-08-29 15:16:07,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=256021.33333333334, ans=0.125 +2024-08-29 15:16:14,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=256074.66666666666, ans=0.05 +2024-08-29 15:16:25,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=256128.0, ans=0.025 +2024-08-29 15:16:27,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-29 15:16:31,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=256128.0, ans=0.2 +2024-08-29 15:16:44,255 INFO [train.py:1114] (3/4) Epoch 20, batch 750, loss[loss=0.1876, simple_loss=0.2699, pruned_loss=0.03754, ctc_loss=0.07572, over 19493.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2584, pruned_loss=0.03881, ctc_loss=0.07256, over 3772097.76 frames. ], batch size: 54, lr: 7.52e-03, grad_scale: 16.0 +2024-08-29 15:16:45,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256234.66666666666, ans=0.0 +2024-08-29 15:16:45,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-08-29 15:16:51,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0 +2024-08-29 15:16:52,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256288.0, ans=0.125 +2024-08-29 15:16:57,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=256288.0, ans=10.0 +2024-08-29 15:17:07,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=256341.33333333334, ans=0.2 +2024-08-29 15:17:12,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=256341.33333333334, ans=0.05 +2024-08-29 15:17:29,511 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.446e+02 1.840e+02 2.370e+02 3.601e+02, threshold=3.680e+02, percent-clipped=0.0 +2024-08-29 15:17:29,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=256448.0, ans=0.0 +2024-08-29 15:17:30,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=256448.0, ans=0.0 +2024-08-29 15:17:32,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.91 vs. limit=22.5 +2024-08-29 15:17:34,164 INFO [train.py:1114] (3/4) Epoch 20, batch 800, loss[loss=0.1817, simple_loss=0.2539, pruned_loss=0.04016, ctc_loss=0.07271, over 19413.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2583, pruned_loss=0.0388, ctc_loss=0.07231, over 3794382.28 frames. 
], batch size: 48, lr: 7.52e-03, grad_scale: 32.0 +2024-08-29 15:17:34,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=256501.33333333334, ans=0.125 +2024-08-29 15:18:08,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256661.33333333334, ans=0.1 +2024-08-29 15:18:20,534 INFO [train.py:1114] (3/4) Epoch 20, batch 850, loss[loss=0.1809, simple_loss=0.2613, pruned_loss=0.03544, ctc_loss=0.07411, over 19650.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2576, pruned_loss=0.03865, ctc_loss=0.07226, over 3814968.13 frames. ], batch size: 59, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:18:23,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=256768.0, ans=0.125 +2024-08-29 15:18:28,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=256768.0, ans=0.5 +2024-08-29 15:18:31,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=256821.33333333334, ans=0.125 +2024-08-29 15:18:33,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.55 vs. limit=10.0 +2024-08-29 15:18:43,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256874.66666666666, ans=0.0 +2024-08-29 15:19:04,724 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.415e+02 1.606e+02 2.010e+02 3.804e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-29 15:19:05,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=256981.33333333334, ans=0.125 +2024-08-29 15:19:09,477 INFO [train.py:1114] (3/4) Epoch 20, batch 900, loss[loss=0.1686, simple_loss=0.2409, pruned_loss=0.03501, ctc_loss=0.06566, over 19400.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2581, pruned_loss=0.03906, ctc_loss=0.07306, over 3818685.56 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:19:13,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257034.66666666666, ans=0.125 +2024-08-29 15:19:22,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=257088.0, ans=0.125 +2024-08-29 15:19:48,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0 +2024-08-29 15:19:59,862 INFO [train.py:1114] (3/4) Epoch 20, batch 950, loss[loss=0.1653, simple_loss=0.2462, pruned_loss=0.0306, ctc_loss=0.05806, over 19498.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2587, pruned_loss=0.03928, ctc_loss=0.07341, over 3819873.11 frames. 
], batch size: 49, lr: 7.51e-03, grad_scale: 32.0 +2024-08-29 15:20:02,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=257301.33333333334, ans=0.1 +2024-08-29 15:20:15,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=257354.66666666666, ans=0.2 +2024-08-29 15:20:17,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=257354.66666666666, ans=0.125 +2024-08-29 15:20:33,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=12.0 +2024-08-29 15:20:33,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=257461.33333333334, ans=0.125 +2024-08-29 15:20:33,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=257461.33333333334, ans=0.0 +2024-08-29 15:20:43,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.399e+02 1.695e+02 2.094e+02 3.389e+02, threshold=3.390e+02, percent-clipped=1.0 +2024-08-29 15:20:46,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257514.66666666666, ans=0.125 +2024-08-29 15:20:48,472 INFO [train.py:1114] (3/4) Epoch 20, batch 1000, loss[loss=0.1711, simple_loss=0.2445, pruned_loss=0.03548, ctc_loss=0.06691, over 19858.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2594, pruned_loss=0.03955, ctc_loss=0.07401, over 3816153.61 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:21:00,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257621.33333333334, ans=0.1 +2024-08-29 15:21:29,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257781.33333333334, ans=0.125 +2024-08-29 15:21:31,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=257781.33333333334, ans=0.2 +2024-08-29 15:21:36,903 INFO [train.py:1114] (3/4) Epoch 20, batch 1050, loss[loss=0.1849, simple_loss=0.2702, pruned_loss=0.03535, ctc_loss=0.07241, over 19836.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2585, pruned_loss=0.03928, ctc_loss=0.07362, over 3821136.31 frames. ], batch size: 57, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:21:42,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=257834.66666666666, ans=0.125 +2024-08-29 15:21:53,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257888.0, ans=0.125 +2024-08-29 15:22:10,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=257994.66666666666, ans=0.125 +2024-08-29 15:22:14,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.10 vs. 
limit=15.0 +2024-08-29 15:22:20,122 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.405e+02 1.673e+02 2.019e+02 3.302e+02, threshold=3.347e+02, percent-clipped=0.0 +2024-08-29 15:22:24,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=258101.33333333334, ans=0.0 +2024-08-29 15:22:24,715 INFO [train.py:1114] (3/4) Epoch 20, batch 1100, loss[loss=0.1711, simple_loss=0.2565, pruned_loss=0.03108, ctc_loss=0.05858, over 19579.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2583, pruned_loss=0.03893, ctc_loss=0.07283, over 3828453.10 frames. ], batch size: 52, lr: 7.50e-03, grad_scale: 32.0 +2024-08-29 15:22:33,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.51 vs. limit=15.0 +2024-08-29 15:23:14,516 INFO [train.py:1114] (3/4) Epoch 20, batch 1150, loss[loss=0.1818, simple_loss=0.2609, pruned_loss=0.03749, ctc_loss=0.06909, over 19584.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2583, pruned_loss=0.03898, ctc_loss=0.07287, over 3827007.66 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:23:15,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.58 vs. limit=15.0 +2024-08-29 15:23:34,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0 +2024-08-29 15:23:54,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=258581.33333333334, ans=0.0 +2024-08-29 15:23:58,294 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.386e+02 1.703e+02 2.133e+02 3.069e+02, threshold=3.407e+02, percent-clipped=0.0 +2024-08-29 15:24:02,918 INFO [train.py:1114] (3/4) Epoch 20, batch 1200, loss[loss=0.1923, simple_loss=0.2685, pruned_loss=0.04218, ctc_loss=0.07951, over 19843.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.259, pruned_loss=0.03905, ctc_loss=0.07308, over 3823359.18 frames. ], batch size: 57, lr: 7.49e-03, grad_scale: 32.0 +2024-08-29 15:24:19,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=258688.0, ans=0.125 +2024-08-29 15:24:26,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=258741.33333333334, ans=0.0 +2024-08-29 15:24:29,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.33 vs. limit=15.0 +2024-08-29 15:24:32,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. limit=15.0 +2024-08-29 15:24:43,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0 +2024-08-29 15:24:50,407 INFO [train.py:1114] (3/4) Epoch 20, batch 1250, loss[loss=0.2082, simple_loss=0.2785, pruned_loss=0.04956, ctc_loss=0.0971, over 19512.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2595, pruned_loss=0.0392, ctc_loss=0.07323, over 3841902.61 frames. 
], batch size: 61, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:25:23,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259061.33333333334, ans=0.125 +2024-08-29 15:25:24,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.73 vs. limit=12.0 +2024-08-29 15:25:35,744 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.412e+02 1.655e+02 2.039e+02 3.415e+02, threshold=3.310e+02, percent-clipped=2.0 +2024-08-29 15:25:37,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259114.66666666666, ans=0.125 +2024-08-29 15:25:40,344 INFO [train.py:1114] (3/4) Epoch 20, batch 1300, loss[loss=0.2191, simple_loss=0.2881, pruned_loss=0.0539, ctc_loss=0.1058, over 18920.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2588, pruned_loss=0.03891, ctc_loss=0.07262, over 3845313.70 frames. ], batch size: 76, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:25:54,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259221.33333333334, ans=0.125 +2024-08-29 15:25:58,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=259274.66666666666, ans=0.0 +2024-08-29 15:26:13,859 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:26:23,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=259381.33333333334, ans=0.0 +2024-08-29 15:26:24,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.42 vs. limit=10.0 +2024-08-29 15:26:26,568 INFO [train.py:1114] (3/4) Epoch 20, batch 1350, loss[loss=0.1689, simple_loss=0.2496, pruned_loss=0.03267, ctc_loss=0.05685, over 19766.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2588, pruned_loss=0.03871, ctc_loss=0.07217, over 3856715.66 frames. ], batch size: 54, lr: 7.48e-03, grad_scale: 32.0 +2024-08-29 15:27:06,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0 +2024-08-29 15:27:14,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.50 vs. limit=15.0 +2024-08-29 15:27:28,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. 
limit=15.0 +2024-08-29 15:27:29,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259541.33333333334, ans=0.1 +2024-08-29 15:27:30,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=259541.33333333334, ans=0.025 +2024-08-29 15:27:32,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259594.66666666666, ans=0.0 +2024-08-29 15:27:33,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.94 vs. limit=10.0 +2024-08-29 15:27:34,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=259594.66666666666, ans=0.125 +2024-08-29 15:27:38,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=259594.66666666666, ans=0.125 +2024-08-29 15:27:46,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=259648.0, ans=0.2 +2024-08-29 15:27:47,059 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.431e+02 1.643e+02 2.121e+02 3.858e+02, threshold=3.286e+02, percent-clipped=2.0 +2024-08-29 15:27:51,739 INFO [train.py:1114] (3/4) Epoch 20, batch 1400, loss[loss=0.1656, simple_loss=0.2288, pruned_loss=0.03797, ctc_loss=0.06618, over 19657.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2586, pruned_loss=0.0387, ctc_loss=0.07223, over 3863644.08 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:27:52,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259701.33333333334, ans=0.125 +2024-08-29 15:27:54,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=259701.33333333334, ans=0.07 +2024-08-29 15:28:04,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=259754.66666666666, ans=0.0 +2024-08-29 15:30:05,109 INFO [train.py:1114] (3/4) Epoch 20, batch 1450, loss[loss=0.2047, simple_loss=0.2845, pruned_loss=0.04576, ctc_loss=0.08335, over 19646.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2592, pruned_loss=0.03886, ctc_loss=0.07248, over 3862602.45 frames. 
], batch size: 63, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:30:23,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-29 15:30:24,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=260021.33333333334, ans=0.125 +2024-08-29 15:30:27,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 15:30:33,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260074.66666666666, ans=0.125 +2024-08-29 15:30:34,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=260128.0, ans=0.0 +2024-08-29 15:30:34,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=260128.0, ans=0.025 +2024-08-29 15:30:39,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-08-29 15:30:41,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260128.0, ans=0.125 +2024-08-29 15:30:43,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260181.33333333334, ans=0.1 +2024-08-29 15:30:44,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=260181.33333333334, ans=0.125 +2024-08-29 15:30:46,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0 +2024-08-29 15:30:48,903 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.404e+02 1.559e+02 1.995e+02 3.603e+02, threshold=3.118e+02, percent-clipped=1.0 +2024-08-29 15:30:53,755 INFO [train.py:1114] (3/4) Epoch 20, batch 1500, loss[loss=0.1873, simple_loss=0.2708, pruned_loss=0.03729, ctc_loss=0.07282, over 19585.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2595, pruned_loss=0.03885, ctc_loss=0.07248, over 3862617.62 frames. ], batch size: 57, lr: 7.47e-03, grad_scale: 32.0 +2024-08-29 15:30:57,052 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:31:24,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=260394.66666666666, ans=0.125 +2024-08-29 15:31:41,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=260501.33333333334, ans=0.015 +2024-08-29 15:31:42,534 INFO [train.py:1114] (3/4) Epoch 20, batch 1550, loss[loss=0.2097, simple_loss=0.2778, pruned_loss=0.05195, ctc_loss=0.09411, over 19635.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2597, pruned_loss=0.0391, ctc_loss=0.07309, over 3846805.53 frames. 
], batch size: 60, lr: 7.46e-03, grad_scale: 16.0 +2024-08-29 15:32:02,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=260608.0, ans=0.0 +2024-08-29 15:32:08,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260608.0, ans=0.1 +2024-08-29 15:32:25,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=260714.66666666666, ans=0.125 +2024-08-29 15:32:26,818 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.439e+02 1.834e+02 2.160e+02 3.604e+02, threshold=3.667e+02, percent-clipped=4.0 +2024-08-29 15:32:28,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=260714.66666666666, ans=0.2 +2024-08-29 15:32:30,536 INFO [train.py:1114] (3/4) Epoch 20, batch 1600, loss[loss=0.1807, simple_loss=0.2645, pruned_loss=0.03536, ctc_loss=0.06529, over 19854.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2594, pruned_loss=0.03896, ctc_loss=0.07277, over 3836004.15 frames. ], batch size: 57, lr: 7.46e-03, grad_scale: 32.0 +2024-08-29 15:32:31,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.72 vs. limit=12.0 +2024-08-29 15:32:35,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=260768.0, ans=0.0 +2024-08-29 15:33:15,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260981.33333333334, ans=0.1 +2024-08-29 15:33:20,596 INFO [train.py:1114] (3/4) Epoch 20, batch 1650, loss[loss=0.1961, simple_loss=0.2745, pruned_loss=0.04203, ctc_loss=0.08404, over 19670.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2588, pruned_loss=0.03882, ctc_loss=0.07266, over 3832121.53 frames. ], batch size: 59, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:33:20,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=261034.66666666666, ans=0.05 +2024-08-29 15:33:37,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261088.0, ans=0.0 +2024-08-29 15:33:41,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-08-29 15:33:57,409 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.57 vs. limit=15.0 +2024-08-29 15:33:58,700 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:34:03,137 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.529e+02 1.762e+02 2.426e+02 4.170e+02, threshold=3.524e+02, percent-clipped=3.0 +2024-08-29 15:34:03,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=261248.0, ans=0.0 +2024-08-29 15:34:06,780 INFO [train.py:1114] (3/4) Epoch 20, batch 1700, loss[loss=0.1617, simple_loss=0.2286, pruned_loss=0.03437, ctc_loss=0.06505, over 19682.00 frames. 
], tot_loss[loss=0.1821, simple_loss=0.2585, pruned_loss=0.03844, ctc_loss=0.07206, over 3846224.64 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:34:10,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=261301.33333333334, ans=0.125 +2024-08-29 15:34:13,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=261301.33333333334, ans=0.1 +2024-08-29 15:34:14,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=261301.33333333334, ans=0.035 +2024-08-29 15:34:14,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=261301.33333333334, ans=0.0 +2024-08-29 15:34:25,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=261354.66666666666, ans=0.025 +2024-08-29 15:34:26,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=261408.0, ans=0.125 +2024-08-29 15:34:43,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261514.66666666666, ans=0.125 +2024-08-29 15:34:48,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261514.66666666666, ans=0.125 +2024-08-29 15:34:53,256 INFO [train.py:1114] (3/4) Epoch 20, batch 1750, loss[loss=0.1595, simple_loss=0.2316, pruned_loss=0.03239, ctc_loss=0.0562, over 19644.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2583, pruned_loss=0.03831, ctc_loss=0.07186, over 3851220.45 frames. ], batch size: 45, lr: 7.45e-03, grad_scale: 32.0 +2024-08-29 15:34:56,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=261568.0, ans=0.0 +2024-08-29 15:35:05,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=261621.33333333334, ans=0.125 +2024-08-29 15:35:19,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261728.0, ans=0.0 +2024-08-29 15:35:28,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261781.33333333334, ans=0.1 +2024-08-29 15:35:33,445 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.486e+02 1.910e+02 2.389e+02 3.898e+02, threshold=3.819e+02, percent-clipped=3.0 +2024-08-29 15:35:33,907 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.24 vs. limit=15.0 +2024-08-29 15:35:36,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=261834.66666666666, ans=0.0 +2024-08-29 15:35:36,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-29 15:35:37,070 INFO [train.py:1114] (3/4) Epoch 20, batch 1800, loss[loss=0.1803, simple_loss=0.2593, pruned_loss=0.03687, ctc_loss=0.06877, over 19626.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.258, pruned_loss=0.03841, ctc_loss=0.0719, over 3852909.19 frames. 
], batch size: 55, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:35:39,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-29 15:35:42,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=261834.66666666666, ans=0.125 +2024-08-29 15:35:49,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261888.0, ans=0.125 +2024-08-29 15:35:49,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261888.0, ans=0.125 +2024-08-29 15:35:52,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261888.0, ans=0.125 +2024-08-29 15:36:05,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261994.66666666666, ans=0.125 +2024-08-29 15:36:14,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262048.0, ans=0.0 +2024-08-29 15:36:21,928 INFO [train.py:1114] (3/4) Epoch 20, batch 1850, loss[loss=0.2019, simple_loss=0.2767, pruned_loss=0.0462, ctc_loss=0.08698, over 19584.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2578, pruned_loss=0.03834, ctc_loss=0.07159, over 3857665.79 frames. ], batch size: 57, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:36:23,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=262101.33333333334, ans=0.0 +2024-08-29 15:36:31,514 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:36:40,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=262208.0, ans=0.2 +2024-08-29 15:36:50,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=262261.3333333333, ans=0.125 +2024-08-29 15:36:59,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262314.6666666667, ans=0.1 +2024-08-29 15:37:03,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.31 vs. limit=12.0 +2024-08-29 15:37:03,773 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.463e+02 1.737e+02 2.223e+02 4.343e+02, threshold=3.475e+02, percent-clipped=3.0 +2024-08-29 15:37:07,294 INFO [train.py:1114] (3/4) Epoch 20, batch 1900, loss[loss=0.1702, simple_loss=0.257, pruned_loss=0.03023, ctc_loss=0.05709, over 19647.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.258, pruned_loss=0.03834, ctc_loss=0.07156, over 3862953.43 frames. 
], batch size: 59, lr: 7.44e-03, grad_scale: 32.0 +2024-08-29 15:37:33,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=262421.3333333333, ans=0.07 +2024-08-29 15:38:33,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262474.6666666667, ans=0.0 +2024-08-29 15:38:56,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=262581.3333333333, ans=0.125 +2024-08-29 15:39:00,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.97 vs. limit=12.0 +2024-08-29 15:39:01,408 INFO [train.py:1114] (3/4) Epoch 20, batch 1950, loss[loss=0.1748, simple_loss=0.2466, pruned_loss=0.03794, ctc_loss=0.06766, over 19603.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.259, pruned_loss=0.03855, ctc_loss=0.0719, over 3871290.78 frames. ], batch size: 52, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 15:39:06,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=262634.6666666667, ans=0.07 +2024-08-29 15:39:22,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=262741.3333333333, ans=0.125 +2024-08-29 15:39:29,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=262794.6666666667, ans=0.04949747468305833 +2024-08-29 15:39:34,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=262794.6666666667, ans=0.125 +2024-08-29 15:39:37,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.05 vs. limit=10.0 +2024-08-29 15:39:41,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=262848.0, ans=0.5 +2024-08-29 15:39:43,480 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.359e+02 1.503e+02 2.197e+02 3.515e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-29 15:39:44,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=262848.0, ans=0.0 +2024-08-29 15:39:47,121 INFO [train.py:1114] (3/4) Epoch 20, batch 2000, loss[loss=0.1577, simple_loss=0.229, pruned_loss=0.03166, ctc_loss=0.05756, over 19675.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2597, pruned_loss=0.03894, ctc_loss=0.07278, over 3854870.91 frames. 
], batch size: 45, lr: 7.43e-03, grad_scale: 32.0 +2024-08-29 15:39:56,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=262954.6666666667, ans=0.0 +2024-08-29 15:40:20,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=263061.3333333333, ans=0.2 +2024-08-29 15:40:24,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263114.6666666667, ans=0.1 +2024-08-29 15:40:27,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=263114.6666666667, ans=0.0 +2024-08-29 15:40:31,095 INFO [train.py:1114] (3/4) Epoch 20, batch 2050, loss[loss=0.1545, simple_loss=0.2307, pruned_loss=0.02886, ctc_loss=0.05136, over 19699.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2587, pruned_loss=0.03898, ctc_loss=0.07287, over 3850390.55 frames. ], batch size: 47, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:02,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263328.0, ans=0.125 +2024-08-29 15:41:10,988 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.438e+02 1.706e+02 2.328e+02 5.097e+02, threshold=3.413e+02, percent-clipped=11.0 +2024-08-29 15:41:14,536 INFO [train.py:1114] (3/4) Epoch 20, batch 2100, loss[loss=0.1759, simple_loss=0.2519, pruned_loss=0.03657, ctc_loss=0.067, over 19774.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2583, pruned_loss=0.03868, ctc_loss=0.07239, over 3858025.09 frames. ], batch size: 54, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:32,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.57 vs. limit=15.0 +2024-08-29 15:41:44,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=263594.6666666667, ans=0.125 +2024-08-29 15:41:51,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=263648.0, ans=0.125 +2024-08-29 15:41:52,112 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0 +2024-08-29 15:41:55,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.72 vs. limit=15.0 +2024-08-29 15:41:56,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=15.0 +2024-08-29 15:41:57,782 INFO [train.py:1114] (3/4) Epoch 20, batch 2150, loss[loss=0.1764, simple_loss=0.2518, pruned_loss=0.03681, ctc_loss=0.06858, over 19861.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2574, pruned_loss=0.03843, ctc_loss=0.07197, over 3870347.82 frames. ], batch size: 52, lr: 7.42e-03, grad_scale: 32.0 +2024-08-29 15:41:59,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=15.0 +2024-08-29 15:42:10,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. 
limit=15.0 +2024-08-29 15:42:26,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=263861.3333333333, ans=0.125 +2024-08-29 15:42:27,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.05 vs. limit=15.0 +2024-08-29 15:42:37,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.475e+02 1.770e+02 2.541e+02 4.904e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-29 15:42:40,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=263968.0, ans=0.125 +2024-08-29 15:42:41,080 INFO [train.py:1114] (3/4) Epoch 20, batch 2200, loss[loss=0.1861, simple_loss=0.2735, pruned_loss=0.03604, ctc_loss=0.06658, over 19581.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2572, pruned_loss=0.03833, ctc_loss=0.07174, over 3868929.77 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:42:42,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=263968.0, ans=0.07 +2024-08-29 15:42:42,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263968.0, ans=0.1 +2024-08-29 15:42:47,335 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:42:57,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264021.3333333333, ans=0.125 +2024-08-29 15:42:57,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264074.6666666667, ans=0.1 +2024-08-29 15:43:22,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=264181.3333333333, ans=0.0 +2024-08-29 15:43:25,157 INFO [train.py:1114] (3/4) Epoch 20, batch 2250, loss[loss=0.1746, simple_loss=0.2538, pruned_loss=0.0343, ctc_loss=0.06678, over 19609.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2577, pruned_loss=0.03837, ctc_loss=0.07181, over 3867828.09 frames. ], batch size: 55, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:43:26,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. 
limit=6.0 +2024-08-29 15:43:38,087 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-29 15:43:47,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=264341.3333333333, ans=0.04949747468305833 +2024-08-29 15:43:52,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264394.6666666667, ans=0.0 +2024-08-29 15:43:58,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=264448.0, ans=0.2 +2024-08-29 15:44:01,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=264448.0, ans=0.125 +2024-08-29 15:44:04,424 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.490e+02 1.909e+02 2.542e+02 3.813e+02, threshold=3.818e+02, percent-clipped=1.0 +2024-08-29 15:44:07,873 INFO [train.py:1114] (3/4) Epoch 20, batch 2300, loss[loss=0.1722, simple_loss=0.2454, pruned_loss=0.03629, ctc_loss=0.06599, over 19514.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2572, pruned_loss=0.03872, ctc_loss=0.07223, over 3862372.82 frames. ], batch size: 49, lr: 7.41e-03, grad_scale: 32.0 +2024-08-29 15:44:08,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=264501.3333333333, ans=0.125 +2024-08-29 15:44:22,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=264554.6666666667, ans=0.0 +2024-08-29 15:44:25,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-08-29 15:44:44,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=264714.6666666667, ans=0.125 +2024-08-29 15:44:50,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264714.6666666667, ans=0.1 +2024-08-29 15:44:52,850 INFO [train.py:1114] (3/4) Epoch 20, batch 2350, loss[loss=0.2073, simple_loss=0.2785, pruned_loss=0.05066, ctc_loss=0.08669, over 19659.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2569, pruned_loss=0.03852, ctc_loss=0.07196, over 3865153.71 frames. ], batch size: 63, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:45:13,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=264874.6666666667, ans=0.125 +2024-08-29 15:45:20,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.87 vs. 
limit=15.0 +2024-08-29 15:45:24,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=264928.0, ans=0.125 +2024-08-29 15:45:29,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=264981.3333333333, ans=0.0 +2024-08-29 15:45:32,190 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.408e+02 1.642e+02 2.194e+02 4.028e+02, threshold=3.284e+02, percent-clipped=1.0 +2024-08-29 15:45:32,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264981.3333333333, ans=0.125 +2024-08-29 15:45:35,646 INFO [train.py:1114] (3/4) Epoch 20, batch 2400, loss[loss=0.1969, simple_loss=0.2738, pruned_loss=0.04351, ctc_loss=0.0823, over 19338.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2591, pruned_loss=0.03908, ctc_loss=0.07297, over 3860083.88 frames. ], batch size: 71, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:45:42,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265034.6666666667, ans=0.0 +2024-08-29 15:45:43,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=265088.0, ans=0.035 +2024-08-29 15:45:46,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=265088.0, ans=0.025 +2024-08-29 15:46:02,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=265194.6666666667, ans=0.05 +2024-08-29 15:46:03,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=265194.6666666667, ans=0.07 +2024-08-29 15:46:12,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.36 vs. limit=12.0 +2024-08-29 15:46:19,426 INFO [train.py:1114] (3/4) Epoch 20, batch 2450, loss[loss=0.2403, simple_loss=0.2864, pruned_loss=0.06988, ctc_loss=0.1359, over 13237.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2626, pruned_loss=0.04131, ctc_loss=0.07762, over 3732985.70 frames. ], batch size: 140, lr: 7.40e-03, grad_scale: 32.0 +2024-08-29 15:46:38,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.16 vs. limit=15.0 +2024-08-29 15:46:39,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265408.0, ans=0.125 +2024-08-29 15:46:40,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=265408.0, ans=0.125 +2024-08-29 15:46:52,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=265461.3333333333, ans=0.0 +2024-08-29 15:48:24,415 INFO [train.py:1387] (3/4) Done! 
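The per-batch `train.py:1114` lines in the log above all share one fixed shape: `Epoch E, batch B, loss[...current batch...], tot_loss[...running average...], batch size: S, lr: L, grad_scale: G`. For turning a finished log like this into a loss curve, a minimal, hypothetical parser (not part of icefall; the regex simply mirrors the format shown above) could look like:

```python
import re
from pathlib import Path

# Matches the per-batch summary lines above, e.g.
# "... INFO [train.py:1114] (3/4) Epoch 20, batch 2450, loss[...],
#  tot_loss[loss=0.1881, ...], batch size: 140, lr: 7.40e-03, grad_scale: 32.0"
BATCH_RE = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), .*?"
    r"tot_loss\[loss=(?P<tot_loss>[\d.]+).*?"
    r"batch size: (?P<bs>\d+), lr: (?P<lr>[\d.eE+-]+)"
)

def parse_tot_loss(log_path: str):
    """Yield (epoch, batch, running tot_loss, lr) tuples from an icefall-style log."""
    for line in Path(log_path).read_text().splitlines():
        m = BATCH_RE.search(line)
        if m:
            yield int(m["epoch"]), int(m["batch"]), float(m["tot_loss"]), float(m["lr"])

if __name__ == "__main__":
    # Hypothetical filename; point this at any log-train-* file under exp/log/.
    for epoch, batch, loss, lr in parse_tot_loss("log-train-2024-08-29"):
        print(f"epoch {epoch:3d}  batch {batch:6d}  tot_loss {loss:.4f}  lr {lr:.2e}")
```

The `WARNING [optim.py:487]` lines can be mined the same way; their grad-norm quartiles and `percent-clipped` values are the quickest signal of optimization instability in these logs.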
diff --git a/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724582769.cdr2649.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724582769.cdr2649.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..1a2baa3114714985f914ff88f6e0ca682098406e --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724582769.cdr2649.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:96bc39d35154dd36de3e013a5a29e72ef9fd6cdf1008e3faa826fc58e398c9b7 +size 89265 diff --git a/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724706850.cdr2652.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724706850.cdr2652.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..ef188c0a22c193dc59f86d892e2f9f61c76118b3 --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724706850.cdr2652.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7add4eba93d036ac7f5b9c027ebe266cffcbd7ca64eca863032211e07b2ac3b7 +size 476176 diff --git a/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724921333.cdr2538.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724921333.cdr2538.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..6a038e3f7ead7a2c3933e545ddbd81042b74c73b --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724921333.cdr2538.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:298417508396514dd619f432f5bb13a7ced639ff93c989df5ae20fa2807beb13 +size 60574 diff --git a/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724953620.cdr2563.int.cedar.computecanada.ca.70.0 b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724953620.cdr2563.int.cedar.computecanada.ca.70.0 new file mode 100644 index 0000000000000000000000000000000000000000..3b43fbfba7ecfc2e2a329cc61c72610feb6bca4b --- /dev/null +++ b/zipformer/pretrained/ctc/non_causal/exp/tensorboard/events.out.tfevents.1724953620.cdr2563.int.cedar.computecanada.ca.70.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7af9dca7f1f2d8181b709fdf2959c803f079ca80b2d38ad6834bdfa223decfcc +size 60574 diff --git a/zipformer/pretrained/non_ctc/causal/exp/best-train-loss.pt b/zipformer/pretrained/non_ctc/causal/exp/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..2b62b56487be8c3448558136bdd986b8d43ada46 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42a341c05d3aeae728cc848c0727642f61f8a1d681421d7868ddb27199188364 +size 1058859342 diff --git a/zipformer/pretrained/non_ctc/causal/exp/best-valid-loss.pt b/zipformer/pretrained/non_ctc/causal/exp/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..2b62b56487be8c3448558136bdd986b8d43ada46 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:42a341c05d3aeae728cc848c0727642f61f8a1d681421d7868ddb27199188364 +size 1058859342 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-12000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-12000.pt new file mode 100644 index 0000000000000000000000000000000000000000..d135e95c4ec75b1853cc7332c920a518e19f5129 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-12000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:839ce11a0dd83afde261e80c502f60205608efa0179f354ae215d36277da7e29 +size 1058875766 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-16000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-16000.pt new file mode 100644 index 0000000000000000000000000000000000000000..8030c1514f92090c975f329c15994aaefb2cc6f7 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-16000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:671d841100dc016cb63329da4389584f9bd3357cfe5690abc7fb80f5704b2742 +size 1058875830 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-20000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-20000.pt new file mode 100644 index 0000000000000000000000000000000000000000..d8c6ff7060d912c4b0c3fd36d1948fe818b1307b --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-20000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c548b89fe078eb3ec78b9df0ba4223d59885b553c7e63b9a18f0e1187759c777 +size 1058875894 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-24000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-24000.pt new file mode 100644 index 0000000000000000000000000000000000000000..69ea0d80494125d505342490cdaa5e9f7cc61436 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-24000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a5b5734e5f0b1942cc0648a749e5af2122264bca52c40c8fb704c12e54c5f0d +size 1058875958 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-28000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-28000.pt new file mode 100644 index 0000000000000000000000000000000000000000..d84b862605ee5c8e29b100d387679496e8438073 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-28000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b83e9e125da263c7e8a05f935e1f5990dacb304773573fc8f1a7cd287bec899 +size 1058876022 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-32000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-32000.pt new file mode 100644 index 0000000000000000000000000000000000000000..0a14f1fffe301913338819429f6e2a2c19701229 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-32000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cba9716c443aecac6f5c9f50944ea6c87d21e93b33273ecab2dc4e4796b7296 +size 1058876022 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-36000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-36000.pt new file mode 100644 index 0000000000000000000000000000000000000000..532fbaaf530e14efb24d6188a026c98fce0addcb --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-36000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d246dd7617ab9f12840f0cdeb9082554db62cc3d6746d6576a02076b81f07a9e +size 1058876086 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-4000.pt 
b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-4000.pt new file mode 100644 index 0000000000000000000000000000000000000000..70b966b76bbc865060a57f429ed9405609e3b19e --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-4000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea003c6eb6b2b396e037705eebcf83f0e3dcc69411234e925a5b7ac2ac2e4ed4 +size 1058873537 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-40000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-40000.pt new file mode 100644 index 0000000000000000000000000000000000000000..061c4369fb0d8e3490e59173895273956afcc9ae --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-40000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70433385a6929b464fb17714cb1c44c5d4bd31e83260ce95d7704551615760eb +size 1058876150 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-44000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-44000.pt new file mode 100644 index 0000000000000000000000000000000000000000..5cb8769da0b3c2acf56207b61f125d6a1e5404cd --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-44000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2cf9ecfdc691c67f3c537855789e843208fd1f07b03f35c98b7e3fd57f08bb1 +size 1058876214 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-48000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-48000.pt new file mode 100644 index 0000000000000000000000000000000000000000..483a650f135564b96f25eab462c2f157a1f90656 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-48000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3678a97ac7cee03127748b3fdc8429dd4b1bd278c25644bc16856e02e4e284cd +size 1058876278 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-52000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-52000.pt new file mode 100644 index 0000000000000000000000000000000000000000..8d247399015581606d8b2a026a196b7709165925 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-52000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e00e52ff73184bcd6a679c7f2809bc807065c8870e521ced86b678a350ed5c7 +size 1058876342 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-56000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-56000.pt new file mode 100644 index 0000000000000000000000000000000000000000..bc7f457f0289bf5d8efc2929fba8b724dde0892a --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-56000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d3083e49a8614373c9b29e38083e686e38a8e87d6689484230ab37aae765fe8 +size 1058876406 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-60000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-60000.pt new file mode 100644 index 0000000000000000000000000000000000000000..a98f67c1372d790daa60175af68467d101b4068d --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-60000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7369d6e1ea9a293ed3a41a659a77e5d2b49502b31ec6d0cefadb2b5c3eaff0de +size 1058876406 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-64000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-64000.pt new file mode 100644 index 0000000000000000000000000000000000000000..e1c59e964e89f08356b40a45b3c287438cbee60d --- /dev/null +++ 
b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-64000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e1b7af876a7b55d7d6e697bfbb7786ed9b5cb83b60a881a3305d9e880b8bc12 +size 1058876470 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-68000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-68000.pt new file mode 100644 index 0000000000000000000000000000000000000000..81fcd70034440929ba07f326c815490b19f0009d --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-68000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9cc7ab80707aa423d027fa11b5b9d6a2d5e51b3a5fdb730d4cc443678f6c202f +size 1058876726 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-72000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-72000.pt new file mode 100644 index 0000000000000000000000000000000000000000..eef478d032c8a6bda5934a5ab71aaeff5f65a182 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-72000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:edc6138447af4ee560588b90294a02c1dc17ef8546b46f65ef43e6c5eb386507 +size 1058876790 diff --git a/zipformer/pretrained/non_ctc/causal/exp/checkpoint-8000.pt b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-8000.pt new file mode 100644 index 0000000000000000000000000000000000000000..1612523f86fb1bcc68b99365a3c8ebf3caf1e1ec --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/checkpoint-8000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e020a77e8cb9bcfdd8ed3a2bb91de59615f835e818ae13f745c2123e44f12d72 +size 1058873601 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-1.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..2d1f222887c8f98a59f4aadb70e19f6b08e44e56 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87423f5691d8a14712ce9d5dd00b1ac5905d4341609d4e4ffacc5637265594f4 +size 1058855577 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-10.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..d94b85e97aae9e2dd0ecbbe3e0adc4232798153b --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-10.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee28033aa7fab3d3075271efd5d2bef1b10a4f2c1bffdfa3e127a405715f6ada +size 1058858638 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-11.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..18ad085d54c6d01392c07164e01b7304686d2225 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-11.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfc852631f0f082c9e8f27af570000ba426657dcca544ad584d5bb7d9a4ad2eb +size 1058858702 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-12.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..2bf59f82d844528779f98b544d1249fe8de9d269 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5b26ab814d8fe5d2dc53cdd536273273754be53a15962909dccb68835e285cd +size 1058858702 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-13.pt 
b/zipformer/pretrained/non_ctc/causal/exp/epoch-13.pt new file mode 100644 index 0000000000000000000000000000000000000000..6e389ecff0b089793e5ebd0665a5da7ea711b61c --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b9a0b3ea696c54654ec71911d696172e17d4c33ef8834dba69a1b1d61fd8f9c3 +size 1058858766 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-14.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..ebcfd0964165d465ad8ad8e8f4c8f627879d4e29 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:135c83824733b78e680e349f515538f044e8331f93099675a3a673c9d1ef96d4 +size 1058858830 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-15.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-15.pt new file mode 100644 index 0000000000000000000000000000000000000000..2b96bb50e06676cc192c034d57db74de661c285a --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-15.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f268c3b9cf39a3882af0bff735b5e45929458c1d61e4cf49c4981cf4ca0bbe2d +size 1058858894 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-16.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..e68a5e24190b2a7c186a8c5f96c03f90120995b5 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f4a556055cd9ab59e1e078d7a31dd6143d20be4961e84604633206f02e8b68f +size 1058858958 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-17.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..e37a3e08988cb578493d61254bebbca3cc6f5f18 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c476ea39618f9bccf56dcd4754bb747b383e87130f6df19169b98820ab9e175f +size 1058858958 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-18.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..fda3a6a3b87212383eab78f3f60b73d74bd48095 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07059386fdc0fa0a01284eff2c0580e800b4926c4722a0c54832345556d47ec0 +size 1058859022 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-19.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..0050bde473e4580dcbb467093dff17d2f1a43c90 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5c83e56979ec75e03c0517cfe90506b83cbfc7b823a442d878f55e44f20229b +size 1058859278 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-2.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..5ef7a71fbc2f8a1b1d2a56c2f9aaf50660f2c911 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ebd68b199752c6531852e9e5d726968a7cba822cd386524f78b171c8f4a6db3 
+size 1058855641 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-20.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..2b62b56487be8c3448558136bdd986b8d43ada46 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42a341c05d3aeae728cc848c0727642f61f8a1d681421d7868ddb27199188364 +size 1058859342 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-3.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..076e47e4b991c6822bee2dc883789fb603d15df8 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-3.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5cc6e92e749b08a67fde82af27c5ce261f94ade95dd17ea290094f43d3907690 +size 1058855705 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-4.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-4.pt new file mode 100644 index 0000000000000000000000000000000000000000..490831f99f508c8d310fcc95524364f6f5495832 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:146ff238a0b1c65c8972843792ffe6204a61043629ef3ef76d3784954caf100d +size 1058855705 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-5.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..a714fa8b111fe4b96ed9604cfdea39ce5283dec2 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cedccb7394008c072c73b565b0e85615315fbafb2765712833f22b8d416c8e79 +size 1058855769 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-6.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..4a3b2447fccd6a76f3fab0c673f428e16198e3ac --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae695d69c810e582457aad66659a886b9b3aa2e784e679dcf1ff1f3d99016cca +size 1058855833 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-7.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..0c46fc4f3116ee69dc3994b34f8c2d5e89c30567 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:811fdf05badd432582481c5d814dedd2ae1d2112e8723708f5c5b5bbcddc775a +size 1058855897 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-8.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-8.pt new file mode 100644 index 0000000000000000000000000000000000000000..659aba5a2a6a93c6ddb780555807de4a82a1d8eb --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a2c4ed34ccf68604996ede993b6bd20b448110be7fd52e588231a7e8299d2277 +size 1058855897 diff --git a/zipformer/pretrained/non_ctc/causal/exp/epoch-9.pt b/zipformer/pretrained/non_ctc/causal/exp/epoch-9.pt new file mode 100644 index 0000000000000000000000000000000000000000..953e0b74d61e29b5e38740910536ea1023f1e87c --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:7f0b9ac45a48e7b367c4fa58ef17af284e8adc0f3adc67e9156c69bfd5e52f87 +size 1058855961 diff --git a/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-26-0 b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-26-0 new file mode 100644 index 0000000000000000000000000000000000000000..ea43f306db82815d2c61209de8a09958e38a1aaf --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-26-0 @@ -0,0 +1,10130 @@ +2024-08-02 23:23:26,850 INFO [train.py:1182] (0/4) Training started +2024-08-02 23:23:26,854 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-08-02 23:23:26,890 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': 'e3b0958-dirty', 'icefall-git-date': 'Tue Jul 30 21:51:45 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2656.int.cedar.computecanada.ca', 'IP address': '172.16.146.93'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 550, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 
'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-02 23:23:26,890 INFO [train.py:1212] (0/4) About to create model +2024-08-02 23:23:31,820 INFO [train.py:1216] (0/4) Number of model parameters: 66110931 +2024-08-02 23:23:33,691 INFO [train.py:1231] (0/4) Using DDP +2024-08-02 23:23:49,629 INFO [asr_datamodule.py:909] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-02 23:23:56,249 INFO [asr_datamodule.py:711] (0/4) Disable MUSAN +2024-08-02 23:23:56,249 INFO [asr_datamodule.py:729] (0/4) Enable SpecAugment +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:730] (0/4) Time warp factor: 80 +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:740] (0/4) Num frame mask: 10 +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:753] (0/4) About to create train dataset +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:780] (0/4) Using DynamicBucketingSampler. +2024-08-02 23:23:57,858 INFO [asr_datamodule.py:797] (0/4) About to create train dataloader +2024-08-02 23:23:57,865 INFO [asr_datamodule.py:926] (0/4) About to get dev-clean cuts +2024-08-02 23:23:58,241 INFO [asr_datamodule.py:933] (0/4) About to get dev-other cuts +2024-08-02 23:23:58,294 INFO [asr_datamodule.py:829] (0/4) About to create dev dataset +2024-08-02 23:23:58,617 INFO [asr_datamodule.py:846] (0/4) About to create dev dataloader +2024-08-02 23:23:58,618 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-02 23:30:00,738 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 8432MB +2024-08-02 23:30:01,695 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 8432MB +2024-08-02 23:32:14,987 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 8645MB +2024-08-02 23:32:16,111 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 8645MB +2024-08-02 23:33:32,035 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=256, metric=50.21 vs. limit=7.5 +2024-08-02 23:33:32,300 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=47.86 vs. limit=7.5 +2024-08-02 23:33:32,662 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 8645MB +2024-08-02 23:33:33,405 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=384, metric=75.50 vs. limit=7.5 +2024-08-02 23:33:39,254 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 8645MB +2024-08-02 23:34:54,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=7.5 +2024-08-02 23:34:57,139 INFO [train.py:1114] (0/4) Epoch 1, batch 0, loss[loss=7.882, simple_loss=7.203, pruned_loss=6.781, over 13354.00 frames. ], tot_loss[loss=7.882, simple_loss=7.203, pruned_loss=6.781, over 13354.00 frames. ], batch size: 33, lr: 2.25e-02, grad_scale: 1.0 +2024-08-02 23:34:57,139 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-02 23:35:53,585 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=7.994, simple_loss=7.311, pruned_loss=6.819, over 944034.00 frames. 
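The hyperparameter dump at the start of this log encodes per-encoder-stack settings as comma-separated strings ('num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'encoder_dim': '192,256,384,512,384,256', ...), one value per Zipformer stack. As a sketch of how such strings expand into per-stack tuples (the actual icefall helper may differ in name and details):

```python
def to_int_tuple(s: str) -> tuple[int, ...]:
    """Expand a comma-separated config string such as '2,2,3,4,3,2' into ints."""
    return tuple(int(x) for x in s.split(","))

# Values copied verbatim from the config dump above: six encoder stacks,
# downsampled 1x/2x/4x/8x/4x/2x in time with correspondingly wider middles.
num_encoder_layers  = to_int_tuple("2,2,3,4,3,2")
downsampling_factor = to_int_tuple("1,2,4,8,4,2")
encoder_dim         = to_int_tuple("192,256,384,512,384,256")

assert len(num_encoder_layers) == len(downsampling_factor) == len(encoder_dim) == 6
```

The same convention covers the streaming options ('chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1'), where the -1 entries appear to select unlimited context.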
+2024-08-02 23:35:53,586 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 8645MB +2024-08-02 23:37:12,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=0.0, ans=5.0 +2024-08-02 23:39:43,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.388e+03 2.561e+03 2.629e+03 3.528e+03 3.944e+03, threshold=1.052e+04, percent-clipped=0.0 +2024-08-02 23:42:45,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=52.24 vs. limit=5.009166666666666 +2024-08-02 23:43:00,377 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=45.88 vs. limit=7.555 +2024-08-02 23:43:00,505 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 5.313e+02 2.388e+03 2.781e+03 4.030e+03, threshold=9.553e+03, percent-clipped=0.0 +2024-08-02 23:43:32,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=73.33333333333333, ans=0.7507333333333334 +2024-08-02 23:44:43,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=51.48 vs. limit=4.044 +2024-08-02 23:45:02,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110.0, ans=0.49484375 +2024-08-02 23:45:19,164 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-02 23:45:19,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=322.04 vs. limit=7.555 +2024-08-02 23:45:58,758 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 4.964e+02 6.315e+02 2.388e+03 4.030e+03, threshold=2.526e+03, percent-clipped=0.0 +2024-08-02 23:46:50,275 INFO [train.py:1114] (0/4) Epoch 1, batch 50, loss[loss=1.246, simple_loss=1.103, pruned_loss=1.277, over 13404.00 frames. ], tot_loss[loss=3.072, simple_loss=2.818, pruned_loss=2.465, over 578019.24 frames. ], batch size: 32, lr: 2.48e-02, grad_scale: 0.5 +2024-08-02 23:47:22,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=183.33333333333334, ans=0.19312500000000002 +2024-08-02 23:48:25,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=58.93 vs. limit=7.665 +2024-08-02 23:49:55,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220.0, ans=0.4896875 +2024-08-02 23:49:58,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=34.55 vs. 
limit=5.128333333333333 +2024-08-02 23:51:52,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=256.6666666666667, ans=0.8910166666666667 +2024-08-02 23:51:53,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=293.3333333333333, ans=0.48625 +2024-08-02 23:51:53,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=211.04 vs. limit=7.61 +2024-08-02 23:52:21,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=15.48 vs. limit=5.073333333333333 +2024-08-02 23:53:24,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=60.86 vs. limit=7.72 +2024-08-02 23:53:25,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=19.45 vs. limit=5.073333333333333 +2024-08-02 23:54:19,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=330.0, ans=0.09793750000000001 +2024-08-02 23:54:19,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=37.95 vs. limit=7.7475 +2024-08-02 23:54:19,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=97.78 vs. limit=7.62375 +2024-08-02 23:54:20,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=330.0, ans=0.29669999999999996 +2024-08-02 23:55:14,130 INFO [train.py:1114] (0/4) Epoch 1, batch 100, loss[loss=1.172, simple_loss=1.013, pruned_loss=1.27, over 13519.00 frames. ], tot_loss[loss=2.065, simple_loss=1.866, pruned_loss=1.822, over 1025053.07 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 1.0 +2024-08-02 23:55:16,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=32.41 vs. limit=7.775 +2024-08-02 23:55:16,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=86.85 vs. limit=5.183333333333334 +2024-08-02 23:55:16,360 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+01 6.736e+01 1.462e+02 5.319e+02 4.030e+03, threshold=2.924e+02, percent-clipped=0.0 +2024-08-02 23:55:16,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=22.15 vs. limit=7.6375 +2024-08-02 23:55:16,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=177.33 vs. limit=7.6375 +2024-08-02 23:55:29,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=366.6666666666667, ans=0.4828125 +2024-08-02 23:55:38,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=89.21 vs. 
limit=7.6375 +2024-08-02 23:56:17,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=18.74 vs. limit=7.65125 +2024-08-02 23:56:17,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=14.43 vs. limit=5.100833333333333 +2024-08-02 23:56:17,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=366.03 vs. limit=7.65125 +2024-08-02 23:56:17,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=15.26 vs. limit=4.161333333333333 +2024-08-02 23:56:21,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=45.92 vs. limit=7.665 +2024-08-02 23:56:47,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=62.77 vs. limit=7.665 +2024-08-02 23:57:16,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=37.05 vs. limit=7.665 +2024-08-02 23:57:22,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=24.59 vs. limit=7.665 +2024-08-03 00:07:38,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=44.05 vs. limit=7.67875 +2024-08-03 00:07:38,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=476.6666666666667, ans=0.29523333333333335 +2024-08-03 00:08:30,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=57.51 vs. limit=7.8575 +2024-08-03 00:14:27,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=476.6666666666667, ans=0.47765625 +2024-08-03 00:15:39,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=31.39 vs. limit=7.885 +2024-08-03 00:15:49,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=513.3333333333334, ans=0.4759375 +2024-08-03 00:16:22,415 INFO [train.py:1114] (0/4) Epoch 1, batch 150, loss[loss=0.9591, simple_loss=0.8172, pruned_loss=1.029, over 13427.00 frames. ], tot_loss[loss=1.645, simple_loss=1.465, pruned_loss=1.542, over 1386670.77 frames. ], batch size: 32, lr: 2.93e-02, grad_scale: 1.0 +2024-08-03 00:16:22,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=550.0, ans=0.47421875 +2024-08-03 00:16:24,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=18.85 vs. limit=5.275 +2024-08-03 00:16:54,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=83.78 vs. 
limit=7.72 +2024-08-03 00:16:55,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.62 vs. limit=7.94 +2024-08-03 00:17:02,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=30.33 vs. limit=7.72 +2024-08-03 00:17:11,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=9.71 vs. limit=4.234666666666667 +2024-08-03 00:17:49,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=43.60 vs. limit=7.73375 +2024-08-03 00:19:03,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=164.58 vs. limit=7.7475 +2024-08-03 00:19:31,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=31.76 vs. limit=7.76125 +2024-08-03 00:19:32,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=696.6666666666666, ans=0.8756166666666667 +2024-08-03 00:19:48,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=27.44 vs. limit=8.0225 +2024-08-03 00:20:24,580 INFO [train.py:1114] (0/4) Epoch 1, batch 200, loss[loss=1.081, simple_loss=0.923, pruned_loss=1.072, over 12787.00 frames. ], tot_loss[loss=1.418, simple_loss=1.249, pruned_loss=1.365, over 1665615.69 frames. ], batch size: 59, lr: 3.15e-02, grad_scale: 2.0 +2024-08-03 00:20:29,211 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+01 4.646e+01 6.073e+01 7.987e+01 1.954e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-08-03 00:20:35,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=733.3333333333334, ans=0.465625 +2024-08-03 00:21:00,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=733.3333333333334, ans=0.09541666666666668 +2024-08-03 00:21:18,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=770.0, ans=0.46390625 +2024-08-03 00:21:30,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=32.56 vs. limit=8.0775 +2024-08-03 00:21:39,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=20.79 vs. limit=7.78875 +2024-08-03 00:21:41,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=806.6666666666666, ans=0.16975 +2024-08-03 00:22:02,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=58.79 vs. 
limit=7.8025 +2024-08-03 00:22:04,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=806.6666666666666, ans=5.504166666666666 +2024-08-03 00:22:06,217 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=33.03 vs. limit=8.105 +2024-08-03 00:22:45,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=29.60 vs. limit=8.1325 +2024-08-03 00:22:54,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=12.77 vs. limit=5.210833333333333 +2024-08-03 00:23:11,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=880.0, ans=5.22 +2024-08-03 00:23:11,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=51.64 vs. limit=7.83 +2024-08-03 00:23:14,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=880.0, ans=0.2912 +2024-08-03 00:23:15,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=67.37 vs. limit=7.83 +2024-08-03 00:23:17,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.79 vs. limit=4.352 +2024-08-03 00:23:19,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.10 vs. limit=8.1875 +2024-08-03 00:23:19,654 INFO [train.py:1114] (0/4) Epoch 1, batch 250, loss[loss=1.009, simple_loss=0.8509, pruned_loss=0.993, over 13360.00 frames. ], tot_loss[loss=1.279, simple_loss=1.116, pruned_loss=1.243, over 1884846.00 frames. ], batch size: 46, lr: 3.38e-02, grad_scale: 2.0 +2024-08-03 00:23:23,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=916.6666666666666, ans=7.84375 +2024-08-03 00:23:23,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=7.84375 +2024-08-03 00:23:27,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=38.40 vs. limit=7.84375 +2024-08-03 00:23:32,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.41 vs. limit=8.1875 +2024-08-03 00:24:29,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.94 vs. limit=5.2475 +2024-08-03 00:24:31,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=990.0, ans=0.1943125 +2024-08-03 00:24:36,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=8.73 vs. 
limit=8.2425 +2024-08-03 00:24:44,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=1026.6666666666667, ans=0.37166666666666665 +2024-08-03 00:24:46,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=4.410666666666667 +2024-08-03 00:24:46,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-08-03 00:24:47,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=8.12 vs. limit=8.27 +2024-08-03 00:24:51,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=189.30 vs. limit=7.885 +2024-08-03 00:24:55,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=1063.3333333333333, ans=0.076075 +2024-08-03 00:24:58,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=1063.3333333333333, ans=0.45015625 +2024-08-03 00:25:04,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.47 vs. limit=4.44 +2024-08-03 00:25:05,109 INFO [train.py:1114] (0/4) Epoch 1, batch 300, loss[loss=0.959, simple_loss=0.8013, pruned_loss=0.926, over 13472.00 frames. ], tot_loss[loss=1.187, simple_loss=1.026, pruned_loss=1.156, over 2051826.66 frames. ], batch size: 42, lr: 3.60e-02, grad_scale: 4.0 +2024-08-03 00:25:05,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=7.9125 +2024-08-03 00:25:09,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.00 vs. limit=8.325 +2024-08-03 00:25:10,031 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 8.005e+01 9.897e+01 1.290e+02 2.424e+02, threshold=1.979e+02, percent-clipped=29.0 +2024-08-03 00:25:12,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=60.26 vs. limit=7.9125 +2024-08-03 00:25:16,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=177.97 vs. limit=7.9125 +2024-08-03 00:25:17,910 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=12.95 vs. limit=7.9125 +2024-08-03 00:26:16,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.68 vs. limit=8.38 +2024-08-03 00:26:27,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=38.10 vs. 
limit=8.38 +2024-08-03 00:26:45,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=1210.0, ans=0.44328125 +2024-08-03 00:26:48,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=1246.6666666666667, ans=0.44156249999999997 +2024-08-03 00:26:49,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=89.78 vs. limit=7.9675 +2024-08-03 00:26:50,308 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=4.498666666666667 +2024-08-03 00:27:06,204 INFO [train.py:1114] (0/4) Epoch 1, batch 350, loss[loss=0.8964, simple_loss=0.7377, pruned_loss=0.8661, over 13596.00 frames. ], tot_loss[loss=1.127, simple_loss=0.9655, pruned_loss=1.095, over 2182105.91 frames. ], batch size: 33, lr: 3.83e-02, grad_scale: 4.0 +2024-08-03 00:27:19,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.72 vs. limit=8.4625 +2024-08-03 00:27:20,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.95 vs. limit=8.4625 +2024-08-03 00:27:30,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=28.90 vs. limit=7.995 +2024-08-03 00:27:36,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=1320.0, ans=0.438125 +2024-08-03 00:27:49,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.10 vs. limit=5.678333333333334 +2024-08-03 00:27:54,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=15.95 vs. limit=5.0 +2024-08-03 00:27:57,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=21.76 vs. limit=8.0225 +2024-08-03 00:27:59,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1393.3333333333333, ans=0.4346875 +2024-08-03 00:28:00,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=39.71 vs. limit=8.0225 +2024-08-03 00:28:12,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1430.0, ans=0.43296875 +2024-08-03 00:28:22,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=8.03625 +2024-08-03 00:28:36,676 INFO [train.py:1114] (0/4) Epoch 1, batch 400, loss[loss=1.039, simple_loss=0.8507, pruned_loss=0.9759, over 13357.00 frames. ], tot_loss[loss=1.081, simple_loss=0.9175, pruned_loss=1.046, over 2285673.99 frames. 
], batch size: 37, lr: 4.05e-02, grad_scale: 8.0 +2024-08-03 00:28:44,138 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.328e+01 8.404e+01 1.145e+02 1.534e+02 2.452e+02, threshold=2.291e+02, percent-clipped=10.0 +2024-08-03 00:29:21,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=1466.6666666666667, ans=0.7646666666666667 +2024-08-03 00:30:30,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=8.0775 +2024-08-03 00:30:33,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=8.0775 +2024-08-03 00:30:35,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.34 vs. limit=8.655 +2024-08-03 00:30:35,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.67 vs. limit=8.0775 +2024-08-03 00:30:50,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.92 vs. limit=5.394166666666667 +2024-08-03 00:30:52,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.26 vs. limit=5.788333333333333 +2024-08-03 00:31:35,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1613.3333333333333, ans=0.424375 +2024-08-03 00:32:27,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=20.35 vs. limit=8.105 +2024-08-03 00:32:40,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=1613.3333333333333, ans=0.2242 +2024-08-03 00:32:51,737 INFO [train.py:1114] (0/4) Epoch 1, batch 450, loss[loss=0.9694, simple_loss=0.787, pruned_loss=0.8951, over 13550.00 frames. ], tot_loss[loss=1.048, simple_loss=0.881, pruned_loss=1.005, over 2359013.38 frames. ], batch size: 38, lr: 4.28e-02, grad_scale: 8.0 +2024-08-03 00:33:53,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=1686.6666666666667, ans=0.13674999999999998 +2024-08-03 00:34:34,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=8.82 +2024-08-03 00:34:45,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.56 vs. limit=5.88 +2024-08-03 00:34:45,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.72 vs. limit=8.16 +2024-08-03 00:34:47,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=14.66 vs. 
limit=8.16 +2024-08-03 00:35:16,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=1796.6666666666667, ans=0.41578125 +2024-08-03 00:35:25,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=1796.6666666666667, ans=0.2820333333333333 +2024-08-03 00:35:38,380 INFO [train.py:1114] (0/4) Epoch 1, batch 500, loss[loss=1.006, simple_loss=0.8193, pruned_loss=0.8895, over 13439.00 frames. ], tot_loss[loss=1.022, simple_loss=0.8524, pruned_loss=0.9683, over 2424526.09 frames. ], batch size: 43, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:35:53,392 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.232e+01 1.074e+02 1.283e+02 1.686e+02 3.614e+02, threshold=2.565e+02, percent-clipped=11.0 +2024-08-03 00:35:57,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=1833.3333333333333, ans=0.13124999999999998 +2024-08-03 00:36:07,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.67 vs. limit=8.1875 +2024-08-03 00:36:12,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=19.39 vs. limit=8.20125 +2024-08-03 00:36:42,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=30.97 vs. limit=8.20125 +2024-08-03 00:36:51,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.00 vs. limit=8.20125 +2024-08-03 00:38:03,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1906.6666666666667, ans=0.2809333333333333 +2024-08-03 00:38:57,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=17.56 vs. limit=8.22875 +2024-08-03 00:39:03,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=14.11 vs. limit=8.22875 +2024-08-03 00:43:15,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=72.51 vs. limit=8.22875 +2024-08-03 00:43:26,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.30 vs. limit=8.985 +2024-08-03 00:43:38,158 INFO [train.py:1114] (0/4) Epoch 1, batch 550, loss[loss=0.9648, simple_loss=0.7831, pruned_loss=0.8321, over 13009.00 frames. ], tot_loss[loss=1.002, simple_loss=0.8308, pruned_loss=0.9336, over 2467272.68 frames. ], batch size: 48, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:45:11,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=2090.0, ans=6.30625 +2024-08-03 00:45:25,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.94 vs. 
limit=8.28375 +2024-08-03 00:45:26,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=2090.0, ans=0.121625 +2024-08-03 00:45:29,395 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=26.04 vs. limit=8.28375 +2024-08-03 00:45:41,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=204.79 vs. limit=8.2975 +2024-08-03 00:45:44,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.79 vs. limit=9.094999999999999 +2024-08-03 00:45:52,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2126.6666666666665, ans=0.27873333333333333 +2024-08-03 00:45:53,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=9.094999999999999 +2024-08-03 00:46:01,624 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=27.39 vs. limit=8.31125 +2024-08-03 00:46:01,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=31.17 vs. limit=8.31125 +2024-08-03 00:46:02,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.37 vs. limit=6.081666666666667 +2024-08-03 00:46:14,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=2163.3333333333335, ans=0.39859374999999997 +2024-08-03 00:46:16,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=14.56 vs. limit=8.31125 +2024-08-03 00:46:28,711 INFO [train.py:1114] (0/4) Epoch 1, batch 600, loss[loss=0.9709, simple_loss=0.7976, pruned_loss=0.7912, over 13315.00 frames. ], tot_loss[loss=0.9824, simple_loss=0.8127, pruned_loss=0.8935, over 2507339.53 frames. ], batch size: 46, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:46:49,723 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.983e+01 1.322e+02 1.697e+02 2.206e+02 6.951e+02, threshold=3.394e+02, percent-clipped=10.0 +2024-08-03 00:47:09,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.30 vs. limit=9.1775 +2024-08-03 00:47:31,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.05 vs. limit=5.568333333333333 +2024-08-03 00:47:34,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=33.16 vs. 
limit=8.3525 +2024-08-03 00:47:43,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=2273.3333333333335, ans=6.420833333333333 +2024-08-03 00:47:45,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=32.21 vs. limit=8.36625 +2024-08-03 00:47:54,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2310.0, ans=0.2769 +2024-08-03 00:48:15,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.05 vs. limit=5.586666666666667 +2024-08-03 00:48:16,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=2346.6666666666665, ans=0.07 +2024-08-03 00:48:23,501 INFO [train.py:1114] (0/4) Epoch 1, batch 650, loss[loss=0.8191, simple_loss=0.6903, pruned_loss=0.6137, over 13545.00 frames. ], tot_loss[loss=0.9551, simple_loss=0.7912, pruned_loss=0.8424, over 2542495.30 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:48:24,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.39 vs. limit=9.2875 +2024-08-03 00:48:29,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.39 vs. limit=8.39375 +2024-08-03 00:48:30,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.59 vs. limit=8.39375 +2024-08-03 00:48:30,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.07 vs. limit=5.595833333333333 +2024-08-03 00:48:48,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=2420.0, ans=0.10925 +2024-08-03 00:49:10,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.28 vs. limit=8.42125 +2024-08-03 00:49:14,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.24 vs. limit=9.3425 +2024-08-03 00:49:34,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=2493.3333333333335, ans=0.383125 +2024-08-03 00:49:35,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=2493.3333333333335, ans=0.04949747468305833 +2024-08-03 00:49:48,078 INFO [train.py:1114] (0/4) Epoch 1, batch 700, loss[loss=0.7615, simple_loss=0.651, pruned_loss=0.5406, over 13543.00 frames. ], tot_loss[loss=0.9213, simple_loss=0.7667, pruned_loss=0.785, over 2564577.00 frames. 
], batch size: 35, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:49:48,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=2566.6666666666665, ans=0.04225 +2024-08-03 00:49:53,119 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.309e+01 1.383e+02 1.770e+02 2.360e+02 5.485e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-03 00:50:17,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=8.49 +2024-08-03 00:50:38,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2676.6666666666665, ans=0.27323333333333333 +2024-08-03 00:50:41,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=2713.3333333333335, ans=0.09824999999999999 +2024-08-03 00:50:48,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=9.535 +2024-08-03 00:50:50,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=2750.0, ans=5.6875 +2024-08-03 00:50:50,660 INFO [train.py:1114] (0/4) Epoch 1, batch 750, loss[loss=0.747, simple_loss=0.6446, pruned_loss=0.5101, over 13361.00 frames. ], tot_loss[loss=0.8784, simple_loss=0.7357, pruned_loss=0.7219, over 2582649.01 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:50:57,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.29 vs. limit=3.4125 +2024-08-03 00:51:23,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.89 vs. limit=5.1146666666666665 +2024-08-03 00:51:31,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2823.3333333333335, ans=0.27176666666666666 +2024-08-03 00:51:49,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=2896.6666666666665, ans=0.091375 +2024-08-03 00:51:49,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.40 vs. limit=5.724166666666667 +2024-08-03 00:51:54,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=2896.6666666666665, ans=0.36421875000000004 +2024-08-03 00:52:04,751 INFO [train.py:1114] (0/4) Epoch 1, batch 800, loss[loss=0.5978, simple_loss=0.5345, pruned_loss=0.37, over 13345.00 frames. ], tot_loss[loss=0.8345, simple_loss=0.7043, pruned_loss=0.6609, over 2596980.18 frames. ], batch size: 33, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:52:06,747 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.637e+02 2.042e+02 2.862e+02 4.523e+02, threshold=4.084e+02, percent-clipped=8.0 +2024-08-03 00:52:07,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.86 vs. 
limit=5.733333333333333 +2024-08-03 00:52:08,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=2933.3333333333335, ans=0.244 +2024-08-03 00:52:17,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.90 vs. limit=8.61375 +2024-08-03 00:52:25,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=3006.6666666666665, ans=0.08725 +2024-08-03 00:52:28,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.45 vs. limit=8.6275 +2024-08-03 00:52:30,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=3006.6666666666665, ans=0.08725 +2024-08-03 00:52:42,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=9.754999999999999 +2024-08-03 00:52:55,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.69 vs. limit=9.81 +2024-08-03 00:52:56,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=3080.0, ans=0.09899494936611666 +2024-08-03 00:52:58,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=9.81 +2024-08-03 00:53:10,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=3080.0, ans=0.2192 +2024-08-03 00:53:14,275 INFO [train.py:1114] (0/4) Epoch 1, batch 850, loss[loss=0.7288, simple_loss=0.6399, pruned_loss=0.4659, over 13332.00 frames. ], tot_loss[loss=0.793, simple_loss=0.6749, pruned_loss=0.6051, over 2609151.22 frames. ], batch size: 40, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:53:29,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3116.6666666666665, ans=0.2688333333333333 +2024-08-03 00:53:29,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.04 vs. limit=8.66875 +2024-08-03 00:53:34,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=9.865 +2024-08-03 00:53:47,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3190.0, ans=0.35046875 +2024-08-03 00:53:52,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3190.0, ans=0.35046875 +2024-08-03 00:54:00,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=3190.0, ans=0.028225 +2024-08-03 00:54:11,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.09 vs. 
limit=5.806666666666667 +2024-08-03 00:54:21,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.26 vs. limit=5.815833333333334 +2024-08-03 00:54:45,060 INFO [train.py:1114] (0/4) Epoch 1, batch 900, loss[loss=0.5762, simple_loss=0.5203, pruned_loss=0.3425, over 13339.00 frames. ], tot_loss[loss=0.7553, simple_loss=0.6487, pruned_loss=0.5553, over 2611407.06 frames. ], batch size: 33, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:54:46,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3300.0, ans=0.3453125 +2024-08-03 00:54:47,012 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.709e+02 2.155e+02 3.211e+02 6.364e+02, threshold=4.310e+02, percent-clipped=14.0 +2024-08-03 00:54:55,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=3300.0, ans=0.267 +2024-08-03 00:55:04,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=3336.6666666666665, ans=0.024925000000000003 +2024-08-03 00:55:27,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3373.3333333333335, ans=0.26626666666666665 +2024-08-03 00:55:33,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=3410.0, ans=0.07374999999999998 +2024-08-03 00:55:35,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.76 vs. limit=6.705 +2024-08-03 00:55:49,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.60 vs. limit=5.861666666666666 +2024-08-03 00:56:05,984 INFO [train.py:1114] (0/4) Epoch 1, batch 950, loss[loss=0.5114, simple_loss=0.4709, pruned_loss=0.2888, over 13529.00 frames. ], tot_loss[loss=0.7207, simple_loss=0.6247, pruned_loss=0.511, over 2613211.96 frames. ], batch size: 34, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:56:10,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.97 vs. limit=8.80625 +2024-08-03 00:56:30,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.95 vs. limit=5.870833333333334 +2024-08-03 00:56:42,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=3520.0, ans=0.33499999999999996 +2024-08-03 00:56:52,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=3520.0, ans=0.06799999999999998 +2024-08-03 00:57:20,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.99 vs. 
limit=8.86125 +2024-08-03 00:57:21,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3630.0, ans=0.32984375 +2024-08-03 00:57:21,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.84 vs. limit=5.9075 +2024-08-03 00:57:28,046 INFO [train.py:1114] (0/4) Epoch 1, batch 1000, loss[loss=0.5397, simple_loss=0.4986, pruned_loss=0.3013, over 13361.00 frames. ], tot_loss[loss=0.6917, simple_loss=0.605, pruned_loss=0.4738, over 2611452.88 frames. ], batch size: 35, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:57:32,796 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.579e+02 2.012e+02 2.638e+02 6.886e+02, threshold=4.024e+02, percent-clipped=6.0 +2024-08-03 00:57:36,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3666.6666666666665, ans=0.041666666666666685 +2024-08-03 00:57:58,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=3703.3333333333335, ans=0.7703833333333333 +2024-08-03 00:58:03,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=3703.3333333333335, ans=7.314583333333333 +2024-08-03 00:58:08,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.73 vs. limit=10.305 +2024-08-03 00:58:08,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.00 vs. limit=10.305 +2024-08-03 00:58:21,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.15 vs. limit=6.87 +2024-08-03 00:58:28,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.85 vs. limit=10.3325 +2024-08-03 00:58:34,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=3813.3333333333335, ans=7.383333333333333 +2024-08-03 00:58:39,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3813.3333333333335, ans=0.32125000000000004 +2024-08-03 00:58:39,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.27 vs. limit=10.36 +2024-08-03 00:58:41,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3813.3333333333335, ans=0.32125000000000004 +2024-08-03 00:58:41,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=10.36 +2024-08-03 00:58:48,521 INFO [train.py:1114] (0/4) Epoch 1, batch 1050, loss[loss=0.5438, simple_loss=0.5057, pruned_loss=0.2984, over 13568.00 frames. ], tot_loss[loss=0.6585, simple_loss=0.5819, pruned_loss=0.4357, over 2615145.72 frames. 
], batch size: 39, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:06,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3886.6666666666665, ans=0.26113333333333333 +2024-08-03 00:59:12,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=3886.6666666666665, ans=0.7639666666666667 +2024-08-03 00:59:17,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=3923.3333333333335, ans=0.25885 +2024-08-03 00:59:26,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3923.3333333333335, ans=0.31609375 +2024-08-03 00:59:31,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=3960.0, ans=0.0050000000000000044 +2024-08-03 00:59:48,493 INFO [train.py:1114] (0/4) Epoch 1, batch 1100, loss[loss=0.4912, simple_loss=0.465, pruned_loss=0.259, over 13572.00 frames. ], tot_loss[loss=0.6331, simple_loss=0.5646, pruned_loss=0.4057, over 2619122.09 frames. ], batch size: 36, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:50,398 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.598e+02 2.010e+02 2.726e+02 4.926e+02, threshold=4.021e+02, percent-clipped=7.0 +2024-08-03 01:00:11,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=4033.3333333333335, ans=0.3109375 +2024-08-03 01:00:19,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=4070.0, ans=0.049708333333333334 +2024-08-03 01:00:23,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=4070.0, ans=0.049708333333333334 +2024-08-03 01:00:37,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4143.333333333333, ans=0.30578125 +2024-08-03 01:00:48,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=10.635 +2024-08-03 01:00:55,299 INFO [train.py:1114] (0/4) Epoch 1, batch 1150, loss[loss=0.5668, simple_loss=0.5254, pruned_loss=0.3117, over 13566.00 frames. ], tot_loss[loss=0.6084, simple_loss=0.5477, pruned_loss=0.3781, over 2618944.61 frames. ], batch size: 36, lr: 4.47e-02, grad_scale: 16.0 +2024-08-03 01:01:04,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4216.666666666667, ans=0.30234375 +2024-08-03 01:01:13,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=4253.333333333333, ans=0.009944927536231885 +2024-08-03 01:01:35,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=4326.666666666667, ans=0.2971875 +2024-08-03 01:01:36,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.80 vs. 
limit=10.745000000000001 +2024-08-03 01:01:55,193 INFO [train.py:1114] (0/4) Epoch 1, batch 1200, loss[loss=0.5252, simple_loss=0.5007, pruned_loss=0.2729, over 13573.00 frames. ], tot_loss[loss=0.5904, simple_loss=0.536, pruned_loss=0.357, over 2616350.81 frames. ], batch size: 39, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:01:57,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.679e+02 2.058e+02 2.623e+02 8.489e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-03 01:02:01,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=4400.0, ans=0.29375 +2024-08-03 01:02:12,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=10.8275 +2024-08-03 01:02:13,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4436.666666666667, ans=0.29203124999999996 +2024-08-03 01:02:21,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.42 vs. limit=7.236666666666666 +2024-08-03 01:02:23,509 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.03 vs. limit=10.855 +2024-08-03 01:02:35,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4510.0, ans=0.047875 +2024-08-03 01:02:38,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4510.0, ans=0.2549 +2024-08-03 01:02:50,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.44 vs. limit=10.91 +2024-08-03 01:02:53,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=4583.333333333333, ans=0.7395833333333334 +2024-08-03 01:02:54,142 INFO [train.py:1114] (0/4) Epoch 1, batch 1250, loss[loss=0.5353, simple_loss=0.5054, pruned_loss=0.2836, over 13436.00 frames. ], tot_loss[loss=0.5707, simple_loss=0.5232, pruned_loss=0.3355, over 2628674.40 frames. ], batch size: 42, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:03:13,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=4583.333333333333, ans=0.28515625 +2024-08-03 01:03:24,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.28 vs. limit=6.155 +2024-08-03 01:03:38,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4656.666666666667, ans=0.28171875 +2024-08-03 01:03:41,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.36 vs. 
limit=9.24625 +2024-08-03 01:03:52,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=4693.333333333333, ans=0.04711111111111112 +2024-08-03 01:03:52,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=4693.333333333333, ans=0.28 +2024-08-03 01:04:03,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.13 vs. limit=11.0475 +2024-08-03 01:04:08,951 INFO [train.py:1114] (0/4) Epoch 1, batch 1300, loss[loss=0.5296, simple_loss=0.5076, pruned_loss=0.273, over 12854.00 frames. ], tot_loss[loss=0.5539, simple_loss=0.512, pruned_loss=0.3181, over 2630810.47 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:04:10,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.740e+02 2.083e+02 2.560e+02 4.997e+02, threshold=4.167e+02, percent-clipped=2.0 +2024-08-03 01:04:32,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.23 vs. limit=6.191666666666666 +2024-08-03 01:04:43,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4803.333333333333, ans=0.25196666666666667 +2024-08-03 01:04:49,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=4840.0, ans=0.009817391304347826 +2024-08-03 01:05:05,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.61 vs. limit=7.4383333333333335 +2024-08-03 01:05:06,750 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=5.950666666666667 +2024-08-03 01:05:17,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=4913.333333333333, ans=0.7280333333333333 +2024-08-03 01:05:19,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=4913.333333333333, ans=0.009801449275362318 +2024-08-03 01:05:23,878 INFO [train.py:1114] (0/4) Epoch 1, batch 1350, loss[loss=0.4691, simple_loss=0.4589, pruned_loss=0.2333, over 13547.00 frames. ], tot_loss[loss=0.5363, simple_loss=0.5005, pruned_loss=0.3006, over 2638339.13 frames. ], batch size: 37, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:05:24,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.40 vs. limit=9.35625 +2024-08-03 01:05:49,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=5023.333333333333, ans=0.27535 +2024-08-03 01:06:08,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=5096.666666666667, ans=0.00976159420289855 +2024-08-03 01:06:13,512 INFO [train.py:1114] (0/4) Epoch 1, batch 1400, loss[loss=0.4301, simple_loss=0.4141, pruned_loss=0.2205, over 13273.00 frames. ], tot_loss[loss=0.5215, simple_loss=0.4907, pruned_loss=0.2864, over 2642051.00 frames. 
], batch size: 31, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:06:13,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=5133.333333333333, ans=0.04527777777777778 +2024-08-03 01:06:15,484 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.555e+02 1.828e+02 2.203e+02 3.760e+02, threshold=3.656e+02, percent-clipped=0.0 +2024-08-03 01:06:25,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5170.0, ans=0.25765625000000003 +2024-08-03 01:06:25,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.65 vs. limit=11.3775 +2024-08-03 01:06:31,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=5170.0, ans=0.25765625000000003 +2024-08-03 01:06:34,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5170.0, ans=0.25765625000000003 +2024-08-03 01:06:53,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.87 vs. limit=11.432500000000001 +2024-08-03 01:06:54,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5280.0, ans=0.24719999999999998 +2024-08-03 01:07:05,498 INFO [train.py:1114] (0/4) Epoch 1, batch 1450, loss[loss=0.5145, simple_loss=0.494, pruned_loss=0.2653, over 13431.00 frames. ], tot_loss[loss=0.5106, simple_loss=0.484, pruned_loss=0.2755, over 2641146.65 frames. ], batch size: 43, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:07:13,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=5316.666666666667, ans=0.00971376811594203 +2024-08-03 01:07:17,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.91 vs. limit=9.49375 +2024-08-03 01:07:45,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=5426.666666666667, ans=0.044055555555555556 +2024-08-03 01:07:48,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.98 vs. limit=11.57 +2024-08-03 01:07:54,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=11.5975 +2024-08-03 01:08:00,856 INFO [train.py:1114] (0/4) Epoch 1, batch 1500, loss[loss=0.4495, simple_loss=0.4517, pruned_loss=0.2157, over 13404.00 frames. ], tot_loss[loss=0.4979, simple_loss=0.4761, pruned_loss=0.2638, over 2641037.97 frames. 
], batch size: 39, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:08:14,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.665e+02 2.059e+02 2.727e+02 4.755e+02, threshold=4.117e+02, percent-clipped=2.0 +2024-08-03 01:08:23,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5536.666666666667, ans=0.2446333333333333 +2024-08-03 01:08:30,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.32 vs. limit=7.786666666666667 +2024-08-03 01:08:54,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=5610.0, ans=0.7036500000000001 +2024-08-03 01:08:55,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5610.0, ans=0.23703125000000003 +2024-08-03 01:09:01,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=5610.0, ans=0.1 +2024-08-03 01:09:07,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=5646.666666666667, ans=0.23531249999999998 +2024-08-03 01:09:12,869 INFO [train.py:1114] (0/4) Epoch 1, batch 1550, loss[loss=0.5225, simple_loss=0.5043, pruned_loss=0.2679, over 13403.00 frames. ], tot_loss[loss=0.4883, simple_loss=0.4702, pruned_loss=0.2552, over 2631425.67 frames. ], batch size: 41, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:09:25,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=5720.0, ans=0.025 +2024-08-03 01:09:30,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.14 vs. limit=9.645 +2024-08-03 01:09:47,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.29 vs. limit=11.79 +2024-08-03 01:10:08,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=5793.333333333333, ans=0.04252777777777778 +2024-08-03 01:10:11,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=5830.0, ans=0.025 +2024-08-03 01:10:17,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.32 vs. limit=9.68625 +2024-08-03 01:10:30,131 INFO [train.py:1114] (0/4) Epoch 1, batch 1600, loss[loss=0.442, simple_loss=0.4475, pruned_loss=0.2117, over 13568.00 frames. ], tot_loss[loss=0.4804, simple_loss=0.4652, pruned_loss=0.2482, over 2624933.28 frames. ], batch size: 39, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:10:33,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.694e+02 2.197e+02 2.790e+02 6.281e+02, threshold=4.393e+02, percent-clipped=9.0 +2024-08-03 01:10:34,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. 
limit=9.7
+2024-08-03 01:10:49,541 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:10:57,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=5903.333333333333, ans=0.009586231884057972
+2024-08-03 01:11:08,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=5940.0, ans=0.04191666666666667
+2024-08-03 01:11:19,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=5976.666666666667, ans=0.06264583333333333
+2024-08-03 01:11:23,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5976.666666666667, ans=0.24023333333333333
+2024-08-03 01:11:37,607 INFO [train.py:1114] (0/4) Epoch 1, batch 1650, loss[loss=0.5247, simple_loss=0.5069, pruned_loss=0.2693, over 13330.00 frames. ], tot_loss[loss=0.4712, simple_loss=0.4597, pruned_loss=0.2406, over 2621242.79 frames. ], batch size: 40, lr: 4.45e-02, grad_scale: 32.0
+2024-08-03 01:11:54,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=6086.666666666667, ans=0.21468749999999998
+2024-08-03 01:12:25,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=6196.666666666667, ans=0.009522463768115942
+2024-08-03 01:12:35,776 INFO [train.py:1114] (0/4) Epoch 1, batch 1700, loss[loss=0.3614, simple_loss=0.3749, pruned_loss=0.1686, over 13242.00 frames. ], tot_loss[loss=0.4615, simple_loss=0.4545, pruned_loss=0.2325, over 2630393.23 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0
+2024-08-03 01:12:37,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.541e+02 1.894e+02 2.425e+02 4.300e+02, threshold=3.787e+02, percent-clipped=0.0
+2024-08-03 01:12:41,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=6.493333333333333
+2024-08-03 01:12:45,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=6233.333333333333, ans=0.2078125
+2024-08-03 01:12:45,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=6233.333333333333, ans=0.2078125
+2024-08-03 01:12:47,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.86 vs. limit=12.175
+2024-08-03 01:12:48,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=6270.0, ans=0.009506521739130434
+2024-08-03 01:12:51,733 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:12:53,714 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:13:06,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=6306.666666666667, ans=0.04038888888888889
+2024-08-03 01:13:07,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.98 vs. limit=12.2575
+2024-08-03 01:13:12,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=6343.333333333333, ans=0.009490579710144928
+2024-08-03 01:13:15,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=6343.333333333333, ans=0.04023611111111111
+2024-08-03 01:13:26,862 INFO [train.py:1114] (0/4) Epoch 1, batch 1750, loss[loss=0.3936, simple_loss=0.4054, pruned_loss=0.1866, over 13541.00 frames. ], tot_loss[loss=0.4537, simple_loss=0.4499, pruned_loss=0.2263, over 2633121.88 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0
+2024-08-03 01:13:36,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=6453.333333333333, ans=0.1975
+2024-08-03 01:13:48,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.36 vs. limit=12.34
+2024-08-03 01:13:48,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.72 vs. limit=9.92
+2024-08-03 01:13:51,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=6490.0, ans=0.19578125000000002
+2024-08-03 01:13:55,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=6490.0, ans=0.009458695652173913
+2024-08-03 01:13:55,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.34 vs. limit=9.93375
+2024-08-03 01:13:59,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=6490.0, ans=0.19578125000000002
+2024-08-03 01:14:01,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.66 vs. limit=6.631666666666667
+2024-08-03 01:14:24,191 INFO [train.py:1114] (0/4) Epoch 1, batch 1800, loss[loss=0.3984, simple_loss=0.4262, pruned_loss=0.1803, over 13551.00 frames. ], tot_loss[loss=0.4483, simple_loss=0.4475, pruned_loss=0.2219, over 2634721.19 frames. ], batch size: 38, lr: 4.44e-02, grad_scale: 32.0
+2024-08-03 01:14:26,047 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.670e+02 2.044e+02 2.499e+02 4.845e+02, threshold=4.088e+02, percent-clipped=4.0
+2024-08-03 01:14:36,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=6600.0, ans=0.009434782608695652
+2024-08-03 01:14:40,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.61 vs. limit=9.98875
+2024-08-03 01:14:40,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.66 vs. limit=6.659166666666667
+2024-08-03 01:15:01,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=6673.333333333333, ans=0.1871875
+2024-08-03 01:15:04,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=6673.333333333333, ans=0.1871875
+2024-08-03 01:15:14,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.73 vs. limit=8.355
+2024-08-03 01:15:15,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=6710.0, ans=10.01625
+2024-08-03 01:15:30,697 INFO [train.py:1114] (0/4) Epoch 1, batch 1850, loss[loss=0.418, simple_loss=0.4383, pruned_loss=0.1957, over 13399.00 frames. ], tot_loss[loss=0.4401, simple_loss=0.4427, pruned_loss=0.2159, over 2637482.50 frames. ], batch size: 39, lr: 4.43e-02, grad_scale: 32.0
+2024-08-03 01:15:30,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=6783.333333333333, ans=0.6625833333333333
+2024-08-03 01:15:32,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.64 vs. limit=6.695833333333333
+2024-08-03 01:15:41,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=6820.0, ans=0.009386956521739131
+2024-08-03 01:15:55,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=6856.666666666667, ans=0.17859375
+2024-08-03 01:16:01,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6893.333333333333, ans=0.23106666666666664
+2024-08-03 01:16:25,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=12.6975
+2024-08-03 01:16:31,447 INFO [train.py:1114] (0/4) Epoch 1, batch 1900, loss[loss=0.4128, simple_loss=0.4466, pruned_loss=0.1867, over 13321.00 frames. ], tot_loss[loss=0.4368, simple_loss=0.4415, pruned_loss=0.2134, over 2639555.54 frames. ], batch size: 40, lr: 4.43e-02, grad_scale: 32.0
+2024-08-03 01:16:35,170 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.757e+02 2.130e+02 2.546e+02 5.245e+02, threshold=4.259e+02, percent-clipped=2.0
+2024-08-03 01:16:35,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=6966.666666666667, ans=0.17343750000000002
+2024-08-03 01:16:36,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=6966.666666666667, ans=0.6561666666666667
+2024-08-03 01:16:48,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=6966.666666666667, ans=0.6561666666666667
+2024-08-03 01:16:50,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=6966.666666666667, ans=0.3045
+2024-08-03 01:17:24,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=10.1675
+2024-08-03 01:17:34,516 INFO [train.py:1114] (0/4) Epoch 1, batch 1950, loss[loss=0.3615, simple_loss=0.403, pruned_loss=0.1586, over 13553.00 frames. ], tot_loss[loss=0.4319, simple_loss=0.4402, pruned_loss=0.2094, over 2646470.02 frames. ], batch size: 36, lr: 4.43e-02, grad_scale: 32.0
+2024-08-03 01:17:36,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=7150.0, ans=0.16484375
+2024-08-03 01:17:47,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.84 vs. limit=8.593333333333334
+2024-08-03 01:17:50,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=7186.666666666667, ans=0.6484666666666667
+2024-08-03 01:18:12,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=7223.333333333333, ans=0.16140624999999997
+2024-08-03 01:18:13,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.04 vs. limit=10.20875
+2024-08-03 01:18:35,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=7296.666666666667, ans=0.035
+2024-08-03 01:18:42,886 INFO [train.py:1114] (0/4) Epoch 1, batch 2000, loss[loss=0.3368, simple_loss=0.3681, pruned_loss=0.1527, over 13555.00 frames. ], tot_loss[loss=0.4274, simple_loss=0.4382, pruned_loss=0.2063, over 2636553.84 frames. ], batch size: 31, lr: 4.42e-02, grad_scale: 32.0
+2024-08-03 01:18:44,711 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.650e+02 1.978e+02 2.674e+02 4.949e+02, threshold=3.955e+02, percent-clipped=2.0
+2024-08-03 01:19:10,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.52 vs. limit=13.0275
+2024-08-03 01:19:22,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=10.2775
+2024-08-03 01:19:22,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=10.2775
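The optim.py warnings interleaved with the batch summaries above record the quartiles of recently observed gradient norms together with the clipping threshold actually applied; in each entry the threshold works out to Clipping_scale times the middle quartile (for example 2.0 × 2.044e+02 = 4.088e+02 in the warning after batch 1800). A minimal sketch of such a median-based clipping rule, with illustrative names rather than icefall's actual API:

```python
# Hedged sketch of a quartile-based gradient-clipping rule consistent with
# the optim.py warnings in this log. Names are hypothetical, not icefall's.
import torch

def clipping_threshold(recent_grad_norms, clipping_scale=2.0):
    """Threshold = clipping_scale * median of recently observed grad norms."""
    norms = torch.tensor(recent_grad_norms)
    quartiles = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    # e.g. median 2.044e+02 with scale 2.0 gives threshold 4.088e+02,
    # matching the logged "threshold=4.088e+02" above
    return clipping_scale * quartiles[2].item()

def clip_(grad: torch.Tensor, threshold: float) -> None:
    """Rescale the gradient in place if its norm exceeds the threshold."""
    norm = grad.norm()
    if norm > threshold:
        grad.mul_(threshold / norm)
```

The percent-clipped field would then simply be the fraction of recent steps whose gradient norm exceeded that threshold.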
+2024-08-03 01:19:24,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=7406.666666666667, ans=6.962666666666667
+2024-08-03 01:19:30,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=7443.333333333333, ans=9.652083333333334
+2024-08-03 01:19:33,780 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:19:44,702 INFO [train.py:1114] (0/4) Epoch 1, batch 2050, loss[loss=0.3475, simple_loss=0.3739, pruned_loss=0.1606, over 13419.00 frames. ], tot_loss[loss=0.4218, simple_loss=0.4347, pruned_loss=0.203, over 2632673.99 frames. ], batch size: 32, lr: 4.42e-02, grad_scale: 64.0
+2024-08-03 01:19:46,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7516.666666666667, ans=0.14765625
+2024-08-03 01:19:49,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=7516.666666666667, ans=0.035347222222222224
+2024-08-03 01:20:01,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=7553.333333333333, ans=0.009227536231884059
+2024-08-03 01:20:22,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=7590.0, ans=0.31385
+2024-08-03 01:20:27,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=10.36
+2024-08-03 01:20:38,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=7626.666666666667, ans=0.14250000000000002
+2024-08-03 01:20:47,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.89 vs. limit=13.247499999999999
+2024-08-03 01:20:54,839 INFO [train.py:1114] (0/4) Epoch 1, batch 2100, loss[loss=0.3441, simple_loss=0.3834, pruned_loss=0.1524, over 13534.00 frames. ], tot_loss[loss=0.416, simple_loss=0.4315, pruned_loss=0.1991, over 2638560.17 frames. ], batch size: 37, lr: 4.42e-02, grad_scale: 8.0
+2024-08-03 01:20:59,392 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.800e+02 2.088e+02 2.971e+02 6.141e+02, threshold=4.177e+02, percent-clipped=15.0
+2024-08-03 01:21:02,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=4.155
+2024-08-03 01:21:09,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.58 vs. limit=6.934166666666667
+2024-08-03 01:21:21,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=7773.333333333333, ans=0.03427777777777778
+2024-08-03 01:21:32,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=7810.0, ans=0.13390625
+2024-08-03 01:21:38,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=7846.666666666667, ans=0.1321875
+2024-08-03 01:21:56,436 INFO [train.py:1114] (0/4) Epoch 1, batch 2150, loss[loss=0.3816, simple_loss=0.4113, pruned_loss=0.1759, over 13554.00 frames. ], tot_loss[loss=0.4104, simple_loss=0.4283, pruned_loss=0.1953, over 2647865.20 frames. ], batch size: 36, lr: 4.41e-02, grad_scale: 8.0
+2024-08-03 01:21:57,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=7883.333333333333, ans=0.13046875000000002
+2024-08-03 01:22:27,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=7993.333333333333, ans=0.009131884057971015
+2024-08-03 01:22:38,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=8030.0, ans=0.125
+2024-08-03 01:22:46,515 INFO [train.py:1114] (0/4) Epoch 1, batch 2200, loss[loss=0.4342, simple_loss=0.4593, pruned_loss=0.2046, over 13403.00 frames. ], tot_loss[loss=0.4074, simple_loss=0.4269, pruned_loss=0.1932, over 2645586.38 frames. ], batch size: 39, lr: 4.41e-02, grad_scale: 8.0
+2024-08-03 01:22:51,103 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.590e+02 1.905e+02 2.323e+02 5.165e+02, threshold=3.810e+02, percent-clipped=4.0
+2024-08-03 01:23:04,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.97 vs. limit=10.5525
+2024-08-03 01:23:07,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=10.5525
+2024-08-03 01:23:12,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.37 vs. limit=9.07
+2024-08-03 01:23:27,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.81 vs. limit=13.6325
+2024-08-03 01:23:30,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.85 vs. limit=13.6325
+2024-08-03 01:23:43,781 INFO [train.py:1114] (0/4) Epoch 1, batch 2250, loss[loss=0.4031, simple_loss=0.4373, pruned_loss=0.1845, over 13356.00 frames. ], tot_loss[loss=0.4036, simple_loss=0.425, pruned_loss=0.1906, over 2643144.15 frames. ], batch size: 37, lr: 4.40e-02, grad_scale: 8.0
+2024-08-03 01:23:47,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=8250.0, ans=0.125
+2024-08-03 01:23:48,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8250.0, ans=0.2175
+2024-08-03 01:23:48,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8250.0, ans=0.2175
+2024-08-03 01:23:50,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.11 vs. limit=7.3
+2024-08-03 01:23:51,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8250.0, ans=0.2175
+2024-08-03 01:23:53,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=8286.666666666666, ans=0.025
+2024-08-03 01:24:15,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=8360.0, ans=0.125
+2024-08-03 01:24:28,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=8396.666666666666, ans=0.125
+2024-08-03 01:24:34,663 INFO [train.py:1114] (0/4) Epoch 1, batch 2300, loss[loss=0.3185, simple_loss=0.3589, pruned_loss=0.139, over 13581.00 frames. ], tot_loss[loss=0.3985, simple_loss=0.4214, pruned_loss=0.1873, over 2639625.94 frames. ], batch size: 33, lr: 4.40e-02, grad_scale: 8.0
+2024-08-03 01:24:51,214 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.913e+02 2.281e+02 2.883e+02 4.389e+02, threshold=4.562e+02, percent-clipped=6.0
+2024-08-03 01:24:56,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.59 vs. limit=9.235
+2024-08-03 01:24:58,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=8470.0, ans=0.125
+2024-08-03 01:25:00,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=8470.0, ans=0.031375
+2024-08-03 01:25:05,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=8506.666666666666, ans=0.125
+2024-08-03 01:25:12,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=8506.666666666666, ans=0.21493333333333334
+2024-08-03 01:25:16,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.73 vs. limit=9.271666666666668
+2024-08-03 01:25:18,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=8543.333333333334, ans=0.125
+2024-08-03 01:25:20,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=8543.333333333334, ans=0.031069444444444445
+2024-08-03 01:25:29,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=8580.0, ans=0.5997
+2024-08-03 01:25:33,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=8616.666666666666, ans=9.308333333333334
+2024-08-03 01:25:33,507 INFO [train.py:1114] (0/4) Epoch 1, batch 2350, loss[loss=0.3953, simple_loss=0.4303, pruned_loss=0.1802, over 13546.00 frames. ], tot_loss[loss=0.3958, simple_loss=0.4203, pruned_loss=0.1853, over 2641566.09 frames. ], batch size: 38, lr: 4.40e-02, grad_scale: 8.0
+2024-08-03 01:25:38,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=10.73125
+2024-08-03 01:25:41,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=8616.666666666666, ans=0.025
+2024-08-03 01:25:50,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8616.666666666666, ans=0.21383333333333332
+2024-08-03 01:26:05,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=8653.333333333334, ans=0.03061111111111111
+2024-08-03 01:26:15,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.38 vs. limit=9.344999999999999
+2024-08-03 01:26:20,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=8726.666666666666, ans=0.025
+2024-08-03 01:26:29,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=8763.333333333334, ans=0.125
+2024-08-03 01:26:36,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=8763.333333333334, ans=0.125
+2024-08-03 01:26:38,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=8800.0, ans=0.125
+2024-08-03 01:26:39,407 INFO [train.py:1114] (0/4) Epoch 1, batch 2400, loss[loss=0.4037, simple_loss=0.4182, pruned_loss=0.1946, over 13550.00 frames. ], tot_loss[loss=0.3934, simple_loss=0.4194, pruned_loss=0.1835, over 2643072.02 frames. ], batch size: 35, lr: 4.39e-02, grad_scale: 16.0
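The ScheduledFloat entries above report the current value (ans) of a schedule-controlled hyperparameter, such as a skip rate, dropout probability, or balancer bound, as a function of batch_count; the values drift smoothly as training progresses, consistent with a piecewise-linear schedule over batch count. A hedged sketch of one such schedule (hypothetical class, not icefall's implementation):

```python
# Illustrative piecewise-linear schedule keyed on batch count, similar in
# spirit to the ScheduledFloat values logged above. Hypothetical names.
class PiecewiseLinear:
    def __init__(self, *points):
        # points: (batch_count, value) breakpoints, e.g. (0.0, 0.1), (20000.0, 0.01)
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)  # linear interpolation

# e.g. a conv_skip_rate that decays from 0.1 to 0.01 over the first 20k batches:
conv_skip_rate = PiecewiseLinear((0.0, 0.1), (20000.0, 0.01))
print(conv_skip_rate(8250.0))
```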
+2024-08-03 01:26:41,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=8800.0, ans=0.030000000000000002
+2024-08-03 01:26:44,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.562e+02 1.774e+02 2.172e+02 5.136e+02, threshold=3.548e+02, percent-clipped=1.0
+2024-08-03 01:26:55,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=8836.666666666666, ans=0.008948550724637681
+2024-08-03 01:27:01,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=8873.333333333334, ans=0.029694444444444443
+2024-08-03 01:27:02,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=8873.333333333334, ans=0.125
+2024-08-03 01:27:21,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=8946.666666666666, ans=0.029388888888888895
+2024-08-03 01:27:26,946 INFO [train.py:1114] (0/4) Epoch 1, batch 2450, loss[loss=0.3996, simple_loss=0.4246, pruned_loss=0.1873, over 13356.00 frames. ], tot_loss[loss=0.3945, simple_loss=0.4207, pruned_loss=0.184, over 2632668.16 frames. ], batch size: 37, lr: 4.39e-02, grad_scale: 16.0
+2024-08-03 01:27:40,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=9020.0, ans=0.125
+2024-08-03 01:27:52,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=9056.666666666666, ans=0.025
+2024-08-03 01:28:00,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9093.333333333334, ans=0.20906666666666665
+2024-08-03 01:28:04,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.51 vs. limit=10.92375
+2024-08-03 01:28:14,004 INFO [train.py:1114] (0/4) Epoch 1, batch 2500, loss[loss=0.3906, simple_loss=0.425, pruned_loss=0.1781, over 13389.00 frames. ], tot_loss[loss=0.3922, simple_loss=0.4194, pruned_loss=0.1823, over 2636756.01 frames. ], batch size: 39, lr: 4.38e-02, grad_scale: 16.0
+2024-08-03 01:28:18,362 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.729e+02 1.988e+02 2.684e+02 1.225e+03, threshold=3.975e+02, percent-clipped=8.0
+2024-08-03 01:28:26,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=4.3805
+2024-08-03 01:28:27,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9203.333333333334, ans=0.20796666666666663
+2024-08-03 01:28:28,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=9203.333333333334, ans=0.025
+2024-08-03 01:28:40,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=9276.666666666666, ans=0.07
+2024-08-03 01:28:43,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=9276.666666666666, ans=0.125
+2024-08-03 01:28:46,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=9276.666666666666, ans=0.028013888888888894
+2024-08-03 01:28:58,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=9313.333333333334, ans=0.008844927536231884
+2024-08-03 01:28:59,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9313.333333333334, ans=0.20686666666666664
+2024-08-03 01:29:01,483 INFO [train.py:1114] (0/4) Epoch 1, batch 2550, loss[loss=0.3693, simple_loss=0.3943, pruned_loss=0.1722, over 13540.00 frames. ], tot_loss[loss=0.3885, simple_loss=0.4175, pruned_loss=0.1796, over 2638793.82 frames. ], batch size: 31, lr: 4.38e-02, grad_scale: 16.0
+2024-08-03 01:29:07,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=9350.0, ans=10.0
+2024-08-03 01:29:22,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9423.333333333334, ans=0.20576666666666665
+2024-08-03 01:29:25,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=9423.333333333334, ans=0.02740277777777778
+2024-08-03 01:29:38,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=9496.666666666666, ans=0.027097222222222227
+2024-08-03 01:29:48,468 INFO [train.py:1114] (0/4) Epoch 1, batch 2600, loss[loss=0.3627, simple_loss=0.3904, pruned_loss=0.1675, over 13548.00 frames. ], tot_loss[loss=0.3883, simple_loss=0.4177, pruned_loss=0.1794, over 2637936.07 frames. ], batch size: 36, lr: 4.37e-02, grad_scale: 16.0
+2024-08-03 01:29:51,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=9533.333333333334, ans=0.09899494936611666
+2024-08-03 01:29:52,587 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.76 vs. limit=14.65
+2024-08-03 01:29:52,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.601e+02 1.881e+02 2.405e+02 3.900e+02, threshold=3.763e+02, percent-clipped=0.0
+2024-08-03 01:29:53,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.48 vs. limit=7.8133333333333335
+2024-08-03 01:29:59,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.09 vs. limit=14.6775
+2024-08-03 01:30:03,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=9570.0, ans=0.125
+2024-08-03 01:30:33,681 INFO [train.py:1114] (0/4) Epoch 1, batch 2650, loss[loss=0.4035, simple_loss=0.4355, pruned_loss=0.1858, over 13316.00 frames. ], tot_loss[loss=0.3864, simple_loss=0.4169, pruned_loss=0.1779, over 2641477.84 frames. ], batch size: 46, lr: 4.37e-02, grad_scale: 16.0
+2024-08-03 01:30:33,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=9716.666666666666, ans=0.5599166666666667
+2024-08-03 01:30:42,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=9753.333333333334, ans=0.5586333333333333
+2024-08-03 01:30:42,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=9753.333333333334, ans=0.8475333333333334
+2024-08-03 01:30:43,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=9753.333333333334, ans=0.026027777777777778
+2024-08-03 01:30:54,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.03 vs. limit=11.17125
+2024-08-03 01:30:57,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=9790.0, ans=0.125
+2024-08-03 01:30:58,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.43 vs. limit=11.17125
+2024-08-03 01:31:01,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=11.185
+2024-08-03 01:31:04,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=9826.666666666666, ans=0.5560666666666667
+2024-08-03 01:31:09,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=9863.333333333334, ans=0.5547833333333334
+2024-08-03 01:31:10,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=9863.333333333334, ans=0.025
+2024-08-03 01:31:20,740 INFO [train.py:1114] (0/4) Epoch 1, batch 2700, loss[loss=0.3682, simple_loss=0.417, pruned_loss=0.1597, over 13535.00 frames. ], tot_loss[loss=0.3845, simple_loss=0.416, pruned_loss=0.1764, over 2638490.04 frames. ], batch size: 40, lr: 4.36e-02, grad_scale: 16.0
+2024-08-03 01:31:21,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=9900.0, ans=0.02541666666666667
+2024-08-03 01:31:25,954 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.664e+02 1.951e+02 2.469e+02 5.181e+02, threshold=3.901e+02, percent-clipped=9.0
+2024-08-03 01:31:34,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9936.666666666666, ans=0.125
+2024-08-03 01:31:56,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9973.333333333334, ans=0.125
+2024-08-03 01:32:02,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10010.0, ans=0.1999
+2024-08-03 01:32:04,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=10010.0, ans=0.0
+2024-08-03 01:32:05,246 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=11.25375
+2024-08-03 01:32:17,819 INFO [train.py:1114] (0/4) Epoch 1, batch 2750, loss[loss=0.416, simple_loss=0.4325, pruned_loss=0.1997, over 13335.00 frames. ], tot_loss[loss=0.3816, simple_loss=0.4138, pruned_loss=0.1746, over 2635378.27 frames. ], batch size: 34, lr: 4.36e-02, grad_scale: 16.0
+2024-08-03 01:32:20,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=10083.333333333334, ans=0.07
+2024-08-03 01:32:23,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=10083.333333333334, ans=0.125
+2024-08-03 01:32:54,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10193.333333333334, ans=0.0
+2024-08-03 01:33:04,964 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:33:08,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=10230.0, ans=0.54195
+2024-08-03 01:33:10,604 INFO [train.py:1114] (0/4) Epoch 1, batch 2800, loss[loss=0.4693, simple_loss=0.4564, pruned_loss=0.2411, over 8895.00 frames. ], tot_loss[loss=0.381, simple_loss=0.4136, pruned_loss=0.1742, over 2625649.82 frames. ], batch size: 97, lr: 4.36e-02, grad_scale: 32.0
+2024-08-03 01:33:12,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=10266.666666666666, ans=0.5406666666666667
+2024-08-03 01:33:15,326 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.678e+02 2.174e+02 2.677e+02 5.163e+02, threshold=4.348e+02, percent-clipped=2.0
+2024-08-03 01:33:45,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=10376.666666666666, ans=0.5368166666666667
+2024-08-03 01:35:19,838 INFO [train.py:1114] (0/4) Epoch 1, batch 2850, loss[loss=0.35, simple_loss=0.3955, pruned_loss=0.1522, over 13371.00 frames. ], tot_loss[loss=0.3819, simple_loss=0.4144, pruned_loss=0.1747, over 2619539.64 frames. ], batch size: 35, lr: 4.35e-02, grad_scale: 32.0
+2024-08-03 01:35:42,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=10486.666666666666, ans=0.022972222222222227
+2024-08-03 01:35:46,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=4.573
+2024-08-03 01:35:48,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10523.333333333334, ans=0.0
+2024-08-03 01:35:54,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=10523.333333333334, ans=0.02281944444444444
+2024-08-03 01:36:03,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=10560.0, ans=0.07
+2024-08-03 01:36:14,791 INFO [train.py:1114] (0/4) Epoch 1, batch 2900, loss[loss=0.3325, simple_loss=0.3838, pruned_loss=0.1407, over 13384.00 frames. ], tot_loss[loss=0.38, simple_loss=0.4142, pruned_loss=0.1729, over 2630724.40 frames. ], batch size: 36, lr: 4.35e-02, grad_scale: 32.0
+2024-08-03 01:36:19,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.316e+02 1.668e+02 1.982e+02 2.661e+02 5.002e+02, threshold=3.964e+02, percent-clipped=4.0
+2024-08-03 01:36:24,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.69 vs. limit=15.5025
+2024-08-03 01:36:34,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=10706.666666666666, ans=0.02205555555555556
+2024-08-03 01:37:00,013 INFO [train.py:1114] (0/4) Epoch 1, batch 2950, loss[loss=0.358, simple_loss=0.3826, pruned_loss=0.1667, over 13319.00 frames. ], tot_loss[loss=0.3761, simple_loss=0.4107, pruned_loss=0.1708, over 2629651.51 frames. ], batch size: 34, lr: 4.34e-02, grad_scale: 32.0
+2024-08-03 01:37:07,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=10816.666666666666, ans=0.021597222222222226
+2024-08-03 01:37:53,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=10853.333333333334, ans=0.125
+2024-08-03 01:37:54,639 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. limit=11.58375
+2024-08-03 01:37:57,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=10890.0, ans=0.008502173913043478
+2024-08-03 01:38:01,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.68 vs. limit=15.6675
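Each "Whitening: ... metric=X vs. limit=Y" entry above compares a measured whiteness statistic of some activation against its scheduled whitening_limit. One plausible formulation of such a metric, equal to 1.0 when the feature covariance has equal eigenvalues and growing as a few directions dominate, is sketched below; the helper names are hypothetical and this is not icefall's Whiten module:

```python
# Hedged sketch of a whitening-style check: measure how far the feature
# covariance is from being proportional to the identity, and only apply a
# penalty when the metric exceeds its scheduled limit. Hypothetical names.
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """x: (num_frames, num_channels). Returns mean(eig^2) / mean(eig)^2 of the
    covariance; 1.0 means perfectly white, larger means less white."""
    x = x - x.mean(dim=0)
    cov = (x.t() @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    return (eigs ** 2).mean() / eigs.mean() ** 2

def whitening_penalty(x: torch.Tensor, limit: float) -> torch.Tensor:
    # zero when the metric is under the limit, positive (and differentiable
    # in x) once it exceeds the limit
    return torch.relu(whitening_metric(x) - limit)
```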
+2024-08-03 01:38:06,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=10926.666666666666, ans=0.14073333333333332
+2024-08-03 01:38:10,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10926.666666666666, ans=0.19073333333333334
+2024-08-03 01:38:14,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=10963.333333333334, ans=4.6445
+2024-08-03 01:38:15,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=10963.333333333334, ans=0.09899494936611666
+2024-08-03 01:38:19,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=10963.333333333334, ans=0.125
+2024-08-03 01:38:20,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.05 vs. limit=8.385333333333334
+2024-08-03 01:38:21,966 INFO [train.py:1114] (0/4) Epoch 1, batch 3000, loss[loss=0.3368, simple_loss=0.3897, pruned_loss=0.142, over 13534.00 frames. ], tot_loss[loss=0.3743, simple_loss=0.4095, pruned_loss=0.1695, over 2629512.14 frames. ], batch size: 37, lr: 4.34e-02, grad_scale: 32.0
+2024-08-03 01:38:21,967 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 01:39:09,357 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.5992, 1.4445, 2.5730, 2.6294], device='cuda:0')
+2024-08-03 01:39:11,531 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.8561, 2.1122, 2.8810, 3.1174], device='cuda:0')
+2024-08-03 01:39:12,984 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=0.2888, simple_loss=0.3696, pruned_loss=0.104, over 944034.00 frames.
+2024-08-03 01:39:12,984 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 01:39:16,944 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:39:17,652 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.298e+02 1.604e+02 1.963e+02 2.352e+02 4.798e+02, threshold=3.927e+02, percent-clipped=2.0
+2024-08-03 01:39:35,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.37 vs. limit=11.6525
+2024-08-03 01:39:42,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=11110.0, ans=0.125
+2024-08-03 01:39:43,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=11110.0, ans=0.125
+2024-08-03 01:39:52,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=11146.666666666666, ans=0.125
+2024-08-03 01:40:03,057 INFO [train.py:1114] (0/4) Epoch 1, batch 3050, loss[loss=0.4135, simple_loss=0.4251, pruned_loss=0.201, over 13524.00 frames. ], tot_loss[loss=0.3745, simple_loss=0.4099, pruned_loss=0.1695, over 2627644.92 frames. ], batch size: 35, lr: 4.33e-02, grad_scale: 32.0
+2024-08-03 01:40:35,768 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=7.847e-03
+2024-08-03 01:40:37,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=11293.333333333334, ans=0.019611111111111107
+2024-08-03 01:40:37,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11293.333333333334, ans=0.18706666666666666
+2024-08-03 01:40:38,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=11293.333333333334, ans=0.125
+2024-08-03 01:40:47,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.58 vs. limit=11.748750000000001
+2024-08-03 01:40:55,292 INFO [train.py:1114] (0/4) Epoch 1, batch 3100, loss[loss=0.4039, simple_loss=0.4343, pruned_loss=0.1868, over 13321.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.4086, pruned_loss=0.1683, over 2626571.36 frames. ], batch size: 46, lr: 4.33e-02, grad_scale: 32.0
+2024-08-03 01:40:59,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.612e+02 1.933e+02 2.547e+02 5.853e+02, threshold=3.866e+02, percent-clipped=4.0
+2024-08-03 01:41:13,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=11403.333333333334, ans=0.019152777777777776
+2024-08-03 01:41:14,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=11.776250000000001
+2024-08-03 01:41:19,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=8.576
+2024-08-03 01:41:21,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.15 vs. limit=8.576
+2024-08-03 01:41:36,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.27 vs. limit=10.738333333333333
+2024-08-03 01:41:42,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=11476.666666666666, ans=0.125
+2024-08-03 01:41:46,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=11513.333333333334, ans=0.125
+2024-08-03 01:41:53,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=4.7325
+2024-08-03 01:41:54,274 INFO [train.py:1114] (0/4) Epoch 1, batch 3150, loss[loss=0.3876, simple_loss=0.426, pruned_loss=0.1746, over 13029.00 frames. ], tot_loss[loss=0.3722, simple_loss=0.4086, pruned_loss=0.1679, over 2628881.92 frames. ], batch size: 48, lr: 4.32e-02, grad_scale: 32.0
+2024-08-03 01:42:17,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11623.333333333334, ans=0.18376666666666666
+2024-08-03 01:42:27,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=11660.0, ans=0.49190000000000006
+2024-08-03 01:42:28,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=11660.0, ans=0.125
+2024-08-03 01:42:29,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=11660.0, ans=10.0
+2024-08-03 01:42:35,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=11696.666666666666, ans=0.025
+2024-08-03 01:42:41,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11696.666666666666, ans=0.18303333333333333
+2024-08-03 01:42:44,255 INFO [train.py:1114] (0/4) Epoch 1, batch 3200, loss[loss=0.334, simple_loss=0.3816, pruned_loss=0.1432, over 13533.00 frames. ], tot_loss[loss=0.3699, simple_loss=0.4069, pruned_loss=0.1664, over 2634568.62 frames. ], batch size: 37, lr: 4.32e-02, grad_scale: 32.0
+2024-08-03 01:42:48,503 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.624e+02 1.966e+02 2.680e+02 4.372e+02, threshold=3.932e+02, percent-clipped=2.0
+2024-08-03 01:42:52,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=11770.0, ans=0.48805000000000004
+2024-08-03 01:42:55,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=11770.0, ans=0.008310869565217392
+2024-08-03 01:43:03,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=11770.0, ans=0.008310869565217392
+2024-08-03 01:43:10,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=11806.666666666666, ans=0.125
+2024-08-03 01:43:26,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.14 vs. limit=11.955
+2024-08-03 01:43:31,266 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.29 vs. limit=8.751999999999999
+2024-08-03 01:43:33,493 INFO [train.py:1114] (0/4) Epoch 1, batch 3250, loss[loss=0.3613, simple_loss=0.4148, pruned_loss=0.154, over 13382.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4059, pruned_loss=0.165, over 2639474.69 frames. ], batch size: 38, lr: 4.31e-02, grad_scale: 32.0
+2024-08-03 01:43:34,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.06 vs. limit=16.4375
+2024-08-03 01:43:47,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=11953.333333333334, ans=0.025
+2024-08-03 01:44:01,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=12026.666666666666, ans=0.08875333333333334
+2024-08-03 01:44:17,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=12100.0, ans=0.04949747468305833
+2024-08-03 01:44:18,531 INFO [train.py:1114] (0/4) Epoch 1, batch 3300, loss[loss=0.3671, simple_loss=0.4112, pruned_loss=0.1615, over 12923.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.404, pruned_loss=0.1634, over 2641039.03 frames. ], batch size: 52, lr: 4.31e-02, grad_scale: 32.0
+2024-08-03 01:44:21,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=12100.0, ans=0.125
+2024-08-03 01:44:22,866 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.557e+02 1.877e+02 2.344e+02 4.156e+02, threshold=3.753e+02, percent-clipped=2.0
+2024-08-03 01:44:23,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=12100.0, ans=0.125
+2024-08-03 01:44:27,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12136.666666666666, ans=0.17863333333333334
+2024-08-03 01:44:29,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=12136.666666666666, ans=0.125
+2024-08-03 01:44:31,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=12136.666666666666, ans=0.016097222222222228
+2024-08-03 01:44:32,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.00 vs. limit=16.6025
+2024-08-03 01:44:47,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=12210.0, ans=0.0
+2024-08-03 01:44:57,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.46 vs. limit=12.092500000000001
+2024-08-03 01:45:02,671 INFO [train.py:1114] (0/4) Epoch 1, batch 3350, loss[loss=0.3862, simple_loss=0.4278, pruned_loss=0.1723, over 13018.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.4041, pruned_loss=0.1631, over 2630714.42 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0
+2024-08-03 01:45:18,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=12320.0, ans=0.0
+2024-08-03 01:45:23,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.12 vs. limit=16.7675
+2024-08-03 01:45:43,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=12430.0, ans=16.822499999999998
+2024-08-03 01:45:47,883 INFO [train.py:1114] (0/4) Epoch 1, batch 3400, loss[loss=0.2682, simple_loss=0.3253, pruned_loss=0.1056, over 13543.00 frames. ], tot_loss[loss=0.3638, simple_loss=0.4028, pruned_loss=0.1624, over 2626146.07 frames. ], batch size: 31, lr: 4.29e-02, grad_scale: 32.0
+2024-08-03 01:45:52,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.662e+02 2.017e+02 2.620e+02 5.936e+02, threshold=4.033e+02, percent-clipped=10.0
+2024-08-03 01:45:53,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12466.666666666666, ans=0.125
+2024-08-03 01:45:54,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12466.666666666666, ans=0.17533333333333334
+2024-08-03 01:46:21,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12503.333333333334, ans=0.17496666666666666
+2024-08-03 01:46:48,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=12576.666666666666, ans=0.008135507246376811
+2024-08-03 01:47:02,913 INFO [train.py:1114] (0/4) Epoch 1, batch 3450, loss[loss=0.3971, simple_loss=0.4369, pruned_loss=0.1786, over 12885.00 frames. ], tot_loss[loss=0.3639, simple_loss=0.4032, pruned_loss=0.1623, over 2628961.13 frames. ], batch size: 52, lr: 4.29e-02, grad_scale: 32.0
+2024-08-03 01:47:09,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12650.0, ans=0.125
+2024-08-03 01:47:11,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=12686.666666666666, ans=0.125
+2024-08-03 01:47:11,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=12686.666666666666, ans=0.0
+2024-08-03 01:47:13,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=12686.666666666666, ans=0.45596666666666674
+2024-08-03 01:47:26,099 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 01:47:31,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.26 vs. limit=11.379999999999999
+2024-08-03 01:47:34,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=12760.0, ans=0.4534
+2024-08-03 01:47:43,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=12796.666666666666, ans=0.125
+2024-08-03 01:47:55,456 INFO [train.py:1114] (0/4) Epoch 1, batch 3500, loss[loss=0.375, simple_loss=0.4056, pruned_loss=0.1722, over 13541.00 frames. ], tot_loss[loss=0.3623, simple_loss=0.4018, pruned_loss=0.1614, over 2631051.71 frames. ], batch size: 34, lr: 4.28e-02, grad_scale: 32.0
+2024-08-03 01:47:55,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=12833.333333333334, ans=0.125
+2024-08-03 01:47:59,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.825e+02 2.381e+02 4.772e+02, threshold=3.650e+02, percent-clipped=2.0
+2024-08-03 01:48:45,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.64 vs. limit=4.947
+2024-08-03 01:48:51,939 INFO [train.py:1114] (0/4) Epoch 1, batch 3550, loss[loss=0.3878, simple_loss=0.4215, pruned_loss=0.1771, over 12496.00 frames. ], tot_loss[loss=0.3634, simple_loss=0.4034, pruned_loss=0.1617, over 2629290.98 frames. ], batch size: 58, lr: 4.28e-02, grad_scale: 32.0
+2024-08-03 01:49:01,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.33 vs. limit=11.526666666666667
+2024-08-03 01:49:07,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=13053.333333333334, ans=0.008031884057971015
+2024-08-03 01:49:18,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.93 vs. limit=17.317500000000003
+2024-08-03 01:49:24,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.75 vs. limit=12.4225
+2024-08-03 01:49:47,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13163.333333333334, ans=0.125
+2024-08-03 01:49:51,096 INFO [train.py:1114] (0/4) Epoch 1, batch 3600, loss[loss=0.4424, simple_loss=0.4425, pruned_loss=0.2212, over 8889.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4106, pruned_loss=0.1701, over 2490538.45 frames. ], batch size: 96, lr: 4.27e-02, grad_scale: 32.0
+2024-08-03 01:49:52,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=13200.0, ans=0.05
+2024-08-03 01:49:52,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=13200.0, ans=0.04949747468305833
+2024-08-03 01:49:53,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=13200.0, ans=0.438
+2024-08-03 01:49:54,439 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.00 vs. limit=8.3
+2024-08-03 01:49:55,456 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.481e+02 1.802e+02 2.019e+02 3.446e+02, threshold=3.604e+02, percent-clipped=0.0
+2024-08-03 01:50:04,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=13236.666666666666, ans=0.025
+2024-08-03 01:50:12,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=4.991
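The grad_scale field in the batch summaries above moves in powers of two (32 → 64 → 8 → 16 → 32 over the course of epoch 1), which is the signature of dynamic loss scaling in mixed-precision training: the scale grows after a long run of finite gradients and halves, possibly several times in a row, when overflows are detected. A simplified sketch of that update rule (illustrative only, not torch.cuda.amp's GradScaler internals):

```python
# Simplified dynamic loss-scale update, mimicking the power-of-two
# grad_scale movements in the log above. Hypothetical class, not GradScaler.
class LossScale:
    def __init__(self, scale: float = 32.0, growth_interval: int = 2000):
        self.scale = scale
        self.growth_interval = growth_interval
        self._good_steps = 0

    def update(self, found_inf: bool) -> None:
        if found_inf:
            self.scale /= 2.0   # back off immediately on overflow
            self._good_steps = 0
        else:
            self._good_steps += 1
            if self._good_steps % self.growth_interval == 0:
                self.scale *= 2.0  # grow after a run of finite gradients
```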
limit=4.991 +2024-08-03 01:50:27,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=13273.333333333334, ans=0.025 +2024-08-03 01:50:35,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=13310.0, ans=0.43415000000000004 +2024-08-03 01:50:38,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13310.0, ans=0.1669 +2024-08-03 01:50:47,418 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-1.pt +2024-08-03 01:52:08,727 INFO [train.py:1114] (0/4) Epoch 2, batch 0, loss[loss=0.3432, simple_loss=0.3828, pruned_loss=0.1518, over 13351.00 frames. ], tot_loss[loss=0.3432, simple_loss=0.3828, pruned_loss=0.1518, over 13351.00 frames. ], batch size: 33, lr: 4.19e-02, grad_scale: 32.0 +2024-08-03 01:52:08,728 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 01:52:18,754 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.2954, simple_loss=0.3785, pruned_loss=0.1062, over 944034.00 frames. +2024-08-03 01:52:18,755 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 01:52:44,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13420.0, ans=0.125 +2024-08-03 01:53:01,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=13493.333333333334, ans=0.010444444444444444 +2024-08-03 01:53:05,764 INFO [train.py:1114] (0/4) Epoch 2, batch 50, loss[loss=0.3034, simple_loss=0.3573, pruned_loss=0.1248, over 13419.00 frames. ], tot_loss[loss=0.3695, simple_loss=0.4086, pruned_loss=0.1652, over 578302.53 frames. ], batch size: 32, lr: 4.18e-02, grad_scale: 16.0 +2024-08-03 01:53:08,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=13530.0, ans=0.010291666666666671 +2024-08-03 01:53:25,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.505e+02 1.833e+02 2.741e+02 6.945e+02, threshold=3.667e+02, percent-clipped=7.0 +2024-08-03 01:53:27,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=5.035 +2024-08-03 01:53:35,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=13603.333333333334, ans=0.009986111111111105 +2024-08-03 01:53:52,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.93 vs. limit=12.62875 +2024-08-03 01:53:55,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.72 vs. limit=11.838333333333333 +2024-08-03 01:53:56,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=5.057 +2024-08-03 01:53:57,000 INFO [train.py:1114] (0/4) Epoch 2, batch 100, loss[loss=0.3183, simple_loss=0.3621, pruned_loss=0.1372, over 13510.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.4035, pruned_loss=0.1601, over 1024643.92 frames. 
+2024-08-03 01:53:57,000 INFO [train.py:1114] (0/4) Epoch 2, batch 100, loss[loss=0.3183, simple_loss=0.3621, pruned_loss=0.1372, over 13510.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.4035, pruned_loss=0.1601, over 1024643.92 frames. ], batch size: 35, lr: 4.17e-02, grad_scale: 16.0
+2024-08-03 01:54:07,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.68 vs. limit=17.8125
+2024-08-03 01:54:09,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13750.0, ans=0.1625
+2024-08-03 01:54:16,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.45 vs. limit=17.8125
+2024-08-03 01:54:24,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=13786.666666666666, ans=0.0
+2024-08-03 01:54:30,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=13823.333333333334, ans=0.125
+2024-08-03 01:54:30,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13823.333333333334, ans=0.16176666666666667
+2024-08-03 01:54:33,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=13823.333333333334, ans=0.007864492753623189
+2024-08-03 01:54:36,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=12.68375
+2024-08-03 01:54:41,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13860.0, ans=0.0
+2024-08-03 01:54:48,053 INFO [train.py:1114] (0/4) Epoch 2, batch 150, loss[loss=0.2912, simple_loss=0.3431, pruned_loss=0.1197, over 13419.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3984, pruned_loss=0.1562, over 1386053.90 frames. ], batch size: 32, lr: 4.17e-02, grad_scale: 16.0
+2024-08-03 01:55:01,881 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.63 vs. limit=12.725
+2024-08-03 01:55:03,043 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.519e+02 1.772e+02 2.227e+02 3.651e+02, threshold=3.544e+02, percent-clipped=0.0
+2024-08-03 01:55:09,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13970.0, ans=0.1603
+2024-08-03 01:55:10,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.05 vs. limit=12.73875
+2024-08-03 01:55:11,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=13970.0, ans=0.025
+2024-08-03 01:55:11,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.81 vs. limit=12.73875
+2024-08-03 01:55:16,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=12.752500000000001
+2024-08-03 01:55:29,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=14043.333333333334, ans=0.008152777777777773
+2024-08-03 01:55:34,694 INFO [train.py:1114] (0/4) Epoch 2, batch 200, loss[loss=0.3742, simple_loss=0.4141, pruned_loss=0.1671, over 12450.00 frames. ], tot_loss[loss=0.3534, simple_loss=0.3972, pruned_loss=0.1548, over 1664997.56 frames. ], batch size: 59, lr: 4.16e-02, grad_scale: 16.0
+2024-08-03 01:56:10,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=14190.0, ans=0.125
+2024-08-03 01:56:23,306 INFO [train.py:1114] (0/4) Epoch 2, batch 250, loss[loss=0.3566, simple_loss=0.4067, pruned_loss=0.1533, over 13312.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3963, pruned_loss=0.1536, over 1884317.45 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0
+2024-08-03 01:56:38,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.594e+02 1.964e+02 2.594e+02 6.291e+02, threshold=3.929e+02, percent-clipped=8.0
+2024-08-03 01:56:42,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14336.666666666666, ans=0.15663333333333335
+2024-08-03 01:56:44,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=14336.666666666666, ans=0.15663333333333335
+2024-08-03 01:56:46,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=14336.666666666666, ans=0.125
+2024-08-03 01:56:47,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14336.666666666666, ans=0.125
+2024-08-03 01:56:51,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.72 vs. limit=8.593333333333334
+2024-08-03 01:56:57,640 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.11 vs. limit=18.28
+2024-08-03 01:57:12,777 INFO [train.py:1114] (0/4) Epoch 2, batch 300, loss[loss=0.3743, simple_loss=0.4188, pruned_loss=0.1649, over 13449.00 frames. ], tot_loss[loss=0.3504, simple_loss=0.3952, pruned_loss=0.1528, over 2051545.11 frames. ], batch size: 42, lr: 4.15e-02, grad_scale: 16.0
+2024-08-03 01:57:13,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14446.666666666666, ans=0.15553333333333333
+2024-08-03 01:57:44,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=14556.666666666666, ans=0.04949747468305833
+2024-08-03 01:58:00,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14593.333333333334, ans=0.125
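The `ScheduledFloat: name=..., batch_count=..., ans=...` lines report hyperparameters (dropout rates, skip rates, balancer probabilities) whose current value is interpolated against the training batch count. A toy piecewise-linear schedule with that behaviour (illustrative only; the real scaling.py implementation is more elaborate):

```python
class PiecewiseLinearSchedule:
    """Toy stand-in for a batch-count-keyed float hyperparameter."""

    def __init__(self, *points):
        # points are (batch_count, value) pairs, e.g. (0, 0.3), (20000, 0.1)
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:  # linear interpolation within the segment
                return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
        return pts[-1][1]

dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(13750.0))  # the kind of value logged as `ans=`
```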
+2024-08-03 01:58:01,708 INFO [train.py:1114] (0/4) Epoch 2, batch 350, loss[loss=0.3287, simple_loss=0.3674, pruned_loss=0.145, over 13597.00 frames. ], tot_loss[loss=0.3507, simple_loss=0.3956, pruned_loss=0.1528, over 2182401.02 frames. ], batch size: 33, lr: 4.15e-02, grad_scale: 16.0
+2024-08-03 01:58:13,292 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-4000.pt
+2024-08-03 01:58:58,107 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.602e+02 1.924e+02 2.648e+02 5.206e+02, threshold=3.847e+02, percent-clipped=6.0
+2024-08-03 01:59:15,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=14740.0, ans=0.005250000000000005
+2024-08-03 01:59:29,770 INFO [train.py:1114] (0/4) Epoch 2, batch 400, loss[loss=0.3048, simple_loss=0.3711, pruned_loss=0.1193, over 13364.00 frames. ], tot_loss[loss=0.3476, simple_loss=0.3934, pruned_loss=0.1509, over 2286346.42 frames. ], batch size: 37, lr: 4.14e-02, grad_scale: 32.0
+2024-08-03 02:03:58,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=14813.333333333334, ans=0.00764927536231884
+2024-08-03 02:04:14,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=14850.0, ans=0.125
+2024-08-03 02:05:18,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=14850.0, ans=0.125
+2024-08-03 02:05:56,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=14960.0, ans=0.007617391304347826
+2024-08-03 02:06:32,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.15 vs. limit=18.72
+2024-08-03 02:06:33,893 INFO [train.py:1114] (0/4) Epoch 2, batch 450, loss[loss=0.3479, simple_loss=0.3999, pruned_loss=0.148, over 13534.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3935, pruned_loss=0.1515, over 2358699.07 frames. ], batch size: 38, lr: 4.13e-02, grad_scale: 32.0
+2024-08-03 02:06:48,867 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.511e+02 1.857e+02 2.288e+02 3.385e+02, threshold=3.714e+02, percent-clipped=0.0
+2024-08-03 02:06:51,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=15070.0, ans=0.125
+2024-08-03 02:07:04,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=15070.0, ans=0.37255000000000005
+2024-08-03 02:07:28,593 INFO [train.py:1114] (0/4) Epoch 2, batch 500, loss[loss=0.3456, simple_loss=0.4036, pruned_loss=0.1437, over 13445.00 frames. ], tot_loss[loss=0.346, simple_loss=0.3913, pruned_loss=0.1503, over 2424488.38 frames. ], batch size: 43, lr: 4.13e-02, grad_scale: 32.0
+2024-08-03 02:07:34,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=15180.0, ans=0.125
+2024-08-03 02:07:38,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=15180.0, ans=0.125
+2024-08-03 02:07:45,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=15216.666666666666, ans=0.007561594202898552
+2024-08-03 02:07:49,252 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:07:50,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=15253.333333333334, ans=0.125
+2024-08-03 02:07:58,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15290.0, ans=0.1471
+2024-08-03 02:08:06,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=15290.0, ans=0.36485
+2024-08-03 02:08:11,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=15326.666666666666, ans=0.125
+2024-08-03 02:08:18,908 INFO [train.py:1114] (0/4) Epoch 2, batch 550, loss[loss=0.4256, simple_loss=0.4463, pruned_loss=0.2024, over 13035.00 frames. ], tot_loss[loss=0.3446, simple_loss=0.3901, pruned_loss=0.1495, over 2467517.27 frames. ], batch size: 48, lr: 4.12e-02, grad_scale: 32.0
+2024-08-03 02:08:21,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=13.26125
+2024-08-03 02:08:33,981 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.486e+02 1.782e+02 2.081e+02 4.201e+02, threshold=3.563e+02, percent-clipped=2.0
+2024-08-03 02:08:42,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=15436.666666666666, ans=0.3597166666666667
+2024-08-03 02:08:45,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.65 vs. limit=12.718333333333334
+2024-08-03 02:08:50,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=15473.333333333334, ans=0.0021944444444444433
+2024-08-03 02:08:50,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=15473.333333333334, ans=0.007505797101449276
+2024-08-03 02:08:52,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=15473.333333333334, ans=0.125
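`checkpoint.py` writes both end-of-epoch files (`epoch-1.pt` above) and periodic mid-epoch ones (`checkpoint-4000.pt`, i.e. every few thousand batches). A hedged sketch of what such a resumable checkpoint typically bundles; the dictionary keys are illustrative, not icefall's exact layout:

```python
import torch

def save_checkpoint(filename, model, optimizer, scheduler, scaler, epoch, batch_idx):
    """Illustrative: bundle everything needed to resume training exactly."""
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "grad_scaler": scaler.state_dict(),  # AMP state; cf. grad_scale in the log
            "epoch": epoch,
            "batch_idx_train": batch_idx,
        },
        filename,
    )
```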
+2024-08-03 02:08:55,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=10.189333333333334
+2024-08-03 02:09:03,867 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:09:19,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=15510.0, ans=0.125
+2024-08-03 02:09:22,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=13.33
+2024-08-03 02:09:22,841 INFO [train.py:1114] (0/4) Epoch 2, batch 600, loss[loss=0.3482, simple_loss=0.4085, pruned_loss=0.1439, over 13341.00 frames. ], tot_loss[loss=0.343, simple_loss=0.3893, pruned_loss=0.1484, over 2508085.28 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0
+2024-08-03 02:09:24,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15546.666666666666, ans=0.14453333333333335
+2024-08-03 02:09:30,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=15546.666666666666, ans=0.125
+2024-08-03 02:09:30,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15546.666666666666, ans=0.125
+2024-08-03 02:09:35,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=15583.333333333334, ans=0.35458333333333336
+2024-08-03 02:09:39,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=15583.333333333334, ans=0.125
+2024-08-03 02:09:42,331 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=4.182e-02
+2024-08-03 02:09:45,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15620.0, ans=0.1438
+2024-08-03 02:09:54,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.29 vs. limit=8.914166666666667
+2024-08-03 02:10:06,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=10.277333333333335
+2024-08-03 02:10:12,466 INFO [train.py:1114] (0/4) Epoch 2, batch 650, loss[loss=0.346, simple_loss=0.3978, pruned_loss=0.1471, over 13546.00 frames. ], tot_loss[loss=0.3409, simple_loss=0.3878, pruned_loss=0.147, over 2543088.98 frames. ], batch size: 37, lr: 4.11e-02, grad_scale: 32.0
+2024-08-03 02:10:28,712 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.454e+02 1.669e+02 2.017e+02 2.893e+02, threshold=3.339e+02, percent-clipped=0.0
+2024-08-03 02:10:40,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=15803.333333333334, ans=0.125
+2024-08-03 02:10:55,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.00 vs. limit=19.380000000000003
+2024-08-03 02:10:57,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=13.440000000000001
+2024-08-03 02:11:02,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15840.0, ans=0.1416
+2024-08-03 02:11:03,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=15876.666666666666, ans=0.3443166666666667
+2024-08-03 02:11:06,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=15876.666666666666, ans=0.125
+2024-08-03 02:11:13,927 INFO [train.py:1114] (0/4) Epoch 2, batch 700, loss[loss=0.3593, simple_loss=0.4002, pruned_loss=0.1592, over 13535.00 frames. ], tot_loss[loss=0.3399, simple_loss=0.3874, pruned_loss=0.1462, over 2564874.11 frames. ], batch size: 35, lr: 4.11e-02, grad_scale: 8.0
+2024-08-03 02:12:04,806 INFO [train.py:1114] (0/4) Epoch 2, batch 750, loss[loss=0.3498, simple_loss=0.3984, pruned_loss=0.1506, over 13343.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3867, pruned_loss=0.146, over 2582063.28 frames. ], batch size: 37, lr: 4.10e-02, grad_scale: 8.0
+2024-08-03 02:12:06,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=16096.666666666666, ans=0.007370289855072465
+2024-08-03 02:12:21,722 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.638e+02 1.990e+02 2.530e+02 5.439e+02, threshold=3.980e+02, percent-clipped=7.0
+2024-08-03 02:12:33,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=16206.666666666666, ans=0.125
+2024-08-03 02:12:45,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16243.333333333334, ans=0.125
+2024-08-03 02:12:59,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=16243.333333333334, ans=0.33148333333333335
+2024-08-03 02:13:00,967 INFO [train.py:1114] (0/4) Epoch 2, batch 800, loss[loss=0.3364, simple_loss=0.3787, pruned_loss=0.147, over 13350.00 frames. ], tot_loss[loss=0.3385, simple_loss=0.3861, pruned_loss=0.1455, over 2597107.62 frames. ], batch size: 33, lr: 4.09e-02, grad_scale: 16.0
+2024-08-03 02:13:01,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=16280.0, ans=0.125
+2024-08-03 02:13:13,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=16316.666666666666, ans=0.025
+2024-08-03 02:13:28,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16353.333333333334, ans=0.125
+2024-08-03 02:13:34,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=13.64625
+2024-08-03 02:13:41,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=16390.0, ans=0.0
+2024-08-03 02:13:43,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=16426.666666666668, ans=0.125
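Each `Whitening: ... metric=X vs. limit=Y` line compares a measured statistic of some activation against a scheduled limit (entries above where the metric exceeds the limit are presumably the ones that trigger a corrective gradient term). Assuming the metric measures how far the channel covariance is from isotropic, one natural quantity is mean(λ²)/mean(λ)², which equals 1.0 exactly when all covariance eigenvalues are equal; a sketch under that assumption:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """Sketch: >= 1.0, equal to 1.0 iff the channel covariance is isotropic."""
    x = x.reshape(-1, x.shape[-1]).float()   # (frames, num_channels)
    x = x - x.mean(dim=0)
    cov = (x.t() @ x) / x.shape[0]           # channel covariance
    eigs = torch.linalg.eigvalsh(cov)
    return (eigs ** 2).mean() / eigs.mean() ** 2
```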
+2024-08-03 02:13:52,964 INFO [train.py:1114] (0/4) Epoch 2, batch 850, loss[loss=0.3348, simple_loss=0.3941, pruned_loss=0.1377, over 13332.00 frames. ], tot_loss[loss=0.3385, simple_loss=0.386, pruned_loss=0.1455, over 2609274.27 frames. ], batch size: 40, lr: 4.09e-02, grad_scale: 16.0
+2024-08-03 02:13:56,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=16463.333333333332, ans=0.007290579710144927
+2024-08-03 02:14:04,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16500.0, ans=0.125
+2024-08-03 02:14:09,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.01 vs. limit=10.6
+2024-08-03 02:14:12,137 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.467e+02 1.720e+02 2.030e+02 3.514e+02, threshold=3.439e+02, percent-clipped=0.0
+2024-08-03 02:14:15,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=16536.666666666668, ans=0.125
+2024-08-03 02:14:40,568 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=1.353e-02
+2024-08-03 02:14:42,241 INFO [train.py:1114] (0/4) Epoch 2, batch 900, loss[loss=0.2745, simple_loss=0.3302, pruned_loss=0.1094, over 13343.00 frames. ], tot_loss[loss=0.3385, simple_loss=0.3863, pruned_loss=0.1454, over 2611320.11 frames. ], batch size: 33, lr: 4.08e-02, grad_scale: 16.0
+2024-08-03 02:15:11,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=16756.666666666668, ans=0.125
+2024-08-03 02:15:30,775 INFO [train.py:1114] (0/4) Epoch 2, batch 950, loss[loss=0.3409, simple_loss=0.3835, pruned_loss=0.1491, over 13513.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3865, pruned_loss=0.1454, over 2612646.96 frames. ], batch size: 34, lr: 4.08e-02, grad_scale: 16.0
+2024-08-03 02:15:38,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=16830.0, ans=0.04949747468305833
+2024-08-03 02:15:38,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.30 vs. limit=13.811250000000001
+2024-08-03 02:15:49,949 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.484e+02 1.735e+02 2.135e+02 4.344e+02, threshold=3.469e+02, percent-clipped=2.0
+2024-08-03 02:15:54,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=16903.333333333332, ans=0.9190333333333333
+2024-08-03 02:15:58,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=16903.333333333332, ans=0.125
+2024-08-03 02:16:04,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten.whitening_limit, batch_count=16940.0, ans=13.8525
+2024-08-03 02:16:20,083 INFO [train.py:1114] (0/4) Epoch 2, batch 1000, loss[loss=0.2979, simple_loss=0.3567, pruned_loss=0.1196, over 13377.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3872, pruned_loss=0.1458, over 2611573.90 frames. ], batch size: 35, lr: 4.07e-02, grad_scale: 16.0
+2024-08-03 02:16:40,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17086.666666666668, ans=0.125
+2024-08-03 02:16:53,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=17123.333333333332, ans=0.125
+2024-08-03 02:17:10,906 INFO [train.py:1114] (0/4) Epoch 2, batch 1050, loss[loss=0.3829, simple_loss=0.4292, pruned_loss=0.1683, over 13579.00 frames. ], tot_loss[loss=0.3381, simple_loss=0.3863, pruned_loss=0.1449, over 2615601.85 frames. ], batch size: 39, lr: 4.06e-02, grad_scale: 16.0
+2024-08-03 02:17:17,769 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=10.878666666666668
+2024-08-03 02:17:21,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17233.333333333332, ans=0.12766666666666668
+2024-08-03 02:17:27,497 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.473e+02 1.878e+02 2.204e+02 3.880e+02, threshold=3.755e+02, percent-clipped=2.0
+2024-08-03 02:17:33,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=17270.0, ans=0.125
+2024-08-03 02:17:36,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=17270.0, ans=0.29555
+2024-08-03 02:18:02,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=17343.333333333332, ans=0.025
+2024-08-03 02:18:04,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17343.333333333332, ans=0.1265666666666667
+2024-08-03 02:18:05,040 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:18:11,157 INFO [train.py:1114] (0/4) Epoch 2, batch 1100, loss[loss=0.3254, simple_loss=0.3715, pruned_loss=0.1396, over 13552.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3857, pruned_loss=0.1444, over 2619271.60 frames. ], batch size: 36, lr: 4.06e-02, grad_scale: 16.0
+2024-08-03 02:18:11,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=17380.0, ans=0.0
+2024-08-03 02:18:11,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=10.952
+2024-08-03 02:18:41,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.17 vs. limit=13.745
+2024-08-03 02:18:43,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=17490.0, ans=0.125
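The recurring `optim.py` WARNINGs report quartiles of recently observed gradient norms (min/25%/median/75%/max), the threshold in force, and what percentage of recent steps were clipped. A rough illustration of deriving the threshold from recent statistics (this is not the actual optim.py logic, just the general idea behind `Clipping_scale=2.0`):

```python
import torch

def clip_with_adaptive_threshold(params, recent_norms, clipping_scale=2.0, window=128):
    """Illustrative: clip to clipping_scale * median of recently seen grad norms."""
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.norm() for g in grads]))
    recent_norms.append(total_norm.item())
    del recent_norms[:-window]                                # sliding window
    threshold = clipping_scale * sorted(recent_norms)[len(recent_norms) // 2]
    if total_norm > threshold:                                # counts toward `percent-clipped`
        for g in grads:
            g.mul_(threshold / total_norm)
    return total_norm.item(), threshold
```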
+2024-08-03 02:18:59,104 INFO [train.py:1114] (0/4) Epoch 2, batch 1150, loss[loss=0.3221, simple_loss=0.3659, pruned_loss=0.1391, over 13562.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3846, pruned_loss=0.1438, over 2618843.21 frames. ], batch size: 36, lr: 4.05e-02, grad_scale: 16.0
+2024-08-03 02:19:01,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=14.08625
+2024-08-03 02:19:11,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=17600.0, ans=0.0
+2024-08-03 02:19:16,027 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.513e+02 2.017e+02 2.624e+02 5.380e+02, threshold=4.034e+02, percent-clipped=4.0
+2024-08-03 02:19:32,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=17636.666666666668, ans=0.007035507246376811
+2024-08-03 02:19:54,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=17710.0, ans=0.1
+2024-08-03 02:20:03,134 INFO [train.py:1114] (0/4) Epoch 2, batch 1200, loss[loss=0.3286, simple_loss=0.3896, pruned_loss=0.1337, over 13586.00 frames. ], tot_loss[loss=0.336, simple_loss=0.3847, pruned_loss=0.1436, over 2616141.70 frames. ], batch size: 39, lr: 4.04e-02, grad_scale: 32.0
+2024-08-03 02:20:36,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=17820.0, ans=0.12180000000000002
+2024-08-03 02:20:37,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17856.666666666668, ans=0.12143333333333331
+2024-08-03 02:20:47,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=17893.333333333332, ans=0.0
+2024-08-03 02:20:49,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=17893.333333333332, ans=0.125
+2024-08-03 02:20:56,926 INFO [train.py:1114] (0/4) Epoch 2, batch 1250, loss[loss=0.4007, simple_loss=0.4385, pruned_loss=0.1814, over 13461.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.3846, pruned_loss=0.1428, over 2628141.40 frames. ], batch size: 42, lr: 4.04e-02, grad_scale: 32.0
+2024-08-03 02:21:06,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=17930.0, ans=0.025
+2024-08-03 02:21:23,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=17966.666666666668, ans=0.125
+2024-08-03 02:21:24,847 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.393e+02 1.566e+02 1.875e+02 3.241e+02, threshold=3.132e+02, percent-clipped=0.0
+2024-08-03 02:21:45,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.09 vs. limit=21.057499999999997
+2024-08-03 02:21:51,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=18076.666666666668, ans=0.006939855072463768
+2024-08-03 02:21:54,714 INFO [train.py:1114] (0/4) Epoch 2, batch 1300, loss[loss=0.372, simple_loss=0.4142, pruned_loss=0.1649, over 12906.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.384, pruned_loss=0.1427, over 2630604.02 frames. ], batch size: 52, lr: 4.03e-02, grad_scale: 32.0
+2024-08-03 02:21:59,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.76 vs. limit=21.085
+2024-08-03 02:22:04,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=18113.333333333332, ans=0.26603333333333345
+2024-08-03 02:22:06,095 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.87 vs. limit=21.085
+2024-08-03 02:22:07,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=18113.333333333332, ans=0.125
+2024-08-03 02:22:08,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=18150.0, ans=0.125
+2024-08-03 02:22:12,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=23.56 vs. limit=14.30625
+2024-08-03 02:22:19,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=18186.666666666668, ans=0.125
+2024-08-03 02:22:25,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=18186.666666666668, ans=0.26346666666666674
+2024-08-03 02:22:28,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=18186.666666666668, ans=0.006915942028985507
+2024-08-03 02:22:31,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18223.333333333332, ans=0.125
+2024-08-03 02:22:34,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=18223.333333333332, ans=10.0
+2024-08-03 02:22:35,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18223.333333333332, ans=0.11776666666666669
+2024-08-03 02:22:49,381 INFO [train.py:1114] (0/4) Epoch 2, batch 1350, loss[loss=0.321, simple_loss=0.3765, pruned_loss=0.1328, over 13553.00 frames. ], tot_loss[loss=0.3321, simple_loss=0.3823, pruned_loss=0.141, over 2637205.62 frames. ], batch size: 37, lr: 4.03e-02, grad_scale: 32.0
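`grad_scale` in the batch lines is the mixed-precision loss scale; in this excerpt it halves after overflowing steps (32.0 → 16.0 → 8.0) and is later restored. That is the standard dynamic loss-scaling loop that `torch.cuda.amp.GradScaler` implements; a generic sketch (the `model(batch)` call returning a scalar loss is a hypothetical stand-in):

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

def train_step(model, optimizer, batch):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = model(batch)          # hypothetical: model returns a scalar loss
    scaler.scale(loss).backward()
    scaler.step(optimizer)           # skipped internally if grads contain inf/NaN
    scaler.update()                  # halves the scale on overflow, else slowly grows it
    return loss.detach()
```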
+2024-08-03 02:22:53,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.71 vs. limit=21.2225
+2024-08-03 02:23:07,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=18333.333333333332, ans=0.0068840579710144935
+2024-08-03 02:23:08,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=18333.333333333332, ans=0.125
+2024-08-03 02:23:08,640 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.434e+02 1.711e+02 2.081e+02 4.051e+02, threshold=3.422e+02, percent-clipped=5.0
+2024-08-03 02:23:10,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18370.0, ans=0.11630000000000001
+2024-08-03 02:23:29,493 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=2.615e-03
+2024-08-03 02:23:32,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18406.666666666668, ans=0.125
+2024-08-03 02:23:39,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18443.333333333332, ans=0.125
+2024-08-03 02:23:42,043 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:23:49,067 INFO [train.py:1114] (0/4) Epoch 2, batch 1400, loss[loss=0.272, simple_loss=0.3174, pruned_loss=0.1133, over 13279.00 frames. ], tot_loss[loss=0.3301, simple_loss=0.3806, pruned_loss=0.1398, over 2641288.51 frames. ], batch size: 31, lr: 4.02e-02, grad_scale: 32.0
+2024-08-03 02:23:49,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=18480.0, ans=0.125
+2024-08-03 02:23:53,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=18480.0, ans=0.0
+2024-08-03 02:24:30,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18626.666666666668, ans=0.125
+2024-08-03 02:24:30,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18626.666666666668, ans=0.125
+2024-08-03 02:24:31,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18626.666666666668, ans=0.11373333333333333
+2024-08-03 02:24:38,163 INFO [train.py:1114] (0/4) Epoch 2, batch 1450, loss[loss=0.2984, simple_loss=0.3687, pruned_loss=0.114, over 13411.00 frames. ], tot_loss[loss=0.3296, simple_loss=0.3802, pruned_loss=0.1396, over 2640819.76 frames. ], batch size: 43, lr: 4.01e-02, grad_scale: 16.0
+2024-08-03 02:24:44,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18663.333333333332, ans=0.125
+2024-08-03 02:24:49,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=18700.0, ans=0.125
+2024-08-03 02:24:49,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18700.0, ans=0.11300000000000002
+2024-08-03 02:24:53,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.29 vs. limit=14.35
+2024-08-03 02:24:54,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18700.0, ans=0.125
+2024-08-03 02:24:55,527 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.417e+02 1.675e+02 1.959e+02 3.168e+02, threshold=3.351e+02, percent-clipped=0.0
+2024-08-03 02:24:55,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=18736.666666666668, ans=0.24421666666666675
+2024-08-03 02:25:06,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=18773.333333333332, ans=0.125
+2024-08-03 02:25:12,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=18773.333333333332, ans=0.1
+2024-08-03 02:25:16,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=18810.0, ans=0.035
+2024-08-03 02:25:21,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=18810.0, ans=0.125
+2024-08-03 02:25:32,430 INFO [train.py:1114] (0/4) Epoch 2, batch 1500, loss[loss=0.2988, simple_loss=0.3676, pruned_loss=0.115, over 13417.00 frames. ], tot_loss[loss=0.3284, simple_loss=0.3797, pruned_loss=0.1385, over 2641354.39 frames. ], batch size: 39, lr: 4.01e-02, grad_scale: 16.0
+2024-08-03 02:25:38,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18846.666666666668, ans=0.125
+2024-08-03 02:25:42,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=18883.333333333332, ans=0.125
+2024-08-03 02:25:48,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18883.333333333332, ans=0.125
+2024-08-03 02:26:11,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18993.333333333332, ans=0.11006666666666667
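The `lr:` column decays smoothly both within and across epochs (4.28e-02 at the top of this excerpt down to about 3.9e-02 here). A decay of that shape can be produced by an Eden-style rule in both batches and epochs; the constants below are illustrative, not necessarily this run's settings:

```python
def eden_style_lr(base_lr, step, epoch, lr_batches=5000.0, lr_epochs=3.5):
    """Illustrative learning rate decaying in both step and epoch."""
    return (
        base_lr
        * ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.5
    )

print(eden_style_lr(0.045, step=14000, epoch=2))  # same order of magnitude as the logged lr
```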
+2024-08-03 02:26:19,639 INFO [train.py:1114] (0/4) Epoch 2, batch 1550, loss[loss=0.321, simple_loss=0.3815, pruned_loss=0.1303, over 13385.00 frames. ], tot_loss[loss=0.3309, simple_loss=0.3812, pruned_loss=0.1403, over 2630304.09 frames. ], batch size: 41, lr: 4.00e-02, grad_scale: 16.0
+2024-08-03 02:26:24,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19030.0, ans=0.125
+2024-08-03 02:26:34,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.96 vs. limit=14.65
+2024-08-03 02:26:42,599 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.587e+02 1.878e+02 2.318e+02 8.334e+02, threshold=3.756e+02, percent-clipped=6.0
+2024-08-03 02:26:44,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=19103.333333333332, ans=0.025
+2024-08-03 02:27:11,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19176.666666666668, ans=0.125
+2024-08-03 02:27:15,350 INFO [train.py:1114] (0/4) Epoch 2, batch 1600, loss[loss=0.3695, simple_loss=0.4212, pruned_loss=0.1589, over 13575.00 frames. ], tot_loss[loss=0.3311, simple_loss=0.3815, pruned_loss=0.1403, over 2624471.56 frames. ], batch size: 39, lr: 4.00e-02, grad_scale: 32.0
+2024-08-03 02:27:34,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19286.666666666668, ans=0.125
+2024-08-03 02:27:45,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.96 vs. limit=21.9925
+2024-08-03 02:27:47,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=19323.333333333332, ans=0.0
+2024-08-03 02:28:00,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=19360.0, ans=0.0
+2024-08-03 02:28:03,151 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:28:03,910 INFO [train.py:1114] (0/4) Epoch 2, batch 1650, loss[loss=0.3003, simple_loss=0.3745, pruned_loss=0.1131, over 13355.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3809, pruned_loss=0.1403, over 2622153.90 frames. ], batch size: 40, lr: 3.99e-02, grad_scale: 16.0
+2024-08-03 02:28:38,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=19433.333333333332, ans=0.025
+2024-08-03 02:28:40,135 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.530e+02 1.782e+02 2.174e+02 3.857e+02, threshold=3.564e+02, percent-clipped=2.0
+2024-08-03 02:28:42,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=19470.0, ans=0.21855000000000002
+2024-08-03 02:28:51,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=19506.666666666668, ans=0.006628985507246376
+2024-08-03 02:29:08,347 INFO [train.py:1114] (0/4) Epoch 2, batch 1700, loss[loss=0.3041, simple_loss=0.3462, pruned_loss=0.131, over 13240.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3792, pruned_loss=0.1386, over 2631090.21 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0
+2024-08-03 02:29:15,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19580.0, ans=0.125
+2024-08-03 02:29:18,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=19616.666666666668, ans=0.0
+2024-08-03 02:29:21,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.44 vs. limit=14.85625
+2024-08-03 02:29:44,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=19690.0, ans=0.006589130434782609
+2024-08-03 02:29:51,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.63 vs. limit=22.295
+2024-08-03 02:29:52,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19726.666666666668, ans=0.10273333333333334
+2024-08-03 02:29:59,382 INFO [train.py:1114] (0/4) Epoch 2, batch 1750, loss[loss=0.28, simple_loss=0.3233, pruned_loss=0.1183, over 13555.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3783, pruned_loss=0.1383, over 2634625.83 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0
+2024-08-03 02:30:09,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19800.0, ans=0.125
+2024-08-03 02:30:09,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=19800.0, ans=0.20700000000000007
+2024-08-03 02:30:15,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=19800.0, ans=0.125
+2024-08-03 02:30:16,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=19800.0, ans=0.125
+2024-08-03 02:30:18,379 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.430e+02 1.665e+02 2.047e+02 3.989e+02, threshold=3.330e+02, percent-clipped=2.0
+2024-08-03 02:30:30,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=19873.333333333332, ans=0.125
+2024-08-03 02:30:33,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=19873.333333333332, ans=0.125
+2024-08-03 02:30:34,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=19873.333333333332, ans=0.125
+2024-08-03 02:30:35,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=19873.333333333332, ans=0.0
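In the `train.py:1114` lines, `loss[...]` is the current batch and `tot_loss[...]` a running aggregate; its `over N frames` count grows early in the epoch and then hovers around ~2.6M, which suggests a decayed window rather than a plain cumulative sum. A minimal frame-weighted tracker with that behaviour (the decay constant is illustrative):

```python
class DecayedFrameWeightedLoss:
    """Sketch of a loss like `tot_loss[... over N frames]` in the log."""

    def __init__(self, decay: float = 0.98):   # decay constant is an assumption
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, num_frames: float) -> None:
        self.loss_sum = self.loss_sum * self.decay + batch_loss * num_frames
        self.frames = self.frames * self.decay + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```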
+2024-08-03 02:30:46,007 INFO [train.py:1114] (0/4) Epoch 2, batch 1800, loss[loss=0.3172, simple_loss=0.3728, pruned_loss=0.1308, over 13551.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3783, pruned_loss=0.1382, over 2635940.75 frames. ], batch size: 38, lr: 3.97e-02, grad_scale: 16.0
+2024-08-03 02:30:55,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=19983.333333333332, ans=0.0
+2024-08-03 02:31:01,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=14.99375
+2024-08-03 02:31:33,935 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:31:37,429 INFO [train.py:1114] (0/4) Epoch 2, batch 1850, loss[loss=0.3218, simple_loss=0.3709, pruned_loss=0.1364, over 13414.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3775, pruned_loss=0.1373, over 2638802.05 frames. ], batch size: 39, lr: 3.96e-02, grad_scale: 16.0
+2024-08-03 02:32:04,607 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.463e+02 1.801e+02 2.661e+02 5.332e+02, threshold=3.601e+02, percent-clipped=10.0
+2024-08-03 02:32:08,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20203.333333333332, ans=0.125
+2024-08-03 02:32:17,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=15.0
+2024-08-03 02:32:32,397 INFO [train.py:1114] (0/4) Epoch 2, batch 1900, loss[loss=0.3177, simple_loss=0.3791, pruned_loss=0.1282, over 13325.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3778, pruned_loss=0.137, over 2640737.00 frames. ], batch size: 40, lr: 3.96e-02, grad_scale: 16.0
+2024-08-03 02:32:39,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=27.48 vs. limit=22.5
+2024-08-03 02:32:57,266 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.50 vs. limit=22.5
+2024-08-03 02:32:57,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=20386.666666666668, ans=0.2
+2024-08-03 02:33:11,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=20423.333333333332, ans=0.025
+2024-08-03 02:33:27,811 INFO [train.py:1114] (0/4) Epoch 2, batch 1950, loss[loss=0.2815, simple_loss=0.3443, pruned_loss=0.1094, over 13568.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3798, pruned_loss=0.1375, over 2646930.19 frames. ], batch size: 36, lr: 3.95e-02, grad_scale: 16.0
+2024-08-03 02:33:28,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0
+2024-08-03 02:33:34,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=20496.666666666668, ans=0.0
+2024-08-03 02:33:38,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=20533.333333333332, ans=0.2
+2024-08-03 02:33:46,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.390e+02 1.603e+02 1.917e+02 3.719e+02, threshold=3.206e+02, percent-clipped=1.0
+2024-08-03 02:33:52,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0
+2024-08-03 02:33:56,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=20606.666666666668, ans=0.125
+2024-08-03 02:34:09,441 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 02:34:20,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=20643.333333333332, ans=10.0
+2024-08-03 02:34:21,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20643.333333333332, ans=0.125
+2024-08-03 02:34:22,757 INFO [train.py:1114] (0/4) Epoch 2, batch 2000, loss[loss=0.2845, simple_loss=0.3448, pruned_loss=0.1121, over 13515.00 frames. ], tot_loss[loss=0.3272, simple_loss=0.3798, pruned_loss=0.1373, over 2636040.64 frames. ], batch size: 31, lr: 3.94e-02, grad_scale: 32.0
+2024-08-03 02:34:33,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=20716.666666666668, ans=0.125
+2024-08-03 02:34:58,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=20790.0, ans=0.125
+2024-08-03 02:34:59,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.91 vs. limit=15.0
+2024-08-03 02:35:05,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.79 vs. limit=15.0
+2024-08-03 02:35:09,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=20826.666666666668, ans=0.125
+2024-08-03 02:35:10,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=20826.666666666668, ans=0.125
+2024-08-03 02:35:14,579 INFO [train.py:1114] (0/4) Epoch 2, batch 2050, loss[loss=0.2814, simple_loss=0.3376, pruned_loss=0.1126, over 13413.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3783, pruned_loss=0.1368, over 2632973.40 frames. ], batch size: 32, lr: 3.94e-02, grad_scale: 32.0
+2024-08-03 02:35:23,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=20863.333333333332, ans=0.0
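Many of the ScheduledFloat entries belong to balancers, whose scheduled fields (`prob`, `min_positive`, `max_positive`, `min_abs`, `max_abs`) constrain per-channel activation statistics. Purely as an illustration of what those statistics are (not the gradient-modifying mechanism itself):

```python
import torch

def balancer_stats(x: torch.Tensor):
    """Per-channel statistics a balancer-style constraint would monitor (sketch)."""
    x = x.reshape(-1, x.shape[-1])
    frac_positive = (x > 0).float().mean(dim=0)   # compared against min_positive / max_positive
    mean_abs = x.abs().mean(dim=0)                # compared against min_abs / max_abs
    return frac_positive, mean_abs
```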
+2024-08-03 02:35:27,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=12.0
+2024-08-03 02:35:36,033 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.423e+02 1.683e+02 2.101e+02 5.163e+02, threshold=3.365e+02, percent-clipped=3.0
+2024-08-03 02:35:41,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=20936.666666666668, ans=0.125
+2024-08-03 02:35:56,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=20973.333333333332, ans=0.125
+2024-08-03 02:35:57,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=20973.333333333332, ans=0.025
+2024-08-03 02:35:58,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=20973.333333333332, ans=0.006310144927536232
+2024-08-03 02:36:05,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21010.0, ans=0.125
+2024-08-03 02:36:07,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21010.0, ans=0.125
+2024-08-03 02:36:09,017 INFO [train.py:1114] (0/4) Epoch 2, batch 2100, loss[loss=0.2956, simple_loss=0.3535, pruned_loss=0.1188, over 13556.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3768, pruned_loss=0.1355, over 2637510.82 frames. ], batch size: 37, lr: 3.93e-02, grad_scale: 32.0
+2024-08-03 02:36:09,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.26 vs. limit=22.5
+2024-08-03 02:36:12,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21046.666666666668, ans=0.125
+2024-08-03 02:36:15,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=21046.666666666668, ans=0.2
+2024-08-03 02:36:17,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=15.0
+2024-08-03 02:36:40,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=21156.666666666668, ans=0.125
+2024-08-03 02:36:41,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=21156.666666666668, ans=0.125
+2024-08-03 02:36:56,213 INFO [train.py:1114] (0/4) Epoch 2, batch 2150, loss[loss=0.314, simple_loss=0.3607, pruned_loss=0.1336, over 13565.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3757, pruned_loss=0.1348, over 2646322.82 frames. ], batch size: 36, lr: 3.93e-02, grad_scale: 16.0
+2024-08-03 02:37:17,962 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.342e+02 1.575e+02 1.914e+02 2.983e+02, threshold=3.149e+02, percent-clipped=0.0
+2024-08-03 02:37:18,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=21303.333333333332, ans=0.125
+2024-08-03 02:37:20,410 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.54 vs. limit=22.5
+2024-08-03 02:37:31,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0
+2024-08-03 02:37:36,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.19 vs. limit=15.0
+2024-08-03 02:37:37,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=21340.0, ans=0.0
+2024-08-03 02:37:39,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=21376.666666666668, ans=0.2
+2024-08-03 02:37:48,036 INFO [train.py:1114] (0/4) Epoch 2, batch 2200, loss[loss=0.3374, simple_loss=0.3949, pruned_loss=0.1399, over 13400.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3768, pruned_loss=0.1358, over 2644789.22 frames. ], batch size: 39, lr: 3.92e-02, grad_scale: 16.0
+2024-08-03 02:37:56,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21450.0, ans=0.125
+2024-08-03 02:38:09,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.33 vs. limit=15.0
+2024-08-03 02:38:19,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21523.333333333332, ans=0.125
+2024-08-03 02:38:49,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21560.0, ans=0.1
+2024-08-03 02:39:28,838 INFO [train.py:1114] (0/4) Epoch 2, batch 2250, loss[loss=0.2705, simple_loss=0.3402, pruned_loss=0.1004, over 13349.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3762, pruned_loss=0.1354, over 2641459.70 frames. ], batch size: 37, lr: 3.91e-02, grad_scale: 16.0
+2024-08-03 02:39:40,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=21596.666666666668, ans=0.125
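The `WithLoss: name=..., loss-sum=...` lines report auxiliary penalties attached to specific tensors (here, attention weights); a zero sum means the constraint was satisfied over the batches since the last report. A generic sketch of collecting such module-attached penalties into the main training loss (names and mechanism are illustrative, not scaling.py's actual implementation):

```python
import torch

aux_losses = {}

def attach_penalty(name: str, penalty: torch.Tensor) -> None:
    """Modules register per-tensor penalties under a dotted name (sketch)."""
    aux_losses[name] = aux_losses.get(name, 0.0) + penalty

def total_loss(main_loss: torch.Tensor) -> torch.Tensor:
    # Each entry would be reported as `WithLoss: name=<name>, loss-sum=<value>`.
    extra = sum(aux_losses.values()) if aux_losses else 0.0
    aux_losses.clear()
    return main_loss + extra
```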
limit=6.0 +2024-08-03 02:39:48,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=21596.666666666668, ans=0.006174637681159421 +2024-08-03 02:39:54,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=21633.333333333332, ans=0.0 +2024-08-03 02:39:57,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=21633.333333333332, ans=0.125 +2024-08-03 02:39:57,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=21633.333333333332, ans=0.125 +2024-08-03 02:39:59,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=21633.333333333332, ans=0.2 +2024-08-03 02:40:05,989 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.392e+02 1.558e+02 1.925e+02 3.298e+02, threshold=3.115e+02, percent-clipped=1.0 +2024-08-03 02:40:17,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21706.666666666668, ans=0.1 +2024-08-03 02:40:17,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=21706.666666666668, ans=0.125 +2024-08-03 02:40:31,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.65 vs. limit=22.5 +2024-08-03 02:40:34,544 INFO [train.py:1114] (0/4) Epoch 2, batch 2300, loss[loss=0.2916, simple_loss=0.347, pruned_loss=0.118, over 13591.00 frames. ], tot_loss[loss=0.323, simple_loss=0.3752, pruned_loss=0.1354, over 2637931.64 frames. ], batch size: 33, lr: 3.91e-02, grad_scale: 8.0 +2024-08-03 02:40:34,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21780.0, ans=0.125 +2024-08-03 02:40:35,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=21780.0, ans=0.006134782608695652 +2024-08-03 02:40:35,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.96 vs. limit=22.5 +2024-08-03 02:40:45,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.31 vs. 
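The `INFO [scaling.py:1024]` Whitening lines compare a per-module metric against a limit (e.g. `metric=23.96 vs. limit=22.5`), which suggests a regularizer that only activates when the feature covariance drifts too far from white. One plausible metric with that behavior is the ratio of the mean squared eigenvalue to the squared mean eigenvalue of the covariance: it equals 1.0 for perfectly white features and grows as variance concentrates in a few directions. A sketch for the `num_groups=1` case, with the formula treated as an assumption:

```python
import torch

def whitening_metric(x):
    """x: (frames, channels). Returns >= 1.0; equals 1.0 iff the feature
    covariance is proportional to the identity (fully 'white')."""
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    return ((eigs ** 2).mean() / (eigs.mean() ** 2 + 1e-20)).item()

# a penalty would then apply only above the logged limit, e.g. 22.5:
# loss += max(0.0, whitening_metric(features) - limit)
```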
limit=15.0 +2024-08-03 02:40:49,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=21816.666666666668, ans=0.125 +2024-08-03 02:40:58,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=21853.333333333332, ans=0.006118840579710145 +2024-08-03 02:41:03,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=21890.0, ans=0.0 +2024-08-03 02:41:04,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=21890.0, ans=0.05 +2024-08-03 02:41:05,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21890.0, ans=0.1 +2024-08-03 02:41:08,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21890.0, ans=0.125 +2024-08-03 02:41:16,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=21926.666666666668, ans=0.006102898550724638 +2024-08-03 02:41:16,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=21926.666666666668, ans=0.0 +2024-08-03 02:41:22,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21926.666666666668, ans=0.0 +2024-08-03 02:41:23,921 INFO [train.py:1114] (0/4) Epoch 2, batch 2350, loss[loss=0.3454, simple_loss=0.3924, pruned_loss=0.1492, over 13565.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3758, pruned_loss=0.1355, over 2640522.61 frames. ], batch size: 38, lr: 3.90e-02, grad_scale: 8.0 +2024-08-03 02:41:32,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=22000.0, ans=0.125 +2024-08-03 02:41:34,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22000.0, ans=0.1 +2024-08-03 02:41:37,215 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:41:40,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=22000.0, ans=0.125 +2024-08-03 02:41:44,432 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.437e+02 1.623e+02 1.973e+02 3.440e+02, threshold=3.245e+02, percent-clipped=2.0 +2024-08-03 02:42:15,867 INFO [train.py:1114] (0/4) Epoch 2, batch 2400, loss[loss=0.3187, simple_loss=0.3688, pruned_loss=0.1343, over 13522.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3759, pruned_loss=0.1354, over 2642114.67 frames. 
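The batch headers report both a per-batch loss ("loss[... over 13400.00 frames]") and a `tot_loss` aggregated "over ~2.6M frames", which reads like a frame-weighted running average over recent batches. A minimal tracker under that assumption; the decay constant is illustrative, not icefall's actual bookkeeping:

```python
class LossTracker:
    """Assumed semantics: frame-weighted moving average of batch losses."""

    def __init__(self, decay=0.999):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss, batch_frames):
        # decay old statistics, then add this batch weighted by its frames
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def tot_loss(self):
        return self.loss_sum / max(self.frames, 1.0)
```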
], batch size: 35, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:42:28,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=22183.333333333332, ans=0.125 +2024-08-03 02:42:38,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=22220.0, ans=0.0060391304347826085 +2024-08-03 02:42:55,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.50 vs. limit=15.0 +2024-08-03 02:43:04,901 INFO [train.py:1114] (0/4) Epoch 2, batch 2450, loss[loss=0.3148, simple_loss=0.3743, pruned_loss=0.1276, over 13353.00 frames. ], tot_loss[loss=0.3245, simple_loss=0.377, pruned_loss=0.136, over 2632096.73 frames. ], batch size: 37, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:43:08,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.81 vs. limit=15.0 +2024-08-03 02:43:20,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.37 vs. limit=22.5 +2024-08-03 02:43:24,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=22403.333333333332, ans=0.0 +2024-08-03 02:43:27,796 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.465e+02 1.678e+02 2.067e+02 5.260e+02, threshold=3.356e+02, percent-clipped=2.0 +2024-08-03 02:43:30,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=22403.333333333332, ans=0.005999275362318841 +2024-08-03 02:43:44,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=22440.0, ans=0.125 +2024-08-03 02:43:59,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=22440.0, ans=0.125 +2024-08-03 02:45:37,298 INFO [train.py:1114] (0/4) Epoch 2, batch 2500, loss[loss=0.3539, simple_loss=0.4005, pruned_loss=0.1536, over 13404.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3767, pruned_loss=0.1354, over 2636390.42 frames. ], batch size: 39, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:46:39,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=22550.0, ans=0.125 +2024-08-03 02:47:09,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22586.666666666668, ans=0.125 +2024-08-03 02:47:24,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=22586.666666666668, ans=0.125 +2024-08-03 02:51:30,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=8.83 vs. limit=12.0 +2024-08-03 02:51:38,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.40 vs. 
limit=15.0 +2024-08-03 02:51:45,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=22660.0, ans=0.2 +2024-08-03 02:51:58,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=22660.0, ans=0.2 +2024-08-03 02:52:09,582 INFO [train.py:1114] (0/4) Epoch 2, batch 2550, loss[loss=0.2741, simple_loss=0.3239, pruned_loss=0.1121, over 13520.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3762, pruned_loss=0.1351, over 2638364.09 frames. ], batch size: 31, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:52:16,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.00 vs. limit=10.0 +2024-08-03 02:52:20,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=22696.666666666668, ans=0.0 +2024-08-03 02:52:35,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=22696.666666666668, ans=0.005935507246376812 +2024-08-03 02:52:35,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=22696.666666666668, ans=0.125 +2024-08-03 02:52:52,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=22733.333333333332, ans=0.2 +2024-08-03 02:53:31,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.373e+02 1.661e+02 2.107e+02 4.285e+02, threshold=3.322e+02, percent-clipped=3.0 +2024-08-03 02:54:15,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=22806.666666666668, ans=0.2 +2024-08-03 02:54:16,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=22806.666666666668, ans=0.0059115942028985504 +2024-08-03 02:54:24,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=22806.666666666668, ans=0.125 +2024-08-03 02:54:48,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=22843.333333333332, ans=0.125 +2024-08-03 02:54:52,167 INFO [train.py:1114] (0/4) Epoch 2, batch 2600, loss[loss=0.3024, simple_loss=0.3635, pruned_loss=0.1206, over 13565.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3764, pruned_loss=0.135, over 2637792.15 frames. ], batch size: 36, lr: 3.87e-02, grad_scale: 8.0 +2024-08-03 02:55:20,772 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-08-03 02:55:45,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=22990.0, ans=0.125 +2024-08-03 02:56:03,873 INFO [train.py:1114] (0/4) Epoch 2, batch 2650, loss[loss=0.3503, simple_loss=0.4015, pruned_loss=0.1495, over 13305.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3767, pruned_loss=0.1352, over 2641062.79 frames. ], batch size: 46, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:56:04,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.20 vs. 
limit=12.0 +2024-08-03 02:56:08,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.92 vs. limit=15.0 +2024-08-03 02:56:09,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=23063.333333333332, ans=0.025 +2024-08-03 02:56:19,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=23100.0, ans=0.005847826086956522 +2024-08-03 02:56:23,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=23136.666666666668, ans=0.125 +2024-08-03 02:56:27,903 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.447e+02 1.734e+02 2.047e+02 3.463e+02, threshold=3.469e+02, percent-clipped=1.0 +2024-08-03 02:56:56,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=23173.333333333332, ans=0.0 +2024-08-03 02:57:16,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=23210.0, ans=0.125 +2024-08-03 02:58:07,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=23246.666666666668, ans=0.125 +2024-08-03 02:58:07,639 INFO [train.py:1114] (0/4) Epoch 2, batch 2700, loss[loss=0.3117, simple_loss=0.3737, pruned_loss=0.1249, over 13551.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3765, pruned_loss=0.1351, over 2638534.84 frames. ], batch size: 40, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:58:26,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.11 vs. limit=10.0 +2024-08-03 03:00:27,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=23393.333333333332, ans=0.125 +2024-08-03 03:00:28,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=15.0 +2024-08-03 03:00:29,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=23393.333333333332, ans=0.125 +2024-08-03 03:00:32,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=23393.333333333332, ans=0.125 +2024-08-03 03:00:32,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=23393.333333333332, ans=0.125 +2024-08-03 03:00:36,554 INFO [train.py:1114] (0/4) Epoch 2, batch 2750, loss[loss=0.2901, simple_loss=0.3539, pruned_loss=0.1132, over 13320.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3751, pruned_loss=0.1343, over 2637061.32 frames. 
], batch size: 34, lr: 3.85e-02, grad_scale: 8.0 +2024-08-03 03:00:43,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=23430.0, ans=15.0 +2024-08-03 03:00:55,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=23466.666666666668, ans=0.5 +2024-08-03 03:00:56,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=23466.666666666668, ans=0.125 +2024-08-03 03:01:03,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=23503.333333333332, ans=0.025 +2024-08-03 03:01:04,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=23503.333333333332, ans=0.0 +2024-08-03 03:01:06,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.416e+02 1.639e+02 1.960e+02 3.073e+02, threshold=3.277e+02, percent-clipped=0.0 +2024-08-03 03:01:15,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=23540.0, ans=0.125 +2024-08-03 03:01:31,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=23576.666666666668, ans=0.125 +2024-08-03 03:01:38,767 INFO [train.py:1114] (0/4) Epoch 2, batch 2800, loss[loss=0.4635, simple_loss=0.4567, pruned_loss=0.2351, over 8831.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3769, pruned_loss=0.1357, over 2628117.10 frames. ], batch size: 96, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:02:05,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=23650.0, ans=0.025 +2024-08-03 03:02:21,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0 +2024-08-03 03:02:22,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.05 vs. limit=15.0 +2024-08-03 03:02:27,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=23723.333333333332, ans=0.2 +2024-08-03 03:02:29,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.92 vs. limit=15.0 +2024-08-03 03:02:39,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=23760.0, ans=0.005704347826086957 +2024-08-03 03:02:47,310 INFO [train.py:1114] (0/4) Epoch 2, batch 2850, loss[loss=0.2594, simple_loss=0.3276, pruned_loss=0.09558, over 13368.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3768, pruned_loss=0.1354, over 2622439.42 frames. 
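The `lr` field in the batch headers decays slowly within epoch 2 (3.93e-02 down to 3.84e-02 across this stretch) and steps down again at epoch boundaries, consistent with a schedule that decays in both batch count and epoch. An Eden-style form is sketched below; both the functional form and the constants are assumptions, not read from the training script:

```python
def eden_lr(base_lr, batch, epoch, lr_batches=7500.0, lr_epochs=3.5):
    """Assumed Eden-style schedule: smooth ~batch^-0.5 * epoch^-0.5 decay
    overall, flat near the start of training."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```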
], batch size: 35, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:03:05,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=23833.333333333332, ans=0.2 +2024-08-03 03:03:09,957 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.500e+02 1.764e+02 2.105e+02 5.677e+02, threshold=3.527e+02, percent-clipped=3.0 +2024-08-03 03:03:19,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.83 vs. limit=15.0 +2024-08-03 03:03:27,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=23943.333333333332, ans=0.2 +2024-08-03 03:03:56,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23980.0, ans=0.1 +2024-08-03 03:04:04,032 INFO [train.py:1114] (0/4) Epoch 2, batch 2900, loss[loss=0.2905, simple_loss=0.3567, pruned_loss=0.1121, over 13370.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3778, pruned_loss=0.1355, over 2632863.89 frames. ], batch size: 36, lr: 3.83e-02, grad_scale: 16.0 +2024-08-03 03:04:21,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=24016.666666666668, ans=0.0 +2024-08-03 03:04:22,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=24016.666666666668, ans=0.2 +2024-08-03 03:04:51,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.23 vs. limit=15.0 +2024-08-03 03:04:55,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24126.666666666668, ans=0.125 +2024-08-03 03:05:08,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=24163.333333333332, ans=0.125 +2024-08-03 03:05:09,686 INFO [train.py:1114] (0/4) Epoch 2, batch 2950, loss[loss=0.3028, simple_loss=0.3533, pruned_loss=0.1261, over 13338.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.376, pruned_loss=0.1346, over 2630532.42 frames. ], batch size: 34, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:05:15,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=24163.333333333332, ans=0.025 +2024-08-03 03:05:24,196 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-08-03 03:05:29,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=24200.0, ans=15.0 +2024-08-03 03:05:38,156 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.432e+02 1.719e+02 2.227e+02 3.350e+02, threshold=3.438e+02, percent-clipped=0.0 +2024-08-03 03:05:39,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24236.666666666668, ans=0.0 +2024-08-03 03:05:45,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.78 vs. 
limit=10.0 +2024-08-03 03:05:46,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.34 vs. limit=22.5 +2024-08-03 03:05:57,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=24310.0, ans=0.0 +2024-08-03 03:06:04,400 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=12.0 +2024-08-03 03:06:05,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=24310.0, ans=0.0 +2024-08-03 03:06:07,518 INFO [train.py:1114] (0/4) Epoch 2, batch 3000, loss[loss=0.3389, simple_loss=0.393, pruned_loss=0.1424, over 13541.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3752, pruned_loss=0.1341, over 2630525.71 frames. ], batch size: 37, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:06:11,314 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 03:06:29,639 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.2511, simple_loss=0.3433, pruned_loss=0.07947, over 944034.00 frames. +2024-08-03 03:06:29,640 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 03:06:31,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=24346.666666666668, ans=0.2 +2024-08-03 03:06:45,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=24383.333333333332, ans=0.09899494936611666 +2024-08-03 03:06:56,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=24456.666666666668, ans=0.2 +2024-08-03 03:06:59,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.03 vs. limit=15.0 +2024-08-03 03:07:01,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24456.666666666668, ans=0.125 +2024-08-03 03:07:07,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=24493.333333333332, ans=0.125 +2024-08-03 03:07:12,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=24493.333333333332, ans=0.025 +2024-08-03 03:07:15,182 INFO [train.py:1114] (0/4) Epoch 2, batch 3050, loss[loss=0.2811, simple_loss=0.3465, pruned_loss=0.1078, over 13534.00 frames. ], tot_loss[loss=0.3227, simple_loss=0.3763, pruned_loss=0.1345, over 2627335.16 frames. ], batch size: 35, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:07:37,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24603.333333333332, ans=0.1 +2024-08-03 03:07:37,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.16 vs. 
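At batch 3000 above, the trainer pauses mid-epoch to compute a validation loss over 944034 frames and logs peak GPU memory ("Maximum memory allocated so far is 10018MB"). A sketch of such a validation pass: `torch.cuda.max_memory_allocated` is the real PyTorch call, while `model`, `criterion`, and `valid_dl` are stand-ins for the script's own objects.

```python
import torch

def validate(model, valid_dl, criterion, device="cuda"):
    model.eval()
    loss_sum, frames = 0.0, 0
    with torch.no_grad():
        for batch in valid_dl:
            feats = batch["features"].to(device)
            loss, num_frames = criterion(model, feats, batch)  # stand-in
            loss_sum += loss.item() * num_frames
            frames += num_frames
    model.train()
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={loss_sum / max(frames, 1):.4f}, "
          f"max memory {mb}MB")
```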
limit=6.0 +2024-08-03 03:07:37,911 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.349e+02 1.521e+02 1.830e+02 3.051e+02, threshold=3.043e+02, percent-clipped=0.0 +2024-08-03 03:07:38,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=24603.333333333332, ans=0.125 +2024-08-03 03:07:50,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24640.0, ans=0.1 +2024-08-03 03:08:04,780 INFO [train.py:1114] (0/4) Epoch 2, batch 3100, loss[loss=0.327, simple_loss=0.3828, pruned_loss=0.1356, over 13354.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3758, pruned_loss=0.1339, over 2627369.36 frames. ], batch size: 46, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:08:07,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=24713.333333333332, ans=0.95 +2024-08-03 03:08:11,638 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.90 vs. limit=15.0 +2024-08-03 03:08:43,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=24786.666666666668, ans=0.125 +2024-08-03 03:09:06,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.97 vs. limit=15.0 +2024-08-03 03:09:07,675 INFO [train.py:1114] (0/4) Epoch 2, batch 3150, loss[loss=0.3411, simple_loss=0.4028, pruned_loss=0.1397, over 13047.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3748, pruned_loss=0.1331, over 2628902.68 frames. ], batch size: 48, lr: 3.80e-02, grad_scale: 8.0 +2024-08-03 03:09:29,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.526e+02 1.877e+02 2.299e+02 4.480e+02, threshold=3.753e+02, percent-clipped=6.0 +2024-08-03 03:09:35,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=25006.666666666668, ans=0.005433333333333333 +2024-08-03 03:09:46,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=25043.333333333332, ans=0.025 +2024-08-03 03:09:53,507 INFO [train.py:1114] (0/4) Epoch 2, batch 3200, loss[loss=0.3363, simple_loss=0.393, pruned_loss=0.1398, over 13545.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3744, pruned_loss=0.1331, over 2634852.33 frames. ], batch size: 37, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:10:07,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.83 vs. limit=15.0 +2024-08-03 03:10:08,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=25116.666666666668, ans=0.025 +2024-08-03 03:10:09,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=25116.666666666668, ans=0.125 +2024-08-03 03:10:17,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.32 vs. 
limit=15.0 +2024-08-03 03:10:17,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=25153.333333333332, ans=0.0 +2024-08-03 03:10:49,707 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:10:56,414 INFO [train.py:1114] (0/4) Epoch 2, batch 3250, loss[loss=0.3326, simple_loss=0.3879, pruned_loss=0.1386, over 13393.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3741, pruned_loss=0.1327, over 2639472.93 frames. ], batch size: 38, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:11:14,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.62 vs. limit=15.0 +2024-08-03 03:11:17,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=25300.0, ans=0.0 +2024-08-03 03:11:23,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=25300.0, ans=0.0 +2024-08-03 03:11:25,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=25300.0, ans=0.0 +2024-08-03 03:11:32,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=25336.666666666668, ans=0.125 +2024-08-03 03:11:34,444 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.442e+02 1.550e+02 1.764e+02 2.865e+02, threshold=3.101e+02, percent-clipped=0.0 +2024-08-03 03:11:34,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=25336.666666666668, ans=0.0 +2024-08-03 03:12:09,621 INFO [train.py:1114] (0/4) Epoch 2, batch 3300, loss[loss=0.305, simple_loss=0.3617, pruned_loss=0.1242, over 12884.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3716, pruned_loss=0.1308, over 2640716.13 frames. ], batch size: 52, lr: 3.78e-02, grad_scale: 16.0 +2024-08-03 03:12:12,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=25446.666666666668, ans=15.0 +2024-08-03 03:12:21,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=25483.333333333332, ans=0.125 +2024-08-03 03:12:32,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25520.0, ans=0.1 +2024-08-03 03:12:34,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.13 vs. limit=15.0 +2024-08-03 03:12:46,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=25593.333333333332, ans=0.005305797101449275 +2024-08-03 03:12:50,771 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.125e-03 +2024-08-03 03:12:52,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0 +2024-08-03 03:12:53,074 INFO [train.py:1114] (0/4) Epoch 2, batch 3350, loss[loss=0.3674, simple_loss=0.4099, pruned_loss=0.1624, over 12994.00 frames. 
], tot_loss[loss=0.3189, simple_loss=0.3732, pruned_loss=0.1323, over 2629880.17 frames. ], batch size: 48, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:12:53,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=25630.0, ans=0.04949747468305833 +2024-08-03 03:12:58,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25630.0, ans=0.0 +2024-08-03 03:13:13,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=25703.333333333332, ans=0.005281884057971015 +2024-08-03 03:13:14,122 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.415e+02 1.657e+02 2.011e+02 3.247e+02, threshold=3.315e+02, percent-clipped=1.0 +2024-08-03 03:13:34,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=25776.666666666668, ans=0.005265942028985508 +2024-08-03 03:13:39,101 INFO [train.py:1114] (0/4) Epoch 2, batch 3400, loss[loss=0.2513, simple_loss=0.3079, pruned_loss=0.09737, over 13523.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3722, pruned_loss=0.1321, over 2626071.29 frames. ], batch size: 31, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:13:46,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=15.0 +2024-08-03 03:14:00,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=25886.666666666668, ans=0.2 +2024-08-03 03:14:06,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=25923.333333333332, ans=0.005234057971014493 +2024-08-03 03:14:08,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=25923.333333333332, ans=0.125 +2024-08-03 03:14:12,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=25923.333333333332, ans=0.125 +2024-08-03 03:14:14,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=25960.0, ans=0.125 +2024-08-03 03:14:22,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=2.55 vs. limit=15.0 +2024-08-03 03:14:23,918 INFO [train.py:1114] (0/4) Epoch 2, batch 3450, loss[loss=0.3314, simple_loss=0.3877, pruned_loss=0.1375, over 12785.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.372, pruned_loss=0.1318, over 2628880.55 frames. 
], batch size: 52, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:14:25,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=25996.666666666668, ans=0.025 +2024-08-03 03:14:40,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=26070.0, ans=0.1 +2024-08-03 03:14:44,183 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.439e+02 1.568e+02 1.776e+02 4.751e+02, threshold=3.136e+02, percent-clipped=2.0 +2024-08-03 03:15:04,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=26143.333333333332, ans=0.125 +2024-08-03 03:15:06,935 INFO [train.py:1114] (0/4) Epoch 2, batch 3500, loss[loss=0.3037, simple_loss=0.3589, pruned_loss=0.1242, over 13536.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3714, pruned_loss=0.1317, over 2630169.08 frames. ], batch size: 34, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:15:07,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.02 vs. limit=15.0 +2024-08-03 03:15:22,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.30 vs. limit=15.0 +2024-08-03 03:15:50,769 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.03 vs. limit=12.0 +2024-08-03 03:15:51,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=26326.666666666668, ans=10.0 +2024-08-03 03:15:53,642 INFO [train.py:1114] (0/4) Epoch 2, batch 3550, loss[loss=0.322, simple_loss=0.3747, pruned_loss=0.1346, over 12448.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3737, pruned_loss=0.1329, over 2628316.29 frames. ], batch size: 58, lr: 3.75e-02, grad_scale: 16.0 +2024-08-03 03:15:57,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26363.333333333332, ans=0.1 +2024-08-03 03:16:02,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=26400.0, ans=0.2 +2024-08-03 03:16:04,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.22 vs. limit=22.5 +2024-08-03 03:16:15,275 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.421e+02 1.616e+02 2.006e+02 3.426e+02, threshold=3.231e+02, percent-clipped=2.0 +2024-08-03 03:16:18,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=26436.666666666668, ans=0.005122463768115942 +2024-08-03 03:16:27,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-08-03 03:16:33,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26510.0, ans=0.1 +2024-08-03 03:16:39,567 INFO [train.py:1114] (0/4) Epoch 2, batch 3600, loss[loss=0.3595, simple_loss=0.3977, pruned_loss=0.1607, over 9557.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3809, pruned_loss=0.1408, over 2489436.14 frames. 
], batch size: 97, lr: 3.74e-02, grad_scale: 32.0 +2024-08-03 03:16:58,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.78 vs. limit=22.5 +2024-08-03 03:17:07,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=26656.666666666668, ans=0.125 +2024-08-03 03:17:14,149 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-2.pt +2024-08-03 03:18:02,774 INFO [train.py:1114] (0/4) Epoch 3, batch 0, loss[loss=0.2846, simple_loss=0.3483, pruned_loss=0.1105, over 13326.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3483, pruned_loss=0.1105, over 13326.00 frames. ], batch size: 33, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:18:02,775 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 03:18:12,653 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2631, simple_loss=0.3546, pruned_loss=0.08577, over 944034.00 frames. +2024-08-03 03:18:12,653 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 03:18:13,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0 +2024-08-03 03:18:17,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26693.333333333332, ans=0.1 +2024-08-03 03:18:18,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.36 vs. limit=22.5 +2024-08-03 03:18:28,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=26730.0, ans=0.125 +2024-08-03 03:18:30,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26766.666666666668, ans=0.1 +2024-08-03 03:18:37,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=26766.666666666668, ans=15.0 +2024-08-03 03:18:45,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.401e+02 1.625e+02 1.929e+02 3.724e+02, threshold=3.249e+02, percent-clipped=3.0 +2024-08-03 03:18:46,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26803.333333333332, ans=0.125 +2024-08-03 03:18:48,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.14 vs. limit=22.5 +2024-08-03 03:18:59,393 INFO [train.py:1114] (0/4) Epoch 3, batch 50, loss[loss=0.2859, simple_loss=0.3474, pruned_loss=0.1122, over 13422.00 frames. ], tot_loss[loss=0.3222, simple_loss=0.3763, pruned_loss=0.134, over 578543.72 frames. ], batch size: 32, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:19:10,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=26913.333333333332, ans=0.125 +2024-08-03 03:19:11,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.26 vs. 
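At the end of epoch 2 the trainer writes `.../exp/epoch-2.pt` (and, elsewhere in this log, step checkpoints such as `checkpoint-8000.pt`). A minimal sketch of that style of checkpointing; `torch.save` is the real API, but the dict keys here are illustrative rather than icefall's exact schema:

```python
import torch

def save_checkpoint(path, model, optimizer, scheduler, epoch, batch_idx):
    # bundle everything needed to resume training into one .pt file
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "epoch": epoch,
            "batch_idx_train": batch_idx,
        },
        path,
    )
```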
limit=15.0 +2024-08-03 03:19:48,778 INFO [train.py:1114] (0/4) Epoch 3, batch 100, loss[loss=0.2808, simple_loss=0.3372, pruned_loss=0.1122, over 13539.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.3782, pruned_loss=0.1347, over 1026673.97 frames. ], batch size: 35, lr: 3.54e-02, grad_scale: 32.0 +2024-08-03 03:19:50,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=27060.0, ans=0.125 +2024-08-03 03:20:21,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.439e+02 1.724e+02 2.172e+02 3.862e+02, threshold=3.447e+02, percent-clipped=4.0 +2024-08-03 03:21:06,899 INFO [train.py:1114] (0/4) Epoch 3, batch 150, loss[loss=0.24, simple_loss=0.3027, pruned_loss=0.08862, over 13411.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.373, pruned_loss=0.1308, over 1387367.83 frames. ], batch size: 32, lr: 3.53e-02, grad_scale: 32.0 +2024-08-03 03:21:10,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=27243.333333333332, ans=0.125 +2024-08-03 03:21:10,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=27243.333333333332, ans=0.035 +2024-08-03 03:21:10,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27243.333333333332, ans=0.1 +2024-08-03 03:21:30,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.67 vs. limit=15.0 +2024-08-03 03:21:49,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=27390.0, ans=0.004915217391304348 +2024-08-03 03:22:07,955 INFO [train.py:1114] (0/4) Epoch 3, batch 200, loss[loss=0.356, simple_loss=0.3982, pruned_loss=0.1569, over 12602.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3706, pruned_loss=0.1294, over 1665657.02 frames. ], batch size: 59, lr: 3.53e-02, grad_scale: 16.0 +2024-08-03 03:22:41,087 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.338e+02 1.522e+02 1.755e+02 2.817e+02, threshold=3.045e+02, percent-clipped=0.0 +2024-08-03 03:22:53,962 INFO [train.py:1114] (0/4) Epoch 3, batch 250, loss[loss=0.3793, simple_loss=0.4226, pruned_loss=0.1679, over 13351.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.3702, pruned_loss=0.1293, over 1884655.60 frames. ], batch size: 46, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:22:57,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.25 vs. limit=22.5 +2024-08-03 03:23:14,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=27683.333333333332, ans=0.025 +2024-08-03 03:23:14,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=27683.333333333332, ans=0.125 +2024-08-03 03:23:19,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.84 vs. limit=12.0 +2024-08-03 03:23:21,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.52 vs. 
limit=15.0 +2024-08-03 03:23:24,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.47 vs. limit=15.0 +2024-08-03 03:23:26,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=27720.0, ans=0.004843478260869565 +2024-08-03 03:23:30,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=27756.666666666668, ans=0.0 +2024-08-03 03:23:31,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=27756.666666666668, ans=0.2 +2024-08-03 03:23:38,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=27756.666666666668, ans=0.125 +2024-08-03 03:23:48,996 INFO [train.py:1114] (0/4) Epoch 3, batch 300, loss[loss=0.3386, simple_loss=0.3887, pruned_loss=0.1443, over 13436.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3682, pruned_loss=0.1279, over 2051755.91 frames. ], batch size: 42, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:23:51,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=27793.333333333332, ans=0.125 +2024-08-03 03:23:53,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=15.0 +2024-08-03 03:24:03,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=27830.0, ans=0.2 +2024-08-03 03:24:09,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=27866.666666666668, ans=0.0 +2024-08-03 03:24:10,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=27866.666666666668, ans=0.0 +2024-08-03 03:24:10,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27866.666666666668, ans=0.125 +2024-08-03 03:24:24,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.466e+02 1.718e+02 2.215e+02 5.480e+02, threshold=3.437e+02, percent-clipped=5.0 +2024-08-03 03:24:38,903 INFO [train.py:1114] (0/4) Epoch 3, batch 350, loss[loss=0.2983, simple_loss=0.3497, pruned_loss=0.1235, over 13608.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3678, pruned_loss=0.1268, over 2182665.14 frames. ], batch size: 33, lr: 3.51e-02, grad_scale: 16.0 +2024-08-03 03:24:44,864 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.72 vs. limit=12.0 +2024-08-03 03:24:56,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=28050.0, ans=0.125 +2024-08-03 03:25:01,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.66 vs. limit=10.0 +2024-08-03 03:25:03,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28050.0, ans=0.0 +2024-08-03 03:25:25,234 INFO [train.py:1114] (0/4) Epoch 3, batch 400, loss[loss=0.3343, simple_loss=0.3876, pruned_loss=0.1405, over 13377.00 frames. 
], tot_loss[loss=0.3101, simple_loss=0.3673, pruned_loss=0.1264, over 2286805.56 frames. ], batch size: 37, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:25:38,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=28196.666666666668, ans=0.0 +2024-08-03 03:26:05,602 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.402e+02 1.627e+02 1.926e+02 3.907e+02, threshold=3.254e+02, percent-clipped=1.0 +2024-08-03 03:26:18,596 INFO [train.py:1114] (0/4) Epoch 3, batch 450, loss[loss=0.3001, simple_loss=0.3614, pruned_loss=0.1194, over 13545.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3669, pruned_loss=0.1262, over 2359881.34 frames. ], batch size: 38, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:26:18,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=28343.333333333332, ans=0.125 +2024-08-03 03:26:23,374 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:26:42,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28380.0, ans=0.125 +2024-08-03 03:26:45,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=28416.666666666668, ans=0.004692028985507246 +2024-08-03 03:27:11,504 INFO [train.py:1114] (0/4) Epoch 3, batch 500, loss[loss=0.3482, simple_loss=0.4052, pruned_loss=0.1457, over 13416.00 frames. ], tot_loss[loss=0.308, simple_loss=0.3652, pruned_loss=0.1255, over 2425412.34 frames. ], batch size: 43, lr: 3.49e-02, grad_scale: 32.0 +2024-08-03 03:27:15,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.57 vs. limit=15.0 +2024-08-03 03:27:19,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.98 vs. limit=10.0 +2024-08-03 03:27:20,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=28563.333333333332, ans=0.0 +2024-08-03 03:27:23,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=28563.333333333332, ans=0.125 +2024-08-03 03:27:26,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28563.333333333332, ans=0.1 +2024-08-03 03:27:48,039 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.466e+02 1.735e+02 2.174e+02 5.837e+02, threshold=3.470e+02, percent-clipped=2.0 +2024-08-03 03:27:59,878 INFO [train.py:1114] (0/4) Epoch 3, batch 550, loss[loss=0.3367, simple_loss=0.3889, pruned_loss=0.1423, over 13033.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.3654, pruned_loss=0.1257, over 2468099.95 frames. 
], batch size: 48, lr: 3.49e-02, grad_scale: 16.0 +2024-08-03 03:28:01,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=28710.0, ans=0.0 +2024-08-03 03:28:19,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=28783.333333333332, ans=0.125 +2024-08-03 03:28:40,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0 +2024-08-03 03:28:42,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=28856.666666666668, ans=0.125 +2024-08-03 03:28:44,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=28856.666666666668, ans=0.125 +2024-08-03 03:28:48,242 INFO [train.py:1114] (0/4) Epoch 3, batch 600, loss[loss=0.3166, simple_loss=0.3796, pruned_loss=0.1268, over 13314.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.365, pruned_loss=0.1253, over 2507675.65 frames. ], batch size: 46, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:17,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=29003.333333333332, ans=0.0 +2024-08-03 03:29:19,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=29003.333333333332, ans=0.125 +2024-08-03 03:29:21,224 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.381e+02 1.525e+02 1.783e+02 3.115e+02, threshold=3.051e+02, percent-clipped=0.0 +2024-08-03 03:29:25,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=29040.0, ans=0.07 +2024-08-03 03:29:30,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=29040.0, ans=0.125 +2024-08-03 03:29:38,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=29076.666666666668, ans=0.125 +2024-08-03 03:29:39,453 INFO [train.py:1114] (0/4) Epoch 3, batch 650, loss[loss=0.3027, simple_loss=0.37, pruned_loss=0.1177, over 13535.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3639, pruned_loss=0.1241, over 2542130.62 frames. ], batch size: 37, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:40,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.45 vs. limit=22.5 +2024-08-03 03:29:49,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.03 vs. 
limit=12.0 +2024-08-03 03:30:01,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=29150.0, ans=0.025 +2024-08-03 03:30:03,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=29150.0, ans=0.05 +2024-08-03 03:30:17,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29223.333333333332, ans=0.125 +2024-08-03 03:30:18,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=29223.333333333332, ans=0.125 +2024-08-03 03:30:21,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=29223.333333333332, ans=0.125 +2024-08-03 03:30:26,320 INFO [train.py:1114] (0/4) Epoch 3, batch 700, loss[loss=0.2757, simple_loss=0.3407, pruned_loss=0.1054, over 13530.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3642, pruned_loss=0.1243, over 2564282.10 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 16.0 +2024-08-03 03:30:31,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=29260.0, ans=0.0 +2024-08-03 03:31:08,722 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-8000.pt +2024-08-03 03:31:13,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=29333.333333333332, ans=0.0 +2024-08-03 03:31:22,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29370.0, ans=0.0 +2024-08-03 03:31:28,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29370.0, ans=0.1 +2024-08-03 03:31:29,956 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.985e+01 1.488e+02 1.744e+02 2.083e+02 3.353e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-03 03:31:32,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=29406.666666666668, ans=0.125 +2024-08-03 03:31:41,826 INFO [train.py:1114] (0/4) Epoch 3, batch 750, loss[loss=0.2942, simple_loss=0.3592, pruned_loss=0.1146, over 13355.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3627, pruned_loss=0.1233, over 2582220.99 frames. ], batch size: 37, lr: 3.46e-02, grad_scale: 16.0 +2024-08-03 03:36:14,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=29443.333333333332, ans=0.125 +2024-08-03 03:36:30,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=29480.0, ans=0.125 +2024-08-03 03:37:46,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29516.666666666668, ans=0.0 +2024-08-03 03:38:26,583 INFO [train.py:1114] (0/4) Epoch 3, batch 800, loss[loss=0.2761, simple_loss=0.3325, pruned_loss=0.1099, over 13341.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3627, pruned_loss=0.1233, over 2596150.49 frames. 
], batch size: 33, lr: 3.46e-02, grad_scale: 32.0 +2024-08-03 03:38:33,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=29626.666666666668, ans=0.1 +2024-08-03 03:38:34,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29626.666666666668, ans=0.1 +2024-08-03 03:38:35,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29663.333333333332, ans=0.125 +2024-08-03 03:38:39,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.19 vs. limit=22.5 +2024-08-03 03:38:46,816 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.66 vs. limit=15.0 +2024-08-03 03:38:49,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=29700.0, ans=0.025 +2024-08-03 03:38:50,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=29700.0, ans=10.0 +2024-08-03 03:38:54,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0 +2024-08-03 03:39:00,212 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.412e+02 1.629e+02 2.089e+02 3.471e+02, threshold=3.259e+02, percent-clipped=0.0 +2024-08-03 03:39:12,359 INFO [train.py:1114] (0/4) Epoch 3, batch 850, loss[loss=0.3362, simple_loss=0.3912, pruned_loss=0.1406, over 13326.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3622, pruned_loss=0.1234, over 2608915.90 frames. ], batch size: 40, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:39:20,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=15.0 +2024-08-03 03:39:30,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29883.333333333332, ans=0.1 +2024-08-03 03:39:57,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=29956.666666666668, ans=0.025 +2024-08-03 03:40:04,393 INFO [train.py:1114] (0/4) Epoch 3, batch 900, loss[loss=0.2915, simple_loss=0.3465, pruned_loss=0.1182, over 13354.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3631, pruned_loss=0.1243, over 2611166.99 frames. 
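The optim.py WARNING lines report the distribution of recent gradient norms (the five numbers read as min, 25%, 50%, 75%, max) together with a clipping threshold; across the entries above the threshold equals Clipping_scale times the middle value, e.g. 2.0 × 1.651e+02 ≈ 3.303e+02 in the warning just above. Below is a hedged sketch of that policy as a standalone helper; icefall's ScaledAdam builds it into the optimizer with different bookkeeping, and the helper name and window size 128 are assumptions.

    # Sketch: clip the global grad norm against clipping_scale * median of
    # recent norms, as the quartile WARNINGs above suggest. Illustrative only.
    import torch

    def clip_by_recent_median(params, recent_norms, clipping_scale=2.0):
        grads = [p.grad for p in params if p.grad is not None]
        total = torch.norm(torch.stack([g.norm() for g in grads]))
        recent_norms.append(total.item())
        del recent_norms[:-128]  # keep a sliding window of recent norms
        threshold = clipping_scale * torch.tensor(recent_norms).median().item()
        if total.item() > threshold:  # counted by "percent-clipped" in the log
            for g in grads:
                g.mul_(threshold / total.item())
        return threshold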
], batch size: 33, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:40:04,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=29993.333333333332, ans=0.00434927536231884 +2024-08-03 03:40:09,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=29993.333333333332, ans=0.125 +2024-08-03 03:40:39,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30103.333333333332, ans=0.125 +2024-08-03 03:40:39,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.370e+02 1.651e+02 2.028e+02 4.342e+02, threshold=3.303e+02, percent-clipped=4.0 +2024-08-03 03:40:50,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=30176.666666666668, ans=0.0 +2024-08-03 03:40:51,091 INFO [train.py:1114] (0/4) Epoch 3, batch 950, loss[loss=0.2739, simple_loss=0.3367, pruned_loss=0.1055, over 13535.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.3622, pruned_loss=0.1237, over 2612634.97 frames. ], batch size: 34, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:41:08,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=30213.333333333332, ans=0.09899494936611666 +2024-08-03 03:41:19,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=30286.666666666668, ans=0.09899494936611666 +2024-08-03 03:41:29,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.62 vs. limit=22.5 +2024-08-03 03:41:38,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30323.333333333332, ans=0.1 +2024-08-03 03:41:40,357 INFO [train.py:1114] (0/4) Epoch 3, batch 1000, loss[loss=0.3331, simple_loss=0.3882, pruned_loss=0.139, over 13373.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3636, pruned_loss=0.1242, over 2612449.38 frames. ], batch size: 35, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:41:42,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=30360.0, ans=0.02 +2024-08-03 03:41:47,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=30360.0, ans=0.0 +2024-08-03 03:41:58,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.59 vs. limit=22.5 +2024-08-03 03:42:15,780 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.383e+02 1.605e+02 2.126e+02 5.573e+02, threshold=3.210e+02, percent-clipped=1.0 +2024-08-03 03:42:17,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=30506.666666666668, ans=0.2 +2024-08-03 03:42:27,512 INFO [train.py:1114] (0/4) Epoch 3, batch 1050, loss[loss=0.2913, simple_loss=0.3643, pruned_loss=0.1091, over 13584.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.363, pruned_loss=0.1238, over 2617092.86 frames. 
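In the train.py lines above, loss[...] is the current batch (roughly 13k frames) while tot_loss[...] is a slowly decaying running aggregate, which is why its frame totals are fractional and hover near 2.6M. The sketch below assumes a 0.995 decay per batch (a 1/200 reset rate): with ~13k-frame batches the decayed frame sum settles near 13k × 200 ≈ 2.6M, matching the figures in the log. This is an illustration, not icefall's MetricsTracker.

    # Sketch of a decayed per-frame running loss like tot_loss[...] above.
    # decay=0.995 is an assumed value chosen to reproduce the ~2.6e6 frame
    # totals seen in the log.
    class RunningLoss:
        def __init__(self, decay=0.995):
            self.decay, self.loss_sum, self.frame_sum = decay, 0.0, 0.0

        def update(self, loss_per_frame, num_frames):
            self.loss_sum = self.loss_sum * self.decay + loss_per_frame * num_frames
            self.frame_sum = self.frame_sum * self.decay + num_frames

        @property
        def loss(self):
            return self.loss_sum / self.frame_sum

    run = RunningLoss()
    run.update(0.2913, 13584.0)  # the batch-1050 numbers from the log above
    print(run.loss, run.frame_sum)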
], batch size: 39, lr: 3.43e-02, grad_scale: 16.0 +2024-08-03 03:42:37,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.52 vs. limit=15.0 +2024-08-03 03:42:39,633 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:42:46,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=30616.666666666668, ans=0.1 +2024-08-03 03:42:53,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30616.666666666668, ans=0.1 +2024-08-03 03:42:54,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=30653.333333333332, ans=0.125 +2024-08-03 03:42:55,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=15.0 +2024-08-03 03:42:56,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=30653.333333333332, ans=0.1 +2024-08-03 03:43:02,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=30653.333333333332, ans=0.004205797101449276 +2024-08-03 03:43:06,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=30690.0, ans=0.025 +2024-08-03 03:43:13,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.85 vs. limit=15.0 +2024-08-03 03:43:16,386 INFO [train.py:1114] (0/4) Epoch 3, batch 1100, loss[loss=0.2935, simple_loss=0.3599, pruned_loss=0.1136, over 13562.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3638, pruned_loss=0.1242, over 2620443.40 frames. ], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:43:25,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. limit=6.0 +2024-08-03 03:43:41,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=30800.0, ans=0.004173913043478261 +2024-08-03 03:43:55,899 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.319e+02 1.473e+02 1.688e+02 2.812e+02, threshold=2.945e+02, percent-clipped=0.0 +2024-08-03 03:44:01,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=30873.333333333332, ans=0.02 +2024-08-03 03:44:07,120 INFO [train.py:1114] (0/4) Epoch 3, batch 1150, loss[loss=0.2781, simple_loss=0.3359, pruned_loss=0.1101, over 13539.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3629, pruned_loss=0.1236, over 2618614.14 frames. 
], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:44:12,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=30910.0, ans=0.00415 +2024-08-03 03:44:13,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30910.0, ans=0.1 +2024-08-03 03:44:17,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30946.666666666668, ans=0.1 +2024-08-03 03:44:39,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.70 vs. limit=15.0 +2024-08-03 03:45:00,365 INFO [train.py:1114] (0/4) Epoch 3, batch 1200, loss[loss=0.2832, simple_loss=0.3493, pruned_loss=0.1085, over 13596.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3631, pruned_loss=0.1238, over 2615675.14 frames. ], batch size: 39, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:03,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=31093.333333333332, ans=0.0 +2024-08-03 03:45:30,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=31203.333333333332, ans=0.2 +2024-08-03 03:45:34,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=31203.333333333332, ans=0.0 +2024-08-03 03:45:37,481 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.400e+02 1.587e+02 2.006e+02 3.916e+02, threshold=3.173e+02, percent-clipped=5.0 +2024-08-03 03:45:37,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=31203.333333333332, ans=0.0 +2024-08-03 03:45:42,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=31240.0, ans=0.125 +2024-08-03 03:45:48,724 INFO [train.py:1114] (0/4) Epoch 3, batch 1250, loss[loss=0.3081, simple_loss=0.3756, pruned_loss=0.1203, over 13458.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.3635, pruned_loss=0.1236, over 2627779.88 frames. ], batch size: 42, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:46:01,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.98 vs. limit=15.0 +2024-08-03 03:46:06,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.29 vs. limit=15.0 +2024-08-03 03:46:10,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=31350.0, ans=0.2 +2024-08-03 03:46:11,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=31350.0, ans=0.2 +2024-08-03 03:46:14,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=31350.0, ans=0.0 +2024-08-03 03:46:17,304 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.87 vs. 
limit=15.0 +2024-08-03 03:46:19,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=31386.666666666668, ans=0.0 +2024-08-03 03:46:22,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.20 vs. limit=15.0 +2024-08-03 03:46:31,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31423.333333333332, ans=0.1 +2024-08-03 03:46:34,553 INFO [train.py:1114] (0/4) Epoch 3, batch 1300, loss[loss=0.2951, simple_loss=0.3623, pruned_loss=0.1139, over 12812.00 frames. ], tot_loss[loss=0.3052, simple_loss=0.3632, pruned_loss=0.1236, over 2629813.94 frames. ], batch size: 52, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:46:39,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=15.0 +2024-08-03 03:46:52,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=31533.333333333332, ans=0.09899494936611666 +2024-08-03 03:47:00,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0 +2024-08-03 03:47:11,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=31570.0, ans=0.125 +2024-08-03 03:47:11,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.75 vs. limit=15.0 +2024-08-03 03:47:14,530 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.368e+02 1.596e+02 1.808e+02 3.073e+02, threshold=3.191e+02, percent-clipped=0.0 +2024-08-03 03:47:21,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=15.0 +2024-08-03 03:47:25,301 INFO [train.py:1114] (0/4) Epoch 3, batch 1350, loss[loss=0.2962, simple_loss=0.3618, pruned_loss=0.1153, over 13543.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3623, pruned_loss=0.1224, over 2638502.79 frames. ], batch size: 37, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:47:26,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=31643.333333333332, ans=0.2 +2024-08-03 03:47:34,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=31680.0, ans=0.125 +2024-08-03 03:47:34,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31680.0, ans=0.1 +2024-08-03 03:48:11,336 INFO [train.py:1114] (0/4) Epoch 3, batch 1400, loss[loss=0.2727, simple_loss=0.3302, pruned_loss=0.1076, over 13248.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3617, pruned_loss=0.1217, over 2642504.76 frames. 
], batch size: 31, lr: 3.39e-02, grad_scale: 8.0 +2024-08-03 03:48:25,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=31863.333333333332, ans=0.125 +2024-08-03 03:48:32,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=31900.0, ans=0.003934782608695652 +2024-08-03 03:48:38,261 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:48:40,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=31936.666666666668, ans=0.125 +2024-08-03 03:48:47,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=31936.666666666668, ans=0.125 +2024-08-03 03:48:50,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.406e+02 1.620e+02 1.890e+02 3.890e+02, threshold=3.239e+02, percent-clipped=2.0 +2024-08-03 03:48:52,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.73 vs. limit=22.5 +2024-08-03 03:49:08,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=31973.333333333332, ans=0.125 +2024-08-03 03:49:21,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=31973.333333333332, ans=0.025 +2024-08-03 03:49:26,503 INFO [train.py:1114] (0/4) Epoch 3, batch 1450, loss[loss=0.3135, simple_loss=0.3724, pruned_loss=0.1273, over 13415.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3631, pruned_loss=0.123, over 2641522.74 frames. ], batch size: 43, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:49:52,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=32046.666666666668, ans=0.0 +2024-08-03 03:50:01,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.12 vs. limit=15.0 +2024-08-03 03:50:11,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=32083.333333333332, ans=0.0 +2024-08-03 03:52:37,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=32120.0, ans=0.125 +2024-08-03 03:52:50,968 INFO [train.py:1114] (0/4) Epoch 3, batch 1500, loss[loss=0.2876, simple_loss=0.3564, pruned_loss=0.1094, over 13401.00 frames. ], tot_loss[loss=0.3054, simple_loss=0.364, pruned_loss=0.1234, over 2641387.62 frames. 
], batch size: 39, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:52:59,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=32193.333333333332, ans=0.09899494936611666 +2024-08-03 03:53:04,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=32230.0, ans=0.0038630434782608693 +2024-08-03 03:53:07,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=32230.0, ans=0.2 +2024-08-03 03:53:12,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=32266.666666666668, ans=0.2 +2024-08-03 03:53:16,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=32266.666666666668, ans=0.07 +2024-08-03 03:53:17,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32266.666666666668, ans=0.1 +2024-08-03 03:53:22,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=32303.333333333332, ans=0.125 +2024-08-03 03:53:29,876 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.393e+02 1.602e+02 2.004e+02 4.084e+02, threshold=3.204e+02, percent-clipped=1.0 +2024-08-03 03:53:36,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=32340.0, ans=0.2 +2024-08-03 03:53:37,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32340.0, ans=0.1 +2024-08-03 03:53:38,921 INFO [train.py:1114] (0/4) Epoch 3, batch 1550, loss[loss=0.3136, simple_loss=0.3817, pruned_loss=0.1228, over 13404.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3626, pruned_loss=0.1227, over 2630878.09 frames. ], batch size: 41, lr: 3.37e-02, grad_scale: 8.0 +2024-08-03 03:53:56,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.83 vs. limit=15.0 +2024-08-03 03:54:17,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0 +2024-08-03 03:54:21,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=32450.0, ans=0.0038152173913043484 +2024-08-03 03:54:27,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.63 vs. 
limit=22.5 +2024-08-03 03:54:28,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32486.666666666668, ans=0.1 +2024-08-03 03:54:33,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=32486.666666666668, ans=0.125 +2024-08-03 03:54:36,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=32486.666666666668, ans=0.125 +2024-08-03 03:54:38,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=32523.333333333332, ans=0.125 +2024-08-03 03:54:42,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.99 vs. limit=22.5 +2024-08-03 03:54:57,333 INFO [train.py:1114] (0/4) Epoch 3, batch 1600, loss[loss=0.2936, simple_loss=0.363, pruned_loss=0.1121, over 13574.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3628, pruned_loss=0.1233, over 2624691.80 frames. ], batch size: 39, lr: 3.37e-02, grad_scale: 16.0 +2024-08-03 03:54:59,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=32560.0, ans=0.0 +2024-08-03 03:55:05,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=32560.0, ans=0.125 +2024-08-03 03:55:07,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=32596.666666666668, ans=0.2 +2024-08-03 03:55:09,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=32596.666666666668, ans=0.125 +2024-08-03 03:55:12,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=32596.666666666668, ans=0.04949747468305833 +2024-08-03 03:55:24,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.00 vs. limit=22.5 +2024-08-03 03:55:37,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=32706.666666666668, ans=0.125 +2024-08-03 03:55:37,893 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.348e+02 1.511e+02 1.737e+02 4.413e+02, threshold=3.022e+02, percent-clipped=2.0 +2024-08-03 03:55:38,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=32706.666666666668, ans=0.0 +2024-08-03 03:55:40,950 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:55:48,032 INFO [train.py:1114] (0/4) Epoch 3, batch 1650, loss[loss=0.2851, simple_loss=0.3658, pruned_loss=0.1022, over 13333.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.362, pruned_loss=0.1225, over 2621389.01 frames. 
], batch size: 40, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:55:54,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=32743.333333333332, ans=0.125 +2024-08-03 03:56:07,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32780.0, ans=0.125 +2024-08-03 03:56:20,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=32816.666666666664, ans=0.0 +2024-08-03 03:56:40,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.68 vs. limit=15.0 +2024-08-03 03:56:43,057 INFO [train.py:1114] (0/4) Epoch 3, batch 1700, loss[loss=0.283, simple_loss=0.3258, pruned_loss=0.1201, over 13243.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3612, pruned_loss=0.1219, over 2630216.99 frames. ], batch size: 31, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:56:46,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.68 vs. limit=22.5 +2024-08-03 03:57:14,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=33036.666666666664, ans=0.125 +2024-08-03 03:57:16,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33036.666666666664, ans=0.0 +2024-08-03 03:57:19,656 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.348e+02 1.541e+02 1.805e+02 2.810e+02, threshold=3.082e+02, percent-clipped=0.0 +2024-08-03 03:57:32,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=33073.333333333336, ans=0.125 +2024-08-03 03:57:34,252 INFO [train.py:1114] (0/4) Epoch 3, batch 1750, loss[loss=0.2814, simple_loss=0.3372, pruned_loss=0.1128, over 13548.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.3601, pruned_loss=0.1212, over 2633978.39 frames. ], batch size: 31, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:57:36,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=33110.0, ans=15.0 +2024-08-03 03:58:04,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=33183.333333333336, ans=0.035 +2024-08-03 03:58:05,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.06 vs. limit=15.0 +2024-08-03 03:58:21,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=33220.0, ans=0.125 +2024-08-03 03:58:29,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=33256.666666666664, ans=0.035 +2024-08-03 03:58:36,919 INFO [train.py:1114] (0/4) Epoch 3, batch 1800, loss[loss=0.2824, simple_loss=0.3559, pruned_loss=0.1044, over 13551.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3612, pruned_loss=0.1221, over 2635137.16 frames. 
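The balancer entries above (min_positive, max_abs, prob, and their scheduled ans= values) refer to modules that keep per-channel activation statistics inside a target range; icefall's Balancer is an identity in the forward pass and adjusts gradients directly, firing with the logged probability. The sketch below expresses the same constraint as an explicit auxiliary penalty instead, with a soft differentiable proxy for the fraction-positive statistic; it illustrates what is being enforced, not the project's mechanism.

    # Sketch: penalize channels whose fraction of positive values falls
    # below min_positive or whose mean |x| exceeds max_abs, mirroring the
    # balancer constraints named in the log. Illustrative only.
    import torch

    def balancer_penalty(x, min_positive=0.05, max_abs=10.0):
        # x: (num_frames, num_channels); sigmoid(x/0.1) is a soft,
        # differentiable stand-in for the hard (x > 0) indicator.
        frac_pos = torch.sigmoid(x / 0.1).mean(dim=0)
        mean_abs = x.abs().mean(dim=0)
        return (torch.relu(min_positive - frac_pos).sum()
                + torch.relu(mean_abs - max_abs).sum())

    x = torch.randn(200, 384, requires_grad=True)
    balancer_penalty(x).backward()  # gradients are zero once stats are in range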
], batch size: 38, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:58:40,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=33293.333333333336, ans=0.05 +2024-08-03 03:58:44,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=33293.333333333336, ans=0.0 +2024-08-03 03:58:45,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=33293.333333333336, ans=0.09899494936611666 +2024-08-03 03:58:46,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33330.0, ans=0.125 +2024-08-03 03:58:55,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=33330.0, ans=0.0 +2024-08-03 03:59:04,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=33366.666666666664, ans=0.125 +2024-08-03 03:59:17,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.365e+02 1.589e+02 1.919e+02 3.211e+02, threshold=3.178e+02, percent-clipped=2.0 +2024-08-03 03:59:19,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=33440.0, ans=0.0 +2024-08-03 03:59:23,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=33440.0, ans=0.003599999999999999 +2024-08-03 03:59:24,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=33440.0, ans=0.05 +2024-08-03 03:59:26,710 INFO [train.py:1114] (0/4) Epoch 3, batch 1850, loss[loss=0.3111, simple_loss=0.3746, pruned_loss=0.1238, over 13406.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.36, pruned_loss=0.1214, over 2636615.34 frames. ], batch size: 39, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 03:59:29,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=33476.666666666664, ans=0.0 +2024-08-03 03:59:31,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=33476.666666666664, ans=0.025 +2024-08-03 03:59:44,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=33513.333333333336, ans=0.125 +2024-08-03 03:59:54,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=33550.0, ans=0.125 +2024-08-03 04:00:03,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33586.666666666664, ans=0.0 +2024-08-03 04:00:04,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=33586.666666666664, ans=0.125 +2024-08-03 04:00:22,355 INFO [train.py:1114] (0/4) Epoch 3, batch 1900, loss[loss=0.3114, simple_loss=0.3748, pruned_loss=0.124, over 13322.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3604, pruned_loss=0.1208, over 2639750.54 frames. 
], batch size: 40, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 04:00:28,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=33660.0, ans=0.125 +2024-08-03 04:00:45,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=33733.333333333336, ans=0.125 +2024-08-03 04:01:01,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.394e+02 1.539e+02 1.838e+02 3.320e+02, threshold=3.078e+02, percent-clipped=1.0 +2024-08-03 04:01:10,650 INFO [train.py:1114] (0/4) Epoch 3, batch 1950, loss[loss=0.3108, simple_loss=0.3677, pruned_loss=0.1269, over 13565.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3621, pruned_loss=0.1215, over 2646643.08 frames. ], batch size: 36, lr: 3.33e-02, grad_scale: 16.0 +2024-08-03 04:01:15,517 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=15.0 +2024-08-03 04:01:29,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=33880.0, ans=0.2 +2024-08-03 04:02:00,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.26 vs. limit=6.0 +2024-08-03 04:02:01,770 INFO [train.py:1114] (0/4) Epoch 3, batch 2000, loss[loss=0.23, simple_loss=0.2975, pruned_loss=0.08126, over 13569.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3626, pruned_loss=0.1225, over 2636469.57 frames. ], batch size: 31, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:02:22,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=34063.333333333336, ans=0.125 +2024-08-03 04:02:33,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=34100.0, ans=0.125 +2024-08-03 04:02:38,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=34136.666666666664, ans=0.0 +2024-08-03 04:02:48,276 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.90 vs. limit=5.0 +2024-08-03 04:02:48,520 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.429e+02 1.657e+02 2.036e+02 4.223e+02, threshold=3.314e+02, percent-clipped=3.0 +2024-08-03 04:02:56,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=34173.333333333336, ans=0.0 +2024-08-03 04:02:57,985 INFO [train.py:1114] (0/4) Epoch 3, batch 2050, loss[loss=0.2644, simple_loss=0.3316, pruned_loss=0.09855, over 13444.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3609, pruned_loss=0.1218, over 2633741.39 frames. 
], batch size: 32, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:03:05,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=34210.0, ans=0.2 +2024-08-03 04:03:17,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=34246.666666666664, ans=0.04949747468305833 +2024-08-03 04:03:22,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34283.333333333336, ans=0.125 +2024-08-03 04:03:28,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=34320.0, ans=0.2 +2024-08-03 04:03:29,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=34320.0, ans=0.0 +2024-08-03 04:03:48,238 INFO [train.py:1114] (0/4) Epoch 3, batch 2100, loss[loss=0.3206, simple_loss=0.3742, pruned_loss=0.1335, over 13538.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3598, pruned_loss=0.1209, over 2639411.25 frames. ], batch size: 37, lr: 3.31e-02, grad_scale: 32.0 +2024-08-03 04:03:48,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=34393.333333333336, ans=0.125 +2024-08-03 04:04:00,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=34430.0, ans=0.0 +2024-08-03 04:04:29,651 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.385e+02 1.595e+02 1.788e+02 2.690e+02, threshold=3.190e+02, percent-clipped=1.0 +2024-08-03 04:04:31,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.54 vs. limit=10.0 +2024-08-03 04:04:34,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=34540.0, ans=22.5 +2024-08-03 04:04:37,868 INFO [train.py:1114] (0/4) Epoch 3, batch 2150, loss[loss=0.3336, simple_loss=0.3786, pruned_loss=0.1443, over 13547.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3588, pruned_loss=0.1202, over 2647369.29 frames. ], batch size: 36, lr: 3.31e-02, grad_scale: 16.0 +2024-08-03 04:04:57,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.04 vs. limit=22.5 +2024-08-03 04:05:01,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=34613.333333333336, ans=0.09899494936611666 +2024-08-03 04:05:04,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.51 vs. 
limit=15.0 +2024-08-03 04:05:21,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=34686.666666666664, ans=0.0 +2024-08-03 04:05:26,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=34686.666666666664, ans=0.0 +2024-08-03 04:05:30,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=34686.666666666664, ans=0.025 +2024-08-03 04:05:34,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=34723.333333333336, ans=0.125 +2024-08-03 04:05:40,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=34723.333333333336, ans=0.04949747468305833 +2024-08-03 04:05:42,224 INFO [train.py:1114] (0/4) Epoch 3, batch 2200, loss[loss=0.3454, simple_loss=0.3959, pruned_loss=0.1474, over 13393.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3593, pruned_loss=0.1204, over 2645598.77 frames. ], batch size: 39, lr: 3.30e-02, grad_scale: 16.0 +2024-08-03 04:05:47,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=34760.0, ans=0.1 +2024-08-03 04:05:48,565 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:05:52,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.88 vs. limit=15.0 +2024-08-03 04:06:01,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=34833.333333333336, ans=0.125 +2024-08-03 04:06:05,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=34833.333333333336, ans=0.125 +2024-08-03 04:06:24,642 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.404e+02 1.621e+02 1.995e+02 2.772e+02, threshold=3.241e+02, percent-clipped=0.0 +2024-08-03 04:06:29,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.46 vs. limit=10.0 +2024-08-03 04:06:34,753 INFO [train.py:1114] (0/4) Epoch 3, batch 2250, loss[loss=0.2854, simple_loss=0.3449, pruned_loss=0.1129, over 13347.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3588, pruned_loss=0.1201, over 2642888.99 frames. ], batch size: 37, lr: 3.30e-02, grad_scale: 16.0 +2024-08-03 04:06:35,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=34943.333333333336, ans=0.025 +2024-08-03 04:06:36,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.83 vs. 
limit=15.0 +2024-08-03 04:06:37,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=34943.333333333336, ans=0.0 +2024-08-03 04:06:38,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34943.333333333336, ans=0.125 +2024-08-03 04:07:06,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=35053.333333333336, ans=0.125 +2024-08-03 04:07:20,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.45 vs. limit=15.0 +2024-08-03 04:07:20,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.99 vs. limit=15.0 +2024-08-03 04:07:20,673 INFO [train.py:1114] (0/4) Epoch 3, batch 2300, loss[loss=0.2807, simple_loss=0.3372, pruned_loss=0.1121, over 13593.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3572, pruned_loss=0.1195, over 2639371.15 frames. ], batch size: 33, lr: 3.29e-02, grad_scale: 16.0 +2024-08-03 04:07:26,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0 +2024-08-03 04:07:39,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.00 vs. limit=15.0 +2024-08-03 04:07:41,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=35200.0, ans=0.125 +2024-08-03 04:07:59,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35273.333333333336, ans=0.125 +2024-08-03 04:08:01,044 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.453e+02 1.713e+02 2.224e+02 5.491e+02, threshold=3.425e+02, percent-clipped=5.0 +2024-08-03 04:08:03,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=35273.333333333336, ans=0.125 +2024-08-03 04:08:11,152 INFO [train.py:1114] (0/4) Epoch 3, batch 2350, loss[loss=0.2648, simple_loss=0.3341, pruned_loss=0.09773, over 13549.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3572, pruned_loss=0.1196, over 2642088.74 frames. 
], batch size: 38, lr: 3.29e-02, grad_scale: 16.0 +2024-08-03 04:08:29,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35383.333333333336, ans=0.125 +2024-08-03 04:08:35,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=35383.333333333336, ans=0.125 +2024-08-03 04:08:39,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=35383.333333333336, ans=0.025 +2024-08-03 04:08:40,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35420.0, ans=0.1 +2024-08-03 04:08:41,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=35420.0, ans=0.0 +2024-08-03 04:08:50,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=6.0 +2024-08-03 04:08:53,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=35456.666666666664, ans=0.2 +2024-08-03 04:09:00,929 INFO [train.py:1114] (0/4) Epoch 3, batch 2400, loss[loss=0.2949, simple_loss=0.3556, pruned_loss=0.1171, over 13536.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3582, pruned_loss=0.1196, over 2642579.13 frames. ], batch size: 35, lr: 3.28e-02, grad_scale: 32.0 +2024-08-03 04:09:02,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=35493.333333333336, ans=0.0 +2024-08-03 04:09:20,607 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:09:31,574 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:09:34,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=35603.333333333336, ans=0.125 +2024-08-03 04:09:38,722 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.322e+02 1.493e+02 1.831e+02 3.002e+02, threshold=2.985e+02, percent-clipped=0.0 +2024-08-03 04:09:42,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.57 vs. limit=15.0 +2024-08-03 04:09:47,346 INFO [train.py:1114] (0/4) Epoch 3, batch 2450, loss[loss=0.3042, simple_loss=0.3665, pruned_loss=0.1209, over 13358.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3591, pruned_loss=0.1202, over 2632445.90 frames. ], batch size: 37, lr: 3.28e-02, grad_scale: 32.0 +2024-08-03 04:10:05,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.00 vs. limit=15.0 +2024-08-03 04:10:14,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.31 vs. 
limit=6.0 +2024-08-03 04:10:15,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=35750.0, ans=0.09899494936611666 +2024-08-03 04:10:16,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35750.0, ans=0.125 +2024-08-03 04:10:17,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35750.0, ans=0.125 +2024-08-03 04:10:20,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35786.666666666664, ans=0.1 +2024-08-03 04:10:22,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=35786.666666666664, ans=0.125 +2024-08-03 04:10:39,195 INFO [train.py:1114] (0/4) Epoch 3, batch 2500, loss[loss=0.3246, simple_loss=0.3856, pruned_loss=0.1319, over 13425.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.359, pruned_loss=0.1196, over 2636184.33 frames. ], batch size: 39, lr: 3.27e-02, grad_scale: 32.0 +2024-08-03 04:11:03,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.45 vs. limit=22.5 +2024-08-03 04:11:10,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35970.0, ans=0.125 +2024-08-03 04:11:15,835 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.323e+02 1.438e+02 1.681e+02 3.376e+02, threshold=2.876e+02, percent-clipped=2.0 +2024-08-03 04:11:27,376 INFO [train.py:1114] (0/4) Epoch 3, batch 2550, loss[loss=0.271, simple_loss=0.3306, pruned_loss=0.1057, over 13532.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.359, pruned_loss=0.1201, over 2638431.34 frames. ], batch size: 31, lr: 3.27e-02, grad_scale: 32.0 +2024-08-03 04:11:33,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36043.333333333336, ans=0.125 +2024-08-03 04:11:49,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=36080.0, ans=0.09899494936611666 +2024-08-03 04:11:58,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36116.666666666664, ans=0.1 +2024-08-03 04:12:20,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=36153.333333333336, ans=0.2 +2024-08-03 04:12:38,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36190.0, ans=0.1 +2024-08-03 04:12:40,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=36190.0, ans=0.04949747468305833 +2024-08-03 04:12:43,971 INFO [train.py:1114] (0/4) Epoch 3, batch 2600, loss[loss=0.2836, simple_loss=0.3366, pruned_loss=0.1153, over 13564.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3593, pruned_loss=0.1202, over 2638216.42 frames. ], batch size: 36, lr: 3.26e-02, grad_scale: 32.0 +2024-08-03 04:12:52,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.05 vs. 
limit=15.0 +2024-08-03 04:12:53,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=15.0 +2024-08-03 04:13:02,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-08-03 04:13:33,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36336.666666666664, ans=0.125 +2024-08-03 04:13:39,922 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.332e+02 1.510e+02 1.763e+02 2.662e+02, threshold=3.019e+02, percent-clipped=0.0 +2024-08-03 04:13:44,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=36373.333333333336, ans=15.0 +2024-08-03 04:13:47,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36373.333333333336, ans=0.1 +2024-08-03 04:13:49,884 INFO [train.py:1114] (0/4) Epoch 3, batch 2650, loss[loss=0.3141, simple_loss=0.3743, pruned_loss=0.1269, over 13338.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3598, pruned_loss=0.1201, over 2641290.10 frames. ], batch size: 46, lr: 3.26e-02, grad_scale: 32.0 +2024-08-03 04:13:51,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.91 vs. limit=15.0 +2024-08-03 04:14:06,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=15.0 +2024-08-03 04:14:20,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=36483.333333333336, ans=0.125 +2024-08-03 04:14:22,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=36483.333333333336, ans=0.0 +2024-08-03 04:14:23,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36483.333333333336, ans=0.125 +2024-08-03 04:14:33,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36520.0, ans=0.125 +2024-08-03 04:14:51,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=36556.666666666664, ans=0.125 +2024-08-03 04:14:52,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36556.666666666664, ans=0.1 +2024-08-03 04:14:55,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.85 vs. limit=15.0 +2024-08-03 04:14:57,823 INFO [train.py:1114] (0/4) Epoch 3, batch 2700, loss[loss=0.335, simple_loss=0.3848, pruned_loss=0.1426, over 13542.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3606, pruned_loss=0.1207, over 2638034.44 frames. 
], batch size: 40, lr: 3.25e-02, grad_scale: 16.0 +2024-08-03 04:15:23,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=36630.0, ans=0.125 +2024-08-03 04:18:40,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=36666.666666666664, ans=0.0 +2024-08-03 04:19:15,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.378e+02 1.616e+02 1.965e+02 4.698e+02, threshold=3.232e+02, percent-clipped=3.0 +2024-08-03 04:19:22,265 INFO [train.py:1114] (0/4) Epoch 3, batch 2750, loss[loss=0.2772, simple_loss=0.3405, pruned_loss=0.107, over 13329.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3588, pruned_loss=0.1198, over 2636304.86 frames. ], batch size: 34, lr: 3.24e-02, grad_scale: 16.0 +2024-08-03 04:19:25,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=36776.666666666664, ans=0.5 +2024-08-03 04:19:34,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=36813.333333333336, ans=0.2 +2024-08-03 04:19:38,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=36813.333333333336, ans=0.05 +2024-08-03 04:19:47,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=36850.0, ans=0.0028586956521739135 +2024-08-03 04:19:47,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36850.0, ans=0.125 +2024-08-03 04:19:49,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36850.0, ans=0.1 +2024-08-03 04:19:59,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.53 vs. limit=6.0 +2024-08-03 04:20:01,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=36923.333333333336, ans=0.0 +2024-08-03 04:20:09,782 INFO [train.py:1114] (0/4) Epoch 3, batch 2800, loss[loss=0.395, simple_loss=0.4088, pruned_loss=0.1907, over 9350.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3591, pruned_loss=0.1203, over 2626934.31 frames. ], batch size: 97, lr: 3.24e-02, grad_scale: 32.0 +2024-08-03 04:20:24,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=36996.666666666664, ans=0.0 +2024-08-03 04:20:29,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.25 vs. 
limit=15.0 +2024-08-03 04:20:31,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=37033.333333333336, ans=0.125 +2024-08-03 04:20:32,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=37033.333333333336, ans=10.0 +2024-08-03 04:20:34,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=37033.333333333336, ans=0.125 +2024-08-03 04:20:39,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.85 vs. limit=15.0 +2024-08-03 04:20:46,347 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.344e+02 1.514e+02 1.782e+02 3.763e+02, threshold=3.028e+02, percent-clipped=1.0 +2024-08-03 04:20:53,505 INFO [train.py:1114] (0/4) Epoch 3, batch 2850, loss[loss=0.2854, simple_loss=0.3377, pruned_loss=0.1166, over 13364.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3593, pruned_loss=0.1208, over 2621076.98 frames. ], batch size: 35, lr: 3.23e-02, grad_scale: 32.0 +2024-08-03 04:20:54,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.23 vs. limit=22.5 +2024-08-03 04:20:56,220 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:21:10,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=37216.666666666664, ans=0.125 +2024-08-03 04:21:18,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=37216.666666666664, ans=0.2 +2024-08-03 04:21:19,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=37253.333333333336, ans=0.2 +2024-08-03 04:21:21,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=37253.333333333336, ans=0.05 +2024-08-03 04:21:41,446 INFO [train.py:1114] (0/4) Epoch 3, batch 2900, loss[loss=0.2558, simple_loss=0.3289, pruned_loss=0.09131, over 13384.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3596, pruned_loss=0.1202, over 2631488.19 frames. ], batch size: 36, lr: 3.23e-02, grad_scale: 32.0 +2024-08-03 04:21:43,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.80 vs. 
limit=15.0 +2024-08-03 04:21:47,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=37326.666666666664, ans=0.125 +2024-08-03 04:21:51,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=37363.333333333336, ans=0.0 +2024-08-03 04:21:57,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=37363.333333333336, ans=0.0027471014492753618 +2024-08-03 04:21:58,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=37400.0, ans=0.125 +2024-08-03 04:22:01,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.86 vs. limit=15.0 +2024-08-03 04:22:03,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37400.0, ans=0.1 +2024-08-03 04:22:10,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.46 vs. limit=22.5 +2024-08-03 04:22:19,113 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.112e+02 1.360e+02 1.545e+02 1.848e+02 3.511e+02, threshold=3.091e+02, percent-clipped=1.0 +2024-08-03 04:22:26,070 INFO [train.py:1114] (0/4) Epoch 3, batch 2950, loss[loss=0.2706, simple_loss=0.3394, pruned_loss=0.101, over 13332.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.358, pruned_loss=0.1193, over 2630706.22 frames. ], batch size: 34, lr: 3.22e-02, grad_scale: 32.0 +2024-08-03 04:22:35,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=37546.666666666664, ans=0.0 +2024-08-03 04:22:36,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=37546.666666666664, ans=0.125 +2024-08-03 04:22:42,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37546.666666666664, ans=0.125 +2024-08-03 04:22:43,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=37583.333333333336, ans=0.125 +2024-08-03 04:22:44,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=37583.333333333336, ans=0.0 +2024-08-03 04:22:54,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37620.0, ans=0.0 +2024-08-03 04:22:57,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37620.0, ans=0.1 +2024-08-03 04:23:08,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=37656.666666666664, ans=0.035 +2024-08-03 04:23:11,264 INFO [train.py:1114] (0/4) Epoch 3, batch 3000, loss[loss=0.259, simple_loss=0.3335, pruned_loss=0.09224, over 13535.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3571, pruned_loss=0.1187, over 2630818.04 frames. 
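
The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...` lines summarize adaptive gradient clipping: the optimizer tracks recent gradient norms, and in every such warning in this log the reported threshold equals `Clipping_scale` times the reported median (here 2.0 x 1.545e+02 ~ 3.091e+02). A minimal sketch of a clipper with that behaviour follows; all names are illustrative, this is not icefall's actual `ScaledAdam` implementation:

```python
from collections import deque

import torch


class MedianGradClipper:
    """Clip gradients to clipping_scale x the median of recent grad norms.

    Illustrative sketch only, not icefall's ScaledAdam internals.
    """

    def __init__(self, clipping_scale: float = 2.0, history: int = 500):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent total grad norms
        self.num_clipped = 0
        self.num_steps = 0

    def __call__(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        total_norm = torch.norm(
            torch.stack([p.grad.norm() for p in params])
        ).item()
        self.norms.append(total_norm)
        self.num_steps += 1

        # Quartiles of the recent history, as in the log's WARNING lines.
        q = torch.quantile(
            torch.tensor(list(self.norms)),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * q[2].item()  # scale x median

        if total_norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / total_norm)
        return 100.0 * self.num_clipped / self.num_steps  # percent clipped
```

Under that reading, the logged `percent-clipped` is simply the share of recent updates whose total norm exceeded the median-based threshold.
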
], batch size: 37, lr: 3.22e-02, grad_scale: 16.0 +2024-08-03 04:23:11,265 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 04:23:46,255 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2077, 3.3555, 3.1602, 2.5820, 2.3489, 3.8247, 2.0155, 2.9500], + device='cuda:0') +2024-08-03 04:23:46,988 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.0591, 1.2992, 3.2233, 3.2941], device='cuda:0') +2024-08-03 04:23:49,063 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2357, simple_loss=0.3301, pruned_loss=0.07069, over 944034.00 frames. +2024-08-03 04:23:49,063 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 04:23:50,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=37693.333333333336, ans=0.125 +2024-08-03 04:23:51,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=37693.333333333336, ans=0.2 +2024-08-03 04:24:04,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=37730.0, ans=0.0 +2024-08-03 04:24:24,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=37766.666666666664, ans=0.025 +2024-08-03 04:25:17,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=37803.333333333336, ans=0.125 +2024-08-03 04:25:32,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.344e+02 1.558e+02 1.814e+02 2.891e+02, threshold=3.117e+02, percent-clipped=0.0 +2024-08-03 04:25:42,606 INFO [train.py:1114] (0/4) Epoch 3, batch 3050, loss[loss=0.2937, simple_loss=0.351, pruned_loss=0.1182, over 13553.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3584, pruned_loss=0.1192, over 2627934.74 frames. ], batch size: 35, lr: 3.21e-02, grad_scale: 8.0 +2024-08-03 04:25:49,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37876.666666666664, ans=0.1 +2024-08-03 04:25:56,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.40 vs. limit=15.0 +2024-08-03 04:26:04,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.84 vs. limit=15.0 +2024-08-03 04:26:21,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37986.666666666664, ans=0.1 +2024-08-03 04:26:35,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=37986.666666666664, ans=0.0 +2024-08-03 04:27:00,283 INFO [train.py:1114] (0/4) Epoch 3, batch 3100, loss[loss=0.3043, simple_loss=0.3653, pruned_loss=0.1216, over 13341.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3579, pruned_loss=0.119, over 2627666.19 frames. 
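
During the validation pass above, `zipformer.py:1858` dumps `attn_weights_entropy` tensors for selected self-attention modules; per-head entropies near zero would indicate heads collapsing onto a single frame. The exact axes and normalization of the dumped tensors are not visible in the log, so the following is only a hypothetical sketch of such a diagnostic:

```python
import torch


def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """Mean entropy (in nats) of attention distributions, per head.

    attn_weights: (num_heads, tgt_len, src_len), rows already softmaxed.
    Hypothetical helper mirroring the log's diagnostic, not zipformer.py.
    """
    eps = 1.0e-20
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return entropy.mean(dim=-1)  # average over target positions -> per head


# Near-uniform attention has high entropy, peaked attention low entropy.
uniform = torch.full((2, 4, 100), 1.0 / 100)
print(attn_weights_entropy(uniform))  # ~log(100) = 4.6 per head
```
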
], batch size: 46, lr: 3.21e-02, grad_scale: 8.0 +2024-08-03 04:27:01,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=38060.0, ans=0.2 +2024-08-03 04:27:18,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=38096.666666666664, ans=0.002587681159420291 +2024-08-03 04:27:23,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38096.666666666664, ans=0.1 +2024-08-03 04:27:23,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.53 vs. limit=15.0 +2024-08-03 04:27:47,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=38170.0, ans=0.0025717391304347827 +2024-08-03 04:27:47,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38170.0, ans=0.125 +2024-08-03 04:27:53,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.343e+02 1.458e+02 1.761e+02 2.606e+02, threshold=2.915e+02, percent-clipped=0.0 +2024-08-03 04:27:54,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38206.666666666664, ans=0.1 +2024-08-03 04:27:58,280 INFO [train.py:1114] (0/4) Epoch 3, batch 3150, loss[loss=0.3052, simple_loss=0.3745, pruned_loss=0.118, over 12969.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3578, pruned_loss=0.1189, over 2629326.95 frames. ], batch size: 48, lr: 3.20e-02, grad_scale: 8.0 +2024-08-03 04:28:04,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.28 vs. limit=15.0 +2024-08-03 04:28:04,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38243.333333333336, ans=0.1 +2024-08-03 04:28:13,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=38280.0, ans=0.125 +2024-08-03 04:28:15,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38316.666666666664, ans=0.125 +2024-08-03 04:28:17,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=38316.666666666664, ans=0.125 +2024-08-03 04:28:21,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=38316.666666666664, ans=15.0 +2024-08-03 04:28:39,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38390.0, ans=0.1 +2024-08-03 04:28:42,454 INFO [train.py:1114] (0/4) Epoch 3, batch 3200, loss[loss=0.3051, simple_loss=0.3631, pruned_loss=0.1236, over 13542.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3574, pruned_loss=0.1189, over 2635020.06 frames. 
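
Most INFO lines in this log track `ScheduledFloat` values: hyperparameters such as dropout probabilities, skip rates and balancer bounds that are annealed as a function of `batch_count` rather than held fixed. What these lines record amounts to a piecewise-linear schedule over batch count; the class below is a sketch of that idea, not the actual `scaling.py` implementation:

```python
import bisect


class PiecewiseLinearSchedule:
    """Value interpolated linearly between (batch_count, value) breakpoints.

    Sketch of the idea behind the log's ScheduledFloat entries; icefall's
    real class lives in scaling.py and carries more machinery.
    """

    def __init__(self, *points: tuple[float, float]):
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        xs = [x for x, _ in self.points]
        i = bisect.bisect_right(xs, batch_count)
        if i == 0:
            return self.points[0][1]   # before the first breakpoint
        if i == len(self.points):
            return self.points[-1][1]  # past the last breakpoint
        (x0, y0), (x1, y1) = self.points[i - 1], self.points[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)


# E.g. a dropout that decays from 0.3 to 0.1 over the first 20k batches:
dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(37000.0))  # past the last breakpoint -> 0.1
```
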
], batch size: 37, lr: 3.20e-02, grad_scale: 16.0 +2024-08-03 04:28:58,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38463.333333333336, ans=0.125 +2024-08-03 04:29:20,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.433e+02 1.626e+02 1.828e+02 2.707e+02, threshold=3.253e+02, percent-clipped=0.0 +2024-08-03 04:29:26,740 INFO [train.py:1114] (0/4) Epoch 3, batch 3250, loss[loss=0.2784, simple_loss=0.3519, pruned_loss=0.1025, over 13374.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3573, pruned_loss=0.1181, over 2639160.79 frames. ], batch size: 38, lr: 3.19e-02, grad_scale: 16.0 +2024-08-03 04:29:27,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.31 vs. limit=22.5 +2024-08-03 04:29:27,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38610.0, ans=0.125 +2024-08-03 04:29:28,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38610.0, ans=0.125 +2024-08-03 04:29:41,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=38646.666666666664, ans=0.125 +2024-08-03 04:29:45,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=38683.333333333336, ans=0.0 +2024-08-03 04:29:51,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=38683.333333333336, ans=0.125 +2024-08-03 04:30:02,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.89 vs. limit=15.0 +2024-08-03 04:30:04,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38756.666666666664, ans=0.125 +2024-08-03 04:30:10,326 INFO [train.py:1114] (0/4) Epoch 3, batch 3300, loss[loss=0.2998, simple_loss=0.3671, pruned_loss=0.1163, over 12803.00 frames. ], tot_loss[loss=0.2945, simple_loss=0.3553, pruned_loss=0.1169, over 2640208.21 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-03 04:30:17,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.07 vs. 
limit=22.5 +2024-08-03 04:30:28,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=38866.666666666664, ans=0.125 +2024-08-03 04:30:31,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=38866.666666666664, ans=10.0 +2024-08-03 04:30:39,993 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:30:45,963 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:30:51,257 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:30:51,796 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.360e+02 1.548e+02 1.881e+02 7.173e+02, threshold=3.096e+02, percent-clipped=4.0 +2024-08-03 04:30:55,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38940.0, ans=0.1 +2024-08-03 04:30:58,415 INFO [train.py:1114] (0/4) Epoch 3, batch 3350, loss[loss=0.3258, simple_loss=0.3874, pruned_loss=0.1322, over 13084.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3567, pruned_loss=0.1179, over 2629613.54 frames. ], batch size: 48, lr: 3.18e-02, grad_scale: 16.0 +2024-08-03 04:31:06,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=38976.666666666664, ans=0.09899494936611666 +2024-08-03 04:31:11,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39013.333333333336, ans=0.1 +2024-08-03 04:31:17,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=39050.0, ans=0.125 +2024-08-03 04:31:28,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=39086.666666666664, ans=0.0023724637681159424 +2024-08-03 04:31:28,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0 +2024-08-03 04:31:43,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=39086.666666666664, ans=0.125 +2024-08-03 04:32:02,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=39123.333333333336, ans=0.025 +2024-08-03 04:32:13,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39123.333333333336, ans=0.125 +2024-08-03 04:32:21,172 INFO [train.py:1114] (0/4) Epoch 3, batch 3400, loss[loss=0.2573, simple_loss=0.3216, pruned_loss=0.09645, over 13545.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3563, pruned_loss=0.1179, over 2624237.23 frames. 
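
The `Whitening: name=... metric=X vs. limit=Y` lines measure how far a module's activations are from being white, i.e. from having a covariance proportional to the identity: a metric of 1.0 means all covariance eigenvalues are equal, larger values mean a few directions dominate, and a corrective penalty only applies once the metric exceeds the limit. One way to compute such a metric, as a sketch under that reading (not the `scaling.py` code):

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Equals 1.0 if the per-group feature covariance has equal eigenvalues.

    x: (num_frames, num_channels). Sketch of the quantity the log's
    "metric=... vs. limit=..." lines appear to track; not scaling.py.
    """
    frames, channels = x.shape
    x = x.reshape(frames, num_groups, channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    # Per-group covariance, shape (groups, c, c) with c = channels / groups.
    cov = torch.einsum("fgi,fgj->gij", x, x) / frames
    c = cov.shape[-1]
    trace = cov.diagonal(dim1=-2, dim2=-1).sum(-1)    # sum of eigenvalues
    trace_sq = (cov * cov).sum(dim=(-2, -1))          # sum of squared eigenvalues
    return (trace_sq * c / trace.pow(2)).mean()       # 1.0 when perfectly white


x = torch.randn(20000, 64)  # roughly white features -> metric close to 1
print(whitening_metric(x))
```
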
], batch size: 31, lr: 3.18e-02, grad_scale: 16.0 +2024-08-03 04:32:30,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=39196.666666666664, ans=0.002348550724637682 +2024-08-03 04:32:43,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=39233.333333333336, ans=0.2 +2024-08-03 04:32:50,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.42 vs. limit=10.0 +2024-08-03 04:33:16,817 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.382e+02 1.601e+02 1.847e+02 2.492e+02, threshold=3.202e+02, percent-clipped=0.0 +2024-08-03 04:33:21,222 INFO [train.py:1114] (0/4) Epoch 3, batch 3450, loss[loss=0.3613, simple_loss=0.4112, pruned_loss=0.1557, over 13000.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3566, pruned_loss=0.1179, over 2627672.74 frames. ], batch size: 52, lr: 3.17e-02, grad_scale: 8.0 +2024-08-03 04:33:22,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=39343.333333333336, ans=0.025 +2024-08-03 04:33:23,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=39343.333333333336, ans=0.125 +2024-08-03 04:33:24,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0 +2024-08-03 04:33:27,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=39343.333333333336, ans=0.125 +2024-08-03 04:33:29,068 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:33:45,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=39453.333333333336, ans=0.125 +2024-08-03 04:33:48,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39453.333333333336, ans=0.1 +2024-08-03 04:33:54,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=39490.0, ans=0.125 +2024-08-03 04:34:02,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=39490.0, ans=0.05 +2024-08-03 04:34:03,949 INFO [train.py:1114] (0/4) Epoch 3, batch 3500, loss[loss=0.3044, simple_loss=0.3593, pruned_loss=0.1248, over 13533.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3567, pruned_loss=0.1184, over 2629972.66 frames. ], batch size: 34, lr: 3.17e-02, grad_scale: 8.0 +2024-08-03 04:34:09,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=39526.666666666664, ans=0.0 +2024-08-03 04:34:11,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0 +2024-08-03 04:34:20,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.85 vs. 
limit=15.0 +2024-08-03 04:34:25,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39600.0, ans=0.125 +2024-08-03 04:34:25,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=39600.0, ans=0.2 +2024-08-03 04:34:42,761 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.446e+02 1.687e+02 2.018e+02 4.896e+02, threshold=3.374e+02, percent-clipped=2.0 +2024-08-03 04:34:45,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39673.333333333336, ans=0.0 +2024-08-03 04:34:47,000 INFO [train.py:1114] (0/4) Epoch 3, batch 3550, loss[loss=0.2976, simple_loss=0.3638, pruned_loss=0.1156, over 12574.00 frames. ], tot_loss[loss=0.3002, simple_loss=0.3596, pruned_loss=0.1204, over 2628281.47 frames. ], batch size: 58, lr: 3.16e-02, grad_scale: 8.0 +2024-08-03 04:35:34,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39856.666666666664, ans=0.0 +2024-08-03 04:35:36,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=39856.666666666664, ans=0.09899494936611666 +2024-08-03 04:35:39,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=39856.666666666664, ans=0.04949747468305833 +2024-08-03 04:35:40,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39893.333333333336, ans=0.125 +2024-08-03 04:35:40,851 INFO [train.py:1114] (0/4) Epoch 3, batch 3600, loss[loss=0.3556, simple_loss=0.3938, pruned_loss=0.1587, over 9601.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3663, pruned_loss=0.1277, over 2485790.46 frames. ], batch size: 96, lr: 3.16e-02, grad_scale: 16.0 +2024-08-03 04:35:47,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=39893.333333333336, ans=0.1 +2024-08-03 04:35:52,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39930.0, ans=0.0 +2024-08-03 04:35:54,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=39930.0, ans=0.05 +2024-08-03 04:35:55,756 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.15 vs. limit=15.0 +2024-08-03 04:35:59,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=39966.666666666664, ans=0.2 +2024-08-03 04:36:16,438 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-3.pt +2024-08-03 04:37:20,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=12.0 +2024-08-03 04:37:20,593 INFO [train.py:1114] (0/4) Epoch 4, batch 0, loss[loss=0.2786, simple_loss=0.3414, pruned_loss=0.1079, over 13339.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.3414, pruned_loss=0.1079, over 13339.00 frames. 
], batch size: 33, lr: 2.95e-02, grad_scale: 32.0 +2024-08-03 04:37:20,594 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 04:37:30,574 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2412, simple_loss=0.337, pruned_loss=0.07274, over 944034.00 frames. +2024-08-03 04:37:30,575 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 04:43:26,869 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.350e+02 1.466e+02 1.683e+02 2.712e+02, threshold=2.931e+02, percent-clipped=0.0 +2024-08-03 04:43:35,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0 +2024-08-03 04:45:39,729 INFO [train.py:1114] (0/4) Epoch 4, batch 50, loss[loss=0.2537, simple_loss=0.315, pruned_loss=0.0962, over 13419.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3608, pruned_loss=0.1209, over 577578.12 frames. ], batch size: 32, lr: 2.95e-02, grad_scale: 32.0 +2024-08-03 04:45:39,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=40227.0, ans=0.125 +2024-08-03 04:45:50,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=40263.666666666664, ans=0.125 +2024-08-03 04:46:00,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40300.333333333336, ans=0.0 +2024-08-03 04:46:02,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=6.0 +2024-08-03 04:46:06,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=40300.333333333336, ans=0.125 +2024-08-03 04:46:08,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=40300.333333333336, ans=0.2 +2024-08-03 04:46:13,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40337.0, ans=0.125 +2024-08-03 04:46:17,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40337.0, ans=0.1 +2024-08-03 04:46:18,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=40337.0, ans=0.05 +2024-08-03 04:46:26,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=40373.666666666664, ans=0.025 +2024-08-03 04:46:29,584 INFO [train.py:1114] (0/4) Epoch 4, batch 100, loss[loss=0.2995, simple_loss=0.3569, pruned_loss=0.121, over 13532.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3598, pruned_loss=0.119, over 1025466.70 frames. 
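
The learning rate logged here decays smoothly within an epoch (3.25e-02 down to 3.16e-02 across epoch 3) and steps down at the epoch boundary (2.95e-02 at the start of epoch 4). That shape matches an Eden-style scheduler, in which the LR is a product of a batch-dependent and an epoch-dependent factor; the logged epoch-boundary drop of about 7% is consistent with `lr_epochs` around 3.5. A sketch of the functional form, with placeholder constants rather than this run's exact settings:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    """Eden-style LR: smooth within-epoch decay plus an epoch-level decay.

    Sketch of the functional form only; base_lr, lr_batches and lr_epochs
    are placeholders, not the exact constants of this run.
    """
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor


# The epoch factor alone steps the LR down by ~7% between epochs 3 and 4,
# matching the logged 3.16e-02 -> 2.95e-02 drop.
print(eden_lr(0.05, 40000, 3) / eden_lr(0.05, 40000, 4))  # ~1.07
```
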
], batch size: 35, lr: 2.94e-02, grad_scale: 32.0 +2024-08-03 04:46:33,366 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.339e+02 1.516e+02 1.849e+02 3.720e+02, threshold=3.031e+02, percent-clipped=4.0 +2024-08-03 04:46:40,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=40447.0, ans=0.0 +2024-08-03 04:46:43,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=40447.0, ans=0.0 +2024-08-03 04:46:50,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.07 vs. limit=15.0 +2024-08-03 04:46:52,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.04 vs. limit=15.0 +2024-08-03 04:47:07,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=40557.0, ans=0.2 +2024-08-03 04:47:17,294 INFO [train.py:1114] (0/4) Epoch 4, batch 150, loss[loss=0.258, simple_loss=0.3176, pruned_loss=0.09923, over 13424.00 frames. ], tot_loss[loss=0.2941, simple_loss=0.3557, pruned_loss=0.1162, over 1386835.46 frames. ], batch size: 32, lr: 2.94e-02, grad_scale: 32.0 +2024-08-03 04:47:28,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=40630.333333333336, ans=0.0 +2024-08-03 04:47:34,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=40630.333333333336, ans=0.125 +2024-08-03 04:47:51,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.17 vs. limit=15.0 +2024-08-03 04:48:04,789 INFO [train.py:1114] (0/4) Epoch 4, batch 200, loss[loss=0.3253, simple_loss=0.3815, pruned_loss=0.1345, over 12461.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3521, pruned_loss=0.1142, over 1666162.47 frames. ], batch size: 58, lr: 2.93e-02, grad_scale: 16.0 +2024-08-03 04:48:05,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=40777.0, ans=0.2 +2024-08-03 04:48:09,217 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.266e+02 1.437e+02 1.719e+02 2.508e+02, threshold=2.875e+02, percent-clipped=0.0 +2024-08-03 04:48:12,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=40777.0, ans=0.02 +2024-08-03 04:48:17,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40813.666666666664, ans=0.1 +2024-08-03 04:48:33,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=40887.0, ans=0.2 +2024-08-03 04:48:50,621 INFO [train.py:1114] (0/4) Epoch 4, batch 250, loss[loss=0.2765, simple_loss=0.3449, pruned_loss=0.104, over 13337.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3528, pruned_loss=0.115, over 1884502.38 frames. 
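
Each training line pairs the current batch's loss "over N frames" with a `tot_loss` "over M frames" where M is fractional (e.g. 1884502.38 above). A fractional frame count suggests `tot_loss` is a decayed, frame-weighted running average rather than a plain sum. The sketch below is one consistent reading; the decay constant is an assumption, chosen so that batches of roughly 13k frames reach a steady-state count of 13000 / 0.005 = 2.6e6 frames, the magnitude seen in the log:

```python
class RunningLoss:
    """Decayed frame-weighted loss average, like the log's tot_loss.

    Sketch only: the decay constant is illustrative, and icefall's real
    tracker accumulates several losses (simple_loss, pruned_loss) at once.
    """

    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0  # decayed sum of (loss * frames)
        self.frames = 0.0    # decayed frame count -- goes fractional

    def update(self, loss: float, frames: float) -> None:
        self.loss_sum = self.loss_sum * self.decay + loss * frames
        self.frames = self.frames * self.decay + frames

    @property
    def value(self) -> float:
        return self.loss_sum / self.frames


tot = RunningLoss()
for step in range(5000):
    tot.update(loss=0.29, frames=13000.0)
print(f"tot_loss={tot.value:.4f} over {tot.frames:.2f} frames")
```
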
], batch size: 46, lr: 2.93e-02, grad_scale: 16.0 +2024-08-03 04:49:02,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40997.0, ans=0.0 +2024-08-03 04:49:07,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=40997.0, ans=0.0 +2024-08-03 04:49:14,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=41033.666666666664, ans=0.09899494936611666 +2024-08-03 04:49:21,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41070.333333333336, ans=0.1 +2024-08-03 04:49:26,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=41070.333333333336, ans=0.0 +2024-08-03 04:49:41,442 INFO [train.py:1114] (0/4) Epoch 4, batch 300, loss[loss=0.3031, simple_loss=0.3617, pruned_loss=0.1222, over 13447.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3512, pruned_loss=0.1139, over 2050947.71 frames. ], batch size: 42, lr: 2.92e-02, grad_scale: 16.0 +2024-08-03 04:49:46,016 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.331e+02 1.504e+02 1.895e+02 3.054e+02, threshold=3.007e+02, percent-clipped=2.0 +2024-08-03 04:49:58,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41180.333333333336, ans=0.125 +2024-08-03 04:49:59,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41217.0, ans=0.125 +2024-08-03 04:50:03,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=41217.0, ans=0.125 +2024-08-03 04:50:18,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.61 vs. limit=15.0 +2024-08-03 04:50:22,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41290.333333333336, ans=0.1 +2024-08-03 04:50:23,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=41290.333333333336, ans=0.125 +2024-08-03 04:50:27,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=41290.333333333336, ans=0.2 +2024-08-03 04:50:29,637 INFO [train.py:1114] (0/4) Epoch 4, batch 350, loss[loss=0.266, simple_loss=0.3265, pruned_loss=0.1027, over 13564.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3503, pruned_loss=0.1126, over 2181509.94 frames. ], batch size: 33, lr: 2.92e-02, grad_scale: 16.0 +2024-08-03 04:50:30,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41327.0, ans=0.1 +2024-08-03 04:50:33,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=41327.0, ans=0.125 +2024-08-03 04:50:34,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.57 vs. 
limit=22.5 +2024-08-03 04:50:34,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.40 vs. limit=15.0 +2024-08-03 04:50:58,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=41437.0, ans=0.125 +2024-08-03 04:51:17,245 INFO [train.py:1114] (0/4) Epoch 4, batch 400, loss[loss=0.2512, simple_loss=0.3267, pruned_loss=0.08786, over 13367.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3502, pruned_loss=0.112, over 2286219.29 frames. ], batch size: 37, lr: 2.91e-02, grad_scale: 32.0 +2024-08-03 04:51:19,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=41510.333333333336, ans=0.125 +2024-08-03 04:51:21,801 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.350e+02 1.537e+02 1.828e+02 3.072e+02, threshold=3.074e+02, percent-clipped=1.0 +2024-08-03 04:51:50,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41620.333333333336, ans=0.125 +2024-08-03 04:52:05,359 INFO [train.py:1114] (0/4) Epoch 4, batch 450, loss[loss=0.2753, simple_loss=0.3398, pruned_loss=0.1054, over 13545.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3495, pruned_loss=0.1115, over 2359506.76 frames. ], batch size: 38, lr: 2.91e-02, grad_scale: 32.0 +2024-08-03 04:52:18,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=41693.666666666664, ans=0.04949747468305833 +2024-08-03 04:52:34,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=41767.0, ans=0.2 +2024-08-03 04:52:46,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41803.666666666664, ans=0.1 +2024-08-03 04:52:58,660 INFO [train.py:1114] (0/4) Epoch 4, batch 500, loss[loss=0.2858, simple_loss=0.3538, pruned_loss=0.1089, over 13439.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3478, pruned_loss=0.1106, over 2424776.65 frames. ], batch size: 43, lr: 2.90e-02, grad_scale: 16.0 +2024-08-03 04:53:02,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=41877.0, ans=0.125 +2024-08-03 04:53:04,080 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.429e+02 1.668e+02 2.120e+02 3.628e+02, threshold=3.335e+02, percent-clipped=2.0 +2024-08-03 04:53:04,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=41877.0, ans=0.0 +2024-08-03 04:53:14,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41913.666666666664, ans=0.125 +2024-08-03 04:53:27,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=8.42 vs. limit=12.0 +2024-08-03 04:53:30,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. 
limit=6.0 +2024-08-03 04:53:31,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41987.0, ans=0.1 +2024-08-03 04:53:37,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=42023.666666666664, ans=0.0 +2024-08-03 04:53:42,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=42023.666666666664, ans=0.0 +2024-08-03 04:53:47,160 INFO [train.py:1114] (0/4) Epoch 4, batch 550, loss[loss=0.3413, simple_loss=0.3988, pruned_loss=0.1419, over 13074.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3485, pruned_loss=0.1111, over 2468363.30 frames. ], batch size: 48, lr: 2.90e-02, grad_scale: 16.0 +2024-08-03 04:54:04,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.84 vs. limit=15.0 +2024-08-03 04:54:28,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=15.0 +2024-08-03 04:54:30,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=42207.0, ans=0.2 +2024-08-03 04:54:35,659 INFO [train.py:1114] (0/4) Epoch 4, batch 600, loss[loss=0.338, simple_loss=0.395, pruned_loss=0.1405, over 13314.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3475, pruned_loss=0.1099, over 2507505.77 frames. ], batch size: 46, lr: 2.90e-02, grad_scale: 8.0 +2024-08-03 04:54:35,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=42243.666666666664, ans=0.05 +2024-08-03 04:54:37,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=42243.666666666664, ans=0.125 +2024-08-03 04:54:41,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=42243.666666666664, ans=0.0016861594202898546 +2024-08-03 04:54:42,094 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.301e+02 1.482e+02 1.829e+02 3.304e+02, threshold=2.963e+02, percent-clipped=0.0 +2024-08-03 04:54:45,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=42280.333333333336, ans=0.125 +2024-08-03 04:54:55,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42317.0, ans=0.0 +2024-08-03 04:54:58,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=42317.0, ans=0.125 +2024-08-03 04:55:10,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.47 vs. limit=22.5 +2024-08-03 04:55:22,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42390.333333333336, ans=0.125 +2024-08-03 04:55:26,901 INFO [train.py:1114] (0/4) Epoch 4, batch 650, loss[loss=0.2449, simple_loss=0.3327, pruned_loss=0.07852, over 13548.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3461, pruned_loss=0.1088, over 2542945.37 frames. 
], batch size: 37, lr: 2.89e-02, grad_scale: 8.0 +2024-08-03 04:55:32,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0 +2024-08-03 04:55:56,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=42500.333333333336, ans=10.0 +2024-08-03 04:56:05,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42537.0, ans=0.125 +2024-08-03 04:56:08,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=42537.0, ans=0.125 +2024-08-03 04:56:19,595 INFO [train.py:1114] (0/4) Epoch 4, batch 700, loss[loss=0.2483, simple_loss=0.3179, pruned_loss=0.08939, over 13539.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3474, pruned_loss=0.1098, over 2564366.69 frames. ], batch size: 35, lr: 2.89e-02, grad_scale: 8.0 +2024-08-03 04:56:26,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.281e+02 1.426e+02 1.623e+02 2.957e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 04:56:29,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=42647.0, ans=0.125 +2024-08-03 04:56:32,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.97 vs. limit=15.0 +2024-08-03 04:57:00,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42757.0, ans=0.125 +2024-08-03 04:57:00,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.68 vs. limit=22.5 +2024-08-03 04:57:08,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=42757.0, ans=0.07 +2024-08-03 04:57:09,846 INFO [train.py:1114] (0/4) Epoch 4, batch 750, loss[loss=0.2751, simple_loss=0.348, pruned_loss=0.1011, over 13342.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3467, pruned_loss=0.1096, over 2581431.42 frames. ], batch size: 37, lr: 2.88e-02, grad_scale: 8.0 +2024-08-03 04:57:16,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5 +2024-08-03 04:57:17,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.88 vs. limit=15.0 +2024-08-03 04:57:33,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=42867.0, ans=0.95 +2024-08-03 04:57:59,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=42977.0, ans=0.125 +2024-08-03 04:57:59,799 INFO [train.py:1114] (0/4) Epoch 4, batch 800, loss[loss=0.2796, simple_loss=0.3326, pruned_loss=0.1133, over 13349.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3468, pruned_loss=0.1099, over 2596454.55 frames. 
], batch size: 33, lr: 2.88e-02, grad_scale: 16.0 +2024-08-03 04:58:06,225 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.323e+02 1.556e+02 1.905e+02 4.049e+02, threshold=3.112e+02, percent-clipped=3.0 +2024-08-03 04:58:15,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.60 vs. limit=15.0 +2024-08-03 04:58:42,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43123.666666666664, ans=0.0 +2024-08-03 04:58:45,989 INFO [train.py:1114] (0/4) Epoch 4, batch 850, loss[loss=0.2673, simple_loss=0.3411, pruned_loss=0.09676, over 13346.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3458, pruned_loss=0.1089, over 2608955.58 frames. ], batch size: 40, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 04:58:48,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0 +2024-08-03 04:58:59,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43197.0, ans=0.125 +2024-08-03 04:59:15,769 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=12.0 +2024-08-03 04:59:24,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=43307.0, ans=0.0014549999999999997 +2024-08-03 04:59:26,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=43307.0, ans=0.125 +2024-08-03 04:59:34,797 INFO [train.py:1114] (0/4) Epoch 4, batch 900, loss[loss=0.2583, simple_loss=0.3162, pruned_loss=0.1002, over 13338.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3471, pruned_loss=0.1102, over 2611537.52 frames. ], batch size: 33, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 04:59:40,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.400e+02 1.608e+02 1.991e+02 3.200e+02, threshold=3.215e+02, percent-clipped=1.0 +2024-08-03 04:59:45,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=43380.333333333336, ans=0.125 +2024-08-03 04:59:45,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=43380.333333333336, ans=0.025 +2024-08-03 04:59:49,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=43380.333333333336, ans=0.125 +2024-08-03 05:00:03,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=43453.666666666664, ans=0.125 +2024-08-03 05:00:05,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.45 vs. limit=15.0 +2024-08-03 05:00:17,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43490.333333333336, ans=0.1 +2024-08-03 05:00:22,623 INFO [train.py:1114] (0/4) Epoch 4, batch 950, loss[loss=0.2527, simple_loss=0.3213, pruned_loss=0.09202, over 13520.00 frames. 
], tot_loss[loss=0.2824, simple_loss=0.3464, pruned_loss=0.1092, over 2612254.96 frames. ], batch size: 34, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 05:00:51,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=43600.333333333336, ans=0.025 +2024-08-03 05:00:51,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43600.333333333336, ans=0.0 +2024-08-03 05:00:51,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=43600.333333333336, ans=0.125 +2024-08-03 05:00:54,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.88 vs. limit=22.5 +2024-08-03 05:00:56,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.03 vs. limit=6.0 +2024-08-03 05:01:11,463 INFO [train.py:1114] (0/4) Epoch 4, batch 1000, loss[loss=0.2479, simple_loss=0.3154, pruned_loss=0.0902, over 13354.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3469, pruned_loss=0.1097, over 2610519.19 frames. ], batch size: 35, lr: 2.86e-02, grad_scale: 16.0 +2024-08-03 05:01:17,824 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.299e+02 1.424e+02 1.610e+02 2.784e+02, threshold=2.848e+02, percent-clipped=0.0 +2024-08-03 05:01:27,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43747.0, ans=0.0 +2024-08-03 05:01:30,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.85 vs. limit=15.0 +2024-08-03 05:01:44,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=43820.333333333336, ans=0.001343405797101448 +2024-08-03 05:01:51,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=43857.0, ans=0.0 +2024-08-03 05:01:52,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.27 vs. limit=22.5 +2024-08-03 05:01:57,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=43857.0, ans=0.09899494936611666 +2024-08-03 05:01:59,637 INFO [train.py:1114] (0/4) Epoch 4, batch 1050, loss[loss=0.2919, simple_loss=0.3691, pruned_loss=0.1073, over 13570.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3457, pruned_loss=0.109, over 2614438.61 frames. 
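
This run writes two kinds of checkpoints: end-of-epoch files (`epoch-3.pt` earlier in the log) and periodic mid-epoch files keyed to the global training batch index (`checkpoint-12000.pt` just above). A sketch of the periodic half of that policy; the function and parameter names are illustrative, not the icefall `checkpoint.py` API:

```python
import torch


def maybe_save(model, optimizer, exp_dir: str, batch_idx_train: int,
               save_every_n: int = 4000) -> None:
    """Write checkpoint-<N>.pt every save_every_n training batches.

    Illustrative sketch of the policy implied by the log's checkpoint
    paths; not the actual icefall checkpoint.py API.
    """
    if batch_idx_train == 0 or batch_idx_train % save_every_n != 0:
        return
    path = f"{exp_dir}/checkpoint-{batch_idx_train}.pt"
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "batch_idx_train": batch_idx_train,
        },
        path,
    )
```

Resuming from the latest `checkpoint-<N>.pt` rather than the last `epoch-<E>.pt` is what makes mid-epoch restarts cheap on long epochs like the ~13k-batch ones in this run.
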
], batch size: 39, lr: 2.86e-02, grad_scale: 16.0 +2024-08-03 05:02:07,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=43893.666666666664, ans=0.125 +2024-08-03 05:02:09,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=43930.333333333336, ans=0.025 +2024-08-03 05:02:10,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=43930.333333333336, ans=0.125 +2024-08-03 05:02:20,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=43967.0, ans=0.025 +2024-08-03 05:02:25,548 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-12000.pt +2024-08-03 05:02:50,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=44040.333333333336, ans=0.001295579710144927 +2024-08-03 05:02:54,695 INFO [train.py:1114] (0/4) Epoch 4, batch 1100, loss[loss=0.2592, simple_loss=0.3278, pruned_loss=0.09527, over 13562.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3459, pruned_loss=0.1092, over 2618771.08 frames. ], batch size: 36, lr: 2.85e-02, grad_scale: 16.0 +2024-08-03 05:03:00,992 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.374e+02 1.585e+02 1.899e+02 4.895e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-03 05:03:09,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=44113.666666666664, ans=0.04949747468305833 +2024-08-03 05:03:11,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=44113.666666666664, ans=0.1 +2024-08-03 05:03:12,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=44150.333333333336, ans=0.0 +2024-08-03 05:03:13,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=44150.333333333336, ans=0.125 +2024-08-03 05:03:18,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=44150.333333333336, ans=0.125 +2024-08-03 05:03:35,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.75 vs. limit=15.0 +2024-08-03 05:03:40,691 INFO [train.py:1114] (0/4) Epoch 4, batch 1150, loss[loss=0.2717, simple_loss=0.3383, pruned_loss=0.1025, over 13542.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.345, pruned_loss=0.1086, over 2617731.45 frames. 
], batch size: 36, lr: 2.85e-02, grad_scale: 16.0 +2024-08-03 05:03:53,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=44297.0, ans=0.0 +2024-08-03 05:04:13,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=44370.333333333336, ans=0.0 +2024-08-03 05:04:22,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=44407.0, ans=0.025 +2024-08-03 05:04:32,841 INFO [train.py:1114] (0/4) Epoch 4, batch 1200, loss[loss=0.3126, simple_loss=0.3798, pruned_loss=0.1227, over 13586.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3466, pruned_loss=0.1092, over 2615076.35 frames. ], batch size: 39, lr: 2.84e-02, grad_scale: 32.0 +2024-08-03 05:04:38,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.38 vs. limit=6.0 +2024-08-03 05:04:39,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.292e+02 1.456e+02 1.641e+02 3.622e+02, threshold=2.911e+02, percent-clipped=1.0 +2024-08-03 05:04:48,061 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:05:03,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=44553.666666666664, ans=0.04949747468305833 +2024-08-03 05:05:09,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=44553.666666666664, ans=0.0 +2024-08-03 05:06:21,894 INFO [train.py:1114] (0/4) Epoch 4, batch 1250, loss[loss=0.3241, simple_loss=0.3873, pruned_loss=0.1305, over 13435.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3478, pruned_loss=0.1094, over 2627168.89 frames. ], batch size: 42, lr: 2.84e-02, grad_scale: 32.0 +2024-08-03 05:06:24,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.52 vs. limit=10.0 +2024-08-03 05:06:33,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.22 vs. limit=15.0 +2024-08-03 05:06:38,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=44663.666666666664, ans=0.125 +2024-08-03 05:06:49,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=44737.0, ans=0.125 +2024-08-03 05:07:00,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. limit=6.0 +2024-08-03 05:07:08,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=44810.333333333336, ans=0.125 +2024-08-03 05:07:08,954 INFO [train.py:1114] (0/4) Epoch 4, batch 1300, loss[loss=0.268, simple_loss=0.3337, pruned_loss=0.1011, over 12947.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3459, pruned_loss=0.1085, over 2629571.85 frames. 
], batch size: 52, lr: 2.84e-02, grad_scale: 16.0 +2024-08-03 05:07:18,016 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.334e+02 1.637e+02 2.034e+02 3.739e+02, threshold=3.274e+02, percent-clipped=6.0 +2024-08-03 05:07:46,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=44920.333333333336, ans=0.015 +2024-08-03 05:07:48,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=44920.333333333336, ans=0.125 +2024-08-03 05:07:59,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44957.0, ans=0.1 +2024-08-03 05:08:01,412 INFO [train.py:1114] (0/4) Epoch 4, batch 1350, loss[loss=0.2723, simple_loss=0.3416, pruned_loss=0.1015, over 13541.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3462, pruned_loss=0.1087, over 2637607.02 frames. ], batch size: 37, lr: 2.83e-02, grad_scale: 8.0 +2024-08-03 05:08:19,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-08-03 05:08:50,559 INFO [train.py:1114] (0/4) Epoch 4, batch 1400, loss[loss=0.254, simple_loss=0.311, pruned_loss=0.09848, over 13266.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.346, pruned_loss=0.1089, over 2641409.56 frames. ], batch size: 31, lr: 2.83e-02, grad_scale: 8.0 +2024-08-03 05:08:53,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=45177.0, ans=0.0010484782608695658 +2024-08-03 05:08:57,137 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=1.907e-02 +2024-08-03 05:08:58,655 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.343e+02 1.530e+02 1.906e+02 3.012e+02, threshold=3.060e+02, percent-clipped=0.0 +2024-08-03 05:09:00,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45213.666666666664, ans=0.125 +2024-08-03 05:09:00,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=45213.666666666664, ans=0.02 +2024-08-03 05:09:38,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=45360.333333333336, ans=0.2 +2024-08-03 05:09:38,857 INFO [train.py:1114] (0/4) Epoch 4, batch 1450, loss[loss=0.3036, simple_loss=0.3714, pruned_loss=0.1179, over 13414.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3469, pruned_loss=0.1095, over 2640427.04 frames. ], batch size: 43, lr: 2.82e-02, grad_scale: 8.0 +2024-08-03 05:09:42,907 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.76 vs. 
limit=15.0 +2024-08-03 05:09:44,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=45360.333333333336, ans=0.2 +2024-08-03 05:09:49,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=45397.0, ans=0.0 +2024-08-03 05:09:50,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=45397.0, ans=0.125 +2024-08-03 05:10:09,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.13 vs. limit=15.0 +2024-08-03 05:10:12,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.24 vs. limit=15.0 +2024-08-03 05:10:13,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.77 vs. limit=22.5 +2024-08-03 05:10:18,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45470.333333333336, ans=0.125 +2024-08-03 05:10:29,032 INFO [train.py:1114] (0/4) Epoch 4, batch 1500, loss[loss=0.311, simple_loss=0.3743, pruned_loss=0.1239, over 13408.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3478, pruned_loss=0.11, over 2640719.58 frames. ], batch size: 39, lr: 2.82e-02, grad_scale: 8.0 +2024-08-03 05:10:33,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.41 vs. limit=15.0 +2024-08-03 05:10:37,538 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.331e+02 1.463e+02 1.698e+02 3.158e+02, threshold=2.927e+02, percent-clipped=1.0 +2024-08-03 05:10:47,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.03 vs. limit=15.0 +2024-08-03 05:10:49,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=45617.0, ans=0.0 +2024-08-03 05:10:53,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=45617.0, ans=0.125 +2024-08-03 05:10:54,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.08 vs. limit=5.0 +2024-08-03 05:10:59,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45653.666666666664, ans=0.125 +2024-08-03 05:11:23,486 INFO [train.py:1114] (0/4) Epoch 4, batch 1550, loss[loss=0.2608, simple_loss=0.3414, pruned_loss=0.09014, over 13399.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.348, pruned_loss=0.1106, over 2629988.87 frames. 
], batch size: 41, lr: 2.81e-02, grad_scale: 8.0 +2024-08-03 05:11:28,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=45727.0, ans=0.0009289130434782609 +2024-08-03 05:11:42,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=45800.333333333336, ans=0.125 +2024-08-03 05:11:42,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=45800.333333333336, ans=0.125 +2024-08-03 05:11:44,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=45800.333333333336, ans=0.125 +2024-08-03 05:11:52,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=45800.333333333336, ans=0.125 +2024-08-03 05:12:00,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=45837.0, ans=0.2 +2024-08-03 05:12:01,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=45837.0, ans=0.035 +2024-08-03 05:12:09,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=45837.0, ans=0.125 +2024-08-03 05:12:21,133 INFO [train.py:1114] (0/4) Epoch 4, batch 1600, loss[loss=0.2587, simple_loss=0.3295, pruned_loss=0.09394, over 13565.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3469, pruned_loss=0.1097, over 2623380.47 frames. ], batch size: 39, lr: 2.81e-02, grad_scale: 16.0 +2024-08-03 05:12:23,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=45910.333333333336, ans=0.0008890579710144932 +2024-08-03 05:12:30,800 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.396e+02 1.598e+02 1.877e+02 3.901e+02, threshold=3.195e+02, percent-clipped=2.0 +2024-08-03 05:12:31,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=45947.0, ans=0.0 +2024-08-03 05:12:39,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=45947.0, ans=0.0008810869565217382 +2024-08-03 05:12:44,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45983.666666666664, ans=0.125 +2024-08-03 05:12:49,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45983.666666666664, ans=0.125 +2024-08-03 05:13:10,771 INFO [train.py:1114] (0/4) Epoch 4, batch 1650, loss[loss=0.2759, simple_loss=0.3433, pruned_loss=0.1043, over 13309.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3465, pruned_loss=0.1096, over 2619421.26 frames. ], batch size: 40, lr: 2.81e-02, grad_scale: 16.0 +2024-08-03 05:13:11,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46093.666666666664, ans=0.125 +2024-08-03 05:13:19,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.34 vs. 
limit=22.5 +2024-08-03 05:13:21,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.86 vs. limit=10.0 +2024-08-03 05:14:01,265 INFO [train.py:1114] (0/4) Epoch 4, batch 1700, loss[loss=0.2478, simple_loss=0.3115, pruned_loss=0.09207, over 13239.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3452, pruned_loss=0.1083, over 2629122.70 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0 +2024-08-03 05:14:07,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=46277.0, ans=0.125 +2024-08-03 05:14:09,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.356e+02 1.607e+02 2.015e+02 3.197e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-03 05:14:12,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46313.666666666664, ans=0.1 +2024-08-03 05:14:39,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.84 vs. limit=22.5 +2024-08-03 05:14:40,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=46423.666666666664, ans=12.0 +2024-08-03 05:14:43,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=46423.666666666664, ans=0.125 +2024-08-03 05:14:47,549 INFO [train.py:1114] (0/4) Epoch 4, batch 1750, loss[loss=0.2455, simple_loss=0.3191, pruned_loss=0.08602, over 13555.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3447, pruned_loss=0.1082, over 2633070.36 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0 +2024-08-03 05:15:03,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=46497.0, ans=0.2 +2024-08-03 05:18:19,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=46607.0, ans=0.0 +2024-08-03 05:18:21,911 INFO [train.py:1114] (0/4) Epoch 4, batch 1800, loss[loss=0.2974, simple_loss=0.3635, pruned_loss=0.1157, over 13551.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3451, pruned_loss=0.1082, over 2634607.90 frames. ], batch size: 38, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:18:27,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=46643.666666666664, ans=0.0 +2024-08-03 05:18:30,257 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.378e+02 1.581e+02 2.012e+02 3.618e+02, threshold=3.161e+02, percent-clipped=2.0 +2024-08-03 05:18:31,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46680.333333333336, ans=0.1 +2024-08-03 05:18:47,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.39 vs. limit=15.0 +2024-08-03 05:18:51,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=46753.666666666664, ans=0.025 +2024-08-03 05:19:09,817 INFO [train.py:1114] (0/4) Epoch 4, batch 1850, loss[loss=0.2638, simple_loss=0.3407, pruned_loss=0.09342, over 13400.00 frames. 
], tot_loss[loss=0.2792, simple_loss=0.3438, pruned_loss=0.1073, over 2636834.38 frames. ], batch size: 39, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:19:17,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=46827.0, ans=0.125 +2024-08-03 05:19:24,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=46863.666666666664, ans=0.0 +2024-08-03 05:19:39,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.16 vs. limit=15.0 +2024-08-03 05:19:42,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=46937.0, ans=0.125 +2024-08-03 05:19:58,245 INFO [train.py:1114] (0/4) Epoch 4, batch 1900, loss[loss=0.2577, simple_loss=0.3456, pruned_loss=0.08493, over 13331.00 frames. ], tot_loss[loss=0.279, simple_loss=0.344, pruned_loss=0.107, over 2640098.99 frames. ], batch size: 40, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:20:06,386 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.297e+02 1.477e+02 1.706e+02 2.975e+02, threshold=2.953e+02, percent-clipped=0.0 +2024-08-03 05:20:19,463 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:20:23,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=10.0 +2024-08-03 05:20:31,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=47120.333333333336, ans=0.2 +2024-08-03 05:20:33,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=47120.333333333336, ans=0.0 +2024-08-03 05:20:34,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=47120.333333333336, ans=0.125 +2024-08-03 05:20:38,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=47157.0, ans=0.95 +2024-08-03 05:20:44,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.64 vs. limit=6.0 +2024-08-03 05:20:45,529 INFO [train.py:1114] (0/4) Epoch 4, batch 1950, loss[loss=0.2928, simple_loss=0.3429, pruned_loss=0.1214, over 13567.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3459, pruned_loss=0.1078, over 2646939.43 frames. ], batch size: 36, lr: 2.78e-02, grad_scale: 16.0 +2024-08-03 05:20:50,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=47193.666666666664, ans=0.07 +2024-08-03 05:21:04,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=47267.0, ans=0.125 +2024-08-03 05:21:15,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47303.666666666664, ans=0.1 +2024-08-03 05:21:24,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.20 vs. 
limit=15.0 +2024-08-03 05:21:34,049 INFO [train.py:1114] (0/4) Epoch 4, batch 2000, loss[loss=0.2583, simple_loss=0.3169, pruned_loss=0.09982, over 13551.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3464, pruned_loss=0.108, over 2636539.06 frames. ], batch size: 31, lr: 2.78e-02, grad_scale: 32.0 +2024-08-03 05:21:42,487 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.720e+01 1.383e+02 1.598e+02 1.904e+02 4.710e+02, threshold=3.195e+02, percent-clipped=1.0 +2024-08-03 05:22:05,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=47487.0, ans=0.125 +2024-08-03 05:22:11,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=15.0 +2024-08-03 05:22:19,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=47560.333333333336, ans=0.125 +2024-08-03 05:22:20,575 INFO [train.py:1114] (0/4) Epoch 4, batch 2050, loss[loss=0.2291, simple_loss=0.2885, pruned_loss=0.08484, over 13441.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3449, pruned_loss=0.1074, over 2633408.34 frames. ], batch size: 32, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:22:20,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47560.333333333336, ans=0.125 +2024-08-03 05:22:31,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=47597.0, ans=0.2 +2024-08-03 05:22:33,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=47597.0, ans=0.07 +2024-08-03 05:22:37,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=47597.0, ans=0.125 +2024-08-03 05:22:44,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.79 vs. limit=22.5 +2024-08-03 05:22:47,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47633.666666666664, ans=0.0 +2024-08-03 05:22:52,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.16 vs. limit=15.0 +2024-08-03 05:22:53,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=47670.333333333336, ans=0.0 +2024-08-03 05:23:46,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=47707.0, ans=0.0 +2024-08-03 05:23:48,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=47707.0, ans=0.0004984782608695656 +2024-08-03 05:23:52,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=47707.0, ans=0.125 +2024-08-03 05:23:56,612 INFO [train.py:1114] (0/4) Epoch 4, batch 2100, loss[loss=0.26, simple_loss=0.3187, pruned_loss=0.1007, over 13549.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.344, pruned_loss=0.1069, over 2638861.98 frames. 
], batch size: 37, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:23:57,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=47743.666666666664, ans=0.025 +2024-08-03 05:24:01,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=47743.666666666664, ans=0.2 +2024-08-03 05:24:06,854 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.313e+02 1.529e+02 1.934e+02 3.413e+02, threshold=3.058e+02, percent-clipped=1.0 +2024-08-03 05:24:18,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=47817.0, ans=0.2 +2024-08-03 05:24:20,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47817.0, ans=0.1 +2024-08-03 05:24:26,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47853.666666666664, ans=0.1 +2024-08-03 05:24:27,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=47853.666666666664, ans=0.125 +2024-08-03 05:24:33,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0 +2024-08-03 05:24:40,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=47890.333333333336, ans=0.0004586231884057962 +2024-08-03 05:24:47,068 INFO [train.py:1114] (0/4) Epoch 4, batch 2150, loss[loss=0.2388, simple_loss=0.3161, pruned_loss=0.08076, over 13558.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3433, pruned_loss=0.1066, over 2647345.52 frames. ], batch size: 36, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:25:03,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47963.666666666664, ans=0.125 +2024-08-03 05:25:11,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=48000.333333333336, ans=0.125 +2024-08-03 05:25:32,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=48073.666666666664, ans=15.0 +2024-08-03 05:25:33,688 INFO [train.py:1114] (0/4) Epoch 4, batch 2200, loss[loss=0.3193, simple_loss=0.3806, pruned_loss=0.129, over 13410.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3429, pruned_loss=0.1061, over 2645173.56 frames. ], batch size: 39, lr: 2.76e-02, grad_scale: 32.0 +2024-08-03 05:25:42,095 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.479e+02 1.728e+02 2.109e+02 3.412e+02, threshold=3.456e+02, percent-clipped=2.0 +2024-08-03 05:25:46,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.72 vs. 
limit=6.0 +2024-08-03 05:25:48,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=48147.0, ans=0.1 +2024-08-03 05:25:59,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=48183.666666666664, ans=0.0 +2024-08-03 05:26:22,232 INFO [train.py:1114] (0/4) Epoch 4, batch 2250, loss[loss=0.2588, simple_loss=0.3377, pruned_loss=0.08996, over 13361.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3426, pruned_loss=0.1057, over 2642779.77 frames. ], batch size: 37, lr: 2.76e-02, grad_scale: 32.0 +2024-08-03 05:26:50,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=48403.666666666664, ans=0.2 +2024-08-03 05:27:01,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=48440.333333333336, ans=0.000339057971014493 +2024-08-03 05:27:02,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=48440.333333333336, ans=0.125 +2024-08-03 05:27:15,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=48440.333333333336, ans=0.1 +2024-08-03 05:27:17,464 INFO [train.py:1114] (0/4) Epoch 4, batch 2300, loss[loss=0.2689, simple_loss=0.3245, pruned_loss=0.1067, over 13598.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3419, pruned_loss=0.1059, over 2638884.10 frames. ], batch size: 33, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:27:30,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=48477.0, ans=0.125 +2024-08-03 05:27:38,607 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.390e+02 1.580e+02 1.913e+02 3.341e+02, threshold=3.160e+02, percent-clipped=0.0 +2024-08-03 05:27:38,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=48513.666666666664, ans=0.5 +2024-08-03 05:27:53,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=48550.333333333336, ans=0.125 +2024-08-03 05:28:16,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.87 vs. limit=10.0 +2024-08-03 05:28:18,359 INFO [train.py:1114] (0/4) Epoch 4, batch 2350, loss[loss=0.2554, simple_loss=0.3328, pruned_loss=0.08899, over 13556.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3421, pruned_loss=0.106, over 2641266.16 frames. ], batch size: 38, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:28:22,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=48660.333333333336, ans=0.0 +2024-08-03 05:28:25,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=48660.333333333336, ans=0.125 +2024-08-03 05:28:28,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.33 vs. 
limit=15.0 +2024-08-03 05:28:42,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=48733.666666666664, ans=0.0 +2024-08-03 05:28:47,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48733.666666666664, ans=0.125 +2024-08-03 05:28:49,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=48733.666666666664, ans=0.0 +2024-08-03 05:28:52,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=48733.666666666664, ans=0.125 +2024-08-03 05:29:04,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48770.333333333336, ans=0.1 +2024-08-03 05:29:19,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=48807.0, ans=0.125 +2024-08-03 05:29:32,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-08-03 05:29:35,537 INFO [train.py:1114] (0/4) Epoch 4, batch 2400, loss[loss=0.2547, simple_loss=0.3256, pruned_loss=0.09189, over 13552.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3423, pruned_loss=0.1056, over 2642721.08 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:29:37,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0 +2024-08-03 05:29:51,914 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.312e+02 1.493e+02 1.944e+02 3.513e+02, threshold=2.987e+02, percent-clipped=1.0 +2024-08-03 05:29:53,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.26 vs. limit=10.0 +2024-08-03 05:30:16,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=48917.0, ans=0.125 +2024-08-03 05:30:17,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.00 vs. limit=15.0 +2024-08-03 05:30:23,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=48917.0, ans=0.00023543478260869624 +2024-08-03 05:31:00,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=48953.666666666664, ans=0.125 +2024-08-03 05:31:16,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=48990.333333333336, ans=0.07 +2024-08-03 05:31:27,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=48990.333333333336, ans=0.025 +2024-08-03 05:31:38,649 INFO [train.py:1114] (0/4) Epoch 4, batch 2450, loss[loss=0.2744, simple_loss=0.3465, pruned_loss=0.1011, over 13353.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3434, pruned_loss=0.1062, over 2633582.34 frames. 
], batch size: 37, lr: 2.74e-02, grad_scale: 32.0 +2024-08-03 05:31:43,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=49027.0, ans=0.0002115217391304349 +2024-08-03 05:31:57,574 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:32:13,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=49100.333333333336, ans=0.0 +2024-08-03 05:32:26,632 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:32:29,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49173.666666666664, ans=0.125 +2024-08-03 05:32:36,604 INFO [train.py:1114] (0/4) Epoch 4, batch 2500, loss[loss=0.2786, simple_loss=0.3511, pruned_loss=0.103, over 13404.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3431, pruned_loss=0.1058, over 2636968.58 frames. ], batch size: 39, lr: 2.74e-02, grad_scale: 32.0 +2024-08-03 05:32:40,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49210.333333333336, ans=0.1 +2024-08-03 05:32:41,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=49210.333333333336, ans=0.0 +2024-08-03 05:32:41,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=49210.333333333336, ans=0.1 +2024-08-03 05:32:42,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=49210.333333333336, ans=0.0 +2024-08-03 05:32:44,460 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.392e+02 1.612e+02 1.907e+02 3.604e+02, threshold=3.223e+02, percent-clipped=4.0 +2024-08-03 05:32:49,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49247.0, ans=0.1 +2024-08-03 05:33:16,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=49320.333333333336, ans=0.0 +2024-08-03 05:33:16,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49320.333333333336, ans=0.125 +2024-08-03 05:33:44,745 INFO [train.py:1114] (0/4) Epoch 4, batch 2550, loss[loss=0.2548, simple_loss=0.3213, pruned_loss=0.09419, over 13560.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3425, pruned_loss=0.1055, over 2638965.99 frames. ], batch size: 31, lr: 2.73e-02, grad_scale: 32.0 +2024-08-03 05:34:06,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49467.0, ans=0.1 +2024-08-03 05:34:13,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.93 vs. 
limit=22.5 +2024-08-03 05:34:14,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=49467.0, ans=0.0 +2024-08-03 05:34:16,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=49503.666666666664, ans=10.0 +2024-08-03 05:34:20,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49503.666666666664, ans=0.1 +2024-08-03 05:34:23,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=26.11 vs. limit=15.0 +2024-08-03 05:34:26,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.08 vs. limit=15.0 +2024-08-03 05:34:36,329 INFO [train.py:1114] (0/4) Epoch 4, batch 2600, loss[loss=0.2818, simple_loss=0.3489, pruned_loss=0.1073, over 13567.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3437, pruned_loss=0.1063, over 2638385.01 frames. ], batch size: 36, lr: 2.73e-02, grad_scale: 32.0 +2024-08-03 05:34:40,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=15.0 +2024-08-03 05:34:40,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=49577.0, ans=0.125 +2024-08-03 05:34:41,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.08 vs. limit=15.0 +2024-08-03 05:34:42,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=49577.0, ans=0.125 +2024-08-03 05:34:44,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.359e+02 1.570e+02 1.941e+02 3.532e+02, threshold=3.140e+02, percent-clipped=1.0 +2024-08-03 05:34:55,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.86 vs. limit=10.0 +2024-08-03 05:35:00,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49650.333333333336, ans=0.125 +2024-08-03 05:35:04,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=49687.0, ans=0.125 +2024-08-03 05:35:24,705 INFO [train.py:1114] (0/4) Epoch 4, batch 2650, loss[loss=0.3193, simple_loss=0.3791, pruned_loss=0.1298, over 13319.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3439, pruned_loss=0.1065, over 2641493.06 frames. 
], batch size: 46, lr: 2.73e-02, grad_scale: 16.0 +2024-08-03 05:35:25,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=49760.333333333336, ans=0.025 +2024-08-03 05:35:28,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49760.333333333336, ans=0.1 +2024-08-03 05:35:58,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=49870.333333333336, ans=0.2 +2024-08-03 05:36:15,402 INFO [train.py:1114] (0/4) Epoch 4, batch 2700, loss[loss=0.2754, simple_loss=0.3486, pruned_loss=0.1011, over 13552.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3444, pruned_loss=0.1068, over 2639427.80 frames. ], batch size: 40, lr: 2.72e-02, grad_scale: 16.0 +2024-08-03 05:36:18,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49943.666666666664, ans=0.125 +2024-08-03 05:36:24,110 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.340e+02 1.529e+02 1.834e+02 2.682e+02, threshold=3.057e+02, percent-clipped=0.0 +2024-08-03 05:36:35,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50017.0, ans=0.125 +2024-08-03 05:36:38,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=15.0 +2024-08-03 05:36:39,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=50017.0, ans=0.125 +2024-08-03 05:36:40,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50017.0, ans=0.1 +2024-08-03 05:36:46,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=50053.666666666664, ans=0.025 +2024-08-03 05:36:48,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=50053.666666666664, ans=0.2 +2024-08-03 05:36:50,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=50053.666666666664, ans=0.2 +2024-08-03 05:36:51,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50053.666666666664, ans=0.1 +2024-08-03 05:36:55,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-08-03 05:37:03,514 INFO [train.py:1114] (0/4) Epoch 4, batch 2750, loss[loss=0.2402, simple_loss=0.3166, pruned_loss=0.08186, over 13338.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3426, pruned_loss=0.106, over 2637004.48 frames. ], batch size: 34, lr: 2.72e-02, grad_scale: 16.0 +2024-08-03 05:37:14,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.89 vs. 
limit=15.0 +2024-08-03 05:37:16,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=50163.666666666664, ans=0.125 +2024-08-03 05:37:34,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=50237.0, ans=0.125 +2024-08-03 05:37:45,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=50273.666666666664, ans=0.125 +2024-08-03 05:37:46,930 INFO [train.py:1114] (0/4) Epoch 4, batch 2800, loss[loss=0.3383, simple_loss=0.3788, pruned_loss=0.1489, over 9143.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3428, pruned_loss=0.1064, over 2628637.18 frames. ], batch size: 96, lr: 2.72e-02, grad_scale: 32.0 +2024-08-03 05:37:55,723 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.137e+02 1.473e+02 1.737e+02 2.107e+02 3.108e+02, threshold=3.473e+02, percent-clipped=1.0 +2024-08-03 05:38:13,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=50420.333333333336, ans=0.2 +2024-08-03 05:38:19,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=50420.333333333336, ans=0.0 +2024-08-03 05:38:23,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=50457.0, ans=0.125 +2024-08-03 05:38:30,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=50457.0, ans=0.0 +2024-08-03 05:38:31,579 INFO [train.py:1114] (0/4) Epoch 4, batch 2850, loss[loss=0.2638, simple_loss=0.3314, pruned_loss=0.0981, over 13351.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3428, pruned_loss=0.1063, over 2622005.52 frames. ], batch size: 35, lr: 2.71e-02, grad_scale: 16.0 +2024-08-03 05:38:46,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50530.333333333336, ans=0.125 +2024-08-03 05:39:02,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.48 vs. limit=15.0 +2024-08-03 05:39:21,537 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:39:23,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50640.333333333336, ans=0.125 +2024-08-03 05:39:27,366 INFO [train.py:1114] (0/4) Epoch 4, batch 2900, loss[loss=0.268, simple_loss=0.3353, pruned_loss=0.1004, over 13359.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.344, pruned_loss=0.1064, over 2632798.95 frames. 
], batch size: 36, lr: 2.71e-02, grad_scale: 16.0 +2024-08-03 05:39:39,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.312e+02 1.485e+02 1.747e+02 2.702e+02, threshold=2.970e+02, percent-clipped=0.0 +2024-08-03 05:39:39,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50713.666666666664, ans=0.125 +2024-08-03 05:39:40,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50713.666666666664, ans=0.125 +2024-08-03 05:39:42,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50713.666666666664, ans=0.1 +2024-08-03 05:39:44,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=50713.666666666664, ans=0.125 +2024-08-03 05:39:44,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=50713.666666666664, ans=0.0 +2024-08-03 05:39:50,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=50750.333333333336, ans=0.125 +2024-08-03 05:40:02,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=50787.0, ans=0.035 +2024-08-03 05:40:13,937 INFO [train.py:1114] (0/4) Epoch 4, batch 2950, loss[loss=0.2683, simple_loss=0.3313, pruned_loss=0.1027, over 13321.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3416, pruned_loss=0.1051, over 2630942.85 frames. ], batch size: 34, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:40:17,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=50860.333333333336, ans=0.0 +2024-08-03 05:40:22,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.09 vs. limit=22.5 +2024-08-03 05:40:25,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50897.0, ans=0.125 +2024-08-03 05:40:34,206 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:40:34,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=15.0 +2024-08-03 05:40:59,444 INFO [train.py:1114] (0/4) Epoch 4, batch 3000, loss[loss=0.261, simple_loss=0.3288, pruned_loss=0.09661, over 13530.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3411, pruned_loss=0.1049, over 2630074.71 frames. ], batch size: 37, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:40:59,445 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 05:41:08,697 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.8059, 4.0154, 3.7112, 3.9406], device='cuda:0') +2024-08-03 05:41:15,390 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2213, simple_loss=0.3178, pruned_loss=0.06237, over 944034.00 frames. 
+2024-08-03 05:41:15,391 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 05:41:28,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.441e+02 1.719e+02 2.426e+02 4.333e+02, threshold=3.438e+02, percent-clipped=13.0 +2024-08-03 05:41:35,389 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:41:41,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51117.0, ans=0.125 +2024-08-03 05:41:55,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=51190.333333333336, ans=0.125 +2024-08-03 05:42:02,375 INFO [train.py:1114] (0/4) Epoch 4, batch 3050, loss[loss=0.2821, simple_loss=0.3422, pruned_loss=0.111, over 13539.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3412, pruned_loss=0.1049, over 2626401.62 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:42:12,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=51263.666666666664, ans=0.025 +2024-08-03 05:42:18,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=51263.666666666664, ans=0.2 +2024-08-03 05:42:34,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=51337.0, ans=0.0 +2024-08-03 05:42:37,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=51373.666666666664, ans=0.0 +2024-08-03 05:42:41,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=51373.666666666664, ans=0.2 +2024-08-03 05:42:51,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=15.0 +2024-08-03 05:42:57,209 INFO [train.py:1114] (0/4) Epoch 4, batch 3100, loss[loss=0.2681, simple_loss=0.3401, pruned_loss=0.0981, over 13258.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.3418, pruned_loss=0.1052, over 2626097.71 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 16.0 +2024-08-03 05:52:34,206 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.298e+02 1.531e+02 1.928e+02 3.998e+02, threshold=3.062e+02, percent-clipped=1.0 +2024-08-03 05:54:16,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51447.0, ans=0.1 +2024-08-03 05:54:19,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=51483.666666666664, ans=0.2 +2024-08-03 05:54:34,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.64 vs. 
limit=15.0 +2024-08-03 05:54:38,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=51557.0, ans=0.125 +2024-08-03 05:54:39,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=51557.0, ans=0.2 +2024-08-03 05:54:44,061 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.81 vs. limit=15.0 +2024-08-03 05:54:46,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.37 vs. limit=15.0 +2024-08-03 05:54:51,724 INFO [train.py:1114] (0/4) Epoch 4, batch 3150, loss[loss=0.2848, simple_loss=0.3564, pruned_loss=0.1067, over 12986.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3419, pruned_loss=0.1053, over 2627060.84 frames. ], batch size: 48, lr: 2.69e-02, grad_scale: 16.0 +2024-08-03 05:54:52,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0 +2024-08-03 05:54:52,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.05 vs. limit=15.0 +2024-08-03 05:55:03,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.28 vs. limit=15.0 +2024-08-03 05:55:07,612 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.84 vs. limit=15.0 +2024-08-03 05:55:09,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51630.333333333336, ans=0.1 +2024-08-03 05:55:26,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51703.666666666664, ans=0.1 +2024-08-03 05:55:41,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=51740.333333333336, ans=0.025 +2024-08-03 05:55:46,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51740.333333333336, ans=0.1 +2024-08-03 05:55:48,105 INFO [train.py:1114] (0/4) Epoch 4, batch 3200, loss[loss=0.2847, simple_loss=0.3451, pruned_loss=0.1121, over 13543.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3408, pruned_loss=0.1045, over 2633472.77 frames. ], batch size: 37, lr: 2.69e-02, grad_scale: 32.0 +2024-08-03 05:55:57,488 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.804e+01 1.368e+02 1.621e+02 1.933e+02 3.574e+02, threshold=3.241e+02, percent-clipped=2.0 +2024-08-03 05:56:06,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51850.333333333336, ans=0.125 +2024-08-03 05:56:13,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=51850.333333333336, ans=0.125 +2024-08-03 05:56:15,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.44 vs. 
limit=15.0 +2024-08-03 05:56:23,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=51887.0, ans=0.125 +2024-08-03 05:56:39,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=51923.666666666664, ans=0.125 +2024-08-03 05:56:40,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=51923.666666666664, ans=0.0 +2024-08-03 05:56:51,448 INFO [train.py:1114] (0/4) Epoch 4, batch 3250, loss[loss=0.304, simple_loss=0.3701, pruned_loss=0.119, over 13390.00 frames. ], tot_loss[loss=0.2744, simple_loss=0.341, pruned_loss=0.1039, over 2638632.29 frames. ], batch size: 38, lr: 2.68e-02, grad_scale: 32.0 +2024-08-03 05:57:12,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=51997.0, ans=0.0 +2024-08-03 05:57:26,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=52033.666666666664, ans=0.125 +2024-08-03 05:58:06,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=52070.333333333336, ans=0.125 +2024-08-03 05:58:17,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52107.0, ans=0.1 +2024-08-03 05:58:22,583 INFO [train.py:1114] (0/4) Epoch 4, batch 3300, loss[loss=0.2847, simple_loss=0.3512, pruned_loss=0.1091, over 12867.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3402, pruned_loss=0.1039, over 2639592.20 frames. ], batch size: 52, lr: 2.68e-02, grad_scale: 32.0 +2024-08-03 05:58:34,523 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.337e+02 1.543e+02 1.796e+02 2.309e+02, threshold=3.087e+02, percent-clipped=0.0 +2024-08-03 05:58:51,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=52217.0, ans=0.0 +2024-08-03 05:59:00,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52253.666666666664, ans=0.125 +2024-08-03 05:59:14,196 INFO [train.py:1114] (0/4) Epoch 4, batch 3350, loss[loss=0.3157, simple_loss=0.3762, pruned_loss=0.1276, over 13046.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.3421, pruned_loss=0.1053, over 2628700.83 frames. ], batch size: 48, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 05:59:28,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=52363.666666666664, ans=0.0 +2024-08-03 05:59:46,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. 
limit=22.5 +2024-08-03 05:59:56,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=52473.666666666664, ans=0.025 +2024-08-03 05:59:58,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=52473.666666666664, ans=0.0 +2024-08-03 06:00:01,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=52473.666666666664, ans=0.125 +2024-08-03 06:00:05,696 INFO [train.py:1114] (0/4) Epoch 4, batch 3400, loss[loss=0.2398, simple_loss=0.3004, pruned_loss=0.08956, over 13517.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3421, pruned_loss=0.1056, over 2624428.82 frames. ], batch size: 31, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 06:00:15,146 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.426e+02 1.702e+02 2.054e+02 4.258e+02, threshold=3.404e+02, percent-clipped=2.0 +2024-08-03 06:00:25,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=52583.666666666664, ans=0.125 +2024-08-03 06:00:37,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=52620.333333333336, ans=0.0 +2024-08-03 06:00:37,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52620.333333333336, ans=0.1 +2024-08-03 06:00:44,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-08-03 06:00:46,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=52657.0, ans=0.0 +2024-08-03 06:00:48,776 INFO [train.py:1114] (0/4) Epoch 4, batch 3450, loss[loss=0.3341, simple_loss=0.3953, pruned_loss=0.1365, over 12968.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.342, pruned_loss=0.1052, over 2628407.74 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 06:00:50,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52693.666666666664, ans=0.125 +2024-08-03 06:01:09,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=52767.0, ans=0.0 +2024-08-03 06:01:12,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=52767.0, ans=0.0 +2024-08-03 06:01:16,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52767.0, ans=0.1 +2024-08-03 06:01:19,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=52803.666666666664, ans=0.025 +2024-08-03 06:01:22,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=52803.666666666664, ans=0.125 +2024-08-03 06:01:36,390 INFO [train.py:1114] (0/4) Epoch 4, batch 3500, loss[loss=0.2558, simple_loss=0.3219, pruned_loss=0.09479, over 13535.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3409, pruned_loss=0.1049, over 2630233.71 frames. 
], batch size: 34, lr: 2.66e-02, grad_scale: 32.0 +2024-08-03 06:01:39,760 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:01:44,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=52913.666666666664, ans=0.2 +2024-08-03 06:01:45,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52913.666666666664, ans=0.0 +2024-08-03 06:01:45,565 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.329e+02 1.542e+02 1.871e+02 3.471e+02, threshold=3.085e+02, percent-clipped=1.0 +2024-08-03 06:01:45,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=15.0 +2024-08-03 06:02:15,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.80 vs. limit=15.0 +2024-08-03 06:02:18,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.85 vs. limit=15.0 +2024-08-03 06:02:20,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=52987.0, ans=0.025 +2024-08-03 06:02:23,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52987.0, ans=0.1 +2024-08-03 06:02:37,267 INFO [train.py:1114] (0/4) Epoch 4, batch 3550, loss[loss=0.326, simple_loss=0.3778, pruned_loss=0.1371, over 12413.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3433, pruned_loss=0.1061, over 2628593.45 frames. ], batch size: 58, lr: 2.66e-02, grad_scale: 32.0 +2024-08-03 06:02:37,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=53060.333333333336, ans=0.05 +2024-08-03 06:03:16,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=53170.333333333336, ans=0.2 +2024-08-03 06:03:20,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53170.333333333336, ans=0.1 +2024-08-03 06:03:32,783 INFO [train.py:1114] (0/4) Epoch 4, batch 3600, loss[loss=0.3123, simple_loss=0.363, pruned_loss=0.1308, over 9117.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3492, pruned_loss=0.1126, over 2488917.63 frames. 
], batch size: 96, lr: 2.66e-02, grad_scale: 16.0 +2024-08-03 06:03:34,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=53243.666666666664, ans=0.125 +2024-08-03 06:03:36,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=53243.666666666664, ans=0.125 +2024-08-03 06:03:57,027 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.338e+02 1.465e+02 1.631e+02 2.841e+02, threshold=2.930e+02, percent-clipped=0.0 +2024-08-03 06:04:06,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=53317.0, ans=0.125 +2024-08-03 06:04:10,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.62 vs. limit=6.0 +2024-08-03 06:04:12,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=53317.0, ans=0.0 +2024-08-03 06:04:15,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53353.666666666664, ans=0.125 +2024-08-03 06:04:17,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=53353.666666666664, ans=0.0 +2024-08-03 06:04:19,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=53353.666666666664, ans=0.125 +2024-08-03 06:04:22,710 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-4.pt +2024-08-03 06:05:08,610 INFO [train.py:1114] (0/4) Epoch 5, batch 0, loss[loss=0.2386, simple_loss=0.3089, pruned_loss=0.08417, over 13355.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3089, pruned_loss=0.08417, over 13355.00 frames. ], batch size: 33, lr: 2.47e-02, grad_scale: 32.0 +2024-08-03 06:05:08,611 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 06:05:18,611 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.231, simple_loss=0.3271, pruned_loss=0.06749, over 944034.00 frames. +2024-08-03 06:05:18,612 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 06:05:31,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53430.666666666664, ans=0.125 +2024-08-03 06:05:48,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.20 vs. limit=15.0 +2024-08-03 06:05:51,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=53504.0, ans=0.125 +2024-08-03 06:05:56,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=53540.666666666664, ans=0.125 +2024-08-03 06:06:05,013 INFO [train.py:1114] (0/4) Epoch 5, batch 50, loss[loss=0.2471, simple_loss=0.317, pruned_loss=0.08855, over 13436.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3447, pruned_loss=0.1058, over 578594.97 frames. 
], batch size: 32, lr: 2.47e-02, grad_scale: 32.0 +2024-08-03 06:06:12,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=53577.333333333336, ans=0.035 +2024-08-03 06:06:24,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.396e+02 1.612e+02 2.008e+02 3.505e+02, threshold=3.224e+02, percent-clipped=4.0 +2024-08-03 06:06:26,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.33 vs. limit=15.0 +2024-08-03 06:06:31,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53687.333333333336, ans=0.1 +2024-08-03 06:06:32,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=53687.333333333336, ans=0.1 +2024-08-03 06:06:41,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=53724.0, ans=0.125 +2024-08-03 06:06:49,913 INFO [train.py:1114] (0/4) Epoch 5, batch 100, loss[loss=0.2955, simple_loss=0.3532, pruned_loss=0.1189, over 13535.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3446, pruned_loss=0.1054, over 1025775.82 frames. ], batch size: 35, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:07:02,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=53797.333333333336, ans=0.015 +2024-08-03 06:07:42,380 INFO [train.py:1114] (0/4) Epoch 5, batch 150, loss[loss=0.2606, simple_loss=0.3247, pruned_loss=0.09825, over 13419.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3393, pruned_loss=0.1022, over 1387315.37 frames. ], batch size: 32, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:07:55,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=15.0 +2024-08-03 06:08:01,997 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.304e+02 1.445e+02 1.840e+02 3.127e+02, threshold=2.891e+02, percent-clipped=0.0 +2024-08-03 06:08:07,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=54017.333333333336, ans=0.125 +2024-08-03 06:08:19,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54054.0, ans=0.1 +2024-08-03 06:08:56,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=54090.666666666664, ans=0.2 +2024-08-03 06:09:17,821 INFO [train.py:1114] (0/4) Epoch 5, batch 200, loss[loss=0.251, simple_loss=0.3206, pruned_loss=0.09072, over 12360.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3364, pruned_loss=0.1005, over 1665803.28 frames. 
], batch size: 58, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:09:26,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=54127.333333333336, ans=0.2 +2024-08-03 06:10:08,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54237.333333333336, ans=0.1 +2024-08-03 06:10:19,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54237.333333333336, ans=0.125 +2024-08-03 06:10:36,820 INFO [train.py:1114] (0/4) Epoch 5, batch 250, loss[loss=0.3073, simple_loss=0.3763, pruned_loss=0.1191, over 13337.00 frames. ], tot_loss[loss=0.269, simple_loss=0.337, pruned_loss=0.1006, over 1883931.78 frames. ], batch size: 46, lr: 2.45e-02, grad_scale: 32.0 +2024-08-03 06:10:50,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.35 vs. limit=22.5 +2024-08-03 06:11:04,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=54384.0, ans=0.125 +2024-08-03 06:11:05,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.288e+02 1.425e+02 1.791e+02 2.775e+02, threshold=2.850e+02, percent-clipped=0.0 +2024-08-03 06:11:11,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=54420.666666666664, ans=0.0 +2024-08-03 06:11:14,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54420.666666666664, ans=0.1 +2024-08-03 06:11:47,369 INFO [train.py:1114] (0/4) Epoch 5, batch 300, loss[loss=0.2521, simple_loss=0.3292, pruned_loss=0.08745, over 13423.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3356, pruned_loss=0.0997, over 2050654.80 frames. ], batch size: 42, lr: 2.45e-02, grad_scale: 16.0 +2024-08-03 06:11:59,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.52 vs. limit=15.0 +2024-08-03 06:12:06,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=54530.666666666664, ans=0.125 +2024-08-03 06:12:21,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=54567.333333333336, ans=0.125 +2024-08-03 06:12:23,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=54567.333333333336, ans=0.2 +2024-08-03 06:12:31,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54604.0, ans=0.1 +2024-08-03 06:12:38,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=54640.666666666664, ans=0.0 +2024-08-03 06:12:46,941 INFO [train.py:1114] (0/4) Epoch 5, batch 350, loss[loss=0.2362, simple_loss=0.3072, pruned_loss=0.08261, over 13592.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3358, pruned_loss=0.09932, over 2180370.92 frames. 
], batch size: 33, lr: 2.45e-02, grad_scale: 16.0 +2024-08-03 06:12:49,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.37 vs. limit=15.0 +2024-08-03 06:13:00,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=54714.0, ans=0.025 +2024-08-03 06:13:04,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=54714.0, ans=0.125 +2024-08-03 06:14:22,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.49 vs. limit=10.0 +2024-08-03 06:14:29,333 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.351e+02 1.704e+02 2.152e+02 5.145e+02, threshold=3.407e+02, percent-clipped=8.0 +2024-08-03 06:14:33,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=54750.666666666664, ans=0.2 +2024-08-03 06:14:49,685 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:14:54,827 INFO [train.py:1114] (0/4) Epoch 5, batch 400, loss[loss=0.2695, simple_loss=0.3438, pruned_loss=0.09758, over 13349.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3355, pruned_loss=0.09905, over 2284524.98 frames. ], batch size: 37, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:15:05,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=54897.333333333336, ans=0.125 +2024-08-03 06:15:09,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=54897.333333333336, ans=0.125 +2024-08-03 06:15:30,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=55007.333333333336, ans=0.1 +2024-08-03 06:15:40,330 INFO [train.py:1114] (0/4) Epoch 5, batch 450, loss[loss=0.2745, simple_loss=0.3501, pruned_loss=0.09941, over 13559.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3361, pruned_loss=0.09958, over 2358377.62 frames. ], batch size: 38, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:15:44,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=55044.0, ans=0.0 +2024-08-03 06:15:51,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. 
limit=15.0 +2024-08-03 06:15:53,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=55080.666666666664, ans=0.125 +2024-08-03 06:15:55,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=55080.666666666664, ans=0.2 +2024-08-03 06:16:00,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=55117.333333333336, ans=0.125 +2024-08-03 06:16:01,312 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.371e+02 1.584e+02 1.939e+02 3.313e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-03 06:16:01,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=55117.333333333336, ans=0.0 +2024-08-03 06:16:34,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=55227.333333333336, ans=0.125 +2024-08-03 06:16:34,935 INFO [train.py:1114] (0/4) Epoch 5, batch 500, loss[loss=0.2665, simple_loss=0.3437, pruned_loss=0.09468, over 13424.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3349, pruned_loss=0.09907, over 2423596.86 frames. ], batch size: 43, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:16:39,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.53 vs. limit=6.0 +2024-08-03 06:16:45,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=55264.0, ans=0.0 +2024-08-03 06:16:45,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=55264.0, ans=0.125 +2024-08-03 06:17:43,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=55300.666666666664, ans=0.0 +2024-08-03 06:17:45,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55337.333333333336, ans=0.125 +2024-08-03 06:17:54,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-08-03 06:19:32,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=55374.0, ans=0.125 +2024-08-03 06:19:33,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=55374.0, ans=0.025 +2024-08-03 06:19:39,803 INFO [train.py:1114] (0/4) Epoch 5, batch 550, loss[loss=0.264, simple_loss=0.3406, pruned_loss=0.09368, over 13015.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.334, pruned_loss=0.09857, over 2465902.63 frames. 
], batch size: 48, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:21:08,476 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:21:12,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=55410.666666666664, ans=0.125 +2024-08-03 06:21:26,877 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.350e+02 1.520e+02 1.792e+02 6.308e+02, threshold=3.041e+02, percent-clipped=2.0 +2024-08-03 06:21:40,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55520.666666666664, ans=0.1 +2024-08-03 06:21:46,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-08-03 06:22:05,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=55557.333333333336, ans=0.2 +2024-08-03 06:22:14,739 INFO [train.py:1114] (0/4) Epoch 5, batch 600, loss[loss=0.3162, simple_loss=0.3829, pruned_loss=0.1247, over 13339.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3343, pruned_loss=0.09885, over 2506485.47 frames. ], batch size: 46, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:22:19,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=55594.0, ans=0.125 +2024-08-03 06:22:21,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0 +2024-08-03 06:22:29,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0 +2024-08-03 06:22:29,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55630.666666666664, ans=0.1 +2024-08-03 06:22:30,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55630.666666666664, ans=0.1 +2024-08-03 06:22:32,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-08-03 06:22:39,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=55667.333333333336, ans=0.2 +2024-08-03 06:22:48,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=55704.0, ans=0.0 +2024-08-03 06:23:07,604 INFO [train.py:1114] (0/4) Epoch 5, batch 650, loss[loss=0.2224, simple_loss=0.3015, pruned_loss=0.07164, over 13537.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.333, pruned_loss=0.09773, over 2542659.60 frames. 
], batch size: 37, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:23:12,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=55777.333333333336, ans=0.04949747468305833 +2024-08-03 06:23:27,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=55777.333333333336, ans=0.07 +2024-08-03 06:23:39,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55814.0, ans=0.0 +2024-08-03 06:23:43,270 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.304e+02 1.464e+02 1.924e+02 3.409e+02, threshold=2.927e+02, percent-clipped=2.0 +2024-08-03 06:24:06,197 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:24:09,736 INFO [train.py:1114] (0/4) Epoch 5, batch 700, loss[loss=0.2704, simple_loss=0.3323, pruned_loss=0.1042, over 13544.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3337, pruned_loss=0.09818, over 2565197.15 frames. ], batch size: 35, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:24:13,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=55960.666666666664, ans=0.125 +2024-08-03 06:24:26,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=55960.666666666664, ans=0.2 +2024-08-03 06:24:35,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0 +2024-08-03 06:24:43,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.06 vs. limit=15.0 +2024-08-03 06:24:44,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=56034.0, ans=0.025 +2024-08-03 06:25:03,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0 +2024-08-03 06:25:06,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=56107.333333333336, ans=0.0 +2024-08-03 06:25:08,402 INFO [train.py:1114] (0/4) Epoch 5, batch 750, loss[loss=0.2551, simple_loss=0.3337, pruned_loss=0.08824, over 13357.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3329, pruned_loss=0.09784, over 2581861.02 frames. 
], batch size: 37, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:25:34,268 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.431e+02 1.731e+02 2.437e+02 4.529e+02, threshold=3.462e+02, percent-clipped=10.0 +2024-08-03 06:25:35,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=56217.333333333336, ans=0.0 +2024-08-03 06:25:39,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=56217.333333333336, ans=0.025 +2024-08-03 06:25:39,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=56217.333333333336, ans=0.0 +2024-08-03 06:25:54,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=56290.666666666664, ans=0.0 +2024-08-03 06:25:59,099 INFO [train.py:1114] (0/4) Epoch 5, batch 800, loss[loss=0.2213, simple_loss=0.2918, pruned_loss=0.07544, over 13337.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3324, pruned_loss=0.09752, over 2596045.31 frames. ], batch size: 33, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:26:07,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=56364.0, ans=0.125 +2024-08-03 06:26:13,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-08-03 06:26:39,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=56437.333333333336, ans=0.0 +2024-08-03 06:26:58,695 INFO [train.py:1114] (0/4) Epoch 5, batch 850, loss[loss=0.2629, simple_loss=0.3415, pruned_loss=0.09219, over 13330.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3324, pruned_loss=0.09761, over 2608746.29 frames. ], batch size: 40, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:27:16,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.87 vs. limit=22.5 +2024-08-03 06:27:19,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.293e+02 1.480e+02 2.210e+02 4.419e+02, threshold=2.961e+02, percent-clipped=1.0 +2024-08-03 06:27:22,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=56584.0, ans=0.125 +2024-08-03 06:27:32,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56620.666666666664, ans=0.1 +2024-08-03 06:27:41,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=56657.333333333336, ans=0.125 +2024-08-03 06:27:41,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56657.333333333336, ans=0.1 +2024-08-03 06:27:44,537 INFO [train.py:1114] (0/4) Epoch 5, batch 900, loss[loss=0.2382, simple_loss=0.3166, pruned_loss=0.07985, over 13336.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3328, pruned_loss=0.0979, over 2611648.50 frames. 
], batch size: 33, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:28:07,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=56767.333333333336, ans=0.125 +2024-08-03 06:28:16,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=56804.0, ans=0.1 +2024-08-03 06:28:27,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=56840.666666666664, ans=0.2 +2024-08-03 06:28:39,184 INFO [train.py:1114] (0/4) Epoch 5, batch 950, loss[loss=0.2228, simple_loss=0.3005, pruned_loss=0.07249, over 13528.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3323, pruned_loss=0.09735, over 2612430.44 frames. ], batch size: 34, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:28:54,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=56914.0, ans=0.5 +2024-08-03 06:28:58,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=56950.666666666664, ans=0.0 +2024-08-03 06:29:01,399 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.321e+02 1.545e+02 1.895e+02 5.386e+02, threshold=3.090e+02, percent-clipped=1.0 +2024-08-03 06:29:02,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=56950.666666666664, ans=0.125 +2024-08-03 06:29:02,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56950.666666666664, ans=0.1 +2024-08-03 06:29:07,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=56987.333333333336, ans=0.0 +2024-08-03 06:29:33,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=15.0 +2024-08-03 06:29:35,305 INFO [train.py:1114] (0/4) Epoch 5, batch 1000, loss[loss=0.2502, simple_loss=0.3147, pruned_loss=0.09285, over 13361.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3335, pruned_loss=0.09835, over 2611379.57 frames. ], batch size: 35, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:30:31,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=57207.333333333336, ans=0.0 +2024-08-03 06:30:40,641 INFO [train.py:1114] (0/4) Epoch 5, batch 1050, loss[loss=0.2697, simple_loss=0.3442, pruned_loss=0.09756, over 13568.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3325, pruned_loss=0.09757, over 2615138.01 frames. ], batch size: 39, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:31:00,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=57317.333333333336, ans=0.125 +2024-08-03 06:31:01,350 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.349e+02 1.601e+02 2.002e+02 3.488e+02, threshold=3.202e+02, percent-clipped=3.0 +2024-08-03 06:31:20,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.90 vs. 
limit=15.0 +2024-08-03 06:31:31,793 INFO [train.py:1114] (0/4) Epoch 5, batch 1100, loss[loss=0.2774, simple_loss=0.3435, pruned_loss=0.1057, over 13565.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3322, pruned_loss=0.09763, over 2619445.07 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:31:35,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=57427.333333333336, ans=0.2 +2024-08-03 06:31:46,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=57464.0, ans=0.125 +2024-08-03 06:31:46,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=57464.0, ans=0.125 +2024-08-03 06:31:47,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=57464.0, ans=0.0 +2024-08-03 06:32:29,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=57574.0, ans=0.0 +2024-08-03 06:32:31,926 INFO [train.py:1114] (0/4) Epoch 5, batch 1150, loss[loss=0.2637, simple_loss=0.3284, pruned_loss=0.09955, over 13551.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3326, pruned_loss=0.09807, over 2618442.80 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:32:43,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=57610.666666666664, ans=0.125 +2024-08-03 06:32:49,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=57647.333333333336, ans=0.05 +2024-08-03 06:32:50,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.57 vs. limit=22.5 +2024-08-03 06:32:53,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57647.333333333336, ans=0.1 +2024-08-03 06:32:59,172 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.317e+02 1.572e+02 1.915e+02 2.951e+02, threshold=3.144e+02, percent-clipped=0.0 +2024-08-03 06:33:09,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=57684.0, ans=0.05 +2024-08-03 06:33:22,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57720.666666666664, ans=0.125 +2024-08-03 06:33:35,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57757.333333333336, ans=0.125 +2024-08-03 06:33:44,660 INFO [train.py:1114] (0/4) Epoch 5, batch 1200, loss[loss=0.2786, simple_loss=0.3526, pruned_loss=0.1023, over 13585.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3337, pruned_loss=0.09839, over 2615571.81 frames. ], batch size: 39, lr: 2.39e-02, grad_scale: 32.0 +2024-08-03 06:33:44,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=57794.0, ans=10.0 +2024-08-03 06:34:03,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.59 vs. 
limit=15.0 +2024-08-03 06:34:11,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57904.0, ans=0.125 +2024-08-03 06:34:16,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=15.0 +2024-08-03 06:34:22,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.99 vs. limit=15.0 +2024-08-03 06:34:33,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=57940.666666666664, ans=0.0 +2024-08-03 06:34:33,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57940.666666666664, ans=0.125 +2024-08-03 06:34:34,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=57940.666666666664, ans=0.125 +2024-08-03 06:34:37,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=57940.666666666664, ans=0.125 +2024-08-03 06:34:39,117 INFO [train.py:1114] (0/4) Epoch 5, batch 1250, loss[loss=0.276, simple_loss=0.3519, pruned_loss=0.1001, over 13444.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3339, pruned_loss=0.09798, over 2627732.85 frames. ], batch size: 42, lr: 2.39e-02, grad_scale: 32.0 +2024-08-03 06:35:05,297 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.303e+02 1.543e+02 2.003e+02 3.165e+02, threshold=3.086e+02, percent-clipped=1.0 +2024-08-03 06:35:13,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58087.333333333336, ans=0.1 +2024-08-03 06:35:17,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=58087.333333333336, ans=0.2 +2024-08-03 06:35:20,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=58087.333333333336, ans=0.125 +2024-08-03 06:35:27,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=58124.0, ans=0.125 +2024-08-03 06:35:32,188 INFO [train.py:1114] (0/4) Epoch 5, batch 1300, loss[loss=0.2977, simple_loss=0.3615, pruned_loss=0.1169, over 12940.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3329, pruned_loss=0.09748, over 2631239.05 frames. ], batch size: 52, lr: 2.39e-02, grad_scale: 16.0 +2024-08-03 06:35:40,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.75 vs. limit=15.0 +2024-08-03 06:35:51,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=58234.0, ans=0.025 +2024-08-03 06:36:08,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=58307.333333333336, ans=0.0 +2024-08-03 06:36:17,118 INFO [train.py:1114] (0/4) Epoch 5, batch 1350, loss[loss=0.2552, simple_loss=0.3253, pruned_loss=0.09251, over 13539.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3328, pruned_loss=0.09734, over 2638488.23 frames. 
], batch size: 37, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:36:26,131 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:36:29,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=58380.666666666664, ans=0.0 +2024-08-03 06:36:30,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=58380.666666666664, ans=0.0 +2024-08-03 06:36:35,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58417.333333333336, ans=0.1 +2024-08-03 06:36:39,401 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.960e+01 1.325e+02 1.559e+02 1.988e+02 3.487e+02, threshold=3.118e+02, percent-clipped=2.0 +2024-08-03 06:36:52,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.32 vs. limit=22.5 +2024-08-03 06:37:04,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=58454.0, ans=0.125 +2024-08-03 06:37:12,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.73 vs. limit=15.0 +2024-08-03 06:37:14,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=58490.666666666664, ans=0.125 +2024-08-03 06:37:16,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=58527.333333333336, ans=0.025 +2024-08-03 06:37:17,006 INFO [train.py:1114] (0/4) Epoch 5, batch 1400, loss[loss=0.2391, simple_loss=0.2937, pruned_loss=0.09225, over 13256.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3321, pruned_loss=0.09732, over 2642290.84 frames. ], batch size: 31, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:37:21,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0 +2024-08-03 06:37:31,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=58564.0, ans=0.2 +2024-08-03 06:37:51,180 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-16000.pt +2024-08-03 06:37:58,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.54 vs. limit=15.0 +2024-08-03 06:38:07,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=58710.666666666664, ans=0.0 +2024-08-03 06:38:07,867 INFO [train.py:1114] (0/4) Epoch 5, batch 1450, loss[loss=0.2889, simple_loss=0.3605, pruned_loss=0.1087, over 13409.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3333, pruned_loss=0.09803, over 2641396.95 frames. 
], batch size: 43, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:38:12,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=58710.666666666664, ans=0.2 +2024-08-03 06:38:30,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.374e+02 1.719e+02 2.363e+02 5.392e+02, threshold=3.437e+02, percent-clipped=8.0 +2024-08-03 06:38:41,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58784.0, ans=0.1 +2024-08-03 06:38:53,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=58857.333333333336, ans=0.0 +2024-08-03 06:38:58,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=58857.333333333336, ans=0.0 +2024-08-03 06:39:04,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58857.333333333336, ans=0.125 +2024-08-03 06:39:05,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=58894.0, ans=0.125 +2024-08-03 06:39:06,292 INFO [train.py:1114] (0/4) Epoch 5, batch 1500, loss[loss=0.3078, simple_loss=0.3748, pruned_loss=0.1204, over 13406.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3326, pruned_loss=0.09691, over 2640670.32 frames. ], batch size: 39, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:39:06,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58894.0, ans=0.1 +2024-08-03 06:39:07,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=58894.0, ans=0.125 +2024-08-03 06:39:14,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=58894.0, ans=0.04949747468305833 +2024-08-03 06:39:45,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=58930.666666666664, ans=0.05 +2024-08-03 06:40:12,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=59040.666666666664, ans=0.125 +2024-08-03 06:40:16,414 INFO [train.py:1114] (0/4) Epoch 5, batch 1550, loss[loss=0.2672, simple_loss=0.3352, pruned_loss=0.09958, over 13399.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3336, pruned_loss=0.09788, over 2630821.28 frames. 
], batch size: 41, lr: 2.37e-02, grad_scale: 16.0 +2024-08-03 06:40:35,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=59150.666666666664, ans=0.2 +2024-08-03 06:40:39,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.330e+02 1.569e+02 1.992e+02 3.164e+02, threshold=3.138e+02, percent-clipped=1.0 +2024-08-03 06:40:50,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=59187.333333333336, ans=0.125 +2024-08-03 06:41:38,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=59224.0, ans=0.125 +2024-08-03 06:41:40,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=59224.0, ans=0.125 +2024-08-03 06:41:47,120 INFO [train.py:1114] (0/4) Epoch 5, batch 1600, loss[loss=0.2588, simple_loss=0.3384, pruned_loss=0.08964, over 13577.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.333, pruned_loss=0.09784, over 2623898.01 frames. ], batch size: 39, lr: 2.37e-02, grad_scale: 32.0 +2024-08-03 06:41:58,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=59297.333333333336, ans=0.5 +2024-08-03 06:42:24,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.54 vs. limit=12.0 +2024-08-03 06:42:27,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=59407.333333333336, ans=0.025 +2024-08-03 06:42:30,888 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:42:38,319 INFO [train.py:1114] (0/4) Epoch 5, batch 1650, loss[loss=0.2547, simple_loss=0.3333, pruned_loss=0.08802, over 13334.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3326, pruned_loss=0.09789, over 2620615.64 frames. ], batch size: 40, lr: 2.37e-02, grad_scale: 32.0 +2024-08-03 06:42:50,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=59480.666666666664, ans=0.125 +2024-08-03 06:42:58,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.16 vs. limit=10.0 +2024-08-03 06:43:04,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.342e+02 1.500e+02 2.074e+02 4.077e+02, threshold=2.999e+02, percent-clipped=4.0 +2024-08-03 06:43:09,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=59554.0, ans=0.125 +2024-08-03 06:43:11,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=59554.0, ans=0.125 +2024-08-03 06:43:24,816 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.18 vs. limit=15.0 +2024-08-03 06:43:27,969 INFO [train.py:1114] (0/4) Epoch 5, batch 1700, loss[loss=0.2192, simple_loss=0.2878, pruned_loss=0.07533, over 13241.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3318, pruned_loss=0.09708, over 2629399.33 frames. 
], batch size: 31, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:43:37,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=59664.0, ans=0.0 +2024-08-03 06:43:44,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=59664.0, ans=0.2 +2024-08-03 06:43:45,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.86 vs. limit=22.5 +2024-08-03 06:44:17,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=59774.0, ans=0.125 +2024-08-03 06:44:20,426 INFO [train.py:1114] (0/4) Epoch 5, batch 1750, loss[loss=0.2246, simple_loss=0.2945, pruned_loss=0.0774, over 13535.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.331, pruned_loss=0.09675, over 2632983.48 frames. ], batch size: 31, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:44:30,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=59847.333333333336, ans=0.07 +2024-08-03 06:44:35,561 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:44:36,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=59847.333333333336, ans=0.0 +2024-08-03 06:44:37,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=59847.333333333336, ans=0.125 +2024-08-03 06:44:50,135 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.019e+02 1.258e+02 1.421e+02 1.677e+02 2.914e+02, threshold=2.843e+02, percent-clipped=0.0 +2024-08-03 06:44:50,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=59884.0, ans=22.5 +2024-08-03 06:44:57,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=59920.666666666664, ans=0.125 +2024-08-03 06:45:07,254 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-08-03 06:45:08,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.57 vs. limit=15.0 +2024-08-03 06:45:21,417 INFO [train.py:1114] (0/4) Epoch 5, batch 1800, loss[loss=0.2817, simple_loss=0.3552, pruned_loss=0.1041, over 13531.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3315, pruned_loss=0.0971, over 2634669.51 frames. ], batch size: 38, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:45:33,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=60030.666666666664, ans=0.0 +2024-08-03 06:45:52,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=60104.0, ans=0.0 +2024-08-03 06:49:33,893 INFO [train.py:1114] (0/4) Epoch 5, batch 1850, loss[loss=0.2618, simple_loss=0.3404, pruned_loss=0.09164, over 13397.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3316, pruned_loss=0.09688, over 2636872.75 frames. 
], batch size: 39, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:49:44,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=60177.333333333336, ans=0.125 +2024-08-03 06:51:09,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=60250.666666666664, ans=0.2 +2024-08-03 06:51:23,620 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.315e+02 1.584e+02 1.966e+02 3.228e+02, threshold=3.167e+02, percent-clipped=4.0 +2024-08-03 06:51:25,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=60250.666666666664, ans=0.125 +2024-08-03 06:51:31,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=60287.333333333336, ans=0.025 +2024-08-03 06:51:37,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.87 vs. limit=15.0 +2024-08-03 06:52:37,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60324.0, ans=0.125 +2024-08-03 06:52:51,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=60324.0, ans=0.2 +2024-08-03 06:52:57,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=60324.0, ans=0.0 +2024-08-03 06:53:02,070 INFO [train.py:1114] (0/4) Epoch 5, batch 1900, loss[loss=0.293, simple_loss=0.369, pruned_loss=0.1084, over 13328.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3325, pruned_loss=0.09726, over 2639071.80 frames. ], batch size: 40, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:53:13,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.63 vs. limit=22.5 +2024-08-03 06:57:11,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=60397.333333333336, ans=0.125 +2024-08-03 06:57:55,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=60434.0, ans=0.0 +2024-08-03 06:58:14,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-08-03 06:58:28,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=60470.666666666664, ans=0.025 +2024-08-03 06:58:28,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=60470.666666666664, ans=0.0 +2024-08-03 06:58:54,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=60507.333333333336, ans=0.125 +2024-08-03 06:58:55,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=60507.333333333336, ans=0.2 +2024-08-03 06:58:59,371 INFO [train.py:1114] (0/4) Epoch 5, batch 1950, loss[loss=0.2822, simple_loss=0.3434, pruned_loss=0.1105, over 13556.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3333, pruned_loss=0.09731, over 2645820.04 frames. 
], batch size: 36, lr: 2.35e-02, grad_scale: 32.0
+2024-08-03 06:58:59,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=60544.0, ans=0.05
+2024-08-03 06:59:02,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.74 vs. limit=22.5
+2024-08-03 07:01:31,517 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.403e+02 1.665e+02 1.976e+02 3.868e+02, threshold=3.331e+02, percent-clipped=1.0
+2024-08-03 07:02:08,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=60690.666666666664, ans=0.125
+2024-08-03 07:02:10,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=60690.666666666664, ans=0.0
+2024-08-03 07:03:22,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=60727.333333333336, ans=0.0
+2024-08-03 07:03:26,352 INFO [train.py:1114] (0/4) Epoch 5, batch 2000, loss[loss=0.2236, simple_loss=0.2884, pruned_loss=0.07941, over 13494.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3335, pruned_loss=0.0972, over 2635037.29 frames. ], batch size: 31, lr: 2.35e-02, grad_scale: 32.0
+2024-08-03 07:03:29,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=60727.333333333336, ans=0.125
+2024-08-03 07:03:35,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=60764.0, ans=0.125
+2024-08-03 07:03:40,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60764.0, ans=0.0
+2024-08-03 07:03:59,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60837.333333333336, ans=0.1
+2024-08-03 07:04:02,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60837.333333333336, ans=0.125
+2024-08-03 07:04:02,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=60837.333333333336, ans=0.125
+2024-08-03 07:04:31,369 INFO [train.py:1114] (0/4) Epoch 5, batch 2050, loss[loss=0.2283, simple_loss=0.2903, pruned_loss=0.08317, over 13395.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.332, pruned_loss=0.09718, over 2631835.28 frames. ], batch size: 32, lr: 2.34e-02, grad_scale: 32.0
+2024-08-03 07:04:40,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=60910.666666666664, ans=0.125
+2024-08-03 07:05:01,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=60947.333333333336, ans=0.125
+2024-08-03 07:05:08,122 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.320e+02 1.526e+02 1.984e+02 3.306e+02, threshold=3.052e+02, percent-clipped=0.0
+2024-08-03 07:05:13,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.81 vs. limit=22.5
+2024-08-03 07:05:29,966 INFO [train.py:1114] (0/4) Epoch 5, batch 2100, loss[loss=0.2365, simple_loss=0.3091, pruned_loss=0.08193, over 13559.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3307, pruned_loss=0.09611, over 2637923.12 frames. ], batch size: 37, lr: 2.34e-02, grad_scale: 16.0
+2024-08-03 07:05:33,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=61094.0, ans=0.125
+2024-08-03 07:05:35,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0
+2024-08-03 07:07:43,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61204.0, ans=0.1
+2024-08-03 07:07:46,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=61204.0, ans=0.125
+2024-08-03 07:07:50,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=61240.666666666664, ans=0.0
+2024-08-03 07:07:53,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61240.666666666664, ans=0.1
+2024-08-03 07:07:57,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=15.0
+2024-08-03 07:08:00,471 INFO [train.py:1114] (0/4) Epoch 5, batch 2150, loss[loss=0.274, simple_loss=0.3407, pruned_loss=0.1037, over 13553.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3298, pruned_loss=0.09566, over 2647098.06 frames. ], batch size: 36, lr: 2.34e-02, grad_scale: 16.0
+2024-08-03 07:08:02,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61277.333333333336, ans=0.125
+2024-08-03 07:08:06,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=61277.333333333336, ans=0.125
+2024-08-03 07:08:25,508 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.319e+02 1.581e+02 2.053e+02 4.024e+02, threshold=3.163e+02, percent-clipped=3.0
+2024-08-03 07:08:31,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61387.333333333336, ans=0.1
+2024-08-03 07:08:46,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=61424.0, ans=0.125
+2024-08-03 07:08:50,658 INFO [train.py:1114] (0/4) Epoch 5, batch 2200, loss[loss=0.259, simple_loss=0.3357, pruned_loss=0.09114, over 13395.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3293, pruned_loss=0.09508, over 2644924.82 frames. ], batch size: 39, lr: 2.33e-02, grad_scale: 16.0
+2024-08-03 07:08:54,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.42 vs. limit=12.0
+2024-08-03 07:09:13,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61534.0, ans=0.1
+2024-08-03 07:09:31,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=61607.333333333336, ans=0.0
+2024-08-03 07:09:41,568 INFO [train.py:1114] (0/4) Epoch 5, batch 2250, loss[loss=0.2655, simple_loss=0.34, pruned_loss=0.09555, over 13345.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3286, pruned_loss=0.09447, over 2642742.83 frames. ], batch size: 37, lr: 2.33e-02, grad_scale: 16.0
+2024-08-03 07:10:05,794 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.390e+02 1.682e+02 2.115e+02 4.078e+02, threshold=3.364e+02, percent-clipped=8.0
+2024-08-03 07:10:09,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.72 vs. limit=6.0
+2024-08-03 07:10:36,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=61754.0, ans=0.125
+2024-08-03 07:10:55,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0
+2024-08-03 07:11:58,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=61790.666666666664, ans=0.2
+2024-08-03 07:12:06,479 INFO [train.py:1114] (0/4) Epoch 5, batch 2300, loss[loss=0.2105, simple_loss=0.2825, pruned_loss=0.06926, over 13567.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3268, pruned_loss=0.09372, over 2639024.23 frames. ], batch size: 33, lr: 2.33e-02, grad_scale: 16.0
+2024-08-03 07:13:49,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.76 vs. limit=15.0
+2024-08-03 07:13:52,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0
+2024-08-03 07:15:22,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=61900.666666666664, ans=0.125
+2024-08-03 07:16:21,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=61974.0, ans=0.125
+2024-08-03 07:16:35,461 INFO [train.py:1114] (0/4) Epoch 5, batch 2350, loss[loss=0.2443, simple_loss=0.3207, pruned_loss=0.08392, over 13554.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3269, pruned_loss=0.09379, over 2641206.95 frames. ], batch size: 38, lr: 2.32e-02, grad_scale: 16.0
+2024-08-03 07:17:04,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=62047.333333333336, ans=0.125
+2024-08-03 07:17:20,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.323e+02 1.600e+02 2.155e+02 3.699e+02, threshold=3.200e+02, percent-clipped=2.0
+2024-08-03 07:17:52,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=62120.666666666664, ans=0.0
+2024-08-03 07:18:26,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.64 vs. limit=15.0
+2024-08-03 07:18:34,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=62157.333333333336, ans=0.0
+2024-08-03 07:18:36,414 INFO [train.py:1114] (0/4) Epoch 5, batch 2400, loss[loss=0.2536, simple_loss=0.3283, pruned_loss=0.08948, over 13541.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3283, pruned_loss=0.09451, over 2642198.54 frames. ], batch size: 35, lr: 2.32e-02, grad_scale: 32.0
+2024-08-03 07:18:54,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62194.0, ans=0.125
+2024-08-03 07:19:02,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62230.666666666664, ans=0.1
+2024-08-03 07:19:28,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=62304.0, ans=0.09899494936611666
+2024-08-03 07:19:34,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=62340.666666666664, ans=0.025
+2024-08-03 07:19:45,675 INFO [train.py:1114] (0/4) Epoch 5, batch 2450, loss[loss=0.2657, simple_loss=0.3442, pruned_loss=0.09359, over 13356.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3301, pruned_loss=0.09552, over 2632813.51 frames. ], batch size: 37, lr: 2.32e-02, grad_scale: 32.0
+2024-08-03 07:19:50,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=62377.333333333336, ans=0.05
+2024-08-03 07:19:52,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62377.333333333336, ans=0.125
+2024-08-03 07:20:08,877 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.340e+02 1.643e+02 2.212e+02 4.155e+02, threshold=3.287e+02, percent-clipped=6.0
+2024-08-03 07:20:22,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=62487.333333333336, ans=0.09899494936611666
+2024-08-03 07:20:22,405 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.74 vs. limit=10.0
+2024-08-03 07:20:26,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.34 vs. limit=12.0
+2024-08-03 07:20:35,561 INFO [train.py:1114] (0/4) Epoch 5, batch 2500, loss[loss=0.2663, simple_loss=0.338, pruned_loss=0.09735, over 13410.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3295, pruned_loss=0.09452, over 2637814.06 frames. ], batch size: 39, lr: 2.32e-02, grad_scale: 32.0
+2024-08-03 07:20:40,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.83 vs. limit=15.0
+2024-08-03 07:20:41,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62560.666666666664, ans=0.0
+2024-08-03 07:20:43,442 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.22 vs. limit=15.0
+2024-08-03 07:20:47,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=62597.333333333336, ans=0.025
+2024-08-03 07:20:51,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=62597.333333333336, ans=0.0
+2024-08-03 07:20:56,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.29 vs. limit=15.0
+2024-08-03 07:20:57,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62634.0, ans=0.1
+2024-08-03 07:21:00,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.17 vs. limit=22.5
+2024-08-03 07:21:07,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.93 vs. limit=22.5
+2024-08-03 07:21:20,456 INFO [train.py:1114] (0/4) Epoch 5, batch 2550, loss[loss=0.2272, simple_loss=0.2952, pruned_loss=0.07954, over 13557.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3294, pruned_loss=0.09437, over 2638791.30 frames. ], batch size: 31, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:21:23,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=62744.0, ans=0.0
+2024-08-03 07:21:32,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62780.666666666664, ans=0.1
+2024-08-03 07:21:32,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0
+2024-08-03 07:21:48,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=62817.333333333336, ans=0.0
+2024-08-03 07:21:49,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.290e+02 1.480e+02 1.885e+02 4.380e+02, threshold=2.959e+02, percent-clipped=2.0
+2024-08-03 07:22:09,884 INFO [train.py:1114] (0/4) Epoch 5, batch 2600, loss[loss=0.2454, simple_loss=0.3151, pruned_loss=0.08788, over 13569.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.33, pruned_loss=0.09482, over 2638145.36 frames. ], batch size: 36, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:24:01,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=63037.333333333336, ans=0.09899494936611666
+2024-08-03 07:24:05,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63037.333333333336, ans=0.1
+2024-08-03 07:24:17,369 INFO [train.py:1114] (0/4) Epoch 5, batch 2650, loss[loss=0.2794, simple_loss=0.3464, pruned_loss=0.1062, over 13285.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3308, pruned_loss=0.09518, over 2641148.26 frames. ], batch size: 46, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:24:18,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=63110.666666666664, ans=0.2
+2024-08-03 07:24:18,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=63110.666666666664, ans=0.025
+2024-08-03 07:24:18,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=63110.666666666664, ans=0.125
+2024-08-03 07:24:20,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63110.666666666664, ans=0.125
+2024-08-03 07:24:27,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63147.333333333336, ans=0.1
+2024-08-03 07:24:43,700 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.380e+02 1.559e+02 1.928e+02 2.967e+02, threshold=3.118e+02, percent-clipped=1.0
+2024-08-03 07:25:05,927 INFO [train.py:1114] (0/4) Epoch 5, batch 2700, loss[loss=0.286, simple_loss=0.353, pruned_loss=0.1094, over 13554.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3312, pruned_loss=0.09542, over 2638057.74 frames. ], batch size: 40, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:25:20,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63330.666666666664, ans=0.1
+2024-08-03 07:25:24,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0
+2024-08-03 07:25:29,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=63367.333333333336, ans=0.09899494936611666
+2024-08-03 07:25:35,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=63404.0, ans=0.2
+2024-08-03 07:25:48,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63440.666666666664, ans=0.1
+2024-08-03 07:25:49,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=63477.333333333336, ans=0.125
+2024-08-03 07:25:49,673 INFO [train.py:1114] (0/4) Epoch 5, batch 2750, loss[loss=0.2649, simple_loss=0.3313, pruned_loss=0.09925, over 13330.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3304, pruned_loss=0.09529, over 2635445.03 frames. ], batch size: 34, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:25:51,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63477.333333333336, ans=0.125
+2024-08-03 07:26:22,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=63550.666666666664, ans=0.125
+2024-08-03 07:26:23,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=63550.666666666664, ans=0.0
+2024-08-03 07:26:33,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63550.666666666664, ans=0.125
+2024-08-03 07:26:35,266 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.483e+02 1.693e+02 2.147e+02 4.016e+02, threshold=3.386e+02, percent-clipped=6.0
+2024-08-03 07:26:51,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=63624.0, ans=0.0
+2024-08-03 07:26:53,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.19 vs. limit=22.5
+2024-08-03 07:26:56,093 INFO [train.py:1114] (0/4) Epoch 5, batch 2800, loss[loss=0.3444, simple_loss=0.3955, pruned_loss=0.1466, over 9213.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3302, pruned_loss=0.09534, over 2626708.68 frames. ], batch size: 96, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:27:03,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=63660.666666666664, ans=0.0
+2024-08-03 07:27:11,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63697.333333333336, ans=0.125
+2024-08-03 07:27:22,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.44 vs. limit=15.0
+2024-08-03 07:27:28,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=63770.666666666664, ans=0.125
+2024-08-03 07:27:36,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.21 vs. limit=10.0
+2024-08-03 07:27:40,327 INFO [train.py:1114] (0/4) Epoch 5, batch 2850, loss[loss=0.2325, simple_loss=0.3098, pruned_loss=0.07757, over 13367.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3307, pruned_loss=0.09578, over 2620050.37 frames. ], batch size: 35, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:27:54,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=63880.666666666664, ans=0.2
+2024-08-03 07:27:55,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=12.0
+2024-08-03 07:27:57,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63880.666666666664, ans=0.125
+2024-08-03 07:28:37,173 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.375e+02 1.611e+02 2.020e+02 3.770e+02, threshold=3.222e+02, percent-clipped=1.0
+2024-08-03 07:28:42,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=63954.0, ans=0.125
+2024-08-03 07:28:50,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=63990.666666666664, ans=0.05
+2024-08-03 07:28:53,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=63990.666666666664, ans=0.0
+2024-08-03 07:28:58,782 INFO [train.py:1114] (0/4) Epoch 5, batch 2900, loss[loss=0.2734, simple_loss=0.3399, pruned_loss=0.1034, over 13367.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3319, pruned_loss=0.09588, over 2630678.70 frames. ], batch size: 36, lr: 2.29e-02, grad_scale: 32.0
+2024-08-03 07:29:23,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=64100.666666666664, ans=0.1
+2024-08-03 07:29:38,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64174.0, ans=0.125
+2024-08-03 07:29:45,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=64174.0, ans=0.125
+2024-08-03 07:29:46,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.96 vs. limit=12.0
+2024-08-03 07:29:46,629 INFO [train.py:1114] (0/4) Epoch 5, batch 2950, loss[loss=0.2442, simple_loss=0.3064, pruned_loss=0.09102, over 13345.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3307, pruned_loss=0.09577, over 2628941.74 frames. ], batch size: 34, lr: 2.29e-02, grad_scale: 32.0
+2024-08-03 07:29:55,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=64247.333333333336, ans=0.125
+2024-08-03 07:30:13,360 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.269e+02 1.515e+02 1.903e+02 4.002e+02, threshold=3.030e+02, percent-clipped=2.0
+2024-08-03 07:30:13,629 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:30:27,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64320.666666666664, ans=0.125
+2024-08-03 07:30:29,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=64357.333333333336, ans=0.125
+2024-08-03 07:30:38,306 INFO [train.py:1114] (0/4) Epoch 5, batch 3000, loss[loss=0.2832, simple_loss=0.3482, pruned_loss=0.1092, over 13540.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3304, pruned_loss=0.0955, over 2628904.25 frames. ], batch size: 37, lr: 2.29e-02, grad_scale: 16.0
+2024-08-03 07:30:38,307 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 07:31:39,948 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.2105, simple_loss=0.3083, pruned_loss=0.0563, over 944034.00 frames. 
+2024-08-03 07:31:39,949 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 07:32:02,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=64467.333333333336, ans=0.2
+2024-08-03 07:32:05,448 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:32:08,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=64504.0, ans=0.0
+2024-08-03 07:32:14,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=64540.666666666664, ans=0.125
+2024-08-03 07:32:30,610 INFO [train.py:1114] (0/4) Epoch 5, batch 3050, loss[loss=0.2164, simple_loss=0.2889, pruned_loss=0.07199, over 13540.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3315, pruned_loss=0.096, over 2625124.84 frames. ], batch size: 35, lr: 2.29e-02, grad_scale: 16.0
+2024-08-03 07:32:32,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=64577.333333333336, ans=0.0
+2024-08-03 07:32:43,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=64614.0, ans=0.125
+2024-08-03 07:32:47,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=64614.0, ans=0.0
+2024-08-03 07:32:58,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64650.666666666664, ans=0.1
+2024-08-03 07:32:59,073 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.354e+02 1.544e+02 1.924e+02 3.300e+02, threshold=3.088e+02, percent-clipped=4.0
+2024-08-03 07:33:00,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=64650.666666666664, ans=0.125
+2024-08-03 07:33:08,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=64687.333333333336, ans=0.025
+2024-08-03 07:33:10,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=64724.0, ans=0.125
+2024-08-03 07:33:13,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=64724.0, ans=0.2
+2024-08-03 07:33:16,092 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.69 vs. limit=10.0
+2024-08-03 07:33:18,948 INFO [train.py:1114] (0/4) Epoch 5, batch 3100, loss[loss=0.3259, simple_loss=0.3863, pruned_loss=0.1327, over 13336.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3311, pruned_loss=0.09593, over 2625216.14 frames. ], batch size: 46, lr: 2.28e-02, grad_scale: 16.0
+2024-08-03 07:33:23,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=64760.666666666664, ans=0.125
+2024-08-03 07:33:26,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=64797.333333333336, ans=0.125
+2024-08-03 07:33:50,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=64870.666666666664, ans=0.125
+2024-08-03 07:34:01,576 INFO [train.py:1114] (0/4) Epoch 5, batch 3150, loss[loss=0.3009, simple_loss=0.3637, pruned_loss=0.1191, over 12959.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3311, pruned_loss=0.09624, over 2627284.32 frames. ], batch size: 48, lr: 2.28e-02, grad_scale: 16.0
+2024-08-03 07:34:14,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=64980.666666666664, ans=0.125
+2024-08-03 07:34:20,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=65017.333333333336, ans=0.0
+2024-08-03 07:34:25,112 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.369e+02 1.582e+02 1.897e+02 3.787e+02, threshold=3.164e+02, percent-clipped=5.0
+2024-08-03 07:34:52,020 INFO [train.py:1114] (0/4) Epoch 5, batch 3200, loss[loss=0.2673, simple_loss=0.3424, pruned_loss=0.09609, over 13542.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3297, pruned_loss=0.09516, over 2633208.20 frames. ], batch size: 37, lr: 2.28e-02, grad_scale: 32.0
+2024-08-03 07:35:01,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=65164.0, ans=0.125
+2024-08-03 07:35:06,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65164.0, ans=0.1
+2024-08-03 07:35:06,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.55 vs. limit=10.0
+2024-08-03 07:35:13,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65200.666666666664, ans=0.1
+2024-08-03 07:35:35,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=65310.666666666664, ans=0.125
+2024-08-03 07:35:35,992 INFO [train.py:1114] (0/4) Epoch 5, batch 3250, loss[loss=0.2506, simple_loss=0.323, pruned_loss=0.08915, over 13391.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3297, pruned_loss=0.09486, over 2637974.74 frames. ], batch size: 38, lr: 2.27e-02, grad_scale: 32.0
+2024-08-03 07:35:42,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65310.666666666664, ans=0.1
+2024-08-03 07:35:59,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=65384.0, ans=0.125
+2024-08-03 07:36:00,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.291e+02 1.478e+02 1.851e+02 2.616e+02, threshold=2.956e+02, percent-clipped=0.0
+2024-08-03 07:36:15,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=65457.333333333336, ans=0.0
+2024-08-03 07:36:20,408 INFO [train.py:1114] (0/4) Epoch 5, batch 3300, loss[loss=0.2631, simple_loss=0.3376, pruned_loss=0.09435, over 12886.00 frames. ], tot_loss[loss=0.259, simple_loss=0.329, pruned_loss=0.09445, over 2639559.15 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 32.0
+2024-08-03 07:36:25,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65494.0, ans=0.1
+2024-08-03 07:36:26,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0
+2024-08-03 07:36:58,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=65640.66666666667, ans=0.0
+2024-08-03 07:37:00,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.92 vs. limit=10.0
+2024-08-03 07:37:03,886 INFO [train.py:1114] (0/4) Epoch 5, batch 3350, loss[loss=0.2944, simple_loss=0.3564, pruned_loss=0.1161, over 13067.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.33, pruned_loss=0.09472, over 2629429.21 frames. ], batch size: 48, lr: 2.27e-02, grad_scale: 32.0
+2024-08-03 07:37:09,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=65677.33333333333, ans=0.2
+2024-08-03 07:37:10,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0
+2024-08-03 07:37:12,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=65714.0, ans=0.0
+2024-08-03 07:37:22,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65750.66666666667, ans=0.125
+2024-08-03 07:37:26,818 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.375e+02 1.557e+02 1.947e+02 3.831e+02, threshold=3.114e+02, percent-clipped=2.0
+2024-08-03 07:37:33,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65787.33333333333, ans=0.1
+2024-08-03 07:37:46,236 INFO [train.py:1114] (0/4) Epoch 5, batch 3400, loss[loss=0.2522, simple_loss=0.3096, pruned_loss=0.0974, over 13542.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3304, pruned_loss=0.09574, over 2624779.16 frames. ], batch size: 31, lr: 2.27e-02, grad_scale: 32.0
+2024-08-03 07:37:58,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=65897.33333333333, ans=0.05
+2024-08-03 07:38:00,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=65897.33333333333, ans=0.025
+2024-08-03 07:38:13,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=65970.66666666667, ans=0.0
+2024-08-03 07:38:17,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.19 vs. limit=15.0
+2024-08-03 07:38:21,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.05 vs. limit=15.0
+2024-08-03 07:38:28,621 INFO [train.py:1114] (0/4) Epoch 5, batch 3450, loss[loss=0.2589, simple_loss=0.3383, pruned_loss=0.08976, over 12935.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3293, pruned_loss=0.09465, over 2629370.99 frames. ], batch size: 52, lr: 2.26e-02, grad_scale: 32.0
+2024-08-03 07:38:29,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=66044.0, ans=0.2
+2024-08-03 07:38:48,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=66117.33333333333, ans=0.0
+2024-08-03 07:38:49,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=66117.33333333333, ans=0.125
+2024-08-03 07:38:51,391 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.367e+02 1.647e+02 2.162e+02 3.510e+02, threshold=3.294e+02, percent-clipped=1.0
+2024-08-03 07:38:53,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=66154.0, ans=0.125
+2024-08-03 07:38:53,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.03 vs. limit=22.5
+2024-08-03 07:39:02,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0
+2024-08-03 07:39:05,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=66190.66666666667, ans=0.025
+2024-08-03 07:39:08,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=66190.66666666667, ans=0.0
+2024-08-03 07:39:10,894 INFO [train.py:1114] (0/4) Epoch 5, batch 3500, loss[loss=0.2125, simple_loss=0.2951, pruned_loss=0.06497, over 13550.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3282, pruned_loss=0.09402, over 2630386.36 frames. ], batch size: 34, lr: 2.26e-02, grad_scale: 32.0
+2024-08-03 07:39:12,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=66227.33333333333, ans=0.0
+2024-08-03 07:39:25,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.08 vs. limit=15.0
+2024-08-03 07:39:28,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=66264.0, ans=0.0
+2024-08-03 07:39:46,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=66337.33333333333, ans=0.125
+2024-08-03 07:39:55,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=66337.33333333333, ans=0.0
+2024-08-03 07:39:58,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=66374.0, ans=0.04949747468305833
+2024-08-03 07:40:02,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66374.0, ans=0.1
+2024-08-03 07:40:05,778 INFO [train.py:1114] (0/4) Epoch 5, batch 3550, loss[loss=0.2633, simple_loss=0.3343, pruned_loss=0.09616, over 12677.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.33, pruned_loss=0.09524, over 2628699.45 frames. ], batch size: 59, lr: 2.26e-02, grad_scale: 16.0
+2024-08-03 07:40:32,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66447.33333333333, ans=0.125
+2024-08-03 07:40:38,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=66484.0, ans=0.125
+2024-08-03 07:40:41,449 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.465e+02 1.682e+02 2.236e+02 4.572e+02, threshold=3.363e+02, percent-clipped=5.0
+2024-08-03 07:40:48,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=66520.66666666667, ans=0.125
+2024-08-03 07:45:33,000 INFO [train.py:1114] (0/4) Epoch 5, batch 3600, loss[loss=0.3272, simple_loss=0.373, pruned_loss=0.1407, over 9229.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3362, pruned_loss=0.1015, over 2489013.54 frames. ], batch size: 97, lr: 2.26e-02, grad_scale: 32.0
+2024-08-03 07:46:07,667 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-5.pt
+2024-08-03 07:47:41,843 INFO [train.py:1114] (0/4) Epoch 6, batch 0, loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07436, over 13360.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3, pruned_loss=0.07436, over 13360.00 frames. ], batch size: 33, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:47:41,844 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 07:47:51,611 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.2159, simple_loss=0.3144, pruned_loss=0.05871, over 944034.00 frames. 
+2024-08-03 07:47:51,612 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 07:48:04,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=66781.0, ans=0.125
+2024-08-03 07:48:06,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=66781.0, ans=0.0
+2024-08-03 07:48:16,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=66817.66666666667, ans=0.2
+2024-08-03 07:48:28,345 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.298e+02 1.438e+02 1.681e+02 2.917e+02, threshold=2.876e+02, percent-clipped=0.0
+2024-08-03 07:48:38,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0
+2024-08-03 07:48:39,374 INFO [train.py:1114] (0/4) Epoch 6, batch 50, loss[loss=0.2033, simple_loss=0.2806, pruned_loss=0.06302, over 13435.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3311, pruned_loss=0.09547, over 578535.21 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:48:56,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66964.33333333333, ans=0.1
+2024-08-03 07:49:04,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=67001.0, ans=0.125
+2024-08-03 07:49:08,447 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:49:12,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=67001.0, ans=0.2
+2024-08-03 07:49:29,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=67074.33333333333, ans=0.2
+2024-08-03 07:49:31,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=67074.33333333333, ans=0.07
+2024-08-03 07:49:42,409 INFO [train.py:1114] (0/4) Epoch 6, batch 100, loss[loss=0.2596, simple_loss=0.3221, pruned_loss=0.09854, over 13522.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3341, pruned_loss=0.09716, over 1025246.27 frames. ], batch size: 35, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:49:48,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0
+2024-08-03 07:50:19,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.79 vs. limit=15.0
+2024-08-03 07:50:22,475 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.327e+02 1.627e+02 2.047e+02 3.063e+02, threshold=3.255e+02, percent-clipped=2.0
+2024-08-03 07:50:38,352 INFO [train.py:1114] (0/4) Epoch 6, batch 150, loss[loss=0.2074, simple_loss=0.2743, pruned_loss=0.07025, over 13430.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3287, pruned_loss=0.09344, over 1386681.99 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 16.0
+2024-08-03 07:50:56,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=67294.33333333333, ans=0.025
+2024-08-03 07:51:18,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=67367.66666666667, ans=0.0
+2024-08-03 07:51:28,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=67404.33333333333, ans=0.125
+2024-08-03 07:51:37,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=67441.0, ans=0.2
+2024-08-03 07:51:38,035 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:51:38,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=67441.0, ans=0.0
+2024-08-03 07:51:43,440 INFO [train.py:1114] (0/4) Epoch 6, batch 200, loss[loss=0.2667, simple_loss=0.3359, pruned_loss=0.09878, over 12462.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3261, pruned_loss=0.09182, over 1665934.93 frames. ], batch size: 58, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:51:46,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=67477.66666666667, ans=0.125
+2024-08-03 07:51:48,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67477.66666666667, ans=0.125
+2024-08-03 07:51:49,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=67477.66666666667, ans=0.0
+2024-08-03 07:51:57,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=67514.33333333333, ans=0.0
+2024-08-03 07:52:06,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.71 vs. limit=5.0
+2024-08-03 07:52:21,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.11 vs. limit=6.0
+2024-08-03 07:52:22,921 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.261e+02 1.408e+02 1.836e+02 2.572e+02, threshold=2.817e+02, percent-clipped=0.0
+2024-08-03 07:52:32,921 INFO [train.py:1114] (0/4) Epoch 6, batch 250, loss[loss=0.2986, simple_loss=0.3681, pruned_loss=0.1146, over 13297.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.326, pruned_loss=0.09179, over 1884475.05 frames. ], batch size: 46, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:52:38,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=67661.0, ans=0.125
+2024-08-03 07:52:42,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=67697.66666666667, ans=0.0
+2024-08-03 07:52:55,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67697.66666666667, ans=0.1
+2024-08-03 07:53:04,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=67734.33333333333, ans=0.05
+2024-08-03 07:53:06,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=67734.33333333333, ans=0.2
+2024-08-03 07:53:20,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67807.66666666667, ans=0.1
+2024-08-03 07:53:28,626 INFO [train.py:1114] (0/4) Epoch 6, batch 300, loss[loss=0.2487, simple_loss=0.3287, pruned_loss=0.08437, over 13451.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3261, pruned_loss=0.09173, over 2051060.27 frames. ], batch size: 42, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:53:31,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=67844.33333333333, ans=0.025
+2024-08-03 07:53:45,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.66 vs. limit=15.0
+2024-08-03 07:53:47,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.36 vs. limit=22.5
+2024-08-03 07:54:04,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=67954.33333333333, ans=0.0
+2024-08-03 07:54:05,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=67954.33333333333, ans=0.125
+2024-08-03 07:54:08,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=67954.33333333333, ans=0.125
+2024-08-03 07:54:08,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.256e+02 1.436e+02 1.831e+02 3.083e+02, threshold=2.872e+02, percent-clipped=2.0
+2024-08-03 07:54:09,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=67991.0, ans=0.0
+2024-08-03 07:54:19,021 INFO [train.py:1114] (0/4) Epoch 6, batch 350, loss[loss=0.2227, simple_loss=0.2977, pruned_loss=0.07385, over 13584.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3262, pruned_loss=0.0915, over 2181932.12 frames. ], batch size: 33, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:54:34,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=68064.33333333333, ans=0.125
+2024-08-03 07:54:35,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.40 vs. limit=22.5
+2024-08-03 07:54:58,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.20 vs. limit=10.0
+2024-08-03 07:55:06,641 INFO [train.py:1114] (0/4) Epoch 6, batch 400, loss[loss=0.24, simple_loss=0.3112, pruned_loss=0.0844, over 13353.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3257, pruned_loss=0.09109, over 2285895.12 frames. ], batch size: 37, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:55:09,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=68211.0, ans=0.125
+2024-08-03 07:55:24,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=68284.33333333333, ans=0.025
+2024-08-03 07:55:36,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0
+2024-08-03 07:55:42,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.404e+02 1.670e+02 2.079e+02 3.576e+02, threshold=3.340e+02, percent-clipped=3.0
+2024-08-03 07:55:52,430 INFO [train.py:1114] (0/4) Epoch 6, batch 450, loss[loss=0.2709, simple_loss=0.3351, pruned_loss=0.1033, over 13548.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3256, pruned_loss=0.09151, over 2358129.13 frames. ], batch size: 38, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:56:13,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68467.66666666667, ans=0.125
+2024-08-03 07:56:27,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=68504.33333333333, ans=0.2
+2024-08-03 07:56:41,092 INFO [train.py:1114] (0/4) Epoch 6, batch 500, loss[loss=0.3119, simple_loss=0.3721, pruned_loss=0.1259, over 13415.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3242, pruned_loss=0.09051, over 2423709.92 frames. ], batch size: 43, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:56:52,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68614.33333333333, ans=0.1
+2024-08-03 07:56:53,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=68614.33333333333, ans=0.125
+2024-08-03 07:56:58,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.52 vs. limit=15.0
+2024-08-03 07:57:02,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=15.0
+2024-08-03 07:57:04,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=68651.0, ans=0.125
+2024-08-03 07:57:15,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.283e+02 1.463e+02 1.945e+02 3.864e+02, threshold=2.927e+02, percent-clipped=1.0
+2024-08-03 07:57:25,749 INFO [train.py:1114] (0/4) Epoch 6, batch 550, loss[loss=0.3041, simple_loss=0.3592, pruned_loss=0.1245, over 13069.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3241, pruned_loss=0.09076, over 2466166.10 frames. ], batch size: 48, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:57:43,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.70 vs. limit=22.5
+2024-08-03 07:57:50,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=68834.33333333333, ans=0.125
+2024-08-03 07:58:03,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=68871.0, ans=0.2
+2024-08-03 07:58:06,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0
+2024-08-03 07:58:15,093 INFO [train.py:1114] (0/4) Epoch 6, batch 600, loss[loss=0.2431, simple_loss=0.3199, pruned_loss=0.08316, over 13298.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3239, pruned_loss=0.09041, over 2507217.51 frames. ], batch size: 46, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:58:20,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=68944.33333333333, ans=0.125
+2024-08-03 07:58:29,061 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.71 vs. limit=15.0
+2024-08-03 07:58:35,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=69017.66666666667, ans=0.025
+2024-08-03 07:58:37,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=69017.66666666667, ans=0.0
+2024-08-03 07:58:47,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=69054.33333333333, ans=10.0
+2024-08-03 07:58:52,944 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.444e+02 1.751e+02 2.367e+02 5.361e+02, threshold=3.502e+02, percent-clipped=14.0
+2024-08-03 07:58:54,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=69091.0, ans=0.0
+2024-08-03 07:59:03,076 INFO [train.py:1114] (0/4) Epoch 6, batch 650, loss[loss=0.2268, simple_loss=0.311, pruned_loss=0.07129, over 13537.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3229, pruned_loss=0.08976, over 2542695.66 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:59:04,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69127.66666666667, ans=0.125
+2024-08-03 07:59:16,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.65 vs. limit=15.0
+2024-08-03 07:59:16,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=69164.33333333333, ans=0.09899494936611666
+2024-08-03 07:59:28,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=69201.0, ans=0.95
+2024-08-03 07:59:54,486 INFO [train.py:1114] (0/4) Epoch 6, batch 700, loss[loss=0.2466, simple_loss=0.3115, pruned_loss=0.09087, over 13515.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.323, pruned_loss=0.08961, over 2564355.60 frames. ], batch size: 35, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:59:56,542 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:00:06,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.37 vs. limit=10.0
+2024-08-03 08:00:14,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=69384.33333333333, ans=0.125
+2024-08-03 08:00:21,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=12.0
+2024-08-03 08:00:28,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=69421.0, ans=0.125
+2024-08-03 08:00:29,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.13 vs. limit=15.0
+2024-08-03 08:00:30,547 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.317e+02 1.524e+02 2.110e+02 4.129e+02, threshold=3.048e+02, percent-clipped=1.0
+2024-08-03 08:00:33,834 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.24 vs. limit=15.0
+2024-08-03 08:00:39,438 INFO [train.py:1114] (0/4) Epoch 6, batch 750, loss[loss=0.2531, simple_loss=0.3336, pruned_loss=0.08633, over 13365.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3226, pruned_loss=0.08931, over 2581392.05 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 16.0
+2024-08-03 08:00:44,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.15 vs. limit=12.0
+2024-08-03 08:00:46,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=15.0
+2024-08-03 08:00:47,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=69531.0, ans=0.0
+2024-08-03 08:00:48,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.95 vs. limit=15.0
+2024-08-03 08:01:06,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0
+2024-08-03 08:01:12,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=69604.33333333333, ans=0.2
+2024-08-03 08:01:18,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69641.0, ans=0.125
+2024-08-03 08:01:25,633 INFO [train.py:1114] (0/4) Epoch 6, batch 800, loss[loss=0.198, simple_loss=0.279, pruned_loss=0.05847, over 13358.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3221, pruned_loss=0.08899, over 2596666.43 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:01:29,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=69677.66666666667, ans=0.2
+2024-08-03 08:01:42,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=69714.33333333333, ans=0.125
+2024-08-03 08:01:55,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.37 vs. limit=22.5
+2024-08-03 08:02:03,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=69787.66666666667, ans=0.0
+2024-08-03 08:02:04,563 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.299e+02 1.541e+02 1.861e+02 5.767e+02, threshold=3.082e+02, percent-clipped=2.0
+2024-08-03 08:02:13,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=69824.33333333333, ans=0.0
+2024-08-03 08:02:15,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.35 vs. limit=15.0
+2024-08-03 08:02:15,435 INFO [train.py:1114] (0/4) Epoch 6, batch 850, loss[loss=0.2338, simple_loss=0.321, pruned_loss=0.07327, over 13341.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3214, pruned_loss=0.08891, over 2608747.29 frames. ], batch size: 40, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:02:33,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=69897.66666666667, ans=0.0
+2024-08-03 08:02:35,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=69934.33333333333, ans=0.0
+2024-08-03 08:02:37,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=69934.33333333333, ans=0.07
+2024-08-03 08:02:42,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=69934.33333333333, ans=0.05
+2024-08-03 08:02:54,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=69971.0, ans=0.125
+2024-08-03 08:03:05,420 INFO [train.py:1114] (0/4) Epoch 6, batch 900, loss[loss=0.2087, simple_loss=0.291, pruned_loss=0.06324, over 13341.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3216, pruned_loss=0.08897, over 2611535.02 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:03:07,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=70044.33333333333, ans=0.125
+2024-08-03 08:03:11,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=70044.33333333333, ans=0.0
+2024-08-03 08:03:28,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=70117.66666666667, ans=0.0
+2024-08-03 08:03:47,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.449e+02 1.731e+02 2.120e+02 4.168e+02, threshold=3.462e+02, percent-clipped=3.0
+2024-08-03 08:03:53,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=70191.0, ans=0.125
+2024-08-03 08:03:54,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=70191.0, ans=0.125
+2024-08-03 08:03:56,490 INFO [train.py:1114] (0/4) Epoch 6, batch 950, loss[loss=0.2363, simple_loss=0.3119, pruned_loss=0.08035, over 13521.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3222, pruned_loss=0.08937, over 2612444.11 frames. ], batch size: 34, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:03:57,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=70227.66666666667, ans=0.025
+2024-08-03 08:04:05,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70264.33333333333, ans=0.1
+2024-08-03 08:04:05,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=70264.33333333333, ans=0.0
+2024-08-03 08:04:06,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70264.33333333333, ans=0.1
+2024-08-03 08:04:09,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=70264.33333333333, ans=0.2
+2024-08-03 08:04:11,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=70264.33333333333, ans=0.025
+2024-08-03 08:04:16,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70301.0, ans=0.125
+2024-08-03 08:04:27,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=70337.66666666667, ans=0.125
+2024-08-03 08:04:35,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70374.33333333333, ans=0.1
+2024-08-03 08:04:43,867 INFO [train.py:1114] (0/4) Epoch 6, batch 1000, loss[loss=0.2316, simple_loss=0.3044, pruned_loss=0.07943, over 13365.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.323, pruned_loss=0.0898, over 2610068.97 frames. ], batch size: 35, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:04:45,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=15.0
+2024-08-03 08:04:46,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=70411.0, ans=0.125
+2024-08-03 08:04:55,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=70447.66666666667, ans=0.125
+2024-08-03 08:05:06,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.30 vs. limit=22.5
+2024-08-03 08:05:20,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=70521.0, ans=0.025
+2024-08-03 08:05:21,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0
+2024-08-03 08:05:22,975 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.312e+02 1.654e+02 1.929e+02 3.115e+02, threshold=3.308e+02, percent-clipped=0.0
+2024-08-03 08:05:27,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70557.66666666667, ans=0.1
+2024-08-03 08:05:34,393 INFO [train.py:1114] (0/4) Epoch 6, batch 1050, loss[loss=0.229, simple_loss=0.3102, pruned_loss=0.07389, over 13579.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3221, pruned_loss=0.08958, over 2614131.12 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:05:40,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.83 vs. limit=22.5
+2024-08-03 08:06:01,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=70667.66666666667, ans=0.125
+2024-08-03 08:06:02,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=70704.33333333333, ans=0.2
+2024-08-03 08:06:20,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=70777.66666666667, ans=0.125
+2024-08-03 08:06:21,145 INFO [train.py:1114] (0/4) Epoch 6, batch 1100, loss[loss=0.2635, simple_loss=0.3269, pruned_loss=0.1001, over 13563.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3216, pruned_loss=0.08908, over 2618219.03 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:06:28,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=70777.66666666667, ans=0.015
+2024-08-03 08:06:30,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70814.33333333333, ans=0.125
+2024-08-03 08:06:41,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=70851.0, ans=0.025
+2024-08-03 08:06:48,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=70887.66666666667, ans=0.125
+2024-08-03 08:06:57,396 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.320e+02 1.582e+02 2.063e+02 3.090e+02, threshold=3.163e+02, percent-clipped=0.0
+2024-08-03 08:07:08,201 INFO [train.py:1114] (0/4) Epoch 6, batch 1150, loss[loss=0.2456, simple_loss=0.3201, pruned_loss=0.08553, over 13569.00 frames. 
], tot_loss[loss=0.2509, simple_loss=0.3223, pruned_loss=0.08981, over 2617146.54 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0 +2024-08-03 08:07:16,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.63 vs. limit=6.0 +2024-08-03 08:07:26,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.82 vs. limit=15.0 +2024-08-03 08:07:53,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=71107.66666666667, ans=0.125 +2024-08-03 08:07:55,754 INFO [train.py:1114] (0/4) Epoch 6, batch 1200, loss[loss=0.2532, simple_loss=0.3345, pruned_loss=0.08599, over 13582.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3235, pruned_loss=0.09039, over 2615041.79 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0 +2024-08-03 08:07:55,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=71144.33333333333, ans=0.125 +2024-08-03 08:08:04,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=71181.0, ans=0.2 +2024-08-03 08:08:28,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71254.33333333333, ans=0.1 +2024-08-03 08:08:31,455 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.251e+02 1.398e+02 1.677e+02 2.839e+02, threshold=2.796e+02, percent-clipped=0.0 +2024-08-03 08:08:40,470 INFO [train.py:1114] (0/4) Epoch 6, batch 1250, loss[loss=0.2632, simple_loss=0.3341, pruned_loss=0.09612, over 13427.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3225, pruned_loss=0.08892, over 2627381.93 frames. ], batch size: 42, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:09:02,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=71401.0, ans=0.09899494936611666 +2024-08-03 08:09:11,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=71437.66666666667, ans=0.0 +2024-08-03 08:09:13,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=71437.66666666667, ans=0.125 +2024-08-03 08:09:13,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.83 vs. limit=15.0 +2024-08-03 08:09:15,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.29 vs. 
limit=15.0 +2024-08-03 08:09:20,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.whiten.whitening_limit, batch_count=71474.33333333333, ans=12.0 +2024-08-03 08:09:27,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=71474.33333333333, ans=0.125 +2024-08-03 08:09:28,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=71511.0, ans=0.025 +2024-08-03 08:09:29,156 INFO [train.py:1114] (0/4) Epoch 6, batch 1300, loss[loss=0.2549, simple_loss=0.3281, pruned_loss=0.09082, over 12800.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3221, pruned_loss=0.0887, over 2630670.12 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:09:52,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=71584.33333333333, ans=0.0 +2024-08-03 08:09:55,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=71584.33333333333, ans=0.125 +2024-08-03 08:10:13,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71621.0, ans=0.125 +2024-08-03 08:10:21,999 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.330e+02 1.619e+02 2.134e+02 3.747e+02, threshold=3.238e+02, percent-clipped=6.0 +2024-08-03 08:10:31,268 INFO [train.py:1114] (0/4) Epoch 6, batch 1350, loss[loss=0.2643, simple_loss=0.3343, pruned_loss=0.09718, over 13545.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3223, pruned_loss=0.08905, over 2637199.08 frames. ], batch size: 37, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:10:54,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.52 vs. limit=10.0 +2024-08-03 08:11:09,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71731.0, ans=0.125 +2024-08-03 08:11:47,806 INFO [train.py:1114] (0/4) Epoch 6, batch 1400, loss[loss=0.2544, simple_loss=0.3108, pruned_loss=0.09898, over 13258.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3208, pruned_loss=0.08818, over 2641328.55 frames. ], batch size: 31, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:11:55,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=71877.66666666667, ans=0.2 +2024-08-03 08:12:05,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=71951.0, ans=0.125 +2024-08-03 08:12:24,348 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.721e+01 1.334e+02 1.562e+02 1.833e+02 2.897e+02, threshold=3.124e+02, percent-clipped=0.0 +2024-08-03 08:12:29,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72024.33333333333, ans=0.125 +2024-08-03 08:12:37,250 INFO [train.py:1114] (0/4) Epoch 6, batch 1450, loss[loss=0.2882, simple_loss=0.3609, pruned_loss=0.1077, over 13429.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3221, pruned_loss=0.089, over 2639753.13 frames. 
], batch size: 43, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:12:57,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=72134.33333333333, ans=0.125 +2024-08-03 08:12:58,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72134.33333333333, ans=0.125 +2024-08-03 08:13:03,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.83 vs. limit=15.0 +2024-08-03 08:13:18,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=72207.66666666667, ans=0.125 +2024-08-03 08:13:24,219 INFO [train.py:1114] (0/4) Epoch 6, batch 1500, loss[loss=0.2685, simple_loss=0.3437, pruned_loss=0.0967, over 13393.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.323, pruned_loss=0.08917, over 2640565.34 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:13:51,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=72317.66666666667, ans=0.125 +2024-08-03 08:14:02,719 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.371e+02 1.678e+02 2.032e+02 3.850e+02, threshold=3.356e+02, percent-clipped=2.0 +2024-08-03 08:14:11,850 INFO [train.py:1114] (0/4) Epoch 6, batch 1550, loss[loss=0.2633, simple_loss=0.3465, pruned_loss=0.09004, over 13383.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3232, pruned_loss=0.08945, over 2630093.12 frames. ], batch size: 41, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:14:20,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=72464.33333333333, ans=0.125 +2024-08-03 08:14:33,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.82 vs. limit=6.0 +2024-08-03 08:14:34,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=72501.0, ans=0.125 +2024-08-03 08:14:36,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.05 vs. limit=6.0 +2024-08-03 08:14:50,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.40 vs. limit=15.0 +2024-08-03 08:14:59,406 INFO [train.py:1114] (0/4) Epoch 6, batch 1600, loss[loss=0.2528, simple_loss=0.3329, pruned_loss=0.08631, over 13586.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3225, pruned_loss=0.08904, over 2623467.31 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:15:14,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. limit=6.0 +2024-08-03 08:15:35,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.08 vs. 
limit=22.5 +2024-08-03 08:15:38,177 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.361e+02 1.766e+02 2.117e+02 3.688e+02, threshold=3.533e+02, percent-clipped=3.0 +2024-08-03 08:15:42,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72757.66666666667, ans=0.1 +2024-08-03 08:15:47,206 INFO [train.py:1114] (0/4) Epoch 6, batch 1650, loss[loss=0.2472, simple_loss=0.3263, pruned_loss=0.08402, over 13333.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.322, pruned_loss=0.08881, over 2619684.83 frames. ], batch size: 40, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:16:11,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=72867.66666666667, ans=0.0 +2024-08-03 08:16:20,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=72904.33333333333, ans=0.125 +2024-08-03 08:16:32,584 INFO [train.py:1114] (0/4) Epoch 6, batch 1700, loss[loss=0.2339, simple_loss=0.304, pruned_loss=0.0819, over 13278.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3212, pruned_loss=0.08804, over 2628732.20 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0 +2024-08-03 08:16:46,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73014.33333333333, ans=0.125 +2024-08-03 08:17:02,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=73087.66666666667, ans=15.0 +2024-08-03 08:17:12,010 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.261e+02 1.452e+02 1.741e+02 3.211e+02, threshold=2.904e+02, percent-clipped=0.0 +2024-08-03 08:17:21,252 INFO [train.py:1114] (0/4) Epoch 6, batch 1750, loss[loss=0.2191, simple_loss=0.2926, pruned_loss=0.07283, over 13548.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.321, pruned_loss=0.08801, over 2632559.90 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0 +2024-08-03 08:17:33,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73197.66666666667, ans=0.1 +2024-08-03 08:17:33,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=73197.66666666667, ans=0.0 +2024-08-03 08:17:54,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73271.0, ans=0.125 +2024-08-03 08:18:03,327 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-20000.pt +2024-08-03 08:18:08,651 INFO [train.py:1114] (0/4) Epoch 6, batch 1800, loss[loss=0.2431, simple_loss=0.3216, pruned_loss=0.08227, over 13554.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3215, pruned_loss=0.08829, over 2633217.99 frames. 
], batch size: 38, lr: 2.02e-02, grad_scale: 32.0 +2024-08-03 08:18:25,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=73381.0, ans=0.0 +2024-08-03 08:18:28,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73417.66666666667, ans=0.125 +2024-08-03 08:18:45,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=73454.33333333333, ans=0.125 +2024-08-03 08:18:49,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=73491.0, ans=0.07 +2024-08-03 08:18:50,844 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.331e+02 1.549e+02 2.028e+02 3.164e+02, threshold=3.097e+02, percent-clipped=2.0 +2024-08-03 08:18:51,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=73491.0, ans=0.0 +2024-08-03 08:18:57,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=73491.0, ans=0.0 +2024-08-03 08:18:59,635 INFO [train.py:1114] (0/4) Epoch 6, batch 1850, loss[loss=0.2514, simple_loss=0.322, pruned_loss=0.09042, over 13414.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3208, pruned_loss=0.08799, over 2636699.51 frames. ], batch size: 39, lr: 2.02e-02, grad_scale: 16.0 +2024-08-03 08:19:14,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=73564.33333333333, ans=0.07 +2024-08-03 08:19:18,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=73564.33333333333, ans=0.125 +2024-08-03 08:19:36,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=73637.66666666667, ans=0.125 +2024-08-03 08:19:46,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=73674.33333333333, ans=0.125 +2024-08-03 08:19:51,048 INFO [train.py:1114] (0/4) Epoch 6, batch 1900, loss[loss=0.2685, simple_loss=0.3489, pruned_loss=0.09407, over 13300.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3214, pruned_loss=0.08799, over 2640189.83 frames. 
], batch size: 40, lr: 2.01e-02, grad_scale: 16.0 +2024-08-03 08:20:07,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=73747.66666666667, ans=0.2 +2024-08-03 08:20:16,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=73784.33333333333, ans=0.125 +2024-08-03 08:20:16,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73784.33333333333, ans=0.1 +2024-08-03 08:20:18,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73784.33333333333, ans=0.125 +2024-08-03 08:20:21,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=73821.0, ans=0.0 +2024-08-03 08:20:29,681 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.282e+02 1.594e+02 1.886e+02 3.634e+02, threshold=3.188e+02, percent-clipped=1.0 +2024-08-03 08:21:06,059 INFO [train.py:1114] (0/4) Epoch 6, batch 1950, loss[loss=0.2407, simple_loss=0.3055, pruned_loss=0.08793, over 13554.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3224, pruned_loss=0.08847, over 2646746.57 frames. ], batch size: 36, lr: 2.01e-02, grad_scale: 16.0 +2024-08-03 08:21:20,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73894.33333333333, ans=0.1 +2024-08-03 08:21:22,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.01 vs. limit=6.0 +2024-08-03 08:21:36,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=73967.66666666667, ans=0.0 +2024-08-03 08:21:36,321 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:21:52,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=74041.0, ans=0.2 +2024-08-03 08:22:00,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=74041.0, ans=0.125 +2024-08-03 08:22:01,536 INFO [train.py:1114] (0/4) Epoch 6, batch 2000, loss[loss=0.227, simple_loss=0.2867, pruned_loss=0.08363, over 13568.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3233, pruned_loss=0.0889, over 2635922.32 frames. ], batch size: 31, lr: 2.01e-02, grad_scale: 32.0 +2024-08-03 08:22:17,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=74114.33333333333, ans=0.125 +2024-08-03 08:22:40,825 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.304e+02 1.521e+02 1.870e+02 3.402e+02, threshold=3.042e+02, percent-clipped=1.0 +2024-08-03 08:22:42,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=74224.33333333333, ans=0.125 +2024-08-03 08:22:51,650 INFO [train.py:1114] (0/4) Epoch 6, batch 2050, loss[loss=0.2434, simple_loss=0.3152, pruned_loss=0.08581, over 13442.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3211, pruned_loss=0.08768, over 2632589.75 frames. 
], batch size: 32, lr: 2.01e-02, grad_scale: 32.0 +2024-08-03 08:23:03,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=74297.66666666667, ans=0.125 +2024-08-03 08:23:11,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74334.33333333333, ans=0.125 +2024-08-03 08:23:12,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74334.33333333333, ans=0.125 +2024-08-03 08:23:22,964 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:23:40,248 INFO [train.py:1114] (0/4) Epoch 6, batch 2100, loss[loss=0.2341, simple_loss=0.3089, pruned_loss=0.07962, over 13536.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3204, pruned_loss=0.08733, over 2638045.73 frames. ], batch size: 37, lr: 2.01e-02, grad_scale: 32.0 +2024-08-03 08:23:49,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=74481.0, ans=0.125 +2024-08-03 08:23:50,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=74481.0, ans=0.0 +2024-08-03 08:23:51,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74481.0, ans=0.1 +2024-08-03 08:24:02,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=74517.66666666667, ans=0.125 +2024-08-03 08:24:10,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=74554.33333333333, ans=0.125 +2024-08-03 08:24:11,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=74554.33333333333, ans=0.125 +2024-08-03 08:24:16,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=74591.0, ans=0.0 +2024-08-03 08:24:16,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.291e+02 1.674e+02 2.132e+02 3.817e+02, threshold=3.348e+02, percent-clipped=5.0 +2024-08-03 08:24:18,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=74591.0, ans=0.125 +2024-08-03 08:24:21,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=74591.0, ans=0.125 +2024-08-03 08:24:25,033 INFO [train.py:1114] (0/4) Epoch 6, batch 2150, loss[loss=0.2136, simple_loss=0.2947, pruned_loss=0.06625, over 13569.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3194, pruned_loss=0.08665, over 2647181.09 frames. ], batch size: 36, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:24:32,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=74627.66666666667, ans=0.0 +2024-08-03 08:24:45,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. 
limit=6.0 +2024-08-03 08:24:58,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74737.66666666667, ans=0.1 +2024-08-03 08:25:01,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=74737.66666666667, ans=0.125 +2024-08-03 08:25:03,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.30 vs. limit=15.0 +2024-08-03 08:25:12,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=74774.33333333333, ans=0.125 +2024-08-03 08:25:14,067 INFO [train.py:1114] (0/4) Epoch 6, batch 2200, loss[loss=0.2582, simple_loss=0.3352, pruned_loss=0.09064, over 13413.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3193, pruned_loss=0.08653, over 2644479.72 frames. ], batch size: 39, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:25:15,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=74811.0, ans=0.125 +2024-08-03 08:25:24,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=74847.66666666667, ans=0.025 +2024-08-03 08:25:35,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.28 vs. limit=10.0 +2024-08-03 08:25:38,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=74884.33333333333, ans=0.2 +2024-08-03 08:25:50,671 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.295e+02 1.544e+02 1.950e+02 3.525e+02, threshold=3.088e+02, percent-clipped=1.0 +2024-08-03 08:25:59,035 INFO [train.py:1114] (0/4) Epoch 6, batch 2250, loss[loss=0.2447, simple_loss=0.3302, pruned_loss=0.07963, over 13365.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3194, pruned_loss=0.0869, over 2641735.65 frames. ], batch size: 37, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:26:01,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=74994.33333333333, ans=0.125 +2024-08-03 08:26:03,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.78 vs. limit=15.0 +2024-08-03 08:26:22,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=75067.66666666667, ans=0.125 +2024-08-03 08:26:23,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.94 vs. 
limit=6.0 +2024-08-03 08:26:35,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=75141.0, ans=0.0 +2024-08-03 08:26:38,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=75141.0, ans=0.07 +2024-08-03 08:26:43,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=75141.0, ans=0.0 +2024-08-03 08:26:48,094 INFO [train.py:1114] (0/4) Epoch 6, batch 2300, loss[loss=0.2124, simple_loss=0.2942, pruned_loss=0.06532, over 13586.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3188, pruned_loss=0.08698, over 2637600.15 frames. ], batch size: 33, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:26:48,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=75177.66666666667, ans=0.0 +2024-08-03 08:26:58,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=75214.33333333333, ans=0.025 +2024-08-03 08:27:02,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-08-03 08:27:19,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=75287.66666666667, ans=0.125 +2024-08-03 08:27:25,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.310e+02 1.601e+02 2.046e+02 3.853e+02, threshold=3.202e+02, percent-clipped=4.0 +2024-08-03 08:27:27,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=75324.33333333333, ans=0.125 +2024-08-03 08:27:27,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. limit=6.0 +2024-08-03 08:27:33,455 INFO [train.py:1114] (0/4) Epoch 6, batch 2350, loss[loss=0.2749, simple_loss=0.3487, pruned_loss=0.1005, over 13560.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3193, pruned_loss=0.08709, over 2640357.90 frames. ], batch size: 38, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:27:33,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=75361.0, ans=0.125 +2024-08-03 08:27:39,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=75361.0, ans=0.025 +2024-08-03 08:27:40,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=75361.0, ans=0.125 +2024-08-03 08:27:48,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=75397.66666666667, ans=0.07 +2024-08-03 08:27:56,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. 
limit=15.0 +2024-08-03 08:28:01,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=75471.0, ans=0.125 +2024-08-03 08:28:15,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=75507.66666666667, ans=0.125 +2024-08-03 08:28:16,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75507.66666666667, ans=0.125 +2024-08-03 08:28:18,841 INFO [train.py:1114] (0/4) Epoch 6, batch 2400, loss[loss=0.2158, simple_loss=0.2957, pruned_loss=0.06799, over 13531.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3203, pruned_loss=0.08733, over 2641701.29 frames. ], batch size: 35, lr: 1.99e-02, grad_scale: 32.0 +2024-08-03 08:28:25,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=75544.33333333333, ans=0.125 +2024-08-03 08:28:29,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=75581.0, ans=0.125 +2024-08-03 08:28:59,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=75691.0, ans=0.125 +2024-08-03 08:28:59,505 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.273e+02 1.529e+02 2.027e+02 4.146e+02, threshold=3.058e+02, percent-clipped=9.0 +2024-08-03 08:29:07,934 INFO [train.py:1114] (0/4) Epoch 6, batch 2450, loss[loss=0.2031, simple_loss=0.2883, pruned_loss=0.05898, over 13352.00 frames. ], tot_loss[loss=0.249, simple_loss=0.322, pruned_loss=0.08804, over 2631600.28 frames. ], batch size: 37, lr: 1.99e-02, grad_scale: 32.0 +2024-08-03 08:29:24,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=75801.0, ans=0.025 +2024-08-03 08:29:33,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=75801.0, ans=0.0 +2024-08-03 08:29:34,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0 +2024-08-03 08:29:36,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=75837.66666666667, ans=0.95 +2024-08-03 08:29:38,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=75837.66666666667, ans=0.0 +2024-08-03 08:29:51,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=75911.0, ans=0.125 +2024-08-03 08:29:52,479 INFO [train.py:1114] (0/4) Epoch 6, batch 2500, loss[loss=0.2532, simple_loss=0.3348, pruned_loss=0.08576, over 13404.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3215, pruned_loss=0.0879, over 2636098.57 frames. 
], batch size: 39, lr: 1.99e-02, grad_scale: 32.0 +2024-08-03 08:30:25,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=76021.0, ans=0.07 +2024-08-03 08:30:31,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.253e+02 1.529e+02 2.022e+02 3.392e+02, threshold=3.058e+02, percent-clipped=3.0 +2024-08-03 08:30:38,265 INFO [train.py:1114] (0/4) Epoch 6, batch 2550, loss[loss=0.2402, simple_loss=0.2997, pruned_loss=0.09032, over 13542.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.321, pruned_loss=0.08786, over 2637795.71 frames. ], batch size: 31, lr: 1.99e-02, grad_scale: 16.0 +2024-08-03 08:30:54,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76131.0, ans=0.125 +2024-08-03 08:30:57,687 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:31:00,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=76167.66666666667, ans=0.125 +2024-08-03 08:31:01,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=76167.66666666667, ans=0.0 +2024-08-03 08:31:07,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=76204.33333333333, ans=0.125 +2024-08-03 08:31:08,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76204.33333333333, ans=0.2 +2024-08-03 08:31:14,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=76204.33333333333, ans=22.5 +2024-08-03 08:31:15,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=76241.0, ans=0.2 +2024-08-03 08:31:20,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76241.0, ans=0.1 +2024-08-03 08:31:21,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=76241.0, ans=0.0 +2024-08-03 08:31:25,222 INFO [train.py:1114] (0/4) Epoch 6, batch 2600, loss[loss=0.2058, simple_loss=0.2889, pruned_loss=0.06137, over 13563.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3216, pruned_loss=0.08798, over 2637393.97 frames. ], batch size: 36, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:31:38,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=76314.33333333333, ans=0.2 +2024-08-03 08:32:01,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=76424.33333333333, ans=0.125 +2024-08-03 08:32:01,833 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.310e+02 1.554e+02 1.964e+02 3.750e+02, threshold=3.108e+02, percent-clipped=4.0 +2024-08-03 08:32:05,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76424.33333333333, ans=0.1 +2024-08-03 08:32:08,711 INFO [train.py:1114] (0/4) Epoch 6, batch 2650, loss[loss=0.281, simple_loss=0.343, pruned_loss=0.1095, over 13322.00 frames. 
], tot_loss[loss=0.2487, simple_loss=0.3216, pruned_loss=0.08788, over 2640489.43 frames. ], batch size: 46, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:32:09,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=76461.0, ans=0.0 +2024-08-03 08:32:13,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=76461.0, ans=0.0 +2024-08-03 08:32:25,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=76497.66666666667, ans=0.0 +2024-08-03 08:32:34,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76571.0, ans=0.1 +2024-08-03 08:32:52,937 INFO [train.py:1114] (0/4) Epoch 6, batch 2700, loss[loss=0.2348, simple_loss=0.319, pruned_loss=0.07529, over 13548.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3216, pruned_loss=0.08787, over 2637707.54 frames. ], batch size: 40, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:32:54,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76644.33333333333, ans=0.1 +2024-08-03 08:33:08,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=76681.0, ans=0.125 +2024-08-03 08:33:09,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=76717.66666666667, ans=0.125 +2024-08-03 08:33:10,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=76717.66666666667, ans=0.125 +2024-08-03 08:33:20,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=76754.33333333333, ans=0.0 +2024-08-03 08:33:21,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.49 vs. limit=15.0 +2024-08-03 08:33:29,838 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.293e+02 1.589e+02 2.019e+02 3.318e+02, threshold=3.177e+02, percent-clipped=2.0 +2024-08-03 08:33:29,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=76791.0, ans=0.0 +2024-08-03 08:33:35,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=76791.0, ans=0.125 +2024-08-03 08:33:41,622 INFO [train.py:1114] (0/4) Epoch 6, batch 2750, loss[loss=0.2283, simple_loss=0.2937, pruned_loss=0.08142, over 13324.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3196, pruned_loss=0.08693, over 2635838.99 frames. ], batch size: 34, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:34:07,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=76937.66666666667, ans=0.0 +2024-08-03 08:34:25,234 INFO [train.py:1114] (0/4) Epoch 6, batch 2800, loss[loss=0.3232, simple_loss=0.3617, pruned_loss=0.1424, over 9226.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3204, pruned_loss=0.08738, over 2627669.53 frames. 
], batch size: 96, lr: 1.98e-02, grad_scale: 32.0 +2024-08-03 08:34:48,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=77084.33333333333, ans=0.125 +2024-08-03 08:34:59,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=77157.66666666667, ans=0.04949747468305833 +2024-08-03 08:35:01,675 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.373e+02 1.725e+02 2.259e+02 3.489e+02, threshold=3.451e+02, percent-clipped=3.0 +2024-08-03 08:35:05,682 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.16 vs. limit=15.0 +2024-08-03 08:35:08,672 INFO [train.py:1114] (0/4) Epoch 6, batch 2850, loss[loss=0.2478, simple_loss=0.3255, pruned_loss=0.08506, over 13355.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3209, pruned_loss=0.08758, over 2621212.68 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:19,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=77231.0, ans=0.0 +2024-08-03 08:35:53,867 INFO [train.py:1114] (0/4) Epoch 6, batch 2900, loss[loss=0.2331, simple_loss=0.3111, pruned_loss=0.07757, over 13366.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3221, pruned_loss=0.08761, over 2632120.52 frames. ], batch size: 36, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:55,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=77377.66666666667, ans=0.0 +2024-08-03 08:35:58,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77377.66666666667, ans=0.125 +2024-08-03 08:36:05,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=77414.33333333333, ans=0.125 +2024-08-03 08:36:08,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=77414.33333333333, ans=0.0 +2024-08-03 08:36:14,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=77451.0, ans=0.125 +2024-08-03 08:36:15,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=77451.0, ans=0.2 +2024-08-03 08:36:17,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=77451.0, ans=0.025 +2024-08-03 08:36:17,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=77451.0, ans=0.0 +2024-08-03 08:40:18,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77487.66666666667, ans=0.0 +2024-08-03 08:40:18,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=77487.66666666667, ans=0.125 +2024-08-03 08:40:18,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77487.66666666667, ans=0.1 +2024-08-03 08:41:12,661 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.224e+02 
1.336e+02 1.587e+02 3.692e+02, threshold=2.672e+02, percent-clipped=1.0 +2024-08-03 08:41:32,107 INFO [train.py:1114] (0/4) Epoch 6, batch 2950, loss[loss=0.2472, simple_loss=0.3207, pruned_loss=0.08688, over 13341.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3205, pruned_loss=0.08724, over 2629931.99 frames. ], batch size: 34, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:41:40,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=77561.0, ans=0.125 +2024-08-03 08:42:26,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0 +2024-08-03 08:42:27,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=77671.0, ans=0.0 +2024-08-03 08:42:34,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=77671.0, ans=0.2 +2024-08-03 08:43:00,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=77707.66666666667, ans=0.2 +2024-08-03 08:43:15,150 INFO [train.py:1114] (0/4) Epoch 6, batch 3000, loss[loss=0.234, simple_loss=0.3234, pruned_loss=0.07235, over 13552.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.32, pruned_loss=0.08679, over 2630526.17 frames. ], batch size: 37, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:43:15,151 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 08:43:32,687 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.2027, simple_loss=0.301, pruned_loss=0.05225, over 944034.00 frames. +2024-08-03 08:43:32,687 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 08:43:46,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77744.33333333333, ans=0.125 +2024-08-03 08:44:12,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=77817.66666666667, ans=0.0 +2024-08-03 08:44:31,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.61 vs. limit=15.0 +2024-08-03 08:44:33,317 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.660e+01 1.264e+02 1.515e+02 1.854e+02 4.431e+02, threshold=3.030e+02, percent-clipped=3.0 +2024-08-03 08:44:47,350 INFO [train.py:1114] (0/4) Epoch 6, batch 3050, loss[loss=0.2584, simple_loss=0.3255, pruned_loss=0.09569, over 13544.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3215, pruned_loss=0.08766, over 2627771.75 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:44:55,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.53 vs. limit=15.0 +2024-08-03 08:49:42,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78001.0, ans=0.125 +2024-08-03 08:49:58,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=78037.66666666667, ans=0.0 +2024-08-03 08:49:58,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.47 vs. 
limit=15.0
+2024-08-03 08:50:18,942 INFO [train.py:1114] (0/4) Epoch 6, batch 3100, loss[loss=0.2416, simple_loss=0.325, pruned_loss=0.07906, over 13286.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.321, pruned_loss=0.08738, over 2627735.98 frames. ], batch size: 46, lr: 1.96e-02, grad_scale: 32.0
+2024-08-03 08:50:34,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78147.66666666667, ans=0.125
+2024-08-03 08:50:38,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78147.66666666667, ans=0.125
+2024-08-03 08:50:48,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78184.33333333333, ans=0.1
+2024-08-03 08:50:53,369 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0
+2024-08-03 08:51:05,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=78221.0, ans=0.1
+2024-08-03 08:51:10,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0
+2024-08-03 08:51:12,352 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.281e+02 1.526e+02 2.102e+02 4.706e+02, threshold=3.052e+02, percent-clipped=7.0
+2024-08-03 08:51:22,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78257.66666666667, ans=0.1
+2024-08-03 08:51:34,807 INFO [train.py:1114] (0/4) Epoch 6, batch 3150, loss[loss=0.274, simple_loss=0.3469, pruned_loss=0.1006, over 13087.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3204, pruned_loss=0.08691, over 2629358.78 frames. ], batch size: 48, lr: 1.96e-02, grad_scale: 32.0
+2024-08-03 08:51:38,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78294.33333333333, ans=0.125
+2024-08-03 08:52:11,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=15.0
+2024-08-03 08:52:19,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=78404.33333333333, ans=0.125
+2024-08-03 08:52:34,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=78441.0, ans=0.125
+2024-08-03 08:52:38,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78441.0, ans=0.0
+2024-08-03 08:52:40,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=78441.0, ans=0.0
+2024-08-03 08:52:43,921 INFO [train.py:1114] (0/4) Epoch 6, batch 3200, loss[loss=0.2338, simple_loss=0.3176, pruned_loss=0.07497, over 13536.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3198, pruned_loss=0.08656, over 2635012.14 frames. ], batch size: 37, lr: 1.96e-02, grad_scale: 32.0
+2024-08-03 08:52:59,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=78514.33333333333, ans=0.125
+2024-08-03 08:53:02,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78514.33333333333, ans=0.1
+2024-08-03 08:53:03,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=78514.33333333333, ans=22.5
+2024-08-03 08:57:38,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=78587.66666666667, ans=22.5
+2024-08-03 08:57:47,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78624.33333333333, ans=0.125
+2024-08-03 08:57:47,832 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.879e+01 1.222e+02 1.408e+02 1.742e+02 2.685e+02, threshold=2.816e+02, percent-clipped=0.0
+2024-08-03 08:57:58,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=78661.0, ans=0.2
+2024-08-03 08:57:59,194 INFO [train.py:1114] (0/4) Epoch 6, batch 3250, loss[loss=0.2525, simple_loss=0.3292, pruned_loss=0.08787, over 13395.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3193, pruned_loss=0.08608, over 2639298.04 frames. ], batch size: 38, lr: 1.96e-02, grad_scale: 32.0
+2024-08-03 08:58:26,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=78697.66666666667, ans=0.125
+2024-08-03 08:58:42,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=78734.33333333333, ans=0.125
+2024-08-03 08:58:56,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78807.66666666667, ans=0.1
+2024-08-03 08:59:05,508 INFO [train.py:1114] (0/4) Epoch 6, batch 3300, loss[loss=0.2436, simple_loss=0.3174, pruned_loss=0.08486, over 12953.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3186, pruned_loss=0.08588, over 2640431.59 frames. ], batch size: 52, lr: 1.96e-02, grad_scale: 32.0
+2024-08-03 08:59:16,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=78881.0, ans=0.0
+2024-08-03 08:59:17,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78881.0, ans=0.125
+2024-08-03 08:59:30,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78917.66666666667, ans=0.125
+2024-08-03 08:59:41,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=78991.0, ans=0.0
+2024-08-03 08:59:42,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.439e+02 1.688e+02 2.442e+02 4.060e+02, threshold=3.376e+02, percent-clipped=9.0
+2024-08-03 08:59:53,402 INFO [train.py:1114] (0/4) Epoch 6, batch 3350, loss[loss=0.2588, simple_loss=0.3279, pruned_loss=0.0949, over 13082.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3199, pruned_loss=0.08676, over 2629705.05 frames. ], batch size: 48, lr: 1.95e-02, grad_scale: 32.0
+2024-08-03 09:00:11,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=79101.0, ans=0.0
+2024-08-03 09:00:20,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=79137.66666666667, ans=0.0
+2024-08-03 09:00:33,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79174.33333333333, ans=0.125
+2024-08-03 09:00:36,306 INFO [train.py:1114] (0/4) Epoch 6, batch 3400, loss[loss=0.1967, simple_loss=0.2716, pruned_loss=0.06089, over 13530.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3188, pruned_loss=0.08615, over 2625619.02 frames. ], batch size: 31, lr: 1.95e-02, grad_scale: 32.0
+2024-08-03 09:00:52,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79247.66666666667, ans=0.125
+2024-08-03 09:01:12,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.222e+02 1.420e+02 1.734e+02 2.761e+02, threshold=2.839e+02, percent-clipped=0.0
+2024-08-03 09:01:19,612 INFO [train.py:1114] (0/4) Epoch 6, batch 3450, loss[loss=0.3055, simple_loss=0.3665, pruned_loss=0.1222, over 12911.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3192, pruned_loss=0.08599, over 2629626.30 frames. ], batch size: 52, lr: 1.95e-02, grad_scale: 32.0
+2024-08-03 09:01:22,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=79394.33333333333, ans=0.0
+2024-08-03 09:01:28,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.91 vs. limit=15.0
+2024-08-03 09:01:32,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79431.0, ans=0.125
+2024-08-03 09:01:38,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=79467.66666666667, ans=0.0
+2024-08-03 09:01:43,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79467.66666666667, ans=0.125
+2024-08-03 09:01:49,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79504.33333333333, ans=0.1
+2024-08-03 09:01:53,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79541.0, ans=0.1
+2024-08-03 09:02:02,296 INFO [train.py:1114] (0/4) Epoch 6, batch 3500, loss[loss=0.2461, simple_loss=0.3132, pruned_loss=0.08947, over 13530.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3188, pruned_loss=0.08657, over 2630924.72 frames. ], batch size: 34, lr: 1.95e-02, grad_scale: 32.0
+2024-08-03 09:02:05,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=79577.66666666667, ans=0.125
+2024-08-03 09:02:12,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.82 vs. limit=10.0
+2024-08-03 09:02:25,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=79651.0, ans=0.2
+2024-08-03 09:02:25,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0
+2024-08-03 09:02:27,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.01 vs. limit=15.0
+2024-08-03 09:02:31,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=79687.66666666667, ans=0.125
+2024-08-03 09:02:33,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=79687.66666666667, ans=0.0
+2024-08-03 09:02:35,310 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.19 vs. limit=6.0
+2024-08-03 09:02:38,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.291e+02 1.420e+02 1.811e+02 3.621e+02, threshold=2.839e+02, percent-clipped=4.0
+2024-08-03 09:02:45,579 INFO [train.py:1114] (0/4) Epoch 6, batch 3550, loss[loss=0.2789, simple_loss=0.3505, pruned_loss=0.1036, over 12461.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3214, pruned_loss=0.08809, over 2629750.67 frames. ], batch size: 58, lr: 1.95e-02, grad_scale: 32.0
+2024-08-03 09:03:18,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=79871.0, ans=0.025
+2024-08-03 09:03:21,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=79907.66666666667, ans=0.0
+2024-08-03 09:03:30,174 INFO [train.py:1114] (0/4) Epoch 6, batch 3600, loss[loss=0.2881, simple_loss=0.3408, pruned_loss=0.1177, over 8870.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3273, pruned_loss=0.09403, over 2490509.45 frames. ], batch size: 96, lr: 1.94e-02, grad_scale: 32.0
+2024-08-03 09:03:37,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.12 vs. limit=15.0
+2024-08-03 09:03:52,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80017.66666666667, ans=0.1
+2024-08-03 09:03:56,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=80054.33333333333, ans=0.2
+2024-08-03 09:03:57,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=80054.33333333333, ans=0.2
+2024-08-03 09:04:05,233 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-6.pt
+2024-08-03 09:11:26,776 INFO [train.py:1114] (0/4) Epoch 7, batch 0, loss[loss=0.2256, simple_loss=0.2984, pruned_loss=0.07641, over 13338.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2984, pruned_loss=0.07641, over 13338.00 frames. ], batch size: 33, lr: 1.82e-02, grad_scale: 32.0
+2024-08-03 09:11:26,777 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 09:11:36,678 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.2064, simple_loss=0.3063, pruned_loss=0.05331, over 944034.00 frames.
+2024-08-03 09:11:36,678 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 09:11:36,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=80091.0, ans=0.5
+2024-08-03 09:11:39,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.328e+02 1.470e+02 1.676e+02 3.542e+02, threshold=2.940e+02, percent-clipped=1.0
+2024-08-03 09:11:40,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.82 vs. limit=12.0
+2024-08-03 09:12:06,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=80164.33333333333, ans=0.0
+2024-08-03 09:12:24,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=80237.66666666667, ans=0.125
+2024-08-03 09:12:27,669 INFO [train.py:1114] (0/4) Epoch 7, batch 50, loss[loss=0.2268, simple_loss=0.299, pruned_loss=0.07732, over 13442.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3244, pruned_loss=0.08904, over 579017.23 frames. ], batch size: 32, lr: 1.82e-02, grad_scale: 32.0
+2024-08-03 09:12:56,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80347.66666666667, ans=0.125
+2024-08-03 09:13:01,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=80384.33333333333, ans=0.09899494936611666
+2024-08-03 09:13:08,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=80421.0, ans=0.125
+2024-08-03 09:13:19,724 INFO [train.py:1114] (0/4) Epoch 7, batch 100, loss[loss=0.2222, simple_loss=0.2932, pruned_loss=0.07558, over 13549.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3222, pruned_loss=0.08662, over 1026064.97 frames. ], batch size: 35, lr: 1.82e-02, grad_scale: 32.0
+2024-08-03 09:13:22,392 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.305e+02 1.508e+02 1.904e+02 3.829e+02, threshold=3.017e+02, percent-clipped=4.0
+2024-08-03 09:13:23,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=80457.66666666667, ans=0.2
+2024-08-03 09:13:32,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=80494.33333333333, ans=0.0
+2024-08-03 09:13:43,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=15.0
+2024-08-03 09:13:44,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=80531.0, ans=0.125
+2024-08-03 09:13:56,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80567.66666666667, ans=0.1
+2024-08-03 09:13:59,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=80604.33333333333, ans=0.07
+2024-08-03 09:14:06,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=80604.33333333333, ans=0.04949747468305833
+2024-08-03 09:14:07,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=80641.0, ans=0.025
+2024-08-03 09:14:08,330 INFO [train.py:1114] (0/4) Epoch 7, batch 150, loss[loss=0.229, simple_loss=0.2943, pruned_loss=0.08191, over 13421.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3196, pruned_loss=0.08559, over 1387170.23 frames. ], batch size: 32, lr: 1.81e-02, grad_scale: 32.0
+2024-08-03 09:14:12,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=80641.0, ans=0.125
+2024-08-03 09:14:26,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=80677.66666666667, ans=0.125
+2024-08-03 09:14:32,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80714.33333333333, ans=0.1
+2024-08-03 09:14:43,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=80751.0, ans=12.0
+2024-08-03 09:14:54,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80787.66666666667, ans=0.125
+2024-08-03 09:14:54,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=80787.66666666667, ans=0.025
+2024-08-03 09:14:56,273 INFO [train.py:1114] (0/4) Epoch 7, batch 200, loss[loss=0.2418, simple_loss=0.3285, pruned_loss=0.0775, over 12376.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.3177, pruned_loss=0.08447, over 1665992.89 frames. ], batch size: 58, lr: 1.81e-02, grad_scale: 16.0
+2024-08-03 09:14:56,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=12.0
+2024-08-03 09:14:59,745 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.449e+01 1.216e+02 1.369e+02 1.577e+02 2.982e+02, threshold=2.737e+02, percent-clipped=0.0
+2024-08-03 09:15:01,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=80824.33333333333, ans=0.0
+2024-08-03 09:15:09,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.36 vs. limit=15.0
+2024-08-03 09:15:12,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=80861.0, ans=0.125
+2024-08-03 09:15:12,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80861.0, ans=0.1
+2024-08-03 09:15:12,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80861.0, ans=0.1
+2024-08-03 09:15:20,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=80897.66666666667, ans=0.125
+2024-08-03 09:15:27,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=80934.33333333333, ans=15.0
+2024-08-03 09:15:41,514 INFO [train.py:1114] (0/4) Epoch 7, batch 250, loss[loss=0.29, simple_loss=0.357, pruned_loss=0.1115, over 13318.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3168, pruned_loss=0.08374, over 1885363.60 frames. ], batch size: 46, lr: 1.81e-02, grad_scale: 16.0
+2024-08-03 09:15:54,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=81044.33333333333, ans=0.125
+2024-08-03 09:15:58,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.91 vs. limit=15.0
+2024-08-03 09:16:10,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=81117.66666666667, ans=0.125
+2024-08-03 09:16:19,389 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:16:26,434 INFO [train.py:1114] (0/4) Epoch 7, batch 300, loss[loss=0.2549, simple_loss=0.332, pruned_loss=0.08888, over 13459.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3156, pruned_loss=0.0831, over 2051908.07 frames. ], batch size: 42, lr: 1.81e-02, grad_scale: 16.0
+2024-08-03 09:16:27,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=81191.0, ans=0.0
+2024-08-03 09:16:29,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=81191.0, ans=0.0
+2024-08-03 09:16:30,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.291e+02 1.630e+02 2.116e+02 3.205e+02, threshold=3.259e+02, percent-clipped=7.0
+2024-08-03 09:16:35,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=81227.66666666667, ans=0.125
+2024-08-03 09:16:40,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=81227.66666666667, ans=0.5
+2024-08-03 09:16:44,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81264.33333333333, ans=0.1
+2024-08-03 09:17:15,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=81337.66666666667, ans=0.0
+2024-08-03 09:17:19,152 INFO [train.py:1114] (0/4) Epoch 7, batch 350, loss[loss=0.2073, simple_loss=0.2892, pruned_loss=0.06272, over 13573.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3165, pruned_loss=0.08362, over 2181686.94 frames. ], batch size: 33, lr: 1.81e-02, grad_scale: 16.0
+2024-08-03 09:17:19,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.62 vs. limit=15.0
+2024-08-03 09:17:22,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=81374.33333333333, ans=0.125
+2024-08-03 09:17:36,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=81447.66666666667, ans=0.0
+2024-08-03 09:17:51,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81484.33333333333, ans=0.125
+2024-08-03 09:17:56,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=81521.0, ans=0.0
+2024-08-03 09:17:57,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=81521.0, ans=0.125
+2024-08-03 09:18:06,299 INFO [train.py:1114] (0/4) Epoch 7, batch 400, loss[loss=0.2585, simple_loss=0.3313, pruned_loss=0.09287, over 13358.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3159, pruned_loss=0.08375, over 2285936.69 frames. ], batch size: 37, lr: 1.81e-02, grad_scale: 32.0
+2024-08-03 09:18:09,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.335e+02 1.614e+02 1.996e+02 4.244e+02, threshold=3.229e+02, percent-clipped=5.0
+2024-08-03 09:18:12,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=81557.66666666667, ans=0.125
+2024-08-03 09:18:19,384 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:18:19,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=81594.33333333333, ans=0.1
+2024-08-03 09:18:21,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81594.33333333333, ans=0.0
+2024-08-03 09:18:50,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0
+2024-08-03 09:18:51,575 INFO [train.py:1114] (0/4) Epoch 7, batch 450, loss[loss=0.2187, simple_loss=0.309, pruned_loss=0.06423, over 13550.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3161, pruned_loss=0.08386, over 2360506.78 frames. ], batch size: 38, lr: 1.80e-02, grad_scale: 32.0
+2024-08-03 09:18:52,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=81741.0, ans=0.025
+2024-08-03 09:18:58,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=81741.0, ans=0.125
+2024-08-03 09:19:56,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=81851.0, ans=0.0
+2024-08-03 09:20:00,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81851.0, ans=0.125
+2024-08-03 09:20:03,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=81851.0, ans=0.1
+2024-08-03 09:20:13,734 INFO [train.py:1114] (0/4) Epoch 7, batch 500, loss[loss=0.2387, simple_loss=0.3231, pruned_loss=0.0771, over 13424.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3147, pruned_loss=0.08325, over 2425215.06 frames. ], batch size: 43, lr: 1.80e-02, grad_scale: 32.0
+2024-08-03 09:20:15,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81924.33333333333, ans=0.125
+2024-08-03 09:20:17,140 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.254e+02 1.426e+02 1.803e+02 2.820e+02, threshold=2.853e+02, percent-clipped=0.0
+2024-08-03 09:20:18,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81924.33333333333, ans=0.1
+2024-08-03 09:20:32,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=81997.66666666667, ans=0.125
+2024-08-03 09:20:44,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=82034.33333333333, ans=0.04949747468305833
+2024-08-03 09:20:56,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82071.0, ans=0.1
+2024-08-03 09:20:58,687 INFO [train.py:1114] (0/4) Epoch 7, batch 550, loss[loss=0.2683, simple_loss=0.3449, pruned_loss=0.09591, over 13005.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.315, pruned_loss=0.08317, over 2467324.38 frames. ], batch size: 48, lr: 1.80e-02, grad_scale: 16.0
+2024-08-03 09:21:49,941 INFO [train.py:1114] (0/4) Epoch 7, batch 600, loss[loss=0.2406, simple_loss=0.3243, pruned_loss=0.07849, over 13316.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3152, pruned_loss=0.08292, over 2507186.41 frames. ], batch size: 46, lr: 1.80e-02, grad_scale: 16.0
+2024-08-03 09:21:54,381 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.313e+02 1.488e+02 1.850e+02 2.717e+02, threshold=2.975e+02, percent-clipped=0.0
+2024-08-03 09:22:00,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.47 vs. limit=15.0
+2024-08-03 09:22:14,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82364.33333333333, ans=0.125
+2024-08-03 09:22:28,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.44 vs. limit=22.5
+2024-08-03 09:22:35,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82437.66666666667, ans=0.125
+2024-08-03 09:22:39,822 INFO [train.py:1114] (0/4) Epoch 7, batch 650, loss[loss=0.2227, simple_loss=0.3035, pruned_loss=0.0709, over 13532.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3144, pruned_loss=0.08248, over 2542272.23 frames. ], batch size: 37, lr: 1.80e-02, grad_scale: 16.0
+2024-08-03 09:22:52,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=82511.0, ans=0.125
+2024-08-03 09:22:56,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=82547.66666666667, ans=0.0
+2024-08-03 09:23:01,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.55 vs. limit=22.5
+2024-08-03 09:23:05,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=82547.66666666667, ans=0.5
+2024-08-03 09:23:21,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82621.0, ans=0.125
+2024-08-03 09:23:23,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82621.0, ans=0.125
+2024-08-03 09:23:25,556 INFO [train.py:1114] (0/4) Epoch 7, batch 700, loss[loss=0.2604, simple_loss=0.3234, pruned_loss=0.09874, over 13531.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.314, pruned_loss=0.08188, over 2565569.41 frames. ], batch size: 35, lr: 1.79e-02, grad_scale: 16.0
+2024-08-03 09:23:29,925 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.288e+02 1.544e+02 2.300e+02 4.218e+02, threshold=3.088e+02, percent-clipped=10.0
+2024-08-03 09:23:45,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82731.0, ans=0.125
+2024-08-03 09:23:45,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82731.0, ans=0.125
+2024-08-03 09:24:08,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.75 vs. limit=10.0
+2024-08-03 09:24:10,557 INFO [train.py:1114] (0/4) Epoch 7, batch 750, loss[loss=0.2247, simple_loss=0.3119, pruned_loss=0.06875, over 13350.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3137, pruned_loss=0.08175, over 2583323.09 frames. ], batch size: 37, lr: 1.79e-02, grad_scale: 16.0
+2024-08-03 09:24:10,679 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:24:19,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.43 vs. limit=15.0
+2024-08-03 09:24:26,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=82877.66666666667, ans=0.2
+2024-08-03 09:24:28,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=22.5
+2024-08-03 09:24:31,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=82877.66666666667, ans=0.2
+2024-08-03 09:24:41,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.12 vs. limit=22.5
+2024-08-03 09:24:48,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=82951.0, ans=0.0
+2024-08-03 09:24:52,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.74 vs. limit=22.5
+2024-08-03 09:25:05,630 INFO [train.py:1114] (0/4) Epoch 7, batch 800, loss[loss=0.2196, simple_loss=0.2954, pruned_loss=0.07189, over 13348.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3129, pruned_loss=0.08101, over 2597721.47 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:25:10,203 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:25:11,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.297e+02 1.506e+02 2.061e+02 3.344e+02, threshold=3.011e+02, percent-clipped=3.0
+2024-08-03 09:25:13,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=83024.33333333333, ans=0.0
+2024-08-03 09:25:15,816 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0
+2024-08-03 09:25:26,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.94 vs. limit=22.5
+2024-08-03 09:25:26,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=83097.66666666667, ans=0.0
+2024-08-03 09:25:30,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=83097.66666666667, ans=0.04949747468305833
+2024-08-03 09:25:31,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=83097.66666666667, ans=0.0
+2024-08-03 09:25:31,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83097.66666666667, ans=0.1
+2024-08-03 09:25:34,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0
+2024-08-03 09:25:35,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.96 vs. limit=22.5
+2024-08-03 09:25:48,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=83171.0, ans=0.2
+2024-08-03 09:25:59,644 INFO [train.py:1114] (0/4) Epoch 7, batch 850, loss[loss=0.2145, simple_loss=0.3097, pruned_loss=0.05963, over 13333.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3141, pruned_loss=0.0819, over 2609975.99 frames. ], batch size: 40, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:26:02,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=83207.66666666667, ans=0.2
+2024-08-03 09:26:10,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=83244.33333333333, ans=0.025
+2024-08-03 09:26:29,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83281.0, ans=0.1
+2024-08-03 09:26:32,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83281.0, ans=0.1
+2024-08-03 09:26:54,513 INFO [train.py:1114] (0/4) Epoch 7, batch 900, loss[loss=0.2041, simple_loss=0.2851, pruned_loss=0.06155, over 13350.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.314, pruned_loss=0.08149, over 2611054.98 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:26:58,765 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.957e+01 1.340e+02 1.564e+02 1.853e+02 3.494e+02, threshold=3.128e+02, percent-clipped=2.0
+2024-08-03 09:27:06,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.67 vs. limit=15.0
+2024-08-03 09:27:07,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=83427.66666666667, ans=0.2
+2024-08-03 09:27:20,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=83464.33333333333, ans=0.0
+2024-08-03 09:27:22,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=83464.33333333333, ans=0.2
+2024-08-03 09:27:36,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=83537.66666666667, ans=0.125
+2024-08-03 09:27:44,884 INFO [train.py:1114] (0/4) Epoch 7, batch 950, loss[loss=0.1928, simple_loss=0.2693, pruned_loss=0.0581, over 13538.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3136, pruned_loss=0.08132, over 2612080.56 frames. ], batch size: 34, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:27:56,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83611.0, ans=0.125
+2024-08-03 09:28:06,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.66 vs. limit=15.0
+2024-08-03 09:28:19,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83684.33333333333, ans=0.125
+2024-08-03 09:28:27,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=83721.0, ans=0.0
+2024-08-03 09:28:31,282 INFO [train.py:1114] (0/4) Epoch 7, batch 1000, loss[loss=0.2385, simple_loss=0.3127, pruned_loss=0.08222, over 13365.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3159, pruned_loss=0.08303, over 2610792.16 frames. ], batch size: 35, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:28:38,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=83757.66666666667, ans=0.025
+2024-08-03 09:28:39,593 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.347e+02 1.651e+02 2.099e+02 3.599e+02, threshold=3.301e+02, percent-clipped=2.0
+2024-08-03 09:28:43,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83794.33333333333, ans=0.0
+2024-08-03 09:29:24,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=83904.33333333333, ans=0.125
+2024-08-03 09:29:26,010 INFO [train.py:1114] (0/4) Epoch 7, batch 1050, loss[loss=0.2528, simple_loss=0.33, pruned_loss=0.0878, over 13578.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3148, pruned_loss=0.08271, over 2615842.39 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:29:32,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=83941.0, ans=0.125
+2024-08-03 09:29:56,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=84014.33333333333, ans=0.125
+2024-08-03 09:30:18,334 INFO [train.py:1114] (0/4) Epoch 7, batch 1100, loss[loss=0.2344, simple_loss=0.31, pruned_loss=0.07939, over 13556.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3137, pruned_loss=0.08191, over 2620045.11 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:30:22,733 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.294e+01 1.204e+02 1.427e+02 1.810e+02 3.442e+02, threshold=2.853e+02, percent-clipped=1.0
+2024-08-03 09:30:28,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=84161.0, ans=0.2
+2024-08-03 09:30:32,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84161.0, ans=0.125
+2024-08-03 09:31:08,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=84271.0, ans=0.125
+2024-08-03 09:31:13,200 INFO [train.py:1114] (0/4) Epoch 7, batch 1150, loss[loss=0.2334, simple_loss=0.307, pruned_loss=0.07986, over 13543.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.3132, pruned_loss=0.08183, over 2619102.65 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:31:29,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84344.33333333333, ans=0.125
+2024-08-03 09:31:34,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=22.33 vs. limit=22.5
+2024-08-03 09:31:41,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.67 vs. limit=15.0
+2024-08-03 09:31:47,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.27 vs. limit=22.5
+2024-08-03 09:31:51,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=84417.66666666667, ans=0.125
+2024-08-03 09:31:57,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=84454.33333333333, ans=0.125
+2024-08-03 09:32:04,048 INFO [train.py:1114] (0/4) Epoch 7, batch 1200, loss[loss=0.2788, simple_loss=0.3479, pruned_loss=0.1049, over 13578.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3141, pruned_loss=0.0822, over 2616263.31 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:32:05,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.85 vs. limit=22.5
+2024-08-03 09:32:09,677 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.359e+02 1.583e+02 1.870e+02 3.127e+02, threshold=3.166e+02, percent-clipped=2.0
+2024-08-03 09:32:10,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84491.0, ans=0.1
+2024-08-03 09:32:31,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.70 vs. limit=22.5
+2024-08-03 09:32:32,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=84601.0, ans=0.2
+2024-08-03 09:32:40,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.84 vs. limit=15.0
+2024-08-03 09:32:52,770 INFO [train.py:1114] (0/4) Epoch 7, batch 1250, loss[loss=0.2575, simple_loss=0.3342, pruned_loss=0.09037, over 13441.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3143, pruned_loss=0.08187, over 2627678.29 frames. ], batch size: 42, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:33:22,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=84747.66666666667, ans=0.125
+2024-08-03 09:33:25,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=84747.66666666667, ans=0.2
+2024-08-03 09:33:26,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=84747.66666666667, ans=0.025
+2024-08-03 09:33:42,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=84821.0, ans=0.125
+2024-08-03 09:33:49,158 INFO [train.py:1114] (0/4) Epoch 7, batch 1300, loss[loss=0.2435, simple_loss=0.3212, pruned_loss=0.08291, over 12825.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3141, pruned_loss=0.0817, over 2630713.32 frames. ], batch size: 52, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:33:53,586 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.285e+01 1.265e+02 1.441e+02 2.116e+02 4.466e+02, threshold=2.882e+02, percent-clipped=10.0
+2024-08-03 09:34:01,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. limit=6.0
+2024-08-03 09:34:03,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84894.33333333333, ans=0.0
+2024-08-03 09:34:04,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84894.33333333333, ans=0.125
+2024-08-03 09:34:07,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0
+2024-08-03 09:34:12,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=84931.0, ans=0.0
+2024-08-03 09:34:18,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=84967.66666666667, ans=0.0
+2024-08-03 09:34:26,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=85004.33333333333, ans=0.0
+2024-08-03 09:34:34,102 INFO [train.py:1114] (0/4) Epoch 7, batch 1350, loss[loss=0.2643, simple_loss=0.336, pruned_loss=0.09633, over 13558.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3129, pruned_loss=0.08075, over 2638010.97 frames. ], batch size: 37, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:34:35,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=85041.0, ans=0.125
+2024-08-03 09:34:35,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=85041.0, ans=0.2
+2024-08-03 09:34:43,921 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:34:44,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=85041.0, ans=0.2
+2024-08-03 09:34:45,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. limit=6.0
+2024-08-03 09:34:52,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=85077.66666666667, ans=0.125
+2024-08-03 09:34:57,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=85114.33333333333, ans=0.125
+2024-08-03 09:35:03,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=85114.33333333333, ans=0.05
+2024-08-03 09:35:05,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85151.0, ans=0.125
+2024-08-03 09:35:05,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=85151.0, ans=0.125
+2024-08-03 09:35:07,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.40 vs. limit=15.0
+2024-08-03 09:35:09,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85151.0, ans=0.125
+2024-08-03 09:35:23,187 INFO [train.py:1114] (0/4) Epoch 7, batch 1400, loss[loss=0.2462, simple_loss=0.3011, pruned_loss=0.09564, over 13273.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3122, pruned_loss=0.08053, over 2642023.10 frames. ], batch size: 31, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:35:27,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.48 vs. limit=22.5
+2024-08-03 09:35:27,646 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.250e+02 1.480e+02 1.868e+02 3.141e+02, threshold=2.961e+02, percent-clipped=2.0
+2024-08-03 09:35:49,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=85334.33333333333, ans=0.04949747468305833
+2024-08-03 09:35:53,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=85334.33333333333, ans=0.015
+2024-08-03 09:35:57,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85334.33333333333, ans=0.125
+2024-08-03 09:35:58,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=85371.0, ans=0.125
+2024-08-03 09:36:10,277 INFO [train.py:1114] (0/4) Epoch 7, batch 1450, loss[loss=0.2674, simple_loss=0.3386, pruned_loss=0.0981, over 13446.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3126, pruned_loss=0.08097, over 2641235.25 frames. ], batch size: 43, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:36:10,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85407.66666666667, ans=0.1
+2024-08-03 09:36:24,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=85444.33333333333, ans=0.125
+2024-08-03 09:36:31,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=85481.0, ans=0.0
+2024-08-03 09:36:37,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=85481.0, ans=0.125
+2024-08-03 09:36:38,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.52 vs. limit=22.5
+2024-08-03 09:36:38,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=85481.0, ans=0.025
+2024-08-03 09:36:49,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=85517.66666666667, ans=0.0
+2024-08-03 09:36:51,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=85517.66666666667, ans=0.09899494936611666
+2024-08-03 09:37:00,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=85554.33333333333, ans=0.2
+2024-08-03 09:37:03,248 INFO [train.py:1114] (0/4) Epoch 7, batch 1500, loss[loss=0.2512, simple_loss=0.3234, pruned_loss=0.08949, over 13410.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3126, pruned_loss=0.08064, over 2640711.10 frames. ], batch size: 39, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:37:07,892 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.276e+02 1.426e+02 1.677e+02 2.585e+02, threshold=2.853e+02, percent-clipped=0.0
+2024-08-03 09:37:09,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=85591.0, ans=10.0
+2024-08-03 09:37:51,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=85701.0, ans=0.07
+2024-08-03 09:38:09,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=85774.33333333333, ans=0.0
+2024-08-03 09:38:10,431 INFO [train.py:1114] (0/4) Epoch 7, batch 1550, loss[loss=0.2387, simple_loss=0.3206, pruned_loss=0.07838, over 13401.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3123, pruned_loss=0.08065, over 2630767.40 frames. ], batch size: 41, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:38:57,202 INFO [train.py:1114] (0/4) Epoch 7, batch 1600, loss[loss=0.2624, simple_loss=0.3434, pruned_loss=0.09067, over 13573.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.3133, pruned_loss=0.08175, over 2624273.86 frames. ], batch size: 39, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:39:02,745 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.288e+02 1.487e+02 1.890e+02 3.069e+02, threshold=2.975e+02, percent-clipped=2.0
+2024-08-03 09:39:09,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=85994.33333333333, ans=0.2
+2024-08-03 09:39:13,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85994.33333333333, ans=0.1
+2024-08-03 09:39:23,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=86031.0, ans=0.0
+2024-08-03 09:39:25,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.23 vs. limit=22.5
+2024-08-03 09:39:36,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=86104.33333333333, ans=0.125
+2024-08-03 09:39:36,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=86104.33333333333, ans=0.125
+2024-08-03 09:39:37,772 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.76 vs. limit=15.0
+2024-08-03 09:39:45,508 INFO [train.py:1114] (0/4) Epoch 7, batch 1650, loss[loss=0.24, simple_loss=0.3253, pruned_loss=0.07732, over 13324.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3141, pruned_loss=0.08239, over 2621545.48 frames. ], batch size: 40, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:40:07,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=86214.33333333333, ans=0.125
+2024-08-03 09:40:10,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=86214.33333333333, ans=0.04949747468305833
+2024-08-03 09:40:28,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=86287.66666666667, ans=0.125
+2024-08-03 09:40:30,979 INFO [train.py:1114] (0/4) Epoch 7, batch 1700, loss[loss=0.2049, simple_loss=0.273, pruned_loss=0.06841, over 13239.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3135, pruned_loss=0.08192, over 2630176.85 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:40:36,462 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.270e+02 1.510e+02 1.921e+02 4.226e+02, threshold=3.020e+02, percent-clipped=3.0
+2024-08-03 09:40:43,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.89 vs. limit=22.5
+2024-08-03 09:41:04,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=86434.33333333333, ans=0.125
+2024-08-03 09:41:13,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=86471.0, ans=0.0
+2024-08-03 09:41:15,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86471.0, ans=0.0
+2024-08-03 09:41:16,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.62 vs. limit=15.0
+2024-08-03 09:41:18,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=86471.0, ans=0.5
+2024-08-03 09:41:23,795 INFO [train.py:1114] (0/4) Epoch 7, batch 1750, loss[loss=0.2327, simple_loss=0.2972, pruned_loss=0.08416, over 13557.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.313, pruned_loss=0.08163, over 2633881.62 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:41:28,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86507.66666666667, ans=0.1
+2024-08-03 09:41:31,884 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:41:36,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.37 vs. limit=10.0
+2024-08-03 09:41:44,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86581.0, ans=0.1
+2024-08-03 09:41:53,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=86617.66666666667, ans=0.2
+2024-08-03 09:41:54,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=86617.66666666667, ans=0.125
+2024-08-03 09:41:57,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=86617.66666666667, ans=0.0
+2024-08-03 09:41:58,849 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:42:02,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=86654.33333333333, ans=0.07
+2024-08-03 09:42:03,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=6.0
+2024-08-03 09:42:09,865 INFO [train.py:1114] (0/4) Epoch 7, batch 1800, loss[loss=0.259, simple_loss=0.3255, pruned_loss=0.09621, over 13558.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3135, pruned_loss=0.08186, over 2634912.39 frames. ], batch size: 38, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:42:11,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.90 vs. limit=10.0
+2024-08-03 09:42:15,296 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.268e+02 1.407e+02 1.831e+02 3.286e+02, threshold=2.815e+02, percent-clipped=2.0
+2024-08-03 09:42:31,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=86764.33333333333, ans=0.0
+2024-08-03 09:42:37,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=86801.0, ans=0.125
+2024-08-03 09:42:43,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=86801.0, ans=0.0
+2024-08-03 09:42:44,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.65 vs. limit=22.5
+2024-08-03 09:42:51,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.36 vs. limit=15.0
+2024-08-03 09:42:54,787 INFO [train.py:1114] (0/4) Epoch 7, batch 1850, loss[loss=0.2198, simple_loss=0.3012, pruned_loss=0.06918, over 13404.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3124, pruned_loss=0.08121, over 2637689.89 frames. ], batch size: 39, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:42:57,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=86874.33333333333, ans=0.0
+2024-08-03 09:42:58,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=86874.33333333333, ans=0.025
+2024-08-03 09:43:05,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.44 vs. limit=22.5
+2024-08-03 09:43:12,172 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:43:24,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=86984.33333333333, ans=0.125
+2024-08-03 09:43:39,438 INFO [train.py:1114] (0/4) Epoch 7, batch 1900, loss[loss=0.2306, simple_loss=0.3084, pruned_loss=0.07638, over 13311.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3128, pruned_loss=0.08107, over 2640411.07 frames. ], batch size: 40, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:43:44,833 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.267e+02 1.561e+02 1.810e+02 3.811e+02, threshold=3.121e+02, percent-clipped=4.0
+2024-08-03 09:43:56,682 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0
+2024-08-03 09:44:28,664 INFO [train.py:1114] (0/4) Epoch 7, batch 1950, loss[loss=0.2563, simple_loss=0.3216, pruned_loss=0.09553, over 13555.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.314, pruned_loss=0.08132, over 2646772.97 frames. ], batch size: 36, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:44:29,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87241.0, ans=0.1
+2024-08-03 09:44:46,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87314.33333333333, ans=0.1
+2024-08-03 09:45:06,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=87351.0, ans=0.125
+2024-08-03 09:45:17,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87387.66666666667, ans=0.1
+2024-08-03 09:45:23,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.27 vs. limit=22.5
+2024-08-03 09:45:24,732 INFO [train.py:1114] (0/4) Epoch 7, batch 2000, loss[loss=0.2175, simple_loss=0.2812, pruned_loss=0.0769, over 13530.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3154, pruned_loss=0.08246, over 2636055.31 frames. ], batch size: 31, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:45:25,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=87424.33333333333, ans=0.125
+2024-08-03 09:45:28,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.70 vs. limit=15.0
+2024-08-03 09:45:28,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=87424.33333333333, ans=0.2
+2024-08-03 09:45:30,328 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.022e+02 1.315e+02 1.529e+02 1.937e+02 2.914e+02, threshold=3.058e+02, percent-clipped=0.0
+2024-08-03 09:45:33,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=87461.0, ans=0.125
+2024-08-03 09:45:34,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=87461.0, ans=0.125
+2024-08-03 09:45:45,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=87497.66666666667, ans=0.2
+2024-08-03 09:46:01,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=87571.0, ans=0.2
+2024-08-03 09:46:07,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=87571.0, ans=0.125
+2024-08-03 09:46:09,940 INFO [train.py:1114] (0/4) Epoch 7, batch 2050, loss[loss=0.2122, simple_loss=0.2829, pruned_loss=0.07079, over 13442.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3143, pruned_loss=0.08223, over 2633737.72 frames. ], batch size: 32, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:46:15,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87607.66666666667, ans=0.1
+2024-08-03 09:46:27,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=87644.33333333333, ans=0.2
+2024-08-03 09:46:30,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=87681.0, ans=0.0
+2024-08-03 09:46:35,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87681.0, ans=0.125
+2024-08-03 09:46:39,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=87717.66666666667, ans=0.5
+2024-08-03 09:46:56,280 INFO [train.py:1114] (0/4) Epoch 7, batch 2100, loss[loss=0.2195, simple_loss=0.3033, pruned_loss=0.06786, over 13536.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3131, pruned_loss=0.08122, over 2638389.42 frames. ], batch size: 37, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:47:00,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=87791.0, ans=0.125
+2024-08-03 09:47:02,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=87791.0, ans=0.125
+2024-08-03 09:47:03,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.729e+01 1.195e+02 1.377e+02 1.752e+02 2.850e+02, threshold=2.753e+02, percent-clipped=0.0
+2024-08-03 09:47:03,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=87791.0, ans=0.2
+2024-08-03 09:47:03,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=87791.0, ans=0.125
+2024-08-03 09:47:06,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=87827.66666666667, ans=0.125
+2024-08-03 09:47:17,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=87864.33333333333, ans=0.025
+2024-08-03 09:47:20,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87864.33333333333, ans=0.1
+2024-08-03 09:47:23,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=87864.33333333333, ans=0.025
+2024-08-03 09:47:24,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=87901.0, ans=0.07
+2024-08-03 09:47:35,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=87937.66666666667, ans=15.0
+2024-08-03 09:47:37,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87937.66666666667, ans=0.125
+2024-08-03 09:47:39,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. 
+2024-08-03 09:47:42,919 INFO [train.py:1114] (0/4) Epoch 7, batch 2150, loss[loss=0.2407, simple_loss=0.3198, pruned_loss=0.08078, over 13563.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3122, pruned_loss=0.08031, over 2646642.98 frames. ], batch size: 36, lr: 1.74e-02, grad_scale: 32.0
+2024-08-03 09:47:48,589 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-24000.pt
+2024-08-03 09:47:56,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=88011.0, ans=0.0
+2024-08-03 09:47:58,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=88011.0, ans=0.0
+2024-08-03 09:48:22,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=88084.33333333333, ans=0.125
+2024-08-03 09:48:23,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.64 vs. limit=22.5
+2024-08-03 09:49:14,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=88121.0, ans=0.0
+2024-08-03 09:49:41,822 INFO [train.py:1114] (0/4) Epoch 7, batch 2200, loss[loss=0.2623, simple_loss=0.3421, pruned_loss=0.09126, over 13409.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3124, pruned_loss=0.08057, over 2644517.75 frames. ], batch size: 39, lr: 1.74e-02, grad_scale: 32.0
+2024-08-03 09:49:44,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=88157.66666666667, ans=0.125
+2024-08-03 09:49:52,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.40 vs. limit=15.0
+2024-08-03 09:49:56,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.554e+01 1.287e+02 1.626e+02 2.364e+02 4.219e+02, threshold=3.252e+02, percent-clipped=14.0
+2024-08-03 09:50:40,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=88267.66666666667, ans=0.125
+2024-08-03 09:51:41,848 INFO [train.py:1114] (0/4) Epoch 7, batch 2250, loss[loss=0.2043, simple_loss=0.3007, pruned_loss=0.05393, over 13349.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3115, pruned_loss=0.07983, over 2641374.32 frames. ], batch size: 37, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:52:51,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=88487.66666666667, ans=10.0
+2024-08-03 09:52:57,990 INFO [train.py:1114] (0/4) Epoch 7, batch 2300, loss[loss=0.217, simple_loss=0.2894, pruned_loss=0.07227, over 13553.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3107, pruned_loss=0.08021, over 2638890.05 frames. ], batch size: 33, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:53:01,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88524.33333333333, ans=0.125
+2024-08-03 09:53:04,301 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.405e+01 1.244e+02 1.416e+02 1.864e+02 3.449e+02, threshold=2.832e+02, percent-clipped=2.0
+2024-08-03 09:53:23,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=88597.66666666667, ans=0.2
+2024-08-03 09:53:30,756 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:53:36,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=88634.33333333333, ans=0.07
+2024-08-03 09:53:52,481 INFO [train.py:1114] (0/4) Epoch 7, batch 2350, loss[loss=0.2522, simple_loss=0.3338, pruned_loss=0.08533, over 13540.00 frames. ], tot_loss[loss=0.236, simple_loss=0.311, pruned_loss=0.0805, over 2641374.24 frames. ], batch size: 38, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:55:19,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88744.33333333333, ans=0.1
+2024-08-03 09:55:26,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=88781.0, ans=0.125
+2024-08-03 09:55:31,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=88781.0, ans=0.125
+2024-08-03 09:55:39,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=88817.66666666667, ans=0.125
+2024-08-03 09:55:52,809 INFO [train.py:1114] (0/4) Epoch 7, batch 2400, loss[loss=0.2107, simple_loss=0.2978, pruned_loss=0.06177, over 13536.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3105, pruned_loss=0.08007, over 2642711.86 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:55:55,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=88891.0, ans=0.2
+2024-08-03 09:55:57,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=88891.0, ans=0.125
+2024-08-03 09:56:07,277 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.279e+02 1.511e+02 1.745e+02 2.971e+02, threshold=3.023e+02, percent-clipped=1.0
+2024-08-03 09:56:12,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88927.66666666667, ans=0.1
+2024-08-03 09:56:18,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88927.66666666667, ans=0.125
+2024-08-03 09:56:37,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=89001.0, ans=0.2
+2024-08-03 09:56:46,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=89037.66666666667, ans=0.0
+2024-08-03 09:56:48,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89037.66666666667, ans=0.1
+2024-08-03 09:56:53,292 INFO [train.py:1114] (0/4) Epoch 7, batch 2450, loss[loss=0.2411, simple_loss=0.317, pruned_loss=0.08258, over 13359.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3119, pruned_loss=0.08065, over 2632319.44 frames. ], batch size: 37, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:57:43,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=89221.0, ans=0.125
+2024-08-03 09:57:50,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.74 vs. limit=6.0
+2024-08-03 09:57:52,086 INFO [train.py:1114] (0/4) Epoch 7, batch 2500, loss[loss=0.2364, simple_loss=0.3232, pruned_loss=0.07474, over 13407.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.312, pruned_loss=0.08073, over 2636324.74 frames. ], batch size: 39, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:57:56,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=89257.66666666667, ans=0.0
+2024-08-03 09:57:59,334 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.252e+02 1.492e+02 2.074e+02 3.860e+02, threshold=2.984e+02, percent-clipped=5.0
+2024-08-03 09:58:00,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=89294.33333333333, ans=0.0
+2024-08-03 09:58:02,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=89294.33333333333, ans=0.0
+2024-08-03 09:58:28,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=89404.33333333333, ans=0.125
+2024-08-03 09:58:36,016 INFO [train.py:1114] (0/4) Epoch 7, batch 2550, loss[loss=0.2259, simple_loss=0.294, pruned_loss=0.0789, over 13549.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3115, pruned_loss=0.08041, over 2638617.73 frames. ], batch size: 31, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:58:43,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=89441.0, ans=0.125
+2024-08-03 09:58:43,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=89441.0, ans=0.07
+2024-08-03 09:58:48,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=89477.66666666667, ans=0.125
+2024-08-03 09:58:57,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=89514.33333333333, ans=0.125
+2024-08-03 09:58:58,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=22.5
+2024-08-03 09:58:58,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=89514.33333333333, ans=0.0
+2024-08-03 09:59:19,647 INFO [train.py:1114] (0/4) Epoch 7, batch 2600, loss[loss=0.2305, simple_loss=0.31, pruned_loss=0.0755, over 13567.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.312, pruned_loss=0.0806, over 2637745.43 frames. ], batch size: 36, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:59:23,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=89624.33333333333, ans=0.125
+2024-08-03 09:59:26,382 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.355e+01 1.229e+02 1.441e+02 1.780e+02 3.809e+02, threshold=2.882e+02, percent-clipped=4.0
+2024-08-03 09:59:35,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=89697.66666666667, ans=0.0
+2024-08-03 09:59:37,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=89697.66666666667, ans=0.2
+2024-08-03 09:59:43,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=89697.66666666667, ans=0.0
+2024-08-03 09:59:49,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=89734.33333333333, ans=0.0
+2024-08-03 09:59:49,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=89734.33333333333, ans=0.125
+2024-08-03 09:59:53,494 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:59:56,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=89771.0, ans=0.0
+2024-08-03 09:59:56,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89771.0, ans=0.125
+2024-08-03 09:59:57,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=89771.0, ans=0.025
+2024-08-03 10:00:02,692 INFO [train.py:1114] (0/4) Epoch 7, batch 2650, loss[loss=0.2387, simple_loss=0.3191, pruned_loss=0.0792, over 13357.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3126, pruned_loss=0.08083, over 2640142.17 frames. ], batch size: 46, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 10:00:08,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=89807.66666666667, ans=0.0
+2024-08-03 10:00:16,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=89844.33333333333, ans=0.0
+2024-08-03 10:00:18,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89844.33333333333, ans=0.1
+2024-08-03 10:01:08,366 INFO [train.py:1114] (0/4) Epoch 7, batch 2700, loss[loss=0.2593, simple_loss=0.3394, pruned_loss=0.08961, over 13554.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3127, pruned_loss=0.08113, over 2637745.01 frames. ], batch size: 40, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 10:01:12,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=89991.0, ans=22.5
+2024-08-03 10:01:16,146 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.262e+02 1.504e+02 2.229e+02 3.961e+02, threshold=3.008e+02, percent-clipped=4.0
+2024-08-03 10:01:18,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0
+2024-08-03 10:01:22,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=90027.66666666667, ans=0.2
+2024-08-03 10:01:22,812 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:01:23,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90027.66666666667, ans=0.125
+2024-08-03 10:01:27,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=90064.33333333333, ans=0.125
+2024-08-03 10:01:30,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90064.33333333333, ans=0.125
+2024-08-03 10:01:53,307 INFO [train.py:1114] (0/4) Epoch 7, batch 2750, loss[loss=0.237, simple_loss=0.3137, pruned_loss=0.08014, over 13334.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3118, pruned_loss=0.0809, over 2635664.21 frames. ], batch size: 34, lr: 1.73e-02, grad_scale: 8.0
+2024-08-03 10:02:11,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=90247.66666666667, ans=0.0
+2024-08-03 10:02:15,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.19 vs. limit=6.0
+2024-08-03 10:02:28,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=90321.0, ans=0.07
+2024-08-03 10:02:29,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=90321.0, ans=0.125
+2024-08-03 10:02:33,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=90321.0, ans=0.125
+2024-08-03 10:02:37,969 INFO [train.py:1114] (0/4) Epoch 7, batch 2800, loss[loss=0.3282, simple_loss=0.367, pruned_loss=0.1447, over 9049.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3117, pruned_loss=0.08083, over 2626996.55 frames. ], batch size: 96, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:02:42,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.49 vs. limit=15.0
+2024-08-03 10:02:43,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90357.66666666667, ans=0.1
+2024-08-03 10:02:46,067 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.878e+01 1.254e+02 1.435e+02 1.719e+02 3.010e+02, threshold=2.870e+02, percent-clipped=1.0
+2024-08-03 10:03:27,735 INFO [train.py:1114] (0/4) Epoch 7, batch 2850, loss[loss=0.2246, simple_loss=0.3025, pruned_loss=0.07335, over 13365.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3129, pruned_loss=0.08136, over 2620747.18 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:03:30,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=90541.0, ans=0.125
+2024-08-03 10:03:32,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=90541.0, ans=0.025
+2024-08-03 10:03:35,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=90541.0, ans=0.2
+2024-08-03 10:03:38,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=90577.66666666667, ans=0.0
+2024-08-03 10:03:53,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=15.0
+2024-08-03 10:03:53,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90614.33333333333, ans=0.1
+2024-08-03 10:03:58,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=90651.0, ans=0.0
+2024-08-03 10:04:02,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=90651.0, ans=0.0
+2024-08-03 10:04:06,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=90687.66666666667, ans=0.0
+2024-08-03 10:04:09,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=90687.66666666667, ans=0.125
+2024-08-03 10:04:12,877 INFO [train.py:1114] (0/4) Epoch 7, batch 2900, loss[loss=0.2297, simple_loss=0.3127, pruned_loss=0.07332, over 13348.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3136, pruned_loss=0.08123, over 2631373.53 frames. ], batch size: 36, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:04:23,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=90724.33333333333, ans=0.0
+2024-08-03 10:04:24,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.250e+02 1.523e+02 1.874e+02 3.482e+02, threshold=3.046e+02, percent-clipped=1.0
+2024-08-03 10:04:28,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=90761.0, ans=0.025
+2024-08-03 10:04:30,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=90761.0, ans=0.0
+2024-08-03 10:04:49,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=90834.33333333333, ans=0.125
+2024-08-03 10:04:49,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90834.33333333333, ans=0.125
+2024-08-03 10:04:51,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90871.0, ans=0.125
+2024-08-03 10:05:03,297 INFO [train.py:1114] (0/4) Epoch 7, batch 2950, loss[loss=0.2299, simple_loss=0.3029, pruned_loss=0.07849, over 13345.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3128, pruned_loss=0.08142, over 2629811.12 frames. ], batch size: 34, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:05:23,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=90981.0, ans=0.025
+2024-08-03 10:05:30,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.62 vs. limit=10.0
+2024-08-03 10:05:34,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=91017.66666666667, ans=0.125
+2024-08-03 10:05:38,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=91054.33333333333, ans=0.0
+2024-08-03 10:05:42,562 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.57 vs. limit=15.0
+2024-08-03 10:05:45,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.75 vs. limit=15.0
+2024-08-03 10:05:47,197 INFO [train.py:1114] (0/4) Epoch 7, batch 3000, loss[loss=0.2545, simple_loss=0.3272, pruned_loss=0.09096, over 13541.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3116, pruned_loss=0.08056, over 2629864.14 frames. ], batch size: 37, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:05:47,198 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 10:06:06,639 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.1942, simple_loss=0.2938, pruned_loss=0.04733, over 944034.00 frames.
+2024-08-03 10:06:06,639 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 10:06:09,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.94 vs. limit=6.0
+2024-08-03 10:06:14,476 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.238e+02 1.419e+02 1.719e+02 4.359e+02, threshold=2.839e+02, percent-clipped=6.0
+2024-08-03 10:06:20,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=91127.66666666667, ans=0.2
+2024-08-03 10:06:28,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.38 vs. limit=15.0
+2024-08-03 10:06:30,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=91164.33333333333, ans=0.125
+2024-08-03 10:06:31,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=91164.33333333333, ans=0.0
+2024-08-03 10:06:35,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=91201.0, ans=0.125
+2024-08-03 10:06:51,250 INFO [train.py:1114] (0/4) Epoch 7, batch 3050, loss[loss=0.2091, simple_loss=0.296, pruned_loss=0.06113, over 13545.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3121, pruned_loss=0.08035, over 2627705.91 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:06:54,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=91274.33333333333, ans=0.125
+2024-08-03 10:06:57,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=91274.33333333333, ans=0.035
+2024-08-03 10:07:18,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.32 vs. limit=10.0
+2024-08-03 10:07:20,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=91384.33333333333, ans=0.05
+2024-08-03 10:07:27,757 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:07:34,427 INFO [train.py:1114] (0/4) Epoch 7, batch 3100, loss[loss=0.2908, simple_loss=0.3531, pruned_loss=0.1143, over 13335.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3125, pruned_loss=0.08094, over 2626878.51 frames. ], batch size: 46, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:07:37,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91457.66666666667, ans=0.125
+2024-08-03 10:07:41,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=15.0
+2024-08-03 10:07:42,190 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.930e+01 1.259e+02 1.446e+02 1.808e+02 2.827e+02, threshold=2.891e+02, percent-clipped=0.0
+2024-08-03 10:07:46,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0
+2024-08-03 10:07:57,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=91531.0, ans=0.0
+2024-08-03 10:08:00,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.90 vs. limit=15.0
+2024-08-03 10:08:02,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=91567.66666666667, ans=0.0
+2024-08-03 10:08:16,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91641.0, ans=0.125
+2024-08-03 10:08:16,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=91641.0, ans=0.2
+2024-08-03 10:08:17,449 INFO [train.py:1114] (0/4) Epoch 7, batch 3150, loss[loss=0.2437, simple_loss=0.3259, pruned_loss=0.08076, over 12994.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3123, pruned_loss=0.08052, over 2628658.27 frames. ], batch size: 48, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:08:25,636 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:08:40,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=91714.33333333333, ans=0.0
+2024-08-03 10:09:01,225 INFO [train.py:1114] (0/4) Epoch 7, batch 3200, loss[loss=0.2447, simple_loss=0.325, pruned_loss=0.0822, over 13549.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3116, pruned_loss=0.08031, over 2633782.72 frames. ], batch size: 37, lr: 1.71e-02, grad_scale: 32.0
+2024-08-03 10:09:02,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=91824.33333333333, ans=0.0
+2024-08-03 10:09:08,920 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.267e+02 1.711e+02 2.068e+02 3.292e+02, threshold=3.421e+02, percent-clipped=4.0
+2024-08-03 10:09:14,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.27 vs. limit=22.5
+2024-08-03 10:09:45,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=91971.0, ans=0.0
+2024-08-03 10:09:48,375 INFO [train.py:1114] (0/4) Epoch 7, batch 3250, loss[loss=0.2216, simple_loss=0.3089, pruned_loss=0.06714, over 13379.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3117, pruned_loss=0.07984, over 2638137.61 frames. ], batch size: 38, lr: 1.71e-02, grad_scale: 32.0
+2024-08-03 10:10:10,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=92081.0, ans=0.025
+2024-08-03 10:10:33,759 INFO [train.py:1114] (0/4) Epoch 7, batch 3300, loss[loss=0.2444, simple_loss=0.3238, pruned_loss=0.08244, over 12965.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3105, pruned_loss=0.07927, over 2640334.31 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:10:36,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=92191.0, ans=0.0
+2024-08-03 10:10:42,157 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.670e+01 1.272e+02 1.617e+02 1.965e+02 3.247e+02, threshold=3.234e+02, percent-clipped=0.0
+2024-08-03 10:10:46,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=92227.66666666667, ans=0.0
+2024-08-03 10:10:47,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=92227.66666666667, ans=0.2
+2024-08-03 10:10:59,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=92301.0, ans=0.2
+2024-08-03 10:11:08,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=92337.66666666667, ans=0.0
+2024-08-03 10:11:15,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.77 vs. limit=15.0
+2024-08-03 10:11:15,783 INFO [train.py:1114] (0/4) Epoch 7, batch 3350, loss[loss=0.2544, simple_loss=0.3222, pruned_loss=0.09337, over 13050.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3126, pruned_loss=0.08098, over 2630097.31 frames. ], batch size: 48, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:11:17,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92374.33333333333, ans=0.125
+2024-08-03 10:11:18,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=92374.33333333333, ans=10.0
+2024-08-03 10:11:19,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=12.0
+2024-08-03 10:11:21,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=92374.33333333333, ans=0.125
+2024-08-03 10:11:24,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=92411.0, ans=0.0
+2024-08-03 10:11:25,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=92411.0, ans=0.0
+2024-08-03 10:11:26,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=92411.0, ans=0.125
+2024-08-03 10:11:27,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92411.0, ans=0.125
+2024-08-03 10:11:42,308 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=15.0
+2024-08-03 10:11:44,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=92484.33333333333, ans=0.125
+2024-08-03 10:11:59,777 INFO [train.py:1114] (0/4) Epoch 7, batch 3400, loss[loss=0.2077, simple_loss=0.2778, pruned_loss=0.06879, over 13561.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3121, pruned_loss=0.08119, over 2625942.53 frames. ], batch size: 31, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:12:03,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=92557.66666666667, ans=0.05
+2024-08-03 10:12:08,999 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.271e+02 1.505e+02 1.907e+02 3.089e+02, threshold=3.010e+02, percent-clipped=0.0
+2024-08-03 10:12:13,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=92594.33333333333, ans=0.125
+2024-08-03 10:12:20,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=92631.0, ans=0.2
+2024-08-03 10:12:21,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.72 vs. limit=10.0
+2024-08-03 10:12:25,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92667.66666666667, ans=0.1
+2024-08-03 10:12:30,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=92667.66666666667, ans=0.125
+2024-08-03 10:12:37,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=92704.33333333333, ans=0.0
+2024-08-03 10:12:42,910 INFO [train.py:1114] (0/4) Epoch 7, batch 3450, loss[loss=0.2589, simple_loss=0.3311, pruned_loss=0.09336, over 12842.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3123, pruned_loss=0.08101, over 2629736.80 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:12:44,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=92741.0, ans=0.2
+2024-08-03 10:13:02,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=92814.33333333333, ans=0.0
+2024-08-03 10:13:25,188 INFO [train.py:1114] (0/4) Epoch 7, batch 3500, loss[loss=0.2238, simple_loss=0.298, pruned_loss=0.0748, over 13526.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3111, pruned_loss=0.08038, over 2631454.14 frames. ], batch size: 34, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:13:33,818 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.223e+02 1.539e+02 1.881e+02 2.645e+02, threshold=3.078e+02, percent-clipped=0.0
+2024-08-03 10:13:44,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=92997.66666666667, ans=0.0
+2024-08-03 10:13:47,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92997.66666666667, ans=0.125
+2024-08-03 10:13:52,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=93034.33333333333, ans=0.125
+2024-08-03 10:14:09,033 INFO [train.py:1114] (0/4) Epoch 7, batch 3550, loss[loss=0.2362, simple_loss=0.3107, pruned_loss=0.08085, over 12560.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3134, pruned_loss=0.08141, over 2629924.01 frames. ], batch size: 59, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:14:12,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=93107.66666666667, ans=0.125
+2024-08-03 10:14:26,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0
+2024-08-03 10:14:30,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.07 vs. limit=15.0
+2024-08-03 10:14:40,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.08 vs. limit=15.0
+2024-08-03 10:14:48,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=93254.33333333333, ans=0.0
+2024-08-03 10:14:53,539 INFO [train.py:1114] (0/4) Epoch 7, batch 3600, loss[loss=0.2887, simple_loss=0.3418, pruned_loss=0.1178, over 9123.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3189, pruned_loss=0.08669, over 2488835.44 frames. ], batch size: 96, lr: 1.70e-02, grad_scale: 32.0
+2024-08-03 10:15:02,199 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.315e+02 1.480e+02 1.683e+02 2.632e+02, threshold=2.960e+02, percent-clipped=0.0
+2024-08-03 10:15:27,187 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-7.pt
+2024-08-03 10:16:13,470 INFO [train.py:1114] (0/4) Epoch 8, batch 0, loss[loss=0.2003, simple_loss=0.2823, pruned_loss=0.05917, over 13344.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2823, pruned_loss=0.05917, over 13344.00 frames. ], batch size: 33, lr: 1.60e-02, grad_scale: 32.0
+2024-08-03 10:16:13,473 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 10:16:24,029 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.1977, simple_loss=0.2989, pruned_loss=0.04829, over 944034.00 frames.
+2024-08-03 10:16:24,030 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 10:16:26,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93437.66666666667, ans=0.125
+2024-08-03 10:16:39,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=22.30 vs. limit=15.0
+2024-08-03 10:16:42,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.34 vs. limit=22.5
+2024-08-03 10:16:42,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.90 vs. limit=15.0
+2024-08-03 10:16:43,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=93511.0, ans=0.125
+2024-08-03 10:16:59,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=93584.33333333333, ans=0.025
+2024-08-03 10:17:01,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=93584.33333333333, ans=0.125
+2024-08-03 10:17:09,630 INFO [train.py:1114] (0/4) Epoch 8, batch 50, loss[loss=0.1889, simple_loss=0.2723, pruned_loss=0.05274, over 13410.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3134, pruned_loss=0.08108, over 578960.96 frames. ], batch size: 32, lr: 1.60e-02, grad_scale: 16.0
+2024-08-03 10:17:30,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.247e+02 1.447e+02 2.039e+02 3.809e+02, threshold=2.894e+02, percent-clipped=5.0
+2024-08-03 10:17:36,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.77 vs. limit=6.0
+2024-08-03 10:17:43,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0
+2024-08-03 10:17:44,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.21 vs. limit=15.0
+2024-08-03 10:17:57,652 INFO [train.py:1114] (0/4) Epoch 8, batch 100, loss[loss=0.2472, simple_loss=0.3157, pruned_loss=0.08936, over 13545.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.314, pruned_loss=0.0802, over 1025213.11 frames. ], batch size: 35, lr: 1.60e-02, grad_scale: 16.0
+2024-08-03 10:18:43,049 INFO [train.py:1114] (0/4) Epoch 8, batch 150, loss[loss=0.1941, simple_loss=0.2693, pruned_loss=0.05949, over 13431.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3104, pruned_loss=0.0783, over 1386750.07 frames. ], batch size: 32, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:18:45,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.02 vs. limit=10.0
+2024-08-03 10:18:46,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=93987.66666666667, ans=0.07
+2024-08-03 10:18:48,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=93987.66666666667, ans=0.2
+2024-08-03 10:18:54,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=94024.33333333333, ans=0.125
+2024-08-03 10:19:01,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94061.0, ans=0.125
+2024-08-03 10:19:02,669 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.928e+01 1.201e+02 1.310e+02 1.526e+02 2.654e+02, threshold=2.621e+02, percent-clipped=0.0
+2024-08-03 10:19:05,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94061.0, ans=0.1
+2024-08-03 10:19:06,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0
+2024-08-03 10:19:20,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=94134.33333333333, ans=0.0
+2024-08-03 10:19:28,096 INFO [train.py:1114] (0/4) Epoch 8, batch 200, loss[loss=0.2412, simple_loss=0.3123, pruned_loss=0.08505, over 12598.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3077, pruned_loss=0.07704, over 1665159.25 frames. ], batch size: 58, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:19:30,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.52 vs. limit=15.0
+2024-08-03 10:19:51,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94244.33333333333, ans=0.125
+2024-08-03 10:19:57,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94244.33333333333, ans=0.125
+2024-08-03 10:19:57,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=94244.33333333333, ans=0.125
+2024-08-03 10:20:18,163 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0
+2024-08-03 10:20:23,201 INFO [train.py:1114] (0/4) Epoch 8, batch 250, loss[loss=0.2408, simple_loss=0.3174, pruned_loss=0.08212, over 13318.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3085, pruned_loss=0.07753, over 1883030.22 frames. ], batch size: 46, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:20:29,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=94354.33333333333, ans=0.0
+2024-08-03 10:20:46,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.61 vs. limit=22.5
+2024-08-03 10:20:47,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.275e+02 1.578e+02 1.902e+02 3.207e+02, threshold=3.155e+02, percent-clipped=3.0
+2024-08-03 10:20:54,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94427.66666666667, ans=0.125
+2024-08-03 10:21:11,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=94501.0, ans=0.0
+2024-08-03 10:21:17,502 INFO [train.py:1114] (0/4) Epoch 8, batch 300, loss[loss=0.2353, simple_loss=0.3237, pruned_loss=0.07345, over 13445.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3077, pruned_loss=0.07721, over 2050255.55 frames. ], batch size: 42, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:22:12,144 INFO [train.py:1114] (0/4) Epoch 8, batch 350, loss[loss=0.233, simple_loss=0.3022, pruned_loss=0.0819, over 13576.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3087, pruned_loss=0.07767, over 2180908.66 frames. ], batch size: 33, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:22:23,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=94757.66666666667, ans=0.05
+2024-08-03 10:22:26,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=94757.66666666667, ans=0.125
+2024-08-03 10:22:32,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.242e+02 1.508e+02 2.025e+02 3.534e+02, threshold=3.015e+02, percent-clipped=1.0
+2024-08-03 10:22:34,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.43 vs. limit=15.0
+2024-08-03 10:22:35,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.14 vs. limit=6.0
+2024-08-03 10:22:36,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94794.33333333333, ans=0.1
+2024-08-03 10:22:48,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.24 vs. limit=10.0
+2024-08-03 10:22:50,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.76 vs. limit=15.0
+2024-08-03 10:22:56,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=94904.33333333333, ans=0.0
+2024-08-03 10:22:57,070 INFO [train.py:1114] (0/4) Epoch 8, batch 400, loss[loss=0.246, simple_loss=0.3235, pruned_loss=0.08423, over 13365.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3088, pruned_loss=0.07802, over 2285054.73 frames. ], batch size: 37, lr: 1.59e-02, grad_scale: 32.0
+2024-08-03 10:23:01,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=94904.33333333333, ans=0.125
+2024-08-03 10:23:02,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=94904.33333333333, ans=0.125
+2024-08-03 10:23:04,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94904.33333333333, ans=0.125
+2024-08-03 10:23:11,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=94941.0, ans=0.025
+2024-08-03 10:23:21,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94977.66666666667, ans=0.1
+2024-08-03 10:23:40,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=12.0
+2024-08-03 10:23:44,499 INFO [train.py:1114] (0/4) Epoch 8, batch 450, loss[loss=0.236, simple_loss=0.3155, pruned_loss=0.07828, over 13543.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3083, pruned_loss=0.07785, over 2358648.15 frames. ], batch size: 38, lr: 1.59e-02, grad_scale: 32.0
+2024-08-03 10:23:53,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=95087.66666666667, ans=0.2
+2024-08-03 10:23:54,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=95087.66666666667, ans=0.2
+2024-08-03 10:24:08,008 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.730e+01 1.246e+02 1.455e+02 1.839e+02 3.207e+02, threshold=2.909e+02, percent-clipped=1.0
+2024-08-03 10:24:20,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=95197.66666666667, ans=0.125
+2024-08-03 10:24:23,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=15.0
+2024-08-03 10:24:26,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95234.33333333333, ans=0.1
+2024-08-03 10:24:33,370 INFO [train.py:1114] (0/4) Epoch 8, batch 500, loss[loss=0.2578, simple_loss=0.3342, pruned_loss=0.09074, over 13424.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3071, pruned_loss=0.07695, over 2424270.65 frames. ], batch size: 43, lr: 1.58e-02, grad_scale: 32.0
+2024-08-03 10:24:33,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=95271.0, ans=0.2
+2024-08-03 10:24:38,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=95271.0, ans=10.0
+2024-08-03 10:24:39,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.48 vs. limit=22.5
+2024-08-03 10:24:47,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.41 vs. limit=22.5
+2024-08-03 10:24:53,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0
+2024-08-03 10:25:07,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=95381.0, ans=0.125
+2024-08-03 10:25:19,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=15.0
+2024-08-03 10:25:21,151 INFO [train.py:1114] (0/4) Epoch 8, batch 550, loss[loss=0.2642, simple_loss=0.3322, pruned_loss=0.09806, over 13283.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3074, pruned_loss=0.07724, over 2467971.24 frames. ], batch size: 49, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:25:24,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=95454.33333333333, ans=0.125
+2024-08-03 10:25:42,954 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.590e+01 1.201e+02 1.471e+02 1.924e+02 3.912e+02, threshold=2.942e+02, percent-clipped=7.0
+2024-08-03 10:25:45,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=95527.66666666667, ans=0.125
+2024-08-03 10:25:53,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=95564.33333333333, ans=0.125
+2024-08-03 10:25:53,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0
+2024-08-03 10:26:06,489 INFO [train.py:1114] (0/4) Epoch 8, batch 600, loss[loss=0.2282, simple_loss=0.311, pruned_loss=0.07267, over 13270.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3075, pruned_loss=0.077, over 2507892.98 frames. ], batch size: 46, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:26:18,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95637.66666666667, ans=0.1
+2024-08-03 10:26:41,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=95711.0, ans=0.125
+2024-08-03 10:26:54,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=95784.33333333333, ans=0.0
+2024-08-03 10:26:56,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=95784.33333333333, ans=0.125
+2024-08-03 10:26:58,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=95784.33333333333, ans=0.125
+2024-08-03 10:27:01,421 INFO [train.py:1114] (0/4) Epoch 8, batch 650, loss[loss=0.1898, simple_loss=0.273, pruned_loss=0.05325, over 13543.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3068, pruned_loss=0.07657, over 2543118.79 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:27:03,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.18 vs. limit=15.0
+2024-08-03 10:27:16,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=95857.66666666667, ans=0.125
+2024-08-03 10:27:16,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=95857.66666666667, ans=0.0
+2024-08-03 10:27:22,948 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.916e+01 1.338e+02 1.718e+02 2.265e+02 3.658e+02, threshold=3.436e+02, percent-clipped=6.0
+2024-08-03 10:27:46,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95967.66666666667, ans=0.125
+2024-08-03 10:27:48,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95967.66666666667, ans=0.125
+2024-08-03 10:27:49,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.03 vs. limit=15.0
+2024-08-03 10:27:52,502 INFO [train.py:1114] (0/4) Epoch 8, batch 700, loss[loss=0.1953, simple_loss=0.2689, pruned_loss=0.0609, over 13527.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3074, pruned_loss=0.07697, over 2565247.63 frames. ], batch size: 35, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:27:52,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96004.33333333333, ans=0.1
+2024-08-03 10:28:18,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=96077.66666666667, ans=0.125
+2024-08-03 10:28:22,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96114.33333333333, ans=0.125
+2024-08-03 10:28:23,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=96114.33333333333, ans=0.125
+2024-08-03 10:28:37,999 INFO [train.py:1114] (0/4) Epoch 8, batch 750, loss[loss=0.2049, simple_loss=0.2882, pruned_loss=0.06075, over 13358.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3069, pruned_loss=0.07688, over 2582320.02 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:28:55,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96224.33333333333, ans=0.125
+2024-08-03 10:28:56,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.25 vs.
limit=12.0 +2024-08-03 10:29:15,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.402e+01 1.294e+02 1.560e+02 2.121e+02 3.650e+02, threshold=3.121e+02, percent-clipped=1.0 +2024-08-03 10:29:23,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=96297.66666666667, ans=0.2 +2024-08-03 10:29:43,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=96297.66666666667, ans=0.125 +2024-08-03 10:29:43,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=96297.66666666667, ans=0.125 +2024-08-03 10:29:48,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=96297.66666666667, ans=0.125 +2024-08-03 10:29:49,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=15.0 +2024-08-03 10:29:53,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96334.33333333333, ans=0.125 +2024-08-03 10:29:58,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=96334.33333333333, ans=0.125 +2024-08-03 10:30:00,644 INFO [train.py:1114] (0/4) Epoch 8, batch 800, loss[loss=0.2093, simple_loss=0.2892, pruned_loss=0.06472, over 13348.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3066, pruned_loss=0.07679, over 2596761.97 frames. ], batch size: 33, lr: 1.58e-02, grad_scale: 16.0 +2024-08-03 10:30:06,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.09 vs. limit=10.0 +2024-08-03 10:30:08,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.54 vs. limit=15.0 +2024-08-03 10:30:16,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.03 vs. limit=15.0 +2024-08-03 10:30:53,116 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.12 vs. limit=10.0 +2024-08-03 10:30:59,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=96481.0, ans=0.125 +2024-08-03 10:31:10,618 INFO [train.py:1114] (0/4) Epoch 8, batch 850, loss[loss=0.2299, simple_loss=0.3157, pruned_loss=0.07207, over 13320.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3065, pruned_loss=0.07675, over 2609118.37 frames. ], batch size: 40, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:31:10,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=96554.33333333333, ans=0.2 +2024-08-03 10:31:11,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.45 vs. 
limit=5.0 +2024-08-03 10:31:11,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96554.33333333333, ans=0.1 +2024-08-03 10:31:12,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=96554.33333333333, ans=0.025 +2024-08-03 10:31:23,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=96591.0, ans=0.125 +2024-08-03 10:31:23,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96591.0, ans=0.1 +2024-08-03 10:31:32,150 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.603e+01 1.283e+02 1.438e+02 1.736e+02 2.880e+02, threshold=2.876e+02, percent-clipped=0.0 +2024-08-03 10:31:34,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96627.66666666667, ans=0.125 +2024-08-03 10:31:48,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=96701.0, ans=0.0 +2024-08-03 10:31:56,059 INFO [train.py:1114] (0/4) Epoch 8, batch 900, loss[loss=0.2277, simple_loss=0.2968, pruned_loss=0.07926, over 13333.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3061, pruned_loss=0.07622, over 2611906.72 frames. ], batch size: 33, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:31:58,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96737.66666666667, ans=0.1 +2024-08-03 10:32:01,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=96737.66666666667, ans=0.125 +2024-08-03 10:32:17,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=96774.33333333333, ans=0.05 +2024-08-03 10:32:23,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=96811.0, ans=0.125 +2024-08-03 10:32:39,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96884.33333333333, ans=0.125 +2024-08-03 10:32:47,021 INFO [train.py:1114] (0/4) Epoch 8, batch 950, loss[loss=0.2151, simple_loss=0.2915, pruned_loss=0.06934, over 13518.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3063, pruned_loss=0.07629, over 2612847.80 frames. ], batch size: 34, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:32:53,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.78 vs. 
limit=10.0 +2024-08-03 10:32:54,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96921.0, ans=0.1 +2024-08-03 10:33:06,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96994.33333333333, ans=0.1 +2024-08-03 10:33:08,793 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.880e+01 1.218e+02 1.421e+02 1.776e+02 3.206e+02, threshold=2.842e+02, percent-clipped=1.0 +2024-08-03 10:33:16,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=97031.0, ans=0.0 +2024-08-03 10:33:18,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=97031.0, ans=0.125 +2024-08-03 10:33:32,187 INFO [train.py:1114] (0/4) Epoch 8, batch 1000, loss[loss=0.2375, simple_loss=0.3113, pruned_loss=0.08187, over 13361.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3076, pruned_loss=0.07713, over 2611533.51 frames. ], batch size: 35, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:33:35,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=97104.33333333333, ans=0.0 +2024-08-03 10:33:36,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=97104.33333333333, ans=0.0 +2024-08-03 10:33:37,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=97104.33333333333, ans=0.2 +2024-08-03 10:34:14,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97251.0, ans=0.1 +2024-08-03 10:34:19,192 INFO [train.py:1114] (0/4) Epoch 8, batch 1050, loss[loss=0.2306, simple_loss=0.3121, pruned_loss=0.07449, over 13569.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3062, pruned_loss=0.07636, over 2617006.84 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:34:26,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=12.0 +2024-08-03 10:34:40,969 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.371e+01 1.174e+02 1.326e+02 1.659e+02 3.865e+02, threshold=2.652e+02, percent-clipped=4.0 +2024-08-03 10:35:04,456 INFO [train.py:1114] (0/4) Epoch 8, batch 1100, loss[loss=0.2519, simple_loss=0.328, pruned_loss=0.08791, over 13560.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3063, pruned_loss=0.07612, over 2619817.36 frames. 
], batch size: 36, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:35:10,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97471.0, ans=0.125 +2024-08-03 10:35:25,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=97544.33333333333, ans=0.0 +2024-08-03 10:35:40,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97581.0, ans=0.125 +2024-08-03 10:35:41,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97581.0, ans=0.125 +2024-08-03 10:35:53,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=97617.66666666667, ans=0.2 +2024-08-03 10:35:56,409 INFO [train.py:1114] (0/4) Epoch 8, batch 1150, loss[loss=0.2119, simple_loss=0.2894, pruned_loss=0.06719, over 13571.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3068, pruned_loss=0.0767, over 2619052.98 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:35:57,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=97654.33333333333, ans=0.1 +2024-08-03 10:36:05,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97691.0, ans=0.125 +2024-08-03 10:36:12,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=97691.0, ans=0.0 +2024-08-03 10:36:15,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.93 vs. limit=10.0 +2024-08-03 10:36:17,815 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.235e+02 1.421e+02 1.826e+02 2.699e+02, threshold=2.842e+02, percent-clipped=1.0 +2024-08-03 10:36:31,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.28 vs. limit=15.0 +2024-08-03 10:36:41,680 INFO [train.py:1114] (0/4) Epoch 8, batch 1200, loss[loss=0.2045, simple_loss=0.2932, pruned_loss=0.05786, over 13589.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3067, pruned_loss=0.07606, over 2615506.25 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 32.0 +2024-08-03 10:36:43,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=97837.66666666667, ans=0.125 +2024-08-03 10:36:49,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=97837.66666666667, ans=0.125 +2024-08-03 10:37:03,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=97911.0, ans=0.0 +2024-08-03 10:37:17,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=97947.66666666667, ans=0.09899494936611666 +2024-08-03 10:37:37,225 INFO [train.py:1114] (0/4) Epoch 8, batch 1250, loss[loss=0.2721, simple_loss=0.3419, pruned_loss=0.1012, over 13445.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3066, pruned_loss=0.07571, over 2627383.04 frames. 
], batch size: 42, lr: 1.56e-02, grad_scale: 32.0 +2024-08-03 10:37:38,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=98021.0, ans=0.125 +2024-08-03 10:37:54,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=98094.33333333333, ans=0.125 +2024-08-03 10:37:58,743 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.185e+02 1.323e+02 1.561e+02 3.297e+02, threshold=2.645e+02, percent-clipped=2.0 +2024-08-03 10:37:58,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=98094.33333333333, ans=0.125 +2024-08-03 10:38:02,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98094.33333333333, ans=0.125 +2024-08-03 10:38:22,560 INFO [train.py:1114] (0/4) Epoch 8, batch 1300, loss[loss=0.2261, simple_loss=0.3075, pruned_loss=0.07233, over 12814.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3047, pruned_loss=0.07464, over 2630572.60 frames. ], batch size: 52, lr: 1.56e-02, grad_scale: 16.0 +2024-08-03 10:38:58,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=98241.0, ans=0.2 +2024-08-03 10:39:05,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0 +2024-08-03 10:39:14,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98314.33333333333, ans=0.1 +2024-08-03 10:39:24,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=98351.0, ans=0.0 +2024-08-03 10:39:30,882 INFO [train.py:1114] (0/4) Epoch 8, batch 1350, loss[loss=0.2186, simple_loss=0.3027, pruned_loss=0.06723, over 13539.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3041, pruned_loss=0.07429, over 2638424.38 frames. ], batch size: 37, lr: 1.56e-02, grad_scale: 4.0 +2024-08-03 10:40:00,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=98461.0, ans=0.0 +2024-08-03 10:40:01,342 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.224e+02 1.395e+02 1.638e+02 2.508e+02, threshold=2.789e+02, percent-clipped=0.0 +2024-08-03 10:40:07,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.12 vs. limit=15.0 +2024-08-03 10:40:08,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=98497.66666666667, ans=0.0 +2024-08-03 10:40:22,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.31 vs. limit=6.0 +2024-08-03 10:40:22,577 INFO [train.py:1114] (0/4) Epoch 8, batch 1400, loss[loss=0.2013, simple_loss=0.2709, pruned_loss=0.06584, over 13274.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3056, pruned_loss=0.07535, over 2642380.05 frames. 
], batch size: 31, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:40:27,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=98571.0, ans=0.2 +2024-08-03 10:40:32,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=98607.66666666667, ans=0.125 +2024-08-03 10:40:51,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=98681.0, ans=0.025 +2024-08-03 10:40:55,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0 +2024-08-03 10:41:07,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=98754.33333333333, ans=0.125 +2024-08-03 10:41:07,794 INFO [train.py:1114] (0/4) Epoch 8, batch 1450, loss[loss=0.2251, simple_loss=0.3135, pruned_loss=0.06834, over 13396.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3063, pruned_loss=0.07589, over 2641689.37 frames. ], batch size: 43, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:41:18,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98791.0, ans=0.125 +2024-08-03 10:41:20,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98791.0, ans=0.125 +2024-08-03 10:41:34,033 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.227e+02 1.462e+02 1.726e+02 3.399e+02, threshold=2.923e+02, percent-clipped=2.0 +2024-08-03 10:41:35,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=98827.66666666667, ans=0.0 +2024-08-03 10:41:45,830 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:41:49,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=98901.0, ans=0.125 +2024-08-03 10:41:54,852 INFO [train.py:1114] (0/4) Epoch 8, batch 1500, loss[loss=0.2467, simple_loss=0.3292, pruned_loss=0.08214, over 13403.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3062, pruned_loss=0.07578, over 2641642.03 frames. ], batch size: 39, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:41:59,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.11 vs. limit=15.0 +2024-08-03 10:42:02,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=98937.66666666667, ans=0.125 +2024-08-03 10:42:04,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98974.33333333333, ans=0.1 +2024-08-03 10:42:14,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.86 vs. 
limit=22.5 +2024-08-03 10:42:21,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=99011.0, ans=0.125 +2024-08-03 10:42:32,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99084.33333333333, ans=0.125 +2024-08-03 10:42:33,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=99084.33333333333, ans=0.0 +2024-08-03 10:42:35,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=99084.33333333333, ans=0.125 +2024-08-03 10:42:38,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=99084.33333333333, ans=0.125 +2024-08-03 10:42:40,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99121.0, ans=0.125 +2024-08-03 10:42:40,624 INFO [train.py:1114] (0/4) Epoch 8, batch 1550, loss[loss=0.2648, simple_loss=0.3396, pruned_loss=0.09503, over 13393.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3062, pruned_loss=0.07596, over 2632542.24 frames. ], batch size: 41, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:42:52,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=99157.66666666667, ans=0.125 +2024-08-03 10:42:53,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=99157.66666666667, ans=0.125 +2024-08-03 10:42:55,608 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.31 vs. limit=5.0 +2024-08-03 10:43:04,922 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.564e+01 1.246e+02 1.506e+02 1.858e+02 4.061e+02, threshold=3.012e+02, percent-clipped=4.0 +2024-08-03 10:43:05,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99194.33333333333, ans=0.1 +2024-08-03 10:43:10,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=99231.0, ans=0.025 +2024-08-03 10:43:12,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=99231.0, ans=0.125 +2024-08-03 10:43:25,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=99267.66666666667, ans=0.09899494936611666 +2024-08-03 10:43:26,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=99267.66666666667, ans=0.125 +2024-08-03 10:43:29,797 INFO [train.py:1114] (0/4) Epoch 8, batch 1600, loss[loss=0.235, simple_loss=0.313, pruned_loss=0.07848, over 13573.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3059, pruned_loss=0.07574, over 2626135.89 frames. 
], batch size: 39, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:43:42,038 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:43:45,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99341.0, ans=0.1 +2024-08-03 10:43:48,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.04 vs. limit=15.0 +2024-08-03 10:43:55,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99377.66666666667, ans=0.125 +2024-08-03 10:44:15,505 INFO [train.py:1114] (0/4) Epoch 8, batch 1650, loss[loss=0.2166, simple_loss=0.3073, pruned_loss=0.06297, over 13343.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3058, pruned_loss=0.07568, over 2623424.97 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:44:26,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=99524.33333333333, ans=15.0 +2024-08-03 10:44:31,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99524.33333333333, ans=0.1 +2024-08-03 10:44:40,419 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.002e+02 1.254e+02 1.426e+02 1.791e+02 5.006e+02, threshold=2.852e+02, percent-clipped=4.0 +2024-08-03 10:44:42,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=99597.66666666667, ans=0.07 +2024-08-03 10:44:45,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.97 vs. limit=15.0 +2024-08-03 10:44:46,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=99597.66666666667, ans=0.125 +2024-08-03 10:44:50,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=99597.66666666667, ans=0.2 +2024-08-03 10:45:03,314 INFO [train.py:1114] (0/4) Epoch 8, batch 1700, loss[loss=0.2068, simple_loss=0.2706, pruned_loss=0.07148, over 13244.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3061, pruned_loss=0.07567, over 2631135.49 frames. ], batch size: 31, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:45:12,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99707.66666666667, ans=0.1 +2024-08-03 10:45:45,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99817.66666666667, ans=0.1 +2024-08-03 10:45:47,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=99854.33333333333, ans=0.2 +2024-08-03 10:45:48,414 INFO [train.py:1114] (0/4) Epoch 8, batch 1750, loss[loss=0.2172, simple_loss=0.2877, pruned_loss=0.0733, over 13521.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3046, pruned_loss=0.07485, over 2634875.92 frames. 
], batch size: 31, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:45:55,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=99854.33333333333, ans=0.0 +2024-08-03 10:46:13,076 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.856e+01 1.232e+02 1.552e+02 2.162e+02 4.270e+02, threshold=3.103e+02, percent-clipped=14.0 +2024-08-03 10:46:20,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.47 vs. limit=15.0 +2024-08-03 10:46:29,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=100001.0, ans=0.125 +2024-08-03 10:46:34,037 INFO [train.py:1114] (0/4) Epoch 8, batch 1800, loss[loss=0.225, simple_loss=0.3115, pruned_loss=0.06928, over 13546.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3047, pruned_loss=0.07506, over 2635792.22 frames. ], batch size: 38, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:46:41,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100037.66666666667, ans=0.125 +2024-08-03 10:46:44,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=15.0 +2024-08-03 10:46:45,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=100074.33333333333, ans=0.2 +2024-08-03 10:46:51,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=100074.33333333333, ans=0.0 +2024-08-03 10:47:09,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=100147.66666666667, ans=0.125 +2024-08-03 10:47:15,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.02 vs. limit=15.0 +2024-08-03 10:47:24,486 INFO [train.py:1114] (0/4) Epoch 8, batch 1850, loss[loss=0.2245, simple_loss=0.3121, pruned_loss=0.06849, over 13386.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3044, pruned_loss=0.07496, over 2637682.07 frames. ], batch size: 39, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:47:26,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.68 vs. 
limit=10.0 +2024-08-03 10:47:28,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100221.0, ans=0.1 +2024-08-03 10:47:30,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100221.0, ans=0.125 +2024-08-03 10:47:31,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=100221.0, ans=0.125 +2024-08-03 10:47:33,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100257.66666666667, ans=0.125 +2024-08-03 10:47:37,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100257.66666666667, ans=0.125 +2024-08-03 10:47:40,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=100257.66666666667, ans=0.0 +2024-08-03 10:47:48,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.993e+01 1.278e+02 1.540e+02 2.004e+02 3.260e+02, threshold=3.079e+02, percent-clipped=4.0 +2024-08-03 10:47:49,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=100294.33333333333, ans=0.09899494936611666 +2024-08-03 10:47:53,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100331.0, ans=0.1 +2024-08-03 10:47:55,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=15.0 +2024-08-03 10:47:59,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100367.66666666667, ans=0.125 +2024-08-03 10:48:04,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=100367.66666666667, ans=0.95 +2024-08-03 10:48:09,643 INFO [train.py:1114] (0/4) Epoch 8, batch 1900, loss[loss=0.2677, simple_loss=0.3441, pruned_loss=0.09568, over 13325.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.306, pruned_loss=0.0755, over 2639641.21 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:48:10,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100404.33333333333, ans=0.125 +2024-08-03 10:48:11,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=100404.33333333333, ans=0.95 +2024-08-03 10:48:19,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.15 vs. limit=22.5 +2024-08-03 10:48:30,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.25 vs. limit=15.0 +2024-08-03 10:48:44,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100514.33333333333, ans=0.1 +2024-08-03 10:48:56,508 INFO [train.py:1114] (0/4) Epoch 8, batch 1950, loss[loss=0.2173, simple_loss=0.2968, pruned_loss=0.06895, over 13546.00 frames. 
], tot_loss[loss=0.2295, simple_loss=0.3071, pruned_loss=0.07592, over 2646289.47 frames. ], batch size: 36, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:48:57,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.55 vs. limit=22.5 +2024-08-03 10:48:58,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.91 vs. limit=15.0 +2024-08-03 10:48:58,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=100587.66666666667, ans=0.0 +2024-08-03 10:49:01,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=100587.66666666667, ans=0.0 +2024-08-03 10:49:03,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=100587.66666666667, ans=0.0 +2024-08-03 10:49:08,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100624.33333333333, ans=0.125 +2024-08-03 10:49:13,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=100624.33333333333, ans=0.125 +2024-08-03 10:49:18,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=100661.0, ans=0.125 +2024-08-03 10:49:21,461 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.223e+02 1.431e+02 1.772e+02 2.626e+02, threshold=2.861e+02, percent-clipped=0.0 +2024-08-03 10:49:42,417 INFO [train.py:1114] (0/4) Epoch 8, batch 2000, loss[loss=0.1982, simple_loss=0.2715, pruned_loss=0.06242, over 13538.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3069, pruned_loss=0.07597, over 2636084.31 frames. ], batch size: 31, lr: 1.54e-02, grad_scale: 32.0 +2024-08-03 10:50:00,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100807.66666666667, ans=0.125 +2024-08-03 10:50:26,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=15.0 +2024-08-03 10:50:28,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=100917.66666666667, ans=0.0 +2024-08-03 10:50:38,504 INFO [train.py:1114] (0/4) Epoch 8, batch 2050, loss[loss=0.2085, simple_loss=0.2735, pruned_loss=0.07175, over 13421.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3063, pruned_loss=0.07617, over 2633860.93 frames. ], batch size: 32, lr: 1.54e-02, grad_scale: 32.0 +2024-08-03 10:50:39,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.14 vs. 
limit=15.0 +2024-08-03 10:50:42,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100954.33333333333, ans=0.0 +2024-08-03 10:50:46,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=100954.33333333333, ans=0.0 +2024-08-03 10:50:58,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101027.66666666667, ans=0.125 +2024-08-03 10:51:03,415 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.251e+02 1.508e+02 1.862e+02 2.983e+02, threshold=3.016e+02, percent-clipped=1.0 +2024-08-03 10:51:09,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=101064.33333333333, ans=0.2 +2024-08-03 10:51:12,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=101064.33333333333, ans=0.125 +2024-08-03 10:51:23,589 INFO [train.py:1114] (0/4) Epoch 8, batch 2100, loss[loss=0.2032, simple_loss=0.2916, pruned_loss=0.05744, over 13540.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3051, pruned_loss=0.07504, over 2639382.39 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:51:33,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101174.33333333333, ans=0.125 +2024-08-03 10:51:43,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101211.0, ans=0.1 +2024-08-03 10:51:46,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.45 vs. limit=22.5 +2024-08-03 10:51:48,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101211.0, ans=0.125 +2024-08-03 10:52:00,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=101284.33333333333, ans=0.2 +2024-08-03 10:52:09,818 INFO [train.py:1114] (0/4) Epoch 8, batch 2150, loss[loss=0.2091, simple_loss=0.2838, pruned_loss=0.06723, over 13545.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3041, pruned_loss=0.07424, over 2647743.22 frames. ], batch size: 36, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:52:28,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0 +2024-08-03 10:52:36,953 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.507e+01 1.244e+02 1.463e+02 2.141e+02 4.797e+02, threshold=2.925e+02, percent-clipped=7.0 +2024-08-03 10:52:37,242 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:52:39,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=101431.0, ans=0.09899494936611666 +2024-08-03 10:52:43,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.45 vs. limit=8.0 +2024-08-03 10:52:57,017 INFO [train.py:1114] (0/4) Epoch 8, batch 2200, loss[loss=0.2278, simple_loss=0.3136, pruned_loss=0.07102, over 13407.00 frames. 
], tot_loss[loss=0.2266, simple_loss=0.3044, pruned_loss=0.07444, over 2645646.45 frames. ], batch size: 39, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:53:04,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=101504.33333333333, ans=0.125 +2024-08-03 10:53:09,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101541.0, ans=0.125 +2024-08-03 10:53:13,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101541.0, ans=0.1 +2024-08-03 10:53:19,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101577.66666666667, ans=0.1 +2024-08-03 10:53:24,188 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:53:27,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=101614.33333333333, ans=0.025 +2024-08-03 10:53:36,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=101651.0, ans=0.0 +2024-08-03 10:53:42,343 INFO [train.py:1114] (0/4) Epoch 8, batch 2250, loss[loss=0.2146, simple_loss=0.2962, pruned_loss=0.06651, over 13360.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3043, pruned_loss=0.07431, over 2642793.31 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:53:44,479 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:53:49,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=101687.66666666667, ans=0.125 +2024-08-03 10:53:52,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.55 vs. limit=22.5 +2024-08-03 10:53:54,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101724.33333333333, ans=0.0 +2024-08-03 10:54:06,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101761.0, ans=0.0 +2024-08-03 10:54:08,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101761.0, ans=0.1 +2024-08-03 10:54:09,244 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.704e+01 1.191e+02 1.378e+02 1.872e+02 3.290e+02, threshold=2.756e+02, percent-clipped=1.0 +2024-08-03 10:54:27,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=101834.33333333333, ans=0.125 +2024-08-03 10:54:39,907 INFO [train.py:1114] (0/4) Epoch 8, batch 2300, loss[loss=0.2417, simple_loss=0.3037, pruned_loss=0.08987, over 13582.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3035, pruned_loss=0.0744, over 2638573.48 frames. 
], batch size: 33, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:54:43,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=101871.0, ans=0.0 +2024-08-03 10:54:55,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.99 vs. limit=15.0 +2024-08-03 10:54:55,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=101907.66666666667, ans=0.2 +2024-08-03 10:55:02,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.51 vs. limit=15.0 +2024-08-03 10:55:03,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.05 vs. limit=15.0 +2024-08-03 10:55:07,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101981.0, ans=0.125 +2024-08-03 10:55:12,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=101981.0, ans=0.0 +2024-08-03 10:55:20,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=102017.66666666667, ans=0.125 +2024-08-03 10:55:22,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102017.66666666667, ans=0.0 +2024-08-03 10:55:24,950 INFO [train.py:1114] (0/4) Epoch 8, batch 2350, loss[loss=0.2029, simple_loss=0.2872, pruned_loss=0.05927, over 13540.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3035, pruned_loss=0.07443, over 2641302.97 frames. ], batch size: 38, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:55:48,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.26 vs. limit=10.0 +2024-08-03 10:55:55,515 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.744e+01 1.287e+02 1.695e+02 2.279e+02 3.908e+02, threshold=3.390e+02, percent-clipped=9.0 +2024-08-03 10:55:55,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102127.66666666667, ans=0.125 +2024-08-03 10:55:55,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=102127.66666666667, ans=0.07 +2024-08-03 10:55:57,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=102164.33333333333, ans=0.0 +2024-08-03 10:56:10,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.27 vs. limit=15.0 +2024-08-03 10:56:16,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.51 vs. 
limit=12.0 +2024-08-03 10:56:23,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=102201.0, ans=0.0 +2024-08-03 10:56:23,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=102201.0, ans=0.125 +2024-08-03 10:56:24,760 INFO [train.py:1114] (0/4) Epoch 8, batch 2400, loss[loss=0.1981, simple_loss=0.2791, pruned_loss=0.05857, over 13527.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3038, pruned_loss=0.07438, over 2642188.01 frames. ], batch size: 35, lr: 1.53e-02, grad_scale: 32.0 +2024-08-03 10:56:29,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=102237.66666666667, ans=0.125 +2024-08-03 10:56:32,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102237.66666666667, ans=0.125 +2024-08-03 10:56:36,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.95 vs. limit=15.0 +2024-08-03 10:56:38,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=102274.33333333333, ans=0.125 +2024-08-03 10:57:01,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.14 vs. limit=15.0 +2024-08-03 10:57:11,127 INFO [train.py:1114] (0/4) Epoch 8, batch 2450, loss[loss=0.2503, simple_loss=0.3267, pruned_loss=0.08697, over 13357.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3047, pruned_loss=0.07493, over 2631683.32 frames. ], batch size: 37, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:57:19,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102457.66666666667, ans=0.125 +2024-08-03 10:57:26,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=102457.66666666667, ans=0.125 +2024-08-03 10:57:26,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102457.66666666667, ans=0.1 +2024-08-03 10:57:40,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.286e+02 1.590e+02 1.975e+02 2.991e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-03 10:57:41,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102531.0, ans=0.125 +2024-08-03 10:57:44,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=102531.0, ans=0.125 +2024-08-03 10:57:46,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=102531.0, ans=10.0 +2024-08-03 10:57:49,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.00 vs. limit=15.0 +2024-08-03 10:57:51,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.23 vs. 
limit=15.0 +2024-08-03 10:57:53,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.19 vs. limit=15.0 +2024-08-03 10:57:59,056 INFO [train.py:1114] (0/4) Epoch 8, batch 2500, loss[loss=0.2434, simple_loss=0.3229, pruned_loss=0.08192, over 13406.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3049, pruned_loss=0.07507, over 2636221.73 frames. ], batch size: 39, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:58:02,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=102604.33333333333, ans=0.0 +2024-08-03 10:58:07,204 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:58:13,315 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-28000.pt +2024-08-03 10:58:34,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102714.33333333333, ans=0.1 +2024-08-03 10:58:36,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=102714.33333333333, ans=0.125 +2024-08-03 10:58:40,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.33 vs. limit=15.0 +2024-08-03 10:58:45,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.89 vs. limit=10.0 +2024-08-03 10:58:50,587 INFO [train.py:1114] (0/4) Epoch 8, batch 2550, loss[loss=0.2024, simple_loss=0.2759, pruned_loss=0.06446, over 13539.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3058, pruned_loss=0.07596, over 2637838.84 frames. ], batch size: 31, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:58:51,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=102787.66666666667, ans=0.0 +2024-08-03 10:59:00,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0 +2024-08-03 10:59:12,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=102861.0, ans=0.5 +2024-08-03 10:59:13,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=102861.0, ans=0.025 +2024-08-03 10:59:15,654 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.849e+01 1.203e+02 1.342e+02 1.554e+02 2.450e+02, threshold=2.684e+02, percent-clipped=0.0 +2024-08-03 10:59:15,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=102897.66666666667, ans=0.0 +2024-08-03 10:59:22,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102897.66666666667, ans=0.125 +2024-08-03 10:59:34,150 INFO [train.py:1114] (0/4) Epoch 8, batch 2600, loss[loss=0.2114, simple_loss=0.2949, pruned_loss=0.06397, over 13571.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3059, pruned_loss=0.0759, over 2635838.47 frames. 
], batch size: 36, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:59:49,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.68 vs. limit=22.5 +2024-08-03 10:59:51,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103044.33333333333, ans=0.125 +2024-08-03 10:59:52,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103044.33333333333, ans=0.125 +2024-08-03 10:59:56,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=103044.33333333333, ans=0.2 +2024-08-03 11:00:01,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=103044.33333333333, ans=0.0 +2024-08-03 11:00:20,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=103117.66666666667, ans=0.125 +2024-08-03 11:00:21,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103117.66666666667, ans=0.125 +2024-08-03 11:00:23,305 INFO [train.py:1114] (0/4) Epoch 8, batch 2650, loss[loss=0.2467, simple_loss=0.328, pruned_loss=0.08267, over 13323.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3068, pruned_loss=0.07636, over 2639272.33 frames. ], batch size: 46, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:00:23,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.18 vs. limit=15.0 +2024-08-03 11:00:36,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.02 vs. limit=22.5 +2024-08-03 11:00:48,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.954e+01 1.189e+02 1.373e+02 1.657e+02 2.856e+02, threshold=2.745e+02, percent-clipped=2.0 +2024-08-03 11:01:03,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=103301.0, ans=0.5 +2024-08-03 11:01:07,302 INFO [train.py:1114] (0/4) Epoch 8, batch 2700, loss[loss=0.2316, simple_loss=0.3102, pruned_loss=0.07649, over 13557.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3063, pruned_loss=0.076, over 2636323.06 frames. ], batch size: 40, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:01:13,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=103337.66666666667, ans=6.0 +2024-08-03 11:01:28,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103411.0, ans=0.125 +2024-08-03 11:01:37,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103447.66666666667, ans=0.125 +2024-08-03 11:01:48,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.24 vs. limit=15.0 +2024-08-03 11:01:51,077 INFO [train.py:1114] (0/4) Epoch 8, batch 2750, loss[loss=0.2013, simple_loss=0.2821, pruned_loss=0.0602, over 13322.00 frames. 
], tot_loss[loss=0.2277, simple_loss=0.3048, pruned_loss=0.07537, over 2633271.90 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:01:57,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=103521.0, ans=0.125 +2024-08-03 11:02:18,842 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.453e+01 1.294e+02 1.512e+02 1.970e+02 3.598e+02, threshold=3.023e+02, percent-clipped=4.0 +2024-08-03 11:02:21,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=103631.0, ans=0.2 +2024-08-03 11:02:28,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=103667.66666666667, ans=0.2 +2024-08-03 11:02:29,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.71 vs. limit=10.0 +2024-08-03 11:02:35,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=103667.66666666667, ans=0.125 +2024-08-03 11:02:37,602 INFO [train.py:1114] (0/4) Epoch 8, batch 2800, loss[loss=0.3168, simple_loss=0.3566, pruned_loss=0.1385, over 9272.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3055, pruned_loss=0.07591, over 2625368.13 frames. ], batch size: 96, lr: 1.52e-02, grad_scale: 32.0 +2024-08-03 11:02:38,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=103704.33333333333, ans=0.0 +2024-08-03 11:02:39,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=103704.33333333333, ans=0.125 +2024-08-03 11:02:41,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.21 vs. limit=15.0 +2024-08-03 11:02:49,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103741.0, ans=0.0 +2024-08-03 11:02:57,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103777.66666666667, ans=0.1 +2024-08-03 11:03:06,571 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:03:10,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=103814.33333333333, ans=0.0 +2024-08-03 11:03:22,230 INFO [train.py:1114] (0/4) Epoch 8, batch 2850, loss[loss=0.197, simple_loss=0.284, pruned_loss=0.05502, over 13355.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3056, pruned_loss=0.07599, over 2619469.73 frames. ], batch size: 35, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:03:26,658 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:03:34,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.96 vs. 
limit=15.0 +2024-08-03 11:03:40,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=103961.0, ans=0.0 +2024-08-03 11:03:47,796 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.266e+02 1.558e+02 2.014e+02 3.574e+02, threshold=3.117e+02, percent-clipped=3.0 +2024-08-03 11:03:49,889 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0 +2024-08-03 11:03:50,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=103997.66666666667, ans=0.0 +2024-08-03 11:03:59,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=104034.33333333333, ans=0.0 +2024-08-03 11:04:04,750 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.85 vs. limit=22.5 +2024-08-03 11:04:05,039 INFO [train.py:1114] (0/4) Epoch 8, batch 2900, loss[loss=0.2359, simple_loss=0.3092, pruned_loss=0.08133, over 13375.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3058, pruned_loss=0.07512, over 2630767.81 frames. ], batch size: 36, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:04:35,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-08-03 11:04:46,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0 +2024-08-03 11:04:48,492 INFO [train.py:1114] (0/4) Epoch 8, batch 2950, loss[loss=0.2276, simple_loss=0.2997, pruned_loss=0.07771, over 13326.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3046, pruned_loss=0.0749, over 2628817.71 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:05:00,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.43 vs. limit=15.0 +2024-08-03 11:05:06,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0 +2024-08-03 11:05:09,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=104327.66666666667, ans=0.0 +2024-08-03 11:05:14,605 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.150e+02 1.316e+02 1.628e+02 4.465e+02, threshold=2.631e+02, percent-clipped=1.0 +2024-08-03 11:05:15,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=104364.33333333333, ans=0.0 +2024-08-03 11:05:20,050 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:05:27,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104401.0, ans=0.125 +2024-08-03 11:05:31,936 INFO [train.py:1114] (0/4) Epoch 8, batch 3000, loss[loss=0.223, simple_loss=0.3058, pruned_loss=0.07012, over 13547.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3044, pruned_loss=0.07493, over 2629536.95 frames. 
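The recurring `WARNING ... Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...` lines describe adaptive gradient clipping. The five quartile values summarize recently observed gradient norms (min, 25%, median, 75%, max), and in every entry above the threshold is `Clipping_scale` times the median, e.g. 2.684e+02 = 2.0 * 1.342e+02 and 3.117e+02 = 2.0 * 1.558e+02. A sketch of that policy follows; the window size and the list-based bookkeeping are assumptions:

```python
import torch

def clip_to_scaled_median(parameters, recent_norms, clipping_scale=2.0,
                          window=128):
    """Clip the global grad norm to clipping_scale * median of recent
    norms; returns the quartiles reported in the WARNING lines."""
    params = [p for p in parameters if p.grad is not None]
    norm = torch.cat([p.grad.reshape(-1) for p in params]).norm()
    recent_norms.append(norm.item())
    del recent_norms[:-window]                      # keep a sliding window
    q = torch.tensor(recent_norms).quantile(
        torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))  # min/25%/median/75%/max
    threshold = clipping_scale * q[2].item()        # e.g. 2.0 * 1.342e+02
    clipped = norm.item() > threshold
    if clipped:
        for p in params:
            p.grad.mul_(threshold / norm)
    return q, threshold, clipped
```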
], batch size: 37, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:05:31,937 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 11:05:42,233 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.1886, simple_loss=0.2887, pruned_loss=0.04428, over 944034.00 frames. +2024-08-03 11:05:42,234 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 11:05:46,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104437.66666666667, ans=0.125 +2024-08-03 11:06:07,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.01 vs. limit=22.5 +2024-08-03 11:06:08,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=104547.66666666667, ans=0.125 +2024-08-03 11:06:25,425 INFO [train.py:1114] (0/4) Epoch 8, batch 3050, loss[loss=0.2358, simple_loss=0.3069, pruned_loss=0.08236, over 13532.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3051, pruned_loss=0.07501, over 2626799.24 frames. ], batch size: 35, lr: 1.52e-02, grad_scale: 8.0 +2024-08-03 11:06:36,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=104657.66666666667, ans=0.125 +2024-08-03 11:06:36,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=104657.66666666667, ans=0.125 +2024-08-03 11:06:44,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.08 vs. limit=15.0 +2024-08-03 11:06:44,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.14 vs. limit=15.0 +2024-08-03 11:06:47,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104694.33333333333, ans=0.125 +2024-08-03 11:06:54,264 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.678e+01 1.159e+02 1.376e+02 1.941e+02 3.361e+02, threshold=2.751e+02, percent-clipped=3.0 +2024-08-03 11:07:10,690 INFO [train.py:1114] (0/4) Epoch 8, batch 3100, loss[loss=0.2454, simple_loss=0.3231, pruned_loss=0.08382, over 13327.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3049, pruned_loss=0.07492, over 2626509.51 frames. ], batch size: 46, lr: 1.52e-02, grad_scale: 8.0 +2024-08-03 11:07:28,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.61 vs. limit=10.0 +2024-08-03 11:07:30,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=104877.66666666667, ans=0.07 +2024-08-03 11:07:33,090 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.67 vs. 
limit=22.5 +2024-08-03 11:07:39,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=104914.33333333333, ans=0.125 +2024-08-03 11:07:47,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=104951.0, ans=0.0 +2024-08-03 11:07:53,830 INFO [train.py:1114] (0/4) Epoch 8, batch 3150, loss[loss=0.2163, simple_loss=0.3025, pruned_loss=0.06512, over 13082.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3049, pruned_loss=0.07483, over 2627973.33 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 8.0 +2024-08-03 11:08:20,101 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.438e+02 1.843e+02 2.666e+02 3.777e+02, threshold=3.687e+02, percent-clipped=21.0 +2024-08-03 11:08:36,237 INFO [train.py:1114] (0/4) Epoch 8, batch 3200, loss[loss=0.2229, simple_loss=0.2991, pruned_loss=0.07336, over 13552.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3045, pruned_loss=0.07481, over 2632899.54 frames. ], batch size: 37, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:08:45,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.40 vs. limit=15.0 +2024-08-03 11:08:49,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=105207.66666666667, ans=0.0 +2024-08-03 11:08:56,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=105244.33333333333, ans=0.09899494936611666 +2024-08-03 11:09:06,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105281.0, ans=0.1 +2024-08-03 11:09:19,476 INFO [train.py:1114] (0/4) Epoch 8, batch 3250, loss[loss=0.2328, simple_loss=0.3135, pruned_loss=0.07608, over 13386.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3056, pruned_loss=0.07487, over 2638031.31 frames. ], batch size: 38, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:09:32,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=105391.0, ans=0.025 +2024-08-03 11:09:33,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105391.0, ans=0.1 +2024-08-03 11:09:47,365 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.799e+01 1.281e+02 1.591e+02 1.983e+02 2.904e+02, threshold=3.182e+02, percent-clipped=0.0 +2024-08-03 11:09:53,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105464.33333333333, ans=0.125 +2024-08-03 11:10:03,919 INFO [train.py:1114] (0/4) Epoch 8, batch 3300, loss[loss=0.2499, simple_loss=0.325, pruned_loss=0.08737, over 12941.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3043, pruned_loss=0.0745, over 2639001.90 frames. 
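The `Computing validation loss` / `validation: loss=... over 944034.00 frames` / `Maximum memory allocated so far is 10018MB` triplet a few entries above is a mid-training evaluation pass over the full validation set. A sketch of that step; the `model(batch) -> (loss, num_frames)` interface is a hypothetical stand-in for the recipe's actual loss computation:

```python
import torch

@torch.no_grad()
def compute_validation_loss(model, valid_loader, device="cuda"):
    """Frame-weighted validation loss plus the peak-memory report."""
    model.eval()
    loss_sum, frames = 0.0, 0.0
    for batch in valid_loader:
        loss, num_frames = model(batch)   # hypothetical interface
        loss_sum += loss.item() * num_frames
        frames += num_frames
    model.train()
    mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={loss_sum / frames:.4f}, over {frames:.2f} frames")
    print(f"Maximum memory allocated so far is {mem_mb}MB")
    return loss_sum / frames
```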
], batch size: 52, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:10:10,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=105537.66666666667, ans=0.2 +2024-08-03 11:10:11,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=105574.33333333333, ans=0.125 +2024-08-03 11:10:12,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105574.33333333333, ans=0.09899494936611666 +2024-08-03 11:10:44,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.41 vs. limit=15.0 +2024-08-03 11:10:49,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=105684.33333333333, ans=0.05 +2024-08-03 11:10:51,474 INFO [train.py:1114] (0/4) Epoch 8, batch 3350, loss[loss=0.2509, simple_loss=0.3285, pruned_loss=0.08669, over 13063.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.305, pruned_loss=0.07474, over 2628442.28 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:11:15,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105757.66666666667, ans=0.0 +2024-08-03 11:11:41,999 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.800e+01 1.191e+02 1.380e+02 1.620e+02 2.699e+02, threshold=2.759e+02, percent-clipped=0.0 +2024-08-03 11:11:54,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-08-03 11:12:00,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105867.66666666667, ans=0.1 +2024-08-03 11:12:09,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=105904.33333333333, ans=0.2 +2024-08-03 11:12:10,264 INFO [train.py:1114] (0/4) Epoch 8, batch 3400, loss[loss=0.1916, simple_loss=0.2676, pruned_loss=0.05776, over 13551.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3051, pruned_loss=0.07517, over 2625514.62 frames. 
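Each training entry pairs the current batch's loss with a `tot_loss[...]` aggregate weighted by frame counts. Note that per-batch frame counts are integers (e.g. `over 13406.00 frames`) while the totals are fractional (`over 2636221.73 frames`), which suggests the real tracker also decays old batches rather than summing forever; the simplified sketch below keeps a plain cumulative frame-weighted average:

```python
class FrameWeightedLoss:
    """Running frame-weighted loss; a simplified stand-in for the
    aggregate printed as "tot_loss[... over N frames]"."""

    def __init__(self):
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: float) -> None:
        self.loss_sum += loss * num_frames
        self.frames += num_frames

    @property
    def loss(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)


tot = FrameWeightedLoss()
tot.update(0.2434, 13406.0)  # per-batch numbers from the entries above
tot.update(0.2024, 13539.0)
print(f"tot_loss[loss={tot.loss:.4f}, over {tot.frames:.2f} frames]")
```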
], batch size: 31, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:12:16,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=105904.33333333333, ans=0.0 +2024-08-03 11:12:43,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=105977.66666666667, ans=0.125 +2024-08-03 11:12:50,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=106014.33333333333, ans=0.125 +2024-08-03 11:12:54,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=106014.33333333333, ans=0.5 +2024-08-03 11:12:56,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106014.33333333333, ans=0.125 +2024-08-03 11:13:00,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=106051.0, ans=0.125 +2024-08-03 11:13:06,193 INFO [train.py:1114] (0/4) Epoch 8, batch 3450, loss[loss=0.2593, simple_loss=0.3348, pruned_loss=0.09192, over 12972.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3046, pruned_loss=0.07458, over 2628036.52 frames. ], batch size: 52, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:13:11,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106087.66666666667, ans=0.125 +2024-08-03 11:13:14,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0 +2024-08-03 11:13:17,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106124.33333333333, ans=0.125 +2024-08-03 11:13:20,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.28 vs. limit=15.0 +2024-08-03 11:13:21,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=106124.33333333333, ans=0.025 +2024-08-03 11:13:28,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106161.0, ans=0.1 +2024-08-03 11:13:34,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106197.66666666667, ans=0.125 +2024-08-03 11:13:35,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.667e+01 1.192e+02 1.350e+02 1.586e+02 3.469e+02, threshold=2.701e+02, percent-clipped=1.0 +2024-08-03 11:13:36,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=106197.66666666667, ans=0.2 +2024-08-03 11:13:54,437 INFO [train.py:1114] (0/4) Epoch 8, batch 3500, loss[loss=0.2486, simple_loss=0.3105, pruned_loss=0.09333, over 13534.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3038, pruned_loss=0.07467, over 2630525.58 frames. 
], batch size: 34, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:13:54,588 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:14:13,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106344.33333333333, ans=0.125 +2024-08-03 11:14:13,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=106344.33333333333, ans=0.0 +2024-08-03 11:14:20,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=106381.0, ans=0.0 +2024-08-03 11:14:27,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106381.0, ans=0.1 +2024-08-03 11:14:42,320 INFO [train.py:1114] (0/4) Epoch 8, batch 3550, loss[loss=0.2202, simple_loss=0.3038, pruned_loss=0.06834, over 12502.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3065, pruned_loss=0.07639, over 2629432.49 frames. ], batch size: 58, lr: 1.50e-02, grad_scale: 16.0 +2024-08-03 11:14:42,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=106454.33333333333, ans=0.0 +2024-08-03 11:15:01,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=106491.0, ans=0.125 +2024-08-03 11:15:26,481 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.312e+02 1.471e+02 1.710e+02 3.286e+02, threshold=2.943e+02, percent-clipped=3.0 +2024-08-03 11:15:37,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106601.0, ans=0.1 +2024-08-03 11:15:48,853 INFO [train.py:1114] (0/4) Epoch 8, batch 3600, loss[loss=0.2782, simple_loss=0.3377, pruned_loss=0.1093, over 9183.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3116, pruned_loss=0.08142, over 2486827.63 frames. ], batch size: 96, lr: 1.50e-02, grad_scale: 32.0 +2024-08-03 11:15:49,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106637.66666666667, ans=0.125 +2024-08-03 11:15:50,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=106637.66666666667, ans=0.025 +2024-08-03 11:15:57,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106674.33333333333, ans=0.125 +2024-08-03 11:16:04,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.54 vs. limit=8.0 +2024-08-03 11:16:09,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106711.0, ans=0.125 +2024-08-03 11:16:25,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=106747.66666666667, ans=0.025 +2024-08-03 11:16:28,469 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-8.pt +2024-08-03 11:17:27,613 INFO [train.py:1114] (0/4) Epoch 9, batch 0, loss[loss=0.1908, simple_loss=0.2716, pruned_loss=0.05504, over 13350.00 frames. 
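Two kinds of checkpoints appear in this stretch: the batch-count file `checkpoint-28000.pt` saved earlier and the end-of-epoch `epoch-8.pt` written just before Epoch 9 begins. A sketch of the save step; the exact fields icefall stores are assumptions here:

```python
import torch

def save_checkpoint(filename, model, optimizer=None, scheduler=None,
                    scaler=None, **extra):
    """Write a single .pt file such as checkpoint-28000.pt or epoch-8.pt.
    The stored field names are illustrative, not icefall's exact set."""
    checkpoint = {"model": model.state_dict()}
    if optimizer is not None:
        checkpoint["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        checkpoint["scheduler"] = scheduler.state_dict()
    if scaler is not None:                 # AMP grad-scaler state
        checkpoint["grad_scaler"] = scaler.state_dict()
    checkpoint.update(extra)               # e.g. epoch=8, batch_idx=...
    torch.save(checkpoint, filename)

# e.g. save_checkpoint("exp/epoch-8.pt", model, optimizer, epoch=8)
```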
], tot_loss[loss=0.1908, simple_loss=0.2716, pruned_loss=0.05504, over 13350.00 frames. ], batch size: 33, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:17:27,614 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 11:17:39,635 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.1935, simple_loss=0.2948, pruned_loss=0.04614, over 944034.00 frames. +2024-08-03 11:17:39,635 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 11:17:42,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106788.0, ans=0.1 +2024-08-03 11:17:47,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.87 vs. limit=22.5 +2024-08-03 11:17:50,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.25 vs. limit=22.5 +2024-08-03 11:17:58,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=106861.33333333333, ans=0.125 +2024-08-03 11:17:59,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=106861.33333333333, ans=0.125 +2024-08-03 11:18:15,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106898.0, ans=0.125 +2024-08-03 11:18:18,801 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.291e+02 1.434e+02 1.801e+02 3.339e+02, threshold=2.868e+02, percent-clipped=2.0 +2024-08-03 11:18:23,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=106934.66666666667, ans=0.1 +2024-08-03 11:18:27,054 INFO [train.py:1114] (0/4) Epoch 9, batch 50, loss[loss=0.1852, simple_loss=0.2671, pruned_loss=0.0516, over 13434.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3092, pruned_loss=0.07759, over 578075.38 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:18:30,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=106971.33333333333, ans=0.125 +2024-08-03 11:18:34,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=106971.33333333333, ans=0.0 +2024-08-03 11:18:36,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=106971.33333333333, ans=0.2 +2024-08-03 11:18:43,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=107008.0, ans=0.025 +2024-08-03 11:18:56,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=107081.33333333333, ans=0.0 +2024-08-03 11:19:16,440 INFO [train.py:1114] (0/4) Epoch 9, batch 100, loss[loss=0.2362, simple_loss=0.3043, pruned_loss=0.08405, over 13542.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3087, pruned_loss=0.07652, over 1025804.89 frames. 
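The `grad_scale` field at the end of each training entry (8.0, 16.0, and 32.0 in this region) is the dynamic loss-scaling factor used in mixed-precision training: it is halved when an overflow is detected and grown back after a run of clean steps, which is exactly the power-of-two drift visible in the log. A self-contained sketch using PyTorch's stock `GradScaler`; icefall folds this logic into its own optimizer code, so the knob values here are illustrative:

```python
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
model = torch.nn.Linear(16, 1).to(device)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.cuda.amp.GradScaler(init_scale=16.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=2000,
                                   enabled=(device == "cuda"))

for step in range(5):
    x = torch.randn(8, 16, device=device)
    opt.zero_grad()
    with torch.autocast(device_type=device, enabled=(device == "cuda")):
        loss = model(x).pow(2).mean()
    scaler.scale(loss).backward()
    scaler.step(opt)    # skipped automatically if inf/NaN grads are found
    scaler.update()     # halve on overflow, double after clean steps
    print(step, scaler.get_scale())   # the value logged as "grad_scale"
```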
], batch size: 35, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:19:30,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=107191.33333333333, ans=0.125 +2024-08-03 11:19:30,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=107191.33333333333, ans=0.0 +2024-08-03 11:19:38,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=107228.0, ans=0.1 +2024-08-03 11:19:42,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=107228.0, ans=0.0 +2024-08-03 11:19:48,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.00 vs. limit=15.0 +2024-08-03 11:19:54,662 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.233e+02 1.427e+02 1.918e+02 3.132e+02, threshold=2.853e+02, percent-clipped=1.0 +2024-08-03 11:19:54,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=107301.33333333333, ans=0.125 +2024-08-03 11:19:56,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=107301.33333333333, ans=0.05 +2024-08-03 11:20:04,724 INFO [train.py:1114] (0/4) Epoch 9, batch 150, loss[loss=0.2089, simple_loss=0.2833, pruned_loss=0.06724, over 13423.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3038, pruned_loss=0.07374, over 1386499.48 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:20:09,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107338.0, ans=0.125 +2024-08-03 11:20:33,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107448.0, ans=0.125 +2024-08-03 11:20:43,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.34 vs. limit=15.0 +2024-08-03 11:20:56,483 INFO [train.py:1114] (0/4) Epoch 9, batch 200, loss[loss=0.2403, simple_loss=0.3175, pruned_loss=0.08157, over 12394.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3022, pruned_loss=0.07217, over 1665312.27 frames. 
], batch size: 58, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:20:59,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=107521.33333333333, ans=0.125 +2024-08-03 11:21:04,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107521.33333333333, ans=0.125 +2024-08-03 11:21:15,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=107558.0, ans=0.0 +2024-08-03 11:21:17,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=107558.0, ans=0.0 +2024-08-03 11:21:22,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=107594.66666666667, ans=0.2 +2024-08-03 11:21:37,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=107668.0, ans=0.125 +2024-08-03 11:21:39,288 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.622e+01 1.326e+02 1.622e+02 2.251e+02 3.498e+02, threshold=3.245e+02, percent-clipped=9.0 +2024-08-03 11:21:46,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=107668.0, ans=0.125 +2024-08-03 11:21:47,903 INFO [train.py:1114] (0/4) Epoch 9, batch 250, loss[loss=0.2261, simple_loss=0.3152, pruned_loss=0.06846, over 13377.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3032, pruned_loss=0.07265, over 1883130.31 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:21:49,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107704.66666666667, ans=0.1 +2024-08-03 11:21:53,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=107704.66666666667, ans=0.0 +2024-08-03 11:21:54,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=107704.66666666667, ans=0.0 +2024-08-03 11:22:03,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=107741.33333333333, ans=0.125 +2024-08-03 11:22:07,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=107778.0, ans=0.0 +2024-08-03 11:22:08,134 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.98 vs. 
limit=15.0 +2024-08-03 11:22:19,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107814.66666666667, ans=0.125 +2024-08-03 11:22:24,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107814.66666666667, ans=0.125 +2024-08-03 11:22:35,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=107851.33333333333, ans=0.125 +2024-08-03 11:22:35,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=107851.33333333333, ans=0.0 +2024-08-03 11:22:38,778 INFO [train.py:1114] (0/4) Epoch 9, batch 300, loss[loss=0.2297, simple_loss=0.3117, pruned_loss=0.07383, over 13430.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.302, pruned_loss=0.07225, over 2050353.66 frames. ], batch size: 42, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:22:53,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.18 vs. limit=10.0 +2024-08-03 11:22:57,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107961.33333333333, ans=0.125 +2024-08-03 11:23:04,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=107961.33333333333, ans=0.0 +2024-08-03 11:23:05,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0 +2024-08-03 11:23:06,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107961.33333333333, ans=0.1 +2024-08-03 11:23:12,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=107998.0, ans=0.025 +2024-08-03 11:23:12,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107998.0, ans=0.1 +2024-08-03 11:23:18,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.922e+01 1.191e+02 1.365e+02 1.684e+02 2.533e+02, threshold=2.730e+02, percent-clipped=0.0 +2024-08-03 11:23:27,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=108034.66666666667, ans=0.0 +2024-08-03 11:23:32,772 INFO [train.py:1114] (0/4) Epoch 9, batch 350, loss[loss=0.1825, simple_loss=0.2652, pruned_loss=0.04987, over 13587.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3017, pruned_loss=0.07159, over 2181719.53 frames. ], batch size: 33, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:23:36,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=108071.33333333333, ans=0.2 +2024-08-03 11:24:44,596 INFO [train.py:1114] (0/4) Epoch 9, batch 400, loss[loss=0.2352, simple_loss=0.3148, pruned_loss=0.07785, over 13365.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3008, pruned_loss=0.07133, over 2286237.69 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:24:45,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.26 vs. 
limit=15.0 +2024-08-03 11:25:14,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108254.66666666667, ans=0.0 +2024-08-03 11:25:40,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.69 vs. limit=12.0 +2024-08-03 11:25:45,636 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0 +2024-08-03 11:25:46,047 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.902e+01 1.178e+02 1.332e+02 1.607e+02 2.662e+02, threshold=2.664e+02, percent-clipped=0.0 +2024-08-03 11:25:50,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=108401.33333333333, ans=0.04949747468305833 +2024-08-03 11:26:00,581 INFO [train.py:1114] (0/4) Epoch 9, batch 450, loss[loss=0.2583, simple_loss=0.3325, pruned_loss=0.09207, over 13570.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3014, pruned_loss=0.07177, over 2360258.95 frames. ], batch size: 38, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:26:01,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108438.0, ans=0.125 +2024-08-03 11:26:01,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=108438.0, ans=0.125 +2024-08-03 11:26:05,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=108438.0, ans=0.125 +2024-08-03 11:26:16,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=108474.66666666667, ans=0.025 +2024-08-03 11:26:29,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=108548.0, ans=0.09899494936611666 +2024-08-03 11:26:31,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=108548.0, ans=0.125 +2024-08-03 11:26:44,315 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:26:46,788 INFO [train.py:1114] (0/4) Epoch 9, batch 500, loss[loss=0.2434, simple_loss=0.3198, pruned_loss=0.08351, over 13437.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3, pruned_loss=0.07102, over 2425087.43 frames. ], batch size: 43, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:26:47,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108621.33333333333, ans=0.0 +2024-08-03 11:26:50,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.76 vs. limit=5.0 +2024-08-03 11:27:04,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108658.0, ans=0.125 +2024-08-03 11:27:15,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.92 vs. 
limit=15.0 +2024-08-03 11:27:30,448 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.278e+01 1.224e+02 1.411e+02 1.818e+02 3.084e+02, threshold=2.822e+02, percent-clipped=2.0 +2024-08-03 11:27:37,712 INFO [train.py:1114] (0/4) Epoch 9, batch 550, loss[loss=0.2629, simple_loss=0.341, pruned_loss=0.09236, over 13023.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3, pruned_loss=0.07133, over 2467042.29 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 16.0 +2024-08-03 11:28:12,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108878.0, ans=0.125 +2024-08-03 11:28:20,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=15.0 +2024-08-03 11:28:40,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.58 vs. limit=15.0 +2024-08-03 11:28:43,893 INFO [train.py:1114] (0/4) Epoch 9, batch 600, loss[loss=0.2583, simple_loss=0.3305, pruned_loss=0.09307, over 13319.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3002, pruned_loss=0.07147, over 2506739.08 frames. ], batch size: 46, lr: 1.41e-02, grad_scale: 16.0 +2024-08-03 11:28:48,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.68 vs. limit=22.5 +2024-08-03 11:29:03,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109061.33333333333, ans=0.125 +2024-08-03 11:29:16,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.61 vs. limit=15.0 +2024-08-03 11:29:21,679 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.197e+02 1.407e+02 1.902e+02 4.020e+02, threshold=2.813e+02, percent-clipped=3.0 +2024-08-03 11:29:26,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109134.66666666667, ans=0.125 +2024-08-03 11:29:28,843 INFO [train.py:1114] (0/4) Epoch 9, batch 650, loss[loss=0.187, simple_loss=0.275, pruned_loss=0.04944, over 13537.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2996, pruned_loss=0.07114, over 2541971.55 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 16.0 +2024-08-03 11:29:47,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0 +2024-08-03 11:29:48,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109244.66666666667, ans=0.0 +2024-08-03 11:29:57,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109281.33333333333, ans=0.125 +2024-08-03 11:30:05,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.10 vs. 
limit=15.0 +2024-08-03 11:30:11,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=109318.0, ans=0.0 +2024-08-03 11:30:15,772 INFO [train.py:1114] (0/4) Epoch 9, batch 700, loss[loss=0.1867, simple_loss=0.2693, pruned_loss=0.05208, over 13532.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2998, pruned_loss=0.071, over 2564063.01 frames. ], batch size: 35, lr: 1.41e-02, grad_scale: 8.0 +2024-08-03 11:30:22,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=109354.66666666667, ans=0.125 +2024-08-03 11:30:25,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=109391.33333333333, ans=0.125 +2024-08-03 11:30:34,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=109428.0, ans=0.2 +2024-08-03 11:30:38,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109428.0, ans=0.0 +2024-08-03 11:30:56,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.372e+01 1.239e+02 1.392e+02 1.880e+02 3.301e+02, threshold=2.784e+02, percent-clipped=6.0 +2024-08-03 11:31:03,195 INFO [train.py:1114] (0/4) Epoch 9, batch 750, loss[loss=0.2129, simple_loss=0.2955, pruned_loss=0.06508, over 13360.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2991, pruned_loss=0.07072, over 2580984.82 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 8.0 +2024-08-03 11:31:21,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=109574.66666666667, ans=0.125 +2024-08-03 11:31:53,674 INFO [train.py:1114] (0/4) Epoch 9, batch 800, loss[loss=0.197, simple_loss=0.2769, pruned_loss=0.05856, over 13357.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2984, pruned_loss=0.07015, over 2596678.19 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:31:57,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109721.33333333333, ans=0.1 +2024-08-03 11:31:58,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=109721.33333333333, ans=0.125 +2024-08-03 11:32:34,396 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.676e+01 1.153e+02 1.323e+02 1.787e+02 2.891e+02, threshold=2.646e+02, percent-clipped=1.0 +2024-08-03 11:32:40,825 INFO [train.py:1114] (0/4) Epoch 9, batch 850, loss[loss=0.216, simple_loss=0.3009, pruned_loss=0.06551, over 13307.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2978, pruned_loss=0.06975, over 2609311.22 frames. 
], batch size: 40, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:32:47,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109904.66666666667, ans=0.0 +2024-08-03 11:32:49,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109941.33333333333, ans=0.125 +2024-08-03 11:33:05,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109978.0, ans=0.125 +2024-08-03 11:33:26,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-08-03 11:33:26,750 INFO [train.py:1114] (0/4) Epoch 9, batch 900, loss[loss=0.21, simple_loss=0.2825, pruned_loss=0.06876, over 13357.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2987, pruned_loss=0.0707, over 2611539.42 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:33:59,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=110198.0, ans=0.0 +2024-08-03 11:34:05,774 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.134e+02 1.308e+02 1.698e+02 3.183e+02, threshold=2.616e+02, percent-clipped=1.0 +2024-08-03 11:38:02,442 INFO [train.py:1114] (0/4) Epoch 9, batch 950, loss[loss=0.2127, simple_loss=0.2846, pruned_loss=0.07044, over 13525.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.299, pruned_loss=0.07089, over 2612005.31 frames. ], batch size: 34, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:46:14,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=110271.33333333333, ans=0.2 +2024-08-03 11:46:15,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=110271.33333333333, ans=0.2 +2024-08-03 11:46:18,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110308.0, ans=0.1 +2024-08-03 11:47:44,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-08-03 11:55:59,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=110381.33333333333, ans=0.125 +2024-08-03 11:57:10,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=110381.33333333333, ans=0.0 +2024-08-03 11:57:24,469 INFO [train.py:1114] (0/4) Epoch 9, batch 1000, loss[loss=0.1965, simple_loss=0.2791, pruned_loss=0.05693, over 13358.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2991, pruned_loss=0.07093, over 2610052.84 frames. 
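The `lr:` field decays smoothly within an epoch (1.53e-02 down to 1.39e-02 across this span) and drops discretely at the epoch boundary (1.50e-02 at the end of Epoch 8, 1.42e-02 at the start of Epoch 9). That shape matches a schedule with independent batch and epoch decay terms, such as icefall's Eden scheduler; a sketch of that functional form follows, with the constants (`lr_batches`, `lr_epochs`, the base LR) as illustrative values rather than this run's settings:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    """Eden-style schedule: smooth decay in batch count times smooth
    decay in epoch, giving within-epoch drift plus a drop at each
    epoch boundary, as in the "lr:" fields above."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor


print(eden_lr(0.045, batch=106788, epoch=8.0))  # just before the boundary
print(eden_lr(0.045, batch=106788, epoch=9.0))  # lower, as in the log
```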
], batch size: 35, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:58:02,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=110454.66666666667, ans=15.0 +2024-08-03 11:58:04,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110454.66666666667, ans=0.125 +2024-08-03 11:58:26,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110528.0, ans=0.1 +2024-08-03 11:59:06,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.51 vs. limit=15.0 +2024-08-03 11:59:16,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=110564.66666666667, ans=0.0 +2024-08-03 12:02:11,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=110601.33333333333, ans=0.04949747468305833 +2024-08-03 12:02:14,654 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.417e+01 1.320e+02 1.683e+02 2.294e+02 6.382e+02, threshold=3.366e+02, percent-clipped=18.0 +2024-08-03 12:02:46,969 INFO [train.py:1114] (0/4) Epoch 9, batch 1050, loss[loss=0.223, simple_loss=0.3061, pruned_loss=0.06992, over 13574.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2988, pruned_loss=0.07105, over 2614419.93 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 12:03:01,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110638.0, ans=0.125 +2024-08-03 12:03:07,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=110674.66666666667, ans=0.0 +2024-08-03 12:03:30,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110748.0, ans=0.1 +2024-08-03 12:03:31,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=110748.0, ans=0.125 +2024-08-03 12:03:41,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110784.66666666667, ans=0.1 +2024-08-03 12:03:43,589 INFO [train.py:1114] (0/4) Epoch 9, batch 1100, loss[loss=0.2284, simple_loss=0.301, pruned_loss=0.07788, over 13558.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2997, pruned_loss=0.07168, over 2619058.52 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 12:03:56,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=110858.0, ans=0.0 +2024-08-03 12:04:07,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.83 vs. 
limit=22.5 +2024-08-03 12:04:07,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=110894.66666666667, ans=6.0 +2024-08-03 12:04:16,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110931.33333333333, ans=0.125 +2024-08-03 12:04:18,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=110931.33333333333, ans=0.125 +2024-08-03 12:04:23,173 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.596e+01 1.248e+02 1.447e+02 1.784e+02 2.947e+02, threshold=2.893e+02, percent-clipped=0.0 +2024-08-03 12:04:28,550 INFO [train.py:1114] (0/4) Epoch 9, batch 1150, loss[loss=0.2283, simple_loss=0.3005, pruned_loss=0.07812, over 13560.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2994, pruned_loss=0.07154, over 2617524.77 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 8.0 +2024-08-03 12:04:32,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111004.66666666667, ans=0.125 +2024-08-03 12:04:55,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.35 vs. limit=15.0 +2024-08-03 12:05:00,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111114.66666666667, ans=0.0 +2024-08-03 12:05:11,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111151.33333333333, ans=0.125 +2024-08-03 12:05:17,669 INFO [train.py:1114] (0/4) Epoch 9, batch 1200, loss[loss=0.215, simple_loss=0.3024, pruned_loss=0.06376, over 13572.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2998, pruned_loss=0.07129, over 2614847.77 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 12:05:17,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=111188.0, ans=0.1 +2024-08-03 12:05:21,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=111188.0, ans=0.2 +2024-08-03 12:05:24,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=111188.0, ans=0.125 +2024-08-03 12:05:29,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=111224.66666666667, ans=0.05 +2024-08-03 12:05:30,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=111224.66666666667, ans=0.125 +2024-08-03 12:05:43,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=111261.33333333333, ans=0.5 +2024-08-03 12:05:44,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.51 vs. 
limit=15.0 +2024-08-03 12:05:54,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111298.0, ans=0.125 +2024-08-03 12:05:55,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.65 vs. limit=10.0 +2024-08-03 12:06:00,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=111334.66666666667, ans=0.2 +2024-08-03 12:06:01,837 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.516e+01 1.186e+02 1.386e+02 1.604e+02 2.506e+02, threshold=2.772e+02, percent-clipped=0.0 +2024-08-03 12:06:06,348 INFO [train.py:1114] (0/4) Epoch 9, batch 1250, loss[loss=0.2397, simple_loss=0.3132, pruned_loss=0.08314, over 13448.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2997, pruned_loss=0.07064, over 2626985.03 frames. ], batch size: 42, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:06:31,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=111444.66666666667, ans=22.5 +2024-08-03 12:06:34,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. limit=6.0 +2024-08-03 12:06:45,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=111518.0, ans=0.05 +2024-08-03 12:06:53,530 INFO [train.py:1114] (0/4) Epoch 9, batch 1300, loss[loss=0.2361, simple_loss=0.3204, pruned_loss=0.07593, over 12811.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2993, pruned_loss=0.07045, over 2630720.07 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:06:55,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.69 vs. limit=10.0 +2024-08-03 12:06:58,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=111554.66666666667, ans=0.2 +2024-08-03 12:07:09,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=111591.33333333333, ans=0.0 +2024-08-03 12:07:20,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=111664.66666666667, ans=0.2 +2024-08-03 12:07:21,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.27 vs. limit=15.0 +2024-08-03 12:07:52,279 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.616e+01 1.204e+02 1.402e+02 1.778e+02 3.607e+02, threshold=2.805e+02, percent-clipped=3.0 +2024-08-03 12:07:56,906 INFO [train.py:1114] (0/4) Epoch 9, batch 1350, loss[loss=0.2039, simple_loss=0.2936, pruned_loss=0.05706, over 13546.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2996, pruned_loss=0.07055, over 2636939.87 frames. 
], batch size: 37, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:08:05,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=111774.66666666667, ans=0.0 +2024-08-03 12:08:08,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111774.66666666667, ans=0.125 +2024-08-03 12:08:11,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=111774.66666666667, ans=0.0 +2024-08-03 12:08:14,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111774.66666666667, ans=0.125 +2024-08-03 12:08:29,917 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:08:29,973 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:08:45,489 INFO [train.py:1114] (0/4) Epoch 9, batch 1400, loss[loss=0.1929, simple_loss=0.2678, pruned_loss=0.05904, over 13258.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2994, pruned_loss=0.07051, over 2641214.09 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:08:53,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111958.0, ans=0.125 +2024-08-03 12:08:55,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=111958.0, ans=0.0 +2024-08-03 12:10:25,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=112068.0, ans=0.125 +2024-08-03 12:10:29,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.189e+02 1.393e+02 1.717e+02 2.790e+02, threshold=2.787e+02, percent-clipped=0.0 +2024-08-03 12:10:41,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=112068.0, ans=0.04949747468305833 +2024-08-03 12:10:45,221 INFO [train.py:1114] (0/4) Epoch 9, batch 1450, loss[loss=0.2438, simple_loss=0.321, pruned_loss=0.08328, over 13422.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3, pruned_loss=0.07059, over 2640061.73 frames. ], batch size: 43, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:10:55,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112104.66666666667, ans=0.1 +2024-08-03 12:11:12,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=112104.66666666667, ans=0.025 +2024-08-03 12:11:47,097 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.90 vs. limit=22.5 +2024-08-03 12:12:01,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.77 vs. limit=22.5 +2024-08-03 12:12:29,515 INFO [train.py:1114] (0/4) Epoch 9, batch 1500, loss[loss=0.2153, simple_loss=0.2966, pruned_loss=0.06703, over 13397.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3004, pruned_loss=0.07089, over 2640074.69 frames. 
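A note on the `WithLoss: name=...self_attn_weights, loss-sum=0.000e+00` lines: they report an auxiliary penalty attached directly to the attention-weight tensors, and a sum of zero means the weights are currently inside the allowed region. The sketch below illustrates the general attach-a-loss-in-backward pattern only; it is not icefall's implementation, and `attention_penalty` is a hypothetical penalty chosen so the printed sum is usually zero:

```python
import torch

def attention_penalty(w: torch.Tensor) -> torch.Tensor:
    # Hypothetical penalty: nonzero only when values leave a safe range,
    # matching the frequently zero "loss-sum" reports above.
    return torch.relu(w.abs() - 10.0).sum()

class WithAuxLoss(torch.autograd.Function):
    """Identity in forward; backward additionally injects the gradient
    of an auxiliary loss computed from the saved tensor."""

    @staticmethod
    def forward(ctx, w, scale):
        ctx.save_for_backward(w)
        ctx.scale = scale
        return w

    @staticmethod
    def backward(ctx, grad_out):
        (w,) = ctx.saved_tensors
        with torch.enable_grad():
            wd = w.detach().requires_grad_(True)
            loss = attention_penalty(wd)
            print(f"loss-sum={loss.item():.3e}")   # the logged quantity
            (aux_grad,) = torch.autograd.grad(loss, wd)
        return grad_out + ctx.scale * aux_grad, None

w = torch.randn(4, 8, requires_grad=True)
WithAuxLoss.apply(w, 1.0e-04).sum().backward()  # prints loss-sum=0.000e+00
```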
], batch size: 39, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:12:32,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112288.0, ans=0.1 +2024-08-03 12:12:34,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.00 vs. limit=22.5 +2024-08-03 12:12:37,099 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:12:49,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=112361.33333333333, ans=0.025 +2024-08-03 12:12:50,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=112361.33333333333, ans=0.125 +2024-08-03 12:12:59,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=112398.0, ans=0.0 +2024-08-03 12:13:03,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112398.0, ans=0.125 +2024-08-03 12:13:07,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=112434.66666666667, ans=0.07 +2024-08-03 12:13:10,792 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.784e+01 1.196e+02 1.437e+02 1.780e+02 2.962e+02, threshold=2.875e+02, percent-clipped=1.0 +2024-08-03 12:13:15,293 INFO [train.py:1114] (0/4) Epoch 9, batch 1550, loss[loss=0.2118, simple_loss=0.3027, pruned_loss=0.06051, over 13398.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3001, pruned_loss=0.07078, over 2630564.42 frames. ], batch size: 41, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:13:17,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=112471.33333333333, ans=0.05 +2024-08-03 12:13:22,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112471.33333333333, ans=0.125 +2024-08-03 12:13:28,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=112508.0, ans=0.05 +2024-08-03 12:13:29,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=112508.0, ans=0.125 +2024-08-03 12:13:54,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=112618.0, ans=0.95 +2024-08-03 12:13:58,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112618.0, ans=0.125 +2024-08-03 12:14:00,234 INFO [train.py:1114] (0/4) Epoch 9, batch 1600, loss[loss=0.2279, simple_loss=0.3059, pruned_loss=0.07498, over 13580.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2996, pruned_loss=0.07074, over 2624249.51 frames. 
], batch size: 39, lr: 1.39e-02, grad_scale: 16.0 +2024-08-03 12:14:01,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=112654.66666666667, ans=0.0 +2024-08-03 12:14:14,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=112691.33333333333, ans=0.025 +2024-08-03 12:14:18,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.34 vs. limit=15.0 +2024-08-03 12:14:43,182 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.525e+01 1.266e+02 1.505e+02 1.991e+02 3.418e+02, threshold=3.010e+02, percent-clipped=5.0 +2024-08-03 12:14:46,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112801.33333333333, ans=0.1 +2024-08-03 12:14:47,571 INFO [train.py:1114] (0/4) Epoch 9, batch 1650, loss[loss=0.2372, simple_loss=0.3273, pruned_loss=0.07355, over 13314.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2998, pruned_loss=0.07122, over 2620801.43 frames. ], batch size: 40, lr: 1.39e-02, grad_scale: 16.0 +2024-08-03 12:15:10,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112911.33333333333, ans=0.1 +2024-08-03 12:15:26,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112984.66666666667, ans=0.1 +2024-08-03 12:15:37,664 INFO [train.py:1114] (0/4) Epoch 9, batch 1700, loss[loss=0.2305, simple_loss=0.2964, pruned_loss=0.08223, over 13257.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2996, pruned_loss=0.07084, over 2629442.36 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 16.0 +2024-08-03 12:15:43,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.23 vs. limit=12.0 +2024-08-03 12:15:50,991 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:15:51,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.26 vs. limit=22.5 +2024-08-03 12:16:02,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113094.66666666667, ans=0.0 +2024-08-03 12:16:11,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=113131.33333333333, ans=0.0 +2024-08-03 12:16:19,708 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.237e+01 1.197e+02 1.424e+02 1.856e+02 4.679e+02, threshold=2.848e+02, percent-clipped=5.0 +2024-08-03 12:16:20,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0 +2024-08-03 12:16:24,264 INFO [train.py:1114] (0/4) Epoch 9, batch 1750, loss[loss=0.2033, simple_loss=0.2744, pruned_loss=0.06609, over 13562.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2991, pruned_loss=0.07074, over 2633983.80 frames. 
], batch size: 31, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:16:24,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113204.66666666667, ans=0.125
+2024-08-03 12:16:39,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=113241.33333333333, ans=0.125
+2024-08-03 12:16:47,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.61 vs. limit=6.0
+2024-08-03 12:17:09,923 INFO [train.py:1114] (0/4) Epoch 9, batch 1800, loss[loss=0.2342, simple_loss=0.3141, pruned_loss=0.07714, over 13560.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3004, pruned_loss=0.07151, over 2634916.49 frames. ], batch size: 38, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:17:11,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=113388.0, ans=0.125
+2024-08-03 12:17:14,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0
+2024-08-03 12:17:18,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.47 vs. limit=6.0
+2024-08-03 12:17:18,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.27 vs. limit=15.0
+2024-08-03 12:17:41,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=113498.0, ans=0.0
+2024-08-03 12:17:47,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=113534.66666666667, ans=0.0
+2024-08-03 12:17:50,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.294e+02 1.753e+02 2.320e+02 3.685e+02, threshold=3.507e+02, percent-clipped=11.0
+2024-08-03 12:17:54,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=113571.33333333333, ans=0.125
+2024-08-03 12:17:55,235 INFO [train.py:1114] (0/4) Epoch 9, batch 1850, loss[loss=0.2324, simple_loss=0.3188, pruned_loss=0.07297, over 13408.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3006, pruned_loss=0.07173, over 2637817.00 frames. ], batch size: 39, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:17:57,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.17 vs. limit=22.5
+2024-08-03 12:18:01,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=113571.33333333333, ans=0.0
+2024-08-03 12:18:13,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=113608.0, ans=0.2
+2024-08-03 12:18:27,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113681.33333333333, ans=0.1
+2024-08-03 12:18:27,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0
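The `optim.py:487` WARNING lines above report five quantiles (min, 25%, median, 75%, max) of recent gradient norms, the clipping threshold currently in effect, and the share of recent batches that were clipped. One plausible way such a threshold could be derived, sketched here under the assumption that it is a multiple (`Clipping_scale=2.0`) of the recent median grad norm; the bookkeeping is illustrative, not icefall's exact optimizer code:

```python
# Hedged sketch of quartile-based gradient clipping suggested by the
# "grad-norm quartiles ... threshold ... percent-clipped" warnings.
import torch

class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 100):
        self.clipping_scale = clipping_scale
        self.window = window          # how many recent norms to remember
        self.norms: list[float] = []
        self.num_clipped = 0
        self.num_batches = 0

    def clip_(self, params: list) -> float:
        # max_norm=inf just measures the total grad norm without clipping
        norm = torch.nn.utils.clip_grad_norm_(params, float("inf")).item()
        self.norms = (self.norms + [norm])[-self.window:]
        self.num_batches += 1
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.clipping_scale * median
        if norm > threshold:          # rescale grads down to the threshold
            self.num_clipped += 1
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return threshold

    def quartiles(self) -> list[float]:
        xs = sorted(self.norms)
        return [xs[int(q * (len(xs) - 1))] for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
```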
+2024-08-03 12:18:43,046 INFO [train.py:1114] (0/4) Epoch 9, batch 1900, loss[loss=0.2046, simple_loss=0.2934, pruned_loss=0.05787, over 13338.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3007, pruned_loss=0.07148, over 2640170.15 frames. ], batch size: 40, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:18:49,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113754.66666666667, ans=0.125
+2024-08-03 12:19:02,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=113828.0, ans=0.2
+2024-08-03 12:19:08,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=113828.0, ans=0.125
+2024-08-03 12:19:13,607 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:19:17,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=113864.66666666667, ans=0.125
+2024-08-03 12:19:26,848 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.181e+02 1.342e+02 1.556e+02 3.723e+02, threshold=2.684e+02, percent-clipped=1.0
+2024-08-03 12:19:30,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=113901.33333333333, ans=0.2
+2024-08-03 12:19:30,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113901.33333333333, ans=0.125
+2024-08-03 12:19:31,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=113901.33333333333, ans=0.0
+2024-08-03 12:19:33,202 INFO [train.py:1114] (0/4) Epoch 9, batch 1950, loss[loss=0.2052, simple_loss=0.2869, pruned_loss=0.06173, over 13580.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3013, pruned_loss=0.07113, over 2646879.77 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:19:39,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=113938.0, ans=0.0
+2024-08-03 12:19:50,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=113974.66666666667, ans=0.2
+2024-08-03 12:19:58,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.91 vs. limit=15.0
+2024-08-03 12:20:11,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114048.0, ans=0.1
+2024-08-03 12:20:23,643 INFO [train.py:1114] (0/4) Epoch 9, batch 2000, loss[loss=0.1845, simple_loss=0.2574, pruned_loss=0.05584, over 13535.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3019, pruned_loss=0.07193, over 2635527.05 frames. 
], batch size: 31, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:20:25,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=114121.33333333333, ans=0.2 +2024-08-03 12:20:34,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114158.0, ans=0.125 +2024-08-03 12:20:35,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114158.0, ans=0.1 +2024-08-03 12:20:37,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=114158.0, ans=0.125 +2024-08-03 12:20:53,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.57 vs. limit=22.5 +2024-08-03 12:21:02,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=114268.0, ans=0.025 +2024-08-03 12:21:05,457 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.389e+01 1.230e+02 1.563e+02 1.827e+02 3.181e+02, threshold=3.125e+02, percent-clipped=4.0 +2024-08-03 12:21:10,020 INFO [train.py:1114] (0/4) Epoch 9, batch 2050, loss[loss=0.1754, simple_loss=0.256, pruned_loss=0.04743, over 13405.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3007, pruned_loss=0.07151, over 2632951.07 frames. ], batch size: 32, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:21:15,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=114304.66666666667, ans=0.125 +2024-08-03 12:21:19,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114341.33333333333, ans=0.125 +2024-08-03 12:21:20,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=114341.33333333333, ans=0.07 +2024-08-03 12:21:22,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=114341.33333333333, ans=0.04949747468305833 +2024-08-03 12:21:48,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=114451.33333333333, ans=0.125 +2024-08-03 12:21:53,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=114451.33333333333, ans=0.2 +2024-08-03 12:21:53,818 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.10 vs. limit=15.0 +2024-08-03 12:21:57,224 INFO [train.py:1114] (0/4) Epoch 9, batch 2100, loss[loss=0.2114, simple_loss=0.2929, pruned_loss=0.06492, over 13557.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2998, pruned_loss=0.07104, over 2638866.71 frames. 
], batch size: 37, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:21:59,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=114488.0, ans=0.125 +2024-08-03 12:22:07,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=114524.66666666667, ans=0.2 +2024-08-03 12:22:09,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=114524.66666666667, ans=0.2 +2024-08-03 12:22:11,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=114524.66666666667, ans=0.2 +2024-08-03 12:22:53,817 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:22:54,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.194e+02 1.363e+02 1.768e+02 4.718e+02, threshold=2.726e+02, percent-clipped=3.0 +2024-08-03 12:22:54,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-03 12:23:07,972 INFO [train.py:1114] (0/4) Epoch 9, batch 2150, loss[loss=0.2369, simple_loss=0.3093, pruned_loss=0.08226, over 13561.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3004, pruned_loss=0.07188, over 2647059.91 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:23:39,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=114708.0, ans=10.0 +2024-08-03 12:23:49,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=114708.0, ans=0.5 +2024-08-03 12:24:32,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=114781.33333333333, ans=0.125 +2024-08-03 12:24:32,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=114781.33333333333, ans=0.0 +2024-08-03 12:24:33,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114781.33333333333, ans=0.1 +2024-08-03 12:24:56,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=114818.0, ans=0.2 +2024-08-03 12:24:59,182 INFO [train.py:1114] (0/4) Epoch 9, batch 2200, loss[loss=0.2527, simple_loss=0.3355, pruned_loss=0.08493, over 13422.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2994, pruned_loss=0.07121, over 2645123.73 frames. ], batch size: 39, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:24:59,351 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:25:00,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114854.66666666667, ans=0.125 +2024-08-03 12:25:03,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=114854.66666666667, ans=0.0 +2024-08-03 12:25:07,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.76 vs. 
limit=15.0
+2024-08-03 12:25:47,882 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.835e+01 1.281e+02 1.616e+02 2.381e+02 3.635e+02, threshold=3.231e+02, percent-clipped=12.0
+2024-08-03 12:25:52,562 INFO [train.py:1114] (0/4) Epoch 9, batch 2250, loss[loss=0.1822, simple_loss=0.2724, pruned_loss=0.04598, over 13365.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2994, pruned_loss=0.07117, over 2642063.81 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:26:14,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=115111.33333333333, ans=0.0
+2024-08-03 12:26:26,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=115148.0, ans=0.125
+2024-08-03 12:26:29,722 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:26:43,664 INFO [train.py:1114] (0/4) Epoch 9, batch 2300, loss[loss=0.1749, simple_loss=0.2525, pruned_loss=0.04869, over 13562.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.298, pruned_loss=0.0708, over 2638268.02 frames. ], batch size: 33, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:27:05,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115294.66666666667, ans=0.125
+2024-08-03 12:27:20,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115331.33333333333, ans=0.125
+2024-08-03 12:27:22,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=115331.33333333333, ans=0.0
+2024-08-03 12:27:24,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=115368.0, ans=0.125
+2024-08-03 12:27:25,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=115368.0, ans=0.0
+2024-08-03 12:27:28,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.909e+01 1.211e+02 1.428e+02 1.712e+02 2.709e+02, threshold=2.855e+02, percent-clipped=0.0
+2024-08-03 12:27:32,884 INFO [train.py:1114] (0/4) Epoch 9, batch 2350, loss[loss=0.2391, simple_loss=0.3147, pruned_loss=0.08174, over 13533.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.298, pruned_loss=0.07091, over 2640823.69 frames. ], batch size: 38, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:27:52,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.10 vs. limit=6.0
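The `Whitening` lines compare a per-module statistic (`metric`) against a limit, intervening only when the activations' channel covariance drifts too far from white. A simplified reading of such a metric, assuming it measures how uneven the covariance eigenvalue spectrum is (exactly 1.0 for perfectly white features); this is a plausible reconstruction for illustration, not icefall's exact formula:

```python
# Hedged sketch of a whitening metric like "metric=5.10 vs. limit=6.0":
# a measure of covariance anisotropy, computed per channel group.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels). Returns ~1.0 for white features,
    larger when variance concentrates in a few directions."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    # per-group channel covariance: (num_groups, c, c)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames
    eigs = torch.linalg.eigvalsh(cov)
    # second moment over squared first moment of the spectrum:
    # equals 1.0 iff all eigenvalues are equal (the group is white)
    return ((eigs ** 2).mean() / (eigs.mean() ** 2)).item()

feats = torch.randn(1000, 256)                # already ~white
print(whitening_metric(feats, num_groups=4))  # close to 1.0
```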
+2024-08-03 12:27:53,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=115478.0, ans=0.125
+2024-08-03 12:27:53,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=115478.0, ans=0.0
+2024-08-03 12:27:59,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115478.0, ans=0.0
+2024-08-03 12:28:00,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=115478.0, ans=0.125
+2024-08-03 12:28:09,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=115514.66666666667, ans=0.125
+2024-08-03 12:28:09,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.30 vs. limit=15.0
+2024-08-03 12:28:13,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=115551.33333333333, ans=0.0
+2024-08-03 12:28:16,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=115551.33333333333, ans=0.125
+2024-08-03 12:28:21,705 INFO [train.py:1114] (0/4) Epoch 9, batch 2400, loss[loss=0.1788, simple_loss=0.2642, pruned_loss=0.04664, over 13526.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2981, pruned_loss=0.07058, over 2642391.20 frames. ], batch size: 35, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:28:30,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=115588.0, ans=0.0
+2024-08-03 12:28:44,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.87 vs. limit=5.0
+2024-08-03 12:28:55,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=115698.0, ans=0.0
+2024-08-03 12:28:55,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=115698.0, ans=0.125
+2024-08-03 12:28:59,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=115734.66666666667, ans=0.125
+2024-08-03 12:29:04,116 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.274e+02 1.512e+02 2.104e+02 3.890e+02, threshold=3.023e+02, percent-clipped=4.0
+2024-08-03 12:29:05,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=8.0
+2024-08-03 12:29:07,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=115734.66666666667, ans=0.125
+2024-08-03 12:29:08,560 INFO [train.py:1114] (0/4) Epoch 9, batch 2450, loss[loss=0.2225, simple_loss=0.3054, pruned_loss=0.06985, over 13358.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2991, pruned_loss=0.07098, over 2632720.37 frames. 
], batch size: 37, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:29:21,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115808.0, ans=0.125 +2024-08-03 12:29:38,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=115881.33333333333, ans=0.2 +2024-08-03 12:29:41,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115881.33333333333, ans=0.1 +2024-08-03 12:29:48,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=115918.0, ans=0.125 +2024-08-03 12:29:53,261 INFO [train.py:1114] (0/4) Epoch 9, batch 2500, loss[loss=0.234, simple_loss=0.3176, pruned_loss=0.07521, over 13390.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2986, pruned_loss=0.07029, over 2636948.22 frames. ], batch size: 39, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:30:05,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=115991.33333333333, ans=0.025 +2024-08-03 12:30:31,836 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.707e+01 1.175e+02 1.472e+02 1.847e+02 3.243e+02, threshold=2.944e+02, percent-clipped=1.0 +2024-08-03 12:30:33,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=116101.33333333333, ans=0.125 +2024-08-03 12:30:36,220 INFO [train.py:1114] (0/4) Epoch 9, batch 2550, loss[loss=0.1935, simple_loss=0.2686, pruned_loss=0.05917, over 13554.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2982, pruned_loss=0.06982, over 2638258.29 frames. ], batch size: 31, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:30:37,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=116138.0, ans=0.125 +2024-08-03 12:30:40,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=116138.0, ans=0.0 +2024-08-03 12:31:05,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=116248.0, ans=0.125 +2024-08-03 12:31:12,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=116284.66666666667, ans=0.2 +2024-08-03 12:31:14,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=116284.66666666667, ans=0.125 +2024-08-03 12:31:20,924 INFO [train.py:1114] (0/4) Epoch 9, batch 2600, loss[loss=0.2052, simple_loss=0.2771, pruned_loss=0.06662, over 13564.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2987, pruned_loss=0.07003, over 2636643.71 frames. ], batch size: 36, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:31:28,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=116321.33333333333, ans=0.0 +2024-08-03 12:31:29,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=12.0 +2024-08-03 12:31:29,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.34 vs. 
limit=15.0 +2024-08-03 12:31:30,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0 +2024-08-03 12:31:36,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116358.0, ans=0.125 +2024-08-03 12:32:00,493 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.327e+01 1.173e+02 1.340e+02 1.707e+02 3.211e+02, threshold=2.680e+02, percent-clipped=1.0 +2024-08-03 12:32:03,944 INFO [train.py:1114] (0/4) Epoch 9, batch 2650, loss[loss=0.212, simple_loss=0.2958, pruned_loss=0.06412, over 13297.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2987, pruned_loss=0.06977, over 2639633.63 frames. ], batch size: 46, lr: 1.37e-02, grad_scale: 16.0 +2024-08-03 12:32:19,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=116541.33333333333, ans=0.0 +2024-08-03 12:32:30,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=116614.66666666667, ans=0.05 +2024-08-03 12:32:33,235 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=12.0 +2024-08-03 12:32:34,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.36 vs. limit=15.0 +2024-08-03 12:32:38,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=116651.33333333333, ans=0.025 +2024-08-03 12:32:46,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.96 vs. limit=15.0 +2024-08-03 12:32:47,492 INFO [train.py:1114] (0/4) Epoch 9, batch 2700, loss[loss=0.2446, simple_loss=0.3154, pruned_loss=0.0869, over 13543.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2987, pruned_loss=0.06978, over 2637661.37 frames. ], batch size: 40, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:32:59,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=116724.66666666667, ans=0.125 +2024-08-03 12:33:00,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=116724.66666666667, ans=0.2 +2024-08-03 12:33:10,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=116761.33333333333, ans=0.2 +2024-08-03 12:33:28,905 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.391e+01 1.337e+02 1.607e+02 2.047e+02 3.156e+02, threshold=3.214e+02, percent-clipped=11.0 +2024-08-03 12:33:30,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=116834.66666666667, ans=0.95 +2024-08-03 12:33:32,425 INFO [train.py:1114] (0/4) Epoch 9, batch 2750, loss[loss=0.2142, simple_loss=0.2853, pruned_loss=0.07157, over 13322.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2978, pruned_loss=0.06951, over 2635106.41 frames. 
], batch size: 34, lr: 1.36e-02, grad_scale: 16.0
+2024-08-03 12:33:39,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=116871.33333333333, ans=0.125
+2024-08-03 12:33:41,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=116908.0, ans=0.2
+2024-08-03 12:33:51,270 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:34:09,848 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:34:16,673 INFO [train.py:1114] (0/4) Epoch 9, batch 2800, loss[loss=0.2868, simple_loss=0.343, pruned_loss=0.1153, over 9234.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2984, pruned_loss=0.07015, over 2626661.78 frames. ], batch size: 96, lr: 1.36e-02, grad_scale: 16.0
+2024-08-03 12:34:34,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117128.0, ans=0.1
+2024-08-03 12:34:36,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=117128.0, ans=0.025
+2024-08-03 12:34:37,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.94 vs. limit=15.0
+2024-08-03 12:34:38,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.81 vs. limit=8.0
+2024-08-03 12:34:46,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=117164.66666666667, ans=0.2
+2024-08-03 12:34:47,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0
+2024-08-03 12:34:53,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=117201.33333333333, ans=0.125
+2024-08-03 12:34:58,832 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.288e+02 1.462e+02 1.862e+02 3.632e+02, threshold=2.925e+02, percent-clipped=2.0
+2024-08-03 12:35:01,503 INFO [train.py:1114] (0/4) Epoch 9, batch 2850, loss[loss=0.1963, simple_loss=0.2786, pruned_loss=0.057, over 13359.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2994, pruned_loss=0.07092, over 2621065.49 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 16.0
+2024-08-03 12:35:12,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=117274.66666666667, ans=0.125
+2024-08-03 12:35:19,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117311.33333333333, ans=0.125
+2024-08-03 12:35:23,031 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-32000.pt
+2024-08-03 12:36:01,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=117384.66666666667, ans=0.035
+2024-08-03 12:36:05,391 INFO [train.py:1114] (0/4) Epoch 9, batch 2900, loss[loss=0.1981, simple_loss=0.2731, pruned_loss=0.06155, over 13369.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2998, pruned_loss=0.07064, over 2631296.23 frames. ], batch size: 36, lr: 1.36e-02, grad_scale: 16.0
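In the `train.py:1114` lines, `loss[...]` is measured on the current batch while `tot_loss[...]` is an aggregate weighted by the number of acoustic frames each batch contributes (the `over N frames` fields; in the actual recipe the aggregate is also decayed so it tracks recent batches). A minimal sketch of that frame-weighted bookkeeping, with names assumed for illustration:

```python
# Hedged sketch of the frame-weighted averaging behind the
# "loss[... over 13369.00 frames], tot_loss[... over 2631296.23 frames]" pairs.
class MetricsTracker(dict):
    def __iadd__(self, other):
        for k, v in other.items():
            self[k] = self.get(k, 0.0) + v
        return self

    def norm(self):
        # divide every accumulated sum by the accumulated frame count
        frames = self["frames"]
        return {k: v / frames for k, v in self.items() if k != "frames"}

tot = MetricsTracker()
for batch in [
    {"frames": 13448.0, "loss": 0.2397 * 13448.0},   # sums, not means
    {"frames": 12811.0, "loss": 0.2361 * 12811.0},
]:
    tot += batch
    print(f"batch loss={batch['loss'] / batch['frames']:.4f}, "
          f"tot_loss={tot.norm()['loss']:.4f} over {tot['frames']} frames")
```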
+2024-08-03 12:36:10,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=117421.33333333333, ans=0.125
+2024-08-03 12:36:14,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.06 vs. limit=15.0
+2024-08-03 12:36:28,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=117494.66666666667, ans=0.1
+2024-08-03 12:36:28,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=117494.66666666667, ans=0.0
+2024-08-03 12:36:47,221 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.425e+01 1.178e+02 1.305e+02 1.613e+02 2.693e+02, threshold=2.610e+02, percent-clipped=0.0
+2024-08-03 12:36:48,957 INFO [train.py:1114] (0/4) Epoch 9, batch 2950, loss[loss=0.1868, simple_loss=0.2694, pruned_loss=0.0521, over 13353.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2987, pruned_loss=0.07033, over 2627883.50 frames. ], batch size: 34, lr: 1.36e-02, grad_scale: 8.0
+2024-08-03 12:36:55,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=117604.66666666667, ans=0.0
+2024-08-03 12:36:57,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117641.33333333333, ans=0.125
+2024-08-03 12:37:05,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117678.0, ans=0.1
+2024-08-03 12:37:32,425 INFO [train.py:1114] (0/4) Epoch 9, batch 3000, loss[loss=0.2136, simple_loss=0.2902, pruned_loss=0.06853, over 13537.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2984, pruned_loss=0.0702, over 2628395.63 frames. ], batch size: 37, lr: 1.36e-02, grad_scale: 8.0
+2024-08-03 12:37:32,425 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 12:37:45,827 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.4897, 2.1342, 2.1252, 1.8890, 1.9918, 2.2541, 1.0218, 1.6383],
+       device='cuda:0')
+2024-08-03 12:37:49,955 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.1846, simple_loss=0.2849, pruned_loss=0.04217, over 944034.00 frames.
+2024-08-03 12:37:49,955 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 12:37:51,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117788.0, ans=0.125
+2024-08-03 12:37:58,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.96 vs. 
limit=15.0 +2024-08-03 12:38:11,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=117861.33333333333, ans=0.125 +2024-08-03 12:38:16,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=117898.0, ans=0.2 +2024-08-03 12:38:21,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=117898.0, ans=0.0 +2024-08-03 12:38:31,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.182e+02 1.340e+02 1.696e+02 3.056e+02, threshold=2.681e+02, percent-clipped=1.0 +2024-08-03 12:38:31,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=117934.66666666667, ans=0.125 +2024-08-03 12:38:32,964 INFO [train.py:1114] (0/4) Epoch 9, batch 3050, loss[loss=0.1914, simple_loss=0.271, pruned_loss=0.0559, over 13523.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3002, pruned_loss=0.07087, over 2626031.54 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:38:35,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=117971.33333333333, ans=0.2 +2024-08-03 12:38:37,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=117971.33333333333, ans=0.0 +2024-08-03 12:38:42,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.43 vs. limit=22.5 +2024-08-03 12:38:44,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.09 vs. limit=22.5 +2024-08-03 12:38:54,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118044.66666666667, ans=0.1 +2024-08-03 12:39:17,096 INFO [train.py:1114] (0/4) Epoch 9, batch 3100, loss[loss=0.2421, simple_loss=0.3177, pruned_loss=0.08324, over 13343.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2992, pruned_loss=0.07008, over 2625549.31 frames. ], batch size: 46, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:39:25,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.62 vs. limit=5.0 +2024-08-03 12:39:49,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=118264.66666666667, ans=0.025 +2024-08-03 12:39:53,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118301.33333333333, ans=0.125 +2024-08-03 12:39:54,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=118301.33333333333, ans=0.125 +2024-08-03 12:39:55,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=118301.33333333333, ans=0.125 +2024-08-03 12:39:58,240 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.498e+01 1.162e+02 1.416e+02 1.749e+02 3.223e+02, threshold=2.833e+02, percent-clipped=4.0 +2024-08-03 12:40:00,002 INFO [train.py:1114] (0/4) Epoch 9, batch 3150, loss[loss=0.2237, simple_loss=0.3038, pruned_loss=0.07176, over 13083.00 frames. 
], tot_loss[loss=0.2189, simple_loss=0.2986, pruned_loss=0.06958, over 2627161.34 frames. ], batch size: 48, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:40:01,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118338.0, ans=0.125 +2024-08-03 12:40:15,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=118374.66666666667, ans=0.2 +2024-08-03 12:40:23,252 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:40:23,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=118411.33333333333, ans=0.0 +2024-08-03 12:40:24,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.81 vs. limit=22.5 +2024-08-03 12:42:06,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118448.0, ans=0.1 +2024-08-03 12:42:09,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=118448.0, ans=0.125 +2024-08-03 12:42:10,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=118448.0, ans=0.125 +2024-08-03 12:42:23,263 INFO [train.py:1114] (0/4) Epoch 9, batch 3200, loss[loss=0.217, simple_loss=0.3062, pruned_loss=0.06386, over 13534.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2984, pruned_loss=0.06957, over 2633789.12 frames. ], batch size: 37, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:42:25,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.99 vs. limit=15.0 +2024-08-03 12:42:28,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=118521.33333333333, ans=0.0 +2024-08-03 12:42:31,061 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:43:06,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.301e+02 1.590e+02 2.129e+02 3.021e+02, threshold=3.180e+02, percent-clipped=5.0 +2024-08-03 12:43:07,348 INFO [train.py:1114] (0/4) Epoch 9, batch 3250, loss[loss=0.2195, simple_loss=0.2991, pruned_loss=0.0699, over 13390.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2985, pruned_loss=0.06914, over 2638405.17 frames. ], batch size: 38, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:43:16,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=118741.33333333333, ans=0.125 +2024-08-03 12:43:37,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=118814.66666666667, ans=0.125 +2024-08-03 12:43:41,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=118851.33333333333, ans=0.125 +2024-08-03 12:43:55,818 INFO [train.py:1114] (0/4) Epoch 9, batch 3300, loss[loss=0.2454, simple_loss=0.3216, pruned_loss=0.08464, over 12970.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2967, pruned_loss=0.06854, over 2639571.35 frames. 
], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:44:05,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=118888.0, ans=0.0 +2024-08-03 12:44:24,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=118961.33333333333, ans=0.0 +2024-08-03 12:44:27,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.02 vs. limit=10.0 +2024-08-03 12:44:37,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=119034.66666666667, ans=0.125 +2024-08-03 12:44:43,223 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.376e+01 1.208e+02 1.436e+02 1.732e+02 3.249e+02, threshold=2.873e+02, percent-clipped=1.0 +2024-08-03 12:44:44,093 INFO [train.py:1114] (0/4) Epoch 9, batch 3350, loss[loss=0.221, simple_loss=0.3042, pruned_loss=0.06886, over 12968.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2979, pruned_loss=0.06925, over 2629182.11 frames. ], batch size: 48, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:44:47,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119071.33333333333, ans=0.1 +2024-08-03 12:44:48,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=119071.33333333333, ans=0.125 +2024-08-03 12:44:55,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0 +2024-08-03 12:44:59,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119108.0, ans=0.125 +2024-08-03 12:45:07,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=119144.66666666667, ans=15.0 +2024-08-03 12:45:26,435 INFO [train.py:1114] (0/4) Epoch 9, batch 3400, loss[loss=0.1898, simple_loss=0.2649, pruned_loss=0.05739, over 13555.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2981, pruned_loss=0.06971, over 2624299.17 frames. 
], batch size: 31, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:45:31,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=119254.66666666667, ans=0.05 +2024-08-03 12:45:32,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=119254.66666666667, ans=0.025 +2024-08-03 12:45:32,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=119254.66666666667, ans=0.0 +2024-08-03 12:45:44,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=119328.0, ans=0.2 +2024-08-03 12:45:45,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=119328.0, ans=0.125 +2024-08-03 12:45:46,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=119328.0, ans=0.0 +2024-08-03 12:45:54,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.93 vs. limit=10.0 +2024-08-03 12:45:54,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=15.0 +2024-08-03 12:46:00,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=119364.66666666667, ans=0.025 +2024-08-03 12:46:01,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119364.66666666667, ans=0.1 +2024-08-03 12:46:17,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=119401.33333333333, ans=0.125 +2024-08-03 12:46:21,252 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.200e+02 1.400e+02 1.853e+02 3.003e+02, threshold=2.800e+02, percent-clipped=1.0 +2024-08-03 12:46:21,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119438.0, ans=0.1 +2024-08-03 12:46:22,086 INFO [train.py:1114] (0/4) Epoch 9, batch 3450, loss[loss=0.2253, simple_loss=0.3059, pruned_loss=0.07237, over 12940.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.298, pruned_loss=0.06961, over 2628265.91 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:46:26,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=119438.0, ans=0.125 +2024-08-03 12:46:27,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119438.0, ans=0.125 +2024-08-03 12:46:36,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=119474.66666666667, ans=10.0 +2024-08-03 12:46:42,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.33 vs. 
limit=15.0 +2024-08-03 12:46:54,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=119548.0, ans=0.125 +2024-08-03 12:47:01,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=119584.66666666667, ans=0.125 +2024-08-03 12:47:03,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.60 vs. limit=6.0 +2024-08-03 12:47:06,100 INFO [train.py:1114] (0/4) Epoch 9, batch 3500, loss[loss=0.195, simple_loss=0.2713, pruned_loss=0.05937, over 13510.00 frames. ], tot_loss[loss=0.219, simple_loss=0.298, pruned_loss=0.07, over 2629890.24 frames. ], batch size: 34, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:47:16,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.47 vs. limit=10.0 +2024-08-03 12:47:24,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119694.66666666667, ans=0.1 +2024-08-03 12:47:27,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119694.66666666667, ans=0.125 +2024-08-03 12:47:27,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=119694.66666666667, ans=10.0 +2024-08-03 12:47:31,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=119731.33333333333, ans=0.0 +2024-08-03 12:47:51,576 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.983e+01 1.270e+02 1.503e+02 1.772e+02 3.260e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-03 12:47:52,458 INFO [train.py:1114] (0/4) Epoch 9, batch 3550, loss[loss=0.2262, simple_loss=0.3087, pruned_loss=0.07181, over 12581.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3008, pruned_loss=0.07161, over 2629014.82 frames. ], batch size: 59, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:47:58,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.90 vs. limit=22.5 +2024-08-03 12:48:02,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=119841.33333333333, ans=0.035 +2024-08-03 12:48:05,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=119841.33333333333, ans=0.125 +2024-08-03 12:48:13,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.32 vs. limit=10.0 +2024-08-03 12:48:18,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119914.66666666667, ans=0.125 +2024-08-03 12:48:37,812 INFO [train.py:1114] (0/4) Epoch 9, batch 3600, loss[loss=0.258, simple_loss=0.3226, pruned_loss=0.09669, over 8741.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3064, pruned_loss=0.07708, over 2485688.34 frames. 
], batch size: 96, lr: 1.35e-02, grad_scale: 16.0
+2024-08-03 12:48:38,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=119988.0, ans=0.025
+2024-08-03 12:48:57,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120061.33333333333, ans=0.125
+2024-08-03 12:49:01,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0
+2024-08-03 12:49:02,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.34 vs. limit=15.0
+2024-08-03 12:49:11,663 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-9.pt
+2024-08-03 12:50:09,321 INFO [train.py:1114] (0/4) Epoch 10, batch 0, loss[loss=0.2039, simple_loss=0.2771, pruned_loss=0.06534, over 13337.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2771, pruned_loss=0.06534, over 13337.00 frames. ], batch size: 33, lr: 1.28e-02, grad_scale: 32.0
+2024-08-03 12:50:09,322 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 12:50:15,894 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.1666, 1.2753, 3.1218, 3.0376], device='cuda:0')
+2024-08-03 12:50:19,384 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.1895, simple_loss=0.2901, pruned_loss=0.04443, over 944034.00 frames.
+2024-08-03 12:50:19,385 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 12:50:27,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.231e+02 1.354e+02 1.561e+02 3.235e+02, threshold=2.709e+02, percent-clipped=1.0
+2024-08-03 12:50:29,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=120171.33333333333, ans=0.125
+2024-08-03 12:50:29,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=120171.33333333333, ans=0.125
+2024-08-03 12:50:41,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=12.0
+2024-08-03 12:50:43,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=120208.0, ans=0.0
+2024-08-03 12:50:49,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=15.0
+2024-08-03 12:50:50,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=15.0
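Around each `Computing validation loss` block, `zipformer.py:1858` also dumps `attn_weights_entropy` for selected self-attention modules, a cheap diagnostic for heads collapsing onto a few positions. A sketch of that statistic, with the tensor layout assumed for illustration:

```python
# Hedged sketch of the per-head attention-entropy diagnostic logged above;
# low entropy means a head attends to very few key positions.
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """attn: (num_heads, query_len, key_len), rows already softmaxed.
    Returns the mean entropy per head, in nats."""
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # (heads, query_len)
    return ent.mean(dim=-1)                           # (heads,)

attn = torch.softmax(torch.randn(4, 16, 16), dim=-1)
# uniform attention over 16 keys would give log(16) ~= 2.77 nats;
# values far below that (like the 1.02 in the log) suggest peaky heads
print(attn_weights_entropy(attn))
```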
limit=15.0 +2024-08-03 12:50:51,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=120244.66666666667, ans=0.025 +2024-08-03 12:50:57,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120281.33333333333, ans=0.125 +2024-08-03 12:50:59,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=120281.33333333333, ans=0.125 +2024-08-03 12:51:07,196 INFO [train.py:1114] (0/4) Epoch 10, batch 50, loss[loss=0.174, simple_loss=0.2502, pruned_loss=0.04886, over 13412.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3031, pruned_loss=0.07275, over 578442.84 frames. ], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:51:12,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=120318.0, ans=0.05 +2024-08-03 12:51:12,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=120318.0, ans=0.2 +2024-08-03 12:51:21,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.97 vs. limit=15.0 +2024-08-03 12:51:34,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=120428.0, ans=0.07 +2024-08-03 12:51:53,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0 +2024-08-03 12:51:54,515 INFO [train.py:1114] (0/4) Epoch 10, batch 100, loss[loss=0.2245, simple_loss=0.2989, pruned_loss=0.07505, over 13544.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3021, pruned_loss=0.07005, over 1026472.33 frames. ], batch size: 35, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:51:54,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120501.33333333333, ans=0.125 +2024-08-03 12:52:01,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=120501.33333333333, ans=0.2 +2024-08-03 12:52:02,527 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.137e+01 1.185e+02 1.436e+02 1.784e+02 2.704e+02, threshold=2.871e+02, percent-clipped=0.0 +2024-08-03 12:52:05,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. limit=15.0 +2024-08-03 12:52:08,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=120538.0, ans=0.025 +2024-08-03 12:52:11,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.15 vs. limit=15.0 +2024-08-03 12:52:15,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.26 vs. 
limit=10.0 +2024-08-03 12:52:25,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=120611.33333333333, ans=0.0 +2024-08-03 12:52:35,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=120648.0, ans=0.2 +2024-08-03 12:52:39,341 INFO [train.py:1114] (0/4) Epoch 10, batch 150, loss[loss=0.1974, simple_loss=0.2717, pruned_loss=0.06153, over 13425.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2992, pruned_loss=0.06915, over 1387806.81 frames. ], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:23,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=120831.33333333333, ans=0.025 +2024-08-03 12:53:25,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=120831.33333333333, ans=0.2 +2024-08-03 12:53:27,592 INFO [train.py:1114] (0/4) Epoch 10, batch 200, loss[loss=0.2488, simple_loss=0.3308, pruned_loss=0.08334, over 12490.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2973, pruned_loss=0.06804, over 1666763.31 frames. ], batch size: 58, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:35,558 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.179e+02 1.388e+02 1.956e+02 3.362e+02, threshold=2.775e+02, percent-clipped=2.0 +2024-08-03 12:53:41,189 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:53:43,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=120904.66666666667, ans=0.125 +2024-08-03 12:53:48,569 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.447e-03 +2024-08-03 12:54:01,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=120978.0, ans=0.2 +2024-08-03 12:54:11,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=121014.66666666667, ans=0.2 +2024-08-03 12:54:15,690 INFO [train.py:1114] (0/4) Epoch 10, batch 250, loss[loss=0.22, simple_loss=0.2972, pruned_loss=0.07143, over 13298.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2955, pruned_loss=0.06728, over 1885320.84 frames. 
], batch size: 46, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:54:15,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=121051.33333333333, ans=0.0 +2024-08-03 12:54:16,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121051.33333333333, ans=0.125 +2024-08-03 12:54:18,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121051.33333333333, ans=0.0 +2024-08-03 12:54:23,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=121051.33333333333, ans=0.125 +2024-08-03 12:54:36,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121124.66666666667, ans=0.125 +2024-08-03 12:54:38,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0 +2024-08-03 12:54:42,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=121124.66666666667, ans=0.125 +2024-08-03 12:54:49,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121161.33333333333, ans=0.1 +2024-08-03 12:54:49,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=121161.33333333333, ans=0.0 +2024-08-03 12:55:03,440 INFO [train.py:1114] (0/4) Epoch 10, batch 300, loss[loss=0.2381, simple_loss=0.3123, pruned_loss=0.08197, over 13437.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2955, pruned_loss=0.06769, over 2052025.31 frames. ], batch size: 42, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:11,656 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.594e+01 1.264e+02 1.454e+02 1.818e+02 3.044e+02, threshold=2.909e+02, percent-clipped=3.0 +2024-08-03 12:55:12,002 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:55:16,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.31 vs. limit=6.0 +2024-08-03 12:55:36,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121344.66666666667, ans=0.1 +2024-08-03 12:55:51,191 INFO [train.py:1114] (0/4) Epoch 10, batch 350, loss[loss=0.1738, simple_loss=0.2549, pruned_loss=0.04632, over 13581.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2962, pruned_loss=0.06764, over 2183107.97 frames. 
], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:56:03,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121454.66666666667, ans=0.0 +2024-08-03 12:56:06,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=121454.66666666667, ans=0.2 +2024-08-03 12:56:21,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=121528.0, ans=0.125 +2024-08-03 12:56:35,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0 +2024-08-03 12:56:36,001 INFO [train.py:1114] (0/4) Epoch 10, batch 400, loss[loss=0.2017, simple_loss=0.2831, pruned_loss=0.06008, over 13351.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2957, pruned_loss=0.06723, over 2288209.91 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:56:44,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121601.33333333333, ans=0.1 +2024-08-03 12:56:46,297 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.271e+02 1.420e+02 1.744e+02 2.813e+02, threshold=2.840e+02, percent-clipped=0.0 +2024-08-03 12:56:49,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.67 vs. limit=22.5 +2024-08-03 12:56:50,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=121638.0, ans=0.0 +2024-08-03 12:57:05,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.79 vs. limit=10.0 +2024-08-03 12:57:10,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=121711.33333333333, ans=0.05 +2024-08-03 12:57:14,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=121748.0, ans=0.025 +2024-08-03 12:57:23,499 INFO [train.py:1114] (0/4) Epoch 10, batch 450, loss[loss=0.2177, simple_loss=0.3016, pruned_loss=0.06693, over 13551.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2958, pruned_loss=0.06742, over 2360819.51 frames. ], batch size: 38, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:57:30,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.20 vs. limit=15.0 +2024-08-03 12:57:33,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.66 vs. limit=22.5 +2024-08-03 12:57:41,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=15.0 +2024-08-03 12:57:50,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=121858.0, ans=0.0 +2024-08-03 12:58:12,703 INFO [train.py:1114] (0/4) Epoch 10, batch 500, loss[loss=0.2389, simple_loss=0.32, pruned_loss=0.07892, over 13422.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2944, pruned_loss=0.06682, over 2426004.81 frames. 
], batch size: 43, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:58:13,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=121968.0, ans=0.0 +2024-08-03 12:58:20,907 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.605e+01 1.105e+02 1.306e+02 1.598e+02 3.062e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 12:58:23,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=122004.66666666667, ans=0.125 +2024-08-03 12:58:25,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.86 vs. limit=15.0 +2024-08-03 12:58:27,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=122004.66666666667, ans=0.2 +2024-08-03 12:58:38,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=122041.33333333333, ans=0.0 +2024-08-03 12:58:53,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=122114.66666666667, ans=0.125 +2024-08-03 12:59:00,204 INFO [train.py:1114] (0/4) Epoch 10, batch 550, loss[loss=0.2474, simple_loss=0.3248, pruned_loss=0.08502, over 12951.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2948, pruned_loss=0.06712, over 2468261.28 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:08,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=122188.0, ans=0.0 +2024-08-03 12:59:14,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=122188.0, ans=0.5 +2024-08-03 12:59:20,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=122224.66666666667, ans=0.0 +2024-08-03 12:59:34,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=122261.33333333333, ans=0.2 +2024-08-03 12:59:40,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=122298.0, ans=0.025 +2024-08-03 12:59:45,499 INFO [train.py:1114] (0/4) Epoch 10, batch 600, loss[loss=0.2265, simple_loss=0.3134, pruned_loss=0.06981, over 13296.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2948, pruned_loss=0.06686, over 2508256.64 frames. 
], batch size: 46, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:53,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.160e+02 1.307e+02 1.564e+02 2.892e+02, threshold=2.615e+02, percent-clipped=4.0 +2024-08-03 12:59:59,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=122371.33333333333, ans=0.05 +2024-08-03 13:00:11,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=122408.0, ans=0.125 +2024-08-03 13:00:12,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122444.66666666667, ans=0.125 +2024-08-03 13:00:12,395 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.78 vs. limit=15.0 +2024-08-03 13:00:27,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=122481.33333333333, ans=0.0 +2024-08-03 13:00:31,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=122481.33333333333, ans=0.125 +2024-08-03 13:00:33,120 INFO [train.py:1114] (0/4) Epoch 10, batch 650, loss[loss=0.1976, simple_loss=0.2796, pruned_loss=0.05779, over 13537.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2945, pruned_loss=0.0667, over 2543536.00 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:00:45,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. limit=15.0 +2024-08-03 13:01:08,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122628.0, ans=0.1 +2024-08-03 13:01:08,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=15.0 +2024-08-03 13:01:17,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=122664.66666666667, ans=0.125 +2024-08-03 13:01:19,901 INFO [train.py:1114] (0/4) Epoch 10, batch 700, loss[loss=0.1838, simple_loss=0.2685, pruned_loss=0.04948, over 13550.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2951, pruned_loss=0.0671, over 2565784.74 frames. ], batch size: 35, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:01:27,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=122701.33333333333, ans=0.125 +2024-08-03 13:01:28,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.487e+01 1.261e+02 1.582e+02 2.111e+02 3.773e+02, threshold=3.165e+02, percent-clipped=11.0 +2024-08-03 13:01:41,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=122774.66666666667, ans=0.125 +2024-08-03 13:01:51,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=122811.33333333333, ans=0.0 +2024-08-03 13:02:02,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.88 vs. 
limit=22.5 +2024-08-03 13:02:06,760 INFO [train.py:1114] (0/4) Epoch 10, batch 750, loss[loss=0.1984, simple_loss=0.2932, pruned_loss=0.05184, over 13354.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2946, pruned_loss=0.06696, over 2582672.57 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:02:08,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=122884.66666666667, ans=0.2 +2024-08-03 13:02:34,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122958.0, ans=0.1 +2024-08-03 13:02:46,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.80 vs. limit=8.0 +2024-08-03 13:08:35,468 INFO [train.py:1114] (0/4) Epoch 10, batch 800, loss[loss=0.1799, simple_loss=0.2569, pruned_loss=0.05144, over 13341.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2942, pruned_loss=0.06685, over 2596964.48 frames. ], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:09:00,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=123104.66666666667, ans=0.2 +2024-08-03 13:09:00,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.986e+01 1.304e+02 1.516e+02 1.968e+02 2.999e+02, threshold=3.032e+02, percent-clipped=0.0 +2024-08-03 13:09:18,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=123141.33333333333, ans=0.0 +2024-08-03 13:10:07,656 INFO [train.py:1114] (0/4) Epoch 10, batch 850, loss[loss=0.2212, simple_loss=0.311, pruned_loss=0.0657, over 13344.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.294, pruned_loss=0.06706, over 2609103.33 frames. ], batch size: 40, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:12,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=123251.33333333333, ans=0.125 +2024-08-03 13:10:14,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123251.33333333333, ans=0.125 +2024-08-03 13:10:18,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.51 vs. limit=22.5 +2024-08-03 13:10:25,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123288.0, ans=0.1 +2024-08-03 13:10:28,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123324.66666666667, ans=0.125 +2024-08-03 13:10:44,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=123398.0, ans=0.2 +2024-08-03 13:10:54,536 INFO [train.py:1114] (0/4) Epoch 10, batch 900, loss[loss=0.1982, simple_loss=0.2694, pruned_loss=0.06346, over 13324.00 frames. ], tot_loss[loss=0.214, simple_loss=0.294, pruned_loss=0.06706, over 2612159.04 frames. 
], batch size: 33, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:55,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123434.66666666667, ans=0.125 +2024-08-03 13:11:04,169 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.190e+01 1.156e+02 1.356e+02 1.629e+02 2.273e+02, threshold=2.713e+02, percent-clipped=0.0 +2024-08-03 13:11:12,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=123471.33333333333, ans=0.0 +2024-08-03 13:11:17,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.22 vs. limit=15.0 +2024-08-03 13:11:17,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123508.0, ans=0.1 +2024-08-03 13:11:41,295 INFO [train.py:1114] (0/4) Epoch 10, batch 950, loss[loss=0.1988, simple_loss=0.2825, pruned_loss=0.0575, over 13530.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2947, pruned_loss=0.06733, over 2612948.46 frames. ], batch size: 34, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:12:09,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123691.33333333333, ans=0.1 +2024-08-03 13:12:12,246 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=15.0 +2024-08-03 13:12:28,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=123801.33333333333, ans=0.2 +2024-08-03 13:12:28,923 INFO [train.py:1114] (0/4) Epoch 10, batch 1000, loss[loss=0.1967, simple_loss=0.2713, pruned_loss=0.06108, over 13347.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2958, pruned_loss=0.06756, over 2611593.51 frames. ], batch size: 35, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:12:29,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.39 vs. limit=22.5 +2024-08-03 13:12:41,050 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.614e+01 1.218e+02 1.406e+02 1.942e+02 3.222e+02, threshold=2.813e+02, percent-clipped=3.0 +2024-08-03 13:12:44,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=123838.0, ans=0.125 +2024-08-03 13:12:56,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=123874.66666666667, ans=0.0 +2024-08-03 13:13:00,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=123911.33333333333, ans=0.5 +2024-08-03 13:13:14,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=123948.0, ans=0.0 +2024-08-03 13:13:16,762 INFO [train.py:1114] (0/4) Epoch 10, batch 1050, loss[loss=0.2068, simple_loss=0.2946, pruned_loss=0.05952, over 13576.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2946, pruned_loss=0.06728, over 2616027.82 frames. 
], batch size: 39, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:13:17,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=123984.66666666667, ans=0.125 +2024-08-03 13:13:18,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=123984.66666666667, ans=0.125 +2024-08-03 13:13:19,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=123984.66666666667, ans=10.0 +2024-08-03 13:13:33,163 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=2.855e-03 +2024-08-03 13:13:35,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124058.0, ans=0.125 +2024-08-03 13:13:43,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=124094.66666666667, ans=0.025 +2024-08-03 13:13:49,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=124094.66666666667, ans=10.0 +2024-08-03 13:13:49,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124094.66666666667, ans=0.125 +2024-08-03 13:13:55,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=124131.33333333333, ans=0.0 +2024-08-03 13:14:02,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124131.33333333333, ans=0.0 +2024-08-03 13:14:03,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.85 vs. limit=15.0 +2024-08-03 13:14:05,009 INFO [train.py:1114] (0/4) Epoch 10, batch 1100, loss[loss=0.1934, simple_loss=0.2682, pruned_loss=0.0593, over 13554.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2951, pruned_loss=0.06764, over 2619149.17 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:11,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124168.0, ans=0.125 +2024-08-03 13:14:11,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124168.0, ans=0.125 +2024-08-03 13:14:14,714 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.380e+01 1.135e+02 1.251e+02 1.585e+02 3.709e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 13:14:27,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124241.33333333333, ans=0.125 +2024-08-03 13:14:43,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=124314.66666666667, ans=0.125 +2024-08-03 13:14:51,685 INFO [train.py:1114] (0/4) Epoch 10, batch 1150, loss[loss=0.1944, simple_loss=0.2755, pruned_loss=0.05666, over 13576.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2946, pruned_loss=0.06728, over 2618500.45 frames. 
], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:15:21,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.04 vs. limit=15.0 +2024-08-03 13:15:23,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=124461.33333333333, ans=0.07 +2024-08-03 13:15:33,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124498.0, ans=0.1 +2024-08-03 13:15:33,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.42 vs. limit=12.0 +2024-08-03 13:15:34,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124498.0, ans=0.1 +2024-08-03 13:15:38,781 INFO [train.py:1114] (0/4) Epoch 10, batch 1200, loss[loss=0.2167, simple_loss=0.3108, pruned_loss=0.06128, over 13575.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.295, pruned_loss=0.06682, over 2616084.17 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:15:40,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=124534.66666666667, ans=0.0 +2024-08-03 13:15:48,577 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.153e+02 1.332e+02 1.610e+02 2.864e+02, threshold=2.663e+02, percent-clipped=2.0 +2024-08-03 13:16:02,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124608.0, ans=0.125 +2024-08-03 13:16:03,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=124608.0, ans=0.0 +2024-08-03 13:16:05,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.33 vs. limit=22.5 +2024-08-03 13:16:13,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=124644.66666666667, ans=0.0 +2024-08-03 13:16:16,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=124681.33333333333, ans=0.2 +2024-08-03 13:16:25,767 INFO [train.py:1114] (0/4) Epoch 10, batch 1250, loss[loss=0.2413, simple_loss=0.3153, pruned_loss=0.08361, over 13449.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.295, pruned_loss=0.06677, over 2628088.70 frames. ], batch size: 42, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:17:08,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0 +2024-08-03 13:17:09,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124864.66666666667, ans=0.1 +2024-08-03 13:17:11,298 INFO [train.py:1114] (0/4) Epoch 10, batch 1300, loss[loss=0.2196, simple_loss=0.3039, pruned_loss=0.06763, over 12829.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.294, pruned_loss=0.06624, over 2631006.46 frames. 
], batch size: 52, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:17:15,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.96 vs. limit=10.0 +2024-08-03 13:17:20,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=124938.0, ans=0.125 +2024-08-03 13:17:21,172 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.141e+01 1.219e+02 1.549e+02 1.853e+02 2.795e+02, threshold=3.098e+02, percent-clipped=1.0 +2024-08-03 13:17:29,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.91 vs. limit=15.0 +2024-08-03 13:17:51,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=12.0 +2024-08-03 13:17:58,640 INFO [train.py:1114] (0/4) Epoch 10, batch 1350, loss[loss=0.2022, simple_loss=0.2877, pruned_loss=0.05836, over 13539.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2942, pruned_loss=0.06671, over 2638261.99 frames. ], batch size: 37, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:18:07,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=125121.33333333333, ans=0.125 +2024-08-03 13:18:25,731 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:18:28,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=125194.66666666667, ans=0.0 +2024-08-03 13:18:28,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:34,545 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:18:42,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=125231.33333333333, ans=0.125 +2024-08-03 13:18:45,222 INFO [train.py:1114] (0/4) Epoch 10, batch 1400, loss[loss=0.1979, simple_loss=0.2741, pruned_loss=0.06079, over 13237.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2933, pruned_loss=0.06621, over 2643041.56 frames. 
], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:18:48,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=125268.0, ans=0.0 +2024-08-03 13:18:54,986 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.794e+01 1.134e+02 1.256e+02 1.534e+02 3.011e+02, threshold=2.513e+02, percent-clipped=0.0 +2024-08-03 13:19:09,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=125341.33333333333, ans=0.125 +2024-08-03 13:19:13,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=125378.0, ans=0.1 +2024-08-03 13:19:17,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=125378.0, ans=0.0 +2024-08-03 13:19:18,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125378.0, ans=0.125 +2024-08-03 13:19:19,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.03 vs. limit=15.0 +2024-08-03 13:19:19,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0 +2024-08-03 13:19:25,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=125414.66666666667, ans=0.0 +2024-08-03 13:19:31,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=125451.33333333333, ans=0.125 +2024-08-03 13:19:31,997 INFO [train.py:1114] (0/4) Epoch 10, batch 1450, loss[loss=0.26, simple_loss=0.3414, pruned_loss=0.08935, over 13438.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2945, pruned_loss=0.06698, over 2641914.01 frames. ], batch size: 43, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:19:37,188 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=15.0 +2024-08-03 13:19:50,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.68 vs. limit=12.0 +2024-08-03 13:20:09,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=125598.0, ans=0.125 +2024-08-03 13:20:10,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.74 vs. limit=22.5 +2024-08-03 13:20:14,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=125598.0, ans=0.2 +2024-08-03 13:20:19,174 INFO [train.py:1114] (0/4) Epoch 10, batch 1500, loss[loss=0.2165, simple_loss=0.3029, pruned_loss=0.06507, over 13402.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2938, pruned_loss=0.06609, over 2642092.35 frames. 
], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:20:21,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=125634.66666666667, ans=0.0 +2024-08-03 13:20:21,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.74 vs. limit=22.5 +2024-08-03 13:20:22,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=125634.66666666667, ans=0.125 +2024-08-03 13:20:24,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=125634.66666666667, ans=0.0 +2024-08-03 13:20:25,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=125634.66666666667, ans=0.0 +2024-08-03 13:20:27,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=125671.33333333333, ans=0.0 +2024-08-03 13:20:29,218 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.867e+01 1.156e+02 1.320e+02 1.724e+02 3.764e+02, threshold=2.640e+02, percent-clipped=6.0 +2024-08-03 13:20:32,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=125671.33333333333, ans=0.125 +2024-08-03 13:20:34,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.73 vs. limit=10.0 +2024-08-03 13:20:37,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125708.0, ans=0.125 +2024-08-03 13:20:43,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=125708.0, ans=0.04949747468305833 +2024-08-03 13:20:45,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=125744.66666666667, ans=0.0 +2024-08-03 13:21:02,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=125781.33333333333, ans=0.125 +2024-08-03 13:21:05,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=125818.0, ans=0.125 +2024-08-03 13:21:06,624 INFO [train.py:1114] (0/4) Epoch 10, batch 1550, loss[loss=0.2634, simple_loss=0.3375, pruned_loss=0.09463, over 13406.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2947, pruned_loss=0.06674, over 2631829.84 frames. ], batch size: 41, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:21:08,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=125818.0, ans=0.0 +2024-08-03 13:21:08,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=125818.0, ans=0.125 +2024-08-03 13:21:29,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=125891.33333333333, ans=0.125 +2024-08-03 13:21:31,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.17 vs. 
limit=15.0 +2024-08-03 13:21:33,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125928.0, ans=0.125 +2024-08-03 13:21:36,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=125928.0, ans=0.2 +2024-08-03 13:21:44,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.30 vs. limit=10.0 +2024-08-03 13:21:51,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=125964.66666666667, ans=0.2 +2024-08-03 13:21:54,146 INFO [train.py:1114] (0/4) Epoch 10, batch 1600, loss[loss=0.195, simple_loss=0.2884, pruned_loss=0.05079, over 13567.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2949, pruned_loss=0.06704, over 2624237.26 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:21:54,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.54 vs. limit=15.0 +2024-08-03 13:21:56,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126001.33333333333, ans=0.0 +2024-08-03 13:22:04,505 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.527e+01 1.158e+02 1.419e+02 1.742e+02 3.880e+02, threshold=2.837e+02, percent-clipped=3.0 +2024-08-03 13:22:06,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.67 vs. limit=22.5 +2024-08-03 13:22:12,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=126074.66666666667, ans=0.0 +2024-08-03 13:22:20,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.60 vs. limit=10.0 +2024-08-03 13:22:22,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=126111.33333333333, ans=0.125 +2024-08-03 13:22:29,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.46 vs. limit=15.0 +2024-08-03 13:22:32,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=126148.0, ans=0.2 +2024-08-03 13:22:41,778 INFO [train.py:1114] (0/4) Epoch 10, batch 1650, loss[loss=0.1905, simple_loss=0.2768, pruned_loss=0.05209, over 13320.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2942, pruned_loss=0.06673, over 2621035.34 frames. ], batch size: 40, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:22:52,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.70 vs. limit=22.5 +2024-08-03 13:23:00,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126258.0, ans=0.1 +2024-08-03 13:23:29,242 INFO [train.py:1114] (0/4) Epoch 10, batch 1700, loss[loss=0.2017, simple_loss=0.2745, pruned_loss=0.06445, over 13247.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2941, pruned_loss=0.06677, over 2629832.70 frames. 
], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:23:35,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.11 vs. limit=15.0 +2024-08-03 13:23:38,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=126404.66666666667, ans=0.125 +2024-08-03 13:23:39,070 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.220e+01 1.201e+02 1.479e+02 1.994e+02 3.572e+02, threshold=2.957e+02, percent-clipped=7.0 +2024-08-03 13:23:44,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126404.66666666667, ans=0.1 +2024-08-03 13:23:48,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126441.33333333333, ans=0.125 +2024-08-03 13:24:00,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=126478.0, ans=0.0 +2024-08-03 13:24:11,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=126514.66666666667, ans=0.125 +2024-08-03 13:24:11,404 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:24:14,894 INFO [train.py:1114] (0/4) Epoch 10, batch 1750, loss[loss=0.2047, simple_loss=0.284, pruned_loss=0.06269, over 13539.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2936, pruned_loss=0.06667, over 2633945.55 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:24:18,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=126551.33333333333, ans=0.2 +2024-08-03 13:24:25,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.48 vs. limit=15.0 +2024-08-03 13:24:43,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.74 vs. limit=15.0 +2024-08-03 13:24:45,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126661.33333333333, ans=0.125 +2024-08-03 13:24:56,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=126698.0, ans=0.0 +2024-08-03 13:25:01,907 INFO [train.py:1114] (0/4) Epoch 10, batch 1800, loss[loss=0.2254, simple_loss=0.3101, pruned_loss=0.07034, over 13552.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2937, pruned_loss=0.06673, over 2634784.26 frames. ], batch size: 38, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:25:02,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.13 vs. 
limit=22.5 +2024-08-03 13:25:11,949 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.656e+01 1.185e+02 1.312e+02 1.554e+02 2.308e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 13:25:15,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=126771.33333333333, ans=0.125 +2024-08-03 13:25:21,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=126808.0, ans=0.2 +2024-08-03 13:25:40,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=126881.33333333333, ans=0.125 +2024-08-03 13:25:41,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=126881.33333333333, ans=0.125 +2024-08-03 13:25:49,046 INFO [train.py:1114] (0/4) Epoch 10, batch 1850, loss[loss=0.2115, simple_loss=0.2983, pruned_loss=0.06237, over 13377.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2942, pruned_loss=0.0669, over 2638203.76 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:25:49,364 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:25:57,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126954.66666666667, ans=0.1 +2024-08-03 13:25:59,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=126954.66666666667, ans=0.0 +2024-08-03 13:26:05,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.07 vs. limit=10.0 +2024-08-03 13:26:08,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=126991.33333333333, ans=0.0 +2024-08-03 13:26:16,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=127028.0, ans=0.0 +2024-08-03 13:26:32,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=127064.66666666667, ans=0.125 +2024-08-03 13:26:35,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=127064.66666666667, ans=0.125 +2024-08-03 13:26:36,801 INFO [train.py:1114] (0/4) Epoch 10, batch 1900, loss[loss=0.2338, simple_loss=0.3137, pruned_loss=0.07691, over 13339.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.295, pruned_loss=0.06717, over 2641067.40 frames. 
], batch size: 40, lr: 1.25e-02, grad_scale: 16.0 +2024-08-03 13:26:45,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=127101.33333333333, ans=0.125 +2024-08-03 13:26:49,383 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.604e+01 1.255e+02 1.783e+02 2.547e+02 3.918e+02, threshold=3.565e+02, percent-clipped=23.0 +2024-08-03 13:26:53,192 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:26:58,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=127174.66666666667, ans=0.125 +2024-08-03 13:27:08,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-08-03 13:27:19,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=127248.0, ans=0.0 +2024-08-03 13:27:24,285 INFO [train.py:1114] (0/4) Epoch 10, batch 1950, loss[loss=0.1927, simple_loss=0.2741, pruned_loss=0.05563, over 13548.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2957, pruned_loss=0.06722, over 2647415.21 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:27:24,689 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.64 vs. limit=10.0 +2024-08-03 13:27:32,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=127284.66666666667, ans=0.0 +2024-08-03 13:27:46,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127358.0, ans=0.125 +2024-08-03 13:28:07,983 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:28:08,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127431.33333333333, ans=0.1 +2024-08-03 13:28:11,451 INFO [train.py:1114] (0/4) Epoch 10, batch 2000, loss[loss=0.1902, simple_loss=0.2683, pruned_loss=0.056, over 13584.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2961, pruned_loss=0.06757, over 2636300.68 frames. ], batch size: 31, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:28:19,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=127468.0, ans=0.025 +2024-08-03 13:28:22,597 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.471e+01 1.146e+02 1.313e+02 1.617e+02 2.483e+02, threshold=2.626e+02, percent-clipped=0.0 +2024-08-03 13:28:23,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.64 vs. limit=10.0 +2024-08-03 13:28:24,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=127504.66666666667, ans=0.125 +2024-08-03 13:28:59,019 INFO [train.py:1114] (0/4) Epoch 10, batch 2050, loss[loss=0.1891, simple_loss=0.2658, pruned_loss=0.0562, over 13419.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2951, pruned_loss=0.06732, over 2633887.02 frames. 
], batch size: 32, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:02,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=127651.33333333333, ans=0.125 +2024-08-03 13:29:07,360 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:29:12,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=127688.0, ans=0.0 +2024-08-03 13:29:14,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=8.0 +2024-08-03 13:29:27,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=127761.33333333333, ans=0.0 +2024-08-03 13:29:35,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=127798.0, ans=0.0 +2024-08-03 13:29:39,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=127798.0, ans=0.125 +2024-08-03 13:29:43,990 INFO [train.py:1114] (0/4) Epoch 10, batch 2100, loss[loss=0.2034, simple_loss=0.2878, pruned_loss=0.05955, over 13544.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2949, pruned_loss=0.06715, over 2638798.48 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:56,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.808e+01 1.235e+02 1.463e+02 1.746e+02 3.043e+02, threshold=2.927e+02, percent-clipped=3.0 +2024-08-03 13:30:03,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=127908.0, ans=0.2 +2024-08-03 13:30:07,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=127908.0, ans=0.0 +2024-08-03 13:30:15,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=127944.66666666667, ans=0.1 +2024-08-03 13:30:18,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=127944.66666666667, ans=0.125 +2024-08-03 13:30:20,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.09 vs. limit=15.0 +2024-08-03 13:30:21,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=127944.66666666667, ans=0.125 +2024-08-03 13:30:24,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.35 vs. limit=15.0 +2024-08-03 13:30:33,150 INFO [train.py:1114] (0/4) Epoch 10, batch 2150, loss[loss=0.1808, simple_loss=0.2674, pruned_loss=0.04711, over 13552.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2938, pruned_loss=0.06647, over 2647591.39 frames. 
], batch size: 36, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:30:49,670 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:30:54,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=128091.33333333333, ans=0.125 +2024-08-03 13:31:09,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128164.66666666667, ans=0.125 +2024-08-03 13:31:12,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=128164.66666666667, ans=10.0 +2024-08-03 13:31:18,239 INFO [train.py:1114] (0/4) Epoch 10, batch 2200, loss[loss=0.2438, simple_loss=0.3284, pruned_loss=0.07956, over 13386.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2938, pruned_loss=0.06638, over 2645462.75 frames. ], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:31:29,283 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.267e+02 1.540e+02 2.050e+02 4.140e+02, threshold=3.080e+02, percent-clipped=6.0 +2024-08-03 13:31:31,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=128238.0, ans=0.04949747468305833 +2024-08-03 13:31:49,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128311.33333333333, ans=0.0 +2024-08-03 13:32:00,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=128348.0, ans=0.2 +2024-08-03 13:32:05,497 INFO [train.py:1114] (0/4) Epoch 10, batch 2250, loss[loss=0.1839, simple_loss=0.2751, pruned_loss=0.04632, over 13368.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2937, pruned_loss=0.06639, over 2642210.14 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:32:09,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=128384.66666666667, ans=0.2 +2024-08-03 13:32:12,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=128384.66666666667, ans=0.025 +2024-08-03 13:32:26,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=128458.0, ans=0.125 +2024-08-03 13:32:43,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=128531.33333333333, ans=0.2 +2024-08-03 13:32:52,594 INFO [train.py:1114] (0/4) Epoch 10, batch 2300, loss[loss=0.1921, simple_loss=0.2691, pruned_loss=0.05752, over 13568.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2928, pruned_loss=0.0662, over 2638227.37 frames. 
], batch size: 33, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:33:01,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=128604.66666666667, ans=0.125 +2024-08-03 13:33:02,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128604.66666666667, ans=0.125 +2024-08-03 13:33:04,014 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.537e+01 1.182e+02 1.366e+02 1.663e+02 2.762e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 13:33:39,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.54 vs. limit=15.0 +2024-08-03 13:33:39,801 INFO [train.py:1114] (0/4) Epoch 10, batch 2350, loss[loss=0.2188, simple_loss=0.3108, pruned_loss=0.06338, over 13549.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2927, pruned_loss=0.0662, over 2640503.87 frames. ], batch size: 38, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:33:42,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=128751.33333333333, ans=0.0 +2024-08-03 13:33:46,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=128751.33333333333, ans=0.125 +2024-08-03 13:34:05,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128824.66666666667, ans=0.1 +2024-08-03 13:34:12,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=128861.33333333333, ans=0.125 +2024-08-03 13:34:14,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=128861.33333333333, ans=0.05 +2024-08-03 13:34:18,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128898.0, ans=0.0 +2024-08-03 13:34:22,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=128898.0, ans=0.125 +2024-08-03 13:34:27,063 INFO [train.py:1114] (0/4) Epoch 10, batch 2400, loss[loss=0.1834, simple_loss=0.2702, pruned_loss=0.04833, over 13522.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2931, pruned_loss=0.06649, over 2642167.63 frames. 
], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:34:39,023 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.606e+01 1.219e+02 1.512e+02 2.010e+02 3.572e+02, threshold=3.023e+02, percent-clipped=5.0 +2024-08-03 13:34:39,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=128971.33333333333, ans=0.125 +2024-08-03 13:34:44,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=128971.33333333333, ans=0.0 +2024-08-03 13:34:45,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=129008.0, ans=0.125 +2024-08-03 13:34:47,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129008.0, ans=0.125 +2024-08-03 13:35:03,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.33 vs. limit=8.0 +2024-08-03 13:35:13,332 INFO [train.py:1114] (0/4) Epoch 10, batch 2450, loss[loss=0.2226, simple_loss=0.3071, pruned_loss=0.06902, over 13344.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.295, pruned_loss=0.06772, over 2632593.18 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:35:19,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-08-03 13:35:32,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129191.33333333333, ans=0.0 +2024-08-03 13:35:35,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.52 vs. limit=15.0 +2024-08-03 13:35:39,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=129191.33333333333, ans=0.025 +2024-08-03 13:36:00,235 INFO [train.py:1114] (0/4) Epoch 10, batch 2500, loss[loss=0.218, simple_loss=0.3055, pruned_loss=0.06523, over 13416.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2949, pruned_loss=0.0676, over 2637207.56 frames. ], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:36:01,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=129301.33333333333, ans=0.125 +2024-08-03 13:36:11,366 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.968e+01 1.203e+02 1.318e+02 1.532e+02 2.282e+02, threshold=2.635e+02, percent-clipped=0.0 +2024-08-03 13:36:12,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. 
limit=15.0 +2024-08-03 13:36:14,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=129338.0, ans=0.0 +2024-08-03 13:36:15,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=129338.0, ans=0.025 +2024-08-03 13:36:26,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129374.66666666667, ans=0.125 +2024-08-03 13:36:31,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=129411.33333333333, ans=0.0 +2024-08-03 13:36:44,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.25 vs. limit=6.0 +2024-08-03 13:36:45,891 INFO [train.py:1114] (0/4) Epoch 10, batch 2550, loss[loss=0.185, simple_loss=0.2577, pruned_loss=0.05618, over 13548.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2943, pruned_loss=0.0673, over 2638775.50 frames. ], batch size: 31, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:36:48,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.87 vs. limit=15.0 +2024-08-03 13:36:48,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129484.66666666667, ans=0.0 +2024-08-03 13:36:49,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=129484.66666666667, ans=0.125 +2024-08-03 13:36:55,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.25 vs. limit=6.0 +2024-08-03 13:37:05,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=129558.0, ans=0.025 +2024-08-03 13:37:06,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=129558.0, ans=0.125 +2024-08-03 13:37:07,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=129558.0, ans=0.0 +2024-08-03 13:37:17,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=129594.66666666667, ans=0.09899494936611666 +2024-08-03 13:37:22,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=129631.33333333333, ans=0.1 +2024-08-03 13:37:29,300 INFO [train.py:1114] (0/4) Epoch 10, batch 2600, loss[loss=0.1976, simple_loss=0.2875, pruned_loss=0.05384, over 13556.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2942, pruned_loss=0.06694, over 2638063.71 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:37:36,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.98 vs. 
limit=15.0 +2024-08-03 13:37:40,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.809e+01 1.167e+02 1.488e+02 1.878e+02 3.119e+02, threshold=2.976e+02, percent-clipped=4.0 +2024-08-03 13:37:43,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=129704.66666666667, ans=0.125 +2024-08-03 13:37:44,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=129704.66666666667, ans=0.04949747468305833 +2024-08-03 13:37:44,545 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=12.0 +2024-08-03 13:37:56,596 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.575e-02 +2024-08-03 13:38:09,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0 +2024-08-03 13:38:13,137 INFO [train.py:1114] (0/4) Epoch 10, batch 2650, loss[loss=0.2159, simple_loss=0.2987, pruned_loss=0.06655, over 13328.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2944, pruned_loss=0.06704, over 2640455.27 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:38:13,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=129851.33333333333, ans=0.0 +2024-08-03 13:38:16,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.85 vs. limit=15.0 +2024-08-03 13:38:19,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=129851.33333333333, ans=0.125 +2024-08-03 13:38:24,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=129888.0, ans=0.0 +2024-08-03 13:38:26,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129888.0, ans=0.125 +2024-08-03 13:38:38,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=129961.33333333333, ans=0.2 +2024-08-03 13:38:49,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0 +2024-08-03 13:38:56,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0 +2024-08-03 13:38:57,912 INFO [train.py:1114] (0/4) Epoch 10, batch 2700, loss[loss=0.1939, simple_loss=0.2843, pruned_loss=0.05177, over 13562.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2951, pruned_loss=0.06714, over 2637382.21 frames. 
], batch size: 40, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:38:58,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=130034.66666666667, ans=0.0 +2024-08-03 13:39:10,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.234e+02 1.365e+02 1.791e+02 3.628e+02, threshold=2.731e+02, percent-clipped=1.0 +2024-08-03 13:39:18,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=130108.0, ans=0.125 +2024-08-03 13:39:38,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=130181.33333333333, ans=0.05 +2024-08-03 13:39:41,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=130218.0, ans=0.2 +2024-08-03 13:39:42,463 INFO [train.py:1114] (0/4) Epoch 10, batch 2750, loss[loss=0.2075, simple_loss=0.2853, pruned_loss=0.06479, over 13321.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2937, pruned_loss=0.06672, over 2635048.44 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 16.0 +2024-08-03 13:40:16,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=130364.66666666667, ans=0.125 +2024-08-03 13:40:26,208 INFO [train.py:1114] (0/4) Epoch 10, batch 2800, loss[loss=0.2913, simple_loss=0.3428, pruned_loss=0.1198, over 9071.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2943, pruned_loss=0.0674, over 2626483.91 frames. ], batch size: 96, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:40:38,302 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.179e+02 1.306e+02 1.650e+02 3.137e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 13:40:46,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=130474.66666666667, ans=0.0 +2024-08-03 13:40:46,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=130474.66666666667, ans=0.0 +2024-08-03 13:40:50,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=130474.66666666667, ans=0.0 +2024-08-03 13:40:59,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=130511.33333333333, ans=0.025 +2024-08-03 13:41:05,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130548.0, ans=0.1 +2024-08-03 13:41:08,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.34 vs. limit=10.0 +2024-08-03 13:41:09,742 INFO [train.py:1114] (0/4) Epoch 10, batch 2850, loss[loss=0.1894, simple_loss=0.277, pruned_loss=0.05084, over 13367.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2953, pruned_loss=0.06777, over 2620036.82 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:41:17,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.65 vs. 
limit=12.0 +2024-08-03 13:41:21,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=130621.33333333333, ans=0.0 +2024-08-03 13:41:32,638 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.20 vs. limit=8.0 +2024-08-03 13:41:45,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=130731.33333333333, ans=0.2 +2024-08-03 13:41:45,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=130731.33333333333, ans=0.125 +2024-08-03 13:41:50,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=130731.33333333333, ans=0.125 +2024-08-03 13:41:52,751 INFO [train.py:1114] (0/4) Epoch 10, batch 2900, loss[loss=0.2303, simple_loss=0.3055, pruned_loss=0.07754, over 13357.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2962, pruned_loss=0.06768, over 2631483.07 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:41:52,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=130768.0, ans=0.025 +2024-08-03 13:42:05,055 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.521e+01 1.253e+02 1.591e+02 2.001e+02 4.136e+02, threshold=3.182e+02, percent-clipped=6.0 +2024-08-03 13:42:10,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=130841.33333333333, ans=0.0 +2024-08-03 13:42:36,428 INFO [train.py:1114] (0/4) Epoch 10, batch 2950, loss[loss=0.1932, simple_loss=0.273, pruned_loss=0.0567, over 13324.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2949, pruned_loss=0.06728, over 2629941.27 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:42:56,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.93 vs. limit=15.0 +2024-08-03 13:43:29,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=131098.0, ans=0.0 +2024-08-03 13:43:37,096 INFO [train.py:1114] (0/4) Epoch 10, batch 3000, loss[loss=0.2182, simple_loss=0.2976, pruned_loss=0.06942, over 13539.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2947, pruned_loss=0.06711, over 2630231.62 frames. ], batch size: 37, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:43:37,097 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 13:43:49,244 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2807, pruned_loss=0.03945, over 944034.00 frames. 
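The recurring `optim.py` WARNING entries in this log report a quartile-based gradient-clipping scheme: five grad-norm statistics (min, 25%, median, 75%, max) over a window of recent batches, a threshold that in every entry equals `Clipping_scale` times the logged median (e.g. 2.0 × 1.463e+02 ≈ 2.927e+02 in the 13:33:04 entry), and the percentage of batches clipped. The following is a minimal sketch of that logic, assuming a simple sliding window of recent total grad norms; the function and variable names are illustrative, not icefall's actual `optim.py` internals, which may differ in detail.

```python
import torch

def clip_by_grad_norm_quartiles(params, norm_history, clipping_scale=2.0,
                                window=1000):
    """Hedged sketch of the quartile-based clipping reported in the
    optim.py WARNING lines; the real icefall implementation may differ."""
    grads = [p.grad for p in params if p.grad is not None]
    # Total gradient norm across all parameters for this batch.
    total_norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
    norm_history.append(total_norm)
    del norm_history[:-window]  # keep only the recent window of norms
    t = torch.tensor(norm_history)
    # Five statistics matching the logged "grad-norm quartiles":
    # min, 25%, median, 75%, max.
    quartiles = [torch.quantile(t, q).item()
                 for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
    threshold = clipping_scale * quartiles[2]  # scale * median, per the logs
    clipped = total_norm > threshold
    if clipped:
        # Rescale all gradients so the total norm equals the threshold.
        for g in grads:
            g.mul_(threshold / (total_norm + 1e-20))
    return quartiles, threshold, clipped
```

Plugging in the values from the 13:33:04 WARNING above, `quartiles[2] = 1.463e+02` yields a threshold of 2.926e+02, matching the logged 2.732e+02-series entry's pattern and the printed threshold up to rounding; the same 2 × median relation holds for every WARNING entry in this section.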
+2024-08-03 13:43:49,245 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 13:43:52,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=131134.66666666666, ans=0.07 +2024-08-03 13:44:02,241 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.869e+01 1.161e+02 1.330e+02 1.562e+02 2.944e+02, threshold=2.661e+02, percent-clipped=0.0 +2024-08-03 13:44:25,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131244.66666666666, ans=0.125 +2024-08-03 13:44:28,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=131244.66666666666, ans=0.125 +2024-08-03 13:44:37,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=131281.33333333334, ans=0.2 +2024-08-03 13:44:43,927 INFO [train.py:1114] (0/4) Epoch 10, batch 3050, loss[loss=0.2116, simple_loss=0.293, pruned_loss=0.06512, over 13534.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2959, pruned_loss=0.0675, over 2626600.63 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:44:46,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=15.0 +2024-08-03 13:44:48,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=131318.0, ans=0.025 +2024-08-03 13:44:51,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=15.0 +2024-08-03 13:44:53,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=131354.66666666666, ans=0.125 +2024-08-03 13:45:01,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=131391.33333333334, ans=0.125 +2024-08-03 13:45:16,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.71 vs. limit=15.0 +2024-08-03 13:45:30,536 INFO [train.py:1114] (0/4) Epoch 10, batch 3100, loss[loss=0.2425, simple_loss=0.3219, pruned_loss=0.08157, over 13313.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2954, pruned_loss=0.06758, over 2626729.58 frames. 
], batch size: 46, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:45:31,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=131501.33333333334, ans=0.04949747468305833 +2024-08-03 13:45:35,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=131501.33333333334, ans=0.1 +2024-08-03 13:45:43,065 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.582e+01 1.186e+02 1.379e+02 1.722e+02 2.702e+02, threshold=2.757e+02, percent-clipped=2.0 +2024-08-03 13:45:45,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=131538.0, ans=0.125 +2024-08-03 13:45:51,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=131574.66666666666, ans=0.0 +2024-08-03 13:46:09,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=131648.0, ans=0.0 +2024-08-03 13:46:24,092 INFO [train.py:1114] (0/4) Epoch 10, batch 3150, loss[loss=0.2226, simple_loss=0.3093, pruned_loss=0.06801, over 13058.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2949, pruned_loss=0.06719, over 2628481.14 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:46:56,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=131794.66666666666, ans=0.125 +2024-08-03 13:47:05,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=131831.33333333334, ans=0.025 +2024-08-03 13:47:05,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.34 vs. limit=6.0 +2024-08-03 13:47:09,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=131831.33333333334, ans=0.025 +2024-08-03 13:47:10,964 INFO [train.py:1114] (0/4) Epoch 10, batch 3200, loss[loss=0.2357, simple_loss=0.3143, pruned_loss=0.07856, over 13551.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2936, pruned_loss=0.06641, over 2634710.93 frames. 
], batch size: 37, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:47:11,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=131868.0, ans=0.0 +2024-08-03 13:47:14,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=131868.0, ans=0.125 +2024-08-03 13:47:15,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131868.0, ans=0.125 +2024-08-03 13:47:22,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.174e+02 1.311e+02 1.747e+02 3.069e+02, threshold=2.622e+02, percent-clipped=2.0 +2024-08-03 13:47:41,147 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-36000.pt +2024-08-03 13:47:43,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131978.0, ans=0.1 +2024-08-03 13:47:54,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=132014.66666666666, ans=0.125 +2024-08-03 13:47:55,722 INFO [train.py:1114] (0/4) Epoch 10, batch 3250, loss[loss=0.2358, simple_loss=0.3204, pruned_loss=0.07556, over 13370.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2943, pruned_loss=0.06665, over 2639475.42 frames. ], batch size: 38, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:48:09,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=132088.0, ans=0.0 +2024-08-03 13:48:17,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=132124.66666666666, ans=0.125 +2024-08-03 13:48:20,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132161.33333333334, ans=0.0 +2024-08-03 13:48:33,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=15.0 +2024-08-03 13:48:35,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=132198.0, ans=0.2 +2024-08-03 13:48:37,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=132234.66666666666, ans=0.1 +2024-08-03 13:48:38,131 INFO [train.py:1114] (0/4) Epoch 10, batch 3300, loss[loss=0.2298, simple_loss=0.3061, pruned_loss=0.07678, over 12963.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2933, pruned_loss=0.06613, over 2640986.43 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:48:50,932 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.252e+01 1.284e+02 1.634e+02 2.035e+02 3.075e+02, threshold=3.268e+02, percent-clipped=7.0 +2024-08-03 13:48:51,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.03 vs. 
limit=15.0 +2024-08-03 13:48:57,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=132308.0, ans=0.125 +2024-08-03 13:48:58,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132308.0, ans=0.125 +2024-08-03 13:49:06,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=132344.66666666666, ans=0.0 +2024-08-03 13:49:07,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-08-03 13:49:09,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132344.66666666666, ans=0.1 +2024-08-03 13:49:13,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132381.33333333334, ans=0.1 +2024-08-03 13:49:18,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=132381.33333333334, ans=0.2 +2024-08-03 13:49:21,120 INFO [train.py:1114] (0/4) Epoch 10, batch 3350, loss[loss=0.2236, simple_loss=0.3036, pruned_loss=0.0718, over 13048.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2942, pruned_loss=0.06665, over 2630809.52 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:49:36,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=132454.66666666666, ans=0.2 +2024-08-03 13:49:39,216 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:49:47,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=132528.0, ans=0.125 +2024-08-03 13:50:03,844 INFO [train.py:1114] (0/4) Epoch 10, batch 3400, loss[loss=0.2056, simple_loss=0.2809, pruned_loss=0.06518, over 13511.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2941, pruned_loss=0.06691, over 2627196.07 frames. ], batch size: 31, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:50:03,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132601.33333333334, ans=0.0 +2024-08-03 13:50:09,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=132601.33333333334, ans=0.125 +2024-08-03 13:50:13,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=132638.0, ans=0.025 +2024-08-03 13:50:16,689 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.511e+01 1.188e+02 1.377e+02 1.704e+02 3.995e+02, threshold=2.755e+02, percent-clipped=1.0 +2024-08-03 13:50:19,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=132638.0, ans=0.0 +2024-08-03 13:50:27,076 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.75 vs. 
limit=22.5 +2024-08-03 13:50:46,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132784.66666666666, ans=0.125 +2024-08-03 13:50:47,253 INFO [train.py:1114] (0/4) Epoch 10, batch 3450, loss[loss=0.2225, simple_loss=0.3051, pruned_loss=0.06998, over 12874.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2947, pruned_loss=0.06707, over 2630117.34 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:50:47,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=132784.66666666666, ans=0.125 +2024-08-03 13:50:47,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=132784.66666666666, ans=0.0 +2024-08-03 13:50:48,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=132784.66666666666, ans=0.125 +2024-08-03 13:50:57,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=132821.33333333334, ans=0.0 +2024-08-03 13:51:00,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=132821.33333333334, ans=0.125 +2024-08-03 13:51:10,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0 +2024-08-03 13:51:14,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.47 vs. limit=15.0 +2024-08-03 13:51:30,948 INFO [train.py:1114] (0/4) Epoch 10, batch 3500, loss[loss=0.2187, simple_loss=0.2972, pruned_loss=0.0701, over 13544.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2937, pruned_loss=0.06676, over 2631904.28 frames. ], batch size: 34, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:51:32,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=132968.0, ans=0.125 +2024-08-03 13:51:36,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=132968.0, ans=0.5 +2024-08-03 13:51:43,632 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.579e+01 1.188e+02 1.502e+02 1.811e+02 2.689e+02, threshold=3.004e+02, percent-clipped=0.0 +2024-08-03 13:51:46,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=133004.66666666666, ans=0.125 +2024-08-03 13:52:11,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=15.0 +2024-08-03 13:52:13,065 INFO [train.py:1114] (0/4) Epoch 10, batch 3550, loss[loss=0.2077, simple_loss=0.282, pruned_loss=0.06673, over 12654.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2954, pruned_loss=0.06704, over 2630823.48 frames. 
], batch size: 59, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:52:13,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133151.33333333334, ans=0.125 +2024-08-03 13:52:25,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133188.0, ans=0.1 +2024-08-03 13:52:27,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=133188.0, ans=0.125 +2024-08-03 13:52:34,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=133224.66666666666, ans=22.5 +2024-08-03 13:52:36,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=133224.66666666666, ans=0.04949747468305833 +2024-08-03 13:52:44,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=133261.33333333334, ans=0.2 +2024-08-03 13:52:50,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133298.0, ans=0.1 +2024-08-03 13:52:52,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=133298.0, ans=0.95 +2024-08-03 13:52:54,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=133298.0, ans=0.0 +2024-08-03 13:52:56,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=133334.66666666666, ans=0.125 +2024-08-03 13:52:56,943 INFO [train.py:1114] (0/4) Epoch 10, batch 3600, loss[loss=0.2569, simple_loss=0.3272, pruned_loss=0.09332, over 9379.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3006, pruned_loss=0.072, over 2492358.07 frames. ], batch size: 96, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:52:59,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=133334.66666666666, ans=10.0 +2024-08-03 13:53:04,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=133371.33333333334, ans=0.0 +2024-08-03 13:53:10,859 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.160e+02 1.272e+02 1.395e+02 1.858e+02, threshold=2.544e+02, percent-clipped=0.0 +2024-08-03 13:53:16,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.45 vs. 
limit=22.5 +2024-08-03 13:53:17,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133408.0, ans=0.1 +2024-08-03 13:53:18,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133408.0, ans=0.125 +2024-08-03 13:53:21,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=133408.0, ans=0.04949747468305833 +2024-08-03 13:53:31,360 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-10.pt +2024-08-03 13:55:05,753 INFO [train.py:1114] (0/4) Epoch 11, batch 0, loss[loss=0.2095, simple_loss=0.2884, pruned_loss=0.06532, over 13370.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2884, pruned_loss=0.06532, over 13370.00 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:55:05,753 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 13:55:17,609 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1876, simple_loss=0.2878, pruned_loss=0.04367, over 944034.00 frames. +2024-08-03 13:55:17,610 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 13:55:36,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.54 vs. limit=10.0 +2024-08-03 13:55:44,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=133591.33333333334, ans=0.5 +2024-08-03 13:56:05,952 INFO [train.py:1114] (0/4) Epoch 11, batch 50, loss[loss=0.1703, simple_loss=0.2533, pruned_loss=0.04366, over 13420.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2999, pruned_loss=0.06893, over 578001.75 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:56:11,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=133664.66666666666, ans=0.025 +2024-08-03 13:56:18,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=133701.33333333334, ans=0.0 +2024-08-03 13:56:23,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-08-03 13:56:31,306 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.198e+02 1.313e+02 1.584e+02 3.827e+02, threshold=2.627e+02, percent-clipped=3.0 +2024-08-03 13:56:41,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=133774.66666666666, ans=0.0 +2024-08-03 13:56:46,076 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:56:53,150 INFO [train.py:1114] (0/4) Epoch 11, batch 100, loss[loss=0.2075, simple_loss=0.2894, pruned_loss=0.06281, over 13526.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2991, pruned_loss=0.06863, over 1026426.97 frames. 
], batch size: 35, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:57:44,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133921.33333333334, ans=0.1 +2024-08-03 13:57:57,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=133958.0, ans=0.125 +2024-08-03 13:58:05,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133994.66666666666, ans=0.125 +2024-08-03 13:58:12,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=134031.33333333334, ans=0.0 +2024-08-03 13:58:13,288 INFO [train.py:1114] (0/4) Epoch 11, batch 150, loss[loss=0.1882, simple_loss=0.2686, pruned_loss=0.05386, over 13417.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2949, pruned_loss=0.06664, over 1388152.16 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:58:21,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=12.0 +2024-08-03 13:58:36,665 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.130e+02 1.367e+02 1.649e+02 2.945e+02, threshold=2.733e+02, percent-clipped=2.0 +2024-08-03 13:58:39,681 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:58:39,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=134141.33333333334, ans=0.025 +2024-08-03 13:58:45,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134141.33333333334, ans=0.0 +2024-08-03 13:58:49,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134178.0, ans=0.1 +2024-08-03 13:58:51,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=134178.0, ans=0.2 +2024-08-03 13:58:57,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134214.66666666666, ans=0.125 +2024-08-03 13:58:58,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.75 vs. limit=15.0 +2024-08-03 13:58:58,449 INFO [train.py:1114] (0/4) Epoch 11, batch 200, loss[loss=0.2207, simple_loss=0.3066, pruned_loss=0.06743, over 12742.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2925, pruned_loss=0.06542, over 1665649.67 frames. ], batch size: 59, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:59:20,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.81 vs. 
limit=12.0 +2024-08-03 13:59:26,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=134288.0, ans=0.0 +2024-08-03 14:00:21,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134324.66666666666, ans=0.125 +2024-08-03 14:00:27,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.66 vs. limit=6.0 +2024-08-03 14:00:30,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=134361.33333333334, ans=0.125 +2024-08-03 14:00:35,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-08-03 14:00:36,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=134398.0, ans=0.025 +2024-08-03 14:00:37,211 INFO [train.py:1114] (0/4) Epoch 11, batch 250, loss[loss=0.2399, simple_loss=0.317, pruned_loss=0.08136, over 13334.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2925, pruned_loss=0.06516, over 1885051.32 frames. ], batch size: 46, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:00:43,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134398.0, ans=0.0 +2024-08-03 14:00:50,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=134434.66666666666, ans=0.07 +2024-08-03 14:00:51,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=134434.66666666666, ans=0.125 +2024-08-03 14:00:53,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134434.66666666666, ans=0.1 +2024-08-03 14:00:55,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=134471.33333333334, ans=0.2 +2024-08-03 14:01:00,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.508e+01 1.185e+02 1.387e+02 1.656e+02 4.049e+02, threshold=2.774e+02, percent-clipped=1.0 +2024-08-03 14:01:00,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=134471.33333333334, ans=0.2 +2024-08-03 14:01:17,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=134544.66666666666, ans=0.125 +2024-08-03 14:01:17,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134544.66666666666, ans=0.125 +2024-08-03 14:01:25,027 INFO [train.py:1114] (0/4) Epoch 11, batch 300, loss[loss=0.225, simple_loss=0.3042, pruned_loss=0.07284, over 13436.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2925, pruned_loss=0.06561, over 2052162.10 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:01:28,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.06 vs. 
limit=15.0 +2024-08-03 14:01:37,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.70 vs. limit=22.5 +2024-08-03 14:01:42,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=134654.66666666666, ans=0.09899494936611666 +2024-08-03 14:01:49,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=134654.66666666666, ans=0.0 +2024-08-03 14:01:54,860 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:01:56,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0 +2024-08-03 14:02:06,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134728.0, ans=0.1 +2024-08-03 14:02:11,938 INFO [train.py:1114] (0/4) Epoch 11, batch 350, loss[loss=0.191, simple_loss=0.2711, pruned_loss=0.05546, over 13586.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2925, pruned_loss=0.06549, over 2183271.41 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:02:13,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=134764.66666666666, ans=0.0 +2024-08-03 14:02:21,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=134801.33333333334, ans=0.0 +2024-08-03 14:02:32,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=134838.0, ans=0.0 +2024-08-03 14:02:37,150 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.915e+01 1.230e+02 1.440e+02 1.763e+02 3.166e+02, threshold=2.879e+02, percent-clipped=2.0 +2024-08-03 14:02:44,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=134874.66666666666, ans=0.125 +2024-08-03 14:02:44,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0 +2024-08-03 14:02:50,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=134911.33333333334, ans=0.0 +2024-08-03 14:02:55,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=134911.33333333334, ans=0.125 +2024-08-03 14:02:56,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.28 vs. limit=22.5 +2024-08-03 14:02:57,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=134911.33333333334, ans=0.0 +2024-08-03 14:02:57,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.86 vs. limit=10.0 +2024-08-03 14:02:58,878 INFO [train.py:1114] (0/4) Epoch 11, batch 400, loss[loss=0.1926, simple_loss=0.2745, pruned_loss=0.05528, over 13374.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2917, pruned_loss=0.06482, over 2286472.04 frames. 
], batch size: 37, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:03:21,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=135021.33333333334, ans=0.0 +2024-08-03 14:03:27,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=12.0 +2024-08-03 14:03:32,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=135058.0, ans=0.2 +2024-08-03 14:03:43,668 INFO [train.py:1114] (0/4) Epoch 11, batch 450, loss[loss=0.2247, simple_loss=0.3155, pruned_loss=0.06691, over 13542.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2918, pruned_loss=0.06475, over 2360118.93 frames. ], batch size: 38, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:04:08,549 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.113e+02 1.253e+02 1.576e+02 3.089e+02, threshold=2.506e+02, percent-clipped=1.0 +2024-08-03 14:04:23,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135278.0, ans=0.125 +2024-08-03 14:04:30,225 INFO [train.py:1114] (0/4) Epoch 11, batch 500, loss[loss=0.2225, simple_loss=0.3096, pruned_loss=0.06766, over 13401.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2908, pruned_loss=0.06401, over 2425543.44 frames. ], batch size: 43, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:04:53,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=135388.0, ans=0.09899494936611666 +2024-08-03 14:05:15,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=135461.33333333334, ans=0.2 +2024-08-03 14:05:17,644 INFO [train.py:1114] (0/4) Epoch 11, batch 550, loss[loss=0.2122, simple_loss=0.293, pruned_loss=0.06571, over 13291.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2904, pruned_loss=0.0639, over 2468869.41 frames. 
], batch size: 49, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:05:23,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135498.0, ans=0.0 +2024-08-03 14:05:23,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=135498.0, ans=0.125 +2024-08-03 14:05:25,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=135498.0, ans=0.125 +2024-08-03 14:05:26,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135534.66666666666, ans=0.1 +2024-08-03 14:05:27,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=135534.66666666666, ans=0.05 +2024-08-03 14:05:28,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=135534.66666666666, ans=0.2 +2024-08-03 14:05:44,382 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.929e+01 1.236e+02 1.527e+02 1.937e+02 2.923e+02, threshold=3.054e+02, percent-clipped=2.0 +2024-08-03 14:06:01,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=135644.66666666666, ans=0.125 +2024-08-03 14:06:07,521 INFO [train.py:1114] (0/4) Epoch 11, batch 600, loss[loss=0.2427, simple_loss=0.3173, pruned_loss=0.08408, over 13268.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2906, pruned_loss=0.06401, over 2507859.84 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:06:12,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.84 vs. limit=22.5 +2024-08-03 14:06:12,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.74 vs. limit=15.0 +2024-08-03 14:06:14,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=135681.33333333334, ans=0.2 +2024-08-03 14:06:34,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=135791.33333333334, ans=0.125 +2024-08-03 14:06:51,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135864.66666666666, ans=0.1 +2024-08-03 14:06:52,173 INFO [train.py:1114] (0/4) Epoch 11, batch 650, loss[loss=0.197, simple_loss=0.2847, pruned_loss=0.05468, over 13545.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2894, pruned_loss=0.06328, over 2542631.30 frames. 
], batch size: 37, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:06:57,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=135864.66666666666, ans=0.2 +2024-08-03 14:06:58,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=135864.66666666666, ans=0.07 +2024-08-03 14:06:58,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=135864.66666666666, ans=0.125 +2024-08-03 14:07:16,378 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.852e+01 1.178e+02 1.481e+02 2.104e+02 3.972e+02, threshold=2.962e+02, percent-clipped=10.0 +2024-08-03 14:07:39,669 INFO [train.py:1114] (0/4) Epoch 11, batch 700, loss[loss=0.1818, simple_loss=0.2662, pruned_loss=0.04867, over 13549.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2898, pruned_loss=0.0633, over 2564834.58 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:07:45,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=136048.0, ans=0.0 +2024-08-03 14:07:48,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136084.66666666666, ans=0.125 +2024-08-03 14:07:58,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-08-03 14:08:06,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=136158.0, ans=0.0 +2024-08-03 14:08:26,738 INFO [train.py:1114] (0/4) Epoch 11, batch 750, loss[loss=0.2136, simple_loss=0.2946, pruned_loss=0.0663, over 13359.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2891, pruned_loss=0.06302, over 2582084.91 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:08:42,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=136268.0, ans=0.0 +2024-08-03 14:08:51,159 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.398e+01 1.179e+02 1.348e+02 1.712e+02 2.826e+02, threshold=2.695e+02, percent-clipped=0.0 +2024-08-03 14:09:07,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.97 vs. limit=15.0 +2024-08-03 14:09:12,261 INFO [train.py:1114] (0/4) Epoch 11, batch 800, loss[loss=0.1848, simple_loss=0.2582, pruned_loss=0.05563, over 13330.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2889, pruned_loss=0.063, over 2596323.97 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:09:12,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=136414.66666666666, ans=0.07 +2024-08-03 14:09:16,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=136414.66666666666, ans=0.125 +2024-08-03 14:09:18,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.45 vs. 
limit=15.0 +2024-08-03 14:09:20,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=136414.66666666666, ans=0.0 +2024-08-03 14:09:25,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=136451.33333333334, ans=0.2 +2024-08-03 14:09:37,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136488.0, ans=0.125 +2024-08-03 14:10:01,226 INFO [train.py:1114] (0/4) Epoch 11, batch 850, loss[loss=0.2183, simple_loss=0.3008, pruned_loss=0.06787, over 13305.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2894, pruned_loss=0.06324, over 2608865.13 frames. ], batch size: 40, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:10:16,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136634.66666666666, ans=0.125 +2024-08-03 14:10:20,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=136671.33333333334, ans=0.125 +2024-08-03 14:10:26,150 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.536e+01 1.181e+02 1.335e+02 1.644e+02 2.754e+02, threshold=2.669e+02, percent-clipped=1.0 +2024-08-03 14:10:43,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=136744.66666666666, ans=0.2 +2024-08-03 14:10:45,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=136781.33333333334, ans=0.025 +2024-08-03 14:10:46,300 INFO [train.py:1114] (0/4) Epoch 11, batch 900, loss[loss=0.1927, simple_loss=0.2672, pruned_loss=0.05906, over 13358.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2889, pruned_loss=0.06287, over 2611252.25 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:10:46,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=136781.33333333334, ans=0.2 +2024-08-03 14:11:11,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=136854.66666666666, ans=0.0 +2024-08-03 14:11:12,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=136854.66666666666, ans=0.0 +2024-08-03 14:11:13,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=136854.66666666666, ans=0.025 +2024-08-03 14:11:33,349 INFO [train.py:1114] (0/4) Epoch 11, batch 950, loss[loss=0.2074, simple_loss=0.2859, pruned_loss=0.06443, over 13536.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2897, pruned_loss=0.0631, over 2611235.74 frames. ], batch size: 34, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:11:47,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.50 vs. 
limit=15.0 +2024-08-03 14:11:55,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=137038.0, ans=0.125 +2024-08-03 14:11:59,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=137038.0, ans=0.125 +2024-08-03 14:12:02,585 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.938e+01 1.284e+02 1.520e+02 1.871e+02 3.091e+02, threshold=3.040e+02, percent-clipped=3.0 +2024-08-03 14:12:08,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=137074.66666666666, ans=0.125 +2024-08-03 14:12:20,809 INFO [train.py:1114] (0/4) Epoch 11, batch 1000, loss[loss=0.1865, simple_loss=0.2701, pruned_loss=0.05144, over 13363.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2909, pruned_loss=0.06372, over 2609479.80 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:12:29,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137184.66666666666, ans=0.125 +2024-08-03 14:12:39,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=137221.33333333334, ans=0.0 +2024-08-03 14:12:49,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137258.0, ans=0.125 +2024-08-03 14:12:55,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=137258.0, ans=0.2 +2024-08-03 14:13:09,255 INFO [train.py:1114] (0/4) Epoch 11, batch 1050, loss[loss=0.2199, simple_loss=0.3063, pruned_loss=0.0667, over 13588.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2898, pruned_loss=0.06314, over 2613893.02 frames. ], batch size: 39, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:13:12,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=137331.33333333334, ans=0.0 +2024-08-03 14:13:21,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=137368.0, ans=0.125 +2024-08-03 14:13:22,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=137368.0, ans=0.125 +2024-08-03 14:13:36,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.744e+01 1.143e+02 1.275e+02 1.569e+02 2.169e+02, threshold=2.550e+02, percent-clipped=0.0 +2024-08-03 14:13:41,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=137441.33333333334, ans=0.125 +2024-08-03 14:13:42,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.87 vs. limit=15.0 +2024-08-03 14:13:54,638 INFO [train.py:1114] (0/4) Epoch 11, batch 1100, loss[loss=0.1921, simple_loss=0.2791, pruned_loss=0.05256, over 13549.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2903, pruned_loss=0.06348, over 2618501.14 frames. 
], batch size: 36, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:13:58,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137514.66666666666, ans=0.125 +2024-08-03 14:14:14,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137588.0, ans=0.1 +2024-08-03 14:14:20,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137624.66666666666, ans=0.125 +2024-08-03 14:14:24,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=137624.66666666666, ans=0.125 +2024-08-03 14:14:25,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=137624.66666666666, ans=0.2 +2024-08-03 14:14:29,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=137624.66666666666, ans=0.125 +2024-08-03 14:14:30,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137661.33333333334, ans=0.1 +2024-08-03 14:14:38,442 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-08-03 14:14:39,746 INFO [train.py:1114] (0/4) Epoch 11, batch 1150, loss[loss=0.1982, simple_loss=0.2722, pruned_loss=0.06213, over 13567.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2905, pruned_loss=0.06368, over 2618118.78 frames. ], batch size: 36, lr: 1.14e-02, grad_scale: 8.0 +2024-08-03 14:15:05,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.13 vs. limit=10.0 +2024-08-03 14:15:09,312 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.634e+01 1.163e+02 1.278e+02 1.596e+02 2.243e+02, threshold=2.555e+02, percent-clipped=0.0 +2024-08-03 14:15:18,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=137844.66666666666, ans=0.04949747468305833 +2024-08-03 14:15:22,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137844.66666666666, ans=0.2 +2024-08-03 14:15:29,819 INFO [train.py:1114] (0/4) Epoch 11, batch 1200, loss[loss=0.226, simple_loss=0.3137, pruned_loss=0.06916, over 13576.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2918, pruned_loss=0.06432, over 2615314.17 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:15:42,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=137918.0, ans=0.0 +2024-08-03 14:15:49,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=137954.66666666666, ans=0.0 +2024-08-03 14:15:51,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.66 vs. 
limit=15.0 +2024-08-03 14:15:57,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=137991.33333333334, ans=0.05 +2024-08-03 14:16:11,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=138028.0, ans=0.0 +2024-08-03 14:16:14,573 INFO [train.py:1114] (0/4) Epoch 11, batch 1250, loss[loss=0.218, simple_loss=0.2959, pruned_loss=0.07008, over 13455.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.292, pruned_loss=0.0642, over 2627959.46 frames. ], batch size: 42, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:16:15,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=138064.66666666666, ans=0.125 +2024-08-03 14:16:36,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0 +2024-08-03 14:16:36,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138138.0, ans=0.125 +2024-08-03 14:16:43,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=138138.0, ans=0.1 +2024-08-03 14:16:45,550 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.814e+01 1.117e+02 1.380e+02 1.651e+02 4.437e+02, threshold=2.760e+02, percent-clipped=2.0 +2024-08-03 14:17:01,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=138211.33333333334, ans=0.125 +2024-08-03 14:17:03,497 INFO [train.py:1114] (0/4) Epoch 11, batch 1300, loss[loss=0.2395, simple_loss=0.3281, pruned_loss=0.0754, over 12930.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2911, pruned_loss=0.06349, over 2631115.79 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:17:25,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138321.33333333334, ans=0.125 +2024-08-03 14:17:25,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138321.33333333334, ans=0.125 +2024-08-03 14:17:29,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138321.33333333334, ans=0.1 +2024-08-03 14:17:39,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138394.66666666666, ans=0.125 +2024-08-03 14:17:41,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=138394.66666666666, ans=0.0 +2024-08-03 14:17:44,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138394.66666666666, ans=0.125 +2024-08-03 14:17:45,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.91 vs. limit=15.0 +2024-08-03 14:17:48,830 INFO [train.py:1114] (0/4) Epoch 11, batch 1350, loss[loss=0.1872, simple_loss=0.2682, pruned_loss=0.05309, over 13544.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2906, pruned_loss=0.06355, over 2638689.10 frames. 
], batch size: 37, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:17:52,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138431.33333333334, ans=0.1 +2024-08-03 14:17:53,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=138431.33333333334, ans=0.125 +2024-08-03 14:18:02,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=138468.0, ans=0.0 +2024-08-03 14:18:04,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=138468.0, ans=0.0 +2024-08-03 14:18:05,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=138468.0, ans=0.0 +2024-08-03 14:18:15,983 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.205e+02 1.432e+02 1.737e+02 2.785e+02, threshold=2.864e+02, percent-clipped=2.0 +2024-08-03 14:18:30,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=138578.0, ans=0.025 +2024-08-03 14:18:30,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138578.0, ans=0.1 +2024-08-03 14:18:35,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=138614.66666666666, ans=0.125 +2024-08-03 14:18:36,202 INFO [train.py:1114] (0/4) Epoch 11, batch 1400, loss[loss=0.1848, simple_loss=0.2592, pruned_loss=0.0552, over 13280.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2905, pruned_loss=0.06353, over 2642490.36 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:18:47,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=138651.33333333334, ans=0.125 +2024-08-03 14:18:47,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138651.33333333334, ans=0.125 +2024-08-03 14:18:53,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138688.0, ans=0.1 +2024-08-03 14:18:59,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.40 vs. limit=12.0 +2024-08-03 14:19:11,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=138724.66666666666, ans=0.125 +2024-08-03 14:19:23,042 INFO [train.py:1114] (0/4) Epoch 11, batch 1450, loss[loss=0.2049, simple_loss=0.3001, pruned_loss=0.05484, over 13455.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2915, pruned_loss=0.06414, over 2641579.82 frames. 
], batch size: 43, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:19:25,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=138798.0, ans=0.125 +2024-08-03 14:19:30,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138798.0, ans=0.125 +2024-08-03 14:19:34,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=138834.66666666666, ans=0.125 +2024-08-03 14:19:44,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=138871.33333333334, ans=0.125 +2024-08-03 14:19:46,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.84 vs. limit=6.0 +2024-08-03 14:19:48,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=138871.33333333334, ans=0.2 +2024-08-03 14:19:49,935 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.153e+02 1.341e+02 1.677e+02 2.779e+02, threshold=2.682e+02, percent-clipped=0.0 +2024-08-03 14:19:55,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138908.0, ans=0.1 +2024-08-03 14:20:10,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138944.66666666666, ans=0.1 +2024-08-03 14:20:11,576 INFO [train.py:1114] (0/4) Epoch 11, batch 1500, loss[loss=0.2457, simple_loss=0.3225, pruned_loss=0.08447, over 13411.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2918, pruned_loss=0.06415, over 2641725.83 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:20:28,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. limit=6.0 +2024-08-03 14:20:41,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=139018.0, ans=0.2 +2024-08-03 14:20:43,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=139054.66666666666, ans=0.0 +2024-08-03 14:20:45,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139054.66666666666, ans=0.1 +2024-08-03 14:20:46,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=139054.66666666666, ans=0.125 +2024-08-03 14:20:48,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=139054.66666666666, ans=0.125 +2024-08-03 14:20:52,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=139054.66666666666, ans=0.125 +2024-08-03 14:21:11,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.81 vs. limit=12.0 +2024-08-03 14:21:12,167 INFO [train.py:1114] (0/4) Epoch 11, batch 1550, loss[loss=0.2194, simple_loss=0.3013, pruned_loss=0.06877, over 13394.00 frames. 
], tot_loss[loss=0.2097, simple_loss=0.2913, pruned_loss=0.0641, over 2631234.41 frames. ], batch size: 41, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:21:17,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=139164.66666666666, ans=0.0 +2024-08-03 14:21:23,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=139201.33333333334, ans=0.0 +2024-08-03 14:21:26,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=139201.33333333334, ans=0.0 +2024-08-03 14:21:31,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.22 vs. limit=22.5 +2024-08-03 14:21:39,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.226e+01 1.184e+02 1.477e+02 1.893e+02 3.709e+02, threshold=2.955e+02, percent-clipped=6.0 +2024-08-03 14:21:50,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=139311.33333333334, ans=15.0 +2024-08-03 14:21:57,719 INFO [train.py:1114] (0/4) Epoch 11, batch 1600, loss[loss=0.2435, simple_loss=0.3258, pruned_loss=0.08057, over 13586.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2912, pruned_loss=0.06459, over 2623979.73 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:22:00,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.65 vs. limit=22.5 +2024-08-03 14:22:01,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=139348.0, ans=0.04949747468305833 +2024-08-03 14:22:18,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=139421.33333333334, ans=0.2 +2024-08-03 14:22:26,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.18 vs. limit=15.0 +2024-08-03 14:22:27,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139458.0, ans=0.125 +2024-08-03 14:22:35,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=139494.66666666666, ans=0.125 +2024-08-03 14:22:47,787 INFO [train.py:1114] (0/4) Epoch 11, batch 1650, loss[loss=0.2075, simple_loss=0.2991, pruned_loss=0.05795, over 13315.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2907, pruned_loss=0.06447, over 2621400.37 frames. ], batch size: 40, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:23:14,852 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.757e+01 1.241e+02 1.421e+02 1.904e+02 3.771e+02, threshold=2.842e+02, percent-clipped=2.0 +2024-08-03 14:23:15,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=139641.33333333334, ans=0.0 +2024-08-03 14:23:32,932 INFO [train.py:1114] (0/4) Epoch 11, batch 1700, loss[loss=0.1892, simple_loss=0.2622, pruned_loss=0.05809, over 13245.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2902, pruned_loss=0.06382, over 2630128.11 frames. 
], batch size: 31, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:23:33,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139714.66666666666, ans=0.125 +2024-08-03 14:23:34,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.19 vs. limit=15.0 +2024-08-03 14:23:37,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=139714.66666666666, ans=0.125 +2024-08-03 14:23:42,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=139751.33333333334, ans=0.0 +2024-08-03 14:23:45,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=139751.33333333334, ans=0.2 +2024-08-03 14:24:14,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=139861.33333333334, ans=0.125 +2024-08-03 14:24:20,122 INFO [train.py:1114] (0/4) Epoch 11, batch 1750, loss[loss=0.1847, simple_loss=0.2651, pruned_loss=0.05212, over 13566.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2903, pruned_loss=0.06413, over 2633121.47 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:24:23,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=139898.0, ans=0.0 +2024-08-03 14:24:47,662 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.098e+01 1.175e+02 1.427e+02 2.048e+02 3.147e+02, threshold=2.855e+02, percent-clipped=3.0 +2024-08-03 14:25:06,038 INFO [train.py:1114] (0/4) Epoch 11, batch 1800, loss[loss=0.2172, simple_loss=0.3017, pruned_loss=0.06634, over 13549.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2901, pruned_loss=0.06387, over 2634372.64 frames. ], batch size: 38, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:25:12,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140081.33333333334, ans=0.0 +2024-08-03 14:25:30,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-03 14:25:37,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.33 vs. limit=15.0 +2024-08-03 14:25:47,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=140154.66666666666, ans=0.0 +2024-08-03 14:25:53,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=140191.33333333334, ans=0.125 +2024-08-03 14:26:12,415 INFO [train.py:1114] (0/4) Epoch 11, batch 1850, loss[loss=0.2158, simple_loss=0.3063, pruned_loss=0.06258, over 13401.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2899, pruned_loss=0.06385, over 2637285.46 frames. 
], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:26:17,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=140264.66666666666, ans=0.025 +2024-08-03 14:26:36,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=140338.0, ans=0.2 +2024-08-03 14:26:41,000 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.650e+01 1.194e+02 1.468e+02 2.041e+02 3.479e+02, threshold=2.936e+02, percent-clipped=2.0 +2024-08-03 14:26:41,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140374.66666666666, ans=0.1 +2024-08-03 14:27:02,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=140448.0, ans=0.125 +2024-08-03 14:27:02,741 INFO [train.py:1114] (0/4) Epoch 11, batch 1900, loss[loss=0.2139, simple_loss=0.2963, pruned_loss=0.06574, over 13318.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2906, pruned_loss=0.06411, over 2639860.86 frames. ], batch size: 40, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:27:51,893 INFO [train.py:1114] (0/4) Epoch 11, batch 1950, loss[loss=0.2201, simple_loss=0.2964, pruned_loss=0.07192, over 13575.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.292, pruned_loss=0.06468, over 2646334.46 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:28:06,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140668.0, ans=0.125 +2024-08-03 14:28:17,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=140704.66666666666, ans=0.0 +2024-08-03 14:28:19,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.848e+01 1.152e+02 1.250e+02 1.577e+02 2.279e+02, threshold=2.500e+02, percent-clipped=0.0 +2024-08-03 14:28:24,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=140741.33333333334, ans=0.125 +2024-08-03 14:28:31,797 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:28:37,516 INFO [train.py:1114] (0/4) Epoch 11, batch 2000, loss[loss=0.1941, simple_loss=0.2698, pruned_loss=0.05918, over 13542.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2924, pruned_loss=0.06489, over 2636034.05 frames. ], batch size: 31, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:29:00,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=140888.0, ans=0.125 +2024-08-03 14:29:23,049 INFO [train.py:1114] (0/4) Epoch 11, batch 2050, loss[loss=0.179, simple_loss=0.2592, pruned_loss=0.04946, over 13423.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2915, pruned_loss=0.06471, over 2632671.37 frames. 
], batch size: 32, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:29:24,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=140998.0, ans=0.0 +2024-08-03 14:29:24,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=140998.0, ans=0.2 +2024-08-03 14:29:43,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141071.33333333334, ans=0.125 +2024-08-03 14:29:52,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.175e+02 1.416e+02 1.784e+02 2.828e+02, threshold=2.832e+02, percent-clipped=4.0 +2024-08-03 14:30:09,687 INFO [train.py:1114] (0/4) Epoch 11, batch 2100, loss[loss=0.1876, simple_loss=0.2785, pruned_loss=0.04832, over 13545.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2914, pruned_loss=0.06437, over 2637612.97 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:30:31,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141254.66666666666, ans=0.1 +2024-08-03 14:30:35,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.79 vs. limit=15.0 +2024-08-03 14:30:56,297 INFO [train.py:1114] (0/4) Epoch 11, batch 2150, loss[loss=0.2047, simple_loss=0.2936, pruned_loss=0.05789, over 13558.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2907, pruned_loss=0.06404, over 2646766.09 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:31:01,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141364.66666666666, ans=0.1 +2024-08-03 14:31:01,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=141364.66666666666, ans=0.125 +2024-08-03 14:31:15,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.30 vs. limit=10.0 +2024-08-03 14:31:22,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=141438.0, ans=0.125 +2024-08-03 14:31:25,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=12.0 +2024-08-03 14:31:27,805 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.207e+02 1.412e+02 1.929e+02 3.002e+02, threshold=2.825e+02, percent-clipped=1.0 +2024-08-03 14:31:33,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=141474.66666666666, ans=0.125 +2024-08-03 14:31:34,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=141474.66666666666, ans=0.1 +2024-08-03 14:31:44,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=141548.0, ans=0.0 +2024-08-03 14:31:45,120 INFO [train.py:1114] (0/4) Epoch 11, batch 2200, loss[loss=0.2151, simple_loss=0.3029, pruned_loss=0.06368, over 13395.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2904, pruned_loss=0.06371, over 2644514.44 frames. 
], batch size: 39, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:31:57,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141584.66666666666, ans=0.1 +2024-08-03 14:32:06,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141621.33333333334, ans=0.125 +2024-08-03 14:32:30,741 INFO [train.py:1114] (0/4) Epoch 11, batch 2250, loss[loss=0.2023, simple_loss=0.2913, pruned_loss=0.05664, over 13369.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.29, pruned_loss=0.06352, over 2642308.49 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:32:39,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=141768.0, ans=0.125 +2024-08-03 14:32:53,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=141804.66666666666, ans=0.125 +2024-08-03 14:32:58,698 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.445e+01 1.221e+02 1.460e+02 1.800e+02 3.358e+02, threshold=2.920e+02, percent-clipped=4.0 +2024-08-03 14:33:06,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=141878.0, ans=0.125 +2024-08-03 14:33:16,187 INFO [train.py:1114] (0/4) Epoch 11, batch 2300, loss[loss=0.1713, simple_loss=0.2517, pruned_loss=0.04543, over 13555.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2887, pruned_loss=0.06335, over 2638097.02 frames. ], batch size: 33, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:33:22,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=141914.66666666666, ans=0.125 +2024-08-03 14:33:34,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=141951.33333333334, ans=0.0 +2024-08-03 14:33:39,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.84 vs. limit=15.0 +2024-08-03 14:33:42,651 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:34:02,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=142061.33333333334, ans=0.07 +2024-08-03 14:34:05,425 INFO [train.py:1114] (0/4) Epoch 11, batch 2350, loss[loss=0.1921, simple_loss=0.2875, pruned_loss=0.04833, over 13550.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2887, pruned_loss=0.06291, over 2640132.85 frames. 
], batch size: 38, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:34:08,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=142098.0, ans=0.125 +2024-08-03 14:34:22,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142134.66666666666, ans=0.1 +2024-08-03 14:34:34,202 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.779e+01 1.150e+02 1.456e+02 1.792e+02 2.996e+02, threshold=2.912e+02, percent-clipped=1.0 +2024-08-03 14:34:37,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=142208.0, ans=10.0 +2024-08-03 14:34:38,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142208.0, ans=0.1 +2024-08-03 14:34:48,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=142244.66666666666, ans=0.0 +2024-08-03 14:34:55,327 INFO [train.py:1114] (0/4) Epoch 11, batch 2400, loss[loss=0.1925, simple_loss=0.2801, pruned_loss=0.05251, over 13526.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2894, pruned_loss=0.06315, over 2641235.45 frames. ], batch size: 35, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:35:07,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142318.0, ans=0.1 +2024-08-03 14:35:14,304 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.05 vs. limit=15.0 +2024-08-03 14:35:15,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=142354.66666666666, ans=0.0 +2024-08-03 14:35:15,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.85 vs. limit=15.0 +2024-08-03 14:35:17,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=142354.66666666666, ans=0.05 +2024-08-03 14:35:20,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=142354.66666666666, ans=0.2 +2024-08-03 14:35:40,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=142464.66666666666, ans=0.2 +2024-08-03 14:35:40,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=142464.66666666666, ans=0.125 +2024-08-03 14:35:40,807 INFO [train.py:1114] (0/4) Epoch 11, batch 2450, loss[loss=0.198, simple_loss=0.2806, pruned_loss=0.0577, over 13353.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2901, pruned_loss=0.06353, over 2630599.86 frames. 
], batch size: 37, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:35:48,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=142464.66666666666, ans=0.125 +2024-08-03 14:35:54,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=142501.33333333334, ans=0.125 +2024-08-03 14:36:03,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=142538.0, ans=0.2 +2024-08-03 14:36:08,683 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.765e+01 1.220e+02 1.473e+02 1.922e+02 3.559e+02, threshold=2.946e+02, percent-clipped=1.0 +2024-08-03 14:36:14,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=142574.66666666666, ans=0.125 +2024-08-03 14:36:25,795 INFO [train.py:1114] (0/4) Epoch 11, batch 2500, loss[loss=0.2162, simple_loss=0.2986, pruned_loss=0.06684, over 13392.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2903, pruned_loss=0.06322, over 2635532.04 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:36:36,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=142684.66666666666, ans=0.125 +2024-08-03 14:36:59,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=142758.0, ans=0.125 +2024-08-03 14:37:09,695 INFO [train.py:1114] (0/4) Epoch 11, batch 2550, loss[loss=0.1882, simple_loss=0.2631, pruned_loss=0.05668, over 13554.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2897, pruned_loss=0.06323, over 2637197.63 frames. ], batch size: 31, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:37:22,919 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:37:36,292 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.364e+01 1.185e+02 1.436e+02 1.900e+02 4.163e+02, threshold=2.872e+02, percent-clipped=5.0 +2024-08-03 14:37:47,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142978.0, ans=0.1 +2024-08-03 14:37:54,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=15.0 +2024-08-03 14:37:54,715 INFO [train.py:1114] (0/4) Epoch 11, batch 2600, loss[loss=0.2002, simple_loss=0.2833, pruned_loss=0.05852, over 13560.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2903, pruned_loss=0.06336, over 2635666.98 frames. 
], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:38:00,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=143014.66666666666, ans=0.125 +2024-08-03 14:38:02,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=143051.33333333334, ans=0.125 +2024-08-03 14:38:08,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=143051.33333333334, ans=0.125 +2024-08-03 14:38:09,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.15 vs. limit=15.0 +2024-08-03 14:38:17,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.09 vs. limit=15.0 +2024-08-03 14:38:30,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143161.33333333334, ans=0.0 +2024-08-03 14:38:38,297 INFO [train.py:1114] (0/4) Epoch 11, batch 2650, loss[loss=0.2043, simple_loss=0.2951, pruned_loss=0.05675, over 13294.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2905, pruned_loss=0.06367, over 2639080.67 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:38:53,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143234.66666666666, ans=0.1 +2024-08-03 14:38:57,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=12.0 +2024-08-03 14:39:01,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0 +2024-08-03 14:39:07,480 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.944e+01 1.197e+02 1.327e+02 1.649e+02 2.749e+02, threshold=2.654e+02, percent-clipped=0.0 +2024-08-03 14:39:08,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=143308.0, ans=0.2 +2024-08-03 14:39:13,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143344.66666666666, ans=0.0 +2024-08-03 14:39:15,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.10 vs. limit=22.5 +2024-08-03 14:39:23,395 INFO [train.py:1114] (0/4) Epoch 11, batch 2700, loss[loss=0.2363, simple_loss=0.3147, pruned_loss=0.07898, over 13543.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2908, pruned_loss=0.06384, over 2636160.19 frames. ], batch size: 40, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:39:26,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. 
limit=15.0 +2024-08-03 14:39:27,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=143381.33333333334, ans=0.0 +2024-08-03 14:39:52,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=143491.33333333334, ans=0.2 +2024-08-03 14:40:05,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=143528.0, ans=0.0 +2024-08-03 14:40:06,636 INFO [train.py:1114] (0/4) Epoch 11, batch 2750, loss[loss=0.2054, simple_loss=0.2822, pruned_loss=0.06429, over 13336.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2901, pruned_loss=0.06363, over 2634588.03 frames. ], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:17,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=143601.33333333334, ans=0.125 +2024-08-03 14:40:33,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=143674.66666666666, ans=0.2 +2024-08-03 14:40:34,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143674.66666666666, ans=0.1 +2024-08-03 14:40:34,885 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.875e+01 1.140e+02 1.438e+02 1.760e+02 3.626e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 14:40:36,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=143674.66666666666, ans=0.025 +2024-08-03 14:40:38,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143674.66666666666, ans=0.1 +2024-08-03 14:40:39,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143674.66666666666, ans=0.125 +2024-08-03 14:40:43,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.57 vs. limit=15.0 +2024-08-03 14:40:44,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=143711.33333333334, ans=0.125 +2024-08-03 14:40:51,384 INFO [train.py:1114] (0/4) Epoch 11, batch 2800, loss[loss=0.256, simple_loss=0.3254, pruned_loss=0.09325, over 9290.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2897, pruned_loss=0.06334, over 2626201.88 frames. ], batch size: 97, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:51,645 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:40:52,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143748.0, ans=0.1 +2024-08-03 14:40:53,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=12.0 +2024-08-03 14:40:59,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=143784.66666666666, ans=0.125 +2024-08-03 14:41:05,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. 
limit=10.0 +2024-08-03 14:41:36,388 INFO [train.py:1114] (0/4) Epoch 11, batch 2850, loss[loss=0.2018, simple_loss=0.2829, pruned_loss=0.06029, over 13360.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2905, pruned_loss=0.06395, over 2619633.85 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:41:38,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143931.33333333334, ans=0.1 +2024-08-03 14:41:40,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143931.33333333334, ans=0.125 +2024-08-03 14:41:54,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.14 vs. limit=12.0 +2024-08-03 14:42:04,511 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.280e+01 1.169e+02 1.350e+02 1.770e+02 2.759e+02, threshold=2.700e+02, percent-clipped=0.0 +2024-08-03 14:42:19,101 INFO [train.py:1114] (0/4) Epoch 11, batch 2900, loss[loss=0.1887, simple_loss=0.2735, pruned_loss=0.05195, over 13349.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2915, pruned_loss=0.06373, over 2630229.20 frames. ], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:42:23,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=144114.66666666666, ans=0.0 +2024-08-03 14:42:48,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-08-03 14:43:00,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=144261.33333333334, ans=0.025 +2024-08-03 14:43:00,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=144261.33333333334, ans=0.125 +2024-08-03 14:43:02,599 INFO [train.py:1114] (0/4) Epoch 11, batch 2950, loss[loss=0.1854, simple_loss=0.2655, pruned_loss=0.05268, over 13335.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2897, pruned_loss=0.06324, over 2628665.39 frames. ], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:07,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144298.0, ans=0.125 +2024-08-03 14:43:10,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.56 vs. limit=15.0 +2024-08-03 14:43:22,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144371.33333333334, ans=0.0 +2024-08-03 14:43:23,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=144371.33333333334, ans=0.0 +2024-08-03 14:43:23,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144371.33333333334, ans=0.125 +2024-08-03 14:43:25,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.61 vs. 
limit=12.0
+2024-08-03 14:43:31,430 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.788e+01 1.243e+02 1.438e+02 2.009e+02 3.771e+02, threshold=2.877e+02, percent-clipped=8.0
+2024-08-03 14:43:32,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=144408.0, ans=0.2
+2024-08-03 14:43:38,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=144444.66666666666, ans=0.0
+2024-08-03 14:43:46,213 INFO [train.py:1114] (0/4) Epoch 11, batch 3000, loss[loss=0.2004, simple_loss=0.2843, pruned_loss=0.05831, over 13536.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2891, pruned_loss=0.06289, over 2628646.08 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 16.0
+2024-08-03 14:43:46,214 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 14:43:56,412 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1797, simple_loss=0.2796, pruned_loss=0.03992, over 944034.00 frames.
+2024-08-03 14:43:56,413 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 14:43:56,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144481.33333333334, ans=0.1
+2024-08-03 14:44:03,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.42 vs. limit=15.0
+2024-08-03 14:44:14,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=144554.66666666666, ans=0.125
+2024-08-03 14:44:16,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.42 vs. limit=15.0
+2024-08-03 14:44:20,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=144554.66666666666, ans=0.2
+2024-08-03 14:44:22,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=144591.33333333334, ans=0.04949747468305833
+2024-08-03 14:44:23,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=144591.33333333334, ans=0.5
+2024-08-03 14:44:23,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=144591.33333333334, ans=0.125
+2024-08-03 14:44:26,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144591.33333333334, ans=0.1
+2024-08-03 14:44:26,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.96 vs. limit=10.0
+2024-08-03 14:44:39,728 INFO [train.py:1114] (0/4) Epoch 11, batch 3050, loss[loss=0.1777, simple_loss=0.2672, pruned_loss=0.04404, over 13537.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2897, pruned_loss=0.06323, over 2626154.42 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0
+2024-08-03 14:44:45,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=144664.66666666666, ans=0.125
+2024-08-03 14:44:57,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=144738.0, ans=0.0
+2024-08-03 14:45:03,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=144738.0, ans=0.125
+2024-08-03 14:45:04,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=144738.0, ans=0.125
+2024-08-03 14:45:05,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=144774.66666666666, ans=0.125
+2024-08-03 14:45:08,195 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.491e+01 1.118e+02 1.274e+02 1.524e+02 2.549e+02, threshold=2.548e+02, percent-clipped=0.0
+2024-08-03 14:45:18,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=144811.33333333334, ans=0.07
+2024-08-03 14:45:22,757 INFO [train.py:1114] (0/4) Epoch 11, batch 3100, loss[loss=0.2264, simple_loss=0.3096, pruned_loss=0.07156, over 13269.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2886, pruned_loss=0.06245, over 2625917.87 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0
+2024-08-03 14:45:30,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.39 vs. limit=15.0
+2024-08-03 14:45:32,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.54 vs. limit=12.0
+2024-08-03 14:45:34,239 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:45:54,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=144958.0, ans=0.125
+2024-08-03 14:45:56,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=144958.0, ans=0.025
+2024-08-03 14:45:56,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144958.0, ans=0.1
+2024-08-03 14:45:58,212 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.06 vs. limit=15.0
+2024-08-03 14:46:05,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144994.66666666666, ans=0.0
+2024-08-03 14:46:07,301 INFO [train.py:1114] (0/4) Epoch 11, batch 3150, loss[loss=0.2207, simple_loss=0.3055, pruned_loss=0.06792, over 12992.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2887, pruned_loss=0.06235, over 2628235.96 frames. ], batch size: 48, lr: 1.12e-02, grad_scale: 16.0
+2024-08-03 14:46:09,253 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:46:09,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.81 vs. limit=15.0
+2024-08-03 14:46:09,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.96 vs. limit=15.0
+2024-08-03 14:46:19,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=145068.0, ans=0.125
+2024-08-03 14:46:19,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=145068.0, ans=0.125
+2024-08-03 14:46:25,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.53 vs. limit=15.0
+2024-08-03 14:46:25,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0
+2024-08-03 14:46:36,199 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.294e+01 1.169e+02 1.363e+02 1.667e+02 3.402e+02, threshold=2.726e+02, percent-clipped=3.0
+2024-08-03 14:46:45,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0
+2024-08-03 14:46:46,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=145178.0, ans=0.0
+2024-08-03 14:46:50,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145214.66666666666, ans=0.1
+2024-08-03 14:46:51,167 INFO [train.py:1114] (0/4) Epoch 11, batch 3200, loss[loss=0.2103, simple_loss=0.2898, pruned_loss=0.0654, over 13538.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2885, pruned_loss=0.06232, over 2633562.58 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 32.0
+2024-08-03 14:46:54,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=145214.66666666666, ans=0.2
+2024-08-03 14:47:07,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=145251.33333333334, ans=0.0
+2024-08-03 14:47:14,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145288.0, ans=0.1
+2024-08-03 14:47:17,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0
+2024-08-03 14:47:29,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=145361.33333333334, ans=0.125
+2024-08-03 14:47:35,716 INFO [train.py:1114] (0/4) Epoch 11, batch 3250, loss[loss=0.2142, simple_loss=0.2998, pruned_loss=0.06432, over 13389.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2901, pruned_loss=0.06291, over 2638799.25 frames. ], batch size: 38, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:47:52,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=145471.33333333334, ans=0.0
+2024-08-03 14:47:58,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=145471.33333333334, ans=0.5
+2024-08-03 14:48:04,046 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.409e+01 1.172e+02 1.394e+02 1.962e+02 3.481e+02, threshold=2.788e+02, percent-clipped=6.0
+2024-08-03 14:48:15,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145544.66666666666, ans=0.1
+2024-08-03 14:48:18,823 INFO [train.py:1114] (0/4) Epoch 11, batch 3300, loss[loss=0.2112, simple_loss=0.2945, pruned_loss=0.06394, over 12853.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2887, pruned_loss=0.06224, over 2640137.33 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:48:19,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=145581.33333333334, ans=0.0
+2024-08-03 14:48:25,873 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:48:38,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=145654.66666666666, ans=0.0
+2024-08-03 14:48:42,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=145654.66666666666, ans=0.5
+2024-08-03 14:48:53,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=145728.0, ans=0.0
+2024-08-03 14:48:59,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.94 vs. limit=22.5
+2024-08-03 14:49:07,249 INFO [train.py:1114] (0/4) Epoch 11, batch 3350, loss[loss=0.2134, simple_loss=0.2925, pruned_loss=0.06708, over 13261.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2899, pruned_loss=0.06303, over 2630644.08 frames. ], batch size: 49, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:49:17,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.43 vs. limit=15.0
+2024-08-03 14:49:22,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0
+2024-08-03 14:49:35,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=145838.0, ans=0.0
+2024-08-03 14:49:37,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145838.0, ans=0.125
+2024-08-03 14:49:39,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=145838.0, ans=0.125
+2024-08-03 14:49:46,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145874.66666666666, ans=0.125
+2024-08-03 14:49:48,375 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.672e+01 1.163e+02 1.273e+02 1.475e+02 2.297e+02, threshold=2.547e+02, percent-clipped=0.0
+2024-08-03 14:50:46,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.91 vs. limit=15.0
+2024-08-03 14:50:53,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145911.33333333334, ans=0.125
+2024-08-03 14:50:56,093 INFO [train.py:1114] (0/4) Epoch 11, batch 3400, loss[loss=0.1757, simple_loss=0.2551, pruned_loss=0.0481, over 13524.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2897, pruned_loss=0.06324, over 2626352.28 frames. ], batch size: 31, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:51:40,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0
+2024-08-03 14:51:52,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146094.66666666666, ans=0.125
+2024-08-03 14:51:53,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.62 vs. limit=10.0
+2024-08-03 14:51:57,624 INFO [train.py:1114] (0/4) Epoch 11, batch 3450, loss[loss=0.2301, simple_loss=0.3098, pruned_loss=0.07519, over 12932.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2896, pruned_loss=0.06307, over 2628983.85 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:52:10,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=146168.0, ans=0.2
+2024-08-03 14:52:18,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=146204.66666666666, ans=0.025
+2024-08-03 14:52:19,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.68 vs. limit=22.5
+2024-08-03 14:52:28,429 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.083e+01 1.190e+02 1.396e+02 1.805e+02 2.896e+02, threshold=2.793e+02, percent-clipped=1.0
+2024-08-03 14:52:44,383 INFO [train.py:1114] (0/4) Epoch 11, batch 3500, loss[loss=0.1758, simple_loss=0.262, pruned_loss=0.04482, over 13528.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2885, pruned_loss=0.06281, over 2631275.24 frames. ], batch size: 34, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:52:58,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=146351.33333333334, ans=0.125
+2024-08-03 14:53:33,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=146388.0, ans=0.125
+2024-08-03 14:53:46,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=146388.0, ans=0.05
+2024-08-03 14:53:48,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146424.66666666666, ans=0.1
+2024-08-03 14:53:53,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=146424.66666666666, ans=0.125
+2024-08-03 14:53:55,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=146461.33333333334, ans=0.0
+2024-08-03 14:54:04,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=146461.33333333334, ans=0.125
+2024-08-03 14:54:41,905 INFO [train.py:1114] (0/4) Epoch 11, batch 3550, loss[loss=0.2124, simple_loss=0.2943, pruned_loss=0.06521, over 12736.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2915, pruned_loss=0.06423, over 2630163.75 frames. ], batch size: 59, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:54:46,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146498.0, ans=0.0
+2024-08-03 14:54:48,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.98 vs. limit=10.0
+2024-08-03 14:55:08,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146608.0, ans=0.0
+2024-08-03 14:55:11,615 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.206e+02 1.319e+02 1.556e+02 2.603e+02, threshold=2.638e+02, percent-clipped=0.0
+2024-08-03 14:55:22,410 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-40000.pt
+2024-08-03 14:55:27,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=146681.33333333334, ans=0.0
+2024-08-03 14:55:28,464 INFO [train.py:1114] (0/4) Epoch 11, batch 3600, loss[loss=0.2243, simple_loss=0.3004, pruned_loss=0.07407, over 9085.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2963, pruned_loss=0.06888, over 2485952.42 frames. ], batch size: 96, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:55:28,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146681.33333333334, ans=0.125
+2024-08-03 14:55:29,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146681.33333333334, ans=0.1
+2024-08-03 14:55:36,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=146718.0, ans=0.0
+2024-08-03 14:55:41,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.60 vs. limit=15.0
+2024-08-03 14:55:53,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.83 vs. limit=22.5
+2024-08-03 14:56:03,422 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-11.pt
+2024-08-03 14:56:58,468 INFO [train.py:1114] (0/4) Epoch 12, batch 0, loss[loss=0.1803, simple_loss=0.2609, pruned_loss=0.04985, over 13365.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2609, pruned_loss=0.04985, over 13365.00 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 32.0
+2024-08-03 14:56:58,469 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 14:57:09,821 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.1815, simple_loss=0.2827, pruned_loss=0.04015, over 944034.00 frames.
+2024-08-03 14:57:09,821 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 14:57:11,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=146828.0, ans=0.2
+2024-08-03 14:57:47,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146974.66666666666, ans=0.0
+2024-08-03 14:57:49,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.252e+02 1.419e+02 1.568e+02 2.905e+02, threshold=2.838e+02, percent-clipped=2.0
+2024-08-03 14:57:55,181 INFO [train.py:1114] (0/4) Epoch 12, batch 50, loss[loss=0.1774, simple_loss=0.2551, pruned_loss=0.04983, over 13409.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.293, pruned_loss=0.06501, over 578741.85 frames. ], batch size: 32, lr: 1.06e-02, grad_scale: 32.0
+2024-08-03 14:57:55,433 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:57:58,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=147011.33333333334, ans=0.2
+2024-08-03 14:58:00,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=147011.33333333334, ans=0.07
+2024-08-03 14:58:04,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147048.0, ans=0.1
+2024-08-03 14:58:06,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=147048.0, ans=0.0
+2024-08-03 14:58:11,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147048.0, ans=0.125
+2024-08-03 14:58:25,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.46 vs. limit=15.0
+2024-08-03 14:58:38,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=147158.0, ans=0.0
+2024-08-03 14:58:40,615 INFO [train.py:1114] (0/4) Epoch 12, batch 100, loss[loss=0.2095, simple_loss=0.2872, pruned_loss=0.06594, over 13543.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2938, pruned_loss=0.0648, over 1026994.45 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0
+2024-08-03 14:58:54,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=147231.33333333334, ans=0.125
+2024-08-03 14:58:57,854 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:59:00,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.93 vs. limit=15.0
+2024-08-03 14:59:22,273 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.865e+01 1.262e+02 1.549e+02 1.868e+02 3.478e+02, threshold=3.099e+02, percent-clipped=1.0
+2024-08-03 14:59:22,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=147341.33333333334, ans=0.2
+2024-08-03 14:59:27,468 INFO [train.py:1114] (0/4) Epoch 12, batch 150, loss[loss=0.1771, simple_loss=0.2553, pruned_loss=0.04949, over 13421.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2907, pruned_loss=0.06352, over 1387776.87 frames. ], batch size: 32, lr: 1.06e-02, grad_scale: 32.0
+2024-08-03 14:59:27,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.58 vs. limit=10.0
+2024-08-03 14:59:38,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147414.66666666666, ans=0.1
+2024-08-03 14:59:48,502 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:59:57,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=147488.0, ans=0.025
+2024-08-03 15:00:00,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=147488.0, ans=0.125
+2024-08-03 15:00:17,997 INFO [train.py:1114] (0/4) Epoch 12, batch 200, loss[loss=0.2443, simple_loss=0.3247, pruned_loss=0.08189, over 12364.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2885, pruned_loss=0.06254, over 1666012.28 frames. ], batch size: 58, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:00:19,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=147561.33333333334, ans=0.125
+2024-08-03 15:00:40,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147634.66666666666, ans=0.1
+2024-08-03 15:00:44,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.91 vs. limit=22.5
+2024-08-03 15:00:45,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=147671.33333333334, ans=0.0
+2024-08-03 15:00:55,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.39 vs. limit=22.5
+2024-08-03 15:00:56,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147708.0, ans=0.125
+2024-08-03 15:00:57,971 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.496e+01 1.132e+02 1.278e+02 1.609e+02 2.884e+02, threshold=2.557e+02, percent-clipped=0.0
+2024-08-03 15:01:02,557 INFO [train.py:1114] (0/4) Epoch 12, batch 250, loss[loss=0.2278, simple_loss=0.3007, pruned_loss=0.07747, over 13312.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2885, pruned_loss=0.06236, over 1884793.48 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:01:10,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147744.66666666666, ans=0.125
+2024-08-03 15:01:17,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=147781.33333333334, ans=0.2
+2024-08-03 15:01:20,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0
+2024-08-03 15:01:22,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=147818.0, ans=0.05
+2024-08-03 15:01:22,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=147818.0, ans=0.125
+2024-08-03 15:01:29,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=147854.66666666666, ans=0.0
+2024-08-03 15:01:36,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=147854.66666666666, ans=0.125
+2024-08-03 15:01:48,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.86 vs. limit=15.0
+2024-08-03 15:01:48,809 INFO [train.py:1114] (0/4) Epoch 12, batch 300, loss[loss=0.2243, simple_loss=0.3085, pruned_loss=0.07012, over 13460.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2879, pruned_loss=0.0623, over 2052443.09 frames. ], batch size: 42, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:01:52,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=147928.0, ans=0.125
+2024-08-03 15:01:56,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.11 vs. limit=15.0
+2024-08-03 15:02:00,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.04 vs. limit=15.0
+2024-08-03 15:02:29,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.695e+01 1.130e+02 1.266e+02 1.669e+02 3.180e+02, threshold=2.531e+02, percent-clipped=2.0
+2024-08-03 15:02:32,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.77 vs. limit=15.0
+2024-08-03 15:02:34,180 INFO [train.py:1114] (0/4) Epoch 12, batch 350, loss[loss=0.1933, simple_loss=0.2714, pruned_loss=0.05765, over 13555.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2891, pruned_loss=0.06272, over 2183119.67 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:02:35,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0
+2024-08-03 15:02:37,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=148111.33333333334, ans=0.125
+2024-08-03 15:03:02,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.61 vs. limit=22.5
+2024-08-03 15:03:15,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=148258.0, ans=0.0
+2024-08-03 15:03:15,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=148258.0, ans=0.0
+2024-08-03 15:03:23,951 INFO [train.py:1114] (0/4) Epoch 12, batch 400, loss[loss=0.189, simple_loss=0.2807, pruned_loss=0.0486, over 13361.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2884, pruned_loss=0.06209, over 2287034.44 frames. ], batch size: 37, lr: 1.06e-02, grad_scale: 32.0
+2024-08-03 15:03:25,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.85 vs. limit=22.5
+2024-08-03 15:04:05,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=148441.33333333334, ans=0.125
+2024-08-03 15:04:10,400 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.063e+01 1.155e+02 1.349e+02 1.691e+02 2.771e+02, threshold=2.698e+02, percent-clipped=3.0
+2024-08-03 15:04:30,842 INFO [train.py:1114] (0/4) Epoch 12, batch 450, loss[loss=0.1963, simple_loss=0.2817, pruned_loss=0.05547, over 13549.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2881, pruned_loss=0.06216, over 2360325.68 frames. ], batch size: 38, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:04:35,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=148478.0, ans=0.0
+2024-08-03 15:05:08,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=148624.66666666666, ans=0.0
+2024-08-03 15:05:11,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=148624.66666666666, ans=0.025
+2024-08-03 15:05:16,103 INFO [train.py:1114] (0/4) Epoch 12, batch 500, loss[loss=0.2264, simple_loss=0.3078, pruned_loss=0.07252, over 13430.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2871, pruned_loss=0.06139, over 2425374.63 frames. ], batch size: 43, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:05:21,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.10 vs. limit=22.5
+2024-08-03 15:05:23,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148661.33333333334, ans=0.0
+2024-08-03 15:05:25,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148661.33333333334, ans=0.125
+2024-08-03 15:05:38,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148734.66666666666, ans=0.1
+2024-08-03 15:05:45,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.31 vs. limit=6.0
+2024-08-03 15:05:48,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0
+2024-08-03 15:05:59,420 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.174e+02 1.357e+02 1.973e+02 3.338e+02, threshold=2.713e+02, percent-clipped=6.0
+2024-08-03 15:06:01,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=148808.0, ans=0.05
+2024-08-03 15:06:01,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.87 vs. limit=10.0
+2024-08-03 15:06:02,938 INFO [train.py:1114] (0/4) Epoch 12, batch 550, loss[loss=0.2082, simple_loss=0.2986, pruned_loss=0.05892, over 13038.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2868, pruned_loss=0.06098, over 2467559.22 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:06:09,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148844.66666666666, ans=0.1
+2024-08-03 15:06:14,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148881.33333333334, ans=0.125
+2024-08-03 15:06:25,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=148918.0, ans=0.0
+2024-08-03 15:06:31,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=148954.66666666666, ans=0.04949747468305833
+2024-08-03 15:06:35,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.75 vs. limit=15.0
+2024-08-03 15:06:37,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148954.66666666666, ans=0.125
+2024-08-03 15:06:48,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148991.33333333334, ans=0.0
+2024-08-03 15:06:50,068 INFO [train.py:1114] (0/4) Epoch 12, batch 600, loss[loss=0.2094, simple_loss=0.2933, pruned_loss=0.06277, over 13324.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2871, pruned_loss=0.06087, over 2507357.78 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:06:51,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=149028.0, ans=0.5
+2024-08-03 15:06:51,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=149028.0, ans=0.0
+2024-08-03 15:06:54,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=149028.0, ans=0.0
+2024-08-03 15:07:08,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.55 vs. limit=6.0
+2024-08-03 15:07:13,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.44 vs. limit=22.5
+2024-08-03 15:07:15,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=149101.33333333334, ans=0.125
+2024-08-03 15:07:33,071 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.645e+01 1.231e+02 1.471e+02 1.906e+02 4.499e+02, threshold=2.942e+02, percent-clipped=14.0
+2024-08-03 15:07:36,544 INFO [train.py:1114] (0/4) Epoch 12, batch 650, loss[loss=0.1832, simple_loss=0.2711, pruned_loss=0.04761, over 13541.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2864, pruned_loss=0.06058, over 2543129.08 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:07:37,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=149211.33333333334, ans=0.2
+2024-08-03 15:07:41,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.57 vs. limit=10.0
+2024-08-03 15:07:47,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=149248.0, ans=0.125
+2024-08-03 15:07:47,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.15 vs. limit=15.0
+2024-08-03 15:07:52,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149248.0, ans=0.125
+2024-08-03 15:08:14,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.54 vs. limit=15.0
+2024-08-03 15:08:24,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=149358.0, ans=0.125
+2024-08-03 15:08:25,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149394.66666666666, ans=0.1
+2024-08-03 15:08:26,303 INFO [train.py:1114] (0/4) Epoch 12, batch 700, loss[loss=0.1944, simple_loss=0.2732, pruned_loss=0.0578, over 13533.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2867, pruned_loss=0.06065, over 2567101.40 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:08:26,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149394.66666666666, ans=0.125
+2024-08-03 15:08:28,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149394.66666666666, ans=0.125
+2024-08-03 15:08:42,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149431.33333333334, ans=0.125
+2024-08-03 15:09:06,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149541.33333333334, ans=0.0
+2024-08-03 15:09:07,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.082e+01 1.154e+02 1.315e+02 1.690e+02 3.404e+02, threshold=2.630e+02, percent-clipped=2.0
+2024-08-03 15:09:09,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=149541.33333333334, ans=0.025
+2024-08-03 15:09:11,196 INFO [train.py:1114] (0/4) Epoch 12, batch 750, loss[loss=0.2084, simple_loss=0.2959, pruned_loss=0.06049, over 13360.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2864, pruned_loss=0.06064, over 2584727.57 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:09:13,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149578.0, ans=0.125
+2024-08-03 15:09:20,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149614.66666666666, ans=0.125
+2024-08-03 15:09:38,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=149688.0, ans=0.0
+2024-08-03 15:09:41,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0
+2024-08-03 15:09:44,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=149688.0, ans=0.05
+2024-08-03 15:09:50,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.01 vs. limit=10.0
+2024-08-03 15:09:56,694 INFO [train.py:1114] (0/4) Epoch 12, batch 800, loss[loss=0.2019, simple_loss=0.2748, pruned_loss=0.06456, over 13348.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.287, pruned_loss=0.06115, over 2598770.95 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:10:03,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149761.33333333334, ans=0.125
+2024-08-03 15:10:03,923 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:10:08,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149798.0, ans=0.125
+2024-08-03 15:10:12,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149798.0, ans=0.125
+2024-08-03 15:10:22,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=149834.66666666666, ans=0.0
+2024-08-03 15:10:23,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=149834.66666666666, ans=0.125
+2024-08-03 15:10:41,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.822e+01 1.144e+02 1.285e+02 1.607e+02 2.448e+02, threshold=2.570e+02, percent-clipped=0.0
+2024-08-03 15:10:45,411 INFO [train.py:1114] (0/4) Epoch 12, batch 850, loss[loss=0.22, simple_loss=0.3168, pruned_loss=0.06156, over 13302.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2864, pruned_loss=0.06093, over 2611119.96 frames. ], batch size: 40, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:10:47,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.72 vs. limit=22.5
+2024-08-03 15:11:09,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=150018.0, ans=0.0
+2024-08-03 15:11:20,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=150054.66666666666, ans=0.05
+2024-08-03 15:11:20,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150054.66666666666, ans=0.125
+2024-08-03 15:11:26,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=150091.33333333334, ans=0.04949747468305833
+2024-08-03 15:11:34,331 INFO [train.py:1114] (0/4) Epoch 12, batch 900, loss[loss=0.1753, simple_loss=0.2602, pruned_loss=0.04516, over 13341.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2866, pruned_loss=0.06099, over 2613433.60 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:11:45,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150164.66666666666, ans=0.0
+2024-08-03 15:11:48,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=150164.66666666666, ans=15.0
+2024-08-03 15:11:57,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150201.33333333334, ans=0.1
+2024-08-03 15:12:14,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=150274.66666666666, ans=0.0
+2024-08-03 15:12:15,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.906e+01 1.165e+02 1.316e+02 1.823e+02 3.379e+02, threshold=2.632e+02, percent-clipped=3.0
+2024-08-03 15:12:19,459 INFO [train.py:1114] (0/4) Epoch 12, batch 950, loss[loss=0.1937, simple_loss=0.277, pruned_loss=0.05517, over 13530.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2869, pruned_loss=0.06112, over 2614082.29 frames. ], batch size: 34, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:12:54,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=150421.33333333334, ans=0.07
+2024-08-03 15:13:02,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=150458.0, ans=0.125
+2024-08-03 15:13:03,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150458.0, ans=0.1
+2024-08-03 15:13:05,520 INFO [train.py:1114] (0/4) Epoch 12, batch 1000, loss[loss=0.1859, simple_loss=0.2691, pruned_loss=0.05135, over 13372.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2879, pruned_loss=0.06168, over 2613153.91 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:13:06,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=150494.66666666666, ans=0.0
+2024-08-03 15:13:20,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0
+2024-08-03 15:13:23,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=150568.0, ans=0.125
+2024-08-03 15:13:28,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=150568.0, ans=0.125
+2024-08-03 15:13:34,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.51 vs. limit=10.0
+2024-08-03 15:13:44,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150641.33333333334, ans=0.125
+2024-08-03 15:13:47,784 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.724e+01 1.124e+02 1.317e+02 1.509e+02 2.289e+02, threshold=2.634e+02, percent-clipped=0.0
+2024-08-03 15:13:52,458 INFO [train.py:1114] (0/4) Epoch 12, batch 1050, loss[loss=0.2141, simple_loss=0.3044, pruned_loss=0.06196, over 13576.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2878, pruned_loss=0.0617, over 2616289.33 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:13:53,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=150678.0, ans=0.125
+2024-08-03 15:14:14,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=150751.33333333334, ans=0.2
+2024-08-03 15:14:15,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=150751.33333333334, ans=0.125
+2024-08-03 15:14:17,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150751.33333333334, ans=0.0
+2024-08-03 15:14:18,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150751.33333333334, ans=0.1
+2024-08-03 15:14:20,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=150788.0, ans=0.125
+2024-08-03 15:14:23,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.12 vs. limit=15.0
+2024-08-03 15:14:32,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=150824.66666666666, ans=0.2
+2024-08-03 15:14:39,500 INFO [train.py:1114] (0/4) Epoch 12, batch 1100, loss[loss=0.1781, simple_loss=0.2642, pruned_loss=0.04599, over 13564.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2859, pruned_loss=0.06065, over 2619607.61 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:14:52,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=15.0
+2024-08-03 15:15:07,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.64 vs. limit=6.0
+2024-08-03 15:15:14,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.09 vs. limit=22.5
+2024-08-03 15:15:25,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151008.0, ans=0.125
+2024-08-03 15:15:26,422 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.180e+02 1.359e+02 1.591e+02 2.320e+02, threshold=2.719e+02, percent-clipped=0.0
+2024-08-03 15:15:26,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=151008.0, ans=0.125
+2024-08-03 15:15:27,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=151008.0, ans=0.0
+2024-08-03 15:15:29,092 INFO [train.py:1114] (0/4) Epoch 12, batch 1150, loss[loss=0.2008, simple_loss=0.2843, pruned_loss=0.05869, over 13584.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.286, pruned_loss=0.06082, over 2618670.83 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:15:30,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.46 vs. limit=22.5
+2024-08-03 15:15:38,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151081.33333333334, ans=0.1
+2024-08-03 15:15:41,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=151081.33333333334, ans=0.2
+2024-08-03 15:15:47,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151118.0, ans=0.1
+2024-08-03 15:15:50,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=151118.0, ans=0.04949747468305833
+2024-08-03 15:15:50,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=151118.0, ans=0.5
+2024-08-03 15:15:55,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=151118.0, ans=0.05
+2024-08-03 15:16:15,299 INFO [train.py:1114] (0/4) Epoch 12, batch 1200, loss[loss=0.2155, simple_loss=0.3017, pruned_loss=0.06463, over 13576.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.287, pruned_loss=0.06084, over 2616015.64 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:16:50,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151374.66666666666, ans=0.1
+2024-08-03 15:16:58,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.940e+01 1.145e+02 1.336e+02 1.664e+02 3.085e+02, threshold=2.672e+02, percent-clipped=3.0
+2024-08-03 15:17:00,265 INFO [train.py:1114] (0/4) Epoch 12, batch 1250, loss[loss=0.216, simple_loss=0.293, pruned_loss=0.06949, over 13426.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2875, pruned_loss=0.06092, over 2628302.80 frames. ], batch size: 42, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:17:10,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=151448.0, ans=0.2
+2024-08-03 15:17:17,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151448.0, ans=0.1
+2024-08-03 15:17:22,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151484.66666666666, ans=0.1
+2024-08-03 15:17:49,355 INFO [train.py:1114] (0/4) Epoch 12, batch 1300, loss[loss=0.2169, simple_loss=0.303, pruned_loss=0.06542, over 12915.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2872, pruned_loss=0.06086, over 2631458.14 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:17:53,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=151594.66666666666, ans=0.125
+2024-08-03 15:18:00,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151631.33333333334, ans=0.0
+2024-08-03 15:18:09,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=151668.0, ans=0.025
+2024-08-03 15:18:34,754 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.477e+01 1.244e+02 1.482e+02 1.823e+02 3.057e+02, threshold=2.965e+02, percent-clipped=1.0
+2024-08-03 15:18:36,580 INFO [train.py:1114] (0/4) Epoch 12, batch 1350, loss[loss=0.2117, simple_loss=0.2973, pruned_loss=0.06307, over 13551.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2864, pruned_loss=0.0603, over 2639456.95 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:18:48,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151814.66666666666, ans=0.1
+2024-08-03 15:18:50,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.04 vs. limit=15.0
+2024-08-03 15:18:50,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151814.66666666666, ans=0.125
+2024-08-03 15:19:00,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=151851.33333333334, ans=0.05
+2024-08-03 15:19:01,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151851.33333333334, ans=0.125
+2024-08-03 15:19:02,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=12.0
+2024-08-03 15:19:03,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=151851.33333333334, ans=0.125
+2024-08-03 15:19:07,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=151888.0, ans=0.0
+2024-08-03 15:19:10,134 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.27 vs. limit=15.0
+2024-08-03 15:19:25,349 INFO [train.py:1114] (0/4) Epoch 12, batch 1400, loss[loss=0.2046, simple_loss=0.2691, pruned_loss=0.07001, over 13259.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2859, pruned_loss=0.06001, over 2643204.94 frames. ], batch size: 31, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:19:26,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=151961.33333333334, ans=0.025
+2024-08-03 15:19:31,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151961.33333333334, ans=0.1
+2024-08-03 15:19:33,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=151961.33333333334, ans=0.125
+2024-08-03 15:19:34,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=151998.0, ans=0.125
+2024-08-03 15:19:43,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.76 vs. limit=15.0
+2024-08-03 15:19:46,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=152034.66666666666, ans=0.125
+2024-08-03 15:19:47,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.66 vs. limit=22.5
+2024-08-03 15:20:05,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152108.0, ans=0.1
+2024-08-03 15:20:09,506 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.408e+01 1.177e+02 1.443e+02 1.884e+02 3.508e+02, threshold=2.887e+02, percent-clipped=1.0
+2024-08-03 15:20:11,317 INFO [train.py:1114] (0/4) Epoch 12, batch 1450, loss[loss=0.2147, simple_loss=0.3041, pruned_loss=0.06268, over 13436.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2871, pruned_loss=0.06097, over 2641603.41 frames. ], batch size: 43, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:20:11,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152144.66666666666, ans=0.1
+2024-08-03 15:20:22,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152181.33333333334, ans=0.1
+2024-08-03 15:20:32,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=152218.0, ans=0.125
+2024-08-03 15:20:43,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=152254.66666666666, ans=0.0
+2024-08-03 15:20:50,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=152291.33333333334, ans=0.2
+2024-08-03 15:20:52,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=152291.33333333334, ans=0.07
+2024-08-03 15:20:56,430 INFO [train.py:1114] (0/4) Epoch 12, batch 1500, loss[loss=0.1767, simple_loss=0.2673, pruned_loss=0.04299, over 13412.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2874, pruned_loss=0.06111, over 2641367.18 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:21:14,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=152364.66666666666, ans=0.125
+2024-08-03 15:21:18,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=152401.33333333334, ans=0.0
+2024-08-03 15:21:19,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=152401.33333333334, ans=10.0
+2024-08-03 15:21:29,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152438.0, ans=0.125
+2024-08-03 15:21:38,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=152474.66666666666, ans=0.025
+2024-08-03 15:21:43,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.575e+01 1.302e+02 1.536e+02 1.991e+02 2.999e+02, threshold=3.072e+02, percent-clipped=1.0
+2024-08-03 15:21:47,445 INFO [train.py:1114] (0/4) Epoch 12, batch 1550, loss[loss=0.2144, simple_loss=0.3049, pruned_loss=0.062, over 13389.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.288, pruned_loss=0.06154, over 2630737.79 frames. ], batch size: 41, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:21:48,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.67 vs. limit=10.0
+2024-08-03 15:21:57,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=152548.0, ans=0.125
+2024-08-03 15:22:09,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152584.66666666666, ans=0.125
+2024-08-03 15:22:14,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152584.66666666666, ans=0.1
+2024-08-03 15:22:27,252 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:22:28,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.83 vs. limit=15.0
+2024-08-03 15:22:32,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=152658.0, ans=0.5
+2024-08-03 15:22:36,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152658.0, ans=0.1
+2024-08-03 15:22:38,643 INFO [train.py:1114] (0/4) Epoch 12, batch 1600, loss[loss=0.2117, simple_loss=0.301, pruned_loss=0.06123, over 13568.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2876, pruned_loss=0.06154, over 2623680.91 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 32.0
+2024-08-03 15:22:44,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152694.66666666666, ans=0.1
+2024-08-03 15:22:45,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=152694.66666666666, ans=0.125
+2024-08-03 15:23:01,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=152768.0, ans=0.025
+2024-08-03 15:23:03,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152768.0, ans=0.1
+2024-08-03 15:23:15,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=152841.33333333334, ans=0.07
+2024-08-03 15:23:22,360 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.191e+02 1.409e+02 1.604e+02 3.528e+02, threshold=2.818e+02, percent-clipped=1.0
+2024-08-03 15:23:24,247 INFO [train.py:1114] (0/4) Epoch 12, batch 1650, loss[loss=0.2173, simple_loss=0.3051, pruned_loss=0.06477, over 13314.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2879, pruned_loss=0.06214, over 2620582.18 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 32.0
+2024-08-03 15:23:30,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=152878.0, ans=0.125
+2024-08-03 15:23:30,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=152878.0, ans=0.025
+2024-08-03 15:23:33,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.50 vs. limit=15.0
+2024-08-03 15:23:36,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=152914.66666666666, ans=0.0
+2024-08-03 15:23:45,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=152951.33333333334, ans=0.0
+2024-08-03 15:23:45,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=152951.33333333334, ans=0.2
+2024-08-03 15:23:47,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152951.33333333334, ans=0.1
+2024-08-03 15:23:52,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=152988.0, ans=0.125
+2024-08-03 15:23:53,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=152988.0, ans=0.0
+2024-08-03 15:23:57,893 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:23:59,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=153024.66666666666, ans=0.125
+2024-08-03 15:24:04,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153024.66666666666, ans=0.1
+2024-08-03 15:24:06,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=153024.66666666666, ans=0.0
+2024-08-03 15:24:06,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153024.66666666666, ans=0.1
+2024-08-03 15:24:09,386 INFO [train.py:1114] (0/4) Epoch 12, batch 1700, loss[loss=0.1827, simple_loss=0.259, pruned_loss=0.05325, over 13269.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2871, pruned_loss=0.06116, over 2629557.94 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 32.0
+2024-08-03 15:24:19,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.76 vs. limit=12.0
+2024-08-03 15:24:33,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.86 vs. limit=15.0
+2024-08-03 15:24:57,295 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.203e+01 1.265e+02 1.510e+02 1.884e+02 3.458e+02, threshold=3.019e+02, percent-clipped=4.0
+2024-08-03 15:24:58,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.46 vs. limit=15.0
+2024-08-03 15:24:58,261 INFO [train.py:1114] (0/4) Epoch 12, batch 1750, loss[loss=0.1648, simple_loss=0.2426, pruned_loss=0.04348, over 13515.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2865, pruned_loss=0.0609, over 2633302.24 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:25:06,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=153281.33333333334, ans=0.125
+2024-08-03 15:25:07,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.62 vs.
limit=22.5 +2024-08-03 15:25:27,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=153354.66666666666, ans=0.125 +2024-08-03 15:25:31,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=153354.66666666666, ans=0.0 +2024-08-03 15:25:45,811 INFO [train.py:1114] (0/4) Epoch 12, batch 1800, loss[loss=0.2108, simple_loss=0.2914, pruned_loss=0.06514, over 13559.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2864, pruned_loss=0.06071, over 2634262.27 frames. ], batch size: 38, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:26:07,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153501.33333333334, ans=0.125 +2024-08-03 15:26:15,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=153538.0, ans=0.125 +2024-08-03 15:26:22,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=153574.66666666666, ans=0.2 +2024-08-03 15:26:31,788 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.247e+01 1.215e+02 1.564e+02 1.986e+02 3.414e+02, threshold=3.127e+02, percent-clipped=2.0 +2024-08-03 15:26:32,689 INFO [train.py:1114] (0/4) Epoch 12, batch 1850, loss[loss=0.2087, simple_loss=0.2978, pruned_loss=0.05977, over 13393.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2866, pruned_loss=0.06069, over 2636818.14 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:26:36,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.40 vs. limit=12.0 +2024-08-03 15:26:40,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153611.33333333334, ans=0.1 +2024-08-03 15:26:55,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=153684.66666666666, ans=0.2 +2024-08-03 15:27:17,664 INFO [train.py:1114] (0/4) Epoch 12, batch 1900, loss[loss=0.2041, simple_loss=0.2956, pruned_loss=0.05628, over 13328.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2869, pruned_loss=0.06073, over 2639497.86 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:27:30,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=153831.33333333334, ans=0.025 +2024-08-03 15:27:32,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153831.33333333334, ans=0.125 +2024-08-03 15:27:37,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153868.0, ans=0.125 +2024-08-03 15:27:58,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.79 vs. limit=10.0 +2024-08-03 15:28:02,823 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.586e+01 1.133e+02 1.329e+02 1.671e+02 4.322e+02, threshold=2.659e+02, percent-clipped=4.0 +2024-08-03 15:28:02,860 INFO [train.py:1114] (0/4) Epoch 12, batch 1950, loss[loss=0.1954, simple_loss=0.2743, pruned_loss=0.05825, over 13566.00 frames. 
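
The `scaling.py:1024` records ("Whitening: name=..., metric=... vs. limit=...") compare some measure of how far a module's activations are from being "white" (identity covariance up to scale) against a per-module limit, presumably applying a corrective loss only when the limit is exceeded. The exact metric is an assumption here; the sketch below uses one natural choice, mean(eig^2)/mean(eig)^2 of the channel covariance, which equals 1.0 for perfectly white features and grows with eigenvalue spread.

```python
# Hypothetical sketch of a whitening metric with the shape of the logged
# "metric=4.76 vs. limit=15.0" comparisons. The formula is an assumption,
# not the one scaling.py uses.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels); channels are split into groups
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)          # zero-mean per group
    cov = x.transpose(1, 2) @ x / n              # (groups, d, d) covariance
    d = cov.shape[-1]
    tr = cov.diagonal(dim1=-2, dim2=-1).sum(-1)  # trace(C)   = sum of eigvals
    tr_sq = (cov * cov).sum(dim=(-2, -1))        # trace(C^2) = sum of eigvals^2
    # mean(eig^2) / mean(eig)^2, averaged over groups; 1.0 if perfectly white
    return (d * tr_sq / tr.clamp(min=1e-20).pow(2)).mean().item()

feats = torch.randn(10000, 192)                  # ~white -> metric close to 1
print(whitening_metric(feats), "vs. limit=15.0")
skewed = feats * torch.linspace(0.1, 3.0, 192)   # anisotropic -> larger metric
print(whitening_metric(skewed))
```
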
], tot_loss[loss=0.2049, simple_loss=0.2879, pruned_loss=0.06097, over 2646066.97 frames. ], batch size: 36, lr: 1.04e-02, grad_scale: 8.0 +2024-08-03 15:28:12,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=153978.0, ans=0.2 +2024-08-03 15:28:15,980 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:28:19,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154014.66666666666, ans=0.125 +2024-08-03 15:28:33,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=154088.0, ans=0.125 +2024-08-03 15:28:46,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.23 vs. limit=22.5 +2024-08-03 15:28:51,857 INFO [train.py:1114] (0/4) Epoch 12, batch 2000, loss[loss=0.1969, simple_loss=0.2696, pruned_loss=0.06208, over 13531.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2882, pruned_loss=0.06135, over 2635698.52 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:28:53,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=154161.33333333334, ans=0.125 +2024-08-03 15:28:59,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=154161.33333333334, ans=0.0 +2024-08-03 15:29:08,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=154198.0, ans=0.125 +2024-08-03 15:29:29,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=154308.0, ans=0.125 +2024-08-03 15:29:33,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=12.0 +2024-08-03 15:29:37,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=15.0 +2024-08-03 15:29:40,808 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.317e+01 1.213e+02 1.428e+02 1.743e+02 2.865e+02, threshold=2.857e+02, percent-clipped=1.0 +2024-08-03 15:29:40,861 INFO [train.py:1114] (0/4) Epoch 12, batch 2050, loss[loss=0.1649, simple_loss=0.2451, pruned_loss=0.04234, over 13445.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2866, pruned_loss=0.06077, over 2633288.84 frames. 
], batch size: 32, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:29:45,520 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:29:51,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=154381.33333333334, ans=0.025 +2024-08-03 15:29:54,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=154381.33333333334, ans=0.025 +2024-08-03 15:30:00,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=154418.0, ans=0.125 +2024-08-03 15:30:06,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=154454.66666666666, ans=0.0 +2024-08-03 15:30:09,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=154454.66666666666, ans=0.0 +2024-08-03 15:30:13,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154454.66666666666, ans=0.125 +2024-08-03 15:30:23,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0 +2024-08-03 15:30:25,495 INFO [train.py:1114] (0/4) Epoch 12, batch 2100, loss[loss=0.1904, simple_loss=0.275, pruned_loss=0.05288, over 13546.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2857, pruned_loss=0.0604, over 2638704.50 frames. ], batch size: 37, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:30:25,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=154528.0, ans=0.0 +2024-08-03 15:30:28,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154528.0, ans=0.1 +2024-08-03 15:30:29,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154528.0, ans=0.125 +2024-08-03 15:30:34,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0 +2024-08-03 15:30:37,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=154564.66666666666, ans=0.125 +2024-08-03 15:30:39,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0 +2024-08-03 15:30:40,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154564.66666666666, ans=0.1 +2024-08-03 15:30:45,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.54 vs. limit=15.0 +2024-08-03 15:30:45,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.67 vs. 
limit=15.0 +2024-08-03 15:31:10,330 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.387e+01 1.104e+02 1.282e+02 1.725e+02 3.211e+02, threshold=2.564e+02, percent-clipped=3.0 +2024-08-03 15:31:10,366 INFO [train.py:1114] (0/4) Epoch 12, batch 2150, loss[loss=0.1858, simple_loss=0.2639, pruned_loss=0.05387, over 13559.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2845, pruned_loss=0.06001, over 2647401.37 frames. ], batch size: 36, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:31:16,062 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:31:17,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=154711.33333333334, ans=0.125 +2024-08-03 15:31:27,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=154784.66666666666, ans=0.0 +2024-08-03 15:31:31,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=154784.66666666666, ans=0.2 +2024-08-03 15:31:31,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=154784.66666666666, ans=0.0 +2024-08-03 15:31:32,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=154784.66666666666, ans=0.2 +2024-08-03 15:31:34,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154784.66666666666, ans=0.0 +2024-08-03 15:31:35,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=154784.66666666666, ans=0.0 +2024-08-03 15:31:52,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=154858.0, ans=0.2 +2024-08-03 15:31:55,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=154858.0, ans=0.0 +2024-08-03 15:31:59,004 INFO [train.py:1114] (0/4) Epoch 12, batch 2200, loss[loss=0.2296, simple_loss=0.3125, pruned_loss=0.07342, over 13393.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2847, pruned_loss=0.05975, over 2645998.07 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:32:04,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.23 vs. limit=12.0 +2024-08-03 15:32:27,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155004.66666666666, ans=0.125 +2024-08-03 15:32:40,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=155041.33333333334, ans=0.125 +2024-08-03 15:32:46,544 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.202e+02 1.524e+02 1.986e+02 3.191e+02, threshold=3.048e+02, percent-clipped=7.0 +2024-08-03 15:32:46,597 INFO [train.py:1114] (0/4) Epoch 12, batch 2250, loss[loss=0.197, simple_loss=0.2855, pruned_loss=0.05424, over 13360.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2853, pruned_loss=0.06017, over 2643145.31 frames. 
], batch size: 37, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:32:51,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=155078.0, ans=0.125 +2024-08-03 15:32:58,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=15.0 +2024-08-03 15:33:02,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=155114.66666666666, ans=0.0 +2024-08-03 15:33:04,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=155151.33333333334, ans=0.125 +2024-08-03 15:33:06,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=15.0 +2024-08-03 15:33:16,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=155188.0, ans=0.2 +2024-08-03 15:33:20,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=155188.0, ans=0.0 +2024-08-03 15:33:30,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=155224.66666666666, ans=0.125 +2024-08-03 15:33:34,549 INFO [train.py:1114] (0/4) Epoch 12, batch 2300, loss[loss=0.1774, simple_loss=0.2483, pruned_loss=0.05321, over 13573.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2838, pruned_loss=0.05991, over 2639639.02 frames. ], batch size: 33, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:33:46,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=155298.0, ans=0.125 +2024-08-03 15:34:06,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=155371.33333333334, ans=0.025 +2024-08-03 15:34:12,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=155408.0, ans=0.025 +2024-08-03 15:34:19,897 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.892e+01 1.184e+02 1.344e+02 1.744e+02 3.184e+02, threshold=2.689e+02, percent-clipped=1.0 +2024-08-03 15:34:19,950 INFO [train.py:1114] (0/4) Epoch 12, batch 2350, loss[loss=0.1879, simple_loss=0.28, pruned_loss=0.04794, over 13546.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2839, pruned_loss=0.05968, over 2641602.98 frames. 
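
The `optim.py:487` WARNING records are informative about the clipping scheme: the five numbers are min/25%/50%/75%/max of recent gradient norms, and the logged threshold is consistently `Clipping_scale` times the median (for example, 2.0 x 1.344e+02 ~ 2.689e+02 in the record just above). A minimal sketch of a clipper with that behaviour follows; the class name, window size, and reporting cadence are assumptions.

```python
# Sketch of median-relative gradient clipping, matching the arithmetic of
# the logged quartile lines: threshold = clipping_scale * median of recent
# gradient norms, with quartiles and percent-clipped tracked for reporting.
from collections import deque
import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 1024):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total grad norms
        self.clipped = 0
        self.seen = 0

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()  # clipping_scale * median
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:                  # rescale gradients in place
                p.grad.mul_(threshold / norm)
        return threshold

model = torch.nn.Linear(10, 10)
model(torch.randn(4, 10)).sum().backward()
clipper = QuartileClipper()
print("threshold:", clipper.clip_(model.parameters()))
```
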
], batch size: 38, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:34:22,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155444.66666666666, ans=0.1 +2024-08-03 15:34:30,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155481.33333333334, ans=0.125 +2024-08-03 15:34:40,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=155518.0, ans=0.015 +2024-08-03 15:34:41,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=155518.0, ans=0.2 +2024-08-03 15:34:53,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155554.66666666666, ans=0.1 +2024-08-03 15:34:56,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=155591.33333333334, ans=0.125 +2024-08-03 15:35:05,426 INFO [train.py:1114] (0/4) Epoch 12, batch 2400, loss[loss=0.1882, simple_loss=0.2782, pruned_loss=0.04913, over 13547.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2852, pruned_loss=0.06007, over 2642955.51 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-08-03 15:35:07,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=155628.0, ans=0.125 +2024-08-03 15:35:25,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.05 vs. limit=15.0 +2024-08-03 15:35:27,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.96 vs. limit=15.0 +2024-08-03 15:35:28,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=155701.33333333334, ans=0.2 +2024-08-03 15:35:31,333 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:35:34,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=155701.33333333334, ans=0.0 +2024-08-03 15:35:38,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-08-03 15:35:54,429 INFO [train.py:1114] (0/4) Epoch 12, batch 2450, loss[loss=0.242, simple_loss=0.3212, pruned_loss=0.08134, over 13372.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2867, pruned_loss=0.06096, over 2633714.50 frames. 
], batch size: 37, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:35:55,283 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.374e+01 1.163e+02 1.350e+02 1.845e+02 2.920e+02, threshold=2.699e+02, percent-clipped=1.0 +2024-08-03 15:36:03,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=155848.0, ans=0.125 +2024-08-03 15:36:04,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=155848.0, ans=0.125 +2024-08-03 15:36:06,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=155848.0, ans=0.0 +2024-08-03 15:36:09,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.41 vs. limit=6.0 +2024-08-03 15:36:20,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=155884.66666666666, ans=0.2 +2024-08-03 15:36:41,782 INFO [train.py:1114] (0/4) Epoch 12, batch 2500, loss[loss=0.186, simple_loss=0.2776, pruned_loss=0.04716, over 13404.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2866, pruned_loss=0.06066, over 2637635.71 frames. ], batch size: 39, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:36:45,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=15.0 +2024-08-03 15:36:58,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=156031.33333333334, ans=0.025 +2024-08-03 15:37:14,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.45 vs. limit=22.5 +2024-08-03 15:37:27,571 INFO [train.py:1114] (0/4) Epoch 12, batch 2550, loss[loss=0.1681, simple_loss=0.2469, pruned_loss=0.04469, over 13550.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2864, pruned_loss=0.06047, over 2639250.76 frames. ], batch size: 31, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:37:28,348 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.903e+01 1.161e+02 1.332e+02 1.717e+02 3.575e+02, threshold=2.664e+02, percent-clipped=6.0 +2024-08-03 15:37:53,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-08-03 15:38:02,228 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.28 vs. limit=15.0 +2024-08-03 15:38:04,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156324.66666666666, ans=0.0 +2024-08-03 15:38:09,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=156324.66666666666, ans=0.125 +2024-08-03 15:38:12,942 INFO [train.py:1114] (0/4) Epoch 12, batch 2600, loss[loss=0.2018, simple_loss=0.2777, pruned_loss=0.06292, over 13567.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2864, pruned_loss=0.0603, over 2637704.37 frames. 
], batch size: 36, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:38:17,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=156361.33333333334, ans=0.125 +2024-08-03 15:38:19,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=156361.33333333334, ans=0.2 +2024-08-03 15:38:23,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.20 vs. limit=22.5 +2024-08-03 15:38:25,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=156398.0, ans=0.025 +2024-08-03 15:38:37,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=156434.66666666666, ans=0.0 +2024-08-03 15:38:38,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-03 15:38:49,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=156508.0, ans=0.125 +2024-08-03 15:38:56,389 INFO [train.py:1114] (0/4) Epoch 12, batch 2650, loss[loss=0.234, simple_loss=0.3163, pruned_loss=0.07587, over 13304.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2867, pruned_loss=0.06039, over 2639957.64 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:38:56,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156544.66666666666, ans=0.1 +2024-08-03 15:38:57,266 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.176e+02 1.444e+02 1.768e+02 3.309e+02, threshold=2.888e+02, percent-clipped=8.0 +2024-08-03 15:39:05,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=156581.33333333334, ans=0.125 +2024-08-03 15:39:08,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=156581.33333333334, ans=0.125 +2024-08-03 15:39:08,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=156581.33333333334, ans=0.2 +2024-08-03 15:39:16,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=156618.0, ans=0.025 +2024-08-03 15:39:31,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=156691.33333333334, ans=0.1 +2024-08-03 15:39:37,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.25 vs. limit=15.0 +2024-08-03 15:39:40,091 INFO [train.py:1114] (0/4) Epoch 12, batch 2700, loss[loss=0.1693, simple_loss=0.2588, pruned_loss=0.0399, over 13537.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2871, pruned_loss=0.06056, over 2636733.49 frames. 
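
Note how each training record pairs a per-batch `loss[... over N frames.]` with a running `tot_loss[... over ~2.6M frames.]`: the aggregate is weighted by frame counts, so long batches move it more. One way to get exactly this pair of fields, sketched below, is an exponentially decayed frame-weighted average; the decay constant is an assumption, chosen so that the effective window num_frames/(1-decay) is ~2.6M frames at ~13k frames per batch, like the logged totals.

```python
# Sketch (assumption) of a frame-weighted running loss like the logged
# tot_loss[...] field: per-batch losses averaged with frame counts as
# weights under an exponential decay.
class FrameWeightedAverage:
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames
        return self.loss_sum / self.frames  # current tot_loss

avg = FrameWeightedAverage()
for loss, frames in [(0.2046, 13259.0), (0.2147, 13436.0)]:
    tot = avg.update(loss, frames)
    print(f"tot_loss[loss={tot:.4f}, over {avg.frames:.2f} frames.]")
```
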
], batch size: 40, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:39:40,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=156728.0, ans=0.0 +2024-08-03 15:39:42,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156728.0, ans=0.125 +2024-08-03 15:39:59,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=156801.33333333334, ans=0.0 +2024-08-03 15:40:00,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=156801.33333333334, ans=0.2 +2024-08-03 15:40:02,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=156801.33333333334, ans=0.125 +2024-08-03 15:40:11,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=156838.0, ans=0.125 +2024-08-03 15:40:23,045 INFO [train.py:1114] (0/4) Epoch 12, batch 2750, loss[loss=0.1915, simple_loss=0.2695, pruned_loss=0.05676, over 13330.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2858, pruned_loss=0.06019, over 2635034.13 frames. ], batch size: 34, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:40:23,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.506e+01 1.130e+02 1.309e+02 1.569e+02 2.980e+02, threshold=2.619e+02, percent-clipped=1.0 +2024-08-03 15:40:48,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-08-03 15:40:52,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.35 vs. limit=22.5 +2024-08-03 15:41:03,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=157058.0, ans=0.125 +2024-08-03 15:41:03,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=157058.0, ans=0.07 +2024-08-03 15:41:04,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=157058.0, ans=0.0 +2024-08-03 15:41:06,539 INFO [train.py:1114] (0/4) Epoch 12, batch 2800, loss[loss=0.276, simple_loss=0.3338, pruned_loss=0.109, over 9360.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2861, pruned_loss=0.06053, over 2626860.02 frames. ], batch size: 97, lr: 1.03e-02, grad_scale: 32.0 +2024-08-03 15:41:49,767 INFO [train.py:1114] (0/4) Epoch 12, batch 2850, loss[loss=0.1984, simple_loss=0.2767, pruned_loss=0.06006, over 13364.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2858, pruned_loss=0.06043, over 2620410.45 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:41:51,428 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.668e+01 1.162e+02 1.250e+02 1.511e+02 3.589e+02, threshold=2.501e+02, percent-clipped=1.0 +2024-08-03 15:41:55,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=157278.0, ans=0.025 +2024-08-03 15:42:09,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.73 vs. 
limit=12.0 +2024-08-03 15:42:24,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=157388.0, ans=0.125 +2024-08-03 15:42:35,681 INFO [train.py:1114] (0/4) Epoch 12, batch 2900, loss[loss=0.1895, simple_loss=0.2713, pruned_loss=0.05388, over 13348.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2868, pruned_loss=0.06037, over 2631055.35 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:42:37,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=157461.33333333334, ans=0.125 +2024-08-03 15:42:43,341 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.96 vs. limit=22.5 +2024-08-03 15:42:51,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=157498.0, ans=0.2 +2024-08-03 15:42:54,305 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:42:57,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=157534.66666666666, ans=0.125 +2024-08-03 15:43:01,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.82 vs. limit=15.0 +2024-08-03 15:43:04,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=157571.33333333334, ans=0.2 +2024-08-03 15:43:09,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-08-03 15:43:13,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=157608.0, ans=0.0 +2024-08-03 15:43:18,709 INFO [train.py:1114] (0/4) Epoch 12, batch 2950, loss[loss=0.1776, simple_loss=0.2659, pruned_loss=0.04462, over 13325.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2847, pruned_loss=0.05982, over 2628946.77 frames. ], batch size: 34, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:43:19,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157644.66666666666, ans=0.125 +2024-08-03 15:43:20,462 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.179e+02 1.423e+02 1.782e+02 2.994e+02, threshold=2.847e+02, percent-clipped=4.0 +2024-08-03 15:43:24,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.69 vs. 
limit=22.5 +2024-08-03 15:43:25,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=157644.66666666666, ans=0.125 +2024-08-03 15:43:26,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157681.33333333334, ans=0.0 +2024-08-03 15:43:27,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=157681.33333333334, ans=0.0 +2024-08-03 15:43:31,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=157681.33333333334, ans=0.0 +2024-08-03 15:43:34,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.43 vs. limit=15.0 +2024-08-03 15:43:59,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=157791.33333333334, ans=10.0 +2024-08-03 15:44:01,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157828.0, ans=0.1 +2024-08-03 15:44:01,734 INFO [train.py:1114] (0/4) Epoch 12, batch 3000, loss[loss=0.175, simple_loss=0.2699, pruned_loss=0.04004, over 13541.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2851, pruned_loss=0.05975, over 2629254.49 frames. ], batch size: 37, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:44:01,735 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 15:44:07,478 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.4584, 2.3139, 2.1794, 2.1101], device='cuda:0') +2024-08-03 15:44:11,713 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.178, simple_loss=0.2775, pruned_loss=0.03924, over 944034.00 frames. +2024-08-03 15:44:11,714 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 15:44:18,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=157828.0, ans=0.0 +2024-08-03 15:44:33,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=157901.33333333334, ans=0.0 +2024-08-03 15:44:35,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.05 vs. limit=22.5 +2024-08-03 15:44:35,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=157901.33333333334, ans=0.0 +2024-08-03 15:44:53,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=12.0 +2024-08-03 15:44:55,167 INFO [train.py:1114] (0/4) Epoch 12, batch 3050, loss[loss=0.1861, simple_loss=0.2697, pruned_loss=0.05129, over 13535.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2861, pruned_loss=0.06029, over 2626001.48 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:44:56,885 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.167e+02 1.304e+02 1.686e+02 2.790e+02, threshold=2.608e+02, percent-clipped=0.0 +2024-08-03 15:44:59,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.05 vs. 
limit=12.0 +2024-08-03 15:45:22,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=158121.33333333334, ans=0.2 +2024-08-03 15:45:24,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=158121.33333333334, ans=0.2 +2024-08-03 15:45:39,560 INFO [train.py:1114] (0/4) Epoch 12, batch 3100, loss[loss=0.2124, simple_loss=0.3006, pruned_loss=0.06207, over 13337.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2855, pruned_loss=0.06002, over 2626694.34 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:45:48,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=158231.33333333334, ans=0.0 +2024-08-03 15:46:11,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158304.66666666666, ans=0.1 +2024-08-03 15:46:22,124 INFO [train.py:1114] (0/4) Epoch 12, batch 3150, loss[loss=0.2195, simple_loss=0.3105, pruned_loss=0.06426, over 13052.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2853, pruned_loss=0.05973, over 2628167.34 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 16.0 +2024-08-03 15:46:23,783 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.679e+01 1.169e+02 1.445e+02 1.962e+02 3.331e+02, threshold=2.890e+02, percent-clipped=6.0 +2024-08-03 15:46:23,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=158378.0, ans=0.0 +2024-08-03 15:46:24,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.27 vs. limit=15.0 +2024-08-03 15:46:24,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=158378.0, ans=0.2 +2024-08-03 15:46:24,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=158378.0, ans=0.0 +2024-08-03 15:46:28,914 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.36 vs. limit=15.0 +2024-08-03 15:46:36,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.79 vs. limit=10.0 +2024-08-03 15:46:46,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.37 vs. limit=15.0 +2024-08-03 15:46:50,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=158488.0, ans=0.125 +2024-08-03 15:46:56,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=158524.66666666666, ans=0.025 +2024-08-03 15:46:56,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.94 vs. 
limit=15.0 +2024-08-03 15:46:59,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=158524.66666666666, ans=0.125 +2024-08-03 15:47:03,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.41 vs. limit=10.0 +2024-08-03 15:47:05,177 INFO [train.py:1114] (0/4) Epoch 12, batch 3200, loss[loss=0.2141, simple_loss=0.2887, pruned_loss=0.06971, over 13546.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2847, pruned_loss=0.05977, over 2634155.67 frames. ], batch size: 37, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:47:10,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.15 vs. limit=15.0 +2024-08-03 15:47:17,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=158598.0, ans=0.0 +2024-08-03 15:47:28,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=158634.66666666666, ans=0.025 +2024-08-03 15:47:42,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0 +2024-08-03 15:47:48,572 INFO [train.py:1114] (0/4) Epoch 12, batch 3250, loss[loss=0.2169, simple_loss=0.2948, pruned_loss=0.06949, over 13383.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2856, pruned_loss=0.05994, over 2639312.83 frames. ], batch size: 38, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:47:48,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=158744.66666666666, ans=0.04949747468305833 +2024-08-03 15:47:50,191 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.188e+01 1.142e+02 1.340e+02 1.709e+02 3.212e+02, threshold=2.679e+02, percent-clipped=3.0 +2024-08-03 15:48:15,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=158854.66666666666, ans=0.0 +2024-08-03 15:48:16,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158854.66666666666, ans=0.1 +2024-08-03 15:48:28,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=158891.33333333334, ans=0.0 +2024-08-03 15:48:32,120 INFO [train.py:1114] (0/4) Epoch 12, batch 3300, loss[loss=0.2352, simple_loss=0.31, pruned_loss=0.0802, over 12814.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2848, pruned_loss=0.0598, over 2640462.18 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:48:38,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0 +2024-08-03 15:48:43,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.00 vs. limit=15.0 +2024-08-03 15:49:00,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=159038.0, ans=0.125 +2024-08-03 15:49:15,292 INFO [train.py:1114] (0/4) Epoch 12, batch 3350, loss[loss=0.2135, simple_loss=0.2943, pruned_loss=0.06634, over 13004.00 frames. 
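
The "Computing validation loss" block at 15:44 above (batch 3000) interleaves a full pass over the dev set with training, reporting a frame-weighted validation loss over 944034 frames and the peak CUDA memory so far. A minimal sketch of that step follows; `model`, `compute_loss`, and `valid_loader` are placeholders, not the actual `train.py` names.

```python
# Sketch of a validation pass like the one logged at batch 3000: eval mode,
# no gradients, frame-weighted aggregation, then peak-memory reporting.
import torch

def validate(model, valid_loader, compute_loss, device="cuda"):
    model.eval()
    loss_sum, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)  # per-batch values
            loss_sum += loss.item() * num_frames
            frames += num_frames
    model.train()  # resume training mode afterwards
    print(f"validation: loss={loss_sum / frames:.4f}, over {frames:.2f} frames.")
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mb}MB")
```
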
], tot_loss[loss=0.2036, simple_loss=0.286, pruned_loss=0.06058, over 2630221.65 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:49:15,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=159111.33333333334, ans=0.125 +2024-08-03 15:49:17,012 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.306e+01 1.184e+02 1.362e+02 1.748e+02 2.695e+02, threshold=2.725e+02, percent-clipped=2.0 +2024-08-03 15:49:18,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=159111.33333333334, ans=0.0 +2024-08-03 15:49:18,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=159111.33333333334, ans=0.125 +2024-08-03 15:49:23,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=159148.0, ans=0.2 +2024-08-03 15:49:33,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=159184.66666666666, ans=0.035 +2024-08-03 15:49:39,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=159184.66666666666, ans=0.125 +2024-08-03 15:49:59,029 INFO [train.py:1114] (0/4) Epoch 12, batch 3400, loss[loss=0.1782, simple_loss=0.252, pruned_loss=0.05219, over 13534.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2858, pruned_loss=0.06053, over 2625216.25 frames. ], batch size: 31, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:49:59,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=159294.66666666666, ans=0.125 +2024-08-03 15:50:01,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=159294.66666666666, ans=0.125 +2024-08-03 15:50:20,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=159368.0, ans=0.025 +2024-08-03 15:50:23,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=159368.0, ans=0.0 +2024-08-03 15:50:30,910 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.39 vs. limit=10.0 +2024-08-03 15:50:42,886 INFO [train.py:1114] (0/4) Epoch 12, batch 3450, loss[loss=0.1954, simple_loss=0.283, pruned_loss=0.05396, over 12950.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2858, pruned_loss=0.06029, over 2629329.13 frames. 
], batch size: 52, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:50:43,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=159478.0, ans=0.0 +2024-08-03 15:50:44,510 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.325e+01 1.219e+02 1.408e+02 1.757e+02 3.423e+02, threshold=2.817e+02, percent-clipped=3.0 +2024-08-03 15:50:53,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159514.66666666666, ans=0.125 +2024-08-03 15:51:05,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=159551.33333333334, ans=0.125 +2024-08-03 15:51:18,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=12.0 +2024-08-03 15:51:21,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=159624.66666666666, ans=0.125 +2024-08-03 15:51:24,775 INFO [train.py:1114] (0/4) Epoch 12, batch 3500, loss[loss=0.1985, simple_loss=0.2811, pruned_loss=0.05791, over 13539.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2846, pruned_loss=0.06003, over 2631492.09 frames. ], batch size: 34, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:51:46,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=159734.66666666666, ans=0.125 +2024-08-03 15:51:48,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=159734.66666666666, ans=0.025 +2024-08-03 15:51:59,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=159808.0, ans=0.125 +2024-08-03 15:52:01,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=159808.0, ans=0.125 +2024-08-03 15:52:05,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=159808.0, ans=0.04949747468305833 +2024-08-03 15:52:07,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=159844.66666666666, ans=0.035 +2024-08-03 15:52:07,859 INFO [train.py:1114] (0/4) Epoch 12, batch 3550, loss[loss=0.2212, simple_loss=0.3031, pruned_loss=0.06963, over 12339.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2881, pruned_loss=0.06192, over 2629701.76 frames. ], batch size: 58, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:52:09,500 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.339e+01 1.127e+02 1.244e+02 1.565e+02 2.847e+02, threshold=2.489e+02, percent-clipped=1.0 +2024-08-03 15:52:15,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-08-03 15:52:51,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=159991.33333333334, ans=0.125 +2024-08-03 15:52:53,376 INFO [train.py:1114] (0/4) Epoch 12, batch 3600, loss[loss=0.2808, simple_loss=0.3393, pruned_loss=0.1111, over 8670.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2934, pruned_loss=0.06675, over 2488624.95 frames. 
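
The `lr` field drifts down slowly within epoch 12 (1.05e-02 at batch 1400, 1.02e-02 by batch 3100) and then steps down to 9.79e-03 when epoch 13 starts just below, i.e. the schedule decays in both total batches and epochs. That shape matches icefall's Eden scheduler as I understand it; the formula and constants in the sketch below are assumptions and will not reproduce the logged values exactly.

```python
# Sketch of an Eden-style LR schedule: smooth decay in batch count plus a
# step-down at each epoch boundary. base_lr, lr_batches, and lr_epochs are
# illustrative, not the values this run used.
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor

for epoch, batch in [(12, 151961), (12, 160064), (13, 160174)]:
    print(f"epoch {epoch:2d}, batch {batch}: lr = {eden_lr(0.045, batch, epoch):.2e}")
```
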
], batch size: 96, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:52:59,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.17 vs. limit=10.0 +2024-08-03 15:53:07,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=160064.66666666666, ans=0.035 +2024-08-03 15:53:07,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-08-03 15:53:19,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=160138.0, ans=0.0 +2024-08-03 15:53:27,573 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-12.pt +2024-08-03 15:54:13,936 INFO [train.py:1114] (0/4) Epoch 13, batch 0, loss[loss=0.1798, simple_loss=0.263, pruned_loss=0.04827, over 13332.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.263, pruned_loss=0.04827, over 13332.00 frames. ], batch size: 33, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:54:13,937 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 15:54:23,996 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.179, simple_loss=0.2806, pruned_loss=0.03875, over 944034.00 frames. +2024-08-03 15:54:23,997 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 15:54:25,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=160174.66666666666, ans=0.0 +2024-08-03 15:54:30,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=160174.66666666666, ans=0.015 +2024-08-03 15:54:37,807 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.239e+02 1.394e+02 1.533e+02 2.538e+02, threshold=2.789e+02, percent-clipped=1.0 +2024-08-03 15:54:48,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=160248.0, ans=0.07 +2024-08-03 15:55:11,509 INFO [train.py:1114] (0/4) Epoch 13, batch 50, loss[loss=0.1798, simple_loss=0.2621, pruned_loss=0.04875, over 13442.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2892, pruned_loss=0.06137, over 579058.73 frames. ], batch size: 32, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:55:11,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160358.0, ans=0.125 +2024-08-03 15:55:12,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.50 vs. 
limit=15.0 +2024-08-03 15:55:20,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=160394.66666666666, ans=0.07 +2024-08-03 15:55:25,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160394.66666666666, ans=0.125 +2024-08-03 15:55:27,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=160394.66666666666, ans=0.0 +2024-08-03 15:55:49,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=160504.66666666666, ans=0.0 +2024-08-03 15:55:58,987 INFO [train.py:1114] (0/4) Epoch 13, batch 100, loss[loss=0.2017, simple_loss=0.2814, pruned_loss=0.06099, over 13531.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.288, pruned_loss=0.05986, over 1026088.95 frames. ], batch size: 35, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:00,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160541.33333333334, ans=0.125 +2024-08-03 15:56:02,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=160541.33333333334, ans=0.125 +2024-08-03 15:56:04,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=160541.33333333334, ans=0.125 +2024-08-03 15:56:10,680 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.344e+01 1.098e+02 1.241e+02 1.448e+02 3.539e+02, threshold=2.482e+02, percent-clipped=1.0 +2024-08-03 15:56:14,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=160578.0, ans=0.125 +2024-08-03 15:56:31,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=160651.33333333334, ans=15.0 +2024-08-03 15:56:32,763 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:56:35,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=160688.0, ans=0.0 +2024-08-03 15:56:41,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=160688.0, ans=0.025 +2024-08-03 15:56:44,367 INFO [train.py:1114] (0/4) Epoch 13, batch 150, loss[loss=0.1613, simple_loss=0.2453, pruned_loss=0.03866, over 13418.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2852, pruned_loss=0.05842, over 1387469.15 frames. ], batch size: 32, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:44,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.32 vs. 
limit=15.0 +2024-08-03 15:56:48,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160724.66666666666, ans=0.1 +2024-08-03 15:56:59,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160761.33333333334, ans=0.1 +2024-08-03 15:57:06,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=160798.0, ans=0.025 +2024-08-03 15:57:33,272 INFO [train.py:1114] (0/4) Epoch 13, batch 200, loss[loss=0.2001, simple_loss=0.2924, pruned_loss=0.05387, over 12332.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2843, pruned_loss=0.05814, over 1665762.72 frames. ], batch size: 58, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:57:36,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=160908.0, ans=0.2 +2024-08-03 15:57:37,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160908.0, ans=0.125 +2024-08-03 15:57:45,691 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.389e+01 1.180e+02 1.439e+02 1.786e+02 2.514e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 15:57:53,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160981.33333333334, ans=0.1 +2024-08-03 15:57:59,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=161018.0, ans=0.0 +2024-08-03 15:57:59,684 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.59 vs. limit=12.0 +2024-08-03 15:58:20,402 INFO [train.py:1114] (0/4) Epoch 13, batch 250, loss[loss=0.2238, simple_loss=0.3005, pruned_loss=0.07355, over 13266.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2856, pruned_loss=0.05919, over 1883787.72 frames. ], batch size: 46, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:58:22,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=161091.33333333334, ans=0.125 +2024-08-03 15:58:26,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161091.33333333334, ans=0.1 +2024-08-03 15:58:30,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=161128.0, ans=0.0 +2024-08-03 15:58:36,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=161128.0, ans=0.2 +2024-08-03 15:58:45,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=161164.66666666666, ans=0.125 +2024-08-03 15:58:47,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161201.33333333334, ans=0.125 +2024-08-03 15:58:49,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=161201.33333333334, ans=0.0 +2024-08-03 15:58:54,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.39 vs. 
limit=15.0 +2024-08-03 15:59:05,420 INFO [train.py:1114] (0/4) Epoch 13, batch 300, loss[loss=0.2141, simple_loss=0.2997, pruned_loss=0.06425, over 13451.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2844, pruned_loss=0.05867, over 2051008.58 frames. ], batch size: 42, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 15:59:06,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161274.66666666666, ans=0.125 +2024-08-03 15:59:20,166 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.100e+02 1.297e+02 1.682e+02 2.744e+02, threshold=2.594e+02, percent-clipped=0.0 +2024-08-03 15:59:21,394 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-44000.pt +2024-08-03 15:59:37,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161348.0, ans=0.1 +2024-08-03 15:59:41,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=161384.66666666666, ans=0.125 +2024-08-03 15:59:47,642 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:59:49,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161384.66666666666, ans=0.125 +2024-08-03 16:00:00,483 INFO [train.py:1114] (0/4) Epoch 13, batch 350, loss[loss=0.1852, simple_loss=0.262, pruned_loss=0.05422, over 13572.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2848, pruned_loss=0.05868, over 2182477.04 frames. ], batch size: 33, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 16:00:14,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=161494.66666666666, ans=0.125 +2024-08-03 16:00:15,470 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:00:31,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=161568.0, ans=0.2 +2024-08-03 16:00:42,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.40 vs. limit=15.0 +2024-08-03 16:00:45,722 INFO [train.py:1114] (0/4) Epoch 13, batch 400, loss[loss=0.2078, simple_loss=0.293, pruned_loss=0.06127, over 13377.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2836, pruned_loss=0.05832, over 2287045.61 frames. 
], batch size: 37, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:00:46,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=161641.33333333334, ans=0.125 +2024-08-03 16:00:49,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=161641.33333333334, ans=0.2 +2024-08-03 16:00:56,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161678.0, ans=0.125 +2024-08-03 16:01:00,502 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.412e+01 1.099e+02 1.258e+02 1.504e+02 2.448e+02, threshold=2.516e+02, percent-clipped=0.0 +2024-08-03 16:01:18,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161751.33333333334, ans=0.125 +2024-08-03 16:01:18,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=161751.33333333334, ans=0.07 +2024-08-03 16:01:39,148 INFO [train.py:1114] (0/4) Epoch 13, batch 450, loss[loss=0.209, simple_loss=0.2947, pruned_loss=0.06168, over 13552.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2834, pruned_loss=0.05787, over 2360280.03 frames. ], batch size: 38, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:01:44,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=161824.66666666666, ans=0.2 +2024-08-03 16:02:24,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0 +2024-08-03 16:02:26,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=161971.33333333334, ans=0.125 +2024-08-03 16:02:28,142 INFO [train.py:1114] (0/4) Epoch 13, batch 500, loss[loss=0.2346, simple_loss=0.3133, pruned_loss=0.07801, over 13410.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.283, pruned_loss=0.05769, over 2425240.54 frames. 
], batch size: 43, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:02:31,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=162008.0, ans=0.125 +2024-08-03 16:02:41,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=162044.66666666666, ans=0.125 +2024-08-03 16:02:44,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=162044.66666666666, ans=0.5 +2024-08-03 16:02:44,990 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.845e+01 1.131e+02 1.351e+02 1.618e+02 2.590e+02, threshold=2.702e+02, percent-clipped=1.0 +2024-08-03 16:02:50,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=162081.33333333334, ans=0.125 +2024-08-03 16:02:56,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=162081.33333333334, ans=0.1 +2024-08-03 16:03:06,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=162118.0, ans=0.2 +2024-08-03 16:03:18,803 INFO [train.py:1114] (0/4) Epoch 13, batch 550, loss[loss=0.2271, simple_loss=0.3027, pruned_loss=0.0757, over 13001.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.283, pruned_loss=0.05802, over 2467636.64 frames. ], batch size: 48, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:03:18,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162191.33333333334, ans=0.125 +2024-08-03 16:03:26,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.02 vs. limit=22.5 +2024-08-03 16:03:40,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=162264.66666666666, ans=0.0 +2024-08-03 16:03:41,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162264.66666666666, ans=0.125 +2024-08-03 16:04:00,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.90 vs. limit=15.0 +2024-08-03 16:04:04,095 INFO [train.py:1114] (0/4) Epoch 13, batch 600, loss[loss=0.2144, simple_loss=0.2968, pruned_loss=0.066, over 13359.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2833, pruned_loss=0.05795, over 2507947.99 frames. 
], batch size: 46, lr: 9.73e-03, grad_scale: 16.0 +2024-08-03 16:04:10,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=162374.66666666666, ans=0.125 +2024-08-03 16:04:17,368 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.493e+01 1.180e+02 1.372e+02 1.849e+02 3.441e+02, threshold=2.744e+02, percent-clipped=2.0 +2024-08-03 16:04:22,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162448.0, ans=0.1 +2024-08-03 16:04:25,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=162448.0, ans=0.2 +2024-08-03 16:04:33,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=162484.66666666666, ans=0.0 +2024-08-03 16:04:35,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=162484.66666666666, ans=0.015 +2024-08-03 16:04:48,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-08-03 16:04:51,057 INFO [train.py:1114] (0/4) Epoch 13, batch 650, loss[loss=0.181, simple_loss=0.2647, pruned_loss=0.04866, over 13549.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2827, pruned_loss=0.05784, over 2543272.52 frames. ], batch size: 37, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:04,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162594.66666666666, ans=0.125 +2024-08-03 16:05:10,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=162631.33333333334, ans=0.05 +2024-08-03 16:05:11,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.14 vs. limit=22.5 +2024-08-03 16:05:32,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.24 vs. limit=22.5 +2024-08-03 16:05:39,777 INFO [train.py:1114] (0/4) Epoch 13, batch 700, loss[loss=0.1823, simple_loss=0.267, pruned_loss=0.04876, over 13554.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2834, pruned_loss=0.0581, over 2566035.50 frames. ], batch size: 35, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:48,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=6.0 +2024-08-03 16:05:53,435 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.882e+01 1.139e+02 1.377e+02 1.797e+02 3.206e+02, threshold=2.754e+02, percent-clipped=4.0 +2024-08-03 16:05:57,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=162778.0, ans=0.5 +2024-08-03 16:06:03,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=162814.66666666666, ans=0.0 +2024-08-03 16:06:10,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.55 vs. 
limit=15.0 +2024-08-03 16:06:11,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.89 vs. limit=22.5 +2024-08-03 16:06:28,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162888.0, ans=0.125 +2024-08-03 16:06:31,549 INFO [train.py:1114] (0/4) Epoch 13, batch 750, loss[loss=0.2219, simple_loss=0.3137, pruned_loss=0.06505, over 13370.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2827, pruned_loss=0.05768, over 2583471.27 frames. ], batch size: 37, lr: 9.71e-03, grad_scale: 16.0 +2024-08-03 16:07:36,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=163034.66666666666, ans=0.125 +2024-08-03 16:07:39,522 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:07:43,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=163071.33333333334, ans=0.125 +2024-08-03 16:07:46,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163071.33333333334, ans=0.125 +2024-08-03 16:07:49,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=163071.33333333334, ans=0.1 +2024-08-03 16:07:50,949 INFO [train.py:1114] (0/4) Epoch 13, batch 800, loss[loss=0.187, simple_loss=0.2654, pruned_loss=0.05429, over 13340.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2831, pruned_loss=0.05804, over 2597050.61 frames. ], batch size: 33, lr: 9.71e-03, grad_scale: 32.0 +2024-08-03 16:07:53,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=163108.0, ans=0.125 +2024-08-03 16:07:56,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0 +2024-08-03 16:08:04,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.131e+01 1.126e+02 1.310e+02 1.667e+02 3.702e+02, threshold=2.620e+02, percent-clipped=3.0 +2024-08-03 16:08:13,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=163181.33333333334, ans=0.125 +2024-08-03 16:08:22,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=163218.0, ans=0.025 +2024-08-03 16:08:27,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=163254.66666666666, ans=0.125 +2024-08-03 16:08:28,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163254.66666666666, ans=0.125 +2024-08-03 16:08:36,425 INFO [train.py:1114] (0/4) Epoch 13, batch 850, loss[loss=0.1991, simple_loss=0.289, pruned_loss=0.05461, over 13337.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2829, pruned_loss=0.058, over 2608869.16 frames. 
], batch size: 40, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:08:41,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163291.33333333334, ans=0.125 +2024-08-03 16:08:57,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=163364.66666666666, ans=0.125 +2024-08-03 16:09:15,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=163438.0, ans=0.0 +2024-08-03 16:09:25,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.94 vs. limit=15.0 +2024-08-03 16:09:25,542 INFO [train.py:1114] (0/4) Epoch 13, batch 900, loss[loss=0.1975, simple_loss=0.2761, pruned_loss=0.05945, over 13349.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2831, pruned_loss=0.0583, over 2611332.45 frames. ], batch size: 33, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:09:26,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=12.0 +2024-08-03 16:09:33,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=15.0 +2024-08-03 16:09:39,002 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.048e+01 1.164e+02 1.408e+02 1.726e+02 2.750e+02, threshold=2.816e+02, percent-clipped=1.0 +2024-08-03 16:09:41,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=163511.33333333334, ans=0.125 +2024-08-03 16:10:11,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=163621.33333333334, ans=0.025 +2024-08-03 16:10:12,918 INFO [train.py:1114] (0/4) Epoch 13, batch 950, loss[loss=0.1772, simple_loss=0.2633, pruned_loss=0.04557, over 13519.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2834, pruned_loss=0.05836, over 2612329.14 frames. ], batch size: 34, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:10:14,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=163658.0, ans=0.125 +2024-08-03 16:10:25,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163694.66666666666, ans=0.1 +2024-08-03 16:10:36,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.39 vs. limit=15.0 +2024-08-03 16:10:47,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163768.0, ans=0.1 +2024-08-03 16:10:49,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163804.66666666666, ans=0.1 +2024-08-03 16:11:00,387 INFO [train.py:1114] (0/4) Epoch 13, batch 1000, loss[loss=0.1882, simple_loss=0.271, pruned_loss=0.05263, over 13365.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2844, pruned_loss=0.0592, over 2611932.80 frames. 
], batch size: 35, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:11:05,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163841.33333333334, ans=0.0 +2024-08-03 16:11:14,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.003e+01 1.149e+02 1.296e+02 1.618e+02 2.591e+02, threshold=2.593e+02, percent-clipped=0.0 +2024-08-03 16:11:35,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163988.0, ans=0.125 +2024-08-03 16:11:44,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163988.0, ans=0.0 +2024-08-03 16:11:45,729 INFO [train.py:1114] (0/4) Epoch 13, batch 1050, loss[loss=0.2001, simple_loss=0.2883, pruned_loss=0.05594, over 13569.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2832, pruned_loss=0.05834, over 2615836.62 frames. ], batch size: 39, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:11:53,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=164024.66666666666, ans=0.0 +2024-08-03 16:11:53,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0 +2024-08-03 16:11:59,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=164061.33333333334, ans=0.2 +2024-08-03 16:12:17,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=164134.66666666666, ans=0.2 +2024-08-03 16:12:32,443 INFO [train.py:1114] (0/4) Epoch 13, batch 1100, loss[loss=0.1915, simple_loss=0.2768, pruned_loss=0.05307, over 13546.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2834, pruned_loss=0.05857, over 2620102.65 frames. ], batch size: 36, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:12:45,859 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.230e+01 1.162e+02 1.483e+02 1.783e+02 2.652e+02, threshold=2.966e+02, percent-clipped=1.0 +2024-08-03 16:12:49,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=12.0 +2024-08-03 16:12:58,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.19 vs. limit=22.5 +2024-08-03 16:13:15,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.83 vs. limit=15.0 +2024-08-03 16:13:16,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=164354.66666666666, ans=0.2 +2024-08-03 16:13:19,729 INFO [train.py:1114] (0/4) Epoch 13, batch 1150, loss[loss=0.1976, simple_loss=0.2799, pruned_loss=0.05763, over 13561.00 frames. ], tot_loss[loss=0.2, simple_loss=0.283, pruned_loss=0.05844, over 2618554.11 frames. 
], batch size: 36, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:13:30,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=164428.0, ans=0.0 +2024-08-03 16:13:35,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164428.0, ans=0.1 +2024-08-03 16:13:37,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.20 vs. limit=15.0 +2024-08-03 16:13:55,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=164501.33333333334, ans=0.125 +2024-08-03 16:13:56,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=164501.33333333334, ans=0.025 +2024-08-03 16:14:06,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.73 vs. limit=15.0 +2024-08-03 16:14:06,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=164574.66666666666, ans=0.125 +2024-08-03 16:14:07,688 INFO [train.py:1114] (0/4) Epoch 13, batch 1200, loss[loss=0.2014, simple_loss=0.2936, pruned_loss=0.05466, over 13568.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2842, pruned_loss=0.05874, over 2615643.95 frames. ], batch size: 39, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:14:16,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164611.33333333334, ans=0.125 +2024-08-03 16:14:21,157 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.721e+01 1.160e+02 1.448e+02 1.730e+02 2.788e+02, threshold=2.895e+02, percent-clipped=0.0 +2024-08-03 16:14:22,284 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:14:24,018 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:14:25,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164648.0, ans=0.1 +2024-08-03 16:14:46,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164721.33333333334, ans=0.1 +2024-08-03 16:14:47,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0 +2024-08-03 16:14:54,293 INFO [train.py:1114] (0/4) Epoch 13, batch 1250, loss[loss=0.2167, simple_loss=0.3042, pruned_loss=0.06464, over 13446.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2848, pruned_loss=0.05903, over 2627667.10 frames. ], batch size: 42, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:15:05,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=164794.66666666666, ans=0.025 +2024-08-03 16:15:07,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.19 vs. 
limit=15.0 +2024-08-03 16:15:13,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=164831.33333333334, ans=0.125 +2024-08-03 16:15:18,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=164831.33333333334, ans=0.05 +2024-08-03 16:15:39,461 INFO [train.py:1114] (0/4) Epoch 13, batch 1300, loss[loss=0.2214, simple_loss=0.2974, pruned_loss=0.07268, over 12950.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.284, pruned_loss=0.05854, over 2630582.28 frames. ], batch size: 52, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:15:40,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=164941.33333333334, ans=0.125 +2024-08-03 16:15:40,584 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:15:46,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164941.33333333334, ans=0.1 +2024-08-03 16:15:52,788 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.108e+01 1.112e+02 1.319e+02 1.683e+02 3.006e+02, threshold=2.638e+02, percent-clipped=1.0 +2024-08-03 16:16:07,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=165051.33333333334, ans=0.0 +2024-08-03 16:16:15,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=165051.33333333334, ans=0.2 +2024-08-03 16:16:19,738 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.77 vs. limit=15.0 +2024-08-03 16:16:21,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=165088.0, ans=0.125 +2024-08-03 16:16:27,974 INFO [train.py:1114] (0/4) Epoch 13, batch 1350, loss[loss=0.1984, simple_loss=0.2814, pruned_loss=0.05768, over 13544.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2836, pruned_loss=0.05847, over 2638024.36 frames. ], batch size: 37, lr: 9.65e-03, grad_scale: 32.0 +2024-08-03 16:16:29,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=165124.66666666666, ans=0.125 +2024-08-03 16:17:02,079 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:17:15,468 INFO [train.py:1114] (0/4) Epoch 13, batch 1400, loss[loss=0.1743, simple_loss=0.255, pruned_loss=0.04678, over 13275.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.283, pruned_loss=0.05769, over 2641931.71 frames. 
], batch size: 31, lr: 9.65e-03, grad_scale: 16.0 +2024-08-03 16:17:20,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=165308.0, ans=0.025 +2024-08-03 16:17:24,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=165344.66666666666, ans=0.0 +2024-08-03 16:17:26,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=165344.66666666666, ans=0.125 +2024-08-03 16:17:29,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.131e+02 1.241e+02 1.412e+02 2.386e+02, threshold=2.482e+02, percent-clipped=0.0 +2024-08-03 16:17:30,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0 +2024-08-03 16:17:31,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=165344.66666666666, ans=0.95 +2024-08-03 16:17:32,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=165344.66666666666, ans=0.2 +2024-08-03 16:17:52,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=165454.66666666666, ans=0.0 +2024-08-03 16:18:01,136 INFO [train.py:1114] (0/4) Epoch 13, batch 1450, loss[loss=0.1989, simple_loss=0.2814, pruned_loss=0.05817, over 13414.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2831, pruned_loss=0.05789, over 2641221.26 frames. ], batch size: 43, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:18:17,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=165528.0, ans=0.95 +2024-08-03 16:18:28,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-08-03 16:18:29,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.52 vs. limit=22.5 +2024-08-03 16:18:43,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=165638.0, ans=0.125 +2024-08-03 16:18:47,894 INFO [train.py:1114] (0/4) Epoch 13, batch 1500, loss[loss=0.1845, simple_loss=0.273, pruned_loss=0.04805, over 13406.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2832, pruned_loss=0.05761, over 2641112.08 frames. ], batch size: 39, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:19:02,694 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.158e+02 1.427e+02 1.728e+02 2.727e+02, threshold=2.854e+02, percent-clipped=3.0 +2024-08-03 16:19:03,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=165711.33333333334, ans=0.1 +2024-08-03 16:19:04,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=165711.33333333334, ans=0.025 +2024-08-03 16:19:12,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. 
limit=6.0 +2024-08-03 16:19:13,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=165748.0, ans=0.125 +2024-08-03 16:19:19,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=165784.66666666666, ans=0.125 +2024-08-03 16:19:22,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165784.66666666666, ans=0.1 +2024-08-03 16:19:35,402 INFO [train.py:1114] (0/4) Epoch 13, batch 1550, loss[loss=0.1943, simple_loss=0.2886, pruned_loss=0.04996, over 13407.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2836, pruned_loss=0.05819, over 2631124.86 frames. ], batch size: 41, lr: 9.63e-03, grad_scale: 16.0 +2024-08-03 16:20:19,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.92 vs. limit=10.0 +2024-08-03 16:20:22,622 INFO [train.py:1114] (0/4) Epoch 13, batch 1600, loss[loss=0.1994, simple_loss=0.2878, pruned_loss=0.05551, over 13578.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2844, pruned_loss=0.05872, over 2624181.32 frames. ], batch size: 39, lr: 9.63e-03, grad_scale: 32.0 +2024-08-03 16:20:43,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.75 vs. limit=22.5 +2024-08-03 16:20:59,202 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.495e+01 1.166e+02 1.333e+02 1.673e+02 3.385e+02, threshold=2.665e+02, percent-clipped=4.0 +2024-08-03 16:21:07,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.46 vs. limit=15.0 +2024-08-03 16:21:09,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166114.66666666666, ans=0.1 +2024-08-03 16:21:30,138 INFO [train.py:1114] (0/4) Epoch 13, batch 1650, loss[loss=0.2142, simple_loss=0.3111, pruned_loss=0.05862, over 13336.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2843, pruned_loss=0.05887, over 2620709.96 frames. ], batch size: 40, lr: 9.62e-03, grad_scale: 32.0 +2024-08-03 16:21:32,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=166224.66666666666, ans=0.0 +2024-08-03 16:21:33,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=166224.66666666666, ans=0.035 +2024-08-03 16:21:35,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.95 vs. limit=12.0 +2024-08-03 16:21:46,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166261.33333333334, ans=0.1 +2024-08-03 16:22:02,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=166334.66666666666, ans=0.0 +2024-08-03 16:22:17,147 INFO [train.py:1114] (0/4) Epoch 13, batch 1700, loss[loss=0.2029, simple_loss=0.2739, pruned_loss=0.06597, over 13262.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2842, pruned_loss=0.05886, over 2629977.70 frames. 
], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:22:18,395 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.92 vs. limit=10.0 +2024-08-03 16:22:25,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=166444.66666666666, ans=0.0 +2024-08-03 16:22:32,761 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.164e+02 1.401e+02 1.757e+02 2.684e+02, threshold=2.802e+02, percent-clipped=1.0 +2024-08-03 16:22:39,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=166481.33333333334, ans=0.125 +2024-08-03 16:23:03,319 INFO [train.py:1114] (0/4) Epoch 13, batch 1750, loss[loss=0.1947, simple_loss=0.2628, pruned_loss=0.06323, over 13554.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2836, pruned_loss=0.05866, over 2633980.10 frames. ], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:23:08,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=166591.33333333334, ans=0.09899494936611666 +2024-08-03 16:23:09,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=166591.33333333334, ans=0.125 +2024-08-03 16:23:10,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=166591.33333333334, ans=0.125 +2024-08-03 16:23:14,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166628.0, ans=0.125 +2024-08-03 16:23:33,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=166701.33333333334, ans=0.0 +2024-08-03 16:23:34,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166701.33333333334, ans=0.1 +2024-08-03 16:23:36,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=166701.33333333334, ans=0.05 +2024-08-03 16:23:42,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=166738.0, ans=0.2 +2024-08-03 16:23:44,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=166738.0, ans=0.0 +2024-08-03 16:23:46,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=166738.0, ans=0.0 +2024-08-03 16:23:50,918 INFO [train.py:1114] (0/4) Epoch 13, batch 1800, loss[loss=0.1851, simple_loss=0.279, pruned_loss=0.04558, over 13557.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2835, pruned_loss=0.0582, over 2634773.63 frames. 
], batch size: 38, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:23:53,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=166774.66666666666, ans=0.2 +2024-08-03 16:24:07,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=166811.33333333334, ans=0.025 +2024-08-03 16:24:08,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.884e+01 1.178e+02 1.346e+02 1.574e+02 2.406e+02, threshold=2.692e+02, percent-clipped=0.0 +2024-08-03 16:24:11,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=166848.0, ans=0.125 +2024-08-03 16:24:22,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=166884.66666666666, ans=0.125 +2024-08-03 16:24:35,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.11 vs. limit=6.0 +2024-08-03 16:24:39,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=166958.0, ans=0.125 +2024-08-03 16:24:40,322 INFO [train.py:1114] (0/4) Epoch 13, batch 1850, loss[loss=0.1939, simple_loss=0.2784, pruned_loss=0.0547, over 13396.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2832, pruned_loss=0.05807, over 2636858.64 frames. ], batch size: 39, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:24:40,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=166958.0, ans=0.0 +2024-08-03 16:25:15,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167068.0, ans=0.1 +2024-08-03 16:25:26,639 INFO [train.py:1114] (0/4) Epoch 13, batch 1900, loss[loss=0.2053, simple_loss=0.2953, pruned_loss=0.05767, over 13307.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2837, pruned_loss=0.05833, over 2640269.51 frames. ], batch size: 40, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:25:30,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167141.33333333334, ans=0.0 +2024-08-03 16:25:32,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.84 vs. limit=22.5 +2024-08-03 16:25:44,174 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.192e+01 1.122e+02 1.325e+02 1.918e+02 3.257e+02, threshold=2.651e+02, percent-clipped=9.0 +2024-08-03 16:25:49,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=167214.66666666666, ans=0.05 +2024-08-03 16:25:53,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167214.66666666666, ans=0.125 +2024-08-03 16:25:54,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=167214.66666666666, ans=0.125 +2024-08-03 16:26:14,633 INFO [train.py:1114] (0/4) Epoch 13, batch 1950, loss[loss=0.1974, simple_loss=0.2832, pruned_loss=0.05577, over 13575.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2853, pruned_loss=0.05899, over 2647196.93 frames. 
], batch size: 36, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:26:21,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=167324.66666666666, ans=0.0 +2024-08-03 16:26:24,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167361.33333333334, ans=0.0 +2024-08-03 16:26:33,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=15.0 +2024-08-03 16:26:33,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167398.0, ans=0.125 +2024-08-03 16:26:34,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167398.0, ans=0.1 +2024-08-03 16:26:37,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=167398.0, ans=0.1 +2024-08-03 16:26:37,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.32 vs. limit=10.0 +2024-08-03 16:27:01,463 INFO [train.py:1114] (0/4) Epoch 13, batch 2000, loss[loss=0.1706, simple_loss=0.242, pruned_loss=0.04964, over 13534.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2852, pruned_loss=0.05892, over 2636781.45 frames. ], batch size: 31, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:17,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.733e+01 1.160e+02 1.429e+02 1.703e+02 2.821e+02, threshold=2.859e+02, percent-clipped=2.0 +2024-08-03 16:27:18,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=167544.66666666666, ans=0.125 +2024-08-03 16:27:22,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=167581.33333333334, ans=0.125 +2024-08-03 16:27:30,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=167618.0, ans=0.2 +2024-08-03 16:27:32,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167618.0, ans=0.125 +2024-08-03 16:27:34,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167618.0, ans=0.125 +2024-08-03 16:27:49,197 INFO [train.py:1114] (0/4) Epoch 13, batch 2050, loss[loss=0.1708, simple_loss=0.2447, pruned_loss=0.04841, over 13431.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2845, pruned_loss=0.05902, over 2633630.44 frames. 
], batch size: 32, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:51,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=167691.33333333334, ans=0.07 +2024-08-03 16:28:10,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167764.66666666666, ans=0.1 +2024-08-03 16:28:24,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=167801.33333333334, ans=0.07 +2024-08-03 16:28:28,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=15.0 +2024-08-03 16:28:36,272 INFO [train.py:1114] (0/4) Epoch 13, batch 2100, loss[loss=0.1881, simple_loss=0.2755, pruned_loss=0.05034, over 13544.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2834, pruned_loss=0.05825, over 2639403.53 frames. ], batch size: 37, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:28:39,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=167874.66666666666, ans=0.0 +2024-08-03 16:28:41,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=167874.66666666666, ans=0.125 +2024-08-03 16:28:51,614 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.366e+01 1.102e+02 1.273e+02 1.593e+02 3.536e+02, threshold=2.546e+02, percent-clipped=4.0 +2024-08-03 16:28:52,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=167911.33333333334, ans=0.125 +2024-08-03 16:29:03,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.15 vs. limit=22.5 +2024-08-03 16:29:17,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=168021.33333333334, ans=0.125 +2024-08-03 16:29:21,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=168021.33333333334, ans=0.2 +2024-08-03 16:29:23,277 INFO [train.py:1114] (0/4) Epoch 13, batch 2150, loss[loss=0.1789, simple_loss=0.2635, pruned_loss=0.04717, over 13570.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2829, pruned_loss=0.05829, over 2647340.61 frames. ], batch size: 36, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:29:28,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.05 vs. 
limit=15.0 +2024-08-03 16:29:30,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168058.0, ans=0.125 +2024-08-03 16:29:32,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168094.66666666666, ans=0.1 +2024-08-03 16:29:44,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=168131.33333333334, ans=0.125 +2024-08-03 16:30:07,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168241.33333333334, ans=0.125 +2024-08-03 16:30:08,462 INFO [train.py:1114] (0/4) Epoch 13, batch 2200, loss[loss=0.2046, simple_loss=0.2972, pruned_loss=0.056, over 13405.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2823, pruned_loss=0.05782, over 2645400.24 frames. ], batch size: 39, lr: 9.56e-03, grad_scale: 32.0 +2024-08-03 16:30:09,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=168241.33333333334, ans=0.04949747468305833 +2024-08-03 16:30:18,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-08-03 16:30:22,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168278.0, ans=0.1 +2024-08-03 16:30:23,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.382e+01 1.307e+02 1.724e+02 2.157e+02 3.326e+02, threshold=3.447e+02, percent-clipped=16.0 +2024-08-03 16:30:45,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=168388.0, ans=0.125 +2024-08-03 16:30:50,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=168388.0, ans=0.95 +2024-08-03 16:30:55,470 INFO [train.py:1114] (0/4) Epoch 13, batch 2250, loss[loss=0.1655, simple_loss=0.2561, pruned_loss=0.03741, over 13362.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.282, pruned_loss=0.05781, over 2642544.15 frames. ], batch size: 37, lr: 9.56e-03, grad_scale: 16.0 +2024-08-03 16:31:00,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168424.66666666666, ans=0.125 +2024-08-03 16:31:16,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=168498.0, ans=0.125 +2024-08-03 16:31:17,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=168498.0, ans=0.125 +2024-08-03 16:31:32,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168571.33333333334, ans=0.1 +2024-08-03 16:31:44,498 INFO [train.py:1114] (0/4) Epoch 13, batch 2300, loss[loss=0.1668, simple_loss=0.2474, pruned_loss=0.04313, over 13557.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2815, pruned_loss=0.05787, over 2638478.43 frames. 
], batch size: 33, lr: 9.55e-03, grad_scale: 16.0
+2024-08-03 16:31:46,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=168608.0, ans=0.0
+2024-08-03 16:32:01,031 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.657e+01 1.154e+02 1.335e+02 1.728e+02 3.672e+02, threshold=2.670e+02, percent-clipped=1.0
+2024-08-03 16:32:15,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=168718.0, ans=0.2
+2024-08-03 16:32:29,771 INFO [train.py:1114] (0/4) Epoch 13, batch 2350, loss[loss=0.2019, simple_loss=0.2916, pruned_loss=0.05607, over 13539.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2821, pruned_loss=0.05801, over 2641251.14 frames. ], batch size: 38, lr: 9.55e-03, grad_scale: 16.0
+2024-08-03 16:32:29,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=168791.33333333334, ans=0.0
+2024-08-03 16:32:31,188 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.68 vs. limit=22.5
+2024-08-03 16:32:48,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168828.0, ans=0.125
+2024-08-03 16:32:53,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168864.66666666666, ans=0.125
+2024-08-03 16:32:54,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=168864.66666666666, ans=0.125
+2024-08-03 16:33:00,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=168901.33333333334, ans=0.125
+2024-08-03 16:33:05,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=168901.33333333334, ans=0.125
+2024-08-03 16:33:15,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=168938.0, ans=0.0
+2024-08-03 16:33:17,374 INFO [train.py:1114] (0/4) Epoch 13, batch 2400, loss[loss=0.187, simple_loss=0.272, pruned_loss=0.051, over 13532.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2825, pruned_loss=0.05816, over 2642965.20 frames. ], batch size: 35, lr: 9.54e-03, grad_scale: 32.0
+2024-08-03 16:33:19,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=168974.66666666666, ans=0.125
+2024-08-03 16:33:33,790 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.921e+01 1.143e+02 1.305e+02 1.687e+02 2.768e+02, threshold=2.610e+02, percent-clipped=2.0
+2024-08-03 16:33:40,467 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.686e-03
+2024-08-03 16:33:54,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169121.33333333334, ans=0.0
+2024-08-03 16:33:55,738 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 16:33:57,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169121.33333333334, ans=0.0
+2024-08-03 16:34:04,454 INFO [train.py:1114] (0/4) Epoch 13, batch 2450, loss[loss=0.1992, simple_loss=0.2934, pruned_loss=0.05249, over 13344.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2837, pruned_loss=0.05893, over 2632481.49 frames. ], batch size: 37, lr: 9.54e-03, grad_scale: 32.0
+2024-08-03 16:34:10,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.15 vs. limit=22.5
+2024-08-03 16:34:24,194 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0
+2024-08-03 16:34:24,896 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.90 vs. limit=10.0
+2024-08-03 16:34:36,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=169268.0, ans=0.0
+2024-08-03 16:34:36,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169268.0, ans=0.1
+2024-08-03 16:34:38,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.82 vs. limit=15.0
+2024-08-03 16:34:51,538 INFO [train.py:1114] (0/4) Epoch 13, batch 2500, loss[loss=0.1933, simple_loss=0.2913, pruned_loss=0.0477, over 13399.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2838, pruned_loss=0.0588, over 2636771.60 frames. ], batch size: 39, lr: 9.53e-03, grad_scale: 32.0
+2024-08-03 16:34:57,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=169341.33333333334, ans=0.125
+2024-08-03 16:34:58,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=169341.33333333334, ans=0.125
+2024-08-03 16:35:08,283 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.139e+02 1.387e+02 1.623e+02 2.338e+02, threshold=2.774e+02, percent-clipped=0.0
+2024-08-03 16:35:21,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=169451.33333333334, ans=0.2
+2024-08-03 16:35:23,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=169451.33333333334, ans=0.0
+2024-08-03 16:35:30,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=169488.0, ans=0.125
+2024-08-03 16:35:35,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169524.66666666666, ans=0.125
+2024-08-03 16:35:38,356 INFO [train.py:1114] (0/4) Epoch 13, batch 2550, loss[loss=0.1991, simple_loss=0.2718, pruned_loss=0.06317, over 13519.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2841, pruned_loss=0.05879, over 2637989.54 frames. ], batch size: 31, lr: 9.53e-03, grad_scale: 16.0
+2024-08-03 16:35:56,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.56 vs. limit=15.0
+2024-08-03 16:35:58,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=169598.0, ans=0.0
+2024-08-03 16:36:05,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=169634.66666666666, ans=0.2
+2024-08-03 16:36:21,559 INFO [train.py:1114] (0/4) Epoch 13, batch 2600, loss[loss=0.1941, simple_loss=0.2796, pruned_loss=0.05433, over 13555.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2837, pruned_loss=0.05827, over 2637062.48 frames. ], batch size: 36, lr: 9.52e-03, grad_scale: 16.0
+2024-08-03 16:36:21,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=169708.0, ans=0.2
+2024-08-03 16:36:34,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169744.66666666666, ans=0.1
+2024-08-03 16:36:35,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=169744.66666666666, ans=0.0
+2024-08-03 16:36:39,612 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.885e+01 1.136e+02 1.295e+02 1.531e+02 3.554e+02, threshold=2.589e+02, percent-clipped=4.0
+2024-08-03 16:36:48,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=169818.0, ans=0.125
+2024-08-03 16:36:49,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=169818.0, ans=0.125
+2024-08-03 16:36:57,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=169854.66666666666, ans=0.2
+2024-08-03 16:37:05,504 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.63 vs. limit=15.0
+2024-08-03 16:37:06,856 INFO [train.py:1114] (0/4) Epoch 13, batch 2650, loss[loss=0.1974, simple_loss=0.2787, pruned_loss=0.05805, over 13299.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2844, pruned_loss=0.059, over 2640171.13 frames. ], batch size: 46, lr: 9.52e-03, grad_scale: 16.0
+2024-08-03 16:37:34,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=170001.33333333334, ans=0.125
+2024-08-03 16:37:37,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.57 vs. limit=10.0
+2024-08-03 16:37:38,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.89 vs. limit=15.0
+2024-08-03 16:37:40,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=170001.33333333334, ans=0.035
+2024-08-03 16:37:41,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=170038.0, ans=0.125
+2024-08-03 16:37:41,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=170038.0, ans=0.2
+2024-08-03 16:37:50,568 INFO [train.py:1114] (0/4) Epoch 13, batch 2700, loss[loss=0.2081, simple_loss=0.3016, pruned_loss=0.0573, over 13549.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2841, pruned_loss=0.05853, over 2637830.34 frames. ], batch size: 40, lr: 9.51e-03, grad_scale: 16.0
+2024-08-03 16:38:02,044 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 16:38:07,173 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.611e+01 1.174e+02 1.343e+02 1.652e+02 2.925e+02, threshold=2.686e+02, percent-clipped=2.0
+2024-08-03 16:38:12,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170148.0, ans=0.1
+2024-08-03 16:38:20,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.42 vs. limit=22.5
+2024-08-03 16:38:21,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=170184.66666666666, ans=0.0
+2024-08-03 16:38:26,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=170221.33333333334, ans=0.035
+2024-08-03 16:38:27,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170221.33333333334, ans=0.1
+2024-08-03 16:38:36,366 INFO [train.py:1114] (0/4) Epoch 13, batch 2750, loss[loss=0.1756, simple_loss=0.2616, pruned_loss=0.04476, over 13341.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2832, pruned_loss=0.05845, over 2636037.19 frames. ], batch size: 34, lr: 9.51e-03, grad_scale: 16.0
+2024-08-03 16:38:38,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170258.0, ans=0.125
+2024-08-03 16:38:45,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=170294.66666666666, ans=0.025
+2024-08-03 16:38:46,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=170294.66666666666, ans=0.125
+2024-08-03 16:38:57,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=170331.33333333334, ans=0.125
+2024-08-03 16:38:59,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=170331.33333333334, ans=0.125
+2024-08-03 16:39:00,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0
+2024-08-03 16:39:12,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170404.66666666666, ans=0.1
+2024-08-03 16:39:20,240 INFO [train.py:1114] (0/4) Epoch 13, batch 2800, loss[loss=0.2604, simple_loss=0.3253, pruned_loss=0.09774, over 9467.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2831, pruned_loss=0.05853, over 2627074.02 frames. ], batch size: 96, lr: 9.50e-03, grad_scale: 32.0
+2024-08-03 16:39:36,325 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.148e+02 1.326e+02 1.634e+02 2.406e+02, threshold=2.653e+02, percent-clipped=0.0
+2024-08-03 16:39:42,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=170514.66666666666, ans=0.125
+2024-08-03 16:39:48,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=170551.33333333334, ans=0.025
+2024-08-03 16:39:57,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170588.0, ans=0.1
+2024-08-03 16:40:03,108 INFO [train.py:1114] (0/4) Epoch 13, batch 2850, loss[loss=0.1823, simple_loss=0.2668, pruned_loss=0.04889, over 13375.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2838, pruned_loss=0.059, over 2620835.49 frames. ], batch size: 35, lr: 9.50e-03, grad_scale: 16.0
+2024-08-03 16:40:19,249 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.56 vs. limit=15.0
+2024-08-03 16:40:30,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=170734.66666666666, ans=0.2
+2024-08-03 16:40:46,454 INFO [train.py:1114] (0/4) Epoch 13, batch 2900, loss[loss=0.1778, simple_loss=0.2624, pruned_loss=0.04661, over 13368.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2838, pruned_loss=0.05829, over 2631884.23 frames. ], batch size: 36, lr: 9.49e-03, grad_scale: 16.0
+2024-08-03 16:40:50,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=170808.0, ans=10.0
+2024-08-03 16:40:58,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.18 vs. limit=15.0
+2024-08-03 16:41:03,994 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.894e+01 1.100e+02 1.263e+02 1.445e+02 2.759e+02, threshold=2.526e+02, percent-clipped=1.0
+2024-08-03 16:41:25,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170954.66666666666, ans=0.125
+2024-08-03 16:41:28,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=170954.66666666666, ans=0.125
+2024-08-03 16:41:30,961 INFO [train.py:1114] (0/4) Epoch 13, batch 2950, loss[loss=0.179, simple_loss=0.2633, pruned_loss=0.04736, over 13326.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2831, pruned_loss=0.0584, over 2628909.51 frames. ], batch size: 34, lr: 9.49e-03, grad_scale: 16.0
+2024-08-03 16:41:40,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=171028.0, ans=0.125
+2024-08-03 16:41:41,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=171028.0, ans=0.0
+2024-08-03 16:41:42,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=171028.0, ans=0.05
+2024-08-03 16:41:43,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=171028.0, ans=0.125
+2024-08-03 16:41:49,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=171064.66666666666, ans=0.0
+2024-08-03 16:42:00,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=171101.33333333334, ans=0.125
+2024-08-03 16:42:05,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171138.0, ans=0.1
+2024-08-03 16:42:13,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.62 vs. limit=10.0
+2024-08-03 16:42:14,340 INFO [train.py:1114] (0/4) Epoch 13, batch 3000, loss[loss=0.2122, simple_loss=0.301, pruned_loss=0.06172, over 13550.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2829, pruned_loss=0.05822, over 2629177.96 frames. ], batch size: 37, lr: 9.48e-03, grad_scale: 16.0
+2024-08-03 16:42:14,341 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 16:42:28,564 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1746, simple_loss=0.2745, pruned_loss=0.03731, over 944034.00 frames.
+2024-08-03 16:42:28,564 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 16:42:30,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171174.66666666666, ans=0.1
+2024-08-03 16:42:32,587 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.20 vs. limit=15.0
+2024-08-03 16:42:34,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=171174.66666666666, ans=0.125
+2024-08-03 16:42:34,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=171174.66666666666, ans=0.0
+2024-08-03 16:42:35,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=171174.66666666666, ans=0.0
+2024-08-03 16:42:41,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=171211.33333333334, ans=0.05
+2024-08-03 16:42:42,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=171211.33333333334, ans=0.0
+2024-08-03 16:42:43,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=171211.33333333334, ans=0.07
+2024-08-03 16:42:45,838 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.103e+01 1.098e+02 1.258e+02 1.464e+02 2.884e+02, threshold=2.515e+02, percent-clipped=2.0
+2024-08-03 16:42:51,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.15 vs. limit=12.0
+2024-08-03 16:42:59,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171284.66666666666, ans=0.125
+2024-08-03 16:43:12,962 INFO [train.py:1114] (0/4) Epoch 13, batch 3050, loss[loss=0.1918, simple_loss=0.2759, pruned_loss=0.05388, over 13539.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2836, pruned_loss=0.05844, over 2627145.09 frames. ], batch size: 35, lr: 9.48e-03, grad_scale: 16.0
+2024-08-03 16:43:16,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=171358.0, ans=0.0
+2024-08-03 16:43:18,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=171358.0, ans=0.125
+2024-08-03 16:43:22,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=171394.66666666666, ans=0.0
+2024-08-03 16:43:28,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.80 vs. limit=22.5
+2024-08-03 16:43:40,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=171468.0, ans=0.0
+2024-08-03 16:43:42,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=171468.0, ans=0.0
+2024-08-03 16:43:42,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=171468.0, ans=0.09899494936611666
+2024-08-03 16:43:52,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=171504.66666666666, ans=0.0
+2024-08-03 16:43:57,529 INFO [train.py:1114] (0/4) Epoch 13, batch 3100, loss[loss=0.2176, simple_loss=0.3019, pruned_loss=0.06667, over 13315.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.283, pruned_loss=0.0581, over 2627622.10 frames. ], batch size: 46, lr: 9.47e-03, grad_scale: 16.0
+2024-08-03 16:44:14,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=171614.66666666666, ans=0.0
+2024-08-03 16:44:14,833 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.118e+02 1.244e+02 1.594e+02 3.299e+02, threshold=2.487e+02, percent-clipped=5.0
+2024-08-03 16:44:25,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0
+2024-08-03 16:44:40,338 INFO [train.py:1114] (0/4) Epoch 13, batch 3150, loss[loss=0.2383, simple_loss=0.3144, pruned_loss=0.08112, over 13305.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2827, pruned_loss=0.05789, over 2629179.42 frames. ], batch size: 49, lr: 9.47e-03, grad_scale: 16.0
+2024-08-03 16:45:14,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.83 vs. limit=15.0
+2024-08-03 16:45:14,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.76 vs. limit=15.0
+2024-08-03 16:45:24,109 INFO [train.py:1114] (0/4) Epoch 13, batch 3200, loss[loss=0.1967, simple_loss=0.2834, pruned_loss=0.05501, over 13545.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2816, pruned_loss=0.05705, over 2635245.15 frames. ], batch size: 37, lr: 9.46e-03, grad_scale: 32.0
+2024-08-03 16:45:27,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=12.0
+2024-08-03 16:45:27,941 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.73 vs. limit=22.5
+2024-08-03 16:45:41,822 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.110e+01 1.144e+02 1.329e+02 1.843e+02 2.975e+02, threshold=2.659e+02, percent-clipped=4.0
+2024-08-03 16:45:52,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172018.0, ans=0.0
+2024-08-03 16:45:59,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172054.66666666666, ans=0.125
+2024-08-03 16:46:03,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=172054.66666666666, ans=0.125
+2024-08-03 16:46:05,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=172091.33333333334, ans=0.125
+2024-08-03 16:46:06,405 INFO [train.py:1114] (0/4) Epoch 13, batch 3250, loss[loss=0.2158, simple_loss=0.3012, pruned_loss=0.06525, over 13393.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2819, pruned_loss=0.05681, over 2639944.45 frames. ], batch size: 38, lr: 9.46e-03, grad_scale: 16.0
+2024-08-03 16:46:12,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.58 vs. limit=15.0
+2024-08-03 16:46:20,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.76 vs. limit=10.0
+2024-08-03 16:46:27,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=172164.66666666666, ans=0.125
+2024-08-03 16:46:31,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=172201.33333333334, ans=0.0
+2024-08-03 16:46:40,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172238.0, ans=0.125
+2024-08-03 16:46:50,200 INFO [train.py:1114] (0/4) Epoch 13, batch 3300, loss[loss=0.2113, simple_loss=0.2944, pruned_loss=0.06406, over 12868.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2808, pruned_loss=0.05646, over 2640206.71 frames. ], batch size: 52, lr: 9.45e-03, grad_scale: 16.0
+2024-08-03 16:47:04,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=172311.33333333334, ans=0.125
+2024-08-03 16:47:08,241 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.464e+01 1.115e+02 1.316e+02 1.603e+02 3.409e+02, threshold=2.632e+02, percent-clipped=2.0
+2024-08-03 16:47:21,923 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 16:47:22,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=172384.66666666666, ans=0.09899494936611666
+2024-08-03 16:47:32,868 INFO [train.py:1114] (0/4) Epoch 13, batch 3350, loss[loss=0.2346, simple_loss=0.3141, pruned_loss=0.07762, over 12924.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2823, pruned_loss=0.05739, over 2630012.03 frames. ], batch size: 48, lr: 9.45e-03, grad_scale: 16.0
+2024-08-03 16:47:41,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.65 vs. limit=15.0
+2024-08-03 16:47:42,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=172494.66666666666, ans=0.0
+2024-08-03 16:47:46,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172494.66666666666, ans=0.125
+2024-08-03 16:47:47,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=172494.66666666666, ans=0.2
+2024-08-03 16:48:03,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=172568.0, ans=0.0
+2024-08-03 16:48:08,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=172604.66666666666, ans=0.125
+2024-08-03 16:48:15,662 INFO [train.py:1114] (0/4) Epoch 13, batch 3400, loss[loss=0.1572, simple_loss=0.2378, pruned_loss=0.03835, over 13569.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2822, pruned_loss=0.05758, over 2624984.64 frames. ], batch size: 31, lr: 9.44e-03, grad_scale: 8.0
+2024-08-03 16:48:16,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-08-03 16:48:20,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.27 vs. limit=15.0
+2024-08-03 16:48:34,569 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.232e+01 1.137e+02 1.264e+02 1.560e+02 2.546e+02, threshold=2.528e+02, percent-clipped=0.0
+2024-08-03 16:48:43,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=172751.33333333334, ans=0.125
+2024-08-03 16:48:46,754 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 16:48:47,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=172751.33333333334, ans=0.025
+2024-08-03 16:48:51,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=172788.0, ans=0.125
+2024-08-03 16:48:53,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172788.0, ans=0.0
+2024-08-03 16:48:58,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=172788.0, ans=0.2
+2024-08-03 16:48:59,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=172824.66666666666, ans=0.125
+2024-08-03 16:49:00,075 INFO [train.py:1114] (0/4) Epoch 13, batch 3450, loss[loss=0.2287, simple_loss=0.308, pruned_loss=0.07465, over 12902.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2831, pruned_loss=0.05806, over 2628876.33 frames. ], batch size: 52, lr: 9.44e-03, grad_scale: 8.0
+2024-08-03 16:49:24,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.28 vs. limit=15.0
+2024-08-03 16:49:26,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172934.66666666666, ans=0.1
+2024-08-03 16:49:36,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172971.33333333334, ans=0.125
+2024-08-03 16:49:37,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=172971.33333333334, ans=0.2
+2024-08-03 16:49:43,780 INFO [train.py:1114] (0/4) Epoch 13, batch 3500, loss[loss=0.1627, simple_loss=0.2481, pruned_loss=0.03864, over 13544.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2822, pruned_loss=0.05785, over 2630900.71 frames. ], batch size: 34, lr: 9.43e-03, grad_scale: 8.0
+2024-08-03 16:49:53,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=173044.66666666666, ans=0.0
+2024-08-03 16:50:03,120 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.139e+02 1.315e+02 1.608e+02 2.660e+02, threshold=2.630e+02, percent-clipped=2.0
+2024-08-03 16:50:21,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=173154.66666666666, ans=0.125
+2024-08-03 16:50:27,045 INFO [train.py:1114] (0/4) Epoch 13, batch 3550, loss[loss=0.1911, simple_loss=0.2787, pruned_loss=0.05173, over 12488.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.285, pruned_loss=0.05909, over 2629479.94 frames. ], batch size: 58, lr: 9.43e-03, grad_scale: 8.0
+2024-08-03 16:50:51,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=15.0
+2024-08-03 16:51:04,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173338.0, ans=0.125
+2024-08-03 16:51:11,545 INFO [train.py:1114] (0/4) Epoch 13, batch 3600, loss[loss=0.2417, simple_loss=0.3078, pruned_loss=0.08774, over 9308.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2895, pruned_loss=0.06316, over 2486371.05 frames. ], batch size: 96, lr: 9.42e-03, grad_scale: 16.0
+2024-08-03 16:51:14,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=173374.66666666666, ans=0.125
+2024-08-03 16:51:20,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=173411.33333333334, ans=0.125
+2024-08-03 16:51:21,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=173411.33333333334, ans=0.0
+2024-08-03 16:51:23,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.23 vs. limit=15.0
+2024-08-03 16:51:26,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=173411.33333333334, ans=0.0
+2024-08-03 16:51:31,033 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.213e+02 1.304e+02 1.372e+02 1.765e+02, threshold=2.609e+02, percent-clipped=0.0
+2024-08-03 16:51:39,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.38 vs. limit=22.5
+2024-08-03 16:51:41,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.17 vs. limit=6.0
+2024-08-03 16:51:45,853 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-13.pt
+2024-08-03 16:52:47,633 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03998, over 13344.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03998, over 13344.00 frames. ], batch size: 33, lr: 9.08e-03, grad_scale: 32.0
+2024-08-03 16:52:47,634 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 16:52:54,109 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.6199, 2.9236, 2.4960, 2.1959], device='cuda:0')
+2024-08-03 16:53:02,074 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1773, simple_loss=0.2784, pruned_loss=0.03813, over 944034.00 frames.
+2024-08-03 16:53:02,075 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 16:53:04,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=173521.33333333334, ans=0.125
+2024-08-03 16:53:27,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173594.66666666666, ans=0.1
+2024-08-03 16:53:45,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=173668.0, ans=0.2
+2024-08-03 16:53:49,558 INFO [train.py:1114] (0/4) Epoch 14, batch 50, loss[loss=0.1777, simple_loss=0.2654, pruned_loss=0.04494, over 13428.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2839, pruned_loss=0.05748, over 577328.85 frames. ], batch size: 32, lr: 9.07e-03, grad_scale: 32.0
+2024-08-03 16:53:58,666 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 16:54:03,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=173741.33333333334, ans=0.0
+2024-08-03 16:54:22,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.70 vs. limit=10.0
+2024-08-03 16:54:27,228 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.175e+02 1.367e+02 1.781e+02 2.550e+02, threshold=2.735e+02, percent-clipped=0.0
+2024-08-03 16:54:30,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=173814.66666666666, ans=0.125
+2024-08-03 16:54:33,932 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0
+2024-08-03 16:54:35,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173851.33333333334, ans=0.1
+2024-08-03 16:54:42,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=173851.33333333334, ans=0.125
+2024-08-03 16:54:43,512 INFO [train.py:1114] (0/4) Epoch 14, batch 100, loss[loss=0.1831, simple_loss=0.2729, pruned_loss=0.04665, over 13531.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.285, pruned_loss=0.05779, over 1025753.81 frames. ], batch size: 35, lr: 9.07e-03, grad_scale: 32.0
+2024-08-03 16:54:57,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=173924.66666666666, ans=0.025
+2024-08-03 16:55:09,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=15.0
+2024-08-03 16:55:17,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=173998.0, ans=0.0
+2024-08-03 16:55:30,141 INFO [train.py:1114] (0/4) Epoch 14, batch 150, loss[loss=0.1938, simple_loss=0.2673, pruned_loss=0.06012, over 13431.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2821, pruned_loss=0.05622, over 1386806.37 frames. ], batch size: 32, lr: 9.06e-03, grad_scale: 32.0
+2024-08-03 16:55:30,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=174071.33333333334, ans=0.0
+2024-08-03 16:55:42,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174108.0, ans=0.0
+2024-08-03 16:55:58,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=174144.66666666666, ans=0.025
+2024-08-03 16:56:03,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.908e+01 1.123e+02 1.329e+02 1.786e+02 3.044e+02, threshold=2.658e+02, percent-clipped=1.0
+2024-08-03 16:56:28,040 INFO [train.py:1114] (0/4) Epoch 14, batch 200, loss[loss=0.2043, simple_loss=0.2884, pruned_loss=0.0601, over 12421.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2811, pruned_loss=0.05632, over 1665483.25 frames. ], batch size: 58, lr: 9.06e-03, grad_scale: 32.0
+2024-08-03 16:56:30,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=174254.66666666666, ans=0.125
+2024-08-03 16:56:36,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174291.33333333334, ans=0.1
+2024-08-03 16:56:40,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=174291.33333333334, ans=0.0
+2024-08-03 16:56:55,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=174364.66666666666, ans=0.025
+2024-08-03 16:56:55,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=174364.66666666666, ans=0.04949747468305833
+2024-08-03 16:57:00,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174364.66666666666, ans=0.125
+2024-08-03 16:57:07,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=174401.33333333334, ans=0.2
+2024-08-03 16:57:14,951 INFO [train.py:1114] (0/4) Epoch 14, batch 250, loss[loss=0.2194, simple_loss=0.2998, pruned_loss=0.06947, over 13276.00 frames. ], tot_loss[loss=0.197, simple_loss=0.281, pruned_loss=0.05643, over 1884407.76 frames. ], batch size: 46, lr: 9.05e-03, grad_scale: 32.0
+2024-08-03 16:57:16,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=174438.0, ans=0.125
+2024-08-03 16:57:33,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=174474.66666666666, ans=0.025
+2024-08-03 16:57:43,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=174511.33333333334, ans=0.125
+2024-08-03 16:57:49,035 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.116e+01 1.178e+02 1.381e+02 1.725e+02 3.085e+02, threshold=2.762e+02, percent-clipped=4.0
+2024-08-03 16:57:50,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.05 vs. limit=22.5
+2024-08-03 16:57:52,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174548.0, ans=0.125
+2024-08-03 16:58:05,305 INFO [train.py:1114] (0/4) Epoch 14, batch 300, loss[loss=0.2244, simple_loss=0.3109, pruned_loss=0.06888, over 13433.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.281, pruned_loss=0.05624, over 2051704.52 frames. ], batch size: 42, lr: 9.05e-03, grad_scale: 32.0
+2024-08-03 16:58:25,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0
+2024-08-03 16:58:29,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.60 vs. limit=22.5
+2024-08-03 16:58:31,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=174731.33333333334, ans=0.2
+2024-08-03 16:58:35,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=174731.33333333334, ans=0.2
+2024-08-03 16:58:52,972 INFO [train.py:1114] (0/4) Epoch 14, batch 350, loss[loss=0.1803, simple_loss=0.2556, pruned_loss=0.05255, over 13593.00 frames. ], tot_loss[loss=0.198, simple_loss=0.282, pruned_loss=0.05697, over 2181855.11 frames. ], batch size: 33, lr: 9.04e-03, grad_scale: 32.0
+2024-08-03 16:59:20,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.65 vs. limit=10.0
+2024-08-03 16:59:30,510 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=7.879e-03
+2024-08-03 16:59:32,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=174878.0, ans=0.2
+2024-08-03 16:59:33,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174878.0, ans=0.125
+2024-08-03 16:59:38,117 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.118e+02 1.275e+02 1.546e+02 2.611e+02, threshold=2.551e+02, percent-clipped=0.0
+2024-08-03 16:59:57,469 INFO [train.py:1114] (0/4) Epoch 14, batch 400, loss[loss=0.2066, simple_loss=0.2955, pruned_loss=0.05879, over 13359.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2824, pruned_loss=0.05705, over 2285507.54 frames. ], batch size: 37, lr: 9.04e-03, grad_scale: 32.0
+2024-08-03 16:59:59,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=174988.0, ans=0.125
+2024-08-03 17:00:02,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=174988.0, ans=0.0
+2024-08-03 17:00:11,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175024.66666666666, ans=0.125
+2024-08-03 17:00:15,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=175024.66666666666, ans=0.015
+2024-08-03 17:00:15,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=175024.66666666666, ans=0.125
+2024-08-03 17:00:28,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=175098.0, ans=0.2
+2024-08-03 17:00:34,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=22.5
+2024-08-03 17:00:47,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=175171.33333333334, ans=0.125
+2024-08-03 17:00:48,049 INFO [train.py:1114] (0/4) Epoch 14, batch 450, loss[loss=0.1918, simple_loss=0.2823, pruned_loss=0.05068, over 13547.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2822, pruned_loss=0.05695, over 2359415.00 frames. ], batch size: 38, lr: 9.04e-03, grad_scale: 32.0
+2024-08-03 17:01:18,913 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.283e+01 1.087e+02 1.281e+02 1.631e+02 3.461e+02, threshold=2.562e+02, percent-clipped=3.0
+2024-08-03 17:01:36,929 INFO [train.py:1114] (0/4) Epoch 14, batch 500, loss[loss=0.2278, simple_loss=0.3076, pruned_loss=0.07399, over 13452.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.281, pruned_loss=0.05634, over 2425056.78 frames. ], batch size: 43, lr: 9.03e-03, grad_scale: 32.0
+2024-08-03 17:02:04,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=175464.66666666666, ans=0.0
+2024-08-03 17:02:08,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=175464.66666666666, ans=0.025
+2024-08-03 17:02:15,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=175501.33333333334, ans=0.125
+2024-08-03 17:02:18,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=175501.33333333334, ans=0.125
+2024-08-03 17:02:21,557 INFO [train.py:1114] (0/4) Epoch 14, batch 550, loss[loss=0.1971, simple_loss=0.29, pruned_loss=0.05205, over 13022.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2807, pruned_loss=0.05612, over 2466634.77 frames. ], batch size: 48, lr: 9.03e-03, grad_scale: 32.0
+2024-08-03 17:02:53,537 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.358e+01 1.151e+02 1.294e+02 1.518e+02 2.416e+02, threshold=2.587e+02, percent-clipped=0.0
+2024-08-03 17:02:55,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175648.0, ans=0.1
+2024-08-03 17:03:03,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=175684.66666666666, ans=0.125
+2024-08-03 17:03:08,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=175721.33333333334, ans=0.0
+2024-08-03 17:03:08,898 INFO [train.py:1114] (0/4) Epoch 14, batch 600, loss[loss=0.2015, simple_loss=0.2871, pruned_loss=0.05794, over 13304.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.281, pruned_loss=0.0561, over 2505698.27 frames. ], batch size: 46, lr: 9.02e-03, grad_scale: 16.0
+2024-08-03 17:03:28,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=175794.66666666666, ans=0.125
+2024-08-03 17:03:29,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=175794.66666666666, ans=0.125
+2024-08-03 17:03:31,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0
+2024-08-03 17:03:34,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.97 vs. limit=22.5
+2024-08-03 17:03:57,679 INFO [train.py:1114] (0/4) Epoch 14, batch 650, loss[loss=0.18, simple_loss=0.2738, pruned_loss=0.0431, over 13531.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2809, pruned_loss=0.05595, over 2541104.64 frames. ], batch size: 37, lr: 9.02e-03, grad_scale: 8.0
+2024-08-03 17:04:04,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.09 vs. limit=22.5
+2024-08-03 17:04:06,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=175941.33333333334, ans=0.025
+2024-08-03 17:04:09,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=175941.33333333334, ans=0.2
+2024-08-03 17:04:13,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175941.33333333334, ans=0.1
+2024-08-03 17:04:20,193 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-48000.pt
+2024-08-03 17:04:23,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=175978.0, ans=0.0
+2024-08-03 17:04:25,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=176014.66666666666, ans=0.125
+2024-08-03 17:04:29,806 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.160e+02 1.386e+02 1.901e+02 3.564e+02, threshold=2.772e+02, percent-clipped=5.0
+2024-08-03 17:04:30,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=176014.66666666666, ans=0.125
+2024-08-03 17:04:30,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176014.66666666666, ans=0.1
+2024-08-03 17:04:34,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=176051.33333333334, ans=0.0
+2024-08-03 17:04:46,215 INFO [train.py:1114] (0/4) Epoch 14, batch 700, loss[loss=0.1925, simple_loss=0.275, pruned_loss=0.05502, over 13512.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2808, pruned_loss=0.05603, over 2563608.13 frames. ], batch size: 35, lr: 9.01e-03, grad_scale: 8.0
+2024-08-03 17:04:46,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=176088.0, ans=0.0
+2024-08-03 17:04:52,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176088.0, ans=0.1
+2024-08-03 17:04:55,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=176124.66666666666, ans=0.125
+2024-08-03 17:04:56,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=176124.66666666666, ans=0.2
+2024-08-03 17:05:01,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=176124.66666666666, ans=0.5
+2024-08-03 17:05:15,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.71 vs. limit=10.0
+2024-08-03 17:05:15,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=176198.0, ans=0.0
+2024-08-03 17:05:18,452 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 17:05:22,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=176234.66666666666, ans=0.125
+2024-08-03 17:05:23,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=176234.66666666666, ans=0.125
+2024-08-03 17:05:31,382 INFO [train.py:1114] (0/4) Epoch 14, batch 750, loss[loss=0.2123, simple_loss=0.3003, pruned_loss=0.06211, over 13370.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2805, pruned_loss=0.05629, over 2581333.86 frames. ], batch size: 37, lr: 9.01e-03, grad_scale: 8.0
+2024-08-03 17:05:31,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176271.33333333334, ans=0.1
+2024-08-03 17:05:36,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176271.33333333334, ans=0.125
+2024-08-03 17:05:37,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=176271.33333333334, ans=0.125
+2024-08-03 17:05:38,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=176271.33333333334, ans=0.0
+2024-08-03 17:06:00,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=176381.33333333334, ans=0.125
+2024-08-03 17:06:02,474 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.800e+01 1.140e+02 1.290e+02 1.721e+02 6.299e+02, threshold=2.581e+02, percent-clipped=4.0
+2024-08-03 17:06:07,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0
+2024-08-03 17:06:09,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.32 vs. limit=22.5
+2024-08-03 17:06:17,185 INFO [train.py:1114] (0/4) Epoch 14, batch 800, loss[loss=0.1913, simple_loss=0.2766, pruned_loss=0.05298, over 13359.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2807, pruned_loss=0.05643, over 2595954.33 frames. ], batch size: 33, lr: 9.00e-03, grad_scale: 16.0
+2024-08-03 17:06:19,969 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 17:06:20,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176454.66666666666, ans=0.125
+2024-08-03 17:06:24,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176454.66666666666, ans=0.0
+2024-08-03 17:06:32,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=176491.33333333334, ans=0.125
+2024-08-03 17:06:34,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176491.33333333334, ans=0.1
+2024-08-03 17:06:40,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=176528.0, ans=0.125
+2024-08-03 17:07:05,376 INFO [train.py:1114] (0/4) Epoch 14, batch 850, loss[loss=0.1871, simple_loss=0.2859, pruned_loss=0.04417, over 13314.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2808, pruned_loss=0.05656, over 2608732.49 frames. ], batch size: 40, lr: 9.00e-03, grad_scale: 16.0
+2024-08-03 17:07:15,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.00 vs. limit=15.0
+2024-08-03 17:07:40,309 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.443e+01 1.087e+02 1.211e+02 1.412e+02 2.074e+02, threshold=2.422e+02, percent-clipped=0.0
+2024-08-03 17:07:43,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=176748.0, ans=0.125
+2024-08-03 17:07:45,360 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 17:07:55,298 INFO [train.py:1114] (0/4) Epoch 14, batch 900, loss[loss=0.1813, simple_loss=0.2585, pruned_loss=0.05206, over 13356.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2809, pruned_loss=0.0566, over 2611046.65 frames. ], batch size: 33, lr: 8.99e-03, grad_scale: 16.0
+2024-08-03 17:08:12,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. limit=15.0
+2024-08-03 17:08:20,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=176894.66666666666, ans=0.125
+2024-08-03 17:08:24,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=176931.33333333334, ans=0.125
+2024-08-03 17:08:25,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=176931.33333333334, ans=0.125
+2024-08-03 17:08:42,719 INFO [train.py:1114] (0/4) Epoch 14, batch 950, loss[loss=0.182, simple_loss=0.2677, pruned_loss=0.04817, over 13534.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.282, pruned_loss=0.05724, over 2612118.56 frames. ], batch size: 34, lr: 8.99e-03, grad_scale: 16.0
+2024-08-03 17:08:46,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=177004.66666666666, ans=0.0
+2024-08-03 17:08:56,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.42 vs. limit=22.5
+2024-08-03 17:08:58,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177041.33333333334, ans=0.125
+2024-08-03 17:08:59,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=177041.33333333334, ans=0.09899494936611666
+2024-08-03 17:09:02,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=177078.0, ans=0.2
+2024-08-03 17:09:02,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=177078.0, ans=0.07
+2024-08-03 17:09:03,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177078.0, ans=0.125
+2024-08-03 17:09:15,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=177114.66666666666, ans=0.0
+2024-08-03 17:09:15,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.964e+01 1.158e+02 1.398e+02 1.727e+02 2.347e+02, threshold=2.796e+02, percent-clipped=0.0
+2024-08-03 17:09:21,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=177151.33333333334, ans=0.125
+2024-08-03 17:09:27,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=177151.33333333334, ans=0.0
+2024-08-03 17:09:30,732 INFO [train.py:1114] (0/4) Epoch 14, batch 1000, loss[loss=0.1868, simple_loss=0.2746, pruned_loss=0.04948, over 13347.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2824, pruned_loss=0.0576, over 2610731.56 frames. ], batch size: 35, lr: 8.99e-03, grad_scale: 16.0
+2024-08-03 17:09:33,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177188.0, ans=0.1
+2024-08-03 17:09:35,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.42 vs. limit=15.0
+2024-08-03 17:09:36,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=177188.0, ans=10.0
+2024-08-03 17:09:44,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=177224.66666666666, ans=0.125
+2024-08-03 17:09:56,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=177261.33333333334, ans=0.025
+2024-08-03 17:10:18,999 INFO [train.py:1114] (0/4) Epoch 14, batch 1050, loss[loss=0.2067, simple_loss=0.292, pruned_loss=0.06069, over 13578.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2806, pruned_loss=0.05651, over 2614763.97 frames. ], batch size: 39, lr: 8.98e-03, grad_scale: 16.0
+2024-08-03 17:10:21,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=177371.33333333334, ans=0.025
+2024-08-03 17:10:31,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.08 vs. limit=22.5
+2024-08-03 17:10:32,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.71 vs. limit=15.0
+2024-08-03 17:10:49,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=177481.33333333334, ans=0.0
+2024-08-03 17:10:51,917 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.165e+01 1.080e+02 1.235e+02 1.446e+02 2.124e+02, threshold=2.470e+02, percent-clipped=0.0
+2024-08-03 17:11:06,551 INFO [train.py:1114] (0/4) Epoch 14, batch 1100, loss[loss=0.2007, simple_loss=0.2887, pruned_loss=0.05636, over 13569.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2806, pruned_loss=0.05651, over 2618928.13 frames. ], batch size: 36, lr: 8.98e-03, grad_scale: 16.0
+2024-08-03 17:11:27,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0
+2024-08-03 17:11:28,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177628.0, ans=0.125
+2024-08-03 17:11:28,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. limit=15.0
+2024-08-03 17:11:29,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177628.0, ans=0.125
+2024-08-03 17:11:50,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=177701.33333333334, ans=0.0
+2024-08-03 17:11:56,971 INFO [train.py:1114] (0/4) Epoch 14, batch 1150, loss[loss=0.1895, simple_loss=0.2757, pruned_loss=0.05161, over 13569.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2804, pruned_loss=0.05652, over 2618047.06 frames. ], batch size: 36, lr: 8.97e-03, grad_scale: 16.0
+2024-08-03 17:12:10,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177774.66666666666, ans=0.1
+2024-08-03 17:12:28,533 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.383e+01 1.181e+02 1.323e+02 1.686e+02 3.018e+02, threshold=2.646e+02, percent-clipped=3.0
+2024-08-03 17:12:41,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=177884.66666666666, ans=0.0
+2024-08-03 17:12:43,164 INFO [train.py:1114] (0/4) Epoch 14, batch 1200, loss[loss=0.1954, simple_loss=0.2802, pruned_loss=0.05533, over 13577.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2812, pruned_loss=0.05677, over 2615307.49 frames. ], batch size: 39, lr: 8.97e-03, grad_scale: 32.0
+2024-08-03 17:12:47,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.32 vs. limit=15.0
+2024-08-03 17:12:51,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=177958.0, ans=0.0
+2024-08-03 17:12:58,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177958.0, ans=0.1
+2024-08-03 17:13:01,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=177994.66666666666, ans=0.2
+2024-08-03 17:13:28,059 INFO [train.py:1114] (0/4) Epoch 14, batch 1250, loss[loss=0.2085, simple_loss=0.2933, pruned_loss=0.06182, over 13438.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2815, pruned_loss=0.05666, over 2627353.74 frames. ], batch size: 42, lr: 8.96e-03, grad_scale: 32.0
+2024-08-03 17:13:29,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=178104.66666666666, ans=0.07
+2024-08-03 17:13:31,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.91 vs. limit=12.0
+2024-08-03 17:14:01,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.606e+01 1.145e+02 1.312e+02 1.553e+02 2.666e+02, threshold=2.625e+02, percent-clipped=1.0
+2024-08-03 17:14:01,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178214.66666666666, ans=0.0
+2024-08-03 17:14:11,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178251.33333333334, ans=0.125
+2024-08-03 17:14:12,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.72 vs. limit=22.5
+2024-08-03 17:14:15,815 INFO [train.py:1114] (0/4) Epoch 14, batch 1300, loss[loss=0.2131, simple_loss=0.294, pruned_loss=0.06612, over 12895.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2805, pruned_loss=0.0562, over 2630473.32 frames. ], batch size: 52, lr: 8.96e-03, grad_scale: 32.0
+2024-08-03 17:14:33,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=178324.66666666666, ans=0.125
+2024-08-03 17:14:43,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.42 vs. limit=15.0
+2024-08-03 17:14:47,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178398.0, ans=0.0
+2024-08-03 17:14:48,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=178398.0, ans=0.1
+2024-08-03 17:15:05,154 INFO [train.py:1114] (0/4) Epoch 14, batch 1350, loss[loss=0.1841, simple_loss=0.2747, pruned_loss=0.04669, over 13551.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2801, pruned_loss=0.05594, over 2638656.19 frames.
], batch size: 37, lr: 8.95e-03, grad_scale: 32.0 +2024-08-03 17:15:11,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=178471.33333333334, ans=0.0 +2024-08-03 17:15:12,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=178471.33333333334, ans=0.125 +2024-08-03 17:15:20,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=178508.0, ans=0.0 +2024-08-03 17:15:36,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.97 vs. limit=10.0 +2024-08-03 17:15:37,513 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.701e+01 1.127e+02 1.257e+02 1.561e+02 2.635e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 17:15:43,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=178618.0, ans=0.125 +2024-08-03 17:15:51,704 INFO [train.py:1114] (0/4) Epoch 14, batch 1400, loss[loss=0.1763, simple_loss=0.2506, pruned_loss=0.05104, over 13273.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2794, pruned_loss=0.05559, over 2642722.05 frames. ], batch size: 31, lr: 8.95e-03, grad_scale: 16.0 +2024-08-03 17:16:05,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=178691.33333333334, ans=0.125 +2024-08-03 17:16:10,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=178728.0, ans=0.125 +2024-08-03 17:16:20,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.02 vs. limit=15.0 +2024-08-03 17:16:20,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178764.66666666666, ans=0.1 +2024-08-03 17:16:28,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178801.33333333334, ans=0.125 +2024-08-03 17:16:28,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=178801.33333333334, ans=0.125 +2024-08-03 17:16:30,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178801.33333333334, ans=0.1 +2024-08-03 17:16:30,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=178801.33333333334, ans=0.125 +2024-08-03 17:16:31,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=178801.33333333334, ans=0.0 +2024-08-03 17:16:36,741 INFO [train.py:1114] (0/4) Epoch 14, batch 1450, loss[loss=0.2075, simple_loss=0.297, pruned_loss=0.05893, over 13400.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2809, pruned_loss=0.05672, over 2641550.36 frames. 
], batch size: 43, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:16:36,903 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:16:43,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=178838.0, ans=0.125 +2024-08-03 17:16:56,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178911.33333333334, ans=0.1 +2024-08-03 17:16:57,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178911.33333333334, ans=0.1 +2024-08-03 17:16:58,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0 +2024-08-03 17:17:08,372 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.238e+01 1.149e+02 1.315e+02 1.594e+02 2.634e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 17:17:24,034 INFO [train.py:1114] (0/4) Epoch 14, batch 1500, loss[loss=0.1899, simple_loss=0.2852, pruned_loss=0.04726, over 13399.00 frames. ], tot_loss[loss=0.197, simple_loss=0.281, pruned_loss=0.05653, over 2640944.94 frames. ], batch size: 39, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:17:26,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179021.33333333334, ans=0.125 +2024-08-03 17:17:31,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=179021.33333333334, ans=0.125 +2024-08-03 17:17:40,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=179058.0, ans=0.125 +2024-08-03 17:17:42,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=179094.66666666666, ans=0.1 +2024-08-03 17:17:48,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=179094.66666666666, ans=0.125 +2024-08-03 17:17:49,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179094.66666666666, ans=0.125 +2024-08-03 17:17:50,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=179094.66666666666, ans=0.025 +2024-08-03 17:18:10,635 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.57 vs. limit=15.0 +2024-08-03 17:18:11,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179204.66666666666, ans=0.125 +2024-08-03 17:18:11,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179204.66666666666, ans=0.125 +2024-08-03 17:18:11,852 INFO [train.py:1114] (0/4) Epoch 14, batch 1550, loss[loss=0.2163, simple_loss=0.3092, pruned_loss=0.06171, over 13389.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2815, pruned_loss=0.05674, over 2631117.33 frames. 
], batch size: 41, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:18:18,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=179204.66666666666, ans=0.0 +2024-08-03 17:18:22,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=179241.33333333334, ans=0.2 +2024-08-03 17:18:30,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179241.33333333334, ans=0.04949747468305833 +2024-08-03 17:18:31,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=22.5 +2024-08-03 17:18:37,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=179278.0, ans=0.125 +2024-08-03 17:18:45,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.351e+01 1.093e+02 1.288e+02 1.698e+02 2.728e+02, threshold=2.576e+02, percent-clipped=2.0 +2024-08-03 17:19:00,522 INFO [train.py:1114] (0/4) Epoch 14, batch 1600, loss[loss=0.1939, simple_loss=0.2773, pruned_loss=0.05522, over 13571.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2809, pruned_loss=0.05635, over 2624502.05 frames. ], batch size: 39, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:01,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=179388.0, ans=0.125 +2024-08-03 17:19:07,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.44 vs. limit=15.0 +2024-08-03 17:19:23,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=179461.33333333334, ans=0.125 +2024-08-03 17:19:25,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.54 vs. limit=15.0 +2024-08-03 17:19:45,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179571.33333333334, ans=0.1 +2024-08-03 17:19:46,115 INFO [train.py:1114] (0/4) Epoch 14, batch 1650, loss[loss=0.196, simple_loss=0.2885, pruned_loss=0.05178, over 13324.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2808, pruned_loss=0.05655, over 2622180.89 frames. ], batch size: 40, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:46,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=179571.33333333334, ans=0.125 +2024-08-03 17:19:47,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179571.33333333334, ans=0.125 +2024-08-03 17:19:57,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=179608.0, ans=0.2 +2024-08-03 17:19:58,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0 +2024-08-03 17:20:12,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. 
limit=15.0 +2024-08-03 17:20:33,198 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.145e+02 1.327e+02 1.825e+02 3.127e+02, threshold=2.655e+02, percent-clipped=5.0 +2024-08-03 17:20:33,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=179681.33333333334, ans=0.0 +2024-08-03 17:20:46,602 INFO [train.py:1114] (0/4) Epoch 14, batch 1700, loss[loss=0.1506, simple_loss=0.2343, pruned_loss=0.03339, over 13248.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2807, pruned_loss=0.05641, over 2630995.84 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:20:47,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=179754.66666666666, ans=0.125 +2024-08-03 17:20:55,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=179791.33333333334, ans=0.125 +2024-08-03 17:21:09,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179828.0, ans=0.1 +2024-08-03 17:21:16,640 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:21:17,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=179864.66666666666, ans=0.125 +2024-08-03 17:21:23,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179901.33333333334, ans=0.0 +2024-08-03 17:21:29,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179901.33333333334, ans=0.1 +2024-08-03 17:21:33,646 INFO [train.py:1114] (0/4) Epoch 14, batch 1750, loss[loss=0.1863, simple_loss=0.2704, pruned_loss=0.05109, over 13522.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2803, pruned_loss=0.05604, over 2634472.98 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:21:38,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=179938.0, ans=0.2 +2024-08-03 17:21:52,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.73 vs. limit=12.0 +2024-08-03 17:21:56,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.95 vs. limit=10.0 +2024-08-03 17:21:58,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=180011.33333333334, ans=0.125 +2024-08-03 17:22:07,571 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.076e+01 1.125e+02 1.266e+02 1.724e+02 3.044e+02, threshold=2.532e+02, percent-clipped=5.0 +2024-08-03 17:22:08,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.46 vs. limit=15.0 +2024-08-03 17:22:13,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=180084.66666666666, ans=0.2 +2024-08-03 17:22:16,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. 
limit=5.0 +2024-08-03 17:22:23,007 INFO [train.py:1114] (0/4) Epoch 14, batch 1800, loss[loss=0.2166, simple_loss=0.3061, pruned_loss=0.06351, over 13552.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2805, pruned_loss=0.05626, over 2635894.43 frames. ], batch size: 38, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:22:33,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=12.0 +2024-08-03 17:22:41,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180158.0, ans=0.125 +2024-08-03 17:22:43,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.00 vs. limit=15.0 +2024-08-03 17:22:51,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=180231.33333333334, ans=0.025 +2024-08-03 17:22:58,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=180231.33333333334, ans=0.07 +2024-08-03 17:23:00,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180231.33333333334, ans=0.1 +2024-08-03 17:23:01,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=180268.0, ans=0.125 +2024-08-03 17:23:10,735 INFO [train.py:1114] (0/4) Epoch 14, batch 1850, loss[loss=0.211, simple_loss=0.2907, pruned_loss=0.06566, over 13403.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2803, pruned_loss=0.05616, over 2638151.23 frames. ], batch size: 39, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:23:19,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=12.0 +2024-08-03 17:23:38,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180414.66666666666, ans=0.125 +2024-08-03 17:23:42,948 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.173e+01 1.187e+02 1.383e+02 1.867e+02 3.590e+02, threshold=2.765e+02, percent-clipped=8.0 +2024-08-03 17:23:53,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=180451.33333333334, ans=0.0 +2024-08-03 17:23:53,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=180451.33333333334, ans=0.125 +2024-08-03 17:23:56,653 INFO [train.py:1114] (0/4) Epoch 14, batch 1900, loss[loss=0.2099, simple_loss=0.2971, pruned_loss=0.0614, over 13327.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2809, pruned_loss=0.05633, over 2639938.80 frames. 
], batch size: 40, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:23:57,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=180488.0, ans=0.2 +2024-08-03 17:23:57,913 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.308e-03 +2024-08-03 17:24:06,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180524.66666666666, ans=0.1 +2024-08-03 17:24:12,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180524.66666666666, ans=0.125 +2024-08-03 17:24:26,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180598.0, ans=0.125 +2024-08-03 17:24:32,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.44 vs. limit=22.5 +2024-08-03 17:24:40,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=180634.66666666666, ans=0.0 +2024-08-03 17:24:43,811 INFO [train.py:1114] (0/4) Epoch 14, batch 1950, loss[loss=0.187, simple_loss=0.2699, pruned_loss=0.05209, over 13555.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2814, pruned_loss=0.05622, over 2646453.96 frames. ], batch size: 36, lr: 8.90e-03, grad_scale: 16.0 +2024-08-03 17:24:47,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=180671.33333333334, ans=0.125 +2024-08-03 17:24:58,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=180708.0, ans=0.09899494936611666 +2024-08-03 17:25:13,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=15.0 +2024-08-03 17:25:19,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.397e+01 1.089e+02 1.232e+02 1.473e+02 2.566e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 17:25:27,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180818.0, ans=0.0 +2024-08-03 17:25:32,217 INFO [train.py:1114] (0/4) Epoch 14, batch 2000, loss[loss=0.188, simple_loss=0.2636, pruned_loss=0.05625, over 13537.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2822, pruned_loss=0.05682, over 2635992.11 frames. ], batch size: 31, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:25:32,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=180854.66666666666, ans=0.125 +2024-08-03 17:25:38,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=180854.66666666666, ans=0.125 +2024-08-03 17:25:42,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.76 vs. limit=15.0 +2024-08-03 17:26:21,667 INFO [train.py:1114] (0/4) Epoch 14, batch 2050, loss[loss=0.1757, simple_loss=0.2545, pruned_loss=0.04847, over 13405.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2808, pruned_loss=0.0566, over 2633009.02 frames. 
], batch size: 32, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:26:29,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181038.0, ans=0.1 +2024-08-03 17:26:33,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=181074.66666666666, ans=0.2 +2024-08-03 17:26:38,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=181074.66666666666, ans=0.0 +2024-08-03 17:26:54,259 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.117e+02 1.302e+02 1.562e+02 2.500e+02, threshold=2.604e+02, percent-clipped=1.0 +2024-08-03 17:26:54,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=181148.0, ans=0.2 +2024-08-03 17:26:56,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.38 vs. limit=6.0 +2024-08-03 17:27:06,823 INFO [train.py:1114] (0/4) Epoch 14, batch 2100, loss[loss=0.2066, simple_loss=0.2811, pruned_loss=0.06604, over 13541.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2799, pruned_loss=0.05598, over 2638481.40 frames. ], batch size: 37, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:27:08,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181221.33333333334, ans=0.125 +2024-08-03 17:27:27,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0 +2024-08-03 17:27:27,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=181294.66666666666, ans=0.125 +2024-08-03 17:27:41,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.62 vs. limit=22.5 +2024-08-03 17:27:41,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=181331.33333333334, ans=0.0 +2024-08-03 17:27:51,982 INFO [train.py:1114] (0/4) Epoch 14, batch 2150, loss[loss=0.1904, simple_loss=0.2798, pruned_loss=0.05047, over 13570.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.28, pruned_loss=0.05609, over 2647077.57 frames. ], batch size: 36, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:28:19,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.41 vs. 
limit=15.0 +2024-08-03 17:28:26,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.184e+01 1.154e+02 1.453e+02 1.954e+02 3.704e+02, threshold=2.907e+02, percent-clipped=11.0 +2024-08-03 17:28:30,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=181551.33333333334, ans=0.1 +2024-08-03 17:28:30,665 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:28:30,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181551.33333333334, ans=0.1 +2024-08-03 17:28:39,569 INFO [train.py:1114] (0/4) Epoch 14, batch 2200, loss[loss=0.2266, simple_loss=0.3034, pruned_loss=0.07483, over 13410.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2799, pruned_loss=0.05593, over 2645456.27 frames. ], batch size: 39, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:28:50,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=181624.66666666666, ans=0.0 +2024-08-03 17:28:52,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=181624.66666666666, ans=0.125 +2024-08-03 17:28:52,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0 +2024-08-03 17:28:58,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.31 vs. limit=10.0 +2024-08-03 17:29:23,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181734.66666666666, ans=0.1 +2024-08-03 17:29:26,882 INFO [train.py:1114] (0/4) Epoch 14, batch 2250, loss[loss=0.1941, simple_loss=0.2836, pruned_loss=0.05227, over 13360.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2797, pruned_loss=0.05564, over 2642066.27 frames. 
], batch size: 37, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:29:28,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=181771.33333333334, ans=0.0 +2024-08-03 17:29:29,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=181771.33333333334, ans=0.0 +2024-08-03 17:29:36,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=181771.33333333334, ans=0.125 +2024-08-03 17:29:40,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=181808.0, ans=0.125 +2024-08-03 17:29:43,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=181808.0, ans=0.2 +2024-08-03 17:29:54,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=181844.66666666666, ans=0.125 +2024-08-03 17:29:56,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=181844.66666666666, ans=0.125 +2024-08-03 17:30:01,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=181881.33333333334, ans=0.0 +2024-08-03 17:30:03,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.261e+01 1.193e+02 1.520e+02 1.872e+02 2.993e+02, threshold=3.040e+02, percent-clipped=1.0 +2024-08-03 17:30:07,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=181918.0, ans=0.125 +2024-08-03 17:30:15,693 INFO [train.py:1114] (0/4) Epoch 14, batch 2300, loss[loss=0.17, simple_loss=0.2517, pruned_loss=0.04413, over 13566.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2782, pruned_loss=0.05495, over 2638324.66 frames. ], batch size: 33, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:30:20,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=181954.66666666666, ans=0.125 +2024-08-03 17:30:37,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=182028.0, ans=0.125 +2024-08-03 17:30:43,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=182064.66666666666, ans=0.0 +2024-08-03 17:30:51,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.70 vs. limit=15.0 +2024-08-03 17:30:56,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182101.33333333334, ans=0.125 +2024-08-03 17:31:01,058 INFO [train.py:1114] (0/4) Epoch 14, batch 2350, loss[loss=0.2103, simple_loss=0.3025, pruned_loss=0.05904, over 13550.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2789, pruned_loss=0.0553, over 2641011.19 frames. 
], batch size: 38, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:31:02,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=182138.0, ans=0.1 +2024-08-03 17:31:24,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=182211.33333333334, ans=0.125 +2024-08-03 17:31:33,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.345e+01 1.136e+02 1.357e+02 1.723e+02 3.270e+02, threshold=2.715e+02, percent-clipped=1.0 +2024-08-03 17:31:40,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=182284.66666666666, ans=0.0 +2024-08-03 17:31:46,428 INFO [train.py:1114] (0/4) Epoch 14, batch 2400, loss[loss=0.1843, simple_loss=0.2707, pruned_loss=0.04898, over 13526.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2795, pruned_loss=0.05541, over 2641876.41 frames. ], batch size: 35, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:32:04,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=182358.0, ans=0.125 +2024-08-03 17:32:05,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=182358.0, ans=0.125 +2024-08-03 17:32:09,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.26 vs. limit=10.0 +2024-08-03 17:32:11,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=182394.66666666666, ans=0.1 +2024-08-03 17:32:25,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=182468.0, ans=15.0 +2024-08-03 17:32:36,600 INFO [train.py:1114] (0/4) Epoch 14, batch 2450, loss[loss=0.2125, simple_loss=0.2971, pruned_loss=0.06395, over 13362.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.281, pruned_loss=0.05642, over 2631610.94 frames. ], batch size: 37, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:32:42,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=182504.66666666666, ans=0.025 +2024-08-03 17:32:44,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182541.33333333334, ans=0.1 +2024-08-03 17:32:50,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.75 vs. limit=15.0 +2024-08-03 17:32:57,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=182578.0, ans=0.0 +2024-08-03 17:33:11,019 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.955e+01 1.136e+02 1.286e+02 1.596e+02 2.665e+02, threshold=2.571e+02, percent-clipped=0.0 +2024-08-03 17:33:25,712 INFO [train.py:1114] (0/4) Epoch 14, batch 2500, loss[loss=0.2079, simple_loss=0.2961, pruned_loss=0.0598, over 13398.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.281, pruned_loss=0.05611, over 2635421.20 frames. 
], batch size: 39, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:33:26,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=182688.0, ans=0.07 +2024-08-03 17:33:26,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=182688.0, ans=0.025 +2024-08-03 17:33:31,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=182688.0, ans=0.0 +2024-08-03 17:33:48,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.57 vs. limit=15.0 +2024-08-03 17:34:03,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=182834.66666666666, ans=0.0 +2024-08-03 17:34:06,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=182834.66666666666, ans=10.0 +2024-08-03 17:34:07,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182834.66666666666, ans=0.0 +2024-08-03 17:34:08,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=182834.66666666666, ans=0.1 +2024-08-03 17:34:09,939 INFO [train.py:1114] (0/4) Epoch 14, batch 2550, loss[loss=0.1837, simple_loss=0.2546, pruned_loss=0.05639, over 13541.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2809, pruned_loss=0.05603, over 2636971.83 frames. ], batch size: 31, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:34:21,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=182908.0, ans=0.0 +2024-08-03 17:34:35,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=182981.33333333334, ans=0.125 +2024-08-03 17:34:37,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182981.33333333334, ans=0.1 +2024-08-03 17:34:37,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182981.33333333334, ans=0.125 +2024-08-03 17:34:41,381 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.149e+02 1.432e+02 2.081e+02 4.007e+02, threshold=2.864e+02, percent-clipped=10.0 +2024-08-03 17:34:43,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.24 vs. limit=15.0 +2024-08-03 17:34:53,666 INFO [train.py:1114] (0/4) Epoch 14, batch 2600, loss[loss=0.187, simple_loss=0.269, pruned_loss=0.05247, over 13572.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.281, pruned_loss=0.0559, over 2636564.96 frames. 
], batch size: 36, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:34:57,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=183054.66666666666, ans=0.125 +2024-08-03 17:34:57,464 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:35:01,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=183091.33333333334, ans=0.125 +2024-08-03 17:35:03,661 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:35:04,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=183091.33333333334, ans=0.125 +2024-08-03 17:35:12,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.30 vs. limit=22.5 +2024-08-03 17:35:13,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=183128.0, ans=0.125 +2024-08-03 17:35:18,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=183128.0, ans=0.0 +2024-08-03 17:35:25,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183164.66666666666, ans=0.1 +2024-08-03 17:35:30,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.01 vs. limit=15.0 +2024-08-03 17:35:38,004 INFO [train.py:1114] (0/4) Epoch 14, batch 2650, loss[loss=0.1969, simple_loss=0.2888, pruned_loss=0.05253, over 13279.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2811, pruned_loss=0.05578, over 2639765.34 frames. ], batch size: 46, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:35:43,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-08-03 17:36:05,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=183348.0, ans=0.035 +2024-08-03 17:36:09,727 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.156e+02 1.338e+02 1.561e+02 2.649e+02, threshold=2.677e+02, percent-clipped=0.0 +2024-08-03 17:36:16,090 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0 +2024-08-03 17:36:25,551 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.46 vs. limit=10.0 +2024-08-03 17:36:47,899 INFO [train.py:1114] (0/4) Epoch 14, batch 2700, loss[loss=0.1878, simple_loss=0.278, pruned_loss=0.04885, over 13552.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2812, pruned_loss=0.05589, over 2637090.77 frames. 
], batch size: 40, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:37:15,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183531.33333333334, ans=0.1 +2024-08-03 17:37:31,439 INFO [train.py:1114] (0/4) Epoch 14, batch 2750, loss[loss=0.175, simple_loss=0.2618, pruned_loss=0.04412, over 13334.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2796, pruned_loss=0.05543, over 2634582.74 frames. ], batch size: 34, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:37:51,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=183678.0, ans=0.0 +2024-08-03 17:38:02,654 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.576e+01 1.249e+02 1.500e+02 2.010e+02 3.327e+02, threshold=3.000e+02, percent-clipped=3.0 +2024-08-03 17:38:05,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=183751.33333333334, ans=0.025 +2024-08-03 17:38:05,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=183751.33333333334, ans=0.125 +2024-08-03 17:38:07,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=183751.33333333334, ans=0.0 +2024-08-03 17:38:12,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=183751.33333333334, ans=0.125 +2024-08-03 17:38:15,238 INFO [train.py:1114] (0/4) Epoch 14, batch 2800, loss[loss=0.2748, simple_loss=0.3284, pruned_loss=0.1106, over 9319.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.28, pruned_loss=0.05546, over 2626448.88 frames. ], batch size: 96, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:38:22,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=183788.0, ans=0.2 +2024-08-03 17:38:32,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.53 vs. limit=22.5 +2024-08-03 17:38:32,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183861.33333333334, ans=0.125 +2024-08-03 17:38:34,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=183861.33333333334, ans=0.2 +2024-08-03 17:38:39,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.49 vs. limit=15.0 +2024-08-03 17:38:51,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183934.66666666666, ans=0.1 +2024-08-03 17:38:52,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=183934.66666666666, ans=0.025 +2024-08-03 17:39:00,507 INFO [train.py:1114] (0/4) Epoch 14, batch 2850, loss[loss=0.1854, simple_loss=0.2696, pruned_loss=0.05062, over 13373.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2808, pruned_loss=0.05582, over 2620107.39 frames. 
], batch size: 35, lr: 8.82e-03, grad_scale: 32.0 +2024-08-03 17:39:08,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=184008.0, ans=0.09899494936611666 +2024-08-03 17:39:21,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=184044.66666666666, ans=0.125 +2024-08-03 17:39:22,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184044.66666666666, ans=0.125 +2024-08-03 17:39:28,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=184081.33333333334, ans=0.0 +2024-08-03 17:39:33,101 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.234e+01 1.082e+02 1.194e+02 1.402e+02 2.334e+02, threshold=2.389e+02, percent-clipped=0.0 +2024-08-03 17:39:35,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184081.33333333334, ans=0.125 +2024-08-03 17:39:45,218 INFO [train.py:1114] (0/4) Epoch 14, batch 2900, loss[loss=0.1768, simple_loss=0.2604, pruned_loss=0.04657, over 13366.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2817, pruned_loss=0.05617, over 2631840.50 frames. ], batch size: 36, lr: 8.82e-03, grad_scale: 32.0 +2024-08-03 17:39:47,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184154.66666666666, ans=0.125 +2024-08-03 17:39:53,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=184191.33333333334, ans=0.2 +2024-08-03 17:39:56,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=184191.33333333334, ans=0.125 +2024-08-03 17:40:03,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=184228.0, ans=0.125 +2024-08-03 17:40:04,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=184228.0, ans=0.07 +2024-08-03 17:40:28,419 INFO [train.py:1114] (0/4) Epoch 14, batch 2950, loss[loss=0.1957, simple_loss=0.279, pruned_loss=0.05618, over 13324.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2808, pruned_loss=0.05623, over 2629797.38 frames. ], batch size: 34, lr: 8.81e-03, grad_scale: 32.0 +2024-08-03 17:40:41,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=184374.66666666666, ans=0.0 +2024-08-03 17:40:54,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=184411.33333333334, ans=0.2 +2024-08-03 17:41:00,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.120e+01 1.167e+02 1.400e+02 1.731e+02 2.660e+02, threshold=2.799e+02, percent-clipped=4.0 +2024-08-03 17:41:08,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184484.66666666666, ans=0.125 +2024-08-03 17:41:13,013 INFO [train.py:1114] (0/4) Epoch 14, batch 3000, loss[loss=0.2021, simple_loss=0.2846, pruned_loss=0.05983, over 13562.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2808, pruned_loss=0.05635, over 2630233.80 frames. 
], batch size: 37, lr: 8.81e-03, grad_scale: 32.0 +2024-08-03 17:41:13,014 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 17:41:23,007 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1738, simple_loss=0.2731, pruned_loss=0.03723, over 944034.00 frames. +2024-08-03 17:41:23,008 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 17:41:31,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=12.0 +2024-08-03 17:41:38,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=184558.0, ans=0.0 +2024-08-03 17:41:39,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=184558.0, ans=0.2 +2024-08-03 17:41:39,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=184558.0, ans=0.0 +2024-08-03 17:41:44,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=184594.66666666666, ans=0.125 +2024-08-03 17:41:51,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=184631.33333333334, ans=0.125 +2024-08-03 17:42:08,950 INFO [train.py:1114] (0/4) Epoch 14, batch 3050, loss[loss=0.1763, simple_loss=0.2649, pruned_loss=0.04392, over 13522.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2816, pruned_loss=0.05671, over 2627417.33 frames. ], batch size: 35, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:42:09,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184704.66666666666, ans=0.125 +2024-08-03 17:42:22,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184741.33333333334, ans=0.125 +2024-08-03 17:42:27,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.33 vs. limit=12.0 +2024-08-03 17:42:41,265 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.107e+02 1.206e+02 1.425e+02 2.070e+02, threshold=2.412e+02, percent-clipped=0.0 +2024-08-03 17:42:41,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=184814.66666666666, ans=0.2 +2024-08-03 17:42:44,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=184851.33333333334, ans=0.025 +2024-08-03 17:42:47,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184851.33333333334, ans=0.125 +2024-08-03 17:42:52,681 INFO [train.py:1114] (0/4) Epoch 14, batch 3100, loss[loss=0.2278, simple_loss=0.3107, pruned_loss=0.07247, over 13313.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2809, pruned_loss=0.05636, over 2627599.64 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:42:53,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.74 vs. 
limit=22.5 +2024-08-03 17:42:55,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=184888.0, ans=0.125 +2024-08-03 17:43:03,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184924.66666666666, ans=0.125 +2024-08-03 17:43:10,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.33 vs. limit=15.0 +2024-08-03 17:43:12,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184961.33333333334, ans=0.125 +2024-08-03 17:43:12,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=184961.33333333334, ans=0.125 +2024-08-03 17:43:14,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184961.33333333334, ans=0.125 +2024-08-03 17:43:26,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185034.66666666666, ans=0.1 +2024-08-03 17:43:31,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=185034.66666666666, ans=0.0 +2024-08-03 17:43:35,769 INFO [train.py:1114] (0/4) Epoch 14, batch 3150, loss[loss=0.2331, simple_loss=0.3118, pruned_loss=0.07726, over 13014.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2809, pruned_loss=0.05643, over 2628840.44 frames. ], batch size: 48, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:43:36,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185071.33333333334, ans=0.1 +2024-08-03 17:43:44,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.71 vs. limit=5.0 +2024-08-03 17:43:58,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=22.5 +2024-08-03 17:44:01,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185181.33333333334, ans=0.1 +2024-08-03 17:44:03,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185181.33333333334, ans=0.1 +2024-08-03 17:44:08,669 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.659e+01 1.212e+02 1.528e+02 2.079e+02 4.163e+02, threshold=3.057e+02, percent-clipped=18.0 +2024-08-03 17:44:18,873 INFO [train.py:1114] (0/4) Epoch 14, batch 3200, loss[loss=0.1919, simple_loss=0.2746, pruned_loss=0.05462, over 13556.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.28, pruned_loss=0.05592, over 2635241.55 frames. ], batch size: 37, lr: 8.79e-03, grad_scale: 16.0 +2024-08-03 17:44:36,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=185328.0, ans=0.025 +2024-08-03 17:44:37,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.46 vs. 
limit=15.0 +2024-08-03 17:44:50,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.48 vs. limit=10.0 +2024-08-03 17:45:00,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=185401.33333333334, ans=0.2 +2024-08-03 17:45:01,560 INFO [train.py:1114] (0/4) Epoch 14, batch 3250, loss[loss=0.1938, simple_loss=0.284, pruned_loss=0.05178, over 13398.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2807, pruned_loss=0.05599, over 2640131.84 frames. ], batch size: 38, lr: 8.79e-03, grad_scale: 16.0 +2024-08-03 17:45:01,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.24 vs. limit=15.0 +2024-08-03 17:45:06,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185438.0, ans=0.125 +2024-08-03 17:45:11,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=185474.66666666666, ans=0.125 +2024-08-03 17:45:15,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185474.66666666666, ans=0.0 +2024-08-03 17:45:15,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=185474.66666666666, ans=0.2 +2024-08-03 17:45:27,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=15.0 +2024-08-03 17:45:30,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.20 vs. limit=22.5 +2024-08-03 17:45:35,110 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.801e+01 1.165e+02 1.318e+02 1.646e+02 3.018e+02, threshold=2.636e+02, percent-clipped=0.0 +2024-08-03 17:45:35,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185548.0, ans=0.1 +2024-08-03 17:45:36,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=185584.66666666666, ans=0.5 +2024-08-03 17:45:37,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=185584.66666666666, ans=0.0 +2024-08-03 17:45:44,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185621.33333333334, ans=0.1 +2024-08-03 17:45:45,456 INFO [train.py:1114] (0/4) Epoch 14, batch 3300, loss[loss=0.2165, simple_loss=0.298, pruned_loss=0.06753, over 12793.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2793, pruned_loss=0.05546, over 2641031.88 frames. 
], batch size: 52, lr: 8.78e-03, grad_scale: 16.0 +2024-08-03 17:46:04,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185694.66666666666, ans=0.1 +2024-08-03 17:46:19,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=185768.0, ans=0.09899494936611666 +2024-08-03 17:46:23,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=185768.0, ans=0.0 +2024-08-03 17:46:25,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.68 vs. limit=15.0 +2024-08-03 17:46:26,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.93 vs. limit=15.0 +2024-08-03 17:46:26,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=185768.0, ans=15.0 +2024-08-03 17:46:28,631 INFO [train.py:1114] (0/4) Epoch 14, batch 3350, loss[loss=0.2317, simple_loss=0.3177, pruned_loss=0.07285, over 13052.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2804, pruned_loss=0.05612, over 2630682.01 frames. ], batch size: 48, lr: 8.78e-03, grad_scale: 8.0 +2024-08-03 17:46:32,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=185804.66666666666, ans=0.0 +2024-08-03 17:46:33,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185804.66666666666, ans=0.1 +2024-08-03 17:46:34,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=185804.66666666666, ans=0.125 +2024-08-03 17:46:41,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=185841.33333333334, ans=0.07 +2024-08-03 17:46:54,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=185914.66666666666, ans=0.025 +2024-08-03 17:47:01,821 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.039e+01 1.138e+02 1.305e+02 1.515e+02 2.289e+02, threshold=2.609e+02, percent-clipped=0.0 +2024-08-03 17:47:04,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=185951.33333333334, ans=0.1 +2024-08-03 17:47:05,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.03 vs. limit=6.0 +2024-08-03 17:47:11,859 INFO [train.py:1114] (0/4) Epoch 14, batch 3400, loss[loss=0.1923, simple_loss=0.2731, pruned_loss=0.05577, over 13564.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2802, pruned_loss=0.05621, over 2626156.48 frames. 
], batch size: 31, lr: 8.78e-03, grad_scale: 8.0 +2024-08-03 17:47:16,860 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:47:25,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=186024.66666666666, ans=0.0 +2024-08-03 17:47:35,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0 +2024-08-03 17:47:39,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=186098.0, ans=0.125 +2024-08-03 17:47:44,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=186098.0, ans=0.2 +2024-08-03 17:47:55,938 INFO [train.py:1114] (0/4) Epoch 14, batch 3450, loss[loss=0.2023, simple_loss=0.2898, pruned_loss=0.05737, over 12896.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2796, pruned_loss=0.05561, over 2629948.93 frames. ], batch size: 52, lr: 8.77e-03, grad_scale: 8.0 +2024-08-03 17:48:29,137 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.095e+01 1.141e+02 1.344e+02 1.531e+02 2.504e+02, threshold=2.687e+02, percent-clipped=0.0 +2024-08-03 17:48:36,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.11 vs. limit=15.0 +2024-08-03 17:48:37,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=186354.66666666666, ans=0.125 +2024-08-03 17:48:38,411 INFO [train.py:1114] (0/4) Epoch 14, batch 3500, loss[loss=0.183, simple_loss=0.2697, pruned_loss=0.04815, over 13533.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.279, pruned_loss=0.0557, over 2631306.79 frames. ], batch size: 34, lr: 8.77e-03, grad_scale: 8.0 +2024-08-03 17:48:42,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=186354.66666666666, ans=0.025 +2024-08-03 17:48:46,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=186391.33333333334, ans=0.1 +2024-08-03 17:49:04,454 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.66 vs. limit=22.5 +2024-08-03 17:49:07,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=186464.66666666666, ans=0.2 +2024-08-03 17:49:21,440 INFO [train.py:1114] (0/4) Epoch 14, batch 3550, loss[loss=0.1795, simple_loss=0.2723, pruned_loss=0.04341, over 12682.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2813, pruned_loss=0.05662, over 2629956.52 frames. 
], batch size: 59, lr: 8.76e-03, grad_scale: 8.0 +2024-08-03 17:49:29,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=186574.66666666666, ans=0.125 +2024-08-03 17:49:37,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=186574.66666666666, ans=0.1 +2024-08-03 17:49:38,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186611.33333333334, ans=0.1 +2024-08-03 17:49:38,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=186611.33333333334, ans=0.05 +2024-08-03 17:49:39,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.93 vs. limit=15.0 +2024-08-03 17:49:55,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-08-03 17:49:56,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.367e+01 1.245e+02 1.346e+02 1.510e+02 2.403e+02, threshold=2.693e+02, percent-clipped=0.0 +2024-08-03 17:49:59,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186684.66666666666, ans=0.1 +2024-08-03 17:50:06,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.91 vs. limit=10.0 +2024-08-03 17:50:06,470 INFO [train.py:1114] (0/4) Epoch 14, batch 3600, loss[loss=0.2707, simple_loss=0.3384, pruned_loss=0.1015, over 8879.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2864, pruned_loss=0.0609, over 2487650.46 frames. ], batch size: 96, lr: 8.76e-03, grad_scale: 16.0 +2024-08-03 17:50:07,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=186721.33333333334, ans=0.125 +2024-08-03 17:50:12,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.93 vs. limit=22.5 +2024-08-03 17:50:13,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=186721.33333333334, ans=0.0 +2024-08-03 17:50:14,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=186758.0, ans=0.0 +2024-08-03 17:50:18,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186758.0, ans=0.125 +2024-08-03 17:50:37,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186831.33333333334, ans=0.125 +2024-08-03 17:50:41,816 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-14.pt +2024-08-03 17:54:05,176 INFO [train.py:1114] (0/4) Epoch 15, batch 0, loss[loss=0.1895, simple_loss=0.2732, pruned_loss=0.05293, over 13337.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2732, pruned_loss=0.05293, over 13337.00 frames. 
], batch size: 33, lr: 8.46e-03, grad_scale: 32.0 +2024-08-03 17:54:05,177 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 17:54:17,154 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1774, simple_loss=0.2778, pruned_loss=0.03851, over 944034.00 frames. +2024-08-03 17:54:17,155 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 17:54:19,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186871.66666666666, ans=0.125 +2024-08-03 17:54:25,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186908.33333333334, ans=0.125 +2024-08-03 17:54:27,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-08-03 17:54:30,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=186908.33333333334, ans=0.125 +2024-08-03 17:54:32,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=22.5 +2024-08-03 17:54:34,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.86 vs. limit=22.5 +2024-08-03 17:54:53,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186945.0, ans=0.125 +2024-08-03 17:54:53,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=186945.0, ans=0.0 +2024-08-03 17:55:05,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=187018.33333333334, ans=0.0 +2024-08-03 17:55:12,112 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.609e+01 1.155e+02 1.252e+02 1.382e+02 2.620e+02, threshold=2.503e+02, percent-clipped=0.0 +2024-08-03 17:55:13,944 INFO [train.py:1114] (0/4) Epoch 15, batch 50, loss[loss=0.1456, simple_loss=0.2221, pruned_loss=0.03459, over 13413.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.281, pruned_loss=0.05619, over 579137.41 frames. ], batch size: 32, lr: 8.45e-03, grad_scale: 32.0 +2024-08-03 17:55:23,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.66 vs. limit=22.5 +2024-08-03 17:55:51,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=187165.0, ans=0.125 +2024-08-03 17:55:58,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.09 vs. limit=5.0 +2024-08-03 17:56:05,331 INFO [train.py:1114] (0/4) Epoch 15, batch 100, loss[loss=0.1673, simple_loss=0.259, pruned_loss=0.03777, over 13536.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2814, pruned_loss=0.05573, over 1026204.01 frames. 
], batch size: 35, lr: 8.45e-03, grad_scale: 32.0 +2024-08-03 17:56:18,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=187275.0, ans=0.125 +2024-08-03 17:56:48,422 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.485e+01 1.136e+02 1.309e+02 1.649e+02 2.921e+02, threshold=2.617e+02, percent-clipped=2.0 +2024-08-03 17:56:50,225 INFO [train.py:1114] (0/4) Epoch 15, batch 150, loss[loss=0.1694, simple_loss=0.2502, pruned_loss=0.04433, over 13426.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2802, pruned_loss=0.05497, over 1387564.77 frames. ], batch size: 32, lr: 8.44e-03, grad_scale: 32.0 +2024-08-03 17:56:55,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=187421.66666666666, ans=0.125 +2024-08-03 17:56:55,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=187421.66666666666, ans=0.0 +2024-08-03 17:57:02,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=187458.33333333334, ans=0.0 +2024-08-03 17:57:14,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=187495.0, ans=0.125 +2024-08-03 17:57:30,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187531.66666666666, ans=0.125 +2024-08-03 17:57:43,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=187605.0, ans=0.2 +2024-08-03 17:57:43,988 INFO [train.py:1114] (0/4) Epoch 15, batch 200, loss[loss=0.1961, simple_loss=0.2795, pruned_loss=0.0564, over 12429.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2784, pruned_loss=0.05439, over 1665913.18 frames. ], batch size: 58, lr: 8.44e-03, grad_scale: 32.0 +2024-08-03 17:58:16,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187715.0, ans=0.1 +2024-08-03 17:58:19,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187715.0, ans=0.1 +2024-08-03 17:58:29,622 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.054e+01 1.128e+02 1.299e+02 1.749e+02 3.562e+02, threshold=2.599e+02, percent-clipped=4.0 +2024-08-03 17:58:31,491 INFO [train.py:1114] (0/4) Epoch 15, batch 250, loss[loss=0.2113, simple_loss=0.3021, pruned_loss=0.06028, over 13210.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2789, pruned_loss=0.05461, over 1884669.13 frames. ], batch size: 46, lr: 8.44e-03, grad_scale: 32.0 +2024-08-03 17:58:54,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.02 vs. limit=15.0 +2024-08-03 17:59:03,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=187898.33333333334, ans=10.0 +2024-08-03 17:59:11,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=187935.0, ans=0.0 +2024-08-03 17:59:19,798 INFO [train.py:1114] (0/4) Epoch 15, batch 300, loss[loss=0.2215, simple_loss=0.3036, pruned_loss=0.06973, over 13425.00 frames. 
], tot_loss[loss=0.1928, simple_loss=0.2778, pruned_loss=0.0539, over 2051352.89 frames. ], batch size: 42, lr: 8.43e-03, grad_scale: 32.0 +2024-08-03 17:59:22,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=187971.66666666666, ans=0.2 +2024-08-03 17:59:29,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0 +2024-08-03 17:59:30,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=188008.33333333334, ans=0.025 +2024-08-03 18:00:05,573 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.050e+01 1.089e+02 1.187e+02 1.395e+02 2.688e+02, threshold=2.374e+02, percent-clipped=1.0 +2024-08-03 18:00:07,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.63 vs. limit=15.0 +2024-08-03 18:00:07,411 INFO [train.py:1114] (0/4) Epoch 15, batch 350, loss[loss=0.1753, simple_loss=0.2556, pruned_loss=0.04757, over 13601.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2786, pruned_loss=0.05409, over 2181952.73 frames. ], batch size: 33, lr: 8.43e-03, grad_scale: 32.0 +2024-08-03 18:00:30,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188228.33333333334, ans=0.0 +2024-08-03 18:00:36,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=188265.0, ans=0.025 +2024-08-03 18:00:38,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.13 vs. limit=22.5 +2024-08-03 18:00:41,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.10 vs. limit=22.5 +2024-08-03 18:00:41,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188265.0, ans=0.125 +2024-08-03 18:00:54,365 INFO [train.py:1114] (0/4) Epoch 15, batch 400, loss[loss=0.2054, simple_loss=0.2943, pruned_loss=0.05822, over 13360.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.278, pruned_loss=0.05389, over 2286092.72 frames. ], batch size: 37, lr: 8.42e-03, grad_scale: 32.0 +2024-08-03 18:01:12,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.31 vs. limit=15.0 +2024-08-03 18:01:37,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.33 vs. limit=15.0 +2024-08-03 18:01:38,640 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.296e+01 1.094e+02 1.225e+02 1.629e+02 4.007e+02, threshold=2.451e+02, percent-clipped=6.0 +2024-08-03 18:01:39,621 INFO [train.py:1114] (0/4) Epoch 15, batch 450, loss[loss=0.1857, simple_loss=0.271, pruned_loss=0.0502, over 13548.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2778, pruned_loss=0.05357, over 2359492.36 frames. 
], batch size: 38, lr: 8.42e-03, grad_scale: 32.0 +2024-08-03 18:01:56,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188558.33333333334, ans=0.1 +2024-08-03 18:02:00,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188595.0, ans=0.1 +2024-08-03 18:02:26,759 INFO [train.py:1114] (0/4) Epoch 15, batch 500, loss[loss=0.2174, simple_loss=0.3, pruned_loss=0.06739, over 13402.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2767, pruned_loss=0.0532, over 2424579.03 frames. ], batch size: 43, lr: 8.42e-03, grad_scale: 32.0 +2024-08-03 18:02:48,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=188778.33333333334, ans=0.0 +2024-08-03 18:03:08,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=188851.66666666666, ans=0.125 +2024-08-03 18:03:12,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.88 vs. limit=15.0 +2024-08-03 18:03:12,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=188851.66666666666, ans=0.07 +2024-08-03 18:03:15,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.367e+01 1.124e+02 1.290e+02 1.584e+02 2.757e+02, threshold=2.579e+02, percent-clipped=2.0 +2024-08-03 18:03:15,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188888.33333333334, ans=0.0 +2024-08-03 18:03:16,217 INFO [train.py:1114] (0/4) Epoch 15, batch 550, loss[loss=0.1964, simple_loss=0.2836, pruned_loss=0.05462, over 13031.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2774, pruned_loss=0.05369, over 2467014.86 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-03 18:03:24,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=188925.0, ans=0.0 +2024-08-03 18:03:24,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188925.0, ans=0.1 +2024-08-03 18:03:47,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=188998.33333333334, ans=0.2 +2024-08-03 18:03:48,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=188998.33333333334, ans=0.2 +2024-08-03 18:04:01,290 INFO [train.py:1114] (0/4) Epoch 15, batch 600, loss[loss=0.216, simple_loss=0.3045, pruned_loss=0.06379, over 13326.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2779, pruned_loss=0.05405, over 2506834.93 frames. 
], batch size: 46, lr: 8.41e-03, grad_scale: 32.0 +2024-08-03 18:04:05,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=189071.66666666666, ans=0.125 +2024-08-03 18:04:07,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=189071.66666666666, ans=0.125 +2024-08-03 18:04:11,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=189108.33333333334, ans=0.125 +2024-08-03 18:04:24,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189145.0, ans=0.0 +2024-08-03 18:04:25,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.03 vs. limit=22.5 +2024-08-03 18:04:29,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=189181.66666666666, ans=0.125 +2024-08-03 18:04:30,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=189181.66666666666, ans=0.5 +2024-08-03 18:04:32,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=189181.66666666666, ans=0.025 +2024-08-03 18:04:47,693 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.457e+01 1.170e+02 1.382e+02 2.012e+02 3.539e+02, threshold=2.764e+02, percent-clipped=13.0 +2024-08-03 18:04:48,678 INFO [train.py:1114] (0/4) Epoch 15, batch 650, loss[loss=0.1969, simple_loss=0.2844, pruned_loss=0.05467, over 13556.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2778, pruned_loss=0.05399, over 2542208.52 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 32.0 +2024-08-03 18:04:56,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=189255.0, ans=0.125 +2024-08-03 18:05:06,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=189328.33333333334, ans=0.125 +2024-08-03 18:05:09,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=189328.33333333334, ans=0.035 +2024-08-03 18:05:10,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189328.33333333334, ans=0.125 +2024-08-03 18:05:11,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=189328.33333333334, ans=0.125 +2024-08-03 18:05:19,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=189365.0, ans=0.5 +2024-08-03 18:05:28,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. 
limit=6.0 +2024-08-03 18:05:29,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=189401.66666666666, ans=0.125 +2024-08-03 18:05:29,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189401.66666666666, ans=0.125 +2024-08-03 18:05:35,759 INFO [train.py:1114] (0/4) Epoch 15, batch 700, loss[loss=0.2013, simple_loss=0.2848, pruned_loss=0.05888, over 13543.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2778, pruned_loss=0.05408, over 2563642.95 frames. ], batch size: 35, lr: 8.40e-03, grad_scale: 32.0 +2024-08-03 18:05:50,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189475.0, ans=0.125 +2024-08-03 18:05:54,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=189511.66666666666, ans=0.2 +2024-08-03 18:05:57,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=189511.66666666666, ans=0.125 +2024-08-03 18:06:13,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-03 18:06:22,762 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.490e+01 1.100e+02 1.247e+02 1.589e+02 2.626e+02, threshold=2.494e+02, percent-clipped=0.0 +2024-08-03 18:06:22,799 INFO [train.py:1114] (0/4) Epoch 15, batch 750, loss[loss=0.1978, simple_loss=0.2963, pruned_loss=0.0497, over 13348.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2772, pruned_loss=0.0537, over 2581648.14 frames. 
], batch size: 37, lr: 8.40e-03, grad_scale: 16.0 +2024-08-03 18:06:28,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=189621.66666666666, ans=0.0 +2024-08-03 18:06:28,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189621.66666666666, ans=0.125 +2024-08-03 18:06:41,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=189658.33333333334, ans=0.025 +2024-08-03 18:06:42,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=189658.33333333334, ans=0.2 +2024-08-03 18:06:44,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189695.0, ans=0.1 +2024-08-03 18:06:51,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=189695.0, ans=0.125 +2024-08-03 18:06:51,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189695.0, ans=0.1 +2024-08-03 18:06:58,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=189731.66666666666, ans=0.2 +2024-08-03 18:06:59,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=189731.66666666666, ans=0.0 +2024-08-03 18:07:01,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=189731.66666666666, ans=0.2 +2024-08-03 18:07:03,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=189768.33333333334, ans=0.2 +2024-08-03 18:07:12,613 INFO [train.py:1114] (0/4) Epoch 15, batch 800, loss[loss=0.1881, simple_loss=0.2643, pruned_loss=0.05598, over 13327.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2769, pruned_loss=0.05339, over 2596382.74 frames. ], batch size: 33, lr: 8.39e-03, grad_scale: 32.0 +2024-08-03 18:07:16,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=189805.0, ans=0.0 +2024-08-03 18:07:25,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189841.66666666666, ans=0.125 +2024-08-03 18:07:27,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=12.0 +2024-08-03 18:07:38,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189878.33333333334, ans=0.125 +2024-08-03 18:07:59,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.22 vs. limit=15.0 +2024-08-03 18:07:59,903 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.253e+01 1.110e+02 1.253e+02 1.558e+02 2.817e+02, threshold=2.505e+02, percent-clipped=1.0 +2024-08-03 18:07:59,956 INFO [train.py:1114] (0/4) Epoch 15, batch 850, loss[loss=0.1902, simple_loss=0.2821, pruned_loss=0.04912, over 13323.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2767, pruned_loss=0.05357, over 2609582.59 frames. 
], batch size: 40, lr: 8.39e-03, grad_scale: 32.0 +2024-08-03 18:08:05,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189988.33333333334, ans=0.1 +2024-08-03 18:08:07,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=189988.33333333334, ans=0.025 +2024-08-03 18:08:18,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=190025.0, ans=0.0 +2024-08-03 18:08:45,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.35 vs. limit=15.0 +2024-08-03 18:08:47,484 INFO [train.py:1114] (0/4) Epoch 15, batch 900, loss[loss=0.186, simple_loss=0.2723, pruned_loss=0.04984, over 13347.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2776, pruned_loss=0.05435, over 2612472.25 frames. ], batch size: 33, lr: 8.38e-03, grad_scale: 32.0 +2024-08-03 18:08:55,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=190208.33333333334, ans=0.0 +2024-08-03 18:09:11,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=190245.0, ans=0.125 +2024-08-03 18:09:32,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.97 vs. limit=15.0 +2024-08-03 18:09:35,335 INFO [train.py:1114] (0/4) Epoch 15, batch 950, loss[loss=0.1782, simple_loss=0.2663, pruned_loss=0.04504, over 13524.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2774, pruned_loss=0.05417, over 2613706.18 frames. ], batch size: 34, lr: 8.38e-03, grad_scale: 16.0 +2024-08-03 18:09:36,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.147e+02 1.407e+02 1.582e+02 2.602e+02, threshold=2.813e+02, percent-clipped=2.0 +2024-08-03 18:09:39,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=190355.0, ans=0.125 +2024-08-03 18:09:47,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.62 vs. limit=22.5 +2024-08-03 18:10:10,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=190465.0, ans=0.2 +2024-08-03 18:10:13,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=190465.0, ans=0.125 +2024-08-03 18:10:14,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=190465.0, ans=0.125 +2024-08-03 18:10:14,839 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:10:22,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190501.66666666666, ans=0.0 +2024-08-03 18:10:24,642 INFO [train.py:1114] (0/4) Epoch 15, batch 1000, loss[loss=0.1853, simple_loss=0.2733, pruned_loss=0.04872, over 13358.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2787, pruned_loss=0.05492, over 2612400.45 frames. 
], batch size: 35, lr: 8.38e-03, grad_scale: 16.0 +2024-08-03 18:10:24,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=190538.33333333334, ans=0.2 +2024-08-03 18:10:25,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190538.33333333334, ans=0.125 +2024-08-03 18:10:46,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=190611.66666666666, ans=0.025 +2024-08-03 18:10:51,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190648.33333333334, ans=0.125 +2024-08-03 18:10:53,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=190648.33333333334, ans=0.125 +2024-08-03 18:10:56,095 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-52000.pt +2024-08-03 18:11:00,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190648.33333333334, ans=0.125 +2024-08-03 18:11:11,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.00 vs. limit=15.0 +2024-08-03 18:11:12,186 INFO [train.py:1114] (0/4) Epoch 15, batch 1050, loss[loss=0.1986, simple_loss=0.29, pruned_loss=0.05355, over 13582.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2781, pruned_loss=0.05455, over 2615289.34 frames. ], batch size: 39, lr: 8.37e-03, grad_scale: 16.0 +2024-08-03 18:11:12,982 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.909e+01 1.089e+02 1.310e+02 1.512e+02 2.407e+02, threshold=2.620e+02, percent-clipped=0.0 +2024-08-03 18:11:29,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=190795.0, ans=0.0 +2024-08-03 18:11:51,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190868.33333333334, ans=0.1 +2024-08-03 18:11:57,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-08-03 18:11:59,029 INFO [train.py:1114] (0/4) Epoch 15, batch 1100, loss[loss=0.2049, simple_loss=0.2853, pruned_loss=0.06224, over 13537.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2777, pruned_loss=0.0543, over 2619494.30 frames. ], batch size: 36, lr: 8.37e-03, grad_scale: 16.0 +2024-08-03 18:12:03,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=190905.0, ans=0.125 +2024-08-03 18:12:05,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190905.0, ans=0.125 +2024-08-03 18:12:05,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=190905.0, ans=0.125 +2024-08-03 18:12:08,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.22 vs. 
limit=22.5 +2024-08-03 18:12:15,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=190941.66666666666, ans=0.0 +2024-08-03 18:12:41,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=191051.66666666666, ans=0.0 +2024-08-03 18:12:41,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.45 vs. limit=22.5 +2024-08-03 18:12:42,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.00 vs. limit=22.5 +2024-08-03 18:12:45,855 INFO [train.py:1114] (0/4) Epoch 15, batch 1150, loss[loss=0.1852, simple_loss=0.2612, pruned_loss=0.05462, over 13548.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2782, pruned_loss=0.05485, over 2618474.42 frames. ], batch size: 36, lr: 8.36e-03, grad_scale: 8.0 +2024-08-03 18:12:47,601 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.143e+02 1.336e+02 1.684e+02 2.618e+02, threshold=2.671e+02, percent-clipped=0.0 +2024-08-03 18:12:48,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=191088.33333333334, ans=0.125 +2024-08-03 18:12:54,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=191088.33333333334, ans=0.125 +2024-08-03 18:12:56,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=191125.0, ans=0.07 +2024-08-03 18:12:56,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=191125.0, ans=0.5 +2024-08-03 18:12:57,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.39 vs. limit=22.5 +2024-08-03 18:12:57,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=191125.0, ans=0.0 +2024-08-03 18:13:17,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=191198.33333333334, ans=0.0 +2024-08-03 18:13:28,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.23 vs. limit=22.5 +2024-08-03 18:13:35,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=191235.0, ans=0.125 +2024-08-03 18:13:40,342 INFO [train.py:1114] (0/4) Epoch 15, batch 1200, loss[loss=0.1903, simple_loss=0.2856, pruned_loss=0.04748, over 13576.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2795, pruned_loss=0.05511, over 2616288.71 frames. 
], batch size: 39, lr: 8.36e-03, grad_scale: 16.0 +2024-08-03 18:13:40,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=191271.66666666666, ans=0.125 +2024-08-03 18:13:43,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191271.66666666666, ans=0.1 +2024-08-03 18:13:58,528 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:14:03,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=191345.0, ans=0.125 +2024-08-03 18:14:15,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191381.66666666666, ans=0.1 +2024-08-03 18:14:17,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191381.66666666666, ans=0.125 +2024-08-03 18:14:17,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=191381.66666666666, ans=0.04949747468305833 +2024-08-03 18:14:28,855 INFO [train.py:1114] (0/4) Epoch 15, batch 1250, loss[loss=0.2009, simple_loss=0.2906, pruned_loss=0.0556, over 13421.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2797, pruned_loss=0.05504, over 2628020.27 frames. ], batch size: 42, lr: 8.36e-03, grad_scale: 16.0 +2024-08-03 18:14:30,566 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.500e+01 1.104e+02 1.280e+02 1.551e+02 2.607e+02, threshold=2.559e+02, percent-clipped=0.0 +2024-08-03 18:14:48,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191528.33333333334, ans=0.125 +2024-08-03 18:15:01,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=191565.0, ans=0.2 +2024-08-03 18:15:09,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=191601.66666666666, ans=0.2 +2024-08-03 18:15:14,022 INFO [train.py:1114] (0/4) Epoch 15, batch 1300, loss[loss=0.2212, simple_loss=0.3118, pruned_loss=0.06529, over 12749.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2786, pruned_loss=0.05472, over 2630521.97 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 16.0 +2024-08-03 18:15:14,441 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.20 vs. limit=15.0 +2024-08-03 18:15:31,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=191675.0, ans=0.125 +2024-08-03 18:15:44,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0 +2024-08-03 18:15:45,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.99 vs. limit=15.0 +2024-08-03 18:16:01,022 INFO [train.py:1114] (0/4) Epoch 15, batch 1350, loss[loss=0.209, simple_loss=0.2872, pruned_loss=0.06542, over 13548.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2785, pruned_loss=0.05463, over 2637734.06 frames. 
], batch size: 37, lr: 8.35e-03, grad_scale: 16.0 +2024-08-03 18:16:02,866 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.149e+02 1.398e+02 1.802e+02 2.548e+02, threshold=2.797e+02, percent-clipped=0.0 +2024-08-03 18:16:09,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=191858.33333333334, ans=0.025 +2024-08-03 18:16:13,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=191858.33333333334, ans=0.125 +2024-08-03 18:16:14,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191858.33333333334, ans=0.125 +2024-08-03 18:16:18,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=191895.0, ans=0.2 +2024-08-03 18:16:25,818 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.04 vs. limit=15.0 +2024-08-03 18:16:34,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=191931.66666666666, ans=0.125 +2024-08-03 18:16:47,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.47 vs. limit=15.0 +2024-08-03 18:16:47,824 INFO [train.py:1114] (0/4) Epoch 15, batch 1400, loss[loss=0.1641, simple_loss=0.2402, pruned_loss=0.04396, over 13254.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2783, pruned_loss=0.05461, over 2641687.38 frames. ], batch size: 31, lr: 8.34e-03, grad_scale: 16.0 +2024-08-03 18:16:48,919 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:17:20,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=192115.0, ans=0.0 +2024-08-03 18:17:26,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.59 vs. limit=22.5 +2024-08-03 18:17:27,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192151.66666666666, ans=0.1 +2024-08-03 18:17:31,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=192151.66666666666, ans=0.1 +2024-08-03 18:17:39,172 INFO [train.py:1114] (0/4) Epoch 15, batch 1450, loss[loss=0.1984, simple_loss=0.285, pruned_loss=0.05591, over 13420.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2787, pruned_loss=0.05464, over 2640576.75 frames. 
], batch size: 43, lr: 8.34e-03, grad_scale: 16.0 +2024-08-03 18:17:40,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=192188.33333333334, ans=0.025 +2024-08-03 18:17:40,949 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.258e+01 1.126e+02 1.352e+02 1.648e+02 3.700e+02, threshold=2.704e+02, percent-clipped=1.0 +2024-08-03 18:18:01,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=192261.66666666666, ans=0.07 +2024-08-03 18:18:07,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=192298.33333333334, ans=0.125 +2024-08-03 18:18:24,987 INFO [train.py:1114] (0/4) Epoch 15, batch 1500, loss[loss=0.2056, simple_loss=0.2936, pruned_loss=0.05882, over 13411.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2791, pruned_loss=0.05447, over 2641053.54 frames. ], batch size: 39, lr: 8.34e-03, grad_scale: 16.0 +2024-08-03 18:18:41,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.11 vs. limit=6.0 +2024-08-03 18:18:43,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=192445.0, ans=0.125 +2024-08-03 18:18:43,479 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:18:50,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192445.0, ans=0.1 +2024-08-03 18:18:53,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=192481.66666666666, ans=10.0 +2024-08-03 18:19:11,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=192555.0, ans=0.0 +2024-08-03 18:19:12,238 INFO [train.py:1114] (0/4) Epoch 15, batch 1550, loss[loss=0.1923, simple_loss=0.274, pruned_loss=0.05531, over 13399.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2785, pruned_loss=0.05443, over 2630744.73 frames. ], batch size: 41, lr: 8.33e-03, grad_scale: 16.0 +2024-08-03 18:19:14,138 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.182e+01 1.117e+02 1.315e+02 1.608e+02 2.647e+02, threshold=2.631e+02, percent-clipped=0.0 +2024-08-03 18:19:22,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=192591.66666666666, ans=0.0 +2024-08-03 18:19:26,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.50 vs. limit=22.5 +2024-08-03 18:19:59,536 INFO [train.py:1114] (0/4) Epoch 15, batch 1600, loss[loss=0.1755, simple_loss=0.269, pruned_loss=0.04102, over 13574.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2778, pruned_loss=0.05424, over 2623714.66 frames. 
], batch size: 39, lr: 8.33e-03, grad_scale: 32.0 +2024-08-03 18:20:12,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=192775.0, ans=0.025 +2024-08-03 18:20:20,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=192811.66666666666, ans=10.0 +2024-08-03 18:20:32,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192848.33333333334, ans=0.1 +2024-08-03 18:20:32,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192848.33333333334, ans=0.1 +2024-08-03 18:20:44,740 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:20:47,090 INFO [train.py:1114] (0/4) Epoch 15, batch 1650, loss[loss=0.193, simple_loss=0.2856, pruned_loss=0.05023, over 13341.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2778, pruned_loss=0.05426, over 2620941.70 frames. ], batch size: 40, lr: 8.33e-03, grad_scale: 32.0 +2024-08-03 18:20:48,900 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.213e+01 1.145e+02 1.280e+02 1.838e+02 3.870e+02, threshold=2.560e+02, percent-clipped=5.0 +2024-08-03 18:20:59,036 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:21:01,743 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:21:12,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192995.0, ans=0.125 +2024-08-03 18:21:20,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-08-03 18:21:22,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=193031.66666666666, ans=0.0 +2024-08-03 18:21:26,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=193068.33333333334, ans=0.0 +2024-08-03 18:21:29,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.32 vs. limit=22.5 +2024-08-03 18:21:32,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=193068.33333333334, ans=0.2 +2024-08-03 18:21:34,187 INFO [train.py:1114] (0/4) Epoch 15, batch 1700, loss[loss=0.1647, simple_loss=0.2428, pruned_loss=0.0433, over 13280.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2772, pruned_loss=0.05379, over 2629242.59 frames. 
], batch size: 31, lr: 8.32e-03, grad_scale: 32.0 +2024-08-03 18:21:53,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=193178.33333333334, ans=0.95 +2024-08-03 18:21:53,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193178.33333333334, ans=0.1 +2024-08-03 18:22:13,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=193251.66666666666, ans=0.125 +2024-08-03 18:22:19,630 INFO [train.py:1114] (0/4) Epoch 15, batch 1750, loss[loss=0.1835, simple_loss=0.2628, pruned_loss=0.05206, over 13571.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2768, pruned_loss=0.05364, over 2632542.98 frames. ], batch size: 31, lr: 8.32e-03, grad_scale: 32.0 +2024-08-03 18:22:21,333 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.690e+01 1.123e+02 1.340e+02 1.586e+02 3.403e+02, threshold=2.681e+02, percent-clipped=7.0 +2024-08-03 18:22:21,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=193288.33333333334, ans=0.0 +2024-08-03 18:22:23,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=193288.33333333334, ans=0.0 +2024-08-03 18:22:37,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-08-03 18:22:51,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=193398.33333333334, ans=0.125 +2024-08-03 18:23:03,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=193435.0, ans=0.2 +2024-08-03 18:23:06,732 INFO [train.py:1114] (0/4) Epoch 15, batch 1800, loss[loss=0.1939, simple_loss=0.2797, pruned_loss=0.05405, over 13557.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2768, pruned_loss=0.05349, over 2633458.12 frames. ], batch size: 38, lr: 8.31e-03, grad_scale: 32.0 +2024-08-03 18:23:07,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193471.66666666666, ans=0.0 +2024-08-03 18:23:15,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=193508.33333333334, ans=0.125 +2024-08-03 18:23:22,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=193508.33333333334, ans=0.2 +2024-08-03 18:23:35,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193581.66666666666, ans=0.1 +2024-08-03 18:23:50,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.01 vs. limit=15.0 +2024-08-03 18:23:53,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=193618.33333333334, ans=0.0 +2024-08-03 18:23:54,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.51 vs. 
limit=15.0 +2024-08-03 18:23:55,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=193655.0, ans=0.1 +2024-08-03 18:23:55,707 INFO [train.py:1114] (0/4) Epoch 15, batch 1850, loss[loss=0.1943, simple_loss=0.2837, pruned_loss=0.05243, over 13401.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2763, pruned_loss=0.05334, over 2636615.52 frames. ], batch size: 39, lr: 8.31e-03, grad_scale: 32.0 +2024-08-03 18:23:57,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.035e+01 1.191e+02 1.556e+02 2.123e+02 2.973e+02, threshold=3.112e+02, percent-clipped=3.0 +2024-08-03 18:24:00,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=193655.0, ans=0.2 +2024-08-03 18:24:29,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=193765.0, ans=0.2 +2024-08-03 18:24:38,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=193801.66666666666, ans=0.2 +2024-08-03 18:24:44,766 INFO [train.py:1114] (0/4) Epoch 15, batch 1900, loss[loss=0.233, simple_loss=0.322, pruned_loss=0.07194, over 13319.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2766, pruned_loss=0.05358, over 2639622.80 frames. ], batch size: 40, lr: 8.31e-03, grad_scale: 32.0 +2024-08-03 18:24:46,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193838.33333333334, ans=0.1 +2024-08-03 18:24:48,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.65 vs. limit=15.0 +2024-08-03 18:24:56,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193875.0, ans=0.1 +2024-08-03 18:25:15,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=193948.33333333334, ans=0.0 +2024-08-03 18:25:19,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=193948.33333333334, ans=0.2 +2024-08-03 18:25:21,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.17 vs. limit=22.5 +2024-08-03 18:25:23,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193985.0, ans=0.125 +2024-08-03 18:25:23,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=193985.0, ans=0.125 +2024-08-03 18:25:28,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=193985.0, ans=0.0 +2024-08-03 18:25:29,792 INFO [train.py:1114] (0/4) Epoch 15, batch 1950, loss[loss=0.2108, simple_loss=0.2849, pruned_loss=0.06831, over 13544.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2781, pruned_loss=0.05402, over 2645940.00 frames. 
], batch size: 36, lr: 8.30e-03, grad_scale: 32.0 +2024-08-03 18:25:31,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.973e+01 1.188e+02 1.452e+02 1.828e+02 3.234e+02, threshold=2.903e+02, percent-clipped=1.0 +2024-08-03 18:25:35,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.59 vs. limit=22.5 +2024-08-03 18:25:39,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.74 vs. limit=15.0 +2024-08-03 18:25:55,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.32 vs. limit=15.0 +2024-08-03 18:26:04,454 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.50 vs. limit=22.5 +2024-08-03 18:26:06,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=194168.33333333334, ans=0.0 +2024-08-03 18:26:15,854 INFO [train.py:1114] (0/4) Epoch 15, batch 2000, loss[loss=0.1745, simple_loss=0.2444, pruned_loss=0.0523, over 13518.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2788, pruned_loss=0.05425, over 2636932.03 frames. ], batch size: 31, lr: 8.30e-03, grad_scale: 32.0 +2024-08-03 18:26:16,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=194205.0, ans=0.125 +2024-08-03 18:26:22,773 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:26:28,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194241.66666666666, ans=0.1 +2024-08-03 18:26:34,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.21 vs. limit=6.0 +2024-08-03 18:26:41,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=194278.33333333334, ans=0.1 +2024-08-03 18:26:53,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=194315.0, ans=0.0 +2024-08-03 18:27:06,172 INFO [train.py:1114] (0/4) Epoch 15, batch 2050, loss[loss=0.157, simple_loss=0.241, pruned_loss=0.03657, over 13424.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2778, pruned_loss=0.05417, over 2633290.51 frames. 
], batch size: 32, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:27:07,806 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.454e+01 1.180e+02 1.343e+02 1.712e+02 4.642e+02, threshold=2.687e+02, percent-clipped=2.0
+2024-08-03 18:27:17,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194425.0, ans=0.125
+2024-08-03 18:27:37,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=194498.33333333334, ans=0.07
+2024-08-03 18:27:44,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=194535.0, ans=0.0
+2024-08-03 18:27:51,118 INFO [train.py:1114] (0/4) Epoch 15, batch 2100, loss[loss=0.1736, simple_loss=0.2635, pruned_loss=0.04187, over 13538.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2778, pruned_loss=0.05412, over 2637994.00 frames. ], batch size: 37, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:27:52,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=194571.66666666666, ans=0.125
+2024-08-03 18:28:10,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194608.33333333334, ans=0.125
+2024-08-03 18:28:11,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=194608.33333333334, ans=0.04949747468305833
+2024-08-03 18:28:20,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=194645.0, ans=0.0
+2024-08-03 18:28:22,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=194681.66666666666, ans=0.125
+2024-08-03 18:28:24,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194681.66666666666, ans=0.125
+2024-08-03 18:28:24,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=194681.66666666666, ans=0.1
+2024-08-03 18:28:24,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.75 vs. limit=15.0
+2024-08-03 18:28:30,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=194718.33333333334, ans=0.125
+2024-08-03 18:28:40,554 INFO [train.py:1114] (0/4) Epoch 15, batch 2150, loss[loss=0.1807, simple_loss=0.2635, pruned_loss=0.049, over 13560.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.277, pruned_loss=0.05387, over 2646302.54 frames. ], batch size: 36, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:28:42,293 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.112e+02 1.243e+02 1.782e+02 4.136e+02, threshold=2.485e+02, percent-clipped=5.0
+2024-08-03 18:28:53,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=194791.66666666666, ans=0.0
+2024-08-03 18:29:08,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.48 vs. limit=15.0
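The `optim.py` WARNING lines summarize the distribution of recent gradient norms: the five numbers are the min / 25% / median / 75% / max, `threshold` is `Clipping_scale` (2.0) times the median up to rounding (e.g. 2.0 * 1.343e+02 gives the 2.687e+02 in the first WARNING above), and `percent-clipped` is the share of recent steps whose norm exceeded the threshold. A rough sketch of how such statistics could be tracked (hypothetical; icefall's ScaledAdam optimizer differs in detail):

```python
import collections
import math

# Hypothetical tracker mirroring the optim.py WARNING lines (sketch only).
class GradNormStats:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=window)
        self.clipped = collections.deque(maxlen=window)

    def observe(self, params) -> float:
        """Record the global grad norm, clip in place, return the threshold."""
        params = [p for p in params if p.grad is not None]
        norm = math.sqrt(sum(float((p.grad ** 2).sum()) for p in params))
        self.norms.append(norm)
        threshold = self.clipping_scale * self.median()
        self.clipped.append(norm > threshold)
        if norm > threshold:          # rescale gradients in place
            for p in params:
                p.grad.mul_(threshold / norm)
        return threshold

    def median(self) -> float:
        s = sorted(self.norms)
        return s[len(s) // 2]

    def quartiles(self):
        """min / 25% / median / 75% / max, as printed in the WARNING lines."""
        s, n = sorted(self.norms), len(self.norms) - 1
        return [s[round(q * n)] for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
```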
+2024-08-03 18:29:09,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0
+2024-08-03 18:29:13,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=194865.0, ans=0.125
+2024-08-03 18:29:13,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194865.0, ans=0.125
+2024-08-03 18:29:14,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194865.0, ans=0.1
+2024-08-03 18:29:17,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.45 vs. limit=15.0
+2024-08-03 18:29:25,499 INFO [train.py:1114] (0/4) Epoch 15, batch 2200, loss[loss=0.2177, simple_loss=0.2974, pruned_loss=0.06906, over 13395.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2768, pruned_loss=0.05363, over 2644826.66 frames. ], batch size: 39, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:29:25,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194938.33333333334, ans=0.125
+2024-08-03 18:29:31,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=194938.33333333334, ans=0.2
+2024-08-03 18:29:42,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=195011.66666666666, ans=0.125
+2024-08-03 18:30:10,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=195085.0, ans=15.0
+2024-08-03 18:30:11,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195085.0, ans=0.125
+2024-08-03 18:30:13,244 INFO [train.py:1114] (0/4) Epoch 15, batch 2250, loss[loss=0.1768, simple_loss=0.2684, pruned_loss=0.04261, over 13357.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2765, pruned_loss=0.05342, over 2642126.16 frames. ], batch size: 37, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:30:15,098 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.201e+02 1.486e+02 1.910e+02 3.582e+02, threshold=2.971e+02, percent-clipped=11.0
+2024-08-03 18:30:15,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=195121.66666666666, ans=0.125
+2024-08-03 18:30:24,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.70 vs. limit=22.5
+2024-08-03 18:30:28,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=195158.33333333334, ans=0.0
+2024-08-03 18:30:30,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.94 vs.
limit=15.0 +2024-08-03 18:30:37,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=195195.0, ans=0.0 +2024-08-03 18:30:41,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=195231.66666666666, ans=0.95 +2024-08-03 18:30:48,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195231.66666666666, ans=0.1 +2024-08-03 18:30:54,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.51 vs. limit=15.0 +2024-08-03 18:31:01,083 INFO [train.py:1114] (0/4) Epoch 15, batch 2300, loss[loss=0.1433, simple_loss=0.2314, pruned_loss=0.02761, over 13599.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2754, pruned_loss=0.05334, over 2637584.78 frames. ], batch size: 33, lr: 8.28e-03, grad_scale: 32.0 +2024-08-03 18:31:05,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=195305.0, ans=0.0 +2024-08-03 18:31:13,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195305.0, ans=0.125 +2024-08-03 18:31:20,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=195341.66666666666, ans=0.025 +2024-08-03 18:31:25,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=195378.33333333334, ans=0.035 +2024-08-03 18:31:29,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=195378.33333333334, ans=0.0 +2024-08-03 18:31:31,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.22 vs. limit=15.0 +2024-08-03 18:31:34,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=195378.33333333334, ans=0.5 +2024-08-03 18:31:38,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=195415.0, ans=0.025 +2024-08-03 18:31:42,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=195415.0, ans=0.125 +2024-08-03 18:31:58,483 INFO [train.py:1114] (0/4) Epoch 15, batch 2350, loss[loss=0.2056, simple_loss=0.2889, pruned_loss=0.0611, over 13532.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2757, pruned_loss=0.05343, over 2640397.12 frames. 
], batch size: 38, lr: 8.27e-03, grad_scale: 32.0 +2024-08-03 18:32:00,492 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.540e+01 1.095e+02 1.335e+02 1.545e+02 2.606e+02, threshold=2.670e+02, percent-clipped=0.0 +2024-08-03 18:32:23,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=195561.66666666666, ans=0.125 +2024-08-03 18:32:29,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=195598.33333333334, ans=0.035 +2024-08-03 18:32:30,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=195598.33333333334, ans=0.125 +2024-08-03 18:32:33,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=195598.33333333334, ans=0.0 +2024-08-03 18:32:35,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0 +2024-08-03 18:32:43,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=195635.0, ans=0.0 +2024-08-03 18:32:44,895 INFO [train.py:1114] (0/4) Epoch 15, batch 2400, loss[loss=0.1721, simple_loss=0.2613, pruned_loss=0.04142, over 13530.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2764, pruned_loss=0.0534, over 2641726.89 frames. ], batch size: 35, lr: 8.27e-03, grad_scale: 32.0 +2024-08-03 18:33:05,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=195745.0, ans=0.0 +2024-08-03 18:33:07,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195745.0, ans=0.125 +2024-08-03 18:33:12,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=195781.66666666666, ans=0.2 +2024-08-03 18:33:12,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195781.66666666666, ans=0.1 +2024-08-03 18:33:14,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=195781.66666666666, ans=0.015 +2024-08-03 18:33:17,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.36 vs. limit=12.0 +2024-08-03 18:33:26,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=195818.33333333334, ans=0.125 +2024-08-03 18:33:30,344 INFO [train.py:1114] (0/4) Epoch 15, batch 2450, loss[loss=0.2122, simple_loss=0.2946, pruned_loss=0.0649, over 13370.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2778, pruned_loss=0.05408, over 2632853.58 frames. ], batch size: 37, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:33:33,028 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.379e+01 1.105e+02 1.269e+02 1.556e+02 2.604e+02, threshold=2.537e+02, percent-clipped=0.0 +2024-08-03 18:33:49,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.89 vs. 
limit=10.0
+2024-08-03 18:33:54,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=12.0
+2024-08-03 18:33:56,022 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:34:06,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=195965.0, ans=0.0
+2024-08-03 18:34:11,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=196001.66666666666, ans=0.07
+2024-08-03 18:34:17,545 INFO [train.py:1114] (0/4) Epoch 15, batch 2500, loss[loss=0.2023, simple_loss=0.2959, pruned_loss=0.05436, over 13415.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2779, pruned_loss=0.05411, over 2637088.48 frames. ], batch size: 39, lr: 8.26e-03, grad_scale: 16.0
+2024-08-03 18:34:21,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.17 vs. limit=22.5
+2024-08-03 18:34:23,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=196038.33333333334, ans=0.125
+2024-08-03 18:34:36,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=196111.66666666666, ans=0.2
+2024-08-03 18:34:37,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=196111.66666666666, ans=0.07
+2024-08-03 18:34:53,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=196148.33333333334, ans=0.025
+2024-08-03 18:34:53,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.14 vs. limit=10.0
+2024-08-03 18:35:01,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.11 vs. limit=15.0
+2024-08-03 18:35:03,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=196221.66666666666, ans=0.0
+2024-08-03 18:35:04,090 INFO [train.py:1114] (0/4) Epoch 15, batch 2550, loss[loss=0.1844, simple_loss=0.2599, pruned_loss=0.0544, over 13547.00 frames. ], tot_loss[loss=0.193, simple_loss=0.278, pruned_loss=0.05401, over 2638060.13 frames. ], batch size: 31, lr: 8.26e-03, grad_scale: 16.0
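The `Whitening` lines compare a measured "whiteness" statistic of a module's activations against a (possibly scheduled) limit, written as `metric=... vs. limit=...`; the wording suggests a correction is only applied when the metric exceeds the limit. Purely as an illustration of the kind of statistic involved (the exact formula in `scaling.py` may differ), one normalized measure of the channel-covariance eigenvalue spread is 1.0 for perfectly white features and grows as the covariance concentrates in a few directions:

```python
import torch

def whiteness_metric(feats: torch.Tensor) -> float:
    """feats: (num_frames, num_channels). Hypothetical whiteness statistic:
    1.0 when the channel covariance is proportional to the identity,
    approaching num_channels when one direction dominates."""
    feats = feats - feats.mean(dim=0, keepdim=True)
    cov = feats.T @ feats / feats.shape[0]        # channel covariance (C, C)
    eigs = torch.linalg.eigvalsh(cov).clamp(min=0.0)
    return float((eigs ** 2).mean() / eigs.mean() ** 2)

x = torch.randn(1000, 384)
print(whiteness_metric(x))                        # ~1.0 (white)
print(whiteness_metric(x * torch.rand(384)))      # > 1.0 (unequal variances)
```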
+2024-08-03 18:35:06,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.866e+01 1.097e+02 1.275e+02 1.738e+02 2.775e+02, threshold=2.550e+02, percent-clipped=2.0
+2024-08-03 18:35:10,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=196221.66666666666, ans=0.0
+2024-08-03 18:35:18,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=196258.33333333334, ans=0.2
+2024-08-03 18:35:19,035 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:35:21,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=196295.0, ans=0.125
+2024-08-03 18:35:27,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=196295.0, ans=0.125
+2024-08-03 18:35:32,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=196331.66666666666, ans=0.0
+2024-08-03 18:35:33,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196331.66666666666, ans=0.1
+2024-08-03 18:35:45,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=196368.33333333334, ans=0.04949747468305833
+2024-08-03 18:35:47,177 INFO [train.py:1114] (0/4) Epoch 15, batch 2600, loss[loss=0.2001, simple_loss=0.2802, pruned_loss=0.05998, over 13570.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2782, pruned_loss=0.05397, over 2638255.91 frames. ], batch size: 36, lr: 8.25e-03, grad_scale: 16.0
+2024-08-03 18:35:59,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=196441.66666666666, ans=0.125
+2024-08-03 18:36:19,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0
+2024-08-03 18:36:20,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196515.0, ans=0.1
+2024-08-03 18:36:28,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.51 vs. limit=15.0
+2024-08-03 18:36:29,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=196551.66666666666, ans=0.125
+2024-08-03 18:36:30,675 INFO [train.py:1114] (0/4) Epoch 15, batch 2650, loss[loss=0.1919, simple_loss=0.2799, pruned_loss=0.05194, over 13316.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2784, pruned_loss=0.05416, over 2641044.28 frames. ], batch size: 46, lr: 8.25e-03, grad_scale: 16.0
+2024-08-03 18:36:33,202 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.155e+01 1.086e+02 1.260e+02 1.535e+02 2.930e+02, threshold=2.521e+02, percent-clipped=3.0
+2024-08-03 18:36:41,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.51 vs.
limit=22.5 +2024-08-03 18:36:42,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196625.0, ans=0.1 +2024-08-03 18:36:49,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196661.66666666666, ans=0.1 +2024-08-03 18:37:01,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0 +2024-08-03 18:37:02,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=196698.33333333334, ans=0.0 +2024-08-03 18:37:10,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=196735.0, ans=0.125 +2024-08-03 18:37:12,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196735.0, ans=0.0 +2024-08-03 18:37:17,597 INFO [train.py:1114] (0/4) Epoch 15, batch 2700, loss[loss=0.1986, simple_loss=0.287, pruned_loss=0.05513, over 13542.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2786, pruned_loss=0.05439, over 2637676.04 frames. ], batch size: 40, lr: 8.24e-03, grad_scale: 16.0 +2024-08-03 18:37:23,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=196771.66666666666, ans=0.0 +2024-08-03 18:37:48,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196881.66666666666, ans=0.1 +2024-08-03 18:37:49,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=196881.66666666666, ans=0.125 +2024-08-03 18:37:57,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=196918.33333333334, ans=0.2 +2024-08-03 18:38:01,015 INFO [train.py:1114] (0/4) Epoch 15, batch 2750, loss[loss=0.2055, simple_loss=0.2834, pruned_loss=0.06383, over 13322.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.277, pruned_loss=0.05382, over 2635650.81 frames. ], batch size: 34, lr: 8.24e-03, grad_scale: 16.0 +2024-08-03 18:38:03,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.526e+01 1.073e+02 1.243e+02 1.451e+02 2.224e+02, threshold=2.486e+02, percent-clipped=0.0 +2024-08-03 18:38:05,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=196955.0, ans=0.125 +2024-08-03 18:38:10,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=196991.66666666666, ans=0.125 +2024-08-03 18:38:12,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196991.66666666666, ans=0.125 +2024-08-03 18:38:19,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=197028.33333333334, ans=0.0 +2024-08-03 18:38:38,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=197101.66666666666, ans=0.0 +2024-08-03 18:38:44,507 INFO [train.py:1114] (0/4) Epoch 15, batch 2800, loss[loss=0.2211, simple_loss=0.2961, pruned_loss=0.07304, over 9147.00 frames. 
], tot_loss[loss=0.1929, simple_loss=0.2778, pruned_loss=0.05397, over 2627697.68 frames. ], batch size: 96, lr: 8.24e-03, grad_scale: 32.0
+2024-08-03 18:38:49,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.81 vs. limit=12.0
+2024-08-03 18:38:55,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=197175.0, ans=15.0
+2024-08-03 18:38:58,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=197175.0, ans=0.2
+2024-08-03 18:39:03,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0
+2024-08-03 18:39:10,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.36 vs. limit=15.0
+2024-08-03 18:39:13,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.78 vs. limit=22.5
+2024-08-03 18:39:26,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=197285.0, ans=0.0
+2024-08-03 18:39:27,871 INFO [train.py:1114] (0/4) Epoch 15, batch 2850, loss[loss=0.1737, simple_loss=0.2625, pruned_loss=0.04243, over 13353.00 frames. ], tot_loss[loss=0.194, simple_loss=0.279, pruned_loss=0.05448, over 2621258.88 frames. ], batch size: 35, lr: 8.23e-03, grad_scale: 32.0
+2024-08-03 18:39:30,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.971e+01 1.137e+02 1.312e+02 1.532e+02 3.029e+02, threshold=2.624e+02, percent-clipped=2.0
+2024-08-03 18:39:34,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=197321.66666666666, ans=0.2
+2024-08-03 18:39:38,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=197358.33333333334, ans=0.025
+2024-08-03 18:39:43,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197358.33333333334, ans=0.125
+2024-08-03 18:40:11,212 INFO [train.py:1114] (0/4) Epoch 15, batch 2900, loss[loss=0.1861, simple_loss=0.2682, pruned_loss=0.05203, over 13372.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2796, pruned_loss=0.05444, over 2632022.49 frames. ], batch size: 36, lr: 8.23e-03, grad_scale: 32.0
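In the `train.py` batch lines, `loss[... over N frames]` describes the current batch while `tot_loss[... over M frames]` is a frame-weighted aggregate over recent batches; M hovers around 2.6M frames here, which suggests a decayed window rather than a full-epoch sum. A small sketch of frame-weighted running aggregation with decay (illustrative; the actual bookkeeping in icefall's train.py may differ):

```python
# Hypothetical frame-weighted running loss with exponential decay,
# illustrating the tot_loss[...] fields in the train.py lines above.
class RunningLoss:
    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.weighted_sum = 0.0   # decayed sum of loss * frames
        self.frames = 0.0         # decayed frame count ("over M frames")

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.weighted_sum = self.decay * self.weighted_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.frames, 1.0)

tot = RunningLoss()
tot.update(0.1861, 13372.0)       # figures from the batch 2900 line above
print(f"tot_loss[loss={tot.value:.4g}, over {tot.frames:.2f} frames]")
```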
+2024-08-03 18:40:16,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=197505.0, ans=0.2
+2024-08-03 18:40:17,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197505.0, ans=0.1
+2024-08-03 18:40:18,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=197505.0, ans=0.125
+2024-08-03 18:40:27,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197578.33333333334, ans=0.1
+2024-08-03 18:40:51,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=197651.66666666666, ans=0.125
+2024-08-03 18:40:54,525 INFO [train.py:1114] (0/4) Epoch 15, batch 2950, loss[loss=0.1787, simple_loss=0.2657, pruned_loss=0.04581, over 13326.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2782, pruned_loss=0.05423, over 2630358.84 frames. ], batch size: 34, lr: 8.23e-03, grad_scale: 32.0
+2024-08-03 18:40:57,047 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.111e+02 1.238e+02 1.494e+02 2.430e+02, threshold=2.476e+02, percent-clipped=0.0
+2024-08-03 18:41:25,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197798.33333333334, ans=0.1
+2024-08-03 18:41:38,131 INFO [train.py:1114] (0/4) Epoch 15, batch 3000, loss[loss=0.171, simple_loss=0.2519, pruned_loss=0.04501, over 13535.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2782, pruned_loss=0.05427, over 2630348.99 frames. ], batch size: 37, lr: 8.22e-03, grad_scale: 32.0
+2024-08-03 18:41:38,132 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 18:41:48,175 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2717, pruned_loss=0.03605, over 944034.00 frames.
+2024-08-03 18:41:48,176 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 18:41:50,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=197871.66666666666, ans=0.0
+2024-08-03 18:41:59,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=197908.33333333334, ans=0.025
+2024-08-03 18:42:07,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197945.0, ans=0.125
+2024-08-03 18:42:17,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=197981.66666666666, ans=0.07
+2024-08-03 18:42:19,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=197981.66666666666, ans=0.125
+2024-08-03 18:42:32,415 INFO [train.py:1114] (0/4) Epoch 15, batch 3050, loss[loss=0.1627, simple_loss=0.2516, pruned_loss=0.03691, over 13549.00 frames. ], tot_loss[loss=0.194, simple_loss=0.279, pruned_loss=0.05457, over 2627208.74 frames.
], batch size: 35, lr: 8.22e-03, grad_scale: 32.0 +2024-08-03 18:42:35,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.644e+01 1.092e+02 1.243e+02 1.494e+02 2.695e+02, threshold=2.487e+02, percent-clipped=3.0 +2024-08-03 18:42:36,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=198055.0, ans=0.125 +2024-08-03 18:42:36,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198055.0, ans=0.125 +2024-08-03 18:42:49,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=198091.66666666666, ans=0.2 +2024-08-03 18:42:57,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=198128.33333333334, ans=0.125 +2024-08-03 18:42:59,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198165.0, ans=0.125 +2024-08-03 18:43:07,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.21 vs. limit=15.0 +2024-08-03 18:43:16,337 INFO [train.py:1114] (0/4) Epoch 15, batch 3100, loss[loss=0.232, simple_loss=0.3087, pruned_loss=0.07763, over 13225.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2786, pruned_loss=0.05422, over 2627187.50 frames. ], batch size: 46, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:43:31,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=198275.0, ans=0.04949747468305833 +2024-08-03 18:43:39,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=198311.66666666666, ans=0.2 +2024-08-03 18:43:41,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198348.33333333334, ans=0.1 +2024-08-03 18:44:00,847 INFO [train.py:1114] (0/4) Epoch 15, batch 3150, loss[loss=0.2062, simple_loss=0.2998, pruned_loss=0.05629, over 13057.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2783, pruned_loss=0.05412, over 2628933.54 frames. ], batch size: 48, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:44:02,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=198421.66666666666, ans=0.0 +2024-08-03 18:44:02,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=198421.66666666666, ans=0.1 +2024-08-03 18:44:03,298 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.819e+01 1.142e+02 1.468e+02 1.823e+02 3.085e+02, threshold=2.937e+02, percent-clipped=3.0 +2024-08-03 18:44:03,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.94 vs. 
limit=10.0 +2024-08-03 18:44:08,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=198458.33333333334, ans=0.0 +2024-08-03 18:44:10,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198458.33333333334, ans=0.0 +2024-08-03 18:44:12,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=198458.33333333334, ans=0.125 +2024-08-03 18:44:30,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=198495.0, ans=0.0 +2024-08-03 18:44:37,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=198531.66666666666, ans=0.0 +2024-08-03 18:44:55,024 INFO [train.py:1114] (0/4) Epoch 15, batch 3200, loss[loss=0.2051, simple_loss=0.2888, pruned_loss=0.06069, over 13536.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2774, pruned_loss=0.05354, over 2634515.67 frames. ], batch size: 37, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:45:04,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198641.66666666666, ans=0.1 +2024-08-03 18:45:05,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=198641.66666666666, ans=0.125 +2024-08-03 18:45:09,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198641.66666666666, ans=0.125 +2024-08-03 18:45:09,347 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:45:16,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198678.33333333334, ans=0.1 +2024-08-03 18:45:38,327 INFO [train.py:1114] (0/4) Epoch 15, batch 3250, loss[loss=0.2005, simple_loss=0.2889, pruned_loss=0.05605, over 13398.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2772, pruned_loss=0.05316, over 2638796.93 frames. ], batch size: 38, lr: 8.20e-03, grad_scale: 32.0 +2024-08-03 18:45:40,866 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.293e+01 1.129e+02 1.312e+02 1.541e+02 2.254e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 18:45:51,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198825.0, ans=0.125 +2024-08-03 18:45:55,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.89 vs. limit=6.0 +2024-08-03 18:46:03,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-08-03 18:46:05,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=198898.33333333334, ans=0.0 +2024-08-03 18:46:16,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.21 vs. 
limit=15.0 +2024-08-03 18:46:21,438 INFO [train.py:1114] (0/4) Epoch 15, batch 3300, loss[loss=0.2097, simple_loss=0.2983, pruned_loss=0.06058, over 12898.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.276, pruned_loss=0.0527, over 2640222.03 frames. ], batch size: 52, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:46:25,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=198971.66666666666, ans=0.125 +2024-08-03 18:46:40,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=199045.0, ans=0.025 +2024-08-03 18:46:52,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0 +2024-08-03 18:46:53,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199081.66666666666, ans=0.0 +2024-08-03 18:46:54,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=199118.33333333334, ans=0.125 +2024-08-03 18:47:00,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=199118.33333333334, ans=0.0 +2024-08-03 18:47:04,160 INFO [train.py:1114] (0/4) Epoch 15, batch 3350, loss[loss=0.1784, simple_loss=0.2648, pruned_loss=0.04602, over 13262.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2763, pruned_loss=0.05302, over 2630371.85 frames. ], batch size: 49, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:47:07,471 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.754e+01 1.143e+02 1.314e+02 1.590e+02 2.231e+02, threshold=2.628e+02, percent-clipped=0.0 +2024-08-03 18:47:07,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=199155.0, ans=0.1 +2024-08-03 18:47:08,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199155.0, ans=0.125 +2024-08-03 18:47:08,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=199155.0, ans=0.1 +2024-08-03 18:47:20,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=199228.33333333334, ans=0.025 +2024-08-03 18:47:35,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=199265.0, ans=0.125 +2024-08-03 18:47:41,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199301.66666666666, ans=0.125 +2024-08-03 18:47:47,304 INFO [train.py:1114] (0/4) Epoch 15, batch 3400, loss[loss=0.1718, simple_loss=0.25, pruned_loss=0.04679, over 13520.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2758, pruned_loss=0.05293, over 2625477.30 frames. 
], batch size: 31, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:47:49,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=199338.33333333334, ans=0.2 +2024-08-03 18:47:54,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=199338.33333333334, ans=0.0 +2024-08-03 18:47:55,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.21 vs. limit=15.0 +2024-08-03 18:47:56,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=199375.0, ans=0.0 +2024-08-03 18:47:58,684 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0 +2024-08-03 18:48:06,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=199411.66666666666, ans=0.02 +2024-08-03 18:48:15,638 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=12.0 +2024-08-03 18:48:21,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=199485.0, ans=0.2 +2024-08-03 18:48:24,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199485.0, ans=0.1 +2024-08-03 18:48:30,481 INFO [train.py:1114] (0/4) Epoch 15, batch 3450, loss[loss=0.189, simple_loss=0.2714, pruned_loss=0.05334, over 12817.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.277, pruned_loss=0.05368, over 2627967.02 frames. ], batch size: 52, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:48:32,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.99 vs. limit=10.0 +2024-08-03 18:48:33,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=199521.66666666666, ans=0.125 +2024-08-03 18:48:33,769 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.288e+01 1.166e+02 1.372e+02 1.679e+02 2.920e+02, threshold=2.743e+02, percent-clipped=3.0 +2024-08-03 18:48:38,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199558.33333333334, ans=0.1 +2024-08-03 18:48:43,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=199558.33333333334, ans=0.125 +2024-08-03 18:48:50,946 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:48:54,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=199595.0, ans=0.125 +2024-08-03 18:48:58,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=199631.66666666666, ans=0.09899494936611666 +2024-08-03 18:49:13,506 INFO [train.py:1114] (0/4) Epoch 15, batch 3500, loss[loss=0.1638, simple_loss=0.2467, pruned_loss=0.04044, over 13535.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2761, pruned_loss=0.05336, over 2630427.76 frames. 
], batch size: 34, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:49:22,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=199741.66666666666, ans=0.0 +2024-08-03 18:49:47,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=199851.66666666666, ans=0.125 +2024-08-03 18:49:49,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=199851.66666666666, ans=0.125 +2024-08-03 18:49:53,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=199851.66666666666, ans=0.2 +2024-08-03 18:49:55,944 INFO [train.py:1114] (0/4) Epoch 15, batch 3550, loss[loss=0.1808, simple_loss=0.2665, pruned_loss=0.04756, over 12422.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2786, pruned_loss=0.0546, over 2628922.08 frames. ], batch size: 58, lr: 8.18e-03, grad_scale: 16.0 +2024-08-03 18:49:59,302 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.733e+01 1.107e+02 1.282e+02 1.546e+02 2.459e+02, threshold=2.565e+02, percent-clipped=0.0 +2024-08-03 18:50:09,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.62 vs. limit=15.0 +2024-08-03 18:50:17,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199961.66666666666, ans=0.1 +2024-08-03 18:50:17,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=199961.66666666666, ans=0.0 +2024-08-03 18:50:17,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=199961.66666666666, ans=0.1 +2024-08-03 18:50:17,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0 +2024-08-03 18:50:18,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=199961.66666666666, ans=0.125 +2024-08-03 18:50:20,463 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.35 vs. limit=15.0 +2024-08-03 18:50:22,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=199998.33333333334, ans=0.0 +2024-08-03 18:50:32,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=200035.0, ans=0.125 +2024-08-03 18:50:34,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=200035.0, ans=0.025 +2024-08-03 18:50:38,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=200035.0, ans=0.125 +2024-08-03 18:50:40,662 INFO [train.py:1114] (0/4) Epoch 15, batch 3600, loss[loss=0.2153, simple_loss=0.2937, pruned_loss=0.06844, over 9106.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2834, pruned_loss=0.05874, over 2489320.31 frames. 
], batch size: 96, lr: 8.18e-03, grad_scale: 32.0
+2024-08-03 18:50:43,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=200071.66666666666, ans=0.125
+2024-08-03 18:50:46,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=200071.66666666666, ans=0.0
+2024-08-03 18:50:53,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=200108.33333333334, ans=0.2
+2024-08-03 18:50:53,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=200108.33333333334, ans=0.125
+2024-08-03 18:51:16,687 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-15.pt
+2024-08-03 18:52:09,584 INFO [train.py:1114] (0/4) Epoch 16, batch 0, loss[loss=0.1462, simple_loss=0.2381, pruned_loss=0.02721, over 13365.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2381, pruned_loss=0.02721, over 13365.00 frames. ], batch size: 33, lr: 7.91e-03, grad_scale: 16.0
+2024-08-03 18:52:09,585 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 18:52:19,527 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1763, simple_loss=0.2767, pruned_loss=0.03798, over 944034.00 frames.
+2024-08-03 18:52:19,527 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 18:52:24,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=200222.0, ans=0.0
+2024-08-03 18:52:32,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=200258.66666666666, ans=0.125
+2024-08-03 18:52:33,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.265e+01 1.242e+02 1.434e+02 1.560e+02 1.878e+02, threshold=2.867e+02, percent-clipped=0.0
+2024-08-03 18:52:50,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200332.0, ans=0.125
+2024-08-03 18:53:01,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=200368.66666666666, ans=0.2
+2024-08-03 18:53:05,399 INFO [train.py:1114] (0/4) Epoch 16, batch 50, loss[loss=0.182, simple_loss=0.2567, pruned_loss=0.05365, over 13379.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2799, pruned_loss=0.05517, over 578264.11 frames. ], batch size: 32, lr: 7.91e-03, grad_scale: 16.0
+2024-08-03 18:53:38,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=200515.33333333334, ans=0.05
+2024-08-03 18:53:43,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.25 vs. limit=15.0
+2024-08-03 18:53:50,766 INFO [train.py:1114] (0/4) Epoch 16, batch 100, loss[loss=0.1835, simple_loss=0.2715, pruned_loss=0.04777, over 13550.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2804, pruned_loss=0.05469, over 1025384.01 frames. ], batch size: 35, lr: 7.91e-03, grad_scale: 16.0
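The block above marks an epoch boundary: the epoch-15 checkpoint is saved to the experiment directory, then epoch 16 opens with a validation pass (the `Computing validation loss` / `validation:` pair, always over the same 944034.00 validation frames). A runnable sketch of such a frame-weighted validation helper, with the cadence noted in comments (hypothetical structure; the real icefall train.py tracks more state):

```python
import torch

def compute_validation(model, valid_loader, criterion) -> float:
    """Frame-weighted validation loss, as in the 'validation:' lines (sketch)."""
    model.eval()
    total, frames = 0.0, 0.0
    with torch.no_grad():
        for feats, targets, num_frames in valid_loader:
            total += criterion(model(feats), targets).item() * num_frames
            frames += num_frames
    model.train()
    return total / max(frames, 1.0)

# Cadence visible in the log: validation runs at batch 0 and at batch 3000
# (i.e. when batch_idx % 3000 == 0), and a checkpoint is written at the end
# of each epoch, e.g. torch.save(..., f"{exp_dir}/epoch-{epoch}.pt").
```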
+2024-08-03 18:53:57,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=200588.66666666666, ans=0.125
+2024-08-03 18:54:04,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.98 vs. limit=22.5
+2024-08-03 18:54:06,128 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.140e+02 1.333e+02 1.689e+02 2.611e+02, threshold=2.666e+02, percent-clipped=0.0
+2024-08-03 18:54:13,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200662.0, ans=0.1
+2024-08-03 18:54:19,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=200662.0, ans=0.125
+2024-08-03 18:54:21,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=200662.0, ans=0.125
+2024-08-03 18:54:38,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.78 vs. limit=15.0
+2024-08-03 18:54:43,466 INFO [train.py:1114] (0/4) Epoch 16, batch 150, loss[loss=0.1778, simple_loss=0.2559, pruned_loss=0.04984, over 13425.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2774, pruned_loss=0.0528, over 1386777.93 frames. ], batch size: 32, lr: 7.90e-03, grad_scale: 16.0
+2024-08-03 18:54:51,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200808.66666666666, ans=0.125
+2024-08-03 18:54:52,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0
+2024-08-03 18:54:52,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=200808.66666666666, ans=0.025
+2024-08-03 18:54:59,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.80 vs. limit=10.0
+2024-08-03 18:55:28,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=200955.33333333334, ans=0.025
+2024-08-03 18:55:29,223 INFO [train.py:1114] (0/4) Epoch 16, batch 200, loss[loss=0.1958, simple_loss=0.2865, pruned_loss=0.05253, over 12430.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2756, pruned_loss=0.05222, over 1665962.95 frames.
], batch size: 58, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:55:29,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=200955.33333333334, ans=0.0 +2024-08-03 18:55:36,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=200955.33333333334, ans=0.125 +2024-08-03 18:55:37,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=200992.0, ans=0.0 +2024-08-03 18:55:37,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=200992.0, ans=0.05 +2024-08-03 18:55:41,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-08-03 18:55:42,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.144e+01 1.093e+02 1.229e+02 1.620e+02 3.492e+02, threshold=2.459e+02, percent-clipped=5.0 +2024-08-03 18:55:52,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201028.66666666666, ans=0.1 +2024-08-03 18:55:59,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=201065.33333333334, ans=0.0 +2024-08-03 18:56:18,890 INFO [train.py:1114] (0/4) Epoch 16, batch 250, loss[loss=0.1718, simple_loss=0.2703, pruned_loss=0.03671, over 13306.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2754, pruned_loss=0.05206, over 1885514.30 frames. ], batch size: 46, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:56:23,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.12 vs. limit=15.0 +2024-08-03 18:56:26,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201138.66666666666, ans=0.125 +2024-08-03 18:56:57,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=201285.33333333334, ans=0.125 +2024-08-03 18:56:57,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=201285.33333333334, ans=0.0 +2024-08-03 18:57:01,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=201285.33333333334, ans=0.07 +2024-08-03 18:57:05,303 INFO [train.py:1114] (0/4) Epoch 16, batch 300, loss[loss=0.1955, simple_loss=0.2771, pruned_loss=0.057, over 13445.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2738, pruned_loss=0.05149, over 2051903.08 frames. 
], batch size: 42, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:57:19,199 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.378e+01 1.118e+02 1.251e+02 1.604e+02 3.551e+02, threshold=2.502e+02, percent-clipped=3.0 +2024-08-03 18:57:25,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=201395.33333333334, ans=0.125 +2024-08-03 18:57:39,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201432.0, ans=0.125 +2024-08-03 18:57:49,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=201468.66666666666, ans=0.125 +2024-08-03 18:57:58,352 INFO [train.py:1114] (0/4) Epoch 16, batch 350, loss[loss=0.1791, simple_loss=0.257, pruned_loss=0.05066, over 13593.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2746, pruned_loss=0.05159, over 2182204.35 frames. ], batch size: 33, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:57:58,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.79 vs. limit=15.0 +2024-08-03 18:58:24,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=12.0 +2024-08-03 18:58:25,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0 +2024-08-03 18:58:34,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=201615.33333333334, ans=0.2 +2024-08-03 18:58:37,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=201615.33333333334, ans=0.2 +2024-08-03 18:58:47,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=201688.66666666666, ans=0.07 +2024-08-03 18:58:47,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201688.66666666666, ans=0.125 +2024-08-03 18:58:48,050 INFO [train.py:1114] (0/4) Epoch 16, batch 400, loss[loss=0.1948, simple_loss=0.279, pruned_loss=0.05531, over 13356.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2744, pruned_loss=0.05166, over 2286562.80 frames. 
], batch size: 37, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:58:50,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=201688.66666666666, ans=0.125 +2024-08-03 18:58:56,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201725.33333333334, ans=0.1 +2024-08-03 18:59:01,847 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.809e+01 1.125e+02 1.305e+02 1.618e+02 2.689e+02, threshold=2.611e+02, percent-clipped=3.0 +2024-08-03 18:59:12,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=201762.0, ans=0.0 +2024-08-03 18:59:25,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=201835.33333333334, ans=0.0 +2024-08-03 18:59:27,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=201835.33333333334, ans=0.2 +2024-08-03 18:59:32,921 INFO [train.py:1114] (0/4) Epoch 16, batch 450, loss[loss=0.1827, simple_loss=0.2738, pruned_loss=0.04583, over 13549.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2744, pruned_loss=0.05163, over 2361013.69 frames. ], batch size: 38, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:59:55,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201945.33333333334, ans=0.125 +2024-08-03 19:00:52,742 INFO [train.py:1114] (0/4) Epoch 16, batch 500, loss[loss=0.2315, simple_loss=0.3157, pruned_loss=0.07367, over 13451.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2744, pruned_loss=0.05161, over 2426314.95 frames. ], batch size: 43, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 19:00:53,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.62 vs. limit=15.0 +2024-08-03 19:01:06,070 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.073e+02 1.280e+02 1.513e+02 2.984e+02, threshold=2.559e+02, percent-clipped=3.0 +2024-08-03 19:01:09,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=202128.66666666666, ans=0.125 +2024-08-03 19:01:16,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=202128.66666666666, ans=0.125 +2024-08-03 19:01:30,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=202202.0, ans=0.0 +2024-08-03 19:01:35,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=202202.0, ans=0.125 +2024-08-03 19:01:37,635 INFO [train.py:1114] (0/4) Epoch 16, batch 550, loss[loss=0.2179, simple_loss=0.2987, pruned_loss=0.06856, over 13008.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2744, pruned_loss=0.05138, over 2469508.22 frames. 
], batch size: 48, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:01:39,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202238.66666666666, ans=0.125 +2024-08-03 19:01:48,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=202275.33333333334, ans=0.2 +2024-08-03 19:01:49,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=202275.33333333334, ans=0.0 +2024-08-03 19:01:59,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=202312.0, ans=0.05 +2024-08-03 19:02:00,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=202312.0, ans=0.0 +2024-08-03 19:02:25,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=202385.33333333334, ans=0.0 +2024-08-03 19:02:30,486 INFO [train.py:1114] (0/4) Epoch 16, batch 600, loss[loss=0.1914, simple_loss=0.2831, pruned_loss=0.04985, over 13326.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2748, pruned_loss=0.05158, over 2509110.92 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:02:43,893 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.542e+01 1.053e+02 1.176e+02 1.457e+02 2.332e+02, threshold=2.351e+02, percent-clipped=0.0 +2024-08-03 19:03:06,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=202568.66666666666, ans=0.125 +2024-08-03 19:03:14,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=202605.33333333334, ans=0.125 +2024-08-03 19:03:15,309 INFO [train.py:1114] (0/4) Epoch 16, batch 650, loss[loss=0.1731, simple_loss=0.2628, pruned_loss=0.04171, over 13565.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2739, pruned_loss=0.05103, over 2544376.60 frames. ], batch size: 37, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:03:21,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=202605.33333333334, ans=0.0 +2024-08-03 19:03:28,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=202642.0, ans=0.0 +2024-08-03 19:03:29,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0 +2024-08-03 19:03:42,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=202715.33333333334, ans=0.025 +2024-08-03 19:03:48,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=202715.33333333334, ans=0.125 +2024-08-03 19:04:00,443 INFO [train.py:1114] (0/4) Epoch 16, batch 700, loss[loss=0.1693, simple_loss=0.2561, pruned_loss=0.04126, over 13538.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2743, pruned_loss=0.05108, over 2566917.19 frames. 
], batch size: 35, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:00,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=202788.66666666666, ans=0.0 +2024-08-03 19:04:00,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=202788.66666666666, ans=0.2 +2024-08-03 19:04:01,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202788.66666666666, ans=0.1 +2024-08-03 19:04:13,877 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.401e+01 1.143e+02 1.370e+02 1.738e+02 3.116e+02, threshold=2.740e+02, percent-clipped=8.0 +2024-08-03 19:04:36,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=202935.33333333334, ans=0.0 +2024-08-03 19:04:38,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202935.33333333334, ans=0.125 +2024-08-03 19:04:45,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=202972.0, ans=0.02 +2024-08-03 19:04:45,952 INFO [train.py:1114] (0/4) Epoch 16, batch 750, loss[loss=0.1808, simple_loss=0.2738, pruned_loss=0.04395, over 13379.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2741, pruned_loss=0.05145, over 2584388.56 frames. ], batch size: 37, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:49,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=202972.0, ans=0.2 +2024-08-03 19:04:51,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202972.0, ans=0.1 +2024-08-03 19:05:05,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=203045.33333333334, ans=0.2 +2024-08-03 19:05:33,740 INFO [train.py:1114] (0/4) Epoch 16, batch 800, loss[loss=0.1473, simple_loss=0.2299, pruned_loss=0.03236, over 13322.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2738, pruned_loss=0.05116, over 2598160.22 frames. ], batch size: 33, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:05:36,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203155.33333333334, ans=0.125 +2024-08-03 19:05:49,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.111e+01 1.103e+02 1.311e+02 1.683e+02 3.142e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 19:05:50,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=15.0 +2024-08-03 19:06:00,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=203228.66666666666, ans=0.025 +2024-08-03 19:06:07,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203265.33333333334, ans=0.125 +2024-08-03 19:06:17,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.71 vs. 
limit=15.0 +2024-08-03 19:06:25,701 INFO [train.py:1114] (0/4) Epoch 16, batch 850, loss[loss=0.2007, simple_loss=0.2902, pruned_loss=0.0556, over 13327.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2738, pruned_loss=0.05108, over 2610393.01 frames. ], batch size: 40, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:06:28,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=203338.66666666666, ans=0.025 +2024-08-03 19:06:31,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203338.66666666666, ans=0.125 +2024-08-03 19:06:43,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=203412.0, ans=0.07 +2024-08-03 19:06:58,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203448.66666666666, ans=0.125 +2024-08-03 19:07:10,042 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:07:10,796 INFO [train.py:1114] (0/4) Epoch 16, batch 900, loss[loss=0.1682, simple_loss=0.2451, pruned_loss=0.04567, over 13343.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2744, pruned_loss=0.05144, over 2613114.28 frames. ], batch size: 33, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:07:15,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203522.0, ans=0.125 +2024-08-03 19:07:24,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.183e+01 1.118e+02 1.381e+02 1.663e+02 2.638e+02, threshold=2.763e+02, percent-clipped=1.0 +2024-08-03 19:07:50,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=203668.66666666666, ans=0.025 +2024-08-03 19:07:51,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=203668.66666666666, ans=0.2 +2024-08-03 19:07:55,952 INFO [train.py:1114] (0/4) Epoch 16, batch 950, loss[loss=0.1647, simple_loss=0.2487, pruned_loss=0.04029, over 13530.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2744, pruned_loss=0.05152, over 2613329.51 frames. 
], batch size: 34, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:07:59,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=203705.33333333334, ans=0.0 +2024-08-03 19:08:07,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=203742.0, ans=0.07 +2024-08-03 19:08:09,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203742.0, ans=0.125 +2024-08-03 19:08:17,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=203778.66666666666, ans=0.0 +2024-08-03 19:08:25,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=203815.33333333334, ans=0.125 +2024-08-03 19:08:31,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=203852.0, ans=0.125 +2024-08-03 19:08:34,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.40 vs. limit=6.0 +2024-08-03 19:08:37,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=203852.0, ans=0.125 +2024-08-03 19:08:41,483 INFO [train.py:1114] (0/4) Epoch 16, batch 1000, loss[loss=0.1978, simple_loss=0.2896, pruned_loss=0.05305, over 13364.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2756, pruned_loss=0.05212, over 2611679.63 frames. ], batch size: 35, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:08:55,036 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.698e+01 1.080e+02 1.221e+02 1.447e+02 2.524e+02, threshold=2.442e+02, percent-clipped=0.0 +2024-08-03 19:08:55,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203925.33333333334, ans=0.125 +2024-08-03 19:08:56,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=203925.33333333334, ans=0.0 +2024-08-03 19:09:02,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=203962.0, ans=0.0 +2024-08-03 19:09:08,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203962.0, ans=0.125 +2024-08-03 19:09:14,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203998.66666666666, ans=0.125 +2024-08-03 19:09:34,160 INFO [train.py:1114] (0/4) Epoch 16, batch 1050, loss[loss=0.1878, simple_loss=0.282, pruned_loss=0.04684, over 13571.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.275, pruned_loss=0.05226, over 2616049.18 frames. 
], batch size: 39, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:09:42,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=204072.0, ans=0.125 +2024-08-03 19:09:44,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204108.66666666666, ans=0.0 +2024-08-03 19:09:52,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.39 vs. limit=15.0 +2024-08-03 19:10:08,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=204182.0, ans=0.0 +2024-08-03 19:10:20,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=204255.33333333334, ans=0.025 +2024-08-03 19:10:21,118 INFO [train.py:1114] (0/4) Epoch 16, batch 1100, loss[loss=0.185, simple_loss=0.2682, pruned_loss=0.05091, over 13559.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2747, pruned_loss=0.05168, over 2619777.20 frames. ], batch size: 36, lr: 7.84e-03, grad_scale: 16.0 +2024-08-03 19:10:22,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=15.0 +2024-08-03 19:10:24,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=204255.33333333334, ans=0.07 +2024-08-03 19:10:35,476 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.269e+01 1.085e+02 1.218e+02 1.448e+02 2.223e+02, threshold=2.436e+02, percent-clipped=0.0 +2024-08-03 19:10:35,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=204292.0, ans=0.125 +2024-08-03 19:10:58,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=204402.0, ans=0.2 +2024-08-03 19:11:06,545 INFO [train.py:1114] (0/4) Epoch 16, batch 1150, loss[loss=0.1899, simple_loss=0.2725, pruned_loss=0.0537, over 13564.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2748, pruned_loss=0.05181, over 2618297.73 frames. 
], batch size: 36, lr: 7.83e-03, grad_scale: 16.0 +2024-08-03 19:11:27,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=204512.0, ans=0.0 +2024-08-03 19:11:29,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=204512.0, ans=0.025 +2024-08-03 19:11:38,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=204548.66666666666, ans=0.125 +2024-08-03 19:11:44,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=204585.33333333334, ans=0.025 +2024-08-03 19:11:46,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=204585.33333333334, ans=0.125 +2024-08-03 19:11:49,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=204585.33333333334, ans=0.125 +2024-08-03 19:11:51,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.59 vs. limit=12.0 +2024-08-03 19:11:52,582 INFO [train.py:1114] (0/4) Epoch 16, batch 1200, loss[loss=0.1889, simple_loss=0.2803, pruned_loss=0.0488, over 13580.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2759, pruned_loss=0.05227, over 2615599.09 frames. ], batch size: 39, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:11:55,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=11.17 vs. limit=15.0 +2024-08-03 19:11:57,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=204622.0, ans=0.125 +2024-08-03 19:12:06,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.81 vs. limit=6.0 +2024-08-03 19:12:06,570 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.149e+02 1.396e+02 1.741e+02 2.381e+02, threshold=2.791e+02, percent-clipped=0.0 +2024-08-03 19:12:09,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=204695.33333333334, ans=0.0 +2024-08-03 19:12:16,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204695.33333333334, ans=0.1 +2024-08-03 19:12:39,512 INFO [train.py:1114] (0/4) Epoch 16, batch 1250, loss[loss=0.1965, simple_loss=0.2881, pruned_loss=0.05243, over 13413.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2762, pruned_loss=0.05237, over 2627573.44 frames. 
], batch size: 42, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:12:53,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=204842.0, ans=0.0 +2024-08-03 19:13:00,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=204878.66666666666, ans=0.125 +2024-08-03 19:13:16,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=204915.33333333334, ans=0.0 +2024-08-03 19:13:20,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204952.0, ans=0.125 +2024-08-03 19:13:27,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=204952.0, ans=0.95 +2024-08-03 19:13:30,422 INFO [train.py:1114] (0/4) Epoch 16, batch 1300, loss[loss=0.1785, simple_loss=0.2725, pruned_loss=0.04226, over 12980.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.275, pruned_loss=0.05179, over 2630425.99 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:13:44,835 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.037e+01 1.093e+02 1.262e+02 1.670e+02 2.902e+02, threshold=2.524e+02, percent-clipped=1.0 +2024-08-03 19:13:55,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=205062.0, ans=0.125 +2024-08-03 19:14:01,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=205098.66666666666, ans=0.2 +2024-08-03 19:14:06,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=205135.33333333334, ans=0.2 +2024-08-03 19:14:08,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205135.33333333334, ans=0.125 +2024-08-03 19:14:15,779 INFO [train.py:1114] (0/4) Epoch 16, batch 1350, loss[loss=0.1917, simple_loss=0.2758, pruned_loss=0.05383, over 13537.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2745, pruned_loss=0.05158, over 2638557.99 frames. ], batch size: 37, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:14:20,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.64 vs. limit=22.5 +2024-08-03 19:14:26,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=205208.66666666666, ans=0.0 +2024-08-03 19:14:36,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.31 vs. limit=6.0 +2024-08-03 19:14:53,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205318.66666666666, ans=0.1 +2024-08-03 19:14:54,975 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-56000.pt +2024-08-03 19:15:02,775 INFO [train.py:1114] (0/4) Epoch 16, batch 1400, loss[loss=0.1726, simple_loss=0.2512, pruned_loss=0.04697, over 13244.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2746, pruned_loss=0.05175, over 2642781.19 frames. 
], batch size: 31, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:15:03,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=205355.33333333334, ans=0.125 +2024-08-03 19:15:07,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205355.33333333334, ans=0.125 +2024-08-03 19:15:15,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205392.0, ans=0.125 +2024-08-03 19:15:17,274 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.642e+01 1.154e+02 1.285e+02 1.682e+02 2.521e+02, threshold=2.570e+02, percent-clipped=0.0 +2024-08-03 19:15:29,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=205465.33333333334, ans=0.1 +2024-08-03 19:15:29,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205465.33333333334, ans=0.1 +2024-08-03 19:15:30,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=205465.33333333334, ans=0.0 +2024-08-03 19:15:34,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=205465.33333333334, ans=0.125 +2024-08-03 19:15:36,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205465.33333333334, ans=0.125 +2024-08-03 19:15:44,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205502.0, ans=0.125 +2024-08-03 19:15:48,517 INFO [train.py:1114] (0/4) Epoch 16, batch 1450, loss[loss=0.2262, simple_loss=0.3119, pruned_loss=0.0702, over 13439.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2749, pruned_loss=0.05188, over 2641049.35 frames. ], batch size: 43, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:16:03,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=205575.33333333334, ans=0.125 +2024-08-03 19:16:22,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.29 vs. limit=15.0 +2024-08-03 19:16:26,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205685.33333333334, ans=0.125 +2024-08-03 19:16:28,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.32 vs. limit=15.0 +2024-08-03 19:16:32,767 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:16:37,177 INFO [train.py:1114] (0/4) Epoch 16, batch 1500, loss[loss=0.2231, simple_loss=0.3069, pruned_loss=0.06968, over 13395.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2748, pruned_loss=0.05163, over 2640884.09 frames. 
], batch size: 39, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:16:56,114 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.141e+02 1.324e+02 1.628e+02 2.574e+02, threshold=2.648e+02, percent-clipped=1.0 +2024-08-03 19:16:58,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=205758.66666666666, ans=0.125 +2024-08-03 19:17:22,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.19 vs. limit=15.0 +2024-08-03 19:17:26,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=205905.33333333334, ans=0.05 +2024-08-03 19:17:27,207 INFO [train.py:1114] (0/4) Epoch 16, batch 1550, loss[loss=0.197, simple_loss=0.2954, pruned_loss=0.04925, over 13391.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2752, pruned_loss=0.05197, over 2631175.30 frames. ], batch size: 41, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:17:39,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=205942.0, ans=0.0 +2024-08-03 19:17:48,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=205978.66666666666, ans=0.0 +2024-08-03 19:18:09,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206052.0, ans=0.1 +2024-08-03 19:18:12,984 INFO [train.py:1114] (0/4) Epoch 16, batch 1600, loss[loss=0.2015, simple_loss=0.2943, pruned_loss=0.05432, over 13567.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2753, pruned_loss=0.05227, over 2624317.11 frames. ], batch size: 39, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:18:25,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206125.33333333334, ans=0.125 +2024-08-03 19:18:25,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=206125.33333333334, ans=0.0 +2024-08-03 19:18:27,559 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.061e+01 1.163e+02 1.376e+02 1.726e+02 3.125e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 19:18:45,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=206198.66666666666, ans=0.2 +2024-08-03 19:18:54,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=206235.33333333334, ans=0.0 +2024-08-03 19:18:58,357 INFO [train.py:1114] (0/4) Epoch 16, batch 1650, loss[loss=0.2047, simple_loss=0.2941, pruned_loss=0.05759, over 13323.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2753, pruned_loss=0.05249, over 2621635.87 frames. ], batch size: 40, lr: 7.80e-03, grad_scale: 16.0 +2024-08-03 19:19:00,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-08-03 19:19:00,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.65 vs. 
limit=15.0 +2024-08-03 19:19:08,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.97 vs. limit=6.0 +2024-08-03 19:19:16,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.42 vs. limit=15.0 +2024-08-03 19:19:19,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=206345.33333333334, ans=0.07 +2024-08-03 19:19:31,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.06 vs. limit=22.5 +2024-08-03 19:19:31,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=206382.0, ans=0.04949747468305833 +2024-08-03 19:19:41,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=206418.66666666666, ans=0.0 +2024-08-03 19:19:42,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206418.66666666666, ans=0.125 +2024-08-03 19:19:45,384 INFO [train.py:1114] (0/4) Epoch 16, batch 1700, loss[loss=0.1611, simple_loss=0.2384, pruned_loss=0.04195, over 13231.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2748, pruned_loss=0.05199, over 2630106.33 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:19:48,439 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.35 vs. limit=15.0 +2024-08-03 19:19:59,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=206492.0, ans=0.07 +2024-08-03 19:20:00,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206492.0, ans=0.1 +2024-08-03 19:20:00,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206492.0, ans=0.125 +2024-08-03 19:20:02,485 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.148e+01 1.167e+02 1.345e+02 1.765e+02 2.775e+02, threshold=2.690e+02, percent-clipped=1.0 +2024-08-03 19:20:04,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=206528.66666666666, ans=0.125 +2024-08-03 19:20:29,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206602.0, ans=0.1 +2024-08-03 19:20:31,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=206602.0, ans=0.025 +2024-08-03 19:20:35,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=206638.66666666666, ans=0.0 +2024-08-03 19:20:36,089 INFO [train.py:1114] (0/4) Epoch 16, batch 1750, loss[loss=0.1567, simple_loss=0.2379, pruned_loss=0.0377, over 13533.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2744, pruned_loss=0.05227, over 2633254.94 frames. 
], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:20:38,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-08-03 19:20:48,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=206675.33333333334, ans=0.035 +2024-08-03 19:20:51,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206675.33333333334, ans=0.125 +2024-08-03 19:20:53,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.42 vs. limit=22.5 +2024-08-03 19:20:55,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206712.0, ans=0.125 +2024-08-03 19:21:01,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206712.0, ans=0.125 +2024-08-03 19:21:08,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=206748.66666666666, ans=22.5 +2024-08-03 19:21:17,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.83 vs. limit=10.0 +2024-08-03 19:21:21,432 INFO [train.py:1114] (0/4) Epoch 16, batch 1800, loss[loss=0.1748, simple_loss=0.2624, pruned_loss=0.04355, over 13559.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.274, pruned_loss=0.0517, over 2633748.25 frames. ], batch size: 38, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:21:36,892 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.527e+01 1.147e+02 1.312e+02 1.685e+02 2.855e+02, threshold=2.624e+02, percent-clipped=1.0 +2024-08-03 19:21:39,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=206895.33333333334, ans=0.0 +2024-08-03 19:21:51,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=206932.0, ans=0.2 +2024-08-03 19:21:57,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=206968.66666666666, ans=0.125 +2024-08-03 19:21:58,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.03 vs. limit=15.0 +2024-08-03 19:22:06,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=207005.33333333334, ans=0.025 +2024-08-03 19:22:06,966 INFO [train.py:1114] (0/4) Epoch 16, batch 1850, loss[loss=0.1967, simple_loss=0.2904, pruned_loss=0.05151, over 13403.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2742, pruned_loss=0.05177, over 2636218.98 frames. 
], batch size: 39, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:22:16,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207042.0, ans=0.125 +2024-08-03 19:22:25,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=207078.66666666666, ans=0.0 +2024-08-03 19:22:43,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207115.33333333334, ans=0.0 +2024-08-03 19:22:58,924 INFO [train.py:1114] (0/4) Epoch 16, batch 1900, loss[loss=0.1944, simple_loss=0.283, pruned_loss=0.05287, over 13304.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2742, pruned_loss=0.05157, over 2639255.22 frames. ], batch size: 40, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:23:19,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=207188.66666666666, ans=0.0 +2024-08-03 19:23:21,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=207188.66666666666, ans=0.1 +2024-08-03 19:23:23,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207225.33333333334, ans=0.1 +2024-08-03 19:23:29,103 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.263e+01 1.127e+02 1.354e+02 1.894e+02 2.950e+02, threshold=2.708e+02, percent-clipped=4.0 +2024-08-03 19:23:56,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=207335.33333333334, ans=0.0 +2024-08-03 19:24:03,051 INFO [train.py:1114] (0/4) Epoch 16, batch 1950, loss[loss=0.197, simple_loss=0.2812, pruned_loss=0.05641, over 13570.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2755, pruned_loss=0.05201, over 2645948.86 frames. ], batch size: 36, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:24:03,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.76 vs. 
limit=8.0 +2024-08-03 19:24:08,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=207372.0, ans=0.125 +2024-08-03 19:24:10,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=207372.0, ans=0.125 +2024-08-03 19:24:21,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=207408.66666666666, ans=0.95 +2024-08-03 19:24:37,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=207482.0, ans=0.125 +2024-08-03 19:24:40,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=207482.0, ans=0.2 +2024-08-03 19:24:47,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207518.66666666666, ans=0.125 +2024-08-03 19:24:50,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207518.66666666666, ans=0.1 +2024-08-03 19:24:53,792 INFO [train.py:1114] (0/4) Epoch 16, batch 2000, loss[loss=0.1629, simple_loss=0.2427, pruned_loss=0.04157, over 13528.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2762, pruned_loss=0.05238, over 2635282.19 frames. ], batch size: 31, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:24:53,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=207555.33333333334, ans=0.2 +2024-08-03 19:24:53,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=207555.33333333334, ans=0.5 +2024-08-03 19:24:56,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-08-03 19:25:00,456 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:25:01,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207555.33333333334, ans=0.125 +2024-08-03 19:25:02,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.09 vs. limit=22.5 +2024-08-03 19:25:06,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=207592.0, ans=0.04949747468305833 +2024-08-03 19:25:09,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.745e+01 1.151e+02 1.402e+02 1.831e+02 3.066e+02, threshold=2.804e+02, percent-clipped=4.0 +2024-08-03 19:25:17,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=207628.66666666666, ans=15.0 +2024-08-03 19:25:24,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.35 vs. 
limit=15.0 +2024-08-03 19:25:29,332 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:25:30,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=207702.0, ans=0.125 +2024-08-03 19:25:31,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.08 vs. limit=6.0 +2024-08-03 19:25:38,899 INFO [train.py:1114] (0/4) Epoch 16, batch 2050, loss[loss=0.1596, simple_loss=0.2463, pruned_loss=0.03647, over 13439.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2753, pruned_loss=0.05213, over 2632024.25 frames. ], batch size: 32, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:25:45,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=207738.66666666666, ans=0.95 +2024-08-03 19:26:11,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=207848.66666666666, ans=0.025 +2024-08-03 19:26:14,747 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:26:22,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=207885.33333333334, ans=0.125 +2024-08-03 19:26:23,789 INFO [train.py:1114] (0/4) Epoch 16, batch 2100, loss[loss=0.1859, simple_loss=0.2749, pruned_loss=0.04845, over 13550.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.275, pruned_loss=0.05202, over 2637990.06 frames. ], batch size: 37, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:26:38,691 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.022e+01 1.073e+02 1.217e+02 1.568e+02 3.232e+02, threshold=2.433e+02, percent-clipped=1.0 +2024-08-03 19:26:40,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207995.33333333334, ans=0.1 +2024-08-03 19:26:41,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=207995.33333333334, ans=0.0 +2024-08-03 19:26:41,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=207995.33333333334, ans=0.125 +2024-08-03 19:26:51,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=208032.0, ans=0.125 +2024-08-03 19:27:04,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208068.66666666666, ans=0.1 +2024-08-03 19:27:10,303 INFO [train.py:1114] (0/4) Epoch 16, batch 2150, loss[loss=0.1852, simple_loss=0.2719, pruned_loss=0.04923, over 13563.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2738, pruned_loss=0.05148, over 2646633.98 frames. ], batch size: 36, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:27:17,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=208105.33333333334, ans=0.0 +2024-08-03 19:27:22,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.25 vs. 
limit=15.0 +2024-08-03 19:27:57,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208252.0, ans=0.125 +2024-08-03 19:27:59,568 INFO [train.py:1114] (0/4) Epoch 16, batch 2200, loss[loss=0.1854, simple_loss=0.2738, pruned_loss=0.04853, over 13416.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2738, pruned_loss=0.05139, over 2643713.04 frames. ], batch size: 39, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:28:16,743 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.518e+01 1.187e+02 1.408e+02 1.826e+02 3.967e+02, threshold=2.817e+02, percent-clipped=9.0 +2024-08-03 19:28:19,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.02 vs. limit=12.0 +2024-08-03 19:28:46,820 INFO [train.py:1114] (0/4) Epoch 16, batch 2250, loss[loss=0.1776, simple_loss=0.269, pruned_loss=0.04305, over 13355.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2743, pruned_loss=0.05176, over 2641718.34 frames. ], batch size: 37, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:28:54,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=208472.0, ans=0.125 +2024-08-03 19:29:15,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.99 vs. limit=15.0 +2024-08-03 19:29:18,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208582.0, ans=0.125 +2024-08-03 19:29:21,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208582.0, ans=0.125 +2024-08-03 19:29:26,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=208618.66666666666, ans=0.125 +2024-08-03 19:29:33,615 INFO [train.py:1114] (0/4) Epoch 16, batch 2300, loss[loss=0.1699, simple_loss=0.249, pruned_loss=0.04539, over 13610.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2733, pruned_loss=0.05157, over 2638368.37 frames. 
], batch size: 33, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:29:43,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=208655.33333333334, ans=0.125 +2024-08-03 19:29:46,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208692.0, ans=0.1 +2024-08-03 19:29:54,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.353e+01 1.164e+02 1.344e+02 1.643e+02 2.956e+02, threshold=2.688e+02, percent-clipped=1.0 +2024-08-03 19:30:06,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=208765.33333333334, ans=0.125 +2024-08-03 19:30:07,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=208765.33333333334, ans=0.025 +2024-08-03 19:30:10,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208765.33333333334, ans=0.125 +2024-08-03 19:30:10,531 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=5.541e-03 +2024-08-03 19:30:14,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208802.0, ans=0.125 +2024-08-03 19:30:17,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0 +2024-08-03 19:30:18,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=208802.0, ans=0.0 +2024-08-03 19:30:21,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.38 vs. limit=22.5 +2024-08-03 19:30:24,662 INFO [train.py:1114] (0/4) Epoch 16, batch 2350, loss[loss=0.1978, simple_loss=0.2892, pruned_loss=0.05318, over 13545.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2734, pruned_loss=0.05142, over 2640423.73 frames. ], batch size: 38, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:30:30,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=208838.66666666666, ans=0.2 +2024-08-03 19:30:54,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=208912.0, ans=0.0 +2024-08-03 19:31:14,371 INFO [train.py:1114] (0/4) Epoch 16, batch 2400, loss[loss=0.1732, simple_loss=0.2595, pruned_loss=0.04351, over 13537.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2742, pruned_loss=0.05195, over 2641658.70 frames. ], batch size: 35, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:31:33,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.31 vs. 
limit=15.0 +2024-08-03 19:31:40,585 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.713e+01 1.195e+02 1.361e+02 1.735e+02 2.883e+02, threshold=2.722e+02, percent-clipped=1.0 +2024-08-03 19:31:40,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=209058.66666666666, ans=0.125 +2024-08-03 19:31:48,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=11.76 vs. limit=12.0 +2024-08-03 19:31:48,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.02 vs. limit=22.5 +2024-08-03 19:32:18,679 INFO [train.py:1114] (0/4) Epoch 16, batch 2450, loss[loss=0.2079, simple_loss=0.2944, pruned_loss=0.06072, over 13360.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2755, pruned_loss=0.05233, over 2631362.82 frames. ], batch size: 37, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:32:21,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209205.33333333334, ans=0.125 +2024-08-03 19:32:44,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=209278.66666666666, ans=0.125 +2024-08-03 19:32:45,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=209315.33333333334, ans=0.2 +2024-08-03 19:32:54,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209352.0, ans=0.125 +2024-08-03 19:32:57,626 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:33:03,135 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:33:03,796 INFO [train.py:1114] (0/4) Epoch 16, batch 2500, loss[loss=0.235, simple_loss=0.3235, pruned_loss=0.07328, over 13413.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2751, pruned_loss=0.05224, over 2635370.81 frames. ], batch size: 39, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:33:05,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.61 vs. limit=22.5 +2024-08-03 19:33:16,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=209425.33333333334, ans=0.125 +2024-08-03 19:33:21,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.166e+01 1.109e+02 1.260e+02 1.584e+02 2.146e+02, threshold=2.521e+02, percent-clipped=0.0 +2024-08-03 19:33:24,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=209462.0, ans=0.2 +2024-08-03 19:33:41,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=209535.33333333334, ans=0.025 +2024-08-03 19:33:43,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=209535.33333333334, ans=10.0 +2024-08-03 19:33:50,670 INFO [train.py:1114] (0/4) Epoch 16, batch 2550, loss[loss=0.1813, simple_loss=0.2604, pruned_loss=0.0511, over 13533.00 frames. 
], tot_loss[loss=0.19, simple_loss=0.2755, pruned_loss=0.05223, over 2637818.40 frames. ], batch size: 31, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:34:08,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209608.66666666666, ans=0.0 +2024-08-03 19:34:11,003 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.33 vs. limit=22.5 +2024-08-03 19:34:20,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209682.0, ans=0.125 +2024-08-03 19:34:27,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209718.66666666666, ans=0.125 +2024-08-03 19:34:29,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-03 19:34:33,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=209718.66666666666, ans=0.125 +2024-08-03 19:34:36,107 INFO [train.py:1114] (0/4) Epoch 16, batch 2600, loss[loss=0.1823, simple_loss=0.2704, pruned_loss=0.04713, over 13573.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2755, pruned_loss=0.05223, over 2636590.68 frames. ], batch size: 36, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:34:44,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=209792.0, ans=0.125 +2024-08-03 19:34:47,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.33 vs. limit=22.5 +2024-08-03 19:34:50,513 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.224e+01 1.145e+02 1.272e+02 1.680e+02 2.511e+02, threshold=2.545e+02, percent-clipped=0.0 +2024-08-03 19:35:03,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209865.33333333334, ans=0.0 +2024-08-03 19:35:05,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209865.33333333334, ans=0.0 +2024-08-03 19:35:18,983 INFO [train.py:1114] (0/4) Epoch 16, batch 2650, loss[loss=0.1924, simple_loss=0.2825, pruned_loss=0.0511, over 13348.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2753, pruned_loss=0.05191, over 2639317.79 frames. ], batch size: 46, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:35:27,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-08-03 19:35:41,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=210012.0, ans=0.025 +2024-08-03 19:35:52,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=210085.33333333334, ans=0.0 +2024-08-03 19:35:52,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.63 vs. 
limit=15.0 +2024-08-03 19:36:02,133 INFO [train.py:1114] (0/4) Epoch 16, batch 2700, loss[loss=0.1731, simple_loss=0.27, pruned_loss=0.03808, over 13541.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2753, pruned_loss=0.05195, over 2636718.06 frames. ], batch size: 40, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:36:16,700 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.859e+01 1.215e+02 1.396e+02 1.823e+02 2.794e+02, threshold=2.792e+02, percent-clipped=5.0 +2024-08-03 19:36:19,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=210195.33333333334, ans=0.125 +2024-08-03 19:36:25,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=210195.33333333334, ans=0.2 +2024-08-03 19:36:32,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=210232.0, ans=0.125 +2024-08-03 19:36:33,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=210232.0, ans=0.2 +2024-08-03 19:36:36,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=210268.66666666666, ans=0.0 +2024-08-03 19:36:42,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=210268.66666666666, ans=0.125 +2024-08-03 19:36:45,433 INFO [train.py:1114] (0/4) Epoch 16, batch 2750, loss[loss=0.1864, simple_loss=0.268, pruned_loss=0.05241, over 13336.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2746, pruned_loss=0.05199, over 2634298.95 frames. ], batch size: 34, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:37:02,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.35 vs. limit=15.0 +2024-08-03 19:37:06,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=210378.66666666666, ans=0.125 +2024-08-03 19:37:07,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.32 vs. limit=6.0 +2024-08-03 19:37:08,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210378.66666666666, ans=0.125 +2024-08-03 19:37:11,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=210378.66666666666, ans=0.125 +2024-08-03 19:37:12,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-03 19:37:21,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210452.0, ans=0.1 +2024-08-03 19:37:25,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=210452.0, ans=0.025 +2024-08-03 19:37:31,263 INFO [train.py:1114] (0/4) Epoch 16, batch 2800, loss[loss=0.2332, simple_loss=0.3116, pruned_loss=0.07737, over 8905.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2752, pruned_loss=0.05218, over 2626512.77 frames. 
], batch size: 96, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:37:45,868 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.762e+01 1.112e+02 1.298e+02 1.652e+02 2.703e+02, threshold=2.596e+02, percent-clipped=0.0 +2024-08-03 19:37:47,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=210562.0, ans=0.125 +2024-08-03 19:37:52,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=210562.0, ans=0.5 +2024-08-03 19:37:54,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=210562.0, ans=0.125 +2024-08-03 19:38:06,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=210635.33333333334, ans=0.025 +2024-08-03 19:38:10,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=210635.33333333334, ans=0.0 +2024-08-03 19:38:15,867 INFO [train.py:1114] (0/4) Epoch 16, batch 2850, loss[loss=0.1869, simple_loss=0.2744, pruned_loss=0.04976, over 13368.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2757, pruned_loss=0.05236, over 2620395.66 frames. ], batch size: 35, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:38:18,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=210672.0, ans=0.125 +2024-08-03 19:38:19,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=210672.0, ans=0.125 +2024-08-03 19:38:25,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=210708.66666666666, ans=0.0 +2024-08-03 19:38:32,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210745.33333333334, ans=0.125 +2024-08-03 19:38:40,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=210745.33333333334, ans=0.125 +2024-08-03 19:38:41,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=210782.0, ans=0.95 +2024-08-03 19:38:48,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=210782.0, ans=0.125 +2024-08-03 19:38:55,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=210818.66666666666, ans=0.0 +2024-08-03 19:39:00,435 INFO [train.py:1114] (0/4) Epoch 16, batch 2900, loss[loss=0.1971, simple_loss=0.2833, pruned_loss=0.05544, over 13360.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2767, pruned_loss=0.05232, over 2630941.54 frames. ], batch size: 36, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:39:00,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=210855.33333333334, ans=0.025 +2024-08-03 19:39:02,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.28 vs. 
limit=10.0 +2024-08-03 19:39:12,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.36 vs. limit=22.5 +2024-08-03 19:39:16,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.624e+01 1.079e+02 1.232e+02 1.534e+02 2.946e+02, threshold=2.465e+02, percent-clipped=2.0 +2024-08-03 19:39:22,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=210928.66666666666, ans=0.0 +2024-08-03 19:39:23,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0 +2024-08-03 19:39:29,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.31 vs. limit=6.0 +2024-08-03 19:39:46,450 INFO [train.py:1114] (0/4) Epoch 16, batch 2950, loss[loss=0.1938, simple_loss=0.2709, pruned_loss=0.05841, over 13332.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2756, pruned_loss=0.0524, over 2628795.30 frames. ], batch size: 34, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:39:46,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.17 vs. limit=22.5 +2024-08-03 19:40:04,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-08-03 19:40:17,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=211148.66666666666, ans=0.035 +2024-08-03 19:40:21,821 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:40:23,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211185.33333333334, ans=0.125 +2024-08-03 19:40:28,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=211185.33333333334, ans=0.0 +2024-08-03 19:40:30,437 INFO [train.py:1114] (0/4) Epoch 16, batch 3000, loss[loss=0.1813, simple_loss=0.2728, pruned_loss=0.04495, over 13543.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2753, pruned_loss=0.05231, over 2629664.20 frames. ], batch size: 37, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:40:30,438 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 19:40:41,284 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.2977, 1.9922, 1.6807, 1.9123, 2.1944, 1.1152, 2.3174, 1.7959], + device='cuda:0') +2024-08-03 19:40:42,217 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1717, simple_loss=0.2708, pruned_loss=0.03625, over 944034.00 frames. 
+2024-08-03 19:40:42,218 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 19:40:50,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=211258.66666666666, ans=0.05 +2024-08-03 19:40:53,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=211258.66666666666, ans=0.125 +2024-08-03 19:40:56,642 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.281e+01 1.094e+02 1.222e+02 1.516e+02 2.979e+02, threshold=2.443e+02, percent-clipped=5.0 +2024-08-03 19:40:59,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.92 vs. limit=15.0 +2024-08-03 19:41:00,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=211295.33333333334, ans=0.2 +2024-08-03 19:41:10,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211332.0, ans=0.1 +2024-08-03 19:41:24,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=211368.66666666666, ans=0.0 +2024-08-03 19:41:26,012 INFO [train.py:1114] (0/4) Epoch 16, batch 3050, loss[loss=0.1859, simple_loss=0.2742, pruned_loss=0.04878, over 13525.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2757, pruned_loss=0.05234, over 2626840.21 frames. ], batch size: 35, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:41:29,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=211405.33333333334, ans=0.125 +2024-08-03 19:41:29,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=211405.33333333334, ans=0.0 +2024-08-03 19:41:29,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.55 vs. limit=22.5 +2024-08-03 19:41:35,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=211442.0, ans=0.025 +2024-08-03 19:41:40,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=211442.0, ans=0.025 +2024-08-03 19:41:50,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.38 vs. limit=15.0 +2024-08-03 19:41:54,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.37 vs. limit=12.0 +2024-08-03 19:42:09,488 INFO [train.py:1114] (0/4) Epoch 16, batch 3100, loss[loss=0.2113, simple_loss=0.3002, pruned_loss=0.06115, over 13340.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2753, pruned_loss=0.05198, over 2625726.89 frames. 
], batch size: 46, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:42:15,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=211588.66666666666, ans=0.0 +2024-08-03 19:42:16,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211588.66666666666, ans=0.125 +2024-08-03 19:42:23,786 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.933e+01 1.091e+02 1.250e+02 1.567e+02 2.776e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 19:42:27,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211662.0, ans=0.1 +2024-08-03 19:42:34,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=211698.66666666666, ans=0.07 +2024-08-03 19:42:51,920 INFO [train.py:1114] (0/4) Epoch 16, batch 3150, loss[loss=0.2239, simple_loss=0.3011, pruned_loss=0.07337, over 13318.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2759, pruned_loss=0.05221, over 2627943.11 frames. ], batch size: 49, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:42:59,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.36 vs. limit=22.5 +2024-08-03 19:43:15,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.46 vs. limit=15.0 +2024-08-03 19:43:35,472 INFO [train.py:1114] (0/4) Epoch 16, batch 3200, loss[loss=0.211, simple_loss=0.2931, pruned_loss=0.06441, over 13556.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2759, pruned_loss=0.05225, over 2634232.44 frames. ], batch size: 37, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:43:49,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.470e+01 1.178e+02 1.467e+02 1.849e+02 2.870e+02, threshold=2.934e+02, percent-clipped=4.0 +2024-08-03 19:43:49,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=211992.0, ans=0.0 +2024-08-03 19:43:50,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=211992.0, ans=0.125 +2024-08-03 19:44:08,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=212102.0, ans=0.0 +2024-08-03 19:44:18,725 INFO [train.py:1114] (0/4) Epoch 16, batch 3250, loss[loss=0.1762, simple_loss=0.273, pruned_loss=0.03971, over 13399.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2766, pruned_loss=0.05232, over 2638789.15 frames. ], batch size: 38, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:44:20,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212138.66666666666, ans=0.1 +2024-08-03 19:44:27,690 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.46 vs. 
limit=15.0 +2024-08-03 19:44:47,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=212248.66666666666, ans=0.0 +2024-08-03 19:44:55,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=212285.33333333334, ans=0.125 +2024-08-03 19:45:01,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=212285.33333333334, ans=0.125 +2024-08-03 19:45:01,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212322.0, ans=0.125 +2024-08-03 19:45:02,680 INFO [train.py:1114] (0/4) Epoch 16, batch 3300, loss[loss=0.2218, simple_loss=0.2998, pruned_loss=0.07188, over 12865.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.276, pruned_loss=0.05237, over 2640327.77 frames. ], batch size: 52, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:45:12,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=212358.66666666666, ans=0.125 +2024-08-03 19:45:14,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=212358.66666666666, ans=0.0 +2024-08-03 19:45:16,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212358.66666666666, ans=0.125 +2024-08-03 19:45:17,671 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.171e+02 1.334e+02 1.762e+02 2.468e+02, threshold=2.668e+02, percent-clipped=0.0 +2024-08-03 19:45:24,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=212395.33333333334, ans=0.0 +2024-08-03 19:45:41,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=212468.66666666666, ans=0.025 +2024-08-03 19:45:45,708 INFO [train.py:1114] (0/4) Epoch 16, batch 3350, loss[loss=0.2116, simple_loss=0.295, pruned_loss=0.0641, over 13061.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2761, pruned_loss=0.05236, over 2630662.58 frames. ], batch size: 48, lr: 7.68e-03, grad_scale: 32.0 +2024-08-03 19:45:53,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=212542.0, ans=0.0 +2024-08-03 19:46:03,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=212578.66666666666, ans=0.05 +2024-08-03 19:46:11,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.43 vs. limit=15.0 +2024-08-03 19:46:20,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=15.0 +2024-08-03 19:46:29,959 INFO [train.py:1114] (0/4) Epoch 16, batch 3400, loss[loss=0.1844, simple_loss=0.2637, pruned_loss=0.05257, over 13535.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.276, pruned_loss=0.05235, over 2626678.57 frames. 
], batch size: 31, lr: 7.68e-03, grad_scale: 32.0 +2024-08-03 19:46:44,569 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.890e+01 1.091e+02 1.257e+02 1.485e+02 2.568e+02, threshold=2.513e+02, percent-clipped=0.0 +2024-08-03 19:46:57,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-08-03 19:47:12,577 INFO [train.py:1114] (0/4) Epoch 16, batch 3450, loss[loss=0.1979, simple_loss=0.2887, pruned_loss=0.05356, over 13038.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2764, pruned_loss=0.05243, over 2630109.45 frames. ], batch size: 53, lr: 7.68e-03, grad_scale: 32.0 +2024-08-03 19:47:21,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=212908.66666666666, ans=0.025 +2024-08-03 19:47:26,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=212908.66666666666, ans=0.0 +2024-08-03 19:47:39,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.06 vs. limit=15.0 +2024-08-03 19:47:49,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=213018.66666666666, ans=0.0 +2024-08-03 19:47:55,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.31 vs. limit=6.0 +2024-08-03 19:47:55,632 INFO [train.py:1114] (0/4) Epoch 16, batch 3500, loss[loss=0.1579, simple_loss=0.2424, pruned_loss=0.0367, over 13540.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.276, pruned_loss=0.05277, over 2631156.08 frames. ], batch size: 34, lr: 7.67e-03, grad_scale: 32.0 +2024-08-03 19:47:56,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=213055.33333333334, ans=0.125 +2024-08-03 19:47:57,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213055.33333333334, ans=0.0 +2024-08-03 19:48:03,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213092.0, ans=0.125 +2024-08-03 19:48:08,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=213092.0, ans=0.125 +2024-08-03 19:48:10,728 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.228e+02 1.407e+02 1.881e+02 3.021e+02, threshold=2.813e+02, percent-clipped=7.0 +2024-08-03 19:48:24,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.74 vs. limit=15.0 +2024-08-03 19:48:38,352 INFO [train.py:1114] (0/4) Epoch 16, batch 3550, loss[loss=0.2066, simple_loss=0.291, pruned_loss=0.06112, over 12514.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2777, pruned_loss=0.0534, over 2629323.06 frames. 
], batch size: 58, lr: 7.67e-03, grad_scale: 16.0 +2024-08-03 19:48:40,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=213238.66666666666, ans=0.125 +2024-08-03 19:48:49,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213275.33333333334, ans=0.125 +2024-08-03 19:49:04,223 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.85 vs. limit=12.0 +2024-08-03 19:49:17,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213385.33333333334, ans=0.125 +2024-08-03 19:49:21,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=213385.33333333334, ans=10.0 +2024-08-03 19:49:23,454 INFO [train.py:1114] (0/4) Epoch 16, batch 3600, loss[loss=0.237, simple_loss=0.3122, pruned_loss=0.08086, over 9404.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.282, pruned_loss=0.05722, over 2487459.30 frames. ], batch size: 97, lr: 7.67e-03, grad_scale: 32.0 +2024-08-03 19:49:25,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213422.0, ans=0.125 +2024-08-03 19:49:28,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=213422.0, ans=0.2 +2024-08-03 19:49:39,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.114e+02 1.218e+02 1.319e+02 1.769e+02, threshold=2.437e+02, percent-clipped=0.0 +2024-08-03 19:49:48,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=213495.33333333334, ans=0.125 +2024-08-03 19:49:51,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.01 vs. limit=15.0 +2024-08-03 19:49:56,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213532.0, ans=0.0 +2024-08-03 19:49:57,835 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-16.pt +2024-08-03 19:50:42,661 INFO [train.py:1114] (0/4) Epoch 17, batch 0, loss[loss=0.1742, simple_loss=0.2588, pruned_loss=0.04483, over 13335.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2588, pruned_loss=0.04483, over 13335.00 frames. ], batch size: 33, lr: 7.43e-03, grad_scale: 32.0 +2024-08-03 19:50:42,662 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 19:50:47,822 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5577, 2.9326, 2.5942, 2.7320], device='cuda:0') +2024-08-03 19:50:48,929 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.6650, 3.4881, 3.8875, 3.6189], device='cuda:0') +2024-08-03 19:50:49,239 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0289, 3.3980, 3.4314, 1.7816], device='cuda:0') +2024-08-03 19:50:52,770 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.17, simple_loss=0.2717, pruned_loss=0.03416, over 944034.00 frames. 
+2024-08-03 19:50:52,770 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 19:51:05,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=213605.33333333334, ans=0.125 +2024-08-03 19:51:07,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=213605.33333333334, ans=0.0 +2024-08-03 19:51:08,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.12 vs. limit=10.0 +2024-08-03 19:51:11,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=213605.33333333334, ans=0.2 +2024-08-03 19:51:13,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=213642.0, ans=0.0 +2024-08-03 19:51:29,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=213678.66666666666, ans=0.125 +2024-08-03 19:51:40,075 INFO [train.py:1114] (0/4) Epoch 17, batch 50, loss[loss=0.1606, simple_loss=0.2508, pruned_loss=0.0352, over 13415.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2754, pruned_loss=0.05231, over 576932.19 frames. ], batch size: 32, lr: 7.43e-03, grad_scale: 32.0 +2024-08-03 19:51:40,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213752.0, ans=0.125 +2024-08-03 19:51:48,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=213788.66666666666, ans=0.0 +2024-08-03 19:52:03,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=213825.33333333334, ans=0.125 +2024-08-03 19:52:05,562 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.139e+01 1.157e+02 1.306e+02 1.728e+02 3.229e+02, threshold=2.612e+02, percent-clipped=8.0 +2024-08-03 19:52:16,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213898.66666666666, ans=0.125 +2024-08-03 19:52:18,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=213898.66666666666, ans=0.035 +2024-08-03 19:52:25,608 INFO [train.py:1114] (0/4) Epoch 17, batch 100, loss[loss=0.21, simple_loss=0.291, pruned_loss=0.06446, over 13535.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2773, pruned_loss=0.05281, over 1025271.39 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0 +2024-08-03 19:52:37,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=213972.0, ans=0.125 +2024-08-03 19:52:39,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=213972.0, ans=0.125 +2024-08-03 19:52:47,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.74 vs. 
limit=12.0 +2024-08-03 19:53:09,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214082.0, ans=0.2 +2024-08-03 19:53:13,882 INFO [train.py:1114] (0/4) Epoch 17, batch 150, loss[loss=0.1555, simple_loss=0.2348, pruned_loss=0.03813, over 13424.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2748, pruned_loss=0.05121, over 1386418.11 frames. ], batch size: 32, lr: 7.42e-03, grad_scale: 32.0 +2024-08-03 19:53:15,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=214118.66666666666, ans=0.125 +2024-08-03 19:53:18,893 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.89 vs. limit=6.0 +2024-08-03 19:53:39,026 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.782e+01 1.100e+02 1.230e+02 1.473e+02 3.065e+02, threshold=2.460e+02, percent-clipped=1.0 +2024-08-03 19:53:42,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.26 vs. limit=15.0 +2024-08-03 19:53:42,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=214228.66666666666, ans=0.0 +2024-08-03 19:53:44,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214228.66666666666, ans=0.2 +2024-08-03 19:53:48,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=214228.66666666666, ans=0.0 +2024-08-03 19:53:52,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214265.33333333334, ans=0.125 +2024-08-03 19:53:54,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=214265.33333333334, ans=0.125 +2024-08-03 19:53:56,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.32 vs. limit=10.0 +2024-08-03 19:53:57,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214265.33333333334, ans=0.125 +2024-08-03 19:53:58,953 INFO [train.py:1114] (0/4) Epoch 17, batch 200, loss[loss=0.2198, simple_loss=0.3097, pruned_loss=0.06495, over 12368.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2727, pruned_loss=0.05055, over 1665291.15 frames. ], batch size: 58, lr: 7.42e-03, grad_scale: 32.0 +2024-08-03 19:54:03,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214302.0, ans=0.1 +2024-08-03 19:54:07,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=214338.66666666666, ans=0.125 +2024-08-03 19:54:11,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=214338.66666666666, ans=0.125 +2024-08-03 19:54:11,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.49 vs. 
limit=15.0 +2024-08-03 19:54:15,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214338.66666666666, ans=0.125 +2024-08-03 19:54:43,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=214448.66666666666, ans=0.0 +2024-08-03 19:54:46,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214448.66666666666, ans=0.1 +2024-08-03 19:54:46,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214448.66666666666, ans=0.125 +2024-08-03 19:54:48,674 INFO [train.py:1114] (0/4) Epoch 17, batch 250, loss[loss=0.2036, simple_loss=0.2863, pruned_loss=0.06041, over 13340.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2734, pruned_loss=0.0507, over 1883996.38 frames. ], batch size: 46, lr: 7.42e-03, grad_scale: 32.0 +2024-08-03 19:54:56,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=214485.33333333334, ans=0.04949747468305833 +2024-08-03 19:55:05,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=214522.0, ans=0.05 +2024-08-03 19:55:14,671 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.099e+02 1.340e+02 1.709e+02 3.717e+02, threshold=2.680e+02, percent-clipped=7.0 +2024-08-03 19:55:16,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=214595.33333333334, ans=0.125 +2024-08-03 19:55:27,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.69 vs. limit=15.0 +2024-08-03 19:55:33,733 INFO [train.py:1114] (0/4) Epoch 17, batch 300, loss[loss=0.2006, simple_loss=0.2922, pruned_loss=0.0545, over 13445.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2728, pruned_loss=0.05049, over 2050552.19 frames. ], batch size: 42, lr: 7.42e-03, grad_scale: 16.0 +2024-08-03 19:55:34,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214668.66666666666, ans=0.125 +2024-08-03 19:55:35,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=214668.66666666666, ans=0.0 +2024-08-03 19:55:43,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.96 vs. limit=10.0 +2024-08-03 19:55:56,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214742.0, ans=0.125 +2024-08-03 19:56:20,713 INFO [train.py:1114] (0/4) Epoch 17, batch 350, loss[loss=0.1589, simple_loss=0.2374, pruned_loss=0.0402, over 13572.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.273, pruned_loss=0.0503, over 2181550.13 frames. 
], batch size: 33, lr: 7.41e-03, grad_scale: 16.0 +2024-08-03 19:56:20,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214852.0, ans=0.1 +2024-08-03 19:56:44,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.44 vs. limit=15.0 +2024-08-03 19:56:49,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.060e+01 1.097e+02 1.266e+02 1.426e+02 2.641e+02, threshold=2.532e+02, percent-clipped=0.0 +2024-08-03 19:57:01,280 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:57:02,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214998.66666666666, ans=0.2 +2024-08-03 19:57:06,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.72 vs. limit=15.0 +2024-08-03 19:57:08,540 INFO [train.py:1114] (0/4) Epoch 17, batch 400, loss[loss=0.184, simple_loss=0.2727, pruned_loss=0.04766, over 13355.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.273, pruned_loss=0.05037, over 2285670.43 frames. ], batch size: 37, lr: 7.41e-03, grad_scale: 32.0 +2024-08-03 19:57:26,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=215108.66666666666, ans=0.2 +2024-08-03 19:57:42,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=215145.33333333334, ans=0.2 +2024-08-03 19:57:52,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=215182.0, ans=0.1 +2024-08-03 19:57:53,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215218.66666666666, ans=0.1 +2024-08-03 19:57:54,033 INFO [train.py:1114] (0/4) Epoch 17, batch 450, loss[loss=0.1798, simple_loss=0.2732, pruned_loss=0.04315, over 13556.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2738, pruned_loss=0.05053, over 2360603.19 frames. ], batch size: 38, lr: 7.41e-03, grad_scale: 32.0 +2024-08-03 19:58:00,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-08-03 19:58:04,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=215255.33333333334, ans=0.0 +2024-08-03 19:58:05,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=215255.33333333334, ans=0.125 +2024-08-03 19:58:23,993 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.110e+02 1.275e+02 1.603e+02 2.813e+02, threshold=2.549e+02, percent-clipped=2.0 +2024-08-03 19:58:32,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.73 vs. 
limit=22.5 +2024-08-03 19:58:33,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215365.33333333334, ans=0.125 +2024-08-03 19:58:34,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.77 vs. limit=12.0 +2024-08-03 19:58:35,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=215365.33333333334, ans=0.025 +2024-08-03 19:58:36,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=215365.33333333334, ans=0.025 +2024-08-03 19:58:39,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=215365.33333333334, ans=15.0 +2024-08-03 19:58:43,008 INFO [train.py:1114] (0/4) Epoch 17, batch 500, loss[loss=0.204, simple_loss=0.2928, pruned_loss=0.05761, over 13447.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2729, pruned_loss=0.04994, over 2426113.28 frames. ], batch size: 43, lr: 7.40e-03, grad_scale: 32.0 +2024-08-03 19:58:44,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215402.0, ans=0.125 +2024-08-03 19:58:52,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=215438.66666666666, ans=0.125 +2024-08-03 19:58:52,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=215438.66666666666, ans=0.2 +2024-08-03 19:59:07,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=215475.33333333334, ans=0.125 +2024-08-03 19:59:16,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=215512.0, ans=0.125 +2024-08-03 19:59:24,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=215548.66666666666, ans=0.125 +2024-08-03 19:59:27,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.83 vs. limit=22.5 +2024-08-03 19:59:28,255 INFO [train.py:1114] (0/4) Epoch 17, batch 550, loss[loss=0.2163, simple_loss=0.2969, pruned_loss=0.06788, over 13334.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2725, pruned_loss=0.04996, over 2469201.93 frames. ], batch size: 49, lr: 7.40e-03, grad_scale: 32.0 +2024-08-03 19:59:32,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215585.33333333334, ans=0.0 +2024-08-03 19:59:57,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.283e+01 1.155e+02 1.328e+02 1.624e+02 2.790e+02, threshold=2.656e+02, percent-clipped=3.0 +2024-08-03 20:00:11,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215732.0, ans=0.125 +2024-08-03 20:00:18,066 INFO [train.py:1114] (0/4) Epoch 17, batch 600, loss[loss=0.1802, simple_loss=0.2674, pruned_loss=0.04652, over 13331.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.273, pruned_loss=0.05033, over 2507773.45 frames. 
], batch size: 46, lr: 7.40e-03, grad_scale: 32.0 +2024-08-03 20:00:18,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215768.66666666666, ans=0.1 +2024-08-03 20:00:28,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215805.33333333334, ans=0.1 +2024-08-03 20:00:55,018 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:00:55,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=215915.33333333334, ans=0.0 +2024-08-03 20:00:58,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215915.33333333334, ans=0.125 +2024-08-03 20:01:02,893 INFO [train.py:1114] (0/4) Epoch 17, batch 650, loss[loss=0.1947, simple_loss=0.2877, pruned_loss=0.05086, over 13548.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2721, pruned_loss=0.04975, over 2542886.16 frames. ], batch size: 37, lr: 7.39e-03, grad_scale: 32.0 +2024-08-03 20:01:07,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=215952.0, ans=0.07 +2024-08-03 20:01:22,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.38 vs. limit=15.0 +2024-08-03 20:01:29,732 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.069e+01 1.111e+02 1.221e+02 1.677e+02 3.173e+02, threshold=2.441e+02, percent-clipped=3.0 +2024-08-03 20:01:31,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=216062.0, ans=0.2 +2024-08-03 20:01:34,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=216062.0, ans=0.2 +2024-08-03 20:01:51,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216135.33333333334, ans=0.1 +2024-08-03 20:01:52,171 INFO [train.py:1114] (0/4) Epoch 17, batch 700, loss[loss=0.189, simple_loss=0.2726, pruned_loss=0.05266, over 13535.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2728, pruned_loss=0.05003, over 2564140.13 frames. ], batch size: 35, lr: 7.39e-03, grad_scale: 16.0 +2024-08-03 20:02:12,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.42 vs. limit=8.0 +2024-08-03 20:02:20,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.44 vs. limit=15.0 +2024-08-03 20:02:28,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=216282.0, ans=0.0 +2024-08-03 20:02:35,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=216282.0, ans=0.04949747468305833 +2024-08-03 20:02:37,437 INFO [train.py:1114] (0/4) Epoch 17, batch 750, loss[loss=0.1833, simple_loss=0.2725, pruned_loss=0.0471, over 13366.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2719, pruned_loss=0.04966, over 2581789.92 frames. 
], batch size: 37, lr: 7.39e-03, grad_scale: 16.0 +2024-08-03 20:02:38,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=216318.66666666666, ans=0.125 +2024-08-03 20:02:39,689 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0 +2024-08-03 20:03:05,010 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.455e+01 1.109e+02 1.251e+02 1.578e+02 2.500e+02, threshold=2.502e+02, percent-clipped=1.0 +2024-08-03 20:03:20,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.27 vs. limit=12.0 +2024-08-03 20:03:22,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216502.0, ans=0.125 +2024-08-03 20:03:23,163 INFO [train.py:1114] (0/4) Epoch 17, batch 800, loss[loss=0.1581, simple_loss=0.2408, pruned_loss=0.03773, over 13343.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2715, pruned_loss=0.04965, over 2596463.94 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 32.0 +2024-08-03 20:03:31,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=216538.66666666666, ans=0.025 +2024-08-03 20:03:37,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=216538.66666666666, ans=0.125 +2024-08-03 20:03:39,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.16 vs. limit=15.0 +2024-08-03 20:03:47,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=216575.33333333334, ans=0.5 +2024-08-03 20:03:50,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0 +2024-08-03 20:03:50,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=216575.33333333334, ans=0.125 +2024-08-03 20:03:55,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.30 vs. limit=10.0 +2024-08-03 20:04:11,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=216648.66666666666, ans=0.0 +2024-08-03 20:04:13,150 INFO [train.py:1114] (0/4) Epoch 17, batch 850, loss[loss=0.1767, simple_loss=0.2742, pruned_loss=0.03959, over 13324.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2711, pruned_loss=0.04947, over 2608444.99 frames. 
], batch size: 40, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:04:16,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216685.33333333334, ans=0.1
+2024-08-03 20:04:17,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=216685.33333333334, ans=0.125
+2024-08-03 20:04:25,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=216722.0, ans=0.0
+2024-08-03 20:04:35,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=22.5
+2024-08-03 20:04:36,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=216758.66666666666, ans=0.0
+2024-08-03 20:04:40,990 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.726e+01 1.085e+02 1.274e+02 1.570e+02 2.707e+02, threshold=2.548e+02, percent-clipped=2.0
+2024-08-03 20:04:43,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=216795.33333333334, ans=0.2
+2024-08-03 20:04:58,434 INFO [train.py:1114] (0/4) Epoch 17, batch 900, loss[loss=0.1622, simple_loss=0.2409, pruned_loss=0.04178, over 13330.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2719, pruned_loss=0.05008, over 2611007.23 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:05:03,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=216868.66666666666, ans=0.0
+2024-08-03 20:05:04,383 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.34 vs. limit=15.0
+2024-08-03 20:05:21,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=216942.0, ans=0.0
+2024-08-03 20:05:35,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=216978.66666666666, ans=0.0
+2024-08-03 20:05:47,231 INFO [train.py:1114] (0/4) Epoch 17, batch 950, loss[loss=0.1573, simple_loss=0.2425, pruned_loss=0.03605, over 13532.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2721, pruned_loss=0.05015, over 2612141.64 frames. ], batch size: 34, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:05:51,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=217052.0, ans=0.125
+2024-08-03 20:06:08,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=217125.33333333334, ans=0.125
+2024-08-03 20:06:12,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217125.33333333334, ans=0.125
+2024-08-03 20:06:15,645 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.469e+01 1.104e+02 1.288e+02 1.565e+02 2.337e+02, threshold=2.575e+02, percent-clipped=0.0
+2024-08-03 20:06:17,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0
+2024-08-03 20:06:18,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217162.0, ans=0.1
+2024-08-03 20:06:26,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217198.66666666666, ans=0.1
+2024-08-03 20:06:26,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217198.66666666666, ans=0.1
+2024-08-03 20:06:33,328 INFO [train.py:1114] (0/4) Epoch 17, batch 1000, loss[loss=0.1724, simple_loss=0.2579, pruned_loss=0.04343, over 13352.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2727, pruned_loss=0.0505, over 2611297.39 frames. ], batch size: 35, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:06:40,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217235.33333333334, ans=0.125
+2024-08-03 20:07:07,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=217345.33333333334, ans=0.2
+2024-08-03 20:07:20,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=217382.0, ans=6.0
+2024-08-03 20:07:23,475 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:07:27,019 INFO [train.py:1114] (0/4) Epoch 17, batch 1050, loss[loss=0.1923, simple_loss=0.2817, pruned_loss=0.05147, over 13565.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2723, pruned_loss=0.05062, over 2615828.93 frames. ], batch size: 39, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:07:31,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217418.66666666666, ans=0.125
+2024-08-03 20:07:40,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=217455.33333333334, ans=0.0
+2024-08-03 20:07:43,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0
+2024-08-03 20:07:53,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=217528.66666666666, ans=0.2
+2024-08-03 20:07:55,672 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.928e+01 1.069e+02 1.230e+02 1.488e+02 2.448e+02, threshold=2.459e+02, percent-clipped=0.0
+2024-08-03 20:07:55,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=217528.66666666666, ans=0.0
+2024-08-03 20:07:56,266 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=15.0
+2024-08-03 20:08:09,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=217565.33333333334, ans=0.125
+2024-08-03 20:08:12,127 INFO [train.py:1114] (0/4) Epoch 17, batch 1100, loss[loss=0.2052, simple_loss=0.2881, pruned_loss=0.06108, over 13551.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2719, pruned_loss=0.05041, over 2620203.45 frames. ], batch size: 36, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:08:19,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217602.0, ans=0.1
+2024-08-03 20:08:22,499 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:08:40,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217712.0, ans=0.125
+2024-08-03 20:08:44,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=217712.0, ans=0.0
+2024-08-03 20:08:57,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=217748.66666666666, ans=0.07
+2024-08-03 20:08:59,123 INFO [train.py:1114] (0/4) Epoch 17, batch 1150, loss[loss=0.1877, simple_loss=0.2723, pruned_loss=0.05153, over 13562.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2719, pruned_loss=0.05027, over 2620027.86 frames. ], batch size: 36, lr: 7.36e-03, grad_scale: 8.0
+2024-08-03 20:09:17,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=217822.0, ans=0.125
+2024-08-03 20:09:24,397 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.05 vs. limit=10.0
+2024-08-03 20:09:30,348 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.508e+01 1.111e+02 1.257e+02 1.521e+02 2.461e+02, threshold=2.515e+02, percent-clipped=1.0
+2024-08-03 20:09:32,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=217895.33333333334, ans=0.0
+2024-08-03 20:09:46,457 INFO [train.py:1114] (0/4) Epoch 17, batch 1200, loss[loss=0.188, simple_loss=0.2783, pruned_loss=0.04889, over 13567.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2731, pruned_loss=0.05042, over 2616207.43 frames. ], batch size: 39, lr: 7.36e-03, grad_scale: 16.0
+2024-08-03 20:10:16,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0
+2024-08-03 20:10:27,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218115.33333333334, ans=0.1
+2024-08-03 20:10:30,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=218115.33333333334, ans=0.0
+2024-08-03 20:10:32,177 INFO [train.py:1114] (0/4) Epoch 17, batch 1250, loss[loss=0.2125, simple_loss=0.2984, pruned_loss=0.06333, over 13469.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2739, pruned_loss=0.05062, over 2628281.11 frames. ], batch size: 42, lr: 7.36e-03, grad_scale: 16.0
+2024-08-03 20:10:33,183 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:10:40,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=218188.66666666666, ans=0.025
+2024-08-03 20:10:50,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=218188.66666666666, ans=0.0
+2024-08-03 20:11:01,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218225.33333333334, ans=0.1
+2024-08-03 20:11:05,413 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.654e+01 1.175e+02 1.463e+02 1.905e+02 2.984e+02, threshold=2.925e+02, percent-clipped=5.0
+2024-08-03 20:11:05,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=218262.0, ans=0.125
+2024-08-03 20:11:07,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.83 vs. limit=6.0
+2024-08-03 20:11:09,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=218262.0, ans=0.0
+2024-08-03 20:11:21,611 INFO [train.py:1114] (0/4) Epoch 17, batch 1300, loss[loss=0.1854, simple_loss=0.2757, pruned_loss=0.04756, over 12856.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2731, pruned_loss=0.0502, over 2630751.49 frames. ], batch size: 52, lr: 7.35e-03, grad_scale: 16.0
+2024-08-03 20:11:21,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218335.33333333334, ans=0.1
+2024-08-03 20:11:26,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218335.33333333334, ans=0.125
+2024-08-03 20:11:30,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=218372.0, ans=0.125
+2024-08-03 20:11:48,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=218445.33333333334, ans=0.07
+2024-08-03 20:12:06,824 INFO [train.py:1114] (0/4) Epoch 17, batch 1350, loss[loss=0.1959, simple_loss=0.2798, pruned_loss=0.05598, over 13536.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2733, pruned_loss=0.05045, over 2638632.80 frames. ], batch size: 37, lr: 7.35e-03, grad_scale: 16.0
+2024-08-03 20:12:08,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=218518.66666666666, ans=0.0
+2024-08-03 20:12:26,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218592.0, ans=0.1
+2024-08-03 20:12:29,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218592.0, ans=0.125
+2024-08-03 20:12:37,644 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.168e+02 1.323e+02 1.597e+02 2.527e+02, threshold=2.645e+02, percent-clipped=0.0
+2024-08-03 20:12:37,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=218628.66666666666, ans=0.125
+2024-08-03 20:12:49,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.15 vs. limit=22.5
+2024-08-03 20:12:51,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=218665.33333333334, ans=0.025
+2024-08-03 20:12:56,123 INFO [train.py:1114] (0/4) Epoch 17, batch 1400, loss[loss=0.17, simple_loss=0.251, pruned_loss=0.04455, over 13259.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.273, pruned_loss=0.05031, over 2642240.14 frames. ], batch size: 31, lr: 7.35e-03, grad_scale: 16.0
+2024-08-03 20:13:27,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=218812.0, ans=0.0
+2024-08-03 20:13:37,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=218848.66666666666, ans=0.125
+2024-08-03 20:13:37,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=218848.66666666666, ans=0.2
+2024-08-03 20:13:38,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218848.66666666666, ans=0.125
+2024-08-03 20:13:41,297 INFO [train.py:1114] (0/4) Epoch 17, batch 1450, loss[loss=0.2005, simple_loss=0.2898, pruned_loss=0.05561, over 13433.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2725, pruned_loss=0.05001, over 2641370.34 frames. ], batch size: 43, lr: 7.34e-03, grad_scale: 16.0
+2024-08-03 20:13:46,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218885.33333333334, ans=0.125
+2024-08-03 20:14:10,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.093e+02 1.261e+02 1.597e+02 2.531e+02, threshold=2.522e+02, percent-clipped=0.0
+2024-08-03 20:14:19,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219032.0, ans=0.125
+2024-08-03 20:14:20,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.84 vs. limit=22.5
+2024-08-03 20:14:21,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=219032.0, ans=0.125
+2024-08-03 20:14:22,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=219032.0, ans=0.0
+2024-08-03 20:14:23,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219032.0, ans=0.1
+2024-08-03 20:14:29,633 INFO [train.py:1114] (0/4) Epoch 17, batch 1500, loss[loss=0.1795, simple_loss=0.2735, pruned_loss=0.04279, over 13386.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2726, pruned_loss=0.04967, over 2641235.93 frames. ], batch size: 39, lr: 7.34e-03, grad_scale: 16.0
+2024-08-03 20:14:30,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.52 vs. limit=22.5
+2024-08-03 20:14:41,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219105.33333333334, ans=0.125
+2024-08-03 20:14:52,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=219142.0, ans=0.09899494936611666
+2024-08-03 20:14:53,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.42 vs. limit=5.0
+2024-08-03 20:15:14,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=219215.33333333334, ans=0.125
+2024-08-03 20:15:15,949 INFO [train.py:1114] (0/4) Epoch 17, batch 1550, loss[loss=0.1913, simple_loss=0.2879, pruned_loss=0.04734, over 13385.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.273, pruned_loss=0.05018, over 2632372.39 frames. ], batch size: 41, lr: 7.34e-03, grad_scale: 16.0
+2024-08-03 20:15:21,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=219252.0, ans=0.2
+2024-08-03 20:15:26,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=219288.66666666666, ans=0.125
+2024-08-03 20:15:41,725 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:15:45,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.134e+01 1.116e+02 1.285e+02 1.566e+02 3.410e+02, threshold=2.570e+02, percent-clipped=5.0
+2024-08-03 20:15:50,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=219362.0, ans=0.125
+2024-08-03 20:16:02,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219435.33333333334, ans=0.1
+2024-08-03 20:16:03,436 INFO [train.py:1114] (0/4) Epoch 17, batch 1600, loss[loss=0.1849, simple_loss=0.2814, pruned_loss=0.04416, over 13567.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2728, pruned_loss=0.05018, over 2625168.82 frames. ], batch size: 39, lr: 7.34e-03, grad_scale: 32.0
+2024-08-03 20:16:28,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.70 vs. limit=22.5
+2024-08-03 20:16:38,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.60 vs. limit=12.0
+2024-08-03 20:16:39,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=219545.33333333334, ans=0.5
+2024-08-03 20:16:49,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=219582.0, ans=0.0
+2024-08-03 20:16:55,682 INFO [train.py:1114] (0/4) Epoch 17, batch 1650, loss[loss=0.2073, simple_loss=0.3023, pruned_loss=0.05612, over 13330.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2733, pruned_loss=0.0504, over 2621877.63 frames. ], batch size: 40, lr: 7.33e-03, grad_scale: 32.0
+2024-08-03 20:16:56,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=219618.66666666666, ans=0.125
+2024-08-03 20:17:18,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219692.0, ans=0.125
+2024-08-03 20:17:24,622 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.842e+01 1.100e+02 1.247e+02 1.816e+02 3.503e+02, threshold=2.494e+02, percent-clipped=6.0
+2024-08-03 20:17:28,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=219728.66666666666, ans=0.2
+2024-08-03 20:17:30,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=219728.66666666666, ans=0.0
+2024-08-03 20:17:41,083 INFO [train.py:1114] (0/4) Epoch 17, batch 1700, loss[loss=0.1793, simple_loss=0.2508, pruned_loss=0.05391, over 13258.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2734, pruned_loss=0.05053, over 2630621.70 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0
+2024-08-03 20:17:54,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219838.66666666666, ans=0.1
+2024-08-03 20:18:02,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219875.33333333334, ans=0.125
+2024-08-03 20:18:11,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=219912.0, ans=0.0
+2024-08-03 20:18:29,595 INFO [train.py:1114] (0/4) Epoch 17, batch 1750, loss[loss=0.1594, simple_loss=0.2424, pruned_loss=0.03814, over 13509.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2731, pruned_loss=0.0506, over 2634235.05 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0
+2024-08-03 20:18:32,545 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-60000.pt
+2024-08-03 20:18:43,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.22 vs. limit=22.5
+2024-08-03 20:18:47,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=220022.0, ans=0.125
+2024-08-03 20:18:48,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=220022.0, ans=0.0
+2024-08-03 20:19:00,586 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.113e+02 1.270e+02 1.558e+02 2.524e+02, threshold=2.540e+02, percent-clipped=1.0
+2024-08-03 20:19:11,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=220132.0, ans=0.0
+2024-08-03 20:19:15,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=15.0
+2024-08-03 20:19:16,796 INFO [train.py:1114] (0/4) Epoch 17, batch 1800, loss[loss=0.1912, simple_loss=0.2775, pruned_loss=0.05246, over 13548.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.273, pruned_loss=0.05036, over 2635234.76 frames. ], batch size: 38, lr: 7.32e-03, grad_scale: 32.0
+2024-08-03 20:20:06,473 INFO [train.py:1114] (0/4) Epoch 17, batch 1850, loss[loss=0.1809, simple_loss=0.2814, pruned_loss=0.04023, over 13375.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2719, pruned_loss=0.04978, over 2637494.73 frames. ], batch size: 39, lr: 7.32e-03, grad_scale: 32.0
+2024-08-03 20:20:13,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=220352.0, ans=0.0
+2024-08-03 20:20:30,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220425.33333333334, ans=0.125
+2024-08-03 20:20:35,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.176e+01 1.159e+02 1.677e+02 2.408e+02 3.560e+02, threshold=3.354e+02, percent-clipped=19.0
+2024-08-03 20:20:42,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=220498.66666666666, ans=0.125
+2024-08-03 20:20:45,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=220498.66666666666, ans=0.025
+2024-08-03 20:20:52,104 INFO [train.py:1114] (0/4) Epoch 17, batch 1900, loss[loss=0.1967, simple_loss=0.2935, pruned_loss=0.04991, over 13299.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2724, pruned_loss=0.04965, over 2640329.21 frames. ], batch size: 40, lr: 7.32e-03, grad_scale: 32.0
+2024-08-03 20:20:55,874 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:20:57,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.97 vs. limit=10.0
+2024-08-03 20:20:57,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=220535.33333333334, ans=0.0
+2024-08-03 20:21:03,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0
+2024-08-03 20:21:10,397 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:21:16,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=220608.66666666666, ans=0.1
+2024-08-03 20:21:17,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=220608.66666666666, ans=0.125
+2024-08-03 20:21:31,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.76 vs. limit=12.0
+2024-08-03 20:21:38,838 INFO [train.py:1114] (0/4) Epoch 17, batch 1950, loss[loss=0.1876, simple_loss=0.2696, pruned_loss=0.0528, over 13568.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2736, pruned_loss=0.05004, over 2646849.88 frames. ], batch size: 36, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:21:44,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=220718.66666666666, ans=0.125
+2024-08-03 20:21:45,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=220718.66666666666, ans=0.07
+2024-08-03 20:21:49,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=220755.33333333334, ans=0.125
+2024-08-03 20:21:51,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220755.33333333334, ans=0.1
+2024-08-03 20:22:10,356 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.292e+01 1.160e+02 1.302e+02 1.581e+02 2.993e+02, threshold=2.604e+02, percent-clipped=0.0
+2024-08-03 20:22:15,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=220828.66666666666, ans=0.0
+2024-08-03 20:22:16,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.11 vs. limit=15.0
+2024-08-03 20:22:23,997 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:22:26,620 INFO [train.py:1114] (0/4) Epoch 17, batch 2000, loss[loss=0.1685, simple_loss=0.2476, pruned_loss=0.04472, over 13521.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2742, pruned_loss=0.05056, over 2636206.18 frames. ], batch size: 31, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:22:44,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=220975.33333333334, ans=0.2
+2024-08-03 20:22:44,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=220975.33333333334, ans=0.025
+2024-08-03 20:22:51,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=220975.33333333334, ans=0.0
+2024-08-03 20:23:00,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=221012.0, ans=0.125
+2024-08-03 20:23:01,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=221012.0, ans=0.125
+2024-08-03 20:23:01,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=221012.0, ans=0.0
+2024-08-03 20:23:14,199 INFO [train.py:1114] (0/4) Epoch 17, batch 2050, loss[loss=0.1632, simple_loss=0.2427, pruned_loss=0.04191, over 13445.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2736, pruned_loss=0.05064, over 2633265.29 frames. ], batch size: 32, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:23:42,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.04 vs. limit=10.0
+2024-08-03 20:23:45,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.513e+01 1.116e+02 1.230e+02 1.630e+02 2.618e+02, threshold=2.461e+02, percent-clipped=1.0
+2024-08-03 20:23:50,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.46 vs. limit=15.0
+2024-08-03 20:23:52,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=221232.0, ans=0.125
+2024-08-03 20:23:57,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221232.0, ans=0.125
+2024-08-03 20:23:58,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=221232.0, ans=0.0
+2024-08-03 20:24:01,312 INFO [train.py:1114] (0/4) Epoch 17, batch 2100, loss[loss=0.1955, simple_loss=0.2794, pruned_loss=0.05581, over 13543.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2729, pruned_loss=0.05002, over 2638889.76 frames. ], batch size: 37, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:24:17,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=221305.33333333334, ans=0.025
+2024-08-03 20:24:20,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=221342.0, ans=0.0
+2024-08-03 20:24:22,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.20 vs. limit=15.0
+2024-08-03 20:24:27,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=221378.66666666666, ans=0.125
+2024-08-03 20:24:41,980 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0
+2024-08-03 20:24:45,901 INFO [train.py:1114] (0/4) Epoch 17, batch 2150, loss[loss=0.1732, simple_loss=0.2634, pruned_loss=0.04148, over 13557.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2721, pruned_loss=0.04978, over 2647333.27 frames. ], batch size: 36, lr: 7.30e-03, grad_scale: 32.0
+2024-08-03 20:24:47,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=221452.0, ans=0.125
+2024-08-03 20:25:03,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=221488.66666666666, ans=0.025
+2024-08-03 20:25:06,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=221525.33333333334, ans=0.0
+2024-08-03 20:25:06,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=221525.33333333334, ans=0.125
+2024-08-03 20:25:07,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0
+2024-08-03 20:25:16,731 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.666e+01 1.100e+02 1.262e+02 1.658e+02 2.819e+02, threshold=2.523e+02, percent-clipped=4.0
+2024-08-03 20:25:18,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.93 vs. limit=15.0
+2024-08-03 20:25:30,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=221598.66666666666, ans=0.0
+2024-08-03 20:25:34,761 INFO [train.py:1114] (0/4) Epoch 17, batch 2200, loss[loss=0.1718, simple_loss=0.2647, pruned_loss=0.03938, over 13408.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2724, pruned_loss=0.04962, over 2646036.75 frames. ], batch size: 39, lr: 7.30e-03, grad_scale: 32.0
+2024-08-03 20:26:16,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=221782.0, ans=0.09899494936611666
+2024-08-03 20:26:20,166 INFO [train.py:1114] (0/4) Epoch 17, batch 2250, loss[loss=0.1572, simple_loss=0.2531, pruned_loss=0.03066, over 13359.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2723, pruned_loss=0.04986, over 2642943.04 frames. ], batch size: 37, lr: 7.30e-03, grad_scale: 32.0
+2024-08-03 20:26:24,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=221818.66666666666, ans=0.0
+2024-08-03 20:26:41,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.73 vs. limit=15.0
+2024-08-03 20:26:41,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.80 vs. limit=15.0
+2024-08-03 20:26:45,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=221892.0, ans=0.0
+2024-08-03 20:26:48,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.159e+02 1.389e+02 1.848e+02 3.074e+02, threshold=2.777e+02, percent-clipped=8.0
+2024-08-03 20:26:54,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=221928.66666666666, ans=0.125
+2024-08-03 20:26:56,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=221928.66666666666, ans=0.125
+2024-08-03 20:26:56,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0
+2024-08-03 20:26:56,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.82 vs. limit=15.0
+2024-08-03 20:27:05,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=221965.33333333334, ans=0.025
+2024-08-03 20:27:05,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.56 vs. limit=22.5
+2024-08-03 20:27:10,484 INFO [train.py:1114] (0/4) Epoch 17, batch 2300, loss[loss=0.1701, simple_loss=0.2492, pruned_loss=0.0455, over 13560.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2715, pruned_loss=0.04974, over 2640030.03 frames. ], batch size: 33, lr: 7.29e-03, grad_scale: 32.0
+2024-08-03 20:27:11,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=222002.0, ans=0.0
+2024-08-03 20:27:22,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222038.66666666666, ans=0.1
+2024-08-03 20:27:33,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=222075.33333333334, ans=0.035
+2024-08-03 20:27:34,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=222075.33333333334, ans=0.2
+2024-08-03 20:27:39,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=222112.0, ans=0.2
+2024-08-03 20:27:41,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=222112.0, ans=0.05
+2024-08-03 20:27:42,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222112.0, ans=0.1
+2024-08-03 20:27:56,112 INFO [train.py:1114] (0/4) Epoch 17, batch 2350, loss[loss=0.1856, simple_loss=0.2752, pruned_loss=0.04803, over 13543.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.04953, over 2642111.37 frames. ], batch size: 38, lr: 7.29e-03, grad_scale: 16.0
+2024-08-03 20:28:06,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=222222.0, ans=0.02
+2024-08-03 20:28:06,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222222.0, ans=0.125
+2024-08-03 20:28:20,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=12.0
+2024-08-03 20:28:25,887 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.384e+01 1.095e+02 1.277e+02 1.611e+02 2.837e+02, threshold=2.555e+02, percent-clipped=1.0
+2024-08-03 20:28:43,106 INFO [train.py:1114] (0/4) Epoch 17, batch 2400, loss[loss=0.1774, simple_loss=0.2665, pruned_loss=0.04418, over 13536.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2727, pruned_loss=0.05032, over 2643004.59 frames. ], batch size: 35, lr: 7.29e-03, grad_scale: 32.0
+2024-08-03 20:29:03,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=222442.0, ans=10.0
+2024-08-03 20:29:06,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.54 vs. limit=15.0
+2024-08-03 20:29:06,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.23 vs. limit=12.0
+2024-08-03 20:29:15,114 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:29:30,183 INFO [train.py:1114] (0/4) Epoch 17, batch 2450, loss[loss=0.1966, simple_loss=0.2869, pruned_loss=0.0532, over 13361.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2738, pruned_loss=0.0508, over 2632984.49 frames. ], batch size: 37, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:29:30,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=222552.0, ans=0.125
+2024-08-03 20:29:38,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222588.66666666666, ans=0.125
+2024-08-03 20:29:59,847 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.386e+01 1.131e+02 1.264e+02 1.537e+02 2.363e+02, threshold=2.529e+02, percent-clipped=0.0
+2024-08-03 20:30:00,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=222662.0, ans=0.0
+2024-08-03 20:30:15,170 INFO [train.py:1114] (0/4) Epoch 17, batch 2500, loss[loss=0.1867, simple_loss=0.2772, pruned_loss=0.04806, over 13398.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2729, pruned_loss=0.05034, over 2637382.87 frames. ], batch size: 39, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:30:25,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=222772.0, ans=0.2
+2024-08-03 20:30:36,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=222808.66666666666, ans=0.2
+2024-08-03 20:30:41,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222845.33333333334, ans=0.1
+2024-08-03 20:30:45,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.79 vs. limit=15.0
+2024-08-03 20:30:55,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=222882.0, ans=0.025
+2024-08-03 20:30:56,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222882.0, ans=0.1
+2024-08-03 20:30:56,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=222882.0, ans=0.0
+2024-08-03 20:30:57,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=222882.0, ans=0.125
+2024-08-03 20:31:00,459 INFO [train.py:1114] (0/4) Epoch 17, batch 2550, loss[loss=0.1835, simple_loss=0.2562, pruned_loss=0.05537, over 13527.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2724, pruned_loss=0.05026, over 2639162.12 frames. ], batch size: 31, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:31:01,498 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:31:08,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0
+2024-08-03 20:31:20,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222992.0, ans=0.1
+2024-08-03 20:31:27,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222992.0, ans=0.1
+2024-08-03 20:31:27,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=223028.66666666666, ans=0.2
+2024-08-03 20:31:27,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=223028.66666666666, ans=0.0
+2024-08-03 20:31:31,226 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.159e+02 1.553e+02 2.092e+02 3.686e+02, threshold=3.106e+02, percent-clipped=10.0
+2024-08-03 20:31:42,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=223065.33333333334, ans=0.0
+2024-08-03 20:31:46,290 INFO [train.py:1114] (0/4) Epoch 17, batch 2600, loss[loss=0.1877, simple_loss=0.273, pruned_loss=0.05124, over 13580.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2731, pruned_loss=0.0504, over 2638326.02 frames. ], batch size: 36, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:31:55,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.52 vs. limit=22.5
+2024-08-03 20:31:59,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223138.66666666666, ans=0.125
+2024-08-03 20:31:59,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.39 vs. limit=22.5
+2024-08-03 20:32:01,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=223138.66666666666, ans=0.0
+2024-08-03 20:32:01,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=223138.66666666666, ans=0.125
+2024-08-03 20:32:02,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=223175.33333333334, ans=0.0
+2024-08-03 20:32:04,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=223175.33333333334, ans=0.04949747468305833
+2024-08-03 20:32:08,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=223175.33333333334, ans=0.125
+2024-08-03 20:32:29,423 INFO [train.py:1114] (0/4) Epoch 17, batch 2650, loss[loss=0.224, simple_loss=0.3082, pruned_loss=0.06988, over 13350.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2739, pruned_loss=0.05063, over 2640738.20 frames. ], batch size: 46, lr: 7.27e-03, grad_scale: 16.0
+2024-08-03 20:32:39,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=223322.0, ans=0.0
+2024-08-03 20:32:41,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=223322.0, ans=0.125
+2024-08-03 20:32:44,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=223322.0, ans=0.2
+2024-08-03 20:32:45,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.54 vs. limit=22.5
+2024-08-03 20:32:57,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=223395.33333333334, ans=0.0
+2024-08-03 20:32:58,629 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.512e+01 1.129e+02 1.471e+02 1.804e+02 3.189e+02, threshold=2.942e+02, percent-clipped=1.0
+2024-08-03 20:32:59,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.73 vs. limit=15.0
+2024-08-03 20:33:02,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223395.33333333334, ans=0.1
+2024-08-03 20:33:10,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=223432.0, ans=0.0
+2024-08-03 20:33:11,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=223468.66666666666, ans=0.125
+2024-08-03 20:33:12,556 INFO [train.py:1114] (0/4) Epoch 17, batch 2700, loss[loss=0.1964, simple_loss=0.2868, pruned_loss=0.053, over 13537.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2737, pruned_loss=0.05038, over 2637295.19 frames. ], batch size: 40, lr: 7.27e-03, grad_scale: 16.0
+2024-08-03 20:33:22,479 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:33:28,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=223505.33333333334, ans=6.0
+2024-08-03 20:33:30,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=223542.0, ans=0.0
+2024-08-03 20:33:35,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=223542.0, ans=0.0
+2024-08-03 20:33:43,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=223578.66666666666, ans=0.0
+2024-08-03 20:33:51,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223615.33333333334, ans=0.125
+2024-08-03 20:33:56,057 INFO [train.py:1114] (0/4) Epoch 17, batch 2750, loss[loss=0.1768, simple_loss=0.2637, pruned_loss=0.04494, over 13328.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2728, pruned_loss=0.0504, over 2635911.75 frames. ], batch size: 34, lr: 7.27e-03, grad_scale: 16.0
+2024-08-03 20:33:56,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223652.0, ans=0.125
+2024-08-03 20:34:00,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=223652.0, ans=0.125
+2024-08-03 20:34:26,198 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.377e+01 1.115e+02 1.294e+02 1.597e+02 2.305e+02, threshold=2.588e+02, percent-clipped=0.0
+2024-08-03 20:34:40,292 INFO [train.py:1114] (0/4) Epoch 17, batch 2800, loss[loss=0.2334, simple_loss=0.3003, pruned_loss=0.08326, over 9761.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2727, pruned_loss=0.05046, over 2627547.48 frames. ], batch size: 97, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:34:44,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.28 vs. limit=10.0
+2024-08-03 20:35:18,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.85 vs. limit=22.5
+2024-08-03 20:35:25,254 INFO [train.py:1114] (0/4) Epoch 17, batch 2850, loss[loss=0.1868, simple_loss=0.2777, pruned_loss=0.04795, over 13367.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2737, pruned_loss=0.05066, over 2622000.87 frames. ], batch size: 35, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:35:40,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=224055.33333333334, ans=0.125
+2024-08-03 20:35:42,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=224092.0, ans=0.025
+2024-08-03 20:35:52,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=224128.66666666666, ans=0.04949747468305833
+2024-08-03 20:35:54,819 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.205e+02 1.427e+02 1.924e+02 3.362e+02, threshold=2.855e+02, percent-clipped=10.0
+2024-08-03 20:36:01,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=12.0
+2024-08-03 20:36:02,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.60 vs. limit=22.5
+2024-08-03 20:36:04,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=224165.33333333334, ans=0.125
+2024-08-03 20:36:09,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0
+2024-08-03 20:36:10,085 INFO [train.py:1114] (0/4) Epoch 17, batch 2900, loss[loss=0.1702, simple_loss=0.249, pruned_loss=0.04567, over 13364.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2748, pruned_loss=0.05099, over 2632551.16 frames. ], batch size: 36, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:36:10,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224202.0, ans=0.1
+2024-08-03 20:36:26,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=224238.66666666666, ans=0.0
+2024-08-03 20:36:33,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=224275.33333333334, ans=0.125
+2024-08-03 20:36:53,526 INFO [train.py:1114] (0/4) Epoch 17, batch 2950, loss[loss=0.1708, simple_loss=0.2588, pruned_loss=0.04143, over 13332.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2737, pruned_loss=0.05072, over 2630504.32 frames. ], batch size: 34, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:36:54,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=224385.33333333334, ans=0.0
+2024-08-03 20:37:05,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=224422.0, ans=0.125
+2024-08-03 20:37:06,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=224422.0, ans=0.125
+2024-08-03 20:37:15,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=224458.66666666666, ans=0.0
+2024-08-03 20:37:17,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224458.66666666666, ans=0.1
+2024-08-03 20:37:23,421 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.109e+02 1.313e+02 1.570e+02 2.324e+02, threshold=2.625e+02, percent-clipped=1.0
+2024-08-03 20:37:37,386 INFO [train.py:1114] (0/4) Epoch 17, batch 3000, loss[loss=0.1917, simple_loss=0.275, pruned_loss=0.05418, over 13547.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2736, pruned_loss=0.05066, over 2629970.53 frames. ], batch size: 37, lr: 7.25e-03, grad_scale: 32.0
+2024-08-03 20:37:37,676 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 20:37:44,099 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.7309, 1.2747, 2.9312, 2.9532], device='cuda:0')
+2024-08-03 20:37:47,676 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.1723, simple_loss=0.2712, pruned_loss=0.03676, over 944034.00 frames.
+2024-08-03 20:37:47,976 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 20:38:26,325 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.21 vs. limit=22.5
+2024-08-03 20:38:33,104 INFO [train.py:1114] (0/4) Epoch 17, batch 3050, loss[loss=0.1763, simple_loss=0.2608, pruned_loss=0.04587, over 13530.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2738, pruned_loss=0.05086, over 2627946.50 frames. ], batch size: 35, lr: 7.25e-03, grad_scale: 32.0
+2024-08-03 20:38:52,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224825.33333333334, ans=0.1
+2024-08-03 20:39:04,759 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.103e+02 1.242e+02 1.449e+02 2.712e+02, threshold=2.483e+02, percent-clipped=1.0
+2024-08-03 20:39:10,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=224898.66666666666, ans=0.125
+2024-08-03 20:39:16,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=224898.66666666666, ans=0.0
+2024-08-03 20:39:17,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224935.33333333334, ans=0.1
+2024-08-03 20:39:17,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=224935.33333333334, ans=0.04949747468305833
+2024-08-03 20:39:18,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.91 vs. limit=10.0
+2024-08-03 20:39:18,568 INFO [train.py:1114] (0/4) Epoch 17, batch 3100, loss[loss=0.1957, simple_loss=0.2803, pruned_loss=0.05553, over 13353.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2733, pruned_loss=0.05063, over 2627725.59 frames. ], batch size: 46, lr: 7.25e-03, grad_scale: 32.0
+2024-08-03 20:39:22,397 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.71 vs. limit=10.0
+2024-08-03 20:39:30,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=224972.0, ans=0.05
+2024-08-03 20:39:31,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=224972.0, ans=0.035
+2024-08-03 20:39:34,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=225008.66666666666, ans=0.125
+2024-08-03 20:39:55,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=12.0
+2024-08-03 20:40:01,383 INFO [train.py:1114] (0/4) Epoch 17, batch 3150, loss[loss=0.203, simple_loss=0.2893, pruned_loss=0.05838, over 13000.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2727, pruned_loss=0.05012, over 2629828.07 frames. ], batch size: 48, lr: 7.24e-03, grad_scale: 32.0
+2024-08-03 20:40:16,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225155.33333333334, ans=0.1
+2024-08-03 20:40:30,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.117e+02 1.300e+02 1.745e+02 2.777e+02, threshold=2.600e+02, percent-clipped=1.0
+2024-08-03 20:40:44,331 INFO [train.py:1114] (0/4) Epoch 17, batch 3200, loss[loss=0.1768, simple_loss=0.2645, pruned_loss=0.04456, over 13556.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2724, pruned_loss=0.04984, over 2634779.11 frames. ], batch size: 37, lr: 7.24e-03, grad_scale: 32.0
+2024-08-03 20:40:49,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=225302.0, ans=0.125
+2024-08-03 20:41:03,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=225375.33333333334, ans=0.125
+2024-08-03 20:41:14,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225412.0, ans=0.1
+2024-08-03 20:41:27,827 INFO [train.py:1114] (0/4) Epoch 17, batch 3250, loss[loss=0.1786, simple_loss=0.2601, pruned_loss=0.04854, over 13386.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2727, pruned_loss=0.0496, over 2638790.96 frames. ], batch size: 38, lr: 7.24e-03, grad_scale: 32.0
+2024-08-03 20:41:30,608 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:41:30,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=225485.33333333334, ans=0.125
+2024-08-03 20:41:32,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.12 vs. limit=6.0
+2024-08-03 20:41:35,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.83 vs. limit=10.0
+2024-08-03 20:41:42,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=225522.0, ans=0.125
+2024-08-03 20:41:45,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=225558.66666666666, ans=0.0
+2024-08-03 20:41:51,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=225558.66666666666, ans=0.025
+2024-08-03 20:41:53,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225595.33333333334, ans=0.125
+2024-08-03 20:41:57,628 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.641e+01 1.154e+02 1.402e+02 1.667e+02 2.489e+02, threshold=2.804e+02, percent-clipped=0.0
+2024-08-03 20:41:59,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225595.33333333334, ans=0.125
+2024-08-03 20:42:11,284 INFO [train.py:1114] (0/4) Epoch 17, batch 3300, loss[loss=0.2085, simple_loss=0.2929, pruned_loss=0.06201, over 12908.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2713, pruned_loss=0.04909, over 2640182.62 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:42:23,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225705.33333333334, ans=0.1
+2024-08-03 20:42:36,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=225778.66666666666, ans=0.025
+2024-08-03 20:42:41,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=225778.66666666666, ans=0.125
+2024-08-03 20:42:48,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=225815.33333333334, ans=0.125
+2024-08-03 20:42:53,716 INFO [train.py:1114] (0/4) Epoch 17, batch 3350, loss[loss=0.203, simple_loss=0.2905, pruned_loss=0.05775, over 13030.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2726, pruned_loss=0.0501, over 2628730.80 frames. ], batch size: 48, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:43:03,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225888.66666666666, ans=0.1
+2024-08-03 20:43:10,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.58 vs. limit=15.0
+2024-08-03 20:43:14,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=225925.33333333334, ans=0.0
+2024-08-03 20:43:16,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225925.33333333334, ans=0.1
+2024-08-03 20:43:16,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=225925.33333333334, ans=0.125
+2024-08-03 20:43:16,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=225925.33333333334, ans=0.2
+2024-08-03 20:43:19,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=225962.0, ans=0.125
+2024-08-03 20:43:22,994 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.128e+02 1.245e+02 1.447e+02 2.027e+02, threshold=2.490e+02, percent-clipped=0.0
+2024-08-03 20:43:33,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=225998.66666666666, ans=0.125
+2024-08-03 20:43:36,783 INFO [train.py:1114] (0/4) Epoch 17, batch 3400, loss[loss=0.159, simple_loss=0.2386, pruned_loss=0.03972, over 13540.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2729, pruned_loss=0.05026, over 2624736.71 frames. ], batch size: 31, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:43:36,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=226035.33333333334, ans=0.125
+2024-08-03 20:43:49,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226072.0, ans=0.125
+2024-08-03 20:43:52,193 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:43:54,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=226108.66666666666, ans=0.125
+2024-08-03 20:44:07,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226145.33333333334, ans=0.125
+2024-08-03 20:44:14,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.94 vs. limit=15.0
+2024-08-03 20:44:15,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=226182.0, ans=0.2
+2024-08-03 20:44:20,078 INFO [train.py:1114] (0/4) Epoch 17, batch 3450, loss[loss=0.1898, simple_loss=0.2801, pruned_loss=0.04971, over 12779.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2724, pruned_loss=0.04974, over 2629018.82 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:44:24,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=226218.66666666666, ans=0.2
+2024-08-03 20:44:28,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226255.33333333334, ans=0.1
+2024-08-03 20:44:49,799 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.757e+01 1.151e+02 1.446e+02 1.763e+02 3.368e+02, threshold=2.892e+02, percent-clipped=3.0
+2024-08-03 20:44:50,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=226328.66666666666, ans=0.0
+2024-08-03 20:45:02,692 INFO [train.py:1114] (0/4) Epoch 17, batch 3500, loss[loss=0.1799, simple_loss=0.2642, pruned_loss=0.04782, over 13532.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2716, pruned_loss=0.04955, over 2630060.15 frames. ], batch size: 34, lr: 7.22e-03, grad_scale: 16.0
+2024-08-03 20:45:08,833 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:45:11,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=226438.66666666666, ans=0.0
+2024-08-03 20:45:19,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=226475.33333333334, ans=0.1
+2024-08-03 20:45:23,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=226475.33333333334, ans=0.125
+2024-08-03 20:45:37,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=226548.66666666666, ans=0.125
+2024-08-03 20:45:42,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226548.66666666666, ans=0.0
+2024-08-03 20:45:45,722 INFO [train.py:1114] (0/4) Epoch 17, batch 3550, loss[loss=0.1888, simple_loss=0.2784, pruned_loss=0.04956, over 12483.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2739, pruned_loss=0.05106, over 2628343.76 frames. ], batch size: 58, lr: 7.22e-03, grad_scale: 16.0
+2024-08-03 20:45:51,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=226585.33333333334, ans=0.0
+2024-08-03 20:45:59,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226622.0, ans=0.125
+2024-08-03 20:46:17,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.203e+02 1.370e+02 1.580e+02 2.866e+02, threshold=2.739e+02, percent-clipped=0.0
+2024-08-03 20:46:30,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226768.66666666666, ans=0.1
+2024-08-03 20:46:31,250 INFO [train.py:1114] (0/4) Epoch 17, batch 3600, loss[loss=0.2261, simple_loss=0.3042, pruned_loss=0.07399, over 9491.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2785, pruned_loss=0.05492, over 2490540.88 frames. ], batch size: 96, lr: 7.22e-03, grad_scale: 32.0
+2024-08-03 20:46:35,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226768.66666666666, ans=0.125
+2024-08-03 20:46:39,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=226805.33333333334, ans=0.0
+2024-08-03 20:46:44,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=226805.33333333334, ans=0.0
+2024-08-03 20:46:47,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226805.33333333334, ans=0.125
+2024-08-03 20:46:50,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=226842.0, ans=0.0
+2024-08-03 20:46:54,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.84 vs.
limit=5.0 +2024-08-03 20:47:06,013 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-17.pt +2024-08-03 20:47:48,381 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.1788, simple_loss=0.2593, pruned_loss=0.04917, over 13352.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2593, pruned_loss=0.04917, over 13352.00 frames. ], batch size: 33, lr: 7.01e-03, grad_scale: 32.0 +2024-08-03 20:47:48,381 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 20:47:54,165 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2517, 2.6469, 2.4653, 2.4460], device='cuda:0') +2024-08-03 20:47:58,990 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1737, simple_loss=0.274, pruned_loss=0.03673, over 944034.00 frames. +2024-08-03 20:47:58,991 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 20:48:00,992 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=8.990e-02 +2024-08-03 20:48:09,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226952.0, ans=0.125 +2024-08-03 20:48:31,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=227025.33333333334, ans=0.125 +2024-08-03 20:48:32,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.66 vs. limit=15.0 +2024-08-03 20:48:37,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=227062.0, ans=0.2 +2024-08-03 20:48:39,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227062.0, ans=0.125 +2024-08-03 20:48:40,350 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.175e+01 1.169e+02 1.268e+02 1.393e+02 2.818e+02, threshold=2.535e+02, percent-clipped=2.0 +2024-08-03 20:48:42,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=227062.0, ans=0.0 +2024-08-03 20:48:44,898 INFO [train.py:1114] (0/4) Epoch 18, batch 50, loss[loss=0.1554, simple_loss=0.2407, pruned_loss=0.03507, over 13418.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2741, pruned_loss=0.05118, over 578366.05 frames. ], batch size: 32, lr: 7.01e-03, grad_scale: 32.0 +2024-08-03 20:49:06,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227172.0, ans=0.125 +2024-08-03 20:49:10,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=227172.0, ans=0.125 +2024-08-03 20:49:22,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-08-03 20:49:30,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=227282.0, ans=0.05 +2024-08-03 20:49:31,407 INFO [train.py:1114] (0/4) Epoch 18, batch 100, loss[loss=0.182, simple_loss=0.2705, pruned_loss=0.04676, over 13543.00 frames. 
], tot_loss[loss=0.1893, simple_loss=0.2762, pruned_loss=0.05122, over 1025340.79 frames. ], batch size: 35, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:50:03,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=227392.0, ans=0.2 +2024-08-03 20:50:04,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227392.0, ans=0.125 +2024-08-03 20:50:16,430 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.987e+01 1.102e+02 1.290e+02 1.676e+02 3.343e+02, threshold=2.579e+02, percent-clipped=6.0 +2024-08-03 20:50:18,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=227428.66666666666, ans=0.125 +2024-08-03 20:50:20,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227465.33333333334, ans=0.0 +2024-08-03 20:50:20,811 INFO [train.py:1114] (0/4) Epoch 18, batch 150, loss[loss=0.1606, simple_loss=0.2383, pruned_loss=0.04142, over 13406.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2735, pruned_loss=0.05071, over 1386521.36 frames. ], batch size: 32, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:50:28,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.75 vs. limit=15.0 +2024-08-03 20:50:32,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227502.0, ans=0.125 +2024-08-03 20:50:33,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=227502.0, ans=0.125 +2024-08-03 20:50:34,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=227502.0, ans=0.125 +2024-08-03 20:50:41,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227538.66666666666, ans=0.125 +2024-08-03 20:50:45,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=227538.66666666666, ans=0.125 +2024-08-03 20:50:54,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227575.33333333334, ans=0.125 +2024-08-03 20:51:07,724 INFO [train.py:1114] (0/4) Epoch 18, batch 200, loss[loss=0.1998, simple_loss=0.2931, pruned_loss=0.05328, over 12417.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2725, pruned_loss=0.0502, over 1664733.41 frames. 
], batch size: 58, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:51:07,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227648.66666666666, ans=0.125 +2024-08-03 20:51:14,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227648.66666666666, ans=0.0 +2024-08-03 20:51:19,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=227685.33333333334, ans=0.2 +2024-08-03 20:51:20,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=227685.33333333334, ans=0.0 +2024-08-03 20:51:37,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.23 vs. limit=15.0 +2024-08-03 20:51:39,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=227758.66666666666, ans=0.125 +2024-08-03 20:51:41,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=227758.66666666666, ans=0.95 +2024-08-03 20:51:48,070 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.522e+01 1.097e+02 1.244e+02 1.547e+02 2.709e+02, threshold=2.488e+02, percent-clipped=2.0 +2024-08-03 20:51:52,901 INFO [train.py:1114] (0/4) Epoch 18, batch 250, loss[loss=0.2016, simple_loss=0.2851, pruned_loss=0.05901, over 13305.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2717, pruned_loss=0.04983, over 1884518.58 frames. ], batch size: 46, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:51:59,561 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:51:59,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=227832.0, ans=0.09899494936611666 +2024-08-03 20:52:15,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=227905.33333333334, ans=0.125 +2024-08-03 20:52:25,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227942.0, ans=0.125 +2024-08-03 20:52:38,231 INFO [train.py:1114] (0/4) Epoch 18, batch 300, loss[loss=0.194, simple_loss=0.2817, pruned_loss=0.05312, over 13486.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2725, pruned_loss=0.05043, over 2052333.99 frames. ], batch size: 42, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:53:09,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=228125.33333333334, ans=0.125 +2024-08-03 20:53:19,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.85 vs. limit=15.0 +2024-08-03 20:53:21,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.43 vs. 
limit=15.0 +2024-08-03 20:53:22,565 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.234e+01 1.098e+02 1.173e+02 1.552e+02 2.222e+02, threshold=2.347e+02, percent-clipped=0.0 +2024-08-03 20:53:27,251 INFO [train.py:1114] (0/4) Epoch 18, batch 350, loss[loss=0.1892, simple_loss=0.2717, pruned_loss=0.05335, over 13593.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2728, pruned_loss=0.05013, over 2183421.99 frames. ], batch size: 33, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:53:39,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=228235.33333333334, ans=0.2 +2024-08-03 20:54:09,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.93 vs. limit=6.0 +2024-08-03 20:54:15,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=228345.33333333334, ans=15.0 +2024-08-03 20:54:16,799 INFO [train.py:1114] (0/4) Epoch 18, batch 400, loss[loss=0.1884, simple_loss=0.2767, pruned_loss=0.05003, over 13365.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2718, pruned_loss=0.04947, over 2287617.75 frames. ], batch size: 37, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:54:29,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.24 vs. limit=15.0 +2024-08-03 20:54:53,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.31 vs. limit=15.0 +2024-08-03 20:54:57,265 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.985e+01 1.132e+02 1.280e+02 1.629e+02 3.189e+02, threshold=2.560e+02, percent-clipped=4.0 +2024-08-03 20:54:57,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228528.66666666666, ans=0.1 +2024-08-03 20:55:01,714 INFO [train.py:1114] (0/4) Epoch 18, batch 450, loss[loss=0.184, simple_loss=0.2804, pruned_loss=0.04381, over 13542.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2713, pruned_loss=0.0492, over 2360891.64 frames. 
], batch size: 38, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:55:06,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=228565.33333333334, ans=0.0 +2024-08-03 20:55:24,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228638.66666666666, ans=0.125 +2024-08-03 20:55:28,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228675.33333333334, ans=0.1 +2024-08-03 20:55:28,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228675.33333333334, ans=0.125 +2024-08-03 20:55:32,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=228675.33333333334, ans=0.0 +2024-08-03 20:55:44,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=228712.0, ans=0.125 +2024-08-03 20:55:46,691 INFO [train.py:1114] (0/4) Epoch 18, batch 500, loss[loss=0.1849, simple_loss=0.2787, pruned_loss=0.0456, over 13432.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2699, pruned_loss=0.04868, over 2426874.12 frames. ], batch size: 43, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:55:59,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=228785.33333333334, ans=0.025 +2024-08-03 20:56:05,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=228822.0, ans=0.2 +2024-08-03 20:56:14,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228858.66666666666, ans=0.125 +2024-08-03 20:56:21,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=228858.66666666666, ans=0.2 +2024-08-03 20:56:26,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=228895.33333333334, ans=15.0 +2024-08-03 20:56:27,947 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.074e+02 1.245e+02 1.559e+02 2.675e+02, threshold=2.490e+02, percent-clipped=1.0 +2024-08-03 20:56:30,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=228895.33333333334, ans=0.125 +2024-08-03 20:56:32,428 INFO [train.py:1114] (0/4) Epoch 18, batch 550, loss[loss=0.2012, simple_loss=0.2854, pruned_loss=0.05857, over 13033.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2705, pruned_loss=0.04895, over 2468744.04 frames. ], batch size: 48, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:56:36,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=228932.0, ans=0.05 +2024-08-03 20:56:53,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229005.33333333334, ans=0.125 +2024-08-03 20:56:57,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.62 vs. 
limit=22.5 +2024-08-03 20:57:14,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=229078.66666666666, ans=0.125 +2024-08-03 20:57:22,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=229115.33333333334, ans=0.0 +2024-08-03 20:57:23,286 INFO [train.py:1114] (0/4) Epoch 18, batch 600, loss[loss=0.2049, simple_loss=0.2885, pruned_loss=0.06062, over 13305.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2706, pruned_loss=0.04898, over 2509060.38 frames. ], batch size: 46, lr: 6.98e-03, grad_scale: 16.0 +2024-08-03 20:57:27,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.27 vs. limit=15.0 +2024-08-03 20:57:28,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.66 vs. limit=15.0 +2024-08-03 20:57:55,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229225.33333333334, ans=0.0 +2024-08-03 20:57:55,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=229225.33333333334, ans=0.125 +2024-08-03 20:57:58,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=229225.33333333334, ans=0.95 +2024-08-03 20:58:05,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.114e+02 1.293e+02 1.855e+02 3.099e+02, threshold=2.585e+02, percent-clipped=2.0 +2024-08-03 20:58:08,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=229298.66666666666, ans=0.125 +2024-08-03 20:58:09,380 INFO [train.py:1114] (0/4) Epoch 18, batch 650, loss[loss=0.1694, simple_loss=0.2581, pruned_loss=0.04035, over 13546.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2696, pruned_loss=0.0483, over 2544133.40 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:58:15,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229298.66666666666, ans=0.1 +2024-08-03 20:58:22,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=229335.33333333334, ans=0.125 +2024-08-03 20:58:25,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.17 vs. limit=15.0 +2024-08-03 20:58:28,067 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-03 20:58:45,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=229445.33333333334, ans=0.05 +2024-08-03 20:58:46,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229445.33333333334, ans=0.125 +2024-08-03 20:58:54,985 INFO [train.py:1114] (0/4) Epoch 18, batch 700, loss[loss=0.1958, simple_loss=0.2707, pruned_loss=0.0604, over 13529.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2702, pruned_loss=0.04859, over 2564772.54 frames. 
], batch size: 35, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:58:56,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=229482.0, ans=0.0 +2024-08-03 20:59:00,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229482.0, ans=0.1 +2024-08-03 20:59:02,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=229482.0, ans=0.07 +2024-08-03 20:59:27,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=229592.0, ans=0.125 +2024-08-03 20:59:34,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=229592.0, ans=0.0 +2024-08-03 20:59:41,522 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.096e+02 1.191e+02 1.436e+02 2.621e+02, threshold=2.382e+02, percent-clipped=1.0 +2024-08-03 20:59:44,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=229665.33333333334, ans=0.035 +2024-08-03 20:59:45,180 INFO [train.py:1114] (0/4) Epoch 18, batch 750, loss[loss=0.1766, simple_loss=0.2721, pruned_loss=0.04057, over 13353.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2697, pruned_loss=0.04837, over 2581835.38 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:59:50,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0 +2024-08-03 20:59:54,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229702.0, ans=0.1 +2024-08-03 20:59:57,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=229702.0, ans=0.125 +2024-08-03 21:00:20,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=229812.0, ans=0.0 +2024-08-03 21:00:29,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=229848.66666666666, ans=0.2 +2024-08-03 21:00:30,238 INFO [train.py:1114] (0/4) Epoch 18, batch 800, loss[loss=0.1622, simple_loss=0.2482, pruned_loss=0.03809, over 13339.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2694, pruned_loss=0.04816, over 2596452.02 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 32.0 +2024-08-03 21:00:37,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=229848.66666666666, ans=0.0 +2024-08-03 21:00:54,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229885.33333333334, ans=0.0 +2024-08-03 21:00:58,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.25 vs. limit=22.5 +2024-08-03 21:01:28,074 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.223e+01 1.040e+02 1.288e+02 1.609e+02 2.437e+02, threshold=2.577e+02, percent-clipped=2.0 +2024-08-03 21:01:30,770 INFO [train.py:1114] (0/4) Epoch 18, batch 850, loss[loss=0.1745, simple_loss=0.2701, pruned_loss=0.0395, over 13335.00 frames. 
], tot_loss[loss=0.1828, simple_loss=0.2691, pruned_loss=0.0482, over 2609344.96 frames. ], batch size: 40, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:01:42,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=230068.66666666666, ans=0.0 +2024-08-03 21:01:54,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230105.33333333334, ans=0.1 +2024-08-03 21:02:07,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=230178.66666666666, ans=0.025 +2024-08-03 21:02:12,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=230178.66666666666, ans=10.0 +2024-08-03 21:02:13,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=230178.66666666666, ans=0.2 +2024-08-03 21:02:15,875 INFO [train.py:1114] (0/4) Epoch 18, batch 900, loss[loss=0.166, simple_loss=0.2583, pruned_loss=0.03685, over 13346.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2705, pruned_loss=0.04913, over 2611232.22 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:02:22,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.17 vs. limit=22.5 +2024-08-03 21:02:26,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230252.0, ans=0.125 +2024-08-03 21:02:44,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=230325.33333333334, ans=0.125 +2024-08-03 21:02:48,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=230325.33333333334, ans=0.125 +2024-08-03 21:02:48,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=230325.33333333334, ans=0.025 +2024-08-03 21:02:51,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=230325.33333333334, ans=0.0 +2024-08-03 21:02:59,119 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.753e+01 1.118e+02 1.313e+02 1.560e+02 2.225e+02, threshold=2.625e+02, percent-clipped=0.0 +2024-08-03 21:03:02,085 INFO [train.py:1114] (0/4) Epoch 18, batch 950, loss[loss=0.1633, simple_loss=0.248, pruned_loss=0.03927, over 13521.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2706, pruned_loss=0.04909, over 2611367.71 frames. 
], batch size: 34, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:03:09,580 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:03:11,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=230435.33333333334, ans=0.125 +2024-08-03 21:03:13,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=230435.33333333334, ans=0.5 +2024-08-03 21:03:33,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=230508.66666666666, ans=0.0 +2024-08-03 21:03:40,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=230545.33333333334, ans=0.2 +2024-08-03 21:03:48,170 INFO [train.py:1114] (0/4) Epoch 18, batch 1000, loss[loss=0.1553, simple_loss=0.2391, pruned_loss=0.03572, over 13374.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.04955, over 2609457.48 frames. ], batch size: 35, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:03:51,122 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:03:51,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=230582.0, ans=0.125 +2024-08-03 21:03:56,882 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.70 vs. limit=22.5 +2024-08-03 21:04:03,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=230618.66666666666, ans=12.0 +2024-08-03 21:04:10,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=230655.33333333334, ans=0.0 +2024-08-03 21:04:22,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=230692.0, ans=0.0 +2024-08-03 21:04:34,938 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.346e+01 1.082e+02 1.211e+02 1.465e+02 2.308e+02, threshold=2.421e+02, percent-clipped=0.0 +2024-08-03 21:04:39,889 INFO [train.py:1114] (0/4) Epoch 18, batch 1050, loss[loss=0.1731, simple_loss=0.2712, pruned_loss=0.03747, over 13578.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2707, pruned_loss=0.04902, over 2614077.55 frames. ], batch size: 39, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:04:40,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=230765.33333333334, ans=0.05 +2024-08-03 21:04:40,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.40 vs. 
limit=15.0 +2024-08-03 21:04:43,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=230765.33333333334, ans=0.125 +2024-08-03 21:04:57,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=230802.0, ans=0.0 +2024-08-03 21:05:18,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230912.0, ans=0.125 +2024-08-03 21:05:26,834 INFO [train.py:1114] (0/4) Epoch 18, batch 1100, loss[loss=0.193, simple_loss=0.2769, pruned_loss=0.05448, over 13563.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2706, pruned_loss=0.04893, over 2619267.47 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:05:35,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=230985.33333333334, ans=0.2 +2024-08-03 21:05:43,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=12.0 +2024-08-03 21:05:53,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=231058.66666666666, ans=0.0 +2024-08-03 21:06:07,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=231095.33333333334, ans=0.07 +2024-08-03 21:06:09,962 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.017e+01 1.089e+02 1.225e+02 1.560e+02 2.576e+02, threshold=2.450e+02, percent-clipped=1.0 +2024-08-03 21:06:12,615 INFO [train.py:1114] (0/4) Epoch 18, batch 1150, loss[loss=0.181, simple_loss=0.2629, pruned_loss=0.04959, over 13561.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2704, pruned_loss=0.04893, over 2618714.04 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:06:13,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=231132.0, ans=0.1 +2024-08-03 21:06:19,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231132.0, ans=0.125 +2024-08-03 21:06:20,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.40 vs. limit=15.0 +2024-08-03 21:06:24,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=231168.66666666666, ans=0.125 +2024-08-03 21:06:58,353 INFO [train.py:1114] (0/4) Epoch 18, batch 1200, loss[loss=0.1888, simple_loss=0.2852, pruned_loss=0.04615, over 13581.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2715, pruned_loss=0.04938, over 2616481.86 frames. ], batch size: 39, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:07:09,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=231352.0, ans=0.2 +2024-08-03 21:07:13,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.27 vs. 
limit=15.0 +2024-08-03 21:07:16,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=231388.66666666666, ans=0.125 +2024-08-03 21:07:18,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=231388.66666666666, ans=0.025 +2024-08-03 21:07:25,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=231425.33333333334, ans=0.2 +2024-08-03 21:07:36,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.41 vs. limit=10.0 +2024-08-03 21:07:37,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=231462.0, ans=0.0 +2024-08-03 21:07:40,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.911e+01 1.076e+02 1.246e+02 1.591e+02 2.283e+02, threshold=2.493e+02, percent-clipped=0.0 +2024-08-03 21:07:43,394 INFO [train.py:1114] (0/4) Epoch 18, batch 1250, loss[loss=0.1886, simple_loss=0.2738, pruned_loss=0.0517, over 13441.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2723, pruned_loss=0.04946, over 2628480.37 frames. ], batch size: 42, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:07:55,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231535.33333333334, ans=0.1 +2024-08-03 21:08:00,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=231535.33333333334, ans=0.125 +2024-08-03 21:08:04,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=231572.0, ans=0.2 +2024-08-03 21:08:13,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=231572.0, ans=0.2 +2024-08-03 21:08:23,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231608.66666666666, ans=0.125 +2024-08-03 21:08:31,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=231645.33333333334, ans=0.5 +2024-08-03 21:08:34,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231645.33333333334, ans=0.0 +2024-08-03 21:08:36,240 INFO [train.py:1114] (0/4) Epoch 18, batch 1300, loss[loss=0.1821, simple_loss=0.2701, pruned_loss=0.04704, over 12860.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2721, pruned_loss=0.04955, over 2630413.75 frames. 
], batch size: 52, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:08:44,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=231718.66666666666, ans=0.2 +2024-08-03 21:08:44,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=231718.66666666666, ans=0.0 +2024-08-03 21:08:58,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=231755.33333333334, ans=0.125 +2024-08-03 21:09:09,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=231792.0, ans=0.2 +2024-08-03 21:09:10,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=231792.0, ans=0.0 +2024-08-03 21:09:19,126 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.689e+01 1.086e+02 1.232e+02 1.465e+02 2.299e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 21:09:21,780 INFO [train.py:1114] (0/4) Epoch 18, batch 1350, loss[loss=0.1767, simple_loss=0.2661, pruned_loss=0.04361, over 13553.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2713, pruned_loss=0.04897, over 2638534.22 frames. ], batch size: 37, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:09:22,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=231865.33333333334, ans=0.1 +2024-08-03 21:09:56,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.35 vs. limit=22.5 +2024-08-03 21:10:07,024 INFO [train.py:1114] (0/4) Epoch 18, batch 1400, loss[loss=0.1723, simple_loss=0.2547, pruned_loss=0.04498, over 13259.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2713, pruned_loss=0.04907, over 2642158.20 frames. ], batch size: 31, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:14,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.20 vs. limit=15.0 +2024-08-03 21:10:18,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=232085.33333333334, ans=0.0 +2024-08-03 21:10:30,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=232122.0, ans=0.125 +2024-08-03 21:10:43,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232195.33333333334, ans=0.125 +2024-08-03 21:10:45,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232195.33333333334, ans=0.125 +2024-08-03 21:10:49,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=15.0 +2024-08-03 21:10:49,666 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.779e+01 1.159e+02 1.340e+02 1.703e+02 2.344e+02, threshold=2.679e+02, percent-clipped=0.0 +2024-08-03 21:10:52,379 INFO [train.py:1114] (0/4) Epoch 18, batch 1450, loss[loss=0.2129, simple_loss=0.3005, pruned_loss=0.06262, over 13421.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2722, pruned_loss=0.04955, over 2641190.82 frames. 
], batch size: 43, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:11:05,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=232268.66666666666, ans=0.2 +2024-08-03 21:11:20,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=232342.0, ans=0.0 +2024-08-03 21:11:31,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.78 vs. limit=15.0 +2024-08-03 21:11:33,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.91 vs. limit=22.5 +2024-08-03 21:11:35,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0 +2024-08-03 21:11:36,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=232378.66666666666, ans=0.04949747468305833 +2024-08-03 21:11:40,701 INFO [train.py:1114] (0/4) Epoch 18, batch 1500, loss[loss=0.1787, simple_loss=0.2761, pruned_loss=0.04063, over 13395.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2725, pruned_loss=0.04953, over 2640412.45 frames. ], batch size: 39, lr: 6.93e-03, grad_scale: 16.0 +2024-08-03 21:11:42,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=232415.33333333334, ans=0.0 +2024-08-03 21:11:52,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=232452.0, ans=0.0 +2024-08-03 21:11:59,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=232452.0, ans=0.125 +2024-08-03 21:12:01,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=232452.0, ans=0.0 +2024-08-03 21:12:28,171 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.098e+02 1.405e+02 1.700e+02 3.079e+02, threshold=2.810e+02, percent-clipped=1.0 +2024-08-03 21:12:29,980 INFO [train.py:1114] (0/4) Epoch 18, batch 1550, loss[loss=0.1833, simple_loss=0.2648, pruned_loss=0.05085, over 13399.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2716, pruned_loss=0.04914, over 2630682.48 frames. 
], batch size: 41, lr: 6.92e-03, grad_scale: 16.0 +2024-08-03 21:12:41,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=232635.33333333334, ans=0.125 +2024-08-03 21:12:43,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=232635.33333333334, ans=0.0 +2024-08-03 21:12:44,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=232635.33333333334, ans=0.2 +2024-08-03 21:12:50,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=232672.0, ans=0.125 +2024-08-03 21:12:53,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=232672.0, ans=0.125 +2024-08-03 21:12:56,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=232708.66666666666, ans=0.0 +2024-08-03 21:13:05,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.94 vs. limit=15.0 +2024-08-03 21:13:15,286 INFO [train.py:1114] (0/4) Epoch 18, batch 1600, loss[loss=0.2048, simple_loss=0.2911, pruned_loss=0.05927, over 13563.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2715, pruned_loss=0.04936, over 2623368.89 frames. ], batch size: 39, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:13:18,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=232782.0, ans=0.0 +2024-08-03 21:13:20,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=232782.0, ans=0.125 +2024-08-03 21:13:20,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232782.0, ans=0.1 +2024-08-03 21:13:37,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232855.33333333334, ans=0.125 +2024-08-03 21:13:37,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232855.33333333334, ans=0.1 +2024-08-03 21:13:38,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=232855.33333333334, ans=10.0 +2024-08-03 21:13:39,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=232855.33333333334, ans=0.125 +2024-08-03 21:13:40,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=232855.33333333334, ans=0.2 +2024-08-03 21:13:41,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=15.0 +2024-08-03 21:13:42,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.64 vs. 
limit=5.0 +2024-08-03 21:13:49,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232892.0, ans=0.1 +2024-08-03 21:13:57,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=232928.66666666666, ans=0.2 +2024-08-03 21:14:01,391 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.392e+01 1.109e+02 1.247e+02 1.559e+02 3.003e+02, threshold=2.495e+02, percent-clipped=1.0 +2024-08-03 21:14:03,132 INFO [train.py:1114] (0/4) Epoch 18, batch 1650, loss[loss=0.178, simple_loss=0.2724, pruned_loss=0.04185, over 13309.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.04955, over 2620241.90 frames. ], batch size: 40, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:14:09,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232965.33333333334, ans=0.125 +2024-08-03 21:14:23,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.11 vs. limit=15.0 +2024-08-03 21:14:23,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=233038.66666666666, ans=0.125 +2024-08-03 21:14:47,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233148.66666666666, ans=0.1 +2024-08-03 21:14:48,608 INFO [train.py:1114] (0/4) Epoch 18, batch 1700, loss[loss=0.1772, simple_loss=0.2547, pruned_loss=0.04985, over 13239.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2719, pruned_loss=0.04971, over 2629438.12 frames. ], batch size: 31, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:15:10,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233222.0, ans=0.125 +2024-08-03 21:15:11,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=233222.0, ans=0.125 +2024-08-03 21:15:18,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=233222.0, ans=0.2 +2024-08-03 21:15:23,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=233258.66666666666, ans=0.2 +2024-08-03 21:15:36,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=233295.33333333334, ans=0.2 +2024-08-03 21:15:41,602 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.564e+01 1.087e+02 1.250e+02 1.627e+02 3.379e+02, threshold=2.500e+02, percent-clipped=6.0 +2024-08-03 21:15:42,578 INFO [train.py:1114] (0/4) Epoch 18, batch 1750, loss[loss=0.1821, simple_loss=0.2662, pruned_loss=0.04898, over 13526.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2717, pruned_loss=0.04962, over 2633385.00 frames. 
], batch size: 31, lr: 6.91e-03, grad_scale: 16.0
+2024-08-03 21:15:55,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=233368.66666666666, ans=0.0
+2024-08-03 21:15:56,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=233368.66666666666, ans=0.0
+2024-08-03 21:16:01,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=233405.33333333334, ans=0.125
+2024-08-03 21:16:02,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=233405.33333333334, ans=0.95
+2024-08-03 21:16:03,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0
+2024-08-03 21:16:04,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=233405.33333333334, ans=0.05
+2024-08-03 21:16:04,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233405.33333333334, ans=0.125
+2024-08-03 21:16:14,304 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.27 vs. limit=15.0
+2024-08-03 21:16:28,003 INFO [train.py:1114] (0/4) Epoch 18, batch 1800, loss[loss=0.2036, simple_loss=0.296, pruned_loss=0.05555, over 13553.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2721, pruned_loss=0.04975, over 2634325.75 frames. ], batch size: 38, lr: 6.91e-03, grad_scale: 16.0
+2024-08-03 21:16:30,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.22 vs. limit=12.0
+2024-08-03 21:16:40,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=233552.0, ans=0.09899494936611666
+2024-08-03 21:16:43,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233552.0, ans=0.1
+2024-08-03 21:16:45,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.41 vs. limit=15.0
+2024-08-03 21:16:51,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.61 vs. limit=22.5
+2024-08-03 21:16:54,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0
+2024-08-03 21:16:55,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=233625.33333333334, ans=0.95
+2024-08-03 21:16:57,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=233625.33333333334, ans=0.125
+2024-08-03 21:16:58,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=233625.33333333334, ans=0.125
+2024-08-03 21:17:12,819 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.188e+02 1.406e+02 1.892e+02 3.223e+02, threshold=2.812e+02, percent-clipped=8.0
+2024-08-03 21:17:13,770 INFO [train.py:1114] (0/4) Epoch 18, batch 1850, loss[loss=0.195, simple_loss=0.2873, pruned_loss=0.05137, over 13393.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2717, pruned_loss=0.04955, over 2636786.24 frames. ], batch size: 39, lr: 6.91e-03, grad_scale: 16.0
+2024-08-03 21:17:14,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233698.66666666666, ans=0.125
+2024-08-03 21:17:29,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=233735.33333333334, ans=0.125
+2024-08-03 21:17:41,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=233808.66666666666, ans=0.125
+2024-08-03 21:17:44,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=233808.66666666666, ans=0.025
+2024-08-03 21:17:45,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=233808.66666666666, ans=0.2
+2024-08-03 21:17:49,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=233845.33333333334, ans=0.125
+2024-08-03 21:17:49,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=233845.33333333334, ans=0.0
+2024-08-03 21:17:58,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=233882.0, ans=0.2
+2024-08-03 21:17:59,037 INFO [train.py:1114] (0/4) Epoch 18, batch 1900, loss[loss=0.22, simple_loss=0.3037, pruned_loss=0.06819, over 13300.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2726, pruned_loss=0.04997, over 2639849.11 frames. ], batch size: 40, lr: 6.91e-03, grad_scale: 16.0
+2024-08-03 21:17:59,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=233882.0, ans=0.07
+2024-08-03 21:18:01,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233882.0, ans=0.1
+2024-08-03 21:18:18,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=233955.33333333334, ans=0.125
+2024-08-03 21:18:20,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233955.33333333334, ans=0.125
+2024-08-03 21:18:38,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=234028.66666666666, ans=0.125
+2024-08-03 21:18:45,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234028.66666666666, ans=0.0
+2024-08-03 21:18:48,036 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.661e+01 1.108e+02 1.336e+02 1.650e+02 2.713e+02, threshold=2.672e+02, percent-clipped=0.0
+2024-08-03 21:18:48,996 INFO [train.py:1114] (0/4) Epoch 18, batch 1950, loss[loss=0.1649, simple_loss=0.2539, pruned_loss=0.03798, over 13564.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2737, pruned_loss=0.05033, over 2646897.53 frames. ], batch size: 36, lr: 6.90e-03, grad_scale: 16.0
+2024-08-03 21:19:05,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=234102.0, ans=0.0
+2024-08-03 21:19:07,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234102.0, ans=0.125
+2024-08-03 21:19:12,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234138.66666666666, ans=0.1
+2024-08-03 21:19:21,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=12.0
+2024-08-03 21:19:43,526 INFO [train.py:1114] (0/4) Epoch 18, batch 2000, loss[loss=0.1653, simple_loss=0.2394, pruned_loss=0.04557, over 13546.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.274, pruned_loss=0.05036, over 2636743.67 frames. ], batch size: 31, lr: 6.90e-03, grad_scale: 32.0
+2024-08-03 21:20:11,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=234358.66666666666, ans=0.125
+2024-08-03 21:20:18,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234358.66666666666, ans=0.1
+2024-08-03 21:20:28,746 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.785e+01 1.141e+02 1.284e+02 1.591e+02 2.253e+02, threshold=2.569e+02, percent-clipped=0.0
+2024-08-03 21:20:29,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=234432.0, ans=0.0
+2024-08-03 21:20:29,731 INFO [train.py:1114] (0/4) Epoch 18, batch 2050, loss[loss=0.1693, simple_loss=0.2466, pruned_loss=0.04605, over 13427.00 frames. ], tot_loss[loss=0.187, simple_loss=0.273, pruned_loss=0.05052, over 2633771.92 frames. ], batch size: 32, lr: 6.90e-03, grad_scale: 32.0
+2024-08-03 21:20:45,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.88 vs. limit=15.0
+2024-08-03 21:21:06,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=234578.66666666666, ans=0.1
+2024-08-03 21:21:14,940 INFO [train.py:1114] (0/4) Epoch 18, batch 2100, loss[loss=0.1608, simple_loss=0.2519, pruned_loss=0.03483, over 13551.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2723, pruned_loss=0.0501, over 2639439.73 frames. ], batch size: 37, lr: 6.89e-03, grad_scale: 32.0
+2024-08-03 21:21:15,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=234615.33333333334, ans=0.04949747468305833
+2024-08-03 21:21:20,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234615.33333333334, ans=0.0
+2024-08-03 21:21:24,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=234652.0, ans=0.125
+2024-08-03 21:21:27,003 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-64000.pt
+2024-08-03 21:21:31,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=234652.0, ans=0.09899494936611666
+2024-08-03 21:21:47,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=15.0
+2024-08-03 21:21:48,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=234725.33333333334, ans=0.125
+2024-08-03 21:21:52,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=234762.0, ans=0.125
+2024-08-03 21:21:54,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.89 vs. limit=6.0
+2024-08-03 21:21:57,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234762.0, ans=0.125
+2024-08-03 21:21:59,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=234762.0, ans=0.0
+2024-08-03 21:22:01,495 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.081e+02 1.198e+02 1.435e+02 3.177e+02, threshold=2.396e+02, percent-clipped=1.0
+2024-08-03 21:22:02,389 INFO [train.py:1114] (0/4) Epoch 18, batch 2150, loss[loss=0.1647, simple_loss=0.2573, pruned_loss=0.0361, over 13563.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2713, pruned_loss=0.04961, over 2647750.40 frames. ], batch size: 36, lr: 6.89e-03, grad_scale: 32.0
+2024-08-03 21:22:15,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=234835.33333333334, ans=0.09899494936611666
+2024-08-03 21:22:15,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=234835.33333333334, ans=0.0
+2024-08-03 21:22:18,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=234835.33333333334, ans=0.125
+2024-08-03 21:22:18,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=234835.33333333334, ans=0.125
+2024-08-03 21:22:23,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=234872.0, ans=0.125
+2024-08-03 21:22:30,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=234872.0, ans=0.125
+2024-08-03 21:22:34,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=234908.66666666666, ans=0.95
+2024-08-03 21:22:34,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=234908.66666666666, ans=0.2
+2024-08-03 21:22:35,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=234908.66666666666, ans=0.0
+2024-08-03 21:22:45,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.00 vs. limit=15.0
+2024-08-03 21:22:48,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234945.33333333334, ans=0.125
+2024-08-03 21:22:52,474 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:22:54,825 INFO [train.py:1114] (0/4) Epoch 18, batch 2200, loss[loss=0.1854, simple_loss=0.2847, pruned_loss=0.04309, over 13399.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2713, pruned_loss=0.04933, over 2644294.17 frames. ], batch size: 39, lr: 6.89e-03, grad_scale: 16.0
+2024-08-03 21:23:01,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=234982.0, ans=0.125
+2024-08-03 21:23:15,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=235055.33333333334, ans=0.0
+2024-08-03 21:23:27,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=235092.0, ans=0.0
+2024-08-03 21:23:28,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.09 vs. limit=12.0
+2024-08-03 21:23:30,951 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.53 vs. limit=22.5
+2024-08-03 21:23:40,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.106e+01 1.127e+02 1.422e+02 1.837e+02 2.804e+02, threshold=2.844e+02, percent-clipped=6.0
+2024-08-03 21:23:40,485 INFO [train.py:1114] (0/4) Epoch 18, batch 2250, loss[loss=0.1734, simple_loss=0.2636, pruned_loss=0.04157, over 13362.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2707, pruned_loss=0.04907, over 2642079.93 frames. ], batch size: 37, lr: 6.89e-03, grad_scale: 16.0
+2024-08-03 21:23:40,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235165.33333333334, ans=0.1
+2024-08-03 21:24:23,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.45 vs. limit=22.5
+2024-08-03 21:24:25,634 INFO [train.py:1114] (0/4) Epoch 18, batch 2300, loss[loss=0.1709, simple_loss=0.2523, pruned_loss=0.04474, over 13578.00 frames. ], tot_loss[loss=0.183, simple_loss=0.269, pruned_loss=0.04847, over 2637850.02 frames. ], batch size: 33, lr: 6.88e-03, grad_scale: 16.0
+2024-08-03 21:24:26,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235348.66666666666, ans=0.1
+2024-08-03 21:24:29,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=235348.66666666666, ans=0.125
+2024-08-03 21:24:45,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235422.0, ans=0.125
+2024-08-03 21:25:10,864 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.665e+01 1.085e+02 1.233e+02 1.641e+02 2.605e+02, threshold=2.466e+02, percent-clipped=0.0
+2024-08-03 21:25:10,901 INFO [train.py:1114] (0/4) Epoch 18, batch 2350, loss[loss=0.1925, simple_loss=0.28, pruned_loss=0.05252, over 13549.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2698, pruned_loss=0.04895, over 2641007.07 frames. ], batch size: 38, lr: 6.88e-03, grad_scale: 16.0
+2024-08-03 21:25:16,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=235532.0, ans=0.125
+2024-08-03 21:25:19,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235568.66666666666, ans=0.1
+2024-08-03 21:25:47,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=235678.66666666666, ans=0.125
+2024-08-03 21:25:52,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=235678.66666666666, ans=0.0
+2024-08-03 21:25:57,726 INFO [train.py:1114] (0/4) Epoch 18, batch 2400, loss[loss=0.1662, simple_loss=0.2617, pruned_loss=0.03534, over 13543.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2701, pruned_loss=0.04883, over 2642397.56 frames. ], batch size: 35, lr: 6.88e-03, grad_scale: 32.0
+2024-08-03 21:26:01,684 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.93 vs. limit=15.0
+2024-08-03 21:26:18,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=235752.0, ans=0.2
+2024-08-03 21:26:21,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=15.0
+2024-08-03 21:26:24,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=235788.66666666666, ans=0.0
+2024-08-03 21:26:33,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=235825.33333333334, ans=0.125
+2024-08-03 21:26:35,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235825.33333333334, ans=0.0
+2024-08-03 21:26:40,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=235862.0, ans=0.125
+2024-08-03 21:26:43,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=235862.0, ans=0.0
+2024-08-03 21:26:48,965 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.162e+01 1.095e+02 1.213e+02 1.558e+02 2.561e+02, threshold=2.426e+02, percent-clipped=1.0
+2024-08-03 21:26:49,002 INFO [train.py:1114] (0/4) Epoch 18, batch 2450, loss[loss=0.1874, simple_loss=0.2794, pruned_loss=0.04774, over 13350.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.0495, over 2632986.37 frames. ], batch size: 37, lr: 6.88e-03, grad_scale: 32.0
+2024-08-03 21:26:50,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=235898.66666666666, ans=0.125
+2024-08-03 21:27:05,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=235935.33333333334, ans=0.125
+2024-08-03 21:27:16,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=236008.66666666666, ans=0.125
+2024-08-03 21:27:19,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=236008.66666666666, ans=0.0
+2024-08-03 21:27:20,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=236008.66666666666, ans=0.125
+2024-08-03 21:27:26,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.10 vs. limit=10.0
+2024-08-03 21:27:34,628 INFO [train.py:1114] (0/4) Epoch 18, batch 2500, loss[loss=0.1984, simple_loss=0.2847, pruned_loss=0.05605, over 13401.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2712, pruned_loss=0.04927, over 2636871.84 frames. ], batch size: 39, lr: 6.87e-03, grad_scale: 32.0
+2024-08-03 21:27:43,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=236118.66666666666, ans=0.2
+2024-08-03 21:27:47,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=236118.66666666666, ans=0.125
+2024-08-03 21:28:06,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=236192.0, ans=0.04949747468305833
+2024-08-03 21:28:09,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=236228.66666666666, ans=0.0
+2024-08-03 21:28:19,329 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.154e+02 1.291e+02 1.813e+02 3.422e+02, threshold=2.583e+02, percent-clipped=8.0
+2024-08-03 21:28:19,382 INFO [train.py:1114] (0/4) Epoch 18, batch 2550, loss[loss=0.1667, simple_loss=0.245, pruned_loss=0.04424, over 13545.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2709, pruned_loss=0.04928, over 2638641.77 frames. ], batch size: 31, lr: 6.87e-03, grad_scale: 32.0
+2024-08-03 21:28:27,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.33 vs. limit=22.5
+2024-08-03 21:28:38,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=236338.66666666666, ans=0.125
+2024-08-03 21:28:41,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=236338.66666666666, ans=0.125
+2024-08-03 21:28:58,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=236412.0, ans=0.2
+2024-08-03 21:29:03,075 INFO [train.py:1114] (0/4) Epoch 18, batch 2600, loss[loss=0.1852, simple_loss=0.2647, pruned_loss=0.05283, over 13572.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2711, pruned_loss=0.04913, over 2637067.62 frames. ], batch size: 36, lr: 6.87e-03, grad_scale: 16.0
+2024-08-03 21:29:04,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=236448.66666666666, ans=10.0
+2024-08-03 21:29:16,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0
+2024-08-03 21:29:32,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=236558.66666666666, ans=0.125
+2024-08-03 21:29:33,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236558.66666666666, ans=0.1
+2024-08-03 21:29:47,196 INFO [train.py:1114] (0/4) Epoch 18, batch 2650, loss[loss=0.1967, simple_loss=0.2794, pruned_loss=0.05704, over 13300.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2712, pruned_loss=0.04934, over 2639848.19 frames. ], batch size: 46, lr: 6.87e-03, grad_scale: 16.0
+2024-08-03 21:29:48,045 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.186e+01 1.172e+02 1.315e+02 1.569e+02 3.387e+02, threshold=2.631e+02, percent-clipped=2.0
+2024-08-03 21:30:20,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236742.0, ans=0.125
+2024-08-03 21:30:20,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=236778.66666666666, ans=0.125
+2024-08-03 21:30:30,374 INFO [train.py:1114] (0/4) Epoch 18, batch 2700, loss[loss=0.2101, simple_loss=0.2909, pruned_loss=0.0647, over 13549.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2717, pruned_loss=0.0493, over 2636424.99 frames. ], batch size: 40, lr: 6.86e-03, grad_scale: 16.0
+2024-08-03 21:30:32,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=236815.33333333334, ans=0.125
+2024-08-03 21:30:39,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=236852.0, ans=0.035
+2024-08-03 21:30:44,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.99 vs. limit=12.0
+2024-08-03 21:30:45,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236852.0, ans=0.125
+2024-08-03 21:30:47,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236888.66666666666, ans=0.0
+2024-08-03 21:31:00,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=236925.33333333334, ans=0.125
+2024-08-03 21:31:05,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0
+2024-08-03 21:31:13,959 INFO [train.py:1114] (0/4) Epoch 18, batch 2750, loss[loss=0.1717, simple_loss=0.2582, pruned_loss=0.0426, over 13323.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2696, pruned_loss=0.04807, over 2633813.48 frames. ], batch size: 34, lr: 6.86e-03, grad_scale: 16.0
+2024-08-03 21:31:14,723 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.152e+02 1.311e+02 1.647e+02 2.709e+02, threshold=2.622e+02, percent-clipped=1.0
+2024-08-03 21:31:16,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=236998.66666666666, ans=0.09899494936611666
+2024-08-03 21:31:31,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=237072.0, ans=0.05
+2024-08-03 21:31:36,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.99 vs. limit=10.0
+2024-08-03 21:31:44,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=237108.66666666666, ans=0.125
+2024-08-03 21:31:44,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=237108.66666666666, ans=0.0
+2024-08-03 21:31:46,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=237108.66666666666, ans=0.125
+2024-08-03 21:31:50,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=237145.33333333334, ans=0.0
+2024-08-03 21:31:57,331 INFO [train.py:1114] (0/4) Epoch 18, batch 2800, loss[loss=0.2093, simple_loss=0.2946, pruned_loss=0.06205, over 9202.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2698, pruned_loss=0.04848, over 2625574.44 frames. ], batch size: 97, lr: 6.86e-03, grad_scale: 16.0
+2024-08-03 21:31:58,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=237182.0, ans=0.0
+2024-08-03 21:32:34,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237328.66666666666, ans=0.125
+2024-08-03 21:32:41,594 INFO [train.py:1114] (0/4) Epoch 18, batch 2850, loss[loss=0.1571, simple_loss=0.2467, pruned_loss=0.03378, over 13377.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2707, pruned_loss=0.049, over 2620336.03 frames. ], batch size: 35, lr: 6.86e-03, grad_scale: 16.0
+2024-08-03 21:32:43,245 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.041e+01 1.131e+02 1.338e+02 1.690e+02 3.058e+02, threshold=2.676e+02, percent-clipped=5.0
+2024-08-03 21:32:51,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.29 vs. limit=15.0
+2024-08-03 21:33:00,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.29 vs. limit=12.0
+2024-08-03 21:33:01,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=237438.66666666666, ans=0.0
+2024-08-03 21:33:04,163 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:33:09,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.77 vs. limit=6.0
+2024-08-03 21:33:14,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0
+2024-08-03 21:33:21,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=237512.0, ans=0.95
+2024-08-03 21:33:22,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=237512.0, ans=0.0
+2024-08-03 21:33:28,476 INFO [train.py:1114] (0/4) Epoch 18, batch 2900, loss[loss=0.1817, simple_loss=0.2667, pruned_loss=0.04838, over 13353.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2721, pruned_loss=0.04952, over 2631052.95 frames. ], batch size: 36, lr: 6.85e-03, grad_scale: 16.0
+2024-08-03 21:33:29,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237548.66666666666, ans=0.1
+2024-08-03 21:33:34,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=237548.66666666666, ans=0.125
+2024-08-03 21:34:03,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=237695.33333333334, ans=0.0
+2024-08-03 21:34:11,760 INFO [train.py:1114] (0/4) Epoch 18, batch 2950, loss[loss=0.178, simple_loss=0.2597, pruned_loss=0.04818, over 13349.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2711, pruned_loss=0.04954, over 2629049.79 frames. ], batch size: 34, lr: 6.85e-03, grad_scale: 16.0
+2024-08-03 21:34:13,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.218e+01 1.166e+02 1.489e+02 1.763e+02 2.783e+02, threshold=2.978e+02, percent-clipped=2.0
+2024-08-03 21:34:14,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=237732.0, ans=0.125
+2024-08-03 21:34:22,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=237768.66666666666, ans=0.0
+2024-08-03 21:34:25,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=237768.66666666666, ans=0.125
+2024-08-03 21:34:26,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=237768.66666666666, ans=0.025
+2024-08-03 21:34:28,237 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:34:34,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=237805.33333333334, ans=0.0
+2024-08-03 21:34:39,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237842.0, ans=0.125
+2024-08-03 21:34:51,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=237878.66666666666, ans=0.0
+2024-08-03 21:34:54,890 INFO [train.py:1114] (0/4) Epoch 18, batch 3000, loss[loss=0.1622, simple_loss=0.2463, pruned_loss=0.039, over 13555.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2712, pruned_loss=0.04942, over 2628847.98 frames. ], batch size: 37, lr: 6.85e-03, grad_scale: 16.0
+2024-08-03 21:34:54,891 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 21:34:59,271 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.5569, 4.9883, 5.1535, 5.0810], device='cuda:0')
+2024-08-03 21:35:04,729 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1701, simple_loss=0.269, pruned_loss=0.03557, over 944034.00 frames.
+2024-08-03 21:35:04,730 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 21:35:09,738 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.27 vs. limit=15.0
+2024-08-03 21:35:23,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=237988.66666666666, ans=0.025
+2024-08-03 21:35:35,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=238025.33333333334, ans=0.07
+2024-08-03 21:35:36,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=238025.33333333334, ans=0.125
+2024-08-03 21:35:48,172 INFO [train.py:1114] (0/4) Epoch 18, batch 3050, loss[loss=0.1675, simple_loss=0.2533, pruned_loss=0.04086, over 13536.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2715, pruned_loss=0.04941, over 2625590.70 frames. ], batch size: 35, lr: 6.84e-03, grad_scale: 16.0
+2024-08-03 21:35:49,911 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.482e+01 1.049e+02 1.161e+02 1.346e+02 2.617e+02, threshold=2.322e+02, percent-clipped=0.0
+2024-08-03 21:36:17,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=238208.66666666666, ans=0.02
+2024-08-03 21:36:26,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.28 vs. limit=22.5
+2024-08-03 21:36:31,289 INFO [train.py:1114] (0/4) Epoch 18, batch 3100, loss[loss=0.2103, simple_loss=0.2968, pruned_loss=0.0619, over 13316.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2713, pruned_loss=0.04946, over 2626335.93 frames. ], batch size: 46, lr: 6.84e-03, grad_scale: 16.0
+2024-08-03 21:36:35,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.73 vs. limit=15.0
+2024-08-03 21:36:42,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=238318.66666666666, ans=0.125
+2024-08-03 21:36:44,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=238318.66666666666, ans=0.5
+2024-08-03 21:37:07,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=238428.66666666666, ans=0.125
+2024-08-03 21:37:15,552 INFO [train.py:1114] (0/4) Epoch 18, batch 3150, loss[loss=0.1871, simple_loss=0.2778, pruned_loss=0.0482, over 12986.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2716, pruned_loss=0.04955, over 2628296.15 frames. ], batch size: 48, lr: 6.84e-03, grad_scale: 16.0
+2024-08-03 21:37:17,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.157e+01 1.121e+02 1.376e+02 1.775e+02 3.223e+02, threshold=2.752e+02, percent-clipped=7.0
+2024-08-03 21:37:27,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.64 vs. limit=15.0
+2024-08-03 21:37:29,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=15.0
+2024-08-03 21:37:53,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.33 vs. limit=15.0
+2024-08-03 21:37:58,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=238648.66666666666, ans=0.2
+2024-08-03 21:37:58,769 INFO [train.py:1114] (0/4) Epoch 18, batch 3200, loss[loss=0.1942, simple_loss=0.2817, pruned_loss=0.05337, over 13550.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.04954, over 2634323.77 frames. ], batch size: 37, lr: 6.84e-03, grad_scale: 32.0
+2024-08-03 21:38:01,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=238648.66666666666, ans=0.0
+2024-08-03 21:38:06,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238685.33333333334, ans=0.125
+2024-08-03 21:38:10,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.62 vs. limit=15.0
+2024-08-03 21:38:18,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238722.0, ans=0.125
+2024-08-03 21:38:22,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238722.0, ans=0.1
+2024-08-03 21:38:40,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=238795.33333333334, ans=0.125
+2024-08-03 21:38:43,454 INFO [train.py:1114] (0/4) Epoch 18, batch 3250, loss[loss=0.1973, simple_loss=0.2863, pruned_loss=0.0542, over 13379.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2726, pruned_loss=0.04968, over 2638335.99 frames. ], batch size: 38, lr: 6.83e-03, grad_scale: 32.0
+2024-08-03 21:38:45,173 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.129e+02 1.271e+02 1.537e+02 2.545e+02, threshold=2.542e+02, percent-clipped=0.0
+2024-08-03 21:39:04,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=238905.33333333334, ans=0.125
+2024-08-03 21:39:04,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=238905.33333333334, ans=0.0
+2024-08-03 21:39:11,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=12.0
+2024-08-03 21:39:11,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=238942.0, ans=0.0
+2024-08-03 21:39:20,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=238978.66666666666, ans=0.0
+2024-08-03 21:39:22,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=238978.66666666666, ans=10.0
+2024-08-03 21:39:27,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=239015.33333333334, ans=0.125
+2024-08-03 21:39:27,912 INFO [train.py:1114] (0/4) Epoch 18, batch 3300, loss[loss=0.2016, simple_loss=0.2886, pruned_loss=0.05727, over 12967.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2714, pruned_loss=0.04942, over 2640017.92 frames. ], batch size: 52, lr: 6.83e-03, grad_scale: 32.0
+2024-08-03 21:39:45,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239088.66666666666, ans=0.1
+2024-08-03 21:39:48,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=239088.66666666666, ans=0.2
+2024-08-03 21:39:51,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=239088.66666666666, ans=0.125
+2024-08-03 21:40:10,663 INFO [train.py:1114] (0/4) Epoch 18, batch 3350, loss[loss=0.202, simple_loss=0.2918, pruned_loss=0.05605, over 13007.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2722, pruned_loss=0.04948, over 2629418.89 frames. ], batch size: 48, lr: 6.83e-03, grad_scale: 32.0
+2024-08-03 21:40:12,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.013e+01 1.097e+02 1.292e+02 1.574e+02 2.403e+02, threshold=2.585e+02, percent-clipped=0.0
+2024-08-03 21:40:17,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=239198.66666666666, ans=0.0
+2024-08-03 21:40:35,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.80 vs. limit=6.0
+2024-08-03 21:40:37,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239308.66666666666, ans=0.0
+2024-08-03 21:40:50,056 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:40:52,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239382.0, ans=0.125
+2024-08-03 21:40:52,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=239382.0, ans=0.0
+2024-08-03 21:40:53,415 INFO [train.py:1114] (0/4) Epoch 18, batch 3400, loss[loss=0.1775, simple_loss=0.2515, pruned_loss=0.05174, over 13545.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2717, pruned_loss=0.04929, over 2625981.01 frames. ], batch size: 31, lr: 6.83e-03, grad_scale: 16.0
+2024-08-03 21:40:53,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=239382.0, ans=0.125
+2024-08-03 21:40:59,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=239382.0, ans=0.125
+2024-08-03 21:41:09,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239455.33333333334, ans=0.1
+2024-08-03 21:41:15,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.10 vs. limit=15.0
+2024-08-03 21:41:25,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239492.0, ans=0.1
+2024-08-03 21:41:36,136 INFO [train.py:1114] (0/4) Epoch 18, batch 3450, loss[loss=0.1972, simple_loss=0.288, pruned_loss=0.05317, over 12867.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2719, pruned_loss=0.04923, over 2629695.04 frames. ], batch size: 52, lr: 6.82e-03, grad_scale: 16.0
+2024-08-03 21:41:37,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=239565.33333333334, ans=0.0
+2024-08-03 21:41:38,577 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.763e+01 1.067e+02 1.315e+02 1.546e+02 2.791e+02, threshold=2.630e+02, percent-clipped=1.0
+2024-08-03 21:41:39,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.89 vs. limit=15.0
+2024-08-03 21:41:43,780 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.600e-03
+2024-08-03 21:41:44,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239602.0, ans=0.1
+2024-08-03 21:41:45,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.80 vs. limit=15.0
+2024-08-03 21:41:45,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239602.0, ans=0.125
+2024-08-03 21:41:58,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=239638.66666666666, ans=0.05
+2024-08-03 21:42:08,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=12.0
+2024-08-03 21:42:10,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=239712.0, ans=0.0
+2024-08-03 21:42:12,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=239712.0, ans=0.0
+2024-08-03 21:42:18,555 INFO [train.py:1114] (0/4) Epoch 18, batch 3500, loss[loss=0.1633, simple_loss=0.2486, pruned_loss=0.03901, over 13516.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2711, pruned_loss=0.04928, over 2630793.08 frames. ], batch size: 34, lr: 6.82e-03, grad_scale: 16.0
+2024-08-03 21:42:19,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239748.66666666666, ans=0.125
+2024-08-03 21:42:23,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=239748.66666666666, ans=0.05
+2024-08-03 21:42:23,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0
+2024-08-03 21:42:24,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=239748.66666666666, ans=0.125
+2024-08-03 21:42:41,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=239822.0, ans=0.0
+2024-08-03 21:43:01,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=239895.33333333334, ans=0.07
+2024-08-03 21:43:02,817 INFO [train.py:1114] (0/4) Epoch 18, batch 3550, loss[loss=0.2144, simple_loss=0.3043, pruned_loss=0.06225, over 12402.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2741, pruned_loss=0.05061, over 2629304.42 frames. ], batch size: 58, lr: 6.82e-03, grad_scale: 16.0
+2024-08-03 21:43:03,003 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:43:05,280 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.515e+01 1.163e+02 1.331e+02 1.591e+02 2.731e+02, threshold=2.663e+02, percent-clipped=1.0
+2024-08-03 21:43:11,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=239968.66666666666, ans=0.0
+2024-08-03 21:43:15,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239968.66666666666, ans=0.1
+2024-08-03 21:43:19,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=239968.66666666666, ans=0.0
+2024-08-03 21:43:35,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240042.0, ans=0.1
+2024-08-03 21:43:47,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=240115.33333333334, ans=0.025
+2024-08-03 21:43:47,780 INFO [train.py:1114] (0/4) Epoch 18, batch 3600, loss[loss=0.217, simple_loss=0.2967, pruned_loss=0.06866, over 9485.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.278, pruned_loss=0.05407, over 2488661.12 frames. ], batch size: 96, lr: 6.82e-03, grad_scale: 32.0
+2024-08-03 21:44:21,702 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-18.pt
+2024-08-03 21:45:02,651 INFO [train.py:1114] (0/4) Epoch 19, batch 0, loss[loss=0.1775, simple_loss=0.2642, pruned_loss=0.04538, over 13353.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2642, pruned_loss=0.04538, over 13353.00 frames. ], batch size: 33, lr: 6.63e-03, grad_scale: 32.0
+2024-08-03 21:45:02,651 INFO [train.py:1137] (0/4) Computing validation loss
+2024-08-03 21:45:07,564 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0994, 3.1563, 3.0475, 1.8639], device='cuda:0')
+2024-08-03 21:45:13,155 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1699, simple_loss=0.2705, pruned_loss=0.03462, over 944034.00 frames.
+2024-08-03 21:45:13,175 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB
+2024-08-03 21:45:15,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.68 vs. limit=15.0
+2024-08-03 21:45:17,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=240262.0, ans=0.125
+2024-08-03 21:45:23,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240298.66666666666, ans=0.1
+2024-08-03 21:45:26,651 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.737e+01 1.210e+02 1.300e+02 1.388e+02 2.591e+02, threshold=2.600e+02, percent-clipped=0.0
+2024-08-03 21:45:39,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=240335.33333333334, ans=0.5
+2024-08-03 21:45:48,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240372.0, ans=0.125
+2024-08-03 21:45:49,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240372.0, ans=0.0
+2024-08-03 21:46:05,653 INFO [train.py:1114] (0/4) Epoch 19, batch 50, loss[loss=0.1579, simple_loss=0.2411, pruned_loss=0.03731, over 13453.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2768, pruned_loss=0.0526, over 579232.83 frames. ], batch size: 32, lr: 6.63e-03, grad_scale: 32.0
+2024-08-03 21:46:07,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=240445.33333333334, ans=0.125
+2024-08-03 21:46:11,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=240445.33333333334, ans=0.2
+2024-08-03 21:46:53,417 INFO [train.py:1114] (0/4) Epoch 19, batch 100, loss[loss=0.1662, simple_loss=0.2498, pruned_loss=0.04133, over 13521.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2743, pruned_loss=0.0499, over 1025710.05 frames. ], batch size: 35, lr: 6.62e-03, grad_scale: 32.0
+2024-08-03 21:46:57,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.06 vs. limit=10.0
+2024-08-03 21:47:00,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240628.66666666666, ans=0.125
+2024-08-03 21:47:05,126 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.117e+02 1.255e+02 1.420e+02 2.602e+02, threshold=2.511e+02, percent-clipped=1.0
+2024-08-03 21:47:08,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=240665.33333333334, ans=0.0
+2024-08-03 21:47:19,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=240738.66666666666, ans=0.07
+2024-08-03 21:47:22,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=240738.66666666666, ans=0.0
+2024-08-03 21:47:34,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=240775.33333333334, ans=0.2
+2024-08-03 21:47:35,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=240775.33333333334, ans=0.125
+2024-08-03 21:47:38,555 INFO [train.py:1114] (0/4) Epoch 19, batch 150, loss[loss=0.1533, simple_loss=0.232, pruned_loss=0.03727, over 13397.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2714, pruned_loss=0.04873, over 1386717.74 frames. ], batch size: 32, lr: 6.62e-03, grad_scale: 32.0
+2024-08-03 21:47:41,624 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.41 vs. limit=10.0
+2024-08-03 21:48:17,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240958.66666666666, ans=0.125
+2024-08-03 21:48:22,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=240958.66666666666, ans=0.125
+2024-08-03 21:48:26,391 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.89 vs. limit=15.0
+2024-08-03 21:48:26,773 INFO [train.py:1114] (0/4) Epoch 19, batch 200, loss[loss=0.1774, simple_loss=0.2709, pruned_loss=0.04197, over 12422.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2708, pruned_loss=0.04876, over 1665481.12 frames. ], batch size: 58, lr: 6.62e-03, grad_scale: 32.0
+2024-08-03 21:48:27,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240995.33333333334, ans=0.125
+2024-08-03 21:48:33,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=240995.33333333334, ans=0.0
+2024-08-03 21:48:38,535 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.918e+01 1.063e+02 1.213e+02 1.459e+02 3.041e+02, threshold=2.427e+02, percent-clipped=0.0
+2024-08-03 21:48:41,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.58 vs. limit=22.5
+2024-08-03 21:48:43,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. limit=10.0
+2024-08-03 21:48:46,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=241068.66666666666, ans=0.0
+2024-08-03 21:49:00,319 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:49:05,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.08 vs. limit=12.0
+2024-08-03 21:49:11,682 INFO [train.py:1114] (0/4) Epoch 19, batch 250, loss[loss=0.1975, simple_loss=0.2918, pruned_loss=0.05159, over 13341.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2711, pruned_loss=0.04882, over 1884202.56 frames. ], batch size: 46, lr: 6.62e-03, grad_scale: 32.0
+2024-08-03 21:49:14,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=241178.66666666666, ans=0.0
+2024-08-03 21:49:31,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.53 vs. limit=15.0
+2024-08-03 21:49:42,620 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 21:49:44,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=241288.66666666666, ans=0.2
+2024-08-03 21:49:45,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241288.66666666666, ans=0.125
+2024-08-03 21:49:58,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241325.33333333334, ans=0.1
+2024-08-03 21:49:59,623 INFO [train.py:1114] (0/4) Epoch 19, batch 300, loss[loss=0.1992, simple_loss=0.2857, pruned_loss=0.05633, over 13445.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2713, pruned_loss=0.049, over 2051998.23 frames. ], batch size: 42, lr: 6.61e-03, grad_scale: 32.0
+2024-08-03 21:50:06,400 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.56 vs. limit=15.0
+2024-08-03 21:50:10,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=241398.66666666666, ans=0.0
+2024-08-03 21:50:13,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.068e+01 1.095e+02 1.222e+02 1.449e+02 2.776e+02, threshold=2.445e+02, percent-clipped=4.0
+2024-08-03 21:50:23,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241435.33333333334, ans=0.125
+2024-08-03 21:50:27,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.30 vs. limit=10.0
+2024-08-03 21:50:38,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=241472.0, ans=0.0
+2024-08-03 21:50:40,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=241508.66666666666, ans=0.0
+2024-08-03 21:50:42,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=241508.66666666666, ans=0.125
+2024-08-03 21:50:49,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241545.33333333334, ans=0.125
+2024-08-03 21:50:52,446 INFO [train.py:1114] (0/4) Epoch 19, batch 350, loss[loss=0.1637, simple_loss=0.2457, pruned_loss=0.04081, over 13563.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2709, pruned_loss=0.0486, over 2182837.55 frames. ], batch size: 33, lr: 6.61e-03, grad_scale: 32.0
+2024-08-03 21:51:07,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241582.0, ans=0.1
+2024-08-03 21:51:27,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=241655.33333333334, ans=0.0
+2024-08-03 21:51:34,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=241692.0, ans=0.05
+2024-08-03 21:51:40,078 INFO [train.py:1114] (0/4) Epoch 19, batch 400, loss[loss=0.2077, simple_loss=0.2873, pruned_loss=0.06409, over 13361.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2698, pruned_loss=0.04824, over 2287070.83 frames. ], batch size: 37, lr: 6.61e-03, grad_scale: 32.0
+2024-08-03 21:51:52,136 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.083e+02 1.174e+02 1.521e+02 2.282e+02, threshold=2.347e+02, percent-clipped=0.0
+2024-08-03 21:51:56,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=241765.33333333334, ans=0.125
+2024-08-03 21:51:59,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241765.33333333334, ans=0.125
+2024-08-03 21:52:22,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241875.33333333334, ans=0.2
+2024-08-03 21:52:28,389 INFO [train.py:1114] (0/4) Epoch 19, batch 450, loss[loss=0.1601, simple_loss=0.256, pruned_loss=0.03216, over 13542.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2697, pruned_loss=0.04813, over 2359975.17 frames. ], batch size: 38, lr: 6.61e-03, grad_scale: 32.0
+2024-08-03 21:52:38,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=241948.66666666666, ans=0.025
+2024-08-03 21:52:40,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=241948.66666666666, ans=0.0
+2024-08-03 21:52:42,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241948.66666666666, ans=0.125
+2024-08-03 21:52:45,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=241948.66666666666, ans=0.0
+2024-08-03 21:52:50,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.75 vs. limit=22.5
+2024-08-03 21:52:51,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.05 vs. limit=15.0
+2024-08-03 21:52:56,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242022.0, ans=0.1
+2024-08-03 21:53:10,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=242058.66666666666, ans=0.125
+2024-08-03 21:53:16,398 INFO [train.py:1114] (0/4) Epoch 19, batch 500, loss[loss=0.2196, simple_loss=0.2995, pruned_loss=0.06982, over 13431.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2687, pruned_loss=0.04777, over 2425355.98 frames. ], batch size: 43, lr: 6.60e-03, grad_scale: 32.0
+2024-08-03 21:53:25,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242132.0, ans=0.1
+2024-08-03 21:53:27,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=242132.0, ans=0.0
+2024-08-03 21:53:29,123 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.780e+01 1.159e+02 1.379e+02 1.825e+02 3.055e+02, threshold=2.757e+02, percent-clipped=7.0
+2024-08-03 21:53:30,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.62 vs. limit=15.0
+2024-08-03 21:53:33,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=242132.0, ans=0.035
+2024-08-03 21:53:44,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=242205.33333333334, ans=0.125
+2024-08-03 21:53:50,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242205.33333333334, ans=0.125
+2024-08-03 21:54:01,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.61 vs. limit=5.0
+2024-08-03 21:54:03,930 INFO [train.py:1114] (0/4) Epoch 19, batch 550, loss[loss=0.18, simple_loss=0.2664, pruned_loss=0.04684, over 13031.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2689, pruned_loss=0.0478, over 2468632.46 frames. ], batch size: 48, lr: 6.60e-03, grad_scale: 16.0
+2024-08-03 21:54:05,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242278.66666666666, ans=0.1
+2024-08-03 21:54:06,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242278.66666666666, ans=0.125
+2024-08-03 21:54:10,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0
+2024-08-03 21:54:11,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=242278.66666666666, ans=10.0
+2024-08-03 21:54:24,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. limit=15.0
+2024-08-03 21:54:27,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=242352.0, ans=0.2
+2024-08-03 21:54:27,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=242352.0, ans=0.025
+2024-08-03 21:54:35,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=242388.66666666666, ans=10.0
+2024-08-03 21:54:44,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=242425.33333333334, ans=10.0
+2024-08-03 21:54:48,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242425.33333333334, ans=0.1
+2024-08-03 21:54:51,047 INFO [train.py:1114] (0/4) Epoch 19, batch 600, loss[loss=0.1992, simple_loss=0.2797, pruned_loss=0.05942, over 13319.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2687, pruned_loss=0.04785, over 2508571.07 frames. ], batch size: 46, lr: 6.60e-03, grad_scale: 16.0
+2024-08-03 21:55:00,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.64 vs. limit=15.0
+2024-08-03 21:55:00,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242498.66666666666, ans=0.1
+2024-08-03 21:55:01,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242498.66666666666, ans=0.1
+2024-08-03 21:55:03,311 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.087e+02 1.240e+02 1.431e+02 2.352e+02, threshold=2.480e+02, percent-clipped=0.0
+2024-08-03 21:55:13,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=242535.33333333334, ans=0.0
+2024-08-03 21:55:28,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=242608.66666666666, ans=0.0
+2024-08-03 21:55:31,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=242608.66666666666, ans=0.07
+2024-08-03 21:55:37,890 INFO [train.py:1114] (0/4) Epoch 19, batch 650, loss[loss=0.1863, simple_loss=0.2761, pruned_loss=0.04822, over 13545.00 frames.
], tot_loss[loss=0.1815, simple_loss=0.268, pruned_loss=0.04753, over 2544279.47 frames. ], batch size: 37, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:55:49,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0 +2024-08-03 21:55:56,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242682.0, ans=0.125 +2024-08-03 21:56:09,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=242755.33333333334, ans=0.125 +2024-08-03 21:56:10,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=242755.33333333334, ans=0.0 +2024-08-03 21:56:14,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=242755.33333333334, ans=0.125 +2024-08-03 21:56:25,339 INFO [train.py:1114] (0/4) Epoch 19, batch 700, loss[loss=0.1584, simple_loss=0.2519, pruned_loss=0.03243, over 13534.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2684, pruned_loss=0.04744, over 2565853.52 frames. ], batch size: 35, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:56:38,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0 +2024-08-03 21:56:38,421 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.287e+01 1.160e+02 1.383e+02 1.887e+02 3.094e+02, threshold=2.766e+02, percent-clipped=5.0 +2024-08-03 21:56:52,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242902.0, ans=0.125 +2024-08-03 21:56:57,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242938.66666666666, ans=0.1 +2024-08-03 21:57:13,153 INFO [train.py:1114] (0/4) Epoch 19, batch 750, loss[loss=0.1839, simple_loss=0.2795, pruned_loss=0.04419, over 13363.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2689, pruned_loss=0.04771, over 2582041.05 frames. ], batch size: 37, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:57:30,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-08-03 21:57:42,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=243122.0, ans=0.125 +2024-08-03 21:58:00,315 INFO [train.py:1114] (0/4) Epoch 19, batch 800, loss[loss=0.1797, simple_loss=0.2652, pruned_loss=0.04711, over 13351.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2686, pruned_loss=0.04777, over 2596456.52 frames. ], batch size: 33, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:58:00,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.76 vs. 
limit=15.0 +2024-08-03 21:58:01,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243195.33333333334, ans=0.1 +2024-08-03 21:58:03,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=243195.33333333334, ans=0.2 +2024-08-03 21:58:05,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=243195.33333333334, ans=0.05 +2024-08-03 21:58:12,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.835e+01 1.100e+02 1.252e+02 1.465e+02 2.313e+02, threshold=2.504e+02, percent-clipped=0.0 +2024-08-03 21:58:19,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243268.66666666666, ans=0.1 +2024-08-03 21:58:24,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243268.66666666666, ans=0.0 +2024-08-03 21:58:37,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=243342.0, ans=0.07 +2024-08-03 21:58:47,332 INFO [train.py:1114] (0/4) Epoch 19, batch 850, loss[loss=0.1602, simple_loss=0.2555, pruned_loss=0.0325, over 13323.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2681, pruned_loss=0.04742, over 2609167.11 frames. ], batch size: 40, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:59:00,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=243415.33333333334, ans=0.2 +2024-08-03 21:59:03,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243415.33333333334, ans=0.0 +2024-08-03 21:59:03,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=243415.33333333334, ans=0.125 +2024-08-03 21:59:08,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243452.0, ans=0.1 +2024-08-03 21:59:10,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=243452.0, ans=0.125 +2024-08-03 21:59:21,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=243488.66666666666, ans=0.0 +2024-08-03 21:59:21,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=243488.66666666666, ans=0.0 +2024-08-03 21:59:22,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.07 vs. limit=10.0 +2024-08-03 21:59:34,625 INFO [train.py:1114] (0/4) Epoch 19, batch 900, loss[loss=0.1619, simple_loss=0.2486, pruned_loss=0.03755, over 13337.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2687, pruned_loss=0.04798, over 2612362.08 frames. 
], batch size: 33, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:59:34,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=243562.0, ans=0.125 +2024-08-03 21:59:46,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=243598.66666666666, ans=15.0 +2024-08-03 21:59:48,159 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.094e+02 1.385e+02 1.717e+02 2.818e+02, threshold=2.769e+02, percent-clipped=4.0 +2024-08-03 21:59:57,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.41 vs. limit=15.0 +2024-08-03 22:00:01,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243672.0, ans=0.1 +2024-08-03 22:00:03,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=243672.0, ans=0.125 +2024-08-03 22:00:14,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=243708.66666666666, ans=0.05 +2024-08-03 22:00:15,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=243708.66666666666, ans=0.125 +2024-08-03 22:00:22,269 INFO [train.py:1114] (0/4) Epoch 19, batch 950, loss[loss=0.1752, simple_loss=0.2564, pruned_loss=0.04696, over 13551.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.269, pruned_loss=0.04817, over 2613124.46 frames. ], batch size: 34, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:00:25,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=243745.33333333334, ans=0.2 +2024-08-03 22:00:26,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=243745.33333333334, ans=0.0 +2024-08-03 22:00:31,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=243782.0, ans=0.125 +2024-08-03 22:00:37,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=243782.0, ans=0.0 +2024-08-03 22:00:52,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-08-03 22:00:59,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243892.0, ans=0.125 +2024-08-03 22:01:07,650 INFO [train.py:1114] (0/4) Epoch 19, batch 1000, loss[loss=0.1834, simple_loss=0.2609, pruned_loss=0.05291, over 13365.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2697, pruned_loss=0.04838, over 2611379.48 frames. 
], batch size: 35, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:01:09,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243928.66666666666, ans=0.0 +2024-08-03 22:01:13,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243928.66666666666, ans=0.1 +2024-08-03 22:01:21,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243965.33333333334, ans=0.1 +2024-08-03 22:01:23,455 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.931e+01 1.118e+02 1.271e+02 1.540e+02 2.481e+02, threshold=2.543e+02, percent-clipped=0.0 +2024-08-03 22:01:30,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244002.0, ans=0.125 +2024-08-03 22:01:39,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.97 vs. limit=15.0 +2024-08-03 22:01:55,467 INFO [train.py:1114] (0/4) Epoch 19, batch 1050, loss[loss=0.2222, simple_loss=0.3094, pruned_loss=0.06743, over 13572.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2684, pruned_loss=0.04773, over 2615646.73 frames. ], batch size: 39, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:02:13,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=244185.33333333334, ans=0.1 +2024-08-03 22:02:42,832 INFO [train.py:1114] (0/4) Epoch 19, batch 1100, loss[loss=0.1842, simple_loss=0.2733, pruned_loss=0.04749, over 13564.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2687, pruned_loss=0.04772, over 2619777.58 frames. ], batch size: 36, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:02:43,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=244295.33333333334, ans=0.2 +2024-08-03 22:02:53,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244332.0, ans=0.0 +2024-08-03 22:02:58,081 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.403e+01 1.088e+02 1.287e+02 1.607e+02 2.579e+02, threshold=2.574e+02, percent-clipped=1.0 +2024-08-03 22:03:01,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=244332.0, ans=0.125 +2024-08-03 22:03:04,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=244368.66666666666, ans=0.025 +2024-08-03 22:03:11,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244405.33333333334, ans=0.125 +2024-08-03 22:03:19,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=244442.0, ans=0.125 +2024-08-03 22:03:29,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=244478.66666666666, ans=12.0 +2024-08-03 22:03:29,692 INFO [train.py:1114] (0/4) Epoch 19, batch 1150, loss[loss=0.1686, simple_loss=0.2569, pruned_loss=0.04012, over 13553.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2688, pruned_loss=0.04772, over 2619784.27 frames. 
], batch size: 36, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:03:32,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.59 vs. limit=22.5 +2024-08-03 22:03:32,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=244478.66666666666, ans=0.0 +2024-08-03 22:03:42,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244515.33333333334, ans=0.125 +2024-08-03 22:03:52,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244552.0, ans=0.1 +2024-08-03 22:04:00,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=244588.66666666666, ans=0.125 +2024-08-03 22:04:04,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=244588.66666666666, ans=0.2 +2024-08-03 22:04:06,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244588.66666666666, ans=0.125 +2024-08-03 22:04:07,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.25 vs. limit=15.0 +2024-08-03 22:04:17,212 INFO [train.py:1114] (0/4) Epoch 19, batch 1200, loss[loss=0.1967, simple_loss=0.291, pruned_loss=0.05116, over 13562.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2699, pruned_loss=0.04798, over 2617133.73 frames. ], batch size: 39, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:04:30,752 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.729e+01 1.075e+02 1.205e+02 1.408e+02 2.455e+02, threshold=2.410e+02, percent-clipped=0.0 +2024-08-03 22:04:39,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=244735.33333333334, ans=0.0 +2024-08-03 22:04:45,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=244772.0, ans=0.125 +2024-08-03 22:04:45,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=244772.0, ans=0.2 +2024-08-03 22:04:49,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=244772.0, ans=0.125 +2024-08-03 22:04:59,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=244808.66666666666, ans=0.0 +2024-08-03 22:05:04,387 INFO [train.py:1114] (0/4) Epoch 19, batch 1250, loss[loss=0.2003, simple_loss=0.2877, pruned_loss=0.0564, over 13439.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2705, pruned_loss=0.04799, over 2629259.11 frames. ], batch size: 42, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:05:20,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=244882.0, ans=0.125 +2024-08-03 22:05:22,134 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.58 vs. 
limit=22.5 +2024-08-03 22:05:23,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=244918.66666666666, ans=0.2 +2024-08-03 22:05:24,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244918.66666666666, ans=0.1 +2024-08-03 22:05:27,353 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:05:44,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=244992.0, ans=0.0 +2024-08-03 22:05:44,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.71 vs. limit=22.5 +2024-08-03 22:05:52,101 INFO [train.py:1114] (0/4) Epoch 19, batch 1300, loss[loss=0.1635, simple_loss=0.2611, pruned_loss=0.03293, over 12839.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2692, pruned_loss=0.04756, over 2631758.69 frames. ], batch size: 52, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:05:54,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=245028.66666666666, ans=0.2 +2024-08-03 22:06:07,828 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.978e+01 1.097e+02 1.270e+02 1.535e+02 2.662e+02, threshold=2.541e+02, percent-clipped=5.0 +2024-08-03 22:06:29,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=245138.66666666666, ans=0.04949747468305833 +2024-08-03 22:06:37,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245175.33333333334, ans=0.1 +2024-08-03 22:06:40,469 INFO [train.py:1114] (0/4) Epoch 19, batch 1350, loss[loss=0.1824, simple_loss=0.277, pruned_loss=0.04387, over 13550.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2687, pruned_loss=0.04715, over 2639863.95 frames. ], batch size: 37, lr: 6.56e-03, grad_scale: 16.0 +2024-08-03 22:06:59,314 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:07:11,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=245322.0, ans=0.0 +2024-08-03 22:07:11,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-08-03 22:07:20,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=245358.66666666666, ans=0.125 +2024-08-03 22:07:21,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.18 vs. limit=12.0 +2024-08-03 22:07:26,146 INFO [train.py:1114] (0/4) Epoch 19, batch 1400, loss[loss=0.1512, simple_loss=0.229, pruned_loss=0.03671, over 13241.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.268, pruned_loss=0.04665, over 2642846.54 frames. 
], batch size: 31, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:07:27,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=245395.33333333334, ans=0.025 +2024-08-03 22:07:38,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245432.0, ans=0.125 +2024-08-03 22:07:39,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=15.0 +2024-08-03 22:07:41,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245432.0, ans=0.125 +2024-08-03 22:07:44,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.615e+01 1.110e+02 1.328e+02 1.668e+02 3.835e+02, threshold=2.657e+02, percent-clipped=2.0 +2024-08-03 22:07:59,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=245505.33333333334, ans=0.125 +2024-08-03 22:08:13,741 INFO [train.py:1114] (0/4) Epoch 19, batch 1450, loss[loss=0.202, simple_loss=0.2858, pruned_loss=0.05909, over 13421.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2689, pruned_loss=0.0475, over 2641512.79 frames. ], batch size: 43, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:08:19,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=245578.66666666666, ans=0.125 +2024-08-03 22:08:21,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245615.33333333334, ans=0.125 +2024-08-03 22:08:23,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=245615.33333333334, ans=0.125 +2024-08-03 22:08:33,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=245652.0, ans=0.125 +2024-08-03 22:08:47,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245688.66666666666, ans=0.0 +2024-08-03 22:09:01,180 INFO [train.py:1114] (0/4) Epoch 19, batch 1500, loss[loss=0.1762, simple_loss=0.2719, pruned_loss=0.04021, over 13397.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2694, pruned_loss=0.04773, over 2641392.45 frames. ], batch size: 39, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:09:14,242 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:09:16,763 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.116e+02 1.262e+02 1.580e+02 2.631e+02, threshold=2.524e+02, percent-clipped=0.0 +2024-08-03 22:09:32,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.79 vs. 
limit=22.5 +2024-08-03 22:09:39,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=245908.66666666666, ans=0.2 +2024-08-03 22:09:47,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245908.66666666666, ans=0.125 +2024-08-03 22:09:48,983 INFO [train.py:1114] (0/4) Epoch 19, batch 1550, loss[loss=0.1975, simple_loss=0.2933, pruned_loss=0.05086, over 13397.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2696, pruned_loss=0.04784, over 2630925.14 frames. ], batch size: 41, lr: 6.55e-03, grad_scale: 8.0 +2024-08-03 22:09:49,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=245945.33333333334, ans=0.025 +2024-08-03 22:09:52,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245945.33333333334, ans=0.1 +2024-08-03 22:09:53,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=245945.33333333334, ans=0.2 +2024-08-03 22:09:58,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=245982.0, ans=0.09899494936611666 +2024-08-03 22:09:59,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245982.0, ans=0.1 +2024-08-03 22:10:06,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=245982.0, ans=0.0 +2024-08-03 22:10:17,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246055.33333333334, ans=0.125 +2024-08-03 22:10:24,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246055.33333333334, ans=0.1 +2024-08-03 22:10:29,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=246092.0, ans=0.0 +2024-08-03 22:10:31,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=246092.0, ans=0.125 +2024-08-03 22:10:36,199 INFO [train.py:1114] (0/4) Epoch 19, batch 1600, loss[loss=0.1979, simple_loss=0.2871, pruned_loss=0.05433, over 13576.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2687, pruned_loss=0.04759, over 2624283.89 frames. ], batch size: 39, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:10:51,519 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.109e+02 1.298e+02 1.770e+02 3.045e+02, threshold=2.595e+02, percent-clipped=6.0 +2024-08-03 22:10:54,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=246202.0, ans=0.125 +2024-08-03 22:11:04,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=22.5 +2024-08-03 22:11:11,910 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.28 vs. 
limit=22.5 +2024-08-03 22:11:20,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246275.33333333334, ans=0.0 +2024-08-03 22:11:23,645 INFO [train.py:1114] (0/4) Epoch 19, batch 1650, loss[loss=0.1672, simple_loss=0.269, pruned_loss=0.03276, over 13330.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2691, pruned_loss=0.04786, over 2621668.10 frames. ], batch size: 40, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:11:40,102 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.54 vs. limit=15.0 +2024-08-03 22:11:57,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246422.0, ans=0.1 +2024-08-03 22:11:59,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=246458.66666666666, ans=0.0 +2024-08-03 22:12:02,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.52 vs. limit=15.0 +2024-08-03 22:12:11,750 INFO [train.py:1114] (0/4) Epoch 19, batch 1700, loss[loss=0.163, simple_loss=0.2399, pruned_loss=0.04306, over 13265.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2686, pruned_loss=0.0474, over 2629726.58 frames. ], batch size: 31, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:12:19,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=246495.33333333334, ans=0.125 +2024-08-03 22:12:19,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=246495.33333333334, ans=0.0 +2024-08-03 22:12:27,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.554e+01 1.124e+02 1.376e+02 1.723e+02 2.933e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 22:12:56,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246642.0, ans=0.125 +2024-08-03 22:12:57,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.68 vs. limit=15.0 +2024-08-03 22:12:58,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246678.66666666666, ans=0.125 +2024-08-03 22:12:59,185 INFO [train.py:1114] (0/4) Epoch 19, batch 1750, loss[loss=0.1516, simple_loss=0.2342, pruned_loss=0.03449, over 13539.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2678, pruned_loss=0.04735, over 2632831.91 frames. 
], batch size: 31, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:13:11,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246715.33333333334, ans=0.125 +2024-08-03 22:13:11,183 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:13:15,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246715.33333333334, ans=0.125 +2024-08-03 22:13:21,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=246752.0, ans=0.0 +2024-08-03 22:13:24,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=246752.0, ans=0.125 +2024-08-03 22:13:27,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=246788.66666666666, ans=0.0 +2024-08-03 22:13:34,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=246788.66666666666, ans=0.2 +2024-08-03 22:13:42,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=246825.33333333334, ans=0.125 +2024-08-03 22:13:43,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=246825.33333333334, ans=0.025 +2024-08-03 22:13:46,410 INFO [train.py:1114] (0/4) Epoch 19, batch 1800, loss[loss=0.1817, simple_loss=0.2704, pruned_loss=0.04651, over 13554.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2681, pruned_loss=0.04704, over 2634223.20 frames. ], batch size: 38, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:13:46,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=246862.0, ans=0.0 +2024-08-03 22:14:02,226 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.153e+02 1.366e+02 1.717e+02 2.450e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 22:14:11,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.27 vs. limit=15.0 +2024-08-03 22:14:16,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=246972.0, ans=0.125 +2024-08-03 22:14:19,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246972.0, ans=0.0 +2024-08-03 22:14:20,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=246972.0, ans=10.0 +2024-08-03 22:14:32,266 INFO [train.py:1114] (0/4) Epoch 19, batch 1850, loss[loss=0.1896, simple_loss=0.2806, pruned_loss=0.04933, over 13408.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2684, pruned_loss=0.04725, over 2637665.08 frames. ], batch size: 39, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:14:53,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. 
limit=6.0 +2024-08-03 22:15:09,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247155.33333333334, ans=0.1 +2024-08-03 22:15:12,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247192.0, ans=0.1 +2024-08-03 22:15:19,832 INFO [train.py:1114] (0/4) Epoch 19, batch 1900, loss[loss=0.2065, simple_loss=0.2964, pruned_loss=0.05831, over 13313.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2694, pruned_loss=0.04783, over 2640041.25 frames. ], batch size: 40, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:15:34,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.927e+01 1.140e+02 1.255e+02 1.731e+02 2.677e+02, threshold=2.509e+02, percent-clipped=0.0 +2024-08-03 22:15:43,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247302.0, ans=0.1 +2024-08-03 22:16:07,352 INFO [train.py:1114] (0/4) Epoch 19, batch 1950, loss[loss=0.1834, simple_loss=0.2695, pruned_loss=0.04861, over 13564.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2707, pruned_loss=0.04806, over 2646848.21 frames. ], batch size: 36, lr: 6.53e-03, grad_scale: 16.0 +2024-08-03 22:16:10,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=247412.0, ans=0.0 +2024-08-03 22:16:18,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=247448.66666666666, ans=0.0 +2024-08-03 22:16:32,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.78 vs. limit=10.0 +2024-08-03 22:16:44,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=247522.0, ans=0.0 +2024-08-03 22:16:44,525 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:16:48,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.78 vs. limit=15.0 +2024-08-03 22:16:55,414 INFO [train.py:1114] (0/4) Epoch 19, batch 2000, loss[loss=0.1442, simple_loss=0.2248, pruned_loss=0.03182, over 13542.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.272, pruned_loss=0.04879, over 2636035.23 frames. 
], batch size: 31, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:16:57,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247595.33333333334, ans=0.1 +2024-08-03 22:16:58,597 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:17:05,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=247632.0, ans=0.2 +2024-08-03 22:17:07,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=247632.0, ans=0.125 +2024-08-03 22:17:13,011 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.093e+02 1.300e+02 1.628e+02 2.543e+02, threshold=2.600e+02, percent-clipped=1.0 +2024-08-03 22:17:13,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=247632.0, ans=0.025 +2024-08-03 22:17:14,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=247632.0, ans=0.2 +2024-08-03 22:17:16,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247668.66666666666, ans=0.1 +2024-08-03 22:17:21,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=247668.66666666666, ans=0.125 +2024-08-03 22:17:24,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=247705.33333333334, ans=0.2 +2024-08-03 22:17:36,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=247742.0, ans=0.0 +2024-08-03 22:17:40,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247742.0, ans=0.1 +2024-08-03 22:17:40,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=247742.0, ans=0.0 +2024-08-03 22:17:42,977 INFO [train.py:1114] (0/4) Epoch 19, batch 2050, loss[loss=0.1601, simple_loss=0.2454, pruned_loss=0.03739, over 13420.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2718, pruned_loss=0.04915, over 2633806.03 frames. ], batch size: 32, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:17:53,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=247815.33333333334, ans=0.2 +2024-08-03 22:17:53,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-08-03 22:17:54,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.33 vs. limit=15.0 +2024-08-03 22:17:56,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=247815.33333333334, ans=0.015 +2024-08-03 22:18:04,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. 
limit=15.0 +2024-08-03 22:18:11,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=247888.66666666666, ans=0.125 +2024-08-03 22:18:18,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=247888.66666666666, ans=0.125 +2024-08-03 22:18:30,936 INFO [train.py:1114] (0/4) Epoch 19, batch 2100, loss[loss=0.1803, simple_loss=0.2721, pruned_loss=0.04423, over 13554.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2706, pruned_loss=0.04824, over 2638709.94 frames. ], batch size: 37, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:18:38,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=247962.0, ans=0.125 +2024-08-03 22:18:43,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=247998.66666666666, ans=0.125 +2024-08-03 22:18:46,391 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.343e+01 1.123e+02 1.234e+02 1.440e+02 2.542e+02, threshold=2.468e+02, percent-clipped=0.0 +2024-08-03 22:18:50,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=248035.33333333334, ans=0.0 +2024-08-03 22:18:53,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=248035.33333333334, ans=0.0 +2024-08-03 22:19:04,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=248072.0, ans=0.2 +2024-08-03 22:19:16,492 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.96 vs. limit=15.0 +2024-08-03 22:19:16,702 INFO [train.py:1114] (0/4) Epoch 19, batch 2150, loss[loss=0.1809, simple_loss=0.2658, pruned_loss=0.048, over 13548.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2697, pruned_loss=0.04764, over 2646631.54 frames. ], batch size: 36, lr: 6.52e-03, grad_scale: 32.0 +2024-08-03 22:19:25,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248182.0, ans=0.1 +2024-08-03 22:19:30,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=248182.0, ans=0.2 +2024-08-03 22:19:31,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=248182.0, ans=0.125 +2024-08-03 22:19:44,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=248218.66666666666, ans=0.0 +2024-08-03 22:19:55,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=248292.0, ans=0.0 +2024-08-03 22:20:06,581 INFO [train.py:1114] (0/4) Epoch 19, batch 2200, loss[loss=0.1949, simple_loss=0.2939, pruned_loss=0.04797, over 13414.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2696, pruned_loss=0.04752, over 2644577.63 frames. 
], batch size: 39, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:20:07,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=248328.66666666666, ans=0.125 +2024-08-03 22:20:22,940 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.107e+02 1.433e+02 1.774e+02 2.441e+02, threshold=2.865e+02, percent-clipped=0.0 +2024-08-03 22:20:29,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=248402.0, ans=0.04949747468305833 +2024-08-03 22:20:53,618 INFO [train.py:1114] (0/4) Epoch 19, batch 2250, loss[loss=0.1673, simple_loss=0.2605, pruned_loss=0.03706, over 13375.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2692, pruned_loss=0.04744, over 2642246.40 frames. ], batch size: 37, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:20:55,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=248512.0, ans=0.125 +2024-08-03 22:20:56,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248512.0, ans=0.1 +2024-08-03 22:21:26,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=248622.0, ans=0.0 +2024-08-03 22:21:39,634 INFO [train.py:1114] (0/4) Epoch 19, batch 2300, loss[loss=0.1587, simple_loss=0.251, pruned_loss=0.03323, over 13580.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2683, pruned_loss=0.0475, over 2638261.53 frames. ], batch size: 33, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:21:57,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=248732.0, ans=0.125 +2024-08-03 22:21:58,041 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.536e+01 1.062e+02 1.236e+02 1.586e+02 2.214e+02, threshold=2.472e+02, percent-clipped=0.0 +2024-08-03 22:22:13,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=248805.33333333334, ans=0.2 +2024-08-03 22:22:27,317 INFO [train.py:1114] (0/4) Epoch 19, batch 2350, loss[loss=0.1584, simple_loss=0.2552, pruned_loss=0.03081, over 13557.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2679, pruned_loss=0.04714, over 2640914.48 frames. ], batch size: 38, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:22:36,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248915.33333333334, ans=0.1 +2024-08-03 22:22:36,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248915.33333333334, ans=0.125 +2024-08-03 22:22:45,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.99 vs. 
limit=15.0 +2024-08-03 22:22:47,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248952.0, ans=0.125 +2024-08-03 22:22:53,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=248988.66666666666, ans=0.5 +2024-08-03 22:23:13,908 INFO [train.py:1114] (0/4) Epoch 19, batch 2400, loss[loss=0.1794, simple_loss=0.2603, pruned_loss=0.04918, over 13532.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2687, pruned_loss=0.04723, over 2642913.00 frames. ], batch size: 35, lr: 6.51e-03, grad_scale: 32.0 +2024-08-03 22:23:16,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249062.0, ans=0.125 +2024-08-03 22:23:18,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249062.0, ans=0.125 +2024-08-03 22:23:31,383 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.101e+02 1.278e+02 1.688e+02 2.593e+02, threshold=2.556e+02, percent-clipped=1.0 +2024-08-03 22:23:44,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=249172.0, ans=0.025 +2024-08-03 22:23:50,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=249172.0, ans=0.125 +2024-08-03 22:24:00,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=249208.66666666666, ans=0.015 +2024-08-03 22:24:01,890 INFO [train.py:1114] (0/4) Epoch 19, batch 2450, loss[loss=0.1964, simple_loss=0.2842, pruned_loss=0.05426, over 13366.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2699, pruned_loss=0.0477, over 2632514.41 frames. ], batch size: 37, lr: 6.51e-03, grad_scale: 16.0 +2024-08-03 22:24:09,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=249245.33333333334, ans=0.125 +2024-08-03 22:24:11,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=249282.0, ans=0.2 +2024-08-03 22:24:14,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249282.0, ans=0.0 +2024-08-03 22:24:21,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=249318.66666666666, ans=0.125 +2024-08-03 22:24:23,414 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-68000.pt +2024-08-03 22:24:26,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=249318.66666666666, ans=0.0 +2024-08-03 22:24:26,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.24 vs. limit=10.0 +2024-08-03 22:24:47,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=249392.0, ans=0.1 +2024-08-03 22:24:49,891 INFO [train.py:1114] (0/4) Epoch 19, batch 2500, loss[loss=0.1908, simple_loss=0.2858, pruned_loss=0.0479, over 13424.00 frames. 
], tot_loss[loss=0.1824, simple_loss=0.2699, pruned_loss=0.04745, over 2636857.55 frames. ], batch size: 39, lr: 6.51e-03, grad_scale: 16.0 +2024-08-03 22:24:56,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.66 vs. limit=12.0 +2024-08-03 22:24:58,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=249465.33333333334, ans=0.125 +2024-08-03 22:25:06,703 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.566e+01 1.106e+02 1.263e+02 1.596e+02 2.870e+02, threshold=2.527e+02, percent-clipped=4.0 +2024-08-03 22:25:07,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0 +2024-08-03 22:25:14,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-08-03 22:25:22,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=249538.66666666666, ans=0.04949747468305833 +2024-08-03 22:25:26,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=249575.33333333334, ans=0.0 +2024-08-03 22:25:29,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249575.33333333334, ans=0.125 +2024-08-03 22:25:34,294 INFO [train.py:1114] (0/4) Epoch 19, batch 2550, loss[loss=0.1647, simple_loss=0.2436, pruned_loss=0.04285, over 13551.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.27, pruned_loss=0.04768, over 2638623.98 frames. ], batch size: 31, lr: 6.51e-03, grad_scale: 16.0 +2024-08-03 22:25:35,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=22.5 +2024-08-03 22:25:43,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=22.5 +2024-08-03 22:25:54,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=249685.33333333334, ans=0.0 +2024-08-03 22:26:02,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=249722.0, ans=0.0 +2024-08-03 22:26:07,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0 +2024-08-03 22:26:08,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=249758.66666666666, ans=0.0 +2024-08-03 22:26:18,047 INFO [train.py:1114] (0/4) Epoch 19, batch 2600, loss[loss=0.1494, simple_loss=0.2403, pruned_loss=0.02921, over 13563.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2702, pruned_loss=0.04778, over 2637267.83 frames. 
], batch size: 36, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:26:23,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249795.33333333334, ans=0.1 +2024-08-03 22:26:26,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249832.0, ans=0.0 +2024-08-03 22:26:36,441 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.660e+01 1.133e+02 1.412e+02 1.915e+02 3.004e+02, threshold=2.824e+02, percent-clipped=7.0 +2024-08-03 22:26:42,928 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:26:55,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=249942.0, ans=0.125 +2024-08-03 22:27:01,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249942.0, ans=0.1 +2024-08-03 22:27:03,524 INFO [train.py:1114] (0/4) Epoch 19, batch 2650, loss[loss=0.2074, simple_loss=0.2968, pruned_loss=0.05903, over 13218.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2702, pruned_loss=0.04788, over 2640810.56 frames. ], batch size: 46, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:27:14,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=250015.33333333334, ans=0.0 +2024-08-03 22:27:33,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250088.66666666666, ans=0.0 +2024-08-03 22:27:45,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=12.0 +2024-08-03 22:27:47,063 INFO [train.py:1114] (0/4) Epoch 19, batch 2700, loss[loss=0.1794, simple_loss=0.2725, pruned_loss=0.04314, over 13537.00 frames. ], tot_loss[loss=0.183, simple_loss=0.27, pruned_loss=0.04797, over 2638418.56 frames. ], batch size: 40, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:27:48,967 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:28:03,575 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.168e+01 1.091e+02 1.247e+02 1.559e+02 2.482e+02, threshold=2.495e+02, percent-clipped=0.0 +2024-08-03 22:28:14,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=250272.0, ans=0.125 +2024-08-03 22:28:21,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250308.66666666666, ans=0.1 +2024-08-03 22:28:30,861 INFO [train.py:1114] (0/4) Epoch 19, batch 2750, loss[loss=0.1657, simple_loss=0.2488, pruned_loss=0.04132, over 13343.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2688, pruned_loss=0.04769, over 2635616.03 frames. 
], batch size: 34, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:28:35,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=250345.33333333334, ans=0.125 +2024-08-03 22:28:47,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=250382.0, ans=0.0 +2024-08-03 22:28:51,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250418.66666666666, ans=0.125 +2024-08-03 22:29:00,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250455.33333333334, ans=0.1 +2024-08-03 22:29:00,703 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:29:07,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=250492.0, ans=15.0 +2024-08-03 22:29:08,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250492.0, ans=0.1 +2024-08-03 22:29:13,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=250492.0, ans=0.07 +2024-08-03 22:29:15,856 INFO [train.py:1114] (0/4) Epoch 19, batch 2800, loss[loss=0.2432, simple_loss=0.319, pruned_loss=0.08376, over 9141.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2698, pruned_loss=0.04829, over 2626917.29 frames. ], batch size: 96, lr: 6.49e-03, grad_scale: 32.0 +2024-08-03 22:29:30,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=250565.33333333334, ans=0.0 +2024-08-03 22:29:38,856 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.968e+01 1.091e+02 1.270e+02 1.499e+02 3.648e+02, threshold=2.541e+02, percent-clipped=3.0 +2024-08-03 22:29:45,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=250602.0, ans=0.125 +2024-08-03 22:30:07,681 INFO [train.py:1114] (0/4) Epoch 19, batch 2850, loss[loss=0.1899, simple_loss=0.275, pruned_loss=0.05242, over 13362.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2707, pruned_loss=0.04884, over 2620342.11 frames. ], batch size: 35, lr: 6.49e-03, grad_scale: 32.0 +2024-08-03 22:30:10,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.46 vs. limit=15.0 +2024-08-03 22:30:41,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.17 vs. limit=6.0 +2024-08-03 22:30:41,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=250822.0, ans=0.125 +2024-08-03 22:30:44,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=250858.66666666666, ans=0.0 +2024-08-03 22:30:53,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.27 vs. 
limit=15.0 +2024-08-03 22:30:54,422 INFO [train.py:1114] (0/4) Epoch 19, batch 2900, loss[loss=0.1819, simple_loss=0.2643, pruned_loss=0.0498, over 13355.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2717, pruned_loss=0.04884, over 2631023.25 frames. ], batch size: 36, lr: 6.49e-03, grad_scale: 32.0 +2024-08-03 22:31:02,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=250932.0, ans=0.025 +2024-08-03 22:31:11,720 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.871e+01 1.142e+02 1.452e+02 2.110e+02 3.268e+02, threshold=2.903e+02, percent-clipped=11.0 +2024-08-03 22:31:18,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=250968.66666666666, ans=0.0 +2024-08-03 22:31:25,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251005.33333333334, ans=0.125 +2024-08-03 22:31:37,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=251042.0, ans=0.5 +2024-08-03 22:31:39,965 INFO [train.py:1114] (0/4) Epoch 19, batch 2950, loss[loss=0.1695, simple_loss=0.2564, pruned_loss=0.04131, over 13337.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2702, pruned_loss=0.04842, over 2629689.25 frames. ], batch size: 34, lr: 6.49e-03, grad_scale: 16.0 +2024-08-03 22:31:40,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=251078.66666666666, ans=0.0 +2024-08-03 22:31:45,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=251078.66666666666, ans=0.125 +2024-08-03 22:31:47,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251078.66666666666, ans=0.1 +2024-08-03 22:31:50,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=251115.33333333334, ans=0.125 +2024-08-03 22:32:02,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=251152.0, ans=0.0 +2024-08-03 22:32:04,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.32 vs. limit=10.0 +2024-08-03 22:32:11,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.59 vs. limit=22.5 +2024-08-03 22:32:16,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251225.33333333334, ans=0.125 +2024-08-03 22:32:22,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. limit=10.0 +2024-08-03 22:32:23,277 INFO [train.py:1114] (0/4) Epoch 19, batch 3000, loss[loss=0.1776, simple_loss=0.2654, pruned_loss=0.04494, over 13541.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2696, pruned_loss=0.04814, over 2630012.03 frames. 
], batch size: 37, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:32:23,277 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 22:32:34,411 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.169, simple_loss=0.2683, pruned_loss=0.03491, over 944034.00 frames. +2024-08-03 22:32:34,412 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 22:32:41,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.16 vs. limit=15.0 +2024-08-03 22:32:51,810 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.029e+01 1.088e+02 1.228e+02 1.356e+02 2.065e+02, threshold=2.455e+02, percent-clipped=0.0 +2024-08-03 22:32:56,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=251335.33333333334, ans=0.125 +2024-08-03 22:33:06,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=251372.0, ans=0.125 +2024-08-03 22:33:14,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=251408.66666666666, ans=0.0 +2024-08-03 22:33:17,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=15.0 +2024-08-03 22:33:18,003 INFO [train.py:1114] (0/4) Epoch 19, batch 3050, loss[loss=0.1575, simple_loss=0.2432, pruned_loss=0.03591, over 13524.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2699, pruned_loss=0.04817, over 2627437.55 frames. ], batch size: 35, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:33:25,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=251482.0, ans=0.125 +2024-08-03 22:33:26,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251482.0, ans=0.0 +2024-08-03 22:33:40,921 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:33:44,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=251555.33333333334, ans=0.125 +2024-08-03 22:33:51,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=251592.0, ans=0.125 +2024-08-03 22:33:53,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.77 vs. limit=22.5 +2024-08-03 22:33:59,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=251592.0, ans=0.125 +2024-08-03 22:34:01,088 INFO [train.py:1114] (0/4) Epoch 19, batch 3100, loss[loss=0.1843, simple_loss=0.2718, pruned_loss=0.04842, over 13274.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2692, pruned_loss=0.04774, over 2627083.08 frames. 
], batch size: 46, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:34:01,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251628.66666666666, ans=0.1 +2024-08-03 22:34:11,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=251665.33333333334, ans=0.025 +2024-08-03 22:34:16,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=251665.33333333334, ans=0.125 +2024-08-03 22:34:19,148 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.004e+01 1.074e+02 1.205e+02 1.545e+02 4.065e+02, threshold=2.411e+02, percent-clipped=2.0 +2024-08-03 22:34:21,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251702.0, ans=0.125 +2024-08-03 22:34:23,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.85 vs. limit=15.0 +2024-08-03 22:34:28,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=251702.0, ans=0.025 +2024-08-03 22:34:46,855 INFO [train.py:1114] (0/4) Epoch 19, batch 3150, loss[loss=0.1963, simple_loss=0.2877, pruned_loss=0.05243, over 13255.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2703, pruned_loss=0.0483, over 2629006.05 frames. ], batch size: 49, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:35:05,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=251885.33333333334, ans=0.0 +2024-08-03 22:35:12,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=251922.0, ans=0.125 +2024-08-03 22:35:13,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251922.0, ans=0.1 +2024-08-03 22:35:18,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=251922.0, ans=0.0 +2024-08-03 22:35:19,295 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:35:22,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.54 vs. limit=22.5 +2024-08-03 22:35:30,194 INFO [train.py:1114] (0/4) Epoch 19, batch 3200, loss[loss=0.1737, simple_loss=0.2678, pruned_loss=0.03977, over 13534.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2697, pruned_loss=0.0477, over 2634796.59 frames. ], batch size: 37, lr: 6.48e-03, grad_scale: 32.0 +2024-08-03 22:35:31,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.47 vs. 
limit=15.0 +2024-08-03 22:35:45,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=252032.0, ans=0.0 +2024-08-03 22:35:47,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.593e+01 1.152e+02 1.475e+02 1.954e+02 2.995e+02, threshold=2.949e+02, percent-clipped=9.0 +2024-08-03 22:35:48,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252068.66666666666, ans=0.1 +2024-08-03 22:35:51,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=252068.66666666666, ans=0.0 +2024-08-03 22:35:56,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=252105.33333333334, ans=0.0 +2024-08-03 22:36:07,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252142.0, ans=0.1 +2024-08-03 22:36:13,789 INFO [train.py:1114] (0/4) Epoch 19, batch 3250, loss[loss=0.1755, simple_loss=0.268, pruned_loss=0.04149, over 13396.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2703, pruned_loss=0.04795, over 2638800.77 frames. ], batch size: 38, lr: 6.47e-03, grad_scale: 32.0 +2024-08-03 22:36:17,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=252178.66666666666, ans=0.125 +2024-08-03 22:36:18,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252178.66666666666, ans=0.1 +2024-08-03 22:36:23,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252215.33333333334, ans=0.125 +2024-08-03 22:36:36,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=252252.0, ans=0.04949747468305833 +2024-08-03 22:36:40,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=252288.66666666666, ans=0.125 +2024-08-03 22:36:57,932 INFO [train.py:1114] (0/4) Epoch 19, batch 3300, loss[loss=0.1717, simple_loss=0.264, pruned_loss=0.03972, over 12818.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2689, pruned_loss=0.04739, over 2639668.17 frames. ], batch size: 52, lr: 6.47e-03, grad_scale: 32.0 +2024-08-03 22:37:01,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.95 vs. limit=22.5 +2024-08-03 22:37:14,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.912e+01 1.101e+02 1.274e+02 1.526e+02 2.579e+02, threshold=2.548e+02, percent-clipped=0.0 +2024-08-03 22:37:17,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.80 vs. 
limit=22.5 +2024-08-03 22:37:17,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=252435.33333333334, ans=0.0 +2024-08-03 22:37:21,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252435.33333333334, ans=0.1 +2024-08-03 22:37:23,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=252472.0, ans=0.125 +2024-08-03 22:37:27,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=252472.0, ans=0.0 +2024-08-03 22:37:31,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=252508.66666666666, ans=0.0 +2024-08-03 22:37:35,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=252508.66666666666, ans=0.5 +2024-08-03 22:37:36,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=252508.66666666666, ans=0.0 +2024-08-03 22:37:39,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=252508.66666666666, ans=0.0 +2024-08-03 22:37:40,621 INFO [train.py:1114] (0/4) Epoch 19, batch 3350, loss[loss=0.1969, simple_loss=0.2854, pruned_loss=0.05419, over 13029.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2704, pruned_loss=0.04807, over 2628647.06 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 32.0 +2024-08-03 22:37:41,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.97 vs. limit=15.0 +2024-08-03 22:37:51,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=252582.0, ans=0.5 +2024-08-03 22:38:04,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252618.66666666666, ans=0.125 +2024-08-03 22:38:05,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=252655.33333333334, ans=0.0 +2024-08-03 22:38:19,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=252692.0, ans=0.0 +2024-08-03 22:38:23,948 INFO [train.py:1114] (0/4) Epoch 19, batch 3400, loss[loss=0.1636, simple_loss=0.247, pruned_loss=0.04013, over 13534.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2699, pruned_loss=0.04792, over 2624649.36 frames. ], batch size: 31, lr: 6.47e-03, grad_scale: 16.0 +2024-08-03 22:38:32,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=252765.33333333334, ans=0.0 +2024-08-03 22:38:41,988 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.435e+01 1.094e+02 1.256e+02 1.561e+02 2.442e+02, threshold=2.511e+02, percent-clipped=0.0 +2024-08-03 22:39:07,295 INFO [train.py:1114] (0/4) Epoch 19, batch 3450, loss[loss=0.1866, simple_loss=0.2735, pruned_loss=0.04983, over 12882.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2695, pruned_loss=0.0476, over 2627891.65 frames. 
], batch size: 52, lr: 6.46e-03, grad_scale: 16.0 +2024-08-03 22:39:07,446 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:39:09,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.63 vs. limit=15.0 +2024-08-03 22:39:21,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=252948.66666666666, ans=0.125 +2024-08-03 22:39:25,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=252985.33333333334, ans=0.0 +2024-08-03 22:39:36,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=253022.0, ans=0.125 +2024-08-03 22:39:39,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=8.58 vs. limit=12.0 +2024-08-03 22:39:44,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=253058.66666666666, ans=0.125 +2024-08-03 22:39:46,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=253058.66666666666, ans=0.07 +2024-08-03 22:39:49,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=253095.33333333334, ans=0.04949747468305833 +2024-08-03 22:39:50,117 INFO [train.py:1114] (0/4) Epoch 19, batch 3500, loss[loss=0.1586, simple_loss=0.2453, pruned_loss=0.03591, over 13532.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2687, pruned_loss=0.04745, over 2630243.17 frames. ], batch size: 34, lr: 6.46e-03, grad_scale: 16.0 +2024-08-03 22:39:55,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=253095.33333333334, ans=0.04949747468305833 +2024-08-03 22:40:07,943 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.416e+01 1.103e+02 1.254e+02 1.609e+02 3.004e+02, threshold=2.508e+02, percent-clipped=2.0 +2024-08-03 22:40:09,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=253168.66666666666, ans=0.0 +2024-08-03 22:40:15,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=253205.33333333334, ans=0.125 +2024-08-03 22:40:33,557 INFO [train.py:1114] (0/4) Epoch 19, batch 3550, loss[loss=0.1926, simple_loss=0.2782, pruned_loss=0.05353, over 12380.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.271, pruned_loss=0.04887, over 2628986.68 frames. 
], batch size: 58, lr: 6.46e-03, grad_scale: 16.0 +2024-08-03 22:40:44,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=253315.33333333334, ans=0.125 +2024-08-03 22:40:44,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=253315.33333333334, ans=0.125 +2024-08-03 22:40:49,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253315.33333333334, ans=0.125 +2024-08-03 22:41:17,632 INFO [train.py:1114] (0/4) Epoch 19, batch 3600, loss[loss=0.2398, simple_loss=0.3075, pruned_loss=0.086, over 9316.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2755, pruned_loss=0.05246, over 2487104.50 frames. ], batch size: 97, lr: 6.46e-03, grad_scale: 32.0 +2024-08-03 22:41:31,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=253498.66666666666, ans=0.025 +2024-08-03 22:41:35,764 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.196e+02 1.277e+02 1.470e+02 2.167e+02, threshold=2.555e+02, percent-clipped=0.0 +2024-08-03 22:41:39,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253535.33333333334, ans=0.125 +2024-08-03 22:41:52,412 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-19.pt +2024-08-03 22:42:35,195 INFO [train.py:1114] (0/4) Epoch 20, batch 0, loss[loss=0.1627, simple_loss=0.2455, pruned_loss=0.03999, over 13344.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2455, pruned_loss=0.03999, over 13344.00 frames. ], batch size: 33, lr: 6.29e-03, grad_scale: 32.0 +2024-08-03 22:42:35,196 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 22:42:39,785 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6041, 2.8746, 2.5608, 2.7554], device='cuda:0') +2024-08-03 22:42:45,194 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1683, simple_loss=0.2688, pruned_loss=0.0339, over 944034.00 frames. +2024-08-03 22:42:45,195 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 22:42:50,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=253612.33333333334, ans=0.0 +2024-08-03 22:42:58,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.74 vs. limit=15.0 +2024-08-03 22:43:03,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.84 vs. 
limit=10.0 +2024-08-03 22:43:04,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=253685.66666666666, ans=0.0 +2024-08-03 22:43:14,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253722.33333333334, ans=0.125 +2024-08-03 22:43:18,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253722.33333333334, ans=0.1 +2024-08-03 22:43:26,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=253759.0, ans=0.0 +2024-08-03 22:43:30,987 INFO [train.py:1114] (0/4) Epoch 20, batch 50, loss[loss=0.1615, simple_loss=0.2395, pruned_loss=0.04176, over 13427.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2691, pruned_loss=0.04855, over 578229.63 frames. ], batch size: 32, lr: 6.29e-03, grad_scale: 32.0 +2024-08-03 22:43:41,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-08-03 22:44:02,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.939e+01 1.083e+02 1.261e+02 1.490e+02 2.691e+02, threshold=2.522e+02, percent-clipped=1.0 +2024-08-03 22:44:19,843 INFO [train.py:1114] (0/4) Epoch 20, batch 100, loss[loss=0.1652, simple_loss=0.2525, pruned_loss=0.03898, over 13526.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2729, pruned_loss=0.04907, over 1025971.22 frames. ], batch size: 35, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:44:21,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=253979.0, ans=0.2 +2024-08-03 22:44:26,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0 +2024-08-03 22:44:32,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=254015.66666666666, ans=0.2 +2024-08-03 22:44:43,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=254052.33333333334, ans=0.125 +2024-08-03 22:44:54,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=254089.0, ans=0.0 +2024-08-03 22:45:00,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=254125.66666666666, ans=0.0 +2024-08-03 22:45:03,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=254125.66666666666, ans=0.125 +2024-08-03 22:45:04,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=254125.66666666666, ans=0.1 +2024-08-03 22:45:04,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=12.0 +2024-08-03 22:45:07,144 INFO [train.py:1114] (0/4) Epoch 20, batch 150, loss[loss=0.1693, simple_loss=0.2544, pruned_loss=0.04214, over 13433.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2705, pruned_loss=0.04778, over 1387369.91 frames. 
], batch size: 32, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:45:09,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=254162.33333333334, ans=0.1 +2024-08-03 22:45:10,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.14 vs. limit=15.0 +2024-08-03 22:45:22,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=254199.0, ans=0.0 +2024-08-03 22:45:24,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.07 vs. limit=22.5 +2024-08-03 22:45:30,675 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:45:33,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=254272.33333333334, ans=0.07 +2024-08-03 22:45:35,896 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.685e+01 1.074e+02 1.304e+02 1.730e+02 2.668e+02, threshold=2.608e+02, percent-clipped=1.0 +2024-08-03 22:45:54,470 INFO [train.py:1114] (0/4) Epoch 20, batch 200, loss[loss=0.1915, simple_loss=0.2855, pruned_loss=0.04875, over 12518.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2693, pruned_loss=0.04706, over 1665488.37 frames. ], batch size: 58, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:45:54,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.18 vs. limit=15.0 +2024-08-03 22:46:00,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=254345.66666666666, ans=0.0 +2024-08-03 22:46:15,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254419.0, ans=0.125 +2024-08-03 22:46:20,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=254419.0, ans=0.025 +2024-08-03 22:46:21,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.07 vs. limit=15.0 +2024-08-03 22:46:24,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=254455.66666666666, ans=0.0 +2024-08-03 22:46:29,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=15.0 +2024-08-03 22:46:36,677 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:46:40,114 INFO [train.py:1114] (0/4) Epoch 20, batch 250, loss[loss=0.1918, simple_loss=0.2832, pruned_loss=0.05023, over 13248.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2691, pruned_loss=0.04731, over 1884053.98 frames. 
], batch size: 46, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:46:47,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=254529.0, ans=0.125 +2024-08-03 22:46:54,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254565.66666666666, ans=0.1 +2024-08-03 22:47:00,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=254602.33333333334, ans=0.0 +2024-08-03 22:47:03,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=254602.33333333334, ans=0.125 +2024-08-03 22:47:04,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=254602.33333333334, ans=0.025 +2024-08-03 22:47:10,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254639.0, ans=0.1 +2024-08-03 22:47:10,944 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.021e+01 1.100e+02 1.239e+02 1.581e+02 3.543e+02, threshold=2.478e+02, percent-clipped=3.0 +2024-08-03 22:47:14,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254639.0, ans=0.125 +2024-08-03 22:47:26,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=254712.33333333334, ans=0.2 +2024-08-03 22:47:27,273 INFO [train.py:1114] (0/4) Epoch 20, batch 300, loss[loss=0.1789, simple_loss=0.2643, pruned_loss=0.04674, over 13430.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2686, pruned_loss=0.04713, over 2050417.77 frames. ], batch size: 42, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:47:44,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254749.0, ans=0.125 +2024-08-03 22:47:58,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=254822.33333333334, ans=0.125 +2024-08-03 22:48:02,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.58 vs. limit=15.0 +2024-08-03 22:48:15,276 INFO [train.py:1114] (0/4) Epoch 20, batch 350, loss[loss=0.1465, simple_loss=0.231, pruned_loss=0.03096, over 13584.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2694, pruned_loss=0.04739, over 2181267.25 frames. ], batch size: 33, lr: 6.27e-03, grad_scale: 16.0 +2024-08-03 22:48:29,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.50 vs. 
limit=15.0 +2024-08-03 22:48:41,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=254969.0, ans=0.125 +2024-08-03 22:48:42,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=254969.0, ans=0.0 +2024-08-03 22:48:46,587 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.302e+01 1.074e+02 1.204e+02 1.489e+02 2.516e+02, threshold=2.409e+02, percent-clipped=1.0 +2024-08-03 22:48:49,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255005.66666666666, ans=0.125 +2024-08-03 22:49:02,805 INFO [train.py:1114] (0/4) Epoch 20, batch 400, loss[loss=0.1944, simple_loss=0.2834, pruned_loss=0.05266, over 13359.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.269, pruned_loss=0.0473, over 2285615.05 frames. ], batch size: 37, lr: 6.27e-03, grad_scale: 32.0 +2024-08-03 22:49:09,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=255079.0, ans=0.0 +2024-08-03 22:49:15,181 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:49:23,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=255152.33333333334, ans=0.05 +2024-08-03 22:49:25,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=255152.33333333334, ans=0.0 +2024-08-03 22:49:41,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=255225.66666666666, ans=0.0 +2024-08-03 22:49:41,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255225.66666666666, ans=0.0 +2024-08-03 22:49:46,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=255225.66666666666, ans=0.125 +2024-08-03 22:49:48,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.05 vs. limit=15.0 +2024-08-03 22:49:51,088 INFO [train.py:1114] (0/4) Epoch 20, batch 450, loss[loss=0.1951, simple_loss=0.2917, pruned_loss=0.04923, over 13547.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2686, pruned_loss=0.04709, over 2359411.25 frames. ], batch size: 38, lr: 6.27e-03, grad_scale: 32.0 +2024-08-03 22:49:54,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255262.33333333334, ans=0.125 +2024-08-03 22:49:59,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255299.0, ans=0.1 +2024-08-03 22:50:05,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=255299.0, ans=0.0 +2024-08-03 22:50:20,457 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.506e+01 1.112e+02 1.300e+02 1.604e+02 2.595e+02, threshold=2.600e+02, percent-clipped=3.0 +2024-08-03 22:50:35,561 INFO [train.py:1114] (0/4) Epoch 20, batch 500, loss[loss=0.1908, simple_loss=0.2867, pruned_loss=0.04742, over 13410.00 frames. 
], tot_loss[loss=0.1804, simple_loss=0.2677, pruned_loss=0.04657, over 2424608.58 frames. ], batch size: 43, lr: 6.27e-03, grad_scale: 16.0 +2024-08-03 22:50:43,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0 +2024-08-03 22:51:01,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=255519.0, ans=0.125 +2024-08-03 22:51:07,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=255555.66666666666, ans=0.125 +2024-08-03 22:51:25,391 INFO [train.py:1114] (0/4) Epoch 20, batch 550, loss[loss=0.1836, simple_loss=0.2731, pruned_loss=0.04707, over 13037.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2683, pruned_loss=0.04684, over 2466789.52 frames. ], batch size: 48, lr: 6.26e-03, grad_scale: 16.0 +2024-08-03 22:51:27,421 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:51:27,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=255629.0, ans=0.05 +2024-08-03 22:51:28,403 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:51:30,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=255629.0, ans=10.0 +2024-08-03 22:51:55,566 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.709e+01 1.108e+02 1.273e+02 1.483e+02 2.115e+02, threshold=2.547e+02, percent-clipped=0.0 +2024-08-03 22:52:13,741 INFO [train.py:1114] (0/4) Epoch 20, batch 600, loss[loss=0.1948, simple_loss=0.2839, pruned_loss=0.05287, over 13349.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2686, pruned_loss=0.04718, over 2506624.91 frames. ], batch size: 46, lr: 6.26e-03, grad_scale: 16.0 +2024-08-03 22:52:21,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=15.0 +2024-08-03 22:52:35,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255885.66666666666, ans=0.125 +2024-08-03 22:52:36,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=255885.66666666666, ans=0.125 +2024-08-03 22:52:43,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255922.33333333334, ans=0.125 +2024-08-03 22:52:44,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=255922.33333333334, ans=0.125 +2024-08-03 22:53:01,425 INFO [train.py:1114] (0/4) Epoch 20, batch 650, loss[loss=0.1789, simple_loss=0.2741, pruned_loss=0.04189, over 13536.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2679, pruned_loss=0.04702, over 2542111.79 frames. 
], batch size: 37, lr: 6.26e-03, grad_scale: 8.0 +2024-08-03 22:53:25,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256069.0, ans=0.1 +2024-08-03 22:53:30,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=256105.66666666666, ans=0.09899494936611666 +2024-08-03 22:53:32,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.089e+02 1.229e+02 1.482e+02 2.680e+02, threshold=2.459e+02, percent-clipped=1.0 +2024-08-03 22:53:39,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=256142.33333333334, ans=0.125 +2024-08-03 22:53:47,113 INFO [train.py:1114] (0/4) Epoch 20, batch 700, loss[loss=0.1777, simple_loss=0.2609, pruned_loss=0.04721, over 13536.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2681, pruned_loss=0.04711, over 2564734.40 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 8.0 +2024-08-03 22:53:52,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256179.0, ans=0.1 +2024-08-03 22:53:52,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256179.0, ans=0.0 +2024-08-03 22:53:54,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=256179.0, ans=0.125 +2024-08-03 22:54:06,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=256252.33333333334, ans=0.125 +2024-08-03 22:54:08,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256252.33333333334, ans=0.1 +2024-08-03 22:54:15,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256289.0, ans=0.125 +2024-08-03 22:54:19,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=256289.0, ans=0.0 +2024-08-03 22:54:26,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.83 vs. limit=12.0 +2024-08-03 22:54:33,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=256325.66666666666, ans=0.125 +2024-08-03 22:54:34,717 INFO [train.py:1114] (0/4) Epoch 20, batch 750, loss[loss=0.1569, simple_loss=0.2492, pruned_loss=0.03232, over 13354.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2669, pruned_loss=0.04655, over 2582839.96 frames. 
], batch size: 37, lr: 6.26e-03, grad_scale: 8.0 +2024-08-03 22:54:45,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=256399.0, ans=0.125 +2024-08-03 22:54:51,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256399.0, ans=0.0 +2024-08-03 22:54:53,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=256435.66666666666, ans=0.025 +2024-08-03 22:55:02,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=6.34 vs. limit=15.0 +2024-08-03 22:55:06,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=256472.33333333334, ans=0.2 +2024-08-03 22:55:08,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.791e+01 1.111e+02 1.273e+02 1.584e+02 2.450e+02, threshold=2.545e+02, percent-clipped=0.0 +2024-08-03 22:55:09,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256472.33333333334, ans=0.125 +2024-08-03 22:55:10,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=256472.33333333334, ans=0.025 +2024-08-03 22:55:11,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=256472.33333333334, ans=0.0 +2024-08-03 22:55:23,255 INFO [train.py:1114] (0/4) Epoch 20, batch 800, loss[loss=0.1659, simple_loss=0.246, pruned_loss=0.04292, over 13318.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2664, pruned_loss=0.04616, over 2597534.01 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:55:39,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256582.33333333334, ans=0.1 +2024-08-03 22:55:50,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.01 vs. limit=15.0 +2024-08-03 22:56:03,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256692.33333333334, ans=0.125 +2024-08-03 22:56:10,744 INFO [train.py:1114] (0/4) Epoch 20, batch 850, loss[loss=0.1844, simple_loss=0.2828, pruned_loss=0.043, over 13326.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2664, pruned_loss=0.04606, over 2609961.11 frames. ], batch size: 40, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:56:12,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.65 vs. 
limit=15.0 +2024-08-03 22:56:13,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256729.0, ans=0.1 +2024-08-03 22:56:14,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=256729.0, ans=0.09899494936611666 +2024-08-03 22:56:20,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256765.66666666666, ans=0.125 +2024-08-03 22:56:37,702 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:56:44,113 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.111e+01 1.084e+02 1.248e+02 1.708e+02 3.125e+02, threshold=2.496e+02, percent-clipped=3.0 +2024-08-03 22:56:49,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=256875.66666666666, ans=0.125 +2024-08-03 22:56:52,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=256875.66666666666, ans=0.07 +2024-08-03 22:56:54,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=256875.66666666666, ans=0.125 +2024-08-03 22:56:58,795 INFO [train.py:1114] (0/4) Epoch 20, batch 900, loss[loss=0.1628, simple_loss=0.2414, pruned_loss=0.04208, over 13329.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2668, pruned_loss=0.04641, over 2612124.33 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:57:05,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=256912.33333333334, ans=0.05 +2024-08-03 22:57:07,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=256949.0, ans=0.0 +2024-08-03 22:57:07,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256949.0, ans=0.1 +2024-08-03 22:57:24,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256985.66666666666, ans=0.1 +2024-08-03 22:57:28,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=257022.33333333334, ans=0.125 +2024-08-03 22:57:43,714 INFO [train.py:1114] (0/4) Epoch 20, batch 950, loss[loss=0.1714, simple_loss=0.2592, pruned_loss=0.04177, over 13522.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2665, pruned_loss=0.04626, over 2613853.34 frames. ], batch size: 34, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:57:47,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=257095.66666666666, ans=0.2 +2024-08-03 22:57:52,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=257132.33333333334, ans=0.0 +2024-08-03 22:57:54,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257132.33333333334, ans=0.0 +2024-08-03 22:57:54,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.42 vs. 
limit=12.0 +2024-08-03 22:58:12,167 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:58:16,420 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.792e+01 1.096e+02 1.434e+02 1.875e+02 2.963e+02, threshold=2.868e+02, percent-clipped=4.0 +2024-08-03 22:58:31,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=257242.33333333334, ans=0.025 +2024-08-03 22:58:32,910 INFO [train.py:1114] (0/4) Epoch 20, batch 1000, loss[loss=0.1749, simple_loss=0.2618, pruned_loss=0.044, over 13342.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2675, pruned_loss=0.04675, over 2611867.46 frames. ], batch size: 35, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 22:58:42,405 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:59:06,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=257389.0, ans=0.04949747468305833 +2024-08-03 22:59:09,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=257425.66666666666, ans=0.125 +2024-08-03 22:59:09,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=257425.66666666666, ans=0.0 +2024-08-03 22:59:21,333 INFO [train.py:1114] (0/4) Epoch 20, batch 1050, loss[loss=0.1709, simple_loss=0.2626, pruned_loss=0.03963, over 13578.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2671, pruned_loss=0.0468, over 2616487.36 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 22:59:22,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=257462.33333333334, ans=0.0 +2024-08-03 22:59:50,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.10 vs. limit=22.5 +2024-08-03 22:59:52,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.716e+01 1.110e+02 1.251e+02 1.540e+02 2.508e+02, threshold=2.503e+02, percent-clipped=0.0 +2024-08-03 22:59:55,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257572.33333333334, ans=0.0 +2024-08-03 22:59:57,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257609.0, ans=0.125 +2024-08-03 23:00:03,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.25 vs. limit=22.5 +2024-08-03 23:00:07,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=257609.0, ans=0.0 +2024-08-03 23:00:09,001 INFO [train.py:1114] (0/4) Epoch 20, batch 1100, loss[loss=0.1756, simple_loss=0.2633, pruned_loss=0.04398, over 13556.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2666, pruned_loss=0.04624, over 2619779.95 frames. 
], batch size: 36, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:00:42,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257755.66666666666, ans=0.125 +2024-08-03 23:00:54,199 INFO [train.py:1114] (0/4) Epoch 20, batch 1150, loss[loss=0.2114, simple_loss=0.2914, pruned_loss=0.06566, over 13559.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2668, pruned_loss=0.04616, over 2618984.65 frames. ], batch size: 36, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:01:16,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=257902.33333333334, ans=0.0 +2024-08-03 23:01:19,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=257902.33333333334, ans=0.0 +2024-08-03 23:01:26,028 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.874e+01 1.110e+02 1.277e+02 1.674e+02 2.760e+02, threshold=2.554e+02, percent-clipped=1.0 +2024-08-03 23:01:32,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257975.66666666666, ans=0.125 +2024-08-03 23:01:33,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=257975.66666666666, ans=0.2 +2024-08-03 23:01:39,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.89 vs. limit=15.0 +2024-08-03 23:01:39,682 INFO [train.py:1114] (0/4) Epoch 20, batch 1200, loss[loss=0.2033, simple_loss=0.2974, pruned_loss=0.05461, over 13574.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2675, pruned_loss=0.04619, over 2616354.47 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:01:46,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.71 vs. limit=6.0 +2024-08-03 23:02:11,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=258122.33333333334, ans=0.025 +2024-08-03 23:02:21,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=258159.0, ans=0.025 +2024-08-03 23:02:27,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=258195.66666666666, ans=0.0 +2024-08-03 23:02:28,529 INFO [train.py:1114] (0/4) Epoch 20, batch 1250, loss[loss=0.1719, simple_loss=0.2635, pruned_loss=0.04018, over 13462.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2683, pruned_loss=0.04648, over 2628447.57 frames. ], batch size: 42, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:03:01,941 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.994e+01 1.078e+02 1.315e+02 1.640e+02 2.788e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 23:03:08,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.36 vs. 
limit=12.0 +2024-08-03 23:03:09,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=258342.33333333334, ans=0.125 +2024-08-03 23:03:15,649 INFO [train.py:1114] (0/4) Epoch 20, batch 1300, loss[loss=0.1819, simple_loss=0.2706, pruned_loss=0.04663, over 12928.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2684, pruned_loss=0.04692, over 2631131.12 frames. ], batch size: 52, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:03:16,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=258379.0, ans=0.0 +2024-08-03 23:03:18,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=258379.0, ans=0.125 +2024-08-03 23:03:20,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=258379.0, ans=0.125 +2024-08-03 23:03:40,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258452.33333333334, ans=0.1 +2024-08-03 23:03:45,383 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.49 vs. limit=10.0 +2024-08-03 23:04:02,929 INFO [train.py:1114] (0/4) Epoch 20, batch 1350, loss[loss=0.1577, simple_loss=0.255, pruned_loss=0.03017, over 13541.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2679, pruned_loss=0.04662, over 2638486.02 frames. ], batch size: 37, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:12,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=258599.0, ans=0.2 +2024-08-03 23:04:16,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.60 vs. limit=15.0 +2024-08-03 23:04:32,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.36 vs. limit=15.0 +2024-08-03 23:04:34,297 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.766e+01 1.164e+02 1.354e+02 1.727e+02 2.558e+02, threshold=2.707e+02, percent-clipped=0.0 +2024-08-03 23:04:37,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258672.33333333334, ans=0.125 +2024-08-03 23:04:39,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=258709.0, ans=0.0 +2024-08-03 23:04:42,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=258709.0, ans=0.0 +2024-08-03 23:04:47,961 INFO [train.py:1114] (0/4) Epoch 20, batch 1400, loss[loss=0.1395, simple_loss=0.217, pruned_loss=0.03099, over 13261.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2672, pruned_loss=0.04616, over 2642145.12 frames. 
], batch size: 31, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:05:10,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=258819.0, ans=10.0 +2024-08-03 23:05:12,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=258819.0, ans=0.125 +2024-08-03 23:05:12,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=258819.0, ans=0.0 +2024-08-03 23:05:35,405 INFO [train.py:1114] (0/4) Epoch 20, batch 1450, loss[loss=0.1801, simple_loss=0.2705, pruned_loss=0.04488, over 13437.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2677, pruned_loss=0.04636, over 2641957.07 frames. ], batch size: 43, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:05:37,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.74 vs. limit=12.0 +2024-08-03 23:05:43,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=258929.0, ans=0.125 +2024-08-03 23:05:46,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=258965.66666666666, ans=0.125 +2024-08-03 23:05:58,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=259002.33333333334, ans=0.0 +2024-08-03 23:06:08,776 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.191e+01 1.099e+02 1.241e+02 1.472e+02 2.601e+02, threshold=2.481e+02, percent-clipped=0.0 +2024-08-03 23:06:15,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=259075.66666666666, ans=0.125 +2024-08-03 23:06:16,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259075.66666666666, ans=0.1 +2024-08-03 23:06:22,448 INFO [train.py:1114] (0/4) Epoch 20, batch 1500, loss[loss=0.188, simple_loss=0.2783, pruned_loss=0.04886, over 13397.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2687, pruned_loss=0.04684, over 2641379.87 frames. ], batch size: 39, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:06:34,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.80 vs. 
limit=10.0 +2024-08-03 23:06:40,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=259149.0, ans=0.0 +2024-08-03 23:06:57,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=259222.33333333334, ans=0.1 +2024-08-03 23:06:58,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=259222.33333333334, ans=0.125 +2024-08-03 23:07:03,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259259.0, ans=0.125 +2024-08-03 23:07:04,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=259259.0, ans=0.0 +2024-08-03 23:07:04,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=259259.0, ans=0.0 +2024-08-03 23:07:12,329 INFO [train.py:1114] (0/4) Epoch 20, batch 1550, loss[loss=0.1855, simple_loss=0.2758, pruned_loss=0.04759, over 13416.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2685, pruned_loss=0.04691, over 2631065.81 frames. ], batch size: 41, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:07:12,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=259295.66666666666, ans=0.2 +2024-08-03 23:07:15,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=259295.66666666666, ans=0.2 +2024-08-03 23:07:24,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259332.33333333334, ans=0.125 +2024-08-03 23:07:24,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259332.33333333334, ans=0.125 +2024-08-03 23:07:45,376 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.100e+02 1.241e+02 1.619e+02 2.779e+02, threshold=2.482e+02, percent-clipped=4.0 +2024-08-03 23:07:47,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.91 vs. limit=22.5 +2024-08-03 23:07:57,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=259442.33333333334, ans=0.2 +2024-08-03 23:07:59,017 INFO [train.py:1114] (0/4) Epoch 20, batch 1600, loss[loss=0.2005, simple_loss=0.2891, pruned_loss=0.05596, over 13581.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2684, pruned_loss=0.04717, over 2623759.67 frames. ], batch size: 39, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:09,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=15.0 +2024-08-03 23:08:44,991 INFO [train.py:1114] (0/4) Epoch 20, batch 1650, loss[loss=0.1787, simple_loss=0.2743, pruned_loss=0.04159, over 13314.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2675, pruned_loss=0.04689, over 2620890.09 frames. 
], batch size: 40, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:45,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259662.33333333334, ans=0.125 +2024-08-03 23:08:48,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259662.33333333334, ans=0.1 +2024-08-03 23:08:51,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=259662.33333333334, ans=0.1 +2024-08-03 23:09:03,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.74 vs. limit=15.0 +2024-08-03 23:09:20,931 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.088e+02 1.223e+02 1.648e+02 3.559e+02, threshold=2.446e+02, percent-clipped=8.0 +2024-08-03 23:09:25,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259809.0, ans=0.125 +2024-08-03 23:09:34,499 INFO [train.py:1114] (0/4) Epoch 20, batch 1700, loss[loss=0.1637, simple_loss=0.2388, pruned_loss=0.04427, over 13251.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2674, pruned_loss=0.04637, over 2629870.18 frames. ], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:09:50,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=259882.33333333334, ans=0.2 +2024-08-03 23:09:51,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.39 vs. limit=15.0 +2024-08-03 23:10:02,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.03 vs. limit=15.0 +2024-08-03 23:10:05,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259955.66666666666, ans=0.0 +2024-08-03 23:10:08,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.67 vs. limit=15.0 +2024-08-03 23:10:09,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=259955.66666666666, ans=0.0
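The ScheduledFloat entries in this log print the current value (ans) of a scheduled hyper-parameter, e.g. a skip rate or dropout probability, at the current batch_count. A sketch of such a schedule as a piecewise-linear function of batch count, with hypothetical breakpoints (the real ScheduledFloat in icefall's scaling.py carries more machinery than this):

# Sketch of a piecewise-linear schedule over batch count, matching the
# (batch_count, ans) pairs these log lines print. Breakpoints are hypothetical.
class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs in increasing batch_count order
        self.points = list(points)

    def value(self, batch_count):
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)

# e.g. a dropout that decays from 0.3 to 0.1 over the first 20000 batches:
dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
assert dropout_p.value(259662.33333333334) == 0.1  # matches the ans=0.1 dropout_p entries above

+2024-08-03 23:10:21,442 INFO [train.py:1114] (0/4) Epoch 20, batch 1750, loss[loss=0.1565, simple_loss=0.2375, pruned_loss=0.03775, over 13582.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2665, pruned_loss=0.046, over 2633248.59 frames. 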
], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:10:25,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=260029.0, ans=0.2 +2024-08-03 23:10:36,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=260065.66666666666, ans=0.125 +2024-08-03 23:10:42,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=260102.33333333334, ans=0.125 +2024-08-03 23:10:45,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=260102.33333333334, ans=0.2 +2024-08-03 23:10:54,932 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.991e+01 1.099e+02 1.220e+02 1.451e+02 2.480e+02, threshold=2.439e+02, percent-clipped=1.0 +2024-08-03 23:10:59,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=260175.66666666666, ans=0.2 +2024-08-03 23:11:04,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.21 vs. limit=15.0 +2024-08-03 23:11:08,878 INFO [train.py:1114] (0/4) Epoch 20, batch 1800, loss[loss=0.1977, simple_loss=0.2888, pruned_loss=0.05332, over 13543.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2667, pruned_loss=0.04593, over 2634694.26 frames. ], batch size: 38, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:14,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0 +2024-08-03 23:11:32,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=260285.66666666666, ans=0.125 +2024-08-03 23:11:37,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=260322.33333333334, ans=0.07 +2024-08-03 23:11:39,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=260322.33333333334, ans=0.125 +2024-08-03 23:11:39,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=260322.33333333334, ans=0.025 +2024-08-03 23:11:49,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260359.0, ans=0.125 +2024-08-03 23:11:54,224 INFO [train.py:1114] (0/4) Epoch 20, batch 1850, loss[loss=0.1927, simple_loss=0.2813, pruned_loss=0.05203, over 13384.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2665, pruned_loss=0.04574, over 2637689.56 frames. 
], batch size: 39, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:57,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=260395.66666666666, ans=0.0 +2024-08-03 23:12:05,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=260432.33333333334, ans=0.0 +2024-08-03 23:12:09,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=260432.33333333334, ans=0.125 +2024-08-03 23:12:21,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=260505.66666666666, ans=0.04949747468305833 +2024-08-03 23:12:25,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260505.66666666666, ans=0.1 +2024-08-03 23:12:26,243 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.233e+01 1.108e+02 1.354e+02 1.892e+02 2.843e+02, threshold=2.709e+02, percent-clipped=7.0 +2024-08-03 23:12:44,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=15.0 +2024-08-03 23:12:58,348 INFO [train.py:1114] (0/4) Epoch 20, batch 1900, loss[loss=0.1969, simple_loss=0.2933, pruned_loss=0.0502, over 13324.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2677, pruned_loss=0.04617, over 2640336.30 frames. ], batch size: 40, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:04,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=260579.0, ans=0.07 +2024-08-03 23:13:20,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.84 vs. limit=15.0 +2024-08-03 23:13:26,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=260652.33333333334, ans=0.0 +2024-08-03 23:13:29,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=260689.0, ans=0.125 +2024-08-03 23:13:45,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=260762.33333333334, ans=0.125 +2024-08-03 23:13:45,887 INFO [train.py:1114] (0/4) Epoch 20, batch 1950, loss[loss=0.16, simple_loss=0.2489, pruned_loss=0.03552, over 13579.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2691, pruned_loss=0.04664, over 2646809.19 frames. ], batch size: 36, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:14:09,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=260835.66666666666, ans=0.0 +2024-08-03 23:14:19,452 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.342e+01 1.097e+02 1.231e+02 1.483e+02 2.195e+02, threshold=2.462e+02, percent-clipped=0.0 +2024-08-03 23:14:35,114 INFO [train.py:1114] (0/4) Epoch 20, batch 2000, loss[loss=0.1911, simple_loss=0.2679, pruned_loss=0.05719, over 13539.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2693, pruned_loss=0.04674, over 2635863.47 frames. 
], batch size: 31, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:14:36,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=260945.66666666666, ans=0.125 +2024-08-03 23:14:43,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=260982.33333333334, ans=0.125 +2024-08-03 23:14:58,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=261019.0, ans=0.125 +2024-08-03 23:15:07,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=15.0 +2024-08-03 23:15:20,639 INFO [train.py:1114] (0/4) Epoch 20, batch 2050, loss[loss=0.1592, simple_loss=0.2456, pruned_loss=0.03644, over 13437.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2688, pruned_loss=0.04681, over 2633903.09 frames. ], batch size: 32, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:15:21,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.35 vs. limit=22.5 +2024-08-03 23:15:41,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.45 vs. limit=15.0 +2024-08-03 23:15:41,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.18 vs. limit=22.5 +2024-08-03 23:15:46,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=261239.0, ans=0.05 +2024-08-03 23:15:47,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261239.0, ans=0.125 +2024-08-03 23:15:52,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.005e+01 1.074e+02 1.228e+02 1.423e+02 2.984e+02, threshold=2.455e+02, percent-clipped=1.0 +2024-08-03 23:15:56,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=261275.66666666666, ans=0.0 +2024-08-03 23:16:05,630 INFO [train.py:1114] (0/4) Epoch 20, batch 2100, loss[loss=0.2052, simple_loss=0.2888, pruned_loss=0.06078, over 13529.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2685, pruned_loss=0.04672, over 2639153.27 frames. ], batch size: 37, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:16:06,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=261312.33333333334, ans=15.0 +2024-08-03 23:16:43,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.59 vs. limit=22.5 +2024-08-03 23:16:48,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261459.0, ans=0.125 +2024-08-03 23:16:54,301 INFO [train.py:1114] (0/4) Epoch 20, batch 2150, loss[loss=0.1866, simple_loss=0.2749, pruned_loss=0.04917, over 13552.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2685, pruned_loss=0.04717, over 2648052.25 frames. 
], batch size: 36, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:16:56,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=261495.66666666666, ans=0.0 +2024-08-03 23:17:09,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=261532.33333333334, ans=10.0 +2024-08-03 23:17:26,357 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.602e+01 1.138e+02 1.478e+02 2.029e+02 3.755e+02, threshold=2.955e+02, percent-clipped=14.0 +2024-08-03 23:17:41,092 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:17:41,798 INFO [train.py:1114] (0/4) Epoch 20, batch 2200, loss[loss=0.184, simple_loss=0.2713, pruned_loss=0.0484, over 13418.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2683, pruned_loss=0.04696, over 2646267.28 frames. ], batch size: 39, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:17:51,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261715.66666666666, ans=0.125 +2024-08-03 23:18:22,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=261825.66666666666, ans=0.04949747468305833 +2024-08-03 23:18:31,229 INFO [train.py:1114] (0/4) Epoch 20, batch 2250, loss[loss=0.1737, simple_loss=0.2616, pruned_loss=0.04284, over 13352.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2679, pruned_loss=0.04681, over 2643564.29 frames. ], batch size: 37, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:18:33,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=261862.33333333334, ans=0.04949747468305833 +2024-08-03 23:18:43,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.47 vs. limit=10.0 +2024-08-03 23:18:46,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261899.0, ans=0.125 +2024-08-03 23:18:51,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=261935.66666666666, ans=0.125 +2024-08-03 23:18:55,953 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=15.0 +2024-08-03 23:19:02,738 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.914e+01 1.098e+02 1.238e+02 1.485e+02 2.172e+02, threshold=2.476e+02, percent-clipped=0.0 +2024-08-03 23:19:16,424 INFO [train.py:1114] (0/4) Epoch 20, batch 2300, loss[loss=0.1571, simple_loss=0.2476, pruned_loss=0.03327, over 13568.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2669, pruned_loss=0.04655, over 2640022.89 frames. 
], batch size: 33, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:19:18,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=262045.66666666666, ans=0.2 +2024-08-03 23:19:46,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=262155.6666666667, ans=0.125 +2024-08-03 23:20:01,891 INFO [train.py:1114] (0/4) Epoch 20, batch 2350, loss[loss=0.1795, simple_loss=0.2696, pruned_loss=0.04475, over 13542.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2668, pruned_loss=0.04672, over 2641858.81 frames. ], batch size: 38, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:20:02,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=262229.0, ans=0.0 +2024-08-03 23:20:04,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=262229.0, ans=0.125 +2024-08-03 23:20:22,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=262265.6666666667, ans=0.125 +2024-08-03 23:20:33,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=262339.0, ans=0.0 +2024-08-03 23:20:38,442 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.532e+01 1.125e+02 1.356e+02 1.575e+02 2.756e+02, threshold=2.712e+02, percent-clipped=1.0 +2024-08-03 23:20:40,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262339.0, ans=0.125 +2024-08-03 23:20:52,235 INFO [train.py:1114] (0/4) Epoch 20, batch 2400, loss[loss=0.1841, simple_loss=0.2676, pruned_loss=0.05028, over 13529.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2673, pruned_loss=0.04669, over 2642690.94 frames. 
], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:20:55,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262412.3333333333, ans=0.0 +2024-08-03 23:20:56,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=262412.3333333333, ans=0.05 +2024-08-03 23:20:58,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=262412.3333333333, ans=0.2 +2024-08-03 23:21:04,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=262449.0, ans=0.0 +2024-08-03 23:21:10,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=262449.0, ans=0.95 +2024-08-03 23:21:17,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=262485.6666666667, ans=0.125 +2024-08-03 23:21:20,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=262522.3333333333, ans=0.125 +2024-08-03 23:21:30,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262559.0, ans=0.125 +2024-08-03 23:21:33,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=262559.0, ans=0.125 +2024-08-03 23:21:35,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=262559.0, ans=0.0 +2024-08-03 23:21:41,802 INFO [train.py:1114] (0/4) Epoch 20, batch 2450, loss[loss=0.1722, simple_loss=0.2683, pruned_loss=0.03806, over 13354.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2684, pruned_loss=0.04689, over 2632307.92 frames. ], batch size: 37, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:21:48,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=262595.6666666667, ans=0.0 +2024-08-03 23:21:52,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262632.3333333333, ans=0.1 +2024-08-03 23:21:55,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-08-03 23:22:14,228 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.032e+01 1.119e+02 1.273e+02 1.496e+02 2.494e+02, threshold=2.546e+02, percent-clipped=0.0 +2024-08-03 23:22:18,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.79 vs. limit=22.5 +2024-08-03 23:22:21,707 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:22:22,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=262742.3333333333, ans=0.0 +2024-08-03 23:22:27,177 INFO [train.py:1114] (0/4) Epoch 20, batch 2500, loss[loss=0.1739, simple_loss=0.2607, pruned_loss=0.04358, over 13400.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04677, over 2636932.54 frames. 
], batch size: 39, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:22:36,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.81 vs. limit=15.0 +2024-08-03 23:22:42,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=262815.6666666667, ans=0.2 +2024-08-03 23:22:43,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=262815.6666666667, ans=0.125 +2024-08-03 23:22:46,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262852.3333333333, ans=0.125 +2024-08-03 23:22:51,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262852.3333333333, ans=0.125 +2024-08-03 23:22:59,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262889.0, ans=0.125 +2024-08-03 23:23:04,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262925.6666666667, ans=0.0 +2024-08-03 23:23:05,121 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:23:08,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262925.6666666667, ans=0.0 +2024-08-03 23:23:08,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=262925.6666666667, ans=0.0 +2024-08-03 23:23:09,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262925.6666666667, ans=0.1 +2024-08-03 23:23:11,079 INFO [train.py:1114] (0/4) Epoch 20, batch 2550, loss[loss=0.1616, simple_loss=0.237, pruned_loss=0.04315, over 13543.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2677, pruned_loss=0.04684, over 2638783.13 frames. ], batch size: 31, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:23:19,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=262999.0, ans=0.0 +2024-08-03 23:23:20,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.67 vs. limit=6.0 +2024-08-03 23:23:21,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262999.0, ans=0.0 +2024-08-03 23:23:22,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=262999.0, ans=0.2 +2024-08-03 23:23:42,188 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.295e+01 1.081e+02 1.432e+02 1.962e+02 3.343e+02, threshold=2.864e+02, percent-clipped=8.0 +2024-08-03 23:23:47,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. 
limit=6.0 +2024-08-03 23:23:50,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=263109.0, ans=0.125 +2024-08-03 23:23:52,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=263109.0, ans=0.0 +2024-08-03 23:23:54,454 INFO [train.py:1114] (0/4) Epoch 20, batch 2600, loss[loss=0.1659, simple_loss=0.2556, pruned_loss=0.03806, over 13556.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2678, pruned_loss=0.04664, over 2637870.04 frames. ], batch size: 36, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:23:58,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=263145.6666666667, ans=0.125 +2024-08-03 23:24:12,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=263219.0, ans=0.2 +2024-08-03 23:24:19,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.09 vs. limit=15.0 +2024-08-03 23:24:22,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=15.0 +2024-08-03 23:24:32,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=263292.3333333333, ans=0.025 +2024-08-03 23:24:38,437 INFO [train.py:1114] (0/4) Epoch 20, batch 2650, loss[loss=0.1741, simple_loss=0.2683, pruned_loss=0.04, over 13338.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2683, pruned_loss=0.04681, over 2641143.27 frames. ], batch size: 46, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:24:41,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=263329.0, ans=0.125 +2024-08-03 23:25:06,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=263439.0, ans=0.125 +2024-08-03 23:25:09,825 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.115e+02 1.313e+02 1.651e+02 2.845e+02, threshold=2.627e+02, percent-clipped=0.0 +2024-08-03 23:25:18,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.60 vs. limit=22.5 +2024-08-03 23:25:20,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=263475.6666666667, ans=0.0 +2024-08-03 23:25:21,941 INFO [train.py:1114] (0/4) Epoch 20, batch 2700, loss[loss=0.178, simple_loss=0.2709, pruned_loss=0.04255, over 13549.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.269, pruned_loss=0.04693, over 2637514.84 frames. ], batch size: 40, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:25:24,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=263512.3333333333, ans=0.0 +2024-08-03 23:25:32,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=263549.0, ans=0.125 +2024-08-03 23:25:34,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=263549.0, ans=0.035 +2024-08-03 23:25:34,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.10 vs. 
limit=15.0 +2024-08-03 23:25:35,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=263549.0, ans=0.125 +2024-08-03 23:25:35,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=263549.0, ans=0.125 +2024-08-03 23:25:37,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=263549.0, ans=0.0 +2024-08-03 23:25:43,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263585.6666666667, ans=0.1 +2024-08-03 23:26:08,056 INFO [train.py:1114] (0/4) Epoch 20, batch 2750, loss[loss=0.1908, simple_loss=0.2695, pruned_loss=0.05601, over 13325.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2677, pruned_loss=0.04654, over 2635052.67 frames. ], batch size: 34, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:26:22,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=263732.3333333333, ans=0.125 +2024-08-03 23:26:33,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=263805.6666666667, ans=0.125 +2024-08-03 23:26:39,455 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.617e+01 1.130e+02 1.320e+02 1.634e+02 2.919e+02, threshold=2.640e+02, percent-clipped=4.0 +2024-08-03 23:26:39,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=263805.6666666667, ans=0.2 +2024-08-03 23:26:51,636 INFO [train.py:1114] (0/4) Epoch 20, batch 2800, loss[loss=0.2295, simple_loss=0.3072, pruned_loss=0.07587, over 9179.00 frames. ], tot_loss[loss=0.181, simple_loss=0.268, pruned_loss=0.04697, over 2627692.42 frames. ], batch size: 96, lr: 6.17e-03, grad_scale: 32.0 +2024-08-03 23:27:06,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.65 vs. limit=15.0 +2024-08-03 23:27:15,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=263952.3333333333, ans=0.0 +2024-08-03 23:27:15,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=263952.3333333333, ans=0.05 +2024-08-03 23:27:20,021 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/checkpoint-72000.pt +2024-08-03 23:27:30,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=264025.6666666667, ans=0.125 +2024-08-03 23:27:35,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=264025.6666666667, ans=0.125 +2024-08-03 23:27:37,591 INFO [train.py:1114] (0/4) Epoch 20, batch 2850, loss[loss=0.1506, simple_loss=0.2383, pruned_loss=0.03144, over 13356.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2683, pruned_loss=0.04713, over 2621277.28 frames. 
], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:27:43,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264062.3333333333, ans=0.1 +2024-08-03 23:27:53,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=264099.0, ans=0.025 +2024-08-03 23:27:58,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.68 vs. limit=15.0 +2024-08-03 23:27:59,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=264135.6666666667, ans=0.0 +2024-08-03 23:28:00,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264135.6666666667, ans=0.1 +2024-08-03 23:28:06,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264172.3333333333, ans=0.1 +2024-08-03 23:28:07,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264172.3333333333, ans=0.0 +2024-08-03 23:28:08,790 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.256e+01 1.084e+02 1.260e+02 1.608e+02 3.133e+02, threshold=2.519e+02, percent-clipped=4.0 +2024-08-03 23:28:22,069 INFO [train.py:1114] (0/4) Epoch 20, batch 2900, loss[loss=0.1844, simple_loss=0.2765, pruned_loss=0.04609, over 13374.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2701, pruned_loss=0.04774, over 2632102.00 frames. ], batch size: 36, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:28:31,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.62 vs. limit=22.5 +2024-08-03 23:28:33,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.88 vs. limit=6.0 +2024-08-03 23:29:05,320 INFO [train.py:1114] (0/4) Epoch 20, batch 2950, loss[loss=0.1629, simple_loss=0.2514, pruned_loss=0.03716, over 13325.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2681, pruned_loss=0.0471, over 2630329.35 frames. ], batch size: 34, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:16,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=264465.6666666667, ans=0.025 +2024-08-03 23:29:29,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.56 vs. 
limit=22.5 +2024-08-03 23:29:32,967 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:29:37,836 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 7.731e+01 1.151e+02 1.380e+02 1.780e+02 2.510e+02, threshold=2.761e+02, percent-clipped=0.0 +2024-08-03 23:29:45,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264575.6666666667, ans=0.1 +2024-08-03 23:29:45,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=264575.6666666667, ans=0.2 +2024-08-03 23:29:46,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.53 vs. limit=15.0 +2024-08-03 23:29:49,956 INFO [train.py:1114] (0/4) Epoch 20, batch 3000, loss[loss=0.1968, simple_loss=0.2846, pruned_loss=0.05455, over 13554.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04676, over 2630878.30 frames. ], batch size: 37, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:49,957 INFO [train.py:1137] (0/4) Computing validation loss +2024-08-03 23:29:57,497 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.5786, 3.8417, 4.0323, 3.9032], device='cuda:0') +2024-08-03 23:29:59,831 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1683, simple_loss=0.267, pruned_loss=0.03482, over 944034.00 frames. +2024-08-03 23:29:59,831 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 10018MB +2024-08-03 23:30:05,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=264612.3333333333, ans=0.0 +2024-08-03 23:30:11,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=264649.0, ans=0.2 +2024-08-03 23:30:11,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=264649.0, ans=0.0 +2024-08-03 23:30:13,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264649.0, ans=0.1 +2024-08-03 23:30:13,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=264649.0, ans=0.125 +2024-08-03 23:30:14,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=264649.0, ans=0.2 +2024-08-03 23:30:30,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=264722.3333333333, ans=0.025 +2024-08-03 23:30:36,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264759.0, ans=0.1 +2024-08-03 23:30:38,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264759.0, ans=0.1 +2024-08-03 23:30:38,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=264759.0, ans=0.05 +2024-08-03 23:30:39,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=264759.0, ans=0.125 +2024-08-03 23:30:43,194 
INFO [train.py:1114] (0/4) Epoch 20, batch 3050, loss[loss=0.1843, simple_loss=0.2653, pruned_loss=0.05161, over 13541.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2684, pruned_loss=0.04697, over 2627979.05 frames. ], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:31:05,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=264832.3333333333, ans=0.035 +2024-08-03 23:31:09,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.whiten.whitening_limit, batch_count=264869.0, ans=15.0 +2024-08-03 23:31:20,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264905.6666666667, ans=0.1 +2024-08-03 23:31:22,601 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.840e+01 1.051e+02 1.185e+02 1.395e+02 2.152e+02, threshold=2.371e+02, percent-clipped=0.0 +2024-08-03 23:31:24,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=264905.6666666667, ans=0.025 +2024-08-03 23:31:31,387 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:31:34,621 INFO [train.py:1114] (0/4) Epoch 20, batch 3100, loss[loss=0.1807, simple_loss=0.274, pruned_loss=0.04374, over 13311.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2682, pruned_loss=0.04695, over 2627708.62 frames. ], batch size: 46, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:31:38,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264979.0, ans=0.125 +2024-08-03 23:31:39,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=264979.0, ans=0.2 +2024-08-03 23:31:45,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265015.6666666667, ans=0.125 +2024-08-03 23:31:50,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=265015.6666666667, ans=0.125 +2024-08-03 23:31:58,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=265052.3333333333, ans=0.125 +2024-08-03 23:32:05,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.99 vs. limit=8.0 +2024-08-03 23:32:11,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265125.6666666667, ans=0.125 +2024-08-03 23:32:17,274 INFO [train.py:1114] (0/4) Epoch 20, batch 3150, loss[loss=0.1988, simple_loss=0.2859, pruned_loss=0.05584, over 13040.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2679, pruned_loss=0.0467, over 2629540.03 frames. 
], batch size: 48, lr: 6.15e-03, grad_scale: 32.0
+2024-08-03 23:32:24,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=265162.3333333333, ans=0.2
+2024-08-03 23:32:33,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=265235.6666666667, ans=0.125
+2024-08-03 23:32:48,872 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.568e+01 1.088e+02 1.255e+02 1.655e+02 2.829e+02, threshold=2.511e+02, percent-clipped=5.0
+2024-08-03 23:33:01,495 INFO [train.py:1114] (0/4) Epoch 20, batch 3200, loss[loss=0.1685, simple_loss=0.2579, pruned_loss=0.03952, over 13553.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2672, pruned_loss=0.04654, over 2634880.94 frames. ], batch size: 37, lr: 6.15e-03, grad_scale: 32.0
+2024-08-03 23:33:01,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=265345.6666666667, ans=0.125
+2024-08-03 23:33:19,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.85 vs. limit=15.0
+2024-08-03 23:33:43,994 INFO [train.py:1114] (0/4) Epoch 20, batch 3250, loss[loss=0.1761, simple_loss=0.2627, pruned_loss=0.04473, over 13395.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2676, pruned_loss=0.0463, over 2639667.94 frames. ], batch size: 38, lr: 6.15e-03, grad_scale: 16.0
+2024-08-03 23:33:55,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=15.0
+2024-08-03 23:33:58,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.69 vs. limit=15.0
+2024-08-03 23:34:00,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265602.3333333333, ans=0.125
+2024-08-03 23:34:02,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.54 vs. limit=15.0
+2024-08-03 23:34:07,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265602.3333333333, ans=0.1
+2024-08-03 23:34:16,939 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.340e+01 1.119e+02 1.289e+02 1.600e+02 2.225e+02, threshold=2.578e+02, percent-clipped=0.0
+2024-08-03 23:34:22,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=265675.6666666667, ans=0.125
+2024-08-03 23:34:26,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=265712.3333333333, ans=0.0
+2024-08-03 23:34:27,190 INFO [train.py:1114] (0/4) Epoch 20, batch 3300, loss[loss=0.1887, simple_loss=0.2838, pruned_loss=0.04676, over 12795.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2675, pruned_loss=0.04662, over 2640802.20 frames. ], batch size: 52, lr: 6.15e-03, grad_scale: 16.0
+2024-08-03 23:34:31,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.45 vs. limit=15.0
+2024-08-03 23:34:47,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.48 vs. limit=15.0
+2024-08-03 23:34:54,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265822.3333333333, ans=0.125
+2024-08-03 23:34:57,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=265822.3333333333, ans=0.125
+2024-08-03 23:35:00,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=265822.3333333333, ans=0.2
+2024-08-03 23:35:01,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=265859.0, ans=0.2
+2024-08-03 23:35:04,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=265859.0, ans=0.125
+2024-08-03 23:35:09,983 INFO [train.py:1114] (0/4) Epoch 20, batch 3350, loss[loss=0.183, simple_loss=0.2704, pruned_loss=0.04786, over 13045.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2687, pruned_loss=0.04719, over 2630060.16 frames. ], batch size: 48, lr: 6.14e-03, grad_scale: 16.0
+2024-08-03 23:35:20,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=265932.3333333333, ans=0.2
+2024-08-03 23:35:25,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=265932.3333333333, ans=0.2
+2024-08-03 23:35:42,730 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.153e+02 1.327e+02 1.503e+02 2.183e+02, threshold=2.655e+02, percent-clipped=0.0
+2024-08-03 23:35:53,126 INFO [train.py:1114] (0/4) Epoch 20, batch 3400, loss[loss=0.1616, simple_loss=0.2373, pruned_loss=0.04299, over 13532.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2685, pruned_loss=0.04732, over 2626176.27 frames. ], batch size: 31, lr: 6.14e-03, grad_scale: 16.0
+2024-08-03 23:36:18,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266189.0, ans=0.1
+2024-08-03 23:36:20,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=266189.0, ans=0.2
+2024-08-03 23:36:35,522 INFO [train.py:1114] (0/4) Epoch 20, batch 3450, loss[loss=0.1753, simple_loss=0.2687, pruned_loss=0.04095, over 12934.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2687, pruned_loss=0.04726, over 2629212.72 frames. ], batch size: 52, lr: 6.14e-03, grad_scale: 16.0
+2024-08-03 23:36:37,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=266262.3333333333, ans=0.0
+2024-08-03 23:36:58,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=266335.6666666667, ans=0.0
+2024-08-03 23:37:08,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.153e+01 1.075e+02 1.228e+02 1.591e+02 2.797e+02, threshold=2.457e+02, percent-clipped=1.0
+2024-08-03 23:37:15,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=266409.0, ans=0.125
+2024-08-03 23:37:18,296 INFO [train.py:1114] (0/4) Epoch 20, batch 3500, loss[loss=0.1597, simple_loss=0.2463, pruned_loss=0.03649, over 13532.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2674, pruned_loss=0.04676, over 2630188.86 frames. ], batch size: 34, lr: 6.14e-03, grad_scale: 16.0
+2024-08-03 23:37:30,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=266482.3333333333, ans=0.125
+2024-08-03 23:37:36,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0
+2024-08-03 23:37:44,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=266555.6666666667, ans=0.125
+2024-08-03 23:37:49,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=266555.6666666667, ans=0.0
+2024-08-03 23:37:50,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=266555.6666666667, ans=0.0
+2024-08-03 23:37:52,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=266592.3333333333, ans=0.0
+2024-08-03 23:37:56,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266592.3333333333, ans=0.125
+2024-08-03 23:37:58,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=266592.3333333333, ans=0.125
+2024-08-03 23:37:58,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=266592.3333333333, ans=0.125
+2024-08-03 23:38:00,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.18 vs. limit=15.0
+2024-08-03 23:38:02,720 INFO [train.py:1114] (0/4) Epoch 20, batch 3550, loss[loss=0.1915, simple_loss=0.2754, pruned_loss=0.05377, over 12517.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2694, pruned_loss=0.04775, over 2629215.90 frames. ], batch size: 58, lr: 6.13e-03, grad_scale: 16.0
+2024-08-03 23:38:26,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=266702.3333333333, ans=0.125
+2024-08-03 23:38:36,939 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.117e+01 1.134e+02 1.257e+02 1.418e+02 2.840e+02, threshold=2.514e+02, percent-clipped=1.0
+2024-08-03 23:38:43,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266775.6666666667, ans=0.125
+2024-08-03 23:38:45,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=266775.6666666667, ans=15.0
+2024-08-03 23:38:47,725 INFO [train.py:1114] (0/4) Epoch 20, batch 3600, loss[loss=0.2293, simple_loss=0.2974, pruned_loss=0.0806, over 9006.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2737, pruned_loss=0.05136, over 2488634.22 frames. ], batch size: 97, lr: 6.13e-03, grad_scale: 32.0
+2024-08-03 23:39:01,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=266849.0, ans=0.2
+2024-08-03 23:39:10,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266885.6666666667, ans=0.1
+2024-08-03 23:39:22,655 INFO [checkpoint.py:75] (0/4) Saving checkpoint to /home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp/epoch-20.pt
+2024-08-03 23:39:27,720 INFO [train.py:1387] (0/4) Done!
diff --git a/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-1 b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-1
new file mode 100644
index 0000000000000000000000000000000000000000..0f49bd520396e5e37db995111392b2e50073ed70
--- /dev/null
+++ b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-1
@@ -0,0 +1,10264 @@
+2024-08-02 23:23:28,700 INFO [train.py:1182] (1/4) Training started
+2024-08-02 23:23:28,701 INFO [train.py:1192] (1/4) Device: cuda:1
+2024-08-02 23:23:28,704 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': 'e3b0958-dirty', 'icefall-git-date': 'Tue Jul 30 21:51:45 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2656.int.cedar.computecanada.ca', 'IP address': '172.16.146.93'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 550, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500}
+2024-08-02 23:23:28,704 INFO [train.py:1212] (1/4) About to create model
+2024-08-02 23:23:31,845 INFO [train.py:1216] (1/4) Number of model parameters: 66110931
+2024-08-02 23:23:33,392 INFO [train.py:1231] (1/4) Using DDP
+2024-08-02 23:23:49,628 INFO [asr_datamodule.py:909] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts
+2024-08-02 23:23:56,249 INFO [asr_datamodule.py:711] (1/4) Disable MUSAN
+2024-08-02 23:23:56,249 INFO [asr_datamodule.py:729] (1/4) Enable SpecAugment
+2024-08-02 23:23:56,249 INFO [asr_datamodule.py:730] (1/4) Time warp factor: 80
+2024-08-02 23:23:56,250 INFO [asr_datamodule.py:740] (1/4) Num frame mask: 10
+2024-08-02 23:23:56,250 INFO [asr_datamodule.py:753] (1/4) About to create train dataset
+2024-08-02 23:23:56,250 INFO [asr_datamodule.py:780] (1/4) Using DynamicBucketingSampler.
+2024-08-02 23:23:57,877 INFO [asr_datamodule.py:797] (1/4) About to create train dataloader
+2024-08-02 23:23:57,880 INFO [asr_datamodule.py:926] (1/4) About to get dev-clean cuts
+2024-08-02 23:23:58,242 INFO [asr_datamodule.py:933] (1/4) About to get dev-other cuts
+2024-08-02 23:23:58,294 INFO [asr_datamodule.py:829] (1/4) About to create dev dataset
+2024-08-02 23:23:58,629 INFO [asr_datamodule.py:846] (1/4) About to create dev dataloader
+2024-08-02 23:23:58,629 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-08-02 23:30:00,741 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 8503MB +2024-08-02 23:30:01,697 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 8503MB +2024-08-02 23:32:14,988 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 8644MB +2024-08-02 23:32:16,118 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 8644MB +2024-08-02 23:33:32,663 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 8644MB +2024-08-02 23:33:33,251 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=256, metric=28.00 vs. limit=7.5 +2024-08-02 23:33:39,253 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 8644MB +2024-08-02 23:34:57,142 INFO [train.py:1114] (1/4) Epoch 1, batch 0, loss[loss=7.853, simple_loss=7.177, pruned_loss=6.749, over 13331.00 frames. ], tot_loss[loss=7.853, simple_loss=7.177, pruned_loss=6.749, over 13331.00 frames. ], batch size: 33, lr: 2.25e-02, grad_scale: 1.0 +2024-08-02 23:34:57,143 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-02 23:35:53,582 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=7.994, simple_loss=7.311, pruned_loss=6.819, over 944034.00 frames. +2024-08-02 23:35:53,582 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 8664MB +2024-08-02 23:36:58,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=0.0, ans=0.2 +2024-08-02 23:37:13,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=0.0, ans=0.3 +2024-08-02 23:38:15,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.80 vs. limit=7.5 +2024-08-02 23:38:40,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36.666666666666664, ans=0.49828125 +2024-08-02 23:39:43,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.388e+03 2.561e+03 2.629e+03 3.528e+03 3.944e+03, threshold=1.052e+04, percent-clipped=0.0 +2024-08-02 23:41:17,775 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=49.39 vs. limit=7.51375 +2024-08-02 23:43:00,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=111.40 vs. limit=5.0 +2024-08-02 23:43:00,502 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 5.313e+02 2.388e+03 2.781e+03 4.030e+03, threshold=9.553e+03, percent-clipped=0.0 +2024-08-02 23:43:32,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73.33333333333333, ans=0.4965625 +2024-08-02 23:43:45,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=482.18 vs. limit=7.54125 +2024-08-02 23:44:03,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=313.97 vs. limit=7.5825 +2024-08-02 23:44:56,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=294.89 vs. 
limit=7.54125 +2024-08-02 23:45:58,756 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 4.964e+02 6.315e+02 2.388e+03 4.030e+03, threshold=2.526e+03, percent-clipped=0.0 +2024-08-02 23:46:45,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=146.66666666666666, ans=5.091666666666667 +2024-08-02 23:46:46,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=183.33333333333334, ans=0.095875 +2024-08-02 23:46:50,275 INFO [train.py:1114] (1/4) Epoch 1, batch 50, loss[loss=1.22, simple_loss=1.081, pruned_loss=1.247, over 13407.00 frames. ], tot_loss[loss=3.062, simple_loss=2.808, pruned_loss=2.465, over 577703.18 frames. ], batch size: 32, lr: 2.48e-02, grad_scale: 0.5 +2024-08-02 23:48:02,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=3.0275 +2024-08-02 23:48:03,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=11.99 vs. limit=4.088 +2024-08-02 23:48:23,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220.0, ans=0.4896875 +2024-08-02 23:48:40,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=220.0, ans=0.4896875 +2024-08-02 23:48:40,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=141.40 vs. limit=7.5825 +2024-08-02 23:50:33,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=15.49 vs. limit=4.102666666666667 +2024-08-02 23:50:43,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=256.6666666666667, ans=0.8910166666666667 +2024-08-02 23:51:14,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=132.87 vs. limit=7.59625 +2024-08-02 23:52:12,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=293.3333333333333, ans=0.48625 +2024-08-02 23:53:58,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=237.26 vs. limit=5.1466666666666665 +2024-08-02 23:54:18,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=203.33 vs. limit=7.61 +2024-08-02 23:54:28,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=330.0, ans=0.48453125 +2024-08-02 23:54:28,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.46 vs. limit=7.7475 +2024-08-02 23:54:28,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.24 vs. 
limit=5.0825 +2024-08-02 23:54:34,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=330.0, ans=0.29669999999999996 +2024-08-02 23:54:36,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=61.92 vs. limit=7.62375 +2024-08-02 23:54:55,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=233.04 vs. limit=5.165 +2024-08-02 23:55:14,126 INFO [train.py:1114] (1/4) Epoch 1, batch 100, loss[loss=1.157, simple_loss=0.9974, pruned_loss=1.27, over 13521.00 frames. ], tot_loss[loss=2.063, simple_loss=1.863, pruned_loss=1.825, over 1025367.91 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 1.0 +2024-08-02 23:55:16,357 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+01 6.736e+01 1.462e+02 5.319e+02 4.030e+03, threshold=2.924e+02, percent-clipped=0.0 +2024-08-02 23:55:37,331 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=252.64 vs. limit=7.6375 +2024-08-02 23:55:47,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=403.3333333333333, ans=0.2273125 +2024-08-02 23:55:47,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=59.01 vs. limit=7.8025 +2024-08-02 23:56:19,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=7.665 +2024-08-02 23:56:24,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=29.10 vs. limit=7.665 +2024-08-02 23:56:51,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=440.0, ans=7.665 +2024-08-03 00:01:27,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=40.55 vs. limit=7.67875 +2024-08-03 00:07:41,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=476.6666666666667, ans=0.47765625 +2024-08-03 00:15:37,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=513.3333333333334, ans=5.320833333333334 +2024-08-03 00:15:59,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=24.09 vs. limit=5.128333333333333 +2024-08-03 00:16:22,410 INFO [train.py:1114] (1/4) Epoch 1, batch 150, loss[loss=0.9106, simple_loss=0.7747, pruned_loss=0.9834, over 13431.00 frames. ], tot_loss[loss=1.645, simple_loss=1.465, pruned_loss=1.546, over 1386857.41 frames. ], batch size: 32, lr: 2.93e-02, grad_scale: 1.0 +2024-08-03 00:16:23,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=101.60 vs. 
limit=7.70625 +2024-08-03 00:16:28,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=13.21 vs. limit=5.1375 +2024-08-03 00:16:29,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=258.98 vs. limit=7.70625 +2024-08-03 00:16:56,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=173.79 vs. limit=7.72 +2024-08-03 00:17:10,064 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=94.18 vs. limit=7.72 +2024-08-03 00:17:27,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=13.13 vs. limit=5.155833333333334 +2024-08-03 00:17:28,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.16 vs. limit=5.155833333333334 +2024-08-03 00:17:48,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.42 vs. limit=3.0935 +2024-08-03 00:17:48,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=623.3333333333334, ans=7.73375 +2024-08-03 00:17:51,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=30.56 vs. limit=7.73375 +2024-08-03 00:17:52,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=184.97 vs. limit=7.73375 +2024-08-03 00:17:56,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=623.3333333333334, ans=0.47078125 +2024-08-03 00:18:10,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.88 vs. limit=7.995 +2024-08-03 00:18:10,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=63.63 vs. limit=7.7475 +2024-08-03 00:19:01,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.48 vs. limit=7.995 +2024-08-03 00:19:49,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=696.6666666666666, ans=0.46734375 +2024-08-03 00:19:53,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=100.86 vs. limit=7.76125 +2024-08-03 00:20:24,584 INFO [train.py:1114] (1/4) Epoch 1, batch 200, loss[loss=1.005, simple_loss=0.8566, pruned_loss=1.006, over 12468.00 frames. ], tot_loss[loss=1.42, simple_loss=1.251, pruned_loss=1.369, over 1665327.12 frames. 
], batch size: 58, lr: 3.15e-02, grad_scale: 2.0 +2024-08-03 00:20:24,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=733.3333333333334, ans=0.4083333333333333 +2024-08-03 00:20:24,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.90 vs. limit=7.775 +2024-08-03 00:20:25,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=733.3333333333334, ans=0.29266666666666663 +2024-08-03 00:20:29,215 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+01 4.646e+01 6.073e+01 7.987e+01 1.954e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-08-03 00:20:30,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=114.28 vs. limit=5.366666666666667 +2024-08-03 00:21:28,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=770.0, ans=0.171125 +2024-08-03 00:21:30,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=32.83 vs. limit=8.0775 +2024-08-03 00:21:31,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.89 vs. limit=8.0775 +2024-08-03 00:21:44,783 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=12.59 vs. limit=4.322666666666667 +2024-08-03 00:22:50,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.28 vs. limit=5.210833333333333 +2024-08-03 00:22:50,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.59 vs. limit=8.1325 +2024-08-03 00:22:51,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=69.61 vs. limit=7.81625 +2024-08-03 00:22:51,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=28.69 vs. limit=8.1325 +2024-08-03 00:22:53,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=21.13 vs. limit=7.81625 +2024-08-03 00:22:55,747 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=15.45 vs. limit=7.81625 +2024-08-03 00:23:03,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=12.32 vs. limit=5.22 +2024-08-03 00:23:14,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=18.64 vs. limit=7.83 +2024-08-03 00:23:18,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=28.55 vs. 
limit=5.44 +2024-08-03 00:23:18,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=916.6666666666666, ans=0.079375 +2024-08-03 00:23:19,658 INFO [train.py:1114] (1/4) Epoch 1, batch 250, loss[loss=1.004, simple_loss=0.8473, pruned_loss=0.9847, over 13330.00 frames. ], tot_loss[loss=1.281, simple_loss=1.117, pruned_loss=1.246, over 1884149.34 frames. ], batch size: 46, lr: 3.38e-02, grad_scale: 2.0 +2024-08-03 00:23:24,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.07 vs. limit=8.1875 +2024-08-03 00:23:31,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.03 vs. limit=4.366666666666666 +2024-08-03 00:23:31,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=916.6666666666666, ans=0.165625 +2024-08-03 00:23:33,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=953.3333333333334, ans=0.4553125 +2024-08-03 00:23:33,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=953.3333333333334, ans=0.07855000000000001 +2024-08-03 00:23:33,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.05 vs. limit=5.238333333333333 +2024-08-03 00:24:00,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.79 vs. limit=5.238333333333333 +2024-08-03 00:24:34,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=990.0, ans=0.162875 +2024-08-03 00:24:36,112 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=18.64 vs. limit=7.87125 +2024-08-03 00:24:43,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=51.54 vs. limit=7.885 +2024-08-03 00:24:48,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=10.78 vs. limit=4.410666666666667 +2024-08-03 00:24:51,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-08-03 00:24:53,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=1063.3333333333333, ans=0.45015625 +2024-08-03 00:24:54,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=75.86 vs. 
limit=7.89875 +2024-08-03 00:24:58,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=1063.3333333333333, ans=0.1901875 +2024-08-03 00:24:59,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=1063.3333333333333, ans=0.16012500000000002 +2024-08-03 00:25:02,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=1063.3333333333333, ans=0.1901875 +2024-08-03 00:25:05,116 INFO [train.py:1114] (1/4) Epoch 1, batch 300, loss[loss=1.019, simple_loss=0.85, pruned_loss=0.9875, over 13444.00 frames. ], tot_loss[loss=1.188, simple_loss=1.027, pruned_loss=1.158, over 2051003.92 frames. ], batch size: 42, lr: 3.60e-02, grad_scale: 4.0 +2024-08-03 00:25:09,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=20.04 vs. limit=7.9125 +2024-08-03 00:25:10,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 8.005e+01 9.897e+01 1.290e+02 2.424e+02, threshold=1.979e+02, percent-clipped=29.0 +2024-08-03 00:25:10,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=10.14 vs. limit=4.44 +2024-08-03 00:25:15,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1100.0, ans=0.4484375 +2024-08-03 00:25:22,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=1136.6666666666667, ans=0.44671875 +2024-08-03 00:25:59,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=38.77 vs. limit=8.3525 +2024-08-03 00:26:06,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.93 vs. limit=5.284166666666667 +2024-08-03 00:26:16,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=39.32 vs. limit=7.94 +2024-08-03 00:26:23,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.11 vs. limit=4.469333333333333 +2024-08-03 00:26:29,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=100.20 vs. limit=7.95375 +2024-08-03 00:27:05,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=1283.3333333333333, ans=0.5 +2024-08-03 00:27:06,203 INFO [train.py:1114] (1/4) Epoch 1, batch 350, loss[loss=0.9128, simple_loss=0.7501, pruned_loss=0.8859, over 13588.00 frames. ], tot_loss[loss=1.126, simple_loss=0.9642, pruned_loss=1.095, over 2181919.78 frames. ], batch size: 33, lr: 3.83e-02, grad_scale: 4.0 +2024-08-03 00:27:06,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=1283.3333333333333, ans=0.43984375 +2024-08-03 00:27:18,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.23 vs. 
limit=5.320833333333333 +2024-08-03 00:27:20,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=7.98125 +2024-08-03 00:27:21,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1283.3333333333333, ans=0.2871666666666667 +2024-08-03 00:27:24,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=1283.3333333333333, ans=0.43984375 +2024-08-03 00:27:26,519 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=45.84 vs. limit=5.641666666666667 +2024-08-03 00:27:30,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=25.05 vs. limit=7.995 +2024-08-03 00:27:31,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=24.60 vs. limit=7.995 +2024-08-03 00:27:36,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=1320.0, ans=0.438125 +2024-08-03 00:27:50,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=43.75 vs. limit=8.00875 +2024-08-03 00:27:51,506 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=80.34 vs. limit=8.00875 +2024-08-03 00:27:54,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=1356.6666666666667, ans=0.43640625 +2024-08-03 00:27:54,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.46 vs. limit=5.339166666666666 +2024-08-03 00:28:02,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=1393.3333333333333, ans=0.4346875 +2024-08-03 00:28:06,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=186.13 vs. limit=8.0225 +2024-08-03 00:28:07,632 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=34.07 vs. limit=8.0225 +2024-08-03 00:28:13,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.09 vs. limit=8.03625 +2024-08-03 00:28:23,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=34.58 vs. limit=8.03625 +2024-08-03 00:28:36,682 INFO [train.py:1114] (1/4) Epoch 1, batch 400, loss[loss=0.9941, simple_loss=0.8148, pruned_loss=0.9306, over 13366.00 frames. ], tot_loss[loss=1.081, simple_loss=0.9172, pruned_loss=1.046, over 2285250.75 frames. 
], batch size: 37, lr: 4.05e-02, grad_scale: 8.0 +2024-08-03 00:28:44,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.328e+01 8.404e+01 1.145e+02 1.534e+02 2.452e+02, threshold=2.291e+02, percent-clipped=10.0 +2024-08-03 00:29:29,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.69 vs. limit=5.366666666666667 +2024-08-03 00:30:07,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=179.08 vs. limit=8.06375 +2024-08-03 00:30:14,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.00 vs. limit=5.751666666666667 +2024-08-03 00:30:17,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.57 vs. limit=8.6275 +2024-08-03 00:30:21,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1503.3333333333333, ans=0.42953125000000003 +2024-08-03 00:30:21,943 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.07 vs. limit=4.601333333333333 +2024-08-03 00:30:29,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.46 vs. limit=8.655 +2024-08-03 00:30:56,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1576.6666666666667, ans=0.28423333333333334 +2024-08-03 00:30:57,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=30.81 vs. limit=8.09125 +2024-08-03 00:30:58,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=1576.6666666666667, ans=0.42609375 +2024-08-03 00:32:29,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=1613.3333333333333, ans=6.008333333333333 +2024-08-03 00:32:51,728 INFO [train.py:1114] (1/4) Epoch 1, batch 450, loss[loss=1.01, simple_loss=0.8208, pruned_loss=0.9309, over 13556.00 frames. ], tot_loss[loss=1.049, simple_loss=0.8822, pruned_loss=1.007, over 2358658.54 frames. ], batch size: 38, lr: 4.28e-02, grad_scale: 8.0 +2024-08-03 00:33:08,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=7.65 vs. limit=4.66 +2024-08-03 00:33:45,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1686.6666666666667, ans=0.4209375 +2024-08-03 00:33:45,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.68 vs. 
limit=8.765 +2024-08-03 00:33:54,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=1686.6666666666667, ans=0.155125 +2024-08-03 00:34:04,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=1723.3333333333333, ans=0.13537500000000002 +2024-08-03 00:34:04,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=1723.3333333333333, ans=0.41921875 +2024-08-03 00:34:12,964 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=1.795e+00 +2024-08-03 00:34:28,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=1723.3333333333333, ans=0.035 +2024-08-03 00:34:36,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.56 vs. limit=5.88 +2024-08-03 00:34:43,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=1760.0, ans=0.4175 +2024-08-03 00:34:58,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=23.08 vs. limit=8.16 +2024-08-03 00:35:25,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=1796.6666666666667, ans=0.059575 +2024-08-03 00:35:29,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=1796.6666666666667, ans=0.41578125 +2024-08-03 00:35:38,383 INFO [train.py:1114] (1/4) Epoch 1, batch 500, loss[loss=0.9964, simple_loss=0.8075, pruned_loss=0.8912, over 13420.00 frames. ], tot_loss[loss=1.024, simple_loss=0.8541, pruned_loss=0.9707, over 2424229.84 frames. ], batch size: 43, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:35:53,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.232e+01 1.074e+02 1.283e+02 1.686e+02 3.614e+02, threshold=2.565e+02, percent-clipped=11.0 +2024-08-03 00:36:10,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.76 vs. limit=8.875 +2024-08-03 00:36:14,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.36 vs. limit=8.20125 +2024-08-03 00:36:14,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=1870.0, ans=0.41234375 +2024-08-03 00:37:05,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=40.81 vs. limit=8.20125 +2024-08-03 00:37:35,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=15.76 vs. limit=8.215 +2024-08-03 00:37:49,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=162.94 vs. 
limit=8.215 +2024-08-03 00:39:02,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=1943.3333333333333, ans=0.8319833333333333 +2024-08-03 00:39:02,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.07 vs. limit=8.9575 +2024-08-03 00:41:32,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.15 vs. limit=8.9575 +2024-08-03 00:41:32,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.34 vs. limit=8.22875 +2024-08-03 00:41:34,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.66 vs. limit=5.485833333333333 +2024-08-03 00:41:36,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=1943.3333333333333, ans=0.40890625 +2024-08-03 00:43:38,155 INFO [train.py:1114] (1/4) Epoch 1, batch 550, loss[loss=1.01, simple_loss=0.8225, pruned_loss=0.8643, over 13031.00 frames. ], tot_loss[loss=1.006, simple_loss=0.8338, pruned_loss=0.9375, over 2467137.98 frames. ], batch size: 48, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:43:55,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=2016.6666666666667, ans=0.8294166666666667 +2024-08-03 00:43:56,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=2016.6666666666667, ans=0.8294166666666667 +2024-08-03 00:44:29,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=15.82 vs. limit=8.27 +2024-08-03 00:44:52,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=8.27 +2024-08-03 00:45:08,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2053.3333333333335, ans=0.27946666666666664 +2024-08-03 00:45:09,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=2090.0, ans=0.121625 +2024-08-03 00:45:10,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.00 vs. limit=8.28375 +2024-08-03 00:45:20,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=2090.0, ans=0.052975 +2024-08-03 00:45:21,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=2090.0, ans=0.40203125 +2024-08-03 00:45:24,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=16.76 vs. limit=8.28375 +2024-08-03 00:45:26,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=33.41 vs. 
limit=8.28375 +2024-08-03 00:45:27,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=2090.0, ans=0.23135 +2024-08-03 00:45:29,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.30 vs. limit=8.28375 +2024-08-03 00:45:42,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.82 vs. limit=8.2975 +2024-08-03 00:45:50,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=9.094999999999999 +2024-08-03 00:45:53,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=2126.6666666666665, ans=0.12025000000000001 +2024-08-03 00:46:08,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=2163.3333333333335, ans=0.39859374999999997 +2024-08-03 00:46:13,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=2163.3333333333335, ans=8.31125 +2024-08-03 00:46:15,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2163.3333333333335, ans=0.27836666666666665 +2024-08-03 00:46:28,710 INFO [train.py:1114] (1/4) Epoch 1, batch 600, loss[loss=0.9305, simple_loss=0.7649, pruned_loss=0.7571, over 13330.00 frames. ], tot_loss[loss=0.9854, simple_loss=0.815, pruned_loss=0.8965, over 2507487.91 frames. ], batch size: 46, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:46:49,719 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.983e+01 1.322e+02 1.697e+02 2.206e+02 6.951e+02, threshold=3.394e+02, percent-clipped=10.0 +2024-08-03 00:47:40,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.86 vs. limit=4.9093333333333335 +2024-08-03 00:47:40,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=18.85 vs. limit=8.3525 +2024-08-03 00:47:47,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2310.0, ans=0.7731 +2024-08-03 00:47:58,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.13 vs. limit=9.2325 +2024-08-03 00:48:00,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.33 vs. limit=8.38 +2024-08-03 00:48:00,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.45 vs. limit=8.38 +2024-08-03 00:48:14,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.72 vs. 
limit=8.38 +2024-08-03 00:48:14,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=2346.6666666666665, ans=0.112 +2024-08-03 00:48:23,485 INFO [train.py:1114] (1/4) Epoch 1, batch 650, loss[loss=0.8701, simple_loss=0.7342, pruned_loss=0.65, over 13534.00 frames. ], tot_loss[loss=0.9574, simple_loss=0.7931, pruned_loss=0.8447, over 2543176.66 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:48:29,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=8.39375 +2024-08-03 00:48:50,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2420.0, ans=0.1975 +2024-08-03 00:48:56,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.15 vs. limit=4.968 +2024-08-03 00:49:19,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.87 vs. limit=8.435 +2024-08-03 00:49:35,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=2493.3333333333335, ans=0.043899999999999995 +2024-08-03 00:49:42,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.03 vs. limit=8.44875 +2024-08-03 00:49:48,080 INFO [train.py:1114] (1/4) Epoch 1, batch 700, loss[loss=0.752, simple_loss=0.6375, pruned_loss=0.5445, over 13538.00 frames. ], tot_loss[loss=0.9216, simple_loss=0.767, pruned_loss=0.7855, over 2564702.51 frames. ], batch size: 35, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:49:53,121 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.309e+01 1.383e+02 1.770e+02 2.360e+02 5.485e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-03 00:50:00,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=2566.6666666666665, ans=0.3796875 +2024-08-03 00:50:00,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.89 vs. limit=8.4625 +2024-08-03 00:50:12,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=2603.3333333333335, ans=0.37796875 +2024-08-03 00:50:14,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.78 vs. limit=9.4525 +2024-08-03 00:50:36,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=9.5075 +2024-08-03 00:50:42,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2713.3333333333335, ans=0.3728125 +2024-08-03 00:50:43,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=8.5175 +2024-08-03 00:50:50,661 INFO [train.py:1114] (1/4) Epoch 1, batch 750, loss[loss=0.7325, simple_loss=0.6328, pruned_loss=0.499, over 13368.00 frames. 
], tot_loss[loss=0.8792, simple_loss=0.7363, pruned_loss=0.7228, over 2581331.94 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:50:58,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=5.1 +2024-08-03 00:51:00,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.27 vs. limit=5.6875 +2024-08-03 00:51:22,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=2786.6666666666665, ans=0.04129166666666667 +2024-08-03 00:51:36,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.06 vs. limit=8.5725 +2024-08-03 00:51:39,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=2860.0, ans=0.09275 +2024-08-03 00:51:40,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=2860.0, ans=0.03565 +2024-08-03 00:51:41,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.03 vs. limit=5.144 +2024-08-03 00:51:42,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=2860.0, ans=0.03565 +2024-08-03 00:51:54,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=2896.6666666666665, ans=0.27103333333333335 +2024-08-03 00:52:00,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=2896.6666666666665, ans=0.36421875000000004 +2024-08-03 00:52:04,754 INFO [train.py:1114] (1/4) Epoch 1, batch 800, loss[loss=0.5946, simple_loss=0.5256, pruned_loss=0.3785, over 13343.00 frames. ], tot_loss[loss=0.8359, simple_loss=0.7054, pruned_loss=0.6622, over 2596265.16 frames. ], batch size: 33, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:52:06,748 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.637e+02 2.042e+02 2.862e+02 4.523e+02, threshold=4.084e+02, percent-clipped=8.0 +2024-08-03 00:52:08,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=2933.3333333333335, ans=0.08999999999999998 +2024-08-03 00:52:16,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=8.61375 +2024-08-03 00:52:30,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=3006.6666666666665, ans=0.3590625 +2024-08-03 00:52:32,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. limit=8.6275 +2024-08-03 00:52:57,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=3080.0, ans=0.7922 +2024-08-03 00:53:14,275 INFO [train.py:1114] (1/4) Epoch 1, batch 850, loss[loss=0.6274, simple_loss=0.5626, pruned_loss=0.3819, over 13312.00 frames. 
], tot_loss[loss=0.7926, simple_loss=0.6748, pruned_loss=0.6045, over 2608924.28 frames. ], batch size: 40, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:53:24,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.54 vs. limit=5.779166666666667 +2024-08-03 00:53:33,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3153.3333333333335, ans=0.10583333333333333 +2024-08-03 00:53:37,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.05 vs. limit=5.261333333333333 +2024-08-03 00:53:49,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=3190.0, ans=0.080375 +2024-08-03 00:53:58,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=3190.0, ans=0.09899494936611666 +2024-08-03 00:54:19,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.84 vs. limit=5.3053333333333335 +2024-08-03 00:54:22,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=8.723749999999999 +2024-08-03 00:54:42,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=10.68 vs. limit=9.9475 +2024-08-03 00:54:45,056 INFO [train.py:1114] (1/4) Epoch 1, batch 900, loss[loss=0.6001, simple_loss=0.5399, pruned_loss=0.3597, over 13339.00 frames. ], tot_loss[loss=0.7566, simple_loss=0.6498, pruned_loss=0.5563, over 2612112.38 frames. ], batch size: 33, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:54:47,014 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.709e+02 2.155e+02 3.211e+02 6.364e+02, threshold=4.310e+02, percent-clipped=14.0 +2024-08-03 00:54:56,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3300.0, ans=0.3453125 +2024-08-03 00:54:59,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3336.6666666666665, ans=0.0829166666666667 +2024-08-03 00:55:05,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=3336.6666666666665, ans=0.34359375000000003 +2024-08-03 00:55:15,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=3336.6666666666665, ans=0.34359375000000003 +2024-08-03 00:55:29,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.69 vs. limit=10.03 +2024-08-03 00:55:32,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.48 vs. 
limit=8.77875 +2024-08-03 00:55:33,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=3410.0, ans=0.072125 +2024-08-03 00:55:35,014 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.29 vs. limit=8.77875 +2024-08-03 00:55:49,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.77 vs. limit=8.7925 +2024-08-03 00:56:05,965 INFO [train.py:1114] (1/4) Epoch 1, batch 950, loss[loss=0.5612, simple_loss=0.5122, pruned_loss=0.3235, over 13522.00 frames. ], tot_loss[loss=0.7221, simple_loss=0.6259, pruned_loss=0.5121, over 2613554.23 frames. ], batch size: 34, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:56:31,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=3483.3333333333335, ans=0.21516666666666667 +2024-08-03 00:56:32,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=3483.3333333333335, ans=0.7780833333333333 +2024-08-03 00:56:53,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=3520.0, ans=0.7768 +2024-08-03 00:57:28,047 INFO [train.py:1114] (1/4) Epoch 1, batch 1000, loss[loss=0.4698, simple_loss=0.4454, pruned_loss=0.2468, over 13360.00 frames. ], tot_loss[loss=0.6904, simple_loss=0.6039, pruned_loss=0.4731, over 2611977.97 frames. ], batch size: 35, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:57:32,799 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.579e+02 2.012e+02 2.638e+02 6.886e+02, threshold=4.024e+02, percent-clipped=6.0 +2024-08-03 00:57:34,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3666.6666666666665, ans=0.328125 +2024-08-03 00:57:36,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3666.6666666666665, ans=0.328125 +2024-08-03 00:58:40,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=3813.3333333333335, ans=0.035500000000000004 +2024-08-03 00:58:40,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=8.93
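The train.py lines above pair a per-batch value ("loss[..., over N frames. ]") with a running frame-weighted average ("tot_loss[..., over M frames. ]"), where M is the cumulative frame count since the start of the epoch. A minimal sketch of that bookkeeping follows; the names are hypothetical and it only assumes icefall-style MetricsTracker semantics, not this recipe's actual code:

```python
# Sketch of frame-weighted loss averaging behind the
# "loss[..., over N frames. ], tot_loss[..., over M frames. ]" log lines.
from collections import defaultdict

class LossTracker:
    def __init__(self):
        self.sums = defaultdict(float)  # frame-weighted sum per metric
        self.frames = 0.0               # cumulative frame count

    def update(self, batch_frames, **metrics):
        # Each metric is accumulated weighted by the frames it covers,
        # so large batches count proportionally more in the average.
        self.frames += batch_frames
        for name, value in metrics.items():
            self.sums[name] += value * batch_frames

    def __str__(self):
        avgs = ", ".join(f"{k}={self.sums[k] / self.frames:.4g}" for k in self.sums)
        return f"{avgs}, over {self.frames:.2f} frames."

tot = LossTracker()
tot.update(13563.0, loss=0.5827, simple_loss=0.5362, pruned_loss=0.3271)
print(tot)
# After many batches, printing the tracker yields a line shaped like the
# tot_loss fields above, e.g. "loss=..., over 2617065.81 frames."
```

+2024-08-03 00:58:48,522 INFO [train.py:1114] (1/4) Epoch 1, batch 1050, loss[loss=0.5827, simple_loss=0.5362, pruned_loss=0.3271, over 13563.00 frames. ], tot_loss[loss=0.6591, simple_loss=0.5822, pruned_loss=0.4363, over 2617065.81 frames. 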
], batch size: 39, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:12,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=3886.6666666666665, ans=0.3178125 +2024-08-03 00:59:13,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3886.6666666666665, ans=0.26113333333333333 +2024-08-03 00:59:30,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=3960.0, ans=0.2594 +2024-08-03 00:59:30,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.26 vs. limit=10.47 +2024-08-03 00:59:34,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.34 vs. limit=10.47 +2024-08-03 00:59:39,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=3996.6666666666665, ans=0.7601166666666667 +2024-08-03 00:59:48,494 INFO [train.py:1114] (1/4) Epoch 1, batch 1100, loss[loss=0.5059, simple_loss=0.473, pruned_loss=0.2738, over 13568.00 frames. ], tot_loss[loss=0.6302, simple_loss=0.5624, pruned_loss=0.4036, over 2621176.50 frames. ], batch size: 36, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:50,398 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.598e+02 2.010e+02 2.726e+02 4.926e+02, threshold=4.021e+02, percent-clipped=7.0 +2024-08-03 01:00:38,327 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:00:40,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=4143.333333333333, ans=0.009968840579710146 +2024-08-03 01:00:54,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.93 vs. limit=9.08125 +2024-08-03 01:00:55,288 INFO [train.py:1114] (1/4) Epoch 1, batch 1150, loss[loss=0.5057, simple_loss=0.4735, pruned_loss=0.2725, over 13555.00 frames. ], tot_loss[loss=0.6069, simple_loss=0.5468, pruned_loss=0.3767, over 2619714.01 frames. ], batch size: 36, lr: 4.47e-02, grad_scale: 16.0 +2024-08-03 01:01:55,174 INFO [train.py:1114] (1/4) Epoch 1, batch 1200, loss[loss=0.5388, simple_loss=0.5142, pruned_loss=0.2794, over 13568.00 frames. ], tot_loss[loss=0.5893, simple_loss=0.5353, pruned_loss=0.356, over 2617116.36 frames. ], batch size: 39, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:01:57,191 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.679e+02 2.058e+02 2.623e+02 8.489e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-03 01:02:09,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=4436.666666666667, ans=0.29203124999999996 +2024-08-03 01:02:09,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.94 vs. 
limit=9.16375 +2024-08-03 01:02:14,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=4436.666666666667, ans=0.04949747468305833 +2024-08-03 01:02:15,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=9.16375 +2024-08-03 01:02:19,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.40 vs. limit=3.6710000000000003 +2024-08-03 01:02:27,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.94 vs. limit=5.789333333333333 +2024-08-03 01:02:52,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=4546.666666666667, ans=0.7408666666666667 +2024-08-03 01:02:54,145 INFO [train.py:1114] (1/4) Epoch 1, batch 1250, loss[loss=0.5114, simple_loss=0.4861, pruned_loss=0.2675, over 13439.00 frames. ], tot_loss[loss=0.5679, simple_loss=0.5211, pruned_loss=0.3335, over 2628683.33 frames. ], batch size: 42, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:03:12,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=4583.333333333333, ans=0.1 +2024-08-03 01:03:29,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=9.2325 +2024-08-03 01:03:31,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=4620.0, ans=0.7383 +2024-08-03 01:03:51,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=4693.333333333333, ans=0.7357333333333334 +2024-08-03 01:03:53,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=4693.333333333333, ans=0.07 +2024-08-03 01:03:58,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.32 vs. limit=6.1825 +2024-08-03 01:04:08,973 INFO [train.py:1114] (1/4) Epoch 1, batch 1300, loss[loss=0.4928, simple_loss=0.4736, pruned_loss=0.2527, over 12927.00 frames. ], tot_loss[loss=0.549, simple_loss=0.5082, pruned_loss=0.3146, over 2631119.95 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:04:10,894 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.740e+02 2.083e+02 2.560e+02 4.997e+02, threshold=4.167e+02, percent-clipped=2.0 +2024-08-03 01:04:33,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.81 vs. limit=11.075 +2024-08-03 01:04:38,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=9.30125 +2024-08-03 01:05:04,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=4876.666666666667, ans=0.27140624999999996 +2024-08-03 01:05:09,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.00 vs. 
limit=5.950666666666667 +2024-08-03 01:05:11,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4876.666666666667, ans=0.27140624999999996 +2024-08-03 01:05:14,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4913.333333333333, ans=0.2508666666666667 +2024-08-03 01:05:23,859 INFO [train.py:1114] (1/4) Epoch 1, batch 1350, loss[loss=0.4722, simple_loss=0.4583, pruned_loss=0.2382, over 13544.00 frames. ], tot_loss[loss=0.533, simple_loss=0.4978, pruned_loss=0.2984, over 2637832.52 frames. ], batch size: 37, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:05:29,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=4950.0, ans=0.009793478260869565 +2024-08-03 01:05:43,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=5023.333333333333, ans=0.009777536231884059 +2024-08-03 01:05:44,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.79 vs. limit=7.511666666666667 +2024-08-03 01:06:06,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.07 vs. limit=9.411249999999999 +2024-08-03 01:06:13,511 INFO [train.py:1114] (1/4) Epoch 1, batch 1400, loss[loss=0.4159, simple_loss=0.4131, pruned_loss=0.2019, over 13254.00 frames. ], tot_loss[loss=0.518, simple_loss=0.4881, pruned_loss=0.2838, over 2641793.97 frames. ], batch size: 31, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:06:15,473 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.555e+02 1.828e+02 2.203e+02 3.760e+02, threshold=3.656e+02, percent-clipped=0.0 +2024-08-03 01:06:19,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=5133.333333333333, ans=0.259375 +2024-08-03 01:06:27,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.75 vs. limit=6.068 +2024-08-03 01:06:37,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=5206.666666666667, ans=0.25593750000000004 +2024-08-03 01:06:56,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.14 vs. limit=7.640000000000001 +2024-08-03 01:07:05,505 INFO [train.py:1114] (1/4) Epoch 1, batch 1450, loss[loss=0.4786, simple_loss=0.4683, pruned_loss=0.2393, over 13397.00 frames. ], tot_loss[loss=0.5078, simple_loss=0.4816, pruned_loss=0.2738, over 2640699.34 frames. ], batch size: 43, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:07:29,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=11.515 +2024-08-03 01:07:32,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=5390.0, ans=0.044208333333333336 +2024-08-03 01:07:40,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.07 vs. 
limit=6.3566666666666665 +2024-08-03 01:07:45,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=5426.666666666667, ans=11.57 +2024-08-03 01:07:46,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=6.170666666666667 +2024-08-03 01:07:51,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=5463.333333333333, ans=0.043902777777777784 +2024-08-03 01:08:00,851 INFO [train.py:1114] (1/4) Epoch 1, batch 1500, loss[loss=0.5471, simple_loss=0.5254, pruned_loss=0.2822, over 13394.00 frames. ], tot_loss[loss=0.497, simple_loss=0.4752, pruned_loss=0.2634, over 2640859.98 frames. ], batch size: 39, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:08:02,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=6.2 +2024-08-03 01:08:14,028 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.665e+02 2.059e+02 2.727e+02 4.755e+02, threshold=4.117e+02, percent-clipped=2.0 +2024-08-03 01:08:25,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=5536.666666666667, ans=0.06539583333333333 +2024-08-03 01:08:55,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=5610.0, ans=0.00965 +2024-08-03 01:09:01,358 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:09:05,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5646.666666666667, ans=0.24353333333333332 +2024-08-03 01:09:08,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=11.735 +2024-08-03 01:09:12,866 INFO [train.py:1114] (1/4) Epoch 1, batch 1550, loss[loss=0.4608, simple_loss=0.4594, pruned_loss=0.2251, over 13405.00 frames. ], tot_loss[loss=0.4888, simple_loss=0.47, pruned_loss=0.2559, over 2630379.91 frames. ], batch size: 41, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:09:29,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=6.288 +2024-08-03 01:09:45,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.15 vs. 
limit=7.859999999999999 +2024-08-03 01:09:50,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=5756.666666666667, ans=0.23015625 +2024-08-03 01:09:51,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=5756.666666666667, ans=0.042680555555555555 +2024-08-03 01:09:55,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5756.666666666667, ans=0.24243333333333333 +2024-08-03 01:09:59,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=5756.666666666667, ans=0.042680555555555555 +2024-08-03 01:10:06,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=5793.333333333333, ans=0.22843750000000002 +2024-08-03 01:10:18,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.21 vs. limit=7.915 +2024-08-03 01:10:30,112 INFO [train.py:1114] (1/4) Epoch 1, batch 1600, loss[loss=0.4758, simple_loss=0.4729, pruned_loss=0.2342, over 13577.00 frames. ], tot_loss[loss=0.4816, simple_loss=0.4656, pruned_loss=0.2495, over 2623274.11 frames. ], batch size: 39, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:10:33,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.694e+02 2.197e+02 2.790e+02 6.281e+02, threshold=4.393e+02, percent-clipped=9.0 +2024-08-03 01:10:53,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5866.666666666667, ans=0.22499999999999998 +2024-08-03 01:10:57,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.63 vs. limit=9.713750000000001 +2024-08-03 01:11:00,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.81 vs. limit=11.9275 +2024-08-03 01:11:10,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.61 vs. limit=6.485 +2024-08-03 01:11:13,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=5940.0, ans=0.2215625 +2024-08-03 01:11:26,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6013.333333333333, ans=0.23986666666666667 +2024-08-03 01:11:28,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=6013.333333333333, ans=0.23986666666666667 +2024-08-03 01:11:37,608 INFO [train.py:1114] (1/4) Epoch 1, batch 1650, loss[loss=0.4395, simple_loss=0.4464, pruned_loss=0.2104, over 13320.00 frames. ], tot_loss[loss=0.4726, simple_loss=0.4603, pruned_loss=0.2418, over 2619528.37 frames. ], batch size: 40, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:11:40,157 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.36 vs. 
limit=12.0375 +2024-08-03 01:11:43,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=6050.0, ans=0.04145833333333333 +2024-08-03 01:11:54,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=6086.666666666667, ans=0.21468749999999998 +2024-08-03 01:12:03,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.76 vs. limit=12.092500000000001 +2024-08-03 01:12:06,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6123.333333333333, ans=0.23876666666666665 +2024-08-03 01:12:25,453 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:12:34,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=6196.666666666667, ans=0.04084722222222222 +2024-08-03 01:12:35,772 INFO [train.py:1114] (1/4) Epoch 1, batch 1700, loss[loss=0.4075, simple_loss=0.4031, pruned_loss=0.2032, over 13261.00 frames. ], tot_loss[loss=0.4624, simple_loss=0.4545, pruned_loss=0.2334, over 2629061.32 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:12:37,766 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.541e+02 1.894e+02 2.425e+02 4.300e+02, threshold=3.787e+02, percent-clipped=0.0 +2024-08-03 01:12:43,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.53 vs. limit=9.8375 +2024-08-03 01:12:50,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=6270.0, ans=0.68055 +2024-08-03 01:12:55,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.09 vs. limit=9.85125 +2024-08-03 01:13:02,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=6306.666666666667, ans=12.23 +2024-08-03 01:13:12,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=6343.333333333333, ans=0.04023611111111111 +2024-08-03 01:13:21,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=6380.0, ans=0.025 +2024-08-03 01:13:26,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=6416.666666666667, ans=0.03993055555555555 +2024-08-03 01:13:26,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.14 vs. limit=12.3125 +2024-08-03 01:13:26,844 INFO [train.py:1114] (1/4) Epoch 1, batch 1750, loss[loss=0.3798, simple_loss=0.394, pruned_loss=0.1782, over 13530.00 frames. ], tot_loss[loss=0.4553, simple_loss=0.4504, pruned_loss=0.2279, over 2631958.02 frames. 
], batch size: 31, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:14:05,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6526.666666666667, ans=0.23473333333333332 +2024-08-03 01:14:06,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.85 vs. limit=9.9475 +2024-08-03 01:14:13,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=6563.333333333333, ans=0.19234374999999998 +2024-08-03 01:14:24,172 INFO [train.py:1114] (1/4) Epoch 1, batch 1800, loss[loss=0.4373, simple_loss=0.4516, pruned_loss=0.2076, over 13532.00 frames. ], tot_loss[loss=0.4507, simple_loss=0.4483, pruned_loss=0.2241, over 2632928.06 frames. ], batch size: 38, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:14:26,047 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.670e+02 2.044e+02 2.499e+02 4.845e+02, threshold=4.088e+02, percent-clipped=4.0 +2024-08-03 01:14:43,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6636.666666666667, ans=0.23363333333333333 +2024-08-03 01:14:49,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=6636.666666666667, ans=0.18890625 +2024-08-03 01:15:16,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=6710.0, ans=0.18546875000000002 +2024-08-03 01:15:22,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.47 vs. limit=12.559999999999999 +2024-08-03 01:15:30,699 INFO [train.py:1114] (1/4) Epoch 1, batch 1850, loss[loss=0.4131, simple_loss=0.4344, pruned_loss=0.1927, over 13401.00 frames. ], tot_loss[loss=0.4432, simple_loss=0.4444, pruned_loss=0.2184, over 2634069.58 frames. ], batch size: 39, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:15:37,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.65 vs. limit=12.5875 +2024-08-03 01:15:44,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=12.615 +2024-08-03 01:15:48,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=6820.0, ans=0.038250000000000006 +2024-08-03 01:15:49,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=6856.666666666667, ans=0.6600166666666667 +2024-08-03 01:15:50,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.32 vs. limit=10.07125 +2024-08-03 01:15:53,175 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:15:53,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.19 vs. 
limit=5.0 +2024-08-03 01:16:09,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=6930.0, ans=0.17515625 +2024-08-03 01:16:09,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=6930.0, ans=0.23070000000000002 +2024-08-03 01:16:24,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6930.0, ans=0.17515625 +2024-08-03 01:16:31,449 INFO [train.py:1114] (1/4) Epoch 1, batch 1900, loss[loss=0.3677, simple_loss=0.4042, pruned_loss=0.1628, over 13338.00 frames. ], tot_loss[loss=0.4375, simple_loss=0.4418, pruned_loss=0.2141, over 2636808.87 frames. ], batch size: 40, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:16:35,170 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.757e+02 2.130e+02 2.546e+02 5.245e+02, threshold=4.259e+02, percent-clipped=2.0 +2024-08-03 01:16:36,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.45 vs. limit=12.725 +2024-08-03 01:16:48,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=6966.666666666667, ans=0.17343750000000002 +2024-08-03 01:16:51,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=7003.333333333333, ans=0.17171874999999998 +2024-08-03 01:17:01,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=7003.333333333333, ans=0.17171874999999998 +2024-08-03 01:17:06,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7040.0, ans=0.2296 +2024-08-03 01:17:22,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.84 vs. limit=10.15375 +2024-08-03 01:17:27,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.70 vs. limit=12.835 +2024-08-03 01:17:29,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=7113.333333333333, ans=0.0 +2024-08-03 01:17:34,517 INFO [train.py:1114] (1/4) Epoch 1, batch 1950, loss[loss=0.3745, simple_loss=0.4151, pruned_loss=0.1655, over 13560.00 frames. ], tot_loss[loss=0.4326, simple_loss=0.4406, pruned_loss=0.21, over 2644113.37 frames. ], batch size: 36, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:17:40,938 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.17 vs. limit=8.575 +2024-08-03 01:17:42,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. 
limit=10.18125 +2024-08-03 01:17:47,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=7186.666666666667, ans=0.036722222222222226 +2024-08-03 01:17:48,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=7186.666666666667, ans=0.036722222222222226 +2024-08-03 01:17:56,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.31 vs. limit=8.611666666666666 +2024-08-03 01:18:12,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=10.20875 +2024-08-03 01:18:15,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=7223.333333333333, ans=0.16140624999999997 +2024-08-03 01:18:19,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=7260.0, ans=0.03641666666666667 +2024-08-03 01:18:30,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=7260.0, ans=0.009291304347826088 +2024-08-03 01:18:32,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=7296.666666666667, ans=0.03626388888888889 +2024-08-03 01:18:42,884 INFO [train.py:1114] (1/4) Epoch 1, batch 2000, loss[loss=0.3159, simple_loss=0.3539, pruned_loss=0.139, over 13541.00 frames. ], tot_loss[loss=0.4285, simple_loss=0.4388, pruned_loss=0.2072, over 2634439.75 frames. ], batch size: 31, lr: 4.42e-02, grad_scale: 32.0 +2024-08-03 01:18:44,711 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.650e+02 1.978e+02 2.674e+02 4.949e+02, threshold=3.955e+02, percent-clipped=2.0 +2024-08-03 01:18:58,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.74 vs. limit=13.0 +2024-08-03 01:19:21,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=7406.666666666667, ans=0.15281250000000002 +2024-08-03 01:19:25,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.29 vs. limit=10.29125 +2024-08-03 01:19:44,683 INFO [train.py:1114] (1/4) Epoch 1, batch 2050, loss[loss=0.3733, simple_loss=0.3992, pruned_loss=0.1736, over 13420.00 frames. ], tot_loss[loss=0.4234, simple_loss=0.4358, pruned_loss=0.204, over 2631922.04 frames. ], batch size: 32, lr: 4.42e-02, grad_scale: 64.0 +2024-08-03 01:19:49,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=10.31875 +2024-08-03 01:19:59,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=7516.666666666667, ans=0.31275000000000003 +2024-08-03 01:20:09,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.38 vs. 
limit=8.776666666666667 +2024-08-03 01:20:23,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=7590.0, ans=0.14421875 +2024-08-03 01:20:27,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.39 vs. limit=13.219999999999999 +2024-08-03 01:20:54,841 INFO [train.py:1114] (1/4) Epoch 1, batch 2100, loss[loss=0.3908, simple_loss=0.4161, pruned_loss=0.1828, over 13550.00 frames. ], tot_loss[loss=0.4177, simple_loss=0.433, pruned_loss=0.2, over 2637583.87 frames. ], batch size: 37, lr: 4.42e-02, grad_scale: 8.0 +2024-08-03 01:20:56,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.41 vs. limit=8.85 +2024-08-03 01:20:56,764 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:20:59,388 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.800e+02 2.088e+02 2.971e+02 6.141e+02, threshold=4.177e+02, percent-clipped=15.0 +2024-08-03 01:21:01,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=7700.0, ans=0.13906249999999998 +2024-08-03 01:21:04,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7736.666666666667, ans=0.22263333333333332 +2024-08-03 01:21:04,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=7736.666666666667, ans=0.13734375 +2024-08-03 01:21:13,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7773.333333333333, ans=0.22226666666666667 +2024-08-03 01:21:28,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=7810.0, ans=0.13390625 +2024-08-03 01:21:34,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=7810.0, ans=0.62665 +2024-08-03 01:21:41,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.89 vs. limit=13.385 +2024-08-03 01:21:56,435 INFO [train.py:1114] (1/4) Epoch 1, batch 2150, loss[loss=0.403, simple_loss=0.4269, pruned_loss=0.1896, over 13569.00 frames. ], tot_loss[loss=0.4137, simple_loss=0.4311, pruned_loss=0.1973, over 2645871.71 frames. ], batch size: 36, lr: 4.41e-02, grad_scale: 8.0 +2024-08-03 01:22:09,681 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:22:18,232 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:22:26,740 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:22:46,516 INFO [train.py:1114] (1/4) Epoch 1, batch 2200, loss[loss=0.4545, simple_loss=0.4723, pruned_loss=0.2183, over 13384.00 frames. ], tot_loss[loss=0.4106, simple_loss=0.4297, pruned_loss=0.195, over 2644824.82 frames. 
], batch size: 39, lr: 4.41e-02, grad_scale: 8.0 +2024-08-03 01:22:51,103 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.590e+02 1.905e+02 2.323e+02 5.165e+02, threshold=3.810e+02, percent-clipped=4.0 +2024-08-03 01:22:58,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.85 vs. limit=7.025833333333333 +2024-08-03 01:23:02,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=8103.333333333333, ans=0.125 +2024-08-03 01:23:34,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=8.68 vs. limit=7.285333333333334 +2024-08-03 01:23:40,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=8213.333333333334, ans=0.6125333333333334 +2024-08-03 01:23:43,781 INFO [train.py:1114] (1/4) Epoch 1, batch 2250, loss[loss=0.4412, simple_loss=0.4502, pruned_loss=0.2161, over 13355.00 frames. ], tot_loss[loss=0.4068, simple_loss=0.4277, pruned_loss=0.1924, over 2642379.90 frames. ], batch size: 37, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:23:49,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8250.0, ans=0.125 +2024-08-03 01:23:50,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8250.0, ans=0.2175 +2024-08-03 01:24:05,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=8323.333333333334, ans=0.1 +2024-08-03 01:24:09,817 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:24:24,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.01 vs. limit=13.7975 +2024-08-03 01:24:31,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=10.64875 +2024-08-03 01:24:31,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.29 vs. limit=10.64875 +2024-08-03 01:24:34,659 INFO [train.py:1114] (1/4) Epoch 1, batch 2300, loss[loss=0.3291, simple_loss=0.3747, pruned_loss=0.1418, over 13599.00 frames. ], tot_loss[loss=0.4009, simple_loss=0.4235, pruned_loss=0.1887, over 2638494.08 frames. ], batch size: 33, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:24:51,214 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.913e+02 2.281e+02 2.883e+02 4.389e+02, threshold=4.562e+02, percent-clipped=6.0 +2024-08-03 01:24:55,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8470.0, ans=0.2153 +2024-08-03 01:25:18,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.05 vs. 
limit=7.135833333333334 +2024-08-03 01:25:19,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8543.333333333334, ans=0.21456666666666666 +2024-08-03 01:25:25,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=8580.0, ans=0.125 +2024-08-03 01:25:29,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=8580.0, ans=0.125 +2024-08-03 01:25:31,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=8580.0, ans=0.025 +2024-08-03 01:25:33,514 INFO [train.py:1114] (1/4) Epoch 1, batch 2350, loss[loss=0.3937, simple_loss=0.43, pruned_loss=0.1787, over 13547.00 frames. ], tot_loss[loss=0.3988, simple_loss=0.4226, pruned_loss=0.1871, over 2641137.56 frames. ], batch size: 38, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:25:37,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=8616.666666666666, ans=0.5984166666666667 +2024-08-03 01:25:43,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=8616.666666666666, ans=0.008996376811594204 +2024-08-03 01:25:59,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=8653.333333333334, ans=0.5971333333333333 +2024-08-03 01:26:15,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.90 vs. limit=7.172499999999999 +2024-08-03 01:26:19,733 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.12 vs. limit=14.044999999999998 +2024-08-03 01:26:20,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=10.772499999999999 +2024-08-03 01:26:21,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=8726.666666666666, ans=0.125 +2024-08-03 01:26:26,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.91 vs. limit=14.044999999999998 +2024-08-03 01:26:32,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8763.333333333334, ans=0.21236666666666665 +2024-08-03 01:26:34,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=8763.333333333334, ans=0.030152777777777775 +2024-08-03 01:26:39,408 INFO [train.py:1114] (1/4) Epoch 1, batch 2400, loss[loss=0.3348, simple_loss=0.3787, pruned_loss=0.1454, over 13535.00 frames. ], tot_loss[loss=0.3962, simple_loss=0.4218, pruned_loss=0.1851, over 2641647.71 frames. ], batch size: 35, lr: 4.39e-02, grad_scale: 16.0 +2024-08-03 01:26:44,006 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.562e+02 1.774e+02 2.172e+02 5.136e+02, threshold=3.548e+02, percent-clipped=1.0 +2024-08-03 01:26:45,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.46 vs. 
limit=9.4 +2024-08-03 01:26:57,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=8873.333333333334, ans=0.029694444444444443 +2024-08-03 01:27:16,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=8946.666666666666, ans=0.025 +2024-08-03 01:27:21,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.44 vs. limit=9.473333333333333 +2024-08-03 01:27:22,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=8946.666666666666, ans=0.0 +2024-08-03 01:27:26,946 INFO [train.py:1114] (1/4) Epoch 1, batch 2450, loss[loss=0.3794, simple_loss=0.4083, pruned_loss=0.1753, over 13352.00 frames. ], tot_loss[loss=0.3959, simple_loss=0.4222, pruned_loss=0.1846, over 2631641.88 frames. ], batch size: 37, lr: 4.39e-02, grad_scale: 16.0 +2024-08-03 01:27:40,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9020.0, ans=0.125 +2024-08-03 01:27:47,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=9056.666666666666, ans=0.025 +2024-08-03 01:28:00,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.97 vs. limit=14.32 +2024-08-03 01:28:12,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.18 vs. limit=7.652 +2024-08-03 01:28:14,000 INFO [train.py:1114] (1/4) Epoch 1, batch 2500, loss[loss=0.4131, simple_loss=0.4413, pruned_loss=0.1924, over 13397.00 frames. ], tot_loss[loss=0.3919, simple_loss=0.42, pruned_loss=0.1818, over 2635508.23 frames. ], batch size: 39, lr: 4.38e-02, grad_scale: 16.0 +2024-08-03 01:28:18,357 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.729e+02 1.988e+02 2.684e+02 1.225e+03, threshold=3.975e+02, percent-clipped=8.0 +2024-08-03 01:28:29,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=9203.333333333334, ans=0.125 +2024-08-03 01:28:29,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=9203.333333333334, ans=0.125 +2024-08-03 01:28:51,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=9313.333333333334, ans=0.125
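The recurring "ScheduledFloat: name=..., batch_count=..., ans=..." lines from scaling.py record hyperparameters (skip rates, balancer probabilities, bypass scale minimums) that are annealed as training progresses; "ans" is the current value at that batch count, which is why values like pos_emb_skip_rate have already decayed to 0.0 above. A minimal sketch of one plausible implementation, assuming piecewise-linear interpolation over batch count in the spirit of icefall's scaling.ScheduledFloat; the breakpoints below are made-up examples:

```python
# Sketch of a ScheduledFloat: a scalar hyperparameter whose value ("ans")
# is piecewise-linearly interpolated over batch_count. Assumed semantics;
# breakpoints here are illustrative, not taken from this recipe.

class ScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count.
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        x0, y0 = self.points[0]
        if batch_count <= x0:
            return y0
        for x1, y1 in self.points[1:]:
            if batch_count <= x1:
                # Linear interpolation between surrounding breakpoints.
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
            x0, y0 = x1, y1
        return y0  # clamp past the last breakpoint

# e.g. a skip rate that decays from 0.5 to 0.0 over the first 4000 batches:
skip_rate = ScheduledFloat((0.0, 0.5), (4000.0, 0.0))
print(skip_rate.value(8946.7))  # -> 0.0, cf. "pos_emb_skip_rate, ..., ans=0.0"
```

+2024-08-03 01:29:01,488 INFO [train.py:1114] (1/4) Epoch 1, batch 2550, loss[loss=0.3573, simple_loss=0.3843, pruned_loss=0.1652, over 13550.00 frames. ], tot_loss[loss=0.3894, simple_loss=0.4186, pruned_loss=0.18, over 2637238.99 frames. 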
], batch size: 31, lr: 4.38e-02, grad_scale: 16.0 +2024-08-03 01:29:01,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=9350.0, ans=0.125 +2024-08-03 01:29:08,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=9350.0, ans=0.07 +2024-08-03 01:29:15,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=9386.666666666666, ans=0.125 +2024-08-03 01:29:21,904 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:29:23,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.02 vs. limit=7.355833333333333 +2024-08-03 01:29:27,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=9423.333333333334, ans=0.008821014492753623 +2024-08-03 01:29:48,469 INFO [train.py:1114] (1/4) Epoch 1, batch 2600, loss[loss=0.364, simple_loss=0.4037, pruned_loss=0.1621, over 13551.00 frames. ], tot_loss[loss=0.3881, simple_loss=0.4181, pruned_loss=0.179, over 2635326.73 frames. ], batch size: 36, lr: 4.37e-02, grad_scale: 16.0 +2024-08-03 01:29:51,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=9533.333333333334, ans=0.025 +2024-08-03 01:29:52,859 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.601e+02 1.881e+02 2.405e+02 3.900e+02, threshold=3.763e+02, percent-clipped=0.0 +2024-08-03 01:30:04,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.99 vs. limit=9.785 +2024-08-03 01:30:15,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=11.11625 +2024-08-03 01:30:24,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=14.76 +2024-08-03 01:30:26,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=9680.0, ans=0.125 +2024-08-03 01:30:33,661 INFO [train.py:1114] (1/4) Epoch 1, batch 2650, loss[loss=0.389, simple_loss=0.4272, pruned_loss=0.1754, over 13269.00 frames. ], tot_loss[loss=0.3868, simple_loss=0.4175, pruned_loss=0.178, over 2639008.89 frames. ], batch size: 46, lr: 4.37e-02, grad_scale: 16.0 +2024-08-03 01:30:34,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=9716.666666666666, ans=0.125 +2024-08-03 01:30:44,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=9753.333333333334, ans=0.026027777777777778 +2024-08-03 01:31:06,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=9826.666666666666, ans=0.125 +2024-08-03 01:31:07,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.25 vs. 
limit=11.185 +2024-08-03 01:31:08,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=9863.333333333334, ans=0.00872536231884058 +2024-08-03 01:31:20,736 INFO [train.py:1114] (1/4) Epoch 1, batch 2700, loss[loss=0.384, simple_loss=0.4292, pruned_loss=0.1695, over 13545.00 frames. ], tot_loss[loss=0.3863, simple_loss=0.4173, pruned_loss=0.1776, over 2636504.73 frames. ], batch size: 40, lr: 4.36e-02, grad_scale: 16.0 +2024-08-03 01:31:22,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=9900.0, ans=0.025 +2024-08-03 01:31:25,955 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.664e+02 1.951e+02 2.469e+02 5.181e+02, threshold=3.901e+02, percent-clipped=9.0 +2024-08-03 01:31:35,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9936.666666666666, ans=0.20063333333333333 +2024-08-03 01:31:41,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=9936.666666666666, ans=0.125 +2024-08-03 01:31:56,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=9973.333333333334, ans=0.125 +2024-08-03 01:32:02,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.60 vs. limit=15.0075 +2024-08-03 01:32:17,815 INFO [train.py:1114] (1/4) Epoch 1, batch 2750, loss[loss=0.3684, simple_loss=0.3929, pruned_loss=0.172, over 13349.00 frames. ], tot_loss[loss=0.3834, simple_loss=0.4149, pruned_loss=0.1759, over 2634883.36 frames. ], batch size: 34, lr: 4.36e-02, grad_scale: 16.0 +2024-08-03 01:32:23,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10083.333333333334, ans=0.19916666666666666 +2024-08-03 01:32:28,962 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.72 vs. limit=15.09 +2024-08-03 01:32:41,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=10156.666666666666, ans=0.125 +2024-08-03 01:33:07,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=10230.0, ans=0.125 +2024-08-03 01:33:08,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.88 vs. limit=15.1725 +2024-08-03 01:33:10,608 INFO [train.py:1114] (1/4) Epoch 1, batch 2800, loss[loss=0.521, simple_loss=0.4934, pruned_loss=0.2744, over 8795.00 frames. ], tot_loss[loss=0.3814, simple_loss=0.4138, pruned_loss=0.1745, over 2626282.61 frames. 
], batch size: 96, lr: 4.36e-02, grad_scale: 32.0 +2024-08-03 01:33:11,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=10266.666666666666, ans=0.5406666666666667 +2024-08-03 01:33:15,322 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.678e+02 2.174e+02 2.677e+02 5.163e+02, threshold=4.348e+02, percent-clipped=2.0 +2024-08-03 01:33:29,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=10340.0, ans=0.5381 +2024-08-03 01:33:31,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10340.0, ans=0.0 +2024-08-03 01:33:40,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.78 vs. limit=7.594166666666666 +2024-08-03 01:35:19,834 INFO [train.py:1114] (1/4) Epoch 1, batch 2850, loss[loss=0.3565, simple_loss=0.3981, pruned_loss=0.1574, over 13364.00 frames. ], tot_loss[loss=0.3838, simple_loss=0.4153, pruned_loss=0.1761, over 2620459.98 frames. ], batch size: 35, lr: 4.35e-02, grad_scale: 32.0 +2024-08-03 01:35:35,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=10450.0, ans=0.125 +2024-08-03 01:35:39,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=10486.666666666666, ans=0.125 +2024-08-03 01:35:41,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=10486.666666666666, ans=0.125 +2024-08-03 01:36:01,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.02 vs. limit=15.42 +2024-08-03 01:36:02,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=10560.0, ans=0.008573913043478262 +2024-08-03 01:36:05,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=4.5895 +2024-08-03 01:36:14,790 INFO [train.py:1114] (1/4) Epoch 1, batch 2900, loss[loss=0.3747, simple_loss=0.4102, pruned_loss=0.1696, over 13362.00 frames. ], tot_loss[loss=0.3799, simple_loss=0.4134, pruned_loss=0.1732, over 2631765.48 frames. ], batch size: 36, lr: 4.35e-02, grad_scale: 32.0 +2024-08-03 01:36:19,261 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.316e+02 1.668e+02 1.982e+02 2.661e+02 5.002e+02, threshold=3.964e+02, percent-clipped=4.0 +2024-08-03 01:36:19,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=11.4875 +2024-08-03 01:36:20,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.96 vs. 
limit=8.253333333333334 +2024-08-03 01:36:26,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=10670.0, ans=0.5265500000000001 +2024-08-03 01:36:28,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=10670.0, ans=0.5265500000000001 +2024-08-03 01:36:30,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10670.0, ans=0.0 +2024-08-03 01:36:35,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=10706.666666666666, ans=0.05 +2024-08-03 01:36:42,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.68 vs. limit=4.6114999999999995 +2024-08-03 01:36:45,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=10743.333333333334, ans=0.025 +2024-08-03 01:36:54,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=11.5425 +2024-08-03 01:36:54,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=10780.0, ans=0.125 +2024-08-03 01:37:00,008 INFO [train.py:1114] (1/4) Epoch 1, batch 2950, loss[loss=0.3505, simple_loss=0.3866, pruned_loss=0.1572, over 13353.00 frames. ], tot_loss[loss=0.3765, simple_loss=0.4105, pruned_loss=0.1712, over 2629778.79 frames. ], batch size: 34, lr: 4.34e-02, grad_scale: 32.0 +2024-08-03 01:37:03,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=10816.666666666666, ans=0.021597222222222226 +2024-08-03 01:37:49,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=10853.333333333334, ans=0.025 +2024-08-03 01:38:03,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10926.666666666666, ans=0.19073333333333334 +2024-08-03 01:38:04,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=10926.666666666666, ans=0.125 +2024-08-03 01:38:15,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.05 vs. limit=11.61125 +2024-08-03 01:38:21,970 INFO [train.py:1114] (1/4) Epoch 1, batch 3000, loss[loss=0.329, simple_loss=0.3802, pruned_loss=0.1389, over 13529.00 frames. ], tot_loss[loss=0.3738, simple_loss=0.4089, pruned_loss=0.1693, over 2629476.67 frames. ], batch size: 37, lr: 4.34e-02, grad_scale: 32.0 +2024-08-03 01:38:21,971 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 01:39:12,990 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=0.2888, simple_loss=0.3696, pruned_loss=0.104, over 944034.00 frames. 
+2024-08-03 01:39:12,992 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 01:39:17,653 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.298e+02 1.604e+02 1.963e+02 2.352e+02 4.798e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-03 01:39:30,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=11073.333333333334, ans=0.125 +2024-08-03 01:39:32,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=11073.333333333334, ans=0.5124333333333333 +2024-08-03 01:39:32,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=11073.333333333334, ans=0.125 +2024-08-03 01:39:39,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=11110.0, ans=0.008454347826086957 +2024-08-03 01:39:44,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11110.0, ans=0.125 +2024-08-03 01:39:46,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=11110.0, ans=0.125 +2024-08-03 01:39:54,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=11146.666666666666, ans=10.573333333333334 +2024-08-03 01:40:03,052 INFO [train.py:1114] (1/4) Epoch 1, batch 3050, loss[loss=0.3597, simple_loss=0.4008, pruned_loss=0.1593, over 13527.00 frames. ], tot_loss[loss=0.3729, simple_loss=0.4088, pruned_loss=0.1685, over 2626631.23 frames. ], batch size: 35, lr: 4.33e-02, grad_scale: 32.0 +2024-08-03 01:40:11,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11183.333333333334, ans=0.18816666666666665 +2024-08-03 01:40:22,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.04 vs. limit=8.488 +2024-08-03 01:40:33,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=11256.666666666666, ans=0.019763888888888893 +2024-08-03 01:40:33,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=11256.666666666666, ans=0.008422463768115942 +2024-08-03 01:40:35,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=11256.666666666666, ans=0.125 +2024-08-03 01:40:55,295 INFO [train.py:1114] (1/4) Epoch 1, batch 3100, loss[loss=0.3877, simple_loss=0.4223, pruned_loss=0.1766, over 13289.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.4065, pruned_loss=0.1663, over 2626316.61 frames. ], batch size: 46, lr: 4.33e-02, grad_scale: 32.0 +2024-08-03 01:40:59,560 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.612e+02 1.933e+02 2.547e+02 5.853e+02, threshold=3.866e+02, percent-clipped=4.0 +2024-08-03 01:41:34,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.60 vs. 
limit=16.08 +2024-08-03 01:41:42,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=11476.666666666666, ans=0.125 +2024-08-03 01:41:52,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.37 vs. limit=11.817499999999999 +2024-08-03 01:41:54,262 INFO [train.py:1114] (1/4) Epoch 1, batch 3150, loss[loss=0.3947, simple_loss=0.4336, pruned_loss=0.1779, over 13053.00 frames. ], tot_loss[loss=0.3686, simple_loss=0.4063, pruned_loss=0.1654, over 2628237.35 frames. ], batch size: 48, lr: 4.32e-02, grad_scale: 32.0 +2024-08-03 01:42:02,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=11.83125 +2024-08-03 01:42:05,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=11550.0, ans=0.008358695652173913 +2024-08-03 01:42:12,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.43 vs. limit=7.8966666666666665 +2024-08-03 01:42:15,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.18 vs. limit=10.811666666666667 +2024-08-03 01:42:22,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=11623.333333333334, ans=0.125 +2024-08-03 01:42:24,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=11623.333333333334, ans=16.2175 +2024-08-03 01:42:44,259 INFO [train.py:1114] (1/4) Epoch 1, batch 3200, loss[loss=0.3862, simple_loss=0.4227, pruned_loss=0.1749, over 13537.00 frames. ], tot_loss[loss=0.3663, simple_loss=0.4047, pruned_loss=0.1639, over 2634747.10 frames. ], batch size: 37, lr: 4.32e-02, grad_scale: 32.0 +2024-08-03 01:42:46,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=11733.333333333334, ans=0.125 +2024-08-03 01:42:48,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.624e+02 1.966e+02 2.680e+02 4.372e+02, threshold=3.932e+02, percent-clipped=2.0 +2024-08-03 01:42:50,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=11.9 +2024-08-03 01:43:06,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=11806.666666666666, ans=0.13193333333333332 +2024-08-03 01:43:24,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=11880.0, ans=16.41 +2024-08-03 01:43:33,494 INFO [train.py:1114] (1/4) Epoch 1, batch 3250, loss[loss=0.375, simple_loss=0.4226, pruned_loss=0.1637, over 13400.00 frames. ], tot_loss[loss=0.3648, simple_loss=0.4038, pruned_loss=0.1628, over 2639266.71 frames. 
], batch size: 38, lr: 4.31e-02, grad_scale: 32.0 +2024-08-03 01:43:37,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11916.666666666666, ans=0.18083333333333335 +2024-08-03 01:43:49,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.56 vs. limit=10.976666666666667 +2024-08-03 01:44:09,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.23 vs. limit=16.5475 +2024-08-03 01:44:11,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=12063.333333333334, ans=0.125 +2024-08-03 01:44:15,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12063.333333333334, ans=0.17936666666666667 +2024-08-03 01:44:18,533 INFO [train.py:1114] (1/4) Epoch 1, batch 3300, loss[loss=0.4316, simple_loss=0.4505, pruned_loss=0.2064, over 12821.00 frames. ], tot_loss[loss=0.3632, simple_loss=0.4022, pruned_loss=0.1621, over 2641228.60 frames. ], batch size: 52, lr: 4.31e-02, grad_scale: 32.0 +2024-08-03 01:44:19,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=12100.0, ans=0.01625 +2024-08-03 01:44:22,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.557e+02 1.877e+02 2.344e+02 4.156e+02, threshold=3.753e+02, percent-clipped=2.0 +2024-08-03 01:44:24,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=12100.0, ans=0.125 +2024-08-03 01:44:30,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=12136.666666666666, ans=0.0 +2024-08-03 01:44:43,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=12173.333333333334, ans=0.0 +2024-08-03 01:45:02,673 INFO [train.py:1114] (1/4) Epoch 1, batch 3350, loss[loss=0.3808, simple_loss=0.4156, pruned_loss=0.173, over 12996.00 frames. ], tot_loss[loss=0.3634, simple_loss=0.4027, pruned_loss=0.162, over 2631123.73 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-03 01:45:10,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=12320.0, ans=0.125 +2024-08-03 01:45:11,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=12.120000000000001 +2024-08-03 01:45:12,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=12320.0, ans=0.125 +2024-08-03 01:45:24,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.62 vs. 
limit=8.942666666666668 +2024-08-03 01:45:27,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=12356.666666666666, ans=0.125 +2024-08-03 01:45:30,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12393.333333333334, ans=0.17606666666666665 +2024-08-03 01:45:40,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=12430.0, ans=0.008167391304347826 +2024-08-03 01:45:47,880 INFO [train.py:1114] (1/4) Epoch 1, batch 3400, loss[loss=0.3402, simple_loss=0.3719, pruned_loss=0.1543, over 13545.00 frames. ], tot_loss[loss=0.3627, simple_loss=0.4021, pruned_loss=0.1617, over 2626268.71 frames. ], batch size: 31, lr: 4.29e-02, grad_scale: 32.0 +2024-08-03 01:45:52,235 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.662e+02 2.017e+02 2.620e+02 5.936e+02, threshold=4.033e+02, percent-clipped=10.0 +2024-08-03 01:46:39,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=12540.0, ans=0.4611 +2024-08-03 01:46:43,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=12540.0, ans=0.014416666666666668 +2024-08-03 01:46:54,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=12613.333333333334, ans=0.125 +2024-08-03 01:47:02,911 INFO [train.py:1114] (1/4) Epoch 1, batch 3450, loss[loss=0.3819, simple_loss=0.421, pruned_loss=0.1714, over 12951.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.4018, pruned_loss=0.1609, over 2630143.74 frames. ], batch size: 52, lr: 4.29e-02, grad_scale: 32.0 +2024-08-03 01:47:04,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=12650.0, ans=0.125 +2024-08-03 01:47:09,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=12650.0, ans=0.013958333333333336 +2024-08-03 01:47:11,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12686.666666666666, ans=0.125 +2024-08-03 01:47:26,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=12723.333333333334, ans=0.013652777777777778 +2024-08-03 01:47:37,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=17.78 vs. limit=12.285 +2024-08-03 01:47:45,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12796.666666666666, ans=0.125 +2024-08-03 01:47:47,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=12796.666666666666, ans=0.00808768115942029 +2024-08-03 01:47:55,453 INFO [train.py:1114] (1/4) Epoch 1, batch 3500, loss[loss=0.3574, simple_loss=0.3908, pruned_loss=0.162, over 13536.00 frames. ], tot_loss[loss=0.3592, simple_loss=0.3995, pruned_loss=0.1595, over 2632339.47 frames. 
], batch size: 34, lr: 4.28e-02, grad_scale: 32.0 +2024-08-03 01:47:59,792 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.825e+02 2.381e+02 4.772e+02, threshold=3.650e+02, percent-clipped=2.0 +2024-08-03 01:48:06,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=12870.0, ans=0.125 +2024-08-03 01:48:12,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-03 01:48:16,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=12906.666666666666, ans=0.125 +2024-08-03 01:48:23,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12906.666666666666, ans=0.17093333333333333 +2024-08-03 01:48:49,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=9.192 +2024-08-03 01:48:51,942 INFO [train.py:1114] (1/4) Epoch 1, batch 3550, loss[loss=0.3955, simple_loss=0.4276, pruned_loss=0.1817, over 12408.00 frames. ], tot_loss[loss=0.3624, simple_loss=0.4026, pruned_loss=0.1611, over 2629394.71 frames. ], batch size: 58, lr: 4.28e-02, grad_scale: 32.0 +2024-08-03 01:48:53,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=13016.666666666666, ans=0.125 +2024-08-03 01:48:59,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13016.666666666666, ans=0.125 +2024-08-03 01:49:17,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=13090.0, ans=0.125 +2024-08-03 01:49:24,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.76 vs. limit=12.4225 +2024-08-03 01:49:26,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=13126.666666666666, ans=10.0 +2024-08-03 01:49:28,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=13126.666666666666, ans=0.011972222222222224 +2024-08-03 01:49:35,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=13163.333333333334, ans=0.16836666666666666 +2024-08-03 01:49:41,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=13163.333333333334, ans=0.125 +2024-08-03 01:49:46,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=13163.333333333334, ans=10.0 +2024-08-03 01:49:46,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=13163.333333333334, ans=0.011819444444444438 +2024-08-03 01:49:51,064 INFO [train.py:1114] (1/4) Epoch 1, batch 3600, loss[loss=0.4451, simple_loss=0.4489, pruned_loss=0.2207, over 9556.00 frames. ], tot_loss[loss=0.3726, simple_loss=0.4086, pruned_loss=0.1683, over 2488391.25 frames. 
], batch size: 96, lr: 4.27e-02, grad_scale: 32.0 +2024-08-03 01:49:55,456 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.481e+02 1.802e+02 2.019e+02 3.446e+02, threshold=3.604e+02, percent-clipped=0.0 +2024-08-03 01:49:59,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=13200.0, ans=0.125 +2024-08-03 01:50:02,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.77 vs. limit=12.463750000000001 +2024-08-03 01:50:03,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13236.666666666666, ans=0.16763333333333333 +2024-08-03 01:50:08,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=13236.666666666666, ans=10.0 +2024-08-03 01:50:09,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=13236.666666666666, ans=0.125 +2024-08-03 01:50:13,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13273.333333333334, ans=0.125 +2024-08-03 01:50:14,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.14 vs. limit=9.309333333333335 +2024-08-03 01:50:29,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=13273.333333333334, ans=0.125 +2024-08-03 01:50:31,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=13310.0, ans=0.43415000000000004 +2024-08-03 01:52:08,734 INFO [train.py:1114] (1/4) Epoch 2, batch 0, loss[loss=0.3295, simple_loss=0.385, pruned_loss=0.137, over 13332.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.385, pruned_loss=0.137, over 13332.00 frames. ], batch size: 33, lr: 4.19e-02, grad_scale: 32.0 +2024-08-03 01:52:08,735 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 01:52:18,754 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.2954, simple_loss=0.3785, pruned_loss=0.1062, over 944034.00 frames. +2024-08-03 01:52:18,755 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 01:52:18,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=13346.666666666666, ans=0.1 +2024-08-03 01:52:21,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.16 vs. 
limit=12.504999999999999 +2024-08-03 01:52:25,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=13346.666666666666, ans=0.025 +2024-08-03 01:52:39,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=13420.0, ans=0.125 +2024-08-03 01:52:40,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=13420.0, ans=0.125 +2024-08-03 01:52:43,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13420.0, ans=0.0 +2024-08-03 01:52:45,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.59 vs. limit=17.564999999999998 +2024-08-03 01:53:05,766 INFO [train.py:1114] (1/4) Epoch 2, batch 50, loss[loss=0.3088, simple_loss=0.3603, pruned_loss=0.1287, over 13443.00 frames. ], tot_loss[loss=0.3682, simple_loss=0.4071, pruned_loss=0.1646, over 577853.20 frames. ], batch size: 32, lr: 4.18e-02, grad_scale: 16.0 +2024-08-03 01:53:08,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=13530.0, ans=0.125 +2024-08-03 01:53:08,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13530.0, ans=0.16469999999999999 +2024-08-03 01:53:18,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13566.666666666666, ans=0.16433333333333333 +2024-08-03 01:53:22,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=13566.666666666666, ans=0.125 +2024-08-03 01:53:25,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.505e+02 1.833e+02 2.741e+02 6.945e+02, threshold=3.667e+02, percent-clipped=7.0 +2024-08-03 01:53:27,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13566.666666666666, ans=0.125 +2024-08-03 01:53:34,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.60125 +2024-08-03 01:53:36,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=13603.333333333334, ans=0.009986111111111105 +2024-08-03 01:53:41,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13640.0, ans=0.1636 +2024-08-03 01:53:44,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=13640.0, ans=0.125 +2024-08-03 01:53:53,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.77 vs. limit=17.7575 +2024-08-03 01:53:56,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=13713.333333333334, ans=0.009527777777777774 +2024-08-03 01:53:56,981 INFO [train.py:1114] (1/4) Epoch 2, batch 100, loss[loss=0.3211, simple_loss=0.3753, pruned_loss=0.1335, over 13539.00 frames. 
], tot_loss[loss=0.3631, simple_loss=0.4049, pruned_loss=0.1606, over 1026021.24 frames. ], batch size: 35, lr: 4.17e-02, grad_scale: 16.0 +2024-08-03 01:53:58,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13713.333333333334, ans=0.125 +2024-08-03 01:54:05,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13750.0, ans=0.1625 +2024-08-03 01:54:09,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=13750.0, ans=0.125 +2024-08-03 01:54:28,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=13786.666666666666, ans=0.16213333333333332 +2024-08-03 01:54:35,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13823.333333333334, ans=0.0 +2024-08-03 01:54:46,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=13860.0, ans=0.125 +2024-08-03 01:54:48,056 INFO [train.py:1114] (1/4) Epoch 2, batch 150, loss[loss=0.3195, simple_loss=0.3661, pruned_loss=0.1365, over 13421.00 frames. ], tot_loss[loss=0.3567, simple_loss=0.3995, pruned_loss=0.157, over 1387013.58 frames. ], batch size: 32, lr: 4.17e-02, grad_scale: 16.0 +2024-08-03 01:54:49,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13896.666666666666, ans=0.0 +2024-08-03 01:54:52,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=13896.666666666666, ans=0.125 +2024-08-03 01:54:59,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=13933.333333333334, ans=0.125 +2024-08-03 01:55:03,038 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.519e+02 1.772e+02 2.227e+02 3.651e+02, threshold=3.544e+02, percent-clipped=0.0 +2024-08-03 01:55:09,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=13970.0, ans=0.1603 +2024-08-03 01:55:20,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.40 vs. limit=12.752500000000001 +2024-08-03 01:55:34,694 INFO [train.py:1114] (1/4) Epoch 2, batch 200, loss[loss=0.4164, simple_loss=0.4421, pruned_loss=0.1953, over 12404.00 frames. ], tot_loss[loss=0.3559, simple_loss=0.3982, pruned_loss=0.1568, over 1665482.94 frames. ], batch size: 58, lr: 4.16e-02, grad_scale: 16.0 +2024-08-03 01:55:45,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.13 vs. 
limit=6.823333333333333 +2024-08-03 01:55:54,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=14153.333333333334, ans=0.125 +2024-08-03 01:56:16,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=14226.666666666666, ans=0.125 +2024-08-03 01:56:23,310 INFO [train.py:1114] (1/4) Epoch 2, batch 250, loss[loss=0.4192, simple_loss=0.4496, pruned_loss=0.1944, over 13281.00 frames. ], tot_loss[loss=0.3547, simple_loss=0.3971, pruned_loss=0.1561, over 1883793.17 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-03 01:56:38,385 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.594e+02 1.964e+02 2.594e+02 6.291e+02, threshold=3.929e+02, percent-clipped=8.0 +2024-08-03 01:56:49,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=14336.666666666666, ans=0.125 +2024-08-03 01:56:51,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=14373.333333333334, ans=0.125 +2024-08-03 01:57:08,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=14410.0, ans=0.125 +2024-08-03 01:57:12,757 INFO [train.py:1114] (1/4) Epoch 2, batch 300, loss[loss=0.3998, simple_loss=0.4381, pruned_loss=0.1807, over 13452.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3953, pruned_loss=0.1548, over 2051793.23 frames. ], batch size: 42, lr: 4.15e-02, grad_scale: 16.0 +2024-08-03 01:57:37,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=12.945 +2024-08-03 01:57:39,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.20 vs. limit=18.39 +2024-08-03 01:57:40,017 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:57:43,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=14556.666666666666, ans=0.007705072463768116 +2024-08-03 01:57:52,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=14593.333333333334, ans=0.025 +2024-08-03 01:57:54,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.84 vs. limit=18.445 +2024-08-03 01:57:59,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=14593.333333333334, ans=0.005861111111111109 +2024-08-03 01:58:01,708 INFO [train.py:1114] (1/4) Epoch 2, batch 350, loss[loss=0.3304, simple_loss=0.3704, pruned_loss=0.1452, over 13554.00 frames. ], tot_loss[loss=0.3509, simple_loss=0.3944, pruned_loss=0.1537, over 2182112.73 frames. 
], batch size: 33, lr: 4.15e-02, grad_scale: 16.0 +2024-08-03 01:58:55,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=14666.666666666666, ans=0.025 +2024-08-03 01:58:58,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.602e+02 1.924e+02 2.648e+02 5.206e+02, threshold=3.847e+02, percent-clipped=6.0 +2024-08-03 01:59:01,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.68 vs. limit=12.351666666666667 +2024-08-03 01:59:25,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=14776.666666666666, ans=0.125 +2024-08-03 01:59:29,770 INFO [train.py:1114] (1/4) Epoch 2, batch 400, loss[loss=0.3657, simple_loss=0.4062, pruned_loss=0.1626, over 13368.00 frames. ], tot_loss[loss=0.3486, simple_loss=0.3932, pruned_loss=0.152, over 2286657.95 frames. ], batch size: 37, lr: 4.14e-02, grad_scale: 32.0 +2024-08-03 02:04:06,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=14850.0, ans=0.004791666666666666 +2024-08-03 02:04:14,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=14850.0, ans=0.07 +2024-08-03 02:05:23,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=14886.666666666666, ans=0.3789666666666667 +2024-08-03 02:05:28,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=14923.333333333334, ans=0.004486111111111107 +2024-08-03 02:05:41,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=14923.333333333334, ans=0.004486111111111107 +2024-08-03 02:05:42,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=14923.333333333334, ans=0.025 +2024-08-03 02:05:46,635 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:05:54,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=5.244 +2024-08-03 02:06:33,894 INFO [train.py:1114] (1/4) Epoch 2, batch 450, loss[loss=0.3658, simple_loss=0.4117, pruned_loss=0.1599, over 13538.00 frames. ], tot_loss[loss=0.3463, simple_loss=0.3917, pruned_loss=0.1504, over 2359628.38 frames. 
], batch size: 38, lr: 4.13e-02, grad_scale: 32.0 +2024-08-03 02:06:40,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=14996.666666666666, ans=0.125 +2024-08-03 02:06:47,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=15033.333333333334, ans=0.05 +2024-08-03 02:06:48,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=15033.333333333334, ans=0.0076014492753623195 +2024-08-03 02:06:48,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.511e+02 1.857e+02 2.288e+02 3.385e+02, threshold=3.714e+02, percent-clipped=0.0 +2024-08-03 02:06:52,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.33 vs. limit=5.2605 +2024-08-03 02:07:01,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=15070.0, ans=0.37255000000000005 +2024-08-03 02:07:10,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=15106.666666666666, ans=0.007585507246376811 +2024-08-03 02:07:17,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.55 vs. limit=12.553333333333333 +2024-08-03 02:07:19,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15143.333333333334, ans=0.14856666666666665 +2024-08-03 02:07:24,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=15143.333333333334, ans=0.07 +2024-08-03 02:07:26,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15143.333333333334, ans=0.125 +2024-08-03 02:07:28,592 INFO [train.py:1114] (1/4) Epoch 2, batch 500, loss[loss=0.3619, simple_loss=0.4081, pruned_loss=0.1578, over 13425.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3897, pruned_loss=0.1487, over 2425315.50 frames. ], batch size: 43, lr: 4.13e-02, grad_scale: 32.0 +2024-08-03 02:07:33,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15180.0, ans=0.0 +2024-08-03 02:07:53,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-03 02:08:18,911 INFO [train.py:1114] (1/4) Epoch 2, batch 550, loss[loss=0.3736, simple_loss=0.4163, pruned_loss=0.1654, over 13124.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3891, pruned_loss=0.148, over 2468521.53 frames. 
], batch size: 48, lr: 4.12e-02, grad_scale: 32.0 +2024-08-03 02:08:25,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=15363.333333333334, ans=0.125 +2024-08-03 02:08:31,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15400.0, ans=0.125 +2024-08-03 02:08:33,978 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.486e+02 1.782e+02 2.081e+02 4.201e+02, threshold=3.563e+02, percent-clipped=2.0 +2024-08-03 02:08:35,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=13.275 +2024-08-03 02:08:55,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.05 vs. limit=19.105 +2024-08-03 02:08:55,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15473.333333333334, ans=0.14526666666666666 +2024-08-03 02:09:00,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=15510.0, ans=0.35714999999999997 +2024-08-03 02:09:22,842 INFO [train.py:1114] (1/4) Epoch 2, batch 600, loss[loss=0.3936, simple_loss=0.4279, pruned_loss=0.1797, over 13378.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3885, pruned_loss=0.147, over 2508034.92 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0 +2024-08-03 02:09:24,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=15546.666666666666, ans=0.125 +2024-08-03 02:09:27,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15546.666666666666, ans=0.14453333333333335 +2024-08-03 02:09:43,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15620.0, ans=0.125 +2024-08-03 02:09:49,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.60 vs. limit=7.1240000000000006 +2024-08-03 02:09:56,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=15656.666666666666, ans=0.125 +2024-08-03 02:10:07,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15693.333333333334, ans=0.125 +2024-08-03 02:10:09,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15693.333333333334, ans=0.14306666666666668 +2024-08-03 02:10:10,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.59 vs. limit=13.385 +2024-08-03 02:10:12,467 INFO [train.py:1114] (1/4) Epoch 2, batch 650, loss[loss=0.3339, simple_loss=0.3858, pruned_loss=0.141, over 13548.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3867, pruned_loss=0.1454, over 2543456.39 frames. 
], batch size: 37, lr: 4.11e-02, grad_scale: 32.0 +2024-08-03 02:10:12,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.76 vs. limit=19.2975 +2024-08-03 02:10:21,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15730.0, ans=0.14270000000000002 +2024-08-03 02:10:24,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=8.941666666666666 +2024-08-03 02:10:28,712 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.454e+02 1.669e+02 2.017e+02 2.893e+02, threshold=3.339e+02, percent-clipped=0.0 +2024-08-03 02:10:50,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=15803.333333333334, ans=0.0008194444444444421 +2024-08-03 02:11:01,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=15840.0, ans=0.125 +2024-08-03 02:11:10,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15876.666666666666, ans=0.14123333333333335 +2024-08-03 02:11:13,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=15913.333333333334, ans=0.125 +2024-08-03 02:11:13,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15913.333333333334, ans=0.125 +2024-08-03 02:11:13,923 INFO [train.py:1114] (1/4) Epoch 2, batch 700, loss[loss=0.292, simple_loss=0.3485, pruned_loss=0.1178, over 13534.00 frames. ], tot_loss[loss=0.3395, simple_loss=0.3874, pruned_loss=0.1458, over 2564537.05 frames. ], batch size: 35, lr: 4.11e-02, grad_scale: 8.0 +2024-08-03 02:11:24,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=15950.0, ans=0.34175 +2024-08-03 02:11:32,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=15986.666666666666, ans=0.3404666666666667 +2024-08-03 02:11:33,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=15986.666666666666, ans=0.125 +2024-08-03 02:11:43,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=16023.333333333334, ans=0.025 +2024-08-03 02:11:48,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16023.333333333334, ans=0.125 +2024-08-03 02:12:03,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16060.0, ans=0.13940000000000002 +2024-08-03 02:12:04,801 INFO [train.py:1114] (1/4) Epoch 2, batch 750, loss[loss=0.3042, simple_loss=0.3704, pruned_loss=0.119, over 13359.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3863, pruned_loss=0.145, over 2581467.19 frames. 
], batch size: 37, lr: 4.10e-02, grad_scale: 8.0 +2024-08-03 02:12:21,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.638e+02 1.990e+02 2.530e+02 5.439e+02, threshold=3.980e+02, percent-clipped=7.0 +2024-08-03 02:13:00,968 INFO [train.py:1114] (1/4) Epoch 2, batch 800, loss[loss=0.3185, simple_loss=0.3645, pruned_loss=0.1362, over 13329.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3866, pruned_loss=0.1454, over 2595961.04 frames. ], batch size: 33, lr: 4.09e-02, grad_scale: 16.0 +2024-08-03 02:13:26,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16353.333333333334, ans=0.125 +2024-08-03 02:13:35,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=16390.0, ans=0.125 +2024-08-03 02:13:41,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.34 vs. limit=13.64625 +2024-08-03 02:13:52,945 INFO [train.py:1114] (1/4) Epoch 2, batch 850, loss[loss=0.3352, simple_loss=0.3884, pruned_loss=0.141, over 13323.00 frames. ], tot_loss[loss=0.3384, simple_loss=0.3862, pruned_loss=0.1453, over 2609265.46 frames. ], batch size: 40, lr: 4.09e-02, grad_scale: 16.0 +2024-08-03 02:14:05,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=16500.0, ans=0.0 +2024-08-03 02:14:12,137 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.467e+02 1.720e+02 2.030e+02 3.514e+02, threshold=3.439e+02, percent-clipped=0.0 +2024-08-03 02:14:21,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=16536.666666666668, ans=0.00727463768115942 +2024-08-03 02:14:24,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=16573.333333333332, ans=0.007266666666666668 +2024-08-03 02:14:30,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=16573.333333333332, ans=0.0 +2024-08-03 02:14:42,219 INFO [train.py:1114] (1/4) Epoch 2, batch 900, loss[loss=0.3149, simple_loss=0.3623, pruned_loss=0.1337, over 13344.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3856, pruned_loss=0.145, over 2613317.32 frames. 
], batch size: 33, lr: 4.08e-02, grad_scale: 16.0 +2024-08-03 02:14:43,307 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.265e-02 +2024-08-03 02:14:48,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16646.666666666668, ans=0.125 +2024-08-03 02:15:03,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=16720.0, ans=0.025 +2024-08-03 02:15:12,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=16756.666666666668, ans=0.007226811594202898 +2024-08-03 02:15:23,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=16793.333333333332, ans=0.125 +2024-08-03 02:15:24,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=16793.333333333332, ans=0.025 +2024-08-03 02:15:30,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.02 vs. limit=13.415 +2024-08-03 02:15:30,754 INFO [train.py:1114] (1/4) Epoch 2, batch 950, loss[loss=0.3093, simple_loss=0.3705, pruned_loss=0.124, over 13528.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3857, pruned_loss=0.1449, over 2613788.32 frames. ], batch size: 34, lr: 4.08e-02, grad_scale: 16.0 +2024-08-03 02:15:35,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16830.0, ans=0.125 +2024-08-03 02:15:38,223 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:15:49,944 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.484e+02 1.735e+02 2.135e+02 4.344e+02, threshold=3.469e+02, percent-clipped=2.0 +2024-08-03 02:16:02,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=16940.0, ans=0.125 +2024-08-03 02:16:20,064 INFO [train.py:1114] (1/4) Epoch 2, batch 1000, loss[loss=0.2923, simple_loss=0.3553, pruned_loss=0.1146, over 13369.00 frames. ], tot_loss[loss=0.3395, simple_loss=0.3876, pruned_loss=0.1457, over 2612166.69 frames. 
], batch size: 35, lr: 4.07e-02, grad_scale: 16.0 +2024-08-03 02:16:32,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17050.0, ans=0.125 +2024-08-03 02:16:32,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=17050.0, ans=0.125 +2024-08-03 02:16:38,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=17086.666666666668, ans=0.3019666666666667 +2024-08-03 02:16:38,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=17086.666666666668, ans=0.3019666666666667 +2024-08-03 02:16:49,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=17123.333333333332, ans=0.3006833333333334 +2024-08-03 02:16:57,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=5.5685 +2024-08-03 02:17:10,907 INFO [train.py:1114] (1/4) Epoch 2, batch 1050, loss[loss=0.3425, simple_loss=0.4014, pruned_loss=0.1418, over 13578.00 frames. ], tot_loss[loss=0.3368, simple_loss=0.3855, pruned_loss=0.144, over 2616178.69 frames. ], batch size: 39, lr: 4.06e-02, grad_scale: 16.0 +2024-08-03 02:17:11,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=17196.666666666668, ans=0.007131159420289855 +2024-08-03 02:17:12,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=17196.666666666668, ans=0.007131159420289855 +2024-08-03 02:17:27,497 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.473e+02 1.878e+02 2.204e+02 3.880e+02, threshold=3.755e+02, percent-clipped=2.0 +2024-08-03 02:17:57,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.95 vs. limit=10.922666666666668 +2024-08-03 02:18:10,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=17380.0, ans=0.125 +2024-08-03 02:18:11,159 INFO [train.py:1114] (1/4) Epoch 2, batch 1100, loss[loss=0.3006, simple_loss=0.3584, pruned_loss=0.1214, over 13576.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.385, pruned_loss=0.1434, over 2619011.75 frames. ], batch size: 36, lr: 4.06e-02, grad_scale: 16.0 +2024-08-03 02:18:30,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=17453.333333333332, ans=0.9245333333333333 +2024-08-03 02:18:33,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.05 vs. limit=14.044999999999998 +2024-08-03 02:18:43,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.24 vs. limit=20.6175 +2024-08-03 02:18:59,086 INFO [train.py:1114] (1/4) Epoch 2, batch 1150, loss[loss=0.3614, simple_loss=0.4039, pruned_loss=0.1595, over 13553.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3848, pruned_loss=0.1434, over 2617915.07 frames. 
], batch size: 36, lr: 4.05e-02, grad_scale: 16.0 +2024-08-03 02:19:05,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.68 vs. limit=14.08625 +2024-08-03 02:19:07,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.02 vs. limit=14.08625 +2024-08-03 02:19:16,027 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.513e+02 2.017e+02 2.624e+02 5.380e+02, threshold=4.034e+02, percent-clipped=4.0 +2024-08-03 02:19:16,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=17600.0, ans=0.28400000000000003 +2024-08-03 02:19:57,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=17710.0, ans=0.0 +2024-08-03 02:20:03,136 INFO [train.py:1114] (1/4) Epoch 2, batch 1200, loss[loss=0.3233, simple_loss=0.3794, pruned_loss=0.1336, over 13580.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3852, pruned_loss=0.1435, over 2615016.36 frames. ], batch size: 39, lr: 4.04e-02, grad_scale: 32.0 +2024-08-03 02:20:31,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=17820.0, ans=0.025 +2024-08-03 02:20:34,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17820.0, ans=0.125 +2024-08-03 02:20:47,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=17893.333333333332, ans=0.125 +2024-08-03 02:20:56,922 INFO [train.py:1114] (1/4) Epoch 2, batch 1250, loss[loss=0.3268, simple_loss=0.3823, pruned_loss=0.1356, over 13470.00 frames. ], tot_loss[loss=0.335, simple_loss=0.3846, pruned_loss=0.1427, over 2627258.66 frames. ], batch size: 42, lr: 4.04e-02, grad_scale: 32.0 +2024-08-03 02:21:23,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=17966.666666666668, ans=0.0 +2024-08-03 02:21:24,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.393e+02 1.566e+02 1.875e+02 3.241e+02, threshold=3.132e+02, percent-clipped=0.0 +2024-08-03 02:21:32,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=18003.333333333332, ans=0.09899494936611666 +2024-08-03 02:21:42,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=18040.0, ans=0.025 +2024-08-03 02:21:54,715 INFO [train.py:1114] (1/4) Epoch 2, batch 1300, loss[loss=0.3224, simple_loss=0.3816, pruned_loss=0.1316, over 12871.00 frames. ], tot_loss[loss=0.3338, simple_loss=0.3834, pruned_loss=0.142, over 2631148.03 frames. 
], batch size: 52, lr: 4.03e-02, grad_scale: 32.0 +2024-08-03 02:22:15,085 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:22:15,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18150.0, ans=0.11850000000000002 +2024-08-03 02:22:15,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=18150.0, ans=0.125 +2024-08-03 02:22:23,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=18186.666666666668, ans=0.125 +2024-08-03 02:22:35,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18223.333333333332, ans=0.125 +2024-08-03 02:22:36,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=18223.333333333332, ans=0.0 +2024-08-03 02:22:38,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18223.333333333332, ans=0.11776666666666669 +2024-08-03 02:22:42,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=18260.0, ans=0.125 +2024-08-03 02:22:49,377 INFO [train.py:1114] (1/4) Epoch 2, batch 1350, loss[loss=0.3475, simple_loss=0.3993, pruned_loss=0.1478, over 13535.00 frames. ], tot_loss[loss=0.3319, simple_loss=0.3823, pruned_loss=0.1408, over 2639123.96 frames. ], batch size: 37, lr: 4.03e-02, grad_scale: 32.0 +2024-08-03 02:22:55,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=18296.666666666668, ans=0.2596166666666667 +2024-08-03 02:23:05,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18333.333333333332, ans=0.1166666666666667 +2024-08-03 02:23:08,635 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.434e+02 1.711e+02 2.081e+02 4.051e+02, threshold=3.422e+02, percent-clipped=5.0 +2024-08-03 02:23:26,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=18370.0, ans=0.25705 +2024-08-03 02:23:47,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18443.333333333332, ans=0.11556666666666668 +2024-08-03 02:23:49,069 INFO [train.py:1114] (1/4) Epoch 2, batch 1400, loss[loss=0.3037, simple_loss=0.3464, pruned_loss=0.1305, over 13257.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3809, pruned_loss=0.1395, over 2642500.87 frames. 
], batch size: 31, lr: 4.02e-02, grad_scale: 32.0 +2024-08-03 02:23:56,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=18480.0, ans=0.05 +2024-08-03 02:23:58,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18516.666666666668, ans=0.125 +2024-08-03 02:24:08,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18516.666666666668, ans=0.125 +2024-08-03 02:24:16,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=18553.333333333332, ans=0.07 +2024-08-03 02:24:21,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=18590.0, ans=0.0 +2024-08-03 02:24:26,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18590.0, ans=0.125 +2024-08-03 02:24:38,143 INFO [train.py:1114] (1/4) Epoch 2, batch 1450, loss[loss=0.3252, simple_loss=0.3825, pruned_loss=0.134, over 13427.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3821, pruned_loss=0.1402, over 2642070.25 frames. ], batch size: 43, lr: 4.01e-02, grad_scale: 16.0 +2024-08-03 02:24:44,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=18663.333333333332, ans=0.0 +2024-08-03 02:24:53,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18700.0, ans=0.125 +2024-08-03 02:24:55,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=18700.0, ans=0.0 +2024-08-03 02:24:55,528 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.417e+02 1.675e+02 1.959e+02 3.168e+02, threshold=3.351e+02, percent-clipped=0.0 +2024-08-03 02:24:59,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18736.666666666668, ans=0.11263333333333331 +2024-08-03 02:25:09,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=18773.333333333332, ans=0.0 +2024-08-03 02:25:10,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=18773.333333333332, ans=0.4816 +2024-08-03 02:25:12,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18773.333333333332, ans=0.0 +2024-08-03 02:25:14,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=14.54 +2024-08-03 02:25:17,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.62 vs. 
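Each `INFO [train.py:1114]` line carries two loss triples: the bracketed per-batch loss and a running `tot_loss`. Throughout this excerpt the triples satisfy loss = 0.5 * simple_loss + pruned_loss to within rounding, the usual weighting when a cheap "simple" (trivial-joiner) transducer loss is trained alongside a pruned RNN-T loss. A quick check against values copied from the lines above; this is an observation about the logged numbers, not a claim about `train.py`'s exact code:

```python
# (loss, simple_loss, pruned_loss) copied verbatim from train.py:1114 lines above
triples = [
    (0.3233, 0.3794, 0.1336),  # epoch 2, batch 1200
    (0.3252, 0.3825, 0.1340),  # epoch 2, batch 1450
    (0.3357, 0.3882, 0.1416),  # epoch 2, batch 1500
]
for loss, simple, pruned in triples:
    # holds to within rounding of the 4-digit logged values
    assert abs(loss - (0.5 * simple + pruned)) < 5e-4, (loss, simple, pruned)
```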
limit=11.524000000000001 +2024-08-03 02:25:23,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=18810.0, ans=0.24165000000000003 +2024-08-03 02:25:29,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=18810.0, ans=0.0 +2024-08-03 02:25:29,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=18810.0, ans=0.125 +2024-08-03 02:25:32,400 INFO [train.py:1114] (1/4) Epoch 2, batch 1500, loss[loss=0.3357, simple_loss=0.3882, pruned_loss=0.1416, over 13412.00 frames. ], tot_loss[loss=0.3312, simple_loss=0.3821, pruned_loss=0.1401, over 2641610.36 frames. ], batch size: 39, lr: 4.01e-02, grad_scale: 16.0 +2024-08-03 02:25:50,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=18920.0, ans=0.125 +2024-08-03 02:25:51,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18920.0, ans=0.125 +2024-08-03 02:25:53,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=18920.0, ans=0.125 +2024-08-03 02:25:55,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18920.0, ans=0.11080000000000001 +2024-08-03 02:26:07,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=18956.666666666668, ans=0.0067485507246376805 +2024-08-03 02:26:19,640 INFO [train.py:1114] (1/4) Epoch 2, batch 1550, loss[loss=0.302, simple_loss=0.3703, pruned_loss=0.1169, over 13419.00 frames. ], tot_loss[loss=0.3321, simple_loss=0.3823, pruned_loss=0.1409, over 2631324.05 frames. ], batch size: 41, lr: 4.00e-02, grad_scale: 16.0 +2024-08-03 02:26:22,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19030.0, ans=0.125 +2024-08-03 02:26:34,437 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:26:40,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=19066.666666666668, ans=0.125 +2024-08-03 02:26:40,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=19066.666666666668, ans=0.0 +2024-08-03 02:26:41,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.87 vs. 
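The `tot_loss` side of those lines is aggregated "over ~2.6M frames", a figure that stays roughly constant while training streams on, and the frame count is fractional (2641610.36). Both details suggest a frame-weighted running average whose history is decayed each step rather than a raw cumulative sum. A sketch under that assumption; the decay constant is invented for illustration:

```python
# Sketch only: decayed, frame-weighted running average of the batch losses.
class RunningLoss:
    def __init__(self, decay: float = 0.999):  # decay value is an assumption
        self.decay = decay
        self.weighted_loss = 0.0
        self.frames = 0.0  # becomes fractional once decay kicks in

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.weighted_loss = (self.decay * self.weighted_loss
                              + batch_loss * batch_frames)
        self.frames = self.decay * self.frames + batch_frames
        return self.weighted_loss / self.frames  # the reported tot_loss
```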
limit=9.766666666666667 +2024-08-03 02:26:42,595 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.587e+02 1.878e+02 2.318e+02 8.334e+02, threshold=3.756e+02, percent-clipped=6.0 +2024-08-03 02:27:06,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=19176.666666666668, ans=0.0 +2024-08-03 02:27:08,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19176.666666666668, ans=0.125 +2024-08-03 02:27:11,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=19176.666666666668, ans=0.058233333333333304 +2024-08-03 02:27:15,320 INFO [train.py:1114] (1/4) Epoch 2, batch 1600, loss[loss=0.3648, simple_loss=0.4199, pruned_loss=0.1548, over 13569.00 frames. ], tot_loss[loss=0.3315, simple_loss=0.3817, pruned_loss=0.1407, over 2623625.62 frames. ], batch size: 39, lr: 4.00e-02, grad_scale: 32.0 +2024-08-03 02:27:35,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=19286.666666666668, ans=0.0 +2024-08-03 02:27:38,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=19286.666666666668, ans=0.125 +2024-08-03 02:27:53,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=19323.333333333332, ans=14.74625 +2024-08-03 02:28:02,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19360.0, ans=0.125 +2024-08-03 02:28:03,911 INFO [train.py:1114] (1/4) Epoch 2, batch 1650, loss[loss=0.3067, simple_loss=0.3736, pruned_loss=0.1199, over 13332.00 frames. ], tot_loss[loss=0.331, simple_loss=0.3812, pruned_loss=0.1404, over 2621128.77 frames. ], batch size: 40, lr: 3.99e-02, grad_scale: 16.0 +2024-08-03 02:28:22,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.47 vs. 
limit=14.7875 +2024-08-03 02:28:40,135 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.530e+02 1.782e+02 2.174e+02 3.857e+02, threshold=3.564e+02, percent-clipped=2.0 +2024-08-03 02:28:40,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=19470.0, ans=0.0066369565217391305 +2024-08-03 02:28:51,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19506.666666666668, ans=0.125 +2024-08-03 02:28:53,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=19506.666666666668, ans=0.125 +2024-08-03 02:29:00,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19543.333333333332, ans=0.1045666666666667 +2024-08-03 02:29:04,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=19543.333333333332, ans=0.0 +2024-08-03 02:29:07,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=19580.0, ans=0.0 +2024-08-03 02:29:08,343 INFO [train.py:1114] (1/4) Epoch 2, batch 1700, loss[loss=0.276, simple_loss=0.3278, pruned_loss=0.1121, over 13259.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3802, pruned_loss=0.1393, over 2630399.72 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0 +2024-08-03 02:29:24,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19616.666666666668, ans=0.10383333333333333 +2024-08-03 02:29:25,333 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.149e-02 +2024-08-03 02:29:32,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.15 vs. limit=14.870000000000001 +2024-08-03 02:29:33,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=19653.333333333332, ans=0.006597101449275363 +2024-08-03 02:29:45,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=19690.0, ans=0.125 +2024-08-03 02:29:57,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.06 vs. limit=22.295 +2024-08-03 02:29:59,362 INFO [train.py:1114] (1/4) Epoch 2, batch 1750, loss[loss=0.2849, simple_loss=0.3334, pruned_loss=0.1182, over 13531.00 frames. ], tot_loss[loss=0.3298, simple_loss=0.3803, pruned_loss=0.1397, over 2633554.38 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0 +2024-08-03 02:30:08,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.97 vs. 
limit=14.925 +2024-08-03 02:30:09,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=19800.0, ans=0.006565217391304348 +2024-08-03 02:30:13,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=19800.0, ans=0.025 +2024-08-03 02:30:18,375 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.430e+02 1.665e+02 2.047e+02 3.989e+02, threshold=3.330e+02, percent-clipped=2.0 +2024-08-03 02:30:26,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=14.938749999999999 +2024-08-03 02:30:30,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19873.333333333332, ans=0.1012666666666667 +2024-08-03 02:30:31,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=11.949333333333332 +2024-08-03 02:30:32,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.50 vs. limit=14.9525 +2024-08-03 02:30:33,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=19873.333333333332, ans=0.006549275362318841 +2024-08-03 02:30:43,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=19910.0, ans=0.025 +2024-08-03 02:30:46,003 INFO [train.py:1114] (1/4) Epoch 2, batch 1800, loss[loss=0.3281, simple_loss=0.3895, pruned_loss=0.1334, over 13542.00 frames. ], tot_loss[loss=0.3302, simple_loss=0.3806, pruned_loss=0.1399, over 2634628.52 frames. ], batch size: 38, lr: 3.97e-02, grad_scale: 16.0 +2024-08-03 02:30:48,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=19946.666666666668, ans=0.125 +2024-08-03 02:30:52,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.90 vs. limit=14.973333333333334 +2024-08-03 02:31:06,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=19983.333333333332, ans=0.20058333333333345 +2024-08-03 02:31:11,205 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:31:11,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20020.0, ans=0.1 +2024-08-03 02:31:21,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=20056.666666666668, ans=0.0 +2024-08-03 02:31:31,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=20093.333333333332, ans=0.125 +2024-08-03 02:31:37,411 INFO [train.py:1114] (1/4) Epoch 2, batch 1850, loss[loss=0.3076, simple_loss=0.3718, pruned_loss=0.1217, over 13391.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3791, pruned_loss=0.1384, over 2638299.50 frames. 
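The `scaling.py:1024` Whitening lines compare a per-module covariance statistic ("metric") against a limit, presumably applying a corrective penalty only while metric > limit. The limits themselves creep upward over this span (the `out_whiten` family goes 14.54, 14.7875, 14.87, ..., 15.0), and one entry a few lines above even prints a `...whiten.whitening_limit` value directly, so the limit is evidently scheduled like everything else here. A minimal sketch of one plausible metric, equal to 1.0 for perfectly white (isotropic) features and growing with covariance spread; the exact formula in `scaling.py` may differ:

```python
# Sketch only: an anisotropy metric of the kind printed as "metric=X vs. limit=Y".
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """x: (num_frames, num_channels). Returns mean(eig^2) / mean(eig)^2
    of the feature covariance: 1.0 when white, larger when anisotropic."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]                     # (C, C) covariance
    mean_eig = torch.diagonal(cov).mean()            # trace/C = mean eigenvalue
    mean_eig_sq = (cov * cov).sum() / cov.shape[0]   # trace(C@C)/C = mean eig^2
    return mean_eig_sq / mean_eig.clamp(min=1e-20) ** 2

# A whitening penalty would be active only while whitening_metric(x) > limit.
```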
], batch size: 39, lr: 3.96e-02, grad_scale: 16.0 +2024-08-03 02:31:53,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=20130.0, ans=10.0 +2024-08-03 02:31:58,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20166.666666666668, ans=0.125 +2024-08-03 02:32:04,607 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.463e+02 1.801e+02 2.661e+02 5.332e+02, threshold=3.601e+02, percent-clipped=10.0 +2024-08-03 02:32:12,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20203.333333333332, ans=0.1 +2024-08-03 02:32:20,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20240.0, ans=0.1 +2024-08-03 02:32:22,761 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.30 vs. limit=15.0 +2024-08-03 02:32:32,393 INFO [train.py:1114] (1/4) Epoch 2, batch 1900, loss[loss=0.3297, simple_loss=0.3741, pruned_loss=0.1427, over 13342.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3788, pruned_loss=0.1377, over 2640889.67 frames. ], batch size: 40, lr: 3.96e-02, grad_scale: 16.0 +2024-08-03 02:32:57,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=20386.666666666668, ans=0.125 +2024-08-03 02:33:11,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=20423.333333333332, ans=0.006429710144927537 +2024-08-03 02:33:17,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20460.0, ans=0.1 +2024-08-03 02:33:27,796 INFO [train.py:1114] (1/4) Epoch 2, batch 1950, loss[loss=0.309, simple_loss=0.3638, pruned_loss=0.1271, over 13568.00 frames. ], tot_loss[loss=0.33, simple_loss=0.3815, pruned_loss=0.1392, over 2647531.12 frames. ], batch size: 36, lr: 3.95e-02, grad_scale: 16.0 +2024-08-03 02:33:46,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.390e+02 1.603e+02 1.917e+02 3.719e+02, threshold=3.206e+02, percent-clipped=1.0 +2024-08-03 02:33:52,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=20570.0, ans=0.125 +2024-08-03 02:34:14,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.00 vs. limit=6.0 +2024-08-03 02:34:22,758 INFO [train.py:1114] (1/4) Epoch 2, batch 2000, loss[loss=0.2894, simple_loss=0.3416, pruned_loss=0.1187, over 13521.00 frames. ], tot_loss[loss=0.3312, simple_loss=0.3826, pruned_loss=0.1399, over 2637981.04 frames. 
], batch size: 31, lr: 3.94e-02, grad_scale: 32.0 +2024-08-03 02:34:22,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=20680.0, ans=0.006373913043478261 +2024-08-03 02:34:42,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=20716.666666666668, ans=0.125 +2024-08-03 02:34:44,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.17 vs. limit=22.5 +2024-08-03 02:34:46,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20753.333333333332, ans=0.1 +2024-08-03 02:35:10,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=20826.666666666668, ans=0.5 +2024-08-03 02:35:13,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.24 vs. limit=10.0 +2024-08-03 02:35:14,581 INFO [train.py:1114] (1/4) Epoch 2, batch 2050, loss[loss=0.3434, simple_loss=0.376, pruned_loss=0.1554, over 13441.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.381, pruned_loss=0.1394, over 2635708.28 frames. ], batch size: 32, lr: 3.94e-02, grad_scale: 32.0 +2024-08-03 02:35:25,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.65 vs. limit=15.0 +2024-08-03 02:35:27,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=20900.0, ans=0.125 +2024-08-03 02:35:28,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20900.0, ans=0.0 +2024-08-03 02:35:36,030 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.423e+02 1.683e+02 2.101e+02 5.163e+02, threshold=3.365e+02, percent-clipped=3.0 +2024-08-03 02:35:45,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=20936.666666666668, ans=0.5 +2024-08-03 02:35:55,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20973.333333333332, ans=0.0 +2024-08-03 02:36:02,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21010.0, ans=0.125 +2024-08-03 02:36:03,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=21010.0, ans=0.006302173913043479 +2024-08-03 02:36:04,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=21010.0, ans=0.2 +2024-08-03 02:36:09,020 INFO [train.py:1114] (1/4) Epoch 2, batch 2100, loss[loss=0.3157, simple_loss=0.3748, pruned_loss=0.1283, over 13547.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3797, pruned_loss=0.138, over 2640504.50 frames. 
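The `grad_scale` printed at the end of each `train.py:1114` line bounces between 8.0, 16.0 and 32.0 across this excerpt, the signature of dynamic loss scaling under fp16 mixed precision: halve the scale when a step overflows, double it back after a sustained run of clean steps. Stock PyTorch behaves this way out of the box; the run may well use a custom scaler, so treat the following as a generic sketch:

```python
# Sketch only: standard dynamic loss scaling with torch.cuda.amp.
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0,     # values seen in this log
                                   growth_factor=2.0,   # double when stable
                                   backoff_factor=0.5,  # halve on inf/nan grads
                                   growth_interval=2000)

def training_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # internally skipped if gradients overflowed
    scaler.update()         # adjusts the scale the log prints as grad_scale
    return loss.detach()
```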
], batch size: 37, lr: 3.93e-02, grad_scale: 32.0 +2024-08-03 02:36:15,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=21046.666666666668, ans=0.035 +2024-08-03 02:36:24,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=21083.333333333332, ans=0.2 +2024-08-03 02:36:25,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21083.333333333332, ans=0.125 +2024-08-03 02:36:50,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.09 vs. limit=15.0 +2024-08-03 02:36:50,638 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:36:56,214 INFO [train.py:1114] (1/4) Epoch 2, batch 2150, loss[loss=0.3149, simple_loss=0.375, pruned_loss=0.1274, over 13552.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3777, pruned_loss=0.1367, over 2648250.27 frames. ], batch size: 36, lr: 3.93e-02, grad_scale: 16.0 +2024-08-03 02:36:58,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.25 vs. limit=6.0 +2024-08-03 02:37:11,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0 +2024-08-03 02:37:17,960 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.342e+02 1.575e+02 1.914e+02 2.983e+02, threshold=3.149e+02, percent-clipped=0.0 +2024-08-03 02:37:33,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.55 vs. limit=22.5 +2024-08-03 02:37:38,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=21376.666666666668, ans=0.05 +2024-08-03 02:37:42,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=21376.666666666668, ans=0.125 +2024-08-03 02:37:46,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21376.666666666668, ans=0.125 +2024-08-03 02:37:47,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=21413.333333333332, ans=0.035 +2024-08-03 02:37:48,037 INFO [train.py:1114] (1/4) Epoch 2, batch 2200, loss[loss=0.3203, simple_loss=0.375, pruned_loss=0.1328, over 13402.00 frames. ], tot_loss[loss=0.326, simple_loss=0.378, pruned_loss=0.137, over 2646499.65 frames. ], batch size: 39, lr: 3.92e-02, grad_scale: 16.0 +2024-08-03 02:37:54,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=21413.333333333332, ans=0.025 +2024-08-03 02:37:55,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=21413.333333333332, ans=0.0062144927536231884 +2024-08-03 02:37:56,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.12 vs. 
limit=15.0 +2024-08-03 02:38:17,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=21523.333333333332, ans=0.2 +2024-08-03 02:38:22,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21523.333333333332, ans=0.0 +2024-08-03 02:38:49,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=21560.0, ans=0.0 +2024-08-03 02:38:51,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=21560.0, ans=0.0 +2024-08-03 02:39:28,809 INFO [train.py:1114] (1/4) Epoch 2, batch 2250, loss[loss=0.2903, simple_loss=0.357, pruned_loss=0.1118, over 13352.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3772, pruned_loss=0.1364, over 2643730.53 frames. ], batch size: 37, lr: 3.91e-02, grad_scale: 16.0 +2024-08-03 02:39:48,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=12.0 +2024-08-03 02:39:56,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.60 vs. limit=15.0 +2024-08-03 02:39:56,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.84 vs. limit=15.0 +2024-08-03 02:39:57,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0 +2024-08-03 02:40:05,990 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.392e+02 1.558e+02 1.925e+02 3.298e+02, threshold=3.115e+02, percent-clipped=1.0 +2024-08-03 02:40:06,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=21670.0, ans=0.5 +2024-08-03 02:40:13,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=12.0 +2024-08-03 02:40:15,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21706.666666666668, ans=0.1 +2024-08-03 02:40:20,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21706.666666666668, ans=0.1 +2024-08-03 02:40:25,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=21743.333333333332, ans=0.125 +2024-08-03 02:40:34,542 INFO [train.py:1114] (1/4) Epoch 2, batch 2300, loss[loss=0.2781, simple_loss=0.3261, pruned_loss=0.115, over 13591.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3753, pruned_loss=0.1358, over 2639807.45 frames. 
], batch size: 33, lr: 3.91e-02, grad_scale: 8.0 +2024-08-03 02:40:53,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=21816.666666666668, ans=0.025 +2024-08-03 02:41:04,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=21890.0, ans=0.006110869565217392 +2024-08-03 02:41:05,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=21890.0, ans=0.125 +2024-08-03 02:41:19,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=21926.666666666668, ans=0.125 +2024-08-03 02:41:19,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=21926.666666666668, ans=0.125 +2024-08-03 02:41:21,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=21926.666666666668, ans=0.05 +2024-08-03 02:41:23,902 INFO [train.py:1114] (1/4) Epoch 2, batch 2350, loss[loss=0.3301, simple_loss=0.3848, pruned_loss=0.1377, over 13554.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3742, pruned_loss=0.1348, over 2642027.41 frames. ], batch size: 38, lr: 3.90e-02, grad_scale: 8.0 +2024-08-03 02:41:25,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=21963.333333333332, ans=0.125 +2024-08-03 02:41:30,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=21963.333333333332, ans=0.0 +2024-08-03 02:41:44,433 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.437e+02 1.623e+02 1.973e+02 3.440e+02, threshold=3.245e+02, percent-clipped=2.0 +2024-08-03 02:42:10,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=22110.0, ans=0.0 +2024-08-03 02:42:15,869 INFO [train.py:1114] (1/4) Epoch 2, batch 2400, loss[loss=0.2742, simple_loss=0.343, pruned_loss=0.1027, over 13532.00 frames. ], tot_loss[loss=0.323, simple_loss=0.3751, pruned_loss=0.1355, over 2643256.58 frames. ], batch size: 35, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:42:18,223 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.53 vs. limit=6.0 +2024-08-03 02:42:25,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22146.666666666668, ans=0.0 +2024-08-03 02:42:29,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=22183.333333333332, ans=0.125 +2024-08-03 02:42:53,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=22256.666666666668, ans=0.025 +2024-08-03 02:43:04,907 INFO [train.py:1114] (1/4) Epoch 2, batch 2450, loss[loss=0.334, simple_loss=0.3862, pruned_loss=0.1409, over 13362.00 frames. ], tot_loss[loss=0.3253, simple_loss=0.3772, pruned_loss=0.1367, over 2633241.91 frames. 
], batch size: 37, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:43:23,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=15.0 +2024-08-03 02:43:27,809 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.465e+02 1.678e+02 2.067e+02 5.260e+02, threshold=3.356e+02, percent-clipped=2.0 +2024-08-03 02:43:28,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=22403.333333333332, ans=0.025 +2024-08-03 02:43:29,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22403.333333333332, ans=0.125 +2024-08-03 02:43:54,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=22440.0, ans=0.0 +2024-08-03 02:44:57,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=22476.666666666668, ans=0.2 +2024-08-03 02:45:11,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.65 vs. limit=15.0 +2024-08-03 02:45:17,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=22476.666666666668, ans=0.09899494936611666 +2024-08-03 02:45:36,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.49 vs. limit=15.0 +2024-08-03 02:45:36,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=15.0 +2024-08-03 02:45:37,295 INFO [train.py:1114] (1/4) Epoch 2, batch 2500, loss[loss=0.3354, simple_loss=0.3898, pruned_loss=0.1405, over 13405.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3763, pruned_loss=0.1359, over 2637818.14 frames. ], batch size: 39, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:47:09,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=22586.666666666668, ans=0.125 +2024-08-03 02:48:02,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22586.666666666668, ans=0.125 +2024-08-03 02:48:55,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22623.333333333332, ans=0.1 +2024-08-03 02:51:29,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.20 vs. limit=15.0 +2024-08-03 02:51:39,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.07 vs. limit=15.0 +2024-08-03 02:52:09,588 INFO [train.py:1114] (1/4) Epoch 2, batch 2550, loss[loss=0.2961, simple_loss=0.3454, pruned_loss=0.1234, over 13517.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3752, pruned_loss=0.135, over 2639083.20 frames. 
], batch size: 31, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:52:16,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=22696.666666666668, ans=0.0 +2024-08-03 02:53:12,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.86 vs. limit=22.5 +2024-08-03 02:53:24,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=22770.0, ans=0.125 +2024-08-03 02:53:24,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0 +2024-08-03 02:53:26,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=22770.0, ans=0.0059195652173913045 +2024-08-03 02:53:31,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.373e+02 1.661e+02 2.107e+02 4.285e+02, threshold=3.322e+02, percent-clipped=3.0 +2024-08-03 02:53:59,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=22770.0, ans=0.125 +2024-08-03 02:54:41,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=22843.333333333332, ans=0.005903623188405797 +2024-08-03 02:54:41,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.35 vs. limit=22.5 +2024-08-03 02:54:42,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.99 vs. limit=10.0 +2024-08-03 02:54:52,180 INFO [train.py:1114] (1/4) Epoch 2, batch 2600, loss[loss=0.3287, simple_loss=0.382, pruned_loss=0.1377, over 13554.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3761, pruned_loss=0.1354, over 2637192.27 frames. ], batch size: 36, lr: 3.87e-02, grad_scale: 8.0 +2024-08-03 02:54:52,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=22880.0, ans=0.125 +2024-08-03 02:54:55,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=22880.0, ans=0.2 +2024-08-03 02:56:03,858 INFO [train.py:1114] (1/4) Epoch 2, batch 2650, loss[loss=0.3567, simple_loss=0.4016, pruned_loss=0.1558, over 13305.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3765, pruned_loss=0.1355, over 2639209.65 frames. ], batch size: 46, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:56:10,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=23063.333333333332, ans=0.125 +2024-08-03 02:56:16,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=23100.0, ans=0.125 +2024-08-03 02:56:17,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=23100.0, ans=0.125 +2024-08-03 02:56:19,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.93 vs. 
limit=15.0 +2024-08-03 02:56:27,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=23136.666666666668, ans=0.2 +2024-08-03 02:56:27,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.447e+02 1.734e+02 2.047e+02 3.463e+02, threshold=3.469e+02, percent-clipped=1.0 +2024-08-03 02:56:55,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=23173.333333333332, ans=0.005831884057971015 +2024-08-03 02:58:07,638 INFO [train.py:1114] (1/4) Epoch 2, batch 2700, loss[loss=0.2877, simple_loss=0.353, pruned_loss=0.1112, over 13543.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.377, pruned_loss=0.1356, over 2636954.88 frames. ], batch size: 40, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:58:16,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=23246.666666666668, ans=0.125 +2024-08-03 02:58:27,754 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.35 vs. limit=15.0 +2024-08-03 02:58:37,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.50 vs. limit=10.0 +2024-08-03 02:59:55,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=23356.666666666668, ans=0.07 +2024-08-03 03:00:36,558 INFO [train.py:1114] (1/4) Epoch 2, batch 2750, loss[loss=0.3328, simple_loss=0.3786, pruned_loss=0.1435, over 13336.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3749, pruned_loss=0.1343, over 2635392.30 frames. ], batch size: 34, lr: 3.85e-02, grad_scale: 8.0 +2024-08-03 03:00:56,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23466.666666666668, ans=0.1 +2024-08-03 03:01:06,451 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.416e+02 1.639e+02 1.960e+02 3.073e+02, threshold=3.277e+02, percent-clipped=0.0 +2024-08-03 03:01:06,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.82 vs. limit=22.5 +2024-08-03 03:01:12,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=23540.0, ans=0.2 +2024-08-03 03:01:15,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=23540.0, ans=0.2 +2024-08-03 03:01:18,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=23540.0, ans=15.0 +2024-08-03 03:01:20,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=23540.0, ans=0.125 +2024-08-03 03:01:38,770 INFO [train.py:1114] (1/4) Epoch 2, batch 2800, loss[loss=0.4201, simple_loss=0.4336, pruned_loss=0.2033, over 8712.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3759, pruned_loss=0.1352, over 2626105.33 frames. 
], batch size: 96, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:01:48,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=23613.333333333332, ans=0.125 +2024-08-03 03:01:50,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.53 vs. limit=15.0 +2024-08-03 03:01:50,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=23613.333333333332, ans=0.125 +2024-08-03 03:02:23,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=23686.666666666668, ans=0.09899494936611666 +2024-08-03 03:02:38,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=23760.0, ans=0.025 +2024-08-03 03:02:38,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.25 vs. limit=22.5 +2024-08-03 03:02:45,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=23760.0, ans=0.1 +2024-08-03 03:02:47,284 INFO [train.py:1114] (1/4) Epoch 2, batch 2850, loss[loss=0.3366, simple_loss=0.3921, pruned_loss=0.1406, over 13370.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3765, pruned_loss=0.136, over 2620206.59 frames. ], batch size: 35, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:02:48,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=23796.666666666668, ans=0.125 +2024-08-03 03:03:04,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.93 vs. limit=15.0 +2024-08-03 03:03:09,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=23870.0, ans=0.5 +2024-08-03 03:03:09,962 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.500e+02 1.764e+02 2.105e+02 5.677e+02, threshold=3.527e+02, percent-clipped=3.0 +2024-08-03 03:03:11,034 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:03:12,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23870.0, ans=0.125 +2024-08-03 03:03:14,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=23870.0, ans=0.125 +2024-08-03 03:03:19,318 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:04:03,995 INFO [train.py:1114] (1/4) Epoch 2, batch 2900, loss[loss=0.2766, simple_loss=0.3375, pruned_loss=0.1078, over 13366.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.377, pruned_loss=0.1354, over 2631754.72 frames. 
], batch size: 36, lr: 3.83e-02, grad_scale: 16.0 +2024-08-03 03:04:15,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=23980.0, ans=0.125 +2024-08-03 03:04:40,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.41 vs. limit=10.0 +2024-08-03 03:04:52,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24090.0, ans=0.125 +2024-08-03 03:05:06,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0 +2024-08-03 03:05:09,683 INFO [train.py:1114] (1/4) Epoch 2, batch 2950, loss[loss=0.3138, simple_loss=0.3649, pruned_loss=0.1314, over 13337.00 frames. ], tot_loss[loss=0.3231, simple_loss=0.3758, pruned_loss=0.1352, over 2631181.10 frames. ], batch size: 34, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:05:11,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=24163.333333333332, ans=0.025 +2024-08-03 03:05:38,154 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.432e+02 1.719e+02 2.227e+02 3.350e+02, threshold=3.438e+02, percent-clipped=0.0 +2024-08-03 03:06:07,500 INFO [train.py:1114] (1/4) Epoch 2, batch 3000, loss[loss=0.362, simple_loss=0.4135, pruned_loss=0.1552, over 13539.00 frames. ], tot_loss[loss=0.3201, simple_loss=0.3737, pruned_loss=0.1333, over 2630494.65 frames. ], batch size: 37, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:06:07,501 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 03:06:29,640 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.2511, simple_loss=0.3433, pruned_loss=0.07947, over 944034.00 frames. +2024-08-03 03:06:29,640 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 03:06:32,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=24346.666666666668, ans=0.07 +2024-08-03 03:06:41,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24383.333333333332, ans=0.1 +2024-08-03 03:06:42,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=24383.333333333332, ans=0.125 +2024-08-03 03:06:56,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=24456.666666666668, ans=0.0055528985507246375 +2024-08-03 03:07:15,183 INFO [train.py:1114] (1/4) Epoch 2, batch 3050, loss[loss=0.2747, simple_loss=0.3407, pruned_loss=0.1044, over 13535.00 frames. ], tot_loss[loss=0.3202, simple_loss=0.3738, pruned_loss=0.1333, over 2626982.77 frames. ], batch size: 35, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:07:19,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=24530.0, ans=0.125 +2024-08-03 03:07:21,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.62 vs. 
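The block above is the mid-epoch evaluation: `train.py:1137` announces "Computing validation loss", `train.py:1146` reports it ("validation: loss=0.2511 ... over 944034.00 frames."), and `train.py:1147` logs peak memory. The frame count is identical when epoch 3 validates later in this log, so the dev set is fixed and traversed in full, with the reported loss weighted by frames. A minimal sketch of such a loop, with the model, loader and loss function supplied by the caller:

```python
# Sketch only: frame-weighted validation loss over a fixed dev set.
import torch

@torch.no_grad()
def validate(model, dev_loader, compute_loss):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in dev_loader:
        loss_sum, num_frames = compute_loss(model, batch)  # summed over batch
        tot_loss += float(loss_sum)
        tot_frames += float(num_frames)
    model.train()
    return tot_loss / tot_frames  # printed as "validation: loss=..."
```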
limit=12.0 +2024-08-03 03:07:27,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24566.666666666668, ans=0.1 +2024-08-03 03:07:33,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=24603.333333333332, ans=0.125 +2024-08-03 03:07:37,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=24603.333333333332, ans=0.005521014492753624 +2024-08-03 03:07:37,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.349e+02 1.521e+02 1.830e+02 3.051e+02, threshold=3.043e+02, percent-clipped=0.0 +2024-08-03 03:07:52,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=24640.0, ans=0.125 +2024-08-03 03:07:53,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=24640.0, ans=0.125 +2024-08-03 03:08:04,763 INFO [train.py:1114] (1/4) Epoch 2, batch 3100, loss[loss=0.3245, simple_loss=0.3783, pruned_loss=0.1353, over 13273.00 frames. ], tot_loss[loss=0.3195, simple_loss=0.3732, pruned_loss=0.1329, over 2627434.23 frames. ], batch size: 46, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:08:26,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=24750.0, ans=10.0 +2024-08-03 03:08:48,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.36 vs. limit=15.0 +2024-08-03 03:09:07,676 INFO [train.py:1114] (1/4) Epoch 2, batch 3150, loss[loss=0.338, simple_loss=0.392, pruned_loss=0.142, over 13114.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3731, pruned_loss=0.1327, over 2628721.00 frames. ], batch size: 48, lr: 3.80e-02, grad_scale: 8.0 +2024-08-03 03:09:15,889 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:09:29,251 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.526e+02 1.877e+02 2.299e+02 4.480e+02, threshold=3.753e+02, percent-clipped=6.0 +2024-08-03 03:09:52,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.08 vs. limit=15.0 +2024-08-03 03:09:53,508 INFO [train.py:1114] (1/4) Epoch 2, batch 3200, loss[loss=0.2759, simple_loss=0.3452, pruned_loss=0.1033, over 13558.00 frames. ], tot_loss[loss=0.3187, simple_loss=0.3724, pruned_loss=0.1325, over 2634638.57 frames. 
], batch size: 37, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:10:06,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=25080.0, ans=0.0 +2024-08-03 03:10:09,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=25116.666666666668, ans=0.07 +2024-08-03 03:10:13,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=25116.666666666668, ans=0.2 +2024-08-03 03:10:17,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=25153.333333333332, ans=0.1 +2024-08-03 03:10:28,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=25190.0, ans=0.0 +2024-08-03 03:10:28,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=25190.0, ans=0.2 +2024-08-03 03:10:46,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=25226.666666666668, ans=0.2 +2024-08-03 03:10:52,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=25226.666666666668, ans=0.125 +2024-08-03 03:10:56,396 INFO [train.py:1114] (1/4) Epoch 2, batch 3250, loss[loss=0.3107, simple_loss=0.3683, pruned_loss=0.1266, over 13396.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3729, pruned_loss=0.1324, over 2638935.68 frames. ], batch size: 38, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:11:09,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=25263.333333333332, ans=0.125 +2024-08-03 03:11:12,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=25263.333333333332, ans=0.04949747468305833 +2024-08-03 03:11:13,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=25263.333333333332, ans=0.005377536231884058 +2024-08-03 03:11:20,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.30 vs. 
limit=15.0 +2024-08-03 03:11:32,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=25336.666666666668, ans=0.125 +2024-08-03 03:11:34,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.442e+02 1.550e+02 1.764e+02 2.865e+02, threshold=3.101e+02, percent-clipped=0.0 +2024-08-03 03:11:49,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=25336.666666666668, ans=0.125 +2024-08-03 03:11:51,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=25373.333333333332, ans=0.0 +2024-08-03 03:12:02,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=25410.0, ans=0.125 +2024-08-03 03:12:05,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=25410.0, ans=0.025 +2024-08-03 03:12:07,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=25410.0, ans=0.125 +2024-08-03 03:12:09,624 INFO [train.py:1114] (1/4) Epoch 2, batch 3300, loss[loss=0.3784, simple_loss=0.4139, pruned_loss=0.1715, over 12884.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3718, pruned_loss=0.1318, over 2639314.84 frames. ], batch size: 52, lr: 3.78e-02, grad_scale: 16.0 +2024-08-03 03:12:19,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=25483.333333333332, ans=0.125 +2024-08-03 03:12:19,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.80 vs. limit=10.0 +2024-08-03 03:12:22,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=25483.333333333332, ans=0.0 +2024-08-03 03:12:23,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.20 vs. limit=15.0 +2024-08-03 03:12:30,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=25520.0, ans=0.07 +2024-08-03 03:12:37,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=25556.666666666668, ans=0.125 +2024-08-03 03:12:53,076 INFO [train.py:1114] (1/4) Epoch 2, batch 3350, loss[loss=0.3393, simple_loss=0.3942, pruned_loss=0.1422, over 13016.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.3733, pruned_loss=0.1328, over 2627901.49 frames. ], batch size: 48, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:12:59,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.83 vs. 
limit=15.0 +2024-08-03 03:13:01,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25666.666666666668, ans=0.125 +2024-08-03 03:13:14,122 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.415e+02 1.657e+02 2.011e+02 3.247e+02, threshold=3.315e+02, percent-clipped=1.0 +2024-08-03 03:13:21,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=25740.0, ans=0.035 +2024-08-03 03:13:23,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.59 vs. limit=6.0 +2024-08-03 03:13:25,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=25740.0, ans=0.125 +2024-08-03 03:13:33,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=25776.666666666668, ans=0.125 +2024-08-03 03:13:37,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=25776.666666666668, ans=0.2 +2024-08-03 03:13:39,081 INFO [train.py:1114] (1/4) Epoch 2, batch 3400, loss[loss=0.2569, simple_loss=0.3203, pruned_loss=0.09673, over 13528.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.373, pruned_loss=0.1327, over 2623487.54 frames. ], batch size: 31, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:13:39,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.61 vs. limit=15.0 +2024-08-03 03:13:58,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=25886.666666666668, ans=0.2 +2024-08-03 03:13:59,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=25886.666666666668, ans=0.0 +2024-08-03 03:14:17,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=25960.0, ans=0.025 +2024-08-03 03:14:23,920 INFO [train.py:1114] (1/4) Epoch 2, batch 3450, loss[loss=0.3232, simple_loss=0.3815, pruned_loss=0.1324, over 12958.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3722, pruned_loss=0.1318, over 2627785.89 frames. ], batch size: 52, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:14:27,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=25996.666666666668, ans=0.0 +2024-08-03 03:14:35,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26033.333333333332, ans=0.125 +2024-08-03 03:14:42,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.19 vs. 
limit=15.0 +2024-08-03 03:14:44,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.439e+02 1.568e+02 1.776e+02 4.751e+02, threshold=3.136e+02, percent-clipped=2.0 +2024-08-03 03:14:50,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=26106.666666666668, ans=0.0051942028985507245 +2024-08-03 03:15:06,936 INFO [train.py:1114] (1/4) Epoch 2, batch 3500, loss[loss=0.2857, simple_loss=0.3428, pruned_loss=0.1143, over 13530.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3712, pruned_loss=0.1318, over 2628599.72 frames. ], batch size: 34, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:15:32,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=26290.0, ans=0.0 +2024-08-03 03:15:45,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.15 vs. limit=15.0 +2024-08-03 03:15:47,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=15.0 +2024-08-03 03:15:50,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=15.0 +2024-08-03 03:15:52,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=26363.333333333332, ans=0.025 +2024-08-03 03:15:53,621 INFO [train.py:1114] (1/4) Epoch 2, batch 3550, loss[loss=0.3327, simple_loss=0.3877, pruned_loss=0.1388, over 12374.00 frames. ], tot_loss[loss=0.3202, simple_loss=0.374, pruned_loss=0.1332, over 2627527.55 frames. ], batch size: 58, lr: 3.75e-02, grad_scale: 16.0 +2024-08-03 03:15:57,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=26363.333333333332, ans=0.2 +2024-08-03 03:16:00,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=26363.333333333332, ans=0.1 +2024-08-03 03:16:11,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0 +2024-08-03 03:16:14,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=26436.666666666668, ans=0.2 +2024-08-03 03:16:15,275 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.421e+02 1.616e+02 2.006e+02 3.426e+02, threshold=3.231e+02, percent-clipped=2.0 +2024-08-03 03:16:39,548 INFO [train.py:1114] (1/4) Epoch 2, batch 3600, loss[loss=0.3576, simple_loss=0.3922, pruned_loss=0.1615, over 8751.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.3801, pruned_loss=0.1399, over 2487604.82 frames. ], batch size: 96, lr: 3.74e-02, grad_scale: 32.0 +2024-08-03 03:17:55,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26693.333333333332, ans=0.07 +2024-08-03 03:18:02,777 INFO [train.py:1114] (1/4) Epoch 3, batch 0, loss[loss=0.2785, simple_loss=0.3417, pruned_loss=0.1077, over 13328.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3417, pruned_loss=0.1077, over 13328.00 frames. 
], batch size: 33, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:18:02,777 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 03:18:12,653 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2631, simple_loss=0.3546, pruned_loss=0.08577, over 944034.00 frames. +2024-08-03 03:18:12,654 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 03:18:14,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=26693.333333333332, ans=0.125 +2024-08-03 03:18:28,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=26730.0, ans=0.125 +2024-08-03 03:18:32,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=26766.666666666668, ans=0.025 +2024-08-03 03:18:33,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.59 vs. limit=22.5 +2024-08-03 03:18:45,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.401e+02 1.625e+02 1.929e+02 3.724e+02, threshold=3.249e+02, percent-clipped=3.0 +2024-08-03 03:18:59,362 INFO [train.py:1114] (1/4) Epoch 3, batch 50, loss[loss=0.2906, simple_loss=0.34, pruned_loss=0.1206, over 13422.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3794, pruned_loss=0.1362, over 578420.55 frames. ], batch size: 32, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:19:13,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=26913.333333333332, ans=0.125 +2024-08-03 03:19:36,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.77 vs. limit=15.0 +2024-08-03 03:19:48,778 INFO [train.py:1114] (1/4) Epoch 3, batch 100, loss[loss=0.3209, simple_loss=0.3671, pruned_loss=0.1373, over 13524.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3782, pruned_loss=0.1351, over 1025957.04 frames. ], batch size: 35, lr: 3.54e-02, grad_scale: 32.0 +2024-08-03 03:20:01,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=27096.666666666668, ans=0.125 +2024-08-03 03:20:07,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=27133.333333333332, ans=0.125 +2024-08-03 03:20:14,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.53 vs. limit=15.0 +2024-08-03 03:20:21,028 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.439e+02 1.724e+02 2.172e+02 3.862e+02, threshold=3.447e+02, percent-clipped=4.0 +2024-08-03 03:20:57,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=27206.666666666668, ans=0.0 +2024-08-03 03:21:00,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.96 vs. limit=15.0 +2024-08-03 03:21:06,901 INFO [train.py:1114] (1/4) Epoch 3, batch 150, loss[loss=0.2711, simple_loss=0.3248, pruned_loss=0.1087, over 13428.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3715, pruned_loss=0.1304, over 1387036.43 frames. 
], batch size: 32, lr: 3.53e-02, grad_scale: 32.0 +2024-08-03 03:21:10,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=27243.333333333332, ans=0.0 +2024-08-03 03:21:10,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=27243.333333333332, ans=0.125 +2024-08-03 03:21:13,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.59 vs. limit=15.0 +2024-08-03 03:21:20,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=27280.0, ans=0.2 +2024-08-03 03:21:38,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=27353.333333333332, ans=0.0 +2024-08-03 03:22:02,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.83 vs. limit=22.5 +2024-08-03 03:22:07,956 INFO [train.py:1114] (1/4) Epoch 3, batch 200, loss[loss=0.314, simple_loss=0.38, pruned_loss=0.124, over 12626.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3693, pruned_loss=0.1287, over 1666190.13 frames. ], batch size: 58, lr: 3.53e-02, grad_scale: 16.0 +2024-08-03 03:22:16,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27463.333333333332, ans=0.1 +2024-08-03 03:22:23,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.79 vs. limit=10.0 +2024-08-03 03:22:24,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27463.333333333332, ans=0.125 +2024-08-03 03:22:30,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=27500.0, ans=0.0 +2024-08-03 03:22:31,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27500.0, ans=0.125 +2024-08-03 03:22:37,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27536.666666666668, ans=0.1 +2024-08-03 03:22:41,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.338e+02 1.522e+02 1.755e+02 2.817e+02, threshold=3.045e+02, percent-clipped=0.0 +2024-08-03 03:22:48,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27573.333333333332, ans=0.125 +2024-08-03 03:22:52,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27573.333333333332, ans=0.1 +2024-08-03 03:22:53,962 INFO [train.py:1114] (1/4) Epoch 3, batch 250, loss[loss=0.3022, simple_loss=0.3689, pruned_loss=0.1178, over 13328.00 frames. ], tot_loss[loss=0.313, simple_loss=0.369, pruned_loss=0.1285, over 1884389.62 frames. 
], batch size: 46, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:22:58,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27610.0, ans=0.1 +2024-08-03 03:23:01,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=27610.0, ans=0.125 +2024-08-03 03:23:03,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=15.0 +2024-08-03 03:23:13,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27683.333333333332, ans=0.1 +2024-08-03 03:23:48,977 INFO [train.py:1114] (1/4) Epoch 3, batch 300, loss[loss=0.3127, simple_loss=0.37, pruned_loss=0.1277, over 13431.00 frames. ], tot_loss[loss=0.3113, simple_loss=0.3677, pruned_loss=0.1275, over 2051562.68 frames. ], batch size: 42, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:23:51,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=27793.333333333332, ans=0.125 +2024-08-03 03:24:08,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=27830.0, ans=0.0 +2024-08-03 03:24:14,308 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:24:19,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=27903.333333333332, ans=0.125 +2024-08-03 03:24:24,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.466e+02 1.718e+02 2.215e+02 5.480e+02, threshold=3.437e+02, percent-clipped=5.0 +2024-08-03 03:24:38,884 INFO [train.py:1114] (1/4) Epoch 3, batch 350, loss[loss=0.2952, simple_loss=0.3399, pruned_loss=0.1253, over 13593.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.369, pruned_loss=0.1281, over 2181730.68 frames. ], batch size: 33, lr: 3.51e-02, grad_scale: 16.0 +2024-08-03 03:24:39,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=27976.666666666668, ans=0.125 +2024-08-03 03:24:41,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=27976.666666666668, ans=0.0 +2024-08-03 03:24:54,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=28013.333333333332, ans=0.0047797101449275365 +2024-08-03 03:25:04,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.41 vs. limit=22.5 +2024-08-03 03:25:04,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.03 vs. 
limit=10.0 +2024-08-03 03:25:05,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=28050.0, ans=0.0 +2024-08-03 03:25:11,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=28086.666666666668, ans=0.125 +2024-08-03 03:25:14,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-08-03 03:25:17,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=28123.333333333332, ans=0.125 +2024-08-03 03:25:25,235 INFO [train.py:1114] (1/4) Epoch 3, batch 400, loss[loss=0.3627, simple_loss=0.4075, pruned_loss=0.1589, over 13360.00 frames. ], tot_loss[loss=0.3117, simple_loss=0.3681, pruned_loss=0.1277, over 2286018.70 frames. ], batch size: 37, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:25:36,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28196.666666666668, ans=0.0 +2024-08-03 03:25:38,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=28196.666666666668, ans=0.125 +2024-08-03 03:25:40,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=15.0 +2024-08-03 03:25:52,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=28233.333333333332, ans=0.125 +2024-08-03 03:25:52,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=28233.333333333332, ans=0.0 +2024-08-03 03:25:58,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=28233.333333333332, ans=0.025 +2024-08-03 03:26:05,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.402e+02 1.627e+02 1.926e+02 3.907e+02, threshold=3.254e+02, percent-clipped=1.0 +2024-08-03 03:26:09,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28306.666666666668, ans=0.0 +2024-08-03 03:26:10,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.33 vs. limit=15.0 +2024-08-03 03:26:18,597 INFO [train.py:1114] (1/4) Epoch 3, batch 450, loss[loss=0.3153, simple_loss=0.3757, pruned_loss=0.1275, over 13557.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3662, pruned_loss=0.1259, over 2359813.64 frames. ], batch size: 38, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:26:36,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28380.0, ans=0.125 +2024-08-03 03:26:54,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.70 vs. 
limit=10.0 +2024-08-03 03:27:01,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28490.0, ans=0.125 +2024-08-03 03:27:03,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=28490.0, ans=0.125 +2024-08-03 03:27:06,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=28490.0, ans=0.0 +2024-08-03 03:27:11,500 INFO [train.py:1114] (1/4) Epoch 3, batch 500, loss[loss=0.3342, simple_loss=0.3854, pruned_loss=0.1415, over 13441.00 frames. ], tot_loss[loss=0.3076, simple_loss=0.3646, pruned_loss=0.1253, over 2425204.06 frames. ], batch size: 43, lr: 3.49e-02, grad_scale: 32.0 +2024-08-03 03:27:20,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.82 vs. limit=15.0 +2024-08-03 03:27:33,967 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:27:39,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=28636.666666666668, ans=0.004644202898550724 +2024-08-03 03:27:47,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=28636.666666666668, ans=0.004644202898550724 +2024-08-03 03:27:48,040 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.466e+02 1.735e+02 2.174e+02 5.837e+02, threshold=3.470e+02, percent-clipped=2.0 +2024-08-03 03:27:50,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=28673.333333333332, ans=0.004636231884057971 +2024-08-03 03:27:53,873 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.81 vs. limit=15.0 +2024-08-03 03:27:55,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28673.333333333332, ans=0.1 +2024-08-03 03:27:59,878 INFO [train.py:1114] (1/4) Epoch 3, batch 550, loss[loss=0.3572, simple_loss=0.4067, pruned_loss=0.1538, over 13101.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.3652, pruned_loss=0.1259, over 2466556.64 frames. ], batch size: 48, lr: 3.49e-02, grad_scale: 16.0 +2024-08-03 03:28:02,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28710.0, ans=0.1 +2024-08-03 03:28:10,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.43 vs. limit=15.0 +2024-08-03 03:28:16,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.48 vs. 
limit=15.0 +2024-08-03 03:28:38,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=28856.666666666668, ans=0.125 +2024-08-03 03:28:46,579 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.260e-03 +2024-08-03 03:28:47,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.79 vs. limit=15.0 +2024-08-03 03:28:47,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=28893.333333333332, ans=0.02 +2024-08-03 03:28:48,243 INFO [train.py:1114] (1/4) Epoch 3, batch 600, loss[loss=0.3059, simple_loss=0.3745, pruned_loss=0.1187, over 13324.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3647, pruned_loss=0.1251, over 2505046.12 frames. ], batch size: 46, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:21,219 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.381e+02 1.525e+02 1.783e+02 3.115e+02, threshold=3.051e+02, percent-clipped=0.0 +2024-08-03 03:29:25,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29040.0, ans=0.125 +2024-08-03 03:29:37,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=29040.0, ans=0.004556521739130435 +2024-08-03 03:29:39,454 INFO [train.py:1114] (1/4) Epoch 3, batch 650, loss[loss=0.2838, simple_loss=0.3478, pruned_loss=0.1099, over 13547.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3629, pruned_loss=0.1236, over 2540990.62 frames. ], batch size: 37, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:44,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=29076.666666666668, ans=0.0 +2024-08-03 03:29:49,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=29113.333333333332, ans=0.0045405797101449276 +2024-08-03 03:29:50,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.42 vs. limit=15.0 +2024-08-03 03:29:54,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.63 vs. limit=15.0 +2024-08-03 03:30:03,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.55 vs. limit=22.5 +2024-08-03 03:30:04,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=29150.0, ans=0.004532608695652174 +2024-08-03 03:30:06,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29186.666666666668, ans=0.125 +2024-08-03 03:30:08,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=29186.666666666668, ans=0.5 +2024-08-03 03:30:14,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.21 vs. 
limit=6.0 +2024-08-03 03:30:17,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=29223.333333333332, ans=0.125 +2024-08-03 03:30:22,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29223.333333333332, ans=0.125 +2024-08-03 03:30:23,064 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.84 vs. limit=15.0 +2024-08-03 03:30:26,321 INFO [train.py:1114] (1/4) Epoch 3, batch 700, loss[loss=0.3051, simple_loss=0.3582, pruned_loss=0.126, over 13525.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3638, pruned_loss=0.1242, over 2563713.44 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 16.0 +2024-08-03 03:30:27,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=29260.0, ans=0.0 +2024-08-03 03:31:20,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=29333.333333333332, ans=0.125 +2024-08-03 03:31:23,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29370.0, ans=0.1 +2024-08-03 03:31:25,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.69 vs. limit=22.5 +2024-08-03 03:31:29,956 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.985e+01 1.488e+02 1.744e+02 2.083e+02 3.353e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-03 03:31:33,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=29406.666666666668, ans=0.125 +2024-08-03 03:31:41,806 INFO [train.py:1114] (1/4) Epoch 3, batch 750, loss[loss=0.3317, simple_loss=0.387, pruned_loss=0.1382, over 13367.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3636, pruned_loss=0.124, over 2581904.58 frames. ], batch size: 37, lr: 3.46e-02, grad_scale: 16.0 +2024-08-03 03:36:16,519 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.510e-03 +2024-08-03 03:36:27,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=29480.0, ans=0.2 +2024-08-03 03:37:41,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29480.0, ans=0.1 +2024-08-03 03:38:26,553 INFO [train.py:1114] (1/4) Epoch 3, batch 800, loss[loss=0.3033, simple_loss=0.3591, pruned_loss=0.1238, over 13344.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3622, pruned_loss=0.1231, over 2596327.12 frames. 
], batch size: 33, lr: 3.46e-02, grad_scale: 32.0 +2024-08-03 03:38:26,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29626.666666666668, ans=0.1 +2024-08-03 03:38:32,170 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:38:41,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=29663.333333333332, ans=0.125 +2024-08-03 03:38:43,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=29663.333333333332, ans=0.2 +2024-08-03 03:38:51,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=29700.0, ans=0.125 +2024-08-03 03:39:00,213 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.412e+02 1.629e+02 2.089e+02 3.471e+02, threshold=3.259e+02, percent-clipped=0.0 +2024-08-03 03:39:06,139 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:39:12,373 INFO [train.py:1114] (1/4) Epoch 3, batch 850, loss[loss=0.3419, simple_loss=0.4008, pruned_loss=0.1415, over 13308.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3624, pruned_loss=0.1233, over 2609111.87 frames. ], batch size: 40, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:39:15,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=29810.0, ans=0.1 +2024-08-03 03:39:15,395 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:39:19,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=29810.0, ans=0.125 +2024-08-03 03:39:19,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.36 vs. limit=12.0 +2024-08-03 03:39:34,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=29883.333333333332, ans=0.2 +2024-08-03 03:39:38,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=29883.333333333332, ans=0.125 +2024-08-03 03:39:50,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=15.0 +2024-08-03 03:40:04,389 INFO [train.py:1114] (1/4) Epoch 3, batch 900, loss[loss=0.2866, simple_loss=0.3359, pruned_loss=0.1186, over 13355.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3627, pruned_loss=0.1233, over 2611326.43 frames. 
], batch size: 33, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:40:04,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=29993.333333333332, ans=0.0 +2024-08-03 03:40:09,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=29993.333333333332, ans=0.0 +2024-08-03 03:40:13,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30030.0, ans=0.1 +2024-08-03 03:40:27,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=30066.666666666668, ans=0.125 +2024-08-03 03:40:28,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=30066.666666666668, ans=0.0 +2024-08-03 03:40:29,040 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.87 vs. limit=15.0 +2024-08-03 03:40:39,772 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.370e+02 1.651e+02 2.028e+02 4.342e+02, threshold=3.303e+02, percent-clipped=4.0 +2024-08-03 03:40:51,072 INFO [train.py:1114] (1/4) Epoch 3, batch 950, loss[loss=0.2661, simple_loss=0.3332, pruned_loss=0.09944, over 13531.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3623, pruned_loss=0.1228, over 2612483.49 frames. ], batch size: 34, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:40:53,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=2.84 vs. limit=15.0 +2024-08-03 03:41:12,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=30250.0, ans=0.004293478260869565 +2024-08-03 03:41:15,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.98 vs. limit=15.0 +2024-08-03 03:41:40,358 INFO [train.py:1114] (1/4) Epoch 3, batch 1000, loss[loss=0.3129, simple_loss=0.3721, pruned_loss=0.1269, over 13365.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3637, pruned_loss=0.1239, over 2610610.26 frames. ], batch size: 35, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:41:55,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30396.666666666668, ans=0.1 +2024-08-03 03:42:15,780 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.383e+02 1.605e+02 2.126e+02 5.573e+02, threshold=3.210e+02, percent-clipped=1.0 +2024-08-03 03:42:27,492 INFO [train.py:1114] (1/4) Epoch 3, batch 1050, loss[loss=0.3021, simple_loss=0.3769, pruned_loss=0.1136, over 13577.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3629, pruned_loss=0.1236, over 2614341.94 frames. ], batch size: 39, lr: 3.43e-02, grad_scale: 16.0 +2024-08-03 03:42:27,771 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:42:36,769 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:42:36,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.98 vs. 
limit=12.0 +2024-08-03 03:42:49,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30616.666666666668, ans=0.1 +2024-08-03 03:43:07,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=20.46 vs. limit=15.0 +2024-08-03 03:43:16,388 INFO [train.py:1114] (1/4) Epoch 3, batch 1100, loss[loss=0.3093, simple_loss=0.3628, pruned_loss=0.1279, over 13562.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3635, pruned_loss=0.124, over 2619263.93 frames. ], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:43:17,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=30726.666666666668, ans=0.0 +2024-08-03 03:43:29,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=22.5 +2024-08-03 03:43:42,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=30800.0, ans=0.125 +2024-08-03 03:43:50,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=30836.666666666668, ans=22.5 +2024-08-03 03:43:53,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=30836.666666666668, ans=0.125 +2024-08-03 03:43:55,895 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.319e+02 1.473e+02 1.688e+02 2.812e+02, threshold=2.945e+02, percent-clipped=0.0 +2024-08-03 03:44:04,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30873.333333333332, ans=0.125 +2024-08-03 03:44:07,121 INFO [train.py:1114] (1/4) Epoch 3, batch 1150, loss[loss=0.3155, simple_loss=0.3642, pruned_loss=0.1334, over 13582.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3624, pruned_loss=0.1234, over 2618235.41 frames. ], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:44:12,900 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:44:14,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=30910.0, ans=0.125 +2024-08-03 03:44:20,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30946.666666666668, ans=0.1 +2024-08-03 03:44:27,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30983.333333333332, ans=0.1 +2024-08-03 03:45:00,366 INFO [train.py:1114] (1/4) Epoch 3, batch 1200, loss[loss=0.3314, simple_loss=0.4015, pruned_loss=0.1307, over 13574.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3636, pruned_loss=0.1239, over 2615961.84 frames. 
], batch size: 39, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:24,717 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:45:24,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=31166.666666666668, ans=0.0 +2024-08-03 03:45:37,479 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.400e+02 1.587e+02 2.006e+02 3.916e+02, threshold=3.173e+02, percent-clipped=5.0 +2024-08-03 03:45:48,704 INFO [train.py:1114] (1/4) Epoch 3, batch 1250, loss[loss=0.3397, simple_loss=0.4, pruned_loss=0.1397, over 13417.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3646, pruned_loss=0.1242, over 2628427.33 frames. ], batch size: 42, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:53,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=31276.666666666668, ans=0.04949747468305833 +2024-08-03 03:45:56,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.04 vs. limit=15.0 +2024-08-03 03:45:58,659 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.42 vs. limit=15.0 +2024-08-03 03:46:00,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.82 vs. limit=5.0 +2024-08-03 03:46:00,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=31313.333333333332, ans=0.125 +2024-08-03 03:46:01,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=31313.333333333332, ans=0.125 +2024-08-03 03:46:09,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=31350.0, ans=0.0 +2024-08-03 03:46:32,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=31423.333333333332, ans=0.125 +2024-08-03 03:46:34,553 INFO [train.py:1114] (1/4) Epoch 3, batch 1300, loss[loss=0.2944, simple_loss=0.3609, pruned_loss=0.114, over 12924.00 frames. ], tot_loss[loss=0.3043, simple_loss=0.3627, pruned_loss=0.123, over 2631192.76 frames. ], batch size: 52, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:46:34,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=31460.0, ans=0.125 +2024-08-03 03:46:37,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.98 vs. limit=15.0 +2024-08-03 03:46:44,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.69 vs. 
limit=6.0 +2024-08-03 03:46:58,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=31533.333333333332, ans=0.025 +2024-08-03 03:47:04,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=31570.0, ans=0.125 +2024-08-03 03:47:14,531 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.368e+02 1.596e+02 1.808e+02 3.073e+02, threshold=3.191e+02, percent-clipped=0.0 +2024-08-03 03:47:15,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=31606.666666666668, ans=0.0 +2024-08-03 03:47:16,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.40 vs. limit=22.5 +2024-08-03 03:47:18,002 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:47:25,302 INFO [train.py:1114] (1/4) Epoch 3, batch 1350, loss[loss=0.2789, simple_loss=0.3495, pruned_loss=0.1041, over 13541.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3615, pruned_loss=0.1223, over 2638394.80 frames. ], batch size: 37, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:47:39,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.21 vs. limit=22.5 +2024-08-03 03:48:11,337 INFO [train.py:1114] (1/4) Epoch 3, batch 1400, loss[loss=0.2393, simple_loss=0.2983, pruned_loss=0.0901, over 13260.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3612, pruned_loss=0.122, over 2641979.91 frames. ], batch size: 31, lr: 3.39e-02, grad_scale: 8.0 +2024-08-03 03:48:12,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=31826.666666666668, ans=0.003950724637681159 +2024-08-03 03:48:16,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=31826.666666666668, ans=0.125 +2024-08-03 03:48:16,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=31826.666666666668, ans=0.025 +2024-08-03 03:48:27,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=31863.333333333332, ans=0.125 +2024-08-03 03:48:42,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31936.666666666668, ans=0.1 +2024-08-03 03:48:48,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31936.666666666668, ans=0.1 +2024-08-03 03:48:50,659 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.406e+02 1.620e+02 1.890e+02 3.890e+02, threshold=3.239e+02, percent-clipped=2.0 +2024-08-03 03:49:26,504 INFO [train.py:1114] (1/4) Epoch 3, batch 1450, loss[loss=0.3178, simple_loss=0.376, pruned_loss=0.1298, over 13390.00 frames. ], tot_loss[loss=0.3037, simple_loss=0.3623, pruned_loss=0.1225, over 2641587.51 frames. 
], batch size: 43, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:49:43,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=32010.0, ans=0.125 +2024-08-03 03:49:44,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=32010.0, ans=0.125 +2024-08-03 03:52:44,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.82 vs. limit=22.5 +2024-08-03 03:52:48,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=32156.666666666668, ans=0.5 +2024-08-03 03:52:50,970 INFO [train.py:1114] (1/4) Epoch 3, batch 1500, loss[loss=0.3313, simple_loss=0.3908, pruned_loss=0.1359, over 13390.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3621, pruned_loss=0.1221, over 2641591.10 frames. ], batch size: 39, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:52:54,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=32193.333333333332, ans=0.125 +2024-08-03 03:53:05,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32230.0, ans=0.1 +2024-08-03 03:53:08,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.21 vs. limit=22.5 +2024-08-03 03:53:10,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=32266.666666666668, ans=0.0 +2024-08-03 03:53:28,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=32303.333333333332, ans=0.09899494936611666 +2024-08-03 03:53:29,873 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.393e+02 1.602e+02 2.004e+02 4.084e+02, threshold=3.204e+02, percent-clipped=1.0 +2024-08-03 03:53:37,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=32340.0, ans=0.125 +2024-08-03 03:53:37,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=32340.0, ans=0.0 +2024-08-03 03:53:38,882 INFO [train.py:1114] (1/4) Epoch 3, batch 1550, loss[loss=0.3102, simple_loss=0.3688, pruned_loss=0.1258, over 13373.00 frames. ], tot_loss[loss=0.3041, simple_loss=0.3623, pruned_loss=0.1229, over 2631403.53 frames. ], batch size: 41, lr: 3.37e-02, grad_scale: 8.0 +2024-08-03 03:54:14,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=15.0 +2024-08-03 03:54:26,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.13 vs. 
limit=15.0 +2024-08-03 03:54:27,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=32486.666666666668, ans=0.125 +2024-08-03 03:54:49,629 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:54:57,313 INFO [train.py:1114] (1/4) Epoch 3, batch 1600, loss[loss=0.3238, simple_loss=0.3867, pruned_loss=0.1305, over 13572.00 frames. ], tot_loss[loss=0.3043, simple_loss=0.3621, pruned_loss=0.1232, over 2624550.36 frames. ], batch size: 39, lr: 3.37e-02, grad_scale: 16.0 +2024-08-03 03:54:58,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32560.0, ans=0.1 +2024-08-03 03:55:11,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32596.666666666668, ans=0.125 +2024-08-03 03:55:16,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.54 vs. limit=15.0 +2024-08-03 03:55:19,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=32633.333333333332, ans=0.2 +2024-08-03 03:55:27,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=32633.333333333332, ans=0.025 +2024-08-03 03:55:30,904 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:55:32,655 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:55:35,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=32670.0, ans=0.0 +2024-08-03 03:55:37,888 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.348e+02 1.511e+02 1.737e+02 4.413e+02, threshold=3.022e+02, percent-clipped=2.0 +2024-08-03 03:55:46,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=32743.333333333332, ans=0.05 +2024-08-03 03:55:48,035 INFO [train.py:1114] (1/4) Epoch 3, batch 1650, loss[loss=0.2908, simple_loss=0.3595, pruned_loss=0.111, over 13313.00 frames. ], tot_loss[loss=0.3042, simple_loss=0.3619, pruned_loss=0.1232, over 2620111.72 frames. ], batch size: 40, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:55:54,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=32743.333333333332, ans=0.125 +2024-08-03 03:55:57,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=32743.333333333332, ans=0.025 +2024-08-03 03:55:57,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=32743.333333333332, ans=0.125 +2024-08-03 03:56:14,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-08-03 03:56:21,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.43 vs. 
limit=10.0 +2024-08-03 03:56:22,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=32816.666666666664, ans=0.0 +2024-08-03 03:56:26,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=32853.333333333336, ans=0.125 +2024-08-03 03:56:29,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=32853.333333333336, ans=0.0 +2024-08-03 03:56:43,054 INFO [train.py:1114] (1/4) Epoch 3, batch 1700, loss[loss=0.2592, simple_loss=0.3166, pruned_loss=0.1009, over 13241.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3613, pruned_loss=0.1227, over 2629327.01 frames. ], batch size: 31, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:56:52,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=32963.333333333336, ans=0.003703623188405796 +2024-08-03 03:57:03,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=33000.0, ans=0.2 +2024-08-03 03:57:04,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=33000.0, ans=0.125 +2024-08-03 03:57:05,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=33000.0, ans=0.0036956521739130435 +2024-08-03 03:57:19,657 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.348e+02 1.541e+02 1.805e+02 2.810e+02, threshold=3.082e+02, percent-clipped=0.0 +2024-08-03 03:57:20,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33073.333333333336, ans=0.1 +2024-08-03 03:57:33,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=33110.0, ans=0.125 +2024-08-03 03:57:34,249 INFO [train.py:1114] (1/4) Epoch 3, batch 1750, loss[loss=0.2799, simple_loss=0.3333, pruned_loss=0.1133, over 13533.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.3608, pruned_loss=0.1225, over 2633272.98 frames. ], batch size: 31, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:58:33,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=33256.666666666664, ans=0.0036398550724637685 +2024-08-03 03:58:36,899 INFO [train.py:1114] (1/4) Epoch 3, batch 1800, loss[loss=0.2712, simple_loss=0.3459, pruned_loss=0.09828, over 13555.00 frames. ], tot_loss[loss=0.3024, simple_loss=0.3607, pruned_loss=0.122, over 2634654.86 frames. 
], batch size: 38, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:58:39,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=33293.333333333336, ans=0.125 +2024-08-03 03:58:42,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=33293.333333333336, ans=0.125 +2024-08-03 03:59:00,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=33366.666666666664, ans=15.0 +2024-08-03 03:59:03,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=33366.666666666664, ans=0.125 +2024-08-03 03:59:04,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.95 vs. limit=22.5 +2024-08-03 03:59:14,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=33403.333333333336, ans=0.125 +2024-08-03 03:59:15,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=33403.333333333336, ans=0.003607971014492753 +2024-08-03 03:59:16,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33440.0, ans=0.1 +2024-08-03 03:59:17,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.365e+02 1.589e+02 1.919e+02 3.211e+02, threshold=3.178e+02, percent-clipped=2.0 +2024-08-03 03:59:18,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=33440.0, ans=0.125 +2024-08-03 03:59:20,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=33440.0, ans=0.125 +2024-08-03 03:59:26,730 INFO [train.py:1114] (1/4) Epoch 3, batch 1850, loss[loss=0.3393, simple_loss=0.3908, pruned_loss=0.1439, over 13403.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3593, pruned_loss=0.1208, over 2637616.78 frames. ], batch size: 39, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 03:59:37,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.27 vs. limit=15.0 +2024-08-03 03:59:41,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33513.333333333336, ans=0.0 +2024-08-03 03:59:42,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33513.333333333336, ans=0.125 +2024-08-03 04:00:04,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=33586.666666666664, ans=0.0 +2024-08-03 04:00:21,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.73 vs. limit=15.0 +2024-08-03 04:00:22,356 INFO [train.py:1114] (1/4) Epoch 3, batch 1900, loss[loss=0.2878, simple_loss=0.3631, pruned_loss=0.1062, over 13332.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3603, pruned_loss=0.121, over 2640250.01 frames. 
], batch size: 40, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 04:00:33,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=33696.666666666664, ans=0.0 +2024-08-03 04:00:36,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.38 vs. limit=10.0 +2024-08-03 04:00:41,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=33733.333333333336, ans=0.2 +2024-08-03 04:00:42,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=33733.333333333336, ans=0.125 +2024-08-03 04:00:47,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=15.0 +2024-08-03 04:01:01,319 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.394e+02 1.539e+02 1.838e+02 3.320e+02, threshold=3.078e+02, percent-clipped=1.0 +2024-08-03 04:01:04,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=33806.666666666664, ans=0.09899494936611666 +2024-08-03 04:01:10,630 INFO [train.py:1114] (1/4) Epoch 3, batch 1950, loss[loss=0.3119, simple_loss=0.3686, pruned_loss=0.1277, over 13546.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3615, pruned_loss=0.1215, over 2646883.96 frames. ], batch size: 36, lr: 3.33e-02, grad_scale: 16.0 +2024-08-03 04:01:25,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=33880.0, ans=0.125 +2024-08-03 04:01:30,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=33880.0, ans=0.025 +2024-08-03 04:02:01,769 INFO [train.py:1114] (1/4) Epoch 3, batch 2000, loss[loss=0.2424, simple_loss=0.3021, pruned_loss=0.0914, over 13514.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3625, pruned_loss=0.122, over 2636619.32 frames. ], batch size: 31, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:02:02,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.87 vs. 
limit=22.5 +2024-08-03 04:02:04,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=34026.666666666664, ans=0.2 +2024-08-03 04:02:08,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34026.666666666664, ans=0.125 +2024-08-03 04:02:24,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=34063.333333333336, ans=0.125 +2024-08-03 04:02:48,520 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.429e+02 1.657e+02 2.036e+02 4.223e+02, threshold=3.314e+02, percent-clipped=3.0 +2024-08-03 04:02:50,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34173.333333333336, ans=0.125 +2024-08-03 04:02:50,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=34173.333333333336, ans=0.04949747468305833 +2024-08-03 04:02:57,966 INFO [train.py:1114] (1/4) Epoch 3, batch 2050, loss[loss=0.2283, simple_loss=0.295, pruned_loss=0.08073, over 13422.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3614, pruned_loss=0.1216, over 2634017.64 frames. ], batch size: 32, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:03:11,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=34246.666666666664, ans=0.0 +2024-08-03 04:03:14,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34246.666666666664, ans=0.1 +2024-08-03 04:03:22,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34283.333333333336, ans=0.1 +2024-08-03 04:03:31,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=34320.0, ans=0.0034086956521739136 +2024-08-03 04:03:31,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=34320.0, ans=0.07 +2024-08-03 04:03:33,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=34320.0, ans=0.0 +2024-08-03 04:03:45,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=34356.666666666664, ans=0.125 +2024-08-03 04:03:47,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=34393.333333333336, ans=0.2 +2024-08-03 04:03:48,239 INFO [train.py:1114] (1/4) Epoch 3, batch 2100, loss[loss=0.2966, simple_loss=0.3566, pruned_loss=0.1183, over 13547.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3605, pruned_loss=0.1211, over 2640311.89 frames. 
], batch size: 37, lr: 3.31e-02, grad_scale: 32.0 +2024-08-03 04:03:57,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=34393.333333333336, ans=0.125 +2024-08-03 04:04:09,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=34466.666666666664, ans=0.07 +2024-08-03 04:04:16,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=34466.666666666664, ans=0.125 +2024-08-03 04:04:18,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=34466.666666666664, ans=0.2 +2024-08-03 04:04:19,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=34503.333333333336, ans=0.125 +2024-08-03 04:04:26,675 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.87 vs. limit=15.0 +2024-08-03 04:04:29,651 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.385e+02 1.595e+02 1.788e+02 2.690e+02, threshold=3.190e+02, percent-clipped=1.0 +2024-08-03 04:04:37,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.48 vs. limit=15.0 +2024-08-03 04:04:37,868 INFO [train.py:1114] (1/4) Epoch 3, batch 2150, loss[loss=0.2858, simple_loss=0.3433, pruned_loss=0.1141, over 13551.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3585, pruned_loss=0.1192, over 2648633.88 frames. ], batch size: 36, lr: 3.31e-02, grad_scale: 16.0 +2024-08-03 04:05:14,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=34650.0, ans=0.125 +2024-08-03 04:05:14,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34650.0, ans=0.0 +2024-08-03 04:05:20,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=34650.0, ans=0.025 +2024-08-03 04:05:25,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=34686.666666666664, ans=0.0 +2024-08-03 04:05:32,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.90 vs. limit=15.0 +2024-08-03 04:05:42,226 INFO [train.py:1114] (1/4) Epoch 3, batch 2200, loss[loss=0.3162, simple_loss=0.3771, pruned_loss=0.1276, over 13389.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3587, pruned_loss=0.1192, over 2646561.13 frames. 
], batch size: 39, lr: 3.30e-02, grad_scale: 16.0 +2024-08-03 04:05:46,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34760.0, ans=0.125 +2024-08-03 04:05:52,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=34796.666666666664, ans=0.003305072463768117 +2024-08-03 04:05:54,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=34796.666666666664, ans=0.125 +2024-08-03 04:06:23,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34906.666666666664, ans=0.125 +2024-08-03 04:06:24,638 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.404e+02 1.621e+02 1.995e+02 2.772e+02, threshold=3.241e+02, percent-clipped=0.0 +2024-08-03 04:06:34,731 INFO [train.py:1114] (1/4) Epoch 3, batch 2250, loss[loss=0.2983, simple_loss=0.3717, pruned_loss=0.1125, over 13357.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3587, pruned_loss=0.1193, over 2642638.95 frames. ], batch size: 37, lr: 3.30e-02, grad_scale: 16.0 +2024-08-03 04:06:41,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34943.333333333336, ans=0.1 +2024-08-03 04:06:48,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.82 vs. limit=22.5 +2024-08-03 04:06:57,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=35016.666666666664, ans=0.025 +2024-08-03 04:07:02,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=35053.333333333336, ans=0.125 +2024-08-03 04:07:20,671 INFO [train.py:1114] (1/4) Epoch 3, batch 2300, loss[loss=0.2588, simple_loss=0.3266, pruned_loss=0.09551, over 13593.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3577, pruned_loss=0.1194, over 2638789.53 frames. ], batch size: 33, lr: 3.29e-02, grad_scale: 16.0 +2024-08-03 04:07:31,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.04 vs. limit=12.0 +2024-08-03 04:07:40,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=35200.0, ans=0.04949747468305833 +2024-08-03 04:07:50,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=15.0 +2024-08-03 04:07:53,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=35236.666666666664, ans=0.125 +2024-08-03 04:08:01,044 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.453e+02 1.713e+02 2.224e+02 5.491e+02, threshold=3.425e+02, percent-clipped=5.0 +2024-08-03 04:08:01,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=35273.333333333336, ans=0.0032014492753623183 +2024-08-03 04:08:11,152 INFO [train.py:1114] (1/4) Epoch 3, batch 2350, loss[loss=0.3328, simple_loss=0.386, pruned_loss=0.1399, over 13547.00 frames. 
], tot_loss[loss=0.2992, simple_loss=0.3585, pruned_loss=0.12, over 2641346.80 frames. ], batch size: 38, lr: 3.29e-02, grad_scale: 16.0 +2024-08-03 04:08:15,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=35310.0, ans=0.0 +2024-08-03 04:08:26,118 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:08:27,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-08-03 04:08:31,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35383.333333333336, ans=0.125 +2024-08-03 04:08:42,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=35420.0, ans=0.125 +2024-08-03 04:08:47,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=35420.0, ans=0.0 +2024-08-03 04:08:54,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35456.666666666664, ans=0.125 +2024-08-03 04:09:00,931 INFO [train.py:1114] (1/4) Epoch 3, batch 2400, loss[loss=0.2977, simple_loss=0.3599, pruned_loss=0.1177, over 13543.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3591, pruned_loss=0.1203, over 2642538.39 frames. ], batch size: 35, lr: 3.28e-02, grad_scale: 32.0 +2024-08-03 04:09:06,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=35493.333333333336, ans=10.0 +2024-08-03 04:09:38,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.322e+02 1.493e+02 1.831e+02 3.002e+02, threshold=2.985e+02, percent-clipped=0.0 +2024-08-03 04:09:47,348 INFO [train.py:1114] (1/4) Epoch 3, batch 2450, loss[loss=0.2945, simple_loss=0.3622, pruned_loss=0.1134, over 13359.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3601, pruned_loss=0.1208, over 2632850.10 frames. ], batch size: 37, lr: 3.28e-02, grad_scale: 32.0 +2024-08-03 04:10:03,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35713.333333333336, ans=0.125 +2024-08-03 04:10:08,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=35750.0, ans=0.125 +2024-08-03 04:10:20,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=35786.666666666664, ans=0.125 +2024-08-03 04:10:31,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=35823.333333333336, ans=0.125 +2024-08-03 04:10:33,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=35823.333333333336, ans=0.07 +2024-08-03 04:10:37,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.15 vs. limit=15.0 +2024-08-03 04:10:39,196 INFO [train.py:1114] (1/4) Epoch 3, batch 2500, loss[loss=0.3043, simple_loss=0.3659, pruned_loss=0.1214, over 13387.00 frames. 
], tot_loss[loss=0.2996, simple_loss=0.3594, pruned_loss=0.1199, over 2636522.35 frames. ], batch size: 39, lr: 3.27e-02, grad_scale: 32.0 +2024-08-03 04:10:39,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.55 vs. limit=15.0 +2024-08-03 04:10:46,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=35860.0, ans=0.2 +2024-08-03 04:10:53,533 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=3.244e-01 +2024-08-03 04:10:55,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=35896.666666666664, ans=0.003065942028985508 +2024-08-03 04:11:05,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35970.0, ans=0.125 +2024-08-03 04:11:15,828 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.323e+02 1.438e+02 1.681e+02 3.376e+02, threshold=2.876e+02, percent-clipped=2.0 +2024-08-03 04:11:27,373 INFO [train.py:1114] (1/4) Epoch 3, batch 2550, loss[loss=0.258, simple_loss=0.3112, pruned_loss=0.1024, over 13554.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3585, pruned_loss=0.119, over 2638820.48 frames. ], batch size: 31, lr: 3.27e-02, grad_scale: 32.0 +2024-08-03 04:11:28,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36043.333333333336, ans=0.125 +2024-08-03 04:11:29,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-08-03 04:11:31,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36043.333333333336, ans=0.1 +2024-08-03 04:11:32,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.06 vs. limit=6.0 +2024-08-03 04:11:38,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=36043.333333333336, ans=0.07 +2024-08-03 04:11:49,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36080.0, ans=0.125 +2024-08-03 04:11:54,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=36080.0, ans=0.04949747468305833 +2024-08-03 04:11:57,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=36080.0, ans=0.0 +2024-08-03 04:12:33,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=36190.0, ans=0.09899494936611666 +2024-08-03 04:12:43,961 INFO [train.py:1114] (1/4) Epoch 3, batch 2600, loss[loss=0.2809, simple_loss=0.3495, pruned_loss=0.1061, over 13559.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3588, pruned_loss=0.1191, over 2637478.49 frames. 
], batch size: 36, lr: 3.26e-02, grad_scale: 32.0 +2024-08-03 04:13:33,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=36336.666666666664, ans=0.125 +2024-08-03 04:13:39,925 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.332e+02 1.510e+02 1.763e+02 2.662e+02, threshold=3.019e+02, percent-clipped=0.0 +2024-08-03 04:13:40,099 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:13:45,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=36373.333333333336, ans=0.125 +2024-08-03 04:13:46,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=36373.333333333336, ans=0.0029623188405797102 +2024-08-03 04:13:49,889 INFO [train.py:1114] (1/4) Epoch 3, batch 2650, loss[loss=0.2795, simple_loss=0.3621, pruned_loss=0.09845, over 13280.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3594, pruned_loss=0.1191, over 2640695.60 frames. ], batch size: 46, lr: 3.26e-02, grad_scale: 32.0 +2024-08-03 04:14:27,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=36520.0, ans=0.125 +2024-08-03 04:14:37,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36520.0, ans=0.125 +2024-08-03 04:14:57,818 INFO [train.py:1114] (1/4) Epoch 3, batch 2700, loss[loss=0.3049, simple_loss=0.3688, pruned_loss=0.1205, over 13538.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3599, pruned_loss=0.1196, over 2638032.43 frames. ], batch size: 40, lr: 3.25e-02, grad_scale: 16.0 +2024-08-03 04:15:23,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=36630.0, ans=0.04949747468305833 +2024-08-03 04:15:42,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36666.666666666664, ans=0.125 +2024-08-03 04:15:45,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36666.666666666664, ans=0.125 +2024-08-03 04:19:05,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=36703.333333333336, ans=0.125 +2024-08-03 04:19:05,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=36703.333333333336, ans=0.0 +2024-08-03 04:19:15,072 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.378e+02 1.616e+02 1.965e+02 4.698e+02, threshold=3.232e+02, percent-clipped=3.0 +2024-08-03 04:19:15,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=36740.0, ans=0.125 +2024-08-03 04:19:22,262 INFO [train.py:1114] (1/4) Epoch 3, batch 2750, loss[loss=0.2637, simple_loss=0.3333, pruned_loss=0.09705, over 13322.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3583, pruned_loss=0.1189, over 2636806.56 frames. 
], batch size: 34, lr: 3.24e-02, grad_scale: 16.0 +2024-08-03 04:19:22,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=36776.666666666664, ans=0.0 +2024-08-03 04:19:36,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=36813.333333333336, ans=0.125 +2024-08-03 04:19:44,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=36850.0, ans=0.0 +2024-08-03 04:19:52,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=36886.666666666664, ans=0.025 +2024-08-03 04:20:05,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.31 vs. limit=10.0 +2024-08-03 04:20:09,777 INFO [train.py:1114] (1/4) Epoch 3, batch 2800, loss[loss=0.386, simple_loss=0.4084, pruned_loss=0.1818, over 9520.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3585, pruned_loss=0.1193, over 2628446.44 frames. ], batch size: 97, lr: 3.24e-02, grad_scale: 32.0 +2024-08-03 04:20:20,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=36996.666666666664, ans=0.0 +2024-08-03 04:20:26,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=37033.333333333336, ans=0.125 +2024-08-03 04:20:29,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=37033.333333333336, ans=0.035 +2024-08-03 04:20:29,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=37033.333333333336, ans=0.5 +2024-08-03 04:20:30,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37033.333333333336, ans=0.125 +2024-08-03 04:20:33,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.09 vs. limit=15.0 +2024-08-03 04:20:40,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=37070.0, ans=0.0028108695652173917 +2024-08-03 04:20:42,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=37070.0, ans=0.0 +2024-08-03 04:20:46,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.344e+02 1.514e+02 1.782e+02 3.763e+02, threshold=3.028e+02, percent-clipped=1.0 +2024-08-03 04:20:53,515 INFO [train.py:1114] (1/4) Epoch 3, batch 2850, loss[loss=0.2922, simple_loss=0.3493, pruned_loss=0.1176, over 13362.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3594, pruned_loss=0.1199, over 2622225.14 frames. 
], batch size: 35, lr: 3.23e-02, grad_scale: 32.0 +2024-08-03 04:20:53,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=37143.333333333336, ans=0.125 +2024-08-03 04:20:53,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=37143.333333333336, ans=0.0 +2024-08-03 04:20:59,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.79 vs. limit=15.0 +2024-08-03 04:21:07,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.44 vs. limit=10.0 +2024-08-03 04:21:17,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=37216.666666666664, ans=0.125 +2024-08-03 04:21:29,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=37253.333333333336, ans=0.025 +2024-08-03 04:21:30,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=37253.333333333336, ans=0.2 +2024-08-03 04:21:36,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=37290.0, ans=0.125 +2024-08-03 04:21:38,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=37290.0, ans=0.125 +2024-08-03 04:21:41,447 INFO [train.py:1114] (1/4) Epoch 3, batch 2900, loss[loss=0.3164, simple_loss=0.3652, pruned_loss=0.1338, over 13358.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3603, pruned_loss=0.1197, over 2632068.76 frames. ], batch size: 36, lr: 3.23e-02, grad_scale: 32.0 +2024-08-03 04:21:55,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37363.333333333336, ans=0.1 +2024-08-03 04:21:57,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=37363.333333333336, ans=0.0027471014492753618 +2024-08-03 04:22:17,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.58 vs. limit=6.0 +2024-08-03 04:22:19,106 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.112e+02 1.360e+02 1.545e+02 1.848e+02 3.511e+02, threshold=3.091e+02, percent-clipped=1.0 +2024-08-03 04:22:25,252 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:22:26,070 INFO [train.py:1114] (1/4) Epoch 3, batch 2950, loss[loss=0.2699, simple_loss=0.3348, pruned_loss=0.1025, over 13326.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3589, pruned_loss=0.1193, over 2629649.57 frames. 
], batch size: 34, lr: 3.22e-02, grad_scale: 32.0 +2024-08-03 04:22:49,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=37583.333333333336, ans=10.0 +2024-08-03 04:22:54,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=37620.0, ans=0.125 +2024-08-03 04:23:09,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.56 vs. limit=22.5 +2024-08-03 04:23:11,265 INFO [train.py:1114] (1/4) Epoch 3, batch 3000, loss[loss=0.2967, simple_loss=0.3608, pruned_loss=0.1163, over 13546.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3584, pruned_loss=0.1191, over 2630172.38 frames. ], batch size: 37, lr: 3.22e-02, grad_scale: 16.0 +2024-08-03 04:23:11,266 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 04:23:46,531 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.1303, 3.3037, 3.2544, 1.4418], device='cuda:1') +2024-08-03 04:23:49,063 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2357, simple_loss=0.3301, pruned_loss=0.07069, over 944034.00 frames. +2024-08-03 04:23:49,064 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 04:25:32,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.344e+02 1.558e+02 1.814e+02 2.891e+02, threshold=3.117e+02, percent-clipped=0.0 +2024-08-03 04:25:42,602 INFO [train.py:1114] (1/4) Epoch 3, batch 3050, loss[loss=0.2929, simple_loss=0.3492, pruned_loss=0.1183, over 13547.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3597, pruned_loss=0.1198, over 2626845.28 frames. ], batch size: 35, lr: 3.21e-02, grad_scale: 8.0 +2024-08-03 04:25:44,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=12.0 +2024-08-03 04:25:57,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37913.333333333336, ans=0.125 +2024-08-03 04:26:18,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.62 vs. limit=15.0 +2024-08-03 04:26:34,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=37986.666666666664, ans=0.125 +2024-08-03 04:26:35,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=37986.666666666664, ans=0.0 +2024-08-03 04:26:39,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-08-03 04:27:00,290 INFO [train.py:1114] (1/4) Epoch 3, batch 3100, loss[loss=0.3085, simple_loss=0.3721, pruned_loss=0.1225, over 13320.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3578, pruned_loss=0.1184, over 2626274.35 frames. ], batch size: 46, lr: 3.21e-02, grad_scale: 8.0 +2024-08-03 04:27:09,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.49 vs. 
limit=15.0 +2024-08-03 04:27:11,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38060.0, ans=0.125 +2024-08-03 04:27:21,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=38096.666666666664, ans=0.125 +2024-08-03 04:27:50,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=38206.666666666664, ans=0.2 +2024-08-03 04:27:53,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.343e+02 1.458e+02 1.761e+02 2.606e+02, threshold=2.915e+02, percent-clipped=0.0 +2024-08-03 04:27:58,277 INFO [train.py:1114] (1/4) Epoch 3, batch 3150, loss[loss=0.3113, simple_loss=0.3734, pruned_loss=0.1246, over 12964.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3579, pruned_loss=0.1184, over 2627372.71 frames. ], batch size: 48, lr: 3.20e-02, grad_scale: 8.0 +2024-08-03 04:28:02,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=38243.333333333336, ans=0.125 +2024-08-03 04:28:06,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=38280.0, ans=0.07 +2024-08-03 04:28:08,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38280.0, ans=0.1 +2024-08-03 04:28:15,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38316.666666666664, ans=0.125 +2024-08-03 04:28:28,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=38353.333333333336, ans=0.04949747468305833 +2024-08-03 04:28:29,952 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.102e-03 +2024-08-03 04:28:39,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38390.0, ans=0.125 +2024-08-03 04:28:42,456 INFO [train.py:1114] (1/4) Epoch 3, batch 3200, loss[loss=0.3014, simple_loss=0.3617, pruned_loss=0.1206, over 13533.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3574, pruned_loss=0.1183, over 2633561.42 frames. ], batch size: 37, lr: 3.20e-02, grad_scale: 16.0 +2024-08-03 04:28:48,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=38426.666666666664, ans=0.0 +2024-08-03 04:29:06,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=38500.0, ans=0.2 +2024-08-03 04:29:20,718 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.433e+02 1.626e+02 1.828e+02 2.707e+02, threshold=3.253e+02, percent-clipped=0.0 +2024-08-03 04:29:25,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=38610.0, ans=0.0 +2024-08-03 04:29:26,741 INFO [train.py:1114] (1/4) Epoch 3, batch 3250, loss[loss=0.2834, simple_loss=0.3539, pruned_loss=0.1065, over 13384.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3578, pruned_loss=0.1177, over 2638066.09 frames. 
], batch size: 38, lr: 3.19e-02, grad_scale: 16.0 +2024-08-03 04:29:26,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=38610.0, ans=0.125 +2024-08-03 04:29:27,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=38610.0, ans=0.002476086956521739 +2024-08-03 04:29:36,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=38646.666666666664, ans=0.025 +2024-08-03 04:29:44,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=38683.333333333336, ans=0.2 +2024-08-03 04:30:04,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=38756.666666666664, ans=0.2 +2024-08-03 04:30:10,327 INFO [train.py:1114] (1/4) Epoch 3, batch 3300, loss[loss=0.3436, simple_loss=0.3987, pruned_loss=0.1443, over 12866.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3549, pruned_loss=0.1161, over 2639973.52 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-03 04:30:14,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=38793.333333333336, ans=0.2 +2024-08-03 04:30:20,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=38830.0, ans=0.0 +2024-08-03 04:30:21,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.70 vs. limit=15.0 +2024-08-03 04:30:25,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=38830.0, ans=0.2 +2024-08-03 04:30:28,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38866.666666666664, ans=0.1 +2024-08-03 04:30:33,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=38866.666666666664, ans=0.0 +2024-08-03 04:30:33,659 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=12.0 +2024-08-03 04:30:44,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=38903.333333333336, ans=0.0 +2024-08-03 04:30:51,791 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.360e+02 1.548e+02 1.881e+02 7.173e+02, threshold=3.096e+02, percent-clipped=4.0 +2024-08-03 04:30:58,396 INFO [train.py:1114] (1/4) Epoch 3, batch 3350, loss[loss=0.3081, simple_loss=0.3637, pruned_loss=0.1263, over 13076.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3567, pruned_loss=0.1175, over 2630391.84 frames. 
], batch size: 48, lr: 3.18e-02, grad_scale: 16.0 +2024-08-03 04:31:01,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=38976.666666666664, ans=0.0023963768115942037 +2024-08-03 04:31:18,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39050.0, ans=0.1 +2024-08-03 04:31:28,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=39086.666666666664, ans=0.125 +2024-08-03 04:31:42,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39086.666666666664, ans=0.1 +2024-08-03 04:32:21,173 INFO [train.py:1114] (1/4) Epoch 3, batch 3400, loss[loss=0.292, simple_loss=0.3538, pruned_loss=0.1151, over 13550.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3566, pruned_loss=0.1175, over 2625463.37 frames. ], batch size: 31, lr: 3.18e-02, grad_scale: 16.0 +2024-08-03 04:32:25,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-08-03 04:32:26,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=39160.0, ans=0.2 +2024-08-03 04:32:32,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=39196.666666666664, ans=0.0 +2024-08-03 04:32:45,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=39233.333333333336, ans=0.025 +2024-08-03 04:32:45,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.32 vs. limit=15.0 +2024-08-03 04:32:52,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=39270.0, ans=0.0 +2024-08-03 04:33:16,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.382e+02 1.601e+02 1.847e+02 2.492e+02, threshold=3.202e+02, percent-clipped=0.0 +2024-08-03 04:33:20,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39343.333333333336, ans=0.1 +2024-08-03 04:33:21,218 INFO [train.py:1114] (1/4) Epoch 3, batch 3450, loss[loss=0.351, simple_loss=0.3949, pruned_loss=0.1536, over 12857.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3567, pruned_loss=0.1179, over 2629386.66 frames. ], batch size: 52, lr: 3.17e-02, grad_scale: 8.0 +2024-08-03 04:33:42,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.58 vs. limit=15.0 +2024-08-03 04:33:48,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39453.333333333336, ans=0.1 +2024-08-03 04:33:48,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.25 vs. 
limit=22.5 +2024-08-03 04:33:54,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=39490.0, ans=0.125 +2024-08-03 04:33:56,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39490.0, ans=0.1 +2024-08-03 04:33:58,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=39490.0, ans=12.0 +2024-08-03 04:34:03,917 INFO [train.py:1114] (1/4) Epoch 3, batch 3500, loss[loss=0.2568, simple_loss=0.3311, pruned_loss=0.09125, over 13529.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.3553, pruned_loss=0.1171, over 2630547.15 frames. ], batch size: 34, lr: 3.17e-02, grad_scale: 8.0 +2024-08-03 04:34:09,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39526.666666666664, ans=0.0 +2024-08-03 04:34:27,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=39600.0, ans=0.07 +2024-08-03 04:34:42,758 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.446e+02 1.687e+02 2.018e+02 4.896e+02, threshold=3.374e+02, percent-clipped=2.0 +2024-08-03 04:34:46,984 INFO [train.py:1114] (1/4) Epoch 3, batch 3550, loss[loss=0.3665, simple_loss=0.4117, pruned_loss=0.1606, over 12315.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3587, pruned_loss=0.1193, over 2629513.08 frames. ], batch size: 58, lr: 3.16e-02, grad_scale: 8.0 +2024-08-03 04:35:08,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=39746.666666666664, ans=0.2 +2024-08-03 04:35:15,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=39783.333333333336, ans=0.125 +2024-08-03 04:35:20,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=11.80 vs. limit=12.0 +2024-08-03 04:35:25,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39820.0, ans=0.125 +2024-08-03 04:35:36,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=39856.666666666664, ans=0.125 +2024-08-03 04:35:36,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=39856.666666666664, ans=0.125 +2024-08-03 04:35:40,846 INFO [train.py:1114] (1/4) Epoch 3, batch 3600, loss[loss=0.3795, simple_loss=0.4105, pruned_loss=0.1743, over 9154.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3661, pruned_loss=0.1272, over 2488978.70 frames. 
], batch size: 96, lr: 3.16e-02, grad_scale: 16.0 +2024-08-03 04:35:49,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=39930.0, ans=0.1 +2024-08-03 04:35:50,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=39930.0, ans=0.0021891304347826084 +2024-08-03 04:35:54,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=39930.0, ans=0.025 +2024-08-03 04:35:58,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39966.666666666664, ans=0.0 +2024-08-03 04:36:01,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=39966.666666666664, ans=0.05 +2024-08-03 04:36:01,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.76 vs. limit=15.0 +2024-08-03 04:36:12,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=40003.333333333336, ans=0.07 +2024-08-03 04:36:13,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40003.333333333336, ans=0.125 +2024-08-03 04:36:13,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=40003.333333333336, ans=0.07 +2024-08-03 04:36:56,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.86 vs. limit=12.0 +2024-08-03 04:37:20,579 INFO [train.py:1114] (1/4) Epoch 4, batch 0, loss[loss=0.2879, simple_loss=0.3463, pruned_loss=0.1148, over 13346.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3463, pruned_loss=0.1148, over 13346.00 frames. ], batch size: 33, lr: 2.95e-02, grad_scale: 32.0 +2024-08-03 04:37:20,580 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 04:37:30,575 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2412, simple_loss=0.337, pruned_loss=0.07274, over 944034.00 frames. 
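Two log patterns recur throughout this file and are worth decoding. `ScheduledFloat: name=..., batch_count=..., ans=...` reports the current value (`ans`) of a hyperparameter that is scheduled as a function of `batch_count`, and the `WARNING [optim.py] Clipping_scale=2.0, grad-norm quartiles ..., threshold=..., percent-clipped=...` lines report the min/Q1/median/Q3/max of recent gradient norms together with the clipping threshold; in the lines above, the threshold matches `clipping_scale` times the logged median up to rounding (e.g. 2.0 × 1.589e+02 = 3.178e+02). The sketch below is a minimal, hypothetical reconstruction of that bookkeeping, not the actual icefall/zipformer source: the function names, the piecewise-linear interpretation of `ScheduledFloat`, and the reading of `percent-clipped` as the share of recent batches whose norm exceeded the threshold are all assumptions.

```python
# Minimal sketch of the two recurring log patterns above.
# Assumptions (NOT taken from the actual train.py/optim.py source):
#  - ScheduledFloat is piecewise-linear in batch_count between breakpoints;
#  - the clipping threshold is clipping_scale * median of recent grad norms,
#    which matches the arithmetic of the WARNING lines up to rounding;
#  - percent-clipped is the share of recent batches whose norm exceeded it.
import numpy as np

def scheduled_float(batch_count: float, breakpoints: list[tuple[float, float]]) -> float:
    """Value of a scheduled hyperparameter at this batch_count ('ans' in the logs)."""
    xs, ys = zip(*breakpoints)
    return float(np.interp(batch_count, xs, ys))

def grad_norm_report(recent_norms: np.ndarray, clipping_scale: float = 2.0):
    """Reproduce 'grad-norm quartiles a b c d e, threshold=t, percent-clipped=p'."""
    q = np.quantile(recent_norms, [0.0, 0.25, 0.5, 0.75, 1.0])  # min, Q1, median, Q3, max
    threshold = clipping_scale * q[2]
    percent_clipped = 100.0 * float(np.mean(recent_norms > threshold))
    return q, threshold, percent_clipped

# Sanity check against the first WARNING line in this section:
# quartiles 1.060e+02 1.365e+02 1.589e+02 1.919e+02 3.211e+02, threshold=3.178e+02
q = np.array([1.060e+02, 1.365e+02, 1.589e+02, 1.919e+02, 3.211e+02])
assert abs(2.0 * q[2] - 3.178e+02) < 1e-6  # threshold = clipping_scale * median

# Hypothetical schedule: np.interp clamps beyond the last breakpoint.
print(scheduled_float(33440.0, [(0.0, 0.3), (20000.0, 0.1)]))
```

Under this reading, a `percent-clipped` of 2.0 with threshold 3.178e+02 would mean roughly one in fifty recent batches had a gradient norm above twice the running median, which is consistent with the max quartile (3.211e+02) sitting just above the threshold.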
+2024-08-03 04:37:30,576 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 04:43:26,868 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.350e+02 1.466e+02 1.683e+02 2.712e+02, threshold=2.931e+02, percent-clipped=0.0 +2024-08-03 04:43:33,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40080.333333333336, ans=0.1 +2024-08-03 04:43:35,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=40080.333333333336, ans=0.125 +2024-08-03 04:45:13,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=40117.0, ans=0.125 +2024-08-03 04:45:16,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=40117.0, ans=0.125 +2024-08-03 04:45:30,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40190.333333333336, ans=0.125 +2024-08-03 04:45:39,709 INFO [train.py:1114] (1/4) Epoch 4, batch 50, loss[loss=0.2444, simple_loss=0.3074, pruned_loss=0.09069, over 13406.00 frames. ], tot_loss[loss=0.3039, simple_loss=0.3634, pruned_loss=0.1221, over 578731.08 frames. ], batch size: 32, lr: 2.95e-02, grad_scale: 32.0 +2024-08-03 04:45:40,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.04 vs. limit=10.0 +2024-08-03 04:45:41,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=40227.0, ans=0.2 +2024-08-03 04:45:55,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=40263.666666666664, ans=0.125 +2024-08-03 04:45:56,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.02 vs. limit=15.0 +2024-08-03 04:46:00,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=40300.333333333336, ans=0.125 +2024-08-03 04:46:09,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40300.333333333336, ans=0.125 +2024-08-03 04:46:16,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.47 vs. limit=10.0 +2024-08-03 04:46:16,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=12.0 +2024-08-03 04:46:26,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=40373.666666666664, ans=0.2 +2024-08-03 04:46:29,583 INFO [train.py:1114] (1/4) Epoch 4, batch 100, loss[loss=0.2952, simple_loss=0.3497, pruned_loss=0.1203, over 13530.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3608, pruned_loss=0.1194, over 1025906.60 frames. 
], batch size: 35, lr: 2.94e-02, grad_scale: 32.0 +2024-08-03 04:46:29,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40410.333333333336, ans=0.125 +2024-08-03 04:46:33,366 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.339e+02 1.516e+02 1.849e+02 3.720e+02, threshold=3.031e+02, percent-clipped=4.0 +2024-08-03 04:46:43,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=40447.0, ans=0.002076739130434782 +2024-08-03 04:47:11,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=40557.0, ans=0.125 +2024-08-03 04:47:17,274 INFO [train.py:1114] (1/4) Epoch 4, batch 150, loss[loss=0.286, simple_loss=0.3451, pruned_loss=0.1135, over 13423.00 frames. ], tot_loss[loss=0.2925, simple_loss=0.3547, pruned_loss=0.1151, over 1387121.22 frames. ], batch size: 32, lr: 2.94e-02, grad_scale: 32.0 +2024-08-03 04:47:17,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=15.0 +2024-08-03 04:47:21,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.67 vs. limit=15.0 +2024-08-03 04:47:27,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40630.333333333336, ans=0.125 +2024-08-03 04:47:28,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=40630.333333333336, ans=0.125 +2024-08-03 04:47:35,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=40630.333333333336, ans=0.0 +2024-08-03 04:47:53,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40703.666666666664, ans=0.125 +2024-08-03 04:47:54,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=40740.333333333336, ans=0.035 +2024-08-03 04:48:04,766 INFO [train.py:1114] (1/4) Epoch 4, batch 200, loss[loss=0.2726, simple_loss=0.3494, pruned_loss=0.09792, over 12407.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3513, pruned_loss=0.1126, over 1665788.74 frames. ], batch size: 58, lr: 2.93e-02, grad_scale: 16.0 +2024-08-03 04:48:09,213 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.266e+02 1.437e+02 1.719e+02 2.508e+02, threshold=2.875e+02, percent-clipped=0.0 +2024-08-03 04:48:19,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0 +2024-08-03 04:48:21,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=15.0 +2024-08-03 04:48:24,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=40850.333333333336, ans=0.2 +2024-08-03 04:48:33,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.02 vs. 
limit=10.0
+2024-08-03 04:48:36,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=40887.0, ans=0.125
+2024-08-03 04:48:50,622 INFO [train.py:1114] (1/4) Epoch 4, batch 250, loss[loss=0.2805, simple_loss=0.3571, pruned_loss=0.1019, over 13237.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3515, pruned_loss=0.113, over 1884708.04 frames. ], batch size: 46, lr: 2.93e-02, grad_scale: 16.0
+2024-08-03 04:48:50,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=40960.333333333336, ans=0.125
+2024-08-03 04:49:11,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=41033.666666666664, ans=0.015
+2024-08-03 04:49:16,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=41033.666666666664, ans=0.0019492028985507257
+2024-08-03 04:49:21,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0
+2024-08-03 04:49:41,443 INFO [train.py:1114] (1/4) Epoch 4, batch 300, loss[loss=0.2751, simple_loss=0.3465, pruned_loss=0.1018, over 13448.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.35, pruned_loss=0.1121, over 2052233.84 frames. ], batch size: 42, lr: 2.92e-02, grad_scale: 16.0
+2024-08-03 04:49:46,016 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.331e+02 1.504e+02 1.895e+02 3.054e+02, threshold=3.007e+02, percent-clipped=2.0
+2024-08-03 04:49:54,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41180.333333333336, ans=0.1
+2024-08-03 04:49:59,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.05 vs. limit=22.5
+2024-08-03 04:50:24,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=41290.333333333336, ans=0.04949747468305833
+2024-08-03 04:50:25,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41290.333333333336, ans=0.1
+2024-08-03 04:50:26,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=41290.333333333336, ans=0.2
+2024-08-03 04:50:29,618 INFO [train.py:1114] (1/4) Epoch 4, batch 350, loss[loss=0.2715, simple_loss=0.3238, pruned_loss=0.1096, over 13589.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3503, pruned_loss=0.112, over 2183980.53 frames. ], batch size: 33, lr: 2.92e-02, grad_scale: 16.0
+2024-08-03 04:50:44,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.54 vs. limit=12.0
+2024-08-03 04:51:00,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=41437.0, ans=0.5
+2024-08-03 04:51:13,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.88 vs. limit=22.5
+2024-08-03 04:51:17,216 INFO [train.py:1114] (1/4) Epoch 4, batch 400, loss[loss=0.2958, simple_loss=0.3603, pruned_loss=0.1156, over 13348.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3495, pruned_loss=0.1117, over 2286944.40 frames. ], batch size: 37, lr: 2.91e-02, grad_scale: 32.0
+2024-08-03 04:51:21,801 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.350e+02 1.537e+02 1.828e+02 3.072e+02, threshold=3.074e+02, percent-clipped=1.0
+2024-08-03 04:51:26,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=41547.0, ans=0.125
+2024-08-03 04:51:27,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.29 vs. limit=15.0
+2024-08-03 04:51:28,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=41547.0, ans=0.125
+2024-08-03 04:51:31,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=22.5
+2024-08-03 04:51:34,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41547.0, ans=0.1
+2024-08-03 04:52:00,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.47 vs. limit=22.5
+2024-08-03 04:52:05,342 INFO [train.py:1114] (1/4) Epoch 4, batch 450, loss[loss=0.3258, simple_loss=0.3896, pruned_loss=0.131, over 13555.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3498, pruned_loss=0.1117, over 2360019.05 frames. ], batch size: 38, lr: 2.91e-02, grad_scale: 32.0
+2024-08-03 04:52:12,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0
+2024-08-03 04:52:16,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=41693.666666666664, ans=0.04949747468305833
+2024-08-03 04:52:24,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=41730.333333333336, ans=0.125
+2024-08-03 04:52:25,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=41730.333333333336, ans=0.125
+2024-08-03 04:52:35,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=41767.0, ans=0.0017897826086956514
+2024-08-03 04:52:45,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=41803.666666666664, ans=0.0017818115942028999
+2024-08-03 04:52:58,606 INFO [train.py:1114] (1/4) Epoch 4, batch 500, loss[loss=0.261, simple_loss=0.3414, pruned_loss=0.09027, over 13436.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3481, pruned_loss=0.1107, over 2425520.45 frames. ], batch size: 43, lr: 2.90e-02, grad_scale: 16.0
+2024-08-03 04:53:03,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=12.0
+2024-08-03 04:53:04,080 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.429e+02 1.668e+02 2.120e+02 3.628e+02, threshold=3.335e+02, percent-clipped=2.0
+2024-08-03 04:53:47,139 INFO [train.py:1114] (1/4) Epoch 4, batch 550, loss[loss=0.3435, simple_loss=0.3919, pruned_loss=0.1475, over 13026.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3485, pruned_loss=0.111, over 2467493.11 frames. ], batch size: 48, lr: 2.90e-02, grad_scale: 16.0
+2024-08-03 04:53:49,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42060.333333333336, ans=0.1
+2024-08-03 04:54:10,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0
+2024-08-03 04:54:17,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=42170.333333333336, ans=0.125
+2024-08-03 04:54:23,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=42170.333333333336, ans=0.04949747468305833
+2024-08-03 04:54:27,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=42207.0, ans=0.125
+2024-08-03 04:54:27,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.13 vs. limit=22.5
+2024-08-03 04:54:29,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=42207.0, ans=0.0
+2024-08-03 04:54:35,659 INFO [train.py:1114] (1/4) Epoch 4, batch 600, loss[loss=0.3163, simple_loss=0.3759, pruned_loss=0.1283, over 13318.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3486, pruned_loss=0.1109, over 2507676.62 frames. ], batch size: 46, lr: 2.90e-02, grad_scale: 8.0
+2024-08-03 04:54:38,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=42243.666666666664, ans=0.0
+2024-08-03 04:54:40,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=42243.666666666664, ans=0.125
+2024-08-03 04:54:42,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.301e+02 1.482e+02 1.829e+02 3.304e+02, threshold=2.963e+02, percent-clipped=0.0
+2024-08-03 04:54:44,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.94 vs. limit=15.0
+2024-08-03 04:54:48,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=42280.333333333336, ans=0.125
+2024-08-03 04:55:05,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=42317.0, ans=0.015
+2024-08-03 04:55:24,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=42390.333333333336, ans=0.125
+2024-08-03 04:55:26,870 INFO [train.py:1114] (1/4) Epoch 4, batch 650, loss[loss=0.2859, simple_loss=0.3592, pruned_loss=0.1063, over 13552.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3476, pruned_loss=0.1102, over 2543391.11 frames. ], batch size: 37, lr: 2.89e-02, grad_scale: 8.0
+2024-08-03 04:55:39,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.32 vs. limit=22.5
+2024-08-03 04:56:11,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42573.666666666664, ans=0.125
+2024-08-03 04:56:11,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=42573.666666666664, ans=0.0
+2024-08-03 04:56:17,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42573.666666666664, ans=0.125
+2024-08-03 04:56:19,576 INFO [train.py:1114] (1/4) Epoch 4, batch 700, loss[loss=0.2968, simple_loss=0.3472, pruned_loss=0.1232, over 13514.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3482, pruned_loss=0.1106, over 2565982.69 frames. ], batch size: 35, lr: 2.89e-02, grad_scale: 8.0
+2024-08-03 04:56:26,128 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.281e+02 1.426e+02 1.623e+02 2.957e+02, threshold=2.853e+02, percent-clipped=0.0
+2024-08-03 04:56:29,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=42610.333333333336, ans=0.2
+2024-08-03 04:56:40,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=42647.0, ans=0.0
+2024-08-03 04:56:42,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=42683.666666666664, ans=0.125
+2024-08-03 04:56:58,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=42720.333333333336, ans=0.125
+2024-08-03 04:56:58,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=42720.333333333336, ans=0.0
+2024-08-03 04:57:00,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.59 vs. limit=22.5
+2024-08-03 04:57:09,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.09 vs. limit=10.0
+2024-08-03 04:57:09,842 INFO [train.py:1114] (1/4) Epoch 4, batch 750, loss[loss=0.254, simple_loss=0.3331, pruned_loss=0.08748, over 13353.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3467, pruned_loss=0.1099, over 2583083.99 frames. ], batch size: 37, lr: 2.88e-02, grad_scale: 8.0
+2024-08-03 04:57:13,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=42793.666666666664, ans=0.2
+2024-08-03 04:57:15,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=42793.666666666664, ans=0.0015665942028985514
+2024-08-03 04:57:30,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42867.0, ans=0.125
+2024-08-03 04:57:37,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=42867.0, ans=0.2
+2024-08-03 04:57:39,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=42903.666666666664, ans=0.0
+2024-08-03 04:57:49,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=42940.333333333336, ans=0.125
+2024-08-03 04:57:59,779 INFO [train.py:1114] (1/4) Epoch 4, batch 800, loss[loss=0.2545, simple_loss=0.3177, pruned_loss=0.09559, over 13337.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3469, pruned_loss=0.1101, over 2597405.45 frames. ], batch size: 33, lr: 2.88e-02, grad_scale: 16.0
+2024-08-03 04:58:02,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=42977.0, ans=0.2
+2024-08-03 04:58:02,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42977.0, ans=0.1
+2024-08-03 04:58:06,221 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.323e+02 1.556e+02 1.905e+02 4.049e+02, threshold=3.112e+02, percent-clipped=3.0
+2024-08-03 04:58:10,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=43013.666666666664, ans=0.125
+2024-08-03 04:58:26,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43087.0, ans=0.1
+2024-08-03 04:58:27,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=43087.0, ans=0.025
+2024-08-03 04:58:40,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43123.666666666664, ans=0.1
+2024-08-03 04:58:45,957 INFO [train.py:1114] (1/4) Epoch 4, batch 850, loss[loss=0.305, simple_loss=0.3679, pruned_loss=0.121, over 13324.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3462, pruned_loss=0.1095, over 2610130.46 frames. ], batch size: 40, lr: 2.87e-02, grad_scale: 16.0
+2024-08-03 04:58:46,223 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:58:50,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=43160.333333333336, ans=0.0014868840579710142
+2024-08-03 04:58:58,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=43197.0, ans=0.001478913043478261
+2024-08-03 04:59:06,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.36 vs. limit=22.5
+2024-08-03 04:59:08,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.58 vs. limit=15.0
+2024-08-03 04:59:16,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=43270.333333333336, ans=0.2
+2024-08-03 04:59:22,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43270.333333333336, ans=0.125
+2024-08-03 04:59:25,622 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:59:29,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.86 vs. limit=10.0
+2024-08-03 04:59:31,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=43307.0, ans=0.09899494936611666
+2024-08-03 04:59:31,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.66 vs. limit=22.5
+2024-08-03 04:59:34,793 INFO [train.py:1114] (1/4) Epoch 4, batch 900, loss[loss=0.2467, simple_loss=0.3144, pruned_loss=0.08949, over 13337.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3468, pruned_loss=0.1097, over 2613331.68 frames. ], batch size: 33, lr: 2.87e-02, grad_scale: 16.0
+2024-08-03 04:59:36,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=43343.666666666664, ans=15.0
+2024-08-03 04:59:40,972 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.400e+02 1.608e+02 1.991e+02 3.200e+02, threshold=3.215e+02, percent-clipped=1.0
+2024-08-03 04:59:45,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.12 vs. limit=15.0
+2024-08-03 04:59:46,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43380.333333333336, ans=0.1
+2024-08-03 04:59:59,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.93 vs. limit=22.5
+2024-08-03 05:00:01,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=43453.666666666664, ans=0.125
+2024-08-03 05:00:22,625 INFO [train.py:1114] (1/4) Epoch 4, batch 950, loss[loss=0.2654, simple_loss=0.3373, pruned_loss=0.09673, over 13544.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3478, pruned_loss=0.1102, over 2614331.37 frames. ], batch size: 34, lr: 2.87e-02, grad_scale: 16.0
+2024-08-03 05:00:22,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=43527.0, ans=0.0014071739130434788
+2024-08-03 05:00:39,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.65 vs. limit=15.0
+2024-08-03 05:00:56,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=43637.0, ans=0.07
+2024-08-03 05:00:56,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=43637.0, ans=0.0
+2024-08-03 05:00:59,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=43637.0, ans=0.125
+2024-08-03 05:01:03,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=15.0
+2024-08-03 05:01:11,444 INFO [train.py:1114] (1/4) Epoch 4, batch 1000, loss[loss=0.2309, simple_loss=0.3041, pruned_loss=0.07888, over 13353.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3481, pruned_loss=0.1104, over 2611722.07 frames. ], batch size: 35, lr: 2.86e-02, grad_scale: 16.0
+2024-08-03 05:01:17,820 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.299e+02 1.424e+02 1.610e+02 2.784e+02, threshold=2.848e+02, percent-clipped=0.0
+2024-08-03 05:01:34,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.88 vs. limit=15.0
+2024-08-03 05:01:40,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43820.333333333336, ans=0.1
+2024-08-03 05:01:59,638 INFO [train.py:1114] (1/4) Epoch 4, batch 1050, loss[loss=0.2809, simple_loss=0.3497, pruned_loss=0.1061, over 13579.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3468, pruned_loss=0.1099, over 2616217.49 frames. ], batch size: 39, lr: 2.86e-02, grad_scale: 16.0
+2024-08-03 05:02:01,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.80 vs. limit=12.0
+2024-08-03 05:02:03,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=43893.666666666664, ans=0.09899494936611666
+2024-08-03 05:02:19,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=43967.0, ans=0.0
+2024-08-03 05:02:36,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=44003.666666666664, ans=0.125
+2024-08-03 05:02:42,397 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.77 vs. limit=10.0
+2024-08-03 05:02:45,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.30 vs. limit=10.0
+2024-08-03 05:02:50,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44040.333333333336, ans=0.125
+2024-08-03 05:02:51,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0
+2024-08-03 05:02:54,675 INFO [train.py:1114] (1/4) Epoch 4, batch 1100, loss[loss=0.2596, simple_loss=0.3253, pruned_loss=0.09697, over 13561.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3464, pruned_loss=0.1092, over 2620386.47 frames. ], batch size: 36, lr: 2.85e-02, grad_scale: 16.0
+2024-08-03 05:03:00,988 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.374e+02 1.585e+02 1.899e+02 4.895e+02, threshold=3.171e+02, percent-clipped=1.0
+2024-08-03 05:03:03,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=44113.666666666664, ans=0.95
+2024-08-03 05:03:03,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.11 vs. limit=10.0
+2024-08-03 05:03:06,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.84 vs. limit=22.5
+2024-08-03 05:03:10,385 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0
+2024-08-03 05:03:12,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.15 vs. limit=12.0
+2024-08-03 05:03:14,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=44150.333333333336, ans=0.0
+2024-08-03 05:03:17,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=44150.333333333336, ans=0.0
+2024-08-03 05:03:40,660 INFO [train.py:1114] (1/4) Epoch 4, batch 1150, loss[loss=0.2784, simple_loss=0.3401, pruned_loss=0.1084, over 13554.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.346, pruned_loss=0.1089, over 2618693.13 frames. ], batch size: 36, lr: 2.85e-02, grad_scale: 16.0
+2024-08-03 05:03:40,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=44260.333333333336, ans=0.125
+2024-08-03 05:03:55,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=44297.0, ans=0.125
+2024-08-03 05:03:55,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=44297.0, ans=0.125
+2024-08-03 05:04:03,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=44333.666666666664, ans=0.125
+2024-08-03 05:04:10,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44333.666666666664, ans=0.0
+2024-08-03 05:04:32,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=44443.666666666664, ans=0.2
+2024-08-03 05:04:32,838 INFO [train.py:1114] (1/4) Epoch 4, batch 1200, loss[loss=0.2891, simple_loss=0.3626, pruned_loss=0.1079, over 13577.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.346, pruned_loss=0.1085, over 2616665.81 frames. ], batch size: 39, lr: 2.84e-02, grad_scale: 32.0
+2024-08-03 05:04:39,345 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.292e+02 1.456e+02 1.641e+02 3.622e+02, threshold=2.911e+02, percent-clipped=1.0
+2024-08-03 05:04:45,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=44480.333333333336, ans=0.2
+2024-08-03 05:05:03,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=44553.666666666664, ans=0.125
+2024-08-03 05:06:13,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=44590.333333333336, ans=0.125
+2024-08-03 05:06:14,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.09 vs. limit=22.5
+2024-08-03 05:06:21,862 INFO [train.py:1114] (1/4) Epoch 4, batch 1250, loss[loss=0.2879, simple_loss=0.3566, pruned_loss=0.1096, over 13430.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3467, pruned_loss=0.1084, over 2628612.15 frames. ], batch size: 42, lr: 2.84e-02, grad_scale: 32.0
+2024-08-03 05:06:23,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=44627.0, ans=0.125
+2024-08-03 05:06:24,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=44627.0, ans=0.0
+2024-08-03 05:06:52,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44737.0, ans=0.0
+2024-08-03 05:06:55,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=44737.0, ans=0.025
+2024-08-03 05:07:08,955 INFO [train.py:1114] (1/4) Epoch 4, batch 1300, loss[loss=0.3178, simple_loss=0.3708, pruned_loss=0.1324, over 12925.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3456, pruned_loss=0.108, over 2631942.14 frames. ], batch size: 52, lr: 2.84e-02, grad_scale: 16.0
+2024-08-03 05:07:17,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=44810.333333333336, ans=0.95
+2024-08-03 05:07:18,016 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.334e+02 1.637e+02 2.034e+02 3.739e+02, threshold=3.274e+02, percent-clipped=6.0
+2024-08-03 05:07:25,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44847.0, ans=0.0
+2024-08-03 05:07:47,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=44920.333333333336, ans=0.2
+2024-08-03 05:07:53,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=44957.0, ans=0.125
+2024-08-03 05:08:01,413 INFO [train.py:1114] (1/4) Epoch 4, batch 1350, loss[loss=0.2534, simple_loss=0.3265, pruned_loss=0.09018, over 13550.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3454, pruned_loss=0.1077, over 2639223.23 frames. ], batch size: 37, lr: 2.83e-02, grad_scale: 8.0
+2024-08-03 05:08:02,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=44993.666666666664, ans=0.125
+2024-08-03 05:08:05,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=44993.666666666664, ans=0.125
+2024-08-03 05:08:07,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=44993.666666666664, ans=0.0
+2024-08-03 05:08:09,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=44993.666666666664, ans=0.125
+2024-08-03 05:08:17,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=45030.333333333336, ans=0.0010803623188405803
+2024-08-03 05:08:26,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=45067.0, ans=0.035
+2024-08-03 05:08:27,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=45067.0, ans=0.125
+2024-08-03 05:08:39,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.11 vs. limit=15.0
+2024-08-03 05:08:43,781 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=5.278e-03
+2024-08-03 05:08:50,554 INFO [train.py:1114] (1/4) Epoch 4, batch 1400, loss[loss=0.2484, simple_loss=0.309, pruned_loss=0.09389, over 13275.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3447, pruned_loss=0.1073, over 2643116.43 frames. ], batch size: 31, lr: 2.83e-02, grad_scale: 8.0
+2024-08-03 05:08:50,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=45177.0, ans=0.0
+2024-08-03 05:08:58,652 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.343e+02 1.530e+02 1.906e+02 3.012e+02, threshold=3.060e+02, percent-clipped=0.0
+2024-08-03 05:08:58,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=45213.666666666664, ans=0.125
+2024-08-03 05:09:15,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.37 vs. limit=15.0
+2024-08-03 05:09:16,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=15.0
+2024-08-03 05:09:17,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45287.0, ans=0.125
+2024-08-03 05:09:29,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=45323.666666666664, ans=0.2
+2024-08-03 05:09:37,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=45323.666666666664, ans=0.2
+2024-08-03 05:09:38,860 INFO [train.py:1114] (1/4) Epoch 4, batch 1450, loss[loss=0.3244, simple_loss=0.3822, pruned_loss=0.1334, over 13400.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3451, pruned_loss=0.1077, over 2641474.37 frames. ], batch size: 43, lr: 2.82e-02, grad_scale: 8.0
+2024-08-03 05:10:07,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45433.666666666664, ans=0.1
+2024-08-03 05:10:21,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45507.0, ans=0.1
+2024-08-03 05:10:22,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.77 vs. limit=15.0
+2024-08-03 05:10:22,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=45507.0, ans=0.125
+2024-08-03 05:10:27,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=45507.0, ans=0.0
+2024-08-03 05:10:28,999 INFO [train.py:1114] (1/4) Epoch 4, batch 1500, loss[loss=0.2834, simple_loss=0.3502, pruned_loss=0.1083, over 13409.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.345, pruned_loss=0.1075, over 2641262.42 frames. ], batch size: 39, lr: 2.82e-02, grad_scale: 8.0
+2024-08-03 05:10:30,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.75 vs. limit=22.5
+2024-08-03 05:10:36,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45543.666666666664, ans=0.125
+2024-08-03 05:10:37,536 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.331e+02 1.463e+02 1.698e+02 3.158e+02, threshold=2.927e+02, percent-clipped=1.0
+2024-08-03 05:11:23,486 INFO [train.py:1114] (1/4) Epoch 4, batch 1550, loss[loss=0.2587, simple_loss=0.3382, pruned_loss=0.08955, over 13418.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3448, pruned_loss=0.1076, over 2629858.16 frames. ], batch size: 41, lr: 2.81e-02, grad_scale: 8.0
+2024-08-03 05:11:25,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0
+2024-08-03 05:11:27,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=45727.0, ans=0.125
+2024-08-03 05:11:27,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0
+2024-08-03 05:11:57,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0
+2024-08-03 05:12:21,100 INFO [train.py:1114] (1/4) Epoch 4, batch 1600, loss[loss=0.2389, simple_loss=0.3181, pruned_loss=0.07988, over 13578.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3449, pruned_loss=0.1081, over 2624385.55 frames. ], batch size: 39, lr: 2.81e-02, grad_scale: 16.0
+2024-08-03 05:12:30,797 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.396e+02 1.598e+02 1.877e+02 3.901e+02, threshold=3.195e+02, percent-clipped=2.0
+2024-08-03 05:12:43,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45983.666666666664, ans=0.125
+2024-08-03 05:13:01,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=46057.0, ans=0.05
+2024-08-03 05:13:06,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=46057.0, ans=0.0
+2024-08-03 05:13:10,773 INFO [train.py:1114] (1/4) Epoch 4, batch 1650, loss[loss=0.2946, simple_loss=0.3678, pruned_loss=0.1107, over 13304.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3457, pruned_loss=0.1088, over 2620874.71 frames. ], batch size: 40, lr: 2.81e-02, grad_scale: 16.0
+2024-08-03 05:13:43,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46203.666666666664, ans=0.125
+2024-08-03 05:13:48,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=46203.666666666664, ans=0.0008252898550724641
+2024-08-03 05:13:58,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=46240.333333333336, ans=0.125
+2024-08-03 05:14:01,234 INFO [train.py:1114] (1/4) Epoch 4, batch 1700, loss[loss=0.2443, simple_loss=0.2996, pruned_loss=0.09448, over 13260.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.345, pruned_loss=0.108, over 2630462.62 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0
+2024-08-03 05:14:09,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.356e+02 1.607e+02 2.015e+02 3.197e+02, threshold=3.213e+02, percent-clipped=1.0
+2024-08-03 05:14:19,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=46350.333333333336, ans=0.07
+2024-08-03 05:14:47,549 INFO [train.py:1114] (1/4) Epoch 4, batch 1750, loss[loss=0.2467, simple_loss=0.3074, pruned_loss=0.09293, over 13530.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3443, pruned_loss=0.1079, over 2634109.85 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0
+2024-08-03 05:15:00,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0
+2024-08-03 05:15:01,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=28.68 vs. limit=22.5
+2024-08-03 05:15:07,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=46533.666666666664, ans=0.2
+2024-08-03 05:18:21,912 INFO [train.py:1114] (1/4) Epoch 4, batch 1800, loss[loss=0.2684, simple_loss=0.3371, pruned_loss=0.09986, over 13541.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3447, pruned_loss=0.1078, over 2635231.71 frames. ], batch size: 38, lr: 2.79e-02, grad_scale: 16.0
+2024-08-03 05:18:22,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46643.666666666664, ans=0.1
+2024-08-03 05:18:29,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=46643.666666666664, ans=0.125
+2024-08-03 05:18:30,257 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.378e+02 1.581e+02 2.012e+02 3.618e+02, threshold=3.161e+02, percent-clipped=2.0
+2024-08-03 05:18:30,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=46680.333333333336, ans=0.2
+2024-08-03 05:18:30,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=46680.333333333336, ans=0.125
+2024-08-03 05:18:41,708 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:18:59,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46790.333333333336, ans=0.125
+2024-08-03 05:19:00,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=46790.333333333336, ans=0.125
+2024-08-03 05:19:09,818 INFO [train.py:1114] (1/4) Epoch 4, batch 1850, loss[loss=0.2696, simple_loss=0.342, pruned_loss=0.09858, over 13396.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.344, pruned_loss=0.1075, over 2637863.53 frames. ], batch size: 39, lr: 2.79e-02, grad_scale: 16.0
+2024-08-03 05:19:27,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46900.333333333336, ans=0.125
+2024-08-03 05:19:45,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46937.0, ans=0.1
+2024-08-03 05:19:50,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46973.666666666664, ans=0.1
+2024-08-03 05:19:52,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=46973.666666666664, ans=0.0
+2024-08-03 05:19:58,246 INFO [train.py:1114] (1/4) Epoch 4, batch 1900, loss[loss=0.2947, simple_loss=0.3657, pruned_loss=0.1119, over 13321.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3441, pruned_loss=0.1069, over 2640015.75 frames. ], batch size: 40, lr: 2.79e-02, grad_scale: 16.0
+2024-08-03 05:20:06,387 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.297e+02 1.477e+02 1.706e+02 2.975e+02, threshold=2.953e+02, percent-clipped=0.0
+2024-08-03 05:20:12,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0
+2024-08-03 05:20:22,030 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:20:30,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=47120.333333333336, ans=0.125
+2024-08-03 05:20:34,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=47120.333333333336, ans=0.2
+2024-08-03 05:20:36,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47157.0, ans=0.125
+2024-08-03 05:20:37,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=47157.0, ans=0.0
+2024-08-03 05:20:45,529 INFO [train.py:1114] (1/4) Epoch 4, batch 1950, loss[loss=0.3042, simple_loss=0.3609, pruned_loss=0.1238, over 13573.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3463, pruned_loss=0.1078, over 2646747.75 frames. ], batch size: 36, lr: 2.78e-02, grad_scale: 16.0
+2024-08-03 05:21:03,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=47267.0, ans=0.0
+2024-08-03 05:21:05,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=47267.0, ans=0.125
+2024-08-03 05:21:19,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.95 vs. limit=10.0
+2024-08-03 05:21:27,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=47340.333333333336, ans=0.04949747468305833
+2024-08-03 05:21:33,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. limit=6.0
+2024-08-03 05:21:34,018 INFO [train.py:1114] (1/4) Epoch 4, batch 2000, loss[loss=0.2446, simple_loss=0.3016, pruned_loss=0.09384, over 13550.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3469, pruned_loss=0.1085, over 2636459.08 frames. ], batch size: 31, lr: 2.78e-02, grad_scale: 32.0
+2024-08-03 05:21:42,488 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.720e+01 1.383e+02 1.598e+02 1.904e+02 4.710e+02, threshold=3.195e+02, percent-clipped=1.0
+2024-08-03 05:21:46,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=47413.666666666664, ans=0.125
+2024-08-03 05:21:53,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47450.333333333336, ans=0.125
+2024-08-03 05:22:00,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.48 vs. limit=15.0
+2024-08-03 05:22:08,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=47487.0, ans=0.0
+2024-08-03 05:22:09,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=47487.0, ans=0.125
+2024-08-03 05:22:10,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47523.666666666664, ans=0.125
+2024-08-03 05:22:17,233 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:22:18,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=47523.666666666664, ans=0.025
+2024-08-03 05:22:20,556 INFO [train.py:1114] (1/4) Epoch 4, batch 2050, loss[loss=0.2927, simple_loss=0.3407, pruned_loss=0.1224, over 13424.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3464, pruned_loss=0.109, over 2633453.80 frames. ], batch size: 32, lr: 2.77e-02, grad_scale: 32.0
+2024-08-03 05:22:24,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=47560.333333333336, ans=0.0
+2024-08-03 05:22:33,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=47597.0, ans=0.2
+2024-08-03 05:22:36,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=47597.0, ans=0.07
+2024-08-03 05:22:47,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=47633.666666666664, ans=0.0
+2024-08-03 05:23:44,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47670.333333333336, ans=0.1
+2024-08-03 05:23:44,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47670.333333333336, ans=0.1
+2024-08-03 05:23:51,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=47707.0, ans=0.125
+2024-08-03 05:23:56,612 INFO [train.py:1114] (1/4) Epoch 4, batch 2100, loss[loss=0.2719, simple_loss=0.346, pruned_loss=0.09894, over 13540.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3449, pruned_loss=0.1074, over 2638350.64 frames. ], batch size: 37, lr: 2.77e-02, grad_scale: 32.0
+2024-08-03 05:24:00,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=47743.666666666664, ans=0.2
+2024-08-03 05:24:06,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=47743.666666666664, ans=0.07
+2024-08-03 05:24:06,853 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.313e+02 1.529e+02 1.934e+02 3.413e+02, threshold=3.058e+02, percent-clipped=1.0
+2024-08-03 05:24:07,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=47780.333333333336, ans=0.125
+2024-08-03 05:24:08,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47780.333333333336, ans=0.125
+2024-08-03 05:24:12,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=47780.333333333336, ans=0.125
+2024-08-03 05:24:20,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47817.0, ans=0.1
+2024-08-03 05:24:28,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=47853.666666666664, ans=0.025
+2024-08-03 05:24:42,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.01 vs. limit=15.0
+2024-08-03 05:24:42,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=47890.333333333336, ans=0.0
+2024-08-03 05:24:47,069 INFO [train.py:1114] (1/4) Epoch 4, batch 2150, loss[loss=0.2852, simple_loss=0.3457, pruned_loss=0.1124, over 13563.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3443, pruned_loss=0.1071, over 2647512.40 frames. ], batch size: 36, lr: 2.77e-02, grad_scale: 32.0
+2024-08-03 05:24:49,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=47927.0, ans=0.0
+2024-08-03 05:24:51,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=47927.0, ans=0.07
+2024-08-03 05:25:05,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=48000.333333333336, ans=0.07
+2024-08-03 05:25:17,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.83 vs. limit=15.0
+2024-08-03 05:25:27,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=15.0
+2024-08-03 05:25:33,684 INFO [train.py:1114] (1/4) Epoch 4, batch 2200, loss[loss=0.2662, simple_loss=0.3447, pruned_loss=0.09384, over 13407.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3442, pruned_loss=0.1068, over 2644267.88 frames. ], batch size: 39, lr: 2.76e-02, grad_scale: 32.0
+2024-08-03 05:25:35,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.52 vs. limit=10.0
+2024-08-03 05:25:42,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.479e+02 1.728e+02 2.109e+02 3.412e+02, threshold=3.456e+02, percent-clipped=2.0
+2024-08-03 05:25:50,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=48147.0, ans=0.125
+2024-08-03 05:26:08,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48220.333333333336, ans=0.0
+2024-08-03 05:26:22,232 INFO [train.py:1114] (1/4) Epoch 4, batch 2250, loss[loss=0.2491, simple_loss=0.3324, pruned_loss=0.08291, over 13355.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3444, pruned_loss=0.1072, over 2641252.07 frames. ], batch size: 37, lr: 2.76e-02, grad_scale: 32.0
+2024-08-03 05:26:56,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48403.666666666664, ans=0.0
+2024-08-03 05:27:02,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=48440.333333333336, ans=15.0
+2024-08-03 05:27:17,464 INFO [train.py:1114] (1/4) Epoch 4, batch 2300, loss[loss=0.2317, simple_loss=0.2993, pruned_loss=0.08206, over 13594.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.343, pruned_loss=0.1067, over 2637530.41 frames. ], batch size: 33, lr: 2.75e-02, grad_scale: 32.0
+2024-08-03 05:27:38,603 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.390e+02 1.580e+02 1.913e+02 3.341e+02, threshold=3.160e+02, percent-clipped=0.0
+2024-08-03 05:27:40,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=48513.666666666664, ans=0.025
+2024-08-03 05:27:46,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=48513.666666666664, ans=0.125
+2024-08-03 05:27:51,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=48550.333333333336, ans=0.125
+2024-08-03 05:27:52,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.16 vs. limit=8.0
+2024-08-03 05:28:00,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.06 vs. limit=22.5
+2024-08-03 05:28:12,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48623.666666666664, ans=0.0
+2024-08-03 05:28:18,341 INFO [train.py:1114] (1/4) Epoch 4, batch 2350, loss[loss=0.3024, simple_loss=0.3699, pruned_loss=0.1175, over 13549.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3428, pruned_loss=0.1066, over 2639755.44 frames. ], batch size: 38, lr: 2.75e-02, grad_scale: 32.0
+2024-08-03 05:28:36,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=48697.0, ans=0.2
+2024-08-03 05:29:04,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=48770.333333333336, ans=0.2
+2024-08-03 05:29:20,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48807.0, ans=0.0
+2024-08-03 05:29:32,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=48807.0, ans=0.00025934782608695757
+2024-08-03 05:29:32,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.06 vs. limit=22.5
+2024-08-03 05:29:35,509 INFO [train.py:1114] (1/4) Epoch 4, batch 2400, loss[loss=0.2689, simple_loss=0.3427, pruned_loss=0.09752, over 13522.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.343, pruned_loss=0.1064, over 2641172.27 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 32.0
+2024-08-03 05:29:51,915 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.312e+02 1.493e+02 1.944e+02 3.513e+02, threshold=2.987e+02, percent-clipped=1.0
+2024-08-03 05:30:29,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=48953.666666666664, ans=0.125
+2024-08-03 05:30:36,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.83 vs. limit=22.5
+2024-08-03 05:31:30,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48990.333333333336, ans=0.125
+2024-08-03 05:31:38,651 INFO [train.py:1114] (1/4) Epoch 4, batch 2450, loss[loss=0.3206, simple_loss=0.3849, pruned_loss=0.1281, over 13351.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.344, pruned_loss=0.1072, over 2631804.32 frames. ], batch size: 37, lr: 2.74e-02, grad_scale: 32.0
+2024-08-03 05:31:43,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.14 vs. limit=10.0
+2024-08-03 05:31:51,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49063.666666666664, ans=0.125
+2024-08-03 05:32:15,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=49100.333333333336, ans=0.2
+2024-08-03 05:32:26,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0
+2024-08-03 05:32:32,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=49173.666666666664, ans=0.2
+2024-08-03 05:32:36,603 INFO [train.py:1114] (1/4) Epoch 4, batch 2500, loss[loss=0.3035, simple_loss=0.3663, pruned_loss=0.1204, over 13394.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3435, pruned_loss=0.1063, over 2635239.76 frames. ], batch size: 39, lr: 2.74e-02, grad_scale: 32.0
+2024-08-03 05:32:38,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. limit=15.0
+2024-08-03 05:32:43,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49210.333333333336, ans=0.125
+2024-08-03 05:32:44,461 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.392e+02 1.612e+02 1.907e+02 3.604e+02, threshold=3.223e+02, percent-clipped=4.0
+2024-08-03 05:32:44,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49247.0, ans=0.125
+2024-08-03 05:32:48,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=49247.0, ans=0.0
+2024-08-03 05:33:00,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=49283.666666666664, ans=0.125
+2024-08-03 05:33:15,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=49320.333333333336, ans=0.00014775362318840586
+2024-08-03 05:33:39,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.63 vs. limit=6.0
+2024-08-03 05:33:41,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.82 vs. limit=15.0
+2024-08-03 05:33:44,738 INFO [train.py:1114] (1/4) Epoch 4, batch 2550, loss[loss=0.223, simple_loss=0.295, pruned_loss=0.07555, over 13541.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3433, pruned_loss=0.1063, over 2636923.89 frames. ], batch size: 31, lr: 2.73e-02, grad_scale: 32.0
+2024-08-03 05:33:53,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.46 vs. limit=15.0
+2024-08-03 05:34:15,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=49503.666666666664, ans=0.00010789855072463814
+2024-08-03 05:34:26,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=49503.666666666664, ans=0.025
+2024-08-03 05:34:35,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.58 vs. limit=6.0
+2024-08-03 05:34:36,330 INFO [train.py:1114] (1/4) Epoch 4, batch 2600, loss[loss=0.2515, simple_loss=0.32, pruned_loss=0.09149, over 13557.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3437, pruned_loss=0.1066, over 2636379.01 frames. ], batch size: 36, lr: 2.73e-02, grad_scale: 32.0
+2024-08-03 05:34:44,131 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.359e+02 1.570e+02 1.941e+02 3.532e+02, threshold=3.140e+02, percent-clipped=1.0
+2024-08-03 05:34:51,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=15.0
+2024-08-03 05:34:52,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.38 vs. limit=15.0
+2024-08-03 05:35:02,295 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:35:15,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0
+2024-08-03 05:35:24,688 INFO [train.py:1114] (1/4) Epoch 4, batch 2650, loss[loss=0.2885, simple_loss=0.3598, pruned_loss=0.1086, over 13300.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3439, pruned_loss=0.1068, over 2640196.29 frames. ], batch size: 46, lr: 2.73e-02, grad_scale: 16.0
+2024-08-03 05:35:27,568 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:35:31,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=49760.333333333336, ans=0.1
+2024-08-03 05:35:42,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=49833.666666666664, ans=0.025
+2024-08-03 05:36:07,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=49907.0, ans=0.125
+2024-08-03 05:36:15,381 INFO [train.py:1114] (1/4) Epoch 4, batch 2700, loss[loss=0.2743, simple_loss=0.3484, pruned_loss=0.1001, over 13557.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3435, pruned_loss=0.1063, over 2638086.19 frames. ], batch size: 40, lr: 2.72e-02, grad_scale: 16.0
+2024-08-03 05:36:17,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=49943.666666666664, ans=0.0
+2024-08-03 05:36:21,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49943.666666666664, ans=0.1
+2024-08-03 05:36:24,106 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.340e+02 1.529e+02 1.834e+02 2.682e+02, threshold=3.057e+02, percent-clipped=0.0
+2024-08-03 05:36:45,611 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:36:51,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. limit=15.0
+2024-08-03 05:36:56,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=50090.333333333336, ans=0.0
+2024-08-03 05:37:03,511 INFO [train.py:1114] (1/4) Epoch 4, batch 2750, loss[loss=0.2335, simple_loss=0.3057, pruned_loss=0.08065, over 13328.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3414, pruned_loss=0.1051, over 2636535.33 frames. ], batch size: 34, lr: 2.72e-02, grad_scale: 16.0
+2024-08-03 05:37:20,397 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.14 vs. limit=15.0
+2024-08-03 05:37:38,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50273.666666666664, ans=0.1
+2024-08-03 05:37:46,914 INFO [train.py:1114] (1/4) Epoch 4, batch 2800, loss[loss=0.3301, simple_loss=0.3805, pruned_loss=0.1398, over 9670.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3422, pruned_loss=0.1057, over 2628032.95 frames. ], batch size: 98, lr: 2.72e-02, grad_scale: 32.0
+2024-08-03 05:37:47,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=50310.333333333336, ans=0.04949747468305833
+2024-08-03 05:37:55,719 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.137e+02 1.473e+02 1.737e+02 2.107e+02 3.108e+02, threshold=3.473e+02, percent-clipped=1.0
+2024-08-03 05:38:11,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=12.0
+2024-08-03 05:38:28,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=50457.0, ans=0.125
+2024-08-03 05:38:31,579 INFO [train.py:1114] (1/4) Epoch 4, batch 2850, loss[loss=0.2487, simple_loss=0.3205, pruned_loss=0.08849, over 13369.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3429, pruned_loss=0.1062, over 2622168.35 frames. ], batch size: 35, lr: 2.71e-02, grad_scale: 16.0
+2024-08-03 05:38:35,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50493.666666666664, ans=0.125
+2024-08-03 05:38:45,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=50530.333333333336, ans=0.2
+2024-08-03 05:38:52,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=50530.333333333336, ans=0.0
+2024-08-03 05:38:53,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=50530.333333333336, ans=0.125
+2024-08-03 05:39:02,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=50567.0, ans=0.2
+2024-08-03 05:39:25,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.96 vs. limit=6.0
+2024-08-03 05:39:27,367 INFO [train.py:1114] (1/4) Epoch 4, batch 2900, loss[loss=0.2676, simple_loss=0.3297, pruned_loss=0.1027, over 13370.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.344, pruned_loss=0.1066, over 2632515.32 frames. ], batch size: 36, lr: 2.71e-02, grad_scale: 16.0
+2024-08-03 05:39:27,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50677.0, ans=0.1
+2024-08-03 05:39:39,679 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.312e+02 1.485e+02 1.747e+02 2.702e+02, threshold=2.970e+02, percent-clipped=0.0
+2024-08-03 05:39:42,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=50713.666666666664, ans=0.0
+2024-08-03 05:39:47,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=50750.333333333336, ans=0.0
+2024-08-03 05:39:56,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=50787.0, ans=0.025
+2024-08-03 05:40:02,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=50787.0, ans=0.0
+2024-08-03 05:40:03,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=50787.0, ans=0.2
+2024-08-03 05:40:05,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.84 vs. limit=10.0
+2024-08-03 05:40:07,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50823.666666666664, ans=0.125
+2024-08-03 05:40:11,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=50823.666666666664, ans=15.0
+2024-08-03 05:40:13,934 INFO [train.py:1114] (1/4) Epoch 4, batch 2950, loss[loss=0.2521, simple_loss=0.3218, pruned_loss=0.09118, over 13345.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.343, pruned_loss=0.1063, over 2630278.99 frames. ], batch size: 34, lr: 2.70e-02, grad_scale: 16.0
+2024-08-03 05:40:27,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=50897.0, ans=0.0
+2024-08-03 05:40:38,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.28 vs. limit=15.0
+2024-08-03 05:40:38,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.93 vs. limit=15.0
+2024-08-03 05:40:51,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=51007.0, ans=0.125
+2024-08-03 05:40:59,440 INFO [train.py:1114] (1/4) Epoch 4, batch 3000, loss[loss=0.2645, simple_loss=0.334, pruned_loss=0.09745, over 13547.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3422, pruned_loss=0.1056, over 2630081.62 frames. ], batch size: 37, lr: 2.70e-02, grad_scale: 16.0
+2024-08-03 05:40:59,440 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 05:41:15,392 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2213, simple_loss=0.3178, pruned_loss=0.06237, over 944034.00 frames. 
+2024-08-03 05:41:15,393 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 05:41:28,395 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.441e+02 1.719e+02 2.426e+02 4.333e+02, threshold=3.438e+02, percent-clipped=13.0 +2024-08-03 05:41:29,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.43 vs. limit=15.0 +2024-08-03 05:41:34,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51080.333333333336, ans=0.1 +2024-08-03 05:41:42,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51117.0, ans=0.1 +2024-08-03 05:41:52,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51190.333333333336, ans=0.1 +2024-08-03 05:42:02,343 INFO [train.py:1114] (1/4) Epoch 4, batch 3050, loss[loss=0.2488, simple_loss=0.32, pruned_loss=0.08878, over 13533.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3429, pruned_loss=0.106, over 2627187.00 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:42:12,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=51263.666666666664, ans=0.125 +2024-08-03 05:42:21,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.30 vs. limit=15.0 +2024-08-03 05:42:56,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=51410.333333333336, ans=0.04949747468305833 +2024-08-03 05:42:57,209 INFO [train.py:1114] (1/4) Epoch 4, batch 3100, loss[loss=0.2844, simple_loss=0.3596, pruned_loss=0.1046, over 13319.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3421, pruned_loss=0.1049, over 2627432.33 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 16.0 +2024-08-03 05:52:34,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.298e+02 1.531e+02 1.928e+02 3.998e+02, threshold=3.062e+02, percent-clipped=1.0 +2024-08-03 05:54:26,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51483.666666666664, ans=0.125 +2024-08-03 05:54:40,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=51557.0, ans=0.125 +2024-08-03 05:54:41,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=51557.0, ans=0.05 +2024-08-03 05:54:48,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=51557.0, ans=0.125 +2024-08-03 05:54:51,696 INFO [train.py:1114] (1/4) Epoch 4, batch 3150, loss[loss=0.3174, simple_loss=0.3708, pruned_loss=0.132, over 13026.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3424, pruned_loss=0.1052, over 2628742.47 frames. ], batch size: 48, lr: 2.69e-02, grad_scale: 16.0 +2024-08-03 05:55:00,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.81 vs. 
limit=15.0 +2024-08-03 05:55:05,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=51630.333333333336, ans=0.125 +2024-08-03 05:55:08,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51630.333333333336, ans=0.125 +2024-08-03 05:55:33,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=51703.666666666664, ans=0.0 +2024-08-03 05:55:48,105 INFO [train.py:1114] (1/4) Epoch 4, batch 3200, loss[loss=0.2381, simple_loss=0.3114, pruned_loss=0.08245, over 13545.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3413, pruned_loss=0.105, over 2634630.60 frames. ], batch size: 37, lr: 2.69e-02, grad_scale: 32.0 +2024-08-03 05:55:50,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=51777.0, ans=0.0 +2024-08-03 05:55:56,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=51813.666666666664, ans=0.125 +2024-08-03 05:55:57,489 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.804e+01 1.368e+02 1.621e+02 1.933e+02 3.574e+02, threshold=3.241e+02, percent-clipped=2.0 +2024-08-03 05:56:14,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=51887.0, ans=0.125 +2024-08-03 05:56:15,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.32 vs. limit=10.0 +2024-08-03 05:56:23,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=51887.0, ans=0.025 +2024-08-03 05:56:40,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=51923.666666666664, ans=0.125 +2024-08-03 05:56:44,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.36 vs. limit=10.0 +2024-08-03 05:56:51,452 INFO [train.py:1114] (1/4) Epoch 4, batch 3250, loss[loss=0.2919, simple_loss=0.3528, pruned_loss=0.1155, over 13389.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3421, pruned_loss=0.1052, over 2639094.35 frames. ], batch size: 38, lr: 2.68e-02, grad_scale: 32.0 +2024-08-03 05:57:22,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=51997.0, ans=0.0 +2024-08-03 05:58:22,585 INFO [train.py:1114] (1/4) Epoch 4, batch 3300, loss[loss=0.2979, simple_loss=0.3577, pruned_loss=0.1191, over 12833.00 frames. ], tot_loss[loss=0.2738, simple_loss=0.3399, pruned_loss=0.1038, over 2640286.66 frames. ], batch size: 52, lr: 2.68e-02, grad_scale: 32.0 +2024-08-03 05:58:23,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=15.0 +2024-08-03 05:58:34,520 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.337e+02 1.543e+02 1.796e+02 2.309e+02, threshold=3.087e+02, percent-clipped=0.0 +2024-08-03 05:58:35,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.82 vs. 
limit=15.0 +2024-08-03 05:58:51,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=52217.0, ans=0.125 +2024-08-03 05:58:54,151 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.38 vs. limit=22.5 +2024-08-03 05:58:54,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52217.0, ans=0.125 +2024-08-03 05:59:00,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.21 vs. limit=22.5 +2024-08-03 05:59:01,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.77 vs. limit=15.0 +2024-08-03 05:59:06,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.93 vs. limit=12.0 +2024-08-03 05:59:14,205 INFO [train.py:1114] (1/4) Epoch 4, batch 3350, loss[loss=0.3217, simple_loss=0.3773, pruned_loss=0.133, over 13036.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3407, pruned_loss=0.1043, over 2629803.42 frames. ], batch size: 48, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 05:59:22,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=52363.666666666664, ans=0.2 +2024-08-03 05:59:23,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.44 vs. limit=6.0 +2024-08-03 05:59:28,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=52363.666666666664, ans=0.125 +2024-08-03 05:59:47,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52437.0, ans=0.1 +2024-08-03 05:59:48,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=52437.0, ans=0.5 +2024-08-03 05:59:48,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52437.0, ans=0.1 +2024-08-03 05:59:49,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=52437.0, ans=0.0 +2024-08-03 05:59:50,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.34 vs. limit=10.0 +2024-08-03 05:59:57,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0 +2024-08-03 06:00:01,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=52473.666666666664, ans=0.125 +2024-08-03 06:00:03,206 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:00:05,675 INFO [train.py:1114] (1/4) Epoch 4, batch 3400, loss[loss=0.2502, simple_loss=0.3119, pruned_loss=0.09424, over 13535.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.3408, pruned_loss=0.1046, over 2625517.24 frames. 
], batch size: 31, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 06:00:09,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=52510.333333333336, ans=0.125 +2024-08-03 06:00:10,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52510.333333333336, ans=0.125 +2024-08-03 06:00:15,141 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.426e+02 1.702e+02 2.054e+02 4.258e+02, threshold=3.404e+02, percent-clipped=2.0 +2024-08-03 06:00:24,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.32 vs. limit=15.0 +2024-08-03 06:00:24,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52583.666666666664, ans=0.125 +2024-08-03 06:00:27,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=52583.666666666664, ans=0.2 +2024-08-03 06:00:27,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=52583.666666666664, ans=0.125 +2024-08-03 06:00:29,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.76 vs. limit=15.0 +2024-08-03 06:00:44,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52657.0, ans=0.1 +2024-08-03 06:00:44,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.41 vs. limit=15.0 +2024-08-03 06:00:48,744 INFO [train.py:1114] (1/4) Epoch 4, batch 3450, loss[loss=0.3118, simple_loss=0.3785, pruned_loss=0.1225, over 12912.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3419, pruned_loss=0.105, over 2627764.46 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 06:00:51,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52693.666666666664, ans=0.1 +2024-08-03 06:00:55,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=52693.666666666664, ans=0.2 +2024-08-03 06:01:05,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=52730.333333333336, ans=0.035 +2024-08-03 06:01:10,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=52767.0, ans=0.0 +2024-08-03 06:01:14,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=52767.0, ans=0.5 +2024-08-03 06:01:23,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52803.666666666664, ans=0.125 +2024-08-03 06:01:23,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=52803.666666666664, ans=0.1 +2024-08-03 06:01:24,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.06 vs. 
limit=15.0 +2024-08-03 06:01:30,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=52840.333333333336, ans=0.125 +2024-08-03 06:01:31,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=52840.333333333336, ans=0.125 +2024-08-03 06:01:36,371 INFO [train.py:1114] (1/4) Epoch 4, batch 3500, loss[loss=0.2402, simple_loss=0.3183, pruned_loss=0.08101, over 13523.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.341, pruned_loss=0.1046, over 2629642.25 frames. ], batch size: 34, lr: 2.66e-02, grad_scale: 32.0 +2024-08-03 06:01:36,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=52877.0, ans=0.2 +2024-08-03 06:01:45,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=52913.666666666664, ans=0.0 +2024-08-03 06:01:45,561 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.329e+02 1.542e+02 1.871e+02 3.471e+02, threshold=3.085e+02, percent-clipped=1.0 +2024-08-03 06:01:55,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=52913.666666666664, ans=0.125 +2024-08-03 06:02:12,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=52950.333333333336, ans=0.2 +2024-08-03 06:02:22,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=52987.0, ans=0.2 +2024-08-03 06:02:22,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=52987.0, ans=0.0 +2024-08-03 06:02:24,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=15.0 +2024-08-03 06:02:37,235 INFO [train.py:1114] (1/4) Epoch 4, batch 3550, loss[loss=0.3352, simple_loss=0.38, pruned_loss=0.1452, over 12363.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3433, pruned_loss=0.1062, over 2628559.77 frames. ], batch size: 58, lr: 2.66e-02, grad_scale: 32.0 +2024-08-03 06:02:58,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53097.0, ans=0.125 +2024-08-03 06:03:10,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53133.666666666664, ans=0.125 +2024-08-03 06:03:21,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53170.333333333336, ans=0.1 +2024-08-03 06:03:21,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-08-03 06:03:31,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53207.0, ans=0.125 +2024-08-03 06:03:31,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-08-03 06:03:32,779 INFO [train.py:1114] (1/4) Epoch 4, batch 3600, loss[loss=0.3813, simple_loss=0.4093, pruned_loss=0.1766, over 8886.00 frames. 
], tot_loss[loss=0.2877, simple_loss=0.3498, pruned_loss=0.1128, over 2488696.63 frames. ], batch size: 97, lr: 2.66e-02, grad_scale: 16.0 +2024-08-03 06:03:57,022 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.338e+02 1.465e+02 1.631e+02 2.841e+02, threshold=2.930e+02, percent-clipped=0.0 +2024-08-03 06:04:03,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=15.0 +2024-08-03 06:04:18,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53353.666666666664, ans=0.125 +2024-08-03 06:04:20,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=53353.666666666664, ans=0.025 +2024-08-03 06:05:08,616 INFO [train.py:1114] (1/4) Epoch 5, batch 0, loss[loss=0.2453, simple_loss=0.3177, pruned_loss=0.08643, over 13346.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3177, pruned_loss=0.08643, over 13346.00 frames. ], batch size: 33, lr: 2.47e-02, grad_scale: 32.0 +2024-08-03 06:05:08,617 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 06:05:18,615 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.231, simple_loss=0.3271, pruned_loss=0.06749, over 944034.00 frames. +2024-08-03 06:05:18,616 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 06:05:27,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=53430.666666666664, ans=0.025 +2024-08-03 06:05:44,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=53467.333333333336, ans=0.05 +2024-08-03 06:05:47,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=53504.0, ans=0.0 +2024-08-03 06:06:04,992 INFO [train.py:1114] (1/4) Epoch 5, batch 50, loss[loss=0.2536, simple_loss=0.317, pruned_loss=0.09513, over 13419.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3451, pruned_loss=0.1071, over 577805.44 frames. ], batch size: 32, lr: 2.47e-02, grad_scale: 32.0 +2024-08-03 06:06:12,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=53614.0, ans=0.015 +2024-08-03 06:06:19,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.19 vs. limit=15.0 +2024-08-03 06:06:24,699 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.396e+02 1.612e+02 2.008e+02 3.505e+02, threshold=3.224e+02, percent-clipped=4.0 +2024-08-03 06:06:32,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=53687.333333333336, ans=0.2 +2024-08-03 06:06:38,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.96 vs. limit=22.5 +2024-08-03 06:06:45,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53724.0, ans=0.1 +2024-08-03 06:06:49,894 INFO [train.py:1114] (1/4) Epoch 5, batch 100, loss[loss=0.2742, simple_loss=0.3382, pruned_loss=0.1051, over 13529.00 frames. 
], tot_loss[loss=0.2768, simple_loss=0.3437, pruned_loss=0.1049, over 1026097.63 frames. ], batch size: 35, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:06:51,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=53760.666666666664, ans=0.125 +2024-08-03 06:07:00,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=53797.333333333336, ans=0.2 +2024-08-03 06:07:02,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=53797.333333333336, ans=0.0 +2024-08-03 06:07:10,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=53834.0, ans=0.125 +2024-08-03 06:07:19,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53834.0, ans=0.1 +2024-08-03 06:07:33,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=53907.333333333336, ans=0.125 +2024-08-03 06:07:36,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.18 vs. limit=22.5 +2024-08-03 06:07:39,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=53907.333333333336, ans=0.125 +2024-08-03 06:07:42,375 INFO [train.py:1114] (1/4) Epoch 5, batch 150, loss[loss=0.2576, simple_loss=0.3192, pruned_loss=0.09796, over 13397.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3399, pruned_loss=0.1028, over 1387036.73 frames. ], batch size: 32, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:07:52,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53980.666666666664, ans=0.125 +2024-08-03 06:07:53,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=53980.666666666664, ans=0.2 +2024-08-03 06:08:01,993 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.304e+02 1.445e+02 1.840e+02 3.127e+02, threshold=2.891e+02, percent-clipped=0.0 +2024-08-03 06:08:25,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=54054.0, ans=0.125 +2024-08-03 06:09:07,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.38 vs. limit=22.5 +2024-08-03 06:09:17,806 INFO [train.py:1114] (1/4) Epoch 5, batch 200, loss[loss=0.259, simple_loss=0.3365, pruned_loss=0.09073, over 12559.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3369, pruned_loss=0.1011, over 1665830.78 frames. 
], batch size: 58, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:09:31,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=54127.333333333336, ans=0.125 +2024-08-03 06:09:50,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54200.666666666664, ans=0.1 +2024-08-03 06:10:14,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=54237.333333333336, ans=0.025 +2024-08-03 06:10:32,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=54274.0, ans=0.2 +2024-08-03 06:10:36,821 INFO [train.py:1114] (1/4) Epoch 5, batch 250, loss[loss=0.2686, simple_loss=0.3382, pruned_loss=0.09952, over 13280.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3366, pruned_loss=0.1005, over 1884767.06 frames. ], batch size: 46, lr: 2.45e-02, grad_scale: 32.0 +2024-08-03 06:11:05,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.288e+02 1.425e+02 1.791e+02 2.775e+02, threshold=2.850e+02, percent-clipped=0.0 +2024-08-03 06:11:05,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=54384.0, ans=0.0 +2024-08-03 06:11:09,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.32 vs. limit=6.0 +2024-08-03 06:11:15,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=54420.666666666664, ans=0.125 +2024-08-03 06:11:24,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=54420.666666666664, ans=0.07 +2024-08-03 06:11:25,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.94 vs. limit=15.0 +2024-08-03 06:11:28,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.83 vs. limit=10.0 +2024-08-03 06:11:31,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.43 vs. limit=15.0 +2024-08-03 06:11:47,371 INFO [train.py:1114] (1/4) Epoch 5, batch 300, loss[loss=0.276, simple_loss=0.3423, pruned_loss=0.1049, over 13443.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3357, pruned_loss=0.1004, over 2051616.81 frames. ], batch size: 42, lr: 2.45e-02, grad_scale: 16.0 +2024-08-03 06:12:08,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54530.666666666664, ans=0.125 +2024-08-03 06:12:15,708 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.13 vs. 
limit=12.0 +2024-08-03 06:12:19,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54567.333333333336, ans=0.125 +2024-08-03 06:12:21,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=54567.333333333336, ans=0.0 +2024-08-03 06:12:22,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=54567.333333333336, ans=0.125 +2024-08-03 06:12:35,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=54640.666666666664, ans=0.0 +2024-08-03 06:12:41,675 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.79 vs. limit=22.5 +2024-08-03 06:12:46,944 INFO [train.py:1114] (1/4) Epoch 5, batch 350, loss[loss=0.2679, simple_loss=0.326, pruned_loss=0.1049, over 13574.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3373, pruned_loss=0.1013, over 2181518.63 frames. ], batch size: 33, lr: 2.45e-02, grad_scale: 16.0 +2024-08-03 06:12:49,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=54677.333333333336, ans=0.0 +2024-08-03 06:13:00,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=54714.0, ans=0.125 +2024-08-03 06:14:29,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.351e+02 1.704e+02 2.152e+02 5.145e+02, threshold=3.407e+02, percent-clipped=8.0 +2024-08-03 06:14:30,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-08-03 06:14:54,830 INFO [train.py:1114] (1/4) Epoch 5, batch 400, loss[loss=0.2793, simple_loss=0.353, pruned_loss=0.1027, over 13363.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3362, pruned_loss=0.1006, over 2284909.09 frames. ], batch size: 37, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:15:00,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54860.666666666664, ans=0.0 +2024-08-03 06:15:04,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=54897.333333333336, ans=0.125 +2024-08-03 06:15:07,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54897.333333333336, ans=0.125 +2024-08-03 06:15:10,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.69 vs. 
limit=15.0 +2024-08-03 06:15:14,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=54934.0, ans=0.125 +2024-08-03 06:15:15,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=54934.0, ans=0.05 +2024-08-03 06:15:24,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=54970.666666666664, ans=0.0 +2024-08-03 06:15:29,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=54970.666666666664, ans=0.0 +2024-08-03 06:15:40,332 INFO [train.py:1114] (1/4) Epoch 5, batch 450, loss[loss=0.2799, simple_loss=0.3419, pruned_loss=0.1089, over 13560.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3375, pruned_loss=0.1013, over 2359038.04 frames. ], batch size: 38, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:15:46,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=55044.0, ans=0.125 +2024-08-03 06:15:55,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=55080.666666666664, ans=0.125 +2024-08-03 06:16:01,335 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.371e+02 1.584e+02 1.939e+02 3.313e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-03 06:16:07,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=55154.0, ans=0.0 +2024-08-03 06:16:23,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=55154.0, ans=0.0 +2024-08-03 06:16:26,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=55190.666666666664, ans=0.125 +2024-08-03 06:16:32,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=55190.666666666664, ans=0.125 +2024-08-03 06:16:34,936 INFO [train.py:1114] (1/4) Epoch 5, batch 500, loss[loss=0.2754, simple_loss=0.3499, pruned_loss=0.1005, over 13427.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3359, pruned_loss=0.1003, over 2424686.00 frames. ], batch size: 43, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:17:37,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=55300.666666666664, ans=0.1 +2024-08-03 06:17:41,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=55300.666666666664, ans=0.125 +2024-08-03 06:17:43,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=55300.666666666664, ans=0.0 +2024-08-03 06:17:53,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55337.333333333336, ans=0.1 +2024-08-03 06:19:39,787 INFO [train.py:1114] (1/4) Epoch 5, batch 550, loss[loss=0.2912, simple_loss=0.3545, pruned_loss=0.114, over 13073.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3351, pruned_loss=0.09997, over 2467676.47 frames. 
], batch size: 48, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:21:10,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=55410.666666666664, ans=0.2 +2024-08-03 06:21:23,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=55484.0, ans=0.2 +2024-08-03 06:21:26,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.350e+02 1.520e+02 1.792e+02 6.308e+02, threshold=3.041e+02, percent-clipped=2.0 +2024-08-03 06:22:09,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=55557.333333333336, ans=0.0 +2024-08-03 06:22:13,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=55594.0, ans=0.125 +2024-08-03 06:22:14,708 INFO [train.py:1114] (1/4) Epoch 5, batch 600, loss[loss=0.2699, simple_loss=0.344, pruned_loss=0.09788, over 13382.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3354, pruned_loss=0.09976, over 2506692.98 frames. ], batch size: 46, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:22:16,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=55594.0, ans=15.0 +2024-08-03 06:22:20,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55594.0, ans=0.125 +2024-08-03 06:22:26,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.09 vs. limit=15.0 +2024-08-03 06:22:39,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=55667.333333333336, ans=0.125 +2024-08-03 06:23:01,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=55740.666666666664, ans=0.125 +2024-08-03 06:23:04,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=55740.666666666664, ans=0.2 +2024-08-03 06:23:07,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.10 vs. limit=15.0 +2024-08-03 06:23:07,603 INFO [train.py:1114] (1/4) Epoch 5, batch 650, loss[loss=0.3402, simple_loss=0.3994, pruned_loss=0.1405, over 13556.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3344, pruned_loss=0.09923, over 2542272.52 frames. ], batch size: 37, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:23:25,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=55777.333333333336, ans=0.125 +2024-08-03 06:23:27,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=55777.333333333336, ans=0.125 +2024-08-03 06:23:40,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.62 vs. 
limit=15.0 +2024-08-03 06:23:43,266 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.304e+02 1.464e+02 1.924e+02 3.409e+02, threshold=2.927e+02, percent-clipped=2.0 +2024-08-03 06:24:09,706 INFO [train.py:1114] (1/4) Epoch 5, batch 700, loss[loss=0.2292, simple_loss=0.2904, pruned_loss=0.08402, over 13526.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3342, pruned_loss=0.09909, over 2565371.68 frames. ], batch size: 35, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:24:30,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=55997.333333333336, ans=0.04949747468305833 +2024-08-03 06:24:45,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0 +2024-08-03 06:24:49,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=56070.666666666664, ans=0.5 +2024-08-03 06:25:08,402 INFO [train.py:1114] (1/4) Epoch 5, batch 750, loss[loss=0.2867, simple_loss=0.3523, pruned_loss=0.1105, over 13350.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3338, pruned_loss=0.09854, over 2582448.21 frames. ], batch size: 37, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:25:19,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56144.0, ans=0.1 +2024-08-03 06:25:21,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.90 vs. limit=22.5 +2024-08-03 06:25:22,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56180.666666666664, ans=0.125 +2024-08-03 06:25:26,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=56180.666666666664, ans=0.125 +2024-08-03 06:25:28,133 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:25:34,264 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.431e+02 1.731e+02 2.437e+02 4.529e+02, threshold=3.462e+02, percent-clipped=10.0 +2024-08-03 06:25:44,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=56254.0, ans=0.125 +2024-08-03 06:25:51,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=56290.666666666664, ans=0.025 +2024-08-03 06:25:59,105 INFO [train.py:1114] (1/4) Epoch 5, batch 800, loss[loss=0.2376, simple_loss=0.3048, pruned_loss=0.08516, over 13359.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3334, pruned_loss=0.09821, over 2597414.37 frames. ], batch size: 33, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:26:20,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.59 vs. limit=12.0 +2024-08-03 06:26:35,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=56437.333333333336, ans=0.125 +2024-08-03 06:26:40,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.47 vs. 
limit=15.0 +2024-08-03 06:26:41,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=56437.333333333336, ans=0.125 +2024-08-03 06:26:58,696 INFO [train.py:1114] (1/4) Epoch 5, batch 850, loss[loss=0.2526, simple_loss=0.3302, pruned_loss=0.0875, over 13302.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3328, pruned_loss=0.09802, over 2609684.36 frames. ], batch size: 40, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:27:01,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=56510.666666666664, ans=0.125 +2024-08-03 06:27:19,558 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.293e+02 1.480e+02 2.210e+02 4.419e+02, threshold=2.961e+02, percent-clipped=1.0 +2024-08-03 06:27:23,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-08-03 06:27:31,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56620.666666666664, ans=0.1 +2024-08-03 06:27:41,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=56657.333333333336, ans=0.125 +2024-08-03 06:27:42,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=56657.333333333336, ans=0.0 +2024-08-03 06:27:42,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=56657.333333333336, ans=0.0 +2024-08-03 06:27:44,541 INFO [train.py:1114] (1/4) Epoch 5, batch 900, loss[loss=0.2486, simple_loss=0.3102, pruned_loss=0.09351, over 13332.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3334, pruned_loss=0.09833, over 2613113.97 frames. ], batch size: 33, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:27:57,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56730.666666666664, ans=0.125 +2024-08-03 06:28:06,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.76 vs. limit=15.0 +2024-08-03 06:28:09,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=56767.333333333336, ans=0.125 +2024-08-03 06:28:12,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-08-03 06:28:13,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.63 vs. limit=6.0 +2024-08-03 06:28:14,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56804.0, ans=0.1 +2024-08-03 06:28:33,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=56840.666666666664, ans=0.2 +2024-08-03 06:28:39,188 INFO [train.py:1114] (1/4) Epoch 5, batch 950, loss[loss=0.2544, simple_loss=0.3192, pruned_loss=0.0948, over 13519.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3336, pruned_loss=0.09856, over 2614032.19 frames. 
], batch size: 34, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:28:39,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=56877.333333333336, ans=0.0 +2024-08-03 06:28:47,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=56914.0, ans=0.2 +2024-08-03 06:28:54,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.65 vs. limit=22.5 +2024-08-03 06:28:58,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=56950.666666666664, ans=0.5 +2024-08-03 06:29:00,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=56950.666666666664, ans=0.0 +2024-08-03 06:29:00,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=56950.666666666664, ans=0.09899494936611666 +2024-08-03 06:29:01,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.321e+02 1.545e+02 1.895e+02 5.386e+02, threshold=3.090e+02, percent-clipped=1.0 +2024-08-03 06:29:29,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=57024.0, ans=0.2 +2024-08-03 06:29:35,304 INFO [train.py:1114] (1/4) Epoch 5, batch 1000, loss[loss=0.2426, simple_loss=0.3138, pruned_loss=0.08566, over 13387.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.335, pruned_loss=0.09938, over 2612031.78 frames. ], batch size: 35, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:29:45,955 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:29:49,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57060.666666666664, ans=0.125 +2024-08-03 06:29:59,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57097.333333333336, ans=0.1 +2024-08-03 06:30:26,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57207.333333333336, ans=0.125 +2024-08-03 06:30:40,644 INFO [train.py:1114] (1/4) Epoch 5, batch 1050, loss[loss=0.3189, simple_loss=0.384, pruned_loss=0.1269, over 13566.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.334, pruned_loss=0.09872, over 2615628.99 frames. ], batch size: 39, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:31:01,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.349e+02 1.601e+02 2.002e+02 3.488e+02, threshold=3.202e+02, percent-clipped=3.0 +2024-08-03 06:31:27,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=57390.666666666664, ans=0.025 +2024-08-03 06:31:31,765 INFO [train.py:1114] (1/4) Epoch 5, batch 1100, loss[loss=0.282, simple_loss=0.3408, pruned_loss=0.1116, over 13560.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3336, pruned_loss=0.09815, over 2619591.72 frames. 
], batch size: 36, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:31:50,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=57464.0, ans=0.0 +2024-08-03 06:31:59,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=57500.666666666664, ans=0.025 +2024-08-03 06:32:03,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57500.666666666664, ans=0.1 +2024-08-03 06:32:06,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=57537.333333333336, ans=0.0 +2024-08-03 06:32:08,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=57537.333333333336, ans=0.5 +2024-08-03 06:32:19,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57537.333333333336, ans=0.1 +2024-08-03 06:32:25,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-03 06:32:28,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.67 vs. limit=15.0 +2024-08-03 06:32:31,911 INFO [train.py:1114] (1/4) Epoch 5, batch 1150, loss[loss=0.2786, simple_loss=0.3432, pruned_loss=0.107, over 13549.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3332, pruned_loss=0.09814, over 2619549.14 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:32:47,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57647.333333333336, ans=0.1 +2024-08-03 06:32:52,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=57647.333333333336, ans=0.2 +2024-08-03 06:32:59,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.317e+02 1.572e+02 1.915e+02 2.951e+02, threshold=3.144e+02, percent-clipped=0.0 +2024-08-03 06:33:37,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.01 vs. limit=22.5 +2024-08-03 06:33:39,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57757.333333333336, ans=0.125 +2024-08-03 06:33:41,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.07 vs. limit=15.0 +2024-08-03 06:33:44,661 INFO [train.py:1114] (1/4) Epoch 5, batch 1200, loss[loss=0.2591, simple_loss=0.3351, pruned_loss=0.0916, over 13582.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3349, pruned_loss=0.09903, over 2617304.44 frames. ], batch size: 39, lr: 2.39e-02, grad_scale: 32.0 +2024-08-03 06:34:10,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=57904.0, ans=0.0 +2024-08-03 06:34:18,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.22 vs. 
limit=22.5 +2024-08-03 06:34:39,141 INFO [train.py:1114] (1/4) Epoch 5, batch 1250, loss[loss=0.2831, simple_loss=0.355, pruned_loss=0.1056, over 13446.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3355, pruned_loss=0.09891, over 2628622.35 frames. ], batch size: 42, lr: 2.39e-02, grad_scale: 32.0 +2024-08-03 06:34:40,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=57977.333333333336, ans=0.0 +2024-08-03 06:34:41,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57977.333333333336, ans=0.125 +2024-08-03 06:34:43,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57977.333333333336, ans=0.125 +2024-08-03 06:34:43,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=57977.333333333336, ans=0.125 +2024-08-03 06:34:58,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.48 vs. limit=22.5 +2024-08-03 06:35:05,297 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.303e+02 1.543e+02 2.003e+02 3.165e+02, threshold=3.086e+02, percent-clipped=1.0 +2024-08-03 06:35:23,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=58124.0, ans=0.0 +2024-08-03 06:35:26,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=58124.0, ans=0.0 +2024-08-03 06:35:27,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=15.0 +2024-08-03 06:35:31,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=58160.666666666664, ans=0.125 +2024-08-03 06:35:32,168 INFO [train.py:1114] (1/4) Epoch 5, batch 1300, loss[loss=0.274, simple_loss=0.3423, pruned_loss=0.1029, over 12948.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3343, pruned_loss=0.09869, over 2631573.16 frames. ], batch size: 52, lr: 2.39e-02, grad_scale: 16.0 +2024-08-03 06:35:53,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=58234.0, ans=0.05 +2024-08-03 06:35:54,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=58234.0, ans=0.0 +2024-08-03 06:35:56,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58234.0, ans=0.1 +2024-08-03 06:36:16,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=58344.0, ans=0.0 +2024-08-03 06:36:17,086 INFO [train.py:1114] (1/4) Epoch 5, batch 1350, loss[loss=0.2544, simple_loss=0.3227, pruned_loss=0.09308, over 13550.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3334, pruned_loss=0.09794, over 2639072.21 frames. 
+2024-08-03 06:36:23,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=58344.0, ans=0.2
+2024-08-03 06:36:31,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=58380.666666666664, ans=0.125
+2024-08-03 06:36:31,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58380.666666666664, ans=0.125
+2024-08-03 06:36:33,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=58380.666666666664, ans=0.125
+2024-08-03 06:36:39,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.960e+01 1.325e+02 1.559e+02 1.988e+02 3.487e+02, threshold=3.118e+02, percent-clipped=2.0
+2024-08-03 06:37:17,003 INFO [train.py:1114] (1/4) Epoch 5, batch 1400, loss[loss=0.2302, simple_loss=0.2956, pruned_loss=0.0824, over 13257.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3327, pruned_loss=0.09737, over 2642877.65 frames. ], batch size: 31, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:37:27,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=58564.0, ans=0.125
+2024-08-03 06:37:29,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0
+2024-08-03 06:37:40,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=58600.666666666664, ans=0.125
+2024-08-03 06:37:40,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.55 vs. limit=12.0
+2024-08-03 06:37:59,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58674.0, ans=0.125
+2024-08-03 06:38:03,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.56 vs. limit=10.0
+2024-08-03 06:38:07,871 INFO [train.py:1114] (1/4) Epoch 5, batch 1450, loss[loss=0.2802, simple_loss=0.3582, pruned_loss=0.1011, over 13412.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3329, pruned_loss=0.09761, over 2642253.61 frames. ], batch size: 43, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:38:21,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.88 vs. limit=22.5
+2024-08-03 06:38:23,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=58747.333333333336, ans=10.0
+2024-08-03 06:38:30,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.374e+02 1.719e+02 2.363e+02 5.392e+02, threshold=3.437e+02, percent-clipped=8.0
+2024-08-03 06:38:47,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.99 vs. limit=10.0
+2024-08-03 06:38:50,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=58820.666666666664, ans=0.025
+2024-08-03 06:38:58,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58857.333333333336, ans=0.1
+2024-08-03 06:39:04,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58857.333333333336, ans=0.125
+2024-08-03 06:39:04,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=58857.333333333336, ans=0.0
+2024-08-03 06:39:06,273 INFO [train.py:1114] (1/4) Epoch 5, batch 1500, loss[loss=0.2685, simple_loss=0.3472, pruned_loss=0.09491, over 13396.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3334, pruned_loss=0.09758, over 2642415.65 frames. ], batch size: 39, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:39:06,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=58894.0, ans=0.0
+2024-08-03 06:39:37,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0
+2024-08-03 06:39:40,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.28 vs. limit=10.0
+2024-08-03 06:39:42,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=58930.666666666664, ans=0.125
+2024-08-03 06:39:58,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=59004.0, ans=0.2
+2024-08-03 06:40:12,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=59040.666666666664, ans=0.125
+2024-08-03 06:40:13,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=59040.666666666664, ans=0.0
+2024-08-03 06:40:16,381 INFO [train.py:1114] (1/4) Epoch 5, batch 1550, loss[loss=0.2826, simple_loss=0.3606, pruned_loss=0.1023, over 13376.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3334, pruned_loss=0.09778, over 2631162.26 frames. ], batch size: 41, lr: 2.37e-02, grad_scale: 16.0
+2024-08-03 06:40:23,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=59077.333333333336, ans=0.125
+2024-08-03 06:40:23,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59077.333333333336, ans=0.1
+2024-08-03 06:40:30,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59114.0, ans=0.1
+2024-08-03 06:40:32,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=59114.0, ans=0.125
+2024-08-03 06:40:34,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=59150.666666666664, ans=0.1
+2024-08-03 06:40:39,048 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.330e+02 1.569e+02 1.992e+02 3.164e+02, threshold=3.138e+02, percent-clipped=1.0
+2024-08-03 06:40:40,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.13 vs. limit=15.0
+2024-08-03 06:40:46,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=59187.333333333336, ans=0.2
+2024-08-03 06:41:38,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=59224.0, ans=0.0
+2024-08-03 06:41:39,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=59224.0, ans=0.125
+2024-08-03 06:41:47,101 INFO [train.py:1114] (1/4) Epoch 5, batch 1600, loss[loss=0.2788, simple_loss=0.3486, pruned_loss=0.1045, over 13589.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3323, pruned_loss=0.09723, over 2623726.07 frames. ], batch size: 39, lr: 2.37e-02, grad_scale: 32.0
+2024-08-03 06:41:47,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0
+2024-08-03 06:41:51,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=59260.666666666664, ans=0.0
+2024-08-03 06:41:54,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=59260.666666666664, ans=0.125
+2024-08-03 06:42:09,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=59334.0, ans=0.0
+2024-08-03 06:42:12,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59334.0, ans=0.1
+2024-08-03 06:42:14,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=59334.0, ans=0.0
+2024-08-03 06:42:20,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=59370.666666666664, ans=0.5
+2024-08-03 06:42:25,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.59 vs. limit=6.0
+2024-08-03 06:42:30,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=59407.333333333336, ans=0.2
+2024-08-03 06:42:36,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=59407.333333333336, ans=0.0
+2024-08-03 06:42:38,321 INFO [train.py:1114] (1/4) Epoch 5, batch 1650, loss[loss=0.2534, simple_loss=0.3409, pruned_loss=0.08297, over 13334.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3324, pruned_loss=0.0975, over 2620538.85 frames. ], batch size: 40, lr: 2.37e-02, grad_scale: 32.0
+2024-08-03 06:42:41,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=59444.0, ans=0.125
+2024-08-03 06:42:54,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59480.666666666664, ans=0.1
+2024-08-03 06:42:54,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.40 vs. limit=15.0
+2024-08-03 06:43:04,539 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.342e+02 1.500e+02 2.074e+02 4.077e+02, threshold=2.999e+02, percent-clipped=4.0
+2024-08-03 06:43:27,952 INFO [train.py:1114] (1/4) Epoch 5, batch 1700, loss[loss=0.2534, simple_loss=0.3146, pruned_loss=0.0961, over 13258.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3314, pruned_loss=0.09648, over 2629378.13 frames. ], batch size: 31, lr: 2.36e-02, grad_scale: 32.0
+2024-08-03 06:43:32,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=59627.333333333336, ans=0.0
+2024-08-03 06:43:41,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=59664.0, ans=0.2
+2024-08-03 06:43:52,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=59700.666666666664, ans=0.2
+2024-08-03 06:44:02,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59737.333333333336, ans=0.125
+2024-08-03 06:44:10,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.26 vs. limit=6.0
+2024-08-03 06:44:13,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=59774.0, ans=0.05
+2024-08-03 06:44:20,423 INFO [train.py:1114] (1/4) Epoch 5, batch 1750, loss[loss=0.2772, simple_loss=0.3279, pruned_loss=0.1133, over 13547.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3306, pruned_loss=0.09604, over 2633091.39 frames. ], batch size: 31, lr: 2.36e-02, grad_scale: 32.0
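The `WARNING [optim.py:487]` lines report the distribution of recent gradient norms (min, 25%, median, 75%, max) together with a clipping threshold. In every warning in this log the threshold equals `Clipping_scale` times the logged median (for example 2.0 × 1.572e+02 = 3.144e+02 in the first warning), and `percent-clipped` is the share of recent batches whose norm exceeded it. The sketch below follows that reading; it is a simplified stand-in, not the recipe's actual ScaledAdam optimizer:

```python
# Sketch of median-based gradient clipping consistent with the optim.py
# warnings above: threshold = clipping_scale * median of recent grad norms.
from collections import deque
import torch

class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # rolling window of recent norms

    def clip_(self, params) -> float:
        # global gradient norm over all parameters
        norm = torch.norm(
            torch.stack([p.grad.norm() for p in params if p.grad is not None])
        ).item()
        self.norms.append(norm)
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.clipping_scale * median
        if norm > threshold:
            # scale all gradients down so the total norm equals the threshold
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return threshold
```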
+2024-08-03 06:44:20,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=59810.666666666664, ans=0.0
+2024-08-03 06:44:27,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59810.666666666664, ans=0.125
+2024-08-03 06:44:50,131 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.019e+02 1.258e+02 1.421e+02 1.677e+02 2.914e+02, threshold=2.843e+02, percent-clipped=0.0
+2024-08-03 06:44:56,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=59920.666666666664, ans=0.0
+2024-08-03 06:45:15,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59957.333333333336, ans=0.125
+2024-08-03 06:45:20,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=59994.0, ans=0.0
+2024-08-03 06:45:21,416 INFO [train.py:1114] (1/4) Epoch 5, batch 1800, loss[loss=0.2676, simple_loss=0.3383, pruned_loss=0.09845, over 13552.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3308, pruned_loss=0.09603, over 2634625.14 frames. ], batch size: 38, lr: 2.36e-02, grad_scale: 32.0
+2024-08-03 06:45:25,366 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 06:45:31,228 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.26 vs. limit=15.0
+2024-08-03 06:45:41,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=60067.333333333336, ans=0.025
+2024-08-03 06:46:00,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60140.666666666664, ans=0.1
+2024-08-03 06:46:03,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=60140.666666666664, ans=0.125
+2024-08-03 06:49:33,864 INFO [train.py:1114] (1/4) Epoch 5, batch 1850, loss[loss=0.2546, simple_loss=0.3266, pruned_loss=0.09125, over 13411.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3308, pruned_loss=0.09595, over 2637801.04 frames. ], batch size: 39, lr: 2.35e-02, grad_scale: 32.0
+2024-08-03 06:49:43,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=60177.333333333336, ans=0.0
+2024-08-03 06:50:09,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60177.333333333336, ans=0.0
+2024-08-03 06:50:45,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60214.0, ans=0.125
+2024-08-03 06:51:23,623 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.315e+02 1.584e+02 1.966e+02 3.228e+02, threshold=3.167e+02, percent-clipped=4.0
+2024-08-03 06:52:39,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60324.0, ans=0.125
+2024-08-03 06:53:02,070 INFO [train.py:1114] (1/4) Epoch 5, batch 1900, loss[loss=0.2926, simple_loss=0.3537, pruned_loss=0.1157, over 13327.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3315, pruned_loss=0.09615, over 2639796.33 frames. ], batch size: 40, lr: 2.35e-02, grad_scale: 32.0
+2024-08-03 06:53:14,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=60360.666666666664, ans=0.0
+2024-08-03 06:53:14,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=60360.666666666664, ans=0.2
+2024-08-03 06:53:14,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0
+2024-08-03 06:58:13,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0
+2024-08-03 06:58:49,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60507.333333333336, ans=0.125
+2024-08-03 06:58:59,371 INFO [train.py:1114] (1/4) Epoch 5, batch 1950, loss[loss=0.2358, simple_loss=0.3144, pruned_loss=0.07863, over 13549.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3324, pruned_loss=0.09601, over 2646658.36 frames. ], batch size: 36, lr: 2.35e-02, grad_scale: 32.0
+2024-08-03 06:59:03,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60544.0, ans=0.1
+2024-08-03 07:01:31,512 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.403e+02 1.665e+02 1.976e+02 3.868e+02, threshold=3.331e+02, percent-clipped=1.0
+2024-08-03 07:01:31,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=60617.333333333336, ans=0.125
+2024-08-03 07:01:47,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=60654.0, ans=0.025
+2024-08-03 07:03:26,354 INFO [train.py:1114] (1/4) Epoch 5, batch 2000, loss[loss=0.2256, simple_loss=0.2926, pruned_loss=0.07926, over 13546.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.3326, pruned_loss=0.0963, over 2636328.53 frames. ], batch size: 31, lr: 2.35e-02, grad_scale: 32.0
+2024-08-03 07:03:27,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=60727.333333333336, ans=0.125
+2024-08-03 07:03:38,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=60764.0, ans=0.125
+2024-08-03 07:03:38,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60764.0, ans=0.1
+2024-08-03 07:03:42,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60764.0, ans=0.0
+2024-08-03 07:03:44,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=60800.666666666664, ans=0.125
+2024-08-03 07:03:48,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=60800.666666666664, ans=0.0
+2024-08-03 07:03:50,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=60800.666666666664, ans=0.0
+2024-08-03 07:04:01,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.68 vs. limit=12.0
+2024-08-03 07:04:03,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=60874.0, ans=0.2
+2024-08-03 07:04:24,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.18 vs. limit=15.0
+2024-08-03 07:04:27,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.18 vs. limit=6.0
+2024-08-03 07:04:31,373 INFO [train.py:1114] (1/4) Epoch 5, batch 2050, loss[loss=0.2158, simple_loss=0.2834, pruned_loss=0.07406, over 13420.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3311, pruned_loss=0.09603, over 2633229.57 frames. ], batch size: 32, lr: 2.34e-02, grad_scale: 32.0
+2024-08-03 07:04:48,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=60910.666666666664, ans=0.125
+2024-08-03 07:05:08,117 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.320e+02 1.526e+02 1.984e+02 3.306e+02, threshold=3.052e+02, percent-clipped=0.0
+2024-08-03 07:05:08,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=60984.0, ans=0.07
+2024-08-03 07:05:11,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=61020.666666666664, ans=0.2
+2024-08-03 07:05:12,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=15.0
+2024-08-03 07:05:17,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.36 vs. limit=22.5
+2024-08-03 07:05:24,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=61057.333333333336, ans=0.2
+2024-08-03 07:05:24,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61057.333333333336, ans=0.1
+2024-08-03 07:05:29,946 INFO [train.py:1114] (1/4) Epoch 5, batch 2100, loss[loss=0.242, simple_loss=0.313, pruned_loss=0.08545, over 13553.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3297, pruned_loss=0.09494, over 2638121.16 frames. ], batch size: 37, lr: 2.34e-02, grad_scale: 16.0
+2024-08-03 07:05:34,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=61094.0, ans=0.0
+2024-08-03 07:05:39,073 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:05:48,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=61167.333333333336, ans=0.2
+2024-08-03 07:07:52,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=61240.666666666664, ans=0.125
+2024-08-03 07:08:00,450 INFO [train.py:1114] (1/4) Epoch 5, batch 2150, loss[loss=0.2409, simple_loss=0.3196, pruned_loss=0.08108, over 13571.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3288, pruned_loss=0.09426, over 2646514.08 frames. ], batch size: 36, lr: 2.34e-02, grad_scale: 16.0
+2024-08-03 07:08:02,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0
+2024-08-03 07:08:08,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.93 vs. limit=12.0
+2024-08-03 07:08:11,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61314.0, ans=0.125
+2024-08-03 07:08:18,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61314.0, ans=0.1
+2024-08-03 07:08:25,510 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.319e+02 1.581e+02 2.053e+02 4.024e+02, threshold=3.163e+02, percent-clipped=3.0
+2024-08-03 07:08:31,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=61387.333333333336, ans=0.0
+2024-08-03 07:08:36,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=61387.333333333336, ans=0.125
+2024-08-03 07:08:50,639 INFO [train.py:1114] (1/4) Epoch 5, batch 2200, loss[loss=0.2576, simple_loss=0.3362, pruned_loss=0.08953, over 13399.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3286, pruned_loss=0.09423, over 2643524.99 frames. ], batch size: 39, lr: 2.33e-02, grad_scale: 16.0
+2024-08-03 07:08:56,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=61460.666666666664, ans=0.125
+2024-08-03 07:09:05,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.83 vs. limit=15.0
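The `Whitening` lines compare a per-module statistic against a limit. A natural reading, assuming the usual motivation for such a constraint, is that the metric measures how far the feature covariance is from a scaled identity: the eigenvalue ratio E[λ²]/E[λ]² is 1.0 for perfectly white features and grows with anisotropy, and a penalty applies only when the metric exceeds the limit. The exact statistic is defined in the recipe's scaling.py; the function below is an illustrative stand-in, not the actual code:

```python
# One plausible whitening metric matching the "metric=... vs. limit=..." lines:
# the ratio E[lambda^2] / (E[lambda])^2 over eigenvalues of the feature
# covariance, computed via trace identities (tr(C)/n and tr(C^2)/n).
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations from one module
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]           # (C, C) covariance
    n = cov.shape[0]
    mean_eig = torch.diagonal(cov).mean()  # tr(C)/n   = E[lambda]
    mean_eig_sq = (cov * cov).sum() / n    # tr(C^2)/n = E[lambda^2]
    return (mean_eig_sq / (mean_eig ** 2 + 1e-20)).item()

white = torch.randn(10000, 384)
print(whitening_metric(white))                                   # close to 1.0
print(whitening_metric(white * torch.linspace(0.1, 3.0, 384)))   # noticeably larger
```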
+2024-08-03 07:09:32,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=61607.333333333336, ans=0.125
+2024-08-03 07:09:37,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61607.333333333336, ans=0.0
+2024-08-03 07:09:40,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61644.0, ans=0.1
+2024-08-03 07:09:41,548 INFO [train.py:1114] (1/4) Epoch 5, batch 2250, loss[loss=0.234, simple_loss=0.3032, pruned_loss=0.08237, over 13367.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.329, pruned_loss=0.09468, over 2641118.64 frames. ], batch size: 37, lr: 2.33e-02, grad_scale: 16.0
+2024-08-03 07:09:41,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61644.0, ans=0.1
+2024-08-03 07:09:46,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.81 vs. limit=15.0
+2024-08-03 07:09:48,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=61644.0, ans=0.0
+2024-08-03 07:09:53,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=61680.666666666664, ans=0.0
+2024-08-03 07:10:05,790 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.390e+02 1.682e+02 2.115e+02 4.078e+02, threshold=3.364e+02, percent-clipped=8.0
+2024-08-03 07:10:34,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=61754.0, ans=0.2
+2024-08-03 07:10:54,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=61790.666666666664, ans=0.0
+2024-08-03 07:12:06,481 INFO [train.py:1114] (1/4) Epoch 5, batch 2300, loss[loss=0.204, simple_loss=0.2765, pruned_loss=0.06573, over 13576.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3283, pruned_loss=0.09486, over 2638259.46 frames. ], batch size: 33, lr: 2.33e-02, grad_scale: 16.0
+2024-08-03 07:15:29,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=61900.666666666664, ans=0.125
+2024-08-03 07:16:07,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=61937.333333333336, ans=0.125
+2024-08-03 07:16:09,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.93 vs. limit=22.5
+2024-08-03 07:16:30,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=61974.0, ans=0.125
+2024-08-03 07:16:32,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61974.0, ans=0.1
+2024-08-03 07:16:35,463 INFO [train.py:1114] (1/4) Epoch 5, batch 2350, loss[loss=0.267, simple_loss=0.3438, pruned_loss=0.09512, over 13561.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3287, pruned_loss=0.09499, over 2640892.23 frames. ], batch size: 38, lr: 2.32e-02, grad_scale: 16.0
+2024-08-03 07:16:47,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=62010.666666666664, ans=0.2
+2024-08-03 07:17:00,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62047.333333333336, ans=0.0
+2024-08-03 07:17:03,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=62047.333333333336, ans=0.07
+2024-08-03 07:17:03,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62047.333333333336, ans=0.0
+2024-08-03 07:17:20,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.323e+02 1.600e+02 2.155e+02 3.699e+02, threshold=3.200e+02, percent-clipped=2.0
+2024-08-03 07:17:34,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=62120.666666666664, ans=0.125
+2024-08-03 07:18:36,409 INFO [train.py:1114] (1/4) Epoch 5, batch 2400, loss[loss=0.2359, simple_loss=0.3154, pruned_loss=0.07818, over 13541.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3288, pruned_loss=0.09458, over 2642402.58 frames. ], batch size: 35, lr: 2.32e-02, grad_scale: 32.0
+2024-08-03 07:18:54,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=62194.0, ans=0.0
+2024-08-03 07:18:58,739 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:19:27,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62304.0, ans=0.125
+2024-08-03 07:19:40,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=62340.666666666664, ans=0.125
+2024-08-03 07:19:45,670 INFO [train.py:1114] (1/4) Epoch 5, batch 2450, loss[loss=0.263, simple_loss=0.3381, pruned_loss=0.09395, over 13340.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3304, pruned_loss=0.09557, over 2632022.81 frames. ], batch size: 37, lr: 2.32e-02, grad_scale: 32.0
+2024-08-03 07:20:02,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=62450.666666666664, ans=0.2
+2024-08-03 07:20:08,871 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.340e+02 1.643e+02 2.212e+02 4.155e+02, threshold=3.287e+02, percent-clipped=6.0
+2024-08-03 07:20:09,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62450.666666666664, ans=0.1
+2024-08-03 07:20:17,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=62487.333333333336, ans=0.125
+2024-08-03 07:20:28,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=62524.0, ans=0.125
+2024-08-03 07:20:35,555 INFO [train.py:1114] (1/4) Epoch 5, batch 2500, loss[loss=0.2349, simple_loss=0.3109, pruned_loss=0.07945, over 13402.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3298, pruned_loss=0.0951, over 2636577.62 frames. ], batch size: 39, lr: 2.32e-02, grad_scale: 32.0
+2024-08-03 07:20:47,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62597.333333333336, ans=0.1
+2024-08-03 07:20:59,740 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:21:02,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62670.666666666664, ans=0.1
+2024-08-03 07:21:05,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=62670.666666666664, ans=0.125
+2024-08-03 07:21:12,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=62707.333333333336, ans=0.125
+2024-08-03 07:21:20,458 INFO [train.py:1114] (1/4) Epoch 5, batch 2550, loss[loss=0.247, simple_loss=0.3124, pruned_loss=0.09082, over 13546.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3307, pruned_loss=0.09578, over 2637674.92 frames. ], batch size: 31, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:21:30,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=62780.666666666664, ans=6.0
+2024-08-03 07:21:49,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.290e+02 1.480e+02 1.885e+02 4.380e+02, threshold=2.959e+02, percent-clipped=2.0
+2024-08-03 07:21:52,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=15.0
+2024-08-03 07:22:07,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=62890.666666666664, ans=0.2
+2024-08-03 07:22:09,866 INFO [train.py:1114] (1/4) Epoch 5, batch 2600, loss[loss=0.2816, simple_loss=0.346, pruned_loss=0.1086, over 13549.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3315, pruned_loss=0.09616, over 2636995.45 frames. ], batch size: 36, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:22:19,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=62964.0, ans=0.0
+2024-08-03 07:22:20,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62964.0, ans=0.1
+2024-08-03 07:22:23,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=62964.0, ans=0.07
+2024-08-03 07:23:50,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=63000.666666666664, ans=0.125
+2024-08-03 07:24:17,371 INFO [train.py:1114] (1/4) Epoch 5, batch 2650, loss[loss=0.2828, simple_loss=0.3547, pruned_loss=0.1054, over 13333.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3321, pruned_loss=0.09664, over 2640025.96 frames. ], batch size: 46, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:24:38,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=63184.0, ans=0.0
+2024-08-03 07:24:43,701 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.380e+02 1.559e+02 1.928e+02 2.967e+02, threshold=3.118e+02, percent-clipped=1.0
+2024-08-03 07:25:05,925 INFO [train.py:1114] (1/4) Epoch 5, batch 2700, loss[loss=0.2863, simple_loss=0.3541, pruned_loss=0.1093, over 13526.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3313, pruned_loss=0.09604, over 2637338.91 frames. ], batch size: 40, lr: 2.31e-02, grad_scale: 32.0
+2024-08-03 07:25:08,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=63294.0, ans=0.0
+2024-08-03 07:25:11,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63294.0, ans=0.1
+2024-08-03 07:25:12,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=63294.0, ans=0.2
+2024-08-03 07:25:17,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.65 vs. limit=6.0
+2024-08-03 07:25:27,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0
+2024-08-03 07:25:49,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=63477.333333333336, ans=0.2
+2024-08-03 07:25:49,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.11 vs. limit=22.5
+2024-08-03 07:25:49,674 INFO [train.py:1114] (1/4) Epoch 5, batch 2750, loss[loss=0.2341, simple_loss=0.3127, pruned_loss=0.0777, over 13336.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3294, pruned_loss=0.09498, over 2634951.31 frames. ], batch size: 34, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:26:13,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=22.5
+2024-08-03 07:26:21,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=63514.0, ans=0.0
+2024-08-03 07:26:35,262 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.483e+02 1.693e+02 2.147e+02 4.016e+02, threshold=3.386e+02, percent-clipped=6.0
+2024-08-03 07:26:51,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=63624.0, ans=0.0
+2024-08-03 07:26:52,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=63624.0, ans=0.125
+2024-08-03 07:26:56,073 INFO [train.py:1114] (1/4) Epoch 5, batch 2800, loss[loss=0.319, simple_loss=0.3645, pruned_loss=0.1367, over 9099.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3306, pruned_loss=0.09595, over 2625779.91 frames. ], batch size: 97, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:27:18,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=63734.0, ans=0.125
+2024-08-03 07:27:21,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=63734.0, ans=0.125
+2024-08-03 07:27:25,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=63770.666666666664, ans=0.0
+2024-08-03 07:27:32,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=63807.333333333336, ans=0.1
+2024-08-03 07:27:32,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.34 vs. limit=15.0
+2024-08-03 07:27:39,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.28 vs. limit=15.0
+2024-08-03 07:27:40,307 INFO [train.py:1114] (1/4) Epoch 5, batch 2850, loss[loss=0.2518, simple_loss=0.3217, pruned_loss=0.09094, over 13369.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3307, pruned_loss=0.09606, over 2619739.69 frames. ], batch size: 35, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:27:46,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=63844.0, ans=0.025
+2024-08-03 07:28:00,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.94 vs. limit=22.5
+2024-08-03 07:28:01,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63917.333333333336, ans=0.1
+2024-08-03 07:28:37,169 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.375e+02 1.611e+02 2.020e+02 3.770e+02, threshold=3.222e+02, percent-clipped=1.0
+2024-08-03 07:28:46,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=63954.0, ans=0.5
+2024-08-03 07:28:47,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63954.0, ans=0.1
+2024-08-03 07:28:52,973 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:28:53,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.84 vs. limit=22.5
+2024-08-03 07:28:53,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=63990.666666666664, ans=0.125
+2024-08-03 07:28:58,762 INFO [train.py:1114] (1/4) Epoch 5, batch 2900, loss[loss=0.2148, simple_loss=0.2889, pruned_loss=0.07037, over 13358.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3315, pruned_loss=0.09588, over 2630927.36 frames. ], batch size: 36, lr: 2.29e-02, grad_scale: 32.0
+2024-08-03 07:29:01,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=64027.333333333336, ans=0.0
+2024-08-03 07:29:07,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.53 vs. limit=15.0
+2024-08-03 07:29:12,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=64064.0, ans=0.2
+2024-08-03 07:29:21,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=64100.666666666664, ans=0.125
+2024-08-03 07:29:21,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=64100.666666666664, ans=0.1
+2024-08-03 07:29:27,318 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:29:40,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=64174.0, ans=0.025
+2024-08-03 07:29:46,624 INFO [train.py:1114] (1/4) Epoch 5, batch 2950, loss[loss=0.2513, simple_loss=0.3184, pruned_loss=0.0921, over 13343.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3302, pruned_loss=0.09545, over 2628793.69 frames. ], batch size: 34, lr: 2.29e-02, grad_scale: 32.0
+2024-08-03 07:29:47,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=64210.666666666664, ans=0.125
+2024-08-03 07:29:51,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=64210.666666666664, ans=0.125
+2024-08-03 07:30:08,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=64284.0, ans=0.125
+2024-08-03 07:30:08,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64284.0, ans=0.125
+2024-08-03 07:30:13,352 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.269e+02 1.515e+02 1.903e+02 4.002e+02, threshold=3.030e+02, percent-clipped=2.0
+2024-08-03 07:30:21,252 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.760e-03
+2024-08-03 07:30:33,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=64357.333333333336, ans=0.125
+2024-08-03 07:30:33,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.44 vs. limit=22.5
+2024-08-03 07:30:38,300 INFO [train.py:1114] (1/4) Epoch 5, batch 3000, loss[loss=0.2757, simple_loss=0.3413, pruned_loss=0.105, over 13546.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3298, pruned_loss=0.0953, over 2629441.56 frames. ], batch size: 37, lr: 2.29e-02, grad_scale: 16.0
+2024-08-03 07:30:38,300 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 07:31:39,948 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.2105, simple_loss=0.3083, pruned_loss=0.0563, over 944034.00 frames.
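The "Computing validation loss" / "validation: loss=..." pair above shows that train.py periodically pauses training (here at batch 3000), runs the model over a held-out set, and reports losses normalized over the total number of frames. A generic sketch of such a pass, with `model`, `criterion`, and `valid_loader` as placeholders rather than the recipe's actual objects:

```python
# Generic sketch of the periodic validation pass implied by the log lines
# above. The real loop lives in the recipe's train.py.
import torch

def compute_validation_loss(model, criterion, valid_loader, device) -> float:
    model.eval()
    total_loss, total_frames = 0.0, 0
    with torch.no_grad():
        for batch in valid_loader:
            feats = batch["features"].to(device)
            targets = batch["targets"].to(device)
            num_frames = feats.shape[0] * feats.shape[1]
            loss = criterion(model(feats), targets)
            total_loss += loss.item() * num_frames  # frame-weighted sum
            total_frames += num_frames
    model.train()
    return total_loss / total_frames  # loss per frame, as logged
```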
+2024-08-03 07:31:39,949 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 07:31:48,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=64430.666666666664, ans=0.125 +2024-08-03 07:31:49,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.10 vs. limit=22.5 +2024-08-03 07:31:59,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64467.333333333336, ans=0.1 +2024-08-03 07:32:02,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=64467.333333333336, ans=0.0 +2024-08-03 07:32:05,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=64504.0, ans=0.035 +2024-08-03 07:32:06,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=64504.0, ans=0.125 +2024-08-03 07:32:12,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0 +2024-08-03 07:32:13,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=64504.0, ans=0.0 +2024-08-03 07:32:26,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64540.666666666664, ans=0.1 +2024-08-03 07:32:28,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=64540.666666666664, ans=0.125 +2024-08-03 07:32:30,610 INFO [train.py:1114] (1/4) Epoch 5, batch 3050, loss[loss=0.2275, simple_loss=0.3015, pruned_loss=0.07676, over 13537.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3307, pruned_loss=0.09556, over 2625867.31 frames. ], batch size: 35, lr: 2.29e-02, grad_scale: 16.0 +2024-08-03 07:32:31,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0 +2024-08-03 07:32:46,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64614.0, ans=0.1 +2024-08-03 07:32:47,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.95 vs. limit=10.0 +2024-08-03 07:32:56,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=64650.666666666664, ans=0.05 +2024-08-03 07:32:59,071 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.354e+02 1.544e+02 1.924e+02 3.300e+02, threshold=3.088e+02, percent-clipped=4.0 +2024-08-03 07:33:04,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.26 vs. limit=10.0 +2024-08-03 07:33:14,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=64724.0, ans=0.0 +2024-08-03 07:33:18,950 INFO [train.py:1114] (1/4) Epoch 5, batch 3100, loss[loss=0.2511, simple_loss=0.3253, pruned_loss=0.0884, over 13286.00 frames. 
], tot_loss[loss=0.2594, simple_loss=0.3293, pruned_loss=0.09472, over 2626157.10 frames. ], batch size: 46, lr: 2.28e-02, grad_scale: 16.0 +2024-08-03 07:33:25,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=64760.666666666664, ans=0.0 +2024-08-03 07:33:38,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64834.0, ans=0.125 +2024-08-03 07:33:44,178 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.84 vs. limit=15.0 +2024-08-03 07:33:51,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=64870.666666666664, ans=0.05 +2024-08-03 07:33:55,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=64907.333333333336, ans=0.025 +2024-08-03 07:34:01,576 INFO [train.py:1114] (1/4) Epoch 5, batch 3150, loss[loss=0.2753, simple_loss=0.3483, pruned_loss=0.1012, over 12994.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3303, pruned_loss=0.09537, over 2628329.83 frames. ], batch size: 48, lr: 2.28e-02, grad_scale: 16.0 +2024-08-03 07:34:11,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=64980.666666666664, ans=0.0 +2024-08-03 07:34:25,112 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.369e+02 1.582e+02 1.897e+02 3.787e+02, threshold=3.164e+02, percent-clipped=5.0 +2024-08-03 07:34:40,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-08-03 07:34:41,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=65090.666666666664, ans=0.125 +2024-08-03 07:34:46,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.49 vs. limit=10.0 +2024-08-03 07:34:47,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=65090.666666666664, ans=0.0 +2024-08-03 07:34:51,988 INFO [train.py:1114] (1/4) Epoch 5, batch 3200, loss[loss=0.2519, simple_loss=0.3231, pruned_loss=0.09036, over 13536.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3302, pruned_loss=0.09516, over 2634682.10 frames. ], batch size: 37, lr: 2.28e-02, grad_scale: 32.0 +2024-08-03 07:34:52,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=65127.333333333336, ans=0.125 +2024-08-03 07:34:52,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.75 vs. 
limit=22.5 +2024-08-03 07:34:54,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=65127.333333333336, ans=0.2 +2024-08-03 07:35:06,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=65164.0, ans=0.0 +2024-08-03 07:35:22,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=65237.333333333336, ans=0.125 +2024-08-03 07:35:34,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=65274.0, ans=0.2 +2024-08-03 07:35:35,993 INFO [train.py:1114] (1/4) Epoch 5, batch 3250, loss[loss=0.2318, simple_loss=0.3142, pruned_loss=0.07468, over 13396.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3308, pruned_loss=0.09493, over 2639742.17 frames. ], batch size: 38, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:35:46,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-08-03 07:35:52,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=65347.333333333336, ans=0.125 +2024-08-03 07:36:00,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.291e+02 1.478e+02 1.851e+02 2.616e+02, threshold=2.956e+02, percent-clipped=0.0 +2024-08-03 07:36:20,409 INFO [train.py:1114] (1/4) Epoch 5, batch 3300, loss[loss=0.2814, simple_loss=0.3492, pruned_loss=0.1068, over 12972.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3291, pruned_loss=0.09404, over 2642273.93 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:36:23,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65494.0, ans=0.125 +2024-08-03 07:36:29,030 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=3.645e-02 +2024-08-03 07:36:37,246 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:36:42,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=65567.33333333333, ans=0.125 +2024-08-03 07:36:59,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=65640.66666666667, ans=0.125 +2024-08-03 07:37:03,881 INFO [train.py:1114] (1/4) Epoch 5, batch 3350, loss[loss=0.2373, simple_loss=0.3218, pruned_loss=0.07639, over 13296.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3304, pruned_loss=0.09499, over 2630786.10 frames. ], batch size: 49, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:37:04,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65677.33333333333, ans=0.1 +2024-08-03 07:37:11,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=65714.0, ans=0.2 +2024-08-03 07:37:14,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.03 vs. 
limit=15.0 +2024-08-03 07:37:18,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=65714.0, ans=0.0 +2024-08-03 07:37:26,818 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.375e+02 1.557e+02 1.947e+02 3.831e+02, threshold=3.114e+02, percent-clipped=2.0 +2024-08-03 07:37:46,240 INFO [train.py:1114] (1/4) Epoch 5, batch 3400, loss[loss=0.2402, simple_loss=0.3003, pruned_loss=0.09012, over 13547.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3291, pruned_loss=0.09434, over 2626180.86 frames. ], batch size: 31, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:37:59,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=65897.33333333333, ans=0.025 +2024-08-03 07:38:02,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=65934.0, ans=0.125 +2024-08-03 07:38:18,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=65970.66666666667, ans=0.0 +2024-08-03 07:38:28,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0 +2024-08-03 07:38:28,621 INFO [train.py:1114] (1/4) Epoch 5, batch 3450, loss[loss=0.2768, simple_loss=0.3513, pruned_loss=0.1012, over 12875.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.329, pruned_loss=0.09384, over 2629511.90 frames. ], batch size: 52, lr: 2.26e-02, grad_scale: 32.0 +2024-08-03 07:38:46,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=66117.33333333333, ans=0.125 +2024-08-03 07:38:49,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=15.0 +2024-08-03 07:38:50,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=66117.33333333333, ans=0.0 +2024-08-03 07:38:51,386 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.367e+02 1.647e+02 2.162e+02 3.510e+02, threshold=3.294e+02, percent-clipped=1.0 +2024-08-03 07:39:01,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=66190.66666666667, ans=0.125 +2024-08-03 07:39:03,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=66190.66666666667, ans=0.125 +2024-08-03 07:39:04,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=66190.66666666667, ans=0.125 +2024-08-03 07:39:07,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.21 vs. limit=10.0 +2024-08-03 07:39:10,861 INFO [train.py:1114] (1/4) Epoch 5, batch 3500, loss[loss=0.249, simple_loss=0.3156, pruned_loss=0.09126, over 13546.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.328, pruned_loss=0.09381, over 2631637.35 frames. 
], batch size: 34, lr: 2.26e-02, grad_scale: 32.0 +2024-08-03 07:39:15,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=66227.33333333333, ans=0.125 +2024-08-03 07:39:21,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=66264.0, ans=0.025 +2024-08-03 07:39:24,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=66264.0, ans=0.125 +2024-08-03 07:39:25,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=66264.0, ans=0.125 +2024-08-03 07:39:27,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=66264.0, ans=0.2 +2024-08-03 07:39:29,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-08-03 07:39:39,171 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:39:48,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=66337.33333333333, ans=0.025 +2024-08-03 07:40:00,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.29 vs. limit=22.5 +2024-08-03 07:40:02,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=66374.0, ans=0.125 +2024-08-03 07:40:03,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.24 vs. limit=15.0 +2024-08-03 07:40:05,779 INFO [train.py:1114] (1/4) Epoch 5, batch 3550, loss[loss=0.2607, simple_loss=0.337, pruned_loss=0.09223, over 12516.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.33, pruned_loss=0.09454, over 2630486.32 frames. ], batch size: 58, lr: 2.26e-02, grad_scale: 16.0 +2024-08-03 07:40:22,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. 
limit=6.0 +2024-08-03 07:40:26,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=66447.33333333333, ans=0.125 +2024-08-03 07:40:30,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66447.33333333333, ans=0.1 +2024-08-03 07:40:41,449 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.465e+02 1.682e+02 2.236e+02 4.572e+02, threshold=3.363e+02, percent-clipped=5.0 +2024-08-03 07:40:42,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=66520.66666666667, ans=0.125 +2024-08-03 07:40:48,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=66520.66666666667, ans=0.125 +2024-08-03 07:40:57,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66557.33333333333, ans=0.125 +2024-08-03 07:45:31,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=66557.33333333333, ans=0.125 +2024-08-03 07:45:32,996 INFO [train.py:1114] (1/4) Epoch 5, batch 3600, loss[loss=0.3382, simple_loss=0.3784, pruned_loss=0.1489, over 9339.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3365, pruned_loss=0.1011, over 2490761.69 frames. ], batch size: 96, lr: 2.26e-02, grad_scale: 32.0 +2024-08-03 07:45:49,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=66667.33333333333, ans=0.0 +2024-08-03 07:45:52,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.98 vs. limit=15.0 +2024-08-03 07:45:52,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=66667.33333333333, ans=10.0 +2024-08-03 07:45:56,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=66667.33333333333, ans=0.025 +2024-08-03 07:46:03,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=66704.0, ans=0.0 +2024-08-03 07:46:06,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66740.66666666667, ans=0.0 +2024-08-03 07:47:41,854 INFO [train.py:1114] (1/4) Epoch 6, batch 0, loss[loss=0.244, simple_loss=0.3201, pruned_loss=0.08389, over 13346.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3201, pruned_loss=0.08389, over 13346.00 frames. ], batch size: 33, lr: 2.10e-02, grad_scale: 32.0 +2024-08-03 07:47:41,855 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 07:47:51,611 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.2159, simple_loss=0.3144, pruned_loss=0.05871, over 944034.00 frames. 
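The recurring record types above can be read as follows. `ScheduledFloat` lines from scaling.py report the current value (`ans`) of a hyperparameter scheduled as a function of `batch_count`; in icefall's scaling.py these schedules are, roughly, piecewise-linear interpolations between (batch_count, value) breakpoints. `Whitening` lines compare a feature-whitening metric against its limit. The optim.py WARNING lines report quartiles of recently observed gradient norms plus the clipping threshold, and the logged numbers are consistent with threshold = Clipping_scale × median (e.g. for the 07:40:41 warning above, 2.0 × 1.682e+02 ≈ 3.363e+02). A minimal sketch of the first and last rules, assuming this reading of the logs; the function names and windowing here are illustrative, not the actual scaling.py/optim.py APIs:

```python
import numpy as np

def scheduled_float(batch_count, points):
    """Piecewise-linear schedule over batch_count, e.g.
    points = [(0.0, 0.3), (20000.0, 0.1)] -> 0.3 early in training,
    decaying linearly to 0.1 by batch 20000 and constant after."""
    xs, ys = zip(*points)
    return float(np.interp(batch_count, xs, ys))

def clip_threshold(recent_grad_norms, clipping_scale=2.0):
    """Threshold consistent with the optim.py WARNING lines:
    clipping_scale times the median of recently observed grad norms."""
    quartiles = np.quantile(recent_grad_norms, [0.0, 0.25, 0.5, 0.75, 1.0])
    return quartiles, clipping_scale * quartiles[2]

# Under this reading, percent-clipped is the share of recent updates
# whose grad norm exceeded the threshold in effect at the time.
```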
+2024-08-03 07:47:51,612 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 07:48:08,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66781.0, ans=0.125 +2024-08-03 07:48:14,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=66817.66666666667, ans=0.0 +2024-08-03 07:48:17,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66817.66666666667, ans=0.1 +2024-08-03 07:48:28,344 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.298e+02 1.438e+02 1.681e+02 2.917e+02, threshold=2.876e+02, percent-clipped=0.0 +2024-08-03 07:48:31,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=66891.0, ans=0.2 +2024-08-03 07:48:37,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.02 vs. limit=15.0 +2024-08-03 07:48:39,354 INFO [train.py:1114] (1/4) Epoch 6, batch 50, loss[loss=0.2575, simple_loss=0.3171, pruned_loss=0.09899, over 13422.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3349, pruned_loss=0.09925, over 577610.32 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 32.0 +2024-08-03 07:48:48,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=66927.66666666667, ans=0.125 +2024-08-03 07:48:54,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66964.33333333333, ans=0.1 +2024-08-03 07:49:00,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66964.33333333333, ans=0.1 +2024-08-03 07:49:04,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=67001.0, ans=0.0 +2024-08-03 07:49:12,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=67001.0, ans=0.125 +2024-08-03 07:49:13,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=67001.0, ans=0.2 +2024-08-03 07:49:14,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=67001.0, ans=0.0 +2024-08-03 07:49:14,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=67001.0, ans=0.0 +2024-08-03 07:49:15,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67001.0, ans=0.1 +2024-08-03 07:49:19,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=67037.66666666667, ans=0.125 +2024-08-03 07:49:30,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=67074.33333333333, ans=0.025 +2024-08-03 07:49:35,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=67074.33333333333, ans=0.2 +2024-08-03 07:49:42,389 INFO [train.py:1114] (1/4) Epoch 6, batch 100, 
loss[loss=0.2419, simple_loss=0.313, pruned_loss=0.08536, over 13532.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3325, pruned_loss=0.09569, over 1025396.91 frames. ], batch size: 35, lr: 2.10e-02, grad_scale: 32.0 +2024-08-03 07:50:13,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=67221.0, ans=0.025 +2024-08-03 07:50:22,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0 +2024-08-03 07:50:22,470 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.327e+02 1.627e+02 2.047e+02 3.063e+02, threshold=3.255e+02, percent-clipped=2.0 +2024-08-03 07:50:36,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67257.66666666667, ans=0.125 +2024-08-03 07:50:38,353 INFO [train.py:1114] (1/4) Epoch 6, batch 150, loss[loss=0.2169, simple_loss=0.2922, pruned_loss=0.07087, over 13410.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3288, pruned_loss=0.09365, over 1386642.70 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 16.0 +2024-08-03 07:51:03,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.67 vs. limit=15.0 +2024-08-03 07:51:08,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0 +2024-08-03 07:51:26,114 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:51:27,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=67404.33333333333, ans=0.0 +2024-08-03 07:51:29,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.70 vs. limit=22.5 +2024-08-03 07:51:31,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67404.33333333333, ans=0.0 +2024-08-03 07:51:36,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=67441.0, ans=0.125 +2024-08-03 07:51:40,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=67441.0, ans=0.0 +2024-08-03 07:51:43,439 INFO [train.py:1114] (1/4) Epoch 6, batch 200, loss[loss=0.2589, simple_loss=0.3308, pruned_loss=0.09343, over 12471.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3258, pruned_loss=0.09153, over 1664736.75 frames. 
], batch size: 58, lr: 2.09e-02, grad_scale: 16.0 +2024-08-03 07:51:48,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=67477.66666666667, ans=0.025 +2024-08-03 07:51:49,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=67477.66666666667, ans=0.0 +2024-08-03 07:51:53,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=67514.33333333333, ans=0.05 +2024-08-03 07:51:56,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=67514.33333333333, ans=0.0 +2024-08-03 07:52:22,922 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.261e+02 1.408e+02 1.836e+02 2.572e+02, threshold=2.817e+02, percent-clipped=0.0 +2024-08-03 07:52:28,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=15.0 +2024-08-03 07:52:32,910 INFO [train.py:1114] (1/4) Epoch 6, batch 250, loss[loss=0.2663, simple_loss=0.3368, pruned_loss=0.09784, over 13297.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3252, pruned_loss=0.09099, over 1884071.22 frames. ], batch size: 46, lr: 2.09e-02, grad_scale: 16.0 +2024-08-03 07:52:33,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67661.0, ans=0.1 +2024-08-03 07:52:35,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=67661.0, ans=0.2 +2024-08-03 07:52:37,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=67661.0, ans=0.125 +2024-08-03 07:52:39,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=67661.0, ans=0.2 +2024-08-03 07:52:59,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=67697.66666666667, ans=0.0 +2024-08-03 07:53:03,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.27 vs. limit=15.0 +2024-08-03 07:53:06,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=67734.33333333333, ans=0.2 +2024-08-03 07:53:22,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=67807.66666666667, ans=0.0 +2024-08-03 07:53:27,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=67844.33333333333, ans=0.0 +2024-08-03 07:53:28,621 INFO [train.py:1114] (1/4) Epoch 6, batch 300, loss[loss=0.2576, simple_loss=0.3311, pruned_loss=0.092, over 13441.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3255, pruned_loss=0.09148, over 2051657.94 frames. 
], batch size: 42, lr: 2.09e-02, grad_scale: 16.0 +2024-08-03 07:53:36,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67844.33333333333, ans=0.1 +2024-08-03 07:53:37,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67844.33333333333, ans=0.125 +2024-08-03 07:53:43,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.21 vs. limit=15.0 +2024-08-03 07:53:48,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=67917.66666666667, ans=0.125 +2024-08-03 07:54:04,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67954.33333333333, ans=0.0 +2024-08-03 07:54:05,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=67954.33333333333, ans=0.025 +2024-08-03 07:54:08,717 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.256e+02 1.436e+02 1.831e+02 3.083e+02, threshold=2.872e+02, percent-clipped=2.0 +2024-08-03 07:54:09,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=67991.0, ans=0.2 +2024-08-03 07:54:10,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=67991.0, ans=0.0 +2024-08-03 07:54:12,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=67991.0, ans=0.0 +2024-08-03 07:54:19,022 INFO [train.py:1114] (1/4) Epoch 6, batch 350, loss[loss=0.2344, simple_loss=0.3093, pruned_loss=0.07977, over 13573.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3257, pruned_loss=0.09134, over 2182421.28 frames. ], batch size: 33, lr: 2.09e-02, grad_scale: 16.0 +2024-08-03 07:54:19,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=68027.66666666667, ans=0.125 +2024-08-03 07:54:31,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=68064.33333333333, ans=0.0 +2024-08-03 07:54:32,472 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.14 vs. limit=15.0 +2024-08-03 07:54:42,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=68101.0, ans=0.025 +2024-08-03 07:54:42,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=68101.0, ans=0.125 +2024-08-03 07:55:02,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68174.33333333333, ans=0.1 +2024-08-03 07:55:06,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=68211.0, ans=0.125 +2024-08-03 07:55:06,638 INFO [train.py:1114] (1/4) Epoch 6, batch 400, loss[loss=0.262, simple_loss=0.3352, pruned_loss=0.09433, over 13352.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3251, pruned_loss=0.09111, over 2286339.94 frames. 
], batch size: 37, lr: 2.08e-02, grad_scale: 32.0 +2024-08-03 07:55:14,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68211.0, ans=0.125 +2024-08-03 07:55:25,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=68284.33333333333, ans=0.125 +2024-08-03 07:55:42,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.404e+02 1.670e+02 2.079e+02 3.576e+02, threshold=3.340e+02, percent-clipped=3.0 +2024-08-03 07:55:52,431 INFO [train.py:1114] (1/4) Epoch 6, batch 450, loss[loss=0.2618, simple_loss=0.3344, pruned_loss=0.09455, over 13545.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3249, pruned_loss=0.09091, over 2359326.62 frames. ], batch size: 38, lr: 2.08e-02, grad_scale: 32.0 +2024-08-03 07:55:52,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=68394.33333333333, ans=0.2 +2024-08-03 07:55:54,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68394.33333333333, ans=0.0 +2024-08-03 07:55:54,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0 +2024-08-03 07:55:59,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=68394.33333333333, ans=0.0 +2024-08-03 07:56:00,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=68394.33333333333, ans=0.0 +2024-08-03 07:56:20,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68467.66666666667, ans=0.125 +2024-08-03 07:56:32,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68541.0, ans=0.125 +2024-08-03 07:56:33,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68541.0, ans=0.0 +2024-08-03 07:56:41,073 INFO [train.py:1114] (1/4) Epoch 6, batch 500, loss[loss=0.2567, simple_loss=0.3331, pruned_loss=0.09015, over 13444.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3237, pruned_loss=0.08999, over 2424719.33 frames. 
], batch size: 43, lr: 2.08e-02, grad_scale: 32.0 +2024-08-03 07:56:41,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68577.66666666667, ans=0.1 +2024-08-03 07:57:02,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=68651.0, ans=0.125 +2024-08-03 07:57:11,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=68687.66666666667, ans=0.0 +2024-08-03 07:57:15,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.283e+02 1.463e+02 1.945e+02 3.864e+02, threshold=2.927e+02, percent-clipped=1.0 +2024-08-03 07:57:17,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=68724.33333333333, ans=0.2 +2024-08-03 07:57:20,698 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:57:25,751 INFO [train.py:1114] (1/4) Epoch 6, batch 550, loss[loss=0.302, simple_loss=0.3572, pruned_loss=0.1234, over 13288.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.324, pruned_loss=0.09021, over 2466917.63 frames. ], batch size: 49, lr: 2.08e-02, grad_scale: 32.0 +2024-08-03 07:57:29,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=68761.0, ans=0.0 +2024-08-03 07:57:38,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.44 vs. limit=15.0 +2024-08-03 07:58:13,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68907.66666666667, ans=0.0 +2024-08-03 07:58:15,092 INFO [train.py:1114] (1/4) Epoch 6, batch 600, loss[loss=0.2717, simple_loss=0.35, pruned_loss=0.09664, over 13334.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3231, pruned_loss=0.08961, over 2507105.46 frames. ], batch size: 46, lr: 2.07e-02, grad_scale: 32.0 +2024-08-03 07:58:18,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=68944.33333333333, ans=0.125 +2024-08-03 07:58:45,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.46 vs. limit=22.5 +2024-08-03 07:58:45,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69054.33333333333, ans=0.125 +2024-08-03 07:58:46,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=69054.33333333333, ans=0.0 +2024-08-03 07:58:47,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. 
limit=15.0 +2024-08-03 07:58:50,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=69054.33333333333, ans=0.125 +2024-08-03 07:58:52,940 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.444e+02 1.751e+02 2.367e+02 5.361e+02, threshold=3.502e+02, percent-clipped=14.0 +2024-08-03 07:59:03,044 INFO [train.py:1114] (1/4) Epoch 6, batch 650, loss[loss=0.2362, simple_loss=0.3203, pruned_loss=0.07607, over 13549.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3228, pruned_loss=0.0896, over 2542615.78 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 32.0 +2024-08-03 07:59:06,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=69127.66666666667, ans=0.125 +2024-08-03 07:59:20,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.24 vs. limit=15.0 +2024-08-03 07:59:24,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69201.0, ans=0.125 +2024-08-03 07:59:54,467 INFO [train.py:1114] (1/4) Epoch 6, batch 700, loss[loss=0.2511, simple_loss=0.3112, pruned_loss=0.09553, over 13539.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3235, pruned_loss=0.08972, over 2565134.14 frames. ], batch size: 35, lr: 2.07e-02, grad_scale: 32.0 +2024-08-03 08:00:09,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69347.66666666667, ans=0.125 +2024-08-03 08:00:11,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.50 vs. limit=22.5 +2024-08-03 08:00:16,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.30 vs. limit=10.0 +2024-08-03 08:00:30,544 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.317e+02 1.524e+02 2.110e+02 4.129e+02, threshold=3.048e+02, percent-clipped=1.0 +2024-08-03 08:00:32,573 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:00:38,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=69494.33333333333, ans=0.0 +2024-08-03 08:00:39,419 INFO [train.py:1114] (1/4) Epoch 6, batch 750, loss[loss=0.2639, simple_loss=0.3431, pruned_loss=0.09237, over 13354.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3233, pruned_loss=0.08986, over 2583485.61 frames. 
], batch size: 37, lr: 2.07e-02, grad_scale: 16.0 +2024-08-03 08:01:00,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=69567.66666666667, ans=0.125 +2024-08-03 08:01:04,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=69567.66666666667, ans=0.0 +2024-08-03 08:01:12,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69604.33333333333, ans=0.125 +2024-08-03 08:01:18,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=69641.0, ans=0.2 +2024-08-03 08:01:24,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69641.0, ans=0.125 +2024-08-03 08:01:25,633 INFO [train.py:1114] (1/4) Epoch 6, batch 800, loss[loss=0.1838, simple_loss=0.2629, pruned_loss=0.05231, over 13350.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3226, pruned_loss=0.08954, over 2597549.84 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0 +2024-08-03 08:01:30,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=69677.66666666667, ans=0.0 +2024-08-03 08:01:38,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=69714.33333333333, ans=0.035 +2024-08-03 08:02:04,558 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.299e+02 1.541e+02 1.861e+02 5.767e+02, threshold=3.082e+02, percent-clipped=2.0 +2024-08-03 08:02:09,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.43 vs. limit=15.0 +2024-08-03 08:02:14,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=15.0 +2024-08-03 08:02:15,431 INFO [train.py:1114] (1/4) Epoch 6, batch 850, loss[loss=0.2656, simple_loss=0.3389, pruned_loss=0.09616, over 13316.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3219, pruned_loss=0.08913, over 2609514.34 frames. ], batch size: 40, lr: 2.06e-02, grad_scale: 32.0 +2024-08-03 08:02:25,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69897.66666666667, ans=0.125 +2024-08-03 08:02:29,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=69897.66666666667, ans=0.125 +2024-08-03 08:02:45,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69971.0, ans=0.1 +2024-08-03 08:02:55,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=70007.66666666667, ans=0.125 +2024-08-03 08:02:55,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=70007.66666666667, ans=0.05 +2024-08-03 08:02:56,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.65 vs. 
limit=15.0 +2024-08-03 08:03:02,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=70007.66666666667, ans=0.0 +2024-08-03 08:03:05,399 INFO [train.py:1114] (1/4) Epoch 6, batch 900, loss[loss=0.2254, simple_loss=0.2959, pruned_loss=0.07751, over 13344.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3224, pruned_loss=0.08929, over 2612025.60 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0 +2024-08-03 08:03:07,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.48 vs. limit=6.0 +2024-08-03 08:03:12,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.75 vs. limit=15.0 +2024-08-03 08:03:17,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=70081.0, ans=0.025 +2024-08-03 08:03:26,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=70117.66666666667, ans=0.2 +2024-08-03 08:03:35,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=70117.66666666667, ans=0.04949747468305833 +2024-08-03 08:03:41,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=70154.33333333333, ans=0.025 +2024-08-03 08:03:42,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-08-03 08:03:47,225 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.449e+02 1.731e+02 2.120e+02 4.168e+02, threshold=3.462e+02, percent-clipped=3.0 +2024-08-03 08:03:49,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=70191.0, ans=0.125 +2024-08-03 08:03:56,470 INFO [train.py:1114] (1/4) Epoch 6, batch 950, loss[loss=0.2034, simple_loss=0.2857, pruned_loss=0.06058, over 13531.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3224, pruned_loss=0.08913, over 2612026.67 frames. ], batch size: 34, lr: 2.06e-02, grad_scale: 32.0 +2024-08-03 08:04:02,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=70227.66666666667, ans=0.0 +2024-08-03 08:04:18,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=70301.0, ans=0.025 +2024-08-03 08:04:19,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=70301.0, ans=0.125 +2024-08-03 08:04:22,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70301.0, ans=0.0 +2024-08-03 08:04:26,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.16 vs. limit=15.0 +2024-08-03 08:04:29,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.03 vs. 
limit=15.0 +2024-08-03 08:04:37,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=70374.33333333333, ans=0.09899494936611666 +2024-08-03 08:04:38,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70374.33333333333, ans=0.0 +2024-08-03 08:04:43,846 INFO [train.py:1114] (1/4) Epoch 6, batch 1000, loss[loss=0.2578, simple_loss=0.3275, pruned_loss=0.09401, over 13363.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.324, pruned_loss=0.09043, over 2610830.82 frames. ], batch size: 35, lr: 2.06e-02, grad_scale: 32.0 +2024-08-03 08:04:44,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=70411.0, ans=0.1 +2024-08-03 08:04:56,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=70447.66666666667, ans=0.125 +2024-08-03 08:05:08,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=70484.33333333333, ans=0.125 +2024-08-03 08:05:09,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.05 vs. limit=15.0 +2024-08-03 08:05:19,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=12.0 +2024-08-03 08:05:22,971 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.312e+02 1.654e+02 1.929e+02 3.115e+02, threshold=3.308e+02, percent-clipped=0.0 +2024-08-03 08:05:27,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=70557.66666666667, ans=0.2 +2024-08-03 08:05:28,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=70557.66666666667, ans=0.07 +2024-08-03 08:05:34,388 INFO [train.py:1114] (1/4) Epoch 6, batch 1050, loss[loss=0.2669, simple_loss=0.3501, pruned_loss=0.09187, over 13570.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3233, pruned_loss=0.09005, over 2614755.96 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0 +2024-08-03 08:05:36,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=70594.33333333333, ans=0.2 +2024-08-03 08:05:42,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=70631.0, ans=0.07 +2024-08-03 08:06:14,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=70741.0, ans=0.125 +2024-08-03 08:06:15,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=70741.0, ans=0.0 +2024-08-03 08:06:21,126 INFO [train.py:1114] (1/4) Epoch 6, batch 1100, loss[loss=0.251, simple_loss=0.3238, pruned_loss=0.08913, over 13558.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3229, pruned_loss=0.08972, over 2618956.25 frames. 
], batch size: 36, lr: 2.05e-02, grad_scale: 32.0 +2024-08-03 08:06:32,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=70814.33333333333, ans=0.2 +2024-08-03 08:06:43,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=70851.0, ans=0.5 +2024-08-03 08:06:54,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=70887.66666666667, ans=0.0 +2024-08-03 08:06:57,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.320e+02 1.582e+02 2.063e+02 3.090e+02, threshold=3.163e+02, percent-clipped=0.0 +2024-08-03 08:07:04,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=70924.33333333333, ans=0.0 +2024-08-03 08:07:08,201 INFO [train.py:1114] (1/4) Epoch 6, batch 1150, loss[loss=0.2479, simple_loss=0.3145, pruned_loss=0.09064, over 13556.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3224, pruned_loss=0.08972, over 2618828.73 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0 +2024-08-03 08:07:28,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.31 vs. limit=15.0 +2024-08-03 08:07:39,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=71071.0, ans=0.2 +2024-08-03 08:07:43,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=71071.0, ans=0.0 +2024-08-03 08:07:54,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=71144.33333333333, ans=0.125 +2024-08-03 08:07:55,754 INFO [train.py:1114] (1/4) Epoch 6, batch 1200, loss[loss=0.2675, simple_loss=0.3435, pruned_loss=0.09571, over 13593.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3228, pruned_loss=0.08954, over 2617062.42 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0 +2024-08-03 08:08:18,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=71217.66666666667, ans=0.09899494936611666 +2024-08-03 08:08:18,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71217.66666666667, ans=0.1 +2024-08-03 08:08:31,455 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.251e+02 1.398e+02 1.677e+02 2.839e+02, threshold=2.796e+02, percent-clipped=0.0 +2024-08-03 08:08:38,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.22 vs. limit=6.0 +2024-08-03 08:08:40,470 INFO [train.py:1114] (1/4) Epoch 6, batch 1250, loss[loss=0.2481, simple_loss=0.318, pruned_loss=0.08909, over 13424.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.323, pruned_loss=0.08931, over 2628395.33 frames. ], batch size: 42, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:08:46,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=71327.66666666667, ans=0.0 +2024-08-03 08:08:52,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. 
limit=12.0 +2024-08-03 08:09:12,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-08-03 08:09:28,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71511.0, ans=0.125 +2024-08-03 08:09:29,156 INFO [train.py:1114] (1/4) Epoch 6, batch 1300, loss[loss=0.2819, simple_loss=0.3491, pruned_loss=0.1074, over 12995.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3226, pruned_loss=0.08915, over 2632025.35 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:09:42,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=71547.66666666667, ans=0.0 +2024-08-03 08:09:46,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=71547.66666666667, ans=0.0 +2024-08-03 08:09:48,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71547.66666666667, ans=0.125 +2024-08-03 08:09:52,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=71584.33333333333, ans=0.0 +2024-08-03 08:09:56,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=71584.33333333333, ans=0.2 +2024-08-03 08:10:06,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71584.33333333333, ans=0.0 +2024-08-03 08:10:06,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=71584.33333333333, ans=0.025 +2024-08-03 08:10:06,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=71584.33333333333, ans=0.025 +2024-08-03 08:10:07,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=71584.33333333333, ans=0.125 +2024-08-03 08:10:09,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=71584.33333333333, ans=0.0 +2024-08-03 08:10:20,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=71621.0, ans=0.05 +2024-08-03 08:10:21,995 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.330e+02 1.619e+02 2.134e+02 3.747e+02, threshold=3.238e+02, percent-clipped=6.0 +2024-08-03 08:10:23,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=71657.66666666667, ans=0.125 +2024-08-03 08:10:31,264 INFO [train.py:1114] (1/4) Epoch 6, batch 1350, loss[loss=0.2375, simple_loss=0.3149, pruned_loss=0.08006, over 13544.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3219, pruned_loss=0.08877, over 2639579.05 frames. 
], batch size: 37, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:10:46,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=71694.33333333333, ans=0.0 +2024-08-03 08:10:48,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=71694.33333333333, ans=0.125 +2024-08-03 08:10:52,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.92 vs. limit=10.0 +2024-08-03 08:11:16,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=71767.66666666667, ans=0.0 +2024-08-03 08:11:27,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=71804.33333333333, ans=0.2 +2024-08-03 08:11:28,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.77 vs. limit=22.5 +2024-08-03 08:11:35,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=71804.33333333333, ans=0.0 +2024-08-03 08:11:47,807 INFO [train.py:1114] (1/4) Epoch 6, batch 1400, loss[loss=0.214, simple_loss=0.2843, pruned_loss=0.07181, over 13262.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3214, pruned_loss=0.08859, over 2643323.15 frames. ], batch size: 31, lr: 2.04e-02, grad_scale: 32.0 +2024-08-03 08:12:12,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=71951.0, ans=0.125 +2024-08-03 08:12:12,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=71951.0, ans=0.2 +2024-08-03 08:12:13,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=71951.0, ans=0.125 +2024-08-03 08:12:24,348 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.721e+01 1.334e+02 1.562e+02 1.833e+02 2.897e+02, threshold=3.124e+02, percent-clipped=0.0 +2024-08-03 08:12:37,231 INFO [train.py:1114] (1/4) Epoch 6, batch 1450, loss[loss=0.2613, simple_loss=0.3415, pruned_loss=0.09058, over 13429.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.322, pruned_loss=0.08886, over 2642170.02 frames. ], batch size: 43, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:12:46,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=72097.66666666667, ans=0.0 +2024-08-03 08:13:16,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=72207.66666666667, ans=0.0 +2024-08-03 08:13:24,199 INFO [train.py:1114] (1/4) Epoch 6, batch 1500, loss[loss=0.2944, simple_loss=0.3652, pruned_loss=0.1118, over 13403.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3231, pruned_loss=0.08927, over 2641763.68 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:13:25,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=72244.33333333333, ans=0.05 +2024-08-03 08:13:50,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.05 vs. 
limit=10.0 +2024-08-03 08:14:02,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=72391.0, ans=0.0 +2024-08-03 08:14:02,719 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.371e+02 1.678e+02 2.032e+02 3.850e+02, threshold=3.356e+02, percent-clipped=2.0 +2024-08-03 08:14:09,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.69 vs. limit=22.5 +2024-08-03 08:14:11,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=72427.66666666667, ans=0.0 +2024-08-03 08:14:11,852 INFO [train.py:1114] (1/4) Epoch 6, batch 1550, loss[loss=0.2408, simple_loss=0.316, pruned_loss=0.08277, over 13395.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3226, pruned_loss=0.08927, over 2631393.59 frames. ], batch size: 41, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:14:37,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=72501.0, ans=0.2 +2024-08-03 08:14:38,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=72537.66666666667, ans=0.125 +2024-08-03 08:14:41,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=72537.66666666667, ans=0.035 +2024-08-03 08:14:59,387 INFO [train.py:1114] (1/4) Epoch 6, batch 1600, loss[loss=0.2489, simple_loss=0.3236, pruned_loss=0.08711, over 13570.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3226, pruned_loss=0.08955, over 2624272.02 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:15:15,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=72647.66666666667, ans=0.125 +2024-08-03 08:15:24,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=72684.33333333333, ans=0.0 +2024-08-03 08:15:26,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.65 vs. limit=15.0 +2024-08-03 08:15:26,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.63 vs. limit=15.0 +2024-08-03 08:15:34,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=12.0 +2024-08-03 08:15:36,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-08-03 08:15:38,173 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.361e+02 1.766e+02 2.117e+02 3.688e+02, threshold=3.533e+02, percent-clipped=3.0 +2024-08-03 08:15:42,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=72757.66666666667, ans=0.0 +2024-08-03 08:15:43,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. 
limit=15.0 +2024-08-03 08:15:46,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0 +2024-08-03 08:15:47,187 INFO [train.py:1114] (1/4) Epoch 6, batch 1650, loss[loss=0.2223, simple_loss=0.2998, pruned_loss=0.07238, over 13328.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.323, pruned_loss=0.09004, over 2620662.97 frames. ], batch size: 40, lr: 2.03e-02, grad_scale: 32.0 +2024-08-03 08:16:04,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72867.66666666667, ans=0.1 +2024-08-03 08:16:29,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=72941.0, ans=0.0 +2024-08-03 08:16:32,584 INFO [train.py:1114] (1/4) Epoch 6, batch 1700, loss[loss=0.206, simple_loss=0.2813, pruned_loss=0.06533, over 13263.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3222, pruned_loss=0.08905, over 2629801.92 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0 +2024-08-03 08:16:44,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=73014.33333333333, ans=0.125 +2024-08-03 08:16:45,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=73014.33333333333, ans=0.125 +2024-08-03 08:16:49,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=73014.33333333333, ans=0.0 +2024-08-03 08:17:05,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=73087.66666666667, ans=0.125 +2024-08-03 08:17:12,006 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.261e+02 1.452e+02 1.741e+02 3.211e+02, threshold=2.904e+02, percent-clipped=0.0 +2024-08-03 08:17:21,253 INFO [train.py:1114] (1/4) Epoch 6, batch 1750, loss[loss=0.2389, simple_loss=0.2935, pruned_loss=0.09216, over 13551.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3212, pruned_loss=0.08873, over 2633403.43 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0 +2024-08-03 08:17:37,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=73197.66666666667, ans=0.125 +2024-08-03 08:17:44,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=73234.33333333333, ans=0.07 +2024-08-03 08:17:45,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=73234.33333333333, ans=0.125 +2024-08-03 08:17:50,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73271.0, ans=0.125 +2024-08-03 08:17:59,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.33 vs. limit=22.5 +2024-08-03 08:18:02,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.58 vs. limit=10.0 +2024-08-03 08:18:08,631 INFO [train.py:1114] (1/4) Epoch 6, batch 1800, loss[loss=0.2418, simple_loss=0.3176, pruned_loss=0.08304, over 13553.00 frames. 
], tot_loss[loss=0.2506, simple_loss=0.3223, pruned_loss=0.08942, over 2635648.00 frames. ], batch size: 38, lr: 2.02e-02, grad_scale: 32.0 +2024-08-03 08:18:13,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=73344.33333333333, ans=0.0 +2024-08-03 08:18:22,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.13 vs. limit=15.0 +2024-08-03 08:18:24,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=73381.0, ans=0.0 +2024-08-03 08:18:36,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=73417.66666666667, ans=0.125 +2024-08-03 08:18:37,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=73454.33333333333, ans=0.125 +2024-08-03 08:18:39,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73454.33333333333, ans=0.1 +2024-08-03 08:18:39,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=73454.33333333333, ans=0.2 +2024-08-03 08:18:50,839 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.331e+02 1.549e+02 2.028e+02 3.164e+02, threshold=3.097e+02, percent-clipped=2.0 +2024-08-03 08:18:53,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=73491.0, ans=0.0 +2024-08-03 08:18:59,636 INFO [train.py:1114] (1/4) Epoch 6, batch 1850, loss[loss=0.2472, simple_loss=0.3232, pruned_loss=0.08564, over 13409.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3217, pruned_loss=0.0889, over 2638553.74 frames. ], batch size: 39, lr: 2.02e-02, grad_scale: 16.0 +2024-08-03 08:19:03,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.60 vs. limit=22.5 +2024-08-03 08:19:06,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.16 vs. 
limit=15.0 +2024-08-03 08:19:12,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=73564.33333333333, ans=0.0 +2024-08-03 08:19:13,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73564.33333333333, ans=0.125 +2024-08-03 08:19:15,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=73564.33333333333, ans=0.125 +2024-08-03 08:19:17,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=73564.33333333333, ans=0.0 +2024-08-03 08:19:29,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=73637.66666666667, ans=0.125 +2024-08-03 08:19:31,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=73637.66666666667, ans=0.125 +2024-08-03 08:19:36,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73637.66666666667, ans=0.125 +2024-08-03 08:19:38,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.94 vs. limit=15.0 +2024-08-03 08:19:43,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=73674.33333333333, ans=0.125 +2024-08-03 08:19:51,017 INFO [train.py:1114] (1/4) Epoch 6, batch 1900, loss[loss=0.2528, simple_loss=0.3269, pruned_loss=0.08935, over 13313.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3226, pruned_loss=0.08918, over 2640901.80 frames. ], batch size: 40, lr: 2.01e-02, grad_scale: 16.0 +2024-08-03 08:19:58,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=73711.0, ans=0.125 +2024-08-03 08:20:02,797 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:20:15,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=73784.33333333333, ans=0.0 +2024-08-03 08:20:22,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=73821.0, ans=0.2 +2024-08-03 08:20:29,681 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.282e+02 1.594e+02 1.886e+02 3.634e+02, threshold=3.188e+02, percent-clipped=1.0 +2024-08-03 08:21:06,039 INFO [train.py:1114] (1/4) Epoch 6, batch 1950, loss[loss=0.2159, simple_loss=0.2948, pruned_loss=0.06849, over 13562.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3233, pruned_loss=0.08907, over 2647566.42 frames. ], batch size: 36, lr: 2.01e-02, grad_scale: 16.0 +2024-08-03 08:21:13,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73894.33333333333, ans=0.125 +2024-08-03 08:21:25,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. 
limit=15.0 +2024-08-03 08:21:47,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74004.33333333333, ans=0.125 +2024-08-03 08:21:47,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=74004.33333333333, ans=0.025 +2024-08-03 08:21:54,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74041.0, ans=0.125 +2024-08-03 08:21:57,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74041.0, ans=0.1 +2024-08-03 08:22:01,505 INFO [train.py:1114] (1/4) Epoch 6, batch 2000, loss[loss=0.2497, simple_loss=0.3069, pruned_loss=0.09623, over 13525.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3239, pruned_loss=0.08978, over 2635867.13 frames. ], batch size: 31, lr: 2.01e-02, grad_scale: 32.0 +2024-08-03 08:22:17,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=74114.33333333333, ans=0.0 +2024-08-03 08:22:19,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=74151.0, ans=0.2 +2024-08-03 08:22:29,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-08-03 08:22:37,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=74187.66666666667, ans=0.125 +2024-08-03 08:22:40,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.304e+02 1.521e+02 1.870e+02 3.402e+02, threshold=3.042e+02, percent-clipped=1.0 +2024-08-03 08:22:51,648 INFO [train.py:1114] (1/4) Epoch 6, batch 2050, loss[loss=0.2145, simple_loss=0.294, pruned_loss=0.06752, over 13418.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3227, pruned_loss=0.08911, over 2632703.06 frames. ], batch size: 32, lr: 2.01e-02, grad_scale: 32.0 +2024-08-03 08:22:51,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=74261.0, ans=0.125 +2024-08-03 08:22:51,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=74261.0, ans=0.2 +2024-08-03 08:23:13,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.34 vs. limit=15.0 +2024-08-03 08:23:13,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=74334.33333333333, ans=0.125 +2024-08-03 08:23:15,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.71 vs. limit=15.0 +2024-08-03 08:23:28,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=74371.0, ans=0.0 +2024-08-03 08:23:29,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.65 vs. 
limit=15.0 +2024-08-03 08:23:34,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=74407.66666666667, ans=0.125 +2024-08-03 08:23:40,250 INFO [train.py:1114] (1/4) Epoch 6, batch 2100, loss[loss=0.2363, simple_loss=0.3176, pruned_loss=0.07755, over 13534.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3222, pruned_loss=0.08851, over 2638030.52 frames. ], batch size: 37, lr: 2.01e-02, grad_scale: 32.0 +2024-08-03 08:23:41,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.08 vs. limit=15.0 +2024-08-03 08:23:55,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=12.0 +2024-08-03 08:23:55,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74481.0, ans=0.1 +2024-08-03 08:23:57,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.36 vs. limit=10.0 +2024-08-03 08:23:59,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=74517.66666666667, ans=0.125 +2024-08-03 08:24:06,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=74554.33333333333, ans=0.0 +2024-08-03 08:24:16,929 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.291e+02 1.674e+02 2.132e+02 3.817e+02, threshold=3.348e+02, percent-clipped=5.0 +2024-08-03 08:24:17,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=74591.0, ans=0.2 +2024-08-03 08:24:25,014 INFO [train.py:1114] (1/4) Epoch 6, batch 2150, loss[loss=0.2259, simple_loss=0.3108, pruned_loss=0.07049, over 13545.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3201, pruned_loss=0.08711, over 2646735.02 frames. ], batch size: 36, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:24:36,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=74664.33333333333, ans=0.2 +2024-08-03 08:24:46,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=74701.0, ans=0.2 +2024-08-03 08:24:54,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=74737.66666666667, ans=0.0 +2024-08-03 08:25:14,035 INFO [train.py:1114] (1/4) Epoch 6, batch 2200, loss[loss=0.2704, simple_loss=0.3432, pruned_loss=0.09885, over 13389.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.32, pruned_loss=0.08706, over 2645293.66 frames. ], batch size: 39, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:25:17,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=74811.0, ans=0.2 +2024-08-03 08:25:23,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=74847.66666666667, ans=0.125 +2024-08-03 08:25:31,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.87 vs. 
limit=15.0 +2024-08-03 08:25:32,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74884.33333333333, ans=0.125 +2024-08-03 08:25:37,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=74884.33333333333, ans=0.125 +2024-08-03 08:25:41,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74921.0, ans=0.125 +2024-08-03 08:25:43,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=74921.0, ans=0.025 +2024-08-03 08:25:50,667 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.295e+02 1.544e+02 1.950e+02 3.525e+02, threshold=3.088e+02, percent-clipped=1.0 +2024-08-03 08:25:59,015 INFO [train.py:1114] (1/4) Epoch 6, batch 2250, loss[loss=0.2358, simple_loss=0.3094, pruned_loss=0.08112, over 13358.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3195, pruned_loss=0.08683, over 2643231.50 frames. ], batch size: 37, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:26:08,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=75031.0, ans=0.125 +2024-08-03 08:26:09,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=75031.0, ans=0.0 +2024-08-03 08:26:14,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0 +2024-08-03 08:26:21,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75067.66666666667, ans=0.1 +2024-08-03 08:26:38,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.73 vs. limit=15.0 +2024-08-03 08:26:46,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=75141.0, ans=0.125 +2024-08-03 08:26:48,088 INFO [train.py:1114] (1/4) Epoch 6, batch 2300, loss[loss=0.2394, simple_loss=0.3078, pruned_loss=0.08546, over 13595.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3185, pruned_loss=0.08655, over 2637917.24 frames. ], batch size: 33, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:27:01,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-08-03 08:27:08,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=75251.0, ans=0.025 +2024-08-03 08:27:09,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=75251.0, ans=0.125 +2024-08-03 08:27:18,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=75287.66666666667, ans=0.025 +2024-08-03 08:27:21,930 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. 
limit=10.0 +2024-08-03 08:27:25,145 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.310e+02 1.601e+02 2.046e+02 3.853e+02, threshold=3.202e+02, percent-clipped=4.0 +2024-08-03 08:27:28,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.71 vs. limit=10.0 +2024-08-03 08:27:33,456 INFO [train.py:1114] (1/4) Epoch 6, batch 2350, loss[loss=0.2372, simple_loss=0.3188, pruned_loss=0.07776, over 13556.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3187, pruned_loss=0.08661, over 2640872.55 frames. ], batch size: 38, lr: 2.00e-02, grad_scale: 32.0 +2024-08-03 08:27:54,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=75434.33333333333, ans=0.07 +2024-08-03 08:28:05,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=75471.0, ans=0.0 +2024-08-03 08:28:18,838 INFO [train.py:1114] (1/4) Epoch 6, batch 2400, loss[loss=0.196, simple_loss=0.2794, pruned_loss=0.05623, over 13546.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3194, pruned_loss=0.08686, over 2642754.49 frames. ], batch size: 35, lr: 1.99e-02, grad_scale: 32.0 +2024-08-03 08:28:21,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.35 vs. limit=15.0 +2024-08-03 08:28:23,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=75544.33333333333, ans=0.2 +2024-08-03 08:28:41,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=75617.66666666667, ans=0.125 +2024-08-03 08:28:49,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=75654.33333333333, ans=0.125 +2024-08-03 08:28:51,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.19 vs. limit=15.0 +2024-08-03 08:28:56,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0 +2024-08-03 08:28:59,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.273e+02 1.529e+02 2.027e+02 4.146e+02, threshold=3.058e+02, percent-clipped=9.0 +2024-08-03 08:28:59,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=75691.0, ans=0.0 +2024-08-03 08:29:07,936 INFO [train.py:1114] (1/4) Epoch 6, batch 2450, loss[loss=0.2816, simple_loss=0.3495, pruned_loss=0.1068, over 13339.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.321, pruned_loss=0.08786, over 2632053.24 frames. ], batch size: 37, lr: 1.99e-02, grad_scale: 32.0 +2024-08-03 08:29:09,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=75727.66666666667, ans=0.0 +2024-08-03 08:29:12,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.14 vs. 
limit=22.5 +2024-08-03 08:29:20,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=75764.33333333333, ans=0.0 +2024-08-03 08:29:22,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=75764.33333333333, ans=0.0 +2024-08-03 08:29:26,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75801.0, ans=0.125 +2024-08-03 08:29:27,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=75801.0, ans=0.0 +2024-08-03 08:29:46,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75874.33333333333, ans=0.125 +2024-08-03 08:29:52,480 INFO [train.py:1114] (1/4) Epoch 6, batch 2500, loss[loss=0.2314, simple_loss=0.3156, pruned_loss=0.07364, over 13396.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3207, pruned_loss=0.08705, over 2636584.74 frames. ], batch size: 39, lr: 1.99e-02, grad_scale: 32.0 +2024-08-03 08:29:59,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.00 vs. limit=15.0 +2024-08-03 08:30:14,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75984.33333333333, ans=0.0 +2024-08-03 08:30:26,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.89 vs. limit=6.0 +2024-08-03 08:30:26,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=76021.0, ans=0.05 +2024-08-03 08:30:27,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=76021.0, ans=0.2 +2024-08-03 08:30:31,088 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.253e+02 1.529e+02 2.022e+02 3.392e+02, threshold=3.058e+02, percent-clipped=3.0 +2024-08-03 08:30:32,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=76057.66666666667, ans=0.125 +2024-08-03 08:30:38,263 INFO [train.py:1114] (1/4) Epoch 6, batch 2550, loss[loss=0.2145, simple_loss=0.2839, pruned_loss=0.0726, over 13576.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3202, pruned_loss=0.08705, over 2637709.96 frames. ], batch size: 31, lr: 1.99e-02, grad_scale: 16.0 +2024-08-03 08:30:51,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.48 vs. limit=22.5 +2024-08-03 08:30:53,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.99 vs. 
limit=15.0 +2024-08-03 08:31:18,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76241.0, ans=0.1 +2024-08-03 08:31:19,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76241.0, ans=0.0 +2024-08-03 08:31:19,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=76241.0, ans=0.0 +2024-08-03 08:31:25,203 INFO [train.py:1114] (1/4) Epoch 6, batch 2600, loss[loss=0.2255, simple_loss=0.3048, pruned_loss=0.07307, over 13551.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3207, pruned_loss=0.08691, over 2637057.85 frames. ], batch size: 36, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:31:29,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=76277.66666666667, ans=0.125 +2024-08-03 08:31:39,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=76314.33333333333, ans=0.0 +2024-08-03 08:31:41,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76351.0, ans=0.0 +2024-08-03 08:31:56,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-08-03 08:31:56,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0 +2024-08-03 08:32:01,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.310e+02 1.554e+02 1.964e+02 3.750e+02, threshold=3.108e+02, percent-clipped=4.0 +2024-08-03 08:32:05,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=76424.33333333333, ans=0.2 +2024-08-03 08:32:07,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.18 vs. limit=10.0 +2024-08-03 08:32:08,706 INFO [train.py:1114] (1/4) Epoch 6, batch 2650, loss[loss=0.2664, simple_loss=0.3402, pruned_loss=0.09632, over 13301.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3213, pruned_loss=0.08719, over 2640460.73 frames. 
], batch size: 46, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:32:08,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=76461.0, ans=0.2 +2024-08-03 08:32:14,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76461.0, ans=0.1 +2024-08-03 08:32:17,081 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:32:34,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=76571.0, ans=0.0 +2024-08-03 08:32:37,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=76571.0, ans=0.0 +2024-08-03 08:32:38,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=76571.0, ans=0.2 +2024-08-03 08:32:39,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=76571.0, ans=0.125 +2024-08-03 08:32:52,918 INFO [train.py:1114] (1/4) Epoch 6, batch 2700, loss[loss=0.2294, simple_loss=0.3101, pruned_loss=0.07442, over 13542.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3215, pruned_loss=0.08763, over 2636957.20 frames. ], batch size: 40, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:33:12,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.09 vs. limit=22.5 +2024-08-03 08:33:14,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.72 vs. limit=10.0 +2024-08-03 08:33:17,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=76717.66666666667, ans=0.125 +2024-08-03 08:33:19,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.69 vs. limit=15.0 +2024-08-03 08:33:29,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.293e+02 1.589e+02 2.019e+02 3.318e+02, threshold=3.177e+02, percent-clipped=2.0 +2024-08-03 08:33:41,623 INFO [train.py:1114] (1/4) Epoch 6, batch 2750, loss[loss=0.2277, simple_loss=0.3072, pruned_loss=0.07405, over 13335.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3199, pruned_loss=0.08719, over 2634154.04 frames. ], batch size: 34, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:33:50,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76864.33333333333, ans=0.0 +2024-08-03 08:33:50,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. limit=6.0 +2024-08-03 08:33:58,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76901.0, ans=0.0 +2024-08-03 08:34:06,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.60 vs. 
limit=15.0 +2024-08-03 08:34:10,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76937.66666666667, ans=0.1 +2024-08-03 08:34:12,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=76937.66666666667, ans=0.125 +2024-08-03 08:34:17,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=76974.33333333333, ans=0.0 +2024-08-03 08:34:18,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76974.33333333333, ans=0.1 +2024-08-03 08:34:25,228 INFO [train.py:1114] (1/4) Epoch 6, batch 2800, loss[loss=0.3349, simple_loss=0.3709, pruned_loss=0.1494, over 9494.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3196, pruned_loss=0.08725, over 2626926.36 frames. ], batch size: 96, lr: 1.98e-02, grad_scale: 32.0 +2024-08-03 08:34:34,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=77047.66666666667, ans=0.0 +2024-08-03 08:34:39,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=77047.66666666667, ans=0.125 +2024-08-03 08:34:47,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=77084.33333333333, ans=0.125 +2024-08-03 08:34:50,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=77121.0, ans=0.04949747468305833 +2024-08-03 08:34:53,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=77121.0, ans=0.0 +2024-08-03 08:35:00,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77157.66666666667, ans=0.0 +2024-08-03 08:35:01,670 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.373e+02 1.725e+02 2.259e+02 3.489e+02, threshold=3.451e+02, percent-clipped=3.0 +2024-08-03 08:35:05,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.23 vs. limit=22.5 +2024-08-03 08:35:08,643 INFO [train.py:1114] (1/4) Epoch 6, batch 2850, loss[loss=0.2144, simple_loss=0.2949, pruned_loss=0.0669, over 13349.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3202, pruned_loss=0.08728, over 2620940.33 frames. 
], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:10,514 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:35:10,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=77194.33333333333, ans=10.0 +2024-08-03 08:35:16,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=77231.0, ans=0.2 +2024-08-03 08:35:29,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=77267.66666666667, ans=0.0 +2024-08-03 08:35:37,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=77304.33333333333, ans=0.025 +2024-08-03 08:35:45,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=77341.0, ans=0.125 +2024-08-03 08:35:53,867 INFO [train.py:1114] (1/4) Epoch 6, batch 2900, loss[loss=0.2359, simple_loss=0.3061, pruned_loss=0.08287, over 13360.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3206, pruned_loss=0.08681, over 2631598.19 frames. ], batch size: 36, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:55,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=77377.66666666667, ans=0.0 +2024-08-03 08:35:56,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.18 vs. limit=10.0 +2024-08-03 08:36:03,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=77414.33333333333, ans=0.2 +2024-08-03 08:36:06,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=77414.33333333333, ans=0.025 +2024-08-03 08:40:06,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=12.0 +2024-08-03 08:41:12,657 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.224e+02 1.336e+02 1.587e+02 3.692e+02, threshold=2.672e+02, percent-clipped=1.0 +2024-08-03 08:41:12,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=77524.33333333333, ans=0.125 +2024-08-03 08:41:32,108 INFO [train.py:1114] (1/4) Epoch 6, batch 2950, loss[loss=0.2407, simple_loss=0.3057, pruned_loss=0.08779, over 13328.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3188, pruned_loss=0.08641, over 2630113.88 frames. 
], batch size: 34, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:41:44,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77561.0, ans=0.125 +2024-08-03 08:41:44,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=77561.0, ans=0.025 +2024-08-03 08:42:01,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=77597.66666666667, ans=0.125 +2024-08-03 08:42:01,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77597.66666666667, ans=0.0 +2024-08-03 08:42:09,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=77634.33333333333, ans=0.125 +2024-08-03 08:42:33,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=77671.0, ans=0.0 +2024-08-03 08:42:34,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=77671.0, ans=0.125 +2024-08-03 08:42:48,227 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.508e-03 +2024-08-03 08:43:15,146 INFO [train.py:1114] (1/4) Epoch 6, batch 3000, loss[loss=0.2158, simple_loss=0.3017, pruned_loss=0.06495, over 13551.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3183, pruned_loss=0.08588, over 2630570.59 frames. ], batch size: 37, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:43:15,147 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 08:43:29,969 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.3884, 2.6184, 2.7173, 2.5488], device='cuda:1') +2024-08-03 08:43:32,686 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.2027, simple_loss=0.301, pruned_loss=0.05225, over 944034.00 frames. +2024-08-03 08:43:32,687 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 08:43:58,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-08-03 08:44:05,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=77781.0, ans=0.0 +2024-08-03 08:44:07,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=77781.0, ans=10.0 +2024-08-03 08:44:18,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77817.66666666667, ans=0.1 +2024-08-03 08:44:30,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.52 vs. 
limit=15.0 +2024-08-03 08:44:33,316 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.660e+01 1.264e+02 1.515e+02 1.854e+02 4.431e+02, threshold=3.030e+02, percent-clipped=3.0 +2024-08-03 08:44:43,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=77891.0, ans=0.125 +2024-08-03 08:44:47,347 INFO [train.py:1114] (1/4) Epoch 6, batch 3050, loss[loss=0.2155, simple_loss=0.2907, pruned_loss=0.0702, over 13529.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3197, pruned_loss=0.08691, over 2627747.60 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:44:47,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77927.66666666667, ans=0.1 +2024-08-03 08:49:31,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=77964.33333333333, ans=0.2 +2024-08-03 08:49:40,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78001.0, ans=0.1 +2024-08-03 08:49:41,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=78001.0, ans=0.0 +2024-08-03 08:50:18,936 INFO [train.py:1114] (1/4) Epoch 6, batch 3100, loss[loss=0.266, simple_loss=0.3393, pruned_loss=0.09632, over 13325.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3187, pruned_loss=0.08628, over 2626363.18 frames. ], batch size: 46, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:50:23,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=78111.0, ans=0.09899494936611666 +2024-08-03 08:50:28,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78111.0, ans=0.0 +2024-08-03 08:50:29,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78111.0, ans=0.125 +2024-08-03 08:50:37,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=78147.66666666667, ans=0.2 +2024-08-03 08:50:45,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=78184.33333333333, ans=0.025 +2024-08-03 08:50:49,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=78184.33333333333, ans=0.2 +2024-08-03 08:51:04,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78221.0, ans=0.125 +2024-08-03 08:51:12,346 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.281e+02 1.526e+02 2.102e+02 4.706e+02, threshold=3.052e+02, percent-clipped=7.0 +2024-08-03 08:51:34,801 INFO [train.py:1114] (1/4) Epoch 6, batch 3150, loss[loss=0.2795, simple_loss=0.3576, pruned_loss=0.1007, over 13019.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3187, pruned_loss=0.08613, over 2628906.82 frames. 
], batch size: 48, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:51:38,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=78294.33333333333, ans=0.07 +2024-08-03 08:51:46,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=78331.0, ans=0.125 +2024-08-03 08:51:54,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78331.0, ans=0.1 +2024-08-03 08:52:06,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78367.66666666667, ans=0.125 +2024-08-03 08:52:12,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0 +2024-08-03 08:52:34,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=78441.0, ans=0.125 +2024-08-03 08:52:43,922 INFO [train.py:1114] (1/4) Epoch 6, batch 3200, loss[loss=0.2537, simple_loss=0.3275, pruned_loss=0.08998, over 13540.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3176, pruned_loss=0.0855, over 2634511.74 frames. ], batch size: 37, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:52:54,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78477.66666666667, ans=0.125 +2024-08-03 08:53:06,681 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:53:07,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=78514.33333333333, ans=0.0 +2024-08-03 08:57:46,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.59 vs. limit=15.0 +2024-08-03 08:57:47,831 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.879e+01 1.222e+02 1.408e+02 1.742e+02 2.685e+02, threshold=2.816e+02, percent-clipped=0.0 +2024-08-03 08:57:48,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=78624.33333333333, ans=0.0 +2024-08-03 08:57:58,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=78661.0, ans=0.125 +2024-08-03 08:57:59,195 INFO [train.py:1114] (1/4) Epoch 6, batch 3250, loss[loss=0.2786, simple_loss=0.3449, pruned_loss=0.1062, over 13390.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3184, pruned_loss=0.08546, over 2638448.19 frames. 
], batch size: 38, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:58:32,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=78697.66666666667, ans=0.025 +2024-08-03 08:58:40,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=78734.33333333333, ans=0.125 +2024-08-03 08:58:50,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=78771.0, ans=0.025 +2024-08-03 08:58:58,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78807.66666666667, ans=0.1 +2024-08-03 08:59:05,489 INFO [train.py:1114] (1/4) Epoch 6, batch 3300, loss[loss=0.3043, simple_loss=0.3653, pruned_loss=0.1217, over 12869.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3177, pruned_loss=0.08513, over 2638703.63 frames. ], batch size: 52, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:59:05,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78844.33333333333, ans=0.125 +2024-08-03 08:59:24,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=78917.66666666667, ans=0.125 +2024-08-03 08:59:24,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=78917.66666666667, ans=0.0 +2024-08-03 08:59:28,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78917.66666666667, ans=0.1 +2024-08-03 08:59:37,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=78954.33333333333, ans=0.125 +2024-08-03 08:59:39,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=78954.33333333333, ans=0.125 +2024-08-03 08:59:42,888 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.439e+02 1.688e+02 2.442e+02 4.060e+02, threshold=3.376e+02, percent-clipped=9.0 +2024-08-03 08:59:47,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78991.0, ans=0.0 +2024-08-03 08:59:47,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=78991.0, ans=0.09899494936611666 +2024-08-03 08:59:53,402 INFO [train.py:1114] (1/4) Epoch 6, batch 3350, loss[loss=0.2629, simple_loss=0.3353, pruned_loss=0.09521, over 13067.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3188, pruned_loss=0.08595, over 2628865.07 frames. 
], batch size: 48, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 08:59:56,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=79027.66666666667, ans=0.0 +2024-08-03 09:00:24,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=79137.66666666667, ans=0.95 +2024-08-03 09:00:25,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79137.66666666667, ans=0.125 +2024-08-03 09:00:27,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-08-03 09:00:30,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=79174.33333333333, ans=0.0 +2024-08-03 09:00:33,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=79174.33333333333, ans=0.2 +2024-08-03 09:00:34,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=79174.33333333333, ans=0.125 +2024-08-03 09:00:36,287 INFO [train.py:1114] (1/4) Epoch 6, batch 3400, loss[loss=0.2215, simple_loss=0.2837, pruned_loss=0.07971, over 13560.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3191, pruned_loss=0.08674, over 2625470.11 frames. ], batch size: 31, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:00:57,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79284.33333333333, ans=0.125 +2024-08-03 09:01:08,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=79321.0, ans=0.05 +2024-08-03 09:01:12,725 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.222e+02 1.420e+02 1.734e+02 2.761e+02, threshold=2.839e+02, percent-clipped=0.0 +2024-08-03 09:01:14,036 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0 +2024-08-03 09:01:18,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=79357.66666666667, ans=0.2 +2024-08-03 09:01:18,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79394.33333333333, ans=0.1 +2024-08-03 09:01:19,592 INFO [train.py:1114] (1/4) Epoch 6, batch 3450, loss[loss=0.2565, simple_loss=0.3398, pruned_loss=0.0866, over 12848.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3191, pruned_loss=0.08632, over 2628811.37 frames. ], batch size: 52, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:01:25,729 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:01:31,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.50 vs. 
limit=22.5 +2024-08-03 09:01:40,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=79467.66666666667, ans=0.125 +2024-08-03 09:02:02,275 INFO [train.py:1114] (1/4) Epoch 6, batch 3500, loss[loss=0.235, simple_loss=0.3193, pruned_loss=0.07538, over 13533.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3186, pruned_loss=0.08644, over 2629804.67 frames. ], batch size: 34, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:02:10,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79614.33333333333, ans=0.1 +2024-08-03 09:02:21,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=79651.0, ans=0.125 +2024-08-03 09:02:27,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79687.66666666667, ans=0.1 +2024-08-03 09:02:33,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79687.66666666667, ans=0.125 +2024-08-03 09:02:38,268 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.291e+02 1.420e+02 1.811e+02 3.621e+02, threshold=2.839e+02, percent-clipped=4.0 +2024-08-03 09:02:45,561 INFO [train.py:1114] (1/4) Epoch 6, batch 3550, loss[loss=0.2889, simple_loss=0.3529, pruned_loss=0.1124, over 12485.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3213, pruned_loss=0.08826, over 2628835.52 frames. ], batch size: 58, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:02:51,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.96 vs. limit=10.0 +2024-08-03 09:02:53,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79797.66666666667, ans=0.125 +2024-08-03 09:02:55,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79797.66666666667, ans=0.125 +2024-08-03 09:02:56,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=79797.66666666667, ans=0.125 +2024-08-03 09:03:28,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=79907.66666666667, ans=0.0 +2024-08-03 09:03:30,154 INFO [train.py:1114] (1/4) Epoch 6, batch 3600, loss[loss=0.3587, simple_loss=0.3893, pruned_loss=0.164, over 9167.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3277, pruned_loss=0.0946, over 2487439.69 frames. ], batch size: 96, lr: 1.94e-02, grad_scale: 32.0 +2024-08-03 09:03:36,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=79944.33333333333, ans=0.2 +2024-08-03 09:03:58,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=80054.33333333333, ans=0.2 +2024-08-03 09:11:26,779 INFO [train.py:1114] (1/4) Epoch 7, batch 0, loss[loss=0.2131, simple_loss=0.282, pruned_loss=0.0721, over 13320.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.282, pruned_loss=0.0721, over 13320.00 frames. 
], batch size: 33, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:11:26,779 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 09:11:36,680 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.2064, simple_loss=0.3063, pruned_loss=0.05331, over 944034.00 frames. +2024-08-03 09:11:36,681 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 09:11:39,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.328e+02 1.470e+02 1.676e+02 3.542e+02, threshold=2.940e+02, percent-clipped=1.0 +2024-08-03 09:11:40,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.93 vs. limit=15.0 +2024-08-03 09:11:47,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=80091.0, ans=0.0 +2024-08-03 09:12:05,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=80164.33333333333, ans=0.0 +2024-08-03 09:12:11,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.28 vs. limit=22.5 +2024-08-03 09:12:17,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.80 vs. limit=22.5 +2024-08-03 09:12:27,671 INFO [train.py:1114] (1/4) Epoch 7, batch 50, loss[loss=0.2228, simple_loss=0.289, pruned_loss=0.07834, over 13434.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3244, pruned_loss=0.08903, over 577726.78 frames. ], batch size: 32, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:13:03,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=15.0 +2024-08-03 09:13:19,704 INFO [train.py:1114] (1/4) Epoch 7, batch 100, loss[loss=0.2122, simple_loss=0.2875, pruned_loss=0.06849, over 13537.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3227, pruned_loss=0.08738, over 1025949.19 frames. ], batch size: 35, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:13:22,392 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.305e+02 1.508e+02 1.904e+02 3.829e+02, threshold=3.017e+02, percent-clipped=4.0 +2024-08-03 09:13:24,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=80457.66666666667, ans=0.0 +2024-08-03 09:13:32,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=80494.33333333333, ans=0.125 +2024-08-03 09:13:33,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=80494.33333333333, ans=0.2 +2024-08-03 09:13:48,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=80531.0, ans=0.125 +2024-08-03 09:14:00,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=80604.33333333333, ans=0.05 +2024-08-03 09:14:08,297 INFO [train.py:1114] (1/4) Epoch 7, batch 150, loss[loss=0.2134, simple_loss=0.2786, pruned_loss=0.07412, over 13408.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3186, pruned_loss=0.08559, over 1386715.94 frames. 
], batch size: 32, lr: 1.81e-02, grad_scale: 32.0 +2024-08-03 09:14:08,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=80641.0, ans=0.0 +2024-08-03 09:14:23,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=80677.66666666667, ans=0.125 +2024-08-03 09:14:35,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80714.33333333333, ans=0.1 +2024-08-03 09:14:43,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-08-03 09:14:53,797 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:14:56,252 INFO [train.py:1114] (1/4) Epoch 7, batch 200, loss[loss=0.2335, simple_loss=0.3135, pruned_loss=0.07674, over 12480.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3154, pruned_loss=0.08315, over 1665657.44 frames. ], batch size: 58, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:14:57,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=80824.33333333333, ans=0.125 +2024-08-03 09:14:57,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=80824.33333333333, ans=0.2 +2024-08-03 09:14:59,741 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.449e+01 1.216e+02 1.369e+02 1.577e+02 2.982e+02, threshold=2.737e+02, percent-clipped=0.0 +2024-08-03 09:15:01,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-08-03 09:15:03,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=80824.33333333333, ans=0.09899494936611666 +2024-08-03 09:15:17,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80897.66666666667, ans=0.125 +2024-08-03 09:15:20,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.46 vs. limit=15.0 +2024-08-03 09:15:41,493 INFO [train.py:1114] (1/4) Epoch 7, batch 250, loss[loss=0.2585, simple_loss=0.3331, pruned_loss=0.09198, over 13318.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3158, pruned_loss=0.08359, over 1885327.13 frames. 
], batch size: 46, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:15:43,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81007.66666666667, ans=0.125 +2024-08-03 09:16:01,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=81081.0, ans=0.125 +2024-08-03 09:16:04,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81081.0, ans=0.0 +2024-08-03 09:16:12,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81117.66666666667, ans=0.1 +2024-08-03 09:16:18,429 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:16:26,435 INFO [train.py:1114] (1/4) Epoch 7, batch 300, loss[loss=0.2717, simple_loss=0.3458, pruned_loss=0.09882, over 13432.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3149, pruned_loss=0.08329, over 2052983.16 frames. ], batch size: 42, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:16:30,005 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.291e+02 1.630e+02 2.116e+02 3.205e+02, threshold=3.259e+02, percent-clipped=7.0 +2024-08-03 09:16:32,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81191.0, ans=0.125 +2024-08-03 09:16:35,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=81227.66666666667, ans=0.125 +2024-08-03 09:16:38,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-08-03 09:16:42,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=81227.66666666667, ans=0.125 +2024-08-03 09:16:55,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81264.33333333333, ans=0.125 +2024-08-03 09:16:57,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=81301.0, ans=0.025 +2024-08-03 09:17:12,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81337.66666666667, ans=0.125 +2024-08-03 09:17:19,134 INFO [train.py:1114] (1/4) Epoch 7, batch 350, loss[loss=0.2255, simple_loss=0.2935, pruned_loss=0.07873, over 13584.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.316, pruned_loss=0.08372, over 2183471.07 frames. 
], batch size: 33, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:17:26,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81374.33333333333, ans=0.125 +2024-08-03 09:17:46,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=81484.33333333333, ans=0.125 +2024-08-03 09:17:59,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=81521.0, ans=0.125 +2024-08-03 09:18:06,299 INFO [train.py:1114] (1/4) Epoch 7, batch 400, loss[loss=0.2429, simple_loss=0.3123, pruned_loss=0.08672, over 13369.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3151, pruned_loss=0.08312, over 2287845.94 frames. ], batch size: 37, lr: 1.81e-02, grad_scale: 32.0 +2024-08-03 09:18:09,970 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.335e+02 1.614e+02 1.996e+02 4.244e+02, threshold=3.229e+02, percent-clipped=5.0 +2024-08-03 09:18:13,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=81557.66666666667, ans=0.2 +2024-08-03 09:18:14,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=81594.33333333333, ans=0.09899494936611666 +2024-08-03 09:18:16,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=81594.33333333333, ans=0.125 +2024-08-03 09:18:21,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81594.33333333333, ans=0.1 +2024-08-03 09:18:21,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=81594.33333333333, ans=0.2 +2024-08-03 09:18:29,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=81631.0, ans=0.0 +2024-08-03 09:18:44,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=81704.33333333333, ans=0.2 +2024-08-03 09:18:47,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=81704.33333333333, ans=0.0 +2024-08-03 09:18:49,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0 +2024-08-03 09:18:51,576 INFO [train.py:1114] (1/4) Epoch 7, batch 450, loss[loss=0.2565, simple_loss=0.3269, pruned_loss=0.09301, over 13554.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3161, pruned_loss=0.08383, over 2359855.49 frames. ], batch size: 38, lr: 1.80e-02, grad_scale: 32.0 +2024-08-03 09:18:59,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=81741.0, ans=0.125 +2024-08-03 09:19:55,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81851.0, ans=0.1 +2024-08-03 09:20:05,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=81887.66666666667, ans=0.1 +2024-08-03 09:20:13,715 INFO [train.py:1114] (1/4) Epoch 7, batch 500, loss[loss=0.2568, simple_loss=0.3318, pruned_loss=0.09091, over 13405.00 frames. 
], tot_loss[loss=0.2399, simple_loss=0.3143, pruned_loss=0.08271, over 2425850.79 frames. ], batch size: 43, lr: 1.80e-02, grad_scale: 32.0 +2024-08-03 09:20:17,134 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.254e+02 1.426e+02 1.803e+02 2.820e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 09:20:21,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=81924.33333333333, ans=0.0 +2024-08-03 09:20:43,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82034.33333333333, ans=0.125 +2024-08-03 09:20:47,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=82034.33333333333, ans=0.5 +2024-08-03 09:20:54,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=82071.0, ans=0.0 +2024-08-03 09:20:58,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=82107.66666666667, ans=0.0 +2024-08-03 09:20:58,669 INFO [train.py:1114] (1/4) Epoch 7, batch 550, loss[loss=0.2668, simple_loss=0.3376, pruned_loss=0.09793, over 12990.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3143, pruned_loss=0.0826, over 2466530.41 frames. ], batch size: 48, lr: 1.80e-02, grad_scale: 16.0 +2024-08-03 09:21:25,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82181.0, ans=0.1 +2024-08-03 09:21:26,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=15.0 +2024-08-03 09:21:36,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=82217.66666666667, ans=0.0 +2024-08-03 09:21:45,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=82254.33333333333, ans=0.035 +2024-08-03 09:21:49,910 INFO [train.py:1114] (1/4) Epoch 7, batch 600, loss[loss=0.2652, simple_loss=0.3413, pruned_loss=0.09454, over 13320.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3149, pruned_loss=0.08289, over 2506379.85 frames. 
], batch size: 46, lr: 1.80e-02, grad_scale: 16.0 +2024-08-03 09:21:53,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=82291.0, ans=0.125 +2024-08-03 09:21:54,381 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.313e+02 1.488e+02 1.850e+02 2.717e+02, threshold=2.975e+02, percent-clipped=0.0 +2024-08-03 09:21:57,285 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:22:08,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82327.66666666667, ans=0.1 +2024-08-03 09:22:17,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=82364.33333333333, ans=0.0 +2024-08-03 09:22:18,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82364.33333333333, ans=0.1 +2024-08-03 09:22:25,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82401.0, ans=0.125 +2024-08-03 09:22:32,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=82437.66666666667, ans=0.025 +2024-08-03 09:22:39,823 INFO [train.py:1114] (1/4) Epoch 7, batch 650, loss[loss=0.2165, simple_loss=0.3012, pruned_loss=0.06591, over 13539.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3138, pruned_loss=0.08221, over 2542160.48 frames. ], batch size: 37, lr: 1.80e-02, grad_scale: 16.0 +2024-08-03 09:22:55,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=82511.0, ans=0.0 +2024-08-03 09:22:58,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=12.0 +2024-08-03 09:23:04,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=82547.66666666667, ans=0.2 +2024-08-03 09:23:25,558 INFO [train.py:1114] (1/4) Epoch 7, batch 700, loss[loss=0.2808, simple_loss=0.3398, pruned_loss=0.1109, over 13525.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3142, pruned_loss=0.08227, over 2564168.50 frames. ], batch size: 35, lr: 1.79e-02, grad_scale: 16.0 +2024-08-03 09:23:26,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=82657.66666666667, ans=0.0 +2024-08-03 09:23:29,925 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.288e+02 1.544e+02 2.300e+02 4.218e+02, threshold=3.088e+02, percent-clipped=10.0 +2024-08-03 09:23:53,467 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:23:55,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.88 vs. 
limit=15.0 +2024-08-03 09:24:03,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=82804.33333333333, ans=0.2 +2024-08-03 09:24:07,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=82804.33333333333, ans=0.125 +2024-08-03 09:24:10,537 INFO [train.py:1114] (1/4) Epoch 7, batch 750, loss[loss=0.2187, simple_loss=0.3043, pruned_loss=0.06659, over 13361.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3136, pruned_loss=0.08213, over 2581756.92 frames. ], batch size: 37, lr: 1.79e-02, grad_scale: 16.0 +2024-08-03 09:24:24,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=82877.66666666667, ans=0.125 +2024-08-03 09:24:33,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=82914.33333333333, ans=0.2 +2024-08-03 09:25:04,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=15.0 +2024-08-03 09:25:05,595 INFO [train.py:1114] (1/4) Epoch 7, batch 800, loss[loss=0.2507, simple_loss=0.3131, pruned_loss=0.09421, over 13368.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3138, pruned_loss=0.08219, over 2596277.77 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:25:06,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=83024.33333333333, ans=0.2 +2024-08-03 09:25:11,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=83024.33333333333, ans=0.125 +2024-08-03 09:25:11,847 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.297e+02 1.506e+02 2.061e+02 3.344e+02, threshold=3.011e+02, percent-clipped=3.0 +2024-08-03 09:25:16,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=83061.0, ans=0.125 +2024-08-03 09:25:23,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=83061.0, ans=0.125 +2024-08-03 09:25:25,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=83061.0, ans=0.0 +2024-08-03 09:25:26,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83097.66666666667, ans=0.1 +2024-08-03 09:25:31,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=83097.66666666667, ans=0.0 +2024-08-03 09:25:32,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=83097.66666666667, ans=0.0 +2024-08-03 09:25:35,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.08 vs. 
limit=15.0 +2024-08-03 09:25:35,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=83134.33333333333, ans=0.04949747468305833 +2024-08-03 09:25:37,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=83134.33333333333, ans=0.125 +2024-08-03 09:25:59,645 INFO [train.py:1114] (1/4) Epoch 7, batch 850, loss[loss=0.2599, simple_loss=0.3366, pruned_loss=0.09156, over 13324.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3139, pruned_loss=0.08233, over 2609248.23 frames. ], batch size: 40, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:26:07,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=83207.66666666667, ans=0.0 +2024-08-03 09:26:09,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83207.66666666667, ans=0.1 +2024-08-03 09:26:10,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=83244.33333333333, ans=0.0 +2024-08-03 09:26:30,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=83281.0, ans=0.125 +2024-08-03 09:26:33,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=83281.0, ans=0.125 +2024-08-03 09:26:34,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=83281.0, ans=0.125 +2024-08-03 09:26:35,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=83317.66666666667, ans=0.025 +2024-08-03 09:26:54,483 INFO [train.py:1114] (1/4) Epoch 7, batch 900, loss[loss=0.2038, simple_loss=0.2806, pruned_loss=0.06351, over 13344.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3138, pruned_loss=0.08214, over 2610663.33 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:26:55,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.01 vs. limit=15.0 +2024-08-03 09:26:58,761 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.957e+01 1.340e+02 1.564e+02 1.853e+02 3.494e+02, threshold=3.128e+02, percent-clipped=2.0 +2024-08-03 09:27:16,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.79 vs. limit=10.0 +2024-08-03 09:27:17,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=83464.33333333333, ans=0.1 +2024-08-03 09:27:18,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.68 vs. limit=22.5 +2024-08-03 09:27:29,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=83501.0, ans=0.0 +2024-08-03 09:27:33,276 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=6.324e-02 +2024-08-03 09:27:44,884 INFO [train.py:1114] (1/4) Epoch 7, batch 950, loss[loss=0.2127, simple_loss=0.2856, pruned_loss=0.06996, over 13527.00 frames. 
], tot_loss[loss=0.2397, simple_loss=0.3142, pruned_loss=0.08263, over 2611396.46 frames. ], batch size: 34, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:27:47,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=83574.33333333333, ans=0.0 +2024-08-03 09:28:09,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83647.66666666667, ans=0.125 +2024-08-03 09:28:20,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=83684.33333333333, ans=0.125 +2024-08-03 09:28:23,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=83721.0, ans=0.0 +2024-08-03 09:28:30,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.65 vs. limit=22.5 +2024-08-03 09:28:31,247 INFO [train.py:1114] (1/4) Epoch 7, batch 1000, loss[loss=0.2515, simple_loss=0.3232, pruned_loss=0.0899, over 13363.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3155, pruned_loss=0.08365, over 2611867.38 frames. ], batch size: 35, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:28:39,587 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.347e+02 1.651e+02 2.099e+02 3.599e+02, threshold=3.301e+02, percent-clipped=2.0 +2024-08-03 09:29:14,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=83867.66666666667, ans=0.07 +2024-08-03 09:29:21,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=83904.33333333333, ans=0.0 +2024-08-03 09:29:25,992 INFO [train.py:1114] (1/4) Epoch 7, batch 1050, loss[loss=0.2199, simple_loss=0.3035, pruned_loss=0.06812, over 13579.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3144, pruned_loss=0.08277, over 2616175.90 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:29:40,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=83977.66666666667, ans=0.2 +2024-08-03 09:29:43,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=83977.66666666667, ans=0.2 +2024-08-03 09:29:47,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.92 vs. limit=22.5 +2024-08-03 09:29:48,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.46 vs. limit=15.0 +2024-08-03 09:29:58,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=84051.0, ans=0.0 +2024-08-03 09:30:03,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.44 vs. 
limit=12.0 +2024-08-03 09:30:07,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=84087.66666666667, ans=0.09899494936611666 +2024-08-03 09:30:16,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=84124.33333333333, ans=0.025 +2024-08-03 09:30:16,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=84124.33333333333, ans=0.0 +2024-08-03 09:30:18,331 INFO [train.py:1114] (1/4) Epoch 7, batch 1100, loss[loss=0.1954, simple_loss=0.2752, pruned_loss=0.05785, over 13561.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3132, pruned_loss=0.08194, over 2620036.42 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:30:21,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=84124.33333333333, ans=0.125 +2024-08-03 09:30:22,733 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.294e+01 1.204e+02 1.427e+02 1.810e+02 3.442e+02, threshold=2.853e+02, percent-clipped=1.0 +2024-08-03 09:30:46,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.92 vs. limit=15.0 +2024-08-03 09:30:50,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=84197.66666666667, ans=0.125 +2024-08-03 09:30:55,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84234.33333333333, ans=0.1 +2024-08-03 09:30:55,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=84234.33333333333, ans=0.2 +2024-08-03 09:30:56,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84234.33333333333, ans=0.125 +2024-08-03 09:31:00,939 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:31:02,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=84271.0, ans=0.125 +2024-08-03 09:31:05,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=84271.0, ans=0.0 +2024-08-03 09:31:10,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=84271.0, ans=0.125 +2024-08-03 09:31:13,201 INFO [train.py:1114] (1/4) Epoch 7, batch 1150, loss[loss=0.2416, simple_loss=0.3138, pruned_loss=0.0847, over 13572.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.313, pruned_loss=0.08207, over 2619241.53 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:31:47,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.13 vs. limit=15.0 +2024-08-03 09:31:53,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84417.66666666667, ans=0.125 +2024-08-03 09:32:04,051 INFO [train.py:1114] (1/4) Epoch 7, batch 1200, loss[loss=0.2157, simple_loss=0.3068, pruned_loss=0.06228, over 13571.00 frames. 
], tot_loss[loss=0.2394, simple_loss=0.3141, pruned_loss=0.08231, over 2615834.78 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:32:09,677 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.359e+02 1.583e+02 1.870e+02 3.127e+02, threshold=3.166e+02, percent-clipped=2.0 +2024-08-03 09:32:16,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=84527.66666666667, ans=0.125 +2024-08-03 09:32:34,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84601.0, ans=0.125 +2024-08-03 09:32:51,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=84637.66666666667, ans=0.0 +2024-08-03 09:32:52,750 INFO [train.py:1114] (1/4) Epoch 7, batch 1250, loss[loss=0.2667, simple_loss=0.34, pruned_loss=0.09674, over 13445.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3151, pruned_loss=0.08253, over 2627737.58 frames. ], batch size: 42, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:32:54,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=84674.33333333333, ans=0.0 +2024-08-03 09:33:26,935 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:33:30,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=84784.33333333333, ans=0.125 +2024-08-03 09:33:36,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=84784.33333333333, ans=0.125 +2024-08-03 09:33:45,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=84821.0, ans=0.125 +2024-08-03 09:33:49,159 INFO [train.py:1114] (1/4) Epoch 7, batch 1300, loss[loss=0.2389, simple_loss=0.3241, pruned_loss=0.07683, over 12911.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3143, pruned_loss=0.08194, over 2630395.09 frames. ], batch size: 52, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:33:52,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.23 vs. limit=15.0 +2024-08-03 09:33:53,581 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.285e+01 1.265e+02 1.441e+02 2.116e+02 4.466e+02, threshold=2.882e+02, percent-clipped=10.0 +2024-08-03 09:34:03,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=84894.33333333333, ans=0.125 +2024-08-03 09:34:15,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=84967.66666666667, ans=0.0 +2024-08-03 09:34:20,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=84967.66666666667, ans=0.125 +2024-08-03 09:34:27,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=85004.33333333333, ans=0.125 +2024-08-03 09:34:34,082 INFO [train.py:1114] (1/4) Epoch 7, batch 1350, loss[loss=0.2308, simple_loss=0.3126, pruned_loss=0.07447, over 13543.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3139, pruned_loss=0.0817, over 2637364.51 frames. 
], batch size: 37, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:34:37,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=85041.0, ans=0.125 +2024-08-03 09:34:50,175 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=1.672e-02 +2024-08-03 09:34:50,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=85077.66666666667, ans=0.2 +2024-08-03 09:34:57,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=85114.33333333333, ans=0.0 +2024-08-03 09:35:04,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85151.0, ans=0.125 +2024-08-03 09:35:09,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=85151.0, ans=0.025 +2024-08-03 09:35:11,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85151.0, ans=0.125 +2024-08-03 09:35:11,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=85151.0, ans=0.125 +2024-08-03 09:35:23,188 INFO [train.py:1114] (1/4) Epoch 7, batch 1400, loss[loss=0.1882, simple_loss=0.2661, pruned_loss=0.0551, over 13291.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3126, pruned_loss=0.08094, over 2642139.52 frames. ], batch size: 31, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:35:27,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.250e+02 1.480e+02 1.868e+02 3.141e+02, threshold=2.961e+02, percent-clipped=2.0 +2024-08-03 09:35:29,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=85224.33333333333, ans=0.0 +2024-08-03 09:35:50,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.99 vs. limit=15.0 +2024-08-03 09:35:55,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=85334.33333333333, ans=0.125 +2024-08-03 09:35:59,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=85371.0, ans=0.0 +2024-08-03 09:36:04,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=85371.0, ans=0.125 +2024-08-03 09:36:10,257 INFO [train.py:1114] (1/4) Epoch 7, batch 1450, loss[loss=0.2469, simple_loss=0.326, pruned_loss=0.08394, over 13443.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.314, pruned_loss=0.08174, over 2641740.60 frames. ], batch size: 43, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:36:25,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0 +2024-08-03 09:36:29,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.84 vs. 
limit=22.5 +2024-08-03 09:36:29,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.85 vs. limit=15.0 +2024-08-03 09:36:32,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=85481.0, ans=0.125 +2024-08-03 09:36:51,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85517.66666666667, ans=0.125 +2024-08-03 09:36:59,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85554.33333333333, ans=0.125 +2024-08-03 09:37:03,215 INFO [train.py:1114] (1/4) Epoch 7, batch 1500, loss[loss=0.2513, simple_loss=0.3271, pruned_loss=0.08776, over 13404.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.3143, pruned_loss=0.08172, over 2641829.10 frames. ], batch size: 39, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:37:05,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=85591.0, ans=0.0 +2024-08-03 09:37:05,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.12 vs. limit=10.0 +2024-08-03 09:37:07,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=85591.0, ans=0.2 +2024-08-03 09:37:07,888 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.276e+02 1.426e+02 1.677e+02 2.585e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 09:37:08,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=85591.0, ans=0.125 +2024-08-03 09:37:40,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0 +2024-08-03 09:37:58,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=85701.0, ans=0.07 +2024-08-03 09:38:04,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.22 vs. limit=12.0 +2024-08-03 09:38:10,431 INFO [train.py:1114] (1/4) Epoch 7, batch 1550, loss[loss=0.2685, simple_loss=0.3353, pruned_loss=0.1009, over 13396.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3144, pruned_loss=0.08218, over 2631108.92 frames. 
], batch size: 41, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:38:19,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=85774.33333333333, ans=0.125 +2024-08-03 09:38:20,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=85811.0, ans=0.0 +2024-08-03 09:38:24,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=85811.0, ans=0.0 +2024-08-03 09:38:48,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=85921.0, ans=0.09899494936611666 +2024-08-03 09:38:53,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=85921.0, ans=0.025 +2024-08-03 09:38:57,205 INFO [train.py:1114] (1/4) Epoch 7, batch 1600, loss[loss=0.2466, simple_loss=0.3263, pruned_loss=0.08345, over 13577.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.314, pruned_loss=0.08219, over 2623592.55 frames. ], batch size: 39, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:38:58,381 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:39:02,745 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.288e+02 1.487e+02 1.890e+02 3.069e+02, threshold=2.975e+02, percent-clipped=2.0 +2024-08-03 09:39:05,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=85957.66666666667, ans=0.125 +2024-08-03 09:39:08,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85994.33333333333, ans=0.125 +2024-08-03 09:39:32,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.23 vs. limit=10.0 +2024-08-03 09:39:39,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=86104.33333333333, ans=0.125 +2024-08-03 09:39:45,490 INFO [train.py:1114] (1/4) Epoch 7, batch 1650, loss[loss=0.2425, simple_loss=0.327, pruned_loss=0.07902, over 13318.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3141, pruned_loss=0.08239, over 2619465.66 frames. ], batch size: 40, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:39:46,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0 +2024-08-03 09:39:57,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86177.66666666667, ans=0.0 +2024-08-03 09:40:09,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=86214.33333333333, ans=0.125 +2024-08-03 09:40:30,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=86324.33333333333, ans=0.1 +2024-08-03 09:40:30,960 INFO [train.py:1114] (1/4) Epoch 7, batch 1700, loss[loss=0.2202, simple_loss=0.2865, pruned_loss=0.07692, over 13242.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3133, pruned_loss=0.08116, over 2628355.36 frames. 
], batch size: 31, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:40:36,462 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.270e+02 1.510e+02 1.921e+02 4.226e+02, threshold=3.020e+02, percent-clipped=3.0 +2024-08-03 09:40:36,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=86324.33333333333, ans=0.95 +2024-08-03 09:40:50,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=86361.0, ans=0.0 +2024-08-03 09:40:54,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=86397.66666666667, ans=0.2 +2024-08-03 09:41:01,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=86397.66666666667, ans=0.025 +2024-08-03 09:41:06,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=86434.33333333333, ans=0.0 +2024-08-03 09:41:08,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=15.0 +2024-08-03 09:41:14,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=86471.0, ans=0.125 +2024-08-03 09:41:23,774 INFO [train.py:1114] (1/4) Epoch 7, batch 1750, loss[loss=0.2059, simple_loss=0.2764, pruned_loss=0.06771, over 13522.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3123, pruned_loss=0.08054, over 2632410.76 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:41:23,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=86507.66666666667, ans=0.2 +2024-08-03 09:41:25,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=86507.66666666667, ans=0.125 +2024-08-03 09:41:29,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=86507.66666666667, ans=0.125 +2024-08-03 09:41:34,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=86544.33333333333, ans=0.95 +2024-08-03 09:41:35,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=86544.33333333333, ans=0.125 +2024-08-03 09:41:39,208 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:41:57,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=86617.66666666667, ans=0.0 +2024-08-03 09:41:58,874 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:42:00,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.78 vs. 
limit=10.0 +2024-08-03 09:42:09,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=86691.0, ans=0.0 +2024-08-03 09:42:09,845 INFO [train.py:1114] (1/4) Epoch 7, batch 1800, loss[loss=0.2207, simple_loss=0.3093, pruned_loss=0.06604, over 13547.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3127, pruned_loss=0.08103, over 2634013.27 frames. ], batch size: 38, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:42:15,298 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.268e+02 1.407e+02 1.831e+02 3.286e+02, threshold=2.815e+02, percent-clipped=2.0 +2024-08-03 09:42:34,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86764.33333333333, ans=0.1 +2024-08-03 09:42:46,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=86837.66666666667, ans=0.025 +2024-08-03 09:42:49,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=86837.66666666667, ans=0.125 +2024-08-03 09:42:54,767 INFO [train.py:1114] (1/4) Epoch 7, batch 1850, loss[loss=0.2322, simple_loss=0.312, pruned_loss=0.07624, over 13384.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3116, pruned_loss=0.08054, over 2636662.87 frames. ], batch size: 39, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:42:55,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.65 vs. limit=22.5 +2024-08-03 09:42:55,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=12.0 +2024-08-03 09:43:05,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86911.0, ans=0.1 +2024-08-03 09:43:06,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=86911.0, ans=0.0 +2024-08-03 09:43:26,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=86984.33333333333, ans=0.125 +2024-08-03 09:43:32,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=87021.0, ans=0.0 +2024-08-03 09:43:39,418 INFO [train.py:1114] (1/4) Epoch 7, batch 1900, loss[loss=0.2364, simple_loss=0.3171, pruned_loss=0.07785, over 13329.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3122, pruned_loss=0.0807, over 2639386.03 frames. ], batch size: 40, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:43:44,830 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.267e+02 1.561e+02 1.810e+02 3.811e+02, threshold=3.121e+02, percent-clipped=4.0 +2024-08-03 09:43:47,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87094.33333333333, ans=0.1 +2024-08-03 09:43:52,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=87094.33333333333, ans=0.0 +2024-08-03 09:43:59,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.00 vs. 
limit=10.0 +2024-08-03 09:44:27,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=87204.33333333333, ans=0.09899494936611666 +2024-08-03 09:44:28,644 INFO [train.py:1114] (1/4) Epoch 7, batch 1950, loss[loss=0.2286, simple_loss=0.3071, pruned_loss=0.07508, over 13578.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3138, pruned_loss=0.08131, over 2646094.93 frames. ], batch size: 36, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:44:38,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.05 vs. limit=6.0 +2024-08-03 09:44:47,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.60 vs. limit=22.5 +2024-08-03 09:45:04,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.59 vs. limit=22.5 +2024-08-03 09:45:16,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=87387.66666666667, ans=15.0 +2024-08-03 09:45:24,713 INFO [train.py:1114] (1/4) Epoch 7, batch 2000, loss[loss=0.2184, simple_loss=0.2922, pruned_loss=0.07226, over 13519.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3145, pruned_loss=0.08199, over 2636243.46 frames. ], batch size: 31, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:45:27,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=87424.33333333333, ans=0.2 +2024-08-03 09:45:30,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.022e+02 1.315e+02 1.529e+02 1.937e+02 2.914e+02, threshold=3.058e+02, percent-clipped=0.0 +2024-08-03 09:45:35,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=87461.0, ans=0.0 +2024-08-03 09:45:37,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.10 vs. limit=15.0 +2024-08-03 09:45:38,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=87461.0, ans=0.2 +2024-08-03 09:45:42,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=87497.66666666667, ans=0.0 +2024-08-03 09:46:01,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=87571.0, ans=0.0 +2024-08-03 09:46:09,935 INFO [train.py:1114] (1/4) Epoch 7, batch 2050, loss[loss=0.1943, simple_loss=0.2681, pruned_loss=0.06027, over 13430.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3131, pruned_loss=0.08153, over 2632987.15 frames. ], batch size: 32, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:46:11,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.36 vs. 
limit=15.0 +2024-08-03 09:46:14,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=87607.66666666667, ans=0.125 +2024-08-03 09:46:17,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=87607.66666666667, ans=0.2 +2024-08-03 09:46:25,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=87644.33333333333, ans=0.125 +2024-08-03 09:46:39,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=87717.66666666667, ans=0.1 +2024-08-03 09:46:40,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=87717.66666666667, ans=0.125 +2024-08-03 09:46:44,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=87717.66666666667, ans=0.2 +2024-08-03 09:46:44,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-08-03 09:46:48,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=87754.33333333333, ans=0.0 +2024-08-03 09:46:52,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=87754.33333333333, ans=0.125 +2024-08-03 09:46:56,261 INFO [train.py:1114] (1/4) Epoch 7, batch 2100, loss[loss=0.2304, simple_loss=0.3109, pruned_loss=0.07497, over 13559.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3126, pruned_loss=0.08122, over 2638589.25 frames. ], batch size: 37, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:47:03,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.729e+01 1.195e+02 1.377e+02 1.752e+02 2.850e+02, threshold=2.753e+02, percent-clipped=0.0 +2024-08-03 09:47:11,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=87827.66666666667, ans=0.0 +2024-08-03 09:47:14,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=87827.66666666667, ans=0.125 +2024-08-03 09:47:17,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=87864.33333333333, ans=0.125 +2024-08-03 09:47:28,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=87901.0, ans=0.0 +2024-08-03 09:47:29,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=87901.0, ans=0.125 +2024-08-03 09:47:31,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=87901.0, ans=0.0 +2024-08-03 09:47:35,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=87937.66666666667, ans=0.0 +2024-08-03 09:47:42,914 INFO [train.py:1114] (1/4) Epoch 7, batch 2150, loss[loss=0.28, simple_loss=0.3388, pruned_loss=0.1106, over 13559.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3115, pruned_loss=0.08062, over 2647097.22 frames. 
], batch size: 36, lr: 1.74e-02, grad_scale: 32.0 +2024-08-03 09:47:44,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=87974.33333333333, ans=0.04949747468305833 +2024-08-03 09:47:55,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=88011.0, ans=0.125 +2024-08-03 09:47:57,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.62 vs. limit=22.5 +2024-08-03 09:48:07,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=88047.66666666667, ans=0.125 +2024-08-03 09:48:10,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88047.66666666667, ans=0.1 +2024-08-03 09:48:34,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88084.33333333333, ans=0.0 +2024-08-03 09:49:41,826 INFO [train.py:1114] (1/4) Epoch 7, batch 2200, loss[loss=0.2364, simple_loss=0.3144, pruned_loss=0.07919, over 13417.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3113, pruned_loss=0.08048, over 2645115.83 frames. ], batch size: 39, lr: 1.74e-02, grad_scale: 32.0 +2024-08-03 09:49:43,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=88157.66666666667, ans=0.0 +2024-08-03 09:49:45,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=88157.66666666667, ans=0.0 +2024-08-03 09:49:45,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=88157.66666666667, ans=0.125 +2024-08-03 09:49:56,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.554e+01 1.287e+02 1.626e+02 2.364e+02 4.219e+02, threshold=3.252e+02, percent-clipped=14.0 +2024-08-03 09:50:07,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88194.33333333333, ans=0.1 +2024-08-03 09:50:24,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=88231.0, ans=0.125 +2024-08-03 09:50:31,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.52 vs. limit=12.0 +2024-08-03 09:50:41,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=88267.66666666667, ans=0.125 +2024-08-03 09:51:40,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=88304.33333333333, ans=0.0 +2024-08-03 09:51:41,817 INFO [train.py:1114] (1/4) Epoch 7, batch 2250, loss[loss=0.2517, simple_loss=0.3295, pruned_loss=0.08697, over 13350.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3108, pruned_loss=0.07995, over 2642622.07 frames. ], batch size: 37, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:51:52,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=88341.0, ans=10.0 +2024-08-03 09:52:00,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.44 vs. 
limit=8.0 +2024-08-03 09:52:57,971 INFO [train.py:1114] (1/4) Epoch 7, batch 2300, loss[loss=0.2115, simple_loss=0.2866, pruned_loss=0.0682, over 13557.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3097, pruned_loss=0.07972, over 2638545.12 frames. ], batch size: 33, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:52:58,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=88524.33333333333, ans=0.2 +2024-08-03 09:53:04,297 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.405e+01 1.244e+02 1.416e+02 1.864e+02 3.449e+02, threshold=2.832e+02, percent-clipped=2.0 +2024-08-03 09:53:08,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.39 vs. limit=15.0 +2024-08-03 09:53:09,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=88561.0, ans=0.2 +2024-08-03 09:53:44,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=88671.0, ans=0.0 +2024-08-03 09:53:52,476 INFO [train.py:1114] (1/4) Epoch 7, batch 2350, loss[loss=0.2234, simple_loss=0.3033, pruned_loss=0.07175, over 13552.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3098, pruned_loss=0.07962, over 2641812.51 frames. ], batch size: 38, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:55:27,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.26 vs. limit=15.0 +2024-08-03 09:55:33,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=88817.66666666667, ans=0.2 +2024-08-03 09:55:38,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=88817.66666666667, ans=0.1 +2024-08-03 09:55:47,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=88854.33333333333, ans=0.2 +2024-08-03 09:55:52,790 INFO [train.py:1114] (1/4) Epoch 7, batch 2400, loss[loss=0.2044, simple_loss=0.2922, pruned_loss=0.05828, over 13534.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3112, pruned_loss=0.08033, over 2642744.06 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:56:07,277 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.279e+02 1.511e+02 1.745e+02 2.971e+02, threshold=3.023e+02, percent-clipped=1.0 +2024-08-03 09:56:14,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=88927.66666666667, ans=0.0 +2024-08-03 09:56:33,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=15.0 +2024-08-03 09:56:36,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.39 vs. limit=15.0 +2024-08-03 09:56:53,285 INFO [train.py:1114] (1/4) Epoch 7, batch 2450, loss[loss=0.2561, simple_loss=0.3382, pruned_loss=0.08699, over 13358.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.313, pruned_loss=0.08135, over 2632391.12 frames. 
], batch size: 37, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:56:57,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=89074.33333333333, ans=0.2 +2024-08-03 09:56:58,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=89074.33333333333, ans=0.125 +2024-08-03 09:56:59,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.92 vs. limit=15.0 +2024-08-03 09:57:11,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.34 vs. limit=10.0 +2024-08-03 09:57:23,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=89147.66666666667, ans=0.125 +2024-08-03 09:57:44,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=89221.0, ans=0.0 +2024-08-03 09:57:52,088 INFO [train.py:1114] (1/4) Epoch 7, batch 2500, loss[loss=0.2649, simple_loss=0.3353, pruned_loss=0.09728, over 13400.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3122, pruned_loss=0.08054, over 2636520.28 frames. ], batch size: 39, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:57:59,334 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.252e+02 1.492e+02 2.074e+02 3.860e+02, threshold=2.984e+02, percent-clipped=5.0 +2024-08-03 09:58:07,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.86 vs. limit=10.0 +2024-08-03 09:58:08,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89294.33333333333, ans=0.1 +2024-08-03 09:58:09,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=89331.0, ans=0.125 +2024-08-03 09:58:12,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=89331.0, ans=0.025 +2024-08-03 09:58:16,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=15.0 +2024-08-03 09:58:16,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=89331.0, ans=0.125 +2024-08-03 09:58:30,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.94 vs. limit=15.0 +2024-08-03 09:58:35,997 INFO [train.py:1114] (1/4) Epoch 7, batch 2550, loss[loss=0.2099, simple_loss=0.2851, pruned_loss=0.0674, over 13520.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3122, pruned_loss=0.0806, over 2637372.64 frames. 
], batch size: 31, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:58:46,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89477.66666666667, ans=0.125 +2024-08-03 09:58:53,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89514.33333333333, ans=0.0 +2024-08-03 09:59:00,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.53 vs. limit=15.0 +2024-08-03 09:59:06,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=89551.0, ans=0.125 +2024-08-03 09:59:18,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=89624.33333333333, ans=0.125 +2024-08-03 09:59:19,627 INFO [train.py:1114] (1/4) Epoch 7, batch 2600, loss[loss=0.2357, simple_loss=0.31, pruned_loss=0.08073, over 13568.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3129, pruned_loss=0.081, over 2636541.73 frames. ], batch size: 36, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:59:20,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-08-03 09:59:23,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=89624.33333333333, ans=0.2 +2024-08-03 09:59:26,383 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.355e+01 1.229e+02 1.441e+02 1.780e+02 3.809e+02, threshold=2.882e+02, percent-clipped=4.0 +2024-08-03 09:59:28,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0 +2024-08-03 09:59:35,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.19 vs. limit=15.0 +2024-08-03 09:59:36,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=89697.66666666667, ans=0.025 +2024-08-03 09:59:36,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=89697.66666666667, ans=0.0 +2024-08-03 09:59:43,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=89697.66666666667, ans=0.0 +2024-08-03 09:59:45,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=89734.33333333333, ans=0.0 +2024-08-03 10:00:02,671 INFO [train.py:1114] (1/4) Epoch 7, batch 2650, loss[loss=0.2528, simple_loss=0.337, pruned_loss=0.08432, over 13314.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3129, pruned_loss=0.08066, over 2639376.90 frames. 
], batch size: 46, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 10:00:07,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=89807.66666666667, ans=0.0 +2024-08-03 10:00:10,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=89844.33333333333, ans=0.125 +2024-08-03 10:01:01,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0 +2024-08-03 10:01:02,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=89954.33333333333, ans=0.125 +2024-08-03 10:01:08,366 INFO [train.py:1114] (1/4) Epoch 7, batch 2700, loss[loss=0.2833, simple_loss=0.3479, pruned_loss=0.1093, over 13563.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3138, pruned_loss=0.08139, over 2636687.73 frames. ], batch size: 40, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 10:01:08,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=89991.0, ans=0.0 +2024-08-03 10:01:11,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.17 vs. limit=15.0 +2024-08-03 10:01:13,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.73 vs. limit=15.0 +2024-08-03 10:01:16,147 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.262e+02 1.504e+02 2.229e+02 3.961e+02, threshold=3.008e+02, percent-clipped=4.0 +2024-08-03 10:01:26,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=90064.33333333333, ans=0.125 +2024-08-03 10:01:35,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90101.0, ans=0.125 +2024-08-03 10:01:43,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.09 vs. limit=15.0 +2024-08-03 10:01:53,310 INFO [train.py:1114] (1/4) Epoch 7, batch 2750, loss[loss=0.2081, simple_loss=0.2836, pruned_loss=0.06627, over 13340.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3122, pruned_loss=0.08103, over 2635706.14 frames. 
], batch size: 34, lr: 1.73e-02, grad_scale: 8.0 +2024-08-03 10:01:55,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90174.33333333333, ans=0.1 +2024-08-03 10:02:03,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=90211.0, ans=0.025 +2024-08-03 10:02:13,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90247.66666666667, ans=0.1 +2024-08-03 10:02:20,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90284.33333333333, ans=0.1 +2024-08-03 10:02:29,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90321.0, ans=0.1 +2024-08-03 10:02:32,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=90321.0, ans=0.0 +2024-08-03 10:02:37,941 INFO [train.py:1114] (1/4) Epoch 7, batch 2800, loss[loss=0.3278, simple_loss=0.3749, pruned_loss=0.1403, over 9618.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3123, pruned_loss=0.08099, over 2627004.59 frames. ], batch size: 96, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:02:46,063 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.878e+01 1.254e+02 1.435e+02 1.719e+02 3.010e+02, threshold=2.870e+02, percent-clipped=1.0 +2024-08-03 10:02:47,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=90394.33333333333, ans=0.0 +2024-08-03 10:03:25,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=90504.33333333333, ans=0.2 +2024-08-03 10:03:27,736 INFO [train.py:1114] (1/4) Epoch 7, batch 2850, loss[loss=0.2037, simple_loss=0.2796, pruned_loss=0.06384, over 13374.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3128, pruned_loss=0.08134, over 2620820.25 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:03:35,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90541.0, ans=0.125 +2024-08-03 10:03:50,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=90614.33333333333, ans=0.2 +2024-08-03 10:03:52,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90614.33333333333, ans=0.125 +2024-08-03 10:03:59,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=90651.0, ans=0.125 +2024-08-03 10:04:01,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=90651.0, ans=0.0 +2024-08-03 10:04:08,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=90687.66666666667, ans=0.2 +2024-08-03 10:04:12,845 INFO [train.py:1114] (1/4) Epoch 7, batch 2900, loss[loss=0.2315, simple_loss=0.2977, pruned_loss=0.08264, over 13357.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.314, pruned_loss=0.08182, over 2632057.82 frames. 
], batch size: 36, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:04:14,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.24 vs. limit=15.0 +2024-08-03 10:04:24,256 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.250e+02 1.523e+02 1.874e+02 3.482e+02, threshold=3.046e+02, percent-clipped=1.0 +2024-08-03 10:04:28,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.17 vs. limit=22.5 +2024-08-03 10:04:32,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=90761.0, ans=0.0 +2024-08-03 10:04:47,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90834.33333333333, ans=0.125 +2024-08-03 10:04:54,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=90871.0, ans=0.0 +2024-08-03 10:05:02,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=90907.66666666667, ans=0.0 +2024-08-03 10:05:03,276 INFO [train.py:1114] (1/4) Epoch 7, batch 2950, loss[loss=0.241, simple_loss=0.3154, pruned_loss=0.08333, over 13334.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3131, pruned_loss=0.08167, over 2630743.65 frames. ], batch size: 34, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:05:12,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5 +2024-08-03 10:05:16,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.18 vs. limit=6.0 +2024-08-03 10:05:29,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=91017.66666666667, ans=0.125 +2024-08-03 10:05:30,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=91017.66666666667, ans=0.125 +2024-08-03 10:05:31,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=15.0 +2024-08-03 10:05:37,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=91054.33333333333, ans=0.0 +2024-08-03 10:05:43,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0 +2024-08-03 10:05:47,198 INFO [train.py:1114] (1/4) Epoch 7, batch 3000, loss[loss=0.2399, simple_loss=0.3172, pruned_loss=0.08131, over 13543.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3131, pruned_loss=0.08143, over 2631034.55 frames. ], batch size: 37, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:05:47,198 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 10:06:06,638 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.1942, simple_loss=0.2938, pruned_loss=0.04733, over 944034.00 frames. 
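A note on the recurring `WARNING [optim.py:487]` lines in this log: they are internally consistent in that, with `Clipping_scale=2.0`, the printed threshold is always twice the median of the five reported grad-norm quartiles (for example, 2 × 1.492e+02 = 2.984e+02 and 2 × 1.441e+02 = 2.882e+02 above). The sketch below reproduces that bookkeeping under those assumptions; it is an illustration of the scheme the log suggests, not the actual icefall `optim.py`, and the class name `GradNormClipper` is invented for the example.

```python
# Minimal sketch (assumed behavior, not the real icefall optim.py):
# keep a window of recent total gradient norms, set the clipping
# threshold to clipping_scale * median, rescale gradients above it,
# and print the quartiles the way the WARNING lines above do.
from collections import deque

import torch


class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total grad norms
        self.num_batches = 0
        self.num_clipped = 0

    def __call__(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        # Total norm over all parameter gradients, as in clip_grad_norm_.
        total_norm = torch.norm(
            torch.stack([g.detach().norm() for g in grads])
        ).item()
        self.norms.append(total_norm)
        self.num_batches += 1

        # 0/25/50/75/100th percentiles of the recent norms -- the five
        # "grad-norm quartiles" numbers in the log.
        recent = torch.tensor(list(self.norms))
        q = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # 2 x median

        if total_norm > threshold:
            self.num_clipped += 1
            for g in grads:
                g.mul_(threshold / total_norm)  # rescale in place

        print(
            f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
            + " ".join(f"{v:.3e}" for v in q.tolist())
            + f", threshold={threshold:.3e}, "
            f"percent-clipped={100.0 * self.num_clipped / self.num_batches:.1f}"
        )
        return total_norm
```

One caveat: `percent-clipped` in this sketch is cumulative over all batches seen, whereas the log appears to report it per logging interval; resetting `num_clipped`/`num_batches` after each report would match that behavior.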
+2024-08-03 10:06:06,639 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 10:06:11,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91091.0, ans=0.125 +2024-08-03 10:06:14,471 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.238e+02 1.419e+02 1.719e+02 4.359e+02, threshold=2.839e+02, percent-clipped=6.0 +2024-08-03 10:06:17,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=91127.66666666667, ans=0.125 +2024-08-03 10:06:44,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91237.66666666667, ans=0.125 +2024-08-03 10:06:46,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.95 vs. limit=15.0 +2024-08-03 10:06:50,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=91274.33333333333, ans=0.0 +2024-08-03 10:06:51,245 INFO [train.py:1114] (1/4) Epoch 7, batch 3050, loss[loss=0.2274, simple_loss=0.2999, pruned_loss=0.07742, over 13522.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3136, pruned_loss=0.08171, over 2628000.66 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:07:00,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=91311.0, ans=0.035 +2024-08-03 10:07:01,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91311.0, ans=0.1 +2024-08-03 10:07:07,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=91347.66666666667, ans=0.125 +2024-08-03 10:07:14,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=12.0 +2024-08-03 10:07:34,428 INFO [train.py:1114] (1/4) Epoch 7, batch 3100, loss[loss=0.2508, simple_loss=0.3271, pruned_loss=0.08724, over 13305.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.313, pruned_loss=0.08078, over 2627134.28 frames. ], batch size: 46, lr: 1.71e-02, grad_scale: 16.0 +2024-08-03 10:07:42,185 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.930e+01 1.259e+02 1.446e+02 1.808e+02 2.827e+02, threshold=2.891e+02, percent-clipped=0.0 +2024-08-03 10:07:47,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91494.33333333333, ans=0.125 +2024-08-03 10:07:53,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=91531.0, ans=0.5 +2024-08-03 10:07:59,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91567.66666666667, ans=0.125 +2024-08-03 10:08:17,430 INFO [train.py:1114] (1/4) Epoch 7, batch 3150, loss[loss=0.2138, simple_loss=0.2939, pruned_loss=0.06689, over 13034.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3119, pruned_loss=0.07975, over 2628631.89 frames. 
], batch size: 48, lr: 1.71e-02, grad_scale: 16.0 +2024-08-03 10:08:20,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=91641.0, ans=0.0 +2024-08-03 10:08:28,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91677.66666666667, ans=0.125 +2024-08-03 10:08:30,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0 +2024-08-03 10:08:47,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=91751.0, ans=0.125 +2024-08-03 10:08:49,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.21 vs. limit=6.0 +2024-08-03 10:08:55,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=91787.66666666667, ans=0.07 +2024-08-03 10:09:01,220 INFO [train.py:1114] (1/4) Epoch 7, batch 3200, loss[loss=0.2311, simple_loss=0.3081, pruned_loss=0.07706, over 13537.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3117, pruned_loss=0.07995, over 2633631.67 frames. ], batch size: 37, lr: 1.71e-02, grad_scale: 32.0 +2024-08-03 10:09:02,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0 +2024-08-03 10:09:03,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91824.33333333333, ans=0.125 +2024-08-03 10:09:08,916 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.267e+02 1.711e+02 2.068e+02 3.292e+02, threshold=3.421e+02, percent-clipped=4.0 +2024-08-03 10:09:19,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=91897.66666666667, ans=0.125 +2024-08-03 10:09:20,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=91897.66666666667, ans=0.035 +2024-08-03 10:09:37,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=91934.33333333333, ans=0.05 +2024-08-03 10:09:38,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=91971.0, ans=0.2 +2024-08-03 10:09:42,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.27 vs. limit=15.0 +2024-08-03 10:09:46,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=91971.0, ans=0.0 +2024-08-03 10:09:48,353 INFO [train.py:1114] (1/4) Epoch 7, batch 3250, loss[loss=0.259, simple_loss=0.3341, pruned_loss=0.09195, over 13383.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3124, pruned_loss=0.08019, over 2638394.23 frames. ], batch size: 38, lr: 1.71e-02, grad_scale: 32.0 +2024-08-03 10:09:58,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.37 vs. 
limit=15.0 +2024-08-03 10:10:33,758 INFO [train.py:1114] (1/4) Epoch 7, batch 3300, loss[loss=0.2333, simple_loss=0.3191, pruned_loss=0.07374, over 12929.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3102, pruned_loss=0.07939, over 2639771.17 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 16.0 +2024-08-03 10:10:34,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=92191.0, ans=0.2 +2024-08-03 10:10:34,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=92191.0, ans=0.0 +2024-08-03 10:10:35,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=92191.0, ans=0.0 +2024-08-03 10:10:42,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.670e+01 1.272e+02 1.617e+02 1.965e+02 3.247e+02, threshold=3.234e+02, percent-clipped=0.0 +2024-08-03 10:10:59,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=92301.0, ans=0.125 +2024-08-03 10:11:15,784 INFO [train.py:1114] (1/4) Epoch 7, batch 3350, loss[loss=0.2352, simple_loss=0.3128, pruned_loss=0.07875, over 13089.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.311, pruned_loss=0.07977, over 2630438.98 frames. ], batch size: 48, lr: 1.71e-02, grad_scale: 16.0 +2024-08-03 10:11:16,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=92374.33333333333, ans=0.125 +2024-08-03 10:11:27,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.15 vs. limit=15.0 +2024-08-03 10:11:49,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=92484.33333333333, ans=0.2 +2024-08-03 10:11:59,778 INFO [train.py:1114] (1/4) Epoch 7, batch 3400, loss[loss=0.2362, simple_loss=0.3001, pruned_loss=0.08613, over 13562.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3113, pruned_loss=0.08026, over 2625643.63 frames. ], batch size: 31, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:12:04,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=92557.66666666667, ans=0.125 +2024-08-03 10:12:07,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=92557.66666666667, ans=0.2 +2024-08-03 10:12:08,993 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.271e+02 1.505e+02 1.907e+02 3.089e+02, threshold=3.010e+02, percent-clipped=0.0 +2024-08-03 10:12:14,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-08-03 10:12:15,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0 +2024-08-03 10:12:16,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.82 vs. 
limit=15.0 +2024-08-03 10:12:25,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=92667.66666666667, ans=0.95 +2024-08-03 10:12:26,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=92667.66666666667, ans=0.0 +2024-08-03 10:12:31,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.14 vs. limit=22.5 +2024-08-03 10:12:38,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=92704.33333333333, ans=0.125 +2024-08-03 10:12:42,890 INFO [train.py:1114] (1/4) Epoch 7, batch 3450, loss[loss=0.2217, simple_loss=0.3065, pruned_loss=0.06844, over 12878.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3122, pruned_loss=0.08057, over 2628853.67 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:12:52,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=92777.66666666667, ans=0.0 +2024-08-03 10:12:53,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=92777.66666666667, ans=0.125 +2024-08-03 10:13:02,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=92814.33333333333, ans=0.125 +2024-08-03 10:13:12,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=92851.0, ans=0.125 +2024-08-03 10:13:24,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92924.33333333333, ans=0.1 +2024-08-03 10:13:25,195 INFO [train.py:1114] (1/4) Epoch 7, batch 3500, loss[loss=0.2453, simple_loss=0.3146, pruned_loss=0.08797, over 13551.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3112, pruned_loss=0.08039, over 2630732.57 frames. ], batch size: 34, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:13:33,822 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.223e+02 1.539e+02 1.881e+02 2.645e+02, threshold=3.078e+02, percent-clipped=0.0 +2024-08-03 10:13:34,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92961.0, ans=0.125 +2024-08-03 10:13:40,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=92961.0, ans=0.05 +2024-08-03 10:13:49,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93034.33333333333, ans=0.125 +2024-08-03 10:14:03,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-08-03 10:14:09,051 INFO [train.py:1114] (1/4) Epoch 7, batch 3550, loss[loss=0.2627, simple_loss=0.337, pruned_loss=0.09417, over 12332.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.314, pruned_loss=0.08209, over 2629501.15 frames. 
], batch size: 58, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:14:12,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=93107.66666666667, ans=0.125 +2024-08-03 10:14:19,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=93144.33333333333, ans=0.125 +2024-08-03 10:14:39,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93217.66666666667, ans=0.125 +2024-08-03 10:14:53,541 INFO [train.py:1114] (1/4) Epoch 7, batch 3600, loss[loss=0.3393, simple_loss=0.3693, pruned_loss=0.1546, over 9026.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3199, pruned_loss=0.08828, over 2485664.10 frames. ], batch size: 96, lr: 1.70e-02, grad_scale: 32.0 +2024-08-03 10:14:56,691 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.20 vs. limit=10.0 +2024-08-03 10:14:59,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93291.0, ans=0.1 +2024-08-03 10:15:02,198 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.315e+02 1.480e+02 1.683e+02 2.632e+02, threshold=2.960e+02, percent-clipped=0.0 +2024-08-03 10:15:03,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93327.66666666667, ans=0.125 +2024-08-03 10:16:13,466 INFO [train.py:1114] (1/4) Epoch 8, batch 0, loss[loss=0.1977, simple_loss=0.2788, pruned_loss=0.0583, over 13345.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2788, pruned_loss=0.0583, over 13345.00 frames. ], batch size: 33, lr: 1.60e-02, grad_scale: 32.0 +2024-08-03 10:16:13,467 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 10:16:24,030 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.1977, simple_loss=0.2989, pruned_loss=0.04829, over 944034.00 frames. +2024-08-03 10:16:24,031 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 10:16:24,456 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-08-03 10:16:26,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93437.66666666667, ans=0.125 +2024-08-03 10:16:30,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.69 vs. 
limit=10.0 +2024-08-03 10:16:38,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=93474.33333333333, ans=0.0 +2024-08-03 10:16:43,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=93511.0, ans=0.0 +2024-08-03 10:17:00,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=93584.33333333333, ans=0.2 +2024-08-03 10:17:01,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=93584.33333333333, ans=0.025 +2024-08-03 10:17:09,632 INFO [train.py:1114] (1/4) Epoch 8, batch 50, loss[loss=0.1959, simple_loss=0.2677, pruned_loss=0.0621, over 13426.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3108, pruned_loss=0.07739, over 579195.01 frames. ], batch size: 32, lr: 1.60e-02, grad_scale: 16.0 +2024-08-03 10:17:10,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=93621.0, ans=0.125 +2024-08-03 10:17:22,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=93657.66666666667, ans=0.0 +2024-08-03 10:17:30,613 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.247e+02 1.447e+02 2.039e+02 3.809e+02, threshold=2.894e+02, percent-clipped=5.0 +2024-08-03 10:17:56,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=93804.33333333333, ans=0.0 +2024-08-03 10:17:57,632 INFO [train.py:1114] (1/4) Epoch 8, batch 100, loss[loss=0.2318, simple_loss=0.3037, pruned_loss=0.0799, over 13519.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3133, pruned_loss=0.08019, over 1025932.67 frames. ], batch size: 35, lr: 1.60e-02, grad_scale: 16.0 +2024-08-03 10:18:06,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.11 vs. limit=15.0 +2024-08-03 10:18:10,676 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.97 vs. limit=15.0 +2024-08-03 10:18:16,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=93877.66666666667, ans=0.125 +2024-08-03 10:18:30,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=93914.33333333333, ans=0.125 +2024-08-03 10:18:38,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=93951.0, ans=0.025 +2024-08-03 10:18:40,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=15.0 +2024-08-03 10:18:43,049 INFO [train.py:1114] (1/4) Epoch 8, batch 150, loss[loss=0.2001, simple_loss=0.2741, pruned_loss=0.06304, over 13438.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.311, pruned_loss=0.07889, over 1387606.97 frames. 
], batch size: 32, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:18:44,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=93987.66666666667, ans=0.2 +2024-08-03 10:19:02,666 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.928e+01 1.201e+02 1.310e+02 1.526e+02 2.654e+02, threshold=2.621e+02, percent-clipped=0.0 +2024-08-03 10:19:08,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.85 vs. limit=15.0 +2024-08-03 10:19:17,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=94097.66666666667, ans=0.125 +2024-08-03 10:19:20,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=94134.33333333333, ans=0.1 +2024-08-03 10:19:28,076 INFO [train.py:1114] (1/4) Epoch 8, batch 200, loss[loss=0.2727, simple_loss=0.3513, pruned_loss=0.0971, over 12585.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.309, pruned_loss=0.07774, over 1666033.86 frames. ], batch size: 58, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:19:38,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=94207.66666666667, ans=0.025 +2024-08-03 10:19:42,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94207.66666666667, ans=0.1 +2024-08-03 10:19:49,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.52 vs. limit=6.0 +2024-08-03 10:19:51,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94244.33333333333, ans=0.1 +2024-08-03 10:19:54,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=94244.33333333333, ans=0.2 +2024-08-03 10:19:57,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=94244.33333333333, ans=10.0 +2024-08-03 10:20:01,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=94281.0, ans=0.0 +2024-08-03 10:20:23,202 INFO [train.py:1114] (1/4) Epoch 8, batch 250, loss[loss=0.2197, simple_loss=0.3118, pruned_loss=0.0638, over 13308.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3093, pruned_loss=0.07808, over 1885315.22 frames. ], batch size: 46, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:20:29,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=94354.33333333333, ans=0.0 +2024-08-03 10:20:40,718 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.29 vs. 
limit=10.0 +2024-08-03 10:20:41,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=94391.0, ans=0.0 +2024-08-03 10:20:47,225 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.275e+02 1.578e+02 1.902e+02 3.207e+02, threshold=3.155e+02, percent-clipped=3.0 +2024-08-03 10:21:07,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94501.0, ans=0.1 +2024-08-03 10:21:17,483 INFO [train.py:1114] (1/4) Epoch 8, batch 300, loss[loss=0.2506, simple_loss=0.3181, pruned_loss=0.09159, over 13441.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.309, pruned_loss=0.07792, over 2052211.63 frames. ], batch size: 42, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:21:28,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=94574.33333333333, ans=0.125 +2024-08-03 10:21:38,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=94574.33333333333, ans=0.2 +2024-08-03 10:21:45,841 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:21:46,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94611.0, ans=0.1 +2024-08-03 10:21:52,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=94647.66666666667, ans=0.125 +2024-08-03 10:22:05,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=94684.33333333333, ans=0.0 +2024-08-03 10:22:12,140 INFO [train.py:1114] (1/4) Epoch 8, batch 350, loss[loss=0.1869, simple_loss=0.2622, pruned_loss=0.05577, over 13592.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.309, pruned_loss=0.07798, over 2182731.00 frames. ], batch size: 33, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:22:30,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=94794.33333333333, ans=0.125 +2024-08-03 10:22:32,140 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.242e+02 1.508e+02 2.025e+02 3.534e+02, threshold=3.015e+02, percent-clipped=1.0 +2024-08-03 10:22:32,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=94794.33333333333, ans=0.125 +2024-08-03 10:22:57,070 INFO [train.py:1114] (1/4) Epoch 8, batch 400, loss[loss=0.2609, simple_loss=0.3357, pruned_loss=0.09303, over 13349.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3088, pruned_loss=0.07778, over 2287050.04 frames. 
], batch size: 37, lr: 1.59e-02, grad_scale: 32.0 +2024-08-03 10:23:00,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=94904.33333333333, ans=0.125 +2024-08-03 10:23:11,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94941.0, ans=0.125 +2024-08-03 10:23:16,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=94977.66666666667, ans=0.125 +2024-08-03 10:23:26,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.64 vs. limit=15.0 +2024-08-03 10:23:39,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.01 vs. limit=15.0 +2024-08-03 10:23:44,500 INFO [train.py:1114] (1/4) Epoch 8, batch 450, loss[loss=0.2664, simple_loss=0.3398, pruned_loss=0.09652, over 13549.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3089, pruned_loss=0.07787, over 2360451.27 frames. ], batch size: 38, lr: 1.59e-02, grad_scale: 32.0 +2024-08-03 10:23:45,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.50 vs. limit=15.0 +2024-08-03 10:23:49,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=15.0 +2024-08-03 10:24:05,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=95161.0, ans=0.2 +2024-08-03 10:24:08,007 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.730e+01 1.246e+02 1.455e+02 1.839e+02 3.207e+02, threshold=2.909e+02, percent-clipped=1.0 +2024-08-03 10:24:17,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=95197.66666666667, ans=0.125 +2024-08-03 10:24:20,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=95197.66666666667, ans=0.2 +2024-08-03 10:24:26,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=95234.33333333333, ans=0.125 +2024-08-03 10:24:28,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.70 vs. limit=15.0 +2024-08-03 10:24:33,351 INFO [train.py:1114] (1/4) Epoch 8, batch 500, loss[loss=0.2554, simple_loss=0.3288, pruned_loss=0.09103, over 13420.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3079, pruned_loss=0.0774, over 2426187.44 frames. ], batch size: 43, lr: 1.58e-02, grad_scale: 32.0 +2024-08-03 10:24:34,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.85 vs. 
limit=22.5 +2024-08-03 10:24:36,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=95271.0, ans=10.0 +2024-08-03 10:24:41,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=95307.66666666667, ans=0.125 +2024-08-03 10:24:54,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95344.33333333333, ans=0.1 +2024-08-03 10:24:59,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=95344.33333333333, ans=0.2 +2024-08-03 10:25:16,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95417.66666666667, ans=0.125 +2024-08-03 10:25:21,150 INFO [train.py:1114] (1/4) Epoch 8, batch 550, loss[loss=0.2713, simple_loss=0.3423, pruned_loss=0.1002, over 13101.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.308, pruned_loss=0.07734, over 2467989.30 frames. ], batch size: 48, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:25:21,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-08-03 10:25:26,219 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.21 vs. limit=15.0 +2024-08-03 10:25:28,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95454.33333333333, ans=0.125 +2024-08-03 10:25:31,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95491.0, ans=0.1 +2024-08-03 10:25:34,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.55 vs. limit=15.0 +2024-08-03 10:25:41,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95527.66666666667, ans=0.125 +2024-08-03 10:25:42,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.590e+01 1.201e+02 1.471e+02 1.924e+02 3.912e+02, threshold=2.942e+02, percent-clipped=7.0 +2024-08-03 10:25:52,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-08-03 10:25:58,040 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.14 vs. limit=15.0 +2024-08-03 10:25:59,677 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-08-03 10:26:04,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95601.0, ans=0.125 +2024-08-03 10:26:06,484 INFO [train.py:1114] (1/4) Epoch 8, batch 600, loss[loss=0.2458, simple_loss=0.3284, pruned_loss=0.08163, over 13346.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3076, pruned_loss=0.07714, over 2507488.45 frames. 
], batch size: 46, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:26:52,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95784.33333333333, ans=0.1 +2024-08-03 10:26:53,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95784.33333333333, ans=0.125 +2024-08-03 10:27:01,400 INFO [train.py:1114] (1/4) Epoch 8, batch 650, loss[loss=0.2247, simple_loss=0.3005, pruned_loss=0.0744, over 13547.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3067, pruned_loss=0.07644, over 2542469.55 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:27:10,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=95857.66666666667, ans=0.125 +2024-08-03 10:27:20,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95894.33333333333, ans=0.125 +2024-08-03 10:27:22,944 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.916e+01 1.338e+02 1.718e+02 2.265e+02 3.658e+02, threshold=3.436e+02, percent-clipped=6.0 +2024-08-03 10:27:24,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=12.0 +2024-08-03 10:27:40,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95931.0, ans=0.125 +2024-08-03 10:27:45,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95967.66666666667, ans=0.125 +2024-08-03 10:27:52,485 INFO [train.py:1114] (1/4) Epoch 8, batch 700, loss[loss=0.2171, simple_loss=0.2964, pruned_loss=0.06888, over 13548.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3068, pruned_loss=0.07644, over 2565404.74 frames. ], batch size: 35, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:28:06,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=96041.0, ans=0.0 +2024-08-03 10:28:15,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=96077.66666666667, ans=0.025 +2024-08-03 10:28:20,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=96114.33333333333, ans=0.125 +2024-08-03 10:28:30,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=96151.0, ans=0.0 +2024-08-03 10:28:38,000 INFO [train.py:1114] (1/4) Epoch 8, batch 750, loss[loss=0.2181, simple_loss=0.3035, pruned_loss=0.0663, over 13353.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3061, pruned_loss=0.07585, over 2583113.74 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:28:38,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=15.0 +2024-08-03 10:28:40,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=22.02 vs. 
limit=22.5 +2024-08-03 10:28:47,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. limit=15.0 +2024-08-03 10:28:54,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96224.33333333333, ans=0.125 +2024-08-03 10:28:56,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96224.33333333333, ans=0.1 +2024-08-03 10:29:15,355 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.402e+01 1.294e+02 1.560e+02 2.121e+02 3.650e+02, threshold=3.121e+02, percent-clipped=1.0 +2024-08-03 10:29:44,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=96297.66666666667, ans=0.09899494936611666 +2024-08-03 10:29:47,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=96297.66666666667, ans=0.0 +2024-08-03 10:29:54,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=96334.33333333333, ans=0.0 +2024-08-03 10:30:00,639 INFO [train.py:1114] (1/4) Epoch 8, batch 800, loss[loss=0.2229, simple_loss=0.2931, pruned_loss=0.07639, over 13340.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3064, pruned_loss=0.07587, over 2597852.82 frames. ], batch size: 33, lr: 1.58e-02, grad_scale: 16.0 +2024-08-03 10:30:01,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=96371.0, ans=0.125 +2024-08-03 10:30:04,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=96371.0, ans=0.125 +2024-08-03 10:30:05,698 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=15.0 +2024-08-03 10:30:08,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=96407.66666666667, ans=0.2 +2024-08-03 10:30:09,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=96407.66666666667, ans=0.0 +2024-08-03 10:30:10,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96407.66666666667, ans=0.1 +2024-08-03 10:30:42,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96444.33333333333, ans=0.125 +2024-08-03 10:30:49,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=96444.33333333333, ans=0.0 +2024-08-03 10:31:01,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.54 vs. limit=15.0 +2024-08-03 10:31:10,618 INFO [train.py:1114] (1/4) Epoch 8, batch 850, loss[loss=0.2235, simple_loss=0.3044, pruned_loss=0.07125, over 13301.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3052, pruned_loss=0.07519, over 2609910.08 frames. 
], batch size: 40, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:31:16,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.34 vs. limit=22.5 +2024-08-03 10:31:19,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=96591.0, ans=0.0 +2024-08-03 10:31:32,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.603e+01 1.283e+02 1.438e+02 1.736e+02 2.880e+02, threshold=2.876e+02, percent-clipped=0.0 +2024-08-03 10:31:40,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=96664.33333333333, ans=0.125 +2024-08-03 10:31:49,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96701.0, ans=0.125 +2024-08-03 10:31:50,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=96701.0, ans=0.0 +2024-08-03 10:31:52,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=96701.0, ans=0.0 +2024-08-03 10:31:53,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.87 vs. limit=22.5 +2024-08-03 10:31:55,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=96737.66666666667, ans=0.125 +2024-08-03 10:31:56,060 INFO [train.py:1114] (1/4) Epoch 8, batch 900, loss[loss=0.2189, simple_loss=0.2904, pruned_loss=0.07375, over 13333.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3061, pruned_loss=0.07581, over 2612663.85 frames. ], batch size: 33, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:31:56,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=15.0 +2024-08-03 10:31:58,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.64 vs. limit=22.5 +2024-08-03 10:32:07,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96774.33333333333, ans=0.1 +2024-08-03 10:32:27,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-08-03 10:32:41,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96884.33333333333, ans=0.1 +2024-08-03 10:32:44,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=96884.33333333333, ans=0.0 +2024-08-03 10:32:47,020 INFO [train.py:1114] (1/4) Epoch 8, batch 950, loss[loss=0.2086, simple_loss=0.2968, pruned_loss=0.06021, over 13546.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3064, pruned_loss=0.07613, over 2613118.84 frames. ], batch size: 34, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:32:49,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.89 vs. 
limit=10.0
+2024-08-03 10:33:08,790 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.880e+01 1.218e+02 1.421e+02 1.776e+02 3.206e+02, threshold=2.842e+02, percent-clipped=1.0
+2024-08-03 10:33:32,182 INFO [train.py:1114] (1/4) Epoch 8, batch 1000, loss[loss=0.2102, simple_loss=0.285, pruned_loss=0.06768, over 13359.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3072, pruned_loss=0.07651, over 2611088.81 frames. ], batch size: 35, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:34:19,173 INFO [train.py:1114] (1/4) Epoch 8, batch 1050, loss[loss=0.2382, simple_loss=0.3189, pruned_loss=0.07877, over 13586.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3064, pruned_loss=0.07631, over 2615706.19 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:34:40,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.371e+01 1.174e+02 1.326e+02 1.659e+02 3.865e+02, threshold=2.652e+02, percent-clipped=4.0
+2024-08-03 10:35:04,456 INFO [train.py:1114] (1/4) Epoch 8, batch 1100, loss[loss=0.2517, simple_loss=0.3154, pruned_loss=0.09401, over 13555.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3054, pruned_loss=0.07566, over 2619402.58 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:35:56,402 INFO [train.py:1114] (1/4) Epoch 8, batch 1150, loss[loss=0.2273, simple_loss=0.3127, pruned_loss=0.07091, over 13530.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3055, pruned_loss=0.07579, over 2619444.13 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:36:17,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.235e+02 1.421e+02 1.826e+02 2.699e+02, threshold=2.842e+02, percent-clipped=1.0
+2024-08-03 10:36:41,662 INFO [train.py:1114] (1/4) Epoch 8, batch 1200, loss[loss=0.2329, simple_loss=0.3102, pruned_loss=0.07782, over 13583.00 frames. ], tot_loss[loss=0.229, simple_loss=0.306, pruned_loss=0.07593, over 2616891.44 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 32.0
+2024-08-03 10:37:37,206 INFO [train.py:1114] (1/4) Epoch 8, batch 1250, loss[loss=0.2312, simple_loss=0.3132, pruned_loss=0.0746, over 13445.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3065, pruned_loss=0.07593, over 2628747.69 frames. ], batch size: 42, lr: 1.56e-02, grad_scale: 32.0
+2024-08-03 10:37:58,744 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.185e+02 1.323e+02 1.561e+02 3.297e+02, threshold=2.645e+02, percent-clipped=2.0
+2024-08-03 10:38:22,560 INFO [train.py:1114] (1/4) Epoch 8, batch 1300, loss[loss=0.2403, simple_loss=0.3226, pruned_loss=0.07902, over 12794.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3057, pruned_loss=0.07539, over 2631822.02 frames. ], batch size: 52, lr: 1.56e-02, grad_scale: 16.0
+2024-08-03 10:39:30,882 INFO [train.py:1114] (1/4) Epoch 8, batch 1350, loss[loss=0.2511, simple_loss=0.329, pruned_loss=0.08662, over 13543.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3061, pruned_loss=0.07569, over 2639543.44 frames. ], batch size: 37, lr: 1.56e-02, grad_scale: 4.0
+2024-08-03 10:40:01,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.224e+02 1.395e+02 1.638e+02 2.508e+02, threshold=2.789e+02, percent-clipped=0.0
+2024-08-03 10:40:22,544 INFO [train.py:1114] (1/4) Epoch 8, batch 1400, loss[loss=0.2072, simple_loss=0.2694, pruned_loss=0.07249, over 13270.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3056, pruned_loss=0.07532, over 2643261.48 frames. ], batch size: 31, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:41:07,774 INFO [train.py:1114] (1/4) Epoch 8, batch 1450, loss[loss=0.2334, simple_loss=0.316, pruned_loss=0.07545, over 13442.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3055, pruned_loss=0.07528, over 2641420.78 frames. ], batch size: 43, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:41:34,027 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.227e+02 1.462e+02 1.726e+02 3.399e+02, threshold=2.923e+02, percent-clipped=2.0
+2024-08-03 10:41:54,820 INFO [train.py:1114] (1/4) Epoch 8, batch 1500, loss[loss=0.2382, simple_loss=0.3199, pruned_loss=0.07821, over 13411.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3058, pruned_loss=0.07532, over 2641305.87 frames. ], batch size: 39, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:42:40,619 INFO [train.py:1114] (1/4) Epoch 8, batch 1550, loss[loss=0.2202, simple_loss=0.3129, pruned_loss=0.0637, over 13389.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3066, pruned_loss=0.07607, over 2630794.28 frames. ], batch size: 41, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:43:04,922 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.564e+01 1.246e+02 1.506e+02 1.858e+02 4.061e+02, threshold=3.012e+02, percent-clipped=4.0
+2024-08-03 10:43:29,764 INFO [train.py:1114] (1/4) Epoch 8, batch 1600, loss[loss=0.2038, simple_loss=0.2934, pruned_loss=0.05707, over 13587.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3062, pruned_loss=0.07611, over 2623219.34 frames. ], batch size: 39, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:44:15,504 INFO [train.py:1114] (1/4) Epoch 8, batch 1650, loss[loss=0.1976, simple_loss=0.2859, pruned_loss=0.05458, over 13316.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3058, pruned_loss=0.07587, over 2620164.08 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:44:40,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.002e+02 1.254e+02 1.426e+02 1.791e+02 5.006e+02, threshold=2.852e+02, percent-clipped=4.0
+2024-08-03 10:45:03,283 INFO [train.py:1114] (1/4) Epoch 8, batch 1700, loss[loss=0.1797, simple_loss=0.2547, pruned_loss=0.05235, over 13279.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.306, pruned_loss=0.07573, over 2629699.05 frames. ], batch size: 31, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:45:48,394 INFO [train.py:1114] (1/4) Epoch 8, batch 1750, loss[loss=0.2156, simple_loss=0.2834, pruned_loss=0.07393, over 13544.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3053, pruned_loss=0.07545, over 2633178.69 frames. ], batch size: 31, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:46:13,077 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.856e+01 1.232e+02 1.552e+02 2.162e+02 4.270e+02, threshold=3.103e+02, percent-clipped=14.0
+2024-08-03 10:46:34,038 INFO [train.py:1114] (1/4) Epoch 8, batch 1800, loss[loss=0.2337, simple_loss=0.3098, pruned_loss=0.07879, over 13554.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3055, pruned_loss=0.07552, over 2634422.15 frames. ], batch size: 38, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:47:24,467 INFO [train.py:1114] (1/4) Epoch 8, batch 1850, loss[loss=0.2453, simple_loss=0.3265, pruned_loss=0.08211, over 13408.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3051, pruned_loss=0.07507, over 2637459.74 frames. ], batch size: 39, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:47:48,905 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.993e+01 1.278e+02 1.540e+02 2.004e+02 3.260e+02, threshold=3.079e+02, percent-clipped=4.0
+2024-08-03 10:48:09,644 INFO [train.py:1114] (1/4) Epoch 8, batch 1900, loss[loss=0.2443, simple_loss=0.3215, pruned_loss=0.08353, over 13321.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3055, pruned_loss=0.07533, over 2640279.76 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:48:56,509 INFO [train.py:1114] (1/4) Epoch 8, batch 1950, loss[loss=0.2036, simple_loss=0.2895, pruned_loss=0.05887, over 13571.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3063, pruned_loss=0.0755, over 2646768.58 frames. ], batch size: 36, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:49:21,461 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.223e+02 1.431e+02 1.772e+02 2.626e+02, threshold=2.861e+02, percent-clipped=0.0
+2024-08-03 10:49:42,384 INFO [train.py:1114] (1/4) Epoch 8, batch 2000, loss[loss=0.2296, simple_loss=0.2993, pruned_loss=0.07991, over 13544.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3071, pruned_loss=0.07623, over 2636006.27 frames. ], batch size: 31, lr: 1.54e-02, grad_scale: 32.0
+2024-08-03 10:50:38,484 INFO [train.py:1114] (1/4) Epoch 8, batch 2050, loss[loss=0.2074, simple_loss=0.2777, pruned_loss=0.06857, over 13451.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3066, pruned_loss=0.07641, over 2633406.19 frames. ], batch size: 32, lr: 1.54e-02, grad_scale: 32.0
+2024-08-03 10:51:03,410 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.251e+02 1.508e+02 1.862e+02 2.983e+02, threshold=3.016e+02, percent-clipped=1.0
+2024-08-03 10:51:23,589 INFO [train.py:1114] (1/4) Epoch 8, batch 2100, loss[loss=0.2227, simple_loss=0.3038, pruned_loss=0.07082, over 13561.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3055, pruned_loss=0.07543, over 2639037.47 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:52:09,818 INFO [train.py:1114] (1/4) Epoch 8, batch 2150, loss[loss=0.2007, simple_loss=0.2864, pruned_loss=0.05743, over 13561.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3045, pruned_loss=0.0747, over 2647323.14 frames. ], batch size: 36, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:52:36,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.507e+01 1.244e+02 1.463e+02 2.141e+02 4.797e+02, threshold=2.925e+02, percent-clipped=7.0
+2024-08-03 10:52:57,017 INFO [train.py:1114] (1/4) Epoch 8, batch 2200, loss[loss=0.2269, simple_loss=0.3138, pruned_loss=0.06994, over 13399.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3042, pruned_loss=0.0742, over 2645271.26 frames. ], batch size: 39, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:53:42,323 INFO [train.py:1114] (1/4) Epoch 8, batch 2250, loss[loss=0.2206, simple_loss=0.3007, pruned_loss=0.0703, over 13352.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3056, pruned_loss=0.07542, over 2642725.42 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:54:09,240 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.704e+01 1.191e+02 1.378e+02 1.872e+02 3.290e+02, threshold=2.756e+02, percent-clipped=1.0
+2024-08-03 10:54:39,887 INFO [train.py:1114] (1/4) Epoch 8, batch 2300, loss[loss=0.2137, simple_loss=0.2831, pruned_loss=0.07215, over 13569.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3044, pruned_loss=0.07509, over 2638652.64 frames. ], batch size: 33, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:55:24,950 INFO [train.py:1114] (1/4) Epoch 8, batch 2350, loss[loss=0.2202, simple_loss=0.3097, pruned_loss=0.0654, over 13543.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3041, pruned_loss=0.07469, over 2640863.38 frames. ], batch size: 38, lr: 1.53e-02, grad_scale: 16.0
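The `WARNING [optim.py:487]` entries in this log report the optimizer's gradient-norm statistics over a window of recent batches: five quantile values (apparently min, 25%, median, 75%, and max), a clipping threshold, and the percentage of batches whose gradients were clipped. In every entry above, the threshold is twice the logged median, matching `Clipping_scale=2.0` (for example, 2.842e+02 = 2.0 × 1.421e+02). Below is a minimal sketch of how such a statistic could be computed and applied; the helper names are hypothetical, not the actual icefall `optim.py` code.

```python
import torch

def grad_norm_stats(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    """Summarize a window of recent per-batch gradient norms like the log lines do."""
    q = torch.quantile(recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]  # clipping_scale * median, e.g. 2.0 * 1.421e+02
    percent_clipped = 100.0 * (recent_norms > threshold).float().mean()
    return q, threshold, percent_clipped

def clip_gradient(grad: torch.Tensor, threshold: torch.Tensor) -> torch.Tensor:
    """Scale a gradient down so its norm does not exceed the threshold."""
    scale = (threshold / (grad.norm() + 1e-20)).clamp(max=1.0)
    return grad * scale
```

Tying the threshold to a running median lets the cutoff track the natural scale of the gradients rather than relying on a fixed constant, which is why the logged thresholds drift between roughly 2.5e+02 and 3.7e+02 over the epoch.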
+2024-08-03 10:55:55,516 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.744e+01 1.287e+02 1.695e+02 2.279e+02 3.908e+02, threshold=3.390e+02, percent-clipped=9.0
+2024-08-03 10:56:24,743 INFO [train.py:1114] (1/4) Epoch 8, batch 2400, loss[loss=0.181, simple_loss=0.2662, pruned_loss=0.04791, over 13531.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3049, pruned_loss=0.07501, over 2642005.45 frames. ], batch size: 35, lr: 1.53e-02, grad_scale: 32.0
+2024-08-03 10:57:11,095 INFO [train.py:1114] (1/4) Epoch 8, batch 2450, loss[loss=0.206, simple_loss=0.292, pruned_loss=0.05999, over 13364.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3056, pruned_loss=0.07545, over 2632295.24 frames. ], batch size: 37, lr: 1.53e-02, grad_scale: 16.0
+2024-08-03 10:57:40,049 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.286e+02 1.590e+02 1.975e+02 2.991e+02, threshold=3.180e+02, percent-clipped=0.0
+2024-08-03 10:57:59,036 INFO [train.py:1114] (1/4) Epoch 8, batch 2500, loss[loss=0.2454, simple_loss=0.328, pruned_loss=0.08143, over 13386.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3055, pruned_loss=0.0757, over 2636091.02 frames. ], batch size: 39, lr: 1.53e-02, grad_scale: 16.0
+2024-08-03 10:58:50,555 INFO [train.py:1114] (1/4) Epoch 8, batch 2550, loss[loss=0.2004, simple_loss=0.2627, pruned_loss=0.06899, over 13554.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3046, pruned_loss=0.07516, over 2636950.25 frames. ], batch size: 31, lr: 1.53e-02, grad_scale: 16.0
+2024-08-03 10:59:15,648 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.849e+01 1.203e+02 1.342e+02 1.554e+02 2.450e+02, threshold=2.684e+02, percent-clipped=0.0
+2024-08-03 10:59:34,145 INFO [train.py:1114] (1/4) Epoch 8, batch 2600, loss[loss=0.1991, simple_loss=0.2793, pruned_loss=0.05946, over 13571.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3053, pruned_loss=0.07547, over 2637154.05 frames. ], batch size: 36, lr: 1.53e-02, grad_scale: 16.0
+2024-08-03 11:00:23,272 INFO [train.py:1114] (1/4) Epoch 8, batch 2650, loss[loss=0.2279, simple_loss=0.3236, pruned_loss=0.06609, over 13377.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3057, pruned_loss=0.07562, over 2640785.50 frames. ], batch size: 46, lr: 1.53e-02, grad_scale: 16.0
+2024-08-03 11:00:48,524 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.954e+01 1.189e+02 1.373e+02 1.657e+02 2.856e+02, threshold=2.745e+02, percent-clipped=2.0
+2024-08-03 11:01:07,302 INFO [train.py:1114] (1/4) Epoch 8, batch 2700, loss[loss=0.2147, simple_loss=0.2995, pruned_loss=0.06493, over 13537.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3054, pruned_loss=0.07535, over 2637748.70 frames. ], batch size: 40, lr: 1.53e-02, grad_scale: 16.0
+2024-08-03 11:01:51,073 INFO [train.py:1114] (1/4) Epoch 8, batch 2750, loss[loss=0.2316, simple_loss=0.2915, pruned_loss=0.08587, over 13325.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3047, pruned_loss=0.07527, over 2635259.16 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0
+2024-08-03 11:02:18,841 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.453e+01 1.294e+02 1.512e+02 1.970e+02 3.598e+02, threshold=3.023e+02, percent-clipped=4.0
+2024-08-03 11:02:37,583 INFO [train.py:1114] (1/4) Epoch 8, batch 2800, loss[loss=0.3224, simple_loss=0.3637, pruned_loss=0.1406, over 9701.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3046, pruned_loss=0.07543, over 2627182.34 frames. ], batch size: 96, lr: 1.52e-02, grad_scale: 32.0
+2024-08-03 11:03:22,215 INFO [train.py:1114] (1/4) Epoch 8, batch 2850, loss[loss=0.2039, simple_loss=0.2785, pruned_loss=0.06461, over 13353.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3054, pruned_loss=0.07609, over 2621660.72 frames. ], batch size: 35, lr: 1.52e-02, grad_scale: 16.0
+2024-08-03 11:03:47,796 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.266e+02 1.558e+02 2.014e+02 3.574e+02, threshold=3.117e+02, percent-clipped=3.0
+2024-08-03 11:04:05,039 INFO [train.py:1114] (1/4) Epoch 8, batch 2900, loss[loss=0.2217, simple_loss=0.2996, pruned_loss=0.07191, over 13367.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3063, pruned_loss=0.07619, over 2632465.93 frames. ], batch size: 36, lr: 1.52e-02, grad_scale: 16.0
+2024-08-03 11:04:48,494 INFO [train.py:1114] (1/4) Epoch 8, batch 2950, loss[loss=0.224, simple_loss=0.2884, pruned_loss=0.07981, over 13350.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3053, pruned_loss=0.07579, over 2630367.17 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0
+2024-08-03 11:05:14,601 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.150e+02 1.316e+02 1.628e+02 4.465e+02, threshold=2.631e+02, percent-clipped=1.0
+2024-08-03 11:05:31,937 INFO [train.py:1114] (1/4) Epoch 8, batch 3000, loss[loss=0.2243, simple_loss=0.3038, pruned_loss=0.07243, over 13552.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3043, pruned_loss=0.07506, over 2630158.00 frames. ], batch size: 37, lr: 1.52e-02, grad_scale: 16.0
+2024-08-03 11:05:31,938 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 11:05:42,233 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.1886, simple_loss=0.2887, pruned_loss=0.04428, over 944034.00 frames.
+2024-08-03 11:05:42,234 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB
+2024-08-03 11:06:25,405 INFO [train.py:1114] (1/4) Epoch 8, batch 3050, loss[loss=0.1969, simple_loss=0.2763, pruned_loss=0.05878, over 13536.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3051, pruned_loss=0.07522, over 2627166.91 frames. ], batch size: 35, lr: 1.52e-02, grad_scale: 8.0
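Each `INFO [train.py:1114]` entry pairs the current batch's loss with `tot_loss`, a frames-weighted running average whose `over N frames` field is the effective number of frames the average spans; the count plateaus around 2.6e6 frames mid-epoch and restarts with each epoch (at Epoch 9, batch 50 below it is only 578817.78). Periodically, here at batch 3000 and again at the start of Epoch 9, training pauses to compute a full validation loss (the `train.py:1137/1146/1147` lines). One plausible scheme for such a running average, with illustrative names and not necessarily what `train.py` actually implements:

```python
class RunningLoss:
    """Frames-weighted running average of the training loss (illustrative sketch)."""

    def __init__(self, decay: float = 0.995):
        self.decay = decay        # geometric decay; older batches gradually fade out
        self.weighted_loss = 0.0  # decayed sum of loss * frames
        self.frames = 0.0         # decayed frame count: the "over N frames" field

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.weighted_loss = self.decay * self.weighted_loss + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.weighted_loss / self.frames  # reported as tot_loss
```

As a rough consistency check, with batches of about 13,400 frames a decay of 0.995 plateaus near 13400 / 0.005 ≈ 2.7e6 frames, in the same range as the frame counts logged above.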
+2024-08-03 11:06:54,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.678e+01 1.159e+02 1.376e+02 1.941e+02 3.361e+02, threshold=2.751e+02, percent-clipped=3.0
+2024-08-03 11:07:10,695 INFO [train.py:1114] (1/4) Epoch 8, batch 3100, loss[loss=0.2388, simple_loss=0.3149, pruned_loss=0.08134, over 13325.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.304, pruned_loss=0.0745, over 2626602.69 frames. ], batch size: 46, lr: 1.52e-02, grad_scale: 8.0
+2024-08-03 11:07:53,831 INFO [train.py:1114] (1/4) Epoch 8, batch 3150, loss[loss=0.2388, simple_loss=0.3172, pruned_loss=0.0802, over 13015.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.304, pruned_loss=0.07418, over 2627589.92 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 8.0
+2024-08-03 11:08:20,101 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.438e+02 1.843e+02 2.666e+02 3.777e+02, threshold=3.687e+02, percent-clipped=21.0
+2024-08-03 11:08:36,204 INFO [train.py:1114] (1/4) Epoch 8, batch 3200, loss[loss=0.2285, simple_loss=0.3052, pruned_loss=0.07589, over 13540.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3042, pruned_loss=0.07445, over 2633170.94 frames. ], batch size: 37, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:09:19,471 INFO [train.py:1114] (1/4) Epoch 8, batch 3250, loss[loss=0.2188, simple_loss=0.3013, pruned_loss=0.06821, over 13388.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.305, pruned_loss=0.07472, over 2638177.32 frames. ], batch size: 38, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:09:47,360 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.799e+01 1.281e+02 1.591e+02 1.983e+02 2.904e+02, threshold=3.182e+02, percent-clipped=0.0
+2024-08-03 11:10:03,914 INFO [train.py:1114] (1/4) Epoch 8, batch 3300, loss[loss=0.2465, simple_loss=0.3257, pruned_loss=0.08361, over 12881.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3034, pruned_loss=0.07381, over 2640009.68 frames. ], batch size: 52, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:10:51,478 INFO [train.py:1114] (1/4) Epoch 8, batch 3350, loss[loss=0.2519, simple_loss=0.329, pruned_loss=0.08738, over 13101.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3047, pruned_loss=0.07469, over 2629984.96 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:11:42,001 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.800e+01 1.191e+02 1.380e+02 1.620e+02 2.699e+02, threshold=2.759e+02, percent-clipped=0.0
+2024-08-03 11:12:10,265 INFO [train.py:1114] (1/4) Epoch 8, batch 3400, loss[loss=0.1924, simple_loss=0.2617, pruned_loss=0.06162, over 13536.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3042, pruned_loss=0.07484, over 2625655.56 frames. ], batch size: 31, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:13:06,207 INFO [train.py:1114] (1/4) Epoch 8, batch 3450, loss[loss=0.2642, simple_loss=0.3387, pruned_loss=0.09486, over 12895.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3041, pruned_loss=0.07449, over 2629345.37 frames. ], batch size: 52, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:13:35,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.667e+01 1.192e+02 1.350e+02 1.586e+02 3.469e+02, threshold=2.701e+02, percent-clipped=1.0
+2024-08-03 11:13:54,440 INFO [train.py:1114] (1/4) Epoch 8, batch 3500, loss[loss=0.2109, simple_loss=0.2914, pruned_loss=0.0652, over 13536.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3036, pruned_loss=0.07423, over 2631851.83 frames. ], batch size: 34, lr: 1.51e-02, grad_scale: 16.0
+2024-08-03 11:14:42,315 INFO [train.py:1114] (1/4) Epoch 8, batch 3550, loss[loss=0.2634, simple_loss=0.3322, pruned_loss=0.09726, over 12521.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3061, pruned_loss=0.07566, over 2629996.13 frames. ], batch size: 58, lr: 1.50e-02, grad_scale: 16.0
+2024-08-03 11:15:26,477 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.312e+02 1.471e+02 1.710e+02 3.286e+02, threshold=2.943e+02, percent-clipped=3.0
+2024-08-03 11:15:48,851 INFO [train.py:1114] (1/4) Epoch 8, batch 3600, loss[loss=0.2841, simple_loss=0.3436, pruned_loss=0.1123, over 9279.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3112, pruned_loss=0.08048, over 2486518.02 frames. ], batch size: 96, lr: 1.50e-02, grad_scale: 32.0
+2024-08-03 11:17:27,618 INFO [train.py:1114] (1/4) Epoch 9, batch 0, loss[loss=0.1919, simple_loss=0.2714, pruned_loss=0.05622, over 13321.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2714, pruned_loss=0.05622, over 13321.00 frames. ], batch size: 33, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:17:27,619 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 11:17:39,634 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.1935, simple_loss=0.2948, pruned_loss=0.04614, over 944034.00 frames.
+2024-08-03 11:17:39,635 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB
+2024-08-03 11:18:18,801 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.291e+02 1.434e+02 1.801e+02 3.339e+02, threshold=2.868e+02, percent-clipped=2.0
+2024-08-03 11:18:27,049 INFO [train.py:1114] (1/4) Epoch 9, batch 50, loss[loss=0.1998, simple_loss=0.2757, pruned_loss=0.06194, over 13426.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3088, pruned_loss=0.07723, over 578817.78 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:19:16,442 INFO [train.py:1114] (1/4) Epoch 9, batch 100, loss[loss=0.2269, simple_loss=0.309, pruned_loss=0.07243, over 13540.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3077, pruned_loss=0.0763, over 1025442.45 frames.
], batch size: 35, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:19:16,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=107154.66666666667, ans=0.0 +2024-08-03 11:19:20,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=107154.66666666667, ans=0.2 +2024-08-03 11:19:35,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=107228.0, ans=0.125 +2024-08-03 11:19:37,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107228.0, ans=0.125 +2024-08-03 11:19:41,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=107228.0, ans=0.125 +2024-08-03 11:19:42,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=107228.0, ans=0.0 +2024-08-03 11:19:46,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=107264.66666666667, ans=0.0 +2024-08-03 11:19:50,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=107264.66666666667, ans=0.0 +2024-08-03 11:19:54,655 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.233e+02 1.427e+02 1.918e+02 3.132e+02, threshold=2.853e+02, percent-clipped=1.0 +2024-08-03 11:19:55,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=107301.33333333333, ans=0.2 +2024-08-03 11:20:04,702 INFO [train.py:1114] (1/4) Epoch 9, batch 150, loss[loss=0.181, simple_loss=0.2555, pruned_loss=0.05323, over 13406.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3039, pruned_loss=0.0738, over 1386954.86 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:20:06,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=107338.0, ans=0.0 +2024-08-03 11:20:39,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107448.0, ans=0.125 +2024-08-03 11:20:48,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=107484.66666666667, ans=0.0 +2024-08-03 11:20:56,478 INFO [train.py:1114] (1/4) Epoch 9, batch 200, loss[loss=0.2237, simple_loss=0.3029, pruned_loss=0.07226, over 12613.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3022, pruned_loss=0.07294, over 1664874.20 frames. 
], batch size: 59, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:20:58,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107521.33333333333, ans=0.125 +2024-08-03 11:21:13,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=107558.0, ans=0.2 +2024-08-03 11:21:27,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=107631.33333333333, ans=0.025 +2024-08-03 11:21:36,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107631.33333333333, ans=0.125 +2024-08-03 11:21:39,289 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.622e+01 1.326e+02 1.622e+02 2.251e+02 3.498e+02, threshold=3.245e+02, percent-clipped=9.0 +2024-08-03 11:21:39,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107668.0, ans=0.125 +2024-08-03 11:21:45,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107668.0, ans=0.125 +2024-08-03 11:21:47,906 INFO [train.py:1114] (1/4) Epoch 9, batch 250, loss[loss=0.2451, simple_loss=0.3314, pruned_loss=0.07945, over 13292.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3019, pruned_loss=0.07237, over 1883614.34 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:21:48,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=107704.66666666667, ans=0.125 +2024-08-03 11:21:48,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=107704.66666666667, ans=0.125 +2024-08-03 11:22:00,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107741.33333333333, ans=0.125 +2024-08-03 11:22:02,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=15.0 +2024-08-03 11:22:03,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=107741.33333333333, ans=0.0 +2024-08-03 11:22:20,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=107814.66666666667, ans=0.125 +2024-08-03 11:22:20,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=107814.66666666667, ans=0.09899494936611666 +2024-08-03 11:22:31,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=107851.33333333333, ans=0.2 +2024-08-03 11:22:34,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=107851.33333333333, ans=0.2 +2024-08-03 11:22:34,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=107851.33333333333, ans=0.125 +2024-08-03 11:22:38,746 INFO [train.py:1114] (1/4) Epoch 9, batch 300, loss[loss=0.2169, simple_loss=0.2968, pruned_loss=0.06848, over 13454.00 frames. 
], tot_loss[loss=0.2231, simple_loss=0.3015, pruned_loss=0.07237, over 2051462.95 frames. ], batch size: 42, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:22:38,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=107888.0, ans=0.0 +2024-08-03 11:22:47,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107924.66666666667, ans=0.1 +2024-08-03 11:22:55,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=107924.66666666667, ans=0.0 +2024-08-03 11:23:02,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.06 vs. limit=22.5 +2024-08-03 11:23:10,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107998.0, ans=0.1 +2024-08-03 11:23:11,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=107998.0, ans=0.0 +2024-08-03 11:23:11,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107998.0, ans=0.125 +2024-08-03 11:23:11,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=23.33 vs. limit=22.5 +2024-08-03 11:23:14,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=107998.0, ans=0.2 +2024-08-03 11:23:18,258 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.922e+01 1.191e+02 1.365e+02 1.684e+02 2.533e+02, threshold=2.730e+02, percent-clipped=0.0 +2024-08-03 11:23:32,779 INFO [train.py:1114] (1/4) Epoch 9, batch 350, loss[loss=0.2234, simple_loss=0.2973, pruned_loss=0.0747, over 13584.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3022, pruned_loss=0.07289, over 2183149.54 frames. ], batch size: 33, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:23:33,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108071.33333333333, ans=0.0 +2024-08-03 11:24:37,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=108218.0, ans=0.09899494936611666 +2024-08-03 11:24:38,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108218.0, ans=0.1 +2024-08-03 11:24:44,565 INFO [train.py:1114] (1/4) Epoch 9, batch 400, loss[loss=0.1978, simple_loss=0.2837, pruned_loss=0.05592, over 13367.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3013, pruned_loss=0.07224, over 2287446.63 frames. 
], batch size: 37, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:25:30,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108328.0, ans=0.125 +2024-08-03 11:25:45,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108401.33333333333, ans=0.125 +2024-08-03 11:25:46,045 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.902e+01 1.178e+02 1.332e+02 1.607e+02 2.662e+02, threshold=2.664e+02, percent-clipped=0.0 +2024-08-03 11:25:48,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=108401.33333333333, ans=0.0 +2024-08-03 11:25:49,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=108401.33333333333, ans=0.2 +2024-08-03 11:26:00,583 INFO [train.py:1114] (1/4) Epoch 9, batch 450, loss[loss=0.2462, simple_loss=0.3172, pruned_loss=0.0876, over 13552.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3015, pruned_loss=0.07258, over 2360281.19 frames. ], batch size: 38, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:26:00,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108438.0, ans=0.125 +2024-08-03 11:26:00,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=108438.0, ans=0.02 +2024-08-03 11:26:04,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=108438.0, ans=0.0 +2024-08-03 11:26:17,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=108474.66666666667, ans=0.2 +2024-08-03 11:26:46,757 INFO [train.py:1114] (1/4) Epoch 9, batch 500, loss[loss=0.2333, simple_loss=0.3075, pruned_loss=0.07957, over 13418.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3, pruned_loss=0.07174, over 2426251.89 frames. ], batch size: 43, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:27:18,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108731.33333333333, ans=0.125 +2024-08-03 11:27:19,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=108731.33333333333, ans=0.125 +2024-08-03 11:27:27,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-08-03 11:27:30,443 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.278e+01 1.224e+02 1.411e+02 1.818e+02 3.084e+02, threshold=2.822e+02, percent-clipped=2.0 +2024-08-03 11:27:36,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=108768.0, ans=0.0 +2024-08-03 11:27:37,712 INFO [train.py:1114] (1/4) Epoch 9, batch 550, loss[loss=0.2303, simple_loss=0.3095, pruned_loss=0.07549, over 13017.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3006, pruned_loss=0.07238, over 2469643.82 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 16.0 +2024-08-03 11:28:04,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. 
limit=6.0 +2024-08-03 11:28:25,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108914.66666666667, ans=0.0 +2024-08-03 11:28:27,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=108914.66666666667, ans=0.125 +2024-08-03 11:28:43,874 INFO [train.py:1114] (1/4) Epoch 9, batch 600, loss[loss=0.2335, simple_loss=0.3159, pruned_loss=0.07558, over 13347.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3012, pruned_loss=0.07273, over 2509437.97 frames. ], batch size: 46, lr: 1.41e-02, grad_scale: 16.0 +2024-08-03 11:28:52,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=109024.66666666667, ans=0.07 +2024-08-03 11:29:02,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=109061.33333333333, ans=0.125 +2024-08-03 11:29:12,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=109098.0, ans=0.04949747468305833 +2024-08-03 11:29:15,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=109098.0, ans=0.2 +2024-08-03 11:29:16,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.74 vs. limit=15.0 +2024-08-03 11:29:17,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109098.0, ans=0.0 +2024-08-03 11:29:21,674 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.197e+02 1.407e+02 1.902e+02 4.020e+02, threshold=2.813e+02, percent-clipped=3.0 +2024-08-03 11:29:28,825 INFO [train.py:1114] (1/4) Epoch 9, batch 650, loss[loss=0.2098, simple_loss=0.2925, pruned_loss=0.06352, over 13552.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3002, pruned_loss=0.07207, over 2544474.17 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 16.0 +2024-08-03 11:29:47,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=109244.66666666667, ans=0.2 +2024-08-03 11:29:55,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109281.33333333333, ans=0.125 +2024-08-03 11:29:58,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=109281.33333333333, ans=0.0 +2024-08-03 11:30:15,752 INFO [train.py:1114] (1/4) Epoch 9, batch 700, loss[loss=0.2156, simple_loss=0.297, pruned_loss=0.06703, over 13528.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.301, pruned_loss=0.07231, over 2567037.04 frames. ], batch size: 35, lr: 1.41e-02, grad_scale: 8.0 +2024-08-03 11:30:29,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.10 vs. limit=22.5 +2024-08-03 11:30:34,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.68 vs. 
limit=15.0 +2024-08-03 11:30:51,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=109464.66666666667, ans=0.125 +2024-08-03 11:30:56,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.372e+01 1.239e+02 1.392e+02 1.880e+02 3.301e+02, threshold=2.784e+02, percent-clipped=6.0 +2024-08-03 11:31:03,166 INFO [train.py:1114] (1/4) Epoch 9, batch 750, loss[loss=0.2061, simple_loss=0.2951, pruned_loss=0.05854, over 13360.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3001, pruned_loss=0.07171, over 2583771.97 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 8.0 +2024-08-03 11:31:08,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109538.0, ans=0.1 +2024-08-03 11:31:22,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=109574.66666666667, ans=0.09899494936611666 +2024-08-03 11:31:37,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=109648.0, ans=0.125 +2024-08-03 11:31:45,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.92 vs. limit=15.0 +2024-08-03 11:31:53,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=109721.33333333333, ans=0.125 +2024-08-03 11:31:53,675 INFO [train.py:1114] (1/4) Epoch 9, batch 800, loss[loss=0.1931, simple_loss=0.2664, pruned_loss=0.05993, over 13357.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3, pruned_loss=0.0718, over 2598149.39 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:31:54,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109721.33333333333, ans=0.125 +2024-08-03 11:32:00,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109721.33333333333, ans=0.125 +2024-08-03 11:32:15,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109794.66666666667, ans=0.1 +2024-08-03 11:32:20,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.31 vs. limit=22.5 +2024-08-03 11:32:23,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109831.33333333333, ans=0.1 +2024-08-03 11:32:25,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=109831.33333333333, ans=0.07 +2024-08-03 11:32:34,397 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.676e+01 1.153e+02 1.323e+02 1.787e+02 2.891e+02, threshold=2.646e+02, percent-clipped=1.0 +2024-08-03 11:32:36,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109868.0, ans=0.1 +2024-08-03 11:32:40,792 INFO [train.py:1114] (1/4) Epoch 9, batch 850, loss[loss=0.2081, simple_loss=0.304, pruned_loss=0.05609, over 13325.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2995, pruned_loss=0.07123, over 2610538.07 frames. 
], batch size: 40, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:32:48,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=109904.66666666667, ans=0.125 +2024-08-03 11:32:56,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109941.33333333333, ans=0.1 +2024-08-03 11:32:59,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=109978.0, ans=0.125 +2024-08-03 11:33:07,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=110014.66666666667, ans=0.125 +2024-08-03 11:33:11,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=110014.66666666667, ans=0.0 +2024-08-03 11:33:26,730 INFO [train.py:1114] (1/4) Epoch 9, batch 900, loss[loss=0.1984, simple_loss=0.276, pruned_loss=0.06046, over 13340.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2998, pruned_loss=0.07095, over 2611810.70 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:33:33,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=110088.0, ans=0.125 +2024-08-03 11:33:38,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-08-03 11:33:39,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.43 vs. limit=22.5 +2024-08-03 11:33:41,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110124.66666666667, ans=0.1 +2024-08-03 11:33:58,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=15.0 +2024-08-03 11:34:05,774 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.134e+02 1.308e+02 1.698e+02 3.183e+02, threshold=2.616e+02, percent-clipped=1.0 +2024-08-03 11:38:02,441 INFO [train.py:1114] (1/4) Epoch 9, batch 950, loss[loss=0.207, simple_loss=0.2797, pruned_loss=0.06718, over 13525.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2998, pruned_loss=0.07105, over 2612245.78 frames. ], batch size: 34, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:46:16,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=110271.33333333333, ans=0.0 +2024-08-03 11:46:17,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=110271.33333333333, ans=0.125 +2024-08-03 11:47:39,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.27 vs. 
limit=15.0 +2024-08-03 11:47:43,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110308.0, ans=0.1 +2024-08-03 11:55:46,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110344.66666666667, ans=0.125 +2024-08-03 11:55:50,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.28 vs. limit=15.0 +2024-08-03 11:57:23,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=110454.66666666667, ans=0.0 +2024-08-03 11:57:24,449 INFO [train.py:1114] (1/4) Epoch 9, batch 1000, loss[loss=0.1978, simple_loss=0.2769, pruned_loss=0.05938, over 13364.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3005, pruned_loss=0.07155, over 2610699.39 frames. ], batch size: 35, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 11:58:10,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.50 vs. limit=10.0 +2024-08-03 11:58:16,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110491.33333333333, ans=0.125 +2024-08-03 11:59:05,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110528.0, ans=0.1 +2024-08-03 11:59:13,576 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:02:14,655 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.417e+01 1.320e+02 1.683e+02 2.294e+02 6.382e+02, threshold=3.366e+02, percent-clipped=18.0 +2024-08-03 12:02:46,936 INFO [train.py:1114] (1/4) Epoch 9, batch 1050, loss[loss=0.2315, simple_loss=0.317, pruned_loss=0.07295, over 13578.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3004, pruned_loss=0.07156, over 2614572.37 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 12:02:58,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=110638.0, ans=0.125 +2024-08-03 12:03:09,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=110674.66666666667, ans=0.1 +2024-08-03 12:03:10,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=110674.66666666667, ans=0.0 +2024-08-03 12:03:12,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=110674.66666666667, ans=0.0 +2024-08-03 12:03:23,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=110711.33333333333, ans=0.125 +2024-08-03 12:03:25,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110748.0, ans=0.125 +2024-08-03 12:03:41,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=110784.66666666667, ans=0.0 +2024-08-03 12:03:43,569 INFO [train.py:1114] (1/4) Epoch 9, batch 1100, loss[loss=0.2116, simple_loss=0.2953, pruned_loss=0.06393, over 13564.00 frames. 
], tot_loss[loss=0.2214, simple_loss=0.3001, pruned_loss=0.07137, over 2618880.55 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 12:03:43,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=110821.33333333333, ans=10.0 +2024-08-03 12:03:50,032 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:03:50,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=15.0 +2024-08-03 12:04:06,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.77 vs. limit=22.5 +2024-08-03 12:04:23,167 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.596e+01 1.248e+02 1.447e+02 1.784e+02 2.947e+02, threshold=2.893e+02, percent-clipped=0.0 +2024-08-03 12:04:28,531 INFO [train.py:1114] (1/4) Epoch 9, batch 1150, loss[loss=0.2149, simple_loss=0.2929, pruned_loss=0.06843, over 13569.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3, pruned_loss=0.07146, over 2620107.15 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 8.0 +2024-08-03 12:04:37,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=111004.66666666667, ans=0.0 +2024-08-03 12:05:01,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.16 vs. limit=10.0 +2024-08-03 12:05:13,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=111151.33333333333, ans=0.125 +2024-08-03 12:05:17,650 INFO [train.py:1114] (1/4) Epoch 9, batch 1200, loss[loss=0.2593, simple_loss=0.3366, pruned_loss=0.09102, over 13564.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3013, pruned_loss=0.072, over 2617278.33 frames. 
], batch size: 39, lr: 1.40e-02, grad_scale: 16.0 +2024-08-03 12:05:22,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=111188.0, ans=0.0 +2024-08-03 12:05:27,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=111224.66666666667, ans=0.0 +2024-08-03 12:05:28,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=111224.66666666667, ans=0.125 +2024-08-03 12:05:33,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111224.66666666667, ans=0.125 +2024-08-03 12:05:44,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=111261.33333333333, ans=0.0 +2024-08-03 12:05:45,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=111298.0, ans=0.0 +2024-08-03 12:05:48,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111298.0, ans=0.0 +2024-08-03 12:06:01,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.516e+01 1.186e+02 1.386e+02 1.604e+02 2.506e+02, threshold=2.772e+02, percent-clipped=0.0 +2024-08-03 12:06:02,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=111334.66666666667, ans=0.125 +2024-08-03 12:06:02,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=111334.66666666667, ans=0.5 +2024-08-03 12:06:06,348 INFO [train.py:1114] (1/4) Epoch 9, batch 1250, loss[loss=0.236, simple_loss=0.3197, pruned_loss=0.07616, over 13450.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3019, pruned_loss=0.07187, over 2628642.90 frames. ], batch size: 42, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:06:09,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111371.33333333333, ans=0.125 +2024-08-03 12:06:48,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=111518.0, ans=0.95 +2024-08-03 12:06:48,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111518.0, ans=0.1 +2024-08-03 12:06:51,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=111518.0, ans=0.2 +2024-08-03 12:06:53,531 INFO [train.py:1114] (1/4) Epoch 9, batch 1300, loss[loss=0.1962, simple_loss=0.2848, pruned_loss=0.05383, over 12881.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3004, pruned_loss=0.07109, over 2632033.79 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:06:56,632 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.63 vs. limit=15.0 +2024-08-03 12:07:01,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.82 vs. 
limit=15.0 +2024-08-03 12:07:17,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=111628.0, ans=0.125 +2024-08-03 12:07:25,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=111664.66666666667, ans=0.05 +2024-08-03 12:07:49,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=111701.33333333333, ans=0.2 +2024-08-03 12:07:49,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=111701.33333333333, ans=0.0 +2024-08-03 12:07:52,279 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.616e+01 1.204e+02 1.402e+02 1.778e+02 3.607e+02, threshold=2.805e+02, percent-clipped=3.0 +2024-08-03 12:07:56,906 INFO [train.py:1114] (1/4) Epoch 9, batch 1350, loss[loss=0.2087, simple_loss=0.2877, pruned_loss=0.06487, over 13545.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3002, pruned_loss=0.07112, over 2639064.01 frames. ], batch size: 37, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:08:10,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.42 vs. limit=6.0 +2024-08-03 12:08:16,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=111811.33333333333, ans=0.125 +2024-08-03 12:08:20,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0 +2024-08-03 12:08:45,491 INFO [train.py:1114] (1/4) Epoch 9, batch 1400, loss[loss=0.1824, simple_loss=0.2563, pruned_loss=0.05419, over 13261.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2995, pruned_loss=0.07078, over 2642865.04 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:08:54,946 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:08:56,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=111958.0, ans=0.0 +2024-08-03 12:09:01,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=111958.0, ans=0.125 +2024-08-03 12:09:21,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=112031.33333333333, ans=0.0 +2024-08-03 12:10:28,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=112068.0, ans=0.2 +2024-08-03 12:10:29,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.189e+02 1.393e+02 1.717e+02 2.790e+02, threshold=2.787e+02, percent-clipped=0.0 +2024-08-03 12:10:29,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.68 vs. limit=12.0 +2024-08-03 12:10:45,200 INFO [train.py:1114] (1/4) Epoch 9, batch 1450, loss[loss=0.236, simple_loss=0.321, pruned_loss=0.07553, over 13418.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3007, pruned_loss=0.07139, over 2642140.50 frames. 
], batch size: 43, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:11:09,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-08-03 12:11:47,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=112178.0, ans=0.09899494936611666 +2024-08-03 12:12:29,511 INFO [train.py:1114] (1/4) Epoch 9, batch 1500, loss[loss=0.2064, simple_loss=0.293, pruned_loss=0.05988, over 13382.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3009, pruned_loss=0.07134, over 2642122.05 frames. ], batch size: 39, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:12:34,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112288.0, ans=0.0 +2024-08-03 12:12:34,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=112288.0, ans=0.2 +2024-08-03 12:12:42,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112324.66666666667, ans=0.125 +2024-08-03 12:12:51,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=112361.33333333333, ans=0.0 +2024-08-03 12:13:06,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=112434.66666666667, ans=0.0 +2024-08-03 12:13:10,792 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.784e+01 1.196e+02 1.437e+02 1.780e+02 2.962e+02, threshold=2.875e+02, percent-clipped=1.0 +2024-08-03 12:13:15,274 INFO [train.py:1114] (1/4) Epoch 9, batch 1550, loss[loss=0.2018, simple_loss=0.2918, pruned_loss=0.05591, over 13413.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3009, pruned_loss=0.07174, over 2631395.53 frames. ], batch size: 41, lr: 1.39e-02, grad_scale: 8.0 +2024-08-03 12:13:20,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=112471.33333333333, ans=0.0 +2024-08-03 12:13:25,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112508.0, ans=0.125 +2024-08-03 12:13:27,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=112508.0, ans=0.07 +2024-08-03 12:13:29,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.36 vs. 
limit=15.0 +2024-08-03 12:13:31,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112508.0, ans=0.1 +2024-08-03 12:13:33,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112544.66666666667, ans=0.125 +2024-08-03 12:13:38,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=112544.66666666667, ans=0.025 +2024-08-03 12:13:39,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112544.66666666667, ans=0.125 +2024-08-03 12:13:47,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=112581.33333333333, ans=0.0 +2024-08-03 12:14:00,203 INFO [train.py:1114] (1/4) Epoch 9, batch 1600, loss[loss=0.2034, simple_loss=0.2942, pruned_loss=0.05632, over 13582.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3004, pruned_loss=0.07159, over 2625322.15 frames. ], batch size: 39, lr: 1.39e-02, grad_scale: 16.0 +2024-08-03 12:14:43,177 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.525e+01 1.266e+02 1.505e+02 1.991e+02 3.418e+02, threshold=3.010e+02, percent-clipped=5.0 +2024-08-03 12:14:47,573 INFO [train.py:1114] (1/4) Epoch 9, batch 1650, loss[loss=0.1948, simple_loss=0.2884, pruned_loss=0.05058, over 13323.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2998, pruned_loss=0.07122, over 2622831.61 frames. ], batch size: 40, lr: 1.39e-02, grad_scale: 16.0 +2024-08-03 12:15:03,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.98 vs. limit=15.0 +2024-08-03 12:15:07,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112874.66666666667, ans=0.125 +2024-08-03 12:15:07,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112874.66666666667, ans=0.125 +2024-08-03 12:15:10,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112911.33333333333, ans=0.125 +2024-08-03 12:15:25,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=112948.0, ans=15.0 +2024-08-03 12:15:37,664 INFO [train.py:1114] (1/4) Epoch 9, batch 1700, loss[loss=0.1899, simple_loss=0.266, pruned_loss=0.05686, over 13248.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3, pruned_loss=0.07118, over 2630878.08 frames. 
], batch size: 31, lr: 1.39e-02, grad_scale: 16.0 +2024-08-03 12:15:43,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113021.33333333333, ans=0.125 +2024-08-03 12:15:57,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=113094.66666666667, ans=0.0 +2024-08-03 12:16:19,702 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.237e+01 1.197e+02 1.424e+02 1.856e+02 4.679e+02, threshold=2.848e+02, percent-clipped=5.0 +2024-08-03 12:16:20,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0 +2024-08-03 12:16:24,265 INFO [train.py:1114] (1/4) Epoch 9, batch 1750, loss[loss=0.2091, simple_loss=0.2881, pruned_loss=0.06504, over 13516.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2992, pruned_loss=0.07075, over 2634083.76 frames. ], batch size: 31, lr: 1.38e-02, grad_scale: 16.0 +2024-08-03 12:16:28,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=113204.66666666667, ans=0.125 +2024-08-03 12:16:32,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=113241.33333333333, ans=0.0 +2024-08-03 12:16:45,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113278.0, ans=0.125 +2024-08-03 12:16:45,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=113278.0, ans=0.05 +2024-08-03 12:16:49,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=113278.0, ans=0.5 +2024-08-03 12:16:52,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.42 vs. limit=15.0 +2024-08-03 12:17:09,924 INFO [train.py:1114] (1/4) Epoch 9, batch 1800, loss[loss=0.2367, simple_loss=0.3266, pruned_loss=0.07345, over 13559.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2999, pruned_loss=0.07125, over 2635094.72 frames. ], batch size: 38, lr: 1.38e-02, grad_scale: 16.0 +2024-08-03 12:17:22,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113424.66666666667, ans=0.125 +2024-08-03 12:17:25,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113424.66666666667, ans=0.125 +2024-08-03 12:17:31,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=113461.33333333333, ans=0.125 +2024-08-03 12:17:37,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=16.92 vs. 
limit=15.0 +2024-08-03 12:17:45,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113534.66666666667, ans=0.125 +2024-08-03 12:17:50,712 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.294e+02 1.753e+02 2.320e+02 3.685e+02, threshold=3.507e+02, percent-clipped=11.0 +2024-08-03 12:17:55,235 INFO [train.py:1114] (1/4) Epoch 9, batch 1850, loss[loss=0.2593, simple_loss=0.335, pruned_loss=0.09177, over 13415.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2999, pruned_loss=0.07133, over 2638550.67 frames. ], batch size: 39, lr: 1.38e-02, grad_scale: 16.0 +2024-08-03 12:18:01,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=113571.33333333333, ans=0.125 +2024-08-03 12:18:08,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113608.0, ans=0.1 +2024-08-03 12:18:08,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=15.0 +2024-08-03 12:18:22,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.58 vs. limit=22.5 +2024-08-03 12:18:43,013 INFO [train.py:1114] (1/4) Epoch 9, batch 1900, loss[loss=0.2332, simple_loss=0.3152, pruned_loss=0.07556, over 13323.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2994, pruned_loss=0.07058, over 2641039.61 frames. ], batch size: 40, lr: 1.38e-02, grad_scale: 16.0 +2024-08-03 12:18:50,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=113754.66666666667, ans=0.0 +2024-08-03 12:18:55,011 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.529e-03 +2024-08-03 12:18:59,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-03 12:19:12,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113864.66666666667, ans=0.1 +2024-08-03 12:19:15,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=113864.66666666667, ans=0.0 +2024-08-03 12:19:17,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0 +2024-08-03 12:19:26,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.181e+02 1.342e+02 1.556e+02 3.723e+02, threshold=2.684e+02, percent-clipped=1.0 +2024-08-03 12:19:29,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113901.33333333333, ans=0.125 +2024-08-03 12:19:30,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=113901.33333333333, ans=0.0 +2024-08-03 12:19:33,197 INFO [train.py:1114] (1/4) Epoch 9, batch 1950, loss[loss=0.2176, simple_loss=0.2876, pruned_loss=0.07379, over 13562.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3006, pruned_loss=0.07106, over 2647268.11 frames. 
], batch size: 36, lr: 1.38e-02, grad_scale: 16.0 +2024-08-03 12:19:35,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113938.0, ans=0.125 +2024-08-03 12:19:36,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=113938.0, ans=0.0 +2024-08-03 12:19:36,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113938.0, ans=0.125 +2024-08-03 12:19:55,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=114011.33333333333, ans=0.2 +2024-08-03 12:19:59,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=114011.33333333333, ans=0.0 +2024-08-03 12:20:04,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.80 vs. limit=15.0 +2024-08-03 12:20:08,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0 +2024-08-03 12:20:10,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.62 vs. limit=10.0 +2024-08-03 12:20:23,613 INFO [train.py:1114] (1/4) Epoch 9, batch 2000, loss[loss=0.1992, simple_loss=0.2773, pruned_loss=0.06059, over 13537.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3012, pruned_loss=0.07154, over 2637253.17 frames. ], batch size: 31, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:20:23,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=114121.33333333333, ans=0.125 +2024-08-03 12:20:44,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114194.66666666667, ans=0.1 +2024-08-03 12:20:50,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114194.66666666667, ans=0.125 +2024-08-03 12:21:02,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=114268.0, ans=0.125 +2024-08-03 12:21:05,457 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.389e+01 1.230e+02 1.563e+02 1.827e+02 3.181e+02, threshold=3.125e+02, percent-clipped=4.0 +2024-08-03 12:21:10,000 INFO [train.py:1114] (1/4) Epoch 9, batch 2050, loss[loss=0.2017, simple_loss=0.2719, pruned_loss=0.06572, over 13405.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3002, pruned_loss=0.07133, over 2634261.43 frames. ], batch size: 32, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:21:11,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=114304.66666666667, ans=0.2 +2024-08-03 12:21:50,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=114451.33333333333, ans=0.125 +2024-08-03 12:21:51,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.69 vs. 
limit=15.0 +2024-08-03 12:21:57,221 INFO [train.py:1114] (1/4) Epoch 9, batch 2100, loss[loss=0.2324, simple_loss=0.309, pruned_loss=0.07788, over 13546.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2994, pruned_loss=0.07078, over 2639210.87 frames. ], batch size: 37, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:22:20,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=114561.33333333333, ans=0.1 +2024-08-03 12:22:25,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114598.0, ans=0.1 +2024-08-03 12:22:26,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=114598.0, ans=0.125 +2024-08-03 12:22:43,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=15.0 +2024-08-03 12:22:49,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=114634.66666666667, ans=0.125 +2024-08-03 12:22:54,560 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.194e+02 1.363e+02 1.768e+02 4.718e+02, threshold=2.726e+02, percent-clipped=3.0 +2024-08-03 12:22:55,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.57 vs. limit=15.0 +2024-08-03 12:23:07,973 INFO [train.py:1114] (1/4) Epoch 9, batch 2150, loss[loss=0.2148, simple_loss=0.294, pruned_loss=0.06781, over 13565.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2986, pruned_loss=0.07018, over 2647776.47 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 32.0 +2024-08-03 12:23:21,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114671.33333333333, ans=0.125 +2024-08-03 12:23:28,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=114671.33333333333, ans=0.0 +2024-08-03 12:23:45,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114708.0, ans=0.1 +2024-08-03 12:23:54,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=114744.66666666667, ans=0.0 +2024-08-03 12:23:59,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=114744.66666666667, ans=0.125 +2024-08-03 12:24:18,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=114781.33333333333, ans=0.0 +2024-08-03 12:24:28,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=114781.33333333333, ans=0.2 +2024-08-03 12:24:44,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=114818.0, ans=10.0 +2024-08-03 12:24:59,207 INFO [train.py:1114] (1/4) Epoch 9, batch 2200, loss[loss=0.2242, simple_loss=0.3036, pruned_loss=0.07237, over 13418.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2983, pruned_loss=0.06986, over 2645605.84 frames. 
], batch size: 39, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:25:05,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=114854.66666666667, ans=0.0 +2024-08-03 12:25:05,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.whiten.whitening_limit, batch_count=114854.66666666667, ans=12.0 +2024-08-03 12:25:34,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=114964.66666666667, ans=10.0 +2024-08-03 12:25:39,604 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.51 vs. limit=22.5 +2024-08-03 12:25:47,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.835e+01 1.281e+02 1.616e+02 2.381e+02 3.635e+02, threshold=3.231e+02, percent-clipped=12.0 +2024-08-03 12:25:52,558 INFO [train.py:1114] (1/4) Epoch 9, batch 2250, loss[loss=0.1959, simple_loss=0.2863, pruned_loss=0.05271, over 13351.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2979, pruned_loss=0.06957, over 2643229.72 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:25:58,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.99 vs. limit=10.0 +2024-08-03 12:26:20,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115111.33333333333, ans=0.0 +2024-08-03 12:26:32,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=115184.66666666667, ans=0.125 +2024-08-03 12:26:43,635 INFO [train.py:1114] (1/4) Epoch 9, batch 2300, loss[loss=0.2096, simple_loss=0.2843, pruned_loss=0.06746, over 13572.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2972, pruned_loss=0.0695, over 2640352.92 frames. ], batch size: 33, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:27:09,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=115294.66666666667, ans=0.125 +2024-08-03 12:27:22,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.46 vs. limit=10.0 +2024-08-03 12:27:26,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=115368.0, ans=0.0 +2024-08-03 12:27:28,234 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.909e+01 1.211e+02 1.428e+02 1.712e+02 2.709e+02, threshold=2.855e+02, percent-clipped=0.0 +2024-08-03 12:27:28,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=115368.0, ans=0.0 +2024-08-03 12:27:29,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=115368.0, ans=0.5 +2024-08-03 12:27:32,875 INFO [train.py:1114] (1/4) Epoch 9, batch 2350, loss[loss=0.2122, simple_loss=0.3006, pruned_loss=0.06192, over 13551.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2973, pruned_loss=0.06933, over 2642167.40 frames. 
], batch size: 38, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:27:53,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115478.0, ans=0.125 +2024-08-03 12:27:59,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=115478.0, ans=0.0 +2024-08-03 12:28:00,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=115478.0, ans=0.025 +2024-08-03 12:28:18,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-08-03 12:28:21,708 INFO [train.py:1114] (1/4) Epoch 9, batch 2400, loss[loss=0.2159, simple_loss=0.2985, pruned_loss=0.06667, over 13537.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2989, pruned_loss=0.07048, over 2643035.45 frames. ], batch size: 35, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:28:38,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=115624.66666666667, ans=0.0 +2024-08-03 12:28:38,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=115624.66666666667, ans=0.125 +2024-08-03 12:28:56,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=115698.0, ans=0.125 +2024-08-03 12:29:02,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=115734.66666666667, ans=0.0 +2024-08-03 12:29:04,111 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.274e+02 1.512e+02 2.104e+02 3.890e+02, threshold=3.023e+02, percent-clipped=4.0 +2024-08-03 12:29:08,562 INFO [train.py:1114] (1/4) Epoch 9, batch 2450, loss[loss=0.2434, simple_loss=0.3212, pruned_loss=0.08277, over 13373.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3004, pruned_loss=0.07176, over 2632831.70 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:29:08,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=115771.33333333333, ans=0.125 +2024-08-03 12:29:17,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=115808.0, ans=0.2 +2024-08-03 12:29:28,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115844.66666666667, ans=0.1 +2024-08-03 12:29:44,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.76 vs. limit=15.0 +2024-08-03 12:29:46,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-08-03 12:29:53,264 INFO [train.py:1114] (1/4) Epoch 9, batch 2500, loss[loss=0.2224, simple_loss=0.3022, pruned_loss=0.07133, over 13401.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3001, pruned_loss=0.07133, over 2637175.33 frames. 
], batch size: 39, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:30:00,367 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:30:24,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=116064.66666666667, ans=0.125 +2024-08-03 12:30:31,833 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.707e+01 1.175e+02 1.472e+02 1.847e+02 3.243e+02, threshold=2.944e+02, percent-clipped=1.0 +2024-08-03 12:30:36,224 INFO [train.py:1114] (1/4) Epoch 9, batch 2550, loss[loss=0.1899, simple_loss=0.2597, pruned_loss=0.06007, over 13512.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2996, pruned_loss=0.07088, over 2638674.96 frames. ], batch size: 31, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:30:43,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116138.0, ans=0.1 +2024-08-03 12:31:10,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0 +2024-08-03 12:31:10,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=116284.66666666667, ans=0.0 +2024-08-03 12:31:11,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=116284.66666666667, ans=0.2 +2024-08-03 12:31:20,891 INFO [train.py:1114] (1/4) Epoch 9, batch 2600, loss[loss=0.2242, simple_loss=0.3084, pruned_loss=0.07002, over 13552.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2993, pruned_loss=0.07045, over 2637203.04 frames. ], batch size: 36, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:31:25,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=116321.33333333333, ans=0.125 +2024-08-03 12:31:32,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=116358.0, ans=0.0 +2024-08-03 12:32:00,487 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.327e+01 1.173e+02 1.340e+02 1.707e+02 3.211e+02, threshold=2.680e+02, percent-clipped=1.0 +2024-08-03 12:32:03,944 INFO [train.py:1114] (1/4) Epoch 9, batch 2650, loss[loss=0.2494, simple_loss=0.328, pruned_loss=0.08537, over 13291.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3, pruned_loss=0.07069, over 2639760.04 frames. 
], batch size: 46, lr: 1.37e-02, grad_scale: 16.0 +2024-08-03 12:32:16,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116541.33333333333, ans=0.125 +2024-08-03 12:32:18,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=116541.33333333333, ans=0.2 +2024-08-03 12:32:20,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116578.0, ans=0.0 +2024-08-03 12:32:23,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116578.0, ans=0.0 +2024-08-03 12:32:24,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116578.0, ans=0.125 +2024-08-03 12:32:27,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=116578.0, ans=0.0 +2024-08-03 12:32:31,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.96 vs. limit=15.0 +2024-08-03 12:32:39,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=116651.33333333333, ans=0.2 +2024-08-03 12:32:39,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=116651.33333333333, ans=0.125 +2024-08-03 12:32:40,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=116651.33333333333, ans=0.2 +2024-08-03 12:32:41,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=116651.33333333333, ans=0.025 +2024-08-03 12:32:47,472 INFO [train.py:1114] (1/4) Epoch 9, batch 2700, loss[loss=0.2145, simple_loss=0.2987, pruned_loss=0.06509, over 13557.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3016, pruned_loss=0.07178, over 2637122.42 frames. ], batch size: 40, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:32:48,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=116688.0, ans=0.125 +2024-08-03 12:32:50,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=116688.0, ans=0.0 +2024-08-03 12:32:59,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=116724.66666666667, ans=0.125 +2024-08-03 12:33:11,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116761.33333333333, ans=0.125 +2024-08-03 12:33:20,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=12.0 +2024-08-03 12:33:24,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116834.66666666667, ans=0.125 +2024-08-03 12:33:27,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.13 vs. 
limit=15.0 +2024-08-03 12:33:28,907 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.391e+01 1.337e+02 1.607e+02 2.047e+02 3.156e+02, threshold=3.214e+02, percent-clipped=11.0 +2024-08-03 12:33:32,425 INFO [train.py:1114] (1/4) Epoch 9, batch 2750, loss[loss=0.222, simple_loss=0.3005, pruned_loss=0.07173, over 13309.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3004, pruned_loss=0.07171, over 2634143.67 frames. ], batch size: 34, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:33:33,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116871.33333333333, ans=0.1 +2024-08-03 12:33:39,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=116871.33333333333, ans=0.04949747468305833 +2024-08-03 12:33:45,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=116908.0, ans=0.0 +2024-08-03 12:33:46,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=116908.0, ans=0.125 +2024-08-03 12:33:47,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-08-03 12:33:51,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116944.66666666667, ans=0.125 +2024-08-03 12:33:53,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116944.66666666667, ans=0.125 +2024-08-03 12:33:58,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.07 vs. limit=15.0 +2024-08-03 12:33:58,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.67 vs. limit=22.5 +2024-08-03 12:34:12,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117018.0, ans=0.125 +2024-08-03 12:34:16,673 INFO [train.py:1114] (1/4) Epoch 9, batch 2800, loss[loss=0.2943, simple_loss=0.3547, pruned_loss=0.117, over 9169.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3007, pruned_loss=0.0718, over 2625535.27 frames. ], batch size: 98, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:34:18,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=117054.66666666667, ans=0.125 +2024-08-03 12:34:21,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117054.66666666667, ans=0.125 +2024-08-03 12:34:43,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=117164.66666666667, ans=0.025 +2024-08-03 12:34:45,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.21 vs. 
limit=15.0 +2024-08-03 12:34:47,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=117164.66666666667, ans=0.0 +2024-08-03 12:34:48,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117164.66666666667, ans=0.1 +2024-08-03 12:34:51,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=117164.66666666667, ans=10.0 +2024-08-03 12:34:53,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117201.33333333333, ans=0.1 +2024-08-03 12:34:58,828 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.288e+02 1.462e+02 1.862e+02 3.632e+02, threshold=2.925e+02, percent-clipped=2.0 +2024-08-03 12:35:00,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=117238.0, ans=0.2 +2024-08-03 12:35:01,470 INFO [train.py:1114] (1/4) Epoch 9, batch 2850, loss[loss=0.2339, simple_loss=0.31, pruned_loss=0.07892, over 13362.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3014, pruned_loss=0.07239, over 2619898.21 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:35:01,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.19 vs. limit=15.0 +2024-08-03 12:35:22,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=117311.33333333333, ans=0.0 +2024-08-03 12:35:46,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=117311.33333333333, ans=0.0 +2024-08-03 12:35:47,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=117348.0, ans=0.0 +2024-08-03 12:35:48,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117348.0, ans=0.1 +2024-08-03 12:35:49,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=117348.0, ans=0.2 +2024-08-03 12:36:04,223 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=15.0 +2024-08-03 12:36:05,393 INFO [train.py:1114] (1/4) Epoch 9, batch 2900, loss[loss=0.2316, simple_loss=0.3055, pruned_loss=0.07883, over 13375.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3016, pruned_loss=0.07189, over 2631458.50 frames. ], batch size: 36, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:36:05,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. 
limit=6.0 +2024-08-03 12:36:26,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=117494.66666666667, ans=0.125 +2024-08-03 12:36:45,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=117568.0, ans=0.125 +2024-08-03 12:36:47,217 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.425e+01 1.178e+02 1.305e+02 1.613e+02 2.693e+02, threshold=2.610e+02, percent-clipped=0.0 +2024-08-03 12:36:48,957 INFO [train.py:1114] (1/4) Epoch 9, batch 2950, loss[loss=0.2017, simple_loss=0.2879, pruned_loss=0.05778, over 13327.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3007, pruned_loss=0.07198, over 2629316.38 frames. ], batch size: 34, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:37:15,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=117714.66666666667, ans=0.125 +2024-08-03 12:37:27,403 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:37:32,405 INFO [train.py:1114] (1/4) Epoch 9, batch 3000, loss[loss=0.1894, simple_loss=0.2779, pruned_loss=0.05049, over 13543.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2997, pruned_loss=0.0713, over 2629030.13 frames. ], batch size: 37, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:37:32,405 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 12:37:49,955 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.1846, simple_loss=0.2849, pruned_loss=0.04217, over 944034.00 frames. +2024-08-03 12:37:49,956 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 12:38:03,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=117824.66666666667, ans=0.125 +2024-08-03 12:38:08,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117861.33333333333, ans=0.1 +2024-08-03 12:38:10,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=117861.33333333333, ans=0.2 +2024-08-03 12:38:13,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=117861.33333333333, ans=0.125 +2024-08-03 12:38:29,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117934.66666666667, ans=0.125 +2024-08-03 12:38:31,057 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.182e+02 1.340e+02 1.696e+02 3.056e+02, threshold=2.681e+02, percent-clipped=1.0 +2024-08-03 12:38:32,960 INFO [train.py:1114] (1/4) Epoch 9, batch 3050, loss[loss=0.1921, simple_loss=0.2727, pruned_loss=0.05573, over 13525.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2998, pruned_loss=0.07095, over 2625952.84 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:38:33,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=117971.33333333333, ans=0.0 +2024-08-03 12:38:48,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.39 vs. 
limit=22.5 +2024-08-03 12:38:49,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=118044.66666666667, ans=0.0 +2024-08-03 12:38:51,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=118044.66666666667, ans=0.0 +2024-08-03 12:38:56,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118044.66666666667, ans=0.1 +2024-08-03 12:39:10,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=118118.0, ans=0.125 +2024-08-03 12:39:10,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=118118.0, ans=0.2 +2024-08-03 12:39:14,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=118118.0, ans=0.125 +2024-08-03 12:39:17,093 INFO [train.py:1114] (1/4) Epoch 9, batch 3100, loss[loss=0.2504, simple_loss=0.3275, pruned_loss=0.08668, over 13301.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2999, pruned_loss=0.0709, over 2625800.01 frames. ], batch size: 46, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:39:18,012 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:39:18,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=118154.66666666667, ans=0.0 +2024-08-03 12:39:21,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=118154.66666666667, ans=0.125 +2024-08-03 12:39:24,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=12.0 +2024-08-03 12:39:25,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=15.0 +2024-08-03 12:39:32,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.88 vs. limit=15.0 +2024-08-03 12:39:36,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=118228.0, ans=0.025 +2024-08-03 12:39:48,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.76 vs. limit=15.0 +2024-08-03 12:39:51,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=118301.33333333333, ans=0.025 +2024-08-03 12:39:53,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118301.33333333333, ans=0.1 +2024-08-03 12:39:58,236 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.498e+01 1.162e+02 1.416e+02 1.749e+02 3.223e+02, threshold=2.833e+02, percent-clipped=4.0 +2024-08-03 12:40:00,002 INFO [train.py:1114] (1/4) Epoch 9, batch 3150, loss[loss=0.244, simple_loss=0.3169, pruned_loss=0.08561, over 13278.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3002, pruned_loss=0.07105, over 2627565.18 frames. 
], batch size: 49, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:40:01,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118338.0, ans=0.1 +2024-08-03 12:40:05,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=118338.0, ans=0.0 +2024-08-03 12:40:07,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=118338.0, ans=0.125 +2024-08-03 12:40:09,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.30 vs. limit=15.0 +2024-08-03 12:40:10,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=118374.66666666667, ans=0.125 +2024-08-03 12:40:18,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0 +2024-08-03 12:42:04,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=118448.0, ans=0.025 +2024-08-03 12:42:23,245 INFO [train.py:1114] (1/4) Epoch 9, batch 3200, loss[loss=0.1954, simple_loss=0.2753, pruned_loss=0.05775, over 13537.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2994, pruned_loss=0.07076, over 2634032.20 frames. ], batch size: 37, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:42:24,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=118521.33333333333, ans=0.125 +2024-08-03 12:42:25,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=118521.33333333333, ans=0.0 +2024-08-03 12:42:26,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=118521.33333333333, ans=0.09899494936611666 +2024-08-03 12:42:28,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118521.33333333333, ans=0.125 +2024-08-03 12:42:28,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=118521.33333333333, ans=0.025 +2024-08-03 12:42:30,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.33 vs. limit=15.0 +2024-08-03 12:43:01,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=118668.0, ans=0.125 +2024-08-03 12:43:06,443 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.301e+02 1.590e+02 2.129e+02 3.021e+02, threshold=3.180e+02, percent-clipped=5.0 +2024-08-03 12:43:07,374 INFO [train.py:1114] (1/4) Epoch 9, batch 3250, loss[loss=0.2343, simple_loss=0.3205, pruned_loss=0.07409, over 13384.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2992, pruned_loss=0.07055, over 2638934.43 frames. 
], batch size: 38, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:43:35,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118814.66666666667, ans=0.125 +2024-08-03 12:43:36,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=118814.66666666667, ans=0.0 +2024-08-03 12:43:42,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=15.0 +2024-08-03 12:43:53,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=118851.33333333333, ans=0.0 +2024-08-03 12:43:55,819 INFO [train.py:1114] (1/4) Epoch 9, batch 3300, loss[loss=0.2465, simple_loss=0.328, pruned_loss=0.08255, over 12857.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2978, pruned_loss=0.06957, over 2641015.21 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:43:58,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=118888.0, ans=0.0 +2024-08-03 12:44:00,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.61 vs. limit=15.0 +2024-08-03 12:44:06,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=118888.0, ans=0.125 +2024-08-03 12:44:07,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=118888.0, ans=0.125 +2024-08-03 12:44:12,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118924.66666666667, ans=0.1 +2024-08-03 12:44:15,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118924.66666666667, ans=0.0 +2024-08-03 12:44:32,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=118998.0, ans=0.0 +2024-08-03 12:44:43,220 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.376e+01 1.208e+02 1.436e+02 1.732e+02 3.249e+02, threshold=2.873e+02, percent-clipped=1.0 +2024-08-03 12:44:44,073 INFO [train.py:1114] (1/4) Epoch 9, batch 3350, loss[loss=0.2408, simple_loss=0.3195, pruned_loss=0.08107, over 13000.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2994, pruned_loss=0.07032, over 2629847.46 frames. ], batch size: 48, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:45:03,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=119144.66666666667, ans=0.125 +2024-08-03 12:45:06,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=119144.66666666667, ans=0.2 +2024-08-03 12:45:11,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.81 vs. 
limit=15.0 +2024-08-03 12:45:24,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=119218.0, ans=0.0 +2024-08-03 12:45:26,435 INFO [train.py:1114] (1/4) Epoch 9, batch 3400, loss[loss=0.1891, simple_loss=0.2609, pruned_loss=0.05863, over 13558.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2991, pruned_loss=0.07078, over 2625805.94 frames. ], batch size: 31, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:45:27,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=119254.66666666667, ans=0.0 +2024-08-03 12:45:41,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=119291.33333333333, ans=0.0 +2024-08-03 12:45:54,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=119364.66666666667, ans=0.2 +2024-08-03 12:46:04,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=119401.33333333333, ans=0.0 +2024-08-03 12:46:06,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.63 vs. limit=22.5 +2024-08-03 12:46:17,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.47 vs. limit=15.0 +2024-08-03 12:46:18,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.90 vs. limit=10.0 +2024-08-03 12:46:21,252 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.200e+02 1.400e+02 1.853e+02 3.003e+02, threshold=2.800e+02, percent-clipped=1.0 +2024-08-03 12:46:22,086 INFO [train.py:1114] (1/4) Epoch 9, batch 3450, loss[loss=0.2461, simple_loss=0.3228, pruned_loss=0.0847, over 12871.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2993, pruned_loss=0.07086, over 2629309.15 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:46:23,856 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:46:37,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=119474.66666666667, ans=0.025 +2024-08-03 12:46:43,270 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.48 vs. limit=15.0 +2024-08-03 12:46:58,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=119584.66666666667, ans=0.0 +2024-08-03 12:46:59,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=119584.66666666667, ans=0.125 +2024-08-03 12:46:59,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=119584.66666666667, ans=0.0 +2024-08-03 12:47:06,101 INFO [train.py:1114] (1/4) Epoch 9, batch 3500, loss[loss=0.2115, simple_loss=0.2823, pruned_loss=0.07035, over 13521.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2992, pruned_loss=0.07116, over 2631094.86 frames. 
], batch size: 34, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:47:31,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=119731.33333333333, ans=0.125 +2024-08-03 12:47:37,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=119731.33333333333, ans=0.125 +2024-08-03 12:47:42,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5 +2024-08-03 12:47:44,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119768.0, ans=0.1 +2024-08-03 12:47:51,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.983e+01 1.270e+02 1.503e+02 1.772e+02 3.260e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-03 12:47:52,439 INFO [train.py:1114] (1/4) Epoch 9, batch 3550, loss[loss=0.2404, simple_loss=0.3288, pruned_loss=0.07602, over 12399.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3013, pruned_loss=0.07214, over 2629714.64 frames. ], batch size: 58, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:48:00,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=119841.33333333333, ans=0.125 +2024-08-03 12:48:03,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119841.33333333333, ans=0.1 +2024-08-03 12:48:07,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119841.33333333333, ans=0.1 +2024-08-03 12:48:13,842 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:48:15,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=119878.0, ans=0.125 +2024-08-03 12:48:36,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.05 vs. limit=15.0 +2024-08-03 12:48:37,779 INFO [train.py:1114] (1/4) Epoch 9, batch 3600, loss[loss=0.2803, simple_loss=0.3412, pruned_loss=0.1096, over 9413.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3067, pruned_loss=0.07726, over 2488238.23 frames. ], batch size: 96, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:48:41,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=119988.0, ans=0.125 +2024-08-03 12:48:58,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-03 12:50:09,320 INFO [train.py:1114] (1/4) Epoch 10, batch 0, loss[loss=0.2021, simple_loss=0.278, pruned_loss=0.06313, over 13339.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.278, pruned_loss=0.06313, over 13339.00 frames. 
], batch size: 33, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:50:09,321 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 12:50:16,202 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.2099, 1.2865, 3.1396, 3.0262], device='cuda:1') +2024-08-03 12:50:19,383 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.1895, simple_loss=0.2901, pruned_loss=0.04443, over 944034.00 frames. +2024-08-03 12:50:19,383 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 12:50:22,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120134.66666666667, ans=0.1 +2024-08-03 12:50:23,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120134.66666666667, ans=0.125 +2024-08-03 12:50:27,506 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.231e+02 1.354e+02 1.561e+02 3.235e+02, threshold=2.709e+02, percent-clipped=1.0 +2024-08-03 12:50:34,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120171.33333333333, ans=0.125 +2024-08-03 12:50:42,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=120208.0, ans=0.2 +2024-08-03 12:50:45,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.82 vs. limit=22.5 +2024-08-03 12:50:49,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.69 vs. limit=15.0 +2024-08-03 12:50:52,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=120244.66666666667, ans=0.0 +2024-08-03 12:51:00,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=120281.33333333333, ans=0.0 +2024-08-03 12:51:07,176 INFO [train.py:1114] (1/4) Epoch 10, batch 50, loss[loss=0.1868, simple_loss=0.2612, pruned_loss=0.05619, over 13421.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3002, pruned_loss=0.07021, over 578371.70 frames. 
], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:51:09,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=120318.0, ans=0.09899494936611666 +2024-08-03 12:51:14,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=120318.0, ans=0.0 +2024-08-03 12:51:20,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=120354.66666666667, ans=0.035 +2024-08-03 12:51:20,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120354.66666666667, ans=0.1 +2024-08-03 12:51:20,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=120354.66666666667, ans=0.125 +2024-08-03 12:51:36,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120428.0, ans=0.125 +2024-08-03 12:51:38,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=120428.0, ans=10.0 +2024-08-03 12:51:45,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. limit=15.0 +2024-08-03 12:51:50,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=120464.66666666667, ans=0.2 +2024-08-03 12:51:54,515 INFO [train.py:1114] (1/4) Epoch 10, batch 100, loss[loss=0.1854, simple_loss=0.2651, pruned_loss=0.05286, over 13528.00 frames. ], tot_loss[loss=0.219, simple_loss=0.3001, pruned_loss=0.06893, over 1025781.22 frames. ], batch size: 35, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:52:01,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=120501.33333333333, ans=0.125 +2024-08-03 12:52:02,523 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.137e+01 1.185e+02 1.436e+02 1.784e+02 2.704e+02, threshold=2.871e+02, percent-clipped=0.0 +2024-08-03 12:52:03,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120538.0, ans=0.0 +2024-08-03 12:52:09,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=120538.0, ans=0.025 +2024-08-03 12:52:37,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=120648.0, ans=0.2 +2024-08-03 12:52:39,341 INFO [train.py:1114] (1/4) Epoch 10, batch 150, loss[loss=0.2035, simple_loss=0.2734, pruned_loss=0.06681, over 13413.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2975, pruned_loss=0.06747, over 1386502.88 frames. 
], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:52:43,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=120684.66666666667, ans=0.125 +2024-08-03 12:52:46,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=120684.66666666667, ans=0.125 +2024-08-03 12:53:04,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=120758.0, ans=0.04949747468305833 +2024-08-03 12:53:07,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.65 vs. limit=15.0 +2024-08-03 12:53:09,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=120794.66666666667, ans=0.125 +2024-08-03 12:53:12,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120794.66666666667, ans=0.125 +2024-08-03 12:53:24,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=120831.33333333333, ans=0.0 +2024-08-03 12:53:27,591 INFO [train.py:1114] (1/4) Epoch 10, batch 200, loss[loss=0.2119, simple_loss=0.3008, pruned_loss=0.06152, over 12787.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2963, pruned_loss=0.06769, over 1665335.67 frames. ], batch size: 59, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:33,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=120868.0, ans=0.025 +2024-08-03 12:53:35,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.179e+02 1.388e+02 1.956e+02 3.362e+02, threshold=2.775e+02, percent-clipped=2.0 +2024-08-03 12:53:36,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120904.66666666667, ans=0.125 +2024-08-03 12:53:44,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn1.whiten.whitening_limit, batch_count=120904.66666666667, ans=22.5 +2024-08-03 12:53:44,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=120941.33333333333, ans=0.125 +2024-08-03 12:53:52,329 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:53:56,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=120978.0, ans=0.0 +2024-08-03 12:54:15,686 INFO [train.py:1114] (1/4) Epoch 10, batch 250, loss[loss=0.2335, simple_loss=0.3156, pruned_loss=0.0757, over 13312.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.296, pruned_loss=0.06749, over 1884677.67 frames. 
], batch size: 46, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:54:16,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=121051.33333333333, ans=0.025 +2024-08-03 12:54:18,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=121051.33333333333, ans=0.125 +2024-08-03 12:54:26,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=121088.0, ans=15.0 +2024-08-03 12:54:38,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=121124.66666666667, ans=0.2 +2024-08-03 12:54:51,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=121161.33333333333, ans=0.0 +2024-08-03 12:54:56,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0 +2024-08-03 12:55:01,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0 +2024-08-03 12:55:02,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.68 vs. limit=15.0 +2024-08-03 12:55:03,435 INFO [train.py:1114] (1/4) Epoch 10, batch 300, loss[loss=0.247, simple_loss=0.3274, pruned_loss=0.08335, over 13459.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2954, pruned_loss=0.06733, over 2051410.91 frames. ], batch size: 42, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:09,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121234.66666666667, ans=0.125 +2024-08-03 12:55:11,648 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.594e+01 1.264e+02 1.454e+02 1.818e+02 3.044e+02, threshold=2.909e+02, percent-clipped=3.0 +2024-08-03 12:55:13,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=121271.33333333333, ans=0.125 +2024-08-03 12:55:15,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.02 vs. limit=10.0 +2024-08-03 12:55:23,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.58 vs. 
limit=10.0 +2024-08-03 12:55:31,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=121308.0, ans=0.0 +2024-08-03 12:55:34,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=121344.66666666667, ans=0.125 +2024-08-03 12:55:36,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121344.66666666667, ans=0.125 +2024-08-03 12:55:43,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=121381.33333333333, ans=0.025 +2024-08-03 12:55:50,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.53 vs. limit=15.0 +2024-08-03 12:55:51,192 INFO [train.py:1114] (1/4) Epoch 10, batch 350, loss[loss=0.1895, simple_loss=0.2661, pruned_loss=0.05648, over 13577.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2977, pruned_loss=0.06859, over 2181983.08 frames. ], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:53,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=121418.0, ans=0.125 +2024-08-03 12:55:58,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=121418.0, ans=0.1 +2024-08-03 12:56:05,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=121454.66666666667, ans=0.2 +2024-08-03 12:56:11,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121491.33333333333, ans=0.1 +2024-08-03 12:56:19,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.47 vs. limit=15.0 +2024-08-03 12:56:35,970 INFO [train.py:1114] (1/4) Epoch 10, batch 400, loss[loss=0.2058, simple_loss=0.2956, pruned_loss=0.05801, over 13356.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2974, pruned_loss=0.06851, over 2285940.70 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:56:46,294 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.271e+02 1.420e+02 1.744e+02 2.813e+02, threshold=2.840e+02, percent-clipped=0.0 +2024-08-03 12:57:13,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=121748.0, ans=0.125 +2024-08-03 12:57:19,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121748.0, ans=0.0 +2024-08-03 12:57:23,499 INFO [train.py:1114] (1/4) Epoch 10, batch 450, loss[loss=0.2216, simple_loss=0.304, pruned_loss=0.06959, over 13552.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2973, pruned_loss=0.06826, over 2359246.86 frames. 
], batch size: 38, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:57:42,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=121858.0, ans=0.0 +2024-08-03 12:57:45,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=121858.0, ans=10.0 +2024-08-03 12:58:00,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-08-03 12:58:05,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=121931.33333333333, ans=0.0 +2024-08-03 12:58:12,705 INFO [train.py:1114] (1/4) Epoch 10, batch 500, loss[loss=0.222, simple_loss=0.307, pruned_loss=0.06848, over 13443.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2958, pruned_loss=0.06765, over 2425391.59 frames. ], batch size: 43, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:58:19,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=121968.0, ans=0.95 +2024-08-03 12:58:19,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121968.0, ans=0.1 +2024-08-03 12:58:20,907 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.605e+01 1.105e+02 1.306e+02 1.598e+02 3.062e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 12:58:24,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=122004.66666666667, ans=0.125 +2024-08-03 12:58:32,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=122041.33333333333, ans=0.125 +2024-08-03 12:58:49,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=122114.66666666667, ans=0.0 +2024-08-03 12:58:49,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=122114.66666666667, ans=0.2 +2024-08-03 12:58:52,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=122114.66666666667, ans=0.0 +2024-08-03 12:59:00,199 INFO [train.py:1114] (1/4) Epoch 10, batch 550, loss[loss=0.2577, simple_loss=0.3304, pruned_loss=0.09249, over 13298.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2964, pruned_loss=0.06821, over 2468291.27 frames. 
], batch size: 49, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:03,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=122151.33333333333, ans=12.0 +2024-08-03 12:59:14,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=122188.0, ans=0.125 +2024-08-03 12:59:15,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122188.0, ans=0.1 +2024-08-03 12:59:24,928 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:59:25,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=122224.66666666667, ans=0.0 +2024-08-03 12:59:37,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.37 vs. limit=22.5 +2024-08-03 12:59:45,499 INFO [train.py:1114] (1/4) Epoch 10, batch 600, loss[loss=0.2276, simple_loss=0.3076, pruned_loss=0.07386, over 13336.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2959, pruned_loss=0.06777, over 2506779.78 frames. ], batch size: 46, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:53,553 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.160e+02 1.307e+02 1.564e+02 2.892e+02, threshold=2.615e+02, percent-clipped=4.0 +2024-08-03 13:00:01,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=122371.33333333333, ans=0.05 +2024-08-03 13:00:22,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=122444.66666666667, ans=0.125 +2024-08-03 13:00:23,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122481.33333333333, ans=0.125 +2024-08-03 13:00:31,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=122481.33333333333, ans=0.025 +2024-08-03 13:00:33,101 INFO [train.py:1114] (1/4) Epoch 10, batch 650, loss[loss=0.1894, simple_loss=0.2814, pruned_loss=0.04872, over 13533.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2952, pruned_loss=0.06745, over 2542119.42 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:00:58,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=122591.33333333333, ans=0.125 +2024-08-03 13:00:59,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122628.0, ans=0.1 +2024-08-03 13:01:03,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=122628.0, ans=0.125 +2024-08-03 13:01:19,901 INFO [train.py:1114] (1/4) Epoch 10, batch 700, loss[loss=0.1998, simple_loss=0.2805, pruned_loss=0.05952, over 13517.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.295, pruned_loss=0.06709, over 2564753.15 frames. 
], batch size: 35, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:01:21,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122701.33333333333, ans=0.125 +2024-08-03 13:01:22,816 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:01:28,966 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.487e+01 1.261e+02 1.582e+02 2.111e+02 3.773e+02, threshold=3.165e+02, percent-clipped=11.0 +2024-08-03 13:01:37,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.57 vs. limit=15.0 +2024-08-03 13:01:38,389 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:01:58,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122848.0, ans=0.1 +2024-08-03 13:02:00,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=122848.0, ans=0.0 +2024-08-03 13:02:06,739 INFO [train.py:1114] (1/4) Epoch 10, batch 750, loss[loss=0.2111, simple_loss=0.3001, pruned_loss=0.061, over 13361.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2947, pruned_loss=0.0668, over 2581250.06 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:02:15,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122921.33333333333, ans=0.125 +2024-08-03 13:02:24,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.05 vs. limit=15.0 +2024-08-03 13:02:25,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=122958.0, ans=0.125 +2024-08-03 13:02:51,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0 +2024-08-03 13:08:35,468 INFO [train.py:1114] (1/4) Epoch 10, batch 800, loss[loss=0.1938, simple_loss=0.2734, pruned_loss=0.05705, over 13342.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2947, pruned_loss=0.06676, over 2595229.43 frames. ], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:09:00,772 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.986e+01 1.304e+02 1.516e+02 1.968e+02 2.999e+02, threshold=3.032e+02, percent-clipped=0.0 +2024-08-03 13:09:11,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123104.66666666667, ans=0.125 +2024-08-03 13:09:22,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=123141.33333333333, ans=0.04949747468305833 +2024-08-03 13:09:32,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=123214.66666666667, ans=0.125 +2024-08-03 13:10:07,624 INFO [train.py:1114] (1/4) Epoch 10, batch 850, loss[loss=0.1825, simple_loss=0.2768, pruned_loss=0.04408, over 13315.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2943, pruned_loss=0.0666, over 2608267.85 frames. 
], batch size: 40, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:16,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123288.0, ans=0.125 +2024-08-03 13:10:24,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=15.0 +2024-08-03 13:10:24,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123288.0, ans=0.125 +2024-08-03 13:10:36,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123361.33333333333, ans=0.1 +2024-08-03 13:10:51,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123398.0, ans=0.125 +2024-08-03 13:10:54,530 INFO [train.py:1114] (1/4) Epoch 10, batch 900, loss[loss=0.1917, simple_loss=0.2698, pruned_loss=0.05679, over 13352.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2947, pruned_loss=0.06674, over 2611715.62 frames. ], batch size: 33, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:56,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=123434.66666666667, ans=0.2 +2024-08-03 13:10:59,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=123434.66666666667, ans=0.0 +2024-08-03 13:11:00,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=123434.66666666667, ans=0.125 +2024-08-03 13:11:04,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.190e+01 1.156e+02 1.356e+02 1.629e+02 2.273e+02, threshold=2.713e+02, percent-clipped=0.0 +2024-08-03 13:11:21,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123508.0, ans=0.125 +2024-08-03 13:11:28,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=123544.66666666667, ans=0.025 +2024-08-03 13:11:36,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=123581.33333333333, ans=0.0 +2024-08-03 13:11:41,263 INFO [train.py:1114] (1/4) Epoch 10, batch 950, loss[loss=0.1958, simple_loss=0.278, pruned_loss=0.05683, over 13550.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2947, pruned_loss=0.06689, over 2612211.08 frames. ], batch size: 34, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:11:46,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=123618.0, ans=0.125 +2024-08-03 13:12:11,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=6.0 +2024-08-03 13:12:26,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123764.66666666667, ans=0.1 +2024-08-03 13:12:28,923 INFO [train.py:1114] (1/4) Epoch 10, batch 1000, loss[loss=0.2092, simple_loss=0.2847, pruned_loss=0.06689, over 13354.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2964, pruned_loss=0.06783, over 2611564.02 frames. 
], batch size: 35, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:12:33,194 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:12:41,050 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.614e+01 1.218e+02 1.406e+02 1.942e+02 3.222e+02, threshold=2.813e+02, percent-clipped=3.0 +2024-08-03 13:12:42,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=123838.0, ans=0.0 +2024-08-03 13:12:43,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.28 vs. limit=15.0 +2024-08-03 13:12:47,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=123838.0, ans=0.125 +2024-08-03 13:12:58,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123911.33333333333, ans=0.125 +2024-08-03 13:13:03,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0 +2024-08-03 13:13:04,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=123911.33333333333, ans=0.2 +2024-08-03 13:13:09,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=123948.0, ans=0.07 +2024-08-03 13:13:16,730 INFO [train.py:1114] (1/4) Epoch 10, batch 1050, loss[loss=0.188, simple_loss=0.2787, pruned_loss=0.04866, over 13573.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2955, pruned_loss=0.0676, over 2616147.73 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:13:32,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=15.0 +2024-08-03 13:13:46,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=124094.66666666667, ans=0.025 +2024-08-03 13:13:54,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=124131.33333333333, ans=10.0 +2024-08-03 13:13:56,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=124131.33333333333, ans=0.125 +2024-08-03 13:14:05,009 INFO [train.py:1114] (1/4) Epoch 10, batch 1100, loss[loss=0.1963, simple_loss=0.2829, pruned_loss=0.05481, over 13558.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2949, pruned_loss=0.0671, over 2619893.94 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:14,709 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.380e+01 1.135e+02 1.251e+02 1.585e+02 3.709e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 13:14:29,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.95 vs. limit=22.5 +2024-08-03 13:14:30,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.36 vs. 
limit=15.0 +2024-08-03 13:14:50,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=124351.33333333333, ans=0.025 +2024-08-03 13:14:51,686 INFO [train.py:1114] (1/4) Epoch 10, batch 1150, loss[loss=0.1988, simple_loss=0.2769, pruned_loss=0.06033, over 13556.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2947, pruned_loss=0.06716, over 2618756.59 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:52,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.44 vs. limit=15.0 +2024-08-03 13:14:52,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=124351.33333333333, ans=0.125 +2024-08-03 13:15:05,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124388.0, ans=0.0 +2024-08-03 13:15:15,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.77 vs. limit=15.0 +2024-08-03 13:15:18,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=124461.33333333333, ans=0.025 +2024-08-03 13:15:38,761 INFO [train.py:1114] (1/4) Epoch 10, batch 1200, loss[loss=0.2097, simple_loss=0.2902, pruned_loss=0.06462, over 13574.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2955, pruned_loss=0.06725, over 2616632.91 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:15:39,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=124534.66666666667, ans=0.04949747468305833 +2024-08-03 13:15:45,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=124534.66666666667, ans=0.0 +2024-08-03 13:15:48,572 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.153e+02 1.332e+02 1.610e+02 2.864e+02, threshold=2.663e+02, percent-clipped=2.0 +2024-08-03 13:16:17,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=124681.33333333333, ans=0.125 +2024-08-03 13:16:18,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.87 vs. limit=15.0 +2024-08-03 13:16:20,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=15.0 +2024-08-03 13:16:25,768 INFO [train.py:1114] (1/4) Epoch 10, batch 1250, loss[loss=0.2117, simple_loss=0.2959, pruned_loss=0.06374, over 13433.00 frames. ], tot_loss[loss=0.214, simple_loss=0.295, pruned_loss=0.0665, over 2628055.97 frames. 
], batch size: 42, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:16:25,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=124718.0, ans=0.0 +2024-08-03 13:16:41,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=124754.66666666667, ans=0.5 +2024-08-03 13:16:43,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=124791.33333333333, ans=0.0 +2024-08-03 13:16:45,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124791.33333333333, ans=0.1 +2024-08-03 13:16:54,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=124828.0, ans=0.0 +2024-08-03 13:17:11,299 INFO [train.py:1114] (1/4) Epoch 10, batch 1300, loss[loss=0.2339, simple_loss=0.322, pruned_loss=0.0729, over 12826.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.294, pruned_loss=0.06615, over 2630687.97 frames. ], batch size: 52, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:17:19,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124938.0, ans=0.1 +2024-08-03 13:17:21,173 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.141e+01 1.219e+02 1.549e+02 1.853e+02 2.795e+02, threshold=3.098e+02, percent-clipped=1.0 +2024-08-03 13:17:28,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=124938.0, ans=0.125 +2024-08-03 13:17:45,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.63 vs. limit=15.0 +2024-08-03 13:17:47,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-08-03 13:17:58,635 INFO [train.py:1114] (1/4) Epoch 10, batch 1350, loss[loss=0.1876, simple_loss=0.2713, pruned_loss=0.05199, over 13546.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.294, pruned_loss=0.0663, over 2638372.58 frames. 
], batch size: 37, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:18:02,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125084.66666666667, ans=0.1 +2024-08-03 13:18:12,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=125121.33333333333, ans=0.125 +2024-08-03 13:18:12,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=125121.33333333333, ans=0.125 +2024-08-03 13:18:18,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125158.0, ans=0.125 +2024-08-03 13:18:20,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=125158.0, ans=0.125 +2024-08-03 13:18:26,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=125194.66666666667, ans=0.0 +2024-08-03 13:18:27,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.27 vs. limit=22.5 +2024-08-03 13:18:29,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:29,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:34,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=125194.66666666667, ans=0.07 +2024-08-03 13:18:38,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.76 vs. limit=22.5 +2024-08-03 13:18:45,222 INFO [train.py:1114] (1/4) Epoch 10, batch 1400, loss[loss=0.1836, simple_loss=0.2528, pruned_loss=0.05725, over 13252.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2938, pruned_loss=0.06645, over 2642327.89 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:18:45,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0 +2024-08-03 13:18:48,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.15 vs. 
limit=22.5 +2024-08-03 13:18:49,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=125268.0, ans=0.0 +2024-08-03 13:18:54,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=125304.66666666667, ans=0.125 +2024-08-03 13:18:54,981 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.794e+01 1.134e+02 1.256e+02 1.534e+02 3.011e+02, threshold=2.513e+02, percent-clipped=0.0 +2024-08-03 13:18:55,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=125304.66666666667, ans=0.0 +2024-08-03 13:19:02,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=125341.33333333333, ans=0.0 +2024-08-03 13:19:02,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.69 vs. limit=15.0 +2024-08-03 13:19:19,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=125378.0, ans=0.07 +2024-08-03 13:19:21,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=125378.0, ans=0.125 +2024-08-03 13:19:27,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=125414.66666666667, ans=0.125 +2024-08-03 13:19:27,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=125414.66666666667, ans=0.125 +2024-08-03 13:19:28,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0 +2024-08-03 13:19:31,965 INFO [train.py:1114] (1/4) Epoch 10, batch 1450, loss[loss=0.232, simple_loss=0.3212, pruned_loss=0.07133, over 13436.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.295, pruned_loss=0.06696, over 2641636.93 frames. ], batch size: 43, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:19:49,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=125488.0, ans=0.0 +2024-08-03 13:19:50,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=125488.0, ans=0.0 +2024-08-03 13:20:17,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=125598.0, ans=0.2 +2024-08-03 13:20:18,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=125634.66666666667, ans=0.0 +2024-08-03 13:20:19,154 INFO [train.py:1114] (1/4) Epoch 10, batch 1500, loss[loss=0.243, simple_loss=0.329, pruned_loss=0.07849, over 13402.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2954, pruned_loss=0.06725, over 2641188.97 frames. 
], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:20:29,214 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.867e+01 1.156e+02 1.320e+02 1.724e+02 3.764e+02, threshold=2.640e+02, percent-clipped=6.0 +2024-08-03 13:20:42,170 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:21:06,619 INFO [train.py:1114] (1/4) Epoch 10, batch 1550, loss[loss=0.2093, simple_loss=0.2929, pruned_loss=0.06286, over 13399.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2953, pruned_loss=0.06762, over 2630772.07 frames. ], batch size: 41, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:21:11,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=125818.0, ans=0.2 +2024-08-03 13:21:38,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=12.0 +2024-08-03 13:21:39,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125928.0, ans=0.1 +2024-08-03 13:21:51,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.85 vs. limit=22.5 +2024-08-03 13:21:53,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126001.33333333333, ans=0.1 +2024-08-03 13:21:54,146 INFO [train.py:1114] (1/4) Epoch 10, batch 1600, loss[loss=0.2216, simple_loss=0.3089, pruned_loss=0.06717, over 13576.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2953, pruned_loss=0.06774, over 2623562.13 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:22:00,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=126001.33333333333, ans=0.2 +2024-08-03 13:22:04,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.527e+01 1.158e+02 1.419e+02 1.742e+02 3.880e+02, threshold=2.837e+02, percent-clipped=3.0 +2024-08-03 13:22:15,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=126074.66666666667, ans=0.125 +2024-08-03 13:22:22,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=126111.33333333333, ans=0.2 +2024-08-03 13:22:41,774 INFO [train.py:1114] (1/4) Epoch 10, batch 1650, loss[loss=0.2089, simple_loss=0.2962, pruned_loss=0.06081, over 13332.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2948, pruned_loss=0.06763, over 2621690.43 frames. 
], batch size: 40, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:22:42,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=126184.66666666667, ans=0.025 +2024-08-03 13:22:44,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=126184.66666666667, ans=0.125 +2024-08-03 13:22:46,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=126184.66666666667, ans=0.0 +2024-08-03 13:22:50,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=126221.33333333333, ans=0.2 +2024-08-03 13:23:14,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=126294.66666666667, ans=0.125 +2024-08-03 13:23:29,223 INFO [train.py:1114] (1/4) Epoch 10, batch 1700, loss[loss=0.211, simple_loss=0.2922, pruned_loss=0.06486, over 13257.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2947, pruned_loss=0.06738, over 2629446.22 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:23:31,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126368.0, ans=0.125 +2024-08-03 13:23:39,070 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.220e+01 1.201e+02 1.479e+02 1.994e+02 3.572e+02, threshold=2.957e+02, percent-clipped=7.0 +2024-08-03 13:23:40,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=126404.66666666667, ans=0.0 +2024-08-03 13:23:54,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=126441.33333333333, ans=0.04949747468305833 +2024-08-03 13:24:02,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=126478.0, ans=0.0 +2024-08-03 13:24:11,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=126514.66666666667, ans=0.125 +2024-08-03 13:24:14,863 INFO [train.py:1114] (1/4) Epoch 10, batch 1750, loss[loss=0.2173, simple_loss=0.2832, pruned_loss=0.07567, over 13529.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2947, pruned_loss=0.06762, over 2632956.03 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:24:29,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126588.0, ans=0.125 +2024-08-03 13:24:35,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.32 vs. limit=15.0 +2024-08-03 13:25:01,907 INFO [train.py:1114] (1/4) Epoch 10, batch 1800, loss[loss=0.2141, simple_loss=0.2966, pruned_loss=0.06575, over 13548.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2952, pruned_loss=0.06796, over 2634488.43 frames. ], batch size: 38, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:25:07,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.32 vs. 
limit=22.5 +2024-08-03 13:25:09,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=126734.66666666667, ans=0.2 +2024-08-03 13:25:11,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.656e+01 1.185e+02 1.312e+02 1.554e+02 2.308e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 13:25:21,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=15.0 +2024-08-03 13:25:31,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0 +2024-08-03 13:25:46,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=126881.33333333333, ans=0.125 +2024-08-03 13:25:48,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=126918.0, ans=0.2 +2024-08-03 13:25:49,046 INFO [train.py:1114] (1/4) Epoch 10, batch 1850, loss[loss=0.2202, simple_loss=0.3046, pruned_loss=0.06787, over 13388.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2945, pruned_loss=0.06735, over 2637090.24 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:26:04,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.54 vs. limit=22.5 +2024-08-03 13:26:10,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=126991.33333333333, ans=0.0 +2024-08-03 13:26:23,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127028.0, ans=0.125 +2024-08-03 13:26:28,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=12.0 +2024-08-03 13:26:36,794 INFO [train.py:1114] (1/4) Epoch 10, batch 1900, loss[loss=0.1973, simple_loss=0.2927, pruned_loss=0.05093, over 13330.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2951, pruned_loss=0.06758, over 2639623.41 frames. ], batch size: 40, lr: 1.25e-02, grad_scale: 16.0 +2024-08-03 13:26:39,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127101.33333333333, ans=0.1 +2024-08-03 13:26:46,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127138.0, ans=0.1 +2024-08-03 13:26:49,384 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.604e+01 1.255e+02 1.783e+02 2.547e+02 3.918e+02, threshold=3.565e+02, percent-clipped=23.0 +2024-08-03 13:26:58,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=127174.66666666667, ans=0.0 +2024-08-03 13:27:00,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.30 vs. 
limit=15.0 +2024-08-03 13:27:10,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127211.33333333333, ans=0.125 +2024-08-03 13:27:24,285 INFO [train.py:1114] (1/4) Epoch 10, batch 1950, loss[loss=0.1974, simple_loss=0.2756, pruned_loss=0.05959, over 13571.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2959, pruned_loss=0.06758, over 2646055.82 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:27:27,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=127284.66666666667, ans=0.0 +2024-08-03 13:27:28,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=127284.66666666667, ans=0.125 +2024-08-03 13:27:44,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=127358.0, ans=0.125 +2024-08-03 13:27:59,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127394.66666666667, ans=0.1 +2024-08-03 13:28:04,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127431.33333333333, ans=0.125 +2024-08-03 13:28:11,432 INFO [train.py:1114] (1/4) Epoch 10, batch 2000, loss[loss=0.1704, simple_loss=0.2465, pruned_loss=0.04719, over 13543.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2965, pruned_loss=0.06794, over 2635948.86 frames. ], batch size: 31, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:28:12,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.55 vs. limit=22.5 +2024-08-03 13:28:22,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.471e+01 1.146e+02 1.313e+02 1.617e+02 2.483e+02, threshold=2.626e+02, percent-clipped=0.0 +2024-08-03 13:28:22,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=127504.66666666667, ans=0.025 +2024-08-03 13:28:23,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.62 vs. limit=22.5 +2024-08-03 13:28:29,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=127541.33333333333, ans=0.0 +2024-08-03 13:28:32,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=127541.33333333333, ans=0.025 +2024-08-03 13:28:52,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.47 vs. limit=15.0 +2024-08-03 13:28:53,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.53 vs. limit=6.0 +2024-08-03 13:28:59,000 INFO [train.py:1114] (1/4) Epoch 10, batch 2050, loss[loss=0.1833, simple_loss=0.2604, pruned_loss=0.05307, over 13404.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2953, pruned_loss=0.0676, over 2632253.27 frames. 
], batch size: 32, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:01,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127651.33333333333, ans=0.1 +2024-08-03 13:29:05,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=127651.33333333333, ans=0.0 +2024-08-03 13:29:43,990 INFO [train.py:1114] (1/4) Epoch 10, batch 2100, loss[loss=0.2007, simple_loss=0.2878, pruned_loss=0.05677, over 13557.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2943, pruned_loss=0.06672, over 2637524.49 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:56,753 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.808e+01 1.235e+02 1.463e+02 1.746e+02 3.043e+02, threshold=2.927e+02, percent-clipped=3.0 +2024-08-03 13:29:57,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=127871.33333333333, ans=0.2 +2024-08-03 13:30:03,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=127908.0, ans=0.0 +2024-08-03 13:30:26,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=127981.33333333333, ans=0.0 +2024-08-03 13:30:26,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.56 vs. limit=15.0 +2024-08-03 13:30:33,150 INFO [train.py:1114] (1/4) Epoch 10, batch 2150, loss[loss=0.2019, simple_loss=0.2796, pruned_loss=0.06209, over 13570.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2931, pruned_loss=0.06608, over 2646826.41 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:30:35,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=128018.0, ans=0.125 +2024-08-03 13:30:43,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.42 vs. limit=15.0 +2024-08-03 13:30:51,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=128091.33333333333, ans=0.0 +2024-08-03 13:30:51,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=128091.33333333333, ans=6.0 +2024-08-03 13:30:56,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=128091.33333333333, ans=0.025 +2024-08-03 13:31:05,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=128128.0, ans=0.125 +2024-08-03 13:31:18,220 INFO [train.py:1114] (1/4) Epoch 10, batch 2200, loss[loss=0.1903, simple_loss=0.2856, pruned_loss=0.04747, over 13395.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2933, pruned_loss=0.06608, over 2645627.94 frames. 
], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:31:24,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=128201.33333333333, ans=0.125 +2024-08-03 13:31:28,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128238.0, ans=0.1 +2024-08-03 13:31:29,283 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.267e+02 1.540e+02 2.050e+02 4.140e+02, threshold=3.080e+02, percent-clipped=6.0 +2024-08-03 13:31:30,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=128238.0, ans=0.125 +2024-08-03 13:31:37,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=128274.66666666667, ans=0.125 +2024-08-03 13:32:05,477 INFO [train.py:1114] (1/4) Epoch 10, batch 2250, loss[loss=0.2118, simple_loss=0.3057, pruned_loss=0.05896, over 13358.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2938, pruned_loss=0.06637, over 2643493.32 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:32:13,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=128421.33333333333, ans=0.04949747468305833 +2024-08-03 13:32:28,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=128458.0, ans=0.125 +2024-08-03 13:32:30,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=128458.0, ans=0.125 +2024-08-03 13:32:47,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=12.0 +2024-08-03 13:32:49,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=128531.33333333333, ans=0.125 +2024-08-03 13:32:50,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=128531.33333333333, ans=0.025 +2024-08-03 13:32:52,563 INFO [train.py:1114] (1/4) Epoch 10, batch 2300, loss[loss=0.2042, simple_loss=0.2727, pruned_loss=0.06788, over 13595.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2927, pruned_loss=0.06619, over 2639336.92 frames. ], batch size: 33, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:32:59,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=128568.0, ans=0.2 +2024-08-03 13:33:04,014 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.537e+01 1.182e+02 1.366e+02 1.663e+02 2.762e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 13:33:17,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=128641.33333333333, ans=0.2 +2024-08-03 13:33:25,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=128678.0, ans=0.0 +2024-08-03 13:33:29,521 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.08 vs. 
limit=5.0 +2024-08-03 13:33:30,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.26 vs. limit=10.0 +2024-08-03 13:33:32,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=128714.66666666667, ans=0.2 +2024-08-03 13:33:33,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.58 vs. limit=12.0 +2024-08-03 13:33:39,783 INFO [train.py:1114] (1/4) Epoch 10, batch 2350, loss[loss=0.1884, simple_loss=0.2789, pruned_loss=0.04891, over 13542.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2924, pruned_loss=0.06595, over 2642032.28 frames. ], batch size: 38, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:33:49,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128751.33333333333, ans=0.0 +2024-08-03 13:34:13,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.89 vs. limit=15.0 +2024-08-03 13:34:14,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=128861.33333333333, ans=0.0 +2024-08-03 13:34:27,065 INFO [train.py:1114] (1/4) Epoch 10, batch 2400, loss[loss=0.182, simple_loss=0.2682, pruned_loss=0.04794, over 13531.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2927, pruned_loss=0.06598, over 2643580.82 frames. ], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:34:28,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=128934.66666666667, ans=0.025 +2024-08-03 13:34:38,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=128971.33333333333, ans=0.125 +2024-08-03 13:34:39,019 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.606e+01 1.219e+02 1.512e+02 2.010e+02 3.572e+02, threshold=3.023e+02, percent-clipped=5.0 +2024-08-03 13:34:46,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.75 vs. limit=15.0 +2024-08-03 13:34:54,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=129044.66666666667, ans=0.125 +2024-08-03 13:35:02,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129044.66666666667, ans=0.0 +2024-08-03 13:35:13,312 INFO [train.py:1114] (1/4) Epoch 10, batch 2450, loss[loss=0.2301, simple_loss=0.3144, pruned_loss=0.07295, over 13356.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2941, pruned_loss=0.06675, over 2632590.87 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:35:22,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.83 vs. limit=15.0 +2024-08-03 13:35:43,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.71 vs. 
limit=10.0
+2024-08-03 13:35:48,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=129228.0, ans=0.125
+2024-08-03 13:35:53,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=129264.66666666667, ans=0.0
+2024-08-03 13:35:56,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.74 vs. limit=22.5
+2024-08-03 13:36:00,203 INFO [train.py:1114] (1/4) Epoch 10, batch 2500, loss[loss=0.2125, simple_loss=0.3042, pruned_loss=0.06034, over 13413.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2936, pruned_loss=0.06643, over 2637083.01 frames. ], batch size: 39, lr: 1.24e-02, grad_scale: 32.0
+2024-08-03 13:36:00,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=129301.33333333333, ans=0.0
+2024-08-03 13:36:11,366 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.968e+01 1.203e+02 1.318e+02 1.532e+02 2.282e+02, threshold=2.635e+02, percent-clipped=0.0
+2024-08-03 13:36:15,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=129338.0, ans=0.5
+2024-08-03 13:36:15,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=129338.0, ans=0.025
+2024-08-03 13:36:43,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129448.0, ans=0.1
+2024-08-03 13:36:45,871 INFO [train.py:1114] (1/4) Epoch 10, batch 2550, loss[loss=0.206, simple_loss=0.2723, pruned_loss=0.06983, over 13521.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2935, pruned_loss=0.06656, over 2638144.27 frames. ], batch size: 31, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:36:47,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=129484.66666666667, ans=0.0
+2024-08-03 13:36:50,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=129484.66666666667, ans=0.0
+2024-08-03 13:37:02,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129558.0, ans=0.125
+2024-08-03 13:37:05,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=129558.0, ans=0.025
+2024-08-03 13:37:23,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=129631.33333333333, ans=0.05
+2024-08-03 13:37:24,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129631.33333333333, ans=0.125
+2024-08-03 13:37:29,295 INFO [train.py:1114] (1/4) Epoch 10, batch 2600, loss[loss=0.2125, simple_loss=0.2948, pruned_loss=0.06507, over 13575.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2947, pruned_loss=0.06726, over 2637990.09 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:37:30,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=129668.0, ans=0.125
+2024-08-03 13:37:40,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=129704.66666666667, ans=0.125
+2024-08-03 13:37:40,627 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.809e+01 1.167e+02 1.488e+02 1.878e+02 3.119e+02, threshold=2.976e+02, percent-clipped=4.0
+2024-08-03 13:37:44,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=129704.66666666667, ans=0.125
+2024-08-03 13:37:46,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=129741.33333333333, ans=0.07
+2024-08-03 13:37:49,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=129741.33333333333, ans=0.1
+2024-08-03 13:37:59,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=129778.0, ans=0.125
+2024-08-03 13:38:08,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=129814.66666666667, ans=0.0
+2024-08-03 13:38:10,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=15.0
+2024-08-03 13:38:13,140 INFO [train.py:1114] (1/4) Epoch 10, batch 2650, loss[loss=0.2548, simple_loss=0.3307, pruned_loss=0.08949, over 13329.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2955, pruned_loss=0.0676, over 2640172.67 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:38:17,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=129851.33333333333, ans=0.0
+2024-08-03 13:38:20,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.07 vs. limit=22.5
+2024-08-03 13:38:26,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=129888.0, ans=0.125
+2024-08-03 13:38:27,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=129888.0, ans=0.0
+2024-08-03 13:38:28,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=129888.0, ans=0.125
+2024-08-03 13:38:41,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=129961.33333333333, ans=0.125
+2024-08-03 13:38:43,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=129961.33333333333, ans=0.125
+2024-08-03 13:38:48,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=129998.0, ans=0.09899494936611666
+2024-08-03 13:38:50,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. limit=15.0
+2024-08-03 13:38:57,881 INFO [train.py:1114] (1/4) Epoch 10, batch 2700, loss[loss=0.1932, simple_loss=0.2826, pruned_loss=0.0519, over 13536.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2955, pruned_loss=0.06749, over 2636907.46 frames. ], batch size: 40, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:38:57,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=130034.66666666667, ans=0.125
+2024-08-03 13:39:00,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=130034.66666666667, ans=0.2
+2024-08-03 13:39:10,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.234e+02 1.365e+02 1.791e+02 3.628e+02, threshold=2.731e+02, percent-clipped=1.0
+2024-08-03 13:39:19,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=130108.0, ans=10.0
+2024-08-03 13:39:23,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.77 vs. limit=6.0
+2024-08-03 13:39:32,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.94 vs. limit=15.0
+2024-08-03 13:39:42,464 INFO [train.py:1114] (1/4) Epoch 10, batch 2750, loss[loss=0.2038, simple_loss=0.2813, pruned_loss=0.06317, over 13332.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2941, pruned_loss=0.06717, over 2633945.80 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 16.0
+2024-08-03 13:39:44,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=130218.0, ans=0.0
+2024-08-03 13:39:49,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.08 vs. limit=15.0
+2024-08-03 13:39:53,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=130254.66666666667, ans=0.09899494936611666
+2024-08-03 13:39:53,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130254.66666666667, ans=0.1
+2024-08-03 13:39:54,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.79 vs. limit=15.0
+2024-08-03 13:39:55,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130254.66666666667, ans=0.1
+2024-08-03 13:39:56,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. limit=10.0
+2024-08-03 13:40:02,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=130291.33333333333, ans=0.0
+2024-08-03 13:40:18,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130364.66666666667, ans=0.1
+2024-08-03 13:40:26,208 INFO [train.py:1114] (1/4) Epoch 10, batch 2800, loss[loss=0.2568, simple_loss=0.3245, pruned_loss=0.09453, over 9670.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.294, pruned_loss=0.06694, over 2626610.96 frames. ], batch size: 97, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:40:33,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=130401.33333333333, ans=0.125
+2024-08-03 13:40:38,302 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.179e+02 1.306e+02 1.650e+02 3.137e+02, threshold=2.611e+02, percent-clipped=1.0
+2024-08-03 13:40:41,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=130438.0, ans=0.125
+2024-08-03 13:40:43,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=130474.66666666667, ans=0.0
+2024-08-03 13:40:53,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=130511.33333333333, ans=0.0
+2024-08-03 13:40:54,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=130511.33333333333, ans=0.125
+2024-08-03 13:41:02,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=130548.0, ans=0.125
+2024-08-03 13:41:05,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=130548.0, ans=0.0
+2024-08-03 13:41:09,744 INFO [train.py:1114] (1/4) Epoch 10, batch 2850, loss[loss=0.1825, simple_loss=0.2686, pruned_loss=0.04814, over 13360.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2951, pruned_loss=0.06732, over 2621066.59 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:41:15,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=130584.66666666667, ans=0.125
+2024-08-03 13:41:18,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=130621.33333333333, ans=0.125
+2024-08-03 13:41:36,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=130694.66666666667, ans=0.125
+2024-08-03 13:41:40,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=130694.66666666667, ans=0.0
+2024-08-03 13:41:52,752 INFO [train.py:1114] (1/4) Epoch 10, batch 2900, loss[loss=0.1916, simple_loss=0.2753, pruned_loss=0.05395, over 13359.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2964, pruned_loss=0.06749, over 2632287.07 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:42:05,051 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.521e+01 1.253e+02 1.591e+02 2.001e+02 4.136e+02, threshold=3.182e+02, percent-clipped=6.0
+2024-08-03 13:42:16,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=130841.33333333333, ans=0.2
+2024-08-03 13:42:26,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=130878.0, ans=0.125
+2024-08-03 13:42:29,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=130914.66666666667, ans=0.0
+2024-08-03 13:42:31,462 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 13:42:36,429 INFO [train.py:1114] (1/4) Epoch 10, batch 2950, loss[loss=0.2022, simple_loss=0.2754, pruned_loss=0.06451, over 13347.00 frames. ], tot_loss[loss=0.215, simple_loss=0.295, pruned_loss=0.06754, over 2630491.11 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:42:55,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=130988.0, ans=0.2
+2024-08-03 13:42:55,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=130988.0, ans=0.125
+2024-08-03 13:43:28,870 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.29 vs. limit=15.0
+2024-08-03 13:43:37,098 INFO [train.py:1114] (1/4) Epoch 10, batch 3000, loss[loss=0.1857, simple_loss=0.273, pruned_loss=0.04924, over 13555.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2946, pruned_loss=0.06748, over 2629786.33 frames. ], batch size: 37, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:43:37,099 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 13:43:49,246 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2807, pruned_loss=0.03945, over 944034.00 frames.
+2024-08-03 13:43:49,248 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB
+2024-08-03 13:43:53,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131134.66666666666, ans=0.1
+2024-08-03 13:44:02,239 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.869e+01 1.161e+02 1.330e+02 1.562e+02 2.944e+02, threshold=2.661e+02, percent-clipped=0.0
+2024-08-03 13:44:10,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=131208.0, ans=0.0
+2024-08-03 13:44:22,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=131244.66666666666, ans=0.0
+2024-08-03 13:44:38,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=131281.33333333334, ans=0.025
+2024-08-03 13:44:43,931 INFO [train.py:1114] (1/4) Epoch 10, batch 3050, loss[loss=0.1713, simple_loss=0.2643, pruned_loss=0.03918, over 13534.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.295, pruned_loss=0.06733, over 2627015.41 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:44:44,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131318.0, ans=0.1
+2024-08-03 13:44:50,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131318.0, ans=0.1
+2024-08-03 13:44:52,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=131318.0, ans=0.025
+2024-08-03 13:44:56,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131354.66666666666, ans=0.1
+2024-08-03 13:44:57,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131354.66666666666, ans=0.125
+2024-08-03 13:45:01,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=131391.33333333334, ans=0.125
+2024-08-03 13:45:07,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=131391.33333333334, ans=0.0
+2024-08-03 13:45:22,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=131464.66666666666, ans=0.125
+2024-08-03 13:45:30,516 INFO [train.py:1114] (1/4) Epoch 10, batch 3100, loss[loss=0.2535, simple_loss=0.332, pruned_loss=0.08752, over 13328.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2951, pruned_loss=0.06718, over 2626747.96 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0
+2024-08-03 13:45:43,066 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.582e+01 1.186e+02 1.379e+02 1.722e+02 2.702e+02, threshold=2.757e+02, percent-clipped=2.0
+2024-08-03 13:45:49,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=131574.66666666666, ans=0.1
+2024-08-03 13:45:50,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=131574.66666666666, ans=0.2
+2024-08-03 13:45:55,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=131611.33333333334, ans=0.125
+2024-08-03 13:46:24,095 INFO [train.py:1114] (1/4) Epoch 10, batch 3150, loss[loss=0.2191, simple_loss=0.2991, pruned_loss=0.0696, over 13012.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2948, pruned_loss=0.06676, over 2628719.19 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 32.0
+2024-08-03 13:46:33,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=15.0
+2024-08-03 13:46:40,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=131721.33333333334, ans=0.125
+2024-08-03 13:46:46,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.52 vs. limit=22.5
+2024-08-03 13:46:47,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=131758.0, ans=0.0
+2024-08-03 13:46:48,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131758.0, ans=0.1
+2024-08-03 13:47:10,945 INFO [train.py:1114] (1/4) Epoch 10, batch 3200, loss[loss=0.2131, simple_loss=0.2908, pruned_loss=0.06772, over 13542.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2938, pruned_loss=0.06612, over 2634958.35 frames. ], batch size: 37, lr: 1.22e-02, grad_scale: 32.0
+2024-08-03 13:47:22,874 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.174e+02 1.311e+02 1.747e+02 3.069e+02, threshold=2.622e+02, percent-clipped=2.0
+2024-08-03 13:47:24,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=131904.66666666666, ans=0.0
+2024-08-03 13:47:26,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=131904.66666666666, ans=0.125
+2024-08-03 13:47:28,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131941.33333333334, ans=0.1
+2024-08-03 13:47:49,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=132014.66666666666, ans=0.025
+2024-08-03 13:47:55,718 INFO [train.py:1114] (1/4) Epoch 10, batch 3250, loss[loss=0.1814, simple_loss=0.2711, pruned_loss=0.04588, over 13419.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2939, pruned_loss=0.06572, over 2638551.93 frames. ], batch size: 38, lr: 1.22e-02, grad_scale: 32.0
+2024-08-03 13:47:56,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=132051.33333333334, ans=0.07
+2024-08-03 13:47:56,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=132051.33333333334, ans=0.0
+2024-08-03 13:47:59,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=15.0
+2024-08-03 13:48:06,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132088.0, ans=0.1
+2024-08-03 13:48:16,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=132124.66666666666, ans=0.2
+2024-08-03 13:48:16,268 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 13:48:23,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132161.33333333334, ans=0.1
+2024-08-03 13:48:31,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=132198.0, ans=0.2
+2024-08-03 13:48:37,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=132234.66666666666, ans=0.125
+2024-08-03 13:48:38,133 INFO [train.py:1114] (1/4) Epoch 10, batch 3300, loss[loss=0.2402, simple_loss=0.3159, pruned_loss=0.0822, over 12995.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2922, pruned_loss=0.06499, over 2640077.11 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:48:49,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=132271.33333333334, ans=0.2
+2024-08-03 13:48:50,933 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.252e+01 1.284e+02 1.634e+02 2.035e+02 3.075e+02, threshold=3.268e+02, percent-clipped=7.0
+2024-08-03 13:49:12,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=132381.33333333334, ans=0.1
+2024-08-03 13:49:14,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=132381.33333333334, ans=0.035
+2024-08-03 13:49:21,122 INFO [train.py:1114] (1/4) Epoch 10, batch 3350, loss[loss=0.2356, simple_loss=0.3199, pruned_loss=0.07568, over 13322.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2935, pruned_loss=0.06599, over 2630305.95 frames. ], batch size: 49, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:49:24,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=132418.0, ans=0.0
+2024-08-03 13:49:26,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0
+2024-08-03 13:49:28,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.67 vs. limit=22.5
+2024-08-03 13:49:29,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.58 vs. limit=22.5
+2024-08-03 13:49:30,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=132454.66666666666, ans=0.05
+2024-08-03 13:49:36,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.21 vs. limit=22.5
+2024-08-03 13:49:44,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=132491.33333333334, ans=0.0
+2024-08-03 13:49:58,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=132564.66666666666, ans=0.5
+2024-08-03 13:50:03,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=132601.33333333334, ans=0.125
+2024-08-03 13:50:03,845 INFO [train.py:1114] (1/4) Epoch 10, batch 3400, loss[loss=0.1863, simple_loss=0.2612, pruned_loss=0.05571, over 13545.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2938, pruned_loss=0.06631, over 2626289.62 frames. ], batch size: 31, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:50:08,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=15.0
+2024-08-03 13:50:16,690 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.511e+01 1.188e+02 1.377e+02 1.704e+02 3.995e+02, threshold=2.755e+02, percent-clipped=1.0
+2024-08-03 13:50:18,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132638.0, ans=0.1
+2024-08-03 13:50:39,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=132748.0, ans=0.125
+2024-08-03 13:50:39,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132748.0, ans=0.1
+2024-08-03 13:50:47,220 INFO [train.py:1114] (1/4) Epoch 10, batch 3450, loss[loss=0.2375, simple_loss=0.32, pruned_loss=0.07752, over 12927.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2934, pruned_loss=0.06594, over 2629711.97 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:51:02,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=132821.33333333334, ans=0.125
+2024-08-03 13:51:16,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=132894.66666666666, ans=0.0
+2024-08-03 13:51:16,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=132894.66666666666, ans=0.0
+2024-08-03 13:51:26,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132931.33333333334, ans=0.0
+2024-08-03 13:51:30,949 INFO [train.py:1114] (1/4) Epoch 10, batch 3500, loss[loss=0.1862, simple_loss=0.2672, pruned_loss=0.05255, over 13519.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2932, pruned_loss=0.06629, over 2630216.65 frames. ], batch size: 34, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:51:43,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=133004.66666666666, ans=0.125
+2024-08-03 13:51:43,631 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.579e+01 1.188e+02 1.502e+02 1.811e+02 2.689e+02, threshold=3.004e+02, percent-clipped=0.0
+2024-08-03 13:51:47,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133041.33333333334, ans=0.125
+2024-08-03 13:52:00,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=133078.0, ans=0.0
+2024-08-03 13:52:08,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=133114.66666666666, ans=0.0
+2024-08-03 13:52:10,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.01 vs. limit=22.5
+2024-08-03 13:52:13,034 INFO [train.py:1114] (1/4) Epoch 10, batch 3550, loss[loss=0.2243, simple_loss=0.3059, pruned_loss=0.07139, over 12873.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2951, pruned_loss=0.06719, over 2629101.46 frames. ], batch size: 59, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:52:24,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=133188.0, ans=0.125
+2024-08-03 13:52:38,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=133224.66666666666, ans=0.07
+2024-08-03 13:52:40,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=133261.33333333334, ans=0.025
+2024-08-03 13:52:41,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=133261.33333333334, ans=0.07
+2024-08-03 13:52:46,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133261.33333333334, ans=0.125
+2024-08-03 13:52:56,943 INFO [train.py:1114] (1/4) Epoch 10, batch 3600, loss[loss=0.2766, simple_loss=0.3366, pruned_loss=0.1083, over 8336.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3012, pruned_loss=0.0728, over 2488057.39 frames. ], batch size: 96, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:52:58,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=133334.66666666666, ans=0.0
+2024-08-03 13:53:10,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=133371.33333333334, ans=0.0
+2024-08-03 13:53:10,853 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.160e+02 1.272e+02 1.395e+02 1.858e+02, threshold=2.544e+02, percent-clipped=0.0
+2024-08-03 13:53:19,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=133408.0, ans=0.2
+2024-08-03 13:53:19,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=133408.0, ans=0.2
+2024-08-03 13:53:26,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=133444.66666666666, ans=0.0
+2024-08-03 13:55:05,757 INFO [train.py:1114] (1/4) Epoch 11, batch 0, loss[loss=0.2087, simple_loss=0.2879, pruned_loss=0.06475, over 13346.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2879, pruned_loss=0.06475, over 13346.00 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:55:05,758 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 13:55:17,610 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1876, simple_loss=0.2878, pruned_loss=0.04367, over 944034.00 frames.
+2024-08-03 13:55:17,611 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB
+2024-08-03 13:55:35,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=133554.66666666666, ans=0.0
+2024-08-03 13:55:42,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=12.0
+2024-08-03 13:55:44,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=133591.33333333334, ans=0.2
+2024-08-03 13:56:01,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=133628.0, ans=0.0
+2024-08-03 13:56:05,954 INFO [train.py:1114] (1/4) Epoch 11, batch 50, loss[loss=0.1837, simple_loss=0.2647, pruned_loss=0.05138, over 13424.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2981, pruned_loss=0.06899, over 577656.66 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:56:08,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=133664.66666666666, ans=0.125
+2024-08-03 13:56:13,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=133664.66666666666, ans=0.0
+2024-08-03 13:56:16,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=133701.33333333334, ans=0.125
+2024-08-03 13:56:18,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=133701.33333333334, ans=0.125
+2024-08-03 13:56:19,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.46 vs. limit=15.0
+2024-08-03 13:56:29,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=133738.0, ans=22.5
+2024-08-03 13:56:29,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=133738.0, ans=0.0
+2024-08-03 13:56:31,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.198e+02 1.313e+02 1.584e+02 3.827e+02, threshold=2.627e+02, percent-clipped=3.0
+2024-08-03 13:56:32,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133738.0, ans=0.125
+2024-08-03 13:56:41,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=133774.66666666666, ans=0.025
+2024-08-03 13:56:41,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=133774.66666666666, ans=0.2
+2024-08-03 13:56:53,152 INFO [train.py:1114] (1/4) Epoch 11, batch 100, loss[loss=0.1978, simple_loss=0.2777, pruned_loss=0.05891, over 13536.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2974, pruned_loss=0.06735, over 1025123.87 frames. ], batch size: 35, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:57:55,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.28 vs. limit=12.0
+2024-08-03 13:57:59,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.80 vs. limit=15.0
+2024-08-03 13:58:09,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=133994.66666666666, ans=0.125
+2024-08-03 13:58:10,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.04 vs. limit=15.0
+2024-08-03 13:58:13,268 INFO [train.py:1114] (1/4) Epoch 11, batch 150, loss[loss=0.1987, simple_loss=0.2742, pruned_loss=0.06159, over 13423.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2953, pruned_loss=0.06677, over 1386796.98 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:58:14,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.25 vs. limit=6.0
+2024-08-03 13:58:36,661 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.130e+02 1.367e+02 1.649e+02 2.945e+02, threshold=2.733e+02, percent-clipped=2.0
+2024-08-03 13:58:45,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=134141.33333333334, ans=0.125
+2024-08-03 13:58:45,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134141.33333333334, ans=0.1
+2024-08-03 13:58:51,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134178.0, ans=0.125
+2024-08-03 13:58:58,427 INFO [train.py:1114] (1/4) Epoch 11, batch 200, loss[loss=0.2336, simple_loss=0.316, pruned_loss=0.07559, over 12436.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2926, pruned_loss=0.06538, over 1665857.15 frames. ], batch size: 58, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:59:15,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=15.0
+2024-08-03 13:59:16,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.19 vs. limit=22.5
+2024-08-03 13:59:25,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=134288.0, ans=0.125
+2024-08-03 14:00:35,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134361.33333333334, ans=0.1
+2024-08-03 14:00:37,211 INFO [train.py:1114] (1/4) Epoch 11, batch 250, loss[loss=0.2017, simple_loss=0.2921, pruned_loss=0.0557, over 13339.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2921, pruned_loss=0.06481, over 1884219.79 frames. ], batch size: 46, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:00:48,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=134434.66666666666, ans=0.025
+2024-08-03 14:00:50,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.95 vs. limit=8.0
+2024-08-03 14:00:52,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=134434.66666666666, ans=0.125
+2024-08-03 14:00:53,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134434.66666666666, ans=0.125
+2024-08-03 14:01:00,736 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.508e+01 1.185e+02 1.387e+02 1.656e+02 4.049e+02, threshold=2.774e+02, percent-clipped=1.0
+2024-08-03 14:01:18,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134544.66666666666, ans=0.1
+2024-08-03 14:01:19,774 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:01:19,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134544.66666666666, ans=0.125
+2024-08-03 14:01:20,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=134544.66666666666, ans=0.125
+2024-08-03 14:01:25,007 INFO [train.py:1114] (1/4) Epoch 11, batch 300, loss[loss=0.2029, simple_loss=0.2884, pruned_loss=0.05869, over 13465.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2913, pruned_loss=0.06469, over 2051213.69 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:01:26,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=134581.33333333334, ans=0.1
+2024-08-03 14:01:32,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.11 vs. limit=15.0
+2024-08-03 14:01:50,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=134654.66666666666, ans=0.2
+2024-08-03 14:01:55,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134691.33333333334, ans=0.0
+2024-08-03 14:02:11,939 INFO [train.py:1114] (1/4) Epoch 11, batch 350, loss[loss=0.1968, simple_loss=0.2773, pruned_loss=0.05817, over 13562.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2914, pruned_loss=0.06471, over 2182156.99 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:02:24,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=134801.33333333334, ans=0.125
+2024-08-03 14:02:29,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134801.33333333334, ans=0.0
+2024-08-03 14:02:32,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=134838.0, ans=0.2
+2024-08-03 14:02:37,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.915e+01 1.230e+02 1.440e+02 1.763e+02 3.166e+02, threshold=2.879e+02, percent-clipped=2.0
+2024-08-03 14:02:41,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=134874.66666666666, ans=0.125
+2024-08-03 14:02:46,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=134874.66666666666, ans=0.2
+2024-08-03 14:02:48,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-08-03 14:02:52,589 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:02:58,880 INFO [train.py:1114] (1/4) Epoch 11, batch 400, loss[loss=0.2214, simple_loss=0.3028, pruned_loss=0.07003, over 13357.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2912, pruned_loss=0.06462, over 2286316.74 frames. ], batch size: 37, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:02:59,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=134948.0, ans=0.0
+2024-08-03 14:03:12,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.83 vs. limit=15.0
+2024-08-03 14:03:14,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=134984.66666666666, ans=0.2
+2024-08-03 14:03:15,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=134984.66666666666, ans=0.125
+2024-08-03 14:03:21,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=135021.33333333334, ans=0.0
+2024-08-03 14:03:35,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.37 vs. limit=15.0
+2024-08-03 14:03:43,647 INFO [train.py:1114] (1/4) Epoch 11, batch 450, loss[loss=0.2181, simple_loss=0.3065, pruned_loss=0.06481, over 13549.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2913, pruned_loss=0.06431, over 2360012.04 frames. ], batch size: 38, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:03:53,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=135168.0, ans=0.125
+2024-08-03 14:04:02,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=135204.66666666666, ans=0.2
+2024-08-03 14:04:08,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.113e+02 1.253e+02 1.576e+02 3.089e+02, threshold=2.506e+02, percent-clipped=1.0
+2024-08-03 14:04:19,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=135241.33333333334, ans=0.035
+2024-08-03 14:04:30,225 INFO [train.py:1114] (1/4) Epoch 11, batch 500, loss[loss=0.2354, simple_loss=0.3115, pruned_loss=0.0797, over 13398.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.291, pruned_loss=0.06412, over 2426135.11 frames. ], batch size: 43, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:04:31,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135314.66666666666, ans=0.125
+2024-08-03 14:04:37,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=135314.66666666666, ans=0.125
+2024-08-03 14:05:08,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=135461.33333333334, ans=0.0
+2024-08-03 14:05:11,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=135461.33333333334, ans=0.0
+2024-08-03 14:05:17,624 INFO [train.py:1114] (1/4) Epoch 11, batch 550, loss[loss=0.2458, simple_loss=0.3267, pruned_loss=0.08245, over 12995.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2914, pruned_loss=0.06445, over 2468131.39 frames. ], batch size: 48, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:05:19,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=135498.0, ans=0.125
+2024-08-03 14:05:24,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.99 vs. limit=15.0
+2024-08-03 14:05:44,377 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.929e+01 1.236e+02 1.527e+02 1.937e+02 2.923e+02, threshold=3.054e+02, percent-clipped=2.0
+2024-08-03 14:05:56,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135608.0, ans=0.1
+2024-08-03 14:06:06,065 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0
+2024-08-03 14:06:07,518 INFO [train.py:1114] (1/4) Epoch 11, batch 600, loss[loss=0.2359, simple_loss=0.3085, pruned_loss=0.08159, over 13325.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2907, pruned_loss=0.064, over 2508568.97 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:06:13,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=135681.33333333334, ans=0.02
+2024-08-03 14:06:18,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=135718.0, ans=0.025
+2024-08-03 14:06:48,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.30 vs. limit=22.5
+2024-08-03 14:06:52,172 INFO [train.py:1114] (1/4) Epoch 11, batch 650, loss[loss=0.1871, simple_loss=0.2777, pruned_loss=0.04828, over 13538.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2891, pruned_loss=0.06298, over 2544181.87 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:06:52,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=135864.66666666666, ans=0.2
+2024-08-03 14:06:53,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=135864.66666666666, ans=15.0
+2024-08-03 14:06:54,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.81 vs. limit=22.5
+2024-08-03 14:07:00,423 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:07:04,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=135901.33333333334, ans=0.125
+2024-08-03 14:07:04,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=135901.33333333334, ans=0.125
+2024-08-03 14:07:05,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=135901.33333333334, ans=0.0
+2024-08-03 14:07:16,372 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.852e+01 1.178e+02 1.481e+02 2.104e+02 3.972e+02, threshold=2.962e+02, percent-clipped=10.0
+2024-08-03 14:07:34,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0
+2024-08-03 14:07:39,638 INFO [train.py:1114] (1/4) Epoch 11, batch 700, loss[loss=0.2099, simple_loss=0.287, pruned_loss=0.06641, over 13523.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.29, pruned_loss=0.06337, over 2565481.64 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:07:45,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.53 vs. limit=22.5
+2024-08-03 14:08:01,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=136121.33333333334, ans=0.025
+2024-08-03 14:08:07,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.22 vs. limit=6.0
+2024-08-03 14:08:08,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136158.0, ans=0.125
+2024-08-03 14:08:08,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=136158.0, ans=0.2
+2024-08-03 14:08:26,738 INFO [train.py:1114] (1/4) Epoch 11, batch 750, loss[loss=0.2205, simple_loss=0.304, pruned_loss=0.06848, over 13356.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2899, pruned_loss=0.06358, over 2583260.84 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:08:32,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136231.33333333334, ans=0.125
+2024-08-03 14:08:40,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=136268.0, ans=15.0
+2024-08-03 14:08:43,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136268.0, ans=0.125
+2024-08-03 14:08:51,159 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.398e+01 1.179e+02 1.348e+02 1.712e+02 2.826e+02, threshold=2.695e+02, percent-clipped=0.0
+2024-08-03 14:08:54,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=136341.33333333334, ans=0.0
+2024-08-03 14:08:55,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=136341.33333333334, ans=0.125
+2024-08-03 14:09:03,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=136378.0, ans=0.125
+2024-08-03 14:09:06,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=136378.0, ans=0.0
+2024-08-03 14:09:10,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=136378.0, ans=0.0
+2024-08-03 14:09:12,257 INFO [train.py:1114] (1/4) Epoch 11, batch 800, loss[loss=0.2008, simple_loss=0.2811, pruned_loss=0.0603, over 13330.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2899, pruned_loss=0.06368, over 2597106.43 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:10:01,227 INFO [train.py:1114] (1/4) Epoch 11, batch 850, loss[loss=0.2165, simple_loss=0.3034, pruned_loss=0.06481, over 13310.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2893, pruned_loss=0.06364, over 2609810.29 frames. ], batch size: 40, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:10:04,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=136598.0, ans=0.125
+2024-08-03 14:10:26,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.536e+01 1.181e+02 1.335e+02 1.644e+02 2.754e+02, threshold=2.669e+02, percent-clipped=1.0
+2024-08-03 14:10:37,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=136744.66666666666, ans=0.05
+2024-08-03 14:10:38,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=136744.66666666666, ans=0.2
+2024-08-03 14:10:40,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=136744.66666666666, ans=0.0
+2024-08-03 14:10:46,279 INFO [train.py:1114] (1/4) Epoch 11, batch 900, loss[loss=0.198, simple_loss=0.2786, pruned_loss=0.05865, over 13338.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2896, pruned_loss=0.06381, over 2612445.34 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:10:57,317 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:11:01,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=136818.0, ans=0.125
+2024-08-03 14:11:07,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=136854.66666666666, ans=0.2
+2024-08-03 14:11:13,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=136854.66666666666, ans=0.0
+2024-08-03 14:11:33,344 INFO [train.py:1114] (1/4) Epoch 11, batch 950, loss[loss=0.1832, simple_loss=0.2682, pruned_loss=0.04906, over 13552.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.29, pruned_loss=0.06389, over 2612251.17 frames. ], batch size: 34, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:11:36,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=136964.66666666666, ans=0.125
+2024-08-03 14:11:50,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=137038.0, ans=0.025
+2024-08-03 14:11:51,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.26 vs. limit=12.0
+2024-08-03 14:11:51,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=137038.0, ans=0.125
+2024-08-03 14:11:51,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=137038.0, ans=0.0
+2024-08-03 14:11:54,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=137038.0, ans=0.025
+2024-08-03 14:12:02,581 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.938e+01 1.284e+02 1.520e+02 1.871e+02 3.091e+02, threshold=3.040e+02, percent-clipped=3.0
+2024-08-03 14:12:20,788 INFO [train.py:1114] (1/4) Epoch 11, batch 1000, loss[loss=0.1877, simple_loss=0.2686, pruned_loss=0.05336, over 13376.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2905, pruned_loss=0.06457, over 2610875.02 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:12:21,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=137148.0, ans=0.0
+2024-08-03 14:12:27,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=137148.0, ans=0.0
+2024-08-03 14:12:32,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.67 vs. limit=15.0
+2024-08-03 14:12:40,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=137221.33333333334, ans=0.015
+2024-08-03 14:12:54,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137258.0, ans=0.125
+2024-08-03 14:12:59,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=137294.66666666666, ans=0.2
+2024-08-03 14:13:04,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.80 vs. limit=15.0
+2024-08-03 14:13:05,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=137294.66666666666, ans=0.0
+2024-08-03 14:13:09,235 INFO [train.py:1114] (1/4) Epoch 11, batch 1050, loss[loss=0.2169, simple_loss=0.3057, pruned_loss=0.06406, over 13580.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2899, pruned_loss=0.06422, over 2615462.24 frames. ], batch size: 39, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:13:13,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137331.33333333334, ans=0.125
+2024-08-03 14:13:17,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=137368.0, ans=0.0
+2024-08-03 14:13:36,245 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.744e+01 1.143e+02 1.275e+02 1.569e+02 2.169e+02, threshold=2.550e+02, percent-clipped=0.0
+2024-08-03 14:13:54,633 INFO [train.py:1114] (1/4) Epoch 11, batch 1100, loss[loss=0.2077, simple_loss=0.2923, pruned_loss=0.0616, over 13556.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2901, pruned_loss=0.06382, over 2619889.99 frames. ], batch size: 36, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:13:55,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=137514.66666666666, ans=0.2
+2024-08-03 14:14:39,729 INFO [train.py:1114] (1/4) Epoch 11, batch 1150, loss[loss=0.2158, simple_loss=0.2961, pruned_loss=0.06776, over 13567.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2902, pruned_loss=0.06387, over 2618871.89 frames. ], batch size: 36, lr: 1.14e-02, grad_scale: 8.0
+2024-08-03 14:15:03,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=137771.33333333334, ans=0.125
+2024-08-03 14:15:09,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.634e+01 1.163e+02 1.278e+02 1.596e+02 2.243e+02, threshold=2.555e+02, percent-clipped=0.0
+2024-08-03 14:15:09,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137808.0, ans=0.125
+2024-08-03 14:15:11,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=137808.0, ans=0.125
+2024-08-03 14:15:16,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.21 vs. limit=22.5
+2024-08-03 14:15:25,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=137844.66666666666, ans=0.125
+2024-08-03 14:15:28,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137844.66666666666, ans=0.125
+2024-08-03 14:15:29,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=137881.33333333334, ans=0.05
+2024-08-03 14:15:29,799 INFO [train.py:1114] (1/4) Epoch 11, batch 1200, loss[loss=0.2418, simple_loss=0.323, pruned_loss=0.08033, over 13584.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2914, pruned_loss=0.06452, over 2616587.10 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:15:31,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137881.33333333334, ans=0.1
+2024-08-03 14:15:38,632 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.86 vs. limit=15.0
+2024-08-03 14:16:01,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=137991.33333333334, ans=0.125
+2024-08-03 14:16:05,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=138028.0, ans=0.125
+2024-08-03 14:16:08,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.21 vs. limit=15.0
+2024-08-03 14:16:12,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138028.0, ans=0.1
+2024-08-03 14:16:14,554 INFO [train.py:1114] (1/4) Epoch 11, batch 1250, loss[loss=0.2088, simple_loss=0.3017, pruned_loss=0.05801, over 13416.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.292, pruned_loss=0.06432, over 2628440.17 frames. ], batch size: 42, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:16:28,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=138101.33333333334, ans=0.0
+2024-08-03 14:16:37,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0
+2024-08-03 14:16:42,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138138.0, ans=0.125
+2024-08-03 14:16:45,546 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.814e+01 1.117e+02 1.380e+02 1.651e+02 4.437e+02, threshold=2.760e+02, percent-clipped=2.0
+2024-08-03 14:16:45,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138174.66666666666, ans=0.1
+2024-08-03 14:17:00,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=138211.33333333334, ans=0.125
+2024-08-03 14:17:03,498 INFO [train.py:1114] (1/4) Epoch 11, batch 1300, loss[loss=0.2169, simple_loss=0.2982, pruned_loss=0.06784, over 12838.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2916, pruned_loss=0.06423, over 2631294.29 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:17:10,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138248.0, ans=0.0
+2024-08-03 14:17:15,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138284.66666666666, ans=0.1
+2024-08-03 14:17:34,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.12 vs. limit=15.0
+2024-08-03 14:17:38,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=138358.0, ans=0.07
+2024-08-03 14:17:48,827 INFO [train.py:1114] (1/4) Epoch 11, batch 1350, loss[loss=0.221, simple_loss=0.2992, pruned_loss=0.07136, over 13546.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2908, pruned_loss=0.06407, over 2638892.70 frames. ], batch size: 37, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:17:49,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=138431.33333333334, ans=0.2
+2024-08-03 14:17:51,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=138431.33333333334, ans=0.02
+2024-08-03 14:18:00,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=138468.0, ans=0.07
+2024-08-03 14:18:01,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=138468.0, ans=0.125
+2024-08-03 14:18:15,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=138541.33333333334, ans=0.05
+2024-08-03 14:18:15,978 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.205e+02 1.432e+02 1.737e+02 2.785e+02, threshold=2.864e+02, percent-clipped=2.0
+2024-08-03 14:18:20,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.43 vs. limit=22.5
+2024-08-03 14:18:28,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=138578.0, ans=0.025
+2024-08-03 14:18:36,197 INFO [train.py:1114] (1/4) Epoch 11, batch 1400, loss[loss=0.1881, simple_loss=0.2597, pruned_loss=0.05824, over 13247.00 frames.
], tot_loss[loss=0.2095, simple_loss=0.2908, pruned_loss=0.06407, over 2642593.09 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:18:48,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=138651.33333333334, ans=0.0 +2024-08-03 14:19:13,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=138761.33333333334, ans=0.125 +2024-08-03 14:19:16,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=138761.33333333334, ans=0.2 +2024-08-03 14:19:23,023 INFO [train.py:1114] (1/4) Epoch 11, batch 1450, loss[loss=0.2269, simple_loss=0.3107, pruned_loss=0.07158, over 13430.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.292, pruned_loss=0.0646, over 2641673.99 frames. ], batch size: 43, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:19:32,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138834.66666666666, ans=0.125 +2024-08-03 14:19:49,931 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.153e+02 1.341e+02 1.677e+02 2.779e+02, threshold=2.682e+02, percent-clipped=0.0 +2024-08-03 14:19:58,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138908.0, ans=0.1 +2024-08-03 14:20:08,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0 +2024-08-03 14:20:11,557 INFO [train.py:1114] (1/4) Epoch 11, batch 1500, loss[loss=0.226, simple_loss=0.3076, pruned_loss=0.0722, over 13396.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2915, pruned_loss=0.06415, over 2642049.26 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:20:28,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.89 vs. limit=22.5 +2024-08-03 14:20:32,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138981.33333333334, ans=0.1 +2024-08-03 14:20:35,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=139018.0, ans=0.125 +2024-08-03 14:20:39,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=139018.0, ans=0.125 +2024-08-03 14:20:47,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139054.66666666666, ans=0.125 +2024-08-03 14:20:58,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=12.0 +2024-08-03 14:21:03,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=139128.0, ans=0.0 +2024-08-03 14:21:09,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=139128.0, ans=0.125 +2024-08-03 14:21:12,170 INFO [train.py:1114] (1/4) Epoch 11, batch 1550, loss[loss=0.2146, simple_loss=0.3021, pruned_loss=0.06354, over 13409.00 frames. 
], tot_loss[loss=0.2094, simple_loss=0.291, pruned_loss=0.06393, over 2631418.02 frames. ], batch size: 41, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:21:31,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=139238.0, ans=0.125 +2024-08-03 14:21:39,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.226e+01 1.184e+02 1.477e+02 1.893e+02 3.709e+02, threshold=2.955e+02, percent-clipped=6.0 +2024-08-03 14:21:49,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=139311.33333333334, ans=0.0 +2024-08-03 14:21:57,699 INFO [train.py:1114] (1/4) Epoch 11, batch 1600, loss[loss=0.2399, simple_loss=0.3192, pruned_loss=0.08026, over 13573.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2907, pruned_loss=0.06377, over 2623418.86 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:22:21,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=139421.33333333334, ans=0.125 +2024-08-03 14:22:23,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139421.33333333334, ans=0.1 +2024-08-03 14:22:24,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=139421.33333333334, ans=0.025 +2024-08-03 14:22:29,544 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:22:34,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=139458.0, ans=0.025 +2024-08-03 14:22:37,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=139494.66666666666, ans=0.2 +2024-08-03 14:22:47,787 INFO [train.py:1114] (1/4) Epoch 11, batch 1650, loss[loss=0.2131, simple_loss=0.3041, pruned_loss=0.06105, over 13317.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2906, pruned_loss=0.06411, over 2620493.06 frames. ], batch size: 40, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:23:14,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.757e+01 1.241e+02 1.421e+02 1.904e+02 3.771e+02, threshold=2.842e+02, percent-clipped=2.0 +2024-08-03 14:23:17,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=139641.33333333334, ans=0.0 +2024-08-03 14:23:20,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.29 vs. limit=15.0 +2024-08-03 14:23:26,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=139678.0, ans=0.125 +2024-08-03 14:23:32,953 INFO [train.py:1114] (1/4) Epoch 11, batch 1700, loss[loss=0.1742, simple_loss=0.2474, pruned_loss=0.05052, over 13284.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2903, pruned_loss=0.06402, over 2629193.34 frames. 
], batch size: 31, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:23:46,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=139751.33333333334, ans=0.125 +2024-08-03 14:24:16,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=139861.33333333334, ans=0.125 +2024-08-03 14:24:20,122 INFO [train.py:1114] (1/4) Epoch 11, batch 1750, loss[loss=0.1987, simple_loss=0.274, pruned_loss=0.06175, over 13548.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2901, pruned_loss=0.06398, over 2632973.88 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:24:33,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139934.66666666666, ans=0.125 +2024-08-03 14:24:39,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.23 vs. limit=15.0 +2024-08-03 14:24:43,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=139971.33333333334, ans=0.125 +2024-08-03 14:24:47,662 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.098e+01 1.175e+02 1.427e+02 2.048e+02 3.147e+02, threshold=2.855e+02, percent-clipped=3.0 +2024-08-03 14:24:49,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=140008.0, ans=0.2 +2024-08-03 14:24:55,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=140008.0, ans=0.0 +2024-08-03 14:25:06,039 INFO [train.py:1114] (1/4) Epoch 11, batch 1800, loss[loss=0.211, simple_loss=0.3039, pruned_loss=0.05901, over 13550.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2897, pruned_loss=0.06349, over 2633895.07 frames. ], batch size: 38, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:25:30,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=140118.0, ans=0.125 +2024-08-03 14:25:48,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140154.66666666666, ans=0.125 +2024-08-03 14:25:52,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140191.33333333334, ans=0.125 +2024-08-03 14:25:59,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140191.33333333334, ans=0.0 +2024-08-03 14:26:12,416 INFO [train.py:1114] (1/4) Epoch 11, batch 1850, loss[loss=0.2145, simple_loss=0.2927, pruned_loss=0.06816, over 13414.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2895, pruned_loss=0.06357, over 2636313.43 frames. 
], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:26:17,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=140264.66666666666, ans=0.2 +2024-08-03 14:26:18,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=140264.66666666666, ans=0.125 +2024-08-03 14:26:20,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140301.33333333334, ans=0.125 +2024-08-03 14:26:22,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=140301.33333333334, ans=0.02 +2024-08-03 14:26:23,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=140301.33333333334, ans=0.05 +2024-08-03 14:26:25,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=140301.33333333334, ans=0.125 +2024-08-03 14:26:40,995 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.650e+01 1.194e+02 1.468e+02 2.041e+02 3.479e+02, threshold=2.936e+02, percent-clipped=2.0 +2024-08-03 14:26:53,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=140411.33333333334, ans=0.0 +2024-08-03 14:27:02,736 INFO [train.py:1114] (1/4) Epoch 11, batch 1900, loss[loss=0.2331, simple_loss=0.3196, pruned_loss=0.07326, over 13318.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2906, pruned_loss=0.06387, over 2639031.38 frames. ], batch size: 40, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:27:04,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0 +2024-08-03 14:27:35,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140558.0, ans=0.125 +2024-08-03 14:27:46,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=140594.66666666666, ans=0.07 +2024-08-03 14:27:49,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140594.66666666666, ans=0.0 +2024-08-03 14:27:51,873 INFO [train.py:1114] (1/4) Epoch 11, batch 1950, loss[loss=0.2052, simple_loss=0.2823, pruned_loss=0.0641, over 13555.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2917, pruned_loss=0.06406, over 2646017.40 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:27:53,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=140631.33333333334, ans=0.025 +2024-08-03 14:27:54,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=12.0 +2024-08-03 14:27:54,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.84 vs. 
limit=15.0 +2024-08-03 14:27:57,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140631.33333333334, ans=0.1 +2024-08-03 14:28:16,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140704.66666666666, ans=0.125 +2024-08-03 14:28:19,126 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.848e+01 1.152e+02 1.250e+02 1.577e+02 2.279e+02, threshold=2.500e+02, percent-clipped=0.0 +2024-08-03 14:28:21,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=140741.33333333334, ans=0.025 +2024-08-03 14:28:32,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.26 vs. limit=15.0 +2024-08-03 14:28:33,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-08-03 14:28:37,495 INFO [train.py:1114] (1/4) Epoch 11, batch 2000, loss[loss=0.1839, simple_loss=0.264, pruned_loss=0.05194, over 13554.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2917, pruned_loss=0.06414, over 2635886.38 frames. ], batch size: 31, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:29:00,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0 +2024-08-03 14:29:04,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=140924.66666666666, ans=0.2 +2024-08-03 14:29:10,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.57 vs. limit=15.0 +2024-08-03 14:29:23,049 INFO [train.py:1114] (1/4) Epoch 11, batch 2050, loss[loss=0.1829, simple_loss=0.2552, pruned_loss=0.05526, over 13424.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2907, pruned_loss=0.06418, over 2632797.30 frames. ], batch size: 32, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:29:36,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. limit=6.0 +2024-08-03 14:29:40,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-08-03 14:29:45,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=141071.33333333334, ans=0.0 +2024-08-03 14:29:46,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=141071.33333333334, ans=0.125 +2024-08-03 14:29:52,685 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.175e+02 1.416e+02 1.784e+02 2.828e+02, threshold=2.832e+02, percent-clipped=4.0 +2024-08-03 14:30:02,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=141144.66666666666, ans=0.125 +2024-08-03 14:30:09,687 INFO [train.py:1114] (1/4) Epoch 11, batch 2100, loss[loss=0.2251, simple_loss=0.3007, pruned_loss=0.07472, over 13536.00 frames. 
], tot_loss[loss=0.2082, simple_loss=0.2897, pruned_loss=0.0633, over 2638751.78 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:30:11,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=141181.33333333334, ans=15.0 +2024-08-03 14:30:12,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.31 vs. limit=22.5 +2024-08-03 14:30:23,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=141218.0, ans=0.125 +2024-08-03 14:30:29,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=141254.66666666666, ans=0.2 +2024-08-03 14:30:33,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=141254.66666666666, ans=0.025 +2024-08-03 14:30:35,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141254.66666666666, ans=0.125 +2024-08-03 14:30:47,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-08-03 14:30:56,298 INFO [train.py:1114] (1/4) Epoch 11, batch 2150, loss[loss=0.1909, simple_loss=0.2806, pruned_loss=0.05061, over 13558.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2885, pruned_loss=0.06234, over 2647399.17 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:31:07,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=141401.33333333334, ans=0.025 +2024-08-03 14:31:27,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=141474.66666666666, ans=0.025 +2024-08-03 14:31:27,805 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.207e+02 1.412e+02 1.929e+02 3.002e+02, threshold=2.825e+02, percent-clipped=1.0 +2024-08-03 14:31:39,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=141511.33333333334, ans=0.0 +2024-08-03 14:31:39,761 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:31:45,119 INFO [train.py:1114] (1/4) Epoch 11, batch 2200, loss[loss=0.2145, simple_loss=0.2971, pruned_loss=0.06594, over 13384.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2886, pruned_loss=0.0625, over 2645217.82 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:31:52,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.36 vs. limit=10.0 +2024-08-03 14:31:55,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=141584.66666666666, ans=0.0 +2024-08-03 14:32:00,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=141584.66666666666, ans=0.2 +2024-08-03 14:32:08,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.58 vs. 
limit=15.0 +2024-08-03 14:32:11,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=141658.0, ans=0.125 +2024-08-03 14:32:13,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.72 vs. limit=15.0 +2024-08-03 14:32:26,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. limit=15.0 +2024-08-03 14:32:30,720 INFO [train.py:1114] (1/4) Epoch 11, batch 2250, loss[loss=0.182, simple_loss=0.2728, pruned_loss=0.04563, over 13351.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2884, pruned_loss=0.06245, over 2643132.50 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:32:36,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0 +2024-08-03 14:32:39,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=141768.0, ans=0.0 +2024-08-03 14:32:41,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=141768.0, ans=0.2 +2024-08-03 14:32:44,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.41 vs. limit=15.0 +2024-08-03 14:32:48,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=141804.66666666666, ans=0.0 +2024-08-03 14:32:51,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141804.66666666666, ans=0.1 +2024-08-03 14:32:58,698 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.445e+01 1.221e+02 1.460e+02 1.800e+02 3.358e+02, threshold=2.920e+02, percent-clipped=4.0 +2024-08-03 14:33:10,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.84 vs. limit=22.5 +2024-08-03 14:33:16,156 INFO [train.py:1114] (1/4) Epoch 11, batch 2300, loss[loss=0.1635, simple_loss=0.2441, pruned_loss=0.04143, over 13560.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2873, pruned_loss=0.06193, over 2638696.62 frames. ], batch size: 33, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:33:17,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141914.66666666666, ans=0.1 +2024-08-03 14:33:48,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=142024.66666666666, ans=0.2 +2024-08-03 14:34:01,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=142061.33333333334, ans=0.0 +2024-08-03 14:34:03,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142061.33333333334, ans=0.1 +2024-08-03 14:34:05,405 INFO [train.py:1114] (1/4) Epoch 11, batch 2350, loss[loss=0.2193, simple_loss=0.2986, pruned_loss=0.07, over 13561.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2874, pruned_loss=0.06192, over 2641160.97 frames. 
], batch size: 38, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:34:05,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142098.0, ans=0.1 +2024-08-03 14:34:06,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=142098.0, ans=0.125 +2024-08-03 14:34:14,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=142134.66666666666, ans=0.035 +2024-08-03 14:34:15,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=142134.66666666666, ans=0.2 +2024-08-03 14:34:27,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=142171.33333333334, ans=0.2 +2024-08-03 14:34:34,198 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.779e+01 1.150e+02 1.456e+02 1.792e+02 2.996e+02, threshold=2.912e+02, percent-clipped=1.0 +2024-08-03 14:34:37,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142208.0, ans=0.125 +2024-08-03 14:34:40,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.55 vs. limit=10.0 +2024-08-03 14:34:51,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=142244.66666666666, ans=0.2 +2024-08-03 14:34:55,294 INFO [train.py:1114] (1/4) Epoch 11, batch 2400, loss[loss=0.1875, simple_loss=0.2764, pruned_loss=0.04932, over 13535.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2891, pruned_loss=0.06269, over 2642451.39 frames. ], batch size: 35, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:34:55,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=15.0 +2024-08-03 14:34:58,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0 +2024-08-03 14:35:01,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.98 vs. limit=22.5 +2024-08-03 14:35:01,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. 
limit=6.0 +2024-08-03 14:35:08,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142318.0, ans=0.1 +2024-08-03 14:35:14,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142354.66666666666, ans=0.1 +2024-08-03 14:35:15,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142354.66666666666, ans=0.1 +2024-08-03 14:35:23,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=142391.33333333334, ans=15.0 +2024-08-03 14:35:30,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.68 vs. limit=15.0 +2024-08-03 14:35:40,808 INFO [train.py:1114] (1/4) Epoch 11, batch 2450, loss[loss=0.1963, simple_loss=0.2835, pruned_loss=0.05459, over 13350.00 frames. ], tot_loss[loss=0.209, simple_loss=0.291, pruned_loss=0.06349, over 2633220.93 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:35:49,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=142501.33333333334, ans=0.125 +2024-08-03 14:36:08,680 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.765e+01 1.220e+02 1.473e+02 1.922e+02 3.559e+02, threshold=2.946e+02, percent-clipped=1.0 +2024-08-03 14:36:25,796 INFO [train.py:1114] (1/4) Epoch 11, batch 2500, loss[loss=0.2297, simple_loss=0.315, pruned_loss=0.07216, over 13424.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2906, pruned_loss=0.06327, over 2636871.57 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:36:30,276 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:36:33,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=142684.66666666666, ans=0.125 +2024-08-03 14:36:36,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142684.66666666666, ans=0.1 +2024-08-03 14:36:39,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=142684.66666666666, ans=0.125 +2024-08-03 14:36:56,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-08-03 14:37:06,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=142794.66666666666, ans=0.125 +2024-08-03 14:37:07,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=142794.66666666666, ans=0.09899494936611666 +2024-08-03 14:37:09,695 INFO [train.py:1114] (1/4) Epoch 11, batch 2550, loss[loss=0.1639, simple_loss=0.242, pruned_loss=0.04285, over 13531.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2898, pruned_loss=0.06312, over 2638921.75 frames. 
], batch size: 31, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:37:11,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=142831.33333333334, ans=0.125 +2024-08-03 14:37:27,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142904.66666666666, ans=0.1 +2024-08-03 14:37:28,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=142904.66666666666, ans=0.0 +2024-08-03 14:37:33,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142904.66666666666, ans=0.1 +2024-08-03 14:37:35,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142941.33333333334, ans=0.125 +2024-08-03 14:37:35,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142941.33333333334, ans=0.0 +2024-08-03 14:37:36,293 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.364e+01 1.185e+02 1.436e+02 1.900e+02 4.163e+02, threshold=2.872e+02, percent-clipped=5.0 +2024-08-03 14:37:36,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=142941.33333333334, ans=0.125 +2024-08-03 14:37:41,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=142941.33333333334, ans=0.125 +2024-08-03 14:37:52,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=142978.0, ans=0.2 +2024-08-03 14:37:54,711 INFO [train.py:1114] (1/4) Epoch 11, batch 2600, loss[loss=0.2087, simple_loss=0.2849, pruned_loss=0.06623, over 13560.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2898, pruned_loss=0.06304, over 2638648.21 frames. ], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:37:56,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.53 vs. limit=12.0 +2024-08-03 14:37:57,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=143014.66666666666, ans=0.2 +2024-08-03 14:38:04,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=143051.33333333334, ans=0.07 +2024-08-03 14:38:16,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-08-03 14:38:28,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143161.33333333334, ans=0.125 +2024-08-03 14:38:38,297 INFO [train.py:1114] (1/4) Epoch 11, batch 2650, loss[loss=0.2219, simple_loss=0.3031, pruned_loss=0.0703, over 13313.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2902, pruned_loss=0.06328, over 2640469.88 frames. 
], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:38:47,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=143234.66666666666, ans=0.0 +2024-08-03 14:38:59,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=143271.33333333334, ans=0.2 +2024-08-03 14:39:05,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=143308.0, ans=0.0 +2024-08-03 14:39:07,480 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.944e+01 1.197e+02 1.327e+02 1.649e+02 2.749e+02, threshold=2.654e+02, percent-clipped=0.0 +2024-08-03 14:39:08,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=143308.0, ans=0.125 +2024-08-03 14:39:21,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143344.66666666666, ans=0.125 +2024-08-03 14:39:23,391 INFO [train.py:1114] (1/4) Epoch 11, batch 2700, loss[loss=0.2244, simple_loss=0.3053, pruned_loss=0.07173, over 13543.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2908, pruned_loss=0.06375, over 2637855.05 frames. ], batch size: 40, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:39:23,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=143381.33333333334, ans=0.0 +2024-08-03 14:39:26,268 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:39:27,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143381.33333333334, ans=0.125 +2024-08-03 14:39:38,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=143418.0, ans=0.125 +2024-08-03 14:39:56,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.75 vs. limit=15.0 +2024-08-03 14:40:04,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=143528.0, ans=0.2 +2024-08-03 14:40:06,617 INFO [train.py:1114] (1/4) Epoch 11, batch 2750, loss[loss=0.2168, simple_loss=0.2912, pruned_loss=0.07123, over 13347.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.29, pruned_loss=0.06345, over 2635961.23 frames. ], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:09,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143564.66666666666, ans=0.125 +2024-08-03 14:40:16,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143601.33333333334, ans=0.1 +2024-08-03 14:40:19,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.60 vs. 
limit=22.5 +2024-08-03 14:40:26,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=143638.0, ans=0.125 +2024-08-03 14:40:27,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=143638.0, ans=0.025 +2024-08-03 14:40:34,880 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.875e+01 1.140e+02 1.438e+02 1.760e+02 3.626e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 14:40:50,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=143748.0, ans=0.125 +2024-08-03 14:40:51,366 INFO [train.py:1114] (1/4) Epoch 11, batch 2800, loss[loss=0.2676, simple_loss=0.3265, pruned_loss=0.1044, over 9583.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2907, pruned_loss=0.06411, over 2627818.49 frames. ], batch size: 96, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:51,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143748.0, ans=0.125 +2024-08-03 14:41:02,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=143784.66666666666, ans=0.0 +2024-08-03 14:41:08,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=143821.33333333334, ans=0.0 +2024-08-03 14:41:15,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=143821.33333333334, ans=0.125 +2024-08-03 14:41:25,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=143858.0, ans=0.0 +2024-08-03 14:41:33,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=143894.66666666666, ans=0.05 +2024-08-03 14:41:36,389 INFO [train.py:1114] (1/4) Epoch 11, batch 2850, loss[loss=0.1943, simple_loss=0.2709, pruned_loss=0.05883, over 13371.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2912, pruned_loss=0.06444, over 2621104.00 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:41:48,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=143968.0, ans=0.125 +2024-08-03 14:41:50,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=143968.0, ans=0.04949747468305833 +2024-08-03 14:42:04,511 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.280e+01 1.169e+02 1.350e+02 1.770e+02 2.759e+02, threshold=2.700e+02, percent-clipped=0.0 +2024-08-03 14:42:08,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144041.33333333334, ans=0.1 +2024-08-03 14:42:17,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-08-03 14:42:19,096 INFO [train.py:1114] (1/4) Epoch 11, batch 2900, loss[loss=0.2198, simple_loss=0.2974, pruned_loss=0.07105, over 13373.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2921, pruned_loss=0.0642, over 2631987.17 frames. 
], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:42:23,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=15.0 +2024-08-03 14:42:29,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=144151.33333333334, ans=0.2 +2024-08-03 14:42:30,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.93 vs. limit=15.0 +2024-08-03 14:42:32,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.60 vs. limit=6.0 +2024-08-03 14:42:33,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=144151.33333333334, ans=0.015 +2024-08-03 14:42:43,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=144188.0, ans=0.015 +2024-08-03 14:42:51,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=144224.66666666666, ans=0.0 +2024-08-03 14:43:02,601 INFO [train.py:1114] (1/4) Epoch 11, batch 2950, loss[loss=0.1983, simple_loss=0.2793, pruned_loss=0.05862, over 13324.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2906, pruned_loss=0.06367, over 2630128.71 frames. ], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:02,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=144298.0, ans=0.0 +2024-08-03 14:43:22,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144371.33333333334, ans=0.1 +2024-08-03 14:43:27,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=144371.33333333334, ans=0.0 +2024-08-03 14:43:31,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.788e+01 1.243e+02 1.438e+02 2.009e+02 3.771e+02, threshold=2.877e+02, percent-clipped=8.0 +2024-08-03 14:43:33,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144408.0, ans=0.125 +2024-08-03 14:43:46,216 INFO [train.py:1114] (1/4) Epoch 11, batch 3000, loss[loss=0.2008, simple_loss=0.2937, pruned_loss=0.05394, over 13530.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2898, pruned_loss=0.06359, over 2630173.31 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:46,217 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 14:43:51,706 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.2717, 3.4478, 3.9748, 3.6980], device='cuda:1') +2024-08-03 14:43:56,412 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1797, simple_loss=0.2796, pruned_loss=0.03992, over 944034.00 frames. 
+2024-08-03 14:43:56,413 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 14:44:19,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=144554.66666666666, ans=0.125 +2024-08-03 14:44:29,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=144591.33333333334, ans=0.125 +2024-08-03 14:44:39,728 INFO [train.py:1114] (1/4) Epoch 11, batch 3050, loss[loss=0.1862, simple_loss=0.2714, pruned_loss=0.0505, over 13514.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.291, pruned_loss=0.06377, over 2627271.10 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:44:43,390 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:44:44,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=144664.66666666666, ans=0.035 +2024-08-03 14:45:08,191 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.491e+01 1.118e+02 1.274e+02 1.524e+02 2.549e+02, threshold=2.548e+02, percent-clipped=0.0 +2024-08-03 14:45:11,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.36 vs. limit=6.0 +2024-08-03 14:45:14,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0 +2024-08-03 14:45:20,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144811.33333333334, ans=0.1 +2024-08-03 14:45:22,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144848.0, ans=0.125 +2024-08-03 14:45:22,752 INFO [train.py:1114] (1/4) Epoch 11, batch 3100, loss[loss=0.2164, simple_loss=0.3033, pruned_loss=0.06475, over 13303.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2899, pruned_loss=0.06316, over 2627437.15 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:45:22,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144848.0, ans=0.1 +2024-08-03 14:45:25,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=144848.0, ans=0.125 +2024-08-03 14:45:32,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=144884.66666666666, ans=0.125 +2024-08-03 14:45:45,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=144921.33333333334, ans=0.09899494936611666 +2024-08-03 14:46:03,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=144994.66666666666, ans=0.125 +2024-08-03 14:46:05,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-08-03 14:46:07,302 INFO [train.py:1114] (1/4) Epoch 11, batch 3150, loss[loss=0.2378, simple_loss=0.3133, pruned_loss=0.08108, over 13053.00 frames. 
], tot_loss[loss=0.2081, simple_loss=0.2899, pruned_loss=0.06312, over 2629084.25 frames. ], batch size: 48, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:46:13,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.28 vs. limit=15.0 +2024-08-03 14:46:16,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=145068.0, ans=0.125 +2024-08-03 14:46:17,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-08-03 14:46:28,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.75 vs. limit=22.5 +2024-08-03 14:46:31,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=145104.66666666666, ans=0.5 +2024-08-03 14:46:34,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=145141.33333333334, ans=0.0 +2024-08-03 14:46:36,199 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.294e+01 1.169e+02 1.363e+02 1.667e+02 3.402e+02, threshold=2.726e+02, percent-clipped=3.0 +2024-08-03 14:46:36,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=12.0 +2024-08-03 14:46:47,066 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0 +2024-08-03 14:46:51,169 INFO [train.py:1114] (1/4) Epoch 11, batch 3200, loss[loss=0.2126, simple_loss=0.2964, pruned_loss=0.06438, over 13553.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.29, pruned_loss=0.06334, over 2635221.63 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:46:55,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0 +2024-08-03 14:47:07,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=145251.33333333334, ans=0.125 +2024-08-03 14:47:08,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145288.0, ans=0.125 +2024-08-03 14:47:35,717 INFO [train.py:1114] (1/4) Epoch 11, batch 3250, loss[loss=0.1943, simple_loss=0.2852, pruned_loss=0.05169, over 13397.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2903, pruned_loss=0.06343, over 2639678.86 frames. 
], batch size: 38, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:47:38,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=145398.0, ans=0.0
+2024-08-03 14:47:43,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=145434.66666666666, ans=0.0
+2024-08-03 14:47:50,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=145434.66666666666, ans=0.125
+2024-08-03 14:47:55,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.15 vs. limit=15.0
+2024-08-03 14:48:02,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=145508.0, ans=0.125
+2024-08-03 14:48:04,043 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.409e+01 1.172e+02 1.394e+02 1.962e+02 3.481e+02, threshold=2.788e+02, percent-clipped=6.0
+2024-08-03 14:48:16,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=145544.66666666666, ans=0.0
+2024-08-03 14:48:18,819 INFO [train.py:1114] (1/4) Epoch 11, batch 3300, loss[loss=0.2144, simple_loss=0.3009, pruned_loss=0.0639, over 12896.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2891, pruned_loss=0.06305, over 2640164.20 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:48:19,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.76 vs. limit=15.0
+2024-08-03 14:48:25,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=145581.33333333334, ans=0.125
+2024-08-03 14:48:45,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=145691.33333333334, ans=10.0
+2024-08-03 14:48:47,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145691.33333333334, ans=0.1
+2024-08-03 14:49:07,250 INFO [train.py:1114] (1/4) Epoch 11, batch 3350, loss[loss=0.2286, simple_loss=0.3082, pruned_loss=0.07446, over 13039.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2901, pruned_loss=0.0636, over 2630119.22 frames. ], batch size: 48, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:49:13,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.96 vs. limit=12.0
+2024-08-03 14:49:14,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145764.66666666666, ans=0.125
+2024-08-03 14:49:37,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=145838.0, ans=0.0
+2024-08-03 14:49:37,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.03 vs. limit=15.0
+2024-08-03 14:49:48,374 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.672e+01 1.163e+02 1.273e+02 1.475e+02 2.297e+02, threshold=2.547e+02, percent-clipped=0.0
+2024-08-03 14:50:42,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145911.33333333334, ans=0.1
+2024-08-03 14:50:46,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=145911.33333333334, ans=0.0
+2024-08-03 14:50:56,095 INFO [train.py:1114] (1/4) Epoch 11, batch 3400, loss[loss=0.1767, simple_loss=0.2589, pruned_loss=0.04723, over 13538.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2903, pruned_loss=0.06398, over 2625412.68 frames. ], batch size: 31, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:51:02,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.47 vs. limit=15.0
+2024-08-03 14:51:25,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.22 vs. limit=15.0
+2024-08-03 14:51:57,624 INFO [train.py:1114] (1/4) Epoch 11, batch 3450, loss[loss=0.2078, simple_loss=0.2923, pruned_loss=0.06166, over 12924.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2906, pruned_loss=0.06376, over 2628388.16 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:52:16,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=146204.66666666666, ans=0.0
+2024-08-03 14:52:19,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=146204.66666666666, ans=0.0
+2024-08-03 14:52:26,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146241.33333333334, ans=0.1
+2024-08-03 14:52:27,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.89 vs. limit=10.0
+2024-08-03 14:52:28,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.083e+01 1.190e+02 1.396e+02 1.805e+02 2.896e+02, threshold=2.793e+02, percent-clipped=1.0
+2024-08-03 14:52:31,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=146241.33333333334, ans=0.125
+2024-08-03 14:52:32,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146241.33333333334, ans=0.125
+2024-08-03 14:52:37,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=146278.0, ans=0.025
+2024-08-03 14:52:39,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=146278.0, ans=0.0
+2024-08-03 14:52:43,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146314.66666666666, ans=0.125
+2024-08-03 14:52:44,384 INFO [train.py:1114] (1/4) Epoch 11, batch 3500, loss[loss=0.1982, simple_loss=0.2778, pruned_loss=0.05934, over 13528.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2895, pruned_loss=0.0633, over 2630622.19 frames. ], batch size: 34, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:52:49,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=146314.66666666666, ans=0.0
+2024-08-03 14:53:49,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.94 vs. limit=15.0
+2024-08-03 14:53:51,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=146424.66666666666, ans=0.0
+2024-08-03 14:54:04,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.26 vs. limit=15.0
+2024-08-03 14:54:41,872 INFO [train.py:1114] (1/4) Epoch 11, batch 3550, loss[loss=0.1974, simple_loss=0.2809, pruned_loss=0.05698, over 12502.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2922, pruned_loss=0.06462, over 2628615.35 frames. ], batch size: 58, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:54:48,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=146498.0, ans=0.125
+2024-08-03 14:55:08,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.06 vs. limit=22.5
+2024-08-03 14:55:11,613 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.206e+02 1.319e+02 1.556e+02 2.603e+02, threshold=2.638e+02, percent-clipped=0.0
+2024-08-03 14:55:12,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.57 vs. limit=15.0
+2024-08-03 14:55:16,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=17.45 vs. limit=15.0
+2024-08-03 14:55:28,463 INFO [train.py:1114] (1/4) Epoch 11, batch 3600, loss[loss=0.2342, simple_loss=0.3049, pruned_loss=0.08179, over 9116.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2967, pruned_loss=0.06894, over 2489323.64 frames. ], batch size: 96, lr: 1.11e-02, grad_scale: 32.0
+2024-08-03 14:55:51,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=146754.66666666666, ans=0.2
+2024-08-03 14:55:54,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=146791.33333333334, ans=0.05
+2024-08-03 14:55:57,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=146791.33333333334, ans=0.125
+2024-08-03 14:56:58,474 INFO [train.py:1114] (1/4) Epoch 12, batch 0, loss[loss=0.2022, simple_loss=0.2766, pruned_loss=0.06388, over 13354.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2766, pruned_loss=0.06388, over 13354.00 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 32.0
+2024-08-03 14:56:58,535 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 14:57:05,347 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.3768, 2.6374, 2.4866, 1.8620], device='cuda:1')
+2024-08-03 14:57:09,820 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.1815, simple_loss=0.2827, pruned_loss=0.04015, over 944034.00 frames.
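The `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...` entries throughout this log record an adaptive gradient-clipping step: the optimizer keeps a history of recent global gradient norms, logs their quartiles (min, 25%, median, 75%, max), and clips against a threshold derived from the median. The logged numbers are consistent with threshold = Clipping_scale × median (for example 2.788e+02 = 2.0 × 1.394e+02 in the first warning of this excerpt). The Python sketch below reconstructs that bookkeeping under those assumptions; the class name `QuartileGradClipper`, the window length of 200 steps, and the percent-clipped accounting are illustrative guesses, not the actual `optim.py` used for this run.

```python
# Hypothetical reconstruction of the quartile-based gradient clipping
# behind log lines such as:
#   WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles
#   9.409e+01 1.172e+02 1.394e+02 1.962e+02 3.481e+02,
#   threshold=2.788e+02, percent-clipped=6.0
# Assumptions: a sliding window of recent global grad norms, and
# threshold = clipping_scale * median (which the logged values satisfy).
import torch


class QuartileGradClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 200):
        self.clipping_scale = clipping_scale
        self.window = window          # how many recent norms to remember
        self.norms: list[float] = []  # history of global grad norms
        self.num_steps = 0
        self.num_clipped = 0

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        # Global L2 norm over all gradients for this step.
        norm = torch.norm(
            torch.stack([p.grad.detach().norm(2) for p in params]), 2
        ).item()
        self.norms = (self.norms + [norm])[-self.window:]
        self.num_steps += 1

        # Quartiles (min, 25%, median, 75%, max) of the recent norms.
        q = torch.quantile(
            torch.tensor(self.norms),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * q[2].item()  # scale * median
        if norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)  # rescale onto the threshold

        pct = 100.0 * self.num_clipped / self.num_steps
        quartiles = " ".join(f"{v:.3e}" for v in q.tolist())
        print(
            f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
            f"{quartiles}, threshold={threshold:.3e}, percent-clipped={pct:.1f}"
        )
        return norm
```

A clipper like this would be invoked once per step between `loss.backward()` and `optimizer.step()`, e.g. `clipper.clip_(model.parameters())`. Because the threshold tracks the median of recent norms rather than a fixed constant, it adapts to the current scale of the gradients, which matches how the logged thresholds fluctuate in a narrow band (roughly 2.5e+02 to 3.1e+02 across this stretch of training) while outlier steps, such as the one with max quartile 4.499e+02 against threshold 2.942e+02, still get clipped.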
+2024-08-03 14:57:09,821 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 14:57:11,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.25 vs. limit=15.0 +2024-08-03 14:57:23,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146864.66666666666, ans=0.125 +2024-08-03 14:57:49,739 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.252e+02 1.419e+02 1.568e+02 2.905e+02, threshold=2.838e+02, percent-clipped=2.0 +2024-08-03 14:57:54,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=147011.33333333334, ans=0.125 +2024-08-03 14:57:55,160 INFO [train.py:1114] (1/4) Epoch 12, batch 50, loss[loss=0.1941, simple_loss=0.2733, pruned_loss=0.05743, over 13435.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2914, pruned_loss=0.06378, over 579913.18 frames. ], batch size: 32, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:58:04,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=147048.0, ans=0.125 +2024-08-03 14:58:07,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=147048.0, ans=0.125 +2024-08-03 14:58:09,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=147048.0, ans=0.2 +2024-08-03 14:58:20,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=147084.66666666666, ans=0.125 +2024-08-03 14:58:21,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=147121.33333333334, ans=0.0 +2024-08-03 14:58:38,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=147158.0, ans=0.0 +2024-08-03 14:58:40,614 INFO [train.py:1114] (1/4) Epoch 12, batch 100, loss[loss=0.2124, simple_loss=0.2897, pruned_loss=0.06757, over 13530.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2924, pruned_loss=0.06317, over 1026969.90 frames. 
], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:58:41,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147194.66666666666, ans=0.1 +2024-08-03 14:58:44,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=147194.66666666666, ans=0.2 +2024-08-03 14:59:04,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=147268.0, ans=0.0 +2024-08-03 14:59:15,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=147304.66666666666, ans=0.125 +2024-08-03 14:59:17,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=147304.66666666666, ans=0.125 +2024-08-03 14:59:17,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=147304.66666666666, ans=0.125 +2024-08-03 14:59:22,268 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.865e+01 1.262e+02 1.549e+02 1.868e+02 3.478e+02, threshold=3.099e+02, percent-clipped=1.0 +2024-08-03 14:59:22,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=147341.33333333334, ans=0.2 +2024-08-03 14:59:27,469 INFO [train.py:1114] (1/4) Epoch 12, batch 150, loss[loss=0.1848, simple_loss=0.2595, pruned_loss=0.05505, over 13409.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2884, pruned_loss=0.0611, over 1387697.39 frames. ], batch size: 32, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:59:29,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0 +2024-08-03 14:59:48,496 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:59:59,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=147488.0, ans=0.0 +2024-08-03 15:00:08,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147524.66666666666, ans=0.0 +2024-08-03 15:00:10,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=147524.66666666666, ans=0.0 +2024-08-03 15:00:17,997 INFO [train.py:1114] (1/4) Epoch 12, batch 200, loss[loss=0.1906, simple_loss=0.279, pruned_loss=0.0511, over 12409.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2862, pruned_loss=0.06019, over 1665655.39 frames. 
], batch size: 58, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:00:20,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147561.33333333334, ans=0.125 +2024-08-03 15:00:47,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=147671.33333333334, ans=0.07 +2024-08-03 15:00:57,968 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.496e+01 1.132e+02 1.278e+02 1.609e+02 2.884e+02, threshold=2.557e+02, percent-clipped=0.0 +2024-08-03 15:00:59,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=147708.0, ans=0.125 +2024-08-03 15:01:02,538 INFO [train.py:1114] (1/4) Epoch 12, batch 250, loss[loss=0.2109, simple_loss=0.2927, pruned_loss=0.06454, over 13293.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2864, pruned_loss=0.06046, over 1884993.01 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:01:04,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=147744.66666666666, ans=0.0 +2024-08-03 15:01:05,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=147744.66666666666, ans=0.125 +2024-08-03 15:01:08,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.70 vs. limit=6.0 +2024-08-03 15:01:17,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=147781.33333333334, ans=0.125 +2024-08-03 15:01:28,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=147854.66666666666, ans=0.0 +2024-08-03 15:01:29,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=147854.66666666666, ans=0.0 +2024-08-03 15:01:34,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=147854.66666666666, ans=0.125 +2024-08-03 15:01:41,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=147891.33333333334, ans=0.07 +2024-08-03 15:01:48,776 INFO [train.py:1114] (1/4) Epoch 12, batch 300, loss[loss=0.2095, simple_loss=0.2959, pruned_loss=0.06158, over 13427.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2855, pruned_loss=0.06005, over 2052149.61 frames. 
], batch size: 42, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:02:02,595 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:02:02,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=147964.66666666666, ans=0.2 +2024-08-03 15:02:18,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=148038.0, ans=0.0 +2024-08-03 15:02:24,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=148074.66666666666, ans=0.125 +2024-08-03 15:02:29,620 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.695e+01 1.130e+02 1.266e+02 1.669e+02 3.180e+02, threshold=2.531e+02, percent-clipped=2.0 +2024-08-03 15:02:29,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148074.66666666666, ans=0.125 +2024-08-03 15:02:29,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=148074.66666666666, ans=0.125 +2024-08-03 15:02:32,610 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:02:34,181 INFO [train.py:1114] (1/4) Epoch 12, batch 350, loss[loss=0.1864, simple_loss=0.2642, pruned_loss=0.05431, over 13571.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2864, pruned_loss=0.06067, over 2183279.56 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:02:37,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=148111.33333333334, ans=10.0 +2024-08-03 15:02:38,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=148111.33333333334, ans=0.0 +2024-08-03 15:02:51,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=148148.0, ans=0.2 +2024-08-03 15:02:55,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-08-03 15:03:05,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=148221.33333333334, ans=0.2 +2024-08-03 15:03:07,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=148221.33333333334, ans=0.0 +2024-08-03 15:03:13,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148221.33333333334, ans=0.125 +2024-08-03 15:03:16,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=148258.0, ans=0.0 +2024-08-03 15:03:23,952 INFO [train.py:1114] (1/4) Epoch 12, batch 400, loss[loss=0.2329, simple_loss=0.3151, pruned_loss=0.07535, over 13360.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2862, pruned_loss=0.06044, over 2287196.22 frames. 
], batch size: 37, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 15:03:30,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-08-03 15:03:57,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=148404.66666666666, ans=0.0 +2024-08-03 15:04:10,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.063e+01 1.155e+02 1.349e+02 1.691e+02 2.771e+02, threshold=2.698e+02, percent-clipped=3.0 +2024-08-03 15:04:30,842 INFO [train.py:1114] (1/4) Epoch 12, batch 450, loss[loss=0.1962, simple_loss=0.2889, pruned_loss=0.05176, over 13548.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2853, pruned_loss=0.05992, over 2361478.59 frames. ], batch size: 38, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:04:33,877 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:04:41,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=148514.66666666666, ans=0.0 +2024-08-03 15:05:15,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=148661.33333333334, ans=0.1 +2024-08-03 15:05:16,104 INFO [train.py:1114] (1/4) Epoch 12, batch 500, loss[loss=0.1973, simple_loss=0.2883, pruned_loss=0.05312, over 13420.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.285, pruned_loss=0.06016, over 2426095.45 frames. ], batch size: 43, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:05:41,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=148734.66666666666, ans=0.125 +2024-08-03 15:05:43,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-08-03 15:05:59,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.174e+02 1.357e+02 1.973e+02 3.338e+02, threshold=2.713e+02, percent-clipped=6.0 +2024-08-03 15:06:00,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.83 vs. limit=15.0 +2024-08-03 15:06:02,955 INFO [train.py:1114] (1/4) Epoch 12, batch 550, loss[loss=0.2392, simple_loss=0.3228, pruned_loss=0.07781, over 13020.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2851, pruned_loss=0.06021, over 2468196.18 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:06:03,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-08-03 15:06:14,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=148881.33333333334, ans=0.125 +2024-08-03 15:06:14,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=148881.33333333334, ans=0.0 +2024-08-03 15:06:23,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. 
limit=15.0 +2024-08-03 15:06:30,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148954.66666666666, ans=0.1 +2024-08-03 15:06:36,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=148954.66666666666, ans=0.0 +2024-08-03 15:06:45,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=6.0 +2024-08-03 15:06:50,049 INFO [train.py:1114] (1/4) Epoch 12, batch 600, loss[loss=0.2168, simple_loss=0.3022, pruned_loss=0.06566, over 13292.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2847, pruned_loss=0.06005, over 2508005.01 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:06:53,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.97 vs. limit=22.5 +2024-08-03 15:06:54,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=149028.0, ans=0.125 +2024-08-03 15:07:08,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=149064.66666666666, ans=0.125 +2024-08-03 15:07:10,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149101.33333333334, ans=0.1 +2024-08-03 15:07:14,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-08-03 15:07:22,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149138.0, ans=0.125 +2024-08-03 15:07:24,127 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:07:33,068 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.645e+01 1.231e+02 1.471e+02 1.906e+02 4.499e+02, threshold=2.942e+02, percent-clipped=14.0 +2024-08-03 15:07:33,316 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:07:35,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.76 vs. limit=15.0 +2024-08-03 15:07:36,525 INFO [train.py:1114] (1/4) Epoch 12, batch 650, loss[loss=0.1859, simple_loss=0.2779, pruned_loss=0.04691, over 13539.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2846, pruned_loss=0.0596, over 2542870.86 frames. 
], batch size: 37, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:07:44,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149211.33333333334, ans=0.0 +2024-08-03 15:07:44,484 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:07:46,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=149211.33333333334, ans=0.125 +2024-08-03 15:07:47,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149248.0, ans=0.125 +2024-08-03 15:07:52,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149248.0, ans=0.1 +2024-08-03 15:07:59,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149284.66666666666, ans=0.1 +2024-08-03 15:08:21,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=149358.0, ans=0.04949747468305833 +2024-08-03 15:08:22,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=149358.0, ans=0.125 +2024-08-03 15:08:23,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149358.0, ans=0.125 +2024-08-03 15:08:24,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=149358.0, ans=0.05 +2024-08-03 15:08:26,284 INFO [train.py:1114] (1/4) Epoch 12, batch 700, loss[loss=0.1952, simple_loss=0.2725, pruned_loss=0.05893, over 13547.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2848, pruned_loss=0.0596, over 2564567.41 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:08:43,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149468.0, ans=0.1 +2024-08-03 15:09:07,553 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.082e+01 1.154e+02 1.315e+02 1.690e+02 3.404e+02, threshold=2.630e+02, percent-clipped=2.0 +2024-08-03 15:09:11,177 INFO [train.py:1114] (1/4) Epoch 12, batch 750, loss[loss=0.1849, simple_loss=0.2735, pruned_loss=0.0482, over 13355.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2847, pruned_loss=0.05963, over 2581784.53 frames. 
], batch size: 37, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:09:20,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=149614.66666666666, ans=0.0 +2024-08-03 15:09:23,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=149614.66666666666, ans=0.125 +2024-08-03 15:09:23,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=149614.66666666666, ans=0.125 +2024-08-03 15:09:25,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149614.66666666666, ans=0.125 +2024-08-03 15:09:39,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=149688.0, ans=0.07 +2024-08-03 15:09:51,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=149724.66666666666, ans=0.125 +2024-08-03 15:09:56,674 INFO [train.py:1114] (1/4) Epoch 12, batch 800, loss[loss=0.2052, simple_loss=0.2843, pruned_loss=0.06305, over 13340.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2854, pruned_loss=0.05982, over 2595720.02 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:09:59,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=149761.33333333334, ans=0.2 +2024-08-03 15:10:02,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.16 vs. limit=22.5 +2024-08-03 15:10:04,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=149798.0, ans=0.125 +2024-08-03 15:10:15,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=149834.66666666666, ans=0.025 +2024-08-03 15:10:16,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149834.66666666666, ans=0.125 +2024-08-03 15:10:21,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=149834.66666666666, ans=0.125 +2024-08-03 15:10:27,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.97 vs. limit=22.5 +2024-08-03 15:10:32,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.49 vs. limit=6.0 +2024-08-03 15:10:39,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. limit=8.0 +2024-08-03 15:10:41,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.822e+01 1.144e+02 1.285e+02 1.607e+02 2.448e+02, threshold=2.570e+02, percent-clipped=0.0 +2024-08-03 15:10:42,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=149908.0, ans=0.2 +2024-08-03 15:10:45,392 INFO [train.py:1114] (1/4) Epoch 12, batch 850, loss[loss=0.2109, simple_loss=0.3009, pruned_loss=0.06045, over 13320.00 frames. 
], tot_loss[loss=0.2024, simple_loss=0.2852, pruned_loss=0.05987, over 2608675.32 frames. ], batch size: 40, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:10:51,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=149944.66666666666, ans=0.0 +2024-08-03 15:10:54,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0 +2024-08-03 15:11:01,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=149981.33333333334, ans=0.125 +2024-08-03 15:11:06,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=150018.0, ans=0.125 +2024-08-03 15:11:11,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=150054.66666666666, ans=0.07 +2024-08-03 15:11:26,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=150091.33333333334, ans=0.2 +2024-08-03 15:11:34,312 INFO [train.py:1114] (1/4) Epoch 12, batch 900, loss[loss=0.1875, simple_loss=0.2626, pruned_loss=0.05625, over 13363.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2857, pruned_loss=0.06015, over 2610735.37 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:11:38,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.43 vs. limit=12.0 +2024-08-03 15:11:57,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=12.0 +2024-08-03 15:12:15,737 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.906e+01 1.165e+02 1.316e+02 1.823e+02 3.379e+02, threshold=2.632e+02, percent-clipped=3.0 +2024-08-03 15:12:19,460 INFO [train.py:1114] (1/4) Epoch 12, batch 950, loss[loss=0.1804, simple_loss=0.2636, pruned_loss=0.04857, over 13515.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2865, pruned_loss=0.06036, over 2611495.49 frames. ], batch size: 34, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:12:23,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=150311.33333333334, ans=0.125 +2024-08-03 15:12:25,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150311.33333333334, ans=0.1 +2024-08-03 15:12:43,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=150384.66666666666, ans=10.0 +2024-08-03 15:12:48,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150421.33333333334, ans=0.125 +2024-08-03 15:12:58,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. 
limit=6.0 +2024-08-03 15:13:01,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=150458.0, ans=0.0 +2024-08-03 15:13:05,464 INFO [train.py:1114] (1/4) Epoch 12, batch 1000, loss[loss=0.1987, simple_loss=0.2858, pruned_loss=0.05577, over 13367.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2871, pruned_loss=0.06078, over 2610407.11 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:13:13,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=150494.66666666666, ans=0.125 +2024-08-03 15:13:13,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=150494.66666666666, ans=0.125 +2024-08-03 15:13:24,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150568.0, ans=0.125 +2024-08-03 15:13:38,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150604.66666666666, ans=0.0 +2024-08-03 15:13:47,785 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.724e+01 1.124e+02 1.317e+02 1.509e+02 2.289e+02, threshold=2.634e+02, percent-clipped=0.0 +2024-08-03 15:13:48,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=150641.33333333334, ans=0.2 +2024-08-03 15:13:52,460 INFO [train.py:1114] (1/4) Epoch 12, batch 1050, loss[loss=0.1949, simple_loss=0.2888, pruned_loss=0.05054, over 13575.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2859, pruned_loss=0.0601, over 2615412.25 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:14:09,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.56 vs. limit=15.0 +2024-08-03 15:14:19,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.13 vs. limit=15.0 +2024-08-03 15:14:28,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=150788.0, ans=0.125 +2024-08-03 15:14:39,481 INFO [train.py:1114] (1/4) Epoch 12, batch 1100, loss[loss=0.1924, simple_loss=0.2793, pruned_loss=0.05277, over 13562.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2856, pruned_loss=0.05991, over 2619237.54 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:14:44,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=150861.33333333334, ans=0.0 +2024-08-03 15:14:55,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=150898.0, ans=0.125 +2024-08-03 15:15:03,506 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.28 vs. 
limit=15.0 +2024-08-03 15:15:12,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=150971.33333333334, ans=0.125 +2024-08-03 15:15:13,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=150971.33333333334, ans=0.2 +2024-08-03 15:15:23,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=151008.0, ans=0.125 +2024-08-03 15:15:26,422 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.180e+02 1.359e+02 1.591e+02 2.320e+02, threshold=2.719e+02, percent-clipped=0.0 +2024-08-03 15:15:29,093 INFO [train.py:1114] (1/4) Epoch 12, batch 1150, loss[loss=0.2024, simple_loss=0.287, pruned_loss=0.05891, over 13538.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2858, pruned_loss=0.06039, over 2618597.83 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:15:33,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=151044.66666666666, ans=0.0 +2024-08-03 15:15:36,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=151044.66666666666, ans=0.125 +2024-08-03 15:15:39,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.67 vs. limit=15.0 +2024-08-03 15:15:42,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151081.33333333334, ans=0.1 +2024-08-03 15:15:45,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=151081.33333333334, ans=0.0 +2024-08-03 15:15:47,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=151118.0, ans=0.025 +2024-08-03 15:15:51,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=151118.0, ans=0.0 +2024-08-03 15:15:59,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.95 vs. limit=10.0 +2024-08-03 15:16:00,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151154.66666666666, ans=0.0 +2024-08-03 15:16:11,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151191.33333333334, ans=0.125 +2024-08-03 15:16:15,299 INFO [train.py:1114] (1/4) Epoch 12, batch 1200, loss[loss=0.2077, simple_loss=0.2912, pruned_loss=0.06211, over 13573.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.287, pruned_loss=0.06073, over 2614750.05 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:16:58,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.940e+01 1.145e+02 1.336e+02 1.664e+02 3.085e+02, threshold=2.672e+02, percent-clipped=3.0 +2024-08-03 15:17:00,263 INFO [train.py:1114] (1/4) Epoch 12, batch 1250, loss[loss=0.2317, simple_loss=0.316, pruned_loss=0.07375, over 13467.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2873, pruned_loss=0.06072, over 2626541.04 frames. 
], batch size: 42, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:17:11,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.88 vs. limit=12.0 +2024-08-03 15:17:14,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=151448.0, ans=0.125 +2024-08-03 15:17:26,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.45 vs. limit=22.5 +2024-08-03 15:17:27,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151484.66666666666, ans=0.125 +2024-08-03 15:17:36,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151521.33333333334, ans=0.1 +2024-08-03 15:17:49,356 INFO [train.py:1114] (1/4) Epoch 12, batch 1300, loss[loss=0.1843, simple_loss=0.2763, pruned_loss=0.04615, over 12876.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2871, pruned_loss=0.06066, over 2629322.86 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:18:02,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=151631.33333333334, ans=0.5 +2024-08-03 15:18:07,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.94 vs. limit=15.0 +2024-08-03 15:18:34,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.477e+01 1.244e+02 1.482e+02 1.823e+02 3.057e+02, threshold=2.965e+02, percent-clipped=1.0 +2024-08-03 15:18:36,562 INFO [train.py:1114] (1/4) Epoch 12, batch 1350, loss[loss=0.1815, simple_loss=0.2646, pruned_loss=0.04923, over 13545.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2857, pruned_loss=0.05991, over 2636787.83 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:18:41,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=151778.0, ans=0.07 +2024-08-03 15:18:53,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=151814.66666666666, ans=0.125 +2024-08-03 15:18:57,747 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:19:00,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=151851.33333333334, ans=0.05 +2024-08-03 15:19:00,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=151851.33333333334, ans=0.025 +2024-08-03 15:19:16,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151924.66666666666, ans=0.0 +2024-08-03 15:19:22,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.17 vs. limit=15.0 +2024-08-03 15:19:23,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. 
limit=15.0 +2024-08-03 15:19:25,329 INFO [train.py:1114] (1/4) Epoch 12, batch 1400, loss[loss=0.1772, simple_loss=0.2502, pruned_loss=0.05212, over 13244.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2855, pruned_loss=0.05999, over 2641250.55 frames. ], batch size: 31, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:19:25,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151961.33333333334, ans=0.1 +2024-08-03 15:19:26,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=15.0 +2024-08-03 15:19:30,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151961.33333333334, ans=0.0 +2024-08-03 15:19:35,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151998.0, ans=0.1 +2024-08-03 15:19:38,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=151998.0, ans=0.125 +2024-08-03 15:20:03,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152108.0, ans=0.125 +2024-08-03 15:20:04,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=152108.0, ans=0.2 +2024-08-03 15:20:09,507 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.408e+01 1.177e+02 1.443e+02 1.884e+02 3.508e+02, threshold=2.887e+02, percent-clipped=1.0 +2024-08-03 15:20:11,317 INFO [train.py:1114] (1/4) Epoch 12, batch 1450, loss[loss=0.2138, simple_loss=0.295, pruned_loss=0.06632, over 13412.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2859, pruned_loss=0.06045, over 2640015.54 frames. ], batch size: 43, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:20:14,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152144.66666666666, ans=0.1 +2024-08-03 15:20:21,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.26 vs. limit=15.0 +2024-08-03 15:20:41,331 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:20:43,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=152254.66666666666, ans=0.125 +2024-08-03 15:20:56,411 INFO [train.py:1114] (1/4) Epoch 12, batch 1500, loss[loss=0.2229, simple_loss=0.3115, pruned_loss=0.06714, over 13397.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2864, pruned_loss=0.06057, over 2640475.49 frames. 
], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:21:14,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=152364.66666666666, ans=0.125 +2024-08-03 15:21:19,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=152401.33333333334, ans=0.0 +2024-08-03 15:21:21,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=152401.33333333334, ans=0.0 +2024-08-03 15:21:43,773 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.575e+01 1.302e+02 1.536e+02 1.991e+02 2.999e+02, threshold=3.072e+02, percent-clipped=1.0 +2024-08-03 15:21:45,761 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:21:47,446 INFO [train.py:1114] (1/4) Epoch 12, batch 1550, loss[loss=0.2008, simple_loss=0.2846, pruned_loss=0.0585, over 13421.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2866, pruned_loss=0.06071, over 2630044.88 frames. ], batch size: 41, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:22:07,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=152548.0, ans=0.125 +2024-08-03 15:22:09,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152584.66666666666, ans=0.125 +2024-08-03 15:22:36,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=152658.0, ans=0.0 +2024-08-03 15:22:37,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=152658.0, ans=0.125 +2024-08-03 15:22:38,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=152694.66666666666, ans=0.125 +2024-08-03 15:22:38,643 INFO [train.py:1114] (1/4) Epoch 12, batch 1600, loss[loss=0.2145, simple_loss=0.3043, pruned_loss=0.06237, over 13574.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2867, pruned_loss=0.0612, over 2622812.94 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 32.0 +2024-08-03 15:22:50,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-08-03 15:22:54,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152731.33333333334, ans=0.1 +2024-08-03 15:22:56,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=152768.0, ans=0.125 +2024-08-03 15:23:14,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.36 vs. limit=15.0 +2024-08-03 15:23:16,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. 
limit=15.0 +2024-08-03 15:23:20,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=152841.33333333334, ans=0.125 +2024-08-03 15:23:20,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=152841.33333333334, ans=0.125 +2024-08-03 15:23:22,361 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.191e+02 1.409e+02 1.604e+02 3.528e+02, threshold=2.818e+02, percent-clipped=1.0 +2024-08-03 15:23:24,248 INFO [train.py:1114] (1/4) Epoch 12, batch 1650, loss[loss=0.1867, simple_loss=0.2857, pruned_loss=0.04383, over 13335.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2863, pruned_loss=0.06077, over 2620755.39 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 32.0 +2024-08-03 15:23:27,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152878.0, ans=0.1 +2024-08-03 15:23:34,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152914.66666666666, ans=0.1 +2024-08-03 15:23:37,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=152914.66666666666, ans=0.125 +2024-08-03 15:23:45,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=152951.33333333334, ans=0.0 +2024-08-03 15:23:55,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=152988.0, ans=0.125 +2024-08-03 15:23:56,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=152988.0, ans=0.125 +2024-08-03 15:24:09,368 INFO [train.py:1114] (1/4) Epoch 12, batch 1700, loss[loss=0.2015, simple_loss=0.2689, pruned_loss=0.06698, over 13237.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2857, pruned_loss=0.06034, over 2629792.00 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 32.0 +2024-08-03 15:24:27,813 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:24:55,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=153208.0, ans=0.125 +2024-08-03 15:24:56,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=153208.0, ans=0.0 +2024-08-03 15:24:57,292 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.203e+01 1.265e+02 1.510e+02 1.884e+02 3.458e+02, threshold=3.019e+02, percent-clipped=4.0 +2024-08-03 15:24:58,242 INFO [train.py:1114] (1/4) Epoch 12, batch 1750, loss[loss=0.1771, simple_loss=0.2536, pruned_loss=0.05027, over 13564.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2861, pruned_loss=0.06083, over 2633781.78 frames. 
], batch size: 31, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:25:02,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=153244.66666666666, ans=0.125 +2024-08-03 15:25:07,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=153281.33333333334, ans=0.0 +2024-08-03 15:25:09,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=153281.33333333334, ans=0.125 +2024-08-03 15:25:24,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=153318.0, ans=0.2 +2024-08-03 15:25:25,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=153354.66666666666, ans=0.0 +2024-08-03 15:25:45,812 INFO [train.py:1114] (1/4) Epoch 12, batch 1800, loss[loss=0.2162, simple_loss=0.2977, pruned_loss=0.06736, over 13554.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2858, pruned_loss=0.06049, over 2634499.08 frames. ], batch size: 38, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:26:05,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-08-03 15:26:10,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=153501.33333333334, ans=0.0 +2024-08-03 15:26:10,375 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:26:13,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.22 vs. limit=10.0 +2024-08-03 15:26:22,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=153538.0, ans=0.2 +2024-08-03 15:26:23,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=153574.66666666666, ans=0.5 +2024-08-03 15:26:26,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=153574.66666666666, ans=0.025 +2024-08-03 15:26:31,783 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.247e+01 1.215e+02 1.564e+02 1.986e+02 3.414e+02, threshold=3.127e+02, percent-clipped=2.0 +2024-08-03 15:26:32,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=153611.33333333334, ans=0.0 +2024-08-03 15:26:32,685 INFO [train.py:1114] (1/4) Epoch 12, batch 1850, loss[loss=0.2023, simple_loss=0.2869, pruned_loss=0.05887, over 13407.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2856, pruned_loss=0.06038, over 2637774.54 frames. 
], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:27:00,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=153721.33333333334, ans=0.125 +2024-08-03 15:27:08,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=153758.0, ans=0.125 +2024-08-03 15:27:11,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=153758.0, ans=0.125 +2024-08-03 15:27:17,665 INFO [train.py:1114] (1/4) Epoch 12, batch 1900, loss[loss=0.2152, simple_loss=0.3031, pruned_loss=0.06367, over 13328.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2864, pruned_loss=0.06068, over 2640532.89 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:27:19,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=153794.66666666666, ans=0.1 +2024-08-03 15:27:20,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=153794.66666666666, ans=0.125 +2024-08-03 15:27:24,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153794.66666666666, ans=0.125 +2024-08-03 15:27:25,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153794.66666666666, ans=0.1 +2024-08-03 15:27:25,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=153831.33333333334, ans=0.025 +2024-08-03 15:27:36,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=12.0 +2024-08-03 15:27:58,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=153941.33333333334, ans=0.05 +2024-08-03 15:28:02,822 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.586e+01 1.133e+02 1.329e+02 1.671e+02 4.322e+02, threshold=2.659e+02, percent-clipped=4.0 +2024-08-03 15:28:02,859 INFO [train.py:1114] (1/4) Epoch 12, batch 1950, loss[loss=0.1794, simple_loss=0.2719, pruned_loss=0.04346, over 13554.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2868, pruned_loss=0.06036, over 2647204.97 frames. ], batch size: 36, lr: 1.04e-02, grad_scale: 8.0 +2024-08-03 15:28:03,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153978.0, ans=0.1 +2024-08-03 15:28:30,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=154051.33333333334, ans=0.2 +2024-08-03 15:28:42,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=154124.66666666666, ans=0.025 +2024-08-03 15:28:45,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=154124.66666666666, ans=0.025 +2024-08-03 15:28:48,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.86 vs. 
limit=22.5 +2024-08-03 15:28:49,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154124.66666666666, ans=0.1 +2024-08-03 15:28:51,857 INFO [train.py:1114] (1/4) Epoch 12, batch 2000, loss[loss=0.1847, simple_loss=0.2658, pruned_loss=0.05175, over 13539.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2871, pruned_loss=0.06069, over 2635579.53 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:28:54,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=22.5 +2024-08-03 15:29:15,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=154234.66666666666, ans=0.025 +2024-08-03 15:29:17,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154234.66666666666, ans=0.1 +2024-08-03 15:29:40,805 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.317e+01 1.213e+02 1.428e+02 1.743e+02 2.865e+02, threshold=2.857e+02, percent-clipped=1.0 +2024-08-03 15:29:40,842 INFO [train.py:1114] (1/4) Epoch 12, batch 2050, loss[loss=0.1752, simple_loss=0.2581, pruned_loss=0.04619, over 13404.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2866, pruned_loss=0.06081, over 2632099.51 frames. ], batch size: 32, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:29:54,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=154381.33333333334, ans=0.0 +2024-08-03 15:29:57,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=154381.33333333334, ans=0.0 +2024-08-03 15:29:58,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=154418.0, ans=0.125 +2024-08-03 15:29:59,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=154418.0, ans=0.125 +2024-08-03 15:30:06,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154454.66666666666, ans=0.1 +2024-08-03 15:30:07,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=154454.66666666666, ans=0.0 +2024-08-03 15:30:25,476 INFO [train.py:1114] (1/4) Epoch 12, batch 2100, loss[loss=0.1975, simple_loss=0.2807, pruned_loss=0.05713, over 13554.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2859, pruned_loss=0.0603, over 2637340.06 frames. ], batch size: 37, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:30:44,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=154601.33333333334, ans=0.125 +2024-08-03 15:31:10,330 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.387e+01 1.104e+02 1.282e+02 1.725e+02 3.211e+02, threshold=2.564e+02, percent-clipped=3.0 +2024-08-03 15:31:10,367 INFO [train.py:1114] (1/4) Epoch 12, batch 2150, loss[loss=0.1961, simple_loss=0.2749, pruned_loss=0.05859, over 13564.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2854, pruned_loss=0.06002, over 2645980.34 frames. 
], batch size: 36, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:31:25,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=12.0 +2024-08-03 15:31:26,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154748.0, ans=0.125 +2024-08-03 15:31:40,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154821.33333333334, ans=0.1 +2024-08-03 15:31:40,260 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:31:44,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.53 vs. limit=12.0 +2024-08-03 15:31:59,005 INFO [train.py:1114] (1/4) Epoch 12, batch 2200, loss[loss=0.1993, simple_loss=0.2878, pruned_loss=0.05542, over 13401.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2846, pruned_loss=0.05937, over 2643856.37 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:32:18,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=154968.0, ans=0.0 +2024-08-03 15:32:19,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=154968.0, ans=0.125 +2024-08-03 15:32:24,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0 +2024-08-03 15:32:33,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=155004.66666666666, ans=0.125 +2024-08-03 15:32:41,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.77 vs. limit=12.0 +2024-08-03 15:32:46,541 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.202e+02 1.524e+02 1.986e+02 3.191e+02, threshold=3.048e+02, percent-clipped=7.0 +2024-08-03 15:32:46,578 INFO [train.py:1114] (1/4) Epoch 12, batch 2250, loss[loss=0.1697, simple_loss=0.263, pruned_loss=0.0382, over 13346.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2842, pruned_loss=0.05925, over 2641254.80 frames. ], batch size: 37, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:32:49,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=155078.0, ans=0.2 +2024-08-03 15:33:07,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155151.33333333334, ans=0.1 +2024-08-03 15:33:08,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. 
limit=15.0 +2024-08-03 15:33:19,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155188.0, ans=0.125 +2024-08-03 15:33:32,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=155224.66666666666, ans=0.125 +2024-08-03 15:33:32,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=155224.66666666666, ans=0.025 +2024-08-03 15:33:34,529 INFO [train.py:1114] (1/4) Epoch 12, batch 2300, loss[loss=0.2082, simple_loss=0.2806, pruned_loss=0.06794, over 13573.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2839, pruned_loss=0.05957, over 2636299.54 frames. ], batch size: 33, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:33:51,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-08-03 15:34:10,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=155408.0, ans=0.2 +2024-08-03 15:34:19,893 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.892e+01 1.184e+02 1.344e+02 1.744e+02 3.184e+02, threshold=2.689e+02, percent-clipped=1.0 +2024-08-03 15:34:19,930 INFO [train.py:1114] (1/4) Epoch 12, batch 2350, loss[loss=0.2198, simple_loss=0.3062, pruned_loss=0.06667, over 13546.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2845, pruned_loss=0.05991, over 2639475.01 frames. ], batch size: 38, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:34:27,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=155444.66666666666, ans=0.0 +2024-08-03 15:34:33,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=155481.33333333334, ans=0.125 +2024-08-03 15:34:46,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=155554.66666666666, ans=0.2 +2024-08-03 15:34:51,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=155554.66666666666, ans=0.0 +2024-08-03 15:34:55,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=155591.33333333334, ans=0.125 +2024-08-03 15:35:05,407 INFO [train.py:1114] (1/4) Epoch 12, batch 2400, loss[loss=0.172, simple_loss=0.2585, pruned_loss=0.04274, over 13532.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2856, pruned_loss=0.06033, over 2641207.99 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-08-03 15:35:06,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=12.0 +2024-08-03 15:35:25,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155664.66666666666, ans=0.125 +2024-08-03 15:35:32,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.62 vs. 
limit=15.0 +2024-08-03 15:35:41,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=155738.0, ans=0.2 +2024-08-03 15:35:46,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.31 vs. limit=22.5 +2024-08-03 15:35:47,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=155774.66666666666, ans=0.125 +2024-08-03 15:35:54,431 INFO [train.py:1114] (1/4) Epoch 12, batch 2450, loss[loss=0.2041, simple_loss=0.2897, pruned_loss=0.05924, over 13356.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2868, pruned_loss=0.06101, over 2632511.43 frames. ], batch size: 37, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:35:55,283 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.374e+01 1.163e+02 1.350e+02 1.845e+02 2.920e+02, threshold=2.699e+02, percent-clipped=1.0 +2024-08-03 15:36:01,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=155811.33333333334, ans=0.0 +2024-08-03 15:36:06,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=155848.0, ans=0.025 +2024-08-03 15:36:33,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=155958.0, ans=22.5 +2024-08-03 15:36:36,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=155958.0, ans=0.125 +2024-08-03 15:36:41,778 INFO [train.py:1114] (1/4) Epoch 12, batch 2500, loss[loss=0.1969, simple_loss=0.2859, pruned_loss=0.05395, over 13402.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2866, pruned_loss=0.06038, over 2636302.81 frames. ], batch size: 39, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:37:00,347 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:37:11,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156104.66666666666, ans=0.125 +2024-08-03 15:37:19,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.01 vs. limit=15.0 +2024-08-03 15:37:20,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156141.33333333334, ans=0.1 +2024-08-03 15:37:27,539 INFO [train.py:1114] (1/4) Epoch 12, batch 2550, loss[loss=0.166, simple_loss=0.2451, pruned_loss=0.0434, over 13545.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2866, pruned_loss=0.06059, over 2638747.94 frames. 
], batch size: 31, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:37:28,349 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.903e+01 1.161e+02 1.332e+02 1.717e+02 3.575e+02, threshold=2.664e+02, percent-clipped=6.0 +2024-08-03 15:37:28,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=156178.0, ans=0.125 +2024-08-03 15:37:28,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=156178.0, ans=0.125 +2024-08-03 15:37:50,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.33 vs. limit=22.5 +2024-08-03 15:38:08,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.35 vs. limit=10.0 +2024-08-03 15:38:12,943 INFO [train.py:1114] (1/4) Epoch 12, batch 2600, loss[loss=0.2173, simple_loss=0.2934, pruned_loss=0.07062, over 13559.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.287, pruned_loss=0.06079, over 2639021.05 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:38:14,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156361.33333333334, ans=0.125 +2024-08-03 15:38:15,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=156361.33333333334, ans=0.125 +2024-08-03 15:38:24,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=156398.0, ans=0.07 +2024-08-03 15:38:29,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=156434.66666666666, ans=0.0 +2024-08-03 15:38:30,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=156434.66666666666, ans=0.2 +2024-08-03 15:38:37,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=156434.66666666666, ans=0.0 +2024-08-03 15:38:52,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=156508.0, ans=0.2 +2024-08-03 15:38:56,391 INFO [train.py:1114] (1/4) Epoch 12, batch 2650, loss[loss=0.2111, simple_loss=0.3017, pruned_loss=0.06019, over 13301.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2878, pruned_loss=0.06113, over 2641990.30 frames. 
], batch size: 46, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:38:56,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=156544.66666666666, ans=0.125 +2024-08-03 15:38:57,261 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.176e+02 1.444e+02 1.768e+02 3.309e+02, threshold=2.888e+02, percent-clipped=8.0 +2024-08-03 15:39:10,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=156581.33333333334, ans=0.2 +2024-08-03 15:39:16,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=156618.0, ans=0.0 +2024-08-03 15:39:22,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=156654.66666666666, ans=0.125 +2024-08-03 15:39:34,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156691.33333333334, ans=0.125 +2024-08-03 15:39:40,091 INFO [train.py:1114] (1/4) Epoch 12, batch 2700, loss[loss=0.211, simple_loss=0.2999, pruned_loss=0.06111, over 13556.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2877, pruned_loss=0.0609, over 2638703.55 frames. ], batch size: 40, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:39:49,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=156764.66666666666, ans=0.125 +2024-08-03 15:39:53,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=156764.66666666666, ans=0.0 +2024-08-03 15:39:56,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=156801.33333333334, ans=0.125 +2024-08-03 15:40:08,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=156838.0, ans=0.125 +2024-08-03 15:40:10,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.88 vs. limit=15.0 +2024-08-03 15:40:13,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=156874.66666666666, ans=0.0 +2024-08-03 15:40:23,013 INFO [train.py:1114] (1/4) Epoch 12, batch 2750, loss[loss=0.1903, simple_loss=0.2747, pruned_loss=0.05299, over 13318.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2859, pruned_loss=0.06014, over 2636619.57 frames. 
], batch size: 34, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:40:23,800 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.506e+01 1.130e+02 1.309e+02 1.569e+02 2.980e+02, threshold=2.619e+02, percent-clipped=1.0 +2024-08-03 15:40:38,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=156948.0, ans=0.0 +2024-08-03 15:40:52,558 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.461e-02 +2024-08-03 15:40:52,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=157021.33333333334, ans=0.0 +2024-08-03 15:40:52,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=157021.33333333334, ans=0.2 +2024-08-03 15:40:53,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=157021.33333333334, ans=0.125 +2024-08-03 15:40:59,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.96 vs. limit=15.0 +2024-08-03 15:41:01,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=157058.0, ans=0.125 +2024-08-03 15:41:03,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=157058.0, ans=0.0 +2024-08-03 15:41:06,520 INFO [train.py:1114] (1/4) Epoch 12, batch 2800, loss[loss=0.2414, simple_loss=0.3137, pruned_loss=0.08458, over 8909.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2862, pruned_loss=0.06043, over 2627603.97 frames. ], batch size: 97, lr: 1.03e-02, grad_scale: 32.0 +2024-08-03 15:41:16,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=157131.33333333334, ans=0.125 +2024-08-03 15:41:17,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.34 vs. limit=10.0 +2024-08-03 15:41:22,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=157131.33333333334, ans=0.0 +2024-08-03 15:41:27,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.85 vs. limit=15.0 +2024-08-03 15:41:43,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=157241.33333333334, ans=0.0 +2024-08-03 15:41:48,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=157278.0, ans=0.05 +2024-08-03 15:41:49,747 INFO [train.py:1114] (1/4) Epoch 12, batch 2850, loss[loss=0.174, simple_loss=0.2638, pruned_loss=0.04211, over 13365.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2866, pruned_loss=0.06068, over 2620174.02 frames. 
], batch size: 35, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:41:51,424 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.668e+01 1.162e+02 1.250e+02 1.511e+02 3.589e+02, threshold=2.501e+02, percent-clipped=1.0 +2024-08-03 15:41:53,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0 +2024-08-03 15:41:59,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=157314.66666666666, ans=0.2 +2024-08-03 15:42:34,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157424.66666666666, ans=0.1 +2024-08-03 15:42:35,662 INFO [train.py:1114] (1/4) Epoch 12, batch 2900, loss[loss=0.1944, simple_loss=0.2789, pruned_loss=0.055, over 13366.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.288, pruned_loss=0.06112, over 2631274.27 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:42:55,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=157534.66666666666, ans=0.2 +2024-08-03 15:43:07,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=157571.33333333334, ans=0.2 +2024-08-03 15:43:12,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=157608.0, ans=0.125 +2024-08-03 15:43:12,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.64 vs. limit=15.0 +2024-08-03 15:43:18,712 INFO [train.py:1114] (1/4) Epoch 12, batch 2950, loss[loss=0.1762, simple_loss=0.2568, pruned_loss=0.04775, over 13346.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2867, pruned_loss=0.06093, over 2628997.51 frames. ], batch size: 34, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:43:20,457 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.179e+02 1.423e+02 1.782e+02 2.994e+02, threshold=2.847e+02, percent-clipped=4.0 +2024-08-03 15:43:29,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=157681.33333333334, ans=0.0 +2024-08-03 15:43:44,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=157754.66666666666, ans=0.05 +2024-08-03 15:43:54,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=157791.33333333334, ans=0.0 +2024-08-03 15:43:59,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=157791.33333333334, ans=12.0 +2024-08-03 15:44:00,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.62 vs. limit=22.5 +2024-08-03 15:44:01,734 INFO [train.py:1114] (1/4) Epoch 12, batch 3000, loss[loss=0.211, simple_loss=0.2897, pruned_loss=0.06619, over 13537.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2865, pruned_loss=0.0608, over 2629056.67 frames. 
], batch size: 37, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:44:01,734 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 15:44:11,713 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.178, simple_loss=0.2775, pruned_loss=0.03924, over 944034.00 frames. +2024-08-03 15:44:11,714 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 15:44:50,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157974.66666666666, ans=0.125 +2024-08-03 15:44:50,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=157974.66666666666, ans=0.125 +2024-08-03 15:44:54,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.62 vs. limit=22.5 +2024-08-03 15:44:55,162 INFO [train.py:1114] (1/4) Epoch 12, batch 3050, loss[loss=0.2002, simple_loss=0.2779, pruned_loss=0.0612, over 13539.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2875, pruned_loss=0.06128, over 2626346.89 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:44:56,885 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.167e+02 1.304e+02 1.686e+02 2.790e+02, threshold=2.608e+02, percent-clipped=0.0 +2024-08-03 15:44:59,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=158011.33333333334, ans=0.0 +2024-08-03 15:44:59,858 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:45:15,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158084.66666666666, ans=0.1 +2024-08-03 15:45:26,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=158121.33333333334, ans=0.125 +2024-08-03 15:45:31,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.25 vs. limit=22.5 +2024-08-03 15:45:32,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=158158.0, ans=0.05 +2024-08-03 15:45:39,540 INFO [train.py:1114] (1/4) Epoch 12, batch 3100, loss[loss=0.2008, simple_loss=0.2875, pruned_loss=0.05702, over 13342.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2866, pruned_loss=0.06093, over 2626325.20 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:45:43,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=158194.66666666666, ans=0.125 +2024-08-03 15:45:48,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-08-03 15:45:49,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=158231.33333333334, ans=0.0 +2024-08-03 15:45:51,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. 
limit=15.0 +2024-08-03 15:45:56,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=158268.0, ans=0.0 +2024-08-03 15:46:05,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=158304.66666666666, ans=0.0 +2024-08-03 15:46:07,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=158304.66666666666, ans=0.02 +2024-08-03 15:46:12,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158341.33333333334, ans=0.125 +2024-08-03 15:46:13,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-08-03 15:46:13,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.00 vs. limit=15.0 +2024-08-03 15:46:22,125 INFO [train.py:1114] (1/4) Epoch 12, batch 3150, loss[loss=0.2279, simple_loss=0.3129, pruned_loss=0.07148, over 13040.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2872, pruned_loss=0.06101, over 2629037.49 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 16.0 +2024-08-03 15:46:23,778 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.679e+01 1.169e+02 1.445e+02 1.962e+02 3.331e+02, threshold=2.890e+02, percent-clipped=6.0 +2024-08-03 15:46:37,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=158414.66666666666, ans=0.2 +2024-08-03 15:46:44,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.45 vs. limit=15.0 +2024-08-03 15:46:51,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=158488.0, ans=0.125 +2024-08-03 15:46:55,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=158488.0, ans=0.0 +2024-08-03 15:46:55,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158524.66666666666, ans=0.1 +2024-08-03 15:46:55,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=158524.66666666666, ans=0.2 +2024-08-03 15:47:05,158 INFO [train.py:1114] (1/4) Epoch 12, batch 3200, loss[loss=0.1835, simple_loss=0.2716, pruned_loss=0.04774, over 13540.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2867, pruned_loss=0.06076, over 2634972.42 frames. 
], batch size: 37, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:47:13,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158598.0, ans=0.1 +2024-08-03 15:47:41,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158708.0, ans=0.125 +2024-08-03 15:47:42,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158708.0, ans=0.125 +2024-08-03 15:47:44,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=158708.0, ans=0.0 +2024-08-03 15:47:48,552 INFO [train.py:1114] (1/4) Epoch 12, batch 3250, loss[loss=0.2073, simple_loss=0.2884, pruned_loss=0.06311, over 13374.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2868, pruned_loss=0.06084, over 2638945.21 frames. ], batch size: 38, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:47:50,191 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.188e+01 1.142e+02 1.340e+02 1.709e+02 3.212e+02, threshold=2.679e+02, percent-clipped=3.0 +2024-08-03 15:47:51,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.01 vs. limit=15.0 +2024-08-03 15:48:04,933 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:48:10,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=158818.0, ans=0.125 +2024-08-03 15:48:11,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=158818.0, ans=0.0 +2024-08-03 15:48:32,120 INFO [train.py:1114] (1/4) Epoch 12, batch 3300, loss[loss=0.1927, simple_loss=0.2754, pruned_loss=0.05494, over 12818.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2856, pruned_loss=0.06032, over 2639238.13 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:48:39,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-08-03 15:48:42,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=158964.66666666666, ans=0.07 +2024-08-03 15:48:42,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=158964.66666666666, ans=0.125 +2024-08-03 15:48:43,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=158964.66666666666, ans=0.125 +2024-08-03 15:48:47,789 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:49:11,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159074.66666666666, ans=0.125 +2024-08-03 15:49:12,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=159074.66666666666, ans=0.0 +2024-08-03 15:49:15,288 INFO [train.py:1114] (1/4) Epoch 12, batch 3350, loss[loss=0.2015, simple_loss=0.2896, pruned_loss=0.05672, over 13160.00 frames. 
], tot_loss[loss=0.2048, simple_loss=0.2871, pruned_loss=0.06121, over 2628917.61 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:49:17,008 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.306e+01 1.184e+02 1.362e+02 1.748e+02 2.695e+02, threshold=2.725e+02, percent-clipped=2.0 +2024-08-03 15:49:25,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=159148.0, ans=0.0 +2024-08-03 15:49:27,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=159148.0, ans=0.125 +2024-08-03 15:49:30,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=159148.0, ans=15.0 +2024-08-03 15:49:30,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.48 vs. limit=22.5 +2024-08-03 15:49:44,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=159221.33333333334, ans=0.125 +2024-08-03 15:49:56,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=159258.0, ans=0.05 +2024-08-03 15:49:59,029 INFO [train.py:1114] (1/4) Epoch 12, batch 3400, loss[loss=0.1944, simple_loss=0.2654, pruned_loss=0.06175, over 13570.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2868, pruned_loss=0.06105, over 2624512.99 frames. ], batch size: 31, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:50:10,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=159331.33333333334, ans=0.2 +2024-08-03 15:50:11,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=159331.33333333334, ans=0.025 +2024-08-03 15:50:15,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159331.33333333334, ans=0.125 +2024-08-03 15:50:24,219 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5 +2024-08-03 15:50:42,887 INFO [train.py:1114] (1/4) Epoch 12, batch 3450, loss[loss=0.1968, simple_loss=0.2819, pruned_loss=0.0558, over 12857.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2868, pruned_loss=0.0609, over 2628239.25 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:50:44,511 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.325e+01 1.219e+02 1.408e+02 1.757e+02 3.423e+02, threshold=2.817e+02, percent-clipped=3.0 +2024-08-03 15:50:45,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=159478.0, ans=0.125 +2024-08-03 15:51:04,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.88 vs. limit=15.0 +2024-08-03 15:51:09,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.84 vs. 
limit=10.0 +2024-08-03 15:51:10,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.24 vs. limit=15.0 +2024-08-03 15:51:12,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.09 vs. limit=15.0 +2024-08-03 15:51:16,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159624.66666666666, ans=0.125 +2024-08-03 15:51:24,776 INFO [train.py:1114] (1/4) Epoch 12, batch 3500, loss[loss=0.1768, simple_loss=0.2639, pruned_loss=0.04486, over 13539.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2856, pruned_loss=0.06033, over 2629331.36 frames. ], batch size: 34, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:51:31,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0 +2024-08-03 15:51:51,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159771.33333333334, ans=0.125 +2024-08-03 15:52:03,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=159808.0, ans=0.125 +2024-08-03 15:52:04,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159808.0, ans=0.1 +2024-08-03 15:52:07,854 INFO [train.py:1114] (1/4) Epoch 12, batch 3550, loss[loss=0.2125, simple_loss=0.2933, pruned_loss=0.06589, over 12563.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2877, pruned_loss=0.06141, over 2627381.39 frames. ], batch size: 58, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:52:09,497 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.339e+01 1.127e+02 1.244e+02 1.565e+02 2.847e+02, threshold=2.489e+02, percent-clipped=1.0 +2024-08-03 15:52:11,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=159844.66666666666, ans=0.0 +2024-08-03 15:52:53,376 INFO [train.py:1114] (1/4) Epoch 12, batch 3600, loss[loss=0.2789, simple_loss=0.3302, pruned_loss=0.1138, over 9428.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2927, pruned_loss=0.06606, over 2488830.84 frames. ], batch size: 96, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:53:15,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=160101.33333333334, ans=0.125 +2024-08-03 15:54:13,945 INFO [train.py:1114] (1/4) Epoch 13, batch 0, loss[loss=0.1863, simple_loss=0.2658, pruned_loss=0.05339, over 13346.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2658, pruned_loss=0.05339, over 13346.00 frames. ], batch size: 33, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:54:13,945 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 15:54:23,996 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.179, simple_loss=0.2806, pruned_loss=0.03875, over 944034.00 frames. 
+2024-08-03 15:54:23,997 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 15:54:25,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=160174.66666666666, ans=0.0 +2024-08-03 15:54:30,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=160174.66666666666, ans=0.125 +2024-08-03 15:54:37,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=160211.33333333334, ans=0.0 +2024-08-03 15:54:37,804 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.239e+02 1.394e+02 1.533e+02 2.538e+02, threshold=2.789e+02, percent-clipped=1.0 +2024-08-03 15:54:41,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160211.33333333334, ans=0.1 +2024-08-03 15:54:46,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.40 vs. limit=12.0 +2024-08-03 15:54:48,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=160248.0, ans=0.07 +2024-08-03 15:55:02,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=160321.33333333334, ans=0.0 +2024-08-03 15:55:11,490 INFO [train.py:1114] (1/4) Epoch 13, batch 50, loss[loss=0.1842, simple_loss=0.262, pruned_loss=0.05322, over 13424.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2902, pruned_loss=0.06298, over 579846.49 frames. ], batch size: 32, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:55:25,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=160394.66666666666, ans=0.2 +2024-08-03 15:55:27,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160394.66666666666, ans=0.125 +2024-08-03 15:55:31,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160431.33333333334, ans=0.125 +2024-08-03 15:55:49,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=160504.66666666666, ans=0.0 +2024-08-03 15:55:58,989 INFO [train.py:1114] (1/4) Epoch 13, batch 100, loss[loss=0.1826, simple_loss=0.2641, pruned_loss=0.05051, over 13511.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2889, pruned_loss=0.06106, over 1026621.51 frames. 
], batch size: 35, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:01,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=160541.33333333334, ans=0.07 +2024-08-03 15:56:01,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=160541.33333333334, ans=0.125 +2024-08-03 15:56:02,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=160541.33333333334, ans=0.125 +2024-08-03 15:56:06,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160541.33333333334, ans=0.1 +2024-08-03 15:56:10,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.344e+01 1.098e+02 1.241e+02 1.448e+02 3.539e+02, threshold=2.482e+02, percent-clipped=1.0 +2024-08-03 15:56:11,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.99 vs. limit=15.0 +2024-08-03 15:56:22,071 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:56:23,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160614.66666666666, ans=0.125 +2024-08-03 15:56:28,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=160651.33333333334, ans=0.0 +2024-08-03 15:56:29,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=160651.33333333334, ans=0.0 +2024-08-03 15:56:29,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=160651.33333333334, ans=0.125 +2024-08-03 15:56:32,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=160651.33333333334, ans=0.04949747468305833 +2024-08-03 15:56:36,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=160688.0, ans=0.2 +2024-08-03 15:56:39,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160688.0, ans=0.125 +2024-08-03 15:56:39,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=160688.0, ans=0.125 +2024-08-03 15:56:44,363 INFO [train.py:1114] (1/4) Epoch 13, batch 150, loss[loss=0.1701, simple_loss=0.2477, pruned_loss=0.04623, over 13424.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2862, pruned_loss=0.05979, over 1387838.96 frames. 
], batch size: 32, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:45,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160724.66666666666, ans=0.125 +2024-08-03 15:56:50,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=160724.66666666666, ans=0.07 +2024-08-03 15:57:02,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=160798.0, ans=0.2 +2024-08-03 15:57:06,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160798.0, ans=0.1 +2024-08-03 15:57:07,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=160798.0, ans=0.125 +2024-08-03 15:57:09,553 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:57:13,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=160834.66666666666, ans=22.5 +2024-08-03 15:57:14,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=160834.66666666666, ans=0.0 +2024-08-03 15:57:16,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160834.66666666666, ans=0.125 +2024-08-03 15:57:33,252 INFO [train.py:1114] (1/4) Epoch 13, batch 200, loss[loss=0.2075, simple_loss=0.2896, pruned_loss=0.06268, over 12425.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2855, pruned_loss=0.06009, over 1666403.01 frames. ], batch size: 58, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:57:40,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=160908.0, ans=0.125 +2024-08-03 15:57:44,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=160944.66666666666, ans=0.125 +2024-08-03 15:57:45,687 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.389e+01 1.180e+02 1.439e+02 1.786e+02 2.514e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 15:57:47,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=160944.66666666666, ans=0.0 +2024-08-03 15:57:49,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160944.66666666666, ans=0.125 +2024-08-03 15:57:49,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.57 vs. limit=12.0 +2024-08-03 15:57:50,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=160981.33333333334, ans=15.0 +2024-08-03 15:58:01,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161018.0, ans=0.125 +2024-08-03 15:58:03,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.61 vs. 
limit=12.0 +2024-08-03 15:58:20,396 INFO [train.py:1114] (1/4) Epoch 13, batch 250, loss[loss=0.2193, simple_loss=0.2982, pruned_loss=0.0702, over 13307.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2847, pruned_loss=0.05934, over 1885122.32 frames. ], batch size: 46, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:58:26,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161091.33333333334, ans=0.0 +2024-08-03 15:58:27,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161091.33333333334, ans=0.1 +2024-08-03 15:58:28,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161128.0, ans=0.125 +2024-08-03 15:58:30,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-03 15:58:32,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=161128.0, ans=0.0 +2024-08-03 15:58:49,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161201.33333333334, ans=0.1 +2024-08-03 15:58:50,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=161201.33333333334, ans=0.125 +2024-08-03 15:58:50,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=161201.33333333334, ans=0.0 +2024-08-03 15:58:56,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=161238.0, ans=0.025 +2024-08-03 15:59:01,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=161238.0, ans=0.1 +2024-08-03 15:59:01,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.39 vs. limit=8.0 +2024-08-03 15:59:02,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=161238.0, ans=0.125 +2024-08-03 15:59:05,421 INFO [train.py:1114] (1/4) Epoch 13, batch 300, loss[loss=0.1852, simple_loss=0.2748, pruned_loss=0.04778, over 13413.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2843, pruned_loss=0.05911, over 2050588.94 frames. ], batch size: 42, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 15:59:20,162 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.100e+02 1.297e+02 1.682e+02 2.744e+02, threshold=2.594e+02, percent-clipped=0.0 +2024-08-03 15:59:39,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=161348.0, ans=0.125 +2024-08-03 15:59:43,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=161384.66666666666, ans=0.125 +2024-08-03 15:59:44,136 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:00:00,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.24 vs. 
limit=15.0 +2024-08-03 16:00:00,484 INFO [train.py:1114] (1/4) Epoch 13, batch 350, loss[loss=0.1815, simple_loss=0.2588, pruned_loss=0.05208, over 13590.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.284, pruned_loss=0.05824, over 2181218.68 frames. ], batch size: 33, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 16:00:08,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=161458.0, ans=0.125 +2024-08-03 16:00:11,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.79 vs. limit=22.5 +2024-08-03 16:00:14,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.54 vs. limit=15.0 +2024-08-03 16:00:27,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=161568.0, ans=0.125 +2024-08-03 16:00:42,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.88 vs. limit=12.0 +2024-08-03 16:00:45,703 INFO [train.py:1114] (1/4) Epoch 13, batch 400, loss[loss=0.2017, simple_loss=0.2814, pruned_loss=0.06098, over 13357.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2843, pruned_loss=0.05843, over 2285576.68 frames. ], batch size: 37, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:00:46,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=161641.33333333334, ans=0.125 +2024-08-03 16:00:52,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=161641.33333333334, ans=0.2 +2024-08-03 16:00:56,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=161678.0, ans=0.0 +2024-08-03 16:00:57,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161678.0, ans=0.125 +2024-08-03 16:01:00,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.412e+01 1.099e+02 1.258e+02 1.504e+02 2.448e+02, threshold=2.516e+02, percent-clipped=0.0 +2024-08-03 16:01:18,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=161751.33333333334, ans=0.025 +2024-08-03 16:01:20,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161751.33333333334, ans=0.125 +2024-08-03 16:01:39,132 INFO [train.py:1114] (1/4) Epoch 13, batch 450, loss[loss=0.2247, simple_loss=0.3014, pruned_loss=0.07396, over 13539.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.284, pruned_loss=0.05828, over 2359908.72 frames. ], batch size: 38, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:01:56,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161861.33333333334, ans=0.1 +2024-08-03 16:02:11,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161934.66666666666, ans=0.125 +2024-08-03 16:02:28,143 INFO [train.py:1114] (1/4) Epoch 13, batch 500, loss[loss=0.2106, simple_loss=0.3039, pruned_loss=0.05868, over 13446.00 frames. 
], tot_loss[loss=0.1985, simple_loss=0.2823, pruned_loss=0.05737, over 2425628.23 frames. ], batch size: 43, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:02:31,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=162008.0, ans=0.125 +2024-08-03 16:02:39,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=162044.66666666666, ans=0.0 +2024-08-03 16:02:44,988 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.845e+01 1.131e+02 1.351e+02 1.618e+02 2.590e+02, threshold=2.702e+02, percent-clipped=1.0 +2024-08-03 16:02:47,275 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:02:50,064 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:02:51,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=162081.33333333334, ans=0.2 +2024-08-03 16:02:51,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=162081.33333333334, ans=0.125 +2024-08-03 16:02:57,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=162081.33333333334, ans=0.125 +2024-08-03 16:03:18,772 INFO [train.py:1114] (1/4) Epoch 13, batch 550, loss[loss=0.2191, simple_loss=0.3001, pruned_loss=0.06906, over 13048.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2829, pruned_loss=0.05803, over 2468075.02 frames. ], batch size: 48, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:03:41,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=162264.66666666666, ans=15.0 +2024-08-03 16:03:56,945 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:04:04,091 INFO [train.py:1114] (1/4) Epoch 13, batch 600, loss[loss=0.2094, simple_loss=0.2876, pruned_loss=0.06562, over 13399.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2827, pruned_loss=0.05812, over 2508625.99 frames. ], batch size: 46, lr: 9.73e-03, grad_scale: 16.0 +2024-08-03 16:04:17,370 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.493e+01 1.180e+02 1.372e+02 1.849e+02 3.441e+02, threshold=2.744e+02, percent-clipped=2.0 +2024-08-03 16:04:18,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=162411.33333333334, ans=0.125 +2024-08-03 16:04:18,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.65 vs. limit=15.0 +2024-08-03 16:04:29,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=162448.0, ans=0.125 +2024-08-03 16:04:30,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=162484.66666666666, ans=0.125 +2024-08-03 16:04:32,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. 
limit=15.0 +2024-08-03 16:04:51,059 INFO [train.py:1114] (1/4) Epoch 13, batch 650, loss[loss=0.173, simple_loss=0.2635, pruned_loss=0.04125, over 13544.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.282, pruned_loss=0.0578, over 2543778.88 frames. ], batch size: 37, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:03,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=162594.66666666666, ans=0.0 +2024-08-03 16:05:12,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.83 vs. limit=6.0 +2024-08-03 16:05:12,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=162631.33333333334, ans=0.2 +2024-08-03 16:05:16,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=15.0 +2024-08-03 16:05:18,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=162631.33333333334, ans=0.1 +2024-08-03 16:05:19,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162668.0, ans=0.0 +2024-08-03 16:05:19,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=15.0 +2024-08-03 16:05:25,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.03 vs. limit=6.0 +2024-08-03 16:05:28,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=162704.66666666666, ans=0.025 +2024-08-03 16:05:32,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=162704.66666666666, ans=0.125 +2024-08-03 16:05:33,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. limit=10.0 +2024-08-03 16:05:34,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162704.66666666666, ans=0.1 +2024-08-03 16:05:39,757 INFO [train.py:1114] (1/4) Epoch 13, batch 700, loss[loss=0.1916, simple_loss=0.2748, pruned_loss=0.05414, over 13536.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2826, pruned_loss=0.05786, over 2565796.73 frames. 
], batch size: 35, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:45,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=162741.33333333334, ans=0.125 +2024-08-03 16:05:50,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162778.0, ans=0.1 +2024-08-03 16:05:53,432 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.882e+01 1.139e+02 1.377e+02 1.797e+02 3.206e+02, threshold=2.754e+02, percent-clipped=4.0 +2024-08-03 16:05:56,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=162778.0, ans=0.125 +2024-08-03 16:06:00,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=162814.66666666666, ans=0.125 +2024-08-03 16:06:08,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162851.33333333334, ans=0.125 +2024-08-03 16:06:11,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=162851.33333333334, ans=0.2 +2024-08-03 16:06:31,547 INFO [train.py:1114] (1/4) Epoch 13, batch 750, loss[loss=0.2026, simple_loss=0.294, pruned_loss=0.05562, over 13375.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2829, pruned_loss=0.05796, over 2583341.47 frames. ], batch size: 37, lr: 9.71e-03, grad_scale: 16.0 +2024-08-03 16:06:51,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.77 vs. limit=15.0 +2024-08-03 16:06:56,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=162998.0, ans=0.125 +2024-08-03 16:06:56,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=162998.0, ans=0.5 +2024-08-03 16:07:33,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=163034.66666666666, ans=0.125 +2024-08-03 16:07:33,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=163034.66666666666, ans=0.0 +2024-08-03 16:07:36,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163034.66666666666, ans=0.1 +2024-08-03 16:07:50,952 INFO [train.py:1114] (1/4) Epoch 13, batch 800, loss[loss=0.1766, simple_loss=0.256, pruned_loss=0.04857, over 13359.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2825, pruned_loss=0.0577, over 2597636.67 frames. ], batch size: 33, lr: 9.71e-03, grad_scale: 32.0 +2024-08-03 16:07:52,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=163108.0, ans=0.0 +2024-08-03 16:07:59,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. 
limit=6.0 +2024-08-03 16:08:04,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.131e+01 1.126e+02 1.310e+02 1.667e+02 3.702e+02, threshold=2.620e+02, percent-clipped=3.0 +2024-08-03 16:08:11,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=163181.33333333334, ans=0.125 +2024-08-03 16:08:12,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=163181.33333333334, ans=0.125 +2024-08-03 16:08:13,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=163181.33333333334, ans=0.0 +2024-08-03 16:08:20,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=163218.0, ans=0.2 +2024-08-03 16:08:23,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163218.0, ans=0.125 +2024-08-03 16:08:28,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.50 vs. limit=15.0 +2024-08-03 16:08:30,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=163254.66666666666, ans=0.2 +2024-08-03 16:08:36,393 INFO [train.py:1114] (1/4) Epoch 13, batch 850, loss[loss=0.1805, simple_loss=0.2831, pruned_loss=0.03898, over 13311.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2821, pruned_loss=0.05778, over 2609623.92 frames. ], batch size: 40, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:08:51,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=163328.0, ans=0.07 +2024-08-03 16:09:00,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=163364.66666666666, ans=0.2 +2024-08-03 16:09:16,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=163438.0, ans=0.0 +2024-08-03 16:09:20,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=163438.0, ans=0.02 +2024-08-03 16:09:22,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0 +2024-08-03 16:09:25,544 INFO [train.py:1114] (1/4) Epoch 13, batch 900, loss[loss=0.1832, simple_loss=0.2636, pruned_loss=0.05142, over 13357.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2825, pruned_loss=0.05821, over 2611829.01 frames. ], batch size: 33, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:09:38,997 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.048e+01 1.164e+02 1.408e+02 1.726e+02 2.750e+02, threshold=2.816e+02, percent-clipped=1.0 +2024-08-03 16:09:45,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=163548.0, ans=0.04949747468305833 +2024-08-03 16:09:56,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-08-03 16:10:12,886 INFO [train.py:1114] (1/4) Epoch 13, batch 950, loss[loss=0.1761, simple_loss=0.259, pruned_loss=0.04657, over 13528.00 frames. 
], tot_loss[loss=0.2, simple_loss=0.2829, pruned_loss=0.05855, over 2612558.61 frames. ], batch size: 34, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:10:18,421 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:10:29,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=163694.66666666666, ans=0.2 +2024-08-03 16:10:52,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=163804.66666666666, ans=0.125 +2024-08-03 16:10:52,305 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:11:00,368 INFO [train.py:1114] (1/4) Epoch 13, batch 1000, loss[loss=0.1906, simple_loss=0.2811, pruned_loss=0.05002, over 13362.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2834, pruned_loss=0.059, over 2610777.26 frames. ], batch size: 35, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:11:06,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163841.33333333334, ans=0.125 +2024-08-03 16:11:09,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. limit=15.0 +2024-08-03 16:11:10,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=163878.0, ans=0.1 +2024-08-03 16:11:14,073 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.003e+01 1.149e+02 1.296e+02 1.618e+02 2.591e+02, threshold=2.593e+02, percent-clipped=0.0 +2024-08-03 16:11:25,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=163914.66666666666, ans=0.025 +2024-08-03 16:11:29,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=15.0 +2024-08-03 16:11:32,329 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:11:45,696 INFO [train.py:1114] (1/4) Epoch 13, batch 1050, loss[loss=0.1955, simple_loss=0.2832, pruned_loss=0.05389, over 13574.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.283, pruned_loss=0.05907, over 2614879.31 frames. ], batch size: 39, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:12:20,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=164171.33333333334, ans=0.125 +2024-08-03 16:12:21,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=164171.33333333334, ans=0.125 +2024-08-03 16:12:32,444 INFO [train.py:1114] (1/4) Epoch 13, batch 1100, loss[loss=0.1813, simple_loss=0.2611, pruned_loss=0.05077, over 13548.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2827, pruned_loss=0.05868, over 2618786.61 frames. 
], batch size: 36, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:12:34,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=164208.0, ans=0.04949747468305833 +2024-08-03 16:12:36,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0 +2024-08-03 16:12:41,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=164244.66666666666, ans=0.025 +2024-08-03 16:12:45,859 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.230e+01 1.162e+02 1.483e+02 1.783e+02 2.652e+02, threshold=2.966e+02, percent-clipped=1.0 +2024-08-03 16:12:54,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.40 vs. limit=22.5 +2024-08-03 16:12:59,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=164281.33333333334, ans=0.0 +2024-08-03 16:13:09,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=164354.66666666666, ans=0.125 +2024-08-03 16:13:19,698 INFO [train.py:1114] (1/4) Epoch 13, batch 1150, loss[loss=0.2036, simple_loss=0.2867, pruned_loss=0.06027, over 13561.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2835, pruned_loss=0.05944, over 2617695.97 frames. ], batch size: 36, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:13:24,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=164391.33333333334, ans=0.125 +2024-08-03 16:13:25,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=164391.33333333334, ans=0.04949747468305833 +2024-08-03 16:13:30,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164428.0, ans=0.125 +2024-08-03 16:13:31,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164428.0, ans=0.125 +2024-08-03 16:13:38,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=164428.0, ans=0.125 +2024-08-03 16:13:39,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=164464.66666666666, ans=0.125 +2024-08-03 16:13:44,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=164464.66666666666, ans=15.0 +2024-08-03 16:14:02,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=164538.0, ans=15.0 +2024-08-03 16:14:06,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164538.0, ans=0.1 +2024-08-03 16:14:07,670 INFO [train.py:1114] (1/4) Epoch 13, batch 1200, loss[loss=0.194, simple_loss=0.2865, pruned_loss=0.05078, over 13584.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2841, pruned_loss=0.05927, over 2614681.60 frames. 
], batch size: 39, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:14:17,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=164611.33333333334, ans=0.0 +2024-08-03 16:14:21,157 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.721e+01 1.160e+02 1.448e+02 1.730e+02 2.788e+02, threshold=2.895e+02, percent-clipped=0.0 +2024-08-03 16:14:37,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=164684.66666666666, ans=0.0 +2024-08-03 16:14:43,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=164684.66666666666, ans=0.125 +2024-08-03 16:14:54,288 INFO [train.py:1114] (1/4) Epoch 13, batch 1250, loss[loss=0.2286, simple_loss=0.3087, pruned_loss=0.07425, over 13463.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2847, pruned_loss=0.05894, over 2626909.33 frames. ], batch size: 42, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:14:54,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=164758.0, ans=0.1 +2024-08-03 16:14:57,115 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:14:58,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=164758.0, ans=0.0 +2024-08-03 16:15:07,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=12.0 +2024-08-03 16:15:16,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164831.33333333334, ans=0.1 +2024-08-03 16:15:17,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.00 vs. limit=15.0 +2024-08-03 16:15:19,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=164831.33333333334, ans=0.025 +2024-08-03 16:15:26,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=164868.0, ans=0.125 +2024-08-03 16:15:39,441 INFO [train.py:1114] (1/4) Epoch 13, batch 1300, loss[loss=0.1891, simple_loss=0.2781, pruned_loss=0.05002, over 12941.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2833, pruned_loss=0.05822, over 2630057.35 frames. 
], batch size: 52, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:15:48,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=164978.0, ans=0.0 +2024-08-03 16:15:50,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164978.0, ans=0.125 +2024-08-03 16:15:52,784 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.108e+01 1.112e+02 1.319e+02 1.683e+02 3.006e+02, threshold=2.638e+02, percent-clipped=1.0 +2024-08-03 16:16:06,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165014.66666666666, ans=0.125 +2024-08-03 16:16:20,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=165088.0, ans=0.0 +2024-08-03 16:16:27,970 INFO [train.py:1114] (1/4) Epoch 13, batch 1350, loss[loss=0.1972, simple_loss=0.2838, pruned_loss=0.05527, over 13543.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2828, pruned_loss=0.05821, over 2637253.61 frames. ], batch size: 37, lr: 9.65e-03, grad_scale: 32.0 +2024-08-03 16:16:38,983 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:17:02,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=165234.66666666666, ans=0.0 +2024-08-03 16:17:06,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=165271.33333333334, ans=0.025 +2024-08-03 16:17:08,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.83 vs. limit=22.5 +2024-08-03 16:17:15,448 INFO [train.py:1114] (1/4) Epoch 13, batch 1400, loss[loss=0.2019, simple_loss=0.2682, pruned_loss=0.06782, over 13294.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2833, pruned_loss=0.05853, over 2641573.96 frames. ], batch size: 31, lr: 9.65e-03, grad_scale: 16.0 +2024-08-03 16:17:24,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=165344.66666666666, ans=0.125 +2024-08-03 16:17:28,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165344.66666666666, ans=0.1 +2024-08-03 16:17:29,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.131e+02 1.241e+02 1.412e+02 2.386e+02, threshold=2.482e+02, percent-clipped=0.0 +2024-08-03 16:17:35,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.79 vs. limit=15.0 +2024-08-03 16:17:44,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=165418.0, ans=0.0 +2024-08-03 16:18:01,117 INFO [train.py:1114] (1/4) Epoch 13, batch 1450, loss[loss=0.1851, simple_loss=0.2759, pruned_loss=0.04716, over 13415.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2838, pruned_loss=0.05849, over 2640404.49 frames. 
], batch size: 43, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:18:09,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=165491.33333333334, ans=0.125 +2024-08-03 16:18:10,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0 +2024-08-03 16:18:11,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=165528.0, ans=0.0 +2024-08-03 16:18:11,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=165528.0, ans=0.125 +2024-08-03 16:18:18,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165528.0, ans=0.1 +2024-08-03 16:18:19,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165528.0, ans=0.125 +2024-08-03 16:18:21,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.92 vs. limit=22.5 +2024-08-03 16:18:23,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=165564.66666666666, ans=0.0 +2024-08-03 16:18:26,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0 +2024-08-03 16:18:47,874 INFO [train.py:1114] (1/4) Epoch 13, batch 1500, loss[loss=0.2082, simple_loss=0.296, pruned_loss=0.06023, over 13402.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2841, pruned_loss=0.05803, over 2640352.74 frames. ], batch size: 39, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:18:48,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=165674.66666666666, ans=0.5 +2024-08-03 16:18:53,870 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.12 vs. limit=6.0 +2024-08-03 16:19:02,695 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.158e+02 1.427e+02 1.728e+02 2.727e+02, threshold=2.854e+02, percent-clipped=3.0 +2024-08-03 16:19:09,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=165748.0, ans=0.0 +2024-08-03 16:19:09,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.21 vs. limit=15.0 +2024-08-03 16:19:12,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=165748.0, ans=0.125 +2024-08-03 16:19:27,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=165821.33333333334, ans=0.125 +2024-08-03 16:19:35,403 INFO [train.py:1114] (1/4) Epoch 13, batch 1550, loss[loss=0.2173, simple_loss=0.3053, pruned_loss=0.06461, over 13400.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2841, pruned_loss=0.05822, over 2630559.54 frames. 
], batch size: 41, lr: 9.63e-03, grad_scale: 16.0 +2024-08-03 16:19:45,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=165894.66666666666, ans=0.2 +2024-08-03 16:19:48,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165894.66666666666, ans=0.1 +2024-08-03 16:19:54,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.05 vs. limit=15.0 +2024-08-03 16:20:02,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.30 vs. limit=15.0 +2024-08-03 16:20:07,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=165968.0, ans=0.125 +2024-08-03 16:20:22,602 INFO [train.py:1114] (1/4) Epoch 13, batch 1600, loss[loss=0.1885, simple_loss=0.2774, pruned_loss=0.04977, over 13583.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2835, pruned_loss=0.05824, over 2623907.36 frames. ], batch size: 39, lr: 9.63e-03, grad_scale: 32.0 +2024-08-03 16:20:44,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.84 vs. limit=10.0 +2024-08-03 16:20:53,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=166078.0, ans=0.125 +2024-08-03 16:20:59,197 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.495e+01 1.166e+02 1.333e+02 1.673e+02 3.385e+02, threshold=2.665e+02, percent-clipped=4.0 +2024-08-03 16:21:06,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=166114.66666666666, ans=0.125 +2024-08-03 16:21:14,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166151.33333333334, ans=0.1 +2024-08-03 16:21:24,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=166188.0, ans=0.125 +2024-08-03 16:21:26,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166188.0, ans=0.0 +2024-08-03 16:21:30,119 INFO [train.py:1114] (1/4) Epoch 13, batch 1650, loss[loss=0.1789, simple_loss=0.2654, pruned_loss=0.0462, over 13314.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2837, pruned_loss=0.05843, over 2621007.06 frames. ], batch size: 40, lr: 9.62e-03, grad_scale: 32.0 +2024-08-03 16:21:36,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=166224.66666666666, ans=0.125 +2024-08-03 16:21:36,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=166224.66666666666, ans=0.05 +2024-08-03 16:21:53,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=166298.0, ans=0.2 +2024-08-03 16:21:59,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.25 vs. 
limit=15.0 +2024-08-03 16:22:06,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166334.66666666666, ans=0.125 +2024-08-03 16:22:11,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166371.33333333334, ans=0.125 +2024-08-03 16:22:17,150 INFO [train.py:1114] (1/4) Epoch 13, batch 1700, loss[loss=0.1768, simple_loss=0.2546, pruned_loss=0.04953, over 13257.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2833, pruned_loss=0.05828, over 2629483.88 frames. ], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:22:17,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166408.0, ans=0.1 +2024-08-03 16:22:30,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=166444.66666666666, ans=0.0 +2024-08-03 16:22:32,762 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.164e+02 1.401e+02 1.757e+02 2.684e+02, threshold=2.802e+02, percent-clipped=1.0 +2024-08-03 16:22:40,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.17 vs. limit=15.0 +2024-08-03 16:22:59,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.62 vs. limit=10.0 +2024-08-03 16:23:03,319 INFO [train.py:1114] (1/4) Epoch 13, batch 1750, loss[loss=0.1743, simple_loss=0.2553, pruned_loss=0.04659, over 13554.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.283, pruned_loss=0.05808, over 2632870.87 frames. ], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:23:09,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.36 vs. 
limit=22.5 +2024-08-03 16:23:15,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=166628.0, ans=0.0 +2024-08-03 16:23:16,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166628.0, ans=0.1 +2024-08-03 16:23:20,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=166628.0, ans=0.125 +2024-08-03 16:23:21,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=166628.0, ans=0.125 +2024-08-03 16:23:22,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166664.66666666666, ans=0.0 +2024-08-03 16:23:39,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=166701.33333333334, ans=0.0 +2024-08-03 16:23:40,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166738.0, ans=0.1 +2024-08-03 16:23:42,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=166738.0, ans=0.05 +2024-08-03 16:23:43,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=166738.0, ans=0.025 +2024-08-03 16:23:47,531 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:23:50,886 INFO [train.py:1114] (1/4) Epoch 13, batch 1800, loss[loss=0.1966, simple_loss=0.28, pruned_loss=0.05657, over 13561.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2827, pruned_loss=0.05764, over 2634736.01 frames. ], batch size: 38, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:23:52,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=166774.66666666666, ans=0.0 +2024-08-03 16:24:00,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.41 vs. limit=15.0 +2024-08-03 16:24:08,446 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.884e+01 1.178e+02 1.346e+02 1.574e+02 2.406e+02, threshold=2.692e+02, percent-clipped=0.0 +2024-08-03 16:24:08,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=166811.33333333334, ans=0.2 +2024-08-03 16:24:14,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166848.0, ans=0.1 +2024-08-03 16:24:20,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0 +2024-08-03 16:24:21,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.64 vs. 
limit=15.0 +2024-08-03 16:24:34,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=166921.33333333334, ans=0.2 +2024-08-03 16:24:38,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166921.33333333334, ans=0.125 +2024-08-03 16:24:40,317 INFO [train.py:1114] (1/4) Epoch 13, batch 1850, loss[loss=0.181, simple_loss=0.2727, pruned_loss=0.04459, over 13388.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2819, pruned_loss=0.05731, over 2637631.00 frames. ], batch size: 39, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:24:47,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0 +2024-08-03 16:24:50,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166994.66666666666, ans=0.1 +2024-08-03 16:24:54,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=12.0 +2024-08-03 16:25:14,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.73 vs. limit=22.5 +2024-08-03 16:25:14,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=167068.0, ans=0.125 +2024-08-03 16:25:20,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=167104.66666666666, ans=0.0 +2024-08-03 16:25:23,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167104.66666666666, ans=0.0 +2024-08-03 16:25:26,608 INFO [train.py:1114] (1/4) Epoch 13, batch 1900, loss[loss=0.2278, simple_loss=0.309, pruned_loss=0.07327, over 13317.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2823, pruned_loss=0.0573, over 2639595.80 frames. ], batch size: 40, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:25:29,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=167141.33333333334, ans=0.125 +2024-08-03 16:25:40,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=167178.0, ans=0.125 +2024-08-03 16:25:44,170 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.192e+01 1.122e+02 1.325e+02 1.918e+02 3.257e+02, threshold=2.651e+02, percent-clipped=9.0 +2024-08-03 16:25:45,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.24 vs. 
limit=22.5 +2024-08-03 16:25:47,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=167214.66666666666, ans=0.125 +2024-08-03 16:25:48,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167214.66666666666, ans=0.125 +2024-08-03 16:26:00,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167251.33333333334, ans=0.125 +2024-08-03 16:26:02,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=167251.33333333334, ans=0.125 +2024-08-03 16:26:12,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.86 vs. limit=15.0 +2024-08-03 16:26:14,633 INFO [train.py:1114] (1/4) Epoch 13, batch 1950, loss[loss=0.1935, simple_loss=0.2799, pruned_loss=0.05357, over 13587.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.284, pruned_loss=0.05819, over 2646404.56 frames. ], batch size: 36, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:26:17,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167324.66666666666, ans=0.1 +2024-08-03 16:26:17,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=167324.66666666666, ans=0.125 +2024-08-03 16:26:39,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=167398.0, ans=0.125 +2024-08-03 16:26:40,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0 +2024-08-03 16:26:48,385 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0 +2024-08-03 16:26:55,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=167471.33333333334, ans=0.125 +2024-08-03 16:27:01,466 INFO [train.py:1114] (1/4) Epoch 13, batch 2000, loss[loss=0.1852, simple_loss=0.2567, pruned_loss=0.05687, over 13546.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2841, pruned_loss=0.05814, over 2635596.44 frames. ], batch size: 31, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:06,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=167508.0, ans=0.025 +2024-08-03 16:27:17,126 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.733e+01 1.160e+02 1.429e+02 1.703e+02 2.821e+02, threshold=2.859e+02, percent-clipped=2.0 +2024-08-03 16:27:29,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=167618.0, ans=0.0 +2024-08-03 16:27:46,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167654.66666666666, ans=0.1 +2024-08-03 16:27:49,178 INFO [train.py:1114] (1/4) Epoch 13, batch 2050, loss[loss=0.1831, simple_loss=0.2607, pruned_loss=0.05274, over 13430.00 frames. 
], tot_loss[loss=0.1995, simple_loss=0.2831, pruned_loss=0.05801, over 2632418.11 frames. ], batch size: 32, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:59,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.62 vs. limit=15.0 +2024-08-03 16:28:10,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167764.66666666666, ans=0.125 +2024-08-03 16:28:11,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167764.66666666666, ans=0.125 +2024-08-03 16:28:32,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=167838.0, ans=0.125 +2024-08-03 16:28:34,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.32 vs. limit=15.0 +2024-08-03 16:28:36,268 INFO [train.py:1114] (1/4) Epoch 13, batch 2100, loss[loss=0.1998, simple_loss=0.2744, pruned_loss=0.06259, over 13543.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2821, pruned_loss=0.05737, over 2637893.64 frames. ], batch size: 37, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:28:43,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167874.66666666666, ans=0.0 +2024-08-03 16:28:51,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0 +2024-08-03 16:28:51,610 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.366e+01 1.102e+02 1.273e+02 1.593e+02 3.536e+02, threshold=2.546e+02, percent-clipped=4.0 +2024-08-03 16:28:51,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=167911.33333333334, ans=0.125 +2024-08-03 16:28:58,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167948.0, ans=0.0 +2024-08-03 16:29:08,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.86 vs. limit=15.0 +2024-08-03 16:29:17,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.89 vs. limit=15.0 +2024-08-03 16:29:23,258 INFO [train.py:1114] (1/4) Epoch 13, batch 2150, loss[loss=0.1982, simple_loss=0.2776, pruned_loss=0.05942, over 13549.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2816, pruned_loss=0.05732, over 2646499.82 frames. 
], batch size: 36, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:29:30,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=168058.0, ans=0.05 +2024-08-03 16:29:37,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=168094.66666666666, ans=0.09899494936611666 +2024-08-03 16:29:45,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=168131.33333333334, ans=0.0 +2024-08-03 16:29:57,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=168168.0, ans=0.0 +2024-08-03 16:30:08,430 INFO [train.py:1114] (1/4) Epoch 13, batch 2200, loss[loss=0.2013, simple_loss=0.2882, pruned_loss=0.0572, over 13373.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2818, pruned_loss=0.05715, over 2643782.91 frames. ], batch size: 39, lr: 9.56e-03, grad_scale: 32.0 +2024-08-03 16:30:23,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.382e+01 1.307e+02 1.724e+02 2.157e+02 3.326e+02, threshold=3.447e+02, percent-clipped=16.0 +2024-08-03 16:30:38,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.07 vs. limit=22.5 +2024-08-03 16:30:45,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=168388.0, ans=0.125 +2024-08-03 16:30:48,555 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.53 vs. limit=15.0 +2024-08-03 16:30:49,274 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:30:55,465 INFO [train.py:1114] (1/4) Epoch 13, batch 2250, loss[loss=0.1821, simple_loss=0.2816, pruned_loss=0.04132, over 13366.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2814, pruned_loss=0.05713, over 2641314.74 frames. ], batch size: 37, lr: 9.56e-03, grad_scale: 16.0 +2024-08-03 16:31:04,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168461.33333333334, ans=0.125 +2024-08-03 16:31:17,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=168498.0, ans=0.125 +2024-08-03 16:31:19,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=168498.0, ans=0.07 +2024-08-03 16:31:31,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168534.66666666666, ans=0.125 +2024-08-03 16:31:35,173 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.36 vs. limit=15.0 +2024-08-03 16:31:41,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=168571.33333333334, ans=0.125 +2024-08-03 16:31:44,479 INFO [train.py:1114] (1/4) Epoch 13, batch 2300, loss[loss=0.1515, simple_loss=0.2325, pruned_loss=0.03522, over 13575.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2806, pruned_loss=0.05681, over 2637973.07 frames. 
], batch size: 33, lr: 9.55e-03, grad_scale: 16.0 +2024-08-03 16:31:46,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.59 vs. limit=22.5 +2024-08-03 16:31:56,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168644.66666666666, ans=0.1 +2024-08-03 16:32:01,026 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.657e+01 1.154e+02 1.335e+02 1.728e+02 3.672e+02, threshold=2.670e+02, percent-clipped=1.0 +2024-08-03 16:32:02,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.41 vs. limit=15.0 +2024-08-03 16:32:02,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=168681.33333333334, ans=15.0 +2024-08-03 16:32:03,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=168681.33333333334, ans=0.2 +2024-08-03 16:32:10,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-08-03 16:32:14,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.25 vs. limit=15.0 +2024-08-03 16:32:17,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168718.0, ans=0.1 +2024-08-03 16:32:20,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=168754.66666666666, ans=0.95 +2024-08-03 16:32:22,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168754.66666666666, ans=0.1 +2024-08-03 16:32:29,771 INFO [train.py:1114] (1/4) Epoch 13, batch 2350, loss[loss=0.1895, simple_loss=0.2814, pruned_loss=0.04884, over 13562.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2806, pruned_loss=0.05681, over 2640175.12 frames. ], batch size: 38, lr: 9.55e-03, grad_scale: 16.0 +2024-08-03 16:33:02,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=168901.33333333334, ans=0.0 +2024-08-03 16:33:05,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=168901.33333333334, ans=0.125 +2024-08-03 16:33:13,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=168938.0, ans=0.125 +2024-08-03 16:33:13,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=168938.0, ans=0.125 +2024-08-03 16:33:17,354 INFO [train.py:1114] (1/4) Epoch 13, batch 2400, loss[loss=0.186, simple_loss=0.2732, pruned_loss=0.04943, over 13552.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2814, pruned_loss=0.05692, over 2641329.61 frames. 
], batch size: 35, lr: 9.54e-03, grad_scale: 32.0 +2024-08-03 16:33:17,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=168974.66666666666, ans=0.2 +2024-08-03 16:33:17,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.37 vs. limit=15.0 +2024-08-03 16:33:21,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=168974.66666666666, ans=0.125 +2024-08-03 16:33:28,718 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.37 vs. limit=15.0 +2024-08-03 16:33:33,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=169011.33333333334, ans=0.1 +2024-08-03 16:33:33,791 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.921e+01 1.143e+02 1.305e+02 1.687e+02 2.768e+02, threshold=2.610e+02, percent-clipped=2.0 +2024-08-03 16:33:35,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=169048.0, ans=0.07 +2024-08-03 16:33:51,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169084.66666666666, ans=0.1 +2024-08-03 16:33:56,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=169121.33333333334, ans=0.0 +2024-08-03 16:34:02,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=169121.33333333334, ans=0.125 +2024-08-03 16:34:04,455 INFO [train.py:1114] (1/4) Epoch 13, batch 2450, loss[loss=0.2051, simple_loss=0.294, pruned_loss=0.05806, over 13362.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2828, pruned_loss=0.0576, over 2631604.60 frames. ], batch size: 37, lr: 9.54e-03, grad_scale: 32.0 +2024-08-03 16:34:12,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169194.66666666666, ans=0.0 +2024-08-03 16:34:13,635 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:34:15,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169194.66666666666, ans=0.0 +2024-08-03 16:34:20,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169194.66666666666, ans=0.1 +2024-08-03 16:34:29,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=12.0 +2024-08-03 16:34:37,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=169268.0, ans=0.125 +2024-08-03 16:34:39,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.92 vs. 
limit=10.0 +2024-08-03 16:34:39,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169304.66666666666, ans=0.125 +2024-08-03 16:34:45,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=169304.66666666666, ans=0.2 +2024-08-03 16:34:51,537 INFO [train.py:1114] (1/4) Epoch 13, batch 2500, loss[loss=0.2073, simple_loss=0.2897, pruned_loss=0.06241, over 13395.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2822, pruned_loss=0.05715, over 2636356.92 frames. ], batch size: 39, lr: 9.53e-03, grad_scale: 32.0 +2024-08-03 16:34:53,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=169341.33333333334, ans=0.2 +2024-08-03 16:34:55,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=169341.33333333334, ans=0.125 +2024-08-03 16:34:59,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=169378.0, ans=0.0 +2024-08-03 16:35:08,282 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.139e+02 1.387e+02 1.623e+02 2.338e+02, threshold=2.774e+02, percent-clipped=0.0 +2024-08-03 16:35:12,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=169414.66666666666, ans=0.025 +2024-08-03 16:35:13,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.61 vs. limit=10.0 +2024-08-03 16:35:13,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.31 vs. limit=22.5 +2024-08-03 16:35:16,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=169414.66666666666, ans=0.125 +2024-08-03 16:35:18,754 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:35:24,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=169451.33333333334, ans=0.125 +2024-08-03 16:35:34,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=169488.0, ans=0.09899494936611666 +2024-08-03 16:35:38,358 INFO [train.py:1114] (1/4) Epoch 13, batch 2550, loss[loss=0.1734, simple_loss=0.2504, pruned_loss=0.04821, over 13520.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.283, pruned_loss=0.05759, over 2638185.47 frames. 
], batch size: 31, lr: 9.53e-03, grad_scale: 16.0 +2024-08-03 16:35:41,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=169524.66666666666, ans=0.09899494936611666 +2024-08-03 16:35:42,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=169524.66666666666, ans=0.0 +2024-08-03 16:36:08,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=169634.66666666666, ans=0.0 +2024-08-03 16:36:15,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.45 vs. limit=22.5 +2024-08-03 16:36:17,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.25 vs. limit=15.0 +2024-08-03 16:36:20,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=169671.33333333334, ans=0.0 +2024-08-03 16:36:21,539 INFO [train.py:1114] (1/4) Epoch 13, batch 2600, loss[loss=0.1799, simple_loss=0.2636, pruned_loss=0.04805, over 13559.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2833, pruned_loss=0.0576, over 2637875.62 frames. ], batch size: 36, lr: 9.52e-03, grad_scale: 16.0 +2024-08-03 16:36:25,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=169708.0, ans=0.035 +2024-08-03 16:36:30,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=169708.0, ans=0.0 +2024-08-03 16:36:31,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=169744.66666666666, ans=0.125 +2024-08-03 16:36:39,612 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.885e+01 1.136e+02 1.295e+02 1.531e+02 3.554e+02, threshold=2.589e+02, percent-clipped=4.0 +2024-08-03 16:36:44,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.75 vs. limit=8.0 +2024-08-03 16:37:03,495 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:37:06,852 INFO [train.py:1114] (1/4) Epoch 13, batch 2650, loss[loss=0.2148, simple_loss=0.298, pruned_loss=0.06583, over 13351.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2838, pruned_loss=0.05792, over 2640457.75 frames. ], batch size: 46, lr: 9.52e-03, grad_scale: 16.0 +2024-08-03 16:37:06,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=169891.33333333334, ans=0.95 +2024-08-03 16:37:15,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=169928.0, ans=0.125 +2024-08-03 16:37:28,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.65 vs. 
limit=5.0 +2024-08-03 16:37:48,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=170038.0, ans=0.125 +2024-08-03 16:37:50,564 INFO [train.py:1114] (1/4) Epoch 13, batch 2700, loss[loss=0.2138, simple_loss=0.2957, pruned_loss=0.06596, over 13547.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2836, pruned_loss=0.05804, over 2637624.45 frames. ], batch size: 40, lr: 9.51e-03, grad_scale: 16.0 +2024-08-03 16:37:50,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=170074.66666666666, ans=0.125 +2024-08-03 16:37:58,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=170111.33333333334, ans=0.125 +2024-08-03 16:37:59,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170111.33333333334, ans=0.1 +2024-08-03 16:37:59,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=170111.33333333334, ans=0.0 +2024-08-03 16:38:07,173 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.611e+01 1.174e+02 1.343e+02 1.652e+02 2.925e+02, threshold=2.686e+02, percent-clipped=2.0 +2024-08-03 16:38:14,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=170148.0, ans=0.125 +2024-08-03 16:38:36,366 INFO [train.py:1114] (1/4) Epoch 13, batch 2750, loss[loss=0.172, simple_loss=0.2569, pruned_loss=0.0435, over 13331.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2824, pruned_loss=0.05774, over 2635655.03 frames. ], batch size: 34, lr: 9.51e-03, grad_scale: 16.0 +2024-08-03 16:38:40,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170258.0, ans=0.1 +2024-08-03 16:38:49,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170294.66666666666, ans=0.1 +2024-08-03 16:38:55,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=170331.33333333334, ans=0.0 +2024-08-03 16:38:58,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0 +2024-08-03 16:39:00,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=170331.33333333334, ans=0.07 +2024-08-03 16:39:04,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=170368.0, ans=0.0 +2024-08-03 16:39:20,222 INFO [train.py:1114] (1/4) Epoch 13, batch 2800, loss[loss=0.2187, simple_loss=0.2947, pruned_loss=0.07138, over 9045.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2831, pruned_loss=0.05848, over 2627784.92 frames. 
], batch size: 97, lr: 9.50e-03, grad_scale: 32.0 +2024-08-03 16:39:20,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=170441.33333333334, ans=0.025 +2024-08-03 16:39:20,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=170441.33333333334, ans=0.0 +2024-08-03 16:39:27,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=170478.0, ans=0.125 +2024-08-03 16:39:30,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=170478.0, ans=22.5 +2024-08-03 16:39:31,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=170478.0, ans=0.125 +2024-08-03 16:39:35,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=170478.0, ans=0.07 +2024-08-03 16:39:36,325 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.148e+02 1.326e+02 1.634e+02 2.406e+02, threshold=2.653e+02, percent-clipped=0.0 +2024-08-03 16:39:37,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=170514.66666666666, ans=0.0 +2024-08-03 16:39:44,461 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:40:03,088 INFO [train.py:1114] (1/4) Epoch 13, batch 2850, loss[loss=0.1798, simple_loss=0.2718, pruned_loss=0.04385, over 13364.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2832, pruned_loss=0.05844, over 2620562.95 frames. ], batch size: 35, lr: 9.50e-03, grad_scale: 16.0 +2024-08-03 16:40:04,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=170624.66666666666, ans=0.125 +2024-08-03 16:40:08,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=170624.66666666666, ans=0.125 +2024-08-03 16:40:25,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=170698.0, ans=0.1 +2024-08-03 16:40:41,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.73 vs. limit=15.0 +2024-08-03 16:40:46,449 INFO [train.py:1114] (1/4) Epoch 13, batch 2900, loss[loss=0.1909, simple_loss=0.2779, pruned_loss=0.05195, over 13374.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2851, pruned_loss=0.05898, over 2631592.02 frames. ], batch size: 36, lr: 9.49e-03, grad_scale: 16.0 +2024-08-03 16:40:46,663 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:41:04,015 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.894e+01 1.100e+02 1.263e+02 1.445e+02 2.759e+02, threshold=2.526e+02, percent-clipped=1.0 +2024-08-03 16:41:30,940 INFO [train.py:1114] (1/4) Epoch 13, batch 2950, loss[loss=0.1948, simple_loss=0.2784, pruned_loss=0.05556, over 13338.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2828, pruned_loss=0.05815, over 2629452.29 frames. 
], batch size: 34, lr: 9.49e-03, grad_scale: 16.0 +2024-08-03 16:41:43,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171028.0, ans=0.1 +2024-08-03 16:41:44,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=171028.0, ans=0.125 +2024-08-03 16:41:49,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171064.66666666666, ans=0.1 +2024-08-03 16:41:53,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=171064.66666666666, ans=0.2 +2024-08-03 16:42:14,321 INFO [train.py:1114] (1/4) Epoch 13, batch 3000, loss[loss=0.1839, simple_loss=0.2699, pruned_loss=0.0489, over 13534.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2827, pruned_loss=0.05842, over 2629623.34 frames. ], batch size: 37, lr: 9.48e-03, grad_scale: 16.0 +2024-08-03 16:42:14,322 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 16:42:24,359 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([2.1484, 2.2823, 1.8440, 2.3462, 3.1687, 2.7135, 2.7195, 3.0594], + device='cuda:1') +2024-08-03 16:42:28,569 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1746, simple_loss=0.2745, pruned_loss=0.03731, over 944034.00 frames. +2024-08-03 16:42:28,570 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 16:42:44,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=171211.33333333334, ans=0.2 +2024-08-03 16:42:45,834 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.103e+01 1.098e+02 1.258e+02 1.464e+02 2.884e+02, threshold=2.515e+02, percent-clipped=2.0 +2024-08-03 16:42:53,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=171284.66666666666, ans=0.0 +2024-08-03 16:42:58,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=171284.66666666666, ans=0.0 +2024-08-03 16:42:59,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171284.66666666666, ans=0.1 +2024-08-03 16:42:59,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.42 vs. limit=12.0 +2024-08-03 16:43:00,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=171284.66666666666, ans=0.0 +2024-08-03 16:43:03,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=171321.33333333334, ans=0.0 +2024-08-03 16:43:05,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171321.33333333334, ans=0.1 +2024-08-03 16:43:07,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.39 vs. limit=22.5 +2024-08-03 16:43:12,963 INFO [train.py:1114] (1/4) Epoch 13, batch 3050, loss[loss=0.1886, simple_loss=0.2728, pruned_loss=0.05224, over 13522.00 frames. 
], tot_loss[loss=0.2004, simple_loss=0.2832, pruned_loss=0.05884, over 2626797.64 frames. ], batch size: 35, lr: 9.48e-03, grad_scale: 16.0 +2024-08-03 16:43:14,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=15.0 +2024-08-03 16:43:17,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.45 vs. limit=22.5 +2024-08-03 16:43:24,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=171394.66666666666, ans=0.125 +2024-08-03 16:43:42,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=171468.0, ans=0.0 +2024-08-03 16:43:53,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171504.66666666666, ans=0.1 +2024-08-03 16:43:56,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=171541.33333333334, ans=0.125 +2024-08-03 16:43:57,529 INFO [train.py:1114] (1/4) Epoch 13, batch 3100, loss[loss=0.2235, simple_loss=0.3063, pruned_loss=0.07036, over 13324.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2836, pruned_loss=0.05904, over 2626370.94 frames. ], batch size: 46, lr: 9.47e-03, grad_scale: 16.0 +2024-08-03 16:44:06,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=171578.0, ans=0.0 +2024-08-03 16:44:08,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=171578.0, ans=0.125 +2024-08-03 16:44:14,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.118e+02 1.244e+02 1.594e+02 3.299e+02, threshold=2.487e+02, percent-clipped=5.0 +2024-08-03 16:44:17,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=171614.66666666666, ans=0.95 +2024-08-03 16:44:26,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=171651.33333333334, ans=0.125 +2024-08-03 16:44:30,325 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:44:40,339 INFO [train.py:1114] (1/4) Epoch 13, batch 3150, loss[loss=0.1863, simple_loss=0.2776, pruned_loss=0.04753, over 13301.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2834, pruned_loss=0.05868, over 2628338.17 frames. 
], batch size: 49, lr: 9.47e-03, grad_scale: 16.0 +2024-08-03 16:44:46,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=171724.66666666666, ans=0.125 +2024-08-03 16:44:55,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=171761.33333333334, ans=0.2 +2024-08-03 16:45:13,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171871.33333333334, ans=0.125 +2024-08-03 16:45:13,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=171871.33333333334, ans=0.07 +2024-08-03 16:45:24,089 INFO [train.py:1114] (1/4) Epoch 13, batch 3200, loss[loss=0.2074, simple_loss=0.2973, pruned_loss=0.05878, over 13536.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2833, pruned_loss=0.05838, over 2635319.51 frames. ], batch size: 37, lr: 9.46e-03, grad_scale: 32.0 +2024-08-03 16:45:25,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=171908.0, ans=0.125 +2024-08-03 16:45:26,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171908.0, ans=0.1 +2024-08-03 16:45:31,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.69 vs. limit=5.0 +2024-08-03 16:45:41,820 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.110e+01 1.144e+02 1.329e+02 1.843e+02 2.975e+02, threshold=2.659e+02, percent-clipped=4.0 +2024-08-03 16:45:45,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=171981.33333333334, ans=0.0 +2024-08-03 16:45:47,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.02 vs. limit=10.0 +2024-08-03 16:45:59,836 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:46:06,405 INFO [train.py:1114] (1/4) Epoch 13, batch 3250, loss[loss=0.214, simple_loss=0.2949, pruned_loss=0.06651, over 13376.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2833, pruned_loss=0.05823, over 2639769.67 frames. ], batch size: 38, lr: 9.46e-03, grad_scale: 16.0 +2024-08-03 16:46:15,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172128.0, ans=0.125 +2024-08-03 16:46:21,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172128.0, ans=0.0 +2024-08-03 16:46:35,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=172201.33333333334, ans=0.125 +2024-08-03 16:46:37,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=172201.33333333334, ans=0.0 +2024-08-03 16:46:50,167 INFO [train.py:1114] (1/4) Epoch 13, batch 3300, loss[loss=0.2075, simple_loss=0.2903, pruned_loss=0.06235, over 12840.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2817, pruned_loss=0.0576, over 2640968.82 frames. 
], batch size: 52, lr: 9.45e-03, grad_scale: 16.0 +2024-08-03 16:46:58,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=172311.33333333334, ans=0.2 +2024-08-03 16:47:00,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-08-03 16:47:08,241 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.464e+01 1.115e+02 1.316e+02 1.603e+02 3.409e+02, threshold=2.632e+02, percent-clipped=2.0 +2024-08-03 16:47:11,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=172348.0, ans=0.125 +2024-08-03 16:47:24,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=172421.33333333334, ans=0.0 +2024-08-03 16:47:32,850 INFO [train.py:1114] (1/4) Epoch 13, batch 3350, loss[loss=0.2323, simple_loss=0.3122, pruned_loss=0.07623, over 13035.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2835, pruned_loss=0.05848, over 2629949.57 frames. ], batch size: 48, lr: 9.45e-03, grad_scale: 16.0 +2024-08-03 16:47:46,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172494.66666666666, ans=0.1 +2024-08-03 16:48:01,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172568.0, ans=0.1 +2024-08-03 16:48:07,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=172604.66666666666, ans=0.0 +2024-08-03 16:48:10,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=172604.66666666666, ans=0.0 +2024-08-03 16:48:13,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172604.66666666666, ans=0.0 +2024-08-03 16:48:15,665 INFO [train.py:1114] (1/4) Epoch 13, batch 3400, loss[loss=0.1945, simple_loss=0.2685, pruned_loss=0.06025, over 13518.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2829, pruned_loss=0.05838, over 2624849.35 frames. ], batch size: 31, lr: 9.44e-03, grad_scale: 8.0 +2024-08-03 16:48:34,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.232e+01 1.137e+02 1.264e+02 1.560e+02 2.546e+02, threshold=2.528e+02, percent-clipped=0.0 +2024-08-03 16:48:38,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.25 vs. limit=10.0 +2024-08-03 16:48:49,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=172751.33333333334, ans=0.2 +2024-08-03 16:48:55,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0 +2024-08-03 16:49:00,075 INFO [train.py:1114] (1/4) Epoch 13, batch 3450, loss[loss=0.192, simple_loss=0.2798, pruned_loss=0.05215, over 12843.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2829, pruned_loss=0.05806, over 2628559.85 frames. 
], batch size: 52, lr: 9.44e-03, grad_scale: 8.0 +2024-08-03 16:49:12,085 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:49:17,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172898.0, ans=0.1 +2024-08-03 16:49:17,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=172898.0, ans=0.125 +2024-08-03 16:49:18,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=15.0 +2024-08-03 16:49:24,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=172898.0, ans=0.125 +2024-08-03 16:49:34,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172971.33333333334, ans=0.0 +2024-08-03 16:49:43,781 INFO [train.py:1114] (1/4) Epoch 13, batch 3500, loss[loss=0.1785, simple_loss=0.2674, pruned_loss=0.04477, over 13535.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2827, pruned_loss=0.05815, over 2630871.87 frames. ], batch size: 34, lr: 9.43e-03, grad_scale: 8.0 +2024-08-03 16:49:49,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=173008.0, ans=0.0 +2024-08-03 16:50:03,121 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.139e+02 1.315e+02 1.608e+02 2.660e+02, threshold=2.630e+02, percent-clipped=2.0 +2024-08-03 16:50:06,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=173081.33333333334, ans=0.0 +2024-08-03 16:50:11,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=173118.0, ans=0.0 +2024-08-03 16:50:15,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173118.0, ans=0.1 +2024-08-03 16:50:17,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=173118.0, ans=0.025 +2024-08-03 16:50:24,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=173154.66666666666, ans=22.5 +2024-08-03 16:50:27,046 INFO [train.py:1114] (1/4) Epoch 13, batch 3550, loss[loss=0.2521, simple_loss=0.3257, pruned_loss=0.08923, over 12496.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2848, pruned_loss=0.05921, over 2629983.53 frames. 
], batch size: 58, lr: 9.43e-03, grad_scale: 8.0 +2024-08-03 16:50:39,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=173228.0, ans=10.0 +2024-08-03 16:50:54,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=173301.33333333334, ans=0.2 +2024-08-03 16:50:58,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173301.33333333334, ans=0.1 +2024-08-03 16:51:05,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.75 vs. limit=22.5 +2024-08-03 16:51:11,547 INFO [train.py:1114] (1/4) Epoch 13, batch 3600, loss[loss=0.2486, simple_loss=0.3169, pruned_loss=0.09013, over 8933.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2897, pruned_loss=0.06354, over 2489853.08 frames. ], batch size: 97, lr: 9.42e-03, grad_scale: 16.0 +2024-08-03 16:51:14,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173374.66666666666, ans=0.125 +2024-08-03 16:51:17,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=173374.66666666666, ans=0.0 +2024-08-03 16:51:20,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.48 vs. limit=5.0 +2024-08-03 16:51:31,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.213e+02 1.304e+02 1.372e+02 1.765e+02, threshold=2.609e+02, percent-clipped=0.0 +2024-08-03 16:51:31,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173448.0, ans=0.1 +2024-08-03 16:51:39,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=173484.66666666666, ans=0.02 +2024-08-03 16:52:47,634 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.1599, simple_loss=0.2448, pruned_loss=0.03747, over 13344.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2448, pruned_loss=0.03747, over 13344.00 frames. ], batch size: 33, lr: 9.08e-03, grad_scale: 32.0 +2024-08-03 16:52:47,635 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 16:52:54,067 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7079, 3.1006, 2.5247, 2.2239], device='cuda:1') +2024-08-03 16:53:02,073 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1773, simple_loss=0.2784, pruned_loss=0.03813, over 944034.00 frames. +2024-08-03 16:53:02,073 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 16:53:10,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=173558.0, ans=0.0 +2024-08-03 16:53:23,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=173594.66666666666, ans=0.0 +2024-08-03 16:53:45,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173668.0, ans=0.125 +2024-08-03 16:53:49,559 INFO [train.py:1114] (1/4) Epoch 14, batch 50, loss[loss=0.1881, simple_loss=0.265, pruned_loss=0.0556, over 13422.00 frames. 
], tot_loss[loss=0.1999, simple_loss=0.2831, pruned_loss=0.05839, over 577887.52 frames. ], batch size: 32, lr: 9.07e-03, grad_scale: 32.0 +2024-08-03 16:53:52,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173704.66666666666, ans=0.1 +2024-08-03 16:53:57,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173741.33333333334, ans=0.0 +2024-08-03 16:54:03,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173741.33333333334, ans=0.1 +2024-08-03 16:54:19,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.89 vs. limit=15.0 +2024-08-03 16:54:20,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173778.0, ans=0.125 +2024-08-03 16:54:26,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.65 vs. limit=15.0 +2024-08-03 16:54:27,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.175e+02 1.367e+02 1.781e+02 2.550e+02, threshold=2.735e+02, percent-clipped=0.0 +2024-08-03 16:54:30,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=173814.66666666666, ans=0.0 +2024-08-03 16:54:43,513 INFO [train.py:1114] (1/4) Epoch 14, batch 100, loss[loss=0.1896, simple_loss=0.2623, pruned_loss=0.05849, over 13541.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2849, pruned_loss=0.05871, over 1025109.18 frames. ], batch size: 35, lr: 9.07e-03, grad_scale: 32.0 +2024-08-03 16:55:07,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173961.33333333334, ans=0.1 +2024-08-03 16:55:15,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=173998.0, ans=0.125 +2024-08-03 16:55:27,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=174034.66666666666, ans=0.125 +2024-08-03 16:55:30,121 INFO [train.py:1114] (1/4) Epoch 14, batch 150, loss[loss=0.2026, simple_loss=0.2718, pruned_loss=0.06672, over 13428.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2826, pruned_loss=0.0573, over 1386591.19 frames. ], batch size: 32, lr: 9.06e-03, grad_scale: 32.0 +2024-08-03 16:55:37,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=15.0 +2024-08-03 16:55:38,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.88 vs. 
limit=15.0 +2024-08-03 16:55:42,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=174108.0, ans=0.2 +2024-08-03 16:55:44,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=174108.0, ans=0.0 +2024-08-03 16:55:47,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=15.0 +2024-08-03 16:55:48,766 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.40 vs. limit=15.0 +2024-08-03 16:56:02,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.41 vs. limit=15.0 +2024-08-03 16:56:03,372 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.908e+01 1.123e+02 1.329e+02 1.786e+02 3.044e+02, threshold=2.658e+02, percent-clipped=1.0 +2024-08-03 16:56:08,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=174181.33333333334, ans=0.125 +2024-08-03 16:56:28,040 INFO [train.py:1114] (1/4) Epoch 14, batch 200, loss[loss=0.2, simple_loss=0.2866, pruned_loss=0.05674, over 12492.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.282, pruned_loss=0.0572, over 1665022.44 frames. ], batch size: 58, lr: 9.06e-03, grad_scale: 32.0 +2024-08-03 16:56:30,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=174254.66666666666, ans=0.125 +2024-08-03 16:56:31,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174254.66666666666, ans=0.0 +2024-08-03 16:56:36,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=174291.33333333334, ans=0.125 +2024-08-03 16:56:40,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=174291.33333333334, ans=0.0 +2024-08-03 16:56:41,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=174291.33333333334, ans=0.0 +2024-08-03 16:56:56,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=15.0 +2024-08-03 16:56:56,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=174364.66666666666, ans=0.0 +2024-08-03 16:57:06,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=174401.33333333334, ans=0.125 +2024-08-03 16:57:08,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=174401.33333333334, ans=0.0 +2024-08-03 16:57:13,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=174401.33333333334, ans=0.125 +2024-08-03 16:57:14,950 INFO [train.py:1114] (1/4) Epoch 14, batch 250, loss[loss=0.2154, simple_loss=0.2966, pruned_loss=0.06707, over 13381.00 frames. 
], tot_loss[loss=0.198, simple_loss=0.2818, pruned_loss=0.05711, over 1884675.70 frames. ], batch size: 46, lr: 9.05e-03, grad_scale: 32.0 +2024-08-03 16:57:44,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=174511.33333333334, ans=0.125 +2024-08-03 16:57:49,036 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.116e+01 1.178e+02 1.381e+02 1.725e+02 3.085e+02, threshold=2.762e+02, percent-clipped=4.0 +2024-08-03 16:57:50,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174548.0, ans=0.1 +2024-08-03 16:57:54,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=174548.0, ans=0.025 +2024-08-03 16:57:59,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=174584.66666666666, ans=0.2 +2024-08-03 16:58:03,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.52 vs. limit=15.0 +2024-08-03 16:58:05,300 INFO [train.py:1114] (1/4) Epoch 14, batch 300, loss[loss=0.199, simple_loss=0.2821, pruned_loss=0.05791, over 13459.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2808, pruned_loss=0.05646, over 2050886.61 frames. ], batch size: 42, lr: 9.05e-03, grad_scale: 32.0 +2024-08-03 16:58:27,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=174694.66666666666, ans=0.125 +2024-08-03 16:58:39,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=174731.33333333334, ans=0.0 +2024-08-03 16:58:45,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.33 vs. limit=10.0 +2024-08-03 16:58:52,954 INFO [train.py:1114] (1/4) Epoch 14, batch 350, loss[loss=0.1694, simple_loss=0.2426, pruned_loss=0.04811, over 13581.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2814, pruned_loss=0.0569, over 2181298.73 frames. ], batch size: 33, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 16:59:13,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=174804.66666666666, ans=0.125 +2024-08-03 16:59:33,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=174878.0, ans=0.0 +2024-08-03 16:59:38,113 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.118e+02 1.275e+02 1.546e+02 2.611e+02, threshold=2.551e+02, percent-clipped=0.0 +2024-08-03 16:59:43,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=15.0 +2024-08-03 16:59:45,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=174914.66666666666, ans=0.0 +2024-08-03 16:59:57,450 INFO [train.py:1114] (1/4) Epoch 14, batch 400, loss[loss=0.1888, simple_loss=0.2796, pruned_loss=0.04901, over 13352.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2809, pruned_loss=0.05655, over 2285184.58 frames. 
], batch size: 37, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 17:00:12,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175024.66666666666, ans=0.1 +2024-08-03 17:00:19,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=175061.33333333334, ans=0.125 +2024-08-03 17:00:41,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=175134.66666666666, ans=0.0 +2024-08-03 17:00:48,050 INFO [train.py:1114] (1/4) Epoch 14, batch 450, loss[loss=0.1918, simple_loss=0.284, pruned_loss=0.04975, over 13560.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2814, pruned_loss=0.05661, over 2359034.34 frames. ], batch size: 38, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 17:01:00,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175208.0, ans=0.1 +2024-08-03 17:01:18,914 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.283e+01 1.087e+02 1.281e+02 1.631e+02 3.461e+02, threshold=2.562e+02, percent-clipped=3.0 +2024-08-03 17:01:36,930 INFO [train.py:1114] (1/4) Epoch 14, batch 500, loss[loss=0.2024, simple_loss=0.2848, pruned_loss=0.06002, over 13420.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2796, pruned_loss=0.0555, over 2425116.66 frames. ], batch size: 43, lr: 9.03e-03, grad_scale: 32.0 +2024-08-03 17:02:20,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=175501.33333333334, ans=15.0 +2024-08-03 17:02:21,557 INFO [train.py:1114] (1/4) Epoch 14, batch 550, loss[loss=0.2473, simple_loss=0.3185, pruned_loss=0.08803, over 13265.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2798, pruned_loss=0.05569, over 2467833.39 frames. ], batch size: 49, lr: 9.03e-03, grad_scale: 32.0 +2024-08-03 17:02:22,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=175538.0, ans=0.025 +2024-08-03 17:02:26,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-08-03 17:02:53,529 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.358e+01 1.151e+02 1.294e+02 1.518e+02 2.416e+02, threshold=2.587e+02, percent-clipped=0.0 +2024-08-03 17:02:58,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=175684.66666666666, ans=0.2 +2024-08-03 17:03:08,898 INFO [train.py:1114] (1/4) Epoch 14, batch 600, loss[loss=0.2254, simple_loss=0.3054, pruned_loss=0.07274, over 13293.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2799, pruned_loss=0.05596, over 2507779.75 frames. ], batch size: 46, lr: 9.02e-03, grad_scale: 16.0 +2024-08-03 17:03:12,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.29 vs. limit=15.0 +2024-08-03 17:03:18,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.59 vs. 
limit=15.0 +2024-08-03 17:03:29,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=175794.66666666666, ans=0.125 +2024-08-03 17:03:57,660 INFO [train.py:1114] (1/4) Epoch 14, batch 650, loss[loss=0.1857, simple_loss=0.2685, pruned_loss=0.05139, over 13548.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2794, pruned_loss=0.05586, over 2542862.13 frames. ], batch size: 37, lr: 9.02e-03, grad_scale: 8.0 +2024-08-03 17:03:58,943 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=12.0 +2024-08-03 17:04:01,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=175904.66666666666, ans=0.2 +2024-08-03 17:04:04,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=175904.66666666666, ans=0.5 +2024-08-03 17:04:18,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175978.0, ans=0.1 +2024-08-03 17:04:29,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=176014.66666666666, ans=0.0 +2024-08-03 17:04:29,807 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.160e+02 1.386e+02 1.901e+02 3.564e+02, threshold=2.772e+02, percent-clipped=5.0 +2024-08-03 17:04:32,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=176014.66666666666, ans=0.0 +2024-08-03 17:04:34,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=176051.33333333334, ans=0.125 +2024-08-03 17:04:46,195 INFO [train.py:1114] (1/4) Epoch 14, batch 700, loss[loss=0.1928, simple_loss=0.2688, pruned_loss=0.05845, over 13527.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2795, pruned_loss=0.05564, over 2565088.72 frames. ], batch size: 35, lr: 9.01e-03, grad_scale: 8.0 +2024-08-03 17:04:49,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=176088.0, ans=0.0 +2024-08-03 17:04:59,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=176124.66666666666, ans=0.2 +2024-08-03 17:05:05,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.77 vs. limit=12.0 +2024-08-03 17:05:16,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=176198.0, ans=0.025 +2024-08-03 17:05:26,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=176234.66666666666, ans=0.025 +2024-08-03 17:05:29,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=176234.66666666666, ans=0.0 +2024-08-03 17:05:31,384 INFO [train.py:1114] (1/4) Epoch 14, batch 750, loss[loss=0.168, simple_loss=0.2647, pruned_loss=0.03563, over 13375.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2791, pruned_loss=0.0554, over 2581455.03 frames. 
], batch size: 37, lr: 9.01e-03, grad_scale: 8.0 +2024-08-03 17:05:33,551 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:05:36,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=176271.33333333334, ans=0.125 +2024-08-03 17:06:02,471 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.800e+01 1.140e+02 1.290e+02 1.721e+02 6.299e+02, threshold=2.581e+02, percent-clipped=4.0 +2024-08-03 17:06:17,180 INFO [train.py:1114] (1/4) Epoch 14, batch 800, loss[loss=0.2217, simple_loss=0.2895, pruned_loss=0.07692, over 13337.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2789, pruned_loss=0.05542, over 2595753.79 frames. ], batch size: 33, lr: 9.00e-03, grad_scale: 16.0 +2024-08-03 17:06:29,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=176491.33333333334, ans=0.5 +2024-08-03 17:06:48,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.33 vs. limit=22.5 +2024-08-03 17:06:56,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176601.33333333334, ans=0.1 +2024-08-03 17:07:04,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=176638.0, ans=0.09899494936611666 +2024-08-03 17:07:05,356 INFO [train.py:1114] (1/4) Epoch 14, batch 850, loss[loss=0.1914, simple_loss=0.2822, pruned_loss=0.05032, over 13311.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2789, pruned_loss=0.05573, over 2608691.93 frames. ], batch size: 40, lr: 9.00e-03, grad_scale: 16.0 +2024-08-03 17:07:39,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176748.0, ans=0.1 +2024-08-03 17:07:40,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.443e+01 1.087e+02 1.211e+02 1.412e+02 2.074e+02, threshold=2.422e+02, percent-clipped=0.0 +2024-08-03 17:07:53,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=176784.66666666666, ans=0.025 +2024-08-03 17:07:55,321 INFO [train.py:1114] (1/4) Epoch 14, batch 900, loss[loss=0.175, simple_loss=0.2627, pruned_loss=0.04364, over 13344.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2793, pruned_loss=0.05584, over 2611561.56 frames. ], batch size: 33, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:07:57,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=176821.33333333334, ans=0.0 +2024-08-03 17:08:35,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176968.0, ans=0.1 +2024-08-03 17:08:36,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=176968.0, ans=0.1 +2024-08-03 17:08:36,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.10 vs. limit=15.0 +2024-08-03 17:08:42,700 INFO [train.py:1114] (1/4) Epoch 14, batch 950, loss[loss=0.1839, simple_loss=0.2635, pruned_loss=0.05214, over 13524.00 frames. 
], tot_loss[loss=0.1963, simple_loss=0.2799, pruned_loss=0.05635, over 2612976.18 frames. ], batch size: 34, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:08:47,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=177004.66666666666, ans=0.07 +2024-08-03 17:09:01,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177041.33333333334, ans=0.1 +2024-08-03 17:09:02,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=177078.0, ans=0.04949747468305833 +2024-08-03 17:09:05,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=177078.0, ans=0.0 +2024-08-03 17:09:06,519 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.20 vs. limit=10.0 +2024-08-03 17:09:12,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177114.66666666666, ans=0.1 +2024-08-03 17:09:12,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177114.66666666666, ans=0.1 +2024-08-03 17:09:15,826 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.964e+01 1.158e+02 1.398e+02 1.727e+02 2.347e+02, threshold=2.796e+02, percent-clipped=0.0 +2024-08-03 17:09:20,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=177151.33333333334, ans=0.02 +2024-08-03 17:09:25,211 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:09:30,713 INFO [train.py:1114] (1/4) Epoch 14, batch 1000, loss[loss=0.2068, simple_loss=0.2889, pruned_loss=0.06233, over 13379.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2807, pruned_loss=0.05673, over 2611788.21 frames. ], batch size: 35, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:09:44,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177224.66666666666, ans=0.1 +2024-08-03 17:09:47,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177224.66666666666, ans=0.125 +2024-08-03 17:10:10,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177334.66666666666, ans=0.125 +2024-08-03 17:10:10,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=177334.66666666666, ans=0.2 +2024-08-03 17:10:18,994 INFO [train.py:1114] (1/4) Epoch 14, batch 1050, loss[loss=0.1736, simple_loss=0.2686, pruned_loss=0.03929, over 13577.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2797, pruned_loss=0.05619, over 2616407.97 frames. 
], batch size: 39, lr: 8.98e-03, grad_scale: 16.0 +2024-08-03 17:10:29,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=177408.0, ans=0.0 +2024-08-03 17:10:34,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=177408.0, ans=0.125 +2024-08-03 17:10:51,917 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.165e+01 1.080e+02 1.235e+02 1.446e+02 2.124e+02, threshold=2.470e+02, percent-clipped=0.0 +2024-08-03 17:10:54,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=177481.33333333334, ans=0.125 +2024-08-03 17:11:03,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=177518.0, ans=0.125 +2024-08-03 17:11:06,533 INFO [train.py:1114] (1/4) Epoch 14, batch 1100, loss[loss=0.2053, simple_loss=0.282, pruned_loss=0.06428, over 13544.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2797, pruned_loss=0.05605, over 2620850.76 frames. ], batch size: 36, lr: 8.98e-03, grad_scale: 16.0 +2024-08-03 17:11:15,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.27 vs. limit=22.5 +2024-08-03 17:11:20,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=177591.33333333334, ans=0.0 +2024-08-03 17:11:39,428 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-08-03 17:11:52,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-03 17:11:56,968 INFO [train.py:1114] (1/4) Epoch 14, batch 1150, loss[loss=0.1883, simple_loss=0.2753, pruned_loss=0.0507, over 13562.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2799, pruned_loss=0.05648, over 2620614.09 frames. ], batch size: 36, lr: 8.97e-03, grad_scale: 16.0 +2024-08-03 17:12:11,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=177774.66666666666, ans=0.125 +2024-08-03 17:12:19,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0 +2024-08-03 17:12:23,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=177811.33333333334, ans=0.0 +2024-08-03 17:12:28,533 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.383e+01 1.181e+02 1.323e+02 1.686e+02 3.018e+02, threshold=2.646e+02, percent-clipped=3.0 +2024-08-03 17:12:29,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=177848.0, ans=0.0 +2024-08-03 17:12:43,145 INFO [train.py:1114] (1/4) Epoch 14, batch 1200, loss[loss=0.2091, simple_loss=0.2965, pruned_loss=0.06089, over 13572.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2811, pruned_loss=0.05677, over 2617069.95 frames. 
], batch size: 39, lr: 8.97e-03, grad_scale: 32.0 +2024-08-03 17:12:59,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177958.0, ans=0.125 +2024-08-03 17:13:11,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178031.33333333334, ans=0.125 +2024-08-03 17:13:13,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178031.33333333334, ans=0.125 +2024-08-03 17:13:13,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178031.33333333334, ans=0.1 +2024-08-03 17:13:14,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=178031.33333333334, ans=0.2 +2024-08-03 17:13:26,544 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:13:28,039 INFO [train.py:1114] (1/4) Epoch 14, batch 1250, loss[loss=0.2225, simple_loss=0.3074, pruned_loss=0.06879, over 13427.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.282, pruned_loss=0.05695, over 2628579.20 frames. ], batch size: 42, lr: 8.96e-03, grad_scale: 32.0 +2024-08-03 17:13:40,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=178141.33333333334, ans=10.0 +2024-08-03 17:13:42,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=178141.33333333334, ans=0.2 +2024-08-03 17:13:45,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178141.33333333334, ans=0.125 +2024-08-03 17:14:01,211 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.606e+01 1.145e+02 1.312e+02 1.553e+02 2.666e+02, threshold=2.625e+02, percent-clipped=1.0 +2024-08-03 17:14:03,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=178214.66666666666, ans=0.0 +2024-08-03 17:14:04,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.26 vs. limit=15.0 +2024-08-03 17:14:05,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.30 vs. limit=10.0 +2024-08-03 17:14:05,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178251.33333333334, ans=0.125 +2024-08-03 17:14:06,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=178251.33333333334, ans=0.0 +2024-08-03 17:14:10,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=178251.33333333334, ans=0.0 +2024-08-03 17:14:15,817 INFO [train.py:1114] (1/4) Epoch 14, batch 1300, loss[loss=0.1818, simple_loss=0.2715, pruned_loss=0.04604, over 12935.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2811, pruned_loss=0.05693, over 2631744.07 frames. 
], batch size: 52, lr: 8.96e-03, grad_scale: 32.0 +2024-08-03 17:14:21,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.76 vs. limit=15.0 +2024-08-03 17:14:25,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=178288.0, ans=0.125 +2024-08-03 17:14:27,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=178324.66666666666, ans=0.2 +2024-08-03 17:14:38,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=178361.33333333334, ans=0.0 +2024-08-03 17:14:38,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178361.33333333334, ans=0.1 +2024-08-03 17:14:48,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=178398.0, ans=0.0 +2024-08-03 17:14:56,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178434.66666666666, ans=0.1 +2024-08-03 17:14:57,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178434.66666666666, ans=0.1 +2024-08-03 17:15:01,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178434.66666666666, ans=0.125 +2024-08-03 17:15:05,135 INFO [train.py:1114] (1/4) Epoch 14, batch 1350, loss[loss=0.1716, simple_loss=0.2596, pruned_loss=0.04176, over 13546.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2802, pruned_loss=0.05652, over 2639255.80 frames. ], batch size: 37, lr: 8.95e-03, grad_scale: 32.0 +2024-08-03 17:15:10,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178471.33333333334, ans=0.1 +2024-08-03 17:15:11,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178471.33333333334, ans=0.1 +2024-08-03 17:15:19,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.85 vs. limit=6.0 +2024-08-03 17:15:22,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0 +2024-08-03 17:15:37,512 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.701e+01 1.127e+02 1.257e+02 1.561e+02 2.635e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 17:15:37,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178581.33333333334, ans=0.1 +2024-08-03 17:15:51,671 INFO [train.py:1114] (1/4) Epoch 14, batch 1400, loss[loss=0.1649, simple_loss=0.2426, pruned_loss=0.04356, over 13256.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2797, pruned_loss=0.05613, over 2643292.41 frames. 
], batch size: 31, lr: 8.95e-03, grad_scale: 16.0 +2024-08-03 17:15:55,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=178654.66666666666, ans=0.0 +2024-08-03 17:16:01,872 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:16:02,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=178691.33333333334, ans=0.07 +2024-08-03 17:16:03,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.43 vs. limit=15.0 +2024-08-03 17:16:18,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.63 vs. limit=10.0 +2024-08-03 17:16:36,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178838.0, ans=0.0 +2024-08-03 17:16:36,742 INFO [train.py:1114] (1/4) Epoch 14, batch 1450, loss[loss=0.2424, simple_loss=0.3206, pruned_loss=0.08206, over 13406.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2802, pruned_loss=0.05601, over 2642643.61 frames. ], batch size: 43, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:16:41,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178838.0, ans=0.125 +2024-08-03 17:16:44,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=178838.0, ans=0.0 +2024-08-03 17:16:45,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=178874.66666666666, ans=0.07 +2024-08-03 17:16:46,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=178874.66666666666, ans=0.09899494936611666 +2024-08-03 17:16:58,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=178911.33333333334, ans=0.125 +2024-08-03 17:17:08,368 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.238e+01 1.149e+02 1.315e+02 1.594e+02 2.634e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 17:17:12,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=178984.66666666666, ans=0.05 +2024-08-03 17:17:13,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=178984.66666666666, ans=0.0 +2024-08-03 17:17:18,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.49 vs. limit=10.0 +2024-08-03 17:17:24,002 INFO [train.py:1114] (1/4) Epoch 14, batch 1500, loss[loss=0.1975, simple_loss=0.2912, pruned_loss=0.05191, over 13415.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2808, pruned_loss=0.05626, over 2641844.42 frames. 
], batch size: 39, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:17:26,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=179021.33333333334, ans=0.125 +2024-08-03 17:17:29,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.62 vs. limit=10.0 +2024-08-03 17:17:33,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=179058.0, ans=0.2 +2024-08-03 17:17:35,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=179058.0, ans=0.125 +2024-08-03 17:18:01,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.60 vs. limit=15.0 +2024-08-03 17:18:05,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179168.0, ans=0.04949747468305833 +2024-08-03 17:18:11,853 INFO [train.py:1114] (1/4) Epoch 14, batch 1550, loss[loss=0.188, simple_loss=0.2792, pruned_loss=0.04847, over 13404.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2812, pruned_loss=0.05657, over 2631328.50 frames. ], batch size: 41, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:18:12,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179204.66666666666, ans=0.125 +2024-08-03 17:18:12,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0 +2024-08-03 17:18:16,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=179204.66666666666, ans=0.0 +2024-08-03 17:18:21,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179241.33333333334, ans=0.125 +2024-08-03 17:18:29,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=179241.33333333334, ans=0.0 +2024-08-03 17:18:29,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=179241.33333333334, ans=15.0 +2024-08-03 17:18:34,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.95 vs. limit=22.5 +2024-08-03 17:18:40,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=179314.66666666666, ans=0.2 +2024-08-03 17:18:45,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.351e+01 1.093e+02 1.288e+02 1.698e+02 2.728e+02, threshold=2.576e+02, percent-clipped=2.0 +2024-08-03 17:18:52,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179351.33333333334, ans=0.1 +2024-08-03 17:19:00,522 INFO [train.py:1114] (1/4) Epoch 14, batch 1600, loss[loss=0.2199, simple_loss=0.3065, pruned_loss=0.06665, over 13580.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2805, pruned_loss=0.05647, over 2623984.11 frames. 
], batch size: 39, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:21,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179461.33333333334, ans=0.125 +2024-08-03 17:19:21,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179461.33333333334, ans=0.125 +2024-08-03 17:19:25,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=179461.33333333334, ans=0.09899494936611666 +2024-08-03 17:19:35,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.41 vs. limit=15.0 +2024-08-03 17:19:46,115 INFO [train.py:1114] (1/4) Epoch 14, batch 1650, loss[loss=0.1979, simple_loss=0.2945, pruned_loss=0.0506, over 13297.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.28, pruned_loss=0.05593, over 2621162.88 frames. ], batch size: 40, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:46,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179571.33333333334, ans=0.1 +2024-08-03 17:20:10,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179644.66666666666, ans=0.125 +2024-08-03 17:20:33,194 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.145e+02 1.327e+02 1.825e+02 3.127e+02, threshold=2.655e+02, percent-clipped=5.0 +2024-08-03 17:20:40,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179718.0, ans=0.0 +2024-08-03 17:20:42,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.28 vs. limit=15.0 +2024-08-03 17:20:46,571 INFO [train.py:1114] (1/4) Epoch 14, batch 1700, loss[loss=0.1649, simple_loss=0.2407, pruned_loss=0.04452, over 13255.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.279, pruned_loss=0.05512, over 2629955.46 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:20:48,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0 +2024-08-03 17:20:57,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=3.88 vs. limit=5.0 +2024-08-03 17:21:05,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179828.0, ans=0.125 +2024-08-03 17:21:10,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=15.0 +2024-08-03 17:21:12,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179828.0, ans=0.125 +2024-08-03 17:21:12,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.19 vs. 
limit=12.0 +2024-08-03 17:21:22,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=179864.66666666666, ans=0.2 +2024-08-03 17:21:33,627 INFO [train.py:1114] (1/4) Epoch 14, batch 1750, loss[loss=0.1837, simple_loss=0.2612, pruned_loss=0.05311, over 13531.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2793, pruned_loss=0.05572, over 2633312.43 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:21:36,505 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:21:36,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=179938.0, ans=0.125 +2024-08-03 17:21:50,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179974.66666666666, ans=0.1 +2024-08-03 17:21:55,273 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:21:55,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=180011.33333333334, ans=0.2 +2024-08-03 17:21:57,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180011.33333333334, ans=0.1 +2024-08-03 17:21:58,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180011.33333333334, ans=0.125 +2024-08-03 17:22:07,567 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.076e+01 1.125e+02 1.266e+02 1.724e+02 3.044e+02, threshold=2.532e+02, percent-clipped=5.0 +2024-08-03 17:22:13,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=180084.66666666666, ans=0.2 +2024-08-03 17:22:20,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=180084.66666666666, ans=0.07 +2024-08-03 17:22:22,979 INFO [train.py:1114] (1/4) Epoch 14, batch 1800, loss[loss=0.2099, simple_loss=0.296, pruned_loss=0.06195, over 13557.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2803, pruned_loss=0.05635, over 2635189.82 frames. ], batch size: 38, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:22:30,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=180121.33333333334, ans=0.0 +2024-08-03 17:22:40,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=180158.0, ans=0.0 +2024-08-03 17:22:43,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.43 vs. limit=22.5 +2024-08-03 17:23:02,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=180268.0, ans=0.125 +2024-08-03 17:23:10,716 INFO [train.py:1114] (1/4) Epoch 14, batch 1850, loss[loss=0.204, simple_loss=0.2949, pruned_loss=0.05656, over 13392.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2796, pruned_loss=0.05558, over 2638130.52 frames. 
], batch size: 39, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:23:19,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=180341.33333333334, ans=0.0 +2024-08-03 17:23:21,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=180341.33333333334, ans=0.125 +2024-08-03 17:23:37,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=180414.66666666666, ans=0.0 +2024-08-03 17:23:42,953 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.173e+01 1.187e+02 1.383e+02 1.867e+02 3.590e+02, threshold=2.765e+02, percent-clipped=8.0 +2024-08-03 17:23:49,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=180451.33333333334, ans=0.2 +2024-08-03 17:23:56,654 INFO [train.py:1114] (1/4) Epoch 14, batch 1900, loss[loss=0.2376, simple_loss=0.3201, pruned_loss=0.07754, over 13318.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2803, pruned_loss=0.05614, over 2640778.14 frames. ], batch size: 40, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:23:58,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180488.0, ans=0.125 +2024-08-03 17:24:15,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=180561.33333333334, ans=0.125 +2024-08-03 17:24:16,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180561.33333333334, ans=0.125 +2024-08-03 17:24:20,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-08-03 17:24:28,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0 +2024-08-03 17:24:34,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0 +2024-08-03 17:24:34,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=180634.66666666666, ans=0.025 +2024-08-03 17:24:43,811 INFO [train.py:1114] (1/4) Epoch 14, batch 1950, loss[loss=0.1919, simple_loss=0.268, pruned_loss=0.05795, over 13559.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2813, pruned_loss=0.05636, over 2646955.74 frames. ], batch size: 36, lr: 8.90e-03, grad_scale: 16.0 +2024-08-03 17:24:44,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=180671.33333333334, ans=0.125 +2024-08-03 17:25:12,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.43 vs. 
limit=22.5 +2024-08-03 17:25:19,256 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.397e+01 1.089e+02 1.232e+02 1.473e+02 2.566e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 17:25:23,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=180818.0, ans=0.025 +2024-08-03 17:25:25,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=180818.0, ans=0.125 +2024-08-03 17:25:29,604 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:25:32,198 INFO [train.py:1114] (1/4) Epoch 14, batch 2000, loss[loss=0.1507, simple_loss=0.2329, pruned_loss=0.03422, over 13547.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.283, pruned_loss=0.05764, over 2636430.56 frames. ], batch size: 31, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:25:53,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=180928.0, ans=0.125 +2024-08-03 17:26:21,668 INFO [train.py:1114] (1/4) Epoch 14, batch 2050, loss[loss=0.1748, simple_loss=0.2585, pruned_loss=0.04558, over 13416.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2822, pruned_loss=0.0573, over 2633672.66 frames. ], batch size: 32, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:26:24,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.69 vs. limit=22.5 +2024-08-03 17:26:25,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=181038.0, ans=0.125 +2024-08-03 17:26:45,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181111.33333333334, ans=0.1 +2024-08-03 17:26:45,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.19 vs. limit=15.0 +2024-08-03 17:26:52,620 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:26:54,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.117e+02 1.302e+02 1.562e+02 2.500e+02, threshold=2.604e+02, percent-clipped=1.0 +2024-08-03 17:26:56,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=181148.0, ans=0.07 +2024-08-03 17:27:06,805 INFO [train.py:1114] (1/4) Epoch 14, batch 2100, loss[loss=0.1872, simple_loss=0.2761, pruned_loss=0.04918, over 13556.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2815, pruned_loss=0.05677, over 2638893.59 frames. 
], batch size: 37, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:27:06,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=181221.33333333334, ans=0.05 +2024-08-03 17:27:34,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181331.33333333334, ans=0.125 +2024-08-03 17:27:48,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181368.0, ans=0.125 +2024-08-03 17:27:51,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.12 vs. limit=6.0 +2024-08-03 17:27:51,983 INFO [train.py:1114] (1/4) Epoch 14, batch 2150, loss[loss=0.2249, simple_loss=0.3046, pruned_loss=0.07261, over 13563.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.281, pruned_loss=0.05673, over 2647414.78 frames. ], batch size: 36, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:27:52,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.19 vs. limit=15.0 +2024-08-03 17:28:20,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181514.66666666666, ans=0.125 +2024-08-03 17:28:26,854 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.184e+01 1.154e+02 1.453e+02 1.954e+02 3.704e+02, threshold=2.907e+02, percent-clipped=11.0 +2024-08-03 17:28:30,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181551.33333333334, ans=0.125 +2024-08-03 17:28:36,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.01 vs. limit=22.5 +2024-08-03 17:28:39,549 INFO [train.py:1114] (1/4) Epoch 14, batch 2200, loss[loss=0.1908, simple_loss=0.2765, pruned_loss=0.05258, over 13397.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.28, pruned_loss=0.05625, over 2645573.69 frames. ], batch size: 39, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:28:40,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=181588.0, ans=0.125 +2024-08-03 17:28:55,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.93 vs. limit=22.5 +2024-08-03 17:29:11,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=181698.0, ans=0.125 +2024-08-03 17:29:12,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.63 vs. limit=15.0 +2024-08-03 17:29:17,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=181734.66666666666, ans=0.125 +2024-08-03 17:29:19,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=181734.66666666666, ans=0.2 +2024-08-03 17:29:26,882 INFO [train.py:1114] (1/4) Epoch 14, batch 2250, loss[loss=0.2253, simple_loss=0.3114, pruned_loss=0.06963, over 13359.00 frames. 
], tot_loss[loss=0.1961, simple_loss=0.2799, pruned_loss=0.05615, over 2642722.34 frames. ], batch size: 37, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:29:42,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=181808.0, ans=0.125 +2024-08-03 17:29:43,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=181808.0, ans=0.07 +2024-08-03 17:29:53,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=181844.66666666666, ans=0.2 +2024-08-03 17:29:58,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=181881.33333333334, ans=0.125 +2024-08-03 17:30:03,285 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.261e+01 1.193e+02 1.520e+02 1.872e+02 2.993e+02, threshold=3.040e+02, percent-clipped=1.0 +2024-08-03 17:30:12,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=15.0 +2024-08-03 17:30:15,676 INFO [train.py:1114] (1/4) Epoch 14, batch 2300, loss[loss=0.1565, simple_loss=0.2407, pruned_loss=0.03609, over 13592.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2783, pruned_loss=0.05553, over 2638701.55 frames. ], batch size: 33, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:30:38,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=182028.0, ans=0.125 +2024-08-03 17:30:43,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=182064.66666666666, ans=0.125 +2024-08-03 17:30:46,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=182064.66666666666, ans=0.07 +2024-08-03 17:31:00,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-08-03 17:31:01,038 INFO [train.py:1114] (1/4) Epoch 14, batch 2350, loss[loss=0.1866, simple_loss=0.2857, pruned_loss=0.04375, over 13557.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2779, pruned_loss=0.05498, over 2640217.70 frames. ], batch size: 38, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:31:06,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.66 vs. limit=22.5 +2024-08-03 17:31:08,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=182138.0, ans=0.95 +2024-08-03 17:31:26,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=182211.33333333334, ans=0.125 +2024-08-03 17:31:28,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.76 vs. 
limit=22.5 +2024-08-03 17:31:32,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=182248.0, ans=0.07 +2024-08-03 17:31:33,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.345e+01 1.136e+02 1.357e+02 1.723e+02 3.270e+02, threshold=2.715e+02, percent-clipped=1.0 +2024-08-03 17:31:35,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.22 vs. limit=12.0 +2024-08-03 17:31:46,428 INFO [train.py:1114] (1/4) Epoch 14, batch 2400, loss[loss=0.1971, simple_loss=0.281, pruned_loss=0.05661, over 13531.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2785, pruned_loss=0.05515, over 2641019.13 frames. ], batch size: 35, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:31:46,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=182321.33333333334, ans=0.125 +2024-08-03 17:31:55,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.34 vs. limit=10.0 +2024-08-03 17:31:55,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.45 vs. limit=15.0 +2024-08-03 17:32:02,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=182358.0, ans=0.125 +2024-08-03 17:32:11,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=182394.66666666666, ans=0.125 +2024-08-03 17:32:13,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=182394.66666666666, ans=0.125 +2024-08-03 17:32:36,580 INFO [train.py:1114] (1/4) Epoch 14, batch 2450, loss[loss=0.1937, simple_loss=0.2834, pruned_loss=0.05203, over 13362.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2798, pruned_loss=0.05613, over 2630117.77 frames. ], batch size: 37, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:32:52,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=182541.33333333334, ans=0.125 +2024-08-03 17:32:54,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182578.0, ans=0.0 +2024-08-03 17:33:11,020 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.955e+01 1.136e+02 1.286e+02 1.596e+02 2.665e+02, threshold=2.571e+02, percent-clipped=0.0 +2024-08-03 17:33:17,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0 +2024-08-03 17:33:25,708 INFO [train.py:1114] (1/4) Epoch 14, batch 2500, loss[loss=0.2229, simple_loss=0.3021, pruned_loss=0.07185, over 13394.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2801, pruned_loss=0.05634, over 2634545.12 frames. ], batch size: 39, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:33:52,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.76 vs. 
limit=15.0 +2024-08-03 17:34:03,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=182834.66666666666, ans=0.125 +2024-08-03 17:34:09,940 INFO [train.py:1114] (1/4) Epoch 14, batch 2550, loss[loss=0.1808, simple_loss=0.2553, pruned_loss=0.0531, over 13530.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2802, pruned_loss=0.05632, over 2637332.48 frames. ], batch size: 31, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:34:23,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=12.0 +2024-08-03 17:34:41,382 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.149e+02 1.432e+02 2.081e+02 4.007e+02, threshold=2.864e+02, percent-clipped=10.0 +2024-08-03 17:34:44,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=183018.0, ans=0.2 +2024-08-03 17:34:44,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.29 vs. limit=15.0 +2024-08-03 17:34:53,666 INFO [train.py:1114] (1/4) Epoch 14, batch 2600, loss[loss=0.1757, simple_loss=0.2639, pruned_loss=0.04376, over 13567.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.281, pruned_loss=0.05679, over 2636252.48 frames. ], batch size: 36, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:35:01,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=183054.66666666666, ans=0.125 +2024-08-03 17:35:12,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183128.0, ans=0.1 +2024-08-03 17:35:19,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=183164.66666666666, ans=0.0 +2024-08-03 17:35:24,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=183164.66666666666, ans=0.125 +2024-08-03 17:35:28,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=183201.33333333334, ans=10.0 +2024-08-03 17:35:36,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=183201.33333333334, ans=0.125 +2024-08-03 17:35:38,000 INFO [train.py:1114] (1/4) Epoch 14, batch 2650, loss[loss=0.2297, simple_loss=0.303, pruned_loss=0.07824, over 13297.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2813, pruned_loss=0.05683, over 2639620.12 frames. ], batch size: 46, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:35:40,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=183238.0, ans=0.2 +2024-08-03 17:35:48,692 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:35:53,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=183274.66666666666, ans=0.125 +2024-08-03 17:35:57,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.16 vs. 
limit=22.5 +2024-08-03 17:36:01,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=183311.33333333334, ans=0.125 +2024-08-03 17:36:06,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.91 vs. limit=15.0 +2024-08-03 17:36:08,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183348.0, ans=0.125 +2024-08-03 17:36:08,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=183348.0, ans=0.0 +2024-08-03 17:36:09,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.156e+02 1.338e+02 1.561e+02 2.649e+02, threshold=2.677e+02, percent-clipped=0.0 +2024-08-03 17:36:16,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=183348.0, ans=0.125 +2024-08-03 17:36:47,879 INFO [train.py:1114] (1/4) Epoch 14, batch 2700, loss[loss=0.2069, simple_loss=0.2966, pruned_loss=0.05863, over 13554.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2814, pruned_loss=0.05676, over 2637060.23 frames. ], batch size: 40, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:36:58,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.37 vs. limit=12.0 +2024-08-03 17:37:19,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183531.33333333334, ans=0.1 +2024-08-03 17:37:19,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=183531.33333333334, ans=0.0 +2024-08-03 17:37:28,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=183568.0, ans=0.2 +2024-08-03 17:37:30,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183604.66666666666, ans=0.1 +2024-08-03 17:37:31,435 INFO [train.py:1114] (1/4) Epoch 14, batch 2750, loss[loss=0.1756, simple_loss=0.2583, pruned_loss=0.04641, over 13348.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2805, pruned_loss=0.05687, over 2635093.73 frames. ], batch size: 34, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:37:31,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=183604.66666666666, ans=0.125 +2024-08-03 17:37:33,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.44 vs. 
limit=15.0 +2024-08-03 17:37:40,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=183641.33333333334, ans=0.125 +2024-08-03 17:37:59,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=183714.66666666666, ans=0.125 +2024-08-03 17:38:00,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183714.66666666666, ans=0.125 +2024-08-03 17:38:02,647 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.576e+01 1.249e+02 1.500e+02 2.010e+02 3.327e+02, threshold=3.000e+02, percent-clipped=3.0 +2024-08-03 17:38:15,239 INFO [train.py:1114] (1/4) Epoch 14, batch 2800, loss[loss=0.3187, simple_loss=0.361, pruned_loss=0.1382, over 8916.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.281, pruned_loss=0.0572, over 2626317.01 frames. ], batch size: 96, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:38:31,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.07 vs. limit=10.0 +2024-08-03 17:38:40,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=183898.0, ans=0.125 +2024-08-03 17:38:47,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=183898.0, ans=0.0 +2024-08-03 17:38:50,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=183898.0, ans=0.2 +2024-08-03 17:38:57,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0 +2024-08-03 17:39:00,482 INFO [train.py:1114] (1/4) Epoch 14, batch 2850, loss[loss=0.1833, simple_loss=0.2624, pruned_loss=0.05214, over 13365.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2816, pruned_loss=0.05751, over 2620973.65 frames. ], batch size: 35, lr: 8.82e-03, grad_scale: 32.0 +2024-08-03 17:39:06,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0 +2024-08-03 17:39:15,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184008.0, ans=0.1 +2024-08-03 17:39:17,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=184008.0, ans=0.0 +2024-08-03 17:39:33,097 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.234e+01 1.082e+02 1.194e+02 1.402e+02 2.334e+02, threshold=2.389e+02, percent-clipped=0.0 +2024-08-03 17:39:34,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=184081.33333333334, ans=0.025 +2024-08-03 17:39:45,197 INFO [train.py:1114] (1/4) Epoch 14, batch 2900, loss[loss=0.1894, simple_loss=0.2754, pruned_loss=0.05171, over 13357.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2826, pruned_loss=0.05754, over 2631641.15 frames. ], batch size: 36, lr: 8.82e-03, grad_scale: 32.0 +2024-08-03 17:39:45,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.11 vs. 
limit=15.0 +2024-08-03 17:39:46,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184154.66666666666, ans=0.125 +2024-08-03 17:40:01,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=184191.33333333334, ans=0.125 +2024-08-03 17:40:17,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=184264.66666666666, ans=0.025 +2024-08-03 17:40:18,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=184264.66666666666, ans=0.0 +2024-08-03 17:40:28,418 INFO [train.py:1114] (1/4) Epoch 14, batch 2950, loss[loss=0.1855, simple_loss=0.2699, pruned_loss=0.05051, over 13336.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.281, pruned_loss=0.05708, over 2630094.47 frames. ], batch size: 34, lr: 8.81e-03, grad_scale: 32.0 +2024-08-03 17:40:33,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=184338.0, ans=0.125 +2024-08-03 17:40:43,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-08-03 17:40:44,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184374.66666666666, ans=0.1 +2024-08-03 17:40:51,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184411.33333333334, ans=0.125 +2024-08-03 17:41:00,868 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.120e+01 1.167e+02 1.400e+02 1.731e+02 2.660e+02, threshold=2.799e+02, percent-clipped=4.0 +2024-08-03 17:41:13,014 INFO [train.py:1114] (1/4) Epoch 14, batch 3000, loss[loss=0.1969, simple_loss=0.2771, pruned_loss=0.05836, over 13541.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2803, pruned_loss=0.0566, over 2630800.25 frames. ], batch size: 37, lr: 8.81e-03, grad_scale: 32.0 +2024-08-03 17:41:13,015 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 17:41:18,329 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.2305, 1.4369, 1.3385, 1.4378, 1.6835, 1.1657, 1.2702, 0.9594], + device='cuda:1') +2024-08-03 17:41:23,007 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1738, simple_loss=0.2731, pruned_loss=0.03723, over 944034.00 frames. +2024-08-03 17:41:23,008 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 17:41:24,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.76 vs. limit=15.0 +2024-08-03 17:41:25,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184521.33333333334, ans=0.1 +2024-08-03 17:41:30,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.70 vs. 
limit=15.0 +2024-08-03 17:41:44,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=184594.66666666666, ans=0.125 +2024-08-03 17:42:07,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=184668.0, ans=0.0 +2024-08-03 17:42:08,951 INFO [train.py:1114] (1/4) Epoch 14, batch 3050, loss[loss=0.2067, simple_loss=0.284, pruned_loss=0.06468, over 13541.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2814, pruned_loss=0.05697, over 2627521.03 frames. ], batch size: 35, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:42:15,156 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:42:41,260 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.107e+02 1.206e+02 1.425e+02 2.070e+02, threshold=2.412e+02, percent-clipped=0.0 +2024-08-03 17:42:43,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184851.33333333334, ans=0.1 +2024-08-03 17:42:52,661 INFO [train.py:1114] (1/4) Epoch 14, batch 3100, loss[loss=0.2192, simple_loss=0.3013, pruned_loss=0.06854, over 13373.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2808, pruned_loss=0.05651, over 2627714.40 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:42:52,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=184888.0, ans=0.125 +2024-08-03 17:42:54,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=184888.0, ans=0.125 +2024-08-03 17:43:05,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=184924.66666666666, ans=0.125 +2024-08-03 17:43:09,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184961.33333333334, ans=0.125 +2024-08-03 17:43:17,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184998.0, ans=0.0 +2024-08-03 17:43:28,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=185034.66666666666, ans=0.07 +2024-08-03 17:43:35,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.09 vs. limit=15.0 +2024-08-03 17:43:35,770 INFO [train.py:1114] (1/4) Epoch 14, batch 3150, loss[loss=0.2457, simple_loss=0.3227, pruned_loss=0.08432, over 12967.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2811, pruned_loss=0.05659, over 2628142.81 frames. ], batch size: 48, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:43:39,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=185071.33333333334, ans=0.2 +2024-08-03 17:43:55,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.72 vs. 
limit=15.0 +2024-08-03 17:44:02,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=185181.33333333334, ans=0.0 +2024-08-03 17:44:08,664 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.659e+01 1.212e+02 1.528e+02 2.079e+02 4.163e+02, threshold=3.057e+02, percent-clipped=18.0 +2024-08-03 17:44:09,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=185218.0, ans=0.125 +2024-08-03 17:44:16,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.53 vs. limit=15.0 +2024-08-03 17:44:18,868 INFO [train.py:1114] (1/4) Epoch 14, batch 3200, loss[loss=0.1763, simple_loss=0.2641, pruned_loss=0.04429, over 13542.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2809, pruned_loss=0.05653, over 2634145.65 frames. ], batch size: 37, lr: 8.79e-03, grad_scale: 16.0 +2024-08-03 17:44:27,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=185291.33333333334, ans=0.09899494936611666 +2024-08-03 17:44:39,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=185328.0, ans=0.025 +2024-08-03 17:44:40,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=185328.0, ans=0.125 +2024-08-03 17:44:46,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.79 vs. limit=15.0 +2024-08-03 17:45:01,570 INFO [train.py:1114] (1/4) Epoch 14, batch 3250, loss[loss=0.1808, simple_loss=0.273, pruned_loss=0.04425, over 13380.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2813, pruned_loss=0.05609, over 2638343.29 frames. ], batch size: 38, lr: 8.79e-03, grad_scale: 16.0 +2024-08-03 17:45:03,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=185438.0, ans=0.125 +2024-08-03 17:45:31,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=185548.0, ans=0.05 +2024-08-03 17:45:34,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=185548.0, ans=0.125 +2024-08-03 17:45:35,109 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.801e+01 1.165e+02 1.318e+02 1.646e+02 3.018e+02, threshold=2.636e+02, percent-clipped=0.0 +2024-08-03 17:45:45,437 INFO [train.py:1114] (1/4) Epoch 14, batch 3300, loss[loss=0.1965, simple_loss=0.2884, pruned_loss=0.05228, over 12930.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2798, pruned_loss=0.05553, over 2639850.67 frames. ], batch size: 52, lr: 8.78e-03, grad_scale: 16.0 +2024-08-03 17:46:11,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=185731.33333333334, ans=0.2 +2024-08-03 17:46:28,631 INFO [train.py:1114] (1/4) Epoch 14, batch 3350, loss[loss=0.2167, simple_loss=0.3051, pruned_loss=0.06416, over 13081.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2812, pruned_loss=0.05641, over 2630616.34 frames. 
], batch size: 48, lr: 8.78e-03, grad_scale: 8.0 +2024-08-03 17:46:43,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=185841.33333333334, ans=0.125 +2024-08-03 17:46:47,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=22.5 +2024-08-03 17:46:57,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185914.66666666666, ans=0.125 +2024-08-03 17:47:01,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=185914.66666666666, ans=12.0 +2024-08-03 17:47:01,822 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.039e+01 1.138e+02 1.305e+02 1.515e+02 2.289e+02, threshold=2.609e+02, percent-clipped=0.0 +2024-08-03 17:47:11,855 INFO [train.py:1114] (1/4) Epoch 14, batch 3400, loss[loss=0.174, simple_loss=0.2505, pruned_loss=0.04875, over 13575.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2808, pruned_loss=0.05644, over 2625467.22 frames. ], batch size: 31, lr: 8.78e-03, grad_scale: 8.0 +2024-08-03 17:47:34,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-08-03 17:47:44,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=186098.0, ans=0.0 +2024-08-03 17:47:46,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=186134.66666666666, ans=0.0 +2024-08-03 17:47:51,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=186134.66666666666, ans=0.125 +2024-08-03 17:47:55,939 INFO [train.py:1114] (1/4) Epoch 14, batch 3450, loss[loss=0.2213, simple_loss=0.2995, pruned_loss=0.07153, over 12946.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2801, pruned_loss=0.05573, over 2629481.13 frames. ], batch size: 52, lr: 8.77e-03, grad_scale: 8.0 +2024-08-03 17:48:01,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186171.33333333334, ans=0.125 +2024-08-03 17:48:10,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.63 vs. limit=15.0 +2024-08-03 17:48:17,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=186244.66666666666, ans=0.0 +2024-08-03 17:48:28,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.84 vs. limit=22.5 +2024-08-03 17:48:29,132 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.095e+01 1.141e+02 1.344e+02 1.531e+02 2.504e+02, threshold=2.687e+02, percent-clipped=0.0 +2024-08-03 17:48:38,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0 +2024-08-03 17:48:38,413 INFO [train.py:1114] (1/4) Epoch 14, batch 3500, loss[loss=0.1987, simple_loss=0.273, pruned_loss=0.06214, over 13530.00 frames. 
], tot_loss[loss=0.1956, simple_loss=0.2793, pruned_loss=0.05601, over 2630971.06 frames. ], batch size: 34, lr: 8.77e-03, grad_scale: 8.0
+2024-08-03 17:49:21,419 INFO [train.py:1114] (1/4) Epoch 14, batch 3550, loss[loss=0.2088, simple_loss=0.2928, pruned_loss=0.06244, over 12414.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2818, pruned_loss=0.05732, over 2629772.41 frames. ], batch size: 58, lr: 8.76e-03, grad_scale: 8.0
+2024-08-03 17:49:56,372 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.367e+01 1.245e+02 1.346e+02 1.510e+02 2.403e+02, threshold=2.693e+02, percent-clipped=0.0
+2024-08-03 17:50:06,470 INFO [train.py:1114] (1/4) Epoch 14, batch 3600, loss[loss=0.2447, simple_loss=0.315, pruned_loss=0.08721, over 9444.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2868, pruned_loss=0.06182, over 2489261.05 frames. ], batch size: 98, lr: 8.76e-03, grad_scale: 16.0
+2024-08-03 17:54:05,197 INFO [train.py:1114] (1/4) Epoch 15, batch 0, loss[loss=0.1753, simple_loss=0.2604, pruned_loss=0.04508, over 13355.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2604, pruned_loss=0.04508, over 13355.00 frames. ], batch size: 33, lr: 8.46e-03, grad_scale: 32.0
+2024-08-03 17:54:05,198 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 17:54:17,155 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1774, simple_loss=0.2778, pruned_loss=0.03851, over 944034.00 frames.
+2024-08-03 17:54:17,156 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB
+2024-08-03 17:55:12,109 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.609e+01 1.155e+02 1.252e+02 1.382e+02 2.620e+02, threshold=2.503e+02, percent-clipped=0.0
+2024-08-03 17:55:13,945 INFO [train.py:1114] (1/4) Epoch 15, batch 50, loss[loss=0.1784, simple_loss=0.2548, pruned_loss=0.05099, over 13406.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2814, pruned_loss=0.05715, over 578325.36 frames. ], batch size: 32, lr: 8.45e-03, grad_scale: 32.0
+2024-08-03 17:56:05,327 INFO [train.py:1114] (1/4) Epoch 15, batch 100, loss[loss=0.2044, simple_loss=0.2787, pruned_loss=0.06509, over 13543.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.281, pruned_loss=0.05582, over 1025803.50 frames. ], batch size: 35, lr: 8.45e-03, grad_scale: 32.0
+2024-08-03 17:56:48,422 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.485e+01 1.136e+02 1.309e+02 1.649e+02 2.921e+02, threshold=2.617e+02, percent-clipped=2.0
+2024-08-03 17:56:50,226 INFO [train.py:1114] (1/4) Epoch 15, batch 150, loss[loss=0.1605, simple_loss=0.2389, pruned_loss=0.04102, over 13443.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2802, pruned_loss=0.05571, over 1386567.55 frames. ], batch size: 32, lr: 8.44e-03, grad_scale: 32.0
+2024-08-03 17:57:43,989 INFO [train.py:1114] (1/4) Epoch 15, batch 200, loss[loss=0.1932, simple_loss=0.286, pruned_loss=0.05023, over 12318.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2782, pruned_loss=0.05485, over 1664685.73 frames. ], batch size: 58, lr: 8.44e-03, grad_scale: 32.0
+2024-08-03 17:58:29,622 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.054e+01 1.128e+02 1.299e+02 1.749e+02 3.562e+02, threshold=2.599e+02, percent-clipped=4.0
+2024-08-03 17:58:31,492 INFO [train.py:1114] (1/4) Epoch 15, batch 250, loss[loss=0.2204, simple_loss=0.306, pruned_loss=0.06743, over 13331.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2782, pruned_loss=0.05495, over 1884048.14 frames. ], batch size: 46, lr: 8.44e-03, grad_scale: 32.0
+2024-08-03 17:59:19,796 INFO [train.py:1114] (1/4) Epoch 15, batch 300, loss[loss=0.2081, simple_loss=0.2941, pruned_loss=0.06102, over 13457.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2779, pruned_loss=0.05499, over 2051291.45 frames. ], batch size: 42, lr: 8.43e-03, grad_scale: 32.0
+2024-08-03 18:00:05,573 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.050e+01 1.089e+02 1.187e+02 1.395e+02 2.688e+02, threshold=2.374e+02, percent-clipped=1.0
+2024-08-03 18:00:07,391 INFO [train.py:1114] (1/4) Epoch 15, batch 350, loss[loss=0.1936, simple_loss=0.269, pruned_loss=0.05913, over 13587.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2783, pruned_loss=0.05495, over 2181782.97 frames. ], batch size: 33, lr: 8.43e-03, grad_scale: 32.0
+2024-08-03 18:00:54,365 INFO [train.py:1114] (1/4) Epoch 15, batch 400, loss[loss=0.2029, simple_loss=0.286, pruned_loss=0.05993, over 13356.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2782, pruned_loss=0.05476, over 2286274.56 frames. ], batch size: 37, lr: 8.42e-03, grad_scale: 32.0
+2024-08-03 18:01:38,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.296e+01 1.094e+02 1.225e+02 1.629e+02 4.007e+02, threshold=2.451e+02, percent-clipped=6.0
+2024-08-03 18:01:39,621 INFO [train.py:1114] (1/4) Epoch 15, batch 450, loss[loss=0.2046, simple_loss=0.2867, pruned_loss=0.06124, over 13561.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.278, pruned_loss=0.0544, over 2360451.28 frames. ], batch size: 38, lr: 8.42e-03, grad_scale: 32.0
+2024-08-03 18:02:26,760 INFO [train.py:1114] (1/4) Epoch 15, batch 500, loss[loss=0.2014, simple_loss=0.2843, pruned_loss=0.05926, over 13418.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2772, pruned_loss=0.05422, over 2425509.27 frames. ], batch size: 43, lr: 8.42e-03, grad_scale: 32.0
+2024-08-03 18:03:15,310 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.367e+01 1.124e+02 1.290e+02 1.584e+02 2.757e+02, threshold=2.579e+02, percent-clipped=2.0
+2024-08-03 18:03:16,217 INFO [train.py:1114] (1/4) Epoch 15, batch 550, loss[loss=0.2174, simple_loss=0.3055, pruned_loss=0.06469, over 12971.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2772, pruned_loss=0.05437, over 2467438.39 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0
+2024-08-03 18:04:01,258 INFO [train.py:1114] (1/4) Epoch 15, batch 600, loss[loss=0.2127, simple_loss=0.298, pruned_loss=0.06374, over 13318.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2772, pruned_loss=0.05396, over 2508407.00 frames. ], batch size: 46, lr: 8.41e-03, grad_scale: 32.0
+2024-08-03 18:04:47,690 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.457e+01 1.170e+02 1.382e+02 2.012e+02 3.539e+02, threshold=2.764e+02, percent-clipped=13.0
+2024-08-03 18:04:48,659 INFO [train.py:1114] (1/4) Epoch 15, batch 650, loss[loss=0.1859, simple_loss=0.2764, pruned_loss=0.04768, over 13553.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2764, pruned_loss=0.05345, over 2543888.73 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 32.0
+2024-08-03 18:05:35,752 INFO [train.py:1114] (1/4) Epoch 15, batch 700, loss[loss=0.1886, simple_loss=0.276, pruned_loss=0.0506, over 13532.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2766, pruned_loss=0.05331, over 2566370.72 frames. ], batch size: 35, lr: 8.40e-03, grad_scale: 32.0
+2024-08-03 18:06:22,757 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.490e+01 1.100e+02 1.247e+02 1.589e+02 2.626e+02, threshold=2.494e+02, percent-clipped=0.0
+2024-08-03 18:06:22,794 INFO [train.py:1114] (1/4) Epoch 15, batch 750, loss[loss=0.1779, simple_loss=0.2625, pruned_loss=0.04664, over 13356.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2766, pruned_loss=0.05318, over 2582802.41 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 16.0
+2024-08-03 18:07:12,593 INFO [train.py:1114] (1/4) Epoch 15, batch 800, loss[loss=0.1771, simple_loss=0.2637, pruned_loss=0.04526, over 13329.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2763, pruned_loss=0.05303, over 2596985.41 frames. ], batch size: 33, lr: 8.39e-03, grad_scale: 32.0
+2024-08-03 18:07:59,900 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.253e+01 1.110e+02 1.253e+02 1.558e+02 2.817e+02, threshold=2.505e+02, percent-clipped=1.0
+2024-08-03 18:07:59,937 INFO [train.py:1114] (1/4) Epoch 15, batch 850, loss[loss=0.1827, simple_loss=0.2764, pruned_loss=0.04451, over 13326.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2763, pruned_loss=0.05277, over 2609681.06 frames. ], batch size: 40, lr: 8.39e-03, grad_scale: 32.0
+2024-08-03 18:08:47,479 INFO [train.py:1114] (1/4) Epoch 15, batch 900, loss[loss=0.1806, simple_loss=0.2622, pruned_loss=0.04954, over 13326.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2771, pruned_loss=0.05334, over 2612503.69 frames. ], batch size: 33, lr: 8.38e-03, grad_scale: 32.0
+2024-08-03 18:09:35,331 INFO [train.py:1114] (1/4) Epoch 15, batch 950, loss[loss=0.162, simple_loss=0.2454, pruned_loss=0.03931, over 13553.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2773, pruned_loss=0.0535, over 2612665.09 frames. ], batch size: 34, lr: 8.38e-03, grad_scale: 16.0
+2024-08-03 18:09:36,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.147e+02 1.407e+02 1.582e+02 2.602e+02, threshold=2.813e+02, percent-clipped=2.0
+2024-08-03 18:10:24,640 INFO [train.py:1114] (1/4) Epoch 15, batch 1000, loss[loss=0.1885, simple_loss=0.2713, pruned_loss=0.05279, over 13361.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2782, pruned_loss=0.05404, over 2611438.98 frames. ], batch size: 35, lr: 8.38e-03, grad_scale: 16.0
+2024-08-03 18:11:12,155 INFO [train.py:1114] (1/4) Epoch 15, batch 1050, loss[loss=0.2045, simple_loss=0.2889, pruned_loss=0.05999, over 13574.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2775, pruned_loss=0.05403, over 2615367.40 frames. ], batch size: 39, lr: 8.37e-03, grad_scale: 16.0
+2024-08-03 18:11:12,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.909e+01 1.089e+02 1.310e+02 1.512e+02 2.407e+02, threshold=2.620e+02, percent-clipped=0.0
+2024-08-03 18:11:59,031 INFO [train.py:1114] (1/4) Epoch 15, batch 1100, loss[loss=0.1711, simple_loss=0.2569, pruned_loss=0.04266, over 13570.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2769, pruned_loss=0.05381, over 2619879.95 frames. ], batch size: 36, lr: 8.37e-03, grad_scale: 16.0
+2024-08-03 18:12:45,855 INFO [train.py:1114] (1/4) Epoch 15, batch 1150, loss[loss=0.1938, simple_loss=0.2769, pruned_loss=0.05539, over 13568.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2766, pruned_loss=0.05364, over 2619718.78 frames. ], batch size: 36, lr: 8.36e-03, grad_scale: 8.0
+2024-08-03 18:12:47,595 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.143e+02 1.336e+02 1.684e+02 2.618e+02, threshold=2.671e+02, percent-clipped=0.0
+2024-08-03 18:13:40,335 INFO [train.py:1114] (1/4) Epoch 15, batch 1200, loss[loss=0.1947, simple_loss=0.2795, pruned_loss=0.055, over 13584.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2781, pruned_loss=0.05419, over 2616496.74 frames. ], batch size: 39, lr: 8.36e-03, grad_scale: 16.0
+2024-08-03 18:14:28,834 INFO [train.py:1114] (1/4) Epoch 15, batch 1250, loss[loss=0.1805, simple_loss=0.2736, pruned_loss=0.04368, over 13441.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2782, pruned_loss=0.05431, over 2628200.46 frames. ], batch size: 42, lr: 8.36e-03, grad_scale: 16.0
+2024-08-03 18:14:30,564 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.500e+01 1.104e+02 1.280e+02 1.551e+02 2.607e+02, threshold=2.559e+02, percent-clipped=0.0
+2024-08-03 18:15:14,021 INFO [train.py:1114] (1/4) Epoch 15, batch 1300, loss[loss=0.2184, simple_loss=0.3013, pruned_loss=0.06775, over 13061.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2772, pruned_loss=0.05378, over 2631282.03 frames. ], batch size: 53, lr: 8.35e-03, grad_scale: 16.0
+2024-08-03 18:16:01,023 INFO [train.py:1114] (1/4) Epoch 15, batch 1350, loss[loss=0.2169, simple_loss=0.2992, pruned_loss=0.06729, over 13537.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2768, pruned_loss=0.05372, over 2638675.76 frames. ], batch size: 37, lr: 8.35e-03, grad_scale: 16.0
+2024-08-03 18:16:02,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.149e+02 1.398e+02 1.802e+02 2.548e+02, threshold=2.797e+02, percent-clipped=0.0
+2024-08-03 18:16:47,827 INFO [train.py:1114] (1/4) Epoch 15, batch 1400, loss[loss=0.1801, simple_loss=0.2551, pruned_loss=0.05259, over 13224.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2764, pruned_loss=0.05339, over 2642564.59 frames. ], batch size: 31, lr: 8.34e-03, grad_scale: 16.0
+2024-08-03 18:17:39,152 INFO [train.py:1114] (1/4) Epoch 15, batch 1450, loss[loss=0.1974, simple_loss=0.2875, pruned_loss=0.0536, over 13387.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2773, pruned_loss=0.05408, over 2640982.07 frames. ], batch size: 43, lr: 8.34e-03, grad_scale: 16.0
+2024-08-03 18:17:40,948 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.258e+01 1.126e+02 1.352e+02 1.648e+02 3.700e+02, threshold=2.704e+02, percent-clipped=1.0
+2024-08-03 18:18:24,967 INFO [train.py:1114] (1/4) Epoch 15, batch 1500, loss[loss=0.184, simple_loss=0.2815, pruned_loss=0.04322, over 13399.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2775, pruned_loss=0.05394, over 2641609.22 frames. ], batch size: 39, lr: 8.34e-03, grad_scale: 16.0
+2024-08-03 18:19:12,238 INFO [train.py:1114] (1/4) Epoch 15, batch 1550, loss[loss=0.2158, simple_loss=0.3026, pruned_loss=0.06453, over 13405.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2774, pruned_loss=0.05408, over 2630797.20 frames. ], batch size: 41, lr: 8.33e-03, grad_scale: 16.0
+2024-08-03 18:19:14,134 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.182e+01 1.117e+02 1.315e+02 1.608e+02 2.647e+02, threshold=2.631e+02, percent-clipped=0.0
+2024-08-03 18:19:59,536 INFO [train.py:1114] (1/4) Epoch 15, batch 1600, loss[loss=0.2266, simple_loss=0.3074, pruned_loss=0.07288, over 13571.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2775, pruned_loss=0.05441, over 2623567.61 frames. ], batch size: 39, lr: 8.33e-03, grad_scale: 32.0
+2024-08-03 18:20:47,091 INFO [train.py:1114] (1/4) Epoch 15, batch 1650, loss[loss=0.1836, simple_loss=0.2807, pruned_loss=0.04319, over 13314.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2773, pruned_loss=0.0543, over 2620799.20 frames. ], batch size: 40, lr: 8.33e-03, grad_scale: 32.0
+2024-08-03 18:20:48,899 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.213e+01 1.145e+02 1.280e+02 1.838e+02 3.870e+02, threshold=2.560e+02, percent-clipped=5.0
+2024-08-03 18:21:34,182 INFO [train.py:1114] (1/4) Epoch 15, batch 1700, loss[loss=0.1745, simple_loss=0.2518, pruned_loss=0.0486, over 13256.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2772, pruned_loss=0.05413, over 2629464.34 frames. ], batch size: 31, lr: 8.32e-03, grad_scale: 32.0
+2024-08-03 18:22:19,611 INFO [train.py:1114] (1/4) Epoch 15, batch 1750, loss[loss=0.1632, simple_loss=0.2415, pruned_loss=0.04245, over 13537.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2766, pruned_loss=0.05382, over 2633276.46 frames. ], batch size: 31, lr: 8.32e-03, grad_scale: 32.0
+2024-08-03 18:22:21,329 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.690e+01 1.123e+02 1.340e+02 1.586e+02 3.403e+02, threshold=2.681e+02, percent-clipped=7.0
+2024-08-03 18:23:06,733 INFO [train.py:1114] (1/4) Epoch 15, batch 1800, loss[loss=0.2011, simple_loss=0.2879, pruned_loss=0.05721, over 13552.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2773, pruned_loss=0.05417, over 2633830.96 frames. ], batch size: 38, lr: 8.31e-03, grad_scale: 32.0
+2024-08-03 18:23:55,686 INFO [train.py:1114] (1/4) Epoch 15, batch 1850, loss[loss=0.1996, simple_loss=0.2864, pruned_loss=0.05641, over 13393.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.277, pruned_loss=0.05401, over 2635650.69 frames. ], batch size: 39, lr: 8.31e-03, grad_scale: 32.0
+2024-08-03 18:23:57,398 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.035e+01 1.191e+02 1.556e+02 2.123e+02 2.973e+02, threshold=3.112e+02, percent-clipped=3.0
+2024-08-03 18:24:44,747 INFO [train.py:1114] (1/4) Epoch 15, batch 1900, loss[loss=0.198, simple_loss=0.2878, pruned_loss=0.0541, over 13325.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2779, pruned_loss=0.05441, over 2638410.91 frames. ], batch size: 40, lr: 8.31e-03, grad_scale: 32.0
+2024-08-03 18:25:29,773 INFO [train.py:1114] (1/4) Epoch 15, batch 1950, loss[loss=0.1777, simple_loss=0.2719, pruned_loss=0.04174, over 13574.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.279, pruned_loss=0.05464, over 2645283.01 frames. ], batch size: 36, lr: 8.30e-03, grad_scale: 32.0
+2024-08-03 18:25:31,620 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.973e+01 1.188e+02 1.452e+02 1.828e+02 3.234e+02, threshold=2.903e+02, percent-clipped=1.0
+2024-08-03 18:26:15,834 INFO [train.py:1114] (1/4) Epoch 15, batch 2000, loss[loss=0.1683, simple_loss=0.2473, pruned_loss=0.04471, over 13512.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2798, pruned_loss=0.05506, over 2634638.08 frames. ], batch size: 31, lr: 8.30e-03, grad_scale: 32.0
+2024-08-03 18:27:06,153 INFO [train.py:1114] (1/4) Epoch 15, batch 2050, loss[loss=0.1551, simple_loss=0.2388, pruned_loss=0.03564, over 13423.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.279, pruned_loss=0.05506, over 2632107.67 frames. ], batch size: 32, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:27:07,806 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.454e+01 1.180e+02 1.343e+02 1.712e+02 4.642e+02, threshold=2.687e+02, percent-clipped=2.0
+2024-08-03 18:27:51,118 INFO [train.py:1114] (1/4) Epoch 15, batch 2100, loss[loss=0.174, simple_loss=0.2712, pruned_loss=0.03835, over 13542.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2782, pruned_loss=0.05448, over 2638072.84 frames. ], batch size: 37, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:28:40,554 INFO [train.py:1114] (1/4) Epoch 15, batch 2150, loss[loss=0.1868, simple_loss=0.2693, pruned_loss=0.05216, over 13560.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2771, pruned_loss=0.05365, over 2646884.39 frames. ], batch size: 36, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:28:42,289 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.112e+02 1.243e+02 1.782e+02 4.136e+02, threshold=2.485e+02, percent-clipped=5.0
+2024-08-03 18:29:25,500 INFO [train.py:1114] (1/4) Epoch 15, batch 2200, loss[loss=0.1976, simple_loss=0.2872, pruned_loss=0.05403, over 13397.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2765, pruned_loss=0.05319, over 2645415.17 frames. ], batch size: 39, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:30:13,223 INFO [train.py:1114] (1/4) Epoch 15, batch 2250, loss[loss=0.177, simple_loss=0.2686, pruned_loss=0.04263, over 13358.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2773, pruned_loss=0.05393, over 2642925.04 frames. ], batch size: 37, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:30:15,094 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.201e+02 1.486e+02 1.910e+02 3.582e+02, threshold=2.971e+02, percent-clipped=11.0
+2024-08-03 18:31:01,064 INFO [train.py:1114] (1/4) Epoch 15, batch 2300, loss[loss=0.1664, simple_loss=0.2492, pruned_loss=0.04185, over 13583.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2773, pruned_loss=0.05422, over 2639350.80 frames. ], batch size: 33, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:31:58,483 INFO [train.py:1114] (1/4) Epoch 15, batch 2350, loss[loss=0.1956, simple_loss=0.2899, pruned_loss=0.05063, over 13554.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2769, pruned_loss=0.05376, over 2641146.10 frames. ], batch size: 38, lr: 8.27e-03, grad_scale: 32.0
+2024-08-03 18:32:00,492 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.540e+01 1.095e+02 1.335e+02 1.545e+02 2.606e+02, threshold=2.670e+02, percent-clipped=0.0
+2024-08-03 18:32:44,896 INFO [train.py:1114] (1/4) Epoch 15, batch 2400, loss[loss=0.1813, simple_loss=0.2668, pruned_loss=0.04793, over 13524.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2774, pruned_loss=0.05379, over 2642630.14 frames. ], batch size: 35, lr: 8.27e-03, grad_scale: 32.0
+2024-08-03 18:33:30,325 INFO [train.py:1114] (1/4) Epoch 15, batch 2450, loss[loss=0.2183, simple_loss=0.3025, pruned_loss=0.06701, over 13355.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2786, pruned_loss=0.05437, over 2632423.82 frames. 
], batch size: 37, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:33:33,024 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.379e+01 1.105e+02 1.269e+02 1.556e+02 2.604e+02, threshold=2.537e+02, percent-clipped=0.0 +2024-08-03 18:33:33,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=195855.0, ans=0.0 +2024-08-03 18:33:59,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195965.0, ans=0.1 +2024-08-03 18:34:17,546 INFO [train.py:1114] (1/4) Epoch 15, batch 2500, loss[loss=0.1835, simple_loss=0.2718, pruned_loss=0.04762, over 13408.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2787, pruned_loss=0.05455, over 2636507.34 frames. ], batch size: 39, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:34:24,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=196038.33333333334, ans=0.125 +2024-08-03 18:34:25,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-03 18:34:32,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196075.0, ans=0.125 +2024-08-03 18:34:53,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196148.33333333334, ans=0.0 +2024-08-03 18:34:54,436 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:34:58,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=196185.0, ans=10.0 +2024-08-03 18:35:04,093 INFO [train.py:1114] (1/4) Epoch 15, batch 2550, loss[loss=0.1753, simple_loss=0.2536, pruned_loss=0.04848, over 13536.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2782, pruned_loss=0.05417, over 2637991.95 frames. ], batch size: 31, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:35:05,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=196221.66666666666, ans=0.07 +2024-08-03 18:35:06,686 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.866e+01 1.097e+02 1.275e+02 1.738e+02 2.775e+02, threshold=2.550e+02, percent-clipped=2.0 +2024-08-03 18:35:25,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=196295.0, ans=0.025 +2024-08-03 18:35:47,178 INFO [train.py:1114] (1/4) Epoch 15, batch 2600, loss[loss=0.1831, simple_loss=0.2696, pruned_loss=0.04832, over 13564.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2789, pruned_loss=0.05442, over 2637290.84 frames. ], batch size: 36, lr: 8.25e-03, grad_scale: 16.0 +2024-08-03 18:35:47,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=196405.0, ans=0.125 +2024-08-03 18:35:51,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.98 vs. 
limit=15.0 +2024-08-03 18:36:08,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.44 vs. limit=10.0 +2024-08-03 18:36:14,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=196515.0, ans=0.2 +2024-08-03 18:36:30,677 INFO [train.py:1114] (1/4) Epoch 15, batch 2650, loss[loss=0.1866, simple_loss=0.2704, pruned_loss=0.0514, over 13317.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2786, pruned_loss=0.05423, over 2640710.69 frames. ], batch size: 46, lr: 8.25e-03, grad_scale: 16.0 +2024-08-03 18:36:33,202 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.155e+01 1.086e+02 1.260e+02 1.535e+02 2.930e+02, threshold=2.521e+02, percent-clipped=3.0 +2024-08-03 18:36:40,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=196625.0, ans=0.07 +2024-08-03 18:36:41,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196625.0, ans=0.1 +2024-08-03 18:37:04,489 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:37:17,597 INFO [train.py:1114] (1/4) Epoch 15, batch 2700, loss[loss=0.2042, simple_loss=0.2923, pruned_loss=0.05802, over 13546.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2782, pruned_loss=0.05407, over 2637766.92 frames. ], batch size: 40, lr: 8.24e-03, grad_scale: 16.0 +2024-08-03 18:37:22,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=196771.66666666666, ans=0.025 +2024-08-03 18:37:26,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=196808.33333333334, ans=0.025 +2024-08-03 18:37:39,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0 +2024-08-03 18:37:49,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=196881.66666666666, ans=0.125 +2024-08-03 18:37:53,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=196918.33333333334, ans=0.2 +2024-08-03 18:38:00,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.54 vs. limit=10.0 +2024-08-03 18:38:01,016 INFO [train.py:1114] (1/4) Epoch 15, batch 2750, loss[loss=0.1772, simple_loss=0.2585, pruned_loss=0.04801, over 13314.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2772, pruned_loss=0.05395, over 2634881.49 frames. 
], batch size: 34, lr: 8.24e-03, grad_scale: 16.0 +2024-08-03 18:38:03,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.526e+01 1.073e+02 1.243e+02 1.451e+02 2.224e+02, threshold=2.486e+02, percent-clipped=0.0 +2024-08-03 18:38:04,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=196955.0, ans=0.125 +2024-08-03 18:38:19,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=197028.33333333334, ans=0.125 +2024-08-03 18:38:36,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=197101.66666666666, ans=0.125 +2024-08-03 18:38:43,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=197138.33333333334, ans=0.0 +2024-08-03 18:38:44,475 INFO [train.py:1114] (1/4) Epoch 15, batch 2800, loss[loss=0.2653, simple_loss=0.3246, pruned_loss=0.1029, over 9626.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2782, pruned_loss=0.05476, over 2626792.16 frames. ], batch size: 97, lr: 8.24e-03, grad_scale: 32.0 +2024-08-03 18:38:55,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197175.0, ans=0.125 +2024-08-03 18:38:58,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=197175.0, ans=0.125 +2024-08-03 18:39:03,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=197211.66666666666, ans=0.0 +2024-08-03 18:39:07,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197211.66666666666, ans=0.1 +2024-08-03 18:39:12,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=15.0 +2024-08-03 18:39:22,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197285.0, ans=0.125 +2024-08-03 18:39:27,872 INFO [train.py:1114] (1/4) Epoch 15, batch 2850, loss[loss=0.1641, simple_loss=0.2482, pruned_loss=0.04, over 13355.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2789, pruned_loss=0.05521, over 2621567.77 frames. ], batch size: 35, lr: 8.23e-03, grad_scale: 32.0 +2024-08-03 18:39:30,443 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.971e+01 1.137e+02 1.312e+02 1.532e+02 3.029e+02, threshold=2.624e+02, percent-clipped=2.0 +2024-08-03 18:39:32,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197321.66666666666, ans=0.125 +2024-08-03 18:39:59,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=197431.66666666666, ans=0.0 +2024-08-03 18:40:05,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197468.33333333334, ans=0.1 +2024-08-03 18:40:11,207 INFO [train.py:1114] (1/4) Epoch 15, batch 2900, loss[loss=0.2321, simple_loss=0.3082, pruned_loss=0.07801, over 13353.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2793, pruned_loss=0.05477, over 2632588.58 frames. 
], batch size: 36, lr: 8.23e-03, grad_scale: 32.0 +2024-08-03 18:40:11,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=197505.0, ans=0.0 +2024-08-03 18:40:16,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=197505.0, ans=0.125 +2024-08-03 18:40:44,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=197615.0, ans=0.1 +2024-08-03 18:40:47,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.49 vs. limit=15.0 +2024-08-03 18:40:52,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=197651.66666666666, ans=0.1 +2024-08-03 18:40:54,520 INFO [train.py:1114] (1/4) Epoch 15, batch 2950, loss[loss=0.1686, simple_loss=0.2589, pruned_loss=0.03914, over 13329.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2779, pruned_loss=0.05442, over 2630576.05 frames. ], batch size: 34, lr: 8.23e-03, grad_scale: 32.0 +2024-08-03 18:40:57,043 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.111e+02 1.238e+02 1.494e+02 2.430e+02, threshold=2.476e+02, percent-clipped=0.0 +2024-08-03 18:41:17,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=197761.66666666666, ans=0.2 +2024-08-03 18:41:19,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.27 vs. limit=15.0 +2024-08-03 18:41:23,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197798.33333333334, ans=0.125 +2024-08-03 18:41:38,131 INFO [train.py:1114] (1/4) Epoch 15, batch 3000, loss[loss=0.1855, simple_loss=0.2753, pruned_loss=0.04781, over 13554.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.278, pruned_loss=0.0547, over 2631453.91 frames. ], batch size: 37, lr: 8.22e-03, grad_scale: 32.0 +2024-08-03 18:41:38,132 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 18:41:48,175 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2717, pruned_loss=0.03605, over 944034.00 frames. +2024-08-03 18:41:48,176 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 18:41:58,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=197908.33333333334, ans=0.125 +2024-08-03 18:41:59,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=197908.33333333334, ans=0.2 +2024-08-03 18:42:04,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.90 vs. limit=22.5 +2024-08-03 18:42:04,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.11 vs. 
limit=22.5 +2024-08-03 18:42:11,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=197945.0, ans=0.2 +2024-08-03 18:42:12,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=197945.0, ans=0.025 +2024-08-03 18:42:15,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=197981.66666666666, ans=0.125 +2024-08-03 18:42:18,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.23 vs. limit=15.0 +2024-08-03 18:42:28,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=198018.33333333334, ans=0.2 +2024-08-03 18:42:32,396 INFO [train.py:1114] (1/4) Epoch 15, batch 3050, loss[loss=0.1706, simple_loss=0.256, pruned_loss=0.04262, over 13525.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.278, pruned_loss=0.05429, over 2627278.37 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 32.0 +2024-08-03 18:42:35,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.644e+01 1.092e+02 1.243e+02 1.494e+02 2.695e+02, threshold=2.487e+02, percent-clipped=3.0 +2024-08-03 18:42:35,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.58 vs. limit=15.0 +2024-08-03 18:42:40,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=198091.66666666666, ans=0.125 +2024-08-03 18:42:57,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0 +2024-08-03 18:42:59,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198165.0, ans=0.1 +2024-08-03 18:43:10,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.19 vs. limit=15.0 +2024-08-03 18:43:12,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.52 vs. limit=22.5 +2024-08-03 18:43:16,338 INFO [train.py:1114] (1/4) Epoch 15, batch 3100, loss[loss=0.2125, simple_loss=0.3016, pruned_loss=0.06172, over 13265.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2773, pruned_loss=0.0538, over 2627030.22 frames. ], batch size: 46, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:43:23,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198238.33333333334, ans=0.125 +2024-08-03 18:43:24,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198275.0, ans=0.1 +2024-08-03 18:43:28,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.60 vs. limit=6.0 +2024-08-03 18:43:30,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.05 vs. 
limit=15.0 +2024-08-03 18:43:42,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=198348.33333333334, ans=0.0 +2024-08-03 18:43:54,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.56 vs. limit=15.0 +2024-08-03 18:44:00,848 INFO [train.py:1114] (1/4) Epoch 15, batch 3150, loss[loss=0.2085, simple_loss=0.3017, pruned_loss=0.05766, over 12986.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2766, pruned_loss=0.05316, over 2628918.97 frames. ], batch size: 48, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:44:03,294 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.819e+01 1.142e+02 1.468e+02 1.823e+02 3.085e+02, threshold=2.937e+02, percent-clipped=3.0 +2024-08-03 18:44:13,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=198458.33333333334, ans=0.125 +2024-08-03 18:44:27,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=198458.33333333334, ans=0.0 +2024-08-03 18:44:32,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198495.0, ans=0.125 +2024-08-03 18:44:32,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=198495.0, ans=0.0 +2024-08-03 18:44:35,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=198495.0, ans=0.125 +2024-08-03 18:44:35,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.77 vs. limit=15.0 +2024-08-03 18:44:44,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=198531.66666666666, ans=0.125 +2024-08-03 18:44:54,992 INFO [train.py:1114] (1/4) Epoch 15, batch 3200, loss[loss=0.1885, simple_loss=0.2769, pruned_loss=0.05007, over 13539.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2767, pruned_loss=0.05344, over 2634827.79 frames. ], batch size: 37, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:45:19,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=198678.33333333334, ans=0.2 +2024-08-03 18:45:22,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=15.0 +2024-08-03 18:45:25,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.84 vs. limit=5.0 +2024-08-03 18:45:37,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=198788.33333333334, ans=0.125 +2024-08-03 18:45:38,328 INFO [train.py:1114] (1/4) Epoch 15, batch 3250, loss[loss=0.1827, simple_loss=0.2697, pruned_loss=0.04782, over 13392.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2767, pruned_loss=0.05316, over 2639254.55 frames. 
], batch size: 38, lr: 8.20e-03, grad_scale: 32.0 +2024-08-03 18:45:38,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198788.33333333334, ans=0.125 +2024-08-03 18:45:40,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.293e+01 1.129e+02 1.312e+02 1.541e+02 2.254e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 18:45:45,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.42 vs. limit=6.0 +2024-08-03 18:45:54,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=198825.0, ans=0.125 +2024-08-03 18:46:04,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.45 vs. limit=15.0 +2024-08-03 18:46:07,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=198898.33333333334, ans=0.0 +2024-08-03 18:46:13,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=198935.0, ans=0.125 +2024-08-03 18:46:21,421 INFO [train.py:1114] (1/4) Epoch 15, batch 3300, loss[loss=0.2033, simple_loss=0.2941, pruned_loss=0.05621, over 12968.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2759, pruned_loss=0.05291, over 2640628.75 frames. ], batch size: 52, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:46:28,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=198971.66666666666, ans=0.125 +2024-08-03 18:46:34,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199008.33333333334, ans=0.125 +2024-08-03 18:46:40,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0 +2024-08-03 18:46:43,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=199045.0, ans=0.125 +2024-08-03 18:46:57,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=199118.33333333334, ans=0.125 +2024-08-03 18:46:57,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199118.33333333334, ans=0.1 +2024-08-03 18:46:58,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=199118.33333333334, ans=0.125 +2024-08-03 18:47:04,155 INFO [train.py:1114] (1/4) Epoch 15, batch 3350, loss[loss=0.2119, simple_loss=0.2894, pruned_loss=0.06715, over 13115.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2773, pruned_loss=0.05385, over 2630208.19 frames. 
], batch size: 48, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:47:07,471 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.754e+01 1.143e+02 1.314e+02 1.590e+02 2.231e+02, threshold=2.628e+02, percent-clipped=0.0 +2024-08-03 18:47:08,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199155.0, ans=0.125 +2024-08-03 18:47:15,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199191.66666666666, ans=0.125 +2024-08-03 18:47:22,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199228.33333333334, ans=0.1 +2024-08-03 18:47:25,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199228.33333333334, ans=0.125 +2024-08-03 18:47:25,795 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.26 vs. limit=15.0 +2024-08-03 18:47:26,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199228.33333333334, ans=0.125 +2024-08-03 18:47:27,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=199228.33333333334, ans=0.0 +2024-08-03 18:47:28,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=199228.33333333334, ans=0.125 +2024-08-03 18:47:38,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199301.66666666666, ans=0.1 +2024-08-03 18:47:42,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=199301.66666666666, ans=0.025 +2024-08-03 18:47:47,283 INFO [train.py:1114] (1/4) Epoch 15, batch 3400, loss[loss=0.1691, simple_loss=0.2412, pruned_loss=0.04852, over 13559.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2773, pruned_loss=0.05404, over 2625603.67 frames. ], batch size: 31, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:47:49,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=199338.33333333334, ans=0.1 +2024-08-03 18:47:59,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.61 vs. limit=12.0 +2024-08-03 18:48:22,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=199485.0, ans=0.015 +2024-08-03 18:48:29,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.29 vs. limit=22.5 +2024-08-03 18:48:30,463 INFO [train.py:1114] (1/4) Epoch 15, batch 3450, loss[loss=0.2169, simple_loss=0.2992, pruned_loss=0.06727, over 12984.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.278, pruned_loss=0.05411, over 2629785.24 frames. 
], batch size: 52, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:48:32,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=199521.66666666666, ans=0.125 +2024-08-03 18:48:33,769 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.288e+01 1.166e+02 1.372e+02 1.679e+02 2.920e+02, threshold=2.743e+02, percent-clipped=3.0 +2024-08-03 18:48:36,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.29 vs. limit=10.0 +2024-08-03 18:48:36,769 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0 +2024-08-03 18:48:45,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199558.33333333334, ans=0.125 +2024-08-03 18:48:54,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=199595.0, ans=0.125 +2024-08-03 18:48:59,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=199631.66666666666, ans=0.0 +2024-08-03 18:49:13,502 INFO [train.py:1114] (1/4) Epoch 15, batch 3500, loss[loss=0.1604, simple_loss=0.2475, pruned_loss=0.03667, over 13540.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2771, pruned_loss=0.05438, over 2631375.08 frames. ], batch size: 34, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:49:36,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199778.33333333334, ans=0.125 +2024-08-03 18:49:40,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199815.0, ans=0.125 +2024-08-03 18:49:50,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=199851.66666666666, ans=0.0 +2024-08-03 18:49:55,944 INFO [train.py:1114] (1/4) Epoch 15, batch 3550, loss[loss=0.1852, simple_loss=0.2825, pruned_loss=0.04398, over 12329.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2797, pruned_loss=0.05586, over 2629526.97 frames. ], batch size: 58, lr: 8.18e-03, grad_scale: 16.0 +2024-08-03 18:49:57,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=199888.33333333334, ans=0.0 +2024-08-03 18:49:59,302 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.733e+01 1.107e+02 1.282e+02 1.546e+02 2.459e+02, threshold=2.565e+02, percent-clipped=0.0 +2024-08-03 18:50:04,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=199925.0, ans=0.125 +2024-08-03 18:50:04,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199925.0, ans=0.1 +2024-08-03 18:50:10,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199925.0, ans=0.125 +2024-08-03 18:50:18,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.73 vs. 
limit=15.0 +2024-08-03 18:50:20,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=199961.66666666666, ans=0.125 +2024-08-03 18:50:23,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=199998.33333333334, ans=0.125 +2024-08-03 18:50:26,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=199998.33333333334, ans=0.125 +2024-08-03 18:50:40,662 INFO [train.py:1114] (1/4) Epoch 15, batch 3600, loss[loss=0.2628, simple_loss=0.3287, pruned_loss=0.09843, over 9227.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2835, pruned_loss=0.05922, over 2487962.07 frames. ], batch size: 96, lr: 8.18e-03, grad_scale: 32.0 +2024-08-03 18:50:42,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=200071.66666666666, ans=0.125 +2024-08-03 18:50:49,045 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.02 vs. limit=22.5 +2024-08-03 18:51:04,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200145.0, ans=0.1 +2024-08-03 18:52:09,587 INFO [train.py:1114] (1/4) Epoch 16, batch 0, loss[loss=0.1597, simple_loss=0.2428, pruned_loss=0.03825, over 13343.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2428, pruned_loss=0.03825, over 13343.00 frames. ], batch size: 33, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:52:09,588 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 18:52:19,527 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1763, simple_loss=0.2767, pruned_loss=0.03798, over 944034.00 frames. +2024-08-03 18:52:19,528 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 18:52:26,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=200222.0, ans=0.0 +2024-08-03 18:52:29,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.01 vs. limit=15.0 +2024-08-03 18:52:29,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=200258.66666666666, ans=0.0 +2024-08-03 18:52:31,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=200258.66666666666, ans=0.125 +2024-08-03 18:52:33,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.265e+01 1.242e+02 1.434e+02 1.560e+02 1.878e+02, threshold=2.867e+02, percent-clipped=0.0 +2024-08-03 18:52:36,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=200258.66666666666, ans=0.125 +2024-08-03 18:52:42,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=200295.33333333334, ans=0.2 +2024-08-03 18:52:47,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.85 vs. 
limit=15.0 +2024-08-03 18:53:01,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200368.66666666666, ans=0.125 +2024-08-03 18:53:05,367 INFO [train.py:1114] (1/4) Epoch 16, batch 50, loss[loss=0.2079, simple_loss=0.2799, pruned_loss=0.06789, over 13440.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2801, pruned_loss=0.05623, over 577966.39 frames. ], batch size: 32, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:53:09,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=200405.33333333334, ans=0.2 +2024-08-03 18:53:22,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.57 vs. limit=15.0 +2024-08-03 18:53:36,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=200515.33333333334, ans=0.0 +2024-08-03 18:53:40,931 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:53:50,758 INFO [train.py:1114] (1/4) Epoch 16, batch 100, loss[loss=0.1756, simple_loss=0.2612, pruned_loss=0.04505, over 13546.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2812, pruned_loss=0.05584, over 1024909.11 frames. ], batch size: 35, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:54:06,122 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.140e+02 1.333e+02 1.689e+02 2.611e+02, threshold=2.666e+02, percent-clipped=0.0 +2024-08-03 18:54:06,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=200625.33333333334, ans=0.125 +2024-08-03 18:54:10,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200625.33333333334, ans=0.125 +2024-08-03 18:54:27,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=200698.66666666666, ans=0.0 +2024-08-03 18:54:28,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=200698.66666666666, ans=0.0 +2024-08-03 18:54:39,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=200735.33333333334, ans=0.2 +2024-08-03 18:54:43,433 INFO [train.py:1114] (1/4) Epoch 16, batch 150, loss[loss=0.1608, simple_loss=0.2437, pruned_loss=0.03892, over 13442.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2778, pruned_loss=0.05383, over 1386538.97 frames. 
], batch size: 32, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:54:49,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=200772.0, ans=0.125 +2024-08-03 18:54:49,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200772.0, ans=0.1 +2024-08-03 18:55:05,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=200845.33333333334, ans=0.125 +2024-08-03 18:55:13,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=200882.0, ans=0.125 +2024-08-03 18:55:17,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. limit=6.0 +2024-08-03 18:55:20,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0 +2024-08-03 18:55:29,203 INFO [train.py:1114] (1/4) Epoch 16, batch 200, loss[loss=0.188, simple_loss=0.2764, pruned_loss=0.04983, over 12582.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2757, pruned_loss=0.05282, over 1666036.90 frames. ], batch size: 59, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:55:37,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0 +2024-08-03 18:55:42,873 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.144e+01 1.093e+02 1.229e+02 1.620e+02 3.492e+02, threshold=2.459e+02, percent-clipped=5.0 +2024-08-03 18:56:03,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=201065.33333333334, ans=0.125 +2024-08-03 18:56:10,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=201102.0, ans=0.09899494936611666 +2024-08-03 18:56:15,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=201102.0, ans=0.125 +2024-08-03 18:56:18,892 INFO [train.py:1114] (1/4) Epoch 16, batch 250, loss[loss=0.2188, simple_loss=0.3002, pruned_loss=0.06874, over 13353.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2761, pruned_loss=0.05307, over 1885435.21 frames. 
], batch size: 46, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:56:23,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=201138.66666666666, ans=0.2 +2024-08-03 18:56:38,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=201212.0, ans=0.0 +2024-08-03 18:56:44,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=201212.0, ans=0.125 +2024-08-03 18:56:55,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=201285.33333333334, ans=22.5 +2024-08-03 18:56:59,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=201285.33333333334, ans=0.125 +2024-08-03 18:57:05,285 INFO [train.py:1114] (1/4) Epoch 16, batch 300, loss[loss=0.2008, simple_loss=0.2884, pruned_loss=0.05664, over 13451.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2756, pruned_loss=0.05287, over 2051542.61 frames. ], batch size: 42, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:57:11,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=201322.0, ans=0.0 +2024-08-03 18:57:19,199 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.378e+01 1.118e+02 1.251e+02 1.604e+02 3.551e+02, threshold=2.502e+02, percent-clipped=3.0 +2024-08-03 18:57:37,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=201432.0, ans=0.125 +2024-08-03 18:57:47,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=201468.66666666666, ans=0.0 +2024-08-03 18:57:58,323 INFO [train.py:1114] (1/4) Epoch 16, batch 350, loss[loss=0.1719, simple_loss=0.251, pruned_loss=0.04638, over 13582.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2761, pruned_loss=0.05309, over 2182371.78 frames. ], batch size: 33, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:57:59,911 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.80 vs. limit=15.0 +2024-08-03 18:58:05,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201505.33333333334, ans=0.125 +2024-08-03 18:58:19,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-08-03 18:58:28,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.33 vs. limit=10.0 +2024-08-03 18:58:43,656 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:58:44,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=201652.0, ans=0.1 +2024-08-03 18:58:48,033 INFO [train.py:1114] (1/4) Epoch 16, batch 400, loss[loss=0.1934, simple_loss=0.2846, pruned_loss=0.0511, over 13361.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2754, pruned_loss=0.05282, over 2286679.20 frames. 
], batch size: 37, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:58:53,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=201688.66666666666, ans=0.2 +2024-08-03 18:58:58,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=201725.33333333334, ans=0.125 +2024-08-03 18:59:01,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.809e+01 1.125e+02 1.305e+02 1.618e+02 2.689e+02, threshold=2.611e+02, percent-clipped=3.0 +2024-08-03 18:59:05,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=201762.0, ans=0.5 +2024-08-03 18:59:09,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.97 vs. limit=12.0 +2024-08-03 18:59:11,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.51 vs. limit=15.0 +2024-08-03 18:59:32,888 INFO [train.py:1114] (1/4) Epoch 16, batch 450, loss[loss=0.1971, simple_loss=0.2908, pruned_loss=0.05165, over 13546.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2757, pruned_loss=0.05284, over 2360724.47 frames. ], batch size: 38, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:59:48,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-08-03 19:00:01,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201982.0, ans=0.125 +2024-08-03 19:00:09,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=202018.66666666666, ans=0.125 +2024-08-03 19:00:52,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=202055.33333333334, ans=0.125 +2024-08-03 19:00:52,722 INFO [train.py:1114] (1/4) Epoch 16, batch 500, loss[loss=0.2103, simple_loss=0.2992, pruned_loss=0.06066, over 13430.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.275, pruned_loss=0.05215, over 2424905.43 frames. ], batch size: 43, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 19:01:06,065 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.073e+02 1.280e+02 1.513e+02 2.984e+02, threshold=2.559e+02, percent-clipped=3.0 +2024-08-03 19:01:06,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202092.0, ans=0.125 +2024-08-03 19:01:36,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=15.0 +2024-08-03 19:01:37,604 INFO [train.py:1114] (1/4) Epoch 16, batch 550, loss[loss=0.2113, simple_loss=0.2966, pruned_loss=0.06299, over 13080.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.275, pruned_loss=0.05213, over 2466645.91 frames. ], batch size: 48, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:01:47,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.14 vs. 
limit=10.0 +2024-08-03 19:01:48,064 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.99 vs. limit=10.0 +2024-08-03 19:02:07,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202348.66666666666, ans=0.1 +2024-08-03 19:02:18,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-08-03 19:02:30,467 INFO [train.py:1114] (1/4) Epoch 16, batch 600, loss[loss=0.191, simple_loss=0.2772, pruned_loss=0.05238, over 13364.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2754, pruned_loss=0.05237, over 2506758.63 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:02:33,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202422.0, ans=0.0 +2024-08-03 19:02:36,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=202422.0, ans=6.0 +2024-08-03 19:02:42,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=202458.66666666666, ans=0.09899494936611666 +2024-08-03 19:02:43,888 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.542e+01 1.053e+02 1.176e+02 1.457e+02 2.332e+02, threshold=2.351e+02, percent-clipped=0.0 +2024-08-03 19:02:48,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=202495.33333333334, ans=0.04949747468305833 +2024-08-03 19:02:54,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=202495.33333333334, ans=0.5 +2024-08-03 19:03:15,280 INFO [train.py:1114] (1/4) Epoch 16, batch 650, loss[loss=0.1896, simple_loss=0.2773, pruned_loss=0.05099, over 13549.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2751, pruned_loss=0.05198, over 2542783.68 frames. ], batch size: 37, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:03:25,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202642.0, ans=0.1 +2024-08-03 19:03:27,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=202642.0, ans=0.0 +2024-08-03 19:03:28,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-08-03 19:03:37,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202678.66666666666, ans=0.1 +2024-08-03 19:03:40,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=202678.66666666666, ans=0.0 +2024-08-03 19:03:52,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.76 vs. 
limit=15.0 +2024-08-03 19:03:59,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202788.66666666666, ans=0.0 +2024-08-03 19:04:00,439 INFO [train.py:1114] (1/4) Epoch 16, batch 700, loss[loss=0.1604, simple_loss=0.258, pruned_loss=0.03138, over 13514.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2759, pruned_loss=0.05217, over 2565019.45 frames. ], batch size: 35, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:12,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=202825.33333333334, ans=0.2 +2024-08-03 19:04:13,873 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.401e+01 1.143e+02 1.370e+02 1.738e+02 3.116e+02, threshold=2.740e+02, percent-clipped=8.0 +2024-08-03 19:04:32,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=202898.66666666666, ans=0.125 +2024-08-03 19:04:43,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=202935.33333333334, ans=0.0 +2024-08-03 19:04:45,954 INFO [train.py:1114] (1/4) Epoch 16, batch 750, loss[loss=0.1711, simple_loss=0.2642, pruned_loss=0.03898, over 13355.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2749, pruned_loss=0.05202, over 2582903.09 frames. ], batch size: 37, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:52,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=202972.0, ans=0.0 +2024-08-03 19:04:55,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=203008.66666666666, ans=0.0 +2024-08-03 19:05:11,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=203045.33333333334, ans=0.0 +2024-08-03 19:05:24,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=203118.66666666666, ans=0.125 +2024-08-03 19:05:33,740 INFO [train.py:1114] (1/4) Epoch 16, batch 800, loss[loss=0.1522, simple_loss=0.2368, pruned_loss=0.03378, over 13333.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2747, pruned_loss=0.05214, over 2597874.04 frames. ], batch size: 33, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:05:49,281 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.111e+01 1.103e+02 1.311e+02 1.683e+02 3.142e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 19:05:51,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203192.0, ans=0.1 +2024-08-03 19:05:51,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=203192.0, ans=0.125 +2024-08-03 19:06:10,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=203265.33333333334, ans=0.125 +2024-08-03 19:06:14,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=203265.33333333334, ans=0.0 +2024-08-03 19:06:25,697 INFO [train.py:1114] (1/4) Epoch 16, batch 850, loss[loss=0.2086, simple_loss=0.3034, pruned_loss=0.0569, over 13343.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2745, pruned_loss=0.05193, over 2610739.17 frames. 
], batch size: 40, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:06:30,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=203338.66666666666, ans=0.5 +2024-08-03 19:06:33,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=203338.66666666666, ans=0.0 +2024-08-03 19:06:58,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203448.66666666666, ans=0.125 +2024-08-03 19:07:10,791 INFO [train.py:1114] (1/4) Epoch 16, batch 900, loss[loss=0.1562, simple_loss=0.244, pruned_loss=0.03415, over 13341.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2743, pruned_loss=0.05192, over 2612843.08 frames. ], batch size: 33, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:07:24,141 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.183e+01 1.118e+02 1.381e+02 1.663e+02 2.638e+02, threshold=2.763e+02, percent-clipped=1.0 +2024-08-03 19:07:25,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203558.66666666666, ans=0.1 +2024-08-03 19:07:26,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=203558.66666666666, ans=0.125 +2024-08-03 19:07:42,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=203632.0, ans=0.2 +2024-08-03 19:07:52,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=203668.66666666666, ans=0.0 +2024-08-03 19:07:55,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=203705.33333333334, ans=0.2 +2024-08-03 19:07:55,952 INFO [train.py:1114] (1/4) Epoch 16, batch 950, loss[loss=0.1846, simple_loss=0.2665, pruned_loss=0.05139, over 13533.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2742, pruned_loss=0.05199, over 2613238.46 frames. ], batch size: 34, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:08:00,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=203705.33333333334, ans=0.125 +2024-08-03 19:08:01,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203705.33333333334, ans=0.125 +2024-08-03 19:08:02,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.35 vs. limit=15.0 +2024-08-03 19:08:13,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=12.0 +2024-08-03 19:08:41,464 INFO [train.py:1114] (1/4) Epoch 16, batch 1000, loss[loss=0.1819, simple_loss=0.2679, pruned_loss=0.04799, over 13364.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.275, pruned_loss=0.05238, over 2611573.52 frames. ], batch size: 35, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:08:42,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. 
limit=15.0 +2024-08-03 19:08:46,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=203888.66666666666, ans=0.0 +2024-08-03 19:08:55,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.698e+01 1.080e+02 1.221e+02 1.447e+02 2.524e+02, threshold=2.442e+02, percent-clipped=0.0 +2024-08-03 19:09:10,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=203998.66666666666, ans=0.025 +2024-08-03 19:09:17,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.11 vs. limit=15.0 +2024-08-03 19:09:23,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=204035.33333333334, ans=0.0 +2024-08-03 19:09:30,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=204035.33333333334, ans=0.0 +2024-08-03 19:09:30,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=204035.33333333334, ans=0.0 +2024-08-03 19:09:34,128 INFO [train.py:1114] (1/4) Epoch 16, batch 1050, loss[loss=0.2072, simple_loss=0.3005, pruned_loss=0.05701, over 13578.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2743, pruned_loss=0.05201, over 2615279.13 frames. ], batch size: 39, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:09:36,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204072.0, ans=0.1 +2024-08-03 19:09:37,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204072.0, ans=0.1 +2024-08-03 19:09:42,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=204072.0, ans=0.0 +2024-08-03 19:09:45,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.54 vs. limit=15.0 +2024-08-03 19:09:49,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=204108.66666666666, ans=0.0 +2024-08-03 19:10:21,099 INFO [train.py:1114] (1/4) Epoch 16, batch 1100, loss[loss=0.1755, simple_loss=0.262, pruned_loss=0.04453, over 13551.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.274, pruned_loss=0.05166, over 2619134.18 frames. ], batch size: 36, lr: 7.84e-03, grad_scale: 16.0 +2024-08-03 19:10:35,472 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.269e+01 1.085e+02 1.218e+02 1.448e+02 2.223e+02, threshold=2.436e+02, percent-clipped=0.0 +2024-08-03 19:10:43,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.76 vs. 
limit=12.0 +2024-08-03 19:10:50,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=204365.33333333334, ans=0.0 +2024-08-03 19:10:52,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=204365.33333333334, ans=0.025 +2024-08-03 19:11:05,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=204438.66666666666, ans=0.125 +2024-08-03 19:11:06,546 INFO [train.py:1114] (1/4) Epoch 16, batch 1150, loss[loss=0.1997, simple_loss=0.2774, pruned_loss=0.06102, over 13559.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2743, pruned_loss=0.05211, over 2618571.95 frames. ], batch size: 36, lr: 7.83e-03, grad_scale: 16.0 +2024-08-03 19:11:22,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.90 vs. limit=15.0 +2024-08-03 19:11:32,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=204512.0, ans=0.0 +2024-08-03 19:11:43,457 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:11:44,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204585.33333333334, ans=0.1 +2024-08-03 19:11:46,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204585.33333333334, ans=0.0 +2024-08-03 19:11:52,578 INFO [train.py:1114] (1/4) Epoch 16, batch 1200, loss[loss=0.1983, simple_loss=0.2881, pruned_loss=0.05423, over 13585.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2757, pruned_loss=0.05256, over 2616318.79 frames. ], batch size: 39, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:11:54,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=204622.0, ans=0.125 +2024-08-03 19:11:55,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=15.0 +2024-08-03 19:12:01,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.53 vs. limit=22.5 +2024-08-03 19:12:06,566 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.149e+02 1.396e+02 1.741e+02 2.381e+02, threshold=2.791e+02, percent-clipped=0.0 +2024-08-03 19:12:18,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=204732.0, ans=0.2 +2024-08-03 19:12:35,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204768.66666666666, ans=0.1 +2024-08-03 19:12:39,494 INFO [train.py:1114] (1/4) Epoch 16, batch 1250, loss[loss=0.2128, simple_loss=0.291, pruned_loss=0.0673, over 13443.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2759, pruned_loss=0.05264, over 2628100.20 frames. 
], batch size: 42, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:12:47,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=204842.0, ans=0.0 +2024-08-03 19:13:05,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204878.66666666666, ans=0.1 +2024-08-03 19:13:15,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=204915.33333333334, ans=0.2 +2024-08-03 19:13:22,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.25 vs. limit=15.0 +2024-08-03 19:13:25,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=204952.0, ans=0.125 +2024-08-03 19:13:30,418 INFO [train.py:1114] (1/4) Epoch 16, batch 1300, loss[loss=0.1924, simple_loss=0.2859, pruned_loss=0.04943, over 12970.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2756, pruned_loss=0.05254, over 2630618.76 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:13:44,835 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.037e+01 1.093e+02 1.262e+02 1.670e+02 2.902e+02, threshold=2.524e+02, percent-clipped=1.0 +2024-08-03 19:13:54,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205062.0, ans=0.1 +2024-08-03 19:13:56,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=205062.0, ans=0.04949747468305833 +2024-08-03 19:14:04,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205098.66666666666, ans=0.125 +2024-08-03 19:14:15,779 INFO [train.py:1114] (1/4) Epoch 16, batch 1350, loss[loss=0.2027, simple_loss=0.2857, pruned_loss=0.05984, over 13527.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2752, pruned_loss=0.05229, over 2639007.05 frames. ], batch size: 37, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:14:32,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=205208.66666666666, ans=0.125 +2024-08-03 19:14:33,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=205245.33333333334, ans=0.125 +2024-08-03 19:14:33,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=205245.33333333334, ans=0.125 +2024-08-03 19:14:35,242 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. 
limit=15.0 +2024-08-03 19:14:42,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=205282.0, ans=0.2 +2024-08-03 19:14:49,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=205282.0, ans=0.125 +2024-08-03 19:14:50,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=205282.0, ans=0.0 +2024-08-03 19:14:54,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205318.66666666666, ans=0.1 +2024-08-03 19:15:02,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=205355.33333333334, ans=0.02 +2024-08-03 19:15:02,776 INFO [train.py:1114] (1/4) Epoch 16, batch 1400, loss[loss=0.1802, simple_loss=0.2609, pruned_loss=0.04971, over 13254.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2745, pruned_loss=0.05209, over 2642722.36 frames. ], batch size: 31, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:15:12,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205392.0, ans=0.1 +2024-08-03 19:15:17,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.642e+01 1.154e+02 1.285e+02 1.682e+02 2.521e+02, threshold=2.570e+02, percent-clipped=0.0 +2024-08-03 19:15:17,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=205392.0, ans=0.0 +2024-08-03 19:15:24,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=205428.66666666666, ans=0.05 +2024-08-03 19:15:30,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205465.33333333334, ans=0.125 +2024-08-03 19:15:31,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=205465.33333333334, ans=0.0 +2024-08-03 19:15:48,516 INFO [train.py:1114] (1/4) Epoch 16, batch 1450, loss[loss=0.2173, simple_loss=0.2994, pruned_loss=0.06766, over 13413.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2756, pruned_loss=0.05253, over 2641966.85 frames. ], batch size: 43, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:16:04,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=205575.33333333334, ans=0.07 +2024-08-03 19:16:11,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=205612.0, ans=0.125 +2024-08-03 19:16:35,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=205685.33333333334, ans=0.0 +2024-08-03 19:16:37,158 INFO [train.py:1114] (1/4) Epoch 16, batch 1500, loss[loss=0.2011, simple_loss=0.2879, pruned_loss=0.05719, over 13415.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2754, pruned_loss=0.05216, over 2641789.86 frames. 
], batch size: 39, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:16:40,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205722.0, ans=0.1 +2024-08-03 19:16:41,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.55 vs. limit=22.5 +2024-08-03 19:16:56,109 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.141e+02 1.324e+02 1.628e+02 2.574e+02, threshold=2.648e+02, percent-clipped=1.0 +2024-08-03 19:17:01,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=205795.33333333334, ans=0.125 +2024-08-03 19:17:10,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=205832.0, ans=0.1 +2024-08-03 19:17:27,188 INFO [train.py:1114] (1/4) Epoch 16, batch 1550, loss[loss=0.1922, simple_loss=0.2829, pruned_loss=0.05074, over 13409.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2759, pruned_loss=0.05261, over 2630951.18 frames. ], batch size: 41, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:17:36,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=205942.0, ans=0.125 +2024-08-03 19:17:40,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=205942.0, ans=0.2 +2024-08-03 19:17:45,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205978.66666666666, ans=0.125 +2024-08-03 19:18:12,965 INFO [train.py:1114] (1/4) Epoch 16, batch 1600, loss[loss=0.2116, simple_loss=0.3019, pruned_loss=0.06067, over 13579.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.05275, over 2623313.12 frames. ], batch size: 39, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:18:27,555 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.061e+01 1.163e+02 1.376e+02 1.726e+02 3.125e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 19:18:33,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=206162.0, ans=0.125 +2024-08-03 19:18:37,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=206162.0, ans=15.0 +2024-08-03 19:18:37,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.53 vs. limit=5.0 +2024-08-03 19:18:42,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=206198.66666666666, ans=0.0 +2024-08-03 19:18:42,559 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:18:57,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206272.0, ans=0.1 +2024-08-03 19:18:58,352 INFO [train.py:1114] (1/4) Epoch 16, batch 1650, loss[loss=0.1881, simple_loss=0.2857, pruned_loss=0.04527, over 13337.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2761, pruned_loss=0.0529, over 2620334.45 frames. 
], batch size: 40, lr: 7.80e-03, grad_scale: 16.0 +2024-08-03 19:18:59,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=206272.0, ans=6.0 +2024-08-03 19:19:00,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=206272.0, ans=0.0 +2024-08-03 19:19:06,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=206308.66666666666, ans=0.125 +2024-08-03 19:19:08,634 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:19:13,397 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.96 vs. limit=22.5 +2024-08-03 19:19:17,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=206345.33333333334, ans=0.125 +2024-08-03 19:19:18,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=206345.33333333334, ans=0.05 +2024-08-03 19:19:19,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=206345.33333333334, ans=0.0 +2024-08-03 19:19:21,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=206345.33333333334, ans=0.035 +2024-08-03 19:19:25,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.58 vs. limit=15.0 +2024-08-03 19:19:33,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.92 vs. limit=15.0 +2024-08-03 19:19:45,354 INFO [train.py:1114] (1/4) Epoch 16, batch 1700, loss[loss=0.1829, simple_loss=0.2563, pruned_loss=0.05474, over 13293.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.276, pruned_loss=0.05264, over 2629530.95 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:19:45,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206455.33333333334, ans=0.1 +2024-08-03 19:19:45,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.31 vs. 
limit=10.0 +2024-08-03 19:20:00,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=206492.0, ans=0.125 +2024-08-03 19:20:02,481 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.148e+01 1.167e+02 1.345e+02 1.765e+02 2.775e+02, threshold=2.690e+02, percent-clipped=1.0 +2024-08-03 19:20:22,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=206565.33333333334, ans=0.125 +2024-08-03 19:20:24,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=206565.33333333334, ans=0.2 +2024-08-03 19:20:26,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=206602.0, ans=0.125 +2024-08-03 19:20:36,091 INFO [train.py:1114] (1/4) Epoch 16, batch 1750, loss[loss=0.1764, simple_loss=0.2526, pruned_loss=0.05011, over 13531.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2754, pruned_loss=0.05261, over 2632740.21 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:20:41,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=206638.66666666666, ans=0.125 +2024-08-03 19:20:55,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=206712.0, ans=0.0 +2024-08-03 19:21:20,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.61 vs. limit=10.0 +2024-08-03 19:21:21,429 INFO [train.py:1114] (1/4) Epoch 16, batch 1800, loss[loss=0.1781, simple_loss=0.2695, pruned_loss=0.04336, over 13546.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2752, pruned_loss=0.05242, over 2634319.90 frames. ], batch size: 38, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:21:22,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206822.0, ans=0.0 +2024-08-03 19:21:35,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206858.66666666666, ans=0.125 +2024-08-03 19:21:36,889 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.527e+01 1.147e+02 1.312e+02 1.685e+02 2.855e+02, threshold=2.624e+02, percent-clipped=1.0 +2024-08-03 19:21:48,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=206932.0, ans=0.0 +2024-08-03 19:21:49,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206932.0, ans=0.125 +2024-08-03 19:21:54,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=206932.0, ans=0.025 +2024-08-03 19:22:04,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-08-03 19:22:05,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.61 vs. limit=15.0 +2024-08-03 19:22:06,967 INFO [train.py:1114] (1/4) Epoch 16, batch 1850, loss[loss=0.1701, simple_loss=0.2611, pruned_loss=0.03958, over 13403.00 frames. 
], tot_loss[loss=0.1905, simple_loss=0.2757, pruned_loss=0.05268, over 2636453.03 frames. ], batch size: 39, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:22:15,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207042.0, ans=0.125 +2024-08-03 19:22:23,498 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:22:41,124 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:22:52,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=207152.0, ans=0.07 +2024-08-03 19:22:58,926 INFO [train.py:1114] (1/4) Epoch 16, batch 1900, loss[loss=0.2358, simple_loss=0.3251, pruned_loss=0.07318, over 13317.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2768, pruned_loss=0.05308, over 2638931.67 frames. ], batch size: 40, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:23:18,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=207188.66666666666, ans=0.125 +2024-08-03 19:23:18,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=207188.66666666666, ans=0.2 +2024-08-03 19:23:29,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.263e+01 1.127e+02 1.354e+02 1.894e+02 2.950e+02, threshold=2.708e+02, percent-clipped=4.0 +2024-08-03 19:23:29,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207225.33333333334, ans=0.125 +2024-08-03 19:23:31,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207262.0, ans=0.125 +2024-08-03 19:23:38,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.79 vs. limit=15.0 +2024-08-03 19:23:38,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=207262.0, ans=0.0 +2024-08-03 19:23:54,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=207335.33333333334, ans=0.0 +2024-08-03 19:24:03,033 INFO [train.py:1114] (1/4) Epoch 16, batch 1950, loss[loss=0.1903, simple_loss=0.272, pruned_loss=0.05431, over 13554.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2771, pruned_loss=0.05272, over 2645776.38 frames. 
], batch size: 36, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:24:28,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=207445.33333333334, ans=0.0 +2024-08-03 19:24:29,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207445.33333333334, ans=0.1 +2024-08-03 19:24:35,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=207482.0, ans=0.0 +2024-08-03 19:24:41,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207482.0, ans=0.1 +2024-08-03 19:24:53,793 INFO [train.py:1114] (1/4) Epoch 16, batch 2000, loss[loss=0.1733, simple_loss=0.253, pruned_loss=0.04674, over 13533.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2776, pruned_loss=0.05323, over 2635215.25 frames. ], batch size: 31, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:25:05,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207592.0, ans=0.125 +2024-08-03 19:25:09,399 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.745e+01 1.151e+02 1.402e+02 1.831e+02 3.066e+02, threshold=2.804e+02, percent-clipped=4.0 +2024-08-03 19:25:09,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=207592.0, ans=0.025 +2024-08-03 19:25:20,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=207665.33333333334, ans=0.0 +2024-08-03 19:25:38,900 INFO [train.py:1114] (1/4) Epoch 16, batch 2050, loss[loss=0.1707, simple_loss=0.2507, pruned_loss=0.04539, over 13415.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2767, pruned_loss=0.05322, over 2632803.83 frames. ], batch size: 32, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:25:43,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0 +2024-08-03 19:25:44,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207738.66666666666, ans=0.125 +2024-08-03 19:26:23,759 INFO [train.py:1114] (1/4) Epoch 16, batch 2100, loss[loss=0.2127, simple_loss=0.2941, pruned_loss=0.06566, over 13561.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2759, pruned_loss=0.05268, over 2637970.13 frames. ], batch size: 37, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:26:26,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.83 vs. 
limit=15.0 +2024-08-03 19:26:38,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207958.66666666666, ans=0.0 +2024-08-03 19:26:38,692 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.022e+01 1.073e+02 1.217e+02 1.568e+02 3.232e+02, threshold=2.433e+02, percent-clipped=1.0 +2024-08-03 19:26:38,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=207958.66666666666, ans=0.04949747468305833 +2024-08-03 19:26:46,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=207995.33333333334, ans=0.2 +2024-08-03 19:26:51,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.64 vs. limit=12.0 +2024-08-03 19:27:10,303 INFO [train.py:1114] (1/4) Epoch 16, batch 2150, loss[loss=0.164, simple_loss=0.2464, pruned_loss=0.04077, over 13555.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2751, pruned_loss=0.05238, over 2646793.24 frames. ], batch size: 36, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:27:30,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.16 vs. limit=15.0 +2024-08-03 19:27:33,141 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:27:44,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=208215.33333333334, ans=0.0 +2024-08-03 19:27:59,569 INFO [train.py:1114] (1/4) Epoch 16, batch 2200, loss[loss=0.1813, simple_loss=0.2751, pruned_loss=0.04376, over 13392.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2741, pruned_loss=0.05182, over 2644644.53 frames. ], batch size: 39, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:28:06,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=208288.66666666666, ans=0.025 +2024-08-03 19:28:16,738 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.518e+01 1.187e+02 1.408e+02 1.826e+02 3.967e+02, threshold=2.817e+02, percent-clipped=9.0 +2024-08-03 19:28:17,803 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:28:43,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=208435.33333333334, ans=0.125 +2024-08-03 19:28:46,815 INFO [train.py:1114] (1/4) Epoch 16, batch 2250, loss[loss=0.1923, simple_loss=0.2824, pruned_loss=0.05115, over 13360.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.275, pruned_loss=0.05235, over 2641182.64 frames. ], batch size: 37, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:28:48,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.87 vs. 
limit=15.0 +2024-08-03 19:29:05,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=208545.33333333334, ans=0.125 +2024-08-03 19:29:12,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=208545.33333333334, ans=0.0 +2024-08-03 19:29:21,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-08-03 19:29:30,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=208618.66666666666, ans=0.2 +2024-08-03 19:29:33,616 INFO [train.py:1114] (1/4) Epoch 16, batch 2300, loss[loss=0.1749, simple_loss=0.2577, pruned_loss=0.04605, over 13583.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2737, pruned_loss=0.05216, over 2637445.24 frames. ], batch size: 33, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:29:48,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=208692.0, ans=0.125 +2024-08-03 19:29:48,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=208692.0, ans=0.125 +2024-08-03 19:29:54,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.353e+01 1.164e+02 1.344e+02 1.643e+02 2.956e+02, threshold=2.688e+02, percent-clipped=1.0 +2024-08-03 19:29:54,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=208692.0, ans=0.125 +2024-08-03 19:29:57,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=208728.66666666666, ans=0.125 +2024-08-03 19:30:04,194 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:30:24,644 INFO [train.py:1114] (1/4) Epoch 16, batch 2350, loss[loss=0.185, simple_loss=0.2719, pruned_loss=0.04903, over 13551.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2743, pruned_loss=0.05212, over 2640222.72 frames. ], batch size: 38, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:30:32,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=208838.66666666666, ans=0.125 +2024-08-03 19:30:47,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.88 vs. limit=15.0 +2024-08-03 19:30:52,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.01 vs. 
limit=22.5 +2024-08-03 19:30:56,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=208948.66666666666, ans=0.0 +2024-08-03 19:31:00,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=208948.66666666666, ans=0.0 +2024-08-03 19:31:01,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=208948.66666666666, ans=0.125 +2024-08-03 19:31:11,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=208985.33333333334, ans=0.025 +2024-08-03 19:31:13,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.50 vs. limit=15.0 +2024-08-03 19:31:14,338 INFO [train.py:1114] (1/4) Epoch 16, batch 2400, loss[loss=0.1623, simple_loss=0.251, pruned_loss=0.03679, over 13536.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2742, pruned_loss=0.05189, over 2641741.34 frames. ], batch size: 35, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:31:17,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209022.0, ans=0.125 +2024-08-03 19:31:34,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=209058.66666666666, ans=0.125 +2024-08-03 19:31:40,581 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.713e+01 1.195e+02 1.361e+02 1.735e+02 2.883e+02, threshold=2.722e+02, percent-clipped=1.0 +2024-08-03 19:31:49,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=209095.33333333334, ans=0.2 +2024-08-03 19:31:52,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=209095.33333333334, ans=0.125 +2024-08-03 19:32:08,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=209168.66666666666, ans=0.125 +2024-08-03 19:32:18,680 INFO [train.py:1114] (1/4) Epoch 16, batch 2450, loss[loss=0.1874, simple_loss=0.2754, pruned_loss=0.04971, over 13366.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2746, pruned_loss=0.05211, over 2632122.52 frames. ], batch size: 37, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:32:26,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=209205.33333333334, ans=0.0 +2024-08-03 19:32:31,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.11 vs. limit=22.5 +2024-08-03 19:32:50,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209315.33333333334, ans=0.0 +2024-08-03 19:33:03,796 INFO [train.py:1114] (1/4) Epoch 16, batch 2500, loss[loss=0.1925, simple_loss=0.2836, pruned_loss=0.05069, over 13395.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2746, pruned_loss=0.05223, over 2635664.41 frames. 
], batch size: 39, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:33:04,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=209388.66666666666, ans=0.0 +2024-08-03 19:33:07,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=209388.66666666666, ans=0.125 +2024-08-03 19:33:10,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.04 vs. limit=15.0 +2024-08-03 19:33:18,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209425.33333333334, ans=0.125 +2024-08-03 19:33:21,418 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.166e+01 1.109e+02 1.260e+02 1.584e+02 2.146e+02, threshold=2.521e+02, percent-clipped=0.0 +2024-08-03 19:33:23,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=209462.0, ans=0.0 +2024-08-03 19:33:27,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=209462.0, ans=10.0 +2024-08-03 19:33:27,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=209462.0, ans=0.0 +2024-08-03 19:33:50,673 INFO [train.py:1114] (1/4) Epoch 16, batch 2550, loss[loss=0.1671, simple_loss=0.2503, pruned_loss=0.04199, over 13540.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2747, pruned_loss=0.05227, over 2636801.58 frames. ], batch size: 31, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:34:04,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=209608.66666666666, ans=0.125 +2024-08-03 19:34:16,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=209645.33333333334, ans=0.025 +2024-08-03 19:34:36,108 INFO [train.py:1114] (1/4) Epoch 16, batch 2600, loss[loss=0.1791, simple_loss=0.2696, pruned_loss=0.0443, over 13555.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2749, pruned_loss=0.05203, over 2636175.58 frames. 
], batch size: 36, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:34:37,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=209755.33333333334, ans=0.07 +2024-08-03 19:34:38,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=209755.33333333334, ans=0.025 +2024-08-03 19:34:39,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=209755.33333333334, ans=0.125 +2024-08-03 19:34:41,486 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:34:49,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=209792.0, ans=0.2 +2024-08-03 19:34:50,513 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.224e+01 1.145e+02 1.272e+02 1.680e+02 2.511e+02, threshold=2.545e+02, percent-clipped=0.0 +2024-08-03 19:34:50,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=209792.0, ans=0.0 +2024-08-03 19:35:05,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.82 vs. limit=12.0 +2024-08-03 19:35:18,984 INFO [train.py:1114] (1/4) Epoch 16, batch 2650, loss[loss=0.1954, simple_loss=0.2817, pruned_loss=0.05454, over 13300.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2747, pruned_loss=0.05182, over 2639642.33 frames. ], batch size: 46, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:35:30,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=209975.33333333334, ans=0.025 +2024-08-03 19:35:36,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=210012.0, ans=0.125 +2024-08-03 19:35:53,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=210085.33333333334, ans=10.0 +2024-08-03 19:36:02,101 INFO [train.py:1114] (1/4) Epoch 16, batch 2700, loss[loss=0.1918, simple_loss=0.282, pruned_loss=0.05082, over 13528.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2749, pruned_loss=0.05186, over 2636983.14 frames. ], batch size: 40, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:36:09,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=210122.0, ans=0.025 +2024-08-03 19:36:16,696 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.859e+01 1.215e+02 1.396e+02 1.823e+02 2.794e+02, threshold=2.792e+02, percent-clipped=5.0 +2024-08-03 19:36:27,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=210232.0, ans=0.125 +2024-08-03 19:36:39,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=210268.66666666666, ans=0.04949747468305833 +2024-08-03 19:36:45,434 INFO [train.py:1114] (1/4) Epoch 16, batch 2750, loss[loss=0.1868, simple_loss=0.2665, pruned_loss=0.05352, over 13339.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2737, pruned_loss=0.05165, over 2634638.58 frames. 
], batch size: 34, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:36:45,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=12.0 +2024-08-03 19:36:49,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=210305.33333333334, ans=0.125 +2024-08-03 19:36:57,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=210342.0, ans=0.125 +2024-08-03 19:37:00,211 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:37:08,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=210378.66666666666, ans=0.125 +2024-08-03 19:37:14,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.08 vs. limit=12.0 +2024-08-03 19:37:30,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=210488.66666666666, ans=0.125 +2024-08-03 19:37:31,231 INFO [train.py:1114] (1/4) Epoch 16, batch 2800, loss[loss=0.2757, simple_loss=0.3288, pruned_loss=0.1113, over 9270.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2741, pruned_loss=0.05195, over 2625734.69 frames. ], batch size: 96, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:37:33,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.88 vs. limit=6.0 +2024-08-03 19:37:34,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=210488.66666666666, ans=0.0 +2024-08-03 19:37:44,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.04 vs. limit=8.0 +2024-08-03 19:37:45,863 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.762e+01 1.112e+02 1.298e+02 1.652e+02 2.703e+02, threshold=2.596e+02, percent-clipped=0.0 +2024-08-03 19:37:47,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0 +2024-08-03 19:38:00,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=210598.66666666666, ans=0.125 +2024-08-03 19:38:02,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=210598.66666666666, ans=0.0 +2024-08-03 19:38:06,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=210635.33333333334, ans=0.125 +2024-08-03 19:38:15,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=210672.0, ans=0.125 +2024-08-03 19:38:15,867 INFO [train.py:1114] (1/4) Epoch 16, batch 2850, loss[loss=0.1657, simple_loss=0.2547, pruned_loss=0.03835, over 13360.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2753, pruned_loss=0.05228, over 2620067.72 frames. 
], batch size: 35, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:38:35,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=210745.33333333334, ans=0.0 +2024-08-03 19:39:00,430 INFO [train.py:1114] (1/4) Epoch 16, batch 2900, loss[loss=0.1606, simple_loss=0.2457, pruned_loss=0.03771, over 13360.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2756, pruned_loss=0.05223, over 2630778.19 frames. ], batch size: 36, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:39:02,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.81 vs. limit=22.5 +2024-08-03 19:39:05,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=210855.33333333334, ans=0.2 +2024-08-03 19:39:06,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=210855.33333333334, ans=0.0 +2024-08-03 19:39:12,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.16 vs. limit=10.0 +2024-08-03 19:39:16,543 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.624e+01 1.079e+02 1.232e+02 1.534e+02 2.946e+02, threshold=2.465e+02, percent-clipped=2.0 +2024-08-03 19:39:27,206 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:39:44,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.08 vs. limit=10.0 +2024-08-03 19:39:45,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211038.66666666666, ans=0.1 +2024-08-03 19:39:46,444 INFO [train.py:1114] (1/4) Epoch 16, batch 2950, loss[loss=0.187, simple_loss=0.27, pruned_loss=0.05199, over 13334.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.274, pruned_loss=0.0516, over 2628683.24 frames. ], batch size: 34, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:40:01,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=211075.33333333334, ans=0.025 +2024-08-03 19:40:15,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211148.66666666666, ans=0.125 +2024-08-03 19:40:23,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211185.33333333334, ans=0.1 +2024-08-03 19:40:28,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211185.33333333334, ans=0.125 +2024-08-03 19:40:30,438 INFO [train.py:1114] (1/4) Epoch 16, batch 3000, loss[loss=0.186, simple_loss=0.2751, pruned_loss=0.04845, over 13539.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2746, pruned_loss=0.05204, over 2629336.01 frames. ], batch size: 37, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:40:30,439 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 19:40:42,216 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1717, simple_loss=0.2708, pruned_loss=0.03625, over 944034.00 frames. 
+2024-08-03 19:40:42,217 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 19:40:48,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=211222.0, ans=0.0 +2024-08-03 19:40:51,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=211258.66666666666, ans=0.125 +2024-08-03 19:40:56,642 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.281e+01 1.094e+02 1.222e+02 1.516e+02 2.979e+02, threshold=2.443e+02, percent-clipped=5.0 +2024-08-03 19:41:06,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=211295.33333333334, ans=0.2 +2024-08-03 19:41:07,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=211332.0, ans=0.2 +2024-08-03 19:41:10,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=211332.0, ans=0.2 +2024-08-03 19:41:21,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211368.66666666666, ans=0.1 +2024-08-03 19:41:24,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211368.66666666666, ans=0.0 +2024-08-03 19:41:25,992 INFO [train.py:1114] (1/4) Epoch 16, batch 3050, loss[loss=0.1702, simple_loss=0.2543, pruned_loss=0.04308, over 13547.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2751, pruned_loss=0.05227, over 2626423.31 frames. ], batch size: 35, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:41:37,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-03 19:41:47,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211478.66666666666, ans=0.1 +2024-08-03 19:41:52,261 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:41:56,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=211515.33333333334, ans=0.07 +2024-08-03 19:41:56,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=211515.33333333334, ans=0.0 +2024-08-03 19:41:56,769 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-08-03 19:42:01,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=12.0 +2024-08-03 19:42:04,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.17 vs. limit=22.5 +2024-08-03 19:42:09,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.60 vs. limit=22.5 +2024-08-03 19:42:09,453 INFO [train.py:1114] (1/4) Epoch 16, batch 3100, loss[loss=0.2041, simple_loss=0.2934, pruned_loss=0.05739, over 13338.00 frames. 
], tot_loss[loss=0.1899, simple_loss=0.2752, pruned_loss=0.05228, over 2626191.08 frames. ], batch size: 46, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:42:22,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=211625.33333333334, ans=0.125 +2024-08-03 19:42:23,781 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.933e+01 1.091e+02 1.250e+02 1.567e+02 2.776e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 19:42:31,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=211662.0, ans=0.125 +2024-08-03 19:42:34,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=211698.66666666666, ans=0.0 +2024-08-03 19:42:35,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.16 vs. limit=15.0 +2024-08-03 19:42:51,920 INFO [train.py:1114] (1/4) Epoch 16, batch 3150, loss[loss=0.2181, simple_loss=0.3011, pruned_loss=0.06749, over 13017.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.275, pruned_loss=0.05198, over 2628688.07 frames. ], batch size: 48, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:43:09,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=211845.33333333334, ans=0.5 +2024-08-03 19:43:35,467 INFO [train.py:1114] (1/4) Epoch 16, batch 3200, loss[loss=0.1759, simple_loss=0.2591, pruned_loss=0.04633, over 13546.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2745, pruned_loss=0.05193, over 2633976.19 frames. ], batch size: 37, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:43:36,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=211955.33333333334, ans=0.125 +2024-08-03 19:43:39,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211955.33333333334, ans=0.1 +2024-08-03 19:43:49,847 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.470e+01 1.178e+02 1.467e+02 1.849e+02 2.870e+02, threshold=2.934e+02, percent-clipped=4.0 +2024-08-03 19:43:59,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212028.66666666666, ans=0.0 +2024-08-03 19:44:03,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-08-03 19:44:18,726 INFO [train.py:1114] (1/4) Epoch 16, batch 3250, loss[loss=0.203, simple_loss=0.2911, pruned_loss=0.05745, over 13388.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2753, pruned_loss=0.05185, over 2638513.93 frames. ], batch size: 38, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:44:24,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=212138.66666666666, ans=0.0 +2024-08-03 19:44:32,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212175.33333333334, ans=0.0 +2024-08-03 19:44:35,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.15 vs. 
+2024-08-03 19:44:41,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=212212.0, ans=0.125
+2024-08-03 19:44:42,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.13 vs. limit=15.0
+2024-08-03 19:44:42,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=212212.0, ans=0.2
+2024-08-03 19:44:45,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212248.66666666666, ans=0.125
+2024-08-03 19:44:54,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212285.33333333334, ans=0.125
+2024-08-03 19:44:55,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=212285.33333333334, ans=15.0
+2024-08-03 19:44:57,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=212285.33333333334, ans=0.0
+2024-08-03 19:44:59,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0
+2024-08-03 19:45:02,682 INFO [train.py:1114] (1/4) Epoch 16, batch 3300, loss[loss=0.1943, simple_loss=0.2844, pruned_loss=0.05209, over 12891.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2742, pruned_loss=0.0516, over 2640264.24 frames. ], batch size: 52, lr: 7.69e-03, grad_scale: 32.0
+2024-08-03 19:45:08,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0
+2024-08-03 19:45:14,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212358.66666666666, ans=0.1
+2024-08-03 19:45:17,671 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.171e+02 1.334e+02 1.762e+02 2.468e+02, threshold=2.668e+02, percent-clipped=0.0
+2024-08-03 19:45:30,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.12 vs. limit=15.0
+2024-08-03 19:45:30,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=212432.0, ans=0.0
+2024-08-03 19:45:33,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212432.0, ans=0.125
+2024-08-03 19:45:35,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=212432.0, ans=0.2
+2024-08-03 19:45:44,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212468.66666666666, ans=0.125
+2024-08-03 19:45:45,687 INFO [train.py:1114] (1/4) Epoch 16, batch 3350, loss[loss=0.2098, simple_loss=0.3058, pruned_loss=0.05693, over 13107.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2752, pruned_loss=0.05227, over 2629971.72 frames. ], batch size: 48, lr: 7.68e-03, grad_scale: 32.0
+2024-08-03 19:45:49,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212505.33333333334, ans=0.125
+2024-08-03 19:46:10,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=212578.66666666666, ans=0.125
+2024-08-03 19:46:10,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=212615.33333333334, ans=0.0
+2024-08-03 19:46:16,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=212615.33333333334, ans=0.025
+2024-08-03 19:46:18,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=212615.33333333334, ans=0.2
+2024-08-03 19:46:29,927 INFO [train.py:1114] (1/4) Epoch 16, batch 3400, loss[loss=0.1813, simple_loss=0.2564, pruned_loss=0.05314, over 13546.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2748, pruned_loss=0.05218, over 2625080.70 frames. ], batch size: 31, lr: 7.68e-03, grad_scale: 32.0
+2024-08-03 19:46:44,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.890e+01 1.091e+02 1.257e+02 1.485e+02 2.568e+02, threshold=2.513e+02, percent-clipped=0.0
+2024-08-03 19:46:45,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.01 vs. limit=15.0
+2024-08-03 19:46:50,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=212762.0, ans=0.0
+2024-08-03 19:47:03,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212835.33333333334, ans=0.1
+2024-08-03 19:47:07,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212835.33333333334, ans=0.1
+2024-08-03 19:47:12,577 INFO [train.py:1114] (1/4) Epoch 16, batch 3450, loss[loss=0.1843, simple_loss=0.2777, pruned_loss=0.04545, over 12921.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.275, pruned_loss=0.05207, over 2629989.80 frames. ], batch size: 52, lr: 7.68e-03, grad_scale: 32.0
+2024-08-03 19:47:13,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212872.0, ans=0.125
+2024-08-03 19:47:23,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=212908.66666666666, ans=0.0
+2024-08-03 19:47:30,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=212945.33333333334, ans=0.09899494936611666
+2024-08-03 19:47:37,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=212982.0, ans=0.125
+2024-08-03 19:47:48,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.09 vs. limit=10.0
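The WARNING lines from optim.py summarize the recent gradient-norm history by its quartiles (min, 25%, median, 75%, max). In the values logged here the threshold equals Clipping_scale times the median (for example 2.0 * 1.222e+02 = 2.443e+02 in the first warning above), and percent-clipped is the share of recent batches whose gradient norm exceeded it. A sketch of producing such a report; the history tensor below is synthetic, and the exact bookkeeping in the real optimizer may differ:

```python
# Sketch: summarising recent gradient norms by quartiles and reporting a
# clipping threshold, mirroring the optim.py WARNING lines above.
import torch

def clip_report(grad_norms: torch.Tensor, clipping_scale: float = 2.0) -> None:
    # Quartiles (min, 25%, 50%, 75%, max) of the recent grad-norm history.
    q = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # threshold = scale * median
    clipped = (grad_norms > threshold).float().mean().item() * 100
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          + " ".join(f"{v.item():.3e}" for v in q)
          + f", threshold={threshold:.3e}, percent-clipped={clipped:.1f}")

clip_report(torch.tensor([92.8, 109.4, 122.2, 151.6, 297.9]))
```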
+2024-08-03 19:47:55,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=213055.33333333334, ans=0.2
+2024-08-03 19:47:55,628 INFO [train.py:1114] (1/4) Epoch 16, batch 3500, loss[loss=0.1788, simple_loss=0.2566, pruned_loss=0.05047, over 13525.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2742, pruned_loss=0.05213, over 2632009.50 frames. ], batch size: 34, lr: 7.67e-03, grad_scale: 32.0
+2024-08-03 19:47:59,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=213055.33333333334, ans=0.0
+2024-08-03 19:48:10,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.228e+02 1.407e+02 1.881e+02 3.021e+02, threshold=2.813e+02, percent-clipped=7.0
+2024-08-03 19:48:33,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.37 vs. limit=10.0
+2024-08-03 19:48:38,352 INFO [train.py:1114] (1/4) Epoch 16, batch 3550, loss[loss=0.2048, simple_loss=0.2894, pruned_loss=0.06007, over 12768.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2766, pruned_loss=0.05332, over 2630278.59 frames. ], batch size: 59, lr: 7.67e-03, grad_scale: 16.0
+2024-08-03 19:48:46,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=213275.33333333334, ans=0.025
+2024-08-03 19:49:11,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=213348.66666666666, ans=0.95
+2024-08-03 19:49:15,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.40 vs. limit=15.0
+2024-08-03 19:49:15,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.86 vs. limit=15.0
+2024-08-03 19:49:23,439 INFO [train.py:1114] (1/4) Epoch 16, batch 3600, loss[loss=0.2479, simple_loss=0.3159, pruned_loss=0.0899, over 9178.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2809, pruned_loss=0.05695, over 2488419.97 frames. ], batch size: 97, lr: 7.67e-03, grad_scale: 32.0
+2024-08-03 19:49:24,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213422.0, ans=0.125
+2024-08-03 19:49:28,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.17 vs. limit=22.5
+2024-08-03 19:49:36,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=213458.66666666666, ans=0.0
+2024-08-03 19:49:39,124 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.114e+02 1.218e+02 1.319e+02 1.769e+02, threshold=2.437e+02, percent-clipped=0.0
+2024-08-03 19:49:40,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=213495.33333333334, ans=0.0
+2024-08-03 19:49:43,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213495.33333333334, ans=0.1
+2024-08-03 19:49:54,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=213532.0, ans=0.0
+2024-08-03 19:50:42,668 INFO [train.py:1114] (1/4) Epoch 17, batch 0, loss[loss=0.171, simple_loss=0.2609, pruned_loss=0.04057, over 13324.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2609, pruned_loss=0.04057, over 13324.00 frames. ], batch size: 33, lr: 7.43e-03, grad_scale: 32.0
+2024-08-03 19:50:42,669 INFO [train.py:1137] (1/4) Computing validation loss
+2024-08-03 19:50:47,544 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5571, 2.9373, 2.5850, 2.7214], device='cuda:1')
+2024-08-03 19:50:48,653 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.6763, 3.5168, 3.8886, 3.6119], device='cuda:1')
+2024-08-03 19:50:48,962 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0497, 3.4196, 3.4332, 1.7841], device='cuda:1')
+2024-08-03 19:50:52,770 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.17, simple_loss=0.2717, pruned_loss=0.03416, over 944034.00 frames.
+2024-08-03 19:50:52,770 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB
+2024-08-03 19:51:07,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=213605.33333333334, ans=0.0
+2024-08-03 19:51:11,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=213605.33333333334, ans=0.2
+2024-08-03 19:51:13,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=213642.0, ans=0.125
+2024-08-03 19:51:13,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=213642.0, ans=0.125
+2024-08-03 19:51:28,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=213678.66666666666, ans=0.125
+2024-08-03 19:51:40,075 INFO [train.py:1114] (1/4) Epoch 17, batch 50, loss[loss=0.1558, simple_loss=0.2365, pruned_loss=0.03754, over 13421.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.275, pruned_loss=0.05143, over 577954.67 frames. ], batch size: 32, lr: 7.43e-03, grad_scale: 32.0
+2024-08-03 19:51:47,040 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.19 vs. limit=15.0
+2024-08-03 19:52:05,563 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.139e+01 1.157e+02 1.306e+02 1.728e+02 3.229e+02, threshold=2.612e+02, percent-clipped=8.0
+2024-08-03 19:52:10,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. limit=12.0
+2024-08-03 19:52:14,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.43 vs. limit=10.0
+2024-08-03 19:52:16,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=12.0
+2024-08-03 19:52:17,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.65 vs. limit=15.0
+2024-08-03 19:52:17,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213898.66666666666, ans=0.125
+2024-08-03 19:52:20,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=213898.66666666666, ans=0.125
+2024-08-03 19:52:25,588 INFO [train.py:1114] (1/4) Epoch 17, batch 100, loss[loss=0.1804, simple_loss=0.2661, pruned_loss=0.04735, over 13539.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2761, pruned_loss=0.05168, over 1025417.22 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0
+2024-08-03 19:52:31,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0
+2024-08-03 19:52:34,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=213935.33333333334, ans=0.0
+2024-08-03 19:52:39,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=213972.0, ans=0.125
+2024-08-03 19:52:59,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=214045.33333333334, ans=0.125
+2024-08-03 19:53:09,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0
+2024-08-03 19:53:11,455 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:53:12,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214082.0, ans=0.125
+2024-08-03 19:53:13,863 INFO [train.py:1114] (1/4) Epoch 17, batch 150, loss[loss=0.1537, simple_loss=0.2366, pruned_loss=0.03534, over 13432.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2746, pruned_loss=0.05083, over 1386479.55 frames. ], batch size: 32, lr: 7.42e-03, grad_scale: 32.0
+2024-08-03 19:53:39,022 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.782e+01 1.100e+02 1.230e+02 1.473e+02 3.065e+02, threshold=2.460e+02, percent-clipped=1.0
+2024-08-03 19:53:40,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=214228.66666666666, ans=0.125
+2024-08-03 19:53:45,827 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:53:47,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.59 vs. limit=15.0
+2024-08-03 19:53:55,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214265.33333333334, ans=0.1
+2024-08-03 19:53:58,932 INFO [train.py:1114] (1/4) Epoch 17, batch 200, loss[loss=0.1847, simple_loss=0.2719, pruned_loss=0.04875, over 12388.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2729, pruned_loss=0.05053, over 1664906.07 frames. ], batch size: 58, lr: 7.42e-03, grad_scale: 32.0
+2024-08-03 19:54:01,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=214302.0, ans=0.2
+2024-08-03 19:54:02,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.01 vs. limit=15.0
+2024-08-03 19:54:10,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.96 vs. limit=15.0
+2024-08-03 19:54:11,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=214338.66666666666, ans=0.0
+2024-08-03 19:54:12,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214338.66666666666, ans=0.0
+2024-08-03 19:54:38,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214448.66666666666, ans=0.125
+2024-08-03 19:54:40,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=214448.66666666666, ans=0.025
+2024-08-03 19:54:44,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214448.66666666666, ans=0.125
+2024-08-03 19:54:48,675 INFO [train.py:1114] (1/4) Epoch 17, batch 250, loss[loss=0.1934, simple_loss=0.2857, pruned_loss=0.05053, over 13272.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2728, pruned_loss=0.05064, over 1883721.22 frames. ], batch size: 46, lr: 7.42e-03, grad_scale: 32.0
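During the validation pass at the start of epoch 17 (the train.py:1137 through train.py:1146 lines above), zipformer.py:1858 also prints one attention-entropy value per head for selected self_attn_weights modules. A sketch of one way a per-head entropy of softmax attention weights can be computed; the (batch, heads, query, key) layout is an assumption for illustration, not necessarily the module's internal shape:

```python
# Sketch: per-head entropy of attention weights, as printed by the
# attn_weights_entropy validation lines above. Layout is assumed.
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    # attn: (batch, heads, query, key), a softmax distribution over keys.
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # entropy over keys
    return ent.mean(dim=(0, 2))                       # average per head

attn = torch.softmax(torch.randn(2, 4, 10, 10), dim=-1)
print(attn_weights_entropy(attn))  # one value per head, as in the log
```

Low entropy means a head attends to few keys; uniformly spread attention gives entropy near log(num_keys).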
+2024-08-03 19:54:49,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=214485.33333333334, ans=0.05
+2024-08-03 19:54:49,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=214485.33333333334, ans=0.0
+2024-08-03 19:54:54,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=214485.33333333334, ans=0.2
+2024-08-03 19:54:59,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214522.0, ans=0.2
+2024-08-03 19:55:05,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=214522.0, ans=0.025
+2024-08-03 19:55:05,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=214522.0, ans=0.2
+2024-08-03 19:55:08,679 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:55:14,702 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.099e+02 1.340e+02 1.709e+02 3.717e+02, threshold=2.680e+02, percent-clipped=7.0
+2024-08-03 19:55:21,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0
+2024-08-03 19:55:33,713 INFO [train.py:1114] (1/4) Epoch 17, batch 300, loss[loss=0.2024, simple_loss=0.2854, pruned_loss=0.05965, over 13439.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2717, pruned_loss=0.05033, over 2050119.36 frames. ], batch size: 42, lr: 7.42e-03, grad_scale: 16.0
+2024-08-03 19:55:40,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=214668.66666666666, ans=0.025
+2024-08-03 19:55:40,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.28 vs. limit=10.0
+2024-08-03 19:55:43,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=214705.33333333334, ans=0.0
+2024-08-03 19:55:44,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.42 vs. limit=15.0
+2024-08-03 19:55:46,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214705.33333333334, ans=0.125
+2024-08-03 19:55:52,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=214742.0, ans=0.125
+2024-08-03 19:56:05,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.05 vs. limit=15.0
+2024-08-03 19:56:09,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214815.33333333334, ans=0.1
+2024-08-03 19:56:18,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=214815.33333333334, ans=0.2
+2024-08-03 19:56:20,681 INFO [train.py:1114] (1/4) Epoch 17, batch 350, loss[loss=0.1824, simple_loss=0.2688, pruned_loss=0.04801, over 13600.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2723, pruned_loss=0.05021, over 2180472.26 frames. ], batch size: 33, lr: 7.41e-03, grad_scale: 16.0
+2024-08-03 19:56:28,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.59 vs. limit=15.0
+2024-08-03 19:56:49,373 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.060e+01 1.097e+02 1.266e+02 1.426e+02 2.641e+02, threshold=2.532e+02, percent-clipped=0.0
+2024-08-03 19:56:53,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=214962.0, ans=0.025
+2024-08-03 19:56:55,804 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:56:59,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0
+2024-08-03 19:56:59,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214998.66666666666, ans=0.2
+2024-08-03 19:56:59,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=214998.66666666666, ans=0.0
+2024-08-03 19:57:08,521 INFO [train.py:1114] (1/4) Epoch 17, batch 400, loss[loss=0.197, simple_loss=0.287, pruned_loss=0.05345, over 13367.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2719, pruned_loss=0.05025, over 2284782.28 frames. ], batch size: 37, lr: 7.41e-03, grad_scale: 32.0
+2024-08-03 19:57:24,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215072.0, ans=0.125
+2024-08-03 19:57:48,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=215182.0, ans=0.09899494936611666
+2024-08-03 19:57:50,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.21 vs. limit=15.0
+2024-08-03 19:57:54,026 INFO [train.py:1114] (1/4) Epoch 17, batch 450, loss[loss=0.176, simple_loss=0.2649, pruned_loss=0.04351, over 13559.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2727, pruned_loss=0.05039, over 2358460.78 frames. ], batch size: 38, lr: 7.41e-03, grad_scale: 32.0
+2024-08-03 19:58:03,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215218.66666666666, ans=0.125
+2024-08-03 19:58:17,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=215292.0, ans=0.0
+2024-08-03 19:58:21,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.52 vs. limit=22.5
+2024-08-03 19:58:23,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.110e+02 1.275e+02 1.603e+02 2.813e+02, threshold=2.549e+02, percent-clipped=2.0
+2024-08-03 19:58:31,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=215328.66666666666, ans=0.125
+2024-08-03 19:58:32,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=215328.66666666666, ans=0.125
+2024-08-03 19:58:42,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=215402.0, ans=0.125
+2024-08-03 19:58:42,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=215402.0, ans=0.0
+2024-08-03 19:58:42,989 INFO [train.py:1114] (1/4) Epoch 17, batch 500, loss[loss=0.1975, simple_loss=0.2825, pruned_loss=0.05628, over 13382.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2718, pruned_loss=0.05011, over 2423740.80 frames. ], batch size: 43, lr: 7.40e-03, grad_scale: 32.0
+2024-08-03 19:59:10,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=215512.0, ans=0.2
+2024-08-03 19:59:23,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215548.66666666666, ans=0.0
+2024-08-03 19:59:28,224 INFO [train.py:1114] (1/4) Epoch 17, batch 550, loss[loss=0.2264, simple_loss=0.3101, pruned_loss=0.07135, over 13041.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2718, pruned_loss=0.04984, over 2466525.07 frames. ], batch size: 48, lr: 7.40e-03, grad_scale: 32.0
+2024-08-03 19:59:30,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=215585.33333333334, ans=0.125
+2024-08-03 19:59:34,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=215585.33333333334, ans=0.1
+2024-08-03 19:59:47,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=215658.66666666666, ans=0.0
+2024-08-03 19:59:47,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.40 vs. limit=15.0
+2024-08-03 19:59:57,001 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.283e+01 1.155e+02 1.328e+02 1.624e+02 2.790e+02, threshold=2.656e+02, percent-clipped=3.0
+2024-08-03 20:00:16,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=12.0
+2024-08-03 20:00:18,066 INFO [train.py:1114] (1/4) Epoch 17, batch 600, loss[loss=0.1735, simple_loss=0.2595, pruned_loss=0.04376, over 13332.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2723, pruned_loss=0.05024, over 2506792.57 frames. ], batch size: 46, lr: 7.40e-03, grad_scale: 32.0
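The Whitening lines compare a per-module metric against a limit. One plausible reading, consistent with values near 1 being ideal, is an eigenvalue-dispersion measure of the feature covariance that equals 1.0 when the features are perfectly white; this sketch is an assumption about the statistic, not the actual scaling.py code:

```python
# Sketch: a covariance "whiteness" metric of the kind the Whitening lines
# above appear to track (metric vs. limit). The exact statistic is assumed.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels); channels split into groups as in the logs.
    n, c = x.shape
    d = c // num_groups
    xg = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, frames, d)
    xg = xg - xg.mean(dim=1, keepdim=True)
    cov = xg.transpose(1, 2) @ xg / n                  # per-group covariance
    # dispersion = E[lambda^2] / (E[lambda])^2, via trace(cov^2) and trace(cov)
    metric = d * (cov * cov).sum(dim=(1, 2)) / (
        torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1) ** 2
    )
    return metric.mean().item()

print(whitening_metric(torch.randn(1000, 128), num_groups=4))  # ~1.0 for white noise
```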
+2024-08-03 20:00:26,189 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:00:28,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=215805.33333333334, ans=0.2
+2024-08-03 20:00:36,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=215842.0, ans=0.125
+2024-08-03 20:01:02,895 INFO [train.py:1114] (1/4) Epoch 17, batch 650, loss[loss=0.1676, simple_loss=0.2647, pruned_loss=0.03527, over 13550.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.272, pruned_loss=0.05008, over 2542488.53 frames. ], batch size: 37, lr: 7.39e-03, grad_scale: 32.0
+2024-08-03 20:01:10,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=215952.0, ans=0.0
+2024-08-03 20:01:29,731 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.069e+01 1.111e+02 1.221e+02 1.677e+02 3.173e+02, threshold=2.441e+02, percent-clipped=3.0
+2024-08-03 20:01:41,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=216098.66666666666, ans=0.0
+2024-08-03 20:01:52,173 INFO [train.py:1114] (1/4) Epoch 17, batch 700, loss[loss=0.167, simple_loss=0.2556, pruned_loss=0.03922, over 13514.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2722, pruned_loss=0.05003, over 2565226.42 frames. ], batch size: 35, lr: 7.39e-03, grad_scale: 16.0
+2024-08-03 20:01:53,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=216135.33333333334, ans=0.125
+2024-08-03 20:02:09,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216208.66666666666, ans=0.125
+2024-08-03 20:02:19,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216245.33333333334, ans=0.1
+2024-08-03 20:02:26,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216245.33333333334, ans=0.125
+2024-08-03 20:02:33,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0
+2024-08-03 20:02:37,428 INFO [train.py:1114] (1/4) Epoch 17, batch 750, loss[loss=0.1821, simple_loss=0.2754, pruned_loss=0.04442, over 13371.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2715, pruned_loss=0.04961, over 2581718.73 frames. ], batch size: 37, lr: 7.39e-03, grad_scale: 16.0
+2024-08-03 20:02:57,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216392.0, ans=0.1
+2024-08-03 20:03:05,010 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.455e+01 1.109e+02 1.251e+02 1.578e+02 2.500e+02, threshold=2.502e+02, percent-clipped=1.0
+2024-08-03 20:03:07,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216428.66666666666, ans=0.1
+2024-08-03 20:03:11,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=216428.66666666666, ans=0.0
+2024-08-03 20:03:20,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=216465.33333333334, ans=0.5
+2024-08-03 20:03:23,144 INFO [train.py:1114] (1/4) Epoch 17, batch 800, loss[loss=0.1885, simple_loss=0.2675, pruned_loss=0.05477, over 13350.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2724, pruned_loss=0.05007, over 2596637.71 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 32.0
+2024-08-03 20:03:24,177 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:03:27,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=216502.0, ans=0.125
+2024-08-03 20:04:09,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=216648.66666666666, ans=0.125
+2024-08-03 20:04:12,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216685.33333333334, ans=0.1
+2024-08-03 20:04:13,151 INFO [train.py:1114] (1/4) Epoch 17, batch 850, loss[loss=0.1814, simple_loss=0.2802, pruned_loss=0.0413, over 13338.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2723, pruned_loss=0.05002, over 2609650.12 frames. ], batch size: 40, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:04:14,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=216685.33333333334, ans=0.025
+2024-08-03 20:04:17,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.66 vs. limit=6.0
+2024-08-03 20:04:31,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=216758.66666666666, ans=0.2
+2024-08-03 20:04:32,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=216758.66666666666, ans=0.2
+2024-08-03 20:04:32,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=12.0
+2024-08-03 20:04:33,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=216758.66666666666, ans=0.2
+2024-08-03 20:04:34,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=216758.66666666666, ans=0.125
+2024-08-03 20:04:35,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=216758.66666666666, ans=0.125
+2024-08-03 20:04:40,985 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.726e+01 1.085e+02 1.274e+02 1.570e+02 2.707e+02, threshold=2.548e+02, percent-clipped=2.0
+2024-08-03 20:04:47,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216795.33333333334, ans=0.1
+2024-08-03 20:04:57,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.73 vs. limit=15.0
+2024-08-03 20:04:58,402 INFO [train.py:1114] (1/4) Epoch 17, batch 900, loss[loss=0.1514, simple_loss=0.2336, pruned_loss=0.03458, over 13330.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2724, pruned_loss=0.05039, over 2612079.63 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:05:01,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.98 vs. limit=22.5
+2024-08-03 20:05:06,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216905.33333333334, ans=0.125
+2024-08-03 20:05:12,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=216905.33333333334, ans=0.2
+2024-08-03 20:05:17,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=216942.0, ans=0.025
+2024-08-03 20:05:22,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=216942.0, ans=0.125
+2024-08-03 20:05:47,232 INFO [train.py:1114] (1/4) Epoch 17, batch 950, loss[loss=0.1791, simple_loss=0.2632, pruned_loss=0.04745, over 13532.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2719, pruned_loss=0.04999, over 2613616.22 frames. ], batch size: 34, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:05:47,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217052.0, ans=0.125
+2024-08-03 20:05:50,182 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.539e-03
+2024-08-03 20:05:55,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=217088.66666666666, ans=0.125
+2024-08-03 20:06:00,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=217088.66666666666, ans=0.2
+2024-08-03 20:06:15,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.469e+01 1.104e+02 1.288e+02 1.565e+02 2.337e+02, threshold=2.575e+02, percent-clipped=0.0
+2024-08-03 20:06:17,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217162.0, ans=0.125
+2024-08-03 20:06:21,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=217162.0, ans=0.125
+2024-08-03 20:06:33,353 INFO [train.py:1114] (1/4) Epoch 17, batch 1000, loss[loss=0.1811, simple_loss=0.267, pruned_loss=0.04758, over 13362.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2731, pruned_loss=0.05056, over 2611492.11 frames. ], batch size: 35, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:06:36,517 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:06:40,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=217235.33333333334, ans=0.0
+2024-08-03 20:06:48,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=217272.0, ans=0.125
+2024-08-03 20:06:52,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.62 vs. limit=15.0
+2024-08-03 20:07:02,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=217308.66666666666, ans=0.025
+2024-08-03 20:07:09,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=217345.33333333334, ans=0.125
+2024-08-03 20:07:13,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=217345.33333333334, ans=0.025
+2024-08-03 20:07:19,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.65 vs. limit=15.0
+2024-08-03 20:07:20,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=217382.0, ans=0.0
+2024-08-03 20:07:26,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=217418.66666666666, ans=0.0
+2024-08-03 20:07:26,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=217418.66666666666, ans=0.2
+2024-08-03 20:07:27,030 INFO [train.py:1114] (1/4) Epoch 17, batch 1050, loss[loss=0.2033, simple_loss=0.2937, pruned_loss=0.05649, over 13578.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2726, pruned_loss=0.05021, over 2615522.40 frames. ], batch size: 39, lr: 7.37e-03, grad_scale: 8.0
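grad_scale in the Epoch/batch lines is the dynamic loss scale of mixed-precision training (the Native AMP setup noted in the model card); it halves when overflowing gradients are detected (32.0 to 16.0 to 8.0 across the entries above) and grows back when training stays stable. A generic loop showing where such a scale lives; the linear model is a stand-in for the real network, and a CUDA device is assumed:

```python
# Sketch: a standard PyTorch Native AMP loop with a dynamic grad scale,
# of the kind reported as grad_scale in the training lines above.
import torch

model = torch.nn.Linear(80, 512).cuda()               # stand-in model
opt = torch.optim.Adam(model.parameters(), lr=7.4e-3)
scaler = torch.cuda.amp.GradScaler()                  # scale halves on inf/NaN grads

for step in range(100):
    x = torch.randn(16, 80, device="cuda")
    with torch.cuda.amp.autocast():
        loss = model(x).pow(2).mean()
    opt.zero_grad()
    scaler.scale(loss).backward()  # backprop on the scaled loss
    scaler.step(opt)               # unscales grads, skips the step on overflow
    scaler.update()                # adjusts the running grad_scale
```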
+2024-08-03 20:07:38,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=217455.33333333334, ans=0.125
+2024-08-03 20:07:54,416 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.33 vs. limit=22.5
+2024-08-03 20:07:55,672 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.928e+01 1.069e+02 1.230e+02 1.488e+02 2.448e+02, threshold=2.459e+02, percent-clipped=0.0
+2024-08-03 20:08:00,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217528.66666666666, ans=0.1
+2024-08-03 20:08:00,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.59 vs. limit=22.5
+2024-08-03 20:08:04,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=217565.33333333334, ans=0.2
+2024-08-03 20:08:05,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=217565.33333333334, ans=0.125
+2024-08-03 20:08:12,129 INFO [train.py:1114] (1/4) Epoch 17, batch 1100, loss[loss=0.1828, simple_loss=0.2649, pruned_loss=0.0504, over 13556.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2722, pruned_loss=0.05025, over 2619393.90 frames. ], batch size: 36, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:08:15,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.40 vs. limit=22.5
+2024-08-03 20:08:19,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=217602.0, ans=0.2
+2024-08-03 20:08:26,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=217638.66666666666, ans=0.0
+2024-08-03 20:08:34,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=15.0
+2024-08-03 20:08:35,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217675.33333333334, ans=0.1
+2024-08-03 20:08:38,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=217712.0, ans=0.125
+2024-08-03 20:08:56,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.85 vs. limit=22.5
+2024-08-03 20:08:57,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=217748.66666666666, ans=0.0
+2024-08-03 20:08:59,105 INFO [train.py:1114] (1/4) Epoch 17, batch 1150, loss[loss=0.1658, simple_loss=0.2449, pruned_loss=0.04335, over 13570.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2718, pruned_loss=0.05031, over 2619238.23 frames. ], batch size: 36, lr: 7.36e-03, grad_scale: 8.0
+2024-08-03 20:08:59,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=217785.33333333334, ans=0.0
+2024-08-03 20:09:05,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217785.33333333334, ans=0.1
+2024-08-03 20:09:11,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=217822.0, ans=0.125
+2024-08-03 20:09:14,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=217822.0, ans=0.025
+2024-08-03 20:09:16,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=217822.0, ans=0.0
+2024-08-03 20:09:17,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=217822.0, ans=0.025
+2024-08-03 20:09:25,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=217858.66666666666, ans=0.025
+2024-08-03 20:09:30,351 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.508e+01 1.111e+02 1.257e+02 1.521e+02 2.461e+02, threshold=2.515e+02, percent-clipped=1.0
+2024-08-03 20:09:44,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=217932.0, ans=0.125
+2024-08-03 20:09:46,440 INFO [train.py:1114] (1/4) Epoch 17, batch 1200, loss[loss=0.1904, simple_loss=0.2738, pruned_loss=0.05347, over 13598.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2723, pruned_loss=0.05024, over 2616560.91 frames. ], batch size: 39, lr: 7.36e-03, grad_scale: 16.0
+2024-08-03 20:10:09,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=218042.0, ans=0.0
+2024-08-03 20:10:14,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=218078.66666666666, ans=0.0
+2024-08-03 20:10:29,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=218115.33333333334, ans=0.2
+2024-08-03 20:10:32,172 INFO [train.py:1114] (1/4) Epoch 17, batch 1250, loss[loss=0.1951, simple_loss=0.2779, pruned_loss=0.05619, over 13435.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2726, pruned_loss=0.05046, over 2628175.71 frames. ], batch size: 42, lr: 7.36e-03, grad_scale: 16.0
+2024-08-03 20:11:05,408 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.654e+01 1.175e+02 1.463e+02 1.905e+02 2.984e+02, threshold=2.925e+02, percent-clipped=5.0
+2024-08-03 20:11:13,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=218298.66666666666, ans=0.2
+2024-08-03 20:11:18,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=218298.66666666666, ans=0.0
+2024-08-03 20:11:21,611 INFO [train.py:1114] (1/4) Epoch 17, batch 1300, loss[loss=0.2026, simple_loss=0.2881, pruned_loss=0.05854, over 12889.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2715, pruned_loss=0.04991, over 2631355.95 frames. ], batch size: 52, lr: 7.35e-03, grad_scale: 16.0
+2024-08-03 20:11:23,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=218335.33333333334, ans=0.0
+2024-08-03 20:11:30,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=218372.0, ans=0.0
+2024-08-03 20:12:06,805 INFO [train.py:1114] (1/4) Epoch 17, batch 1350, loss[loss=0.1919, simple_loss=0.2697, pruned_loss=0.05705, over 13529.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2718, pruned_loss=0.05003, over 2639816.13 frames. ], batch size: 37, lr: 7.35e-03, grad_scale: 16.0
+2024-08-03 20:12:22,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=218555.33333333334, ans=0.125
+2024-08-03 20:12:33,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218592.0, ans=0.125
+2024-08-03 20:12:37,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.168e+02 1.323e+02 1.597e+02 2.527e+02, threshold=2.645e+02, percent-clipped=0.0
+2024-08-03 20:12:42,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.67 vs. limit=22.5
+2024-08-03 20:12:54,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. limit=15.0
+2024-08-03 20:12:56,123 INFO [train.py:1114] (1/4) Epoch 17, batch 1400, loss[loss=0.1698, simple_loss=0.2412, pruned_loss=0.04921, over 13271.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2719, pruned_loss=0.05004, over 2643408.53 frames. ], batch size: 31, lr: 7.35e-03, grad_scale: 16.0
+2024-08-03 20:12:58,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=218702.0, ans=0.125
+2024-08-03 20:13:16,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=15.0
+2024-08-03 20:13:36,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218848.66666666666, ans=0.125
+2024-08-03 20:13:38,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=218848.66666666666, ans=0.0
+2024-08-03 20:13:41,301 INFO [train.py:1114] (1/4) Epoch 17, batch 1450, loss[loss=0.2039, simple_loss=0.2886, pruned_loss=0.05959, over 13431.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2724, pruned_loss=0.05025, over 2641387.65 frames. ], batch size: 43, lr: 7.34e-03, grad_scale: 16.0
+2024-08-03 20:13:55,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=218922.0, ans=0.025
+2024-08-03 20:13:57,508 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:14:10,050 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.093e+02 1.261e+02 1.597e+02 2.531e+02, threshold=2.522e+02, percent-clipped=0.0
+2024-08-03 20:14:29,614 INFO [train.py:1114] (1/4) Epoch 17, batch 1500, loss[loss=0.1772, simple_loss=0.2704, pruned_loss=0.04196, over 13389.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2728, pruned_loss=0.0504, over 2641072.91 frames. ], batch size: 39, lr: 7.34e-03, grad_scale: 16.0
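Each train.py:1114 entry pairs the current batch's loss, reported "over N frames", with tot_loss aggregated over roughly 2.6 million frames, which suggests a frame-weighted running average of recent batches. A small sketch of such a tracker; the decay factor is an assumed detail, not taken from the training code:

```python
# Sketch: a frame-weighted running average of per-batch losses, of the
# kind tot_loss appears to report in the train.py lines above.
class RunningLoss:
    def __init__(self, decay: float = 0.999):
        self.decay = decay      # assumption: older batches decay gradually
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames  # frame-weighted average

tracker = RunningLoss()
print(tracker.update(0.1702, 13547.0))  # first batch: just its own loss
```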
+2024-08-03 20:16:56,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=219618.66666666666, ans=0.125
+2024-08-03 20:16:58,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=219618.66666666666, ans=0.2
+2024-08-03 20:17:01,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=219618.66666666666, ans=0.0
+2024-08-03 20:17:18,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=219692.0, ans=0.125
+2024-08-03 20:17:21,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219692.0, ans=0.125
+2024-08-03 20:17:24,617 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.842e+01 1.100e+02 1.247e+02 1.816e+02 3.503e+02, threshold=2.494e+02, percent-clipped=6.0
+2024-08-03 20:17:25,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219728.66666666666, ans=0.125
+2024-08-03 20:17:31,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=219765.33333333334, ans=0.125
+2024-08-03 20:17:40,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.17 vs. limit=15.0
+2024-08-03 20:17:41,108 INFO [train.py:1114] (1/4) Epoch 17, batch 1700, loss[loss=0.1724, simple_loss=0.252, pruned_loss=0.0464, over 13253.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2733, pruned_loss=0.05068, over 2630428.84 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0
+2024-08-03 20:17:47,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=219802.0, ans=0.125
+2024-08-03 20:17:50,429 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.51 vs. limit=15.0
+2024-08-03 20:17:58,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219838.66666666666, ans=0.1
+2024-08-03 20:18:03,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=15.0
+2024-08-03 20:18:29,576 INFO [train.py:1114] (1/4) Epoch 17, batch 1750, loss[loss=0.1698, simple_loss=0.25, pruned_loss=0.04484, over 13536.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2722, pruned_loss=0.05008, over 2633362.23 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0
+2024-08-03 20:18:44,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=220022.0, ans=0.0
+2024-08-03 20:18:48,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=220022.0, ans=0.025
+2024-08-03 20:18:51,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=220058.66666666666, ans=0.0
+2024-08-03 20:18:55,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0
+2024-08-03 20:18:58,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=220095.33333333334, ans=0.2
+2024-08-03 20:18:58,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=220095.33333333334, ans=10.0
+2024-08-03 20:18:58,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.27 vs. limit=22.5
+2024-08-03 20:18:58,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.49 vs. limit=15.0
+2024-08-03 20:19:00,571 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.113e+02 1.270e+02 1.558e+02 2.524e+02, threshold=2.540e+02, percent-clipped=1.0
+2024-08-03 20:19:16,796 INFO [train.py:1114] (1/4) Epoch 17, batch 1800, loss[loss=0.2036, simple_loss=0.2909, pruned_loss=0.05812, over 13553.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2722, pruned_loss=0.04956, over 2634809.00 frames. ], batch size: 38, lr: 7.32e-03, grad_scale: 32.0
+2024-08-03 20:19:23,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=220168.66666666666, ans=0.125
+2024-08-03 20:19:29,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220205.33333333334, ans=0.125
+2024-08-03 20:19:33,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=220205.33333333334, ans=0.0
+2024-08-03 20:19:54,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=220278.66666666666, ans=0.125
+2024-08-03 20:19:58,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=220315.33333333334, ans=0.025
+2024-08-03 20:20:06,474 INFO [train.py:1114] (1/4) Epoch 17, batch 1850, loss[loss=0.1866, simple_loss=0.2807, pruned_loss=0.04626, over 13394.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2717, pruned_loss=0.0495, over 2636934.32 frames. ], batch size: 39, lr: 7.32e-03, grad_scale: 32.0
+2024-08-03 20:20:18,609 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:20:20,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0
+2024-08-03 20:20:22,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=220388.66666666666, ans=0.125
+2024-08-03 20:20:24,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=220425.33333333334, ans=0.0
+2024-08-03 20:20:25,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=220425.33333333334, ans=0.125
+2024-08-03 20:20:35,725 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.176e+01 1.159e+02 1.677e+02 2.408e+02 3.560e+02, threshold=3.354e+02, percent-clipped=19.0
+2024-08-03 20:20:38,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=220462.0, ans=0.1
+2024-08-03 20:20:52,107 INFO [train.py:1114] (1/4) Epoch 17, batch 1900, loss[loss=0.1952, simple_loss=0.291, pruned_loss=0.04966, over 13327.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2732, pruned_loss=0.05021, over 2639708.79 frames. ], batch size: 40, lr: 7.32e-03, grad_scale: 32.0
+2024-08-03 20:21:05,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.37 vs. limit=10.0
+2024-08-03 20:21:08,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220572.0, ans=0.1
+2024-08-03 20:21:11,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220608.66666666666, ans=0.1
+2024-08-03 20:21:18,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220645.33333333334, ans=0.1
+2024-08-03 20:21:20,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220645.33333333334, ans=0.125
+2024-08-03 20:21:36,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=220682.0, ans=0.125
+2024-08-03 20:21:38,839 INFO [train.py:1114] (1/4) Epoch 17, batch 1950, loss[loss=0.1693, simple_loss=0.255, pruned_loss=0.04179, over 13541.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2742, pruned_loss=0.05028, over 2646564.02 frames. ], batch size: 36, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:21:58,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.93 vs. limit=10.0
+2024-08-03 20:22:03,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0
+2024-08-03 20:22:10,355 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.292e+01 1.160e+02 1.302e+02 1.581e+02 2.993e+02, threshold=2.604e+02, percent-clipped=0.0
+2024-08-03 20:22:23,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.01 vs. limit=22.5
+2024-08-03 20:22:26,601 INFO [train.py:1114] (1/4) Epoch 17, batch 2000, loss[loss=0.1786, simple_loss=0.2539, pruned_loss=0.05164, over 13555.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2751, pruned_loss=0.05072, over 2637511.72 frames. ], batch size: 31, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:22:30,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=220902.0, ans=0.125
+2024-08-03 20:22:47,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220975.33333333334, ans=0.1
+2024-08-03 20:22:58,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=221012.0, ans=0.0
+2024-08-03 20:23:14,197 INFO [train.py:1114] (1/4) Epoch 17, batch 2050, loss[loss=0.166, simple_loss=0.2462, pruned_loss=0.04288, over 13419.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2735, pruned_loss=0.05033, over 2633810.04 frames. ], batch size: 32, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:23:34,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.94 vs. limit=5.0
+2024-08-03 20:23:38,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=221158.66666666666, ans=0.0
+2024-08-03 20:23:39,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=221158.66666666666, ans=0.125
+2024-08-03 20:23:42,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=221195.33333333334, ans=0.05
+2024-08-03 20:23:44,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=221195.33333333334, ans=0.2
+2024-08-03 20:23:45,011 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.513e+01 1.116e+02 1.230e+02 1.630e+02 2.618e+02, threshold=2.461e+02, percent-clipped=1.0
+2024-08-03 20:23:59,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=221232.0, ans=0.125
+2024-08-03 20:24:01,281 INFO [train.py:1114] (1/4) Epoch 17, batch 2100, loss[loss=0.2007, simple_loss=0.2892, pruned_loss=0.05613, over 13535.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2728, pruned_loss=0.04982, over 2639753.90 frames. ], batch size: 37, lr: 7.31e-03, grad_scale: 32.0
+2024-08-03 20:24:03,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.28 vs.
limit=22.5 +2024-08-03 20:24:07,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=221268.66666666666, ans=0.0 +2024-08-03 20:24:12,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=221305.33333333334, ans=0.125 +2024-08-03 20:24:22,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=221342.0, ans=0.125 +2024-08-03 20:24:23,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221342.0, ans=0.0 +2024-08-03 20:24:29,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=221378.66666666666, ans=0.2 +2024-08-03 20:24:32,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=221378.66666666666, ans=0.125 +2024-08-03 20:24:45,886 INFO [train.py:1114] (1/4) Epoch 17, batch 2150, loss[loss=0.1931, simple_loss=0.2807, pruned_loss=0.05281, over 13557.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2725, pruned_loss=0.04972, over 2648231.37 frames. ], batch size: 36, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:24:56,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=221488.66666666666, ans=0.0 +2024-08-03 20:25:08,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=221525.33333333334, ans=0.125 +2024-08-03 20:25:16,727 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.666e+01 1.100e+02 1.262e+02 1.658e+02 2.819e+02, threshold=2.523e+02, percent-clipped=4.0 +2024-08-03 20:25:19,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=221562.0, ans=0.2 +2024-08-03 20:25:28,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=221598.66666666666, ans=0.0 +2024-08-03 20:25:34,762 INFO [train.py:1114] (1/4) Epoch 17, batch 2200, loss[loss=0.1991, simple_loss=0.2976, pruned_loss=0.05033, over 13411.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2723, pruned_loss=0.04952, over 2645565.35 frames. ], batch size: 39, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:25:34,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=221635.33333333334, ans=0.025 +2024-08-03 20:25:41,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=221635.33333333334, ans=0.09899494936611666 +2024-08-03 20:25:52,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=221708.66666666666, ans=0.0 +2024-08-03 20:26:09,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221745.33333333334, ans=0.1 +2024-08-03 20:26:20,147 INFO [train.py:1114] (1/4) Epoch 17, batch 2250, loss[loss=0.1819, simple_loss=0.2817, pruned_loss=0.04105, over 13361.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2725, pruned_loss=0.04981, over 2643043.54 frames. 
], batch size: 37, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:26:20,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=221818.66666666666, ans=0.125 +2024-08-03 20:26:31,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.06 vs. limit=22.5 +2024-08-03 20:26:42,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221892.0, ans=0.125 +2024-08-03 20:26:46,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=221928.66666666666, ans=0.0 +2024-08-03 20:26:48,987 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.159e+02 1.389e+02 1.848e+02 3.074e+02, threshold=2.777e+02, percent-clipped=8.0 +2024-08-03 20:26:53,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=221928.66666666666, ans=0.125 +2024-08-03 20:26:54,688 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:26:59,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=221965.33333333334, ans=0.2 +2024-08-03 20:27:10,464 INFO [train.py:1114] (1/4) Epoch 17, batch 2300, loss[loss=0.1723, simple_loss=0.2553, pruned_loss=0.04465, over 13584.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2714, pruned_loss=0.04992, over 2639223.39 frames. ], batch size: 33, lr: 7.29e-03, grad_scale: 32.0 +2024-08-03 20:27:14,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=222002.0, ans=0.0 +2024-08-03 20:27:26,034 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:27:30,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.77 vs. limit=22.5 +2024-08-03 20:27:31,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=222075.33333333334, ans=0.125 +2024-08-03 20:27:49,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222148.66666666666, ans=0.125 +2024-08-03 20:27:49,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=222148.66666666666, ans=0.125 +2024-08-03 20:27:51,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=222148.66666666666, ans=0.0 +2024-08-03 20:27:56,080 INFO [train.py:1114] (1/4) Epoch 17, batch 2350, loss[loss=0.183, simple_loss=0.2721, pruned_loss=0.04692, over 13554.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2713, pruned_loss=0.04956, over 2641554.22 frames. ], batch size: 38, lr: 7.29e-03, grad_scale: 16.0 +2024-08-03 20:28:02,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.73 vs. limit=15.0 +2024-08-03 20:28:12,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.15 vs. 
limit=15.0 +2024-08-03 20:28:25,882 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.384e+01 1.095e+02 1.277e+02 1.611e+02 2.837e+02, threshold=2.555e+02, percent-clipped=1.0 +2024-08-03 20:28:43,085 INFO [train.py:1114] (1/4) Epoch 17, batch 2400, loss[loss=0.1704, simple_loss=0.2554, pruned_loss=0.04267, over 13534.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2718, pruned_loss=0.04973, over 2642851.52 frames. ], batch size: 35, lr: 7.29e-03, grad_scale: 32.0 +2024-08-03 20:28:43,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=222368.66666666666, ans=0.0 +2024-08-03 20:28:44,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-03 20:28:47,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=222368.66666666666, ans=0.025 +2024-08-03 20:29:02,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=222442.0, ans=0.125 +2024-08-03 20:29:11,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222478.66666666666, ans=0.1 +2024-08-03 20:29:11,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222478.66666666666, ans=0.125 +2024-08-03 20:29:21,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=222515.33333333334, ans=0.125 +2024-08-03 20:29:27,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=222515.33333333334, ans=0.0 +2024-08-03 20:29:30,151 INFO [train.py:1114] (1/4) Epoch 17, batch 2450, loss[loss=0.1989, simple_loss=0.2866, pruned_loss=0.05557, over 13350.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2739, pruned_loss=0.05093, over 2631992.82 frames. ], batch size: 37, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:29:42,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=222588.66666666666, ans=10.0 +2024-08-03 20:29:42,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=222588.66666666666, ans=0.0 +2024-08-03 20:29:59,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.386e+01 1.131e+02 1.264e+02 1.537e+02 2.363e+02, threshold=2.529e+02, percent-clipped=0.0 +2024-08-03 20:30:15,150 INFO [train.py:1114] (1/4) Epoch 17, batch 2500, loss[loss=0.1868, simple_loss=0.2768, pruned_loss=0.0484, over 13399.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2743, pruned_loss=0.05106, over 2636377.19 frames. 
], batch size: 39, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:30:16,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=222735.33333333334, ans=0.125 +2024-08-03 20:30:23,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=222772.0, ans=0.125 +2024-08-03 20:30:33,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=222808.66666666666, ans=0.125 +2024-08-03 20:30:34,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.91 vs. limit=10.0 +2024-08-03 20:30:40,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=222808.66666666666, ans=0.0 +2024-08-03 20:30:47,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222845.33333333334, ans=0.1 +2024-08-03 20:30:48,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=222845.33333333334, ans=0.0 +2024-08-03 20:30:59,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=222918.66666666666, ans=0.125 +2024-08-03 20:31:00,459 INFO [train.py:1114] (1/4) Epoch 17, batch 2550, loss[loss=0.1526, simple_loss=0.2373, pruned_loss=0.03393, over 13553.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2736, pruned_loss=0.0507, over 2638163.56 frames. ], batch size: 31, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:31:05,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=222918.66666666666, ans=0.0 +2024-08-03 20:31:09,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=222918.66666666666, ans=0.5 +2024-08-03 20:31:10,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=222955.33333333334, ans=0.0 +2024-08-03 20:31:13,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222955.33333333334, ans=0.1 +2024-08-03 20:31:21,242 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=6.98 vs. limit=15.0 +2024-08-03 20:31:24,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=222992.0, ans=0.0 +2024-08-03 20:31:25,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.59 vs. 
limit=15.0 +2024-08-03 20:31:31,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.159e+02 1.553e+02 2.092e+02 3.686e+02, threshold=3.106e+02, percent-clipped=10.0 +2024-08-03 20:31:31,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223028.66666666666, ans=0.125 +2024-08-03 20:31:41,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223065.33333333334, ans=0.125 +2024-08-03 20:31:44,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=223065.33333333334, ans=0.035 +2024-08-03 20:31:46,271 INFO [train.py:1114] (1/4) Epoch 17, batch 2600, loss[loss=0.1876, simple_loss=0.2758, pruned_loss=0.04971, over 13555.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2739, pruned_loss=0.05092, over 2637120.78 frames. ], batch size: 36, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:31:50,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=223102.0, ans=0.025 +2024-08-03 20:31:51,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=223102.0, ans=0.125 +2024-08-03 20:31:59,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223138.66666666666, ans=0.1 +2024-08-03 20:32:00,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223138.66666666666, ans=0.1 +2024-08-03 20:32:02,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0 +2024-08-03 20:32:09,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=223175.33333333334, ans=0.0 +2024-08-03 20:32:11,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=223212.0, ans=0.125 +2024-08-03 20:32:14,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223212.0, ans=0.1 +2024-08-03 20:32:24,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223248.66666666666, ans=0.125 +2024-08-03 20:32:27,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223248.66666666666, ans=0.125 +2024-08-03 20:32:29,419 INFO [train.py:1114] (1/4) Epoch 17, batch 2650, loss[loss=0.1939, simple_loss=0.2869, pruned_loss=0.05041, over 13268.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2745, pruned_loss=0.0512, over 2640188.06 frames. ], batch size: 46, lr: 7.27e-03, grad_scale: 16.0 +2024-08-03 20:32:40,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223322.0, ans=0.1 +2024-08-03 20:32:43,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.31 vs. 
limit=15.0 +2024-08-03 20:32:58,624 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.512e+01 1.129e+02 1.471e+02 1.804e+02 3.189e+02, threshold=2.942e+02, percent-clipped=1.0 +2024-08-03 20:33:08,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223432.0, ans=0.125 +2024-08-03 20:33:12,557 INFO [train.py:1114] (1/4) Epoch 17, batch 2700, loss[loss=0.2026, simple_loss=0.2937, pruned_loss=0.0558, over 13557.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2746, pruned_loss=0.05128, over 2636977.26 frames. ], batch size: 40, lr: 7.27e-03, grad_scale: 16.0 +2024-08-03 20:33:15,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=223468.66666666666, ans=0.0 +2024-08-03 20:33:26,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=223505.33333333334, ans=0.125 +2024-08-03 20:33:27,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=223505.33333333334, ans=0.125 +2024-08-03 20:33:45,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=223578.66666666666, ans=0.0 +2024-08-03 20:33:45,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=223578.66666666666, ans=0.5 +2024-08-03 20:33:48,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=15.0 +2024-08-03 20:33:56,037 INFO [train.py:1114] (1/4) Epoch 17, batch 2750, loss[loss=0.1552, simple_loss=0.2455, pruned_loss=0.03244, over 13341.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.273, pruned_loss=0.05097, over 2634816.33 frames. ], batch size: 34, lr: 7.27e-03, grad_scale: 16.0 +2024-08-03 20:34:13,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223725.33333333334, ans=0.1 +2024-08-03 20:34:14,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223725.33333333334, ans=0.125 +2024-08-03 20:34:20,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223725.33333333334, ans=0.1 +2024-08-03 20:34:23,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=223762.0, ans=0.125 +2024-08-03 20:34:26,195 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.377e+01 1.115e+02 1.294e+02 1.597e+02 2.305e+02, threshold=2.588e+02, percent-clipped=0.0 +2024-08-03 20:34:29,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0 +2024-08-03 20:34:32,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223798.66666666666, ans=0.1 +2024-08-03 20:34:40,273 INFO [train.py:1114] (1/4) Epoch 17, batch 2800, loss[loss=0.2265, simple_loss=0.2969, pruned_loss=0.078, over 8672.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2739, pruned_loss=0.05134, over 2626376.43 frames. 
], batch size: 96, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:35:01,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223908.66666666666, ans=0.125 +2024-08-03 20:35:03,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=223908.66666666666, ans=0.2 +2024-08-03 20:35:23,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=223982.0, ans=0.015 +2024-08-03 20:35:25,233 INFO [train.py:1114] (1/4) Epoch 17, batch 2850, loss[loss=0.155, simple_loss=0.2428, pruned_loss=0.03362, over 13361.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2741, pruned_loss=0.05119, over 2620545.36 frames. ], batch size: 35, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:35:54,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.205e+02 1.427e+02 1.924e+02 3.362e+02, threshold=2.855e+02, percent-clipped=10.0 +2024-08-03 20:36:03,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224165.33333333334, ans=0.0 +2024-08-03 20:36:10,086 INFO [train.py:1114] (1/4) Epoch 17, batch 2900, loss[loss=0.1845, simple_loss=0.271, pruned_loss=0.04899, over 13370.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2752, pruned_loss=0.05165, over 2631362.48 frames. ], batch size: 36, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:36:10,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=224202.0, ans=0.2 +2024-08-03 20:36:46,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0 +2024-08-03 20:36:47,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=224348.66666666666, ans=0.125 +2024-08-03 20:36:50,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224348.66666666666, ans=0.125 +2024-08-03 20:36:53,522 INFO [train.py:1114] (1/4) Epoch 17, batch 2950, loss[loss=0.1761, simple_loss=0.2636, pruned_loss=0.04425, over 13352.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2743, pruned_loss=0.05155, over 2629255.11 frames. ], batch size: 34, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:37:01,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=224422.0, ans=0.0 +2024-08-03 20:37:23,420 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.109e+02 1.313e+02 1.570e+02 2.324e+02, threshold=2.625e+02, percent-clipped=1.0 +2024-08-03 20:37:27,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=224495.33333333334, ans=0.125 +2024-08-03 20:37:30,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=224532.0, ans=0.125 +2024-08-03 20:37:37,367 INFO [train.py:1114] (1/4) Epoch 17, batch 3000, loss[loss=0.1652, simple_loss=0.2537, pruned_loss=0.03833, over 13547.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2738, pruned_loss=0.05128, over 2630337.90 frames. 
], batch size: 37, lr: 7.25e-03, grad_scale: 32.0 +2024-08-03 20:37:37,675 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 20:37:47,663 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.1723, simple_loss=0.2712, pruned_loss=0.03676, over 944034.00 frames. +2024-08-03 20:37:47,975 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 20:38:09,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224642.0, ans=0.1 +2024-08-03 20:38:24,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.70 vs. limit=22.5 +2024-08-03 20:38:32,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=224752.0, ans=0.125 +2024-08-03 20:38:33,104 INFO [train.py:1114] (1/4) Epoch 17, batch 3050, loss[loss=0.1532, simple_loss=0.2407, pruned_loss=0.03285, over 13514.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2746, pruned_loss=0.05146, over 2627747.64 frames. ], batch size: 35, lr: 7.25e-03, grad_scale: 32.0 +2024-08-03 20:38:41,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224788.66666666666, ans=0.1 +2024-08-03 20:39:01,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224862.0, ans=0.125 +2024-08-03 20:39:04,754 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.103e+02 1.242e+02 1.449e+02 2.712e+02, threshold=2.483e+02, percent-clipped=1.0 +2024-08-03 20:39:05,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=224862.0, ans=0.125 +2024-08-03 20:39:06,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=224862.0, ans=0.07 +2024-08-03 20:39:15,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=224898.66666666666, ans=0.125 +2024-08-03 20:39:15,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=224898.66666666666, ans=0.125 +2024-08-03 20:39:18,548 INFO [train.py:1114] (1/4) Epoch 17, batch 3100, loss[loss=0.2226, simple_loss=0.301, pruned_loss=0.07214, over 13321.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2742, pruned_loss=0.05129, over 2627237.04 frames. ], batch size: 46, lr: 7.25e-03, grad_scale: 32.0 +2024-08-03 20:39:35,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=12.0 +2024-08-03 20:40:01,363 INFO [train.py:1114] (1/4) Epoch 17, batch 3150, loss[loss=0.2137, simple_loss=0.3039, pruned_loss=0.06171, over 13025.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.274, pruned_loss=0.05094, over 2629008.85 frames. 
], batch size: 48, lr: 7.24e-03, grad_scale: 32.0 +2024-08-03 20:40:20,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=225192.0, ans=15.0 +2024-08-03 20:40:30,631 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.117e+02 1.300e+02 1.745e+02 2.777e+02, threshold=2.600e+02, percent-clipped=1.0 +2024-08-03 20:40:34,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=225228.66666666666, ans=0.125 +2024-08-03 20:40:44,311 INFO [train.py:1114] (1/4) Epoch 17, batch 3200, loss[loss=0.1891, simple_loss=0.2768, pruned_loss=0.05074, over 13538.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2735, pruned_loss=0.05097, over 2634392.52 frames. ], batch size: 37, lr: 7.24e-03, grad_scale: 32.0 +2024-08-03 20:40:45,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=225302.0, ans=0.2 +2024-08-03 20:40:47,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.44 vs. limit=22.5 +2024-08-03 20:41:00,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=225338.66666666666, ans=0.0 +2024-08-03 20:41:11,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=225412.0, ans=0.1 +2024-08-03 20:41:19,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-08-03 20:41:27,807 INFO [train.py:1114] (1/4) Epoch 17, batch 3250, loss[loss=0.1985, simple_loss=0.2884, pruned_loss=0.05427, over 13401.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2746, pruned_loss=0.05115, over 2638848.72 frames. ], batch size: 38, lr: 7.24e-03, grad_scale: 32.0 +2024-08-03 20:41:28,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=225485.33333333334, ans=0.0 +2024-08-03 20:41:35,469 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.86 vs. limit=22.5 +2024-08-03 20:41:39,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225522.0, ans=0.125 +2024-08-03 20:41:49,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. 
limit=15.0 +2024-08-03 20:41:57,624 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.641e+01 1.154e+02 1.402e+02 1.667e+02 2.489e+02, threshold=2.804e+02, percent-clipped=0.0 +2024-08-03 20:42:05,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=225632.0, ans=0.125 +2024-08-03 20:42:09,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=225632.0, ans=0.04949747468305833 +2024-08-03 20:42:09,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=225632.0, ans=0.05 +2024-08-03 20:42:11,278 INFO [train.py:1114] (1/4) Epoch 17, batch 3300, loss[loss=0.1809, simple_loss=0.2701, pruned_loss=0.0459, over 12881.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2728, pruned_loss=0.0503, over 2640219.07 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:42:13,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=225668.66666666666, ans=0.0 +2024-08-03 20:42:16,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=225668.66666666666, ans=0.025 +2024-08-03 20:42:29,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225742.0, ans=0.1 +2024-08-03 20:42:38,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.69 vs. limit=12.0 +2024-08-03 20:42:53,697 INFO [train.py:1114] (1/4) Epoch 17, batch 3350, loss[loss=0.2093, simple_loss=0.3022, pruned_loss=0.05822, over 13027.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2737, pruned_loss=0.05082, over 2631065.25 frames. ], batch size: 48, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:42:58,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=225852.0, ans=0.125 +2024-08-03 20:43:15,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.88 vs. limit=15.0 +2024-08-03 20:43:22,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=225962.0, ans=0.125 +2024-08-03 20:43:22,993 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.128e+02 1.245e+02 1.447e+02 2.027e+02, threshold=2.490e+02, percent-clipped=0.0 +2024-08-03 20:43:27,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225998.66666666666, ans=0.1 +2024-08-03 20:43:36,784 INFO [train.py:1114] (1/4) Epoch 17, batch 3400, loss[loss=0.1643, simple_loss=0.2446, pruned_loss=0.04199, over 13587.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2738, pruned_loss=0.05111, over 2626527.34 frames. 
], batch size: 31, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:43:39,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=226035.33333333334, ans=0.125 +2024-08-03 20:43:45,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226072.0, ans=0.1 +2024-08-03 20:43:53,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=226108.66666666666, ans=0.125 +2024-08-03 20:43:53,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=226108.66666666666, ans=0.0 +2024-08-03 20:44:00,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=226108.66666666666, ans=0.125 +2024-08-03 20:44:04,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226145.33333333334, ans=0.0 +2024-08-03 20:44:15,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=226182.0, ans=0.2 +2024-08-03 20:44:19,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.26 vs. limit=10.0 +2024-08-03 20:44:20,073 INFO [train.py:1114] (1/4) Epoch 17, batch 3450, loss[loss=0.2114, simple_loss=0.2969, pruned_loss=0.06295, over 12837.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2741, pruned_loss=0.05134, over 2629633.98 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:44:20,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=15.0 +2024-08-03 20:44:29,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.67 vs. limit=22.5 +2024-08-03 20:44:30,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=226255.33333333334, ans=0.125 +2024-08-03 20:44:32,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=226255.33333333334, ans=0.2 +2024-08-03 20:44:33,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=226255.33333333334, ans=0.125 +2024-08-03 20:44:35,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=226255.33333333334, ans=0.125 +2024-08-03 20:44:43,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226292.0, ans=0.1 +2024-08-03 20:44:49,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.26 vs. 
limit=10.0 +2024-08-03 20:44:49,799 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.757e+01 1.151e+02 1.446e+02 1.763e+02 3.368e+02, threshold=2.892e+02, percent-clipped=3.0 +2024-08-03 20:44:55,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=226365.33333333334, ans=0.025 +2024-08-03 20:44:55,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226365.33333333334, ans=0.1 +2024-08-03 20:45:02,687 INFO [train.py:1114] (1/4) Epoch 17, batch 3500, loss[loss=0.1609, simple_loss=0.25, pruned_loss=0.03588, over 13540.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2739, pruned_loss=0.05157, over 2631182.96 frames. ], batch size: 34, lr: 7.22e-03, grad_scale: 16.0 +2024-08-03 20:45:07,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=226402.0, ans=0.0 +2024-08-03 20:45:28,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=226512.0, ans=0.0 +2024-08-03 20:45:30,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=15.0 +2024-08-03 20:45:33,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=226512.0, ans=0.0 +2024-08-03 20:45:37,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=226548.66666666666, ans=0.0 +2024-08-03 20:45:45,721 INFO [train.py:1114] (1/4) Epoch 17, batch 3550, loss[loss=0.204, simple_loss=0.2903, pruned_loss=0.05887, over 12420.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2755, pruned_loss=0.05204, over 2629669.58 frames. ], batch size: 58, lr: 7.22e-03, grad_scale: 16.0 +2024-08-03 20:45:45,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=226585.33333333334, ans=0.125 +2024-08-03 20:45:47,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.10 vs. 
limit=15.0 +2024-08-03 20:45:47,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=226585.33333333334, ans=0.0 +2024-08-03 20:45:53,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=226585.33333333334, ans=0.5 +2024-08-03 20:45:53,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=226585.33333333334, ans=0.125 +2024-08-03 20:46:10,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226658.66666666666, ans=0.1 +2024-08-03 20:46:14,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=226695.33333333334, ans=0.0 +2024-08-03 20:46:17,978 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.203e+02 1.370e+02 1.580e+02 2.866e+02, threshold=2.739e+02, percent-clipped=0.0 +2024-08-03 20:46:22,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226732.0, ans=0.1 +2024-08-03 20:46:23,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.01 vs. limit=15.0 +2024-08-03 20:46:31,219 INFO [train.py:1114] (1/4) Epoch 17, batch 3600, loss[loss=0.2246, simple_loss=0.3022, pruned_loss=0.07351, over 9144.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2799, pruned_loss=0.05601, over 2491520.83 frames. ], batch size: 96, lr: 7.22e-03, grad_scale: 32.0 +2024-08-03 20:46:31,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=226768.66666666666, ans=0.05 +2024-08-03 20:46:58,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=226878.66666666666, ans=0.2 +2024-08-03 20:47:48,360 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1816, simple_loss=0.2662, pruned_loss=0.04847, over 13334.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2662, pruned_loss=0.04847, over 13334.00 frames. ], batch size: 33, lr: 7.01e-03, grad_scale: 32.0 +2024-08-03 20:47:48,361 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 20:47:53,676 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2811, 2.6695, 2.4741, 2.4358], device='cuda:1') +2024-08-03 20:47:58,989 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1737, simple_loss=0.274, pruned_loss=0.03673, over 944034.00 frames. +2024-08-03 20:47:58,990 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 20:48:09,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=226952.0, ans=0.02 +2024-08-03 20:48:22,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226988.66666666666, ans=0.1 +2024-08-03 20:48:33,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. 
limit=6.0 +2024-08-03 20:48:39,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227062.0, ans=0.125 +2024-08-03 20:48:40,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=15.0 +2024-08-03 20:48:40,349 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.175e+01 1.169e+02 1.268e+02 1.393e+02 2.818e+02, threshold=2.535e+02, percent-clipped=2.0 +2024-08-03 20:48:44,899 INFO [train.py:1114] (1/4) Epoch 18, batch 50, loss[loss=0.1411, simple_loss=0.2253, pruned_loss=0.02843, over 13433.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2722, pruned_loss=0.05045, over 578233.55 frames. ], batch size: 32, lr: 7.01e-03, grad_scale: 32.0 +2024-08-03 20:49:06,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.26 vs. limit=10.0 +2024-08-03 20:49:22,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227245.33333333334, ans=0.0 +2024-08-03 20:49:26,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227245.33333333334, ans=0.0 +2024-08-03 20:49:27,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0 +2024-08-03 20:49:31,408 INFO [train.py:1114] (1/4) Epoch 18, batch 100, loss[loss=0.1798, simple_loss=0.2638, pruned_loss=0.0479, over 13532.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2751, pruned_loss=0.05072, over 1026547.84 frames. ], batch size: 35, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:49:34,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227282.0, ans=0.125 +2024-08-03 20:49:40,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227282.0, ans=0.0 +2024-08-03 20:49:42,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=227318.66666666666, ans=0.0 +2024-08-03 20:49:57,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=227355.33333333334, ans=0.0 +2024-08-03 20:50:16,427 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.987e+01 1.102e+02 1.290e+02 1.676e+02 3.343e+02, threshold=2.579e+02, percent-clipped=6.0 +2024-08-03 20:50:19,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=227428.66666666666, ans=0.0 +2024-08-03 20:50:20,795 INFO [train.py:1114] (1/4) Epoch 18, batch 150, loss[loss=0.1485, simple_loss=0.2363, pruned_loss=0.03033, over 13412.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2729, pruned_loss=0.04972, over 1387669.78 frames. 
], batch size: 32, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:50:28,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227465.33333333334, ans=0.125 +2024-08-03 20:50:36,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=227502.0, ans=0.125 +2024-08-03 20:50:38,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227502.0, ans=0.0 +2024-08-03 20:50:43,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227538.66666666666, ans=0.125 +2024-08-03 20:50:44,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=227538.66666666666, ans=0.09899494936611666 +2024-08-03 20:50:49,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227575.33333333334, ans=0.1 +2024-08-03 20:51:07,692 INFO [train.py:1114] (1/4) Epoch 18, batch 200, loss[loss=0.177, simple_loss=0.2708, pruned_loss=0.0416, over 12405.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2724, pruned_loss=0.04955, over 1666640.61 frames. ], batch size: 58, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:51:13,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0 +2024-08-03 20:51:24,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227722.0, ans=0.1 +2024-08-03 20:51:34,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=227758.66666666666, ans=0.2 +2024-08-03 20:51:37,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227758.66666666666, ans=0.125 +2024-08-03 20:51:42,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227795.33333333334, ans=0.125 +2024-08-03 20:51:43,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.36 vs. limit=15.0 +2024-08-03 20:51:46,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=227795.33333333334, ans=10.0 +2024-08-03 20:51:48,063 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.522e+01 1.097e+02 1.244e+02 1.547e+02 2.709e+02, threshold=2.488e+02, percent-clipped=2.0 +2024-08-03 20:51:52,894 INFO [train.py:1114] (1/4) Epoch 18, batch 250, loss[loss=0.2088, simple_loss=0.2869, pruned_loss=0.06532, over 13327.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2716, pruned_loss=0.0493, over 1885196.16 frames. 
], batch size: 46, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:51:55,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=227832.0, ans=0.2 +2024-08-03 20:52:02,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227868.66666666666, ans=0.0 +2024-08-03 20:52:08,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227868.66666666666, ans=0.125 +2024-08-03 20:52:30,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=227978.66666666666, ans=0.0 +2024-08-03 20:52:37,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=228015.33333333334, ans=0.0 +2024-08-03 20:52:38,226 INFO [train.py:1114] (1/4) Epoch 18, batch 300, loss[loss=0.1996, simple_loss=0.2832, pruned_loss=0.05796, over 13458.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2714, pruned_loss=0.04949, over 2051936.87 frames. ], batch size: 42, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:53:03,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=228088.66666666666, ans=0.0 +2024-08-03 20:53:04,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=228125.33333333334, ans=0.125 +2024-08-03 20:53:14,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=228125.33333333334, ans=0.125 +2024-08-03 20:53:22,561 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.234e+01 1.098e+02 1.173e+02 1.552e+02 2.222e+02, threshold=2.347e+02, percent-clipped=0.0 +2024-08-03 20:53:27,233 INFO [train.py:1114] (1/4) Epoch 18, batch 350, loss[loss=0.1471, simple_loss=0.2322, pruned_loss=0.03103, over 13579.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2714, pruned_loss=0.04945, over 2182536.41 frames. ], batch size: 33, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:53:59,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=228308.66666666666, ans=0.0 +2024-08-03 20:54:01,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=228308.66666666666, ans=15.0 +2024-08-03 20:54:10,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=228345.33333333334, ans=0.125 +2024-08-03 20:54:13,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=228345.33333333334, ans=0.0 +2024-08-03 20:54:16,767 INFO [train.py:1114] (1/4) Epoch 18, batch 400, loss[loss=0.1859, simple_loss=0.2777, pruned_loss=0.04707, over 13368.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.271, pruned_loss=0.04928, over 2286704.16 frames. 
], batch size: 37, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:54:16,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228382.0, ans=0.125 +2024-08-03 20:54:21,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=228382.0, ans=0.025 +2024-08-03 20:54:39,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=228455.33333333334, ans=0.125 +2024-08-03 20:54:44,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228492.0, ans=0.1 +2024-08-03 20:54:57,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.985e+01 1.132e+02 1.280e+02 1.629e+02 3.189e+02, threshold=2.560e+02, percent-clipped=4.0 +2024-08-03 20:54:58,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.10 vs. limit=22.5 +2024-08-03 20:55:01,715 INFO [train.py:1114] (1/4) Epoch 18, batch 450, loss[loss=0.1705, simple_loss=0.2616, pruned_loss=0.03965, over 13554.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2712, pruned_loss=0.04917, over 2360782.66 frames. ], batch size: 38, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:55:21,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=228638.66666666666, ans=0.0 +2024-08-03 20:55:22,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=228638.66666666666, ans=10.0 +2024-08-03 20:55:28,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=228675.33333333334, ans=0.2 +2024-08-03 20:55:29,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=228675.33333333334, ans=0.125 +2024-08-03 20:55:35,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=228675.33333333334, ans=0.0 +2024-08-03 20:55:46,659 INFO [train.py:1114] (1/4) Epoch 18, batch 500, loss[loss=0.2011, simple_loss=0.2879, pruned_loss=0.05718, over 13410.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2699, pruned_loss=0.04862, over 2426255.52 frames. ], batch size: 43, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:55:56,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=228785.33333333334, ans=0.0 +2024-08-03 20:55:57,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=228785.33333333334, ans=0.125 +2024-08-03 20:56:06,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228822.0, ans=0.125 +2024-08-03 20:56:10,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228822.0, ans=0.1 +2024-08-03 20:56:11,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.48 vs. 
limit=15.0 +2024-08-03 20:56:18,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=228858.66666666666, ans=0.2 +2024-08-03 20:56:27,948 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.074e+02 1.245e+02 1.559e+02 2.675e+02, threshold=2.490e+02, percent-clipped=1.0 +2024-08-03 20:56:28,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=228895.33333333334, ans=0.09899494936611666 +2024-08-03 20:56:30,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=228895.33333333334, ans=0.125 +2024-08-03 20:56:32,408 INFO [train.py:1114] (1/4) Epoch 18, batch 550, loss[loss=0.1978, simple_loss=0.282, pruned_loss=0.05679, over 13036.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2699, pruned_loss=0.04852, over 2468733.80 frames. ], batch size: 48, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:56:43,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=228968.66666666666, ans=0.2 +2024-08-03 20:56:59,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229005.33333333334, ans=0.1 +2024-08-03 20:57:00,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229005.33333333334, ans=0.0 +2024-08-03 20:57:23,267 INFO [train.py:1114] (1/4) Epoch 18, batch 600, loss[loss=0.1901, simple_loss=0.2823, pruned_loss=0.04901, over 13272.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2702, pruned_loss=0.04842, over 2507702.34 frames. ], batch size: 46, lr: 6.98e-03, grad_scale: 16.0 +2024-08-03 20:57:31,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229115.33333333334, ans=0.0 +2024-08-03 20:57:33,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-08-03 20:57:37,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-08-03 20:57:41,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.48 vs. limit=22.5 +2024-08-03 20:57:53,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.96 vs. 
limit=15.0 +2024-08-03 20:57:59,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=229225.33333333334, ans=0.025 +2024-08-03 20:58:00,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=229262.0, ans=0.025 +2024-08-03 20:58:00,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229262.0, ans=0.125 +2024-08-03 20:58:05,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.114e+02 1.293e+02 1.855e+02 3.099e+02, threshold=2.585e+02, percent-clipped=2.0 +2024-08-03 20:58:09,360 INFO [train.py:1114] (1/4) Epoch 18, batch 650, loss[loss=0.1741, simple_loss=0.2692, pruned_loss=0.0395, over 13536.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2696, pruned_loss=0.0482, over 2543195.33 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:58:12,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=229298.66666666666, ans=0.09899494936611666 +2024-08-03 20:58:15,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229298.66666666666, ans=0.125 +2024-08-03 20:58:23,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=229335.33333333334, ans=0.125 +2024-08-03 20:58:27,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=229372.0, ans=0.2 +2024-08-03 20:58:30,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=229372.0, ans=0.04949747468305833 +2024-08-03 20:58:34,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=229372.0, ans=0.025 +2024-08-03 20:58:42,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229408.66666666666, ans=0.125 +2024-08-03 20:58:42,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.35 vs. limit=15.0 +2024-08-03 20:58:42,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.33 vs. limit=15.0 +2024-08-03 20:58:46,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=229445.33333333334, ans=0.125 +2024-08-03 20:58:49,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=229445.33333333334, ans=0.05 +2024-08-03 20:58:50,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229445.33333333334, ans=0.125 +2024-08-03 20:58:54,984 INFO [train.py:1114] (1/4) Epoch 18, batch 700, loss[loss=0.1548, simple_loss=0.2359, pruned_loss=0.03685, over 13537.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.27, pruned_loss=0.04849, over 2565137.54 frames. 
], batch size: 35, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:59:00,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=229482.0, ans=0.0 +2024-08-03 20:59:41,522 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.096e+02 1.191e+02 1.436e+02 2.621e+02, threshold=2.382e+02, percent-clipped=1.0 +2024-08-03 20:59:45,180 INFO [train.py:1114] (1/4) Epoch 18, batch 750, loss[loss=0.1922, simple_loss=0.2831, pruned_loss=0.05063, over 13359.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2695, pruned_loss=0.04843, over 2582002.15 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:59:49,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=229665.33333333334, ans=0.2 +2024-08-03 21:00:18,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=229775.33333333334, ans=0.0 +2024-08-03 21:00:21,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=229812.0, ans=0.0 +2024-08-03 21:00:22,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=229812.0, ans=0.2 +2024-08-03 21:00:23,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229812.0, ans=0.125 +2024-08-03 21:00:30,238 INFO [train.py:1114] (1/4) Epoch 18, batch 800, loss[loss=0.1705, simple_loss=0.2552, pruned_loss=0.04294, over 13336.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2697, pruned_loss=0.04869, over 2596635.46 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 32.0 +2024-08-03 21:00:37,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=229848.66666666666, ans=0.025 +2024-08-03 21:00:48,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=229848.66666666666, ans=0.0 +2024-08-03 21:01:09,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.95 vs. limit=15.0 +2024-08-03 21:01:26,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=229995.33333333334, ans=0.0 +2024-08-03 21:01:27,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=229995.33333333334, ans=0.025 +2024-08-03 21:01:28,072 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.223e+01 1.040e+02 1.288e+02 1.609e+02 2.437e+02, threshold=2.577e+02, percent-clipped=2.0 +2024-08-03 21:01:30,765 INFO [train.py:1114] (1/4) Epoch 18, batch 850, loss[loss=0.183, simple_loss=0.2755, pruned_loss=0.04523, over 13327.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2701, pruned_loss=0.04878, over 2609697.25 frames. 
], batch size: 40, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:01:35,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=230032.0, ans=0.2 +2024-08-03 21:01:35,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=230032.0, ans=0.0 +2024-08-03 21:02:10,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230178.66666666666, ans=0.1 +2024-08-03 21:02:11,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=230178.66666666666, ans=0.0 +2024-08-03 21:02:15,876 INFO [train.py:1114] (1/4) Epoch 18, batch 900, loss[loss=0.1613, simple_loss=0.2437, pruned_loss=0.03946, over 13333.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2708, pruned_loss=0.04898, over 2612129.09 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:02:18,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=230215.33333333334, ans=0.125 +2024-08-03 21:02:37,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=230288.66666666666, ans=0.025 +2024-08-03 21:02:49,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=230325.33333333334, ans=0.05 +2024-08-03 21:02:50,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=230325.33333333334, ans=0.125 +2024-08-03 21:02:59,119 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.753e+01 1.118e+02 1.313e+02 1.560e+02 2.225e+02, threshold=2.625e+02, percent-clipped=0.0 +2024-08-03 21:03:02,040 INFO [train.py:1114] (1/4) Epoch 18, batch 950, loss[loss=0.1513, simple_loss=0.239, pruned_loss=0.03181, over 13544.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2708, pruned_loss=0.0487, over 2613888.69 frames. ], batch size: 34, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:03:05,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230398.66666666666, ans=0.125 +2024-08-03 21:03:48,150 INFO [train.py:1114] (1/4) Epoch 18, batch 1000, loss[loss=0.2001, simple_loss=0.2833, pruned_loss=0.05842, over 13376.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2719, pruned_loss=0.04955, over 2612333.72 frames. 
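The `tot_loss[... over N frames]` figures are a frame-weighted running average, which is why N grows early in the epoch but then saturates near 2.6M frames instead of increasing without bound. That saturation is consistent with an exponentially forgotten sum where both the weighted loss and the frame count are scaled by (1 - 1/reset_interval) before each new batch is added; reset_interval = 200 is an inference from the saturated counts, not a value stated in the log. A sketch, with illustrative class and field names:

```python
# Frame-weighted running average with exponential forgetting, matching the
# behaviour of the tot_loss[... over N frames] numbers. reset_interval=200 is
# an inferred assumption: with ~13k frames/batch it saturates near 2.6M frames.
class DecayingLoss:
    def __init__(self, reset_interval: int = 200):
        self.decay = 1.0 - 1.0 / reset_interval
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: float) -> None:
        self.weighted_sum = self.weighted_sum * self.decay + loss * num_frames
        self.frames = self.frames * self.decay + num_frames

    @property
    def tot_loss(self) -> float:
        return self.weighted_sum / max(self.frames, 1.0)

t = DecayingLoss()
for _ in range(3000):
    t.update(0.185, 13000.0)
print(f"{t.frames:.0f}")  # ~2.6e6, matching the saturated counts in the log
```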
], batch size: 35, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:03:53,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=230582.0, ans=0.05 +2024-08-03 21:03:55,836 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:03:58,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230618.66666666666, ans=0.125 +2024-08-03 21:04:18,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=230655.33333333334, ans=0.125 +2024-08-03 21:04:27,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=230692.0, ans=0.125 +2024-08-03 21:04:34,935 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.346e+01 1.082e+02 1.211e+02 1.465e+02 2.308e+02, threshold=2.421e+02, percent-clipped=0.0 +2024-08-03 21:04:39,858 INFO [train.py:1114] (1/4) Epoch 18, batch 1050, loss[loss=0.1907, simple_loss=0.2908, pruned_loss=0.04525, over 13568.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2718, pruned_loss=0.0497, over 2616525.28 frames. ], batch size: 39, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:04:43,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=230765.33333333334, ans=0.2 +2024-08-03 21:04:43,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230765.33333333334, ans=0.1 +2024-08-03 21:04:48,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=230802.0, ans=0.2 +2024-08-03 21:04:48,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=230802.0, ans=0.5 +2024-08-03 21:04:56,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.63 vs. limit=15.0 +2024-08-03 21:05:01,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=230838.66666666666, ans=0.0 +2024-08-03 21:05:14,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=230875.33333333334, ans=0.0 +2024-08-03 21:05:16,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230912.0, ans=0.1 +2024-08-03 21:05:23,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230912.0, ans=0.125 +2024-08-03 21:05:26,815 INFO [train.py:1114] (1/4) Epoch 18, batch 1100, loss[loss=0.1654, simple_loss=0.2539, pruned_loss=0.03846, over 13562.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.04952, over 2619799.14 frames. 
], batch size: 36, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:06:05,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=231095.33333333334, ans=0.2 +2024-08-03 21:06:09,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=231095.33333333334, ans=0.0 +2024-08-03 21:06:09,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.017e+01 1.089e+02 1.225e+02 1.560e+02 2.576e+02, threshold=2.450e+02, percent-clipped=1.0 +2024-08-03 21:06:12,616 INFO [train.py:1114] (1/4) Epoch 18, batch 1150, loss[loss=0.1771, simple_loss=0.2618, pruned_loss=0.04622, over 13552.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.272, pruned_loss=0.04967, over 2619187.43 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:06:32,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=231205.33333333334, ans=0.2 +2024-08-03 21:06:37,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231205.33333333334, ans=0.1 +2024-08-03 21:06:40,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231242.0, ans=0.125 +2024-08-03 21:06:42,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-08-03 21:06:44,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=231242.0, ans=0.125 +2024-08-03 21:06:44,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=231242.0, ans=0.0 +2024-08-03 21:06:46,630 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:06:51,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=231278.66666666666, ans=0.035 +2024-08-03 21:06:58,352 INFO [train.py:1114] (1/4) Epoch 18, batch 1200, loss[loss=0.1767, simple_loss=0.2754, pruned_loss=0.03904, over 13563.00 frames. ], tot_loss[loss=0.185, simple_loss=0.272, pruned_loss=0.049, over 2616761.33 frames. ], batch size: 39, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:07:15,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=231388.66666666666, ans=0.125 +2024-08-03 21:07:19,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=231388.66666666666, ans=0.125 +2024-08-03 21:07:21,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=231388.66666666666, ans=0.125 +2024-08-03 21:07:23,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. 
limit=15.0 +2024-08-03 21:07:30,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=231425.33333333334, ans=0.125 +2024-08-03 21:07:35,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231462.0, ans=0.0 +2024-08-03 21:07:35,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=231462.0, ans=0.125 +2024-08-03 21:07:40,620 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.911e+01 1.076e+02 1.246e+02 1.591e+02 2.283e+02, threshold=2.493e+02, percent-clipped=0.0 +2024-08-03 21:07:41,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231462.0, ans=0.125 +2024-08-03 21:07:43,362 INFO [train.py:1114] (1/4) Epoch 18, batch 1250, loss[loss=0.2159, simple_loss=0.3046, pruned_loss=0.06356, over 13428.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.273, pruned_loss=0.04974, over 2628923.32 frames. ], batch size: 42, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:07:55,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=231535.33333333334, ans=0.125 +2024-08-03 21:07:58,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231535.33333333334, ans=0.1 +2024-08-03 21:07:59,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=231535.33333333334, ans=0.2 +2024-08-03 21:08:04,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=231572.0, ans=0.0 +2024-08-03 21:08:15,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=231608.66666666666, ans=0.0 +2024-08-03 21:08:16,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=231608.66666666666, ans=0.015 +2024-08-03 21:08:28,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=231645.33333333334, ans=0.025 +2024-08-03 21:08:36,235 INFO [train.py:1114] (1/4) Epoch 18, batch 1300, loss[loss=0.1882, simple_loss=0.2804, pruned_loss=0.04797, over 12823.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.272, pruned_loss=0.04925, over 2631986.09 frames. ], batch size: 52, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:08:36,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=231682.0, ans=0.2 +2024-08-03 21:08:38,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231682.0, ans=0.1 +2024-08-03 21:08:46,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.11 vs. 
limit=15.0 +2024-08-03 21:09:14,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231828.66666666666, ans=0.1 +2024-08-03 21:09:17,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231828.66666666666, ans=0.125 +2024-08-03 21:09:18,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=231828.66666666666, ans=0.025 +2024-08-03 21:09:18,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.05 vs. limit=15.0 +2024-08-03 21:09:19,121 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.689e+01 1.086e+02 1.232e+02 1.465e+02 2.299e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 21:09:21,776 INFO [train.py:1114] (1/4) Epoch 18, batch 1350, loss[loss=0.1775, simple_loss=0.2654, pruned_loss=0.0448, over 13541.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2724, pruned_loss=0.04972, over 2639197.25 frames. ], batch size: 37, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:09:23,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=231865.33333333334, ans=0.125 +2024-08-03 21:10:07,017 INFO [train.py:1114] (1/4) Epoch 18, batch 1400, loss[loss=0.1688, simple_loss=0.2511, pruned_loss=0.0433, over 13233.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2716, pruned_loss=0.04927, over 2642862.27 frames. ], batch size: 31, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:07,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232048.66666666666, ans=0.125 +2024-08-03 21:10:16,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=232085.33333333334, ans=0.025 +2024-08-03 21:10:28,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232122.0, ans=0.125 +2024-08-03 21:10:49,664 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.779e+01 1.159e+02 1.340e+02 1.703e+02 2.344e+02, threshold=2.679e+02, percent-clipped=0.0 +2024-08-03 21:10:52,347 INFO [train.py:1114] (1/4) Epoch 18, batch 1450, loss[loss=0.2047, simple_loss=0.2905, pruned_loss=0.05946, over 13435.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2722, pruned_loss=0.04958, over 2642176.32 frames. ], batch size: 43, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:52,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=232232.0, ans=0.025 +2024-08-03 21:11:08,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.32 vs. 
limit=22.5 +2024-08-03 21:11:11,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232305.33333333334, ans=0.125 +2024-08-03 21:11:20,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=232342.0, ans=0.025 +2024-08-03 21:11:23,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=232342.0, ans=0.125 +2024-08-03 21:11:28,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0 +2024-08-03 21:11:40,681 INFO [train.py:1114] (1/4) Epoch 18, batch 1500, loss[loss=0.181, simple_loss=0.2754, pruned_loss=0.04337, over 13389.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2717, pruned_loss=0.04882, over 2641510.50 frames. ], batch size: 39, lr: 6.93e-03, grad_scale: 16.0 +2024-08-03 21:11:48,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=232415.33333333334, ans=0.125 +2024-08-03 21:11:52,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=232452.0, ans=0.0 +2024-08-03 21:11:59,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=232452.0, ans=0.0 +2024-08-03 21:12:05,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=232488.66666666666, ans=0.125 +2024-08-03 21:12:28,167 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.098e+02 1.405e+02 1.700e+02 3.079e+02, threshold=2.810e+02, percent-clipped=1.0 +2024-08-03 21:12:29,961 INFO [train.py:1114] (1/4) Epoch 18, batch 1550, loss[loss=0.2264, simple_loss=0.309, pruned_loss=0.0719, over 13404.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2714, pruned_loss=0.0488, over 2630970.18 frames. ], batch size: 41, lr: 6.92e-03, grad_scale: 16.0 +2024-08-03 21:12:49,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=232672.0, ans=0.125 +2024-08-03 21:12:59,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.14 vs. limit=15.0 +2024-08-03 21:13:05,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=232745.33333333334, ans=0.125 +2024-08-03 21:13:06,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.56 vs. limit=22.5 +2024-08-03 21:13:15,286 INFO [train.py:1114] (1/4) Epoch 18, batch 1600, loss[loss=0.1885, simple_loss=0.287, pruned_loss=0.04498, over 13590.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.271, pruned_loss=0.04881, over 2624103.68 frames. 
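The `scaling.py:1024` "Whitening" lines report an anisotropy metric against a limit (e.g. `metric=19.32 vs. limit=22.5` above); the whitening modules only push back on activations whose metric exceeds their limit. One such metric, equal to 1.0 for perfectly white (identity-covariance) features and growing as variance concentrates in a few directions, is sketched below; this is a plausible formulation, not necessarily the exact formula in scaling.py.

```python
# Sketch of a covariance-anisotropy metric in the spirit of the Whitening
# log lines: 1.0 for white features, larger when the spectrum concentrates.
# The exact normalization used by scaling.py may differ.
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    x = x.reshape(-1, x.shape[-1])            # (frames, channels)
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.t() @ x) / x.shape[0]            # (channels, channels)
    c = cov.shape[0]
    # Equals c * sum(eig^2) / sum(eig)^2, which is >= 1 by Cauchy-Schwarz
    # and exactly 1 when cov is proportional to the identity.
    return (cov ** 2).sum() / ((torch.diagonal(cov).mean() ** 2) * c)

x = torch.randn(1000, 384)                    # roughly white input
print(float(whitening_metric(x)))             # close to 1
print(float(whitening_metric(x @ torch.randn(384, 384))))  # typically larger
```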
], batch size: 39, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:13:22,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=232782.0, ans=0.025 +2024-08-03 21:13:24,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232818.66666666666, ans=0.1 +2024-08-03 21:13:30,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=232818.66666666666, ans=0.0 +2024-08-03 21:13:34,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=232855.33333333334, ans=0.0 +2024-08-03 21:13:48,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=232892.0, ans=0.125 +2024-08-03 21:13:49,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=232892.0, ans=0.125 +2024-08-03 21:13:52,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=232892.0, ans=0.0 +2024-08-03 21:14:01,387 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.392e+01 1.109e+02 1.247e+02 1.559e+02 3.003e+02, threshold=2.495e+02, percent-clipped=1.0 +2024-08-03 21:14:03,138 INFO [train.py:1114] (1/4) Epoch 18, batch 1650, loss[loss=0.1934, simple_loss=0.2891, pruned_loss=0.04886, over 13315.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2705, pruned_loss=0.04869, over 2621761.16 frames. ], batch size: 40, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:14:13,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=233002.0, ans=0.04949747468305833 +2024-08-03 21:14:29,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=233075.33333333334, ans=0.2 +2024-08-03 21:14:48,608 INFO [train.py:1114] (1/4) Epoch 18, batch 1700, loss[loss=0.1864, simple_loss=0.2583, pruned_loss=0.05722, over 13257.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2703, pruned_loss=0.04863, over 2630543.33 frames. ], batch size: 31, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:15:03,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233185.33333333334, ans=0.1 +2024-08-03 21:15:17,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.65 vs. 
limit=10.0 +2024-08-03 21:15:21,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=233258.66666666666, ans=0.125 +2024-08-03 21:15:27,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=233258.66666666666, ans=0.0 +2024-08-03 21:15:35,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233295.33333333334, ans=0.1 +2024-08-03 21:15:41,602 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.564e+01 1.087e+02 1.250e+02 1.627e+02 3.379e+02, threshold=2.500e+02, percent-clipped=6.0 +2024-08-03 21:15:42,574 INFO [train.py:1114] (1/4) Epoch 18, batch 1750, loss[loss=0.1717, simple_loss=0.2499, pruned_loss=0.04677, over 13547.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2706, pruned_loss=0.04881, over 2633823.70 frames. ], batch size: 31, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:15:59,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=233368.66666666666, ans=0.125 +2024-08-03 21:16:06,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=233405.33333333334, ans=0.125 +2024-08-03 21:16:25,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=233478.66666666666, ans=0.125 +2024-08-03 21:16:26,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=233478.66666666666, ans=0.125 +2024-08-03 21:16:27,997 INFO [train.py:1114] (1/4) Epoch 18, batch 1800, loss[loss=0.2205, simple_loss=0.302, pruned_loss=0.06952, over 13558.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2708, pruned_loss=0.04929, over 2634250.19 frames. ], batch size: 38, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:16:34,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=233515.33333333334, ans=0.0 +2024-08-03 21:16:46,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.58 vs. limit=15.0 +2024-08-03 21:17:01,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233625.33333333334, ans=0.125 +2024-08-03 21:17:02,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233625.33333333334, ans=0.1 +2024-08-03 21:17:12,815 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.188e+02 1.406e+02 1.892e+02 3.223e+02, threshold=2.812e+02, percent-clipped=8.0 +2024-08-03 21:17:13,751 INFO [train.py:1114] (1/4) Epoch 18, batch 1850, loss[loss=0.1987, simple_loss=0.2897, pruned_loss=0.05383, over 13406.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.271, pruned_loss=0.04905, over 2637308.10 frames. 
], batch size: 39, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:17:39,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=233772.0, ans=0.125 +2024-08-03 21:17:45,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0 +2024-08-03 21:17:56,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233845.33333333334, ans=0.125 +2024-08-03 21:17:58,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=233882.0, ans=0.125 +2024-08-03 21:17:59,005 INFO [train.py:1114] (1/4) Epoch 18, batch 1900, loss[loss=0.194, simple_loss=0.2788, pruned_loss=0.0546, over 13314.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2716, pruned_loss=0.04945, over 2640501.74 frames. ], batch size: 40, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:18:01,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=233882.0, ans=0.04949747468305833 +2024-08-03 21:18:21,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.14 vs. limit=15.0 +2024-08-03 21:18:30,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233992.0, ans=0.1 +2024-08-03 21:18:32,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233992.0, ans=0.1 +2024-08-03 21:18:48,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.661e+01 1.108e+02 1.336e+02 1.650e+02 2.713e+02, threshold=2.672e+02, percent-clipped=0.0 +2024-08-03 21:18:48,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=234065.33333333334, ans=0.2 +2024-08-03 21:18:48,976 INFO [train.py:1114] (1/4) Epoch 18, batch 1950, loss[loss=0.1809, simple_loss=0.2623, pruned_loss=0.04975, over 13549.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2725, pruned_loss=0.04955, over 2647093.67 frames. ], batch size: 36, lr: 6.90e-03, grad_scale: 16.0 +2024-08-03 21:18:52,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.89 vs. 
limit=15.0 +2024-08-03 21:19:06,088 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:19:06,979 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:19:19,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=234138.66666666666, ans=10.0 +2024-08-03 21:19:27,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=234175.33333333334, ans=0.125 +2024-08-03 21:19:27,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=234175.33333333334, ans=0.5 +2024-08-03 21:19:35,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=234212.0, ans=0.025 +2024-08-03 21:19:43,527 INFO [train.py:1114] (1/4) Epoch 18, batch 2000, loss[loss=0.168, simple_loss=0.2485, pruned_loss=0.04377, over 13531.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2733, pruned_loss=0.05001, over 2636313.09 frames. ], batch size: 31, lr: 6.90e-03, grad_scale: 32.0 +2024-08-03 21:19:52,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=234285.33333333334, ans=0.0 +2024-08-03 21:20:19,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.03 vs. limit=12.0 +2024-08-03 21:20:23,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.03 vs. limit=15.0 +2024-08-03 21:20:28,746 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.785e+01 1.141e+02 1.284e+02 1.591e+02 2.253e+02, threshold=2.569e+02, percent-clipped=0.0 +2024-08-03 21:20:29,731 INFO [train.py:1114] (1/4) Epoch 18, batch 2050, loss[loss=0.1531, simple_loss=0.2336, pruned_loss=0.03634, over 13442.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2721, pruned_loss=0.04958, over 2633632.32 frames. ], batch size: 32, lr: 6.90e-03, grad_scale: 32.0 +2024-08-03 21:20:32,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=234432.0, ans=0.0 +2024-08-03 21:20:40,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234468.66666666666, ans=0.125 +2024-08-03 21:20:58,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234542.0, ans=0.125 +2024-08-03 21:20:59,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=234542.0, ans=0.025 +2024-08-03 21:21:14,907 INFO [train.py:1114] (1/4) Epoch 18, batch 2100, loss[loss=0.1829, simple_loss=0.2783, pruned_loss=0.04376, over 13543.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2714, pruned_loss=0.04926, over 2638793.59 frames. 
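The recurring `optim.py:487` warnings print five quantiles (min, 25%, median, 75%, max) of recent gradient norms together with the clipping threshold and the fraction of batches clipped. The threshold is evidently clipping_scale times the median: in the warning above, 2.0 · 1.284e+02 ≈ 2.569e+02. A sketch of that bookkeeping; the window size and class name are assumptions, only the scale-times-median rule is read off the log.

```python
# Gradient clipping threshold derived from recent grad-norm statistics, as in
# the optim.py warnings above: threshold = clipping_scale * median grad-norm.
# Window size and names are illustrative assumptions.
import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 100):
        self.scale = clipping_scale
        self.window = window
        self.norms: list[float] = []

    def threshold(self) -> float:
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        # q holds the five quantiles printed in the log; clip at scale * median
        return self.scale * float(q[2])

    def observe(self, grad_norm: float) -> bool:
        """Record a norm; return True if it exceeds the current threshold
        (counted toward the logged percent-clipped)."""
        self.norms = (self.norms + [grad_norm])[-self.window:]
        return grad_norm > self.threshold()
```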
], batch size: 37, lr: 6.89e-03, grad_scale: 32.0 +2024-08-03 21:21:18,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=234615.33333333334, ans=0.125 +2024-08-03 21:21:30,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=234652.0, ans=0.0 +2024-08-03 21:21:31,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234652.0, ans=0.1 +2024-08-03 21:21:33,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234652.0, ans=0.0 +2024-08-03 21:21:35,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=234688.66666666666, ans=0.025 +2024-08-03 21:21:42,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=234688.66666666666, ans=0.125 +2024-08-03 21:21:44,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=234725.33333333334, ans=0.2 +2024-08-03 21:21:52,602 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:22:01,490 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.081e+02 1.198e+02 1.435e+02 3.177e+02, threshold=2.396e+02, percent-clipped=1.0 +2024-08-03 21:22:01,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=234798.66666666666, ans=0.0 +2024-08-03 21:22:02,383 INFO [train.py:1114] (1/4) Epoch 18, batch 2150, loss[loss=0.1489, simple_loss=0.2447, pruned_loss=0.02653, over 13556.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2702, pruned_loss=0.04864, over 2647418.31 frames. ], batch size: 36, lr: 6.89e-03, grad_scale: 32.0 +2024-08-03 21:22:23,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.66 vs. limit=10.0 +2024-08-03 21:22:24,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-08-03 21:22:34,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=234908.66666666666, ans=0.0 +2024-08-03 21:22:35,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=234908.66666666666, ans=0.0 +2024-08-03 21:22:38,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234908.66666666666, ans=0.1 +2024-08-03 21:22:50,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=234945.33333333334, ans=0.95 +2024-08-03 21:22:51,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=234945.33333333334, ans=0.125 +2024-08-03 21:22:54,821 INFO [train.py:1114] (1/4) Epoch 18, batch 2200, loss[loss=0.1788, simple_loss=0.2712, pruned_loss=0.04317, over 13408.00 frames. 
], tot_loss[loss=0.183, simple_loss=0.2698, pruned_loss=0.0481, over 2645174.92 frames. ], batch size: 39, lr: 6.89e-03, grad_scale: 16.0 +2024-08-03 21:23:13,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235055.33333333334, ans=0.0 +2024-08-03 21:23:23,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0 +2024-08-03 21:23:40,427 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.106e+01 1.127e+02 1.422e+02 1.837e+02 2.804e+02, threshold=2.844e+02, percent-clipped=6.0 +2024-08-03 21:23:40,464 INFO [train.py:1114] (1/4) Epoch 18, batch 2250, loss[loss=0.1991, simple_loss=0.2862, pruned_loss=0.05598, over 13353.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2699, pruned_loss=0.04808, over 2642460.09 frames. ], batch size: 37, lr: 6.89e-03, grad_scale: 16.0 +2024-08-03 21:23:51,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=235202.0, ans=0.025 +2024-08-03 21:23:52,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.13 vs. limit=15.0 +2024-08-03 21:23:58,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=235238.66666666666, ans=0.2 +2024-08-03 21:24:01,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=15.0 +2024-08-03 21:24:25,615 INFO [train.py:1114] (1/4) Epoch 18, batch 2300, loss[loss=0.1802, simple_loss=0.2557, pruned_loss=0.05237, over 13596.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2686, pruned_loss=0.04801, over 2639102.76 frames. ], batch size: 33, lr: 6.88e-03, grad_scale: 16.0 +2024-08-03 21:24:42,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235385.33333333334, ans=0.0 +2024-08-03 21:24:57,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=235458.66666666666, ans=0.95 +2024-08-03 21:24:58,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=15.0 +2024-08-03 21:25:04,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.96 vs. limit=6.0 +2024-08-03 21:25:06,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=235495.33333333334, ans=0.125 +2024-08-03 21:25:10,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.665e+01 1.085e+02 1.233e+02 1.641e+02 2.605e+02, threshold=2.466e+02, percent-clipped=0.0 +2024-08-03 21:25:10,901 INFO [train.py:1114] (1/4) Epoch 18, batch 2350, loss[loss=0.1898, simple_loss=0.2821, pruned_loss=0.04876, over 13552.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2682, pruned_loss=0.04782, over 2641031.82 frames. ], batch size: 38, lr: 6.88e-03, grad_scale: 16.0 +2024-08-03 21:25:14,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.61 vs. 
limit=22.5 +2024-08-03 21:25:29,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=235605.33333333334, ans=0.07 +2024-08-03 21:25:34,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=235605.33333333334, ans=0.125 +2024-08-03 21:25:47,223 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:25:57,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=235715.33333333334, ans=0.2 +2024-08-03 21:25:57,726 INFO [train.py:1114] (1/4) Epoch 18, batch 2400, loss[loss=0.1684, simple_loss=0.2636, pruned_loss=0.03664, over 13540.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2691, pruned_loss=0.04816, over 2642420.88 frames. ], batch size: 35, lr: 6.88e-03, grad_scale: 32.0 +2024-08-03 21:26:08,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235752.0, ans=0.0 +2024-08-03 21:26:08,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=235752.0, ans=0.125 +2024-08-03 21:26:08,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=235752.0, ans=0.0 +2024-08-03 21:26:20,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=235752.0, ans=0.125 +2024-08-03 21:26:25,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=235788.66666666666, ans=0.125 +2024-08-03 21:26:38,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.53 vs. limit=22.5 +2024-08-03 21:26:38,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.36 vs. limit=12.0 +2024-08-03 21:26:41,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=235862.0, ans=0.125 +2024-08-03 21:26:48,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.162e+01 1.095e+02 1.213e+02 1.558e+02 2.561e+02, threshold=2.426e+02, percent-clipped=1.0 +2024-08-03 21:26:49,003 INFO [train.py:1114] (1/4) Epoch 18, batch 2450, loss[loss=0.2034, simple_loss=0.2926, pruned_loss=0.0571, over 13351.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.27, pruned_loss=0.04854, over 2632252.47 frames. ], batch size: 37, lr: 6.88e-03, grad_scale: 32.0 +2024-08-03 21:26:50,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=235898.66666666666, ans=0.125 +2024-08-03 21:26:56,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.20 vs. limit=15.0 +2024-08-03 21:27:34,609 INFO [train.py:1114] (1/4) Epoch 18, batch 2500, loss[loss=0.2065, simple_loss=0.2975, pruned_loss=0.05774, over 13407.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2705, pruned_loss=0.04865, over 2636680.82 frames. 
], batch size: 39, lr: 6.87e-03, grad_scale: 32.0 +2024-08-03 21:27:35,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=236082.0, ans=0.125 +2024-08-03 21:28:17,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=6.63 vs. limit=15.0 +2024-08-03 21:28:19,323 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.154e+02 1.291e+02 1.813e+02 3.422e+02, threshold=2.583e+02, percent-clipped=8.0 +2024-08-03 21:28:19,360 INFO [train.py:1114] (1/4) Epoch 18, batch 2550, loss[loss=0.1535, simple_loss=0.2336, pruned_loss=0.03668, over 13535.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.27, pruned_loss=0.04832, over 2638188.65 frames. ], batch size: 31, lr: 6.87e-03, grad_scale: 32.0 +2024-08-03 21:28:22,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.30 vs. limit=15.0 +2024-08-03 21:28:30,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=236302.0, ans=0.125 +2024-08-03 21:28:31,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.86 vs. limit=15.0 +2024-08-03 21:28:33,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236302.0, ans=0.1 +2024-08-03 21:28:34,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=236302.0, ans=0.1 +2024-08-03 21:28:44,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=236375.33333333334, ans=0.2 +2024-08-03 21:28:44,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.78 vs. limit=22.5 +2024-08-03 21:28:52,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-03 21:28:55,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=236412.0, ans=0.0 +2024-08-03 21:29:00,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=236412.0, ans=0.125 +2024-08-03 21:29:03,076 INFO [train.py:1114] (1/4) Epoch 18, batch 2600, loss[loss=0.1656, simple_loss=0.2621, pruned_loss=0.03448, over 13557.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2709, pruned_loss=0.04887, over 2637568.86 frames. 
], batch size: 36, lr: 6.87e-03, grad_scale: 16.0 +2024-08-03 21:29:03,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=236448.66666666666, ans=0.125 +2024-08-03 21:29:24,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236522.0, ans=0.125 +2024-08-03 21:29:28,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=236558.66666666666, ans=0.125 +2024-08-03 21:29:41,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=236595.33333333334, ans=0.125 +2024-08-03 21:29:47,191 INFO [train.py:1114] (1/4) Epoch 18, batch 2650, loss[loss=0.1907, simple_loss=0.271, pruned_loss=0.05519, over 13349.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2712, pruned_loss=0.04906, over 2640338.66 frames. ], batch size: 46, lr: 6.87e-03, grad_scale: 16.0 +2024-08-03 21:29:47,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236632.0, ans=0.125 +2024-08-03 21:29:48,040 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.186e+01 1.172e+02 1.315e+02 1.569e+02 3.387e+02, threshold=2.631e+02, percent-clipped=2.0 +2024-08-03 21:29:55,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236668.66666666666, ans=0.1 +2024-08-03 21:30:07,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.65 vs. limit=22.5 +2024-08-03 21:30:28,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. limit=6.0 +2024-08-03 21:30:28,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236778.66666666666, ans=0.125 +2024-08-03 21:30:30,354 INFO [train.py:1114] (1/4) Epoch 18, batch 2700, loss[loss=0.1958, simple_loss=0.2803, pruned_loss=0.05565, over 13543.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2712, pruned_loss=0.0491, over 2636957.63 frames. ], batch size: 40, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:30:31,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=236815.33333333334, ans=0.125 +2024-08-03 21:31:01,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.42 vs. limit=10.0 +2024-08-03 21:31:02,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=236925.33333333334, ans=0.2 +2024-08-03 21:31:13,958 INFO [train.py:1114] (1/4) Epoch 18, batch 2750, loss[loss=0.1735, simple_loss=0.2603, pruned_loss=0.04331, over 13326.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2703, pruned_loss=0.04886, over 2634548.00 frames. 
], batch size: 34, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:31:14,723 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.152e+02 1.311e+02 1.647e+02 2.709e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 21:31:16,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.47 vs. limit=22.5 +2024-08-03 21:31:22,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=237035.33333333334, ans=0.2 +2024-08-03 21:31:22,905 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:31:37,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237072.0, ans=0.125 +2024-08-03 21:31:38,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237072.0, ans=0.125 +2024-08-03 21:31:40,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=237108.66666666666, ans=0.05 +2024-08-03 21:31:45,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=237108.66666666666, ans=0.125 +2024-08-03 21:31:46,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=237108.66666666666, ans=0.125 +2024-08-03 21:31:46,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=237108.66666666666, ans=0.125 +2024-08-03 21:31:48,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=237145.33333333334, ans=0.025 +2024-08-03 21:31:57,332 INFO [train.py:1114] (1/4) Epoch 18, batch 2800, loss[loss=0.246, simple_loss=0.3113, pruned_loss=0.09034, over 9064.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2704, pruned_loss=0.04887, over 2626611.49 frames. ], batch size: 96, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:32:41,589 INFO [train.py:1114] (1/4) Epoch 18, batch 2850, loss[loss=0.1579, simple_loss=0.2505, pruned_loss=0.03267, over 13346.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2714, pruned_loss=0.04903, over 2619606.44 frames. ], batch size: 35, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:32:41,721 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:32:43,240 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.041e+01 1.131e+02 1.338e+02 1.690e+02 3.058e+02, threshold=2.676e+02, percent-clipped=5.0 +2024-08-03 21:33:20,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=237512.0, ans=0.025 +2024-08-03 21:33:25,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=237512.0, ans=0.025 +2024-08-03 21:33:28,455 INFO [train.py:1114] (1/4) Epoch 18, batch 2900, loss[loss=0.174, simple_loss=0.2613, pruned_loss=0.04336, over 13365.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2722, pruned_loss=0.04896, over 2630571.12 frames. 
], batch size: 36, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:33:37,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=237585.33333333334, ans=0.0 +2024-08-03 21:33:42,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=237585.33333333334, ans=0.025 +2024-08-03 21:33:46,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.60 vs. limit=22.5 +2024-08-03 21:33:49,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=237622.0, ans=0.2 +2024-08-03 21:33:54,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237658.66666666666, ans=0.1 +2024-08-03 21:34:03,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237695.33333333334, ans=0.125 +2024-08-03 21:34:11,740 INFO [train.py:1114] (1/4) Epoch 18, batch 2950, loss[loss=0.1765, simple_loss=0.2618, pruned_loss=0.04564, over 13346.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2713, pruned_loss=0.04907, over 2628571.09 frames. ], batch size: 34, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:34:13,341 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.218e+01 1.166e+02 1.489e+02 1.763e+02 2.783e+02, threshold=2.978e+02, percent-clipped=2.0 +2024-08-03 21:34:45,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=237878.66666666666, ans=0.125 +2024-08-03 21:34:48,323 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:34:53,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=237878.66666666666, ans=0.0 +2024-08-03 21:34:54,885 INFO [train.py:1114] (1/4) Epoch 18, batch 3000, loss[loss=0.1708, simple_loss=0.2592, pruned_loss=0.04123, over 13543.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2706, pruned_loss=0.04884, over 2628932.55 frames. ], batch size: 37, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:34:54,886 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 21:35:00,425 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.3597, 3.9561, 3.6413, 3.4301], device='cuda:1') +2024-08-03 21:35:04,731 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1701, simple_loss=0.269, pruned_loss=0.03557, over 944034.00 frames. +2024-08-03 21:35:04,732 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 21:35:09,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237915.33333333334, ans=0.125 +2024-08-03 21:35:19,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.35 vs. 
limit=12.0 +2024-08-03 21:35:22,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=237988.66666666666, ans=0.125 +2024-08-03 21:35:28,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=237988.66666666666, ans=0.125 +2024-08-03 21:35:31,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=238025.33333333334, ans=0.0 +2024-08-03 21:35:38,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=238025.33333333334, ans=0.0 +2024-08-03 21:35:48,164 INFO [train.py:1114] (1/4) Epoch 18, batch 3050, loss[loss=0.1717, simple_loss=0.2616, pruned_loss=0.04093, over 13539.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.272, pruned_loss=0.04945, over 2626509.32 frames. ], batch size: 35, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:35:49,907 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.482e+01 1.049e+02 1.161e+02 1.346e+02 2.617e+02, threshold=2.322e+02, percent-clipped=0.0 +2024-08-03 21:36:08,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=238172.0, ans=0.125 +2024-08-03 21:36:09,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=238172.0, ans=0.5 +2024-08-03 21:36:13,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=238208.66666666666, ans=0.0 +2024-08-03 21:36:16,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.57 vs. limit=15.0 +2024-08-03 21:36:31,289 INFO [train.py:1114] (1/4) Epoch 18, batch 3100, loss[loss=0.1923, simple_loss=0.2776, pruned_loss=0.0535, over 13301.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2716, pruned_loss=0.04948, over 2626280.64 frames. ], batch size: 46, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:36:33,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238282.0, ans=0.1 +2024-08-03 21:36:40,395 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:36:54,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238355.33333333334, ans=0.125 +2024-08-03 21:36:56,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=238355.33333333334, ans=0.0 +2024-08-03 21:37:04,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0 +2024-08-03 21:37:14,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=238465.33333333334, ans=0.125 +2024-08-03 21:37:15,533 INFO [train.py:1114] (1/4) Epoch 18, batch 3150, loss[loss=0.1813, simple_loss=0.2674, pruned_loss=0.04754, over 13079.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2713, pruned_loss=0.04918, over 2629188.00 frames. 
], batch size: 48, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:37:17,262 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.157e+01 1.121e+02 1.376e+02 1.775e+02 3.223e+02, threshold=2.752e+02, percent-clipped=7.0 +2024-08-03 21:37:33,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=238538.66666666666, ans=0.0 +2024-08-03 21:37:37,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.23 vs. limit=15.0 +2024-08-03 21:37:39,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238538.66666666666, ans=0.1 +2024-08-03 21:37:44,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.06 vs. limit=15.0 +2024-08-03 21:37:45,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=238575.33333333334, ans=0.125 +2024-08-03 21:37:52,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238612.0, ans=0.1 +2024-08-03 21:37:58,750 INFO [train.py:1114] (1/4) Epoch 18, batch 3200, loss[loss=0.1852, simple_loss=0.2674, pruned_loss=0.05156, over 13539.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.271, pruned_loss=0.04898, over 2635336.88 frames. ], batch size: 37, lr: 6.84e-03, grad_scale: 32.0 +2024-08-03 21:38:10,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=238685.33333333334, ans=0.04949747468305833 +2024-08-03 21:38:19,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.93 vs. limit=15.0 +2024-08-03 21:38:40,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=238795.33333333334, ans=0.0 +2024-08-03 21:38:41,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-08-03 21:38:43,449 INFO [train.py:1114] (1/4) Epoch 18, batch 3250, loss[loss=0.1931, simple_loss=0.2813, pruned_loss=0.05241, over 13407.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2722, pruned_loss=0.0493, over 2640145.73 frames. 
], batch size: 38, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:38:45,173 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.129e+02 1.271e+02 1.537e+02 2.545e+02, threshold=2.542e+02, percent-clipped=0.0 +2024-08-03 21:38:45,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=238832.0, ans=0.0 +2024-08-03 21:39:00,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238905.33333333334, ans=0.1 +2024-08-03 21:39:03,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=238905.33333333334, ans=12.0 +2024-08-03 21:39:04,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238905.33333333334, ans=0.125 +2024-08-03 21:39:10,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=238942.0, ans=0.0 +2024-08-03 21:39:10,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238942.0, ans=0.1 +2024-08-03 21:39:13,783 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.30 vs. limit=22.5 +2024-08-03 21:39:13,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.60 vs. limit=22.5 +2024-08-03 21:39:27,912 INFO [train.py:1114] (1/4) Epoch 18, batch 3300, loss[loss=0.1752, simple_loss=0.262, pruned_loss=0.04416, over 12996.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.271, pruned_loss=0.0492, over 2641960.49 frames. ], batch size: 52, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:39:37,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239052.0, ans=0.125 +2024-08-03 21:39:43,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=239052.0, ans=10.0 +2024-08-03 21:39:46,401 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.86 vs. limit=22.5 +2024-08-03 21:39:47,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=239088.66666666666, ans=0.125 +2024-08-03 21:39:52,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=239088.66666666666, ans=0.0 +2024-08-03 21:39:59,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=239125.33333333334, ans=0.125 +2024-08-03 21:40:10,663 INFO [train.py:1114] (1/4) Epoch 18, batch 3350, loss[loss=0.1985, simple_loss=0.2842, pruned_loss=0.05642, over 12987.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.272, pruned_loss=0.0496, over 2631161.08 frames. 
], batch size: 48, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:40:12,337 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.013e+01 1.097e+02 1.292e+02 1.574e+02 2.403e+02, threshold=2.585e+02, percent-clipped=0.0 +2024-08-03 21:40:13,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239198.66666666666, ans=0.125 +2024-08-03 21:40:20,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=239235.33333333334, ans=0.07 +2024-08-03 21:40:26,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=239272.0, ans=0.0 +2024-08-03 21:40:38,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.59 vs. limit=22.5 +2024-08-03 21:40:40,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239308.66666666666, ans=0.125 +2024-08-03 21:40:40,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=239308.66666666666, ans=0.125 +2024-08-03 21:40:41,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.53 vs. limit=12.0 +2024-08-03 21:40:53,382 INFO [train.py:1114] (1/4) Epoch 18, batch 3400, loss[loss=0.1515, simple_loss=0.2371, pruned_loss=0.03297, over 13559.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2713, pruned_loss=0.04909, over 2626249.25 frames. ], batch size: 31, lr: 6.83e-03, grad_scale: 16.0 +2024-08-03 21:40:56,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=239382.0, ans=0.0 +2024-08-03 21:41:03,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=239418.66666666666, ans=15.0 +2024-08-03 21:41:06,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.64 vs. 
limit=15.0 +2024-08-03 21:41:13,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239455.33333333334, ans=0.1 +2024-08-03 21:41:17,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239455.33333333334, ans=0.0 +2024-08-03 21:41:19,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=239492.0, ans=0.09899494936611666 +2024-08-03 21:41:25,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=239492.0, ans=0.125 +2024-08-03 21:41:27,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=239528.66666666666, ans=0.025 +2024-08-03 21:41:28,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239528.66666666666, ans=0.125 +2024-08-03 21:41:32,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=239528.66666666666, ans=0.0 +2024-08-03 21:41:36,136 INFO [train.py:1114] (1/4) Epoch 18, batch 3450, loss[loss=0.1741, simple_loss=0.2672, pruned_loss=0.04052, over 12857.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.271, pruned_loss=0.04909, over 2629860.77 frames. ], batch size: 52, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:41:36,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239565.33333333334, ans=0.125 +2024-08-03 21:41:36,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=239565.33333333334, ans=0.125 +2024-08-03 21:41:38,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.763e+01 1.067e+02 1.315e+02 1.546e+02 2.791e+02, threshold=2.630e+02, percent-clipped=1.0 +2024-08-03 21:41:48,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=239602.0, ans=0.125 +2024-08-03 21:41:54,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=239638.66666666666, ans=0.025 +2024-08-03 21:42:13,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.09 vs. limit=15.0 +2024-08-03 21:42:18,553 INFO [train.py:1114] (1/4) Epoch 18, batch 3500, loss[loss=0.1707, simple_loss=0.2563, pruned_loss=0.04256, over 13527.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2709, pruned_loss=0.04942, over 2632416.31 frames. ], batch size: 34, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:42:36,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=239822.0, ans=0.0 +2024-08-03 21:42:38,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.76 vs. 
limit=15.0 +2024-08-03 21:42:41,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=239822.0, ans=0.125 +2024-08-03 21:42:50,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=239858.66666666666, ans=0.2 +2024-08-03 21:42:54,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=239895.33333333334, ans=0.025 +2024-08-03 21:42:54,462 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.15 vs. limit=15.0 +2024-08-03 21:43:02,818 INFO [train.py:1114] (1/4) Epoch 18, batch 3550, loss[loss=0.1751, simple_loss=0.2591, pruned_loss=0.04554, over 12401.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.273, pruned_loss=0.05069, over 2630378.34 frames. ], batch size: 58, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:43:03,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239932.0, ans=0.125 +2024-08-03 21:43:05,283 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.515e+01 1.163e+02 1.331e+02 1.591e+02 2.731e+02, threshold=2.663e+02, percent-clipped=1.0 +2024-08-03 21:43:17,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.16 vs. limit=10.0 +2024-08-03 21:43:28,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240005.33333333334, ans=0.125 +2024-08-03 21:43:47,806 INFO [train.py:1114] (1/4) Epoch 18, batch 3600, loss[loss=0.2496, simple_loss=0.3207, pruned_loss=0.0892, over 8757.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2771, pruned_loss=0.05402, over 2486774.76 frames. ], batch size: 96, lr: 6.82e-03, grad_scale: 32.0 +2024-08-03 21:45:02,737 INFO [train.py:1114] (1/4) Epoch 19, batch 0, loss[loss=0.1615, simple_loss=0.2501, pruned_loss=0.03647, over 13337.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2501, pruned_loss=0.03647, over 13337.00 frames. ], batch size: 33, lr: 6.63e-03, grad_scale: 32.0 +2024-08-03 21:45:02,739 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 21:45:07,636 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.1115, 3.1347, 3.0089, 1.8694], device='cuda:1') +2024-08-03 21:45:13,117 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1699, simple_loss=0.2705, pruned_loss=0.03462, over 944034.00 frames. +2024-08-03 21:45:13,175 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 21:45:16,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.54 vs. 
limit=22.5 +2024-08-03 21:45:23,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240298.66666666666, ans=0.125 +2024-08-03 21:45:26,671 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.737e+01 1.210e+02 1.300e+02 1.388e+02 2.591e+02, threshold=2.600e+02, percent-clipped=0.0 +2024-08-03 21:46:03,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=240408.66666666666, ans=0.125 +2024-08-03 21:46:05,633 INFO [train.py:1114] (1/4) Epoch 19, batch 50, loss[loss=0.153, simple_loss=0.2347, pruned_loss=0.03562, over 13419.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2738, pruned_loss=0.05106, over 578969.94 frames. ], batch size: 32, lr: 6.63e-03, grad_scale: 32.0 +2024-08-03 21:46:06,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=240445.33333333334, ans=15.0 +2024-08-03 21:46:18,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=240482.0, ans=0.07 +2024-08-03 21:46:19,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240482.0, ans=0.125 +2024-08-03 21:46:32,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=240555.33333333334, ans=0.025 +2024-08-03 21:46:51,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240592.0, ans=0.1 +2024-08-03 21:46:53,414 INFO [train.py:1114] (1/4) Epoch 19, batch 100, loss[loss=0.1811, simple_loss=0.2686, pruned_loss=0.04678, over 13534.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2736, pruned_loss=0.04972, over 1026301.70 frames. ], batch size: 35, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:47:03,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=240665.33333333334, ans=0.0 +2024-08-03 21:47:05,121 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.117e+02 1.255e+02 1.420e+02 2.602e+02, threshold=2.511e+02, percent-clipped=1.0 +2024-08-03 21:47:09,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=240665.33333333334, ans=0.125 +2024-08-03 21:47:18,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240702.0, ans=0.1 +2024-08-03 21:47:27,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240738.66666666666, ans=0.125 +2024-08-03 21:47:36,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.56 vs. limit=15.0 +2024-08-03 21:47:38,536 INFO [train.py:1114] (1/4) Epoch 19, batch 150, loss[loss=0.169, simple_loss=0.2539, pruned_loss=0.04209, over 13426.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2709, pruned_loss=0.04825, over 1387671.37 frames. 
], batch size: 32, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:48:08,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=240922.0, ans=0.07 +2024-08-03 21:48:10,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=240922.0, ans=0.125 +2024-08-03 21:48:19,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=240958.66666666666, ans=0.0 +2024-08-03 21:48:21,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240958.66666666666, ans=0.125 +2024-08-03 21:48:24,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=240958.66666666666, ans=0.125 +2024-08-03 21:48:26,777 INFO [train.py:1114] (1/4) Epoch 19, batch 200, loss[loss=0.1937, simple_loss=0.2773, pruned_loss=0.05502, over 12822.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.269, pruned_loss=0.04779, over 1667415.43 frames. ], batch size: 59, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:48:28,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.68 vs. limit=15.0 +2024-08-03 21:48:29,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=240995.33333333334, ans=0.125 +2024-08-03 21:48:38,533 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.918e+01 1.063e+02 1.213e+02 1.459e+02 3.041e+02, threshold=2.427e+02, percent-clipped=0.0 +2024-08-03 21:48:40,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.14 vs. limit=15.0 +2024-08-03 21:48:44,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=241068.66666666666, ans=0.125 +2024-08-03 21:48:45,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241068.66666666666, ans=0.125 +2024-08-03 21:48:51,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=241068.66666666666, ans=0.5 +2024-08-03 21:49:11,662 INFO [train.py:1114] (1/4) Epoch 19, batch 250, loss[loss=0.193, simple_loss=0.2778, pruned_loss=0.05409, over 13364.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.04825, over 1887167.34 frames. ], batch size: 46, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:49:44,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241288.66666666666, ans=0.125 +2024-08-03 21:49:47,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241288.66666666666, ans=0.125 +2024-08-03 21:49:48,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241288.66666666666, ans=0.125 +2024-08-03 21:49:59,623 INFO [train.py:1114] (1/4) Epoch 19, batch 300, loss[loss=0.1982, simple_loss=0.2806, pruned_loss=0.05787, over 13459.00 frames. 
], tot_loss[loss=0.1828, simple_loss=0.2694, pruned_loss=0.04816, over 2053451.76 frames. ], batch size: 42, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:50:13,614 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.068e+01 1.095e+02 1.222e+02 1.449e+02 2.776e+02, threshold=2.445e+02, percent-clipped=4.0 +2024-08-03 21:50:21,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0 +2024-08-03 21:50:25,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241435.33333333334, ans=0.1 +2024-08-03 21:50:34,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241472.0, ans=0.125 +2024-08-03 21:50:35,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=241472.0, ans=0.025 +2024-08-03 21:50:36,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=241472.0, ans=0.0 +2024-08-03 21:50:39,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=241508.66666666666, ans=0.0 +2024-08-03 21:50:40,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-08-03 21:50:41,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=241508.66666666666, ans=0.125 +2024-08-03 21:50:52,428 INFO [train.py:1114] (1/4) Epoch 19, batch 350, loss[loss=0.173, simple_loss=0.2519, pruned_loss=0.04706, over 13585.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2695, pruned_loss=0.04797, over 2183783.50 frames. ], batch size: 33, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:51:07,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241582.0, ans=0.125 +2024-08-03 21:51:21,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=241655.33333333334, ans=0.0 +2024-08-03 21:51:34,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=241692.0, ans=0.0 +2024-08-03 21:51:34,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=241692.0, ans=0.1 +2024-08-03 21:51:38,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=241692.0, ans=0.2 +2024-08-03 21:51:40,079 INFO [train.py:1114] (1/4) Epoch 19, batch 400, loss[loss=0.2007, simple_loss=0.2855, pruned_loss=0.05796, over 13364.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2693, pruned_loss=0.04773, over 2287561.94 frames. 
], batch size: 37, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:51:52,136 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.083e+02 1.174e+02 1.521e+02 2.282e+02, threshold=2.347e+02, percent-clipped=0.0 +2024-08-03 21:52:01,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=241802.0, ans=0.125 +2024-08-03 21:52:02,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241802.0, ans=0.1 +2024-08-03 21:52:10,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=241838.66666666666, ans=0.125 +2024-08-03 21:52:28,386 INFO [train.py:1114] (1/4) Epoch 19, batch 450, loss[loss=0.1869, simple_loss=0.2777, pruned_loss=0.04803, over 13559.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2695, pruned_loss=0.04784, over 2361865.69 frames. ], batch size: 38, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:52:34,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=241912.0, ans=0.0 +2024-08-03 21:52:36,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=241948.66666666666, ans=0.0 +2024-08-03 21:52:43,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241948.66666666666, ans=0.2 +2024-08-03 21:52:43,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=241948.66666666666, ans=0.125 +2024-08-03 21:52:44,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=241948.66666666666, ans=0.0 +2024-08-03 21:52:44,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=241948.66666666666, ans=0.125 +2024-08-03 21:52:45,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=241948.66666666666, ans=0.0 +2024-08-03 21:53:07,736 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-08-03 21:53:16,372 INFO [train.py:1114] (1/4) Epoch 19, batch 500, loss[loss=0.1918, simple_loss=0.2835, pruned_loss=0.05005, over 13431.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2685, pruned_loss=0.04745, over 2426551.74 frames. ], batch size: 43, lr: 6.60e-03, grad_scale: 32.0 +2024-08-03 21:53:24,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=15.0 +2024-08-03 21:53:29,123 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.780e+01 1.159e+02 1.379e+02 1.825e+02 3.055e+02, threshold=2.757e+02, percent-clipped=7.0 +2024-08-03 21:53:29,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. 
limit=6.0 +2024-08-03 21:53:51,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242205.33333333334, ans=0.125 +2024-08-03 21:53:56,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=242242.0, ans=0.125 +2024-08-03 21:53:56,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=242242.0, ans=0.125 +2024-08-03 21:54:03,931 INFO [train.py:1114] (1/4) Epoch 19, batch 550, loss[loss=0.1989, simple_loss=0.2795, pruned_loss=0.05914, over 13284.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2681, pruned_loss=0.04759, over 2468719.17 frames. ], batch size: 49, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:54:12,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242278.66666666666, ans=0.125 +2024-08-03 21:54:18,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=242315.33333333334, ans=0.0 +2024-08-03 21:54:45,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242425.33333333334, ans=0.1 +2024-08-03 21:54:48,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242425.33333333334, ans=0.1 +2024-08-03 21:54:51,042 INFO [train.py:1114] (1/4) Epoch 19, batch 600, loss[loss=0.2035, simple_loss=0.2875, pruned_loss=0.05971, over 13304.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2687, pruned_loss=0.04775, over 2509118.09 frames. ], batch size: 46, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:54:55,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. limit=6.0 +2024-08-03 21:55:03,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.087e+02 1.240e+02 1.431e+02 2.352e+02, threshold=2.480e+02, percent-clipped=0.0 +2024-08-03 21:55:09,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=242498.66666666666, ans=0.125 +2024-08-03 21:55:11,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242535.33333333334, ans=0.1 +2024-08-03 21:55:36,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=242608.66666666666, ans=0.2 +2024-08-03 21:55:37,874 INFO [train.py:1114] (1/4) Epoch 19, batch 650, loss[loss=0.1717, simple_loss=0.2599, pruned_loss=0.04172, over 13544.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2685, pruned_loss=0.04747, over 2543954.15 frames. 
], batch size: 37, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:55:43,742 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=7.801e-03 +2024-08-03 21:56:11,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=242755.33333333334, ans=0.2 +2024-08-03 21:56:15,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=242792.0, ans=0.0 +2024-08-03 21:56:17,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=242792.0, ans=0.125 +2024-08-03 21:56:22,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=242792.0, ans=0.125 +2024-08-03 21:56:25,339 INFO [train.py:1114] (1/4) Epoch 19, batch 700, loss[loss=0.1677, simple_loss=0.26, pruned_loss=0.03768, over 13538.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2688, pruned_loss=0.04743, over 2566214.12 frames. ], batch size: 35, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:56:29,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=242828.66666666666, ans=0.2 +2024-08-03 21:56:33,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=242828.66666666666, ans=0.0 +2024-08-03 21:56:38,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.287e+01 1.160e+02 1.383e+02 1.887e+02 3.094e+02, threshold=2.766e+02, percent-clipped=5.0 +2024-08-03 21:56:38,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242865.33333333334, ans=0.125 +2024-08-03 21:56:46,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=15.0 +2024-08-03 21:56:57,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.10 vs. limit=10.0 +2024-08-03 21:56:59,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242938.66666666666, ans=0.125 +2024-08-03 21:57:03,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.68 vs. limit=15.0 +2024-08-03 21:57:13,133 INFO [train.py:1114] (1/4) Epoch 19, batch 750, loss[loss=0.1883, simple_loss=0.2791, pruned_loss=0.04874, over 13349.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2686, pruned_loss=0.04769, over 2583212.66 frames. 
], batch size: 37, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:57:13,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243012.0, ans=0.1 +2024-08-03 21:57:36,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243085.33333333334, ans=0.1 +2024-08-03 21:57:45,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=243122.0, ans=0.2 +2024-08-03 21:58:00,319 INFO [train.py:1114] (1/4) Epoch 19, batch 800, loss[loss=0.1613, simple_loss=0.2464, pruned_loss=0.03807, over 13349.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2683, pruned_loss=0.04757, over 2596345.61 frames. ], batch size: 33, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:58:04,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=243195.33333333334, ans=0.0 +2024-08-03 21:58:11,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=243232.0, ans=0.2 +2024-08-03 21:58:12,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.835e+01 1.100e+02 1.252e+02 1.465e+02 2.313e+02, threshold=2.504e+02, percent-clipped=0.0 +2024-08-03 21:58:47,360 INFO [train.py:1114] (1/4) Epoch 19, batch 850, loss[loss=0.1617, simple_loss=0.2572, pruned_loss=0.03308, over 13344.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2677, pruned_loss=0.04743, over 2609635.24 frames. ], batch size: 40, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:58:56,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=243415.33333333334, ans=0.0 +2024-08-03 21:59:01,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=243415.33333333334, ans=0.2 +2024-08-03 21:59:05,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=243452.0, ans=0.0 +2024-08-03 21:59:10,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=243452.0, ans=0.5 +2024-08-03 21:59:13,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243488.66666666666, ans=0.0 +2024-08-03 21:59:32,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=243525.33333333334, ans=0.04949747468305833 +2024-08-03 21:59:33,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.64 vs. limit=22.5 +2024-08-03 21:59:34,605 INFO [train.py:1114] (1/4) Epoch 19, batch 900, loss[loss=0.1765, simple_loss=0.2573, pruned_loss=0.04781, over 13351.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2683, pruned_loss=0.04787, over 2612518.95 frames. ], batch size: 33, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:59:48,154 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.094e+02 1.385e+02 1.717e+02 2.818e+02, threshold=2.769e+02, percent-clipped=4.0 +2024-08-03 21:59:55,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.35 vs. 
limit=5.0 +2024-08-03 21:59:56,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=243635.33333333334, ans=0.0 +2024-08-03 22:00:05,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=243672.0, ans=0.0 +2024-08-03 22:00:09,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243672.0, ans=0.125 +2024-08-03 22:00:13,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=243708.66666666666, ans=0.125 +2024-08-03 22:00:21,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.10 vs. limit=15.0 +2024-08-03 22:00:22,265 INFO [train.py:1114] (1/4) Epoch 19, batch 950, loss[loss=0.155, simple_loss=0.244, pruned_loss=0.03297, over 13535.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2682, pruned_loss=0.04754, over 2612917.44 frames. ], batch size: 34, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:00:28,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=243745.33333333334, ans=0.125 +2024-08-03 22:00:34,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=243782.0, ans=0.125 +2024-08-03 22:00:37,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=243782.0, ans=0.0 +2024-08-03 22:00:38,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243782.0, ans=0.1 +2024-08-03 22:00:59,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.80 vs. limit=15.0 +2024-08-03 22:01:03,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=243892.0, ans=0.0 +2024-08-03 22:01:04,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=243892.0, ans=0.0 +2024-08-03 22:01:07,650 INFO [train.py:1114] (1/4) Epoch 19, batch 1000, loss[loss=0.1643, simple_loss=0.2559, pruned_loss=0.03634, over 13376.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2697, pruned_loss=0.04811, over 2610897.00 frames. ], batch size: 35, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:01:23,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.98 vs. limit=22.5 +2024-08-03 22:01:23,455 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.931e+01 1.118e+02 1.271e+02 1.540e+02 2.481e+02, threshold=2.543e+02, percent-clipped=0.0 +2024-08-03 22:01:30,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=244002.0, ans=6.0 +2024-08-03 22:01:39,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.99 vs. 
limit=15.0 +2024-08-03 22:01:45,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.62 vs. limit=15.0 +2024-08-03 22:01:55,458 INFO [train.py:1114] (1/4) Epoch 19, batch 1050, loss[loss=0.1799, simple_loss=0.2726, pruned_loss=0.04361, over 13572.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2688, pruned_loss=0.04785, over 2616069.15 frames. ], batch size: 39, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:02:02,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=244112.0, ans=0.125 +2024-08-03 22:02:08,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=244148.66666666666, ans=0.125 +2024-08-03 22:02:09,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=244148.66666666666, ans=0.0 +2024-08-03 22:02:10,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=244148.66666666666, ans=0.2 +2024-08-03 22:02:18,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-08-03 22:02:32,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244258.66666666666, ans=0.0 +2024-08-03 22:02:34,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244258.66666666666, ans=0.1 +2024-08-03 22:02:34,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=244258.66666666666, ans=0.025 +2024-08-03 22:02:42,813 INFO [train.py:1114] (1/4) Epoch 19, batch 1100, loss[loss=0.1761, simple_loss=0.2631, pruned_loss=0.04452, over 13564.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2694, pruned_loss=0.04809, over 2620110.86 frames. ], batch size: 36, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:02:58,062 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.403e+01 1.088e+02 1.287e+02 1.607e+02 2.579e+02, threshold=2.574e+02, percent-clipped=1.0 +2024-08-03 22:03:06,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244368.66666666666, ans=0.0 +2024-08-03 22:03:17,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.74 vs. limit=15.0 +2024-08-03 22:03:29,660 INFO [train.py:1114] (1/4) Epoch 19, batch 1150, loss[loss=0.1892, simple_loss=0.2738, pruned_loss=0.05229, over 13563.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2689, pruned_loss=0.04776, over 2618673.16 frames. 
], batch size: 36, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:03:39,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244515.33333333334, ans=0.125 +2024-08-03 22:03:51,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244552.0, ans=0.1 +2024-08-03 22:03:52,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244552.0, ans=0.125 +2024-08-03 22:03:55,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=244552.0, ans=0.125 +2024-08-03 22:04:11,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244625.33333333334, ans=0.0 +2024-08-03 22:04:14,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244625.33333333334, ans=0.125 +2024-08-03 22:04:15,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=244625.33333333334, ans=0.2 +2024-08-03 22:04:17,233 INFO [train.py:1114] (1/4) Epoch 19, batch 1200, loss[loss=0.1854, simple_loss=0.2815, pruned_loss=0.04467, over 13573.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2693, pruned_loss=0.04775, over 2616026.00 frames. ], batch size: 39, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:04:19,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=244662.0, ans=0.125 +2024-08-03 22:04:30,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.85 vs. limit=15.0 +2024-08-03 22:04:30,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.729e+01 1.075e+02 1.205e+02 1.408e+02 2.455e+02, threshold=2.410e+02, percent-clipped=0.0 +2024-08-03 22:04:32,812 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:04:50,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=244772.0, ans=0.125 +2024-08-03 22:04:59,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=244808.66666666666, ans=0.0 +2024-08-03 22:05:02,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.95 vs. limit=15.0 +2024-08-03 22:05:04,355 INFO [train.py:1114] (1/4) Epoch 19, batch 1250, loss[loss=0.1817, simple_loss=0.2678, pruned_loss=0.04783, over 13417.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2697, pruned_loss=0.04783, over 2628006.52 frames. 
], batch size: 42, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:05:20,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=244882.0, ans=0.0 +2024-08-03 22:05:32,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=244955.33333333334, ans=0.025 +2024-08-03 22:05:35,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=244955.33333333334, ans=0.0 +2024-08-03 22:05:39,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244955.33333333334, ans=0.125 +2024-08-03 22:05:52,086 INFO [train.py:1114] (1/4) Epoch 19, batch 1300, loss[loss=0.2133, simple_loss=0.2965, pruned_loss=0.06509, over 12886.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2687, pruned_loss=0.04749, over 2631438.10 frames. ], batch size: 52, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:05:55,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.74 vs. limit=22.5 +2024-08-03 22:06:07,828 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.978e+01 1.097e+02 1.270e+02 1.535e+02 2.662e+02, threshold=2.541e+02, percent-clipped=5.0 +2024-08-03 22:06:08,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=245065.33333333334, ans=0.0 +2024-08-03 22:06:09,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0 +2024-08-03 22:06:15,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=19.39 vs. limit=22.5 +2024-08-03 22:06:20,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.03 vs. limit=12.0 +2024-08-03 22:06:37,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0 +2024-08-03 22:06:40,449 INFO [train.py:1114] (1/4) Epoch 19, batch 1350, loss[loss=0.1686, simple_loss=0.2597, pruned_loss=0.0388, over 13560.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.269, pruned_loss=0.04768, over 2639049.46 frames. ], batch size: 37, lr: 6.56e-03, grad_scale: 16.0 +2024-08-03 22:06:47,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=245212.0, ans=0.125 +2024-08-03 22:07:03,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=245285.33333333334, ans=0.125 +2024-08-03 22:07:08,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.20 vs. 
limit=12.0 +2024-08-03 22:07:17,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245358.66666666666, ans=0.125 +2024-08-03 22:07:20,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=245358.66666666666, ans=0.035 +2024-08-03 22:07:24,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=15.0 +2024-08-03 22:07:26,150 INFO [train.py:1114] (1/4) Epoch 19, batch 1400, loss[loss=0.1703, simple_loss=0.2458, pruned_loss=0.04734, over 13256.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2688, pruned_loss=0.0477, over 2642917.17 frames. ], batch size: 31, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:07:38,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=245432.0, ans=0.125 +2024-08-03 22:07:41,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245432.0, ans=0.125 +2024-08-03 22:07:44,089 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.615e+01 1.110e+02 1.328e+02 1.668e+02 3.835e+02, threshold=2.657e+02, percent-clipped=2.0 +2024-08-03 22:07:47,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=245468.66666666666, ans=0.0 +2024-08-03 22:07:56,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245505.33333333334, ans=0.125 +2024-08-03 22:07:59,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245505.33333333334, ans=0.1 +2024-08-03 22:08:01,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=245505.33333333334, ans=0.0 +2024-08-03 22:08:12,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=245578.66666666666, ans=0.125 +2024-08-03 22:08:13,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.46 vs. limit=15.0 +2024-08-03 22:08:13,721 INFO [train.py:1114] (1/4) Epoch 19, batch 1450, loss[loss=0.1902, simple_loss=0.2845, pruned_loss=0.048, over 13411.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2691, pruned_loss=0.0477, over 2642447.15 frames. ], batch size: 43, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:08:21,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245615.33333333334, ans=0.125 +2024-08-03 22:08:22,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=245615.33333333334, ans=0.0 +2024-08-03 22:08:35,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.93 vs. 
limit=6.0 +2024-08-03 22:08:42,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=245688.66666666666, ans=0.025 +2024-08-03 22:08:44,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=245688.66666666666, ans=0.125 +2024-08-03 22:08:44,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245688.66666666666, ans=0.125 +2024-08-03 22:09:01,147 INFO [train.py:1114] (1/4) Epoch 19, batch 1500, loss[loss=0.1796, simple_loss=0.2721, pruned_loss=0.04351, over 13397.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2692, pruned_loss=0.04743, over 2641902.88 frames. ], batch size: 39, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:09:03,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=245762.0, ans=0.07 +2024-08-03 22:09:14,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=245798.66666666666, ans=0.0 +2024-08-03 22:09:16,758 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.116e+02 1.262e+02 1.580e+02 2.631e+02, threshold=2.524e+02, percent-clipped=0.0 +2024-08-03 22:09:23,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245835.33333333334, ans=0.1 +2024-08-03 22:09:40,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=245908.66666666666, ans=0.0 +2024-08-03 22:09:48,983 INFO [train.py:1114] (1/4) Epoch 19, batch 1550, loss[loss=0.1835, simple_loss=0.276, pruned_loss=0.04544, over 13394.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2696, pruned_loss=0.04779, over 2631250.90 frames. ], batch size: 41, lr: 6.55e-03, grad_scale: 8.0 +2024-08-03 22:09:57,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=245982.0, ans=10.0 +2024-08-03 22:10:01,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=245982.0, ans=0.125 +2024-08-03 22:10:04,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=15.0 +2024-08-03 22:10:07,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=245982.0, ans=0.2 +2024-08-03 22:10:36,200 INFO [train.py:1114] (1/4) Epoch 19, batch 1600, loss[loss=0.182, simple_loss=0.279, pruned_loss=0.04251, over 13571.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.27, pruned_loss=0.04834, over 2624348.12 frames. 
], batch size: 39, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:10:47,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246165.33333333334, ans=0.125 +2024-08-03 22:10:51,519 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.109e+02 1.298e+02 1.770e+02 3.045e+02, threshold=2.595e+02, percent-clipped=6.0 +2024-08-03 22:11:04,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246238.66666666666, ans=0.1 +2024-08-03 22:11:23,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.15 vs. limit=15.0 +2024-08-03 22:11:23,646 INFO [train.py:1114] (1/4) Epoch 19, batch 1650, loss[loss=0.1932, simple_loss=0.286, pruned_loss=0.05023, over 13321.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2703, pruned_loss=0.0486, over 2621197.01 frames. ], batch size: 40, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:11:28,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.74 vs. limit=22.5 +2024-08-03 22:11:37,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=246348.66666666666, ans=0.2 +2024-08-03 22:11:40,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=246385.33333333334, ans=0.025 +2024-08-03 22:12:03,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246458.66666666666, ans=0.1 +2024-08-03 22:12:11,718 INFO [train.py:1114] (1/4) Epoch 19, batch 1700, loss[loss=0.1856, simple_loss=0.2589, pruned_loss=0.05615, over 13259.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2696, pruned_loss=0.04809, over 2629932.24 frames. ], batch size: 31, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:12:20,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=246532.0, ans=0.0 +2024-08-03 22:12:27,025 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.554e+01 1.124e+02 1.376e+02 1.723e+02 2.933e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 22:12:32,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=246568.66666666666, ans=0.125 +2024-08-03 22:12:46,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-08-03 22:12:47,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=246642.0, ans=0.0 +2024-08-03 22:12:56,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246642.0, ans=0.125 +2024-08-03 22:12:56,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.48 vs. limit=15.0 +2024-08-03 22:12:59,185 INFO [train.py:1114] (1/4) Epoch 19, batch 1750, loss[loss=0.1583, simple_loss=0.2376, pruned_loss=0.03946, over 13557.00 frames. 
], tot_loss[loss=0.1824, simple_loss=0.2691, pruned_loss=0.04788, over 2634106.69 frames. ], batch size: 31, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:13:01,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246678.66666666666, ans=0.125 +2024-08-03 22:13:10,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=246715.33333333334, ans=0.2 +2024-08-03 22:13:15,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=246715.33333333334, ans=0.125 +2024-08-03 22:13:32,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246788.66666666666, ans=0.125 +2024-08-03 22:13:46,378 INFO [train.py:1114] (1/4) Epoch 19, batch 1800, loss[loss=0.1775, simple_loss=0.268, pruned_loss=0.04346, over 13550.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2693, pruned_loss=0.04777, over 2634699.05 frames. ], batch size: 38, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:13:50,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=246862.0, ans=0.025 +2024-08-03 22:13:52,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=246862.0, ans=0.0 +2024-08-03 22:13:56,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=246898.66666666666, ans=0.0 +2024-08-03 22:14:02,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.153e+02 1.366e+02 1.717e+02 2.450e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 22:14:04,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246935.33333333334, ans=0.125 +2024-08-03 22:14:05,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246935.33333333334, ans=0.125 +2024-08-03 22:14:18,812 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:14:32,246 INFO [train.py:1114] (1/4) Epoch 19, batch 1850, loss[loss=0.1938, simple_loss=0.2779, pruned_loss=0.05487, over 13415.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2684, pruned_loss=0.04742, over 2636888.20 frames. ], batch size: 39, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:14:46,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=247082.0, ans=0.1 +2024-08-03 22:14:55,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247118.66666666666, ans=0.125 +2024-08-03 22:15:02,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=247155.33333333334, ans=0.07 +2024-08-03 22:15:03,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=247155.33333333334, ans=0.125 +2024-08-03 22:15:16,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.28 vs. 
limit=15.0 +2024-08-03 22:15:19,800 INFO [train.py:1114] (1/4) Epoch 19, batch 1900, loss[loss=0.2, simple_loss=0.2888, pruned_loss=0.05556, over 13316.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2693, pruned_loss=0.04773, over 2639180.48 frames. ], batch size: 40, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:15:23,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0 +2024-08-03 22:15:34,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=247265.33333333334, ans=0.2 +2024-08-03 22:15:34,865 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.927e+01 1.140e+02 1.255e+02 1.731e+02 2.677e+02, threshold=2.509e+02, percent-clipped=0.0 +2024-08-03 22:15:44,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247302.0, ans=0.125 +2024-08-03 22:15:51,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=247338.66666666666, ans=0.0 +2024-08-03 22:15:56,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=247338.66666666666, ans=0.125 +2024-08-03 22:16:01,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=247375.33333333334, ans=10.0 +2024-08-03 22:16:06,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.23 vs. limit=6.0 +2024-08-03 22:16:07,334 INFO [train.py:1114] (1/4) Epoch 19, batch 1950, loss[loss=0.153, simple_loss=0.2386, pruned_loss=0.03372, over 13557.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2709, pruned_loss=0.04833, over 2645973.67 frames. ], batch size: 36, lr: 6.53e-03, grad_scale: 16.0 +2024-08-03 22:16:11,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=247412.0, ans=0.2 +2024-08-03 22:16:22,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.08 vs. limit=15.0 +2024-08-03 22:16:23,897 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:16:45,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.68 vs. limit=12.0 +2024-08-03 22:16:48,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247558.66666666666, ans=0.0 +2024-08-03 22:16:55,381 INFO [train.py:1114] (1/4) Epoch 19, batch 2000, loss[loss=0.1754, simple_loss=0.2542, pruned_loss=0.04831, over 13549.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2715, pruned_loss=0.04873, over 2636526.75 frames. 
], batch size: 31, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:17:10,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=247632.0, ans=0.125 +2024-08-03 22:17:13,007 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.093e+02 1.300e+02 1.628e+02 2.543e+02, threshold=2.600e+02, percent-clipped=1.0 +2024-08-03 22:17:21,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=247668.66666666666, ans=0.125 +2024-08-03 22:17:24,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=247705.33333333334, ans=0.025 +2024-08-03 22:17:29,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=247705.33333333334, ans=0.0 +2024-08-03 22:17:39,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247742.0, ans=0.1 +2024-08-03 22:17:42,956 INFO [train.py:1114] (1/4) Epoch 19, batch 2050, loss[loss=0.1692, simple_loss=0.2529, pruned_loss=0.04276, over 13429.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2706, pruned_loss=0.04859, over 2633248.81 frames. ], batch size: 32, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:17:44,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247778.66666666666, ans=0.0 +2024-08-03 22:17:52,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=247815.33333333334, ans=0.125 +2024-08-03 22:17:54,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=247815.33333333334, ans=0.125 +2024-08-03 22:17:57,556 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:17:58,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=247815.33333333334, ans=0.09899494936611666 +2024-08-03 22:18:07,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=247852.0, ans=0.2 +2024-08-03 22:18:20,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=247925.33333333334, ans=0.05 +2024-08-03 22:18:22,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=247925.33333333334, ans=0.025 +2024-08-03 22:18:30,934 INFO [train.py:1114] (1/4) Epoch 19, batch 2100, loss[loss=0.1815, simple_loss=0.2699, pruned_loss=0.04655, over 13531.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2691, pruned_loss=0.04776, over 2638385.39 frames. 
], batch size: 37, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:18:37,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=247962.0, ans=0.125 +2024-08-03 22:18:46,391 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.343e+01 1.123e+02 1.234e+02 1.440e+02 2.542e+02, threshold=2.468e+02, percent-clipped=0.0 +2024-08-03 22:18:55,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=248035.33333333334, ans=0.125 +2024-08-03 22:19:06,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=248072.0, ans=0.0 +2024-08-03 22:19:08,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=248108.66666666666, ans=0.0 +2024-08-03 22:19:09,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0 +2024-08-03 22:19:13,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=248108.66666666666, ans=0.0 +2024-08-03 22:19:16,682 INFO [train.py:1114] (1/4) Epoch 19, batch 2150, loss[loss=0.1895, simple_loss=0.2735, pruned_loss=0.05274, over 13558.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2685, pruned_loss=0.04744, over 2647499.08 frames. ], batch size: 36, lr: 6.52e-03, grad_scale: 32.0 +2024-08-03 22:19:17,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=248145.33333333334, ans=0.2 +2024-08-03 22:19:27,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=248182.0, ans=0.0 +2024-08-03 22:19:33,800 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:19:45,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.35 vs. limit=5.0 +2024-08-03 22:19:54,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248255.33333333334, ans=0.125 +2024-08-03 22:20:01,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=248292.0, ans=0.2 +2024-08-03 22:20:04,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=248292.0, ans=0.025 +2024-08-03 22:20:06,563 INFO [train.py:1114] (1/4) Epoch 19, batch 2200, loss[loss=0.1846, simple_loss=0.2811, pruned_loss=0.04407, over 13392.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2682, pruned_loss=0.04729, over 2645630.12 frames. 
], batch size: 39, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:20:15,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=248365.33333333334, ans=0.2 +2024-08-03 22:20:18,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248365.33333333334, ans=0.0 +2024-08-03 22:20:22,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.107e+02 1.433e+02 1.774e+02 2.441e+02, threshold=2.865e+02, percent-clipped=0.0 +2024-08-03 22:20:24,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=12.0 +2024-08-03 22:20:33,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=248438.66666666666, ans=0.025 +2024-08-03 22:20:41,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.14 vs. limit=15.0 +2024-08-03 22:20:47,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=248475.33333333334, ans=0.025 +2024-08-03 22:20:47,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=248475.33333333334, ans=0.125 +2024-08-03 22:20:52,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248475.33333333334, ans=0.1 +2024-08-03 22:20:53,612 INFO [train.py:1114] (1/4) Epoch 19, batch 2250, loss[loss=0.1737, simple_loss=0.2665, pruned_loss=0.04049, over 13351.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.268, pruned_loss=0.04713, over 2642105.46 frames. ], batch size: 37, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:21:05,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=248548.66666666666, ans=0.2 +2024-08-03 22:21:20,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=248622.0, ans=0.07 +2024-08-03 22:21:25,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=248622.0, ans=0.125 +2024-08-03 22:21:28,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.49 vs. limit=15.0 +2024-08-03 22:21:30,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.85 vs. limit=15.0 +2024-08-03 22:21:30,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=248658.66666666666, ans=0.125 +2024-08-03 22:21:38,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248695.33333333334, ans=0.1 +2024-08-03 22:21:39,614 INFO [train.py:1114] (1/4) Epoch 19, batch 2300, loss[loss=0.1592, simple_loss=0.2401, pruned_loss=0.03918, over 13583.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2671, pruned_loss=0.0469, over 2638703.58 frames. 
], batch size: 33, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:21:58,042 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.536e+01 1.062e+02 1.236e+02 1.586e+02 2.214e+02, threshold=2.472e+02, percent-clipped=0.0 +2024-08-03 22:22:09,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=248805.33333333334, ans=0.0 +2024-08-03 22:22:18,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.51 vs. limit=15.0 +2024-08-03 22:22:27,298 INFO [train.py:1114] (1/4) Epoch 19, batch 2350, loss[loss=0.1758, simple_loss=0.2716, pruned_loss=0.04, over 13554.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2677, pruned_loss=0.04721, over 2641188.62 frames. ], batch size: 38, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:22:28,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=248878.66666666666, ans=0.125 +2024-08-03 22:22:37,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=248915.33333333334, ans=0.2 +2024-08-03 22:22:41,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.73 vs. limit=22.5 +2024-08-03 22:22:55,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=248988.66666666666, ans=0.07 +2024-08-03 22:22:59,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=248988.66666666666, ans=0.0 +2024-08-03 22:23:00,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=248988.66666666666, ans=0.0 +2024-08-03 22:23:02,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=249025.33333333334, ans=0.125 +2024-08-03 22:23:10,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=249025.33333333334, ans=0.1 +2024-08-03 22:23:12,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=249025.33333333334, ans=0.125 +2024-08-03 22:23:13,909 INFO [train.py:1114] (1/4) Epoch 19, batch 2400, loss[loss=0.1638, simple_loss=0.2447, pruned_loss=0.04146, over 13536.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2683, pruned_loss=0.04747, over 2642014.24 frames. ], batch size: 35, lr: 6.51e-03, grad_scale: 32.0 +2024-08-03 22:23:31,370 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.101e+02 1.278e+02 1.688e+02 2.593e+02, threshold=2.556e+02, percent-clipped=1.0 +2024-08-03 22:23:36,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.38 vs. limit=22.5 +2024-08-03 22:23:44,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249172.0, ans=0.1 +2024-08-03 22:24:01,860 INFO [train.py:1114] (1/4) Epoch 19, batch 2450, loss[loss=0.1812, simple_loss=0.2743, pruned_loss=0.04402, over 13358.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2694, pruned_loss=0.04782, over 2631283.33 frames. 
], batch size: 37, lr: 6.51e-03, grad_scale: 16.0 +2024-08-03 22:24:37,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=249355.33333333334, ans=0.125 +2024-08-03 22:24:49,892 INFO [train.py:1114] (1/4) Epoch 19, batch 2500, loss[loss=0.1901, simple_loss=0.2799, pruned_loss=0.05014, over 13380.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2696, pruned_loss=0.04788, over 2635462.64 frames. ], batch size: 39, lr: 6.51e-03, grad_scale: 16.0 +2024-08-03 22:24:50,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=249428.66666666666, ans=0.0 +2024-08-03 22:25:06,703 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.566e+01 1.106e+02 1.263e+02 1.596e+02 2.870e+02, threshold=2.527e+02, percent-clipped=4.0 +2024-08-03 22:25:10,659 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=15.0 +2024-08-03 22:25:19,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249538.66666666666, ans=0.125 +2024-08-03 22:25:23,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=249538.66666666666, ans=0.025 +2024-08-03 22:25:23,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=249538.66666666666, ans=0.0 +2024-08-03 22:25:34,295 INFO [train.py:1114] (1/4) Epoch 19, batch 2550, loss[loss=0.1552, simple_loss=0.2357, pruned_loss=0.03734, over 13547.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2694, pruned_loss=0.04792, over 2636941.32 frames. ], batch size: 31, lr: 6.51e-03, grad_scale: 16.0 +2024-08-03 22:25:35,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=249612.0, ans=0.0 +2024-08-03 22:25:48,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=249648.66666666666, ans=0.125 +2024-08-03 22:25:55,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=249685.33333333334, ans=0.125 +2024-08-03 22:26:00,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249722.0, ans=0.125 +2024-08-03 22:26:05,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=249722.0, ans=0.09899494936611666 +2024-08-03 22:26:18,048 INFO [train.py:1114] (1/4) Epoch 19, batch 2600, loss[loss=0.1911, simple_loss=0.2734, pruned_loss=0.05441, over 13560.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2693, pruned_loss=0.04784, over 2636497.17 frames. 
], batch size: 36, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:26:18,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=249795.33333333334, ans=0.125 +2024-08-03 22:26:20,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249795.33333333334, ans=0.125 +2024-08-03 22:26:23,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=249795.33333333334, ans=0.0 +2024-08-03 22:26:24,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=249795.33333333334, ans=0.025 +2024-08-03 22:26:27,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=249832.0, ans=0.0 +2024-08-03 22:26:29,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=249832.0, ans=0.125 +2024-08-03 22:26:34,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=249832.0, ans=0.2 +2024-08-03 22:26:35,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.40 vs. limit=15.0 +2024-08-03 22:26:36,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.660e+01 1.133e+02 1.412e+02 1.915e+02 3.004e+02, threshold=2.824e+02, percent-clipped=7.0 +2024-08-03 22:26:43,157 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.64 vs. limit=15.0 +2024-08-03 22:26:43,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=249868.66666666666, ans=0.125 +2024-08-03 22:26:49,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=249905.33333333334, ans=0.0 +2024-08-03 22:27:01,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=249942.0, ans=0.07 +2024-08-03 22:27:01,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249942.0, ans=0.1 +2024-08-03 22:27:03,492 INFO [train.py:1114] (1/4) Epoch 19, batch 2650, loss[loss=0.1863, simple_loss=0.2788, pruned_loss=0.04687, over 13300.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.04821, over 2639142.87 frames. ], batch size: 46, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:27:22,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250052.0, ans=0.125 +2024-08-03 22:27:47,043 INFO [train.py:1114] (1/4) Epoch 19, batch 2700, loss[loss=0.1876, simple_loss=0.2843, pruned_loss=0.04548, over 13545.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2701, pruned_loss=0.04806, over 2636893.66 frames. ], batch size: 40, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:27:55,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.75 vs. 
limit=10.0 +2024-08-03 22:27:55,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250198.66666666666, ans=0.125 +2024-08-03 22:28:03,575 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.168e+01 1.091e+02 1.247e+02 1.559e+02 2.482e+02, threshold=2.495e+02, percent-clipped=0.0 +2024-08-03 22:28:17,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250272.0, ans=0.1 +2024-08-03 22:28:18,701 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:28:30,861 INFO [train.py:1114] (1/4) Epoch 19, batch 2750, loss[loss=0.1851, simple_loss=0.2641, pruned_loss=0.05308, over 13338.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2688, pruned_loss=0.04787, over 2634649.79 frames. ], batch size: 34, lr: 6.50e-03, grad_scale: 16.0 +2024-08-03 22:28:35,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=250345.33333333334, ans=0.95 +2024-08-03 22:28:58,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250455.33333333334, ans=0.1 +2024-08-03 22:29:02,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=250455.33333333334, ans=0.025 +2024-08-03 22:29:09,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.42 vs. limit=6.0 +2024-08-03 22:29:12,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=250492.0, ans=0.125 +2024-08-03 22:29:15,856 INFO [train.py:1114] (1/4) Epoch 19, batch 2800, loss[loss=0.2857, simple_loss=0.337, pruned_loss=0.1172, over 9063.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2691, pruned_loss=0.04808, over 2626560.81 frames. ], batch size: 96, lr: 6.49e-03, grad_scale: 32.0 +2024-08-03 22:29:23,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.84 vs. limit=22.5 +2024-08-03 22:29:28,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=250565.33333333334, ans=0.125 +2024-08-03 22:29:38,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250565.33333333334, ans=0.1 +2024-08-03 22:29:38,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=250565.33333333334, ans=0.2 +2024-08-03 22:29:38,854 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.968e+01 1.091e+02 1.270e+02 1.499e+02 3.648e+02, threshold=2.541e+02, percent-clipped=3.0 +2024-08-03 22:29:40,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=250602.0, ans=0.125 +2024-08-03 22:29:42,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.68 vs. 
limit=22.5 +2024-08-03 22:29:48,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.77 vs. limit=10.0 +2024-08-03 22:29:55,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.53 vs. limit=12.0 +2024-08-03 22:29:58,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.16 vs. limit=15.0 +2024-08-03 22:30:07,679 INFO [train.py:1114] (1/4) Epoch 19, batch 2850, loss[loss=0.1899, simple_loss=0.2821, pruned_loss=0.04888, over 13349.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2697, pruned_loss=0.04831, over 2618666.63 frames. ], batch size: 35, lr: 6.49e-03, grad_scale: 32.0 +2024-08-03 22:30:37,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250822.0, ans=0.1 +2024-08-03 22:30:54,422 INFO [train.py:1114] (1/4) Epoch 19, batch 2900, loss[loss=0.1789, simple_loss=0.2726, pruned_loss=0.04255, over 13372.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2709, pruned_loss=0.04848, over 2629502.82 frames. ], batch size: 36, lr: 6.49e-03, grad_scale: 32.0 +2024-08-03 22:31:05,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=250932.0, ans=0.0 +2024-08-03 22:31:10,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=250932.0, ans=0.025 +2024-08-03 22:31:11,719 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.871e+01 1.142e+02 1.452e+02 2.110e+02 3.268e+02, threshold=2.903e+02, percent-clipped=11.0 +2024-08-03 22:31:19,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=251005.33333333334, ans=0.125 +2024-08-03 22:31:39,960 INFO [train.py:1114] (1/4) Epoch 19, batch 2950, loss[loss=0.1755, simple_loss=0.2614, pruned_loss=0.04482, over 13325.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2689, pruned_loss=0.04775, over 2628290.54 frames. ], batch size: 34, lr: 6.49e-03, grad_scale: 16.0 +2024-08-03 22:31:44,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251078.66666666666, ans=0.125 +2024-08-03 22:32:14,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=251225.33333333334, ans=0.025 +2024-08-03 22:32:15,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.23 vs. limit=22.5 +2024-08-03 22:32:15,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251225.33333333334, ans=0.125 +2024-08-03 22:32:16,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=15.0 +2024-08-03 22:32:23,259 INFO [train.py:1114] (1/4) Epoch 19, batch 3000, loss[loss=0.1827, simple_loss=0.2709, pruned_loss=0.04725, over 13541.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2685, pruned_loss=0.04739, over 2627517.50 frames. 
], batch size: 37, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:32:23,259 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 22:32:34,408 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.169, simple_loss=0.2683, pruned_loss=0.03491, over 944034.00 frames. +2024-08-03 22:32:34,408 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 22:32:40,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-08-03 22:32:41,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=251262.0, ans=0.0 +2024-08-03 22:32:45,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251298.66666666666, ans=0.1 +2024-08-03 22:32:51,339 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.78 vs. limit=6.0 +2024-08-03 22:32:51,805 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.029e+01 1.088e+02 1.228e+02 1.356e+02 2.065e+02, threshold=2.455e+02, percent-clipped=0.0 +2024-08-03 22:32:58,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=251335.33333333334, ans=0.125 +2024-08-03 22:33:01,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=251372.0, ans=0.0 +2024-08-03 22:33:12,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=251408.66666666666, ans=0.125 +2024-08-03 22:33:17,983 INFO [train.py:1114] (1/4) Epoch 19, batch 3050, loss[loss=0.1763, simple_loss=0.2651, pruned_loss=0.04374, over 13522.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2695, pruned_loss=0.0477, over 2625057.90 frames. ], batch size: 35, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:33:18,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=251445.33333333334, ans=0.09899494936611666 +2024-08-03 22:33:20,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=251445.33333333334, ans=0.125 +2024-08-03 22:33:23,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.27 vs. limit=6.0 +2024-08-03 22:33:40,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251518.66666666666, ans=0.125 +2024-08-03 22:33:56,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251592.0, ans=0.125 +2024-08-03 22:34:01,089 INFO [train.py:1114] (1/4) Epoch 19, batch 3100, loss[loss=0.2009, simple_loss=0.2945, pruned_loss=0.05371, over 13280.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2695, pruned_loss=0.0479, over 2625914.45 frames. 
], batch size: 46, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:34:06,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=251628.66666666666, ans=0.0 +2024-08-03 22:34:19,145 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.004e+01 1.074e+02 1.205e+02 1.545e+02 4.065e+02, threshold=2.411e+02, percent-clipped=2.0 +2024-08-03 22:34:25,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=251702.0, ans=0.125 +2024-08-03 22:34:27,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=251702.0, ans=0.0 +2024-08-03 22:34:31,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=251738.66666666666, ans=0.0 +2024-08-03 22:34:38,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=251775.33333333334, ans=0.0 +2024-08-03 22:34:40,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.15 vs. limit=22.5 +2024-08-03 22:34:46,840 INFO [train.py:1114] (1/4) Epoch 19, batch 3150, loss[loss=0.1803, simple_loss=0.2775, pruned_loss=0.04156, over 12974.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2698, pruned_loss=0.04791, over 2627591.08 frames. ], batch size: 48, lr: 6.48e-03, grad_scale: 16.0 +2024-08-03 22:35:02,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=251848.66666666666, ans=0.125 +2024-08-03 22:35:10,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=251885.33333333334, ans=0.5 +2024-08-03 22:35:21,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=12.0 +2024-08-03 22:35:24,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=251958.66666666666, ans=0.0 +2024-08-03 22:35:25,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=251958.66666666666, ans=0.2 +2024-08-03 22:35:30,190 INFO [train.py:1114] (1/4) Epoch 19, batch 3200, loss[loss=0.2071, simple_loss=0.2888, pruned_loss=0.06267, over 13544.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2699, pruned_loss=0.04818, over 2632865.81 frames. 
], batch size: 37, lr: 6.48e-03, grad_scale: 32.0 +2024-08-03 22:35:37,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=251995.33333333334, ans=0.125 +2024-08-03 22:35:44,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=252032.0, ans=0.0 +2024-08-03 22:35:47,280 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.593e+01 1.152e+02 1.475e+02 1.954e+02 2.995e+02, threshold=2.949e+02, percent-clipped=9.0 +2024-08-03 22:35:53,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252068.66666666666, ans=0.125 +2024-08-03 22:35:55,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.81 vs. limit=15.0 +2024-08-03 22:36:02,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=252105.33333333334, ans=0.125 +2024-08-03 22:36:04,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=252142.0, ans=0.0 +2024-08-03 22:36:13,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252178.66666666666, ans=0.125 +2024-08-03 22:36:13,784 INFO [train.py:1114] (1/4) Epoch 19, batch 3250, loss[loss=0.1814, simple_loss=0.2764, pruned_loss=0.0432, over 13387.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2708, pruned_loss=0.04819, over 2637960.24 frames. ], batch size: 38, lr: 6.47e-03, grad_scale: 32.0 +2024-08-03 22:36:16,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252178.66666666666, ans=0.125 +2024-08-03 22:36:19,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=252178.66666666666, ans=0.0 +2024-08-03 22:36:19,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=252178.66666666666, ans=0.2 +2024-08-03 22:36:35,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=252252.0, ans=0.025 +2024-08-03 22:36:37,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.11 vs. limit=15.0 +2024-08-03 22:36:43,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0 +2024-08-03 22:36:57,933 INFO [train.py:1114] (1/4) Epoch 19, batch 3300, loss[loss=0.2047, simple_loss=0.2935, pruned_loss=0.05791, over 12895.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2696, pruned_loss=0.04775, over 2639235.15 frames. 
], batch size: 52, lr: 6.47e-03, grad_scale: 32.0 +2024-08-03 22:37:03,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252362.0, ans=0.125 +2024-08-03 22:37:12,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=252398.66666666666, ans=0.2 +2024-08-03 22:37:14,951 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.912e+01 1.101e+02 1.274e+02 1.526e+02 2.579e+02, threshold=2.548e+02, percent-clipped=0.0 +2024-08-03 22:37:18,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.04 vs. limit=15.0 +2024-08-03 22:37:19,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=252435.33333333334, ans=0.0 +2024-08-03 22:37:26,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.84 vs. limit=15.0 +2024-08-03 22:37:28,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=252472.0, ans=0.025 +2024-08-03 22:37:40,616 INFO [train.py:1114] (1/4) Epoch 19, batch 3350, loss[loss=0.2166, simple_loss=0.3049, pruned_loss=0.06417, over 12932.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2708, pruned_loss=0.04855, over 2628732.21 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 32.0 +2024-08-03 22:37:42,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=252545.33333333334, ans=0.2 +2024-08-03 22:37:47,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=252545.33333333334, ans=0.125 +2024-08-03 22:37:49,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=252582.0, ans=0.125 +2024-08-03 22:37:56,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=252618.66666666666, ans=0.125 +2024-08-03 22:38:02,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=252618.66666666666, ans=0.025 +2024-08-03 22:38:23,950 INFO [train.py:1114] (1/4) Epoch 19, batch 3400, loss[loss=0.1905, simple_loss=0.2657, pruned_loss=0.05772, over 13550.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2703, pruned_loss=0.04828, over 2624765.01 frames. ], batch size: 31, lr: 6.47e-03, grad_scale: 16.0 +2024-08-03 22:38:24,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=252728.66666666666, ans=0.0 +2024-08-03 22:38:31,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=252765.33333333334, ans=0.0 +2024-08-03 22:38:36,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.68 vs. 
limit=22.5 +2024-08-03 22:38:41,984 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.435e+01 1.094e+02 1.256e+02 1.561e+02 2.442e+02, threshold=2.511e+02, percent-clipped=0.0 +2024-08-03 22:38:42,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=252802.0, ans=0.0 +2024-08-03 22:38:54,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=252838.66666666666, ans=0.0 +2024-08-03 22:38:57,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.52 vs. limit=15.0 +2024-08-03 22:39:07,296 INFO [train.py:1114] (1/4) Epoch 19, batch 3450, loss[loss=0.1673, simple_loss=0.2618, pruned_loss=0.03637, over 12872.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2703, pruned_loss=0.04812, over 2628531.73 frames. ], batch size: 52, lr: 6.46e-03, grad_scale: 16.0 +2024-08-03 22:39:11,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252912.0, ans=0.1 +2024-08-03 22:39:16,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=252948.66666666666, ans=0.125 +2024-08-03 22:39:21,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=252948.66666666666, ans=0.125 +2024-08-03 22:39:24,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.91 vs. limit=6.0 +2024-08-03 22:39:27,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=252985.33333333334, ans=0.0 +2024-08-03 22:39:28,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=252985.33333333334, ans=0.125 +2024-08-03 22:39:30,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=252985.33333333334, ans=0.2 +2024-08-03 22:39:39,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=15.0 +2024-08-03 22:39:43,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=253058.66666666666, ans=0.0 +2024-08-03 22:39:50,108 INFO [train.py:1114] (1/4) Epoch 19, batch 3500, loss[loss=0.161, simple_loss=0.2456, pruned_loss=0.03817, over 13534.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2698, pruned_loss=0.04798, over 2630552.85 frames. ], batch size: 34, lr: 6.46e-03, grad_scale: 16.0 +2024-08-03 22:39:51,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=253095.33333333334, ans=0.0 +2024-08-03 22:40:07,937 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.416e+01 1.103e+02 1.254e+02 1.609e+02 3.004e+02, threshold=2.508e+02, percent-clipped=2.0 +2024-08-03 22:40:33,552 INFO [train.py:1114] (1/4) Epoch 19, batch 3550, loss[loss=0.2082, simple_loss=0.2872, pruned_loss=0.06457, over 12555.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2717, pruned_loss=0.04892, over 2629010.23 frames. 
], batch size: 58, lr: 6.46e-03, grad_scale: 16.0 +2024-08-03 22:40:37,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=253278.66666666666, ans=0.02 +2024-08-03 22:40:50,461 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:41:03,429 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.21 vs. limit=15.0 +2024-08-03 22:41:04,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=253388.66666666666, ans=0.125 +2024-08-03 22:41:04,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.32 vs. limit=15.0 +2024-08-03 22:41:10,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=253425.33333333334, ans=0.2 +2024-08-03 22:41:10,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=253425.33333333334, ans=0.125 +2024-08-03 22:41:11,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=253425.33333333334, ans=0.0 +2024-08-03 22:41:16,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=253425.33333333334, ans=0.125 +2024-08-03 22:41:17,600 INFO [train.py:1114] (1/4) Epoch 19, batch 3600, loss[loss=0.2405, simple_loss=0.3027, pruned_loss=0.08916, over 8836.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.276, pruned_loss=0.05254, over 2487977.49 frames. ], batch size: 97, lr: 6.46e-03, grad_scale: 32.0 +2024-08-03 22:41:21,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=253462.0, ans=0.125 +2024-08-03 22:41:35,765 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.196e+02 1.277e+02 1.470e+02 2.167e+02, threshold=2.555e+02, percent-clipped=0.0 +2024-08-03 22:41:38,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=253535.33333333334, ans=0.125 +2024-08-03 22:41:44,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.80 vs. limit=22.5 +2024-08-03 22:42:35,194 INFO [train.py:1114] (1/4) Epoch 20, batch 0, loss[loss=0.17, simple_loss=0.2505, pruned_loss=0.04477, over 13334.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2505, pruned_loss=0.04477, over 13334.00 frames. ], batch size: 33, lr: 6.29e-03, grad_scale: 32.0 +2024-08-03 22:42:35,195 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 22:42:39,792 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6071, 2.8807, 2.5689, 2.7406], device='cuda:1') +2024-08-03 22:42:45,195 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1683, simple_loss=0.2688, pruned_loss=0.0339, over 944034.00 frames. 
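A note on the recurring fields in these entries: `loss`, `simple_loss`, and `pruned_loss` fit one linear relation throughout this section, loss ≈ 0.5 * simple_loss + pruned_loss (e.g. the validation entry just above: 0.5 * 0.2688 + 0.0339 = 0.1683). The sketch below reproduces that bookkeeping; it assumes an icefall-style pruned-transducer recipe, and the constant names and helper function are illustrative, not taken from this repository's train.py.

```python
# Minimal sketch: how the logged `loss` field appears to be derived.
# Assumption: pruned-RNN-T training, where `simple_loss` comes from a
# cheap linear joiner and `pruned_loss` from the full joiner evaluated
# on a pruned symbol range; the two scales are inferred from the log.
SIMPLE_LOSS_SCALE = 0.5
PRUNED_LOSS_SCALE = 1.0

def combined_loss(simple_loss: float, pruned_loss: float) -> float:
    """Reproduce the logged `loss` from `simple_loss` and `pruned_loss`."""
    return SIMPLE_LOSS_SCALE * simple_loss + PRUNED_LOSS_SCALE * pruned_loss

# Spot-check against the Epoch 20 validation entry logged above:
# loss=0.1683, simple_loss=0.2688, pruned_loss=0.0339
assert abs(combined_loss(0.2688, 0.0339) - 0.1683) < 5e-4
```

The `grad_scale` values that cycle through 32.0, 16.0, and 8.0 in the batch summaries are consistent with dynamic AMP loss scaling, which typically halves the scale on overflow and grows it back after a run of clean steps; the WARNING lines from optim.py report the grad-norm quartiles alongside the clipping threshold in effect for that step.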
+2024-08-03 22:42:45,196 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 22:42:50,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253612.33333333334, ans=0.1 +2024-08-03 22:42:55,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253649.0, ans=0.125 +2024-08-03 22:43:14,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=253722.33333333334, ans=0.025 +2024-08-03 22:43:18,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=253722.33333333334, ans=0.0 +2024-08-03 22:43:21,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.45 vs. limit=15.0 +2024-08-03 22:43:24,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=253759.0, ans=0.1 +2024-08-03 22:43:30,968 INFO [train.py:1114] (1/4) Epoch 20, batch 50, loss[loss=0.1489, simple_loss=0.2303, pruned_loss=0.03375, over 13411.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.27, pruned_loss=0.04816, over 577626.02 frames. ], batch size: 32, lr: 6.29e-03, grad_scale: 32.0 +2024-08-03 22:43:41,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.77 vs. limit=15.0 +2024-08-03 22:43:41,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=253832.33333333334, ans=0.125 +2024-08-03 22:43:57,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=253869.0, ans=0.015 +2024-08-03 22:44:02,672 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.939e+01 1.083e+02 1.261e+02 1.490e+02 2.691e+02, threshold=2.522e+02, percent-clipped=1.0 +2024-08-03 22:44:02,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=253905.66666666666, ans=0.025 +2024-08-03 22:44:07,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.92 vs. limit=22.5 +2024-08-03 22:44:09,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=253942.33333333334, ans=0.2 +2024-08-03 22:44:19,844 INFO [train.py:1114] (1/4) Epoch 20, batch 100, loss[loss=0.1597, simple_loss=0.245, pruned_loss=0.03725, over 13537.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2725, pruned_loss=0.04852, over 1025065.12 frames. 
], batch size: 35, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:44:33,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=254015.66666666666, ans=0.2 +2024-08-03 22:44:35,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=254015.66666666666, ans=0.07 +2024-08-03 22:44:56,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=254089.0, ans=0.2 +2024-08-03 22:45:07,146 INFO [train.py:1114] (1/4) Epoch 20, batch 150, loss[loss=0.1446, simple_loss=0.228, pruned_loss=0.03054, over 13413.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2693, pruned_loss=0.04695, over 1386573.16 frames. ], batch size: 32, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:45:26,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=254235.66666666666, ans=0.125 +2024-08-03 22:45:35,892 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.685e+01 1.074e+02 1.304e+02 1.730e+02 2.668e+02, threshold=2.608e+02, percent-clipped=1.0 +2024-08-03 22:45:51,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=254309.0, ans=0.05 +2024-08-03 22:45:54,468 INFO [train.py:1114] (1/4) Epoch 20, batch 200, loss[loss=0.1899, simple_loss=0.2817, pruned_loss=0.04903, over 12595.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2674, pruned_loss=0.04615, over 1665510.73 frames. ], batch size: 59, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:45:54,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=254345.66666666666, ans=0.07 +2024-08-03 22:46:06,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=254382.33333333334, ans=0.0 +2024-08-03 22:46:09,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0 +2024-08-03 22:46:13,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=254419.0, ans=0.125 +2024-08-03 22:46:20,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=254419.0, ans=0.2 +2024-08-03 22:46:39,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.83 vs. limit=10.0 +2024-08-03 22:46:40,094 INFO [train.py:1114] (1/4) Epoch 20, batch 250, loss[loss=0.1901, simple_loss=0.2934, pruned_loss=0.04341, over 13274.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2682, pruned_loss=0.0463, over 1884751.87 frames. ], batch size: 46, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:46:53,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.85 vs. 
limit=22.5 +2024-08-03 22:47:10,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=254639.0, ans=0.125 +2024-08-03 22:47:10,943 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.021e+01 1.100e+02 1.239e+02 1.581e+02 3.543e+02, threshold=2.478e+02, percent-clipped=3.0 +2024-08-03 22:47:14,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254639.0, ans=0.1 +2024-08-03 22:47:15,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254639.0, ans=0.1 +2024-08-03 22:47:25,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254675.66666666666, ans=0.1 +2024-08-03 22:47:27,272 INFO [train.py:1114] (1/4) Epoch 20, batch 300, loss[loss=0.1877, simple_loss=0.2858, pruned_loss=0.04479, over 13449.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2674, pruned_loss=0.04625, over 2051163.79 frames. ], batch size: 42, lr: 6.28e-03, grad_scale: 16.0 +2024-08-03 22:47:43,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=254749.0, ans=0.0 +2024-08-03 22:47:50,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=22.5 +2024-08-03 22:47:51,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=254785.66666666666, ans=0.025 +2024-08-03 22:47:54,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=254785.66666666666, ans=0.125 +2024-08-03 22:48:01,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254822.33333333334, ans=0.125 +2024-08-03 22:48:02,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.73 vs. limit=15.0 +2024-08-03 22:48:15,256 INFO [train.py:1114] (1/4) Epoch 20, batch 350, loss[loss=0.1627, simple_loss=0.2455, pruned_loss=0.03998, over 13583.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2677, pruned_loss=0.04602, over 2181776.32 frames. ], batch size: 33, lr: 6.27e-03, grad_scale: 16.0 +2024-08-03 22:48:18,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=254895.66666666666, ans=0.07 +2024-08-03 22:48:44,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255005.66666666666, ans=0.125 +2024-08-03 22:48:45,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255005.66666666666, ans=0.1 +2024-08-03 22:48:46,583 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.302e+01 1.074e+02 1.204e+02 1.489e+02 2.516e+02, threshold=2.409e+02, percent-clipped=1.0 +2024-08-03 22:49:02,806 INFO [train.py:1114] (1/4) Epoch 20, batch 400, loss[loss=0.1775, simple_loss=0.2738, pruned_loss=0.04066, over 13371.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2677, pruned_loss=0.04597, over 2286058.28 frames. 
], batch size: 37, lr: 6.27e-03, grad_scale: 32.0 +2024-08-03 22:49:28,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255152.33333333334, ans=0.1 +2024-08-03 22:49:33,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=255189.0, ans=0.125 +2024-08-03 22:49:43,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=255225.66666666666, ans=0.2 +2024-08-03 22:49:50,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=255262.33333333334, ans=0.035 +2024-08-03 22:49:51,088 INFO [train.py:1114] (1/4) Epoch 20, batch 450, loss[loss=0.1992, simple_loss=0.294, pruned_loss=0.05216, over 13556.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.268, pruned_loss=0.04626, over 2359694.73 frames. ], batch size: 38, lr: 6.27e-03, grad_scale: 32.0 +2024-08-03 22:49:52,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=255262.33333333334, ans=0.125 +2024-08-03 22:49:56,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255262.33333333334, ans=0.125 +2024-08-03 22:50:00,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=255299.0, ans=0.125 +2024-08-03 22:50:18,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255372.33333333334, ans=0.1 +2024-08-03 22:50:20,453 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.506e+01 1.112e+02 1.300e+02 1.604e+02 2.595e+02, threshold=2.600e+02, percent-clipped=3.0 +2024-08-03 22:50:27,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.92 vs. limit=15.0 +2024-08-03 22:50:35,541 INFO [train.py:1114] (1/4) Epoch 20, batch 500, loss[loss=0.1975, simple_loss=0.283, pruned_loss=0.05599, over 13445.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2672, pruned_loss=0.04605, over 2425612.84 frames. ], batch size: 43, lr: 6.27e-03, grad_scale: 16.0 +2024-08-03 22:50:45,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.45 vs. limit=12.0 +2024-08-03 22:50:56,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.65 vs. limit=15.0 +2024-08-03 22:50:57,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=255519.0, ans=0.125 +2024-08-03 22:51:06,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.68 vs. limit=15.0 +2024-08-03 22:51:12,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.95 vs. limit=15.0 +2024-08-03 22:51:12,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.44 vs. 
limit=15.0 +2024-08-03 22:51:25,361 INFO [train.py:1114] (1/4) Epoch 20, batch 550, loss[loss=0.195, simple_loss=0.2885, pruned_loss=0.05075, over 13043.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2676, pruned_loss=0.04635, over 2467677.02 frames. ], batch size: 48, lr: 6.26e-03, grad_scale: 16.0 +2024-08-03 22:51:30,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.66 vs. limit=22.5 +2024-08-03 22:51:50,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=255702.33333333334, ans=0.125 +2024-08-03 22:51:54,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=255739.0, ans=0.0 +2024-08-03 22:51:55,566 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.709e+01 1.108e+02 1.273e+02 1.483e+02 2.115e+02, threshold=2.547e+02, percent-clipped=0.0 +2024-08-03 22:51:55,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255739.0, ans=0.125 +2024-08-03 22:51:56,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=255739.0, ans=0.125 +2024-08-03 22:52:07,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=255775.66666666666, ans=0.125 +2024-08-03 22:52:13,741 INFO [train.py:1114] (1/4) Epoch 20, batch 600, loss[loss=0.1924, simple_loss=0.2843, pruned_loss=0.05025, over 13331.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2676, pruned_loss=0.0465, over 2507660.16 frames. ], batch size: 46, lr: 6.26e-03, grad_scale: 16.0 +2024-08-03 22:52:21,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=255812.33333333334, ans=0.125 +2024-08-03 22:52:38,582 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.97 vs. limit=22.5 +2024-08-03 22:52:50,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=255959.0, ans=0.125 +2024-08-03 22:53:01,425 INFO [train.py:1114] (1/4) Epoch 20, batch 650, loss[loss=0.1732, simple_loss=0.2647, pruned_loss=0.04085, over 13534.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2671, pruned_loss=0.04642, over 2542967.34 frames. 
], batch size: 37, lr: 6.26e-03, grad_scale: 8.0 +2024-08-03 22:53:10,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256032.33333333334, ans=0.125 +2024-08-03 22:53:18,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=256069.0, ans=0.2 +2024-08-03 22:53:25,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256069.0, ans=0.1 +2024-08-03 22:53:27,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=256069.0, ans=0.125 +2024-08-03 22:53:32,260 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.089e+02 1.229e+02 1.482e+02 2.680e+02, threshold=2.459e+02, percent-clipped=1.0 +2024-08-03 22:53:43,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.64 vs. limit=15.0 +2024-08-03 22:53:44,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.16 vs. limit=15.0 +2024-08-03 22:53:47,112 INFO [train.py:1114] (1/4) Epoch 20, batch 700, loss[loss=0.1634, simple_loss=0.2456, pruned_loss=0.04066, over 13530.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2672, pruned_loss=0.04631, over 2564878.23 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 8.0 +2024-08-03 22:53:56,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=256215.66666666666, ans=0.125 +2024-08-03 22:53:56,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=256215.66666666666, ans=0.125 +2024-08-03 22:53:58,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=256215.66666666666, ans=12.0 +2024-08-03 22:54:10,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=256252.33333333334, ans=0.2 +2024-08-03 22:54:21,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256289.0, ans=0.125 +2024-08-03 22:54:29,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.33 vs. limit=12.0 +2024-08-03 22:54:34,714 INFO [train.py:1114] (1/4) Epoch 20, batch 750, loss[loss=0.174, simple_loss=0.272, pruned_loss=0.03796, over 13363.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.266, pruned_loss=0.04564, over 2582144.44 frames. ], batch size: 37, lr: 6.26e-03, grad_scale: 8.0 +2024-08-03 22:54:35,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=256362.33333333334, ans=0.0 +2024-08-03 22:54:41,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.85 vs. 
limit=12.0 +2024-08-03 22:54:43,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=256399.0, ans=0.125 +2024-08-03 22:54:47,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256399.0, ans=0.1 +2024-08-03 22:55:06,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=256472.33333333334, ans=0.2 +2024-08-03 22:55:08,786 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.791e+01 1.111e+02 1.273e+02 1.584e+02 2.450e+02, threshold=2.545e+02, percent-clipped=0.0 +2024-08-03 22:55:21,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=256509.0, ans=0.0 +2024-08-03 22:55:21,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.14 vs. limit=22.5 +2024-08-03 22:55:23,256 INFO [train.py:1114] (1/4) Epoch 20, batch 800, loss[loss=0.1619, simple_loss=0.2448, pruned_loss=0.0395, over 13319.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2656, pruned_loss=0.04533, over 2595987.29 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:55:23,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-03 22:55:30,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256545.66666666666, ans=0.125 +2024-08-03 22:55:31,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256582.33333333334, ans=0.0 +2024-08-03 22:55:31,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=256582.33333333334, ans=0.0 +2024-08-03 22:55:52,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256655.66666666666, ans=0.125 +2024-08-03 22:56:10,724 INFO [train.py:1114] (1/4) Epoch 20, batch 850, loss[loss=0.169, simple_loss=0.2647, pruned_loss=0.03669, over 13325.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2658, pruned_loss=0.0455, over 2609197.77 frames. 
], batch size: 40, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:56:12,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=256729.0, ans=0.125 +2024-08-03 22:56:18,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=256729.0, ans=0.025 +2024-08-03 22:56:22,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=256765.66666666666, ans=0.125 +2024-08-03 22:56:26,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=256765.66666666666, ans=22.5 +2024-08-03 22:56:44,113 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.111e+01 1.084e+02 1.248e+02 1.708e+02 3.125e+02, threshold=2.496e+02, percent-clipped=3.0 +2024-08-03 22:56:50,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.37 vs. limit=15.0 +2024-08-03 22:56:52,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.04 vs. limit=22.5 +2024-08-03 22:56:58,774 INFO [train.py:1114] (1/4) Epoch 20, batch 900, loss[loss=0.16, simple_loss=0.2434, pruned_loss=0.03834, over 13330.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2668, pruned_loss=0.04612, over 2611825.38 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:57:19,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256985.66666666666, ans=0.125 +2024-08-03 22:57:34,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=257059.0, ans=0.0 +2024-08-03 22:57:36,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.61 vs. limit=15.0 +2024-08-03 22:57:43,695 INFO [train.py:1114] (1/4) Epoch 20, batch 950, loss[loss=0.1667, simple_loss=0.2553, pruned_loss=0.03906, over 13532.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2667, pruned_loss=0.04598, over 2612874.35 frames. ], batch size: 34, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:57:45,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.99 vs. limit=15.0 +2024-08-03 22:57:48,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.13 vs. 
limit=22.5 +2024-08-03 22:57:55,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=257132.33333333334, ans=0.0 +2024-08-03 22:57:55,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257132.33333333334, ans=0.0 +2024-08-03 22:57:56,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=257132.33333333334, ans=0.125 +2024-08-03 22:58:14,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257205.66666666666, ans=0.0 +2024-08-03 22:58:14,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257205.66666666666, ans=0.1 +2024-08-03 22:58:15,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=257205.66666666666, ans=0.2 +2024-08-03 22:58:16,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.792e+01 1.096e+02 1.434e+02 1.875e+02 2.963e+02, threshold=2.868e+02, percent-clipped=4.0 +2024-08-03 22:58:32,890 INFO [train.py:1114] (1/4) Epoch 20, batch 1000, loss[loss=0.1619, simple_loss=0.2444, pruned_loss=0.03967, over 13369.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2674, pruned_loss=0.04654, over 2611543.53 frames. ], batch size: 35, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 22:58:33,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=257279.0, ans=0.0 +2024-08-03 22:58:37,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=257279.0, ans=10.0 +2024-08-03 22:59:14,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=257425.66666666666, ans=0.0 +2024-08-03 22:59:16,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0 +2024-08-03 22:59:21,334 INFO [train.py:1114] (1/4) Epoch 20, batch 1050, loss[loss=0.1794, simple_loss=0.2748, pruned_loss=0.04197, over 13578.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2672, pruned_loss=0.04644, over 2615348.53 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 22:59:26,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257462.33333333334, ans=0.125 +2024-08-03 22:59:31,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257499.0, ans=0.125 +2024-08-03 22:59:52,247 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.716e+01 1.110e+02 1.251e+02 1.540e+02 2.508e+02, threshold=2.503e+02, percent-clipped=0.0 +2024-08-03 23:00:01,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=257609.0, ans=0.2 +2024-08-03 23:00:02,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.45 vs. 
limit=15.0 +2024-08-03 23:00:08,983 INFO [train.py:1114] (1/4) Epoch 20, batch 1100, loss[loss=0.1761, simple_loss=0.2598, pruned_loss=0.04618, over 13545.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2673, pruned_loss=0.04642, over 2618776.67 frames. ], batch size: 36, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:00:13,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.62 vs. limit=22.5 +2024-08-03 23:00:14,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=257645.66666666666, ans=0.125 +2024-08-03 23:00:21,664 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:00:38,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.09 vs. limit=15.0 +2024-08-03 23:00:47,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=257792.33333333334, ans=0.05 +2024-08-03 23:00:48,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257792.33333333334, ans=0.125 +2024-08-03 23:00:50,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=257792.33333333334, ans=0.0 +2024-08-03 23:00:51,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=257792.33333333334, ans=0.2 +2024-08-03 23:00:52,968 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.09 vs. limit=15.0 +2024-08-03 23:00:54,179 INFO [train.py:1114] (1/4) Epoch 20, batch 1150, loss[loss=0.1855, simple_loss=0.2671, pruned_loss=0.05196, over 13559.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2665, pruned_loss=0.04602, over 2617741.96 frames. ], batch size: 36, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:00:59,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=257829.0, ans=0.0 +2024-08-03 23:01:16,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=257902.33333333334, ans=0.0 +2024-08-03 23:01:16,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=15.0 +2024-08-03 23:01:17,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=257902.33333333334, ans=0.0 +2024-08-03 23:01:21,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=257939.0, ans=0.2 +2024-08-03 23:01:26,022 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.874e+01 1.110e+02 1.277e+02 1.674e+02 2.760e+02, threshold=2.554e+02, percent-clipped=1.0 +2024-08-03 23:01:28,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.74 vs. 
limit=15.0 +2024-08-03 23:01:30,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257975.66666666666, ans=0.1 +2024-08-03 23:01:35,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=257975.66666666666, ans=0.125 +2024-08-03 23:01:39,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.36 vs. limit=22.5 +2024-08-03 23:01:39,684 INFO [train.py:1114] (1/4) Epoch 20, batch 1200, loss[loss=0.1826, simple_loss=0.2716, pruned_loss=0.04682, over 13575.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2669, pruned_loss=0.0461, over 2615340.77 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:01:39,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=258012.33333333334, ans=0.015 +2024-08-03 23:01:49,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.62 vs. limit=22.5 +2024-08-03 23:02:05,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258085.66666666666, ans=0.125 +2024-08-03 23:02:16,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258122.33333333334, ans=0.125 +2024-08-03 23:02:19,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258159.0, ans=0.1 +2024-08-03 23:02:28,510 INFO [train.py:1114] (1/4) Epoch 20, batch 1250, loss[loss=0.2097, simple_loss=0.2998, pruned_loss=0.05976, over 13450.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.268, pruned_loss=0.04645, over 2627245.80 frames. ], batch size: 42, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:03:00,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=258305.66666666666, ans=0.2 +2024-08-03 23:03:01,938 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.994e+01 1.078e+02 1.315e+02 1.640e+02 2.788e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 23:03:15,616 INFO [train.py:1114] (1/4) Epoch 20, batch 1300, loss[loss=0.1938, simple_loss=0.2847, pruned_loss=0.05142, over 12933.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2674, pruned_loss=0.04634, over 2630470.90 frames. 
], batch size: 52, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:03:15,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258379.0, ans=0.1 +2024-08-03 23:03:15,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=258379.0, ans=0.025 +2024-08-03 23:03:16,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=258379.0, ans=0.125 +2024-08-03 23:03:24,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258415.66666666666, ans=0.0 +2024-08-03 23:03:26,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258415.66666666666, ans=0.1 +2024-08-03 23:03:35,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.65 vs. limit=12.0 +2024-08-03 23:04:01,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=258525.66666666666, ans=0.125 +2024-08-03 23:04:02,909 INFO [train.py:1114] (1/4) Epoch 20, batch 1350, loss[loss=0.1871, simple_loss=0.2784, pruned_loss=0.04786, over 13538.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2666, pruned_loss=0.04575, over 2638247.54 frames. ], batch size: 37, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:15,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0 +2024-08-03 23:04:24,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=8.0 +2024-08-03 23:04:34,292 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.766e+01 1.164e+02 1.354e+02 1.727e+02 2.558e+02, threshold=2.707e+02, percent-clipped=0.0 +2024-08-03 23:04:35,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=12.0 +2024-08-03 23:04:47,929 INFO [train.py:1114] (1/4) Epoch 20, batch 1400, loss[loss=0.1663, simple_loss=0.2424, pruned_loss=0.04513, over 13263.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2662, pruned_loss=0.04563, over 2642013.92 frames. ], batch size: 31, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:48,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.95 vs. limit=15.0 +2024-08-03 23:04:52,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.12 vs. 
limit=22.5 +2024-08-03 23:04:57,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=258782.33333333334, ans=0.125 +2024-08-03 23:04:57,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=258782.33333333334, ans=0.04949747468305833 +2024-08-03 23:04:59,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=258782.33333333334, ans=0.0 +2024-08-03 23:04:59,712 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.00 vs. limit=15.0 +2024-08-03 23:05:00,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=258782.33333333334, ans=10.0 +2024-08-03 23:05:06,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=258819.0, ans=0.125 +2024-08-03 23:05:13,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.90 vs. limit=22.5 +2024-08-03 23:05:14,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=258855.66666666666, ans=0.1 +2024-08-03 23:05:17,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=17.33 vs. limit=15.0 +2024-08-03 23:05:19,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=258855.66666666666, ans=0.0 +2024-08-03 23:05:28,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=258892.33333333334, ans=0.125 +2024-08-03 23:05:30,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=258892.33333333334, ans=0.025 +2024-08-03 23:05:32,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=15.0 +2024-08-03 23:05:32,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=258892.33333333334, ans=0.2 +2024-08-03 23:05:35,406 INFO [train.py:1114] (1/4) Epoch 20, batch 1450, loss[loss=0.1783, simple_loss=0.268, pruned_loss=0.04431, over 13403.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2671, pruned_loss=0.04626, over 2640979.52 frames. ], batch size: 43, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:05:39,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=258929.0, ans=0.125 +2024-08-03 23:05:49,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=258965.66666666666, ans=0.0 +2024-08-03 23:05:56,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.65 vs. 
limit=6.0 +2024-08-03 23:06:07,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259039.0, ans=0.1 +2024-08-03 23:06:08,776 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.191e+01 1.099e+02 1.241e+02 1.472e+02 2.601e+02, threshold=2.481e+02, percent-clipped=0.0 +2024-08-03 23:06:21,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=259112.33333333334, ans=0.125 +2024-08-03 23:06:22,416 INFO [train.py:1114] (1/4) Epoch 20, batch 1500, loss[loss=0.1738, simple_loss=0.2742, pruned_loss=0.0367, over 13409.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2674, pruned_loss=0.04618, over 2641312.88 frames. ], batch size: 39, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:06:45,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259185.66666666666, ans=0.0 +2024-08-03 23:07:04,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=259259.0, ans=0.125 +2024-08-03 23:07:10,179 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.35 vs. limit=22.5 +2024-08-03 23:07:12,329 INFO [train.py:1114] (1/4) Epoch 20, batch 1550, loss[loss=0.1809, simple_loss=0.2752, pruned_loss=0.0433, over 13393.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2673, pruned_loss=0.04622, over 2631416.73 frames. ], batch size: 41, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:07:12,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=259295.66666666666, ans=0.5 +2024-08-03 23:07:12,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259295.66666666666, ans=0.125 +2024-08-03 23:07:22,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259332.33333333334, ans=0.1 +2024-08-03 23:07:30,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=259332.33333333334, ans=0.125 +2024-08-03 23:07:34,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=259369.0, ans=15.0 +2024-08-03 23:07:40,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=15.0 +2024-08-03 23:07:43,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259405.66666666666, ans=0.1 +2024-08-03 23:07:45,376 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.100e+02 1.241e+02 1.619e+02 2.779e+02, threshold=2.482e+02, percent-clipped=4.0 +2024-08-03 23:07:51,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259442.33333333334, ans=0.125 +2024-08-03 23:07:59,034 INFO [train.py:1114] (1/4) Epoch 20, batch 1600, loss[loss=0.1645, simple_loss=0.2625, pruned_loss=0.03322, over 13583.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2678, pruned_loss=0.04667, over 2625204.95 frames. 
], batch size: 39, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:00,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259479.0, ans=0.125 +2024-08-03 23:08:00,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=259479.0, ans=0.125 +2024-08-03 23:08:05,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=259479.0, ans=0.2 +2024-08-03 23:08:28,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.65 vs. limit=15.0 +2024-08-03 23:08:40,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=259625.66666666666, ans=0.0 +2024-08-03 23:08:44,988 INFO [train.py:1114] (1/4) Epoch 20, batch 1650, loss[loss=0.182, simple_loss=0.2781, pruned_loss=0.04301, over 13326.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04675, over 2621913.82 frames. ], batch size: 40, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:49,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=259662.33333333334, ans=0.95 +2024-08-03 23:08:52,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259662.33333333334, ans=0.1 +2024-08-03 23:09:12,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.23 vs. limit=22.5 +2024-08-03 23:09:20,926 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.088e+02 1.223e+02 1.648e+02 3.559e+02, threshold=2.446e+02, percent-clipped=8.0 +2024-08-03 23:09:28,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259809.0, ans=0.1 +2024-08-03 23:09:29,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.84 vs. limit=15.0 +2024-08-03 23:09:30,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=259809.0, ans=0.1 +2024-08-03 23:09:31,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=259809.0, ans=0.0 +2024-08-03 23:09:34,467 INFO [train.py:1114] (1/4) Epoch 20, batch 1700, loss[loss=0.1619, simple_loss=0.2418, pruned_loss=0.04099, over 13259.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2676, pruned_loss=0.04665, over 2630833.38 frames. 
], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:09:38,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259845.66666666666, ans=0.125 +2024-08-03 23:09:45,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=259882.33333333334, ans=0.035 +2024-08-03 23:10:06,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=259955.66666666666, ans=0.1 +2024-08-03 23:10:06,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259955.66666666666, ans=0.125 +2024-08-03 23:10:07,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=12.0 +2024-08-03 23:10:21,422 INFO [train.py:1114] (1/4) Epoch 20, batch 1750, loss[loss=0.1693, simple_loss=0.2481, pruned_loss=0.04527, over 13520.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2672, pruned_loss=0.04646, over 2634527.83 frames. ], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:10:42,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260102.33333333334, ans=0.125 +2024-08-03 23:10:46,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260102.33333333334, ans=0.125 +2024-08-03 23:10:54,928 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.991e+01 1.099e+02 1.220e+02 1.451e+02 2.480e+02, threshold=2.439e+02, percent-clipped=1.0 +2024-08-03 23:11:03,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=260175.66666666666, ans=0.125 +2024-08-03 23:11:03,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=260175.66666666666, ans=0.0 +2024-08-03 23:11:06,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=260175.66666666666, ans=0.2 +2024-08-03 23:11:08,878 INFO [train.py:1114] (1/4) Epoch 20, batch 1800, loss[loss=0.1831, simple_loss=0.277, pruned_loss=0.04464, over 13550.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2675, pruned_loss=0.04683, over 2635220.21 frames. ], batch size: 38, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:16,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=260212.33333333334, ans=0.125 +2024-08-03 23:11:18,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.00 vs. 
limit=15.0 +2024-08-03 23:11:21,706 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:11:35,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=260322.33333333334, ans=0.0 +2024-08-03 23:11:39,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260322.33333333334, ans=0.125 +2024-08-03 23:11:47,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260359.0, ans=0.125 +2024-08-03 23:11:53,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260395.66666666666, ans=0.1 +2024-08-03 23:11:54,205 INFO [train.py:1114] (1/4) Epoch 20, batch 1850, loss[loss=0.176, simple_loss=0.2624, pruned_loss=0.04485, over 13418.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04671, over 2637867.68 frames. ], batch size: 39, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:12:08,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=260432.33333333334, ans=0.2 +2024-08-03 23:12:11,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=260469.0, ans=0.125 +2024-08-03 23:12:13,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=260469.0, ans=0.125 +2024-08-03 23:12:17,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.71 vs. limit=22.5 +2024-08-03 23:12:26,245 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.233e+01 1.108e+02 1.354e+02 1.892e+02 2.843e+02, threshold=2.709e+02, percent-clipped=7.0 +2024-08-03 23:12:57,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=260579.0, ans=15.0 +2024-08-03 23:12:58,316 INFO [train.py:1114] (1/4) Epoch 20, batch 1900, loss[loss=0.1893, simple_loss=0.2865, pruned_loss=0.04605, over 13317.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2682, pruned_loss=0.04681, over 2640201.37 frames. ], batch size: 40, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:03,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=260579.0, ans=0.0 +2024-08-03 23:13:13,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260615.66666666666, ans=0.125 +2024-08-03 23:13:27,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260689.0, ans=0.125 +2024-08-03 23:13:32,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260689.0, ans=0.125 +2024-08-03 23:13:45,867 INFO [train.py:1114] (1/4) Epoch 20, batch 1950, loss[loss=0.1705, simple_loss=0.2561, pruned_loss=0.04241, over 13541.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2692, pruned_loss=0.04724, over 2647098.81 frames. 
], batch size: 36, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:48,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=260762.33333333334, ans=0.125 +2024-08-03 23:13:57,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.44 vs. limit=6.0 +2024-08-03 23:14:12,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=260835.66666666666, ans=0.2 +2024-08-03 23:14:19,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.342e+01 1.097e+02 1.231e+02 1.483e+02 2.195e+02, threshold=2.462e+02, percent-clipped=0.0 +2024-08-03 23:14:25,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=260909.0, ans=0.125 +2024-08-03 23:14:35,082 INFO [train.py:1114] (1/4) Epoch 20, batch 2000, loss[loss=0.1672, simple_loss=0.2427, pruned_loss=0.04586, over 13549.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2696, pruned_loss=0.04749, over 2636519.17 frames. ], batch size: 31, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:14:39,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260945.66666666666, ans=0.1 +2024-08-03 23:14:52,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=261019.0, ans=0.07 +2024-08-03 23:14:54,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261019.0, ans=0.0 +2024-08-03 23:14:58,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=261019.0, ans=0.04949747468305833 +2024-08-03 23:15:02,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=261055.66666666666, ans=0.125 +2024-08-03 23:15:10,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.16 vs. limit=15.0 +2024-08-03 23:15:20,639 INFO [train.py:1114] (1/4) Epoch 20, batch 2050, loss[loss=0.1659, simple_loss=0.2454, pruned_loss=0.04325, over 13418.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2689, pruned_loss=0.04762, over 2634221.05 frames. ], batch size: 32, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:15:24,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261129.0, ans=0.125 +2024-08-03 23:15:27,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.84 vs. 
limit=15.0 +2024-08-03 23:15:38,696 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:15:46,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=261239.0, ans=0.1 +2024-08-03 23:15:49,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=261239.0, ans=0.0 +2024-08-03 23:15:49,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=261239.0, ans=0.025 +2024-08-03 23:15:52,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.005e+01 1.074e+02 1.228e+02 1.423e+02 2.984e+02, threshold=2.455e+02, percent-clipped=1.0 +2024-08-03 23:15:55,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=261275.66666666666, ans=0.2 +2024-08-03 23:16:05,625 INFO [train.py:1114] (1/4) Epoch 20, batch 2100, loss[loss=0.1772, simple_loss=0.2698, pruned_loss=0.04232, over 13548.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2681, pruned_loss=0.04711, over 2639311.15 frames. ], batch size: 37, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:16:10,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=261312.33333333334, ans=0.125 +2024-08-03 23:16:23,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=261385.66666666666, ans=0.0 +2024-08-03 23:16:32,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=261385.66666666666, ans=0.2 +2024-08-03 23:16:54,281 INFO [train.py:1114] (1/4) Epoch 20, batch 2150, loss[loss=0.1813, simple_loss=0.2656, pruned_loss=0.04851, over 13554.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2673, pruned_loss=0.04682, over 2647711.65 frames. ], batch size: 36, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:17:00,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261495.66666666666, ans=0.1 +2024-08-03 23:17:03,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=261532.33333333334, ans=0.07 +2024-08-03 23:17:06,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.32 vs. 
limit=15.0 +2024-08-03 23:17:12,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=261569.0, ans=0.125 +2024-08-03 23:17:13,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=261569.0, ans=0.0 +2024-08-03 23:17:13,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261569.0, ans=0.125 +2024-08-03 23:17:16,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=261569.0, ans=10.0 +2024-08-03 23:17:26,353 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.602e+01 1.138e+02 1.478e+02 2.029e+02 3.755e+02, threshold=2.955e+02, percent-clipped=14.0 +2024-08-03 23:17:27,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261605.66666666666, ans=0.0 +2024-08-03 23:17:35,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261642.33333333334, ans=0.125 +2024-08-03 23:17:35,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261642.33333333334, ans=0.1 +2024-08-03 23:17:37,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261642.33333333334, ans=0.125 +2024-08-03 23:17:38,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261642.33333333334, ans=0.125 +2024-08-03 23:17:41,769 INFO [train.py:1114] (1/4) Epoch 20, batch 2200, loss[loss=0.1777, simple_loss=0.2711, pruned_loss=0.04219, over 13398.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2672, pruned_loss=0.04662, over 2645518.72 frames. ], batch size: 39, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:17:46,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=261679.0, ans=0.0 +2024-08-03 23:17:50,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261715.66666666666, ans=0.1 +2024-08-03 23:17:54,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.96 vs. limit=15.0 +2024-08-03 23:17:54,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=261715.66666666666, ans=0.125 +2024-08-03 23:18:13,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.25 vs. limit=15.0 +2024-08-03 23:18:31,224 INFO [train.py:1114] (1/4) Epoch 20, batch 2250, loss[loss=0.1782, simple_loss=0.2667, pruned_loss=0.04488, over 13355.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2671, pruned_loss=0.04642, over 2643473.37 frames. ], batch size: 37, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:18:40,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. 
limit=15.0 +2024-08-03 23:19:02,739 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.914e+01 1.098e+02 1.238e+02 1.485e+02 2.172e+02, threshold=2.476e+02, percent-clipped=0.0 +2024-08-03 23:19:04,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=261972.33333333334, ans=0.07 +2024-08-03 23:19:13,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262009.0, ans=0.1 +2024-08-03 23:19:16,425 INFO [train.py:1114] (1/4) Epoch 20, batch 2300, loss[loss=0.1536, simple_loss=0.2288, pruned_loss=0.03921, over 13563.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2659, pruned_loss=0.04601, over 2639178.70 frames. ], batch size: 33, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:19:16,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=15.0 +2024-08-03 23:19:32,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=262082.33333333334, ans=0.125 +2024-08-03 23:19:33,305 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.00 vs. limit=15.0 +2024-08-03 23:19:35,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262119.0, ans=0.125 +2024-08-03 23:19:37,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=262119.0, ans=0.0 +2024-08-03 23:19:46,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=262155.6666666667, ans=0.125 +2024-08-03 23:19:47,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262155.6666666667, ans=0.1 +2024-08-03 23:19:48,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=262155.6666666667, ans=0.025 +2024-08-03 23:19:54,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=262192.3333333333, ans=0.125 +2024-08-03 23:19:55,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=262192.3333333333, ans=0.0 +2024-08-03 23:20:01,871 INFO [train.py:1114] (1/4) Epoch 20, batch 2350, loss[loss=0.1679, simple_loss=0.2595, pruned_loss=0.03812, over 13556.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.266, pruned_loss=0.04561, over 2641450.07 frames. 
], batch size: 38, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:20:02,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=262229.0, ans=0.0 +2024-08-03 23:20:05,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=262229.0, ans=0.125 +2024-08-03 23:20:23,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=262302.3333333333, ans=0.2 +2024-08-03 23:20:33,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262339.0, ans=0.125 +2024-08-03 23:20:37,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=262339.0, ans=0.0 +2024-08-03 23:20:38,437 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.532e+01 1.125e+02 1.356e+02 1.575e+02 2.756e+02, threshold=2.712e+02, percent-clipped=1.0 +2024-08-03 23:20:41,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=262339.0, ans=0.2 +2024-08-03 23:20:52,236 INFO [train.py:1114] (1/4) Epoch 20, batch 2400, loss[loss=0.1543, simple_loss=0.2459, pruned_loss=0.03131, over 13530.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2666, pruned_loss=0.04568, over 2643036.96 frames. ], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:20:56,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.49 vs. limit=22.5 +2024-08-03 23:20:57,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262412.3333333333, ans=0.0 +2024-08-03 23:21:04,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=262449.0, ans=0.2 +2024-08-03 23:21:04,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=262449.0, ans=0.0 +2024-08-03 23:21:24,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.86 vs. limit=15.0 +2024-08-03 23:21:26,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=262522.3333333333, ans=0.2 +2024-08-03 23:21:27,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.80 vs. limit=15.0 +2024-08-03 23:21:28,855 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:21:41,782 INFO [train.py:1114] (1/4) Epoch 20, batch 2450, loss[loss=0.1942, simple_loss=0.2878, pruned_loss=0.05027, over 13355.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2678, pruned_loss=0.04644, over 2633926.53 frames. 
], batch size: 37, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:21:50,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=262632.3333333333, ans=0.09899494936611666 +2024-08-03 23:21:52,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=262632.3333333333, ans=0.2 +2024-08-03 23:21:53,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=262632.3333333333, ans=0.025 +2024-08-03 23:21:56,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=262632.3333333333, ans=0.2 +2024-08-03 23:22:10,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=262705.6666666667, ans=0.035 +2024-08-03 23:22:14,228 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.032e+01 1.119e+02 1.273e+02 1.496e+02 2.494e+02, threshold=2.546e+02, percent-clipped=0.0 +2024-08-03 23:22:20,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.73 vs. limit=15.0 +2024-08-03 23:22:20,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262742.3333333333, ans=0.1 +2024-08-03 23:22:20,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=262742.3333333333, ans=0.2 +2024-08-03 23:22:24,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=262742.3333333333, ans=0.0 +2024-08-03 23:22:27,157 INFO [train.py:1114] (1/4) Epoch 20, batch 2500, loss[loss=0.2083, simple_loss=0.3004, pruned_loss=0.05806, over 13394.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2679, pruned_loss=0.04625, over 2637496.04 frames. 
], batch size: 39, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:22:38,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262815.6666666667, ans=0.125 +2024-08-03 23:22:38,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=262815.6666666667, ans=0.09899494936611666 +2024-08-03 23:22:40,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=262815.6666666667, ans=0.2 +2024-08-03 23:22:40,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262815.6666666667, ans=0.1 +2024-08-03 23:22:43,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262852.3333333333, ans=0.125 +2024-08-03 23:22:54,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=262889.0, ans=0.125 +2024-08-03 23:23:01,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=262925.6666666667, ans=0.025 +2024-08-03 23:23:02,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=262925.6666666667, ans=0.0 +2024-08-03 23:23:04,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262925.6666666667, ans=0.125 +2024-08-03 23:23:06,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262925.6666666667, ans=0.125 +2024-08-03 23:23:11,080 INFO [train.py:1114] (1/4) Epoch 20, batch 2550, loss[loss=0.1458, simple_loss=0.2285, pruned_loss=0.03157, over 13554.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2673, pruned_loss=0.04585, over 2638789.19 frames. ], batch size: 31, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:23:19,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=262999.0, ans=0.2 +2024-08-03 23:23:24,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=262999.0, ans=0.07 +2024-08-03 23:23:26,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262999.0, ans=0.125 +2024-08-03 23:23:27,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263035.6666666667, ans=0.0 +2024-08-03 23:23:31,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.92 vs. 
limit=15.0 +2024-08-03 23:23:38,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=263072.3333333333, ans=0.125 +2024-08-03 23:23:42,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.295e+01 1.081e+02 1.432e+02 1.962e+02 3.343e+02, threshold=2.864e+02, percent-clipped=8.0 +2024-08-03 23:23:44,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=263072.3333333333, ans=0.0 +2024-08-03 23:23:54,450 INFO [train.py:1114] (1/4) Epoch 20, batch 2600, loss[loss=0.1884, simple_loss=0.2716, pruned_loss=0.05265, over 13548.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2677, pruned_loss=0.04614, over 2638120.68 frames. ], batch size: 36, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:24:19,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.47 vs. limit=22.5 +2024-08-03 23:24:22,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=263255.6666666667, ans=0.025 +2024-08-03 23:24:38,438 INFO [train.py:1114] (1/4) Epoch 20, batch 2650, loss[loss=0.1868, simple_loss=0.2738, pruned_loss=0.04994, over 13343.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2684, pruned_loss=0.04642, over 2640750.09 frames. ], batch size: 46, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:24:39,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=263329.0, ans=0.125 +2024-08-03 23:24:42,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=263329.0, ans=0.125 +2024-08-03 23:24:52,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263365.6666666667, ans=0.1 +2024-08-03 23:24:53,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263365.6666666667, ans=0.1 +2024-08-03 23:24:56,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=263402.3333333333, ans=0.125 +2024-08-03 23:24:59,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=263402.3333333333, ans=0.0 +2024-08-03 23:25:03,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263402.3333333333, ans=0.1 +2024-08-03 23:25:09,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.115e+02 1.313e+02 1.651e+02 2.845e+02, threshold=2.627e+02, percent-clipped=0.0 +2024-08-03 23:25:21,921 INFO [train.py:1114] (1/4) Epoch 20, batch 2700, loss[loss=0.1818, simple_loss=0.278, pruned_loss=0.04277, over 13554.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2691, pruned_loss=0.04674, over 2637656.37 frames. ], batch size: 40, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:25:24,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.53 vs. 
limit=15.0 +2024-08-03 23:25:28,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263512.3333333333, ans=0.1 +2024-08-03 23:25:28,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263512.3333333333, ans=0.125 +2024-08-03 23:25:29,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=263512.3333333333, ans=10.0 +2024-08-03 23:25:29,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=263549.0, ans=0.125 +2024-08-03 23:25:42,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=15.0 +2024-08-03 23:25:47,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-08-03 23:26:08,056 INFO [train.py:1114] (1/4) Epoch 20, batch 2750, loss[loss=0.1585, simple_loss=0.2498, pruned_loss=0.03356, over 13329.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2678, pruned_loss=0.04665, over 2635638.05 frames. ], batch size: 34, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:26:10,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=263695.6666666667, ans=0.07 +2024-08-03 23:26:11,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=263695.6666666667, ans=0.125 +2024-08-03 23:26:14,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263695.6666666667, ans=0.1 +2024-08-03 23:26:18,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=263732.3333333333, ans=0.5 +2024-08-03 23:26:19,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. limit=15.0 +2024-08-03 23:26:29,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=263769.0, ans=0.025 +2024-08-03 23:26:30,792 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:26:39,456 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.617e+01 1.130e+02 1.320e+02 1.634e+02 2.919e+02, threshold=2.640e+02, percent-clipped=4.0 +2024-08-03 23:26:51,631 INFO [train.py:1114] (1/4) Epoch 20, batch 2800, loss[loss=0.2361, simple_loss=0.31, pruned_loss=0.08107, over 9624.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2689, pruned_loss=0.0472, over 2627741.55 frames. 
], batch size: 97, lr: 6.17e-03, grad_scale: 32.0 +2024-08-03 23:26:58,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=263879.0, ans=0.025 +2024-08-03 23:27:10,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=263952.3333333333, ans=0.0 +2024-08-03 23:27:10,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=263952.3333333333, ans=0.05 +2024-08-03 23:27:26,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=263989.0, ans=0.125 +2024-08-03 23:27:26,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=263989.0, ans=0.0 +2024-08-03 23:27:37,561 INFO [train.py:1114] (1/4) Epoch 20, batch 2850, loss[loss=0.1763, simple_loss=0.2606, pruned_loss=0.04606, over 13368.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2692, pruned_loss=0.04748, over 2621016.95 frames. ], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:27:42,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=264062.3333333333, ans=0.125 +2024-08-03 23:27:47,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=264099.0, ans=0.125 +2024-08-03 23:27:48,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264099.0, ans=0.0 +2024-08-03 23:27:50,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264099.0, ans=0.125 +2024-08-03 23:27:54,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264135.6666666667, ans=0.125 +2024-08-03 23:27:55,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264135.6666666667, ans=0.125 +2024-08-03 23:27:56,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.15 vs. limit=15.0 +2024-08-03 23:28:08,784 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.256e+01 1.084e+02 1.260e+02 1.608e+02 3.133e+02, threshold=2.519e+02, percent-clipped=4.0 +2024-08-03 23:28:13,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=264209.0, ans=0.125 +2024-08-03 23:28:21,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=264245.6666666667, ans=6.0 +2024-08-03 23:28:22,048 INFO [train.py:1114] (1/4) Epoch 20, batch 2900, loss[loss=0.1756, simple_loss=0.2672, pruned_loss=0.042, over 13370.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2705, pruned_loss=0.04778, over 2631524.73 frames. ], batch size: 36, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:28:28,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.07 vs. 
limit=15.0 +2024-08-03 23:28:31,937 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:28:38,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.41 vs. limit=10.0 +2024-08-03 23:28:43,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=264319.0, ans=0.125 +2024-08-03 23:28:53,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264355.6666666667, ans=0.125 +2024-08-03 23:29:05,316 INFO [train.py:1114] (1/4) Epoch 20, batch 2950, loss[loss=0.1696, simple_loss=0.2622, pruned_loss=0.0385, over 13362.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.269, pruned_loss=0.04733, over 2629620.34 frames. ], batch size: 34, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:06,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=264429.0, ans=10.0 +2024-08-03 23:29:08,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.52 vs. limit=15.0 +2024-08-03 23:29:08,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=264429.0, ans=0.125 +2024-08-03 23:29:21,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=264465.6666666667, ans=0.0 +2024-08-03 23:29:37,835 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.731e+01 1.151e+02 1.380e+02 1.780e+02 2.510e+02, threshold=2.761e+02, percent-clipped=0.0 +2024-08-03 23:29:39,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=264539.0, ans=0.125 +2024-08-03 23:29:46,829 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.17 vs. limit=15.0 +2024-08-03 23:29:48,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=264575.6666666667, ans=0.0 +2024-08-03 23:29:49,936 INFO [train.py:1114] (1/4) Epoch 20, batch 3000, loss[loss=0.1952, simple_loss=0.2792, pruned_loss=0.05561, over 13538.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2688, pruned_loss=0.0475, over 2629688.97 frames. ], batch size: 37, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:49,936 INFO [train.py:1137] (1/4) Computing validation loss +2024-08-03 23:29:59,831 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1683, simple_loss=0.267, pruned_loss=0.03482, over 944034.00 frames. +2024-08-03 23:29:59,832 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 10056MB +2024-08-03 23:29:59,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=264612.3333333333, ans=0.125 +2024-08-03 23:30:00,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.60 vs. 
limit=22.5 +2024-08-03 23:30:05,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=264612.3333333333, ans=0.125 +2024-08-03 23:30:11,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=264649.0, ans=0.0 +2024-08-03 23:30:12,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264649.0, ans=0.1 +2024-08-03 23:30:16,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=264685.6666666667, ans=0.0 +2024-08-03 23:30:17,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=264685.6666666667, ans=0.125 +2024-08-03 23:30:39,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264759.0, ans=0.125 +2024-08-03 23:30:43,187 INFO [train.py:1114] (1/4) Epoch 20, batch 3050, loss[loss=0.1777, simple_loss=0.2672, pruned_loss=0.04415, over 13531.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2697, pruned_loss=0.04781, over 2625930.43 frames. ], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:30:49,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=264795.6666666667, ans=0.0 +2024-08-03 23:30:58,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264795.6666666667, ans=0.125 +2024-08-03 23:30:58,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=264795.6666666667, ans=0.0 +2024-08-03 23:31:02,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=264832.3333333333, ans=0.125 +2024-08-03 23:31:07,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=264832.3333333333, ans=0.125 +2024-08-03 23:31:22,607 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.840e+01 1.051e+02 1.185e+02 1.395e+02 2.152e+02, threshold=2.371e+02, percent-clipped=0.0 +2024-08-03 23:31:29,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=264942.3333333333, ans=0.125 +2024-08-03 23:31:34,621 INFO [train.py:1114] (1/4) Epoch 20, batch 3100, loss[loss=0.1918, simple_loss=0.2814, pruned_loss=0.05116, over 13297.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2687, pruned_loss=0.04731, over 2625783.13 frames. 
], batch size: 46, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:31:36,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=264979.0, ans=0.125 +2024-08-03 23:31:40,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=264979.0, ans=0.025 +2024-08-03 23:31:42,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265015.6666666667, ans=0.125 +2024-08-03 23:31:42,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=265015.6666666667, ans=0.125 +2024-08-03 23:32:15,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265125.6666666667, ans=0.125 +2024-08-03 23:32:17,266 INFO [train.py:1114] (1/4) Epoch 20, batch 3150, loss[loss=0.176, simple_loss=0.2707, pruned_loss=0.0407, over 13059.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2687, pruned_loss=0.04718, over 2627346.46 frames. ], batch size: 48, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:32:24,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265162.3333333333, ans=0.0 +2024-08-03 23:32:39,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.65 vs. limit=22.5 +2024-08-03 23:32:43,178 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:32:48,869 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.568e+01 1.088e+02 1.255e+02 1.655e+02 2.829e+02, threshold=2.511e+02, percent-clipped=5.0 +2024-08-03 23:33:01,472 INFO [train.py:1114] (1/4) Epoch 20, batch 3200, loss[loss=0.1813, simple_loss=0.2701, pruned_loss=0.04619, over 13548.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2685, pruned_loss=0.04704, over 2633370.59 frames. ], batch size: 37, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:33:03,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=265345.6666666667, ans=0.09899494936611666 +2024-08-03 23:33:13,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=265382.3333333333, ans=0.2 +2024-08-03 23:33:15,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=265382.3333333333, ans=0.125 +2024-08-03 23:33:15,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=265382.3333333333, ans=0.09899494936611666 +2024-08-03 23:33:24,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=265419.0, ans=0.2 +2024-08-03 23:33:29,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=265455.6666666667, ans=0.0 +2024-08-03 23:33:36,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.62 vs. 
limit=10.0 +2024-08-03 23:33:43,974 INFO [train.py:1114] (1/4) Epoch 20, batch 3250, loss[loss=0.2036, simple_loss=0.2896, pruned_loss=0.05881, over 13391.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.269, pruned_loss=0.04718, over 2637014.99 frames. ], batch size: 38, lr: 6.15e-03, grad_scale: 16.0 +2024-08-03 23:33:44,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=265529.0, ans=0.025 +2024-08-03 23:33:50,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=265529.0, ans=0.2 +2024-08-03 23:33:50,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265529.0, ans=0.125 +2024-08-03 23:34:03,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=265602.3333333333, ans=0.0 +2024-08-03 23:34:10,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0 +2024-08-03 23:34:16,935 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.340e+01 1.119e+02 1.289e+02 1.600e+02 2.225e+02, threshold=2.578e+02, percent-clipped=0.0 +2024-08-03 23:34:17,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265639.0, ans=0.1 +2024-08-03 23:34:27,185 INFO [train.py:1114] (1/4) Epoch 20, batch 3300, loss[loss=0.1921, simple_loss=0.2818, pruned_loss=0.05117, over 12870.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2674, pruned_loss=0.04656, over 2639138.03 frames. ], batch size: 52, lr: 6.15e-03, grad_scale: 16.0 +2024-08-03 23:34:27,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=265712.3333333333, ans=0.125 +2024-08-03 23:34:28,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=265712.3333333333, ans=0.125 +2024-08-03 23:34:33,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.45 vs. limit=15.0 +2024-08-03 23:34:45,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=265785.6666666667, ans=0.125 +2024-08-03 23:34:46,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.81 vs. limit=22.5 +2024-08-03 23:34:58,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265822.3333333333, ans=0.1 +2024-08-03 23:34:58,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=265822.3333333333, ans=0.2 +2024-08-03 23:34:59,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.82 vs. limit=15.0 +2024-08-03 23:35:00,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. 
limit=6.0 +2024-08-03 23:35:01,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=265859.0, ans=0.95 +2024-08-03 23:35:09,980 INFO [train.py:1114] (1/4) Epoch 20, batch 3350, loss[loss=0.1869, simple_loss=0.2777, pruned_loss=0.04799, over 13295.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2683, pruned_loss=0.04704, over 2630055.30 frames. ], batch size: 49, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:35:27,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265969.0, ans=0.1 +2024-08-03 23:35:36,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=266005.6666666667, ans=0.2 +2024-08-03 23:35:42,730 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.153e+02 1.327e+02 1.503e+02 2.183e+02, threshold=2.655e+02, percent-clipped=0.0 +2024-08-03 23:35:53,122 INFO [train.py:1114] (1/4) Epoch 20, batch 3400, loss[loss=0.1509, simple_loss=0.2361, pruned_loss=0.03289, over 13541.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2681, pruned_loss=0.04682, over 2625732.04 frames. ], batch size: 31, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:35:55,947 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=15.0 +2024-08-03 23:35:57,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=266079.0, ans=0.125 +2024-08-03 23:36:05,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=266115.6666666667, ans=0.2 +2024-08-03 23:36:17,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266189.0, ans=0.1 +2024-08-03 23:36:23,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=266189.0, ans=0.0 +2024-08-03 23:36:23,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=266189.0, ans=0.125 +2024-08-03 23:36:31,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=266225.6666666667, ans=0.125 +2024-08-03 23:36:31,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=266225.6666666667, ans=0.125 +2024-08-03 23:36:33,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=266225.6666666667, ans=0.07 +2024-08-03 23:36:35,523 INFO [train.py:1114] (1/4) Epoch 20, batch 3450, loss[loss=0.1887, simple_loss=0.2811, pruned_loss=0.04815, over 12881.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2685, pruned_loss=0.047, over 2629282.21 frames. 
], batch size: 52, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:36:49,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=266299.0, ans=0.125 +2024-08-03 23:37:03,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=266372.3333333333, ans=0.2 +2024-08-03 23:37:08,061 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.153e+01 1.075e+02 1.228e+02 1.591e+02 2.797e+02, threshold=2.457e+02, percent-clipped=1.0 +2024-08-03 23:37:11,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266409.0, ans=0.1 +2024-08-03 23:37:15,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-08-03 23:37:18,296 INFO [train.py:1114] (1/4) Epoch 20, batch 3500, loss[loss=0.1815, simple_loss=0.2663, pruned_loss=0.04835, over 13528.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2673, pruned_loss=0.04655, over 2631236.17 frames. ], batch size: 34, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:37:30,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=266482.3333333333, ans=0.125 +2024-08-03 23:37:34,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=266482.3333333333, ans=0.125 +2024-08-03 23:37:41,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=266519.0, ans=0.2 +2024-08-03 23:37:44,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=266555.6666666667, ans=0.125 +2024-08-03 23:37:48,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=266555.6666666667, ans=0.0 +2024-08-03 23:37:49,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=266555.6666666667, ans=0.0 +2024-08-03 23:38:02,721 INFO [train.py:1114] (1/4) Epoch 20, batch 3550, loss[loss=0.1714, simple_loss=0.2622, pruned_loss=0.04035, over 12580.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2699, pruned_loss=0.0478, over 2629845.76 frames. ], batch size: 58, lr: 6.13e-03, grad_scale: 16.0 +2024-08-03 23:38:09,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=266629.0, ans=0.125 +2024-08-03 23:38:23,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-08-03 23:38:26,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.20 vs. limit=10.0 +2024-08-03 23:38:29,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.47 vs. 
limit=15.0 +2024-08-03 23:38:36,934 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.117e+01 1.134e+02 1.257e+02 1.418e+02 2.840e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 23:38:47,708 INFO [train.py:1114] (1/4) Epoch 20, batch 3600, loss[loss=0.227, simple_loss=0.3081, pruned_loss=0.07294, over 9266.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2738, pruned_loss=0.05112, over 2490611.04 frames. ], batch size: 97, lr: 6.13e-03, grad_scale: 32.0 +2024-08-03 23:38:52,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=266812.3333333333, ans=0.125 +2024-08-03 23:38:55,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=266849.0, ans=0.0 +2024-08-03 23:39:13,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266922.3333333333, ans=0.125 +2024-08-03 23:39:17,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=15.0 +2024-08-03 23:39:22,637 INFO [train.py:1387] (1/4) Done! diff --git a/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-2 b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-2 new file mode 100644 index 0000000000000000000000000000000000000000..e19d542e51360a6d70bb86b88508c4eef8c207d3 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-2 @@ -0,0 +1,10185 @@ +2024-08-02 23:23:28,706 INFO [train.py:1182] (2/4) Training started +2024-08-02 23:23:40,036 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-08-02 23:23:40,038 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': 'e3b0958-dirty', 'icefall-git-date': 'Tue Jul 30 21:51:45 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2656.int.cedar.computecanada.ca', 'IP address': '172.16.146.93'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 
'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 550, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-02 23:23:40,039 INFO [train.py:1212] (2/4) About to create model +2024-08-02 23:23:40,748 INFO [train.py:1216] (2/4) Number of model parameters: 66110931 +2024-08-02 23:23:40,875 INFO [train.py:1231] (2/4) Using DDP +2024-08-02 23:23:49,626 INFO [asr_datamodule.py:909] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-02 23:23:56,249 INFO [asr_datamodule.py:711] (2/4) Disable MUSAN +2024-08-02 23:23:56,249 INFO [asr_datamodule.py:729] (2/4) Enable SpecAugment +2024-08-02 23:23:56,249 INFO [asr_datamodule.py:730] (2/4) Time warp factor: 80 +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:740] (2/4) Num frame mask: 10 +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:753] (2/4) About to create train dataset +2024-08-02 23:23:56,250 INFO [asr_datamodule.py:780] (2/4) Using DynamicBucketingSampler. +2024-08-02 23:23:57,857 INFO [asr_datamodule.py:797] (2/4) About to create train dataloader +2024-08-02 23:23:57,865 INFO [asr_datamodule.py:926] (2/4) About to get dev-clean cuts +2024-08-02 23:23:58,241 INFO [asr_datamodule.py:933] (2/4) About to get dev-other cuts +2024-08-02 23:23:58,294 INFO [asr_datamodule.py:829] (2/4) About to create dev dataset +2024-08-02 23:23:58,615 INFO [asr_datamodule.py:846] (2/4) About to create dev dataloader +2024-08-02 23:23:58,615 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-02 23:30:00,740 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 8866MB +2024-08-02 23:30:01,696 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 9188MB +2024-08-02 23:32:14,987 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 9188MB +2024-08-02 23:32:16,113 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 9188MB +2024-08-02 23:33:32,667 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 9188MB +2024-08-02 23:33:39,256 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 9188MB +2024-08-02 23:34:57,140 INFO [train.py:1114] (2/4) Epoch 1, batch 0, loss[loss=7.91, simple_loss=7.229, pruned_loss=6.798, over 13355.00 frames. ], tot_loss[loss=7.91, simple_loss=7.229, pruned_loss=6.798, over 13355.00 frames. 
], batch size: 33, lr: 2.25e-02, grad_scale: 1.0 +2024-08-02 23:34:57,140 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-02 23:35:53,583 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=7.994, simple_loss=7.311, pruned_loss=6.819, over 944034.00 frames. +2024-08-02 23:35:53,584 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 9188MB +2024-08-02 23:35:55,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.60 vs. limit=7.5 +2024-08-02 23:36:01,600 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.90 vs. limit=7.5 +2024-08-02 23:36:59,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.17 vs. limit=7.5 +2024-08-02 23:38:24,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=0.0, ans=0.9 +2024-08-02 23:39:13,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=7.5275 +2024-08-02 23:39:43,146 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.388e+03 2.561e+03 2.629e+03 3.528e+03 3.944e+03, threshold=1.052e+04, percent-clipped=0.0 +2024-08-02 23:41:04,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=292.46 vs. limit=7.5275 +2024-08-02 23:41:55,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=166.49 vs. limit=7.5275 +2024-08-02 23:42:59,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=73.33333333333333, ans=0.5 +2024-08-02 23:43:00,498 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 5.313e+02 2.388e+03 2.781e+03 4.030e+03, threshold=9.553e+03, percent-clipped=0.0 +2024-08-02 23:43:22,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=73.33333333333333, ans=0.4965625 +2024-08-02 23:43:22,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73.33333333333333, ans=0.2992666666666667 +2024-08-02 23:44:43,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=53.88 vs. limit=4.044 +2024-08-02 23:45:01,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=185.72 vs. limit=7.54125 +2024-08-02 23:45:22,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=72.18 vs. limit=5.036666666666667 +2024-08-02 23:45:36,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146.66666666666666, ans=0.493125 +2024-08-02 23:45:51,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=31.08 vs. 
limit=7.61 +2024-08-02 23:45:58,750 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 4.964e+02 6.315e+02 2.388e+03 4.030e+03, threshold=2.526e+03, percent-clipped=0.0 +2024-08-02 23:45:59,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=16.88 vs. limit=4.058666666666666 +2024-08-02 23:46:23,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=171.68 vs. limit=5.073333333333333 +2024-08-02 23:46:45,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=146.66666666666666, ans=0.8948666666666667 +2024-08-02 23:46:50,276 INFO [train.py:1114] (2/4) Epoch 1, batch 50, loss[loss=1.243, simple_loss=1.1, pruned_loss=1.278, over 13407.00 frames. ], tot_loss[loss=3.064, simple_loss=2.81, pruned_loss=2.461, over 577926.43 frames. ], batch size: 32, lr: 2.48e-02, grad_scale: 0.5 +2024-08-02 23:47:13,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=108.49 vs. limit=7.56875 +2024-08-02 23:48:01,600 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=326.82 vs. limit=5.091666666666667 +2024-08-02 23:48:01,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.56 vs. limit=7.6375 +2024-08-02 23:48:07,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=297.37 vs. limit=7.5825 +2024-08-02 23:48:39,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=40.62 vs. limit=7.5825 +2024-08-02 23:50:44,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=21.10 vs. limit=5.064166666666667 +2024-08-02 23:50:57,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=114.26 vs. limit=7.59625 +2024-08-02 23:52:12,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=293.3333333333333, ans=0.29706666666666665 +2024-08-02 23:52:56,567 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=27.70 vs. limit=7.61 +2024-08-02 23:53:25,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=49.85 vs. limit=5.1466666666666665 +2024-08-02 23:54:20,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=330.0, ans=0.18762500000000001 +2024-08-02 23:54:28,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=189.66 vs. limit=7.62375 +2024-08-02 23:54:28,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=22.77 vs. 
+2024-08-02 23:54:29,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=330.0, ans=0.2467 +2024-08-02 23:54:45,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=117.80 vs. limit=7.7475 +2024-08-02 23:54:55,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=330.0, ans=0.48453125 +2024-08-02 23:55:14,123 INFO [train.py:1114] (2/4) Epoch 1, batch 100, loss[loss=1.143, simple_loss=0.985, pruned_loss=1.258, over 13535.00 frames. ], tot_loss[loss=2.064, simple_loss=1.864, pruned_loss=1.823, over 1025020.78 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 1.0 +2024-08-02 23:55:14,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=9.65 vs. limit=5.091666666666667 +2024-08-02 23:55:16,354 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+01 6.736e+01 1.462e+02 5.319e+02 4.030e+03, threshold=2.924e+02, percent-clipped=0.0 +2024-08-02 23:55:41,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=108.06 vs. limit=7.6375 +2024-08-02 23:55:41,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn1.whiten.whitening_limit, batch_count=366.6666666666667, ans=7.775 +2024-08-02 23:56:07,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=403.3333333333333, ans=0.048739583333333336 +2024-08-02 23:56:20,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=440.0, ans=0.479375 +2024-08-02 23:56:50,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=2.53 vs. limit=3.066 +2024-08-02 23:57:16,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.77 vs. limit=7.665 +2024-08-02 23:57:28,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=13.76 vs. limit=7.665 +2024-08-03 00:12:27,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=476.6666666666667, ans=5.238333333333333 +2024-08-03 00:14:48,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=513.3333333333334, ans=0.18075000000000002 +2024-08-03 00:15:51,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=90.20 vs. limit=7.6925 +2024-08-03 00:15:51,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=52.96 vs. limit=7.6925 +2024-08-03 00:15:55,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=38.49 vs. limit=7.6925
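
In the per-batch summaries, loss[...] is the loss on the current batch and tot_loss[...] a running average over recent batches; simple_loss and pruned_loss are the two terms of a pruned-RNN-T (pruned transducer) objective, and grad_scale looks like the mixed-precision loss-scaling factor, which is why it doubles (0.5, 1.0, 2.0, ...) as training stabilizes. A sketch of how the two loss terms are typically combined in such recipes, with illustrative warmup constants that are not read from this log:

```python
def combine_transducer_losses(simple_loss, pruned_loss, batch_count,
                              warmup_batches=3000.0):
    # Early in training the cheap "simple" term dominates for stability;
    # the full pruned term is phased in as warmup progresses.
    w = min(batch_count / warmup_batches, 1.0)
    simple_scale = 0.5 + 0.5 * (1.0 - w)
    pruned_scale = w
    return simple_scale * simple_loss + pruned_scale * pruned_loss
```
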
+2024-08-03 00:16:00,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=50.02 vs. limit=5.256666666666667 +2024-08-03 00:16:22,409 INFO [train.py:1114] (2/4) Epoch 1, batch 150, loss[loss=0.9869, simple_loss=0.8417, pruned_loss=1.054, over 13416.00 frames. ], tot_loss[loss=1.649, simple_loss=1.469, pruned_loss=1.548, over 1386672.48 frames. ], batch size: 32, lr: 2.93e-02, grad_scale: 1.0 +2024-08-03 00:16:29,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=78.48 vs. limit=7.70625 +2024-08-03 00:17:51,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=10.87 vs. limit=4.249333333333333 +2024-08-03 00:17:52,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=357.48 vs. limit=7.73375 +2024-08-03 00:17:57,748 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=48.78 vs. limit=7.73375 +2024-08-03 00:18:06,364 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 00:19:32,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=23.82 vs. limit=7.76125 +2024-08-03 00:19:53,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.98 vs. limit=8.0225 +2024-08-03 00:19:54,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=696.6666666666666, ans=0.41291666666666665 +2024-08-03 00:19:55,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=26.45 vs. limit=7.76125 +2024-08-03 00:20:05,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=86.86 vs. limit=5.348333333333334 +2024-08-03 00:20:24,579 INFO [train.py:1114] (2/4) Epoch 1, batch 200, loss[loss=1.038, simple_loss=0.8864, pruned_loss=1.03, over 12509.00 frames. ], tot_loss[loss=1.421, simple_loss=1.251, pruned_loss=1.367, over 1665647.12 frames. ], batch size: 58, lr: 3.15e-02, grad_scale: 2.0 +2024-08-03 00:20:26,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.53 vs. limit=8.05 +2024-08-03 00:20:29,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+01 4.646e+01 6.073e+01 7.987e+01 1.954e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-08-03 00:20:29,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.57 vs. limit=5.183333333333334 +2024-08-03 00:20:37,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.02 vs. limit=8.05
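
The Whitening entries track, for each named submodule, a statistic of how far its activations are from having a white (identity-like) covariance; a line is logged when the measured metric exceeds the module's (scheduled) whitening_limit, at which point a corrective penalty is applied. A rough reconstruction of such a metric, assuming it is the eigenvalue-spread ratio of the channel covariance, which is 1.0 for perfectly white activations; the exact formula and normalization in scaling.py may differ:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (..., num_channels); flatten everything but the channel dim
    x = x.reshape(-1, x.shape[-1])
    x = x - x.mean(dim=0)
    cov = (x.t() @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)  # real eigenvalues of the covariance
    # mean(eig^2) / mean(eig)^2 equals 1.0 iff all eigenvalues are equal
    # ("white") and grows as the spectrum becomes more lopsided
    return (eigs * eigs).mean() / eigs.mean().clamp(min=1e-20) ** 2
```
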
+2024-08-03 00:21:07,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=23.55 vs. limit=7.78875 +2024-08-03 00:22:44,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=843.3333333333334, ans=0.168375 +2024-08-03 00:22:54,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.27 vs. limit=3.1265 +2024-08-03 00:22:58,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=224.22 vs. limit=7.81625 +2024-08-03 00:22:59,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=52.64 vs. limit=7.81625 +2024-08-03 00:23:19,674 INFO [train.py:1114] (2/4) Epoch 1, batch 250, loss[loss=1.015, simple_loss=0.8555, pruned_loss=1.001, over 13333.00 frames. ], tot_loss[loss=1.28, simple_loss=1.117, pruned_loss=1.245, over 1884398.75 frames. ], batch size: 46, lr: 3.38e-02, grad_scale: 2.0 +2024-08-03 00:23:25,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=14.19 vs. limit=7.84375 +2024-08-03 00:23:26,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=16.22 vs. limit=5.458333333333333 +2024-08-03 00:23:29,182 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.35 vs. limit=5.229166666666667 +2024-08-03 00:23:31,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=916.6666666666666, ans=0.165625 +2024-08-03 00:23:34,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=953.3333333333334, ans=0.4553125 +2024-08-03 00:23:34,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=953.3333333333334, ans=0.07855000000000001 +2024-08-03 00:23:39,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.10 vs. limit=8.215 +2024-08-03 00:23:40,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.89 vs. limit=7.8575 +2024-08-03 00:24:30,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=7.87125 +2024-08-03 00:24:32,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.14 vs.
limit=8.2425 +2024-08-03 00:24:33,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=990.0, ans=0.37625 +2024-08-03 00:24:35,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=990.0, ans=0.2401 +2024-08-03 00:24:42,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=74.05 vs. limit=7.885 +2024-08-03 00:24:46,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=30.87 vs. limit=8.27 +2024-08-03 00:24:50,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.89 vs. limit=4.410666666666667 +2024-08-03 00:24:51,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten.whitening_limit, batch_count=1026.6666666666667, ans=7.885 +2024-08-03 00:24:53,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.32 vs. limit=8.2975 +2024-08-03 00:24:55,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=1063.3333333333333, ans=8.2975 +2024-08-03 00:24:59,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=1063.3333333333333, ans=0.3670833333333333 +2024-08-03 00:25:00,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=10.67 vs. limit=5.265833333333333 +2024-08-03 00:25:02,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=1063.3333333333333, ans=0.8627833333333333 +2024-08-03 00:25:03,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=1100.0, ans=7.9125 +2024-08-03 00:25:05,111 INFO [train.py:1114] (2/4) Epoch 1, batch 300, loss[loss=0.9777, simple_loss=0.8166, pruned_loss=0.9453, over 13437.00 frames. ], tot_loss[loss=1.187, simple_loss=1.026, pruned_loss=1.156, over 2051702.34 frames. ], batch size: 42, lr: 3.60e-02, grad_scale: 4.0 +2024-08-03 00:25:10,026 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 8.005e+01 9.897e+01 1.290e+02 2.424e+02, threshold=1.979e+02, percent-clipped=29.0 +2024-08-03 00:25:14,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=183.67 vs. limit=5.55 +2024-08-03 00:25:21,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.41 vs. limit=5.568333333333333 +2024-08-03 00:26:10,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=22.59 vs. limit=7.94 +2024-08-03 00:26:12,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=18.25 vs. 
limit=7.94 +2024-08-03 00:26:23,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=1173.3333333333333, ans=0.0736 +2024-08-03 00:26:47,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=30.94 vs. limit=5.605 +2024-08-03 00:27:05,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=1283.3333333333333, ans=0.07112500000000001 +2024-08-03 00:27:06,199 INFO [train.py:1114] (2/4) Epoch 1, batch 350, loss[loss=0.8974, simple_loss=0.7378, pruned_loss=0.8699, over 13580.00 frames. ], tot_loss[loss=1.125, simple_loss=0.9637, pruned_loss=1.094, over 2182166.32 frames. ], batch size: 33, lr: 3.83e-02, grad_scale: 4.0 +2024-08-03 00:27:06,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=19.37 vs. limit=7.98125 +2024-08-03 00:27:18,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=1283.3333333333333, ans=0.21925 +2024-08-03 00:27:19,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=1283.3333333333333, ans=0.07112500000000001 +2024-08-03 00:27:36,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=46.51 vs. limit=7.995 +2024-08-03 00:27:41,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=186.29 vs. limit=7.995 +2024-08-03 00:27:52,788 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.92 vs. limit=8.5175 +2024-08-03 00:28:05,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1393.3333333333333, ans=0.4346875 +2024-08-03 00:28:24,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.30 vs. limit=8.03625 +2024-08-03 00:28:36,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=33.56 vs. limit=8.05 +2024-08-03 00:28:36,683 INFO [train.py:1114] (2/4) Epoch 1, batch 400, loss[loss=0.9519, simple_loss=0.7778, pruned_loss=0.8987, over 13365.00 frames. ], tot_loss[loss=1.079, simple_loss=0.9156, pruned_loss=1.044, over 2286355.88 frames. ], batch size: 37, lr: 4.05e-02, grad_scale: 8.0 +2024-08-03 00:28:37,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.36 vs. limit=8.05 +2024-08-03 00:28:44,139 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.328e+01 8.404e+01 1.145e+02 1.534e+02 2.452e+02, threshold=2.291e+02, percent-clipped=10.0 +2024-08-03 00:29:38,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=60.65 vs. 
limit=8.06375 +2024-08-03 00:30:30,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=12.48 vs. limit=8.0775 +2024-08-03 00:30:31,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=1540.0, ans=0.4278125 +2024-08-03 00:32:28,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=21.74 vs. limit=8.105 +2024-08-03 00:32:36,111 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=32.13 vs. limit=8.105 +2024-08-03 00:32:51,723 INFO [train.py:1114] (2/4) Epoch 1, batch 450, loss[loss=0.9994, simple_loss=0.812, pruned_loss=0.9208, over 13552.00 frames. ], tot_loss[loss=1.049, simple_loss=0.8819, pruned_loss=1.006, over 2358654.93 frames. ], batch size: 38, lr: 4.28e-02, grad_scale: 8.0 +2024-08-03 00:32:54,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.92 vs. limit=4.66 +2024-08-03 00:33:12,932 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=20.58 vs. limit=8.11875 +2024-08-03 00:33:22,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.26 vs. limit=8.7375 +2024-08-03 00:33:32,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=8.765 +2024-08-03 00:33:38,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1686.6666666666667, ans=0.4209375 +2024-08-03 00:33:57,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.51 vs. limit=3.253 +2024-08-03 00:34:00,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.89 vs. limit=8.7925 +2024-08-03 00:34:04,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=1723.3333333333333, ans=8.14625 +2024-08-03 00:34:14,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.74 vs. limit=5.430833333333333 +2024-08-03 00:34:44,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=1760.0, ans=8.16 +2024-08-03 00:34:57,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1760.0, ans=0.2824 +2024-08-03 00:35:09,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=1796.6666666666667, ans=0.059575 +2024-08-03 00:35:31,097 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.83 vs. 
limit=8.8475 +2024-08-03 00:35:38,400 INFO [train.py:1114] (2/4) Epoch 1, batch 500, loss[loss=1.007, simple_loss=0.8157, pruned_loss=0.9009, over 13418.00 frames. ], tot_loss[loss=1.023, simple_loss=0.8533, pruned_loss=0.9698, over 2424823.17 frames. ], batch size: 43, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:35:53,388 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.232e+01 1.074e+02 1.283e+02 1.686e+02 3.614e+02, threshold=2.565e+02, percent-clipped=11.0 +2024-08-03 00:36:06,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=1833.3333333333333, ans=0.4140625 +2024-08-03 00:36:06,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=28.81 vs. limit=8.1875 +2024-08-03 00:36:06,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.72 vs. limit=8.875 +2024-08-03 00:36:10,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=22.06 vs. limit=8.1875 +2024-08-03 00:36:43,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=1870.0, ans=0.41234375 +2024-08-03 00:37:05,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.36 vs. limit=8.9025 +2024-08-03 00:38:31,617 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.98 vs. limit=8.22875 +2024-08-03 00:38:57,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=1943.3333333333333, ans=0.035 +2024-08-03 00:41:37,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.83 vs. limit=8.9575 +2024-08-03 00:43:26,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.39 vs. limit=8.985 +2024-08-03 00:43:27,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=33.13 vs. limit=8.2425 +2024-08-03 00:43:34,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.62 vs. limit=8.985 +2024-08-03 00:43:35,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.02 vs. limit=4.792 +2024-08-03 00:43:38,151 INFO [train.py:1114] (2/4) Epoch 1, batch 550, loss[loss=0.996, simple_loss=0.8085, pruned_loss=0.859, over 13042.00 frames. ], tot_loss[loss=1.004, simple_loss=0.8325, pruned_loss=0.9356, over 2467207.42 frames. ], batch size: 48, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:43:49,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.43 vs. 
limit=8.25625 +2024-08-03 00:44:19,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=2016.6666666666667, ans=0.2798333333333333 +2024-08-03 00:44:23,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=7.24 vs. limit=4.8213333333333335 +2024-08-03 00:44:33,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.89 vs. limit=8.27 +2024-08-03 00:45:06,111 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=40.14 vs. limit=8.27 +2024-08-03 00:45:09,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2090.0, ans=0.2791 +2024-08-03 00:45:11,705 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.71 vs. limit=8.28375 +2024-08-03 00:45:22,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=2090.0, ans=0.40203125 +2024-08-03 00:45:31,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=2090.0, ans=0.40203125 +2024-08-03 00:45:38,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.93 vs. limit=9.094999999999999 +2024-08-03 00:45:40,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=2126.6666666666665, ans=6.329166666666667 +2024-08-03 00:45:40,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=26.92 vs. limit=8.2975 +2024-08-03 00:45:47,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=2126.6666666666665, ans=0.4003125 +2024-08-03 00:45:53,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.53 vs. limit=5.531666666666666 +2024-08-03 00:45:54,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2163.3333333333335, ans=0.27836666666666665 +2024-08-03 00:45:54,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.58 vs. limit=9.1225 +2024-08-03 00:46:13,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.65 vs. limit=8.31125 +2024-08-03 00:46:18,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=2163.3333333333335, ans=0.8242833333333334 +2024-08-03 00:46:25,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.69 vs. 
limit=8.31125 +2024-08-03 00:46:28,715 INFO [train.py:1114] (2/4) Epoch 1, batch 600, loss[loss=0.9237, simple_loss=0.7608, pruned_loss=0.7483, over 13337.00 frames. ], tot_loss[loss=0.9847, simple_loss=0.8145, pruned_loss=0.8956, over 2507507.75 frames. ], batch size: 46, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:46:47,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=2200.0, ans=0.12625 +2024-08-03 00:46:48,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.51 vs. limit=9.15 +2024-08-03 00:46:49,718 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.983e+01 1.322e+02 1.697e+02 2.206e+02 6.951e+02, threshold=3.394e+02, percent-clipped=10.0 +2024-08-03 00:47:05,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.39 vs. limit=5.55 +2024-08-03 00:47:18,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=2236.6666666666665, ans=0.2204166666666667 +2024-08-03 00:47:24,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.08 vs. limit=9.1775 +2024-08-03 00:47:56,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2310.0, ans=0.39171875 +2024-08-03 00:48:02,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.85 vs. limit=6.173333333333333 +2024-08-03 00:48:03,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=8.38 +2024-08-03 00:48:16,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=2346.6666666666665, ans=0.112 +2024-08-03 00:48:17,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.42 vs. limit=8.38 +2024-08-03 00:48:17,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.96 vs. limit=5.586666666666667 +2024-08-03 00:48:17,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=9.26 +2024-08-03 00:48:23,491 INFO [train.py:1114] (2/4) Epoch 1, batch 650, loss[loss=0.8331, simple_loss=0.6986, pruned_loss=0.6316, over 13531.00 frames. ], tot_loss[loss=0.9553, simple_loss=0.7914, pruned_loss=0.8428, over 2543115.79 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:48:26,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.22 vs. 
limit=4.953333333333333 +2024-08-03 00:48:32,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=2383.3333333333335, ans=0.046375 +2024-08-03 00:49:00,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=9.3425 +2024-08-03 00:49:05,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.11 vs. limit=9.3425 +2024-08-03 00:49:10,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.57 vs. limit=8.42125 +2024-08-03 00:49:15,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.73 vs. limit=9.3425 +2024-08-03 00:49:19,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=2493.3333333333335, ans=0.042208333333333334 +2024-08-03 00:49:35,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=2493.3333333333335, ans=0.1883333333333333 +2024-08-03 00:49:36,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.45 vs. limit=8.44875 +2024-08-03 00:49:46,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.24 vs. limit=9.3975 +2024-08-03 00:49:48,097 INFO [train.py:1114] (2/4) Epoch 1, batch 700, loss[loss=0.7399, simple_loss=0.6292, pruned_loss=0.5317, over 13535.00 frames. ], tot_loss[loss=0.9207, simple_loss=0.7662, pruned_loss=0.7846, over 2565541.43 frames. ], batch size: 35, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:49:53,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.309e+01 1.383e+02 1.770e+02 2.360e+02 5.485e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-03 00:50:00,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.90 vs. limit=8.4625 +2024-08-03 00:50:04,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=2566.6666666666665, ans=0.3796875 +2024-08-03 00:50:15,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=2603.3333333333335, ans=0.8088833333333334 +2024-08-03 00:50:30,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=2676.6666666666665, ans=0.27323333333333333 +2024-08-03 00:50:32,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.35 vs. 
limit=8.50375 +2024-08-03 00:50:36,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=2676.6666666666665, ans=0.37453125 +2024-08-03 00:50:43,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2713.3333333333335, ans=0.3728125 +2024-08-03 00:50:50,665 INFO [train.py:1114] (2/4) Epoch 1, batch 750, loss[loss=0.7112, simple_loss=0.6171, pruned_loss=0.4795, over 13366.00 frames. ], tot_loss[loss=0.8773, simple_loss=0.7349, pruned_loss=0.7209, over 2583587.86 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:51:00,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2750.0, ans=0.37109375 +2024-08-03 00:51:00,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.43 vs. limit=8.53125 +2024-08-03 00:51:31,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=2823.3333333333335, ans=0.24235 +2024-08-03 00:51:37,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=2860.0, ans=0.0410625 +2024-08-03 00:51:40,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.12 vs. limit=9.645 +2024-08-03 00:52:02,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=2896.6666666666665, ans=0.36421875000000004 +2024-08-03 00:52:04,757 INFO [train.py:1114] (2/4) Epoch 1, batch 800, loss[loss=0.629, simple_loss=0.5523, pruned_loss=0.4069, over 13347.00 frames. ], tot_loss[loss=0.8347, simple_loss=0.7047, pruned_loss=0.6608, over 2597610.32 frames. ], batch size: 33, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:52:06,761 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.637e+02 2.042e+02 2.862e+02 4.523e+02, threshold=4.084e+02, percent-clipped=8.0 +2024-08-03 00:52:08,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=9.7 +2024-08-03 00:52:11,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.92 vs. limit=9.7 +2024-08-03 00:52:16,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=2970.0, ans=0.08143750000000001 +2024-08-03 00:52:27,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=3006.6666666666665, ans=0.09899494936611666 +2024-08-03 00:52:29,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.32 vs. 
limit=5.2026666666666666 +2024-08-03 00:52:32,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=3006.6666666666665, ans=0.3590625 +2024-08-03 00:53:09,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3080.0, ans=0.2692 +2024-08-03 00:53:12,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.93 vs. limit=5.77 +2024-08-03 00:53:14,295 INFO [train.py:1114] (2/4) Epoch 1, batch 850, loss[loss=0.6759, simple_loss=0.5979, pruned_loss=0.4249, over 13319.00 frames. ], tot_loss[loss=0.793, simple_loss=0.6749, pruned_loss=0.6051, over 2610080.77 frames. ], batch size: 40, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:53:23,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=3116.6666666666665, ans=0.35390625 +2024-08-03 00:53:32,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3153.3333333333335, ans=0.26846666666666663 +2024-08-03 00:53:34,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3153.3333333333335, ans=0.26846666666666663 +2024-08-03 00:54:04,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.59 vs. limit=4.645333333333333 +2024-08-03 00:54:18,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=3226.6666666666665, ans=0.027399999999999994 +2024-08-03 00:54:45,061 INFO [train.py:1114] (2/4) Epoch 1, batch 900, loss[loss=0.5577, simple_loss=0.5069, pruned_loss=0.3263, over 13359.00 frames. ], tot_loss[loss=0.7546, simple_loss=0.6481, pruned_loss=0.5549, over 2612482.48 frames. ], batch size: 33, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:54:47,012 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.709e+02 2.155e+02 3.211e+02 6.364e+02, threshold=4.310e+02, percent-clipped=14.0 +2024-08-03 00:55:07,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.94 vs. limit=10.0025 +2024-08-03 00:55:20,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.49 vs. limit=10.03 +2024-08-03 00:55:25,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=3373.3333333333335, ans=0.21626666666666666 +2024-08-03 00:55:26,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=3373.3333333333335, ans=0.34187500000000004 +2024-08-03 00:55:34,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=3410.0, ans=0.072125 +2024-08-03 00:56:05,965 INFO [train.py:1114] (2/4) Epoch 1, batch 950, loss[loss=0.5671, simple_loss=0.5174, pruned_loss=0.3271, over 13533.00 frames. ], tot_loss[loss=0.7214, simple_loss=0.6252, pruned_loss=0.5117, over 2614019.27 frames. 
], batch size: 34, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:56:10,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=3483.3333333333335, ans=0.02162499999999999 +2024-08-03 00:56:11,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=3483.3333333333335, ans=0.33671875 +2024-08-03 00:56:42,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=3520.0, ans=0.2648 +2024-08-03 00:56:58,600 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.39 vs. limit=10.1675 +2024-08-03 00:57:03,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=3556.6666666666665, ans=0.7755166666666667 +2024-08-03 00:57:18,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=3630.0, ans=0.018520000000000002 +2024-08-03 00:57:19,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=3630.0, ans=0.018324999999999994 +2024-08-03 00:57:22,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=3630.0, ans=0.06387499999999999 +2024-08-03 00:57:23,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=3630.0, ans=0.0773125 +2024-08-03 00:57:28,052 INFO [train.py:1114] (2/4) Epoch 1, batch 1000, loss[loss=0.5425, simple_loss=0.5039, pruned_loss=0.2991, over 13360.00 frames. ], tot_loss[loss=0.6915, simple_loss=0.6048, pruned_loss=0.4738, over 2612730.04 frames. ], batch size: 35, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:57:32,813 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.579e+02 2.012e+02 2.638e+02 6.886e+02, threshold=4.024e+02, percent-clipped=6.0 +2024-08-03 00:57:35,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.43 vs. limit=6.833333333333333 +2024-08-03 00:57:54,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.71 vs. limit=5.925833333333333 +2024-08-03 00:57:59,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=3703.3333333333335, ans=0.016674999999999995 +2024-08-03 00:58:36,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=3813.3333333333335, ans=0.32125000000000004 +2024-08-03 00:58:42,115 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 00:58:43,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3813.3333333333335, ans=0.32125000000000004 +2024-08-03 00:58:46,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.75 vs. 
limit=6.906666666666666 +2024-08-03 00:58:48,541 INFO [train.py:1114] (2/4) Epoch 1, batch 1050, loss[loss=0.5966, simple_loss=0.5451, pruned_loss=0.3398, over 13581.00 frames. ], tot_loss[loss=0.6603, simple_loss=0.5831, pruned_loss=0.4372, over 2616266.13 frames. ], batch size: 39, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:05,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.52 vs. limit=5.971666666666667 +2024-08-03 00:59:18,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=3923.3333333333335, ans=5.980833333333333 +2024-08-03 00:59:27,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=3960.0, ans=0.05149999999999999 +2024-08-03 00:59:34,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=3960.0, ans=0.07525000000000001 +2024-08-03 00:59:36,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=3960.0, ans=0.010899999999999993 +2024-08-03 00:59:40,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=3996.6666666666665, ans=0.010075 +2024-08-03 00:59:48,513 INFO [train.py:1114] (2/4) Epoch 1, batch 1100, loss[loss=0.5429, simple_loss=0.5043, pruned_loss=0.2979, over 13572.00 frames. ], tot_loss[loss=0.6331, simple_loss=0.5644, pruned_loss=0.406, over 2619790.83 frames. ], batch size: 36, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:50,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.598e+02 2.010e+02 2.726e+02 4.926e+02, threshold=4.021e+02, percent-clipped=7.0 +2024-08-03 00:59:53,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.18 vs. limit=6.008333333333333 +2024-08-03 00:59:53,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.02 vs. 
limit=6.008333333333333 +2024-08-03 01:00:12,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=4033.3333333333335, ans=0.7903333333333333 +2024-08-03 01:00:18,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4070.0, ans=0.2593 +2024-08-03 01:00:24,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=4106.666666666667, ans=0.07433333333333333 +2024-08-03 01:00:38,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=4143.333333333333, ans=0.30578125 +2024-08-03 01:00:42,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4143.333333333333, ans=0.25856666666666667 +2024-08-03 01:00:50,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4180.0, ans=0.2582 +2024-08-03 01:00:55,286 INFO [train.py:1114] (2/4) Epoch 1, batch 1150, loss[loss=0.5447, simple_loss=0.5064, pruned_loss=0.2978, over 13552.00 frames. ], tot_loss[loss=0.6095, simple_loss=0.5486, pruned_loss=0.3789, over 2619314.76 frames. ], batch size: 36, lr: 4.47e-02, grad_scale: 16.0 +2024-08-03 01:01:09,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4216.666666666667, ans=0.30234375 +2024-08-03 01:01:09,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.37 vs. limit=6.054166666666667 +2024-08-03 01:01:10,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4253.333333333333, ans=0.30062500000000003 +2024-08-03 01:01:13,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=4253.333333333333, ans=0.0 +2024-08-03 01:01:28,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.53 vs. limit=9.10875 +2024-08-03 01:01:43,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=9.1225 +2024-08-03 01:01:52,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.79 vs. limit=9.13625 +2024-08-03 01:01:55,174 INFO [train.py:1114] (2/4) Epoch 1, batch 1200, loss[loss=0.5076, simple_loss=0.4868, pruned_loss=0.2607, over 13584.00 frames. ], tot_loss[loss=0.5888, simple_loss=0.535, pruned_loss=0.3556, over 2616730.16 frames. ], batch size: 39, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:01:57,186 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.679e+02 2.058e+02 2.623e+02 8.489e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-03 01:02:16,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.72 vs. 
limit=7.218333333333334 +2024-08-03 01:02:38,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=4510.0, ans=0.28859375 +2024-08-03 01:02:40,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=4510.0, ans=10.8825 +2024-08-03 01:02:54,161 INFO [train.py:1114] (2/4) Epoch 1, batch 1250, loss[loss=0.538, simple_loss=0.505, pruned_loss=0.2879, over 13408.00 frames. ], tot_loss[loss=0.5709, simple_loss=0.5234, pruned_loss=0.3357, over 2628736.63 frames. ], batch size: 42, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:03:15,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.55 vs. limit=6.145833333333333 +2024-08-03 01:03:21,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=4583.333333333333, ans=0.28515625 +2024-08-03 01:03:22,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4583.333333333333, ans=0.25416666666666665 +2024-08-03 01:03:23,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=4620.0, ans=0.04741666666666667 +2024-08-03 01:03:24,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=4620.0, ans=0.2834375 +2024-08-03 01:03:52,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=4693.333333333333, ans=0.0 +2024-08-03 01:03:52,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=4693.333333333333, ans=0.28 +2024-08-03 01:03:52,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=4693.333333333333, ans=0.009849275362318841 +2024-08-03 01:04:06,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4730.0, ans=0.27828125000000004 +2024-08-03 01:04:08,947 INFO [train.py:1114] (2/4) Epoch 1, batch 1300, loss[loss=0.5155, simple_loss=0.487, pruned_loss=0.2726, over 12863.00 frames. ], tot_loss[loss=0.5515, simple_loss=0.5101, pruned_loss=0.3164, over 2630584.28 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:04:10,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.740e+02 2.083e+02 2.560e+02 4.997e+02, threshold=4.167e+02, percent-clipped=2.0 +2024-08-03 01:04:33,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=4766.666666666667, ans=0.15590833333333334 +2024-08-03 01:04:46,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=4840.0, ans=0.025 +2024-08-03 01:04:47,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.16 vs. 
limit=6.21 +2024-08-03 01:04:49,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=4840.0, ans=0.273125 +2024-08-03 01:05:03,668 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=11.1575 +2024-08-03 01:05:12,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4876.666666666667, ans=0.2512333333333333 +2024-08-03 01:05:13,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=4913.333333333333, ans=0.2508666666666667 +2024-08-03 01:05:16,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4913.333333333333, ans=0.04619444444444445 +2024-08-03 01:05:21,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=4913.333333333333, ans=0.26968749999999997 +2024-08-03 01:05:23,858 INFO [train.py:1114] (2/4) Epoch 1, batch 1350, loss[loss=0.4661, simple_loss=0.4545, pruned_loss=0.2331, over 13549.00 frames. ], tot_loss[loss=0.5347, simple_loss=0.4992, pruned_loss=0.2995, over 2638992.10 frames. ], batch size: 37, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:05:24,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=4950.0, ans=0.009793478260869565 +2024-08-03 01:05:25,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=4950.0, ans=0.26796875 +2024-08-03 01:05:27,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=6.2375 +2024-08-03 01:05:29,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.63 vs. limit=11.2125 +2024-08-03 01:05:35,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4986.666666666667, ans=0.2501333333333333 +2024-08-03 01:06:04,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=5096.666666666667, ans=0.26109375 +2024-08-03 01:06:10,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.09 vs. limit=11.3225 +2024-08-03 01:06:13,509 INFO [train.py:1114] (2/4) Epoch 1, batch 1400, loss[loss=0.4099, simple_loss=0.4031, pruned_loss=0.2025, over 13240.00 frames. ], tot_loss[loss=0.519, simple_loss=0.489, pruned_loss=0.2844, over 2642811.34 frames. 
], batch size: 31, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:06:15,473 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.555e+02 1.828e+02 2.203e+02 3.760e+02, threshold=3.656e+02, percent-clipped=0.0 +2024-08-03 01:06:17,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5133.333333333333, ans=0.259375 +2024-08-03 01:06:31,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=5170.0, ans=0.7190500000000001 +2024-08-03 01:06:33,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5170.0, ans=0.25765625000000003 +2024-08-03 01:06:55,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=9.48 +2024-08-03 01:06:55,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=5280.0, ans=0.7152000000000001 +2024-08-03 01:07:05,500 INFO [train.py:1114] (2/4) Epoch 1, batch 1450, loss[loss=0.5022, simple_loss=0.493, pruned_loss=0.2497, over 13430.00 frames. ], tot_loss[loss=0.5081, simple_loss=0.4823, pruned_loss=0.2736, over 2641444.42 frames. ], batch size: 43, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:07:22,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=5353.333333333333, ans=0.24906250000000002 +2024-08-03 01:07:29,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.12 vs. limit=11.515 +2024-08-03 01:07:30,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=5390.0, ans=0.71135 +2024-08-03 01:07:36,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.92 vs. limit=9.52125 +2024-08-03 01:07:36,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.64 vs. limit=9.52125 +2024-08-03 01:07:37,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=5390.0, ans=0.1961 +2024-08-03 01:07:38,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.15 vs. limit=6.3475 +2024-08-03 01:07:42,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=9.535 +2024-08-03 01:07:58,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.71 vs. limit=9.54875 +2024-08-03 01:08:00,850 INFO [train.py:1114] (2/4) Epoch 1, batch 1500, loss[loss=0.4528, simple_loss=0.4573, pruned_loss=0.2155, over 13397.00 frames. ], tot_loss[loss=0.497, simple_loss=0.4757, pruned_loss=0.2631, over 2641040.13 frames. 
], batch size: 39, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:08:14,031 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.665e+02 2.059e+02 2.727e+02 4.755e+02, threshold=4.117e+02, percent-clipped=2.0 +2024-08-03 01:08:29,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=5536.666666666667, ans=0.24046875 +2024-08-03 01:08:52,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=5610.0, ans=0.00965 +2024-08-03 01:08:55,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=5610.0, ans=0.025 +2024-08-03 01:09:12,862 INFO [train.py:1114] (2/4) Epoch 1, batch 1550, loss[loss=0.4574, simple_loss=0.4609, pruned_loss=0.2196, over 13403.00 frames. ], tot_loss[loss=0.4879, simple_loss=0.4698, pruned_loss=0.255, over 2631134.62 frames. ], batch size: 41, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:09:13,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=5683.333333333333, ans=8.552083333333332 +2024-08-03 01:09:21,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.23 vs. limit=11.7625 +2024-08-03 01:10:03,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=9.6725 +2024-08-03 01:10:03,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=5793.333333333333, ans=0.035 +2024-08-03 01:10:07,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.08 vs. limit=9.6725 +2024-08-03 01:10:10,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=5830.0, ans=0.031781250000000004 +2024-08-03 01:10:19,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=5830.0, ans=0.22671875000000002 +2024-08-03 01:10:30,114 INFO [train.py:1114] (2/4) Epoch 1, batch 1600, loss[loss=0.5086, simple_loss=0.5031, pruned_loss=0.2521, over 13573.00 frames. ], tot_loss[loss=0.4792, simple_loss=0.4644, pruned_loss=0.2474, over 2623542.73 frames. 
], batch size: 39, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:10:32,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=5866.666666666667, ans=8.666666666666668 +2024-08-03 01:10:33,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.694e+02 2.197e+02 2.790e+02 6.281e+02, threshold=4.393e+02, percent-clipped=9.0 +2024-08-03 01:10:55,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=5903.333333333333, ans=0.6933833333333334 +2024-08-03 01:10:55,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5903.333333333333, ans=0.22328125 +2024-08-03 01:11:20,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=5976.666666666667, ans=0.04176388888888889 +2024-08-03 01:11:23,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5976.666666666667, ans=0.0 +2024-08-03 01:11:23,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=5976.666666666667, ans=0.04176388888888889 +2024-08-03 01:11:28,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=6013.333333333333, ans=0.04161111111111111 +2024-08-03 01:11:37,625 INFO [train.py:1114] (2/4) Epoch 1, batch 1650, loss[loss=0.4814, simple_loss=0.4791, pruned_loss=0.2374, over 13324.00 frames. ], tot_loss[loss=0.4727, simple_loss=0.4608, pruned_loss=0.2415, over 2620448.52 frames. ], batch size: 40, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:11:50,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6086.666666666667, ans=0.2391333333333333 +2024-08-03 01:11:56,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=9.7825 +2024-08-03 01:12:21,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=6160.0, ans=0.041 +2024-08-03 01:12:35,771 INFO [train.py:1114] (2/4) Epoch 1, batch 1700, loss[loss=0.3481, simple_loss=0.3655, pruned_loss=0.1595, over 13245.00 frames. ], tot_loss[loss=0.4634, simple_loss=0.4554, pruned_loss=0.234, over 2629934.68 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:12:37,766 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.541e+02 1.894e+02 2.425e+02 4.300e+02, threshold=3.787e+02, percent-clipped=0.0 +2024-08-03 01:12:51,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=6270.0, ans=0.04054166666666667 +2024-08-03 01:12:54,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.56 vs. limit=8.135 +2024-08-03 01:12:57,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. 
limit=3.9459999999999997 +2024-08-03 01:12:58,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=6306.666666666667, ans=0.009498550724637681 +2024-08-03 01:13:03,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=6306.666666666667, ans=0.009498550724637681 +2024-08-03 01:13:09,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=6343.333333333333, ans=0.6779833333333334 +2024-08-03 01:13:10,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=6343.333333333333, ans=0.009490579710144928 +2024-08-03 01:13:21,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.45 vs. limit=6.595 +2024-08-03 01:13:26,842 INFO [train.py:1114] (2/4) Epoch 1, batch 1750, loss[loss=0.3895, simple_loss=0.3952, pruned_loss=0.1884, over 13553.00 frames. ], tot_loss[loss=0.457, simple_loss=0.4519, pruned_loss=0.2288, over 2633921.37 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:13:31,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=6416.666666666667, ans=0.03993055555555555 +2024-08-03 01:13:39,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.97 vs. limit=12.34 +2024-08-03 01:13:50,097 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.40 vs. limit=12.34 +2024-08-03 01:13:50,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.80 vs. limit=9.92 +2024-08-03 01:13:54,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=6490.0, ans=0.07 +2024-08-03 01:14:02,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=6526.666666666667, ans=0.19406250000000003 +2024-08-03 01:14:13,022 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.58 vs. limit=12.4225 +2024-08-03 01:14:15,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=6563.333333333333, ans=0.6702833333333333 +2024-08-03 01:14:18,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=6563.333333333333, ans=0.6702833333333333 +2024-08-03 01:14:24,174 INFO [train.py:1114] (2/4) Epoch 1, batch 1800, loss[loss=0.4628, simple_loss=0.469, pruned_loss=0.2251, over 13545.00 frames. ], tot_loss[loss=0.4511, simple_loss=0.449, pruned_loss=0.224, over 2635586.86 frames. ], batch size: 38, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:14:26,051 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.670e+02 2.044e+02 2.499e+02 4.845e+02, threshold=4.088e+02, percent-clipped=4.0 +2024-08-03 01:14:31,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.90 vs. 
limit=12.45 +2024-08-03 01:14:38,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=6600.0, ans=0.0 +2024-08-03 01:14:45,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=6636.666666666667, ans=0.6677166666666667 +2024-08-03 01:14:51,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6673.333333333333, ans=0.23326666666666668 +2024-08-03 01:14:52,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.57 vs. limit=10.0025 +2024-08-03 01:14:52,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=6673.333333333333, ans=0.025 +2024-08-03 01:15:00,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=6673.333333333333, ans=0.03886111111111112 +2024-08-03 01:15:05,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=6673.333333333333, ans=0.6664333333333334 +2024-08-03 01:15:10,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=6710.0, ans=0.18546875000000002 +2024-08-03 01:15:26,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=6746.666666666667, ans=0.6638666666666666 +2024-08-03 01:15:30,698 INFO [train.py:1114] (2/4) Epoch 1, batch 1850, loss[loss=0.448, simple_loss=0.4593, pruned_loss=0.2157, over 13388.00 frames. ], tot_loss[loss=0.4441, simple_loss=0.4452, pruned_loss=0.2188, over 2638711.81 frames. ], batch size: 39, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:15:37,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6783.333333333333, ans=0.23216666666666666 +2024-08-03 01:15:47,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.68 vs. 
limit=10.057500000000001 +2024-08-03 01:15:54,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=6856.666666666667, ans=10.07125 +2024-08-03 01:15:57,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=6856.666666666667, ans=0.009378985507246376 +2024-08-03 01:16:07,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=6893.333333333333, ans=0.23106666666666664 +2024-08-03 01:16:11,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=6930.0, ans=0.17515625 +2024-08-03 01:16:12,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=6930.0, ans=0.07 +2024-08-03 01:16:25,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=6930.0, ans=0.6574500000000001 +2024-08-03 01:16:27,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=6930.0, ans=0.03779166666666667 +2024-08-03 01:16:31,453 INFO [train.py:1114] (2/4) Epoch 1, batch 1900, loss[loss=0.4586, simple_loss=0.4595, pruned_loss=0.2276, over 13330.00 frames. ], tot_loss[loss=0.4394, simple_loss=0.4433, pruned_loss=0.2152, over 2640451.70 frames. ], batch size: 40, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:16:35,176 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.757e+02 2.130e+02 2.546e+02 5.245e+02, threshold=4.259e+02, percent-clipped=2.0 +2024-08-03 01:16:44,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=6966.666666666667, ans=0.17343750000000002 +2024-08-03 01:17:20,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7076.666666666667, ans=0.16828125 +2024-08-03 01:17:30,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=7113.333333333333, ans=0.1665625 +2024-08-03 01:17:34,535 INFO [train.py:1114] (2/4) Epoch 1, batch 1950, loss[loss=0.3655, simple_loss=0.3995, pruned_loss=0.1644, over 13560.00 frames. ], tot_loss[loss=0.4355, simple_loss=0.4425, pruned_loss=0.212, over 2646771.11 frames. ], batch size: 36, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:18:42,883 INFO [train.py:1114] (2/4) Epoch 1, batch 2000, loss[loss=0.3711, simple_loss=0.3956, pruned_loss=0.1733, over 13559.00 frames. ], tot_loss[loss=0.4314, simple_loss=0.4404, pruned_loss=0.2093, over 2635621.08 frames. ], batch size: 31, lr: 4.42e-02, grad_scale: 32.0 +2024-08-03 01:18:44,715 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.650e+02 1.978e+02 2.674e+02 4.949e+02, threshold=3.955e+02, percent-clipped=2.0 +2024-08-03 01:18:58,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=7333.333333333333, ans=0.15625 +2024-08-03 01:18:59,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.98 vs. 
limit=6.933333333333334 +2024-08-03 01:19:11,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7370.0, ans=0.2263 +2024-08-03 01:19:30,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.20 vs. limit=4.1165 +2024-08-03 01:19:30,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=7443.333333333333, ans=0.15109375000000003 +2024-08-03 01:19:32,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.81 vs. limit=13.0825 +2024-08-03 01:19:41,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=7480.0, ans=0.035500000000000004 +2024-08-03 01:19:42,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=7480.0, ans=0.025 +2024-08-03 01:19:44,682 INFO [train.py:1114] (2/4) Epoch 1, batch 2050, loss[loss=0.376, simple_loss=0.3962, pruned_loss=0.1779, over 13409.00 frames. ], tot_loss[loss=0.4253, simple_loss=0.4369, pruned_loss=0.2054, over 2632842.24 frames. ], batch size: 32, lr: 4.42e-02, grad_scale: 64.0 +2024-08-03 01:20:03,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=7553.333333333333, ans=0.035194444444444445 +2024-08-03 01:20:12,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=7553.333333333333, ans=0.1459375 +2024-08-03 01:20:22,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.52 vs. limit=13.192499999999999 +2024-08-03 01:20:24,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=7590.0, ans=0.009219565217391305 +2024-08-03 01:20:40,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=7626.666666666667, ans=10.36 +2024-08-03 01:20:49,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.18 vs. limit=13.247499999999999 +2024-08-03 01:20:50,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=10.37375 +2024-08-03 01:20:54,874 INFO [train.py:1114] (2/4) Epoch 1, batch 2100, loss[loss=0.3801, simple_loss=0.4148, pruned_loss=0.1727, over 13546.00 frames. ], tot_loss[loss=0.4185, simple_loss=0.4331, pruned_loss=0.2008, over 2638966.42 frames. 
], batch size: 37, lr: 4.42e-02, grad_scale: 8.0 +2024-08-03 01:20:59,391 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.800e+02 2.088e+02 2.971e+02 6.141e+02, threshold=4.177e+02, percent-clipped=15.0 +2024-08-03 01:21:12,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=7773.333333333333, ans=0.2 +2024-08-03 01:21:25,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=7773.333333333333, ans=13.33 +2024-08-03 01:21:36,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=7810.0, ans=0.034125 +2024-08-03 01:21:43,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=7846.666666666667, ans=9.904166666666667 +2024-08-03 01:21:53,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7846.666666666667, ans=0.1321875 +2024-08-03 01:21:53,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=7846.666666666667, ans=0.6253666666666666 +2024-08-03 01:21:53,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=7846.666666666667, ans=0.1321875 +2024-08-03 01:21:56,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.38 vs. limit=10.45625 +2024-08-03 01:21:56,455 INFO [train.py:1114] (2/4) Epoch 1, batch 2150, loss[loss=0.3449, simple_loss=0.3929, pruned_loss=0.1485, over 13550.00 frames. ], tot_loss[loss=0.4108, simple_loss=0.4286, pruned_loss=0.1956, over 2647306.05 frames. ], batch size: 36, lr: 4.41e-02, grad_scale: 8.0 +2024-08-03 01:21:58,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.42 vs. limit=6.970833333333333 +2024-08-03 01:22:11,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=7920.0, ans=0.025 +2024-08-03 01:22:22,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=7956.666666666667, ans=0.12703124999999998 +2024-08-03 01:22:35,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.83 vs. limit=10.4975 +2024-08-03 01:22:42,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.28 vs. limit=13.5225 +2024-08-03 01:22:46,522 INFO [train.py:1114] (2/4) Epoch 1, batch 2200, loss[loss=0.345, simple_loss=0.401, pruned_loss=0.1445, over 13386.00 frames. ], tot_loss[loss=0.4084, simple_loss=0.4278, pruned_loss=0.1938, over 2645301.87 frames. 
], batch size: 39, lr: 4.41e-02, grad_scale: 8.0 +2024-08-03 01:22:51,107 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.590e+02 1.905e+02 2.323e+02 5.165e+02, threshold=3.810e+02, percent-clipped=4.0 +2024-08-03 01:23:01,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8103.333333333333, ans=0.21896666666666664 +2024-08-03 01:23:30,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.43 vs. limit=13.6325 +2024-08-03 01:23:31,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.96 vs. limit=10.56625 +2024-08-03 01:23:32,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=4.2265 +2024-08-03 01:23:39,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.71 vs. limit=13.66 +2024-08-03 01:23:42,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=8250.0, ans=0.125 +2024-08-03 01:23:43,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=8250.0, ans=0.125 +2024-08-03 01:23:43,801 INFO [train.py:1114] (2/4) Epoch 1, batch 2250, loss[loss=0.4124, simple_loss=0.4383, pruned_loss=0.1932, over 13346.00 frames. ], tot_loss[loss=0.4053, simple_loss=0.4262, pruned_loss=0.1917, over 2642580.04 frames. ], batch size: 37, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:23:45,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=8250.0, ans=0.125 +2024-08-03 01:23:47,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=8250.0, ans=0.125 +2024-08-03 01:23:58,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.44 vs. limit=10.6075 +2024-08-03 01:23:58,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=8286.666666666666, ans=0.125 +2024-08-03 01:24:34,657 INFO [train.py:1114] (2/4) Epoch 1, batch 2300, loss[loss=0.3352, simple_loss=0.369, pruned_loss=0.1508, over 13574.00 frames. ], tot_loss[loss=0.4015, simple_loss=0.4234, pruned_loss=0.1894, over 2639012.81 frames. ], batch size: 33, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:24:35,145 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.86 vs. limit=10.662500000000001 +2024-08-03 01:24:51,217 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.913e+02 2.281e+02 2.883e+02 4.389e+02, threshold=4.562e+02, percent-clipped=6.0 +2024-08-03 01:25:01,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.86 vs. 
limit=13.8525 +2024-08-03 01:25:01,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.91 vs. limit=13.8525 +2024-08-03 01:25:06,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8506.666666666666, ans=0.21493333333333334 +2024-08-03 01:25:23,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=8580.0, ans=0.0 +2024-08-03 01:25:25,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.08 vs. limit=13.934999999999999 +2024-08-03 01:25:32,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.62 vs. limit=9.29 +2024-08-03 01:25:33,512 INFO [train.py:1114] (2/4) Epoch 1, batch 2350, loss[loss=0.3799, simple_loss=0.4263, pruned_loss=0.1667, over 13550.00 frames. ], tot_loss[loss=0.3983, simple_loss=0.422, pruned_loss=0.187, over 2641475.36 frames. ], batch size: 38, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:25:33,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.62 vs. limit=7.154166666666667 +2024-08-03 01:25:50,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=8616.666666666666, ans=0.125 +2024-08-03 01:26:10,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=8690.0, ans=0.008980434782608696 +2024-08-03 01:26:13,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=8690.0, ans=0.05 +2024-08-03 01:26:14,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=8690.0, ans=0.030458333333333337 +2024-08-03 01:26:19,892 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.95 vs. limit=14.044999999999998 +2024-08-03 01:26:23,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.62 vs. limit=10.772499999999999 +2024-08-03 01:26:31,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=8763.333333333334, ans=0.030152777777777775 +2024-08-03 01:26:32,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8763.333333333334, ans=0.21236666666666665 +2024-08-03 01:26:36,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=8763.333333333334, ans=0.02 +2024-08-03 01:26:39,409 INFO [train.py:1114] (2/4) Epoch 1, batch 2400, loss[loss=0.3633, simple_loss=0.3916, pruned_loss=0.1675, over 13529.00 frames. ], tot_loss[loss=0.3968, simple_loss=0.4217, pruned_loss=0.1857, over 2642998.46 frames. 
], batch size: 35, lr: 4.39e-02, grad_scale: 16.0 +2024-08-03 01:26:44,010 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.562e+02 1.774e+02 2.172e+02 5.136e+02, threshold=3.548e+02, percent-clipped=1.0 +2024-08-03 01:26:54,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=8836.666666666666, ans=0.008948550724637681 +2024-08-03 01:26:58,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=8873.333333333334, ans=0.008940579710144928 +2024-08-03 01:26:59,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.31 vs. limit=10.8275 +2024-08-03 01:27:08,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. limit=10.84125 +2024-08-03 01:27:08,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8910.0, ans=0.125 +2024-08-03 01:27:11,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=8910.0, ans=0.02954166666666667 +2024-08-03 01:27:16,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=8946.666666666666, ans=0.125 +2024-08-03 01:27:26,966 INFO [train.py:1114] (2/4) Epoch 1, batch 2450, loss[loss=0.3998, simple_loss=0.4334, pruned_loss=0.1831, over 13355.00 frames. ], tot_loss[loss=0.397, simple_loss=0.4227, pruned_loss=0.1855, over 2632683.78 frames. ], batch size: 37, lr: 4.39e-02, grad_scale: 16.0 +2024-08-03 01:27:30,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=8983.333333333334, ans=0.125 +2024-08-03 01:27:34,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=8983.333333333334, ans=0.125 +2024-08-03 01:27:37,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9020.0, ans=0.125 +2024-08-03 01:27:49,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=9056.666666666666, ans=0.02893055555555556 +2024-08-03 01:28:12,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9130.0, ans=0.2087 +2024-08-03 01:28:13,999 INFO [train.py:1114] (2/4) Epoch 1, batch 2500, loss[loss=0.3743, simple_loss=0.4204, pruned_loss=0.1641, over 13403.00 frames. ], tot_loss[loss=0.3935, simple_loss=0.421, pruned_loss=0.1828, over 2636995.32 frames. ], batch size: 39, lr: 4.38e-02, grad_scale: 16.0 +2024-08-03 01:28:18,357 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.729e+02 1.988e+02 2.684e+02 1.225e+03, threshold=3.975e+02, percent-clipped=8.0 +2024-08-03 01:28:44,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. 
limit=10.97875 +2024-08-03 01:28:57,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=9313.333333333334, ans=0.125 +2024-08-03 01:28:58,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9313.333333333334, ans=0.20686666666666664 +2024-08-03 01:29:01,504 INFO [train.py:1114] (2/4) Epoch 1, batch 2550, loss[loss=0.3652, simple_loss=0.3936, pruned_loss=0.1684, over 13546.00 frames. ], tot_loss[loss=0.3916, simple_loss=0.42, pruned_loss=0.1815, over 2638842.70 frames. ], batch size: 31, lr: 4.38e-02, grad_scale: 16.0 +2024-08-03 01:29:15,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=9386.666666666666, ans=14.54 +2024-08-03 01:29:16,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=9386.666666666666, ans=0.5714666666666668 +2024-08-03 01:29:38,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9496.666666666666, ans=0.20503333333333335 +2024-08-03 01:29:45,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.72 vs. limit=11.06125 +2024-08-03 01:29:46,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=9496.666666666666, ans=0.125 +2024-08-03 01:29:48,475 INFO [train.py:1114] (2/4) Epoch 1, batch 2600, loss[loss=0.3412, simple_loss=0.3842, pruned_loss=0.1491, over 13572.00 frames. ], tot_loss[loss=0.39, simple_loss=0.4194, pruned_loss=0.1802, over 2638355.98 frames. ], batch size: 36, lr: 4.37e-02, grad_scale: 16.0 +2024-08-03 01:29:50,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=9533.333333333334, ans=0.04949747468305833 +2024-08-03 01:29:52,863 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.601e+02 1.881e+02 2.405e+02 3.900e+02, threshold=3.763e+02, percent-clipped=0.0 +2024-08-03 01:30:03,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=9570.0, ans=0.008789130434782608 +2024-08-03 01:30:04,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=9570.0, ans=0.02679166666666667 +2024-08-03 01:30:33,661 INFO [train.py:1114] (2/4) Epoch 1, batch 2650, loss[loss=0.4323, simple_loss=0.4582, pruned_loss=0.2032, over 13318.00 frames. ], tot_loss[loss=0.3862, simple_loss=0.4172, pruned_loss=0.1775, over 2641585.18 frames. ], batch size: 46, lr: 4.37e-02, grad_scale: 16.0 +2024-08-03 01:30:37,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.52 vs. 
limit=7.886666666666667 +2024-08-03 01:30:42,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=9753.333333333334, ans=0.026027777777777778 +2024-08-03 01:30:43,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=9753.333333333334, ans=0.5586333333333333 +2024-08-03 01:30:58,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=9790.0, ans=0.125 +2024-08-03 01:31:01,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.91 vs. limit=7.456666666666667 +2024-08-03 01:31:06,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=9826.666666666666, ans=9.913333333333334 +2024-08-03 01:31:09,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9863.333333333334, ans=0.125 +2024-08-03 01:31:09,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=9863.333333333334, ans=0.025569444444444447 +2024-08-03 01:31:11,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9863.333333333334, ans=0.20136666666666667 +2024-08-03 01:31:20,736 INFO [train.py:1114] (2/4) Epoch 1, batch 2700, loss[loss=0.3728, simple_loss=0.414, pruned_loss=0.1658, over 13522.00 frames. ], tot_loss[loss=0.3873, simple_loss=0.4185, pruned_loss=0.178, over 2638893.78 frames. ], batch size: 40, lr: 4.36e-02, grad_scale: 16.0 +2024-08-03 01:31:24,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=9900.0, ans=0.02541666666666667 +2024-08-03 01:31:27,492 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.664e+02 1.951e+02 2.469e+02 5.181e+02, threshold=3.901e+02, percent-clipped=9.0 +2024-08-03 01:31:29,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=9900.0, ans=0.02541666666666667 +2024-08-03 01:31:30,693 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.06 vs. 
limit=11.2125 +2024-08-03 01:31:32,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=9900.0, ans=0.5535000000000001 +2024-08-03 01:31:39,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=9936.666666666666, ans=14.9525 +2024-08-03 01:31:54,407 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:31:57,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=9973.333333333334, ans=0.125 +2024-08-03 01:31:57,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=10010.0, ans=0.125 +2024-08-03 01:32:10,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=10046.666666666666, ans=0.5483666666666667 +2024-08-03 01:32:14,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10046.666666666666, ans=0.19953333333333334 +2024-08-03 01:32:17,813 INFO [train.py:1114] (2/4) Epoch 1, batch 2750, loss[loss=0.3705, simple_loss=0.3994, pruned_loss=0.1708, over 13313.00 frames. ], tot_loss[loss=0.3842, simple_loss=0.4158, pruned_loss=0.1763, over 2637133.89 frames. ], batch size: 34, lr: 4.36e-02, grad_scale: 16.0 +2024-08-03 01:32:19,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10083.333333333334, ans=0.19916666666666666 +2024-08-03 01:32:41,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=10156.666666666666, ans=0.5445166666666668 +2024-08-03 01:32:45,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=10156.666666666666, ans=0.125 +2024-08-03 01:32:50,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=4.5235 +2024-08-03 01:32:50,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=10156.666666666666, ans=0.025 +2024-08-03 01:32:56,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=10193.333333333334, ans=0.5432333333333333 +2024-08-03 01:33:05,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=10230.0, ans=0.008645652173913044 +2024-08-03 01:33:10,637 INFO [train.py:1114] (2/4) Epoch 1, batch 2800, loss[loss=0.4556, simple_loss=0.4427, pruned_loss=0.2343, over 9250.00 frames. ], tot_loss[loss=0.3826, simple_loss=0.4144, pruned_loss=0.1754, over 2628411.51 frames. 
], batch size: 97, lr: 4.36e-02, grad_scale: 32.0 +2024-08-03 01:33:13,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=10266.666666666666, ans=0.035 +2024-08-03 01:33:15,322 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.678e+02 2.174e+02 2.677e+02 5.163e+02, threshold=4.348e+02, percent-clipped=2.0 +2024-08-03 01:33:16,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=10266.666666666666, ans=0.5406666666666667 +2024-08-03 01:33:19,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.27 vs. limit=15.2275 +2024-08-03 01:33:28,608 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.75 vs. limit=15.254999999999999 +2024-08-03 01:33:33,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=10340.0, ans=0.025 +2024-08-03 01:33:34,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=10340.0, ans=10.0 +2024-08-03 01:33:39,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=10376.666666666666, ans=0.02343055555555556 +2024-08-03 01:33:45,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=11.39125 +2024-08-03 01:35:11,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.39 vs. limit=15.309999999999999 +2024-08-03 01:35:14,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.70 vs. limit=7.6033333333333335 +2024-08-03 01:35:19,833 INFO [train.py:1114] (2/4) Epoch 1, batch 2850, loss[loss=0.3388, simple_loss=0.3878, pruned_loss=0.1449, over 13364.00 frames. ], tot_loss[loss=0.3811, simple_loss=0.4136, pruned_loss=0.1742, over 2622432.04 frames. ], batch size: 35, lr: 4.35e-02, grad_scale: 32.0 +2024-08-03 01:35:36,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.25 vs. limit=15.3375 +2024-08-03 01:35:51,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=10523.333333333334, ans=0.14476666666666665 +2024-08-03 01:35:53,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=10523.333333333334, ans=0.008581884057971015 +2024-08-03 01:35:56,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=8.209333333333333 +2024-08-03 01:36:13,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=10596.666666666666, ans=0.5291166666666667 +2024-08-03 01:36:14,796 INFO [train.py:1114] (2/4) Epoch 1, batch 2900, loss[loss=0.3833, simple_loss=0.4079, pruned_loss=0.1793, over 13363.00 frames. 
], tot_loss[loss=0.3807, simple_loss=0.4146, pruned_loss=0.1734, over 2632578.76 frames. ], batch size: 36, lr: 4.35e-02, grad_scale: 32.0 +2024-08-03 01:36:19,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.316e+02 1.668e+02 1.982e+02 2.661e+02 5.002e+02, threshold=3.964e+02, percent-clipped=4.0 +2024-08-03 01:36:29,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=10670.0, ans=0.022208333333333337 +2024-08-03 01:36:34,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=10706.666666666666, ans=0.125 +2024-08-03 01:36:36,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=10706.666666666666, ans=0.125 +2024-08-03 01:36:41,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=15.5575 +2024-08-03 01:36:41,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.49 vs. limit=15.5575 +2024-08-03 01:36:43,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=10743.333333333334, ans=0.125 +2024-08-03 01:36:51,754 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.57 vs. limit=7.695 +2024-08-03 01:37:00,014 INFO [train.py:1114] (2/4) Epoch 1, batch 2950, loss[loss=0.3718, simple_loss=0.4026, pruned_loss=0.1705, over 13353.00 frames. ], tot_loss[loss=0.3773, simple_loss=0.4118, pruned_loss=0.1715, over 2630644.96 frames. ], batch size: 34, lr: 4.34e-02, grad_scale: 32.0 +2024-08-03 01:37:45,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10853.333333333334, ans=0.19146666666666667 +2024-08-03 01:38:09,714 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.91 vs. limit=7.7316666666666665 +2024-08-03 01:38:21,974 INFO [train.py:1114] (2/4) Epoch 1, batch 3000, loss[loss=0.3581, simple_loss=0.409, pruned_loss=0.1536, over 13561.00 frames. ], tot_loss[loss=0.3747, simple_loss=0.4101, pruned_loss=0.1696, over 2630562.86 frames. ], batch size: 37, lr: 4.34e-02, grad_scale: 32.0 +2024-08-03 01:38:21,974 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 01:39:12,983 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=0.2888, simple_loss=0.3696, pruned_loss=0.104, over 944034.00 frames. 
+2024-08-03 01:39:12,984 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 01:39:17,657 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.298e+02 1.604e+02 1.963e+02 2.352e+02 4.798e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-03 01:39:17,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=11000.0, ans=0.125 +2024-08-03 01:39:23,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=11036.666666666666, ans=0.025 +2024-08-03 01:39:27,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=11036.666666666666, ans=0.0 +2024-08-03 01:39:28,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=11.63875 +2024-08-03 01:39:29,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=11036.666666666666, ans=0.008470289855072465 +2024-08-03 01:39:37,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=11073.333333333334, ans=0.04949747468305833 +2024-08-03 01:39:40,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=11110.0, ans=0.125 +2024-08-03 01:39:40,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=11110.0, ans=0.008454347826086957 +2024-08-03 01:40:03,051 INFO [train.py:1114] (2/4) Epoch 1, batch 3050, loss[loss=0.328, simple_loss=0.3849, pruned_loss=0.1356, over 13526.00 frames. ], tot_loss[loss=0.3741, simple_loss=0.41, pruned_loss=0.1691, over 2627379.43 frames. ], batch size: 35, lr: 4.33e-02, grad_scale: 32.0 +2024-08-03 01:40:14,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=11183.333333333334, ans=0.125 +2024-08-03 01:40:19,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=11220.0, ans=0.0 +2024-08-03 01:40:29,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.99 vs. limit=11.721250000000001 +2024-08-03 01:40:30,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.81 vs. limit=11.721250000000001 +2024-08-03 01:40:34,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11256.666666666666, ans=0.125 +2024-08-03 01:40:43,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=11293.333333333334, ans=0.019611111111111107 +2024-08-03 01:40:55,298 INFO [train.py:1114] (2/4) Epoch 1, batch 3100, loss[loss=0.3972, simple_loss=0.4351, pruned_loss=0.1796, over 13309.00 frames. ], tot_loss[loss=0.374, simple_loss=0.4098, pruned_loss=0.1691, over 2626458.81 frames. 
], batch size: 46, lr: 4.33e-02, grad_scale: 32.0 +2024-08-03 01:40:59,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.612e+02 1.933e+02 2.547e+02 5.853e+02, threshold=3.866e+02, percent-clipped=4.0 +2024-08-03 01:41:08,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=11403.333333333334, ans=0.125 +2024-08-03 01:41:09,201 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:41:11,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.01 vs. limit=11.776250000000001 +2024-08-03 01:41:14,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.11 vs. limit=16.052500000000002 +2024-08-03 01:41:16,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=11440.0, ans=0.04949747468305833 +2024-08-03 01:41:22,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.17 vs. limit=4.716 +2024-08-03 01:41:31,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.46 vs. limit=16.08 +2024-08-03 01:41:49,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=11513.333333333334, ans=0.125 +2024-08-03 01:41:54,260 INFO [train.py:1114] (2/4) Epoch 1, batch 3150, loss[loss=0.3803, simple_loss=0.4175, pruned_loss=0.1715, over 13044.00 frames. ], tot_loss[loss=0.3713, simple_loss=0.4082, pruned_loss=0.1672, over 2628048.76 frames. ], batch size: 48, lr: 4.32e-02, grad_scale: 32.0 +2024-08-03 01:41:54,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=11550.0, ans=0.125 +2024-08-03 01:42:05,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.84 vs. limit=7.8875 +2024-08-03 01:42:11,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=11586.666666666666, ans=0.18413333333333334 +2024-08-03 01:42:44,269 INFO [train.py:1114] (2/4) Epoch 1, batch 3200, loss[loss=0.3527, simple_loss=0.4013, pruned_loss=0.1521, over 13546.00 frames. ], tot_loss[loss=0.3677, simple_loss=0.4056, pruned_loss=0.1649, over 2633730.60 frames. 
], batch size: 37, lr: 4.32e-02, grad_scale: 32.0 +2024-08-03 01:42:48,503 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.624e+02 1.966e+02 2.680e+02 4.372e+02, threshold=3.932e+02, percent-clipped=2.0 +2024-08-03 01:43:00,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=11770.0, ans=0.37655000000000005 +2024-08-03 01:43:01,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11770.0, ans=0.125 +2024-08-03 01:43:04,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=11770.0, ans=0.48805000000000004 +2024-08-03 01:43:06,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=11806.666666666666, ans=0.48676666666666674 +2024-08-03 01:43:18,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=11843.333333333334, ans=0.37765000000000004 +2024-08-03 01:43:27,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11880.0, ans=0.125 +2024-08-03 01:43:33,498 INFO [train.py:1114] (2/4) Epoch 1, batch 3250, loss[loss=0.3705, simple_loss=0.4182, pruned_loss=0.1614, over 13387.00 frames. ], tot_loss[loss=0.365, simple_loss=0.4043, pruned_loss=0.1628, over 2637965.45 frames. ], batch size: 38, lr: 4.31e-02, grad_scale: 32.0 +2024-08-03 01:43:38,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=11916.666666666666, ans=0.0 +2024-08-03 01:43:38,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.07 vs. limit=16.4375 +2024-08-03 01:44:18,563 INFO [train.py:1114] (2/4) Epoch 1, batch 3300, loss[loss=0.3984, simple_loss=0.4275, pruned_loss=0.1847, over 12845.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.4024, pruned_loss=0.1616, over 2639630.43 frames. ], batch size: 52, lr: 4.31e-02, grad_scale: 32.0 +2024-08-03 01:44:22,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.557e+02 1.877e+02 2.344e+02 4.156e+02, threshold=3.753e+02, percent-clipped=2.0 +2024-08-03 01:44:32,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=12136.666666666666, ans=0.17863333333333334 +2024-08-03 01:44:39,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.34 vs. limit=12.065000000000001 +2024-08-03 01:44:45,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.02 vs. limit=12.07875 +2024-08-03 01:44:47,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=12210.0, ans=0.125 +2024-08-03 01:44:51,587 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:45:02,676 INFO [train.py:1114] (2/4) Epoch 1, batch 3350, loss[loss=0.3582, simple_loss=0.4102, pruned_loss=0.1531, over 13060.00 frames. 
], tot_loss[loss=0.3649, simple_loss=0.404, pruned_loss=0.1629, over 2628880.14 frames. ], batch size: 48, lr: 4.30e-02, grad_scale: 32.0 +2024-08-03 01:45:09,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12283.333333333334, ans=0.125 +2024-08-03 01:45:14,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.20 vs. limit=12.120000000000001 +2024-08-03 01:45:23,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=12356.666666666666, ans=0.125 +2024-08-03 01:45:47,885 INFO [train.py:1114] (2/4) Epoch 1, batch 3400, loss[loss=0.3511, simple_loss=0.3841, pruned_loss=0.1591, over 13548.00 frames. ], tot_loss[loss=0.3641, simple_loss=0.4034, pruned_loss=0.1624, over 2624319.33 frames. ], batch size: 31, lr: 4.29e-02, grad_scale: 32.0 +2024-08-03 01:45:50,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=12466.666666666666, ans=0.125 +2024-08-03 01:45:52,235 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.662e+02 2.017e+02 2.620e+02 5.936e+02, threshold=4.033e+02, percent-clipped=10.0 +2024-08-03 01:45:59,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.12 vs. limit=16.877499999999998 +2024-08-03 01:46:01,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=12503.333333333334, ans=0.125 +2024-08-03 01:46:20,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=12503.333333333334, ans=0.125 +2024-08-03 01:46:40,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=12540.0, ans=0.008143478260869565 +2024-08-03 01:46:42,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=12540.0, ans=0.04949747468305833 +2024-08-03 01:46:43,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=12540.0, ans=0.4611 +2024-08-03 01:46:45,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=12576.666666666666, ans=0.125 +2024-08-03 01:47:02,941 INFO [train.py:1114] (2/4) Epoch 1, batch 3450, loss[loss=0.4221, simple_loss=0.4524, pruned_loss=0.1959, over 12826.00 frames. ], tot_loss[loss=0.3634, simple_loss=0.4029, pruned_loss=0.162, over 2629037.63 frames. 
], batch size: 52, lr: 4.29e-02, grad_scale: 32.0 +2024-08-03 01:47:19,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=12686.666666666666, ans=0.025 +2024-08-03 01:47:23,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=12723.333333333334, ans=0.025 +2024-08-03 01:47:25,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=12723.333333333334, ans=0.013652777777777778 +2024-08-03 01:47:36,503 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=1.360e-02 +2024-08-03 01:47:41,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=12796.666666666666, ans=0.125 +2024-08-03 01:47:45,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.70 vs. limit=4.9195 +2024-08-03 01:47:55,450 INFO [train.py:1114] (2/4) Epoch 1, batch 3500, loss[loss=0.34, simple_loss=0.3822, pruned_loss=0.149, over 13529.00 frames. ], tot_loss[loss=0.3627, simple_loss=0.4016, pruned_loss=0.1619, over 2629667.61 frames. ], batch size: 34, lr: 4.28e-02, grad_scale: 32.0 +2024-08-03 01:47:59,796 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.825e+02 2.381e+02 4.772e+02, threshold=3.650e+02, percent-clipped=2.0 +2024-08-03 01:47:59,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=12833.333333333334, ans=0.125 +2024-08-03 01:48:06,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=12870.0, ans=0.125 +2024-08-03 01:48:07,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=12870.0, ans=0.125 +2024-08-03 01:48:40,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=12943.333333333334, ans=0.025 +2024-08-03 01:48:41,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=12980.0, ans=0.125 +2024-08-03 01:48:46,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=12980.0, ans=0.125 +2024-08-03 01:48:51,944 INFO [train.py:1114] (2/4) Epoch 1, batch 3550, loss[loss=0.3392, simple_loss=0.3929, pruned_loss=0.1428, over 12491.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.4042, pruned_loss=0.1632, over 2628550.23 frames. ], batch size: 58, lr: 4.28e-02, grad_scale: 32.0 +2024-08-03 01:48:59,272 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:49:27,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.00 vs. 
limit=8.281666666666666 +2024-08-03 01:49:35,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=13163.333333333334, ans=0.025 +2024-08-03 01:49:47,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13163.333333333334, ans=0.125 +2024-08-03 01:49:51,065 INFO [train.py:1114] (2/4) Epoch 1, batch 3600, loss[loss=0.4518, simple_loss=0.4596, pruned_loss=0.222, over 8824.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.4106, pruned_loss=0.1704, over 2488730.95 frames. ], batch size: 96, lr: 4.27e-02, grad_scale: 32.0 +2024-08-03 01:49:55,461 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.481e+02 1.802e+02 2.019e+02 3.446e+02, threshold=3.604e+02, percent-clipped=0.0 +2024-08-03 01:50:04,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=13236.666666666666, ans=0.125 +2024-08-03 01:50:05,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13236.666666666666, ans=0.125 +2024-08-03 01:50:07,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13236.666666666666, ans=0.16763333333333333 +2024-08-03 01:50:08,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=13236.666666666666, ans=0.011513888888888893 +2024-08-03 01:50:09,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=13236.666666666666, ans=0.4367166666666667 +2024-08-03 01:50:11,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13236.666666666666, ans=0.16763333333333333 +2024-08-03 01:50:16,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=13273.333333333334, ans=0.011361111111111107 +2024-08-03 01:50:28,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=13273.333333333334, ans=0.025 +2024-08-03 01:52:08,730 INFO [train.py:1114] (2/4) Epoch 2, batch 0, loss[loss=0.3513, simple_loss=0.3966, pruned_loss=0.153, over 13342.00 frames. ], tot_loss[loss=0.3513, simple_loss=0.3966, pruned_loss=0.153, over 13342.00 frames. ], batch size: 33, lr: 4.19e-02, grad_scale: 32.0 +2024-08-03 01:52:08,731 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 01:52:18,753 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.2954, simple_loss=0.3785, pruned_loss=0.1062, over 944034.00 frames. +2024-08-03 01:52:18,754 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 01:52:38,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=13420.0, ans=0.4303 +2024-08-03 01:52:41,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.94 vs. 
limit=12.532499999999999 +2024-08-03 01:52:45,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=13420.0, ans=0.125 +2024-08-03 01:52:47,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13456.666666666666, ans=0.125 +2024-08-03 01:52:51,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=13456.666666666666, ans=0.125 +2024-08-03 01:53:00,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13493.333333333334, ans=0.16506666666666667 +2024-08-03 01:53:05,770 INFO [train.py:1114] (2/4) Epoch 2, batch 50, loss[loss=0.3363, simple_loss=0.3733, pruned_loss=0.1496, over 13401.00 frames. ], tot_loss[loss=0.3688, simple_loss=0.4081, pruned_loss=0.1648, over 578281.17 frames. ], batch size: 32, lr: 4.18e-02, grad_scale: 16.0 +2024-08-03 01:53:12,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.14 vs. limit=17.6475 +2024-08-03 01:53:13,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=13530.0, ans=0.125 +2024-08-03 01:53:19,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=13566.666666666666, ans=0.010138888888888892 +2024-08-03 01:53:25,127 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.505e+02 1.833e+02 2.741e+02 6.945e+02, threshold=3.667e+02, percent-clipped=7.0 +2024-08-03 01:53:33,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13603.333333333334, ans=0.16396666666666665 +2024-08-03 01:53:55,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=13676.666666666666, ans=0.125 +2024-08-03 01:53:56,980 INFO [train.py:1114] (2/4) Epoch 2, batch 100, loss[loss=0.3448, simple_loss=0.3824, pruned_loss=0.1536, over 13550.00 frames. ], tot_loss[loss=0.364, simple_loss=0.4058, pruned_loss=0.1611, over 1025710.49 frames. ], batch size: 35, lr: 4.17e-02, grad_scale: 16.0 +2024-08-03 01:54:02,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=13713.333333333334, ans=0.125 +2024-08-03 01:54:04,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=13713.333333333334, ans=0.009527777777777774 +2024-08-03 01:54:07,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.83 vs. limit=17.8125 +2024-08-03 01:54:24,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.30 vs. 
limit=5.068 +2024-08-03 01:54:30,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=13823.333333333334, ans=0.05 +2024-08-03 01:54:32,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=13823.333333333334, ans=0.007864492753623189 +2024-08-03 01:54:42,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=13860.0, ans=0.00891666666666667 +2024-08-03 01:54:44,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13860.0, ans=0.16140000000000002 +2024-08-03 01:54:48,073 INFO [train.py:1114] (2/4) Epoch 2, batch 150, loss[loss=0.3049, simple_loss=0.3553, pruned_loss=0.1273, over 13429.00 frames. ], tot_loss[loss=0.3577, simple_loss=0.4009, pruned_loss=0.1573, over 1387044.40 frames. ], batch size: 32, lr: 4.17e-02, grad_scale: 16.0 +2024-08-03 01:54:58,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=13933.333333333334, ans=0.41233333333333333 +2024-08-03 01:55:03,038 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.519e+02 1.772e+02 2.227e+02 3.651e+02, threshold=3.544e+02, percent-clipped=0.0 +2024-08-03 01:55:17,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=12.752500000000001 +2024-08-03 01:55:18,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=14006.666666666666, ans=0.125 +2024-08-03 01:55:19,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=14006.666666666666, ans=0.125 +2024-08-03 01:55:27,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=12.76625 +2024-08-03 01:55:34,714 INFO [train.py:1114] (2/4) Epoch 2, batch 200, loss[loss=0.3812, simple_loss=0.4127, pruned_loss=0.1748, over 12538.00 frames. ], tot_loss[loss=0.3555, simple_loss=0.3984, pruned_loss=0.1563, over 1665598.65 frames. ], batch size: 58, lr: 4.16e-02, grad_scale: 16.0 +2024-08-03 01:55:45,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=14116.666666666666, ans=0.125 +2024-08-03 01:55:51,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.77 vs. limit=18.0875 +2024-08-03 01:55:55,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=9.661333333333333 +2024-08-03 01:56:23,335 INFO [train.py:1114] (2/4) Epoch 2, batch 250, loss[loss=0.336, simple_loss=0.3961, pruned_loss=0.1379, over 13358.00 frames. ], tot_loss[loss=0.3531, simple_loss=0.3967, pruned_loss=0.1547, over 1884466.55 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-03 01:56:27,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.50 vs. 
limit=12.848749999999999 +2024-08-03 01:56:31,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=14263.333333333334, ans=0.4007833333333333 +2024-08-03 01:56:38,385 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.594e+02 1.964e+02 2.594e+02 6.291e+02, threshold=3.929e+02, percent-clipped=8.0 +2024-08-03 01:56:43,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=14336.666666666666, ans=0.125 +2024-08-03 01:56:45,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.14 vs. limit=12.876249999999999 +2024-08-03 01:57:12,757 INFO [train.py:1114] (2/4) Epoch 2, batch 300, loss[loss=0.3426, simple_loss=0.3906, pruned_loss=0.1473, over 13437.00 frames. ], tot_loss[loss=0.3518, simple_loss=0.3956, pruned_loss=0.154, over 2052396.59 frames. ], batch size: 42, lr: 4.15e-02, grad_scale: 16.0 +2024-08-03 01:57:16,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.75 vs. limit=9.778666666666666 +2024-08-03 01:57:25,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. limit=5.172499999999999 +2024-08-03 01:57:43,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=14556.666666666666, ans=0.125 +2024-08-03 01:58:01,727 INFO [train.py:1114] (2/4) Epoch 2, batch 350, loss[loss=0.2825, simple_loss=0.3327, pruned_loss=0.1162, over 13603.00 frames. ], tot_loss[loss=0.3513, simple_loss=0.3955, pruned_loss=0.1536, over 2182773.42 frames. ], batch size: 33, lr: 4.15e-02, grad_scale: 16.0 +2024-08-03 01:58:10,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.70 vs. limit=12.98625 +2024-08-03 01:58:12,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=14630.0, ans=0.125 +2024-08-03 01:58:52,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14666.666666666666, ans=0.125 +2024-08-03 01:58:56,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=14666.666666666666, ans=0.3866666666666667 +2024-08-03 01:58:58,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.602e+02 1.924e+02 2.648e+02 5.206e+02, threshold=3.847e+02, percent-clipped=6.0 +2024-08-03 01:59:05,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.33 vs. limit=13.01375 +2024-08-03 01:59:06,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=14703.333333333334, ans=0.005402777777777777 +2024-08-03 01:59:15,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. 
limit=13.0275 +2024-08-03 01:59:23,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.68 vs. limit=9.910666666666668 +2024-08-03 01:59:29,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=14813.333333333334, ans=0.025 +2024-08-03 01:59:29,790 INFO [train.py:1114] (2/4) Epoch 2, batch 400, loss[loss=0.3683, simple_loss=0.4118, pruned_loss=0.1624, over 13351.00 frames. ], tot_loss[loss=0.3502, simple_loss=0.3944, pruned_loss=0.153, over 2286835.11 frames. ], batch size: 37, lr: 4.14e-02, grad_scale: 32.0 +2024-08-03 02:03:59,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.12 vs. limit=12.406666666666666 +2024-08-03 02:04:11,485 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:04:13,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=14850.0, ans=0.0 +2024-08-03 02:06:33,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=14996.666666666666, ans=0.004180555555555555 +2024-08-03 02:06:33,899 INFO [train.py:1114] (2/4) Epoch 2, batch 450, loss[loss=0.3546, simple_loss=0.409, pruned_loss=0.1501, over 13546.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3936, pruned_loss=0.1523, over 2360517.34 frames. ], batch size: 38, lr: 4.13e-02, grad_scale: 32.0 +2024-08-03 02:06:34,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=14996.666666666666, ans=0.37511666666666676 +2024-08-03 02:06:45,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=15033.333333333334, ans=0.0 +2024-08-03 02:06:48,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.511e+02 1.857e+02 2.288e+02 3.385e+02, threshold=3.714e+02, percent-clipped=0.0 +2024-08-03 02:07:02,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=15070.0, ans=0.125 +2024-08-03 02:07:14,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=15106.666666666666, ans=0.125 +2024-08-03 02:07:15,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=15106.666666666666, ans=0.125 +2024-08-03 02:07:15,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15106.666666666666, ans=0.14893333333333333 +2024-08-03 02:07:28,612 INFO [train.py:1114] (2/4) Epoch 2, batch 500, loss[loss=0.3895, simple_loss=0.4279, pruned_loss=0.1755, over 13436.00 frames. ], tot_loss[loss=0.3459, simple_loss=0.3912, pruned_loss=0.1504, over 2426361.30 frames. 
], batch size: 43, lr: 4.13e-02, grad_scale: 32.0 +2024-08-03 02:07:30,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15180.0, ans=0.1482 +2024-08-03 02:07:39,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=13.192499999999999 +2024-08-03 02:07:40,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=15216.666666666666, ans=0.0 +2024-08-03 02:07:44,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=15216.666666666666, ans=0.125 +2024-08-03 02:07:51,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=15253.333333333334, ans=0.007553623188405797 +2024-08-03 02:07:52,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.82 vs. limit=18.939999999999998 +2024-08-03 02:07:53,174 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=13.219999999999999 +2024-08-03 02:07:54,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=15253.333333333334, ans=0.025 +2024-08-03 02:08:00,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=15290.0, ans=0.36485 +2024-08-03 02:08:02,608 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=5.2935 +2024-08-03 02:08:04,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.36 vs. limit=8.8225 +2024-08-03 02:08:06,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=15290.0, ans=0.007545652173913044 +2024-08-03 02:08:12,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.26 vs. limit=13.247499999999999 +2024-08-03 02:08:13,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=15326.666666666666, ans=0.125 +2024-08-03 02:08:18,927 INFO [train.py:1114] (2/4) Epoch 2, batch 550, loss[loss=0.3374, simple_loss=0.3897, pruned_loss=0.1425, over 13048.00 frames. ], tot_loss[loss=0.3455, simple_loss=0.3908, pruned_loss=0.1501, over 2468169.43 frames. 
], batch size: 48, lr: 4.12e-02, grad_scale: 32.0 +2024-08-03 02:08:19,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=15363.333333333334, ans=0.025 +2024-08-03 02:08:33,978 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.486e+02 1.782e+02 2.081e+02 4.201e+02, threshold=3.563e+02, percent-clipped=2.0 +2024-08-03 02:08:37,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=15436.666666666666, ans=0.3597166666666667 +2024-08-03 02:08:49,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=15473.333333333334, ans=0.0021944444444444433 +2024-08-03 02:08:59,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=15473.333333333334, ans=0.0021944444444444433 +2024-08-03 02:09:00,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=15510.0, ans=0.9051 +2024-08-03 02:09:00,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.59 vs. limit=19.1325 +2024-08-03 02:09:02,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=15510.0, ans=0.025 +2024-08-03 02:09:04,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.31 vs. limit=19.1325 +2024-08-03 02:09:20,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=15510.0, ans=0.125 +2024-08-03 02:09:22,856 INFO [train.py:1114] (2/4) Epoch 2, batch 600, loss[loss=0.3945, simple_loss=0.4308, pruned_loss=0.1792, over 13296.00 frames. ], tot_loss[loss=0.344, simple_loss=0.39, pruned_loss=0.149, over 2507457.48 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0 +2024-08-03 02:09:25,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=15546.666666666666, ans=0.0018888888888888913 +2024-08-03 02:09:33,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15583.333333333334, ans=0.14416666666666667 +2024-08-03 02:09:35,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15583.333333333334, ans=0.14416666666666667 +2024-08-03 02:09:58,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=15656.666666666666, ans=0.125 +2024-08-03 02:10:01,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=15693.333333333334, ans=0.125 +2024-08-03 02:10:11,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=15730.0, ans=0.025 +2024-08-03 02:10:12,465 INFO [train.py:1114] (2/4) Epoch 2, batch 650, loss[loss=0.3461, simple_loss=0.395, pruned_loss=0.1487, over 13548.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3885, pruned_loss=0.1477, over 2543152.51 frames. 
], batch size: 37, lr: 4.11e-02, grad_scale: 32.0 +2024-08-03 02:10:28,716 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.454e+02 1.669e+02 2.017e+02 2.893e+02, threshold=3.339e+02, percent-clipped=0.0 +2024-08-03 02:11:09,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=15876.666666666666, ans=0.3443166666666667 +2024-08-03 02:11:13,922 INFO [train.py:1114] (2/4) Epoch 2, batch 700, loss[loss=0.2788, simple_loss=0.3413, pruned_loss=0.1082, over 13526.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3891, pruned_loss=0.1478, over 2564329.21 frames. ], batch size: 35, lr: 4.11e-02, grad_scale: 8.0 +2024-08-03 02:11:18,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=15913.333333333334, ans=0.025 +2024-08-03 02:11:20,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=15913.333333333334, ans=0.14086666666666667 +2024-08-03 02:11:40,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15986.666666666666, ans=0.14013333333333333 +2024-08-03 02:11:52,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.18 vs. limit=19.5175 +2024-08-03 02:11:57,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16060.0, ans=0.0 +2024-08-03 02:12:01,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=16060.0, ans=0.125 +2024-08-03 02:12:04,800 INFO [train.py:1114] (2/4) Epoch 2, batch 750, loss[loss=0.3113, simple_loss=0.3771, pruned_loss=0.1227, over 13375.00 frames. ], tot_loss[loss=0.3399, simple_loss=0.3874, pruned_loss=0.1462, over 2582877.24 frames. ], batch size: 37, lr: 4.10e-02, grad_scale: 8.0 +2024-08-03 02:12:21,726 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.638e+02 1.990e+02 2.530e+02 5.439e+02, threshold=3.980e+02, percent-clipped=7.0 +2024-08-03 02:12:35,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. limit=13.5775 +2024-08-03 02:12:40,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.30 vs. limit=10.482666666666667 +2024-08-03 02:12:45,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.14 vs. limit=19.682499999999997 +2024-08-03 02:12:46,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=16243.333333333334, ans=0.05 +2024-08-03 02:12:47,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=16243.333333333334, ans=0.125 +2024-08-03 02:13:00,988 INFO [train.py:1114] (2/4) Epoch 2, batch 800, loss[loss=0.3055, simple_loss=0.3515, pruned_loss=0.1298, over 13329.00 frames. ], tot_loss[loss=0.3395, simple_loss=0.3872, pruned_loss=0.1459, over 2597838.59 frames. 
], batch size: 33, lr: 4.09e-02, grad_scale: 16.0 +2024-08-03 02:13:17,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=16316.666666666666, ans=0.0 +2024-08-03 02:13:34,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16390.0, ans=0.125 +2024-08-03 02:13:41,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=16390.0, ans=5.4585 +2024-08-03 02:13:52,944 INFO [train.py:1114] (2/4) Epoch 2, batch 850, loss[loss=0.3004, simple_loss=0.3668, pruned_loss=0.1171, over 13326.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3857, pruned_loss=0.1446, over 2609561.65 frames. ], batch size: 40, lr: 4.09e-02, grad_scale: 16.0 +2024-08-03 02:14:12,142 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.467e+02 1.720e+02 2.030e+02 3.514e+02, threshold=3.439e+02, percent-clipped=0.0 +2024-08-03 02:14:16,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16536.666666666668, ans=0.125 +2024-08-03 02:14:23,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=16573.333333333332, ans=0.025 +2024-08-03 02:14:30,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16573.333333333332, ans=0.13426666666666667 +2024-08-03 02:14:42,214 INFO [train.py:1114] (2/4) Epoch 2, batch 900, loss[loss=0.3064, simple_loss=0.357, pruned_loss=0.1279, over 13381.00 frames. ], tot_loss[loss=0.3379, simple_loss=0.386, pruned_loss=0.1449, over 2613360.55 frames. ], batch size: 33, lr: 4.08e-02, grad_scale: 16.0 +2024-08-03 02:14:56,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=16683.333333333332, ans=0.025 +2024-08-03 02:14:57,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.32 vs. limit=20.0125 +2024-08-03 02:15:06,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.38 vs. limit=9.18 +2024-08-03 02:15:09,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.37 vs. limit=20.04 +2024-08-03 02:15:23,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.54 vs. limit=20.095 +2024-08-03 02:15:28,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=16793.333333333332, ans=0.025 +2024-08-03 02:15:30,753 INFO [train.py:1114] (2/4) Epoch 2, batch 950, loss[loss=0.2881, simple_loss=0.3418, pruned_loss=0.1172, over 13522.00 frames. ], tot_loss[loss=0.3374, simple_loss=0.3859, pruned_loss=0.1445, over 2613351.24 frames. 
], batch size: 34, lr: 4.08e-02, grad_scale: 16.0 +2024-08-03 02:15:32,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=16830.0, ans=0.025 +2024-08-03 02:15:36,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.52 vs. limit=7.366 +2024-08-03 02:15:42,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=16866.666666666668, ans=0.30966666666666676 +2024-08-03 02:15:44,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=16866.666666666668, ans=0.125 +2024-08-03 02:15:45,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=16866.666666666668, ans=0.07 +2024-08-03 02:15:49,944 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.484e+02 1.735e+02 2.135e+02 4.344e+02, threshold=3.469e+02, percent-clipped=2.0 +2024-08-03 02:15:52,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=16903.333333333332, ans=0.09899494936611666 +2024-08-03 02:15:58,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=16903.333333333332, ans=0.30838333333333345 +2024-08-03 02:15:58,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16903.333333333332, ans=0.125 +2024-08-03 02:15:59,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.09 vs. limit=13.83875 +2024-08-03 02:16:04,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.68 vs. limit=13.47 +2024-08-03 02:16:12,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16976.666666666668, ans=0.1302333333333333 +2024-08-03 02:16:19,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.83 vs. limit=9.253333333333334 +2024-08-03 02:16:20,063 INFO [train.py:1114] (2/4) Epoch 2, batch 1000, loss[loss=0.3136, simple_loss=0.3734, pruned_loss=0.1269, over 13363.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3862, pruned_loss=0.1444, over 2612400.44 frames. ], batch size: 35, lr: 4.07e-02, grad_scale: 16.0 +2024-08-03 02:16:28,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=13.879999999999999 +2024-08-03 02:16:46,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=17086.666666666668, ans=0.3019666666666667 +2024-08-03 02:17:08,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=17160.0, ans=0.125 +2024-08-03 02:17:10,911 INFO [train.py:1114] (2/4) Epoch 2, batch 1050, loss[loss=0.3471, simple_loss=0.4, pruned_loss=0.1471, over 13584.00 frames. ], tot_loss[loss=0.336, simple_loss=0.3847, pruned_loss=0.1436, over 2616837.06 frames. 
], batch size: 39, lr: 4.06e-02, grad_scale: 16.0 +2024-08-03 02:17:26,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=17233.333333333332, ans=0.125 +2024-08-03 02:17:27,501 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.473e+02 1.878e+02 2.204e+02 3.880e+02, threshold=3.755e+02, percent-clipped=2.0 +2024-08-03 02:18:04,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=17343.333333333332, ans=0.125 +2024-08-03 02:18:05,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17343.333333333332, ans=0.1265666666666667 +2024-08-03 02:18:06,196 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.86 vs. limit=20.5075 +2024-08-03 02:18:11,161 INFO [train.py:1114] (2/4) Epoch 2, batch 1100, loss[loss=0.3487, simple_loss=0.3979, pruned_loss=0.1497, over 13584.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3838, pruned_loss=0.1427, over 2620461.45 frames. ], batch size: 36, lr: 4.06e-02, grad_scale: 16.0 +2024-08-03 02:18:17,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=17380.0, ans=0.0 +2024-08-03 02:18:19,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17416.666666666668, ans=0.12583333333333332 +2024-08-03 02:18:31,196 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=14.044999999999998 +2024-08-03 02:18:32,812 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=14.044999999999998 +2024-08-03 02:18:59,084 INFO [train.py:1114] (2/4) Epoch 2, batch 1150, loss[loss=0.3315, simple_loss=0.3815, pruned_loss=0.1407, over 13559.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.3836, pruned_loss=0.1429, over 2619846.73 frames. ], batch size: 36, lr: 4.05e-02, grad_scale: 16.0 +2024-08-03 02:19:10,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.58 vs. limit=13.8 +2024-08-03 02:19:10,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.69 vs. limit=14.1 +2024-08-03 02:19:16,031 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.513e+02 2.017e+02 2.624e+02 5.380e+02, threshold=4.034e+02, percent-clipped=4.0 +2024-08-03 02:19:44,889 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:19:46,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.37 vs. 
limit=20.755000000000003 +2024-08-03 02:19:49,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=17673.333333333332, ans=0.007027536231884058 +2024-08-03 02:20:03,154 INFO [train.py:1114] (2/4) Epoch 2, batch 1200, loss[loss=0.3554, simple_loss=0.4027, pruned_loss=0.154, over 13574.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3841, pruned_loss=0.1426, over 2617360.65 frames. ], batch size: 39, lr: 4.04e-02, grad_scale: 32.0 +2024-08-03 02:20:24,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=17783.333333333332, ans=0.0 +2024-08-03 02:20:28,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=17820.0, ans=0.2763 +2024-08-03 02:20:29,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=17820.0, ans=0.0069956521739130435 +2024-08-03 02:20:29,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=17820.0, ans=0.0069956521739130435 +2024-08-03 02:20:40,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=17856.666666666668, ans=0.0 +2024-08-03 02:20:55,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.55 vs. limit=14.21 +2024-08-03 02:20:56,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=17930.0, ans=0.0 +2024-08-03 02:20:56,920 INFO [train.py:1114] (2/4) Epoch 2, batch 1250, loss[loss=0.3147, simple_loss=0.3742, pruned_loss=0.1276, over 13448.00 frames. ], tot_loss[loss=0.3331, simple_loss=0.3833, pruned_loss=0.1414, over 2629171.09 frames. ], batch size: 42, lr: 4.04e-02, grad_scale: 32.0 +2024-08-03 02:21:08,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.72 vs. limit=13.965 +2024-08-03 02:21:18,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17966.666666666668, ans=0.12033333333333332 +2024-08-03 02:21:24,842 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.393e+02 1.566e+02 1.875e+02 3.241e+02, threshold=3.132e+02, percent-clipped=0.0 +2024-08-03 02:21:34,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18003.333333333332, ans=0.125 +2024-08-03 02:21:41,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=18040.0, ans=0.26860000000000006 +2024-08-03 02:21:47,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18076.666666666668, ans=0.125 +2024-08-03 02:21:50,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18076.666666666668, ans=0.125 +2024-08-03 02:21:54,730 INFO [train.py:1114] (2/4) Epoch 2, batch 1300, loss[loss=0.3979, simple_loss=0.4341, pruned_loss=0.1809, over 12910.00 frames. ], tot_loss[loss=0.3333, simple_loss=0.383, pruned_loss=0.1418, over 2631714.71 frames. 
], batch size: 52, lr: 4.03e-02, grad_scale: 32.0 +2024-08-03 02:22:06,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.37 vs. limit=21.085 +2024-08-03 02:22:07,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18113.333333333332, ans=0.11886666666666668 +2024-08-03 02:22:08,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18150.0, ans=0.11850000000000002 +2024-08-03 02:22:23,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=18186.666666666668, ans=0.125 +2024-08-03 02:22:32,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=18223.333333333332, ans=0.26218333333333343 +2024-08-03 02:22:44,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18260.0, ans=0.1174 +2024-08-03 02:22:49,375 INFO [train.py:1114] (2/4) Epoch 2, batch 1350, loss[loss=0.3085, simple_loss=0.3678, pruned_loss=0.1246, over 13547.00 frames. ], tot_loss[loss=0.3332, simple_loss=0.3828, pruned_loss=0.1418, over 2639002.09 frames. ], batch size: 37, lr: 4.03e-02, grad_scale: 32.0 +2024-08-03 02:22:50,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=18296.666666666668, ans=0.0 +2024-08-03 02:22:52,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=18296.666666666668, ans=0.2596166666666667 +2024-08-03 02:22:55,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=18296.666666666668, ans=0.125 +2024-08-03 02:22:58,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=18333.333333333332, ans=0.0 +2024-08-03 02:23:08,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.434e+02 1.711e+02 2.081e+02 4.051e+02, threshold=3.422e+02, percent-clipped=5.0 +2024-08-03 02:23:09,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18370.0, ans=0.125 +2024-08-03 02:23:10,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.39 vs. limit=14.38875 +2024-08-03 02:23:27,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=18370.0, ans=11.347999999999999 +2024-08-03 02:23:34,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.97 vs. 
limit=14.4025 +2024-08-03 02:23:39,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=18443.333333333332, ans=0.0 +2024-08-03 02:23:42,013 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:23:44,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=18443.333333333332, ans=0.025 +2024-08-03 02:23:49,073 INFO [train.py:1114] (2/4) Epoch 2, batch 1400, loss[loss=0.2868, simple_loss=0.3394, pruned_loss=0.1171, over 13252.00 frames. ], tot_loss[loss=0.3315, simple_loss=0.3815, pruned_loss=0.1407, over 2642976.14 frames. ], batch size: 31, lr: 4.02e-02, grad_scale: 32.0 +2024-08-03 02:23:58,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=18516.666666666668, ans=0.006844202898550724 +2024-08-03 02:24:01,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=18516.666666666668, ans=0.2519166666666667 +2024-08-03 02:24:05,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18516.666666666668, ans=0.0 +2024-08-03 02:24:32,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=18626.666666666668, ans=0.0 +2024-08-03 02:24:34,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18626.666666666668, ans=0.0 +2024-08-03 02:24:38,143 INFO [train.py:1114] (2/4) Epoch 2, batch 1450, loss[loss=0.3054, simple_loss=0.3716, pruned_loss=0.1196, over 13420.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3822, pruned_loss=0.1416, over 2641831.52 frames. ], batch size: 43, lr: 4.01e-02, grad_scale: 16.0 +2024-08-03 02:24:55,532 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.417e+02 1.675e+02 1.959e+02 3.168e+02, threshold=3.351e+02, percent-clipped=0.0 +2024-08-03 02:25:13,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=18773.333333333332, ans=0.00678840579710145 +2024-08-03 02:25:13,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18773.333333333332, ans=0.125 +2024-08-03 02:25:24,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18810.0, ans=0.125 +2024-08-03 02:25:28,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=18810.0, ans=0.0 +2024-08-03 02:25:32,424 INFO [train.py:1114] (2/4) Epoch 2, batch 1500, loss[loss=0.3375, simple_loss=0.387, pruned_loss=0.144, over 13400.00 frames. ], tot_loss[loss=0.3315, simple_loss=0.382, pruned_loss=0.1405, over 2641371.75 frames. 
], batch size: 39, lr: 4.01e-02, grad_scale: 16.0 +2024-08-03 02:25:33,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18846.666666666668, ans=0.11153333333333332 +2024-08-03 02:25:40,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=18846.666666666668, ans=0.0 +2024-08-03 02:25:44,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=18883.333333333332, ans=0.125 +2024-08-03 02:25:54,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=18920.0, ans=0.05 +2024-08-03 02:25:56,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18920.0, ans=0.11080000000000001 +2024-08-03 02:25:57,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=18920.0, ans=0.02 +2024-08-03 02:26:01,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18956.666666666668, ans=0.0 +2024-08-03 02:26:04,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=18956.666666666668, ans=0.2365166666666667 +2024-08-03 02:26:15,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=18993.333333333332, ans=0.025 +2024-08-03 02:26:19,658 INFO [train.py:1114] (2/4) Epoch 2, batch 1550, loss[loss=0.3322, simple_loss=0.3921, pruned_loss=0.1362, over 13410.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3817, pruned_loss=0.1404, over 2631145.42 frames. ], batch size: 41, lr: 4.00e-02, grad_scale: 16.0 +2024-08-03 02:26:20,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=19030.0, ans=0.0 +2024-08-03 02:26:38,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=19066.666666666668, ans=0.125 +2024-08-03 02:26:42,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.587e+02 1.878e+02 2.318e+02 8.334e+02, threshold=3.756e+02, percent-clipped=6.0 +2024-08-03 02:26:48,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=14.66375 +2024-08-03 02:27:01,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19140.0, ans=0.1086 +2024-08-03 02:27:02,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=14.6775 +2024-08-03 02:27:08,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.89 vs. limit=21.8825 +2024-08-03 02:27:10,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.83 vs. 
limit=14.588333333333335 +2024-08-03 02:27:11,885 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:27:15,318 INFO [train.py:1114] (2/4) Epoch 2, batch 1600, loss[loss=0.3255, simple_loss=0.3812, pruned_loss=0.1349, over 13574.00 frames. ], tot_loss[loss=0.3309, simple_loss=0.3814, pruned_loss=0.1402, over 2624086.81 frames. ], batch size: 39, lr: 4.00e-02, grad_scale: 32.0 +2024-08-03 02:27:16,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=5.882 +2024-08-03 02:27:23,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=19250.0, ans=0.0 +2024-08-03 02:27:45,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19323.333333333332, ans=0.10676666666666668 +2024-08-03 02:27:50,444 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:27:58,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.57 vs. limit=14.76 +2024-08-03 02:28:03,915 INFO [train.py:1114] (2/4) Epoch 2, batch 1650, loss[loss=0.3297, simple_loss=0.3942, pruned_loss=0.1326, over 13307.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3811, pruned_loss=0.1402, over 2620446.32 frames. ], batch size: 40, lr: 3.99e-02, grad_scale: 16.0 +2024-08-03 02:28:13,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19396.666666666668, ans=0.10603333333333334 +2024-08-03 02:28:13,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=19396.666666666668, ans=0.125 +2024-08-03 02:28:31,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=19433.333333333332, ans=0.025 +2024-08-03 02:28:40,139 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.530e+02 1.782e+02 2.174e+02 3.857e+02, threshold=3.564e+02, percent-clipped=2.0 +2024-08-03 02:28:45,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=19470.0, ans=10.0 +2024-08-03 02:29:02,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=19543.333333333332, ans=0.0 +2024-08-03 02:29:04,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19543.333333333332, ans=0.1045666666666667 +2024-08-03 02:29:08,342 INFO [train.py:1114] (2/4) Epoch 2, batch 1700, loss[loss=0.2855, simple_loss=0.3343, pruned_loss=0.1184, over 13272.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3799, pruned_loss=0.1389, over 2629510.94 frames. 
], batch size: 31, lr: 3.98e-02, grad_scale: 16.0 +2024-08-03 02:29:26,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=19616.666666666668, ans=0.2134166666666667 +2024-08-03 02:29:34,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=19653.333333333332, ans=0.006597101449275363 +2024-08-03 02:29:36,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=19653.333333333332, ans=0.09899494936611666 +2024-08-03 02:29:40,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=19690.0, ans=0.125 +2024-08-03 02:29:59,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.87 vs. limit=5.9645 +2024-08-03 02:29:59,362 INFO [train.py:1114] (2/4) Epoch 2, batch 1750, loss[loss=0.2803, simple_loss=0.3316, pruned_loss=0.1145, over 13547.00 frames. ], tot_loss[loss=0.3272, simple_loss=0.3786, pruned_loss=0.1379, over 2632352.89 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0 +2024-08-03 02:30:00,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=19763.333333333332, ans=0.0 +2024-08-03 02:30:01,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=19763.333333333332, ans=0.125 +2024-08-03 02:30:18,375 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.430e+02 1.665e+02 2.047e+02 3.989e+02, threshold=3.330e+02, percent-clipped=2.0 +2024-08-03 02:30:24,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=19836.666666666668, ans=0.006557246376811594 +2024-08-03 02:30:36,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=19910.0, ans=0.125 +2024-08-03 02:30:38,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=19910.0, ans=0.0 +2024-08-03 02:30:46,002 INFO [train.py:1114] (2/4) Epoch 2, batch 1800, loss[loss=0.3322, simple_loss=0.383, pruned_loss=0.1407, over 13557.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3788, pruned_loss=0.1383, over 2633683.32 frames. ], batch size: 38, lr: 3.97e-02, grad_scale: 16.0 +2024-08-03 02:31:04,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=19983.333333333332, ans=0.0065253623188405805 +2024-08-03 02:31:05,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. 
limit=14.99375 +2024-08-03 02:31:06,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19983.333333333332, ans=0.125 +2024-08-03 02:31:18,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=20056.666666666668, ans=0.125 +2024-08-03 02:31:25,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=20056.666666666668, ans=0.125 +2024-08-03 02:31:35,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20093.333333333332, ans=0.1 +2024-08-03 02:31:36,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.31 vs. limit=15.0 +2024-08-03 02:31:37,410 INFO [train.py:1114] (2/4) Epoch 2, batch 1850, loss[loss=0.302, simple_loss=0.36, pruned_loss=0.122, over 13402.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3775, pruned_loss=0.1369, over 2635615.63 frames. ], batch size: 39, lr: 3.96e-02, grad_scale: 16.0 +2024-08-03 02:31:48,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=20130.0, ans=0.07 +2024-08-03 02:32:01,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.03 vs. limit=15.0 +2024-08-03 02:32:01,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.47 vs. limit=22.5 +2024-08-03 02:32:04,606 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.463e+02 1.801e+02 2.661e+02 5.332e+02, threshold=3.601e+02, percent-clipped=10.0 +2024-08-03 02:32:04,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=20203.333333333332, ans=0.025 +2024-08-03 02:32:17,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=20240.0, ans=0.2 +2024-08-03 02:32:17,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=20240.0, ans=0.05 +2024-08-03 02:32:18,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.74 vs. limit=22.5 +2024-08-03 02:32:20,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=20240.0, ans=0.125 +2024-08-03 02:32:32,391 INFO [train.py:1114] (2/4) Epoch 2, batch 1900, loss[loss=0.3485, simple_loss=0.4092, pruned_loss=0.1439, over 13311.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3783, pruned_loss=0.1374, over 2638161.14 frames. 
], batch size: 40, lr: 3.96e-02, grad_scale: 16.0 +2024-08-03 02:32:40,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=20350.0, ans=0.006445652173913043 +2024-08-03 02:32:45,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=20350.0, ans=0.0 +2024-08-03 02:32:51,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=20350.0, ans=0.125 +2024-08-03 02:33:16,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=20460.0, ans=0.2 +2024-08-03 02:33:27,790 INFO [train.py:1114] (2/4) Epoch 2, batch 1950, loss[loss=0.3378, simple_loss=0.3831, pruned_loss=0.1463, over 13560.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3803, pruned_loss=0.138, over 2644950.75 frames. ], batch size: 36, lr: 3.95e-02, grad_scale: 16.0 +2024-08-03 02:33:36,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.15 vs. limit=15.0 +2024-08-03 02:33:37,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.76 vs. limit=10.0 +2024-08-03 02:33:41,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=20533.333333333332, ans=0.125 +2024-08-03 02:33:46,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.390e+02 1.603e+02 1.917e+02 3.719e+02, threshold=3.206e+02, percent-clipped=1.0 +2024-08-03 02:33:50,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20570.0, ans=0.1 +2024-08-03 02:33:50,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-08-03 02:33:55,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20606.666666666668, ans=0.0 +2024-08-03 02:33:57,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=20606.666666666668, ans=0.125 +2024-08-03 02:34:08,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.31 vs. limit=6.0 +2024-08-03 02:34:17,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20643.333333333332, ans=0.1 +2024-08-03 02:34:17,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=20643.333333333332, ans=0.1 +2024-08-03 02:34:22,778 INFO [train.py:1114] (2/4) Epoch 2, batch 2000, loss[loss=0.288, simple_loss=0.3367, pruned_loss=0.1197, over 13549.00 frames. ], tot_loss[loss=0.3291, simple_loss=0.381, pruned_loss=0.1386, over 2634678.63 frames. ], batch size: 31, lr: 3.94e-02, grad_scale: 32.0 +2024-08-03 02:35:14,599 INFO [train.py:1114] (2/4) Epoch 2, batch 2050, loss[loss=0.3638, simple_loss=0.3843, pruned_loss=0.1717, over 13424.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3793, pruned_loss=0.1375, over 2631619.38 frames. 
], batch size: 32, lr: 3.94e-02, grad_scale: 32.0 +2024-08-03 02:35:20,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=20863.333333333332, ans=0.125 +2024-08-03 02:35:34,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=20900.0, ans=0.0 +2024-08-03 02:35:36,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.423e+02 1.683e+02 2.101e+02 5.163e+02, threshold=3.365e+02, percent-clipped=3.0 +2024-08-03 02:35:54,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0 +2024-08-03 02:35:54,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=20973.333333333332, ans=0.125 +2024-08-03 02:36:09,039 INFO [train.py:1114] (2/4) Epoch 2, batch 2100, loss[loss=0.301, simple_loss=0.3606, pruned_loss=0.1207, over 13536.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3784, pruned_loss=0.1371, over 2637765.67 frames. ], batch size: 37, lr: 3.93e-02, grad_scale: 32.0 +2024-08-03 02:36:15,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=2.86 vs. limit=15.0 +2024-08-03 02:36:23,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21083.333333333332, ans=0.1 +2024-08-03 02:36:26,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=21083.333333333332, ans=0.125 +2024-08-03 02:36:31,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=21120.0, ans=0.04949747468305833 +2024-08-03 02:36:33,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.58 vs. limit=12.0 +2024-08-03 02:36:56,259 INFO [train.py:1114] (2/4) Epoch 2, batch 2150, loss[loss=0.2815, simple_loss=0.3363, pruned_loss=0.1133, over 13553.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3762, pruned_loss=0.1355, over 2646609.65 frames. ], batch size: 36, lr: 3.93e-02, grad_scale: 16.0 +2024-08-03 02:37:01,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=21230.0, ans=0.09899494936611666 +2024-08-03 02:37:01,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=12.0 +2024-08-03 02:37:17,960 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.342e+02 1.575e+02 1.914e+02 2.983e+02, threshold=3.149e+02, percent-clipped=0.0 +2024-08-03 02:37:37,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=21376.666666666668, ans=0.2 +2024-08-03 02:37:48,056 INFO [train.py:1114] (2/4) Epoch 2, batch 2200, loss[loss=0.3186, simple_loss=0.3798, pruned_loss=0.1288, over 13381.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3765, pruned_loss=0.1358, over 2644670.45 frames. ], batch size: 39, lr: 3.92e-02, grad_scale: 16.0 +2024-08-03 02:38:11,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.56 vs. 
limit=22.5 +2024-08-03 02:39:23,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=21560.0, ans=0.025 +2024-08-03 02:39:28,807 INFO [train.py:1114] (2/4) Epoch 2, batch 2250, loss[loss=0.3315, simple_loss=0.3954, pruned_loss=0.1338, over 13371.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3764, pruned_loss=0.1358, over 2642835.59 frames. ], batch size: 37, lr: 3.91e-02, grad_scale: 16.0 +2024-08-03 02:39:38,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=21596.666666666668, ans=0.1 +2024-08-03 02:39:42,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=21596.666666666668, ans=0.006174637681159421 +2024-08-03 02:39:56,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=21633.333333333332, ans=0.0061666666666666675 +2024-08-03 02:40:05,995 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.392e+02 1.558e+02 1.925e+02 3.298e+02, threshold=3.115e+02, percent-clipped=1.0 +2024-08-03 02:40:06,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.74 vs. limit=22.5 +2024-08-03 02:40:08,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=21670.0, ans=0.006158695652173913 +2024-08-03 02:40:14,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21706.666666666668, ans=0.125 +2024-08-03 02:40:16,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21706.666666666668, ans=0.125 +2024-08-03 02:40:22,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.77 vs. limit=22.5 +2024-08-03 02:40:34,539 INFO [train.py:1114] (2/4) Epoch 2, batch 2300, loss[loss=0.2868, simple_loss=0.3408, pruned_loss=0.1164, over 13565.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3747, pruned_loss=0.1351, over 2638916.92 frames. ], batch size: 33, lr: 3.91e-02, grad_scale: 8.0 +2024-08-03 02:40:39,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=21780.0, ans=0.07 +2024-08-03 02:40:49,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=21816.666666666668, ans=0.0 +2024-08-03 02:40:50,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=21816.666666666668, ans=0.125 +2024-08-03 02:40:51,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=21816.666666666668, ans=0.0 +2024-08-03 02:40:59,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0 +2024-08-03 02:41:00,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21853.333333333332, ans=0.1 +2024-08-03 02:41:08,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.12 vs. 
limit=15.0 +2024-08-03 02:41:12,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=21890.0, ans=0.07 +2024-08-03 02:41:17,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-08-03 02:41:20,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=21926.666666666668, ans=0.125 +2024-08-03 02:41:23,901 INFO [train.py:1114] (2/4) Epoch 2, batch 2350, loss[loss=0.3177, simple_loss=0.3758, pruned_loss=0.1298, over 13551.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3752, pruned_loss=0.1352, over 2641352.18 frames. ], batch size: 38, lr: 3.90e-02, grad_scale: 8.0 +2024-08-03 02:41:24,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=21963.333333333332, ans=0.006094927536231884 +2024-08-03 02:41:37,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=22000.0, ans=0.2 +2024-08-03 02:41:42,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.07 vs. limit=22.5 +2024-08-03 02:41:44,437 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.437e+02 1.623e+02 1.973e+02 3.440e+02, threshold=3.245e+02, percent-clipped=2.0 +2024-08-03 02:41:53,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=22036.666666666668, ans=0.0 +2024-08-03 02:42:13,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=22110.0, ans=0.0 +2024-08-03 02:42:15,888 INFO [train.py:1114] (2/4) Epoch 2, batch 2400, loss[loss=0.3104, simple_loss=0.3682, pruned_loss=0.1263, over 13525.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3748, pruned_loss=0.1346, over 2642303.11 frames. ], batch size: 35, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:42:19,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=22146.666666666668, ans=0.0 +2024-08-03 02:42:59,516 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:43:01,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=22293.333333333332, ans=0.05 +2024-08-03 02:43:04,901 INFO [train.py:1114] (2/4) Epoch 2, batch 2450, loss[loss=0.3394, simple_loss=0.391, pruned_loss=0.1439, over 13351.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3753, pruned_loss=0.1347, over 2633071.92 frames. ], batch size: 37, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:43:11,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.41 vs. 
limit=15.0 +2024-08-03 02:43:12,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22330.0, ans=0.1 +2024-08-03 02:43:27,796 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.465e+02 1.678e+02 2.067e+02 5.260e+02, threshold=3.356e+02, percent-clipped=2.0 +2024-08-03 02:43:58,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=22440.0, ans=0.5 +2024-08-03 02:44:14,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=22440.0, ans=0.1 +2024-08-03 02:45:11,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=22476.666666666668, ans=0.125 +2024-08-03 02:45:37,293 INFO [train.py:1114] (2/4) Epoch 2, batch 2500, loss[loss=0.3497, simple_loss=0.3986, pruned_loss=0.1504, over 13396.00 frames. ], tot_loss[loss=0.3214, simple_loss=0.375, pruned_loss=0.1339, over 2637008.65 frames. ], batch size: 39, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:46:00,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=22513.333333333332, ans=0.0 +2024-08-03 02:46:28,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=22550.0, ans=0.1 +2024-08-03 02:46:50,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.43 vs. limit=22.5 +2024-08-03 02:47:28,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22586.666666666668, ans=0.1 +2024-08-03 02:47:53,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.68 vs. limit=22.5 +2024-08-03 02:48:03,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=22586.666666666668, ans=0.2 +2024-08-03 02:48:26,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=22623.333333333332, ans=0.2 +2024-08-03 02:48:27,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.24 vs. limit=15.0 +2024-08-03 02:48:55,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=22623.333333333332, ans=0.02 +2024-08-03 02:52:09,584 INFO [train.py:1114] (2/4) Epoch 2, batch 2550, loss[loss=0.3152, simple_loss=0.3579, pruned_loss=0.1362, over 13554.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3746, pruned_loss=0.1337, over 2638025.60 frames. 
], batch size: 31, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:52:18,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=22696.666666666668, ans=0.09899494936611666 +2024-08-03 02:52:18,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=22696.666666666668, ans=0.125 +2024-08-03 02:52:36,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=22696.666666666668, ans=10.0 +2024-08-03 02:53:16,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=22733.333333333332, ans=0.2 +2024-08-03 02:53:31,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.373e+02 1.661e+02 2.107e+02 4.285e+02, threshold=3.322e+02, percent-clipped=3.0 +2024-08-03 02:54:31,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=22806.666666666668, ans=0.0 +2024-08-03 02:54:33,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=22843.333333333332, ans=0.2 +2024-08-03 02:54:41,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0 +2024-08-03 02:54:52,163 INFO [train.py:1114] (2/4) Epoch 2, batch 2600, loss[loss=0.3211, simple_loss=0.3689, pruned_loss=0.1366, over 13561.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3751, pruned_loss=0.1343, over 2637399.92 frames. ], batch size: 36, lr: 3.87e-02, grad_scale: 8.0 +2024-08-03 02:54:53,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22880.0, ans=0.125 +2024-08-03 02:55:35,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=22953.333333333332, ans=0.0 +2024-08-03 02:55:38,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=22953.333333333332, ans=0.005879710144927537 +2024-08-03 02:55:42,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=22990.0, ans=0.125 +2024-08-03 02:55:51,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=23026.666666666668, ans=0.025 +2024-08-03 02:55:52,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=23026.666666666668, ans=0.0 +2024-08-03 02:55:53,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=23026.666666666668, ans=0.125 +2024-08-03 02:55:55,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=23026.666666666668, ans=0.0 +2024-08-03 02:56:03,855 INFO [train.py:1114] (2/4) Epoch 2, batch 2650, loss[loss=0.3383, simple_loss=0.386, pruned_loss=0.1453, over 13330.00 frames. ], tot_loss[loss=0.3222, simple_loss=0.3756, pruned_loss=0.1344, over 2640530.48 frames. 
], batch size: 46, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:56:27,905 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.447e+02 1.734e+02 2.047e+02 3.463e+02, threshold=3.469e+02, percent-clipped=1.0 +2024-08-03 02:57:00,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=23173.333333333332, ans=0.005831884057971015 +2024-08-03 02:57:02,472 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=7.55 vs. limit=6.0 +2024-08-03 02:58:07,702 INFO [train.py:1114] (2/4) Epoch 2, batch 2700, loss[loss=0.3716, simple_loss=0.4061, pruned_loss=0.1686, over 13561.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3755, pruned_loss=0.1343, over 2638051.90 frames. ], batch size: 40, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:58:26,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=23283.333333333332, ans=0.04949747468305833 +2024-08-03 02:59:39,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.20 vs. limit=15.0 +2024-08-03 02:59:52,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.76 vs. limit=22.5 +2024-08-03 03:00:30,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23393.333333333332, ans=0.1 +2024-08-03 03:00:36,561 INFO [train.py:1114] (2/4) Epoch 2, batch 2750, loss[loss=0.2854, simple_loss=0.3427, pruned_loss=0.1141, over 13335.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3737, pruned_loss=0.1335, over 2635544.91 frames. ], batch size: 34, lr: 3.85e-02, grad_scale: 8.0 +2024-08-03 03:00:47,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=23430.0, ans=0.025 +2024-08-03 03:00:47,448 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:00:50,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=23430.0, ans=0.0 +2024-08-03 03:01:06,447 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.416e+02 1.639e+02 1.960e+02 3.073e+02, threshold=3.277e+02, percent-clipped=0.0 +2024-08-03 03:01:06,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0 +2024-08-03 03:01:09,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=23503.333333333332, ans=0.125 +2024-08-03 03:01:13,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=23540.0, ans=0.1 +2024-08-03 03:01:15,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23540.0, ans=0.125 +2024-08-03 03:01:21,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.25 vs. 
limit=22.5 +2024-08-03 03:01:27,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=15.0 +2024-08-03 03:01:32,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.72 vs. limit=10.0 +2024-08-03 03:01:38,818 INFO [train.py:1114] (2/4) Epoch 2, batch 2800, loss[loss=0.4021, simple_loss=0.4234, pruned_loss=0.1904, over 9421.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3743, pruned_loss=0.1341, over 2628152.04 frames. ], batch size: 96, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:02:00,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. limit=15.0 +2024-08-03 03:02:31,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=23723.333333333332, ans=0.125 +2024-08-03 03:02:44,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=23760.0, ans=0.0 +2024-08-03 03:02:47,281 INFO [train.py:1114] (2/4) Epoch 2, batch 2850, loss[loss=0.3206, simple_loss=0.376, pruned_loss=0.1326, over 13355.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3755, pruned_loss=0.1351, over 2621651.17 frames. ], batch size: 35, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:02:53,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=23796.666666666668, ans=0.125 +2024-08-03 03:03:03,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23833.333333333332, ans=0.125 +2024-08-03 03:03:09,962 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.500e+02 1.764e+02 2.105e+02 5.677e+02, threshold=3.527e+02, percent-clipped=3.0 +2024-08-03 03:04:04,004 INFO [train.py:1114] (2/4) Epoch 2, batch 2900, loss[loss=0.3163, simple_loss=0.365, pruned_loss=0.1338, over 13362.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3763, pruned_loss=0.1348, over 2632539.20 frames. ], batch size: 36, lr: 3.83e-02, grad_scale: 16.0 +2024-08-03 03:04:11,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.93 vs. 
limit=15.0 +2024-08-03 03:04:28,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=24016.666666666668, ans=0.125 +2024-08-03 03:04:28,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=24016.666666666668, ans=0.0 +2024-08-03 03:04:38,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=24053.333333333332, ans=0.0 +2024-08-03 03:04:50,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=24090.0, ans=0.5 +2024-08-03 03:04:54,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=24090.0, ans=0.04949747468305833 +2024-08-03 03:04:58,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.93 vs. limit=15.0 +2024-08-03 03:05:08,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=24163.333333333332, ans=0.005616666666666667 +2024-08-03 03:05:09,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=24163.333333333332, ans=0.125 +2024-08-03 03:05:09,681 INFO [train.py:1114] (2/4) Epoch 2, batch 2950, loss[loss=0.321, simple_loss=0.3722, pruned_loss=0.135, over 13325.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3739, pruned_loss=0.1337, over 2629626.59 frames. ], batch size: 34, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:05:16,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.27 vs. limit=22.5 +2024-08-03 03:05:29,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=24200.0, ans=0.125 +2024-08-03 03:05:34,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=24236.666666666668, ans=0.125 +2024-08-03 03:05:38,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.432e+02 1.719e+02 2.227e+02 3.350e+02, threshold=3.438e+02, percent-clipped=0.0 +2024-08-03 03:05:45,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.38 vs. limit=22.5 +2024-08-03 03:05:55,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24273.333333333332, ans=0.125 +2024-08-03 03:06:07,499 INFO [train.py:1114] (2/4) Epoch 2, batch 3000, loss[loss=0.3066, simple_loss=0.3742, pruned_loss=0.1195, over 13538.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.3733, pruned_loss=0.1327, over 2630011.56 frames. 
], batch size: 37, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:06:07,500 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 03:06:22,312 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.6414, 2.8268, 2.8189, 2.4629], device='cuda:2') +2024-08-03 03:06:27,520 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([0.8695, 2.2287, 1.2346, 3.4954, 3.2823, 1.6934, 2.4224, 3.2383], + device='cuda:2') +2024-08-03 03:06:29,639 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.2511, simple_loss=0.3433, pruned_loss=0.07947, over 944034.00 frames. +2024-08-03 03:06:29,639 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 03:06:35,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=24346.666666666668, ans=0.95 +2024-08-03 03:06:49,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0 +2024-08-03 03:06:50,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.85 vs. limit=15.0 +2024-08-03 03:06:56,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=24456.666666666668, ans=0.125 +2024-08-03 03:07:00,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-08-03 03:07:00,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=24456.666666666668, ans=0.125 +2024-08-03 03:07:04,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24493.333333333332, ans=0.125 +2024-08-03 03:07:08,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24493.333333333332, ans=0.1 +2024-08-03 03:07:08,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=24493.333333333332, ans=0.125 +2024-08-03 03:07:13,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=24493.333333333332, ans=0.005544927536231884 +2024-08-03 03:07:15,182 INFO [train.py:1114] (2/4) Epoch 2, batch 3050, loss[loss=0.3011, simple_loss=0.3525, pruned_loss=0.1248, over 13533.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3748, pruned_loss=0.1338, over 2627152.38 frames. ], batch size: 35, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:07:25,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.87 vs. 
limit=15.0 +2024-08-03 03:07:26,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=24566.666666666668, ans=0.125 +2024-08-03 03:07:37,908 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.349e+02 1.521e+02 1.830e+02 3.051e+02, threshold=3.043e+02, percent-clipped=0.0 +2024-08-03 03:08:04,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=24713.333333333332, ans=0.2 +2024-08-03 03:08:04,766 INFO [train.py:1114] (2/4) Epoch 2, batch 3100, loss[loss=0.3717, simple_loss=0.4172, pruned_loss=0.1631, over 13365.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3738, pruned_loss=0.1329, over 2627599.94 frames. ], batch size: 46, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:08:26,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=24750.0, ans=0.0 +2024-08-03 03:08:49,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=24823.333333333332, ans=0.005473188405797102 +2024-08-03 03:08:51,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=24823.333333333332, ans=0.125 +2024-08-03 03:08:53,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=24823.333333333332, ans=0.2 +2024-08-03 03:09:07,696 INFO [train.py:1114] (2/4) Epoch 2, batch 3150, loss[loss=0.3657, simple_loss=0.4094, pruned_loss=0.1611, over 12993.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3743, pruned_loss=0.1333, over 2629405.35 frames. ], batch size: 48, lr: 3.80e-02, grad_scale: 8.0 +2024-08-03 03:09:10,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=24896.666666666668, ans=0.125 +2024-08-03 03:09:11,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.07 vs. limit=15.0 +2024-08-03 03:09:17,532 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.85 vs. limit=15.0 +2024-08-03 03:09:17,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=24933.333333333332, ans=0.025 +2024-08-03 03:09:29,255 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.526e+02 1.877e+02 2.299e+02 4.480e+02, threshold=3.753e+02, percent-clipped=6.0 +2024-08-03 03:09:38,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=25006.666666666668, ans=0.125 +2024-08-03 03:09:46,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=25043.333333333332, ans=0.00542536231884058 +2024-08-03 03:09:53,515 INFO [train.py:1114] (2/4) Epoch 2, batch 3200, loss[loss=0.3315, simple_loss=0.3798, pruned_loss=0.1416, over 13543.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3736, pruned_loss=0.133, over 2635001.15 frames. 
], batch size: 37, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:10:01,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25080.0, ans=0.1 +2024-08-03 03:10:01,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=18.54 vs. limit=15.0 +2024-08-03 03:10:03,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=25080.0, ans=0.2 +2024-08-03 03:10:08,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=25116.666666666668, ans=0.125 +2024-08-03 03:10:56,394 INFO [train.py:1114] (2/4) Epoch 2, batch 3250, loss[loss=0.3413, simple_loss=0.3947, pruned_loss=0.144, over 13396.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3732, pruned_loss=0.132, over 2638828.10 frames. ], batch size: 38, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:11:13,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=25263.333333333332, ans=0.125 +2024-08-03 03:11:34,450 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.442e+02 1.550e+02 1.764e+02 2.865e+02, threshold=3.101e+02, percent-clipped=0.0 +2024-08-03 03:11:57,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.07 vs. limit=12.0 +2024-08-03 03:12:04,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=25410.0, ans=0.005345652173913044 +2024-08-03 03:12:05,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.45 vs. limit=15.0 +2024-08-03 03:12:09,643 INFO [train.py:1114] (2/4) Epoch 2, batch 3300, loss[loss=0.3665, simple_loss=0.4118, pruned_loss=0.1606, over 12908.00 frames. ], tot_loss[loss=0.3175, simple_loss=0.3722, pruned_loss=0.1314, over 2640703.85 frames. ], batch size: 52, lr: 3.78e-02, grad_scale: 16.0 +2024-08-03 03:12:23,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=25483.333333333332, ans=0.005329710144927537 +2024-08-03 03:12:25,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=25483.333333333332, ans=0.2 +2024-08-03 03:12:33,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. limit=15.0 +2024-08-03 03:12:35,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=25556.666666666668, ans=0.125 +2024-08-03 03:12:44,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=25593.333333333332, ans=0.005305797101449275 +2024-08-03 03:12:49,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=25593.333333333332, ans=0.0 +2024-08-03 03:12:53,096 INFO [train.py:1114] (2/4) Epoch 2, batch 3350, loss[loss=0.3195, simple_loss=0.3834, pruned_loss=0.1279, over 13062.00 frames. 
], tot_loss[loss=0.3187, simple_loss=0.3731, pruned_loss=0.1321, over 2630105.57 frames. ], batch size: 48, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:13:14,122 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.415e+02 1.657e+02 2.011e+02 3.247e+02, threshold=3.315e+02, percent-clipped=1.0 +2024-08-03 03:13:21,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=25740.0, ans=0.125 +2024-08-03 03:13:23,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=25740.0, ans=0.125 +2024-08-03 03:13:29,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.45 vs. limit=22.5 +2024-08-03 03:13:39,080 INFO [train.py:1114] (2/4) Epoch 2, batch 3400, loss[loss=0.241, simple_loss=0.2994, pruned_loss=0.09131, over 13515.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3728, pruned_loss=0.1322, over 2626200.92 frames. ], batch size: 31, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:13:48,028 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.209e-02 +2024-08-03 03:13:49,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=25850.0, ans=0.0 +2024-08-03 03:13:52,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=15.0 +2024-08-03 03:13:58,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=25886.666666666668, ans=0.125 +2024-08-03 03:14:07,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=25923.333333333332, ans=12.0 +2024-08-03 03:14:13,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=12.0 +2024-08-03 03:14:17,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=25960.0, ans=0.0 +2024-08-03 03:14:23,925 INFO [train.py:1114] (2/4) Epoch 2, batch 3450, loss[loss=0.329, simple_loss=0.3861, pruned_loss=0.1359, over 12914.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3738, pruned_loss=0.1327, over 2629746.28 frames. ], batch size: 52, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:14:35,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=26033.333333333332, ans=0.125 +2024-08-03 03:14:44,188 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.439e+02 1.568e+02 1.776e+02 4.751e+02, threshold=3.136e+02, percent-clipped=2.0 +2024-08-03 03:14:51,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=26106.666666666668, ans=0.125 +2024-08-03 03:15:02,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=26143.333333333332, ans=0.125 +2024-08-03 03:15:06,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.67 vs. 
limit=15.0 +2024-08-03 03:15:06,940 INFO [train.py:1114] (2/4) Epoch 2, batch 3500, loss[loss=0.2645, simple_loss=0.3225, pruned_loss=0.1032, over 13512.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3728, pruned_loss=0.1322, over 2630872.20 frames. ], batch size: 34, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:15:15,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=26216.666666666668, ans=0.125 +2024-08-03 03:15:36,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.19 vs. limit=22.5 +2024-08-03 03:15:39,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=26290.0, ans=0.1 +2024-08-03 03:15:39,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26290.0, ans=0.125 +2024-08-03 03:15:42,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=15.0 +2024-08-03 03:15:44,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=26326.666666666668, ans=0.125 +2024-08-03 03:15:53,623 INFO [train.py:1114] (2/4) Epoch 2, batch 3550, loss[loss=0.3486, simple_loss=0.3962, pruned_loss=0.1505, over 12561.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3755, pruned_loss=0.1339, over 2629580.34 frames. ], batch size: 58, lr: 3.75e-02, grad_scale: 16.0 +2024-08-03 03:15:53,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=26363.333333333332, ans=0.0051384057971014495 +2024-08-03 03:15:57,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=26363.333333333332, ans=0.035 +2024-08-03 03:15:59,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=26363.333333333332, ans=0.0051384057971014495 +2024-08-03 03:16:15,275 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.421e+02 1.616e+02 2.006e+02 3.426e+02, threshold=3.231e+02, percent-clipped=2.0 +2024-08-03 03:16:20,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.18 vs. limit=15.0 +2024-08-03 03:16:39,552 INFO [train.py:1114] (2/4) Epoch 2, batch 3600, loss[loss=0.3792, simple_loss=0.4088, pruned_loss=0.1749, over 9068.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.3826, pruned_loss=0.1416, over 2489833.02 frames. ], batch size: 97, lr: 3.74e-02, grad_scale: 32.0 +2024-08-03 03:17:00,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=26620.0, ans=0.125 +2024-08-03 03:17:09,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=26656.666666666668, ans=0.0 +2024-08-03 03:17:54,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26693.333333333332, ans=0.07 +2024-08-03 03:18:02,797 INFO [train.py:1114] (2/4) Epoch 3, batch 0, loss[loss=0.2784, simple_loss=0.3368, pruned_loss=0.11, over 13354.00 frames. 
], tot_loss[loss=0.2784, simple_loss=0.3368, pruned_loss=0.11, over 13354.00 frames. ], batch size: 33, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:18:02,797 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 03:18:12,652 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2631, simple_loss=0.3546, pruned_loss=0.08577, over 944034.00 frames. +2024-08-03 03:18:12,653 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 03:18:19,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=26693.333333333332, ans=0.125 +2024-08-03 03:18:21,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26730.0, ans=0.125 +2024-08-03 03:18:27,727 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.80 vs. limit=10.0 +2024-08-03 03:18:28,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=26730.0, ans=0.0 +2024-08-03 03:18:29,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.72 vs. limit=10.0 +2024-08-03 03:18:34,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=26766.666666666668, ans=0.2 +2024-08-03 03:18:38,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.31 vs. limit=15.0 +2024-08-03 03:18:39,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=26766.666666666668, ans=0.025 +2024-08-03 03:18:44,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26803.333333333332, ans=0.1 +2024-08-03 03:18:45,700 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.401e+02 1.625e+02 1.929e+02 3.724e+02, threshold=3.249e+02, percent-clipped=3.0 +2024-08-03 03:18:49,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=26840.0, ans=0.2 +2024-08-03 03:18:55,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=12.0 +2024-08-03 03:18:56,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=15.0 +2024-08-03 03:18:59,362 INFO [train.py:1114] (2/4) Epoch 3, batch 50, loss[loss=0.293, simple_loss=0.3494, pruned_loss=0.1183, over 13437.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3789, pruned_loss=0.1365, over 578446.13 frames. ], batch size: 32, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:19:30,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=26986.666666666668, ans=0.025 +2024-08-03 03:19:37,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=26986.666666666668, ans=0.2 +2024-08-03 03:19:48,799 INFO [train.py:1114] (2/4) Epoch 3, batch 100, loss[loss=0.2827, simple_loss=0.3369, pruned_loss=0.1143, over 13526.00 frames. 
], tot_loss[loss=0.3233, simple_loss=0.3773, pruned_loss=0.1347, over 1025701.43 frames. ], batch size: 35, lr: 3.54e-02, grad_scale: 32.0 +2024-08-03 03:20:02,279 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-08-03 03:20:08,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=27133.333333333332, ans=0.125 +2024-08-03 03:20:10,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=27133.333333333332, ans=0.125 +2024-08-03 03:20:21,033 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.439e+02 1.724e+02 2.172e+02 3.862e+02, threshold=3.447e+02, percent-clipped=4.0 +2024-08-03 03:20:54,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=27170.0, ans=0.0 +2024-08-03 03:21:05,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0 +2024-08-03 03:21:06,933 INFO [train.py:1114] (2/4) Epoch 3, batch 150, loss[loss=0.2623, simple_loss=0.3176, pruned_loss=0.1035, over 13431.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.3724, pruned_loss=0.1309, over 1386476.77 frames. ], batch size: 32, lr: 3.53e-02, grad_scale: 32.0 +2024-08-03 03:21:13,652 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0 +2024-08-03 03:21:13,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.76 vs. limit=15.0 +2024-08-03 03:21:30,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=27316.666666666668, ans=0.125 +2024-08-03 03:22:07,976 INFO [train.py:1114] (2/4) Epoch 3, batch 200, loss[loss=0.3688, simple_loss=0.4174, pruned_loss=0.1601, over 12482.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3704, pruned_loss=0.1298, over 1665279.32 frames. ], batch size: 58, lr: 3.53e-02, grad_scale: 16.0 +2024-08-03 03:22:18,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=27463.333333333332, ans=0.125 +2024-08-03 03:22:33,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27500.0, ans=0.125 +2024-08-03 03:22:41,092 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.338e+02 1.522e+02 1.755e+02 2.817e+02, threshold=3.045e+02, percent-clipped=0.0 +2024-08-03 03:22:41,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27536.666666666668, ans=0.125 +2024-08-03 03:22:41,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=27536.666666666668, ans=0.05 +2024-08-03 03:22:53,983 INFO [train.py:1114] (2/4) Epoch 3, batch 250, loss[loss=0.3348, simple_loss=0.3878, pruned_loss=0.1409, over 13301.00 frames. ], tot_loss[loss=0.3132, simple_loss=0.369, pruned_loss=0.1287, over 1884062.82 frames. 
], batch size: 46, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:22:55,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27610.0, ans=0.125 +2024-08-03 03:23:10,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=27646.666666666668, ans=0.125 +2024-08-03 03:23:11,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=15.0 +2024-08-03 03:23:16,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27683.333333333332, ans=0.125 +2024-08-03 03:23:27,656 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:23:35,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.65 vs. limit=22.5 +2024-08-03 03:23:36,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.27 vs. limit=15.0 +2024-08-03 03:23:48,977 INFO [train.py:1114] (2/4) Epoch 3, batch 300, loss[loss=0.3136, simple_loss=0.374, pruned_loss=0.1266, over 13416.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3681, pruned_loss=0.1282, over 2051199.33 frames. ], batch size: 42, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:23:49,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27793.333333333332, ans=0.1 +2024-08-03 03:23:52,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=27793.333333333332, ans=0.0 +2024-08-03 03:24:12,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27866.666666666668, ans=0.125 +2024-08-03 03:24:24,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=27903.333333333332, ans=0.004803623188405797 +2024-08-03 03:24:24,866 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.466e+02 1.718e+02 2.215e+02 5.480e+02, threshold=3.437e+02, percent-clipped=5.0 +2024-08-03 03:24:38,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27976.666666666668, ans=0.125 +2024-08-03 03:24:38,885 INFO [train.py:1114] (2/4) Epoch 3, batch 350, loss[loss=0.2977, simple_loss=0.3459, pruned_loss=0.1247, over 13595.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3683, pruned_loss=0.1283, over 2182445.52 frames. ], batch size: 33, lr: 3.51e-02, grad_scale: 16.0 +2024-08-03 03:24:43,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=27976.666666666668, ans=0.125 +2024-08-03 03:24:48,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=28013.333333333332, ans=0.125 +2024-08-03 03:25:10,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.79 vs. 
limit=15.0 +2024-08-03 03:25:14,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=28086.666666666668, ans=0.125 +2024-08-03 03:25:17,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=28123.333333333332, ans=0.5 +2024-08-03 03:25:22,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28123.333333333332, ans=0.1 +2024-08-03 03:25:25,240 INFO [train.py:1114] (2/4) Epoch 3, batch 400, loss[loss=0.3311, simple_loss=0.3824, pruned_loss=0.14, over 13369.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3674, pruned_loss=0.1277, over 2286428.53 frames. ], batch size: 37, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:25:40,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=28196.666666666668, ans=0.0 +2024-08-03 03:25:45,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=28196.666666666668, ans=0.004739855072463768 +2024-08-03 03:25:45,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28196.666666666668, ans=0.1 +2024-08-03 03:25:49,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28196.666666666668, ans=0.1 +2024-08-03 03:25:59,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.42 vs. limit=15.0 +2024-08-03 03:26:03,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=28270.0, ans=0.125 +2024-08-03 03:26:05,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.402e+02 1.627e+02 1.926e+02 3.907e+02, threshold=3.254e+02, percent-clipped=1.0 +2024-08-03 03:26:15,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=28306.666666666668, ans=0.125 +2024-08-03 03:26:17,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=28306.666666666668, ans=0.004715942028985507 +2024-08-03 03:26:18,617 INFO [train.py:1114] (2/4) Epoch 3, batch 450, loss[loss=0.3235, simple_loss=0.3817, pruned_loss=0.1326, over 13558.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3674, pruned_loss=0.1277, over 2360454.05 frames. 
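
Note on the `WARNING [optim.py:487]` lines: each prints five quantiles (min, 25%, median, 75%, max) of recently observed gradient norms, the active clipping threshold, and the fraction of recent steps that were clipped. The threshold is exactly `Clipping_scale` times the median: in the warning just above, 2.0 * 1.627e+02 = 3.254e+02, matching `threshold=3.254e+02`. A sketch of that bookkeeping; the real optim.py keeps a rolling buffer and applies the threshold step by step, which this simplifies:

import torch

def clipping_stats(recent_grad_norms: list, clipping_scale: float = 2.0):
    # Five quantiles of the recent gradient norms, as printed in the log.
    norms = torch.tensor(recent_grad_norms)
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]  # 2.0 x median, per the logged values
    # Approximation: the real percent-clipped compares each step's norm
    # against the threshold in force at that step, not the current one.
    percent_clipped = 100.0 * (norms > threshold).float().mean()
    return q, threshold, percent_clipped
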
], batch size: 38, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:26:22,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28343.333333333332, ans=0.1 +2024-08-03 03:26:38,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=28380.0, ans=0.125 +2024-08-03 03:26:49,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=28416.666666666668, ans=0.125 +2024-08-03 03:26:56,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=28453.333333333332, ans=0.2 +2024-08-03 03:27:08,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=28490.0, ans=0.0 +2024-08-03 03:27:09,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=28490.0, ans=0.125 +2024-08-03 03:27:11,498 INFO [train.py:1114] (2/4) Epoch 3, batch 500, loss[loss=0.3253, simple_loss=0.3842, pruned_loss=0.1332, over 13425.00 frames. ], tot_loss[loss=0.3087, simple_loss=0.3652, pruned_loss=0.1261, over 2426256.82 frames. ], batch size: 43, lr: 3.49e-02, grad_scale: 32.0 +2024-08-03 03:27:16,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=28526.666666666668, ans=0.125 +2024-08-03 03:27:20,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=28563.333333333332, ans=0.0 +2024-08-03 03:27:48,044 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.466e+02 1.735e+02 2.174e+02 5.837e+02, threshold=3.470e+02, percent-clipped=2.0 +2024-08-03 03:27:48,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0 +2024-08-03 03:27:56,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=28673.333333333332, ans=0.2 +2024-08-03 03:27:59,898 INFO [train.py:1114] (2/4) Epoch 3, batch 550, loss[loss=0.366, simple_loss=0.4105, pruned_loss=0.1608, over 13028.00 frames. ], tot_loss[loss=0.3082, simple_loss=0.3648, pruned_loss=0.1258, over 2468375.25 frames. 
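
Note on the `scaling.py:214` lines: each reports a ScheduledFloat, a hyperparameter (balancer probabilities, skip rates, dropout, etc.) whose current value `ans` is a function of `batch_count`, typically piecewise-linear. That is why the `*.dropout_p` entries hold at 0.1 across this span while several `*_skip_rate` entries have already decayed to 0.0. A minimal sketch of such a schedule; the breakpoints below are illustrative, not the recipe's actual values:

def scheduled_float(batch_count: float,
                    schedule=((0.0, 0.3), (4000.0, 0.125), (16000.0, 0.025))):
    # Piecewise-linear interpolation over (batch_count, value) breakpoints,
    # clamped at both ends. Breakpoints here are placeholders.
    if batch_count <= schedule[0][0]:
        return schedule[0][1]
    for (x0, y0), (x1, y1) in zip(schedule, schedule[1:]):
        if batch_count <= x1:
            return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
    return schedule[-1][1]
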
], batch size: 48, lr: 3.49e-02, grad_scale: 16.0 +2024-08-03 03:28:11,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=28746.666666666668, ans=10.0 +2024-08-03 03:28:16,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=28746.666666666668, ans=0.0 +2024-08-03 03:28:23,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=28783.333333333332, ans=0.125 +2024-08-03 03:28:44,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=28856.666666666668, ans=0.125 +2024-08-03 03:28:45,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28856.666666666668, ans=0.1 +2024-08-03 03:28:45,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=28856.666666666668, ans=0.07 +2024-08-03 03:28:48,276 INFO [train.py:1114] (2/4) Epoch 3, batch 600, loss[loss=0.3381, simple_loss=0.3996, pruned_loss=0.1383, over 13345.00 frames. ], tot_loss[loss=0.3079, simple_loss=0.365, pruned_loss=0.1254, over 2508390.14 frames. ], batch size: 46, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:02,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.51 vs. limit=15.0 +2024-08-03 03:29:21,220 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.381e+02 1.525e+02 1.783e+02 3.115e+02, threshold=3.051e+02, percent-clipped=0.0 +2024-08-03 03:29:24,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=29040.0, ans=0.07 +2024-08-03 03:29:39,464 INFO [train.py:1114] (2/4) Epoch 3, batch 650, loss[loss=0.2772, simple_loss=0.3413, pruned_loss=0.1066, over 13545.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.3638, pruned_loss=0.1244, over 2543893.94 frames. ], batch size: 37, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:39,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=29076.666666666668, ans=0.125 +2024-08-03 03:29:47,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=29076.666666666668, ans=0.125 +2024-08-03 03:29:48,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. 
limit=6.0 +2024-08-03 03:30:02,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=29150.0, ans=0.04949747468305833 +2024-08-03 03:30:08,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=29186.666666666668, ans=0.125 +2024-08-03 03:30:13,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=29186.666666666668, ans=0.125 +2024-08-03 03:30:19,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=29223.333333333332, ans=0.125 +2024-08-03 03:30:20,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=29223.333333333332, ans=0.0 +2024-08-03 03:30:26,353 INFO [train.py:1114] (2/4) Epoch 3, batch 700, loss[loss=0.2815, simple_loss=0.34, pruned_loss=0.1116, over 13533.00 frames. ], tot_loss[loss=0.3076, simple_loss=0.365, pruned_loss=0.1251, over 2565329.54 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 16.0 +2024-08-03 03:31:26,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29370.0, ans=0.1 +2024-08-03 03:31:29,961 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.985e+01 1.488e+02 1.744e+02 2.083e+02 3.353e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-03 03:31:31,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=29406.666666666668, ans=0.125 +2024-08-03 03:31:33,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=29406.666666666668, ans=0.125 +2024-08-03 03:31:38,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29406.666666666668, ans=0.125 +2024-08-03 03:31:41,806 INFO [train.py:1114] (2/4) Epoch 3, batch 750, loss[loss=0.3148, simple_loss=0.3768, pruned_loss=0.1264, over 13352.00 frames. ], tot_loss[loss=0.3064, simple_loss=0.3642, pruned_loss=0.1243, over 2582852.56 frames. ], batch size: 37, lr: 3.46e-02, grad_scale: 16.0 +2024-08-03 03:36:28,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.55 vs. limit=6.0 +2024-08-03 03:38:17,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.94 vs. limit=22.5 +2024-08-03 03:38:18,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0 +2024-08-03 03:38:26,551 INFO [train.py:1114] (2/4) Epoch 3, batch 800, loss[loss=0.2639, simple_loss=0.3244, pruned_loss=0.1017, over 13322.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.364, pruned_loss=0.1244, over 2597317.16 frames. ], batch size: 33, lr: 3.46e-02, grad_scale: 32.0 +2024-08-03 03:38:41,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.68 vs. 
limit=10.0 +2024-08-03 03:38:45,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0 +2024-08-03 03:38:55,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=29736.666666666668, ans=0.125 +2024-08-03 03:38:58,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=29736.666666666668, ans=0.125 +2024-08-03 03:39:00,217 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.412e+02 1.629e+02 2.089e+02 3.471e+02, threshold=3.259e+02, percent-clipped=0.0 +2024-08-03 03:39:10,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=29773.333333333332, ans=0.0 +2024-08-03 03:39:12,362 INFO [train.py:1114] (2/4) Epoch 3, batch 850, loss[loss=0.3116, simple_loss=0.3821, pruned_loss=0.1205, over 13320.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3632, pruned_loss=0.1237, over 2609185.20 frames. ], batch size: 40, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:39:25,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. limit=15.0 +2024-08-03 03:39:29,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29883.333333333332, ans=0.1 +2024-08-03 03:39:34,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=29883.333333333332, ans=0.125 +2024-08-03 03:39:52,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=29920.0, ans=0.025 +2024-08-03 03:40:04,388 INFO [train.py:1114] (2/4) Epoch 3, batch 900, loss[loss=0.2624, simple_loss=0.3217, pruned_loss=0.1016, over 13336.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3631, pruned_loss=0.1238, over 2612480.63 frames. ], batch size: 33, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:40:12,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.32 vs. 
limit=22.5 +2024-08-03 03:40:34,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=30103.333333333332, ans=0.0 +2024-08-03 03:40:38,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30103.333333333332, ans=0.1 +2024-08-03 03:40:39,777 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.370e+02 1.651e+02 2.028e+02 4.342e+02, threshold=3.303e+02, percent-clipped=4.0 +2024-08-03 03:40:42,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=30140.0, ans=0.004317391304347827 +2024-08-03 03:40:45,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=30140.0, ans=0.2 +2024-08-03 03:40:46,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=30140.0, ans=0.0 +2024-08-03 03:40:51,072 INFO [train.py:1114] (2/4) Epoch 3, batch 950, loss[loss=0.265, simple_loss=0.3291, pruned_loss=0.1004, over 13528.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.3633, pruned_loss=0.1239, over 2612497.98 frames. ], batch size: 34, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:41:12,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=30250.0, ans=0.004293478260869565 +2024-08-03 03:41:13,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.96 vs. limit=10.0 +2024-08-03 03:41:13,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=30250.0, ans=0.125 +2024-08-03 03:41:15,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=30250.0, ans=0.09899494936611666 +2024-08-03 03:41:16,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=30250.0, ans=0.004293478260869565 +2024-08-03 03:41:19,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30286.666666666668, ans=0.125 +2024-08-03 03:41:35,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.86 vs. limit=10.0 +2024-08-03 03:41:40,379 INFO [train.py:1114] (2/4) Epoch 3, batch 1000, loss[loss=0.2894, simple_loss=0.3531, pruned_loss=0.1128, over 13362.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.364, pruned_loss=0.1242, over 2611436.13 frames. ], batch size: 35, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:41:47,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=30360.0, ans=0.04949747468305833 +2024-08-03 03:41:49,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. 
limit=22.5 +2024-08-03 03:42:06,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=30433.333333333332, ans=0.125 +2024-08-03 03:42:15,784 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.383e+02 1.605e+02 2.126e+02 5.573e+02, threshold=3.210e+02, percent-clipped=1.0 +2024-08-03 03:42:27,493 INFO [train.py:1114] (2/4) Epoch 3, batch 1050, loss[loss=0.2994, simple_loss=0.3679, pruned_loss=0.1155, over 13570.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3637, pruned_loss=0.1243, over 2616209.14 frames. ], batch size: 39, lr: 3.43e-02, grad_scale: 16.0 +2024-08-03 03:42:38,991 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.06 vs. limit=15.0 +2024-08-03 03:42:44,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=30616.666666666668, ans=0.125 +2024-08-03 03:42:45,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=30616.666666666668, ans=0.125 +2024-08-03 03:42:49,572 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:42:59,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=30653.333333333332, ans=0.125 +2024-08-03 03:43:11,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30690.0, ans=0.1 +2024-08-03 03:43:16,410 INFO [train.py:1114] (2/4) Epoch 3, batch 1100, loss[loss=0.2919, simple_loss=0.3537, pruned_loss=0.115, over 13553.00 frames. ], tot_loss[loss=0.3043, simple_loss=0.3623, pruned_loss=0.1231, over 2620690.88 frames. ], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:43:27,221 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:43:39,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30800.0, ans=0.1 +2024-08-03 03:43:44,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=30800.0, ans=0.004173913043478261 +2024-08-03 03:43:55,896 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.319e+02 1.473e+02 1.688e+02 2.812e+02, threshold=2.945e+02, percent-clipped=0.0 +2024-08-03 03:44:03,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=30873.333333333332, ans=0.2 +2024-08-03 03:44:06,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30910.0, ans=0.125 +2024-08-03 03:44:07,139 INFO [train.py:1114] (2/4) Epoch 3, batch 1150, loss[loss=0.3017, simple_loss=0.3612, pruned_loss=0.1211, over 13551.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.3627, pruned_loss=0.1234, over 2619410.63 frames. 
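
Note on the `scaling.py:1024` "Whitening" lines: each compares a covariance-uniformity metric of a module's activations against a limit, and the constraint only applies a corrective gradient when `metric` exceeds `limit`, which is why most entries sit below their limit and occasional ones trip it (e.g. metric=22.65 vs. limit=22.5 earlier in this span). One standard metric of this kind measures how unevenly energy is spread across eigenvalues of the per-group feature covariance, equal to 1.0 for perfectly white features; a sketch under that assumption (the exact scaling.py formula may differ):

import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    # x: (num_frames, num_channels). Metric = mean(eig^2) / mean(eig)^2 of
    # the per-group covariance; equals 1.0 iff all eigenvalues are equal.
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("tgi,tgj->gij", x, x) / num_frames
    eigs = torch.linalg.eigvalsh(cov)  # (num_groups, channels_per_group)
    metric = (eigs ** 2).mean(dim=1) / (eigs.mean(dim=1) ** 2 + 1e-20)
    return metric.mean()
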
], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:44:10,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=30910.0, ans=0.125 +2024-08-03 03:44:17,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30946.666666666668, ans=0.125 +2024-08-03 03:44:22,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=30946.666666666668, ans=0.004142028985507246 +2024-08-03 03:44:27,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=30983.333333333332, ans=0.09899494936611666 +2024-08-03 03:44:34,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=31020.0, ans=0.2 +2024-08-03 03:44:46,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=31056.666666666668, ans=0.1 +2024-08-03 03:45:00,371 INFO [train.py:1114] (2/4) Epoch 3, batch 1200, loss[loss=0.3333, simple_loss=0.3933, pruned_loss=0.1367, over 13565.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.3636, pruned_loss=0.1238, over 2617673.30 frames. ], batch size: 39, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:01,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=31093.333333333332, ans=0.125 +2024-08-03 03:45:12,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.94 vs. limit=22.5 +2024-08-03 03:45:15,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=12.0 +2024-08-03 03:45:37,478 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.400e+02 1.587e+02 2.006e+02 3.916e+02, threshold=3.173e+02, percent-clipped=5.0 +2024-08-03 03:45:42,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=31240.0, ans=0.125 +2024-08-03 03:45:48,704 INFO [train.py:1114] (2/4) Epoch 3, batch 1250, loss[loss=0.3291, simple_loss=0.3862, pruned_loss=0.136, over 13463.00 frames. ], tot_loss[loss=0.3056, simple_loss=0.364, pruned_loss=0.1237, over 2629209.89 frames. ], batch size: 42, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:49,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=31276.666666666668, ans=0.125 +2024-08-03 03:46:00,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31313.333333333332, ans=0.1 +2024-08-03 03:46:03,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=31313.333333333332, ans=0.125 +2024-08-03 03:46:04,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.03 vs. 
limit=22.5 +2024-08-03 03:46:06,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=31350.0, ans=0.1 +2024-08-03 03:46:19,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=31386.666666666668, ans=0.025 +2024-08-03 03:46:34,558 INFO [train.py:1114] (2/4) Epoch 3, batch 1300, loss[loss=0.3516, simple_loss=0.4013, pruned_loss=0.151, over 12954.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3624, pruned_loss=0.1226, over 2632173.67 frames. ], batch size: 52, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:46:42,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=31460.0, ans=0.5 +2024-08-03 03:46:42,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.15 vs. limit=22.5 +2024-08-03 03:46:43,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=31496.666666666668, ans=0.125 +2024-08-03 03:46:59,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=31533.333333333332, ans=0.05 +2024-08-03 03:47:12,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=31570.0, ans=0.0 +2024-08-03 03:47:14,536 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.368e+02 1.596e+02 1.808e+02 3.073e+02, threshold=3.191e+02, percent-clipped=0.0 +2024-08-03 03:47:20,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=31606.666666666668, ans=0.125 +2024-08-03 03:47:24,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=31643.333333333332, ans=0.2 +2024-08-03 03:47:25,334 INFO [train.py:1114] (2/4) Epoch 3, batch 1350, loss[loss=0.2718, simple_loss=0.3431, pruned_loss=0.1003, over 13540.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.3617, pruned_loss=0.122, over 2639098.69 frames. ], batch size: 37, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:47:31,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=31643.333333333332, ans=0.003990579710144927 +2024-08-03 03:47:33,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=31680.0, ans=0.125 +2024-08-03 03:48:08,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.55 vs. limit=6.0 +2024-08-03 03:48:11,342 INFO [train.py:1114] (2/4) Epoch 3, batch 1400, loss[loss=0.2962, simple_loss=0.347, pruned_loss=0.1227, over 13258.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3603, pruned_loss=0.1209, over 2642606.50 frames. 
], batch size: 31, lr: 3.39e-02, grad_scale: 8.0 +2024-08-03 03:48:33,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31900.0, ans=0.1 +2024-08-03 03:48:33,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=31900.0, ans=0.125 +2024-08-03 03:48:50,664 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.406e+02 1.620e+02 1.890e+02 3.890e+02, threshold=3.239e+02, percent-clipped=2.0 +2024-08-03 03:48:51,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-08-03 03:49:26,523 INFO [train.py:1114] (2/4) Epoch 3, batch 1450, loss[loss=0.3185, simple_loss=0.3826, pruned_loss=0.1272, over 13460.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3615, pruned_loss=0.1215, over 2641245.75 frames. ], batch size: 43, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:50:05,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=32046.666666666668, ans=0.0 +2024-08-03 03:50:09,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=32083.333333333332, ans=0.0038949275362318847 +2024-08-03 03:50:11,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=7.64 vs. limit=12.0 +2024-08-03 03:52:24,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=32120.0, ans=0.1 +2024-08-03 03:52:32,039 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=4.67 vs. limit=15.0 +2024-08-03 03:52:44,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.90 vs. limit=22.5 +2024-08-03 03:52:46,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.55 vs. limit=15.0 +2024-08-03 03:52:49,532 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.41 vs. limit=15.0 +2024-08-03 03:52:50,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=32193.333333333332, ans=0.125 +2024-08-03 03:52:50,975 INFO [train.py:1114] (2/4) Epoch 3, batch 1500, loss[loss=0.313, simple_loss=0.378, pruned_loss=0.124, over 13412.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3617, pruned_loss=0.1216, over 2641924.30 frames. 
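
Note on `grad_scale` in the batch summaries: this is the dynamic loss scale used for mixed-precision training. It halves when a step produces inf/nan gradients and grows back after a run of clean steps, which is why it oscillates between 8.0, 16.0, and 32.0 across this span. The behaviour matches PyTorch's GradScaler; schematically, with the constructor settings assumed rather than read from the recipe:

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0,
                                   growth_factor=2.0,
                                   backoff_factor=0.5,
                                   growth_interval=2000)  # assumed values

# Inside the training loop (schematic):
#   with torch.cuda.amp.autocast():
#       loss = compute_loss(model, batch)
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)
#   scaler.update()  # halves the scale on inf/nan grads, doubles it after
#                    # growth_interval consecutive clean steps
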
], batch size: 39, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:53:05,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32230.0, ans=0.125 +2024-08-03 03:53:11,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=32266.666666666668, ans=0.125 +2024-08-03 03:53:13,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=32266.666666666668, ans=0.003855072463768116 +2024-08-03 03:53:20,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=32303.333333333332, ans=0.125 +2024-08-03 03:53:29,876 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.393e+02 1.602e+02 2.004e+02 4.084e+02, threshold=3.204e+02, percent-clipped=1.0 +2024-08-03 03:53:30,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32340.0, ans=0.1 +2024-08-03 03:53:38,882 INFO [train.py:1114] (2/4) Epoch 3, batch 1550, loss[loss=0.3447, simple_loss=0.3905, pruned_loss=0.1494, over 13408.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3611, pruned_loss=0.1215, over 2631075.91 frames. ], batch size: 41, lr: 3.37e-02, grad_scale: 8.0 +2024-08-03 03:53:44,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=32376.666666666668, ans=0.0038311594202898548 +2024-08-03 03:53:45,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=32376.666666666668, ans=0.2 +2024-08-03 03:54:12,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32413.333333333332, ans=0.0 +2024-08-03 03:54:17,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-08-03 03:54:23,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.56 vs. limit=6.0 +2024-08-03 03:54:28,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.32 vs. limit=10.0 +2024-08-03 03:54:42,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=32523.333333333332, ans=0.125 +2024-08-03 03:54:44,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.08 vs. limit=6.0 +2024-08-03 03:54:49,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32523.333333333332, ans=0.125 +2024-08-03 03:54:57,313 INFO [train.py:1114] (2/4) Epoch 3, batch 1600, loss[loss=0.3074, simple_loss=0.3699, pruned_loss=0.1224, over 13579.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3616, pruned_loss=0.1226, over 2624197.17 frames. 
], batch size: 39, lr: 3.37e-02, grad_scale: 16.0 +2024-08-03 03:55:06,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=32560.0, ans=0.125 +2024-08-03 03:55:14,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=32596.666666666668, ans=0.125 +2024-08-03 03:55:32,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32670.0, ans=0.125 +2024-08-03 03:55:34,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=32670.0, ans=0.125 +2024-08-03 03:55:37,909 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.348e+02 1.511e+02 1.737e+02 4.413e+02, threshold=3.022e+02, percent-clipped=2.0 +2024-08-03 03:55:41,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=32706.666666666668, ans=0.025 +2024-08-03 03:55:48,041 INFO [train.py:1114] (2/4) Epoch 3, batch 1650, loss[loss=0.3024, simple_loss=0.3602, pruned_loss=0.1222, over 13331.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3608, pruned_loss=0.1221, over 2620853.84 frames. ], batch size: 40, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:55:49,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=32743.333333333332, ans=0.0 +2024-08-03 03:56:08,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=32816.666666666664, ans=0.2 +2024-08-03 03:56:14,820 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:56:41,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=32890.0, ans=0.125 +2024-08-03 03:56:43,052 INFO [train.py:1114] (2/4) Epoch 3, batch 1700, loss[loss=0.2532, simple_loss=0.3105, pruned_loss=0.09797, over 13259.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3593, pruned_loss=0.1211, over 2630392.90 frames. ], batch size: 31, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:56:44,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=32926.666666666664, ans=0.0 +2024-08-03 03:56:46,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.94 vs. limit=6.0 +2024-08-03 03:56:50,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.80 vs. limit=15.0 +2024-08-03 03:56:57,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.96 vs. limit=15.0 +2024-08-03 03:57:06,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=33000.0, ans=0.125 +2024-08-03 03:57:06,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.97 vs. 
limit=12.0 +2024-08-03 03:57:07,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.10 vs. limit=22.5 +2024-08-03 03:57:18,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=33036.666666666664, ans=0.0036876811594202903 +2024-08-03 03:57:19,661 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.348e+02 1.541e+02 1.805e+02 2.810e+02, threshold=3.082e+02, percent-clipped=0.0 +2024-08-03 03:57:20,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33073.333333333336, ans=0.1 +2024-08-03 03:57:34,247 INFO [train.py:1114] (2/4) Epoch 3, batch 1750, loss[loss=0.2772, simple_loss=0.3227, pruned_loss=0.1159, over 13529.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3588, pruned_loss=0.121, over 2633626.21 frames. ], batch size: 31, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:57:39,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33110.0, ans=0.1 +2024-08-03 03:57:46,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33146.666666666664, ans=0.0 +2024-08-03 03:57:51,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=2.97 vs. limit=15.0 +2024-08-03 03:57:57,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-08-03 03:58:06,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=33183.333333333336, ans=0.003655797101449275 +2024-08-03 03:58:36,899 INFO [train.py:1114] (2/4) Epoch 3, batch 1800, loss[loss=0.3417, simple_loss=0.3996, pruned_loss=0.1419, over 13559.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3592, pruned_loss=0.1212, over 2634153.74 frames. ], batch size: 38, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:58:40,574 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.47 vs. limit=15.0 +2024-08-03 03:58:41,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=33293.333333333336, ans=0.07 +2024-08-03 03:59:06,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=33366.666666666664, ans=0.025 +2024-08-03 03:59:13,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=33403.333333333336, ans=0.125 +2024-08-03 03:59:17,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.365e+02 1.589e+02 1.919e+02 3.211e+02, threshold=3.178e+02, percent-clipped=2.0 +2024-08-03 03:59:26,709 INFO [train.py:1114] (2/4) Epoch 3, batch 1850, loss[loss=0.2912, simple_loss=0.358, pruned_loss=0.1122, over 13387.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3589, pruned_loss=0.1203, over 2635849.92 frames. 
], batch size: 39, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 03:59:29,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33476.666666666664, ans=0.125 +2024-08-03 03:59:31,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33476.666666666664, ans=0.1 +2024-08-03 03:59:39,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.49 vs. limit=12.0 +2024-08-03 03:59:41,833 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=12.0 +2024-08-03 03:59:55,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.49 vs. limit=10.0 +2024-08-03 03:59:59,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33586.666666666664, ans=0.1 +2024-08-03 04:00:09,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=33586.666666666664, ans=0.125 +2024-08-03 04:00:22,377 INFO [train.py:1114] (2/4) Epoch 3, batch 1900, loss[loss=0.3187, simple_loss=0.3843, pruned_loss=0.1265, over 13324.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3607, pruned_loss=0.121, over 2638710.18 frames. ], batch size: 40, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 04:00:45,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33733.333333333336, ans=0.125 +2024-08-03 04:00:47,503 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:01:01,324 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.394e+02 1.539e+02 1.838e+02 3.320e+02, threshold=3.078e+02, percent-clipped=1.0 +2024-08-03 04:01:10,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.69 vs. limit=15.0 +2024-08-03 04:01:10,631 INFO [train.py:1114] (2/4) Epoch 3, batch 1950, loss[loss=0.2965, simple_loss=0.3505, pruned_loss=0.1212, over 13541.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3611, pruned_loss=0.1208, over 2645782.47 frames. ], batch size: 36, lr: 3.33e-02, grad_scale: 16.0 +2024-08-03 04:01:18,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=33843.333333333336, ans=0.0 +2024-08-03 04:01:22,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=33843.333333333336, ans=0.0 +2024-08-03 04:01:30,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=33880.0, ans=0.125 +2024-08-03 04:02:01,746 INFO [train.py:1114] (2/4) Epoch 3, batch 2000, loss[loss=0.2378, simple_loss=0.3019, pruned_loss=0.08685, over 13526.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3624, pruned_loss=0.122, over 2634297.60 frames. 
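
Note on the `lr` field: it decays smoothly within the epoch (3.54e-02 at the top of this span down to 3.34e-02 here) as a function of both batch index and epoch. Zipformer recipes conventionally use the "Eden" schedule, which decays as a -0.25 power in both; a sketch under that assumption, with illustrative constants:

def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Eden-style schedule (assumed here; lr_batches/lr_epochs are
    # illustrative defaults, not values read from this log).
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
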
], batch size: 31, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:02:09,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=34026.666666666664, ans=0.0034724637681159427 +2024-08-03 04:02:23,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=34063.333333333336, ans=0.2 +2024-08-03 04:02:32,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=34100.0, ans=0.125 +2024-08-03 04:02:33,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=34100.0, ans=0.2 +2024-08-03 04:02:34,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34100.0, ans=0.1 +2024-08-03 04:02:48,526 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.429e+02 1.657e+02 2.036e+02 4.223e+02, threshold=3.314e+02, percent-clipped=3.0 +2024-08-03 04:02:54,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.86 vs. limit=15.0 +2024-08-03 04:02:57,966 INFO [train.py:1114] (2/4) Epoch 3, batch 2050, loss[loss=0.2831, simple_loss=0.3361, pruned_loss=0.1151, over 13432.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3604, pruned_loss=0.1208, over 2632128.53 frames. ], batch size: 32, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:03:05,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34210.0, ans=0.125 +2024-08-03 04:03:05,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=34210.0, ans=0.04949747468305833 +2024-08-03 04:03:14,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=34246.666666666664, ans=0.125 +2024-08-03 04:03:25,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=34283.333333333336, ans=0.0 +2024-08-03 04:03:25,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=34283.333333333336, ans=0.1 +2024-08-03 04:03:34,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.68 vs. limit=15.0 +2024-08-03 04:03:39,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.14 vs. limit=15.0 +2024-08-03 04:03:43,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=34356.666666666664, ans=0.0034007246376811595 +2024-08-03 04:03:48,259 INFO [train.py:1114] (2/4) Epoch 3, batch 2100, loss[loss=0.2629, simple_loss=0.332, pruned_loss=0.0969, over 13548.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3591, pruned_loss=0.1199, over 2637562.80 frames. 
], batch size: 37, lr: 3.31e-02, grad_scale: 32.0 +2024-08-03 04:03:59,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=34393.333333333336, ans=0.125 +2024-08-03 04:04:00,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=34430.0, ans=0.2 +2024-08-03 04:04:06,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-08-03 04:04:08,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34430.0, ans=0.125 +2024-08-03 04:04:18,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.41 vs. limit=22.5 +2024-08-03 04:04:23,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.40 vs. limit=12.0 +2024-08-03 04:04:29,656 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.385e+02 1.595e+02 1.788e+02 2.690e+02, threshold=3.190e+02, percent-clipped=1.0 +2024-08-03 04:04:37,888 INFO [train.py:1114] (2/4) Epoch 3, batch 2150, loss[loss=0.3041, simple_loss=0.3541, pruned_loss=0.127, over 13583.00 frames. ], tot_loss[loss=0.2969, simple_loss=0.3573, pruned_loss=0.1183, over 2646330.42 frames. ], batch size: 36, lr: 3.31e-02, grad_scale: 16.0 +2024-08-03 04:05:20,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=34650.0, ans=0.0 +2024-08-03 04:05:25,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34686.666666666664, ans=0.1 +2024-08-03 04:05:32,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.76 vs. limit=15.0 +2024-08-03 04:05:38,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=34723.333333333336, ans=0.125 +2024-08-03 04:05:42,257 INFO [train.py:1114] (2/4) Epoch 3, batch 2200, loss[loss=0.3126, simple_loss=0.3805, pruned_loss=0.1223, over 13420.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3568, pruned_loss=0.1177, over 2644517.33 frames. ], batch size: 39, lr: 3.30e-02, grad_scale: 16.0 +2024-08-03 04:05:43,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=34760.0, ans=0.025 +2024-08-03 04:05:52,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=34796.666666666664, ans=0.125 +2024-08-03 04:05:59,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34796.666666666664, ans=0.125 +2024-08-03 04:05:59,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=34796.666666666664, ans=0.125 +2024-08-03 04:06:13,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. 
limit=6.0 +2024-08-03 04:06:14,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=34870.0, ans=0.125 +2024-08-03 04:06:24,638 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.404e+02 1.621e+02 1.995e+02 2.772e+02, threshold=3.241e+02, percent-clipped=0.0 +2024-08-03 04:06:33,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=34906.666666666664, ans=0.0 +2024-08-03 04:06:34,731 INFO [train.py:1114] (2/4) Epoch 3, batch 2250, loss[loss=0.2661, simple_loss=0.3433, pruned_loss=0.09447, over 13380.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3572, pruned_loss=0.1178, over 2642094.59 frames. ], batch size: 37, lr: 3.30e-02, grad_scale: 16.0 +2024-08-03 04:06:38,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.55 vs. limit=22.5 +2024-08-03 04:06:39,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=34943.333333333336, ans=0.0032731884057971014 +2024-08-03 04:06:40,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=34943.333333333336, ans=0.0 +2024-08-03 04:06:45,577 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.62 vs. limit=6.0 +2024-08-03 04:06:55,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.79 vs. limit=15.0 +2024-08-03 04:07:03,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=35053.333333333336, ans=0.0 +2024-08-03 04:07:03,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=35053.333333333336, ans=0.125 +2024-08-03 04:07:04,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=35053.333333333336, ans=0.025 +2024-08-03 04:07:11,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=35090.0, ans=0.04949747468305833 +2024-08-03 04:07:20,704 INFO [train.py:1114] (2/4) Epoch 3, batch 2300, loss[loss=0.2818, simple_loss=0.346, pruned_loss=0.1088, over 13571.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3558, pruned_loss=0.1176, over 2637836.29 frames. 
], batch size: 33, lr: 3.29e-02, grad_scale: 16.0 +2024-08-03 04:07:24,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=35126.666666666664, ans=0.0 +2024-08-03 04:07:31,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=35163.333333333336, ans=0.125 +2024-08-03 04:07:46,368 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:07:46,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=35200.0, ans=0.0 +2024-08-03 04:07:49,273 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:07:56,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=35236.666666666664, ans=0.2 +2024-08-03 04:08:01,049 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.453e+02 1.713e+02 2.224e+02 5.491e+02, threshold=3.425e+02, percent-clipped=5.0 +2024-08-03 04:08:09,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=35273.333333333336, ans=0.125 +2024-08-03 04:08:11,158 INFO [train.py:1114] (2/4) Epoch 3, batch 2350, loss[loss=0.2783, simple_loss=0.3489, pruned_loss=0.1038, over 13558.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3559, pruned_loss=0.1177, over 2639948.41 frames. ], batch size: 38, lr: 3.29e-02, grad_scale: 16.0 +2024-08-03 04:08:11,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=35310.0, ans=0.2 +2024-08-03 04:08:14,472 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.85 vs. limit=15.0 +2024-08-03 04:08:31,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=35383.333333333336, ans=0.025 +2024-08-03 04:08:39,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=35420.0, ans=0.09899494936611666 +2024-08-03 04:09:00,935 INFO [train.py:1114] (2/4) Epoch 3, batch 2400, loss[loss=0.2556, simple_loss=0.3129, pruned_loss=0.09914, over 13553.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.356, pruned_loss=0.1178, over 2641317.69 frames. ], batch size: 35, lr: 3.28e-02, grad_scale: 32.0 +2024-08-03 04:09:10,610 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.689e-02 +2024-08-03 04:09:18,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.79 vs. limit=15.0 +2024-08-03 04:09:34,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35603.333333333336, ans=0.125 +2024-08-03 04:09:38,727 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.322e+02 1.493e+02 1.831e+02 3.002e+02, threshold=2.985e+02, percent-clipped=0.0 +2024-08-03 04:09:44,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.65 vs. 
limit=22.5 +2024-08-03 04:09:47,369 INFO [train.py:1114] (2/4) Epoch 3, batch 2450, loss[loss=0.3193, simple_loss=0.3853, pruned_loss=0.1266, over 13358.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3578, pruned_loss=0.1189, over 2631838.19 frames. ], batch size: 37, lr: 3.28e-02, grad_scale: 32.0 +2024-08-03 04:09:54,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35676.666666666664, ans=0.1 +2024-08-03 04:09:55,167 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.58 vs. limit=22.5 +2024-08-03 04:10:05,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35713.333333333336, ans=0.1 +2024-08-03 04:10:09,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=35750.0, ans=0.125 +2024-08-03 04:10:11,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=35750.0, ans=0.2 +2024-08-03 04:10:17,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=35750.0, ans=0.0 +2024-08-03 04:10:17,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=15.0 +2024-08-03 04:10:22,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=35786.666666666664, ans=0.2 +2024-08-03 04:10:23,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.96 vs. limit=10.0 +2024-08-03 04:10:24,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=15.0 +2024-08-03 04:10:34,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=35823.333333333336, ans=0.025 +2024-08-03 04:10:34,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=35823.333333333336, ans=0.2 +2024-08-03 04:10:35,800 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:10:37,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.91 vs. limit=5.0 +2024-08-03 04:10:39,228 INFO [train.py:1114] (2/4) Epoch 3, batch 2500, loss[loss=0.3059, simple_loss=0.3719, pruned_loss=0.1199, over 13398.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3574, pruned_loss=0.1186, over 2636401.29 frames. ], batch size: 39, lr: 3.27e-02, grad_scale: 32.0 +2024-08-03 04:10:50,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35896.666666666664, ans=0.1 +2024-08-03 04:11:08,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. 
limit=6.0 +2024-08-03 04:11:13,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=35970.0, ans=0.125 +2024-08-03 04:11:15,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36006.666666666664, ans=0.125 +2024-08-03 04:11:15,828 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.323e+02 1.438e+02 1.681e+02 3.376e+02, threshold=2.876e+02, percent-clipped=2.0 +2024-08-03 04:11:26,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36043.333333333336, ans=0.125 +2024-08-03 04:11:27,368 INFO [train.py:1114] (2/4) Epoch 3, batch 2550, loss[loss=0.2869, simple_loss=0.3378, pruned_loss=0.118, over 13524.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3574, pruned_loss=0.1186, over 2638074.15 frames. ], batch size: 31, lr: 3.27e-02, grad_scale: 32.0 +2024-08-03 04:11:33,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=36043.333333333336, ans=0.125 +2024-08-03 04:11:39,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36080.0, ans=0.1 +2024-08-03 04:11:59,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=36116.666666666664, ans=0.2 +2024-08-03 04:12:17,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=36153.333333333336, ans=0.125 +2024-08-03 04:12:39,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.37 vs. limit=15.0 +2024-08-03 04:12:43,960 INFO [train.py:1114] (2/4) Epoch 3, batch 2600, loss[loss=0.2962, simple_loss=0.3561, pruned_loss=0.1181, over 13554.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3582, pruned_loss=0.119, over 2637315.60 frames. ], batch size: 36, lr: 3.26e-02, grad_scale: 32.0 +2024-08-03 04:13:39,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=36373.333333333336, ans=0.2 +2024-08-03 04:13:39,927 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.332e+02 1.510e+02 1.763e+02 2.662e+02, threshold=3.019e+02, percent-clipped=0.0 +2024-08-03 04:13:48,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=36373.333333333336, ans=0.125 +2024-08-03 04:13:49,891 INFO [train.py:1114] (2/4) Epoch 3, batch 2650, loss[loss=0.2999, simple_loss=0.3587, pruned_loss=0.1205, over 13311.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3591, pruned_loss=0.1197, over 2639580.37 frames. 
], batch size: 46, lr: 3.26e-02, grad_scale: 32.0 +2024-08-03 04:14:18,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=36483.333333333336, ans=0.125 +2024-08-03 04:14:41,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=36520.0, ans=0.0029304347826086957 +2024-08-03 04:14:42,920 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:14:50,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=36556.666666666664, ans=0.125 +2024-08-03 04:14:51,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.23 vs. limit=22.5 +2024-08-03 04:14:57,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-03 04:14:57,818 INFO [train.py:1114] (2/4) Epoch 3, batch 2700, loss[loss=0.2867, simple_loss=0.3545, pruned_loss=0.1094, over 13543.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3584, pruned_loss=0.1192, over 2636686.37 frames. ], batch size: 40, lr: 3.25e-02, grad_scale: 16.0 +2024-08-03 04:15:38,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=19.24 vs. limit=15.0 +2024-08-03 04:15:44,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36666.666666666664, ans=0.1 +2024-08-03 04:19:05,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=36703.333333333336, ans=0.0 +2024-08-03 04:19:06,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=36703.333333333336, ans=0.002890579710144927 +2024-08-03 04:19:06,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=36703.333333333336, ans=0.2 +2024-08-03 04:19:08,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.09 vs. limit=15.0 +2024-08-03 04:19:10,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=36703.333333333336, ans=0.125 +2024-08-03 04:19:15,073 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.378e+02 1.616e+02 1.965e+02 4.698e+02, threshold=3.232e+02, percent-clipped=3.0 +2024-08-03 04:19:22,261 INFO [train.py:1114] (2/4) Epoch 3, batch 2750, loss[loss=0.2904, simple_loss=0.3448, pruned_loss=0.118, over 13337.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3577, pruned_loss=0.1191, over 2635404.11 frames. 
], batch size: 34, lr: 3.24e-02, grad_scale: 16.0 +2024-08-03 04:19:23,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=36776.666666666664, ans=0.2 +2024-08-03 04:19:26,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36776.666666666664, ans=0.125 +2024-08-03 04:19:28,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.38 vs. limit=15.0 +2024-08-03 04:19:36,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=36813.333333333336, ans=0.04949747468305833 +2024-08-03 04:19:42,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36850.0, ans=0.1 +2024-08-03 04:19:47,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36850.0, ans=0.1 +2024-08-03 04:19:57,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=36886.666666666664, ans=0.2 +2024-08-03 04:20:08,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=36923.333333333336, ans=0.05 +2024-08-03 04:20:08,472 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.44 vs. limit=15.0 +2024-08-03 04:20:09,778 INFO [train.py:1114] (2/4) Epoch 3, batch 2800, loss[loss=0.3667, simple_loss=0.3979, pruned_loss=0.1678, over 8885.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3574, pruned_loss=0.1192, over 2626535.21 frames. ], batch size: 97, lr: 3.24e-02, grad_scale: 32.0 +2024-08-03 04:20:10,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.00 vs. limit=6.0 +2024-08-03 04:20:15,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.76 vs. limit=10.0 +2024-08-03 04:20:21,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=36996.666666666664, ans=0.125 +2024-08-03 04:20:21,617 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.89 vs. 
limit=15.0 +2024-08-03 04:20:26,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37033.333333333336, ans=0.1 +2024-08-03 04:20:27,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=37033.333333333336, ans=0.002818840579710144 +2024-08-03 04:20:37,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=37070.0, ans=0.125 +2024-08-03 04:20:42,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=37070.0, ans=0.0028108695652173917 +2024-08-03 04:20:46,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.98 vs. limit=15.0 +2024-08-03 04:20:46,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.344e+02 1.514e+02 1.782e+02 3.763e+02, threshold=3.028e+02, percent-clipped=1.0 +2024-08-03 04:20:48,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37106.666666666664, ans=0.1 +2024-08-03 04:20:53,528 INFO [train.py:1114] (2/4) Epoch 3, batch 2850, loss[loss=0.2735, simple_loss=0.3362, pruned_loss=0.1054, over 13356.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3584, pruned_loss=0.1202, over 2620726.23 frames. ], batch size: 35, lr: 3.23e-02, grad_scale: 32.0 +2024-08-03 04:20:57,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-03 04:21:08,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=37180.0, ans=0.125 +2024-08-03 04:21:08,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=37180.0, ans=0.125 +2024-08-03 04:21:37,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37290.0, ans=0.125 +2024-08-03 04:21:41,468 INFO [train.py:1114] (2/4) Epoch 3, batch 2900, loss[loss=0.3001, simple_loss=0.3581, pruned_loss=0.121, over 13377.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3597, pruned_loss=0.1205, over 2631865.23 frames. 
], batch size: 36, lr: 3.23e-02, grad_scale: 32.0 +2024-08-03 04:21:47,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37326.666666666664, ans=0.0 +2024-08-03 04:21:56,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37363.333333333336, ans=0.125 +2024-08-03 04:22:02,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=37400.0, ans=0.0027391304347826086 +2024-08-03 04:22:09,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=37436.666666666664, ans=0.125 +2024-08-03 04:22:13,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=37436.666666666664, ans=0.125 +2024-08-03 04:22:19,106 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.112e+02 1.360e+02 1.545e+02 1.848e+02 3.511e+02, threshold=3.091e+02, percent-clipped=1.0 +2024-08-03 04:22:23,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.04 vs. limit=15.0 +2024-08-03 04:22:26,076 INFO [train.py:1114] (2/4) Epoch 3, batch 2950, loss[loss=0.2699, simple_loss=0.3287, pruned_loss=0.1056, over 13341.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3585, pruned_loss=0.1202, over 2629136.20 frames. ], batch size: 34, lr: 3.22e-02, grad_scale: 32.0 +2024-08-03 04:22:26,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=37510.0, ans=0.5 +2024-08-03 04:22:41,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37546.666666666664, ans=0.1 +2024-08-03 04:22:43,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=37583.333333333336, ans=0.0 +2024-08-03 04:22:49,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37583.333333333336, ans=0.125 +2024-08-03 04:22:50,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37583.333333333336, ans=0.0 +2024-08-03 04:22:57,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37620.0, ans=0.125 +2024-08-03 04:23:02,892 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.24 vs. limit=6.0 +2024-08-03 04:23:11,264 INFO [train.py:1114] (2/4) Epoch 3, batch 3000, loss[loss=0.306, simple_loss=0.3645, pruned_loss=0.1238, over 13538.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3573, pruned_loss=0.119, over 2628109.52 frames. ], batch size: 37, lr: 3.22e-02, grad_scale: 16.0 +2024-08-03 04:23:11,265 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 04:23:49,061 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2357, simple_loss=0.3301, pruned_loss=0.07069, over 944034.00 frames. 
+2024-08-03 04:23:49,062 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 04:23:49,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=37693.333333333336, ans=0.035 +2024-08-03 04:24:02,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=37693.333333333336, ans=0.0 +2024-08-03 04:24:06,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37730.0, ans=0.0 +2024-08-03 04:24:40,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=37766.666666666664, ans=0.125 +2024-08-03 04:25:32,441 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.344e+02 1.558e+02 1.814e+02 2.891e+02, threshold=3.117e+02, percent-clipped=0.0 +2024-08-03 04:25:39,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.94 vs. limit=15.0 +2024-08-03 04:25:42,610 INFO [train.py:1114] (2/4) Epoch 3, batch 3050, loss[loss=0.2924, simple_loss=0.3529, pruned_loss=0.1159, over 13521.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3588, pruned_loss=0.1198, over 2626725.24 frames. ], batch size: 35, lr: 3.21e-02, grad_scale: 8.0 +2024-08-03 04:25:44,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=37876.666666666664, ans=0.2 +2024-08-03 04:25:54,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=37876.666666666664, ans=0.002635507246376812 +2024-08-03 04:26:59,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=38060.0, ans=0.125 +2024-08-03 04:27:00,306 INFO [train.py:1114] (2/4) Epoch 3, batch 3100, loss[loss=0.3798, simple_loss=0.4224, pruned_loss=0.1686, over 13372.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.358, pruned_loss=0.1191, over 2626440.37 frames. ], batch size: 46, lr: 3.21e-02, grad_scale: 8.0 +2024-08-03 04:27:20,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=38096.666666666664, ans=0.125 +2024-08-03 04:27:43,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=38170.0, ans=0.125 +2024-08-03 04:27:47,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=38170.0, ans=0.2 +2024-08-03 04:27:53,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.343e+02 1.458e+02 1.761e+02 2.606e+02, threshold=2.915e+02, percent-clipped=0.0 +2024-08-03 04:27:58,275 INFO [train.py:1114] (2/4) Epoch 3, batch 3150, loss[loss=0.3063, simple_loss=0.3711, pruned_loss=0.1207, over 12954.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3571, pruned_loss=0.1182, over 2628254.84 frames. 
], batch size: 48, lr: 3.20e-02, grad_scale: 8.0 +2024-08-03 04:27:59,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=38243.333333333336, ans=0.125 +2024-08-03 04:28:01,947 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.119e-03 +2024-08-03 04:28:07,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=38280.0, ans=0.2 +2024-08-03 04:28:08,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38280.0, ans=0.1 +2024-08-03 04:28:12,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=38280.0, ans=0.0 +2024-08-03 04:28:15,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=38316.666666666664, ans=0.05 +2024-08-03 04:28:42,456 INFO [train.py:1114] (2/4) Epoch 3, batch 3200, loss[loss=0.2922, simple_loss=0.3497, pruned_loss=0.1174, over 13546.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3561, pruned_loss=0.1175, over 2634560.29 frames. ], batch size: 37, lr: 3.20e-02, grad_scale: 16.0 +2024-08-03 04:29:01,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0 +2024-08-03 04:29:13,330 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:29:20,722 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.433e+02 1.626e+02 1.828e+02 2.707e+02, threshold=3.253e+02, percent-clipped=0.0 +2024-08-03 04:29:24,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=38573.333333333336, ans=0.0024840579710144924 +2024-08-03 04:29:26,759 INFO [train.py:1114] (2/4) Epoch 3, batch 3250, loss[loss=0.309, simple_loss=0.3733, pruned_loss=0.1223, over 13386.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3566, pruned_loss=0.1173, over 2638661.34 frames. ], batch size: 38, lr: 3.19e-02, grad_scale: 16.0 +2024-08-03 04:29:50,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=38683.333333333336, ans=0.125 +2024-08-03 04:29:50,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.95 vs. limit=15.0 +2024-08-03 04:29:53,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.61 vs. limit=22.5 +2024-08-03 04:30:01,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=38756.666666666664, ans=0.125 +2024-08-03 04:30:01,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. 
limit=6.0 +2024-08-03 04:30:02,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=38756.666666666664, ans=0.125 +2024-08-03 04:30:10,331 INFO [train.py:1114] (2/4) Epoch 3, batch 3300, loss[loss=0.3314, simple_loss=0.3994, pruned_loss=0.1317, over 12937.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3545, pruned_loss=0.116, over 2640239.08 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0 +2024-08-03 04:30:12,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38793.333333333336, ans=0.0 +2024-08-03 04:30:18,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38830.0, ans=0.1 +2024-08-03 04:30:25,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=38830.0, ans=0.2 +2024-08-03 04:30:44,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=38903.333333333336, ans=0.0 +2024-08-03 04:30:46,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=38940.0, ans=0.2 +2024-08-03 04:30:51,792 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.360e+02 1.548e+02 1.881e+02 7.173e+02, threshold=3.096e+02, percent-clipped=4.0 +2024-08-03 04:30:58,396 INFO [train.py:1114] (2/4) Epoch 3, batch 3350, loss[loss=0.3563, simple_loss=0.4105, pruned_loss=0.151, over 13051.00 frames. ], tot_loss[loss=0.2974, simple_loss=0.3578, pruned_loss=0.1185, over 2630078.19 frames. ], batch size: 48, lr: 3.18e-02, grad_scale: 16.0 +2024-08-03 04:31:00,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.79 vs. limit=22.5 +2024-08-03 04:31:11,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=39013.333333333336, ans=0.2 +2024-08-03 04:31:26,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=39050.0, ans=0.125 +2024-08-03 04:31:43,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=39086.666666666664, ans=0.0 +2024-08-03 04:31:59,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=39086.666666666664, ans=0.025 +2024-08-03 04:32:00,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=39086.666666666664, ans=0.125 +2024-08-03 04:32:07,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=39123.333333333336, ans=0.125 +2024-08-03 04:32:08,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=39123.333333333336, ans=0.0 +2024-08-03 04:32:21,172 INFO [train.py:1114] (2/4) Epoch 3, batch 3400, loss[loss=0.2719, simple_loss=0.3266, pruned_loss=0.1085, over 13553.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3569, pruned_loss=0.1181, over 2625844.39 frames. 
], batch size: 31, lr: 3.18e-02, grad_scale: 16.0 +2024-08-03 04:32:25,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=39160.0, ans=0.95 +2024-08-03 04:32:28,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.42 vs. limit=6.0 +2024-08-03 04:32:41,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=39233.333333333336, ans=0.0 +2024-08-03 04:32:46,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.02 vs. limit=22.5 +2024-08-03 04:32:51,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39270.0, ans=0.0 +2024-08-03 04:32:59,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39306.666666666664, ans=0.1 +2024-08-03 04:33:16,248 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:33:16,814 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.382e+02 1.601e+02 1.847e+02 2.492e+02, threshold=3.202e+02, percent-clipped=0.0 +2024-08-03 04:33:20,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=39343.333333333336, ans=0.0023166666666666665 +2024-08-03 04:33:21,223 INFO [train.py:1114] (2/4) Epoch 3, batch 3450, loss[loss=0.3005, simple_loss=0.3664, pruned_loss=0.1173, over 12860.00 frames. ], tot_loss[loss=0.2966, simple_loss=0.3571, pruned_loss=0.1181, over 2628769.37 frames. ], batch size: 52, lr: 3.17e-02, grad_scale: 8.0 +2024-08-03 04:33:26,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.28 vs. limit=22.5 +2024-08-03 04:33:37,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=39416.666666666664, ans=0.0 +2024-08-03 04:33:38,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39416.666666666664, ans=0.1 +2024-08-03 04:33:40,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39416.666666666664, ans=0.0 +2024-08-03 04:33:58,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39490.0, ans=0.1 +2024-08-03 04:34:02,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39490.0, ans=0.1 +2024-08-03 04:34:03,917 INFO [train.py:1114] (2/4) Epoch 3, batch 3500, loss[loss=0.278, simple_loss=0.3336, pruned_loss=0.1112, over 13515.00 frames. ], tot_loss[loss=0.2946, simple_loss=0.355, pruned_loss=0.1171, over 2631183.07 frames. 
], batch size: 34, lr: 3.17e-02, grad_scale: 8.0 +2024-08-03 04:34:09,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=39526.666666666664, ans=0.125 +2024-08-03 04:34:09,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=39526.666666666664, ans=0.125 +2024-08-03 04:34:41,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39673.333333333336, ans=0.125 +2024-08-03 04:34:42,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.446e+02 1.687e+02 2.018e+02 4.896e+02, threshold=3.374e+02, percent-clipped=2.0 +2024-08-03 04:34:46,981 INFO [train.py:1114] (2/4) Epoch 3, batch 3550, loss[loss=0.3395, simple_loss=0.3918, pruned_loss=0.1437, over 12478.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.358, pruned_loss=0.1187, over 2629095.63 frames. ], batch size: 59, lr: 3.16e-02, grad_scale: 8.0 +2024-08-03 04:35:01,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.35 vs. limit=10.0 +2024-08-03 04:35:04,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.87 vs. limit=8.0 +2024-08-03 04:35:05,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=39746.666666666664, ans=0.2 +2024-08-03 04:35:07,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39746.666666666664, ans=0.125 +2024-08-03 04:35:08,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=39746.666666666664, ans=0.0 +2024-08-03 04:35:14,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=39783.333333333336, ans=0.125 +2024-08-03 04:35:26,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=39820.0, ans=0.0 +2024-08-03 04:35:37,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=39856.666666666664, ans=0.125 +2024-08-03 04:35:40,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=39893.333333333336, ans=0.0 +2024-08-03 04:35:40,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39893.333333333336, ans=0.125 +2024-08-03 04:35:40,846 INFO [train.py:1114] (2/4) Epoch 3, batch 3600, loss[loss=0.3765, simple_loss=0.408, pruned_loss=0.1725, over 9262.00 frames. ], tot_loss[loss=0.307, simple_loss=0.364, pruned_loss=0.125, over 2489169.35 frames. ], batch size: 97, lr: 3.16e-02, grad_scale: 16.0 +2024-08-03 04:35:42,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=39893.333333333336, ans=10.0 +2024-08-03 04:35:51,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.70 vs. 
limit=6.0 +2024-08-03 04:35:52,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=39930.0, ans=0.0 +2024-08-03 04:35:58,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=39966.666666666664, ans=0.025 +2024-08-03 04:35:59,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=39966.666666666664, ans=0.0 +2024-08-03 04:36:08,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=40003.333333333336, ans=0.0021731884057971003 +2024-08-03 04:36:56,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.80 vs. limit=12.0 +2024-08-03 04:37:20,587 INFO [train.py:1114] (2/4) Epoch 4, batch 0, loss[loss=0.2896, simple_loss=0.3513, pruned_loss=0.114, over 13358.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3513, pruned_loss=0.114, over 13358.00 frames. ], batch size: 33, lr: 2.95e-02, grad_scale: 32.0 +2024-08-03 04:37:20,588 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 04:37:30,574 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2412, simple_loss=0.337, pruned_loss=0.07274, over 944034.00 frames. +2024-08-03 04:37:30,574 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 04:43:26,874 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.350e+02 1.466e+02 1.683e+02 2.712e+02, threshold=2.931e+02, percent-clipped=0.0 +2024-08-03 04:43:30,145 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:45:10,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=40117.0, ans=0.0 +2024-08-03 04:45:13,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40117.0, ans=0.1 +2024-08-03 04:45:17,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=15.0 +2024-08-03 04:45:21,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=40153.666666666664, ans=0.015 +2024-08-03 04:45:32,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.54 vs. limit=15.0 +2024-08-03 04:45:33,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40190.333333333336, ans=0.125 +2024-08-03 04:45:39,708 INFO [train.py:1114] (2/4) Epoch 4, batch 50, loss[loss=0.253, simple_loss=0.3168, pruned_loss=0.09457, over 13419.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3598, pruned_loss=0.12, over 577726.44 frames. ], batch size: 32, lr: 2.95e-02, grad_scale: 32.0 +2024-08-03 04:45:41,167 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. 
limit=15.0 +2024-08-03 04:45:53,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40263.666666666664, ans=0.1 +2024-08-03 04:46:01,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=40300.333333333336, ans=0.125 +2024-08-03 04:46:04,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40300.333333333336, ans=0.125 +2024-08-03 04:46:07,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40300.333333333336, ans=0.0 +2024-08-03 04:46:17,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40337.0, ans=0.1 +2024-08-03 04:46:22,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=40373.666666666664, ans=0.1 +2024-08-03 04:46:29,604 INFO [train.py:1114] (2/4) Epoch 4, batch 100, loss[loss=0.2948, simple_loss=0.3539, pruned_loss=0.1178, over 13533.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3589, pruned_loss=0.1187, over 1025509.29 frames. ], batch size: 35, lr: 2.94e-02, grad_scale: 32.0 +2024-08-03 04:46:30,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=40410.333333333336, ans=0.125 +2024-08-03 04:46:33,370 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.339e+02 1.516e+02 1.849e+02 3.720e+02, threshold=3.031e+02, percent-clipped=4.0 +2024-08-03 04:46:34,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.21 vs. limit=15.0 +2024-08-03 04:46:34,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=40410.333333333336, ans=0.125 +2024-08-03 04:46:41,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=40447.0, ans=0.2 +2024-08-03 04:46:42,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=40447.0, ans=0.125 +2024-08-03 04:46:51,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=40483.666666666664, ans=0.002068768115942029 +2024-08-03 04:46:53,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=40483.666666666664, ans=0.125 +2024-08-03 04:46:56,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=40483.666666666664, ans=10.0 +2024-08-03 04:47:11,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=40557.0, ans=0.125 +2024-08-03 04:47:12,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=40557.0, ans=0.0020528260869565225 +2024-08-03 04:47:17,274 INFO [train.py:1114] (2/4) Epoch 4, batch 150, loss[loss=0.2853, simple_loss=0.3342, pruned_loss=0.1182, over 13424.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3549, pruned_loss=0.1157, over 1386480.67 frames. 
], batch size: 32, lr: 2.94e-02, grad_scale: 32.0 +2024-08-03 04:47:34,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=40630.333333333336, ans=0.0 +2024-08-03 04:47:50,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40703.666666666664, ans=0.1 +2024-08-03 04:47:54,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40740.333333333336, ans=0.1 +2024-08-03 04:47:56,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=40740.333333333336, ans=0.035 +2024-08-03 04:48:04,766 INFO [train.py:1114] (2/4) Epoch 4, batch 200, loss[loss=0.3002, simple_loss=0.368, pruned_loss=0.1163, over 12459.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3527, pruned_loss=0.1143, over 1665442.95 frames. ], batch size: 58, lr: 2.93e-02, grad_scale: 16.0 +2024-08-03 04:48:07,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40777.0, ans=0.125 +2024-08-03 04:48:09,214 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.266e+02 1.437e+02 1.719e+02 2.508e+02, threshold=2.875e+02, percent-clipped=0.0 +2024-08-03 04:48:10,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=40777.0, ans=0.2 +2024-08-03 04:48:45,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=40923.666666666664, ans=0.05 +2024-08-03 04:48:47,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.55 vs. limit=6.0 +2024-08-03 04:48:50,643 INFO [train.py:1114] (2/4) Epoch 4, batch 250, loss[loss=0.2927, simple_loss=0.3584, pruned_loss=0.1135, over 13325.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3519, pruned_loss=0.1136, over 1884107.06 frames. ], batch size: 46, lr: 2.93e-02, grad_scale: 16.0 +2024-08-03 04:49:00,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.40 vs. limit=10.0 +2024-08-03 04:49:10,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.66 vs. limit=6.0 +2024-08-03 04:49:22,533 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:49:29,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=41107.0, ans=0.0 +2024-08-03 04:49:41,476 INFO [train.py:1114] (2/4) Epoch 4, batch 300, loss[loss=0.3128, simple_loss=0.377, pruned_loss=0.1243, over 13459.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3505, pruned_loss=0.1123, over 2050672.08 frames. 
], batch size: 42, lr: 2.92e-02, grad_scale: 16.0 +2024-08-03 04:49:46,023 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.331e+02 1.504e+02 1.895e+02 3.054e+02, threshold=3.007e+02, percent-clipped=2.0 +2024-08-03 04:50:06,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=41217.0, ans=0.125 +2024-08-03 04:50:20,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0 +2024-08-03 04:50:24,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=41290.333333333336, ans=0.09899494936611666 +2024-08-03 04:50:24,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=41290.333333333336, ans=0.125 +2024-08-03 04:50:27,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=41290.333333333336, ans=0.2 +2024-08-03 04:50:29,618 INFO [train.py:1114] (2/4) Epoch 4, batch 350, loss[loss=0.2283, simple_loss=0.2887, pruned_loss=0.08396, over 13600.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3505, pruned_loss=0.112, over 2181250.10 frames. ], batch size: 33, lr: 2.92e-02, grad_scale: 16.0 +2024-08-03 04:50:33,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=41327.0, ans=0.035 +2024-08-03 04:50:33,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41327.0, ans=0.125 +2024-08-03 04:50:44,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=41363.666666666664, ans=0.125 +2024-08-03 04:51:03,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=41437.0, ans=0.05 +2024-08-03 04:51:17,213 INFO [train.py:1114] (2/4) Epoch 4, batch 400, loss[loss=0.2619, simple_loss=0.3356, pruned_loss=0.09411, over 13344.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3492, pruned_loss=0.1111, over 2286253.86 frames. ], batch size: 37, lr: 2.91e-02, grad_scale: 32.0 +2024-08-03 04:51:21,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.350e+02 1.537e+02 1.828e+02 3.072e+02, threshold=3.074e+02, percent-clipped=1.0 +2024-08-03 04:51:28,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=41547.0, ans=0.0 +2024-08-03 04:51:31,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=41547.0, ans=0.125 +2024-08-03 04:51:31,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=41547.0, ans=0.125 +2024-08-03 04:51:46,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=41620.333333333336, ans=0.0018216666666666659 +2024-08-03 04:52:02,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.15 vs. 
limit=22.5 +2024-08-03 04:52:05,340 INFO [train.py:1114] (2/4) Epoch 4, batch 450, loss[loss=0.292, simple_loss=0.3576, pruned_loss=0.1132, over 13562.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3494, pruned_loss=0.1113, over 2359471.00 frames. ], batch size: 38, lr: 2.91e-02, grad_scale: 32.0 +2024-08-03 04:52:12,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=41693.666666666664, ans=0.0 +2024-08-03 04:52:20,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=41730.333333333336, ans=0.1 +2024-08-03 04:52:48,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.44 vs. limit=12.0 +2024-08-03 04:52:50,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=41840.333333333336, ans=0.2 +2024-08-03 04:52:58,627 INFO [train.py:1114] (2/4) Epoch 4, batch 500, loss[loss=0.2899, simple_loss=0.3608, pruned_loss=0.1095, over 13429.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3486, pruned_loss=0.1111, over 2425162.35 frames. ], batch size: 43, lr: 2.90e-02, grad_scale: 16.0 +2024-08-03 04:53:04,084 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.429e+02 1.668e+02 2.120e+02 3.628e+02, threshold=3.335e+02, percent-clipped=2.0 +2024-08-03 04:53:12,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-08-03 04:53:38,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=42023.666666666664, ans=0.0017339855072463772 +2024-08-03 04:53:47,140 INFO [train.py:1114] (2/4) Epoch 4, batch 550, loss[loss=0.3243, simple_loss=0.3804, pruned_loss=0.1341, over 13050.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.349, pruned_loss=0.1117, over 2467641.50 frames. ], batch size: 48, lr: 2.90e-02, grad_scale: 16.0 +2024-08-03 04:53:52,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=42060.333333333336, ans=0.2 +2024-08-03 04:53:55,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=42060.333333333336, ans=0.025 +2024-08-03 04:53:57,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=42097.0, ans=0.125 +2024-08-03 04:54:09,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=42133.666666666664, ans=0.0 +2024-08-03 04:54:14,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42133.666666666664, ans=0.1 +2024-08-03 04:54:22,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42170.333333333336, ans=0.125 +2024-08-03 04:54:32,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=42207.0, ans=0.125 +2024-08-03 04:54:35,679 INFO [train.py:1114] (2/4) Epoch 4, batch 600, loss[loss=0.3247, simple_loss=0.3888, pruned_loss=0.1303, over 13308.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3487, pruned_loss=0.1111, over 2508252.83 frames. 
], batch size: 46, lr: 2.90e-02, grad_scale: 8.0 +2024-08-03 04:54:38,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42243.666666666664, ans=0.1 +2024-08-03 04:54:42,099 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.301e+02 1.482e+02 1.829e+02 3.304e+02, threshold=2.963e+02, percent-clipped=0.0 +2024-08-03 04:54:43,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=42243.666666666664, ans=0.2 +2024-08-03 04:54:46,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0 +2024-08-03 04:54:51,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=42280.333333333336, ans=0.0016781884057971014 +2024-08-03 04:54:51,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.68 vs. limit=15.0 +2024-08-03 04:55:04,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=42317.0, ans=0.125 +2024-08-03 04:55:14,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=42353.666666666664, ans=0.04949747468305833 +2024-08-03 04:55:17,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=42390.333333333336, ans=0.00165427536231884 +2024-08-03 04:55:26,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=42427.0, ans=0.125 +2024-08-03 04:55:26,875 INFO [train.py:1114] (2/4) Epoch 4, batch 650, loss[loss=0.2783, simple_loss=0.3496, pruned_loss=0.1035, over 13543.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3468, pruned_loss=0.1099, over 2543135.61 frames. ], batch size: 37, lr: 2.89e-02, grad_scale: 8.0 +2024-08-03 04:55:43,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.89 vs. limit=15.0 +2024-08-03 04:55:44,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=42463.666666666664, ans=0.025 +2024-08-03 04:56:04,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42537.0, ans=0.125 +2024-08-03 04:56:04,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=42537.0, ans=0.0 +2024-08-03 04:56:09,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=18.16 vs. limit=15.0 +2024-08-03 04:56:19,614 INFO [train.py:1114] (2/4) Epoch 4, batch 700, loss[loss=0.2413, simple_loss=0.3162, pruned_loss=0.08318, over 13544.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3464, pruned_loss=0.1095, over 2565313.19 frames. 
], batch size: 35, lr: 2.89e-02, grad_scale: 8.0 +2024-08-03 04:56:26,130 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.281e+02 1.426e+02 1.623e+02 2.957e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 04:56:39,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=42647.0, ans=0.125 +2024-08-03 04:56:45,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.00 vs. limit=15.0 +2024-08-03 04:56:53,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0 +2024-08-03 04:57:09,841 INFO [train.py:1114] (2/4) Epoch 4, batch 750, loss[loss=0.317, simple_loss=0.3675, pruned_loss=0.1333, over 13353.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3465, pruned_loss=0.1095, over 2583148.39 frames. ], batch size: 37, lr: 2.88e-02, grad_scale: 8.0 +2024-08-03 04:57:23,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=42830.333333333336, ans=0.125 +2024-08-03 04:57:34,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42867.0, ans=0.1 +2024-08-03 04:57:53,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=42940.333333333336, ans=0.0 +2024-08-03 04:57:57,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.08 vs. limit=10.0 +2024-08-03 04:57:59,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=42977.0, ans=0.2 +2024-08-03 04:57:59,779 INFO [train.py:1114] (2/4) Epoch 4, batch 800, loss[loss=0.2673, simple_loss=0.3286, pruned_loss=0.103, over 13346.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.346, pruned_loss=0.1089, over 2596991.21 frames. ], batch size: 33, lr: 2.88e-02, grad_scale: 16.0 +2024-08-03 04:58:06,221 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.323e+02 1.556e+02 1.905e+02 4.049e+02, threshold=3.112e+02, percent-clipped=3.0 +2024-08-03 04:58:17,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=43050.333333333336, ans=0.0 +2024-08-03 04:58:38,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=43123.666666666664, ans=0.0 +2024-08-03 04:58:45,957 INFO [train.py:1114] (2/4) Epoch 4, batch 850, loss[loss=0.3221, simple_loss=0.387, pruned_loss=0.1286, over 13337.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3457, pruned_loss=0.1085, over 2609563.85 frames. ], batch size: 40, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 04:58:54,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43160.333333333336, ans=0.0 +2024-08-03 04:58:55,419 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.04 vs. 
limit=12.0 +2024-08-03 04:59:13,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43233.666666666664, ans=0.125 +2024-08-03 04:59:14,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=43233.666666666664, ans=0.09899494936611666 +2024-08-03 04:59:22,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=43270.333333333336, ans=0.125 +2024-08-03 04:59:34,792 INFO [train.py:1114] (2/4) Epoch 4, batch 900, loss[loss=0.2522, simple_loss=0.3192, pruned_loss=0.09256, over 13345.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3465, pruned_loss=0.1092, over 2612012.79 frames. ], batch size: 33, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 04:59:40,972 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.400e+02 1.608e+02 1.991e+02 3.200e+02, threshold=3.215e+02, percent-clipped=1.0 +2024-08-03 04:59:45,839 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 04:59:45,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=43380.333333333336, ans=0.0 +2024-08-03 04:59:53,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=43417.0, ans=0.0014310869565217384 +2024-08-03 04:59:54,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=43417.0, ans=0.125 +2024-08-03 04:59:57,134 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.59 vs. limit=22.5 +2024-08-03 05:00:04,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=43453.666666666664, ans=0.0 +2024-08-03 05:00:22,656 INFO [train.py:1114] (2/4) Epoch 4, batch 950, loss[loss=0.2752, simple_loss=0.3362, pruned_loss=0.1072, over 13521.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3462, pruned_loss=0.1089, over 2612673.01 frames. ], batch size: 34, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 05:00:22,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=43527.0, ans=0.1 +2024-08-03 05:01:11,443 INFO [train.py:1114] (2/4) Epoch 4, batch 1000, loss[loss=0.2443, simple_loss=0.32, pruned_loss=0.0843, over 13363.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3481, pruned_loss=0.1105, over 2611879.95 frames. 
], batch size: 35, lr: 2.86e-02, grad_scale: 16.0 +2024-08-03 05:01:17,820 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.299e+02 1.424e+02 1.610e+02 2.784e+02, threshold=2.848e+02, percent-clipped=0.0 +2024-08-03 05:01:22,638 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=1.598e-02 +2024-08-03 05:01:35,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=43783.666666666664, ans=0.125 +2024-08-03 05:01:45,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=43820.333333333336, ans=0.001343405797101448 +2024-08-03 05:01:57,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=43857.0, ans=0.07 +2024-08-03 05:01:59,669 INFO [train.py:1114] (2/4) Epoch 4, batch 1050, loss[loss=0.3072, simple_loss=0.3679, pruned_loss=0.1232, over 13583.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3477, pruned_loss=0.1106, over 2616251.85 frames. ], batch size: 39, lr: 2.86e-02, grad_scale: 16.0 +2024-08-03 05:02:04,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.60 vs. limit=22.5 +2024-08-03 05:02:07,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43893.666666666664, ans=0.1 +2024-08-03 05:02:09,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=43930.333333333336, ans=0.0013194927536231884 +2024-08-03 05:02:10,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43930.333333333336, ans=0.1 +2024-08-03 05:02:16,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43930.333333333336, ans=0.125 +2024-08-03 05:02:19,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=43967.0, ans=0.0 +2024-08-03 05:02:41,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44003.666666666664, ans=0.125 +2024-08-03 05:02:42,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.94 vs. limit=15.0 +2024-08-03 05:02:44,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=44040.333333333336, ans=0.0 +2024-08-03 05:02:45,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=44040.333333333336, ans=0.025 +2024-08-03 05:02:53,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=44040.333333333336, ans=10.0 +2024-08-03 05:02:54,675 INFO [train.py:1114] (2/4) Epoch 4, batch 1100, loss[loss=0.2551, simple_loss=0.32, pruned_loss=0.0951, over 13566.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3472, pruned_loss=0.1102, over 2619647.69 frames. 
], batch size: 36, lr: 2.85e-02, grad_scale: 16.0 +2024-08-03 05:03:00,988 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.374e+02 1.585e+02 1.899e+02 4.895e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-03 05:03:09,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.25 vs. limit=15.0 +2024-08-03 05:03:22,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=44187.0, ans=0.0012636956521739125 +2024-08-03 05:03:24,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=44187.0, ans=0.0012636956521739125 +2024-08-03 05:03:40,659 INFO [train.py:1114] (2/4) Epoch 4, batch 1150, loss[loss=0.2629, simple_loss=0.3305, pruned_loss=0.09767, over 13570.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3465, pruned_loss=0.1101, over 2619884.36 frames. ], batch size: 36, lr: 2.85e-02, grad_scale: 16.0 +2024-08-03 05:03:40,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=44260.333333333336, ans=0.0 +2024-08-03 05:03:46,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=44260.333333333336, ans=0.125 +2024-08-03 05:04:02,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.55 vs. limit=15.0 +2024-08-03 05:04:11,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=44370.333333333336, ans=0.0 +2024-08-03 05:04:20,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=44407.0, ans=0.0 +2024-08-03 05:04:32,853 INFO [train.py:1114] (2/4) Epoch 4, batch 1200, loss[loss=0.292, simple_loss=0.3559, pruned_loss=0.1141, over 13565.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3476, pruned_loss=0.1103, over 2617115.22 frames. ], batch size: 39, lr: 2.84e-02, grad_scale: 32.0 +2024-08-03 05:04:38,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.14 vs. 
limit=15.0 +2024-08-03 05:04:39,345 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.292e+02 1.456e+02 1.641e+02 3.622e+02, threshold=2.911e+02, percent-clipped=1.0 +2024-08-03 05:04:45,214 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:04:50,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=44517.0, ans=0.0 +2024-08-03 05:04:57,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=44517.0, ans=0.125 +2024-08-03 05:05:04,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44553.666666666664, ans=0.0 +2024-08-03 05:05:04,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=44553.666666666664, ans=0.125 +2024-08-03 05:06:14,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44590.333333333336, ans=0.1 +2024-08-03 05:06:15,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=12.0 +2024-08-03 05:06:15,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=44590.333333333336, ans=0.0 +2024-08-03 05:06:18,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=44590.333333333336, ans=0.05 +2024-08-03 05:06:21,861 INFO [train.py:1114] (2/4) Epoch 4, batch 1250, loss[loss=0.2456, simple_loss=0.3218, pruned_loss=0.08471, over 13463.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3474, pruned_loss=0.1097, over 2628501.73 frames. ], batch size: 42, lr: 2.84e-02, grad_scale: 32.0 +2024-08-03 05:06:30,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=44663.666666666664, ans=0.125 +2024-08-03 05:06:37,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=44663.666666666664, ans=0.2 +2024-08-03 05:06:39,957 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:06:46,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=44700.333333333336, ans=0.2 +2024-08-03 05:06:48,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44700.333333333336, ans=0.1 +2024-08-03 05:07:08,976 INFO [train.py:1114] (2/4) Epoch 4, batch 1300, loss[loss=0.3307, simple_loss=0.3982, pruned_loss=0.1315, over 12986.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3473, pruned_loss=0.1098, over 2631038.45 frames. ], batch size: 52, lr: 2.84e-02, grad_scale: 16.0 +2024-08-03 05:07:18,017 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.334e+02 1.637e+02 2.034e+02 3.739e+02, threshold=3.274e+02, percent-clipped=6.0 +2024-08-03 05:07:35,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.41 vs. 
limit=12.0 +2024-08-03 05:07:38,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44883.666666666664, ans=0.1 +2024-08-03 05:07:43,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=44920.333333333336, ans=0.07 +2024-08-03 05:07:53,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=44957.0, ans=0.125 +2024-08-03 05:08:01,434 INFO [train.py:1114] (2/4) Epoch 4, batch 1350, loss[loss=0.2684, simple_loss=0.3351, pruned_loss=0.1009, over 13548.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3476, pruned_loss=0.1096, over 2637310.65 frames. ], batch size: 37, lr: 2.83e-02, grad_scale: 8.0 +2024-08-03 05:08:12,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=45030.333333333336, ans=0.5 +2024-08-03 05:08:35,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=15.0 +2024-08-03 05:08:42,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=45140.333333333336, ans=0.125 +2024-08-03 05:08:48,836 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:08:50,554 INFO [train.py:1114] (2/4) Epoch 4, batch 1400, loss[loss=0.269, simple_loss=0.3184, pruned_loss=0.1098, over 13251.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3472, pruned_loss=0.1096, over 2641290.89 frames. ], batch size: 31, lr: 2.83e-02, grad_scale: 8.0 +2024-08-03 05:08:52,527 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:08:58,652 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.343e+02 1.530e+02 1.906e+02 3.012e+02, threshold=3.060e+02, percent-clipped=0.0 +2024-08-03 05:08:59,861 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:09:13,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=45250.333333333336, ans=0.0010325362318840577 +2024-08-03 05:09:25,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=45287.0, ans=0.125 +2024-08-03 05:09:26,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=45287.0, ans=0.125 +2024-08-03 05:09:33,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=45323.666666666664, ans=0.2 +2024-08-03 05:09:36,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=45323.666666666664, ans=0.0010165942028985513 +2024-08-03 05:09:38,877 INFO [train.py:1114] (2/4) Epoch 4, batch 1450, loss[loss=0.3079, simple_loss=0.3775, pruned_loss=0.1192, over 13404.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3459, pruned_loss=0.1086, over 2639868.83 frames. 
], batch size: 43, lr: 2.82e-02, grad_scale: 8.0 +2024-08-03 05:09:43,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=45360.333333333336, ans=0.125 +2024-08-03 05:09:50,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-08-03 05:10:12,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=45470.333333333336, ans=0.000984710144927535 +2024-08-03 05:10:28,999 INFO [train.py:1114] (2/4) Epoch 4, batch 1500, loss[loss=0.2868, simple_loss=0.3524, pruned_loss=0.1106, over 13400.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.346, pruned_loss=0.1085, over 2640309.63 frames. ], batch size: 39, lr: 2.82e-02, grad_scale: 8.0 +2024-08-03 05:10:31,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.63 vs. limit=10.0 +2024-08-03 05:10:37,541 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.331e+02 1.463e+02 1.698e+02 3.158e+02, threshold=2.927e+02, percent-clipped=1.0 +2024-08-03 05:10:44,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=45580.333333333336, ans=0.2 +2024-08-03 05:10:45,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=45580.333333333336, ans=0.125 +2024-08-03 05:10:50,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=45617.0, ans=0.125 +2024-08-03 05:11:06,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.52 vs. limit=15.0 +2024-08-03 05:11:23,506 INFO [train.py:1114] (2/4) Epoch 4, batch 1550, loss[loss=0.2819, simple_loss=0.3592, pruned_loss=0.1023, over 13388.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3462, pruned_loss=0.1087, over 2630561.07 frames. ], batch size: 41, lr: 2.81e-02, grad_scale: 8.0 +2024-08-03 05:11:35,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45763.666666666664, ans=0.1 +2024-08-03 05:11:41,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45800.333333333336, ans=0.1 +2024-08-03 05:11:42,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=45800.333333333336, ans=0.125 +2024-08-03 05:11:44,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=45800.333333333336, ans=0.0009129710144927528 +2024-08-03 05:12:21,099 INFO [train.py:1114] (2/4) Epoch 4, batch 1600, loss[loss=0.288, simple_loss=0.3625, pruned_loss=0.1068, over 13571.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3466, pruned_loss=0.1094, over 2624500.96 frames. 
], batch size: 39, lr: 2.81e-02, grad_scale: 16.0 +2024-08-03 05:12:23,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=45910.333333333336, ans=0.0008890579710144932 +2024-08-03 05:12:23,311 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:12:26,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45910.333333333336, ans=0.1 +2024-08-03 05:12:30,802 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.396e+02 1.598e+02 1.877e+02 3.901e+02, threshold=3.195e+02, percent-clipped=2.0 +2024-08-03 05:12:31,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=45947.0, ans=0.0008810869565217382 +2024-08-03 05:12:36,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45947.0, ans=0.125 +2024-08-03 05:12:57,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=46020.333333333336, ans=0.125 +2024-08-03 05:13:10,777 INFO [train.py:1114] (2/4) Epoch 4, batch 1650, loss[loss=0.281, simple_loss=0.3571, pruned_loss=0.1024, over 13318.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3465, pruned_loss=0.1096, over 2622279.68 frames. ], batch size: 40, lr: 2.81e-02, grad_scale: 16.0 +2024-08-03 05:13:50,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=46203.666666666664, ans=0.95 +2024-08-03 05:14:01,238 INFO [train.py:1114] (2/4) Epoch 4, batch 1700, loss[loss=0.2726, simple_loss=0.3207, pruned_loss=0.1122, over 13271.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3456, pruned_loss=0.1088, over 2630774.71 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0 +2024-08-03 05:14:01,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=46277.0, ans=0.125 +2024-08-03 05:14:09,449 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.356e+02 1.607e+02 2.015e+02 3.197e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-03 05:14:11,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=46313.666666666664, ans=0.125 +2024-08-03 05:14:15,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=46313.666666666664, ans=0.125 +2024-08-03 05:14:36,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=46387.0, ans=0.125 +2024-08-03 05:14:47,569 INFO [train.py:1114] (2/4) Epoch 4, batch 1750, loss[loss=0.238, simple_loss=0.299, pruned_loss=0.08849, over 13563.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3449, pruned_loss=0.1085, over 2634118.20 frames. 
], batch size: 31, lr: 2.80e-02, grad_scale: 16.0 +2024-08-03 05:14:58,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=46497.0, ans=0.125 +2024-08-03 05:15:01,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=46497.0, ans=0.125 +2024-08-03 05:15:16,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=46533.666666666664, ans=0.0 +2024-08-03 05:15:21,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46570.333333333336, ans=0.125 +2024-08-03 05:15:21,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=46570.333333333336, ans=0.0 +2024-08-03 05:18:21,912 INFO [train.py:1114] (2/4) Epoch 4, batch 1800, loss[loss=0.26, simple_loss=0.3318, pruned_loss=0.09413, over 13553.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3448, pruned_loss=0.1082, over 2635386.25 frames. ], batch size: 38, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:18:23,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=46643.666666666664, ans=0.125 +2024-08-03 05:18:23,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46643.666666666664, ans=0.125 +2024-08-03 05:18:27,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=46643.666666666664, ans=0.0007296376811594205 +2024-08-03 05:18:30,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.378e+02 1.581e+02 2.012e+02 3.618e+02, threshold=3.161e+02, percent-clipped=2.0 +2024-08-03 05:19:09,837 INFO [train.py:1114] (2/4) Epoch 4, batch 1850, loss[loss=0.2969, simple_loss=0.3706, pruned_loss=0.1116, over 13408.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.345, pruned_loss=0.1083, over 2638278.87 frames. ], batch size: 39, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:19:20,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=46863.666666666664, ans=0.0006818115942028996 +2024-08-03 05:19:42,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=46937.0, ans=0.025 +2024-08-03 05:19:52,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=46973.666666666664, ans=0.5 +2024-08-03 05:19:58,277 INFO [train.py:1114] (2/4) Epoch 4, batch 1900, loss[loss=0.2788, simple_loss=0.3536, pruned_loss=0.102, over 13335.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3457, pruned_loss=0.1083, over 2640282.29 frames. ], batch size: 40, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:20:06,388 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.297e+02 1.477e+02 1.706e+02 2.975e+02, threshold=2.953e+02, percent-clipped=0.0 +2024-08-03 05:20:08,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.54 vs. 
limit=22.5 +2024-08-03 05:20:32,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.21 vs. limit=15.0 +2024-08-03 05:20:32,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=47120.333333333336, ans=0.0 +2024-08-03 05:20:45,534 INFO [train.py:1114] (2/4) Epoch 4, batch 1950, loss[loss=0.2641, simple_loss=0.3431, pruned_loss=0.09256, over 13553.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3465, pruned_loss=0.1082, over 2646994.82 frames. ], batch size: 36, lr: 2.78e-02, grad_scale: 16.0 +2024-08-03 05:20:45,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=47193.666666666664, ans=0.125 +2024-08-03 05:20:48,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.26 vs. limit=15.0 +2024-08-03 05:20:58,965 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:21:00,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=47230.333333333336, ans=0.125 +2024-08-03 05:21:03,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=15.0 +2024-08-03 05:21:10,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47267.0, ans=0.1 +2024-08-03 05:21:18,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=47303.666666666664, ans=0.125 +2024-08-03 05:21:34,018 INFO [train.py:1114] (2/4) Epoch 4, batch 2000, loss[loss=0.2409, simple_loss=0.3022, pruned_loss=0.08977, over 13540.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.348, pruned_loss=0.1093, over 2636197.02 frames. ], batch size: 31, lr: 2.78e-02, grad_scale: 32.0 +2024-08-03 05:21:40,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=47377.0, ans=0.035 +2024-08-03 05:21:42,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.720e+01 1.383e+02 1.598e+02 1.904e+02 4.710e+02, threshold=3.195e+02, percent-clipped=1.0 +2024-08-03 05:21:42,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=47413.666666666664, ans=0.125 +2024-08-03 05:22:02,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47487.0, ans=0.125 +2024-08-03 05:22:09,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47487.0, ans=0.1 +2024-08-03 05:22:20,555 INFO [train.py:1114] (2/4) Epoch 4, batch 2050, loss[loss=0.207, simple_loss=0.2799, pruned_loss=0.06703, over 13423.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3463, pruned_loss=0.1087, over 2633857.60 frames. 
], batch size: 32, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:22:37,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47597.0, ans=0.125 +2024-08-03 05:22:37,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=47597.0, ans=0.05 +2024-08-03 05:23:51,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=47707.0, ans=0.0004984782608695656 +2024-08-03 05:23:51,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47707.0, ans=0.1 +2024-08-03 05:23:56,643 INFO [train.py:1114] (2/4) Epoch 4, batch 2100, loss[loss=0.281, simple_loss=0.3514, pruned_loss=0.1052, over 13556.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3445, pruned_loss=0.1071, over 2638725.66 frames. ], batch size: 37, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:23:57,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=47743.666666666664, ans=0.125 +2024-08-03 05:23:59,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=47743.666666666664, ans=0.125 +2024-08-03 05:24:06,857 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.313e+02 1.529e+02 1.934e+02 3.413e+02, threshold=3.058e+02, percent-clipped=1.0 +2024-08-03 05:24:08,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=47780.333333333336, ans=0.125 +2024-08-03 05:24:20,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=47817.0, ans=0.2 +2024-08-03 05:24:26,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.77 vs. limit=15.0 +2024-08-03 05:24:30,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.22 vs. limit=15.0 +2024-08-03 05:24:31,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=47853.666666666664, ans=0.0 +2024-08-03 05:24:47,090 INFO [train.py:1114] (2/4) Epoch 4, batch 2150, loss[loss=0.2348, simple_loss=0.3091, pruned_loss=0.08024, over 13552.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.343, pruned_loss=0.1063, over 2647173.72 frames. ], batch size: 36, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:24:47,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.98 vs. limit=10.0 +2024-08-03 05:24:50,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.56 vs. limit=22.5 +2024-08-03 05:24:57,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.80 vs. limit=10.0 +2024-08-03 05:25:05,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.43 vs. 
limit=15.0 +2024-08-03 05:25:15,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=48037.0, ans=0.0 +2024-08-03 05:25:27,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=48073.666666666664, ans=0.0 +2024-08-03 05:25:29,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=48073.666666666664, ans=15.0 +2024-08-03 05:25:33,690 INFO [train.py:1114] (2/4) Epoch 4, batch 2200, loss[loss=0.283, simple_loss=0.3481, pruned_loss=0.1089, over 13393.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3431, pruned_loss=0.1063, over 2644739.44 frames. ], batch size: 39, lr: 2.76e-02, grad_scale: 32.0 +2024-08-03 05:25:35,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.63 vs. limit=12.0 +2024-08-03 05:25:37,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48110.333333333336, ans=0.125 +2024-08-03 05:25:42,108 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.479e+02 1.728e+02 2.109e+02 3.412e+02, threshold=3.456e+02, percent-clipped=2.0 +2024-08-03 05:25:50,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=48147.0, ans=0.07 +2024-08-03 05:26:14,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=48257.0, ans=0.00037891304347826074 +2024-08-03 05:26:22,235 INFO [train.py:1114] (2/4) Epoch 4, batch 2250, loss[loss=0.2668, simple_loss=0.3435, pruned_loss=0.09506, over 13365.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3422, pruned_loss=0.1057, over 2642288.43 frames. ], batch size: 37, lr: 2.76e-02, grad_scale: 32.0 +2024-08-03 05:26:23,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0 +2024-08-03 05:26:28,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=48293.666666666664, ans=0.025 +2024-08-03 05:26:37,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=48330.333333333336, ans=0.125 +2024-08-03 05:26:56,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48403.666666666664, ans=0.1 +2024-08-03 05:27:02,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=48440.333333333336, ans=0.0 +2024-08-03 05:27:02,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48440.333333333336, ans=0.125 +2024-08-03 05:27:17,461 INFO [train.py:1114] (2/4) Epoch 4, batch 2300, loss[loss=0.237, simple_loss=0.3, pruned_loss=0.08701, over 13578.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3415, pruned_loss=0.1062, over 2638270.80 frames. 
], batch size: 33, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:27:38,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=48477.0, ans=0.0 +2024-08-03 05:27:38,605 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.390e+02 1.580e+02 1.913e+02 3.341e+02, threshold=3.160e+02, percent-clipped=0.0 +2024-08-03 05:27:43,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=48513.666666666664, ans=0.05 +2024-08-03 05:27:46,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.40 vs. limit=15.0 +2024-08-03 05:27:59,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.35 vs. limit=22.5 +2024-08-03 05:28:16,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=48623.666666666664, ans=0.125 +2024-08-03 05:28:18,344 INFO [train.py:1114] (2/4) Epoch 4, batch 2350, loss[loss=0.2811, simple_loss=0.3499, pruned_loss=0.1062, over 13553.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3415, pruned_loss=0.1058, over 2640875.97 frames. ], batch size: 38, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:28:20,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=48660.333333333336, ans=0.125 +2024-08-03 05:28:25,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48660.333333333336, ans=0.1 +2024-08-03 05:28:36,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48697.0, ans=0.125 +2024-08-03 05:28:40,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=48733.666666666664, ans=0.00027528985507246397 +2024-08-03 05:28:47,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=48733.666666666664, ans=0.0 +2024-08-03 05:29:35,509 INFO [train.py:1114] (2/4) Epoch 4, batch 2400, loss[loss=0.1996, simple_loss=0.2816, pruned_loss=0.05877, over 13536.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3428, pruned_loss=0.106, over 2642618.43 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:29:51,920 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.312e+02 1.493e+02 1.944e+02 3.513e+02, threshold=2.987e+02, percent-clipped=1.0 +2024-08-03 05:30:26,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.72 vs. limit=15.0 +2024-08-03 05:30:46,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=48953.666666666664, ans=0.125 +2024-08-03 05:31:15,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=48990.333333333336, ans=0.0 +2024-08-03 05:31:38,652 INFO [train.py:1114] (2/4) Epoch 4, batch 2450, loss[loss=0.2686, simple_loss=0.3548, pruned_loss=0.09119, over 13357.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3445, pruned_loss=0.1071, over 2632611.41 frames. 
], batch size: 37, lr: 2.74e-02, grad_scale: 32.0 +2024-08-03 05:31:46,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=49027.0, ans=0.125 +2024-08-03 05:31:49,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=49027.0, ans=0.0 +2024-08-03 05:31:53,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49063.666666666664, ans=0.125 +2024-08-03 05:31:58,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=49063.666666666664, ans=0.125 +2024-08-03 05:32:13,030 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:32:22,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49137.0, ans=0.125 +2024-08-03 05:32:27,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=49173.666666666664, ans=0.025 +2024-08-03 05:32:30,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-08-03 05:32:32,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=15.0 +2024-08-03 05:32:34,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=49173.666666666664, ans=0.2 +2024-08-03 05:32:35,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=49210.333333333336, ans=0.125 +2024-08-03 05:32:36,606 INFO [train.py:1114] (2/4) Epoch 4, batch 2500, loss[loss=0.3091, simple_loss=0.3734, pruned_loss=0.1224, over 13387.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3434, pruned_loss=0.1062, over 2636955.49 frames. ], batch size: 39, lr: 2.74e-02, grad_scale: 32.0 +2024-08-03 05:32:39,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=49210.333333333336, ans=0.0001716666666666672 +2024-08-03 05:32:44,461 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.392e+02 1.612e+02 1.907e+02 3.604e+02, threshold=3.223e+02, percent-clipped=4.0 +2024-08-03 05:33:35,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.84 vs. limit=15.0 +2024-08-03 05:33:44,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=49393.666666666664, ans=0.0 +2024-08-03 05:33:44,740 INFO [train.py:1114] (2/4) Epoch 4, batch 2550, loss[loss=0.2348, simple_loss=0.296, pruned_loss=0.08678, over 13532.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.342, pruned_loss=0.105, over 2638940.76 frames. 
], batch size: 31, lr: 2.73e-02, grad_scale: 32.0 +2024-08-03 05:34:01,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=49430.333333333336, ans=0.035 +2024-08-03 05:34:32,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49540.333333333336, ans=0.1 +2024-08-03 05:34:36,331 INFO [train.py:1114] (2/4) Epoch 4, batch 2600, loss[loss=0.249, simple_loss=0.325, pruned_loss=0.08644, over 13564.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3431, pruned_loss=0.1058, over 2638153.91 frames. ], batch size: 36, lr: 2.73e-02, grad_scale: 32.0 +2024-08-03 05:34:44,131 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.359e+02 1.570e+02 1.941e+02 3.532e+02, threshold=3.140e+02, percent-clipped=1.0 +2024-08-03 05:34:55,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.84 vs. limit=15.0 +2024-08-03 05:35:15,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0 +2024-08-03 05:35:24,688 INFO [train.py:1114] (2/4) Epoch 4, batch 2650, loss[loss=0.2777, simple_loss=0.3458, pruned_loss=0.1048, over 13301.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3426, pruned_loss=0.1053, over 2640770.07 frames. ], batch size: 46, lr: 2.73e-02, grad_scale: 16.0 +2024-08-03 05:35:56,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=49870.333333333336, ans=0.125 +2024-08-03 05:36:08,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=49907.0, ans=0.125 +2024-08-03 05:36:11,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=49907.0, ans=0.0 +2024-08-03 05:36:14,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0 +2024-08-03 05:36:15,381 INFO [train.py:1114] (2/4) Epoch 4, batch 2700, loss[loss=0.3048, simple_loss=0.3701, pruned_loss=0.1198, over 13536.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3432, pruned_loss=0.1057, over 2637106.19 frames. ], batch size: 40, lr: 2.72e-02, grad_scale: 16.0 +2024-08-03 05:36:20,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-08-03 05:36:24,106 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.340e+02 1.529e+02 1.834e+02 2.682e+02, threshold=3.057e+02, percent-clipped=0.0 +2024-08-03 05:36:30,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=49980.333333333336, ans=0.0 +2024-08-03 05:36:48,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=50053.666666666664, ans=0.2 +2024-08-03 05:36:56,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.71 vs. 
limit=15.0 +2024-08-03 05:37:03,511 INFO [train.py:1114] (2/4) Epoch 4, batch 2750, loss[loss=0.2988, simple_loss=0.3512, pruned_loss=0.1232, over 13346.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3428, pruned_loss=0.1059, over 2635282.49 frames. ], batch size: 34, lr: 2.72e-02, grad_scale: 16.0 +2024-08-03 05:37:14,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.25 vs. limit=15.0 +2024-08-03 05:37:27,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=50200.333333333336, ans=0.0 +2024-08-03 05:37:45,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.05 vs. limit=22.5 +2024-08-03 05:37:46,912 INFO [train.py:1114] (2/4) Epoch 4, batch 2800, loss[loss=0.3333, simple_loss=0.3758, pruned_loss=0.1455, over 9159.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3434, pruned_loss=0.1066, over 2626958.36 frames. ], batch size: 96, lr: 2.72e-02, grad_scale: 32.0 +2024-08-03 05:37:55,719 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.137e+02 1.473e+02 1.737e+02 2.107e+02 3.108e+02, threshold=3.473e+02, percent-clipped=1.0 +2024-08-03 05:38:16,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=50420.333333333336, ans=0.0 +2024-08-03 05:38:21,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.81 vs. limit=15.0 +2024-08-03 05:38:22,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=50457.0, ans=0.125 +2024-08-03 05:38:25,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=23.38 vs. limit=15.0 +2024-08-03 05:38:28,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50457.0, ans=0.1 +2024-08-03 05:38:31,581 INFO [train.py:1114] (2/4) Epoch 4, batch 2850, loss[loss=0.2696, simple_loss=0.3352, pruned_loss=0.102, over 13371.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3437, pruned_loss=0.1068, over 2621245.50 frames. ], batch size: 35, lr: 2.71e-02, grad_scale: 16.0 +2024-08-03 05:38:34,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=50493.666666666664, ans=0.125 +2024-08-03 05:38:44,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=50493.666666666664, ans=0.07 +2024-08-03 05:38:53,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.72 vs. limit=10.0 +2024-08-03 05:39:10,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.00 vs. limit=15.0 +2024-08-03 05:39:26,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=50677.0, ans=0.2 +2024-08-03 05:39:27,367 INFO [train.py:1114] (2/4) Epoch 4, batch 2900, loss[loss=0.2704, simple_loss=0.3322, pruned_loss=0.1043, over 13376.00 frames. 
], tot_loss[loss=0.2782, simple_loss=0.344, pruned_loss=0.1063, over 2632149.85 frames. ], batch size: 36, lr: 2.71e-02, grad_scale: 16.0 +2024-08-03 05:39:39,678 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.312e+02 1.485e+02 1.747e+02 2.702e+02, threshold=2.970e+02, percent-clipped=0.0 +2024-08-03 05:39:39,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50713.666666666664, ans=0.1 +2024-08-03 05:39:42,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=50713.666666666664, ans=0.0 +2024-08-03 05:39:58,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=50787.0, ans=0.125 +2024-08-03 05:40:04,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=50823.666666666664, ans=0.07 +2024-08-03 05:40:07,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=50823.666666666664, ans=0.2 +2024-08-03 05:40:10,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=50823.666666666664, ans=0.2 +2024-08-03 05:40:13,933 INFO [train.py:1114] (2/4) Epoch 4, batch 2950, loss[loss=0.2456, simple_loss=0.3163, pruned_loss=0.08746, over 13335.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3422, pruned_loss=0.1055, over 2631084.87 frames. ], batch size: 34, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:40:19,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50860.333333333336, ans=0.125 +2024-08-03 05:40:22,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50860.333333333336, ans=0.1 +2024-08-03 05:40:26,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=50897.0, ans=0.0 +2024-08-03 05:40:38,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=50933.666666666664, ans=0.0 +2024-08-03 05:40:48,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0 +2024-08-03 05:40:48,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=50970.333333333336, ans=0.07 +2024-08-03 05:40:51,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=51007.0, ans=0.125 +2024-08-03 05:40:59,439 INFO [train.py:1114] (2/4) Epoch 4, batch 3000, loss[loss=0.2608, simple_loss=0.3314, pruned_loss=0.09504, over 13533.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3417, pruned_loss=0.1048, over 2631318.40 frames. ], batch size: 37, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:40:59,439 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 05:41:15,389 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2213, simple_loss=0.3178, pruned_loss=0.06237, over 944034.00 frames. 
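For context on the `Computing validation loss` / `validation: loss=...` lines just above: at this point train.py pauses training, runs one full pass over the validation set with gradients disabled, and reports the summed loss normalized by the total number of validation frames (here `loss=0.2213 ... over 944034.00 frames.`); the `Maximum memory allocated` line that follows reports peak GPU memory, likely derived from `torch.cuda.max_memory_allocated()`. A minimal sketch of such a validation pass is below; it is an illustration under assumed interfaces (`model`, `valid_loader`, and the batch keys are placeholders), not the train.py that produced this log, which is not included in this diff.

import torch

@torch.no_grad()  # disable autograd for the whole validation pass
def compute_validation_loss(model, valid_loader, device):
    # Illustrative sketch only: `model`, `valid_loader`, and the batch keys
    # are assumptions; the actual train.py is not part of this diff.
    was_training = model.training
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in valid_loader:
        feats = batch["features"].to(device)      # e.g. (N, T, 80) fbank frames
        targets = batch["targets"]
        loss, num_frames = model(feats, targets)  # summed loss, frame count (assumed return)
        tot_loss += loss.item()
        tot_frames += float(num_frames)
    if was_training:
        model.train()  # restore training mode before resuming the epoch
    # Logged as e.g. "validation: loss=0.2213 ... over 944034.00 frames."
    return tot_loss / max(tot_frames, 1.0)

Normalizing by frames rather than by batches is what makes the running `tot_loss[... over N frames.]` figures in this log comparable across batches of different lengths.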
+2024-08-03 05:41:15,390 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 05:41:15,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.05 vs. limit=15.0 +2024-08-03 05:41:28,393 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.441e+02 1.719e+02 2.426e+02 4.333e+02, threshold=3.438e+02, percent-clipped=13.0 +2024-08-03 05:41:30,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=51080.333333333336, ans=0.125 +2024-08-03 05:41:32,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=51080.333333333336, ans=0.125 +2024-08-03 05:41:41,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=51117.0, ans=0.0 +2024-08-03 05:41:48,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=51153.666666666664, ans=0.0 +2024-08-03 05:42:02,349 INFO [train.py:1114] (2/4) Epoch 4, batch 3050, loss[loss=0.2309, simple_loss=0.3082, pruned_loss=0.07674, over 13533.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3429, pruned_loss=0.1059, over 2628358.02 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 16.0 +2024-08-03 05:42:04,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.31 vs. limit=15.0 +2024-08-03 05:42:07,209 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=15.0 +2024-08-03 05:42:08,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=12.0 +2024-08-03 05:42:08,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=51227.0, ans=0.0 +2024-08-03 05:42:11,702 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.27 vs. limit=22.5 +2024-08-03 05:42:14,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=51263.666666666664, ans=0.2 +2024-08-03 05:42:21,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51300.333333333336, ans=0.1 +2024-08-03 05:42:23,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=51300.333333333336, ans=0.125 +2024-08-03 05:42:31,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.64 vs. limit=22.5 +2024-08-03 05:42:37,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=51373.666666666664, ans=0.125 +2024-08-03 05:42:57,210 INFO [train.py:1114] (2/4) Epoch 4, batch 3100, loss[loss=0.3131, simple_loss=0.3738, pruned_loss=0.1262, over 13329.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.343, pruned_loss=0.1057, over 2628282.08 frames. 
], batch size: 46, lr: 2.69e-02, grad_scale: 16.0 +2024-08-03 05:51:13,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=51410.333333333336, ans=0.125 +2024-08-03 05:51:13,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=51410.333333333336, ans=0.0 +2024-08-03 05:52:34,206 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.298e+02 1.531e+02 1.928e+02 3.998e+02, threshold=3.062e+02, percent-clipped=1.0 +2024-08-03 05:54:17,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.74 vs. limit=22.5 +2024-08-03 05:54:21,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=51483.666666666664, ans=0.2 +2024-08-03 05:54:28,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=51520.333333333336, ans=0.0 +2024-08-03 05:54:43,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=51557.0, ans=0.0 +2024-08-03 05:54:51,696 INFO [train.py:1114] (2/4) Epoch 4, batch 3150, loss[loss=0.2816, simple_loss=0.3522, pruned_loss=0.1054, over 13015.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3434, pruned_loss=0.1061, over 2628930.53 frames. ], batch size: 48, lr: 2.69e-02, grad_scale: 16.0 +2024-08-03 05:55:03,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=51593.666666666664, ans=0.2 +2024-08-03 05:55:24,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=51703.666666666664, ans=0.125 +2024-08-03 05:55:40,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=51740.333333333336, ans=0.5 +2024-08-03 05:55:41,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.28 vs. limit=8.0 +2024-08-03 05:55:48,105 INFO [train.py:1114] (2/4) Epoch 4, batch 3200, loss[loss=0.2629, simple_loss=0.3278, pruned_loss=0.09902, over 13540.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3425, pruned_loss=0.1054, over 2635033.97 frames. ], batch size: 37, lr: 2.69e-02, grad_scale: 32.0 +2024-08-03 05:55:52,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=51777.0, ans=0.125 +2024-08-03 05:55:57,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.804e+01 1.368e+02 1.621e+02 1.933e+02 3.574e+02, threshold=3.241e+02, percent-clipped=2.0 +2024-08-03 05:56:06,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.97 vs. limit=15.0 +2024-08-03 05:56:10,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=51850.333333333336, ans=0.125 +2024-08-03 05:56:19,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.00 vs. 
limit=15.0 +2024-08-03 05:56:23,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.19 vs. limit=15.0 +2024-08-03 05:56:38,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=51923.666666666664, ans=0.2 +2024-08-03 05:56:44,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=51923.666666666664, ans=0.125 +2024-08-03 05:56:51,450 INFO [train.py:1114] (2/4) Epoch 4, batch 3250, loss[loss=0.3058, simple_loss=0.3668, pruned_loss=0.1224, over 13379.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3426, pruned_loss=0.1051, over 2639254.22 frames. ], batch size: 38, lr: 2.68e-02, grad_scale: 32.0 +2024-08-03 05:56:51,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=51960.333333333336, ans=0.2 +2024-08-03 05:57:02,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=51997.0, ans=0.0 +2024-08-03 05:57:23,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=52033.666666666664, ans=0.125 +2024-08-03 05:58:07,832 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=1.926e-02 +2024-08-03 05:58:14,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52107.0, ans=0.125 +2024-08-03 05:58:15,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.84 vs. limit=15.0 +2024-08-03 05:58:20,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52107.0, ans=0.1 +2024-08-03 05:58:22,583 INFO [train.py:1114] (2/4) Epoch 4, batch 3300, loss[loss=0.2602, simple_loss=0.3277, pruned_loss=0.09632, over 12811.00 frames. ], tot_loss[loss=0.2741, simple_loss=0.3404, pruned_loss=0.1039, over 2640104.89 frames. ], batch size: 52, lr: 2.68e-02, grad_scale: 32.0 +2024-08-03 05:58:28,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=52143.666666666664, ans=0.125 +2024-08-03 05:58:34,518 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.337e+02 1.543e+02 1.796e+02 2.309e+02, threshold=3.087e+02, percent-clipped=0.0 +2024-08-03 05:58:47,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.25 vs. limit=15.0 +2024-08-03 05:58:57,352 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:59:06,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=15.0 +2024-08-03 05:59:14,199 INFO [train.py:1114] (2/4) Epoch 4, batch 3350, loss[loss=0.3124, simple_loss=0.3823, pruned_loss=0.1212, over 13263.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3419, pruned_loss=0.1048, over 2629981.20 frames. 
], batch size: 49, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 05:59:20,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.74 vs. limit=15.0 +2024-08-03 05:59:24,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=52363.666666666664, ans=0.2 +2024-08-03 05:59:35,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52400.333333333336, ans=0.1 +2024-08-03 05:59:36,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52400.333333333336, ans=0.125 +2024-08-03 05:59:45,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52400.333333333336, ans=0.125 +2024-08-03 05:59:58,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52473.666666666664, ans=0.0 +2024-08-03 06:00:05,676 INFO [train.py:1114] (2/4) Epoch 4, batch 3400, loss[loss=0.2365, simple_loss=0.3039, pruned_loss=0.08457, over 13553.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3424, pruned_loss=0.1057, over 2625049.67 frames. ], batch size: 31, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 06:00:15,141 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.426e+02 1.702e+02 2.054e+02 4.258e+02, threshold=3.404e+02, percent-clipped=2.0 +2024-08-03 06:00:21,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=52547.0, ans=0.125 +2024-08-03 06:00:23,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=52583.666666666664, ans=0.2 +2024-08-03 06:00:23,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52583.666666666664, ans=0.125 +2024-08-03 06:00:38,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52620.333333333336, ans=0.125 +2024-08-03 06:00:40,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52657.0, ans=0.0 +2024-08-03 06:00:47,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52657.0, ans=0.0 +2024-08-03 06:00:48,744 INFO [train.py:1114] (2/4) Epoch 4, batch 3450, loss[loss=0.2995, simple_loss=0.3593, pruned_loss=0.1198, over 12958.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3437, pruned_loss=0.1067, over 2627697.82 frames. 
], batch size: 52, lr: 2.67e-02, grad_scale: 32.0 +2024-08-03 06:00:55,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=52693.666666666664, ans=0.0 +2024-08-03 06:01:07,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=52730.333333333336, ans=0.125 +2024-08-03 06:01:19,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=52803.666666666664, ans=0.0 +2024-08-03 06:01:27,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52840.333333333336, ans=0.125 +2024-08-03 06:01:27,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=52840.333333333336, ans=0.0 +2024-08-03 06:01:33,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=52840.333333333336, ans=0.07 +2024-08-03 06:01:34,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52840.333333333336, ans=0.0 +2024-08-03 06:01:36,371 INFO [train.py:1114] (2/4) Epoch 4, batch 3500, loss[loss=0.2977, simple_loss=0.3559, pruned_loss=0.1197, over 13525.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3427, pruned_loss=0.1065, over 2629633.34 frames. ], batch size: 34, lr: 2.66e-02, grad_scale: 32.0 +2024-08-03 06:01:36,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52877.0, ans=0.1 +2024-08-03 06:01:41,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52877.0, ans=0.125 +2024-08-03 06:01:45,562 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.329e+02 1.542e+02 1.871e+02 3.471e+02, threshold=3.085e+02, percent-clipped=1.0 +2024-08-03 06:01:56,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=52913.666666666664, ans=0.0 +2024-08-03 06:02:13,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=52950.333333333336, ans=0.2 +2024-08-03 06:02:15,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.26 vs. limit=15.0 +2024-08-03 06:02:18,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=52950.333333333336, ans=0.2 +2024-08-03 06:02:18,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=52950.333333333336, ans=0.125 +2024-08-03 06:02:20,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52987.0, ans=0.1 +2024-08-03 06:02:37,235 INFO [train.py:1114] (2/4) Epoch 4, batch 3550, loss[loss=0.2792, simple_loss=0.3453, pruned_loss=0.1065, over 12381.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3451, pruned_loss=0.1081, over 2628590.04 frames. 
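Most of the `[scaling.py:214]` records log a `ScheduledFloat`: a scalar hyperparameter (a dropout `p`, a skip rate, a balancer probability, and so on) whose current value `ans` is looked up from `batch_count`. A minimal sketch of one plausible implementation follows, assuming piecewise-linear interpolation between `(batch_count, value)` breakpoints; the class and the example breakpoints are illustrative, not the repository's `scaling.ScheduledFloat`.

```python
import bisect

class ScheduledFloat:
    """Piecewise-linear schedule over batch_count (illustrative sketch)."""

    def __init__(self, *points, default=0.0):
        # points: (batch_count, value) pairs
        self.points = sorted(points)
        self.default = default

    def value(self, batch_count: float) -> float:
        if not self.points:
            return self.default
        counts = [c for c, _ in self.points]
        if batch_count <= counts[0]:
            return self.points[0][1]
        if batch_count >= counts[-1]:
            return self.points[-1][1]
        i = bisect.bisect_right(counts, batch_count)
        (c0, v0), (c1, v1) = self.points[i - 1], self.points[i]
        t = (batch_count - c0) / (c1 - c0)
        return v0 + t * (v1 - v0)

# e.g. a dropout that anneals from 0.3 to 0.1 over the first 20k batches,
# then stays flat -- breakpoints chosen for illustration only
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(51410.33))   # -> 0.1, matching "ans=0.1"-style lines
```

This also explains why the same parameter name appears over and over with identical `ans` values: past the final breakpoint the schedule is constant, and the logger simply re-reports it at each new `batch_count`.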
], batch size: 58, lr: 2.66e-02, grad_scale: 32.0 +2024-08-03 06:03:20,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=53170.333333333336, ans=0.125 +2024-08-03 06:03:27,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=53207.0, ans=0.09899494936611666 +2024-08-03 06:03:32,786 INFO [train.py:1114] (2/4) Epoch 4, batch 3600, loss[loss=0.3979, simple_loss=0.4242, pruned_loss=0.1858, over 9630.00 frames. ], tot_loss[loss=0.2919, simple_loss=0.3525, pruned_loss=0.1157, over 2489059.16 frames. ], batch size: 97, lr: 2.66e-02, grad_scale: 16.0 +2024-08-03 06:03:33,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=53243.666666666664, ans=0.0 +2024-08-03 06:03:57,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.338e+02 1.465e+02 1.631e+02 2.841e+02, threshold=2.930e+02, percent-clipped=0.0 +2024-08-03 06:04:17,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=53353.666666666664, ans=10.0 +2024-08-03 06:05:08,616 INFO [train.py:1114] (2/4) Epoch 5, batch 0, loss[loss=0.2502, simple_loss=0.3233, pruned_loss=0.08857, over 13334.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3233, pruned_loss=0.08857, over 13334.00 frames. ], batch size: 33, lr: 2.47e-02, grad_scale: 32.0 +2024-08-03 06:05:08,616 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 06:05:18,614 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.231, simple_loss=0.3271, pruned_loss=0.06749, over 944034.00 frames. +2024-08-03 06:05:18,615 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 06:05:27,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.50 vs. limit=15.0 +2024-08-03 06:05:31,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.47 vs. limit=22.5 +2024-08-03 06:05:36,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=53467.333333333336, ans=0.2 +2024-08-03 06:05:44,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=53467.333333333336, ans=0.2 +2024-08-03 06:05:53,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=12.0 +2024-08-03 06:05:56,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53540.666666666664, ans=0.1 +2024-08-03 06:06:04,992 INFO [train.py:1114] (2/4) Epoch 5, batch 50, loss[loss=0.2682, simple_loss=0.3224, pruned_loss=0.1071, over 13420.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3447, pruned_loss=0.1071, over 579278.26 frames. ], batch size: 32, lr: 2.47e-02, grad_scale: 32.0 +2024-08-03 06:06:14,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. 
limit=10.0 +2024-08-03 06:06:18,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.15 vs. limit=15.0 +2024-08-03 06:06:24,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.30 vs. limit=15.0 +2024-08-03 06:06:24,700 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.396e+02 1.612e+02 2.008e+02 3.505e+02, threshold=3.224e+02, percent-clipped=4.0 +2024-08-03 06:06:29,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53650.666666666664, ans=0.1 +2024-08-03 06:06:31,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=53687.333333333336, ans=0.2 +2024-08-03 06:06:34,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53687.333333333336, ans=0.0 +2024-08-03 06:06:38,644 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.99 vs. limit=6.0 +2024-08-03 06:06:48,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=53724.0, ans=0.0 +2024-08-03 06:06:49,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=53760.666666666664, ans=6.0 +2024-08-03 06:06:49,894 INFO [train.py:1114] (2/4) Epoch 5, batch 100, loss[loss=0.2481, simple_loss=0.3154, pruned_loss=0.09038, over 13528.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3419, pruned_loss=0.1033, over 1027069.91 frames. ], batch size: 35, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:06:52,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=53760.666666666664, ans=0.125 +2024-08-03 06:07:03,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=53797.333333333336, ans=0.125 +2024-08-03 06:07:14,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=53834.0, ans=0.09899494936611666 +2024-08-03 06:07:16,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=53834.0, ans=0.0 +2024-08-03 06:07:22,997 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.89 vs. limit=22.5 +2024-08-03 06:07:27,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53870.666666666664, ans=0.125 +2024-08-03 06:07:33,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=53907.333333333336, ans=0.0 +2024-08-03 06:07:37,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.51 vs. limit=5.0 +2024-08-03 06:07:42,376 INFO [train.py:1114] (2/4) Epoch 5, batch 150, loss[loss=0.208, simple_loss=0.2839, pruned_loss=0.06605, over 13452.00 frames. 
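At the first batch of each epoch the trainer switches into a validation pass ("Computing validation loss") and reports a frame-weighted validation loss plus the peak GPU memory seen so far ("Maximum memory allocated so far is 10000MB"). A sketch of that pattern in plain PyTorch follows; `model` returning a summed loss together with a frame count, and `valid_loader`, are placeholder assumptions, not the repository's actual interfaces.

```python
import torch

def compute_validation_loss(model, valid_loader):
    """Frame-weighted validation loss, in the spirit of the log's
    'validation: loss=... over N frames' lines (illustrative sketch)."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)  # loss summed over the batch
            tot_loss += loss.item()
            tot_frames += num_frames
    model.train()
    return tot_loss / tot_frames, tot_frames

def max_memory_mb(device):
    # matches 'Maximum memory allocated so far is ...MB'
    return torch.cuda.max_memory_allocated(device) // (1024 * 1024)
```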
], tot_loss[loss=0.2716, simple_loss=0.339, pruned_loss=0.1021, over 1388207.77 frames. ], batch size: 32, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:07:47,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=53944.0, ans=0.0 +2024-08-03 06:07:47,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.68 vs. limit=22.5 +2024-08-03 06:08:01,993 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.304e+02 1.445e+02 1.840e+02 3.127e+02, threshold=2.891e+02, percent-clipped=0.0 +2024-08-03 06:09:17,803 INFO [train.py:1114] (2/4) Epoch 5, batch 200, loss[loss=0.2985, simple_loss=0.3588, pruned_loss=0.1191, over 12636.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3368, pruned_loss=0.1011, over 1666818.91 frames. ], batch size: 59, lr: 2.46e-02, grad_scale: 32.0 +2024-08-03 06:10:25,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=12.0 +2024-08-03 06:10:32,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=54274.0, ans=0.2 +2024-08-03 06:10:36,822 INFO [train.py:1114] (2/4) Epoch 5, batch 250, loss[loss=0.2661, simple_loss=0.3385, pruned_loss=0.09682, over 13294.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3355, pruned_loss=0.1001, over 1884995.63 frames. ], batch size: 46, lr: 2.45e-02, grad_scale: 32.0 +2024-08-03 06:11:05,032 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.288e+02 1.425e+02 1.791e+02 2.775e+02, threshold=2.850e+02, percent-clipped=0.0 +2024-08-03 06:11:05,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54384.0, ans=0.1 +2024-08-03 06:11:11,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.99 vs. limit=22.5 +2024-08-03 06:11:11,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=54420.666666666664, ans=0.0 +2024-08-03 06:11:39,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54457.333333333336, ans=0.125 +2024-08-03 06:11:47,371 INFO [train.py:1114] (2/4) Epoch 5, batch 300, loss[loss=0.2997, simple_loss=0.3689, pruned_loss=0.1152, over 13459.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3345, pruned_loss=0.09949, over 2052659.86 frames. ], batch size: 42, lr: 2.45e-02, grad_scale: 16.0 +2024-08-03 06:12:05,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=54530.666666666664, ans=0.125 +2024-08-03 06:12:06,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.51 vs. 
limit=22.5 +2024-08-03 06:12:19,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=54567.333333333336, ans=0.125 +2024-08-03 06:12:20,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=54567.333333333336, ans=0.0 +2024-08-03 06:12:22,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=54567.333333333336, ans=0.125 +2024-08-03 06:12:23,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=54567.333333333336, ans=0.07 +2024-08-03 06:12:46,942 INFO [train.py:1114] (2/4) Epoch 5, batch 350, loss[loss=0.2347, simple_loss=0.3068, pruned_loss=0.08134, over 13586.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3353, pruned_loss=0.09966, over 2183210.29 frames. ], batch size: 33, lr: 2.45e-02, grad_scale: 16.0 +2024-08-03 06:12:53,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54677.333333333336, ans=0.1 +2024-08-03 06:12:58,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=54677.333333333336, ans=0.0 +2024-08-03 06:13:00,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54714.0, ans=0.1 +2024-08-03 06:14:29,329 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.351e+02 1.704e+02 2.152e+02 5.145e+02, threshold=3.407e+02, percent-clipped=8.0 +2024-08-03 06:14:29,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=54750.666666666664, ans=0.2 +2024-08-03 06:14:32,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54750.666666666664, ans=0.125 +2024-08-03 06:14:42,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54787.333333333336, ans=0.1 +2024-08-03 06:14:54,828 INFO [train.py:1114] (2/4) Epoch 5, batch 400, loss[loss=0.2789, simple_loss=0.3543, pruned_loss=0.1018, over 13358.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3347, pruned_loss=0.09906, over 2287097.07 frames. ], batch size: 37, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:15:08,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=54897.333333333336, ans=0.2 +2024-08-03 06:15:13,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.57 vs. 
limit=10.0 +2024-08-03 06:15:20,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=54934.0, ans=0.0 +2024-08-03 06:15:28,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=54970.666666666664, ans=0.125 +2024-08-03 06:15:34,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=55007.333333333336, ans=0.0 +2024-08-03 06:15:40,355 INFO [train.py:1114] (2/4) Epoch 5, batch 450, loss[loss=0.2918, simple_loss=0.3505, pruned_loss=0.1165, over 13546.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3358, pruned_loss=0.09993, over 2361242.23 frames. ], batch size: 38, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:15:42,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=55044.0, ans=0.025 +2024-08-03 06:15:50,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=55080.666666666664, ans=0.125 +2024-08-03 06:15:57,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55117.333333333336, ans=0.125 +2024-08-03 06:15:58,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=55117.333333333336, ans=0.09899494936611666 +2024-08-03 06:16:01,311 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.371e+02 1.584e+02 1.939e+02 3.313e+02, threshold=3.167e+02, percent-clipped=0.0 +2024-08-03 06:16:01,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55117.333333333336, ans=0.0 +2024-08-03 06:16:02,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.90 vs. limit=15.0 +2024-08-03 06:16:19,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-08-03 06:16:19,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55154.0, ans=0.125 +2024-08-03 06:16:23,561 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:16:31,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55190.666666666664, ans=0.0 +2024-08-03 06:16:34,936 INFO [train.py:1114] (2/4) Epoch 5, batch 500, loss[loss=0.2872, simple_loss=0.355, pruned_loss=0.1096, over 13466.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3343, pruned_loss=0.09914, over 2426427.73 frames. 
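Each `[train.py:1114]` record pairs the current batch's `loss[...]` with a `tot_loss[...]` averaged "over N frames", where the frame count grows across the logging window; this points to a frame-weighted running average rather than a plain mean over batches. A minimal sketch of such an accumulator is below; the class name is illustrative, and the real trainer may additionally decay the weight of older batches rather than keeping a flat average.

```python
class FrameWeightedLoss:
    """Running loss average weighted by frame count (illustrative sketch)."""

    def __init__(self):
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, num_frames: float):
        # batch_loss is assumed to be this batch's mean loss per frame
        self.loss_sum += batch_loss * num_frames
        self.frames += num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tracker = FrameWeightedLoss()
tracker.update(0.2872, 13466.0)   # numbers from a 'batch 500'-style record
print(f"tot_loss[loss={tracker.value:.4f}, over {tracker.frames:.2f} frames.]")
```

Weighting by frames rather than by batches keeps long utterances from being underrepresented in the reported average, which matters here because the logged batch sizes vary widely (31 to 97 in this stretch of the log).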
], batch size: 43, lr: 2.44e-02, grad_scale: 32.0 +2024-08-03 06:16:35,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55227.333333333336, ans=0.1 +2024-08-03 06:16:45,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55264.0, ans=0.1 +2024-08-03 06:16:46,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=55264.0, ans=0.125 +2024-08-03 06:17:40,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=55300.666666666664, ans=0.125 +2024-08-03 06:17:44,965 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=15.0 +2024-08-03 06:17:47,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=55337.333333333336, ans=0.125 +2024-08-03 06:17:52,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=55337.333333333336, ans=0.2 +2024-08-03 06:17:54,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55374.0, ans=0.1 +2024-08-03 06:19:39,785 INFO [train.py:1114] (2/4) Epoch 5, batch 550, loss[loss=0.2818, simple_loss=0.3582, pruned_loss=0.1027, over 13060.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3347, pruned_loss=0.09955, over 2468954.63 frames. ], batch size: 48, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:21:07,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55410.666666666664, ans=0.1 +2024-08-03 06:21:17,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.27 vs. limit=10.0 +2024-08-03 06:21:26,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.350e+02 1.520e+02 1.792e+02 6.308e+02, threshold=3.041e+02, percent-clipped=2.0 +2024-08-03 06:21:32,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=12.0 +2024-08-03 06:21:36,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.62 vs. limit=10.0 +2024-08-03 06:21:43,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=55520.666666666664, ans=0.0 +2024-08-03 06:21:45,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55520.666666666664, ans=0.1 +2024-08-03 06:22:14,708 INFO [train.py:1114] (2/4) Epoch 5, batch 600, loss[loss=0.2827, simple_loss=0.3521, pruned_loss=0.1066, over 13290.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3345, pruned_loss=0.09915, over 2509027.23 frames. 
], batch size: 46, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:22:16,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55594.0, ans=0.1 +2024-08-03 06:22:29,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.68 vs. limit=15.0 +2024-08-03 06:22:42,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=55667.333333333336, ans=0.0 +2024-08-03 06:22:44,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.81 vs. limit=10.0 +2024-08-03 06:23:06,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=55740.666666666664, ans=0.125 +2024-08-03 06:23:07,600 INFO [train.py:1114] (2/4) Epoch 5, batch 650, loss[loss=0.2411, simple_loss=0.3203, pruned_loss=0.08094, over 13558.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3331, pruned_loss=0.09835, over 2544324.46 frames. ], batch size: 37, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:23:25,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=55777.333333333336, ans=0.125 +2024-08-03 06:23:34,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=55814.0, ans=0.125 +2024-08-03 06:23:39,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.04 vs. limit=22.5 +2024-08-03 06:23:43,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.304e+02 1.464e+02 1.924e+02 3.409e+02, threshold=2.927e+02, percent-clipped=2.0 +2024-08-03 06:24:09,706 INFO [train.py:1114] (2/4) Epoch 5, batch 700, loss[loss=0.2284, simple_loss=0.3028, pruned_loss=0.077, over 13541.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3336, pruned_loss=0.09837, over 2566425.50 frames. ], batch size: 35, lr: 2.43e-02, grad_scale: 32.0 +2024-08-03 06:24:43,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.71 vs. limit=15.0 +2024-08-03 06:24:45,992 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.096e-03 +2024-08-03 06:24:46,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0 +2024-08-03 06:24:48,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=56034.0, ans=0.125 +2024-08-03 06:25:08,403 INFO [train.py:1114] (2/4) Epoch 5, batch 750, loss[loss=0.2636, simple_loss=0.3354, pruned_loss=0.0959, over 13358.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3342, pruned_loss=0.09916, over 2583930.16 frames. ], batch size: 37, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:25:27,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.53 vs. 
limit=22.5 +2024-08-03 06:25:28,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56180.666666666664, ans=0.125 +2024-08-03 06:25:32,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=56217.333333333336, ans=0.125 +2024-08-03 06:25:34,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.431e+02 1.731e+02 2.437e+02 4.529e+02, threshold=3.462e+02, percent-clipped=10.0 +2024-08-03 06:25:48,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=56290.666666666664, ans=0.125 +2024-08-03 06:25:59,103 INFO [train.py:1114] (2/4) Epoch 5, batch 800, loss[loss=0.2225, simple_loss=0.2996, pruned_loss=0.07268, over 13352.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3341, pruned_loss=0.0986, over 2598452.88 frames. ], batch size: 33, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:26:01,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56327.333333333336, ans=0.1 +2024-08-03 06:26:27,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=56400.666666666664, ans=0.0 +2024-08-03 06:26:50,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56474.0, ans=0.125 +2024-08-03 06:26:54,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.00 vs. limit=15.0 +2024-08-03 06:26:58,690 INFO [train.py:1114] (2/4) Epoch 5, batch 850, loss[loss=0.2659, simple_loss=0.3439, pruned_loss=0.09399, over 13340.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3341, pruned_loss=0.0989, over 2610760.35 frames. ], batch size: 40, lr: 2.42e-02, grad_scale: 32.0 +2024-08-03 06:27:01,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=56510.666666666664, ans=0.09899494936611666 +2024-08-03 06:27:04,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.82 vs. limit=22.5 +2024-08-03 06:27:12,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.07 vs. limit=15.0 +2024-08-03 06:27:19,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.293e+02 1.480e+02 2.210e+02 4.419e+02, threshold=2.961e+02, percent-clipped=1.0 +2024-08-03 06:27:30,111 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:27:41,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=56657.333333333336, ans=0.2 +2024-08-03 06:27:44,539 INFO [train.py:1114] (2/4) Epoch 5, batch 900, loss[loss=0.2345, simple_loss=0.305, pruned_loss=0.082, over 13346.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.334, pruned_loss=0.09877, over 2612327.86 frames. 
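The `[scaling.py:1024]` "Whitening" records compare a per-module metric against a limit, and a penalty is only relevant when the metric exceeds the limit, which is why most lines read `metric=X vs. limit=Y` with X below Y. One plausible metric is a covariance-anisotropy ratio that equals 1.0 for perfectly white (isotropic) features and grows as variance concentrates in a few directions. The sketch below captures that general idea under stated assumptions; it is not the repository's exact formula.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Anisotropy of the feature covariance: ~1.0 if already white,
    larger otherwise (illustrative definition, not scaling.py's own)."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    # per-group covariance, shape (groups, c, c)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames
    d = cov.shape[-1]
    # d * sum(eig^2) / (sum(eig))^2 via traces, no eigendecomposition:
    # trace(C @ C) == sum of squared eigenvalues for symmetric C
    tr = cov.diagonal(dim1=-2, dim2=-1).sum(-1)
    tr_sq = (cov * cov).sum(dim=(-2, -1))
    return (d * tr_sq / tr.clamp(min=1e-20) ** 2).mean().item()

x = torch.randn(1000, 256)
print(whitening_metric(x))   # close to 1.0 for white Gaussian features
```

This reading is consistent with the grouped entries in the log (e.g. `whiten_keys` with `num_groups=4, num_channels=128`), where the metric is evaluated per attention-head group rather than over the full channel dimension.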
], batch size: 33, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:27:56,527 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:27:56,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.11 vs. limit=6.0 +2024-08-03 06:28:04,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=56767.333333333336, ans=0.0 +2024-08-03 06:28:14,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=56804.0, ans=0.125 +2024-08-03 06:28:15,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=56804.0, ans=0.0 +2024-08-03 06:28:22,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=56804.0, ans=0.1 +2024-08-03 06:28:39,185 INFO [train.py:1114] (2/4) Epoch 5, batch 950, loss[loss=0.2454, simple_loss=0.3219, pruned_loss=0.08448, over 13535.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3348, pruned_loss=0.09913, over 2614212.45 frames. ], batch size: 34, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:28:52,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.85 vs. limit=15.0 +2024-08-03 06:29:00,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=56950.666666666664, ans=0.0 +2024-08-03 06:29:01,394 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.321e+02 1.545e+02 1.895e+02 5.386e+02, threshold=3.090e+02, percent-clipped=1.0 +2024-08-03 06:29:18,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=56987.333333333336, ans=0.5 +2024-08-03 06:29:19,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.41 vs. limit=6.0 +2024-08-03 06:29:35,301 INFO [train.py:1114] (2/4) Epoch 5, batch 1000, loss[loss=0.2332, simple_loss=0.3025, pruned_loss=0.08194, over 13351.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3352, pruned_loss=0.09952, over 2611537.80 frames. ], batch size: 35, lr: 2.41e-02, grad_scale: 32.0 +2024-08-03 06:29:46,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=57060.666666666664, ans=0.125 +2024-08-03 06:29:53,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=57097.333333333336, ans=0.025 +2024-08-03 06:30:40,640 INFO [train.py:1114] (2/4) Epoch 5, batch 1050, loss[loss=0.2493, simple_loss=0.3284, pruned_loss=0.08509, over 13572.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3345, pruned_loss=0.09936, over 2615538.90 frames. ], batch size: 39, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:30:42,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.27 vs. 
limit=22.5 +2024-08-03 06:30:43,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.50 vs. limit=22.5 +2024-08-03 06:30:53,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=57280.666666666664, ans=0.025 +2024-08-03 06:30:54,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=57280.666666666664, ans=0.125 +2024-08-03 06:31:01,345 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.349e+02 1.601e+02 2.002e+02 3.488e+02, threshold=3.202e+02, percent-clipped=3.0 +2024-08-03 06:31:14,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=57354.0, ans=0.2 +2024-08-03 06:31:31,762 INFO [train.py:1114] (2/4) Epoch 5, batch 1100, loss[loss=0.2604, simple_loss=0.3244, pruned_loss=0.09815, over 13557.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3342, pruned_loss=0.09928, over 2619375.39 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:31:41,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=57427.333333333336, ans=0.2 +2024-08-03 06:31:59,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57500.666666666664, ans=0.125 +2024-08-03 06:32:01,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=57500.666666666664, ans=0.2 +2024-08-03 06:32:20,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=57537.333333333336, ans=0.125 +2024-08-03 06:32:26,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57574.0, ans=0.0 +2024-08-03 06:32:31,909 INFO [train.py:1114] (2/4) Epoch 5, batch 1150, loss[loss=0.3021, simple_loss=0.3578, pruned_loss=0.1232, over 13565.00 frames. ], tot_loss[loss=0.266, simple_loss=0.334, pruned_loss=0.09901, over 2618669.85 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0 +2024-08-03 06:32:39,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57610.666666666664, ans=0.125 +2024-08-03 06:32:40,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=57610.666666666664, ans=0.025 +2024-08-03 06:32:43,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57610.666666666664, ans=0.1 +2024-08-03 06:32:59,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.317e+02 1.572e+02 1.915e+02 2.951e+02, threshold=3.144e+02, percent-clipped=0.0 +2024-08-03 06:33:18,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=57720.666666666664, ans=0.125 +2024-08-03 06:33:39,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57757.333333333336, ans=0.1 +2024-08-03 06:33:44,661 INFO [train.py:1114] (2/4) Epoch 5, batch 1200, loss[loss=0.2521, simple_loss=0.336, pruned_loss=0.0841, over 13577.00 frames. 
], tot_loss[loss=0.2669, simple_loss=0.3351, pruned_loss=0.09938, over 2616854.02 frames. ], batch size: 39, lr: 2.39e-02, grad_scale: 32.0 +2024-08-03 06:33:48,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.06 vs. limit=15.0 +2024-08-03 06:33:49,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.18 vs. limit=22.5 +2024-08-03 06:33:51,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57794.0, ans=0.125 +2024-08-03 06:34:16,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57904.0, ans=0.125 +2024-08-03 06:34:29,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=57940.666666666664, ans=0.95 +2024-08-03 06:34:36,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57940.666666666664, ans=0.125 +2024-08-03 06:34:39,099 INFO [train.py:1114] (2/4) Epoch 5, batch 1250, loss[loss=0.2679, simple_loss=0.344, pruned_loss=0.09591, over 13451.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3352, pruned_loss=0.09924, over 2628495.35 frames. ], batch size: 42, lr: 2.39e-02, grad_scale: 32.0 +2024-08-03 06:34:57,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58014.0, ans=0.125 +2024-08-03 06:35:00,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=58050.666666666664, ans=0.2 +2024-08-03 06:35:05,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.303e+02 1.543e+02 2.003e+02 3.165e+02, threshold=3.086e+02, percent-clipped=1.0 +2024-08-03 06:35:15,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58087.333333333336, ans=0.125 +2024-08-03 06:35:32,169 INFO [train.py:1114] (2/4) Epoch 5, batch 1300, loss[loss=0.2543, simple_loss=0.3267, pruned_loss=0.09095, over 12835.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3338, pruned_loss=0.09854, over 2631089.93 frames. ], batch size: 52, lr: 2.39e-02, grad_scale: 16.0 +2024-08-03 06:35:44,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58197.333333333336, ans=0.125 +2024-08-03 06:35:52,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.59 vs. limit=15.0 +2024-08-03 06:36:14,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=58307.333333333336, ans=0.125 +2024-08-03 06:36:16,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58344.0, ans=0.0 +2024-08-03 06:36:17,087 INFO [train.py:1114] (2/4) Epoch 5, batch 1350, loss[loss=0.278, simple_loss=0.3474, pruned_loss=0.1043, over 13540.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3335, pruned_loss=0.09812, over 2638810.69 frames. 
], batch size: 37, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:36:24,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=58344.0, ans=0.0 +2024-08-03 06:36:36,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=58417.333333333336, ans=0.125 +2024-08-03 06:36:39,397 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.960e+01 1.325e+02 1.559e+02 1.988e+02 3.487e+02, threshold=3.118e+02, percent-clipped=2.0 +2024-08-03 06:36:57,242 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.19 vs. limit=15.0 +2024-08-03 06:36:57,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=58454.0, ans=0.125 +2024-08-03 06:36:59,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58454.0, ans=0.1 +2024-08-03 06:37:02,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=58454.0, ans=0.1 +2024-08-03 06:37:09,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58490.666666666664, ans=0.1 +2024-08-03 06:37:17,026 INFO [train.py:1114] (2/4) Epoch 5, batch 1400, loss[loss=0.2318, simple_loss=0.2962, pruned_loss=0.08373, over 13249.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3333, pruned_loss=0.09801, over 2642340.47 frames. ], batch size: 31, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:37:17,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=58527.333333333336, ans=0.0 +2024-08-03 06:37:45,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=58637.333333333336, ans=0.125 +2024-08-03 06:37:48,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.52 vs. limit=10.0 +2024-08-03 06:38:07,878 INFO [train.py:1114] (2/4) Epoch 5, batch 1450, loss[loss=0.2643, simple_loss=0.3377, pruned_loss=0.09543, over 13447.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3341, pruned_loss=0.09833, over 2640815.65 frames. 
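The `grad_scale: 16.0` / `grad_scale: 32.0` field in the batch records is characteristic of mixed-precision training with a dynamic loss scaler: the scale is raised after a run of successful steps and halved when an overflow is detected, which is why it moves between powers of two across the log. A minimal sketch using PyTorch's `GradScaler` follows; `model`, `optimizer`, and `batch` are placeholders, and the `init_scale` is chosen only to mirror the values seen here.

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)  # dynamic loss scale

def train_step(model, optimizer, batch):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():         # fp16/bf16 forward pass
        loss = model(batch)
    scaler.scale(loss).backward()           # backward on the scaled loss
    scaler.step(optimizer)                  # unscales; skips step on inf/nan
    scaler.update()                         # grows or shrinks the scale
    return loss.item(), scaler.get_scale()  # the log's 'grad_scale' field
```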
], batch size: 43, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:38:21,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=58747.333333333336, ans=0.1 +2024-08-03 06:38:28,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=58784.0, ans=0.025 +2024-08-03 06:38:29,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=58784.0, ans=0.125 +2024-08-03 06:38:30,502 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.374e+02 1.719e+02 2.363e+02 5.392e+02, threshold=3.437e+02, percent-clipped=8.0 +2024-08-03 06:38:46,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=58820.666666666664, ans=0.125 +2024-08-03 06:38:48,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=58820.666666666664, ans=0.0 +2024-08-03 06:38:48,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58820.666666666664, ans=0.125 +2024-08-03 06:39:02,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58857.333333333336, ans=0.1 +2024-08-03 06:39:06,274 INFO [train.py:1114] (2/4) Epoch 5, batch 1500, loss[loss=0.2655, simple_loss=0.3429, pruned_loss=0.09402, over 13406.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3337, pruned_loss=0.09803, over 2641055.34 frames. ], batch size: 39, lr: 2.38e-02, grad_scale: 16.0 +2024-08-03 06:39:37,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=58930.666666666664, ans=0.125 +2024-08-03 06:39:42,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58930.666666666664, ans=0.125 +2024-08-03 06:39:44,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58930.666666666664, ans=0.1 +2024-08-03 06:39:50,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=58967.333333333336, ans=0.2 +2024-08-03 06:39:55,990 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:39:56,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=59004.0, ans=0.025 +2024-08-03 06:39:59,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=59004.0, ans=0.0 +2024-08-03 06:40:08,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59040.666666666664, ans=0.1 +2024-08-03 06:40:13,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59040.666666666664, ans=0.1 +2024-08-03 06:40:14,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.59 vs. 
limit=15.0 +2024-08-03 06:40:16,381 INFO [train.py:1114] (2/4) Epoch 5, batch 1550, loss[loss=0.2475, simple_loss=0.3324, pruned_loss=0.08136, over 13399.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3337, pruned_loss=0.09804, over 2630655.65 frames. ], batch size: 41, lr: 2.37e-02, grad_scale: 16.0 +2024-08-03 06:40:19,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=59077.333333333336, ans=0.125 +2024-08-03 06:40:20,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59077.333333333336, ans=0.125 +2024-08-03 06:40:21,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59077.333333333336, ans=0.1 +2024-08-03 06:40:28,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=59114.0, ans=0.2 +2024-08-03 06:40:35,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=59150.666666666664, ans=0.2 +2024-08-03 06:40:38,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=59150.666666666664, ans=0.125 +2024-08-03 06:40:39,047 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.330e+02 1.569e+02 1.992e+02 3.164e+02, threshold=3.138e+02, percent-clipped=1.0 +2024-08-03 06:40:41,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=12.0 +2024-08-03 06:40:48,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59187.333333333336, ans=0.125 +2024-08-03 06:41:38,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=59224.0, ans=0.2 +2024-08-03 06:41:38,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=59224.0, ans=0.125 +2024-08-03 06:41:43,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=59224.0, ans=0.0 +2024-08-03 06:41:47,101 INFO [train.py:1114] (2/4) Epoch 5, batch 1600, loss[loss=0.2831, simple_loss=0.3517, pruned_loss=0.1073, over 13571.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3337, pruned_loss=0.09818, over 2623415.03 frames. ], batch size: 39, lr: 2.37e-02, grad_scale: 32.0 +2024-08-03 06:41:49,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=59260.666666666664, ans=0.125 +2024-08-03 06:41:58,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59297.333333333336, ans=0.1 +2024-08-03 06:41:59,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=59297.333333333336, ans=0.125 +2024-08-03 06:42:06,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. 
limit=6.0 +2024-08-03 06:42:10,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=59334.0, ans=0.2 +2024-08-03 06:42:18,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.98 vs. limit=22.5 +2024-08-03 06:42:28,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=59407.333333333336, ans=0.025 +2024-08-03 06:42:38,322 INFO [train.py:1114] (2/4) Epoch 5, batch 1650, loss[loss=0.2632, simple_loss=0.3511, pruned_loss=0.08763, over 13324.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3331, pruned_loss=0.09798, over 2620478.50 frames. ], batch size: 40, lr: 2.37e-02, grad_scale: 32.0 +2024-08-03 06:42:57,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=59480.666666666664, ans=0.125 +2024-08-03 06:42:58,892 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. limit=15.0 +2024-08-03 06:43:04,539 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.342e+02 1.500e+02 2.074e+02 4.077e+02, threshold=2.999e+02, percent-clipped=4.0 +2024-08-03 06:43:27,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=59627.333333333336, ans=0.125 +2024-08-03 06:43:27,951 INFO [train.py:1114] (2/4) Epoch 5, batch 1700, loss[loss=0.2464, simple_loss=0.3034, pruned_loss=0.09473, over 13253.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3321, pruned_loss=0.09689, over 2628862.90 frames. ], batch size: 31, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:43:43,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=59664.0, ans=0.125 +2024-08-03 06:43:48,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=59700.666666666664, ans=0.2 +2024-08-03 06:43:52,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=59700.666666666664, ans=0.125 +2024-08-03 06:44:12,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.28 vs. limit=15.0 +2024-08-03 06:44:13,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=59774.0, ans=0.0 +2024-08-03 06:44:20,424 INFO [train.py:1114] (2/4) Epoch 5, batch 1750, loss[loss=0.2195, simple_loss=0.2894, pruned_loss=0.07479, over 13550.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3311, pruned_loss=0.09648, over 2633083.17 frames. 
], batch size: 31, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:44:45,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=59884.0, ans=0.125 +2024-08-03 06:44:50,131 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.019e+02 1.258e+02 1.421e+02 1.677e+02 2.914e+02, threshold=2.843e+02, percent-clipped=0.0 +2024-08-03 06:44:50,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59884.0, ans=0.125 +2024-08-03 06:45:09,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.09 vs. limit=15.0 +2024-08-03 06:45:13,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=59957.333333333336, ans=0.2 +2024-08-03 06:45:13,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.23 vs. limit=6.0 +2024-08-03 06:45:21,417 INFO [train.py:1114] (2/4) Epoch 5, batch 1800, loss[loss=0.2439, simple_loss=0.3198, pruned_loss=0.08401, over 13556.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3317, pruned_loss=0.09708, over 2634602.15 frames. ], batch size: 38, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:45:32,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.15 vs. limit=15.0 +2024-08-03 06:45:46,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=15.0 +2024-08-03 06:45:54,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.47 vs. limit=6.0 +2024-08-03 06:45:55,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=60104.0, ans=0.0 +2024-08-03 06:49:17,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0 +2024-08-03 06:49:33,863 INFO [train.py:1114] (2/4) Epoch 5, batch 1850, loss[loss=0.2433, simple_loss=0.3205, pruned_loss=0.08307, over 13397.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3318, pruned_loss=0.09719, over 2637394.17 frames. 
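The many `balancer...` entries (with `prob`, `min_positive`, `max_positive`, `min_abs` fields) suggest modules that occasionally nudge activation statistics into a target range, applied stochastically with probability `prob`. The sketch below is a loose stand-in for that idea expressed as an auxiliary penalty; the real balancer modules named in the log act through the backward pass rather than an explicit loss term, so every name and mechanism here is an assumption for illustration.

```python
import random
import torch

def balancer_penalty(x, min_positive=0.05, max_positive=0.95, prob=0.125):
    """With probability `prob`, return a penalty pushing the per-channel
    fraction of positive activations into [min_positive, max_positive]
    (illustrative stand-in, not the log's actual balancer)."""
    if random.random() >= prob:
        return x.new_zeros(())
    # soft, differentiable proxy for "fraction of x that is positive";
    # x is assumed to have shape (frames, channels)
    frac_pos = torch.sigmoid(x / (x.std() + 1e-6)).mean(dim=0)
    too_low = (min_positive - frac_pos).clamp(min=0.0)
    too_high = (frac_pos - max_positive).clamp(min=0.0)
    return (too_low + too_high).sum()
```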
], batch size: 39, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:50:15,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=60177.333333333336, ans=0.2 +2024-08-03 06:50:42,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=60214.0, ans=0.07 +2024-08-03 06:50:57,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=60214.0, ans=0.025 +2024-08-03 06:51:00,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=60214.0, ans=0.2 +2024-08-03 06:51:09,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=60250.666666666664, ans=0.0 +2024-08-03 06:51:23,621 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.315e+02 1.584e+02 1.966e+02 3.228e+02, threshold=3.167e+02, percent-clipped=4.0 +2024-08-03 06:51:26,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.29 vs. limit=22.5 +2024-08-03 06:53:02,072 INFO [train.py:1114] (2/4) Epoch 5, batch 1900, loss[loss=0.273, simple_loss=0.3483, pruned_loss=0.09879, over 13339.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3327, pruned_loss=0.0975, over 2639324.75 frames. ], batch size: 40, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:53:24,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60397.333333333336, ans=0.1 +2024-08-03 06:53:28,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=60397.333333333336, ans=0.0 +2024-08-03 06:56:52,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=60397.333333333336, ans=0.125 +2024-08-03 06:57:48,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60434.0, ans=0.125 +2024-08-03 06:57:50,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60434.0, ans=0.0 +2024-08-03 06:57:50,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=60434.0, ans=0.2 +2024-08-03 06:57:57,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60434.0, ans=0.1 +2024-08-03 06:58:15,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=60470.666666666664, ans=0.0 +2024-08-03 06:58:25,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60470.666666666664, ans=0.125 +2024-08-03 06:58:47,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60507.333333333336, ans=0.1 +2024-08-03 06:58:59,374 INFO [train.py:1114] (2/4) Epoch 5, batch 1950, loss[loss=0.258, simple_loss=0.3294, pruned_loss=0.0933, over 13573.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3337, pruned_loss=0.09798, over 2646228.94 frames. 
], batch size: 36, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:59:17,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60580.666666666664, ans=0.1 +2024-08-03 07:01:31,512 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.403e+02 1.665e+02 1.976e+02 3.868e+02, threshold=3.331e+02, percent-clipped=1.0 +2024-08-03 07:01:33,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60617.333333333336, ans=0.1 +2024-08-03 07:01:34,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=60617.333333333336, ans=0.0 +2024-08-03 07:01:34,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=60617.333333333336, ans=0.0 +2024-08-03 07:02:06,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=60654.0, ans=0.2 +2024-08-03 07:02:34,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60690.666666666664, ans=0.125 +2024-08-03 07:03:26,355 INFO [train.py:1114] (2/4) Epoch 5, batch 2000, loss[loss=0.2738, simple_loss=0.3216, pruned_loss=0.1129, over 13521.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3345, pruned_loss=0.09856, over 2635449.27 frames. ], batch size: 31, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 07:03:27,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=60727.333333333336, ans=0.125 +2024-08-03 07:03:31,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=60727.333333333336, ans=0.0 +2024-08-03 07:03:33,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=60727.333333333336, ans=0.125 +2024-08-03 07:03:40,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60764.0, ans=0.1 +2024-08-03 07:03:51,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=60800.666666666664, ans=0.125 +2024-08-03 07:03:56,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=60837.333333333336, ans=0.125 +2024-08-03 07:04:02,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=60874.0, ans=0.125 +2024-08-03 07:04:20,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=60874.0, ans=0.2 +2024-08-03 07:04:31,374 INFO [train.py:1114] (2/4) Epoch 5, batch 2050, loss[loss=0.2413, simple_loss=0.3031, pruned_loss=0.08981, over 13434.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3323, pruned_loss=0.09763, over 2632774.59 frames. 
], batch size: 32, lr: 2.34e-02, grad_scale: 32.0 +2024-08-03 07:04:47,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=60910.666666666664, ans=0.125 +2024-08-03 07:05:05,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.33 vs. limit=10.0 +2024-08-03 07:05:08,120 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.320e+02 1.526e+02 1.984e+02 3.306e+02, threshold=3.052e+02, percent-clipped=0.0 +2024-08-03 07:05:29,947 INFO [train.py:1114] (2/4) Epoch 5, batch 2100, loss[loss=0.2497, simple_loss=0.3183, pruned_loss=0.09054, over 13534.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3318, pruned_loss=0.09725, over 2638013.61 frames. ], batch size: 37, lr: 2.34e-02, grad_scale: 16.0 +2024-08-03 07:07:41,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=61167.333333333336, ans=0.2 +2024-08-03 07:07:44,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61204.0, ans=0.125 +2024-08-03 07:08:00,450 INFO [train.py:1114] (2/4) Epoch 5, batch 2150, loss[loss=0.2733, simple_loss=0.3376, pruned_loss=0.1045, over 13561.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3308, pruned_loss=0.09621, over 2647207.47 frames. ], batch size: 36, lr: 2.34e-02, grad_scale: 16.0 +2024-08-03 07:08:11,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=61314.0, ans=0.125 +2024-08-03 07:08:13,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=61314.0, ans=0.125 +2024-08-03 07:08:25,510 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.319e+02 1.581e+02 2.053e+02 4.024e+02, threshold=3.163e+02, percent-clipped=3.0 +2024-08-03 07:08:26,996 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.78 vs. limit=15.0 +2024-08-03 07:08:37,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61387.333333333336, ans=0.125 +2024-08-03 07:08:50,640 INFO [train.py:1114] (2/4) Epoch 5, batch 2200, loss[loss=0.2566, simple_loss=0.3359, pruned_loss=0.08868, over 13389.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3302, pruned_loss=0.09554, over 2645756.16 frames. 
], batch size: 39, lr: 2.33e-02, grad_scale: 16.0 +2024-08-03 07:08:56,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=61460.666666666664, ans=0.125 +2024-08-03 07:08:57,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=61460.666666666664, ans=0.2 +2024-08-03 07:08:58,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=61460.666666666664, ans=0.0 +2024-08-03 07:09:04,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61497.333333333336, ans=0.125 +2024-08-03 07:09:15,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=61534.0, ans=0.5 +2024-08-03 07:09:16,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61534.0, ans=0.125 +2024-08-03 07:09:28,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61570.666666666664, ans=0.1 +2024-08-03 07:09:38,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=61607.333333333336, ans=0.125 +2024-08-03 07:09:41,549 INFO [train.py:1114] (2/4) Epoch 5, batch 2250, loss[loss=0.2429, simple_loss=0.3187, pruned_loss=0.08359, over 13368.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3303, pruned_loss=0.09569, over 2642615.23 frames. ], batch size: 37, lr: 2.33e-02, grad_scale: 16.0 +2024-08-03 07:09:47,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.57 vs. limit=22.5 +2024-08-03 07:10:04,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=61717.333333333336, ans=0.125 +2024-08-03 07:10:05,790 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.390e+02 1.682e+02 2.115e+02 4.078e+02, threshold=3.364e+02, percent-clipped=8.0 +2024-08-03 07:12:06,481 INFO [train.py:1114] (2/4) Epoch 5, batch 2300, loss[loss=0.1933, simple_loss=0.268, pruned_loss=0.05933, over 13574.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3291, pruned_loss=0.09526, over 2638760.50 frames. ], batch size: 33, lr: 2.33e-02, grad_scale: 16.0 +2024-08-03 07:13:45,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=61864.0, ans=0.07 +2024-08-03 07:13:58,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61864.0, ans=0.1 +2024-08-03 07:16:10,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.05 vs. 
limit=15.0 +2024-08-03 07:16:31,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=61974.0, ans=0.2 +2024-08-03 07:16:32,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61974.0, ans=0.1 +2024-08-03 07:16:35,465 INFO [train.py:1114] (2/4) Epoch 5, batch 2350, loss[loss=0.2653, simple_loss=0.3347, pruned_loss=0.09791, over 13547.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3301, pruned_loss=0.09614, over 2641215.66 frames. ], batch size: 38, lr: 2.32e-02, grad_scale: 16.0 +2024-08-03 07:17:03,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=62047.333333333336, ans=0.0 +2024-08-03 07:17:20,769 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.323e+02 1.600e+02 2.155e+02 3.699e+02, threshold=3.200e+02, percent-clipped=2.0 +2024-08-03 07:17:34,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=62120.666666666664, ans=0.025 +2024-08-03 07:17:36,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=62120.666666666664, ans=10.0 +2024-08-03 07:18:25,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.11 vs. limit=22.5 +2024-08-03 07:18:36,410 INFO [train.py:1114] (2/4) Epoch 5, batch 2400, loss[loss=0.2317, simple_loss=0.3005, pruned_loss=0.08145, over 13525.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3306, pruned_loss=0.09615, over 2642298.66 frames. ], batch size: 35, lr: 2.32e-02, grad_scale: 32.0 +2024-08-03 07:19:00,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=62194.0, ans=0.125 +2024-08-03 07:19:06,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=16.39 vs. limit=15.0 +2024-08-03 07:19:13,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=62267.333333333336, ans=0.0 +2024-08-03 07:19:35,289 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.542e-03 +2024-08-03 07:19:37,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=62340.666666666664, ans=0.125 +2024-08-03 07:19:45,671 INFO [train.py:1114] (2/4) Epoch 5, batch 2450, loss[loss=0.2641, simple_loss=0.3338, pruned_loss=0.09724, over 13346.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3317, pruned_loss=0.09663, over 2631896.92 frames. ], batch size: 37, lr: 2.32e-02, grad_scale: 32.0 +2024-08-03 07:19:53,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=62414.0, ans=0.125 +2024-08-03 07:20:05,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=62450.666666666664, ans=0.09899494936611666 +2024-08-03 07:20:06,693 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.86 vs. 
limit=15.0 +2024-08-03 07:20:08,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.340e+02 1.643e+02 2.212e+02 4.155e+02, threshold=3.287e+02, percent-clipped=6.0 +2024-08-03 07:20:19,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=62487.333333333336, ans=0.125 +2024-08-03 07:20:27,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=62524.0, ans=0.0 +2024-08-03 07:20:35,558 INFO [train.py:1114] (2/4) Epoch 5, batch 2500, loss[loss=0.2736, simple_loss=0.343, pruned_loss=0.1021, over 13391.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3315, pruned_loss=0.09643, over 2636549.41 frames. ], batch size: 39, lr: 2.32e-02, grad_scale: 32.0 +2024-08-03 07:20:37,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62560.666666666664, ans=0.1 +2024-08-03 07:20:37,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.32 vs. limit=22.5 +2024-08-03 07:20:41,788 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=22.5 +2024-08-03 07:20:42,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=62560.666666666664, ans=0.07 +2024-08-03 07:20:43,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=62597.333333333336, ans=0.125 +2024-08-03 07:20:51,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=62597.333333333336, ans=0.125 +2024-08-03 07:20:52,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=62597.333333333336, ans=0.2 +2024-08-03 07:21:20,458 INFO [train.py:1114] (2/4) Epoch 5, batch 2550, loss[loss=0.2463, simple_loss=0.3111, pruned_loss=0.09075, over 13531.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3314, pruned_loss=0.09663, over 2638435.63 frames. ], batch size: 31, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:21:20,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=62744.0, ans=0.2 +2024-08-03 07:21:21,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=62744.0, ans=0.125 +2024-08-03 07:21:22,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.08 vs. 
limit=22.5 +2024-08-03 07:21:26,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=62744.0, ans=0.0 +2024-08-03 07:21:49,255 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.290e+02 1.480e+02 1.885e+02 4.380e+02, threshold=2.959e+02, percent-clipped=2.0 +2024-08-03 07:21:50,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62817.333333333336, ans=0.1 +2024-08-03 07:22:01,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=62890.666666666664, ans=0.125 +2024-08-03 07:22:09,867 INFO [train.py:1114] (2/4) Epoch 5, batch 2600, loss[loss=0.2655, simple_loss=0.335, pruned_loss=0.09802, over 13573.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3318, pruned_loss=0.09678, over 2637562.01 frames. ], batch size: 36, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:24:05,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=63037.333333333336, ans=0.2 +2024-08-03 07:24:07,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=63074.0, ans=0.125 +2024-08-03 07:24:12,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=63074.0, ans=0.2 +2024-08-03 07:24:12,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=63074.0, ans=0.125 +2024-08-03 07:24:17,371 INFO [train.py:1114] (2/4) Epoch 5, batch 2650, loss[loss=0.2842, simple_loss=0.3644, pruned_loss=0.102, over 13272.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3327, pruned_loss=0.09687, over 2640014.91 frames. ], batch size: 46, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:24:43,701 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.380e+02 1.559e+02 1.928e+02 2.967e+02, threshold=3.118e+02, percent-clipped=1.0 +2024-08-03 07:24:58,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63257.333333333336, ans=0.125 +2024-08-03 07:25:04,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63257.333333333336, ans=0.125 +2024-08-03 07:25:05,923 INFO [train.py:1114] (2/4) Epoch 5, batch 2700, loss[loss=0.2495, simple_loss=0.327, pruned_loss=0.08595, over 13548.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3321, pruned_loss=0.09616, over 2637246.86 frames. 
], batch size: 40, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:25:19,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=63330.666666666664, ans=0.125 +2024-08-03 07:25:21,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=63330.666666666664, ans=0.025 +2024-08-03 07:25:33,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=63404.0, ans=0.1 +2024-08-03 07:25:34,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63404.0, ans=0.1 +2024-08-03 07:25:38,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=63404.0, ans=0.025 +2024-08-03 07:25:49,675 INFO [train.py:1114] (2/4) Epoch 5, batch 2750, loss[loss=0.2122, simple_loss=0.2842, pruned_loss=0.07011, over 13334.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3307, pruned_loss=0.09565, over 2635240.20 frames. ], batch size: 34, lr: 2.30e-02, grad_scale: 32.0 +2024-08-03 07:25:50,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=63477.333333333336, ans=0.125 +2024-08-03 07:26:08,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=63477.333333333336, ans=0.125 +2024-08-03 07:26:33,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.28 vs. limit=15.0 +2024-08-03 07:26:35,262 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.483e+02 1.693e+02 2.147e+02 4.016e+02, threshold=3.386e+02, percent-clipped=6.0 +2024-08-03 07:26:38,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63587.333333333336, ans=0.1 +2024-08-03 07:26:47,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63624.0, ans=0.125 +2024-08-03 07:26:56,074 INFO [train.py:1114] (2/4) Epoch 5, batch 2800, loss[loss=0.3674, simple_loss=0.3926, pruned_loss=0.171, over 9030.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3308, pruned_loss=0.09603, over 2626275.71 frames. ], batch size: 96, lr: 2.30e-02, grad_scale: 32.0 +2024-08-03 07:27:00,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=63660.666666666664, ans=0.125 +2024-08-03 07:27:04,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=63660.666666666664, ans=0.0 +2024-08-03 07:27:22,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=63770.666666666664, ans=0.2 +2024-08-03 07:27:36,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=63807.333333333336, ans=0.2 +2024-08-03 07:27:40,308 INFO [train.py:1114] (2/4) Epoch 5, batch 2850, loss[loss=0.2703, simple_loss=0.3319, pruned_loss=0.1044, over 13356.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3316, pruned_loss=0.09703, over 2621383.95 frames. 
], batch size: 35, lr: 2.30e-02, grad_scale: 32.0 +2024-08-03 07:27:54,545 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.84 vs. limit=10.0 +2024-08-03 07:27:58,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=63880.666666666664, ans=0.0 +2024-08-03 07:28:00,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63880.666666666664, ans=0.1 +2024-08-03 07:28:36,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=63917.333333333336, ans=0.125 +2024-08-03 07:28:37,169 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.375e+02 1.611e+02 2.020e+02 3.770e+02, threshold=3.222e+02, percent-clipped=1.0 +2024-08-03 07:28:38,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=63917.333333333336, ans=0.2 +2024-08-03 07:28:41,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=63954.0, ans=0.0 +2024-08-03 07:28:51,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=63990.666666666664, ans=0.0 +2024-08-03 07:28:52,221 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:28:55,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0 +2024-08-03 07:28:58,763 INFO [train.py:1114] (2/4) Epoch 5, batch 2900, loss[loss=0.2827, simple_loss=0.3468, pruned_loss=0.1094, over 13351.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3324, pruned_loss=0.09626, over 2632139.29 frames. ], batch size: 36, lr: 2.29e-02, grad_scale: 32.0 +2024-08-03 07:29:02,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64027.333333333336, ans=0.0 +2024-08-03 07:29:03,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=64027.333333333336, ans=0.125 +2024-08-03 07:29:06,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.41 vs. limit=10.0 +2024-08-03 07:29:08,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=64027.333333333336, ans=0.125 +2024-08-03 07:29:17,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=64064.0, ans=0.0 +2024-08-03 07:29:38,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.05 vs. limit=12.0 +2024-08-03 07:29:41,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64174.0, ans=0.1 +2024-08-03 07:29:46,625 INFO [train.py:1114] (2/4) Epoch 5, batch 2950, loss[loss=0.2689, simple_loss=0.3319, pruned_loss=0.103, over 13325.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3301, pruned_loss=0.09543, over 2629590.45 frames. 
], batch size: 34, lr: 2.29e-02, grad_scale: 32.0 +2024-08-03 07:29:51,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64210.666666666664, ans=0.1 +2024-08-03 07:30:13,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.269e+02 1.515e+02 1.903e+02 4.002e+02, threshold=3.030e+02, percent-clipped=2.0 +2024-08-03 07:30:15,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.08 vs. limit=15.0 +2024-08-03 07:30:19,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.85 vs. limit=10.0 +2024-08-03 07:30:27,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=64320.666666666664, ans=0.05 +2024-08-03 07:30:38,301 INFO [train.py:1114] (2/4) Epoch 5, batch 3000, loss[loss=0.2508, simple_loss=0.3248, pruned_loss=0.08838, over 13540.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3302, pruned_loss=0.0956, over 2630227.47 frames. ], batch size: 37, lr: 2.29e-02, grad_scale: 16.0 +2024-08-03 07:30:38,302 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 07:31:39,948 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.2105, simple_loss=0.3083, pruned_loss=0.0563, over 944034.00 frames. +2024-08-03 07:31:39,949 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 07:31:52,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=64430.666666666664, ans=0.025 +2024-08-03 07:32:26,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=64540.666666666664, ans=0.0 +2024-08-03 07:32:27,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=64540.666666666664, ans=0.125 +2024-08-03 07:32:30,611 INFO [train.py:1114] (2/4) Epoch 5, batch 3050, loss[loss=0.2451, simple_loss=0.3183, pruned_loss=0.08597, over 13530.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3313, pruned_loss=0.09621, over 2627416.22 frames. ], batch size: 35, lr: 2.29e-02, grad_scale: 16.0 +2024-08-03 07:32:30,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=64577.333333333336, ans=0.0 +2024-08-03 07:32:35,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64577.333333333336, ans=0.1 +2024-08-03 07:32:41,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=64577.333333333336, ans=0.2 +2024-08-03 07:32:54,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.62 vs. 
limit=15.0 +2024-08-03 07:32:59,069 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.354e+02 1.544e+02 1.924e+02 3.300e+02, threshold=3.088e+02, percent-clipped=4.0 +2024-08-03 07:33:14,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=64724.0, ans=0.125 +2024-08-03 07:33:18,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.07 vs. limit=15.0 +2024-08-03 07:33:18,950 INFO [train.py:1114] (2/4) Epoch 5, batch 3100, loss[loss=0.2752, simple_loss=0.344, pruned_loss=0.1032, over 13293.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3309, pruned_loss=0.09578, over 2627141.63 frames. ], batch size: 46, lr: 2.28e-02, grad_scale: 16.0 +2024-08-03 07:33:21,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.62 vs. limit=10.0 +2024-08-03 07:33:22,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=64760.666666666664, ans=0.0 +2024-08-03 07:33:24,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64760.666666666664, ans=0.125 +2024-08-03 07:33:25,104 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:33:33,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=64797.333333333336, ans=0.125 +2024-08-03 07:33:39,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64834.0, ans=0.125 +2024-08-03 07:33:41,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=64834.0, ans=0.0 +2024-08-03 07:33:48,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.80 vs. limit=15.0 +2024-08-03 07:34:01,577 INFO [train.py:1114] (2/4) Epoch 5, batch 3150, loss[loss=0.2851, simple_loss=0.3541, pruned_loss=0.108, over 13010.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3308, pruned_loss=0.09567, over 2628604.66 frames. 
], batch size: 48, lr: 2.28e-02, grad_scale: 16.0 +2024-08-03 07:34:10,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=64980.666666666664, ans=0.2 +2024-08-03 07:34:16,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64980.666666666664, ans=0.125 +2024-08-03 07:34:24,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=65017.333333333336, ans=0.125 +2024-08-03 07:34:25,112 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.369e+02 1.582e+02 1.897e+02 3.787e+02, threshold=3.164e+02, percent-clipped=5.0 +2024-08-03 07:34:35,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=65054.0, ans=0.125 +2024-08-03 07:34:41,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.87 vs. limit=15.0 +2024-08-03 07:34:51,990 INFO [train.py:1114] (2/4) Epoch 5, batch 3200, loss[loss=0.2719, simple_loss=0.3427, pruned_loss=0.1006, over 13559.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3294, pruned_loss=0.09489, over 2636108.91 frames. ], batch size: 37, lr: 2.28e-02, grad_scale: 32.0 +2024-08-03 07:35:17,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=65200.666666666664, ans=0.125 +2024-08-03 07:35:29,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=65274.0, ans=0.125 +2024-08-03 07:35:30,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=65274.0, ans=0.2 +2024-08-03 07:35:33,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.15 vs. limit=15.0 +2024-08-03 07:35:35,993 INFO [train.py:1114] (2/4) Epoch 5, batch 3250, loss[loss=0.267, simple_loss=0.3472, pruned_loss=0.09347, over 13414.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3297, pruned_loss=0.09484, over 2640090.37 frames. ], batch size: 38, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:35:37,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=65310.666666666664, ans=0.0 +2024-08-03 07:35:41,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.72 vs. 
limit=22.5 +2024-08-03 07:35:48,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65347.333333333336, ans=0.0 +2024-08-03 07:35:52,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65347.333333333336, ans=0.1 +2024-08-03 07:35:57,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=65384.0, ans=0.0 +2024-08-03 07:36:00,546 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.291e+02 1.478e+02 1.851e+02 2.616e+02, threshold=2.956e+02, percent-clipped=0.0 +2024-08-03 07:36:05,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=65420.666666666664, ans=0.125 +2024-08-03 07:36:09,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=65420.666666666664, ans=0.025 +2024-08-03 07:36:13,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65457.333333333336, ans=0.1 +2024-08-03 07:36:19,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65494.0, ans=0.125 +2024-08-03 07:36:20,410 INFO [train.py:1114] (2/4) Epoch 5, batch 3300, loss[loss=0.2998, simple_loss=0.3611, pruned_loss=0.1192, over 12918.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3289, pruned_loss=0.09461, over 2641529.09 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:36:25,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65494.0, ans=0.125 +2024-08-03 07:36:32,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=65530.666666666664, ans=0.0 +2024-08-03 07:36:45,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=65567.33333333333, ans=0.0 +2024-08-03 07:37:00,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.34 vs. limit=15.0 +2024-08-03 07:37:03,882 INFO [train.py:1114] (2/4) Epoch 5, batch 3350, loss[loss=0.2923, simple_loss=0.365, pruned_loss=0.1098, over 13258.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3299, pruned_loss=0.09536, over 2630316.92 frames. ], batch size: 49, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:37:09,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.30 vs. limit=15.0 +2024-08-03 07:37:09,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=65677.33333333333, ans=0.0 +2024-08-03 07:37:13,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=20.09 vs. 
limit=22.5 +2024-08-03 07:37:16,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=65714.0, ans=0.125 +2024-08-03 07:37:16,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65714.0, ans=0.125 +2024-08-03 07:37:20,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65750.66666666667, ans=0.0 +2024-08-03 07:37:25,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=65750.66666666667, ans=0.0 +2024-08-03 07:37:26,819 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.375e+02 1.557e+02 1.947e+02 3.831e+02, threshold=3.114e+02, percent-clipped=2.0 +2024-08-03 07:37:27,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=65750.66666666667, ans=0.0 +2024-08-03 07:37:29,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=65787.33333333333, ans=10.0 +2024-08-03 07:37:31,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=65787.33333333333, ans=0.125 +2024-08-03 07:37:33,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=65787.33333333333, ans=0.95 +2024-08-03 07:37:46,239 INFO [train.py:1114] (2/4) Epoch 5, batch 3400, loss[loss=0.2553, simple_loss=0.3119, pruned_loss=0.09934, over 13557.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3293, pruned_loss=0.09504, over 2626101.16 frames. ], batch size: 31, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:37:48,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=65860.66666666667, ans=0.0 +2024-08-03 07:37:48,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=65860.66666666667, ans=0.125 +2024-08-03 07:38:15,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=65970.66666666667, ans=0.125 +2024-08-03 07:38:16,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.03 vs. limit=22.5 +2024-08-03 07:38:28,623 INFO [train.py:1114] (2/4) Epoch 5, batch 3450, loss[loss=0.3166, simple_loss=0.3743, pruned_loss=0.1295, over 12841.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3296, pruned_loss=0.09503, over 2629662.03 frames. 
], batch size: 52, lr: 2.26e-02, grad_scale: 32.0 +2024-08-03 07:38:38,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=66080.66666666667, ans=0.125 +2024-08-03 07:38:43,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=66080.66666666667, ans=0.0 +2024-08-03 07:38:51,386 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.367e+02 1.647e+02 2.162e+02 3.510e+02, threshold=3.294e+02, percent-clipped=1.0 +2024-08-03 07:38:56,683 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:39:07,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=66190.66666666667, ans=0.2 +2024-08-03 07:39:10,861 INFO [train.py:1114] (2/4) Epoch 5, batch 3500, loss[loss=0.245, simple_loss=0.3196, pruned_loss=0.0852, over 13545.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3289, pruned_loss=0.09479, over 2631294.96 frames. ], batch size: 34, lr: 2.26e-02, grad_scale: 32.0 +2024-08-03 07:39:19,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66227.33333333333, ans=0.1 +2024-08-03 07:39:26,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66264.0, ans=0.125 +2024-08-03 07:39:47,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=66337.33333333333, ans=0.0 +2024-08-03 07:39:48,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=66337.33333333333, ans=0.2 +2024-08-03 07:39:49,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=66337.33333333333, ans=0.5 +2024-08-03 07:39:58,297 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:40:00,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66374.0, ans=0.125 +2024-08-03 07:40:05,779 INFO [train.py:1114] (2/4) Epoch 5, batch 3550, loss[loss=0.3384, simple_loss=0.3834, pruned_loss=0.1467, over 12489.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.3318, pruned_loss=0.09668, over 2629402.93 frames. ], batch size: 58, lr: 2.26e-02, grad_scale: 16.0 +2024-08-03 07:40:16,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=66410.66666666667, ans=0.0 +2024-08-03 07:40:39,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=66484.0, ans=0.125 +2024-08-03 07:40:40,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0 +2024-08-03 07:40:41,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.52 vs. 
limit=15.0 +2024-08-03 07:40:41,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.465e+02 1.682e+02 2.236e+02 4.572e+02, threshold=3.363e+02, percent-clipped=5.0 +2024-08-03 07:40:45,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66520.66666666667, ans=0.125 +2024-08-03 07:40:46,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=66520.66666666667, ans=0.125 +2024-08-03 07:40:50,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.18 vs. limit=15.0 +2024-08-03 07:40:57,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=66557.33333333333, ans=0.0 +2024-08-03 07:45:32,996 INFO [train.py:1114] (2/4) Epoch 5, batch 3600, loss[loss=0.3375, simple_loss=0.3766, pruned_loss=0.1492, over 8819.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3388, pruned_loss=0.1039, over 2488447.88 frames. ], batch size: 96, lr: 2.26e-02, grad_scale: 32.0 +2024-08-03 07:45:33,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.13 vs. limit=15.0 +2024-08-03 07:45:37,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66594.0, ans=0.0 +2024-08-03 07:45:56,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=66667.33333333333, ans=0.0 +2024-08-03 07:45:57,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=12.0 +2024-08-03 07:46:04,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=66704.0, ans=0.025 +2024-08-03 07:47:41,849 INFO [train.py:1114] (2/4) Epoch 6, batch 0, loss[loss=0.2575, simple_loss=0.3198, pruned_loss=0.09758, over 13315.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3198, pruned_loss=0.09758, over 13315.00 frames. ], batch size: 33, lr: 2.10e-02, grad_scale: 32.0 +2024-08-03 07:47:41,849 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 07:47:51,610 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.2159, simple_loss=0.3144, pruned_loss=0.05871, over 944034.00 frames. 
+2024-08-03 07:47:51,611 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 07:47:54,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=66744.33333333333, ans=0.2 +2024-08-03 07:48:04,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=66781.0, ans=0.025 +2024-08-03 07:48:14,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=66817.66666666667, ans=0.125 +2024-08-03 07:48:22,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66854.33333333333, ans=0.1 +2024-08-03 07:48:28,343 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.298e+02 1.438e+02 1.681e+02 2.917e+02, threshold=2.876e+02, percent-clipped=0.0 +2024-08-03 07:48:28,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66854.33333333333, ans=0.125 +2024-08-03 07:48:28,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=66854.33333333333, ans=0.2 +2024-08-03 07:48:36,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=66891.0, ans=0.0 +2024-08-03 07:48:39,355 INFO [train.py:1114] (2/4) Epoch 6, batch 50, loss[loss=0.2145, simple_loss=0.2914, pruned_loss=0.06882, over 13413.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3301, pruned_loss=0.09471, over 578416.39 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 32.0 +2024-08-03 07:48:53,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=66927.66666666667, ans=0.125 +2024-08-03 07:48:55,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.19 vs. limit=15.0 +2024-08-03 07:49:04,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=67001.0, ans=0.125 +2024-08-03 07:49:11,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=67001.0, ans=0.0 +2024-08-03 07:49:18,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=67037.66666666667, ans=0.125 +2024-08-03 07:49:26,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=67037.66666666667, ans=0.125 +2024-08-03 07:49:31,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=67074.33333333333, ans=0.2 +2024-08-03 07:49:31,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=67074.33333333333, ans=0.0 +2024-08-03 07:49:42,390 INFO [train.py:1114] (2/4) Epoch 6, batch 100, loss[loss=0.2466, simple_loss=0.3183, pruned_loss=0.08748, over 13523.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3303, pruned_loss=0.09318, over 1026370.67 frames. 
], batch size: 35, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:49:42,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67111.0, ans=0.125
+2024-08-03 07:50:08,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.43 vs. limit=12.0
+2024-08-03 07:50:11,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=67184.33333333333, ans=0.025
+2024-08-03 07:50:19,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=67221.0, ans=10.0
+2024-08-03 07:50:22,470 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.327e+02 1.627e+02 2.047e+02 3.063e+02, threshold=3.255e+02, percent-clipped=2.0
+2024-08-03 07:50:35,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67257.66666666667, ans=0.125
+2024-08-03 07:50:38,354 INFO [train.py:1114] (2/4) Epoch 6, batch 150, loss[loss=0.2346, simple_loss=0.3016, pruned_loss=0.08382, over 13412.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.328, pruned_loss=0.09288, over 1387440.12 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 16.0
+2024-08-03 07:51:36,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=67441.0, ans=0.125
+2024-08-03 07:51:40,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67441.0, ans=0.1
+2024-08-03 07:51:43,441 INFO [train.py:1114] (2/4) Epoch 6, batch 200, loss[loss=0.2526, simple_loss=0.3298, pruned_loss=0.08776, over 12487.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3261, pruned_loss=0.09191, over 1666030.16 frames. ], batch size: 58, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:51:44,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=22.19 vs. limit=22.5
+2024-08-03 07:51:56,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=67514.33333333333, ans=0.2
+2024-08-03 07:52:13,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.88 vs. limit=15.0
+2024-08-03 07:52:22,922 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.261e+02 1.408e+02 1.836e+02 2.572e+02, threshold=2.817e+02, percent-clipped=0.0
+2024-08-03 07:52:32,917 INFO [train.py:1114] (2/4) Epoch 6, batch 250, loss[loss=0.2626, simple_loss=0.3368, pruned_loss=0.09425, over 13301.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3264, pruned_loss=0.09195, over 1885016.45 frames. ], batch size: 46, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:52:41,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=67697.66666666667, ans=0.2
+2024-08-03 07:53:00,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=67734.33333333333, ans=0.0
+2024-08-03 07:53:11,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=15.0
+2024-08-03 07:53:27,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=67844.33333333333, ans=0.1
+2024-08-03 07:53:28,621 INFO [train.py:1114] (2/4) Epoch 6, batch 300, loss[loss=0.2712, simple_loss=0.3436, pruned_loss=0.09943, over 13459.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3254, pruned_loss=0.09159, over 2051706.96 frames. ], batch size: 42, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:53:29,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67844.33333333333, ans=0.125
+2024-08-03 07:53:37,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=67844.33333333333, ans=0.0
+2024-08-03 07:53:46,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=67881.0, ans=0.0
+2024-08-03 07:54:03,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=67954.33333333333, ans=0.125
+2024-08-03 07:54:03,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67954.33333333333, ans=0.125
+2024-08-03 07:54:06,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=67954.33333333333, ans=0.125
+2024-08-03 07:54:08,720 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.256e+02 1.436e+02 1.831e+02 3.083e+02, threshold=2.872e+02, percent-clipped=2.0
+2024-08-03 07:54:09,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=67991.0, ans=0.2
+2024-08-03 07:54:09,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=67991.0, ans=0.0
+2024-08-03 07:54:11,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=67991.0, ans=0.05
+2024-08-03 07:54:14,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.01 vs. limit=15.0
+2024-08-03 07:54:19,023 INFO [train.py:1114] (2/4) Epoch 6, batch 350, loss[loss=0.2235, simple_loss=0.2866, pruned_loss=0.0802, over 13578.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3258, pruned_loss=0.0919, over 2182856.95 frames. ], batch size: 33, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:54:19,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.69 vs. limit=15.0
+2024-08-03 07:54:35,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=68064.33333333333, ans=0.2
+2024-08-03 07:55:02,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=68174.33333333333, ans=0.025
+2024-08-03 07:55:05,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=68211.0, ans=0.125
+2024-08-03 07:55:06,647 INFO [train.py:1114] (2/4) Epoch 6, batch 400, loss[loss=0.2666, simple_loss=0.3349, pruned_loss=0.09917, over 13349.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3256, pruned_loss=0.09174, over 2286225.64 frames. ], batch size: 37, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:55:11,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.87 vs. limit=22.5
+2024-08-03 07:55:13,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=68211.0, ans=0.125
+2024-08-03 07:55:19,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=68247.66666666667, ans=0.0
+2024-08-03 07:55:27,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=68284.33333333333, ans=0.0
+2024-08-03 07:55:33,353 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:55:42,227 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.404e+02 1.670e+02 2.079e+02 3.576e+02, threshold=3.340e+02, percent-clipped=3.0
+2024-08-03 07:55:51,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=68394.33333333333, ans=0.2
+2024-08-03 07:55:51,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.92 vs. limit=10.0
+2024-08-03 07:55:52,433 INFO [train.py:1114] (2/4) Epoch 6, batch 450, loss[loss=0.2443, simple_loss=0.3271, pruned_loss=0.08077, over 13551.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3258, pruned_loss=0.09167, over 2359850.90 frames. ], batch size: 38, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:56:09,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=68431.0, ans=0.0
+2024-08-03 07:56:18,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=68467.66666666667, ans=0.025
+2024-08-03 07:56:25,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=68504.33333333333, ans=0.2
+2024-08-03 07:56:33,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=68541.0, ans=0.125
+2024-08-03 07:56:36,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=68541.0, ans=0.025
+2024-08-03 07:56:40,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=68577.66666666667, ans=0.2
+2024-08-03 07:56:41,078 INFO [train.py:1114] (2/4) Epoch 6, batch 500, loss[loss=0.2381, simple_loss=0.3223, pruned_loss=0.07691, over 13392.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.324, pruned_loss=0.09063, over 2425603.21 frames. ], batch size: 43, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:56:50,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=68614.33333333333, ans=0.2
+2024-08-03 07:56:55,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=68614.33333333333, ans=0.125
+2024-08-03 07:57:01,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0
+2024-08-03 07:57:08,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68687.66666666667, ans=0.125
+2024-08-03 07:57:14,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68687.66666666667, ans=0.0
+2024-08-03 07:57:15,947 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.283e+02 1.463e+02 1.945e+02 3.864e+02, threshold=2.927e+02, percent-clipped=1.0
+2024-08-03 07:57:23,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.24 vs. limit=15.0
+2024-08-03 07:57:25,751 INFO [train.py:1114] (2/4) Epoch 6, batch 550, loss[loss=0.2606, simple_loss=0.343, pruned_loss=0.08914, over 13053.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3239, pruned_loss=0.09087, over 2467338.22 frames. ], batch size: 48, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:57:29,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.71 vs. limit=15.0
+2024-08-03 07:57:31,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68761.0, ans=0.0
+2024-08-03 07:57:35,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68797.66666666667, ans=0.0
+2024-08-03 07:57:36,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.63 vs. limit=22.5
+2024-08-03 07:58:02,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.95 vs. limit=22.5
+2024-08-03 07:58:15,094 INFO [train.py:1114] (2/4) Epoch 6, batch 600, loss[loss=0.2447, simple_loss=0.3236, pruned_loss=0.08291, over 13335.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3236, pruned_loss=0.09047, over 2507197.65 frames. ], batch size: 46, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:58:17,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=68944.33333333333, ans=0.125
+2024-08-03 07:58:20,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=68944.33333333333, ans=0.125
+2024-08-03 07:58:32,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68981.0, ans=0.1
+2024-08-03 07:58:44,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=69054.33333333333, ans=0.125
+2024-08-03 07:58:45,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=69054.33333333333, ans=0.07
+2024-08-03 07:58:46,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=69054.33333333333, ans=0.0
+2024-08-03 07:58:49,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=69054.33333333333, ans=0.125
+2024-08-03 07:58:52,940 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.444e+02 1.751e+02 2.367e+02 5.361e+02, threshold=3.502e+02, percent-clipped=14.0
+2024-08-03 07:59:03,045 INFO [train.py:1114] (2/4) Epoch 6, batch 650, loss[loss=0.1947, simple_loss=0.2714, pruned_loss=0.05906, over 13542.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3213, pruned_loss=0.08819, over 2542434.62 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:59:14,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=69164.33333333333, ans=0.2
+2024-08-03 07:59:37,201 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:59:54,468 INFO [train.py:1114] (2/4) Epoch 6, batch 700, loss[loss=0.2432, simple_loss=0.3119, pruned_loss=0.08727, over 13533.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3219, pruned_loss=0.0885, over 2564918.63 frames. ], batch size: 35, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:59:57,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69311.0, ans=0.1
+2024-08-03 08:00:17,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=69384.33333333333, ans=0.125
+2024-08-03 08:00:30,544 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.317e+02 1.524e+02 2.110e+02 4.129e+02, threshold=3.048e+02, percent-clipped=1.0
+2024-08-03 08:00:30,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=69457.66666666667, ans=0.125
+2024-08-03 08:00:37,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=69457.66666666667, ans=0.0
+2024-08-03 08:00:39,420 INFO [train.py:1114] (2/4) Epoch 6, batch 750, loss[loss=0.2259, simple_loss=0.3112, pruned_loss=0.07027, over 13364.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3223, pruned_loss=0.08884, over 2582387.23 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 16.0
+2024-08-03 08:00:56,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69567.66666666667, ans=0.1
+2024-08-03 08:01:01,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.43 vs. limit=12.0
+2024-08-03 08:01:03,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=69567.66666666667, ans=0.0
+2024-08-03 08:01:05,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=69604.33333333333, ans=0.07
+2024-08-03 08:01:07,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. limit=10.0
+2024-08-03 08:01:16,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=69641.0, ans=0.125
+2024-08-03 08:01:18,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.33 vs. limit=22.5
+2024-08-03 08:01:19,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=69641.0, ans=0.2
+2024-08-03 08:01:25,634 INFO [train.py:1114] (2/4) Epoch 6, batch 800, loss[loss=0.22, simple_loss=0.2933, pruned_loss=0.07337, over 13354.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3216, pruned_loss=0.08842, over 2597516.90 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:01:30,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=69677.66666666667, ans=0.2
+2024-08-03 08:01:32,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=69677.66666666667, ans=0.5
+2024-08-03 08:01:36,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69714.33333333333, ans=0.125
+2024-08-03 08:02:03,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.66 vs. limit=15.0
+2024-08-03 08:02:04,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.299e+02 1.541e+02 1.861e+02 5.767e+02, threshold=3.082e+02, percent-clipped=2.0
+2024-08-03 08:02:12,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=69824.33333333333, ans=0.125
+2024-08-03 08:02:15,430 INFO [train.py:1114] (2/4) Epoch 6, batch 850, loss[loss=0.2736, simple_loss=0.3526, pruned_loss=0.09733, over 13345.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3207, pruned_loss=0.08792, over 2610206.02 frames. ], batch size: 40, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:02:21,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69861.0, ans=0.125
+2024-08-03 08:02:25,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69897.66666666667, ans=0.125
+2024-08-03 08:02:42,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=69934.33333333333, ans=0.0
+2024-08-03 08:02:43,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=69934.33333333333, ans=0.025
+2024-08-03 08:02:44,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=69971.0, ans=15.0
+2024-08-03 08:02:46,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69971.0, ans=0.1
+2024-08-03 08:03:05,399 INFO [train.py:1114] (2/4) Epoch 6, batch 900, loss[loss=0.2639, simple_loss=0.3269, pruned_loss=0.1005, over 13357.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3215, pruned_loss=0.08812, over 2612536.39 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:03:16,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=70081.0, ans=0.025
+2024-08-03 08:03:17,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=70081.0, ans=10.0
+2024-08-03 08:03:19,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=70081.0, ans=0.2
+2024-08-03 08:03:37,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=70154.33333333333, ans=0.0
+2024-08-03 08:03:38,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=70154.33333333333, ans=0.125
+2024-08-03 08:03:45,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.28 vs. limit=12.0
+2024-08-03 08:03:47,225 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.449e+02 1.731e+02 2.120e+02 4.168e+02, threshold=3.462e+02, percent-clipped=3.0
+2024-08-03 08:03:56,473 INFO [train.py:1114] (2/4) Epoch 6, batch 950, loss[loss=0.2313, simple_loss=0.3052, pruned_loss=0.07868, over 13526.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3218, pruned_loss=0.0885, over 2612799.78 frames. ], batch size: 34, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:04:03,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=70227.66666666667, ans=0.125
+2024-08-03 08:04:05,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=70264.33333333333, ans=0.2
+2024-08-03 08:04:11,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0
+2024-08-03 08:04:20,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=70301.0, ans=0.125
+2024-08-03 08:04:37,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70374.33333333333, ans=0.1
+2024-08-03 08:04:38,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70374.33333333333, ans=0.125
+2024-08-03 08:04:42,184 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:04:43,847 INFO [train.py:1114] (2/4) Epoch 6, batch 1000, loss[loss=0.2301, simple_loss=0.3129, pruned_loss=0.07365, over 13383.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3227, pruned_loss=0.08914, over 2611076.47 frames. ], batch size: 35, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:04:58,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=70447.66666666667, ans=0.125
+2024-08-03 08:04:58,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=70447.66666666667, ans=0.2
+2024-08-03 08:05:04,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0
+2024-08-03 08:05:05,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=70484.33333333333, ans=0.0
+2024-08-03 08:05:22,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=70557.66666666667, ans=0.025
+2024-08-03 08:05:22,971 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.312e+02 1.654e+02 1.929e+02 3.115e+02, threshold=3.308e+02, percent-clipped=0.0
+2024-08-03 08:05:34,389 INFO [train.py:1114] (2/4) Epoch 6, batch 1050, loss[loss=0.2603, simple_loss=0.347, pruned_loss=0.08678, over 13574.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3219, pruned_loss=0.08874, over 2615541.52 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:05:39,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=70594.33333333333, ans=0.125
+2024-08-03 08:06:11,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=70741.0, ans=0.0
+2024-08-03 08:06:13,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=70741.0, ans=0.125
+2024-08-03 08:06:21,129 INFO [train.py:1114] (2/4) Epoch 6, batch 1100, loss[loss=0.236, simple_loss=0.3078, pruned_loss=0.08205, over 13570.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3222, pruned_loss=0.089, over 2619337.03 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:06:27,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=70777.66666666667, ans=0.125
+2024-08-03 08:06:30,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=70814.33333333333, ans=0.04949747468305833
+2024-08-03 08:06:57,396 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.320e+02 1.582e+02 2.063e+02 3.090e+02, threshold=3.163e+02, percent-clipped=0.0
+2024-08-03 08:07:01,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=70924.33333333333, ans=0.125
+2024-08-03 08:07:08,202 INFO [train.py:1114] (2/4) Epoch 6, batch 1150, loss[loss=0.2481, simple_loss=0.3209, pruned_loss=0.08766, over 13563.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.321, pruned_loss=0.08837, over 2618631.60 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:07:12,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=70961.0, ans=0.025
+2024-08-03 08:07:17,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0
+2024-08-03 08:07:28,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=71034.33333333333, ans=0.025
+2024-08-03 08:07:39,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=71071.0, ans=0.0
+2024-08-03 08:07:55,755 INFO [train.py:1114] (2/4) Epoch 6, batch 1200, loss[loss=0.2329, simple_loss=0.3242, pruned_loss=0.07082, over 13570.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.322, pruned_loss=0.08846, over 2616571.90 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:08:24,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71254.33333333333, ans=0.1
+2024-08-03 08:08:28,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0
+2024-08-03 08:08:31,455 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.251e+02 1.398e+02 1.677e+02 2.839e+02, threshold=2.796e+02, percent-clipped=0.0
+2024-08-03 08:08:40,470 INFO [train.py:1114] (2/4) Epoch 6, batch 1250, loss[loss=0.2665, simple_loss=0.3404, pruned_loss=0.0963, over 13451.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3218, pruned_loss=0.08803, over 2628178.45 frames. ], batch size: 42, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:08:56,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71364.33333333333, ans=0.1
+2024-08-03 08:09:13,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0
+2024-08-03 08:09:14,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=71437.66666666667, ans=0.0
+2024-08-03 08:09:27,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71474.33333333333, ans=0.0
+2024-08-03 08:09:29,157 INFO [train.py:1114] (2/4) Epoch 6, batch 1300, loss[loss=0.2682, simple_loss=0.3442, pruned_loss=0.0961, over 12831.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3203, pruned_loss=0.08737, over 2630967.16 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:09:34,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=71511.0, ans=0.015
+2024-08-03 08:09:45,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.97 vs. limit=22.5
+2024-08-03 08:09:49,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.77 vs. limit=22.5
+2024-08-03 08:09:52,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=71584.33333333333, ans=0.125
+2024-08-03 08:09:52,551 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:09:52,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=71584.33333333333, ans=0.2
+2024-08-03 08:09:53,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=71584.33333333333, ans=0.0
+2024-08-03 08:09:56,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=71584.33333333333, ans=0.0
+2024-08-03 08:10:12,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=71621.0, ans=0.025
+2024-08-03 08:10:14,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=71621.0, ans=0.0
+2024-08-03 08:10:17,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=71621.0, ans=0.0
+2024-08-03 08:10:21,999 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.330e+02 1.619e+02 2.134e+02 3.747e+02, threshold=3.238e+02, percent-clipped=6.0
+2024-08-03 08:10:28,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=71657.66666666667, ans=15.0
+2024-08-03 08:10:31,266 INFO [train.py:1114] (2/4) Epoch 6, batch 1350, loss[loss=0.2216, simple_loss=0.3053, pruned_loss=0.069, over 13542.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3201, pruned_loss=0.08732, over 2638471.49 frames. ], batch size: 37, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:10:54,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=71731.0, ans=0.125
+2024-08-03 08:11:05,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=71731.0, ans=15.0
+2024-08-03 08:11:23,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.74 vs. limit=22.5
+2024-08-03 08:11:29,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=71804.33333333333, ans=0.0
+2024-08-03 08:11:33,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=71804.33333333333, ans=0.015
+2024-08-03 08:11:39,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.06 vs. limit=10.0
+2024-08-03 08:11:47,807 INFO [train.py:1114] (2/4) Epoch 6, batch 1400, loss[loss=0.2352, simple_loss=0.2999, pruned_loss=0.08526, over 13246.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3199, pruned_loss=0.08725, over 2642266.76 frames. ], batch size: 31, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:12:02,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.80 vs. limit=22.5
+2024-08-03 08:12:18,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71987.66666666667, ans=0.1
+2024-08-03 08:12:18,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.41 vs. limit=15.0
+2024-08-03 08:12:24,348 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.721e+01 1.334e+02 1.562e+02 1.833e+02 2.897e+02, threshold=3.124e+02, percent-clipped=0.0
+2024-08-03 08:12:37,232 INFO [train.py:1114] (2/4) Epoch 6, batch 1450, loss[loss=0.3061, simple_loss=0.3753, pruned_loss=0.1185, over 13427.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3215, pruned_loss=0.08798, over 2640707.62 frames. ], batch size: 43, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:12:37,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=72061.0, ans=0.025
+2024-08-03 08:12:41,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=10.0
+2024-08-03 08:12:42,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=72061.0, ans=0.125
+2024-08-03 08:13:02,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=72134.33333333333, ans=0.0
+2024-08-03 08:13:10,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=72171.0, ans=0.0
+2024-08-03 08:13:12,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=72207.66666666667, ans=0.2
+2024-08-03 08:13:24,201 INFO [train.py:1114] (2/4) Epoch 6, batch 1500, loss[loss=0.2506, simple_loss=0.333, pruned_loss=0.08407, over 13400.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3211, pruned_loss=0.08757, over 2640358.15 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:13:29,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72244.33333333333, ans=0.125
+2024-08-03 08:13:33,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=72281.0, ans=0.2
+2024-08-03 08:13:36,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72281.0, ans=0.125
+2024-08-03 08:13:50,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=72317.66666666667, ans=0.125
+2024-08-03 08:14:02,720 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.371e+02 1.678e+02 2.032e+02 3.850e+02, threshold=3.356e+02, percent-clipped=2.0
+2024-08-03 08:14:07,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=72391.0, ans=0.125
+2024-08-03 08:14:09,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72391.0, ans=0.125
+2024-08-03 08:14:09,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=72391.0, ans=0.0
+2024-08-03 08:14:11,850 INFO [train.py:1114] (2/4) Epoch 6, batch 1550, loss[loss=0.2659, simple_loss=0.3494, pruned_loss=0.09126, over 13392.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.322, pruned_loss=0.0886, over 2630903.91 frames. ], batch size: 41, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:14:15,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. limit=15.0
+2024-08-03 08:14:16,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.06 vs. limit=15.0
+2024-08-03 08:14:19,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.62 vs. limit=15.0
+2024-08-03 08:14:23,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=72464.33333333333, ans=0.07
+2024-08-03 08:14:31,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=72501.0, ans=0.025
+2024-08-03 08:14:34,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=72501.0, ans=0.025
+2024-08-03 08:14:34,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=72501.0, ans=0.125
+2024-08-03 08:14:38,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=72537.66666666667, ans=0.2
+2024-08-03 08:14:40,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=72537.66666666667, ans=0.2
+2024-08-03 08:14:43,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=72537.66666666667, ans=0.125
+2024-08-03 08:14:59,388 INFO [train.py:1114] (2/4) Epoch 6, batch 1600, loss[loss=0.2726, simple_loss=0.3481, pruned_loss=0.09857, over 13574.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3221, pruned_loss=0.08871, over 2625023.03 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:15:07,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=72611.0, ans=0.125
+2024-08-03 08:15:10,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=72647.66666666667, ans=0.1
+2024-08-03 08:15:20,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=72684.33333333333, ans=0.09899494936611666
+2024-08-03 08:15:29,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0
+2024-08-03 08:15:38,174 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.361e+02 1.766e+02 2.117e+02 3.688e+02, threshold=3.533e+02, percent-clipped=3.0
+2024-08-03 08:15:38,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=72757.66666666667, ans=0.125
+2024-08-03 08:15:47,188 INFO [train.py:1114] (2/4) Epoch 6, batch 1650, loss[loss=0.2582, simple_loss=0.338, pruned_loss=0.08922, over 13329.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3217, pruned_loss=0.08846, over 2621940.04 frames. ], batch size: 40, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:15:47,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.66 vs. limit=10.0
+2024-08-03 08:16:00,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72831.0, ans=0.1
+2024-08-03 08:16:13,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.49 vs. limit=22.5
+2024-08-03 08:16:18,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=72904.33333333333, ans=0.0
+2024-08-03 08:16:25,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72941.0, ans=0.1
+2024-08-03 08:16:32,585 INFO [train.py:1114] (2/4) Epoch 6, batch 1700, loss[loss=0.2173, simple_loss=0.2883, pruned_loss=0.07312, over 13263.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3209, pruned_loss=0.08767, over 2631184.28 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0
+2024-08-03 08:16:34,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=72977.66666666667, ans=0.125
+2024-08-03 08:16:35,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.56 vs. limit=12.0
+2024-08-03 08:16:38,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=72977.66666666667, ans=0.04949747468305833
+2024-08-03 08:16:44,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=73014.33333333333, ans=0.0
+2024-08-03 08:16:50,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=73014.33333333333, ans=0.2
+2024-08-03 08:17:00,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=73087.66666666667, ans=0.025
+2024-08-03 08:17:01,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=73087.66666666667, ans=0.05
+2024-08-03 08:17:12,007 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.261e+02 1.452e+02 1.741e+02 3.211e+02, threshold=2.904e+02, percent-clipped=0.0
+2024-08-03 08:17:16,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=73124.33333333333, ans=0.125
+2024-08-03 08:17:20,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.24 vs. limit=15.0
+2024-08-03 08:17:21,256 INFO [train.py:1114] (2/4) Epoch 6, batch 1750, loss[loss=0.2402, simple_loss=0.3019, pruned_loss=0.08919, over 13555.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3202, pruned_loss=0.08744, over 2634763.90 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0
+2024-08-03 08:17:24,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.95 vs. limit=12.0
+2024-08-03 08:17:31,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=73197.66666666667, ans=0.125
+2024-08-03 08:17:34,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=73197.66666666667, ans=0.2
+2024-08-03 08:17:40,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=73234.33333333333, ans=0.07
+2024-08-03 08:17:45,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=73234.33333333333, ans=0.125
+2024-08-03 08:18:08,632 INFO [train.py:1114] (2/4) Epoch 6, batch 1800, loss[loss=0.2639, simple_loss=0.333, pruned_loss=0.09747, over 13545.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3219, pruned_loss=0.0892, over 2636049.70 frames. ], batch size: 38, lr: 2.02e-02, grad_scale: 32.0
+2024-08-03 08:18:16,932 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:18:32,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=73417.66666666667, ans=0.09899494936611666
+2024-08-03 08:18:34,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73417.66666666667, ans=0.1
+2024-08-03 08:18:35,154 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:18:47,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=73454.33333333333, ans=0.125
+2024-08-03 08:18:50,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=73491.0, ans=0.0
+2024-08-03 08:18:50,839 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.331e+02 1.549e+02 2.028e+02 3.164e+02, threshold=3.097e+02, percent-clipped=2.0
+2024-08-03 08:18:54,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=73491.0, ans=0.0
+2024-08-03 08:18:59,636 INFO [train.py:1114] (2/4) Epoch 6, batch 1850, loss[loss=0.2344, simple_loss=0.3141, pruned_loss=0.07739, over 13391.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3212, pruned_loss=0.08872, over 2637390.61 frames. ], batch size: 39, lr: 2.02e-02, grad_scale: 16.0
+2024-08-03 08:19:07,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=73527.66666666667, ans=0.125
+2024-08-03 08:19:13,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=73564.33333333333, ans=0.125
+2024-08-03 08:19:16,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=73564.33333333333, ans=0.2
+2024-08-03 08:19:21,893 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0
+2024-08-03 08:19:32,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.53 vs. limit=22.5
+2024-08-03 08:19:51,022 INFO [train.py:1114] (2/4) Epoch 6, batch 1900, loss[loss=0.2594, simple_loss=0.3398, pruned_loss=0.08949, over 13337.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3215, pruned_loss=0.08847, over 2639918.45 frames. ], batch size: 40, lr: 2.01e-02, grad_scale: 16.0
+2024-08-03 08:20:17,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73784.33333333333, ans=0.125
+2024-08-03 08:20:17,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73784.33333333333, ans=0.1
+2024-08-03 08:20:25,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.90 vs. limit=15.0
+2024-08-03 08:20:29,682 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.282e+02 1.594e+02 1.886e+02 3.634e+02, threshold=3.188e+02, percent-clipped=1.0
+2024-08-03 08:21:06,039 INFO [train.py:1114] (2/4) Epoch 6, batch 1950, loss[loss=0.2606, simple_loss=0.3345, pruned_loss=0.09334, over 13562.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3228, pruned_loss=0.08882, over 2646557.31 frames. ], batch size: 36, lr: 2.01e-02, grad_scale: 16.0
+2024-08-03 08:21:16,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=73894.33333333333, ans=0.2
+2024-08-03 08:21:20,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=73894.33333333333, ans=0.125
+2024-08-03 08:21:39,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.37 vs. limit=22.5
+2024-08-03 08:21:48,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=74004.33333333333, ans=0.0
+2024-08-03 08:21:53,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=74041.0, ans=0.125
+2024-08-03 08:21:58,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0
+2024-08-03 08:22:01,507 INFO [train.py:1114] (2/4) Epoch 6, batch 2000, loss[loss=0.2366, simple_loss=0.3099, pruned_loss=0.08165, over 13522.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3234, pruned_loss=0.08929, over 2635459.71 frames. ], batch size: 31, lr: 2.01e-02, grad_scale: 32.0
+2024-08-03 08:22:12,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.81 vs. limit=5.0
+2024-08-03 08:22:13,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=74114.33333333333, ans=0.0
+2024-08-03 08:22:30,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.28 vs. limit=12.0
+2024-08-03 08:22:40,826 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.304e+02 1.521e+02 1.870e+02 3.402e+02, threshold=3.042e+02, percent-clipped=1.0
+2024-08-03 08:22:51,649 INFO [train.py:1114] (2/4) Epoch 6, batch 2050, loss[loss=0.2293, simple_loss=0.2934, pruned_loss=0.08258, over 13422.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3219, pruned_loss=0.0888, over 2632702.35 frames. ], batch size: 32, lr: 2.01e-02, grad_scale: 32.0
+2024-08-03 08:22:57,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=74261.0, ans=0.0
+2024-08-03 08:23:06,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74297.66666666667, ans=0.1
+2024-08-03 08:23:24,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=74371.0, ans=0.2
+2024-08-03 08:23:25,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=74371.0, ans=0.125
+2024-08-03 08:23:27,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.72 vs. limit=15.0
+2024-08-03 08:23:29,088 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.57 vs. limit=15.0
+2024-08-03 08:23:38,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74407.66666666667, ans=0.1
+2024-08-03 08:23:40,250 INFO [train.py:1114] (2/4) Epoch 6, batch 2100, loss[loss=0.2272, simple_loss=0.3087, pruned_loss=0.07285, over 13544.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3214, pruned_loss=0.08826, over 2638606.61 frames. ], batch size: 37, lr: 2.01e-02, grad_scale: 32.0
+2024-08-03 08:23:40,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=74444.33333333333, ans=0.125
+2024-08-03 08:23:47,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=74444.33333333333, ans=0.0
+2024-08-03 08:23:50,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=74481.0, ans=0.125
+2024-08-03 08:23:51,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=74481.0, ans=0.0
+2024-08-03 08:23:52,616 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.08 vs. limit=22.5
+2024-08-03 08:23:53,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74481.0, ans=0.0
+2024-08-03 08:24:02,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=12.0
+2024-08-03 08:24:11,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.19 vs. limit=15.0
+2024-08-03 08:24:16,930 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.291e+02 1.674e+02 2.132e+02 3.817e+02, threshold=3.348e+02, percent-clipped=5.0
+2024-08-03 08:24:25,015 INFO [train.py:1114] (2/4) Epoch 6, batch 2150, loss[loss=0.2284, simple_loss=0.2981, pruned_loss=0.07935, over 13563.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.32, pruned_loss=0.08715, over 2646981.18 frames. ], batch size: 36, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:24:38,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=74664.33333333333, ans=0.125
+2024-08-03 08:24:43,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=74701.0, ans=0.05
+2024-08-03 08:24:43,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74701.0, ans=0.125
+2024-08-03 08:24:44,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74701.0, ans=0.125
+2024-08-03 08:24:45,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=74701.0, ans=6.0
+2024-08-03 08:25:02,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=74774.33333333333, ans=0.0
+2024-08-03 08:25:04,546 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0
+2024-08-03 08:25:14,036 INFO [train.py:1114] (2/4) Epoch 6, batch 2200, loss[loss=0.25, simple_loss=0.322, pruned_loss=0.08897, over 13407.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3196, pruned_loss=0.08701, over 2644262.35 frames. ], batch size: 39, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:25:18,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=74811.0, ans=0.05
+2024-08-03 08:25:24,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=74847.66666666667, ans=0.125
+2024-08-03 08:25:50,666 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.295e+02 1.544e+02 1.950e+02 3.525e+02, threshold=3.088e+02, percent-clipped=1.0
+2024-08-03 08:25:59,016 INFO [train.py:1114] (2/4) Epoch 6, batch 2250, loss[loss=0.2124, simple_loss=0.3004, pruned_loss=0.0622, over 13346.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3187, pruned_loss=0.08638, over 2641138.06 frames. ], batch size: 37, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:26:00,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=74994.33333333333, ans=0.2
+2024-08-03 08:26:12,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=15.0
+2024-08-03 08:26:12,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=75031.0, ans=0.025
+2024-08-03 08:26:14,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75031.0, ans=0.125
+2024-08-03 08:26:21,854 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0
+2024-08-03 08:26:31,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=75104.33333333333, ans=0.125
+2024-08-03 08:26:42,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=75141.0, ans=0.0
+2024-08-03 08:26:48,066 INFO [train.py:1114] (2/4) Epoch 6, batch 2300, loss[loss=0.2178, simple_loss=0.2879, pruned_loss=0.07386, over 13581.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3177, pruned_loss=0.08621, over 2637854.17 frames. ], batch size: 33, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:26:50,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75177.66666666667, ans=0.125
+2024-08-03 08:26:52,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=75177.66666666667, ans=0.2
+2024-08-03 08:26:53,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.41 vs. limit=15.0
+2024-08-03 08:27:04,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=75214.33333333333, ans=0.025
+2024-08-03 08:27:06,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0
+2024-08-03 08:27:09,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75251.0, ans=0.1
+2024-08-03 08:27:16,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75287.66666666667, ans=0.125
+2024-08-03 08:27:25,146 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.310e+02 1.601e+02 2.046e+02 3.853e+02, threshold=3.202e+02, percent-clipped=4.0
+2024-08-03 08:27:28,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=75324.33333333333, ans=0.025
+2024-08-03 08:27:31,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.06 vs. limit=6.0
+2024-08-03 08:27:33,457 INFO [train.py:1114] (2/4) Epoch 6, batch 2350, loss[loss=0.2491, simple_loss=0.3342, pruned_loss=0.08199, over 13544.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3179, pruned_loss=0.08614, over 2640929.83 frames. ], batch size: 38, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:27:39,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=75361.0, ans=0.125
+2024-08-03 08:27:57,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75434.33333333333, ans=0.1
+2024-08-03 08:27:59,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=75471.0, ans=0.0
+2024-08-03 08:28:06,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=75471.0, ans=0.0
+2024-08-03 08:28:06,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75471.0, ans=0.1
+2024-08-03 08:28:17,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=75507.66666666667, ans=0.2
+2024-08-03 08:28:18,865 INFO [train.py:1114] (2/4) Epoch 6, batch 2400, loss[loss=0.2028, simple_loss=0.2842, pruned_loss=0.06068, over 13529.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3187, pruned_loss=0.08636, over 2642401.97 frames. ], batch size: 35, lr: 1.99e-02, grad_scale: 32.0
+2024-08-03 08:28:19,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75544.33333333333, ans=0.125
+2024-08-03 08:28:21,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.23 vs. limit=22.5
+2024-08-03 08:28:39,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75581.0, ans=0.0
+2024-08-03 08:28:41,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=75617.66666666667, ans=0.125
+2024-08-03 08:28:50,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=75654.33333333333, ans=0.125
+2024-08-03 08:28:59,502 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.273e+02 1.529e+02 2.027e+02 4.146e+02, threshold=3.058e+02, percent-clipped=9.0
+2024-08-03 08:28:59,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=75691.0, ans=0.125
+2024-08-03 08:29:03,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=75691.0, ans=0.125
+2024-08-03 08:29:03,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75691.0, ans=0.1
+2024-08-03 08:29:07,936 INFO [train.py:1114] (2/4) Epoch 6, batch 2450, loss[loss=0.2613, simple_loss=0.3337, pruned_loss=0.09443, over 13374.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3211, pruned_loss=0.08808, over 2632917.79 frames. ], batch size: 37, lr: 1.99e-02, grad_scale: 32.0
+2024-08-03 08:29:10,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=75727.66666666667, ans=0.125
+2024-08-03 08:29:27,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75801.0, ans=0.1
+2024-08-03 08:29:34,111 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:29:52,482 INFO [train.py:1114] (2/4) Epoch 6, batch 2500, loss[loss=0.2297, simple_loss=0.3155, pruned_loss=0.07194, over 13408.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3205, pruned_loss=0.08736, over 2637210.22 frames. ], batch size: 39, lr: 1.99e-02, grad_scale: 32.0
+2024-08-03 08:29:56,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0
+2024-08-03 08:29:58,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=75911.0, ans=0.125
+2024-08-03 08:30:03,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=75947.66666666667, ans=0.125
+2024-08-03 08:30:04,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=75947.66666666667, ans=0.125
+2024-08-03 08:30:06,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.55 vs. limit=15.0
+2024-08-03 08:30:07,383 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:30:23,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.19 vs. limit=12.0
+2024-08-03 08:30:27,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=76021.0, ans=0.125
+2024-08-03 08:30:31,089 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.253e+02 1.529e+02 2.022e+02 3.392e+02, threshold=3.058e+02, percent-clipped=3.0
+2024-08-03 08:30:37,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=76094.33333333333, ans=0.0
+2024-08-03 08:30:38,264 INFO [train.py:1114] (2/4) Epoch 6, batch 2550, loss[loss=0.2252, simple_loss=0.2937, pruned_loss=0.07838, over 13540.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3196, pruned_loss=0.08644, over 2637951.17 frames. ], batch size: 31, lr: 1.99e-02, grad_scale: 16.0
+2024-08-03 08:30:46,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=76094.33333333333, ans=0.07
+2024-08-03 08:31:12,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.85 vs. limit=15.0
+2024-08-03 08:31:14,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=76204.33333333333, ans=0.125
+2024-08-03 08:31:15,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76241.0, ans=0.1
+2024-08-03 08:31:25,203 INFO [train.py:1114] (2/4) Epoch 6, batch 2600, loss[loss=0.2204, simple_loss=0.298, pruned_loss=0.07139, over 13566.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3197, pruned_loss=0.08654, over 2637414.93 frames. ], batch size: 36, lr: 1.98e-02, grad_scale: 16.0
+2024-08-03 08:31:27,988 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:31:40,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=76314.33333333333, ans=0.2
+2024-08-03 08:31:52,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=76387.66666666667, ans=0.0
+2024-08-03 08:31:55,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=76387.66666666667, ans=0.0
+2024-08-03 08:32:01,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.310e+02 1.554e+02 1.964e+02 3.750e+02, threshold=3.108e+02, percent-clipped=4.0
+2024-08-03 08:32:08,707 INFO [train.py:1114] (2/4) Epoch 6, batch 2650, loss[loss=0.2532, simple_loss=0.3343, pruned_loss=0.08605, over 13308.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.32, pruned_loss=0.08669, over 2639870.51 frames. ], batch size: 46, lr: 1.98e-02, grad_scale: 16.0
+2024-08-03 08:32:14,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=76461.0, ans=0.025
+2024-08-03 08:32:26,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=76534.33333333333, ans=0.2
+2024-08-03 08:32:31,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=76534.33333333333, ans=0.125
+2024-08-03 08:32:34,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.70 vs. limit=15.0
+2024-08-03 08:32:39,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=76571.0, ans=0.0
+2024-08-03 08:32:41,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76571.0, ans=0.1
+2024-08-03 08:32:52,919 INFO [train.py:1114] (2/4) Epoch 6, batch 2700, loss[loss=0.226, simple_loss=0.3062, pruned_loss=0.07287, over 13535.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3201, pruned_loss=0.08636, over 2637489.73 frames.
], batch size: 40, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:33:14,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=76717.66666666667, ans=0.125 +2024-08-03 08:33:18,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=76754.33333333333, ans=0.2 +2024-08-03 08:33:24,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=76754.33333333333, ans=0.2 +2024-08-03 08:33:27,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=76791.0, ans=0.125 +2024-08-03 08:33:28,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76791.0, ans=0.1 +2024-08-03 08:33:29,835 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.293e+02 1.589e+02 2.019e+02 3.318e+02, threshold=3.177e+02, percent-clipped=2.0 +2024-08-03 08:33:33,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=76791.0, ans=0.125 +2024-08-03 08:33:41,623 INFO [train.py:1114] (2/4) Epoch 6, batch 2750, loss[loss=0.2408, simple_loss=0.3064, pruned_loss=0.08755, over 13330.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3192, pruned_loss=0.08647, over 2635300.06 frames. ], batch size: 34, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:33:47,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=76827.66666666667, ans=0.125 +2024-08-03 08:33:57,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76864.33333333333, ans=0.1 +2024-08-03 08:34:05,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76901.0, ans=0.1 +2024-08-03 08:34:08,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.17 vs. limit=10.0 +2024-08-03 08:34:09,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=76937.66666666667, ans=0.0 +2024-08-03 08:34:25,230 INFO [train.py:1114] (2/4) Epoch 6, batch 2800, loss[loss=0.3872, simple_loss=0.4002, pruned_loss=0.1871, over 9407.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3199, pruned_loss=0.08761, over 2626692.34 frames. ], batch size: 97, lr: 1.98e-02, grad_scale: 32.0 +2024-08-03 08:34:39,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=77047.66666666667, ans=0.125 +2024-08-03 08:34:57,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=77121.0, ans=0.125 +2024-08-03 08:35:01,671 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.373e+02 1.725e+02 2.259e+02 3.489e+02, threshold=3.451e+02, percent-clipped=3.0 +2024-08-03 08:35:08,643 INFO [train.py:1114] (2/4) Epoch 6, batch 2850, loss[loss=0.2334, simple_loss=0.3037, pruned_loss=0.08157, over 13362.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3202, pruned_loss=0.08763, over 2621496.18 frames. 
], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:17,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.41 vs. limit=15.0 +2024-08-03 08:35:29,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77267.66666666667, ans=0.1 +2024-08-03 08:35:34,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.40 vs. limit=6.0 +2024-08-03 08:35:41,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.47 vs. limit=12.0 +2024-08-03 08:35:49,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=77341.0, ans=0.025 +2024-08-03 08:35:49,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=77341.0, ans=0.0 +2024-08-03 08:35:53,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.39 vs. limit=15.0 +2024-08-03 08:35:53,868 INFO [train.py:1114] (2/4) Epoch 6, batch 2900, loss[loss=0.2446, simple_loss=0.3186, pruned_loss=0.08527, over 13373.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3213, pruned_loss=0.08756, over 2632065.30 frames. ], batch size: 36, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:55,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.81 vs. limit=22.5 +2024-08-03 08:36:00,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=77377.66666666667, ans=0.0 +2024-08-03 08:36:03,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. limit=10.0 +2024-08-03 08:36:07,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=77414.33333333333, ans=0.2 +2024-08-03 08:36:14,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.48 vs. 
limit=15.0 +2024-08-03 08:36:14,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=77451.0, ans=0.125 +2024-08-03 08:36:16,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77451.0, ans=0.1 +2024-08-03 08:36:17,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=77451.0, ans=0.025 +2024-08-03 08:36:20,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=77487.66666666667, ans=0.2 +2024-08-03 08:40:05,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=77487.66666666667, ans=0.125 +2024-08-03 08:40:34,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=77487.66666666667, ans=0.0 +2024-08-03 08:41:12,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77524.33333333333, ans=0.125 +2024-08-03 08:41:12,656 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.224e+02 1.336e+02 1.587e+02 3.692e+02, threshold=2.672e+02, percent-clipped=1.0 +2024-08-03 08:41:32,114 INFO [train.py:1114] (2/4) Epoch 6, batch 2950, loss[loss=0.2423, simple_loss=0.3108, pruned_loss=0.08695, over 13315.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.32, pruned_loss=0.08707, over 2630073.23 frames. ], batch size: 34, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:41:32,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=77561.0, ans=0.07 +2024-08-03 08:41:34,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=77561.0, ans=0.2 +2024-08-03 08:42:10,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=77634.33333333333, ans=0.0 +2024-08-03 08:43:15,146 INFO [train.py:1114] (2/4) Epoch 6, batch 3000, loss[loss=0.2513, simple_loss=0.3281, pruned_loss=0.08718, over 13546.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3204, pruned_loss=0.08764, over 2629932.08 frames. ], batch size: 37, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:43:15,147 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 08:43:32,686 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.2027, simple_loss=0.301, pruned_loss=0.05225, over 944034.00 frames. +2024-08-03 08:43:32,687 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 08:43:58,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.17 vs. limit=15.0 +2024-08-03 08:44:02,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.95 vs. 
limit=15.0 +2024-08-03 08:44:09,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=77781.0, ans=0.125 +2024-08-03 08:44:21,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=77817.66666666667, ans=0.125 +2024-08-03 08:44:33,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.660e+01 1.264e+02 1.515e+02 1.854e+02 4.431e+02, threshold=3.030e+02, percent-clipped=3.0 +2024-08-03 08:44:34,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=15.0 +2024-08-03 08:44:41,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=77891.0, ans=0.05 +2024-08-03 08:44:47,349 INFO [train.py:1114] (2/4) Epoch 6, batch 3050, loss[loss=0.2295, simple_loss=0.3042, pruned_loss=0.07741, over 13530.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3207, pruned_loss=0.08758, over 2627395.11 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:45:12,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=77927.66666666667, ans=0.0 +2024-08-03 08:45:12,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=77927.66666666667, ans=0.05 +2024-08-03 08:49:08,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77964.33333333333, ans=0.1 +2024-08-03 08:49:28,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77964.33333333333, ans=0.1 +2024-08-03 08:49:31,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=77964.33333333333, ans=0.0 +2024-08-03 08:50:18,937 INFO [train.py:1114] (2/4) Epoch 6, batch 3100, loss[loss=0.2834, simple_loss=0.3615, pruned_loss=0.1027, over 13341.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3202, pruned_loss=0.08737, over 2626113.56 frames. 
], batch size: 46, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:50:24,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=78111.0, ans=0.125 +2024-08-03 08:50:29,717 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:50:34,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=78147.66666666667, ans=0.0 +2024-08-03 08:50:37,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=78147.66666666667, ans=15.0 +2024-08-03 08:50:43,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=78184.33333333333, ans=0.125 +2024-08-03 08:50:47,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=78184.33333333333, ans=0.125 +2024-08-03 08:50:52,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78184.33333333333, ans=0.125 +2024-08-03 08:50:55,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=78221.0, ans=0.125 +2024-08-03 08:50:58,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=78221.0, ans=0.2 +2024-08-03 08:51:05,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78221.0, ans=0.125 +2024-08-03 08:51:12,347 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.281e+02 1.526e+02 2.102e+02 4.706e+02, threshold=3.052e+02, percent-clipped=7.0 +2024-08-03 08:51:34,802 INFO [train.py:1114] (2/4) Epoch 6, batch 3150, loss[loss=0.2607, simple_loss=0.3374, pruned_loss=0.09203, over 13039.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3195, pruned_loss=0.08674, over 2627020.25 frames. ], batch size: 48, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:51:41,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78294.33333333333, ans=0.1 +2024-08-03 08:52:42,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.27 vs. limit=10.0 +2024-08-03 08:52:43,924 INFO [train.py:1114] (2/4) Epoch 6, batch 3200, loss[loss=0.2518, simple_loss=0.3272, pruned_loss=0.08816, over 13546.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3188, pruned_loss=0.08605, over 2633116.68 frames. 
], batch size: 37, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:52:50,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78477.66666666667, ans=0.1 +2024-08-03 08:52:58,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78514.33333333333, ans=0.1 +2024-08-03 08:52:58,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78514.33333333333, ans=0.0 +2024-08-03 08:53:03,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78514.33333333333, ans=0.0 +2024-08-03 08:57:17,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=78551.0, ans=0.0 +2024-08-03 08:57:47,832 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.879e+01 1.222e+02 1.408e+02 1.742e+02 2.685e+02, threshold=2.816e+02, percent-clipped=0.0 +2024-08-03 08:57:57,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=78624.33333333333, ans=0.0 +2024-08-03 08:57:59,195 INFO [train.py:1114] (2/4) Epoch 6, batch 3250, loss[loss=0.2375, simple_loss=0.3202, pruned_loss=0.07733, over 13393.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.319, pruned_loss=0.08582, over 2637662.51 frames. ], batch size: 38, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:58:28,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=78697.66666666667, ans=0.125 +2024-08-03 08:58:36,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=78734.33333333333, ans=10.0 +2024-08-03 08:58:39,715 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:58:50,977 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:58:59,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78807.66666666667, ans=0.125 +2024-08-03 08:59:05,489 INFO [train.py:1114] (2/4) Epoch 6, batch 3300, loss[loss=0.2477, simple_loss=0.3264, pruned_loss=0.08455, over 12922.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3175, pruned_loss=0.08531, over 2639268.74 frames. ], batch size: 52, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:59:14,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.83 vs. limit=15.0 +2024-08-03 08:59:24,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=78917.66666666667, ans=0.125 +2024-08-03 08:59:37,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=78954.33333333333, ans=0.2 +2024-08-03 08:59:42,888 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.439e+02 1.688e+02 2.442e+02 4.060e+02, threshold=3.376e+02, percent-clipped=9.0 +2024-08-03 08:59:53,403 INFO [train.py:1114] (2/4) Epoch 6, batch 3350, loss[loss=0.2824, simple_loss=0.3467, pruned_loss=0.1091, over 13002.00 frames. 
], tot_loss[loss=0.2458, simple_loss=0.3189, pruned_loss=0.08631, over 2629922.04 frames. ], batch size: 48, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 08:59:53,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.00 vs. limit=15.0 +2024-08-03 09:00:04,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=79064.33333333333, ans=0.025 +2024-08-03 09:00:08,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=79064.33333333333, ans=0.125 +2024-08-03 09:00:09,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=79064.33333333333, ans=0.2 +2024-08-03 09:00:16,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=79101.0, ans=0.125 +2024-08-03 09:00:18,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=79137.66666666667, ans=0.0 +2024-08-03 09:00:22,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.10 vs. limit=15.0 +2024-08-03 09:00:22,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=79137.66666666667, ans=0.0 +2024-08-03 09:00:36,288 INFO [train.py:1114] (2/4) Epoch 6, batch 3400, loss[loss=0.2081, simple_loss=0.2754, pruned_loss=0.07047, over 13527.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3183, pruned_loss=0.08604, over 2626063.43 frames. ], batch size: 31, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:00:37,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.25 vs. limit=6.0 +2024-08-03 09:00:41,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=79211.0, ans=0.125 +2024-08-03 09:01:12,725 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.222e+02 1.420e+02 1.734e+02 2.761e+02, threshold=2.839e+02, percent-clipped=0.0 +2024-08-03 09:01:19,593 INFO [train.py:1114] (2/4) Epoch 6, batch 3450, loss[loss=0.2392, simple_loss=0.3134, pruned_loss=0.08249, over 12925.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3179, pruned_loss=0.08555, over 2629646.72 frames. ], batch size: 52, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:01:48,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=79504.33333333333, ans=0.07 +2024-08-03 09:01:59,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=79541.0, ans=0.0 +2024-08-03 09:02:01,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0 +2024-08-03 09:02:02,276 INFO [train.py:1114] (2/4) Epoch 6, batch 3500, loss[loss=0.2487, simple_loss=0.3118, pruned_loss=0.09282, over 13513.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.318, pruned_loss=0.08611, over 2631357.15 frames. 
], batch size: 34, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:02:04,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-08-03 09:02:05,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=79577.66666666667, ans=0.125 +2024-08-03 09:02:07,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0 +2024-08-03 09:02:14,553 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-08-03 09:02:18,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=79651.0, ans=0.125 +2024-08-03 09:02:24,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=79651.0, ans=0.125 +2024-08-03 09:02:26,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.39 vs. limit=22.5 +2024-08-03 09:02:29,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=79687.66666666667, ans=0.1 +2024-08-03 09:02:34,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79687.66666666667, ans=0.1 +2024-08-03 09:02:38,268 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.291e+02 1.420e+02 1.811e+02 3.621e+02, threshold=2.839e+02, percent-clipped=4.0 +2024-08-03 09:02:41,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=79724.33333333333, ans=0.0 +2024-08-03 09:02:45,561 INFO [train.py:1114] (2/4) Epoch 6, batch 3550, loss[loss=0.2695, simple_loss=0.3433, pruned_loss=0.09782, over 12401.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.32, pruned_loss=0.08715, over 2629498.57 frames. 
], batch size: 58, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:02:50,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=79761.0, ans=0.0 +2024-08-03 09:02:53,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79797.66666666667, ans=0.125 +2024-08-03 09:03:02,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=79834.33333333333, ans=0.07 +2024-08-03 09:03:10,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=79834.33333333333, ans=0.125 +2024-08-03 09:03:10,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=79834.33333333333, ans=0.125 +2024-08-03 09:03:20,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=79907.66666666667, ans=0.125 +2024-08-03 09:03:23,354 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:03:23,726 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.51 vs. limit=15.0 +2024-08-03 09:03:30,154 INFO [train.py:1114] (2/4) Epoch 6, batch 3600, loss[loss=0.3482, simple_loss=0.3888, pruned_loss=0.1538, over 9375.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3265, pruned_loss=0.09362, over 2489952.27 frames. ], batch size: 96, lr: 1.94e-02, grad_scale: 32.0 +2024-08-03 09:11:26,778 INFO [train.py:1114] (2/4) Epoch 7, batch 0, loss[loss=0.2073, simple_loss=0.2788, pruned_loss=0.06788, over 13335.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2788, pruned_loss=0.06788, over 13335.00 frames. ], batch size: 33, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:11:26,779 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 09:11:36,680 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.2064, simple_loss=0.3063, pruned_loss=0.05331, over 944034.00 frames. +2024-08-03 09:11:36,680 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 09:11:36,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=80091.0, ans=0.5 +2024-08-03 09:11:39,256 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.328e+02 1.470e+02 1.676e+02 3.542e+02, threshold=2.940e+02, percent-clipped=1.0 +2024-08-03 09:11:39,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=80091.0, ans=0.025 +2024-08-03 09:11:45,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.46 vs. limit=15.0 +2024-08-03 09:11:47,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=80091.0, ans=0.0 +2024-08-03 09:11:56,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.98 vs. limit=15.0 +2024-08-03 09:12:23,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.14 vs. 
limit=15.0 +2024-08-03 09:12:27,671 INFO [train.py:1114] (2/4) Epoch 7, batch 50, loss[loss=0.1883, simple_loss=0.2692, pruned_loss=0.05369, over 13412.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.322, pruned_loss=0.08847, over 578146.63 frames. ], batch size: 32, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:12:57,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=80347.66666666667, ans=0.2 +2024-08-03 09:12:59,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=80384.33333333333, ans=0.125 +2024-08-03 09:13:02,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=80384.33333333333, ans=0.125 +2024-08-03 09:13:03,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=80384.33333333333, ans=0.125 +2024-08-03 09:13:19,705 INFO [train.py:1114] (2/4) Epoch 7, batch 100, loss[loss=0.2253, simple_loss=0.3, pruned_loss=0.07533, over 13552.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3204, pruned_loss=0.08582, over 1025507.74 frames. ], batch size: 35, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:13:22,392 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.305e+02 1.508e+02 1.904e+02 3.829e+02, threshold=3.017e+02, percent-clipped=4.0 +2024-08-03 09:13:45,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=80531.0, ans=0.125 +2024-08-03 09:13:45,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=80531.0, ans=0.125 +2024-08-03 09:13:48,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=80531.0, ans=0.025 +2024-08-03 09:13:58,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=80604.33333333333, ans=0.125 +2024-08-03 09:14:01,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=80604.33333333333, ans=0.0 +2024-08-03 09:14:08,299 INFO [train.py:1114] (2/4) Epoch 7, batch 150, loss[loss=0.2145, simple_loss=0.2846, pruned_loss=0.07222, over 13439.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3181, pruned_loss=0.08549, over 1386171.04 frames. ], batch size: 32, lr: 1.81e-02, grad_scale: 32.0 +2024-08-03 09:14:23,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=80677.66666666667, ans=0.125 +2024-08-03 09:14:34,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80714.33333333333, ans=0.1 +2024-08-03 09:14:56,253 INFO [train.py:1114] (2/4) Epoch 7, batch 200, loss[loss=0.2657, simple_loss=0.3363, pruned_loss=0.09755, over 12477.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.316, pruned_loss=0.08416, over 1664629.65 frames. 
], batch size: 58, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:14:56,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=80824.33333333333, ans=0.0 +2024-08-03 09:14:59,741 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.449e+01 1.216e+02 1.369e+02 1.577e+02 2.982e+02, threshold=2.737e+02, percent-clipped=0.0 +2024-08-03 09:15:13,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=80897.66666666667, ans=0.2 +2024-08-03 09:15:16,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=80897.66666666667, ans=0.025 +2024-08-03 09:15:16,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=80897.66666666667, ans=0.0 +2024-08-03 09:15:20,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=80897.66666666667, ans=0.05 +2024-08-03 09:15:21,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=80897.66666666667, ans=0.0 +2024-08-03 09:15:22,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.60 vs. limit=22.5 +2024-08-03 09:15:41,531 INFO [train.py:1114] (2/4) Epoch 7, batch 250, loss[loss=0.2562, simple_loss=0.3324, pruned_loss=0.09006, over 13314.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3158, pruned_loss=0.08346, over 1883614.50 frames. ], batch size: 46, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:15:41,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81007.66666666667, ans=0.1 +2024-08-03 09:15:57,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.10 vs. limit=6.0 +2024-08-03 09:16:03,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=81081.0, ans=0.125 +2024-08-03 09:16:05,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81081.0, ans=0.125 +2024-08-03 09:16:05,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=81081.0, ans=0.125 +2024-08-03 09:16:08,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=81117.66666666667, ans=0.0 +2024-08-03 09:16:10,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81117.66666666667, ans=0.125 +2024-08-03 09:16:26,436 INFO [train.py:1114] (2/4) Epoch 7, batch 300, loss[loss=0.2577, simple_loss=0.3274, pruned_loss=0.09402, over 13437.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3159, pruned_loss=0.08353, over 2050918.07 frames. 
], batch size: 42, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:16:30,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.291e+02 1.630e+02 2.116e+02 3.205e+02, threshold=3.259e+02, percent-clipped=7.0 +2024-08-03 09:16:38,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=22.5 +2024-08-03 09:16:38,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=81227.66666666667, ans=0.0 +2024-08-03 09:16:54,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=81264.33333333333, ans=0.025 +2024-08-03 09:17:00,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81301.0, ans=0.1 +2024-08-03 09:17:19,134 INFO [train.py:1114] (2/4) Epoch 7, batch 350, loss[loss=0.1976, simple_loss=0.2684, pruned_loss=0.0634, over 13592.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3157, pruned_loss=0.08334, over 2181076.57 frames. ], batch size: 33, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:17:23,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=81374.33333333333, ans=0.0 +2024-08-03 09:17:26,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.86 vs. limit=10.0 +2024-08-03 09:17:37,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.38 vs. limit=15.0 +2024-08-03 09:17:45,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=81484.33333333333, ans=0.125 +2024-08-03 09:17:47,345 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:17:55,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.35 vs. limit=15.0 +2024-08-03 09:18:03,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.15 vs. limit=15.0 +2024-08-03 09:18:06,300 INFO [train.py:1114] (2/4) Epoch 7, batch 400, loss[loss=0.2406, simple_loss=0.3153, pruned_loss=0.08295, over 13357.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3151, pruned_loss=0.08305, over 2285582.58 frames. 
], batch size: 37, lr: 1.81e-02, grad_scale: 32.0 +2024-08-03 09:18:09,970 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.335e+02 1.614e+02 1.996e+02 4.244e+02, threshold=3.229e+02, percent-clipped=5.0 +2024-08-03 09:18:27,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=81631.0, ans=0.0 +2024-08-03 09:18:38,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=81667.66666666667, ans=0.5 +2024-08-03 09:18:42,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81704.33333333333, ans=0.125 +2024-08-03 09:18:47,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0 +2024-08-03 09:18:51,577 INFO [train.py:1114] (2/4) Epoch 7, batch 450, loss[loss=0.2483, simple_loss=0.3254, pruned_loss=0.08558, over 13553.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3153, pruned_loss=0.08315, over 2359307.88 frames. ], batch size: 38, lr: 1.80e-02, grad_scale: 32.0 +2024-08-03 09:18:53,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=81741.0, ans=0.2 +2024-08-03 09:19:04,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=81777.66666666667, ans=0.0 +2024-08-03 09:19:46,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.76 vs. limit=15.0 +2024-08-03 09:19:54,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=81851.0, ans=0.125 +2024-08-03 09:19:57,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=81851.0, ans=0.2 +2024-08-03 09:19:58,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81851.0, ans=0.1 +2024-08-03 09:20:02,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=81851.0, ans=10.0 +2024-08-03 09:20:05,782 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:20:13,716 INFO [train.py:1114] (2/4) Epoch 7, batch 500, loss[loss=0.2486, simple_loss=0.3238, pruned_loss=0.08669, over 13425.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3144, pruned_loss=0.08292, over 2424662.16 frames. ], batch size: 43, lr: 1.80e-02, grad_scale: 32.0 +2024-08-03 09:20:17,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.254e+02 1.426e+02 1.803e+02 2.820e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 09:20:19,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=81924.33333333333, ans=0.0 +2024-08-03 09:20:23,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=81961.0, ans=0.05 +2024-08-03 09:20:30,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.97 vs. 
limit=22.5 +2024-08-03 09:20:41,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=82034.33333333333, ans=0.0 +2024-08-03 09:20:44,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=82034.33333333333, ans=0.5 +2024-08-03 09:20:55,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=82071.0, ans=0.0 +2024-08-03 09:20:58,669 INFO [train.py:1114] (2/4) Epoch 7, batch 550, loss[loss=0.2686, simple_loss=0.3346, pruned_loss=0.1013, over 13057.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3146, pruned_loss=0.08292, over 2467580.56 frames. ], batch size: 48, lr: 1.80e-02, grad_scale: 16.0 +2024-08-03 09:21:00,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=82107.66666666667, ans=0.2 +2024-08-03 09:21:34,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82217.66666666667, ans=0.125 +2024-08-03 09:21:40,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=82254.33333333333, ans=0.09899494936611666 +2024-08-03 09:21:49,911 INFO [train.py:1114] (2/4) Epoch 7, batch 600, loss[loss=0.2434, simple_loss=0.327, pruned_loss=0.07991, over 13335.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3148, pruned_loss=0.08292, over 2507624.69 frames. ], batch size: 46, lr: 1.80e-02, grad_scale: 16.0 +2024-08-03 09:21:54,381 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.313e+02 1.488e+02 1.850e+02 2.717e+02, threshold=2.975e+02, percent-clipped=0.0 +2024-08-03 09:22:10,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.25 vs. limit=15.0 +2024-08-03 09:22:14,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=82364.33333333333, ans=0.125 +2024-08-03 09:22:16,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.58 vs. limit=15.0 +2024-08-03 09:22:21,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=82401.0, ans=0.125 +2024-08-03 09:22:34,997 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.49 vs. limit=6.0 +2024-08-03 09:22:35,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=82437.66666666667, ans=0.025 +2024-08-03 09:22:39,802 INFO [train.py:1114] (2/4) Epoch 7, batch 650, loss[loss=0.2697, simple_loss=0.3419, pruned_loss=0.09879, over 13556.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3134, pruned_loss=0.08185, over 2542747.39 frames. ], batch size: 37, lr: 1.80e-02, grad_scale: 16.0 +2024-08-03 09:22:43,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.83 vs. 
limit=15.0 +2024-08-03 09:22:50,763 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:22:52,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82511.0, ans=0.1 +2024-08-03 09:23:20,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=82621.0, ans=0.0 +2024-08-03 09:23:24,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82657.66666666667, ans=0.1 +2024-08-03 09:23:25,559 INFO [train.py:1114] (2/4) Epoch 7, batch 700, loss[loss=0.2382, simple_loss=0.3164, pruned_loss=0.07999, over 13532.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3133, pruned_loss=0.08147, over 2565804.67 frames. ], batch size: 35, lr: 1.79e-02, grad_scale: 16.0 +2024-08-03 09:23:25,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=82657.66666666667, ans=0.07 +2024-08-03 09:23:29,925 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.288e+02 1.544e+02 2.300e+02 4.218e+02, threshold=3.088e+02, percent-clipped=10.0 +2024-08-03 09:23:30,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=82657.66666666667, ans=0.2 +2024-08-03 09:23:44,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.90 vs. limit=22.5 +2024-08-03 09:23:45,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=82731.0, ans=0.035 +2024-08-03 09:23:51,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.28 vs. limit=15.0 +2024-08-03 09:23:57,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=82767.66666666667, ans=0.125 +2024-08-03 09:24:10,538 INFO [train.py:1114] (2/4) Epoch 7, batch 750, loss[loss=0.2413, simple_loss=0.3197, pruned_loss=0.08147, over 13356.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3138, pruned_loss=0.08182, over 2583243.29 frames. 
], batch size: 37, lr: 1.79e-02, grad_scale: 16.0 +2024-08-03 09:24:25,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=82877.66666666667, ans=0.0 +2024-08-03 09:24:27,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=82877.66666666667, ans=0.025 +2024-08-03 09:24:34,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=82914.33333333333, ans=0.125 +2024-08-03 09:24:38,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=82914.33333333333, ans=0.0 +2024-08-03 09:24:43,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=82951.0, ans=0.125 +2024-08-03 09:24:47,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=82951.0, ans=0.0 +2024-08-03 09:25:00,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82987.66666666667, ans=0.125 +2024-08-03 09:25:05,597 INFO [train.py:1114] (2/4) Epoch 7, batch 800, loss[loss=0.2504, simple_loss=0.3107, pruned_loss=0.09504, over 13351.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3138, pruned_loss=0.08182, over 2598095.98 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:25:06,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.50 vs. limit=15.0 +2024-08-03 09:25:11,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.297e+02 1.506e+02 2.061e+02 3.344e+02, threshold=3.011e+02, percent-clipped=3.0 +2024-08-03 09:25:18,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=83061.0, ans=0.0 +2024-08-03 09:25:18,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0 +2024-08-03 09:25:30,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=83097.66666666667, ans=0.07 +2024-08-03 09:25:32,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=83097.66666666667, ans=0.125 +2024-08-03 09:25:59,668 INFO [train.py:1114] (2/4) Epoch 7, batch 850, loss[loss=0.21, simple_loss=0.296, pruned_loss=0.06202, over 13332.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3139, pruned_loss=0.08214, over 2609362.40 frames. ], batch size: 40, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:25:59,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83207.66666666667, ans=0.1 +2024-08-03 09:26:07,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=83207.66666666667, ans=0.025 +2024-08-03 09:26:12,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.36 vs. 
limit=15.0 +2024-08-03 09:26:16,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=83244.33333333333, ans=0.0 +2024-08-03 09:26:29,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=83281.0, ans=0.125 +2024-08-03 09:26:39,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=83317.66666666667, ans=0.125 +2024-08-03 09:26:54,480 INFO [train.py:1114] (2/4) Epoch 7, batch 900, loss[loss=0.2241, simple_loss=0.2921, pruned_loss=0.0781, over 13348.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3137, pruned_loss=0.08197, over 2611844.50 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:26:58,761 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.957e+01 1.340e+02 1.564e+02 1.853e+02 3.494e+02, threshold=3.128e+02, percent-clipped=2.0 +2024-08-03 09:27:06,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=83427.66666666667, ans=0.125 +2024-08-03 09:27:15,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=83427.66666666667, ans=0.125 +2024-08-03 09:27:20,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.33 vs. limit=15.0 +2024-08-03 09:27:43,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83537.66666666667, ans=0.0 +2024-08-03 09:27:44,885 INFO [train.py:1114] (2/4) Epoch 7, batch 950, loss[loss=0.2038, simple_loss=0.2824, pruned_loss=0.06263, over 13543.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3137, pruned_loss=0.08211, over 2613198.97 frames. ], batch size: 34, lr: 1.79e-02, grad_scale: 32.0 +2024-08-03 09:27:55,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83611.0, ans=0.0 +2024-08-03 09:27:59,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=83611.0, ans=0.0 +2024-08-03 09:28:11,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.96 vs. limit=22.5 +2024-08-03 09:28:31,249 INFO [train.py:1114] (2/4) Epoch 7, batch 1000, loss[loss=0.2097, simple_loss=0.2847, pruned_loss=0.06734, over 13368.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3152, pruned_loss=0.08328, over 2611161.64 frames. 
], batch size: 35, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:28:39,588 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.347e+02 1.651e+02 2.099e+02 3.599e+02, threshold=3.301e+02, percent-clipped=2.0 +2024-08-03 09:28:41,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83757.66666666667, ans=0.125 +2024-08-03 09:28:47,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=83794.33333333333, ans=0.05 +2024-08-03 09:28:57,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=83831.0, ans=0.125 +2024-08-03 09:29:05,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=83867.66666666667, ans=0.125 +2024-08-03 09:29:19,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=83904.33333333333, ans=0.04949747468305833 +2024-08-03 09:29:19,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.21 vs. limit=15.0 +2024-08-03 09:29:25,992 INFO [train.py:1114] (2/4) Epoch 7, batch 1050, loss[loss=0.2645, simple_loss=0.3354, pruned_loss=0.0968, over 13572.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3143, pruned_loss=0.08311, over 2615693.93 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:29:30,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.25 vs. limit=10.0 +2024-08-03 09:29:32,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=83941.0, ans=0.0 +2024-08-03 09:29:33,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=83941.0, ans=0.2 +2024-08-03 09:29:35,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83977.66666666667, ans=0.125 +2024-08-03 09:29:36,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=83977.66666666667, ans=0.2 +2024-08-03 09:29:57,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=84014.33333333333, ans=0.125 +2024-08-03 09:30:18,335 INFO [train.py:1114] (2/4) Epoch 7, batch 1100, loss[loss=0.2423, simple_loss=0.3174, pruned_loss=0.08361, over 13564.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3138, pruned_loss=0.08277, over 2619796.79 frames. 
], batch size: 36, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:30:18,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=84124.33333333333, ans=0.125 +2024-08-03 09:30:22,734 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.294e+01 1.204e+02 1.427e+02 1.810e+02 3.442e+02, threshold=2.853e+02, percent-clipped=1.0 +2024-08-03 09:30:29,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=84161.0, ans=0.025 +2024-08-03 09:30:30,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=84161.0, ans=0.125 +2024-08-03 09:30:50,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=12.0 +2024-08-03 09:30:59,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=84234.33333333333, ans=0.05 +2024-08-03 09:31:13,212 INFO [train.py:1114] (2/4) Epoch 7, batch 1150, loss[loss=0.2453, simple_loss=0.3167, pruned_loss=0.08698, over 13568.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3135, pruned_loss=0.08253, over 2619072.66 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:31:17,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=84307.66666666667, ans=0.2 +2024-08-03 09:31:42,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=84381.0, ans=0.2 +2024-08-03 09:31:48,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=84417.66666666667, ans=0.125 +2024-08-03 09:31:57,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.53 vs. limit=15.0 +2024-08-03 09:32:04,050 INFO [train.py:1114] (2/4) Epoch 7, batch 1200, loss[loss=0.2587, simple_loss=0.3456, pruned_loss=0.08585, over 13572.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3148, pruned_loss=0.08277, over 2616309.12 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:32:09,671 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.359e+02 1.583e+02 1.870e+02 3.127e+02, threshold=3.166e+02, percent-clipped=2.0 +2024-08-03 09:32:26,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=84564.33333333333, ans=0.2 +2024-08-03 09:32:27,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.87 vs. 
limit=10.0 +2024-08-03 09:32:27,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=84564.33333333333, ans=0.125 +2024-08-03 09:32:32,227 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:32:36,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=84601.0, ans=0.95 +2024-08-03 09:32:44,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=84637.66666666667, ans=12.0 +2024-08-03 09:32:46,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84637.66666666667, ans=0.1 +2024-08-03 09:32:50,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.46 vs. limit=12.0 +2024-08-03 09:32:52,751 INFO [train.py:1114] (2/4) Epoch 7, batch 1250, loss[loss=0.2203, simple_loss=0.3003, pruned_loss=0.0701, over 13441.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.315, pruned_loss=0.08236, over 2628376.52 frames. ], batch size: 42, lr: 1.78e-02, grad_scale: 32.0 +2024-08-03 09:32:58,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84674.33333333333, ans=0.125 +2024-08-03 09:33:00,026 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:33:00,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=15.0 +2024-08-03 09:33:20,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=84747.66666666667, ans=0.1 +2024-08-03 09:33:23,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=84747.66666666667, ans=0.2 +2024-08-03 09:33:27,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84747.66666666667, ans=0.1 +2024-08-03 09:33:31,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=84784.33333333333, ans=0.025 +2024-08-03 09:33:43,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=84821.0, ans=0.125 +2024-08-03 09:33:49,159 INFO [train.py:1114] (2/4) Epoch 7, batch 1300, loss[loss=0.2767, simple_loss=0.3455, pruned_loss=0.1039, over 12943.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3143, pruned_loss=0.08217, over 2631641.52 frames. 
], batch size: 52, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:33:53,582 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.285e+01 1.265e+02 1.441e+02 2.116e+02 4.466e+02, threshold=2.882e+02, percent-clipped=10.0 +2024-08-03 09:34:10,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84931.0, ans=0.125 +2024-08-03 09:34:17,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=84967.66666666667, ans=0.125 +2024-08-03 09:34:34,083 INFO [train.py:1114] (2/4) Epoch 7, batch 1350, loss[loss=0.2176, simple_loss=0.2986, pruned_loss=0.06836, over 13545.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3131, pruned_loss=0.08131, over 2638463.20 frames. ], batch size: 37, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:34:46,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=85077.66666666667, ans=0.125 +2024-08-03 09:34:48,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=85077.66666666667, ans=0.125 +2024-08-03 09:35:01,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=85114.33333333333, ans=0.125 +2024-08-03 09:35:08,041 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:35:14,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=85187.66666666667, ans=0.09899494936611666 +2024-08-03 09:35:23,191 INFO [train.py:1114] (2/4) Epoch 7, batch 1400, loss[loss=0.2087, simple_loss=0.2815, pruned_loss=0.06802, over 13251.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3121, pruned_loss=0.08078, over 2642011.99 frames. ], batch size: 31, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:35:27,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.25 vs. 
limit=12.0 +2024-08-03 09:35:27,643 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.250e+02 1.480e+02 1.868e+02 3.141e+02, threshold=2.961e+02, percent-clipped=2.0 +2024-08-03 09:35:29,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85224.33333333333, ans=0.1 +2024-08-03 09:35:42,384 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:35:45,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=85297.66666666667, ans=0.125 +2024-08-03 09:35:49,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=85334.33333333333, ans=0.125 +2024-08-03 09:35:54,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=85334.33333333333, ans=0.125 +2024-08-03 09:35:56,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85334.33333333333, ans=0.125 +2024-08-03 09:36:10,259 INFO [train.py:1114] (2/4) Epoch 7, batch 1450, loss[loss=0.2243, simple_loss=0.3049, pruned_loss=0.07188, over 13442.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3131, pruned_loss=0.08112, over 2640837.24 frames. ], batch size: 43, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:36:18,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85444.33333333333, ans=0.1 +2024-08-03 09:36:19,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=85444.33333333333, ans=0.2 +2024-08-03 09:36:20,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=85444.33333333333, ans=0.025 +2024-08-03 09:36:25,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.63 vs. limit=12.0 +2024-08-03 09:36:36,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=85481.0, ans=0.2 +2024-08-03 09:36:37,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=85481.0, ans=0.2 +2024-08-03 09:36:38,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.07 vs. limit=15.0 +2024-08-03 09:36:43,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=85517.66666666667, ans=0.0 +2024-08-03 09:36:56,382 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.157e-03 +2024-08-03 09:36:56,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=85554.33333333333, ans=0.125 +2024-08-03 09:37:03,216 INFO [train.py:1114] (2/4) Epoch 7, batch 1500, loss[loss=0.2399, simple_loss=0.3241, pruned_loss=0.07784, over 13405.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3136, pruned_loss=0.08138, over 2640993.95 frames. 
], batch size: 39, lr: 1.77e-02, grad_scale: 32.0 +2024-08-03 09:37:07,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.276e+02 1.426e+02 1.677e+02 2.585e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 09:37:12,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85627.66666666667, ans=0.125 +2024-08-03 09:37:17,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=85627.66666666667, ans=0.125 +2024-08-03 09:37:56,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=85701.0, ans=0.125 +2024-08-03 09:38:03,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.92 vs. limit=22.5 +2024-08-03 09:38:04,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=85737.66666666667, ans=0.125 +2024-08-03 09:38:10,431 INFO [train.py:1114] (2/4) Epoch 7, batch 1550, loss[loss=0.2353, simple_loss=0.3119, pruned_loss=0.07933, over 13410.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3138, pruned_loss=0.08219, over 2630359.43 frames. ], batch size: 41, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:38:14,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=85774.33333333333, ans=0.2 +2024-08-03 09:38:23,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=85811.0, ans=0.025 +2024-08-03 09:38:30,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=12.0 +2024-08-03 09:38:30,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=85847.66666666667, ans=0.0 +2024-08-03 09:38:50,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=85921.0, ans=0.125 +2024-08-03 09:38:50,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=85921.0, ans=0.2 +2024-08-03 09:38:57,203 INFO [train.py:1114] (2/4) Epoch 7, batch 1600, loss[loss=0.2104, simple_loss=0.2974, pruned_loss=0.06171, over 13565.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3132, pruned_loss=0.08191, over 2623658.46 frames. ], batch size: 39, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:38:58,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85957.66666666667, ans=0.0 +2024-08-03 09:38:59,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=85957.66666666667, ans=0.5 +2024-08-03 09:39:00,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=15.0 +2024-08-03 09:39:01,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.10 vs. 
limit=15.0 +2024-08-03 09:39:02,745 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.288e+02 1.487e+02 1.890e+02 3.069e+02, threshold=2.975e+02, percent-clipped=2.0 +2024-08-03 09:39:19,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=86031.0, ans=0.0 +2024-08-03 09:39:45,490 INFO [train.py:1114] (2/4) Epoch 7, batch 1650, loss[loss=0.2181, simple_loss=0.3103, pruned_loss=0.06292, over 13332.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3134, pruned_loss=0.08185, over 2621512.57 frames. ], batch size: 40, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:39:46,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=86141.0, ans=0.125 +2024-08-03 09:39:57,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86177.66666666667, ans=0.1 +2024-08-03 09:39:58,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=86177.66666666667, ans=0.0 +2024-08-03 09:40:06,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.79 vs. limit=15.0 +2024-08-03 09:40:15,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=86251.0, ans=0.125 +2024-08-03 09:40:25,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86287.66666666667, ans=0.0 +2024-08-03 09:40:30,961 INFO [train.py:1114] (2/4) Epoch 7, batch 1700, loss[loss=0.2072, simple_loss=0.2747, pruned_loss=0.06984, over 13245.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3121, pruned_loss=0.08111, over 2629881.65 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:40:36,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.270e+02 1.510e+02 1.921e+02 4.226e+02, threshold=3.020e+02, percent-clipped=3.0 +2024-08-03 09:40:37,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=86324.33333333333, ans=0.2 +2024-08-03 09:40:48,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=86361.0, ans=0.2 +2024-08-03 09:40:54,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=86397.66666666667, ans=0.0 +2024-08-03 09:41:13,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0 +2024-08-03 09:41:14,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=86471.0, ans=0.0 +2024-08-03 09:41:17,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.39 vs. limit=15.0 +2024-08-03 09:41:20,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=86507.66666666667, ans=0.125 +2024-08-03 09:41:23,774 INFO [train.py:1114] (2/4) Epoch 7, batch 1750, loss[loss=0.21, simple_loss=0.2805, pruned_loss=0.06971, over 13496.00 frames. 
], tot_loss[loss=0.2374, simple_loss=0.312, pruned_loss=0.0814, over 2631871.38 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:41:24,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=86507.66666666667, ans=0.125 +2024-08-03 09:41:40,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86544.33333333333, ans=0.1 +2024-08-03 09:41:46,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=86581.0, ans=0.125 +2024-08-03 09:41:49,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.19 vs. limit=15.0 +2024-08-03 09:41:49,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=86581.0, ans=0.025 +2024-08-03 09:41:51,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=86617.66666666667, ans=0.125 +2024-08-03 09:41:55,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=86617.66666666667, ans=0.125 +2024-08-03 09:42:01,695 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.90 vs. limit=15.0 +2024-08-03 09:42:03,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=86654.33333333333, ans=0.125 +2024-08-03 09:42:04,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86654.33333333333, ans=0.1 +2024-08-03 09:42:06,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=86654.33333333333, ans=0.125 +2024-08-03 09:42:09,846 INFO [train.py:1114] (2/4) Epoch 7, batch 1800, loss[loss=0.2341, simple_loss=0.319, pruned_loss=0.07461, over 13538.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3121, pruned_loss=0.08143, over 2634064.50 frames. ], batch size: 38, lr: 1.76e-02, grad_scale: 32.0 +2024-08-03 09:42:15,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.268e+02 1.407e+02 1.831e+02 3.286e+02, threshold=2.815e+02, percent-clipped=2.0 +2024-08-03 09:42:23,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=86727.66666666667, ans=0.125 +2024-08-03 09:42:29,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.79 vs. limit=22.5 +2024-08-03 09:42:34,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=86764.33333333333, ans=0.025 +2024-08-03 09:42:47,226 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.82 vs. limit=15.0 +2024-08-03 09:42:54,768 INFO [train.py:1114] (2/4) Epoch 7, batch 1850, loss[loss=0.2327, simple_loss=0.3221, pruned_loss=0.07162, over 13393.00 frames. 
], tot_loss[loss=0.2368, simple_loss=0.3118, pruned_loss=0.08085, over 2635498.30 frames. ], batch size: 39, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:42:54,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86874.33333333333, ans=0.1 +2024-08-03 09:43:06,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=86911.0, ans=0.125 +2024-08-03 09:43:27,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=86984.33333333333, ans=0.0 +2024-08-03 09:43:37,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=6.0 +2024-08-03 09:43:39,419 INFO [train.py:1114] (2/4) Epoch 7, batch 1900, loss[loss=0.2581, simple_loss=0.3464, pruned_loss=0.0849, over 13311.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3121, pruned_loss=0.08071, over 2637809.17 frames. ], batch size: 40, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:43:44,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.267e+02 1.561e+02 1.810e+02 3.811e+02, threshold=3.121e+02, percent-clipped=4.0 +2024-08-03 09:43:57,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.84 vs. limit=15.0 +2024-08-03 09:44:09,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87167.66666666667, ans=0.125 +2024-08-03 09:44:13,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=87167.66666666667, ans=0.125 +2024-08-03 09:44:20,611 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:44:28,644 INFO [train.py:1114] (2/4) Epoch 7, batch 1950, loss[loss=0.265, simple_loss=0.3365, pruned_loss=0.0968, over 13554.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3127, pruned_loss=0.08066, over 2644903.55 frames. ], batch size: 36, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:44:34,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.73 vs. limit=10.0 +2024-08-03 09:44:39,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0 +2024-08-03 09:44:46,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=87314.33333333333, ans=0.0 +2024-08-03 09:45:03,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=87314.33333333333, ans=0.125 +2024-08-03 09:45:06,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.41 vs. limit=6.0 +2024-08-03 09:45:24,714 INFO [train.py:1114] (2/4) Epoch 7, batch 2000, loss[loss=0.1796, simple_loss=0.2547, pruned_loss=0.05222, over 13532.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3136, pruned_loss=0.08115, over 2635327.60 frames. 
], batch size: 31, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:45:27,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.81 vs. limit=15.0 +2024-08-03 09:45:29,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=87424.33333333333, ans=0.125 +2024-08-03 09:45:30,328 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.022e+02 1.315e+02 1.529e+02 1.937e+02 2.914e+02, threshold=3.058e+02, percent-clipped=0.0 +2024-08-03 09:45:46,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.26 vs. limit=10.0 +2024-08-03 09:45:59,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.93 vs. limit=22.5 +2024-08-03 09:45:59,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.77 vs. limit=6.0 +2024-08-03 09:46:07,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=87571.0, ans=0.025 +2024-08-03 09:46:09,935 INFO [train.py:1114] (2/4) Epoch 7, batch 2050, loss[loss=0.1865, simple_loss=0.2676, pruned_loss=0.05268, over 13395.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.312, pruned_loss=0.08038, over 2633309.27 frames. ], batch size: 32, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:46:12,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=87607.66666666667, ans=15.0 +2024-08-03 09:46:18,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=87644.33333333333, ans=0.125 +2024-08-03 09:46:19,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.32 vs. limit=22.5 +2024-08-03 09:46:33,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=87681.0, ans=15.0 +2024-08-03 09:46:40,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-08-03 09:46:56,261 INFO [train.py:1114] (2/4) Epoch 7, batch 2100, loss[loss=0.2265, simple_loss=0.3043, pruned_loss=0.07436, over 13539.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3113, pruned_loss=0.07992, over 2637654.50 frames. 
], batch size: 37, lr: 1.75e-02, grad_scale: 32.0 +2024-08-03 09:47:00,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=87791.0, ans=0.0 +2024-08-03 09:47:01,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=87791.0, ans=0.125 +2024-08-03 09:47:03,251 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.729e+01 1.195e+02 1.377e+02 1.752e+02 2.850e+02, threshold=2.753e+02, percent-clipped=0.0 +2024-08-03 09:47:07,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=87827.66666666667, ans=0.125 +2024-08-03 09:47:17,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=87864.33333333333, ans=0.0 +2024-08-03 09:47:25,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.42 vs. limit=12.0 +2024-08-03 09:47:27,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=87901.0, ans=0.0 +2024-08-03 09:47:31,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=87901.0, ans=0.125 +2024-08-03 09:47:36,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87937.66666666667, ans=0.1 +2024-08-03 09:47:42,916 INFO [train.py:1114] (2/4) Epoch 7, batch 2150, loss[loss=0.2376, simple_loss=0.3031, pruned_loss=0.08604, over 13563.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3105, pruned_loss=0.07941, over 2647066.24 frames. ], batch size: 36, lr: 1.74e-02, grad_scale: 32.0 +2024-08-03 09:47:55,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=88011.0, ans=0.025 +2024-08-03 09:48:19,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=88084.33333333333, ans=0.125 +2024-08-03 09:48:23,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=88084.33333333333, ans=0.2 +2024-08-03 09:49:00,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.59 vs. limit=22.5 +2024-08-03 09:49:38,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=88121.0, ans=0.0 +2024-08-03 09:49:41,825 INFO [train.py:1114] (2/4) Epoch 7, batch 2200, loss[loss=0.2562, simple_loss=0.3351, pruned_loss=0.08859, over 13398.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3112, pruned_loss=0.08, over 2645205.76 frames. 
], batch size: 39, lr: 1.74e-02, grad_scale: 32.0 +2024-08-03 09:49:43,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=88157.66666666667, ans=0.125 +2024-08-03 09:49:56,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.554e+01 1.287e+02 1.626e+02 2.364e+02 4.219e+02, threshold=3.252e+02, percent-clipped=14.0 +2024-08-03 09:50:18,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=88231.0, ans=0.125 +2024-08-03 09:50:20,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=88231.0, ans=0.0 +2024-08-03 09:50:58,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.31 vs. limit=15.0 +2024-08-03 09:51:41,818 INFO [train.py:1114] (2/4) Epoch 7, batch 2250, loss[loss=0.2257, simple_loss=0.3077, pruned_loss=0.07182, over 13372.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3114, pruned_loss=0.07996, over 2642474.50 frames. ], batch size: 37, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:51:54,693 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0 +2024-08-03 09:52:23,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=88414.33333333333, ans=0.125 +2024-08-03 09:52:39,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.29 vs. limit=10.0 +2024-08-03 09:52:45,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.41 vs. limit=15.0 +2024-08-03 09:52:56,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=88487.66666666667, ans=0.125 +2024-08-03 09:52:57,972 INFO [train.py:1114] (2/4) Epoch 7, batch 2300, loss[loss=0.2252, simple_loss=0.2911, pruned_loss=0.07969, over 13583.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3102, pruned_loss=0.07998, over 2639428.09 frames. ], batch size: 33, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:53:00,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=88524.33333333333, ans=0.0 +2024-08-03 09:53:04,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.405e+01 1.244e+02 1.416e+02 1.864e+02 3.449e+02, threshold=2.832e+02, percent-clipped=2.0 +2024-08-03 09:53:19,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=88597.66666666667, ans=0.2 +2024-08-03 09:53:23,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.24 vs. limit=10.0 +2024-08-03 09:53:35,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88634.33333333333, ans=0.1 +2024-08-03 09:53:52,477 INFO [train.py:1114] (2/4) Epoch 7, batch 2350, loss[loss=0.2194, simple_loss=0.3066, pruned_loss=0.06611, over 13549.00 frames. 
], tot_loss[loss=0.2345, simple_loss=0.3098, pruned_loss=0.07959, over 2641787.19 frames. ], batch size: 38, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:55:16,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=88744.33333333333, ans=0.125 +2024-08-03 09:55:19,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=88744.33333333333, ans=0.0 +2024-08-03 09:55:22,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=88781.0, ans=0.0 +2024-08-03 09:55:23,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-08-03 09:55:25,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-08-03 09:55:28,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=88781.0, ans=0.0 +2024-08-03 09:55:31,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=88817.66666666667, ans=0.125 +2024-08-03 09:55:35,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=88817.66666666667, ans=0.0 +2024-08-03 09:55:36,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=88817.66666666667, ans=0.0 +2024-08-03 09:55:52,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=88891.0, ans=0.0 +2024-08-03 09:55:52,791 INFO [train.py:1114] (2/4) Epoch 7, batch 2400, loss[loss=0.1947, simple_loss=0.2788, pruned_loss=0.05532, over 13528.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3107, pruned_loss=0.08005, over 2642760.85 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 16.0 +2024-08-03 09:56:07,277 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.279e+02 1.511e+02 1.745e+02 2.971e+02, threshold=3.023e+02, percent-clipped=1.0 +2024-08-03 09:56:11,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=88927.66666666667, ans=0.125 +2024-08-03 09:56:17,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=88927.66666666667, ans=0.0 +2024-08-03 09:56:25,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=88964.33333333333, ans=0.125 +2024-08-03 09:56:36,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=89001.0, ans=0.125 +2024-08-03 09:56:42,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=89001.0, ans=0.0 +2024-08-03 09:56:46,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89037.66666666667, ans=0.1 +2024-08-03 09:56:53,286 INFO [train.py:1114] (2/4) Epoch 7, batch 2450, loss[loss=0.2353, simple_loss=0.3164, pruned_loss=0.07712, over 13369.00 frames. 
], tot_loss[loss=0.2371, simple_loss=0.3122, pruned_loss=0.08102, over 2631724.78 frames. ], batch size: 37, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:56:53,472 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:57:06,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.77 vs. limit=15.0 +2024-08-03 09:57:15,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89111.0, ans=0.1 +2024-08-03 09:57:16,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=89111.0, ans=0.0 +2024-08-03 09:57:23,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=89147.66666666667, ans=0.0 +2024-08-03 09:57:24,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=89147.66666666667, ans=0.0 +2024-08-03 09:57:39,865 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:57:43,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=89221.0, ans=0.125 +2024-08-03 09:57:52,098 INFO [train.py:1114] (2/4) Epoch 7, batch 2500, loss[loss=0.2589, simple_loss=0.3262, pruned_loss=0.09585, over 13396.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3123, pruned_loss=0.08097, over 2636121.31 frames. ], batch size: 39, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:57:59,334 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.252e+02 1.492e+02 2.074e+02 3.860e+02, threshold=2.984e+02, percent-clipped=5.0 +2024-08-03 09:57:59,874 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.25 vs. limit=15.0 +2024-08-03 09:58:11,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=89331.0, ans=0.0 +2024-08-03 09:58:27,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=89404.33333333333, ans=0.07 +2024-08-03 09:58:35,997 INFO [train.py:1114] (2/4) Epoch 7, batch 2550, loss[loss=0.2143, simple_loss=0.2889, pruned_loss=0.06988, over 13536.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3131, pruned_loss=0.0814, over 2637016.21 frames. 
], batch size: 31, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:58:37,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=89441.0, ans=0.125 +2024-08-03 09:58:39,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=89441.0, ans=0.125 +2024-08-03 09:58:58,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=89514.33333333333, ans=0.2 +2024-08-03 09:59:04,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=89551.0, ans=0.125 +2024-08-03 09:59:04,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=89551.0, ans=0.0 +2024-08-03 09:59:13,666 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:59:15,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=89587.66666666667, ans=0.0 +2024-08-03 09:59:19,631 INFO [train.py:1114] (2/4) Epoch 7, batch 2600, loss[loss=0.2283, simple_loss=0.2997, pruned_loss=0.07845, over 13568.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3124, pruned_loss=0.08091, over 2636213.95 frames. ], batch size: 36, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 09:59:22,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=89624.33333333333, ans=0.125 +2024-08-03 09:59:26,383 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.355e+01 1.229e+02 1.441e+02 1.780e+02 3.809e+02, threshold=2.882e+02, percent-clipped=4.0 +2024-08-03 09:59:30,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=89661.0, ans=0.05 +2024-08-03 09:59:31,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-08-03 09:59:35,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.68 vs. limit=22.5 +2024-08-03 09:59:37,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.40 vs. limit=15.0 +2024-08-03 09:59:40,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=89697.66666666667, ans=0.125 +2024-08-03 09:59:49,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.72 vs. limit=12.0 +2024-08-03 09:59:49,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=89734.33333333333, ans=0.2 +2024-08-03 10:00:01,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=89771.0, ans=0.0 +2024-08-03 10:00:02,670 INFO [train.py:1114] (2/4) Epoch 7, batch 2650, loss[loss=0.3004, simple_loss=0.3601, pruned_loss=0.1204, over 13296.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3125, pruned_loss=0.08084, over 2639925.43 frames. 
], batch size: 46, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 10:00:17,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89844.33333333333, ans=0.1 +2024-08-03 10:00:48,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=89881.0, ans=0.025 +2024-08-03 10:01:08,366 INFO [train.py:1114] (2/4) Epoch 7, batch 2700, loss[loss=0.258, simple_loss=0.3316, pruned_loss=0.09218, over 13540.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3125, pruned_loss=0.08103, over 2637338.28 frames. ], batch size: 40, lr: 1.73e-02, grad_scale: 16.0 +2024-08-03 10:01:11,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0 +2024-08-03 10:01:16,147 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.262e+02 1.504e+02 2.229e+02 3.961e+02, threshold=3.008e+02, percent-clipped=4.0 +2024-08-03 10:01:22,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=90027.66666666667, ans=0.025 +2024-08-03 10:01:27,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=90064.33333333333, ans=0.0 +2024-08-03 10:01:53,307 INFO [train.py:1114] (2/4) Epoch 7, batch 2750, loss[loss=0.2502, simple_loss=0.317, pruned_loss=0.09166, over 13338.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3117, pruned_loss=0.08111, over 2634505.52 frames. ], batch size: 34, lr: 1.73e-02, grad_scale: 8.0 +2024-08-03 10:01:54,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90174.33333333333, ans=0.1 +2024-08-03 10:02:08,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=90211.0, ans=0.125 +2024-08-03 10:02:10,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.80 vs. limit=15.0 +2024-08-03 10:02:13,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=90247.66666666667, ans=0.125 +2024-08-03 10:02:14,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=90247.66666666667, ans=0.125 +2024-08-03 10:02:31,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=90321.0, ans=0.125 +2024-08-03 10:02:37,940 INFO [train.py:1114] (2/4) Epoch 7, batch 2800, loss[loss=0.3307, simple_loss=0.3759, pruned_loss=0.1428, over 9362.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3126, pruned_loss=0.08176, over 2627124.23 frames. 
], batch size: 96, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:02:46,064 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.878e+01 1.254e+02 1.435e+02 1.719e+02 3.010e+02, threshold=2.870e+02, percent-clipped=1.0 +2024-08-03 10:02:59,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=90431.0, ans=0.02 +2024-08-03 10:03:17,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90504.33333333333, ans=0.125 +2024-08-03 10:03:26,255 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:03:27,737 INFO [train.py:1114] (2/4) Epoch 7, batch 2850, loss[loss=0.2311, simple_loss=0.3064, pruned_loss=0.07785, over 13357.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3134, pruned_loss=0.08253, over 2622176.97 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0 +2024-08-03 10:03:29,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=90541.0, ans=0.0 +2024-08-03 10:03:42,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.05 vs. limit=22.5 +2024-08-03 10:03:43,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=90577.66666666667, ans=0.125 +2024-08-03 10:03:50,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=90614.33333333333, ans=0.125 +2024-08-03 10:03:53,785 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.81 vs. limit=22.5 +2024-08-03 10:04:04,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=90687.66666666667, ans=0.2 +2024-08-03 10:04:06,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90687.66666666667, ans=0.125 +2024-08-03 10:04:07,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90687.66666666667, ans=0.125 +2024-08-03 10:04:12,845 INFO [train.py:1114] (2/4) Epoch 7, batch 2900, loss[loss=0.2312, simple_loss=0.305, pruned_loss=0.07869, over 13366.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3142, pruned_loss=0.08237, over 2632563.74 frames. 
], batch size: 36, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:04:24,256 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.250e+02 1.523e+02 1.874e+02 3.482e+02, threshold=3.046e+02, percent-clipped=1.0
+2024-08-03 10:04:27,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=90761.0, ans=0.07
+2024-08-03 10:04:27,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90761.0, ans=0.125
+2024-08-03 10:04:31,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90761.0, ans=0.1
+2024-08-03 10:04:45,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0
+2024-08-03 10:04:49,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=90834.33333333333, ans=0.0
+2024-08-03 10:04:49,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=6.0
+2024-08-03 10:05:03,307 INFO [train.py:1114] (2/4) Epoch 7, batch 2950, loss[loss=0.1959, simple_loss=0.2795, pruned_loss=0.05614, over 13323.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3126, pruned_loss=0.0814, over 2631142.98 frames. ], batch size: 34, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:05:03,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0
+2024-08-03 10:05:20,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.24 vs. limit=15.0
+2024-08-03 10:05:32,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.03 vs. limit=15.0
+2024-08-03 10:05:36,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=91017.66666666667, ans=0.0
+2024-08-03 10:05:36,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=91017.66666666667, ans=0.04949747468305833
+2024-08-03 10:05:37,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=91017.66666666667, ans=0.2
+2024-08-03 10:05:38,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0
+2024-08-03 10:05:39,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=91054.33333333333, ans=0.2
+2024-08-03 10:05:41,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91054.33333333333, ans=0.125
+2024-08-03 10:05:47,199 INFO [train.py:1114] (2/4) Epoch 7, batch 3000, loss[loss=0.2243, simple_loss=0.3051, pruned_loss=0.07175, over 13540.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3122, pruned_loss=0.08116, over 2631296.04 frames. ], batch size: 37, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:05:47,199 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 10:06:06,640 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.1942, simple_loss=0.2938, pruned_loss=0.04733, over 944034.00 frames.
+2024-08-03 10:06:06,640 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 10:06:06,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=91091.0, ans=0.035
+2024-08-03 10:06:07,159 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.92 vs. limit=22.5
+2024-08-03 10:06:08,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=91091.0, ans=0.05
+2024-08-03 10:06:11,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=91091.0, ans=0.1
+2024-08-03 10:06:13,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=91091.0, ans=0.0
+2024-08-03 10:06:14,472 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.238e+02 1.419e+02 1.719e+02 4.359e+02, threshold=2.839e+02, percent-clipped=6.0
+2024-08-03 10:06:16,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=91127.66666666667, ans=0.025
+2024-08-03 10:06:22,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91127.66666666667, ans=0.1
+2024-08-03 10:06:33,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=91201.0, ans=0.09899494936611666
+2024-08-03 10:06:46,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=91237.66666666667, ans=0.0
+2024-08-03 10:06:51,247 INFO [train.py:1114] (2/4) Epoch 7, batch 3050, loss[loss=0.2065, simple_loss=0.286, pruned_loss=0.06349, over 13536.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.313, pruned_loss=0.08121, over 2628580.15 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:06:55,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=91274.33333333333, ans=0.0
+2024-08-03 10:06:58,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0
+2024-08-03 10:07:03,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=91311.0, ans=0.0
+2024-08-03 10:07:05,290 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:07:23,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=91384.33333333333, ans=0.025
+2024-08-03 10:07:26,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.48 vs. limit=15.0
+2024-08-03 10:07:34,429 INFO [train.py:1114] (2/4) Epoch 7, batch 3100, loss[loss=0.2784, simple_loss=0.3405, pruned_loss=0.1081, over 13333.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3125, pruned_loss=0.08109, over 2628811.18 frames. ], batch size: 46, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:07:35,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91457.66666666667, ans=0.125
+2024-08-03 10:07:42,185 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.930e+01 1.259e+02 1.446e+02 1.808e+02 2.827e+02, threshold=2.891e+02, percent-clipped=0.0
+2024-08-03 10:07:45,760 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:07:48,464 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:07:50,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=91531.0, ans=0.125
+2024-08-03 10:07:51,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=91531.0, ans=0.0
+2024-08-03 10:07:53,660 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:07:56,261 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=3.798e-02
+2024-08-03 10:08:02,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91567.66666666667, ans=0.1
+2024-08-03 10:08:16,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=91641.0, ans=0.0
+2024-08-03 10:08:17,430 INFO [train.py:1114] (2/4) Epoch 7, batch 3150, loss[loss=0.2445, simple_loss=0.3227, pruned_loss=0.0832, over 13239.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3123, pruned_loss=0.08083, over 2630256.44 frames. ], batch size: 49, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:08:45,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=91751.0, ans=0.125
+2024-08-03 10:08:56,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=91787.66666666667, ans=0.0
+2024-08-03 10:09:01,222 INFO [train.py:1114] (2/4) Epoch 7, batch 3200, loss[loss=0.2585, simple_loss=0.3271, pruned_loss=0.095, over 13543.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3113, pruned_loss=0.08035, over 2636247.83 frames. ], batch size: 37, lr: 1.71e-02, grad_scale: 32.0
+2024-08-03 10:09:06,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.65 vs. limit=15.0
+2024-08-03 10:09:08,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.267e+02 1.711e+02 2.068e+02 3.292e+02, threshold=3.421e+02, percent-clipped=4.0
+2024-08-03 10:09:12,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=91861.0, ans=0.2
+2024-08-03 10:09:34,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=91934.33333333333, ans=0.04949747468305833
+2024-08-03 10:09:39,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.18 vs. limit=15.0
+2024-08-03 10:09:48,353 INFO [train.py:1114] (2/4) Epoch 7, batch 3250, loss[loss=0.2463, simple_loss=0.3256, pruned_loss=0.08347, over 13389.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3113, pruned_loss=0.0802, over 2640516.01 frames. ], batch size: 38, lr: 1.71e-02, grad_scale: 32.0
+2024-08-03 10:10:07,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=92081.0, ans=0.0
+2024-08-03 10:10:33,762 INFO [train.py:1114] (2/4) Epoch 7, batch 3300, loss[loss=0.2517, simple_loss=0.3267, pruned_loss=0.08832, over 12974.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3099, pruned_loss=0.07963, over 2642766.98 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:10:34,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=92191.0, ans=0.2
+2024-08-03 10:10:42,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.670e+01 1.272e+02 1.617e+02 1.965e+02 3.247e+02, threshold=3.234e+02, percent-clipped=0.0
+2024-08-03 10:11:06,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=92337.66666666667, ans=0.5
+2024-08-03 10:11:15,784 INFO [train.py:1114] (2/4) Epoch 7, batch 3350, loss[loss=0.2617, simple_loss=0.3333, pruned_loss=0.09509, over 13108.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3109, pruned_loss=0.08004, over 2630521.25 frames. ], batch size: 48, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:11:15,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=92374.33333333333, ans=0.125
+2024-08-03 10:11:16,771 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.583e-03
+2024-08-03 10:11:19,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92374.33333333333, ans=0.0
+2024-08-03 10:11:22,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=92374.33333333333, ans=0.0
+2024-08-03 10:11:25,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.54 vs. limit=22.5
+2024-08-03 10:11:43,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=92484.33333333333, ans=0.2
+2024-08-03 10:11:48,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92484.33333333333, ans=0.1
+2024-08-03 10:11:49,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=92484.33333333333, ans=0.0
+2024-08-03 10:11:57,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=92521.0, ans=0.125
+2024-08-03 10:11:59,781 INFO [train.py:1114] (2/4) Epoch 7, batch 3400, loss[loss=0.228, simple_loss=0.2877, pruned_loss=0.08412, over 13540.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3108, pruned_loss=0.08028, over 2626153.49 frames. ], batch size: 31, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:12:05,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=92557.66666666667, ans=0.025
+2024-08-03 10:12:05,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=92557.66666666667, ans=0.125
+2024-08-03 10:12:08,996 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.271e+02 1.505e+02 1.907e+02 3.089e+02, threshold=3.010e+02, percent-clipped=0.0
+2024-08-03 10:12:18,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=92631.0, ans=0.1
+2024-08-03 10:12:19,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=92631.0, ans=0.125
+2024-08-03 10:12:29,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=92667.66666666667, ans=0.2
+2024-08-03 10:12:36,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=92704.33333333333, ans=0.125
+2024-08-03 10:12:37,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=92704.33333333333, ans=0.125
+2024-08-03 10:12:42,891 INFO [train.py:1114] (2/4) Epoch 7, batch 3450, loss[loss=0.2363, simple_loss=0.3241, pruned_loss=0.07425, over 12985.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3108, pruned_loss=0.08014, over 2629743.39 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:12:45,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.45 vs. limit=15.0
+2024-08-03 10:12:48,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0
+2024-08-03 10:12:48,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=92741.0, ans=0.2
+2024-08-03 10:12:51,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0
+2024-08-03 10:13:04,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=92814.33333333333, ans=0.0
+2024-08-03 10:13:25,188 INFO [train.py:1114] (2/4) Epoch 7, batch 3500, loss[loss=0.206, simple_loss=0.2837, pruned_loss=0.06413, over 13527.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3106, pruned_loss=0.08028, over 2632272.41 frames. ], batch size: 34, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:13:25,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=92924.33333333333, ans=0.0
+2024-08-03 10:13:33,818 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.223e+02 1.539e+02 1.881e+02 2.645e+02, threshold=3.078e+02, percent-clipped=0.0
+2024-08-03 10:13:38,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=92961.0, ans=0.125
+2024-08-03 10:13:41,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=92997.66666666667, ans=0.09899494936611666
+2024-08-03 10:13:43,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92997.66666666667, ans=0.1
+2024-08-03 10:13:48,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=92997.66666666667, ans=0.125
+2024-08-03 10:14:09,033 INFO [train.py:1114] (2/4) Epoch 7, batch 3550, loss[loss=0.2285, simple_loss=0.3121, pruned_loss=0.07251, over 12545.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3134, pruned_loss=0.08201, over 2630891.21 frames. ], batch size: 58, lr: 1.70e-02, grad_scale: 16.0
+2024-08-03 10:14:10,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=93107.66666666667, ans=0.125
+2024-08-03 10:14:25,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=93144.33333333333, ans=0.125
+2024-08-03 10:14:37,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.39 vs. limit=6.0
+2024-08-03 10:14:53,564 INFO [train.py:1114] (2/4) Epoch 7, batch 3600, loss[loss=0.2976, simple_loss=0.3481, pruned_loss=0.1235, over 9180.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3198, pruned_loss=0.08816, over 2489804.40 frames. ], batch size: 97, lr: 1.70e-02, grad_scale: 32.0
+2024-08-03 10:14:57,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=10.0
+2024-08-03 10:14:59,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93291.0, ans=0.1
+2024-08-03 10:15:02,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.315e+02 1.480e+02 1.683e+02 2.632e+02, threshold=2.960e+02, percent-clipped=0.0
+2024-08-03 10:15:11,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.36 vs. limit=15.0
+2024-08-03 10:15:23,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.31 vs. limit=15.0
+2024-08-03 10:16:13,467 INFO [train.py:1114] (2/4) Epoch 8, batch 0, loss[loss=0.1977, simple_loss=0.2771, pruned_loss=0.05917, over 13336.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2771, pruned_loss=0.05917, over 13336.00 frames. ], batch size: 33, lr: 1.60e-02, grad_scale: 32.0
+2024-08-03 10:16:13,468 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 10:16:24,029 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.1977, simple_loss=0.2989, pruned_loss=0.04829, over 944034.00 frames.
+2024-08-03 10:16:24,030 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 10:16:26,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=93437.66666666667, ans=0.0
+2024-08-03 10:16:27,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.97 vs. limit=6.0
+2024-08-03 10:17:08,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93621.0, ans=0.1
+2024-08-03 10:17:09,637 INFO [train.py:1114] (2/4) Epoch 8, batch 50, loss[loss=0.1859, simple_loss=0.2627, pruned_loss=0.05454, over 13408.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3143, pruned_loss=0.08143, over 578435.84 frames. ], batch size: 32, lr: 1.60e-02, grad_scale: 16.0
+2024-08-03 10:17:15,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=93621.0, ans=0.125
+2024-08-03 10:17:23,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=93657.66666666667, ans=0.125
+2024-08-03 10:17:30,613 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.247e+02 1.447e+02 2.039e+02 3.809e+02, threshold=2.894e+02, percent-clipped=5.0
+2024-08-03 10:17:31,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0
+2024-08-03 10:17:46,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=93731.0, ans=0.0
+2024-08-03 10:17:48,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=93767.66666666667, ans=0.09899494936611666
+2024-08-03 10:17:54,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0
+2024-08-03 10:17:55,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=93767.66666666667, ans=0.125
+2024-08-03 10:17:56,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=93804.33333333333, ans=0.0
+2024-08-03 10:17:57,633 INFO [train.py:1114] (2/4) Epoch 8, batch 100, loss[loss=0.2724, simple_loss=0.3366, pruned_loss=0.1041, over 13518.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3153, pruned_loss=0.08207, over 1025677.12 frames. ], batch size: 35, lr: 1.60e-02, grad_scale: 16.0
+2024-08-03 10:18:08,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=93841.0, ans=0.125
+2024-08-03 10:18:14,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93841.0, ans=0.0
+2024-08-03 10:18:14,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.78 vs. limit=15.0
+2024-08-03 10:18:37,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93951.0, ans=0.125
+2024-08-03 10:18:43,049 INFO [train.py:1114] (2/4) Epoch 8, batch 150, loss[loss=0.2291, simple_loss=0.2952, pruned_loss=0.08143, over 13426.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3121, pruned_loss=0.08036, over 1387221.34 frames. ], batch size: 32, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:18:46,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=93987.66666666667, ans=0.125
+2024-08-03 10:18:54,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.33 vs. limit=22.5
+2024-08-03 10:19:02,666 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.928e+01 1.201e+02 1.310e+02 1.526e+02 2.654e+02, threshold=2.621e+02, percent-clipped=0.0
+2024-08-03 10:19:28,078 INFO [train.py:1114] (2/4) Epoch 8, batch 200, loss[loss=0.2422, simple_loss=0.3168, pruned_loss=0.08382, over 12475.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3098, pruned_loss=0.07885, over 1666074.11 frames. ], batch size: 58, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:19:32,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=94171.0, ans=0.2
+2024-08-03 10:19:40,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94207.66666666667, ans=0.125
+2024-08-03 10:19:45,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=94244.33333333333, ans=0.2
+2024-08-03 10:19:56,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=94244.33333333333, ans=0.1
+2024-08-03 10:19:56,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=94244.33333333333, ans=0.2
+2024-08-03 10:20:17,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=94317.66666666667, ans=0.025
+2024-08-03 10:20:23,205 INFO [train.py:1114] (2/4) Epoch 8, batch 250, loss[loss=0.2284, simple_loss=0.3173, pruned_loss=0.06976, over 13285.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3092, pruned_loss=0.07831, over 1884436.82 frames. ], batch size: 46, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:20:24,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=94354.33333333333, ans=0.125
+2024-08-03 10:20:40,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94391.0, ans=0.125
+2024-08-03 10:20:41,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=94391.0, ans=0.2
+2024-08-03 10:20:47,225 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.275e+02 1.578e+02 1.902e+02 3.207e+02, threshold=3.155e+02, percent-clipped=3.0
+2024-08-03 10:20:53,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=94427.66666666667, ans=0.125
+2024-08-03 10:21:00,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94464.33333333333, ans=0.125
+2024-08-03 10:21:04,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94464.33333333333, ans=0.1
+2024-08-03 10:21:10,899 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0
+2024-08-03 10:21:15,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=94501.0, ans=0.125
+2024-08-03 10:21:17,483 INFO [train.py:1114] (2/4) Epoch 8, batch 300, loss[loss=0.2563, simple_loss=0.3302, pruned_loss=0.09121, over 13438.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3081, pruned_loss=0.07777, over 2051065.07 frames. ], batch size: 42, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:21:18,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.77 vs. limit=15.0
+2024-08-03 10:21:19,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0
+2024-08-03 10:21:28,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=94574.33333333333, ans=0.04949747468305833
+2024-08-03 10:21:34,238 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:21:42,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94611.0, ans=0.125
+2024-08-03 10:21:43,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94611.0, ans=0.125
+2024-08-03 10:21:44,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=94611.0, ans=0.125
+2024-08-03 10:21:53,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.10 vs. limit=15.0
+2024-08-03 10:22:12,140 INFO [train.py:1114] (2/4) Epoch 8, batch 350, loss[loss=0.2261, simple_loss=0.2902, pruned_loss=0.08101, over 13570.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3082, pruned_loss=0.07742, over 2182521.28 frames. ], batch size: 33, lr: 1.59e-02, grad_scale: 16.0
+2024-08-03 10:22:32,140 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.242e+02 1.508e+02 2.025e+02 3.534e+02, threshold=3.015e+02, percent-clipped=1.0
+2024-08-03 10:22:51,975 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=15.0
+2024-08-03 10:22:55,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=94867.66666666667, ans=0.05
+2024-08-03 10:22:57,071 INFO [train.py:1114] (2/4) Epoch 8, batch 400, loss[loss=0.2823, simple_loss=0.345, pruned_loss=0.1098, over 13361.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3078, pruned_loss=0.07719, over 2286056.71 frames. ], batch size: 37, lr: 1.59e-02, grad_scale: 32.0
+2024-08-03 10:22:59,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.86 vs. limit=10.0
+2024-08-03 10:23:03,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=94904.33333333333, ans=0.125
+2024-08-03 10:23:03,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.73 vs. limit=15.0
+2024-08-03 10:23:16,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=94977.66666666667, ans=0.125
+2024-08-03 10:23:34,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=95051.0, ans=0.0
+2024-08-03 10:23:44,501 INFO [train.py:1114] (2/4) Epoch 8, batch 450, loss[loss=0.2311, simple_loss=0.3108, pruned_loss=0.07567, over 13544.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3082, pruned_loss=0.07739, over 2359887.91 frames. ], batch size: 38, lr: 1.59e-02, grad_scale: 32.0
+2024-08-03 10:23:59,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95124.33333333333, ans=0.125
+2024-08-03 10:24:08,014 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.730e+01 1.246e+02 1.455e+02 1.839e+02 3.207e+02, threshold=2.909e+02, percent-clipped=1.0
+2024-08-03 10:24:17,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=95197.66666666667, ans=0.125
+2024-08-03 10:24:18,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=95197.66666666667, ans=6.0
+2024-08-03 10:24:33,375 INFO [train.py:1114] (2/4) Epoch 8, batch 500, loss[loss=0.217, simple_loss=0.2934, pruned_loss=0.0703, over 13445.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3062, pruned_loss=0.07623, over 2425753.71 frames. ], batch size: 43, lr: 1.58e-02, grad_scale: 32.0
+2024-08-03 10:24:44,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.89 vs. limit=15.0
+2024-08-03 10:24:49,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.38 vs. limit=15.0
+2024-08-03 10:25:01,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95381.0, ans=0.1
+2024-08-03 10:25:20,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.77 vs. limit=22.5
+2024-08-03 10:25:21,150 INFO [train.py:1114] (2/4) Epoch 8, batch 550, loss[loss=0.239, simple_loss=0.3179, pruned_loss=0.08004, over 12987.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3065, pruned_loss=0.07639, over 2467162.32 frames. ], batch size: 48, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:25:42,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.590e+01 1.201e+02 1.471e+02 1.924e+02 3.912e+02, threshold=2.942e+02, percent-clipped=7.0
+2024-08-03 10:25:44,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=95527.66666666667, ans=0.125
+2024-08-03 10:26:00,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=95601.0, ans=0.025
+2024-08-03 10:26:06,485 INFO [train.py:1114] (2/4) Epoch 8, batch 600, loss[loss=0.2458, simple_loss=0.3214, pruned_loss=0.08509, over 13346.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3066, pruned_loss=0.07644, over 2506860.71 frames. ], batch size: 46, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:26:17,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=95637.66666666667, ans=0.0
+2024-08-03 10:26:37,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=95711.0, ans=0.0
+2024-08-03 10:26:59,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95784.33333333333, ans=0.1
+2024-08-03 10:27:01,403 INFO [train.py:1114] (2/4) Epoch 8, batch 650, loss[loss=0.2115, simple_loss=0.2933, pruned_loss=0.06485, over 13547.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3065, pruned_loss=0.07628, over 2542262.89 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:27:04,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=95821.0, ans=0.125
+2024-08-03 10:27:06,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=95821.0, ans=0.125
+2024-08-03 10:27:12,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=95857.66666666667, ans=0.09899494936611666
+2024-08-03 10:27:15,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95857.66666666667, ans=0.125
+2024-08-03 10:27:20,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=95894.33333333333, ans=0.0
+2024-08-03 10:27:22,945 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.916e+01 1.338e+02 1.718e+02 2.265e+02 3.658e+02, threshold=3.436e+02, percent-clipped=6.0
+2024-08-03 10:27:41,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=95931.0, ans=0.025
+2024-08-03 10:27:52,484 INFO [train.py:1114] (2/4) Epoch 8, batch 700, loss[loss=0.2477, simple_loss=0.312, pruned_loss=0.09164, over 13534.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3069, pruned_loss=0.07637, over 2564438.90 frames. ], batch size: 35, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:27:57,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96004.33333333333, ans=0.125
+2024-08-03 10:28:16,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=96077.66666666667, ans=0.025
+2024-08-03 10:28:17,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=96077.66666666667, ans=0.05
+2024-08-03 10:28:21,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=96114.33333333333, ans=0.0
+2024-08-03 10:28:37,999 INFO [train.py:1114] (2/4) Epoch 8, batch 750, loss[loss=0.2184, simple_loss=0.3058, pruned_loss=0.06556, over 13355.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3061, pruned_loss=0.07572, over 2582143.49 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0
+2024-08-03 10:28:55,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=96224.33333333333, ans=0.125
+2024-08-03 10:28:55,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=96224.33333333333, ans=0.0
+2024-08-03 10:29:15,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.402e+01 1.294e+02 1.560e+02 2.121e+02 3.650e+02, threshold=3.121e+02, percent-clipped=1.0
+2024-08-03 10:29:19,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=96261.0, ans=0.1
+2024-08-03 10:29:44,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96297.66666666667, ans=0.125
+2024-08-03 10:29:49,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.59 vs. limit=15.0
+2024-08-03 10:30:00,639 INFO [train.py:1114] (2/4) Epoch 8, batch 800, loss[loss=0.1899, simple_loss=0.2705, pruned_loss=0.05469, over 13353.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3065, pruned_loss=0.07617, over 2596104.09 frames. ], batch size: 33, lr: 1.58e-02, grad_scale: 16.0
+2024-08-03 10:30:03,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96371.0, ans=0.1
+2024-08-03 10:30:06,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=96371.0, ans=0.125
+2024-08-03 10:30:10,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=96407.66666666667, ans=0.0
+2024-08-03 10:30:13,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96407.66666666667, ans=0.1
+2024-08-03 10:30:15,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96407.66666666667, ans=0.1
+2024-08-03 10:30:51,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=96481.0, ans=0.0
+2024-08-03 10:31:04,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=96517.66666666667, ans=0.125
+2024-08-03 10:31:07,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=96517.66666666667, ans=0.0
+2024-08-03 10:31:10,619 INFO [train.py:1114] (2/4) Epoch 8, batch 850, loss[loss=0.2344, simple_loss=0.3218, pruned_loss=0.0735, over 13331.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3069, pruned_loss=0.07653, over 2608454.83 frames. ], batch size: 40, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:31:15,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.23 vs. limit=22.5
+2024-08-03 10:31:22,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=96591.0, ans=0.0
+2024-08-03 10:31:28,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96627.66666666667, ans=0.1
+2024-08-03 10:31:29,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=96627.66666666667, ans=0.2
+2024-08-03 10:31:30,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=96627.66666666667, ans=0.125
+2024-08-03 10:31:30,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=96627.66666666667, ans=0.0
+2024-08-03 10:31:32,145 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.603e+01 1.283e+02 1.438e+02 1.736e+02 2.880e+02, threshold=2.876e+02, percent-clipped=0.0
+2024-08-03 10:31:36,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96664.33333333333, ans=0.1
+2024-08-03 10:31:44,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96664.33333333333, ans=0.125
+2024-08-03 10:31:56,063 INFO [train.py:1114] (2/4) Epoch 8, batch 900, loss[loss=0.2133, simple_loss=0.2899, pruned_loss=0.0684, over 13330.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3073, pruned_loss=0.07698, over 2611156.75 frames. ], batch size: 33, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:32:10,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=96774.33333333333, ans=0.0
+2024-08-03 10:32:20,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96811.0, ans=0.125
+2024-08-03 10:32:30,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=96847.66666666667, ans=0.125
+2024-08-03 10:32:37,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96884.33333333333, ans=0.1
+2024-08-03 10:32:39,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96884.33333333333, ans=0.1
+2024-08-03 10:32:47,021 INFO [train.py:1114] (2/4) Epoch 8, batch 950, loss[loss=0.2254, simple_loss=0.2989, pruned_loss=0.07598, over 13540.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3073, pruned_loss=0.07722, over 2611601.09 frames. ], batch size: 34, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:32:49,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96921.0, ans=0.125
+2024-08-03 10:32:50,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=96921.0, ans=0.0
+2024-08-03 10:32:50,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=96921.0, ans=0.0
+2024-08-03 10:32:51,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.70 vs. limit=10.0
+2024-08-03 10:32:54,728 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0
+2024-08-03 10:33:00,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=96957.66666666667, ans=0.125
+2024-08-03 10:33:08,790 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.880e+01 1.218e+02 1.421e+02 1.776e+02 3.206e+02, threshold=2.842e+02, percent-clipped=1.0
+2024-08-03 10:33:19,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=12.0
+2024-08-03 10:33:32,185 INFO [train.py:1114] (2/4) Epoch 8, batch 1000, loss[loss=0.1928, simple_loss=0.2777, pruned_loss=0.05395, over 13369.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3083, pruned_loss=0.07776, over 2610157.64 frames. ], batch size: 35, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:33:47,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=97141.0, ans=0.2
+2024-08-03 10:33:50,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=97141.0, ans=0.125
+2024-08-03 10:33:53,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=97177.66666666667, ans=0.0
+2024-08-03 10:33:58,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=97177.66666666667, ans=0.02
+2024-08-03 10:33:59,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=6.0
+2024-08-03 10:34:19,173 INFO [train.py:1114] (2/4) Epoch 8, batch 1050, loss[loss=0.2687, simple_loss=0.3443, pruned_loss=0.09652, over 13586.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3068, pruned_loss=0.07689, over 2614727.72 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:34:21,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=97287.66666666667, ans=0.125
+2024-08-03 10:34:27,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.07 vs. limit=22.5
+2024-08-03 10:34:29,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=97324.33333333333, ans=0.125
+2024-08-03 10:34:31,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=97324.33333333333, ans=0.2
+2024-08-03 10:34:40,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=97361.0, ans=0.0
+2024-08-03 10:34:40,965 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.371e+01 1.174e+02 1.326e+02 1.659e+02 3.865e+02, threshold=2.652e+02, percent-clipped=4.0
+2024-08-03 10:35:04,457 INFO [train.py:1114] (2/4) Epoch 8, batch 1100, loss[loss=0.2291, simple_loss=0.3052, pruned_loss=0.07654, over 13567.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3062, pruned_loss=0.07685, over 2619472.28 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:35:05,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.53 vs. limit=15.0
+2024-08-03 10:35:21,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=97507.66666666667, ans=15.0
+2024-08-03 10:35:35,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=97581.0, ans=0.0
+2024-08-03 10:35:35,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=97581.0, ans=0.125
+2024-08-03 10:35:53,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=97617.66666666667, ans=0.125
+2024-08-03 10:35:54,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0
+2024-08-03 10:35:56,404 INFO [train.py:1114] (2/4) Epoch 8, batch 1150, loss[loss=0.2448, simple_loss=0.3171, pruned_loss=0.08627, over 13563.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3065, pruned_loss=0.07684, over 2617981.77 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0
+2024-08-03 10:36:00,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=97654.33333333333, ans=0.0
+2024-08-03 10:36:17,815 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.235e+02 1.421e+02 1.826e+02 2.699e+02, threshold=2.842e+02, percent-clipped=1.0
+2024-08-03 10:36:23,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=97764.33333333333, ans=0.0
+2024-08-03 10:36:26,537 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=15.0
+2024-08-03 10:36:39,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=97801.0, ans=0.04949747468305833
+2024-08-03 10:36:41,664 INFO [train.py:1114] (2/4) Epoch 8, batch 1200, loss[loss=0.2582, simple_loss=0.3436, pruned_loss=0.08636, over 13559.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3071, pruned_loss=0.07684, over 2615017.00 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 32.0
+2024-08-03 10:36:41,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=97837.66666666667, ans=0.02
+2024-08-03 10:36:46,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=97837.66666666667, ans=0.125
+2024-08-03 10:37:04,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=97911.0, ans=0.0
+2024-08-03 10:37:08,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=97947.66666666667, ans=0.05
+2024-08-03 10:37:12,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=97947.66666666667, ans=0.0
+2024-08-03 10:37:37,205 INFO [train.py:1114] (2/4) Epoch 8, batch 1250, loss[loss=0.2295, simple_loss=0.3154, pruned_loss=0.07184, over 13450.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3073, pruned_loss=0.07674, over 2627028.97 frames. ], batch size: 42, lr: 1.56e-02, grad_scale: 32.0
+2024-08-03 10:37:37,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=98021.0, ans=0.125
+2024-08-03 10:37:39,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=98021.0, ans=0.2
+2024-08-03 10:37:54,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=98094.33333333333, ans=0.5
+2024-08-03 10:37:58,744 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.185e+02 1.323e+02 1.561e+02 3.297e+02, threshold=2.645e+02, percent-clipped=2.0
+2024-08-03 10:38:19,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=98167.66666666667, ans=0.0
+2024-08-03 10:38:22,560 INFO [train.py:1114] (2/4) Epoch 8, batch 1300, loss[loss=0.2657, simple_loss=0.3403, pruned_loss=0.09553, over 12849.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3064, pruned_loss=0.07631, over 2630374.44 frames. ], batch size: 52, lr: 1.56e-02, grad_scale: 16.0
+2024-08-03 10:39:09,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98277.66666666667, ans=0.1
+2024-08-03 10:39:13,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=98314.33333333333, ans=0.125
+2024-08-03 10:39:20,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=98314.33333333333, ans=0.2
+2024-08-03 10:39:30,883 INFO [train.py:1114] (2/4) Epoch 8, batch 1350, loss[loss=0.1992, simple_loss=0.2831, pruned_loss=0.05764, over 13539.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.305, pruned_loss=0.07521, over 2638526.26 frames. ], batch size: 37, lr: 1.56e-02, grad_scale: 4.0
+2024-08-03 10:39:59,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=98461.0, ans=0.2
+2024-08-03 10:40:00,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=98461.0, ans=0.125
+2024-08-03 10:40:01,338 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.224e+02 1.395e+02 1.638e+02 2.508e+02, threshold=2.789e+02, percent-clipped=0.0
+2024-08-03 10:40:20,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=98534.33333333333, ans=0.125
+2024-08-03 10:40:22,546 INFO [train.py:1114] (2/4) Epoch 8, batch 1400, loss[loss=0.2114, simple_loss=0.272, pruned_loss=0.07537, over 13269.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3054, pruned_loss=0.07547, over 2642267.10 frames. ], batch size: 31, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:40:27,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.34 vs. limit=15.0
+2024-08-03 10:40:32,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=98607.66666666667, ans=0.125
+2024-08-03 10:40:32,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=98607.66666666667, ans=0.125
+2024-08-03 10:40:37,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=98607.66666666667, ans=0.2
+2024-08-03 10:40:52,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=98681.0, ans=10.0
+2024-08-03 10:41:01,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.08 vs. limit=10.0
+2024-08-03 10:41:07,775 INFO [train.py:1114] (2/4) Epoch 8, batch 1450, loss[loss=0.236, simple_loss=0.3147, pruned_loss=0.07867, over 13428.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3064, pruned_loss=0.07605, over 2641731.89 frames. ], batch size: 43, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:41:15,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=98754.33333333333, ans=0.125
+2024-08-03 10:41:34,028 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.227e+02 1.462e+02 1.726e+02 3.399e+02, threshold=2.923e+02, percent-clipped=2.0
+2024-08-03 10:41:34,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. limit=6.0
+2024-08-03 10:41:54,820 INFO [train.py:1114] (2/4) Epoch 8, batch 1500, loss[loss=0.2242, simple_loss=0.3072, pruned_loss=0.0706, over 13415.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3072, pruned_loss=0.07624, over 2640789.64 frames. ], batch size: 39, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:42:09,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=98974.33333333333, ans=0.125
+2024-08-03 10:42:27,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=99047.66666666667, ans=0.07
+2024-08-03 10:42:32,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=99084.33333333333, ans=0.09899494936611666
+2024-08-03 10:42:40,620 INFO [train.py:1114] (2/4) Epoch 8, batch 1550, loss[loss=0.2659, simple_loss=0.3465, pruned_loss=0.09267, over 13385.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3078, pruned_loss=0.07677, over 2630888.60 frames. ], batch size: 41, lr: 1.56e-02, grad_scale: 8.0
+2024-08-03 10:43:00,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99194.33333333333, ans=0.1
+2024-08-03 10:43:04,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=99194.33333333333, ans=0.0
+2024-08-03 10:43:04,922 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.564e+01 1.246e+02 1.506e+02 1.858e+02 4.061e+02, threshold=3.012e+02, percent-clipped=4.0
+2024-08-03 10:43:15,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99231.0, ans=0.125
+2024-08-03 10:43:15,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99231.0, ans=0.125
+2024-08-03 10:43:20,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=99267.66666666667, ans=0.125
+2024-08-03 10:43:29,765 INFO [train.py:1114] (2/4) Epoch 8, batch 1600, loss[loss=0.2476, simple_loss=0.3238, pruned_loss=0.08573, over 13576.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3079, pruned_loss=0.07715, over 2623678.05 frames. ], batch size: 39, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:43:29,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=99304.33333333333, ans=0.0
+2024-08-03 10:43:42,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99341.0, ans=0.1
+2024-08-03 10:43:46,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.96 vs. limit=15.0
+2024-08-03 10:43:48,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.34 vs. limit=22.5
+2024-08-03 10:43:53,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=99377.66666666667, ans=0.2
+2024-08-03 10:43:55,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0
+2024-08-03 10:44:03,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=99414.33333333333, ans=0.5
+2024-08-03 10:44:15,506 INFO [train.py:1114] (2/4) Epoch 8, batch 1650, loss[loss=0.2073, simple_loss=0.2989, pruned_loss=0.05784, over 13333.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3077, pruned_loss=0.07736, over 2620657.18 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:44:40,468 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.002e+02 1.254e+02 1.426e+02 1.791e+02 5.006e+02, threshold=2.852e+02, percent-clipped=4.0
+2024-08-03 10:45:01,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=99634.33333333333, ans=0.125
+2024-08-03 10:45:03,283 INFO [train.py:1114] (2/4) Epoch 8, batch 1700, loss[loss=0.1968, simple_loss=0.2665, pruned_loss=0.06353, over 13249.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3067, pruned_loss=0.07656, over 2629182.68 frames. ], batch size: 31, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:45:31,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0
+2024-08-03 10:45:46,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=99817.66666666667, ans=0.125
+2024-08-03 10:45:47,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=99854.33333333333, ans=0.0
+2024-08-03 10:45:48,395 INFO [train.py:1114] (2/4) Epoch 8, batch 1750, loss[loss=0.1879, simple_loss=0.2613, pruned_loss=0.05727, over 13562.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3059, pruned_loss=0.07596, over 2632903.58 frames. ], batch size: 31, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:45:51,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=99854.33333333333, ans=0.125
+2024-08-03 10:45:51,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=15.0
+2024-08-03 10:46:04,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.33 vs. limit=6.0
+2024-08-03 10:46:13,076 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.856e+01 1.232e+02 1.552e+02 2.162e+02 4.270e+02, threshold=3.103e+02, percent-clipped=14.0
+2024-08-03 10:46:23,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=99964.33333333333, ans=0.125
+2024-08-03 10:46:34,039 INFO [train.py:1114] (2/4) Epoch 8, batch 1800, loss[loss=0.235, simple_loss=0.3182, pruned_loss=0.07592, over 13537.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3064, pruned_loss=0.07608, over 2634247.60 frames. ], batch size: 38, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:46:40,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=100037.66666666667, ans=0.5
+2024-08-03 10:47:00,141 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:47:02,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=100111.0, ans=0.125
+2024-08-03 10:47:09,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100147.66666666667, ans=0.1
+2024-08-03 10:47:24,467 INFO [train.py:1114] (2/4) Epoch 8, batch 1850, loss[loss=0.2212, simple_loss=0.3078, pruned_loss=0.0673, over 13399.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3059, pruned_loss=0.07574, over 2636418.42 frames. ], batch size: 39, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:47:31,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.11 vs. limit=15.0
+2024-08-03 10:47:31,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100221.0, ans=0.1
+2024-08-03 10:47:48,905 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.993e+01 1.278e+02 1.540e+02 2.004e+02 3.260e+02, threshold=3.079e+02, percent-clipped=4.0
+2024-08-03 10:48:02,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100367.66666666667, ans=0.1
+2024-08-03 10:48:07,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=100367.66666666667, ans=0.95
+2024-08-03 10:48:09,645 INFO [train.py:1114] (2/4) Epoch 8, batch 1900, loss[loss=0.248, simple_loss=0.3243, pruned_loss=0.08587, over 13311.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3055, pruned_loss=0.07535, over 2639503.44 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:48:15,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=100404.33333333333, ans=0.0
+2024-08-03 10:48:16,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100404.33333333333, ans=0.125
+2024-08-03 10:48:20,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100441.0, ans=0.125
+2024-08-03 10:48:23,395 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:48:44,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100514.33333333333, ans=0.125
+2024-08-03 10:48:53,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=100551.0, ans=15.0
+2024-08-03 10:48:56,509 INFO [train.py:1114] (2/4) Epoch 8, batch 1950, loss[loss=0.2049, simple_loss=0.2889, pruned_loss=0.06042, over 13567.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3064, pruned_loss=0.07549, over 2646094.18 frames. ], batch size: 36, lr: 1.55e-02, grad_scale: 16.0
+2024-08-03 10:48:58,652 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:48:58,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100587.66666666667, ans=0.1
+2024-08-03 10:48:58,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100587.66666666667, ans=0.125
+2024-08-03 10:49:11,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100624.33333333333, ans=0.125
+2024-08-03 10:49:21,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.223e+02 1.431e+02 1.772e+02 2.626e+02, threshold=2.861e+02, percent-clipped=0.0
+2024-08-03 10:49:23,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=100697.66666666667, ans=0.0
+2024-08-03 10:49:42,385 INFO [train.py:1114] (2/4) Epoch 8, batch 2000, loss[loss=0.2107, simple_loss=0.2752, pruned_loss=0.07313, over 13520.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3073, pruned_loss=0.07624, over 2635710.61 frames. ], batch size: 31, lr: 1.54e-02, grad_scale: 32.0
+2024-08-03 10:49:49,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0
+2024-08-03 10:49:50,777 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:49:57,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=100807.66666666667, ans=0.2
+2024-08-03 10:49:59,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=100807.66666666667, ans=0.0
+2024-08-03 10:50:06,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=100844.33333333333, ans=0.125
+2024-08-03 10:50:08,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=100844.33333333333, ans=0.0
+2024-08-03 10:50:08,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=100844.33333333333, ans=0.125
+2024-08-03 10:50:14,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.71 vs. limit=22.5
+2024-08-03 10:50:18,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.45 vs. limit=15.0
+2024-08-03 10:50:20,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100881.0, ans=0.125
+2024-08-03 10:50:21,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=100881.0, ans=0.1
+2024-08-03 10:50:24,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100917.66666666667, ans=0.125
+2024-08-03 10:50:38,485 INFO [train.py:1114] (2/4) Epoch 8, batch 2050, loss[loss=0.1977, simple_loss=0.2778, pruned_loss=0.05883, over 13424.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3066, pruned_loss=0.07607, over 2633786.41 frames. ], batch size: 32, lr: 1.54e-02, grad_scale: 32.0
+2024-08-03 10:51:01,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101027.66666666667, ans=0.1
+2024-08-03 10:51:03,411 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.251e+02 1.508e+02 1.862e+02 2.983e+02, threshold=3.016e+02, percent-clipped=1.0
+2024-08-03 10:51:06,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101064.33333333333, ans=0.125
+2024-08-03 10:51:19,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=101101.0, ans=0.025
+2024-08-03 10:51:23,594 INFO [train.py:1114] (2/4) Epoch 8, batch 2100, loss[loss=0.2398, simple_loss=0.3155, pruned_loss=0.08205, over 13544.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3052, pruned_loss=0.07532, over 2638708.86 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:51:28,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=101137.66666666667, ans=0.125
+2024-08-03 10:51:41,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101211.0, ans=0.1
+2024-08-03 10:52:09,819 INFO [train.py:1114] (2/4) Epoch 8, batch 2150, loss[loss=0.2531, simple_loss=0.3246, pruned_loss=0.09084, over 13572.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.305, pruned_loss=0.07509, over 2647740.38 frames. ], batch size: 36, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:52:36,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.507e+01 1.244e+02 1.463e+02 2.141e+02 4.797e+02, threshold=2.925e+02, percent-clipped=7.0
+2024-08-03 10:52:57,018 INFO [train.py:1114] (2/4) Epoch 8, batch 2200, loss[loss=0.2278, simple_loss=0.3002, pruned_loss=0.07773, over 13398.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.304, pruned_loss=0.07459, over 2646123.82 frames. ], batch size: 39, lr: 1.54e-02, grad_scale: 16.0
+2024-08-03 10:53:06,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=101541.0, ans=0.0
+2024-08-03 10:53:09,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.91 vs.
limit=12.0 +2024-08-03 10:53:12,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101541.0, ans=0.0 +2024-08-03 10:53:13,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=101541.0, ans=0.125 +2024-08-03 10:53:25,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=101614.33333333333, ans=0.2 +2024-08-03 10:53:41,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=101687.66666666667, ans=0.125 +2024-08-03 10:53:42,324 INFO [train.py:1114] (2/4) Epoch 8, batch 2250, loss[loss=0.223, simple_loss=0.3051, pruned_loss=0.07051, over 13367.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.3046, pruned_loss=0.07506, over 2642488.15 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:53:47,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=101687.66666666667, ans=0.0 +2024-08-03 10:53:52,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.90 vs. limit=22.5 +2024-08-03 10:53:53,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=101724.33333333333, ans=0.125 +2024-08-03 10:54:09,240 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.704e+01 1.191e+02 1.378e+02 1.872e+02 3.290e+02, threshold=2.756e+02, percent-clipped=1.0 +2024-08-03 10:54:22,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=101797.66666666667, ans=0.125 +2024-08-03 10:54:39,888 INFO [train.py:1114] (2/4) Epoch 8, batch 2300, loss[loss=0.1911, simple_loss=0.2679, pruned_loss=0.05711, over 13595.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3045, pruned_loss=0.07546, over 2638344.35 frames. ], batch size: 33, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:54:50,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101907.66666666667, ans=0.125 +2024-08-03 10:55:10,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=12.0 +2024-08-03 10:55:11,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=101981.0, ans=0.0 +2024-08-03 10:55:24,951 INFO [train.py:1114] (2/4) Epoch 8, batch 2350, loss[loss=0.2284, simple_loss=0.3149, pruned_loss=0.07095, over 13555.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3052, pruned_loss=0.07591, over 2641624.35 frames. 
], batch size: 38, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:55:53,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102127.66666666667, ans=0.1 +2024-08-03 10:55:55,516 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.744e+01 1.287e+02 1.695e+02 2.279e+02 3.908e+02, threshold=3.390e+02, percent-clipped=9.0 +2024-08-03 10:56:16,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=102201.0, ans=0.04949747468305833 +2024-08-03 10:56:24,745 INFO [train.py:1114] (2/4) Epoch 8, batch 2400, loss[loss=0.2252, simple_loss=0.3072, pruned_loss=0.07162, over 13526.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3052, pruned_loss=0.07537, over 2642907.93 frames. ], batch size: 35, lr: 1.53e-02, grad_scale: 32.0 +2024-08-03 10:56:26,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=102237.66666666667, ans=0.2 +2024-08-03 10:56:31,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=102237.66666666667, ans=0.125 +2024-08-03 10:56:58,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=102347.66666666667, ans=0.07 +2024-08-03 10:56:59,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.52 vs. limit=6.0 +2024-08-03 10:57:00,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.40 vs. limit=15.0 +2024-08-03 10:57:11,095 INFO [train.py:1114] (2/4) Epoch 8, batch 2450, loss[loss=0.2031, simple_loss=0.2944, pruned_loss=0.05588, over 13353.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3062, pruned_loss=0.07601, over 2633049.91 frames. ], batch size: 37, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:57:19,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=102457.66666666667, ans=0.125 +2024-08-03 10:57:32,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-08-03 10:57:33,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=102494.33333333333, ans=0.125 +2024-08-03 10:57:40,049 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.286e+02 1.590e+02 1.975e+02 2.991e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-03 10:57:47,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102531.0, ans=0.1 +2024-08-03 10:57:48,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=102531.0, ans=10.0 +2024-08-03 10:57:59,037 INFO [train.py:1114] (2/4) Epoch 8, batch 2500, loss[loss=0.2242, simple_loss=0.3067, pruned_loss=0.07082, over 13390.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3058, pruned_loss=0.07563, over 2636624.57 frames. 
], batch size: 39, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:58:03,685 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:58:03,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=102604.33333333333, ans=0.125 +2024-08-03 10:58:05,791 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-08-03 10:58:05,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.64 vs. limit=10.0 +2024-08-03 10:58:16,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=102641.0, ans=0.125 +2024-08-03 10:58:17,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=102677.66666666667, ans=0.125 +2024-08-03 10:58:41,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=102751.0, ans=0.125 +2024-08-03 10:58:44,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.52 vs. limit=15.0 +2024-08-03 10:58:45,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102751.0, ans=0.125 +2024-08-03 10:58:48,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=102751.0, ans=0.0 +2024-08-03 10:58:50,556 INFO [train.py:1114] (2/4) Epoch 8, batch 2550, loss[loss=0.2235, simple_loss=0.2846, pruned_loss=0.08122, over 13538.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3062, pruned_loss=0.07603, over 2638000.71 frames. ], batch size: 31, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:58:53,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.99 vs. limit=15.0 +2024-08-03 10:58:54,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=102787.66666666667, ans=0.5 +2024-08-03 10:59:11,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102861.0, ans=0.125 +2024-08-03 10:59:15,649 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.849e+01 1.203e+02 1.342e+02 1.554e+02 2.450e+02, threshold=2.684e+02, percent-clipped=0.0 +2024-08-03 10:59:25,886 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-08-03 10:59:34,147 INFO [train.py:1114] (2/4) Epoch 8, batch 2600, loss[loss=0.2172, simple_loss=0.2912, pruned_loss=0.07156, over 13561.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3064, pruned_loss=0.07613, over 2637230.05 frames. 
], batch size: 36, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:00:11,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=103081.0, ans=0.0 +2024-08-03 11:00:17,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=103117.66666666667, ans=15.0 +2024-08-03 11:00:19,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.24 vs. limit=15.0 +2024-08-03 11:00:23,275 INFO [train.py:1114] (2/4) Epoch 8, batch 2650, loss[loss=0.2331, simple_loss=0.3096, pruned_loss=0.07829, over 13346.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3061, pruned_loss=0.07585, over 2640052.66 frames. ], batch size: 46, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:00:48,525 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.954e+01 1.189e+02 1.373e+02 1.657e+02 2.856e+02, threshold=2.745e+02, percent-clipped=2.0 +2024-08-03 11:00:56,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=103264.33333333333, ans=0.0 +2024-08-03 11:00:58,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=103301.0, ans=0.0 +2024-08-03 11:01:06,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.84 vs. limit=15.0 +2024-08-03 11:01:07,325 INFO [train.py:1114] (2/4) Epoch 8, batch 2700, loss[loss=0.2609, simple_loss=0.3374, pruned_loss=0.09224, over 13568.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3065, pruned_loss=0.07623, over 2636816.21 frames. ], batch size: 40, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:01:17,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=103374.33333333333, ans=0.125 +2024-08-03 11:01:30,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103411.0, ans=0.1 +2024-08-03 11:01:39,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103447.66666666667, ans=0.1 +2024-08-03 11:01:44,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103484.33333333333, ans=0.1 +2024-08-03 11:01:51,074 INFO [train.py:1114] (2/4) Epoch 8, batch 2750, loss[loss=0.1931, simple_loss=0.2697, pruned_loss=0.05825, over 13326.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3053, pruned_loss=0.07582, over 2635191.57 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:01:53,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=103521.0, ans=0.125 +2024-08-03 11:02:02,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=103557.66666666667, ans=10.0 +2024-08-03 11:02:07,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. 
limit=15.0 +2024-08-03 11:02:12,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=103594.33333333333, ans=0.125 +2024-08-03 11:02:13,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103594.33333333333, ans=0.1 +2024-08-03 11:02:18,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.453e+01 1.294e+02 1.512e+02 1.970e+02 3.598e+02, threshold=3.023e+02, percent-clipped=4.0 +2024-08-03 11:02:37,584 INFO [train.py:1114] (2/4) Epoch 8, batch 2800, loss[loss=0.2952, simple_loss=0.3602, pruned_loss=0.115, over 8915.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3055, pruned_loss=0.07609, over 2626293.46 frames. ], batch size: 96, lr: 1.52e-02, grad_scale: 32.0 +2024-08-03 11:03:01,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=16.15 vs. limit=15.0 +2024-08-03 11:03:02,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=103777.66666666667, ans=0.2 +2024-08-03 11:03:02,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=103777.66666666667, ans=0.2 +2024-08-03 11:03:08,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=103814.33333333333, ans=0.2 +2024-08-03 11:03:15,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103851.0, ans=0.125 +2024-08-03 11:03:20,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.95 vs. limit=10.0 +2024-08-03 11:03:22,211 INFO [train.py:1114] (2/4) Epoch 8, batch 2850, loss[loss=0.2092, simple_loss=0.29, pruned_loss=0.06422, over 13363.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3059, pruned_loss=0.07594, over 2620943.15 frames. 
], batch size: 35, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:03:29,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=103887.66666666667, ans=0.125 +2024-08-03 11:03:30,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=103924.33333333333, ans=0.125 +2024-08-03 11:03:36,079 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:03:38,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=103961.0, ans=0.0 +2024-08-03 11:03:47,796 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.266e+02 1.558e+02 2.014e+02 3.574e+02, threshold=3.117e+02, percent-clipped=3.0 +2024-08-03 11:03:48,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=103997.66666666667, ans=0.025 +2024-08-03 11:03:49,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=103997.66666666667, ans=0.025 +2024-08-03 11:03:55,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=12.0 +2024-08-03 11:04:02,698 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:04:05,041 INFO [train.py:1114] (2/4) Epoch 8, batch 2900, loss[loss=0.213, simple_loss=0.2903, pruned_loss=0.06785, over 13360.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3061, pruned_loss=0.07534, over 2631751.26 frames. ], batch size: 36, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:04:24,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.23 vs. limit=15.0 +2024-08-03 11:04:28,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=104144.33333333333, ans=0.015 +2024-08-03 11:04:47,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=104217.66666666667, ans=10.0 +2024-08-03 11:04:48,495 INFO [train.py:1114] (2/4) Epoch 8, batch 2950, loss[loss=0.2052, simple_loss=0.2893, pruned_loss=0.06058, over 13335.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3044, pruned_loss=0.07478, over 2630603.24 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:04:54,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.33 vs. 
limit=15.0 +2024-08-03 11:04:56,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=104291.0, ans=0.0 +2024-08-03 11:04:58,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=104291.0, ans=0.09899494936611666 +2024-08-03 11:05:04,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104291.0, ans=0.0 +2024-08-03 11:05:12,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104327.66666666667, ans=0.125 +2024-08-03 11:05:14,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=104364.33333333333, ans=0.0 +2024-08-03 11:05:14,601 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.150e+02 1.316e+02 1.628e+02 4.465e+02, threshold=2.631e+02, percent-clipped=1.0 +2024-08-03 11:05:31,937 INFO [train.py:1114] (2/4) Epoch 8, batch 3000, loss[loss=0.2056, simple_loss=0.2823, pruned_loss=0.06447, over 13533.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3051, pruned_loss=0.0754, over 2629603.34 frames. ], batch size: 37, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:05:31,938 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 11:05:37,332 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6976, 2.8880, 2.6547, 2.6990], device='cuda:2') +2024-08-03 11:05:42,233 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.1886, simple_loss=0.2887, pruned_loss=0.04428, over 944034.00 frames. +2024-08-03 11:05:42,233 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 11:05:46,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=104437.66666666667, ans=10.0 +2024-08-03 11:05:46,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.76 vs. limit=10.0 +2024-08-03 11:06:01,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=15.0 +2024-08-03 11:06:03,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=12.0 +2024-08-03 11:06:08,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104547.66666666667, ans=0.1 +2024-08-03 11:06:10,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=104547.66666666667, ans=0.2 +2024-08-03 11:06:17,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104584.33333333333, ans=0.125 +2024-08-03 11:06:25,405 INFO [train.py:1114] (2/4) Epoch 8, batch 3050, loss[loss=0.2067, simple_loss=0.2815, pruned_loss=0.06593, over 13541.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3052, pruned_loss=0.07558, over 2626219.81 frames. 
], batch size: 35, lr: 1.52e-02, grad_scale: 8.0 +2024-08-03 11:06:28,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=104621.0, ans=0.0 +2024-08-03 11:06:54,265 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.678e+01 1.159e+02 1.376e+02 1.941e+02 3.361e+02, threshold=2.751e+02, percent-clipped=3.0 +2024-08-03 11:07:10,693 INFO [train.py:1114] (2/4) Epoch 8, batch 3100, loss[loss=0.2256, simple_loss=0.3069, pruned_loss=0.07214, over 13383.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.305, pruned_loss=0.07519, over 2626850.62 frames. ], batch size: 46, lr: 1.52e-02, grad_scale: 8.0 +2024-08-03 11:07:27,318 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:07:41,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=104914.33333333333, ans=0.0 +2024-08-03 11:07:44,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104951.0, ans=0.125 +2024-08-03 11:07:53,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.20 vs. limit=10.0 +2024-08-03 11:07:53,831 INFO [train.py:1114] (2/4) Epoch 8, batch 3150, loss[loss=0.2362, simple_loss=0.3129, pruned_loss=0.07977, over 13311.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3053, pruned_loss=0.07526, over 2628595.54 frames. ], batch size: 49, lr: 1.51e-02, grad_scale: 8.0 +2024-08-03 11:08:04,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=105024.33333333333, ans=0.2 +2024-08-03 11:08:16,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=105061.0, ans=0.05 +2024-08-03 11:08:20,101 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.438e+02 1.843e+02 2.666e+02 3.777e+02, threshold=3.687e+02, percent-clipped=21.0 +2024-08-03 11:08:22,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.99 vs. limit=15.0 +2024-08-03 11:08:24,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=105097.66666666667, ans=0.0 +2024-08-03 11:08:36,210 INFO [train.py:1114] (2/4) Epoch 8, batch 3200, loss[loss=0.2274, simple_loss=0.3102, pruned_loss=0.07235, over 13538.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3046, pruned_loss=0.07479, over 2634814.31 frames. 
], batch size: 37, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:08:41,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105171.0, ans=0.125 +2024-08-03 11:08:45,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105207.66666666667, ans=0.125 +2024-08-03 11:08:54,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=105244.33333333333, ans=0.07 +2024-08-03 11:08:55,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105244.33333333333, ans=0.1 +2024-08-03 11:08:58,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105244.33333333333, ans=0.1 +2024-08-03 11:09:05,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=105281.0, ans=0.025 +2024-08-03 11:09:08,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105281.0, ans=0.125 +2024-08-03 11:09:14,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.40 vs. limit=15.0 +2024-08-03 11:09:19,472 INFO [train.py:1114] (2/4) Epoch 8, batch 3250, loss[loss=0.2393, simple_loss=0.3274, pruned_loss=0.07561, over 13395.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.305, pruned_loss=0.07469, over 2639169.35 frames. ], batch size: 38, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:09:21,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=105354.33333333333, ans=0.015 +2024-08-03 11:09:30,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=105391.0, ans=0.0 +2024-08-03 11:09:37,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105427.66666666667, ans=0.1 +2024-08-03 11:09:45,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105464.33333333333, ans=0.1 +2024-08-03 11:09:47,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.799e+01 1.281e+02 1.591e+02 1.983e+02 2.904e+02, threshold=3.182e+02, percent-clipped=0.0 +2024-08-03 11:09:50,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.49 vs. limit=15.0 +2024-08-03 11:09:57,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=105501.0, ans=0.0 +2024-08-03 11:10:03,916 INFO [train.py:1114] (2/4) Epoch 8, batch 3300, loss[loss=0.2389, simple_loss=0.3254, pruned_loss=0.07619, over 12834.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3037, pruned_loss=0.07414, over 2640045.12 frames. 
], batch size: 52, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:10:09,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=105537.66666666667, ans=0.125 +2024-08-03 11:10:10,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=105537.66666666667, ans=0.2 +2024-08-03 11:10:16,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=105574.33333333333, ans=0.2 +2024-08-03 11:10:44,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.87 vs. limit=15.0 +2024-08-03 11:10:51,475 INFO [train.py:1114] (2/4) Epoch 8, batch 3350, loss[loss=0.2746, simple_loss=0.3447, pruned_loss=0.1022, over 12976.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3056, pruned_loss=0.07521, over 2629513.63 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:11:02,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.58 vs. limit=12.0 +2024-08-03 11:11:05,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=105757.66666666667, ans=0.125 +2024-08-03 11:11:20,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105757.66666666667, ans=0.125 +2024-08-03 11:11:42,001 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.800e+01 1.191e+02 1.380e+02 1.620e+02 2.699e+02, threshold=2.759e+02, percent-clipped=0.0 +2024-08-03 11:11:48,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=105831.0, ans=0.0 +2024-08-03 11:12:06,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=105867.66666666667, ans=0.2 +2024-08-03 11:12:10,265 INFO [train.py:1114] (2/4) Epoch 8, batch 3400, loss[loss=0.1967, simple_loss=0.2695, pruned_loss=0.06201, over 13523.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3046, pruned_loss=0.07482, over 2624429.35 frames. ], batch size: 31, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:12:18,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=105904.33333333333, ans=0.0 +2024-08-03 11:12:39,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=105977.66666666667, ans=0.125 +2024-08-03 11:12:46,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.66 vs. 
limit=15.0 +2024-08-03 11:12:48,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106014.33333333333, ans=0.125 +2024-08-03 11:12:49,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=106014.33333333333, ans=0.04949747468305833 +2024-08-03 11:12:56,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=106051.0, ans=0.0 +2024-08-03 11:13:03,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=106051.0, ans=0.5 +2024-08-03 11:13:06,195 INFO [train.py:1114] (2/4) Epoch 8, batch 3450, loss[loss=0.2751, simple_loss=0.3461, pruned_loss=0.102, over 12899.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3055, pruned_loss=0.07544, over 2628118.72 frames. ], batch size: 52, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:13:10,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106087.66666666667, ans=0.0 +2024-08-03 11:13:20,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106124.33333333333, ans=0.125 +2024-08-03 11:13:29,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=106161.0, ans=0.025 +2024-08-03 11:13:35,948 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.667e+01 1.192e+02 1.350e+02 1.586e+02 3.469e+02, threshold=2.701e+02, percent-clipped=1.0 +2024-08-03 11:13:54,438 INFO [train.py:1114] (2/4) Epoch 8, batch 3500, loss[loss=0.211, simple_loss=0.2881, pruned_loss=0.06696, over 13539.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3044, pruned_loss=0.07511, over 2629908.63 frames. ], batch size: 34, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:14:03,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=106307.66666666667, ans=0.0 +2024-08-03 11:14:13,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106344.33333333333, ans=0.0 +2024-08-03 11:14:13,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106344.33333333333, ans=0.1 +2024-08-03 11:14:14,170 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:14:20,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. 
limit=15.0 +2024-08-03 11:14:27,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106381.0, ans=0.125 +2024-08-03 11:14:28,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106417.66666666667, ans=0.125 +2024-08-03 11:14:40,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106417.66666666667, ans=0.125 +2024-08-03 11:14:42,317 INFO [train.py:1114] (2/4) Epoch 8, batch 3550, loss[loss=0.2459, simple_loss=0.3163, pruned_loss=0.0877, over 12596.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3069, pruned_loss=0.07653, over 2628923.11 frames. ], batch size: 58, lr: 1.50e-02, grad_scale: 16.0 +2024-08-03 11:14:45,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106454.33333333333, ans=0.125 +2024-08-03 11:15:16,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=106527.66666666667, ans=0.0 +2024-08-03 11:15:21,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=106527.66666666667, ans=0.2 +2024-08-03 11:15:23,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.39 vs. limit=10.0 +2024-08-03 11:15:26,478 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.312e+02 1.471e+02 1.710e+02 3.286e+02, threshold=2.943e+02, percent-clipped=3.0 +2024-08-03 11:15:34,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=106601.0, ans=0.0 +2024-08-03 11:15:48,850 INFO [train.py:1114] (2/4) Epoch 8, batch 3600, loss[loss=0.2769, simple_loss=0.3358, pruned_loss=0.109, over 9603.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3125, pruned_loss=0.08186, over 2489560.87 frames. ], batch size: 96, lr: 1.50e-02, grad_scale: 32.0 +2024-08-03 11:15:53,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106637.66666666667, ans=0.125 +2024-08-03 11:15:56,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.22 vs. limit=22.5 +2024-08-03 11:16:10,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106711.0, ans=0.1 +2024-08-03 11:17:27,621 INFO [train.py:1114] (2/4) Epoch 9, batch 0, loss[loss=0.2027, simple_loss=0.2772, pruned_loss=0.06407, over 13340.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2772, pruned_loss=0.06407, over 13340.00 frames. ], batch size: 33, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:17:27,622 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 11:17:39,634 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.1935, simple_loss=0.2948, pruned_loss=0.04614, over 944034.00 frames. 
+2024-08-03 11:17:39,635 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 11:17:42,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106788.0, ans=0.1 +2024-08-03 11:17:44,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106788.0, ans=0.125 +2024-08-03 11:17:48,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=106824.66666666667, ans=0.2 +2024-08-03 11:17:53,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=106824.66666666667, ans=0.0 +2024-08-03 11:17:58,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=106861.33333333333, ans=0.125 +2024-08-03 11:18:11,268 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:18:18,801 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.291e+02 1.434e+02 1.801e+02 3.339e+02, threshold=2.868e+02, percent-clipped=2.0 +2024-08-03 11:18:19,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=106934.66666666667, ans=6.0 +2024-08-03 11:18:21,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=106934.66666666667, ans=0.025 +2024-08-03 11:18:22,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.29 vs. limit=15.0 +2024-08-03 11:18:27,050 INFO [train.py:1114] (2/4) Epoch 9, batch 50, loss[loss=0.2236, simple_loss=0.2938, pruned_loss=0.07664, over 13442.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.306, pruned_loss=0.07635, over 576606.93 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:18:35,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=106971.33333333333, ans=0.2 +2024-08-03 11:18:38,269 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:18:44,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=107008.0, ans=0.125 +2024-08-03 11:18:51,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107044.66666666667, ans=0.125 +2024-08-03 11:18:59,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.61 vs. limit=15.0 +2024-08-03 11:19:16,442 INFO [train.py:1114] (2/4) Epoch 9, batch 100, loss[loss=0.2115, simple_loss=0.2846, pruned_loss=0.0692, over 13544.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3047, pruned_loss=0.07374, over 1024382.22 frames. 
], batch size: 35, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:19:21,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=107154.66666666667, ans=0.025 +2024-08-03 11:19:21,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=107154.66666666667, ans=0.0 +2024-08-03 11:19:47,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=107264.66666666667, ans=0.0 +2024-08-03 11:19:53,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107301.33333333333, ans=0.125 +2024-08-03 11:19:54,655 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.233e+02 1.427e+02 1.918e+02 3.132e+02, threshold=2.853e+02, percent-clipped=1.0 +2024-08-03 11:19:59,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=107301.33333333333, ans=0.05 +2024-08-03 11:20:04,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0 +2024-08-03 11:20:04,704 INFO [train.py:1114] (2/4) Epoch 9, batch 150, loss[loss=0.1869, simple_loss=0.2613, pruned_loss=0.05628, over 13411.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3023, pruned_loss=0.07219, over 1386172.81 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:20:07,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=107338.0, ans=0.0 +2024-08-03 11:20:14,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=107374.66666666667, ans=0.0 +2024-08-03 11:20:22,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.14 vs. limit=22.5 +2024-08-03 11:20:23,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=107411.33333333333, ans=0.125 +2024-08-03 11:20:33,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.65 vs. limit=15.0 +2024-08-03 11:20:46,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107484.66666666667, ans=0.1 +2024-08-03 11:20:55,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107521.33333333333, ans=0.125 +2024-08-03 11:20:56,477 INFO [train.py:1114] (2/4) Epoch 9, batch 200, loss[loss=0.2311, simple_loss=0.3167, pruned_loss=0.0727, over 12409.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3017, pruned_loss=0.07243, over 1664837.41 frames. ], batch size: 58, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:20:59,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=107521.33333333333, ans=0.125 +2024-08-03 11:21:10,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.60 vs. 
limit=15.0 +2024-08-03 11:21:16,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=107558.0, ans=0.0 +2024-08-03 11:21:27,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107631.33333333333, ans=0.125 +2024-08-03 11:21:39,289 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.622e+01 1.326e+02 1.622e+02 2.251e+02 3.498e+02, threshold=3.245e+02, percent-clipped=9.0 +2024-08-03 11:21:45,020 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.83 vs. limit=8.0 +2024-08-03 11:21:47,901 INFO [train.py:1114] (2/4) Epoch 9, batch 250, loss[loss=0.2531, simple_loss=0.3326, pruned_loss=0.08681, over 13308.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3017, pruned_loss=0.07264, over 1884143.47 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:21:54,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.48 vs. limit=22.5 +2024-08-03 11:21:55,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107704.66666666667, ans=0.125 +2024-08-03 11:22:20,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107814.66666666667, ans=0.1 +2024-08-03 11:22:24,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107814.66666666667, ans=0.125 +2024-08-03 11:22:35,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=107851.33333333333, ans=0.2 +2024-08-03 11:22:35,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107851.33333333333, ans=0.125 +2024-08-03 11:22:35,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=107851.33333333333, ans=0.0 +2024-08-03 11:22:38,747 INFO [train.py:1114] (2/4) Epoch 9, batch 300, loss[loss=0.2311, simple_loss=0.3085, pruned_loss=0.07684, over 13460.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3017, pruned_loss=0.07274, over 2051375.56 frames. 
], batch size: 42, lr: 1.42e-02, grad_scale: 32.0 +2024-08-03 11:22:51,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=107924.66666666667, ans=0.035 +2024-08-03 11:22:56,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=107924.66666666667, ans=0.0 +2024-08-03 11:23:12,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=107998.0, ans=0.125 +2024-08-03 11:23:18,268 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.922e+01 1.191e+02 1.365e+02 1.684e+02 2.533e+02, threshold=2.730e+02, percent-clipped=0.0 +2024-08-03 11:23:26,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=108034.66666666667, ans=10.0 +2024-08-03 11:23:32,779 INFO [train.py:1114] (2/4) Epoch 9, batch 350, loss[loss=0.185, simple_loss=0.2673, pruned_loss=0.05129, over 13597.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3016, pruned_loss=0.07225, over 2182772.05 frames. ], batch size: 33, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:23:42,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=108108.0, ans=0.125 +2024-08-03 11:24:41,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108218.0, ans=0.1 +2024-08-03 11:24:44,570 INFO [train.py:1114] (2/4) Epoch 9, batch 400, loss[loss=0.2023, simple_loss=0.2891, pruned_loss=0.05772, over 13376.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2999, pruned_loss=0.07123, over 2287518.51 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 32.0 +2024-08-03 11:25:10,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=108254.66666666667, ans=0.0 +2024-08-03 11:25:12,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108254.66666666667, ans=0.1 +2024-08-03 11:25:46,044 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.902e+01 1.178e+02 1.332e+02 1.607e+02 2.662e+02, threshold=2.664e+02, percent-clipped=0.0 +2024-08-03 11:25:49,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=108401.33333333333, ans=0.0 +2024-08-03 11:25:50,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108401.33333333333, ans=0.125 +2024-08-03 11:26:00,583 INFO [train.py:1114] (2/4) Epoch 9, batch 450, loss[loss=0.2177, simple_loss=0.3079, pruned_loss=0.06379, over 13548.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3009, pruned_loss=0.07168, over 2360921.15 frames. 
+2024-08-03 11:26:02,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=108438.0, ans=0.025
+2024-08-03 11:26:03,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=108438.0, ans=0.0
+2024-08-03 11:26:03,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=108438.0, ans=0.5
+2024-08-03 11:26:06,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108438.0, ans=0.1
+2024-08-03 11:26:13,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.26 vs. limit=15.0
+2024-08-03 11:26:20,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108511.33333333333, ans=0.125
+2024-08-03 11:26:32,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=108548.0, ans=0.0
+2024-08-03 11:26:32,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0
+2024-08-03 11:26:46,757 INFO [train.py:1114] (2/4) Epoch 9, batch 500, loss[loss=0.2245, simple_loss=0.3103, pruned_loss=0.06935, over 13431.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3003, pruned_loss=0.07131, over 2426051.02 frames. ], batch size: 43, lr: 1.41e-02, grad_scale: 32.0
+2024-08-03 11:27:00,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=108658.0, ans=0.025
+2024-08-03 11:27:01,707 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 11:27:16,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=108694.66666666667, ans=0.025
+2024-08-03 11:27:18,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=12.0
+2024-08-03 11:27:30,443 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.278e+01 1.224e+02 1.411e+02 1.818e+02 3.084e+02, threshold=2.822e+02, percent-clipped=2.0
+2024-08-03 11:27:37,713 INFO [train.py:1114] (2/4) Epoch 9, batch 550, loss[loss=0.2215, simple_loss=0.3058, pruned_loss=0.06861, over 13022.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3005, pruned_loss=0.0717, over 2468249.19 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 16.0
+2024-08-03 11:28:12,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108878.0, ans=0.1
+2024-08-03 11:28:29,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=108914.66666666667, ans=0.125
+2024-08-03 11:28:32,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=108914.66666666667, ans=10.0
+2024-08-03 11:28:33,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.58 vs. limit=15.0
+2024-08-03 11:28:43,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=108988.0, ans=0.1
+2024-08-03 11:28:43,874 INFO [train.py:1114] (2/4) Epoch 9, batch 600, loss[loss=0.2407, simple_loss=0.3274, pruned_loss=0.07705, over 13291.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3001, pruned_loss=0.07107, over 2508710.62 frames. ], batch size: 46, lr: 1.41e-02, grad_scale: 16.0
+2024-08-03 11:28:46,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.38 vs. limit=22.5
+2024-08-03 11:28:53,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=109024.66666666667, ans=0.025
+2024-08-03 11:29:21,674 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.197e+02 1.407e+02 1.902e+02 4.020e+02, threshold=2.813e+02, percent-clipped=3.0
+2024-08-03 11:29:28,825 INFO [train.py:1114] (2/4) Epoch 9, batch 650, loss[loss=0.2228, simple_loss=0.3012, pruned_loss=0.0722, over 13540.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2993, pruned_loss=0.07024, over 2543608.37 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 16.0
+2024-08-03 11:29:28,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=109171.33333333333, ans=0.025
+2024-08-03 11:29:39,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109208.0, ans=0.1
+2024-08-03 11:29:49,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=109244.66666666667, ans=0.0
+2024-08-03 11:29:49,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.10 vs. limit=22.5
+2024-08-03 11:30:00,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=109281.33333333333, ans=0.0
+2024-08-03 11:30:04,852 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 11:30:13,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=109318.0, ans=0.125
+2024-08-03 11:30:15,754 INFO [train.py:1114] (2/4) Epoch 9, batch 700, loss[loss=0.2488, simple_loss=0.3156, pruned_loss=0.09098, over 13527.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3005, pruned_loss=0.07085, over 2566177.48 frames. ], batch size: 35, lr: 1.41e-02, grad_scale: 8.0
+2024-08-03 11:30:19,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=109354.66666666667, ans=0.025
+2024-08-03 11:30:26,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.03 vs. limit=15.0
+2024-08-03 11:30:26,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.39 vs. limit=22.5
+2024-08-03 11:30:56,879 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.372e+01 1.239e+02 1.392e+02 1.880e+02 3.301e+02, threshold=2.784e+02, percent-clipped=6.0
+2024-08-03 11:31:03,166 INFO [train.py:1114] (2/4) Epoch 9, batch 750, loss[loss=0.2223, simple_loss=0.3056, pruned_loss=0.0695, over 13369.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3003, pruned_loss=0.07093, over 2583930.53 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 8.0
+2024-08-03 11:31:17,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=109574.66666666667, ans=0.2
+2024-08-03 11:31:22,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=109574.66666666667, ans=0.09899494936611666
+2024-08-03 11:31:25,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=109611.33333333333, ans=0.125
+2024-08-03 11:31:48,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=109684.66666666667, ans=0.2
+2024-08-03 11:31:53,675 INFO [train.py:1114] (2/4) Epoch 9, batch 800, loss[loss=0.206, simple_loss=0.2782, pruned_loss=0.06687, over 13327.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3004, pruned_loss=0.07092, over 2597746.26 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:31:55,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=109721.33333333333, ans=0.125
+2024-08-03 11:31:59,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=109721.33333333333, ans=0.025
+2024-08-03 11:32:05,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0
+2024-08-03 11:32:05,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=109758.0, ans=0.2
+2024-08-03 11:32:12,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=109758.0, ans=0.025
+2024-08-03 11:32:22,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=109831.33333333333, ans=0.125
+2024-08-03 11:32:34,399 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.676e+01 1.153e+02 1.323e+02 1.787e+02 2.891e+02, threshold=2.646e+02, percent-clipped=1.0
+2024-08-03 11:32:40,793 INFO [train.py:1114] (2/4) Epoch 9, batch 850, loss[loss=0.2382, simple_loss=0.3285, pruned_loss=0.074, over 13329.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3, pruned_loss=0.07103, over 2610812.60 frames. ], batch size: 40, lr: 1.40e-02, grad_scale: 16.0
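The recurring `optim.py` WARNING lines report the min/25%/median/75%/max of recently observed gradient norms, the clipping threshold in force, and the percentage of batches clipped. Here is a sketch of this kind of adaptive clipping diagnostic, assuming the threshold is recomputed as median × `Clipping_scale` over a sliding window; this is a simplification, and the optimizer's actual bookkeeping may differ.

```python
# Sketch of adaptive gradient clipping with quartile diagnostics, assuming
# threshold = median(recent grad norms) * clipping_scale. Illustrative only.
import collections
import torch

class GradNormClipper:
    def __init__(self, clipping_scale=2.0, window=1000):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=window)
        self.clipped = 0
        self.seen = 0

    def clip_(self, params):
        params = [p for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = q[2].item() * self.clipping_scale
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        print(f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
              + " ".join(f"{v:.3e}" for v in q.tolist())
              + f", threshold={threshold:.3e}, "
              + f"percent-clipped={100.0 * self.clipped / self.seen}")
```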
+2024-08-03 11:32:43,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=109904.66666666667, ans=0.025
+2024-08-03 11:32:48,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=109904.66666666667, ans=0.0
+2024-08-03 11:32:56,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109941.33333333333, ans=0.2
+2024-08-03 11:33:07,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110014.66666666667, ans=0.125
+2024-08-03 11:33:17,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=110051.33333333333, ans=0.125
+2024-08-03 11:33:26,731 INFO [train.py:1114] (2/4) Epoch 9, batch 900, loss[loss=0.2163, simple_loss=0.291, pruned_loss=0.07082, over 13352.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3005, pruned_loss=0.07113, over 2613250.25 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:33:41,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=110124.66666666667, ans=0.0
+2024-08-03 11:33:41,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=110124.66666666667, ans=0.0
+2024-08-03 11:33:44,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=110161.33333333333, ans=0.125
+2024-08-03 11:33:45,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=110161.33333333333, ans=0.125
+2024-08-03 11:33:47,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=110161.33333333333, ans=0.0
+2024-08-03 11:34:05,775 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.134e+02 1.308e+02 1.698e+02 3.183e+02, threshold=2.616e+02, percent-clipped=1.0
+2024-08-03 11:38:02,442 INFO [train.py:1114] (2/4) Epoch 9, batch 950, loss[loss=0.1983, simple_loss=0.2758, pruned_loss=0.06043, over 13543.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3012, pruned_loss=0.07191, over 2613835.51 frames. ], batch size: 34, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:38:02,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.41 vs. limit=15.0
+2024-08-03 11:46:15,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=110271.33333333333, ans=0.125
+2024-08-03 11:46:16,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=110271.33333333333, ans=0.2
+2024-08-03 11:46:17,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=110271.33333333333, ans=0.125
+2024-08-03 11:47:42,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110308.0, ans=0.125
+2024-08-03 11:55:50,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110344.66666666667, ans=0.0
+2024-08-03 11:57:11,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110381.33333333333, ans=0.125
+2024-08-03 11:57:16,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=110418.0, ans=0.05
+2024-08-03 11:57:24,450 INFO [train.py:1114] (2/4) Epoch 9, batch 1000, loss[loss=0.2262, simple_loss=0.3087, pruned_loss=0.07185, over 13365.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3024, pruned_loss=0.07242, over 2611943.51 frames. ], batch size: 35, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:58:03,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.54 vs. limit=15.0
+2024-08-03 11:58:14,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.69 vs. limit=15.0
+2024-08-03 11:58:16,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=110491.33333333333, ans=0.125
+2024-08-03 11:59:10,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=110564.66666666667, ans=0.0
+2024-08-03 12:02:13,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=110601.33333333333, ans=0.125
+2024-08-03 12:02:14,655 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.417e+01 1.320e+02 1.683e+02 2.294e+02 6.382e+02, threshold=3.366e+02, percent-clipped=18.0
+2024-08-03 12:02:46,937 INFO [train.py:1114] (2/4) Epoch 9, batch 1050, loss[loss=0.2044, simple_loss=0.2845, pruned_loss=0.06216, over 13581.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3011, pruned_loss=0.07172, over 2616591.59 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 12:02:47,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.79 vs. limit=10.0
+2024-08-03 12:03:13,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0
+2024-08-03 12:03:36,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=110784.66666666667, ans=0.025
+2024-08-03 12:03:41,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=110784.66666666667, ans=0.025
+2024-08-03 12:03:43,572 INFO [train.py:1114] (2/4) Epoch 9, batch 1100, loss[loss=0.2095, simple_loss=0.2905, pruned_loss=0.0643, over 13573.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3007, pruned_loss=0.0714, over 2620334.30 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 12:03:59,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=110858.0, ans=0.2
+2024-08-03 12:04:02,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=110894.66666666667, ans=0.0
+2024-08-03 12:04:06,400 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.03 vs. limit=6.0
+2024-08-03 12:04:20,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110968.0, ans=0.0
+2024-08-03 12:04:23,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.596e+01 1.248e+02 1.447e+02 1.784e+02 2.947e+02, threshold=2.893e+02, percent-clipped=0.0
+2024-08-03 12:04:28,532 INFO [train.py:1114] (2/4) Epoch 9, batch 1150, loss[loss=0.1802, simple_loss=0.2705, pruned_loss=0.04496, over 13571.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3014, pruned_loss=0.07201, over 2620122.35 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 8.0
+2024-08-03 12:04:38,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111041.33333333333, ans=0.1
+2024-08-03 12:04:40,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111041.33333333333, ans=0.1
+2024-08-03 12:04:47,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.54 vs. limit=15.0
+2024-08-03 12:04:56,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=111114.66666666667, ans=0.2
+2024-08-03 12:05:00,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=111114.66666666667, ans=0.07
+2024-08-03 12:05:01,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.39 vs. limit=22.5
+2024-08-03 12:05:17,650 INFO [train.py:1114] (2/4) Epoch 9, batch 1200, loss[loss=0.247, simple_loss=0.3228, pruned_loss=0.08554, over 13577.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.302, pruned_loss=0.07207, over 2616838.24 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 12:05:30,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=111224.66666666667, ans=0.2
+2024-08-03 12:05:30,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=111224.66666666667, ans=0.0
+2024-08-03 12:05:33,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111224.66666666667, ans=0.1
+2024-08-03 12:05:34,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=111224.66666666667, ans=0.125
+2024-08-03 12:05:34,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=111224.66666666667, ans=0.125
+2024-08-03 12:05:46,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111298.0, ans=0.125
+2024-08-03 12:05:50,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=111298.0, ans=0.0
+2024-08-03 12:06:01,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.67 vs. limit=10.0
+2024-08-03 12:06:01,825 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.516e+01 1.186e+02 1.386e+02 1.604e+02 2.506e+02, threshold=2.772e+02, percent-clipped=0.0
+2024-08-03 12:06:02,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=111334.66666666667, ans=0.0
+2024-08-03 12:06:03,194 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.96 vs. limit=15.0
+2024-08-03 12:06:06,349 INFO [train.py:1114] (2/4) Epoch 9, batch 1250, loss[loss=0.2245, simple_loss=0.3055, pruned_loss=0.07173, over 13435.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3015, pruned_loss=0.07136, over 2628076.63 frames. ], batch size: 42, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:06:50,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=111518.0, ans=0.035
+2024-08-03 12:06:50,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111518.0, ans=0.1
+2024-08-03 12:06:53,532 INFO [train.py:1114] (2/4) Epoch 9, batch 1300, loss[loss=0.2129, simple_loss=0.2944, pruned_loss=0.06567, over 12862.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3001, pruned_loss=0.07069, over 2631187.99 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:06:55,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=111554.66666666667, ans=0.125
+2024-08-03 12:06:57,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=111554.66666666667, ans=0.125
+2024-08-03 12:07:02,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.57 vs. limit=10.0
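The `Whitening` lines above compare a per-module activation statistic against a limit; a value near 1.0 indicates isotropic ("white") activations, and larger values indicate a spread-out covariance spectrum, so for example `metric=4.57 vs. limit=10.0` is within tolerance. A toy illustration of one such metric follows, assuming it is the ratio of the mean squared eigenvalue of the activation covariance to the squared mean eigenvalue; this is an illustration of the logged quantity, not necessarily the exact formula used here.

```python
# Toy whitening metric: mean(eigenvalue^2) / mean(eigenvalue)^2 of the
# activation covariance. Equals 1.0 for an isotropic covariance and grows
# with eigenvalue spread. Illustrative, not necessarily the exact formula.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels)
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]               # (C, C) covariance
    mean_eig = cov.diagonal().mean()           # trace / C = mean eigenvalue
    mean_eig_sq = (cov ** 2).sum() / cov.shape[0]  # trace(cov^2) / C
    return (mean_eig_sq / (mean_eig ** 2 + 1e-20)).item()

white = torch.randn(10000, 192)
print(whitening_metric(white))                                  # close to 1.0
print(whitening_metric(white * torch.linspace(0.1, 3.0, 192)))  # clearly larger
```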
+2024-08-03 12:07:25,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111664.66666666667, ans=0.1
+2024-08-03 12:07:49,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=111701.33333333333, ans=0.2
+2024-08-03 12:07:52,280 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.616e+01 1.204e+02 1.402e+02 1.778e+02 3.607e+02, threshold=2.805e+02, percent-clipped=3.0
+2024-08-03 12:07:53,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=111701.33333333333, ans=0.125
+2024-08-03 12:07:56,929 INFO [train.py:1114] (2/4) Epoch 9, batch 1350, loss[loss=0.247, simple_loss=0.3195, pruned_loss=0.08727, over 13544.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2996, pruned_loss=0.07042, over 2638452.49 frames. ], batch size: 37, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:07:57,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.38 vs. limit=15.0
+2024-08-03 12:08:27,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=15.0
+2024-08-03 12:08:31,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111848.0, ans=0.125
+2024-08-03 12:08:45,490 INFO [train.py:1114] (2/4) Epoch 9, batch 1400, loss[loss=0.2114, simple_loss=0.281, pruned_loss=0.07087, over 13228.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2993, pruned_loss=0.07048, over 2642129.59 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:08:45,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=111921.33333333333, ans=0.05
+2024-08-03 12:08:46,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=111921.33333333333, ans=0.025
+2024-08-03 12:08:47,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=111921.33333333333, ans=0.125
+2024-08-03 12:08:52,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=111921.33333333333, ans=0.125
+2024-08-03 12:09:01,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=111958.0, ans=15.0
+2024-08-03 12:09:06,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=111994.66666666667, ans=0.0
+2024-08-03 12:09:18,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=112031.33333333333, ans=0.2
+2024-08-03 12:09:25,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=112068.0, ans=0.1
+2024-08-03 12:10:29,033 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.189e+02 1.393e+02 1.717e+02 2.790e+02, threshold=2.787e+02, percent-clipped=0.0
+2024-08-03 12:10:45,229 INFO [train.py:1114] (2/4) Epoch 9, batch 1450, loss[loss=0.2459, simple_loss=0.3331, pruned_loss=0.07939, over 13453.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3, pruned_loss=0.07062, over 2640984.54 frames. ], batch size: 43, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:10:50,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=112104.66666666667, ans=0.0
+2024-08-03 12:10:55,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0
+2024-08-03 12:11:15,654 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.25 vs. limit=10.0
+2024-08-03 12:11:54,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.41 vs. limit=22.5
+2024-08-03 12:11:59,353 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=15.0
+2024-08-03 12:12:29,514 INFO [train.py:1114] (2/4) Epoch 9, batch 1500, loss[loss=0.2316, simple_loss=0.3168, pruned_loss=0.07323, over 13391.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3004, pruned_loss=0.07087, over 2640175.48 frames. ], batch size: 39, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:12:36,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112288.0, ans=0.1
+2024-08-03 12:12:40,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.97 vs. limit=22.5
+2024-08-03 12:12:52,756 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:13:10,793 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.784e+01 1.196e+02 1.437e+02 1.780e+02 2.962e+02, threshold=2.875e+02, percent-clipped=1.0
+2024-08-03 12:13:15,274 INFO [train.py:1114] (2/4) Epoch 9, batch 1550, loss[loss=0.2025, simple_loss=0.293, pruned_loss=0.05599, over 13406.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3006, pruned_loss=0.07117, over 2629098.81 frames. ], batch size: 41, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:13:30,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=112508.0, ans=0.05
+2024-08-03 12:13:31,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0
+2024-08-03 12:13:33,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112544.66666666667, ans=0.125
+2024-08-03 12:13:33,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=112544.66666666667, ans=0.125
+2024-08-03 12:13:48,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=112581.33333333333, ans=0.0
+2024-08-03 12:13:49,038 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.82 vs. limit=15.0
+2024-08-03 12:14:00,204 INFO [train.py:1114] (2/4) Epoch 9, batch 1600, loss[loss=0.227, simple_loss=0.3096, pruned_loss=0.07223, over 13575.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3001, pruned_loss=0.0711, over 2622882.61 frames. ], batch size: 39, lr: 1.39e-02, grad_scale: 16.0
+2024-08-03 12:14:00,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112654.66666666667, ans=0.1
+2024-08-03 12:14:05,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112654.66666666667, ans=0.125
+2024-08-03 12:14:06,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.08 vs. limit=12.0
+2024-08-03 12:14:16,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=112691.33333333333, ans=10.0
+2024-08-03 12:14:43,177 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.525e+01 1.266e+02 1.505e+02 1.991e+02 3.418e+02, threshold=3.010e+02, percent-clipped=5.0
+2024-08-03 12:14:47,573 INFO [train.py:1114] (2/4) Epoch 9, batch 1650, loss[loss=0.2073, simple_loss=0.3022, pruned_loss=0.05619, over 13313.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3007, pruned_loss=0.0718, over 2620385.75 frames. ], batch size: 40, lr: 1.39e-02, grad_scale: 16.0
+2024-08-03 12:14:53,228 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:14:58,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=112874.66666666667, ans=0.2
+2024-08-03 12:15:04,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112874.66666666667, ans=0.1
+2024-08-03 12:15:11,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=112911.33333333333, ans=0.125
+2024-08-03 12:15:24,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten.whitening_limit, batch_count=112948.0, ans=15.0
+2024-08-03 12:15:33,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0
+2024-08-03 12:15:37,665 INFO [train.py:1114] (2/4) Epoch 9, batch 1700, loss[loss=0.1813, simple_loss=0.2568, pruned_loss=0.05285, over 13252.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3001, pruned_loss=0.07121, over 2629886.79 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 16.0
+2024-08-03 12:15:42,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0
+2024-08-03 12:15:56,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=113094.66666666667, ans=0.125
+2024-08-03 12:16:02,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=113094.66666666667, ans=0.125
+2024-08-03 12:16:03,274 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0
+2024-08-03 12:16:17,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113168.0, ans=0.1
+2024-08-03 12:16:18,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=113168.0, ans=0.125
+2024-08-03 12:16:19,703 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.237e+01 1.197e+02 1.424e+02 1.856e+02 4.679e+02, threshold=2.848e+02, percent-clipped=5.0
+2024-08-03 12:16:23,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.19 vs. limit=15.0
+2024-08-03 12:16:24,264 INFO [train.py:1114] (2/4) Epoch 9, batch 1750, loss[loss=0.199, simple_loss=0.2725, pruned_loss=0.06269, over 13533.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3003, pruned_loss=0.07142, over 2633767.55 frames. ], batch size: 31, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:16:28,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.62 vs. limit=15.0
+2024-08-03 12:16:33,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=113241.33333333333, ans=0.125
+2024-08-03 12:16:50,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=113314.66666666667, ans=0.05
+2024-08-03 12:16:58,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113314.66666666667, ans=0.1
+2024-08-03 12:17:02,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=113351.33333333333, ans=0.2
+2024-08-03 12:17:09,924 INFO [train.py:1114] (2/4) Epoch 9, batch 1800, loss[loss=0.2054, simple_loss=0.2904, pruned_loss=0.06021, over 13555.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3, pruned_loss=0.07107, over 2634788.64 frames. ], batch size: 38, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:17:29,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113461.33333333333, ans=0.1
+2024-08-03 12:17:33,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113461.33333333333, ans=0.125
+2024-08-03 12:17:34,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=113461.33333333333, ans=0.125
+2024-08-03 12:17:50,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=15.0
+2024-08-03 12:17:50,713 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.294e+02 1.753e+02 2.320e+02 3.685e+02, threshold=3.507e+02, percent-clipped=11.0
+2024-08-03 12:17:52,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=113534.66666666667, ans=0.125
+2024-08-03 12:17:55,237 INFO [train.py:1114] (2/4) Epoch 9, batch 1850, loss[loss=0.2231, simple_loss=0.3024, pruned_loss=0.07189, over 13414.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2999, pruned_loss=0.07109, over 2637954.48 frames. ], batch size: 39, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:18:00,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=113571.33333333333, ans=0.0
+2024-08-03 12:18:16,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.16 vs. limit=22.5
+2024-08-03 12:18:23,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113644.66666666667, ans=0.125
+2024-08-03 12:18:43,014 INFO [train.py:1114] (2/4) Epoch 9, batch 1900, loss[loss=0.2151, simple_loss=0.3012, pruned_loss=0.06452, over 13313.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3011, pruned_loss=0.0718, over 2641280.55 frames. ], batch size: 40, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:19:00,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113791.33333333333, ans=0.125
+2024-08-03 12:19:05,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113828.0, ans=0.125
+2024-08-03 12:19:14,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113864.66666666667, ans=0.125
+2024-08-03 12:19:21,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=113901.33333333333, ans=0.0
+2024-08-03 12:19:26,849 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.181e+02 1.342e+02 1.556e+02 3.723e+02, threshold=2.684e+02, percent-clipped=1.0
+2024-08-03 12:19:32,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=113938.0, ans=0.0
+2024-08-03 12:19:33,197 INFO [train.py:1114] (2/4) Epoch 9, batch 1950, loss[loss=0.1949, simple_loss=0.2742, pruned_loss=0.05782, over 13557.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3018, pruned_loss=0.07183, over 2647625.35 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:19:37,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=113938.0, ans=0.125
+2024-08-03 12:19:59,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=114011.33333333333, ans=0.125
+2024-08-03 12:20:02,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114011.33333333333, ans=0.125
+2024-08-03 12:20:10,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=114048.0, ans=0.0
+2024-08-03 12:20:18,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=114084.66666666667, ans=0.125
+2024-08-03 12:20:23,613 INFO [train.py:1114] (2/4) Epoch 9, batch 2000, loss[loss=0.2048, simple_loss=0.2759, pruned_loss=0.06686, over 13554.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.303, pruned_loss=0.07276, over 2637517.88 frames. ], batch size: 31, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:20:23,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=114121.33333333333, ans=0.0
+2024-08-03 12:20:32,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=114158.0, ans=0.05
+2024-08-03 12:20:37,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=114158.0, ans=0.0
+2024-08-03 12:20:38,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.17 vs. limit=6.0
+2024-08-03 12:20:41,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=114158.0, ans=0.04949747468305833
+2024-08-03 12:20:43,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=114194.66666666667, ans=0.2
+2024-08-03 12:21:03,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=114268.0, ans=0.2
+2024-08-03 12:21:04,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=114268.0, ans=0.09899494936611666
+2024-08-03 12:21:05,457 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.389e+01 1.230e+02 1.563e+02 1.827e+02 3.181e+02, threshold=3.125e+02, percent-clipped=4.0
+2024-08-03 12:21:10,001 INFO [train.py:1114] (2/4) Epoch 9, batch 2050, loss[loss=0.1946, simple_loss=0.2656, pruned_loss=0.06181, over 13434.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3014, pruned_loss=0.07196, over 2634491.29 frames. ], batch size: 32, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:21:11,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=114304.66666666667, ans=0.2
+2024-08-03 12:21:25,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=114341.33333333333, ans=0.125
+2024-08-03 12:21:51,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.55 vs. limit=15.0
+2024-08-03 12:21:51,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=114451.33333333333, ans=0.0
+2024-08-03 12:21:57,222 INFO [train.py:1114] (2/4) Epoch 9, batch 2100, loss[loss=0.2252, simple_loss=0.3087, pruned_loss=0.07082, over 13528.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3003, pruned_loss=0.07122, over 2639661.74 frames. ], batch size: 37, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:22:06,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=114524.66666666667, ans=10.0
+2024-08-03 12:22:12,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=114524.66666666667, ans=0.1
+2024-08-03 12:22:13,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114524.66666666667, ans=0.125
+2024-08-03 12:22:18,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=114561.33333333333, ans=0.2
+2024-08-03 12:22:23,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=114598.0, ans=0.09899494936611666
+2024-08-03 12:22:31,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=114598.0, ans=0.125
+2024-08-03 12:22:54,561 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.194e+02 1.363e+02 1.768e+02 4.718e+02, threshold=2.726e+02, percent-clipped=3.0
+2024-08-03 12:23:07,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=114671.33333333333, ans=0.125
+2024-08-03 12:23:09,830 INFO [train.py:1114] (2/4) Epoch 9, batch 2150, loss[loss=0.207, simple_loss=0.291, pruned_loss=0.06151, over 13584.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2992, pruned_loss=0.07063, over 2647858.84 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:23:30,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=114708.0, ans=0.125
+2024-08-03 12:24:09,194 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0
+2024-08-03 12:24:13,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=114744.66666666667, ans=0.2
+2024-08-03 12:24:50,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=114818.0, ans=0.125
+2024-08-03 12:24:59,179 INFO [train.py:1114] (2/4) Epoch 9, batch 2200, loss[loss=0.2067, simple_loss=0.2908, pruned_loss=0.06132, over 13422.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2996, pruned_loss=0.07117, over 2645387.99 frames. ], batch size: 39, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:25:22,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0
+2024-08-03 12:25:27,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.58 vs. limit=10.0
+2024-08-03 12:25:47,878 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.835e+01 1.281e+02 1.616e+02 2.381e+02 3.635e+02, threshold=3.231e+02, percent-clipped=12.0
+2024-08-03 12:25:48,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=115001.33333333333, ans=0.125
+2024-08-03 12:25:52,562 INFO [train.py:1114] (2/4) Epoch 9, batch 2250, loss[loss=0.2042, simple_loss=0.2917, pruned_loss=0.05838, over 13368.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2989, pruned_loss=0.0707, over 2642394.78 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:25:52,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=115038.0, ans=0.0
+2024-08-03 12:26:12,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=115111.33333333333, ans=0.125
+2024-08-03 12:26:43,636 INFO [train.py:1114] (2/4) Epoch 9, batch 2300, loss[loss=0.1873, simple_loss=0.259, pruned_loss=0.05779, over 13578.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2981, pruned_loss=0.07069, over 2638530.51 frames. ], batch size: 33, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:27:11,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=115294.66666666667, ans=0.125
+2024-08-03 12:27:12,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115294.66666666667, ans=0.125
+2024-08-03 12:27:28,229 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.909e+01 1.211e+02 1.428e+02 1.712e+02 2.709e+02, threshold=2.855e+02, percent-clipped=0.0
+2024-08-03 12:27:32,865 INFO [train.py:1114] (2/4) Epoch 9, batch 2350, loss[loss=0.2299, simple_loss=0.3121, pruned_loss=0.07383, over 13544.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2989, pruned_loss=0.07073, over 2641772.87 frames. ], batch size: 38, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:27:32,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=115404.66666666667, ans=0.125
+2024-08-03 12:27:37,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=115404.66666666667, ans=0.1
+2024-08-03 12:27:40,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=115404.66666666667, ans=0.125
+2024-08-03 12:27:53,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=115478.0, ans=0.2
+2024-08-03 12:27:55,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=115478.0, ans=0.125
+2024-08-03 12:28:00,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=115478.0, ans=0.0
+2024-08-03 12:28:00,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=115478.0, ans=0.125
+2024-08-03 12:28:04,969 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0
+2024-08-03 12:28:16,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=115551.33333333333, ans=0.2
+2024-08-03 12:28:21,708 INFO [train.py:1114] (2/4) Epoch 9, batch 2400, loss[loss=0.2012, simple_loss=0.2822, pruned_loss=0.06008, over 13536.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2989, pruned_loss=0.07054, over 2643097.65 frames. ], batch size: 35, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:28:30,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0
+2024-08-03 12:28:30,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=115588.0, ans=0.125
+2024-08-03 12:28:53,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.20 vs. limit=15.0
+2024-08-03 12:29:04,112 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.274e+02 1.512e+02 2.104e+02 3.890e+02, threshold=3.023e+02, percent-clipped=4.0
+2024-08-03 12:29:08,562 INFO [train.py:1114] (2/4) Epoch 9, batch 2450, loss[loss=0.22, simple_loss=0.2989, pruned_loss=0.07051, over 13354.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3003, pruned_loss=0.07143, over 2632642.91 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:29:14,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=115771.33333333333, ans=0.025
+2024-08-03 12:29:25,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115844.66666666667, ans=0.125
+2024-08-03 12:29:26,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.09 vs. limit=6.0
+2024-08-03 12:29:34,335 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.15 vs. limit=15.0
+2024-08-03 12:29:45,718 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.97 vs. limit=15.0
+2024-08-03 12:29:53,263 INFO [train.py:1114] (2/4) Epoch 9, batch 2500, loss[loss=0.2409, simple_loss=0.3219, pruned_loss=0.07992, over 13406.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2992, pruned_loss=0.07047, over 2636499.40 frames. ], batch size: 39, lr: 1.37e-02, grad_scale: 32.0
], batch size: 39, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:30:11,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=116028.0, ans=0.05 +2024-08-03 12:30:20,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116064.66666666667, ans=0.125 +2024-08-03 12:30:31,832 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.707e+01 1.175e+02 1.472e+02 1.847e+02 3.243e+02, threshold=2.944e+02, percent-clipped=1.0 +2024-08-03 12:30:32,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=116101.33333333333, ans=0.0 +2024-08-03 12:30:36,223 INFO [train.py:1114] (2/4) Epoch 9, batch 2550, loss[loss=0.1901, simple_loss=0.2663, pruned_loss=0.05693, over 13538.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2987, pruned_loss=0.07022, over 2638145.64 frames. ], batch size: 31, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:30:38,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=116138.0, ans=0.025 +2024-08-03 12:30:39,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.78 vs. limit=6.0 +2024-08-03 12:30:40,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=116138.0, ans=0.125 +2024-08-03 12:31:08,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=116248.0, ans=0.2 +2024-08-03 12:31:09,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=116284.66666666667, ans=0.125 +2024-08-03 12:31:11,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=116284.66666666667, ans=0.0 +2024-08-03 12:31:11,861 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.02 vs. limit=15.0 +2024-08-03 12:31:14,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=116284.66666666667, ans=0.09899494936611666 +2024-08-03 12:31:20,892 INFO [train.py:1114] (2/4) Epoch 9, batch 2600, loss[loss=0.2111, simple_loss=0.2887, pruned_loss=0.06678, over 13564.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3001, pruned_loss=0.07106, over 2636701.79 frames. ], batch size: 36, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:31:25,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=116321.33333333333, ans=0.0 +2024-08-03 12:31:39,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116394.66666666667, ans=0.1 +2024-08-03 12:31:43,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=116394.66666666667, ans=0.125 +2024-08-03 12:31:46,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.98 vs. 
limit=22.5 +2024-08-03 12:31:52,728 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.57 vs. limit=5.0 +2024-08-03 12:32:00,488 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.327e+01 1.173e+02 1.340e+02 1.707e+02 3.211e+02, threshold=2.680e+02, percent-clipped=1.0 +2024-08-03 12:32:03,946 INFO [train.py:1114] (2/4) Epoch 9, batch 2650, loss[loss=0.2516, simple_loss=0.3263, pruned_loss=0.08849, over 13318.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3007, pruned_loss=0.07154, over 2639541.60 frames. ], batch size: 46, lr: 1.37e-02, grad_scale: 16.0 +2024-08-03 12:32:17,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=15.0 +2024-08-03 12:32:17,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=116541.33333333333, ans=0.0 +2024-08-03 12:32:32,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=116614.66666666667, ans=0.1 +2024-08-03 12:32:35,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=116614.66666666667, ans=0.1 +2024-08-03 12:32:36,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116614.66666666667, ans=0.1 +2024-08-03 12:32:38,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=116651.33333333333, ans=0.2 +2024-08-03 12:32:47,473 INFO [train.py:1114] (2/4) Epoch 9, batch 2700, loss[loss=0.2519, simple_loss=0.3244, pruned_loss=0.08972, over 13546.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3011, pruned_loss=0.0716, over 2637586.52 frames. ], batch size: 40, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:32:50,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=116688.0, ans=0.1 +2024-08-03 12:32:52,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116688.0, ans=0.1 +2024-08-03 12:32:55,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116724.66666666667, ans=0.125 +2024-08-03 12:33:04,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116761.33333333333, ans=0.0 +2024-08-03 12:33:13,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.66 vs. limit=22.5 +2024-08-03 12:33:28,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.391e+01 1.337e+02 1.607e+02 2.047e+02 3.156e+02, threshold=3.214e+02, percent-clipped=11.0 +2024-08-03 12:33:32,426 INFO [train.py:1114] (2/4) Epoch 9, batch 2750, loss[loss=0.2231, simple_loss=0.2926, pruned_loss=0.07674, over 13343.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2998, pruned_loss=0.07142, over 2635521.22 frames. 
], batch size: 34, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:33:50,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=116944.66666666667, ans=0.125 +2024-08-03 12:33:59,846 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:34:05,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=116981.33333333333, ans=0.125 +2024-08-03 12:34:14,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=117018.0, ans=0.125 +2024-08-03 12:34:16,674 INFO [train.py:1114] (2/4) Epoch 9, batch 2800, loss[loss=0.3259, simple_loss=0.3646, pruned_loss=0.1436, over 9256.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2992, pruned_loss=0.07099, over 2627464.01 frames. ], batch size: 96, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:34:16,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=117054.66666666667, ans=0.125 +2024-08-03 12:34:17,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.34 vs. limit=22.5 +2024-08-03 12:34:22,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117054.66666666667, ans=0.1 +2024-08-03 12:34:40,381 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:34:58,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.288e+02 1.462e+02 1.862e+02 3.632e+02, threshold=2.925e+02, percent-clipped=2.0 +2024-08-03 12:35:01,061 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. limit=15.0 +2024-08-03 12:35:01,472 INFO [train.py:1114] (2/4) Epoch 9, batch 2850, loss[loss=0.1801, simple_loss=0.2618, pruned_loss=0.04919, over 13363.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3001, pruned_loss=0.07163, over 2621469.59 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:35:11,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=117274.66666666667, ans=0.2 +2024-08-03 12:35:12,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=117274.66666666667, ans=0.2 +2024-08-03 12:36:05,393 INFO [train.py:1114] (2/4) Epoch 9, batch 2900, loss[loss=0.2013, simple_loss=0.2799, pruned_loss=0.06136, over 13385.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2994, pruned_loss=0.07053, over 2632234.76 frames. ], batch size: 36, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:36:12,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.11 vs. limit=15.0 +2024-08-03 12:36:18,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=117458.0, ans=0.025 +2024-08-03 12:36:21,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.27 vs. 
limit=15.0 +2024-08-03 12:36:25,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=117494.66666666667, ans=0.0 +2024-08-03 12:36:26,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=15.0 +2024-08-03 12:36:35,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117531.33333333333, ans=0.125 +2024-08-03 12:36:35,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-08-03 12:36:36,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=22.5 +2024-08-03 12:36:36,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=117531.33333333333, ans=0.125 +2024-08-03 12:36:46,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117568.0, ans=0.1 +2024-08-03 12:36:47,222 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.425e+01 1.178e+02 1.305e+02 1.613e+02 2.693e+02, threshold=2.610e+02, percent-clipped=0.0 +2024-08-03 12:36:48,958 INFO [train.py:1114] (2/4) Epoch 9, batch 2950, loss[loss=0.1939, simple_loss=0.279, pruned_loss=0.05445, over 13361.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2988, pruned_loss=0.07055, over 2630412.80 frames. ], batch size: 34, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:37:04,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=117641.33333333333, ans=0.125 +2024-08-03 12:37:14,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.70 vs. limit=22.5 +2024-08-03 12:37:15,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=117714.66666666667, ans=0.2 +2024-08-03 12:37:19,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=6.0 +2024-08-03 12:37:32,406 INFO [train.py:1114] (2/4) Epoch 9, batch 3000, loss[loss=0.1911, simple_loss=0.2702, pruned_loss=0.05598, over 13533.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2984, pruned_loss=0.07025, over 2630396.07 frames. ], batch size: 37, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:37:32,406 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 12:37:49,955 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.1846, simple_loss=0.2849, pruned_loss=0.04217, over 944034.00 frames. 
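The `WARNING [optim.py:487]` records in this log summarize the optimizer's adaptive gradient clipping: the five numbers are the min/25%/50%/75%/max quartiles of recently observed gradient norms, and in each warning the threshold equals `Clipping_scale` times the logged median (e.g. 2.0 × 1.340e+02 = 2.680e+02 in the first warning of this excerpt), with `percent-clipped` reporting the share of recent batches whose norm exceeded that threshold. Below is a minimal sketch of such a scheme, not the actual optim.py code; the class name `QuartileClipper`, the `history` window, and the returned dict are illustrative assumptions, while the scale-times-median rule is read off the logged numbers.

```python
# Illustrative sketch of quartile-based gradient clipping (hypothetical names;
# only the threshold = clipping_scale * median rule is taken from the log).
from collections import deque

import torch


class QuartileClipper:
    """Track recent total grad norms; clip to clipping_scale * running median."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 100):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent total grad norms

    def clip_(self, parameters) -> dict:
        params = [p for p in parameters if p.grad is not None]
        # Total norm over all parameter gradients for this step.
        norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])
        ).item()
        self.norms.append(norm)

        # min / 25% / 50% / 75% / max of the recent norms, as in the WARNING lines.
        q = torch.quantile(
            torch.tensor(list(self.norms)),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * q[2].item()  # scale * median

        clipped = norm > threshold
        if clipped:
            for p in params:
                p.grad.mul_(threshold / norm)

        # percent-clipped would be the fraction of recent steps where clipped=True.
        return {"quartiles": q.tolist(), "threshold": threshold, "clipped": clipped}
```

Similarly, the `INFO [scaling.py:214]` records track `ScheduledFloat` hyperparameters: dropout probabilities, skip rates, and similar values that change as a function of `batch_count`, with the logged `ans` being the schedule evaluated at the current count. A sketch of one plausible form, a piecewise-linear schedule, is shown below; the breakpoints in the example are hypothetical, not taken from scaling.py.

```python
# Illustrative piecewise-linear schedule over batch_count (hypothetical breakpoints).
def scheduled_float(batch_count: float, schedule: list[tuple[float, float]]) -> float:
    """Linearly interpolate (batch_count, value) breakpoints; clamp at the ends."""
    (x0, y0) = schedule[0]
    if batch_count <= x0:
        return y0
    for (x1, y1) in schedule[1:]:
        if batch_count <= x1:
            return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
        x0, y0 = x1, y1
    return y0


# e.g. a dropout_p that decays from 0.3 to 0.1 over the first 20k batches:
p = scheduled_float(116614.67, [(0.0, 0.3), (20000.0, 0.1)])  # -> 0.1
```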
+2024-08-03 12:37:49,955 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 12:37:59,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=117824.66666666667, ans=0.0 +2024-08-03 12:38:04,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=117824.66666666667, ans=0.125 +2024-08-03 12:38:08,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=117861.33333333333, ans=0.125 +2024-08-03 12:38:19,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0 +2024-08-03 12:38:22,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117898.0, ans=0.125 +2024-08-03 12:38:22,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.07 vs. limit=22.5 +2024-08-03 12:38:22,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117898.0, ans=0.125 +2024-08-03 12:38:24,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117934.66666666667, ans=0.1 +2024-08-03 12:38:31,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.182e+02 1.340e+02 1.696e+02 3.056e+02, threshold=2.681e+02, percent-clipped=1.0 +2024-08-03 12:38:31,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0 +2024-08-03 12:38:32,960 INFO [train.py:1114] (2/4) Epoch 9, batch 3050, loss[loss=0.2204, simple_loss=0.2992, pruned_loss=0.07084, over 13548.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3003, pruned_loss=0.07146, over 2627542.66 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:38:42,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=118008.0, ans=0.125 +2024-08-03 12:39:01,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=118081.33333333333, ans=0.5 +2024-08-03 12:39:07,532 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:39:17,135 INFO [train.py:1114] (2/4) Epoch 9, batch 3100, loss[loss=0.2519, simple_loss=0.3239, pruned_loss=0.09, over 13329.00 frames. ], tot_loss[loss=0.22, simple_loss=0.299, pruned_loss=0.07054, over 2627434.64 frames. 
], batch size: 46, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:39:32,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=118191.33333333333, ans=0.125 +2024-08-03 12:39:38,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=118228.0, ans=0.125 +2024-08-03 12:39:48,376 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:39:50,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=118301.33333333333, ans=0.2 +2024-08-03 12:39:54,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118301.33333333333, ans=0.1 +2024-08-03 12:39:55,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=15.0 +2024-08-03 12:39:57,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118301.33333333333, ans=0.125 +2024-08-03 12:39:58,237 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.498e+01 1.162e+02 1.416e+02 1.749e+02 3.223e+02, threshold=2.833e+02, percent-clipped=4.0 +2024-08-03 12:39:58,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118301.33333333333, ans=0.1 +2024-08-03 12:39:58,544 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:40:00,003 INFO [train.py:1114] (2/4) Epoch 9, batch 3150, loss[loss=0.2191, simple_loss=0.3013, pruned_loss=0.06842, over 13124.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2987, pruned_loss=0.07013, over 2629314.47 frames. ], batch size: 48, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:40:00,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=118338.0, ans=0.0 +2024-08-03 12:42:09,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=118448.0, ans=0.0 +2024-08-03 12:42:10,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=118448.0, ans=0.2 +2024-08-03 12:42:15,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118484.66666666667, ans=0.1 +2024-08-03 12:42:23,245 INFO [train.py:1114] (2/4) Epoch 9, batch 3200, loss[loss=0.2004, simple_loss=0.2859, pruned_loss=0.05749, over 13541.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2989, pruned_loss=0.07056, over 2636015.49 frames. ], batch size: 37, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:42:33,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=118558.0, ans=0.125 +2024-08-03 12:42:58,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. 
limit=6.0 +2024-08-03 12:43:06,443 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.301e+02 1.590e+02 2.129e+02 3.021e+02, threshold=3.180e+02, percent-clipped=5.0 +2024-08-03 12:43:07,344 INFO [train.py:1114] (2/4) Epoch 9, batch 3250, loss[loss=0.2224, simple_loss=0.3029, pruned_loss=0.07097, over 13393.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2989, pruned_loss=0.07, over 2640165.69 frames. ], batch size: 38, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:43:13,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.17 vs. limit=22.5 +2024-08-03 12:43:23,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=118778.0, ans=0.125 +2024-08-03 12:43:26,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=118778.0, ans=0.0 +2024-08-03 12:43:38,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118814.66666666667, ans=0.125 +2024-08-03 12:43:38,574 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:43:49,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=118851.33333333333, ans=0.1 +2024-08-03 12:43:51,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=8.0 +2024-08-03 12:43:55,821 INFO [train.py:1114] (2/4) Epoch 9, batch 3300, loss[loss=0.2383, simple_loss=0.3145, pruned_loss=0.08105, over 12885.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2976, pruned_loss=0.06944, over 2641432.90 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:44:06,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.70 vs. limit=6.0 +2024-08-03 12:44:10,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=118924.66666666667, ans=0.125 +2024-08-03 12:44:11,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.15 vs. limit=15.0 +2024-08-03 12:44:25,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=118961.33333333333, ans=0.0 +2024-08-03 12:44:43,220 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.376e+01 1.208e+02 1.436e+02 1.732e+02 3.249e+02, threshold=2.873e+02, percent-clipped=1.0 +2024-08-03 12:44:44,074 INFO [train.py:1114] (2/4) Epoch 9, batch 3350, loss[loss=0.2355, simple_loss=0.3133, pruned_loss=0.07886, over 13076.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2993, pruned_loss=0.0705, over 2630090.17 frames. 
], batch size: 48, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:44:45,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=119071.33333333333, ans=0.05 +2024-08-03 12:44:53,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119108.0, ans=0.125 +2024-08-03 12:45:10,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=119181.33333333333, ans=0.0 +2024-08-03 12:45:12,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-08-03 12:45:22,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119218.0, ans=0.1 +2024-08-03 12:45:26,437 INFO [train.py:1114] (2/4) Epoch 9, batch 3400, loss[loss=0.1913, simple_loss=0.2653, pruned_loss=0.05869, over 13577.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2988, pruned_loss=0.07025, over 2625341.62 frames. ], batch size: 31, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:45:38,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=119291.33333333333, ans=0.2 +2024-08-03 12:45:38,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=119291.33333333333, ans=0.0 +2024-08-03 12:45:45,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=119328.0, ans=0.0 +2024-08-03 12:45:51,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=119364.66666666667, ans=0.125 +2024-08-03 12:46:01,807 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:46:18,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=119401.33333333333, ans=0.09899494936611666 +2024-08-03 12:46:21,253 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.200e+02 1.400e+02 1.853e+02 3.003e+02, threshold=2.800e+02, percent-clipped=1.0 +2024-08-03 12:46:21,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=119438.0, ans=0.125 +2024-08-03 12:46:22,087 INFO [train.py:1114] (2/4) Epoch 9, batch 3450, loss[loss=0.212, simple_loss=0.2977, pruned_loss=0.06317, over 12888.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2983, pruned_loss=0.06969, over 2628307.77 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:46:40,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=119511.33333333333, ans=0.125 +2024-08-03 12:46:40,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.80 vs. 
limit=22.5 +2024-08-03 12:46:59,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119584.66666666667, ans=0.125 +2024-08-03 12:47:01,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119584.66666666667, ans=0.1 +2024-08-03 12:47:06,102 INFO [train.py:1114] (2/4) Epoch 9, batch 3500, loss[loss=0.2141, simple_loss=0.2921, pruned_loss=0.0681, over 13520.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2985, pruned_loss=0.07055, over 2630583.44 frames. ], batch size: 34, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:47:28,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=119694.66666666667, ans=0.0 +2024-08-03 12:47:47,901 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.10 vs. limit=15.0 +2024-08-03 12:47:50,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=119768.0, ans=0.125 +2024-08-03 12:47:51,577 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.983e+01 1.270e+02 1.503e+02 1.772e+02 3.260e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-03 12:47:51,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=119804.66666666667, ans=0.125 +2024-08-03 12:47:52,439 INFO [train.py:1114] (2/4) Epoch 9, batch 3550, loss[loss=0.23, simple_loss=0.3093, pruned_loss=0.07537, over 12348.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3011, pruned_loss=0.07213, over 2628458.13 frames. ], batch size: 58, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:48:10,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=119878.0, ans=0.125 +2024-08-03 12:48:18,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119914.66666666667, ans=0.125 +2024-08-03 12:48:22,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=119914.66666666667, ans=0.125 +2024-08-03 12:48:23,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=15.0 +2024-08-03 12:48:27,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=119914.66666666667, ans=0.0 +2024-08-03 12:48:36,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=119951.33333333333, ans=0.0 +2024-08-03 12:48:37,780 INFO [train.py:1114] (2/4) Epoch 9, batch 3600, loss[loss=0.2571, simple_loss=0.324, pruned_loss=0.09507, over 9178.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3062, pruned_loss=0.07694, over 2482995.59 frames. ], batch size: 96, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:48:39,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.68 vs. 
limit=10.0 +2024-08-03 12:48:58,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120061.33333333333, ans=0.1 +2024-08-03 12:50:09,321 INFO [train.py:1114] (2/4) Epoch 10, batch 0, loss[loss=0.1898, simple_loss=0.2693, pruned_loss=0.0551, over 13336.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2693, pruned_loss=0.0551, over 13336.00 frames. ], batch size: 33, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:50:09,322 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 12:50:19,382 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.1895, simple_loss=0.2901, pruned_loss=0.04443, over 944034.00 frames. +2024-08-03 12:50:19,383 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 12:50:27,507 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.231e+02 1.354e+02 1.561e+02 3.235e+02, threshold=2.709e+02, percent-clipped=1.0 +2024-08-03 12:50:29,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120171.33333333333, ans=0.0 +2024-08-03 12:50:33,510 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:50:34,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=120171.33333333333, ans=0.2 +2024-08-03 12:50:39,359 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.61 vs. limit=15.0 +2024-08-03 12:50:44,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.80 vs. limit=15.0 +2024-08-03 12:50:51,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=120244.66666666667, ans=0.125 +2024-08-03 12:50:58,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=120281.33333333333, ans=0.2 +2024-08-03 12:51:07,177 INFO [train.py:1114] (2/4) Epoch 10, batch 50, loss[loss=0.1921, simple_loss=0.274, pruned_loss=0.0551, over 13434.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3022, pruned_loss=0.07185, over 576780.94 frames. ], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:51:13,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=120318.0, ans=0.0 +2024-08-03 12:51:19,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120354.66666666667, ans=0.0 +2024-08-03 12:51:19,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120354.66666666667, ans=0.125 +2024-08-03 12:51:37,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=120428.0, ans=0.125 +2024-08-03 12:51:51,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0 +2024-08-03 12:51:53,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. 
limit=15.0 +2024-08-03 12:51:54,517 INFO [train.py:1114] (2/4) Epoch 10, batch 100, loss[loss=0.2006, simple_loss=0.282, pruned_loss=0.05962, over 13536.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3029, pruned_loss=0.07167, over 1025225.11 frames. ], batch size: 35, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:52:00,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=120501.33333333333, ans=0.025 +2024-08-03 12:52:02,523 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.137e+01 1.185e+02 1.436e+02 1.784e+02 2.704e+02, threshold=2.871e+02, percent-clipped=0.0 +2024-08-03 12:52:23,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=120611.33333333333, ans=0.07 +2024-08-03 12:52:39,342 INFO [train.py:1114] (2/4) Epoch 10, batch 150, loss[loss=0.1835, simple_loss=0.2649, pruned_loss=0.0511, over 13384.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2984, pruned_loss=0.0683, over 1385894.87 frames. ], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:08,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=120794.66666666667, ans=0.025 +2024-08-03 12:53:22,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=120831.33333333333, ans=0.0 +2024-08-03 12:53:27,595 INFO [train.py:1114] (2/4) Epoch 10, batch 200, loss[loss=0.2311, simple_loss=0.3151, pruned_loss=0.07354, over 12472.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2968, pruned_loss=0.0681, over 1664140.32 frames. ], batch size: 58, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:35,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.179e+02 1.388e+02 1.956e+02 3.362e+02, threshold=2.775e+02, percent-clipped=2.0 +2024-08-03 12:53:53,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=120941.33333333333, ans=0.0 +2024-08-03 12:54:12,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=121014.66666666667, ans=0.95 +2024-08-03 12:54:15,698 INFO [train.py:1114] (2/4) Epoch 10, batch 250, loss[loss=0.2113, simple_loss=0.2932, pruned_loss=0.06465, over 13293.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2955, pruned_loss=0.06724, over 1883084.61 frames. ], batch size: 46, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:54:28,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=15.0 +2024-08-03 12:54:34,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=121088.0, ans=0.125 +2024-08-03 12:54:39,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.31 vs. limit=6.0 +2024-08-03 12:54:43,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.91 vs. 
limit=15.0 +2024-08-03 12:54:49,899 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:54:50,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=121161.33333333333, ans=0.07 +2024-08-03 12:55:03,436 INFO [train.py:1114] (2/4) Epoch 10, batch 300, loss[loss=0.2339, simple_loss=0.3104, pruned_loss=0.07872, over 13437.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2954, pruned_loss=0.06726, over 2049963.33 frames. ], batch size: 42, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:07,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=121234.66666666667, ans=0.125 +2024-08-03 12:55:11,652 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.594e+01 1.264e+02 1.454e+02 1.818e+02 3.044e+02, threshold=2.909e+02, percent-clipped=3.0 +2024-08-03 12:55:27,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=121308.0, ans=0.125 +2024-08-03 12:55:27,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=121308.0, ans=0.0 +2024-08-03 12:55:34,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0 +2024-08-03 12:55:38,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121344.66666666667, ans=0.125 +2024-08-03 12:55:40,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121344.66666666667, ans=0.1 +2024-08-03 12:55:45,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121381.33333333333, ans=0.0 +2024-08-03 12:55:50,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=121381.33333333333, ans=15.0 +2024-08-03 12:55:51,193 INFO [train.py:1114] (2/4) Epoch 10, batch 350, loss[loss=0.2077, simple_loss=0.2865, pruned_loss=0.06441, over 13580.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2968, pruned_loss=0.06793, over 2181115.83 frames. ], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:53,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=121418.0, ans=0.95 +2024-08-03 12:55:53,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.05 vs. limit=15.0 +2024-08-03 12:56:07,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=121454.66666666667, ans=0.025 +2024-08-03 12:56:10,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.40 vs. limit=22.5 +2024-08-03 12:56:11,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.45 vs. 
limit=22.5 +2024-08-03 12:56:27,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0 +2024-08-03 12:56:29,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=121564.66666666667, ans=0.0 +2024-08-03 12:56:29,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.07 vs. limit=15.0 +2024-08-03 12:56:35,971 INFO [train.py:1114] (2/4) Epoch 10, batch 400, loss[loss=0.2218, simple_loss=0.3051, pruned_loss=0.0693, over 13356.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2962, pruned_loss=0.06774, over 2285547.37 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:56:39,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=121601.33333333333, ans=0.0 +2024-08-03 12:56:42,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=121601.33333333333, ans=0.125 +2024-08-03 12:56:44,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=121601.33333333333, ans=0.04949747468305833 +2024-08-03 12:56:45,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=121601.33333333333, ans=0.125 +2024-08-03 12:56:46,294 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.271e+02 1.420e+02 1.744e+02 2.813e+02, threshold=2.840e+02, percent-clipped=0.0 +2024-08-03 12:57:01,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=121674.66666666667, ans=0.125 +2024-08-03 12:57:08,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121711.33333333333, ans=0.1 +2024-08-03 12:57:23,500 INFO [train.py:1114] (2/4) Epoch 10, batch 450, loss[loss=0.2066, simple_loss=0.292, pruned_loss=0.06057, over 13544.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2956, pruned_loss=0.06753, over 2359880.69 frames. ], batch size: 38, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:57:32,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=121784.66666666667, ans=0.2 +2024-08-03 12:57:42,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=121821.33333333333, ans=15.0 +2024-08-03 12:57:49,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=121858.0, ans=0.125 +2024-08-03 12:57:57,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=121894.66666666667, ans=0.035 +2024-08-03 12:57:57,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=121894.66666666667, ans=0.025 +2024-08-03 12:58:12,705 INFO [train.py:1114] (2/4) Epoch 10, batch 500, loss[loss=0.2365, simple_loss=0.3199, pruned_loss=0.07653, over 13435.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.295, pruned_loss=0.06739, over 2425730.41 frames. 
], batch size: 43, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:58:14,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=121968.0, ans=0.0 +2024-08-03 12:58:16,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=121968.0, ans=0.025 +2024-08-03 12:58:20,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.605e+01 1.105e+02 1.306e+02 1.598e+02 3.062e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 12:58:26,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=122004.66666666667, ans=0.2 +2024-08-03 12:58:36,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122041.33333333333, ans=0.1 +2024-08-03 12:58:43,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122078.0, ans=0.1 +2024-08-03 12:58:45,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=122078.0, ans=0.0 +2024-08-03 12:58:49,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=122114.66666666667, ans=0.2 +2024-08-03 12:59:00,200 INFO [train.py:1114] (2/4) Epoch 10, batch 550, loss[loss=0.2281, simple_loss=0.3114, pruned_loss=0.07236, over 13033.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2952, pruned_loss=0.0674, over 2468065.12 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:15,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=122188.0, ans=0.0 +2024-08-03 12:59:17,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122224.66666666667, ans=0.1 +2024-08-03 12:59:24,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122224.66666666667, ans=0.125 +2024-08-03 12:59:38,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.26 vs. limit=22.5 +2024-08-03 12:59:40,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=122298.0, ans=0.0 +2024-08-03 12:59:45,499 INFO [train.py:1114] (2/4) Epoch 10, batch 600, loss[loss=0.233, simple_loss=0.3087, pruned_loss=0.07861, over 13313.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2954, pruned_loss=0.06756, over 2508032.02 frames. ], batch size: 46, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:53,552 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.160e+02 1.307e+02 1.564e+02 2.892e+02, threshold=2.615e+02, percent-clipped=4.0 +2024-08-03 13:00:10,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122408.0, ans=0.1 +2024-08-03 13:00:12,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.01 vs. 
limit=15.0 +2024-08-03 13:00:15,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=122444.66666666667, ans=0.05 +2024-08-03 13:00:17,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=122444.66666666667, ans=0.07 +2024-08-03 13:00:33,101 INFO [train.py:1114] (2/4) Epoch 10, batch 650, loss[loss=0.2121, simple_loss=0.3007, pruned_loss=0.06178, over 13549.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2941, pruned_loss=0.06674, over 2543460.09 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:00:39,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=122518.0, ans=0.07 +2024-08-03 13:00:44,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122554.66666666667, ans=0.0 +2024-08-03 13:00:52,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=122591.33333333333, ans=0.0 +2024-08-03 13:00:53,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=122591.33333333333, ans=0.125 +2024-08-03 13:01:19,902 INFO [train.py:1114] (2/4) Epoch 10, batch 700, loss[loss=0.1947, simple_loss=0.2741, pruned_loss=0.05764, over 13529.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2939, pruned_loss=0.06646, over 2565264.01 frames. ], batch size: 35, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:01:21,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=122701.33333333333, ans=0.0 +2024-08-03 13:01:28,966 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.487e+01 1.261e+02 1.582e+02 2.111e+02 3.773e+02, threshold=3.165e+02, percent-clipped=11.0 +2024-08-03 13:01:29,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=122738.0, ans=10.0 +2024-08-03 13:01:42,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=122774.66666666667, ans=0.0 +2024-08-03 13:01:48,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.72 vs. limit=15.0 +2024-08-03 13:01:54,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=122811.33333333333, ans=0.0 +2024-08-03 13:02:06,740 INFO [train.py:1114] (2/4) Epoch 10, batch 750, loss[loss=0.2456, simple_loss=0.3253, pruned_loss=0.08297, over 13344.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.293, pruned_loss=0.06612, over 2581895.32 frames. 
], batch size: 37, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:02:07,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=122884.66666666667, ans=0.05 +2024-08-03 13:02:16,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=122921.33333333333, ans=0.0 +2024-08-03 13:02:21,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122921.33333333333, ans=0.125 +2024-08-03 13:02:35,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=122958.0, ans=0.0 +2024-08-03 13:08:35,469 INFO [train.py:1114] (2/4) Epoch 10, batch 800, loss[loss=0.2177, simple_loss=0.2879, pruned_loss=0.07376, over 13330.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2937, pruned_loss=0.06652, over 2596844.90 frames. ], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:08:59,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=123068.0, ans=0.05 +2024-08-03 13:09:00,772 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.986e+01 1.304e+02 1.516e+02 1.968e+02 2.999e+02, threshold=3.032e+02, percent-clipped=0.0 +2024-08-03 13:09:14,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=123141.33333333333, ans=0.09899494936611666 +2024-08-03 13:09:19,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=123141.33333333333, ans=10.0 +2024-08-03 13:09:33,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=123214.66666666667, ans=0.125 +2024-08-03 13:09:36,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.59 vs. limit=15.0 +2024-08-03 13:09:40,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.45 vs. limit=15.0 +2024-08-03 13:10:07,625 INFO [train.py:1114] (2/4) Epoch 10, batch 850, loss[loss=0.2094, simple_loss=0.2994, pruned_loss=0.05966, over 13344.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2944, pruned_loss=0.06723, over 2608963.08 frames. ], batch size: 40, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:10,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123251.33333333333, ans=0.1 +2024-08-03 13:10:13,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=123251.33333333333, ans=0.0 +2024-08-03 13:10:15,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.30 vs. limit=10.0 +2024-08-03 13:10:16,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. 
limit=15.0 +2024-08-03 13:10:19,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123288.0, ans=0.125 +2024-08-03 13:10:19,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.83 vs. limit=15.0 +2024-08-03 13:10:43,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.26 vs. limit=15.0 +2024-08-03 13:10:52,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=123398.0, ans=0.125 +2024-08-03 13:10:54,531 INFO [train.py:1114] (2/4) Epoch 10, batch 900, loss[loss=0.1803, simple_loss=0.261, pruned_loss=0.04979, over 13354.00 frames. ], tot_loss[loss=0.214, simple_loss=0.294, pruned_loss=0.06698, over 2610901.31 frames. ], batch size: 33, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:11:04,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.190e+01 1.156e+02 1.356e+02 1.629e+02 2.273e+02, threshold=2.713e+02, percent-clipped=0.0 +2024-08-03 13:11:08,753 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:11:11,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=123471.33333333333, ans=0.0 +2024-08-03 13:11:37,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.70 vs. limit=22.5 +2024-08-03 13:11:41,265 INFO [train.py:1114] (2/4) Epoch 10, batch 950, loss[loss=0.193, simple_loss=0.2731, pruned_loss=0.05642, over 13527.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2946, pruned_loss=0.06737, over 2613514.62 frames. ], batch size: 34, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:11:41,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.14 vs. limit=15.0 +2024-08-03 13:11:45,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=123618.0, ans=0.125 +2024-08-03 13:11:52,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.34 vs. limit=15.0 +2024-08-03 13:12:07,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123691.33333333333, ans=0.0 +2024-08-03 13:12:26,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=123764.66666666667, ans=0.0 +2024-08-03 13:12:28,925 INFO [train.py:1114] (2/4) Epoch 10, batch 1000, loss[loss=0.1851, simple_loss=0.2667, pruned_loss=0.0517, over 13375.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2958, pruned_loss=0.06837, over 2610810.27 frames. 
], batch size: 35, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:12:32,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=123801.33333333333, ans=0.2 +2024-08-03 13:12:38,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=123801.33333333333, ans=0.025 +2024-08-03 13:12:41,050 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.614e+01 1.218e+02 1.406e+02 1.942e+02 3.222e+02, threshold=2.813e+02, percent-clipped=3.0 +2024-08-03 13:13:07,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=123948.0, ans=0.125 +2024-08-03 13:13:16,731 INFO [train.py:1114] (2/4) Epoch 10, batch 1050, loss[loss=0.2151, simple_loss=0.2923, pruned_loss=0.06897, over 13588.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2949, pruned_loss=0.06808, over 2615639.91 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:13:46,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=124094.66666666667, ans=0.07 +2024-08-03 13:13:49,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=124094.66666666667, ans=0.125 +2024-08-03 13:14:04,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=124168.0, ans=0.025 +2024-08-03 13:14:05,010 INFO [train.py:1114] (2/4) Epoch 10, batch 1100, loss[loss=0.2387, simple_loss=0.3165, pruned_loss=0.0805, over 13544.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2951, pruned_loss=0.06822, over 2620455.28 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:14,710 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.380e+01 1.135e+02 1.251e+02 1.585e+02 3.709e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 13:14:14,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=124204.66666666667, ans=0.125 +2024-08-03 13:14:38,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=124278.0, ans=0.09899494936611666 +2024-08-03 13:14:46,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=124314.66666666667, ans=0.07 +2024-08-03 13:14:51,689 INFO [train.py:1114] (2/4) Epoch 10, batch 1150, loss[loss=0.2107, simple_loss=0.2892, pruned_loss=0.06605, over 13546.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.295, pruned_loss=0.06824, over 2619438.51 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:55,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=124351.33333333333, ans=0.0 +2024-08-03 13:15:05,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.40 vs. 
limit=15.0 +2024-08-03 13:15:21,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124461.33333333333, ans=0.1 +2024-08-03 13:15:31,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=124498.0, ans=0.05 +2024-08-03 13:15:32,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=124498.0, ans=0.125 +2024-08-03 13:15:38,762 INFO [train.py:1114] (2/4) Epoch 10, batch 1200, loss[loss=0.228, simple_loss=0.3139, pruned_loss=0.071, over 13563.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.296, pruned_loss=0.06825, over 2616544.36 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:15:42,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124534.66666666667, ans=0.1 +2024-08-03 13:15:48,572 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.153e+02 1.332e+02 1.610e+02 2.864e+02, threshold=2.663e+02, percent-clipped=2.0 +2024-08-03 13:15:50,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124571.33333333333, ans=0.125 +2024-08-03 13:16:13,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=124644.66666666667, ans=0.025 +2024-08-03 13:16:14,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=124644.66666666667, ans=0.0 +2024-08-03 13:16:17,796 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:16:18,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=124681.33333333333, ans=0.2 +2024-08-03 13:16:25,769 INFO [train.py:1114] (2/4) Epoch 10, batch 1250, loss[loss=0.219, simple_loss=0.308, pruned_loss=0.06499, over 13415.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.296, pruned_loss=0.06785, over 2628293.74 frames. ], batch size: 42, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:16:29,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=124718.0, ans=0.0 +2024-08-03 13:16:36,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=124754.66666666667, ans=0.125 +2024-08-03 13:16:45,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=124791.33333333333, ans=0.0 +2024-08-03 13:16:52,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=124828.0, ans=0.0 +2024-08-03 13:17:05,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=124864.66666666667, ans=10.0 +2024-08-03 13:17:11,300 INFO [train.py:1114] (2/4) Epoch 10, batch 1300, loss[loss=0.2403, simple_loss=0.3141, pruned_loss=0.08325, over 12943.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2956, pruned_loss=0.06778, over 2632106.64 frames. 
], batch size: 52, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:17:16,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=124901.33333333333, ans=0.125 +2024-08-03 13:17:21,174 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.141e+01 1.219e+02 1.549e+02 1.853e+02 2.795e+02, threshold=3.098e+02, percent-clipped=1.0 +2024-08-03 13:17:51,768 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.18 vs. limit=15.0 +2024-08-03 13:17:58,636 INFO [train.py:1114] (2/4) Epoch 10, batch 1350, loss[loss=0.2076, simple_loss=0.2998, pruned_loss=0.05772, over 13547.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2957, pruned_loss=0.06769, over 2639378.38 frames. ], batch size: 37, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:18:02,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125084.66666666667, ans=0.125 +2024-08-03 13:18:07,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.90 vs. limit=15.0 +2024-08-03 13:18:09,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=125121.33333333333, ans=0.2 +2024-08-03 13:18:15,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=125158.0, ans=0.0 +2024-08-03 13:18:20,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=125158.0, ans=0.125 +2024-08-03 13:18:30,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:45,223 INFO [train.py:1114] (2/4) Epoch 10, batch 1400, loss[loss=0.1797, simple_loss=0.2558, pruned_loss=0.05181, over 13273.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2951, pruned_loss=0.0672, over 2643140.97 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:18:45,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=125268.0, ans=0.125 +2024-08-03 13:18:54,982 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.794e+01 1.134e+02 1.256e+02 1.534e+02 3.011e+02, threshold=2.513e+02, percent-clipped=0.0 +2024-08-03 13:19:02,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=15.0 +2024-08-03 13:19:06,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=125341.33333333333, ans=0.07 +2024-08-03 13:19:10,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=125341.33333333333, ans=0.125 +2024-08-03 13:19:22,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=125414.66666666667, ans=0.125 +2024-08-03 13:19:25,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.38 vs. 
limit=10.0 +2024-08-03 13:19:31,966 INFO [train.py:1114] (2/4) Epoch 10, batch 1450, loss[loss=0.2753, simple_loss=0.3481, pruned_loss=0.1013, over 13410.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.296, pruned_loss=0.06763, over 2642129.27 frames. ], batch size: 43, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:19:40,530 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:19:42,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=125488.0, ans=0.2 +2024-08-03 13:19:47,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=125488.0, ans=0.125 +2024-08-03 13:19:50,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=125488.0, ans=0.125 +2024-08-03 13:20:03,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=125561.33333333333, ans=0.0 +2024-08-03 13:20:08,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125561.33333333333, ans=0.1 +2024-08-03 13:20:15,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.98 vs. limit=22.5 +2024-08-03 13:20:19,155 INFO [train.py:1114] (2/4) Epoch 10, batch 1500, loss[loss=0.2295, simple_loss=0.3123, pruned_loss=0.07333, over 13395.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2952, pruned_loss=0.06703, over 2641853.58 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:20:21,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=125634.66666666667, ans=0.125 +2024-08-03 13:20:21,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.71 vs. limit=15.0 +2024-08-03 13:20:22,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=125634.66666666667, ans=15.0 +2024-08-03 13:20:27,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-08-03 13:20:29,215 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.867e+01 1.156e+02 1.320e+02 1.724e+02 3.764e+02, threshold=2.640e+02, percent-clipped=6.0 +2024-08-03 13:20:34,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125671.33333333333, ans=0.125 +2024-08-03 13:20:39,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=125708.0, ans=0.0 +2024-08-03 13:20:42,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.55 vs. limit=15.0 +2024-08-03 13:20:48,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.31 vs. limit=12.0 +2024-08-03 13:21:06,621 INFO [train.py:1114] (2/4) Epoch 10, batch 1550, loss[loss=0.2276, simple_loss=0.3078, pruned_loss=0.07374, over 13405.00 frames. 
], tot_loss[loss=0.2161, simple_loss=0.2961, pruned_loss=0.068, over 2630498.69 frames. ], batch size: 41, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:21:17,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125854.66666666667, ans=0.125 +2024-08-03 13:21:26,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0 +2024-08-03 13:21:38,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=125928.0, ans=0.0 +2024-08-03 13:21:54,148 INFO [train.py:1114] (2/4) Epoch 10, batch 1600, loss[loss=0.2136, simple_loss=0.3043, pruned_loss=0.06145, over 13579.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2961, pruned_loss=0.06829, over 2624359.03 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:22:04,504 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.527e+01 1.158e+02 1.419e+02 1.742e+02 3.880e+02, threshold=2.837e+02, percent-clipped=3.0 +2024-08-03 13:22:08,617 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.81 vs. limit=12.0 +2024-08-03 13:22:16,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=126074.66666666667, ans=0.025 +2024-08-03 13:22:28,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=126111.33333333333, ans=0.125 +2024-08-03 13:22:29,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=126111.33333333333, ans=0.2 +2024-08-03 13:22:32,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=126148.0, ans=0.0 +2024-08-03 13:22:41,774 INFO [train.py:1114] (2/4) Epoch 10, batch 1650, loss[loss=0.2163, simple_loss=0.3037, pruned_loss=0.06442, over 13335.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2953, pruned_loss=0.06796, over 2621128.44 frames. ], batch size: 40, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:22:42,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=126184.66666666667, ans=0.0 +2024-08-03 13:22:45,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=126184.66666666667, ans=0.1 +2024-08-03 13:22:51,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.52 vs. limit=10.0 +2024-08-03 13:23:12,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=126294.66666666667, ans=0.125 +2024-08-03 13:23:24,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=126331.33333333333, ans=0.2 +2024-08-03 13:23:29,224 INFO [train.py:1114] (2/4) Epoch 10, batch 1700, loss[loss=0.2006, simple_loss=0.2718, pruned_loss=0.0647, over 13248.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2958, pruned_loss=0.0681, over 2630128.61 frames. 
], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:23:39,070 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.220e+01 1.201e+02 1.479e+02 1.994e+02 3.572e+02, threshold=2.957e+02, percent-clipped=7.0 +2024-08-03 13:23:41,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=126404.66666666667, ans=0.0 +2024-08-03 13:23:45,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126404.66666666667, ans=0.1 +2024-08-03 13:23:50,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126441.33333333333, ans=0.0 +2024-08-03 13:23:54,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126441.33333333333, ans=0.0 +2024-08-03 13:24:01,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.93 vs. limit=22.5 +2024-08-03 13:24:04,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=126478.0, ans=0.0 +2024-08-03 13:24:14,864 INFO [train.py:1114] (2/4) Epoch 10, batch 1750, loss[loss=0.197, simple_loss=0.2678, pruned_loss=0.06308, over 13518.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2954, pruned_loss=0.06807, over 2633691.96 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:24:15,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=126551.33333333333, ans=0.0 +2024-08-03 13:24:15,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=126551.33333333333, ans=0.125 +2024-08-03 13:24:19,972 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.61 vs. limit=15.0 +2024-08-03 13:24:34,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126624.66666666667, ans=0.125 +2024-08-03 13:24:42,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. limit=6.0 +2024-08-03 13:24:48,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.23 vs. limit=6.0 +2024-08-03 13:25:01,908 INFO [train.py:1114] (2/4) Epoch 10, batch 1800, loss[loss=0.2172, simple_loss=0.3004, pruned_loss=0.06697, over 13543.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2954, pruned_loss=0.06753, over 2634295.51 frames. 
], batch size: 38, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:25:11,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126771.33333333333, ans=0.1 +2024-08-03 13:25:11,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.656e+01 1.185e+02 1.312e+02 1.554e+02 2.308e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 13:25:19,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=126808.0, ans=0.025 +2024-08-03 13:25:37,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=126844.66666666667, ans=0.2 +2024-08-03 13:25:49,047 INFO [train.py:1114] (2/4) Epoch 10, batch 1850, loss[loss=0.2272, simple_loss=0.3099, pruned_loss=0.07225, over 13399.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2945, pruned_loss=0.06685, over 2636413.39 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:25:54,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=126918.0, ans=0.125 +2024-08-03 13:25:55,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126918.0, ans=0.125 +2024-08-03 13:25:58,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126954.66666666667, ans=0.125 +2024-08-03 13:26:04,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=126954.66666666667, ans=0.125 +2024-08-03 13:26:16,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=127028.0, ans=0.0 +2024-08-03 13:26:23,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=127028.0, ans=0.0 +2024-08-03 13:26:25,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=127028.0, ans=0.0 +2024-08-03 13:26:33,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=127064.66666666667, ans=0.125 +2024-08-03 13:26:36,792 INFO [train.py:1114] (2/4) Epoch 10, batch 1900, loss[loss=0.2289, simple_loss=0.3103, pruned_loss=0.0738, over 13326.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2953, pruned_loss=0.06716, over 2639278.52 frames. 
], batch size: 40, lr: 1.25e-02, grad_scale: 16.0 +2024-08-03 13:26:49,384 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.604e+01 1.255e+02 1.783e+02 2.547e+02 3.918e+02, threshold=3.565e+02, percent-clipped=23.0 +2024-08-03 13:27:05,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=127211.33333333333, ans=0.125 +2024-08-03 13:27:08,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=127211.33333333333, ans=0.125 +2024-08-03 13:27:11,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=127211.33333333333, ans=0.2 +2024-08-03 13:27:19,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=127248.0, ans=0.125 +2024-08-03 13:27:24,286 INFO [train.py:1114] (2/4) Epoch 10, batch 1950, loss[loss=0.2127, simple_loss=0.3006, pruned_loss=0.06244, over 13569.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2959, pruned_loss=0.06694, over 2645964.60 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:27:47,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=127358.0, ans=0.0 +2024-08-03 13:27:55,689 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:28:11,432 INFO [train.py:1114] (2/4) Epoch 10, batch 2000, loss[loss=0.2089, simple_loss=0.2809, pruned_loss=0.06841, over 13532.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2968, pruned_loss=0.06773, over 2635138.30 frames. ], batch size: 31, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:28:22,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.471e+01 1.146e+02 1.313e+02 1.617e+02 2.483e+02, threshold=2.626e+02, percent-clipped=0.0 +2024-08-03 13:28:23,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.86 vs. limit=15.0 +2024-08-03 13:28:27,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.55 vs. limit=15.0 +2024-08-03 13:28:31,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127541.33333333333, ans=0.125 +2024-08-03 13:28:42,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=127578.0, ans=0.0 +2024-08-03 13:28:59,001 INFO [train.py:1114] (2/4) Epoch 10, batch 2050, loss[loss=0.1804, simple_loss=0.2594, pruned_loss=0.05065, over 13423.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.295, pruned_loss=0.06706, over 2632125.70 frames. ], batch size: 32, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:02,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=127651.33333333333, ans=0.025 +2024-08-03 13:29:03,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.80 vs. limit=15.0 +2024-08-03 13:29:06,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.75 vs. 
limit=15.0 +2024-08-03 13:29:11,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.33 vs. limit=15.0 +2024-08-03 13:29:16,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=127724.66666666667, ans=0.125 +2024-08-03 13:29:34,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127798.0, ans=0.1 +2024-08-03 13:29:43,992 INFO [train.py:1114] (2/4) Epoch 10, batch 2100, loss[loss=0.1932, simple_loss=0.2716, pruned_loss=0.05744, over 13556.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2934, pruned_loss=0.06626, over 2638166.67 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:44,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=127834.66666666667, ans=0.125 +2024-08-03 13:29:50,678 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.565e-03 +2024-08-03 13:29:56,756 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.808e+01 1.235e+02 1.463e+02 1.746e+02 3.043e+02, threshold=2.927e+02, percent-clipped=3.0 +2024-08-03 13:30:04,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=127908.0, ans=0.125 +2024-08-03 13:30:13,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127908.0, ans=0.1 +2024-08-03 13:30:14,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127944.66666666667, ans=0.125 +2024-08-03 13:30:28,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=127981.33333333333, ans=0.0 +2024-08-03 13:30:33,151 INFO [train.py:1114] (2/4) Epoch 10, batch 2150, loss[loss=0.2093, simple_loss=0.2887, pruned_loss=0.06501, over 13551.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2929, pruned_loss=0.0661, over 2646864.08 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:30:34,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=128018.0, ans=0.125 +2024-08-03 13:30:39,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=128018.0, ans=0.07 +2024-08-03 13:30:42,306 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:30:54,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128091.33333333333, ans=0.1 +2024-08-03 13:30:58,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.42 vs. 
limit=12.0 +2024-08-03 13:31:06,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=128128.0, ans=0.025 +2024-08-03 13:31:07,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128128.0, ans=0.125 +2024-08-03 13:31:14,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128164.66666666667, ans=0.1 +2024-08-03 13:31:18,221 INFO [train.py:1114] (2/4) Epoch 10, batch 2200, loss[loss=0.2057, simple_loss=0.2929, pruned_loss=0.05928, over 13402.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2929, pruned_loss=0.06609, over 2644929.60 frames. ], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:31:18,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128201.33333333333, ans=0.1 +2024-08-03 13:31:21,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=128201.33333333333, ans=0.125 +2024-08-03 13:31:28,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=128238.0, ans=0.125 +2024-08-03 13:31:29,283 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.267e+02 1.540e+02 2.050e+02 4.140e+02, threshold=3.080e+02, percent-clipped=6.0 +2024-08-03 13:31:34,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=128238.0, ans=0.2 +2024-08-03 13:31:40,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=128274.66666666667, ans=0.2 +2024-08-03 13:31:45,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=128274.66666666667, ans=0.0 +2024-08-03 13:31:47,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=128311.33333333333, ans=0.025 +2024-08-03 13:31:51,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128311.33333333333, ans=0.125 +2024-08-03 13:32:01,990 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:32:05,478 INFO [train.py:1114] (2/4) Epoch 10, batch 2250, loss[loss=0.1763, simple_loss=0.2692, pruned_loss=0.04167, over 13361.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2926, pruned_loss=0.06564, over 2642080.52 frames. 
], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:32:06,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=128384.66666666667, ans=0.2 +2024-08-03 13:32:09,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=128384.66666666667, ans=0.125 +2024-08-03 13:32:10,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=128384.66666666667, ans=0.025 +2024-08-03 13:32:12,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=128384.66666666667, ans=0.125 +2024-08-03 13:32:26,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128458.0, ans=0.1 +2024-08-03 13:32:51,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=15.0 +2024-08-03 13:32:52,564 INFO [train.py:1114] (2/4) Epoch 10, batch 2300, loss[loss=0.1865, simple_loss=0.2636, pruned_loss=0.05471, over 13591.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2919, pruned_loss=0.06549, over 2638181.42 frames. ], batch size: 33, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:32:58,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128568.0, ans=0.1 +2024-08-03 13:32:59,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=128568.0, ans=10.0 +2024-08-03 13:33:04,015 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.537e+01 1.182e+02 1.366e+02 1.663e+02 2.762e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 13:33:09,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=128641.33333333333, ans=0.125 +2024-08-03 13:33:13,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=128641.33333333333, ans=0.0 +2024-08-03 13:33:16,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=15.37 vs. 
limit=15.0 +2024-08-03 13:33:17,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=128641.33333333333, ans=0.0 +2024-08-03 13:33:20,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=128678.0, ans=0.04949747468305833 +2024-08-03 13:33:22,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=128678.0, ans=0.0 +2024-08-03 13:33:22,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=128678.0, ans=0.125 +2024-08-03 13:33:26,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=128678.0, ans=0.0 +2024-08-03 13:33:26,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=128678.0, ans=0.125 +2024-08-03 13:33:29,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=128714.66666666667, ans=0.125 +2024-08-03 13:33:39,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=128751.33333333333, ans=0.2 +2024-08-03 13:33:39,782 INFO [train.py:1114] (2/4) Epoch 10, batch 2350, loss[loss=0.2032, simple_loss=0.2946, pruned_loss=0.05594, over 13541.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.292, pruned_loss=0.06534, over 2640857.99 frames. ], batch size: 38, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:33:47,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=128751.33333333333, ans=0.95 +2024-08-03 13:33:56,713 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=12.0 +2024-08-03 13:34:09,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=128861.33333333333, ans=0.125 +2024-08-03 13:34:09,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128861.33333333333, ans=0.125 +2024-08-03 13:34:10,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=128861.33333333333, ans=0.0 +2024-08-03 13:34:10,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128861.33333333333, ans=0.1 +2024-08-03 13:34:18,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=128898.0, ans=0.125 +2024-08-03 13:34:27,032 INFO [train.py:1114] (2/4) Epoch 10, batch 2400, loss[loss=0.1748, simple_loss=0.2651, pruned_loss=0.04227, over 13535.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2929, pruned_loss=0.06572, over 2641967.43 frames. 
], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:34:39,020 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.606e+01 1.219e+02 1.512e+02 2.010e+02 3.572e+02, threshold=3.023e+02, percent-clipped=5.0 +2024-08-03 13:34:41,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=128971.33333333333, ans=0.125 +2024-08-03 13:34:41,600 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.79 vs. limit=12.0 +2024-08-03 13:34:42,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128971.33333333333, ans=0.1 +2024-08-03 13:34:47,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.83 vs. limit=22.5 +2024-08-03 13:35:11,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=129081.33333333333, ans=0.025 +2024-08-03 13:35:12,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=129118.0, ans=0.0 +2024-08-03 13:35:13,314 INFO [train.py:1114] (2/4) Epoch 10, batch 2450, loss[loss=0.2166, simple_loss=0.301, pruned_loss=0.06611, over 13348.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2947, pruned_loss=0.06703, over 2632023.12 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:35:17,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=129118.0, ans=0.125 +2024-08-03 13:35:18,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.16 vs. limit=15.0 +2024-08-03 13:35:36,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=129191.33333333333, ans=0.025 +2024-08-03 13:35:41,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129228.0, ans=0.1 +2024-08-03 13:35:42,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=129228.0, ans=0.0 +2024-08-03 13:35:43,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129228.0, ans=0.1 +2024-08-03 13:35:52,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=129264.66666666667, ans=0.125 +2024-08-03 13:36:00,204 INFO [train.py:1114] (2/4) Epoch 10, batch 2500, loss[loss=0.2432, simple_loss=0.3235, pruned_loss=0.08143, over 13399.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2944, pruned_loss=0.06685, over 2636015.71 frames. 
], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:36:06,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129301.33333333333, ans=0.0 +2024-08-03 13:36:06,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=129301.33333333333, ans=0.0 +2024-08-03 13:36:11,366 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.968e+01 1.203e+02 1.318e+02 1.532e+02 2.282e+02, threshold=2.635e+02, percent-clipped=0.0 +2024-08-03 13:36:45,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=129484.66666666667, ans=0.125 +2024-08-03 13:36:45,874 INFO [train.py:1114] (2/4) Epoch 10, batch 2550, loss[loss=0.172, simple_loss=0.2457, pruned_loss=0.04916, over 13518.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2938, pruned_loss=0.06616, over 2637635.56 frames. ], batch size: 31, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:36:46,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129484.66666666667, ans=0.125 +2024-08-03 13:36:49,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=129484.66666666667, ans=0.0 +2024-08-03 13:36:52,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129484.66666666667, ans=0.1 +2024-08-03 13:36:58,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129521.33333333333, ans=0.125 +2024-08-03 13:37:02,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129558.0, ans=0.125 +2024-08-03 13:37:03,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129558.0, ans=0.0 +2024-08-03 13:37:06,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=129558.0, ans=0.125 +2024-08-03 13:37:12,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129594.66666666667, ans=0.1 +2024-08-03 13:37:18,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=129594.66666666667, ans=0.125 +2024-08-03 13:37:24,245 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.045e-02 +2024-08-03 13:37:26,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=129631.33333333333, ans=0.125 +2024-08-03 13:37:29,298 INFO [train.py:1114] (2/4) Epoch 10, batch 2600, loss[loss=0.2284, simple_loss=0.3041, pruned_loss=0.07637, over 13561.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.295, pruned_loss=0.06669, over 2637090.64 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:37:30,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.11 vs. 
limit=6.0 +2024-08-03 13:37:40,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=129704.66666666667, ans=0.125 +2024-08-03 13:37:40,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.809e+01 1.167e+02 1.488e+02 1.878e+02 3.119e+02, threshold=2.976e+02, percent-clipped=4.0 +2024-08-03 13:37:41,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=129704.66666666667, ans=0.0 +2024-08-03 13:38:05,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129814.66666666667, ans=0.1 +2024-08-03 13:38:12,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=129851.33333333333, ans=0.2 +2024-08-03 13:38:13,139 INFO [train.py:1114] (2/4) Epoch 10, batch 2650, loss[loss=0.2586, simple_loss=0.3297, pruned_loss=0.09372, over 13315.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2951, pruned_loss=0.06688, over 2640667.29 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:38:21,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129888.0, ans=0.125 +2024-08-03 13:38:27,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=129888.0, ans=0.0 +2024-08-03 13:38:33,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=129924.66666666667, ans=0.125 +2024-08-03 13:38:48,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=129998.0, ans=0.0 +2024-08-03 13:38:57,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=130034.66666666667, ans=10.0 +2024-08-03 13:38:57,881 INFO [train.py:1114] (2/4) Epoch 10, batch 2700, loss[loss=0.2052, simple_loss=0.2893, pruned_loss=0.06053, over 13543.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2954, pruned_loss=0.06681, over 2637283.92 frames. 
], batch size: 40, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:39:03,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=130034.66666666667, ans=0.125 +2024-08-03 13:39:05,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=130034.66666666667, ans=0.125 +2024-08-03 13:39:10,502 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.234e+02 1.365e+02 1.791e+02 3.628e+02, threshold=2.731e+02, percent-clipped=1.0 +2024-08-03 13:39:22,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=130108.0, ans=0.2 +2024-08-03 13:39:24,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=130144.66666666667, ans=0.125 +2024-08-03 13:39:28,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130144.66666666667, ans=0.125 +2024-08-03 13:39:31,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130144.66666666667, ans=0.0 +2024-08-03 13:39:40,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=130181.33333333333, ans=0.125 +2024-08-03 13:39:42,465 INFO [train.py:1114] (2/4) Epoch 10, batch 2750, loss[loss=0.2027, simple_loss=0.2854, pruned_loss=0.06004, over 13332.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2937, pruned_loss=0.0662, over 2635580.61 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 16.0 +2024-08-03 13:40:03,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=130291.33333333333, ans=0.125 +2024-08-03 13:40:05,546 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.12 vs. limit=22.5 +2024-08-03 13:40:06,120 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:40:11,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=130328.0, ans=0.025 +2024-08-03 13:40:15,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=130328.0, ans=0.035 +2024-08-03 13:40:19,550 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=12.0 +2024-08-03 13:40:22,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=130364.66666666667, ans=0.2 +2024-08-03 13:40:26,209 INFO [train.py:1114] (2/4) Epoch 10, batch 2800, loss[loss=0.285, simple_loss=0.3456, pruned_loss=0.1122, over 9582.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2947, pruned_loss=0.06674, over 2627391.83 frames. ], batch size: 96, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:40:31,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. 
limit=15.0 +2024-08-03 13:40:38,303 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.179e+02 1.306e+02 1.650e+02 3.137e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 13:41:06,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=130548.0, ans=0.0 +2024-08-03 13:41:09,744 INFO [train.py:1114] (2/4) Epoch 10, batch 2850, loss[loss=0.1986, simple_loss=0.2739, pruned_loss=0.0616, over 13368.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2953, pruned_loss=0.06721, over 2621316.28 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:41:20,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=130621.33333333333, ans=0.125 +2024-08-03 13:41:22,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=130621.33333333333, ans=0.125 +2024-08-03 13:41:30,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=130658.0, ans=0.125 +2024-08-03 13:41:33,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130658.0, ans=0.1 +2024-08-03 13:41:37,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.58 vs. limit=10.0 +2024-08-03 13:41:52,752 INFO [train.py:1114] (2/4) Epoch 10, batch 2900, loss[loss=0.2143, simple_loss=0.2895, pruned_loss=0.06954, over 13363.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2965, pruned_loss=0.06747, over 2632092.68 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:41:56,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130768.0, ans=0.1 +2024-08-03 13:42:05,051 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.521e+01 1.253e+02 1.591e+02 2.001e+02 4.136e+02, threshold=3.182e+02, percent-clipped=6.0 +2024-08-03 13:42:22,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130878.0, ans=0.125 +2024-08-03 13:42:33,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=130914.66666666667, ans=0.125 +2024-08-03 13:42:35,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-08-03 13:42:36,429 INFO [train.py:1114] (2/4) Epoch 10, batch 2950, loss[loss=0.2077, simple_loss=0.2839, pruned_loss=0.0658, over 13311.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2952, pruned_loss=0.0674, over 2630505.79 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:42:36,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=130951.33333333333, ans=0.0 +2024-08-03 13:42:40,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.49 vs. 
limit=15.0 +2024-08-03 13:43:01,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.52 vs. limit=10.0 +2024-08-03 13:43:19,188 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:43:22,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131061.33333333333, ans=0.125 +2024-08-03 13:43:30,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=131098.0, ans=0.0 +2024-08-03 13:43:37,098 INFO [train.py:1114] (2/4) Epoch 10, batch 3000, loss[loss=0.2184, simple_loss=0.2922, pruned_loss=0.07225, over 13551.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2944, pruned_loss=0.06683, over 2629936.61 frames. ], batch size: 37, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:43:37,099 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 13:43:49,245 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2807, pruned_loss=0.03945, over 944034.00 frames. +2024-08-03 13:43:49,246 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 13:43:51,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=131134.66666666666, ans=0.025 +2024-08-03 13:43:55,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=131134.66666666666, ans=0.0 +2024-08-03 13:44:02,237 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.869e+01 1.161e+02 1.330e+02 1.562e+02 2.944e+02, threshold=2.661e+02, percent-clipped=0.0 +2024-08-03 13:44:09,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131208.0, ans=0.1 +2024-08-03 13:44:16,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131208.0, ans=0.125 +2024-08-03 13:44:20,309 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:44:25,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=131244.66666666666, ans=0.125 +2024-08-03 13:44:43,929 INFO [train.py:1114] (2/4) Epoch 10, batch 3050, loss[loss=0.2016, simple_loss=0.2856, pruned_loss=0.05881, over 13541.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2957, pruned_loss=0.06742, over 2626407.25 frames. 
], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:44:45,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=131318.0, ans=0.125 +2024-08-03 13:45:13,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=131428.0, ans=0.125 +2024-08-03 13:45:24,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=131464.66666666666, ans=0.125 +2024-08-03 13:45:25,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=131464.66666666666, ans=0.0 +2024-08-03 13:45:27,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131464.66666666666, ans=0.1 +2024-08-03 13:45:27,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=131464.66666666666, ans=0.125 +2024-08-03 13:45:30,516 INFO [train.py:1114] (2/4) Epoch 10, batch 3100, loss[loss=0.2051, simple_loss=0.2933, pruned_loss=0.05845, over 13304.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2946, pruned_loss=0.06703, over 2625986.21 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:45:33,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=131501.33333333334, ans=0.1 +2024-08-03 13:45:37,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=131501.33333333334, ans=0.125 +2024-08-03 13:45:41,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.45 vs. limit=15.0 +2024-08-03 13:45:43,066 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.582e+01 1.186e+02 1.379e+02 1.722e+02 2.702e+02, threshold=2.757e+02, percent-clipped=2.0 +2024-08-03 13:46:00,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.86 vs. limit=15.0 +2024-08-03 13:46:00,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131611.33333333334, ans=0.1 +2024-08-03 13:46:07,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=131648.0, ans=0.05 +2024-08-03 13:46:19,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=131648.0, ans=15.0 +2024-08-03 13:46:24,094 INFO [train.py:1114] (2/4) Epoch 10, batch 3150, loss[loss=0.2507, simple_loss=0.3275, pruned_loss=0.08691, over 13014.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2943, pruned_loss=0.06675, over 2627198.67 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:46:36,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.86 vs. 
limit=15.0 +2024-08-03 13:46:43,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=131721.33333333334, ans=0.0 +2024-08-03 13:46:48,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=131758.0, ans=0.125 +2024-08-03 13:46:57,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=131794.66666666666, ans=0.025 +2024-08-03 13:46:58,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.14 vs. limit=22.5 +2024-08-03 13:47:02,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.24 vs. limit=15.0 +2024-08-03 13:47:10,945 INFO [train.py:1114] (2/4) Epoch 10, batch 3200, loss[loss=0.2596, simple_loss=0.3308, pruned_loss=0.09423, over 13548.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2943, pruned_loss=0.06668, over 2633692.89 frames. ], batch size: 37, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:47:22,875 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.174e+02 1.311e+02 1.747e+02 3.069e+02, threshold=2.622e+02, percent-clipped=2.0 +2024-08-03 13:47:44,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=131978.0, ans=0.0 +2024-08-03 13:47:51,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.02 vs. limit=22.5 +2024-08-03 13:47:55,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=132051.33333333334, ans=0.0 +2024-08-03 13:47:55,718 INFO [train.py:1114] (2/4) Epoch 10, batch 3250, loss[loss=0.2476, simple_loss=0.3291, pruned_loss=0.08302, over 13391.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2946, pruned_loss=0.06647, over 2638086.30 frames. ], batch size: 38, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:47:56,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=132051.33333333334, ans=0.125 +2024-08-03 13:48:10,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=132088.0, ans=0.125 +2024-08-03 13:48:11,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132088.0, ans=0.1 +2024-08-03 13:48:16,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132124.66666666666, ans=0.1 +2024-08-03 13:48:29,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=132198.0, ans=0.125 +2024-08-03 13:48:30,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=132198.0, ans=0.0 +2024-08-03 13:48:38,141 INFO [train.py:1114] (2/4) Epoch 10, batch 3300, loss[loss=0.2428, simple_loss=0.3214, pruned_loss=0.08209, over 12862.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2935, pruned_loss=0.06632, over 2640306.71 frames. 
], batch size: 52, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:48:50,933 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.252e+01 1.284e+02 1.634e+02 2.035e+02 3.075e+02, threshold=3.268e+02, percent-clipped=7.0 +2024-08-03 13:48:52,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=132271.33333333334, ans=0.125 +2024-08-03 13:48:53,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=132271.33333333334, ans=10.0 +2024-08-03 13:48:58,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=132308.0, ans=0.2 +2024-08-03 13:49:14,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.67 vs. limit=22.5 +2024-08-03 13:49:21,122 INFO [train.py:1114] (2/4) Epoch 10, batch 3350, loss[loss=0.2303, simple_loss=0.3072, pruned_loss=0.07673, over 13043.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2952, pruned_loss=0.06741, over 2629482.95 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:49:22,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=132418.0, ans=15.0 +2024-08-03 13:49:38,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=132491.33333333334, ans=0.125 +2024-08-03 13:49:40,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=132491.33333333334, ans=0.125 +2024-08-03 13:49:41,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=132491.33333333334, ans=0.125 +2024-08-03 13:49:51,209 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.63 vs. limit=15.0 +2024-08-03 13:49:58,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132564.66666666666, ans=0.1 +2024-08-03 13:50:01,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=132564.66666666666, ans=0.0 +2024-08-03 13:50:03,845 INFO [train.py:1114] (2/4) Epoch 10, batch 3400, loss[loss=0.2006, simple_loss=0.2672, pruned_loss=0.067, over 13535.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.295, pruned_loss=0.06731, over 2625178.04 frames. ], batch size: 31, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:50:09,988 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:50:10,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=132601.33333333334, ans=0.0 +2024-08-03 13:50:11,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. 
limit=12.0
+2024-08-03 13:50:15,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=132638.0, ans=0.0
+2024-08-03 13:50:16,691 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.511e+01 1.188e+02 1.377e+02 1.704e+02 3.995e+02, threshold=2.755e+02, percent-clipped=1.0
+2024-08-03 13:50:16,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=132638.0, ans=0.2
+2024-08-03 13:50:24,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0
+2024-08-03 13:50:29,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.44 vs. limit=15.0
+2024-08-03 13:50:36,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.95 vs. limit=22.5
+2024-08-03 13:50:41,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.52 vs. limit=15.0
+2024-08-03 13:50:47,221 INFO [train.py:1114] (2/4) Epoch 10, batch 3450, loss[loss=0.2106, simple_loss=0.2951, pruned_loss=0.06301, over 12849.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2944, pruned_loss=0.06694, over 2627772.41 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:50:50,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132784.66666666666, ans=0.1
+2024-08-03 13:50:56,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.59 vs. limit=10.0
+2024-08-03 13:51:03,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=132858.0, ans=0.125
+2024-08-03 13:51:16,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.09 vs. limit=15.0
+2024-08-03 13:51:17,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=132894.66666666666, ans=0.125
+2024-08-03 13:51:20,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=132894.66666666666, ans=0.0
+2024-08-03 13:51:25,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=132931.33333333334, ans=0.125
+2024-08-03 13:51:27,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=132931.33333333334, ans=0.05
+2024-08-03 13:51:30,949 INFO [train.py:1114] (2/4) Epoch 10, batch 3500, loss[loss=0.2074, simple_loss=0.275, pruned_loss=0.06996, over 13535.00 frames. ], tot_loss[loss=0.213, simple_loss=0.293, pruned_loss=0.06651, over 2629586.25 frames. ], batch size: 34, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:51:41,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133004.66666666666, ans=0.0
+2024-08-03 13:51:41,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=133004.66666666666, ans=0.95
+2024-08-03 13:51:43,628 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.579e+01 1.188e+02 1.502e+02 1.811e+02 2.689e+02, threshold=3.004e+02, percent-clipped=0.0
+2024-08-03 13:51:51,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.07 vs. limit=22.5
+2024-08-03 13:51:53,145 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 13:52:05,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=12.0
+2024-08-03 13:52:13,033 INFO [train.py:1114] (2/4) Epoch 10, batch 3550, loss[loss=0.2461, simple_loss=0.3199, pruned_loss=0.08612, over 12416.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.296, pruned_loss=0.06809, over 2627163.72 frames. ], batch size: 58, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:52:15,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=133151.33333333334, ans=0.125
+2024-08-03 13:52:18,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133151.33333333334, ans=0.1
+2024-08-03 13:52:20,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=133188.0, ans=10.0
+2024-08-03 13:52:23,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=133188.0, ans=0.125
+2024-08-03 13:52:28,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=133188.0, ans=0.125
+2024-08-03 13:52:36,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133224.66666666666, ans=0.0
+2024-08-03 13:52:36,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=133224.66666666666, ans=0.0
+2024-08-03 13:52:56,944 INFO [train.py:1114] (2/4) Epoch 10, batch 3600, loss[loss=0.2743, simple_loss=0.3353, pruned_loss=0.1066, over 9664.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3015, pruned_loss=0.07329, over 2483806.58 frames. ], batch size: 97, lr: 1.22e-02, grad_scale: 16.0
+2024-08-03 13:52:57,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=133334.66666666666, ans=0.0
+2024-08-03 13:53:02,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.85 vs. limit=6.0
+2024-08-03 13:53:05,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133371.33333333334, ans=0.0
+2024-08-03 13:53:06,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=133371.33333333334, ans=0.0
+2024-08-03 13:53:10,853 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.160e+02 1.272e+02 1.395e+02 1.858e+02, threshold=2.544e+02, percent-clipped=0.0
+2024-08-03 13:53:11,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=133371.33333333334, ans=0.95
+2024-08-03 13:53:13,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=133408.0, ans=0.04949747468305833
+2024-08-03 13:53:24,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=133444.66666666666, ans=0.2
+2024-08-03 13:55:05,757 INFO [train.py:1114] (2/4) Epoch 11, batch 0, loss[loss=0.196, simple_loss=0.2758, pruned_loss=0.05809, over 13341.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2758, pruned_loss=0.05809, over 13341.00 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:55:05,757 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 13:55:17,609 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1876, simple_loss=0.2878, pruned_loss=0.04367, over 944034.00 frames.
+2024-08-03 13:55:17,610 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 13:55:35,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=133554.66666666666, ans=0.0
+2024-08-03 13:55:36,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=133554.66666666666, ans=0.125
+2024-08-03 13:55:37,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133554.66666666666, ans=0.1
+2024-08-03 13:55:37,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=133554.66666666666, ans=0.125
+2024-08-03 13:55:50,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=133591.33333333334, ans=0.125
+2024-08-03 13:56:03,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=133628.0, ans=0.025
+2024-08-03 13:56:05,956 INFO [train.py:1114] (2/4) Epoch 11, batch 50, loss[loss=0.1813, simple_loss=0.2608, pruned_loss=0.05091, over 13427.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.296, pruned_loss=0.06816, over 578152.55 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:56:15,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=133664.66666666666, ans=0.125
+2024-08-03 13:56:15,675 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=12.0
+2024-08-03 13:56:19,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.65 vs. limit=15.0
+2024-08-03 13:56:21,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133701.33333333334, ans=0.0
+2024-08-03 13:56:31,308 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.198e+02 1.313e+02 1.584e+02 3.827e+02, threshold=2.627e+02, percent-clipped=3.0
+2024-08-03 13:56:32,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=133738.0, ans=0.95
+2024-08-03 13:56:36,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=133774.66666666666, ans=0.125
+2024-08-03 13:56:41,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=133774.66666666666, ans=0.0
+2024-08-03 13:56:53,152 INFO [train.py:1114] (2/4) Epoch 11, batch 100, loss[loss=0.2, simple_loss=0.2783, pruned_loss=0.06088, over 13538.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2966, pruned_loss=0.0672, over 1028121.72 frames. ], batch size: 35, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:57:05,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=133884.66666666666, ans=0.2
+2024-08-03 13:57:56,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=133958.0, ans=0.025
+2024-08-03 13:58:13,268 INFO [train.py:1114] (2/4) Epoch 11, batch 150, loss[loss=0.1906, simple_loss=0.2586, pruned_loss=0.06132, over 13406.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2931, pruned_loss=0.06539, over 1388880.96 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:58:22,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=134068.0, ans=0.2
+2024-08-03 13:58:36,663 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.130e+02 1.367e+02 1.649e+02 2.945e+02, threshold=2.733e+02, percent-clipped=2.0
+2024-08-03 13:58:36,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134104.66666666666, ans=0.0
+2024-08-03 13:58:44,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134141.33333333334, ans=0.0
+2024-08-03 13:58:51,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=134178.0, ans=0.025
+2024-08-03 13:58:53,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=134178.0, ans=0.125
+2024-08-03 13:58:58,431 INFO [train.py:1114] (2/4) Epoch 11, batch 200, loss[loss=0.2161, simple_loss=0.2944, pruned_loss=0.06888, over 12380.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2915, pruned_loss=0.06458, over 1666570.45 frames. ], batch size: 58, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 13:59:17,301 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:00:27,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=134361.33333333334, ans=0.2
+2024-08-03 14:00:37,212 INFO [train.py:1114] (2/4) Epoch 11, batch 250, loss[loss=0.1935, simple_loss=0.2962, pruned_loss=0.04537, over 13299.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2914, pruned_loss=0.06421, over 1885249.56 frames. ], batch size: 46, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:00:48,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=134434.66666666666, ans=0.0
+2024-08-03 14:00:49,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=134434.66666666666, ans=0.125
+2024-08-03 14:00:54,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134471.33333333334, ans=0.125
+2024-08-03 14:00:58,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134471.33333333334, ans=0.1
+2024-08-03 14:01:00,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.508e+01 1.185e+02 1.387e+02 1.656e+02 4.049e+02, threshold=2.774e+02, percent-clipped=1.0
+2024-08-03 14:01:15,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=134544.66666666666, ans=0.125
+2024-08-03 14:01:22,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=134544.66666666666, ans=0.0
+2024-08-03 14:01:23,616 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.90 vs. limit=22.5
+2024-08-03 14:01:25,008 INFO [train.py:1114] (2/4) Epoch 11, batch 300, loss[loss=0.214, simple_loss=0.3009, pruned_loss=0.0635, over 13452.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2903, pruned_loss=0.06375, over 2052302.54 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:01:43,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134654.66666666666, ans=0.1
+2024-08-03 14:02:11,939 INFO [train.py:1114] (2/4) Epoch 11, batch 350, loss[loss=0.1604, simple_loss=0.2435, pruned_loss=0.03862, over 13567.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2911, pruned_loss=0.06385, over 2182617.25 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:02:37,147 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.915e+01 1.230e+02 1.440e+02 1.763e+02 3.166e+02, threshold=2.879e+02, percent-clipped=2.0
+2024-08-03 14:02:40,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134874.66666666666, ans=0.125
+2024-08-03 14:02:46,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.40 vs. limit=15.0
+2024-08-03 14:02:56,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.28 vs. limit=15.0
+2024-08-03 14:02:58,880 INFO [train.py:1114] (2/4) Epoch 11, batch 400, loss[loss=0.2215, simple_loss=0.2996, pruned_loss=0.07169, over 13353.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2909, pruned_loss=0.06391, over 2287212.79 frames. ], batch size: 37, lr: 1.16e-02, grad_scale: 32.0
+2024-08-03 14:02:59,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=134948.0, ans=0.0
+2024-08-03 14:03:04,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.96 vs. limit=10.0
+2024-08-03 14:03:25,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=135058.0, ans=0.05
+2024-08-03 14:03:35,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.54 vs. limit=15.0
+2024-08-03 14:03:41,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=135094.66666666666, ans=0.05
+2024-08-03 14:03:43,648 INFO [train.py:1114] (2/4) Epoch 11, batch 450, loss[loss=0.2196, simple_loss=0.3067, pruned_loss=0.0662, over 13548.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2912, pruned_loss=0.06431, over 2359067.41 frames. ], batch size: 38, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:03:45,539 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:03:52,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=135131.33333333334, ans=0.2
+2024-08-03 14:03:57,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=135168.0, ans=0.125
+2024-08-03 14:04:02,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=135204.66666666666, ans=0.125
+2024-08-03 14:04:08,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.113e+02 1.253e+02 1.576e+02 3.089e+02, threshold=2.506e+02, percent-clipped=1.0
+2024-08-03 14:04:12,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.92 vs. limit=10.0
+2024-08-03 14:04:19,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=135241.33333333334, ans=0.025
+2024-08-03 14:04:30,225 INFO [train.py:1114] (2/4) Epoch 11, batch 500, loss[loss=0.2464, simple_loss=0.3195, pruned_loss=0.08659, over 13393.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.29, pruned_loss=0.0638, over 2424595.70 frames. ], batch size: 43, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:05:08,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=135461.33333333334, ans=0.2
+2024-08-03 14:05:11,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=135461.33333333334, ans=0.2
+2024-08-03 14:05:17,626 INFO [train.py:1114] (2/4) Epoch 11, batch 550, loss[loss=0.2154, simple_loss=0.3021, pruned_loss=0.06437, over 13234.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2897, pruned_loss=0.06358, over 2467536.60 frames. ], batch size: 49, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:05:31,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=135534.66666666666, ans=0.2
+2024-08-03 14:05:44,377 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.929e+01 1.236e+02 1.527e+02 1.937e+02 2.923e+02, threshold=3.054e+02, percent-clipped=2.0
+2024-08-03 14:05:50,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135608.0, ans=0.0
+2024-08-03 14:06:07,518 INFO [train.py:1114] (2/4) Epoch 11, batch 600, loss[loss=0.2337, simple_loss=0.3207, pruned_loss=0.07335, over 13348.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2894, pruned_loss=0.06331, over 2508022.93 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:06:09,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=135681.33333333334, ans=0.0
+2024-08-03 14:06:11,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.44 vs. limit=5.0
+2024-08-03 14:06:11,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.92 vs. limit=15.0
+2024-08-03 14:06:23,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=15.0
+2024-08-03 14:06:39,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135791.33333333334, ans=0.1
+2024-08-03 14:06:40,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=135791.33333333334, ans=0.2
+2024-08-03 14:06:40,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.85 vs. limit=15.0
+2024-08-03 14:06:52,173 INFO [train.py:1114] (2/4) Epoch 11, batch 650, loss[loss=0.2035, simple_loss=0.2935, pruned_loss=0.05678, over 13565.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2894, pruned_loss=0.06322, over 2543237.56 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:06:54,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135864.66666666666, ans=0.1
+2024-08-03 14:07:16,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0
+2024-08-03 14:07:16,372 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.852e+01 1.178e+02 1.481e+02 2.104e+02 3.972e+02, threshold=2.962e+02, percent-clipped=10.0
+2024-08-03 14:07:16,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=135938.0, ans=0.2
+2024-08-03 14:07:29,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=136011.33333333334, ans=0.0
+2024-08-03 14:07:38,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136048.0, ans=0.125
+2024-08-03 14:07:39,639 INFO [train.py:1114] (2/4) Epoch 11, batch 700, loss[loss=0.1919, simple_loss=0.2731, pruned_loss=0.05531, over 13558.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2897, pruned_loss=0.06326, over 2565365.56 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:07:41,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=136048.0, ans=0.2
+2024-08-03 14:07:52,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=136084.66666666666, ans=0.2
+2024-08-03 14:08:05,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=136121.33333333334, ans=0.0
+2024-08-03 14:08:05,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136121.33333333334, ans=0.1
+2024-08-03 14:08:25,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136194.66666666666, ans=0.1
+2024-08-03 14:08:26,739 INFO [train.py:1114] (2/4) Epoch 11, batch 750, loss[loss=0.1842, simple_loss=0.2675, pruned_loss=0.05043, over 13368.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2892, pruned_loss=0.06311, over 2582468.44 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:08:28,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=136231.33333333334, ans=0.025
+2024-08-03 14:08:36,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.54 vs. limit=15.0
+2024-08-03 14:08:37,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136268.0, ans=0.125
+2024-08-03 14:08:37,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.17 vs. limit=12.0
+2024-08-03 14:08:44,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136304.66666666666, ans=0.125
+2024-08-03 14:08:51,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.398e+01 1.179e+02 1.348e+02 1.712e+02 2.826e+02, threshold=2.695e+02, percent-clipped=0.0
+2024-08-03 14:08:55,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136341.33333333334, ans=0.1
+2024-08-03 14:08:58,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136341.33333333334, ans=0.125
+2024-08-03 14:08:59,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0
+2024-08-03 14:09:02,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136378.0, ans=0.1
+2024-08-03 14:09:03,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=136378.0, ans=0.0
+2024-08-03 14:09:04,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=12.0
+2024-08-03 14:09:06,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=136378.0, ans=0.0
+2024-08-03 14:09:12,258 INFO [train.py:1114] (2/4) Epoch 11, batch 800, loss[loss=0.1655, simple_loss=0.2512, pruned_loss=0.03989, over 13368.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2895, pruned_loss=0.06334, over 2597424.63 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 32.0
+2024-08-03 14:09:12,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136414.66666666666, ans=0.125
+2024-08-03 14:09:16,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=136414.66666666666, ans=10.0
+2024-08-03 14:09:20,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=136414.66666666666, ans=0.0
+2024-08-03 14:09:27,026 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:09:32,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=136451.33333333334, ans=0.125
+2024-08-03 14:09:52,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=15.0
+2024-08-03 14:10:01,227 INFO [train.py:1114] (2/4) Epoch 11, batch 850, loss[loss=0.2183, simple_loss=0.3013, pruned_loss=0.06768, over 13332.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2898, pruned_loss=0.06362, over 2610066.95 frames. ], batch size: 40, lr: 1.15e-02, grad_scale: 16.0
+2024-08-03 14:10:04,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136598.0, ans=0.1
+2024-08-03 14:10:09,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=136634.66666666666, ans=0.125
+2024-08-03 14:10:13,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=136634.66666666666, ans=0.1
+2024-08-03 14:10:25,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=136671.33333333334, ans=0.2
+2024-08-03 14:10:26,151 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.536e+01 1.181e+02 1.335e+02 1.644e+02 2.754e+02, threshold=2.669e+02, percent-clipped=1.0
+2024-08-03 14:10:32,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=136708.0, ans=0.0
+2024-08-03 14:10:38,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=136744.66666666666, ans=0.125
+2024-08-03 14:10:41,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=136744.66666666666, ans=0.125
+2024-08-03 14:10:42,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136744.66666666666, ans=0.1
+2024-08-03 14:10:46,280 INFO [train.py:1114] (2/4) Epoch 11, batch 900, loss[loss=0.2164, simple_loss=0.2859, pruned_loss=0.07341, over 13334.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2901, pruned_loss=0.06386, over 2612050.77 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:10:52,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.82 vs. limit=15.0
+2024-08-03 14:11:07,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=136854.66666666666, ans=0.125
+2024-08-03 14:11:13,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=15.0
+2024-08-03 14:11:33,345 INFO [train.py:1114] (2/4) Epoch 11, batch 950, loss[loss=0.1718, simple_loss=0.2558, pruned_loss=0.04395, over 13532.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2898, pruned_loss=0.06322, over 2613713.49 frames. ], batch size: 34, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:11:33,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.01 vs. limit=15.0
+2024-08-03 14:11:35,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=136964.66666666666, ans=0.125
+2024-08-03 14:11:38,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=136964.66666666666, ans=0.125
+2024-08-03 14:11:38,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.89 vs. limit=15.0
+2024-08-03 14:11:40,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=6.0
+2024-08-03 14:11:42,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.19 vs. limit=6.0
+2024-08-03 14:11:55,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=137038.0, ans=0.2
+2024-08-03 14:12:02,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.31 vs. limit=15.0
+2024-08-03 14:12:02,581 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.938e+01 1.284e+02 1.520e+02 1.871e+02 3.091e+02, threshold=3.040e+02, percent-clipped=3.0
+2024-08-03 14:12:10,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=137111.33333333334, ans=0.125
+2024-08-03 14:12:20,789 INFO [train.py:1114] (2/4) Epoch 11, batch 1000, loss[loss=0.2025, simple_loss=0.2797, pruned_loss=0.06266, over 13365.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2909, pruned_loss=0.06399, over 2612092.10 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:12:23,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=137148.0, ans=10.0
+2024-08-03 14:12:33,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=137184.66666666666, ans=0.0
+2024-08-03 14:12:44,520 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0
+2024-08-03 14:12:49,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=137258.0, ans=0.95
+2024-08-03 14:13:01,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=137294.66666666666, ans=0.2
+2024-08-03 14:13:05,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=137294.66666666666, ans=0.2
+2024-08-03 14:13:09,236 INFO [train.py:1114] (2/4) Epoch 11, batch 1050, loss[loss=0.2038, simple_loss=0.2939, pruned_loss=0.05689, over 13560.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2894, pruned_loss=0.06308, over 2615794.02 frames. ], batch size: 39, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:13:11,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=137331.33333333334, ans=0.0
+2024-08-03 14:13:13,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=137331.33333333334, ans=0.0
+2024-08-03 14:13:15,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=137331.33333333334, ans=0.2
+2024-08-03 14:13:15,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=137331.33333333334, ans=0.5
+2024-08-03 14:13:34,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=137404.66666666666, ans=0.125
+2024-08-03 14:13:36,246 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.744e+01 1.143e+02 1.275e+02 1.569e+02 2.169e+02, threshold=2.550e+02, percent-clipped=0.0
+2024-08-03 14:13:39,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137441.33333333334, ans=0.1
+2024-08-03 14:13:52,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=137478.0, ans=0.125
+2024-08-03 14:13:54,633 INFO [train.py:1114] (2/4) Epoch 11, batch 1100, loss[loss=0.2233, simple_loss=0.3042, pruned_loss=0.07117, over 13575.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2894, pruned_loss=0.06313, over 2619609.31 frames. ], batch size: 36, lr: 1.15e-02, grad_scale: 8.0
+2024-08-03 14:14:01,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=137514.66666666666, ans=0.125
+2024-08-03 14:14:08,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=137551.33333333334, ans=0.0
+2024-08-03 14:14:14,540 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:14:19,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0
+2024-08-03 14:14:24,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=137624.66666666666, ans=0.125
+2024-08-03 14:14:39,728 INFO [train.py:1114] (2/4) Epoch 11, batch 1150, loss[loss=0.2454, simple_loss=0.3205, pruned_loss=0.08519, over 13561.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.289, pruned_loss=0.06312, over 2618799.71 frames. ], batch size: 36, lr: 1.14e-02, grad_scale: 8.0
+2024-08-03 14:14:55,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=137734.66666666666, ans=0.125
+2024-08-03 14:15:09,308 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.634e+01 1.163e+02 1.278e+02 1.596e+02 2.243e+02, threshold=2.555e+02, percent-clipped=0.0
+2024-08-03 14:15:14,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137808.0, ans=0.1
+2024-08-03 14:15:16,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=137808.0, ans=0.125
+2024-08-03 14:15:29,799 INFO [train.py:1114] (2/4) Epoch 11, batch 1200, loss[loss=0.2401, simple_loss=0.3191, pruned_loss=0.0805, over 13562.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2903, pruned_loss=0.06372, over 2616302.66 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:15:35,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137881.33333333334, ans=0.1
+2024-08-03 14:15:40,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=137918.0, ans=0.0
+2024-08-03 14:15:42,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.64 vs. limit=22.5
+2024-08-03 14:15:45,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=137918.0, ans=0.125
+2024-08-03 14:15:48,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.03 vs. limit=15.0
+2024-08-03 14:16:02,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=137991.33333333334, ans=0.0
+2024-08-03 14:16:10,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.00 vs. limit=22.5
+2024-08-03 14:16:14,554 INFO [train.py:1114] (2/4) Epoch 11, batch 1250, loss[loss=0.2629, simple_loss=0.3397, pruned_loss=0.09303, over 13457.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2908, pruned_loss=0.06369, over 2627899.43 frames. ], batch size: 42, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:16:15,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=12.0
+2024-08-03 14:16:19,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=138064.66666666666, ans=0.125
+2024-08-03 14:16:26,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=138101.33333333334, ans=0.0
+2024-08-03 14:16:33,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.95 vs. limit=10.0
+2024-08-03 14:16:45,546 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.814e+01 1.117e+02 1.380e+02 1.651e+02 4.437e+02, threshold=2.760e+02, percent-clipped=2.0
+2024-08-03 14:16:46,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.76 vs. limit=12.0
+2024-08-03 14:17:03,498 INFO [train.py:1114] (2/4) Epoch 11, batch 1300, loss[loss=0.2254, simple_loss=0.3102, pruned_loss=0.07028, over 12927.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2905, pruned_loss=0.0636, over 2631576.35 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:17:05,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.40 vs. limit=15.0
+2024-08-03 14:17:15,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.95 vs. limit=15.0
+2024-08-03 14:17:19,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138284.66666666666, ans=0.125
+2024-08-03 14:17:22,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138321.33333333334, ans=0.1
+2024-08-03 14:17:38,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138358.0, ans=0.1
+2024-08-03 14:17:39,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=138394.66666666666, ans=0.125
+2024-08-03 14:17:48,828 INFO [train.py:1114] (2/4) Epoch 11, batch 1350, loss[loss=0.1816, simple_loss=0.2681, pruned_loss=0.04754, over 13559.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2901, pruned_loss=0.06351, over 2638457.45 frames. ], batch size: 37, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:17:52,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.02 vs. limit=15.0
+2024-08-03 14:17:58,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=138468.0, ans=0.2
+2024-08-03 14:17:59,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=138468.0, ans=0.0
+2024-08-03 14:18:09,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=138504.66666666666, ans=10.0
+2024-08-03 14:18:15,978 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.205e+02 1.432e+02 1.737e+02 2.785e+02, threshold=2.864e+02, percent-clipped=2.0
+2024-08-03 14:18:24,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=138541.33333333334, ans=0.0
+2024-08-03 14:18:25,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=138541.33333333334, ans=0.125
+2024-08-03 14:18:25,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=138541.33333333334, ans=0.0
+2024-08-03 14:18:26,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=138578.0, ans=0.125
+2024-08-03 14:18:27,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=138578.0, ans=0.125
+2024-08-03 14:18:28,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.78 vs. limit=22.5
+2024-08-03 14:18:33,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=15.0
+2024-08-03 14:18:36,198 INFO [train.py:1114] (2/4) Epoch 11, batch 1400, loss[loss=0.1941, simple_loss=0.2615, pruned_loss=0.06332, over 13256.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.29, pruned_loss=0.0638, over 2641987.25 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:18:40,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=138614.66666666666, ans=0.0
+2024-08-03 14:18:43,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=138614.66666666666, ans=0.0
+2024-08-03 14:18:43,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=138614.66666666666, ans=0.0
+2024-08-03 14:18:50,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.98 vs. limit=22.5
+2024-08-03 14:18:56,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138688.0, ans=0.1
+2024-08-03 14:19:04,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=138724.66666666666, ans=0.0
+2024-08-03 14:19:06,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.84 vs. limit=15.0
+2024-08-03 14:19:20,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=138761.33333333334, ans=0.125
+2024-08-03 14:19:23,023 INFO [train.py:1114] (2/4) Epoch 11, batch 1450, loss[loss=0.2095, simple_loss=0.3018, pruned_loss=0.05861, over 13400.00 frames. ], tot_loss[loss=0.21, simple_loss=0.291, pruned_loss=0.06449, over 2640383.41 frames. ], batch size: 43, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:19:26,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138798.0, ans=0.1
+2024-08-03 14:19:44,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138871.33333333334, ans=0.125
+2024-08-03 14:19:48,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.94 vs. limit=22.5
+2024-08-03 14:19:49,931 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.153e+02 1.341e+02 1.677e+02 2.779e+02, threshold=2.682e+02, percent-clipped=0.0
+2024-08-03 14:20:01,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.19 vs. limit=10.0
+2024-08-03 14:20:11,557 INFO [train.py:1114] (2/4) Epoch 11, batch 1500, loss[loss=0.2149, simple_loss=0.2998, pruned_loss=0.06499, over 13400.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2915, pruned_loss=0.06436, over 2640859.61 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:20:34,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.38 vs. limit=22.5
+2024-08-03 14:20:58,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-08-03 14:20:58,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=139091.33333333334, ans=0.2
+2024-08-03 14:21:10,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=12.0
+2024-08-03 14:21:12,169 INFO [train.py:1114] (2/4) Epoch 11, batch 1550, loss[loss=0.2031, simple_loss=0.2904, pruned_loss=0.05788, over 13392.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2912, pruned_loss=0.06419, over 2630290.67 frames. ], batch size: 41, lr: 1.14e-02, grad_scale: 16.0
+2024-08-03 14:21:19,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=139164.66666666666, ans=0.125
+2024-08-03 14:21:20,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0
+2024-08-03 14:21:29,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.05 vs. limit=22.5
+2024-08-03 14:21:39,465 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.226e+01 1.184e+02 1.477e+02 1.893e+02 3.709e+02, threshold=2.955e+02, percent-clipped=6.0
+2024-08-03 14:21:57,700 INFO [train.py:1114] (2/4) Epoch 11, batch 1600, loss[loss=0.2099, simple_loss=0.3013, pruned_loss=0.05926, over 13568.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2908, pruned_loss=0.06404, over 2623034.97 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 32.0
+2024-08-03 14:21:58,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=139348.0, ans=0.2
+2024-08-03 14:22:14,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=139384.66666666666, ans=0.0
+2024-08-03 14:22:19,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.54 vs. limit=15.0
+2024-08-03 14:22:21,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=139421.33333333334, ans=0.025
+2024-08-03 14:22:22,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139421.33333333334, ans=0.125
+2024-08-03 14:22:31,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139458.0, ans=0.1
+2024-08-03 14:22:34,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.96 vs. limit=6.0
+2024-08-03 14:22:36,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=139494.66666666666, ans=0.125
+2024-08-03 14:22:47,790 INFO [train.py:1114] (2/4) Epoch 11, batch 1650, loss[loss=0.1898, simple_loss=0.2828, pruned_loss=0.04839, over 13335.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2909, pruned_loss=0.0641, over 2619651.42 frames. ], batch size: 40, lr: 1.14e-02, grad_scale: 32.0
+2024-08-03 14:22:47,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=139531.33333333334, ans=0.2
+2024-08-03 14:22:50,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.59 vs. limit=12.0
+2024-08-03 14:22:51,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=139531.33333333334, ans=0.1
+2024-08-03 14:22:55,299 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:23:08,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.29 vs. limit=15.0
+2024-08-03 14:23:11,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139604.66666666666, ans=0.1
+2024-08-03 14:23:14,853 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.757e+01 1.241e+02 1.421e+02 1.904e+02 3.771e+02, threshold=2.842e+02, percent-clipped=2.0
+2024-08-03 14:23:32,931 INFO [train.py:1114] (2/4) Epoch 11, batch 1700, loss[loss=0.2352, simple_loss=0.2918, pruned_loss=0.0893, over 13244.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2905, pruned_loss=0.06369, over 2628549.18 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 32.0
+2024-08-03 14:23:38,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.64 vs. limit=15.0
+2024-08-03 14:23:54,750 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:23:59,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139788.0, ans=0.1
+2024-08-03 14:24:19,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=139898.0, ans=0.125
+2024-08-03 14:24:19,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=139898.0, ans=0.0
+2024-08-03 14:24:20,124 INFO [train.py:1114] (2/4) Epoch 11, batch 1750, loss[loss=0.1972, simple_loss=0.2667, pruned_loss=0.06383, over 13527.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2893, pruned_loss=0.06309, over 2632658.07 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 32.0
+2024-08-03 14:24:34,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.50 vs. limit=15.0
+2024-08-03 14:24:47,662 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.098e+01 1.175e+02 1.427e+02 2.048e+02 3.147e+02, threshold=2.855e+02, percent-clipped=3.0
+2024-08-03 14:24:48,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=140008.0, ans=0.025
+2024-08-03 14:24:54,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140008.0, ans=0.1
+2024-08-03 14:24:58,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.49 vs. limit=15.0
+2024-08-03 14:25:06,039 INFO [train.py:1114] (2/4) Epoch 11, batch 1800, loss[loss=0.2092, simple_loss=0.2948, pruned_loss=0.06182, over 13561.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2895, pruned_loss=0.06302, over 2633889.78 frames. ], batch size: 38, lr: 1.14e-02, grad_scale: 32.0
+2024-08-03 14:25:13,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.02 vs. limit=15.0
+2024-08-03 14:25:30,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=140118.0, ans=0.0
+2024-08-03 14:25:31,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140118.0, ans=0.125
+2024-08-03 14:25:31,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.24 vs. limit=22.5
+2024-08-03 14:25:35,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=140118.0, ans=0.0
+2024-08-03 14:26:06,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140228.0, ans=0.125
+2024-08-03 14:26:09,134 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.00 vs. limit=15.0
+2024-08-03 14:26:12,417 INFO [train.py:1114] (2/4) Epoch 11, batch 1850, loss[loss=0.2117, simple_loss=0.3018, pruned_loss=0.06086, over 13394.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2889, pruned_loss=0.06266, over 2636629.02 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 32.0
+2024-08-03 14:26:12,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140264.66666666666, ans=0.125
+2024-08-03 14:26:25,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=140301.33333333334, ans=0.125
+2024-08-03 14:26:25,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0
+2024-08-03 14:26:30,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=140338.0, ans=0.0
+2024-08-03 14:26:37,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140338.0, ans=0.125
+2024-08-03 14:26:40,995 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.650e+01 1.194e+02 1.468e+02 2.041e+02 3.479e+02, threshold=2.936e+02, percent-clipped=2.0
+2024-08-03 14:27:02,737 INFO [train.py:1114] (2/4) Epoch 11, batch 1900, loss[loss=0.2064, simple_loss=0.3015, pruned_loss=0.05568, over 13320.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2895, pruned_loss=0.06297, over 2639598.43 frames. ], batch size: 40, lr: 1.13e-02, grad_scale: 32.0
+2024-08-03 14:27:21,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=140521.33333333334, ans=0.0
+2024-08-03 14:27:23,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140521.33333333334, ans=0.125
+2024-08-03 14:27:49,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0
+2024-08-03 14:27:51,873 INFO [train.py:1114] (2/4) Epoch 11, batch 1950, loss[loss=0.2264, simple_loss=0.2953, pruned_loss=0.07869, over 13561.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.291, pruned_loss=0.06322, over 2646587.82 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 32.0
+2024-08-03 14:28:09,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140704.66666666666, ans=0.125
+2024-08-03 14:28:17,554 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:28:19,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.848e+01 1.152e+02 1.250e+02 1.577e+02 2.279e+02, threshold=2.500e+02, percent-clipped=0.0
+2024-08-03 14:28:37,496 INFO [train.py:1114] (2/4) Epoch 11, batch 2000, loss[loss=0.1886, simple_loss=0.2645, pruned_loss=0.05635, over 13500.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2915, pruned_loss=0.06384, over 2635985.85 frames. ], batch size: 31, lr: 1.13e-02, grad_scale: 32.0
+2024-08-03 14:28:50,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=140851.33333333334, ans=0.04949747468305833
+2024-08-03 14:28:58,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=140888.0, ans=0.0
+2024-08-03 14:29:03,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.70 vs. limit=10.0
+2024-08-03 14:29:08,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=140924.66666666666, ans=0.125
+2024-08-03 14:29:09,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.01 vs. limit=15.0
+2024-08-03 14:29:10,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=140924.66666666666, ans=0.0
+2024-08-03 14:29:13,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=140961.33333333334, ans=0.0
+2024-08-03 14:29:13,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140961.33333333334, ans=0.125
+2024-08-03 14:29:16,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140961.33333333334, ans=0.1
+2024-08-03 14:29:23,049 INFO [train.py:1114] (2/4) Epoch 11, batch 2050, loss[loss=0.1865, simple_loss=0.2615, pruned_loss=0.05575, over 13424.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2904, pruned_loss=0.06373, over 2632302.43 frames. ], batch size: 32, lr: 1.13e-02, grad_scale: 32.0
+2024-08-03 14:29:30,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.29 vs. limit=22.5
+2024-08-03 14:29:32,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=141034.66666666666, ans=0.0
+2024-08-03 14:29:32,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141034.66666666666, ans=0.1
+2024-08-03 14:29:52,685 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.175e+02 1.416e+02 1.784e+02 2.828e+02, threshold=2.832e+02, percent-clipped=4.0
+2024-08-03 14:29:59,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=141108.0, ans=0.0
+2024-08-03 14:29:59,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0
+2024-08-03 14:29:59,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=141144.66666666666, ans=0.0
+2024-08-03 14:30:09,689 INFO [train.py:1114] (2/4) Epoch 11, batch 2100, loss[loss=0.2101, simple_loss=0.3018, pruned_loss=0.05916, over 13538.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2904, pruned_loss=0.06378, over 2638280.30 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0
+2024-08-03 14:30:16,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=141181.33333333334, ans=0.2
+2024-08-03 14:30:26,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=141218.0, ans=0.0
+2024-08-03 14:30:27,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=141218.0, ans=0.0
+2024-08-03 14:30:47,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0
+2024-08-03 14:30:54,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.06 vs. limit=12.0
+2024-08-03 14:30:56,298 INFO [train.py:1114] (2/4) Epoch 11, batch 2150, loss[loss=0.1868, simple_loss=0.2738, pruned_loss=0.04989, over 13550.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2892, pruned_loss=0.06296, over 2647103.52 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 16.0
+2024-08-03 14:31:03,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=12.0
+2024-08-03 14:31:18,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=141438.0, ans=0.2
+2024-08-03 14:31:27,805 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.207e+02 1.412e+02 1.929e+02 3.002e+02, threshold=2.825e+02, percent-clipped=1.0
+2024-08-03 14:31:32,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=141474.66666666666, ans=0.0
+2024-08-03 14:31:44,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.27 vs. limit=15.0
+2024-08-03 14:31:45,168 INFO [train.py:1114] (2/4) Epoch 11, batch 2200, loss[loss=0.2109, simple_loss=0.2999, pruned_loss=0.06094, over 13413.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2891, pruned_loss=0.0629, over 2644378.26 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 16.0
+2024-08-03 14:31:56,308 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 14:32:11,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=141658.0, ans=0.0
+2024-08-03 14:32:18,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=141658.0, ans=0.0
+2024-08-03 14:32:28,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=141694.66666666666, ans=0.125
+2024-08-03 14:32:30,721 INFO [train.py:1114] (2/4) Epoch 11, batch 2250, loss[loss=0.1836, simple_loss=0.2695, pruned_loss=0.04891, over 13357.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2886, pruned_loss=0.06249, over 2642439.73 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0
+2024-08-03 14:32:55,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=141804.66666666666, ans=0.125
+2024-08-03 14:32:58,698 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.445e+01 1.221e+02 1.460e+02 1.800e+02 3.358e+02, threshold=2.920e+02, percent-clipped=4.0
+2024-08-03 14:33:04,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=141841.33333333334, ans=0.2
+2024-08-03 14:33:04,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=15.0
+2024-08-03 14:33:05,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=141841.33333333334, ans=0.025
+2024-08-03 14:33:15,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0
+2024-08-03 14:33:16,159 INFO [train.py:1114] (2/4) Epoch 11, batch 2300, loss[loss=0.1942, simple_loss=0.2661, pruned_loss=0.0611, over 13571.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2878, pruned_loss=0.06235, over 2639153.31 frames. ], batch size: 33, lr: 1.13e-02, grad_scale: 16.0
+2024-08-03 14:33:18,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.61 vs. limit=22.5
+2024-08-03 14:33:32,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=141951.33333333334, ans=0.0
+2024-08-03 14:33:40,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0
+2024-08-03 14:33:43,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=141988.0, ans=0.125
+2024-08-03 14:34:05,405 INFO [train.py:1114] (2/4) Epoch 11, batch 2350, loss[loss=0.195, simple_loss=0.2852, pruned_loss=0.05245, over 13554.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2881, pruned_loss=0.06233, over 2641476.67 frames. ], batch size: 38, lr: 1.13e-02, grad_scale: 16.0
+2024-08-03 14:34:12,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.28 vs. limit=15.0
+2024-08-03 14:34:16,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142134.66666666666, ans=0.125
+2024-08-03 14:34:22,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=142134.66666666666, ans=0.125
+2024-08-03 14:34:34,198 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.779e+01 1.150e+02 1.456e+02 1.792e+02 2.996e+02, threshold=2.912e+02, percent-clipped=1.0
+2024-08-03 14:34:52,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0
+2024-08-03 14:34:55,297 INFO [train.py:1114] (2/4) Epoch 11, batch 2400, loss[loss=0.2072, simple_loss=0.2866, pruned_loss=0.06388, over 13528.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2891, pruned_loss=0.06262, over 2643308.23 frames. ], batch size: 35, lr: 1.13e-02, grad_scale: 32.0
+2024-08-03 14:34:55,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=142281.33333333334, ans=0.125
+2024-08-03 14:34:58,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142281.33333333334, ans=0.0
+2024-08-03 14:35:01,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142281.33333333334, ans=0.1
+2024-08-03 14:35:06,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142318.0, ans=0.1
+2024-08-03 14:35:08,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142318.0, ans=0.1
+2024-08-03 14:35:23,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142391.33333333334, ans=0.1
+2024-08-03 14:35:27,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142391.33333333334, ans=0.1
+2024-08-03 14:35:28,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=142391.33333333334, ans=0.125
+2024-08-03 14:35:29,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=142391.33333333334, ans=0.0
+2024-08-03 14:35:29,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142391.33333333334, ans=0.1
+2024-08-03 14:35:40,808 INFO [train.py:1114] (2/4) Epoch 11, batch 2450, loss[loss=0.2278, simple_loss=0.3121, pruned_loss=0.07175, over 13354.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2904, pruned_loss=0.06318, over 2633242.49 frames. 
], batch size: 37, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:35:43,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=142464.66666666666, ans=0.025 +2024-08-03 14:35:48,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=142464.66666666666, ans=0.0 +2024-08-03 14:36:08,680 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.765e+01 1.220e+02 1.473e+02 1.922e+02 3.559e+02, threshold=2.946e+02, percent-clipped=1.0 +2024-08-03 14:36:21,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.35 vs. limit=10.0 +2024-08-03 14:36:24,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=142648.0, ans=0.5 +2024-08-03 14:36:25,797 INFO [train.py:1114] (2/4) Epoch 11, batch 2500, loss[loss=0.2388, simple_loss=0.3151, pruned_loss=0.08127, over 13409.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2902, pruned_loss=0.06312, over 2637268.57 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:36:42,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0 +2024-08-03 14:36:51,841 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:36:54,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=142758.0, ans=10.0 +2024-08-03 14:37:08,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=142831.33333333334, ans=0.0 +2024-08-03 14:37:09,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=142831.33333333334, ans=0.2 +2024-08-03 14:37:09,695 INFO [train.py:1114] (2/4) Epoch 11, batch 2550, loss[loss=0.1995, simple_loss=0.2715, pruned_loss=0.06379, over 13543.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2903, pruned_loss=0.06316, over 2638699.95 frames. ], batch size: 31, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:37:11,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.35 vs. 
limit=10.0 +2024-08-03 14:37:26,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=142904.66666666666, ans=0.025 +2024-08-03 14:37:36,293 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.364e+01 1.185e+02 1.436e+02 1.900e+02 4.163e+02, threshold=2.872e+02, percent-clipped=5.0 +2024-08-03 14:37:42,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=142941.33333333334, ans=0.2 +2024-08-03 14:37:42,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=142941.33333333334, ans=0.125 +2024-08-03 14:37:52,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=142978.0, ans=0.5 +2024-08-03 14:37:54,711 INFO [train.py:1114] (2/4) Epoch 11, batch 2600, loss[loss=0.1775, simple_loss=0.2627, pruned_loss=0.04609, over 13554.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2902, pruned_loss=0.06292, over 2638548.65 frames. ], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:37:58,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=143014.66666666666, ans=0.125 +2024-08-03 14:38:07,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=15.0 +2024-08-03 14:38:18,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.09 vs. limit=22.5 +2024-08-03 14:38:26,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=143124.66666666666, ans=0.125 +2024-08-03 14:38:38,298 INFO [train.py:1114] (2/4) Epoch 11, batch 2650, loss[loss=0.2578, simple_loss=0.3305, pruned_loss=0.09256, over 13308.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2907, pruned_loss=0.06345, over 2641605.30 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:38:44,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=143198.0, ans=0.125 +2024-08-03 14:38:53,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.23 vs. limit=15.0 +2024-08-03 14:38:55,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=143271.33333333334, ans=0.0 +2024-08-03 14:39:06,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=143308.0, ans=0.05 +2024-08-03 14:39:07,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.944e+01 1.197e+02 1.327e+02 1.649e+02 2.749e+02, threshold=2.654e+02, percent-clipped=0.0 +2024-08-03 14:39:10,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=143308.0, ans=0.0 +2024-08-03 14:39:23,393 INFO [train.py:1114] (2/4) Epoch 11, batch 2700, loss[loss=0.2007, simple_loss=0.2848, pruned_loss=0.05827, over 13537.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2908, pruned_loss=0.06365, over 2637314.66 frames. 
], batch size: 40, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:39:23,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143381.33333333334, ans=0.125 +2024-08-03 14:39:35,790 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:39:38,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=143418.0, ans=0.2 +2024-08-03 14:39:41,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.97 vs. limit=22.5 +2024-08-03 14:39:43,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=143454.66666666666, ans=0.125 +2024-08-03 14:39:49,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=143491.33333333334, ans=0.125 +2024-08-03 14:39:53,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=143491.33333333334, ans=0.125 +2024-08-03 14:40:06,618 INFO [train.py:1114] (2/4) Epoch 11, batch 2750, loss[loss=0.2146, simple_loss=0.293, pruned_loss=0.06803, over 13319.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2886, pruned_loss=0.06312, over 2634812.84 frames. ], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:10,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.51 vs. limit=15.0 +2024-08-03 14:40:19,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=143601.33333333334, ans=0.125 +2024-08-03 14:40:24,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.30 vs. limit=10.0 +2024-08-03 14:40:31,097 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.38 vs. 
limit=22.5 +2024-08-03 14:40:31,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=143674.66666666666, ans=0.125 +2024-08-03 14:40:34,880 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.875e+01 1.140e+02 1.438e+02 1.760e+02 3.626e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 14:40:35,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143674.66666666666, ans=0.0 +2024-08-03 14:40:41,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143711.33333333334, ans=0.125 +2024-08-03 14:40:41,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=143711.33333333334, ans=0.125 +2024-08-03 14:40:41,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=143711.33333333334, ans=0.0 +2024-08-03 14:40:42,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143711.33333333334, ans=0.125 +2024-08-03 14:40:51,365 INFO [train.py:1114] (2/4) Epoch 11, batch 2800, loss[loss=0.2817, simple_loss=0.3329, pruned_loss=0.1153, over 9356.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2889, pruned_loss=0.06331, over 2626536.77 frames. ], batch size: 96, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:53,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=143748.0, ans=0.125 +2024-08-03 14:41:08,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=143821.33333333334, ans=0.0 +2024-08-03 14:41:08,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143821.33333333334, ans=0.125 +2024-08-03 14:41:11,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=143821.33333333334, ans=0.025 +2024-08-03 14:41:13,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=143821.33333333334, ans=0.125 +2024-08-03 14:41:22,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=143858.0, ans=0.125 +2024-08-03 14:41:36,392 INFO [train.py:1114] (2/4) Epoch 11, batch 2850, loss[loss=0.171, simple_loss=0.259, pruned_loss=0.04146, over 13375.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2902, pruned_loss=0.06361, over 2620585.63 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:41:38,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143931.33333333334, ans=0.1 +2024-08-03 14:41:43,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143931.33333333334, ans=0.1 +2024-08-03 14:41:43,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.43 vs. 
limit=15.0 +2024-08-03 14:42:02,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=144041.33333333334, ans=0.025 +2024-08-03 14:42:04,512 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.280e+01 1.169e+02 1.350e+02 1.770e+02 2.759e+02, threshold=2.700e+02, percent-clipped=0.0 +2024-08-03 14:42:11,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=144078.0, ans=0.125 +2024-08-03 14:42:18,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=144114.66666666666, ans=0.125 +2024-08-03 14:42:18,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144114.66666666666, ans=0.1 +2024-08-03 14:42:19,096 INFO [train.py:1114] (2/4) Epoch 11, batch 2900, loss[loss=0.2039, simple_loss=0.2885, pruned_loss=0.05971, over 13343.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2916, pruned_loss=0.06375, over 2631218.57 frames. ], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:42:19,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144114.66666666666, ans=0.125 +2024-08-03 14:42:24,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=144114.66666666666, ans=0.0 +2024-08-03 14:42:33,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-08-03 14:42:37,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=144188.0, ans=0.0 +2024-08-03 14:42:39,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.68 vs. limit=15.0 +2024-08-03 14:42:44,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=144224.66666666666, ans=0.125 +2024-08-03 14:42:45,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=144224.66666666666, ans=0.2 +2024-08-03 14:42:51,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=144224.66666666666, ans=0.07 +2024-08-03 14:42:59,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-08-03 14:43:02,601 INFO [train.py:1114] (2/4) Epoch 11, batch 2950, loss[loss=0.1843, simple_loss=0.2674, pruned_loss=0.05064, over 13332.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2908, pruned_loss=0.06382, over 2630608.89 frames. 
], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:04,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144298.0, ans=0.125 +2024-08-03 14:43:16,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=144334.66666666666, ans=0.2 +2024-08-03 14:43:27,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=144371.33333333334, ans=0.025 +2024-08-03 14:43:31,428 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.788e+01 1.243e+02 1.438e+02 2.009e+02 3.771e+02, threshold=2.877e+02, percent-clipped=8.0 +2024-08-03 14:43:34,550 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.59 vs. limit=22.5 +2024-08-03 14:43:44,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=144444.66666666666, ans=0.125 +2024-08-03 14:43:46,213 INFO [train.py:1114] (2/4) Epoch 11, batch 3000, loss[loss=0.1865, simple_loss=0.2696, pruned_loss=0.05167, over 13555.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2902, pruned_loss=0.06365, over 2630177.18 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:46,213 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 14:43:56,412 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1797, simple_loss=0.2796, pruned_loss=0.03992, over 944034.00 frames. +2024-08-03 14:43:56,413 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 14:43:59,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=144481.33333333334, ans=0.0 +2024-08-03 14:44:03,854 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.26 vs. limit=22.5 +2024-08-03 14:44:06,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=144518.0, ans=0.125 +2024-08-03 14:44:09,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.64 vs. limit=6.0 +2024-08-03 14:44:09,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.45 vs. limit=15.0 +2024-08-03 14:44:13,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144554.66666666666, ans=0.125 +2024-08-03 14:44:39,729 INFO [train.py:1114] (2/4) Epoch 11, batch 3050, loss[loss=0.1755, simple_loss=0.2591, pruned_loss=0.04599, over 13532.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2908, pruned_loss=0.06382, over 2627376.84 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:44:39,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=144664.66666666666, ans=0.125 +2024-08-03 14:44:57,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.97 vs. 
limit=15.0 +2024-08-03 14:45:08,192 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.491e+01 1.118e+02 1.274e+02 1.524e+02 2.549e+02, threshold=2.548e+02, percent-clipped=0.0 +2024-08-03 14:45:11,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=144774.66666666666, ans=0.125 +2024-08-03 14:45:20,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-08-03 14:45:22,753 INFO [train.py:1114] (2/4) Epoch 11, batch 3100, loss[loss=0.2323, simple_loss=0.3131, pruned_loss=0.07575, over 13315.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.291, pruned_loss=0.06392, over 2627496.60 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:45:28,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.40 vs. limit=15.0 +2024-08-03 14:45:29,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=144848.0, ans=0.2 +2024-08-03 14:45:41,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=144921.33333333334, ans=0.2 +2024-08-03 14:45:42,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=144921.33333333334, ans=0.95 +2024-08-03 14:45:44,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144921.33333333334, ans=0.1 +2024-08-03 14:45:51,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=144958.0, ans=0.125 +2024-08-03 14:45:57,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.85 vs. limit=6.0 +2024-08-03 14:45:57,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=144994.66666666666, ans=0.125 +2024-08-03 14:46:04,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=144994.66666666666, ans=0.125 +2024-08-03 14:46:07,303 INFO [train.py:1114] (2/4) Epoch 11, batch 3150, loss[loss=0.2292, simple_loss=0.3069, pruned_loss=0.0757, over 13029.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2905, pruned_loss=0.06357, over 2629432.32 frames. 
], batch size: 48, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:46:36,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.294e+01 1.169e+02 1.363e+02 1.667e+02 3.402e+02, threshold=2.726e+02, percent-clipped=3.0 +2024-08-03 14:46:38,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=145141.33333333334, ans=0.125 +2024-08-03 14:46:48,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=145178.0, ans=0.125 +2024-08-03 14:46:50,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=145214.66666666666, ans=0.125 +2024-08-03 14:46:51,169 INFO [train.py:1114] (2/4) Epoch 11, batch 3200, loss[loss=0.2064, simple_loss=0.2847, pruned_loss=0.06401, over 13546.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2902, pruned_loss=0.06343, over 2634776.30 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:46:58,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=145214.66666666666, ans=0.125 +2024-08-03 14:47:12,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145288.0, ans=0.125 +2024-08-03 14:47:16,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0 +2024-08-03 14:47:23,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=145324.66666666666, ans=0.2 +2024-08-03 14:47:31,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=145361.33333333334, ans=0.2 +2024-08-03 14:47:35,719 INFO [train.py:1114] (2/4) Epoch 11, batch 3250, loss[loss=0.2242, simple_loss=0.3032, pruned_loss=0.07262, over 13388.00 frames. ], tot_loss[loss=0.208, simple_loss=0.29, pruned_loss=0.06305, over 2638859.76 frames. 
], batch size: 38, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:47:38,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=145398.0, ans=0.125 +2024-08-03 14:47:48,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=145434.66666666666, ans=0.0 +2024-08-03 14:47:50,301 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:47:58,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145471.33333333334, ans=0.125 +2024-08-03 14:48:04,044 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.409e+01 1.172e+02 1.394e+02 1.962e+02 3.481e+02, threshold=2.788e+02, percent-clipped=6.0 +2024-08-03 14:48:06,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145508.0, ans=0.1 +2024-08-03 14:48:10,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=145544.66666666666, ans=0.125 +2024-08-03 14:48:12,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145544.66666666666, ans=0.125 +2024-08-03 14:48:18,819 INFO [train.py:1114] (2/4) Epoch 11, batch 3300, loss[loss=0.2379, simple_loss=0.3134, pruned_loss=0.08119, over 12885.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2894, pruned_loss=0.06325, over 2640711.84 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:48:22,390 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:48:26,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=145618.0, ans=0.5 +2024-08-03 14:48:38,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.29 vs. limit=15.0 +2024-08-03 14:48:50,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=145691.33333333334, ans=0.5 +2024-08-03 14:48:52,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=145691.33333333334, ans=0.0 +2024-08-03 14:49:02,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-08-03 14:49:07,250 INFO [train.py:1114] (2/4) Epoch 11, batch 3350, loss[loss=0.2014, simple_loss=0.2886, pruned_loss=0.05709, over 13029.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2903, pruned_loss=0.06376, over 2630787.67 frames. 
], batch size: 48, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:49:14,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=145764.66666666666, ans=0.025 +2024-08-03 14:49:20,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=145801.33333333334, ans=0.125 +2024-08-03 14:49:35,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.62 vs. limit=10.0 +2024-08-03 14:49:48,374 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.672e+01 1.163e+02 1.273e+02 1.475e+02 2.297e+02, threshold=2.547e+02, percent-clipped=0.0 +2024-08-03 14:50:49,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=145911.33333333334, ans=0.0 +2024-08-03 14:50:56,095 INFO [train.py:1114] (2/4) Epoch 11, batch 3400, loss[loss=0.1712, simple_loss=0.2539, pruned_loss=0.04425, over 13539.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2904, pruned_loss=0.06398, over 2626273.59 frames. ], batch size: 31, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:51:01,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.52 vs. limit=15.0 +2024-08-03 14:51:21,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145984.66666666666, ans=0.1 +2024-08-03 14:51:24,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=15.0 +2024-08-03 14:51:34,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.75 vs. limit=22.5 +2024-08-03 14:51:39,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=146058.0, ans=0.125 +2024-08-03 14:51:40,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=146058.0, ans=0.0 +2024-08-03 14:51:45,264 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:51:57,625 INFO [train.py:1114] (2/4) Epoch 11, batch 3450, loss[loss=0.2105, simple_loss=0.298, pruned_loss=0.06147, over 12858.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2907, pruned_loss=0.06422, over 2628984.98 frames. 
], batch size: 52, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:52:00,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=146131.33333333334, ans=0.125 +2024-08-03 14:52:01,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=146131.33333333334, ans=0.0 +2024-08-03 14:52:09,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=146168.0, ans=0.2 +2024-08-03 14:52:09,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=146168.0, ans=0.0 +2024-08-03 14:52:18,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=146204.66666666666, ans=0.125 +2024-08-03 14:52:20,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=146204.66666666666, ans=0.125 +2024-08-03 14:52:24,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146204.66666666666, ans=0.125 +2024-08-03 14:52:28,425 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.083e+01 1.190e+02 1.396e+02 1.805e+02 2.896e+02, threshold=2.793e+02, percent-clipped=1.0 +2024-08-03 14:52:44,386 INFO [train.py:1114] (2/4) Epoch 11, batch 3500, loss[loss=0.1932, simple_loss=0.272, pruned_loss=0.05719, over 13537.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2898, pruned_loss=0.06393, over 2631408.99 frames. ], batch size: 34, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:52:51,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146314.66666666666, ans=0.125 +2024-08-03 14:52:59,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146351.33333333334, ans=0.1 +2024-08-03 14:52:59,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=15.0 +2024-08-03 14:53:14,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=146351.33333333334, ans=0.125 +2024-08-03 14:53:38,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=146388.0, ans=0.0 +2024-08-03 14:54:24,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146461.33333333334, ans=0.125 +2024-08-03 14:54:41,874 INFO [train.py:1114] (2/4) Epoch 11, batch 3550, loss[loss=0.2677, simple_loss=0.3401, pruned_loss=0.09763, over 12602.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2925, pruned_loss=0.06536, over 2628294.69 frames. ], batch size: 58, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:54:45,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.67 vs. 
limit=15.0 +2024-08-03 14:54:52,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=146534.66666666666, ans=0.025 +2024-08-03 14:54:54,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146534.66666666666, ans=0.125 +2024-08-03 14:55:10,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=146608.0, ans=0.2 +2024-08-03 14:55:10,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=146608.0, ans=0.125 +2024-08-03 14:55:11,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.96 vs. limit=15.0 +2024-08-03 14:55:11,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.206e+02 1.319e+02 1.556e+02 2.603e+02, threshold=2.638e+02, percent-clipped=0.0 +2024-08-03 14:55:28,464 INFO [train.py:1114] (2/4) Epoch 11, batch 3600, loss[loss=0.284, simple_loss=0.3473, pruned_loss=0.1104, over 9054.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2978, pruned_loss=0.07014, over 2487134.65 frames. ], batch size: 96, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:55:34,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146681.33333333334, ans=0.125 +2024-08-03 14:55:36,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=146718.0, ans=0.1 +2024-08-03 14:55:48,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.17 vs. limit=10.0 +2024-08-03 14:55:48,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=146754.66666666666, ans=0.2 +2024-08-03 14:55:53,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=146754.66666666666, ans=0.125 +2024-08-03 14:56:58,474 INFO [train.py:1114] (2/4) Epoch 12, batch 0, loss[loss=0.1959, simple_loss=0.2679, pruned_loss=0.06194, over 13334.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2679, pruned_loss=0.06194, over 13334.00 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:56:58,535 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 14:57:04,797 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.4419, 2.7539, 2.5724, 1.8896], device='cuda:2') +2024-08-03 14:57:09,820 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.1815, simple_loss=0.2827, pruned_loss=0.04015, over 944034.00 frames. +2024-08-03 14:57:09,821 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 14:57:11,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0 +2024-08-03 14:57:13,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.62 vs. 
limit=6.0 +2024-08-03 14:57:29,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146901.33333333334, ans=0.125 +2024-08-03 14:57:30,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=146901.33333333334, ans=0.125 +2024-08-03 14:57:48,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.34 vs. limit=15.0 +2024-08-03 14:57:49,738 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.252e+02 1.419e+02 1.568e+02 2.905e+02, threshold=2.838e+02, percent-clipped=2.0 +2024-08-03 14:57:55,161 INFO [train.py:1114] (2/4) Epoch 12, batch 50, loss[loss=0.1612, simple_loss=0.235, pruned_loss=0.04374, over 13416.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2944, pruned_loss=0.06617, over 577916.88 frames. ], batch size: 32, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:58:02,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=147011.33333333334, ans=0.025 +2024-08-03 14:58:05,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147048.0, ans=0.1 +2024-08-03 14:58:07,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=147048.0, ans=0.0 +2024-08-03 14:58:09,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147048.0, ans=0.125 +2024-08-03 14:58:22,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147121.33333333334, ans=0.125 +2024-08-03 14:58:40,615 INFO [train.py:1114] (2/4) Epoch 12, batch 100, loss[loss=0.2069, simple_loss=0.2841, pruned_loss=0.06485, over 13525.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2946, pruned_loss=0.06487, over 1025247.30 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:58:51,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=147231.33333333334, ans=22.5 +2024-08-03 14:59:05,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=147268.0, ans=0.0 +2024-08-03 14:59:22,269 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.865e+01 1.262e+02 1.549e+02 1.868e+02 3.478e+02, threshold=3.099e+02, percent-clipped=1.0 +2024-08-03 14:59:27,470 INFO [train.py:1114] (2/4) Epoch 12, batch 150, loss[loss=0.1824, simple_loss=0.2652, pruned_loss=0.04975, over 13420.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2892, pruned_loss=0.06133, over 1386096.44 frames. 
], batch size: 32, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:59:47,605 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:59:54,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147451.33333333334, ans=0.1 +2024-08-03 14:59:57,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=147488.0, ans=0.125 +2024-08-03 14:59:58,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=147488.0, ans=0.0 +2024-08-03 15:00:17,999 INFO [train.py:1114] (2/4) Epoch 12, batch 200, loss[loss=0.2349, simple_loss=0.313, pruned_loss=0.07833, over 12442.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2889, pruned_loss=0.06156, over 1665262.19 frames. ], batch size: 58, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:00:20,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=147561.33333333334, ans=0.0 +2024-08-03 15:00:23,930 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.67 vs. limit=22.5 +2024-08-03 15:00:57,968 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.496e+01 1.132e+02 1.278e+02 1.609e+02 2.884e+02, threshold=2.557e+02, percent-clipped=0.0 +2024-08-03 15:01:00,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=147708.0, ans=0.2 +2024-08-03 15:01:01,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=147708.0, ans=0.0 +2024-08-03 15:01:02,539 INFO [train.py:1114] (2/4) Epoch 12, batch 250, loss[loss=0.2183, simple_loss=0.3021, pruned_loss=0.06729, over 13341.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2883, pruned_loss=0.06131, over 1884789.02 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:01:17,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=147781.33333333334, ans=0.04949747468305833 +2024-08-03 15:01:23,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=147818.0, ans=0.0 +2024-08-03 15:01:23,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=147818.0, ans=0.125 +2024-08-03 15:01:30,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=147854.66666666666, ans=0.2 +2024-08-03 15:01:34,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=12.0 +2024-08-03 15:01:48,777 INFO [train.py:1114] (2/4) Epoch 12, batch 300, loss[loss=0.2285, simple_loss=0.305, pruned_loss=0.07594, over 13423.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.288, pruned_loss=0.06136, over 2051090.78 frames. 
], batch size: 42, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:01:55,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=147928.0, ans=0.125 +2024-08-03 15:02:13,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=148001.33333333334, ans=0.5 +2024-08-03 15:02:20,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148038.0, ans=0.125 +2024-08-03 15:02:24,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=148074.66666666666, ans=0.125 +2024-08-03 15:02:29,621 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.695e+01 1.130e+02 1.266e+02 1.669e+02 3.180e+02, threshold=2.531e+02, percent-clipped=2.0 +2024-08-03 15:02:34,182 INFO [train.py:1114] (2/4) Epoch 12, batch 350, loss[loss=0.2028, simple_loss=0.2755, pruned_loss=0.06504, over 13573.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2877, pruned_loss=0.06085, over 2182079.05 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:03:16,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=148258.0, ans=0.025 +2024-08-03 15:03:23,953 INFO [train.py:1114] (2/4) Epoch 12, batch 400, loss[loss=0.2287, simple_loss=0.3114, pruned_loss=0.07297, over 13349.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2864, pruned_loss=0.06026, over 2286798.21 frames. ], batch size: 37, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 15:03:44,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148368.0, ans=0.1 +2024-08-03 15:04:10,396 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.063e+01 1.155e+02 1.349e+02 1.691e+02 2.771e+02, threshold=2.698e+02, percent-clipped=3.0 +2024-08-03 15:04:28,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=148441.33333333334, ans=0.2 +2024-08-03 15:04:30,843 INFO [train.py:1114] (2/4) Epoch 12, batch 450, loss[loss=0.2214, simple_loss=0.3084, pruned_loss=0.06723, over 13552.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2865, pruned_loss=0.0604, over 2360247.79 frames. ], batch size: 38, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:04:36,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.45 vs. limit=22.5 +2024-08-03 15:04:37,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=148478.0, ans=0.07 +2024-08-03 15:05:15,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=148661.33333333334, ans=0.125 +2024-08-03 15:05:15,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148661.33333333334, ans=0.1 +2024-08-03 15:05:16,104 INFO [train.py:1114] (2/4) Epoch 12, batch 500, loss[loss=0.2421, simple_loss=0.3208, pruned_loss=0.08167, over 13430.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2856, pruned_loss=0.06007, over 2425225.99 frames. 
], batch size: 43, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:05:59,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.174e+02 1.357e+02 1.973e+02 3.338e+02, threshold=2.713e+02, percent-clipped=6.0
+2024-08-03 15:06:02,940 INFO [train.py:1114] (2/4) Epoch 12, batch 550, loss[loss=0.209, simple_loss=0.2969, pruned_loss=0.06049, over 12979.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2857, pruned_loss=0.05992, over 2467776.44 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:06:06,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=148844.66666666666, ans=0.0
+2024-08-03 15:06:42,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=148991.33333333334, ans=0.125
+2024-08-03 15:06:43,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0
+2024-08-03 15:06:50,051 INFO [train.py:1114] (2/4) Epoch 12, batch 600, loss[loss=0.2202, simple_loss=0.2983, pruned_loss=0.07102, over 13332.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2857, pruned_loss=0.05977, over 2507609.33 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0
+2024-08-03 15:06:50,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149028.0, ans=0.125
+2024-08-03 15:07:20,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=149138.0, ans=0.2
+2024-08-03 15:07:22,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=15.0
+2024-08-03 15:07:33,068 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.645e+01 1.231e+02 1.471e+02 1.906e+02 4.499e+02, threshold=2.942e+02, percent-clipped=14.0
+2024-08-03 15:07:36,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=12.0
+2024-08-03 15:07:36,525 INFO [train.py:1114] (2/4) Epoch 12, batch 650, loss[loss=0.1812, simple_loss=0.2669, pruned_loss=0.04772, over 13544.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2854, pruned_loss=0.05973, over 2542748.71 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:07:43,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=149211.33333333334, ans=0.0
+2024-08-03 15:07:45,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149211.33333333334, ans=0.125
+2024-08-03 15:07:46,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=149211.33333333334, ans=0.0
+2024-08-03 15:08:05,285 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:08:11,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=149321.33333333334, ans=0.2
+2024-08-03 15:08:17,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149358.0, ans=0.125
+2024-08-03 15:08:26,285 INFO [train.py:1114] (2/4) Epoch 12, batch 700, loss[loss=0.1984, simple_loss=0.2871, pruned_loss=0.05483, over 13537.00 frames. ], tot_loss[loss=0.203, simple_loss=0.286, pruned_loss=0.06001, over 2564974.55 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:08:44,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=149468.0, ans=0.0
+2024-08-03 15:08:52,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.69 vs. limit=10.0
+2024-08-03 15:09:07,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.082e+01 1.154e+02 1.315e+02 1.690e+02 3.404e+02, threshold=2.630e+02, percent-clipped=2.0
+2024-08-03 15:09:07,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149541.33333333334, ans=0.125
+2024-08-03 15:09:11,177 INFO [train.py:1114] (2/4) Epoch 12, batch 750, loss[loss=0.2141, simple_loss=0.3049, pruned_loss=0.06167, over 13357.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2854, pruned_loss=0.06006, over 2583043.77 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:09:18,799 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:09:23,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=149614.66666666666, ans=0.125
+2024-08-03 15:09:25,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149614.66666666666, ans=0.125
+2024-08-03 15:09:37,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.61 vs. limit=15.0
+2024-08-03 15:09:39,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=149688.0, ans=0.09899494936611666
+2024-08-03 15:09:40,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=149688.0, ans=0.05
+2024-08-03 15:09:56,675 INFO [train.py:1114] (2/4) Epoch 12, batch 800, loss[loss=0.1916, simple_loss=0.2685, pruned_loss=0.05734, over 13326.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2859, pruned_loss=0.06031, over 2597738.82 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:10:03,120 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:10:09,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149798.0, ans=0.125
+2024-08-03 15:10:17,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.07 vs. limit=6.0
+2024-08-03 15:10:41,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.822e+01 1.144e+02 1.285e+02 1.607e+02 2.448e+02, threshold=2.570e+02, percent-clipped=0.0
+2024-08-03 15:10:45,393 INFO [train.py:1114] (2/4) Epoch 12, batch 850, loss[loss=0.2053, simple_loss=0.3, pruned_loss=0.05532, over 13339.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2855, pruned_loss=0.06033, over 2610136.87 frames. ], batch size: 40, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:10:45,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=149944.66666666666, ans=0.0
+2024-08-03 15:11:17,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=150054.66666666666, ans=0.0
+2024-08-03 15:11:25,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=150091.33333333334, ans=0.125
+2024-08-03 15:11:34,312 INFO [train.py:1114] (2/4) Epoch 12, batch 900, loss[loss=0.1991, simple_loss=0.2766, pruned_loss=0.06078, over 13361.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2865, pruned_loss=0.06096, over 2612876.04 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:12:09,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150274.66666666666, ans=0.1
+2024-08-03 15:12:15,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.906e+01 1.165e+02 1.316e+02 1.823e+02 3.379e+02, threshold=2.632e+02, percent-clipped=3.0
+2024-08-03 15:12:19,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.50 vs. limit=12.0
+2024-08-03 15:12:19,461 INFO [train.py:1114] (2/4) Epoch 12, batch 950, loss[loss=0.1765, simple_loss=0.2626, pruned_loss=0.04522, over 13550.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2869, pruned_loss=0.06121, over 2613092.70 frames. ], batch size: 34, lr: 1.05e-02, grad_scale: 32.0
+2024-08-03 15:12:34,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=150348.0, ans=0.2
+2024-08-03 15:12:45,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=150384.66666666666, ans=10.0
+2024-08-03 15:12:48,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.94 vs. limit=10.0
+2024-08-03 15:12:51,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=150421.33333333334, ans=0.0
+2024-08-03 15:13:01,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=150458.0, ans=0.125
+2024-08-03 15:13:05,465 INFO [train.py:1114] (2/4) Epoch 12, batch 1000, loss[loss=0.1995, simple_loss=0.279, pruned_loss=0.06001, over 13355.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2877, pruned_loss=0.06148, over 2611571.32 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:13:16,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.49 vs. limit=15.0
+2024-08-03 15:13:23,143 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.670e-03
+2024-08-03 15:13:26,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=150568.0, ans=0.09899494936611666
+2024-08-03 15:13:40,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=150604.66666666666, ans=0.125
+2024-08-03 15:13:47,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.724e+01 1.124e+02 1.317e+02 1.509e+02 2.289e+02, threshold=2.634e+02, percent-clipped=0.0
+2024-08-03 15:13:49,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=150678.0, ans=0.07
+2024-08-03 15:13:52,459 INFO [train.py:1114] (2/4) Epoch 12, batch 1050, loss[loss=0.1929, simple_loss=0.2921, pruned_loss=0.04687, over 13586.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2868, pruned_loss=0.06109, over 2615730.78 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:13:54,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=150678.0, ans=0.125
+2024-08-03 15:14:01,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=150714.66666666666, ans=12.0
+2024-08-03 15:14:03,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150714.66666666666, ans=0.125
+2024-08-03 15:14:16,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=150751.33333333334, ans=0.025
+2024-08-03 15:14:18,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.82 vs. limit=22.5
+2024-08-03 15:14:20,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=15.0
+2024-08-03 15:14:28,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150788.0, ans=0.1
+2024-08-03 15:14:29,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.84 vs. limit=22.5
+2024-08-03 15:14:39,483 INFO [train.py:1114] (2/4) Epoch 12, batch 1100, loss[loss=0.1933, simple_loss=0.2819, pruned_loss=0.05233, over 13557.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2873, pruned_loss=0.0616, over 2619712.36 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:14:51,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=150898.0, ans=0.0
+2024-08-03 15:15:15,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=150971.33333333334, ans=0.07
+2024-08-03 15:15:23,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=151008.0, ans=0.125
+2024-08-03 15:15:24,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=151008.0, ans=0.09899494936611666
+2024-08-03 15:15:26,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.180e+02 1.359e+02 1.591e+02 2.320e+02, threshold=2.719e+02, percent-clipped=0.0
+2024-08-03 15:15:27,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=151008.0, ans=0.04949747468305833
+2024-08-03 15:15:29,094 INFO [train.py:1114] (2/4) Epoch 12, batch 1150, loss[loss=0.1997, simple_loss=0.276, pruned_loss=0.06167, over 13579.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2876, pruned_loss=0.0618, over 2619256.94 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:15:37,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=151044.66666666666, ans=0.0
+2024-08-03 15:15:47,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=151118.0, ans=0.2
+2024-08-03 15:15:51,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=151118.0, ans=0.125
+2024-08-03 15:16:15,300 INFO [train.py:1114] (2/4) Epoch 12, batch 1200, loss[loss=0.2406, simple_loss=0.3266, pruned_loss=0.07725, over 13580.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2884, pruned_loss=0.06196, over 2616055.46 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:16:22,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=151228.0, ans=0.1
+2024-08-03 15:16:34,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.78 vs. limit=15.0
+2024-08-03 15:16:47,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=151338.0, ans=0.2
+2024-08-03 15:16:50,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0
+2024-08-03 15:16:50,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.48 vs. limit=15.0
+2024-08-03 15:16:58,390 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.940e+01 1.145e+02 1.336e+02 1.664e+02 3.085e+02, threshold=2.672e+02, percent-clipped=3.0
+2024-08-03 15:17:00,264 INFO [train.py:1114] (2/4) Epoch 12, batch 1250, loss[loss=0.2339, simple_loss=0.3086, pruned_loss=0.07956, over 13459.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2884, pruned_loss=0.0615, over 2628576.89 frames. ], batch size: 42, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:17:07,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=151411.33333333334, ans=0.2
+2024-08-03 15:17:08,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.11 vs. limit=6.0
+2024-08-03 15:17:09,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.56 vs. limit=22.5
+2024-08-03 15:17:17,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=151448.0, ans=10.0
+2024-08-03 15:17:31,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151521.33333333334, ans=0.125
+2024-08-03 15:17:39,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151558.0, ans=0.125
+2024-08-03 15:17:49,356 INFO [train.py:1114] (2/4) Epoch 12, batch 1300, loss[loss=0.2312, simple_loss=0.3188, pruned_loss=0.07183, over 12907.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2875, pruned_loss=0.06145, over 2630686.90 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:17:54,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0
+2024-08-03 15:18:00,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=151631.33333333334, ans=0.125
+2024-08-03 15:18:17,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=151704.66666666666, ans=0.015
+2024-08-03 15:18:34,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.477e+01 1.244e+02 1.482e+02 1.823e+02 3.057e+02, threshold=2.965e+02, percent-clipped=1.0
+2024-08-03 15:18:36,562 INFO [train.py:1114] (2/4) Epoch 12, batch 1350, loss[loss=0.2005, simple_loss=0.2852, pruned_loss=0.0579, over 13531.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2869, pruned_loss=0.06109, over 2637633.25 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:18:43,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=151778.0, ans=0.0
+2024-08-03 15:19:25,330 INFO [train.py:1114] (2/4) Epoch 12, batch 1400, loss[loss=0.2062, simple_loss=0.2726, pruned_loss=0.06986, over 13239.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2862, pruned_loss=0.06064, over 2642345.36 frames. ], batch size: 31, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:19:28,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=151961.33333333334, ans=0.07
+2024-08-03 15:19:40,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.30 vs. limit=15.0
+2024-08-03 15:19:51,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152034.66666666666, ans=0.125
+2024-08-03 15:19:59,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.18 vs. limit=15.0
+2024-08-03 15:20:00,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=152071.33333333334, ans=0.125
+2024-08-03 15:20:00,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=152071.33333333334, ans=0.0
+2024-08-03 15:20:09,507 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.408e+01 1.177e+02 1.443e+02 1.884e+02 3.508e+02, threshold=2.887e+02, percent-clipped=1.0
+2024-08-03 15:20:11,318 INFO [train.py:1114] (2/4) Epoch 12, batch 1450, loss[loss=0.222, simple_loss=0.3043, pruned_loss=0.06991, over 13400.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2865, pruned_loss=0.06092, over 2641306.50 frames. ], batch size: 43, lr: 1.05e-02, grad_scale: 16.0
+2024-08-03 15:20:17,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=152144.66666666666, ans=0.025
+2024-08-03 15:20:27,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=152181.33333333334, ans=0.125
+2024-08-03 15:20:45,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=152254.66666666666, ans=0.09899494936611666
+2024-08-03 15:20:56,411 INFO [train.py:1114] (2/4) Epoch 12, batch 1500, loss[loss=0.2027, simple_loss=0.3004, pruned_loss=0.05253, over 13416.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2867, pruned_loss=0.06077, over 2641526.21 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:21:13,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152364.66666666666, ans=0.1
+2024-08-03 15:21:16,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152364.66666666666, ans=0.125
+2024-08-03 15:21:16,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=152364.66666666666, ans=0.2
+2024-08-03 15:21:21,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=152401.33333333334, ans=0.125
+2024-08-03 15:21:27,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=152438.0, ans=0.125
+2024-08-03 15:21:43,773 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.575e+01 1.302e+02 1.536e+02 1.991e+02 2.999e+02, threshold=3.072e+02, percent-clipped=1.0
+2024-08-03 15:21:47,447 INFO [train.py:1114] (2/4) Epoch 12, batch 1550, loss[loss=0.2078, simple_loss=0.2929, pruned_loss=0.0613, over 13392.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2865, pruned_loss=0.06065, over 2630436.21 frames. ], batch size: 41, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:21:47,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=152511.33333333334, ans=0.2
+2024-08-03 15:21:52,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=152511.33333333334, ans=0.2
+2024-08-03 15:21:55,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=152511.33333333334, ans=0.125
+2024-08-03 15:22:05,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152548.0, ans=0.125
+2024-08-03 15:22:09,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=152584.66666666666, ans=0.125
+2024-08-03 15:22:10,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=152584.66666666666, ans=0.0
+2024-08-03 15:22:14,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=152584.66666666666, ans=0.125
+2024-08-03 15:22:16,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=152584.66666666666, ans=0.125
+2024-08-03 15:22:28,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=152658.0, ans=0.125
+2024-08-03 15:22:32,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.15 vs. limit=22.5
+2024-08-03 15:22:38,645 INFO [train.py:1114] (2/4) Epoch 12, batch 1600, loss[loss=0.2357, simple_loss=0.313, pruned_loss=0.0792, over 13569.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2867, pruned_loss=0.06091, over 2623549.52 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 32.0
+2024-08-03 15:22:40,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152694.66666666666, ans=0.125
+2024-08-03 15:22:42,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=152694.66666666666, ans=0.1
+2024-08-03 15:22:49,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=152731.33333333334, ans=0.2
+2024-08-03 15:22:57,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=152768.0, ans=0.0
+2024-08-03 15:22:59,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0
+2024-08-03 15:23:09,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=152804.66666666666, ans=0.125
+2024-08-03 15:23:15,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=152841.33333333334, ans=0.02
+2024-08-03 15:23:16,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152841.33333333334, ans=0.125
+2024-08-03 15:23:22,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.191e+02 1.409e+02 1.604e+02 3.528e+02, threshold=2.818e+02, percent-clipped=1.0
+2024-08-03 15:23:24,249 INFO [train.py:1114] (2/4) Epoch 12, batch 1650, loss[loss=0.2109, simple_loss=0.2975, pruned_loss=0.06217, over 13351.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2863, pruned_loss=0.06063, over 2621385.90 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 32.0
+2024-08-03 15:23:38,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152914.66666666666, ans=0.125
+2024-08-03 15:23:42,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=152951.33333333334, ans=0.125
+2024-08-03 15:23:44,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.57 vs. limit=15.0
+2024-08-03 15:23:47,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=152951.33333333334, ans=0.0
+2024-08-03 15:23:48,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=152951.33333333334, ans=0.125
+2024-08-03 15:23:55,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152988.0, ans=0.125
+2024-08-03 15:23:55,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=152988.0, ans=0.125
+2024-08-03 15:23:55,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=152988.0, ans=0.125
+2024-08-03 15:23:59,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153024.66666666666, ans=0.1
+2024-08-03 15:24:09,368 INFO [train.py:1114] (2/4) Epoch 12, batch 1700, loss[loss=0.179, simple_loss=0.2537, pruned_loss=0.0522, over 13275.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2863, pruned_loss=0.06049, over 2630015.95 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 32.0
+2024-08-03 15:24:11,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=153061.33333333334, ans=0.0
+2024-08-03 15:24:12,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=153061.33333333334, ans=0.2
+2024-08-03 15:24:12,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153061.33333333334, ans=0.125
+2024-08-03 15:24:39,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.01 vs. limit=22.5
+2024-08-03 15:24:44,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=153171.33333333334, ans=0.125
+2024-08-03 15:24:47,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=153171.33333333334, ans=0.07
+2024-08-03 15:24:49,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=153208.0, ans=0.0
+2024-08-03 15:24:54,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=153208.0, ans=0.0
+2024-08-03 15:24:57,292 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.203e+01 1.265e+02 1.510e+02 1.884e+02 3.458e+02, threshold=3.019e+02, percent-clipped=4.0
+2024-08-03 15:24:58,243 INFO [train.py:1114] (2/4) Epoch 12, batch 1750, loss[loss=0.1866, simple_loss=0.2672, pruned_loss=0.05303, over 13535.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2854, pruned_loss=0.06017, over 2634043.02 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:25:13,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=153281.33333333334, ans=0.0
+2024-08-03 15:25:22,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=153318.0, ans=0.0
+2024-08-03 15:25:31,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=153354.66666666666, ans=10.0
+2024-08-03 15:25:39,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153391.33333333334, ans=0.1
+2024-08-03 15:25:43,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=153391.33333333334, ans=0.0
+2024-08-03 15:25:45,813 INFO [train.py:1114] (2/4) Epoch 12, batch 1800, loss[loss=0.1992, simple_loss=0.294, pruned_loss=0.05223, over 13547.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2854, pruned_loss=0.06011, over 2635082.65 frames. ], batch size: 38, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:25:58,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=153464.66666666666, ans=0.125
+2024-08-03 15:26:04,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=153501.33333333334, ans=0.0
+2024-08-03 15:26:18,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153538.0, ans=0.1
+2024-08-03 15:26:30,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=153574.66666666666, ans=0.1
+2024-08-03 15:26:31,783 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.247e+01 1.215e+02 1.564e+02 1.986e+02 3.414e+02, threshold=3.127e+02, percent-clipped=2.0
+2024-08-03 15:26:32,685 INFO [train.py:1114] (2/4) Epoch 12, batch 1850, loss[loss=0.2093, simple_loss=0.3059, pruned_loss=0.05636, over 13410.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2859, pruned_loss=0.05991, over 2637198.10 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:26:38,647 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.70 vs. limit=15.0
+2024-08-03 15:27:04,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=153721.33333333334, ans=0.2
+2024-08-03 15:27:09,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=10.0
+2024-08-03 15:27:16,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=153794.66666666666, ans=0.125
+2024-08-03 15:27:17,666 INFO [train.py:1114] (2/4) Epoch 12, batch 1900, loss[loss=0.2213, simple_loss=0.3091, pruned_loss=0.06675, over 13316.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2865, pruned_loss=0.06029, over 2640068.03 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:27:18,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=153794.66666666666, ans=0.0
+2024-08-03 15:27:36,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=153868.0, ans=0.2
+2024-08-03 15:27:55,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=153941.33333333334, ans=0.0
+2024-08-03 15:28:02,823 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.586e+01 1.133e+02 1.329e+02 1.671e+02 4.322e+02, threshold=2.659e+02, percent-clipped=4.0
+2024-08-03 15:28:02,861 INFO [train.py:1114] (2/4) Epoch 12, batch 1950, loss[loss=0.1886, simple_loss=0.2755, pruned_loss=0.05088, over 13566.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2876, pruned_loss=0.0605, over 2646635.10 frames. ], batch size: 36, lr: 1.04e-02, grad_scale: 8.0
+2024-08-03 15:28:43,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0
+2024-08-03 15:28:51,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=154161.33333333334, ans=0.2
+2024-08-03 15:28:51,857 INFO [train.py:1114] (2/4) Epoch 12, batch 2000, loss[loss=0.1634, simple_loss=0.2415, pruned_loss=0.0427, over 13537.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2883, pruned_loss=0.06101, over 2636473.25 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:28:56,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=154161.33333333334, ans=0.025
+2024-08-03 15:29:00,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=154198.0, ans=0.025
+2024-08-03 15:29:40,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=154344.66666666666, ans=0.125
+2024-08-03 15:29:40,805 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.317e+01 1.213e+02 1.428e+02 1.743e+02 2.865e+02, threshold=2.857e+02, percent-clipped=1.0
+2024-08-03 15:29:40,842 INFO [train.py:1114] (2/4) Epoch 12, batch 2050, loss[loss=0.1813, simple_loss=0.2596, pruned_loss=0.05149, over 13446.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.287, pruned_loss=0.06074, over 2633204.54 frames. ], batch size: 32, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:29:41,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154344.66666666666, ans=0.1
+2024-08-03 15:30:07,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=154454.66666666666, ans=0.07
+2024-08-03 15:30:24,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=154528.0, ans=0.125
+2024-08-03 15:30:25,477 INFO [train.py:1114] (2/4) Epoch 12, batch 2100, loss[loss=0.2069, simple_loss=0.2885, pruned_loss=0.06268, over 13536.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2868, pruned_loss=0.06055, over 2638580.79 frames. ], batch size: 37, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:30:58,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.22 vs. limit=6.0
+2024-08-03 15:31:03,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=154674.66666666666, ans=0.0
+2024-08-03 15:31:10,330 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.387e+01 1.104e+02 1.282e+02 1.725e+02 3.211e+02, threshold=2.564e+02, percent-clipped=3.0
+2024-08-03 15:31:10,368 INFO [train.py:1114] (2/4) Epoch 12, batch 2150, loss[loss=0.1934, simple_loss=0.2765, pruned_loss=0.05514, over 13553.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2858, pruned_loss=0.06016, over 2646760.06 frames. ], batch size: 36, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:31:14,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=154711.33333333334, ans=0.0
+2024-08-03 15:31:16,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=154711.33333333334, ans=0.2
+2024-08-03 15:31:19,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=154748.0, ans=0.125
+2024-08-03 15:31:22,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=154748.0, ans=0.125
+2024-08-03 15:31:24,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=154748.0, ans=0.0
+2024-08-03 15:31:28,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=154784.66666666666, ans=0.125
+2024-08-03 15:31:31,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=154784.66666666666, ans=0.0
+2024-08-03 15:31:32,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=154784.66666666666, ans=0.0
+2024-08-03 15:31:59,006 INFO [train.py:1114] (2/4) Epoch 12, batch 2200, loss[loss=0.2271, simple_loss=0.3139, pruned_loss=0.07018, over 13408.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2853, pruned_loss=0.0595, over 2645215.77 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:32:11,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=154931.33333333334, ans=0.1
+2024-08-03 15:32:11,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154931.33333333334, ans=0.125
+2024-08-03 15:32:12,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=154931.33333333334, ans=0.2
+2024-08-03 15:32:13,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=154931.33333333334, ans=0.2
+2024-08-03 15:32:23,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154968.0, ans=0.0
+2024-08-03 15:32:28,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=155004.66666666666, ans=0.0
+2024-08-03 15:32:30,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=155004.66666666666, ans=0.125
+2024-08-03 15:32:32,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=155004.66666666666, ans=0.1
+2024-08-03 15:32:37,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0
+2024-08-03 15:32:46,540 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.202e+02 1.524e+02 1.986e+02 3.191e+02, threshold=3.048e+02, percent-clipped=7.0
+2024-08-03 15:32:46,578 INFO [train.py:1114] (2/4) Epoch 12, batch 2250, loss[loss=0.2072, simple_loss=0.2966, pruned_loss=0.0589, over 13374.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.286, pruned_loss=0.06009, over 2642852.09 frames. ], batch size: 37, lr: 1.04e-02, grad_scale: 16.0
+2024-08-03 15:32:46,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=155078.0, ans=0.0
+2024-08-03 15:32:57,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.42 vs. limit=15.0
+2024-08-03 15:33:05,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=155151.33333333334, ans=0.0
+2024-08-03 15:33:08,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155151.33333333334, ans=0.1
+2024-08-03 15:33:20,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=15.0
+2024-08-03 15:33:21,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=155188.0, ans=0.125
+2024-08-03 15:33:31,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=155224.66666666666, ans=0.125
+2024-08-03 15:33:34,530 INFO [train.py:1114] (2/4) Epoch 12, batch 2300, loss[loss=0.1782, simple_loss=0.2555, pruned_loss=0.05049, over 13561.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2841, pruned_loss=0.05958, over 2638117.88 frames. ], batch size: 33, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:33:44,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=155298.0, ans=0.07
+2024-08-03 15:33:44,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=155298.0, ans=0.025
+2024-08-03 15:33:45,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=155298.0, ans=0.125
+2024-08-03 15:34:00,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=155334.66666666666, ans=0.025
+2024-08-03 15:34:00,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=155334.66666666666, ans=0.0
+2024-08-03 15:34:11,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=155408.0, ans=0.025
+2024-08-03 15:34:17,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=155408.0, ans=0.125
+2024-08-03 15:34:19,893 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.892e+01 1.184e+02 1.344e+02 1.744e+02 3.184e+02, threshold=2.689e+02, percent-clipped=1.0
+2024-08-03 15:34:19,930 INFO [train.py:1114] (2/4) Epoch 12, batch 2350, loss[loss=0.1906, simple_loss=0.278, pruned_loss=0.05159, over 13555.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2841, pruned_loss=0.05945, over 2641096.99 frames. ], batch size: 38, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:34:45,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155518.0, ans=0.1
+2024-08-03 15:34:55,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=155591.33333333334, ans=0.125
+2024-08-03 15:35:04,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.27 vs. limit=10.0
+2024-08-03 15:35:05,408 INFO [train.py:1114] (2/4) Epoch 12, batch 2400, loss[loss=0.1782, simple_loss=0.2639, pruned_loss=0.04624, over 13529.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2848, pruned_loss=0.05972, over 2641898.40 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0
+2024-08-03 15:35:07,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=155628.0, ans=0.125
+2024-08-03 15:35:11,801 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:35:13,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155664.66666666666, ans=0.1
+2024-08-03 15:35:16,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=155664.66666666666, ans=0.125
+2024-08-03 15:35:22,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=155664.66666666666, ans=0.0
+2024-08-03 15:35:36,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=155738.0, ans=0.125
+2024-08-03 15:35:42,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.31 vs. limit=6.0
+2024-08-03 15:35:43,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=155738.0, ans=0.125
+2024-08-03 15:35:54,431 INFO [train.py:1114] (2/4) Epoch 12, batch 2450, loss[loss=0.2192, simple_loss=0.3, pruned_loss=0.06921, over 13365.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2859, pruned_loss=0.0604, over 2631621.45 frames. ], batch size: 37, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:35:55,283 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.374e+01 1.163e+02 1.350e+02 1.845e+02 2.920e+02, threshold=2.699e+02, percent-clipped=1.0
+2024-08-03 15:36:02,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=155811.33333333334, ans=0.0
+2024-08-03 15:36:03,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=155848.0, ans=0.2
+2024-08-03 15:36:18,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=155884.66666666666, ans=0.125
+2024-08-03 15:36:20,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=155884.66666666666, ans=0.125
+2024-08-03 15:36:20,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=15.0
+2024-08-03 15:36:26,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=155921.33333333334, ans=0.1
+2024-08-03 15:36:30,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.13 vs. limit=10.0
+2024-08-03 15:36:41,778 INFO [train.py:1114] (2/4) Epoch 12, batch 2500, loss[loss=0.2112, simple_loss=0.2989, pruned_loss=0.06172, over 13404.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.285, pruned_loss=0.05967, over 2636292.11 frames. ], batch size: 39, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:36:41,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155994.66666666666, ans=0.1
+2024-08-03 15:36:48,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155994.66666666666, ans=0.1
+2024-08-03 15:37:03,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.97 vs. limit=10.0
+2024-08-03 15:37:09,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.25 vs. limit=15.0
+2024-08-03 15:37:27,539 INFO [train.py:1114] (2/4) Epoch 12, batch 2550, loss[loss=0.1972, simple_loss=0.2679, pruned_loss=0.06322, over 13517.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2849, pruned_loss=0.05972, over 2638495.32 frames. ], batch size: 31, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:37:28,349 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.903e+01 1.161e+02 1.332e+02 1.717e+02 3.575e+02, threshold=2.664e+02, percent-clipped=6.0
+2024-08-03 15:37:33,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=156178.0, ans=0.125
+2024-08-03 15:37:43,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=156214.66666666666, ans=0.0
+2024-08-03 15:37:56,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.78 vs. limit=15.0
+2024-08-03 15:38:00,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156288.0, ans=0.1
+2024-08-03 15:38:01,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=156288.0, ans=0.0
+2024-08-03 15:38:12,945 INFO [train.py:1114] (2/4) Epoch 12, batch 2600, loss[loss=0.1976, simple_loss=0.2816, pruned_loss=0.05683, over 13552.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2859, pruned_loss=0.05993, over 2637924.60 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:38:16,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=156361.33333333334, ans=0.125
+2024-08-03 15:38:20,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=156398.0, ans=0.5
+2024-08-03 15:38:56,391 INFO [train.py:1114] (2/4) Epoch 12, batch 2650, loss[loss=0.2179, simple_loss=0.3034, pruned_loss=0.06619, over 13352.00 frames. ], tot_loss[loss=0.203, simple_loss=0.286, pruned_loss=0.06004, over 2641336.91 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:38:56,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.44 vs. limit=15.0
+2024-08-03 15:38:57,262 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.176e+02 1.444e+02 1.768e+02 3.309e+02, threshold=2.888e+02, percent-clipped=8.0
+2024-08-03 15:39:10,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=8.0
+2024-08-03 15:39:19,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156618.0, ans=0.1
+2024-08-03 15:39:20,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.18 vs. limit=15.0
+2024-08-03 15:39:24,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156654.66666666666, ans=0.1
+2024-08-03 15:39:32,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=156691.33333333334, ans=0.125
+2024-08-03 15:39:33,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.60 vs. limit=15.0
+2024-08-03 15:39:38,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=156691.33333333334, ans=0.0
+2024-08-03 15:39:40,092 INFO [train.py:1114] (2/4) Epoch 12, batch 2700, loss[loss=0.2108, simple_loss=0.2953, pruned_loss=0.06315, over 13562.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2859, pruned_loss=0.05985, over 2638827.73 frames. ], batch size: 40, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:39:41,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-08-03 15:39:42,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=156728.0, ans=0.125
+2024-08-03 15:40:05,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=156838.0, ans=0.2
+2024-08-03 15:40:08,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=156838.0, ans=0.125
+2024-08-03 15:40:13,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=156838.0, ans=0.2
+2024-08-03 15:40:23,014 INFO [train.py:1114] (2/4) Epoch 12, batch 2750, loss[loss=0.1668, simple_loss=0.2543, pruned_loss=0.03958, over 13334.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2854, pruned_loss=0.06002, over 2635519.54 frames. ], batch size: 34, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:40:23,800 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.506e+01 1.130e+02 1.309e+02 1.569e+02 2.980e+02, threshold=2.619e+02, percent-clipped=1.0
+2024-08-03 15:40:23,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=156911.33333333334, ans=0.025
+2024-08-03 15:40:52,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=157021.33333333334, ans=0.035
+2024-08-03 15:40:52,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=157021.33333333334, ans=0.09899494936611666
+2024-08-03 15:41:01,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157058.0, ans=0.0
+2024-08-03 15:41:06,520 INFO [train.py:1114] (2/4) Epoch 12, batch 2800, loss[loss=0.232, simple_loss=0.3071, pruned_loss=0.07846, over 9154.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2857, pruned_loss=0.06048, over 2627640.73 frames. ], batch size: 97, lr: 1.03e-02, grad_scale: 32.0
+2024-08-03 15:41:07,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=157094.66666666666, ans=0.5
+2024-08-03 15:41:07,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.72 vs. limit=15.0
+2024-08-03 15:41:34,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=157204.66666666666, ans=0.125
+2024-08-03 15:41:37,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=157204.66666666666, ans=0.025
+2024-08-03 15:41:43,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=157241.33333333334, ans=0.07
+2024-08-03 15:41:46,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=157241.33333333334, ans=0.0
+2024-08-03 15:41:49,748 INFO [train.py:1114] (2/4) Epoch 12, batch 2850, loss[loss=0.2141, simple_loss=0.2962, pruned_loss=0.06595, over 13356.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2861, pruned_loss=0.06059, over 2620970.65 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:41:51,424 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.668e+01 1.162e+02 1.250e+02 1.511e+02 3.589e+02, threshold=2.501e+02, percent-clipped=1.0
+2024-08-03 15:42:05,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.22 vs. limit=22.5
+2024-08-03 15:42:09,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=157351.33333333334, ans=0.0
+2024-08-03 15:42:35,664 INFO [train.py:1114] (2/4) Epoch 12, batch 2900, loss[loss=0.1932, simple_loss=0.272, pruned_loss=0.05725, over 13366.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2876, pruned_loss=0.06141, over 2632067.91 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:42:44,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=15.0
+2024-08-03 15:42:46,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=15.0
+2024-08-03 15:42:48,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=157498.0, ans=0.125
+2024-08-03 15:42:55,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=157534.66666666666, ans=0.025
+2024-08-03 15:43:03,901 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.10 vs. limit=15.0
+2024-08-03 15:43:15,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.91 vs. limit=15.0
+2024-08-03 15:43:18,711 INFO [train.py:1114] (2/4) Epoch 12, batch 2950, loss[loss=0.1892, simple_loss=0.275, pruned_loss=0.05172, over 13321.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2864, pruned_loss=0.06095, over 2630644.61 frames. ], batch size: 34, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:43:20,458 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.179e+02 1.423e+02 1.782e+02 2.994e+02, threshold=2.847e+02, percent-clipped=4.0
+2024-08-03 15:43:31,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=157681.33333333334, ans=0.0
+2024-08-03 15:43:40,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=157718.0, ans=0.125
+2024-08-03 15:43:51,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=157754.66666666666, ans=0.035
+2024-08-03 15:44:01,734 INFO [train.py:1114] (2/4) Epoch 12, batch 3000, loss[loss=0.1869, simple_loss=0.2753, pruned_loss=0.0493, over 13564.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2856, pruned_loss=0.06052, over 2631030.80 frames. ], batch size: 37, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:44:01,735 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 15:44:11,713 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.178, simple_loss=0.2775, pruned_loss=0.03924, over 944034.00 frames.
+2024-08-03 15:44:11,714 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 15:44:12,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157828.0, ans=0.1
+2024-08-03 15:44:23,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157864.66666666666, ans=0.1
+2024-08-03 15:44:26,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=157864.66666666666, ans=0.125
+2024-08-03 15:44:39,145 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0
+2024-08-03 15:44:40,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=157938.0, ans=0.1
+2024-08-03 15:44:48,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=157974.66666666666, ans=0.0
+2024-08-03 15:44:55,165 INFO [train.py:1114] (2/4) Epoch 12, batch 3050, loss[loss=0.185, simple_loss=0.2747, pruned_loss=0.0477, over 13532.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2871, pruned_loss=0.06108, over 2627486.81 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:44:56,888 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.167e+02 1.304e+02 1.686e+02 2.790e+02, threshold=2.608e+02, percent-clipped=0.0
+2024-08-03 15:45:20,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=158084.66666666666, ans=0.2
+2024-08-03 15:45:36,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.76 vs. limit=15.0
+2024-08-03 15:45:37,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=158158.0, ans=0.125
+2024-08-03 15:45:39,541 INFO [train.py:1114] (2/4) Epoch 12, batch 3100, loss[loss=0.223, simple_loss=0.3117, pruned_loss=0.06715, over 13318.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2871, pruned_loss=0.06111, over 2626991.44 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 16.0
+2024-08-03 15:45:44,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=158194.66666666666, ans=0.125
+2024-08-03 15:45:55,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=158268.0, ans=0.125
+2024-08-03 15:46:08,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=158304.66666666666, ans=0.2
+2024-08-03 15:46:22,126 INFO [train.py:1114] (2/4) Epoch 12, batch 3150, loss[loss=0.2001, simple_loss=0.2914, pruned_loss=0.0544, over 13063.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2871, pruned_loss=0.06096, over 2627975.06 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 16.0
+2024-08-03 15:46:23,778 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.679e+01 1.169e+02 1.445e+02 1.962e+02 3.331e+02, threshold=2.890e+02, percent-clipped=6.0
+2024-08-03 15:46:24,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=158378.0, ans=0.0
+2024-08-03 15:46:47,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.96 vs. limit=15.0
+2024-08-03 15:46:53,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158488.0, ans=0.1
+2024-08-03 15:46:58,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=158524.66666666666, ans=0.0
+2024-08-03 15:47:05,159 INFO [train.py:1114] (2/4) Epoch 12, batch 3200, loss[loss=0.1915, simple_loss=0.2741, pruned_loss=0.05443, over 13551.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2856, pruned_loss=0.06003, over 2634318.29 frames. ], batch size: 37, lr: 1.02e-02, grad_scale: 32.0
+2024-08-03 15:47:13,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=158598.0, ans=0.0
+2024-08-03 15:47:37,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158671.33333333334, ans=0.1
+2024-08-03 15:47:39,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.07 vs. limit=22.5
+2024-08-03 15:47:45,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=158708.0, ans=10.0
+2024-08-03 15:47:48,552 INFO [train.py:1114] (2/4) Epoch 12, batch 3250, loss[loss=0.2071, simple_loss=0.2939, pruned_loss=0.06014, over 13385.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2858, pruned_loss=0.05984, over 2638636.99 frames. ], batch size: 38, lr: 1.02e-02, grad_scale: 32.0
+2024-08-03 15:47:50,192 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.188e+01 1.142e+02 1.340e+02 1.709e+02 3.212e+02, threshold=2.679e+02, percent-clipped=3.0
+2024-08-03 15:48:11,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=158818.0, ans=0.2
+2024-08-03 15:48:31,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158928.0, ans=0.125
+2024-08-03 15:48:32,121 INFO [train.py:1114] (2/4) Epoch 12, batch 3300, loss[loss=0.209, simple_loss=0.2904, pruned_loss=0.06378, over 12873.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2845, pruned_loss=0.05939, over 2640524.97 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0
+2024-08-03 15:49:05,939 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:49:06,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159074.66666666666, ans=0.1
+2024-08-03 15:49:15,289 INFO [train.py:1114] (2/4) Epoch 12, batch 3350, loss[loss=0.2351, simple_loss=0.3133, pruned_loss=0.07843, over 13039.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2859, pruned_loss=0.06029, over 2629808.65 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 32.0
+2024-08-03 15:49:17,008 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.306e+01 1.184e+02 1.362e+02 1.748e+02 2.695e+02, threshold=2.725e+02, percent-clipped=2.0
+2024-08-03 15:49:21,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159111.33333333334, ans=0.125
+2024-08-03 15:49:27,335 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.22 vs. limit=15.0
+2024-08-03 15:49:32,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=159184.66666666666, ans=0.125
+2024-08-03 15:49:39,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159184.66666666666, ans=0.125
+2024-08-03 15:49:54,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159258.0, ans=0.125
+2024-08-03 15:49:59,030 INFO [train.py:1114] (2/4) Epoch 12, batch 3400, loss[loss=0.1774, simple_loss=0.257, pruned_loss=0.04887, over 13549.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2862, pruned_loss=0.06065, over 2625967.12 frames. ], batch size: 31, lr: 1.02e-02, grad_scale: 32.0
+2024-08-03 15:49:59,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=159294.66666666666, ans=0.0
+2024-08-03 15:50:08,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159331.33333333334, ans=0.125
+2024-08-03 15:50:22,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=159368.0, ans=0.125
+2024-08-03 15:50:25,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159404.66666666666, ans=0.1
+2024-08-03 15:50:27,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=159404.66666666666, ans=0.125
+2024-08-03 15:50:27,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=159404.66666666666, ans=0.125
+2024-08-03 15:50:28,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=159404.66666666666, ans=0.04949747468305833
+2024-08-03 15:50:32,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.38 vs. limit=15.0
+2024-08-03 15:50:36,220 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 15:50:37,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=159441.33333333334, ans=0.0
+2024-08-03 15:50:42,888 INFO [train.py:1114] (2/4) Epoch 12, batch 3450, loss[loss=0.2209, simple_loss=0.3031, pruned_loss=0.06932, over 12906.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2854, pruned_loss=0.05997, over 2629439.43 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0
+2024-08-03 15:50:44,511 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.325e+01 1.219e+02 1.408e+02 1.757e+02 3.423e+02, threshold=2.817e+02, percent-clipped=3.0
+2024-08-03 15:50:44,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159478.0, ans=0.125
+2024-08-03 15:50:45,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.27 vs.
limit=15.0 +2024-08-03 15:50:50,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.30 vs. limit=15.0 +2024-08-03 15:51:12,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0 +2024-08-03 15:51:21,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.67 vs. limit=15.0 +2024-08-03 15:51:24,776 INFO [train.py:1114] (2/4) Epoch 12, batch 3500, loss[loss=0.2019, simple_loss=0.2708, pruned_loss=0.06646, over 13512.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2848, pruned_loss=0.0601, over 2629622.27 frames. ], batch size: 34, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:51:25,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=159661.33333333334, ans=0.125 +2024-08-03 15:51:28,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=159661.33333333334, ans=0.125 +2024-08-03 15:51:45,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159734.66666666666, ans=0.125 +2024-08-03 15:52:02,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159808.0, ans=0.1 +2024-08-03 15:52:04,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=159808.0, ans=0.07 +2024-08-03 15:52:07,855 INFO [train.py:1114] (2/4) Epoch 12, batch 3550, loss[loss=0.234, simple_loss=0.3166, pruned_loss=0.07575, over 12521.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2876, pruned_loss=0.06118, over 2628554.00 frames. ], batch size: 58, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:52:09,498 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.339e+01 1.127e+02 1.244e+02 1.565e+02 2.847e+02, threshold=2.489e+02, percent-clipped=1.0 +2024-08-03 15:52:15,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=159881.33333333334, ans=0.025 +2024-08-03 15:52:17,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.52 vs. limit=22.5 +2024-08-03 15:52:39,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=159954.66666666666, ans=0.2 +2024-08-03 15:52:42,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=159954.66666666666, ans=22.5 +2024-08-03 15:52:42,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.01 vs. limit=5.0 +2024-08-03 15:52:48,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=159991.33333333334, ans=0.125 +2024-08-03 15:52:53,380 INFO [train.py:1114] (2/4) Epoch 12, batch 3600, loss[loss=0.2438, simple_loss=0.3104, pruned_loss=0.0886, over 9290.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.293, pruned_loss=0.06602, over 2487670.92 frames. 
], batch size: 97, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:53:07,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=160064.66666666666, ans=0.2 +2024-08-03 15:53:11,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=160101.33333333334, ans=0.125 +2024-08-03 15:53:19,844 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-08-03 15:54:13,941 INFO [train.py:1114] (2/4) Epoch 13, batch 0, loss[loss=0.198, simple_loss=0.278, pruned_loss=0.05901, over 13357.00 frames. ], tot_loss[loss=0.198, simple_loss=0.278, pruned_loss=0.05901, over 13357.00 frames. ], batch size: 33, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:54:13,942 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 15:54:23,996 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.179, simple_loss=0.2806, pruned_loss=0.03875, over 944034.00 frames. +2024-08-03 15:54:23,997 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 15:54:25,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=160174.66666666666, ans=0.0 +2024-08-03 15:54:36,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160211.33333333334, ans=0.125 +2024-08-03 15:54:37,805 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.239e+02 1.394e+02 1.533e+02 2.538e+02, threshold=2.789e+02, percent-clipped=1.0 +2024-08-03 15:54:38,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=160211.33333333334, ans=0.125 +2024-08-03 15:54:45,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=160248.0, ans=0.5 +2024-08-03 15:54:46,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=160248.0, ans=0.125 +2024-08-03 15:55:01,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160321.33333333334, ans=0.1 +2024-08-03 15:55:11,494 INFO [train.py:1114] (2/4) Epoch 13, batch 50, loss[loss=0.202, simple_loss=0.2811, pruned_loss=0.06151, over 13438.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2906, pruned_loss=0.06161, over 578425.31 frames. ], batch size: 32, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:55:16,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0 +2024-08-03 15:55:30,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160431.33333333334, ans=0.125 +2024-08-03 15:55:49,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=160504.66666666666, ans=0.0 +2024-08-03 15:55:58,990 INFO [train.py:1114] (2/4) Epoch 13, batch 100, loss[loss=0.1997, simple_loss=0.2822, pruned_loss=0.05866, over 13527.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2899, pruned_loss=0.06009, over 1025503.27 frames. 
], batch size: 35, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:10,677 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.344e+01 1.098e+02 1.241e+02 1.448e+02 3.539e+02, threshold=2.482e+02, percent-clipped=1.0 +2024-08-03 15:56:11,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=160578.0, ans=0.0 +2024-08-03 15:56:22,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.75 vs. limit=22.5 +2024-08-03 15:56:30,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=160651.33333333334, ans=0.025 +2024-08-03 15:56:32,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160651.33333333334, ans=0.125 +2024-08-03 15:56:34,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-08-03 15:56:44,363 INFO [train.py:1114] (2/4) Epoch 13, batch 150, loss[loss=0.1583, simple_loss=0.2406, pruned_loss=0.03805, over 13420.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2865, pruned_loss=0.05912, over 1386554.39 frames. ], batch size: 32, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:50,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=160724.66666666666, ans=0.07 +2024-08-03 15:57:15,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=160834.66666666666, ans=0.2 +2024-08-03 15:57:22,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.62 vs. limit=22.5 +2024-08-03 15:57:33,253 INFO [train.py:1114] (2/4) Epoch 13, batch 200, loss[loss=0.2157, simple_loss=0.295, pruned_loss=0.06817, over 12522.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2861, pruned_loss=0.05949, over 1664919.33 frames. ], batch size: 58, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:57:36,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160908.0, ans=0.125 +2024-08-03 15:57:43,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=160944.66666666666, ans=0.07 +2024-08-03 15:57:45,688 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.389e+01 1.180e+02 1.439e+02 1.786e+02 2.514e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 15:57:48,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=160944.66666666666, ans=0.125 +2024-08-03 15:57:54,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=160981.33333333334, ans=0.2 +2024-08-03 15:58:10,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=161054.66666666666, ans=0.0 +2024-08-03 15:58:20,398 INFO [train.py:1114] (2/4) Epoch 13, batch 250, loss[loss=0.2249, simple_loss=0.2993, pruned_loss=0.07528, over 13353.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2848, pruned_loss=0.05857, over 1884141.45 frames. 
], batch size: 46, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:58:25,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.29 vs. limit=15.0 +2024-08-03 15:58:27,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161091.33333333334, ans=0.1 +2024-08-03 15:58:28,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161128.0, ans=0.125 +2024-08-03 15:58:30,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=15.0 +2024-08-03 15:58:32,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=161128.0, ans=0.0 +2024-08-03 15:58:49,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=161201.33333333334, ans=0.125 +2024-08-03 15:58:49,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=161201.33333333334, ans=0.95 +2024-08-03 15:58:50,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=161201.33333333334, ans=0.2 +2024-08-03 15:58:50,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=161201.33333333334, ans=0.025 +2024-08-03 15:58:54,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=161201.33333333334, ans=0.125 +2024-08-03 15:58:59,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=161238.0, ans=0.0 +2024-08-03 15:59:03,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161238.0, ans=0.125 +2024-08-03 15:59:05,422 INFO [train.py:1114] (2/4) Epoch 13, batch 300, loss[loss=0.2113, simple_loss=0.2925, pruned_loss=0.06502, over 13433.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2847, pruned_loss=0.05883, over 2051368.99 frames. ], batch size: 42, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 15:59:08,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=161274.66666666666, ans=0.0 +2024-08-03 15:59:20,162 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.100e+02 1.297e+02 1.682e+02 2.744e+02, threshold=2.594e+02, percent-clipped=0.0 +2024-08-03 15:59:29,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=161311.33333333334, ans=0.125 +2024-08-03 15:59:53,902 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:00:00,485 INFO [train.py:1114] (2/4) Epoch 13, batch 350, loss[loss=0.1776, simple_loss=0.2539, pruned_loss=0.05069, over 13579.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2851, pruned_loss=0.05931, over 2182615.07 frames. ], batch size: 33, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 16:00:16,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.97 vs. 
limit=12.0 +2024-08-03 16:00:44,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161641.33333333334, ans=0.125 +2024-08-03 16:00:45,704 INFO [train.py:1114] (2/4) Epoch 13, batch 400, loss[loss=0.2268, simple_loss=0.3144, pruned_loss=0.06958, over 13357.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2839, pruned_loss=0.05879, over 2286180.05 frames. ], batch size: 37, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:01:00,503 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.412e+01 1.099e+02 1.258e+02 1.504e+02 2.448e+02, threshold=2.516e+02, percent-clipped=0.0 +2024-08-03 16:01:18,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.89 vs. limit=15.0 +2024-08-03 16:01:19,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=161751.33333333334, ans=0.125 +2024-08-03 16:01:21,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=161751.33333333334, ans=0.025 +2024-08-03 16:01:21,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=161751.33333333334, ans=0.125 +2024-08-03 16:01:28,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=161788.0, ans=0.125 +2024-08-03 16:01:39,135 INFO [train.py:1114] (2/4) Epoch 13, batch 450, loss[loss=0.2443, simple_loss=0.3265, pruned_loss=0.08111, over 13551.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.284, pruned_loss=0.05874, over 2359440.04 frames. ], batch size: 38, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:01:40,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=161824.66666666666, ans=0.0 +2024-08-03 16:01:42,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161824.66666666666, ans=0.1 +2024-08-03 16:01:42,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=161824.66666666666, ans=0.0 +2024-08-03 16:01:56,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=161861.33333333334, ans=0.0 +2024-08-03 16:02:18,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161971.33333333334, ans=0.125 +2024-08-03 16:02:28,145 INFO [train.py:1114] (2/4) Epoch 13, batch 500, loss[loss=0.2283, simple_loss=0.312, pruned_loss=0.07229, over 13430.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2832, pruned_loss=0.05845, over 2425998.57 frames. 
], batch size: 43, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:02:32,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162008.0, ans=0.125 +2024-08-03 16:02:44,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162044.66666666666, ans=0.125 +2024-08-03 16:02:44,993 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.845e+01 1.131e+02 1.351e+02 1.618e+02 2.590e+02, threshold=2.702e+02, percent-clipped=1.0 +2024-08-03 16:02:52,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162081.33333333334, ans=0.1 +2024-08-03 16:02:53,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=15.0 +2024-08-03 16:03:04,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162118.0, ans=0.1 +2024-08-03 16:03:12,614 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:03:18,773 INFO [train.py:1114] (2/4) Epoch 13, batch 550, loss[loss=0.2086, simple_loss=0.2987, pruned_loss=0.05927, over 13082.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2835, pruned_loss=0.05859, over 2467891.75 frames. ], batch size: 48, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:03:31,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162228.0, ans=0.125 +2024-08-03 16:03:35,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0 +2024-08-03 16:04:04,094 INFO [train.py:1114] (2/4) Epoch 13, batch 600, loss[loss=0.2225, simple_loss=0.3041, pruned_loss=0.07044, over 13336.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2826, pruned_loss=0.05822, over 2507848.50 frames. ], batch size: 46, lr: 9.73e-03, grad_scale: 16.0 +2024-08-03 16:04:05,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=162374.66666666666, ans=0.2 +2024-08-03 16:04:17,370 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.493e+01 1.180e+02 1.372e+02 1.849e+02 3.441e+02, threshold=2.744e+02, percent-clipped=2.0 +2024-08-03 16:04:22,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162448.0, ans=0.1 +2024-08-03 16:04:25,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=162448.0, ans=0.125 +2024-08-03 16:04:38,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.67 vs. limit=15.0 +2024-08-03 16:04:51,061 INFO [train.py:1114] (2/4) Epoch 13, batch 650, loss[loss=0.1772, simple_loss=0.2654, pruned_loss=0.04444, over 13552.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2821, pruned_loss=0.05788, over 2543142.96 frames. 
], batch size: 37, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:04:57,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=162558.0, ans=0.125 +2024-08-03 16:05:07,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162594.66666666666, ans=0.125 +2024-08-03 16:05:16,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162631.33333333334, ans=0.1 +2024-08-03 16:05:39,757 INFO [train.py:1114] (2/4) Epoch 13, batch 700, loss[loss=0.2019, simple_loss=0.2791, pruned_loss=0.06236, over 13534.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2828, pruned_loss=0.05831, over 2565247.31 frames. ], batch size: 35, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:53,433 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.882e+01 1.139e+02 1.377e+02 1.797e+02 3.206e+02, threshold=2.754e+02, percent-clipped=4.0 +2024-08-03 16:06:01,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162814.66666666666, ans=0.125 +2024-08-03 16:06:10,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.29 vs. limit=15.0 +2024-08-03 16:06:16,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=162851.33333333334, ans=0.025 +2024-08-03 16:06:21,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=162888.0, ans=0.025 +2024-08-03 16:06:31,546 INFO [train.py:1114] (2/4) Epoch 13, batch 750, loss[loss=0.1867, simple_loss=0.2787, pruned_loss=0.04734, over 13362.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2822, pruned_loss=0.05771, over 2581877.15 frames. ], batch size: 37, lr: 9.71e-03, grad_scale: 16.0 +2024-08-03 16:06:33,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=162924.66666666666, ans=0.0 +2024-08-03 16:06:33,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.00 vs. limit=15.0 +2024-08-03 16:06:40,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=162961.33333333334, ans=0.04949747468305833 +2024-08-03 16:06:50,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162998.0, ans=0.1 +2024-08-03 16:07:26,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=162998.0, ans=10.0 +2024-08-03 16:07:38,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=163034.66666666666, ans=0.0 +2024-08-03 16:07:45,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=163071.33333333334, ans=0.0 +2024-08-03 16:07:50,952 INFO [train.py:1114] (2/4) Epoch 13, batch 800, loss[loss=0.162, simple_loss=0.2457, pruned_loss=0.03922, over 13342.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2826, pruned_loss=0.05809, over 2596583.90 frames. 
], batch size: 33, lr: 9.71e-03, grad_scale: 32.0 +2024-08-03 16:07:53,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163108.0, ans=0.1 +2024-08-03 16:08:02,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163144.66666666666, ans=0.125 +2024-08-03 16:08:04,632 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.131e+01 1.126e+02 1.310e+02 1.667e+02 3.702e+02, threshold=2.620e+02, percent-clipped=3.0 +2024-08-03 16:08:15,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163181.33333333334, ans=0.1 +2024-08-03 16:08:24,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=15.0 +2024-08-03 16:08:26,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=163254.66666666666, ans=0.2 +2024-08-03 16:08:32,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=163254.66666666666, ans=0.125 +2024-08-03 16:08:36,394 INFO [train.py:1114] (2/4) Epoch 13, batch 850, loss[loss=0.1846, simple_loss=0.2788, pruned_loss=0.04525, over 13341.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2831, pruned_loss=0.05865, over 2609320.01 frames. ], batch size: 40, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:08:40,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=163291.33333333334, ans=0.95 +2024-08-03 16:08:55,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=163364.66666666666, ans=0.07 +2024-08-03 16:09:00,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=163364.66666666666, ans=0.125 +2024-08-03 16:09:03,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163364.66666666666, ans=0.125 +2024-08-03 16:09:19,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0 +2024-08-03 16:09:20,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=163438.0, ans=0.0 +2024-08-03 16:09:25,544 INFO [train.py:1114] (2/4) Epoch 13, batch 900, loss[loss=0.1811, simple_loss=0.2629, pruned_loss=0.04967, over 13360.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2833, pruned_loss=0.05861, over 2612197.62 frames. 
], batch size: 33, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:09:29,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=163474.66666666666, ans=0.125 +2024-08-03 16:09:31,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=163474.66666666666, ans=0.2 +2024-08-03 16:09:38,998 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.048e+01 1.164e+02 1.408e+02 1.726e+02 2.750e+02, threshold=2.816e+02, percent-clipped=1.0 +2024-08-03 16:09:45,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=163548.0, ans=0.0 +2024-08-03 16:09:56,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=163584.66666666666, ans=0.125 +2024-08-03 16:09:59,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.53 vs. limit=15.0 +2024-08-03 16:10:12,887 INFO [train.py:1114] (2/4) Epoch 13, batch 950, loss[loss=0.1568, simple_loss=0.2455, pruned_loss=0.03404, over 13540.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2828, pruned_loss=0.05814, over 2613479.95 frames. ], batch size: 34, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:10:31,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=163731.33333333334, ans=0.125 +2024-08-03 16:10:34,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=163731.33333333334, ans=0.125 +2024-08-03 16:10:38,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.85 vs. limit=15.0 +2024-08-03 16:10:48,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=163804.66666666666, ans=0.2 +2024-08-03 16:10:55,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163804.66666666666, ans=0.125 +2024-08-03 16:11:00,370 INFO [train.py:1114] (2/4) Epoch 13, batch 1000, loss[loss=0.1994, simple_loss=0.2815, pruned_loss=0.05863, over 13366.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.284, pruned_loss=0.05892, over 2611042.91 frames. ], batch size: 35, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:11:09,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=163878.0, ans=0.0 +2024-08-03 16:11:13,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=163878.0, ans=0.025 +2024-08-03 16:11:14,074 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.003e+01 1.149e+02 1.296e+02 1.618e+02 2.591e+02, threshold=2.593e+02, percent-clipped=0.0 +2024-08-03 16:11:23,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.96 vs. 
limit=15.0 +2024-08-03 16:11:28,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=163951.33333333334, ans=0.0 +2024-08-03 16:11:39,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163988.0, ans=0.1 +2024-08-03 16:11:45,698 INFO [train.py:1114] (2/4) Epoch 13, batch 1050, loss[loss=0.1677, simple_loss=0.2613, pruned_loss=0.03707, over 13580.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2828, pruned_loss=0.05833, over 2615247.21 frames. ], batch size: 39, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:11:47,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=164024.66666666666, ans=0.125 +2024-08-03 16:12:03,935 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:12:10,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164098.0, ans=0.125 +2024-08-03 16:12:19,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=164134.66666666666, ans=0.1 +2024-08-03 16:12:23,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=164171.33333333334, ans=0.125 +2024-08-03 16:12:28,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=164171.33333333334, ans=0.0 +2024-08-03 16:12:29,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164171.33333333334, ans=0.125 +2024-08-03 16:12:32,445 INFO [train.py:1114] (2/4) Epoch 13, batch 1100, loss[loss=0.1891, simple_loss=0.2747, pruned_loss=0.05176, over 13558.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.283, pruned_loss=0.05833, over 2619445.41 frames. ], batch size: 36, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:12:45,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.230e+01 1.162e+02 1.483e+02 1.783e+02 2.652e+02, threshold=2.966e+02, percent-clipped=1.0 +2024-08-03 16:12:55,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=164281.33333333334, ans=0.125 +2024-08-03 16:13:16,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=164354.66666666666, ans=0.0 +2024-08-03 16:13:19,699 INFO [train.py:1114] (2/4) Epoch 13, batch 1150, loss[loss=0.2051, simple_loss=0.2883, pruned_loss=0.06092, over 13565.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.283, pruned_loss=0.05831, over 2619111.50 frames. 
], batch size: 36, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:13:32,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=164428.0, ans=0.125 +2024-08-03 16:13:39,398 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:13:46,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=164464.66666666666, ans=0.125 +2024-08-03 16:14:07,670 INFO [train.py:1114] (2/4) Epoch 13, batch 1200, loss[loss=0.1944, simple_loss=0.2861, pruned_loss=0.05137, over 13594.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2839, pruned_loss=0.05831, over 2616191.85 frames. ], batch size: 39, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:14:08,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=164574.66666666666, ans=0.125 +2024-08-03 16:14:09,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=164574.66666666666, ans=0.0 +2024-08-03 16:14:19,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.27 vs. limit=15.0 +2024-08-03 16:14:21,157 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.721e+01 1.160e+02 1.448e+02 1.730e+02 2.788e+02, threshold=2.895e+02, percent-clipped=0.0 +2024-08-03 16:14:24,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=164611.33333333334, ans=0.2 +2024-08-03 16:14:31,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.25 vs. limit=10.0 +2024-08-03 16:14:34,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.90 vs. limit=15.0 +2024-08-03 16:14:36,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164684.66666666666, ans=0.1 +2024-08-03 16:14:49,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=164721.33333333334, ans=0.0 +2024-08-03 16:14:50,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164721.33333333334, ans=0.1 +2024-08-03 16:14:54,289 INFO [train.py:1114] (2/4) Epoch 13, batch 1250, loss[loss=0.222, simple_loss=0.3062, pruned_loss=0.06889, over 13412.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2842, pruned_loss=0.05841, over 2628340.95 frames. 
], batch size: 42, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:14:57,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=164758.0, ans=0.025 +2024-08-03 16:15:02,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=164794.66666666666, ans=0.0 +2024-08-03 16:15:19,905 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:15:33,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164904.66666666666, ans=0.1 +2024-08-03 16:15:39,442 INFO [train.py:1114] (2/4) Epoch 13, batch 1300, loss[loss=0.2153, simple_loss=0.3047, pruned_loss=0.06291, over 12952.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2832, pruned_loss=0.05808, over 2630910.05 frames. ], batch size: 52, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:15:46,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=164941.33333333334, ans=0.0 +2024-08-03 16:15:52,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.108e+01 1.112e+02 1.319e+02 1.683e+02 3.006e+02, threshold=2.638e+02, percent-clipped=1.0 +2024-08-03 16:15:56,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=164978.0, ans=0.125 +2024-08-03 16:16:15,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0 +2024-08-03 16:16:27,971 INFO [train.py:1114] (2/4) Epoch 13, batch 1350, loss[loss=0.1992, simple_loss=0.2851, pruned_loss=0.05665, over 13534.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2828, pruned_loss=0.05792, over 2637656.26 frames. ], batch size: 37, lr: 9.65e-03, grad_scale: 32.0 +2024-08-03 16:16:28,991 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:16:36,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=165161.33333333334, ans=0.0 +2024-08-03 16:16:43,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.53 vs. limit=22.5 +2024-08-03 16:16:48,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165198.0, ans=0.1 +2024-08-03 16:17:11,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=165271.33333333334, ans=0.125 +2024-08-03 16:17:14,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165308.0, ans=0.125 +2024-08-03 16:17:15,454 INFO [train.py:1114] (2/4) Epoch 13, batch 1400, loss[loss=0.2047, simple_loss=0.2727, pruned_loss=0.06839, over 13268.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2825, pruned_loss=0.05774, over 2641478.55 frames. 
], batch size: 31, lr: 9.65e-03, grad_scale: 16.0 +2024-08-03 16:17:20,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=165308.0, ans=0.0 +2024-08-03 16:17:29,723 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.131e+02 1.241e+02 1.412e+02 2.386e+02, threshold=2.482e+02, percent-clipped=0.0 +2024-08-03 16:17:32,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=165344.66666666666, ans=0.125 +2024-08-03 16:17:37,692 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:17:39,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.73 vs. limit=6.0 +2024-08-03 16:17:52,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=165454.66666666666, ans=0.125 +2024-08-03 16:17:58,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=165454.66666666666, ans=0.125 +2024-08-03 16:17:58,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165454.66666666666, ans=0.1 +2024-08-03 16:18:01,118 INFO [train.py:1114] (2/4) Epoch 13, batch 1450, loss[loss=0.2175, simple_loss=0.3066, pruned_loss=0.06421, over 13399.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2827, pruned_loss=0.05786, over 2640376.69 frames. ], batch size: 43, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:18:03,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=165491.33333333334, ans=0.0 +2024-08-03 16:18:07,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0 +2024-08-03 16:18:47,875 INFO [train.py:1114] (2/4) Epoch 13, batch 1500, loss[loss=0.1906, simple_loss=0.2769, pruned_loss=0.05217, over 13399.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2829, pruned_loss=0.05769, over 2640828.14 frames. ], batch size: 39, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:19:02,695 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.158e+02 1.427e+02 1.728e+02 2.727e+02, threshold=2.854e+02, percent-clipped=3.0 +2024-08-03 16:19:26,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.46 vs. limit=15.0 +2024-08-03 16:19:32,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165821.33333333334, ans=0.1 +2024-08-03 16:19:35,403 INFO [train.py:1114] (2/4) Epoch 13, batch 1550, loss[loss=0.1993, simple_loss=0.2886, pruned_loss=0.05505, over 13397.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2828, pruned_loss=0.05779, over 2630524.08 frames. ], batch size: 41, lr: 9.63e-03, grad_scale: 16.0 +2024-08-03 16:19:40,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.91 vs. 
limit=12.0 +2024-08-03 16:19:41,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165858.0, ans=0.1 +2024-08-03 16:19:42,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=165858.0, ans=0.125 +2024-08-03 16:19:56,343 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:20:22,602 INFO [train.py:1114] (2/4) Epoch 13, batch 1600, loss[loss=0.1965, simple_loss=0.2926, pruned_loss=0.05019, over 13576.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2824, pruned_loss=0.05752, over 2624530.26 frames. ], batch size: 39, lr: 9.63e-03, grad_scale: 32.0 +2024-08-03 16:20:47,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=166041.33333333334, ans=0.035 +2024-08-03 16:20:59,197 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.495e+01 1.166e+02 1.333e+02 1.673e+02 3.385e+02, threshold=2.665e+02, percent-clipped=4.0 +2024-08-03 16:21:14,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=166151.33333333334, ans=0.09899494936611666 +2024-08-03 16:21:25,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.03 vs. limit=22.5 +2024-08-03 16:21:30,120 INFO [train.py:1114] (2/4) Epoch 13, batch 1650, loss[loss=0.2078, simple_loss=0.2905, pruned_loss=0.06256, over 13302.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2827, pruned_loss=0.05796, over 2621751.27 frames. ], batch size: 40, lr: 9.62e-03, grad_scale: 32.0 +2024-08-03 16:21:31,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166224.66666666666, ans=0.125 +2024-08-03 16:21:43,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=166261.33333333334, ans=0.125 +2024-08-03 16:21:44,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.94 vs. limit=15.0 +2024-08-03 16:21:57,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=166298.0, ans=15.0 +2024-08-03 16:22:02,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=166334.66666666666, ans=0.025 +2024-08-03 16:22:10,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.43 vs. limit=15.0 +2024-08-03 16:22:17,148 INFO [train.py:1114] (2/4) Epoch 13, batch 1700, loss[loss=0.1997, simple_loss=0.2708, pruned_loss=0.06427, over 13235.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2825, pruned_loss=0.05763, over 2630540.84 frames. 
], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:22:19,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166408.0, ans=0.0 +2024-08-03 16:22:22,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166408.0, ans=0.1 +2024-08-03 16:22:23,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=166408.0, ans=0.125 +2024-08-03 16:22:26,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.70 vs. limit=15.0 +2024-08-03 16:22:32,762 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.164e+02 1.401e+02 1.757e+02 2.684e+02, threshold=2.802e+02, percent-clipped=1.0 +2024-08-03 16:22:36,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=166481.33333333334, ans=0.125 +2024-08-03 16:22:37,997 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.52 vs. limit=22.5 +2024-08-03 16:22:50,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=166518.0, ans=0.05 +2024-08-03 16:23:00,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=15.0 +2024-08-03 16:23:03,322 INFO [train.py:1114] (2/4) Epoch 13, batch 1750, loss[loss=0.1627, simple_loss=0.2425, pruned_loss=0.04151, over 13534.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2813, pruned_loss=0.05726, over 2633939.22 frames. ], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:23:22,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=166664.66666666666, ans=0.125 +2024-08-03 16:23:26,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=166664.66666666666, ans=0.125 +2024-08-03 16:23:38,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=166701.33333333334, ans=0.125 +2024-08-03 16:23:44,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=166738.0, ans=0.125 +2024-08-03 16:23:50,888 INFO [train.py:1114] (2/4) Epoch 13, batch 1800, loss[loss=0.2048, simple_loss=0.2974, pruned_loss=0.05611, over 13555.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2815, pruned_loss=0.05764, over 2635657.95 frames. ], batch size: 38, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:23:54,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=166774.66666666666, ans=0.125 +2024-08-03 16:24:04,301 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.89 vs. 
limit=15.0 +2024-08-03 16:24:08,447 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.884e+01 1.178e+02 1.346e+02 1.574e+02 2.406e+02, threshold=2.692e+02, percent-clipped=0.0 +2024-08-03 16:24:13,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=166848.0, ans=0.125 +2024-08-03 16:24:19,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166884.66666666666, ans=0.1 +2024-08-03 16:24:26,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=166884.66666666666, ans=0.125 +2024-08-03 16:24:37,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=166921.33333333334, ans=0.125 +2024-08-03 16:24:40,318 INFO [train.py:1114] (2/4) Epoch 13, batch 1850, loss[loss=0.2078, simple_loss=0.2958, pruned_loss=0.05984, over 13397.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2815, pruned_loss=0.05743, over 2637771.76 frames. ], batch size: 39, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:25:22,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=167104.66666666666, ans=0.125 +2024-08-03 16:25:26,610 INFO [train.py:1114] (2/4) Epoch 13, batch 1900, loss[loss=0.1973, simple_loss=0.2833, pruned_loss=0.05567, over 13318.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2819, pruned_loss=0.05767, over 2640161.20 frames. ], batch size: 40, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:25:31,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.16 vs. limit=15.0 +2024-08-03 16:25:44,172 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.192e+01 1.122e+02 1.325e+02 1.918e+02 3.257e+02, threshold=2.651e+02, percent-clipped=9.0 +2024-08-03 16:25:48,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=167214.66666666666, ans=0.025 +2024-08-03 16:25:58,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167251.33333333334, ans=0.1 +2024-08-03 16:26:00,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=167251.33333333334, ans=0.025 +2024-08-03 16:26:11,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=167288.0, ans=0.0 +2024-08-03 16:26:14,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=167324.66666666666, ans=0.2 +2024-08-03 16:26:14,634 INFO [train.py:1114] (2/4) Epoch 13, batch 1950, loss[loss=0.187, simple_loss=0.2759, pruned_loss=0.04904, over 13567.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2833, pruned_loss=0.05803, over 2646697.01 frames. ], batch size: 36, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:26:15,194 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.07 vs. limit=15.0 +2024-08-03 16:26:20,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.16 vs. 
limit=15.0 +2024-08-03 16:26:22,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=22.5 +2024-08-03 16:26:23,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=167361.33333333334, ans=0.1 +2024-08-03 16:26:49,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=167434.66666666666, ans=0.0 +2024-08-03 16:27:01,466 INFO [train.py:1114] (2/4) Epoch 13, batch 2000, loss[loss=0.2009, simple_loss=0.2743, pruned_loss=0.06378, over 13528.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2843, pruned_loss=0.05864, over 2636113.62 frames. ], batch size: 31, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:05,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=167508.0, ans=0.0 +2024-08-03 16:27:08,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167508.0, ans=0.1 +2024-08-03 16:27:17,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.733e+01 1.160e+02 1.429e+02 1.703e+02 2.821e+02, threshold=2.859e+02, percent-clipped=2.0 +2024-08-03 16:27:46,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167654.66666666666, ans=0.125 +2024-08-03 16:27:49,178 INFO [train.py:1114] (2/4) Epoch 13, batch 2050, loss[loss=0.1812, simple_loss=0.2591, pruned_loss=0.05165, over 13411.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2833, pruned_loss=0.05844, over 2633503.92 frames. ], batch size: 32, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:49,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=12.0 +2024-08-03 16:28:10,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=167764.66666666666, ans=0.125 +2024-08-03 16:28:11,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=167764.66666666666, ans=0.125 +2024-08-03 16:28:15,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=22.5 +2024-08-03 16:28:26,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=15.0 +2024-08-03 16:28:31,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-08-03 16:28:31,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=167838.0, ans=0.125 +2024-08-03 16:28:36,271 INFO [train.py:1114] (2/4) Epoch 13, batch 2100, loss[loss=0.1814, simple_loss=0.274, pruned_loss=0.04436, over 13546.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2828, pruned_loss=0.0583, over 2638294.36 frames. 
], batch size: 37, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:28:36,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=167874.66666666666, ans=0.04949747468305833 +2024-08-03 16:28:48,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=167911.33333333334, ans=0.125 +2024-08-03 16:28:51,610 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.366e+01 1.102e+02 1.273e+02 1.593e+02 3.536e+02, threshold=2.546e+02, percent-clipped=4.0 +2024-08-03 16:28:51,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=167911.33333333334, ans=0.0 +2024-08-03 16:28:52,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=167911.33333333334, ans=0.125 +2024-08-03 16:28:54,948 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0 +2024-08-03 16:28:57,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=167948.0, ans=0.0 +2024-08-03 16:29:03,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.19 vs. limit=22.5 +2024-08-03 16:29:11,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.51 vs. limit=15.0 +2024-08-03 16:29:20,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=168021.33333333334, ans=0.125 +2024-08-03 16:29:20,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0 +2024-08-03 16:29:23,260 INFO [train.py:1114] (2/4) Epoch 13, batch 2150, loss[loss=0.1776, simple_loss=0.266, pruned_loss=0.04459, over 13566.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.282, pruned_loss=0.05755, over 2646651.44 frames. ], batch size: 36, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:29:27,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168058.0, ans=0.125 +2024-08-03 16:29:27,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=168058.0, ans=0.125 +2024-08-03 16:29:40,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.39 vs. limit=15.0 +2024-08-03 16:29:50,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=168168.0, ans=0.0 +2024-08-03 16:30:08,092 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.97 vs. limit=22.5 +2024-08-03 16:30:08,431 INFO [train.py:1114] (2/4) Epoch 13, batch 2200, loss[loss=0.2108, simple_loss=0.2911, pruned_loss=0.0653, over 13408.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2823, pruned_loss=0.0577, over 2644556.86 frames. 
], batch size: 39, lr: 9.56e-03, grad_scale: 32.0 +2024-08-03 16:30:18,149 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.18 vs. limit=22.5 +2024-08-03 16:30:23,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.382e+01 1.307e+02 1.724e+02 2.157e+02 3.326e+02, threshold=3.447e+02, percent-clipped=16.0 +2024-08-03 16:30:27,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.32 vs. limit=12.0 +2024-08-03 16:30:47,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=168388.0, ans=0.0 +2024-08-03 16:30:55,465 INFO [train.py:1114] (2/4) Epoch 13, batch 2250, loss[loss=0.1748, simple_loss=0.2726, pruned_loss=0.03847, over 13357.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2821, pruned_loss=0.05777, over 2641575.49 frames. ], batch size: 37, lr: 9.56e-03, grad_scale: 16.0 +2024-08-03 16:31:04,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=168461.33333333334, ans=22.5 +2024-08-03 16:31:04,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=168461.33333333334, ans=0.2 +2024-08-03 16:31:16,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168498.0, ans=0.125 +2024-08-03 16:31:26,494 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:31:31,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=168534.66666666666, ans=0.125 +2024-08-03 16:31:44,480 INFO [train.py:1114] (2/4) Epoch 13, batch 2300, loss[loss=0.1825, simple_loss=0.2584, pruned_loss=0.05331, over 13587.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2815, pruned_loss=0.05779, over 2637857.77 frames. ], batch size: 33, lr: 9.55e-03, grad_scale: 16.0 +2024-08-03 16:31:50,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=168608.0, ans=0.125 +2024-08-03 16:32:01,027 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.657e+01 1.154e+02 1.335e+02 1.728e+02 3.672e+02, threshold=2.670e+02, percent-clipped=1.0 +2024-08-03 16:32:06,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168681.33333333334, ans=0.1 +2024-08-03 16:32:15,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=168718.0, ans=0.125 +2024-08-03 16:32:29,772 INFO [train.py:1114] (2/4) Epoch 13, batch 2350, loss[loss=0.1893, simple_loss=0.2868, pruned_loss=0.04589, over 13552.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2821, pruned_loss=0.05774, over 2640447.92 frames. 
], batch size: 38, lr: 9.55e-03, grad_scale: 16.0 +2024-08-03 16:32:38,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168828.0, ans=0.125 +2024-08-03 16:32:42,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=168828.0, ans=0.07 +2024-08-03 16:32:48,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=168828.0, ans=0.95 +2024-08-03 16:33:08,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168938.0, ans=0.1 +2024-08-03 16:33:09,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168938.0, ans=0.125 +2024-08-03 16:33:12,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0 +2024-08-03 16:33:16,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=168974.66666666666, ans=0.125 +2024-08-03 16:33:17,355 INFO [train.py:1114] (2/4) Epoch 13, batch 2400, loss[loss=0.1779, simple_loss=0.2656, pruned_loss=0.04512, over 13543.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2824, pruned_loss=0.05785, over 2641605.91 frames. ], batch size: 35, lr: 9.54e-03, grad_scale: 32.0 +2024-08-03 16:33:32,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=169011.33333333334, ans=0.0 +2024-08-03 16:33:33,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=169011.33333333334, ans=0.0 +2024-08-03 16:33:33,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.921e+01 1.143e+02 1.305e+02 1.687e+02 2.768e+02, threshold=2.610e+02, percent-clipped=2.0 +2024-08-03 16:33:38,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.80 vs. limit=22.5 +2024-08-03 16:33:55,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=169121.33333333334, ans=0.0 +2024-08-03 16:34:04,455 INFO [train.py:1114] (2/4) Epoch 13, batch 2450, loss[loss=0.2046, simple_loss=0.297, pruned_loss=0.05606, over 13354.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2836, pruned_loss=0.05862, over 2632146.25 frames. ], batch size: 37, lr: 9.54e-03, grad_scale: 32.0 +2024-08-03 16:34:12,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=169158.0, ans=0.05 +2024-08-03 16:34:12,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=169158.0, ans=0.2 +2024-08-03 16:34:19,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=169194.66666666666, ans=0.5 +2024-08-03 16:34:33,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.92 vs. 
limit=15.0 +2024-08-03 16:34:39,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=169304.66666666666, ans=0.125 +2024-08-03 16:34:41,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=169304.66666666666, ans=0.125 +2024-08-03 16:34:42,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.26 vs. limit=15.0 +2024-08-03 16:34:51,539 INFO [train.py:1114] (2/4) Epoch 13, batch 2500, loss[loss=0.2366, simple_loss=0.3162, pruned_loss=0.0785, over 13400.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2844, pruned_loss=0.05912, over 2636345.21 frames. ], batch size: 39, lr: 9.53e-03, grad_scale: 32.0 +2024-08-03 16:34:53,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=169341.33333333334, ans=0.0 +2024-08-03 16:34:53,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169341.33333333334, ans=0.1 +2024-08-03 16:34:55,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.86 vs. limit=15.0 +2024-08-03 16:34:58,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=169341.33333333334, ans=22.5 +2024-08-03 16:35:08,283 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.139e+02 1.387e+02 1.623e+02 2.338e+02, threshold=2.774e+02, percent-clipped=0.0 +2024-08-03 16:35:11,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=169414.66666666666, ans=0.025 +2024-08-03 16:35:12,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=169414.66666666666, ans=0.125 +2024-08-03 16:35:15,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=169414.66666666666, ans=0.125 +2024-08-03 16:35:30,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0 +2024-08-03 16:35:38,356 INFO [train.py:1114] (2/4) Epoch 13, batch 2550, loss[loss=0.1956, simple_loss=0.2714, pruned_loss=0.05993, over 13535.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2845, pruned_loss=0.05897, over 2637626.44 frames. ], batch size: 31, lr: 9.53e-03, grad_scale: 16.0 +2024-08-03 16:35:50,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=169561.33333333334, ans=0.125 +2024-08-03 16:35:58,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=169598.0, ans=0.125 +2024-08-03 16:36:12,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.27 vs. 
limit=15.0 +2024-08-03 16:36:20,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=169708.0, ans=0.2 +2024-08-03 16:36:21,540 INFO [train.py:1114] (2/4) Epoch 13, batch 2600, loss[loss=0.1858, simple_loss=0.2781, pruned_loss=0.04672, over 13553.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2848, pruned_loss=0.05918, over 2637056.10 frames. ], batch size: 36, lr: 9.52e-03, grad_scale: 16.0 +2024-08-03 16:36:24,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=169708.0, ans=0.025 +2024-08-03 16:36:29,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.95 vs. limit=15.0 +2024-08-03 16:36:39,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.885e+01 1.136e+02 1.295e+02 1.531e+02 3.554e+02, threshold=2.589e+02, percent-clipped=4.0 +2024-08-03 16:36:43,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.05 vs. limit=6.0 +2024-08-03 16:36:43,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.14 vs. limit=15.0 +2024-08-03 16:36:52,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=169818.0, ans=0.0 +2024-08-03 16:37:06,852 INFO [train.py:1114] (2/4) Epoch 13, batch 2650, loss[loss=0.2363, simple_loss=0.3179, pruned_loss=0.0773, over 13333.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2845, pruned_loss=0.05899, over 2640309.45 frames. ], batch size: 46, lr: 9.52e-03, grad_scale: 16.0 +2024-08-03 16:37:19,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.00 vs. limit=12.0 +2024-08-03 16:37:35,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=170001.33333333334, ans=0.2 +2024-08-03 16:37:41,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0 +2024-08-03 16:37:48,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=170038.0, ans=0.0 +2024-08-03 16:37:49,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=170038.0, ans=0.0 +2024-08-03 16:37:50,565 INFO [train.py:1114] (2/4) Epoch 13, batch 2700, loss[loss=0.2155, simple_loss=0.299, pruned_loss=0.06601, over 13535.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2849, pruned_loss=0.05883, over 2636385.27 frames. 
], batch size: 40, lr: 9.51e-03, grad_scale: 16.0 +2024-08-03 16:38:03,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=170111.33333333334, ans=0.2 +2024-08-03 16:38:07,173 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.611e+01 1.174e+02 1.343e+02 1.652e+02 2.925e+02, threshold=2.686e+02, percent-clipped=2.0 +2024-08-03 16:38:07,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=170148.0, ans=0.125 +2024-08-03 16:38:11,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=170148.0, ans=0.1 +2024-08-03 16:38:19,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=170184.66666666666, ans=15.0 +2024-08-03 16:38:36,367 INFO [train.py:1114] (2/4) Epoch 13, batch 2750, loss[loss=0.2003, simple_loss=0.2835, pruned_loss=0.05855, over 13331.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2835, pruned_loss=0.05854, over 2634001.91 frames. ], batch size: 34, lr: 9.51e-03, grad_scale: 16.0 +2024-08-03 16:38:43,634 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.132e-02 +2024-08-03 16:38:45,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=170294.66666666666, ans=0.0 +2024-08-03 16:38:46,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=170294.66666666666, ans=0.125 +2024-08-03 16:38:47,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.30 vs. limit=10.0 +2024-08-03 16:38:48,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=170294.66666666666, ans=0.0 +2024-08-03 16:38:49,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.34 vs. limit=15.0 +2024-08-03 16:39:02,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.70 vs. limit=15.0 +2024-08-03 16:39:04,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=170368.0, ans=0.025 +2024-08-03 16:39:16,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=170404.66666666666, ans=0.07 +2024-08-03 16:39:20,223 INFO [train.py:1114] (2/4) Epoch 13, batch 2800, loss[loss=0.3009, simple_loss=0.3562, pruned_loss=0.1227, over 9154.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2838, pruned_loss=0.0589, over 2625479.91 frames. 
], batch size: 96, lr: 9.50e-03, grad_scale: 32.0 +2024-08-03 16:39:29,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=170478.0, ans=0.125 +2024-08-03 16:39:34,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=170478.0, ans=0.2 +2024-08-03 16:39:36,326 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.148e+02 1.326e+02 1.634e+02 2.406e+02, threshold=2.653e+02, percent-clipped=0.0 +2024-08-03 16:40:03,089 INFO [train.py:1114] (2/4) Epoch 13, batch 2850, loss[loss=0.2095, simple_loss=0.2957, pruned_loss=0.06165, over 13362.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2845, pruned_loss=0.05935, over 2620390.52 frames. ], batch size: 35, lr: 9.50e-03, grad_scale: 16.0 +2024-08-03 16:40:06,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.46 vs. limit=15.0 +2024-08-03 16:40:07,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=170624.66666666666, ans=0.0 +2024-08-03 16:40:11,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=170661.33333333334, ans=0.125 +2024-08-03 16:40:22,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=170698.0, ans=0.0 +2024-08-03 16:40:38,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.09 vs. limit=15.0 +2024-08-03 16:40:40,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=170771.33333333334, ans=0.1 +2024-08-03 16:40:46,450 INFO [train.py:1114] (2/4) Epoch 13, batch 2900, loss[loss=0.189, simple_loss=0.2755, pruned_loss=0.05119, over 13363.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2851, pruned_loss=0.05898, over 2631396.20 frames. ], batch size: 36, lr: 9.49e-03, grad_scale: 16.0 +2024-08-03 16:40:50,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=170808.0, ans=0.0 +2024-08-03 16:40:57,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=170844.66666666666, ans=0.025 +2024-08-03 16:41:01,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=170844.66666666666, ans=0.125 +2024-08-03 16:41:03,990 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.894e+01 1.100e+02 1.263e+02 1.445e+02 2.759e+02, threshold=2.526e+02, percent-clipped=1.0 +2024-08-03 16:41:22,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=170954.66666666666, ans=0.2 +2024-08-03 16:41:30,941 INFO [train.py:1114] (2/4) Epoch 13, batch 2950, loss[loss=0.1804, simple_loss=0.2602, pruned_loss=0.05032, over 13336.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2839, pruned_loss=0.05889, over 2630554.19 frames. 
], batch size: 34, lr: 9.49e-03, grad_scale: 16.0 +2024-08-03 16:41:31,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=170991.33333333334, ans=0.125 +2024-08-03 16:41:41,525 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.62 vs. limit=15.0 +2024-08-03 16:41:41,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=15.0 +2024-08-03 16:41:48,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-08-03 16:41:51,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=171064.66666666666, ans=0.125 +2024-08-03 16:41:57,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=171101.33333333334, ans=0.125 +2024-08-03 16:41:57,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.01 vs. limit=15.0 +2024-08-03 16:42:02,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=171101.33333333334, ans=0.125 +2024-08-03 16:42:14,322 INFO [train.py:1114] (2/4) Epoch 13, batch 3000, loss[loss=0.2087, simple_loss=0.2954, pruned_loss=0.06103, over 13539.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2828, pruned_loss=0.05815, over 2631260.48 frames. ], batch size: 37, lr: 9.48e-03, grad_scale: 16.0 +2024-08-03 16:42:14,323 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 16:42:28,564 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1746, simple_loss=0.2745, pruned_loss=0.03731, over 944034.00 frames. +2024-08-03 16:42:28,565 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 16:42:45,834 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.103e+01 1.098e+02 1.258e+02 1.464e+02 2.884e+02, threshold=2.515e+02, percent-clipped=2.0 +2024-08-03 16:42:53,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=171284.66666666666, ans=0.2 +2024-08-03 16:43:01,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=171284.66666666666, ans=0.125 +2024-08-03 16:43:10,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=171321.33333333334, ans=0.2 +2024-08-03 16:43:12,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.13 vs. limit=10.0 +2024-08-03 16:43:12,963 INFO [train.py:1114] (2/4) Epoch 13, batch 3050, loss[loss=0.1921, simple_loss=0.2716, pruned_loss=0.0563, over 13528.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2833, pruned_loss=0.0584, over 2628172.37 frames. ], batch size: 35, lr: 9.48e-03, grad_scale: 16.0 +2024-08-03 16:43:22,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.84 vs. 
limit=15.0 +2024-08-03 16:43:28,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.67 vs. limit=15.0 +2024-08-03 16:43:48,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171504.66666666666, ans=0.1 +2024-08-03 16:43:57,530 INFO [train.py:1114] (2/4) Epoch 13, batch 3100, loss[loss=0.2422, simple_loss=0.324, pruned_loss=0.08025, over 13295.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2834, pruned_loss=0.0583, over 2627792.48 frames. ], batch size: 46, lr: 9.47e-03, grad_scale: 16.0 +2024-08-03 16:44:02,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=171541.33333333334, ans=0.125 +2024-08-03 16:44:14,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.118e+02 1.244e+02 1.594e+02 3.299e+02, threshold=2.487e+02, percent-clipped=5.0 +2024-08-03 16:44:17,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=171614.66666666666, ans=0.0 +2024-08-03 16:44:21,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=171614.66666666666, ans=0.0 +2024-08-03 16:44:40,340 INFO [train.py:1114] (2/4) Epoch 13, batch 3150, loss[loss=0.2052, simple_loss=0.2893, pruned_loss=0.06056, over 13062.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2828, pruned_loss=0.05772, over 2628961.97 frames. ], batch size: 48, lr: 9.47e-03, grad_scale: 16.0 +2024-08-03 16:44:49,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=171761.33333333334, ans=0.2 +2024-08-03 16:45:03,991 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.65 vs. limit=12.0 +2024-08-03 16:45:07,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=171834.66666666666, ans=0.025 +2024-08-03 16:45:24,090 INFO [train.py:1114] (2/4) Epoch 13, batch 3200, loss[loss=0.1859, simple_loss=0.2718, pruned_loss=0.04999, over 13539.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2825, pruned_loss=0.05752, over 2634651.60 frames. 
], batch size: 37, lr: 9.46e-03, grad_scale: 32.0 +2024-08-03 16:45:27,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=171908.0, ans=0.125 +2024-08-03 16:45:32,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=171944.66666666666, ans=0.05 +2024-08-03 16:45:41,821 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.110e+01 1.144e+02 1.329e+02 1.843e+02 2.975e+02, threshold=2.659e+02, percent-clipped=4.0 +2024-08-03 16:45:47,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=171981.33333333334, ans=0.125 +2024-08-03 16:45:50,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=172018.0, ans=0.125 +2024-08-03 16:45:50,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=172018.0, ans=0.125 +2024-08-03 16:45:57,092 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:46:06,407 INFO [train.py:1114] (2/4) Epoch 13, batch 3250, loss[loss=0.1691, simple_loss=0.2652, pruned_loss=0.0365, over 13398.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2827, pruned_loss=0.05726, over 2639272.03 frames. ], batch size: 38, lr: 9.46e-03, grad_scale: 16.0 +2024-08-03 16:46:12,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172091.33333333334, ans=0.0 +2024-08-03 16:46:14,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=172128.0, ans=0.07 +2024-08-03 16:46:19,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=172128.0, ans=0.2 +2024-08-03 16:46:25,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172164.66666666666, ans=0.0 +2024-08-03 16:46:33,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=172201.33333333334, ans=0.125 +2024-08-03 16:46:50,169 INFO [train.py:1114] (2/4) Epoch 13, batch 3300, loss[loss=0.2399, simple_loss=0.3259, pruned_loss=0.07696, over 12804.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2825, pruned_loss=0.05749, over 2640776.51 frames. ], batch size: 52, lr: 9.45e-03, grad_scale: 16.0 +2024-08-03 16:46:56,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=172274.66666666666, ans=0.0 +2024-08-03 16:46:56,594 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.73 vs. 
limit=12.0 +2024-08-03 16:47:08,242 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.464e+01 1.115e+02 1.316e+02 1.603e+02 3.409e+02, threshold=2.632e+02, percent-clipped=2.0 +2024-08-03 16:47:08,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172348.0, ans=0.0 +2024-08-03 16:47:08,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.80 vs. limit=15.0 +2024-08-03 16:47:14,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=172348.0, ans=0.015 +2024-08-03 16:47:19,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0 +2024-08-03 16:47:20,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=172384.66666666666, ans=0.125 +2024-08-03 16:47:21,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=172384.66666666666, ans=0.025 +2024-08-03 16:47:30,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.11 vs. limit=15.0 +2024-08-03 16:47:32,850 INFO [train.py:1114] (2/4) Epoch 13, batch 3350, loss[loss=0.2013, simple_loss=0.2864, pruned_loss=0.05811, over 13049.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2839, pruned_loss=0.05832, over 2629801.71 frames. ], batch size: 48, lr: 9.45e-03, grad_scale: 16.0 +2024-08-03 16:47:35,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.02 vs. limit=15.0 +2024-08-03 16:47:52,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=15.0 +2024-08-03 16:47:52,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=172531.33333333334, ans=0.09899494936611666 +2024-08-03 16:48:01,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=172568.0, ans=0.0 +2024-08-03 16:48:01,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=172568.0, ans=0.2 +2024-08-03 16:48:06,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172604.66666666666, ans=0.0 +2024-08-03 16:48:15,664 INFO [train.py:1114] (2/4) Epoch 13, batch 3400, loss[loss=0.1821, simple_loss=0.2559, pruned_loss=0.05415, over 13515.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2833, pruned_loss=0.05835, over 2625298.74 frames. ], batch size: 31, lr: 9.44e-03, grad_scale: 8.0 +2024-08-03 16:48:30,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.54 vs. 
limit=15.0 +2024-08-03 16:48:34,566 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.232e+01 1.137e+02 1.264e+02 1.560e+02 2.546e+02, threshold=2.528e+02, percent-clipped=0.0 +2024-08-03 16:48:37,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=172714.66666666666, ans=0.2 +2024-08-03 16:48:40,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-08-03 16:48:44,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=172751.33333333334, ans=0.09899494936611666 +2024-08-03 16:49:00,076 INFO [train.py:1114] (2/4) Epoch 13, batch 3450, loss[loss=0.1994, simple_loss=0.2872, pruned_loss=0.05585, over 12919.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2838, pruned_loss=0.05864, over 2627880.10 frames. ], batch size: 52, lr: 9.44e-03, grad_scale: 8.0 +2024-08-03 16:49:01,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=172824.66666666666, ans=0.125 +2024-08-03 16:49:01,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=172824.66666666666, ans=0.0 +2024-08-03 16:49:02,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.22 vs. limit=22.5 +2024-08-03 16:49:05,287 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:49:13,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172861.33333333334, ans=0.1 +2024-08-03 16:49:15,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=172861.33333333334, ans=0.0 +2024-08-03 16:49:21,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172898.0, ans=0.125 +2024-08-03 16:49:33,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.09 vs. limit=15.0 +2024-08-03 16:49:43,781 INFO [train.py:1114] (2/4) Epoch 13, batch 3500, loss[loss=0.1904, simple_loss=0.266, pruned_loss=0.05745, over 13541.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2823, pruned_loss=0.05792, over 2629307.33 frames. 
], batch size: 34, lr: 9.43e-03, grad_scale: 8.0 +2024-08-03 16:49:43,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173008.0, ans=0.1 +2024-08-03 16:49:54,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=173044.66666666666, ans=0.125 +2024-08-03 16:49:55,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=173044.66666666666, ans=0.125 +2024-08-03 16:49:58,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173044.66666666666, ans=0.125 +2024-08-03 16:49:59,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=173044.66666666666, ans=0.0 +2024-08-03 16:50:03,121 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.139e+02 1.315e+02 1.608e+02 2.660e+02, threshold=2.630e+02, percent-clipped=2.0 +2024-08-03 16:50:16,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=173118.0, ans=0.0 +2024-08-03 16:50:18,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=173154.66666666666, ans=0.2 +2024-08-03 16:50:24,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173154.66666666666, ans=0.0 +2024-08-03 16:50:26,467 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:50:27,047 INFO [train.py:1114] (2/4) Epoch 13, batch 3550, loss[loss=0.2139, simple_loss=0.3015, pruned_loss=0.06317, over 12689.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2841, pruned_loss=0.05877, over 2628166.81 frames. ], batch size: 59, lr: 9.43e-03, grad_scale: 8.0 +2024-08-03 16:50:36,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.03 vs. limit=15.0 +2024-08-03 16:50:51,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.47 vs. limit=15.0 +2024-08-03 16:51:00,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173301.33333333334, ans=0.1 +2024-08-03 16:51:00,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.61 vs. limit=15.0 +2024-08-03 16:51:05,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.70 vs. limit=15.0 +2024-08-03 16:51:10,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173338.0, ans=0.0 +2024-08-03 16:51:11,547 INFO [train.py:1114] (2/4) Epoch 13, batch 3600, loss[loss=0.2359, simple_loss=0.3108, pruned_loss=0.08048, over 9050.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2892, pruned_loss=0.0634, over 2484699.25 frames. 
], batch size: 97, lr: 9.42e-03, grad_scale: 16.0 +2024-08-03 16:51:31,029 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.213e+02 1.304e+02 1.372e+02 1.765e+02, threshold=2.609e+02, percent-clipped=0.0 +2024-08-03 16:51:34,576 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:51:36,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=173484.66666666666, ans=0.0 +2024-08-03 16:51:39,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173484.66666666666, ans=0.125 +2024-08-03 16:51:40,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=173484.66666666666, ans=0.0 +2024-08-03 16:51:42,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=173484.66666666666, ans=0.125 +2024-08-03 16:51:42,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=173484.66666666666, ans=0.09899494936611666 +2024-08-03 16:52:47,635 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.1632, simple_loss=0.2504, pruned_loss=0.03799, over 13347.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2504, pruned_loss=0.03799, over 13347.00 frames. ], batch size: 33, lr: 9.08e-03, grad_scale: 32.0 +2024-08-03 16:52:47,635 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 16:52:54,215 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7062, 3.0756, 2.5654, 2.2347], device='cuda:2') +2024-08-03 16:53:02,073 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1773, simple_loss=0.2784, pruned_loss=0.03813, over 944034.00 frames. +2024-08-03 16:53:02,073 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 16:53:11,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=173558.0, ans=0.05 +2024-08-03 16:53:34,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=173631.33333333334, ans=0.0 +2024-08-03 16:53:35,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=173631.33333333334, ans=0.07 +2024-08-03 16:53:45,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173668.0, ans=0.1 +2024-08-03 16:53:49,560 INFO [train.py:1114] (2/4) Epoch 14, batch 50, loss[loss=0.1734, simple_loss=0.2566, pruned_loss=0.04514, over 13408.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2832, pruned_loss=0.05798, over 578247.92 frames. 
], batch size: 32, lr: 9.07e-03, grad_scale: 32.0 +2024-08-03 16:53:58,666 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:54:03,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=173741.33333333334, ans=0.0 +2024-08-03 16:54:13,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=173741.33333333334, ans=0.2 +2024-08-03 16:54:13,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=173741.33333333334, ans=0.125 +2024-08-03 16:54:16,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173778.0, ans=0.1 +2024-08-03 16:54:20,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173778.0, ans=0.1 +2024-08-03 16:54:22,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=12.0 +2024-08-03 16:54:27,224 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.175e+02 1.367e+02 1.781e+02 2.550e+02, threshold=2.735e+02, percent-clipped=0.0 +2024-08-03 16:54:35,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=173851.33333333334, ans=0.2 +2024-08-03 16:54:43,513 INFO [train.py:1114] (2/4) Epoch 14, batch 100, loss[loss=0.1671, simple_loss=0.2518, pruned_loss=0.04124, over 13524.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2828, pruned_loss=0.05721, over 1025744.29 frames. ], batch size: 35, lr: 9.07e-03, grad_scale: 32.0 +2024-08-03 16:55:30,122 INFO [train.py:1114] (2/4) Epoch 14, batch 150, loss[loss=0.1596, simple_loss=0.2479, pruned_loss=0.03558, over 13403.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2811, pruned_loss=0.05651, over 1387072.06 frames. ], batch size: 32, lr: 9.06e-03, grad_scale: 32.0 +2024-08-03 16:55:41,644 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.52 vs. limit=22.5 +2024-08-03 16:55:42,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=174108.0, ans=0.035 +2024-08-03 16:55:44,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.49 vs. limit=15.0 +2024-08-03 16:55:52,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.81 vs. 
limit=12.0 +2024-08-03 16:55:58,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=174144.66666666666, ans=0.125 +2024-08-03 16:56:03,372 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.908e+01 1.123e+02 1.329e+02 1.786e+02 3.044e+02, threshold=2.658e+02, percent-clipped=1.0 +2024-08-03 16:56:07,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174181.33333333334, ans=0.125 +2024-08-03 16:56:16,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=174218.0, ans=0.0 +2024-08-03 16:56:24,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=174218.0, ans=0.025 +2024-08-03 16:56:28,041 INFO [train.py:1114] (2/4) Epoch 14, batch 200, loss[loss=0.1883, simple_loss=0.2786, pruned_loss=0.04906, over 12539.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2799, pruned_loss=0.05598, over 1665045.94 frames. ], batch size: 58, lr: 9.06e-03, grad_scale: 32.0 +2024-08-03 16:56:41,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=174291.33333333334, ans=0.125 +2024-08-03 16:56:43,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=174291.33333333334, ans=0.035 +2024-08-03 16:56:44,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174291.33333333334, ans=0.125 +2024-08-03 16:56:49,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=174328.0, ans=0.0 +2024-08-03 16:57:09,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=174401.33333333334, ans=15.0 +2024-08-03 16:57:14,951 INFO [train.py:1114] (2/4) Epoch 14, batch 250, loss[loss=0.2071, simple_loss=0.292, pruned_loss=0.06108, over 13295.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2805, pruned_loss=0.05627, over 1884083.06 frames. ], batch size: 46, lr: 9.05e-03, grad_scale: 32.0 +2024-08-03 16:57:15,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174438.0, ans=0.125 +2024-08-03 16:57:15,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=174438.0, ans=0.125 +2024-08-03 16:57:49,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.116e+01 1.178e+02 1.381e+02 1.725e+02 3.085e+02, threshold=2.762e+02, percent-clipped=4.0 +2024-08-03 16:57:51,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=174548.0, ans=0.125 +2024-08-03 16:57:56,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=174584.66666666666, ans=0.2 +2024-08-03 16:58:01,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=174584.66666666666, ans=0.125 +2024-08-03 16:58:05,301 INFO [train.py:1114] (2/4) Epoch 14, batch 300, loss[loss=0.1978, simple_loss=0.2854, pruned_loss=0.05513, over 13458.00 frames. 
], tot_loss[loss=0.1961, simple_loss=0.2801, pruned_loss=0.05603, over 2051020.43 frames. ], batch size: 42, lr: 9.05e-03, grad_scale: 32.0 +2024-08-03 16:58:18,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174658.0, ans=0.125 +2024-08-03 16:58:36,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174731.33333333334, ans=0.1 +2024-08-03 16:58:49,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.83 vs. limit=10.0 +2024-08-03 16:58:52,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174804.66666666666, ans=0.1 +2024-08-03 16:58:52,953 INFO [train.py:1114] (2/4) Epoch 14, batch 350, loss[loss=0.1824, simple_loss=0.2572, pruned_loss=0.05383, over 13585.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2803, pruned_loss=0.05583, over 2181831.98 frames. ], batch size: 33, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 16:58:53,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0 +2024-08-03 16:59:35,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=174914.66666666666, ans=0.125 +2024-08-03 16:59:36,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=174914.66666666666, ans=0.025 +2024-08-03 16:59:38,112 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.118e+02 1.275e+02 1.546e+02 2.611e+02, threshold=2.551e+02, percent-clipped=0.0 +2024-08-03 16:59:57,451 INFO [train.py:1114] (2/4) Epoch 14, batch 400, loss[loss=0.1959, simple_loss=0.2826, pruned_loss=0.05457, over 13348.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2794, pruned_loss=0.05523, over 2286595.91 frames. ], batch size: 37, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 17:00:02,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=174988.0, ans=0.2 +2024-08-03 17:00:12,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=175024.66666666666, ans=0.0 +2024-08-03 17:00:14,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=175024.66666666666, ans=0.0 +2024-08-03 17:00:26,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175098.0, ans=0.1 +2024-08-03 17:00:44,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=175134.66666666666, ans=0.125 +2024-08-03 17:00:46,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.52 vs. limit=15.0 +2024-08-03 17:00:48,049 INFO [train.py:1114] (2/4) Epoch 14, batch 450, loss[loss=0.2195, simple_loss=0.3047, pruned_loss=0.06715, over 13549.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2799, pruned_loss=0.05571, over 2360964.73 frames. 
], batch size: 38, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 17:01:03,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=175208.0, ans=0.0 +2024-08-03 17:01:05,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0 +2024-08-03 17:01:18,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.283e+01 1.087e+02 1.281e+02 1.631e+02 3.461e+02, threshold=2.562e+02, percent-clipped=3.0 +2024-08-03 17:01:27,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=175318.0, ans=0.2 +2024-08-03 17:01:29,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175318.0, ans=0.1 +2024-08-03 17:01:36,930 INFO [train.py:1114] (2/4) Epoch 14, batch 500, loss[loss=0.2275, simple_loss=0.3155, pruned_loss=0.06974, over 13460.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2793, pruned_loss=0.05558, over 2426469.76 frames. ], batch size: 43, lr: 9.03e-03, grad_scale: 32.0 +2024-08-03 17:01:46,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=175391.33333333334, ans=0.2 +2024-08-03 17:01:48,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=175391.33333333334, ans=0.0 +2024-08-03 17:01:50,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0 +2024-08-03 17:01:54,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=175428.0, ans=0.125 +2024-08-03 17:02:21,559 INFO [train.py:1114] (2/4) Epoch 14, batch 550, loss[loss=0.2241, simple_loss=0.3114, pruned_loss=0.06843, over 13067.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2793, pruned_loss=0.05574, over 2469592.46 frames. ], batch size: 48, lr: 9.03e-03, grad_scale: 32.0 +2024-08-03 17:02:23,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=175538.0, ans=0.125 +2024-08-03 17:02:28,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=175538.0, ans=0.04949747468305833 +2024-08-03 17:02:29,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=175574.66666666666, ans=0.125 +2024-08-03 17:02:29,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=175574.66666666666, ans=0.0 +2024-08-03 17:02:38,962 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:02:40,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.13 vs. 
limit=15.0 +2024-08-03 17:02:52,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=175648.0, ans=0.95 +2024-08-03 17:02:52,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=175648.0, ans=0.0 +2024-08-03 17:02:53,528 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.358e+01 1.151e+02 1.294e+02 1.518e+02 2.416e+02, threshold=2.587e+02, percent-clipped=0.0 +2024-08-03 17:03:06,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.41 vs. limit=15.0 +2024-08-03 17:03:08,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=175721.33333333334, ans=0.2 +2024-08-03 17:03:08,899 INFO [train.py:1114] (2/4) Epoch 14, batch 600, loss[loss=0.2479, simple_loss=0.319, pruned_loss=0.08838, over 13337.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2796, pruned_loss=0.05597, over 2509219.73 frames. ], batch size: 46, lr: 9.02e-03, grad_scale: 16.0 +2024-08-03 17:03:12,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175721.33333333334, ans=0.125 +2024-08-03 17:03:20,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=175758.0, ans=0.125 +2024-08-03 17:03:26,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=175758.0, ans=0.0 +2024-08-03 17:03:37,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175831.33333333334, ans=0.1 +2024-08-03 17:03:44,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=175831.33333333334, ans=0.125 +2024-08-03 17:03:49,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=175868.0, ans=0.0 +2024-08-03 17:03:56,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=175904.66666666666, ans=0.025 +2024-08-03 17:03:57,661 INFO [train.py:1114] (2/4) Epoch 14, batch 650, loss[loss=0.1876, simple_loss=0.2735, pruned_loss=0.05083, over 13538.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2788, pruned_loss=0.05553, over 2544140.51 frames. 
], batch size: 37, lr: 9.02e-03, grad_scale: 8.0 +2024-08-03 17:04:03,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=175904.66666666666, ans=0.0 +2024-08-03 17:04:28,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=176014.66666666666, ans=0.2 +2024-08-03 17:04:29,807 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.160e+02 1.386e+02 1.901e+02 3.564e+02, threshold=2.772e+02, percent-clipped=5.0 +2024-08-03 17:04:31,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=176014.66666666666, ans=0.0 +2024-08-03 17:04:38,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=176051.33333333334, ans=0.1 +2024-08-03 17:04:46,197 INFO [train.py:1114] (2/4) Epoch 14, batch 700, loss[loss=0.1596, simple_loss=0.2441, pruned_loss=0.03758, over 13528.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2793, pruned_loss=0.05573, over 2564936.09 frames. ], batch size: 35, lr: 9.01e-03, grad_scale: 8.0 +2024-08-03 17:04:50,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=176088.0, ans=0.0 +2024-08-03 17:04:52,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=176088.0, ans=0.1 +2024-08-03 17:04:54,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0 +2024-08-03 17:04:57,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176124.66666666666, ans=0.1 +2024-08-03 17:05:03,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=176124.66666666666, ans=0.0 +2024-08-03 17:05:12,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=176198.0, ans=0.125 +2024-08-03 17:05:28,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=176234.66666666666, ans=0.2 +2024-08-03 17:05:28,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=176234.66666666666, ans=0.125 +2024-08-03 17:05:31,384 INFO [train.py:1114] (2/4) Epoch 14, batch 750, loss[loss=0.1802, simple_loss=0.2757, pruned_loss=0.04233, over 13360.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2794, pruned_loss=0.0557, over 2582021.39 frames. ], batch size: 37, lr: 9.01e-03, grad_scale: 8.0 +2024-08-03 17:05:55,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176344.66666666666, ans=0.1 +2024-08-03 17:06:00,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.98 vs. 
limit=15.0 +2024-08-03 17:06:02,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.800e+01 1.140e+02 1.290e+02 1.721e+02 6.299e+02, threshold=2.581e+02, percent-clipped=4.0 +2024-08-03 17:06:16,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=176454.66666666666, ans=0.025 +2024-08-03 17:06:17,181 INFO [train.py:1114] (2/4) Epoch 14, batch 800, loss[loss=0.183, simple_loss=0.2633, pruned_loss=0.05133, over 13338.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2798, pruned_loss=0.05606, over 2596524.80 frames. ], batch size: 33, lr: 9.00e-03, grad_scale: 16.0 +2024-08-03 17:06:17,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=176454.66666666666, ans=0.04949747468305833 +2024-08-03 17:06:22,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176454.66666666666, ans=0.0 +2024-08-03 17:06:46,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176564.66666666666, ans=0.125 +2024-08-03 17:06:53,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.52 vs. limit=10.0 +2024-08-03 17:07:05,357 INFO [train.py:1114] (2/4) Epoch 14, batch 850, loss[loss=0.205, simple_loss=0.2918, pruned_loss=0.05908, over 13325.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.28, pruned_loss=0.05651, over 2609625.28 frames. ], batch size: 40, lr: 9.00e-03, grad_scale: 16.0 +2024-08-03 17:07:40,305 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.443e+01 1.087e+02 1.211e+02 1.412e+02 2.074e+02, threshold=2.422e+02, percent-clipped=0.0 +2024-08-03 17:07:45,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176784.66666666666, ans=0.0 +2024-08-03 17:07:47,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.16 vs. limit=15.0 +2024-08-03 17:07:48,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176784.66666666666, ans=0.1 +2024-08-03 17:07:54,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.48 vs. limit=22.5 +2024-08-03 17:07:55,282 INFO [train.py:1114] (2/4) Epoch 14, batch 900, loss[loss=0.1796, simple_loss=0.2567, pruned_loss=0.05123, over 13347.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2811, pruned_loss=0.05701, over 2611682.42 frames. 
], batch size: 33, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:08:32,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=176931.33333333334, ans=0.0 +2024-08-03 17:08:40,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=176968.0, ans=0.125 +2024-08-03 17:08:41,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=177004.66666666666, ans=0.125 +2024-08-03 17:08:42,702 INFO [train.py:1114] (2/4) Epoch 14, batch 950, loss[loss=0.1702, simple_loss=0.252, pruned_loss=0.04423, over 13529.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2814, pruned_loss=0.05683, over 2612689.34 frames. ], batch size: 34, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:08:54,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=177041.33333333334, ans=0.125 +2024-08-03 17:09:06,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=177078.0, ans=0.125 +2024-08-03 17:09:12,541 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:09:15,827 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.964e+01 1.158e+02 1.398e+02 1.727e+02 2.347e+02, threshold=2.796e+02, percent-clipped=0.0 +2024-08-03 17:09:18,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.99 vs. limit=10.0 +2024-08-03 17:09:30,714 INFO [train.py:1114] (2/4) Epoch 14, batch 1000, loss[loss=0.1912, simple_loss=0.2792, pruned_loss=0.05159, over 13369.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2815, pruned_loss=0.05702, over 2610555.07 frames. ], batch size: 35, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:09:32,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=177188.0, ans=0.025 +2024-08-03 17:09:36,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=177188.0, ans=0.125 +2024-08-03 17:09:51,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=177261.33333333334, ans=0.025 +2024-08-03 17:09:57,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.31 vs. limit=22.5 +2024-08-03 17:10:03,966 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=15.0 +2024-08-03 17:10:16,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.60 vs. limit=22.5 +2024-08-03 17:10:19,004 INFO [train.py:1114] (2/4) Epoch 14, batch 1050, loss[loss=0.2164, simple_loss=0.3097, pruned_loss=0.06157, over 13580.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2807, pruned_loss=0.05673, over 2614440.31 frames. 
], batch size: 39, lr: 8.98e-03, grad_scale: 16.0 +2024-08-03 17:10:21,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177371.33333333334, ans=0.125 +2024-08-03 17:10:38,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=177444.66666666666, ans=0.0 +2024-08-03 17:10:51,918 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.165e+01 1.080e+02 1.235e+02 1.446e+02 2.124e+02, threshold=2.470e+02, percent-clipped=0.0 +2024-08-03 17:10:55,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=12.0 +2024-08-03 17:11:06,534 INFO [train.py:1114] (2/4) Epoch 14, batch 1100, loss[loss=0.1893, simple_loss=0.2752, pruned_loss=0.0517, over 13555.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2802, pruned_loss=0.05632, over 2618617.44 frames. ], batch size: 36, lr: 8.98e-03, grad_scale: 16.0 +2024-08-03 17:11:15,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=177554.66666666666, ans=0.125 +2024-08-03 17:11:32,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=177628.0, ans=0.1 +2024-08-03 17:11:39,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177664.66666666666, ans=0.125 +2024-08-03 17:11:50,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=177701.33333333334, ans=0.2 +2024-08-03 17:11:54,526 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:11:56,969 INFO [train.py:1114] (2/4) Epoch 14, batch 1150, loss[loss=0.2026, simple_loss=0.2848, pruned_loss=0.06019, over 13570.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.28, pruned_loss=0.05633, over 2617858.90 frames. ], batch size: 36, lr: 8.97e-03, grad_scale: 16.0 +2024-08-03 17:12:00,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=177738.0, ans=0.1 +2024-08-03 17:12:10,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177774.66666666666, ans=0.125 +2024-08-03 17:12:12,339 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.52 vs. limit=22.5 +2024-08-03 17:12:20,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.67 vs. limit=15.0 +2024-08-03 17:12:28,534 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.383e+01 1.181e+02 1.323e+02 1.686e+02 3.018e+02, threshold=2.646e+02, percent-clipped=3.0 +2024-08-03 17:12:41,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=177884.66666666666, ans=0.125 +2024-08-03 17:12:42,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.45 vs. 
limit=12.0 +2024-08-03 17:12:43,145 INFO [train.py:1114] (2/4) Epoch 14, batch 1200, loss[loss=0.2095, simple_loss=0.2929, pruned_loss=0.06309, over 13580.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2814, pruned_loss=0.05676, over 2614869.86 frames. ], batch size: 39, lr: 8.97e-03, grad_scale: 32.0 +2024-08-03 17:13:01,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=177994.66666666666, ans=0.125 +2024-08-03 17:13:24,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=178068.0, ans=10.0 +2024-08-03 17:13:28,041 INFO [train.py:1114] (2/4) Epoch 14, batch 1250, loss[loss=0.2034, simple_loss=0.2912, pruned_loss=0.0578, over 13435.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2813, pruned_loss=0.05641, over 2627294.36 frames. ], batch size: 42, lr: 8.96e-03, grad_scale: 32.0 +2024-08-03 17:13:32,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=178104.66666666666, ans=0.2 +2024-08-03 17:13:55,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=178178.0, ans=0.125 +2024-08-03 17:13:57,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.71 vs. limit=10.0 +2024-08-03 17:13:57,865 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:13:59,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=178214.66666666666, ans=0.125 +2024-08-03 17:14:01,211 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.606e+01 1.145e+02 1.312e+02 1.553e+02 2.666e+02, threshold=2.625e+02, percent-clipped=1.0 +2024-08-03 17:14:02,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=178214.66666666666, ans=0.125 +2024-08-03 17:14:06,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=178251.33333333334, ans=0.2 +2024-08-03 17:14:08,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=178251.33333333334, ans=0.0 +2024-08-03 17:14:14,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178251.33333333334, ans=0.125 +2024-08-03 17:14:15,815 INFO [train.py:1114] (2/4) Epoch 14, batch 1300, loss[loss=0.1937, simple_loss=0.2935, pruned_loss=0.04698, over 12868.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2806, pruned_loss=0.05611, over 2630751.00 frames. ], batch size: 52, lr: 8.96e-03, grad_scale: 32.0 +2024-08-03 17:14:29,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178324.66666666666, ans=0.125 +2024-08-03 17:14:44,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=178398.0, ans=0.0 +2024-08-03 17:14:45,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. 
limit=6.0 +2024-08-03 17:14:46,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=178398.0, ans=0.0 +2024-08-03 17:15:04,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=178471.33333333334, ans=0.95 +2024-08-03 17:15:05,135 INFO [train.py:1114] (2/4) Epoch 14, batch 1350, loss[loss=0.1791, simple_loss=0.2646, pruned_loss=0.04683, over 13545.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2811, pruned_loss=0.05635, over 2639109.63 frames. ], batch size: 37, lr: 8.95e-03, grad_scale: 32.0 +2024-08-03 17:15:07,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=178471.33333333334, ans=0.025 +2024-08-03 17:15:19,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.85 vs. limit=15.0 +2024-08-03 17:15:19,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178508.0, ans=0.125 +2024-08-03 17:15:37,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=8.0 +2024-08-03 17:15:37,512 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.701e+01 1.127e+02 1.257e+02 1.561e+02 2.635e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 17:15:41,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178581.33333333334, ans=0.125 +2024-08-03 17:15:51,672 INFO [train.py:1114] (2/4) Epoch 14, batch 1400, loss[loss=0.1686, simple_loss=0.2439, pruned_loss=0.04664, over 13246.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2801, pruned_loss=0.05585, over 2642701.57 frames. ], batch size: 31, lr: 8.95e-03, grad_scale: 16.0 +2024-08-03 17:15:54,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.73 vs. limit=15.0 +2024-08-03 17:15:58,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178654.66666666666, ans=0.125 +2024-08-03 17:16:17,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=178728.0, ans=0.125 +2024-08-03 17:16:29,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=178801.33333333334, ans=0.125 +2024-08-03 17:16:35,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178838.0, ans=0.0 +2024-08-03 17:16:36,742 INFO [train.py:1114] (2/4) Epoch 14, batch 1450, loss[loss=0.2106, simple_loss=0.297, pruned_loss=0.06212, over 13427.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.281, pruned_loss=0.05619, over 2641615.07 frames. ], batch size: 43, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:16:38,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.75 vs. 
limit=15.0 +2024-08-03 17:16:43,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=178838.0, ans=0.125 +2024-08-03 17:16:54,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=178911.33333333334, ans=0.0 +2024-08-03 17:17:08,369 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.238e+01 1.149e+02 1.315e+02 1.594e+02 2.634e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 17:17:08,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=178948.0, ans=0.2 +2024-08-03 17:17:10,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178948.0, ans=0.1 +2024-08-03 17:17:17,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178984.66666666666, ans=0.125 +2024-08-03 17:17:18,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=178984.66666666666, ans=0.125 +2024-08-03 17:17:24,003 INFO [train.py:1114] (2/4) Epoch 14, batch 1500, loss[loss=0.1906, simple_loss=0.2777, pruned_loss=0.05175, over 13414.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.281, pruned_loss=0.05599, over 2641635.28 frames. ], batch size: 39, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:17:26,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179021.33333333334, ans=0.125 +2024-08-03 17:17:40,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179058.0, ans=0.125 +2024-08-03 17:17:40,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=179058.0, ans=0.0 +2024-08-03 17:17:42,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179094.66666666666, ans=0.1 +2024-08-03 17:17:58,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179131.33333333334, ans=0.1 +2024-08-03 17:18:03,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=179168.0, ans=0.125 +2024-08-03 17:18:10,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=179168.0, ans=0.0 +2024-08-03 17:18:10,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179168.0, ans=0.0 +2024-08-03 17:18:11,854 INFO [train.py:1114] (2/4) Epoch 14, batch 1550, loss[loss=0.2038, simple_loss=0.2967, pruned_loss=0.05549, over 13386.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2815, pruned_loss=0.0564, over 2631236.95 frames. 
], batch size: 41, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:18:22,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=179241.33333333334, ans=0.125 +2024-08-03 17:18:45,086 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.351e+01 1.093e+02 1.288e+02 1.698e+02 2.728e+02, threshold=2.576e+02, percent-clipped=2.0 +2024-08-03 17:18:49,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179314.66666666666, ans=0.125 +2024-08-03 17:18:52,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.92 vs. limit=15.0 +2024-08-03 17:18:59,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179388.0, ans=0.125 +2024-08-03 17:19:00,523 INFO [train.py:1114] (2/4) Epoch 14, batch 1600, loss[loss=0.2245, simple_loss=0.3188, pruned_loss=0.06512, over 13576.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.281, pruned_loss=0.05628, over 2624737.41 frames. ], batch size: 39, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:05,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179388.0, ans=0.125 +2024-08-03 17:19:05,546 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.89 vs. limit=22.5 +2024-08-03 17:19:09,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=179424.66666666666, ans=0.2 +2024-08-03 17:19:09,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=179424.66666666666, ans=0.2 +2024-08-03 17:19:17,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.98 vs. limit=12.0 +2024-08-03 17:19:45,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=179571.33333333334, ans=0.125 +2024-08-03 17:19:46,116 INFO [train.py:1114] (2/4) Epoch 14, batch 1650, loss[loss=0.2062, simple_loss=0.2883, pruned_loss=0.06207, over 13345.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2812, pruned_loss=0.05678, over 2621803.18 frames. ], batch size: 40, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:54,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=179608.0, ans=0.1 +2024-08-03 17:19:54,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.57 vs. limit=15.0 +2024-08-03 17:20:33,194 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.145e+02 1.327e+02 1.825e+02 3.127e+02, threshold=2.655e+02, percent-clipped=5.0 +2024-08-03 17:20:33,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.08 vs. limit=15.0 +2024-08-03 17:20:38,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. 
limit=6.0 +2024-08-03 17:20:46,571 INFO [train.py:1114] (2/4) Epoch 14, batch 1700, loss[loss=0.1566, simple_loss=0.2361, pruned_loss=0.03858, over 13263.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2804, pruned_loss=0.05638, over 2630233.79 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:20:54,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=179791.33333333334, ans=0.125 +2024-08-03 17:21:13,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.36 vs. limit=15.0 +2024-08-03 17:21:16,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=179864.66666666666, ans=0.125 +2024-08-03 17:21:17,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=179864.66666666666, ans=0.0 +2024-08-03 17:21:21,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=179864.66666666666, ans=0.125 +2024-08-03 17:21:33,628 INFO [train.py:1114] (2/4) Epoch 14, batch 1750, loss[loss=0.1946, simple_loss=0.2642, pruned_loss=0.06255, over 13544.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2807, pruned_loss=0.05673, over 2633866.45 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:21:35,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179938.0, ans=0.125 +2024-08-03 17:21:58,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180011.33333333334, ans=0.125 +2024-08-03 17:21:58,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=180011.33333333334, ans=0.125 +2024-08-03 17:22:04,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=180048.0, ans=0.025 +2024-08-03 17:22:07,567 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.076e+01 1.125e+02 1.266e+02 1.724e+02 3.044e+02, threshold=2.532e+02, percent-clipped=5.0 +2024-08-03 17:22:10,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0 +2024-08-03 17:22:22,976 INFO [train.py:1114] (2/4) Epoch 14, batch 1800, loss[loss=0.196, simple_loss=0.2786, pruned_loss=0.05674, over 13532.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2808, pruned_loss=0.05684, over 2635259.06 frames. ], batch size: 38, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:22:49,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.77 vs. 
limit=15.0 +2024-08-03 17:22:58,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=180231.33333333334, ans=0.0 +2024-08-03 17:22:59,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=180231.33333333334, ans=0.0 +2024-08-03 17:23:00,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=180268.0, ans=0.125 +2024-08-03 17:23:10,716 INFO [train.py:1114] (2/4) Epoch 14, batch 1850, loss[loss=0.2084, simple_loss=0.292, pruned_loss=0.06239, over 13415.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2801, pruned_loss=0.05638, over 2637943.94 frames. ], batch size: 39, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:23:21,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=180341.33333333334, ans=0.95 +2024-08-03 17:23:26,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=180341.33333333334, ans=0.2 +2024-08-03 17:23:35,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180378.0, ans=0.1 +2024-08-03 17:23:37,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=180414.66666666666, ans=0.0 +2024-08-03 17:23:42,948 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.173e+01 1.187e+02 1.383e+02 1.867e+02 3.590e+02, threshold=2.765e+02, percent-clipped=8.0 +2024-08-03 17:23:43,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=180414.66666666666, ans=0.125 +2024-08-03 17:23:56,656 INFO [train.py:1114] (2/4) Epoch 14, batch 1900, loss[loss=0.2047, simple_loss=0.2995, pruned_loss=0.05499, over 13323.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2808, pruned_loss=0.05628, over 2640777.25 frames. ], batch size: 40, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:23:56,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=180488.0, ans=0.125 +2024-08-03 17:24:16,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.60 vs. limit=15.0 +2024-08-03 17:24:23,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=180598.0, ans=0.0 +2024-08-03 17:24:26,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=180598.0, ans=0.125 +2024-08-03 17:24:43,812 INFO [train.py:1114] (2/4) Epoch 14, batch 1950, loss[loss=0.1818, simple_loss=0.2627, pruned_loss=0.05041, over 13548.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2813, pruned_loss=0.05615, over 2647091.21 frames. ], batch size: 36, lr: 8.90e-03, grad_scale: 16.0 +2024-08-03 17:24:54,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.80 vs. 
limit=15.0 +2024-08-03 17:25:09,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180744.66666666666, ans=0.125 +2024-08-03 17:25:10,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=180781.33333333334, ans=0.125 +2024-08-03 17:25:19,256 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.397e+01 1.089e+02 1.232e+02 1.473e+02 2.566e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 17:25:19,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=180781.33333333334, ans=15.0 +2024-08-03 17:25:26,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180818.0, ans=0.125 +2024-08-03 17:25:27,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=180818.0, ans=0.125 +2024-08-03 17:25:32,199 INFO [train.py:1114] (2/4) Epoch 14, batch 2000, loss[loss=0.1612, simple_loss=0.2427, pruned_loss=0.03984, over 13529.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2822, pruned_loss=0.0567, over 2636137.36 frames. ], batch size: 31, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:25:32,712 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=12.0 +2024-08-03 17:25:37,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=180854.66666666666, ans=0.125 +2024-08-03 17:26:21,669 INFO [train.py:1114] (2/4) Epoch 14, batch 2050, loss[loss=0.1918, simple_loss=0.2654, pruned_loss=0.05909, over 13428.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2814, pruned_loss=0.05669, over 2632909.87 frames. ], batch size: 32, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:26:27,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=181038.0, ans=0.2 +2024-08-03 17:26:29,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=181038.0, ans=0.09899494936611666 +2024-08-03 17:26:42,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=181111.33333333334, ans=0.05 +2024-08-03 17:26:44,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.48 vs. limit=5.0 +2024-08-03 17:26:51,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0 +2024-08-03 17:26:54,259 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.117e+02 1.302e+02 1.562e+02 2.500e+02, threshold=2.604e+02, percent-clipped=1.0 +2024-08-03 17:27:02,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181184.66666666666, ans=0.0 +2024-08-03 17:27:06,805 INFO [train.py:1114] (2/4) Epoch 14, batch 2100, loss[loss=0.201, simple_loss=0.2847, pruned_loss=0.05865, over 13538.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2804, pruned_loss=0.05586, over 2638838.08 frames. 
], batch size: 37, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:27:10,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181221.33333333334, ans=0.0 +2024-08-03 17:27:26,906 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:27:45,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=181368.0, ans=0.0 +2024-08-03 17:27:51,984 INFO [train.py:1114] (2/4) Epoch 14, batch 2150, loss[loss=0.2007, simple_loss=0.2834, pruned_loss=0.05894, over 13551.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2792, pruned_loss=0.05534, over 2647449.38 frames. ], batch size: 36, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:28:00,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=181441.33333333334, ans=0.2 +2024-08-03 17:28:09,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=181478.0, ans=0.0 +2024-08-03 17:28:18,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181478.0, ans=0.125 +2024-08-03 17:28:24,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=181514.66666666666, ans=0.0 +2024-08-03 17:28:26,854 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.184e+01 1.154e+02 1.453e+02 1.954e+02 3.704e+02, threshold=2.907e+02, percent-clipped=11.0 +2024-08-03 17:28:38,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.37 vs. limit=15.0 +2024-08-03 17:28:39,551 INFO [train.py:1114] (2/4) Epoch 14, batch 2200, loss[loss=0.221, simple_loss=0.3096, pruned_loss=0.06623, over 13384.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2797, pruned_loss=0.05546, over 2645693.20 frames. ], batch size: 39, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:29:10,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=181698.0, ans=0.125 +2024-08-03 17:29:24,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181734.66666666666, ans=0.125 +2024-08-03 17:29:26,884 INFO [train.py:1114] (2/4) Epoch 14, batch 2250, loss[loss=0.1674, simple_loss=0.2631, pruned_loss=0.03587, over 13358.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2796, pruned_loss=0.05539, over 2642597.85 frames. ], batch size: 37, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:29:27,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.61 vs. 
limit=12.0 +2024-08-03 17:29:28,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=181771.33333333334, ans=0.1 +2024-08-03 17:29:33,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=181771.33333333334, ans=0.125 +2024-08-03 17:29:41,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=181808.0, ans=0.125 +2024-08-03 17:29:48,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181844.66666666666, ans=0.125 +2024-08-03 17:29:55,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=181844.66666666666, ans=0.125 +2024-08-03 17:30:02,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=181881.33333333334, ans=0.2 +2024-08-03 17:30:03,284 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.261e+01 1.193e+02 1.520e+02 1.872e+02 2.993e+02, threshold=3.040e+02, percent-clipped=1.0 +2024-08-03 17:30:07,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.23 vs. limit=22.5 +2024-08-03 17:30:10,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181918.0, ans=0.125 +2024-08-03 17:30:15,675 INFO [train.py:1114] (2/4) Epoch 14, batch 2300, loss[loss=0.1607, simple_loss=0.25, pruned_loss=0.03566, over 13567.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2785, pruned_loss=0.05473, over 2639091.78 frames. ], batch size: 33, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:30:38,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=182028.0, ans=0.05 +2024-08-03 17:30:56,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.31 vs. limit=10.0 +2024-08-03 17:31:00,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=182138.0, ans=0.025 +2024-08-03 17:31:01,041 INFO [train.py:1114] (2/4) Epoch 14, batch 2350, loss[loss=0.1794, simple_loss=0.2687, pruned_loss=0.04503, over 13553.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2786, pruned_loss=0.0549, over 2641753.46 frames. ], batch size: 38, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:31:33,676 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.345e+01 1.136e+02 1.357e+02 1.723e+02 3.270e+02, threshold=2.715e+02, percent-clipped=1.0 +2024-08-03 17:31:34,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=182248.0, ans=0.0 +2024-08-03 17:31:40,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=182284.66666666666, ans=0.125 +2024-08-03 17:31:46,429 INFO [train.py:1114] (2/4) Epoch 14, batch 2400, loss[loss=0.1792, simple_loss=0.2626, pruned_loss=0.04793, over 13534.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.279, pruned_loss=0.05501, over 2643586.45 frames. 
], batch size: 35, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:31:46,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=182321.33333333334, ans=0.025 +2024-08-03 17:31:54,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=182321.33333333334, ans=0.125 +2024-08-03 17:32:36,581 INFO [train.py:1114] (2/4) Epoch 14, batch 2450, loss[loss=0.226, simple_loss=0.3093, pruned_loss=0.0713, over 13360.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2803, pruned_loss=0.05574, over 2633180.01 frames. ], batch size: 37, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:32:45,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=182541.33333333334, ans=0.125 +2024-08-03 17:32:53,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.26 vs. limit=15.0 +2024-08-03 17:32:54,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.41 vs. limit=15.0 +2024-08-03 17:32:58,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=182578.0, ans=0.025 +2024-08-03 17:33:11,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.955e+01 1.136e+02 1.286e+02 1.596e+02 2.665e+02, threshold=2.571e+02, percent-clipped=0.0 +2024-08-03 17:33:23,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=182651.33333333334, ans=0.125 +2024-08-03 17:33:25,710 INFO [train.py:1114] (2/4) Epoch 14, batch 2500, loss[loss=0.2009, simple_loss=0.2931, pruned_loss=0.05435, over 13399.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.28, pruned_loss=0.05557, over 2637180.05 frames. ], batch size: 39, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:33:41,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=182724.66666666666, ans=0.125 +2024-08-03 17:33:53,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=182798.0, ans=0.0 +2024-08-03 17:33:55,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182798.0, ans=0.125 +2024-08-03 17:33:58,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182798.0, ans=0.125 +2024-08-03 17:34:03,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=182834.66666666666, ans=0.125 +2024-08-03 17:34:06,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=182834.66666666666, ans=0.125 +2024-08-03 17:34:09,942 INFO [train.py:1114] (2/4) Epoch 14, batch 2550, loss[loss=0.188, simple_loss=0.266, pruned_loss=0.05504, over 13525.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2808, pruned_loss=0.05598, over 2638629.95 frames. 
], batch size: 31, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:34:10,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182871.33333333334, ans=0.1 +2024-08-03 17:34:12,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=12.0 +2024-08-03 17:34:20,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182908.0, ans=0.125 +2024-08-03 17:34:41,383 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.149e+02 1.432e+02 2.081e+02 4.007e+02, threshold=2.864e+02, percent-clipped=10.0 +2024-08-03 17:34:41,642 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:34:53,690 INFO [train.py:1114] (2/4) Epoch 14, batch 2600, loss[loss=0.2027, simple_loss=0.2835, pruned_loss=0.06101, over 13566.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2812, pruned_loss=0.05642, over 2636982.82 frames. ], batch size: 36, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:34:59,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=183054.66666666666, ans=0.125 +2024-08-03 17:35:01,873 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:35:02,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=183091.33333333334, ans=0.125 +2024-08-03 17:35:14,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.22 vs. limit=22.5 +2024-08-03 17:35:23,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=183164.66666666666, ans=0.125 +2024-08-03 17:35:37,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=183238.0, ans=0.2 +2024-08-03 17:35:38,000 INFO [train.py:1114] (2/4) Epoch 14, batch 2650, loss[loss=0.209, simple_loss=0.3004, pruned_loss=0.05877, over 13288.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2815, pruned_loss=0.05644, over 2639585.58 frames. ], batch size: 46, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:35:38,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183238.0, ans=0.125 +2024-08-03 17:35:43,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=183238.0, ans=0.0 +2024-08-03 17:35:46,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=183274.66666666666, ans=0.0 +2024-08-03 17:35:51,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. 
limit=12.0 +2024-08-03 17:35:55,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=183311.33333333334, ans=0.0 +2024-08-03 17:35:57,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=183311.33333333334, ans=0.04949747468305833 +2024-08-03 17:36:09,722 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.156e+02 1.338e+02 1.561e+02 2.649e+02, threshold=2.677e+02, percent-clipped=0.0 +2024-08-03 17:36:47,880 INFO [train.py:1114] (2/4) Epoch 14, batch 2700, loss[loss=0.1792, simple_loss=0.2682, pruned_loss=0.0451, over 13544.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2818, pruned_loss=0.05657, over 2637407.25 frames. ], batch size: 40, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:36:53,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183421.33333333334, ans=0.1 +2024-08-03 17:37:00,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183458.0, ans=0.1 +2024-08-03 17:37:12,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183494.66666666666, ans=0.1 +2024-08-03 17:37:23,092 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.57 vs. limit=15.0 +2024-08-03 17:37:26,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=183568.0, ans=0.125 +2024-08-03 17:37:29,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.15 vs. limit=15.0 +2024-08-03 17:37:31,436 INFO [train.py:1114] (2/4) Epoch 14, batch 2750, loss[loss=0.1746, simple_loss=0.2617, pruned_loss=0.04379, over 13336.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2805, pruned_loss=0.0562, over 2634857.97 frames. 
+2024-08-03 17:37:32,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=183604.66666666666, ans=0.0
+2024-08-03 17:37:33,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=183604.66666666666, ans=0.0
+2024-08-03 17:37:34,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=183604.66666666666, ans=0.025
+2024-08-03 17:38:02,648 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.576e+01 1.249e+02 1.500e+02 2.010e+02 3.327e+02, threshold=3.000e+02, percent-clipped=3.0
+2024-08-03 17:38:02,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183714.66666666666, ans=0.1
+2024-08-03 17:38:02,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183714.66666666666, ans=0.1
+2024-08-03 17:38:04,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=183714.66666666666, ans=0.05
+2024-08-03 17:38:15,241 INFO [train.py:1114] (2/4) Epoch 14, batch 2800, loss[loss=0.2587, simple_loss=0.322, pruned_loss=0.09771, over 9060.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2802, pruned_loss=0.05595, over 2626984.29 frames. ], batch size: 96, lr: 8.83e-03, grad_scale: 32.0
+2024-08-03 17:38:20,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183788.0, ans=0.1
+2024-08-03 17:38:21,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=183788.0, ans=0.2
+2024-08-03 17:38:31,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.10 vs. limit=22.5
+2024-08-03 17:38:42,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=183898.0, ans=0.09899494936611666
+2024-08-03 17:38:45,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=183898.0, ans=0.2
+2024-08-03 17:38:46,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183898.0, ans=0.1
+2024-08-03 17:38:47,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183898.0, ans=0.1
+2024-08-03 17:39:00,482 INFO [train.py:1114] (2/4) Epoch 14, batch 2850, loss[loss=0.1808, simple_loss=0.2627, pruned_loss=0.04942, over 13365.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2804, pruned_loss=0.05624, over 2621059.29 frames. ], batch size: 35, lr: 8.82e-03, grad_scale: 32.0
+2024-08-03 17:39:18,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=184044.66666666666, ans=0.125
+2024-08-03 17:39:27,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.07 vs. limit=15.0
+2024-08-03 17:39:28,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184081.33333333334, ans=0.1
+2024-08-03 17:39:32,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184081.33333333334, ans=0.125
+2024-08-03 17:39:33,097 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.234e+01 1.082e+02 1.194e+02 1.402e+02 2.334e+02, threshold=2.389e+02, percent-clipped=0.0
+2024-08-03 17:39:40,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184118.0, ans=0.125
+2024-08-03 17:39:45,199 INFO [train.py:1114] (2/4) Epoch 14, batch 2900, loss[loss=0.2158, simple_loss=0.291, pruned_loss=0.07032, over 13355.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2816, pruned_loss=0.0563, over 2631939.80 frames. ], batch size: 36, lr: 8.82e-03, grad_scale: 32.0
+2024-08-03 17:39:45,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=184154.66666666666, ans=0.0
+2024-08-03 17:39:55,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=184191.33333333334, ans=0.2
+2024-08-03 17:40:21,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=184301.33333333334, ans=0.0
+2024-08-03 17:40:25,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=184301.33333333334, ans=0.2
+2024-08-03 17:40:26,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=184301.33333333334, ans=0.2
+2024-08-03 17:40:28,419 INFO [train.py:1114] (2/4) Epoch 14, batch 2950, loss[loss=0.1747, simple_loss=0.2578, pruned_loss=0.04577, over 13334.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2801, pruned_loss=0.05583, over 2630097.66 frames. ], batch size: 34, lr: 8.81e-03, grad_scale: 32.0
+2024-08-03 17:40:53,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184411.33333333334, ans=0.1
+2024-08-03 17:40:57,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.40 vs. limit=15.0
+2024-08-03 17:41:00,868 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.120e+01 1.167e+02 1.400e+02 1.731e+02 2.660e+02, threshold=2.799e+02, percent-clipped=4.0
+2024-08-03 17:41:07,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184484.66666666666, ans=0.125
+2024-08-03 17:41:13,015 INFO [train.py:1114] (2/4) Epoch 14, batch 3000, loss[loss=0.1785, simple_loss=0.2679, pruned_loss=0.04452, over 13536.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2799, pruned_loss=0.05566, over 2630943.41 frames. ], batch size: 37, lr: 8.81e-03, grad_scale: 32.0
+2024-08-03 17:41:13,015 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 17:41:23,007 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1738, simple_loss=0.2731, pruned_loss=0.03723, over 944034.00 frames.
+2024-08-03 17:41:23,007 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 17:41:34,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184558.0, ans=0.125
+2024-08-03 17:41:41,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184594.66666666666, ans=0.1
+2024-08-03 17:41:44,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.61 vs. limit=15.0
+2024-08-03 17:41:44,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=184594.66666666666, ans=0.125
+2024-08-03 17:41:50,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184594.66666666666, ans=0.125
+2024-08-03 17:42:01,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184668.0, ans=0.0
+2024-08-03 17:42:08,953 INFO [train.py:1114] (2/4) Epoch 14, batch 3050, loss[loss=0.1762, simple_loss=0.2621, pruned_loss=0.0452, over 13533.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2802, pruned_loss=0.05547, over 2628299.11 frames. ], batch size: 35, lr: 8.80e-03, grad_scale: 16.0
+2024-08-03 17:42:41,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.107e+02 1.206e+02 1.425e+02 2.070e+02, threshold=2.412e+02, percent-clipped=0.0
+2024-08-03 17:42:52,662 INFO [train.py:1114] (2/4) Epoch 14, batch 3100, loss[loss=0.2102, simple_loss=0.2987, pruned_loss=0.06085, over 13330.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2791, pruned_loss=0.05497, over 2627301.85 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 16.0
+2024-08-03 17:43:00,620 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 17:43:06,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=184924.66666666666, ans=0.125
+2024-08-03 17:43:10,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=184961.33333333334, ans=0.025
+2024-08-03 17:43:13,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184961.33333333334, ans=0.125
+2024-08-03 17:43:16,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=12.0
+2024-08-03 17:43:21,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184998.0, ans=0.1
+2024-08-03 17:43:35,772 INFO [train.py:1114] (2/4) Epoch 14, batch 3150, loss[loss=0.2001, simple_loss=0.2875, pruned_loss=0.0563, over 12999.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2796, pruned_loss=0.05509, over 2628516.68 frames. ], batch size: 48, lr: 8.80e-03, grad_scale: 16.0
+2024-08-03 17:43:49,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=185108.0, ans=0.2
+2024-08-03 17:44:01,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=185181.33333333334, ans=0.0
+2024-08-03 17:44:03,149 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.07 vs. limit=6.0
+2024-08-03 17:44:04,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0
+2024-08-03 17:44:08,664 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.659e+01 1.212e+02 1.528e+02 2.079e+02 4.163e+02, threshold=3.057e+02, percent-clipped=18.0
+2024-08-03 17:44:08,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185181.33333333334, ans=0.0
+2024-08-03 17:44:18,869 INFO [train.py:1114] (2/4) Epoch 14, batch 3200, loss[loss=0.1581, simple_loss=0.2472, pruned_loss=0.03449, over 13542.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2787, pruned_loss=0.05467, over 2635573.85 frames. ], batch size: 37, lr: 8.79e-03, grad_scale: 16.0
+2024-08-03 17:44:19,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=185254.66666666666, ans=0.0
+2024-08-03 17:44:32,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=185291.33333333334, ans=0.125
+2024-08-03 17:44:33,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185291.33333333334, ans=0.0
+2024-08-03 17:44:38,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=185328.0, ans=0.125
+2024-08-03 17:44:55,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=185401.33333333334, ans=0.2
+2024-08-03 17:44:57,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0
+2024-08-03 17:45:01,556 INFO [train.py:1114] (2/4) Epoch 14, batch 3250, loss[loss=0.2125, simple_loss=0.3056, pruned_loss=0.0597, over 13395.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2792, pruned_loss=0.05465, over 2638769.33 frames. ], batch size: 38, lr: 8.79e-03, grad_scale: 16.0
+2024-08-03 17:45:01,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185438.0, ans=0.125
+2024-08-03 17:45:10,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=185474.66666666666, ans=0.0
+2024-08-03 17:45:16,193 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 17:45:16,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=185474.66666666666, ans=0.0
+2024-08-03 17:45:22,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185511.33333333334, ans=0.125
+2024-08-03 17:45:35,110 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.801e+01 1.165e+02 1.318e+02 1.646e+02 3.018e+02, threshold=2.636e+02, percent-clipped=0.0
+2024-08-03 17:45:43,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=185584.66666666666, ans=0.2
+2024-08-03 17:45:45,437 INFO [train.py:1114] (2/4) Epoch 14, batch 3300, loss[loss=0.2404, simple_loss=0.3162, pruned_loss=0.08236, over 12835.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2782, pruned_loss=0.05465, over 2640695.49 frames. ], batch size: 52, lr: 8.78e-03, grad_scale: 16.0
+2024-08-03 17:45:53,322 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 17:46:01,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185658.0, ans=0.1
+2024-08-03 17:46:10,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=185694.66666666666, ans=0.2
+2024-08-03 17:46:16,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=185731.33333333334, ans=0.125
+2024-08-03 17:46:20,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=185768.0, ans=0.125
+2024-08-03 17:46:21,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.30 vs. limit=22.5
+2024-08-03 17:46:27,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=185768.0, ans=0.125
+2024-08-03 17:46:28,632 INFO [train.py:1114] (2/4) Epoch 14, batch 3350, loss[loss=0.2057, simple_loss=0.2937, pruned_loss=0.05886, over 12963.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2794, pruned_loss=0.05509, over 2629424.86 frames. ], batch size: 48, lr: 8.78e-03, grad_scale: 8.0
+2024-08-03 17:46:32,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.74 vs. limit=22.5
+2024-08-03 17:46:47,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.29 vs. limit=22.5
+2024-08-03 17:46:49,540 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.66 vs. limit=15.0
+2024-08-03 17:46:58,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=185914.66666666666, ans=0.2
+2024-08-03 17:47:01,822 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.039e+01 1.138e+02 1.305e+02 1.515e+02 2.289e+02, threshold=2.609e+02, percent-clipped=0.0
+2024-08-03 17:47:10,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=185951.33333333334, ans=0.02
+2024-08-03 17:47:11,856 INFO [train.py:1114] (2/4) Epoch 14, batch 3400, loss[loss=0.1629, simple_loss=0.2457, pruned_loss=0.04004, over 13539.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2793, pruned_loss=0.05535, over 2624771.59 frames. ], batch size: 31, lr: 8.78e-03, grad_scale: 8.0
+2024-08-03 17:47:12,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185988.0, ans=0.125
+2024-08-03 17:47:18,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=15.0
+2024-08-03 17:47:24,968 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.93 vs. limit=10.0
+2024-08-03 17:47:30,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=186061.33333333334, ans=0.125
+2024-08-03 17:47:37,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186061.33333333334, ans=0.125
+2024-08-03 17:47:55,939 INFO [train.py:1114] (2/4) Epoch 14, batch 3450, loss[loss=0.1973, simple_loss=0.2796, pruned_loss=0.05749, over 12902.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2799, pruned_loss=0.05558, over 2628669.94 frames. ], batch size: 52, lr: 8.77e-03, grad_scale: 8.0
+2024-08-03 17:47:57,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=186171.33333333334, ans=0.125
+2024-08-03 17:48:07,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=186208.0, ans=0.015
+2024-08-03 17:48:26,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=186281.33333333334, ans=0.04949747468305833
+2024-08-03 17:48:29,133 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.095e+01 1.141e+02 1.344e+02 1.531e+02 2.504e+02, threshold=2.687e+02, percent-clipped=0.0
+2024-08-03 17:48:34,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=186318.0, ans=0.125
+2024-08-03 17:48:38,414 INFO [train.py:1114] (2/4) Epoch 14, batch 3500, loss[loss=0.2043, simple_loss=0.2846, pruned_loss=0.06195, over 13523.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2796, pruned_loss=0.05574, over 2630126.25 frames. ], batch size: 34, lr: 8.77e-03, grad_scale: 8.0
+2024-08-03 17:48:40,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186354.66666666666, ans=0.1
+2024-08-03 17:48:52,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=186391.33333333334, ans=0.0
+2024-08-03 17:48:53,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=186391.33333333334, ans=0.2
+2024-08-03 17:49:02,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.27 vs. limit=15.0
+2024-08-03 17:49:13,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=22.5
+2024-08-03 17:49:21,419 INFO [train.py:1114] (2/4) Epoch 14, batch 3550, loss[loss=0.1968, simple_loss=0.2859, pruned_loss=0.05389, over 12358.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2816, pruned_loss=0.05661, over 2628888.56 frames. ], batch size: 58, lr: 8.76e-03, grad_scale: 8.0
+2024-08-03 17:49:21,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186538.0, ans=0.1
+2024-08-03 17:49:30,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=186574.66666666666, ans=0.125
+2024-08-03 17:49:43,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=15.0
+2024-08-03 17:49:56,372 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.367e+01 1.245e+02 1.346e+02 1.510e+02 2.403e+02, threshold=2.693e+02, percent-clipped=0.0
+2024-08-03 17:50:04,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=186684.66666666666, ans=0.125
+2024-08-03 17:50:06,471 INFO [train.py:1114] (2/4) Epoch 14, batch 3600, loss[loss=0.2315, simple_loss=0.3015, pruned_loss=0.08077, over 9141.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2858, pruned_loss=0.0605, over 2488707.91 frames. ], batch size: 97, lr: 8.76e-03, grad_scale: 16.0
+2024-08-03 17:50:07,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=186721.33333333334, ans=0.125
+2024-08-03 17:50:11,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.53 vs. limit=6.0
+2024-08-03 17:50:31,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.32 vs. limit=15.0
+2024-08-03 17:54:05,179 INFO [train.py:1114] (2/4) Epoch 15, batch 0, loss[loss=0.1696, simple_loss=0.2568, pruned_loss=0.0412, over 13345.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2568, pruned_loss=0.0412, over 13345.00 frames. ], batch size: 33, lr: 8.46e-03, grad_scale: 32.0
+2024-08-03 17:54:05,180 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 17:54:17,155 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1774, simple_loss=0.2778, pruned_loss=0.03851, over 944034.00 frames.
+2024-08-03 17:54:17,155 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 17:54:19,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186871.66666666666, ans=0.125
+2024-08-03 17:54:23,833 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.17 vs. limit=10.0
+2024-08-03 17:54:24,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=186871.66666666666, ans=0.125
+2024-08-03 17:54:25,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=186908.33333333334, ans=0.125
+2024-08-03 17:54:28,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0
+2024-08-03 17:54:34,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=186945.0, ans=0.07
+2024-08-03 17:54:53,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186945.0, ans=0.125
+2024-08-03 17:54:53,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=186945.0, ans=0.0
+2024-08-03 17:55:05,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=187018.33333333334, ans=0.125
+2024-08-03 17:55:12,109 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.609e+01 1.155e+02 1.252e+02 1.382e+02 2.620e+02, threshold=2.503e+02, percent-clipped=0.0
+2024-08-03 17:55:13,945 INFO [train.py:1114] (2/4) Epoch 15, batch 50, loss[loss=0.1602, simple_loss=0.2375, pruned_loss=0.04145, over 13401.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.28, pruned_loss=0.05548, over 578154.33 frames. ], batch size: 32, lr: 8.45e-03, grad_scale: 32.0
+2024-08-03 17:55:43,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.37 vs. limit=15.0
+2024-08-03 17:55:49,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=187165.0, ans=0.2
+2024-08-03 17:55:49,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=187165.0, ans=0.125
+2024-08-03 17:56:05,328 INFO [train.py:1114] (2/4) Epoch 15, batch 100, loss[loss=0.1955, simple_loss=0.2731, pruned_loss=0.05889, over 13537.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2815, pruned_loss=0.05531, over 1024925.75 frames. ], batch size: 35, lr: 8.45e-03, grad_scale: 32.0
+2024-08-03 17:56:12,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=187238.33333333334, ans=0.2
+2024-08-03 17:56:15,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=187275.0, ans=0.125
+2024-08-03 17:56:48,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.485e+01 1.136e+02 1.309e+02 1.649e+02 2.921e+02, threshold=2.617e+02, percent-clipped=2.0
+2024-08-03 17:56:50,227 INFO [train.py:1114] (2/4) Epoch 15, batch 150, loss[loss=0.1664, simple_loss=0.2471, pruned_loss=0.0428, over 13413.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2795, pruned_loss=0.05503, over 1386698.68 frames. ], batch size: 32, lr: 8.44e-03, grad_scale: 32.0
+2024-08-03 17:56:53,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=187421.66666666666, ans=0.125
+2024-08-03 17:56:54,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=187421.66666666666, ans=0.125
+2024-08-03 17:57:09,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187495.0, ans=0.0
+2024-08-03 17:57:12,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=187495.0, ans=0.0
+2024-08-03 17:57:21,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=187531.66666666666, ans=0.0
+2024-08-03 17:57:43,988 INFO [train.py:1114] (2/4) Epoch 15, batch 200, loss[loss=0.2009, simple_loss=0.2921, pruned_loss=0.05485, over 12477.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2777, pruned_loss=0.05424, over 1665789.89 frames. ], batch size: 58, lr: 8.44e-03, grad_scale: 32.0
+2024-08-03 17:57:47,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=187605.0, ans=0.0
+2024-08-03 17:58:00,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=187641.66666666666, ans=0.125
+2024-08-03 17:58:29,622 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.054e+01 1.128e+02 1.299e+02 1.749e+02 3.562e+02, threshold=2.599e+02, percent-clipped=4.0
+2024-08-03 17:58:29,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=187751.66666666666, ans=0.125
+2024-08-03 17:58:31,492 INFO [train.py:1114] (2/4) Epoch 15, batch 250, loss[loss=0.2138, simple_loss=0.2958, pruned_loss=0.06583, over 13277.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2782, pruned_loss=0.05423, over 1885434.99 frames. ], batch size: 46, lr: 8.44e-03, grad_scale: 32.0
+2024-08-03 17:58:37,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.94 vs. limit=22.5
+2024-08-03 17:58:41,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.71 vs. limit=15.0
+2024-08-03 17:59:01,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.26 vs. limit=15.0
+2024-08-03 17:59:03,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0
+2024-08-03 17:59:11,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=187935.0, ans=0.125
+2024-08-03 17:59:19,798 INFO [train.py:1114] (2/4) Epoch 15, batch 300, loss[loss=0.2027, simple_loss=0.2898, pruned_loss=0.0578, over 13455.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2787, pruned_loss=0.05491, over 2051265.47 frames. ], batch size: 42, lr: 8.43e-03, grad_scale: 32.0
+2024-08-03 17:59:22,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.63 vs. limit=15.0
+2024-08-03 17:59:23,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=187971.66666666666, ans=0.0
+2024-08-03 17:59:38,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=188008.33333333334, ans=0.0
+2024-08-03 17:59:51,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.96 vs. limit=22.5
+2024-08-03 18:00:02,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.99 vs. limit=15.0
+2024-08-03 18:00:05,573 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.050e+01 1.089e+02 1.187e+02 1.395e+02 2.688e+02, threshold=2.374e+02, percent-clipped=1.0
+2024-08-03 18:00:07,392 INFO [train.py:1114] (2/4) Epoch 15, batch 350, loss[loss=0.1865, simple_loss=0.268, pruned_loss=0.05247, over 13572.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2792, pruned_loss=0.05491, over 2182210.89 frames. ], batch size: 33, lr: 8.43e-03, grad_scale: 32.0
+2024-08-03 18:00:28,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=188228.33333333334, ans=0.125
+2024-08-03 18:00:47,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.95 vs. limit=6.0
+2024-08-03 18:00:54,366 INFO [train.py:1114] (2/4) Epoch 15, batch 400, loss[loss=0.201, simple_loss=0.2919, pruned_loss=0.05501, over 13355.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2793, pruned_loss=0.05497, over 2285716.73 frames. ], batch size: 37, lr: 8.42e-03, grad_scale: 32.0
+2024-08-03 18:00:55,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188338.33333333334, ans=0.1
+2024-08-03 18:01:17,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=188411.66666666666, ans=0.125
+2024-08-03 18:01:38,640 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.296e+01 1.094e+02 1.225e+02 1.629e+02 4.007e+02, threshold=2.451e+02, percent-clipped=6.0
+2024-08-03 18:01:39,621 INFO [train.py:1114] (2/4) Epoch 15, batch 450, loss[loss=0.2138, simple_loss=0.2985, pruned_loss=0.06457, over 13561.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.279, pruned_loss=0.05496, over 2360066.32 frames. ], batch size: 38, lr: 8.42e-03, grad_scale: 32.0
+2024-08-03 18:01:42,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.98 vs. limit=5.0
+2024-08-03 18:01:50,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=188558.33333333334, ans=0.0
+2024-08-03 18:02:06,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=188595.0, ans=0.0
+2024-08-03 18:02:22,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188668.33333333334, ans=0.125
+2024-08-03 18:02:26,761 INFO [train.py:1114] (2/4) Epoch 15, batch 500, loss[loss=0.2017, simple_loss=0.2933, pruned_loss=0.05508, over 13414.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2773, pruned_loss=0.05392, over 2425606.83 frames. ], batch size: 43, lr: 8.42e-03, grad_scale: 32.0
+2024-08-03 18:02:30,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=188705.0, ans=0.2
+2024-08-03 18:02:40,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=188741.66666666666, ans=22.5
+2024-08-03 18:02:43,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.02 vs. limit=15.0
+2024-08-03 18:02:47,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=188778.33333333334, ans=0.125
+2024-08-03 18:02:47,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188778.33333333334, ans=0.125
+2024-08-03 18:02:55,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0
+2024-08-03 18:03:01,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188815.0, ans=0.0
+2024-08-03 18:03:06,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=15.0
+2024-08-03 18:03:09,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=188851.66666666666, ans=0.125
+2024-08-03 18:03:15,310 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.367e+01 1.124e+02 1.290e+02 1.584e+02 2.757e+02, threshold=2.579e+02, percent-clipped=2.0
+2024-08-03 18:03:16,218 INFO [train.py:1114] (2/4) Epoch 15, batch 550, loss[loss=0.1953, simple_loss=0.2801, pruned_loss=0.05527, over 13108.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2773, pruned_loss=0.05377, over 2468864.18 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0
+2024-08-03 18:03:20,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=188888.33333333334, ans=0.0
+2024-08-03 18:03:34,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=188961.66666666666, ans=0.125
+2024-08-03 18:03:35,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188961.66666666666, ans=0.125
+2024-08-03 18:03:39,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=188961.66666666666, ans=0.125
+2024-08-03 18:03:42,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=188998.33333333334, ans=0.0
+2024-08-03 18:04:01,259 INFO [train.py:1114] (2/4) Epoch 15, batch 600, loss[loss=0.1979, simple_loss=0.2836, pruned_loss=0.0561, over 13301.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2774, pruned_loss=0.0538, over 2507897.00 frames. ], batch size: 46, lr: 8.41e-03, grad_scale: 32.0
+2024-08-03 18:04:15,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=189108.33333333334, ans=0.125
+2024-08-03 18:04:16,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=189108.33333333334, ans=0.125
+2024-08-03 18:04:22,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189145.0, ans=0.125
+2024-08-03 18:04:26,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.42 vs. limit=15.0
+2024-08-03 18:04:47,690 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.457e+01 1.170e+02 1.382e+02 2.012e+02 3.539e+02, threshold=2.764e+02, percent-clipped=13.0
+2024-08-03 18:04:48,663 INFO [train.py:1114] (2/4) Epoch 15, batch 650, loss[loss=0.1851, simple_loss=0.2735, pruned_loss=0.04837, over 13535.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2766, pruned_loss=0.05336, over 2543321.11 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 32.0
+2024-08-03 18:04:51,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.36 vs. limit=22.5
+2024-08-03 18:04:53,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=15.0
+2024-08-03 18:04:54,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=189255.0, ans=0.025
+2024-08-03 18:04:54,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189255.0, ans=0.125
+2024-08-03 18:04:58,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=189291.66666666666, ans=0.0
+2024-08-03 18:04:59,750 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:05:03,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189291.66666666666, ans=0.0
+2024-08-03 18:05:12,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=189328.33333333334, ans=0.2
+2024-08-03 18:05:15,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=189365.0, ans=0.125
+2024-08-03 18:05:24,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=189365.0, ans=0.04949747468305833
+2024-08-03 18:05:32,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=189401.66666666666, ans=0.125
+2024-08-03 18:05:35,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=189438.33333333334, ans=0.05
+2024-08-03 18:05:35,753 INFO [train.py:1114] (2/4) Epoch 15, batch 700, loss[loss=0.213, simple_loss=0.2906, pruned_loss=0.06771, over 13537.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2771, pruned_loss=0.05348, over 2565445.20 frames. ], batch size: 35, lr: 8.40e-03, grad_scale: 32.0
+2024-08-03 18:05:43,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=189438.33333333334, ans=0.0
+2024-08-03 18:05:44,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189475.0, ans=0.1
+2024-08-03 18:05:49,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189475.0, ans=0.125
+2024-08-03 18:05:57,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=189511.66666666666, ans=0.5
+2024-08-03 18:06:02,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189548.33333333334, ans=0.125
+2024-08-03 18:06:21,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=189585.0, ans=0.0
+2024-08-03 18:06:22,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.490e+01 1.100e+02 1.247e+02 1.589e+02 2.626e+02, threshold=2.494e+02, percent-clipped=0.0
+2024-08-03 18:06:22,795 INFO [train.py:1114] (2/4) Epoch 15, batch 750, loss[loss=0.1924, simple_loss=0.2859, pruned_loss=0.04945, over 13377.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2771, pruned_loss=0.05363, over 2583271.86 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 16.0
+2024-08-03 18:06:28,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189621.66666666666, ans=0.125
+2024-08-03 18:06:44,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=189695.0, ans=0.125
+2024-08-03 18:06:49,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189695.0, ans=0.125
+2024-08-03 18:06:52,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189695.0, ans=0.0
+2024-08-03 18:06:56,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189731.66666666666, ans=0.125
+2024-08-03 18:07:12,594 INFO [train.py:1114] (2/4) Epoch 15, batch 800, loss[loss=0.1719, simple_loss=0.2582, pruned_loss=0.0428, over 13357.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2772, pruned_loss=0.05371, over 2597935.23 frames. ], batch size: 33, lr: 8.39e-03, grad_scale: 32.0
+2024-08-03 18:07:14,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189805.0, ans=0.125
+2024-08-03 18:07:20,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=189805.0, ans=0.2
+2024-08-03 18:07:21,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.50 vs. limit=15.0
+2024-08-03 18:07:28,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189841.66666666666, ans=0.125
+2024-08-03 18:07:58,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189951.66666666666, ans=0.1
+2024-08-03 18:07:59,900 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.253e+01 1.110e+02 1.253e+02 1.558e+02 2.817e+02, threshold=2.505e+02, percent-clipped=1.0
+2024-08-03 18:07:59,938 INFO [train.py:1114] (2/4) Epoch 15, batch 850, loss[loss=0.2062, simple_loss=0.2979, pruned_loss=0.05722, over 13338.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2777, pruned_loss=0.05417, over 2609827.71 frames. ], batch size: 40, lr: 8.39e-03, grad_scale: 32.0
+2024-08-03 18:08:00,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=189988.33333333334, ans=0.025
+2024-08-03 18:08:07,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189988.33333333334, ans=0.125
+2024-08-03 18:08:21,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=190061.66666666666, ans=0.125
+2024-08-03 18:08:24,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190061.66666666666, ans=0.1
+2024-08-03 18:08:27,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=190061.66666666666, ans=0.025
+2024-08-03 18:08:34,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190098.33333333334, ans=0.125
+2024-08-03 18:08:47,479 INFO [train.py:1114] (2/4) Epoch 15, batch 900, loss[loss=0.1744, simple_loss=0.2578, pruned_loss=0.04549, over 13345.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2774, pruned_loss=0.05421, over 2611784.78 frames. ], batch size: 33, lr: 8.38e-03, grad_scale: 32.0
+2024-08-03 18:08:48,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190171.66666666666, ans=0.0
+2024-08-03 18:08:56,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=190208.33333333334, ans=0.0
+2024-08-03 18:09:15,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.60 vs. limit=22.5
+2024-08-03 18:09:20,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=190281.66666666666, ans=0.125
+2024-08-03 18:09:35,332 INFO [train.py:1114] (2/4) Epoch 15, batch 950, loss[loss=0.1897, simple_loss=0.2704, pruned_loss=0.05446, over 13534.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2778, pruned_loss=0.05438, over 2611720.95 frames. ], batch size: 34, lr: 8.38e-03, grad_scale: 16.0
+2024-08-03 18:09:35,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190355.0, ans=0.125
+2024-08-03 18:09:35,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.80 vs. limit=15.0
+2024-08-03 18:09:36,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.147e+02 1.407e+02 1.582e+02 2.602e+02, threshold=2.813e+02, percent-clipped=2.0
+2024-08-03 18:09:45,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=190391.66666666666, ans=0.0
+2024-08-03 18:09:49,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190391.66666666666, ans=0.125
+2024-08-03 18:10:03,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=15.0
+2024-08-03 18:10:05,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=190465.0, ans=0.2
+2024-08-03 18:10:09,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0
+2024-08-03 18:10:24,642 INFO [train.py:1114] (2/4) Epoch 15, batch 1000, loss[loss=0.1837, simple_loss=0.2661, pruned_loss=0.05067, over 13367.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2784, pruned_loss=0.05463, over 2609947.94 frames. ], batch size: 35, lr: 8.38e-03, grad_scale: 16.0
+2024-08-03 18:10:27,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0
+2024-08-03 18:10:29,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190538.33333333334, ans=0.1
+2024-08-03 18:10:39,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=190575.0, ans=0.025
+2024-08-03 18:10:52,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190648.33333333334, ans=0.125
+2024-08-03 18:10:55,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=190648.33333333334, ans=0.125
+2024-08-03 18:11:12,167 INFO [train.py:1114] (2/4) Epoch 15, batch 1050, loss[loss=0.2112, simple_loss=0.3043, pruned_loss=0.05902, over 13586.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2781, pruned_loss=0.05445, over 2614604.15 frames. ], batch size: 39, lr: 8.37e-03, grad_scale: 16.0
+2024-08-03 18:11:12,982 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.909e+01 1.089e+02 1.310e+02 1.512e+02 2.407e+02, threshold=2.620e+02, percent-clipped=0.0
+2024-08-03 18:11:32,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=190795.0, ans=0.125
+2024-08-03 18:11:46,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=190831.66666666666, ans=0.5
+2024-08-03 18:11:56,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.48 vs. limit=12.0
+2024-08-03 18:11:57,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190868.33333333334, ans=0.125
+2024-08-03 18:11:59,030 INFO [train.py:1114] (2/4) Epoch 15, batch 1100, loss[loss=0.1833, simple_loss=0.2687, pruned_loss=0.04897, over 13581.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2774, pruned_loss=0.05418, over 2618058.59 frames. ], batch size: 36, lr: 8.37e-03, grad_scale: 16.0
+2024-08-03 18:12:00,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190905.0, ans=0.1
+2024-08-03 18:12:06,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=190905.0, ans=0.125
+2024-08-03 18:12:11,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=190941.66666666666, ans=0.09899494936611666
+2024-08-03 18:12:44,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=191051.66666666666, ans=0.125
+2024-08-03 18:12:45,856 INFO [train.py:1114] (2/4) Epoch 15, batch 1150, loss[loss=0.1758, simple_loss=0.2577, pruned_loss=0.04698, over 13571.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2774, pruned_loss=0.05448, over 2617641.35 frames. ], batch size: 36, lr: 8.36e-03, grad_scale: 8.0
+2024-08-03 18:12:47,596 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.143e+02 1.336e+02 1.684e+02 2.618e+02, threshold=2.671e+02, percent-clipped=0.0
+2024-08-03 18:12:50,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=191088.33333333334, ans=0.125
+2024-08-03 18:13:01,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=191125.0, ans=0.025
+2024-08-03 18:13:13,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=191161.66666666666, ans=0.0
+2024-08-03 18:13:18,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.85 vs. limit=22.5
+2024-08-03 18:13:28,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=191235.0, ans=0.125
+2024-08-03 18:13:35,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=191235.0, ans=0.125
+2024-08-03 18:13:40,338 INFO [train.py:1114] (2/4) Epoch 15, batch 1200, loss[loss=0.186, simple_loss=0.2777, pruned_loss=0.04711, over 13576.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2778, pruned_loss=0.05443, over 2615785.13 frames. ], batch size: 39, lr: 8.36e-03, grad_scale: 16.0
+2024-08-03 18:13:46,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191271.66666666666, ans=0.0
+2024-08-03 18:13:59,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=191345.0, ans=0.125
+2024-08-03 18:14:04,878 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:14:13,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=191381.66666666666, ans=0.125
+2024-08-03 18:14:18,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=191418.33333333334, ans=0.2
+2024-08-03 18:14:23,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=191418.33333333334, ans=0.125
+2024-08-03 18:14:28,835 INFO [train.py:1114] (2/4) Epoch 15, batch 1250, loss[loss=0.2005, simple_loss=0.2834, pruned_loss=0.05876, over 13446.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2784, pruned_loss=0.05429, over 2627567.70 frames. ], batch size: 42, lr: 8.36e-03, grad_scale: 16.0
+2024-08-03 18:14:30,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.500e+01 1.104e+02 1.280e+02 1.551e+02 2.607e+02, threshold=2.559e+02, percent-clipped=0.0
+2024-08-03 18:14:48,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=191528.33333333334, ans=0.2
+2024-08-03 18:14:49,869 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:14:51,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=10.0
+2024-08-03 18:15:14,022 INFO [train.py:1114] (2/4) Epoch 15, batch 1300, loss[loss=0.2217, simple_loss=0.3015, pruned_loss=0.071, over 12913.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2774, pruned_loss=0.05399, over 2630602.27 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 16.0
+2024-08-03 18:15:30,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191675.0, ans=0.1
+2024-08-03 18:15:53,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=191785.0, ans=0.2
+2024-08-03 18:15:54,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=191785.0, ans=0.2
+2024-08-03 18:16:01,023 INFO [train.py:1114] (2/4) Epoch 15, batch 1350, loss[loss=0.2022, simple_loss=0.2879, pruned_loss=0.05829, over 13550.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2773, pruned_loss=0.05407, over 2638155.41 frames. ], batch size: 37, lr: 8.35e-03, grad_scale: 16.0
+2024-08-03 18:16:02,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.14 vs. limit=22.5
limit=22.5 +2024-08-03 18:16:02,865 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.149e+02 1.398e+02 1.802e+02 2.548e+02, threshold=2.797e+02, percent-clipped=0.0 +2024-08-03 18:16:03,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=191821.66666666666, ans=0.0 +2024-08-03 18:16:06,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=191821.66666666666, ans=0.2 +2024-08-03 18:16:11,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191858.33333333334, ans=0.125 +2024-08-03 18:16:14,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=191858.33333333334, ans=0.0 +2024-08-03 18:16:40,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=191968.33333333334, ans=0.0 +2024-08-03 18:16:47,828 INFO [train.py:1114] (2/4) Epoch 15, batch 1400, loss[loss=0.1798, simple_loss=0.2502, pruned_loss=0.05465, over 13269.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.277, pruned_loss=0.05406, over 2642047.92 frames. ], batch size: 31, lr: 8.34e-03, grad_scale: 16.0 +2024-08-03 18:16:50,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.77 vs. limit=15.0 +2024-08-03 18:17:08,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=192078.33333333334, ans=0.0 +2024-08-03 18:17:09,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=192078.33333333334, ans=0.125 +2024-08-03 18:17:11,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=192078.33333333334, ans=0.125 +2024-08-03 18:17:17,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=192115.0, ans=0.125 +2024-08-03 18:17:24,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=192115.0, ans=0.04949747468305833 +2024-08-03 18:17:35,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=192151.66666666666, ans=0.125 +2024-08-03 18:17:39,153 INFO [train.py:1114] (2/4) Epoch 15, batch 1450, loss[loss=0.1891, simple_loss=0.2824, pruned_loss=0.04793, over 13417.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2781, pruned_loss=0.05457, over 2641099.06 frames. 
], batch size: 43, lr: 8.34e-03, grad_scale: 16.0 +2024-08-03 18:17:40,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=192188.33333333334, ans=0.0 +2024-08-03 18:17:40,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.258e+01 1.126e+02 1.352e+02 1.648e+02 3.700e+02, threshold=2.704e+02, percent-clipped=1.0 +2024-08-03 18:17:52,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=192225.0, ans=0.2 +2024-08-03 18:17:52,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192225.0, ans=0.1 +2024-08-03 18:17:58,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192261.66666666666, ans=0.125 +2024-08-03 18:18:11,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.37 vs. limit=5.0 +2024-08-03 18:18:24,968 INFO [train.py:1114] (2/4) Epoch 15, batch 1500, loss[loss=0.2045, simple_loss=0.2891, pruned_loss=0.05995, over 13397.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2779, pruned_loss=0.05419, over 2641728.33 frames. ], batch size: 39, lr: 8.34e-03, grad_scale: 16.0 +2024-08-03 18:18:28,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=192371.66666666666, ans=0.0 +2024-08-03 18:18:35,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192408.33333333334, ans=0.125 +2024-08-03 18:18:35,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=192408.33333333334, ans=0.0 +2024-08-03 18:18:48,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=192445.0, ans=0.2 +2024-08-03 18:19:01,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.15 vs. limit=12.0 +2024-08-03 18:19:08,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.81 vs. limit=22.5 +2024-08-03 18:19:09,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=192518.33333333334, ans=0.125 +2024-08-03 18:19:12,238 INFO [train.py:1114] (2/4) Epoch 15, batch 1550, loss[loss=0.2216, simple_loss=0.3062, pruned_loss=0.06853, over 13407.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2786, pruned_loss=0.05472, over 2631016.58 frames. ], batch size: 41, lr: 8.33e-03, grad_scale: 16.0 +2024-08-03 18:19:13,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=192555.0, ans=0.0 +2024-08-03 18:19:14,134 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.182e+01 1.117e+02 1.315e+02 1.608e+02 2.647e+02, threshold=2.631e+02, percent-clipped=0.0 +2024-08-03 18:19:19,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192555.0, ans=0.1 +2024-08-03 18:19:29,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.87 vs. 
limit=12.0 +2024-08-03 18:19:34,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=192628.33333333334, ans=0.025 +2024-08-03 18:19:41,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=192665.0, ans=0.125 +2024-08-03 18:19:43,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=192665.0, ans=0.125 +2024-08-03 18:19:59,537 INFO [train.py:1114] (2/4) Epoch 15, batch 1600, loss[loss=0.2154, simple_loss=0.3087, pruned_loss=0.06102, over 13575.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2786, pruned_loss=0.05494, over 2624225.90 frames. ], batch size: 39, lr: 8.33e-03, grad_scale: 32.0 +2024-08-03 18:20:23,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=192811.66666666666, ans=0.125 +2024-08-03 18:20:46,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=192921.66666666666, ans=0.125 +2024-08-03 18:20:47,092 INFO [train.py:1114] (2/4) Epoch 15, batch 1650, loss[loss=0.1878, simple_loss=0.2813, pruned_loss=0.04712, over 13315.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2782, pruned_loss=0.05458, over 2621504.69 frames. ], batch size: 40, lr: 8.33e-03, grad_scale: 32.0 +2024-08-03 18:20:48,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.213e+01 1.145e+02 1.280e+02 1.838e+02 3.870e+02, threshold=2.560e+02, percent-clipped=5.0 +2024-08-03 18:20:52,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.65 vs. limit=12.0 +2024-08-03 18:21:05,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.98 vs. limit=15.0 +2024-08-03 18:21:12,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=192995.0, ans=0.125 +2024-08-03 18:21:23,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=193031.66666666666, ans=0.125 +2024-08-03 18:21:34,183 INFO [train.py:1114] (2/4) Epoch 15, batch 1700, loss[loss=0.1644, simple_loss=0.2508, pruned_loss=0.03897, over 13251.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2776, pruned_loss=0.05441, over 2630515.11 frames. 
], batch size: 31, lr: 8.32e-03, grad_scale: 32.0 +2024-08-03 18:21:51,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193178.33333333334, ans=0.125 +2024-08-03 18:21:54,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=193178.33333333334, ans=0.0 +2024-08-03 18:22:02,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=193215.0, ans=0.125 +2024-08-03 18:22:04,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=193215.0, ans=0.025 +2024-08-03 18:22:05,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=193215.0, ans=0.125 +2024-08-03 18:22:12,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193251.66666666666, ans=0.1 +2024-08-03 18:22:14,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193251.66666666666, ans=0.125 +2024-08-03 18:22:17,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=193251.66666666666, ans=0.0 +2024-08-03 18:22:19,612 INFO [train.py:1114] (2/4) Epoch 15, batch 1750, loss[loss=0.1684, simple_loss=0.2513, pruned_loss=0.04277, over 13522.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2775, pruned_loss=0.05441, over 2634416.99 frames. ], batch size: 31, lr: 8.32e-03, grad_scale: 32.0 +2024-08-03 18:22:20,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=193288.33333333334, ans=0.0 +2024-08-03 18:22:21,330 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.690e+01 1.123e+02 1.340e+02 1.586e+02 3.403e+02, threshold=2.681e+02, percent-clipped=7.0 +2024-08-03 18:22:38,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193361.66666666666, ans=0.1 +2024-08-03 18:22:50,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193398.33333333334, ans=0.0 +2024-08-03 18:22:56,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=193398.33333333334, ans=0.04949747468305833 +2024-08-03 18:22:56,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=193398.33333333334, ans=0.04949747468305833 +2024-08-03 18:23:04,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=193435.0, ans=0.0 +2024-08-03 18:23:04,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=193435.0, ans=0.125 +2024-08-03 18:23:06,733 INFO [train.py:1114] (2/4) Epoch 15, batch 1800, loss[loss=0.2013, simple_loss=0.2869, pruned_loss=0.05789, over 13557.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2781, pruned_loss=0.05455, over 2635335.76 frames. 
], batch size: 38, lr: 8.31e-03, grad_scale: 32.0 +2024-08-03 18:23:12,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=193471.66666666666, ans=0.0 +2024-08-03 18:23:36,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193581.66666666666, ans=0.1 +2024-08-03 18:23:39,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=193581.66666666666, ans=0.125 +2024-08-03 18:23:43,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=193581.66666666666, ans=0.125 +2024-08-03 18:23:49,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=193618.33333333334, ans=0.0 +2024-08-03 18:23:53,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=193618.33333333334, ans=0.125 +2024-08-03 18:23:55,687 INFO [train.py:1114] (2/4) Epoch 15, batch 1850, loss[loss=0.2071, simple_loss=0.295, pruned_loss=0.05959, over 13407.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2771, pruned_loss=0.05397, over 2636887.22 frames. ], batch size: 39, lr: 8.31e-03, grad_scale: 32.0 +2024-08-03 18:23:57,398 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.035e+01 1.191e+02 1.556e+02 2.123e+02 2.973e+02, threshold=3.112e+02, percent-clipped=3.0 +2024-08-03 18:24:08,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193691.66666666666, ans=0.0 +2024-08-03 18:24:10,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.22 vs. limit=15.0 +2024-08-03 18:24:15,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=193728.33333333334, ans=0.125 +2024-08-03 18:24:16,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193728.33333333334, ans=0.1 +2024-08-03 18:24:18,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193728.33333333334, ans=0.125 +2024-08-03 18:24:18,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-08-03 18:24:26,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=193765.0, ans=0.0 +2024-08-03 18:24:44,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=193838.33333333334, ans=0.2 +2024-08-03 18:24:44,747 INFO [train.py:1114] (2/4) Epoch 15, batch 1900, loss[loss=0.1853, simple_loss=0.2783, pruned_loss=0.04609, over 13333.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2776, pruned_loss=0.05399, over 2639257.97 frames. 
], batch size: 40, lr: 8.31e-03, grad_scale: 32.0 +2024-08-03 18:24:45,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=193838.33333333334, ans=0.125 +2024-08-03 18:24:46,975 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-08-03 18:24:53,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=193875.0, ans=0.025 +2024-08-03 18:25:12,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=193948.33333333334, ans=0.0 +2024-08-03 18:25:14,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193948.33333333334, ans=0.0 +2024-08-03 18:25:16,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=193948.33333333334, ans=0.04949747468305833 +2024-08-03 18:25:29,774 INFO [train.py:1114] (2/4) Epoch 15, batch 1950, loss[loss=0.2098, simple_loss=0.2973, pruned_loss=0.06119, over 13544.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2786, pruned_loss=0.05419, over 2645822.19 frames. ], batch size: 36, lr: 8.30e-03, grad_scale: 32.0 +2024-08-03 18:25:31,621 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.973e+01 1.188e+02 1.452e+02 1.828e+02 3.234e+02, threshold=2.903e+02, percent-clipped=1.0 +2024-08-03 18:25:32,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=194021.66666666666, ans=0.125 +2024-08-03 18:25:41,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194058.33333333334, ans=0.125 +2024-08-03 18:25:59,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=194131.66666666666, ans=0.125 +2024-08-03 18:26:07,159 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.04 vs. limit=15.0 +2024-08-03 18:26:12,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194168.33333333334, ans=0.0 +2024-08-03 18:26:15,835 INFO [train.py:1114] (2/4) Epoch 15, batch 2000, loss[loss=0.1591, simple_loss=0.2463, pruned_loss=0.03594, over 13533.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2793, pruned_loss=0.05432, over 2635529.50 frames. ], batch size: 31, lr: 8.30e-03, grad_scale: 32.0 +2024-08-03 18:26:30,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.14 vs. limit=12.0 +2024-08-03 18:26:38,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=194278.33333333334, ans=0.125 +2024-08-03 18:27:01,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.97 vs. limit=10.0 +2024-08-03 18:27:06,152 INFO [train.py:1114] (2/4) Epoch 15, batch 2050, loss[loss=0.1764, simple_loss=0.2485, pruned_loss=0.05212, over 13443.00 frames. 
], tot_loss[loss=0.1939, simple_loss=0.2788, pruned_loss=0.05448, over 2632732.82 frames. ], batch size: 32, lr: 8.29e-03, grad_scale: 32.0 +2024-08-03 18:27:07,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.454e+01 1.180e+02 1.343e+02 1.712e+02 4.642e+02, threshold=2.687e+02, percent-clipped=2.0 +2024-08-03 18:27:24,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=194461.66666666666, ans=0.125 +2024-08-03 18:27:42,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194535.0, ans=0.1 +2024-08-03 18:27:42,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=194535.0, ans=0.125 +2024-08-03 18:27:45,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194535.0, ans=0.1 +2024-08-03 18:27:51,118 INFO [train.py:1114] (2/4) Epoch 15, batch 2100, loss[loss=0.196, simple_loss=0.2831, pruned_loss=0.05448, over 13548.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2772, pruned_loss=0.05365, over 2637825.55 frames. ], batch size: 37, lr: 8.29e-03, grad_scale: 32.0 +2024-08-03 18:28:01,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.47 vs. limit=15.0 +2024-08-03 18:28:04,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-08-03 18:28:20,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=194645.0, ans=0.07 +2024-08-03 18:28:33,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194718.33333333334, ans=0.125 +2024-08-03 18:28:34,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=194718.33333333334, ans=0.07 +2024-08-03 18:28:40,555 INFO [train.py:1114] (2/4) Epoch 15, batch 2150, loss[loss=0.1662, simple_loss=0.253, pruned_loss=0.03971, over 13561.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2763, pruned_loss=0.05324, over 2646621.61 frames. ], batch size: 36, lr: 8.29e-03, grad_scale: 32.0 +2024-08-03 18:28:42,289 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.112e+02 1.243e+02 1.782e+02 4.136e+02, threshold=2.485e+02, percent-clipped=5.0 +2024-08-03 18:28:47,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=194755.0, ans=0.125 +2024-08-03 18:28:50,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=194791.66666666666, ans=0.125 +2024-08-03 18:29:04,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=194828.33333333334, ans=0.125 +2024-08-03 18:29:21,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=194901.66666666666, ans=0.0 +2024-08-03 18:29:25,502 INFO [train.py:1114] (2/4) Epoch 15, batch 2200, loss[loss=0.1992, simple_loss=0.2916, pruned_loss=0.05341, over 13401.00 frames. 
], tot_loss[loss=0.1919, simple_loss=0.2769, pruned_loss=0.05342, over 2645219.36 frames. ], batch size: 39, lr: 8.28e-03, grad_scale: 32.0 +2024-08-03 18:29:32,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=194938.33333333334, ans=0.0 +2024-08-03 18:29:54,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.55 vs. limit=15.0 +2024-08-03 18:30:13,224 INFO [train.py:1114] (2/4) Epoch 15, batch 2250, loss[loss=0.176, simple_loss=0.2668, pruned_loss=0.04257, over 13354.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2761, pruned_loss=0.05269, over 2642938.29 frames. ], batch size: 37, lr: 8.28e-03, grad_scale: 32.0 +2024-08-03 18:30:15,095 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.201e+02 1.486e+02 1.910e+02 3.582e+02, threshold=2.971e+02, percent-clipped=11.0 +2024-08-03 18:30:21,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=195158.33333333334, ans=0.125 +2024-08-03 18:30:21,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=195158.33333333334, ans=0.125 +2024-08-03 18:30:51,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=15.0 +2024-08-03 18:30:56,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=195268.33333333334, ans=0.125 +2024-08-03 18:31:01,065 INFO [train.py:1114] (2/4) Epoch 15, batch 2300, loss[loss=0.163, simple_loss=0.2407, pruned_loss=0.04262, over 13554.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2747, pruned_loss=0.05232, over 2638293.30 frames. ], batch size: 33, lr: 8.28e-03, grad_scale: 32.0 +2024-08-03 18:31:04,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=195305.0, ans=0.125 +2024-08-03 18:31:17,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=195341.66666666666, ans=0.125 +2024-08-03 18:31:48,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=195451.66666666666, ans=0.1 +2024-08-03 18:31:49,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=195451.66666666666, ans=0.0 +2024-08-03 18:31:58,484 INFO [train.py:1114] (2/4) Epoch 15, batch 2350, loss[loss=0.1996, simple_loss=0.2874, pruned_loss=0.05596, over 13545.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2752, pruned_loss=0.05278, over 2640859.30 frames. 
], batch size: 38, lr: 8.27e-03, grad_scale: 32.0 +2024-08-03 18:32:00,492 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.540e+01 1.095e+02 1.335e+02 1.545e+02 2.606e+02, threshold=2.670e+02, percent-clipped=0.0 +2024-08-03 18:32:09,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=195525.0, ans=0.125 +2024-08-03 18:32:12,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=195525.0, ans=0.0 +2024-08-03 18:32:15,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195525.0, ans=0.1 +2024-08-03 18:32:18,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.93 vs. limit=15.0 +2024-08-03 18:32:20,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195561.66666666666, ans=0.1 +2024-08-03 18:32:29,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=195598.33333333334, ans=0.125 +2024-08-03 18:32:34,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=195598.33333333334, ans=0.125 +2024-08-03 18:32:39,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.03 vs. limit=15.0 +2024-08-03 18:32:44,896 INFO [train.py:1114] (2/4) Epoch 15, batch 2400, loss[loss=0.1905, simple_loss=0.2747, pruned_loss=0.05318, over 13523.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2765, pruned_loss=0.05344, over 2642325.87 frames. ], batch size: 35, lr: 8.27e-03, grad_scale: 32.0 +2024-08-03 18:32:52,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195671.66666666666, ans=0.0 +2024-08-03 18:32:54,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.14 vs. limit=15.0 +2024-08-03 18:32:56,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=195708.33333333334, ans=0.0 +2024-08-03 18:33:01,833 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.05 vs. limit=10.0 +2024-08-03 18:33:05,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=195745.0, ans=0.0 +2024-08-03 18:33:09,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=195745.0, ans=0.0 +2024-08-03 18:33:10,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.44 vs. 
limit=15.0 +2024-08-03 18:33:12,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195781.66666666666, ans=0.1 +2024-08-03 18:33:14,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=195781.66666666666, ans=0.0 +2024-08-03 18:33:17,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=195781.66666666666, ans=0.125 +2024-08-03 18:33:20,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-08-03 18:33:27,947 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-08-03 18:33:30,326 INFO [train.py:1114] (2/4) Epoch 15, batch 2450, loss[loss=0.1858, simple_loss=0.2721, pruned_loss=0.04981, over 13361.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2775, pruned_loss=0.05395, over 2631922.28 frames. ], batch size: 37, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:33:33,027 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.379e+01 1.105e+02 1.269e+02 1.556e+02 2.604e+02, threshold=2.537e+02, percent-clipped=0.0 +2024-08-03 18:33:37,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-08-03 18:33:55,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195928.33333333334, ans=0.125 +2024-08-03 18:33:58,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195965.0, ans=0.1 +2024-08-03 18:34:01,541 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:34:03,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195965.0, ans=0.1 +2024-08-03 18:34:10,098 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.69 vs. limit=15.0 +2024-08-03 18:34:15,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=196001.66666666666, ans=0.2 +2024-08-03 18:34:17,547 INFO [train.py:1114] (2/4) Epoch 15, batch 2500, loss[loss=0.1935, simple_loss=0.2839, pruned_loss=0.05149, over 13382.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2771, pruned_loss=0.05363, over 2636229.84 frames. ], batch size: 39, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:34:27,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.59 vs. 
limit=15.0 +2024-08-03 18:34:29,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=196075.0, ans=0.125 +2024-08-03 18:34:42,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=196111.66666666666, ans=0.2 +2024-08-03 18:34:52,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.38 vs. limit=22.5 +2024-08-03 18:34:55,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=196185.0, ans=0.125 +2024-08-03 18:34:55,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=196185.0, ans=0.125 +2024-08-03 18:35:02,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=196185.0, ans=0.0 +2024-08-03 18:35:04,094 INFO [train.py:1114] (2/4) Epoch 15, batch 2550, loss[loss=0.1603, simple_loss=0.2405, pruned_loss=0.04009, over 13527.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2768, pruned_loss=0.0539, over 2638393.24 frames. ], batch size: 31, lr: 8.26e-03, grad_scale: 16.0 +2024-08-03 18:35:06,686 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.866e+01 1.097e+02 1.275e+02 1.738e+02 2.775e+02, threshold=2.550e+02, percent-clipped=2.0 +2024-08-03 18:35:12,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196258.33333333334, ans=0.125 +2024-08-03 18:35:13,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196258.33333333334, ans=0.1 +2024-08-03 18:35:13,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196258.33333333334, ans=0.125 +2024-08-03 18:35:17,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=196258.33333333334, ans=0.125 +2024-08-03 18:35:19,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=196258.33333333334, ans=0.2 +2024-08-03 18:35:23,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=196295.0, ans=0.0 +2024-08-03 18:35:43,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=196368.33333333334, ans=0.2 +2024-08-03 18:35:47,180 INFO [train.py:1114] (2/4) Epoch 15, batch 2600, loss[loss=0.1826, simple_loss=0.2724, pruned_loss=0.04637, over 13567.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.277, pruned_loss=0.05377, over 2637501.51 frames. ], batch size: 36, lr: 8.25e-03, grad_scale: 16.0 +2024-08-03 18:35:50,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=196405.0, ans=0.125 +2024-08-03 18:36:30,677 INFO [train.py:1114] (2/4) Epoch 15, batch 2650, loss[loss=0.2235, simple_loss=0.3113, pruned_loss=0.06786, over 13346.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2771, pruned_loss=0.05393, over 2640469.58 frames. 
], batch size: 46, lr: 8.25e-03, grad_scale: 16.0 +2024-08-03 18:36:33,203 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.155e+01 1.086e+02 1.260e+02 1.535e+02 2.930e+02, threshold=2.521e+02, percent-clipped=3.0 +2024-08-03 18:36:56,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196661.66666666666, ans=0.0 +2024-08-03 18:37:09,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=196735.0, ans=0.0 +2024-08-03 18:37:17,599 INFO [train.py:1114] (2/4) Epoch 15, batch 2700, loss[loss=0.1839, simple_loss=0.2779, pruned_loss=0.04492, over 13564.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2775, pruned_loss=0.05379, over 2637964.79 frames. ], batch size: 40, lr: 8.24e-03, grad_scale: 16.0 +2024-08-03 18:37:33,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=196808.33333333334, ans=0.0 +2024-08-03 18:37:41,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196845.0, ans=0.1 +2024-08-03 18:37:41,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=196845.0, ans=0.0 +2024-08-03 18:37:56,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196918.33333333334, ans=0.125 +2024-08-03 18:38:01,017 INFO [train.py:1114] (2/4) Epoch 15, batch 2750, loss[loss=0.1872, simple_loss=0.2669, pruned_loss=0.05372, over 13333.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2759, pruned_loss=0.05322, over 2635280.13 frames. ], batch size: 34, lr: 8.24e-03, grad_scale: 16.0 +2024-08-03 18:38:02,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=196955.0, ans=0.025 +2024-08-03 18:38:03,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196955.0, ans=0.0 +2024-08-03 18:38:03,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.526e+01 1.073e+02 1.243e+02 1.451e+02 2.224e+02, threshold=2.486e+02, percent-clipped=0.0 +2024-08-03 18:38:03,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=196955.0, ans=0.125 +2024-08-03 18:38:13,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=196991.66666666666, ans=0.0 +2024-08-03 18:38:23,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.96 vs. 
limit=12.0 +2024-08-03 18:38:24,648 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:38:35,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=197101.66666666666, ans=0.2 +2024-08-03 18:38:36,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=197101.66666666666, ans=0.0 +2024-08-03 18:38:43,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=197138.33333333334, ans=0.0 +2024-08-03 18:38:44,476 INFO [train.py:1114] (2/4) Epoch 15, batch 2800, loss[loss=0.2719, simple_loss=0.3294, pruned_loss=0.1072, over 9773.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2764, pruned_loss=0.05386, over 2627555.93 frames. ], batch size: 97, lr: 8.24e-03, grad_scale: 32.0 +2024-08-03 18:38:57,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197175.0, ans=0.125 +2024-08-03 18:39:16,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=197248.33333333334, ans=0.035 +2024-08-03 18:39:19,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0 +2024-08-03 18:39:27,873 INFO [train.py:1114] (2/4) Epoch 15, batch 2850, loss[loss=0.1721, simple_loss=0.2585, pruned_loss=0.04287, over 13368.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2772, pruned_loss=0.05438, over 2621128.10 frames. ], batch size: 35, lr: 8.23e-03, grad_scale: 32.0 +2024-08-03 18:39:28,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=197321.66666666666, ans=0.025 +2024-08-03 18:39:30,443 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.971e+01 1.137e+02 1.312e+02 1.532e+02 3.029e+02, threshold=2.624e+02, percent-clipped=2.0 +2024-08-03 18:39:33,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.48 vs. limit=10.0 +2024-08-03 18:39:36,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=197358.33333333334, ans=0.1 +2024-08-03 18:39:43,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=197358.33333333334, ans=0.0 +2024-08-03 18:39:46,319 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:39:58,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=197431.66666666666, ans=0.0 +2024-08-03 18:40:11,208 INFO [train.py:1114] (2/4) Epoch 15, batch 2900, loss[loss=0.1764, simple_loss=0.262, pruned_loss=0.04543, over 13356.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2782, pruned_loss=0.05431, over 2631941.94 frames. 
], batch size: 36, lr: 8.23e-03, grad_scale: 32.0 +2024-08-03 18:40:21,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=197541.66666666666, ans=0.125 +2024-08-03 18:40:27,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=197578.33333333334, ans=0.0 +2024-08-03 18:40:44,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0 +2024-08-03 18:40:50,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197651.66666666666, ans=0.125 +2024-08-03 18:40:50,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=197651.66666666666, ans=0.125 +2024-08-03 18:40:54,521 INFO [train.py:1114] (2/4) Epoch 15, batch 2950, loss[loss=0.1792, simple_loss=0.2649, pruned_loss=0.04679, over 13335.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2767, pruned_loss=0.05375, over 2630811.35 frames. ], batch size: 34, lr: 8.23e-03, grad_scale: 32.0 +2024-08-03 18:40:57,043 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.111e+02 1.238e+02 1.494e+02 2.430e+02, threshold=2.476e+02, percent-clipped=0.0 +2024-08-03 18:40:58,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.10 vs. limit=15.0 +2024-08-03 18:41:19,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=11.38 vs. limit=15.0 +2024-08-03 18:41:24,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=197798.33333333334, ans=0.0 +2024-08-03 18:41:33,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=197835.0, ans=0.0 +2024-08-03 18:41:35,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=197835.0, ans=15.0 +2024-08-03 18:41:38,132 INFO [train.py:1114] (2/4) Epoch 15, batch 3000, loss[loss=0.1972, simple_loss=0.2868, pruned_loss=0.05383, over 13555.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2767, pruned_loss=0.05375, over 2630807.09 frames. ], batch size: 37, lr: 8.22e-03, grad_scale: 32.0 +2024-08-03 18:41:38,133 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 18:41:48,175 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2717, pruned_loss=0.03605, over 944034.00 frames. 
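An aside on reading the validation entry above: every train.py loss line in this log reports three fields that are internally consistent, with the logged loss always equal to 0.5 * simple_loss + pruned_loss (here, 0.5 * 0.2717 + 0.03605 = 0.1719). That is what a pruned-transducer objective with a simple-loss weight of 0.5 would produce, though the weight is inferred from the logged numbers rather than stated anywhere in the log. A minimal sketch of the check, with illustrative variable names (SIMPLE_LOSS_SCALE is an assumption, not read from train.py):

```python
# Check that the loss fields logged by train.py satisfy
#   loss = SIMPLE_LOSS_SCALE * simple_loss + pruned_loss
# for triples copied verbatim from this log.
SIMPLE_LOSS_SCALE = 0.5  # assumed weight, inferred from the logged numbers

entries = [
    (0.1719, 0.2717, 0.03605),  # Epoch 15, validation (above)
    (0.1932, 0.2774, 0.05448),  # Epoch 15, batch 1150, tot_loss
    (0.1921, 0.2767, 0.05375),  # Epoch 15, batch 3000, tot_loss
]

for loss, simple_loss, pruned_loss in entries:
    combined = SIMPLE_LOSS_SCALE * simple_loss + pruned_loss
    # tolerance covers the 4-significant-digit rounding in the log
    assert abs(combined - loss) < 5e-4, (loss, combined)

print("sampled entries satisfy loss = 0.5 * simple_loss + pruned_loss")
```

The optim.py WARNING lines are self-consistent in the same way: the five values after "grad-norm quartiles" read as min/25%/50%/75%/max of recent gradient norms, and the reported clipping threshold equals Clipping_scale times the logged median (e.g. 2.0 * 1.336e+02 = 2.672e+02 against the logged threshold of 2.671e+02, and 2.0 * 1.280e+02 = 2.560e+02 against 2.559e+02).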
+2024-08-03 18:41:48,176 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 18:41:49,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=197871.66666666666, ans=0.05 +2024-08-03 18:42:00,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=197908.33333333334, ans=0.125 +2024-08-03 18:42:17,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=197981.66666666666, ans=0.0 +2024-08-03 18:42:26,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=198018.33333333334, ans=0.09899494936611666 +2024-08-03 18:42:32,396 INFO [train.py:1114] (2/4) Epoch 15, batch 3050, loss[loss=0.1907, simple_loss=0.275, pruned_loss=0.0532, over 13543.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2773, pruned_loss=0.05404, over 2627745.68 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 32.0 +2024-08-03 18:42:32,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.79 vs. limit=15.0 +2024-08-03 18:42:35,001 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.644e+01 1.092e+02 1.243e+02 1.494e+02 2.695e+02, threshold=2.487e+02, percent-clipped=3.0 +2024-08-03 18:42:50,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=198128.33333333334, ans=0.05 +2024-08-03 18:43:16,339 INFO [train.py:1114] (2/4) Epoch 15, batch 3100, loss[loss=0.207, simple_loss=0.2928, pruned_loss=0.06058, over 13302.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2769, pruned_loss=0.05369, over 2626771.46 frames. ], batch size: 46, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:43:57,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=198385.0, ans=0.125 +2024-08-03 18:44:00,848 INFO [train.py:1114] (2/4) Epoch 15, batch 3150, loss[loss=0.2225, simple_loss=0.3061, pruned_loss=0.06939, over 13059.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2772, pruned_loss=0.05361, over 2628168.72 frames. ], batch size: 48, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:44:03,294 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.819e+01 1.142e+02 1.468e+02 1.823e+02 3.085e+02, threshold=2.937e+02, percent-clipped=3.0 +2024-08-03 18:44:31,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198495.0, ans=0.1 +2024-08-03 18:44:33,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=198495.0, ans=0.025 +2024-08-03 18:44:35,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=198495.0, ans=0.2 +2024-08-03 18:44:54,992 INFO [train.py:1114] (2/4) Epoch 15, batch 3200, loss[loss=0.1897, simple_loss=0.2742, pruned_loss=0.05258, over 13551.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.277, pruned_loss=0.05362, over 2634957.48 frames. 
], batch size: 37, lr: 8.21e-03, grad_scale: 32.0 +2024-08-03 18:45:11,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=198678.33333333334, ans=0.125 +2024-08-03 18:45:12,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198678.33333333334, ans=0.125 +2024-08-03 18:45:16,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.36 vs. limit=12.0 +2024-08-03 18:45:33,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=198751.66666666666, ans=0.125 +2024-08-03 18:45:38,328 INFO [train.py:1114] (2/4) Epoch 15, batch 3250, loss[loss=0.1943, simple_loss=0.2808, pruned_loss=0.05397, over 13394.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.277, pruned_loss=0.05325, over 2638597.65 frames. ], batch size: 38, lr: 8.20e-03, grad_scale: 32.0 +2024-08-03 18:45:40,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.293e+01 1.129e+02 1.312e+02 1.541e+02 2.254e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 18:45:44,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198788.33333333334, ans=0.125 +2024-08-03 18:45:46,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=198825.0, ans=0.2 +2024-08-03 18:45:59,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=198861.66666666666, ans=0.2 +2024-08-03 18:46:06,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198898.33333333334, ans=0.1 +2024-08-03 18:46:10,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.42 vs. limit=15.0 +2024-08-03 18:46:13,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=198935.0, ans=0.0 +2024-08-03 18:46:18,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=198935.0, ans=0.125 +2024-08-03 18:46:21,419 INFO [train.py:1114] (2/4) Epoch 15, batch 3300, loss[loss=0.2274, simple_loss=0.3061, pruned_loss=0.07434, over 12893.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2768, pruned_loss=0.0537, over 2640473.34 frames. ], batch size: 52, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:46:28,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=198971.66666666666, ans=0.2 +2024-08-03 18:46:33,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=199008.33333333334, ans=0.05 +2024-08-03 18:46:38,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=199045.0, ans=0.1 +2024-08-03 18:46:39,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.58 vs. 
limit=15.0 +2024-08-03 18:47:04,156 INFO [train.py:1114] (2/4) Epoch 15, batch 3350, loss[loss=0.2239, simple_loss=0.3076, pruned_loss=0.07006, over 13062.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2787, pruned_loss=0.05473, over 2629252.27 frames. ], batch size: 48, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:47:04,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199155.0, ans=0.125 +2024-08-03 18:47:07,472 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.754e+01 1.143e+02 1.314e+02 1.590e+02 2.231e+02, threshold=2.628e+02, percent-clipped=0.0 +2024-08-03 18:47:14,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=199191.66666666666, ans=0.125 +2024-08-03 18:47:18,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=199191.66666666666, ans=10.0 +2024-08-03 18:47:21,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199228.33333333334, ans=0.125 +2024-08-03 18:47:32,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=199265.0, ans=0.125 +2024-08-03 18:47:34,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=199265.0, ans=0.125 +2024-08-03 18:47:44,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.85 vs. limit=15.0 +2024-08-03 18:47:47,284 INFO [train.py:1114] (2/4) Epoch 15, batch 3400, loss[loss=0.1632, simple_loss=0.2481, pruned_loss=0.03915, over 13507.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2787, pruned_loss=0.05478, over 2625927.47 frames. ], batch size: 31, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:47:55,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=199375.0, ans=0.04949747468305833 +2024-08-03 18:47:59,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199375.0, ans=0.125 +2024-08-03 18:48:07,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199411.66666666666, ans=0.125 +2024-08-03 18:48:18,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199448.33333333334, ans=0.1 +2024-08-03 18:48:30,462 INFO [train.py:1114] (2/4) Epoch 15, batch 3450, loss[loss=0.2241, simple_loss=0.3094, pruned_loss=0.06942, over 12872.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2791, pruned_loss=0.0549, over 2628232.48 frames. 
], batch size: 52, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:48:33,769 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.288e+01 1.166e+02 1.372e+02 1.679e+02 2.920e+02, threshold=2.743e+02, percent-clipped=3.0 +2024-08-03 18:48:35,544 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:48:46,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=199558.33333333334, ans=0.125 +2024-08-03 18:48:52,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=199595.0, ans=0.2 +2024-08-03 18:49:04,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199668.33333333334, ans=0.1 +2024-08-03 18:49:13,503 INFO [train.py:1114] (2/4) Epoch 15, batch 3500, loss[loss=0.1962, simple_loss=0.2777, pruned_loss=0.05739, over 13509.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2783, pruned_loss=0.05481, over 2630234.32 frames. ], batch size: 34, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:49:16,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=199705.0, ans=22.5 +2024-08-03 18:49:23,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.89 vs. limit=6.0 +2024-08-03 18:49:44,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=199815.0, ans=0.0 +2024-08-03 18:49:46,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.69 vs. limit=10.0 +2024-08-03 18:49:50,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.71 vs. limit=15.0 +2024-08-03 18:49:54,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=199851.66666666666, ans=0.125 +2024-08-03 18:49:55,944 INFO [train.py:1114] (2/4) Epoch 15, batch 3550, loss[loss=0.2006, simple_loss=0.2812, pruned_loss=0.05997, over 12456.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2808, pruned_loss=0.05597, over 2629319.35 frames. 
], batch size: 58, lr: 8.18e-03, grad_scale: 16.0 +2024-08-03 18:49:59,302 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.733e+01 1.107e+02 1.282e+02 1.546e+02 2.459e+02, threshold=2.565e+02, percent-clipped=0.0 +2024-08-03 18:50:09,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=199925.0, ans=0.025 +2024-08-03 18:50:12,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199961.66666666666, ans=0.125 +2024-08-03 18:50:18,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199961.66666666666, ans=0.125 +2024-08-03 18:50:24,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=199998.33333333334, ans=0.0 +2024-08-03 18:50:33,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=200035.0, ans=0.0 +2024-08-03 18:50:40,663 INFO [train.py:1114] (2/4) Epoch 15, batch 3600, loss[loss=0.2096, simple_loss=0.2824, pruned_loss=0.06844, over 8863.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2851, pruned_loss=0.05987, over 2488992.83 frames. ], batch size: 96, lr: 8.18e-03, grad_scale: 32.0 +2024-08-03 18:50:50,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=200108.33333333334, ans=0.125 +2024-08-03 18:51:15,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=200218.33333333334, ans=0.2 +2024-08-03 18:52:09,588 INFO [train.py:1114] (2/4) Epoch 16, batch 0, loss[loss=0.1696, simple_loss=0.2584, pruned_loss=0.0404, over 13338.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2584, pruned_loss=0.0404, over 13338.00 frames. ], batch size: 33, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:52:09,588 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 18:52:19,527 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1763, simple_loss=0.2767, pruned_loss=0.03798, over 944034.00 frames. +2024-08-03 18:52:19,527 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 18:52:26,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=200222.0, ans=0.0 +2024-08-03 18:52:33,152 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.265e+01 1.242e+02 1.434e+02 1.560e+02 1.878e+02, threshold=2.867e+02, percent-clipped=0.0 +2024-08-03 18:52:53,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200332.0, ans=0.125 +2024-08-03 18:52:57,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.46 vs. limit=15.0 +2024-08-03 18:53:02,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=200368.66666666666, ans=0.09899494936611666 +2024-08-03 18:53:05,368 INFO [train.py:1114] (2/4) Epoch 16, batch 50, loss[loss=0.1685, simple_loss=0.2511, pruned_loss=0.04301, over 13413.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2815, pruned_loss=0.05633, over 578195.23 frames. 
], batch size: 32, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:53:05,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=200405.33333333334, ans=0.0 +2024-08-03 18:53:10,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=200405.33333333334, ans=0.0 +2024-08-03 18:53:29,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=200478.66666666666, ans=0.0 +2024-08-03 18:53:42,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200552.0, ans=0.1 +2024-08-03 18:53:50,758 INFO [train.py:1114] (2/4) Epoch 16, batch 100, loss[loss=0.1813, simple_loss=0.2661, pruned_loss=0.04821, over 13529.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2812, pruned_loss=0.05496, over 1025306.91 frames. ], batch size: 35, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:53:51,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=200588.66666666666, ans=0.025 +2024-08-03 18:54:06,123 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.140e+02 1.333e+02 1.689e+02 2.611e+02, threshold=2.666e+02, percent-clipped=0.0 +2024-08-03 18:54:09,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=200625.33333333334, ans=0.125 +2024-08-03 18:54:11,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=200662.0, ans=0.125 +2024-08-03 18:54:22,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200698.66666666666, ans=0.1 +2024-08-03 18:54:24,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200698.66666666666, ans=0.125 +2024-08-03 18:54:38,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.41 vs. limit=22.5 +2024-08-03 18:54:43,434 INFO [train.py:1114] (2/4) Epoch 16, batch 150, loss[loss=0.1771, simple_loss=0.2555, pruned_loss=0.04937, over 13415.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2777, pruned_loss=0.05363, over 1386329.08 frames. ], batch size: 32, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:54:43,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=200772.0, ans=0.125 +2024-08-03 18:54:45,410 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:54:48,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. 
limit=15.0 +2024-08-03 18:55:01,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200845.33333333334, ans=0.0 +2024-08-03 18:55:21,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=200918.66666666666, ans=0.0 +2024-08-03 18:55:27,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=200918.66666666666, ans=10.0 +2024-08-03 18:55:29,204 INFO [train.py:1114] (2/4) Epoch 16, batch 200, loss[loss=0.2265, simple_loss=0.3058, pruned_loss=0.07354, over 12331.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2767, pruned_loss=0.05344, over 1665144.99 frames. ], batch size: 58, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:55:42,874 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.144e+01 1.093e+02 1.229e+02 1.620e+02 3.492e+02, threshold=2.459e+02, percent-clipped=5.0 +2024-08-03 18:56:18,896 INFO [train.py:1114] (2/4) Epoch 16, batch 250, loss[loss=0.2119, simple_loss=0.2908, pruned_loss=0.0665, over 13318.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2768, pruned_loss=0.05337, over 1884831.67 frames. ], batch size: 46, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:56:30,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=201175.33333333334, ans=0.125 +2024-08-03 18:56:33,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201175.33333333334, ans=0.125 +2024-08-03 18:56:45,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201212.0, ans=0.1 +2024-08-03 18:56:51,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201248.66666666666, ans=0.0 +2024-08-03 18:57:00,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201285.33333333334, ans=0.125 +2024-08-03 18:57:05,285 INFO [train.py:1114] (2/4) Epoch 16, batch 300, loss[loss=0.1941, simple_loss=0.2873, pruned_loss=0.0505, over 13426.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2761, pruned_loss=0.05332, over 2051600.36 frames. ], batch size: 42, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:57:06,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201322.0, ans=0.125 +2024-08-03 18:57:19,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.378e+01 1.118e+02 1.251e+02 1.604e+02 3.551e+02, threshold=2.502e+02, percent-clipped=3.0 +2024-08-03 18:57:30,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.91 vs. 
limit=10.0 +2024-08-03 18:57:31,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201395.33333333334, ans=0.1 +2024-08-03 18:57:39,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=201432.0, ans=0.125 +2024-08-03 18:57:50,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=201468.66666666666, ans=0.025 +2024-08-03 18:57:53,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.80 vs. limit=22.5 +2024-08-03 18:57:58,324 INFO [train.py:1114] (2/4) Epoch 16, batch 350, loss[loss=0.1903, simple_loss=0.2714, pruned_loss=0.05461, over 13602.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2761, pruned_loss=0.05301, over 2182399.71 frames. ], batch size: 33, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:58:05,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.40 vs. limit=10.0 +2024-08-03 18:58:07,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.00 vs. limit=15.0 +2024-08-03 18:58:15,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=201542.0, ans=0.1 +2024-08-03 18:58:21,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.40 vs. limit=15.0 +2024-08-03 18:58:22,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=201578.66666666666, ans=0.0 +2024-08-03 18:58:27,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.91 vs. limit=6.0 +2024-08-03 18:58:35,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=201615.33333333334, ans=0.2 +2024-08-03 18:58:48,033 INFO [train.py:1114] (2/4) Epoch 16, batch 400, loss[loss=0.1904, simple_loss=0.2769, pruned_loss=0.05193, over 13358.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2754, pruned_loss=0.05248, over 2286873.09 frames. 
], batch size: 37, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:58:54,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=201688.66666666666, ans=0.125 +2024-08-03 18:59:01,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.809e+01 1.125e+02 1.305e+02 1.618e+02 2.689e+02, threshold=2.611e+02, percent-clipped=3.0 +2024-08-03 18:59:03,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201725.33333333334, ans=0.0 +2024-08-03 18:59:16,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=201798.66666666666, ans=0.025 +2024-08-03 18:59:23,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=201835.33333333334, ans=0.125 +2024-08-03 18:59:31,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=201835.33333333334, ans=0.05 +2024-08-03 18:59:31,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=201835.33333333334, ans=0.125 +2024-08-03 18:59:31,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=201835.33333333334, ans=0.2 +2024-08-03 18:59:32,889 INFO [train.py:1114] (2/4) Epoch 16, batch 450, loss[loss=0.2049, simple_loss=0.2932, pruned_loss=0.05828, over 13545.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2753, pruned_loss=0.05246, over 2359419.92 frames. ], batch size: 38, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 19:00:05,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=201982.0, ans=0.07 +2024-08-03 19:00:52,725 INFO [train.py:1114] (2/4) Epoch 16, batch 500, loss[loss=0.2146, simple_loss=0.2991, pruned_loss=0.06507, over 13412.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2752, pruned_loss=0.05267, over 2424862.82 frames. ], batch size: 43, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 19:01:03,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=202092.0, ans=0.2 +2024-08-03 19:01:06,064 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.073e+02 1.280e+02 1.513e+02 2.984e+02, threshold=2.559e+02, percent-clipped=3.0 +2024-08-03 19:01:17,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=202128.66666666666, ans=0.05 +2024-08-03 19:01:32,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.90 vs. limit=22.5 +2024-08-03 19:01:37,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.58 vs. limit=15.0 +2024-08-03 19:01:37,607 INFO [train.py:1114] (2/4) Epoch 16, batch 550, loss[loss=0.1971, simple_loss=0.2804, pruned_loss=0.05687, over 13074.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.275, pruned_loss=0.05267, over 2467716.93 frames. 
], batch size: 48, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:01:37,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=202238.66666666666, ans=0.1 +2024-08-03 19:01:47,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=202275.33333333334, ans=0.0 +2024-08-03 19:01:53,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=202275.33333333334, ans=0.125 +2024-08-03 19:01:58,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=202312.0, ans=0.0 +2024-08-03 19:02:07,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.95 vs. limit=15.0 +2024-08-03 19:02:10,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=202348.66666666666, ans=0.125 +2024-08-03 19:02:10,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202348.66666666666, ans=0.1 +2024-08-03 19:02:11,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=202348.66666666666, ans=0.2 +2024-08-03 19:02:30,469 INFO [train.py:1114] (2/4) Epoch 16, batch 600, loss[loss=0.2193, simple_loss=0.3041, pruned_loss=0.06728, over 13282.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2749, pruned_loss=0.05233, over 2507408.58 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:02:33,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=202422.0, ans=0.125 +2024-08-03 19:02:35,046 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:02:35,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=202422.0, ans=22.5 +2024-08-03 19:02:43,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.542e+01 1.053e+02 1.176e+02 1.457e+02 2.332e+02, threshold=2.351e+02, percent-clipped=0.0 +2024-08-03 19:02:44,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=202458.66666666666, ans=0.04949747468305833 +2024-08-03 19:02:51,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=202495.33333333334, ans=0.015 +2024-08-03 19:02:56,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202532.0, ans=0.125 +2024-08-03 19:03:02,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=202532.0, ans=0.0 +2024-08-03 19:03:10,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=202568.66666666666, ans=0.07 +2024-08-03 19:03:15,279 INFO [train.py:1114] (2/4) Epoch 16, batch 650, loss[loss=0.1822, simple_loss=0.2694, pruned_loss=0.04748, over 13536.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2743, pruned_loss=0.05186, over 2542737.67 frames. 
], batch size: 37, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:03:24,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=202642.0, ans=0.025 +2024-08-03 19:03:52,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=202752.0, ans=0.0 +2024-08-03 19:03:55,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=202752.0, ans=0.125 +2024-08-03 19:04:00,440 INFO [train.py:1114] (2/4) Epoch 16, batch 700, loss[loss=0.1778, simple_loss=0.2618, pruned_loss=0.04694, over 13543.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2749, pruned_loss=0.05206, over 2564997.72 frames. ], batch size: 35, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:03,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=202788.66666666666, ans=0.0 +2024-08-03 19:04:10,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=202825.33333333334, ans=0.025 +2024-08-03 19:04:13,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.401e+01 1.143e+02 1.370e+02 1.738e+02 3.116e+02, threshold=2.740e+02, percent-clipped=8.0 +2024-08-03 19:04:16,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=202825.33333333334, ans=0.125 +2024-08-03 19:04:18,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.20 vs. limit=12.0 +2024-08-03 19:04:20,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=202862.0, ans=0.025 +2024-08-03 19:04:21,754 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.90 vs. limit=22.5 +2024-08-03 19:04:32,748 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.86 vs. limit=6.0 +2024-08-03 19:04:42,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.84 vs. limit=15.0 +2024-08-03 19:04:45,952 INFO [train.py:1114] (2/4) Epoch 16, batch 750, loss[loss=0.1928, simple_loss=0.2834, pruned_loss=0.05109, over 13355.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2744, pruned_loss=0.05213, over 2582151.71 frames. 
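A regularity worth noting in the [optim.py:487] warnings: the reported threshold is consistently Clipping_scale times the median of the grad-norm quartiles (2.0 x 1.370e+02 = 2.740e+02 in the warning just above), and percent-clipped is the share of recent steps whose gradient norm exceeded that threshold. A minimal sketch of this bookkeeping over a sliding window of per-step gradient norms; the function name and window handling are illustrative, not icefall's actual optimizer internals:

import torch

def clipping_diagnostics(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    """grad_norms: recent per-step gradient norms, shape (window,).

    Reproduces the shape of the "grad-norm quartiles ... threshold=...,
    percent-clipped=..." warnings: five quartiles, a threshold of
    clipping_scale times the median, and the fraction of norms above it.
    """
    q = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]                    # scale times the median
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return q, threshold, percent_clipped

# With a window whose quartiles match the warning above, the threshold is
# 2.0 * 137.0 = 274.0, i.e. the logged 2.740e+02; here 1 of 5 norms exceeds it.
norms = torch.tensor([94.01, 114.3, 137.0, 173.8, 311.6])
q, thr, pc = clipping_diagnostics(norms)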
], batch size: 37, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:49,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=202972.0, ans=0.125 +2024-08-03 19:04:55,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=203008.66666666666, ans=0.125 +2024-08-03 19:04:58,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203008.66666666666, ans=0.125 +2024-08-03 19:05:02,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=203008.66666666666, ans=0.125 +2024-08-03 19:05:02,552 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.44 vs. limit=22.5 +2024-08-03 19:05:17,665 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.632e-03 +2024-08-03 19:05:33,741 INFO [train.py:1114] (2/4) Epoch 16, batch 800, loss[loss=0.1611, simple_loss=0.2447, pruned_loss=0.03875, over 13334.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2745, pruned_loss=0.05223, over 2596804.84 frames. ], batch size: 33, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:05:48,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203192.0, ans=0.125 +2024-08-03 19:05:49,282 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.111e+01 1.103e+02 1.311e+02 1.683e+02 3.142e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 19:05:49,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=203192.0, ans=0.125 +2024-08-03 19:05:52,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=203192.0, ans=0.0 +2024-08-03 19:06:08,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-08-03 19:06:13,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0 +2024-08-03 19:06:15,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=203302.0, ans=0.025 +2024-08-03 19:06:16,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203302.0, ans=0.125 +2024-08-03 19:06:21,244 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:06:23,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0 +2024-08-03 19:06:25,699 INFO [train.py:1114] (2/4) Epoch 16, batch 850, loss[loss=0.1735, simple_loss=0.2655, pruned_loss=0.04075, over 13331.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2747, pruned_loss=0.05243, over 2609955.57 frames. 
], batch size: 40, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:06:41,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203375.33333333334, ans=0.1 +2024-08-03 19:06:43,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203412.0, ans=0.125 +2024-08-03 19:06:54,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=203448.66666666666, ans=0.0 +2024-08-03 19:06:58,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=203448.66666666666, ans=0.0 +2024-08-03 19:07:10,792 INFO [train.py:1114] (2/4) Epoch 16, batch 900, loss[loss=0.1654, simple_loss=0.2444, pruned_loss=0.04325, over 13355.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2752, pruned_loss=0.05271, over 2612933.99 frames. ], batch size: 33, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:07:18,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=203522.0, ans=0.025 +2024-08-03 19:07:18,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=203558.66666666666, ans=0.125 +2024-08-03 19:07:19,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=203558.66666666666, ans=0.035 +2024-08-03 19:07:19,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=203558.66666666666, ans=0.0 +2024-08-03 19:07:24,142 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.183e+01 1.118e+02 1.381e+02 1.663e+02 2.638e+02, threshold=2.763e+02, percent-clipped=1.0 +2024-08-03 19:07:29,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203595.33333333334, ans=0.1 +2024-08-03 19:07:31,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203595.33333333334, ans=0.1 +2024-08-03 19:07:47,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=203668.66666666666, ans=0.0 +2024-08-03 19:07:47,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=203668.66666666666, ans=0.2 +2024-08-03 19:07:50,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=203668.66666666666, ans=0.2 +2024-08-03 19:07:52,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=203668.66666666666, ans=0.025 +2024-08-03 19:07:55,954 INFO [train.py:1114] (2/4) Epoch 16, batch 950, loss[loss=0.1763, simple_loss=0.2546, pruned_loss=0.04895, over 13534.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.275, pruned_loss=0.0527, over 2613988.37 frames. 
], batch size: 34, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:08:22,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203815.33333333334, ans=0.1 +2024-08-03 19:08:22,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=203815.33333333334, ans=0.125 +2024-08-03 19:08:35,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=203852.0, ans=0.125 +2024-08-03 19:08:40,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=203888.66666666666, ans=0.0 +2024-08-03 19:08:41,464 INFO [train.py:1114] (2/4) Epoch 16, batch 1000, loss[loss=0.1781, simple_loss=0.2683, pruned_loss=0.04396, over 13358.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2757, pruned_loss=0.05289, over 2612245.99 frames. ], batch size: 35, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:08:55,033 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.698e+01 1.080e+02 1.221e+02 1.447e+02 2.524e+02, threshold=2.442e+02, percent-clipped=0.0 +2024-08-03 19:08:57,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.94 vs. limit=6.0 +2024-08-03 19:09:22,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=203998.66666666666, ans=15.0 +2024-08-03 19:09:29,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204035.33333333334, ans=0.125 +2024-08-03 19:09:34,129 INFO [train.py:1114] (2/4) Epoch 16, batch 1050, loss[loss=0.1857, simple_loss=0.2792, pruned_loss=0.04609, over 13562.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.275, pruned_loss=0.05265, over 2616260.69 frames. ], batch size: 39, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:09:40,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.27 vs. limit=15.0 +2024-08-03 19:09:49,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=204108.66666666666, ans=0.2 +2024-08-03 19:10:00,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=204145.33333333334, ans=0.125 +2024-08-03 19:10:10,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204182.0, ans=0.125 +2024-08-03 19:10:21,100 INFO [train.py:1114] (2/4) Epoch 16, batch 1100, loss[loss=0.157, simple_loss=0.2465, pruned_loss=0.03372, over 13562.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2748, pruned_loss=0.05254, over 2620295.00 frames. 
], batch size: 36, lr: 7.84e-03, grad_scale: 16.0 +2024-08-03 19:10:35,472 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.269e+01 1.085e+02 1.218e+02 1.448e+02 2.223e+02, threshold=2.436e+02, percent-clipped=0.0 +2024-08-03 19:10:38,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204328.66666666666, ans=0.125 +2024-08-03 19:10:52,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=15.0 +2024-08-03 19:11:03,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=204402.0, ans=0.125 +2024-08-03 19:11:06,546 INFO [train.py:1114] (2/4) Epoch 16, batch 1150, loss[loss=0.1907, simple_loss=0.2743, pruned_loss=0.05352, over 13582.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2746, pruned_loss=0.05244, over 2619302.37 frames. ], batch size: 36, lr: 7.83e-03, grad_scale: 16.0 +2024-08-03 19:11:17,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=204475.33333333334, ans=0.2 +2024-08-03 19:11:21,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=204475.33333333334, ans=0.125 +2024-08-03 19:11:33,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=204548.66666666666, ans=0.125 +2024-08-03 19:11:52,578 INFO [train.py:1114] (2/4) Epoch 16, batch 1200, loss[loss=0.222, simple_loss=0.2953, pruned_loss=0.07438, over 13576.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2762, pruned_loss=0.05296, over 2617279.76 frames. ], batch size: 39, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:11:56,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=204622.0, ans=0.0 +2024-08-03 19:11:58,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=204622.0, ans=0.125 +2024-08-03 19:11:58,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=204622.0, ans=0.0 +2024-08-03 19:12:00,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=204658.66666666666, ans=0.125 +2024-08-03 19:12:06,566 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.149e+02 1.396e+02 1.741e+02 2.381e+02, threshold=2.791e+02, percent-clipped=0.0 +2024-08-03 19:12:13,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204695.33333333334, ans=0.0 +2024-08-03 19:12:24,136 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.718e-03 +2024-08-03 19:12:39,494 INFO [train.py:1114] (2/4) Epoch 16, batch 1250, loss[loss=0.2152, simple_loss=0.3, pruned_loss=0.06519, over 13448.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.277, pruned_loss=0.05326, over 2628740.70 frames. 
], batch size: 42, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:12:47,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=204842.0, ans=0.0 +2024-08-03 19:12:55,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=204842.0, ans=0.125 +2024-08-03 19:13:19,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=204915.33333333334, ans=0.0 +2024-08-03 19:13:25,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=15.0 +2024-08-03 19:13:30,419 INFO [train.py:1114] (2/4) Epoch 16, batch 1300, loss[loss=0.1924, simple_loss=0.2797, pruned_loss=0.05259, over 12944.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2764, pruned_loss=0.05324, over 2631654.48 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:13:38,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=205025.33333333334, ans=0.2 +2024-08-03 19:13:38,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=205025.33333333334, ans=0.0 +2024-08-03 19:13:41,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=205025.33333333334, ans=0.125 +2024-08-03 19:13:44,835 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.037e+01 1.093e+02 1.262e+02 1.670e+02 2.902e+02, threshold=2.524e+02, percent-clipped=1.0 +2024-08-03 19:13:52,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=205062.0, ans=0.0 +2024-08-03 19:14:03,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=205098.66666666666, ans=0.125 +2024-08-03 19:14:10,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=205135.33333333334, ans=10.0 +2024-08-03 19:14:15,780 INFO [train.py:1114] (2/4) Epoch 16, batch 1350, loss[loss=0.1839, simple_loss=0.2657, pruned_loss=0.0511, over 13545.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2757, pruned_loss=0.05274, over 2639316.59 frames. 
], batch size: 37, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:14:16,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205172.0, ans=0.125 +2024-08-03 19:14:20,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=205172.0, ans=0.1 +2024-08-03 19:14:24,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=205208.66666666666, ans=0.0 +2024-08-03 19:14:24,372 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=4.276e-02 +2024-08-03 19:14:28,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=205208.66666666666, ans=15.0 +2024-08-03 19:14:33,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205245.33333333334, ans=0.125 +2024-08-03 19:14:40,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=205245.33333333334, ans=0.0 +2024-08-03 19:14:54,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=205318.66666666666, ans=0.125 +2024-08-03 19:15:02,776 INFO [train.py:1114] (2/4) Epoch 16, batch 1400, loss[loss=0.1671, simple_loss=0.2463, pruned_loss=0.04397, over 13263.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.275, pruned_loss=0.05221, over 2643112.65 frames. ], batch size: 31, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:15:11,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-08-03 19:15:17,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.642e+01 1.154e+02 1.285e+02 1.682e+02 2.521e+02, threshold=2.570e+02, percent-clipped=0.0 +2024-08-03 19:15:25,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=205428.66666666666, ans=0.125 +2024-08-03 19:15:40,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=205502.0, ans=0.125 +2024-08-03 19:15:43,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.87 vs. limit=5.0 +2024-08-03 19:15:48,518 INFO [train.py:1114] (2/4) Epoch 16, batch 1450, loss[loss=0.1913, simple_loss=0.2869, pruned_loss=0.04786, over 13448.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2757, pruned_loss=0.0525, over 2642003.11 frames. ], batch size: 43, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:15:50,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=205538.66666666666, ans=0.2 +2024-08-03 19:15:51,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.29 vs. limit=12.0 +2024-08-03 19:16:33,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=205685.33333333334, ans=0.125 +2024-08-03 19:16:37,159 INFO [train.py:1114] (2/4) Epoch 16, batch 1500, loss[loss=0.175, simple_loss=0.2654, pruned_loss=0.04231, over 13400.00 frames. 
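The ScheduledFloat entries that dominate this log each name a hyperparameter (dropout probabilities, skip rates, balancer limits), the current batch_count, and the scheduled value ans: these are not fixed constants but values looked up from training progress, which is why entries such as "encoder_embed.dropout.p, batch_count=205172.0, ans=0.1" drift over the run. A sketch of one plausible mechanism, piecewise-linear interpolation between (batch_count, value) breakpoints; treat the class below as an illustration, not the real scaling.py implementation:

class ScheduledFloat:
    """A float whose value depends on training progress: linear
    interpolation between sorted (batch_count, value) breakpoints,
    clamped to the end values outside that range."""

    def __init__(self, *points):
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# e.g. a dropout that decays from 0.3 to 0.1 over the first 20k batches and
# then stays flat, consistent with the ans=0.1 dropout entries above:
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(205172.0))   # -> 0.1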
], tot_loss[loss=0.19, simple_loss=0.2755, pruned_loss=0.0522, over 2641196.90 frames. ], batch size: 39, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:16:41,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205722.0, ans=0.0 +2024-08-03 19:16:50,015 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:16:56,106 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.141e+02 1.324e+02 1.628e+02 2.574e+02, threshold=2.648e+02, percent-clipped=1.0 +2024-08-03 19:17:08,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=205832.0, ans=0.125 +2024-08-03 19:17:19,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=205868.66666666666, ans=0.125 +2024-08-03 19:17:26,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.49 vs. limit=22.5 +2024-08-03 19:17:27,188 INFO [train.py:1114] (2/4) Epoch 16, batch 1550, loss[loss=0.187, simple_loss=0.2773, pruned_loss=0.04835, over 13398.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2759, pruned_loss=0.05259, over 2631305.33 frames. ], batch size: 41, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:17:36,552 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:17:38,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=205942.0, ans=0.05 +2024-08-03 19:17:45,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=205978.66666666666, ans=0.0 +2024-08-03 19:18:12,966 INFO [train.py:1114] (2/4) Epoch 16, batch 1600, loss[loss=0.2269, simple_loss=0.3194, pruned_loss=0.06715, over 13570.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.276, pruned_loss=0.05286, over 2625075.01 frames. ], batch size: 39, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:18:26,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=206125.33333333334, ans=0.5 +2024-08-03 19:18:27,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.061e+01 1.163e+02 1.376e+02 1.726e+02 3.125e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 19:18:39,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206198.66666666666, ans=0.1 +2024-08-03 19:18:58,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.96 vs. limit=22.5 +2024-08-03 19:18:58,353 INFO [train.py:1114] (2/4) Epoch 16, batch 1650, loss[loss=0.2067, simple_loss=0.2941, pruned_loss=0.05963, over 13327.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2768, pruned_loss=0.05342, over 2620714.89 frames. 
], batch size: 40, lr: 7.80e-03, grad_scale: 16.0 +2024-08-03 19:19:01,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=206272.0, ans=0.0 +2024-08-03 19:19:04,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=206272.0, ans=0.125 +2024-08-03 19:19:07,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206308.66666666666, ans=0.1 +2024-08-03 19:19:09,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.86 vs. limit=15.0 +2024-08-03 19:19:12,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206308.66666666666, ans=0.1 +2024-08-03 19:19:25,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=206382.0, ans=0.2 +2024-08-03 19:19:28,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206382.0, ans=0.125 +2024-08-03 19:19:32,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206382.0, ans=0.1 +2024-08-03 19:19:44,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=206455.33333333334, ans=0.125 +2024-08-03 19:19:45,354 INFO [train.py:1114] (2/4) Epoch 16, batch 1700, loss[loss=0.1796, simple_loss=0.2556, pruned_loss=0.05175, over 13262.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2767, pruned_loss=0.05324, over 2628926.57 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:19:46,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206455.33333333334, ans=0.125 +2024-08-03 19:19:48,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=206455.33333333334, ans=0.0 +2024-08-03 19:19:49,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=206455.33333333334, ans=0.125 +2024-08-03 19:19:59,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=206492.0, ans=0.2 +2024-08-03 19:20:02,479 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.148e+01 1.167e+02 1.345e+02 1.765e+02 2.775e+02, threshold=2.690e+02, percent-clipped=1.0 +2024-08-03 19:20:11,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206528.66666666666, ans=0.1 +2024-08-03 19:20:13,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. 
limit=15.0 +2024-08-03 19:20:16,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=206565.33333333334, ans=0.2 +2024-08-03 19:20:19,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=206565.33333333334, ans=0.0 +2024-08-03 19:20:19,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206565.33333333334, ans=0.125 +2024-08-03 19:20:26,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=206602.0, ans=0.025 +2024-08-03 19:20:36,091 INFO [train.py:1114] (2/4) Epoch 16, batch 1750, loss[loss=0.1733, simple_loss=0.2493, pruned_loss=0.04868, over 13531.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.05279, over 2632204.57 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:20:39,485 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.49 vs. limit=15.0 +2024-08-03 19:20:41,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.75 vs. limit=22.5 +2024-08-03 19:20:53,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=206712.0, ans=0.0 +2024-08-03 19:20:55,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=206712.0, ans=0.0 +2024-08-03 19:20:57,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=206712.0, ans=0.125 +2024-08-03 19:21:15,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-08-03 19:21:21,428 INFO [train.py:1114] (2/4) Epoch 16, batch 1800, loss[loss=0.1925, simple_loss=0.2859, pruned_loss=0.04953, over 13554.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2761, pruned_loss=0.05275, over 2633610.54 frames. ], batch size: 38, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:21:22,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=206822.0, ans=0.04949747468305833 +2024-08-03 19:21:24,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206822.0, ans=0.125 +2024-08-03 19:21:36,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.527e+01 1.147e+02 1.312e+02 1.685e+02 2.855e+02, threshold=2.624e+02, percent-clipped=1.0 +2024-08-03 19:22:01,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.45 vs. limit=8.0 +2024-08-03 19:22:06,967 INFO [train.py:1114] (2/4) Epoch 16, batch 1850, loss[loss=0.2065, simple_loss=0.2971, pruned_loss=0.05798, over 13397.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2755, pruned_loss=0.05233, over 2637255.62 frames. 
], batch size: 39, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:22:08,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=207005.33333333334, ans=0.125 +2024-08-03 19:22:08,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-08-03 19:22:12,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.07 vs. limit=15.0 +2024-08-03 19:22:14,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=207005.33333333334, ans=0.125 +2024-08-03 19:22:17,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=207042.0, ans=0.025 +2024-08-03 19:22:18,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.87 vs. limit=15.0 +2024-08-03 19:22:27,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=207078.66666666666, ans=0.0 +2024-08-03 19:22:28,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=207078.66666666666, ans=0.0 +2024-08-03 19:22:32,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=207078.66666666666, ans=0.1 +2024-08-03 19:22:45,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=15.0 +2024-08-03 19:22:49,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=207152.0, ans=0.125 +2024-08-03 19:22:57,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207152.0, ans=0.125 +2024-08-03 19:22:58,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=207188.66666666666, ans=0.2 +2024-08-03 19:22:58,926 INFO [train.py:1114] (2/4) Epoch 16, batch 1900, loss[loss=0.1929, simple_loss=0.2894, pruned_loss=0.04821, over 13327.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2763, pruned_loss=0.05241, over 2639934.98 frames. ], batch size: 40, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:23:29,099 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.263e+01 1.127e+02 1.354e+02 1.894e+02 2.950e+02, threshold=2.708e+02, percent-clipped=4.0 +2024-08-03 19:23:57,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=207335.33333333334, ans=0.125 +2024-08-03 19:24:03,032 INFO [train.py:1114] (2/4) Epoch 16, batch 1950, loss[loss=0.1749, simple_loss=0.2615, pruned_loss=0.04414, over 13555.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.277, pruned_loss=0.05274, over 2646396.73 frames. 
], batch size: 36, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:24:04,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=207372.0, ans=0.125 +2024-08-03 19:24:27,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.93 vs. limit=6.0 +2024-08-03 19:24:39,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=207482.0, ans=0.95 +2024-08-03 19:24:48,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207518.66666666666, ans=0.1 +2024-08-03 19:24:52,066 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:24:53,795 INFO [train.py:1114] (2/4) Epoch 16, batch 2000, loss[loss=0.1726, simple_loss=0.2533, pruned_loss=0.04593, over 13558.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2774, pruned_loss=0.05298, over 2635606.76 frames. ], batch size: 31, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:24:59,725 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=15.0 +2024-08-03 19:25:00,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=207555.33333333334, ans=0.0 +2024-08-03 19:25:00,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207555.33333333334, ans=0.1 +2024-08-03 19:25:09,399 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.745e+01 1.151e+02 1.402e+02 1.831e+02 3.066e+02, threshold=2.804e+02, percent-clipped=4.0 +2024-08-03 19:25:13,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=207628.66666666666, ans=0.125 +2024-08-03 19:25:16,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=207628.66666666666, ans=0.025 +2024-08-03 19:25:17,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=207628.66666666666, ans=0.0 +2024-08-03 19:25:22,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=207665.33333333334, ans=0.04949747468305833 +2024-08-03 19:25:33,732 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:25:38,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=207738.66666666666, ans=0.025 +2024-08-03 19:25:38,900 INFO [train.py:1114] (2/4) Epoch 16, batch 2050, loss[loss=0.1693, simple_loss=0.2473, pruned_loss=0.04567, over 13407.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2764, pruned_loss=0.05312, over 2632437.00 frames. 
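The Whitening entries compare a metric against a limit ("metric=1.93 vs. limit=6.0" just above), and the module only intervenes when the metric exceeds the limit, i.e. when the channel covariance has drifted far from white. A scale-invariant metric with exactly this behavior is the ratio mean(eig^2) / mean(eig)^2 of the covariance eigenvalues: it equals 1.0 for a perfectly white (isotropic) spectrum and grows as energy concentrates in a few directions. The sketch below assumes this definition rather than quoting scaling.py:

import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: features of shape (num_frames, num_channels).

    Returns mean(eig^2) / mean(eig)^2 of the channel covariance, using
    trace identities so no eigendecomposition is needed: trace(C)/n is
    the mean eigenvalue and trace(C @ C)/n the mean squared eigenvalue.
    """
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    mean_eig = torch.diagonal(cov).mean()
    mean_eig_sq = torch.diagonal(cov @ cov).mean()
    return float(mean_eig_sq / (mean_eig ** 2 + 1e-20))

# Isotropic features score near 1.0, far below limits like 6.0 or 22.5;
# a degenerate covariance scores much higher and would be penalized.
print(whitening_metric(torch.randn(4000, 128)))   # approximately 1.0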
], batch size: 32, lr: 7.77e-03, grad_scale: 32.0
+2024-08-03 19:25:48,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=207775.33333333334, ans=0.025
+2024-08-03 19:26:01,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=207812.0, ans=0.125
+2024-08-03 19:26:23,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.45 vs. limit=22.5
+2024-08-03 19:26:23,759 INFO [train.py:1114] (2/4) Epoch 16, batch 2100, loss[loss=0.1874, simple_loss=0.2731, pruned_loss=0.05079, over 13543.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2757, pruned_loss=0.05253, over 2637446.73 frames. ], batch size: 37, lr: 7.77e-03, grad_scale: 32.0
+2024-08-03 19:26:28,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=207922.0, ans=0.0
+2024-08-03 19:26:28,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207922.0, ans=0.125
+2024-08-03 19:26:38,692 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.022e+01 1.073e+02 1.217e+02 1.568e+02 3.232e+02, threshold=2.433e+02, percent-clipped=1.0
+2024-08-03 19:26:56,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0
+2024-08-03 19:27:03,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=208068.66666666666, ans=0.035
+2024-08-03 19:27:10,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=208105.33333333334, ans=22.5
+2024-08-03 19:27:10,305 INFO [train.py:1114] (2/4) Epoch 16, batch 2150, loss[loss=0.1842, simple_loss=0.264, pruned_loss=0.05215, over 13569.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2751, pruned_loss=0.05236, over 2646357.87 frames. ], batch size: 36, lr: 7.76e-03, grad_scale: 32.0
+2024-08-03 19:27:10,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208105.33333333334, ans=0.125
+2024-08-03 19:27:17,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=208105.33333333334, ans=0.125
+2024-08-03 19:27:46,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=22.5
+2024-08-03 19:27:59,570 INFO [train.py:1114] (2/4) Epoch 16, batch 2200, loss[loss=0.1992, simple_loss=0.2881, pruned_loss=0.05518, over 13404.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2749, pruned_loss=0.05222, over 2644150.32 frames. ], batch size: 39, lr: 7.76e-03, grad_scale: 32.0
+2024-08-03 19:28:04,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.20 vs. limit=22.5
+2024-08-03 19:28:16,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208325.33333333334, ans=0.125
+2024-08-03 19:28:16,738 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.518e+01 1.187e+02 1.408e+02 1.826e+02 3.967e+02, threshold=2.817e+02, percent-clipped=9.0
+2024-08-03 19:28:18,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=208362.0, ans=0.125
+2024-08-03 19:28:26,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=208362.0, ans=0.125
+2024-08-03 19:28:46,816 INFO [train.py:1114] (2/4) Epoch 16, batch 2250, loss[loss=0.199, simple_loss=0.3016, pruned_loss=0.04817, over 13363.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2748, pruned_loss=0.05191, over 2641234.11 frames. ], batch size: 37, lr: 7.76e-03, grad_scale: 32.0
+2024-08-03 19:28:52,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=208472.0, ans=0.0
+2024-08-03 19:28:54,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=208472.0, ans=0.025
+2024-08-03 19:28:57,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.56 vs. limit=15.0
+2024-08-03 19:29:03,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.54 vs. limit=15.0
+2024-08-03 19:29:14,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=208582.0, ans=0.125
+2024-08-03 19:29:15,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=208582.0, ans=0.5
+2024-08-03 19:29:33,619 INFO [train.py:1114] (2/4) Epoch 16, batch 2300, loss[loss=0.1548, simple_loss=0.2437, pruned_loss=0.03299, over 13597.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2738, pruned_loss=0.05166, over 2637870.99 frames. ], batch size: 33, lr: 7.75e-03, grad_scale: 32.0
+2024-08-03 19:29:49,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.37 vs. limit=15.0
+2024-08-03 19:29:52,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=208692.0, ans=0.2
+2024-08-03 19:29:54,059 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.353e+01 1.164e+02 1.344e+02 1.643e+02 2.956e+02, threshold=2.688e+02, percent-clipped=1.0
+2024-08-03 19:30:02,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=208728.66666666666, ans=0.07
+2024-08-03 19:30:02,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=208728.66666666666, ans=0.025
+2024-08-03 19:30:03,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=208728.66666666666, ans=0.125
+2024-08-03 19:30:05,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=208765.33333333334, ans=0.0
+2024-08-03 19:30:19,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=208802.0, ans=0.5
+2024-08-03 19:30:24,643 INFO [train.py:1114] (2/4) Epoch 16, batch 2350, loss[loss=0.1931, simple_loss=0.2824, pruned_loss=0.05187, over 13529.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2735, pruned_loss=0.0512, over 2641307.25 frames. ], batch size: 38, lr: 7.75e-03, grad_scale: 32.0
+2024-08-03 19:30:26,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=208838.66666666666, ans=0.125
+2024-08-03 19:30:29,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=208838.66666666666, ans=0.2
+2024-08-03 19:30:33,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=208875.33333333334, ans=0.125
+2024-08-03 19:30:51,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=208912.0, ans=0.0
+2024-08-03 19:31:01,516 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:31:08,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=208985.33333333334, ans=0.125
+2024-08-03 19:31:14,341 INFO [train.py:1114] (2/4) Epoch 16, batch 2400, loss[loss=0.1836, simple_loss=0.2723, pruned_loss=0.04749, over 13535.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2749, pruned_loss=0.05186, over 2642384.29 frames. ], batch size: 35, lr: 7.75e-03, grad_scale: 32.0
+2024-08-03 19:31:40,583 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.713e+01 1.195e+02 1.361e+02 1.735e+02 2.883e+02, threshold=2.722e+02, percent-clipped=1.0
+2024-08-03 19:31:49,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=209095.33333333334, ans=0.1
+2024-08-03 19:32:08,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=209168.66666666666, ans=0.125
+2024-08-03 19:32:11,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.94 vs. limit=10.0
+2024-08-03 19:32:18,681 INFO [train.py:1114] (2/4) Epoch 16, batch 2450, loss[loss=0.2074, simple_loss=0.2855, pruned_loss=0.06465, over 13366.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2756, pruned_loss=0.05242, over 2632915.92 frames. ], batch size: 37, lr: 7.74e-03, grad_scale: 32.0
+2024-08-03 19:32:19,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=209205.33333333334, ans=0.0
+2024-08-03 19:32:19,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209205.33333333334, ans=0.125
+2024-08-03 19:32:32,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=209242.0, ans=0.04949747468305833
+2024-08-03 19:32:36,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.40 vs. limit=10.0
+2024-08-03 19:32:39,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=209278.66666666666, ans=0.0
+2024-08-03 19:32:40,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209278.66666666666, ans=0.1
+2024-08-03 19:32:54,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=209352.0, ans=0.0
+2024-08-03 19:32:54,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.20 vs. limit=22.5
+2024-08-03 19:32:56,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.96 vs. limit=15.0
+2024-08-03 19:32:56,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=209352.0, ans=0.2
+2024-08-03 19:33:03,797 INFO [train.py:1114] (2/4) Epoch 16, batch 2500, loss[loss=0.1904, simple_loss=0.2841, pruned_loss=0.04829, over 13404.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2756, pruned_loss=0.05221, over 2636281.24 frames. ], batch size: 39, lr: 7.74e-03, grad_scale: 32.0
+2024-08-03 19:33:21,419 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.166e+01 1.109e+02 1.260e+02 1.584e+02 2.146e+02, threshold=2.521e+02, percent-clipped=0.0
+2024-08-03 19:33:50,672 INFO [train.py:1114] (2/4) Epoch 16, batch 2550, loss[loss=0.183, simple_loss=0.2508, pruned_loss=0.05759, over 13539.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2759, pruned_loss=0.05236, over 2637443.08 frames. ], batch size: 31, lr: 7.74e-03, grad_scale: 32.0
+2024-08-03 19:33:53,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209572.0, ans=0.1
+2024-08-03 19:33:56,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=209572.0, ans=0.0
+2024-08-03 19:33:58,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209608.66666666666, ans=0.1
+2024-08-03 19:34:19,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.14 vs. limit=12.0
+2024-08-03 19:34:24,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=209682.0, ans=0.0
+2024-08-03 19:34:25,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209682.0, ans=0.1
+2024-08-03 19:34:36,110 INFO [train.py:1114] (2/4) Epoch 16, batch 2600, loss[loss=0.1768, simple_loss=0.2702, pruned_loss=0.04169, over 13571.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2756, pruned_loss=0.05229, over 2636694.48 frames. ], batch size: 36, lr: 7.73e-03, grad_scale: 32.0
+2024-08-03 19:34:47,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=6.0
+2024-08-03 19:34:49,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=209792.0, ans=0.125
+2024-08-03 19:34:50,514 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.224e+01 1.145e+02 1.272e+02 1.680e+02 2.511e+02, threshold=2.545e+02, percent-clipped=0.0
+2024-08-03 19:34:50,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=209792.0, ans=0.025
+2024-08-03 19:35:18,985 INFO [train.py:1114] (2/4) Epoch 16, batch 2650, loss[loss=0.199, simple_loss=0.2862, pruned_loss=0.05585, over 13291.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2759, pruned_loss=0.05218, over 2640214.34 frames. ], batch size: 46, lr: 7.73e-03, grad_scale: 32.0
+2024-08-03 19:35:24,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=209938.66666666666, ans=0.0
+2024-08-03 19:35:27,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=209975.33333333334, ans=0.0
+2024-08-03 19:35:44,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=210048.66666666666, ans=0.025
+2024-08-03 19:36:00,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=210085.33333333334, ans=15.0
+2024-08-03 19:36:02,102 INFO [train.py:1114] (2/4) Epoch 16, batch 2700, loss[loss=0.1997, simple_loss=0.2945, pruned_loss=0.05251, over 13552.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2766, pruned_loss=0.05262, over 2637820.43 frames. ], batch size: 40, lr: 7.73e-03, grad_scale: 32.0
+2024-08-03 19:36:05,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=210122.0, ans=0.0
+2024-08-03 19:36:16,696 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.859e+01 1.215e+02 1.396e+02 1.823e+02 2.794e+02, threshold=2.792e+02, percent-clipped=5.0
+2024-08-03 19:36:26,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=210195.33333333334, ans=0.95
+2024-08-03 19:36:29,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210232.0, ans=0.1
+2024-08-03 19:36:29,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=210232.0, ans=0.0
+2024-08-03 19:36:32,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=210232.0, ans=0.2
+2024-08-03 19:36:35,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.13 vs. limit=15.0
+2024-08-03 19:36:38,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=210268.66666666666, ans=0.0
+2024-08-03 19:36:43,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=210268.66666666666, ans=0.2
+2024-08-03 19:36:45,435 INFO [train.py:1114] (2/4) Epoch 16, batch 2750, loss[loss=0.1864, simple_loss=0.2683, pruned_loss=0.05223, over 13333.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2755, pruned_loss=0.05249, over 2635537.60 frames. ], batch size: 34, lr: 7.72e-03, grad_scale: 32.0
+2024-08-03 19:36:57,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=210342.0, ans=0.1
+2024-08-03 19:36:58,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=210342.0, ans=0.125
+2024-08-03 19:37:03,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=210378.66666666666, ans=0.125
+2024-08-03 19:37:16,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210415.33333333334, ans=0.1
+2024-08-03 19:37:20,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0
+2024-08-03 19:37:27,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=210452.0, ans=0.0
+2024-08-03 19:37:31,233 INFO [train.py:1114] (2/4) Epoch 16, batch 2800, loss[loss=0.2091, simple_loss=0.2902, pruned_loss=0.06405, over 9659.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2755, pruned_loss=0.05265, over 2627404.81 frames. ], batch size: 100, lr: 7.72e-03, grad_scale: 32.0
+2024-08-03 19:37:32,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=210488.66666666666, ans=0.125
+2024-08-03 19:37:33,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=210488.66666666666, ans=0.125
+2024-08-03 19:37:42,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=210525.33333333334, ans=0.125
+2024-08-03 19:37:42,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210525.33333333334, ans=0.1
+2024-08-03 19:37:45,864 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.762e+01 1.112e+02 1.298e+02 1.652e+02 2.703e+02, threshold=2.596e+02, percent-clipped=0.0
+2024-08-03 19:38:09,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=210635.33333333334, ans=0.5
+2024-08-03 19:38:12,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=210635.33333333334, ans=0.125
+2024-08-03 19:38:15,868 INFO [train.py:1114] (2/4) Epoch 16, batch 2850, loss[loss=0.1772, simple_loss=0.2659, pruned_loss=0.04427, over 13372.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2755, pruned_loss=0.05247, over 2620962.07 frames. ], batch size: 35, lr: 7.72e-03, grad_scale: 32.0
+2024-08-03 19:38:17,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=210672.0, ans=15.0
+2024-08-03 19:38:37,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=210745.33333333334, ans=0.0
+2024-08-03 19:38:37,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.35 vs. limit=15.0
+2024-08-03 19:38:40,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=210745.33333333334, ans=0.0
+2024-08-03 19:38:43,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. limit=15.0
+2024-08-03 19:38:44,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0
+2024-08-03 19:38:49,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=210782.0, ans=0.0
+2024-08-03 19:39:00,431 INFO [train.py:1114] (2/4) Epoch 16, batch 2900, loss[loss=0.1813, simple_loss=0.2727, pruned_loss=0.04497, over 13368.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2767, pruned_loss=0.05277, over 2631726.94 frames. ], batch size: 36, lr: 7.71e-03, grad_scale: 32.0
+2024-08-03 19:39:00,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=210855.33333333334, ans=0.0
+2024-08-03 19:39:04,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.46 vs. limit=15.0
+2024-08-03 19:39:05,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210855.33333333334, ans=0.1
+2024-08-03 19:39:06,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=12.0
+2024-08-03 19:39:16,544 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.624e+01 1.079e+02 1.232e+02 1.534e+02 2.946e+02, threshold=2.465e+02, percent-clipped=2.0
+2024-08-03 19:39:42,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211002.0, ans=0.0
+2024-08-03 19:39:46,445 INFO [train.py:1114] (2/4) Epoch 16, batch 2950, loss[loss=0.1686, simple_loss=0.2524, pruned_loss=0.04242, over 13335.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2763, pruned_loss=0.05296, over 2629717.33 frames. ], batch size: 34, lr: 7.71e-03, grad_scale: 32.0
+2024-08-03 19:39:57,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=211075.33333333334, ans=0.0
+2024-08-03 19:39:59,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=211075.33333333334, ans=0.025
+2024-08-03 19:40:30,439 INFO [train.py:1114] (2/4) Epoch 16, batch 3000, loss[loss=0.1891, simple_loss=0.2791, pruned_loss=0.04955, over 13548.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.05278, over 2630618.41 frames. ], batch size: 37, lr: 7.71e-03, grad_scale: 32.0
+2024-08-03 19:40:30,440 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 19:40:42,216 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1717, simple_loss=0.2708, pruned_loss=0.03625, over 944034.00 frames.
+2024-08-03 19:40:42,216 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 19:40:51,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=211258.66666666666, ans=0.125
+2024-08-03 19:40:55,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211258.66666666666, ans=0.125
+2024-08-03 19:40:56,642 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.281e+01 1.094e+02 1.222e+02 1.516e+02 2.979e+02, threshold=2.443e+02, percent-clipped=5.0
+2024-08-03 19:40:58,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=211295.33333333334, ans=0.2
+2024-08-03 19:40:59,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=211295.33333333334, ans=0.2
+2024-08-03 19:41:08,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=211332.0, ans=0.0
+2024-08-03 19:41:24,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=211368.66666666666, ans=10.0
+2024-08-03 19:41:25,994 INFO [train.py:1114] (2/4) Epoch 16, batch 3050, loss[loss=0.1985, simple_loss=0.2827, pruned_loss=0.05713, over 13543.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2759, pruned_loss=0.0526, over 2627999.33 frames. ], batch size: 35, lr: 7.70e-03, grad_scale: 32.0
+2024-08-03 19:41:37,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0
+2024-08-03 19:41:59,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=211552.0, ans=0.125
+2024-08-03 19:42:09,454 INFO [train.py:1114] (2/4) Epoch 16, batch 3100, loss[loss=0.2058, simple_loss=0.2939, pruned_loss=0.0589, over 13324.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2757, pruned_loss=0.05246, over 2628803.10 frames. ], batch size: 46, lr: 7.70e-03, grad_scale: 32.0
+2024-08-03 19:42:20,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211625.33333333334, ans=0.125
+2024-08-03 19:42:23,782 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.933e+01 1.091e+02 1.250e+02 1.567e+02 2.776e+02, threshold=2.501e+02, percent-clipped=2.0
+2024-08-03 19:42:24,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=211625.33333333334, ans=0.125
+2024-08-03 19:42:42,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.63 vs. limit=15.0
+2024-08-03 19:42:47,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=211735.33333333334, ans=0.125
+2024-08-03 19:42:51,921 INFO [train.py:1114] (2/4) Epoch 16, batch 3150, loss[loss=0.2175, simple_loss=0.2895, pruned_loss=0.07274, over 12988.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2758, pruned_loss=0.05265, over 2629303.45 frames. ], batch size: 48, lr: 7.70e-03, grad_scale: 32.0
+2024-08-03 19:42:57,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=211772.0, ans=0.025
+2024-08-03 19:43:10,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=211845.33333333334, ans=0.0
+2024-08-03 19:43:21,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211882.0, ans=0.1
+2024-08-03 19:43:35,469 INFO [train.py:1114] (2/4) Epoch 16, batch 3200, loss[loss=0.1823, simple_loss=0.2609, pruned_loss=0.05183, over 13544.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2748, pruned_loss=0.05203, over 2635106.97 frames. ], batch size: 37, lr: 7.69e-03, grad_scale: 32.0
+2024-08-03 19:43:49,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.470e+01 1.178e+02 1.467e+02 1.849e+02 2.870e+02, threshold=2.934e+02, percent-clipped=4.0
+2024-08-03 19:44:07,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212065.33333333334, ans=0.0
+2024-08-03 19:44:10,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212102.0, ans=0.125
+2024-08-03 19:44:16,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=212102.0, ans=0.0
+2024-08-03 19:44:18,726 INFO [train.py:1114] (2/4) Epoch 16, batch 3250, loss[loss=0.1827, simple_loss=0.268, pruned_loss=0.04874, over 13380.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2747, pruned_loss=0.05135, over 2639867.53 frames. ], batch size: 38, lr: 7.69e-03, grad_scale: 32.0
+2024-08-03 19:44:20,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=12.0
+2024-08-03 19:44:42,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212212.0, ans=0.1
+2024-08-03 19:44:49,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212248.66666666666, ans=0.125
+2024-08-03 19:45:02,682 INFO [train.py:1114] (2/4) Epoch 16, batch 3300, loss[loss=0.1983, simple_loss=0.2897, pruned_loss=0.05349, over 12893.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2739, pruned_loss=0.05111, over 2640978.88 frames. ], batch size: 52, lr: 7.69e-03, grad_scale: 32.0
+2024-08-03 19:45:17,672 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.171e+02 1.334e+02 1.762e+02 2.468e+02, threshold=2.668e+02, percent-clipped=0.0
+2024-08-03 19:45:19,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=212395.33333333334, ans=0.0
+2024-08-03 19:45:30,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=212432.0, ans=0.0
+2024-08-03 19:45:32,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=212432.0, ans=0.125
+2024-08-03 19:45:33,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212432.0, ans=0.0
+2024-08-03 19:45:39,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=212468.66666666666, ans=0.0
+2024-08-03 19:45:45,688 INFO [train.py:1114] (2/4) Epoch 16, batch 3350, loss[loss=0.2254, simple_loss=0.3043, pruned_loss=0.07322, over 13063.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.275, pruned_loss=0.05192, over 2631739.71 frames. ], batch size: 48, lr: 7.68e-03, grad_scale: 32.0
+2024-08-03 19:45:49,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=212505.33333333334, ans=0.07
+2024-08-03 19:46:04,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212578.66666666666, ans=0.125
+2024-08-03 19:46:13,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.14 vs. limit=15.0
+2024-08-03 19:46:23,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=212652.0, ans=0.2
+2024-08-03 19:46:26,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=212652.0, ans=0.2
+2024-08-03 19:46:29,928 INFO [train.py:1114] (2/4) Epoch 16, batch 3400, loss[loss=0.1642, simple_loss=0.2423, pruned_loss=0.04303, over 13545.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2748, pruned_loss=0.05191, over 2627124.09 frames. ], batch size: 31, lr: 7.68e-03, grad_scale: 32.0
+2024-08-03 19:46:39,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=212725.33333333334, ans=0.0
+2024-08-03 19:46:44,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.890e+01 1.091e+02 1.257e+02 1.485e+02 2.568e+02, threshold=2.513e+02, percent-clipped=0.0
+2024-08-03 19:46:47,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=212762.0, ans=0.125
+2024-08-03 19:47:01,080 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:47:02,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=212798.66666666666, ans=0.0
+2024-08-03 19:47:11,829 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:47:12,577 INFO [train.py:1114] (2/4) Epoch 16, batch 3450, loss[loss=0.2221, simple_loss=0.3051, pruned_loss=0.06953, over 12842.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2754, pruned_loss=0.05248, over 2630302.26 frames. ], batch size: 52, lr: 7.68e-03, grad_scale: 32.0
+2024-08-03 19:47:12,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=212872.0, ans=0.025
+2024-08-03 19:47:25,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=212908.66666666666, ans=0.025
+2024-08-03 19:47:33,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212945.33333333334, ans=0.125
+2024-08-03 19:47:43,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=212982.0, ans=0.0
+2024-08-03 19:47:55,630 INFO [train.py:1114] (2/4) Epoch 16, batch 3500, loss[loss=0.1728, simple_loss=0.2608, pruned_loss=0.04239, over 13523.00 frames. ], tot_loss[loss=0.19, simple_loss=0.275, pruned_loss=0.05245, over 2632259.18 frames. ], batch size: 34, lr: 7.67e-03, grad_scale: 32.0
+2024-08-03 19:48:01,039 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.77 vs. limit=6.0
+2024-08-03 19:48:10,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.228e+02 1.407e+02 1.881e+02 3.021e+02, threshold=2.813e+02, percent-clipped=7.0
+2024-08-03 19:48:16,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=213128.66666666666, ans=0.125
+2024-08-03 19:48:16,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=213128.66666666666, ans=0.0
+2024-08-03 19:48:18,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=213128.66666666666, ans=0.0
+2024-08-03 19:48:23,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=213165.33333333334, ans=0.125
+2024-08-03 19:48:34,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.90 vs. limit=15.0
+2024-08-03 19:48:38,353 INFO [train.py:1114] (2/4) Epoch 16, batch 3550, loss[loss=0.1937, simple_loss=0.2778, pruned_loss=0.05484, over 12487.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2772, pruned_loss=0.05344, over 2629499.29 frames. ], batch size: 58, lr: 7.67e-03, grad_scale: 16.0
+2024-08-03 19:48:39,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=213238.66666666666, ans=0.2
+2024-08-03 19:49:23,442 INFO [train.py:1114] (2/4) Epoch 16, batch 3600, loss[loss=0.2312, simple_loss=0.3044, pruned_loss=0.079, over 9228.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2816, pruned_loss=0.0574, over 2487131.89 frames. ], batch size: 96, lr: 7.67e-03, grad_scale: 32.0
+2024-08-03 19:49:27,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.43 vs. limit=10.0
+2024-08-03 19:49:38,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213458.66666666666, ans=0.125
+2024-08-03 19:49:39,127 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.114e+02 1.218e+02 1.319e+02 1.769e+02, threshold=2.437e+02, percent-clipped=0.0
+2024-08-03 19:49:47,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=213495.33333333334, ans=0.125
+2024-08-03 19:50:42,665 INFO [train.py:1114] (2/4) Epoch 17, batch 0, loss[loss=0.1597, simple_loss=0.25, pruned_loss=0.03468, over 13342.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.25, pruned_loss=0.03468, over 13342.00 frames. ], batch size: 33, lr: 7.43e-03, grad_scale: 32.0
+2024-08-03 19:50:42,665 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 19:50:47,621 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5521, 2.9315, 2.5752, 2.7430], device='cuda:2')
+2024-08-03 19:50:48,809 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.8594, 3.6141, 4.0344, 3.8641], device='cuda:2')
+2024-08-03 19:50:49,128 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0320, 3.4082, 3.4261, 1.7729], device='cuda:2')
+2024-08-03 19:50:52,770 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.17, simple_loss=0.2717, pruned_loss=0.03416, over 944034.00 frames.
+2024-08-03 19:50:52,770 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 19:51:13,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213642.0, ans=0.1
+2024-08-03 19:51:13,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=213642.0, ans=0.125
+2024-08-03 19:51:14,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=213642.0, ans=0.125
+2024-08-03 19:51:24,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.71 vs. limit=10.0
+2024-08-03 19:51:27,718 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=15.0
+2024-08-03 19:51:27,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.55 vs. limit=15.0
+2024-08-03 19:51:28,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=213678.66666666666, ans=0.0
+2024-08-03 19:51:40,076 INFO [train.py:1114] (2/4) Epoch 17, batch 50, loss[loss=0.1554, simple_loss=0.2468, pruned_loss=0.03202, over 13422.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2787, pruned_loss=0.05414, over 578532.51 frames. ], batch size: 32, lr: 7.43e-03, grad_scale: 32.0
+2024-08-03 19:51:40,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=213752.0, ans=0.5
+2024-08-03 19:51:49,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=213788.66666666666, ans=0.07
+2024-08-03 19:52:05,564 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.139e+01 1.157e+02 1.306e+02 1.728e+02 3.229e+02, threshold=2.612e+02, percent-clipped=8.0
+2024-08-03 19:52:12,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=213862.0, ans=0.125
+2024-08-03 19:52:13,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=213862.0, ans=0.125
+2024-08-03 19:52:25,590 INFO [train.py:1114] (2/4) Epoch 17, batch 100, loss[loss=0.1891, simple_loss=0.2766, pruned_loss=0.0508, over 13526.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2805, pruned_loss=0.05465, over 1026279.44 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0
+2024-08-03 19:52:26,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=213935.33333333334, ans=0.125
+2024-08-03 19:52:41,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213972.0, ans=0.125
+2024-08-03 19:52:43,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=213972.0, ans=0.0
+2024-08-03 19:53:00,692 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:53:13,864 INFO [train.py:1114] (2/4) Epoch 17, batch 150, loss[loss=0.1578, simple_loss=0.2399, pruned_loss=0.03784, over 13418.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2758, pruned_loss=0.0518, over 1387848.23 frames. ], batch size: 32, lr: 7.42e-03, grad_scale: 32.0
+2024-08-03 19:53:14,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214118.66666666666, ans=0.125
+2024-08-03 19:53:29,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.04 vs. limit=15.0
+2024-08-03 19:53:39,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.782e+01 1.100e+02 1.230e+02 1.473e+02 3.065e+02, threshold=2.460e+02, percent-clipped=1.0
+2024-08-03 19:53:51,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=214265.33333333334, ans=0.125
+2024-08-03 19:53:51,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=214265.33333333334, ans=0.125
+2024-08-03 19:53:55,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214265.33333333334, ans=0.125
+2024-08-03 19:53:58,933 INFO [train.py:1114] (2/4) Epoch 17, batch 200, loss[loss=0.206, simple_loss=0.2929, pruned_loss=0.05955, over 12429.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2744, pruned_loss=0.05134, over 1666677.50 frames. ], batch size: 58, lr: 7.42e-03, grad_scale: 32.0
+2024-08-03 19:53:59,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0
+2024-08-03 19:54:07,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214338.66666666666, ans=0.1
+2024-08-03 19:54:44,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214448.66666666666, ans=0.1
+2024-08-03 19:54:46,081 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:54:48,677 INFO [train.py:1114] (2/4) Epoch 17, batch 250, loss[loss=0.1858, simple_loss=0.2788, pruned_loss=0.0464, over 13295.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2748, pruned_loss=0.0511, over 1885257.34 frames. ], batch size: 46, lr: 7.42e-03, grad_scale: 32.0
+2024-08-03 19:54:49,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214485.33333333334, ans=0.1
+2024-08-03 19:54:56,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. limit=10.0
+2024-08-03 19:55:14,669 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.099e+02 1.340e+02 1.709e+02 3.717e+02, threshold=2.680e+02, percent-clipped=7.0
+2024-08-03 19:55:22,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=214595.33333333334, ans=0.05
+2024-08-03 19:55:24,847 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:55:33,714 INFO [train.py:1114] (2/4) Epoch 17, batch 300, loss[loss=0.1881, simple_loss=0.2794, pruned_loss=0.04838, over 13428.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2739, pruned_loss=0.05085, over 2051860.00 frames. ], batch size: 42, lr: 7.42e-03, grad_scale: 16.0
+2024-08-03 19:55:43,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=12.0
+2024-08-03 19:55:46,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=214705.33333333334, ans=0.025
+2024-08-03 19:55:46,947 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0
+2024-08-03 19:55:49,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=214705.33333333334, ans=0.0
+2024-08-03 19:56:02,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214778.66666666666, ans=0.125
+2024-08-03 19:56:06,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=214778.66666666666, ans=0.02
+2024-08-03 19:56:10,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.65 vs. limit=15.0
+2024-08-03 19:56:20,683 INFO [train.py:1114] (2/4) Epoch 17, batch 350, loss[loss=0.173, simple_loss=0.2545, pruned_loss=0.04578, over 13592.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2737, pruned_loss=0.05087, over 2182346.10 frames. ], batch size: 33, lr: 7.41e-03, grad_scale: 16.0
+2024-08-03 19:56:24,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=214852.0, ans=0.0
+2024-08-03 19:56:25,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=214852.0, ans=0.0
+2024-08-03 19:56:34,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214888.66666666666, ans=0.1
+2024-08-03 19:56:49,374 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.060e+01 1.097e+02 1.266e+02 1.426e+02 2.641e+02, threshold=2.532e+02, percent-clipped=0.0
+2024-08-03 19:56:56,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=214962.0, ans=0.0
+2024-08-03 19:57:08,522 INFO [train.py:1114] (2/4) Epoch 17, batch 400, loss[loss=0.1827, simple_loss=0.2717, pruned_loss=0.04688, over 13356.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.273, pruned_loss=0.05075, over 2286437.64 frames. ], batch size: 37, lr: 7.41e-03, grad_scale: 32.0
+2024-08-03 19:57:15,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=215035.33333333334, ans=0.0
+2024-08-03 19:57:17,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=215072.0, ans=0.0
+2024-08-03 19:57:29,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=215108.66666666666, ans=0.125
+2024-08-03 19:57:32,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=215108.66666666666, ans=0.0
+2024-08-03 19:57:32,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215108.66666666666, ans=0.125
+2024-08-03 19:57:37,000 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.38 vs. limit=10.0
+2024-08-03 19:57:44,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=215182.0, ans=0.0
+2024-08-03 19:57:54,025 INFO [train.py:1114] (2/4) Epoch 17, batch 450, loss[loss=0.1822, simple_loss=0.2759, pruned_loss=0.0443, over 13556.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2736, pruned_loss=0.05083, over 2359591.58 frames. ], batch size: 38, lr: 7.41e-03, grad_scale: 32.0
+2024-08-03 19:58:12,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=215255.33333333334, ans=0.0
+2024-08-03 19:58:23,995 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.110e+02 1.275e+02 1.603e+02 2.813e+02, threshold=2.549e+02, percent-clipped=2.0
+2024-08-03 19:58:25,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215328.66666666666, ans=0.1
+2024-08-03 19:58:41,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=215365.33333333334, ans=0.0
+2024-08-03 19:58:42,990 INFO [train.py:1114] (2/4) Epoch 17, batch 500, loss[loss=0.2123, simple_loss=0.2955, pruned_loss=0.0646, over 13429.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2729, pruned_loss=0.05059, over 2424920.43 frames. ], batch size: 43, lr: 7.40e-03, grad_scale: 32.0
+2024-08-03 19:58:44,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215402.0, ans=0.0
+2024-08-03 19:58:54,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=215438.66666666666, ans=0.0
+2024-08-03 19:59:13,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=215512.0, ans=0.125
+2024-08-03 19:59:23,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215548.66666666666, ans=0.125
+2024-08-03 19:59:27,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.63 vs. limit=15.0
+2024-08-03 19:59:28,225 INFO [train.py:1114] (2/4) Epoch 17, batch 550, loss[loss=0.2021, simple_loss=0.2886, pruned_loss=0.05779, over 13040.00 frames. ], tot_loss[loss=0.187, simple_loss=0.273, pruned_loss=0.05049, over 2468213.01 frames. ], batch size: 48, lr: 7.40e-03, grad_scale: 32.0
+2024-08-03 19:59:32,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215585.33333333334, ans=0.125
+2024-08-03 19:59:33,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215585.33333333334, ans=0.1
+2024-08-03 19:59:38,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.78 vs. limit=22.5
+2024-08-03 19:59:42,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=215622.0, ans=0.2
+2024-08-03 19:59:52,758 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 19:59:57,002 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.283e+01 1.155e+02 1.328e+02 1.624e+02 2.790e+02, threshold=2.656e+02, percent-clipped=3.0
+2024-08-03 20:00:18,067 INFO [train.py:1114] (2/4) Epoch 17, batch 600, loss[loss=0.2164, simple_loss=0.3027, pruned_loss=0.06505, over 13312.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2733, pruned_loss=0.05066, over 2508546.60 frames. ], batch size: 46, lr: 7.40e-03, grad_scale: 32.0
+2024-08-03 20:00:29,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.74 vs. limit=15.0
+2024-08-03 20:00:32,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215805.33333333334, ans=0.1
+2024-08-03 20:00:55,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=215915.33333333334, ans=0.0
+2024-08-03 20:01:02,895 INFO [train.py:1114] (2/4) Epoch 17, batch 650, loss[loss=0.1708, simple_loss=0.267, pruned_loss=0.03729, over 13545.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2731, pruned_loss=0.05049, over 2543690.55 frames. ], batch size: 37, lr: 7.39e-03, grad_scale: 32.0
+2024-08-03 20:01:04,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.56 vs. limit=22.5
+2024-08-03 20:01:14,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=215988.66666666666, ans=0.2
+2024-08-03 20:01:20,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=216025.33333333334, ans=0.0
+2024-08-03 20:01:22,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=216025.33333333334, ans=0.125
+2024-08-03 20:01:23,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.63 vs. limit=15.0
+2024-08-03 20:01:24,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=216025.33333333334, ans=0.125
+2024-08-03 20:01:26,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.85 vs. limit=15.0
+2024-08-03 20:01:29,732 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.069e+01 1.111e+02 1.221e+02 1.677e+02 3.173e+02, threshold=2.441e+02, percent-clipped=3.0
+2024-08-03 20:01:32,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.79 vs. limit=15.0
+2024-08-03 20:01:38,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=216062.0, ans=0.125
+2024-08-03 20:01:42,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=216098.66666666666, ans=0.125
+2024-08-03 20:01:52,172 INFO [train.py:1114] (2/4) Epoch 17, batch 700, loss[loss=0.1595, simple_loss=0.2526, pruned_loss=0.0332, over 13536.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2737, pruned_loss=0.05073, over 2565751.72 frames. ], batch size: 35, lr: 7.39e-03, grad_scale: 16.0
+2024-08-03 20:01:53,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=216135.33333333334, ans=0.09899494936611666
+2024-08-03 20:01:57,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=12.0
+2024-08-03 20:02:01,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=216172.0, ans=0.125
+2024-08-03 20:02:09,858 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0
+2024-08-03 20:02:17,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=216208.66666666666, ans=0.2
+2024-08-03 20:02:20,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=216245.33333333334, ans=0.0
+2024-08-03 20:02:31,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=216282.0, ans=0.07
+2024-08-03 20:02:34,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=216282.0, ans=0.2
+2024-08-03 20:02:37,431 INFO [train.py:1114] (2/4) Epoch 17, batch 750, loss[loss=0.189, simple_loss=0.2846, pruned_loss=0.04668, over 13352.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2725, pruned_loss=0.05015, over 2583037.30 frames. ], batch size: 37, lr: 7.39e-03, grad_scale: 16.0
+2024-08-03 20:02:38,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.31 vs. limit=10.0
+2024-08-03 20:02:42,305 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:02:48,597 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:02:49,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=216355.33333333334, ans=0.125
+2024-08-03 20:03:01,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216392.0, ans=0.1
+2024-08-03 20:03:05,011 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.455e+01 1.109e+02 1.251e+02 1.578e+02 2.500e+02, threshold=2.502e+02, percent-clipped=1.0
+2024-08-03 20:03:12,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.83 vs. limit=15.0
+2024-08-03 20:03:21,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=216465.33333333334, ans=0.0
+2024-08-03 20:03:23,144 INFO [train.py:1114] (2/4) Epoch 17, batch 800, loss[loss=0.1551, simple_loss=0.2421, pruned_loss=0.03406, over 13345.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2725, pruned_loss=0.05009, over 2597499.92 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 32.0
+2024-08-03 20:03:27,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.62 vs. limit=15.0
+2024-08-03 20:03:45,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=216575.33333333334, ans=0.125
+2024-08-03 20:04:01,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.51 vs. limit=15.0
+2024-08-03 20:04:13,152 INFO [train.py:1114] (2/4) Epoch 17, batch 850, loss[loss=0.1915, simple_loss=0.2852, pruned_loss=0.04888, over 13335.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2724, pruned_loss=0.05037, over 2609325.68 frames. ], batch size: 40, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:04:20,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=216685.33333333334, ans=0.125
+2024-08-03 20:04:25,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216722.0, ans=0.1
+2024-08-03 20:04:26,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216722.0, ans=0.125
+2024-08-03 20:04:34,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=216758.66666666666, ans=0.025
+2024-08-03 20:04:35,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=216758.66666666666, ans=0.125
+2024-08-03 20:04:40,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.726e+01 1.085e+02 1.274e+02 1.570e+02 2.707e+02, threshold=2.548e+02, percent-clipped=2.0
+2024-08-03 20:04:43,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=216795.33333333334, ans=0.125
+2024-08-03 20:04:46,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=216795.33333333334, ans=0.0
+2024-08-03 20:04:48,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=216832.0, ans=0.0
+2024-08-03 20:04:51,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=216832.0, ans=0.2
+2024-08-03 20:04:56,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216832.0, ans=0.1
+2024-08-03 20:04:58,403 INFO [train.py:1114] (2/4) Epoch 17, batch 900, loss[loss=0.1622, simple_loss=0.2456, pruned_loss=0.03943, over 13342.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.273, pruned_loss=0.05065, over 2612017.87 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:05:03,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=15.0
+2024-08-03 20:05:11,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216905.33333333334, ans=0.125
+2024-08-03 20:05:36,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=216978.66666666666, ans=0.125
+2024-08-03 20:05:43,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217015.33333333334, ans=0.125
+2024-08-03 20:05:47,232 INFO [train.py:1114] (2/4) Epoch 17, batch 950, loss[loss=0.1685, simple_loss=0.2569, pruned_loss=0.04005, over 13542.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.273, pruned_loss=0.05074, over 2613162.84 frames. ], batch size: 34, lr: 7.38e-03, grad_scale: 16.0
+2024-08-03 20:06:09,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.56 vs. limit=15.0
+2024-08-03 20:06:12,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=217125.33333333334, ans=0.2
+2024-08-03 20:06:13,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=217125.33333333334, ans=0.125
+2024-08-03 20:06:14,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=15.0
+2024-08-03 20:06:15,641 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.469e+01 1.104e+02 1.288e+02 1.565e+02 2.337e+02, threshold=2.575e+02, percent-clipped=0.0
+2024-08-03 20:06:20,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.67 vs. limit=8.0
+2024-08-03 20:06:23,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=217198.66666666666, ans=0.09899494936611666
+2024-08-03 20:06:25,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=217198.66666666666, ans=15.0
+2024-08-03 20:06:33,330 INFO [train.py:1114] (2/4) Epoch 17, batch 1000, loss[loss=0.1972, simple_loss=0.2881, pruned_loss=0.05318, over 13365.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2734, pruned_loss=0.0508, over 2611614.72 frames. ], batch size: 35, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:06:35,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=217235.33333333334, ans=0.2
+2024-08-03 20:06:56,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=217308.66666666666, ans=0.0
+2024-08-03 20:07:21,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=217382.0, ans=0.0
+2024-08-03 20:07:27,021 INFO [train.py:1114] (2/4) Epoch 17, batch 1050, loss[loss=0.1883, simple_loss=0.2756, pruned_loss=0.05046, over 13563.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2727, pruned_loss=0.05058, over 2616267.75 frames. ], batch size: 39, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:07:49,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=217492.0, ans=0.07
+2024-08-03 20:07:54,156 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:07:55,672 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.928e+01 1.069e+02 1.230e+02 1.488e+02 2.448e+02, threshold=2.459e+02, percent-clipped=0.0
+2024-08-03 20:08:12,127 INFO [train.py:1114] (2/4) Epoch 17, batch 1100, loss[loss=0.1765, simple_loss=0.2631, pruned_loss=0.04495, over 13563.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2729, pruned_loss=0.0507, over 2620223.75 frames. ], batch size: 36, lr: 7.37e-03, grad_scale: 8.0
+2024-08-03 20:08:30,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.24 vs. limit=10.0
+2024-08-03 20:08:33,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=217675.33333333334, ans=0.125
+2024-08-03 20:08:35,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217675.33333333334, ans=0.125
+2024-08-03 20:08:53,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=217748.66666666666, ans=0.0
+2024-08-03 20:08:59,105 INFO [train.py:1114] (2/4) Epoch 17, batch 1150, loss[loss=0.1793, simple_loss=0.2607, pruned_loss=0.04897, over 13551.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2731, pruned_loss=0.05074, over 2618731.66 frames. ], batch size: 36, lr: 7.36e-03, grad_scale: 8.0
+2024-08-03 20:09:03,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=217785.33333333334, ans=0.5
+2024-08-03 20:09:12,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=217822.0, ans=0.0
+2024-08-03 20:09:17,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217822.0, ans=0.1
+2024-08-03 20:09:24,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217858.66666666666, ans=0.125
+2024-08-03 20:09:25,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217858.66666666666, ans=0.125
+2024-08-03 20:09:27,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=217895.33333333334, ans=0.125
+2024-08-03 20:09:28,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=217895.33333333334, ans=0.0
+2024-08-03 20:09:29,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=217895.33333333334, ans=0.0
+2024-08-03 20:09:30,348 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.508e+01 1.111e+02 1.257e+02 1.521e+02 2.461e+02, threshold=2.515e+02, percent-clipped=1.0
+2024-08-03 20:09:46,439 INFO [train.py:1114] (2/4) Epoch 17, batch 1200, loss[loss=0.2071, simple_loss=0.2882, pruned_loss=0.06298, over 13572.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2736, pruned_loss=0.05075, over 2616089.75 frames. ], batch size: 39, lr: 7.36e-03, grad_scale: 16.0
+2024-08-03 20:09:50,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=217968.66666666666, ans=0.2
+2024-08-03 20:09:54,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.16 vs.
limit=8.0 +2024-08-03 20:10:21,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218078.66666666666, ans=0.125 +2024-08-03 20:10:23,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=218115.33333333334, ans=0.0 +2024-08-03 20:10:24,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=218115.33333333334, ans=0.125 +2024-08-03 20:10:29,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.72 vs. limit=15.0 +2024-08-03 20:10:32,172 INFO [train.py:1114] (2/4) Epoch 17, batch 1250, loss[loss=0.1984, simple_loss=0.287, pruned_loss=0.05487, over 13464.00 frames. ], tot_loss[loss=0.188, simple_loss=0.274, pruned_loss=0.05097, over 2628113.29 frames. ], batch size: 42, lr: 7.36e-03, grad_scale: 16.0 +2024-08-03 20:10:33,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=218152.0, ans=0.2 +2024-08-03 20:10:34,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=218152.0, ans=0.0 +2024-08-03 20:11:02,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218262.0, ans=0.1 +2024-08-03 20:11:05,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.654e+01 1.175e+02 1.463e+02 1.905e+02 2.984e+02, threshold=2.925e+02, percent-clipped=5.0 +2024-08-03 20:11:11,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=218298.66666666666, ans=0.2 +2024-08-03 20:11:15,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=218298.66666666666, ans=0.05 +2024-08-03 20:11:21,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218335.33333333334, ans=0.1 +2024-08-03 20:11:21,612 INFO [train.py:1114] (2/4) Epoch 17, batch 1300, loss[loss=0.1977, simple_loss=0.2861, pruned_loss=0.05467, over 12848.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2736, pruned_loss=0.05078, over 2632203.74 frames. ], batch size: 52, lr: 7.35e-03, grad_scale: 16.0 +2024-08-03 20:11:26,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.36 vs. limit=15.0 +2024-08-03 20:11:32,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=218372.0, ans=0.2 +2024-08-03 20:11:42,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.61 vs. limit=15.0 +2024-08-03 20:11:55,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=218445.33333333334, ans=0.125 +2024-08-03 20:12:01,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. 
limit=15.0 +2024-08-03 20:12:02,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=218482.0, ans=0.125 +2024-08-03 20:12:06,806 INFO [train.py:1114] (2/4) Epoch 17, batch 1350, loss[loss=0.1878, simple_loss=0.2754, pruned_loss=0.05016, over 13543.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2729, pruned_loss=0.05048, over 2638726.66 frames. ], batch size: 37, lr: 7.35e-03, grad_scale: 16.0 +2024-08-03 20:12:16,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218555.33333333334, ans=0.1 +2024-08-03 20:12:23,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.78 vs. limit=15.0 +2024-08-03 20:12:32,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=218592.0, ans=0.0 +2024-08-03 20:12:37,642 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.168e+02 1.323e+02 1.597e+02 2.527e+02, threshold=2.645e+02, percent-clipped=0.0 +2024-08-03 20:12:49,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.30 vs. limit=15.0 +2024-08-03 20:12:50,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=218665.33333333334, ans=0.125 +2024-08-03 20:12:55,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.12 vs. limit=15.0 +2024-08-03 20:12:56,124 INFO [train.py:1114] (2/4) Epoch 17, batch 1400, loss[loss=0.187, simple_loss=0.2627, pruned_loss=0.05565, over 13262.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2731, pruned_loss=0.05051, over 2642621.91 frames. ], batch size: 31, lr: 7.35e-03, grad_scale: 16.0 +2024-08-03 20:13:00,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=218702.0, ans=0.125 +2024-08-03 20:13:07,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=218738.66666666666, ans=0.125 +2024-08-03 20:13:41,298 INFO [train.py:1114] (2/4) Epoch 17, batch 1450, loss[loss=0.199, simple_loss=0.2811, pruned_loss=0.05846, over 13420.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2738, pruned_loss=0.05099, over 2641581.37 frames. 
], batch size: 43, lr: 7.34e-03, grad_scale: 16.0 +2024-08-03 20:13:41,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=218885.33333333334, ans=0.125 +2024-08-03 20:14:10,050 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.093e+02 1.261e+02 1.597e+02 2.531e+02, threshold=2.522e+02, percent-clipped=0.0 +2024-08-03 20:14:11,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218995.33333333334, ans=0.1 +2024-08-03 20:14:15,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=218995.33333333334, ans=0.125 +2024-08-03 20:14:26,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219032.0, ans=0.1 +2024-08-03 20:14:29,616 INFO [train.py:1114] (2/4) Epoch 17, batch 1500, loss[loss=0.1695, simple_loss=0.2645, pruned_loss=0.0372, over 13407.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2744, pruned_loss=0.05123, over 2641669.25 frames. ], batch size: 39, lr: 7.34e-03, grad_scale: 16.0 +2024-08-03 20:14:52,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0 +2024-08-03 20:14:54,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=219142.0, ans=0.2 +2024-08-03 20:14:56,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=219142.0, ans=0.125 +2024-08-03 20:14:58,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.54 vs. limit=22.5 +2024-08-03 20:15:08,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=219215.33333333334, ans=0.125 +2024-08-03 20:15:15,945 INFO [train.py:1114] (2/4) Epoch 17, batch 1550, loss[loss=0.2208, simple_loss=0.3059, pruned_loss=0.06787, over 13396.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2746, pruned_loss=0.05146, over 2631481.52 frames. ], batch size: 41, lr: 7.34e-03, grad_scale: 16.0 +2024-08-03 20:15:17,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=219252.0, ans=0.0 +2024-08-03 20:15:21,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.88 vs. 
limit=8.0 +2024-08-03 20:15:27,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=219288.66666666666, ans=0.0 +2024-08-03 20:15:37,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219325.33333333334, ans=0.1 +2024-08-03 20:15:37,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=219325.33333333334, ans=0.1 +2024-08-03 20:15:40,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=219325.33333333334, ans=0.125 +2024-08-03 20:15:45,317 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.134e+01 1.116e+02 1.285e+02 1.566e+02 3.410e+02, threshold=2.570e+02, percent-clipped=5.0 +2024-08-03 20:15:45,509 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:15:53,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=219398.66666666666, ans=0.125 +2024-08-03 20:16:03,418 INFO [train.py:1114] (2/4) Epoch 17, batch 1600, loss[loss=0.2219, simple_loss=0.3056, pruned_loss=0.0691, over 13569.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2746, pruned_loss=0.05168, over 2624251.73 frames. ], batch size: 39, lr: 7.34e-03, grad_scale: 32.0 +2024-08-03 20:16:06,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=219435.33333333334, ans=0.0 +2024-08-03 20:16:16,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219472.0, ans=0.0 +2024-08-03 20:16:20,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=219472.0, ans=0.07 +2024-08-03 20:16:27,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-08-03 20:16:27,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=219508.66666666666, ans=0.0 +2024-08-03 20:16:43,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219545.33333333334, ans=0.125 +2024-08-03 20:16:55,678 INFO [train.py:1114] (2/4) Epoch 17, batch 1650, loss[loss=0.1939, simple_loss=0.2873, pruned_loss=0.0502, over 13329.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2748, pruned_loss=0.05208, over 2620504.21 frames. 
], batch size: 40, lr: 7.33e-03, grad_scale: 32.0 +2024-08-03 20:16:59,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=219618.66666666666, ans=15.0 +2024-08-03 20:17:07,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=219655.33333333334, ans=0.025 +2024-08-03 20:17:13,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=219692.0, ans=0.0 +2024-08-03 20:17:24,617 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.842e+01 1.100e+02 1.247e+02 1.816e+02 3.503e+02, threshold=2.494e+02, percent-clipped=6.0 +2024-08-03 20:17:30,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219728.66666666666, ans=0.125 +2024-08-03 20:17:40,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219802.0, ans=0.125 +2024-08-03 20:17:41,086 INFO [train.py:1114] (2/4) Epoch 17, batch 1700, loss[loss=0.1793, simple_loss=0.2527, pruned_loss=0.05294, over 13257.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2743, pruned_loss=0.05174, over 2629747.98 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0 +2024-08-03 20:17:57,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219838.66666666666, ans=0.1 +2024-08-03 20:18:02,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=219875.33333333334, ans=0.025 +2024-08-03 20:18:07,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=219875.33333333334, ans=0.025 +2024-08-03 20:18:16,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=219912.0, ans=0.125 +2024-08-03 20:18:28,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0 +2024-08-03 20:18:29,577 INFO [train.py:1114] (2/4) Epoch 17, batch 1750, loss[loss=0.1743, simple_loss=0.2523, pruned_loss=0.04819, over 13530.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2733, pruned_loss=0.05112, over 2632901.98 frames. 
], batch size: 31, lr: 7.33e-03, grad_scale: 32.0 +2024-08-03 20:18:39,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=220022.0, ans=0.125 +2024-08-03 20:18:45,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=220022.0, ans=0.0 +2024-08-03 20:18:51,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220058.66666666666, ans=0.125 +2024-08-03 20:19:00,575 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.113e+02 1.270e+02 1.558e+02 2.524e+02, threshold=2.540e+02, percent-clipped=1.0 +2024-08-03 20:19:00,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=220095.33333333334, ans=0.0 +2024-08-03 20:19:05,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=220095.33333333334, ans=0.025 +2024-08-03 20:19:09,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=220132.0, ans=0.125 +2024-08-03 20:19:16,797 INFO [train.py:1114] (2/4) Epoch 17, batch 1800, loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04677, over 13537.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2739, pruned_loss=0.05133, over 2633828.90 frames. ], batch size: 38, lr: 7.32e-03, grad_scale: 32.0 +2024-08-03 20:19:27,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220205.33333333334, ans=0.125 +2024-08-03 20:19:49,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=220278.66666666666, ans=0.125 +2024-08-03 20:19:53,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=220278.66666666666, ans=0.0 +2024-08-03 20:20:06,474 INFO [train.py:1114] (2/4) Epoch 17, batch 1850, loss[loss=0.1906, simple_loss=0.2802, pruned_loss=0.05055, over 13391.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2731, pruned_loss=0.05076, over 2635554.69 frames. ], batch size: 39, lr: 7.32e-03, grad_scale: 32.0 +2024-08-03 20:20:17,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=220388.66666666666, ans=0.0 +2024-08-03 20:20:24,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=220425.33333333334, ans=0.0 +2024-08-03 20:20:35,728 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.176e+01 1.159e+02 1.677e+02 2.408e+02 3.560e+02, threshold=3.354e+02, percent-clipped=19.0 +2024-08-03 20:20:45,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=220498.66666666666, ans=0.125 +2024-08-03 20:20:51,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.49 vs. limit=12.0 +2024-08-03 20:20:52,110 INFO [train.py:1114] (2/4) Epoch 17, batch 1900, loss[loss=0.1658, simple_loss=0.2606, pruned_loss=0.03553, over 13343.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2739, pruned_loss=0.05113, over 2638718.44 frames. 
], batch size: 40, lr: 7.32e-03, grad_scale: 32.0 +2024-08-03 20:21:02,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=220572.0, ans=0.125 +2024-08-03 20:21:10,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=220608.66666666666, ans=0.125 +2024-08-03 20:21:24,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.75 vs. limit=15.0 +2024-08-03 20:21:38,840 INFO [train.py:1114] (2/4) Epoch 17, batch 1950, loss[loss=0.1824, simple_loss=0.2765, pruned_loss=0.04409, over 13563.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2748, pruned_loss=0.05131, over 2645741.13 frames. ], batch size: 36, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:21:45,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220718.66666666666, ans=0.125 +2024-08-03 20:21:53,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220755.33333333334, ans=0.1 +2024-08-03 20:22:00,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.52 vs. limit=15.0 +2024-08-03 20:22:10,356 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.292e+01 1.160e+02 1.302e+02 1.581e+02 2.993e+02, threshold=2.604e+02, percent-clipped=0.0 +2024-08-03 20:22:13,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220828.66666666666, ans=0.125 +2024-08-03 20:22:25,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=220865.33333333334, ans=0.125 +2024-08-03 20:22:26,602 INFO [train.py:1114] (2/4) Epoch 17, batch 2000, loss[loss=0.1668, simple_loss=0.2429, pruned_loss=0.04534, over 13531.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2744, pruned_loss=0.05116, over 2635296.03 frames. ], batch size: 31, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:22:27,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.72 vs. limit=15.0 +2024-08-03 20:22:28,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220902.0, ans=0.1 +2024-08-03 20:22:38,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220938.66666666666, ans=0.1 +2024-08-03 20:22:41,420 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:22:58,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=221012.0, ans=0.2 +2024-08-03 20:23:02,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=221048.66666666666, ans=0.2 +2024-08-03 20:23:14,197 INFO [train.py:1114] (2/4) Epoch 17, batch 2050, loss[loss=0.1807, simple_loss=0.2581, pruned_loss=0.05168, over 13433.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.274, pruned_loss=0.05123, over 2632868.34 frames. 
], batch size: 32, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:23:30,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=221122.0, ans=0.09899494936611666 +2024-08-03 20:23:45,004 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.513e+01 1.116e+02 1.230e+02 1.630e+02 2.618e+02, threshold=2.461e+02, percent-clipped=1.0 +2024-08-03 20:24:00,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=221268.66666666666, ans=0.125 +2024-08-03 20:24:01,282 INFO [train.py:1114] (2/4) Epoch 17, batch 2100, loss[loss=0.1818, simple_loss=0.2683, pruned_loss=0.04769, over 13529.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2733, pruned_loss=0.0509, over 2637517.47 frames. ], batch size: 37, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:24:16,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=221305.33333333334, ans=0.0 +2024-08-03 20:24:29,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=221378.66666666666, ans=0.09899494936611666 +2024-08-03 20:24:32,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=221378.66666666666, ans=0.0 +2024-08-03 20:24:35,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.32 vs. limit=15.0 +2024-08-03 20:24:43,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=221415.33333333334, ans=0.025 +2024-08-03 20:24:45,883 INFO [train.py:1114] (2/4) Epoch 17, batch 2150, loss[loss=0.1697, simple_loss=0.2546, pruned_loss=0.04241, over 13550.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2724, pruned_loss=0.05082, over 2646479.67 frames. ], batch size: 36, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:24:48,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=221452.0, ans=0.125 +2024-08-03 20:24:54,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=221488.66666666666, ans=0.125 +2024-08-03 20:25:16,727 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.666e+01 1.100e+02 1.262e+02 1.658e+02 2.819e+02, threshold=2.523e+02, percent-clipped=4.0 +2024-08-03 20:25:18,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-08-03 20:25:34,763 INFO [train.py:1114] (2/4) Epoch 17, batch 2200, loss[loss=0.2165, simple_loss=0.3063, pruned_loss=0.06339, over 13409.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2722, pruned_loss=0.05077, over 2644985.62 frames. 
], batch size: 39, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:25:45,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=221672.0, ans=0.125 +2024-08-03 20:25:51,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221708.66666666666, ans=0.1 +2024-08-03 20:25:59,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=221708.66666666666, ans=0.0 +2024-08-03 20:26:16,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=221782.0, ans=0.125 +2024-08-03 20:26:20,147 INFO [train.py:1114] (2/4) Epoch 17, batch 2250, loss[loss=0.1626, simple_loss=0.253, pruned_loss=0.03616, over 13359.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.272, pruned_loss=0.05047, over 2642118.23 frames. ], batch size: 37, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:26:28,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221855.33333333334, ans=0.1 +2024-08-03 20:26:29,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=221855.33333333334, ans=0.125 +2024-08-03 20:26:36,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.26 vs. limit=22.5 +2024-08-03 20:26:48,987 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.159e+02 1.389e+02 1.848e+02 3.074e+02, threshold=2.777e+02, percent-clipped=8.0 +2024-08-03 20:26:58,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=221965.33333333334, ans=0.0 +2024-08-03 20:26:58,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=221965.33333333334, ans=0.0 +2024-08-03 20:27:04,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=221965.33333333334, ans=0.125 +2024-08-03 20:27:09,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=222002.0, ans=0.125 +2024-08-03 20:27:10,465 INFO [train.py:1114] (2/4) Epoch 17, batch 2300, loss[loss=0.1817, simple_loss=0.2619, pruned_loss=0.05071, over 13554.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2712, pruned_loss=0.0502, over 2637923.09 frames. ], batch size: 33, lr: 7.29e-03, grad_scale: 32.0 +2024-08-03 20:27:12,892 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.25 vs. 
limit=15.0 +2024-08-03 20:27:13,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=222002.0, ans=0.125 +2024-08-03 20:27:32,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=222075.33333333334, ans=0.0 +2024-08-03 20:27:37,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222112.0, ans=0.125 +2024-08-03 20:27:44,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=222112.0, ans=0.0 +2024-08-03 20:27:49,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=222148.66666666666, ans=0.125 +2024-08-03 20:27:50,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.80 vs. limit=15.0 +2024-08-03 20:27:55,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=222185.33333333334, ans=0.0 +2024-08-03 20:27:56,081 INFO [train.py:1114] (2/4) Epoch 17, batch 2350, loss[loss=0.174, simple_loss=0.269, pruned_loss=0.03943, over 13547.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2708, pruned_loss=0.04961, over 2640891.62 frames. ], batch size: 38, lr: 7.29e-03, grad_scale: 16.0 +2024-08-03 20:28:01,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=222185.33333333334, ans=0.2 +2024-08-03 20:28:08,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=222222.0, ans=0.07 +2024-08-03 20:28:09,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=222222.0, ans=0.125 +2024-08-03 20:28:25,883 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.384e+01 1.095e+02 1.277e+02 1.611e+02 2.837e+02, threshold=2.555e+02, percent-clipped=1.0 +2024-08-03 20:28:32,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=222332.0, ans=0.2 +2024-08-03 20:28:43,086 INFO [train.py:1114] (2/4) Epoch 17, batch 2400, loss[loss=0.1545, simple_loss=0.2414, pruned_loss=0.03384, over 13541.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2719, pruned_loss=0.05008, over 2642055.27 frames. ], batch size: 35, lr: 7.29e-03, grad_scale: 32.0 +2024-08-03 20:29:01,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=222405.33333333334, ans=0.04949747468305833 +2024-08-03 20:29:04,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=222442.0, ans=0.125 +2024-08-03 20:29:05,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.06 vs. limit=12.0 +2024-08-03 20:29:08,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.43 vs. 
limit=10.0 +2024-08-03 20:29:25,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=222515.33333333334, ans=0.035 +2024-08-03 20:29:30,153 INFO [train.py:1114] (2/4) Epoch 17, batch 2450, loss[loss=0.1867, simple_loss=0.2806, pruned_loss=0.0464, over 13355.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2734, pruned_loss=0.05081, over 2632385.42 frames. ], batch size: 37, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:29:34,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=222552.0, ans=0.0 +2024-08-03 20:29:38,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=222588.66666666666, ans=0.2 +2024-08-03 20:29:39,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=222588.66666666666, ans=0.125 +2024-08-03 20:29:45,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=222588.66666666666, ans=0.07 +2024-08-03 20:29:49,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=222625.33333333334, ans=0.2 +2024-08-03 20:29:59,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.386e+01 1.131e+02 1.264e+02 1.537e+02 2.363e+02, threshold=2.529e+02, percent-clipped=0.0 +2024-08-03 20:30:01,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=222662.0, ans=0.125 +2024-08-03 20:30:15,151 INFO [train.py:1114] (2/4) Epoch 17, batch 2500, loss[loss=0.2103, simple_loss=0.2981, pruned_loss=0.06127, over 13397.00 frames. ], tot_loss[loss=0.188, simple_loss=0.274, pruned_loss=0.051, over 2636926.74 frames. ], batch size: 39, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:30:34,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=222808.66666666666, ans=0.0 +2024-08-03 20:30:34,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=222808.66666666666, ans=0.125 +2024-08-03 20:30:52,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=222882.0, ans=0.0 +2024-08-03 20:31:00,460 INFO [train.py:1114] (2/4) Epoch 17, batch 2550, loss[loss=0.1547, simple_loss=0.2264, pruned_loss=0.04155, over 13526.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2734, pruned_loss=0.05086, over 2638450.68 frames. 
], batch size: 31, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:31:07,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=222918.66666666666, ans=0.125 +2024-08-03 20:31:08,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=222918.66666666666, ans=0.0 +2024-08-03 20:31:14,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=222955.33333333334, ans=0.2 +2024-08-03 20:31:20,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222992.0, ans=0.1 +2024-08-03 20:31:24,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.62 vs. limit=22.5 +2024-08-03 20:31:29,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=223028.66666666666, ans=0.0 +2024-08-03 20:31:31,224 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.159e+02 1.553e+02 2.092e+02 3.686e+02, threshold=3.106e+02, percent-clipped=10.0 +2024-08-03 20:31:45,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=223102.0, ans=0.1 +2024-08-03 20:31:46,271 INFO [train.py:1114] (2/4) Epoch 17, batch 2600, loss[loss=0.1776, simple_loss=0.2677, pruned_loss=0.04375, over 13556.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2739, pruned_loss=0.051, over 2637722.11 frames. ], batch size: 36, lr: 7.28e-03, grad_scale: 32.0 +2024-08-03 20:31:50,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223102.0, ans=0.1 +2024-08-03 20:31:52,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=223102.0, ans=0.2 +2024-08-03 20:31:52,886 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=15.0 +2024-08-03 20:32:00,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223138.66666666666, ans=0.125 +2024-08-03 20:32:01,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=223138.66666666666, ans=0.0 +2024-08-03 20:32:03,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223175.33333333334, ans=0.0 +2024-08-03 20:32:05,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=223175.33333333334, ans=0.07 +2024-08-03 20:32:22,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.44 vs. limit=22.5 +2024-08-03 20:32:27,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223248.66666666666, ans=0.125 +2024-08-03 20:32:28,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.49 vs. 
limit=15.0 +2024-08-03 20:32:29,420 INFO [train.py:1114] (2/4) Epoch 17, batch 2650, loss[loss=0.2187, simple_loss=0.2985, pruned_loss=0.06944, over 13319.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2744, pruned_loss=0.05122, over 2640060.26 frames. ], batch size: 46, lr: 7.27e-03, grad_scale: 16.0 +2024-08-03 20:32:29,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-03 20:32:32,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=223285.33333333334, ans=0.125 +2024-08-03 20:32:43,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=223322.0, ans=0.2 +2024-08-03 20:32:47,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.55 vs. limit=15.0 +2024-08-03 20:32:58,626 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.512e+01 1.129e+02 1.471e+02 1.804e+02 3.189e+02, threshold=2.942e+02, percent-clipped=1.0 +2024-08-03 20:33:10,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223432.0, ans=0.125 +2024-08-03 20:33:12,558 INFO [train.py:1114] (2/4) Epoch 17, batch 2700, loss[loss=0.21, simple_loss=0.2966, pruned_loss=0.06175, over 13541.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2744, pruned_loss=0.0511, over 2637368.96 frames. ], batch size: 40, lr: 7.27e-03, grad_scale: 16.0 +2024-08-03 20:33:12,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223468.66666666666, ans=0.125 +2024-08-03 20:33:16,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=223468.66666666666, ans=0.125 +2024-08-03 20:33:31,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=223542.0, ans=0.035 +2024-08-03 20:33:32,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=223542.0, ans=0.0 +2024-08-03 20:33:34,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223542.0, ans=0.1 +2024-08-03 20:33:37,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=22.5 +2024-08-03 20:33:40,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223578.66666666666, ans=0.1 +2024-08-03 20:33:56,038 INFO [train.py:1114] (2/4) Epoch 17, batch 2750, loss[loss=0.1824, simple_loss=0.2613, pruned_loss=0.05169, over 13325.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2732, pruned_loss=0.0507, over 2635767.98 frames. 
], batch size: 34, lr: 7.27e-03, grad_scale: 16.0 +2024-08-03 20:34:03,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=223652.0, ans=0.04949747468305833 +2024-08-03 20:34:04,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=223688.66666666666, ans=0.035 +2024-08-03 20:34:05,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. limit=10.0 +2024-08-03 20:34:18,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=223725.33333333334, ans=0.125 +2024-08-03 20:34:26,195 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.377e+01 1.115e+02 1.294e+02 1.597e+02 2.305e+02, threshold=2.588e+02, percent-clipped=0.0 +2024-08-03 20:34:40,274 INFO [train.py:1114] (2/4) Epoch 17, batch 2800, loss[loss=0.2204, simple_loss=0.3007, pruned_loss=0.07011, over 9219.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2738, pruned_loss=0.05114, over 2627391.76 frames. ], batch size: 97, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:34:41,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=223835.33333333334, ans=0.2 +2024-08-03 20:34:46,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=223835.33333333334, ans=0.125 +2024-08-03 20:34:56,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.53 vs. limit=15.0 +2024-08-03 20:35:07,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=223945.33333333334, ans=0.025 +2024-08-03 20:35:25,233 INFO [train.py:1114] (2/4) Epoch 17, batch 2850, loss[loss=0.1822, simple_loss=0.2686, pruned_loss=0.04791, over 13357.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2744, pruned_loss=0.05169, over 2621092.69 frames. ], batch size: 35, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:35:26,281 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:35:41,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=224055.33333333334, ans=0.125 +2024-08-03 20:35:49,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.28 vs. limit=22.5 +2024-08-03 20:35:54,814 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.205e+02 1.427e+02 1.924e+02 3.362e+02, threshold=2.855e+02, percent-clipped=10.0 +2024-08-03 20:35:57,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0 +2024-08-03 20:35:57,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.14 vs. limit=10.0 +2024-08-03 20:36:07,134 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.80 vs. 
limit=15.0 +2024-08-03 20:36:10,086 INFO [train.py:1114] (2/4) Epoch 17, batch 2900, loss[loss=0.1682, simple_loss=0.2527, pruned_loss=0.0419, over 13363.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2744, pruned_loss=0.05125, over 2631723.65 frames. ], batch size: 36, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:36:53,523 INFO [train.py:1114] (2/4) Epoch 17, batch 2950, loss[loss=0.163, simple_loss=0.2434, pruned_loss=0.04128, over 13342.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2735, pruned_loss=0.05103, over 2630593.82 frames. ], batch size: 34, lr: 7.26e-03, grad_scale: 32.0 +2024-08-03 20:37:05,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=224422.0, ans=0.125 +2024-08-03 20:37:06,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=224422.0, ans=0.125 +2024-08-03 20:37:08,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224422.0, ans=0.125 +2024-08-03 20:37:11,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=224458.66666666666, ans=0.2 +2024-08-03 20:37:19,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224495.33333333334, ans=0.125 +2024-08-03 20:37:23,420 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.109e+02 1.313e+02 1.570e+02 2.324e+02, threshold=2.625e+02, percent-clipped=1.0 +2024-08-03 20:37:23,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224495.33333333334, ans=0.125 +2024-08-03 20:37:37,367 INFO [train.py:1114] (2/4) Epoch 17, batch 3000, loss[loss=0.1876, simple_loss=0.2743, pruned_loss=0.05051, over 13544.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2731, pruned_loss=0.05064, over 2631096.89 frames. ], batch size: 37, lr: 7.25e-03, grad_scale: 32.0 +2024-08-03 20:37:37,675 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 20:37:45,238 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.4983, 3.8256, 3.5775, 3.8801], device='cuda:2') +2024-08-03 20:37:47,663 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.1723, simple_loss=0.2712, pruned_loss=0.03676, over 944034.00 frames. +2024-08-03 20:37:47,975 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 20:38:03,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=224605.33333333334, ans=0.0 +2024-08-03 20:38:12,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=224642.0, ans=0.2 +2024-08-03 20:38:15,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224678.66666666666, ans=0.125 +2024-08-03 20:38:16,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.41 vs. 
limit=10.0 +2024-08-03 20:38:25,545 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-08-03 20:38:25,991 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:38:31,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0 +2024-08-03 20:38:33,104 INFO [train.py:1114] (2/4) Epoch 17, batch 3050, loss[loss=0.1541, simple_loss=0.2461, pruned_loss=0.03103, over 13553.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2741, pruned_loss=0.05085, over 2627583.62 frames. ], batch size: 35, lr: 7.25e-03, grad_scale: 32.0 +2024-08-03 20:38:43,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=224788.66666666666, ans=0.025 +2024-08-03 20:38:46,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=224788.66666666666, ans=0.0 +2024-08-03 20:38:48,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=224788.66666666666, ans=0.125 +2024-08-03 20:38:51,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=224825.33333333334, ans=0.2 +2024-08-03 20:39:04,755 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.103e+02 1.242e+02 1.449e+02 2.712e+02, threshold=2.483e+02, percent-clipped=1.0 +2024-08-03 20:39:10,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=224898.66666666666, ans=0.125 +2024-08-03 20:39:18,549 INFO [train.py:1114] (2/4) Epoch 17, batch 3100, loss[loss=0.2136, simple_loss=0.3045, pruned_loss=0.06134, over 13340.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2745, pruned_loss=0.05123, over 2627457.24 frames. ], batch size: 46, lr: 7.25e-03, grad_scale: 32.0 +2024-08-03 20:39:33,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.18 vs. limit=15.0 +2024-08-03 20:39:36,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.37 vs. limit=15.0 +2024-08-03 20:39:39,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=225008.66666666666, ans=0.05 +2024-08-03 20:40:01,364 INFO [train.py:1114] (2/4) Epoch 17, batch 3150, loss[loss=0.2041, simple_loss=0.2922, pruned_loss=0.05798, over 13038.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2744, pruned_loss=0.05114, over 2628879.21 frames. ], batch size: 48, lr: 7.24e-03, grad_scale: 32.0 +2024-08-03 20:40:02,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.21 vs. 
limit=12.0 +2024-08-03 20:40:17,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225155.33333333334, ans=0.1 +2024-08-03 20:40:30,637 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.117e+02 1.300e+02 1.745e+02 2.777e+02, threshold=2.600e+02, percent-clipped=1.0 +2024-08-03 20:40:44,312 INFO [train.py:1114] (2/4) Epoch 17, batch 3200, loss[loss=0.1975, simple_loss=0.2829, pruned_loss=0.056, over 13562.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.273, pruned_loss=0.05026, over 2635157.50 frames. ], batch size: 37, lr: 7.24e-03, grad_scale: 32.0 +2024-08-03 20:40:53,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=225338.66666666666, ans=0.0 +2024-08-03 20:41:06,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=225375.33333333334, ans=0.125 +2024-08-03 20:41:10,583 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=15.0 +2024-08-03 20:41:14,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225412.0, ans=0.1 +2024-08-03 20:41:17,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=12.0 +2024-08-03 20:41:27,808 INFO [train.py:1114] (2/4) Epoch 17, batch 3250, loss[loss=0.2051, simple_loss=0.2902, pruned_loss=0.05998, over 13393.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2733, pruned_loss=0.05008, over 2639639.12 frames. ], batch size: 38, lr: 7.24e-03, grad_scale: 32.0 +2024-08-03 20:41:37,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=225522.0, ans=0.2 +2024-08-03 20:41:44,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=225558.66666666666, ans=0.125 +2024-08-03 20:41:46,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=225558.66666666666, ans=0.2 +2024-08-03 20:41:50,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.38 vs. limit=15.0 +2024-08-03 20:41:57,624 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.641e+01 1.154e+02 1.402e+02 1.667e+02 2.489e+02, threshold=2.804e+02, percent-clipped=0.0 +2024-08-03 20:42:11,279 INFO [train.py:1114] (2/4) Epoch 17, batch 3300, loss[loss=0.2142, simple_loss=0.2958, pruned_loss=0.06628, over 12875.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2721, pruned_loss=0.04965, over 2640438.44 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:42:30,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=225742.0, ans=0.2 +2024-08-03 20:42:53,697 INFO [train.py:1114] (2/4) Epoch 17, batch 3350, loss[loss=0.1919, simple_loss=0.2854, pruned_loss=0.0492, over 13016.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2743, pruned_loss=0.05095, over 2629403.42 frames. 
], batch size: 48, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:42:55,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=225852.0, ans=0.125 +2024-08-03 20:43:10,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-08-03 20:43:14,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=225925.33333333334, ans=0.07 +2024-08-03 20:43:19,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=225962.0, ans=0.0 +2024-08-03 20:43:22,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=225962.0, ans=0.0 +2024-08-03 20:43:22,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.128e+02 1.245e+02 1.447e+02 2.027e+02, threshold=2.490e+02, percent-clipped=0.0 +2024-08-03 20:43:36,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=226035.33333333334, ans=0.0 +2024-08-03 20:43:36,785 INFO [train.py:1114] (2/4) Epoch 17, batch 3400, loss[loss=0.1789, simple_loss=0.2575, pruned_loss=0.05013, over 13531.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2737, pruned_loss=0.0509, over 2625329.79 frames. ], batch size: 31, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:43:41,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=226035.33333333334, ans=0.2 +2024-08-03 20:43:42,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=226035.33333333334, ans=0.125 +2024-08-03 20:43:44,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=226072.0, ans=0.0 +2024-08-03 20:43:47,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=226072.0, ans=0.025 +2024-08-03 20:43:59,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.93 vs. limit=15.0 +2024-08-03 20:44:13,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.98 vs. limit=15.0 +2024-08-03 20:44:20,074 INFO [train.py:1114] (2/4) Epoch 17, batch 3450, loss[loss=0.2072, simple_loss=0.2909, pruned_loss=0.06177, over 12952.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2733, pruned_loss=0.0507, over 2628241.02 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0 +2024-08-03 20:44:20,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.92 vs. limit=15.0 +2024-08-03 20:44:28,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=15.0 +2024-08-03 20:44:33,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.01 vs. 
limit=22.5 +2024-08-03 20:44:49,799 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.757e+01 1.151e+02 1.446e+02 1.763e+02 3.368e+02, threshold=2.892e+02, percent-clipped=3.0 +2024-08-03 20:44:56,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.64 vs. limit=15.0 +2024-08-03 20:44:59,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=226365.33333333334, ans=0.125 +2024-08-03 20:45:02,688 INFO [train.py:1114] (2/4) Epoch 17, batch 3500, loss[loss=0.167, simple_loss=0.2506, pruned_loss=0.04175, over 13547.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2727, pruned_loss=0.05103, over 2630358.67 frames. ], batch size: 34, lr: 7.22e-03, grad_scale: 16.0 +2024-08-03 20:45:03,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=226402.0, ans=0.125 +2024-08-03 20:45:27,388 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.455e-03 +2024-08-03 20:45:34,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=226512.0, ans=0.025 +2024-08-03 20:45:45,725 INFO [train.py:1114] (2/4) Epoch 17, batch 3550, loss[loss=0.2121, simple_loss=0.2966, pruned_loss=0.06373, over 12632.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2752, pruned_loss=0.05203, over 2628970.99 frames. ], batch size: 58, lr: 7.22e-03, grad_scale: 16.0 +2024-08-03 20:45:47,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=226585.33333333334, ans=0.125 +2024-08-03 20:45:48,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=226585.33333333334, ans=0.125 +2024-08-03 20:45:57,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=226622.0, ans=0.2 +2024-08-03 20:45:59,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226622.0, ans=0.0 +2024-08-03 20:46:04,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226658.66666666666, ans=0.1 +2024-08-03 20:46:10,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226658.66666666666, ans=0.125 +2024-08-03 20:46:11,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.45 vs. limit=12.0 +2024-08-03 20:46:12,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.78 vs. 
limit=15.0 +2024-08-03 20:46:15,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=226695.33333333334, ans=0.025 +2024-08-03 20:46:16,484 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:46:17,977 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.203e+02 1.370e+02 1.580e+02 2.866e+02, threshold=2.739e+02, percent-clipped=0.0 +2024-08-03 20:46:18,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=226695.33333333334, ans=0.125 +2024-08-03 20:46:20,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=226695.33333333334, ans=0.1 +2024-08-03 20:46:25,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=226732.0, ans=0.125 +2024-08-03 20:46:31,220 INFO [train.py:1114] (2/4) Epoch 17, batch 3600, loss[loss=0.2289, simple_loss=0.3016, pruned_loss=0.07807, over 8965.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2789, pruned_loss=0.05535, over 2487755.95 frames. ], batch size: 96, lr: 7.22e-03, grad_scale: 32.0 +2024-08-03 20:46:41,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226805.33333333334, ans=0.1 +2024-08-03 20:46:49,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=226842.0, ans=0.0 +2024-08-03 20:47:03,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=226878.66666666666, ans=0.0 +2024-08-03 20:47:48,370 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.1676, simple_loss=0.25, pruned_loss=0.04262, over 13355.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.25, pruned_loss=0.04262, over 13355.00 frames. ], batch size: 33, lr: 7.01e-03, grad_scale: 32.0 +2024-08-03 20:47:48,370 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 20:47:53,839 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2561, 2.6675, 2.4798, 2.4570], device='cuda:2') +2024-08-03 20:47:58,988 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1737, simple_loss=0.274, pruned_loss=0.03673, over 944034.00 frames. +2024-08-03 20:47:58,989 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 20:48:02,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226915.33333333334, ans=0.125 +2024-08-03 20:48:18,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.42 vs. limit=15.0 +2024-08-03 20:48:21,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=226988.66666666666, ans=0.0 +2024-08-03 20:48:39,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. 
limit=12.0 +2024-08-03 20:48:40,349 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.175e+01 1.169e+02 1.268e+02 1.393e+02 2.818e+02, threshold=2.535e+02, percent-clipped=2.0 +2024-08-03 20:48:41,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=227062.0, ans=0.0 +2024-08-03 20:48:41,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=227062.0, ans=0.125 +2024-08-03 20:48:43,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227062.0, ans=0.1 +2024-08-03 20:48:44,900 INFO [train.py:1114] (2/4) Epoch 18, batch 50, loss[loss=0.1576, simple_loss=0.2395, pruned_loss=0.03783, over 13404.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2727, pruned_loss=0.04992, over 577805.09 frames. ], batch size: 32, lr: 7.01e-03, grad_scale: 32.0 +2024-08-03 20:48:51,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=227098.66666666666, ans=0.0 +2024-08-03 20:49:13,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.42 vs. limit=12.0 +2024-08-03 20:49:31,409 INFO [train.py:1114] (2/4) Epoch 18, batch 100, loss[loss=0.1886, simple_loss=0.2716, pruned_loss=0.05273, over 13524.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2746, pruned_loss=0.05012, over 1025290.81 frames. ], batch size: 35, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:49:55,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.71 vs. limit=15.0 +2024-08-03 20:49:56,380 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:50:04,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227392.0, ans=0.125 +2024-08-03 20:50:05,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227392.0, ans=0.125 +2024-08-03 20:50:08,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=227392.0, ans=0.2 +2024-08-03 20:50:11,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227428.66666666666, ans=0.1 +2024-08-03 20:50:16,428 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.987e+01 1.102e+02 1.290e+02 1.676e+02 3.343e+02, threshold=2.579e+02, percent-clipped=6.0 +2024-08-03 20:50:18,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=227428.66666666666, ans=0.0 +2024-08-03 20:50:20,794 INFO [train.py:1114] (2/4) Epoch 18, batch 150, loss[loss=0.1729, simple_loss=0.2515, pruned_loss=0.04711, over 13429.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2714, pruned_loss=0.04871, over 1386664.77 frames. 
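[Editor's note] The `Computing validation loss` / `Epoch 18, validation: loss=...` lines above show a held-out pass at the start of each epoch whose losses are normalized by total frame count ("over 944034.00 frames"). A hedged sketch of that step; the model and dataloader interfaces (`batch["features"]`, a model returning `(loss, num_frames)`) are assumptions for illustration:

```python
import torch

def compute_validation_loss(model, valid_loader, device) -> float:
    was_training = model.training
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            feats = batch["features"].to(device)
            loss, num_frames = model(feats)  # assumed interface
            tot_loss += loss.item()
            tot_frames += num_frames
    if was_training:
        model.train()  # restore training mode before the next batch
    return tot_loss / tot_frames  # frame-normalized, as in the log line
```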
], batch size: 32, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:50:26,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227465.33333333334, ans=0.1 +2024-08-03 20:50:31,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=227502.0, ans=0.0 +2024-08-03 20:50:34,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=227502.0, ans=0.125 +2024-08-03 20:50:41,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=227538.66666666666, ans=0.0 +2024-08-03 20:50:41,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=15.0 +2024-08-03 20:50:42,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=227538.66666666666, ans=0.2 +2024-08-03 20:50:45,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227538.66666666666, ans=0.1 +2024-08-03 20:51:01,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=227612.0, ans=10.0 +2024-08-03 20:51:02,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.19 vs. limit=22.5 +2024-08-03 20:51:07,693 INFO [train.py:1114] (2/4) Epoch 18, batch 200, loss[loss=0.1878, simple_loss=0.2827, pruned_loss=0.0465, over 12507.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2708, pruned_loss=0.04826, over 1665582.35 frames. ], batch size: 58, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:51:14,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227648.66666666666, ans=0.125 +2024-08-03 20:51:18,014 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.75 vs. limit=15.0 +2024-08-03 20:51:32,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.42 vs. limit=22.5 +2024-08-03 20:51:38,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227758.66666666666, ans=0.125 +2024-08-03 20:51:38,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.53 vs. limit=15.0 +2024-08-03 20:51:41,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=227758.66666666666, ans=0.0 +2024-08-03 20:51:48,063 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.522e+01 1.097e+02 1.244e+02 1.547e+02 2.709e+02, threshold=2.488e+02, percent-clipped=2.0 +2024-08-03 20:51:52,896 INFO [train.py:1114] (2/4) Epoch 18, batch 250, loss[loss=0.1982, simple_loss=0.2771, pruned_loss=0.05963, over 13275.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2711, pruned_loss=0.04853, over 1884519.29 frames. 
], batch size: 46, lr: 7.00e-03, grad_scale: 32.0 +2024-08-03 20:52:00,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=227832.0, ans=0.0 +2024-08-03 20:52:04,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227868.66666666666, ans=0.0 +2024-08-03 20:52:24,050 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:52:24,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=227942.0, ans=0.125 +2024-08-03 20:52:35,902 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:52:38,230 INFO [train.py:1114] (2/4) Epoch 18, batch 300, loss[loss=0.1984, simple_loss=0.2878, pruned_loss=0.05448, over 13465.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2712, pruned_loss=0.04917, over 2051888.14 frames. ], batch size: 42, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:52:43,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.73 vs. limit=10.0 +2024-08-03 20:52:46,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228052.0, ans=0.1 +2024-08-03 20:52:47,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228052.0, ans=0.125 +2024-08-03 20:52:56,783 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:53:01,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.13 vs. limit=15.0 +2024-08-03 20:53:22,562 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.234e+01 1.098e+02 1.173e+02 1.552e+02 2.222e+02, threshold=2.347e+02, percent-clipped=0.0 +2024-08-03 20:53:23,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=228162.0, ans=0.125 +2024-08-03 20:53:27,234 INFO [train.py:1114] (2/4) Epoch 18, batch 350, loss[loss=0.1429, simple_loss=0.2256, pruned_loss=0.03015, over 13587.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2714, pruned_loss=0.0491, over 2182395.36 frames. ], batch size: 33, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:53:53,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=228272.0, ans=0.0 +2024-08-03 20:54:01,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=228308.66666666666, ans=0.2 +2024-08-03 20:54:11,803 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.33 vs. limit=22.5 +2024-08-03 20:54:16,768 INFO [train.py:1114] (2/4) Epoch 18, batch 400, loss[loss=0.1977, simple_loss=0.2821, pruned_loss=0.0567, over 13354.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2714, pruned_loss=0.04954, over 2285857.58 frames. 
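[Editor's note] The many `ScheduledFloat: name=..., batch_count=..., ans=...` entries above report hyperparameters (skip rates, dropout probabilities, balancer probabilities) whose values are functions of `batch_count` rather than constants. A sketch of a piecewise-linear schedule with that behavior, assuming nothing about the recipe's actual breakpoints (the knot values below are made up):

```python
import bisect

class ScheduledFloat:
    """Linearly interpolate a float between (batch_count, value) knots."""
    def __init__(self, *knots):
        self.xs = [x for x, _ in knots]
        self.ys = [y for _, y in knots]

    def value(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)

# e.g. a skip rate decaying from 0.3 to 0.1 over the first 20k batches:
sched = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(sched.value(224788.67))  # past the last knot -> 0.1
```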
], batch size: 37, lr: 6.99e-03, grad_scale: 32.0 +2024-08-03 20:54:20,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=228382.0, ans=0.125 +2024-08-03 20:54:48,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=228492.0, ans=0.125 +2024-08-03 20:54:53,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=228528.66666666666, ans=0.0 +2024-08-03 20:54:57,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.985e+01 1.132e+02 1.280e+02 1.629e+02 3.189e+02, threshold=2.560e+02, percent-clipped=4.0 +2024-08-03 20:55:01,715 INFO [train.py:1114] (2/4) Epoch 18, batch 450, loss[loss=0.1969, simple_loss=0.2886, pruned_loss=0.05257, over 13565.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2719, pruned_loss=0.04983, over 2359048.53 frames. ], batch size: 38, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:55:06,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=228565.33333333334, ans=0.0 +2024-08-03 20:55:11,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.65 vs. limit=15.0 +2024-08-03 20:55:15,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228602.0, ans=0.1 +2024-08-03 20:55:16,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=228602.0, ans=0.125 +2024-08-03 20:55:30,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.07 vs. limit=15.0 +2024-08-03 20:55:40,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=228712.0, ans=0.125 +2024-08-03 20:55:46,660 INFO [train.py:1114] (2/4) Epoch 18, batch 500, loss[loss=0.2184, simple_loss=0.2997, pruned_loss=0.06855, over 13444.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2707, pruned_loss=0.04921, over 2425366.60 frames. 
], batch size: 43, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:55:52,513 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:55:55,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228785.33333333334, ans=0.125 +2024-08-03 20:55:58,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=228785.33333333334, ans=0.0 +2024-08-03 20:56:02,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228785.33333333334, ans=0.125 +2024-08-03 20:56:05,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=228822.0, ans=0.125 +2024-08-03 20:56:07,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=228822.0, ans=0.125 +2024-08-03 20:56:27,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.074e+02 1.245e+02 1.559e+02 2.675e+02, threshold=2.490e+02, percent-clipped=1.0 +2024-08-03 20:56:32,411 INFO [train.py:1114] (2/4) Epoch 18, batch 550, loss[loss=0.1959, simple_loss=0.2922, pruned_loss=0.04978, over 13079.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.271, pruned_loss=0.04905, over 2467482.85 frames. ], batch size: 48, lr: 6.98e-03, grad_scale: 32.0 +2024-08-03 20:56:34,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=228932.0, ans=0.0 +2024-08-03 20:56:36,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=228932.0, ans=0.2 +2024-08-03 20:56:52,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228968.66666666666, ans=0.125 +2024-08-03 20:56:55,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229005.33333333334, ans=0.125 +2024-08-03 20:57:17,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=229078.66666666666, ans=0.125 +2024-08-03 20:57:21,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229078.66666666666, ans=0.0 +2024-08-03 20:57:23,270 INFO [train.py:1114] (2/4) Epoch 18, batch 600, loss[loss=0.1921, simple_loss=0.2818, pruned_loss=0.05119, over 13359.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2716, pruned_loss=0.04928, over 2507533.45 frames. ], batch size: 46, lr: 6.98e-03, grad_scale: 16.0 +2024-08-03 20:57:33,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=229152.0, ans=0.125 +2024-08-03 20:57:42,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.00 vs. 
limit=15.0 +2024-08-03 20:57:51,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229225.33333333334, ans=0.1 +2024-08-03 20:58:04,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.66 vs. limit=10.0 +2024-08-03 20:58:05,828 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.114e+02 1.293e+02 1.855e+02 3.099e+02, threshold=2.585e+02, percent-clipped=2.0 +2024-08-03 20:58:07,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=12.0 +2024-08-03 20:58:09,361 INFO [train.py:1114] (2/4) Epoch 18, batch 650, loss[loss=0.1867, simple_loss=0.2805, pruned_loss=0.0465, over 13539.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2708, pruned_loss=0.04887, over 2543064.35 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:58:22,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=229335.33333333334, ans=0.2 +2024-08-03 20:58:26,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=229372.0, ans=0.0 +2024-08-03 20:58:38,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=229408.66666666666, ans=0.125 +2024-08-03 20:58:42,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=229408.66666666666, ans=0.05 +2024-08-03 20:58:43,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229408.66666666666, ans=0.125 +2024-08-03 20:58:54,986 INFO [train.py:1114] (2/4) Epoch 18, batch 700, loss[loss=0.1723, simple_loss=0.2613, pruned_loss=0.04166, over 13516.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.271, pruned_loss=0.04867, over 2565141.78 frames. ], batch size: 35, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:58:59,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229482.0, ans=0.125 +2024-08-03 20:59:15,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229555.33333333334, ans=0.125 +2024-08-03 20:59:24,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=229555.33333333334, ans=0.04949747468305833 +2024-08-03 20:59:37,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.59 vs. limit=15.0 +2024-08-03 20:59:37,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.27 vs. 
limit=6.0 +2024-08-03 20:59:41,523 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.096e+02 1.191e+02 1.436e+02 2.621e+02, threshold=2.382e+02, percent-clipped=1.0 +2024-08-03 20:59:42,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=229628.66666666666, ans=0.125 +2024-08-03 20:59:45,181 INFO [train.py:1114] (2/4) Epoch 18, batch 750, loss[loss=0.1841, simple_loss=0.2741, pruned_loss=0.04703, over 13368.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.27, pruned_loss=0.04824, over 2582762.15 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0 +2024-08-03 20:59:47,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=229665.33333333334, ans=0.2 +2024-08-03 20:59:53,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=229702.0, ans=0.0 +2024-08-03 20:59:58,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=229702.0, ans=0.015 +2024-08-03 21:00:10,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=229738.66666666666, ans=0.0 +2024-08-03 21:00:12,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.74 vs. limit=15.0 +2024-08-03 21:00:13,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229775.33333333334, ans=0.125 +2024-08-03 21:00:17,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.94 vs. limit=15.0 +2024-08-03 21:00:19,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229775.33333333334, ans=0.125 +2024-08-03 21:00:25,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=229812.0, ans=0.125 +2024-08-03 21:00:25,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229812.0, ans=0.125 +2024-08-03 21:00:28,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229812.0, ans=0.125 +2024-08-03 21:00:30,240 INFO [train.py:1114] (2/4) Epoch 18, batch 800, loss[loss=0.1608, simple_loss=0.2476, pruned_loss=0.03705, over 13318.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2697, pruned_loss=0.04811, over 2596363.56 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 32.0 +2024-08-03 21:00:36,966 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.77 vs. 
limit=10.0 +2024-08-03 21:00:54,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229885.33333333334, ans=0.1 +2024-08-03 21:00:57,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=229885.33333333334, ans=0.125 +2024-08-03 21:01:14,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0 +2024-08-03 21:01:24,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=229995.33333333334, ans=0.0 +2024-08-03 21:01:25,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=229995.33333333334, ans=0.0 +2024-08-03 21:01:28,070 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.223e+01 1.040e+02 1.288e+02 1.609e+02 2.437e+02, threshold=2.577e+02, percent-clipped=2.0 +2024-08-03 21:01:30,767 INFO [train.py:1114] (2/4) Epoch 18, batch 850, loss[loss=0.1868, simple_loss=0.2903, pruned_loss=0.04164, over 13333.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.27, pruned_loss=0.04826, over 2609443.81 frames. ], batch size: 40, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:01:34,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230032.0, ans=0.1 +2024-08-03 21:01:34,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=230032.0, ans=0.0 +2024-08-03 21:02:00,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=230142.0, ans=0.0 +2024-08-03 21:02:10,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230178.66666666666, ans=0.125 +2024-08-03 21:02:15,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.76 vs. limit=22.5 +2024-08-03 21:02:15,876 INFO [train.py:1114] (2/4) Epoch 18, batch 900, loss[loss=0.1758, simple_loss=0.2556, pruned_loss=0.04795, over 13350.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2701, pruned_loss=0.04843, over 2612083.89 frames. 
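[Editor's note] The `Whitening: name=..., num_groups=..., num_channels=..., metric=X vs. limit=Y` entries above compare a measured "non-whiteness" statistic of a module's activations against a scheduled limit. The recipe's exact formula is not reproduced here; the sketch below uses one natural choice, a normalized eigenvalue-spread ratio that equals 1.0 for a perfectly white (isotropic) covariance and approaches the group size as one direction dominates:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (frames, channels); split channels into groups, average the metric.
    frames, channels = x.shape
    assert channels % num_groups == 0
    g = channels // num_groups
    metrics = []
    for k in range(num_groups):
        xg = x[:, k * g:(k + 1) * g]
        cov = (xg.T @ xg) / frames           # (g, g) covariance estimate
        eigs = torch.linalg.eigvalsh(cov)
        # g * E[lambda^2] / E[lambda]^2: 1.0 when all eigenvalues are equal.
        metrics.append(g * (eigs ** 2).sum() / eigs.sum() ** 2)
    return torch.stack(metrics).mean().item()

x = torch.randn(1000, 256)
print(whitening_metric(x, num_groups=4))  # near 1.0 for white noise
```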
], batch size: 33, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:02:18,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230215.33333333334, ans=0.125 +2024-08-03 21:02:38,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=230288.66666666666, ans=0.025 +2024-08-03 21:02:49,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230325.33333333334, ans=0.0 +2024-08-03 21:02:55,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=230362.0, ans=0.125 +2024-08-03 21:02:59,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.753e+01 1.118e+02 1.313e+02 1.560e+02 2.225e+02, threshold=2.625e+02, percent-clipped=0.0 +2024-08-03 21:03:02,042 INFO [train.py:1114] (2/4) Epoch 18, batch 950, loss[loss=0.1436, simple_loss=0.2318, pruned_loss=0.02768, over 13533.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2702, pruned_loss=0.04856, over 2613507.39 frames. ], batch size: 34, lr: 6.96e-03, grad_scale: 16.0 +2024-08-03 21:03:06,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230398.66666666666, ans=0.1 +2024-08-03 21:03:10,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.81 vs. limit=10.0 +2024-08-03 21:03:26,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=230472.0, ans=0.2 +2024-08-03 21:03:27,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230472.0, ans=0.1 +2024-08-03 21:03:32,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230508.66666666666, ans=0.1 +2024-08-03 21:03:32,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.37 vs. limit=10.0 +2024-08-03 21:03:38,174 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.53 vs. limit=10.0 +2024-08-03 21:03:40,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.11 vs. limit=22.5 +2024-08-03 21:03:45,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230545.33333333334, ans=0.1 +2024-08-03 21:03:48,151 INFO [train.py:1114] (2/4) Epoch 18, batch 1000, loss[loss=0.1605, simple_loss=0.2487, pruned_loss=0.03618, over 13367.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2709, pruned_loss=0.04897, over 2612014.62 frames. 
], batch size: 35, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:03:52,962 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:03:55,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=230582.0, ans=0.125 +2024-08-03 21:04:04,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=230618.66666666666, ans=0.0 +2024-08-03 21:04:04,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230618.66666666666, ans=0.1 +2024-08-03 21:04:18,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=230692.0, ans=0.125 +2024-08-03 21:04:20,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=230692.0, ans=0.0 +2024-08-03 21:04:34,934 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.346e+01 1.082e+02 1.211e+02 1.465e+02 2.308e+02, threshold=2.421e+02, percent-clipped=0.0 +2024-08-03 21:04:39,859 INFO [train.py:1114] (2/4) Epoch 18, batch 1050, loss[loss=0.1921, simple_loss=0.2823, pruned_loss=0.0509, over 13566.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2703, pruned_loss=0.04868, over 2615566.45 frames. ], batch size: 39, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:04:42,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=230765.33333333334, ans=0.5 +2024-08-03 21:04:59,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230838.66666666666, ans=0.125 +2024-08-03 21:05:05,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.82 vs. limit=15.0 +2024-08-03 21:05:08,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.42 vs. limit=22.5 +2024-08-03 21:05:17,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=230912.0, ans=0.125 +2024-08-03 21:05:21,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=230912.0, ans=0.0 +2024-08-03 21:05:26,816 INFO [train.py:1114] (2/4) Epoch 18, batch 1100, loss[loss=0.1891, simple_loss=0.2748, pruned_loss=0.05169, over 13562.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2704, pruned_loss=0.04855, over 2619248.42 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:05:31,490 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:05:35,532 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. 
limit=10.0 +2024-08-03 21:05:53,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=231058.66666666666, ans=0.2 +2024-08-03 21:05:56,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.41 vs. limit=15.0 +2024-08-03 21:06:09,958 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.017e+01 1.089e+02 1.225e+02 1.560e+02 2.576e+02, threshold=2.450e+02, percent-clipped=1.0 +2024-08-03 21:06:10,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231095.33333333334, ans=0.125 +2024-08-03 21:06:12,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.64 vs. limit=15.0 +2024-08-03 21:06:12,618 INFO [train.py:1114] (2/4) Epoch 18, batch 1150, loss[loss=0.1983, simple_loss=0.2783, pruned_loss=0.05909, over 13543.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2698, pruned_loss=0.04851, over 2618831.07 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0 +2024-08-03 21:06:15,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=231132.0, ans=0.125 +2024-08-03 21:06:18,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=231132.0, ans=0.025 +2024-08-03 21:06:24,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231168.66666666666, ans=0.0 +2024-08-03 21:06:25,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231168.66666666666, ans=0.0 +2024-08-03 21:06:58,353 INFO [train.py:1114] (2/4) Epoch 18, batch 1200, loss[loss=0.1826, simple_loss=0.2723, pruned_loss=0.04644, over 13585.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2709, pruned_loss=0.04891, over 2616090.12 frames. ], batch size: 39, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:07:09,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.74 vs. 
limit=15.0 +2024-08-03 21:07:17,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231388.66666666666, ans=0.125 +2024-08-03 21:07:18,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231388.66666666666, ans=0.1 +2024-08-03 21:07:28,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231425.33333333334, ans=0.1 +2024-08-03 21:07:28,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=231425.33333333334, ans=0.125 +2024-08-03 21:07:29,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=231425.33333333334, ans=0.0 +2024-08-03 21:07:40,620 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.911e+01 1.076e+02 1.246e+02 1.591e+02 2.283e+02, threshold=2.493e+02, percent-clipped=0.0 +2024-08-03 21:07:43,363 INFO [train.py:1114] (2/4) Epoch 18, batch 1250, loss[loss=0.1948, simple_loss=0.2899, pruned_loss=0.04982, over 13458.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2711, pruned_loss=0.04883, over 2627466.31 frames. ], batch size: 42, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:07:53,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.48 vs. limit=12.0 +2024-08-03 21:07:58,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231535.33333333334, ans=0.1 +2024-08-03 21:08:26,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=231645.33333333334, ans=0.125 +2024-08-03 21:08:27,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231645.33333333334, ans=0.0 +2024-08-03 21:08:36,235 INFO [train.py:1114] (2/4) Epoch 18, batch 1300, loss[loss=0.2082, simple_loss=0.2956, pruned_loss=0.06038, over 12959.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2702, pruned_loss=0.04859, over 2630899.00 frames. ], batch size: 52, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:08:36,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=231682.0, ans=0.125 +2024-08-03 21:08:42,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.79 vs. limit=15.0 +2024-08-03 21:08:48,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.13 vs. 
limit=15.0 +2024-08-03 21:08:49,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=231718.66666666666, ans=0.2 +2024-08-03 21:08:50,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=231718.66666666666, ans=0.125 +2024-08-03 21:09:01,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231755.33333333334, ans=0.1 +2024-08-03 21:09:15,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231828.66666666666, ans=0.0 +2024-08-03 21:09:19,122 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.689e+01 1.086e+02 1.232e+02 1.465e+02 2.299e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 21:09:21,777 INFO [train.py:1114] (2/4) Epoch 18, batch 1350, loss[loss=0.1731, simple_loss=0.2681, pruned_loss=0.0391, over 13541.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2699, pruned_loss=0.04851, over 2639453.31 frames. ], batch size: 37, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:09:39,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=231938.66666666666, ans=0.07 +2024-08-03 21:09:47,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231938.66666666666, ans=0.1 +2024-08-03 21:10:03,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=232012.0, ans=0.025 +2024-08-03 21:10:07,018 INFO [train.py:1114] (2/4) Epoch 18, batch 1400, loss[loss=0.1574, simple_loss=0.2353, pruned_loss=0.03974, over 13250.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2698, pruned_loss=0.04844, over 2643225.05 frames. ], batch size: 31, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:07,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.71 vs. limit=6.0 +2024-08-03 21:10:10,818 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:10:34,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=232158.66666666666, ans=0.0 +2024-08-03 21:10:35,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=232158.66666666666, ans=0.5 +2024-08-03 21:10:49,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=232195.33333333334, ans=0.125 +2024-08-03 21:10:49,665 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.779e+01 1.159e+02 1.340e+02 1.703e+02 2.344e+02, threshold=2.679e+02, percent-clipped=0.0 +2024-08-03 21:10:52,347 INFO [train.py:1114] (2/4) Epoch 18, batch 1450, loss[loss=0.2015, simple_loss=0.2925, pruned_loss=0.05524, over 13412.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2701, pruned_loss=0.04845, over 2642315.49 frames. 
], batch size: 43, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:58,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0 +2024-08-03 21:11:04,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=232268.66666666666, ans=0.0 +2024-08-03 21:11:11,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=12.0 +2024-08-03 21:11:12,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.70 vs. limit=22.5 +2024-08-03 21:11:26,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=232342.0, ans=0.125 +2024-08-03 21:11:27,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=232342.0, ans=0.0 +2024-08-03 21:11:32,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232378.66666666666, ans=0.1 +2024-08-03 21:11:35,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=232378.66666666666, ans=0.0 +2024-08-03 21:11:40,682 INFO [train.py:1114] (2/4) Epoch 18, batch 1500, loss[loss=0.1694, simple_loss=0.2681, pruned_loss=0.03541, over 13392.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2706, pruned_loss=0.04832, over 2641843.92 frames. ], batch size: 39, lr: 6.93e-03, grad_scale: 16.0 +2024-08-03 21:11:40,904 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:11:41,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232415.33333333334, ans=0.1 +2024-08-03 21:11:48,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=232415.33333333334, ans=0.5 +2024-08-03 21:11:50,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=232415.33333333334, ans=0.2 +2024-08-03 21:11:51,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=232452.0, ans=0.0 +2024-08-03 21:11:56,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=232452.0, ans=0.0 +2024-08-03 21:12:16,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=232525.33333333334, ans=0.125 +2024-08-03 21:12:28,167 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.098e+02 1.405e+02 1.700e+02 3.079e+02, threshold=2.810e+02, percent-clipped=1.0 +2024-08-03 21:12:29,962 INFO [train.py:1114] (2/4) Epoch 18, batch 1550, loss[loss=0.1903, simple_loss=0.2856, pruned_loss=0.04753, over 13399.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2708, pruned_loss=0.04839, over 2632143.45 frames. 
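[Editor's note] Each train line above reports a per-batch `loss[... over N frames]` next to a smoothed `tot_loss[... over ~2.6M frames]`, which suggests a decayed, frame-weighted running average rather than a plain epoch mean. A sketch of such an accumulator; the decay constant is an assumption:

```python
class RunningLoss:
    """Frame-weighted exponentially decayed loss average."""
    def __init__(self, decay: float = 0.99):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float):
        # Decay old statistics, then add this batch weighted by its frames.
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```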
], batch size: 41, lr: 6.92e-03, grad_scale: 16.0 +2024-08-03 21:12:33,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232598.66666666666, ans=0.1 +2024-08-03 21:12:36,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232598.66666666666, ans=0.1 +2024-08-03 21:12:43,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=232635.33333333334, ans=0.2 +2024-08-03 21:13:06,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=232745.33333333334, ans=0.0 +2024-08-03 21:13:07,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=232745.33333333334, ans=0.0 +2024-08-03 21:13:10,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232745.33333333334, ans=0.125 +2024-08-03 21:13:15,287 INFO [train.py:1114] (2/4) Epoch 18, batch 1600, loss[loss=0.2068, simple_loss=0.2988, pruned_loss=0.05737, over 13575.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2714, pruned_loss=0.04911, over 2624127.84 frames. ], batch size: 39, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:13:18,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=232782.0, ans=0.125 +2024-08-03 21:13:29,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232818.66666666666, ans=0.1 +2024-08-03 21:13:40,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=232855.33333333334, ans=0.125 +2024-08-03 21:13:47,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=232892.0, ans=0.07 +2024-08-03 21:14:01,387 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.392e+01 1.109e+02 1.247e+02 1.559e+02 3.003e+02, threshold=2.495e+02, percent-clipped=1.0 +2024-08-03 21:14:03,113 INFO [train.py:1114] (2/4) Epoch 18, batch 1650, loss[loss=0.178, simple_loss=0.2747, pruned_loss=0.04062, over 13333.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2711, pruned_loss=0.04925, over 2620625.84 frames. ], batch size: 40, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:14:07,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=232965.33333333334, ans=0.125 +2024-08-03 21:14:25,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.75 vs. limit=22.5 +2024-08-03 21:14:33,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=233075.33333333334, ans=0.125 +2024-08-03 21:14:37,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.02 vs. limit=22.5 +2024-08-03 21:14:39,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.54 vs. 
limit=15.0 +2024-08-03 21:14:39,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=233112.0, ans=0.125 +2024-08-03 21:14:43,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=233112.0, ans=0.125 +2024-08-03 21:14:48,609 INFO [train.py:1114] (2/4) Epoch 18, batch 1700, loss[loss=0.1459, simple_loss=0.2318, pruned_loss=0.03004, over 13247.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2715, pruned_loss=0.04894, over 2629822.20 frames. ], batch size: 31, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:15:10,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=233222.0, ans=0.1 +2024-08-03 21:15:12,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=233222.0, ans=0.2 +2024-08-03 21:15:15,261 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.098e-02 +2024-08-03 21:15:17,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=233222.0, ans=0.125 +2024-08-03 21:15:29,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=233258.66666666666, ans=0.0 +2024-08-03 21:15:33,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233295.33333333334, ans=0.1 +2024-08-03 21:15:41,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.564e+01 1.087e+02 1.250e+02 1.627e+02 3.379e+02, threshold=2.500e+02, percent-clipped=6.0 +2024-08-03 21:15:42,574 INFO [train.py:1114] (2/4) Epoch 18, batch 1750, loss[loss=0.1717, simple_loss=0.2554, pruned_loss=0.04398, over 13529.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2706, pruned_loss=0.04868, over 2632668.91 frames. ], batch size: 31, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:15:46,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=233332.0, ans=0.0 +2024-08-03 21:15:47,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=233332.0, ans=0.025 +2024-08-03 21:15:50,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=233332.0, ans=0.2 +2024-08-03 21:16:03,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=233405.33333333334, ans=0.125 +2024-08-03 21:16:19,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=233478.66666666666, ans=0.1 +2024-08-03 21:16:27,998 INFO [train.py:1114] (2/4) Epoch 18, batch 1800, loss[loss=0.1944, simple_loss=0.2809, pruned_loss=0.054, over 13562.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2713, pruned_loss=0.04892, over 2633837.95 frames. 
], batch size: 38, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:16:38,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=233552.0, ans=0.125 +2024-08-03 21:16:44,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=233552.0, ans=0.0 +2024-08-03 21:16:46,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233588.66666666666, ans=0.1 +2024-08-03 21:16:48,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233588.66666666666, ans=0.1 +2024-08-03 21:16:48,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233588.66666666666, ans=0.1 +2024-08-03 21:16:49,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=233588.66666666666, ans=0.125 +2024-08-03 21:16:54,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233625.33333333334, ans=0.125 +2024-08-03 21:16:58,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.95 vs. limit=15.0 +2024-08-03 21:17:06,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=233662.0, ans=0.125 +2024-08-03 21:17:12,816 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.188e+02 1.406e+02 1.892e+02 3.223e+02, threshold=2.812e+02, percent-clipped=8.0 +2024-08-03 21:17:13,751 INFO [train.py:1114] (2/4) Epoch 18, batch 1850, loss[loss=0.1926, simple_loss=0.2846, pruned_loss=0.0503, over 13405.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.271, pruned_loss=0.0489, over 2636553.56 frames. ], batch size: 39, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:17:30,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=233735.33333333334, ans=0.025 +2024-08-03 21:17:36,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=233772.0, ans=0.0 +2024-08-03 21:17:41,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=233808.66666666666, ans=0.125 +2024-08-03 21:17:50,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=233845.33333333334, ans=0.0 +2024-08-03 21:17:57,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.93 vs. limit=10.0 +2024-08-03 21:17:59,008 INFO [train.py:1114] (2/4) Epoch 18, batch 1900, loss[loss=0.2154, simple_loss=0.2922, pruned_loss=0.06929, over 13310.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2714, pruned_loss=0.04911, over 2639372.70 frames. ], batch size: 40, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:18:06,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.29 vs. 
limit=22.5 +2024-08-03 21:18:09,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233918.66666666666, ans=0.125 +2024-08-03 21:18:48,032 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.661e+01 1.108e+02 1.336e+02 1.650e+02 2.713e+02, threshold=2.672e+02, percent-clipped=0.0 +2024-08-03 21:18:48,977 INFO [train.py:1114] (2/4) Epoch 18, batch 1950, loss[loss=0.1738, simple_loss=0.2631, pruned_loss=0.04227, over 13559.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2719, pruned_loss=0.04913, over 2646579.16 frames. ], batch size: 36, lr: 6.90e-03, grad_scale: 16.0 +2024-08-03 21:19:04,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.56 vs. limit=6.0 +2024-08-03 21:19:11,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=234102.0, ans=0.125 +2024-08-03 21:19:16,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.83 vs. limit=15.0 +2024-08-03 21:19:40,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234212.0, ans=0.125 +2024-08-03 21:19:43,527 INFO [train.py:1114] (2/4) Epoch 18, batch 2000, loss[loss=0.168, simple_loss=0.2495, pruned_loss=0.04331, over 13563.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2723, pruned_loss=0.04927, over 2635934.98 frames. ], batch size: 31, lr: 6.90e-03, grad_scale: 32.0 +2024-08-03 21:19:52,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234285.33333333334, ans=0.1 +2024-08-03 21:19:56,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=234285.33333333334, ans=0.0 +2024-08-03 21:20:02,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.41 vs. limit=15.0 +2024-08-03 21:20:05,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.02 vs. limit=22.5 +2024-08-03 21:20:14,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=234358.66666666666, ans=0.0 +2024-08-03 21:20:19,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=234358.66666666666, ans=0.2 +2024-08-03 21:20:28,746 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.785e+01 1.141e+02 1.284e+02 1.591e+02 2.253e+02, threshold=2.569e+02, percent-clipped=0.0 +2024-08-03 21:20:29,732 INFO [train.py:1114] (2/4) Epoch 18, batch 2050, loss[loss=0.1529, simple_loss=0.2313, pruned_loss=0.0373, over 13419.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2722, pruned_loss=0.04957, over 2633399.68 frames. 
], batch size: 32, lr: 6.90e-03, grad_scale: 32.0 +2024-08-03 21:20:32,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234432.0, ans=0.125 +2024-08-03 21:20:41,274 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.18 vs. limit=22.5 +2024-08-03 21:20:52,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234505.33333333334, ans=0.125 +2024-08-03 21:20:57,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234542.0, ans=0.0 +2024-08-03 21:21:04,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=234542.0, ans=0.09899494936611666 +2024-08-03 21:21:14,908 INFO [train.py:1114] (2/4) Epoch 18, batch 2100, loss[loss=0.2003, simple_loss=0.2937, pruned_loss=0.05344, over 13536.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2713, pruned_loss=0.04922, over 2639058.43 frames. ], batch size: 37, lr: 6.89e-03, grad_scale: 32.0 +2024-08-03 21:21:25,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=234652.0, ans=0.125 +2024-08-03 21:21:33,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.81 vs. limit=15.0 +2024-08-03 21:21:57,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234762.0, ans=0.125 +2024-08-03 21:21:59,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=234762.0, ans=0.0 +2024-08-03 21:22:01,494 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.081e+02 1.198e+02 1.435e+02 3.177e+02, threshold=2.396e+02, percent-clipped=1.0 +2024-08-03 21:22:02,383 INFO [train.py:1114] (2/4) Epoch 18, batch 2150, loss[loss=0.1863, simple_loss=0.2705, pruned_loss=0.05105, over 13558.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2698, pruned_loss=0.04843, over 2647917.31 frames. ], batch size: 36, lr: 6.89e-03, grad_scale: 32.0 +2024-08-03 21:22:02,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234798.66666666666, ans=0.1 +2024-08-03 21:22:06,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=234798.66666666666, ans=0.04949747468305833 +2024-08-03 21:22:08,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=234798.66666666666, ans=0.2 +2024-08-03 21:22:12,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.54 vs. 
limit=15.0 +2024-08-03 21:22:14,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234835.33333333334, ans=0.125 +2024-08-03 21:22:16,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=234835.33333333334, ans=0.125 +2024-08-03 21:22:23,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=234872.0, ans=0.2 +2024-08-03 21:22:30,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=234872.0, ans=0.125 +2024-08-03 21:22:36,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234908.66666666666, ans=0.0 +2024-08-03 21:22:41,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=234908.66666666666, ans=0.0 +2024-08-03 21:22:47,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=234945.33333333334, ans=0.125 +2024-08-03 21:22:54,821 INFO [train.py:1114] (2/4) Epoch 18, batch 2200, loss[loss=0.1822, simple_loss=0.2748, pruned_loss=0.04484, over 13394.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2697, pruned_loss=0.04842, over 2645415.14 frames. ], batch size: 39, lr: 6.89e-03, grad_scale: 16.0 +2024-08-03 21:22:59,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0 +2024-08-03 21:23:15,734 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:23:18,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=235055.33333333334, ans=0.95 +2024-08-03 21:23:27,061 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.38 vs. limit=15.0 +2024-08-03 21:23:32,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=235128.66666666666, ans=0.125 +2024-08-03 21:23:35,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.50 vs. limit=6.0 +2024-08-03 21:23:36,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=235128.66666666666, ans=0.2 +2024-08-03 21:23:40,428 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.106e+01 1.127e+02 1.422e+02 1.837e+02 2.804e+02, threshold=2.844e+02, percent-clipped=6.0 +2024-08-03 21:23:40,465 INFO [train.py:1114] (2/4) Epoch 18, batch 2250, loss[loss=0.1814, simple_loss=0.2796, pruned_loss=0.0416, over 13349.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2705, pruned_loss=0.04904, over 2642749.53 frames. 
], batch size: 37, lr: 6.89e-03, grad_scale: 16.0 +2024-08-03 21:23:47,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=235165.33333333334, ans=0.125 +2024-08-03 21:23:47,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-08-03 21:24:25,615 INFO [train.py:1114] (2/4) Epoch 18, batch 2300, loss[loss=0.1484, simple_loss=0.2325, pruned_loss=0.03217, over 13586.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2694, pruned_loss=0.04872, over 2637914.90 frames. ], batch size: 33, lr: 6.88e-03, grad_scale: 16.0 +2024-08-03 21:24:43,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=235422.0, ans=0.05 +2024-08-03 21:24:45,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.80 vs. limit=10.0 +2024-08-03 21:24:56,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=235458.66666666666, ans=0.2 +2024-08-03 21:25:00,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.59 vs. limit=15.0 +2024-08-03 21:25:04,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235495.33333333334, ans=0.125 +2024-08-03 21:25:10,865 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.665e+01 1.085e+02 1.233e+02 1.641e+02 2.605e+02, threshold=2.466e+02, percent-clipped=0.0 +2024-08-03 21:25:10,902 INFO [train.py:1114] (2/4) Epoch 18, batch 2350, loss[loss=0.1886, simple_loss=0.2817, pruned_loss=0.04769, over 13557.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.27, pruned_loss=0.04869, over 2640981.25 frames. ], batch size: 38, lr: 6.88e-03, grad_scale: 16.0 +2024-08-03 21:25:19,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.22 vs. 
limit=22.5 +2024-08-03 21:25:36,475 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:25:38,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=235642.0, ans=0.125 +2024-08-03 21:25:38,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=235642.0, ans=0.1 +2024-08-03 21:25:39,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=235642.0, ans=0.125 +2024-08-03 21:25:46,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=235678.66666666666, ans=0.125 +2024-08-03 21:25:49,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=235678.66666666666, ans=0.2 +2024-08-03 21:25:49,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=235678.66666666666, ans=0.09899494936611666 +2024-08-03 21:25:57,727 INFO [train.py:1114] (2/4) Epoch 18, batch 2400, loss[loss=0.1607, simple_loss=0.2549, pruned_loss=0.03323, over 13533.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2701, pruned_loss=0.04851, over 2641889.86 frames. ], batch size: 35, lr: 6.88e-03, grad_scale: 32.0 +2024-08-03 21:25:58,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=235715.33333333334, ans=0.125 +2024-08-03 21:26:02,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=235715.33333333334, ans=0.2 +2024-08-03 21:26:17,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=235752.0, ans=0.025 +2024-08-03 21:26:22,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.33 vs. limit=15.0 +2024-08-03 21:26:31,827 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:26:48,965 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.162e+01 1.095e+02 1.213e+02 1.558e+02 2.561e+02, threshold=2.426e+02, percent-clipped=1.0 +2024-08-03 21:26:49,003 INFO [train.py:1114] (2/4) Epoch 18, batch 2450, loss[loss=0.1785, simple_loss=0.2621, pruned_loss=0.04748, over 13348.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2711, pruned_loss=0.04897, over 2631651.49 frames. ], batch size: 37, lr: 6.88e-03, grad_scale: 32.0 +2024-08-03 21:27:06,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=235972.0, ans=0.05 +2024-08-03 21:27:07,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=235972.0, ans=0.0 +2024-08-03 21:27:08,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.06 vs. 
limit=15.0 +2024-08-03 21:27:09,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=235972.0, ans=0.05 +2024-08-03 21:27:34,613 INFO [train.py:1114] (2/4) Epoch 18, batch 2500, loss[loss=0.1876, simple_loss=0.2781, pruned_loss=0.04859, over 13402.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2715, pruned_loss=0.04911, over 2635861.38 frames. ], batch size: 39, lr: 6.87e-03, grad_scale: 32.0 +2024-08-03 21:27:58,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=236155.33333333334, ans=0.025 +2024-08-03 21:27:58,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=236155.33333333334, ans=0.0 +2024-08-03 21:28:06,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.25 vs. limit=15.0 +2024-08-03 21:28:10,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236228.66666666666, ans=0.125 +2024-08-03 21:28:17,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=236228.66666666666, ans=0.015 +2024-08-03 21:28:18,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=12.0 +2024-08-03 21:28:19,323 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.154e+02 1.291e+02 1.813e+02 3.422e+02, threshold=2.583e+02, percent-clipped=8.0 +2024-08-03 21:28:19,361 INFO [train.py:1114] (2/4) Epoch 18, batch 2550, loss[loss=0.1488, simple_loss=0.2262, pruned_loss=0.03565, over 13543.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2711, pruned_loss=0.04878, over 2638153.72 frames. ], batch size: 31, lr: 6.87e-03, grad_scale: 32.0 +2024-08-03 21:28:21,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236265.33333333334, ans=0.1 +2024-08-03 21:28:22,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=236265.33333333334, ans=0.0 +2024-08-03 21:28:27,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=236302.0, ans=0.125 +2024-08-03 21:28:31,654 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.14 vs. limit=15.0 +2024-08-03 21:28:31,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0 +2024-08-03 21:28:32,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236302.0, ans=0.1 +2024-08-03 21:28:38,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.89 vs. 
limit=15.0 +2024-08-03 21:28:50,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236375.33333333334, ans=0.1 +2024-08-03 21:28:57,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=236412.0, ans=0.125 +2024-08-03 21:29:03,076 INFO [train.py:1114] (2/4) Epoch 18, batch 2600, loss[loss=0.1622, simple_loss=0.2544, pruned_loss=0.03499, over 13563.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2716, pruned_loss=0.0489, over 2637518.31 frames. ], batch size: 36, lr: 6.87e-03, grad_scale: 16.0 +2024-08-03 21:29:03,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=236448.66666666666, ans=0.125 +2024-08-03 21:29:18,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=236485.33333333334, ans=0.125 +2024-08-03 21:29:26,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=236522.0, ans=0.125 +2024-08-03 21:29:34,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236558.66666666666, ans=0.1 +2024-08-03 21:29:47,192 INFO [train.py:1114] (2/4) Epoch 18, batch 2650, loss[loss=0.2052, simple_loss=0.2948, pruned_loss=0.05778, over 13354.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2722, pruned_loss=0.04944, over 2640097.71 frames. ], batch size: 46, lr: 6.87e-03, grad_scale: 16.0 +2024-08-03 21:29:48,040 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.186e+01 1.172e+02 1.315e+02 1.569e+02 3.387e+02, threshold=2.631e+02, percent-clipped=2.0 +2024-08-03 21:30:07,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=236705.33333333334, ans=0.125 +2024-08-03 21:30:12,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=6.0 +2024-08-03 21:30:17,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=236742.0, ans=0.2 +2024-08-03 21:30:30,355 INFO [train.py:1114] (2/4) Epoch 18, batch 2700, loss[loss=0.1949, simple_loss=0.2806, pruned_loss=0.05458, over 13556.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.272, pruned_loss=0.0494, over 2637516.74 frames. ], batch size: 40, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:30:35,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=236815.33333333334, ans=0.5 +2024-08-03 21:30:46,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-08-03 21:30:51,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.40 vs. limit=15.0 +2024-08-03 21:31:06,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=236962.0, ans=0.0 +2024-08-03 21:31:13,960 INFO [train.py:1114] (2/4) Epoch 18, batch 2750, loss[loss=0.1716, simple_loss=0.2547, pruned_loss=0.04428, over 13321.00 frames. 
], tot_loss[loss=0.1851, simple_loss=0.2712, pruned_loss=0.0495, over 2635239.04 frames. ], batch size: 34, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:31:14,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=236998.66666666666, ans=0.125 +2024-08-03 21:31:14,724 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.152e+02 1.311e+02 1.647e+02 2.709e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 21:31:17,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=236998.66666666666, ans=0.5 +2024-08-03 21:31:36,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=12.0 +2024-08-03 21:31:36,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=237072.0, ans=0.0 +2024-08-03 21:31:56,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=237182.0, ans=15.0 +2024-08-03 21:31:57,332 INFO [train.py:1114] (2/4) Epoch 18, batch 2800, loss[loss=0.2698, simple_loss=0.3256, pruned_loss=0.1069, over 9254.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2717, pruned_loss=0.04983, over 2626730.52 frames. ], batch size: 96, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:32:24,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=237292.0, ans=0.125 +2024-08-03 21:32:41,590 INFO [train.py:1114] (2/4) Epoch 18, batch 2850, loss[loss=0.2044, simple_loss=0.2804, pruned_loss=0.06423, over 13365.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2723, pruned_loss=0.05032, over 2620588.26 frames. ], batch size: 35, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:32:42,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=237365.33333333334, ans=0.125 +2024-08-03 21:32:43,241 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.041e+01 1.131e+02 1.338e+02 1.690e+02 3.058e+02, threshold=2.676e+02, percent-clipped=5.0 +2024-08-03 21:32:43,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=237365.33333333334, ans=0.025 +2024-08-03 21:33:02,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=237438.66666666666, ans=0.0 +2024-08-03 21:33:20,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=237512.0, ans=0.025 +2024-08-03 21:33:28,456 INFO [train.py:1114] (2/4) Epoch 18, batch 2900, loss[loss=0.1737, simple_loss=0.2639, pruned_loss=0.04177, over 13363.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2734, pruned_loss=0.05028, over 2631444.48 frames. ], batch size: 36, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:33:31,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=237548.66666666666, ans=0.025 +2024-08-03 21:34:11,741 INFO [train.py:1114] (2/4) Epoch 18, batch 2950, loss[loss=0.1768, simple_loss=0.2621, pruned_loss=0.04577, over 13332.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2717, pruned_loss=0.0498, over 2628517.59 frames. 
], batch size: 34, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:34:12,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=237732.0, ans=0.125 +2024-08-03 21:34:13,341 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.218e+01 1.166e+02 1.489e+02 1.763e+02 2.783e+02, threshold=2.978e+02, percent-clipped=2.0 +2024-08-03 21:34:25,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=237768.66666666666, ans=0.125 +2024-08-03 21:34:32,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=237805.33333333334, ans=0.125 +2024-08-03 21:34:43,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=237842.0, ans=0.05 +2024-08-03 21:34:54,886 INFO [train.py:1114] (2/4) Epoch 18, batch 3000, loss[loss=0.1949, simple_loss=0.2806, pruned_loss=0.0546, over 13541.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2718, pruned_loss=0.05024, over 2628800.64 frames. ], batch size: 37, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:34:54,886 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 21:35:02,133 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.5835, 2.7226, 2.8971, 1.6078], device='cuda:2') +2024-08-03 21:35:04,732 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1701, simple_loss=0.269, pruned_loss=0.03557, over 944034.00 frames. +2024-08-03 21:35:04,732 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 21:35:10,531 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=15.0 +2024-08-03 21:35:13,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=237952.0, ans=0.125 +2024-08-03 21:35:21,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=237988.66666666666, ans=0.2 +2024-08-03 21:35:36,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=15.0 +2024-08-03 21:35:38,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=238025.33333333334, ans=0.125 +2024-08-03 21:35:48,166 INFO [train.py:1114] (2/4) Epoch 18, batch 3050, loss[loss=0.173, simple_loss=0.2561, pruned_loss=0.04494, over 13518.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2724, pruned_loss=0.05031, over 2625437.36 frames. ], batch size: 35, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:35:49,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=238098.66666666666, ans=0.0 +2024-08-03 21:35:49,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.482e+01 1.049e+02 1.161e+02 1.346e+02 2.617e+02, threshold=2.322e+02, percent-clipped=0.0 +2024-08-03 21:35:57,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.40 vs. 
limit=15.0 +2024-08-03 21:36:09,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.22 vs. limit=10.0 +2024-08-03 21:36:20,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238208.66666666666, ans=0.1 +2024-08-03 21:36:21,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238245.33333333334, ans=0.0 +2024-08-03 21:36:31,298 INFO [train.py:1114] (2/4) Epoch 18, batch 3100, loss[loss=0.1907, simple_loss=0.2789, pruned_loss=0.05122, over 13280.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2716, pruned_loss=0.04987, over 2625592.66 frames. ], batch size: 46, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:36:56,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.72 vs. limit=22.5 +2024-08-03 21:37:03,022 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.44 vs. limit=22.5 +2024-08-03 21:37:07,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=238428.66666666666, ans=0.2 +2024-08-03 21:37:15,539 INFO [train.py:1114] (2/4) Epoch 18, batch 3150, loss[loss=0.2105, simple_loss=0.2981, pruned_loss=0.06147, over 13328.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2714, pruned_loss=0.04965, over 2626851.59 frames. ], batch size: 49, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:37:17,263 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.157e+01 1.121e+02 1.376e+02 1.775e+02 3.223e+02, threshold=2.752e+02, percent-clipped=7.0 +2024-08-03 21:37:17,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=238465.33333333334, ans=0.05 +2024-08-03 21:37:20,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.45 vs. limit=22.5 +2024-08-03 21:37:50,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=238612.0, ans=0.0 +2024-08-03 21:37:53,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=238612.0, ans=0.125 +2024-08-03 21:37:58,749 INFO [train.py:1114] (2/4) Epoch 18, batch 3200, loss[loss=0.1827, simple_loss=0.2681, pruned_loss=0.0486, over 13547.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2707, pruned_loss=0.04937, over 2633799.46 frames. ], batch size: 37, lr: 6.84e-03, grad_scale: 32.0 +2024-08-03 21:38:11,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=238685.33333333334, ans=0.125 +2024-08-03 21:38:12,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.64 vs. 
limit=15.0 +2024-08-03 21:38:22,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238722.0, ans=0.125 +2024-08-03 21:38:23,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=238722.0, ans=0.125 +2024-08-03 21:38:26,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=238758.66666666666, ans=0.2 +2024-08-03 21:38:43,450 INFO [train.py:1114] (2/4) Epoch 18, batch 3250, loss[loss=0.1964, simple_loss=0.284, pruned_loss=0.05438, over 13390.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2721, pruned_loss=0.04997, over 2638348.92 frames. ], batch size: 38, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:38:45,173 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.129e+02 1.271e+02 1.537e+02 2.545e+02, threshold=2.542e+02, percent-clipped=0.0 +2024-08-03 21:38:50,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=238832.0, ans=0.0 +2024-08-03 21:39:02,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=238905.33333333334, ans=0.125 +2024-08-03 21:39:15,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=238942.0, ans=0.125 +2024-08-03 21:39:26,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.35 vs. limit=15.0 +2024-08-03 21:39:27,913 INFO [train.py:1114] (2/4) Epoch 18, batch 3300, loss[loss=0.1934, simple_loss=0.2767, pruned_loss=0.05509, over 12952.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2707, pruned_loss=0.04929, over 2639503.11 frames. ], batch size: 52, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:39:28,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239015.33333333334, ans=0.0 +2024-08-03 21:39:41,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.32 vs. limit=22.5 +2024-08-03 21:39:46,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239088.66666666666, ans=0.0 +2024-08-03 21:39:47,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=239088.66666666666, ans=0.1 +2024-08-03 21:39:53,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239125.33333333334, ans=0.1 +2024-08-03 21:40:00,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=239125.33333333334, ans=0.125 +2024-08-03 21:40:00,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239125.33333333334, ans=0.125 +2024-08-03 21:40:10,663 INFO [train.py:1114] (2/4) Epoch 18, batch 3350, loss[loss=0.2139, simple_loss=0.3038, pruned_loss=0.06202, over 12968.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2718, pruned_loss=0.04982, over 2631056.04 frames. 
], batch size: 48, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:40:11,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=239198.66666666666, ans=0.125 +2024-08-03 21:40:12,337 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.013e+01 1.097e+02 1.292e+02 1.574e+02 2.403e+02, threshold=2.585e+02, percent-clipped=0.0 +2024-08-03 21:40:22,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=239235.33333333334, ans=0.0 +2024-08-03 21:40:31,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0 +2024-08-03 21:40:34,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=239272.0, ans=0.09899494936611666 +2024-08-03 21:40:35,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.52 vs. limit=22.5 +2024-08-03 21:40:37,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=239308.66666666666, ans=0.125 +2024-08-03 21:40:38,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239308.66666666666, ans=0.125 +2024-08-03 21:40:38,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=239308.66666666666, ans=0.0 +2024-08-03 21:40:47,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=239345.33333333334, ans=0.025 +2024-08-03 21:40:53,383 INFO [train.py:1114] (2/4) Epoch 18, batch 3400, loss[loss=0.1835, simple_loss=0.2609, pruned_loss=0.05308, over 13538.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2718, pruned_loss=0.05001, over 2626246.81 frames. ], batch size: 31, lr: 6.83e-03, grad_scale: 16.0 +2024-08-03 21:40:55,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239382.0, ans=0.125 +2024-08-03 21:40:55,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=239382.0, ans=0.125 +2024-08-03 21:41:01,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=239418.66666666666, ans=0.125 +2024-08-03 21:41:08,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=239418.66666666666, ans=0.125 +2024-08-03 21:41:31,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0 +2024-08-03 21:41:34,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239528.66666666666, ans=0.125 +2024-08-03 21:41:36,137 INFO [train.py:1114] (2/4) Epoch 18, batch 3450, loss[loss=0.1795, simple_loss=0.2704, pruned_loss=0.0443, over 12898.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2719, pruned_loss=0.04982, over 2629612.89 frames. 
], batch size: 52, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:41:38,577 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.763e+01 1.067e+02 1.315e+02 1.546e+02 2.791e+02, threshold=2.630e+02, percent-clipped=1.0 +2024-08-03 21:41:42,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0 +2024-08-03 21:41:55,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=239638.66666666666, ans=0.125 +2024-08-03 21:41:57,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=239638.66666666666, ans=0.0 +2024-08-03 21:42:08,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239675.33333333334, ans=0.1 +2024-08-03 21:42:18,553 INFO [train.py:1114] (2/4) Epoch 18, batch 3500, loss[loss=0.1914, simple_loss=0.272, pruned_loss=0.05539, over 13537.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2712, pruned_loss=0.04986, over 2631404.34 frames. ], batch size: 34, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:42:21,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=239748.66666666666, ans=0.125 +2024-08-03 21:42:23,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=239748.66666666666, ans=0.2 +2024-08-03 21:42:49,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=239858.66666666666, ans=0.125 +2024-08-03 21:43:02,817 INFO [train.py:1114] (2/4) Epoch 18, batch 3550, loss[loss=0.1914, simple_loss=0.2742, pruned_loss=0.05431, over 12503.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2734, pruned_loss=0.05074, over 2629279.96 frames. ], batch size: 58, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:43:04,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=239932.0, ans=0.07 +2024-08-03 21:43:05,281 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.515e+01 1.163e+02 1.331e+02 1.591e+02 2.731e+02, threshold=2.663e+02, percent-clipped=1.0 +2024-08-03 21:43:41,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.31 vs. limit=22.5 +2024-08-03 21:43:46,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=240078.66666666666, ans=0.0 +2024-08-03 21:43:47,782 INFO [train.py:1114] (2/4) Epoch 18, batch 3600, loss[loss=0.2056, simple_loss=0.2892, pruned_loss=0.06102, over 9555.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2781, pruned_loss=0.05457, over 2487497.08 frames. 
], batch size: 96, lr: 6.82e-03, grad_scale: 32.0 +2024-08-03 21:43:51,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240115.33333333334, ans=0.125 +2024-08-03 21:43:57,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=240152.0, ans=0.125 +2024-08-03 21:44:10,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=240188.66666666666, ans=0.09899494936611666 +2024-08-03 21:44:10,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240188.66666666666, ans=0.125 +2024-08-03 21:44:17,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=15.0 +2024-08-03 21:44:18,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.23 vs. limit=6.0 +2024-08-03 21:45:02,648 INFO [train.py:1114] (2/4) Epoch 19, batch 0, loss[loss=0.1626, simple_loss=0.2487, pruned_loss=0.03827, over 13323.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2487, pruned_loss=0.03827, over 13323.00 frames. ], batch size: 33, lr: 6.63e-03, grad_scale: 32.0 +2024-08-03 21:45:02,648 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 21:45:07,476 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.1273, 3.1561, 3.0264, 1.8854], device='cuda:2') +2024-08-03 21:45:13,115 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1699, simple_loss=0.2705, pruned_loss=0.03462, over 944034.00 frames. +2024-08-03 21:45:13,174 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 21:45:15,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.06 vs. limit=15.0 +2024-08-03 21:45:17,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=240262.0, ans=0.015 +2024-08-03 21:45:23,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=240298.66666666666, ans=0.0 +2024-08-03 21:45:26,646 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.737e+01 1.210e+02 1.300e+02 1.388e+02 2.591e+02, threshold=2.600e+02, percent-clipped=0.0 +2024-08-03 21:45:36,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.63 vs. limit=22.5 +2024-08-03 21:45:38,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240335.33333333334, ans=0.125 +2024-08-03 21:45:42,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.96 vs. 
limit=22.5 +2024-08-03 21:45:50,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=240372.0, ans=0.2 +2024-08-03 21:45:53,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240372.0, ans=0.0 +2024-08-03 21:46:03,567 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.52 vs. limit=10.0 +2024-08-03 21:46:05,633 INFO [train.py:1114] (2/4) Epoch 19, batch 50, loss[loss=0.1523, simple_loss=0.2339, pruned_loss=0.0353, over 13398.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2722, pruned_loss=0.05001, over 578810.44 frames. ], batch size: 32, lr: 6.63e-03, grad_scale: 32.0 +2024-08-03 21:46:08,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=240445.33333333334, ans=0.125 +2024-08-03 21:46:10,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240445.33333333334, ans=0.125 +2024-08-03 21:46:13,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240482.0, ans=0.125 +2024-08-03 21:46:19,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240482.0, ans=0.125 +2024-08-03 21:46:43,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.82 vs. limit=6.0 +2024-08-03 21:46:44,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=240592.0, ans=0.0 +2024-08-03 21:46:53,415 INFO [train.py:1114] (2/4) Epoch 19, batch 100, loss[loss=0.1751, simple_loss=0.2564, pruned_loss=0.04693, over 13546.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2742, pruned_loss=0.05006, over 1027162.54 frames. ], batch size: 35, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:47:05,122 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.117e+02 1.255e+02 1.420e+02 2.602e+02, threshold=2.511e+02, percent-clipped=1.0 +2024-08-03 21:47:09,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=240665.33333333334, ans=0.1 +2024-08-03 21:47:20,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=240738.66666666666, ans=0.125 +2024-08-03 21:47:38,536 INFO [train.py:1114] (2/4) Epoch 19, batch 150, loss[loss=0.151, simple_loss=0.2393, pruned_loss=0.03138, over 13428.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2719, pruned_loss=0.04945, over 1387658.52 frames. 
], batch size: 32, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:47:44,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=240812.0, ans=0.2 +2024-08-03 21:47:58,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240885.33333333334, ans=0.125 +2024-08-03 21:48:03,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=240885.33333333334, ans=0.0 +2024-08-03 21:48:04,002 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:48:06,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=240885.33333333334, ans=0.0 +2024-08-03 21:48:10,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=240922.0, ans=0.0 +2024-08-03 21:48:11,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=240922.0, ans=0.125 +2024-08-03 21:48:20,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.09 vs. limit=15.0 +2024-08-03 21:48:20,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=240958.66666666666, ans=0.025 +2024-08-03 21:48:26,775 INFO [train.py:1114] (2/4) Epoch 19, batch 200, loss[loss=0.1976, simple_loss=0.2916, pruned_loss=0.05177, over 12348.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2708, pruned_loss=0.04892, over 1665976.84 frames. ], batch size: 58, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:48:38,532 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.918e+01 1.063e+02 1.213e+02 1.459e+02 3.041e+02, threshold=2.427e+02, percent-clipped=0.0 +2024-08-03 21:48:45,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.94 vs. limit=22.5 +2024-08-03 21:48:52,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=241068.66666666666, ans=0.025 +2024-08-03 21:48:56,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.38 vs. limit=22.5 +2024-08-03 21:49:09,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten.whitening_limit, batch_count=241142.0, ans=15.0 +2024-08-03 21:49:11,663 INFO [train.py:1114] (2/4) Epoch 19, batch 250, loss[loss=0.1915, simple_loss=0.2814, pruned_loss=0.05081, over 13323.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2694, pruned_loss=0.04809, over 1884632.16 frames. ], batch size: 46, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:49:19,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=241178.66666666666, ans=0.04949747468305833 +2024-08-03 21:49:28,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.87 vs. 
limit=12.0 +2024-08-03 21:49:45,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=241288.66666666666, ans=0.125 +2024-08-03 21:49:47,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=241288.66666666666, ans=0.07 +2024-08-03 21:49:48,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=241288.66666666666, ans=0.2 +2024-08-03 21:49:49,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.67 vs. limit=6.0 +2024-08-03 21:49:54,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241325.33333333334, ans=0.1 +2024-08-03 21:49:58,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241362.0, ans=0.125 +2024-08-03 21:49:59,625 INFO [train.py:1114] (2/4) Epoch 19, batch 300, loss[loss=0.1803, simple_loss=0.2827, pruned_loss=0.03899, over 13453.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2686, pruned_loss=0.0477, over 2051283.75 frames. ], batch size: 42, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:50:06,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241362.0, ans=0.1 +2024-08-03 21:50:08,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.02 vs. limit=15.0 +2024-08-03 21:50:13,614 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.068e+01 1.095e+02 1.222e+02 1.449e+02 2.776e+02, threshold=2.445e+02, percent-clipped=4.0 +2024-08-03 21:50:23,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=241435.33333333334, ans=0.0 +2024-08-03 21:50:33,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.12 vs. limit=15.0 +2024-08-03 21:50:35,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241472.0, ans=0.1 +2024-08-03 21:50:45,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=241508.66666666666, ans=0.07 +2024-08-03 21:50:52,428 INFO [train.py:1114] (2/4) Epoch 19, batch 350, loss[loss=0.1565, simple_loss=0.235, pruned_loss=0.03903, over 13589.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.27, pruned_loss=0.04833, over 2182532.24 frames. ], batch size: 33, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:50:55,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241545.33333333334, ans=0.1 +2024-08-03 21:50:55,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.08 vs. limit=15.0 +2024-08-03 21:50:56,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. 
limit=15.0 +2024-08-03 21:51:05,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241582.0, ans=0.125 +2024-08-03 21:51:09,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=241582.0, ans=0.2 +2024-08-03 21:51:11,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=241618.66666666666, ans=0.025 +2024-08-03 21:51:22,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241655.33333333334, ans=0.125 +2024-08-03 21:51:22,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241655.33333333334, ans=0.0 +2024-08-03 21:51:33,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241692.0, ans=0.125 +2024-08-03 21:51:40,082 INFO [train.py:1114] (2/4) Epoch 19, batch 400, loss[loss=0.2007, simple_loss=0.2933, pruned_loss=0.05408, over 13364.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2705, pruned_loss=0.0484, over 2285971.01 frames. ], batch size: 37, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:51:41,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241728.66666666666, ans=0.1 +2024-08-03 21:51:52,137 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.083e+02 1.174e+02 1.521e+02 2.282e+02, threshold=2.347e+02, percent-clipped=0.0 +2024-08-03 21:52:02,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=241802.0, ans=0.125 +2024-08-03 21:52:09,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=241838.66666666666, ans=0.125 +2024-08-03 21:52:18,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.79 vs. limit=10.0 +2024-08-03 21:52:21,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=241875.33333333334, ans=0.2 +2024-08-03 21:52:23,608 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.74 vs. limit=15.0 +2024-08-03 21:52:24,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241875.33333333334, ans=0.0 +2024-08-03 21:52:28,386 INFO [train.py:1114] (2/4) Epoch 19, batch 450, loss[loss=0.1888, simple_loss=0.2789, pruned_loss=0.04933, over 13555.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2703, pruned_loss=0.04812, over 2359280.94 frames. 
], batch size: 38, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:52:35,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241912.0, ans=0.1 +2024-08-03 21:52:36,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=241948.66666666666, ans=0.2 +2024-08-03 21:52:45,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241948.66666666666, ans=0.1 +2024-08-03 21:53:01,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242022.0, ans=0.125 +2024-08-03 21:53:15,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=242095.33333333334, ans=0.0 +2024-08-03 21:53:16,368 INFO [train.py:1114] (2/4) Epoch 19, batch 500, loss[loss=0.2141, simple_loss=0.2924, pruned_loss=0.06793, over 13460.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2688, pruned_loss=0.04752, over 2424495.49 frames. ], batch size: 43, lr: 6.60e-03, grad_scale: 32.0 +2024-08-03 21:53:17,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242095.33333333334, ans=0.1 +2024-08-03 21:53:20,119 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:53:26,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=242132.0, ans=0.0 +2024-08-03 21:53:29,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.780e+01 1.159e+02 1.379e+02 1.825e+02 3.055e+02, threshold=2.757e+02, percent-clipped=7.0 +2024-08-03 21:53:59,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=242242.0, ans=0.0 +2024-08-03 21:54:03,933 INFO [train.py:1114] (2/4) Epoch 19, batch 550, loss[loss=0.1788, simple_loss=0.2726, pruned_loss=0.04251, over 13315.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2693, pruned_loss=0.0477, over 2468038.68 frames. ], batch size: 49, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:54:07,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.56 vs. limit=15.0 +2024-08-03 21:54:12,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242278.66666666666, ans=0.125 +2024-08-03 21:54:26,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.15 vs. 
limit=15.0 +2024-08-03 21:54:30,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=242352.0, ans=0.05 +2024-08-03 21:54:30,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=242352.0, ans=0.125 +2024-08-03 21:54:33,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=242388.66666666666, ans=0.125 +2024-08-03 21:54:43,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242425.33333333334, ans=0.125 +2024-08-03 21:54:51,044 INFO [train.py:1114] (2/4) Epoch 19, batch 600, loss[loss=0.1867, simple_loss=0.2788, pruned_loss=0.04733, over 13332.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2699, pruned_loss=0.04785, over 2508175.82 frames. ], batch size: 46, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:54:51,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=242462.0, ans=0.1 +2024-08-03 21:54:55,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242462.0, ans=0.1 +2024-08-03 21:54:58,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.35 vs. limit=15.0 +2024-08-03 21:55:00,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242498.66666666666, ans=0.125 +2024-08-03 21:55:03,305 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.087e+02 1.240e+02 1.431e+02 2.352e+02, threshold=2.480e+02, percent-clipped=0.0 +2024-08-03 21:55:11,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=242535.33333333334, ans=0.09899494936611666 +2024-08-03 21:55:22,419 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0 +2024-08-03 21:55:26,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.53 vs. limit=12.0 +2024-08-03 21:55:37,871 INFO [train.py:1114] (2/4) Epoch 19, batch 650, loss[loss=0.1702, simple_loss=0.2562, pruned_loss=0.04211, over 13544.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2693, pruned_loss=0.04771, over 2543639.90 frames. ], batch size: 37, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:55:41,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=242645.33333333334, ans=0.07 +2024-08-03 21:55:45,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=242645.33333333334, ans=0.025 +2024-08-03 21:56:03,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=242718.66666666666, ans=0.125 +2024-08-03 21:56:15,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.51 vs. 
limit=22.5 +2024-08-03 21:56:25,340 INFO [train.py:1114] (2/4) Epoch 19, batch 700, loss[loss=0.1753, simple_loss=0.2651, pruned_loss=0.0427, over 13521.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2701, pruned_loss=0.04811, over 2565968.25 frames. ], batch size: 35, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:56:25,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=15.0 +2024-08-03 21:56:38,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.287e+01 1.160e+02 1.383e+02 1.887e+02 3.094e+02, threshold=2.766e+02, percent-clipped=5.0 +2024-08-03 21:56:40,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=242865.33333333334, ans=0.125 +2024-08-03 21:57:03,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=242975.33333333334, ans=0.125 +2024-08-03 21:57:13,135 INFO [train.py:1114] (2/4) Epoch 19, batch 750, loss[loss=0.1822, simple_loss=0.2783, pruned_loss=0.04302, over 13357.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2695, pruned_loss=0.04801, over 2582820.64 frames. ], batch size: 37, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:57:24,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.54 vs. limit=22.5 +2024-08-03 21:57:35,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=243085.33333333334, ans=0.0 +2024-08-03 21:57:36,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=243085.33333333334, ans=0.015 +2024-08-03 21:57:45,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=243122.0, ans=0.0 +2024-08-03 21:57:53,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=243158.66666666666, ans=0.125 +2024-08-03 21:58:00,316 INFO [train.py:1114] (2/4) Epoch 19, batch 800, loss[loss=0.1519, simple_loss=0.2376, pruned_loss=0.03312, over 13350.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2694, pruned_loss=0.04803, over 2597214.79 frames. ], batch size: 33, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:58:03,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243195.33333333334, ans=0.1 +2024-08-03 21:58:12,634 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.835e+01 1.100e+02 1.252e+02 1.465e+02 2.313e+02, threshold=2.504e+02, percent-clipped=0.0 +2024-08-03 21:58:15,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243232.0, ans=0.1 +2024-08-03 21:58:34,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243305.33333333334, ans=0.0 +2024-08-03 21:58:36,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=243342.0, ans=0.125 +2024-08-03 21:58:39,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. 
limit=15.0 +2024-08-03 21:58:47,331 INFO [train.py:1114] (2/4) Epoch 19, batch 850, loss[loss=0.2114, simple_loss=0.3004, pruned_loss=0.06118, over 13335.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2692, pruned_loss=0.04792, over 2608858.35 frames. ], batch size: 40, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:59:00,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=243415.33333333334, ans=0.07 +2024-08-03 21:59:05,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243452.0, ans=0.0 +2024-08-03 21:59:13,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243488.66666666666, ans=0.125 +2024-08-03 21:59:16,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=243488.66666666666, ans=10.0 +2024-08-03 21:59:26,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=243525.33333333334, ans=0.2 +2024-08-03 21:59:31,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=243525.33333333334, ans=0.09899494936611666 +2024-08-03 21:59:31,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243525.33333333334, ans=0.125 +2024-08-03 21:59:34,606 INFO [train.py:1114] (2/4) Epoch 19, batch 900, loss[loss=0.1733, simple_loss=0.2561, pruned_loss=0.04527, over 13335.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2698, pruned_loss=0.04819, over 2610181.02 frames. ], batch size: 33, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:59:43,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243598.66666666666, ans=0.1 +2024-08-03 21:59:48,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.094e+02 1.385e+02 1.717e+02 2.818e+02, threshold=2.769e+02, percent-clipped=4.0 +2024-08-03 21:59:58,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=243635.33333333334, ans=0.125 +2024-08-03 21:59:59,713 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.03 vs. limit=22.5 +2024-08-03 22:00:06,620 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:00:07,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=243672.0, ans=0.025 +2024-08-03 22:00:22,266 INFO [train.py:1114] (2/4) Epoch 19, batch 950, loss[loss=0.1646, simple_loss=0.2513, pruned_loss=0.03894, over 13519.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2695, pruned_loss=0.04791, over 2611129.79 frames. 
], batch size: 34, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:00:24,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=243745.33333333334, ans=0.125 +2024-08-03 22:00:25,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243745.33333333334, ans=0.125 +2024-08-03 22:00:26,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=243745.33333333334, ans=0.2 +2024-08-03 22:00:36,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243782.0, ans=0.1 +2024-08-03 22:00:46,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=12.0 +2024-08-03 22:01:02,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.38 vs. limit=15.0 +2024-08-03 22:01:03,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=243892.0, ans=0.0 +2024-08-03 22:01:07,651 INFO [train.py:1114] (2/4) Epoch 19, batch 1000, loss[loss=0.1875, simple_loss=0.2674, pruned_loss=0.05384, over 13368.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2697, pruned_loss=0.04796, over 2610017.97 frames. ], batch size: 35, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:01:23,455 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.931e+01 1.118e+02 1.271e+02 1.540e+02 2.481e+02, threshold=2.543e+02, percent-clipped=0.0 +2024-08-03 22:01:39,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244038.66666666666, ans=0.1 +2024-08-03 22:01:50,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=244075.33333333334, ans=0.0 +2024-08-03 22:01:55,438 INFO [train.py:1114] (2/4) Epoch 19, batch 1050, loss[loss=0.1806, simple_loss=0.2697, pruned_loss=0.04578, over 13575.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2687, pruned_loss=0.04762, over 2614354.65 frames. ], batch size: 39, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:01:55,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=244112.0, ans=0.125 +2024-08-03 22:01:56,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.11 vs. limit=15.0 +2024-08-03 22:02:08,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=244148.66666666666, ans=0.125 +2024-08-03 22:02:11,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.37 vs. limit=10.0 +2024-08-03 22:02:18,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244185.33333333334, ans=0.125 +2024-08-03 22:02:18,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.01 vs. 
limit=15.0 +2024-08-03 22:02:24,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=244222.0, ans=0.04949747468305833 +2024-08-03 22:02:32,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=244258.66666666666, ans=0.2 +2024-08-03 22:02:34,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.56 vs. limit=6.0 +2024-08-03 22:02:42,813 INFO [train.py:1114] (2/4) Epoch 19, batch 1100, loss[loss=0.1577, simple_loss=0.2451, pruned_loss=0.03511, over 13545.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2695, pruned_loss=0.04836, over 2618490.55 frames. ], batch size: 36, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:02:58,057 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.403e+01 1.088e+02 1.287e+02 1.607e+02 2.579e+02, threshold=2.574e+02, percent-clipped=1.0 +2024-08-03 22:03:12,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=244405.33333333334, ans=0.125 +2024-08-03 22:03:14,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=244405.33333333334, ans=0.2 +2024-08-03 22:03:23,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=244442.0, ans=0.125 +2024-08-03 22:03:29,661 INFO [train.py:1114] (2/4) Epoch 19, batch 1150, loss[loss=0.1996, simple_loss=0.2809, pruned_loss=0.05918, over 13561.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2698, pruned_loss=0.04864, over 2617608.33 frames. ], batch size: 36, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:03:33,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.78 vs. limit=15.0 +2024-08-03 22:03:48,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=244552.0, ans=0.125 +2024-08-03 22:04:07,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=244625.33333333334, ans=0.0 +2024-08-03 22:04:10,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=244625.33333333334, ans=0.2 +2024-08-03 22:04:12,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=244625.33333333334, ans=0.0 +2024-08-03 22:04:17,211 INFO [train.py:1114] (2/4) Epoch 19, batch 1200, loss[loss=0.1943, simple_loss=0.2886, pruned_loss=0.05002, over 13586.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.04821, over 2616814.78 frames. ], batch size: 39, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:04:22,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.75 vs. 
limit=15.0 +2024-08-03 22:04:26,631 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:04:30,753 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.729e+01 1.075e+02 1.205e+02 1.408e+02 2.455e+02, threshold=2.410e+02, percent-clipped=0.0 +2024-08-03 22:04:33,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=244698.66666666666, ans=0.0 +2024-08-03 22:04:37,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244735.33333333334, ans=0.125 +2024-08-03 22:04:52,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=244772.0, ans=0.2 +2024-08-03 22:04:53,769 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:04:54,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244808.66666666666, ans=0.125 +2024-08-03 22:04:58,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=244808.66666666666, ans=0.035 +2024-08-03 22:05:04,356 INFO [train.py:1114] (2/4) Epoch 19, batch 1250, loss[loss=0.203, simple_loss=0.2866, pruned_loss=0.05968, over 13448.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2704, pruned_loss=0.04827, over 2628372.70 frames. ], batch size: 42, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:05:07,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=244845.33333333334, ans=0.0 +2024-08-03 22:05:09,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=244845.33333333334, ans=0.0 +2024-08-03 22:05:14,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244882.0, ans=0.1 +2024-08-03 22:05:32,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=244955.33333333334, ans=0.0 +2024-08-03 22:05:41,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=244992.0, ans=0.2 +2024-08-03 22:05:52,083 INFO [train.py:1114] (2/4) Epoch 19, batch 1300, loss[loss=0.1977, simple_loss=0.2866, pruned_loss=0.05443, over 13041.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2695, pruned_loss=0.04784, over 2631633.85 frames. ], batch size: 52, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:06:00,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.81 vs. 
limit=15.0 +2024-08-03 22:06:04,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245065.33333333334, ans=0.125 +2024-08-03 22:06:07,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.978e+01 1.097e+02 1.270e+02 1.535e+02 2.662e+02, threshold=2.541e+02, percent-clipped=5.0 +2024-08-03 22:06:13,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245102.0, ans=0.125 +2024-08-03 22:06:34,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.84 vs. limit=22.5 +2024-08-03 22:06:37,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245175.33333333334, ans=0.0 +2024-08-03 22:06:40,457 INFO [train.py:1114] (2/4) Epoch 19, batch 1350, loss[loss=0.1627, simple_loss=0.2465, pruned_loss=0.03943, over 13551.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2693, pruned_loss=0.04801, over 2639530.54 frames. ], batch size: 37, lr: 6.56e-03, grad_scale: 16.0 +2024-08-03 22:06:54,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.72 vs. limit=15.0 +2024-08-03 22:06:55,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=245248.66666666666, ans=0.0 +2024-08-03 22:07:26,150 INFO [train.py:1114] (2/4) Epoch 19, batch 1400, loss[loss=0.155, simple_loss=0.2272, pruned_loss=0.04138, over 13251.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2692, pruned_loss=0.04796, over 2643277.35 frames. ], batch size: 31, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:07:29,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.01 vs. limit=15.0 +2024-08-03 22:07:42,464 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:07:44,089 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.615e+01 1.110e+02 1.328e+02 1.668e+02 3.835e+02, threshold=2.657e+02, percent-clipped=2.0 +2024-08-03 22:07:58,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.82 vs. limit=6.0 +2024-08-03 22:07:59,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=245505.33333333334, ans=0.125 +2024-08-03 22:08:07,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=245542.0, ans=0.05 +2024-08-03 22:08:07,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=245542.0, ans=0.0 +2024-08-03 22:08:13,723 INFO [train.py:1114] (2/4) Epoch 19, batch 1450, loss[loss=0.1748, simple_loss=0.2687, pruned_loss=0.04042, over 13413.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2691, pruned_loss=0.04788, over 2642115.90 frames. 
], batch size: 43, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:08:14,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245578.66666666666, ans=0.125 +2024-08-03 22:08:15,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245578.66666666666, ans=0.1 +2024-08-03 22:08:24,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=245615.33333333334, ans=0.0 +2024-08-03 22:08:41,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=245652.0, ans=0.125 +2024-08-03 22:08:58,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245725.33333333334, ans=0.0 +2024-08-03 22:09:01,147 INFO [train.py:1114] (2/4) Epoch 19, batch 1500, loss[loss=0.1961, simple_loss=0.2828, pruned_loss=0.05467, over 13394.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2694, pruned_loss=0.04763, over 2641716.17 frames. ], batch size: 39, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:09:16,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.116e+02 1.262e+02 1.580e+02 2.631e+02, threshold=2.524e+02, percent-clipped=0.0 +2024-08-03 22:09:24,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245835.33333333334, ans=0.1 +2024-08-03 22:09:30,291 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.05 vs. limit=15.0 +2024-08-03 22:09:30,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=245872.0, ans=0.125 +2024-08-03 22:09:31,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=245872.0, ans=0.125 +2024-08-03 22:09:31,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.12 vs. limit=22.5 +2024-08-03 22:09:39,353 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.09 vs. limit=15.0 +2024-08-03 22:09:42,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.69 vs. limit=15.0 +2024-08-03 22:09:48,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=245945.33333333334, ans=0.025 +2024-08-03 22:09:48,983 INFO [train.py:1114] (2/4) Epoch 19, batch 1550, loss[loss=0.1824, simple_loss=0.2778, pruned_loss=0.04349, over 13394.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2696, pruned_loss=0.04786, over 2631371.50 frames. ], batch size: 41, lr: 6.55e-03, grad_scale: 8.0 +2024-08-03 22:09:49,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.86 vs. 
limit=22.5 +2024-08-03 22:09:51,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=245945.33333333334, ans=0.025 +2024-08-03 22:09:51,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245945.33333333334, ans=0.1 +2024-08-03 22:10:08,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=246018.66666666666, ans=0.2 +2024-08-03 22:10:29,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246092.0, ans=0.1 +2024-08-03 22:10:31,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=246092.0, ans=0.05 +2024-08-03 22:10:32,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246092.0, ans=0.125 +2024-08-03 22:10:36,200 INFO [train.py:1114] (2/4) Epoch 19, batch 1600, loss[loss=0.1888, simple_loss=0.2768, pruned_loss=0.05042, over 13581.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2697, pruned_loss=0.04826, over 2623919.81 frames. ], batch size: 39, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:10:38,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0 +2024-08-03 22:10:39,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246128.66666666666, ans=0.0 +2024-08-03 22:10:48,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=246165.33333333334, ans=12.0 +2024-08-03 22:10:49,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=246165.33333333334, ans=0.125 +2024-08-03 22:10:51,518 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.109e+02 1.298e+02 1.770e+02 3.045e+02, threshold=2.595e+02, percent-clipped=6.0 +2024-08-03 22:11:21,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=246275.33333333334, ans=0.0 +2024-08-03 22:11:23,646 INFO [train.py:1114] (2/4) Epoch 19, batch 1650, loss[loss=0.1707, simple_loss=0.2668, pruned_loss=0.03726, over 13322.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2695, pruned_loss=0.0484, over 2621048.46 frames. ], batch size: 40, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:11:27,434 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:11:32,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=246348.66666666666, ans=0.2 +2024-08-03 22:11:32,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.08 vs. 
limit=15.0 +2024-08-03 22:11:38,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=246348.66666666666, ans=0.125 +2024-08-03 22:11:40,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=246385.33333333334, ans=0.2 +2024-08-03 22:11:46,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=246385.33333333334, ans=0.0 +2024-08-03 22:12:01,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246458.66666666666, ans=0.125 +2024-08-03 22:12:05,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246458.66666666666, ans=0.1 +2024-08-03 22:12:11,718 INFO [train.py:1114] (2/4) Epoch 19, batch 1700, loss[loss=0.1487, simple_loss=0.2289, pruned_loss=0.03423, over 13266.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2693, pruned_loss=0.04808, over 2629833.61 frames. ], batch size: 31, lr: 6.55e-03, grad_scale: 16.0 +2024-08-03 22:12:21,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=246532.0, ans=0.125 +2024-08-03 22:12:23,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=12.0 +2024-08-03 22:12:27,025 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.554e+01 1.124e+02 1.376e+02 1.723e+02 2.933e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 22:12:29,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=246568.66666666666, ans=0.2 +2024-08-03 22:12:34,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=246568.66666666666, ans=0.125 +2024-08-03 22:12:35,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=246568.66666666666, ans=0.125 +2024-08-03 22:12:42,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=246605.33333333334, ans=0.2 +2024-08-03 22:12:59,185 INFO [train.py:1114] (2/4) Epoch 19, batch 1750, loss[loss=0.1507, simple_loss=0.2322, pruned_loss=0.03454, over 13519.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2686, pruned_loss=0.04755, over 2633334.71 frames. 
], batch size: 31, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:13:05,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=246678.66666666666, ans=0.125 +2024-08-03 22:13:05,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=246678.66666666666, ans=0.0 +2024-08-03 22:13:15,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=246715.33333333334, ans=0.0 +2024-08-03 22:13:18,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=246752.0, ans=0.05 +2024-08-03 22:13:18,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=246752.0, ans=0.125 +2024-08-03 22:13:23,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.03 vs. limit=6.0 +2024-08-03 22:13:36,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=246825.33333333334, ans=0.0 +2024-08-03 22:13:40,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.90 vs. limit=15.0 +2024-08-03 22:13:41,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=246825.33333333334, ans=0.125 +2024-08-03 22:13:43,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=246825.33333333334, ans=0.07 +2024-08-03 22:13:46,379 INFO [train.py:1114] (2/4) Epoch 19, batch 1800, loss[loss=0.2098, simple_loss=0.3009, pruned_loss=0.05939, over 13550.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2685, pruned_loss=0.04773, over 2634719.78 frames. ], batch size: 38, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:13:49,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.36 vs. limit=22.5 +2024-08-03 22:13:53,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.62 vs. limit=15.0 +2024-08-03 22:14:02,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.153e+02 1.366e+02 1.717e+02 2.450e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 22:14:08,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.83 vs. limit=15.0 +2024-08-03 22:14:13,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=246972.0, ans=0.0 +2024-08-03 22:14:20,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.11 vs. 
limit=22.5 +2024-08-03 22:14:22,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=247008.66666666666, ans=0.07 +2024-08-03 22:14:25,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=247008.66666666666, ans=0.125 +2024-08-03 22:14:32,248 INFO [train.py:1114] (2/4) Epoch 19, batch 1850, loss[loss=0.212, simple_loss=0.3032, pruned_loss=0.06039, over 13393.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2686, pruned_loss=0.04755, over 2637054.24 frames. ], batch size: 39, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:14:42,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247045.33333333334, ans=0.125 +2024-08-03 22:14:48,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=247082.0, ans=0.07 +2024-08-03 22:14:49,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=247082.0, ans=0.125 +2024-08-03 22:15:04,097 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.82 vs. limit=22.5 +2024-08-03 22:15:11,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=247192.0, ans=0.125 +2024-08-03 22:15:18,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=247192.0, ans=0.2 +2024-08-03 22:15:19,801 INFO [train.py:1114] (2/4) Epoch 19, batch 1900, loss[loss=0.1916, simple_loss=0.2794, pruned_loss=0.05188, over 13341.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2698, pruned_loss=0.04796, over 2639400.57 frames. ], batch size: 40, lr: 6.54e-03, grad_scale: 16.0 +2024-08-03 22:15:31,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247265.33333333334, ans=0.0 +2024-08-03 22:15:34,866 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.927e+01 1.140e+02 1.255e+02 1.731e+02 2.677e+02, threshold=2.509e+02, percent-clipped=0.0 +2024-08-03 22:15:35,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247265.33333333334, ans=0.125 +2024-08-03 22:15:40,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=247302.0, ans=15.0 +2024-08-03 22:15:42,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247302.0, ans=0.1 +2024-08-03 22:15:54,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=247338.66666666666, ans=0.125 +2024-08-03 22:16:07,335 INFO [train.py:1114] (2/4) Epoch 19, batch 1950, loss[loss=0.1797, simple_loss=0.26, pruned_loss=0.04967, over 13577.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2707, pruned_loss=0.04822, over 2646072.45 frames. 
], batch size: 36, lr: 6.53e-03, grad_scale: 16.0 +2024-08-03 22:16:07,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247412.0, ans=0.125 +2024-08-03 22:16:13,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.88 vs. limit=15.0 +2024-08-03 22:16:17,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=247448.66666666666, ans=0.2 +2024-08-03 22:16:19,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=247448.66666666666, ans=0.125 +2024-08-03 22:16:36,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247522.0, ans=0.1 +2024-08-03 22:16:36,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247522.0, ans=0.0 +2024-08-03 22:16:55,383 INFO [train.py:1114] (2/4) Epoch 19, batch 2000, loss[loss=0.1595, simple_loss=0.2402, pruned_loss=0.0394, over 13524.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2716, pruned_loss=0.04868, over 2635128.42 frames. ], batch size: 31, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:17:07,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247632.0, ans=0.0 +2024-08-03 22:17:13,007 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.093e+02 1.300e+02 1.628e+02 2.543e+02, threshold=2.600e+02, percent-clipped=1.0 +2024-08-03 22:17:23,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247668.66666666666, ans=0.1 +2024-08-03 22:17:24,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=247705.33333333334, ans=0.2 +2024-08-03 22:17:29,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247705.33333333334, ans=0.0 +2024-08-03 22:17:32,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.72 vs. limit=15.0 +2024-08-03 22:17:42,957 INFO [train.py:1114] (2/4) Epoch 19, batch 2050, loss[loss=0.1458, simple_loss=0.2277, pruned_loss=0.03194, over 13447.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2701, pruned_loss=0.04827, over 2632627.75 frames. ], batch size: 32, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:17:46,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.87 vs. 
limit=6.0 +2024-08-03 22:17:52,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247815.33333333334, ans=0.1 +2024-08-03 22:18:02,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=247852.0, ans=0.0 +2024-08-03 22:18:03,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247852.0, ans=0.125 +2024-08-03 22:18:30,934 INFO [train.py:1114] (2/4) Epoch 19, batch 2100, loss[loss=0.1952, simple_loss=0.2826, pruned_loss=0.05387, over 13550.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2694, pruned_loss=0.04792, over 2638048.46 frames. ], batch size: 37, lr: 6.53e-03, grad_scale: 32.0 +2024-08-03 22:18:32,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=247962.0, ans=0.0 +2024-08-03 22:18:46,399 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.343e+01 1.123e+02 1.234e+02 1.440e+02 2.542e+02, threshold=2.468e+02, percent-clipped=0.0 +2024-08-03 22:18:47,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.56 vs. limit=12.0 +2024-08-03 22:18:49,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248035.33333333334, ans=0.0 +2024-08-03 22:18:54,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=248035.33333333334, ans=0.0 +2024-08-03 22:18:55,523 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.69 vs. limit=22.5 +2024-08-03 22:19:03,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248072.0, ans=0.1 +2024-08-03 22:19:12,420 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:19:15,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=248108.66666666666, ans=0.125 +2024-08-03 22:19:16,683 INFO [train.py:1114] (2/4) Epoch 19, batch 2150, loss[loss=0.182, simple_loss=0.2647, pruned_loss=0.0497, over 13575.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2691, pruned_loss=0.04781, over 2646421.39 frames. ], batch size: 36, lr: 6.52e-03, grad_scale: 32.0 +2024-08-03 22:19:21,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248145.33333333334, ans=0.1 +2024-08-03 22:19:23,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.13 vs. limit=22.5 +2024-08-03 22:19:37,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.59 vs. 
limit=15.0 +2024-08-03 22:19:39,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=248218.66666666666, ans=0.125 +2024-08-03 22:19:41,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=248218.66666666666, ans=0.125 +2024-08-03 22:19:48,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=248255.33333333334, ans=0.025 +2024-08-03 22:20:02,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248292.0, ans=0.125 +2024-08-03 22:20:06,563 INFO [train.py:1114] (2/4) Epoch 19, batch 2200, loss[loss=0.1704, simple_loss=0.2622, pruned_loss=0.03932, over 13401.00 frames. ], tot_loss[loss=0.182, simple_loss=0.269, pruned_loss=0.04749, over 2644441.63 frames. ], batch size: 39, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:20:16,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=248365.33333333334, ans=0.125 +2024-08-03 22:20:17,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=248365.33333333334, ans=0.0 +2024-08-03 22:20:22,938 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.107e+02 1.433e+02 1.774e+02 2.441e+02, threshold=2.865e+02, percent-clipped=0.0 +2024-08-03 22:20:41,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.85 vs. limit=6.0 +2024-08-03 22:20:53,613 INFO [train.py:1114] (2/4) Epoch 19, batch 2250, loss[loss=0.1619, simple_loss=0.2572, pruned_loss=0.03328, over 13355.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2686, pruned_loss=0.04742, over 2641011.37 frames. ], batch size: 37, lr: 6.52e-03, grad_scale: 16.0 +2024-08-03 22:21:05,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248548.66666666666, ans=0.1 +2024-08-03 22:21:08,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=248548.66666666666, ans=0.125 +2024-08-03 22:21:10,400 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:21:28,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=248622.0, ans=10.0 +2024-08-03 22:21:35,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248658.66666666666, ans=0.1 +2024-08-03 22:21:39,615 INFO [train.py:1114] (2/4) Epoch 19, batch 2300, loss[loss=0.1492, simple_loss=0.2332, pruned_loss=0.03258, over 13573.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2674, pruned_loss=0.04709, over 2637411.51 frames. 
], batch size: 33, lr: 6.52e-03, grad_scale: 16.0
+2024-08-03 22:21:41,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=248695.33333333334, ans=15.0
+2024-08-03 22:21:49,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0
+2024-08-03 22:21:54,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=248732.0, ans=0.125
+2024-08-03 22:21:58,042 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.536e+01 1.062e+02 1.236e+02 1.586e+02 2.214e+02, threshold=2.472e+02, percent-clipped=0.0
+2024-08-03 22:21:58,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=248732.0, ans=0.2
+2024-08-03 22:22:04,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=248768.66666666666, ans=0.0
+2024-08-03 22:22:13,016 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=5.157e-03
+2024-08-03 22:22:27,299 INFO [train.py:1114] (2/4) Epoch 19, batch 2350, loss[loss=0.1832, simple_loss=0.2764, pruned_loss=0.04497, over 13549.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2679, pruned_loss=0.04698, over 2639983.61 frames. ], batch size: 38, lr: 6.52e-03, grad_scale: 16.0
+2024-08-03 22:22:28,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.47 vs. limit=10.0
+2024-08-03 22:22:40,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=248915.33333333334, ans=0.0
+2024-08-03 22:22:43,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=248915.33333333334, ans=0.125
+2024-08-03 22:22:43,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=248915.33333333334, ans=0.025
+2024-08-03 22:22:54,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=248988.66666666666, ans=0.0
+2024-08-03 22:22:58,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=248988.66666666666, ans=0.125
+2024-08-03 22:22:59,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=248988.66666666666, ans=0.025
+2024-08-03 22:23:13,910 INFO [train.py:1114] (2/4) Epoch 19, batch 2400, loss[loss=0.2009, simple_loss=0.2796, pruned_loss=0.06109, over 13531.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2687, pruned_loss=0.04738, over 2641417.82 frames. ], batch size: 35, lr: 6.51e-03, grad_scale: 32.0
+2024-08-03 22:23:20,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249062.0, ans=0.0
+2024-08-03 22:23:27,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=249098.66666666666, ans=0.2
+2024-08-03 22:23:31,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.101e+02 1.278e+02 1.688e+02 2.593e+02, threshold=2.556e+02, percent-clipped=1.0
+2024-08-03 22:23:53,886 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.11 vs. limit=15.0
+2024-08-03 22:23:57,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=249208.66666666666, ans=0.0
+2024-08-03 22:23:59,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249208.66666666666, ans=0.125
+2024-08-03 22:24:01,922 INFO [train.py:1114] (2/4) Epoch 19, batch 2450, loss[loss=0.2055, simple_loss=0.2988, pruned_loss=0.05606, over 13354.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2696, pruned_loss=0.04772, over 2631498.82 frames. ], batch size: 37, lr: 6.51e-03, grad_scale: 16.0
+2024-08-03 22:24:30,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249355.33333333334, ans=0.1
+2024-08-03 22:24:39,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.71 vs. limit=15.0
+2024-08-03 22:24:40,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249392.0, ans=0.125
+2024-08-03 22:24:49,891 INFO [train.py:1114] (2/4) Epoch 19, batch 2500, loss[loss=0.2062, simple_loss=0.299, pruned_loss=0.05673, over 13405.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2696, pruned_loss=0.04752, over 2635383.74 frames. ], batch size: 39, lr: 6.51e-03, grad_scale: 16.0
+2024-08-03 22:24:50,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=249428.66666666666, ans=0.05
+2024-08-03 22:24:55,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=249428.66666666666, ans=0.2
+2024-08-03 22:25:06,704 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.566e+01 1.106e+02 1.263e+02 1.596e+02 2.870e+02, threshold=2.527e+02, percent-clipped=4.0
+2024-08-03 22:25:13,007 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:25:34,296 INFO [train.py:1114] (2/4) Epoch 19, batch 2550, loss[loss=0.1755, simple_loss=0.2519, pruned_loss=0.04954, over 13542.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2698, pruned_loss=0.04776, over 2637166.66 frames. ], batch size: 31, lr: 6.51e-03, grad_scale: 16.0
+2024-08-03 22:25:37,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249612.0, ans=0.125
+2024-08-03 22:25:38,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=249612.0, ans=0.025
+2024-08-03 22:25:44,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=249648.66666666666, ans=0.125
+2024-08-03 22:26:02,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=249722.0, ans=0.2
+2024-08-03 22:26:03,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=249722.0, ans=0.0
+2024-08-03 22:26:03,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=249722.0, ans=0.125
+2024-08-03 22:26:07,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=249722.0, ans=0.0
+2024-08-03 22:26:10,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=249758.66666666666, ans=0.07
+2024-08-03 22:26:13,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=249758.66666666666, ans=0.0
+2024-08-03 22:26:16,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=249758.66666666666, ans=0.0
+2024-08-03 22:26:17,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=249795.33333333334, ans=0.04949747468305833
+2024-08-03 22:26:18,048 INFO [train.py:1114] (2/4) Epoch 19, batch 2600, loss[loss=0.1732, simple_loss=0.2593, pruned_loss=0.04354, over 13558.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2694, pruned_loss=0.04773, over 2635691.77 frames. ], batch size: 36, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:26:25,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.73 vs. limit=12.0
+2024-08-03 22:26:25,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=249832.0, ans=0.0
+2024-08-03 22:26:28,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0
+2024-08-03 22:26:36,437 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.660e+01 1.133e+02 1.412e+02 1.915e+02 3.004e+02, threshold=2.824e+02, percent-clipped=7.0
+2024-08-03 22:26:45,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249905.33333333334, ans=0.1
+2024-08-03 22:26:59,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.74 vs. limit=15.0
+2024-08-03 22:27:03,493 INFO [train.py:1114] (2/4) Epoch 19, batch 2650, loss[loss=0.2154, simple_loss=0.3061, pruned_loss=0.06233, over 13324.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2696, pruned_loss=0.04777, over 2639442.55 frames. ], batch size: 46, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:27:09,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249978.66666666666, ans=0.1
+2024-08-03 22:27:21,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=250052.0, ans=0.0
+2024-08-03 22:27:24,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.25 vs. limit=10.0
+2024-08-03 22:27:32,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250088.66666666666, ans=0.1
+2024-08-03 22:27:37,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.77 vs. limit=6.0
+2024-08-03 22:27:37,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.23 vs. limit=22.5
+2024-08-03 22:27:41,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250125.33333333334, ans=0.125
+2024-08-03 22:27:47,044 INFO [train.py:1114] (2/4) Epoch 19, batch 2700, loss[loss=0.1773, simple_loss=0.2688, pruned_loss=0.04285, over 13548.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2704, pruned_loss=0.04772, over 2637877.09 frames. ], batch size: 40, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:27:48,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.98 vs. limit=12.0
+2024-08-03 22:28:03,576 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.168e+01 1.091e+02 1.247e+02 1.559e+02 2.482e+02, threshold=2.495e+02, percent-clipped=0.0
+2024-08-03 22:28:18,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=250272.0, ans=0.2
+2024-08-03 22:28:20,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=250272.0, ans=0.0
+2024-08-03 22:28:21,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.99 vs. limit=15.0
+2024-08-03 22:28:30,864 INFO [train.py:1114] (2/4) Epoch 19, batch 2750, loss[loss=0.1861, simple_loss=0.2672, pruned_loss=0.0525, over 13328.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2693, pruned_loss=0.04761, over 2635721.13 frames. ], batch size: 34, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:28:40,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=250382.0, ans=0.0
+2024-08-03 22:28:43,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=250382.0, ans=0.05
+2024-08-03 22:28:53,478 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:28:53,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=250418.66666666666, ans=0.2
+2024-08-03 22:29:05,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=250492.0, ans=0.125
+2024-08-03 22:29:14,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.67 vs. limit=15.0
+2024-08-03 22:29:15,857 INFO [train.py:1114] (2/4) Epoch 19, batch 2800, loss[loss=0.2549, simple_loss=0.3191, pruned_loss=0.09537, over 9048.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2705, pruned_loss=0.04837, over 2626639.59 frames. ], batch size: 96, lr: 6.49e-03, grad_scale: 32.0
+2024-08-03 22:29:38,862 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.968e+01 1.091e+02 1.270e+02 1.499e+02 3.648e+02, threshold=2.541e+02, percent-clipped=3.0
+2024-08-03 22:29:41,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0
+2024-08-03 22:30:02,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=250675.33333333334, ans=0.025
+2024-08-03 22:30:07,681 INFO [train.py:1114] (2/4) Epoch 19, batch 2850, loss[loss=0.1798, simple_loss=0.2709, pruned_loss=0.04436, over 13365.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2707, pruned_loss=0.04856, over 2621054.65 frames. ], batch size: 35, lr: 6.49e-03, grad_scale: 32.0
+2024-08-03 22:30:15,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=250748.66666666666, ans=0.025
+2024-08-03 22:30:26,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0
+2024-08-03 22:30:47,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250858.66666666666, ans=0.125
+2024-08-03 22:30:54,420 INFO [train.py:1114] (2/4) Epoch 19, batch 2900, loss[loss=0.196, simple_loss=0.2856, pruned_loss=0.05314, over 13357.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2718, pruned_loss=0.04879, over 2631393.61 frames. ], batch size: 36, lr: 6.49e-03, grad_scale: 32.0
+2024-08-03 22:31:11,720 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.871e+01 1.142e+02 1.452e+02 2.110e+02 3.268e+02, threshold=2.903e+02, percent-clipped=11.0
+2024-08-03 22:31:12,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. limit=15.0
+2024-08-03 22:31:19,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251005.33333333334, ans=0.1
+2024-08-03 22:31:21,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=251005.33333333334, ans=0.125
+2024-08-03 22:31:24,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=251005.33333333334, ans=0.2
+2024-08-03 22:31:36,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=251042.0, ans=0.04949747468305833
+2024-08-03 22:31:38,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.36 vs. limit=15.0
+2024-08-03 22:31:39,960 INFO [train.py:1114] (2/4) Epoch 19, batch 2950, loss[loss=0.1903, simple_loss=0.278, pruned_loss=0.05131, over 13326.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2699, pruned_loss=0.04812, over 2629411.48 frames. ], batch size: 34, lr: 6.49e-03, grad_scale: 16.0
+2024-08-03 22:31:46,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251078.66666666666, ans=0.1
+2024-08-03 22:31:54,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.53 vs. limit=15.0
+2024-08-03 22:31:55,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.65 vs. limit=15.0
+2024-08-03 22:31:57,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=251152.0, ans=0.0
+2024-08-03 22:32:07,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0
+2024-08-03 22:32:07,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=251188.66666666666, ans=0.2
+2024-08-03 22:32:08,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.76 vs. limit=6.0
+2024-08-03 22:32:09,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.90 vs. limit=6.0
+2024-08-03 22:32:23,259 INFO [train.py:1114] (2/4) Epoch 19, batch 3000, loss[loss=0.1957, simple_loss=0.2782, pruned_loss=0.05663, over 13542.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2701, pruned_loss=0.04822, over 2628807.51 frames. ], batch size: 37, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:32:23,260 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 22:32:34,407 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.169, simple_loss=0.2683, pruned_loss=0.03491, over 944034.00 frames.
+2024-08-03 22:32:34,408 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 22:32:40,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0
+2024-08-03 22:32:42,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251298.66666666666, ans=0.125
+2024-08-03 22:32:51,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.09 vs. limit=22.5
+2024-08-03 22:32:51,805 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.029e+01 1.088e+02 1.228e+02 1.356e+02 2.065e+02, threshold=2.455e+02, percent-clipped=0.0
+2024-08-03 22:32:52,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251335.33333333334, ans=0.125
+2024-08-03 22:32:57,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251335.33333333334, ans=0.125
+2024-08-03 22:33:01,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=251372.0, ans=0.125
+2024-08-03 22:33:07,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.94 vs. limit=22.5
+2024-08-03 22:33:09,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=251408.66666666666, ans=0.125
+2024-08-03 22:33:17,984 INFO [train.py:1114] (2/4) Epoch 19, batch 3050, loss[loss=0.1834, simple_loss=0.2721, pruned_loss=0.0473, over 13539.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2704, pruned_loss=0.04822, over 2626189.89 frames. ], batch size: 35, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:33:19,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=251445.33333333334, ans=0.0
+2024-08-03 22:33:35,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=251518.66666666666, ans=0.0
+2024-08-03 22:33:57,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.09 vs. limit=22.5
+2024-08-03 22:34:01,090 INFO [train.py:1114] (2/4) Epoch 19, batch 3100, loss[loss=0.1841, simple_loss=0.2742, pruned_loss=0.04697, over 13316.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2709, pruned_loss=0.04853, over 2626570.78 frames. ], batch size: 46, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:34:04,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=251628.66666666666, ans=0.125
+2024-08-03 22:34:19,144 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.004e+01 1.074e+02 1.205e+02 1.545e+02 4.065e+02, threshold=2.411e+02, percent-clipped=2.0
+2024-08-03 22:34:20,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=251702.0, ans=0.125
+2024-08-03 22:34:42,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=251775.33333333334, ans=0.125
+2024-08-03 22:34:42,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=251775.33333333334, ans=0.0
+2024-08-03 22:34:46,842 INFO [train.py:1114] (2/4) Epoch 19, batch 3150, loss[loss=0.1955, simple_loss=0.2799, pruned_loss=0.0555, over 13093.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2705, pruned_loss=0.04822, over 2628233.86 frames. ], batch size: 48, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:34:49,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251812.0, ans=0.125
+2024-08-03 22:35:01,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=251848.66666666666, ans=0.125
+2024-08-03 22:35:01,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=251848.66666666666, ans=0.125
+2024-08-03 22:35:06,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=251885.33333333334, ans=0.0
+2024-08-03 22:35:12,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.85 vs. limit=6.0
+2024-08-03 22:35:13,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0
+2024-08-03 22:35:16,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0
+2024-08-03 22:35:27,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251958.66666666666, ans=0.1
+2024-08-03 22:35:29,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=251995.33333333334, ans=0.5
+2024-08-03 22:35:30,190 INFO [train.py:1114] (2/4) Epoch 19, batch 3200, loss[loss=0.1747, simple_loss=0.2554, pruned_loss=0.04696, over 13546.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2698, pruned_loss=0.04783, over 2633984.00 frames. ], batch size: 37, lr: 6.48e-03, grad_scale: 32.0
+2024-08-03 22:35:38,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252032.0, ans=0.125
+2024-08-03 22:35:40,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252032.0, ans=0.125
+2024-08-03 22:35:42,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=252032.0, ans=0.015
+2024-08-03 22:35:44,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=252032.0, ans=0.0
+2024-08-03 22:35:47,280 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.593e+01 1.152e+02 1.475e+02 1.954e+02 2.995e+02, threshold=2.949e+02, percent-clipped=9.0
+2024-08-03 22:36:00,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.02 vs. limit=10.0
+2024-08-03 22:36:01,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=252105.33333333334, ans=0.1
+2024-08-03 22:36:02,353 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.74 vs. limit=6.0
+2024-08-03 22:36:10,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252142.0, ans=0.125
+2024-08-03 22:36:13,784 INFO [train.py:1114] (2/4) Epoch 19, batch 3250, loss[loss=0.186, simple_loss=0.2777, pruned_loss=0.04716, over 13386.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2699, pruned_loss=0.04779, over 2638542.45 frames. ], batch size: 38, lr: 6.47e-03, grad_scale: 32.0
+2024-08-03 22:36:15,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=15.0
+2024-08-03 22:36:22,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=252215.33333333334, ans=0.0
+2024-08-03 22:36:28,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=252215.33333333334, ans=0.0
+2024-08-03 22:36:29,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252215.33333333334, ans=0.125
+2024-08-03 22:36:33,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=252252.0, ans=0.125
+2024-08-03 22:36:50,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=252325.33333333334, ans=0.2
+2024-08-03 22:36:57,934 INFO [train.py:1114] (2/4) Epoch 19, batch 3300, loss[loss=0.2156, simple_loss=0.2961, pruned_loss=0.06756, over 13036.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2686, pruned_loss=0.04747, over 2640509.99 frames. ], batch size: 52, lr: 6.47e-03, grad_scale: 32.0
+2024-08-03 22:36:58,373 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=12.0
+2024-08-03 22:37:14,951 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.912e+01 1.101e+02 1.274e+02 1.526e+02 2.579e+02, threshold=2.548e+02, percent-clipped=0.0
+2024-08-03 22:37:25,817 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.08 vs. limit=22.5
+2024-08-03 22:37:26,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252472.0, ans=0.125
+2024-08-03 22:37:40,617 INFO [train.py:1114] (2/4) Epoch 19, batch 3350, loss[loss=0.19, simple_loss=0.2824, pruned_loss=0.04877, over 13086.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.269, pruned_loss=0.0476, over 2631350.88 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 32.0
+2024-08-03 22:37:55,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.36 vs. limit=15.0
+2024-08-03 22:37:57,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=252618.66666666666, ans=0.0
+2024-08-03 22:38:07,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=252655.33333333334, ans=0.2
+2024-08-03 22:38:08,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252655.33333333334, ans=0.1
+2024-08-03 22:38:13,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=252655.33333333334, ans=0.0
+2024-08-03 22:38:22,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=252692.0, ans=0.125
+2024-08-03 22:38:23,951 INFO [train.py:1114] (2/4) Epoch 19, batch 3400, loss[loss=0.1799, simple_loss=0.2593, pruned_loss=0.05022, over 13502.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2683, pruned_loss=0.0473, over 2626677.93 frames. ], batch size: 31, lr: 6.47e-03, grad_scale: 16.0
+2024-08-03 22:38:24,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0
+2024-08-03 22:38:31,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=252765.33333333334, ans=0.0
+2024-08-03 22:38:31,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=252765.33333333334, ans=0.125
+2024-08-03 22:38:36,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252765.33333333334, ans=0.125
+2024-08-03 22:38:38,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.25 vs. limit=15.0
+2024-08-03 22:38:39,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.76 vs. limit=15.0
+2024-08-03 22:38:41,984 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.435e+01 1.094e+02 1.256e+02 1.561e+02 2.442e+02, threshold=2.511e+02, percent-clipped=0.0
+2024-08-03 22:38:59,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=252875.33333333334, ans=0.125
+2024-08-03 22:39:03,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252875.33333333334, ans=0.1
+2024-08-03 22:39:07,296 INFO [train.py:1114] (2/4) Epoch 19, batch 3450, loss[loss=0.2122, simple_loss=0.297, pruned_loss=0.06371, over 12940.00 frames. ], tot_loss[loss=0.182, simple_loss=0.269, pruned_loss=0.04748, over 2629410.50 frames. ], batch size: 52, lr: 6.46e-03, grad_scale: 16.0
+2024-08-03 22:39:11,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=252912.0, ans=0.0
+2024-08-03 22:39:13,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=252912.0, ans=0.07
+2024-08-03 22:39:23,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=252985.33333333334, ans=0.09899494936611666
+2024-08-03 22:39:29,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=252985.33333333334, ans=0.0
+2024-08-03 22:39:33,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=253022.0, ans=0.125
+2024-08-03 22:39:34,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0
+2024-08-03 22:39:37,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=253022.0, ans=0.025
+2024-08-03 22:39:40,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=253022.0, ans=0.025
+2024-08-03 22:39:47,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253058.66666666666, ans=0.1
+2024-08-03 22:39:48,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253058.66666666666, ans=0.125
+2024-08-03 22:39:50,112 INFO [train.py:1114] (2/4) Epoch 19, batch 3500, loss[loss=0.172, simple_loss=0.2566, pruned_loss=0.04375, over 13528.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.268, pruned_loss=0.04732, over 2631638.42 frames. ], batch size: 34, lr: 6.46e-03, grad_scale: 16.0
+2024-08-03 22:39:50,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=253095.33333333334, ans=0.0
+2024-08-03 22:39:50,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.64 vs. limit=22.5
+2024-08-03 22:39:51,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=22.5
+2024-08-03 22:39:58,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=253132.0, ans=0.125
+2024-08-03 22:40:06,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=253168.66666666666, ans=0.0
+2024-08-03 22:40:07,938 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.416e+01 1.103e+02 1.254e+02 1.609e+02 3.004e+02, threshold=2.508e+02, percent-clipped=2.0
+2024-08-03 22:40:13,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253168.66666666666, ans=0.1
+2024-08-03 22:40:33,553 INFO [train.py:1114] (2/4) Epoch 19, batch 3550, loss[loss=0.1772, simple_loss=0.2735, pruned_loss=0.04041, over 12414.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2703, pruned_loss=0.04835, over 2629726.42 frames. ], batch size: 58, lr: 6.46e-03, grad_scale: 16.0
+2024-08-03 22:40:43,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.23 vs. limit=15.0
+2024-08-03 22:40:58,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.96 vs. limit=15.0
+2024-08-03 22:40:59,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.88 vs. limit=6.0
+2024-08-03 22:41:16,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=253425.33333333334, ans=0.125
+2024-08-03 22:41:17,602 INFO [train.py:1114] (2/4) Epoch 19, batch 3600, loss[loss=0.2005, simple_loss=0.2828, pruned_loss=0.05913, over 9209.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2742, pruned_loss=0.05157, over 2488261.25 frames. ], batch size: 96, lr: 6.46e-03, grad_scale: 32.0
+2024-08-03 22:41:20,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=253462.0, ans=0.2
+2024-08-03 22:41:23,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=253462.0, ans=0.125
+2024-08-03 22:41:35,765 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.196e+02 1.277e+02 1.470e+02 2.167e+02, threshold=2.555e+02, percent-clipped=0.0
+2024-08-03 22:42:35,217 INFO [train.py:1114] (2/4) Epoch 20, batch 0, loss[loss=0.1881, simple_loss=0.2714, pruned_loss=0.05239, over 13337.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2714, pruned_loss=0.05239, over 13337.00 frames. ], batch size: 33, lr: 6.29e-03, grad_scale: 32.0
+2024-08-03 22:42:35,218 INFO [train.py:1137] (2/4) Computing validation loss
+2024-08-03 22:42:39,803 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6010, 2.8749, 2.5874, 2.7527], device='cuda:2')
+2024-08-03 22:42:45,196 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1683, simple_loss=0.2688, pruned_loss=0.0339, over 944034.00 frames.
+2024-08-03 22:42:45,196 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB
+2024-08-03 22:42:50,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253612.33333333334, ans=0.1
+2024-08-03 22:43:03,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.75 vs. limit=6.0
+2024-08-03 22:43:15,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=253722.33333333334, ans=0.125
+2024-08-03 22:43:19,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253722.33333333334, ans=0.0
+2024-08-03 22:43:23,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=253759.0, ans=0.125
+2024-08-03 22:43:30,970 INFO [train.py:1114] (2/4) Epoch 20, batch 50, loss[loss=0.1652, simple_loss=0.2488, pruned_loss=0.04083, over 13429.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2706, pruned_loss=0.04797, over 578438.84 frames. ], batch size: 32, lr: 6.29e-03, grad_scale: 32.0
+2024-08-03 22:43:40,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=253832.33333333334, ans=0.04949747468305833
+2024-08-03 22:44:02,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.939e+01 1.083e+02 1.261e+02 1.490e+02 2.691e+02, threshold=2.522e+02, percent-clipped=1.0
+2024-08-03 22:44:19,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253979.0, ans=0.0
+2024-08-03 22:44:19,844 INFO [train.py:1114] (2/4) Epoch 20, batch 100, loss[loss=0.1757, simple_loss=0.2561, pruned_loss=0.04766, over 13532.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2737, pruned_loss=0.04893, over 1025888.73 frames. ], batch size: 35, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:44:28,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0
+2024-08-03 22:44:33,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=254015.66666666666, ans=0.2
+2024-08-03 22:44:41,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=12.0
+2024-08-03 22:44:43,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=254052.33333333334, ans=0.2
+2024-08-03 22:45:07,147 INFO [train.py:1114] (2/4) Epoch 20, batch 150, loss[loss=0.1646, simple_loss=0.2444, pruned_loss=0.04242, over 13437.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2705, pruned_loss=0.04786, over 1386861.79 frames. ], batch size: 32, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:45:08,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254162.33333333334, ans=0.1
+2024-08-03 22:45:31,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=254235.66666666666, ans=0.125
+2024-08-03 22:45:32,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254235.66666666666, ans=0.1
+2024-08-03 22:45:35,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=254272.33333333334, ans=10.0
+2024-08-03 22:45:35,892 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.685e+01 1.074e+02 1.304e+02 1.730e+02 2.668e+02, threshold=2.608e+02, percent-clipped=1.0
+2024-08-03 22:45:37,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254272.33333333334, ans=0.125
+2024-08-03 22:45:43,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=254272.33333333334, ans=10.0
+2024-08-03 22:45:44,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=254309.0, ans=0.125
+2024-08-03 22:45:54,470 INFO [train.py:1114] (2/4) Epoch 20, batch 200, loss[loss=0.2021, simple_loss=0.2896, pruned_loss=0.05727, over 12585.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2696, pruned_loss=0.04738, over 1665486.23 frames. ], batch size: 58, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:46:10,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=254382.33333333334, ans=0.0
+2024-08-03 22:46:17,318 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:46:21,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=254455.66666666666, ans=0.125
+2024-08-03 22:46:21,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=254455.66666666666, ans=0.125
+2024-08-03 22:46:40,095 INFO [train.py:1114] (2/4) Epoch 20, batch 250, loss[loss=0.2037, simple_loss=0.2953, pruned_loss=0.05609, over 13295.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2695, pruned_loss=0.04733, over 1884813.10 frames. ], batch size: 46, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:46:56,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254565.66666666666, ans=0.1
+2024-08-03 22:47:05,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=254602.33333333334, ans=0.1
+2024-08-03 22:47:10,944 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.021e+01 1.100e+02 1.239e+02 1.581e+02 3.543e+02, threshold=2.478e+02, percent-clipped=3.0
+2024-08-03 22:47:12,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254639.0, ans=0.1
+2024-08-03 22:47:27,277 INFO [train.py:1114] (2/4) Epoch 20, batch 300, loss[loss=0.2049, simple_loss=0.2966, pruned_loss=0.05654, over 13451.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2685, pruned_loss=0.0467, over 2051459.26 frames. ], batch size: 42, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:47:40,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=254749.0, ans=0.1
+2024-08-03 22:47:49,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=12.0
+2024-08-03 22:48:02,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=254822.33333333334, ans=0.0
+2024-08-03 22:48:12,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=254859.0, ans=0.0
+2024-08-03 22:48:15,257 INFO [train.py:1114] (2/4) Epoch 20, batch 350, loss[loss=0.1611, simple_loss=0.2456, pruned_loss=0.03827, over 13581.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2702, pruned_loss=0.04755, over 2181900.94 frames. ], batch size: 33, lr: 6.27e-03, grad_scale: 16.0
+2024-08-03 22:48:15,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=254895.66666666666, ans=0.0
+2024-08-03 22:48:22,824 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:48:32,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=254932.33333333334, ans=0.0
+2024-08-03 22:48:33,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=254932.33333333334, ans=0.09899494936611666
+2024-08-03 22:48:39,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=254969.0, ans=0.125
+2024-08-03 22:48:46,588 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.302e+01 1.074e+02 1.204e+02 1.489e+02 2.516e+02, threshold=2.409e+02, percent-clipped=1.0
+2024-08-03 22:48:56,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255042.33333333334, ans=0.1
+2024-08-03 22:49:02,806 INFO [train.py:1114] (2/4) Epoch 20, batch 400, loss[loss=0.208, simple_loss=0.2958, pruned_loss=0.06009, over 13353.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.27, pruned_loss=0.04741, over 2285636.17 frames. ], batch size: 37, lr: 6.27e-03, grad_scale: 32.0
+2024-08-03 22:49:23,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255152.33333333334, ans=0.125
+2024-08-03 22:49:25,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.34 vs. limit=6.0
+2024-08-03 22:49:35,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=255189.0, ans=0.125
+2024-08-03 22:49:39,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255189.0, ans=0.0
+2024-08-03 22:49:47,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255225.66666666666, ans=0.1
+2024-08-03 22:49:51,089 INFO [train.py:1114] (2/4) Epoch 20, batch 450, loss[loss=0.1765, simple_loss=0.2691, pruned_loss=0.04192, over 13561.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.27, pruned_loss=0.04733, over 2359620.20 frames. ], batch size: 38, lr: 6.27e-03, grad_scale: 32.0
+2024-08-03 22:49:57,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=255262.33333333334, ans=0.125
+2024-08-03 22:50:20,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.506e+01 1.112e+02 1.300e+02 1.604e+02 2.595e+02, threshold=2.600e+02, percent-clipped=3.0
+2024-08-03 22:50:35,542 INFO [train.py:1114] (2/4) Epoch 20, batch 500, loss[loss=0.206, simple_loss=0.2941, pruned_loss=0.05892, over 13422.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2699, pruned_loss=0.04731, over 2425439.54 frames. ], batch size: 43, lr: 6.27e-03, grad_scale: 16.0
+2024-08-03 22:50:39,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.04 vs. limit=15.0
+2024-08-03 22:50:46,756 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:50:47,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255482.33333333334, ans=0.1
+2024-08-03 22:50:54,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255482.33333333334, ans=0.1
+2024-08-03 22:50:57,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.12 vs. limit=15.0
+2024-08-03 22:50:59,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=255519.0, ans=0.125
+2024-08-03 22:51:25,362 INFO [train.py:1114] (2/4) Epoch 20, batch 550, loss[loss=0.1947, simple_loss=0.2824, pruned_loss=0.05352, over 13075.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2696, pruned_loss=0.0475, over 2467738.64 frames. ], batch size: 48, lr: 6.26e-03, grad_scale: 16.0
+2024-08-03 22:51:26,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=255629.0, ans=0.05
+2024-08-03 22:51:43,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255702.33333333334, ans=0.1
+2024-08-03 22:51:46,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=255702.33333333334, ans=0.0
+2024-08-03 22:51:55,569 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.709e+01 1.108e+02 1.273e+02 1.483e+02 2.115e+02, threshold=2.547e+02, percent-clipped=0.0
+2024-08-03 22:51:58,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.23 vs. limit=15.0
+2024-08-03 22:51:58,966 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.19 vs. limit=15.0
+2024-08-03 22:52:13,742 INFO [train.py:1114] (2/4) Epoch 20, batch 600, loss[loss=0.2133, simple_loss=0.2976, pruned_loss=0.06451, over 13375.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2696, pruned_loss=0.04768, over 2507054.10 frames. ], batch size: 46, lr: 6.26e-03, grad_scale: 16.0
+2024-08-03 22:52:13,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=255812.33333333334, ans=0.0
+2024-08-03 22:52:29,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=255849.0, ans=0.025
+2024-08-03 22:52:33,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255885.66666666666, ans=0.1
+2024-08-03 22:53:01,426 INFO [train.py:1114] (2/4) Epoch 20, batch 650, loss[loss=0.1711, simple_loss=0.2582, pruned_loss=0.04205, over 13546.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2683, pruned_loss=0.04688, over 2542600.35 frames. ], batch size: 37, lr: 6.26e-03, grad_scale: 8.0
+2024-08-03 22:53:17,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=256032.33333333334, ans=0.035
+2024-08-03 22:53:32,260 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.089e+02 1.229e+02 1.482e+02 2.680e+02, threshold=2.459e+02, percent-clipped=1.0
+2024-08-03 22:53:43,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=256142.33333333334, ans=0.07
+2024-08-03 22:53:47,113 INFO [train.py:1114] (2/4) Epoch 20, batch 700, loss[loss=0.1716, simple_loss=0.2575, pruned_loss=0.04287, over 13527.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2683, pruned_loss=0.04676, over 2564485.73 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 8.0
+2024-08-03 22:53:50,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=256179.0, ans=0.0
+2024-08-03 22:53:50,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256179.0, ans=0.125
+2024-08-03 22:54:03,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256215.66666666666, ans=0.125
+2024-08-03 22:54:04,766 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:54:05,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.81 vs. limit=15.0
+2024-08-03 22:54:05,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.50 vs. limit=22.5
+2024-08-03 22:54:24,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256325.66666666666, ans=0.0
+2024-08-03 22:54:26,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256325.66666666666, ans=0.1
+2024-08-03 22:54:27,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=256325.66666666666, ans=0.125
+2024-08-03 22:54:34,717 INFO [train.py:1114] (2/4) Epoch 20, batch 750, loss[loss=0.1655, simple_loss=0.2518, pruned_loss=0.03955, over 13353.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2676, pruned_loss=0.04668, over 2581087.52 frames. ], batch size: 37, lr: 6.26e-03, grad_scale: 8.0
+2024-08-03 22:54:36,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=256362.33333333334, ans=0.07
+2024-08-03 22:54:54,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=256435.66666666666, ans=0.0
+2024-08-03 22:55:05,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=256472.33333333334, ans=0.0
+2024-08-03 22:55:08,788 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.791e+01 1.111e+02 1.273e+02 1.584e+02 2.450e+02, threshold=2.545e+02, percent-clipped=0.0
+2024-08-03 22:55:15,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256509.0, ans=0.125
+2024-08-03 22:55:22,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.03 vs. limit=15.0
+2024-08-03 22:55:23,256 INFO [train.py:1114] (2/4) Epoch 20, batch 800, loss[loss=0.1703, simple_loss=0.2571, pruned_loss=0.04178, over 13326.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04676, over 2594992.26 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0
+2024-08-03 22:55:25,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=256545.66666666666, ans=0.2
+2024-08-03 22:55:28,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=256545.66666666666, ans=0.2
+2024-08-03 22:55:36,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=256582.33333333334, ans=0.0
+2024-08-03 22:55:44,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256619.0, ans=0.125
+2024-08-03 22:55:48,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=256619.0, ans=0.0
+2024-08-03 22:55:49,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=256619.0, ans=0.0
+2024-08-03 22:55:59,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=256655.66666666666, ans=0.2
+2024-08-03 22:56:00,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.74 vs. limit=15.0
+2024-08-03 22:56:05,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=256692.33333333334, ans=0.2
+2024-08-03 22:56:10,725 INFO [train.py:1114] (2/4) Epoch 20, batch 850, loss[loss=0.206, simple_loss=0.2976, pruned_loss=0.05722, over 13326.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2679, pruned_loss=0.04674, over 2608041.84 frames. ], batch size: 40, lr: 6.25e-03, grad_scale: 16.0
+2024-08-03 22:56:10,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=256729.0, ans=0.025
+2024-08-03 22:56:24,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256765.66666666666, ans=0.1
+2024-08-03 22:56:37,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0
+2024-08-03 22:56:44,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.111e+01 1.084e+02 1.248e+02 1.708e+02 3.125e+02, threshold=2.496e+02, percent-clipped=3.0
+2024-08-03 22:56:48,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=256839.0, ans=0.025
+2024-08-03 22:56:51,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=256875.66666666666, ans=0.0
+2024-08-03 22:56:52,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0
+2024-08-03 22:56:58,775 INFO [train.py:1114] (2/4) Epoch 20, batch 900, loss[loss=0.1491, simple_loss=0.2396, pruned_loss=0.02932, over 13347.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2682, pruned_loss=0.04716, over 2611639.35 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0
+2024-08-03 22:57:22,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256985.66666666666, ans=0.125
+2024-08-03 22:57:27,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257022.33333333334, ans=0.125
+2024-08-03 22:57:28,098 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.59 vs. limit=22.5
+2024-08-03 22:57:34,864 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:57:39,249 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:57:43,695 INFO [train.py:1114] (2/4) Epoch 20, batch 950, loss[loss=0.1613, simple_loss=0.2488, pruned_loss=0.03687, over 13538.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2685, pruned_loss=0.04716, over 2612712.78 frames. ], batch size: 34, lr: 6.25e-03, grad_scale: 16.0
+2024-08-03 22:57:46,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257095.66666666666, ans=0.125
+2024-08-03 22:57:47,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257095.66666666666, ans=0.1
+2024-08-03 22:57:51,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=257095.66666666666, ans=0.2
+2024-08-03 22:57:55,165 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:58:03,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257169.0, ans=0.0
+2024-08-03 22:58:03,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257169.0, ans=0.1
+2024-08-03 22:58:04,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=257169.0, ans=0.125
+2024-08-03 22:58:08,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=257169.0, ans=0.2
+2024-08-03 22:58:16,416 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.792e+01 1.096e+02 1.434e+02 1.875e+02 2.963e+02, threshold=2.868e+02, percent-clipped=4.0
+2024-08-03 22:58:22,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=257242.33333333334, ans=0.125
+2024-08-03 22:58:32,892 INFO [train.py:1114] (2/4) Epoch 20, batch 1000, loss[loss=0.2031, simple_loss=0.2789, pruned_loss=0.06367, over 13357.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2696, pruned_loss=0.04745, over 2611776.74 frames. ], batch size: 35, lr: 6.24e-03, grad_scale: 16.0
+2024-08-03 22:58:52,894 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.33 vs. limit=15.0
+2024-08-03 22:59:00,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257389.0, ans=0.125
+2024-08-03 22:59:16,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. limit=6.0
+2024-08-03 22:59:16,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=257425.66666666666, ans=0.0
+2024-08-03 22:59:21,334 INFO [train.py:1114] (2/4) Epoch 20, batch 1050, loss[loss=0.1916, simple_loss=0.2799, pruned_loss=0.05169, over 13572.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.269, pruned_loss=0.04759, over 2616401.16 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0
+2024-08-03 22:59:39,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.00 vs. limit=15.0
+2024-08-03 22:59:45,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=257535.66666666666, ans=0.0
+2024-08-03 22:59:52,247 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.716e+01 1.110e+02 1.251e+02 1.540e+02 2.508e+02, threshold=2.503e+02, percent-clipped=0.0
+2024-08-03 22:59:55,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=257572.33333333334, ans=0.0
+2024-08-03 23:00:00,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257609.0, ans=0.125
+2024-08-03 23:00:03,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=257609.0, ans=0.125
+2024-08-03 23:00:03,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=257609.0, ans=0.2
+2024-08-03 23:00:08,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.08 vs. limit=15.0
+2024-08-03 23:00:08,983 INFO [train.py:1114] (2/4) Epoch 20, batch 1100, loss[loss=0.1758, simple_loss=0.2639, pruned_loss=0.04381, over 13562.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2679, pruned_loss=0.04686, over 2619761.97 frames. ], batch size: 36, lr: 6.24e-03, grad_scale: 16.0
+2024-08-03 23:00:39,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.00 vs. limit=10.0
+2024-08-03 23:00:54,180 INFO [train.py:1114] (2/4) Epoch 20, batch 1150, loss[loss=0.1952, simple_loss=0.2749, pruned_loss=0.05774, over 13577.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2678, pruned_loss=0.04715, over 2619060.48 frames. ], batch size: 36, lr: 6.24e-03, grad_scale: 16.0
+2024-08-03 23:01:16,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=257902.33333333334, ans=0.2
+2024-08-03 23:01:19,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.12 vs. limit=15.0
+2024-08-03 23:01:22,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257939.0, ans=0.125
+2024-08-03 23:01:25,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.84 vs. limit=6.0
+2024-08-03 23:01:26,023 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.874e+01 1.110e+02 1.277e+02 1.674e+02 2.760e+02, threshold=2.554e+02, percent-clipped=1.0
+2024-08-03 23:01:29,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=257939.0, ans=0.0
+2024-08-03 23:01:39,684 INFO [train.py:1114] (2/4) Epoch 20, batch 1200, loss[loss=0.1944, simple_loss=0.2866, pruned_loss=0.05112, over 13587.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.269, pruned_loss=0.04771, over 2616650.72 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0
+2024-08-03 23:01:45,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=258012.33333333334, ans=0.125
+2024-08-03 23:01:46,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=258012.33333333334, ans=0.2
+2024-08-03 23:01:47,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.20 vs. limit=15.0
+2024-08-03 23:01:54,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=258049.0, ans=0.0
+2024-08-03 23:02:28,511 INFO [train.py:1114] (2/4) Epoch 20, batch 1250, loss[loss=0.2228, simple_loss=0.2998, pruned_loss=0.07288, over 13446.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2688, pruned_loss=0.04736, over 2628206.89 frames. ], batch size: 42, lr: 6.23e-03, grad_scale: 16.0
+2024-08-03 23:02:28,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258195.66666666666, ans=0.1
+2024-08-03 23:02:33,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.68 vs. limit=12.0
+2024-08-03 23:02:34,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258195.66666666666, ans=0.125
+2024-08-03 23:03:00,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=258305.66666666666, ans=0.125
+2024-08-03 23:03:00,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.63 vs. limit=15.0
+2024-08-03 23:03:00,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.35 vs.
limit=10.0 +2024-08-03 23:03:01,938 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.994e+01 1.078e+02 1.315e+02 1.640e+02 2.788e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 23:03:05,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=258342.33333333334, ans=0.125 +2024-08-03 23:03:15,618 INFO [train.py:1114] (2/4) Epoch 20, batch 1300, loss[loss=0.1834, simple_loss=0.275, pruned_loss=0.04593, over 12935.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2682, pruned_loss=0.04722, over 2630551.52 frames. ], batch size: 52, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:03:16,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=258379.0, ans=0.125 +2024-08-03 23:03:20,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.90 vs. limit=15.0 +2024-08-03 23:03:24,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=258415.66666666666, ans=0.125 +2024-08-03 23:03:27,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.91 vs. limit=15.0 +2024-08-03 23:03:33,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=258452.33333333334, ans=0.05 +2024-08-03 23:03:51,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=258489.0, ans=0.125 +2024-08-03 23:03:53,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=258525.66666666666, ans=0.125 +2024-08-03 23:04:02,910 INFO [train.py:1114] (2/4) Epoch 20, batch 1350, loss[loss=0.1931, simple_loss=0.2691, pruned_loss=0.0585, over 13564.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2673, pruned_loss=0.04691, over 2638440.15 frames. 
], batch size: 37, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:04,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258562.33333333334, ans=0.1 +2024-08-03 23:04:15,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=258599.0, ans=0.2 +2024-08-03 23:04:15,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=258599.0, ans=0.125 +2024-08-03 23:04:28,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=258635.66666666666, ans=0.125 +2024-08-03 23:04:34,294 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.766e+01 1.164e+02 1.354e+02 1.727e+02 2.558e+02, threshold=2.707e+02, percent-clipped=0.0 +2024-08-03 23:04:37,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=258672.33333333334, ans=0.0 +2024-08-03 23:04:37,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=258672.33333333334, ans=0.2 +2024-08-03 23:04:41,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=258709.0, ans=0.125 +2024-08-03 23:04:47,931 INFO [train.py:1114] (2/4) Epoch 20, batch 1400, loss[loss=0.1664, simple_loss=0.2409, pruned_loss=0.04596, over 13248.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2671, pruned_loss=0.04667, over 2642069.73 frames. ], batch size: 31, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:56,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=258782.33333333334, ans=0.0 +2024-08-03 23:05:04,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=258782.33333333334, ans=0.0 +2024-08-03 23:05:05,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258819.0, ans=0.125 +2024-08-03 23:05:07,604 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:05:08,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0 +2024-08-03 23:05:20,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=258855.66666666666, ans=0.05 +2024-08-03 23:05:28,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.81 vs. limit=10.0 +2024-08-03 23:05:35,406 INFO [train.py:1114] (2/4) Epoch 20, batch 1450, loss[loss=0.1932, simple_loss=0.2847, pruned_loss=0.05092, over 13425.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.0468, over 2640648.43 frames. 
], batch size: 43, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:05:36,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=258929.0, ans=0.035 +2024-08-03 23:05:41,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=258929.0, ans=0.125 +2024-08-03 23:05:48,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=258965.66666666666, ans=0.125 +2024-08-03 23:05:53,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=258965.66666666666, ans=0.2 +2024-08-03 23:06:06,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=259039.0, ans=0.2 +2024-08-03 23:06:08,776 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.191e+01 1.099e+02 1.241e+02 1.472e+02 2.601e+02, threshold=2.481e+02, percent-clipped=0.0 +2024-08-03 23:06:10,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=259039.0, ans=0.0 +2024-08-03 23:06:10,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=259039.0, ans=0.0 +2024-08-03 23:06:22,418 INFO [train.py:1114] (2/4) Epoch 20, batch 1500, loss[loss=0.1844, simple_loss=0.2834, pruned_loss=0.04267, over 13403.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2685, pruned_loss=0.04698, over 2640962.70 frames. ], batch size: 39, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:06:22,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259112.33333333334, ans=0.1 +2024-08-03 23:06:25,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=259112.33333333334, ans=0.2 +2024-08-03 23:06:36,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259149.0, ans=0.1 +2024-08-03 23:06:41,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=259149.0, ans=0.2 +2024-08-03 23:06:51,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.81 vs. limit=10.0 +2024-08-03 23:06:52,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259222.33333333334, ans=0.125 +2024-08-03 23:06:57,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=259222.33333333334, ans=0.125 +2024-08-03 23:07:05,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=259259.0, ans=0.2 +2024-08-03 23:07:06,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259259.0, ans=0.1 +2024-08-03 23:07:12,330 INFO [train.py:1114] (2/4) Epoch 20, batch 1550, loss[loss=0.1896, simple_loss=0.2806, pruned_loss=0.04927, over 13386.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2683, pruned_loss=0.04692, over 2630800.96 frames. 
], batch size: 41, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:07:15,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259295.66666666666, ans=0.125 +2024-08-03 23:07:24,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=259332.33333333334, ans=0.025 +2024-08-03 23:07:39,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=259369.0, ans=0.5 +2024-08-03 23:07:39,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259405.66666666666, ans=0.1 +2024-08-03 23:07:43,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=259405.66666666666, ans=10.0 +2024-08-03 23:07:45,376 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.100e+02 1.241e+02 1.619e+02 2.779e+02, threshold=2.482e+02, percent-clipped=4.0 +2024-08-03 23:07:47,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=259405.66666666666, ans=0.0 +2024-08-03 23:07:48,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259405.66666666666, ans=0.1 +2024-08-03 23:07:50,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=259442.33333333334, ans=0.125 +2024-08-03 23:07:59,014 INFO [train.py:1114] (2/4) Epoch 20, batch 1600, loss[loss=0.1719, simple_loss=0.2649, pruned_loss=0.03943, over 13572.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.268, pruned_loss=0.04683, over 2624036.62 frames. ], batch size: 39, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:12,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=259515.66666666666, ans=0.0 +2024-08-03 23:08:14,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259515.66666666666, ans=0.125 +2024-08-03 23:08:22,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259552.33333333334, ans=0.1 +2024-08-03 23:08:34,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=259589.0, ans=0.04949747468305833 +2024-08-03 23:08:34,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=15.0 +2024-08-03 23:08:41,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259625.66666666666, ans=0.1 +2024-08-03 23:08:45,012 INFO [train.py:1114] (2/4) Epoch 20, batch 1650, loss[loss=0.1831, simple_loss=0.2797, pruned_loss=0.04324, over 13320.00 frames. ], tot_loss[loss=0.181, simple_loss=0.268, pruned_loss=0.047, over 2620665.61 frames. 
], batch size: 40, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:46,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=259662.33333333334, ans=0.125 +2024-08-03 23:09:11,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=259735.66666666666, ans=0.0 +2024-08-03 23:09:16,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=259772.33333333334, ans=0.0 +2024-08-03 23:09:18,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=259772.33333333334, ans=0.2 +2024-08-03 23:09:20,927 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.088e+02 1.223e+02 1.648e+02 3.559e+02, threshold=2.446e+02, percent-clipped=8.0 +2024-08-03 23:09:23,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=259772.33333333334, ans=0.2 +2024-08-03 23:09:24,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=259772.33333333334, ans=0.0 +2024-08-03 23:09:34,467 INFO [train.py:1114] (2/4) Epoch 20, batch 1700, loss[loss=0.1543, simple_loss=0.2312, pruned_loss=0.0387, over 13266.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2677, pruned_loss=0.04662, over 2629557.73 frames. ], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:09:34,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259845.66666666666, ans=0.125 +2024-08-03 23:09:41,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.70 vs. limit=15.0 +2024-08-03 23:09:48,694 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.69 vs. limit=15.0 +2024-08-03 23:09:49,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=259882.33333333334, ans=0.0 +2024-08-03 23:09:54,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.65 vs. limit=12.0 +2024-08-03 23:10:17,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=259992.33333333334, ans=0.07 +2024-08-03 23:10:17,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.49 vs. limit=15.0 +2024-08-03 23:10:17,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.64 vs. limit=10.0 +2024-08-03 23:10:21,422 INFO [train.py:1114] (2/4) Epoch 20, batch 1750, loss[loss=0.1709, simple_loss=0.2577, pruned_loss=0.04205, over 13566.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2676, pruned_loss=0.04665, over 2633339.92 frames. 
], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:10:36,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260065.66666666666, ans=0.1 +2024-08-03 23:10:37,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.78 vs. limit=15.0 +2024-08-03 23:10:51,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=260139.0, ans=0.1 +2024-08-03 23:10:54,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260139.0, ans=0.0 +2024-08-03 23:10:54,928 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.991e+01 1.099e+02 1.220e+02 1.451e+02 2.480e+02, threshold=2.439e+02, percent-clipped=1.0 +2024-08-03 23:11:08,880 INFO [train.py:1114] (2/4) Epoch 20, batch 1800, loss[loss=0.1977, simple_loss=0.2852, pruned_loss=0.05507, over 13543.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2676, pruned_loss=0.04676, over 2634995.03 frames. ], batch size: 38, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:09,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0 +2024-08-03 23:11:28,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.44 vs. limit=15.0 +2024-08-03 23:11:28,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260285.66666666666, ans=0.125 +2024-08-03 23:11:30,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=260285.66666666666, ans=0.0 +2024-08-03 23:11:53,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=260395.66666666666, ans=0.025 +2024-08-03 23:11:54,206 INFO [train.py:1114] (2/4) Epoch 20, batch 1850, loss[loss=0.2011, simple_loss=0.29, pruned_loss=0.0561, over 13407.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2672, pruned_loss=0.04663, over 2637037.83 frames. 
], batch size: 39, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:56,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260395.66666666666, ans=0.125 +2024-08-03 23:11:56,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260395.66666666666, ans=0.1 +2024-08-03 23:12:00,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=260395.66666666666, ans=0.125 +2024-08-03 23:12:14,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260469.0, ans=0.0 +2024-08-03 23:12:17,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260469.0, ans=0.0 +2024-08-03 23:12:19,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=260469.0, ans=0.125 +2024-08-03 23:12:20,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=260469.0, ans=0.0 +2024-08-03 23:12:26,243 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.233e+01 1.108e+02 1.354e+02 1.892e+02 2.843e+02, threshold=2.709e+02, percent-clipped=7.0 +2024-08-03 23:12:44,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=260505.66666666666, ans=0.125 +2024-08-03 23:12:56,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260542.33333333334, ans=0.125 +2024-08-03 23:12:58,317 INFO [train.py:1114] (2/4) Epoch 20, batch 1900, loss[loss=0.1944, simple_loss=0.2817, pruned_loss=0.05353, over 13306.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.268, pruned_loss=0.04693, over 2639044.03 frames. ], batch size: 40, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:01,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=260579.0, ans=0.125 +2024-08-03 23:13:03,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=260579.0, ans=0.125 +2024-08-03 23:13:07,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.36 vs. limit=6.0 +2024-08-03 23:13:23,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260652.33333333334, ans=0.1 +2024-08-03 23:13:28,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260689.0, ans=0.125 +2024-08-03 23:13:37,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.97 vs. limit=6.0 +2024-08-03 23:13:45,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=260762.33333333334, ans=0.125 +2024-08-03 23:13:45,869 INFO [train.py:1114] (2/4) Epoch 20, batch 1950, loss[loss=0.1771, simple_loss=0.26, pruned_loss=0.04711, over 13559.00 frames. 
], tot_loss[loss=0.1824, simple_loss=0.2696, pruned_loss=0.04756, over 2645577.16 frames. ], batch size: 36, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:45,992 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:13:58,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.88 vs. limit=22.5 +2024-08-03 23:14:05,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=260835.66666666666, ans=0.125 +2024-08-03 23:14:06,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=260835.66666666666, ans=0.0 +2024-08-03 23:14:10,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260835.66666666666, ans=0.125 +2024-08-03 23:14:15,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=260872.33333333334, ans=0.95 +2024-08-03 23:14:19,448 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.342e+01 1.097e+02 1.231e+02 1.483e+02 2.195e+02, threshold=2.462e+02, percent-clipped=0.0 +2024-08-03 23:14:25,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260909.0, ans=0.1 +2024-08-03 23:14:35,084 INFO [train.py:1114] (2/4) Epoch 20, batch 2000, loss[loss=0.1671, simple_loss=0.2443, pruned_loss=0.04497, over 13547.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2696, pruned_loss=0.04751, over 2635672.95 frames. ], batch size: 31, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:14:41,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=260945.66666666666, ans=0.0 +2024-08-03 23:14:46,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260982.33333333334, ans=0.0 +2024-08-03 23:14:52,548 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:14:58,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261019.0, ans=0.125 +2024-08-03 23:15:00,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261019.0, ans=0.0 +2024-08-03 23:15:03,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261055.66666666666, ans=0.0 +2024-08-03 23:15:20,640 INFO [train.py:1114] (2/4) Epoch 20, batch 2050, loss[loss=0.1541, simple_loss=0.244, pruned_loss=0.03206, over 13415.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2691, pruned_loss=0.04759, over 2633348.80 frames. ], batch size: 32, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:15:21,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0 +2024-08-03 23:15:30,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.28 vs. 
limit=15.0 +2024-08-03 23:15:42,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.99 vs. limit=15.0 +2024-08-03 23:15:44,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261202.33333333334, ans=0.125 +2024-08-03 23:15:52,100 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.005e+01 1.074e+02 1.228e+02 1.423e+02 2.984e+02, threshold=2.455e+02, percent-clipped=1.0 +2024-08-03 23:15:54,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.12 vs. limit=22.5 +2024-08-03 23:16:05,626 INFO [train.py:1114] (2/4) Epoch 20, batch 2100, loss[loss=0.1746, simple_loss=0.2612, pruned_loss=0.04404, over 13538.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2684, pruned_loss=0.04729, over 2639073.25 frames. ], batch size: 37, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:16:07,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=261312.33333333334, ans=0.04949747468305833 +2024-08-03 23:16:12,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261312.33333333334, ans=0.125 +2024-08-03 23:16:22,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.63 vs. limit=22.5 +2024-08-03 23:16:31,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261385.66666666666, ans=0.125 +2024-08-03 23:16:31,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-08-03 23:16:37,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=261422.33333333334, ans=0.2 +2024-08-03 23:16:54,282 INFO [train.py:1114] (2/4) Epoch 20, batch 2150, loss[loss=0.1851, simple_loss=0.2734, pruned_loss=0.04845, over 13544.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2677, pruned_loss=0.04691, over 2648222.37 frames. ], batch size: 36, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:16:59,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=261495.66666666666, ans=0.125 +2024-08-03 23:17:06,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=261532.33333333334, ans=0.2 +2024-08-03 23:17:08,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=261532.33333333334, ans=0.0 +2024-08-03 23:17:24,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=261605.66666666666, ans=0.04949747468305833 +2024-08-03 23:17:26,353 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.602e+01 1.138e+02 1.478e+02 2.029e+02 3.755e+02, threshold=2.955e+02, percent-clipped=14.0 +2024-08-03 23:17:34,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.71 vs. 
limit=22.5 +2024-08-03 23:17:34,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=261642.33333333334, ans=0.2 +2024-08-03 23:17:38,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=261642.33333333334, ans=10.0 +2024-08-03 23:17:38,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=261642.33333333334, ans=0.025 +2024-08-03 23:17:41,767 INFO [train.py:1114] (2/4) Epoch 20, batch 2200, loss[loss=0.174, simple_loss=0.2701, pruned_loss=0.03894, over 13400.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.0468, over 2646065.94 frames. ], batch size: 39, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:17:43,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=261679.0, ans=0.0 +2024-08-03 23:18:14,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=261789.0, ans=0.0 +2024-08-03 23:18:16,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.11 vs. limit=6.0 +2024-08-03 23:18:19,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=261789.0, ans=0.0 +2024-08-03 23:18:31,226 INFO [train.py:1114] (2/4) Epoch 20, batch 2250, loss[loss=0.1683, simple_loss=0.2585, pruned_loss=0.0391, over 13351.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2676, pruned_loss=0.04663, over 2643135.99 frames. ], batch size: 37, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:18:38,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261862.33333333334, ans=0.125 +2024-08-03 23:18:50,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=261935.66666666666, ans=0.2 +2024-08-03 23:18:50,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.49 vs. limit=15.0 +2024-08-03 23:18:52,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=261935.66666666666, ans=0.125 +2024-08-03 23:19:02,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=261972.33333333334, ans=0.025 +2024-08-03 23:19:02,739 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.914e+01 1.098e+02 1.238e+02 1.485e+02 2.172e+02, threshold=2.476e+02, percent-clipped=0.0 +2024-08-03 23:19:07,631 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:19:12,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=262009.0, ans=0.09899494936611666 +2024-08-03 23:19:13,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=262009.0, ans=0.2 +2024-08-03 23:19:16,426 INFO [train.py:1114] (2/4) Epoch 20, batch 2300, loss[loss=0.1685, simple_loss=0.2558, pruned_loss=0.04061, over 13598.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2662, pruned_loss=0.0463, over 2639300.37 frames. 
], batch size: 33, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:19:22,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=262045.66666666666, ans=0.125 +2024-08-03 23:19:34,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=15.0 +2024-08-03 23:19:37,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.23 vs. limit=15.0 +2024-08-03 23:19:56,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=262192.3333333333, ans=0.2 +2024-08-03 23:20:01,871 INFO [train.py:1114] (2/4) Epoch 20, batch 2350, loss[loss=0.1973, simple_loss=0.2841, pruned_loss=0.05522, over 13541.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2665, pruned_loss=0.0463, over 2641766.51 frames. ], batch size: 38, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:20:03,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262229.0, ans=0.125 +2024-08-03 23:20:07,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=22.5 +2024-08-03 23:20:26,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262302.3333333333, ans=0.1 +2024-08-03 23:20:38,437 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.532e+01 1.125e+02 1.356e+02 1.575e+02 2.756e+02, threshold=2.712e+02, percent-clipped=1.0 +2024-08-03 23:20:48,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=262375.6666666667, ans=0.5 +2024-08-03 23:20:52,238 INFO [train.py:1114] (2/4) Epoch 20, batch 2400, loss[loss=0.1767, simple_loss=0.2625, pruned_loss=0.04545, over 13531.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2675, pruned_loss=0.04683, over 2643076.43 frames. 
], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:20:56,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262412.3333333333, ans=0.1 +2024-08-03 23:20:56,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262412.3333333333, ans=0.0 +2024-08-03 23:21:05,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262449.0, ans=0.0 +2024-08-03 23:21:13,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262485.6666666667, ans=0.125 +2024-08-03 23:21:22,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262522.3333333333, ans=0.125 +2024-08-03 23:21:26,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=262522.3333333333, ans=0.025 +2024-08-03 23:21:27,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262522.3333333333, ans=0.1 +2024-08-03 23:21:41,784 INFO [train.py:1114] (2/4) Epoch 20, batch 2450, loss[loss=0.1913, simple_loss=0.2779, pruned_loss=0.05234, over 13358.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2685, pruned_loss=0.04724, over 2632827.60 frames. ], batch size: 37, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:21:42,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=262595.6666666667, ans=0.125 +2024-08-03 23:21:49,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262595.6666666667, ans=0.1 +2024-08-03 23:21:49,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=262632.3333333333, ans=0.2 +2024-08-03 23:22:12,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262705.6666666667, ans=0.125 +2024-08-03 23:22:13,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=262705.6666666667, ans=0.125 +2024-08-03 23:22:14,229 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.032e+01 1.119e+02 1.273e+02 1.496e+02 2.494e+02, threshold=2.546e+02, percent-clipped=0.0 +2024-08-03 23:22:27,158 INFO [train.py:1114] (2/4) Epoch 20, batch 2500, loss[loss=0.182, simple_loss=0.2707, pruned_loss=0.04666, over 13405.00 frames. ], tot_loss[loss=0.181, simple_loss=0.268, pruned_loss=0.04697, over 2636886.67 frames. 
], batch size: 39, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:22:32,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262779.0, ans=0.125 +2024-08-03 23:22:34,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262779.0, ans=0.125 +2024-08-03 23:22:36,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262815.6666666667, ans=0.125 +2024-08-03 23:22:45,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=262852.3333333333, ans=0.125 +2024-08-03 23:22:50,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262852.3333333333, ans=0.0 +2024-08-03 23:23:11,080 INFO [train.py:1114] (2/4) Epoch 20, batch 2550, loss[loss=0.1669, simple_loss=0.2421, pruned_loss=0.04586, over 13524.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2675, pruned_loss=0.0467, over 2638246.33 frames. ], batch size: 31, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:23:22,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262999.0, ans=0.1 +2024-08-03 23:23:27,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=263035.6666666667, ans=0.0 +2024-08-03 23:23:42,183 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.295e+01 1.081e+02 1.432e+02 1.962e+02 3.343e+02, threshold=2.864e+02, percent-clipped=8.0 +2024-08-03 23:23:44,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=263072.3333333333, ans=0.05 +2024-08-03 23:23:54,451 INFO [train.py:1114] (2/4) Epoch 20, batch 2600, loss[loss=0.1658, simple_loss=0.2528, pruned_loss=0.0394, over 13561.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2678, pruned_loss=0.04673, over 2636486.59 frames. ], batch size: 36, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:23:58,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263145.6666666667, ans=0.125 +2024-08-03 23:24:20,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=263255.6666666667, ans=0.125 +2024-08-03 23:24:23,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=263255.6666666667, ans=0.125 +2024-08-03 23:24:23,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=263255.6666666667, ans=0.0 +2024-08-03 23:24:32,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=263292.3333333333, ans=0.125 +2024-08-03 23:24:38,448 INFO [train.py:1114] (2/4) Epoch 20, batch 2650, loss[loss=0.1985, simple_loss=0.2875, pruned_loss=0.05474, over 13282.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2679, pruned_loss=0.04682, over 2639115.92 frames. 
], batch size: 46, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:25:05,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=263439.0, ans=0.2 +2024-08-03 23:25:09,825 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.115e+02 1.313e+02 1.651e+02 2.845e+02, threshold=2.627e+02, percent-clipped=0.0 +2024-08-03 23:25:13,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=263475.6666666667, ans=0.0 +2024-08-03 23:25:21,921 INFO [train.py:1114] (2/4) Epoch 20, batch 2700, loss[loss=0.1911, simple_loss=0.2798, pruned_loss=0.05122, over 13558.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2685, pruned_loss=0.04683, over 2636566.69 frames. ], batch size: 40, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:25:32,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=263549.0, ans=0.2 +2024-08-03 23:25:37,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=12.0 +2024-08-03 23:25:41,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.76 vs. limit=15.0 +2024-08-03 23:26:05,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=263659.0, ans=0.025 +2024-08-03 23:26:08,057 INFO [train.py:1114] (2/4) Epoch 20, batch 2750, loss[loss=0.1748, simple_loss=0.2575, pruned_loss=0.04605, over 13320.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2677, pruned_loss=0.04688, over 2633964.10 frames. ], batch size: 34, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:26:26,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=263769.0, ans=0.05 +2024-08-03 23:26:39,457 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.617e+01 1.130e+02 1.320e+02 1.634e+02 2.919e+02, threshold=2.640e+02, percent-clipped=4.0 +2024-08-03 23:26:44,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=263842.3333333333, ans=0.125 +2024-08-03 23:26:46,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.06 vs. limit=22.5 +2024-08-03 23:26:51,629 INFO [train.py:1114] (2/4) Epoch 20, batch 2800, loss[loss=0.2461, simple_loss=0.3121, pruned_loss=0.0901, over 8816.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2681, pruned_loss=0.04725, over 2625792.01 frames. ], batch size: 98, lr: 6.17e-03, grad_scale: 32.0 +2024-08-03 23:27:18,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-08-03 23:27:25,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=263989.0, ans=0.2 +2024-08-03 23:27:31,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0 +2024-08-03 23:27:37,562 INFO [train.py:1114] (2/4) Epoch 20, batch 2850, loss[loss=0.156, simple_loss=0.2423, pruned_loss=0.03489, over 13359.00 frames. 
], tot_loss[loss=0.1812, simple_loss=0.2681, pruned_loss=0.04717, over 2620157.76 frames. ], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:27:48,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=264099.0, ans=0.0 +2024-08-03 23:28:02,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.96 vs. limit=15.0 +2024-08-03 23:28:08,784 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.256e+01 1.084e+02 1.260e+02 1.608e+02 3.133e+02, threshold=2.519e+02, percent-clipped=4.0 +2024-08-03 23:28:09,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. limit=10.0 +2024-08-03 23:28:17,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.74 vs. limit=15.0 +2024-08-03 23:28:22,047 INFO [train.py:1114] (2/4) Epoch 20, batch 2900, loss[loss=0.1675, simple_loss=0.2533, pruned_loss=0.04084, over 13365.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2693, pruned_loss=0.04726, over 2631120.51 frames. ], batch size: 36, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:28:44,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264319.0, ans=0.125 +2024-08-03 23:28:47,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=264355.6666666667, ans=0.0 +2024-08-03 23:29:00,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=22.5 +2024-08-03 23:29:05,317 INFO [train.py:1114] (2/4) Epoch 20, batch 2950, loss[loss=0.1826, simple_loss=0.2624, pruned_loss=0.0514, over 13323.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2674, pruned_loss=0.04659, over 2629215.42 frames. ], batch size: 34, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:17,520 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=12.0 +2024-08-03 23:29:30,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.05 vs. limit=15.0 +2024-08-03 23:29:33,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264539.0, ans=0.125 +2024-08-03 23:29:33,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.92 vs. 
limit=15.0 +2024-08-03 23:29:37,836 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.731e+01 1.151e+02 1.380e+02 1.780e+02 2.510e+02, threshold=2.761e+02, percent-clipped=0.0 +2024-08-03 23:29:40,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=264575.6666666667, ans=0.05 +2024-08-03 23:29:48,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=264575.6666666667, ans=0.125 +2024-08-03 23:29:49,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=264612.3333333333, ans=0.5 +2024-08-03 23:29:49,937 INFO [train.py:1114] (2/4) Epoch 20, batch 3000, loss[loss=0.1516, simple_loss=0.2492, pruned_loss=0.02703, over 13556.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2677, pruned_loss=0.04706, over 2629489.68 frames. ], batch size: 37, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:49,938 INFO [train.py:1137] (2/4) Computing validation loss +2024-08-03 23:29:58,511 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.3978, 4.0298, 3.7665, 3.5063], device='cuda:2') +2024-08-03 23:29:59,831 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1683, simple_loss=0.267, pruned_loss=0.03482, over 944034.00 frames. +2024-08-03 23:29:59,832 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 10000MB +2024-08-03 23:30:05,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.01 vs. limit=22.5 +2024-08-03 23:30:06,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.83 vs. limit=10.0 +2024-08-03 23:30:07,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=264612.3333333333, ans=0.125 +2024-08-03 23:30:11,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=264649.0, ans=0.0 +2024-08-03 23:30:14,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=264649.0, ans=0.09899494936611666 +2024-08-03 23:30:15,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=264649.0, ans=0.125 +2024-08-03 23:30:20,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264685.6666666667, ans=0.125 +2024-08-03 23:30:26,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=264722.3333333333, ans=0.1 +2024-08-03 23:30:34,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=264759.0, ans=0.0 +2024-08-03 23:30:43,188 INFO [train.py:1114] (2/4) Epoch 20, batch 3050, loss[loss=0.1719, simple_loss=0.2571, pruned_loss=0.04339, over 13533.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2686, pruned_loss=0.04764, over 2625843.11 frames. 
], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:30:49,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=264795.6666666667, ans=0.0 +2024-08-03 23:31:08,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264869.0, ans=0.125 +2024-08-03 23:31:22,601 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.840e+01 1.051e+02 1.185e+02 1.395e+02 2.152e+02, threshold=2.371e+02, percent-clipped=0.0 +2024-08-03 23:31:25,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=264942.3333333333, ans=0.125 +2024-08-03 23:31:27,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=264942.3333333333, ans=0.0 +2024-08-03 23:31:33,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=264942.3333333333, ans=0.2 +2024-08-03 23:31:34,623 INFO [train.py:1114] (2/4) Epoch 20, batch 3100, loss[loss=0.2013, simple_loss=0.2938, pruned_loss=0.05445, over 13351.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2684, pruned_loss=0.04743, over 2625608.31 frames. ], batch size: 46, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:31:35,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=264979.0, ans=0.07 +2024-08-03 23:31:38,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=264979.0, ans=0.2 +2024-08-03 23:31:58,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=265052.3333333333, ans=0.0 +2024-08-03 23:32:11,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=265125.6666666667, ans=0.0 +2024-08-03 23:32:13,713 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.56 vs. limit=15.0 +2024-08-03 23:32:17,266 INFO [train.py:1114] (2/4) Epoch 20, batch 3150, loss[loss=0.2367, simple_loss=0.3149, pruned_loss=0.07925, over 12985.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2692, pruned_loss=0.04766, over 2626705.70 frames. ], batch size: 48, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:32:40,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=265235.6666666667, ans=0.125 +2024-08-03 23:32:48,869 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.568e+01 1.088e+02 1.255e+02 1.655e+02 2.829e+02, threshold=2.511e+02, percent-clipped=5.0 +2024-08-03 23:32:50,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.21 vs. limit=15.0 +2024-08-03 23:33:01,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.64 vs. limit=15.0 +2024-08-03 23:33:01,473 INFO [train.py:1114] (2/4) Epoch 20, batch 3200, loss[loss=0.1725, simple_loss=0.2717, pruned_loss=0.03663, over 13536.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2683, pruned_loss=0.04711, over 2632804.81 frames. 
], batch size: 37, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:33:07,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.02 vs. limit=22.5 +2024-08-03 23:33:15,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265382.3333333333, ans=0.1 +2024-08-03 23:33:24,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=265419.0, ans=0.95 +2024-08-03 23:33:37,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.72 vs. limit=22.5 +2024-08-03 23:33:41,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=265492.3333333333, ans=0.125 +2024-08-03 23:33:43,977 INFO [train.py:1114] (2/4) Epoch 20, batch 3250, loss[loss=0.1749, simple_loss=0.2651, pruned_loss=0.04234, over 13392.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2685, pruned_loss=0.04668, over 2637548.37 frames. ], batch size: 38, lr: 6.15e-03, grad_scale: 16.0 +2024-08-03 23:33:58,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265565.6666666667, ans=0.1 +2024-08-03 23:34:04,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=265602.3333333333, ans=0.125 +2024-08-03 23:34:07,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265602.3333333333, ans=0.1 +2024-08-03 23:34:16,935 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.340e+01 1.119e+02 1.289e+02 1.600e+02 2.225e+02, threshold=2.578e+02, percent-clipped=0.0 +2024-08-03 23:34:27,185 INFO [train.py:1114] (2/4) Epoch 20, batch 3300, loss[loss=0.1897, simple_loss=0.2757, pruned_loss=0.05184, over 12885.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2673, pruned_loss=0.04622, over 2639700.62 frames. ], batch size: 52, lr: 6.15e-03, grad_scale: 16.0 +2024-08-03 23:34:27,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=265712.3333333333, ans=0.0 +2024-08-03 23:34:39,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=265749.0, ans=0.125 +2024-08-03 23:34:55,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.89 vs. limit=10.0 +2024-08-03 23:34:56,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=265822.3333333333, ans=0.2 +2024-08-03 23:35:09,986 INFO [train.py:1114] (2/4) Epoch 20, batch 3350, loss[loss=0.2142, simple_loss=0.2926, pruned_loss=0.06791, over 13317.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2683, pruned_loss=0.04663, over 2629845.28 frames. 
], batch size: 49, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:35:11,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=265895.6666666667, ans=0.2 +2024-08-03 23:35:25,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=265932.3333333333, ans=0.125 +2024-08-03 23:35:28,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=265969.0, ans=0.125 +2024-08-03 23:35:42,730 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.153e+02 1.327e+02 1.503e+02 2.183e+02, threshold=2.655e+02, percent-clipped=0.0 +2024-08-03 23:35:43,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=266005.6666666667, ans=0.0 +2024-08-03 23:35:53,146 INFO [train.py:1114] (2/4) Epoch 20, batch 3400, loss[loss=0.1503, simple_loss=0.2314, pruned_loss=0.03459, over 13540.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2681, pruned_loss=0.0467, over 2625490.20 frames. ], batch size: 31, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:36:01,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=266115.6666666667, ans=0.2 +2024-08-03 23:36:07,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.91 vs. limit=6.0 +2024-08-03 23:36:08,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=266115.6666666667, ans=0.2 +2024-08-03 23:36:10,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=266152.3333333333, ans=0.125 +2024-08-03 23:36:35,523 INFO [train.py:1114] (2/4) Epoch 20, batch 3450, loss[loss=0.21, simple_loss=0.3016, pruned_loss=0.05923, over 12991.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2684, pruned_loss=0.04698, over 2628616.74 frames. ], batch size: 52, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:36:44,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=266299.0, ans=0.07 +2024-08-03 23:36:47,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=266299.0, ans=0.125 +2024-08-03 23:37:08,059 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.153e+01 1.075e+02 1.228e+02 1.591e+02 2.797e+02, threshold=2.457e+02, percent-clipped=1.0 +2024-08-03 23:37:18,297 INFO [train.py:1114] (2/4) Epoch 20, batch 3500, loss[loss=0.1613, simple_loss=0.2541, pruned_loss=0.03423, over 13547.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2677, pruned_loss=0.04709, over 2629580.71 frames. ], batch size: 34, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:37:23,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.41 vs. 
limit=15.0 +2024-08-03 23:37:30,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266482.3333333333, ans=0.1 +2024-08-03 23:37:32,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=266482.3333333333, ans=0.125 +2024-08-03 23:37:33,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=266482.3333333333, ans=0.2 +2024-08-03 23:37:38,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=266519.0, ans=0.125 +2024-08-03 23:37:42,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266519.0, ans=0.125 +2024-08-03 23:37:43,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.34 vs. limit=15.0 +2024-08-03 23:38:02,721 INFO [train.py:1114] (2/4) Epoch 20, batch 3550, loss[loss=0.1745, simple_loss=0.2672, pruned_loss=0.04095, over 12525.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2699, pruned_loss=0.04805, over 2628452.27 frames. ], batch size: 58, lr: 6.13e-03, grad_scale: 16.0 +2024-08-03 23:38:10,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=266665.6666666667, ans=0.125 +2024-08-03 23:38:12,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=266665.6666666667, ans=0.025 +2024-08-03 23:38:36,934 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.117e+01 1.134e+02 1.257e+02 1.418e+02 2.840e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 23:38:43,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266775.6666666667, ans=0.1 +2024-08-03 23:38:45,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=266775.6666666667, ans=0.125 +2024-08-03 23:38:47,706 INFO [train.py:1114] (2/4) Epoch 20, batch 3600, loss[loss=0.2322, simple_loss=0.3097, pruned_loss=0.07735, over 9537.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2751, pruned_loss=0.05224, over 2487744.29 frames. ], batch size: 97, lr: 6.13e-03, grad_scale: 32.0 +2024-08-03 23:39:13,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=266922.3333333333, ans=0.95 +2024-08-03 23:39:22,606 INFO [train.py:1387] (2/4) Done! 
diff --git a/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-3 b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-3 new file mode 100644 index 0000000000000000000000000000000000000000..9804b5aaf49f2acb84b89701dfc3fc23ad8ee105 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/log/log-train-2024-08-02-23-23-28-3 @@ -0,0 +1,10167 @@ +2024-08-02 23:23:28,722 INFO [train.py:1182] (3/4) Training started +2024-08-02 23:23:28,723 INFO [train.py:1192] (3/4) Device: cuda:3 +2024-08-02 23:23:28,725 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': 'master', 'icefall-git-sha1': 'e3b0958-dirty', 'icefall-git-date': 'Tue Jul 30 21:51:45 2024', 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2656.int.cedar.computecanada.ca', 'IP address': '172.16.146.93'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/zipformer/streaming/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': True, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 550, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 
'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-08-02 23:23:28,726 INFO [train.py:1212] (3/4) About to create model +2024-08-02 23:23:31,838 INFO [train.py:1216] (3/4) Number of model parameters: 66110931 +2024-08-02 23:23:33,387 INFO [train.py:1231] (3/4) Using DDP +2024-08-02 23:23:49,626 INFO [asr_datamodule.py:909] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-08-02 23:23:56,251 INFO [asr_datamodule.py:711] (3/4) Disable MUSAN +2024-08-02 23:23:56,251 INFO [asr_datamodule.py:729] (3/4) Enable SpecAugment +2024-08-02 23:23:56,251 INFO [asr_datamodule.py:730] (3/4) Time warp factor: 80 +2024-08-02 23:23:56,251 INFO [asr_datamodule.py:740] (3/4) Num frame mask: 10 +2024-08-02 23:23:56,251 INFO [asr_datamodule.py:753] (3/4) About to create train dataset +2024-08-02 23:23:56,252 INFO [asr_datamodule.py:780] (3/4) Using DynamicBucketingSampler. +2024-08-02 23:23:57,857 INFO [asr_datamodule.py:797] (3/4) About to create train dataloader +2024-08-02 23:23:57,865 INFO [asr_datamodule.py:926] (3/4) About to get dev-clean cuts +2024-08-02 23:23:58,241 INFO [asr_datamodule.py:933] (3/4) About to get dev-other cuts +2024-08-02 23:23:58,294 INFO [asr_datamodule.py:829] (3/4) About to create dev dataset +2024-08-02 23:23:58,617 INFO [asr_datamodule.py:846] (3/4) About to create dev dataloader +2024-08-02 23:23:58,617 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-08-02 23:30:00,738 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 8493MB +2024-08-02 23:30:01,694 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 8493MB +2024-08-02 23:32:14,983 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 8653MB +2024-08-02 23:32:16,112 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 8653MB +2024-08-02 23:33:32,180 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=512, metric=119.83 vs. limit=4.0 +2024-08-02 23:33:32,660 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 8653MB +2024-08-02 23:33:39,254 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 8653MB +2024-08-02 23:34:57,140 INFO [train.py:1114] (3/4) Epoch 1, batch 0, loss[loss=7.907, simple_loss=7.226, pruned_loss=6.797, over 13312.00 frames. ], tot_loss[loss=7.907, simple_loss=7.226, pruned_loss=6.797, over 13312.00 frames. ], batch size: 33, lr: 2.25e-02, grad_scale: 1.0 +2024-08-02 23:34:57,140 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-02 23:35:53,587 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=7.994, simple_loss=7.311, pruned_loss=6.819, over 944034.00 frames. +2024-08-02 23:35:53,588 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 8728MB +2024-08-02 23:36:58,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=0.0, ans=0.5 +2024-08-02 23:39:43,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.388e+03 2.561e+03 2.629e+03 3.528e+03 3.944e+03, threshold=1.052e+04, percent-clipped=0.0 +2024-08-02 23:41:18,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=185.08 vs. 
limit=7.51375 +2024-08-02 23:41:56,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=36.666666666666664, ans=0.49541666666666667 +2024-08-02 23:41:56,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=373.90 vs. limit=7.5275 +2024-08-02 23:41:56,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.63 vs. limit=7.5275 +2024-08-02 23:43:00,500 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 5.313e+02 2.388e+03 2.781e+03 4.030e+03, threshold=9.553e+03, percent-clipped=0.0 +2024-08-02 23:43:09,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=73.33333333333333, ans=0.19725 +2024-08-02 23:44:43,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=247.16 vs. limit=5.055 +2024-08-02 23:45:58,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 4.964e+02 6.315e+02 2.388e+03 4.030e+03, threshold=2.526e+03, percent-clipped=0.0 +2024-08-02 23:45:59,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=198.78 vs. limit=7.555 +2024-08-02 23:45:59,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=122.52 vs. limit=7.555 +2024-08-02 23:45:59,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=231.09 vs. limit=7.555 +2024-08-02 23:46:05,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=146.66666666666666, ans=0.8948666666666667 +2024-08-02 23:46:22,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=58.86 vs. limit=5.073333333333333 +2024-08-02 23:46:23,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=8.19 vs. limit=4.058666666666666 +2024-08-02 23:46:44,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=308.35 vs. limit=7.555 +2024-08-02 23:46:45,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=146.66666666666666, ans=0.8948666666666667 +2024-08-02 23:46:49,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=183.33333333333334, ans=0.4770833333333333 +2024-08-02 23:46:50,275 INFO [train.py:1114] (3/4) Epoch 1, batch 50, loss[loss=1.213, simple_loss=1.073, pruned_loss=1.248, over 13406.00 frames. ], tot_loss[loss=3.067, simple_loss=2.813, pruned_loss=2.463, over 579076.71 frames. ], batch size: 32, lr: 2.48e-02, grad_scale: 0.5 +2024-08-02 23:47:43,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=286.39 vs. 
limit=7.56875 +2024-08-02 23:48:08,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=232.34 vs. limit=7.665 +2024-08-02 23:50:02,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=240.94 vs. limit=7.59625 +2024-08-02 23:50:33,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=23.14 vs. limit=5.064166666666667 +2024-08-02 23:51:52,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=39.63 vs. limit=7.59625 +2024-08-02 23:52:12,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=293.3333333333333, ans=0.09340000000000001 +2024-08-02 23:52:13,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=34.79 vs. limit=7.72 +2024-08-02 23:53:40,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=234.33 vs. limit=5.1466666666666665 +2024-08-02 23:54:37,330 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=312.69 vs. limit=7.62375 +2024-08-02 23:54:45,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=162.59 vs. limit=7.62375 +2024-08-02 23:54:55,386 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=114.26 vs. limit=7.62375 +2024-08-02 23:55:14,127 INFO [train.py:1114] (3/4) Epoch 1, batch 100, loss[loss=1.144, simple_loss=0.987, pruned_loss=1.253, over 13530.00 frames. ], tot_loss[loss=2.065, simple_loss=1.866, pruned_loss=1.824, over 1026294.16 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 1.0 +2024-08-02 23:55:16,358 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.793e+01 6.736e+01 1.462e+02 5.319e+02 4.030e+03, threshold=2.924e+02, percent-clipped=0.0 +2024-08-02 23:55:16,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=38.73 vs. limit=7.6375 +2024-08-02 23:55:21,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=87.33 vs. limit=7.6375 +2024-08-02 23:55:40,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=366.6666666666667, ans=0.4828125 +2024-08-02 23:56:10,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=23.98 vs. limit=7.65125 +2024-08-02 23:56:24,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=127.96 vs. limit=7.665 +2024-08-02 23:56:42,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=82.26 vs. 
limit=5.22 +2024-08-03 00:01:26,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=60.49 vs. limit=7.67875 +2024-08-03 00:01:49,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=476.6666666666667, ans=0.47765625 +2024-08-03 00:12:27,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=13.73 vs. limit=5.119166666666667 +2024-08-03 00:14:45,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=11.98 vs. limit=5.119166666666667 +2024-08-03 00:15:03,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=193.13 vs. limit=7.6925 +2024-08-03 00:15:50,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=21.84 vs. limit=7.6925 +2024-08-03 00:16:00,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=513.3333333333334, ans=0.18075000000000002 +2024-08-03 00:16:13,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=36.97 vs. limit=5.275 +2024-08-03 00:16:22,410 INFO [train.py:1114] (3/4) Epoch 1, batch 150, loss[loss=0.9499, simple_loss=0.8101, pruned_loss=1.014, over 13423.00 frames. ], tot_loss[loss=1.646, simple_loss=1.466, pruned_loss=1.544, over 1387446.04 frames. ], batch size: 32, lr: 2.93e-02, grad_scale: 1.0 +2024-08-03 00:17:03,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=62.96 vs. limit=7.72 +2024-08-03 00:17:08,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=586.6666666666666, ans=0.04816666666666667 +2024-08-03 00:17:10,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=187.88 vs. limit=7.72 +2024-08-03 00:17:22,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=80.41 vs. limit=7.72 +2024-08-03 00:17:46,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=623.3333333333334, ans=0.09610416666666667 +2024-08-03 00:19:00,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=15.01 vs. limit=5.165 +2024-08-03 00:19:31,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=48.51 vs. limit=7.76125 +2024-08-03 00:19:53,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.26 vs. limit=8.0225 +2024-08-03 00:20:04,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=168.73 vs. 
limit=7.76125 +2024-08-03 00:20:04,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=8.52 vs. limit=4.278666666666667 +2024-08-03 00:20:08,217 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=22.93 vs. limit=5.348333333333334 +2024-08-03 00:20:24,601 INFO [train.py:1114] (3/4) Epoch 1, batch 200, loss[loss=0.9811, simple_loss=0.8369, pruned_loss=0.9782, over 12600.00 frames. ], tot_loss[loss=1.416, simple_loss=1.247, pruned_loss=1.363, over 1666129.86 frames. ], batch size: 59, lr: 3.15e-02, grad_scale: 2.0 +2024-08-03 00:20:29,218 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+01 4.646e+01 6.073e+01 7.987e+01 1.954e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-08-03 00:21:02,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=733.3333333333334, ans=0.8743333333333334 +2024-08-03 00:21:26,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.90 vs. limit=8.0775 +2024-08-03 00:21:27,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=770.0, ans=0.21155000000000002 +2024-08-03 00:21:29,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=770.0, ans=0.171125 +2024-08-03 00:21:40,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=32.05 vs. limit=7.78875 +2024-08-03 00:21:42,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.26 vs. limit=8.105 +2024-08-03 00:22:06,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=379.56 vs. limit=7.8025 +2024-08-03 00:22:41,552 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.65 vs. limit=7.8025 +2024-08-03 00:22:42,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=25.47 vs. limit=5.403333333333333 +2024-08-03 00:22:54,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.82 vs. limit=5.421666666666667 +2024-08-03 00:22:57,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.95 vs. limit=7.81625 +2024-08-03 00:22:58,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=189.34 vs. limit=7.81625 +2024-08-03 00:23:01,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=57.32 vs. limit=7.83 +2024-08-03 00:23:05,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.48 vs. 
limit=8.16 +2024-08-03 00:23:12,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=4.352 +2024-08-03 00:23:13,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=158.69 vs. limit=7.83 +2024-08-03 00:23:18,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=51.13 vs. limit=7.83 +2024-08-03 00:23:19,658 INFO [train.py:1114] (3/4) Epoch 1, batch 250, loss[loss=0.9835, simple_loss=0.8291, pruned_loss=0.9697, over 13323.00 frames. ], tot_loss[loss=1.277, simple_loss=1.114, pruned_loss=1.241, over 1884570.49 frames. ], batch size: 46, lr: 3.38e-02, grad_scale: 2.0 +2024-08-03 00:23:24,887 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.92 vs. limit=8.1875 +2024-08-03 00:23:27,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=916.6666666666666, ans=0.165625 +2024-08-03 00:24:01,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=11.88 vs. limit=4.381333333333333 +2024-08-03 00:24:02,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=990.0, ans=0.077725 +2024-08-03 00:24:02,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=990.0, ans=0.8653500000000001 +2024-08-03 00:24:02,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=29.85 vs. limit=8.2425 +2024-08-03 00:24:32,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=990.0, ans=0.077725 +2024-08-03 00:24:38,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.77 vs. limit=8.2425 +2024-08-03 00:24:38,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=64.49 vs. limit=7.87125 +2024-08-03 00:24:44,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=1026.6666666666667, ans=4.410666666666667 +2024-08-03 00:24:44,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=29.19 vs. limit=8.27 +2024-08-03 00:24:44,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=77.25 vs. limit=7.885 +2024-08-03 00:24:46,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=1026.6666666666667, ans=0.1615 +2024-08-03 00:24:46,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=1026.6666666666667, ans=8.27 +2024-08-03 00:24:46,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.45 vs. 
limit=8.27 +2024-08-03 00:24:49,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=1026.6666666666667, ans=0.19224999999999998 +2024-08-03 00:24:51,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.50 vs. limit=7.885 +2024-08-03 00:24:52,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=1063.3333333333333, ans=0.035 +2024-08-03 00:24:52,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=20.15 vs. limit=5.265833333333333 +2024-08-03 00:24:56,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.13 vs. limit=4.425333333333334 +2024-08-03 00:24:57,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=54.66 vs. limit=7.89875 +2024-08-03 00:24:59,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=1063.3333333333333, ans=0.16012500000000002 +2024-08-03 00:25:01,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=1063.3333333333333, ans=0.45015625 +2024-08-03 00:25:05,107 INFO [train.py:1114] (3/4) Epoch 1, batch 300, loss[loss=1.037, simple_loss=0.8657, pruned_loss=1.002, over 13446.00 frames. ], tot_loss[loss=1.186, simple_loss=1.026, pruned_loss=1.155, over 2052366.85 frames. ], batch size: 42, lr: 3.60e-02, grad_scale: 4.0 +2024-08-03 00:25:10,028 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 8.005e+01 9.897e+01 1.290e+02 2.424e+02, threshold=1.979e+02, percent-clipped=29.0 +2024-08-03 00:25:10,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=134.09 vs. limit=7.9125 +2024-08-03 00:25:12,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1100.0, ans=0.4484375 +2024-08-03 00:25:15,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=69.85 vs. limit=7.9125 +2024-08-03 00:25:21,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.25 vs. limit=5.284166666666667 +2024-08-03 00:26:07,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=1136.6666666666667, ans=0.44671875 +2024-08-03 00:26:11,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.02 vs. limit=8.38 +2024-08-03 00:26:18,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=4.469333333333333 +2024-08-03 00:26:28,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.74 vs. 
limit=3.176 +2024-08-03 00:26:37,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=8.24 vs. limit=4.484 +2024-08-03 00:26:41,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.02 vs. limit=4.242 +2024-08-03 00:26:45,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=34.79 vs. limit=7.95375 +2024-08-03 00:26:48,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.05 vs. limit=8.435 +2024-08-03 00:27:05,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=1283.3333333333333, ans=0.07112500000000001 +2024-08-03 00:27:06,202 INFO [train.py:1114] (3/4) Epoch 1, batch 350, loss[loss=0.9068, simple_loss=0.7459, pruned_loss=0.8775, over 13561.00 frames. ], tot_loss[loss=1.127, simple_loss=0.9653, pruned_loss=1.095, over 2183226.99 frames. ], batch size: 33, lr: 3.83e-02, grad_scale: 4.0 +2024-08-03 00:27:19,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.70 vs. limit=8.4625 +2024-08-03 00:27:26,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.68 vs. limit=8.4625 +2024-08-03 00:27:35,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.19 vs. limit=7.995 +2024-08-03 00:27:36,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.85 vs. limit=8.49 +2024-08-03 00:27:43,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.40 vs. limit=4.5280000000000005 +2024-08-03 00:27:45,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=23.74 vs. limit=8.00875 +2024-08-03 00:27:49,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=16.41 vs. limit=8.00875 +2024-08-03 00:27:57,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.63 vs. limit=8.545 +2024-08-03 00:28:02,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=1393.3333333333333, ans=0.06865 +2024-08-03 00:28:02,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=38.73 vs. limit=8.0225 +2024-08-03 00:28:06,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.83 vs. 
limit=8.545 +2024-08-03 00:28:12,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=1430.0, ans=0.43296875 +2024-08-03 00:28:32,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=52.41 vs. limit=8.03625 +2024-08-03 00:28:34,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.05 vs. limit=4.572 +2024-08-03 00:28:36,684 INFO [train.py:1114] (3/4) Epoch 1, batch 400, loss[loss=1.024, simple_loss=0.8377, pruned_loss=0.964, over 13364.00 frames. ], tot_loss[loss=1.082, simple_loss=0.9181, pruned_loss=1.047, over 2286509.27 frames. ], batch size: 37, lr: 4.05e-02, grad_scale: 8.0 +2024-08-03 00:28:37,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=12.94 vs. limit=8.05 +2024-08-03 00:28:44,137 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.328e+01 8.404e+01 1.145e+02 1.534e+02 2.452e+02, threshold=2.291e+02, percent-clipped=10.0 +2024-08-03 00:29:16,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1466.6666666666667, ans=0.43125 +2024-08-03 00:30:17,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=22.71 vs. limit=8.06375 +2024-08-03 00:30:31,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=1540.0, ans=0.0385 +2024-08-03 00:30:51,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1576.6666666666667, ans=0.42609375 +2024-08-03 00:31:21,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.27 vs. limit=5.788333333333333 +2024-08-03 00:32:02,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.50 vs. limit=5.806666666666667 +2024-08-03 00:32:30,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=29.13 vs. limit=8.105 +2024-08-03 00:32:51,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.65 vs. limit=3.2475 +2024-08-03 00:32:51,717 INFO [train.py:1114] (3/4) Epoch 1, batch 450, loss[loss=0.9803, simple_loss=0.7935, pruned_loss=0.912, over 13560.00 frames. ], tot_loss[loss=1.052, simple_loss=0.8847, pruned_loss=1.01, over 2360343.66 frames. ], batch size: 38, lr: 4.28e-02, grad_scale: 8.0 +2024-08-03 00:33:05,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=1650.0, ans=0.42265625 +2024-08-03 00:33:06,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.71 vs. 
limit=5.825 +2024-08-03 00:33:53,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=1686.6666666666667, ans=0.06205 +2024-08-03 00:33:56,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=4.674666666666667 +2024-08-03 00:34:01,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=1723.3333333333333, ans=0.13537500000000002 +2024-08-03 00:34:04,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.79 vs. limit=8.7925 +2024-08-03 00:34:18,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=1723.3333333333333, ans=0.41921875 +2024-08-03 00:34:44,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=1760.0, ans=8.82 +2024-08-03 00:34:45,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.52 vs. limit=4.704 +2024-08-03 00:34:51,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=8.82 +2024-08-03 00:35:38,385 INFO [train.py:1114] (3/4) Epoch 1, batch 500, loss[loss=0.9645, simple_loss=0.7818, pruned_loss=0.8623, over 13439.00 frames. ], tot_loss[loss=1.026, simple_loss=0.8557, pruned_loss=0.9724, over 2426119.20 frames. ], batch size: 43, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:35:53,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=29.08 vs. limit=8.1875 +2024-08-03 00:35:53,398 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.232e+01 1.074e+02 1.283e+02 1.686e+02 3.614e+02, threshold=2.565e+02, percent-clipped=11.0 +2024-08-03 00:36:24,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=1870.0, ans=0.83455 +2024-08-03 00:37:04,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1870.0, ans=0.41234375 +2024-08-03 00:38:30,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=1943.3333333333333, ans=0.7694333333333333 +2024-08-03 00:38:57,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1943.3333333333333, ans=0.28056666666666663 +2024-08-03 00:38:58,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.89 vs. limit=8.9575 +2024-08-03 00:41:33,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=1943.3333333333333, ans=0.25708333333333333 +2024-08-03 00:41:37,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.79 vs. 
limit=8.22875 +2024-08-03 00:43:22,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=1980.0, ans=0.08762500000000001 +2024-08-03 00:43:25,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=1980.0, ans=0.2525 +2024-08-03 00:43:27,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=14.41 vs. limit=8.2425 +2024-08-03 00:43:31,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=1980.0, ans=0.01192 +2024-08-03 00:43:38,159 INFO [train.py:1114] (3/4) Epoch 1, batch 550, loss[loss=1.015, simple_loss=0.8264, pruned_loss=0.8694, over 13067.00 frames. ], tot_loss[loss=1.006, simple_loss=0.8341, pruned_loss=0.9374, over 2468319.57 frames. ], batch size: 48, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:43:43,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.87 vs. limit=8.25625 +2024-08-03 00:43:56,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2016.6666666666667, ans=0.40546875 +2024-08-03 00:44:20,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=2016.6666666666667, ans=8.25625 +2024-08-03 00:44:41,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.11 vs. limit=8.27 +2024-08-03 00:44:53,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=2053.3333333333335, ans=4.8213333333333335 +2024-08-03 00:44:53,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=26.86 vs. limit=8.27 +2024-08-03 00:44:58,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=2053.3333333333335, ans=6.283333333333333 +2024-08-03 00:45:05,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2053.3333333333335, ans=0.40375 +2024-08-03 00:45:06,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.28 vs. limit=8.27 +2024-08-03 00:45:20,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=2090.0, ans=0.40203125 +2024-08-03 00:45:25,801 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.16 vs. 
limit=8.28375 +2024-08-03 00:45:26,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=2090.0, ans=0.40203125 +2024-08-03 00:45:27,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=2090.0, ans=0.40203125 +2024-08-03 00:45:49,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=2126.6666666666665, ans=0.4003125 +2024-08-03 00:46:06,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=2163.3333333333335, ans=0.39859374999999997 +2024-08-03 00:46:13,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=2163.3333333333335, ans=0.22836666666666666 +2024-08-03 00:46:25,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=7.99 vs. limit=4.865333333333333 +2024-08-03 00:46:26,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.01 vs. limit=9.1225 +2024-08-03 00:46:28,711 INFO [train.py:1114] (3/4) Epoch 1, batch 600, loss[loss=0.9408, simple_loss=0.7727, pruned_loss=0.767, over 13336.00 frames. ], tot_loss[loss=0.9851, simple_loss=0.8148, pruned_loss=0.8963, over 2508381.19 frames. ], batch size: 46, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:46:49,386 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.40 vs. limit=8.325 +2024-08-03 00:46:49,720 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.983e+01 1.322e+02 1.697e+02 2.206e+02 6.951e+02, threshold=3.394e+02, percent-clipped=10.0 +2024-08-03 00:46:49,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2200.0, ans=0.396875 +2024-08-03 00:47:09,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.02 vs. limit=9.1775 +2024-08-03 00:47:14,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=2236.6666666666665, ans=0.049675000000000004 +2024-08-03 00:47:21,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.65 vs. limit=8.33875 +2024-08-03 00:47:23,257 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.93 vs. limit=9.1775 +2024-08-03 00:47:25,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=8.33875 +2024-08-03 00:47:28,660 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=8.3525 +2024-08-03 00:47:28,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.15 vs. 
limit=8.3525 +2024-08-03 00:47:42,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=23.23 vs. limit=8.3525 +2024-08-03 00:47:43,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=24.55 vs. limit=8.3525 +2024-08-03 00:47:47,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=9.2325 +2024-08-03 00:48:02,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.92 vs. limit=9.26 +2024-08-03 00:48:23,481 INFO [train.py:1114] (3/4) Epoch 1, batch 650, loss[loss=0.8274, simple_loss=0.6983, pruned_loss=0.6178, over 13543.00 frames. ], tot_loss[loss=0.9572, simple_loss=0.7929, pruned_loss=0.8447, over 2543369.13 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:48:31,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.09 vs. limit=8.39375 +2024-08-03 00:48:49,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=2420.0, ans=0.38656250000000003 +2024-08-03 00:48:50,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=19.72 vs. limit=8.4075 +2024-08-03 00:49:16,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=2456.6666666666665, ans=0.04232291666666667 +2024-08-03 00:49:17,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=2456.6666666666665, ans=0.38484375 +2024-08-03 00:49:25,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.39 vs. limit=9.370000000000001 +2024-08-03 00:49:26,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.85 vs. limit=8.435 +2024-08-03 00:49:32,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.34 vs. limit=9.370000000000001 +2024-08-03 00:49:33,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=2493.3333333333335, ans=0.383125 +2024-08-03 00:49:48,084 INFO [train.py:1114] (3/4) Epoch 1, batch 700, loss[loss=0.7042, simple_loss=0.6009, pruned_loss=0.502, over 13535.00 frames. ], tot_loss[loss=0.9216, simple_loss=0.767, pruned_loss=0.7854, over 2566249.09 frames. 
], batch size: 35, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:49:53,122 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.309e+01 1.383e+02 1.770e+02 2.360e+02 5.485e+02, threshold=3.539e+02, percent-clipped=6.0 +2024-08-03 00:49:59,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=2566.6666666666665, ans=0.10375 +2024-08-03 00:50:11,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=2603.3333333333335, ans=0.04186458333333334 +2024-08-03 00:50:15,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.19 vs. limit=3.3905 +2024-08-03 00:50:37,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.32 vs. limit=8.50375 +2024-08-03 00:50:41,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=2713.3333333333335, ans=0.8050333333333334 +2024-08-03 00:50:46,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.35 vs. limit=5.085333333333334 +2024-08-03 00:50:53,585 INFO [train.py:1114] (3/4) Epoch 1, batch 750, loss[loss=0.7182, simple_loss=0.6222, pruned_loss=0.486, over 13372.00 frames. ], tot_loss[loss=0.8794, simple_loss=0.7365, pruned_loss=0.723, over 2583519.62 frames. ], batch size: 37, lr: 4.49e-02, grad_scale: 8.0 +2024-08-03 00:50:56,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=2750.0, ans=0.37109375 +2024-08-03 00:50:57,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=2750.0, ans=0.8037500000000001 +2024-08-03 00:50:57,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.12 vs. limit=5.0 +2024-08-03 00:51:18,316 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 00:51:36,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. limit=8.5725 +2024-08-03 00:51:38,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=2860.0, ans=0.03565 +2024-08-03 00:51:40,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.19 vs. limit=3.429 +2024-08-03 00:51:45,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2860.0, ans=0.3659375 +2024-08-03 00:51:45,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.82 vs. limit=8.5725 +2024-08-03 00:51:48,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=2896.6666666666665, ans=0.091375 +2024-08-03 00:52:04,755 INFO [train.py:1114] (3/4) Epoch 1, batch 800, loss[loss=0.6465, simple_loss=0.5654, pruned_loss=0.4221, over 13339.00 frames. 
], tot_loss[loss=0.8363, simple_loss=0.7058, pruned_loss=0.6624, over 2598195.19 frames. ], batch size: 33, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:52:06,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.637e+02 2.042e+02 2.862e+02 4.523e+02, threshold=4.084e+02, percent-clipped=8.0 +2024-08-03 00:52:09,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=9.7 +2024-08-03 00:52:14,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.87 vs. limit=8.61375 +2024-08-03 00:52:23,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.60 vs. limit=8.61375 +2024-08-03 00:52:25,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=3006.6666666666665, ans=9.754999999999999 +2024-08-03 00:52:30,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=3006.6666666666665, ans=0.3590625 +2024-08-03 00:52:55,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3080.0, ans=0.35562499999999997 +2024-08-03 00:52:57,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.34 vs. limit=8.655 +2024-08-03 00:53:14,278 INFO [train.py:1114] (3/4) Epoch 1, batch 850, loss[loss=0.6338, simple_loss=0.5658, pruned_loss=0.3901, over 13310.00 frames. ], tot_loss[loss=0.7949, simple_loss=0.6766, pruned_loss=0.6066, over 2611284.60 frames. ], batch size: 40, lr: 4.49e-02, grad_scale: 16.0 +2024-08-03 00:53:23,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3116.6666666666665, ans=0.35390625 +2024-08-03 00:53:26,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.28 vs. limit=3.4675000000000002 +2024-08-03 00:53:52,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=3190.0, ans=0.035 +2024-08-03 00:53:58,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.76 vs. limit=5.7975 +2024-08-03 00:54:05,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=10.31 vs. limit=9.92 +2024-08-03 00:54:07,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.47 vs. 
limit=8.71 +2024-08-03 00:54:41,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=3263.3333333333335, ans=0.34703125 +2024-08-03 00:54:44,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=3300.0, ans=0.3453125 +2024-08-03 00:54:45,058 INFO [train.py:1114] (3/4) Epoch 1, batch 900, loss[loss=0.5862, simple_loss=0.5253, pruned_loss=0.3545, over 13348.00 frames. ], tot_loss[loss=0.7569, simple_loss=0.6499, pruned_loss=0.5567, over 2614556.16 frames. ], batch size: 33, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:54:47,015 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.709e+02 2.155e+02 3.211e+02 6.364e+02, threshold=4.310e+02, percent-clipped=14.0 +2024-08-03 00:54:55,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=3300.0, ans=0.267 +2024-08-03 00:55:16,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=10.0025 +2024-08-03 00:55:26,014 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 00:55:29,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=8.765 +2024-08-03 00:56:05,967 INFO [train.py:1114] (3/4) Epoch 1, batch 950, loss[loss=0.5442, simple_loss=0.4989, pruned_loss=0.3104, over 13549.00 frames. ], tot_loss[loss=0.7206, simple_loss=0.6247, pruned_loss=0.5111, over 2614253.20 frames. ], batch size: 34, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:56:31,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=3483.3333333333335, ans=0.06937499999999999 +2024-08-03 00:56:42,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=3520.0, ans=0.33499999999999996 +2024-08-03 00:56:52,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=3520.0, ans=0.33499999999999996 +2024-08-03 00:56:57,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.28 vs. limit=5.889166666666666 +2024-08-03 00:57:00,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.11 vs. limit=5.422666666666666 +2024-08-03 00:57:16,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=3593.3333333333335, ans=0.03877083333333334 +2024-08-03 00:57:18,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=3630.0, ans=0.77295 +2024-08-03 00:57:22,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=3630.0, ans=0.018324999999999994 +2024-08-03 00:57:28,047 INFO [train.py:1114] (3/4) Epoch 1, batch 1000, loss[loss=0.5285, simple_loss=0.4895, pruned_loss=0.2934, over 13382.00 frames. ], tot_loss[loss=0.6914, simple_loss=0.6047, pruned_loss=0.4739, over 2612964.15 frames. 
], batch size: 35, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:57:32,800 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.579e+02 2.012e+02 2.638e+02 6.886e+02, threshold=4.024e+02, percent-clipped=6.0 +2024-08-03 00:57:34,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=3666.6666666666665, ans=0.06249999999999997 +2024-08-03 00:57:36,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3666.6666666666665, ans=0.328125 +2024-08-03 00:57:58,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=10.88 vs. limit=10.2775 +2024-08-03 00:58:33,367 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=1.079e+00 +2024-08-03 00:58:38,304 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=2.485e-01 +2024-08-03 00:58:48,522 INFO [train.py:1114] (3/4) Epoch 1, batch 1050, loss[loss=0.5292, simple_loss=0.4957, pruned_loss=0.2858, over 13580.00 frames. ], tot_loss[loss=0.6608, simple_loss=0.5836, pruned_loss=0.4377, over 2616815.06 frames. ], batch size: 39, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:58:57,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3850.0, ans=0.2615 +2024-08-03 00:59:00,451 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=1.271e+00 +2024-08-03 00:59:13,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.88 vs. limit=5.971666666666667 +2024-08-03 00:59:14,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=10.442499999999999 +2024-08-03 00:59:16,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=8.97125 +2024-08-03 00:59:21,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=5.569333333333334 +2024-08-03 00:59:30,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.73 vs. limit=6.98 +2024-08-03 00:59:32,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=3960.0, ans=0.31437499999999996 +2024-08-03 00:59:35,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3960.0, ans=0.31437499999999996 +2024-08-03 00:59:41,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=3996.6666666666665, ans=0.010075 +2024-08-03 00:59:44,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=3996.6666666666665, ans=0.31265624999999997 +2024-08-03 00:59:48,494 INFO [train.py:1114] (3/4) Epoch 1, batch 1100, loss[loss=0.5117, simple_loss=0.4841, pruned_loss=0.2701, over 13569.00 frames. 
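The `optim.py:487` WARNING lines summarize recent gradient norms as five order statistics (min, 25%, median, 75%, max) together with the clipping threshold in effect and the fraction of recent batches that were clipped. In every warning above, the threshold is exactly `Clipping_scale=2.0` times the reported median, which supports the reading sketched below; the optimizer behind these logs may still derive it differently in detail:

```python
# Bookkeeping sketch for the optim.py WARNING lines. Assumption: the clipping
# threshold is Clipping_scale * median over a sliding window of grad norms
# (consistent with threshold being exactly 2x the reported median above).
from collections import deque
from statistics import median, quantiles

class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.was_clipped = deque(maxlen=window)

    def observe(self, grad_norm: float) -> float:
        """Record one batch's grad norm; return the scale (<= 1.0) to apply."""
        self.norms.append(grad_norm)
        threshold = self.scale * median(self.norms)
        clipped = grad_norm > threshold
        self.was_clipped.append(clipped)
        return threshold / grad_norm if clipped else 1.0

    def summary(self) -> dict:
        """Quartile-style summary like the log lines (25%/50%/75% cut points)."""
        q = quantiles(self.norms, n=4) if len(self.norms) >= 2 else list(self.norms)
        pct = 100.0 * sum(self.was_clipped) / max(1, len(self.was_clipped))
        return {"min": min(self.norms), "quartiles": q,
                "max": max(self.norms), "percent_clipped": pct}
```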
], tot_loss[loss=0.634, simple_loss=0.5653, pruned_loss=0.4065, over 2620998.87 frames. ], batch size: 36, lr: 4.48e-02, grad_scale: 16.0 +2024-08-03 00:59:50,399 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.598e+02 2.010e+02 2.726e+02 4.926e+02, threshold=4.021e+02, percent-clipped=7.0 +2024-08-03 01:00:11,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4033.3333333333335, ans=0.25966666666666666 +2024-08-03 01:00:19,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4070.0, ans=0.2593 +2024-08-03 01:00:24,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=4106.666666666667, ans=0.3075 +2024-08-03 01:00:30,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4106.666666666667, ans=0.049555555555555554 +2024-08-03 01:00:55,289 INFO [train.py:1114] (3/4) Epoch 1, batch 1150, loss[loss=0.5128, simple_loss=0.4787, pruned_loss=0.2782, over 13554.00 frames. ], tot_loss[loss=0.6087, simple_loss=0.5479, pruned_loss=0.3785, over 2619699.14 frames. ], batch size: 36, lr: 4.47e-02, grad_scale: 16.0 +2024-08-03 01:01:15,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=4253.333333333333, ans=0.30062500000000003 +2024-08-03 01:01:39,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=10.90 vs. limit=10.745000000000001 +2024-08-03 01:01:41,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4326.666666666667, ans=0.2567333333333333 +2024-08-03 01:01:55,176 INFO [train.py:1114] (3/4) Epoch 1, batch 1200, loss[loss=0.5003, simple_loss=0.4742, pruned_loss=0.263, over 13579.00 frames. ], tot_loss[loss=0.5902, simple_loss=0.5357, pruned_loss=0.3571, over 2616465.45 frames. ], batch size: 39, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:01:57,188 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.679e+02 2.058e+02 2.623e+02 8.489e+02, threshold=4.116e+02, percent-clipped=4.0 +2024-08-03 01:02:08,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=4436.666666666667, ans=0.7447166666666667 +2024-08-03 01:02:14,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=4436.666666666667, ans=0.7447166666666667 +2024-08-03 01:02:14,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=4436.666666666667, ans=9.16375 +2024-08-03 01:02:17,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=4473.333333333333, ans=0.1586216666666667 +2024-08-03 01:02:48,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=5.818666666666667 +2024-08-03 01:02:54,142 INFO [train.py:1114] (3/4) Epoch 1, batch 1250, loss[loss=0.4982, simple_loss=0.4823, pruned_loss=0.2515, over 13443.00 frames. ], tot_loss[loss=0.5709, simple_loss=0.5232, pruned_loss=0.3359, over 2628762.66 frames. 
], batch size: 42, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:03:23,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=4620.0, ans=0.04741666666666667 +2024-08-03 01:03:30,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.21 vs. limit=10.965 +2024-08-03 01:03:43,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=4656.666666666667, ans=0.28171875 +2024-08-03 01:03:43,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=4656.666666666667, ans=0.07089583333333334 +2024-08-03 01:03:44,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.88 vs. limit=5.862666666666667 +2024-08-03 01:03:45,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=4656.666666666667, ans=0.28171875 +2024-08-03 01:03:49,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=4693.333333333333, ans=0.7357333333333334 +2024-08-03 01:03:55,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4693.333333333333, ans=0.28 +2024-08-03 01:03:57,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.92 vs. limit=9.26 +2024-08-03 01:04:01,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.05 vs. limit=11.0475 +2024-08-03 01:04:08,950 INFO [train.py:1114] (3/4) Epoch 1, batch 1300, loss[loss=0.5436, simple_loss=0.5091, pruned_loss=0.2919, over 12875.00 frames. ], tot_loss[loss=0.5515, simple_loss=0.5098, pruned_loss=0.3167, over 2632537.39 frames. ], batch size: 52, lr: 4.47e-02, grad_scale: 32.0 +2024-08-03 01:04:10,894 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.740e+02 2.083e+02 2.560e+02 4.997e+02, threshold=4.167e+02, percent-clipped=2.0 +2024-08-03 01:04:36,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.60 vs. limit=3.7205 +2024-08-03 01:04:36,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.67 vs. limit=6.200833333333334 +2024-08-03 01:04:44,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=4803.333333333333, ans=0.03498958333333334 +2024-08-03 01:05:06,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4876.666666666667, ans=0.2512333333333333 +2024-08-03 01:05:15,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.58 vs. 
limit=11.185 +2024-08-03 01:05:23,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=4950.0, ans=0.025 +2024-08-03 01:05:23,860 INFO [train.py:1114] (3/4) Epoch 1, batch 1350, loss[loss=0.4883, simple_loss=0.4693, pruned_loss=0.2507, over 13527.00 frames. ], tot_loss[loss=0.5341, simple_loss=0.4983, pruned_loss=0.2996, over 2640007.33 frames. ], batch size: 37, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:05:29,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=4950.0, ans=0.04949747468305833 +2024-08-03 01:05:34,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4986.666666666667, ans=0.2501333333333333 +2024-08-03 01:05:40,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=11.24 +2024-08-03 01:05:52,744 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:06:11,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5096.666666666667, ans=0.24903333333333333 +2024-08-03 01:06:13,509 INFO [train.py:1114] (3/4) Epoch 1, batch 1400, loss[loss=0.4408, simple_loss=0.4231, pruned_loss=0.2271, over 13268.00 frames. ], tot_loss[loss=0.5196, simple_loss=0.489, pruned_loss=0.2854, over 2643816.69 frames. ], batch size: 31, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:06:15,474 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.555e+02 1.828e+02 2.203e+02 3.760e+02, threshold=3.656e+02, percent-clipped=0.0 +2024-08-03 01:06:29,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=5170.0, ans=0.009745652173913044 +2024-08-03 01:06:29,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.34 vs. limit=6.068 +2024-08-03 01:06:34,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.28 vs. limit=9.43875 +2024-08-03 01:06:37,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=5206.666666666667, ans=0.25593750000000004 +2024-08-03 01:06:48,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=5243.333333333333, ans=0.044819444444444446 +2024-08-03 01:06:53,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=5243.333333333333, ans=0.025 +2024-08-03 01:06:57,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5280.0, ans=0.0 +2024-08-03 01:07:05,501 INFO [train.py:1114] (3/4) Epoch 1, batch 1450, loss[loss=0.4661, simple_loss=0.4623, pruned_loss=0.2277, over 13414.00 frames. ], tot_loss[loss=0.509, simple_loss=0.4826, pruned_loss=0.2747, over 2642093.85 frames. ], batch size: 43, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:07:13,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.90 vs. 
limit=7.658333333333333 +2024-08-03 01:07:30,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=5390.0, ans=0.025 +2024-08-03 01:07:48,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5426.666666666667, ans=0.0 +2024-08-03 01:07:59,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.21 vs. limit=11.5975 +2024-08-03 01:08:00,852 INFO [train.py:1114] (3/4) Epoch 1, batch 1500, loss[loss=0.4884, simple_loss=0.4794, pruned_loss=0.2436, over 13400.00 frames. ], tot_loss[loss=0.4976, simple_loss=0.4756, pruned_loss=0.2639, over 2641986.81 frames. ], batch size: 39, lr: 4.46e-02, grad_scale: 32.0 +2024-08-03 01:08:14,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.665e+02 2.059e+02 2.727e+02 4.755e+02, threshold=4.117e+02, percent-clipped=2.0 +2024-08-03 01:08:14,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=5500.0, ans=0.009673913043478262 +2024-08-03 01:08:27,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5536.666666666667, ans=0.24046875 +2024-08-03 01:08:29,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.64 vs. limit=11.6525 +2024-08-03 01:08:32,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.13 vs. limit=11.68 +2024-08-03 01:08:32,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=5573.333333333333, ans=0.009657971014492754 +2024-08-03 01:08:53,476 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:09:00,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.54 vs. limit=6.4025 +2024-08-03 01:09:00,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=5610.0, ans=0.06493750000000001 +2024-08-03 01:09:11,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.91 vs. limit=11.735 +2024-08-03 01:09:12,864 INFO [train.py:1114] (3/4) Epoch 1, batch 1550, loss[loss=0.4537, simple_loss=0.4618, pruned_loss=0.2142, over 13403.00 frames. ], tot_loss[loss=0.4881, simple_loss=0.4698, pruned_loss=0.2552, over 2631952.17 frames. 
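The `Whitening` records compare a per-module whitening metric against a scheduled limit (the same `whitening_limit` values that also show up as `ScheduledFloat` records); a corrective gradient is applied only when the metric exceeds the limit. A plausible reconstruction of the metric, assuming it is `g * tr(C^2) / tr(C)^2` for the per-group feature covariance `C` over `g` channels, which equals 1.0 for perfectly whitened features and approaches the channel count as the covariance collapses toward rank one:

```python
# Plausible reconstruction of the metric behind "metric=X vs. limit=Y".
# Assumption: metric = g * tr(C @ C) / tr(C)**2 per group of g channels;
# 1.0 means the covariance is proportional to the identity (fully whitened).
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels); channels are split into equal groups."""
    n, d = x.shape
    g = d // num_groups
    x = x.reshape(n, num_groups, g)
    x = x - x.mean(dim=0, keepdim=True)          # center per channel
    metrics = []
    for k in range(num_groups):
        c = x[:, k, :].T @ x[:, k, :] / n        # (g, g) covariance estimate
        metrics.append(g * (c @ c).trace() / c.trace() ** 2)
    return float(torch.stack(metrics).mean())

x = torch.randn(1000, 256)                   # already nearly white
print(whitening_metric(x, num_groups=8))     # ~1.0, well under typical limits
```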
], batch size: 41, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:09:45,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=5720.0, ans=0.6998 +2024-08-03 01:09:50,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=5756.666666666667, ans=0.042680555555555555 +2024-08-03 01:09:59,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=5756.666666666667, ans=0.28635 +2024-08-03 01:10:00,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=5793.333333333333, ans=0.22843750000000002 +2024-08-03 01:10:06,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=5793.333333333333, ans=0.22843750000000002 +2024-08-03 01:10:06,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5793.333333333333, ans=0.22843750000000002 +2024-08-03 01:10:06,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5793.333333333333, ans=0.24206666666666665 +2024-08-03 01:10:09,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=5793.333333333333, ans=0.2869 +2024-08-03 01:10:30,113 INFO [train.py:1114] (3/4) Epoch 1, batch 1600, loss[loss=0.4336, simple_loss=0.4459, pruned_loss=0.2025, over 13582.00 frames. ], tot_loss[loss=0.4792, simple_loss=0.4643, pruned_loss=0.2475, over 2625388.99 frames. ], batch size: 39, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:10:33,557 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.694e+02 2.197e+02 2.790e+02 6.281e+02, threshold=4.393e+02, percent-clipped=9.0 +2024-08-03 01:10:51,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=5866.666666666667, ans=0.24133333333333332 +2024-08-03 01:10:51,695 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:10:52,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=5866.666666666667, ans=0.6946666666666667 +2024-08-03 01:11:37,607 INFO [train.py:1114] (3/4) Epoch 1, batch 1650, loss[loss=0.3987, simple_loss=0.4227, pruned_loss=0.1785, over 13321.00 frames. ], tot_loss[loss=0.4698, simple_loss=0.4587, pruned_loss=0.2396, over 2621783.47 frames. ], batch size: 40, lr: 4.45e-02, grad_scale: 32.0 +2024-08-03 01:11:43,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6050.0, ans=0.21640625000000002 +2024-08-03 01:11:57,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=12.065000000000001 +2024-08-03 01:12:14,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=6123.333333333333, ans=0.04115277777777778 +2024-08-03 01:12:18,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.86 vs. 
limit=12.120000000000001 +2024-08-03 01:12:18,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=6160.0, ans=0.21125 +2024-08-03 01:12:19,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.08 vs. limit=12.120000000000001 +2024-08-03 01:12:20,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=6160.0, ans=0.025 +2024-08-03 01:12:35,772 INFO [train.py:1114] (3/4) Epoch 1, batch 1700, loss[loss=0.4011, simple_loss=0.405, pruned_loss=0.1945, over 13261.00 frames. ], tot_loss[loss=0.4611, simple_loss=0.4541, pruned_loss=0.2323, over 2630532.34 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:12:36,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=6233.333333333333, ans=0.2078125 +2024-08-03 01:12:37,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.541e+02 1.894e+02 2.425e+02 4.300e+02, threshold=3.787e+02, percent-clipped=0.0 +2024-08-03 01:12:44,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=6233.333333333333, ans=0.2078125 +2024-08-03 01:12:46,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=6233.333333333333, ans=0.2078125 +2024-08-03 01:12:52,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.91 vs. limit=12.2025 +2024-08-03 01:12:58,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=6306.666666666667, ans=0.025 +2024-08-03 01:13:08,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=6343.333333333333, ans=0.20265624999999998 +2024-08-03 01:13:14,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=6343.333333333333, ans=0.009490579710144928 +2024-08-03 01:13:18,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=6380.0, ans=0.6767000000000001 +2024-08-03 01:13:26,841 INFO [train.py:1114] (3/4) Epoch 1, batch 1750, loss[loss=0.3899, simple_loss=0.3964, pruned_loss=0.1881, over 13538.00 frames. ], tot_loss[loss=0.4536, simple_loss=0.4497, pruned_loss=0.2264, over 2633370.77 frames. ], batch size: 31, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:13:27,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=6416.666666666667, ans=0.00947463768115942 +2024-08-03 01:14:02,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6526.666666666667, ans=0.19406250000000003 +2024-08-03 01:14:10,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=12.395 +2024-08-03 01:14:13,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.04 vs. 
limit=12.4225 +2024-08-03 01:14:24,171 INFO [train.py:1114] (3/4) Epoch 1, batch 1800, loss[loss=0.3967, simple_loss=0.4183, pruned_loss=0.1831, over 13544.00 frames. ], tot_loss[loss=0.4481, simple_loss=0.4469, pruned_loss=0.2219, over 2634710.62 frames. ], batch size: 38, lr: 4.44e-02, grad_scale: 32.0 +2024-08-03 01:14:25,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=6600.0, ans=0.190625 +2024-08-03 01:14:26,048 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.670e+02 2.044e+02 2.499e+02 4.845e+02, threshold=4.088e+02, percent-clipped=4.0 +2024-08-03 01:14:44,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=6636.666666666667, ans=0.6677166666666667 +2024-08-03 01:14:51,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=6673.333333333333, ans=0.03886111111111112 +2024-08-03 01:15:07,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=12.504999999999999 +2024-08-03 01:15:30,697 INFO [train.py:1114] (3/4) Epoch 1, batch 1850, loss[loss=0.3903, simple_loss=0.4168, pruned_loss=0.1783, over 13412.00 frames. ], tot_loss[loss=0.4418, simple_loss=0.4438, pruned_loss=0.2171, over 2637221.28 frames. ], batch size: 39, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:15:33,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.71 vs. limit=6.695833333333333 +2024-08-03 01:15:35,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=6783.333333333333, ans=0.18203124999999998 +2024-08-03 01:15:41,460 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:15:49,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=6856.666666666667, ans=0.17859375 +2024-08-03 01:15:51,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten.whitening_limit, batch_count=6856.666666666667, ans=10.07125 +2024-08-03 01:15:52,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.18 vs. limit=10.07125 +2024-08-03 01:15:54,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=6856.666666666667, ans=0.009378985507246376 +2024-08-03 01:15:57,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=6856.666666666667, ans=0.17859375 +2024-08-03 01:15:57,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=6856.666666666667, ans=0.17859375 +2024-08-03 01:15:58,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.09 vs. 
limit=10.085 +2024-08-03 01:16:05,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=6893.333333333333, ans=0.176875 +2024-08-03 01:16:31,448 INFO [train.py:1114] (3/4) Epoch 1, batch 1900, loss[loss=0.4409, simple_loss=0.4514, pruned_loss=0.2135, over 13307.00 frames. ], tot_loss[loss=0.4358, simple_loss=0.4407, pruned_loss=0.2128, over 2639993.48 frames. ], batch size: 40, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:16:31,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.76 vs. limit=10.1125 +2024-08-03 01:16:35,171 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.757e+02 2.130e+02 2.546e+02 5.245e+02, threshold=4.259e+02, percent-clipped=2.0 +2024-08-03 01:17:07,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=7040.0, ans=0.025 +2024-08-03 01:17:28,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.58 vs. limit=6.778333333333333 +2024-08-03 01:17:31,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=7113.333333333333, ans=0.6510333333333334 +2024-08-03 01:17:34,517 INFO [train.py:1114] (3/4) Epoch 1, batch 1950, loss[loss=0.4031, simple_loss=0.423, pruned_loss=0.1906, over 13559.00 frames. ], tot_loss[loss=0.4326, simple_loss=0.4404, pruned_loss=0.21, over 2646522.90 frames. ], batch size: 36, lr: 4.43e-02, grad_scale: 32.0 +2024-08-03 01:17:51,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=10.195 +2024-08-03 01:17:56,304 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=6.889333333333333 +2024-08-03 01:18:28,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=7260.0, ans=0.15968749999999998 +2024-08-03 01:18:35,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=7296.666666666667, ans=0.025 +2024-08-03 01:18:42,883 INFO [train.py:1114] (3/4) Epoch 1, batch 2000, loss[loss=0.3402, simple_loss=0.3696, pruned_loss=0.1554, over 13525.00 frames. ], tot_loss[loss=0.4296, simple_loss=0.4397, pruned_loss=0.2078, over 2636317.35 frames. ], batch size: 31, lr: 4.42e-02, grad_scale: 32.0 +2024-08-03 01:18:44,709 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.650e+02 1.978e+02 2.674e+02 4.949e+02, threshold=3.955e+02, percent-clipped=2.0 +2024-08-03 01:19:14,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=7370.0, ans=0.15453125 +2024-08-03 01:19:15,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=7406.666666666667, ans=0.035805555555555556 +2024-08-03 01:19:30,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.10 vs. 
limit=4.1165 +2024-08-03 01:19:38,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=7480.0, ans=0.14937499999999998 +2024-08-03 01:19:41,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=7480.0, ans=0.31220000000000003 +2024-08-03 01:19:44,682 INFO [train.py:1114] (3/4) Epoch 1, batch 2050, loss[loss=0.4059, simple_loss=0.4044, pruned_loss=0.2037, over 13410.00 frames. ], tot_loss[loss=0.4242, simple_loss=0.4364, pruned_loss=0.2045, over 2633077.93 frames. ], batch size: 32, lr: 4.42e-02, grad_scale: 64.0 +2024-08-03 01:20:00,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=13.165 +2024-08-03 01:20:02,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7553.333333333333, ans=0.22446666666666665 +2024-08-03 01:20:07,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=7553.333333333333, ans=0.009227536231884059 +2024-08-03 01:20:20,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=7590.0, ans=0.14421875 +2024-08-03 01:20:20,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.48 vs. limit=10.34625 +2024-08-03 01:20:38,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.79 vs. limit=13.219999999999999 +2024-08-03 01:20:51,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7663.333333333333, ans=0.14078125000000002 +2024-08-03 01:20:54,841 INFO [train.py:1114] (3/4) Epoch 1, batch 2100, loss[loss=0.45, simple_loss=0.4696, pruned_loss=0.2152, over 13541.00 frames. ], tot_loss[loss=0.4179, simple_loss=0.4332, pruned_loss=0.2001, over 2638299.29 frames. ], batch size: 37, lr: 4.42e-02, grad_scale: 8.0 +2024-08-03 01:20:59,388 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.800e+02 2.088e+02 2.971e+02 6.141e+02, threshold=4.177e+02, percent-clipped=15.0 +2024-08-03 01:21:11,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7736.666666666667, ans=0.13734375 +2024-08-03 01:21:19,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7773.333333333333, ans=0.135625 +2024-08-03 01:21:33,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.08 vs. 
limit=10.42875 +2024-08-03 01:21:37,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=7846.666666666667, ans=0.1321875 +2024-08-03 01:21:38,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7846.666666666667, ans=0.1321875 +2024-08-03 01:21:43,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.02 vs. limit=10.442499999999999 +2024-08-03 01:21:56,435 INFO [train.py:1114] (3/4) Epoch 1, batch 2150, loss[loss=0.4079, simple_loss=0.4297, pruned_loss=0.1931, over 13546.00 frames. ], tot_loss[loss=0.4112, simple_loss=0.4294, pruned_loss=0.1956, over 2646882.42 frames. ], batch size: 36, lr: 4.41e-02, grad_scale: 8.0 +2024-08-03 01:22:06,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.85 vs. limit=13.440000000000001 +2024-08-03 01:22:07,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.59 vs. limit=7.168 +2024-08-03 01:22:11,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=7920.0, ans=0.009147826086956521 +2024-08-03 01:22:27,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7993.333333333333, ans=0.22006666666666666 +2024-08-03 01:22:30,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.70 vs. limit=10.4975 +2024-08-03 01:22:40,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=8030.0, ans=0.61895 +2024-08-03 01:22:46,516 INFO [train.py:1114] (3/4) Epoch 1, batch 2200, loss[loss=0.4025, simple_loss=0.4364, pruned_loss=0.1843, over 13414.00 frames. ], tot_loss[loss=0.4083, simple_loss=0.4281, pruned_loss=0.1935, over 2645485.14 frames. ], batch size: 39, lr: 4.41e-02, grad_scale: 8.0 +2024-08-03 01:22:51,102 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.590e+02 1.905e+02 2.323e+02 5.165e+02, threshold=3.810e+02, percent-clipped=4.0 +2024-08-03 01:22:51,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=8066.666666666667, ans=0.21933333333333332 +2024-08-03 01:22:58,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=8103.333333333333, ans=0.125 +2024-08-03 01:22:58,348 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.04 vs. 
limit=10.53875 +2024-08-03 01:23:25,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=8176.666666666667, ans=0.125 +2024-08-03 01:23:28,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8176.666666666667, ans=0.0 +2024-08-03 01:23:38,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=8213.333333333334, ans=0.8321333333333333 +2024-08-03 01:23:38,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8213.333333333334, ans=0.21786666666666665 +2024-08-03 01:23:42,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=8213.333333333334, ans=0.125 +2024-08-03 01:23:43,780 INFO [train.py:1114] (3/4) Epoch 1, batch 2250, loss[loss=0.3782, simple_loss=0.4161, pruned_loss=0.1702, over 13355.00 frames. ], tot_loss[loss=0.4056, simple_loss=0.4269, pruned_loss=0.1916, over 2642023.63 frames. ], batch size: 37, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:23:44,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.24 vs. limit=13.6875 +2024-08-03 01:24:06,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=13.7425 +2024-08-03 01:24:34,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.20 vs. limit=13.825000000000001 +2024-08-03 01:24:34,658 INFO [train.py:1114] (3/4) Epoch 1, batch 2300, loss[loss=0.3228, simple_loss=0.3598, pruned_loss=0.1429, over 13583.00 frames. ], tot_loss[loss=0.4006, simple_loss=0.4235, pruned_loss=0.1884, over 2638208.02 frames. ], batch size: 33, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:24:51,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.412e+02 1.913e+02 2.281e+02 2.883e+02 4.389e+02, threshold=4.562e+02, percent-clipped=6.0 +2024-08-03 01:24:52,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=8433.333333333334, ans=0.125 +2024-08-03 01:25:05,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8506.666666666666, ans=0.21493333333333334 +2024-08-03 01:25:13,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8506.666666666666, ans=0.21493333333333334 +2024-08-03 01:25:29,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=8580.0, ans=0.0 +2024-08-03 01:25:30,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=8580.0, ans=0.0 +2024-08-03 01:25:33,507 INFO [train.py:1114] (3/4) Epoch 1, batch 2350, loss[loss=0.3605, simple_loss=0.4097, pruned_loss=0.1556, over 13551.00 frames. ], tot_loss[loss=0.398, simple_loss=0.4224, pruned_loss=0.1865, over 2640517.85 frames. 
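Each `train.py:1114` progress record pairs the current batch's loss (`over N frames` for that one batch) with `tot_loss`, a frame-weighted average over the recent stretch of training, which is why its frame totals run into the millions. A small sketch of frame-weighted loss tracking, assuming a geometric decay of older batches (the decay constant and the exact averaging used by this codebase are assumptions, not shown in the logs):

```python
# Sketch of the frame-weighted running loss reported as "tot_loss".
# Assumption: older batches are decayed geometrically before each update.
class RunningLoss:
    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.loss_sum = 0.0   # decayed sum of loss * frames
        self.frames = 0.0     # decayed sum of frames

    def update(self, loss: float, num_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames
        return self.loss_sum / self.frames    # the value logged as tot_loss

tracker = RunningLoss()
tracker.update(0.3605, 13551.0)   # e.g. the batch-2350 record above
```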
], batch size: 38, lr: 4.40e-02, grad_scale: 8.0 +2024-08-03 01:25:33,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8616.666666666666, ans=0.21383333333333332 +2024-08-03 01:25:50,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.04 vs. limit=13.962499999999999 +2024-08-03 01:26:11,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=8690.0, ans=0.125 +2024-08-03 01:26:16,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.91 vs. limit=10.75875 +2024-08-03 01:26:22,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.13 vs. limit=14.044999999999998 +2024-08-03 01:26:23,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=8726.666666666666, ans=0.008972463768115942 +2024-08-03 01:26:31,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=8763.333333333334, ans=0.125 +2024-08-03 01:26:39,407 INFO [train.py:1114] (3/4) Epoch 1, batch 2400, loss[loss=0.3626, simple_loss=0.401, pruned_loss=0.1621, over 13526.00 frames. ], tot_loss[loss=0.3963, simple_loss=0.422, pruned_loss=0.1851, over 2642084.08 frames. ], batch size: 35, lr: 4.39e-02, grad_scale: 16.0 +2024-08-03 01:26:44,006 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.261e+02 1.562e+02 1.774e+02 2.172e+02 5.136e+02, threshold=3.548e+02, percent-clipped=1.0 +2024-08-03 01:26:48,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=8836.666666666666, ans=0.125 +2024-08-03 01:27:00,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.93 vs. limit=10.8275 +2024-08-03 01:27:01,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=8873.333333333334, ans=0.029694444444444443 +2024-08-03 01:27:08,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=8910.0, ans=0.025 +2024-08-03 01:27:17,302 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:27:18,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.62 vs. limit=9.473333333333333 +2024-08-03 01:27:19,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=8946.666666666666, ans=0.125 +2024-08-03 01:27:26,946 INFO [train.py:1114] (3/4) Epoch 1, batch 2450, loss[loss=0.3343, simple_loss=0.3877, pruned_loss=0.1405, over 13360.00 frames. ], tot_loss[loss=0.3966, simple_loss=0.4228, pruned_loss=0.185, over 2631985.21 frames. 
], batch size: 37, lr: 4.39e-02, grad_scale: 16.0 +2024-08-03 01:27:31,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=8983.333333333334, ans=0.125 +2024-08-03 01:27:34,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=8983.333333333334, ans=0.125 +2024-08-03 01:27:38,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9020.0, ans=0.125 +2024-08-03 01:27:55,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=9093.333333333334, ans=0.125 +2024-08-03 01:28:07,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=9130.0, ans=0.125 +2024-08-03 01:28:14,000 INFO [train.py:1114] (3/4) Epoch 1, batch 2500, loss[loss=0.4128, simple_loss=0.4498, pruned_loss=0.1879, over 13405.00 frames. ], tot_loss[loss=0.3925, simple_loss=0.4203, pruned_loss=0.1821, over 2636159.04 frames. ], batch size: 39, lr: 4.38e-02, grad_scale: 16.0 +2024-08-03 01:28:15,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. limit=10.9375 +2024-08-03 01:28:18,357 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.729e+02 1.988e+02 2.684e+02 1.225e+03, threshold=3.975e+02, percent-clipped=8.0 +2024-08-03 01:28:29,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9203.333333333334, ans=0.20796666666666663 +2024-08-03 01:28:35,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=9240.0, ans=0.025 +2024-08-03 01:28:36,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=9240.0, ans=0.025 +2024-08-03 01:28:40,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9276.666666666666, ans=0.125 +2024-08-03 01:28:41,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=4.3915 +2024-08-03 01:28:59,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.82 vs. limit=14.485 +2024-08-03 01:29:00,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=9350.0, ans=0.025 +2024-08-03 01:29:01,483 INFO [train.py:1114] (3/4) Epoch 1, batch 2550, loss[loss=0.3089, simple_loss=0.3441, pruned_loss=0.1369, over 13533.00 frames. ], tot_loss[loss=0.3886, simple_loss=0.4178, pruned_loss=0.1796, over 2638104.98 frames. ], batch size: 31, lr: 4.38e-02, grad_scale: 16.0 +2024-08-03 01:29:14,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=9386.666666666666, ans=0.02755555555555556 +2024-08-03 01:29:15,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.44 vs. 
limit=14.54 +2024-08-03 01:29:48,469 INFO [train.py:1114] (3/4) Epoch 1, batch 2600, loss[loss=0.3718, simple_loss=0.4107, pruned_loss=0.1665, over 13567.00 frames. ], tot_loss[loss=0.3855, simple_loss=0.4162, pruned_loss=0.1773, over 2636820.75 frames. ], batch size: 36, lr: 4.37e-02, grad_scale: 16.0 +2024-08-03 01:29:52,858 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.601e+02 1.881e+02 2.405e+02 3.900e+02, threshold=3.763e+02, percent-clipped=0.0 +2024-08-03 01:29:54,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=9533.333333333334, ans=0.125 +2024-08-03 01:29:56,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.35 vs. limit=14.6775 +2024-08-03 01:30:03,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=9570.0, ans=0.125 +2024-08-03 01:30:13,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=9606.666666666666, ans=0.125 +2024-08-03 01:30:33,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.23 vs. limit=11.14375 +2024-08-03 01:30:33,664 INFO [train.py:1114] (3/4) Epoch 1, batch 2650, loss[loss=0.4127, simple_loss=0.4475, pruned_loss=0.1889, over 13328.00 frames. ], tot_loss[loss=0.3833, simple_loss=0.4151, pruned_loss=0.1757, over 2639896.22 frames. ], batch size: 46, lr: 4.37e-02, grad_scale: 16.0 +2024-08-03 01:30:40,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=9716.666666666666, ans=0.008757246376811594 +2024-08-03 01:31:02,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9826.666666666666, ans=0.0 +2024-08-03 01:31:06,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=7.930666666666666 +2024-08-03 01:31:20,736 INFO [train.py:1114] (3/4) Epoch 1, batch 2700, loss[loss=0.3929, simple_loss=0.4409, pruned_loss=0.1724, over 13540.00 frames. ], tot_loss[loss=0.3828, simple_loss=0.4149, pruned_loss=0.1753, over 2637428.69 frames. 
], batch size: 40, lr: 4.36e-02, grad_scale: 16.0 +2024-08-03 01:31:25,954 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.664e+02 1.951e+02 2.469e+02 5.181e+02, threshold=3.901e+02, percent-clipped=9.0 +2024-08-03 01:31:26,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=9900.0, ans=0.02541666666666667 +2024-08-03 01:31:34,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=9936.666666666666, ans=0.09899494936611666 +2024-08-03 01:31:47,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=9936.666666666666, ans=0.125 +2024-08-03 01:31:55,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=9973.333333333334, ans=0.125 +2024-08-03 01:32:02,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=10010.0, ans=0.024958333333333336 +2024-08-03 01:32:17,814 INFO [train.py:1114] (3/4) Epoch 1, batch 2750, loss[loss=0.3282, simple_loss=0.3767, pruned_loss=0.1398, over 13328.00 frames. ], tot_loss[loss=0.3809, simple_loss=0.4135, pruned_loss=0.1741, over 2633227.28 frames. ], batch size: 34, lr: 4.36e-02, grad_scale: 16.0 +2024-08-03 01:32:17,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10083.333333333334, ans=0.0 +2024-08-03 01:32:40,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=10120.0, ans=0.024500000000000004 +2024-08-03 01:32:41,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=10156.666666666666, ans=0.125 +2024-08-03 01:32:45,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=10156.666666666666, ans=0.125 +2024-08-03 01:32:45,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=10156.666666666666, ans=0.125 +2024-08-03 01:33:06,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=15.1725 +2024-08-03 01:33:08,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=10230.0, ans=0.09899494936611666 +2024-08-03 01:33:10,607 INFO [train.py:1114] (3/4) Epoch 1, batch 2800, loss[loss=0.5409, simple_loss=0.5062, pruned_loss=0.2878, over 8911.00 frames. ], tot_loss[loss=0.3797, simple_loss=0.4126, pruned_loss=0.1733, over 2625717.44 frames. ], batch size: 96, lr: 4.36e-02, grad_scale: 32.0 +2024-08-03 01:33:12,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.99 vs. 
limit=15.2 +2024-08-03 01:33:14,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=10266.666666666666, ans=0.00863768115942029 +2024-08-03 01:33:15,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.678e+02 2.174e+02 2.677e+02 5.163e+02, threshold=4.348e+02, percent-clipped=2.0 +2024-08-03 01:33:15,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=10266.666666666666, ans=0.5406666666666667 +2024-08-03 01:33:23,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.20 vs. limit=15.2275 +2024-08-03 01:33:43,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.55 vs. limit=11.39125 +2024-08-03 01:35:19,833 INFO [train.py:1114] (3/4) Epoch 1, batch 2850, loss[loss=0.375, simple_loss=0.4251, pruned_loss=0.1625, over 13365.00 frames. ], tot_loss[loss=0.3801, simple_loss=0.4132, pruned_loss=0.1735, over 2620053.89 frames. ], batch size: 35, lr: 4.35e-02, grad_scale: 32.0 +2024-08-03 01:35:35,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=10450.0, ans=0.023125000000000003 +2024-08-03 01:35:44,043 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:35:55,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=10523.333333333334, ans=0.02281944444444444 +2024-08-03 01:36:05,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=10596.666666666666, ans=0.19403333333333334 +2024-08-03 01:36:05,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=10596.666666666666, ans=0.125 +2024-08-03 01:36:11,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=10596.666666666666, ans=0.022513888888888892 +2024-08-03 01:36:11,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=10596.666666666666, ans=0.125 +2024-08-03 01:36:14,790 INFO [train.py:1114] (3/4) Epoch 1, batch 2900, loss[loss=0.3809, simple_loss=0.4158, pruned_loss=0.173, over 13367.00 frames. ], tot_loss[loss=0.3782, simple_loss=0.413, pruned_loss=0.1717, over 2631197.49 frames. ], batch size: 36, lr: 4.35e-02, grad_scale: 32.0 +2024-08-03 01:36:19,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.316e+02 1.668e+02 1.982e+02 2.661e+02 5.002e+02, threshold=3.964e+02, percent-clipped=4.0 +2024-08-03 01:36:25,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10670.0, ans=0.19329999999999997 +2024-08-03 01:36:35,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=10706.666666666666, ans=0.02205555555555556 +2024-08-03 01:36:57,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.27 vs. 
limit=8.312000000000001 +2024-08-03 01:37:00,008 INFO [train.py:1114] (3/4) Epoch 1, batch 2950, loss[loss=0.3271, simple_loss=0.3714, pruned_loss=0.1414, over 13329.00 frames. ], tot_loss[loss=0.3764, simple_loss=0.411, pruned_loss=0.1709, over 2629683.58 frames. ], batch size: 34, lr: 4.34e-02, grad_scale: 32.0 +2024-08-03 01:38:04,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=10926.666666666666, ans=0.07 +2024-08-03 01:38:05,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=10926.666666666666, ans=0.025 +2024-08-03 01:38:07,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=10926.666666666666, ans=0.125 +2024-08-03 01:38:10,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=10926.666666666666, ans=0.125 +2024-08-03 01:38:16,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=10963.333333333334, ans=0.07 +2024-08-03 01:38:21,968 INFO [train.py:1114] (3/4) Epoch 1, batch 3000, loss[loss=0.3767, simple_loss=0.4134, pruned_loss=0.1701, over 13532.00 frames. ], tot_loss[loss=0.374, simple_loss=0.4088, pruned_loss=0.1696, over 2629252.86 frames. ], batch size: 37, lr: 4.34e-02, grad_scale: 32.0 +2024-08-03 01:38:21,968 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 01:39:12,987 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=0.2888, simple_loss=0.3696, pruned_loss=0.104, over 944034.00 frames. +2024-08-03 01:39:12,989 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 01:39:13,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=11000.0, ans=0.5150000000000001 +2024-08-03 01:39:17,652 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.298e+02 1.604e+02 1.963e+02 2.352e+02 4.798e+02, threshold=3.927e+02, percent-clipped=2.0 +2024-08-03 01:39:36,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=11073.333333333334, ans=0.2 +2024-08-03 01:39:41,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=3.67 vs. limit=11.66625 +2024-08-03 01:39:44,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=11110.0, ans=0.125 +2024-08-03 01:39:49,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11146.666666666666, ans=0.125 +2024-08-03 01:39:51,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=11146.666666666666, ans=0.5098666666666667 +2024-08-03 01:40:00,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=11146.666666666666, ans=0.008446376811594204 +2024-08-03 01:40:03,053 INFO [train.py:1114] (3/4) Epoch 1, batch 3050, loss[loss=0.3357, simple_loss=0.3781, pruned_loss=0.1467, over 13518.00 frames. ], tot_loss[loss=0.3752, simple_loss=0.4101, pruned_loss=0.1702, over 2626972.87 frames. 
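
Most of the scaling.py:214 entries are ScheduledFloat values: hyperparameters such as dropout probabilities, skip rates, and balancer probabilities that change as a function of batch_count. One plausible mechanism, sketched below, is piecewise-linear interpolation over (batch_count, value) breakpoints. The class name and constructor are assumptions, but the (0, 0.3) to (20000, 0.1) breakpoints chosen here do reproduce the encoder_embed.dropout.p value logged above (batch_count=10596.67, ans=0.19403...).

```python
class PiecewiseSchedule:
    """A float looked up by batch_count, linearly interpolated between breakpoints."""
    def __init__(self, *points):
        self.points = sorted(points)  # (batch_count, value) breakpoints

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)
        return pts[-1][1]

dropout_p = PiecewiseSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(10596.666666666666))  # 0.19403333..., matching the log
```
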
], batch size: 35, lr: 4.33e-02, grad_scale: 32.0 +2024-08-03 01:40:08,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.42 vs. limit=15.8875 +2024-08-03 01:40:09,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11183.333333333334, ans=0.125 +2024-08-03 01:40:17,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.90 vs. limit=11.7075 +2024-08-03 01:40:23,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=4.683 +2024-08-03 01:40:40,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=11293.333333333334, ans=0.125 +2024-08-03 01:40:44,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.95 vs. limit=8.517333333333333 +2024-08-03 01:40:55,296 INFO [train.py:1114] (3/4) Epoch 1, batch 3100, loss[loss=0.4063, simple_loss=0.4352, pruned_loss=0.1887, over 13315.00 frames. ], tot_loss[loss=0.3742, simple_loss=0.4092, pruned_loss=0.1696, over 2626676.68 frames. ], batch size: 46, lr: 4.33e-02, grad_scale: 32.0 +2024-08-03 01:40:59,560 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.612e+02 1.933e+02 2.547e+02 5.853e+02, threshold=3.866e+02, percent-clipped=4.0 +2024-08-03 01:41:17,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11440.0, ans=0.1856 +2024-08-03 01:41:21,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11440.0, ans=0.1856 +2024-08-03 01:41:30,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11440.0, ans=0.019000000000000003 +2024-08-03 01:41:40,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.98 vs. limit=16.1075 +2024-08-03 01:41:45,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11513.333333333334, ans=0.18486666666666668 +2024-08-03 01:41:49,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=11513.333333333334, ans=0.025 +2024-08-03 01:41:54,265 INFO [train.py:1114] (3/4) Epoch 1, batch 3150, loss[loss=0.3518, simple_loss=0.401, pruned_loss=0.1513, over 13011.00 frames. ], tot_loss[loss=0.3718, simple_loss=0.4082, pruned_loss=0.1677, over 2628290.66 frames. ], batch size: 48, lr: 4.32e-02, grad_scale: 32.0 +2024-08-03 01:42:01,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.73 vs. 
limit=16.1625 +2024-08-03 01:42:35,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=11696.666666666666, ans=0.01793055555555556 +2024-08-03 01:42:44,258 INFO [train.py:1114] (3/4) Epoch 1, batch 3200, loss[loss=0.3368, simple_loss=0.392, pruned_loss=0.1408, over 13554.00 frames. ], tot_loss[loss=0.3704, simple_loss=0.4069, pruned_loss=0.1669, over 2634400.40 frames. ], batch size: 37, lr: 4.32e-02, grad_scale: 32.0 +2024-08-03 01:42:48,502 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.624e+02 1.966e+02 2.680e+02 4.372e+02, threshold=3.932e+02, percent-clipped=2.0 +2024-08-03 01:43:04,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=11770.0, ans=0.035 +2024-08-03 01:43:05,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=11770.0, ans=0.125 +2024-08-03 01:43:12,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=11806.666666666666, ans=0.01747222222222223 +2024-08-03 01:43:12,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=11806.666666666666, ans=0.04949747468305833 +2024-08-03 01:43:14,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=11843.333333333334, ans=0.125 +2024-08-03 01:43:15,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=11843.333333333334, ans=0.017319444444444443 +2024-08-03 01:43:22,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=11843.333333333334, ans=0.125 +2024-08-03 01:43:33,494 INFO [train.py:1114] (3/4) Epoch 1, batch 3250, loss[loss=0.3849, simple_loss=0.424, pruned_loss=0.1729, over 13391.00 frames. ], tot_loss[loss=0.3679, simple_loss=0.4059, pruned_loss=0.1649, over 2639151.33 frames. ], batch size: 38, lr: 4.31e-02, grad_scale: 32.0 +2024-08-03 01:43:33,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=4.7875 +2024-08-03 01:43:49,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=11953.333333333334, ans=0.016861111111111104 +2024-08-03 01:44:08,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.68 vs. limit=16.5475 +2024-08-03 01:44:18,531 INFO [train.py:1114] (3/4) Epoch 1, batch 3300, loss[loss=0.425, simple_loss=0.4458, pruned_loss=0.2021, over 12798.00 frames. ], tot_loss[loss=0.3642, simple_loss=0.4031, pruned_loss=0.1627, over 2641151.77 frames. 
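
The scaling.py:1024 "Whitening" entries compare a per-module metric against a scheduled limit. A plausible reading, sketched here as an assumption rather than the module's actual code, is a measure of how far the feature covariance is from a scaled identity: exactly 1.0 for perfectly "white" activations, and growing as the eigenvalue spectrum becomes more lopsided.

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels) activations for one group (num_groups=1)."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    d = cov.shape[0]
    # trace(cov^2) * d / trace(cov)^2 == mean(eig^2) / mean(eig)^2 >= 1,
    # with equality when all eigenvalues are equal (fully whitened features)
    return (torch.trace(cov @ cov) * d / torch.trace(cov) ** 2).item()

feats = torch.randn(1000, 384)      # near-white: metric modestly above 1
print(whitening_metric(feats))
```
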
], batch size: 52, lr: 4.31e-02, grad_scale: 32.0 +2024-08-03 01:44:22,866 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.557e+02 1.877e+02 2.344e+02 4.156e+02, threshold=3.753e+02, percent-clipped=2.0 +2024-08-03 01:44:27,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=12136.666666666666, ans=0.025 +2024-08-03 01:44:28,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=12.05125 +2024-08-03 01:44:36,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12173.333333333334, ans=0.125 +2024-08-03 01:44:42,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.18 vs. limit=8.043333333333333 +2024-08-03 01:45:02,675 INFO [train.py:1114] (3/4) Epoch 1, batch 3350, loss[loss=0.3869, simple_loss=0.4306, pruned_loss=0.1716, over 13270.00 frames. ], tot_loss[loss=0.3654, simple_loss=0.4043, pruned_loss=0.1632, over 2630960.74 frames. ], batch size: 49, lr: 4.30e-02, grad_scale: 32.0 +2024-08-03 01:45:03,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.32 vs. limit=16.7125 +2024-08-03 01:45:31,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=12393.333333333334, ans=0.00817536231884058 +2024-08-03 01:45:45,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=12430.0, ans=0.125 +2024-08-03 01:45:46,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12466.666666666666, ans=0.125 +2024-08-03 01:45:47,879 INFO [train.py:1114] (3/4) Epoch 1, batch 3400, loss[loss=0.291, simple_loss=0.3408, pruned_loss=0.1206, over 13538.00 frames. ], tot_loss[loss=0.3635, simple_loss=0.4027, pruned_loss=0.1622, over 2625281.16 frames. ], batch size: 31, lr: 4.29e-02, grad_scale: 32.0 +2024-08-03 01:45:52,232 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.662e+02 2.017e+02 2.620e+02 5.936e+02, threshold=4.033e+02, percent-clipped=10.0 +2024-08-03 01:46:21,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.89 vs. limit=16.877499999999998 +2024-08-03 01:46:44,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=12576.666666666666, ans=0.09899494936611666 +2024-08-03 01:46:52,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.16 vs. limit=11.288333333333334 +2024-08-03 01:47:02,906 INFO [train.py:1114] (3/4) Epoch 1, batch 3450, loss[loss=0.4103, simple_loss=0.4348, pruned_loss=0.1929, over 12892.00 frames. ], tot_loss[loss=0.3632, simple_loss=0.4026, pruned_loss=0.1619, over 2629407.84 frames. ], batch size: 52, lr: 4.29e-02, grad_scale: 32.0 +2024-08-03 01:47:05,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.92 vs. 
limit=11.325 +2024-08-03 01:47:06,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=12650.0, ans=0.125 +2024-08-03 01:47:19,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=12686.666666666666, ans=0.07 +2024-08-03 01:47:22,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=12723.333333333334, ans=0.125 +2024-08-03 01:47:23,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=12723.333333333334, ans=0.125 +2024-08-03 01:47:44,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=12796.666666666666, ans=10.0 +2024-08-03 01:47:54,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12833.333333333334, ans=0.17166666666666666 +2024-08-03 01:47:55,451 INFO [train.py:1114] (3/4) Epoch 1, batch 3500, loss[loss=0.3438, simple_loss=0.379, pruned_loss=0.1543, over 13538.00 frames. ], tot_loss[loss=0.3593, simple_loss=0.3995, pruned_loss=0.1596, over 2630461.03 frames. ], batch size: 34, lr: 4.28e-02, grad_scale: 32.0 +2024-08-03 01:47:55,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.88 vs. limit=12.3125 +2024-08-03 01:47:59,791 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.280e+02 1.559e+02 1.825e+02 2.381e+02 4.772e+02, threshold=3.650e+02, percent-clipped=2.0 +2024-08-03 01:48:06,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=12870.0, ans=0.008071739130434782 +2024-08-03 01:48:22,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=12906.666666666666, ans=0.07 +2024-08-03 01:48:42,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=12980.0, ans=10.0 +2024-08-03 01:48:45,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=12980.0, ans=0.125 +2024-08-03 01:48:51,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=3.91 vs. limit=12.38125 +2024-08-03 01:48:51,939 INFO [train.py:1114] (3/4) Epoch 1, batch 3550, loss[loss=0.3915, simple_loss=0.4293, pruned_loss=0.1768, over 12422.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.4028, pruned_loss=0.1616, over 2628268.77 frames. 
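
Each training entry reports three numbers: loss, simple_loss, and pruned_loss. They are consistent with a fixed weighted sum, loss = 0.5 * simple_loss + 1.0 * pruned_loss, which is the usual split in k2-style pruned-transducer recipes; the weights below are inferred from the logged values rather than read from the training script, but the check against the batch-3500 totals above works out.

```python
def combined_loss(simple_loss: float, pruned_loss: float,
                  simple_scale: float = 0.5, pruned_scale: float = 1.0) -> float:
    return simple_scale * simple_loss + pruned_scale * pruned_loss

# Batch-3500 totals above: loss=0.3593, simple_loss=0.3995, pruned_loss=0.1596
print(combined_loss(0.3995, 0.1596))  # 0.35935 -> 0.3593 after rounding
```
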
], batch size: 58, lr: 4.28e-02, grad_scale: 32.0 +2024-08-03 01:48:58,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=13016.666666666666, ans=0.16983333333333334 +2024-08-03 01:49:08,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=13053.333333333334, ans=0.125 +2024-08-03 01:49:17,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=13090.0, ans=0.012125000000000004 +2024-08-03 01:49:47,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13163.333333333334, ans=0.16836666666666666 +2024-08-03 01:49:51,066 INFO [train.py:1114] (3/4) Epoch 1, batch 3600, loss[loss=0.4476, simple_loss=0.4549, pruned_loss=0.2202, over 8880.00 frames. ], tot_loss[loss=0.3739, simple_loss=0.4096, pruned_loss=0.1691, over 2483112.68 frames. ], batch size: 96, lr: 4.27e-02, grad_scale: 32.0 +2024-08-03 01:49:52,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=13200.0, ans=0.125 +2024-08-03 01:49:55,455 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.481e+02 1.802e+02 2.019e+02 3.446e+02, threshold=3.604e+02, percent-clipped=0.0 +2024-08-03 01:50:02,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=13236.666666666666, ans=0.125 +2024-08-03 01:50:03,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=13236.666666666666, ans=0.007992028985507247 +2024-08-03 01:50:04,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=13236.666666666666, ans=0.05 +2024-08-03 01:50:04,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=13236.666666666666, ans=0.4367166666666667 +2024-08-03 01:50:16,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=13273.333333333334, ans=0.025 +2024-08-03 01:50:16,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=13273.333333333334, ans=0.125 +2024-08-03 01:50:34,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=13310.0, ans=0.07 +2024-08-03 01:50:38,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=12.49125 +2024-08-03 01:52:08,733 INFO [train.py:1114] (3/4) Epoch 2, batch 0, loss[loss=0.3346, simple_loss=0.3745, pruned_loss=0.1473, over 13365.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3745, pruned_loss=0.1473, over 13365.00 frames. ], batch size: 33, lr: 4.19e-02, grad_scale: 32.0 +2024-08-03 01:52:08,734 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 01:52:18,755 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.2954, simple_loss=0.3785, pruned_loss=0.1062, over 944034.00 frames. 
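
The "Computing validation loss" / "validation: loss=..." pairs above show the recipe pausing training at fixed points to score the same held-out set (944034.00 frames each time) before resuming. A minimal sketch follows, assuming a loader yielding feature dicts and a model that returns a per-frame-average loss plus a frame count; both interfaces are placeholders for illustration.

```python
import torch

def compute_validation_loss(model, valid_loader, device) -> float:
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():                      # no gradients during validation
        for batch in valid_loader:
            feats = batch["features"].to(device)   # assumed batch layout
            loss, num_frames = model(feats)        # assumed model interface
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    return tot_loss / tot_frames  # frame-weighted average, as the logs report
```
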
+2024-08-03 01:52:18,756 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 01:52:18,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=13346.666666666666, ans=0.125 +2024-08-03 01:52:32,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=13383.333333333334, ans=0.16616666666666666 +2024-08-03 01:52:39,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13420.0, ans=0.125 +2024-08-03 01:52:43,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=13420.0, ans=0.04949747468305833 +2024-08-03 01:52:46,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13456.666666666666, ans=0.0 +2024-08-03 01:52:48,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13456.666666666666, ans=0.16543333333333335 +2024-08-03 01:52:51,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=13456.666666666666, ans=0.125 +2024-08-03 01:53:05,766 INFO [train.py:1114] (3/4) Epoch 2, batch 50, loss[loss=0.2945, simple_loss=0.3442, pruned_loss=0.1224, over 13427.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.4084, pruned_loss=0.1654, over 579096.17 frames. ], batch size: 32, lr: 4.18e-02, grad_scale: 16.0 +2024-08-03 01:53:10,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13530.0, ans=0.125 +2024-08-03 01:53:25,127 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.505e+02 1.833e+02 2.741e+02 6.945e+02, threshold=3.667e+02, percent-clipped=7.0 +2024-08-03 01:53:34,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=13603.333333333334, ans=0.00791231884057971 +2024-08-03 01:53:42,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=13640.0, ans=0.125 +2024-08-03 01:53:42,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13640.0, ans=0.125 +2024-08-03 01:53:56,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=5.057 +2024-08-03 01:53:56,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=13713.333333333334, ans=12.6425 +2024-08-03 01:53:56,980 INFO [train.py:1114] (3/4) Epoch 2, batch 100, loss[loss=0.3519, simple_loss=0.3928, pruned_loss=0.1555, over 13535.00 frames. ], tot_loss[loss=0.3644, simple_loss=0.4054, pruned_loss=0.1617, over 1026552.63 frames. 
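
For plotting loss curves, the per-batch entries are regular enough to parse mechanically. A small extractor for the fields visible above (epoch, batch, running loss, learning rate) is sketched below; the regex covers only this log format and nothing more.

```python
import re

PATTERN = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+).*?"
    r"tot_loss\[loss=(?P<loss>[\d.]+).*?\], batch size: \d+, "
    r"lr: (?P<lr>[\d.e-]+)"
)

def parse_line(line: str):
    m = PATTERN.search(line)
    return None if m is None else {
        "epoch": int(m["epoch"]), "batch": int(m["batch"]),
        "loss": float(m["loss"]), "lr": float(m["lr"]),
    }

line = ("2024-08-03 01:53:56,980 INFO [train.py:1114] (3/4) Epoch 2, batch 100, "
        "loss[loss=0.3519, ...], tot_loss[loss=0.3644, simple_loss=0.4054, "
        "pruned_loss=0.1617, over 1026552.63 frames. ], batch size: 35, lr: 4.17e-02")
print(parse_line(line))  # {'epoch': 2, 'batch': 100, 'loss': 0.3644, 'lr': 0.0417}
```
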
], batch size: 35, lr: 4.17e-02, grad_scale: 16.0 +2024-08-03 01:54:01,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=13713.333333333334, ans=0.009527777777777774 +2024-08-03 01:54:06,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.84 vs. limit=5.0625 +2024-08-03 01:54:10,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=13750.0, ans=0.007880434782608695 +2024-08-03 01:54:11,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13750.0, ans=0.125 +2024-08-03 01:54:15,066 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:54:28,915 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:54:31,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=13823.333333333334, ans=0.05 +2024-08-03 01:54:32,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=13823.333333333334, ans=0.025 +2024-08-03 01:54:35,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13823.333333333334, ans=0.16176666666666667 +2024-08-03 01:54:46,417 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 01:54:48,053 INFO [train.py:1114] (3/4) Epoch 2, batch 150, loss[loss=0.3202, simple_loss=0.3579, pruned_loss=0.1413, over 13405.00 frames. ], tot_loss[loss=0.3578, simple_loss=0.4001, pruned_loss=0.1577, over 1387937.81 frames. ], batch size: 32, lr: 4.17e-02, grad_scale: 16.0 +2024-08-03 01:54:58,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=13933.333333333334, ans=0.025 +2024-08-03 01:55:03,037 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.519e+02 1.772e+02 2.227e+02 3.651e+02, threshold=3.544e+02, percent-clipped=0.0 +2024-08-03 01:55:22,795 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.743e-03 +2024-08-03 01:55:27,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.70 vs. limit=8.510833333333334 +2024-08-03 01:55:31,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=14043.333333333334, ans=0.125 +2024-08-03 01:55:34,695 INFO [train.py:1114] (3/4) Epoch 2, batch 200, loss[loss=0.3423, simple_loss=0.3975, pruned_loss=0.1436, over 12426.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.3968, pruned_loss=0.1554, over 1666212.80 frames. 
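
The "over N frames" totals on the tot_loss side grow sub-linearly across epoch 2 (579096.17 at batch 50, 1026552.63 at batch 100, 1387937.81 at batch 150, 1666212.80 at batch 200) even though batches are similar in size, which suggests an exponentially decayed accumulator that down-weights older batches. The sketch below assumes that mechanism; the 0.996 decay constant is a guess fitted loosely to the numbers above, not the recipe's actual value.

```python
class RunningLoss:
    """Exponentially decayed running loss, printed as tot_loss[... over N frames]."""
    def __init__(self, decay: float = 0.996):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, loss_per_frame: float, batch_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + loss_per_frame * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / self.frames

tracker = RunningLoss()
for _ in range(50):
    tracker.update(0.37, 11600.0)
print(tracker.frames)   # well below 50 * 11600 because of the decay
```
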
], batch size: 58, lr: 4.16e-02, grad_scale: 16.0 +2024-08-03 01:55:37,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=14080.0, ans=0.09899494936611666 +2024-08-03 01:55:40,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14080.0, ans=0.1592 +2024-08-03 01:55:50,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=14116.666666666666, ans=0.125 +2024-08-03 01:56:02,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=14153.333333333334, ans=0.15846666666666667 +2024-08-03 01:56:23,308 INFO [train.py:1114] (3/4) Epoch 2, batch 250, loss[loss=0.3438, simple_loss=0.4055, pruned_loss=0.1411, over 13320.00 frames. ], tot_loss[loss=0.3493, simple_loss=0.3942, pruned_loss=0.1522, over 1884860.59 frames. ], batch size: 46, lr: 4.16e-02, grad_scale: 16.0 +2024-08-03 01:56:24,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14263.333333333334, ans=0.15736666666666665 +2024-08-03 01:56:38,386 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.594e+02 1.964e+02 2.594e+02 6.291e+02, threshold=3.929e+02, percent-clipped=8.0 +2024-08-03 01:56:40,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=14300.0, ans=0.3995000000000001 +2024-08-03 01:56:43,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=14336.666666666666, ans=0.125 +2024-08-03 01:56:45,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=14336.666666666666, ans=0.007752898550724638 +2024-08-03 01:56:47,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=14336.666666666666, ans=0.15663333333333335 +2024-08-03 01:56:49,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=14336.666666666666, ans=0.09899494936611666 +2024-08-03 01:57:01,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=14373.333333333334, ans=0.125 +2024-08-03 01:57:12,758 INFO [train.py:1114] (3/4) Epoch 2, batch 300, loss[loss=0.3501, simple_loss=0.3993, pruned_loss=0.1505, over 13426.00 frames. ], tot_loss[loss=0.3485, simple_loss=0.3934, pruned_loss=0.1518, over 2051505.36 frames. ], batch size: 42, lr: 4.15e-02, grad_scale: 16.0 +2024-08-03 01:57:12,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=14446.666666666666, ans=0.0077289855072463776 +2024-08-03 01:57:37,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=11.95 vs. limit=12.945 +2024-08-03 01:57:38,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.66 vs. 
limit=12.945 +2024-08-03 01:57:53,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14593.333333333334, ans=0.15406666666666666 +2024-08-03 01:58:00,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=12.9725 +2024-08-03 01:58:01,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=14630.0, ans=0.005708333333333336 +2024-08-03 01:58:01,708 INFO [train.py:1114] (3/4) Epoch 2, batch 350, loss[loss=0.3482, simple_loss=0.3884, pruned_loss=0.154, over 13587.00 frames. ], tot_loss[loss=0.348, simple_loss=0.3934, pruned_loss=0.1513, over 2182124.26 frames. ], batch size: 33, lr: 4.15e-02, grad_scale: 16.0 +2024-08-03 01:58:13,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=14666.666666666666, ans=0.005555555555555557 +2024-08-03 01:58:53,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14666.666666666666, ans=0.15333333333333335 +2024-08-03 01:58:55,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=14666.666666666666, ans=0.005555555555555557 +2024-08-03 01:58:58,108 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.602e+02 1.924e+02 2.648e+02 5.206e+02, threshold=3.847e+02, percent-clipped=6.0 +2024-08-03 01:58:59,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=14666.666666666666, ans=0.125 +2024-08-03 01:59:05,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=14703.333333333334, ans=0.125 +2024-08-03 01:59:07,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=14703.333333333334, ans=0.125 +2024-08-03 01:59:10,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=14740.0, ans=0.0076652173913043485 +2024-08-03 01:59:17,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14740.0, ans=0.125 +2024-08-03 01:59:20,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=18.5825 +2024-08-03 01:59:29,770 INFO [train.py:1114] (3/4) Epoch 2, batch 400, loss[loss=0.3357, simple_loss=0.3837, pruned_loss=0.1438, over 13345.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3927, pruned_loss=0.1512, over 2286171.47 frames. ], batch size: 37, lr: 4.14e-02, grad_scale: 32.0 +2024-08-03 02:04:04,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=14813.333333333334, ans=0.125 +2024-08-03 02:04:08,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.92 vs. limit=13.06875 +2024-08-03 02:04:12,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. 
limit=13.06875 +2024-08-03 02:05:20,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=14886.666666666666, ans=0.125 +2024-08-03 02:05:23,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=14886.666666666666, ans=0.8988666666666666 +2024-08-03 02:05:26,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.88 vs. limit=18.665 +2024-08-03 02:05:41,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=13.096250000000001 +2024-08-03 02:05:44,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=14923.333333333334, ans=0.025 +2024-08-03 02:05:54,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=18.72 +2024-08-03 02:05:54,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14960.0, ans=0.1504 +2024-08-03 02:06:33,894 INFO [train.py:1114] (3/4) Epoch 2, batch 450, loss[loss=0.3717, simple_loss=0.4134, pruned_loss=0.165, over 13561.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3931, pruned_loss=0.1512, over 2359978.86 frames. ], batch size: 38, lr: 4.13e-02, grad_scale: 32.0 +2024-08-03 02:06:48,867 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.511e+02 1.857e+02 2.288e+02 3.385e+02, threshold=3.714e+02, percent-clipped=0.0 +2024-08-03 02:06:51,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=15070.0, ans=0.0038750000000000034 +2024-08-03 02:07:20,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=15143.333333333334, ans=0.125 +2024-08-03 02:07:28,592 INFO [train.py:1114] (3/4) Epoch 2, batch 500, loss[loss=0.3813, simple_loss=0.4227, pruned_loss=0.17, over 13397.00 frames. ], tot_loss[loss=0.345, simple_loss=0.391, pruned_loss=0.1495, over 2425277.80 frames. ], batch size: 43, lr: 4.13e-02, grad_scale: 32.0 +2024-08-03 02:07:28,866 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:07:47,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15216.666666666666, ans=0.125 +2024-08-03 02:07:49,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=15253.333333333334, ans=0.007553623188405797 +2024-08-03 02:07:49,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=18.939999999999998 +2024-08-03 02:07:51,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.07 vs. 
limit=18.939999999999998 +2024-08-03 02:07:56,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15253.333333333334, ans=0.125 +2024-08-03 02:08:04,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15290.0, ans=0.0 +2024-08-03 02:08:17,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=15326.666666666666, ans=0.025 +2024-08-03 02:08:18,907 INFO [train.py:1114] (3/4) Epoch 2, batch 550, loss[loss=0.3688, simple_loss=0.4102, pruned_loss=0.1637, over 13027.00 frames. ], tot_loss[loss=0.3443, simple_loss=0.3902, pruned_loss=0.1492, over 2467521.15 frames. ], batch size: 48, lr: 4.12e-02, grad_scale: 32.0 +2024-08-03 02:08:33,978 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.486e+02 1.782e+02 2.081e+02 4.201e+02, threshold=3.563e+02, percent-clipped=2.0 +2024-08-03 02:08:34,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=15400.0, ans=13.275 +2024-08-03 02:08:50,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=15473.333333333334, ans=0.0021944444444444433 +2024-08-03 02:09:19,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=15510.0, ans=0.007497826086956522 +2024-08-03 02:09:22,839 INFO [train.py:1114] (3/4) Epoch 2, batch 600, loss[loss=0.3748, simple_loss=0.4161, pruned_loss=0.1667, over 13316.00 frames. ], tot_loss[loss=0.3441, simple_loss=0.3902, pruned_loss=0.149, over 2507460.49 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 32.0 +2024-08-03 02:09:25,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15546.666666666666, ans=0.14453333333333335 +2024-08-03 02:09:29,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15546.666666666666, ans=0.125 +2024-08-03 02:09:56,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=15656.666666666666, ans=0.007465942028985508 +2024-08-03 02:10:12,466 INFO [train.py:1114] (3/4) Epoch 2, batch 650, loss[loss=0.3203, simple_loss=0.3798, pruned_loss=0.1304, over 13538.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3888, pruned_loss=0.148, over 2543256.67 frames. ], batch size: 37, lr: 4.11e-02, grad_scale: 32.0 +2024-08-03 02:10:15,034 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:10:17,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.38 vs. 
limit=13.39875 +2024-08-03 02:10:27,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=15766.666666666666, ans=0.34816666666666674 +2024-08-03 02:10:28,712 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.454e+02 1.669e+02 2.017e+02 2.893e+02, threshold=3.339e+02, percent-clipped=0.0 +2024-08-03 02:10:52,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15803.333333333334, ans=0.125 +2024-08-03 02:11:03,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=15840.0, ans=0.025 +2024-08-03 02:11:06,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15876.666666666666, ans=0.14123333333333335 +2024-08-03 02:11:11,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=15876.666666666666, ans=0.3443166666666667 +2024-08-03 02:11:12,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=15876.666666666666, ans=0.125 +2024-08-03 02:11:13,922 INFO [train.py:1114] (3/4) Epoch 2, batch 700, loss[loss=0.3701, simple_loss=0.4047, pruned_loss=0.1677, over 13538.00 frames. ], tot_loss[loss=0.3426, simple_loss=0.3894, pruned_loss=0.1479, over 2566240.26 frames. ], batch size: 35, lr: 4.11e-02, grad_scale: 8.0 +2024-08-03 02:11:47,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.41 vs. limit=13.50875 +2024-08-03 02:11:47,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=16023.333333333334, ans=0.125 +2024-08-03 02:11:55,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=16060.0, ans=0.3379 +2024-08-03 02:11:56,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=10.424 +2024-08-03 02:12:04,801 INFO [train.py:1114] (3/4) Epoch 2, batch 750, loss[loss=0.3462, simple_loss=0.3951, pruned_loss=0.1487, over 13353.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3878, pruned_loss=0.1468, over 2582783.86 frames. ], batch size: 37, lr: 4.10e-02, grad_scale: 8.0 +2024-08-03 02:12:21,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.638e+02 1.990e+02 2.530e+02 5.439e+02, threshold=3.980e+02, percent-clipped=7.0 +2024-08-03 02:12:22,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=16170.0, ans=0.33405000000000007 +2024-08-03 02:12:36,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=16206.666666666666, ans=0.33276666666666677 +2024-08-03 02:13:00,968 INFO [train.py:1114] (3/4) Epoch 2, batch 800, loss[loss=0.3245, simple_loss=0.3631, pruned_loss=0.1429, over 13334.00 frames. ], tot_loss[loss=0.3393, simple_loss=0.3868, pruned_loss=0.1459, over 2596971.56 frames. 
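
The grad_scale field in the entries above moves between 8.0, 16.0, and 32.0 in powers of two, which is the signature of dynamic loss scaling for mixed-precision training: the scale doubles after a stretch of finite gradients and halves when an inf/nan is hit. A sketch using PyTorch's stock GradScaler follows; the training script may manage the scale itself rather than use this exact API.

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0)

def training_step(model, optimizer, feats):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():     # forward in reduced precision
        loss = model(feats)             # assumed model interface
    scaler.scale(loss).backward()       # backward on the scaled loss
    scaler.step(optimizer)              # skips the update on inf/nan gradients
    scaler.update()                     # grows/shrinks the scale -> logged grad_scale
    return scaler.get_scale()
```
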
], batch size: 33, lr: 4.09e-02, grad_scale: 16.0 +2024-08-03 02:13:04,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=16280.0, ans=0.007330434782608696 +2024-08-03 02:13:09,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=16280.0, ans=0.035 +2024-08-03 02:13:13,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=16316.666666666666, ans=0.125 +2024-08-03 02:13:32,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=5.452999999999999 +2024-08-03 02:13:39,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=16390.0, ans=0.0 +2024-08-03 02:13:52,945 INFO [train.py:1114] (3/4) Epoch 2, batch 850, loss[loss=0.3661, simple_loss=0.411, pruned_loss=0.1606, over 13343.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3865, pruned_loss=0.1454, over 2609879.78 frames. ], batch size: 40, lr: 4.09e-02, grad_scale: 16.0 +2024-08-03 02:13:55,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16463.333333333332, ans=0.125 +2024-08-03 02:14:12,138 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.467e+02 1.720e+02 2.030e+02 3.514e+02, threshold=3.439e+02, percent-clipped=0.0 +2024-08-03 02:14:23,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=16573.333333333332, ans=0.125 +2024-08-03 02:14:29,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.50 vs. limit=19.93 +2024-08-03 02:14:35,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16610.0, ans=0.13390000000000002 +2024-08-03 02:14:38,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.37 vs. limit=13.72875 +2024-08-03 02:14:39,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.49 vs. limit=13.72875 +2024-08-03 02:14:42,215 INFO [train.py:1114] (3/4) Epoch 2, batch 900, loss[loss=0.2992, simple_loss=0.3523, pruned_loss=0.123, over 13361.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3861, pruned_loss=0.1451, over 2611767.37 frames. ], batch size: 33, lr: 4.08e-02, grad_scale: 16.0 +2024-08-03 02:14:42,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=16646.666666666668, ans=0.125 +2024-08-03 02:14:49,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=16646.666666666668, ans=0.09899494936611666 +2024-08-03 02:14:50,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.40 vs. 
limit=19.985 +2024-08-03 02:15:08,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=16720.0, ans=0.125 +2024-08-03 02:15:27,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.12 vs. limit=5.519 +2024-08-03 02:15:30,754 INFO [train.py:1114] (3/4) Epoch 2, batch 950, loss[loss=0.2892, simple_loss=0.3543, pruned_loss=0.112, over 13525.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3861, pruned_loss=0.1452, over 2611680.45 frames. ], batch size: 34, lr: 4.08e-02, grad_scale: 16.0 +2024-08-03 02:15:47,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=16866.666666666668, ans=0.125 +2024-08-03 02:15:49,944 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.484e+02 1.735e+02 2.135e+02 4.344e+02, threshold=3.469e+02, percent-clipped=2.0 +2024-08-03 02:15:50,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=16866.666666666668, ans=0.007202898550724637 +2024-08-03 02:15:52,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=16903.333333333332, ans=0.125 +2024-08-03 02:16:09,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=16940.0, ans=0.30710000000000004 +2024-08-03 02:16:09,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=16976.666666666668, ans=0.125 +2024-08-03 02:16:15,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=16976.666666666668, ans=0.30581666666666674 +2024-08-03 02:16:18,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=16976.666666666668, ans=0.30581666666666674 +2024-08-03 02:16:20,063 INFO [train.py:1114] (3/4) Epoch 2, batch 1000, loss[loss=0.3445, simple_loss=0.3875, pruned_loss=0.1507, over 13354.00 frames. ], tot_loss[loss=0.3393, simple_loss=0.3867, pruned_loss=0.146, over 2610202.99 frames. ], batch size: 35, lr: 4.07e-02, grad_scale: 16.0 +2024-08-03 02:16:27,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=17013.333333333332, ans=0.125 +2024-08-03 02:16:28,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=17050.0, ans=0.125 +2024-08-03 02:16:32,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.22 vs. limit=13.89375 +2024-08-03 02:16:38,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=17086.666666666668, ans=0.125 +2024-08-03 02:16:50,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=17123.333333333332, ans=0.125 +2024-08-03 02:16:59,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.12 vs. 
limit=20.369999999999997 +2024-08-03 02:17:09,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=17160.0, ans=0.125 +2024-08-03 02:17:10,907 INFO [train.py:1114] (3/4) Epoch 2, batch 1050, loss[loss=0.3573, simple_loss=0.4048, pruned_loss=0.155, over 13567.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3852, pruned_loss=0.1448, over 2614889.09 frames. ], batch size: 39, lr: 4.06e-02, grad_scale: 16.0 +2024-08-03 02:17:17,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=17196.666666666668, ans=0.9219666666666666 +2024-08-03 02:17:19,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=17233.333333333332, ans=0.2968333333333334 +2024-08-03 02:17:27,497 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.473e+02 1.878e+02 2.204e+02 3.880e+02, threshold=3.755e+02, percent-clipped=2.0 +2024-08-03 02:17:31,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.88 vs. limit=13.97625 +2024-08-03 02:17:34,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=17270.0, ans=0.007115217391304348 +2024-08-03 02:18:01,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=10.937333333333333 +2024-08-03 02:18:05,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=17343.333333333332, ans=0.125 +2024-08-03 02:18:08,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=17343.333333333332, ans=0.125 +2024-08-03 02:18:11,159 INFO [train.py:1114] (3/4) Epoch 2, batch 1100, loss[loss=0.3373, simple_loss=0.3911, pruned_loss=0.1417, over 13554.00 frames. ], tot_loss[loss=0.3369, simple_loss=0.3851, pruned_loss=0.1443, over 2619127.01 frames. ], batch size: 36, lr: 4.06e-02, grad_scale: 16.0 +2024-08-03 02:18:24,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17416.666666666668, ans=0.12583333333333332 +2024-08-03 02:18:33,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=17453.333333333332, ans=0.025 +2024-08-03 02:18:37,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.08 vs. limit=5.618 +2024-08-03 02:18:53,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.56 vs. limit=9.381666666666668 +2024-08-03 02:18:59,085 INFO [train.py:1114] (3/4) Epoch 2, batch 1150, loss[loss=0.3279, simple_loss=0.3781, pruned_loss=0.1389, over 13557.00 frames. ], tot_loss[loss=0.3362, simple_loss=0.3845, pruned_loss=0.1439, over 2618137.14 frames. 
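
The learning rate in these entries decays smoothly within an epoch (4.37e-02 down to about 4.05e-02 across this span) and takes an extra step down at the epoch-2 boundary (4.27e-02 to 4.19e-02). An Eden-style schedule from the k2/icefall family produces exactly this shape; the formula below is from memory and the base_lr and half-life constants are placeholders, not the values this run used.

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # separate quartic-root decay factors in batch count and in epoch count
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(eden_lr(0.05, batch=11000, epoch=1))  # smooth decay within an epoch
print(eden_lr(0.05, batch=13400, epoch=2))  # additional step-down at epoch 2
```
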
], batch size: 36, lr: 4.05e-02, grad_scale: 16.0 +2024-08-03 02:19:14,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=17600.0, ans=0.28400000000000003 +2024-08-03 02:19:16,027 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.513e+02 2.017e+02 2.624e+02 5.380e+02, threshold=4.034e+02, percent-clipped=4.0 +2024-08-03 02:19:50,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.12 vs. limit=20.755000000000003 +2024-08-03 02:19:51,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17673.333333333332, ans=0.125 +2024-08-03 02:19:53,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17710.0, ans=0.12290000000000001 +2024-08-03 02:19:53,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=17710.0, ans=0.9270999999999999 +2024-08-03 02:19:56,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=17710.0, ans=0.125 +2024-08-03 02:20:03,135 INFO [train.py:1114] (3/4) Epoch 2, batch 1200, loss[loss=0.3691, simple_loss=0.4124, pruned_loss=0.1629, over 13578.00 frames. ], tot_loss[loss=0.3362, simple_loss=0.3847, pruned_loss=0.1439, over 2615576.27 frames. ], batch size: 39, lr: 4.04e-02, grad_scale: 32.0 +2024-08-03 02:20:12,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=17746.666666666668, ans=0.0 +2024-08-03 02:20:30,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=17820.0, ans=0.125 +2024-08-03 02:20:32,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=17820.0, ans=0.0069956521739130435 +2024-08-03 02:20:36,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=17820.0, ans=0.2763 +2024-08-03 02:20:40,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=17856.666666666668, ans=0.2750166666666667 +2024-08-03 02:20:56,921 INFO [train.py:1114] (3/4) Epoch 2, batch 1250, loss[loss=0.3529, simple_loss=0.4017, pruned_loss=0.152, over 13446.00 frames. ], tot_loss[loss=0.3348, simple_loss=0.3841, pruned_loss=0.1428, over 2627838.27 frames. ], batch size: 42, lr: 4.04e-02, grad_scale: 32.0 +2024-08-03 02:21:13,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. 
limit=14.223749999999999 +2024-08-03 02:21:22,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=17966.666666666668, ans=0.0 +2024-08-03 02:21:24,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.393e+02 1.566e+02 1.875e+02 3.241e+02, threshold=3.132e+02, percent-clipped=0.0 +2024-08-03 02:21:27,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=18003.333333333332, ans=0.0 +2024-08-03 02:21:28,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=18003.333333333332, ans=0.07 +2024-08-03 02:21:42,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18040.0, ans=0.11960000000000001 +2024-08-03 02:21:43,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=18040.0, ans=0.025 +2024-08-03 02:21:54,710 INFO [train.py:1114] (3/4) Epoch 2, batch 1300, loss[loss=0.378, simple_loss=0.419, pruned_loss=0.1685, over 12932.00 frames. ], tot_loss[loss=0.3334, simple_loss=0.3827, pruned_loss=0.1421, over 2631465.46 frames. ], batch size: 52, lr: 4.03e-02, grad_scale: 32.0 +2024-08-03 02:22:05,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=18113.333333333332, ans=0.0 +2024-08-03 02:22:17,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=18186.666666666668, ans=0.0 +2024-08-03 02:22:21,419 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:22:21,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=18186.666666666668, ans=0.125 +2024-08-03 02:22:25,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=18186.666666666668, ans=0.26346666666666674 +2024-08-03 02:22:30,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=18223.333333333332, ans=0.0 +2024-08-03 02:22:30,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=18223.333333333332, ans=0.025 +2024-08-03 02:22:34,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=18223.333333333332, ans=0.0 +2024-08-03 02:22:49,377 INFO [train.py:1114] (3/4) Epoch 2, batch 1350, loss[loss=0.3333, simple_loss=0.3787, pruned_loss=0.144, over 13530.00 frames. ], tot_loss[loss=0.332, simple_loss=0.3816, pruned_loss=0.1412, over 2638357.33 frames. 
], batch size: 37, lr: 4.03e-02, grad_scale: 32.0 +2024-08-03 02:22:53,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=18296.666666666668, ans=0.006892028985507247 +2024-08-03 02:23:08,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.434e+02 1.711e+02 2.081e+02 4.051e+02, threshold=3.422e+02, percent-clipped=5.0 +2024-08-03 02:23:27,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=18370.0, ans=0.0 +2024-08-03 02:23:35,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=18406.666666666668, ans=0.2557666666666667 +2024-08-03 02:23:47,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=18443.333333333332, ans=0.11556666666666668 +2024-08-03 02:23:49,068 INFO [train.py:1114] (3/4) Epoch 2, batch 1400, loss[loss=0.3001, simple_loss=0.3449, pruned_loss=0.1276, over 13251.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.3805, pruned_loss=0.1402, over 2642815.05 frames. ], batch size: 31, lr: 4.02e-02, grad_scale: 32.0 +2024-08-03 02:24:09,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.17 vs. limit=14.4575 +2024-08-03 02:24:21,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=18590.0, ans=0.125 +2024-08-03 02:24:25,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=18590.0, ans=0.24935000000000007 +2024-08-03 02:24:32,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=14.485 +2024-08-03 02:24:38,147 INFO [train.py:1114] (3/4) Epoch 2, batch 1450, loss[loss=0.3512, simple_loss=0.4046, pruned_loss=0.1489, over 13384.00 frames. ], tot_loss[loss=0.3308, simple_loss=0.381, pruned_loss=0.1403, over 2641350.70 frames. ], batch size: 43, lr: 4.01e-02, grad_scale: 16.0 +2024-08-03 02:24:38,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=14.498750000000001 +2024-08-03 02:24:40,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=18663.333333333332, ans=0.025 +2024-08-03 02:24:47,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=18700.0, ans=0.24550000000000005 +2024-08-03 02:24:53,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18700.0, ans=0.125 +2024-08-03 02:24:55,528 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.417e+02 1.675e+02 1.959e+02 3.168e+02, threshold=3.351e+02, percent-clipped=0.0 +2024-08-03 02:24:57,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. 
limit=14.526250000000001 +2024-08-03 02:25:03,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=18736.666666666668, ans=0.06263333333333329 +2024-08-03 02:25:21,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18810.0, ans=0.1119 +2024-08-03 02:25:32,402 INFO [train.py:1114] (3/4) Epoch 2, batch 1500, loss[loss=0.3551, simple_loss=0.4101, pruned_loss=0.1501, over 13388.00 frames. ], tot_loss[loss=0.3306, simple_loss=0.3814, pruned_loss=0.1399, over 2641936.09 frames. ], batch size: 39, lr: 4.01e-02, grad_scale: 16.0 +2024-08-03 02:25:41,170 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:25:45,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=18883.333333333332, ans=0.23908333333333343 +2024-08-03 02:25:46,649 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:25:48,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=3.64 vs. limit=14.58125 +2024-08-03 02:26:07,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=18956.666666666668, ans=0.125 +2024-08-03 02:26:08,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=18956.666666666668, ans=0.125 +2024-08-03 02:26:19,639 INFO [train.py:1114] (3/4) Epoch 2, batch 1550, loss[loss=0.3866, simple_loss=0.4344, pruned_loss=0.1694, over 13396.00 frames. ], tot_loss[loss=0.3324, simple_loss=0.3825, pruned_loss=0.1411, over 2630331.62 frames. ], batch size: 41, lr: 4.00e-02, grad_scale: 16.0 +2024-08-03 02:26:19,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=19030.0, ans=0.125 +2024-08-03 02:26:23,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=19030.0, ans=0.05 +2024-08-03 02:26:42,594 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.587e+02 1.878e+02 2.318e+02 8.334e+02, threshold=3.756e+02, percent-clipped=6.0 +2024-08-03 02:26:47,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.28 vs. limit=11.641333333333332 +2024-08-03 02:26:55,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.48 vs. limit=21.855 +2024-08-03 02:27:05,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=19176.666666666668, ans=0.125 +2024-08-03 02:27:15,319 INFO [train.py:1114] (3/4) Epoch 2, batch 1600, loss[loss=0.2968, simple_loss=0.364, pruned_loss=0.1148, over 13576.00 frames. ], tot_loss[loss=0.332, simple_loss=0.3819, pruned_loss=0.141, over 2623964.20 frames. ], batch size: 39, lr: 4.00e-02, grad_scale: 32.0 +2024-08-03 02:27:22,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. 
limit=14.705 +2024-08-03 02:27:29,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19250.0, ans=0.125 +2024-08-03 02:27:30,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=19250.0, ans=0.125 +2024-08-03 02:27:36,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19286.666666666668, ans=0.10713333333333333 +2024-08-03 02:27:43,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=19286.666666666668, ans=0.125 +2024-08-03 02:27:55,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=19360.0, ans=0.22240000000000004 +2024-08-03 02:27:55,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19360.0, ans=0.10640000000000002 +2024-08-03 02:27:55,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=19360.0, ans=0.07 +2024-08-03 02:28:03,914 INFO [train.py:1114] (3/4) Epoch 2, batch 1650, loss[loss=0.3391, simple_loss=0.3945, pruned_loss=0.1418, over 13332.00 frames. ], tot_loss[loss=0.3313, simple_loss=0.3812, pruned_loss=0.1407, over 2621630.53 frames. ], batch size: 40, lr: 3.99e-02, grad_scale: 16.0 +2024-08-03 02:28:20,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=19396.666666666668, ans=0.0 +2024-08-03 02:28:21,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.66 vs. limit=22.0475 +2024-08-03 02:28:30,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=19433.333333333332, ans=0.21983333333333344 +2024-08-03 02:28:35,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19433.333333333332, ans=0.125 +2024-08-03 02:28:38,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=19433.333333333332, ans=0.125 +2024-08-03 02:28:40,135 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.530e+02 1.782e+02 2.174e+02 3.857e+02, threshold=3.564e+02, percent-clipped=2.0 +2024-08-03 02:28:40,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.46 vs. limit=14.735 +2024-08-03 02:28:54,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.86 vs. limit=14.815000000000001 +2024-08-03 02:29:00,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=19543.333333333332, ans=0.21598333333333342 +2024-08-03 02:29:05,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.73 vs. 
limit=22.1575 +2024-08-03 02:29:05,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=19543.333333333332, ans=0.125 +2024-08-03 02:29:08,342 INFO [train.py:1114] (3/4) Epoch 2, batch 1700, loss[loss=0.2594, simple_loss=0.3202, pruned_loss=0.09929, over 13242.00 frames. ], tot_loss[loss=0.3291, simple_loss=0.38, pruned_loss=0.1391, over 2630451.10 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0 +2024-08-03 02:29:09,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19580.0, ans=0.0 +2024-08-03 02:29:17,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19580.0, ans=0.10420000000000001 +2024-08-03 02:29:35,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=19653.333333333332, ans=0.07 +2024-08-03 02:29:41,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=19690.0, ans=0.125 +2024-08-03 02:29:42,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=19690.0, ans=0.125 +2024-08-03 02:29:45,125 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.30 vs. limit=14.88375 +2024-08-03 02:29:49,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=19726.666666666668, ans=22.295 +2024-08-03 02:29:51,205 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=1.162e-01 +2024-08-03 02:29:58,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19763.333333333332, ans=0.125 +2024-08-03 02:29:59,362 INFO [train.py:1114] (3/4) Epoch 2, batch 1750, loss[loss=0.2943, simple_loss=0.3391, pruned_loss=0.1247, over 13549.00 frames. ], tot_loss[loss=0.329, simple_loss=0.3797, pruned_loss=0.1392, over 2634578.27 frames. ], batch size: 31, lr: 3.98e-02, grad_scale: 16.0 +2024-08-03 02:30:06,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19763.333333333332, ans=0.10236666666666669 +2024-08-03 02:30:09,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.09 vs. 
limit=14.925 +2024-08-03 02:30:12,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=19800.0, ans=0.006565217391304348 +2024-08-03 02:30:18,375 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.430e+02 1.665e+02 2.047e+02 3.989e+02, threshold=3.330e+02, percent-clipped=2.0 +2024-08-03 02:30:23,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=19836.666666666668, ans=0.125 +2024-08-03 02:30:25,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19836.666666666668, ans=0.10163333333333333 +2024-08-03 02:30:30,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19873.333333333332, ans=0.125 +2024-08-03 02:30:36,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19910.0, ans=0.125 +2024-08-03 02:30:44,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.13 vs. limit=9.9775 +2024-08-03 02:30:45,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=19946.666666666668, ans=0.0 +2024-08-03 02:30:46,002 INFO [train.py:1114] (3/4) Epoch 2, batch 1800, loss[loss=0.3334, simple_loss=0.393, pruned_loss=0.1369, over 13560.00 frames. ], tot_loss[loss=0.3312, simple_loss=0.3811, pruned_loss=0.1406, over 2636196.28 frames. ], batch size: 38, lr: 3.97e-02, grad_scale: 16.0 +2024-08-03 02:30:52,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=19946.666666666668, ans=0.125 +2024-08-03 02:31:12,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=20020.0, ans=0.5 +2024-08-03 02:31:20,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=20056.666666666668, ans=0.125 +2024-08-03 02:31:32,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.50 vs. limit=5.0 +2024-08-03 02:31:32,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20093.333333333332, ans=0.1 +2024-08-03 02:31:37,410 INFO [train.py:1114] (3/4) Epoch 2, batch 1850, loss[loss=0.3211, simple_loss=0.3872, pruned_loss=0.1275, over 13393.00 frames. ], tot_loss[loss=0.3293, simple_loss=0.3799, pruned_loss=0.1394, over 2638626.94 frames. ], batch size: 39, lr: 3.96e-02, grad_scale: 16.0 +2024-08-03 02:31:51,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=20130.0, ans=0.04949747468305833 +2024-08-03 02:32:04,607 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.463e+02 1.801e+02 2.661e+02 5.332e+02, threshold=3.601e+02, percent-clipped=10.0 +2024-08-03 02:32:11,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.23 vs. 
limit=15.0 +2024-08-03 02:32:13,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=20240.0, ans=0.125 +2024-08-03 02:32:15,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=20240.0, ans=0.125 +2024-08-03 02:32:22,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=20276.666666666668, ans=0.125 +2024-08-03 02:32:23,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=20276.666666666668, ans=0.0 +2024-08-03 02:32:25,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=20276.666666666668, ans=0.025 +2024-08-03 02:32:32,393 INFO [train.py:1114] (3/4) Epoch 2, batch 1900, loss[loss=0.3046, simple_loss=0.3763, pruned_loss=0.1165, over 13325.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3798, pruned_loss=0.1389, over 2640601.45 frames. ], batch size: 40, lr: 3.96e-02, grad_scale: 16.0 +2024-08-03 02:32:53,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=20350.0, ans=0.5 +2024-08-03 02:33:01,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.63 vs. limit=22.5 +2024-08-03 02:33:12,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0 +2024-08-03 02:33:18,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=20460.0, ans=0.025 +2024-08-03 02:33:20,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=20460.0, ans=0.2 +2024-08-03 02:33:27,792 INFO [train.py:1114] (3/4) Epoch 2, batch 1950, loss[loss=0.3327, simple_loss=0.3838, pruned_loss=0.1408, over 13562.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3802, pruned_loss=0.1384, over 2647259.80 frames. ], batch size: 36, lr: 3.95e-02, grad_scale: 16.0 +2024-08-03 02:33:37,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=20533.333333333332, ans=0.125 +2024-08-03 02:33:38,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.05 vs. limit=15.0 +2024-08-03 02:33:43,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=20533.333333333332, ans=0.125 +2024-08-03 02:33:46,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.390e+02 1.603e+02 1.917e+02 3.719e+02, threshold=3.206e+02, percent-clipped=1.0 +2024-08-03 02:33:53,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=20570.0, ans=0.025 +2024-08-03 02:33:54,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.30 vs. 
limit=10.0 +2024-08-03 02:34:06,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=20606.666666666668, ans=0.125 +2024-08-03 02:34:11,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=20606.666666666668, ans=0.09899494936611666 +2024-08-03 02:34:14,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.66 vs. limit=10.0 +2024-08-03 02:34:14,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=20643.333333333332, ans=0.125 +2024-08-03 02:34:22,759 INFO [train.py:1114] (3/4) Epoch 2, batch 2000, loss[loss=0.2986, simple_loss=0.3448, pruned_loss=0.1262, over 13556.00 frames. ], tot_loss[loss=0.3289, simple_loss=0.3805, pruned_loss=0.1387, over 2636391.24 frames. ], batch size: 31, lr: 3.94e-02, grad_scale: 32.0 +2024-08-03 02:34:29,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=7.72 vs. limit=12.0 +2024-08-03 02:34:30,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=20680.0, ans=0.2 +2024-08-03 02:34:38,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.32 vs. limit=22.5 +2024-08-03 02:34:45,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=20753.333333333332, ans=0.125 +2024-08-03 02:34:53,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20790.0, ans=0.0 +2024-08-03 02:34:54,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=20790.0, ans=0.125 +2024-08-03 02:34:56,112 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=15.0 +2024-08-03 02:35:09,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=20826.666666666668, ans=10.0 +2024-08-03 02:35:13,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.91 vs. limit=10.0 +2024-08-03 02:35:14,580 INFO [train.py:1114] (3/4) Epoch 2, batch 2050, loss[loss=0.2533, simple_loss=0.3167, pruned_loss=0.09497, over 13400.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3791, pruned_loss=0.1381, over 2633411.35 frames. ], batch size: 32, lr: 3.94e-02, grad_scale: 32.0 +2024-08-03 02:35:23,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=20863.333333333332, ans=0.125 +2024-08-03 02:35:31,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=31.50 vs. 
limit=22.5 +2024-08-03 02:35:36,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.423e+02 1.683e+02 2.101e+02 5.163e+02, threshold=3.365e+02, percent-clipped=3.0 +2024-08-03 02:35:37,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=20936.666666666668, ans=0.0 +2024-08-03 02:35:51,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=20973.333333333332, ans=0.125 +2024-08-03 02:35:56,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=20973.333333333332, ans=0.125 +2024-08-03 02:36:00,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21010.0, ans=0.1 +2024-08-03 02:36:02,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.50 vs. limit=15.0 +2024-08-03 02:36:08,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=21046.666666666668, ans=10.0 +2024-08-03 02:36:09,019 INFO [train.py:1114] (3/4) Epoch 2, batch 2100, loss[loss=0.2961, simple_loss=0.3566, pruned_loss=0.1178, over 13552.00 frames. ], tot_loss[loss=0.3258, simple_loss=0.3777, pruned_loss=0.1369, over 2639570.57 frames. ], batch size: 37, lr: 3.93e-02, grad_scale: 32.0 +2024-08-03 02:36:21,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=21083.333333333332, ans=0.2 +2024-08-03 02:36:25,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=21083.333333333332, ans=0.0 +2024-08-03 02:36:31,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=21120.0, ans=0.0062782608695652175 +2024-08-03 02:36:32,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=21120.0, ans=0.2 +2024-08-03 02:36:46,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=21193.333333333332, ans=0.125 +2024-08-03 02:36:56,215 INFO [train.py:1114] (3/4) Epoch 2, batch 2150, loss[loss=0.3, simple_loss=0.3556, pruned_loss=0.1222, over 13582.00 frames. ], tot_loss[loss=0.3238, simple_loss=0.376, pruned_loss=0.1358, over 2648653.56 frames. 
], batch size: 36, lr: 3.93e-02, grad_scale: 16.0 +2024-08-03 02:37:10,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21266.666666666668, ans=0.1 +2024-08-03 02:37:17,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.342e+02 1.575e+02 1.914e+02 2.983e+02, threshold=3.149e+02, percent-clipped=0.0 +2024-08-03 02:37:19,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=21303.333333333332, ans=0.125 +2024-08-03 02:37:20,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=21303.333333333332, ans=0.2 +2024-08-03 02:37:25,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=21303.333333333332, ans=0.125 +2024-08-03 02:37:33,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0 +2024-08-03 02:37:39,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.47 vs. limit=12.0 +2024-08-03 02:37:41,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=21376.666666666668, ans=0.125 +2024-08-03 02:37:48,046 INFO [train.py:1114] (3/4) Epoch 2, batch 2200, loss[loss=0.311, simple_loss=0.3685, pruned_loss=0.1267, over 13400.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3763, pruned_loss=0.1358, over 2646109.41 frames. ], batch size: 39, lr: 3.92e-02, grad_scale: 16.0 +2024-08-03 02:37:49,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=21413.333333333332, ans=0.025 +2024-08-03 02:38:07,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=21486.666666666668, ans=0.125 +2024-08-03 02:38:07,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=21486.666666666668, ans=0.125 +2024-08-03 02:38:48,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.98 vs. limit=22.5 +2024-08-03 02:39:26,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21560.0, ans=0.1 +2024-08-03 02:39:28,808 INFO [train.py:1114] (3/4) Epoch 2, batch 2250, loss[loss=0.3032, simple_loss=0.3718, pruned_loss=0.1173, over 13362.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3766, pruned_loss=0.1357, over 2643471.63 frames. 
], batch size: 37, lr: 3.91e-02, grad_scale: 16.0 +2024-08-03 02:39:49,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=21596.666666666668, ans=0.05 +2024-08-03 02:39:55,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=21633.333333333332, ans=0.2 +2024-08-03 02:40:05,990 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.392e+02 1.558e+02 1.925e+02 3.298e+02, threshold=3.115e+02, percent-clipped=1.0 +2024-08-03 02:40:09,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21670.0, ans=0.125 +2024-08-03 02:40:16,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=21706.666666666668, ans=0.125 +2024-08-03 02:40:29,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=21743.333333333332, ans=0.025 +2024-08-03 02:40:31,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=21743.333333333332, ans=0.2 +2024-08-03 02:40:34,540 INFO [train.py:1114] (3/4) Epoch 2, batch 2300, loss[loss=0.2712, simple_loss=0.3287, pruned_loss=0.1069, over 13584.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3745, pruned_loss=0.1347, over 2639042.34 frames. ], batch size: 33, lr: 3.91e-02, grad_scale: 8.0 +2024-08-03 02:40:43,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=21780.0, ans=0.025 +2024-08-03 02:40:47,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=21816.666666666668, ans=0.05 +2024-08-03 02:40:48,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21816.666666666668, ans=0.1 +2024-08-03 02:40:48,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=21816.666666666668, ans=0.0 +2024-08-03 02:40:57,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21853.333333333332, ans=0.1 +2024-08-03 02:41:16,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21926.666666666668, ans=0.1 +2024-08-03 02:41:23,902 INFO [train.py:1114] (3/4) Epoch 2, batch 2350, loss[loss=0.356, simple_loss=0.3972, pruned_loss=0.1574, over 13556.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3746, pruned_loss=0.1347, over 2641427.21 frames. ], batch size: 38, lr: 3.90e-02, grad_scale: 8.0 +2024-08-03 02:41:24,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.22 vs. 
limit=15.0 +2024-08-03 02:41:44,433 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.437e+02 1.623e+02 1.973e+02 3.440e+02, threshold=3.245e+02, percent-clipped=2.0 +2024-08-03 02:41:51,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=22036.666666666668, ans=0.125 +2024-08-03 02:41:54,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=22073.333333333332, ans=0.006071014492753624 +2024-08-03 02:42:07,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22110.0, ans=0.1 +2024-08-03 02:42:08,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=12.0 +2024-08-03 02:42:15,869 INFO [train.py:1114] (3/4) Epoch 2, batch 2400, loss[loss=0.2946, simple_loss=0.3538, pruned_loss=0.1177, over 13543.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3759, pruned_loss=0.1354, over 2642638.95 frames. ], batch size: 35, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:42:17,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=22146.666666666668, ans=0.07 +2024-08-03 02:42:41,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=22220.0, ans=0.125 +2024-08-03 02:42:46,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=22256.666666666668, ans=0.0 +2024-08-03 02:42:53,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=22256.666666666668, ans=0.125 +2024-08-03 02:42:58,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=22293.333333333332, ans=0.05 +2024-08-03 02:43:00,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=22293.333333333332, ans=0.2 +2024-08-03 02:43:04,902 INFO [train.py:1114] (3/4) Epoch 2, batch 2450, loss[loss=0.3262, simple_loss=0.3784, pruned_loss=0.137, over 13358.00 frames. ], tot_loss[loss=0.3264, simple_loss=0.3786, pruned_loss=0.1371, over 2631916.53 frames. ], batch size: 37, lr: 3.89e-02, grad_scale: 16.0 +2024-08-03 02:43:08,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=22330.0, ans=0.2 +2024-08-03 02:43:19,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.18 vs. 
limit=15.0 +2024-08-03 02:43:27,798 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.465e+02 1.678e+02 2.067e+02 5.260e+02, threshold=3.356e+02, percent-clipped=2.0 +2024-08-03 02:43:39,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=22403.333333333332, ans=0.125 +2024-08-03 02:44:05,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=22440.0, ans=0.125 +2024-08-03 02:44:15,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.40 vs. limit=6.0 +2024-08-03 02:45:16,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22476.666666666668, ans=0.1 +2024-08-03 02:45:36,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=22513.333333333332, ans=0.0 +2024-08-03 02:45:37,298 INFO [train.py:1114] (3/4) Epoch 2, batch 2500, loss[loss=0.3268, simple_loss=0.374, pruned_loss=0.1398, over 13406.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3771, pruned_loss=0.1357, over 2635417.07 frames. ], batch size: 39, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:46:11,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=22513.333333333332, ans=0.125 +2024-08-03 02:46:16,451 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 02:46:17,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=22513.333333333332, ans=0.125 +2024-08-03 02:48:42,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=22623.333333333332, ans=10.0 +2024-08-03 02:48:50,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=22623.333333333332, ans=0.005951449275362319 +2024-08-03 02:52:09,587 INFO [train.py:1114] (3/4) Epoch 2, batch 2550, loss[loss=0.2802, simple_loss=0.3303, pruned_loss=0.115, over 13567.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3767, pruned_loss=0.1352, over 2637075.02 frames. 
], batch size: 31, lr: 3.88e-02, grad_scale: 8.0 +2024-08-03 02:52:16,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=22696.666666666668, ans=0.2 +2024-08-03 02:52:59,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22733.333333333332, ans=0.0 +2024-08-03 02:53:31,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.373e+02 1.661e+02 2.107e+02 4.285e+02, threshold=3.322e+02, percent-clipped=3.0 +2024-08-03 02:53:34,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=22770.0, ans=0.0 +2024-08-03 02:54:16,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=22806.666666666668, ans=0.125 +2024-08-03 02:54:41,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=22843.333333333332, ans=0.125 +2024-08-03 02:54:45,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=22843.333333333332, ans=0.125 +2024-08-03 02:54:52,213 INFO [train.py:1114] (3/4) Epoch 2, batch 2600, loss[loss=0.3082, simple_loss=0.3628, pruned_loss=0.1268, over 13563.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3771, pruned_loss=0.1356, over 2636255.60 frames. ], batch size: 36, lr: 3.87e-02, grad_scale: 8.0 +2024-08-03 02:55:23,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=22916.666666666668, ans=0.125 +2024-08-03 02:55:50,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=22990.0, ans=0.125 +2024-08-03 02:56:03,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=23063.333333333332, ans=0.125 +2024-08-03 02:56:03,858 INFO [train.py:1114] (3/4) Epoch 2, batch 2650, loss[loss=0.3238, simple_loss=0.392, pruned_loss=0.1279, over 13280.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3763, pruned_loss=0.1347, over 2639914.77 frames. ], batch size: 46, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:56:27,904 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.447e+02 1.734e+02 2.047e+02 3.463e+02, threshold=3.469e+02, percent-clipped=1.0 +2024-08-03 02:56:49,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=23136.666666666668, ans=15.0 +2024-08-03 02:57:17,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=23210.0, ans=0.025 +2024-08-03 02:58:07,636 INFO [train.py:1114] (3/4) Epoch 2, batch 2700, loss[loss=0.319, simple_loss=0.381, pruned_loss=0.1285, over 13563.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3769, pruned_loss=0.1352, over 2637288.29 frames. 
], batch size: 40, lr: 3.86e-02, grad_scale: 8.0 +2024-08-03 02:58:16,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=23246.666666666668, ans=0.2 +2024-08-03 02:58:29,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=23283.333333333332, ans=0.07 +2024-08-03 02:58:36,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.50 vs. limit=15.0 +2024-08-03 02:59:11,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23320.0, ans=0.1 +2024-08-03 02:59:52,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=23356.666666666668, ans=0.07 +2024-08-03 02:59:53,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=23356.666666666668, ans=0.125 +2024-08-03 03:00:36,558 INFO [train.py:1114] (3/4) Epoch 2, batch 2750, loss[loss=0.3239, simple_loss=0.3671, pruned_loss=0.1403, over 13334.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3752, pruned_loss=0.1345, over 2634188.13 frames. ], batch size: 34, lr: 3.85e-02, grad_scale: 8.0 +2024-08-03 03:00:36,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23430.0, ans=0.1 +2024-08-03 03:00:42,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=23430.0, ans=0.125 +2024-08-03 03:00:58,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.86 vs. limit=15.0 +2024-08-03 03:01:06,448 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.416e+02 1.639e+02 1.960e+02 3.073e+02, threshold=3.277e+02, percent-clipped=0.0 +2024-08-03 03:01:16,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.64 vs. limit=6.0 +2024-08-03 03:01:30,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23576.666666666668, ans=0.1 +2024-08-03 03:01:33,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=23613.333333333332, ans=0.0 +2024-08-03 03:01:38,771 INFO [train.py:1114] (3/4) Epoch 2, batch 2800, loss[loss=0.4095, simple_loss=0.4259, pruned_loss=0.1965, over 9141.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3751, pruned_loss=0.1343, over 2626176.12 frames. 
], batch size: 96, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:01:50,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=23613.333333333332, ans=0.125 +2024-08-03 03:01:50,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=23613.333333333332, ans=0.125 +2024-08-03 03:01:54,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23613.333333333332, ans=0.125 +2024-08-03 03:02:00,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23650.0, ans=0.125 +2024-08-03 03:02:17,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=23686.666666666668, ans=0.125 +2024-08-03 03:02:34,847 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:02:37,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=23760.0, ans=0.95 +2024-08-03 03:02:44,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=15.0 +2024-08-03 03:02:47,294 INFO [train.py:1114] (3/4) Epoch 2, batch 2850, loss[loss=0.2876, simple_loss=0.3518, pruned_loss=0.1117, over 13361.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3756, pruned_loss=0.1346, over 2619813.82 frames. ], batch size: 35, lr: 3.84e-02, grad_scale: 16.0 +2024-08-03 03:02:53,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.33 vs. limit=5.0 +2024-08-03 03:03:01,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.62 vs. limit=22.5 +2024-08-03 03:03:09,959 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.500e+02 1.764e+02 2.105e+02 5.677e+02, threshold=3.527e+02, percent-clipped=3.0 +2024-08-03 03:03:12,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=23870.0, ans=0.0 +2024-08-03 03:03:29,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=23943.333333333332, ans=0.0 +2024-08-03 03:04:04,004 INFO [train.py:1114] (3/4) Epoch 2, batch 2900, loss[loss=0.2822, simple_loss=0.3488, pruned_loss=0.1078, over 13356.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3759, pruned_loss=0.1339, over 2630710.66 frames. 
], batch size: 36, lr: 3.83e-02, grad_scale: 16.0 +2024-08-03 03:04:17,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=23980.0, ans=0.125 +2024-08-03 03:04:28,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=24016.666666666668, ans=0.125 +2024-08-03 03:04:41,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=24053.333333333332, ans=0.2 +2024-08-03 03:04:49,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24090.0, ans=0.1 +2024-08-03 03:04:55,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=24126.666666666668, ans=0.025 +2024-08-03 03:04:56,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=24126.666666666668, ans=0.125 +2024-08-03 03:04:56,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.39 vs. limit=22.5 +2024-08-03 03:05:06,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0 +2024-08-03 03:05:09,683 INFO [train.py:1114] (3/4) Epoch 2, batch 2950, loss[loss=0.3186, simple_loss=0.3723, pruned_loss=0.1324, over 13327.00 frames. ], tot_loss[loss=0.3209, simple_loss=0.3746, pruned_loss=0.1336, over 2628764.35 frames. ], batch size: 34, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:05:11,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=24163.333333333332, ans=0.95 +2024-08-03 03:05:12,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-08-03 03:05:12,841 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.86 vs. limit=22.5 +2024-08-03 03:05:37,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.45 vs. limit=5.0 +2024-08-03 03:05:38,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.432e+02 1.719e+02 2.227e+02 3.350e+02, threshold=3.438e+02, percent-clipped=0.0 +2024-08-03 03:05:38,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=24236.666666666668, ans=0.125 +2024-08-03 03:05:49,310 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.09 vs. limit=15.0 +2024-08-03 03:05:56,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=24310.0, ans=0.125 +2024-08-03 03:05:57,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.52 vs. 
limit=15.0 +2024-08-03 03:06:01,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=24310.0, ans=0.0 +2024-08-03 03:06:07,500 INFO [train.py:1114] (3/4) Epoch 2, batch 3000, loss[loss=0.2962, simple_loss=0.3627, pruned_loss=0.1148, over 13538.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.374, pruned_loss=0.1328, over 2628678.02 frames. ], batch size: 37, lr: 3.82e-02, grad_scale: 8.0 +2024-08-03 03:06:07,500 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 03:06:29,643 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.2511, simple_loss=0.3433, pruned_loss=0.07947, over 944034.00 frames. +2024-08-03 03:06:29,644 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 03:06:38,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=24383.333333333332, ans=0.0 +2024-08-03 03:06:51,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=24420.0, ans=0.125 +2024-08-03 03:06:52,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=24420.0, ans=0.0 +2024-08-03 03:06:59,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24456.666666666668, ans=0.0 +2024-08-03 03:07:02,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=24456.666666666668, ans=0.04949747468305833 +2024-08-03 03:07:14,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=24530.0, ans=0.125 +2024-08-03 03:07:15,187 INFO [train.py:1114] (3/4) Epoch 2, batch 3050, loss[loss=0.2586, simple_loss=0.3278, pruned_loss=0.09472, over 13535.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.3746, pruned_loss=0.1333, over 2625850.75 frames. ], batch size: 35, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:07:15,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=24530.0, ans=0.95 +2024-08-03 03:07:19,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.96 vs. limit=15.0 +2024-08-03 03:07:37,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=24603.333333333332, ans=0.125 +2024-08-03 03:07:37,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.349e+02 1.521e+02 1.830e+02 3.051e+02, threshold=3.043e+02, percent-clipped=0.0 +2024-08-03 03:07:42,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=24640.0, ans=0.5 +2024-08-03 03:07:54,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=24640.0, ans=0.00551304347826087 +2024-08-03 03:08:04,767 INFO [train.py:1114] (3/4) Epoch 2, batch 3100, loss[loss=0.4008, simple_loss=0.4362, pruned_loss=0.1827, over 13311.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.3744, pruned_loss=0.1335, over 2626479.61 frames. 
], batch size: 46, lr: 3.81e-02, grad_scale: 8.0 +2024-08-03 03:08:32,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=12.0 +2024-08-03 03:08:33,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=24786.666666666668, ans=0.125 +2024-08-03 03:08:34,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=24786.666666666668, ans=0.125 +2024-08-03 03:08:35,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=15.0 +2024-08-03 03:08:45,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=24823.333333333332, ans=0.2 +2024-08-03 03:08:51,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=24823.333333333332, ans=0.125 +2024-08-03 03:09:04,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=24860.0, ans=0.125 +2024-08-03 03:09:07,677 INFO [train.py:1114] (3/4) Epoch 2, batch 3150, loss[loss=0.3543, simple_loss=0.393, pruned_loss=0.1578, over 13012.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3739, pruned_loss=0.1327, over 2628117.26 frames. ], batch size: 48, lr: 3.80e-02, grad_scale: 8.0 +2024-08-03 03:09:17,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=24933.333333333332, ans=0.04949747468305833 +2024-08-03 03:09:28,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=24970.0, ans=0.125 +2024-08-03 03:09:29,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.526e+02 1.877e+02 2.299e+02 4.480e+02, threshold=3.753e+02, percent-clipped=6.0 +2024-08-03 03:09:35,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25006.666666666668, ans=0.1 +2024-08-03 03:09:44,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=25043.333333333332, ans=0.125 +2024-08-03 03:09:53,508 INFO [train.py:1114] (3/4) Epoch 2, batch 3200, loss[loss=0.323, simple_loss=0.3821, pruned_loss=0.1319, over 13549.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.374, pruned_loss=0.1328, over 2634911.80 frames. 
], batch size: 37, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:10:05,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25080.0, ans=0.1 +2024-08-03 03:10:20,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=25153.333333333332, ans=0.125 +2024-08-03 03:10:53,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=25226.666666666668, ans=0.025 +2024-08-03 03:10:54,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=25226.666666666668, ans=0.125 +2024-08-03 03:10:56,396 INFO [train.py:1114] (3/4) Epoch 2, batch 3250, loss[loss=0.3216, simple_loss=0.3802, pruned_loss=0.1315, over 13396.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.375, pruned_loss=0.1331, over 2639014.52 frames. ], batch size: 38, lr: 3.79e-02, grad_scale: 16.0 +2024-08-03 03:11:11,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=25263.333333333332, ans=0.125 +2024-08-03 03:11:31,288 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:11:34,445 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.442e+02 1.550e+02 1.764e+02 2.865e+02, threshold=3.101e+02, percent-clipped=0.0 +2024-08-03 03:11:54,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=25373.333333333332, ans=0.025 +2024-08-03 03:12:03,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25410.0, ans=0.1 +2024-08-03 03:12:09,624 INFO [train.py:1114] (3/4) Epoch 2, batch 3300, loss[loss=0.3695, simple_loss=0.4116, pruned_loss=0.1637, over 12860.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3732, pruned_loss=0.1323, over 2641644.97 frames. ], batch size: 52, lr: 3.78e-02, grad_scale: 16.0 +2024-08-03 03:12:12,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=25446.666666666668, ans=0.2 +2024-08-03 03:12:17,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=25483.333333333332, ans=0.125 +2024-08-03 03:12:26,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.84 vs. limit=15.0 +2024-08-03 03:12:34,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=25556.666666666668, ans=0.07 +2024-08-03 03:12:50,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=25593.333333333332, ans=0.0 +2024-08-03 03:12:53,076 INFO [train.py:1114] (3/4) Epoch 2, batch 3350, loss[loss=0.3445, simple_loss=0.3977, pruned_loss=0.1456, over 13043.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3749, pruned_loss=0.1338, over 2631637.19 frames. 
], batch size: 48, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:12:54,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=25630.0, ans=0.005297826086956522 +2024-08-03 03:13:04,146 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:13:08,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=25666.666666666668, ans=0.2 +2024-08-03 03:13:13,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=25703.333333333332, ans=0.125 +2024-08-03 03:13:13,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25703.333333333332, ans=0.0 +2024-08-03 03:13:14,122 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.415e+02 1.657e+02 2.011e+02 3.247e+02, threshold=3.315e+02, percent-clipped=1.0 +2024-08-03 03:13:15,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25703.333333333332, ans=0.1 +2024-08-03 03:13:16,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=25703.333333333332, ans=0.0 +2024-08-03 03:13:22,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=25740.0, ans=0.2 +2024-08-03 03:13:38,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-08-03 03:13:38,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.70 vs. limit=22.5 +2024-08-03 03:13:39,085 INFO [train.py:1114] (3/4) Epoch 2, batch 3400, loss[loss=0.2916, simple_loss=0.341, pruned_loss=0.1211, over 13529.00 frames. ], tot_loss[loss=0.3202, simple_loss=0.3737, pruned_loss=0.1333, over 2626741.21 frames. ], batch size: 31, lr: 3.77e-02, grad_scale: 16.0 +2024-08-03 03:13:50,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=25850.0, ans=0.2 +2024-08-03 03:13:53,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0 +2024-08-03 03:13:55,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=25850.0, ans=0.2 +2024-08-03 03:13:57,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=25886.666666666668, ans=0.07 +2024-08-03 03:14:21,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=25960.0, ans=0.125 +2024-08-03 03:14:23,920 INFO [train.py:1114] (3/4) Epoch 2, batch 3450, loss[loss=0.3556, simple_loss=0.4064, pruned_loss=0.1523, over 12867.00 frames. ], tot_loss[loss=0.3191, simple_loss=0.3729, pruned_loss=0.1326, over 2630064.05 frames. 
], batch size: 52, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:14:25,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25996.666666666668, ans=0.125 +2024-08-03 03:14:29,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=25996.666666666668, ans=0.125 +2024-08-03 03:14:36,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. limit=10.0 +2024-08-03 03:14:44,184 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.439e+02 1.568e+02 1.776e+02 4.751e+02, threshold=3.136e+02, percent-clipped=2.0 +2024-08-03 03:14:48,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=26106.666666666668, ans=0.125 +2024-08-03 03:15:06,939 INFO [train.py:1114] (3/4) Epoch 2, batch 3500, loss[loss=0.3322, simple_loss=0.3792, pruned_loss=0.1426, over 13518.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.3718, pruned_loss=0.1326, over 2631457.35 frames. ], batch size: 34, lr: 3.76e-02, grad_scale: 16.0 +2024-08-03 03:15:22,506 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:15:35,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26290.0, ans=0.1 +2024-08-03 03:15:36,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=26290.0, ans=0.125 +2024-08-03 03:15:40,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=26290.0, ans=0.0 +2024-08-03 03:15:41,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=26326.666666666668, ans=0.0 +2024-08-03 03:15:50,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=26326.666666666668, ans=0.125 +2024-08-03 03:15:53,624 INFO [train.py:1114] (3/4) Epoch 2, batch 3550, loss[loss=0.3636, simple_loss=0.4084, pruned_loss=0.1594, over 12402.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.3762, pruned_loss=0.1354, over 2629328.70 frames. ], batch size: 58, lr: 3.75e-02, grad_scale: 16.0 +2024-08-03 03:15:57,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=26363.333333333332, ans=0.125 +2024-08-03 03:16:14,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.84 vs. limit=22.5 +2024-08-03 03:16:15,276 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.421e+02 1.616e+02 2.006e+02 3.426e+02, threshold=3.231e+02, percent-clipped=2.0 +2024-08-03 03:16:29,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=26510.0, ans=0.0 +2024-08-03 03:16:39,547 INFO [train.py:1114] (3/4) Epoch 2, batch 3600, loss[loss=0.3998, simple_loss=0.4231, pruned_loss=0.1882, over 9202.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3827, pruned_loss=0.1427, over 2485145.07 frames. 
], batch size: 97, lr: 3.74e-02, grad_scale: 32.0 +2024-08-03 03:17:01,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26620.0, ans=0.1 +2024-08-03 03:17:55,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26693.333333333332, ans=0.07 +2024-08-03 03:18:02,778 INFO [train.py:1114] (3/4) Epoch 3, batch 0, loss[loss=0.2771, simple_loss=0.3424, pruned_loss=0.1059, over 13345.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3424, pruned_loss=0.1059, over 13345.00 frames. ], batch size: 33, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:18:02,779 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 03:18:12,654 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2631, simple_loss=0.3546, pruned_loss=0.08577, over 944034.00 frames. +2024-08-03 03:18:12,655 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 03:18:18,910 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.58 vs. limit=15.0 +2024-08-03 03:18:28,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=26730.0, ans=0.125 +2024-08-03 03:18:28,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=26730.0, ans=0.07 +2024-08-03 03:18:30,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=26766.666666666668, ans=0.07 +2024-08-03 03:18:34,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=26766.666666666668, ans=0.125 +2024-08-03 03:18:38,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=15.0 +2024-08-03 03:18:39,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=26766.666666666668, ans=0.025 +2024-08-03 03:18:45,700 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.401e+02 1.625e+02 1.929e+02 3.724e+02, threshold=3.249e+02, percent-clipped=3.0 +2024-08-03 03:18:49,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26840.0, ans=0.1 +2024-08-03 03:18:59,364 INFO [train.py:1114] (3/4) Epoch 3, batch 50, loss[loss=0.3019, simple_loss=0.3581, pruned_loss=0.1228, over 13400.00 frames. ], tot_loss[loss=0.3321, simple_loss=0.3836, pruned_loss=0.1403, over 577800.69 frames. ], batch size: 32, lr: 3.55e-02, grad_scale: 32.0 +2024-08-03 03:19:06,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=26876.666666666668, ans=0.005026811594202899 +2024-08-03 03:19:18,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.49 vs. limit=15.0 +2024-08-03 03:19:28,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. 
limit=15.0 +2024-08-03 03:19:45,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=27023.333333333332, ans=0.2 +2024-08-03 03:19:45,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=4.77 vs. limit=15.0 +2024-08-03 03:19:48,779 INFO [train.py:1114] (3/4) Epoch 3, batch 100, loss[loss=0.3097, simple_loss=0.3628, pruned_loss=0.1283, over 13515.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3808, pruned_loss=0.1366, over 1025622.40 frames. ], batch size: 35, lr: 3.54e-02, grad_scale: 32.0 +2024-08-03 03:19:51,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=27060.0, ans=0.0 +2024-08-03 03:20:10,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.24 vs. limit=22.5 +2024-08-03 03:20:21,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.439e+02 1.724e+02 2.172e+02 3.862e+02, threshold=3.447e+02, percent-clipped=4.0 +2024-08-03 03:20:54,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=27170.0, ans=0.2 +2024-08-03 03:21:06,902 INFO [train.py:1114] (3/4) Epoch 3, batch 150, loss[loss=0.2253, simple_loss=0.2966, pruned_loss=0.07699, over 13436.00 frames. ], tot_loss[loss=0.3191, simple_loss=0.374, pruned_loss=0.1321, over 1386977.92 frames. ], batch size: 32, lr: 3.53e-02, grad_scale: 32.0 +2024-08-03 03:21:20,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.22 vs. limit=6.0 +2024-08-03 03:21:30,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27316.666666666668, ans=0.1 +2024-08-03 03:22:07,957 INFO [train.py:1114] (3/4) Epoch 3, batch 200, loss[loss=0.3184, simple_loss=0.3804, pruned_loss=0.1282, over 12436.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3705, pruned_loss=0.1292, over 1664863.24 frames. ], batch size: 58, lr: 3.53e-02, grad_scale: 16.0 +2024-08-03 03:22:17,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27463.333333333332, ans=0.1 +2024-08-03 03:22:18,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=27463.333333333332, ans=0.2 +2024-08-03 03:22:34,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27536.666666666668, ans=0.125 +2024-08-03 03:22:41,088 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.338e+02 1.522e+02 1.755e+02 2.817e+02, threshold=3.045e+02, percent-clipped=0.0 +2024-08-03 03:22:53,963 INFO [train.py:1114] (3/4) Epoch 3, batch 250, loss[loss=0.3616, simple_loss=0.4084, pruned_loss=0.1574, over 13323.00 frames. ], tot_loss[loss=0.314, simple_loss=0.37, pruned_loss=0.129, over 1883543.85 frames. 
], batch size: 46, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:22:57,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=27610.0, ans=12.0 +2024-08-03 03:23:12,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=27683.333333333332, ans=0.2 +2024-08-03 03:23:12,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27683.333333333332, ans=0.1 +2024-08-03 03:23:13,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=27683.333333333332, ans=0.125 +2024-08-03 03:23:18,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=27683.333333333332, ans=0.125 +2024-08-03 03:23:21,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.04 vs. limit=15.0 +2024-08-03 03:23:28,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=27720.0, ans=0.125 +2024-08-03 03:23:35,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=27756.666666666668, ans=0.125 +2024-08-03 03:23:48,978 INFO [train.py:1114] (3/4) Epoch 3, batch 300, loss[loss=0.3492, simple_loss=0.3961, pruned_loss=0.1512, over 13437.00 frames. ], tot_loss[loss=0.3119, simple_loss=0.3681, pruned_loss=0.1279, over 2050663.43 frames. ], batch size: 42, lr: 3.52e-02, grad_scale: 16.0 +2024-08-03 03:23:51,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=27793.333333333332, ans=0.5 +2024-08-03 03:24:05,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27830.0, ans=0.1 +2024-08-03 03:24:14,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=27866.666666666668, ans=0.0 +2024-08-03 03:24:15,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=27866.666666666668, ans=0.125 +2024-08-03 03:24:24,867 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.466e+02 1.718e+02 2.215e+02 5.480e+02, threshold=3.437e+02, percent-clipped=5.0 +2024-08-03 03:24:38,888 INFO [train.py:1114] (3/4) Epoch 3, batch 350, loss[loss=0.2576, simple_loss=0.3163, pruned_loss=0.09949, over 13590.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3664, pruned_loss=0.1263, over 2181749.72 frames. 
], batch size: 33, lr: 3.51e-02, grad_scale: 16.0 +2024-08-03 03:24:40,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=27976.666666666668, ans=0.2 +2024-08-03 03:24:48,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28013.333333333332, ans=0.0 +2024-08-03 03:25:10,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=28086.666666666668, ans=0.2 +2024-08-03 03:25:25,236 INFO [train.py:1114] (3/4) Epoch 3, batch 400, loss[loss=0.307, simple_loss=0.3623, pruned_loss=0.1258, over 13377.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.3664, pruned_loss=0.1267, over 2285477.73 frames. ], batch size: 37, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:25:52,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=28233.333333333332, ans=0.0 +2024-08-03 03:26:05,597 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.402e+02 1.627e+02 1.926e+02 3.907e+02, threshold=3.254e+02, percent-clipped=1.0 +2024-08-03 03:26:18,598 INFO [train.py:1114] (3/4) Epoch 3, batch 450, loss[loss=0.3224, simple_loss=0.3818, pruned_loss=0.1315, over 13543.00 frames. ], tot_loss[loss=0.3108, simple_loss=0.367, pruned_loss=0.1273, over 2359680.75 frames. ], batch size: 38, lr: 3.50e-02, grad_scale: 32.0 +2024-08-03 03:26:37,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28380.0, ans=0.1 +2024-08-03 03:27:11,501 INFO [train.py:1114] (3/4) Epoch 3, batch 500, loss[loss=0.2871, simple_loss=0.3568, pruned_loss=0.1087, over 13420.00 frames. ], tot_loss[loss=0.3102, simple_loss=0.3665, pruned_loss=0.127, over 2424971.86 frames. ], batch size: 43, lr: 3.49e-02, grad_scale: 32.0 +2024-08-03 03:27:14,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=28526.666666666668, ans=0.125 +2024-08-03 03:27:15,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=28526.666666666668, ans=0.125 +2024-08-03 03:27:19,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=28526.666666666668, ans=0.125 +2024-08-03 03:27:22,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=28563.333333333332, ans=0.0046601449275362325 +2024-08-03 03:27:48,040 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.466e+02 1.735e+02 2.174e+02 5.837e+02, threshold=3.470e+02, percent-clipped=2.0 +2024-08-03 03:27:54,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-08-03 03:27:59,879 INFO [train.py:1114] (3/4) Epoch 3, batch 550, loss[loss=0.3115, simple_loss=0.3725, pruned_loss=0.1253, over 13023.00 frames. ], tot_loss[loss=0.3094, simple_loss=0.3659, pruned_loss=0.1264, over 2467987.17 frames. 
], batch size: 48, lr: 3.49e-02, grad_scale: 16.0 +2024-08-03 03:28:03,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=28710.0, ans=0.125 +2024-08-03 03:28:09,510 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:28:17,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=28783.333333333332, ans=0.125 +2024-08-03 03:28:18,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=28783.333333333332, ans=0.125 +2024-08-03 03:28:25,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28783.333333333332, ans=0.0 +2024-08-03 03:28:32,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=28820.0, ans=0.2 +2024-08-03 03:28:44,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=28856.666666666668, ans=0.0 +2024-08-03 03:28:48,243 INFO [train.py:1114] (3/4) Epoch 3, batch 600, loss[loss=0.3482, simple_loss=0.3974, pruned_loss=0.1495, over 13266.00 frames. ], tot_loss[loss=0.3082, simple_loss=0.3652, pruned_loss=0.1257, over 2506964.68 frames. ], batch size: 46, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:28:59,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=28930.0, ans=0.004580434782608695 +2024-08-03 03:29:00,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=28930.0, ans=0.0 +2024-08-03 03:29:01,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=28930.0, ans=0.0 +2024-08-03 03:29:06,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.54 vs. limit=15.0 +2024-08-03 03:29:17,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=29003.333333333332, ans=0.025 +2024-08-03 03:29:21,220 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.381e+02 1.525e+02 1.783e+02 3.115e+02, threshold=3.051e+02, percent-clipped=0.0 +2024-08-03 03:29:32,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=29040.0, ans=0.2 +2024-08-03 03:29:39,461 INFO [train.py:1114] (3/4) Epoch 3, batch 650, loss[loss=0.3041, simple_loss=0.3616, pruned_loss=0.1233, over 13551.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3642, pruned_loss=0.1251, over 2542645.20 frames. ], batch size: 37, lr: 3.48e-02, grad_scale: 16.0 +2024-08-03 03:29:41,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.77 vs. 
limit=22.5 +2024-08-03 03:29:50,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=29113.333333333332, ans=0.125 +2024-08-03 03:29:54,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=29113.333333333332, ans=0.2 +2024-08-03 03:30:04,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29150.0, ans=0.125 +2024-08-03 03:30:09,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=29186.666666666668, ans=0.125 +2024-08-03 03:30:26,320 INFO [train.py:1114] (3/4) Epoch 3, batch 700, loss[loss=0.3236, simple_loss=0.3742, pruned_loss=0.1365, over 13533.00 frames. ], tot_loss[loss=0.3074, simple_loss=0.3646, pruned_loss=0.1251, over 2565749.54 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 16.0 +2024-08-03 03:30:40,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=29296.666666666668, ans=0.125 +2024-08-03 03:30:43,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29296.666666666668, ans=0.1 +2024-08-03 03:31:14,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=29333.333333333332, ans=0.015 +2024-08-03 03:31:29,957 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.985e+01 1.488e+02 1.744e+02 2.083e+02 3.353e+02, threshold=3.487e+02, percent-clipped=2.0 +2024-08-03 03:31:30,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=15.0 +2024-08-03 03:31:41,806 INFO [train.py:1114] (3/4) Epoch 3, batch 750, loss[loss=0.2933, simple_loss=0.3606, pruned_loss=0.1131, over 13347.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3638, pruned_loss=0.1242, over 2582926.98 frames. ], batch size: 37, lr: 3.46e-02, grad_scale: 16.0 +2024-08-03 03:36:13,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=29443.333333333332, ans=0.0 +2024-08-03 03:36:15,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=29443.333333333332, ans=0.0 +2024-08-03 03:36:17,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=29443.333333333332, ans=0.125 +2024-08-03 03:36:21,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=29480.0, ans=0.125 +2024-08-03 03:38:09,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=15.0 +2024-08-03 03:38:26,552 INFO [train.py:1114] (3/4) Epoch 3, batch 800, loss[loss=0.2777, simple_loss=0.3457, pruned_loss=0.1048, over 13326.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3634, pruned_loss=0.1241, over 2597449.55 frames. 
], batch size: 33, lr: 3.46e-02, grad_scale: 32.0 +2024-08-03 03:38:45,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=29700.0, ans=0.0 +2024-08-03 03:38:46,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=29700.0, ans=0.125 +2024-08-03 03:39:00,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.412e+02 1.629e+02 2.089e+02 3.471e+02, threshold=3.259e+02, percent-clipped=0.0 +2024-08-03 03:39:00,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=29736.666666666668, ans=0.025 +2024-08-03 03:39:10,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=29773.333333333332, ans=0.0 +2024-08-03 03:39:12,357 INFO [train.py:1114] (3/4) Epoch 3, batch 850, loss[loss=0.2692, simple_loss=0.3456, pruned_loss=0.09636, over 13323.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.3629, pruned_loss=0.1237, over 2609917.49 frames. ], batch size: 40, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:39:23,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0 +2024-08-03 03:39:42,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=29920.0, ans=0.025 +2024-08-03 03:39:47,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=29920.0, ans=0.0 +2024-08-03 03:39:59,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.44 vs. limit=15.0 +2024-08-03 03:40:02,691 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:40:04,389 INFO [train.py:1114] (3/4) Epoch 3, batch 900, loss[loss=0.3146, simple_loss=0.3589, pruned_loss=0.1351, over 13344.00 frames. ], tot_loss[loss=0.3053, simple_loss=0.3632, pruned_loss=0.1238, over 2611854.10 frames. ], batch size: 33, lr: 3.45e-02, grad_scale: 32.0 +2024-08-03 03:40:04,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29993.333333333332, ans=0.1 +2024-08-03 03:40:10,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29993.333333333332, ans=0.125 +2024-08-03 03:40:11,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=29993.333333333332, ans=0.2 +2024-08-03 03:40:27,613 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:40:32,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.43 vs. 
limit=12.0 +2024-08-03 03:40:34,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=30103.333333333332, ans=0.125 +2024-08-03 03:40:36,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=30103.333333333332, ans=0.025 +2024-08-03 03:40:39,772 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.370e+02 1.651e+02 2.028e+02 4.342e+02, threshold=3.303e+02, percent-clipped=4.0 +2024-08-03 03:40:51,073 INFO [train.py:1114] (3/4) Epoch 3, batch 950, loss[loss=0.2706, simple_loss=0.3335, pruned_loss=0.1038, over 13551.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3636, pruned_loss=0.1243, over 2613637.82 frames. ], batch size: 34, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:40:51,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=30176.666666666668, ans=0.125 +2024-08-03 03:40:55,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=30176.666666666668, ans=0.125 +2024-08-03 03:40:59,581 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:41:19,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.81 vs. limit=15.0 +2024-08-03 03:41:20,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=30286.666666666668, ans=0.125 +2024-08-03 03:41:25,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=30286.666666666668, ans=0.125 +2024-08-03 03:41:30,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=30323.333333333332, ans=0.0 +2024-08-03 03:41:38,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.54 vs. limit=10.0 +2024-08-03 03:41:40,359 INFO [train.py:1114] (3/4) Epoch 3, batch 1000, loss[loss=0.2911, simple_loss=0.3512, pruned_loss=0.1155, over 13363.00 frames. ], tot_loss[loss=0.3076, simple_loss=0.3648, pruned_loss=0.1252, over 2611194.42 frames. 
], batch size: 35, lr: 3.44e-02, grad_scale: 16.0 +2024-08-03 03:41:40,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=30360.0, ans=0.07 +2024-08-03 03:41:52,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30396.666666666668, ans=0.1 +2024-08-03 03:42:15,781 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.383e+02 1.605e+02 2.126e+02 5.573e+02, threshold=3.210e+02, percent-clipped=1.0 +2024-08-03 03:42:17,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=30506.666666666668, ans=0.00423768115942029 +2024-08-03 03:42:26,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=30543.333333333332, ans=0.004229710144927536 +2024-08-03 03:42:27,492 INFO [train.py:1114] (3/4) Epoch 3, batch 1050, loss[loss=0.301, simple_loss=0.3758, pruned_loss=0.1131, over 13572.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3638, pruned_loss=0.1248, over 2615319.84 frames. ], batch size: 39, lr: 3.43e-02, grad_scale: 16.0 +2024-08-03 03:42:27,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=30543.333333333332, ans=10.0 +2024-08-03 03:42:38,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.88 vs. limit=15.0 +2024-08-03 03:42:49,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=30616.666666666668, ans=0.125 +2024-08-03 03:42:55,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.33 vs. limit=10.0 +2024-08-03 03:43:16,388 INFO [train.py:1114] (3/4) Epoch 3, batch 1100, loss[loss=0.2909, simple_loss=0.3537, pruned_loss=0.1141, over 13567.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.3628, pruned_loss=0.1237, over 2619796.99 frames. ], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:43:35,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=20.29 vs. limit=15.0 +2024-08-03 03:43:49,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30836.666666666668, ans=0.1 +2024-08-03 03:43:55,895 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.319e+02 1.473e+02 1.688e+02 2.812e+02, threshold=2.945e+02, percent-clipped=0.0 +2024-08-03 03:44:07,121 INFO [train.py:1114] (3/4) Epoch 3, batch 1150, loss[loss=0.2895, simple_loss=0.3518, pruned_loss=0.1136, over 13566.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3628, pruned_loss=0.1233, over 2619020.23 frames. 
], batch size: 36, lr: 3.42e-02, grad_scale: 16.0 +2024-08-03 03:44:13,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30910.0, ans=0.0 +2024-08-03 03:44:29,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=30983.333333333332, ans=0.015 +2024-08-03 03:44:29,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=30983.333333333332, ans=0.004134057971014494 +2024-08-03 03:44:44,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.70 vs. limit=6.0 +2024-08-03 03:44:51,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31056.666666666668, ans=0.1 +2024-08-03 03:44:59,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=31093.333333333332, ans=0.004110144927536232 +2024-08-03 03:45:00,366 INFO [train.py:1114] (3/4) Epoch 3, batch 1200, loss[loss=0.2745, simple_loss=0.3353, pruned_loss=0.1069, over 13572.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3638, pruned_loss=0.124, over 2616188.02 frames. ], batch size: 39, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:00,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=31093.333333333332, ans=0.0 +2024-08-03 03:45:09,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31093.333333333332, ans=0.1 +2024-08-03 03:45:10,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-08-03 03:45:11,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-08-03 03:45:21,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=31166.666666666668, ans=0.125 +2024-08-03 03:45:37,477 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.400e+02 1.587e+02 2.006e+02 3.916e+02, threshold=3.173e+02, percent-clipped=5.0 +2024-08-03 03:45:48,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=31276.666666666668, ans=0.125 +2024-08-03 03:45:48,704 INFO [train.py:1114] (3/4) Epoch 3, batch 1250, loss[loss=0.3323, simple_loss=0.3935, pruned_loss=0.1355, over 13413.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3632, pruned_loss=0.1234, over 2627660.37 frames. 
], batch size: 42, lr: 3.41e-02, grad_scale: 32.0 +2024-08-03 03:45:59,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=31313.333333333332, ans=0.125 +2024-08-03 03:46:11,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=31350.0, ans=0.07 +2024-08-03 03:46:29,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=31423.333333333332, ans=0.004038405797101449 +2024-08-03 03:46:33,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.36 vs. limit=12.0 +2024-08-03 03:46:34,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=15.0 +2024-08-03 03:46:34,553 INFO [train.py:1114] (3/4) Epoch 3, batch 1300, loss[loss=0.289, simple_loss=0.3618, pruned_loss=0.1081, over 12948.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3617, pruned_loss=0.1224, over 2630613.39 frames. ], batch size: 52, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:46:34,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=31460.0, ans=0.125 +2024-08-03 03:46:34,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=31460.0, ans=0.125 +2024-08-03 03:46:36,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.14 vs. limit=15.0 +2024-08-03 03:46:46,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=31496.666666666668, ans=0.05 +2024-08-03 03:47:00,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=31533.333333333332, ans=0.2 +2024-08-03 03:47:07,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=31570.0, ans=0.125 +2024-08-03 03:47:14,531 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.368e+02 1.596e+02 1.808e+02 3.073e+02, threshold=3.191e+02, percent-clipped=0.0 +2024-08-03 03:47:25,302 INFO [train.py:1114] (3/4) Epoch 3, batch 1350, loss[loss=0.2825, simple_loss=0.3499, pruned_loss=0.1076, over 13548.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3605, pruned_loss=0.1212, over 2638504.54 frames. 
], batch size: 37, lr: 3.40e-02, grad_scale: 16.0 +2024-08-03 03:47:25,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=31643.333333333332, ans=0.125 +2024-08-03 03:47:26,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=31643.333333333332, ans=0.025 +2024-08-03 03:47:37,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=31680.0, ans=0.2 +2024-08-03 03:47:48,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=31716.666666666668, ans=0.0 +2024-08-03 03:47:55,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=31753.333333333332, ans=0.1 +2024-08-03 03:48:08,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.57 vs. limit=15.0 +2024-08-03 03:48:11,337 INFO [train.py:1114] (3/4) Epoch 3, batch 1400, loss[loss=0.266, simple_loss=0.3195, pruned_loss=0.1063, over 13254.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3609, pruned_loss=0.1215, over 2642202.51 frames. ], batch size: 31, lr: 3.39e-02, grad_scale: 8.0 +2024-08-03 03:48:14,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.60 vs. limit=15.0 +2024-08-03 03:48:19,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=31863.333333333332, ans=0.125 +2024-08-03 03:48:46,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=31936.666666666668, ans=0.2 +2024-08-03 03:48:50,659 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.406e+02 1.620e+02 1.890e+02 3.890e+02, threshold=3.239e+02, percent-clipped=2.0 +2024-08-03 03:49:23,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=31973.333333333332, ans=0.0 +2024-08-03 03:49:26,504 INFO [train.py:1114] (3/4) Epoch 3, batch 1450, loss[loss=0.2681, simple_loss=0.341, pruned_loss=0.09759, over 13429.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3612, pruned_loss=0.1215, over 2641308.38 frames. 
], batch size: 43, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:49:43,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=32010.0, ans=0.125 +2024-08-03 03:49:52,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=32046.666666666668, ans=0.0 +2024-08-03 03:50:02,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=32046.666666666668, ans=0.003902898550724637 +2024-08-03 03:50:06,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=32046.666666666668, ans=0.2 +2024-08-03 03:52:44,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=32156.666666666668, ans=0.2 +2024-08-03 03:52:50,969 INFO [train.py:1114] (3/4) Epoch 3, batch 1500, loss[loss=0.3241, simple_loss=0.3837, pruned_loss=0.1323, over 13410.00 frames. ], tot_loss[loss=0.3017, simple_loss=0.361, pruned_loss=0.1212, over 2641314.66 frames. ], batch size: 39, lr: 3.38e-02, grad_scale: 8.0 +2024-08-03 03:52:53,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=32193.333333333332, ans=0.125 +2024-08-03 03:52:56,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=32193.333333333332, ans=0.04949747468305833 +2024-08-03 03:53:25,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=32303.333333333332, ans=0.125 +2024-08-03 03:53:29,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.393e+02 1.602e+02 2.004e+02 4.084e+02, threshold=3.204e+02, percent-clipped=1.0 +2024-08-03 03:53:34,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32340.0, ans=0.0 +2024-08-03 03:53:38,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=32376.666666666668, ans=0.0038311594202898548 +2024-08-03 03:53:38,884 INFO [train.py:1114] (3/4) Epoch 3, batch 1550, loss[loss=0.3012, simple_loss=0.3673, pruned_loss=0.1175, over 13406.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3612, pruned_loss=0.1216, over 2630948.88 frames. ], batch size: 41, lr: 3.37e-02, grad_scale: 8.0 +2024-08-03 03:53:45,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=32376.666666666668, ans=0.125 +2024-08-03 03:54:13,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=32413.333333333332, ans=0.125 +2024-08-03 03:54:41,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=32523.333333333332, ans=0.0 +2024-08-03 03:54:57,314 INFO [train.py:1114] (3/4) Epoch 3, batch 1600, loss[loss=0.2932, simple_loss=0.364, pruned_loss=0.1112, over 13579.00 frames. ], tot_loss[loss=0.303, simple_loss=0.3616, pruned_loss=0.1222, over 2624221.59 frames. 
], batch size: 39, lr: 3.37e-02, grad_scale: 16.0 +2024-08-03 03:54:58,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32560.0, ans=0.1 +2024-08-03 03:55:16,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=32633.333333333332, ans=0.025 +2024-08-03 03:55:16,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-08-03 03:55:37,888 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.348e+02 1.511e+02 1.737e+02 4.413e+02, threshold=3.022e+02, percent-clipped=2.0 +2024-08-03 03:55:41,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=32706.666666666668, ans=0.2 +2024-08-03 03:55:42,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32706.666666666668, ans=0.125 +2024-08-03 03:55:48,035 INFO [train.py:1114] (3/4) Epoch 3, batch 1650, loss[loss=0.3105, simple_loss=0.3841, pruned_loss=0.1184, over 13357.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3628, pruned_loss=0.1236, over 2621292.88 frames. ], batch size: 40, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:55:52,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=32743.333333333332, ans=0.0 +2024-08-03 03:55:56,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32743.333333333332, ans=0.1 +2024-08-03 03:56:07,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32780.0, ans=0.1 +2024-08-03 03:56:22,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0 +2024-08-03 03:56:31,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=32853.333333333336, ans=0.025 +2024-08-03 03:56:43,053 INFO [train.py:1114] (3/4) Epoch 3, batch 1700, loss[loss=0.2567, simple_loss=0.3145, pruned_loss=0.09945, over 13257.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3616, pruned_loss=0.1224, over 2630580.40 frames. ], batch size: 31, lr: 3.36e-02, grad_scale: 16.0 +2024-08-03 03:56:51,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.31 vs. limit=10.0 +2024-08-03 03:57:19,657 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.348e+02 1.541e+02 1.805e+02 2.810e+02, threshold=3.082e+02, percent-clipped=0.0 +2024-08-03 03:57:34,248 INFO [train.py:1114] (3/4) Epoch 3, batch 1750, loss[loss=0.2773, simple_loss=0.327, pruned_loss=0.1138, over 13531.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.3611, pruned_loss=0.122, over 2633972.61 frames. 
], batch size: 31, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:57:37,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=33110.0, ans=0.003671739130434782 +2024-08-03 03:57:37,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=33110.0, ans=0.95 +2024-08-03 03:57:50,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=33146.666666666664, ans=0.125 +2024-08-03 03:57:53,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=33183.333333333336, ans=15.0 +2024-08-03 03:57:54,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.whiten.whitening_limit, batch_count=33183.333333333336, ans=15.0 +2024-08-03 03:58:09,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=33183.333333333336, ans=0.0 +2024-08-03 03:58:14,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.48 vs. limit=15.0 +2024-08-03 03:58:28,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33256.666666666664, ans=0.125 +2024-08-03 03:58:29,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=33256.666666666664, ans=0.125 +2024-08-03 03:58:34,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=33256.666666666664, ans=0.2 +2024-08-03 03:58:36,900 INFO [train.py:1114] (3/4) Epoch 3, batch 1800, loss[loss=0.3019, simple_loss=0.373, pruned_loss=0.1153, over 13549.00 frames. ], tot_loss[loss=0.303, simple_loss=0.3616, pruned_loss=0.1222, over 2635923.98 frames. ], batch size: 38, lr: 3.35e-02, grad_scale: 16.0 +2024-08-03 03:58:56,575 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 03:59:17,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.365e+02 1.589e+02 1.919e+02 3.211e+02, threshold=3.178e+02, percent-clipped=2.0 +2024-08-03 03:59:24,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33440.0, ans=0.125 +2024-08-03 03:59:26,704 INFO [train.py:1114] (3/4) Epoch 3, batch 1850, loss[loss=0.267, simple_loss=0.3441, pruned_loss=0.09493, over 13411.00 frames. ], tot_loss[loss=0.3015, simple_loss=0.3605, pruned_loss=0.1213, over 2638464.73 frames. 
], batch size: 39, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 03:59:38,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=33513.333333333336, ans=0.0 +2024-08-03 03:59:41,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=33513.333333333336, ans=0.0 +2024-08-03 04:00:19,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=33623.333333333336, ans=0.2 +2024-08-03 04:00:22,362 INFO [train.py:1114] (3/4) Epoch 3, batch 1900, loss[loss=0.3378, simple_loss=0.3927, pruned_loss=0.1414, over 13328.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3607, pruned_loss=0.1209, over 2641047.92 frames. ], batch size: 40, lr: 3.34e-02, grad_scale: 16.0 +2024-08-03 04:00:42,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-08-03 04:00:47,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=33733.333333333336, ans=22.5 +2024-08-03 04:01:01,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.394e+02 1.539e+02 1.838e+02 3.320e+02, threshold=3.078e+02, percent-clipped=1.0 +2024-08-03 04:01:03,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=33806.666666666664, ans=0.025 +2024-08-03 04:01:10,629 INFO [train.py:1114] (3/4) Epoch 3, batch 1950, loss[loss=0.2948, simple_loss=0.3534, pruned_loss=0.1181, over 13561.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3614, pruned_loss=0.1211, over 2647124.51 frames. ], batch size: 36, lr: 3.33e-02, grad_scale: 16.0 +2024-08-03 04:02:01,747 INFO [train.py:1114] (3/4) Epoch 3, batch 2000, loss[loss=0.2613, simple_loss=0.3204, pruned_loss=0.101, over 13529.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3621, pruned_loss=0.122, over 2637062.27 frames. ], batch size: 31, lr: 3.32e-02, grad_scale: 32.0 +2024-08-03 04:02:32,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=34100.0, ans=0.0 +2024-08-03 04:02:45,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.34 vs. limit=15.0 +2024-08-03 04:02:48,520 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.429e+02 1.657e+02 2.036e+02 4.223e+02, threshold=3.314e+02, percent-clipped=3.0 +2024-08-03 04:02:51,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=34173.333333333336, ans=0.125 +2024-08-03 04:02:57,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=34210.0, ans=0.125 +2024-08-03 04:02:57,967 INFO [train.py:1114] (3/4) Epoch 3, batch 2050, loss[loss=0.2841, simple_loss=0.338, pruned_loss=0.1151, over 13415.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3611, pruned_loss=0.1222, over 2634539.38 frames. 
+2024-08-03 04:02:58,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=34210.0, ans=0.125
+2024-08-03 04:03:04,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.53 vs. limit=15.0
+2024-08-03 04:03:15,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=34246.666666666664, ans=0.2
+2024-08-03 04:03:17,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=34246.666666666664, ans=0.5
+2024-08-03 04:03:23,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=34283.333333333336, ans=0.0
+2024-08-03 04:03:27,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34283.333333333336, ans=0.1
+2024-08-03 04:03:34,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=34320.0, ans=0.0
+2024-08-03 04:03:41,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34356.666666666664, ans=0.1
+2024-08-03 04:03:46,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34356.666666666664, ans=0.1
+2024-08-03 04:03:47,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=34393.333333333336, ans=0.025
+2024-08-03 04:03:48,239 INFO [train.py:1114] (3/4) Epoch 3, batch 2100, loss[loss=0.313, simple_loss=0.3753, pruned_loss=0.1253, over 13546.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3602, pruned_loss=0.1215, over 2639871.44 frames. ], batch size: 37, lr: 3.31e-02, grad_scale: 32.0
+2024-08-03 04:04:12,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=34466.666666666664, ans=0.2
+2024-08-03 04:04:18,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=34466.666666666664, ans=0.125
+2024-08-03 04:04:21,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=34503.333333333336, ans=0.003368840579710144
+2024-08-03 04:04:29,651 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.385e+02 1.595e+02 1.788e+02 2.690e+02, threshold=3.190e+02, percent-clipped=1.0
+2024-08-03 04:04:37,868 INFO [train.py:1114] (3/4) Epoch 3, batch 2150, loss[loss=0.2737, simple_loss=0.3331, pruned_loss=0.1071, over 13570.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3582, pruned_loss=0.12, over 2648218.29 frames. ], batch size: 36, lr: 3.31e-02, grad_scale: 16.0
+2024-08-03 04:05:00,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=34576.666666666664, ans=0.125
+2024-08-03 04:05:24,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.66 vs. limit=12.0
+2024-08-03 04:05:29,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.15 vs. limit=10.0
+2024-08-03 04:05:34,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34723.333333333336, ans=0.125
+2024-08-03 04:05:38,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=15.0
+2024-08-03 04:05:42,224 INFO [train.py:1114] (3/4) Epoch 3, batch 2200, loss[loss=0.3163, simple_loss=0.38, pruned_loss=0.1263, over 13396.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3581, pruned_loss=0.1198, over 2645722.95 frames. ], batch size: 39, lr: 3.30e-02, grad_scale: 16.0
+2024-08-03 04:05:43,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=34760.0, ans=0.125
+2024-08-03 04:05:47,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34760.0, ans=0.1
+2024-08-03 04:06:04,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34833.333333333336, ans=0.125
+2024-08-03 04:06:24,638 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.404e+02 1.621e+02 1.995e+02 2.772e+02, threshold=3.241e+02, percent-clipped=0.0
+2024-08-03 04:06:24,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=34906.666666666664, ans=0.125
+2024-08-03 04:06:24,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=34906.666666666664, ans=0.125
+2024-08-03 04:06:34,731 INFO [train.py:1114] (3/4) Epoch 3, batch 2250, loss[loss=0.2702, simple_loss=0.3501, pruned_loss=0.09518, over 13361.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3581, pruned_loss=0.1196, over 2643024.27 frames. ], batch size: 37, lr: 3.30e-02, grad_scale: 16.0
+2024-08-03 04:06:35,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=34943.333333333336, ans=0.035
+2024-08-03 04:06:41,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.09 vs. limit=6.0
+2024-08-03 04:06:45,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=34980.0, ans=0.125
+2024-08-03 04:06:54,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=35016.666666666664, ans=0.125
+2024-08-03 04:07:17,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.06 vs. limit=15.0
+2024-08-03 04:07:20,713 INFO [train.py:1114] (3/4) Epoch 3, batch 2300, loss[loss=0.2497, simple_loss=0.3131, pruned_loss=0.09314, over 13579.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3564, pruned_loss=0.1191, over 2639024.05 frames. ], batch size: 33, lr: 3.29e-02, grad_scale: 16.0
+2024-08-03 04:07:38,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0
+2024-08-03 04:07:43,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35200.0, ans=0.125
+2024-08-03 04:07:52,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=35236.666666666664, ans=0.2
+2024-08-03 04:08:01,044 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.453e+02 1.713e+02 2.224e+02 5.491e+02, threshold=3.425e+02, percent-clipped=5.0
+2024-08-03 04:08:02,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35273.333333333336, ans=0.125
+2024-08-03 04:08:11,152 INFO [train.py:1114] (3/4) Epoch 3, batch 2350, loss[loss=0.289, simple_loss=0.354, pruned_loss=0.112, over 13552.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.3566, pruned_loss=0.1188, over 2641726.36 frames. ], batch size: 38, lr: 3.29e-02, grad_scale: 16.0
+2024-08-03 04:08:12,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=35310.0, ans=0.125
+2024-08-03 04:08:19,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=35346.666666666664, ans=0.003185507246376813
+2024-08-03 04:08:37,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35383.333333333336, ans=0.125
+2024-08-03 04:08:54,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.01 vs. limit=15.0
+2024-08-03 04:09:00,930 INFO [train.py:1114] (3/4) Epoch 3, batch 2400, loss[loss=0.2586, simple_loss=0.329, pruned_loss=0.0941, over 13548.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.357, pruned_loss=0.1188, over 2642786.53 frames. ], batch size: 35, lr: 3.28e-02, grad_scale: 32.0
+2024-08-03 04:09:02,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=35493.333333333336, ans=0.2
+2024-08-03 04:09:03,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=35493.333333333336, ans=0.2
+2024-08-03 04:09:06,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35493.333333333336, ans=0.125
+2024-08-03 04:09:11,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.06 vs. limit=15.0
+2024-08-03 04:09:13,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35530.0, ans=0.0
+2024-08-03 04:09:38,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.322e+02 1.493e+02 1.831e+02 3.002e+02, threshold=2.985e+02, percent-clipped=0.0
+2024-08-03 04:09:47,347 INFO [train.py:1114] (3/4) Epoch 3, batch 2450, loss[loss=0.288, simple_loss=0.3463, pruned_loss=0.1149, over 13353.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3593, pruned_loss=0.1203, over 2632388.34 frames. ], batch size: 37, lr: 3.28e-02, grad_scale: 32.0
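
The `Whitening:` entries compare a measured statistic against a scheduled `whitening_limit`; values such as `metric=15.48 vs. limit=15.0` show the whitening penalty activating only when the measurement approaches or exceeds the limit. A rough sketch of one way such a metric can be computed, as an eigenvalue-dispersion ratio that equals 1.0 for perfectly white (identity-covariance) features; this is a hedged proxy for `scaling.py`'s actual metric, not a copy of it:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels). Returns >= 1.0; equal to 1.0 when each
    group's feature covariance is proportional to the identity ("white")."""
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    ratios = []
    for g in range(num_groups):
        cov = x[:, g, :].T @ x[:, g, :] / num_frames
        eigs = torch.linalg.eigvalsh(cov)
        # second moment over squared first moment of the eigenvalue spectrum:
        ratios.append((eigs.pow(2).mean() / eigs.mean().pow(2)).item())
    return sum(ratios) / len(ratios)

feats = torch.randn(2000, 512)   # near-white input
print(whitening_metric(feats))   # close to 1.0, well under limit=15.0
```
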
+2024-08-03 04:10:28,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=12.0
+2024-08-03 04:10:37,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35823.333333333336, ans=0.0
+2024-08-03 04:10:39,195 INFO [train.py:1114] (3/4) Epoch 3, batch 2500, loss[loss=0.3177, simple_loss=0.3796, pruned_loss=0.1279, over 13399.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.359, pruned_loss=0.1196, over 2636652.74 frames. ], batch size: 39, lr: 3.27e-02, grad_scale: 32.0
+2024-08-03 04:10:42,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35860.0, ans=0.1
+2024-08-03 04:10:43,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=35860.0, ans=0.125
+2024-08-03 04:11:02,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=35933.333333333336, ans=0.125
+2024-08-03 04:11:11,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35970.0, ans=0.1
+2024-08-03 04:11:15,827 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.323e+02 1.438e+02 1.681e+02 3.376e+02, threshold=2.876e+02, percent-clipped=2.0
+2024-08-03 04:11:22,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=36006.666666666664, ans=0.2
+2024-08-03 04:11:27,368 INFO [train.py:1114] (3/4) Epoch 3, batch 2550, loss[loss=0.2905, simple_loss=0.3425, pruned_loss=0.1193, over 13536.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3587, pruned_loss=0.1195, over 2637481.03 frames. ], batch size: 31, lr: 3.27e-02, grad_scale: 32.0
+2024-08-03 04:11:35,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-08-03 04:11:55,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=36080.0, ans=0.125
+2024-08-03 04:12:01,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36116.666666666664, ans=0.125
+2024-08-03 04:12:07,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.56 vs. limit=22.5
+2024-08-03 04:12:21,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=36153.333333333336, ans=0.0
+2024-08-03 04:12:43,961 INFO [train.py:1114] (3/4) Epoch 3, batch 2600, loss[loss=0.284, simple_loss=0.3452, pruned_loss=0.1114, over 13561.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3585, pruned_loss=0.119, over 2636718.94 frames. ], batch size: 36, lr: 3.26e-02, grad_scale: 32.0
+2024-08-03 04:13:39,922 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.332e+02 1.510e+02 1.763e+02 2.662e+02, threshold=3.019e+02, percent-clipped=0.0
+2024-08-03 04:13:49,886 INFO [train.py:1114] (3/4) Epoch 3, batch 2650, loss[loss=0.2686, simple_loss=0.3424, pruned_loss=0.09736, over 13297.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3573, pruned_loss=0.1181, over 2639821.70 frames. ], batch size: 46, lr: 3.26e-02, grad_scale: 32.0
+2024-08-03 04:13:52,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=36410.0, ans=0.125
+2024-08-03 04:14:03,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=36446.666666666664, ans=0.002946376811594204
+2024-08-03 04:14:14,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=36483.333333333336, ans=0.125
+2024-08-03 04:14:42,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=36556.666666666664, ans=0.0
+2024-08-03 04:14:56,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=36556.666666666664, ans=0.2
+2024-08-03 04:14:56,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36556.666666666664, ans=0.1
+2024-08-03 04:14:57,818 INFO [train.py:1114] (3/4) Epoch 3, batch 2700, loss[loss=0.3042, simple_loss=0.373, pruned_loss=0.1176, over 13543.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3578, pruned_loss=0.1184, over 2636896.95 frames. ], batch size: 40, lr: 3.25e-02, grad_scale: 16.0
+2024-08-03 04:15:00,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36593.333333333336, ans=0.0
+2024-08-03 04:19:05,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=15.0
+2024-08-03 04:19:06,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=12.0
+2024-08-03 04:19:08,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36703.333333333336, ans=0.1
+2024-08-03 04:19:15,072 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.378e+02 1.616e+02 1.965e+02 4.698e+02, threshold=3.232e+02, percent-clipped=3.0
+2024-08-03 04:19:22,261 INFO [train.py:1114] (3/4) Epoch 3, batch 2750, loss[loss=0.2754, simple_loss=0.3352, pruned_loss=0.1078, over 13335.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3564, pruned_loss=0.1175, over 2635081.56 frames. ], batch size: 34, lr: 3.24e-02, grad_scale: 16.0
+2024-08-03 04:19:22,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36776.666666666664, ans=0.125
+2024-08-03 04:19:29,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36776.666666666664, ans=0.1
+2024-08-03 04:19:48,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-08-03 04:19:55,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=36886.666666666664, ans=0.0
+2024-08-03 04:20:05,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=36923.333333333336, ans=0.0028427536231884053
+2024-08-03 04:20:05,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=36923.333333333336, ans=0.0028427536231884053
+2024-08-03 04:20:07,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36923.333333333336, ans=0.125
+2024-08-03 04:20:09,777 INFO [train.py:1114] (3/4) Epoch 3, batch 2800, loss[loss=0.3793, simple_loss=0.4122, pruned_loss=0.1732, over 9720.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3579, pruned_loss=0.1193, over 2627168.89 frames. ], batch size: 98, lr: 3.24e-02, grad_scale: 32.0
+2024-08-03 04:20:30,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=37033.333333333336, ans=0.05
+2024-08-03 04:20:46,347 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.344e+02 1.514e+02 1.782e+02 3.763e+02, threshold=3.028e+02, percent-clipped=1.0
+2024-08-03 04:20:49,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=37106.666666666664, ans=0.125
+2024-08-03 04:20:51,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=37106.666666666664, ans=0.025
+2024-08-03 04:20:52,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=37143.333333333336, ans=0.05
+2024-08-03 04:20:53,506 INFO [train.py:1114] (3/4) Epoch 3, batch 2850, loss[loss=0.3191, simple_loss=0.3678, pruned_loss=0.1352, over 13364.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3601, pruned_loss=0.121, over 2621139.55 frames. ], batch size: 35, lr: 3.23e-02, grad_scale: 32.0
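
The recurring `[optim.py:487]` warnings summarize the distribution of recent gradient norms (the five numbers are min / 25% / median / 75% / max), the clipping threshold derived from that distribution, and the share of recent steps that were clipped. A sketch of this style of adaptive clipping, assuming the threshold is `Clipping_scale` times the running median of recent norms; that rule is a plausible reading of the logged fields, not necessarily icefall's exact one:

```python
import torch
from collections import deque

class GradNormClipper:
    """Clip gradients against a threshold derived from recent grad norms.
    Assumption: threshold = clipping_scale * median(recent norms)."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.clipped = self.total = 0

    def clip_(self, params) -> float:
        grads = [p.grad.flatten() for p in params if p.grad is not None]
        norm = torch.cat(grads).norm().item()
        self.norms.append(norm)
        ordered = sorted(self.norms)
        # the five logged "grad-norm quartiles":
        quartiles = [ordered[int(q * (len(ordered) - 1))]
                     for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]
        self.total += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return 100.0 * self.clipped / self.total  # "percent-clipped"
```
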
+2024-08-03 04:21:03,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=37180.0, ans=0.2
+2024-08-03 04:21:03,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37180.0, ans=0.125
+2024-08-03 04:21:08,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=37180.0, ans=0.025
+2024-08-03 04:21:12,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=37216.666666666664, ans=0.5
+2024-08-03 04:21:14,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5
+2024-08-03 04:21:32,085 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:21:34,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=37290.0, ans=0.002763043478260869
+2024-08-03 04:21:38,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.54 vs. limit=6.0
+2024-08-03 04:21:41,447 INFO [train.py:1114] (3/4) Epoch 3, batch 2900, loss[loss=0.2805, simple_loss=0.3416, pruned_loss=0.1097, over 13366.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3606, pruned_loss=0.1204, over 2631851.35 frames. ], batch size: 36, lr: 3.23e-02, grad_scale: 32.0
+2024-08-03 04:21:46,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=37326.666666666664, ans=0.0027550724637681167
+2024-08-03 04:21:48,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=37326.666666666664, ans=0.0
+2024-08-03 04:22:19,108 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.112e+02 1.360e+02 1.545e+02 1.848e+02 3.511e+02, threshold=3.091e+02, percent-clipped=1.0
+2024-08-03 04:22:21,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=37473.333333333336, ans=0.0027231884057971013
+2024-08-03 04:22:26,070 INFO [train.py:1114] (3/4) Epoch 3, batch 2950, loss[loss=0.2749, simple_loss=0.3407, pruned_loss=0.1045, over 13320.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3592, pruned_loss=0.1199, over 2629758.86 frames. ], batch size: 34, lr: 3.22e-02, grad_scale: 32.0
+2024-08-03 04:22:35,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=37546.666666666664, ans=0.002707246376811595
+2024-08-03 04:22:50,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=37583.333333333336, ans=0.125
+2024-08-03 04:23:01,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=37656.666666666664, ans=0.2
+2024-08-03 04:23:05,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=15.0
+2024-08-03 04:23:09,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=37693.333333333336, ans=0.0
+2024-08-03 04:23:11,264 INFO [train.py:1114] (3/4) Epoch 3, batch 3000, loss[loss=0.3068, simple_loss=0.3628, pruned_loss=0.1254, over 13544.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3591, pruned_loss=0.1198, over 2630045.72 frames. ], batch size: 37, lr: 3.22e-02, grad_scale: 16.0
+2024-08-03 04:23:11,265 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 04:23:49,064 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2357, simple_loss=0.3301, pruned_loss=0.07069, over 944034.00 frames.
+2024-08-03 04:23:49,065 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 04:25:13,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=37803.333333333336, ans=0.0
+2024-08-03 04:25:32,441 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.344e+02 1.558e+02 1.814e+02 2.891e+02, threshold=3.117e+02, percent-clipped=0.0
+2024-08-03 04:25:42,601 INFO [train.py:1114] (3/4) Epoch 3, batch 3050, loss[loss=0.2695, simple_loss=0.3323, pruned_loss=0.1034, over 13523.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3604, pruned_loss=0.1206, over 2626903.95 frames. ], batch size: 35, lr: 3.21e-02, grad_scale: 8.0
+2024-08-03 04:25:42,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37876.666666666664, ans=0.125
+2024-08-03 04:26:49,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=38023.333333333336, ans=0.2
+2024-08-03 04:26:55,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38023.333333333336, ans=0.1
+2024-08-03 04:27:00,285 INFO [train.py:1114] (3/4) Epoch 3, batch 3100, loss[loss=0.3078, simple_loss=0.3763, pruned_loss=0.1197, over 13286.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3591, pruned_loss=0.1196, over 2625547.51 frames. ], batch size: 46, lr: 3.21e-02, grad_scale: 8.0
+2024-08-03 04:27:10,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=38060.0, ans=0.0
+2024-08-03 04:27:10,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=38060.0, ans=0.125
+2024-08-03 04:27:13,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.52 vs. limit=15.0
+2024-08-03 04:27:35,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=38133.333333333336, ans=0.002579710144927536
+2024-08-03 04:27:42,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=38170.0, ans=0.0
+2024-08-03 04:27:51,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.59 vs. limit=22.5
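
The `Computing validation loss` / `Epoch 3, validation: ...` lines above show a periodic evaluation pass over a fixed held-out set (note the constant 944034.00 frames), after which training resumes. A minimal sketch of that pattern; the function names here are illustrative, not the actual `train.py` code:

```python
import torch

def compute_validation_loss(model, valid_loader, compute_loss):
    """Frame-weighted average loss over a fixed validation set."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    # e.g. "Epoch 3, validation: loss=0.2357 ... over 944034.00 frames."
    return tot_loss / tot_frames
```
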
+2024-08-03 04:27:53,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.343e+02 1.458e+02 1.761e+02 2.606e+02, threshold=2.915e+02, percent-clipped=0.0
+2024-08-03 04:27:58,276 INFO [train.py:1114] (3/4) Epoch 3, batch 3150, loss[loss=0.316, simple_loss=0.3685, pruned_loss=0.1318, over 12923.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3586, pruned_loss=0.1194, over 2627044.19 frames. ], batch size: 48, lr: 3.20e-02, grad_scale: 8.0
+2024-08-03 04:28:06,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=38280.0, ans=0.2
+2024-08-03 04:28:06,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38280.0, ans=0.125
+2024-08-03 04:28:08,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=38280.0, ans=0.2
+2024-08-03 04:28:23,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=38353.333333333336, ans=0.125
+2024-08-03 04:28:33,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=38390.0, ans=0.125
+2024-08-03 04:28:34,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38390.0, ans=0.125
+2024-08-03 04:28:42,454 INFO [train.py:1114] (3/4) Epoch 3, batch 3200, loss[loss=0.3013, simple_loss=0.3618, pruned_loss=0.1204, over 13528.00 frames. ], tot_loss[loss=0.298, simple_loss=0.358, pruned_loss=0.119, over 2633858.01 frames. ], batch size: 37, lr: 3.20e-02, grad_scale: 16.0
+2024-08-03 04:28:46,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0
+2024-08-03 04:29:00,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=38500.0, ans=0.2
+2024-08-03 04:29:08,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. limit=10.0
+2024-08-03 04:29:18,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38573.333333333336, ans=0.125
+2024-08-03 04:29:18,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0
+2024-08-03 04:29:20,717 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.433e+02 1.626e+02 1.828e+02 2.707e+02, threshold=3.253e+02, percent-clipped=0.0
+2024-08-03 04:29:26,745 INFO [train.py:1114] (3/4) Epoch 3, batch 3250, loss[loss=0.3041, simple_loss=0.363, pruned_loss=0.1227, over 13394.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3585, pruned_loss=0.1191, over 2638423.27 frames. ], batch size: 38, lr: 3.19e-02, grad_scale: 16.0
+2024-08-03 04:29:27,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=38610.0, ans=0.002476086956521739
+2024-08-03 04:29:31,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0
+2024-08-03 04:29:42,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=38646.666666666664, ans=0.0
+2024-08-03 04:29:46,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=38683.333333333336, ans=0.125
+2024-08-03 04:30:10,327 INFO [train.py:1114] (3/4) Epoch 3, batch 3300, loss[loss=0.3012, simple_loss=0.3609, pruned_loss=0.1207, over 12931.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3564, pruned_loss=0.1179, over 2639678.38 frames. ], batch size: 52, lr: 3.19e-02, grad_scale: 16.0
+2024-08-03 04:30:10,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38793.333333333336, ans=0.1
+2024-08-03 04:30:23,698 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:30:31,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38866.666666666664, ans=0.1
+2024-08-03 04:30:35,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=38903.333333333336, ans=10.0
+2024-08-03 04:30:46,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=38940.0, ans=0.125
+2024-08-03 04:30:51,790 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.360e+02 1.548e+02 1.881e+02 7.173e+02, threshold=3.096e+02, percent-clipped=4.0
+2024-08-03 04:30:53,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=38940.0, ans=0.0
+2024-08-03 04:30:58,399 INFO [train.py:1114] (3/4) Epoch 3, batch 3350, loss[loss=0.3487, simple_loss=0.3979, pruned_loss=0.1498, over 13011.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.358, pruned_loss=0.1189, over 2628946.57 frames. ], batch size: 48, lr: 3.18e-02, grad_scale: 16.0
+2024-08-03 04:30:58,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=38976.666666666664, ans=0.0
+2024-08-03 04:31:10,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.66 vs. limit=22.5
+2024-08-03 04:31:15,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=39050.0, ans=0.125
+2024-08-03 04:32:21,172 INFO [train.py:1114] (3/4) Epoch 3, batch 3400, loss[loss=0.2886, simple_loss=0.3303, pruned_loss=0.1235, over 13548.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3576, pruned_loss=0.119, over 2625907.26 frames. ], batch size: 31, lr: 3.18e-02, grad_scale: 16.0
+2024-08-03 04:32:30,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.65 vs. limit=15.0
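
The per-batch `loss`, `simple_loss`, and `pruned_loss` fields are consistent with a pruned-RNN-T objective combined as `loss = 0.5 * simple_loss + pruned_loss`: for batch 3300 above, 0.5 * 0.3609 + 0.1207 ≈ 0.3012, and for batch 3400, 0.5 * 0.3303 + 0.1235 ≈ 0.2886. A sketch of that combination; the 0.5 scale is inferred from the logged numbers, while the two loss terms themselves come from the k2/icefall loss functions:

```python
def combined_loss(simple_loss: float, pruned_loss: float,
                  simple_loss_scale: float = 0.5) -> float:
    # Scale inferred from the logged values; see the checks below.
    return simple_loss_scale * simple_loss + pruned_loss

assert abs(combined_loss(0.3609, 0.1207) - 0.3012) < 1e-3  # batch 3300
assert abs(combined_loss(0.3303, 0.1235) - 0.2886) < 1e-3  # batch 3400
```

The `tot_loss[... over N frames]` figure alongside each batch appears to be a frame-weighted running average of these per-batch losses, which is why its frame count hovers around 2.6M rather than growing without bound.
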
+2024-08-03 04:32:47,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=39270.0, ans=0.2
+2024-08-03 04:32:51,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=39270.0, ans=10.0
+2024-08-03 04:32:58,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39270.0, ans=0.125
+2024-08-03 04:33:13,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.53 vs. limit=6.0
+2024-08-03 04:33:16,813 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.382e+02 1.601e+02 1.847e+02 2.492e+02, threshold=3.202e+02, percent-clipped=0.0
+2024-08-03 04:33:21,218 INFO [train.py:1114] (3/4) Epoch 3, batch 3450, loss[loss=0.3523, simple_loss=0.3949, pruned_loss=0.1549, over 12936.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3564, pruned_loss=0.118, over 2629255.35 frames. ], batch size: 52, lr: 3.17e-02, grad_scale: 8.0
+2024-08-03 04:33:21,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=39343.333333333336, ans=0.0
+2024-08-03 04:33:21,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.37 vs. limit=10.0
+2024-08-03 04:33:29,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39380.0, ans=0.1
+2024-08-03 04:33:31,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.14 vs. limit=15.0
+2024-08-03 04:33:32,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=39380.0, ans=0.025
+2024-08-03 04:33:34,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.71 vs. limit=15.0
+2024-08-03 04:33:40,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=39416.666666666664, ans=0.025
+2024-08-03 04:33:46,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=39453.333333333336, ans=0.002292753623188405
+2024-08-03 04:33:53,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=15.0
+2024-08-03 04:34:02,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=39490.0, ans=0.125
+2024-08-03 04:34:03,917 INFO [train.py:1114] (3/4) Epoch 3, batch 3500, loss[loss=0.2764, simple_loss=0.3332, pruned_loss=0.1098, over 13535.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.356, pruned_loss=0.1184, over 2631518.51 frames. ], batch size: 34, lr: 3.17e-02, grad_scale: 8.0
+2024-08-03 04:34:14,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. limit=6.0
+2024-08-03 04:34:15,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39563.333333333336, ans=0.1
+2024-08-03 04:34:18,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=39563.333333333336, ans=0.2
+2024-08-03 04:34:29,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=39636.666666666664, ans=0.125
+2024-08-03 04:34:37,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39636.666666666664, ans=0.0
+2024-08-03 04:34:42,757 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.446e+02 1.687e+02 2.018e+02 4.896e+02, threshold=3.374e+02, percent-clipped=2.0
+2024-08-03 04:34:45,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39673.333333333336, ans=0.1
+2024-08-03 04:34:45,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=39673.333333333336, ans=0.2
+2024-08-03 04:34:46,981 INFO [train.py:1114] (3/4) Epoch 3, batch 3550, loss[loss=0.3193, simple_loss=0.3763, pruned_loss=0.1312, over 12349.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3586, pruned_loss=0.1199, over 2629139.91 frames. ], batch size: 58, lr: 3.16e-02, grad_scale: 8.0
+2024-08-03 04:34:47,167 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:34:52,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0
+2024-08-03 04:35:02,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=39746.666666666664, ans=0.09899494936611666
+2024-08-03 04:35:27,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.82 vs. limit=12.0
+2024-08-03 04:35:30,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=39820.0, ans=0.125
+2024-08-03 04:35:40,845 INFO [train.py:1114] (3/4) Epoch 3, batch 3600, loss[loss=0.3533, simple_loss=0.3923, pruned_loss=0.1571, over 9134.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3654, pruned_loss=0.1268, over 2486661.42 frames. ], batch size: 98, lr: 3.16e-02, grad_scale: 16.0
+2024-08-03 04:35:45,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.39 vs. limit=6.0
+2024-08-03 04:35:47,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39893.333333333336, ans=0.125
+2024-08-03 04:35:49,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39930.0, ans=0.1
+2024-08-03 04:35:55,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=39930.0, ans=0.07
+2024-08-03 04:36:08,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=40003.333333333336, ans=0.0
+2024-08-03 04:37:20,576 INFO [train.py:1114] (3/4) Epoch 4, batch 0, loss[loss=0.2703, simple_loss=0.3323, pruned_loss=0.1042, over 13326.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3323, pruned_loss=0.1042, over 13326.00 frames. ], batch size: 33, lr: 2.95e-02, grad_scale: 32.0
+2024-08-03 04:37:20,577 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 04:37:30,576 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2412, simple_loss=0.337, pruned_loss=0.07274, over 944034.00 frames.
+2024-08-03 04:37:30,576 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 04:43:26,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.350e+02 1.466e+02 1.683e+02 2.712e+02, threshold=2.931e+02, percent-clipped=0.0
+2024-08-03 04:43:33,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40080.333333333336, ans=0.1
+2024-08-03 04:45:13,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.19 vs. limit=10.0
+2024-08-03 04:45:25,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=40153.666666666664, ans=0.125
+2024-08-03 04:45:26,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=40153.666666666664, ans=0.125
+2024-08-03 04:45:30,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=40190.333333333336, ans=0.025
+2024-08-03 04:45:39,708 INFO [train.py:1114] (3/4) Epoch 4, batch 50, loss[loss=0.2311, simple_loss=0.298, pruned_loss=0.08208, over 13428.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3622, pruned_loss=0.1221, over 578774.31 frames. ], batch size: 32, lr: 2.95e-02, grad_scale: 32.0
+2024-08-03 04:45:48,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40263.666666666664, ans=0.125
+2024-08-03 04:45:49,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=40263.666666666664, ans=0.1
+2024-08-03 04:45:54,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=40263.666666666664, ans=0.025
+2024-08-03 04:46:00,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.10 vs. limit=22.5
+2024-08-03 04:46:01,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.73 vs. limit=22.5
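
The learning rate decays within epoch 3 (3.35e-02 down to 3.16e-02) and then steps down again at the epoch 4 boundary (2.95e-02), i.e. the schedule depends on both the batch count and the epoch, as in icefall's Eden scheduler. A sketch of the Eden formula; `base_lr`, `lr_batches`, and `lr_epochs` are assumed values for illustration (with `lr_epochs=3.5` the epoch factor alone predicts a drop ratio of about 0.93 across the epoch 3-to-4 boundary, close to the logged 3.16e-02 → 2.95e-02):

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Eden-style schedule: smooth inverse-power decay in both batch and epoch.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Ratio across the epoch boundary seen above (batch count nearly unchanged,
# so the batch factor cancels):
print(eden_lr(1.0, 40000, 4) / eden_lr(1.0, 40000, 3))  # ~0.93
```
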
+2024-08-03 04:46:05,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=40300.333333333336, ans=0.0
+2024-08-03 04:46:07,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=40300.333333333336, ans=0.0
+2024-08-03 04:46:12,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=40337.0, ans=0.0021006521739130434
+2024-08-03 04:46:17,441 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:46:28,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=40410.333333333336, ans=0.04949747468305833
+2024-08-03 04:46:29,584 INFO [train.py:1114] (3/4) Epoch 4, batch 100, loss[loss=0.2557, simple_loss=0.3232, pruned_loss=0.09409, over 13531.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.359, pruned_loss=0.1183, over 1025815.50 frames. ], batch size: 35, lr: 2.94e-02, grad_scale: 32.0
+2024-08-03 04:46:33,366 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.339e+02 1.516e+02 1.849e+02 3.720e+02, threshold=3.031e+02, percent-clipped=4.0
+2024-08-03 04:46:59,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.54 vs. limit=22.5
+2024-08-03 04:47:00,236 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=10.0
+2024-08-03 04:47:05,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=40520.333333333336, ans=0.05
+2024-08-03 04:47:05,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.12 vs. limit=22.5
+2024-08-03 04:47:06,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=40520.333333333336, ans=0.125
+2024-08-03 04:47:06,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.46 vs. limit=15.0
+2024-08-03 04:47:16,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=40593.666666666664, ans=0.125
+2024-08-03 04:47:17,274 INFO [train.py:1114] (3/4) Epoch 4, batch 150, loss[loss=0.2461, simple_loss=0.3151, pruned_loss=0.08861, over 13392.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3536, pruned_loss=0.1143, over 1387081.41 frames. ], batch size: 32, lr: 2.94e-02, grad_scale: 32.0
+2024-08-03 04:47:34,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=40630.333333333336, ans=22.5
+2024-08-03 04:47:40,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=40667.0, ans=0.2
+2024-08-03 04:47:47,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=40703.666666666664, ans=0.125
+2024-08-03 04:47:48,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.54 vs. limit=15.0
+2024-08-03 04:47:50,319 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:47:54,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40703.666666666664, ans=0.125
+2024-08-03 04:47:54,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40740.333333333336, ans=0.125
+2024-08-03 04:48:04,765 INFO [train.py:1114] (3/4) Epoch 4, batch 200, loss[loss=0.2786, simple_loss=0.3465, pruned_loss=0.1053, over 12509.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3511, pruned_loss=0.1133, over 1665710.66 frames. ], batch size: 58, lr: 2.93e-02, grad_scale: 16.0
+2024-08-03 04:48:05,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.21 vs. limit=22.5
+2024-08-03 04:48:08,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40777.0, ans=0.125
+2024-08-03 04:48:09,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.266e+02 1.437e+02 1.719e+02 2.508e+02, threshold=2.875e+02, percent-clipped=0.0
+2024-08-03 04:48:24,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=40850.333333333336, ans=0.07
+2024-08-03 04:48:33,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0
+2024-08-03 04:48:50,622 INFO [train.py:1114] (3/4) Epoch 4, batch 250, loss[loss=0.3372, simple_loss=0.3919, pruned_loss=0.1412, over 13347.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3508, pruned_loss=0.1127, over 1884735.98 frames. ], batch size: 46, lr: 2.93e-02, grad_scale: 16.0
+2024-08-03 04:49:04,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=15.0
+2024-08-03 04:49:13,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41033.666666666664, ans=0.1
+2024-08-03 04:49:27,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=41070.333333333336, ans=0.0
+2024-08-03 04:49:27,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.11 vs. limit=10.0
+2024-08-03 04:49:32,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.11 vs. limit=22.5
+2024-08-03 04:49:41,443 INFO [train.py:1114] (3/4) Epoch 4, batch 300, loss[loss=0.2963, simple_loss=0.3574, pruned_loss=0.1176, over 13432.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.351, pruned_loss=0.1127, over 2052068.54 frames. ], batch size: 42, lr: 2.92e-02, grad_scale: 16.0
+2024-08-03 04:49:42,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=15.0
+2024-08-03 04:49:46,016 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.331e+02 1.504e+02 1.895e+02 3.054e+02, threshold=3.007e+02, percent-clipped=2.0
+2024-08-03 04:49:50,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=41180.333333333336, ans=0.025
+2024-08-03 04:49:57,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41180.333333333336, ans=0.125
+2024-08-03 04:49:59,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41180.333333333336, ans=0.125
+2024-08-03 04:50:19,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. limit=6.0
+2024-08-03 04:50:20,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=22.5
+2024-08-03 04:50:20,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=41290.333333333336, ans=0.125
+2024-08-03 04:50:27,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=41290.333333333336, ans=0.0
+2024-08-03 04:50:29,618 INFO [train.py:1114] (3/4) Epoch 4, batch 350, loss[loss=0.2526, simple_loss=0.3158, pruned_loss=0.09468, over 13582.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3514, pruned_loss=0.1129, over 2182659.01 frames. ], batch size: 33, lr: 2.92e-02, grad_scale: 16.0
+2024-08-03 04:50:29,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=41327.0, ans=0.125
+2024-08-03 04:51:17,213 INFO [train.py:1114] (3/4) Epoch 4, batch 400, loss[loss=0.2857, simple_loss=0.3504, pruned_loss=0.1105, over 13368.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3504, pruned_loss=0.1127, over 2287040.78 frames. ], batch size: 37, lr: 2.91e-02, grad_scale: 32.0
+2024-08-03 04:51:21,801 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.350e+02 1.537e+02 1.828e+02 3.072e+02, threshold=3.074e+02, percent-clipped=1.0
+2024-08-03 04:51:25,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=41547.0, ans=10.0
+2024-08-03 04:51:54,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.96 vs. limit=15.0
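
The `grad_scale` field (8.0, 16.0, and 32.0 at different points above) tracks the dynamic loss scale used for mixed-precision training: the scaler halves the scale when a step overflows and grows it again after a run of clean steps. A standard PyTorch pattern showing where such values come from (illustrative; the actual training loop in train.py is more involved):

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0)

def train_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()   # backprop on the scaled loss
    scaler.step(optimizer)          # skips the update on inf/nan gradients
    scaler.update()                 # halves the scale after an overflow,
                                    # doubles it after enough clean steps
    return loss.detach(), scaler.get_scale()  # the logged "grad_scale"
```
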
+2024-08-03 04:52:01,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.17 vs. limit=22.5
+2024-08-03 04:52:05,340 INFO [train.py:1114] (3/4) Epoch 4, batch 450, loss[loss=0.2685, simple_loss=0.346, pruned_loss=0.09551, over 13564.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.351, pruned_loss=0.1134, over 2359609.02 frames. ], batch size: 38, lr: 2.91e-02, grad_scale: 32.0
+2024-08-03 04:52:14,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.94 vs. limit=15.0
+2024-08-03 04:52:16,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=41693.666666666664, ans=0.5
+2024-08-03 04:52:20,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=41730.333333333336, ans=0.125
+2024-08-03 04:52:46,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=41803.666666666664, ans=0.0
+2024-08-03 04:52:46,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41803.666666666664, ans=0.1
+2024-08-03 04:52:49,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=41840.333333333336, ans=0.0
+2024-08-03 04:52:58,606 INFO [train.py:1114] (3/4) Epoch 4, batch 500, loss[loss=0.2907, simple_loss=0.3558, pruned_loss=0.1129, over 13419.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3489, pruned_loss=0.1117, over 2425433.04 frames. ], batch size: 43, lr: 2.90e-02, grad_scale: 16.0
+2024-08-03 04:53:00,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=12.0
+2024-08-03 04:53:04,080 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.429e+02 1.668e+02 2.120e+02 3.628e+02, threshold=3.335e+02, percent-clipped=2.0
+2024-08-03 04:53:12,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=41913.666666666664, ans=0.125
+2024-08-03 04:53:32,608 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 04:53:37,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=42023.666666666664, ans=0.125
+2024-08-03 04:53:39,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.52 vs. limit=22.5
+2024-08-03 04:53:47,139 INFO [train.py:1114] (3/4) Epoch 4, batch 550, loss[loss=0.2973, simple_loss=0.364, pruned_loss=0.1153, over 13038.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3484, pruned_loss=0.1113, over 2468090.69 frames. ], batch size: 48, lr: 2.90e-02, grad_scale: 16.0
+2024-08-03 04:53:59,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=42097.0, ans=0.07
+2024-08-03 04:54:01,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=42097.0, ans=0.07
+2024-08-03 04:54:15,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.39 vs. limit=22.5
+2024-08-03 04:54:20,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=42170.333333333336, ans=0.2
+2024-08-03 04:54:24,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0
+2024-08-03 04:54:27,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42207.0, ans=0.125
+2024-08-03 04:54:32,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.53 vs. limit=22.5
+2024-08-03 04:54:34,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42243.666666666664, ans=0.1
+2024-08-03 04:54:35,659 INFO [train.py:1114] (3/4) Epoch 4, batch 600, loss[loss=0.2976, simple_loss=0.3626, pruned_loss=0.1162, over 13274.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3483, pruned_loss=0.1111, over 2507897.70 frames. ], batch size: 46, lr: 2.90e-02, grad_scale: 8.0
+2024-08-03 04:54:39,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=42243.666666666664, ans=0.125
+2024-08-03 04:54:41,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=42243.666666666664, ans=10.0
+2024-08-03 04:54:42,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.301e+02 1.482e+02 1.829e+02 3.304e+02, threshold=2.963e+02, percent-clipped=0.0
+2024-08-03 04:54:46,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42280.333333333336, ans=0.1
+2024-08-03 04:54:55,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=42317.0, ans=0.125
+2024-08-03 04:55:13,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.93 vs. limit=15.0
+2024-08-03 04:55:19,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=42390.333333333336, ans=0.125
+2024-08-03 04:55:26,870 INFO [train.py:1114] (3/4) Epoch 4, batch 650, loss[loss=0.2313, simple_loss=0.3033, pruned_loss=0.07971, over 13552.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3464, pruned_loss=0.1098, over 2543220.03 frames. ], batch size: 37, lr: 2.89e-02, grad_scale: 8.0
+2024-08-03 04:55:27,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0
+2024-08-03 04:55:49,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=42463.666666666664, ans=0.125
+2024-08-03 04:56:00,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.96 vs. limit=12.0
+2024-08-03 04:56:11,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0
+2024-08-03 04:56:12,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=42573.666666666664, ans=0.125
+2024-08-03 04:56:17,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.45 vs. limit=15.0
+2024-08-03 04:56:19,614 INFO [train.py:1114] (3/4) Epoch 4, batch 700, loss[loss=0.2525, simple_loss=0.3242, pruned_loss=0.09042, over 13533.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3468, pruned_loss=0.1095, over 2564491.01 frames. ], batch size: 35, lr: 2.89e-02, grad_scale: 8.0
+2024-08-03 04:56:24,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42610.333333333336, ans=0.1
+2024-08-03 04:56:26,128 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.281e+02 1.426e+02 1.623e+02 2.957e+02, threshold=2.853e+02, percent-clipped=0.0
+2024-08-03 04:56:57,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=42720.333333333336, ans=0.2
+2024-08-03 04:56:59,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=42720.333333333336, ans=0.0015825362318840578
+2024-08-03 04:57:03,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=42757.0, ans=0.02
+2024-08-03 04:57:09,842 INFO [train.py:1114] (3/4) Epoch 4, batch 750, loss[loss=0.2834, simple_loss=0.3446, pruned_loss=0.1111, over 13351.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3459, pruned_loss=0.1088, over 2581405.32 frames. ], batch size: 37, lr: 2.88e-02, grad_scale: 8.0
+2024-08-03 04:57:15,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=42793.666666666664, ans=0.0
+2024-08-03 04:57:16,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=42793.666666666664, ans=0.125
+2024-08-03 04:57:28,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=42830.333333333336, ans=0.125
+2024-08-03 04:57:39,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=42903.666666666664, ans=0.125
+2024-08-03 04:57:56,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42940.333333333336, ans=0.125
+2024-08-03 04:57:59,779 INFO [train.py:1114] (3/4) Epoch 4, batch 800, loss[loss=0.2296, simple_loss=0.3046, pruned_loss=0.07732, over 13333.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3459, pruned_loss=0.1086, over 2596231.63 frames. ], batch size: 33, lr: 2.88e-02, grad_scale: 16.0
+2024-08-03 04:58:06,221 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.323e+02 1.556e+02 1.905e+02 4.049e+02, threshold=3.112e+02, percent-clipped=3.0
+2024-08-03 04:58:12,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=43013.666666666664, ans=0.05
+2024-08-03 04:58:25,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43050.333333333336, ans=0.125
+2024-08-03 04:58:31,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0
+2024-08-03 04:58:42,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=43123.666666666664, ans=0.2
+2024-08-03 04:58:45,957 INFO [train.py:1114] (3/4) Epoch 4, batch 850, loss[loss=0.3375, simple_loss=0.3952, pruned_loss=0.1399, over 13337.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3464, pruned_loss=0.1091, over 2608850.11 frames. ], batch size: 40, lr: 2.87e-02, grad_scale: 16.0
+2024-08-03 04:58:53,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=43160.333333333336, ans=0.09899494936611666
+2024-08-03 04:58:56,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=43197.0, ans=0.125
+2024-08-03 04:59:09,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=43233.666666666664, ans=0.09899494936611666
+2024-08-03 04:59:10,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0
+2024-08-03 04:59:11,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.61 vs. limit=6.0
+2024-08-03 04:59:34,792 INFO [train.py:1114] (3/4) Epoch 4, batch 900, loss[loss=0.269, simple_loss=0.326, pruned_loss=0.106, over 13349.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3465, pruned_loss=0.1095, over 2612183.86 frames.
], batch size: 33, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 04:59:40,972 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.400e+02 1.608e+02 1.991e+02 3.200e+02, threshold=3.215e+02, percent-clipped=1.0 +2024-08-03 04:59:41,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=43343.666666666664, ans=0.125 +2024-08-03 04:59:42,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43343.666666666664, ans=0.1 +2024-08-03 04:59:47,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=43380.333333333336, ans=0.125 +2024-08-03 05:00:06,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=43453.666666666664, ans=0.125 +2024-08-03 05:00:19,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=43490.333333333336, ans=0.1 +2024-08-03 05:00:19,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=43490.333333333336, ans=0.125 +2024-08-03 05:00:22,623 INFO [train.py:1114] (3/4) Epoch 4, batch 950, loss[loss=0.2585, simple_loss=0.3195, pruned_loss=0.09871, over 13544.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3464, pruned_loss=0.1093, over 2612363.18 frames. ], batch size: 34, lr: 2.87e-02, grad_scale: 16.0 +2024-08-03 05:00:27,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.88 vs. limit=15.0 +2024-08-03 05:00:30,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.25 vs. limit=15.0 +2024-08-03 05:00:43,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=43600.333333333336, ans=0.0 +2024-08-03 05:00:44,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43600.333333333336, ans=0.1 +2024-08-03 05:01:02,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=43673.666666666664, ans=0.125 +2024-08-03 05:01:11,443 INFO [train.py:1114] (3/4) Epoch 4, batch 1000, loss[loss=0.2901, simple_loss=0.3602, pruned_loss=0.11, over 13370.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3478, pruned_loss=0.1102, over 2611438.92 frames. 
], batch size: 35, lr: 2.86e-02, grad_scale: 16.0 +2024-08-03 05:01:16,119 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:01:17,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.299e+02 1.424e+02 1.610e+02 2.784e+02, threshold=2.848e+02, percent-clipped=0.0 +2024-08-03 05:01:38,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=43783.666666666664, ans=0.0 +2024-08-03 05:01:49,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=43857.0, ans=0.07 +2024-08-03 05:01:59,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43893.666666666664, ans=0.1 +2024-08-03 05:01:59,637 INFO [train.py:1114] (3/4) Epoch 4, batch 1050, loss[loss=0.2889, simple_loss=0.3492, pruned_loss=0.1144, over 13575.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3458, pruned_loss=0.1092, over 2615824.60 frames. ], batch size: 39, lr: 2.86e-02, grad_scale: 16.0 +2024-08-03 05:02:02,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=43893.666666666664, ans=0.125 +2024-08-03 05:02:02,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43893.666666666664, ans=0.125 +2024-08-03 05:02:12,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=43930.333333333336, ans=0.0 +2024-08-03 05:02:13,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=43930.333333333336, ans=0.2 +2024-08-03 05:02:17,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=43967.0, ans=0.0 +2024-08-03 05:02:19,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=43967.0, ans=0.0 +2024-08-03 05:02:39,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.58 vs. limit=15.0 +2024-08-03 05:02:42,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=44003.666666666664, ans=0.0 +2024-08-03 05:02:45,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=44040.333333333336, ans=0.125 +2024-08-03 05:02:53,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=44040.333333333336, ans=0.125 +2024-08-03 05:02:54,674 INFO [train.py:1114] (3/4) Epoch 4, batch 1100, loss[loss=0.26, simple_loss=0.3313, pruned_loss=0.09439, over 13567.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3453, pruned_loss=0.1089, over 2619329.97 frames. 
], batch size: 36, lr: 2.85e-02, grad_scale: 16.0 +2024-08-03 05:02:59,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=44077.0, ans=0.125 +2024-08-03 05:03:00,988 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.374e+02 1.585e+02 1.899e+02 4.895e+02, threshold=3.171e+02, percent-clipped=1.0 +2024-08-03 05:03:01,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=12.0 +2024-08-03 05:03:09,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.92 vs. limit=22.5 +2024-08-03 05:03:33,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-08-03 05:03:34,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=44223.666666666664, ans=0.2 +2024-08-03 05:03:40,659 INFO [train.py:1114] (3/4) Epoch 4, batch 1150, loss[loss=0.31, simple_loss=0.357, pruned_loss=0.1315, over 13564.00 frames. ], tot_loss[loss=0.282, simple_loss=0.3455, pruned_loss=0.1093, over 2618746.36 frames. ], batch size: 36, lr: 2.85e-02, grad_scale: 16.0 +2024-08-03 05:03:51,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.26 vs. limit=6.0 +2024-08-03 05:04:05,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=44333.666666666664, ans=0.1 +2024-08-03 05:04:09,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=44333.666666666664, ans=0.125 +2024-08-03 05:04:10,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.51 vs. limit=22.5 +2024-08-03 05:04:10,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=44333.666666666664, ans=0.2 +2024-08-03 05:04:15,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.44 vs. limit=10.0 +2024-08-03 05:04:32,856 INFO [train.py:1114] (3/4) Epoch 4, batch 1200, loss[loss=0.2722, simple_loss=0.3478, pruned_loss=0.09836, over 13571.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3464, pruned_loss=0.1097, over 2615796.24 frames. ], batch size: 39, lr: 2.84e-02, grad_scale: 32.0 +2024-08-03 05:04:34,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44443.666666666664, ans=0.1 +2024-08-03 05:04:37,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.29 vs. 
limit=22.5 +2024-08-03 05:04:39,344 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.292e+02 1.456e+02 1.641e+02 3.622e+02, threshold=2.911e+02, percent-clipped=1.0 +2024-08-03 05:05:01,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=44553.666666666664, ans=0.1 +2024-08-03 05:06:21,861 INFO [train.py:1114] (3/4) Epoch 4, batch 1250, loss[loss=0.2824, simple_loss=0.3577, pruned_loss=0.1036, over 13427.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3466, pruned_loss=0.1093, over 2628189.33 frames. ], batch size: 42, lr: 2.84e-02, grad_scale: 32.0 +2024-08-03 05:06:57,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=44737.0, ans=0.125 +2024-08-03 05:06:57,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44737.0, ans=0.125 +2024-08-03 05:06:58,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=44773.666666666664, ans=0.025 +2024-08-03 05:06:59,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44773.666666666664, ans=0.1 +2024-08-03 05:07:02,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=44773.666666666664, ans=0.0 +2024-08-03 05:07:08,954 INFO [train.py:1114] (3/4) Epoch 4, batch 1300, loss[loss=0.3098, simple_loss=0.369, pruned_loss=0.1253, over 12946.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3454, pruned_loss=0.1082, over 2631389.02 frames. ], batch size: 52, lr: 2.84e-02, grad_scale: 16.0 +2024-08-03 05:07:18,016 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.334e+02 1.637e+02 2.034e+02 3.739e+02, threshold=3.274e+02, percent-clipped=6.0 +2024-08-03 05:07:23,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44847.0, ans=0.1 +2024-08-03 05:07:39,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.41 vs. limit=12.0 +2024-08-03 05:07:49,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=44920.333333333336, ans=0.125 +2024-08-03 05:08:00,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=44993.666666666664, ans=0.05 +2024-08-03 05:08:01,413 INFO [train.py:1114] (3/4) Epoch 4, batch 1350, loss[loss=0.2692, simple_loss=0.3368, pruned_loss=0.1008, over 13564.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3447, pruned_loss=0.1076, over 2639063.20 frames. ], batch size: 37, lr: 2.83e-02, grad_scale: 8.0 +2024-08-03 05:08:07,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=44993.666666666664, ans=0.0 +2024-08-03 05:08:12,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45030.333333333336, ans=0.125 +2024-08-03 05:08:16,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=2.97 vs. 
limit=15.0 +2024-08-03 05:08:19,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=45067.0, ans=0.125 +2024-08-03 05:08:22,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45067.0, ans=0.1 +2024-08-03 05:08:31,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=45103.666666666664, ans=0.001064420289855074 +2024-08-03 05:08:39,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=45140.333333333336, ans=0.125 +2024-08-03 05:08:50,554 INFO [train.py:1114] (3/4) Epoch 4, batch 1400, loss[loss=0.2483, simple_loss=0.3114, pruned_loss=0.09258, over 13258.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3448, pruned_loss=0.1077, over 2642884.90 frames. ], batch size: 31, lr: 2.83e-02, grad_scale: 8.0 +2024-08-03 05:08:58,651 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.343e+02 1.530e+02 1.906e+02 3.012e+02, threshold=3.060e+02, percent-clipped=0.0 +2024-08-03 05:09:11,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=45250.333333333336, ans=0.0 +2024-08-03 05:09:35,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=45323.666666666664, ans=0.0 +2024-08-03 05:09:38,858 INFO [train.py:1114] (3/4) Epoch 4, batch 1450, loss[loss=0.2736, simple_loss=0.3514, pruned_loss=0.09787, over 13410.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3455, pruned_loss=0.1079, over 2642368.82 frames. ], batch size: 43, lr: 2.82e-02, grad_scale: 8.0 +2024-08-03 05:09:39,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.36 vs. limit=15.0 +2024-08-03 05:09:41,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.07 vs. limit=15.0 +2024-08-03 05:10:05,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=45433.666666666664, ans=0.2 +2024-08-03 05:10:07,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.01 vs. limit=15.0 +2024-08-03 05:10:11,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-08-03 05:10:28,999 INFO [train.py:1114] (3/4) Epoch 4, batch 1500, loss[loss=0.2968, simple_loss=0.3573, pruned_loss=0.1182, over 13412.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3461, pruned_loss=0.1081, over 2642393.24 frames. ], batch size: 39, lr: 2.82e-02, grad_scale: 8.0 +2024-08-03 05:10:34,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.04 vs. 
limit=15.0 +2024-08-03 05:10:37,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.331e+02 1.463e+02 1.698e+02 3.158e+02, threshold=2.927e+02, percent-clipped=1.0 +2024-08-03 05:11:11,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=45653.666666666664, ans=0.125 +2024-08-03 05:11:13,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=45690.333333333336, ans=0.125 +2024-08-03 05:11:13,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=45690.333333333336, ans=0.0 +2024-08-03 05:11:23,490 INFO [train.py:1114] (3/4) Epoch 4, batch 1550, loss[loss=0.2948, simple_loss=0.3645, pruned_loss=0.1126, over 13389.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3463, pruned_loss=0.1086, over 2631701.21 frames. ], batch size: 41, lr: 2.81e-02, grad_scale: 8.0 +2024-08-03 05:11:33,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=45763.666666666664, ans=0.0 +2024-08-03 05:11:37,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=15.0 +2024-08-03 05:11:43,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=45800.333333333336, ans=0.0 +2024-08-03 05:11:43,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=45800.333333333336, ans=0.125 +2024-08-03 05:11:52,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=45800.333333333336, ans=0.0009129710144927528 +2024-08-03 05:11:58,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=45837.0, ans=0.125 +2024-08-03 05:12:15,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=45873.666666666664, ans=0.0008970289855072464 +2024-08-03 05:12:21,099 INFO [train.py:1114] (3/4) Epoch 4, batch 1600, loss[loss=0.2743, simple_loss=0.3493, pruned_loss=0.09962, over 13561.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3456, pruned_loss=0.1087, over 2624692.17 frames. ], batch size: 39, lr: 2.81e-02, grad_scale: 16.0 +2024-08-03 05:12:21,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=45910.333333333336, ans=0.125 +2024-08-03 05:12:28,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=45910.333333333336, ans=0.0 +2024-08-03 05:12:30,798 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.396e+02 1.598e+02 1.877e+02 3.901e+02, threshold=3.195e+02, percent-clipped=2.0 +2024-08-03 05:12:32,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=45947.0, ans=0.5 +2024-08-03 05:12:35,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.47 vs. 
limit=6.0 +2024-08-03 05:12:56,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46020.333333333336, ans=0.125 +2024-08-03 05:13:05,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.01 vs. limit=15.0 +2024-08-03 05:13:10,772 INFO [train.py:1114] (3/4) Epoch 4, batch 1650, loss[loss=0.3201, simple_loss=0.3807, pruned_loss=0.1297, over 13332.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3463, pruned_loss=0.1096, over 2621877.52 frames. ], batch size: 40, lr: 2.81e-02, grad_scale: 16.0 +2024-08-03 05:13:15,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=46093.666666666664, ans=0.0 +2024-08-03 05:13:26,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0 +2024-08-03 05:14:01,233 INFO [train.py:1114] (3/4) Epoch 4, batch 1700, loss[loss=0.2567, simple_loss=0.3131, pruned_loss=0.1001, over 13245.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3452, pruned_loss=0.1084, over 2630659.05 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0 +2024-08-03 05:14:03,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=15.0 +2024-08-03 05:14:09,448 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.356e+02 1.607e+02 2.015e+02 3.197e+02, threshold=3.213e+02, percent-clipped=1.0 +2024-08-03 05:14:22,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=46350.333333333336, ans=0.125 +2024-08-03 05:14:22,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.30 vs. limit=22.5 +2024-08-03 05:14:45,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=46423.666666666664, ans=0.2 +2024-08-03 05:14:47,549 INFO [train.py:1114] (3/4) Epoch 4, batch 1750, loss[loss=0.2531, simple_loss=0.3178, pruned_loss=0.09421, over 13553.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3451, pruned_loss=0.1085, over 2634132.76 frames. ], batch size: 31, lr: 2.80e-02, grad_scale: 16.0 +2024-08-03 05:15:05,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=46497.0, ans=0.0007615217391304351 +2024-08-03 05:15:20,295 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.88 vs. limit=15.0 +2024-08-03 05:18:12,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=46607.0, ans=0.125 +2024-08-03 05:18:13,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=46607.0, ans=0.0007376086956521737 +2024-08-03 05:18:21,912 INFO [train.py:1114] (3/4) Epoch 4, batch 1800, loss[loss=0.3092, simple_loss=0.3721, pruned_loss=0.1231, over 13553.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3455, pruned_loss=0.1084, over 2635350.30 frames. 
], batch size: 38, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:18:30,257 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.378e+02 1.581e+02 2.012e+02 3.618e+02, threshold=3.161e+02, percent-clipped=2.0 +2024-08-03 05:18:42,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46717.0, ans=0.125 +2024-08-03 05:18:58,075 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:19:09,817 INFO [train.py:1114] (3/4) Epoch 4, batch 1850, loss[loss=0.2793, simple_loss=0.3454, pruned_loss=0.1065, over 13406.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3453, pruned_loss=0.1082, over 2637663.47 frames. ], batch size: 39, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:19:09,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=46827.0, ans=0.025 +2024-08-03 05:19:35,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46900.333333333336, ans=0.125 +2024-08-03 05:19:42,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=46937.0, ans=0.5 +2024-08-03 05:19:43,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=46937.0, ans=0.0 +2024-08-03 05:19:52,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0 +2024-08-03 05:19:58,244 INFO [train.py:1114] (3/4) Epoch 4, batch 1900, loss[loss=0.2645, simple_loss=0.3439, pruned_loss=0.09257, over 13338.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3451, pruned_loss=0.1077, over 2640646.10 frames. ], batch size: 40, lr: 2.79e-02, grad_scale: 16.0 +2024-08-03 05:20:04,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47010.333333333336, ans=0.125 +2024-08-03 05:20:06,384 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.297e+02 1.477e+02 1.706e+02 2.975e+02, threshold=2.953e+02, percent-clipped=0.0 +2024-08-03 05:20:06,841 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0 +2024-08-03 05:20:13,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.29 vs. 
limit=10.0 +2024-08-03 05:20:15,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47047.0, ans=0.0 +2024-08-03 05:20:17,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=47083.666666666664, ans=0.125 +2024-08-03 05:20:35,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=47157.0, ans=0.125 +2024-08-03 05:20:38,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47157.0, ans=0.125 +2024-08-03 05:20:45,529 INFO [train.py:1114] (3/4) Epoch 4, batch 1950, loss[loss=0.2723, simple_loss=0.3416, pruned_loss=0.1015, over 13576.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3453, pruned_loss=0.107, over 2647200.60 frames. ], batch size: 36, lr: 2.78e-02, grad_scale: 16.0 +2024-08-03 05:21:09,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=47267.0, ans=0.035 +2024-08-03 05:21:17,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=47303.666666666664, ans=0.125 +2024-08-03 05:21:27,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=47340.333333333336, ans=0.0 +2024-08-03 05:21:29,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=47340.333333333336, ans=0.0005781884057971011 +2024-08-03 05:21:31,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=47340.333333333336, ans=0.125 +2024-08-03 05:21:34,017 INFO [train.py:1114] (3/4) Epoch 4, batch 2000, loss[loss=0.2337, simple_loss=0.2983, pruned_loss=0.08453, over 13548.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3457, pruned_loss=0.1074, over 2636912.01 frames. ], batch size: 31, lr: 2.78e-02, grad_scale: 32.0 +2024-08-03 05:21:42,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.720e+01 1.383e+02 1.598e+02 1.904e+02 4.710e+02, threshold=3.195e+02, percent-clipped=1.0 +2024-08-03 05:21:47,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.44 vs. limit=12.0 +2024-08-03 05:21:50,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.11 vs. limit=22.5 +2024-08-03 05:21:50,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47413.666666666664, ans=0.125 +2024-08-03 05:22:02,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=47487.0, ans=0.125 +2024-08-03 05:22:03,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47487.0, ans=0.1 +2024-08-03 05:22:14,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47523.666666666664, ans=0.1 +2024-08-03 05:22:20,555 INFO [train.py:1114] (3/4) Epoch 4, batch 2050, loss[loss=0.2536, simple_loss=0.3163, pruned_loss=0.09545, over 13409.00 frames. 
], tot_loss[loss=0.28, simple_loss=0.3448, pruned_loss=0.1076, over 2633287.58 frames. ], batch size: 32, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:22:22,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-03 05:22:23,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=47560.333333333336, ans=0.125 +2024-08-03 05:22:26,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=47560.333333333336, ans=0.04949747468305833 +2024-08-03 05:22:30,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=47597.0, ans=0.2 +2024-08-03 05:22:30,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.52 vs. limit=22.5 +2024-08-03 05:22:34,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=47597.0, ans=0.125 +2024-08-03 05:22:44,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=47633.666666666664, ans=0.125 +2024-08-03 05:22:46,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=47633.666666666664, ans=0.125 +2024-08-03 05:23:45,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=47670.333333333336, ans=0.125 +2024-08-03 05:23:51,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=47707.0, ans=10.0 +2024-08-03 05:23:56,611 INFO [train.py:1114] (3/4) Epoch 4, batch 2100, loss[loss=0.2622, simple_loss=0.3352, pruned_loss=0.09465, over 13535.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3429, pruned_loss=0.1059, over 2638972.21 frames. ], batch size: 37, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:24:01,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47743.666666666664, ans=0.125 +2024-08-03 05:24:06,853 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.313e+02 1.529e+02 1.934e+02 3.413e+02, threshold=3.058e+02, percent-clipped=1.0 +2024-08-03 05:24:09,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=47780.333333333336, ans=0.125 +2024-08-03 05:24:27,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=47853.666666666664, ans=0.125 +2024-08-03 05:24:31,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.64 vs. limit=15.0 +2024-08-03 05:24:42,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.68 vs. limit=12.0 +2024-08-03 05:24:47,070 INFO [train.py:1114] (3/4) Epoch 4, batch 2150, loss[loss=0.2553, simple_loss=0.3242, pruned_loss=0.09314, over 13554.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3425, pruned_loss=0.1056, over 2647511.99 frames. 
], batch size: 36, lr: 2.77e-02, grad_scale: 32.0 +2024-08-03 05:24:55,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=47963.666666666664, ans=0.125 +2024-08-03 05:25:00,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=15.0 +2024-08-03 05:25:01,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=15.0 +2024-08-03 05:25:06,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48000.333333333336, ans=0.1 +2024-08-03 05:25:08,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48000.333333333336, ans=0.1 +2024-08-03 05:25:17,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.59 vs. limit=15.0 +2024-08-03 05:25:18,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=48037.0, ans=0.125 +2024-08-03 05:25:24,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=48073.666666666664, ans=0.125 +2024-08-03 05:25:26,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=48073.666666666664, ans=0.0 +2024-08-03 05:25:31,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=48073.666666666664, ans=0.2 +2024-08-03 05:25:33,687 INFO [train.py:1114] (3/4) Epoch 4, batch 2200, loss[loss=0.2974, simple_loss=0.3653, pruned_loss=0.1147, over 13404.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3429, pruned_loss=0.1058, over 2644979.21 frames. ], batch size: 39, lr: 2.76e-02, grad_scale: 32.0 +2024-08-03 05:25:42,101 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.479e+02 1.728e+02 2.109e+02 3.412e+02, threshold=3.456e+02, percent-clipped=2.0 +2024-08-03 05:25:51,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=15.0 +2024-08-03 05:26:04,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.15 vs. limit=10.0 +2024-08-03 05:26:13,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=48257.0, ans=0.1 +2024-08-03 05:26:20,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48257.0, ans=0.125 +2024-08-03 05:26:21,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=48293.666666666664, ans=0.125 +2024-08-03 05:26:22,256 INFO [train.py:1114] (3/4) Epoch 4, batch 2250, loss[loss=0.2389, simple_loss=0.323, pruned_loss=0.07737, over 13342.00 frames. ], tot_loss[loss=0.2776, simple_loss=0.3433, pruned_loss=0.106, over 2642106.12 frames. 
], batch size: 37, lr: 2.76e-02, grad_scale: 32.0 +2024-08-03 05:26:42,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=48367.0, ans=0.2 +2024-08-03 05:26:44,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48367.0, ans=0.0 +2024-08-03 05:26:44,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.63 vs. limit=15.0 +2024-08-03 05:26:45,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=48367.0, ans=0.125 +2024-08-03 05:26:52,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=48403.666666666664, ans=0.125 +2024-08-03 05:26:58,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=48440.333333333336, ans=0.2 +2024-08-03 05:27:16,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=48477.0, ans=0.0003310869565217398 +2024-08-03 05:27:17,459 INFO [train.py:1114] (3/4) Epoch 4, batch 2300, loss[loss=0.2405, simple_loss=0.3031, pruned_loss=0.08898, over 13597.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3421, pruned_loss=0.106, over 2638686.88 frames. ], batch size: 33, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:27:30,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=48477.0, ans=0.125 +2024-08-03 05:27:34,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.23 vs. limit=22.5 +2024-08-03 05:27:38,604 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.390e+02 1.580e+02 1.913e+02 3.341e+02, threshold=3.160e+02, percent-clipped=0.0 +2024-08-03 05:28:08,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48623.666666666664, ans=0.1 +2024-08-03 05:28:09,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=48623.666666666664, ans=0.07 +2024-08-03 05:28:10,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.18 vs. limit=15.0 +2024-08-03 05:28:13,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=48623.666666666664, ans=0.05 +2024-08-03 05:28:18,341 INFO [train.py:1114] (3/4) Epoch 4, batch 2350, loss[loss=0.2617, simple_loss=0.3347, pruned_loss=0.0943, over 13554.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3421, pruned_loss=0.106, over 2641177.89 frames. ], batch size: 38, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:28:25,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=48660.333333333336, ans=0.125 +2024-08-03 05:28:35,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.86 vs. 
limit=5.0 +2024-08-03 05:28:48,796 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.442e-03 +2024-08-03 05:29:04,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=48770.333333333336, ans=0.2 +2024-08-03 05:29:17,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=48770.333333333336, ans=0.125 +2024-08-03 05:29:17,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=48770.333333333336, ans=0.0 +2024-08-03 05:29:35,513 INFO [train.py:1114] (3/4) Epoch 4, batch 2400, loss[loss=0.294, simple_loss=0.3496, pruned_loss=0.1192, over 13539.00 frames. ], tot_loss[loss=0.2776, simple_loss=0.3427, pruned_loss=0.1063, over 2642267.77 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 32.0 +2024-08-03 05:29:51,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=48843.666666666664, ans=0.2 +2024-08-03 05:29:51,917 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.312e+02 1.493e+02 1.944e+02 3.513e+02, threshold=2.987e+02, percent-clipped=1.0 +2024-08-03 05:30:00,693 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 05:30:19,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48917.0, ans=0.125 +2024-08-03 05:30:46,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=48953.666666666664, ans=0.2 +2024-08-03 05:31:27,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.07 vs. limit=15.0 +2024-08-03 05:31:28,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.74 vs. limit=15.0 +2024-08-03 05:31:38,650 INFO [train.py:1114] (3/4) Epoch 4, batch 2450, loss[loss=0.2909, simple_loss=0.3554, pruned_loss=0.1132, over 13347.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3435, pruned_loss=0.1068, over 2632504.41 frames. ], batch size: 37, lr: 2.74e-02, grad_scale: 32.0 +2024-08-03 05:31:57,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=15.0 +2024-08-03 05:32:36,638 INFO [train.py:1114] (3/4) Epoch 4, batch 2500, loss[loss=0.2902, simple_loss=0.3563, pruned_loss=0.1121, over 13395.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3435, pruned_loss=0.1064, over 2636967.49 frames. 
], batch size: 39, lr: 2.74e-02, grad_scale: 32.0 +2024-08-03 05:32:38,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=49210.333333333336, ans=0.2 +2024-08-03 05:32:38,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49210.333333333336, ans=0.125 +2024-08-03 05:32:41,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=49210.333333333336, ans=0.0001716666666666672 +2024-08-03 05:32:44,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.392e+02 1.612e+02 1.907e+02 3.604e+02, threshold=3.223e+02, percent-clipped=4.0 +2024-08-03 05:32:49,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-08-03 05:32:49,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=49247.0, ans=0.125 +2024-08-03 05:32:55,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-08-03 05:33:18,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=49320.333333333336, ans=0.125 +2024-08-03 05:33:35,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=49357.0, ans=0.0 +2024-08-03 05:33:42,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.16 vs. limit=22.5 +2024-08-03 05:33:43,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=49357.0, ans=0.95 +2024-08-03 05:33:44,740 INFO [train.py:1114] (3/4) Epoch 4, batch 2550, loss[loss=0.2617, simple_loss=0.3162, pruned_loss=0.1036, over 13534.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3438, pruned_loss=0.107, over 2638395.09 frames. ], batch size: 31, lr: 2.73e-02, grad_scale: 32.0 +2024-08-03 05:33:51,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=49393.666666666664, ans=0.2 +2024-08-03 05:33:54,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49393.666666666664, ans=0.1 +2024-08-03 05:33:55,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=49430.333333333336, ans=0.125 +2024-08-03 05:33:57,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0 +2024-08-03 05:34:02,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=49430.333333333336, ans=0.0 +2024-08-03 05:34:04,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.65 vs. 
limit=15.0
+2024-08-03 05:34:23,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=49503.666666666664, ans=0.125
+2024-08-03 05:34:36,364 INFO [train.py:1114] (3/4) Epoch 4, batch 2600, loss[loss=0.2768, simple_loss=0.3421, pruned_loss=0.1057, over 13567.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3439, pruned_loss=0.107, over 2637421.22 frames. ], batch size: 36, lr: 2.73e-02, grad_scale: 32.0
+2024-08-03 05:34:44,131 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.359e+02 1.570e+02 1.941e+02 3.532e+02, threshold=3.140e+02, percent-clipped=1.0
+2024-08-03 05:34:44,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=49613.666666666664, ans=0.2
+2024-08-03 05:34:44,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.95 vs. limit=15.0
+2024-08-03 05:35:01,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49650.333333333336, ans=0.1
+2024-08-03 05:35:07,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=49687.0, ans=0.0
+2024-08-03 05:35:09,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=49687.0, ans=0.2
+2024-08-03 05:35:24,687 INFO [train.py:1114] (3/4) Epoch 4, batch 2650, loss[loss=0.2893, simple_loss=0.3551, pruned_loss=0.1117, over 13248.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3441, pruned_loss=0.1067, over 2640203.40 frames. ], batch size: 46, lr: 2.73e-02, grad_scale: 16.0
+2024-08-03 05:35:31,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=49760.333333333336, ans=0.125
+2024-08-03 05:35:58,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=15.0
+2024-08-03 05:36:01,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=49907.0, ans=0.125
+2024-08-03 05:36:15,382 INFO [train.py:1114] (3/4) Epoch 4, batch 2700, loss[loss=0.3107, simple_loss=0.3749, pruned_loss=0.1233, over 13534.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3442, pruned_loss=0.107, over 2637591.19 frames. ], batch size: 40, lr: 2.72e-02, grad_scale: 16.0
+2024-08-03 05:36:21,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=49943.666666666664, ans=0.2
+2024-08-03 05:36:24,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.340e+02 1.529e+02 1.834e+02 2.682e+02, threshold=3.057e+02, percent-clipped=0.0
+2024-08-03 05:36:33,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=50017.0, ans=0.0
+2024-08-03 05:36:49,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=50053.666666666664, ans=0.0
+2024-08-03 05:37:03,511 INFO [train.py:1114] (3/4) Epoch 4, batch 2750, loss[loss=0.252, simple_loss=0.3139, pruned_loss=0.09504, over 13356.00 frames. ], tot_loss[loss=0.2776, simple_loss=0.3429, pruned_loss=0.1061, over 2634738.80 frames. ], batch size: 34, lr: 2.72e-02, grad_scale: 16.0
+2024-08-03 05:37:11,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=50163.666666666664, ans=0.125
+2024-08-03 05:37:14,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.24 vs. limit=12.0
+2024-08-03 05:37:14,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=50163.666666666664, ans=0.125
+2024-08-03 05:37:15,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=50163.666666666664, ans=0.0
+2024-08-03 05:37:15,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0
+2024-08-03 05:37:19,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=50200.333333333336, ans=0.125
+2024-08-03 05:37:21,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50200.333333333336, ans=0.125
+2024-08-03 05:37:46,911 INFO [train.py:1114] (3/4) Epoch 4, batch 2800, loss[loss=0.3524, simple_loss=0.3852, pruned_loss=0.1598, over 8939.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3432, pruned_loss=0.1068, over 2626816.94 frames. ], batch size: 96, lr: 2.72e-02, grad_scale: 32.0
+2024-08-03 05:37:55,719 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.137e+02 1.473e+02 1.737e+02 2.107e+02 3.108e+02, threshold=3.473e+02, percent-clipped=1.0
+2024-08-03 05:38:06,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=50383.666666666664, ans=0.125
+2024-08-03 05:38:11,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=50383.666666666664, ans=0.125
+2024-08-03 05:38:31,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=50493.666666666664, ans=0.0
+2024-08-03 05:38:31,614 INFO [train.py:1114] (3/4) Epoch 4, batch 2850, loss[loss=0.2454, simple_loss=0.318, pruned_loss=0.08639, over 13351.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3443, pruned_loss=0.1075, over 2621263.50 frames. ], batch size: 35, lr: 2.71e-02, grad_scale: 16.0
+2024-08-03 05:38:31,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=50493.666666666664, ans=0.125
+2024-08-03 05:39:04,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.08 vs. limit=22.5
+2024-08-03 05:39:06,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=50567.0, ans=0.125
+2024-08-03 05:39:20,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=50640.333333333336, ans=0.0
+2024-08-03 05:39:21,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50640.333333333336, ans=0.125
+2024-08-03 05:39:23,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50640.333333333336, ans=0.1
+2024-08-03 05:39:26,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=50677.0, ans=0.09899494936611666
+2024-08-03 05:39:27,372 INFO [train.py:1114] (3/4) Epoch 4, batch 2900, loss[loss=0.2708, simple_loss=0.3395, pruned_loss=0.1011, over 13367.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3447, pruned_loss=0.1068, over 2631714.20 frames. ], batch size: 36, lr: 2.71e-02, grad_scale: 16.0
+2024-08-03 05:39:33,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=50677.0, ans=0.2
+2024-08-03 05:39:39,682 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.312e+02 1.485e+02 1.747e+02 2.702e+02, threshold=2.970e+02, percent-clipped=0.0
+2024-08-03 05:39:41,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.72 vs. limit=15.0
+2024-08-03 05:39:44,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=50713.666666666664, ans=0.0
+2024-08-03 05:39:46,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.36 vs. limit=15.0
+2024-08-03 05:39:55,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50787.0, ans=0.1
+2024-08-03 05:39:55,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=50787.0, ans=0.125
+2024-08-03 05:40:04,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=50823.666666666664, ans=0.125
+2024-08-03 05:40:07,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=50823.666666666664, ans=0.0
+2024-08-03 05:40:07,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=50823.666666666664, ans=0.125
+2024-08-03 05:40:11,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=50823.666666666664, ans=0.0
+2024-08-03 05:40:13,933 INFO [train.py:1114] (3/4) Epoch 4, batch 2950, loss[loss=0.2511, simple_loss=0.3259, pruned_loss=0.08812, over 13334.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3431, pruned_loss=0.1062, over 2629640.77 frames. ], batch size: 34, lr: 2.70e-02, grad_scale: 16.0
+2024-08-03 05:40:14,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.32 vs. limit=15.0
+2024-08-03 05:40:44,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0
+2024-08-03 05:40:56,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=51007.0, ans=0.2
+2024-08-03 05:40:57,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=51007.0, ans=0.125
+2024-08-03 05:40:57,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=51007.0, ans=0.0
+2024-08-03 05:40:59,439 INFO [train.py:1114] (3/4) Epoch 4, batch 3000, loss[loss=0.2792, simple_loss=0.3433, pruned_loss=0.1075, over 13541.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3427, pruned_loss=0.1061, over 2628955.60 frames. ], batch size: 37, lr: 2.70e-02, grad_scale: 16.0
+2024-08-03 05:40:59,439 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 05:41:15,391 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2213, simple_loss=0.3178, pruned_loss=0.06237, over 944034.00 frames. 
+2024-08-03 05:41:15,392 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 05:41:28,400 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.441e+02 1.719e+02 2.426e+02 4.333e+02, threshold=3.438e+02, percent-clipped=13.0
+2024-08-03 05:41:35,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51117.0, ans=0.1
+2024-08-03 05:41:58,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=51190.333333333336, ans=0.125
+2024-08-03 05:42:00,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=51190.333333333336, ans=0.0
+2024-08-03 05:42:02,343 INFO [train.py:1114] (3/4) Epoch 4, batch 3050, loss[loss=0.2657, simple_loss=0.3194, pruned_loss=0.106, over 13530.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3436, pruned_loss=0.1067, over 2626404.12 frames. ], batch size: 35, lr: 2.70e-02, grad_scale: 16.0
+2024-08-03 05:42:15,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=51263.666666666664, ans=0.125
+2024-08-03 05:42:26,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=51300.333333333336, ans=0.125
+2024-08-03 05:42:27,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51337.0, ans=0.1
+2024-08-03 05:42:35,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=51337.0, ans=0.2
+2024-08-03 05:42:36,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=51337.0, ans=0.125
+2024-08-03 05:42:54,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=51373.666666666664, ans=0.025
+2024-08-03 05:42:57,229 INFO [train.py:1114] (3/4) Epoch 4, batch 3100, loss[loss=0.2857, simple_loss=0.3568, pruned_loss=0.1073, over 13319.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3425, pruned_loss=0.106, over 2626196.42 frames. ], batch size: 46, lr: 2.69e-02, grad_scale: 16.0
+2024-08-03 05:43:01,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=51410.333333333336, ans=0.125
+2024-08-03 05:43:03,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. limit=6.0
+2024-08-03 05:52:34,210 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.298e+02 1.531e+02 1.928e+02 3.998e+02, threshold=3.062e+02, percent-clipped=1.0
+2024-08-03 05:54:22,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.30 vs. limit=22.5
+2024-08-03 05:54:35,551 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:54:51,693 INFO [train.py:1114] (3/4) Epoch 4, batch 3150, loss[loss=0.2621, simple_loss=0.3366, pruned_loss=0.09383, over 13063.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3419, pruned_loss=0.1055, over 2628173.21 frames. ], batch size: 48, lr: 2.69e-02, grad_scale: 16.0
+2024-08-03 05:55:06,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51630.333333333336, ans=0.1
+2024-08-03 05:55:14,140 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:55:17,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.47 vs. limit=10.0
+2024-08-03 05:55:21,807 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 05:55:47,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51777.0, ans=0.125
+2024-08-03 05:55:48,107 INFO [train.py:1114] (3/4) Epoch 4, batch 3200, loss[loss=0.2605, simple_loss=0.3358, pruned_loss=0.09261, over 13543.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3416, pruned_loss=0.105, over 2633780.95 frames. ], batch size: 37, lr: 2.69e-02, grad_scale: 32.0
+2024-08-03 05:55:51,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0
+2024-08-03 05:55:54,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=51777.0, ans=0.125
+2024-08-03 05:55:57,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.804e+01 1.368e+02 1.621e+02 1.933e+02 3.574e+02, threshold=3.241e+02, percent-clipped=2.0
+2024-08-03 05:56:00,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=51813.666666666664, ans=0.125
+2024-08-03 05:56:09,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=51850.333333333336, ans=0.2
+2024-08-03 05:56:12,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=51850.333333333336, ans=0.125
+2024-08-03 05:56:14,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=51887.0, ans=0.07
+2024-08-03 05:56:23,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0
+2024-08-03 05:56:26,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=12.0
+2024-08-03 05:56:45,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=51923.666666666664, ans=0.0
+2024-08-03 05:56:51,454 INFO [train.py:1114] (3/4) Epoch 4, batch 3250, loss[loss=0.2673, simple_loss=0.3425, pruned_loss=0.096, over 13380.00 frames. ], tot_loss[loss=0.275, simple_loss=0.3413, pruned_loss=0.1043, over 2637979.40 frames. ], batch size: 38, lr: 2.68e-02, grad_scale: 32.0
+2024-08-03 05:58:07,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.79 vs. limit=22.5
+2024-08-03 05:58:22,585 INFO [train.py:1114] (3/4) Epoch 4, batch 3300, loss[loss=0.2995, simple_loss=0.3585, pruned_loss=0.1202, over 12919.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3412, pruned_loss=0.1048, over 2639836.62 frames. ], batch size: 52, lr: 2.68e-02, grad_scale: 32.0
+2024-08-03 05:58:34,519 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.337e+02 1.543e+02 1.796e+02 2.309e+02, threshold=3.087e+02, percent-clipped=0.0
+2024-08-03 05:58:49,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=52217.0, ans=0.5
+2024-08-03 05:59:09,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0
+2024-08-03 05:59:10,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=52290.333333333336, ans=0.125
+2024-08-03 05:59:13,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=52327.0, ans=0.0
+2024-08-03 05:59:14,201 INFO [train.py:1114] (3/4) Epoch 4, batch 3350, loss[loss=0.2727, simple_loss=0.357, pruned_loss=0.09416, over 13017.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3418, pruned_loss=0.1049, over 2630432.56 frames. ], batch size: 48, lr: 2.67e-02, grad_scale: 32.0
+2024-08-03 05:59:16,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=52327.0, ans=0.125
+2024-08-03 05:59:22,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=52363.666666666664, ans=0.2
+2024-08-03 05:59:22,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=52363.666666666664, ans=0.125
+2024-08-03 05:59:36,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.64 vs. limit=12.0
+2024-08-03 05:59:43,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. limit=6.0
+2024-08-03 05:59:57,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52473.666666666664, ans=0.0
+2024-08-03 06:00:05,676 INFO [train.py:1114] (3/4) Epoch 4, batch 3400, loss[loss=0.2803, simple_loss=0.3242, pruned_loss=0.1182, over 13534.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3419, pruned_loss=0.1053, over 2626306.20 frames. ], batch size: 31, lr: 2.67e-02, grad_scale: 32.0
+2024-08-03 06:00:06,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=52510.333333333336, ans=0.125
+2024-08-03 06:00:15,141 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.426e+02 1.702e+02 2.054e+02 4.258e+02, threshold=3.404e+02, percent-clipped=2.0
+2024-08-03 06:00:18,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52547.0, ans=0.125
+2024-08-03 06:00:20,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=52547.0, ans=0.1
+2024-08-03 06:00:24,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=52583.666666666664, ans=0.0
+2024-08-03 06:00:26,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=52583.666666666664, ans=0.125
+2024-08-03 06:00:28,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52583.666666666664, ans=0.125
+2024-08-03 06:00:34,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=15.0
+2024-08-03 06:00:41,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=52657.0, ans=0.125
+2024-08-03 06:00:48,744 INFO [train.py:1114] (3/4) Epoch 4, batch 3450, loss[loss=0.3327, simple_loss=0.3926, pruned_loss=0.1364, over 12966.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3414, pruned_loss=0.1048, over 2630313.18 frames. ], batch size: 52, lr: 2.67e-02, grad_scale: 32.0
+2024-08-03 06:00:51,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.92 vs. limit=8.0
+2024-08-03 06:00:51,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=52693.666666666664, ans=0.125
+2024-08-03 06:00:57,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.70 vs. limit=6.0
+2024-08-03 06:01:02,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=52730.333333333336, ans=0.125
+2024-08-03 06:01:03,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52730.333333333336, ans=0.125
+2024-08-03 06:01:22,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=52803.666666666664, ans=0.125
+2024-08-03 06:01:29,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=52840.333333333336, ans=0.125
+2024-08-03 06:01:36,371 INFO [train.py:1114] (3/4) Epoch 4, batch 3500, loss[loss=0.2499, simple_loss=0.3149, pruned_loss=0.09251, over 13541.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3403, pruned_loss=0.1045, over 2631915.65 frames. ], batch size: 34, lr: 2.66e-02, grad_scale: 32.0
+2024-08-03 06:01:40,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=52877.0, ans=0.125
+2024-08-03 06:01:45,562 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.329e+02 1.542e+02 1.871e+02 3.471e+02, threshold=3.085e+02, percent-clipped=1.0
+2024-08-03 06:01:56,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=52913.666666666664, ans=0.125
+2024-08-03 06:02:22,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=52987.0, ans=0.0
+2024-08-03 06:02:26,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=52987.0, ans=0.2
+2024-08-03 06:02:32,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=53023.666666666664, ans=0.0
+2024-08-03 06:02:37,235 INFO [train.py:1114] (3/4) Epoch 4, batch 3550, loss[loss=0.287, simple_loss=0.3525, pruned_loss=0.1107, over 12700.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3429, pruned_loss=0.1061, over 2629640.28 frames. ], batch size: 59, lr: 2.66e-02, grad_scale: 32.0
+2024-08-03 06:02:52,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=53060.333333333336, ans=0.0
+2024-08-03 06:02:53,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=53060.333333333336, ans=0.0
+2024-08-03 06:03:05,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53133.666666666664, ans=0.125
+2024-08-03 06:03:21,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53170.333333333336, ans=0.125
+2024-08-03 06:03:32,779 INFO [train.py:1114] (3/4) Epoch 4, batch 3600, loss[loss=0.3706, simple_loss=0.4037, pruned_loss=0.1688, over 9420.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3495, pruned_loss=0.1129, over 2489355.42 frames. ], batch size: 96, lr: 2.66e-02, grad_scale: 16.0
+2024-08-03 06:03:34,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=53243.666666666664, ans=0.5
+2024-08-03 06:03:35,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=53243.666666666664, ans=0.0
+2024-08-03 06:03:57,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.338e+02 1.465e+02 1.631e+02 2.841e+02, threshold=2.930e+02, percent-clipped=0.0
+2024-08-03 06:04:16,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=53353.666666666664, ans=0.2
+2024-08-03 06:04:21,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=53390.333333333336, ans=0.07
+2024-08-03 06:05:08,619 INFO [train.py:1114] (3/4) Epoch 5, batch 0, loss[loss=0.2573, simple_loss=0.3224, pruned_loss=0.09612, over 13321.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3224, pruned_loss=0.09612, over 13321.00 frames. ], batch size: 33, lr: 2.47e-02, grad_scale: 32.0
+2024-08-03 06:05:08,620 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 06:05:18,615 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.231, simple_loss=0.3271, pruned_loss=0.06749, over 944034.00 frames. 
+2024-08-03 06:05:18,616 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 06:05:25,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=53394.0, ans=0.125
+2024-08-03 06:05:27,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=53430.666666666664, ans=0.09899494936611666
+2024-08-03 06:05:35,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=53467.333333333336, ans=0.0
+2024-08-03 06:05:49,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=53504.0, ans=0.125
+2024-08-03 06:05:56,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=53540.666666666664, ans=0.125
+2024-08-03 06:06:04,993 INFO [train.py:1114] (3/4) Epoch 5, batch 50, loss[loss=0.2219, simple_loss=0.3009, pruned_loss=0.07141, over 13439.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3429, pruned_loss=0.1047, over 578825.49 frames. ], batch size: 32, lr: 2.47e-02, grad_scale: 32.0
+2024-08-03 06:06:24,699 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.396e+02 1.612e+02 2.008e+02 3.505e+02, threshold=3.224e+02, percent-clipped=4.0
+2024-08-03 06:06:32,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=53687.333333333336, ans=0.125
+2024-08-03 06:06:49,895 INFO [train.py:1114] (3/4) Epoch 5, batch 100, loss[loss=0.2561, simple_loss=0.324, pruned_loss=0.09409, over 13532.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3427, pruned_loss=0.1039, over 1025849.90 frames. ], batch size: 35, lr: 2.46e-02, grad_scale: 32.0
+2024-08-03 06:06:57,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=53760.666666666664, ans=0.025
+2024-08-03 06:07:00,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53797.333333333336, ans=0.125
+2024-08-03 06:07:20,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=12.0
+2024-08-03 06:07:25,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=53870.666666666664, ans=0.0
+2024-08-03 06:07:26,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53870.666666666664, ans=0.125
+2024-08-03 06:07:29,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=53870.666666666664, ans=0.035
+2024-08-03 06:07:42,376 INFO [train.py:1114] (3/4) Epoch 5, batch 150, loss[loss=0.2227, simple_loss=0.2981, pruned_loss=0.07363, over 13407.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3384, pruned_loss=0.1016, over 1387251.04 frames. ], batch size: 32, lr: 2.46e-02, grad_scale: 32.0
+2024-08-03 06:07:43,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=53944.0, ans=0.025
+2024-08-03 06:07:51,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=53980.666666666664, ans=0.125
+2024-08-03 06:08:01,993 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.304e+02 1.445e+02 1.840e+02 3.127e+02, threshold=2.891e+02, percent-clipped=0.0
+2024-08-03 06:08:10,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=54017.333333333336, ans=0.07
+2024-08-03 06:08:10,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54017.333333333336, ans=0.125
+2024-08-03 06:08:49,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.64 vs. limit=15.0
+2024-08-03 06:08:50,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=54054.0, ans=0.025
+2024-08-03 06:08:57,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=54090.666666666664, ans=0.2
+2024-08-03 06:09:02,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54090.666666666664, ans=0.125
+2024-08-03 06:09:08,820 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.24 vs. limit=15.0
+2024-08-03 06:09:17,806 INFO [train.py:1114] (3/4) Epoch 5, batch 200, loss[loss=0.2998, simple_loss=0.3643, pruned_loss=0.1177, over 12552.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3357, pruned_loss=0.1003, over 1666386.91 frames. ], batch size: 58, lr: 2.46e-02, grad_scale: 32.0
+2024-08-03 06:09:23,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=54127.333333333336, ans=0.95
+2024-08-03 06:09:26,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=54127.333333333336, ans=0.1
+2024-08-03 06:09:30,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54127.333333333336, ans=0.125
+2024-08-03 06:09:30,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0
+2024-08-03 06:09:36,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=54164.0, ans=0.0
+2024-08-03 06:09:48,908 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.56 vs. limit=22.5
+2024-08-03 06:09:51,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.76 vs. limit=10.0
+2024-08-03 06:10:01,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.20 vs. limit=22.5
+2024-08-03 06:10:15,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=54237.333333333336, ans=0.09899494936611666
+2024-08-03 06:10:36,822 INFO [train.py:1114] (3/4) Epoch 5, batch 250, loss[loss=0.2884, simple_loss=0.3521, pruned_loss=0.1124, over 13307.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3352, pruned_loss=0.09938, over 1885199.89 frames. ], batch size: 46, lr: 2.45e-02, grad_scale: 32.0
+2024-08-03 06:10:49,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0
+2024-08-03 06:11:04,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.03 vs. limit=22.5
+2024-08-03 06:11:05,037 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.288e+02 1.425e+02 1.791e+02 2.775e+02, threshold=2.850e+02, percent-clipped=0.0
+2024-08-03 06:11:06,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=54384.0, ans=0.0
+2024-08-03 06:11:14,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=54420.666666666664, ans=0.0
+2024-08-03 06:11:24,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=54420.666666666664, ans=0.2
+2024-08-03 06:11:37,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54457.333333333336, ans=0.1
+2024-08-03 06:11:47,372 INFO [train.py:1114] (3/4) Epoch 5, batch 300, loss[loss=0.2658, simple_loss=0.3372, pruned_loss=0.09721, over 13445.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3346, pruned_loss=0.09883, over 2052169.47 frames. ], batch size: 42, lr: 2.45e-02, grad_scale: 16.0
+2024-08-03 06:11:54,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.96 vs. limit=15.0
+2024-08-03 06:12:07,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.09 vs. limit=22.5
+2024-08-03 06:12:07,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=12.0
+2024-08-03 06:12:19,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=54567.333333333336, ans=0.125
+2024-08-03 06:12:21,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=54567.333333333336, ans=0.0
+2024-08-03 06:12:31,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.53 vs. limit=15.0
+2024-08-03 06:12:46,947 INFO [train.py:1114] (3/4) Epoch 5, batch 350, loss[loss=0.2731, simple_loss=0.3336, pruned_loss=0.1063, over 13583.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3345, pruned_loss=0.09882, over 2183163.92 frames. ], batch size: 33, lr: 2.45e-02, grad_scale: 16.0
+2024-08-03 06:12:49,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=54677.333333333336, ans=0.125
+2024-08-03 06:14:11,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=54714.0, ans=0.125
+2024-08-03 06:14:28,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.91 vs. limit=22.5
+2024-08-03 06:14:29,330 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.351e+02 1.704e+02 2.152e+02 5.145e+02, threshold=3.407e+02, percent-clipped=8.0
+2024-08-03 06:14:30,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=54750.666666666664, ans=0.025
+2024-08-03 06:14:35,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.10 vs. limit=15.0
+2024-08-03 06:14:53,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54824.0, ans=0.125
+2024-08-03 06:14:54,828 INFO [train.py:1114] (3/4) Epoch 5, batch 400, loss[loss=0.2631, simple_loss=0.3389, pruned_loss=0.09358, over 13365.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3341, pruned_loss=0.0987, over 2286403.82 frames. ], batch size: 37, lr: 2.44e-02, grad_scale: 32.0
+2024-08-03 06:15:07,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0
+2024-08-03 06:15:23,085 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 06:15:40,331 INFO [train.py:1114] (3/4) Epoch 5, batch 450, loss[loss=0.2711, simple_loss=0.3331, pruned_loss=0.1046, over 13543.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3348, pruned_loss=0.09915, over 2359918.11 frames. ], batch size: 38, lr: 2.44e-02, grad_scale: 32.0
+2024-08-03 06:15:47,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=55044.0, ans=0.125
+2024-08-03 06:15:55,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=55080.666666666664, ans=0.125
+2024-08-03 06:15:56,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=55080.666666666664, ans=0.035
+2024-08-03 06:16:01,313 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.371e+02 1.584e+02 1.939e+02 3.313e+02, threshold=3.167e+02, percent-clipped=0.0
+2024-08-03 06:16:17,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=55154.0, ans=0.125
+2024-08-03 06:16:21,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=55154.0, ans=0.125
+2024-08-03 06:16:33,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.61 vs. limit=15.0
+2024-08-03 06:16:34,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=55227.333333333336, ans=0.0
+2024-08-03 06:16:34,968 INFO [train.py:1114] (3/4) Epoch 5, batch 500, loss[loss=0.2753, simple_loss=0.3509, pruned_loss=0.09986, over 13424.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3341, pruned_loss=0.0989, over 2425653.31 frames. ], batch size: 43, lr: 2.44e-02, grad_scale: 32.0
+2024-08-03 06:16:39,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=55227.333333333336, ans=0.125
+2024-08-03 06:16:42,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55227.333333333336, ans=0.1
+2024-08-03 06:16:43,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=55264.0, ans=0.0
+2024-08-03 06:16:43,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55264.0, ans=0.0
+2024-08-03 06:17:38,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55300.666666666664, ans=0.1
+2024-08-03 06:17:50,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=55337.333333333336, ans=0.0
+2024-08-03 06:17:52,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=55337.333333333336, ans=0.0
+2024-08-03 06:19:39,785 INFO [train.py:1114] (3/4) Epoch 5, batch 550, loss[loss=0.2937, simple_loss=0.3497, pruned_loss=0.1188, over 13116.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3341, pruned_loss=0.09929, over 2468892.44 frames. ], batch size: 48, lr: 2.43e-02, grad_scale: 32.0
+2024-08-03 06:21:10,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55410.666666666664, ans=0.0
+2024-08-03 06:21:26,883 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.350e+02 1.520e+02 1.792e+02 6.308e+02, threshold=3.041e+02, percent-clipped=2.0
+2024-08-03 06:21:39,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55520.666666666664, ans=0.1
+2024-08-03 06:22:10,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=15.0
+2024-08-03 06:22:14,709 INFO [train.py:1114] (3/4) Epoch 5, batch 600, loss[loss=0.296, simple_loss=0.3641, pruned_loss=0.1139, over 13288.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3349, pruned_loss=0.09972, over 2507462.47 frames. ], batch size: 46, lr: 2.43e-02, grad_scale: 32.0
+2024-08-03 06:22:21,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0
+2024-08-03 06:22:21,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55594.0, ans=0.1
+2024-08-03 06:22:22,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55630.666666666664, ans=0.1
+2024-08-03 06:22:39,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.46 vs. limit=15.0
+2024-08-03 06:22:42,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55667.333333333336, ans=0.1
+2024-08-03 06:22:59,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.72 vs. limit=22.5
+2024-08-03 06:23:07,600 INFO [train.py:1114] (3/4) Epoch 5, batch 650, loss[loss=0.2205, simple_loss=0.3053, pruned_loss=0.06785, over 13560.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3334, pruned_loss=0.09832, over 2543235.00 frames. ], batch size: 37, lr: 2.43e-02, grad_scale: 32.0
+2024-08-03 06:23:23,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.70 vs. limit=12.0
+2024-08-03 06:23:30,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=55814.0, ans=0.125
+2024-08-03 06:23:43,266 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.304e+02 1.464e+02 1.924e+02 3.409e+02, threshold=2.927e+02, percent-clipped=2.0
+2024-08-03 06:23:47,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.95 vs. limit=15.0
+2024-08-03 06:23:59,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=55924.0, ans=0.0
+2024-08-03 06:24:05,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=55924.0, ans=0.125
+2024-08-03 06:24:05,505 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0
+2024-08-03 06:24:09,705 INFO [train.py:1114] (3/4) Epoch 5, batch 700, loss[loss=0.2457, simple_loss=0.3164, pruned_loss=0.0875, over 13546.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3339, pruned_loss=0.09874, over 2563932.37 frames. ], batch size: 35, lr: 2.43e-02, grad_scale: 32.0
+2024-08-03 06:24:28,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.35 vs. limit=12.0
+2024-08-03 06:24:31,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=55997.333333333336, ans=0.07
+2024-08-03 06:24:32,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=55997.333333333336, ans=0.125
+2024-08-03 06:24:37,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=55997.333333333336, ans=0.0
+2024-08-03 06:24:50,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=56070.666666666664, ans=0.125
+2024-08-03 06:25:08,423 INFO [train.py:1114] (3/4) Epoch 5, batch 750, loss[loss=0.2785, simple_loss=0.3505, pruned_loss=0.1033, over 13368.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3332, pruned_loss=0.09855, over 2581274.93 frames. ], batch size: 37, lr: 2.42e-02, grad_scale: 32.0
+2024-08-03 06:25:08,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56144.0, ans=0.125
+2024-08-03 06:25:20,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=56144.0, ans=0.125
+2024-08-03 06:25:23,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=56180.666666666664, ans=0.0
+2024-08-03 06:25:26,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=56180.666666666664, ans=0.125
+2024-08-03 06:25:27,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56180.666666666664, ans=0.125
+2024-08-03 06:25:29,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=56180.666666666664, ans=0.125
+2024-08-03 06:25:34,264 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.431e+02 1.731e+02 2.437e+02 4.529e+02, threshold=3.462e+02, percent-clipped=10.0
+2024-08-03 06:25:45,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=56254.0, ans=0.0
+2024-08-03 06:25:59,100 INFO [train.py:1114] (3/4) Epoch 5, batch 800, loss[loss=0.233, simple_loss=0.3056, pruned_loss=0.08014, over 13343.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3323, pruned_loss=0.09784, over 2596441.28 frames. ], batch size: 33, lr: 2.42e-02, grad_scale: 32.0
+2024-08-03 06:26:08,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.80 vs. limit=22.5
+2024-08-03 06:26:52,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.38 vs. limit=15.0
+2024-08-03 06:26:58,693 INFO [train.py:1114] (3/4) Epoch 5, batch 850, loss[loss=0.2659, simple_loss=0.3407, pruned_loss=0.09557, over 13333.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3324, pruned_loss=0.09805, over 2609377.77 frames. ], batch size: 40, lr: 2.42e-02, grad_scale: 32.0
+2024-08-03 06:27:03,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56510.666666666664, ans=0.1
+2024-08-03 06:27:09,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=56547.333333333336, ans=0.025
+2024-08-03 06:27:19,558 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.293e+02 1.480e+02 2.210e+02 4.419e+02, threshold=2.961e+02, percent-clipped=1.0
+2024-08-03 06:27:34,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56657.333333333336, ans=0.1
+2024-08-03 06:27:44,561 INFO [train.py:1114] (3/4) Epoch 5, batch 900, loss[loss=0.2226, simple_loss=0.3007, pruned_loss=0.07228, over 13349.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.333, pruned_loss=0.09806, over 2612479.07 frames. ], batch size: 33, lr: 2.41e-02, grad_scale: 32.0
+2024-08-03 06:27:45,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56694.0, ans=0.1
+2024-08-03 06:27:48,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=56694.0, ans=0.2
+2024-08-03 06:28:06,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=56767.333333333336, ans=0.0
+2024-08-03 06:28:10,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=56767.333333333336, ans=0.0
+2024-08-03 06:28:24,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=56840.666666666664, ans=0.04949747468305833
+2024-08-03 06:28:25,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56840.666666666664, ans=0.125
+2024-08-03 06:28:39,195 INFO [train.py:1114] (3/4) Epoch 5, batch 950, loss[loss=0.2519, simple_loss=0.324, pruned_loss=0.08992, over 13539.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3332, pruned_loss=0.09782, over 2613778.47 frames. ], batch size: 34, lr: 2.41e-02, grad_scale: 32.0
+2024-08-03 06:28:39,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=56877.333333333336, ans=15.0
+2024-08-03 06:28:39,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.47 vs. limit=22.5
+2024-08-03 06:28:45,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=56877.333333333336, ans=0.125
+2024-08-03 06:29:01,396 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.321e+02 1.545e+02 1.895e+02 5.386e+02, threshold=3.090e+02, percent-clipped=1.0
+2024-08-03 06:29:05,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=56950.666666666664, ans=0.025
+2024-08-03 06:29:19,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=56987.333333333336, ans=0.1
+2024-08-03 06:29:20,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=56987.333333333336, ans=0.125
+2024-08-03 06:29:22,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.36 vs. limit=10.0
+2024-08-03 06:29:23,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=56987.333333333336, ans=0.0
+2024-08-03 06:29:35,303 INFO [train.py:1114] (3/4) Epoch 5, batch 1000, loss[loss=0.2418, simple_loss=0.3136, pruned_loss=0.08499, over 13350.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3343, pruned_loss=0.09846, over 2612221.71 frames. ], batch size: 35, lr: 2.41e-02, grad_scale: 32.0
+2024-08-03 06:29:45,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=57060.666666666664, ans=0.125
+2024-08-03 06:29:45,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57060.666666666664, ans=0.125
+2024-08-03 06:29:45,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=57060.666666666664, ans=0.0
+2024-08-03 06:29:51,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57060.666666666664, ans=0.125
+2024-08-03 06:29:56,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=57097.333333333336, ans=0.0
+2024-08-03 06:30:25,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.01 vs. limit=22.5
+2024-08-03 06:30:40,643 INFO [train.py:1114] (3/4) Epoch 5, batch 1050, loss[loss=0.2735, simple_loss=0.3589, pruned_loss=0.09402, over 13560.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3329, pruned_loss=0.09768, over 2616115.89 frames. ], batch size: 39, lr: 2.40e-02, grad_scale: 32.0
+2024-08-03 06:30:46,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=57244.0, ans=0.125
+2024-08-03 06:30:46,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.94 vs. limit=15.0
+2024-08-03 06:30:50,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=12.0
+2024-08-03 06:31:01,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.93 vs. limit=15.0
+2024-08-03 06:31:01,348 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.349e+02 1.601e+02 2.002e+02 3.488e+02, threshold=3.202e+02, percent-clipped=3.0
+2024-08-03 06:31:09,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=57317.333333333336, ans=0.0
+2024-08-03 06:31:24,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=57390.666666666664, ans=0.1
+2024-08-03 06:31:31,766 INFO [train.py:1114] (3/4) Epoch 5, batch 1100, loss[loss=0.285, simple_loss=0.3477, pruned_loss=0.1112, over 13554.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3328, pruned_loss=0.09775, over 2620491.05 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0
+2024-08-03 06:31:55,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57464.0, ans=0.125
+2024-08-03 06:31:56,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57500.666666666664, ans=0.125
+2024-08-03 06:32:00,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0
+2024-08-03 06:32:18,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57537.333333333336, ans=0.125
+2024-08-03 06:32:31,942 INFO [train.py:1114] (3/4) Epoch 5, batch 1150, loss[loss=0.2347, simple_loss=0.3053, pruned_loss=0.08205, over 13564.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3325, pruned_loss=0.09758, over 2619654.71 frames. ], batch size: 36, lr: 2.40e-02, grad_scale: 32.0
+2024-08-03 06:32:40,950 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 06:32:43,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57610.666666666664, ans=0.1
+2024-08-03 06:32:43,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=57610.666666666664, ans=0.125
+2024-08-03 06:32:49,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=57647.333333333336, ans=0.0
+2024-08-03 06:32:57,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=57684.0, ans=0.0
+2024-08-03 06:32:59,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.317e+02 1.572e+02 1.915e+02 2.951e+02, threshold=3.144e+02, percent-clipped=0.0
+2024-08-03 06:32:59,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=57684.0, ans=0.2
+2024-08-03 06:33:13,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=57684.0, ans=0.125
+2024-08-03 06:33:44,661 INFO [train.py:1114] (3/4) Epoch 5, batch 1200, loss[loss=0.2834, simple_loss=0.3508, pruned_loss=0.108, over 13580.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3346, pruned_loss=0.09878, over 2616232.70 frames. ], batch size: 39, lr: 2.39e-02, grad_scale: 32.0
+2024-08-03 06:33:54,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=57830.666666666664, ans=0.025
+2024-08-03 06:33:55,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=57830.666666666664, ans=0.125
+2024-08-03 06:33:58,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57830.666666666664, ans=0.125
+2024-08-03 06:34:06,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=57867.333333333336, ans=0.0
+2024-08-03 06:34:08,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57867.333333333336, ans=0.1
+2024-08-03 06:34:09,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=57867.333333333336, ans=0.125
+2024-08-03 06:34:39,098 INFO [train.py:1114] (3/4) Epoch 5, batch 1250, loss[loss=0.2914, simple_loss=0.3614, pruned_loss=0.1107, over 13460.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3345, pruned_loss=0.09794, over 2627961.12 frames. ], batch size: 42, lr: 2.39e-02, grad_scale: 32.0
+2024-08-03 06:34:50,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=58014.0, ans=0.0
+2024-08-03 06:34:55,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=58014.0, ans=0.0
+2024-08-03 06:34:57,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=58014.0, ans=0.04949747468305833
+2024-08-03 06:35:05,303 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.303e+02 1.543e+02 2.003e+02 3.165e+02, threshold=3.086e+02, percent-clipped=1.0
+2024-08-03 06:35:09,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=58050.666666666664, ans=0.0
+2024-08-03 06:35:15,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=58087.333333333336, ans=0.0
+2024-08-03 06:35:24,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.39 vs. limit=22.5
+2024-08-03 06:35:32,169 INFO [train.py:1114] (3/4) Epoch 5, batch 1300, loss[loss=0.2867, simple_loss=0.353, pruned_loss=0.1102, over 12821.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3338, pruned_loss=0.09773, over 2631800.55 frames. ], batch size: 52, lr: 2.39e-02, grad_scale: 16.0
+2024-08-03 06:35:35,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.91 vs. limit=22.5
+2024-08-03 06:35:36,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=12.0
+2024-08-03 06:35:41,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=58197.333333333336, ans=0.0
+2024-08-03 06:36:14,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=15.0
+2024-08-03 06:36:17,086 INFO [train.py:1114] (3/4) Epoch 5, batch 1350, loss[loss=0.2789, simple_loss=0.3438, pruned_loss=0.107, over 13547.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3331, pruned_loss=0.09734, over 2638812.62 frames. ], batch size: 37, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:36:27,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=58380.666666666664, ans=10.0
+2024-08-03 06:36:39,396 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.960e+01 1.325e+02 1.559e+02 1.988e+02 3.487e+02, threshold=3.118e+02, percent-clipped=2.0
+2024-08-03 06:36:57,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0
+2024-08-03 06:36:59,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=58454.0, ans=0.0
+2024-08-03 06:37:12,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.90 vs. limit=10.0
+2024-08-03 06:37:12,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=58490.666666666664, ans=0.125
+2024-08-03 06:37:15,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=58490.666666666664, ans=0.025
+2024-08-03 06:37:17,002 INFO [train.py:1114] (3/4) Epoch 5, batch 1400, loss[loss=0.2372, simple_loss=0.303, pruned_loss=0.08571, over 13255.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3327, pruned_loss=0.09695, over 2642745.50 frames. ], batch size: 31, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:37:32,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.08 vs. limit=22.5
+2024-08-03 06:37:43,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=58600.666666666664, ans=0.0
+2024-08-03 06:37:58,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0
+2024-08-03 06:38:04,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.97 vs. limit=15.0
+2024-08-03 06:38:07,901 INFO [train.py:1114] (3/4) Epoch 5, batch 1450, loss[loss=0.2548, simple_loss=0.3231, pruned_loss=0.09323, over 13415.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3333, pruned_loss=0.0972, over 2642329.12 frames. ], batch size: 43, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:38:23,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=58747.333333333336, ans=0.0
+2024-08-03 06:38:30,503 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.374e+02 1.719e+02 2.363e+02 5.392e+02, threshold=3.437e+02, percent-clipped=8.0
+2024-08-03 06:38:41,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=58784.0, ans=0.0
+2024-08-03 06:38:43,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=58784.0, ans=0.95
+2024-08-03 06:38:43,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=58784.0, ans=0.125
+2024-08-03 06:38:45,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58820.666666666664, ans=0.125
+2024-08-03 06:38:46,840 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 06:38:55,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=58857.333333333336, ans=0.2
+2024-08-03 06:39:02,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=58857.333333333336, ans=0.025
+2024-08-03 06:39:06,273 INFO [train.py:1114] (3/4) Epoch 5, batch 1500, loss[loss=0.2595, simple_loss=0.3411, pruned_loss=0.08896, over 13398.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3337, pruned_loss=0.09782, over 2641720.78 frames. ], batch size: 39, lr: 2.38e-02, grad_scale: 16.0
+2024-08-03 06:40:05,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.52 vs. limit=6.0
+2024-08-03 06:40:13,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59040.666666666664, ans=0.1
+2024-08-03 06:40:15,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59077.333333333336, ans=0.1
+2024-08-03 06:40:16,381 INFO [train.py:1114] (3/4) Epoch 5, batch 1550, loss[loss=0.2414, simple_loss=0.3238, pruned_loss=0.07949, over 13405.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3341, pruned_loss=0.0982, over 2631455.41 frames. ], batch size: 41, lr: 2.37e-02, grad_scale: 16.0
+2024-08-03 06:40:27,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.07 vs. 
limit=15.0 +2024-08-03 06:40:30,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59114.0, ans=0.1 +2024-08-03 06:40:39,047 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.330e+02 1.569e+02 1.992e+02 3.164e+02, threshold=3.138e+02, percent-clipped=1.0 +2024-08-03 06:40:43,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=59187.333333333336, ans=0.125 +2024-08-03 06:40:48,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59187.333333333336, ans=0.2 +2024-08-03 06:41:36,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=59224.0, ans=0.5 +2024-08-03 06:41:38,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=59224.0, ans=0.125 +2024-08-03 06:41:45,101 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.16 vs. limit=8.0 +2024-08-03 06:41:47,101 INFO [train.py:1114] (3/4) Epoch 5, batch 1600, loss[loss=0.2703, simple_loss=0.3385, pruned_loss=0.1011, over 13568.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.334, pruned_loss=0.09839, over 2624894.27 frames. ], batch size: 39, lr: 2.37e-02, grad_scale: 32.0 +2024-08-03 06:41:57,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.81 vs. limit=15.0 +2024-08-03 06:42:02,241 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:42:07,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.04 vs. limit=22.5 +2024-08-03 06:42:08,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=59334.0, ans=0.0 +2024-08-03 06:42:12,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=59334.0, ans=0.0 +2024-08-03 06:42:21,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=59370.666666666664, ans=0.2 +2024-08-03 06:42:38,327 INFO [train.py:1114] (3/4) Epoch 5, batch 1650, loss[loss=0.2589, simple_loss=0.3426, pruned_loss=0.08766, over 13332.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3334, pruned_loss=0.09826, over 2620098.06 frames. 
], batch size: 40, lr: 2.37e-02, grad_scale: 32.0 +2024-08-03 06:42:50,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59480.666666666664, ans=0.1 +2024-08-03 06:42:56,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=59480.666666666664, ans=0.0 +2024-08-03 06:43:04,540 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.342e+02 1.500e+02 2.074e+02 4.077e+02, threshold=2.999e+02, percent-clipped=4.0 +2024-08-03 06:43:22,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=59590.666666666664, ans=0.125 +2024-08-03 06:43:27,952 INFO [train.py:1114] (3/4) Epoch 5, batch 1700, loss[loss=0.2395, simple_loss=0.3066, pruned_loss=0.08624, over 13274.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3329, pruned_loss=0.0978, over 2629255.82 frames. ], batch size: 31, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:43:28,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=59627.333333333336, ans=0.125 +2024-08-03 06:43:33,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-08-03 06:43:40,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=59664.0, ans=0.125 +2024-08-03 06:44:18,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=59774.0, ans=0.0 +2024-08-03 06:44:20,423 INFO [train.py:1114] (3/4) Epoch 5, batch 1750, loss[loss=0.1992, simple_loss=0.2735, pruned_loss=0.0624, over 13544.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3321, pruned_loss=0.09742, over 2633355.20 frames. ], batch size: 31, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:44:29,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=59810.666666666664, ans=0.0 +2024-08-03 06:44:30,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.60 vs. limit=15.0 +2024-08-03 06:44:36,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=59847.333333333336, ans=0.07 +2024-08-03 06:44:45,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=59884.0, ans=0.125 +2024-08-03 06:44:47,657 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:44:50,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.019e+02 1.258e+02 1.421e+02 1.677e+02 2.914e+02, threshold=2.843e+02, percent-clipped=0.0 +2024-08-03 06:45:21,426 INFO [train.py:1114] (3/4) Epoch 5, batch 1800, loss[loss=0.2672, simple_loss=0.3458, pruned_loss=0.09425, over 13557.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3318, pruned_loss=0.09709, over 2634689.04 frames. ], batch size: 38, lr: 2.36e-02, grad_scale: 32.0 +2024-08-03 06:45:24,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.40 vs. 
limit=15.0 +2024-08-03 06:45:24,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.93 vs. limit=10.0 +2024-08-03 06:45:26,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.83 vs. limit=22.5 +2024-08-03 06:45:37,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60030.666666666664, ans=0.1 +2024-08-03 06:45:41,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.65 vs. limit=15.0 +2024-08-03 06:45:41,662 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 06:49:33,870 INFO [train.py:1114] (3/4) Epoch 5, batch 1850, loss[loss=0.2964, simple_loss=0.3633, pruned_loss=0.1148, over 13407.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3313, pruned_loss=0.09675, over 2637414.00 frames. ], batch size: 39, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:49:34,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=60177.333333333336, ans=0.2 +2024-08-03 06:50:10,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=60177.333333333336, ans=0.025 +2024-08-03 06:50:41,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=60214.0, ans=0.125 +2024-08-03 06:50:46,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=60214.0, ans=0.2 +2024-08-03 06:50:58,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=60214.0, ans=0.2 +2024-08-03 06:51:23,625 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.315e+02 1.584e+02 1.966e+02 3.228e+02, threshold=3.167e+02, percent-clipped=4.0 +2024-08-03 06:52:19,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.13 vs. limit=15.0 +2024-08-03 06:52:37,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60324.0, ans=0.1 +2024-08-03 06:52:40,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=60324.0, ans=0.0 +2024-08-03 06:53:02,071 INFO [train.py:1114] (3/4) Epoch 5, batch 1900, loss[loss=0.281, simple_loss=0.3462, pruned_loss=0.1079, over 13319.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3318, pruned_loss=0.09688, over 2640088.73 frames. ], batch size: 40, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:57:48,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60434.0, ans=0.1 +2024-08-03 06:58:28,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60470.666666666664, ans=0.1 +2024-08-03 06:58:59,406 INFO [train.py:1114] (3/4) Epoch 5, batch 1950, loss[loss=0.2278, simple_loss=0.304, pruned_loss=0.07578, over 13558.00 frames. 
], tot_loss[loss=0.2635, simple_loss=0.3327, pruned_loss=0.09715, over 2646891.33 frames. ], batch size: 36, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 06:59:12,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=60544.0, ans=0.04949747468305833 +2024-08-03 06:59:23,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60580.666666666664, ans=0.1 +2024-08-03 06:59:42,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.53 vs. limit=6.0 +2024-08-03 07:00:43,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-08-03 07:01:17,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=60617.333333333336, ans=0.0 +2024-08-03 07:01:28,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60617.333333333336, ans=0.125 +2024-08-03 07:01:31,513 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.403e+02 1.665e+02 1.976e+02 3.868e+02, threshold=3.331e+02, percent-clipped=1.0 +2024-08-03 07:02:34,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=60690.666666666664, ans=0.125 +2024-08-03 07:03:18,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=60690.666666666664, ans=0.0 +2024-08-03 07:03:26,355 INFO [train.py:1114] (3/4) Epoch 5, batch 2000, loss[loss=0.237, simple_loss=0.3064, pruned_loss=0.08381, over 13539.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3334, pruned_loss=0.09772, over 2636966.24 frames. ], batch size: 31, lr: 2.35e-02, grad_scale: 32.0 +2024-08-03 07:03:28,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=60727.333333333336, ans=0.025 +2024-08-03 07:03:49,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=60800.666666666664, ans=0.025 +2024-08-03 07:03:49,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=60800.666666666664, ans=0.025 +2024-08-03 07:03:56,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0 +2024-08-03 07:03:57,310 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:03:59,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=60837.333333333336, ans=0.2 +2024-08-03 07:04:26,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=60874.0, ans=0.125 +2024-08-03 07:04:31,376 INFO [train.py:1114] (3/4) Epoch 5, batch 2050, loss[loss=0.2459, simple_loss=0.3099, pruned_loss=0.09094, over 13433.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3322, pruned_loss=0.09752, over 2633130.01 frames. 
], batch size: 32, lr: 2.34e-02, grad_scale: 32.0 +2024-08-03 07:04:41,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=60910.666666666664, ans=15.0 +2024-08-03 07:05:03,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=60984.0, ans=0.2 +2024-08-03 07:05:08,118 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.320e+02 1.526e+02 1.984e+02 3.306e+02, threshold=3.052e+02, percent-clipped=0.0 +2024-08-03 07:05:18,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61020.666666666664, ans=0.125 +2024-08-03 07:05:20,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=61057.333333333336, ans=0.0 +2024-08-03 07:05:29,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=61094.0, ans=0.125 +2024-08-03 07:05:29,947 INFO [train.py:1114] (3/4) Epoch 5, batch 2100, loss[loss=0.2326, simple_loss=0.3035, pruned_loss=0.08084, over 13543.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3307, pruned_loss=0.09666, over 2639028.87 frames. ], batch size: 37, lr: 2.34e-02, grad_scale: 16.0 +2024-08-03 07:05:43,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=61130.666666666664, ans=0.2 +2024-08-03 07:05:43,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=61130.666666666664, ans=0.125 +2024-08-03 07:07:42,846 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:07:50,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=61240.666666666664, ans=0.125 +2024-08-03 07:07:51,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=61240.666666666664, ans=0.125 +2024-08-03 07:08:00,452 INFO [train.py:1114] (3/4) Epoch 5, batch 2150, loss[loss=0.2472, simple_loss=0.3185, pruned_loss=0.08793, over 13562.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3305, pruned_loss=0.09682, over 2647207.60 frames. ], batch size: 36, lr: 2.34e-02, grad_scale: 16.0 +2024-08-03 07:08:02,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.08 vs. 
limit=22.5 +2024-08-03 07:08:09,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=61314.0, ans=0.0 +2024-08-03 07:08:12,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=61314.0, ans=0.125 +2024-08-03 07:08:12,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=61314.0, ans=0.0 +2024-08-03 07:08:23,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=61350.666666666664, ans=0.0 +2024-08-03 07:08:25,515 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.319e+02 1.581e+02 2.053e+02 4.024e+02, threshold=3.163e+02, percent-clipped=3.0 +2024-08-03 07:08:27,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61350.666666666664, ans=0.1 +2024-08-03 07:08:48,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.68 vs. limit=12.0 +2024-08-03 07:08:50,643 INFO [train.py:1114] (3/4) Epoch 5, batch 2200, loss[loss=0.2448, simple_loss=0.3238, pruned_loss=0.08288, over 13400.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3297, pruned_loss=0.09587, over 2644863.38 frames. ], batch size: 39, lr: 2.33e-02, grad_scale: 16.0 +2024-08-03 07:09:09,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=22.5 +2024-08-03 07:09:30,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.13 vs. limit=22.5 +2024-08-03 07:09:36,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=61607.333333333336, ans=0.0 +2024-08-03 07:09:40,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.40 vs. limit=22.5 +2024-08-03 07:09:41,549 INFO [train.py:1114] (3/4) Epoch 5, batch 2250, loss[loss=0.295, simple_loss=0.366, pruned_loss=0.112, over 13356.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3302, pruned_loss=0.09602, over 2642311.71 frames. ], batch size: 37, lr: 2.33e-02, grad_scale: 16.0 +2024-08-03 07:09:58,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=61680.666666666664, ans=0.09899494936611666 +2024-08-03 07:10:05,791 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.390e+02 1.682e+02 2.115e+02 4.078e+02, threshold=3.364e+02, percent-clipped=8.0 +2024-08-03 07:12:06,486 INFO [train.py:1114] (3/4) Epoch 5, batch 2300, loss[loss=0.2239, simple_loss=0.2976, pruned_loss=0.07511, over 13588.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3297, pruned_loss=0.09641, over 2638704.59 frames. ], batch size: 33, lr: 2.33e-02, grad_scale: 16.0 +2024-08-03 07:14:29,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61864.0, ans=0.1 +2024-08-03 07:16:32,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.21 vs. 
limit=15.0 +2024-08-03 07:16:35,491 INFO [train.py:1114] (3/4) Epoch 5, batch 2350, loss[loss=0.2472, simple_loss=0.3238, pruned_loss=0.08536, over 13562.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3296, pruned_loss=0.09614, over 2641742.65 frames. ], batch size: 38, lr: 2.32e-02, grad_scale: 16.0 +2024-08-03 07:16:35,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62010.666666666664, ans=0.1 +2024-08-03 07:17:19,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=62084.0, ans=0.125 +2024-08-03 07:17:19,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=62084.0, ans=0.0 +2024-08-03 07:17:20,768 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.323e+02 1.600e+02 2.155e+02 3.699e+02, threshold=3.200e+02, percent-clipped=2.0 +2024-08-03 07:18:26,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.45 vs. limit=15.0 +2024-08-03 07:18:36,409 INFO [train.py:1114] (3/4) Epoch 5, batch 2400, loss[loss=0.2355, simple_loss=0.308, pruned_loss=0.08152, over 13533.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3299, pruned_loss=0.09604, over 2643142.14 frames. ], batch size: 35, lr: 2.32e-02, grad_scale: 32.0 +2024-08-03 07:18:55,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=62194.0, ans=0.0 +2024-08-03 07:18:59,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.06 vs. limit=10.0 +2024-08-03 07:19:02,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.63 vs. limit=10.0 +2024-08-03 07:19:13,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=12.0 +2024-08-03 07:19:39,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=62340.666666666664, ans=0.2 +2024-08-03 07:19:45,670 INFO [train.py:1114] (3/4) Epoch 5, batch 2450, loss[loss=0.2555, simple_loss=0.3305, pruned_loss=0.09022, over 13355.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3309, pruned_loss=0.09651, over 2633523.75 frames. ], batch size: 37, lr: 2.32e-02, grad_scale: 32.0 +2024-08-03 07:20:08,871 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.340e+02 1.643e+02 2.212e+02 4.155e+02, threshold=3.287e+02, percent-clipped=6.0 +2024-08-03 07:20:35,558 INFO [train.py:1114] (3/4) Epoch 5, batch 2500, loss[loss=0.3238, simple_loss=0.3751, pruned_loss=0.1363, over 13408.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3307, pruned_loss=0.09617, over 2637871.69 frames. 
], batch size: 39, lr: 2.32e-02, grad_scale: 32.0 +2024-08-03 07:20:40,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62560.666666666664, ans=0.1 +2024-08-03 07:20:44,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=62597.333333333336, ans=0.125 +2024-08-03 07:21:11,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.17 vs. limit=15.0 +2024-08-03 07:21:20,478 INFO [train.py:1114] (3/4) Epoch 5, batch 2550, loss[loss=0.2271, simple_loss=0.2944, pruned_loss=0.07993, over 13567.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3306, pruned_loss=0.09612, over 2639196.05 frames. ], batch size: 31, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:21:20,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=62744.0, ans=0.125 +2024-08-03 07:21:26,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62744.0, ans=0.1 +2024-08-03 07:21:30,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62780.666666666664, ans=0.125 +2024-08-03 07:21:46,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=62817.333333333336, ans=0.125 +2024-08-03 07:21:49,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.290e+02 1.480e+02 1.885e+02 4.380e+02, threshold=2.959e+02, percent-clipped=2.0 +2024-08-03 07:21:56,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=62854.0, ans=10.0 +2024-08-03 07:21:57,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.12 vs. limit=15.0 +2024-08-03 07:22:00,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=62890.666666666664, ans=0.0 +2024-08-03 07:22:01,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.37 vs. limit=15.0 +2024-08-03 07:22:09,866 INFO [train.py:1114] (3/4) Epoch 5, batch 2600, loss[loss=0.2643, simple_loss=0.3255, pruned_loss=0.1015, over 13561.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.331, pruned_loss=0.09616, over 2639038.62 frames. ], batch size: 36, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:22:11,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-08-03 07:22:21,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.61 vs. 
limit=10.0 +2024-08-03 07:22:28,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63000.666666666664, ans=0.1 +2024-08-03 07:23:59,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=63037.333333333336, ans=0.125 +2024-08-03 07:24:02,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63037.333333333336, ans=0.125 +2024-08-03 07:24:08,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=12.42 vs. limit=15.0 +2024-08-03 07:24:17,396 INFO [train.py:1114] (3/4) Epoch 5, batch 2650, loss[loss=0.3115, simple_loss=0.3749, pruned_loss=0.124, over 13313.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3312, pruned_loss=0.09605, over 2641894.58 frames. ], batch size: 46, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:24:18,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=63110.666666666664, ans=0.0 +2024-08-03 07:24:43,701 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.380e+02 1.559e+02 1.928e+02 2.967e+02, threshold=3.118e+02, percent-clipped=1.0 +2024-08-03 07:24:54,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.10 vs. limit=22.5 +2024-08-03 07:25:01,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=63257.333333333336, ans=0.0 +2024-08-03 07:25:05,923 INFO [train.py:1114] (3/4) Epoch 5, batch 2700, loss[loss=0.2797, simple_loss=0.35, pruned_loss=0.1047, over 13548.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3306, pruned_loss=0.09559, over 2638386.84 frames. ], batch size: 40, lr: 2.31e-02, grad_scale: 32.0 +2024-08-03 07:25:06,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=63294.0, ans=0.0 +2024-08-03 07:25:16,092 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.927e-02 +2024-08-03 07:25:19,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=63330.666666666664, ans=0.0 +2024-08-03 07:25:21,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=63330.666666666664, ans=0.125 +2024-08-03 07:25:37,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63404.0, ans=0.125 +2024-08-03 07:25:47,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.72 vs. limit=22.5 +2024-08-03 07:25:49,694 INFO [train.py:1114] (3/4) Epoch 5, batch 2750, loss[loss=0.2524, simple_loss=0.3146, pruned_loss=0.09515, over 13334.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3293, pruned_loss=0.09537, over 2635282.12 frames. 
], batch size: 34, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:26:35,263 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.483e+02 1.693e+02 2.147e+02 4.016e+02, threshold=3.386e+02, percent-clipped=6.0
+2024-08-03 07:26:38,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=63587.333333333336, ans=0.125
+2024-08-03 07:26:42,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0
+2024-08-03 07:26:52,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=63624.0, ans=0.02
+2024-08-03 07:26:56,075 INFO [train.py:1114] (3/4) Epoch 5, batch 2800, loss[loss=0.3753, simple_loss=0.4022, pruned_loss=0.1742, over 9113.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3299, pruned_loss=0.09585, over 2626090.41 frames. ], batch size: 97, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:26:57,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=63660.666666666664, ans=0.125
+2024-08-03 07:27:15,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=63734.0, ans=0.0
+2024-08-03 07:27:40,308 INFO [train.py:1114] (3/4) Epoch 5, batch 2850, loss[loss=0.2367, simple_loss=0.3204, pruned_loss=0.07646, over 13376.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3298, pruned_loss=0.09578, over 2620173.60 frames. ], batch size: 35, lr: 2.30e-02, grad_scale: 32.0
+2024-08-03 07:27:45,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=63844.0, ans=0.2
+2024-08-03 07:27:51,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=63844.0, ans=0.0
+2024-08-03 07:27:58,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63880.666666666664, ans=0.1
+2024-08-03 07:28:00,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=63880.666666666664, ans=0.0
+2024-08-03 07:28:00,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=63880.666666666664, ans=15.0
+2024-08-03 07:28:36,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=8.0
+2024-08-03 07:28:37,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.375e+02 1.611e+02 2.020e+02 3.770e+02, threshold=3.222e+02, percent-clipped=1.0
+2024-08-03 07:28:39,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=63917.333333333336, ans=0.125
+2024-08-03 07:28:54,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=63990.666666666664, ans=0.04949747468305833
+2024-08-03 07:28:58,764 INFO [train.py:1114] (3/4) Epoch 5, batch 2900, loss[loss=0.2722, simple_loss=0.3356, pruned_loss=0.1045, over 13372.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3307, pruned_loss=0.09549, over 2631512.38 frames. ], batch size: 36, lr: 2.29e-02, grad_scale: 32.0
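The recurring `WARNING [optim.py:487] ... Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...` records in this log summarize adaptive gradient clipping: the optimizer tracks recent gradient norms, reports their quartiles, and clips any step whose norm exceeds a threshold derived from the median scaled by `Clipping_scale`. A minimal sketch of that bookkeeping in plain PyTorch; the function name, window size, and history handling are illustrative assumptions, not the actual `optim.py` implementation:

```python
import torch

def clip_and_log(params, grad_norm_history, clipping_scale=2.0):
    # Total gradient norm for this step (assumes params already have grads).
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.sqrt(sum((g.detach() ** 2).sum() for g in grads))
    grad_norm_history.append(total_norm.item())

    # Quartiles over a recent window, as in the "grad-norm quartiles" line.
    recent = torch.tensor(grad_norm_history[-128:])  # window size assumed
    quartiles = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()  # scale x median

    # Clip in place when this step's norm exceeds the threshold; the log's
    # "percent-clipped" would be the fraction of recent steps that hit this.
    if total_norm.item() > threshold:
        for g in grads:
            g.detach().mul_(threshold / total_norm.item())
    return quartiles, threshold
```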
+2024-08-03 07:29:02,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.59 vs. limit=15.0
+2024-08-03 07:29:12,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=64064.0, ans=0.125
+2024-08-03 07:29:33,429 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:29:34,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=64137.333333333336, ans=0.025
+2024-08-03 07:29:40,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=12.0
+2024-08-03 07:29:43,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64174.0, ans=0.1
+2024-08-03 07:29:46,625 INFO [train.py:1114] (3/4) Epoch 5, batch 2950, loss[loss=0.2318, simple_loss=0.3018, pruned_loss=0.08087, over 13345.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3293, pruned_loss=0.09507, over 2630611.88 frames. ], batch size: 34, lr: 2.29e-02, grad_scale: 32.0
+2024-08-03 07:29:53,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64210.666666666664, ans=0.1
+2024-08-03 07:30:01,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64247.333333333336, ans=0.0
+2024-08-03 07:30:10,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=64284.0, ans=0.0
+2024-08-03 07:30:13,352 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.269e+02 1.515e+02 1.903e+02 4.002e+02, threshold=3.030e+02, percent-clipped=2.0
+2024-08-03 07:30:38,302 INFO [train.py:1114] (3/4) Epoch 5, batch 3000, loss[loss=0.2624, simple_loss=0.3426, pruned_loss=0.09107, over 13534.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3297, pruned_loss=0.09523, over 2630504.56 frames. ], batch size: 37, lr: 2.29e-02, grad_scale: 16.0
+2024-08-03 07:30:38,303 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 07:31:39,949 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.2105, simple_loss=0.3083, pruned_loss=0.0563, over 944034.00 frames.
+2024-08-03 07:31:39,949 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 07:31:54,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=64430.666666666664, ans=0.125
+2024-08-03 07:32:16,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=64540.666666666664, ans=0.125
+2024-08-03 07:32:24,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=64540.666666666664, ans=0.125
+2024-08-03 07:32:27,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64540.666666666664, ans=0.0
+2024-08-03 07:32:30,611 INFO [train.py:1114] (3/4) Epoch 5, batch 3050, loss[loss=0.2338, simple_loss=0.3049, pruned_loss=0.08132, over 13535.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3303, pruned_loss=0.09558, over 2627419.17 frames. ], batch size: 35, lr: 2.29e-02, grad_scale: 16.0
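`Computing validation loss`, followed by `Epoch 5, validation: loss=0.2105, ..., over 944034.00 frames.` and the `Maximum memory allocated` line, records a mid-epoch pass over the held-out set: a frame-weighted average loss plus peak GPU memory. Roughly, under an assumed plain-PyTorch loop; `criterion` and the batch layout here are placeholders, not the actual `train.py` interface:

```python
import torch

def compute_validation_loss(model, valid_loader, criterion, device="cuda"):
    # Sketch of the "Computing validation loss" step: average the loss over
    # all validation frames and report peak GPU memory so far.
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for feats, targets, num_frames in valid_loader:  # assumed batch layout
            loss = criterion(model(feats.to(device)), targets.to(device))
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return tot_loss / tot_frames, peak_mb  # e.g. loss=0.2105 over 944034 frames, 9968MB
```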
+2024-08-03 07:32:45,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=64614.0, ans=0.0
+2024-08-03 07:32:51,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=64614.0, ans=0.125
+2024-08-03 07:32:59,073 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.354e+02 1.544e+02 1.924e+02 3.300e+02, threshold=3.088e+02, percent-clipped=4.0
+2024-08-03 07:33:02,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=64687.333333333336, ans=0.2
+2024-08-03 07:33:03,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=64687.333333333336, ans=0.0
+2024-08-03 07:33:07,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=10.0
+2024-08-03 07:33:14,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=64724.0, ans=0.125
+2024-08-03 07:33:16,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=64724.0, ans=0.0
+2024-08-03 07:33:18,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=64760.666666666664, ans=0.125
+2024-08-03 07:33:18,950 INFO [train.py:1114] (3/4) Epoch 5, batch 3100, loss[loss=0.2654, simple_loss=0.331, pruned_loss=0.09991, over 13276.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3295, pruned_loss=0.09508, over 2627928.20 frames. ], batch size: 46, lr: 2.28e-02, grad_scale: 16.0
+2024-08-03 07:33:23,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.32 vs. limit=15.0
+2024-08-03 07:33:29,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=64797.333333333336, ans=0.125
+2024-08-03 07:33:43,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64834.0, ans=0.1
+2024-08-03 07:33:44,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64870.666666666664, ans=0.125
+2024-08-03 07:33:47,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64870.666666666664, ans=0.1
+2024-08-03 07:33:48,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=64870.666666666664, ans=0.0
+2024-08-03 07:34:01,585 INFO [train.py:1114] (3/4) Epoch 5, batch 3150, loss[loss=0.2554, simple_loss=0.3319, pruned_loss=0.08942, over 13103.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3295, pruned_loss=0.09496, over 2629482.94 frames. ], batch size: 48, lr: 2.28e-02, grad_scale: 16.0
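Most of the INFO traffic comes from `scaling.py` printing `ScheduledFloat` values: schedule-controlled hyperparameters (dropout probabilities, skip rates, balancer and whitening limits) that change as the global `batch_count` grows, which is why the logged `ans` values drift over training. A minimal sketch of such a schedule, assuming piecewise-linear interpolation over batch count (inferred from the logged behaviour, not copied from `scaling.py`):

```python
class ScheduledFloat:
    """A float hyperparameter interpolated piecewise-linearly over batch count."""

    def __init__(self, *points):
        # points: (batch_count, value) pairs, e.g. (0.0, 0.125), (60000.0, 0.0)
        self.points = sorted(points)
        self.batch_count = 0.0  # advanced by the training loop

    def __float__(self):
        x, pts = self.batch_count, self.points
        if x <= pts[0][0]:
            return float(pts[0][1])
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x <= x1:
                return float(y0 + (y1 - y0) * (x - x0) / (x1 - x0))
        return float(pts[-1][1])

# Hypothetical usage: a skip rate annealed to zero over the first 60k batches.
rate = ScheduledFloat((0.0, 0.125), (60000.0, 0.0))
rate.batch_count = 57317.333333333336
print(float(rate))  # the kind of value logged as "ans=..."
```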
+2024-08-03 07:34:12,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=64980.666666666664, ans=0.0
+2024-08-03 07:34:24,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=65017.333333333336, ans=0.125
+2024-08-03 07:34:25,117 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.369e+02 1.582e+02 1.897e+02 3.787e+02, threshold=3.164e+02, percent-clipped=5.0
+2024-08-03 07:34:25,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0
+2024-08-03 07:34:37,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=65054.0, ans=0.125
+2024-08-03 07:34:41,136 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.11 vs. limit=6.0
+2024-08-03 07:34:43,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0
+2024-08-03 07:34:47,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=65090.666666666664, ans=0.125
+2024-08-03 07:34:51,989 INFO [train.py:1114] (3/4) Epoch 5, batch 3200, loss[loss=0.2317, simple_loss=0.3122, pruned_loss=0.07563, over 13550.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3289, pruned_loss=0.09471, over 2635434.80 frames. ], batch size: 37, lr: 2.28e-02, grad_scale: 32.0
+2024-08-03 07:34:52,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=65127.333333333336, ans=0.125
+2024-08-03 07:35:04,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=65164.0, ans=0.125
+2024-08-03 07:35:05,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=65164.0, ans=0.0
+2024-08-03 07:35:28,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=65274.0, ans=0.125
+2024-08-03 07:35:32,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=65274.0, ans=0.125
+2024-08-03 07:35:36,015 INFO [train.py:1114] (3/4) Epoch 5, batch 3250, loss[loss=0.267, simple_loss=0.3385, pruned_loss=0.09777, over 13388.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3298, pruned_loss=0.09513, over 2639518.22 frames. ], batch size: 38, lr: 2.27e-02, grad_scale: 32.0
+2024-08-03 07:35:42,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65310.666666666664, ans=0.125
+2024-08-03 07:35:45,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=65347.333333333336, ans=0.125
+2024-08-03 07:36:00,546 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.291e+02 1.478e+02 1.851e+02 2.616e+02, threshold=2.956e+02, percent-clipped=0.0
+2024-08-03 07:36:03,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.51 vs. 
limit=12.0 +2024-08-03 07:36:06,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=65420.666666666664, ans=0.125 +2024-08-03 07:36:20,443 INFO [train.py:1114] (3/4) Epoch 5, batch 3300, loss[loss=0.2921, simple_loss=0.3523, pruned_loss=0.116, over 12880.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3284, pruned_loss=0.09459, over 2640317.05 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:36:24,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=65494.0, ans=0.0 +2024-08-03 07:36:25,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=65494.0, ans=0.0 +2024-08-03 07:36:27,209 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 07:36:30,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=65530.666666666664, ans=0.125 +2024-08-03 07:36:37,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=65567.33333333333, ans=0.125 +2024-08-03 07:36:47,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=65604.0, ans=0.125 +2024-08-03 07:37:00,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=65640.66666666667, ans=0.0 +2024-08-03 07:37:03,882 INFO [train.py:1114] (3/4) Epoch 5, batch 3350, loss[loss=0.2674, simple_loss=0.3379, pruned_loss=0.09849, over 13046.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3294, pruned_loss=0.09541, over 2628441.76 frames. ], batch size: 48, lr: 2.27e-02, grad_scale: 32.0 +2024-08-03 07:37:12,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=65714.0, ans=0.125 +2024-08-03 07:37:14,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65714.0, ans=0.1 +2024-08-03 07:37:19,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.90 vs. limit=10.0 +2024-08-03 07:37:20,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=65750.66666666667, ans=0.0 +2024-08-03 07:37:26,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=65750.66666666667, ans=0.0 +2024-08-03 07:37:26,823 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.375e+02 1.557e+02 1.947e+02 3.831e+02, threshold=3.114e+02, percent-clipped=2.0 +2024-08-03 07:37:33,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=65787.33333333333, ans=0.125 +2024-08-03 07:37:38,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=65824.0, ans=10.0 +2024-08-03 07:37:46,246 INFO [train.py:1114] (3/4) Epoch 5, batch 3400, loss[loss=0.2381, simple_loss=0.3046, pruned_loss=0.08576, over 13564.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3283, pruned_loss=0.09466, over 2624480.72 frames. 
], batch size: 31, lr: 2.27e-02, grad_scale: 32.0
+2024-08-03 07:37:51,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=65860.66666666667, ans=0.0
+2024-08-03 07:37:51,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=65860.66666666667, ans=0.125
+2024-08-03 07:37:52,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=65860.66666666667, ans=0.2
+2024-08-03 07:37:53,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=65860.66666666667, ans=0.125
+2024-08-03 07:37:53,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=65897.33333333333, ans=0.2
+2024-08-03 07:37:55,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65897.33333333333, ans=0.1
+2024-08-03 07:38:05,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=65934.0, ans=0.0
+2024-08-03 07:38:13,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.84 vs. limit=10.0
+2024-08-03 07:38:19,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=66007.33333333333, ans=0.125
+2024-08-03 07:38:20,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=66007.33333333333, ans=0.0
+2024-08-03 07:38:28,628 INFO [train.py:1114] (3/4) Epoch 5, batch 3450, loss[loss=0.2584, simple_loss=0.3347, pruned_loss=0.09104, over 12902.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3286, pruned_loss=0.09474, over 2628528.89 frames. ], batch size: 52, lr: 2.26e-02, grad_scale: 32.0
+2024-08-03 07:38:40,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=66080.66666666667, ans=0.125
+2024-08-03 07:38:40,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.99 vs. limit=15.0
+2024-08-03 07:38:42,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=66080.66666666667, ans=0.04949747468305833
+2024-08-03 07:38:51,386 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.367e+02 1.647e+02 2.162e+02 3.510e+02, threshold=3.294e+02, percent-clipped=1.0
+2024-08-03 07:39:03,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66190.66666666667, ans=0.125
+2024-08-03 07:39:10,863 INFO [train.py:1114] (3/4) Epoch 5, batch 3500, loss[loss=0.2354, simple_loss=0.314, pruned_loss=0.07844, over 13539.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3279, pruned_loss=0.09475, over 2629966.82 frames. ], batch size: 34, lr: 2.26e-02, grad_scale: 32.0
+2024-08-03 07:39:11,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=66227.33333333333, ans=0.125
+2024-08-03 07:39:36,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66300.66666666667, ans=0.125
+2024-08-03 07:39:55,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=66337.33333333333, ans=0.025
+2024-08-03 07:40:05,799 INFO [train.py:1114] (3/4) Epoch 5, batch 3550, loss[loss=0.2968, simple_loss=0.3582, pruned_loss=0.1177, over 12606.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3314, pruned_loss=0.09667, over 2628386.46 frames. ], batch size: 58, lr: 2.26e-02, grad_scale: 16.0
+2024-08-03 07:40:05,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=66410.66666666667, ans=0.125
+2024-08-03 07:40:32,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=66447.33333333333, ans=0.2
+2024-08-03 07:40:41,450 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.465e+02 1.682e+02 2.236e+02 4.572e+02, threshold=3.363e+02, percent-clipped=5.0
+2024-08-03 07:40:46,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=12.0
+2024-08-03 07:40:57,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.58 vs. limit=15.0
+2024-08-03 07:40:57,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66557.33333333333, ans=0.1
+2024-08-03 07:45:32,996 INFO [train.py:1114] (3/4) Epoch 5, batch 3600, loss[loss=0.3172, simple_loss=0.3619, pruned_loss=0.1362, over 9275.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3377, pruned_loss=0.1031, over 2486129.58 frames. ], batch size: 97, lr: 2.26e-02, grad_scale: 32.0
+2024-08-03 07:45:37,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=66594.0, ans=0.125
+2024-08-03 07:45:39,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=15.0
+2024-08-03 07:45:44,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=66630.66666666667, ans=0.05
+2024-08-03 07:46:02,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.62 vs. limit=22.5
+2024-08-03 07:46:02,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=66704.0, ans=0.0
+2024-08-03 07:46:04,416 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:47:41,878 INFO [train.py:1114] (3/4) Epoch 6, batch 0, loss[loss=0.2453, simple_loss=0.3142, pruned_loss=0.08825, over 13323.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3142, pruned_loss=0.08825, over 13323.00 frames. ], batch size: 33, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:47:41,878 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 07:47:51,612 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.2159, simple_loss=0.3144, pruned_loss=0.05871, over 944034.00 frames.
+2024-08-03 07:47:51,613 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 07:47:54,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=66744.33333333333, ans=0.2
+2024-08-03 07:48:00,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=66781.0, ans=0.025
+2024-08-03 07:48:03,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=66781.0, ans=0.0
+2024-08-03 07:48:21,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=66854.33333333333, ans=0.0
+2024-08-03 07:48:28,347 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.298e+02 1.438e+02 1.681e+02 2.917e+02, threshold=2.876e+02, percent-clipped=0.0
+2024-08-03 07:48:29,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=66891.0, ans=0.025
+2024-08-03 07:48:33,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66891.0, ans=0.1
+2024-08-03 07:48:39,356 INFO [train.py:1114] (3/4) Epoch 6, batch 50, loss[loss=0.2021, simple_loss=0.2762, pruned_loss=0.064, over 13438.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3349, pruned_loss=0.09758, over 577715.50 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:48:39,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=66927.66666666667, ans=0.125
+2024-08-03 07:48:59,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0
+2024-08-03 07:49:13,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=67001.0, ans=0.015
+2024-08-03 07:49:28,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.35 vs. limit=15.0
+2024-08-03 07:49:30,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=67074.33333333333, ans=0.125
+2024-08-03 07:49:42,389 INFO [train.py:1114] (3/4) Epoch 6, batch 100, loss[loss=0.2402, simple_loss=0.307, pruned_loss=0.08673, over 13519.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.334, pruned_loss=0.09625, over 1025410.73 frames. ], batch size: 35, lr: 2.10e-02, grad_scale: 32.0
+2024-08-03 07:50:04,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67147.66666666667, ans=0.1
+2024-08-03 07:50:11,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=67184.33333333333, ans=0.125
+2024-08-03 07:50:22,471 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.327e+02 1.627e+02 2.047e+02 3.063e+02, threshold=3.255e+02, percent-clipped=2.0
+2024-08-03 07:50:38,358 INFO [train.py:1114] (3/4) Epoch 6, batch 150, loss[loss=0.2643, simple_loss=0.3329, pruned_loss=0.09785, over 13399.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3302, pruned_loss=0.09353, over 1386766.40 frames. ], batch size: 32, lr: 2.10e-02, grad_scale: 16.0
+2024-08-03 07:50:56,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=67294.33333333333, ans=0.125
+2024-08-03 07:50:56,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=67294.33333333333, ans=0.125
+2024-08-03 07:51:23,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67367.66666666667, ans=0.125
+2024-08-03 07:51:39,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=67441.0, ans=0.0
+2024-08-03 07:51:43,469 INFO [train.py:1114] (3/4) Epoch 6, batch 200, loss[loss=0.2644, simple_loss=0.3442, pruned_loss=0.09226, over 12445.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3273, pruned_loss=0.09227, over 1665718.06 frames. ], batch size: 58, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:52:16,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67587.66666666667, ans=0.125
+2024-08-03 07:52:22,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=67587.66666666667, ans=0.125
+2024-08-03 07:52:22,927 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.261e+02 1.408e+02 1.836e+02 2.572e+02, threshold=2.817e+02, percent-clipped=0.0
+2024-08-03 07:52:32,916 INFO [train.py:1114] (3/4) Epoch 6, batch 250, loss[loss=0.2676, simple_loss=0.3419, pruned_loss=0.09672, over 13316.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3264, pruned_loss=0.09115, over 1884956.23 frames. ], batch size: 46, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:52:36,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67661.0, ans=0.125
+2024-08-03 07:52:39,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=12.0
+2024-08-03 07:53:07,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=67734.33333333333, ans=0.2
+2024-08-03 07:53:08,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=67734.33333333333, ans=10.0
+2024-08-03 07:53:19,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.42 vs. limit=8.0
+2024-08-03 07:53:27,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67807.66666666667, ans=0.1
+2024-08-03 07:53:28,621 INFO [train.py:1114] (3/4) Epoch 6, batch 300, loss[loss=0.2699, simple_loss=0.3427, pruned_loss=0.09857, over 13448.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3251, pruned_loss=0.09073, over 2052514.45 frames. ], batch size: 42, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:53:49,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=12.0
+2024-08-03 07:54:03,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=67954.33333333333, ans=0.1
+2024-08-03 07:54:04,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67954.33333333333, ans=0.1
+2024-08-03 07:54:07,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67954.33333333333, ans=0.125
+2024-08-03 07:54:08,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.256e+02 1.436e+02 1.831e+02 3.083e+02, threshold=2.872e+02, percent-clipped=2.0
+2024-08-03 07:54:17,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67991.0, ans=0.125
+2024-08-03 07:54:18,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=68027.66666666667, ans=0.2
+2024-08-03 07:54:19,055 INFO [train.py:1114] (3/4) Epoch 6, batch 350, loss[loss=0.2166, simple_loss=0.296, pruned_loss=0.0686, over 13583.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3255, pruned_loss=0.09116, over 2182915.24 frames. ], batch size: 33, lr: 2.09e-02, grad_scale: 16.0
+2024-08-03 07:54:29,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0
+2024-08-03 07:54:31,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68064.33333333333, ans=0.1
+2024-08-03 07:54:36,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=15.0
+2024-08-03 07:54:38,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=68101.0, ans=0.125
+2024-08-03 07:54:46,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68137.66666666667, ans=0.1
+2024-08-03 07:54:53,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=68137.66666666667, ans=0.125
+2024-08-03 07:54:58,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=68174.33333333333, ans=0.125
+2024-08-03 07:55:06,637 INFO [train.py:1114] (3/4) Epoch 6, batch 400, loss[loss=0.2369, simple_loss=0.3187, pruned_loss=0.07753, over 13374.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3256, pruned_loss=0.09111, over 2287007.55 frames. ], batch size: 37, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:55:12,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.79 vs. limit=15.0
+2024-08-03 07:55:15,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=68247.66666666667, ans=0.0
+2024-08-03 07:55:23,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=68247.66666666667, ans=0.125
+2024-08-03 07:55:25,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0
+2024-08-03 07:55:42,227 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.404e+02 1.670e+02 2.079e+02 3.576e+02, threshold=3.340e+02, percent-clipped=3.0
+2024-08-03 07:55:50,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=68357.66666666667, ans=0.125
+2024-08-03 07:55:51,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=68394.33333333333, ans=0.2
+2024-08-03 07:55:52,465 INFO [train.py:1114] (3/4) Epoch 6, batch 450, loss[loss=0.286, simple_loss=0.3595, pruned_loss=0.1062, over 13543.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3259, pruned_loss=0.09188, over 2361113.71 frames. ], batch size: 38, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:56:20,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68467.66666666667, ans=0.0
+2024-08-03 07:56:36,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.95 vs. limit=22.5
+2024-08-03 07:56:38,570 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 07:56:41,074 INFO [train.py:1114] (3/4) Epoch 6, batch 500, loss[loss=0.2371, simple_loss=0.3234, pruned_loss=0.07542, over 13383.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3246, pruned_loss=0.09085, over 2426261.02 frames. ], batch size: 43, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:56:51,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68614.33333333333, ans=0.125
+2024-08-03 07:57:10,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68687.66666666667, ans=0.1
+2024-08-03 07:57:10,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68687.66666666667, ans=0.125
+2024-08-03 07:57:15,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.283e+02 1.463e+02 1.945e+02 3.864e+02, threshold=2.927e+02, percent-clipped=1.0
+2024-08-03 07:57:17,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=68724.33333333333, ans=0.125
+2024-08-03 07:57:19,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0
+2024-08-03 07:57:24,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=68724.33333333333, ans=0.025
+2024-08-03 07:57:25,756 INFO [train.py:1114] (3/4) Epoch 6, batch 550, loss[loss=0.2647, simple_loss=0.3479, pruned_loss=0.09076, over 12938.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3246, pruned_loss=0.09052, over 2468789.24 frames. ], batch size: 48, lr: 2.08e-02, grad_scale: 32.0
+2024-08-03 07:57:52,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=68834.33333333333, ans=0.2
+2024-08-03 07:58:12,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=68907.66666666667, ans=0.2
+2024-08-03 07:58:14,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=68944.33333333333, ans=0.0
+2024-08-03 07:58:15,098 INFO [train.py:1114] (3/4) Epoch 6, batch 600, loss[loss=0.2776, simple_loss=0.3578, pruned_loss=0.09877, over 13316.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3251, pruned_loss=0.09086, over 2508937.64 frames. ], batch size: 46, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:58:27,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=6.0
+2024-08-03 07:58:32,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68981.0, ans=0.125
+2024-08-03 07:58:38,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=69017.66666666667, ans=0.125
+2024-08-03 07:58:40,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=69017.66666666667, ans=0.125
+2024-08-03 07:58:46,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69054.33333333333, ans=0.1
+2024-08-03 07:58:51,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=69054.33333333333, ans=10.0
+2024-08-03 07:58:52,941 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.444e+02 1.751e+02 2.367e+02 5.361e+02, threshold=3.502e+02, percent-clipped=14.0
+2024-08-03 07:58:54,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=69091.0, ans=0.0
+2024-08-03 07:58:56,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=69091.0, ans=0.125
+2024-08-03 07:58:57,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=69091.0, ans=0.025
+2024-08-03 07:59:01,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=69091.0, ans=0.1
+2024-08-03 07:59:03,045 INFO [train.py:1114] (3/4) Epoch 6, batch 650, loss[loss=0.2739, simple_loss=0.3409, pruned_loss=0.1035, over 13539.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3239, pruned_loss=0.09004, over 2544196.65 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 07:59:20,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=69164.33333333333, ans=0.09899494936611666
+2024-08-03 07:59:54,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.18 vs. limit=22.5
+2024-08-03 07:59:54,468 INFO [train.py:1114] (3/4) Epoch 6, batch 700, loss[loss=0.2415, simple_loss=0.317, pruned_loss=0.083, over 13536.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3236, pruned_loss=0.08964, over 2566333.78 frames. ], batch size: 35, lr: 2.07e-02, grad_scale: 32.0
+2024-08-03 08:00:01,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=69311.0, ans=0.0
+2024-08-03 08:00:04,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=69347.66666666667, ans=0.125
+2024-08-03 08:00:06,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=69347.66666666667, ans=0.0
+2024-08-03 08:00:18,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=69384.33333333333, ans=0.125
+2024-08-03 08:00:20,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.90 vs. limit=15.0
+2024-08-03 08:00:30,544 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.317e+02 1.524e+02 2.110e+02 4.129e+02, threshold=3.048e+02, percent-clipped=1.0
+2024-08-03 08:00:34,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=69457.66666666667, ans=0.025
+2024-08-03 08:00:39,420 INFO [train.py:1114] (3/4) Epoch 6, batch 750, loss[loss=0.2275, simple_loss=0.3119, pruned_loss=0.07154, over 13356.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3233, pruned_loss=0.08976, over 2583882.59 frames. ], batch size: 37, lr: 2.07e-02, grad_scale: 16.0
+2024-08-03 08:00:49,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=69531.0, ans=0.0
+2024-08-03 08:01:04,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.74 vs. limit=22.5
+2024-08-03 08:01:17,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=69641.0, ans=0.0
+2024-08-03 08:01:23,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.87 vs. limit=6.0
+2024-08-03 08:01:25,653 INFO [train.py:1114] (3/4) Epoch 6, batch 800, loss[loss=0.2266, simple_loss=0.2974, pruned_loss=0.07795, over 13334.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.323, pruned_loss=0.09003, over 2598656.01 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:01:27,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=69677.66666666667, ans=0.2
+2024-08-03 08:01:32,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.61 vs. limit=15.0
+2024-08-03 08:01:35,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=69677.66666666667, ans=0.125
+2024-08-03 08:01:36,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=69714.33333333333, ans=0.2
+2024-08-03 08:02:04,559 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.299e+02 1.541e+02 1.861e+02 5.767e+02, threshold=3.082e+02, percent-clipped=2.0
+2024-08-03 08:02:09,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=69824.33333333333, ans=0.0
+2024-08-03 08:02:14,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=69861.0, ans=0.2
+2024-08-03 08:02:15,431 INFO [train.py:1114] (3/4) Epoch 6, batch 850, loss[loss=0.2315, simple_loss=0.3126, pruned_loss=0.07518, over 13321.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.323, pruned_loss=0.09007, over 2610965.62 frames. ], batch size: 40, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:02:24,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69897.66666666667, ans=0.125
+2024-08-03 08:02:39,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.67 vs. limit=10.0
+2024-08-03 08:02:43,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=69934.33333333333, ans=0.05
+2024-08-03 08:02:45,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=69971.0, ans=0.025
+2024-08-03 08:02:51,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=69971.0, ans=0.05
+2024-08-03 08:02:58,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=70007.66666666667, ans=0.07
+2024-08-03 08:02:59,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=70007.66666666667, ans=0.0
+2024-08-03 08:03:05,401 INFO [train.py:1114] (3/4) Epoch 6, batch 900, loss[loss=0.2324, simple_loss=0.2961, pruned_loss=0.08434, over 13343.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.323, pruned_loss=0.09012, over 2613747.75 frames. ], batch size: 33, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:03:13,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=70081.0, ans=0.125
+2024-08-03 08:03:14,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=70081.0, ans=0.125
+2024-08-03 08:03:15,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0
+2024-08-03 08:03:28,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=70117.66666666667, ans=0.0
+2024-08-03 08:03:47,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.449e+02 1.731e+02 2.120e+02 4.168e+02, threshold=3.462e+02, percent-clipped=3.0
+2024-08-03 08:03:47,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=70191.0, ans=0.0
+2024-08-03 08:03:56,471 INFO [train.py:1114] (3/4) Epoch 6, batch 950, loss[loss=0.2402, simple_loss=0.3111, pruned_loss=0.08463, over 13549.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3224, pruned_loss=0.08949, over 2614228.88 frames. ], batch size: 34, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:04:02,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70227.66666666667, ans=0.125
+2024-08-03 08:04:04,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=70264.33333333333, ans=0.0
+2024-08-03 08:04:05,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=70264.33333333333, ans=0.125
+2024-08-03 08:04:14,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=70301.0, ans=0.0
+2024-08-03 08:04:18,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=70301.0, ans=0.2
+2024-08-03 08:04:18,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=70301.0, ans=0.02
+2024-08-03 08:04:33,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=70374.33333333333, ans=0.125
+2024-08-03 08:04:36,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.87 vs. limit=15.0
+2024-08-03 08:04:43,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=70411.0, ans=0.125
+2024-08-03 08:04:43,846 INFO [train.py:1114] (3/4) Epoch 6, batch 1000, loss[loss=0.2234, simple_loss=0.2914, pruned_loss=0.07766, over 13374.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3232, pruned_loss=0.09012, over 2612068.95 frames. ], batch size: 35, lr: 2.06e-02, grad_scale: 32.0
+2024-08-03 08:04:55,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=70447.66666666667, ans=0.125
+2024-08-03 08:05:09,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=70484.33333333333, ans=0.07
+2024-08-03 08:05:20,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=70521.0, ans=0.125
+2024-08-03 08:05:21,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=70521.0, ans=0.125
+2024-08-03 08:05:22,972 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.312e+02 1.654e+02 1.929e+02 3.115e+02, threshold=3.308e+02, percent-clipped=0.0
+2024-08-03 08:05:34,389 INFO [train.py:1114] (3/4) Epoch 6, batch 1050, loss[loss=0.2691, simple_loss=0.3371, pruned_loss=0.1005, over 13581.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.322, pruned_loss=0.08951, over 2615792.83 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:05:35,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.51 vs. limit=15.0
+2024-08-03 08:05:51,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=15.0
+2024-08-03 08:05:54,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=70667.66666666667, ans=0.2
+2024-08-03 08:06:10,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=70704.33333333333, ans=0.125
+2024-08-03 08:06:21,130 INFO [train.py:1114] (3/4) Epoch 6, batch 1100, loss[loss=0.252, simple_loss=0.3267, pruned_loss=0.08861, over 13566.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3219, pruned_loss=0.08915, over 2620024.04 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:06:21,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=70777.66666666667, ans=0.125
+2024-08-03 08:06:27,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70777.66666666667, ans=0.125
+2024-08-03 08:06:38,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=70851.0, ans=0.0
+2024-08-03 08:06:44,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0
+2024-08-03 08:06:44,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=70851.0, ans=0.04949747468305833
+2024-08-03 08:06:57,401 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.320e+02 1.582e+02 2.063e+02 3.090e+02, threshold=3.163e+02, percent-clipped=0.0
+2024-08-03 08:07:08,222 INFO [train.py:1114] (3/4) Epoch 6, batch 1150, loss[loss=0.2269, simple_loss=0.3033, pruned_loss=0.07523, over 13557.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3215, pruned_loss=0.08904, over 2618826.10 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:07:17,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.10 vs. limit=15.0
+2024-08-03 08:07:37,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71071.0, ans=0.1
+2024-08-03 08:07:55,774 INFO [train.py:1114] (3/4) Epoch 6, batch 1200, loss[loss=0.2867, simple_loss=0.3488, pruned_loss=0.1123, over 13574.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3225, pruned_loss=0.08907, over 2615865.25 frames. ], batch size: 39, lr: 2.05e-02, grad_scale: 32.0
+2024-08-03 08:07:57,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=71144.33333333333, ans=0.0
+2024-08-03 08:07:59,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=71144.33333333333, ans=0.0
+2024-08-03 08:08:06,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=71181.0, ans=0.0
+2024-08-03 08:08:23,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=71254.33333333333, ans=15.0
+2024-08-03 08:08:31,461 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.251e+02 1.398e+02 1.677e+02 2.839e+02, threshold=2.796e+02, percent-clipped=0.0
+2024-08-03 08:08:38,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.84 vs. limit=15.0
+2024-08-03 08:08:40,491 INFO [train.py:1114] (3/4) Epoch 6, batch 1250, loss[loss=0.2993, simple_loss=0.36, pruned_loss=0.1193, over 13449.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3234, pruned_loss=0.0895, over 2627641.91 frames. ], batch size: 42, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:08:46,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=71327.66666666667, ans=0.125
+2024-08-03 08:08:55,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71364.33333333333, ans=0.125
+2024-08-03 08:08:56,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71364.33333333333, ans=0.1
+2024-08-03 08:09:13,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=71437.66666666667, ans=0.2
+2024-08-03 08:09:18,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.29 vs. limit=6.0
+2024-08-03 08:09:26,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=71474.33333333333, ans=0.2
+2024-08-03 08:09:29,177 INFO [train.py:1114] (3/4) Epoch 6, batch 1300, loss[loss=0.2427, simple_loss=0.3259, pruned_loss=0.07974, over 12974.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3228, pruned_loss=0.08904, over 2630973.98 frames. ], batch size: 52, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:09:33,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=71511.0, ans=0.2
+2024-08-03 08:09:43,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=71547.66666666667, ans=0.0
+2024-08-03 08:09:45,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=71547.66666666667, ans=0.2
+2024-08-03 08:09:53,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71584.33333333333, ans=0.125
+2024-08-03 08:10:14,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71621.0, ans=0.125
+2024-08-03 08:10:15,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71621.0, ans=0.1
+2024-08-03 08:10:22,005 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.330e+02 1.619e+02 2.134e+02 3.747e+02, threshold=3.238e+02, percent-clipped=6.0
+2024-08-03 08:10:24,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=71657.66666666667, ans=0.125
+2024-08-03 08:10:31,285 INFO [train.py:1114] (3/4) Epoch 6, batch 1350, loss[loss=0.2527, simple_loss=0.3197, pruned_loss=0.0928, over 13555.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3217, pruned_loss=0.08846, over 2639220.68 frames. ], batch size: 37, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:10:54,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=71731.0, ans=0.0
+2024-08-03 08:11:22,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=71767.66666666667, ans=0.125
+2024-08-03 08:11:32,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=71804.33333333333, ans=0.125
+2024-08-03 08:11:40,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=15.0
+2024-08-03 08:11:41,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=71841.0, ans=0.125
+2024-08-03 08:11:47,827 INFO [train.py:1114] (3/4) Epoch 6, batch 1400, loss[loss=0.2482, simple_loss=0.3068, pruned_loss=0.0948, over 13268.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.321, pruned_loss=0.0882, over 2642881.98 frames. ], batch size: 31, lr: 2.04e-02, grad_scale: 32.0
+2024-08-03 08:11:49,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.69 vs. limit=22.5
+2024-08-03 08:11:52,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=71877.66666666667, ans=0.125
+2024-08-03 08:11:55,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=71877.66666666667, ans=0.125
+2024-08-03 08:12:17,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71987.66666666667, ans=0.125
+2024-08-03 08:12:18,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=71987.66666666667, ans=0.125
+2024-08-03 08:12:24,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.721e+01 1.334e+02 1.562e+02 1.833e+02 2.897e+02, threshold=3.124e+02, percent-clipped=0.0
+2024-08-03 08:12:37,232 INFO [train.py:1114] (3/4) Epoch 6, batch 1450, loss[loss=0.2743, simple_loss=0.3448, pruned_loss=0.1019, over 13437.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3209, pruned_loss=0.0883, over 2642109.89 frames. ], batch size: 43, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:13:01,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72134.33333333333, ans=0.1
+2024-08-03 08:13:22,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=72207.66666666667, ans=0.0
+2024-08-03 08:13:24,200 INFO [train.py:1114] (3/4) Epoch 6, batch 1500, loss[loss=0.2859, simple_loss=0.3587, pruned_loss=0.1065, over 13407.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3218, pruned_loss=0.08842, over 2641689.77 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:13:56,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=72354.33333333333, ans=0.0
+2024-08-03 08:14:02,723 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.371e+02 1.678e+02 2.032e+02 3.850e+02, threshold=3.356e+02, percent-clipped=2.0
+2024-08-03 08:14:03,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=72391.0, ans=0.5
+2024-08-03 08:14:06,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=72391.0, ans=0.2
+2024-08-03 08:14:07,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=15.0
+2024-08-03 08:14:11,870 INFO [train.py:1114] (3/4) Epoch 6, batch 1550, loss[loss=0.2646, simple_loss=0.3449, pruned_loss=0.09214, over 13371.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.322, pruned_loss=0.08881, over 2631266.12 frames. ], batch size: 41, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:14:15,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.44 vs. limit=10.0
+2024-08-03 08:14:32,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=72501.0, ans=0.0
+2024-08-03 08:14:57,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=72574.33333333333, ans=0.025
+2024-08-03 08:14:59,388 INFO [train.py:1114] (3/4) Epoch 6, batch 1600, loss[loss=0.2668, simple_loss=0.339, pruned_loss=0.09723, over 13575.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3225, pruned_loss=0.08942, over 2625723.59 frames. ], batch size: 39, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:15:09,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=72647.66666666667, ans=0.0
+2024-08-03 08:15:18,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=12.0
+2024-08-03 08:15:25,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=72684.33333333333, ans=0.0
+2024-08-03 08:15:26,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0
+2024-08-03 08:15:38,175 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.361e+02 1.766e+02 2.117e+02 3.688e+02, threshold=3.533e+02, percent-clipped=3.0
+2024-08-03 08:15:46,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.64 vs. limit=22.5
+2024-08-03 08:15:47,188 INFO [train.py:1114] (3/4) Epoch 6, batch 1650, loss[loss=0.2711, simple_loss=0.3467, pruned_loss=0.09772, over 13324.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3226, pruned_loss=0.08975, over 2622106.21 frames. ], batch size: 40, lr: 2.03e-02, grad_scale: 32.0
+2024-08-03 08:16:01,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.95 vs. limit=15.0
+2024-08-03 08:16:08,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=72867.66666666667, ans=0.125
+2024-08-03 08:16:11,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=72867.66666666667, ans=0.125
+2024-08-03 08:16:17,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=72904.33333333333, ans=0.025
+2024-08-03 08:16:32,605 INFO [train.py:1114] (3/4) Epoch 6, batch 1700, loss[loss=0.2642, simple_loss=0.3195, pruned_loss=0.1045, over 13255.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3231, pruned_loss=0.08957, over 2630471.98 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0
+2024-08-03 08:16:37,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=72977.66666666667, ans=0.125
+2024-08-03 08:16:39,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=72977.66666666667, ans=0.125
+2024-08-03 08:16:40,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=72977.66666666667, ans=0.0
+2024-08-03 08:16:59,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=73051.0, ans=0.125
+2024-08-03 08:16:59,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=73051.0, ans=0.025
+2024-08-03 08:17:12,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.261e+02 1.452e+02 1.741e+02 3.211e+02, threshold=2.904e+02, percent-clipped=0.0
+2024-08-03 08:17:14,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=73124.33333333333, ans=0.0
+2024-08-03 08:17:21,266 INFO [train.py:1114] (3/4) Epoch 6, batch 1750, loss[loss=0.2406, simple_loss=0.2982, pruned_loss=0.09146, over 13549.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3215, pruned_loss=0.08859, over 2633947.06 frames. ], batch size: 31, lr: 2.02e-02, grad_scale: 32.0
+2024-08-03 08:17:32,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=73197.66666666667, ans=0.125
+2024-08-03 08:17:44,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=73234.33333333333, ans=0.125
+2024-08-03 08:18:08,633 INFO [train.py:1114] (3/4) Epoch 6, batch 1800, loss[loss=0.2586, simple_loss=0.3361, pruned_loss=0.09055, over 13535.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.321, pruned_loss=0.08775, over 2635318.87 frames. ], batch size: 38, lr: 2.02e-02, grad_scale: 32.0
+2024-08-03 08:18:42,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=73454.33333333333, ans=0.125
+2024-08-03 08:18:50,839 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.331e+02 1.549e+02 2.028e+02 3.164e+02, threshold=3.097e+02, percent-clipped=2.0
+2024-08-03 08:18:59,642 INFO [train.py:1114] (3/4) Epoch 6, batch 1850, loss[loss=0.2664, simple_loss=0.3398, pruned_loss=0.0965, over 13385.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3211, pruned_loss=0.08786, over 2638158.95 frames. ], batch size: 39, lr: 2.02e-02, grad_scale: 16.0
+2024-08-03 08:19:08,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=73564.33333333333, ans=0.0
+2024-08-03 08:19:10,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73564.33333333333, ans=0.1
+2024-08-03 08:19:21,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=73601.0, ans=0.0
+2024-08-03 08:19:29,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=73637.66666666667, ans=0.125
+2024-08-03 08:19:35,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.74 vs. limit=15.0
+2024-08-03 08:19:51,017 INFO [train.py:1114] (3/4) Epoch 6, batch 1900, loss[loss=0.2886, simple_loss=0.3655, pruned_loss=0.1058, over 13301.00 frames. ], tot_loss[loss=0.249, simple_loss=0.322, pruned_loss=0.08805, over 2640557.04 frames. ], batch size: 40, lr: 2.01e-02, grad_scale: 16.0
+2024-08-03 08:20:00,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73711.0, ans=0.125
+2024-08-03 08:20:01,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73747.66666666667, ans=0.1
+2024-08-03 08:20:05,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.87 vs. limit=22.5
+2024-08-03 08:20:09,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=73747.66666666667, ans=0.125
+2024-08-03 08:20:12,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=73784.33333333333, ans=0.125
+2024-08-03 08:20:13,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=73784.33333333333, ans=0.0
+2024-08-03 08:20:14,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=73784.33333333333, ans=0.125
+2024-08-03 08:20:20,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=73821.0, ans=0.0
+2024-08-03 08:20:22,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73821.0, ans=0.125
+2024-08-03 08:20:26,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=73821.0, ans=0.125
+2024-08-03 08:20:26,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73821.0, ans=0.125
+2024-08-03 08:20:29,685 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.282e+02 1.594e+02 1.886e+02 3.634e+02, threshold=3.188e+02, percent-clipped=1.0
+2024-08-03 08:21:06,040 INFO [train.py:1114] (3/4) Epoch 6, batch 1950, loss[loss=0.2348, simple_loss=0.3137, pruned_loss=0.07793, over 13548.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3225, pruned_loss=0.08764, over 2647349.59 frames. ], batch size: 36, lr: 2.01e-02, grad_scale: 16.0
+2024-08-03 08:21:13,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=73894.33333333333, ans=0.95
+2024-08-03 08:21:16,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.87 vs. limit=10.0
+2024-08-03 08:21:17,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.81 vs. limit=22.5
+2024-08-03 08:21:39,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73967.66666666667, ans=0.1
+2024-08-03 08:21:40,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0
+2024-08-03 08:21:55,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74041.0, ans=0.1
+2024-08-03 08:22:01,506 INFO [train.py:1114] (3/4) Epoch 6, batch 2000, loss[loss=0.2108, simple_loss=0.2773, pruned_loss=0.07209, over 13530.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.323, pruned_loss=0.08813, over 2637670.94 frames. ], batch size: 31, lr: 2.01e-02, grad_scale: 32.0
+2024-08-03 08:22:09,198 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:22:11,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=74114.33333333333, ans=0.0
+2024-08-03 08:22:40,831 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.304e+02 1.521e+02 1.870e+02 3.402e+02, threshold=3.042e+02, percent-clipped=1.0
+2024-08-03 08:22:44,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=74224.33333333333, ans=0.1
+2024-08-03 08:22:51,648 INFO [train.py:1114] (3/4) Epoch 6, batch 2050, loss[loss=0.2551, simple_loss=0.3205, pruned_loss=0.0948, over 13418.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3215, pruned_loss=0.08767, over 2633498.40 frames. ], batch size: 32, lr: 2.01e-02, grad_scale: 32.0
+2024-08-03 08:22:56,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74261.0, ans=0.1
+2024-08-03 08:23:13,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=74334.33333333333, ans=0.125
+2024-08-03 08:23:23,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=74371.0, ans=0.125
+2024-08-03 08:23:40,282 INFO [train.py:1114] (3/4) Epoch 6, batch 2100, loss[loss=0.2172, simple_loss=0.3009, pruned_loss=0.06678, over 13535.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3207, pruned_loss=0.08734, over 2639824.96 frames. ], batch size: 37, lr: 2.01e-02, grad_scale: 32.0
+2024-08-03 08:23:47,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=74444.33333333333, ans=0.05
+2024-08-03 08:23:51,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=74481.0, ans=0.125
+2024-08-03 08:23:53,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=74481.0, ans=0.125
+2024-08-03 08:23:58,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=74517.66666666667, ans=0.125
+2024-08-03 08:24:02,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=74517.66666666667, ans=0.2
+2024-08-03 08:24:03,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74517.66666666667, ans=0.0
+2024-08-03 08:24:16,934 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.291e+02 1.674e+02 2.132e+02 3.817e+02, threshold=3.348e+02, percent-clipped=5.0
+2024-08-03 08:24:25,015 INFO [train.py:1114] (3/4) Epoch 6, batch 2150, loss[loss=0.274, simple_loss=0.3393, pruned_loss=0.1044, over 13559.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3202, pruned_loss=0.08734, over 2648026.92 frames. ], batch size: 36, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:24:28,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=74627.66666666667, ans=0.025
+2024-08-03 08:24:30,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=74627.66666666667, ans=0.125
+2024-08-03 08:24:33,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=74664.33333333333, ans=0.0
+2024-08-03 08:24:43,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=74701.0, ans=0.025
+2024-08-03 08:24:46,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=74701.0, ans=0.0
+2024-08-03 08:24:47,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=12.0
+2024-08-03 08:25:01,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=74737.66666666667, ans=0.125
+2024-08-03 08:25:03,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=74774.33333333333, ans=0.0
+2024-08-03 08:25:14,036 INFO [train.py:1114] (3/4) Epoch 6, batch 2200, loss[loss=0.2393, simple_loss=0.3186, pruned_loss=0.08002, over 13402.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.32, pruned_loss=0.08691, over 2646341.12 frames. ], batch size: 39, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:25:15,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74811.0, ans=0.125
+2024-08-03 08:25:19,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=74811.0, ans=0.025
+2024-08-03 08:25:22,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=74847.66666666667, ans=0.125
+2024-08-03 08:25:35,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=74884.33333333333, ans=0.025
+2024-08-03 08:25:35,673 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:25:49,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=74957.66666666667, ans=0.125
+2024-08-03 08:25:50,667 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.295e+02 1.544e+02 1.950e+02 3.525e+02, threshold=3.088e+02, percent-clipped=1.0
+2024-08-03 08:25:59,016 INFO [train.py:1114] (3/4) Epoch 6, batch 2250, loss[loss=0.2014, simple_loss=0.2918, pruned_loss=0.05552, over 13354.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3194, pruned_loss=0.08648, over 2644583.88 frames. ], batch size: 37, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:25:59,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=74994.33333333333, ans=0.025
+2024-08-03 08:26:14,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=75031.0, ans=0.0
+2024-08-03 08:26:48,064 INFO [train.py:1114] (3/4) Epoch 6, batch 2300, loss[loss=0.2203, simple_loss=0.2925, pruned_loss=0.07405, over 13579.00 frames. ], tot_loss[loss=0.245, simple_loss=0.318, pruned_loss=0.08606, over 2639616.90 frames. ], batch size: 33, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:26:48,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=75177.66666666667, ans=0.125
+2024-08-03 08:27:09,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=75251.0, ans=0.025
+2024-08-03 08:27:12,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.33 vs. limit=10.0
+2024-08-03 08:27:19,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75287.66666666667, ans=0.1
+2024-08-03 08:27:25,151 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.310e+02 1.601e+02 2.046e+02 3.853e+02, threshold=3.202e+02, percent-clipped=4.0
+2024-08-03 08:27:29,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75324.33333333333, ans=0.125
+2024-08-03 08:27:33,028 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0
+2024-08-03 08:27:33,477 INFO [train.py:1114] (3/4) Epoch 6, batch 2350, loss[loss=0.2541, simple_loss=0.3339, pruned_loss=0.08717, over 13546.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3188, pruned_loss=0.08655, over 2641876.52 frames. ], batch size: 38, lr: 2.00e-02, grad_scale: 32.0
+2024-08-03 08:27:35,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0
+2024-08-03 08:27:44,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=75397.66666666667, ans=0.07
+2024-08-03 08:27:51,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=75434.33333333333, ans=0.125
+2024-08-03 08:27:52,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=75434.33333333333, ans=0.125
+2024-08-03 08:28:13,578 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=12.62 vs. limit=15.0
+2024-08-03 08:28:18,872 INFO [train.py:1114] (3/4) Epoch 6, batch 2400, loss[loss=0.217, simple_loss=0.303, pruned_loss=0.06546, over 13543.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.319, pruned_loss=0.08703, over 2643240.60 frames. ], batch size: 35, lr: 1.99e-02, grad_scale: 32.0
+2024-08-03 08:28:24,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=75544.33333333333, ans=22.5
+2024-08-03 08:28:29,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=75581.0, ans=0.025
+2024-08-03 08:28:49,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=75654.33333333333, ans=0.025
+2024-08-03 08:28:58,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75691.0, ans=0.1
+2024-08-03 08:28:59,502 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.273e+02 1.529e+02 2.027e+02 4.146e+02, threshold=3.058e+02, percent-clipped=9.0
+2024-08-03 08:29:02,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=75691.0, ans=0.125
+2024-08-03 08:29:03,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.28 vs. limit=6.0
+2024-08-03 08:29:07,956 INFO [train.py:1114] (3/4) Epoch 6, batch 2450, loss[loss=0.2567, simple_loss=0.3362, pruned_loss=0.08861, over 13367.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3208, pruned_loss=0.08785, over 2632852.55 frames. ], batch size: 37, lr: 1.99e-02, grad_scale: 32.0
+2024-08-03 08:29:09,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=75727.66666666667, ans=0.125
+2024-08-03 08:29:15,165 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 08:29:18,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.16 vs. limit=15.0
+2024-08-03 08:29:20,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=75764.33333333333, ans=0.2
+2024-08-03 08:29:24,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=15.0
+2024-08-03 08:29:39,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=75837.66666666667, ans=0.025
+2024-08-03 08:29:52,481 INFO [train.py:1114] (3/4) Epoch 6, batch 2500, loss[loss=0.2198, simple_loss=0.3054, pruned_loss=0.06713, over 13394.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3209, pruned_loss=0.08786, over 2636954.90 frames. ], batch size: 39, lr: 1.99e-02, grad_scale: 32.0
+2024-08-03 08:29:58,033 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=1.153e-02
+2024-08-03 08:30:17,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.85 vs. limit=15.0
+2024-08-03 08:30:17,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=75984.33333333333, ans=0.0
+2024-08-03 08:30:20,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0
+2024-08-03 08:30:31,089 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.253e+02 1.529e+02 2.022e+02 3.392e+02, threshold=3.058e+02, percent-clipped=3.0
+2024-08-03 08:30:37,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=76094.33333333333, ans=0.125
+2024-08-03 08:30:38,269 INFO [train.py:1114] (3/4) Epoch 6, batch 2550, loss[loss=0.2219, simple_loss=0.2905, pruned_loss=0.07668, over 13540.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3208, pruned_loss=0.08792, over 2638737.53 frames. ], batch size: 31, lr: 1.99e-02, grad_scale: 16.0
+2024-08-03 08:30:38,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=76094.33333333333, ans=0.2
+2024-08-03 08:30:43,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=76094.33333333333, ans=0.125
+2024-08-03 08:30:46,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.85 vs. limit=15.0
+2024-08-03 08:30:52,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=76131.0, ans=0.125
+2024-08-03 08:30:54,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=76131.0, ans=0.125
+2024-08-03 08:30:57,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.79 vs.
limit=15.0 +2024-08-03 08:31:06,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=76204.33333333333, ans=0.125 +2024-08-03 08:31:08,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=76204.33333333333, ans=0.125 +2024-08-03 08:31:14,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=76204.33333333333, ans=0.125 +2024-08-03 08:31:19,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=76241.0, ans=0.125 +2024-08-03 08:31:25,204 INFO [train.py:1114] (3/4) Epoch 6, batch 2600, loss[loss=0.2482, simple_loss=0.3162, pruned_loss=0.09012, over 13545.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3207, pruned_loss=0.08782, over 2637507.14 frames. ], batch size: 36, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:31:29,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=76277.66666666667, ans=0.125 +2024-08-03 08:31:29,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=76277.66666666667, ans=0.125 +2024-08-03 08:31:40,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=76314.33333333333, ans=0.0 +2024-08-03 08:31:49,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76351.0, ans=0.1 +2024-08-03 08:31:50,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=76387.66666666667, ans=0.0 +2024-08-03 08:32:01,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.310e+02 1.554e+02 1.964e+02 3.750e+02, threshold=3.108e+02, percent-clipped=4.0 +2024-08-03 08:32:08,707 INFO [train.py:1114] (3/4) Epoch 6, batch 2650, loss[loss=0.2552, simple_loss=0.3367, pruned_loss=0.0869, over 13343.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3213, pruned_loss=0.08792, over 2640785.63 frames. ], batch size: 46, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:32:25,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=76534.33333333333, ans=0.04949747468305833 +2024-08-03 08:32:26,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76534.33333333333, ans=0.125 +2024-08-03 08:32:29,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=76534.33333333333, ans=0.5 +2024-08-03 08:32:31,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76534.33333333333, ans=0.1 +2024-08-03 08:32:52,918 INFO [train.py:1114] (3/4) Epoch 6, batch 2700, loss[loss=0.2659, simple_loss=0.3338, pruned_loss=0.09905, over 13547.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3223, pruned_loss=0.08881, over 2637663.79 frames. 
], batch size: 40, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:33:15,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=76717.66666666667, ans=0.125 +2024-08-03 08:33:26,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76754.33333333333, ans=0.1 +2024-08-03 08:33:29,842 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.293e+02 1.589e+02 2.019e+02 3.318e+02, threshold=3.177e+02, percent-clipped=2.0 +2024-08-03 08:33:41,643 INFO [train.py:1114] (3/4) Epoch 6, batch 2750, loss[loss=0.2549, simple_loss=0.3227, pruned_loss=0.09348, over 13327.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3212, pruned_loss=0.08847, over 2635258.39 frames. ], batch size: 34, lr: 1.98e-02, grad_scale: 16.0 +2024-08-03 08:33:44,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=76827.66666666667, ans=0.025 +2024-08-03 08:34:07,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. limit=6.0 +2024-08-03 08:34:14,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.35 vs. limit=15.0 +2024-08-03 08:34:15,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=76974.33333333333, ans=0.125 +2024-08-03 08:34:19,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=76974.33333333333, ans=0.0 +2024-08-03 08:34:25,229 INFO [train.py:1114] (3/4) Epoch 6, batch 2800, loss[loss=0.3032, simple_loss=0.3445, pruned_loss=0.1309, over 8862.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3212, pruned_loss=0.08875, over 2626150.22 frames. ], batch size: 96, lr: 1.98e-02, grad_scale: 32.0 +2024-08-03 08:34:36,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=77047.66666666667, ans=0.2 +2024-08-03 08:34:39,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=77047.66666666667, ans=0.0 +2024-08-03 08:34:44,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=77084.33333333333, ans=0.125 +2024-08-03 08:35:01,671 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.373e+02 1.725e+02 2.259e+02 3.489e+02, threshold=3.451e+02, percent-clipped=3.0 +2024-08-03 08:35:08,643 INFO [train.py:1114] (3/4) Epoch 6, batch 2850, loss[loss=0.2386, simple_loss=0.3049, pruned_loss=0.08613, over 13367.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3217, pruned_loss=0.08922, over 2620785.06 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:18,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=77231.0, ans=0.125 +2024-08-03 08:35:30,365 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.27 vs. 
limit=15.0 +2024-08-03 08:35:48,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77341.0, ans=0.125 +2024-08-03 08:35:48,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=77341.0, ans=0.125 +2024-08-03 08:35:48,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=77341.0, ans=0.0 +2024-08-03 08:35:48,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.67 vs. limit=12.0 +2024-08-03 08:35:49,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=77341.0, ans=0.2 +2024-08-03 08:35:51,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=77341.0, ans=0.025 +2024-08-03 08:35:53,888 INFO [train.py:1114] (3/4) Epoch 6, batch 2900, loss[loss=0.2551, simple_loss=0.3298, pruned_loss=0.09019, over 13367.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3226, pruned_loss=0.08889, over 2631477.26 frames. ], batch size: 36, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:35:58,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=77377.66666666667, ans=0.1 +2024-08-03 08:36:05,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77414.33333333333, ans=0.0 +2024-08-03 08:36:05,564 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.354e-02 +2024-08-03 08:36:06,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=77414.33333333333, ans=0.0 +2024-08-03 08:36:08,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=77414.33333333333, ans=0.0 +2024-08-03 08:36:14,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=77451.0, ans=0.07 +2024-08-03 08:36:15,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=77451.0, ans=0.0 +2024-08-03 08:40:04,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=77487.66666666667, ans=0.125 +2024-08-03 08:40:19,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=77487.66666666667, ans=0.2 +2024-08-03 08:41:05,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77524.33333333333, ans=0.125 +2024-08-03 08:41:07,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=77524.33333333333, ans=0.125 +2024-08-03 08:41:12,656 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.224e+02 1.336e+02 1.587e+02 3.692e+02, threshold=2.672e+02, percent-clipped=1.0 +2024-08-03 08:41:30,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=77561.0, ans=0.125 +2024-08-03 08:41:30,670 INFO [scaling.py:214] (3/4) ScheduledFloat: 
name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=77561.0, ans=0.0 +2024-08-03 08:41:32,114 INFO [train.py:1114] (3/4) Epoch 6, batch 2950, loss[loss=0.2215, simple_loss=0.3, pruned_loss=0.07149, over 13327.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3208, pruned_loss=0.0881, over 2629869.87 frames. ], batch size: 34, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:41:32,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77561.0, ans=0.1 +2024-08-03 08:42:05,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=77597.66666666667, ans=0.125 +2024-08-03 08:42:33,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=77671.0, ans=0.0 +2024-08-03 08:42:46,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=21.96 vs. limit=22.5 +2024-08-03 08:42:56,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=77707.66666666667, ans=0.125 +2024-08-03 08:43:15,146 INFO [train.py:1114] (3/4) Epoch 6, batch 3000, loss[loss=0.2424, simple_loss=0.3224, pruned_loss=0.08118, over 13542.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3197, pruned_loss=0.08775, over 2629796.66 frames. ], batch size: 37, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:43:15,147 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 08:43:32,686 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.2027, simple_loss=0.301, pruned_loss=0.05225, over 944034.00 frames. +2024-08-03 08:43:32,687 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 08:43:32,810 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:43:33,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=77744.33333333333, ans=0.125 +2024-08-03 08:43:56,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=77781.0, ans=0.0 +2024-08-03 08:43:56,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. limit=15.0 +2024-08-03 08:44:12,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=77817.66666666667, ans=0.125 +2024-08-03 08:44:29,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.97 vs. limit=15.0 +2024-08-03 08:44:30,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-08-03 08:44:33,313 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.660e+01 1.264e+02 1.515e+02 1.854e+02 4.431e+02, threshold=3.030e+02, percent-clipped=3.0 +2024-08-03 08:44:34,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. 
limit=15.0 +2024-08-03 08:44:37,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=77891.0, ans=0.05 +2024-08-03 08:44:47,347 INFO [train.py:1114] (3/4) Epoch 6, batch 3050, loss[loss=0.2325, simple_loss=0.3116, pruned_loss=0.07666, over 13523.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3204, pruned_loss=0.08759, over 2626455.18 frames. ], batch size: 35, lr: 1.97e-02, grad_scale: 32.0 +2024-08-03 08:45:04,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.94 vs. limit=10.0 +2024-08-03 08:45:04,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77927.66666666667, ans=0.1 +2024-08-03 08:45:04,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=77927.66666666667, ans=0.05 +2024-08-03 08:48:35,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77964.33333333333, ans=0.1 +2024-08-03 08:49:48,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-08-03 08:49:58,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=6.0 +2024-08-03 08:50:07,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.42 vs. limit=15.0 +2024-08-03 08:50:16,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78074.33333333333, ans=0.125 +2024-08-03 08:50:18,937 INFO [train.py:1114] (3/4) Epoch 6, batch 3100, loss[loss=0.2539, simple_loss=0.3298, pruned_loss=0.08897, over 13291.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3205, pruned_loss=0.08766, over 2626383.26 frames. ], batch size: 46, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:50:24,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78111.0, ans=0.0 +2024-08-03 08:50:55,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78221.0, ans=0.125 +2024-08-03 08:51:00,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=78221.0, ans=0.125 +2024-08-03 08:51:12,347 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.281e+02 1.526e+02 2.102e+02 4.706e+02, threshold=3.052e+02, percent-clipped=7.0 +2024-08-03 08:51:18,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=78257.66666666667, ans=0.05 +2024-08-03 08:51:34,801 INFO [train.py:1114] (3/4) Epoch 6, batch 3150, loss[loss=0.2847, simple_loss=0.3473, pruned_loss=0.111, over 13016.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3198, pruned_loss=0.08667, over 2627896.63 frames. 
], batch size: 48, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:51:36,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0 +2024-08-03 08:51:41,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78294.33333333333, ans=0.125 +2024-08-03 08:51:41,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=78294.33333333333, ans=0.0 +2024-08-03 08:51:42,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=78331.0, ans=0.125 +2024-08-03 08:51:42,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=78331.0, ans=0.05 +2024-08-03 08:52:05,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=78367.66666666667, ans=0.2 +2024-08-03 08:52:33,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=78441.0, ans=0.0 +2024-08-03 08:52:38,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78441.0, ans=0.125 +2024-08-03 08:52:43,925 INFO [train.py:1114] (3/4) Epoch 6, batch 3200, loss[loss=0.2863, simple_loss=0.3473, pruned_loss=0.1126, over 13544.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.319, pruned_loss=0.08638, over 2634653.83 frames. ], batch size: 37, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:52:51,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78477.66666666667, ans=0.125 +2024-08-03 08:53:03,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78514.33333333333, ans=0.0 +2024-08-03 08:57:15,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0 +2024-08-03 08:57:28,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.05 vs. limit=15.0 +2024-08-03 08:57:47,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.34 vs. limit=15.0 +2024-08-03 08:57:47,838 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.879e+01 1.222e+02 1.408e+02 1.742e+02 2.685e+02, threshold=2.816e+02, percent-clipped=0.0 +2024-08-03 08:57:48,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=78624.33333333333, ans=0.125 +2024-08-03 08:57:59,204 INFO [train.py:1114] (3/4) Epoch 6, batch 3250, loss[loss=0.2688, simple_loss=0.3339, pruned_loss=0.1019, over 13400.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3197, pruned_loss=0.08653, over 2639178.38 frames. 
], batch size: 38, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:58:39,745 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 08:58:48,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.98 vs. limit=15.0 +2024-08-03 08:59:05,489 INFO [train.py:1114] (3/4) Epoch 6, batch 3300, loss[loss=0.2725, simple_loss=0.339, pruned_loss=0.103, over 12866.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3188, pruned_loss=0.08618, over 2640667.89 frames. ], batch size: 52, lr: 1.96e-02, grad_scale: 32.0 +2024-08-03 08:59:19,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.82 vs. limit=15.0 +2024-08-03 08:59:21,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.49 vs. limit=12.0 +2024-08-03 08:59:23,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.52 vs. limit=15.0 +2024-08-03 08:59:36,585 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. limit=15.0 +2024-08-03 08:59:42,889 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.439e+02 1.688e+02 2.442e+02 4.060e+02, threshold=3.376e+02, percent-clipped=9.0 +2024-08-03 08:59:53,408 INFO [train.py:1114] (3/4) Epoch 6, batch 3350, loss[loss=0.2393, simple_loss=0.3219, pruned_loss=0.07835, over 13283.00 frames. ], tot_loss[loss=0.247, simple_loss=0.32, pruned_loss=0.08702, over 2629739.00 frames. ], batch size: 49, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 08:59:55,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=79027.66666666667, ans=0.0 +2024-08-03 09:00:03,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0 +2024-08-03 09:00:04,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=79064.33333333333, ans=0.125 +2024-08-03 09:00:05,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=79064.33333333333, ans=0.2 +2024-08-03 09:00:07,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.90 vs. limit=15.0 +2024-08-03 09:00:22,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=79137.66666666667, ans=0.04949747468305833 +2024-08-03 09:00:23,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79137.66666666667, ans=0.125 +2024-08-03 09:00:32,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=79174.33333333333, ans=0.0 +2024-08-03 09:00:36,286 INFO [train.py:1114] (3/4) Epoch 6, batch 3400, loss[loss=0.2091, simple_loss=0.2822, pruned_loss=0.06802, over 13521.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3199, pruned_loss=0.08709, over 2624942.10 frames. 
], batch size: 31, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:00:36,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=79211.0, ans=0.2 +2024-08-03 09:00:52,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=79247.66666666667, ans=0.2 +2024-08-03 09:01:12,726 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.222e+02 1.420e+02 1.734e+02 2.761e+02, threshold=2.839e+02, percent-clipped=0.0 +2024-08-03 09:01:19,593 INFO [train.py:1114] (3/4) Epoch 6, batch 3450, loss[loss=0.2536, simple_loss=0.3284, pruned_loss=0.08939, over 12988.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3195, pruned_loss=0.08676, over 2628956.72 frames. ], batch size: 52, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:01:21,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=79394.33333333333, ans=0.125 +2024-08-03 09:01:23,237 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:01:23,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=79394.33333333333, ans=0.2 +2024-08-03 09:01:47,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=79504.33333333333, ans=0.015 +2024-08-03 09:02:00,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.14 vs. limit=10.0 +2024-08-03 09:02:02,277 INFO [train.py:1114] (3/4) Epoch 6, batch 3500, loss[loss=0.216, simple_loss=0.2877, pruned_loss=0.07217, over 13527.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3191, pruned_loss=0.08686, over 2630757.96 frames. ], batch size: 34, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:02:04,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.91 vs. limit=15.0 +2024-08-03 09:02:10,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=79614.33333333333, ans=0.0 +2024-08-03 09:02:12,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=79614.33333333333, ans=0.0 +2024-08-03 09:02:13,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=79614.33333333333, ans=0.0 +2024-08-03 09:02:15,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=79614.33333333333, ans=0.0 +2024-08-03 09:02:24,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=79651.0, ans=0.1 +2024-08-03 09:02:28,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.58 vs. limit=12.0 +2024-08-03 09:02:38,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.291e+02 1.420e+02 1.811e+02 3.621e+02, threshold=2.839e+02, percent-clipped=4.0 +2024-08-03 09:02:45,562 INFO [train.py:1114] (3/4) Epoch 6, batch 3550, loss[loss=0.2892, simple_loss=0.3529, pruned_loss=0.1128, over 12426.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3218, pruned_loss=0.08835, over 2629233.98 frames. 
], batch size: 58, lr: 1.95e-02, grad_scale: 32.0 +2024-08-03 09:02:59,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.81 vs. limit=12.0 +2024-08-03 09:03:09,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=79834.33333333333, ans=0.0 +2024-08-03 09:03:12,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.20 vs. limit=15.0 +2024-08-03 09:03:19,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=13.38 vs. limit=12.0 +2024-08-03 09:03:20,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79907.66666666667, ans=0.125 +2024-08-03 09:03:21,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0 +2024-08-03 09:03:29,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79944.33333333333, ans=0.125 +2024-08-03 09:03:30,154 INFO [train.py:1114] (3/4) Epoch 6, batch 3600, loss[loss=0.3262, simple_loss=0.3677, pruned_loss=0.1424, over 9307.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3274, pruned_loss=0.09409, over 2484963.22 frames. ], batch size: 96, lr: 1.94e-02, grad_scale: 32.0 +2024-08-03 09:03:38,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=15.0 +2024-08-03 09:03:38,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0 +2024-08-03 09:03:46,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.66 vs. limit=22.5 +2024-08-03 09:04:01,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=80054.33333333333, ans=0.125 +2024-08-03 09:11:26,786 INFO [train.py:1114] (3/4) Epoch 7, batch 0, loss[loss=0.2312, simple_loss=0.3057, pruned_loss=0.07839, over 13331.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3057, pruned_loss=0.07839, over 13331.00 frames. ], batch size: 33, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:11:26,787 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 09:11:36,681 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.2064, simple_loss=0.3063, pruned_loss=0.05331, over 944034.00 frames. 
+2024-08-03 09:11:36,682 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 09:11:36,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=80091.0, ans=0.5 +2024-08-03 09:11:39,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.328e+02 1.470e+02 1.676e+02 3.542e+02, threshold=2.940e+02, percent-clipped=1.0 +2024-08-03 09:11:58,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=80127.66666666667, ans=0.125 +2024-08-03 09:12:04,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80164.33333333333, ans=0.1 +2024-08-03 09:12:06,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80164.33333333333, ans=0.125 +2024-08-03 09:12:27,703 INFO [train.py:1114] (3/4) Epoch 7, batch 50, loss[loss=0.2157, simple_loss=0.2871, pruned_loss=0.07215, over 13424.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3221, pruned_loss=0.08866, over 578245.54 frames. ], batch size: 32, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:12:39,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.64 vs. limit=15.0 +2024-08-03 09:12:53,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.76 vs. limit=10.0 +2024-08-03 09:13:19,705 INFO [train.py:1114] (3/4) Epoch 7, batch 100, loss[loss=0.244, simple_loss=0.3119, pruned_loss=0.08809, over 13540.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3221, pruned_loss=0.08856, over 1026697.70 frames. ], batch size: 35, lr: 1.82e-02, grad_scale: 32.0 +2024-08-03 09:13:22,393 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.305e+02 1.508e+02 1.904e+02 3.829e+02, threshold=3.017e+02, percent-clipped=4.0 +2024-08-03 09:13:24,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80457.66666666667, ans=0.125 +2024-08-03 09:13:33,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=80494.33333333333, ans=0.0 +2024-08-03 09:14:00,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80604.33333333333, ans=0.1 +2024-08-03 09:14:07,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.60 vs. limit=22.5 +2024-08-03 09:14:08,299 INFO [train.py:1114] (3/4) Epoch 7, batch 150, loss[loss=0.2304, simple_loss=0.2956, pruned_loss=0.08266, over 13430.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3187, pruned_loss=0.086, over 1387771.16 frames. ], batch size: 32, lr: 1.81e-02, grad_scale: 32.0 +2024-08-03 09:14:08,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0 +2024-08-03 09:14:15,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.03 vs. 
limit=15.0 +2024-08-03 09:14:16,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=80677.66666666667, ans=0.125 +2024-08-03 09:14:22,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=80677.66666666667, ans=0.09899494936611666 +2024-08-03 09:14:26,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=22.5 +2024-08-03 09:14:27,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=80714.33333333333, ans=0.0 +2024-08-03 09:14:33,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=80714.33333333333, ans=0.125 +2024-08-03 09:14:38,935 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 09:14:41,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0 +2024-08-03 09:14:48,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=80787.66666666667, ans=0.125 +2024-08-03 09:14:50,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80787.66666666667, ans=0.125 +2024-08-03 09:14:55,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=80824.33333333333, ans=0.0 +2024-08-03 09:14:55,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=80824.33333333333, ans=0.0 +2024-08-03 09:14:56,253 INFO [train.py:1114] (3/4) Epoch 7, batch 200, loss[loss=0.2647, simple_loss=0.3296, pruned_loss=0.09992, over 12261.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3162, pruned_loss=0.08443, over 1666351.37 frames. ], batch size: 58, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:14:59,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.449e+01 1.216e+02 1.369e+02 1.577e+02 2.982e+02, threshold=2.737e+02, percent-clipped=0.0 +2024-08-03 09:15:01,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=80824.33333333333, ans=0.0 +2024-08-03 09:15:17,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80897.66666666667, ans=0.125 +2024-08-03 09:15:24,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80934.33333333333, ans=0.1 +2024-08-03 09:15:28,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80934.33333333333, ans=0.125 +2024-08-03 09:15:36,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80971.0, ans=0.1 +2024-08-03 09:15:41,497 INFO [train.py:1114] (3/4) Epoch 7, batch 250, loss[loss=0.2259, simple_loss=0.3066, pruned_loss=0.07261, over 13318.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3161, pruned_loss=0.08425, over 1885127.87 frames. 
], batch size: 46, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:15:44,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=81007.66666666667, ans=0.0 +2024-08-03 09:15:55,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81044.33333333333, ans=0.125 +2024-08-03 09:16:18,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81154.33333333333, ans=0.125 +2024-08-03 09:16:26,456 INFO [train.py:1114] (3/4) Epoch 7, batch 300, loss[loss=0.2808, simple_loss=0.3505, pruned_loss=0.1055, over 13443.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3154, pruned_loss=0.08358, over 2052638.22 frames. ], batch size: 42, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:16:26,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0 +2024-08-03 09:16:30,010 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.291e+02 1.630e+02 2.116e+02 3.205e+02, threshold=3.259e+02, percent-clipped=7.0 +2024-08-03 09:16:31,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=81191.0, ans=0.2 +2024-08-03 09:16:39,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-08-03 09:16:53,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81264.33333333333, ans=0.125 +2024-08-03 09:17:11,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0 +2024-08-03 09:17:19,134 INFO [train.py:1114] (3/4) Epoch 7, batch 350, loss[loss=0.194, simple_loss=0.273, pruned_loss=0.05754, over 13579.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.316, pruned_loss=0.0842, over 2182080.47 frames. ], batch size: 33, lr: 1.81e-02, grad_scale: 16.0 +2024-08-03 09:17:24,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=81374.33333333333, ans=0.0 +2024-08-03 09:17:27,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=81411.0, ans=0.2 +2024-08-03 09:17:37,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=81447.66666666667, ans=0.125 +2024-08-03 09:17:49,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81484.33333333333, ans=0.125 +2024-08-03 09:17:51,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=81484.33333333333, ans=0.125 +2024-08-03 09:18:05,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=81557.66666666667, ans=0.125 +2024-08-03 09:18:06,306 INFO [train.py:1114] (3/4) Epoch 7, batch 400, loss[loss=0.2317, simple_loss=0.3155, pruned_loss=0.0739, over 13356.00 frames. 
], tot_loss[loss=0.2421, simple_loss=0.316, pruned_loss=0.08409, over 2286032.63 frames. ], batch size: 37, lr: 1.81e-02, grad_scale: 32.0 +2024-08-03 09:18:09,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81557.66666666667, ans=0.1 +2024-08-03 09:18:09,976 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.335e+02 1.614e+02 1.996e+02 4.244e+02, threshold=3.229e+02, percent-clipped=5.0 +2024-08-03 09:18:16,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.53 vs. limit=15.0 +2024-08-03 09:18:19,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81594.33333333333, ans=0.125 +2024-08-03 09:18:39,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=81667.66666666667, ans=0.025 +2024-08-03 09:18:43,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81704.33333333333, ans=0.125 +2024-08-03 09:18:46,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81704.33333333333, ans=0.1 +2024-08-03 09:18:51,596 INFO [train.py:1114] (3/4) Epoch 7, batch 450, loss[loss=0.2533, simple_loss=0.3298, pruned_loss=0.08833, over 13540.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3159, pruned_loss=0.08388, over 2360390.17 frames. ], batch size: 38, lr: 1.80e-02, grad_scale: 32.0 +2024-08-03 09:19:42,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=81777.66666666667, ans=0.0 +2024-08-03 09:19:53,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=81814.33333333333, ans=0.0 +2024-08-03 09:19:58,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81851.0, ans=0.1 +2024-08-03 09:20:02,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=81851.0, ans=0.0 +2024-08-03 09:20:04,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=81887.66666666667, ans=0.2 +2024-08-03 09:20:13,718 INFO [train.py:1114] (3/4) Epoch 7, batch 500, loss[loss=0.2238, simple_loss=0.3082, pruned_loss=0.06971, over 13439.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3146, pruned_loss=0.0831, over 2426294.13 frames. ], batch size: 43, lr: 1.80e-02, grad_scale: 32.0 +2024-08-03 09:20:17,135 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.254e+02 1.426e+02 1.803e+02 2.820e+02, threshold=2.853e+02, percent-clipped=0.0 +2024-08-03 09:20:17,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81924.33333333333, ans=0.1 +2024-08-03 09:20:21,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.98 vs. 
limit=15.0
+2024-08-03 09:20:25,920 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:20:42,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82034.33333333333, ans=0.1
+2024-08-03 09:20:46,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=82034.33333333333, ans=0.04949747468305833
+2024-08-03 09:20:57,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=82071.0, ans=15.0
+2024-08-03 09:20:58,672 INFO [train.py:1114] (3/4) Epoch 7, batch 550, loss[loss=0.2413, simple_loss=0.3212, pruned_loss=0.08072, over 13297.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3147, pruned_loss=0.08327, over 2468857.73 frames. ], batch size: 49, lr: 1.80e-02, grad_scale: 16.0
+2024-08-03 09:20:58,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=82107.66666666667, ans=0.05
+2024-08-03 09:21:11,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=82144.33333333333, ans=0.2
+2024-08-03 09:21:43,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=82254.33333333333, ans=0.025
+2024-08-03 09:21:44,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=82254.33333333333, ans=0.2
+2024-08-03 09:21:44,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82254.33333333333, ans=0.125
+2024-08-03 09:21:49,911 INFO [train.py:1114] (3/4) Epoch 7, batch 600, loss[loss=0.2581, simple_loss=0.3323, pruned_loss=0.09198, over 13270.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3146, pruned_loss=0.08316, over 2508248.01 frames. ], batch size: 46, lr: 1.80e-02, grad_scale: 16.0
+2024-08-03 09:21:53,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82291.0, ans=0.1
+2024-08-03 09:21:54,385 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.313e+02 1.488e+02 1.850e+02 2.717e+02, threshold=2.975e+02, percent-clipped=0.0
+2024-08-03 09:22:16,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=82364.33333333333, ans=0.125
+2024-08-03 09:22:36,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=82437.66666666667, ans=0.125
+2024-08-03 09:22:39,805 INFO [train.py:1114] (3/4) Epoch 7, batch 650, loss[loss=0.2318, simple_loss=0.3127, pruned_loss=0.07548, over 13547.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3135, pruned_loss=0.08235, over 2543533.89 frames. ], batch size: 37, lr: 1.80e-02, grad_scale: 16.0
+2024-08-03 09:22:53,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=82511.0, ans=0.125
+2024-08-03 09:23:12,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=82584.33333333333, ans=0.0
+2024-08-03 09:23:25,558 INFO [train.py:1114] (3/4) Epoch 7, batch 700, loss[loss=0.231, simple_loss=0.3037, pruned_loss=0.07915, over 13535.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3139, pruned_loss=0.08255, over 2565803.38 frames. ], batch size: 35, lr: 1.79e-02, grad_scale: 16.0
+2024-08-03 09:23:26,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.91 vs. limit=15.0
+2024-08-03 09:23:27,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=82657.66666666667, ans=0.0
+2024-08-03 09:23:29,926 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.288e+02 1.544e+02 2.300e+02 4.218e+02, threshold=3.088e+02, percent-clipped=10.0
+2024-08-03 09:23:30,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.49 vs. limit=10.0
+2024-08-03 09:24:10,539 INFO [train.py:1114] (3/4) Epoch 7, batch 750, loss[loss=0.2818, simple_loss=0.3512, pruned_loss=0.1061, over 13358.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3136, pruned_loss=0.08254, over 2582938.31 frames. ], batch size: 37, lr: 1.79e-02, grad_scale: 16.0
+2024-08-03 09:24:21,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82877.66666666667, ans=0.1
+2024-08-03 09:24:24,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=82877.66666666667, ans=0.125
+2024-08-03 09:24:31,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=82877.66666666667, ans=0.025
+2024-08-03 09:24:59,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=82987.66666666667, ans=0.2
+2024-08-03 09:25:05,599 INFO [train.py:1114] (3/4) Epoch 7, batch 800, loss[loss=0.1911, simple_loss=0.265, pruned_loss=0.0586, over 13337.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3135, pruned_loss=0.08245, over 2597836.84 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:25:11,847 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.297e+02 1.506e+02 2.061e+02 3.344e+02, threshold=3.011e+02, percent-clipped=3.0
+2024-08-03 09:25:12,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=83024.33333333333, ans=0.0
+2024-08-03 09:25:18,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0
+2024-08-03 09:25:32,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=83097.66666666667, ans=0.2
+2024-08-03 09:25:54,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.74 vs. limit=22.5
+2024-08-03 09:25:59,651 INFO [train.py:1114] (3/4) Epoch 7, batch 850, loss[loss=0.2296, simple_loss=0.306, pruned_loss=0.07657, over 13336.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3123, pruned_loss=0.08159, over 2610084.88 frames. ], batch size: 40, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:26:09,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=83207.66666666667, ans=0.2
+2024-08-03 09:26:31,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=83281.0, ans=0.0
+2024-08-03 09:26:46,420 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:26:48,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.80 vs. limit=15.0
+2024-08-03 09:26:52,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=83354.33333333333, ans=0.0
+2024-08-03 09:26:54,483 INFO [train.py:1114] (3/4) Epoch 7, batch 900, loss[loss=0.1883, simple_loss=0.2668, pruned_loss=0.05489, over 13344.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3127, pruned_loss=0.08159, over 2612542.74 frames. ], batch size: 33, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:26:58,761 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.957e+01 1.340e+02 1.564e+02 1.853e+02 3.494e+02, threshold=3.128e+02, percent-clipped=2.0
+2024-08-03 09:27:01,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.86 vs. limit=15.0
+2024-08-03 09:27:37,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=83537.66666666667, ans=0.2
+2024-08-03 09:27:44,892 INFO [train.py:1114] (3/4) Epoch 7, batch 950, loss[loss=0.1852, simple_loss=0.2651, pruned_loss=0.05266, over 13532.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3128, pruned_loss=0.08149, over 2613459.12 frames. ], batch size: 34, lr: 1.79e-02, grad_scale: 32.0
+2024-08-03 09:27:49,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=83574.33333333333, ans=0.125
+2024-08-03 09:27:53,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=83611.0, ans=0.125
+2024-08-03 09:28:04,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=83647.66666666667, ans=0.2
+2024-08-03 09:28:22,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=83721.0, ans=0.125
+2024-08-03 09:28:29,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=83721.0, ans=0.125
+2024-08-03 09:28:31,251 INFO [train.py:1114] (3/4) Epoch 7, batch 1000, loss[loss=0.2318, simple_loss=0.3101, pruned_loss=0.07675, over 13382.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3142, pruned_loss=0.08261, over 2611892.72 frames. ], batch size: 35, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:28:39,588 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.347e+02 1.651e+02 2.099e+02 3.599e+02, threshold=3.301e+02, percent-clipped=2.0
+2024-08-03 09:28:40,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=83757.66666666667, ans=0.02
+2024-08-03 09:29:03,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83831.0, ans=0.1
+2024-08-03 09:29:24,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83904.33333333333, ans=0.1
+2024-08-03 09:29:25,993 INFO [train.py:1114] (3/4) Epoch 7, batch 1050, loss[loss=0.2515, simple_loss=0.328, pruned_loss=0.08748, over 13578.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3134, pruned_loss=0.08202, over 2616173.90 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:29:35,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=83977.66666666667, ans=10.0
+2024-08-03 09:29:56,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=84014.33333333333, ans=0.125
+2024-08-03 09:29:56,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.60 vs. limit=10.0
+2024-08-03 09:30:02,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=84051.0, ans=0.125
+2024-08-03 09:30:18,341 INFO [train.py:1114] (3/4) Epoch 7, batch 1100, loss[loss=0.1982, simple_loss=0.2735, pruned_loss=0.06147, over 13561.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3127, pruned_loss=0.08167, over 2620736.89 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:30:18,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84124.33333333333, ans=0.1
+2024-08-03 09:30:22,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.294e+01 1.204e+02 1.427e+02 1.810e+02 3.442e+02, threshold=2.853e+02, percent-clipped=1.0
+2024-08-03 09:30:31,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84161.0, ans=0.1
+2024-08-03 09:30:47,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=84197.66666666667, ans=0.025
+2024-08-03 09:31:07,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=84271.0, ans=0.025
+2024-08-03 09:31:07,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84271.0, ans=0.125
+2024-08-03 09:31:11,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=84271.0, ans=0.2
+2024-08-03 09:31:13,207 INFO [train.py:1114] (3/4) Epoch 7, batch 1150, loss[loss=0.2425, simple_loss=0.3207, pruned_loss=0.08213, over 13564.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.3138, pruned_loss=0.08234, over 2620274.88 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:31:52,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=84417.66666666667, ans=0.2
+2024-08-03 09:31:55,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84454.33333333333, ans=0.1
+2024-08-03 09:32:04,083 INFO [train.py:1114] (3/4) Epoch 7, batch 1200, loss[loss=0.2457, simple_loss=0.3214, pruned_loss=0.08502, over 13585.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3145, pruned_loss=0.08244, over 2617683.75 frames. ], batch size: 39, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:32:09,671 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.359e+02 1.583e+02 1.870e+02 3.127e+02, threshold=3.166e+02, percent-clipped=2.0
+2024-08-03 09:32:21,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=84527.66666666667, ans=0.0
+2024-08-03 09:32:27,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=84564.33333333333, ans=0.125
+2024-08-03 09:32:28,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=84564.33333333333, ans=0.2
+2024-08-03 09:32:36,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=84601.0, ans=0.125
+2024-08-03 09:32:44,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=84637.66666666667, ans=0.09899494936611666
+2024-08-03 09:32:47,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0
+2024-08-03 09:32:52,752 INFO [train.py:1114] (3/4) Epoch 7, batch 1250, loss[loss=0.2244, simple_loss=0.3085, pruned_loss=0.07014, over 13452.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3145, pruned_loss=0.08235, over 2629108.84 frames. ], batch size: 42, lr: 1.78e-02, grad_scale: 32.0
+2024-08-03 09:32:53,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=84674.33333333333, ans=0.125
+2024-08-03 09:33:07,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=84711.0, ans=0.125
+2024-08-03 09:33:10,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.82 vs. limit=8.0
+2024-08-03 09:33:27,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=84747.66666666667, ans=0.1
+2024-08-03 09:33:31,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=84784.33333333333, ans=0.2
+2024-08-03 09:33:43,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.25 vs. limit=22.5
+2024-08-03 09:33:44,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=84821.0, ans=0.125
+2024-08-03 09:33:49,190 INFO [train.py:1114] (3/4) Epoch 7, batch 1300, loss[loss=0.2638, simple_loss=0.3363, pruned_loss=0.09568, over 12894.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3132, pruned_loss=0.08192, over 2631729.87 frames. ], batch size: 52, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:33:52,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84857.66666666667, ans=0.125
+2024-08-03 09:33:53,581 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.285e+01 1.265e+02 1.441e+02 2.116e+02 4.466e+02, threshold=2.882e+02, percent-clipped=10.0
+2024-08-03 09:33:53,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=84857.66666666667, ans=0.0
+2024-08-03 09:33:58,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=84894.33333333333, ans=0.125
+2024-08-03 09:34:01,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84894.33333333333, ans=0.1
+2024-08-03 09:34:09,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84931.0, ans=0.125
+2024-08-03 09:34:12,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=84931.0, ans=0.125
+2024-08-03 09:34:18,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=84967.66666666667, ans=0.125
+2024-08-03 09:34:34,083 INFO [train.py:1114] (3/4) Epoch 7, batch 1350, loss[loss=0.2363, simple_loss=0.3083, pruned_loss=0.08214, over 13548.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3129, pruned_loss=0.08173, over 2638947.25 frames. ], batch size: 37, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:34:49,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=85077.66666666667, ans=0.0
+2024-08-03 09:34:55,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.06 vs. limit=6.0
+2024-08-03 09:34:59,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.10 vs. limit=15.0
+2024-08-03 09:35:01,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=85114.33333333333, ans=0.95
+2024-08-03 09:35:09,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=85151.0, ans=0.0
+2024-08-03 09:35:10,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85151.0, ans=0.0
+2024-08-03 09:35:11,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.19 vs. limit=15.0
+2024-08-03 09:35:21,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=85187.66666666667, ans=0.0
+2024-08-03 09:35:22,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=85224.33333333333, ans=0.125
+2024-08-03 09:35:23,222 INFO [train.py:1114] (3/4) Epoch 7, batch 1400, loss[loss=0.2083, simple_loss=0.2813, pruned_loss=0.06764, over 13247.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3124, pruned_loss=0.08139, over 2642395.82 frames. ], batch size: 31, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:35:27,643 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.250e+02 1.480e+02 1.868e+02 3.141e+02, threshold=2.961e+02, percent-clipped=2.0
+2024-08-03 09:35:29,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=85224.33333333333, ans=0.125
+2024-08-03 09:35:34,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=85261.0, ans=0.0
+2024-08-03 09:35:39,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.20 vs. limit=15.0
+2024-08-03 09:35:53,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85334.33333333333, ans=0.125
+2024-08-03 09:36:03,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85371.0, ans=0.1
+2024-08-03 09:36:03,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85371.0, ans=0.125
+2024-08-03 09:36:07,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=85371.0, ans=0.2
+2024-08-03 09:36:10,258 INFO [train.py:1114] (3/4) Epoch 7, batch 1450, loss[loss=0.2161, simple_loss=0.301, pruned_loss=0.06561, over 13410.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3137, pruned_loss=0.08198, over 2641096.49 frames. ], batch size: 43, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:36:13,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=85407.66666666667, ans=0.2
+2024-08-03 09:36:49,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.85 vs. limit=10.0
+2024-08-03 09:36:50,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85517.66666666667, ans=0.125
+2024-08-03 09:36:51,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85554.33333333333, ans=0.0
+2024-08-03 09:36:58,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=85554.33333333333, ans=0.0
+2024-08-03 09:37:03,216 INFO [train.py:1114] (3/4) Epoch 7, batch 1500, loss[loss=0.2675, simple_loss=0.3501, pruned_loss=0.09241, over 13417.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3144, pruned_loss=0.0822, over 2641582.18 frames. ], batch size: 39, lr: 1.77e-02, grad_scale: 32.0
+2024-08-03 09:37:06,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=85591.0, ans=0.125
+2024-08-03 09:37:07,888 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.276e+02 1.426e+02 1.677e+02 2.585e+02, threshold=2.853e+02, percent-clipped=0.0
+2024-08-03 09:37:37,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0
+2024-08-03 09:37:57,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85701.0, ans=0.0
+2024-08-03 09:38:10,452 INFO [train.py:1114] (3/4) Epoch 7, batch 1550, loss[loss=0.2443, simple_loss=0.3238, pruned_loss=0.08236, over 13377.00 frames. ], tot_loss[loss=0.239, simple_loss=0.314, pruned_loss=0.08202, over 2631015.08 frames. ], batch size: 41, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:38:13,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.27 vs. limit=15.0
+2024-08-03 09:38:50,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=85921.0, ans=0.0
+2024-08-03 09:38:55,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=85921.0, ans=0.125
+2024-08-03 09:38:55,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=15.0
+2024-08-03 09:38:57,224 INFO [train.py:1114] (3/4) Epoch 7, batch 1600, loss[loss=0.211, simple_loss=0.3011, pruned_loss=0.06046, over 13559.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3136, pruned_loss=0.08219, over 2624361.51 frames. ], batch size: 39, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:39:02,750 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.288e+02 1.487e+02 1.890e+02 3.069e+02, threshold=2.975e+02, percent-clipped=2.0
+2024-08-03 09:39:09,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=85994.33333333333, ans=0.2
+2024-08-03 09:39:16,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85994.33333333333, ans=0.1
+2024-08-03 09:39:42,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=86104.33333333333, ans=0.125
+2024-08-03 09:39:45,490 INFO [train.py:1114] (3/4) Epoch 7, batch 1650, loss[loss=0.2082, simple_loss=0.2915, pruned_loss=0.06239, over 13324.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3134, pruned_loss=0.08203, over 2621390.85 frames. ], batch size: 40, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:39:49,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.96 vs. limit=22.5
+2024-08-03 09:39:52,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.64 vs. limit=22.5
+2024-08-03 09:39:55,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=86177.66666666667, ans=0.07
+2024-08-03 09:39:56,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86177.66666666667, ans=0.0
+2024-08-03 09:40:07,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=86214.33333333333, ans=0.0
+2024-08-03 09:40:19,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=86251.0, ans=0.0
+2024-08-03 09:40:30,962 INFO [train.py:1114] (3/4) Epoch 7, batch 1700, loss[loss=0.214, simple_loss=0.281, pruned_loss=0.07353, over 13251.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3128, pruned_loss=0.08148, over 2630699.16 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:40:36,467 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.270e+02 1.510e+02 1.921e+02 4.226e+02, threshold=3.020e+02, percent-clipped=3.0
+2024-08-03 09:40:37,700 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:40:46,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=86361.0, ans=0.125
+2024-08-03 09:40:47,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=86361.0, ans=0.035
+2024-08-03 09:40:50,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=86361.0, ans=0.2
+2024-08-03 09:40:51,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=86361.0, ans=0.125
+2024-08-03 09:40:56,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=86397.66666666667, ans=0.125
+2024-08-03 09:41:09,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.42 vs. limit=12.0
+2024-08-03 09:41:23,773 INFO [train.py:1114] (3/4) Epoch 7, batch 1750, loss[loss=0.2393, simple_loss=0.3017, pruned_loss=0.08844, over 13512.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3122, pruned_loss=0.08152, over 2632968.56 frames. ], batch size: 31, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:41:56,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=86617.66666666667, ans=0.0
+2024-08-03 09:41:58,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=86617.66666666667, ans=0.125
+2024-08-03 09:42:00,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=86654.33333333333, ans=0.07
+2024-08-03 09:42:09,847 INFO [train.py:1114] (3/4) Epoch 7, batch 1800, loss[loss=0.2459, simple_loss=0.3241, pruned_loss=0.08389, over 13554.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3126, pruned_loss=0.08163, over 2634553.38 frames. ], batch size: 38, lr: 1.76e-02, grad_scale: 32.0
+2024-08-03 09:42:11,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=86691.0, ans=0.125
+2024-08-03 09:42:15,297 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.268e+02 1.407e+02 1.831e+02 3.286e+02, threshold=2.815e+02, percent-clipped=2.0
+2024-08-03 09:42:25,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.81 vs. limit=10.0
+2024-08-03 09:42:38,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=86801.0, ans=0.0
+2024-08-03 09:42:39,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86801.0, ans=0.1
+2024-08-03 09:42:50,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=86837.66666666667, ans=0.07
+2024-08-03 09:42:54,768 INFO [train.py:1114] (3/4) Epoch 7, batch 1850, loss[loss=0.2403, simple_loss=0.3276, pruned_loss=0.07647, over 13407.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3122, pruned_loss=0.08147, over 2637045.83 frames. ], batch size: 39, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:43:04,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=86911.0, ans=0.125
+2024-08-03 09:43:15,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=86947.66666666667, ans=0.95
+2024-08-03 09:43:21,029 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:43:39,420 INFO [train.py:1114] (3/4) Epoch 7, batch 1900, loss[loss=0.2496, simple_loss=0.3265, pruned_loss=0.08637, over 13315.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3125, pruned_loss=0.08116, over 2639299.09 frames. ], batch size: 40, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:43:44,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.267e+02 1.561e+02 1.810e+02 3.811e+02, threshold=3.121e+02, percent-clipped=4.0
+2024-08-03 09:43:52,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.48 vs. limit=15.0
+2024-08-03 09:43:59,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=87131.0, ans=0.07
+2024-08-03 09:44:00,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87131.0, ans=0.125
+2024-08-03 09:44:03,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.16 vs. limit=15.0
+2024-08-03 09:44:12,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=87167.66666666667, ans=0.2
+2024-08-03 09:44:22,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=87204.33333333333, ans=0.125
+2024-08-03 09:44:28,646 INFO [train.py:1114] (3/4) Epoch 7, batch 1950, loss[loss=0.2045, simple_loss=0.2815, pruned_loss=0.06377, over 13558.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3129, pruned_loss=0.0808, over 2646056.43 frames. ], batch size: 36, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:44:47,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=87314.33333333333, ans=0.09899494936611666
+2024-08-03 09:45:03,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=87314.33333333333, ans=0.2
+2024-08-03 09:45:05,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=87351.0, ans=0.0
+2024-08-03 09:45:05,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=87351.0, ans=0.0
+2024-08-03 09:45:10,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=12.0
+2024-08-03 09:45:15,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87387.66666666667, ans=0.125
+2024-08-03 09:45:24,715 INFO [train.py:1114] (3/4) Epoch 7, batch 2000, loss[loss=0.2104, simple_loss=0.2783, pruned_loss=0.07121, over 13550.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.314, pruned_loss=0.08193, over 2634696.02 frames. ], batch size: 31, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:45:30,334 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.022e+02 1.315e+02 1.529e+02 1.937e+02 2.914e+02, threshold=3.058e+02, percent-clipped=0.0
+2024-08-03 09:45:33,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. limit=15.0
+2024-08-03 09:45:38,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=87461.0, ans=0.125
+2024-08-03 09:45:42,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=87497.66666666667, ans=0.2
+2024-08-03 09:45:53,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=87534.33333333333, ans=0.125
+2024-08-03 09:45:59,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.77 vs. limit=22.5
+2024-08-03 09:46:00,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=87571.0, ans=0.125
+2024-08-03 09:46:09,935 INFO [train.py:1114] (3/4) Epoch 7, batch 2050, loss[loss=0.1926, simple_loss=0.2752, pruned_loss=0.05502, over 13401.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3124, pruned_loss=0.08124, over 2631631.30 frames. ], batch size: 32, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:46:15,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=87607.66666666667, ans=0.0
+2024-08-03 09:46:22,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=87644.33333333333, ans=0.125
+2024-08-03 09:46:27,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=87644.33333333333, ans=0.125
+2024-08-03 09:46:37,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=87717.66666666667, ans=0.0
+2024-08-03 09:46:38,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=87717.66666666667, ans=0.0
+2024-08-03 09:46:42,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=87717.66666666667, ans=0.025
+2024-08-03 09:46:42,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=87717.66666666667, ans=0.125
+2024-08-03 09:46:47,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87754.33333333333, ans=0.0
+2024-08-03 09:46:56,263 INFO [train.py:1114] (3/4) Epoch 7, batch 2100, loss[loss=0.2346, simple_loss=0.3106, pruned_loss=0.07927, over 13552.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.311, pruned_loss=0.08001, over 2637477.17 frames. ], batch size: 37, lr: 1.75e-02, grad_scale: 32.0
+2024-08-03 09:47:01,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=87791.0, ans=0.0
+2024-08-03 09:47:03,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.729e+01 1.195e+02 1.377e+02 1.752e+02 2.850e+02, threshold=2.753e+02, percent-clipped=0.0
+2024-08-03 09:47:03,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=87791.0, ans=0.0
+2024-08-03 09:47:12,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=87827.66666666667, ans=0.125
+2024-08-03 09:47:31,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=87901.0, ans=0.05
+2024-08-03 09:47:32,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=87901.0, ans=0.125
+2024-08-03 09:47:40,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=87937.66666666667, ans=0.0
+2024-08-03 09:47:42,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=87974.33333333333, ans=0.1
+2024-08-03 09:47:42,915 INFO [train.py:1114] (3/4) Epoch 7, batch 2150, loss[loss=0.2033, simple_loss=0.2854, pruned_loss=0.06058, over 13562.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3105, pruned_loss=0.07983, over 2646838.35 frames. ], batch size: 36, lr: 1.74e-02, grad_scale: 32.0
+2024-08-03 09:47:55,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=88011.0, ans=0.125
+2024-08-03 09:47:57,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.55 vs. limit=22.5
+2024-08-03 09:48:10,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=88047.66666666667, ans=0.125
+2024-08-03 09:48:23,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.23 vs. limit=15.0
+2024-08-03 09:49:41,846 INFO [train.py:1114] (3/4) Epoch 7, batch 2200, loss[loss=0.2369, simple_loss=0.3185, pruned_loss=0.07761, over 13395.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3109, pruned_loss=0.07976, over 2644333.38 frames. ], batch size: 39, lr: 1.74e-02, grad_scale: 32.0
+2024-08-03 09:49:56,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.554e+01 1.287e+02 1.626e+02 2.364e+02 4.219e+02, threshold=3.252e+02, percent-clipped=14.0
+2024-08-03 09:50:47,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=88267.66666666667, ans=0.125
+2024-08-03 09:50:51,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=15.0
+2024-08-03 09:51:41,818 INFO [train.py:1114] (3/4) Epoch 7, batch 2250, loss[loss=0.1928, simple_loss=0.2914, pruned_loss=0.04714, over 13365.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3113, pruned_loss=0.08015, over 2641308.55 frames. ], batch size: 37, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:51:52,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=88341.0, ans=10.0
+2024-08-03 09:52:11,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.44 vs. limit=6.0
+2024-08-03 09:52:19,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.13 vs. limit=22.5
+2024-08-03 09:52:24,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.50 vs. limit=15.0
+2024-08-03 09:52:42,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=88487.66666666667, ans=0.125
+2024-08-03 09:52:47,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=88487.66666666667, ans=0.125
+2024-08-03 09:52:57,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88524.33333333333, ans=0.125
+2024-08-03 09:52:57,972 INFO [train.py:1114] (3/4) Epoch 7, batch 2300, loss[loss=0.2132, simple_loss=0.2926, pruned_loss=0.06692, over 13591.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.31, pruned_loss=0.07971, over 2637978.86 frames. ], batch size: 33, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:53:01,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=88524.33333333333, ans=0.0
+2024-08-03 09:53:04,297 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.405e+01 1.244e+02 1.416e+02 1.864e+02 3.449e+02, threshold=2.832e+02, percent-clipped=2.0
+2024-08-03 09:53:15,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88561.0, ans=0.1
+2024-08-03 09:53:16,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.75 vs. limit=15.0
+2024-08-03 09:53:28,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=88597.66666666667, ans=0.0
+2024-08-03 09:53:31,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=88634.33333333333, ans=0.025
+2024-08-03 09:53:44,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=15.0
+2024-08-03 09:53:52,474 INFO [train.py:1114] (3/4) Epoch 7, batch 2350, loss[loss=0.2458, simple_loss=0.3284, pruned_loss=0.08157, over 13550.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3101, pruned_loss=0.07959, over 2641139.49 frames. ], batch size: 38, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:55:07,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=88744.33333333333, ans=0.0
+2024-08-03 09:55:08,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=88744.33333333333, ans=0.0
+2024-08-03 09:55:26,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.95 vs. limit=22.5
+2024-08-03 09:55:28,141 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:55:31,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88781.0, ans=0.125
+2024-08-03 09:55:32,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=88817.66666666667, ans=0.0
+2024-08-03 09:55:52,790 INFO [train.py:1114] (3/4) Epoch 7, batch 2400, loss[loss=0.1999, simple_loss=0.2854, pruned_loss=0.05721, over 13521.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3109, pruned_loss=0.07997, over 2642418.63 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 16.0
+2024-08-03 09:56:07,282 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.279e+02 1.511e+02 1.745e+02 2.971e+02, threshold=3.023e+02, percent-clipped=1.0
+2024-08-03 09:56:07,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=88891.0, ans=0.2
+2024-08-03 09:56:24,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=88964.33333333333, ans=0.125
+2024-08-03 09:56:44,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89037.66666666667, ans=0.1
+2024-08-03 09:56:53,287 INFO [train.py:1114] (3/4) Epoch 7, batch 2450, loss[loss=0.2343, simple_loss=0.3152, pruned_loss=0.07672, over 13353.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3125, pruned_loss=0.08097, over 2632453.77 frames. ], batch size: 37, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:56:59,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=89074.33333333333, ans=0.125
+2024-08-03 09:57:16,906 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 09:57:36,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=89184.33333333333, ans=0.0
+2024-08-03 09:57:41,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=89184.33333333333, ans=0.2
+2024-08-03 09:57:42,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=89221.0, ans=0.2
+2024-08-03 09:57:52,089 INFO [train.py:1114] (3/4) Epoch 7, batch 2500, loss[loss=0.2314, simple_loss=0.3158, pruned_loss=0.07353, over 13398.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3117, pruned_loss=0.08051, over 2636863.84 frames. ], batch size: 39, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:57:59,335 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.252e+02 1.492e+02 2.074e+02 3.860e+02, threshold=2.984e+02, percent-clipped=5.0
+2024-08-03 09:58:03,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=89294.33333333333, ans=0.125
+2024-08-03 09:58:07,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=89294.33333333333, ans=0.0
+2024-08-03 09:58:10,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89331.0, ans=0.1
+2024-08-03 09:58:10,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=89331.0, ans=0.0
+2024-08-03 09:58:24,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=89367.66666666667, ans=0.125
+2024-08-03 09:58:33,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=89404.33333333333, ans=0.125
+2024-08-03 09:58:36,001 INFO [train.py:1114] (3/4) Epoch 7, batch 2550, loss[loss=0.237, simple_loss=0.3012, pruned_loss=0.08643, over 13535.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3119, pruned_loss=0.08069, over 2638880.76 frames. ], batch size: 31, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:58:39,183 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.00 vs. limit=15.0
+2024-08-03 09:58:46,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.01 vs. limit=15.0
+2024-08-03 09:58:48,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=89477.66666666667, ans=0.0
+2024-08-03 09:58:51,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=89477.66666666667, ans=0.025
+2024-08-03 09:59:03,357 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.90 vs. limit=22.5
+2024-08-03 09:59:19,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.95 vs. limit=10.0
+2024-08-03 09:59:19,629 INFO [train.py:1114] (3/4) Epoch 7, batch 2600, loss[loss=0.2083, simple_loss=0.2901, pruned_loss=0.06325, over 13574.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3123, pruned_loss=0.08113, over 2638748.47 frames. ], batch size: 36, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 09:59:21,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=89624.33333333333, ans=0.0
+2024-08-03 09:59:26,388 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.355e+01 1.229e+02 1.441e+02 1.780e+02 3.809e+02, threshold=2.882e+02, percent-clipped=4.0
+2024-08-03 09:59:34,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=89661.0, ans=0.2
+2024-08-03 09:59:36,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=89697.66666666667, ans=10.0
+2024-08-03 09:59:55,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=89771.0, ans=0.5
+2024-08-03 10:00:02,670 INFO [train.py:1114] (3/4) Epoch 7, batch 2650, loss[loss=0.2292, simple_loss=0.309, pruned_loss=0.07469, over 13323.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3122, pruned_loss=0.0809, over 2641743.82 frames. ], batch size: 46, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 10:00:05,887 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=15.0
+2024-08-03 10:00:08,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=89807.66666666667, ans=0.09899494936611666
+2024-08-03 10:00:11,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=89844.33333333333, ans=0.0
+2024-08-03 10:00:48,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=89881.0, ans=0.0
+2024-08-03 10:01:08,388 INFO [train.py:1114] (3/4) Epoch 7, batch 2700, loss[loss=0.2587, simple_loss=0.3427, pruned_loss=0.08731, over 13530.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3132, pruned_loss=0.08116, over 2638427.40 frames. ], batch size: 40, lr: 1.73e-02, grad_scale: 16.0
+2024-08-03 10:01:16,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.262e+02 1.504e+02 2.229e+02 3.961e+02, threshold=3.008e+02, percent-clipped=4.0
+2024-08-03 10:01:43,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.72 vs. limit=15.0
+2024-08-03 10:01:53,309 INFO [train.py:1114] (3/4) Epoch 7, batch 2750, loss[loss=0.2214, simple_loss=0.2935, pruned_loss=0.07466, over 13343.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.311, pruned_loss=0.08022, over 2636619.90 frames. ], batch size: 34, lr: 1.73e-02, grad_scale: 8.0
+2024-08-03 10:02:04,842 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.189e-02
+2024-08-03 10:02:13,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=90247.66666666667, ans=0.0
+2024-08-03 10:02:13,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=90247.66666666667, ans=0.0
+2024-08-03 10:02:19,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=90284.33333333333, ans=10.0
+2024-08-03 10:02:19,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=22.5
+2024-08-03 10:02:19,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90284.33333333333, ans=0.0
+2024-08-03 10:02:21,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=90284.33333333333, ans=0.2
+2024-08-03 10:02:37,968 INFO [train.py:1114] (3/4) Epoch 7, batch 2800, loss[loss=0.3085, simple_loss=0.3597, pruned_loss=0.1286, over 9729.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3113, pruned_loss=0.08049, over 2628858.29 frames. ], batch size: 98, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:02:39,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=90357.66666666667, ans=0.0
+2024-08-03 10:02:46,064 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.878e+01 1.254e+02 1.435e+02 1.719e+02 3.010e+02, threshold=2.870e+02, percent-clipped=1.0
+2024-08-03 10:02:57,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90431.0, ans=0.1
+2024-08-03 10:03:01,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=90431.0, ans=0.125
+2024-08-03 10:03:03,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=90431.0, ans=0.125
+2024-08-03 10:03:23,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=90504.33333333333, ans=0.125
+2024-08-03 10:03:25,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90504.33333333333, ans=0.1
+2024-08-03 10:03:27,737 INFO [train.py:1114] (3/4) Epoch 7, batch 2850, loss[loss=0.2173, simple_loss=0.3, pruned_loss=0.06724, over 13354.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3118, pruned_loss=0.08057, over 2621719.26 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:03:33,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=90541.0, ans=0.2
+2024-08-03 10:03:39,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=90577.66666666667, ans=0.2
+2024-08-03 10:03:50,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=90614.33333333333, ans=0.125
+2024-08-03 10:04:03,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=90687.66666666667, ans=0.0
+2024-08-03 10:04:06,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=90687.66666666667, ans=0.025
+2024-08-03 10:04:09,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=15.0
+2024-08-03 10:04:10,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90687.66666666667, ans=0.125
+2024-08-03 10:04:12,846 INFO [train.py:1114] (3/4) Epoch 7, batch 2900, loss[loss=0.201, simple_loss=0.2898, pruned_loss=0.05608, over 13346.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3131, pruned_loss=0.08082, over 2632594.57 frames. ], batch size: 36, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:04:12,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=90724.33333333333, ans=0.125
+2024-08-03 10:04:24,258 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.250e+02 1.523e+02 1.874e+02 3.482e+02, threshold=3.046e+02, percent-clipped=1.0
+2024-08-03 10:04:31,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=90761.0, ans=0.2
+2024-08-03 10:04:52,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=90871.0, ans=0.125
+2024-08-03 10:04:58,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=90871.0, ans=0.125
+2024-08-03 10:05:03,276 INFO [train.py:1114] (3/4) Epoch 7, batch 2950, loss[loss=0.1949, simple_loss=0.2782, pruned_loss=0.05581, over 13331.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3119, pruned_loss=0.08049, over 2630840.65 frames. ], batch size: 34, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:05:04,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=90907.66666666667, ans=0.2
+2024-08-03 10:05:13,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.33 vs. limit=15.0
+2024-08-03 10:05:28,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=15.0
+2024-08-03 10:05:29,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=91017.66666666667, ans=0.025
+2024-08-03 10:05:34,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.18 vs. limit=15.0
+2024-08-03 10:05:42,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=91054.33333333333, ans=0.0
+2024-08-03 10:05:47,230 INFO [train.py:1114] (3/4) Epoch 7, batch 3000, loss[loss=0.226, simple_loss=0.3034, pruned_loss=0.0743, over 13548.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3108, pruned_loss=0.08015, over 2630722.27 frames. ], batch size: 37, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:05:47,231 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 10:06:06,640 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.1942, simple_loss=0.2938, pruned_loss=0.04733, over 944034.00 frames. 
+2024-08-03 10:06:06,640 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 10:06:11,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=91091.0, ans=0.125
+2024-08-03 10:06:14,472 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.238e+02 1.419e+02 1.719e+02 4.359e+02, threshold=2.839e+02, percent-clipped=6.0
+2024-08-03 10:06:34,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=91201.0, ans=0.0
+2024-08-03 10:06:38,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91201.0, ans=0.125
+2024-08-03 10:06:41,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=91237.66666666667, ans=0.0
+2024-08-03 10:06:43,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=15.0
+2024-08-03 10:06:50,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.22 vs. limit=22.5
+2024-08-03 10:06:51,246 INFO [train.py:1114] (3/4) Epoch 7, batch 3050, loss[loss=0.2169, simple_loss=0.2811, pruned_loss=0.07635, over 13519.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3121, pruned_loss=0.08058, over 2627498.54 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 16.0
+2024-08-03 10:06:52,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=91274.33333333333, ans=0.0
+2024-08-03 10:07:02,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91311.0, ans=0.1
+2024-08-03 10:07:34,429 INFO [train.py:1114] (3/4) Epoch 7, batch 3100, loss[loss=0.2632, simple_loss=0.3414, pruned_loss=0.09246, over 13295.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3111, pruned_loss=0.07977, over 2626964.38 frames. ], batch size: 46, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:07:35,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=91457.66666666667, ans=0.07
+2024-08-03 10:07:42,186 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.930e+01 1.259e+02 1.446e+02 1.808e+02 2.827e+02, threshold=2.891e+02, percent-clipped=0.0
+2024-08-03 10:07:44,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91494.33333333333, ans=0.1
+2024-08-03 10:07:49,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=91494.33333333333, ans=0.0
+2024-08-03 10:07:53,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=91531.0, ans=0.5
+2024-08-03 10:07:55,438 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 10:08:07,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.00 vs. limit=15.0
+2024-08-03 10:08:17,430 INFO [train.py:1114] (3/4) Epoch 7, batch 3150, loss[loss=0.2532, simple_loss=0.3349, pruned_loss=0.08571, over 13058.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3115, pruned_loss=0.08016, over 2628689.48 frames. ], batch size: 48, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:08:29,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91677.66666666667, ans=0.125
+2024-08-03 10:08:38,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.44 vs. limit=10.0
+2024-08-03 10:09:01,222 INFO [train.py:1114] (3/4) Epoch 7, batch 3200, loss[loss=0.2438, simple_loss=0.3263, pruned_loss=0.08067, over 13537.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.311, pruned_loss=0.07973, over 2634029.84 frames. ], batch size: 37, lr: 1.71e-02, grad_scale: 32.0
+2024-08-03 10:09:03,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91824.33333333333, ans=0.125
+2024-08-03 10:09:08,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.267e+02 1.711e+02 2.068e+02 3.292e+02, threshold=3.421e+02, percent-clipped=4.0
+2024-08-03 10:09:14,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91861.0, ans=0.125
+2024-08-03 10:09:48,354 INFO [train.py:1114] (3/4) Epoch 7, batch 3250, loss[loss=0.2492, simple_loss=0.333, pruned_loss=0.0827, over 13393.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3116, pruned_loss=0.0802, over 2638222.73 frames. ], batch size: 38, lr: 1.71e-02, grad_scale: 32.0
+2024-08-03 10:10:09,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=15.0
+2024-08-03 10:10:23,357 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.52 vs. limit=22.5
+2024-08-03 10:10:26,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=92154.33333333333, ans=0.0
+2024-08-03 10:10:33,779 INFO [train.py:1114] (3/4) Epoch 7, batch 3300, loss[loss=0.2368, simple_loss=0.3125, pruned_loss=0.08057, over 12831.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3103, pruned_loss=0.07964, over 2640027.95 frames. ], batch size: 52, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:10:35,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.12 vs. limit=15.0
+2024-08-03 10:10:37,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.15 vs. limit=15.0
+2024-08-03 10:10:42,163 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.670e+01 1.272e+02 1.617e+02 1.965e+02 3.247e+02, threshold=3.234e+02, percent-clipped=0.0
+2024-08-03 10:10:44,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=92227.66666666667, ans=0.125
+2024-08-03 10:10:46,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=92227.66666666667, ans=0.125
+2024-08-03 10:10:47,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.27 vs. limit=22.5
+2024-08-03 10:10:58,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92301.0, ans=0.0
+2024-08-03 10:10:58,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=92301.0, ans=0.125
+2024-08-03 10:11:07,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=92337.66666666667, ans=0.125
+2024-08-03 10:11:15,807 INFO [train.py:1114] (3/4) Epoch 7, batch 3350, loss[loss=0.2636, simple_loss=0.3352, pruned_loss=0.09596, over 12942.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3114, pruned_loss=0.07999, over 2630478.86 frames. ], batch size: 48, lr: 1.71e-02, grad_scale: 16.0
+2024-08-03 10:11:25,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=92411.0, ans=0.125
+2024-08-03 10:11:27,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=92411.0, ans=0.125
+2024-08-03 10:11:58,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=92521.0, ans=0.0
+2024-08-03 10:11:59,799 INFO [train.py:1114] (3/4) Epoch 7, batch 3400, loss[loss=0.1906, simple_loss=0.2568, pruned_loss=0.06226, over 13531.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3109, pruned_loss=0.08008, over 2626706.45 frames. 
], batch size: 31, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:12:02,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=92557.66666666667, ans=0.125 +2024-08-03 10:12:05,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=92557.66666666667, ans=0.125 +2024-08-03 10:12:07,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=92557.66666666667, ans=0.2 +2024-08-03 10:12:08,995 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.271e+02 1.505e+02 1.907e+02 3.089e+02, threshold=3.010e+02, percent-clipped=0.0 +2024-08-03 10:12:11,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92594.33333333333, ans=0.1 +2024-08-03 10:12:12,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=92594.33333333333, ans=0.125 +2024-08-03 10:12:15,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-08-03 10:12:16,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.85 vs. limit=15.0 +2024-08-03 10:12:25,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=92667.66666666667, ans=0.95 +2024-08-03 10:12:26,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=92667.66666666667, ans=0.0 +2024-08-03 10:12:31,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=22.5 +2024-08-03 10:12:33,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.26 vs. limit=15.0 +2024-08-03 10:12:42,891 INFO [train.py:1114] (3/4) Epoch 7, batch 3450, loss[loss=0.2546, simple_loss=0.3327, pruned_loss=0.08826, over 12920.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3107, pruned_loss=0.07986, over 2629124.30 frames. ], batch size: 52, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:12:43,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=92741.0, ans=0.2 +2024-08-03 10:12:52,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=92777.66666666667, ans=0.125 +2024-08-03 10:13:00,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=92814.33333333333, ans=0.2 +2024-08-03 10:13:12,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=92851.0, ans=0.2 +2024-08-03 10:13:22,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=92887.66666666667, ans=0.125 +2024-08-03 10:13:25,191 INFO [train.py:1114] (3/4) Epoch 7, batch 3500, loss[loss=0.2123, simple_loss=0.2872, pruned_loss=0.0687, over 13527.00 frames. 
], tot_loss[loss=0.2353, simple_loss=0.3104, pruned_loss=0.08014, over 2630675.18 frames. ], batch size: 34, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:13:27,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=92924.33333333333, ans=0.0 +2024-08-03 10:13:33,823 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.754e+01 1.223e+02 1.539e+02 1.881e+02 2.645e+02, threshold=3.078e+02, percent-clipped=0.0 +2024-08-03 10:13:40,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92961.0, ans=0.125 +2024-08-03 10:13:45,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92997.66666666667, ans=0.1 +2024-08-03 10:13:49,062 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:13:58,246 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:13:59,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.87 vs. limit=22.5 +2024-08-03 10:14:09,034 INFO [train.py:1114] (3/4) Epoch 7, batch 3550, loss[loss=0.2394, simple_loss=0.3162, pruned_loss=0.08136, over 12589.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3131, pruned_loss=0.08133, over 2629018.16 frames. ], batch size: 58, lr: 1.70e-02, grad_scale: 16.0 +2024-08-03 10:14:12,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=93107.66666666667, ans=0.2 +2024-08-03 10:14:53,544 INFO [train.py:1114] (3/4) Epoch 7, batch 3600, loss[loss=0.2998, simple_loss=0.3541, pruned_loss=0.1228, over 9287.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3204, pruned_loss=0.08817, over 2486063.90 frames. ], batch size: 97, lr: 1.70e-02, grad_scale: 32.0 +2024-08-03 10:15:02,205 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.315e+02 1.480e+02 1.683e+02 2.632e+02, threshold=2.960e+02, percent-clipped=0.0 +2024-08-03 10:15:16,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=93364.33333333333, ans=0.125 +2024-08-03 10:15:22,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.12 vs. limit=15.0 +2024-08-03 10:15:24,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=93401.0, ans=0.0 +2024-08-03 10:16:13,472 INFO [train.py:1114] (3/4) Epoch 8, batch 0, loss[loss=0.1978, simple_loss=0.2792, pruned_loss=0.0582, over 13363.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2792, pruned_loss=0.0582, over 13363.00 frames. ], batch size: 33, lr: 1.60e-02, grad_scale: 32.0 +2024-08-03 10:16:13,473 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 10:16:24,030 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.1977, simple_loss=0.2989, pruned_loss=0.04829, over 944034.00 frames. 
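The scaling.py ScheduledFloat lines that dominate this log trace hyperparameters (skip rates, balancer probabilities, bypass scale minimums, dropout probabilities) that are functions of the global batch count rather than constants; each entry prints the parameter's dotted module path, the current batch_count, and the value (ans) currently in effect. The Whitening lines appear to play a similar monitoring role, comparing a measured whitening metric against a scheduled limit (e.g. metric=10.33 vs. limit=15.0). Below is a minimal sketch of such a batch-count-driven schedule, assuming piecewise-linear interpolation between breakpoints; icefall's real ScheduledFloat in scaling.py has more machinery, and the breakpoints here are made up for illustration:

```python
class ScheduledFloat:
    """Sketch of a batch-count-driven hyperparameter: piecewise-linear
    interpolation between (batch_count, value) breakpoints. This only
    illustrates the idea behind the 'name=..., batch_count=..., ans=...'
    log lines; it is not icefall's actual implementation."""

    def __init__(self, *points: tuple[float, float]):
        self.points = sorted(points)  # e.g. (0.0, 0.2), (20000.0, 0.0)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                # linear interpolation inside this segment
                return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
        raise AssertionError("unreachable")

# A hypothetical skip-rate decaying from 0.2 to 0.0 over the first 20k batches:
skip_rate = ScheduledFloat((0.0, 0.2), (20000.0, 0.0))
print(skip_rate.value(90907.67))  # past the last breakpoint -> 0.0
```

With this shape, an entry like conv_skip_rate ... ans=0.0 at batch_count around 90907 simply means the schedule has decayed past its final breakpoint and the regularizer is switched off for the rest of training.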
+2024-08-03 10:16:24,031 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 10:16:24,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-08-03 10:16:26,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93437.66666666667, ans=0.125 +2024-08-03 10:17:01,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93584.33333333333, ans=0.1 +2024-08-03 10:17:07,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93584.33333333333, ans=0.125 +2024-08-03 10:17:09,635 INFO [train.py:1114] (3/4) Epoch 8, batch 50, loss[loss=0.1933, simple_loss=0.2739, pruned_loss=0.05633, over 13417.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3103, pruned_loss=0.07922, over 578897.65 frames. ], batch size: 32, lr: 1.60e-02, grad_scale: 16.0 +2024-08-03 10:17:19,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.07 vs. limit=15.0 +2024-08-03 10:17:23,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=93657.66666666667, ans=0.125 +2024-08-03 10:17:30,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.247e+02 1.447e+02 2.039e+02 3.809e+02, threshold=2.894e+02, percent-clipped=5.0 +2024-08-03 10:17:46,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-08-03 10:17:55,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93767.66666666667, ans=0.1 +2024-08-03 10:17:57,633 INFO [train.py:1114] (3/4) Epoch 8, batch 100, loss[loss=0.2195, simple_loss=0.3088, pruned_loss=0.0651, over 13513.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3132, pruned_loss=0.07984, over 1025996.85 frames. ], batch size: 35, lr: 1.60e-02, grad_scale: 16.0 +2024-08-03 10:18:11,458 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:18:28,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=93914.33333333333, ans=0.125 +2024-08-03 10:18:31,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=93914.33333333333, ans=0.0 +2024-08-03 10:18:38,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=93951.0, ans=0.0 +2024-08-03 10:18:43,050 INFO [train.py:1114] (3/4) Epoch 8, batch 150, loss[loss=0.2187, simple_loss=0.2862, pruned_loss=0.07562, over 13421.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3091, pruned_loss=0.07736, over 1386851.86 frames. 
], batch size: 32, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:18:45,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93987.66666666667, ans=0.1 +2024-08-03 10:18:52,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=94024.33333333333, ans=0.125 +2024-08-03 10:18:53,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=94024.33333333333, ans=0.2 +2024-08-03 10:18:59,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=94024.33333333333, ans=0.2 +2024-08-03 10:18:59,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=94024.33333333333, ans=0.95 +2024-08-03 10:19:02,666 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.928e+01 1.201e+02 1.310e+02 1.526e+02 2.654e+02, threshold=2.621e+02, percent-clipped=0.0 +2024-08-03 10:19:02,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=94061.0, ans=0.09899494936611666 +2024-08-03 10:19:07,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.33 vs. limit=22.5 +2024-08-03 10:19:19,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.45 vs. limit=22.5 +2024-08-03 10:19:24,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=94134.33333333333, ans=0.0 +2024-08-03 10:19:28,079 INFO [train.py:1114] (3/4) Epoch 8, batch 200, loss[loss=0.2537, simple_loss=0.3359, pruned_loss=0.08571, over 12656.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3066, pruned_loss=0.07676, over 1666049.77 frames. 
], batch size: 59, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:19:31,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=94171.0, ans=0.0 +2024-08-03 10:19:32,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=94171.0, ans=0.035 +2024-08-03 10:19:39,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94207.66666666667, ans=0.125 +2024-08-03 10:19:48,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=94244.33333333333, ans=0.0 +2024-08-03 10:19:49,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=94244.33333333333, ans=0.1 +2024-08-03 10:19:55,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94244.33333333333, ans=0.1 +2024-08-03 10:19:55,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=94244.33333333333, ans=0.2 +2024-08-03 10:19:59,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=94281.0, ans=0.1 +2024-08-03 10:20:14,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94317.66666666667, ans=0.125 +2024-08-03 10:20:23,209 INFO [train.py:1114] (3/4) Epoch 8, batch 250, loss[loss=0.2229, simple_loss=0.3127, pruned_loss=0.0666, over 13309.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3085, pruned_loss=0.07781, over 1885090.00 frames. ], batch size: 46, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:20:25,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.87 vs. limit=22.5 +2024-08-03 10:20:27,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94354.33333333333, ans=0.125 +2024-08-03 10:20:38,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=94391.0, ans=0.125 +2024-08-03 10:20:47,225 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.275e+02 1.578e+02 1.902e+02 3.207e+02, threshold=3.155e+02, percent-clipped=3.0 +2024-08-03 10:20:50,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=94427.66666666667, ans=0.125 +2024-08-03 10:20:52,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=94427.66666666667, ans=0.125 +2024-08-03 10:20:57,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94464.33333333333, ans=0.125 +2024-08-03 10:21:12,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=94501.0, ans=0.07 +2024-08-03 10:21:17,484 INFO [train.py:1114] (3/4) Epoch 8, batch 300, loss[loss=0.2673, simple_loss=0.3369, pruned_loss=0.09885, over 13429.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3079, pruned_loss=0.07756, over 2051809.60 frames. 
], batch size: 42, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:21:23,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.37 vs. limit=15.0 +2024-08-03 10:21:25,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=94574.33333333333, ans=0.0 +2024-08-03 10:21:28,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0 +2024-08-03 10:21:32,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=94574.33333333333, ans=0.035 +2024-08-03 10:21:34,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.34 vs. limit=15.0 +2024-08-03 10:21:38,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94574.33333333333, ans=0.125 +2024-08-03 10:21:48,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=94647.66666666667, ans=0.05 +2024-08-03 10:21:58,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=94647.66666666667, ans=0.125 +2024-08-03 10:22:00,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94647.66666666667, ans=0.125 +2024-08-03 10:22:12,140 INFO [train.py:1114] (3/4) Epoch 8, batch 350, loss[loss=0.1752, simple_loss=0.2502, pruned_loss=0.05011, over 13569.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3079, pruned_loss=0.07753, over 2183041.38 frames. ], batch size: 33, lr: 1.59e-02, grad_scale: 16.0 +2024-08-03 10:22:16,331 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=12.0 +2024-08-03 10:22:16,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=94721.0, ans=0.0 +2024-08-03 10:22:30,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=94794.33333333333, ans=0.125 +2024-08-03 10:22:32,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.242e+02 1.508e+02 2.025e+02 3.534e+02, threshold=3.015e+02, percent-clipped=1.0 +2024-08-03 10:22:50,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=94867.66666666667, ans=0.05 +2024-08-03 10:22:57,090 INFO [train.py:1114] (3/4) Epoch 8, batch 400, loss[loss=0.2491, simple_loss=0.3264, pruned_loss=0.08588, over 13360.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3068, pruned_loss=0.07702, over 2286839.07 frames. 
], batch size: 37, lr: 1.59e-02, grad_scale: 32.0 +2024-08-03 10:22:59,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=94904.33333333333, ans=0.125 +2024-08-03 10:23:15,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=94977.66666666667, ans=0.125 +2024-08-03 10:23:29,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=95014.33333333333, ans=0.1 +2024-08-03 10:23:34,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.56 vs. limit=15.0 +2024-08-03 10:23:34,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0 +2024-08-03 10:23:42,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=95051.0, ans=0.125 +2024-08-03 10:23:43,343 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=15.0 +2024-08-03 10:23:44,506 INFO [train.py:1114] (3/4) Epoch 8, batch 450, loss[loss=0.2761, simple_loss=0.3517, pruned_loss=0.1003, over 13555.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3075, pruned_loss=0.07729, over 2359823.21 frames. ], batch size: 38, lr: 1.59e-02, grad_scale: 32.0 +2024-08-03 10:23:53,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0 +2024-08-03 10:24:06,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=95161.0, ans=0.0 +2024-08-03 10:24:08,008 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.730e+01 1.246e+02 1.455e+02 1.839e+02 3.207e+02, threshold=2.909e+02, percent-clipped=1.0 +2024-08-03 10:24:12,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=95161.0, ans=0.0 +2024-08-03 10:24:33,350 INFO [train.py:1114] (3/4) Epoch 8, batch 500, loss[loss=0.2759, simple_loss=0.3523, pruned_loss=0.09969, over 13412.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3063, pruned_loss=0.07661, over 2425636.96 frames. ], batch size: 43, lr: 1.58e-02, grad_scale: 32.0 +2024-08-03 10:24:54,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.48 vs. 
limit=22.5 +2024-08-03 10:24:59,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=95344.33333333333, ans=0.0 +2024-08-03 10:25:09,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95417.66666666667, ans=0.125 +2024-08-03 10:25:17,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95417.66666666667, ans=0.125 +2024-08-03 10:25:17,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=95417.66666666667, ans=0.125 +2024-08-03 10:25:21,170 INFO [train.py:1114] (3/4) Epoch 8, batch 550, loss[loss=0.2755, simple_loss=0.3427, pruned_loss=0.1042, over 12982.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3071, pruned_loss=0.0774, over 2468646.43 frames. ], batch size: 48, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:25:24,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=95454.33333333333, ans=0.0 +2024-08-03 10:25:25,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0 +2024-08-03 10:25:26,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95454.33333333333, ans=0.125 +2024-08-03 10:25:32,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.82 vs. limit=15.0 +2024-08-03 10:25:42,950 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.590e+01 1.201e+02 1.471e+02 1.924e+02 3.912e+02, threshold=2.942e+02, percent-clipped=7.0 +2024-08-03 10:26:02,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.29 vs. limit=15.0 +2024-08-03 10:26:06,485 INFO [train.py:1114] (3/4) Epoch 8, batch 600, loss[loss=0.2627, simple_loss=0.3337, pruned_loss=0.09586, over 13292.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3071, pruned_loss=0.0773, over 2509030.78 frames. ], batch size: 46, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:26:25,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=95674.33333333333, ans=0.125 +2024-08-03 10:26:29,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=95674.33333333333, ans=0.0 +2024-08-03 10:26:58,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95784.33333333333, ans=0.1 +2024-08-03 10:27:00,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.91 vs. limit=15.0 +2024-08-03 10:27:01,402 INFO [train.py:1114] (3/4) Epoch 8, batch 650, loss[loss=0.2033, simple_loss=0.2811, pruned_loss=0.06272, over 13538.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3061, pruned_loss=0.07641, over 2544086.77 frames. 
], batch size: 37, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:27:09,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=95857.66666666667, ans=0.2 +2024-08-03 10:27:15,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=95857.66666666667, ans=0.0 +2024-08-03 10:27:21,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0 +2024-08-03 10:27:22,945 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.916e+01 1.338e+02 1.718e+02 2.265e+02 3.658e+02, threshold=3.436e+02, percent-clipped=6.0 +2024-08-03 10:27:30,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=95931.0, ans=6.0 +2024-08-03 10:27:52,484 INFO [train.py:1114] (3/4) Epoch 8, batch 700, loss[loss=0.1974, simple_loss=0.2766, pruned_loss=0.05905, over 13529.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.307, pruned_loss=0.07681, over 2566278.61 frames. ], batch size: 35, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:28:06,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.59 vs. limit=22.5 +2024-08-03 10:28:14,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=96077.66666666667, ans=0.2 +2024-08-03 10:28:20,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96114.33333333333, ans=0.1 +2024-08-03 10:28:27,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0 +2024-08-03 10:28:29,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.51 vs. limit=22.5 +2024-08-03 10:28:38,020 INFO [train.py:1114] (3/4) Epoch 8, batch 750, loss[loss=0.2383, simple_loss=0.3171, pruned_loss=0.07974, over 13354.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3062, pruned_loss=0.0762, over 2583756.90 frames. ], batch size: 37, lr: 1.58e-02, grad_scale: 8.0 +2024-08-03 10:28:44,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=96187.66666666667, ans=0.0 +2024-08-03 10:28:47,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96224.33333333333, ans=0.1 +2024-08-03 10:29:15,357 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.402e+01 1.294e+02 1.560e+02 2.121e+02 3.650e+02, threshold=3.121e+02, percent-clipped=1.0 +2024-08-03 10:29:21,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=96297.66666666667, ans=0.025 +2024-08-03 10:30:00,639 INFO [train.py:1114] (3/4) Epoch 8, batch 800, loss[loss=0.2243, simple_loss=0.2925, pruned_loss=0.07804, over 13349.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3067, pruned_loss=0.07652, over 2598222.23 frames. 
], batch size: 33, lr: 1.58e-02, grad_scale: 16.0 +2024-08-03 10:30:04,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.14 vs. limit=15.0 +2024-08-03 10:30:05,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96371.0, ans=0.1 +2024-08-03 10:30:53,513 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:30:59,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=96481.0, ans=0.125 +2024-08-03 10:31:10,639 INFO [train.py:1114] (3/4) Epoch 8, batch 850, loss[loss=0.2201, simple_loss=0.3119, pruned_loss=0.06413, over 13316.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3057, pruned_loss=0.07604, over 2610952.33 frames. ], batch size: 40, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:31:11,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=96554.33333333333, ans=0.125 +2024-08-03 10:31:11,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96554.33333333333, ans=0.1 +2024-08-03 10:31:20,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=96591.0, ans=0.125 +2024-08-03 10:31:21,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=96591.0, ans=0.2 +2024-08-03 10:31:22,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=96591.0, ans=0.0 +2024-08-03 10:31:22,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=96591.0, ans=0.0 +2024-08-03 10:31:32,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.603e+01 1.283e+02 1.438e+02 1.736e+02 2.880e+02, threshold=2.876e+02, percent-clipped=0.0 +2024-08-03 10:31:37,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=96664.33333333333, ans=0.0 +2024-08-03 10:31:38,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=96664.33333333333, ans=0.0 +2024-08-03 10:31:44,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=96664.33333333333, ans=0.0 +2024-08-03 10:31:48,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=96701.0, ans=0.0 +2024-08-03 10:31:56,082 INFO [train.py:1114] (3/4) Epoch 8, batch 900, loss[loss=0.2046, simple_loss=0.2849, pruned_loss=0.06214, over 13349.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3061, pruned_loss=0.076, over 2612750.80 frames. 
], batch size: 33, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:31:59,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=96737.66666666667, ans=0.125 +2024-08-03 10:32:00,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=96737.66666666667, ans=0.5 +2024-08-03 10:32:03,827 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:32:08,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=96774.33333333333, ans=0.0 +2024-08-03 10:32:09,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=96774.33333333333, ans=10.0 +2024-08-03 10:32:18,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.03 vs. limit=15.0 +2024-08-03 10:32:20,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-03 10:32:29,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=96847.66666666667, ans=0.2 +2024-08-03 10:32:31,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=96847.66666666667, ans=0.125 +2024-08-03 10:32:35,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96847.66666666667, ans=0.1 +2024-08-03 10:32:36,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96847.66666666667, ans=0.125 +2024-08-03 10:32:40,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.58 vs. limit=15.0 +2024-08-03 10:32:40,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=96884.33333333333, ans=0.0 +2024-08-03 10:32:41,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=96884.33333333333, ans=0.125 +2024-08-03 10:32:47,026 INFO [train.py:1114] (3/4) Epoch 8, batch 950, loss[loss=0.1746, simple_loss=0.2607, pruned_loss=0.04429, over 13544.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3063, pruned_loss=0.07597, over 2613728.32 frames. ], batch size: 34, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:32:53,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=96921.0, ans=0.125 +2024-08-03 10:32:54,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.52 vs. limit=15.0 +2024-08-03 10:32:55,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=96957.66666666667, ans=0.125 +2024-08-03 10:32:56,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.71 vs. 
limit=15.0 +2024-08-03 10:33:00,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=96957.66666666667, ans=0.125 +2024-08-03 10:33:08,790 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.880e+01 1.218e+02 1.421e+02 1.776e+02 3.206e+02, threshold=2.842e+02, percent-clipped=1.0 +2024-08-03 10:33:19,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=97031.0, ans=0.1 +2024-08-03 10:33:23,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=97067.66666666667, ans=0.0 +2024-08-03 10:33:29,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=97067.66666666667, ans=0.025 +2024-08-03 10:33:29,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=97067.66666666667, ans=0.125 +2024-08-03 10:33:32,182 INFO [train.py:1114] (3/4) Epoch 8, batch 1000, loss[loss=0.2088, simple_loss=0.2872, pruned_loss=0.06519, over 13376.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3072, pruned_loss=0.07659, over 2612011.46 frames. ], batch size: 35, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:33:38,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97104.33333333333, ans=0.1 +2024-08-03 10:33:40,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.29 vs. limit=15.0 +2024-08-03 10:33:43,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=97141.0, ans=0.0 +2024-08-03 10:33:43,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=97141.0, ans=0.2 +2024-08-03 10:33:59,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=97177.66666666667, ans=0.125 +2024-08-03 10:34:18,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97287.66666666667, ans=0.125 +2024-08-03 10:34:19,173 INFO [train.py:1114] (3/4) Epoch 8, batch 1050, loss[loss=0.2165, simple_loss=0.2955, pruned_loss=0.06876, over 13582.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3054, pruned_loss=0.07554, over 2615787.29 frames. ], batch size: 39, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:34:20,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97287.66666666667, ans=0.0 +2024-08-03 10:34:29,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97324.33333333333, ans=0.125 +2024-08-03 10:34:31,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=97324.33333333333, ans=0.2 +2024-08-03 10:34:40,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.371e+01 1.174e+02 1.326e+02 1.659e+02 3.865e+02, threshold=2.652e+02, percent-clipped=4.0 +2024-08-03 10:35:04,457 INFO [train.py:1114] (3/4) Epoch 8, batch 1100, loss[loss=0.2243, simple_loss=0.3119, pruned_loss=0.0684, over 13563.00 frames. 
], tot_loss[loss=0.2288, simple_loss=0.306, pruned_loss=0.07579, over 2620035.15 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:35:08,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97471.0, ans=0.0 +2024-08-03 10:35:09,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97471.0, ans=0.1 +2024-08-03 10:35:10,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-08-03 10:35:15,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=97507.66666666667, ans=0.125 +2024-08-03 10:35:21,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97544.33333333333, ans=0.1 +2024-08-03 10:35:34,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.67 vs. limit=10.0 +2024-08-03 10:35:38,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97581.0, ans=0.1 +2024-08-03 10:35:49,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=97617.66666666667, ans=0.125 +2024-08-03 10:35:51,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=97617.66666666667, ans=0.0 +2024-08-03 10:35:56,404 INFO [train.py:1114] (3/4) Epoch 8, batch 1150, loss[loss=0.2491, simple_loss=0.3208, pruned_loss=0.08871, over 13569.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3057, pruned_loss=0.07561, over 2619009.66 frames. ], batch size: 36, lr: 1.57e-02, grad_scale: 16.0 +2024-08-03 10:36:11,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97691.0, ans=0.125 +2024-08-03 10:36:17,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.235e+02 1.421e+02 1.826e+02 2.699e+02, threshold=2.842e+02, percent-clipped=1.0 +2024-08-03 10:36:35,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=97801.0, ans=0.0 +2024-08-03 10:36:41,662 INFO [train.py:1114] (3/4) Epoch 8, batch 1200, loss[loss=0.2435, simple_loss=0.3213, pruned_loss=0.08284, over 13563.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3063, pruned_loss=0.07572, over 2616120.21 frames. 
], batch size: 39, lr: 1.57e-02, grad_scale: 32.0 +2024-08-03 10:36:42,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97837.66666666667, ans=0.1 +2024-08-03 10:36:50,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=97874.33333333333, ans=0.0 +2024-08-03 10:36:57,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=97874.33333333333, ans=0.0 +2024-08-03 10:36:59,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97911.0, ans=0.1 +2024-08-03 10:37:02,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97911.0, ans=0.125 +2024-08-03 10:37:19,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97984.33333333333, ans=0.1 +2024-08-03 10:37:30,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97984.33333333333, ans=0.1 +2024-08-03 10:37:37,206 INFO [train.py:1114] (3/4) Epoch 8, batch 1250, loss[loss=0.259, simple_loss=0.3418, pruned_loss=0.0881, over 13432.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3069, pruned_loss=0.07576, over 2627636.69 frames. ], batch size: 42, lr: 1.56e-02, grad_scale: 32.0 +2024-08-03 10:37:45,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=98057.66666666667, ans=0.0 +2024-08-03 10:37:49,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=98057.66666666667, ans=0.0 +2024-08-03 10:37:58,744 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.185e+02 1.323e+02 1.561e+02 3.297e+02, threshold=2.645e+02, percent-clipped=2.0 +2024-08-03 10:38:00,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=98094.33333333333, ans=0.07 +2024-08-03 10:38:04,404 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:38:12,208 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.11 vs. limit=22.5 +2024-08-03 10:38:22,561 INFO [train.py:1114] (3/4) Epoch 8, batch 1300, loss[loss=0.2525, simple_loss=0.3236, pruned_loss=0.09066, over 12930.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3057, pruned_loss=0.07515, over 2630313.30 frames. ], batch size: 52, lr: 1.56e-02, grad_scale: 16.0 +2024-08-03 10:38:59,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.69 vs. 
limit=15.0 +2024-08-03 10:39:05,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=98277.66666666667, ans=0.2 +2024-08-03 10:39:07,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=98277.66666666667, ans=0.0 +2024-08-03 10:39:07,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98277.66666666667, ans=0.125 +2024-08-03 10:39:21,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=98351.0, ans=0.0 +2024-08-03 10:39:25,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.67 vs. limit=15.0 +2024-08-03 10:39:30,888 INFO [train.py:1114] (3/4) Epoch 8, batch 1350, loss[loss=0.2175, simple_loss=0.2973, pruned_loss=0.06884, over 13547.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3059, pruned_loss=0.07547, over 2637262.77 frames. ], batch size: 37, lr: 1.56e-02, grad_scale: 4.0 +2024-08-03 10:39:32,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=98387.66666666667, ans=0.125 +2024-08-03 10:39:43,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=98424.33333333333, ans=0.2 +2024-08-03 10:39:45,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98424.33333333333, ans=0.125 +2024-08-03 10:39:49,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=98424.33333333333, ans=0.2 +2024-08-03 10:40:01,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.224e+02 1.395e+02 1.638e+02 2.508e+02, threshold=2.789e+02, percent-clipped=0.0 +2024-08-03 10:40:07,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=98497.66666666667, ans=10.0 +2024-08-03 10:40:20,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=98534.33333333333, ans=0.125 +2024-08-03 10:40:22,545 INFO [train.py:1114] (3/4) Epoch 8, batch 1400, loss[loss=0.2054, simple_loss=0.2752, pruned_loss=0.06781, over 13281.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3061, pruned_loss=0.0757, over 2641534.55 frames. 
], batch size: 31, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:40:22,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=98571.0, ans=0.025 +2024-08-03 10:40:27,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=98571.0, ans=0.2 +2024-08-03 10:40:28,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=98571.0, ans=0.2 +2024-08-03 10:40:39,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=98644.33333333333, ans=0.0 +2024-08-03 10:40:42,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98644.33333333333, ans=0.125 +2024-08-03 10:40:48,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=98644.33333333333, ans=0.0 +2024-08-03 10:40:53,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.31 vs. limit=15.0 +2024-08-03 10:41:03,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98717.66666666667, ans=0.1 +2024-08-03 10:41:07,775 INFO [train.py:1114] (3/4) Epoch 8, batch 1450, loss[loss=0.2559, simple_loss=0.3344, pruned_loss=0.0887, over 13418.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3072, pruned_loss=0.07615, over 2640254.20 frames. ], batch size: 43, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:41:19,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.24 vs. limit=22.5 +2024-08-03 10:41:33,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.79 vs. limit=15.0 +2024-08-03 10:41:34,029 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.227e+02 1.462e+02 1.726e+02 3.399e+02, threshold=2.923e+02, percent-clipped=2.0 +2024-08-03 10:41:37,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=98864.33333333333, ans=10.0 +2024-08-03 10:41:46,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=98901.0, ans=0.2 +2024-08-03 10:41:54,819 INFO [train.py:1114] (3/4) Epoch 8, batch 1500, loss[loss=0.2736, simple_loss=0.3497, pruned_loss=0.0987, over 13384.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3079, pruned_loss=0.07653, over 2640719.30 frames. ], batch size: 39, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:42:07,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98974.33333333333, ans=0.1 +2024-08-03 10:42:08,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98974.33333333333, ans=0.125 +2024-08-03 10:42:12,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. 
limit=6.0 +2024-08-03 10:42:13,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=99011.0, ans=0.025 +2024-08-03 10:42:21,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=99011.0, ans=0.125 +2024-08-03 10:42:39,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=99084.33333333333, ans=0.0 +2024-08-03 10:42:40,619 INFO [train.py:1114] (3/4) Epoch 8, batch 1550, loss[loss=0.2407, simple_loss=0.3269, pruned_loss=0.07728, over 13394.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3078, pruned_loss=0.07696, over 2631226.09 frames. ], batch size: 41, lr: 1.56e-02, grad_scale: 8.0 +2024-08-03 10:42:44,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.58 vs. limit=12.0 +2024-08-03 10:42:49,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.27 vs. limit=22.5 +2024-08-03 10:42:55,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=99157.66666666667, ans=0.2 +2024-08-03 10:42:56,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99157.66666666667, ans=0.1 +2024-08-03 10:43:04,926 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.564e+01 1.246e+02 1.506e+02 1.858e+02 4.061e+02, threshold=3.012e+02, percent-clipped=4.0 +2024-08-03 10:43:08,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99231.0, ans=0.1 +2024-08-03 10:43:15,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=99231.0, ans=0.0 +2024-08-03 10:43:19,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99267.66666666667, ans=0.1 +2024-08-03 10:43:22,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=99267.66666666667, ans=0.125 +2024-08-03 10:43:29,765 INFO [train.py:1114] (3/4) Epoch 8, batch 1600, loss[loss=0.2594, simple_loss=0.3336, pruned_loss=0.09258, over 13577.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3073, pruned_loss=0.07692, over 2623570.73 frames. 
], batch size: 39, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:43:30,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=99304.33333333333, ans=0.0 +2024-08-03 10:43:31,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=99304.33333333333, ans=0.0 +2024-08-03 10:43:32,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=99304.33333333333, ans=0.125 +2024-08-03 10:43:52,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99377.66666666667, ans=0.1 +2024-08-03 10:43:56,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=99414.33333333333, ans=0.0 +2024-08-03 10:44:12,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=99451.0, ans=0.0 +2024-08-03 10:44:15,526 INFO [train.py:1114] (3/4) Epoch 8, batch 1650, loss[loss=0.234, simple_loss=0.3154, pruned_loss=0.07633, over 13319.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3074, pruned_loss=0.07741, over 2621980.79 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:44:27,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=99524.33333333333, ans=0.025 +2024-08-03 10:44:37,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.62 vs. limit=15.0 +2024-08-03 10:44:39,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.87 vs. limit=15.0 +2024-08-03 10:44:40,416 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.002e+02 1.254e+02 1.426e+02 1.791e+02 5.006e+02, threshold=2.852e+02, percent-clipped=4.0 +2024-08-03 10:44:41,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99561.0, ans=0.1 +2024-08-03 10:44:42,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=99597.66666666667, ans=15.0 +2024-08-03 10:44:55,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=99634.33333333333, ans=0.125 +2024-08-03 10:45:03,284 INFO [train.py:1114] (3/4) Epoch 8, batch 1700, loss[loss=0.1777, simple_loss=0.2527, pruned_loss=0.0513, over 13266.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3063, pruned_loss=0.07624, over 2630922.84 frames. ], batch size: 31, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:45:13,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0 +2024-08-03 10:45:48,394 INFO [train.py:1114] (3/4) Epoch 8, batch 1750, loss[loss=0.2421, simple_loss=0.298, pruned_loss=0.09308, over 13549.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3058, pruned_loss=0.07613, over 2633961.83 frames. 
], batch size: 31, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:45:56,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=99891.0, ans=0.0 +2024-08-03 10:46:13,081 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.856e+01 1.232e+02 1.552e+02 2.162e+02 4.270e+02, threshold=3.103e+02, percent-clipped=14.0 +2024-08-03 10:46:21,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=99964.33333333333, ans=0.125 +2024-08-03 10:46:27,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100001.0, ans=0.0 +2024-08-03 10:46:34,072 INFO [train.py:1114] (3/4) Epoch 8, batch 1800, loss[loss=0.2214, simple_loss=0.3055, pruned_loss=0.06864, over 13542.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3065, pruned_loss=0.07644, over 2634962.84 frames. ], batch size: 38, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:46:45,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=100074.33333333333, ans=0.2 +2024-08-03 10:46:45,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.85 vs. limit=15.0 +2024-08-03 10:46:47,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=100074.33333333333, ans=0.125 +2024-08-03 10:46:51,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=100074.33333333333, ans=0.0 +2024-08-03 10:46:51,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=12.0 +2024-08-03 10:47:09,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100147.66666666667, ans=0.1 +2024-08-03 10:47:11,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=100147.66666666667, ans=0.125 +2024-08-03 10:47:20,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=100184.33333333333, ans=0.0 +2024-08-03 10:47:24,467 INFO [train.py:1114] (3/4) Epoch 8, batch 1850, loss[loss=0.2221, simple_loss=0.3113, pruned_loss=0.0665, over 13399.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3062, pruned_loss=0.07614, over 2637902.19 frames. 
], batch size: 39, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:47:41,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=100257.66666666667, ans=0.0 +2024-08-03 10:47:45,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=100294.33333333333, ans=10.0 +2024-08-03 10:47:48,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.993e+01 1.278e+02 1.540e+02 2.004e+02 3.260e+02, threshold=3.079e+02, percent-clipped=4.0 +2024-08-03 10:48:01,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100367.66666666667, ans=0.0 +2024-08-03 10:48:07,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100367.66666666667, ans=0.1 +2024-08-03 10:48:09,649 INFO [train.py:1114] (3/4) Epoch 8, batch 1900, loss[loss=0.2356, simple_loss=0.3135, pruned_loss=0.07889, over 13304.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3068, pruned_loss=0.07617, over 2640520.57 frames. ], batch size: 40, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:48:12,532 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:48:24,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=100441.0, ans=0.0 +2024-08-03 10:48:44,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.10 vs. limit=22.5 +2024-08-03 10:48:56,509 INFO [train.py:1114] (3/4) Epoch 8, batch 1950, loss[loss=0.2265, simple_loss=0.3039, pruned_loss=0.07456, over 13554.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3076, pruned_loss=0.07629, over 2646783.50 frames. ], batch size: 36, lr: 1.55e-02, grad_scale: 16.0 +2024-08-03 10:49:00,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.98 vs. limit=15.0 +2024-08-03 10:49:13,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100624.33333333333, ans=0.0 +2024-08-03 10:49:19,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100661.0, ans=0.125 +2024-08-03 10:49:21,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.223e+02 1.431e+02 1.772e+02 2.626e+02, threshold=2.861e+02, percent-clipped=0.0 +2024-08-03 10:49:42,385 INFO [train.py:1114] (3/4) Epoch 8, batch 2000, loss[loss=0.2004, simple_loss=0.2738, pruned_loss=0.06345, over 13530.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.308, pruned_loss=0.0766, over 2636970.47 frames. 
], batch size: 31, lr: 1.54e-02, grad_scale: 32.0 +2024-08-03 10:49:44,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100771.0, ans=0.1 +2024-08-03 10:49:53,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=100807.66666666667, ans=0.0 +2024-08-03 10:50:05,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=100844.33333333333, ans=0.125 +2024-08-03 10:50:10,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100844.33333333333, ans=0.125 +2024-08-03 10:50:22,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100881.0, ans=0.125 +2024-08-03 10:50:28,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.22 vs. limit=22.5 +2024-08-03 10:50:36,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=100917.66666666667, ans=0.2 +2024-08-03 10:50:38,485 INFO [train.py:1114] (3/4) Epoch 8, batch 2050, loss[loss=0.1937, simple_loss=0.2673, pruned_loss=0.06003, over 13397.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3061, pruned_loss=0.07593, over 2632643.25 frames. ], batch size: 32, lr: 1.54e-02, grad_scale: 32.0 +2024-08-03 10:50:39,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.17 vs. limit=15.0 +2024-08-03 10:50:46,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=100954.33333333333, ans=0.025 +2024-08-03 10:50:50,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100991.0, ans=0.1 +2024-08-03 10:50:54,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=100991.0, ans=0.2 +2024-08-03 10:50:56,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101027.66666666667, ans=0.125 +2024-08-03 10:51:03,411 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.251e+02 1.508e+02 1.862e+02 2.983e+02, threshold=3.016e+02, percent-clipped=1.0 +2024-08-03 10:51:16,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=101101.0, ans=0.0 +2024-08-03 10:51:23,590 INFO [train.py:1114] (3/4) Epoch 8, batch 2100, loss[loss=0.2284, simple_loss=0.3075, pruned_loss=0.07465, over 13535.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.306, pruned_loss=0.07577, over 2638053.07 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:51:25,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101137.66666666667, ans=0.1 +2024-08-03 10:51:51,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.09 vs. 
limit=10.0 +2024-08-03 10:51:58,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.12 vs. limit=15.0 +2024-08-03 10:52:00,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=101284.33333333333, ans=0.0 +2024-08-03 10:52:02,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=101284.33333333333, ans=0.125 +2024-08-03 10:52:09,839 INFO [train.py:1114] (3/4) Epoch 8, batch 2150, loss[loss=0.1998, simple_loss=0.282, pruned_loss=0.0588, over 13551.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3048, pruned_loss=0.07512, over 2646952.97 frames. ], batch size: 36, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:52:24,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101357.66666666667, ans=0.125 +2024-08-03 10:52:36,949 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.507e+01 1.244e+02 1.463e+02 2.141e+02 4.797e+02, threshold=2.925e+02, percent-clipped=7.0 +2024-08-03 10:52:45,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.89 vs. limit=22.5 +2024-08-03 10:52:45,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=101431.0, ans=0.0 +2024-08-03 10:52:50,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.15 vs. limit=10.0 +2024-08-03 10:52:55,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=12.0 +2024-08-03 10:52:57,023 INFO [train.py:1114] (3/4) Epoch 8, batch 2200, loss[loss=0.2291, simple_loss=0.311, pruned_loss=0.07358, over 13403.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3045, pruned_loss=0.0748, over 2644649.97 frames. ], batch size: 39, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:52:57,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=101504.33333333333, ans=0.2 +2024-08-03 10:52:59,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=101504.33333333333, ans=0.125 +2024-08-03 10:53:01,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=101504.33333333333, ans=0.0 +2024-08-03 10:53:09,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101541.0, ans=0.125 +2024-08-03 10:53:11,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101541.0, ans=0.1 +2024-08-03 10:53:13,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101541.0, ans=0.125 +2024-08-03 10:53:30,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101614.33333333333, ans=0.0 +2024-08-03 10:53:42,324 INFO [train.py:1114] (3/4) Epoch 8, batch 2250, loss[loss=0.2418, simple_loss=0.3213, pruned_loss=0.08111, over 13350.00 frames. 
], tot_loss[loss=0.227, simple_loss=0.3043, pruned_loss=0.0748, over 2641989.24 frames. ], batch size: 37, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:53:59,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=101724.33333333333, ans=0.0 +2024-08-03 10:54:02,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101761.0, ans=0.1 +2024-08-03 10:54:06,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.17 vs. limit=15.0 +2024-08-03 10:54:09,240 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.704e+01 1.191e+02 1.378e+02 1.872e+02 3.290e+02, threshold=2.756e+02, percent-clipped=1.0 +2024-08-03 10:54:39,887 INFO [train.py:1114] (3/4) Epoch 8, batch 2300, loss[loss=0.2136, simple_loss=0.285, pruned_loss=0.0711, over 13590.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3029, pruned_loss=0.07444, over 2637468.69 frames. ], batch size: 33, lr: 1.54e-02, grad_scale: 16.0 +2024-08-03 10:55:02,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=101944.33333333333, ans=0.125 +2024-08-03 10:55:24,957 INFO [train.py:1114] (3/4) Epoch 8, batch 2350, loss[loss=0.2279, simple_loss=0.3087, pruned_loss=0.07359, over 13554.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3029, pruned_loss=0.0741, over 2640173.12 frames. ], batch size: 38, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:55:35,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=102054.33333333333, ans=0.025 +2024-08-03 10:55:39,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102091.0, ans=0.1 +2024-08-03 10:55:47,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.12 vs. limit=10.0 +2024-08-03 10:55:49,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=102127.66666666667, ans=0.2 +2024-08-03 10:55:55,521 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.744e+01 1.287e+02 1.695e+02 2.279e+02 3.908e+02, threshold=3.390e+02, percent-clipped=9.0 +2024-08-03 10:56:03,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=102164.33333333333, ans=0.125 +2024-08-03 10:56:15,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=102201.0, ans=0.125 +2024-08-03 10:56:24,793 INFO [train.py:1114] (3/4) Epoch 8, batch 2400, loss[loss=0.1964, simple_loss=0.2835, pruned_loss=0.05466, over 13528.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3043, pruned_loss=0.07481, over 2641641.10 frames. ], batch size: 35, lr: 1.53e-02, grad_scale: 32.0 +2024-08-03 10:56:45,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.99 vs. 
limit=15.0 +2024-08-03 10:57:03,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=102384.33333333333, ans=0.125 +2024-08-03 10:57:05,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=102384.33333333333, ans=10.0 +2024-08-03 10:57:11,095 INFO [train.py:1114] (3/4) Epoch 8, batch 2450, loss[loss=0.2238, simple_loss=0.3071, pruned_loss=0.07025, over 13356.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3064, pruned_loss=0.07611, over 2631157.87 frames. ], batch size: 37, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:57:14,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=102421.0, ans=0.125 +2024-08-03 10:57:19,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.05 vs. limit=15.0 +2024-08-03 10:57:22,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=102457.66666666667, ans=10.0 +2024-08-03 10:57:40,047 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.286e+02 1.590e+02 1.975e+02 2.991e+02, threshold=3.180e+02, percent-clipped=0.0 +2024-08-03 10:57:45,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.62 vs. limit=12.0 +2024-08-03 10:57:49,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=15.0 +2024-08-03 10:57:59,037 INFO [train.py:1114] (3/4) Epoch 8, batch 2500, loss[loss=0.2554, simple_loss=0.3302, pruned_loss=0.09033, over 13396.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3066, pruned_loss=0.07619, over 2635232.60 frames. ], batch size: 39, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:58:02,513 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 10:58:08,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=102641.0, ans=0.025 +2024-08-03 10:58:13,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=102641.0, ans=0.0 +2024-08-03 10:58:21,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=102677.66666666667, ans=0.04949747468305833 +2024-08-03 10:58:23,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=102677.66666666667, ans=0.125 +2024-08-03 10:58:36,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.81 vs. limit=15.0 +2024-08-03 10:58:50,555 INFO [train.py:1114] (3/4) Epoch 8, batch 2550, loss[loss=0.1844, simple_loss=0.2586, pruned_loss=0.05516, over 13546.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3052, pruned_loss=0.0753, over 2636177.37 frames. 
], batch size: 31, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:58:59,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=102824.33333333333, ans=0.0 +2024-08-03 10:59:15,649 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.849e+01 1.203e+02 1.342e+02 1.554e+02 2.450e+02, threshold=2.684e+02, percent-clipped=0.0 +2024-08-03 10:59:19,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=102897.66666666667, ans=0.0 +2024-08-03 10:59:22,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=102897.66666666667, ans=0.0 +2024-08-03 10:59:31,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.84 vs. limit=10.0 +2024-08-03 10:59:34,145 INFO [train.py:1114] (3/4) Epoch 8, batch 2600, loss[loss=0.2117, simple_loss=0.2905, pruned_loss=0.06644, over 13566.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3055, pruned_loss=0.0755, over 2635456.37 frames. ], batch size: 36, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 10:59:46,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103007.66666666667, ans=0.1 +2024-08-03 10:59:46,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.78 vs. limit=6.0 +2024-08-03 10:59:48,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.34 vs. limit=15.0 +2024-08-03 10:59:56,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=103044.33333333333, ans=0.07 +2024-08-03 11:00:02,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=103044.33333333333, ans=0.2 +2024-08-03 11:00:02,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=103044.33333333333, ans=0.125 +2024-08-03 11:00:07,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=103081.0, ans=0.02 +2024-08-03 11:00:23,273 INFO [train.py:1114] (3/4) Epoch 8, batch 2650, loss[loss=0.2341, simple_loss=0.3174, pruned_loss=0.07541, over 13347.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3057, pruned_loss=0.07521, over 2639031.99 frames. ], batch size: 46, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:00:26,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=103154.33333333333, ans=0.0 +2024-08-03 11:00:28,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=103154.33333333333, ans=10.0 +2024-08-03 11:00:35,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.66 vs. 
limit=12.0 +2024-08-03 11:00:37,562 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.486e-02 +2024-08-03 11:00:40,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103227.66666666667, ans=0.1 +2024-08-03 11:00:41,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=103227.66666666667, ans=0.125 +2024-08-03 11:00:48,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.954e+01 1.189e+02 1.373e+02 1.657e+02 2.856e+02, threshold=2.745e+02, percent-clipped=2.0 +2024-08-03 11:01:07,306 INFO [train.py:1114] (3/4) Epoch 8, batch 2700, loss[loss=0.2212, simple_loss=0.3104, pruned_loss=0.06606, over 13542.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3059, pruned_loss=0.07521, over 2636324.78 frames. ], batch size: 40, lr: 1.53e-02, grad_scale: 16.0 +2024-08-03 11:01:07,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=103337.66666666667, ans=0.025 +2024-08-03 11:01:33,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=103447.66666666667, ans=0.015 +2024-08-03 11:01:36,480 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:01:44,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103484.33333333333, ans=0.1 +2024-08-03 11:01:51,073 INFO [train.py:1114] (3/4) Epoch 8, batch 2750, loss[loss=0.2247, simple_loss=0.2957, pruned_loss=0.07685, over 13325.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3049, pruned_loss=0.07521, over 2634674.75 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:01:52,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103521.0, ans=0.125 +2024-08-03 11:01:52,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103521.0, ans=0.0 +2024-08-03 11:01:54,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.09 vs. limit=15.0 +2024-08-03 11:02:11,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0 +2024-08-03 11:02:18,842 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.453e+01 1.294e+02 1.512e+02 1.970e+02 3.598e+02, threshold=3.023e+02, percent-clipped=4.0 +2024-08-03 11:02:19,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=103631.0, ans=0.125 +2024-08-03 11:02:29,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=103667.66666666667, ans=0.125 +2024-08-03 11:02:33,689 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. 
limit=15.0 +2024-08-03 11:02:35,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103667.66666666667, ans=0.125 +2024-08-03 11:02:37,586 INFO [train.py:1114] (3/4) Epoch 8, batch 2800, loss[loss=0.3376, simple_loss=0.381, pruned_loss=0.147, over 9199.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.305, pruned_loss=0.07516, over 2626795.79 frames. ], batch size: 96, lr: 1.52e-02, grad_scale: 32.0 +2024-08-03 11:02:41,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.32 vs. limit=22.5 +2024-08-03 11:02:44,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=103704.33333333333, ans=0.125 +2024-08-03 11:02:44,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=103704.33333333333, ans=0.0 +2024-08-03 11:02:47,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103741.0, ans=0.125 +2024-08-03 11:02:49,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103741.0, ans=0.1 +2024-08-03 11:02:51,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=103741.0, ans=0.04949747468305833 +2024-08-03 11:03:01,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.48 vs. limit=10.0 +2024-08-03 11:03:22,211 INFO [train.py:1114] (3/4) Epoch 8, batch 2850, loss[loss=0.2339, simple_loss=0.3124, pruned_loss=0.07771, over 13352.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3056, pruned_loss=0.0755, over 2620330.08 frames. ], batch size: 35, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:03:24,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103887.66666666667, ans=0.1 +2024-08-03 11:03:33,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.07 vs. limit=12.0 +2024-08-03 11:03:37,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103924.33333333333, ans=0.125 +2024-08-03 11:03:47,801 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.266e+02 1.558e+02 2.014e+02 3.574e+02, threshold=3.117e+02, percent-clipped=3.0 +2024-08-03 11:03:55,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=103997.66666666667, ans=0.125 +2024-08-03 11:04:05,046 INFO [train.py:1114] (3/4) Epoch 8, batch 2900, loss[loss=0.2127, simple_loss=0.292, pruned_loss=0.06665, over 13372.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3057, pruned_loss=0.07518, over 2631197.30 frames. ], batch size: 36, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:04:38,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=104181.0, ans=0.025 +2024-08-03 11:04:39,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. 
limit=15.0 +2024-08-03 11:04:40,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=104217.66666666667, ans=0.0 +2024-08-03 11:04:47,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=104217.66666666667, ans=0.025 +2024-08-03 11:04:48,513 INFO [train.py:1114] (3/4) Epoch 8, batch 2950, loss[loss=0.2194, simple_loss=0.3045, pruned_loss=0.06718, over 13341.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3044, pruned_loss=0.07468, over 2629379.23 frames. ], batch size: 34, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:04:54,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=104254.33333333333, ans=0.0 +2024-08-03 11:05:13,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=104364.33333333333, ans=0.125 +2024-08-03 11:05:14,601 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.150e+02 1.316e+02 1.628e+02 4.465e+02, threshold=2.631e+02, percent-clipped=1.0 +2024-08-03 11:05:18,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=104364.33333333333, ans=0.0 +2024-08-03 11:05:30,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.46 vs. limit=15.0 +2024-08-03 11:05:31,942 INFO [train.py:1114] (3/4) Epoch 8, batch 3000, loss[loss=0.2201, simple_loss=0.3001, pruned_loss=0.07005, over 13554.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3049, pruned_loss=0.07504, over 2630041.44 frames. ], batch size: 37, lr: 1.52e-02, grad_scale: 16.0 +2024-08-03 11:05:31,943 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 11:05:42,235 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.1886, simple_loss=0.2887, pruned_loss=0.04428, over 944034.00 frames. +2024-08-03 11:05:42,236 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 11:06:01,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=15.0 +2024-08-03 11:06:10,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=104547.66666666667, ans=0.0 +2024-08-03 11:06:17,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=104584.33333333333, ans=0.125 +2024-08-03 11:06:23,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-03 11:06:24,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.22 vs. limit=15.0 +2024-08-03 11:06:25,406 INFO [train.py:1114] (3/4) Epoch 8, batch 3050, loss[loss=0.2316, simple_loss=0.304, pruned_loss=0.07965, over 13540.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3051, pruned_loss=0.07513, over 2627487.65 frames. 
], batch size: 35, lr: 1.52e-02, grad_scale: 8.0 +2024-08-03 11:06:28,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104621.0, ans=0.125 +2024-08-03 11:06:29,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=104621.0, ans=0.0 +2024-08-03 11:06:29,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.14 vs. limit=22.5 +2024-08-03 11:06:36,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=104657.66666666667, ans=0.09899494936611666 +2024-08-03 11:06:37,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=104657.66666666667, ans=0.0 +2024-08-03 11:06:48,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.70 vs. limit=15.0 +2024-08-03 11:06:54,269 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.678e+01 1.159e+02 1.376e+02 1.941e+02 3.361e+02, threshold=2.751e+02, percent-clipped=3.0 +2024-08-03 11:06:56,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.13 vs. limit=22.5 +2024-08-03 11:06:58,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104731.0, ans=0.125 +2024-08-03 11:07:02,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=104767.66666666667, ans=0.125 +2024-08-03 11:07:10,692 INFO [train.py:1114] (3/4) Epoch 8, batch 3100, loss[loss=0.2184, simple_loss=0.3036, pruned_loss=0.0666, over 13346.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3043, pruned_loss=0.0745, over 2627937.49 frames. ], batch size: 46, lr: 1.52e-02, grad_scale: 8.0 +2024-08-03 11:07:23,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104841.0, ans=0.125 +2024-08-03 11:07:30,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=104877.66666666667, ans=0.05 +2024-08-03 11:07:33,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=15.0 +2024-08-03 11:07:40,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=104914.33333333333, ans=0.025 +2024-08-03 11:07:53,852 INFO [train.py:1114] (3/4) Epoch 8, batch 3150, loss[loss=0.2257, simple_loss=0.3114, pruned_loss=0.06998, over 13013.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3042, pruned_loss=0.07441, over 2629373.49 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 8.0 +2024-08-03 11:08:05,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.90 vs. 
limit=15.0 +2024-08-03 11:08:06,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=105024.33333333333, ans=0.0 +2024-08-03 11:08:10,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=15.0 +2024-08-03 11:08:13,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=105061.0, ans=0.025 +2024-08-03 11:08:19,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105097.66666666667, ans=0.125 +2024-08-03 11:08:20,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.438e+02 1.843e+02 2.666e+02 3.777e+02, threshold=3.687e+02, percent-clipped=21.0 +2024-08-03 11:08:22,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=105097.66666666667, ans=0.0 +2024-08-03 11:08:22,767 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:08:28,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=105134.33333333333, ans=0.2 +2024-08-03 11:08:34,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-08-03 11:08:36,205 INFO [train.py:1114] (3/4) Epoch 8, batch 3200, loss[loss=0.209, simple_loss=0.2955, pruned_loss=0.06122, over 13524.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3034, pruned_loss=0.07408, over 2635557.75 frames. ], batch size: 37, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:08:37,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.35 vs. limit=22.5 +2024-08-03 11:09:10,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105317.66666666667, ans=0.1 +2024-08-03 11:09:19,471 INFO [train.py:1114] (3/4) Epoch 8, batch 3250, loss[loss=0.2326, simple_loss=0.318, pruned_loss=0.07355, over 13375.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3046, pruned_loss=0.07442, over 2640045.54 frames. ], batch size: 38, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:09:21,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.13 vs. limit=10.0 +2024-08-03 11:09:24,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.40 vs. limit=6.0 +2024-08-03 11:09:30,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.70 vs. limit=15.0 +2024-08-03 11:09:37,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.04 vs. 
limit=15.0 +2024-08-03 11:09:45,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105464.33333333333, ans=0.125 +2024-08-03 11:09:47,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.799e+01 1.281e+02 1.591e+02 1.983e+02 2.904e+02, threshold=3.182e+02, percent-clipped=0.0 +2024-08-03 11:09:53,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.90 vs. limit=22.5 +2024-08-03 11:09:58,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=105501.0, ans=0.2 +2024-08-03 11:09:59,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=105501.0, ans=0.125 +2024-08-03 11:10:03,914 INFO [train.py:1114] (3/4) Epoch 8, batch 3300, loss[loss=0.2238, simple_loss=0.3084, pruned_loss=0.06955, over 12841.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3037, pruned_loss=0.07408, over 2641182.56 frames. ], batch size: 52, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:10:04,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105537.66666666667, ans=0.0 +2024-08-03 11:10:06,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=105537.66666666667, ans=0.0 +2024-08-03 11:10:30,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.75 vs. limit=5.0 +2024-08-03 11:10:51,495 INFO [train.py:1114] (3/4) Epoch 8, batch 3350, loss[loss=0.2329, simple_loss=0.3157, pruned_loss=0.0751, over 13073.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3055, pruned_loss=0.07528, over 2630247.96 frames. ], batch size: 48, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:11:04,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=105721.0, ans=0.125 +2024-08-03 11:11:18,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105757.66666666667, ans=0.125 +2024-08-03 11:11:42,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.800e+01 1.191e+02 1.380e+02 1.620e+02 2.699e+02, threshold=2.759e+02, percent-clipped=0.0 +2024-08-03 11:11:45,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105831.0, ans=0.125 +2024-08-03 11:12:10,266 INFO [train.py:1114] (3/4) Epoch 8, batch 3400, loss[loss=0.2037, simple_loss=0.2748, pruned_loss=0.06625, over 13530.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3051, pruned_loss=0.07553, over 2625812.92 frames. ], batch size: 31, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:12:27,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105941.0, ans=0.125 +2024-08-03 11:12:34,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=105977.66666666667, ans=0.035 +2024-08-03 11:12:36,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.81 vs. 
limit=10.0 +2024-08-03 11:12:37,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=105977.66666666667, ans=0.025 +2024-08-03 11:12:42,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105977.66666666667, ans=0.1 +2024-08-03 11:12:51,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.75 vs. limit=22.5 +2024-08-03 11:12:58,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106051.0, ans=0.0 +2024-08-03 11:13:02,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106051.0, ans=0.1 +2024-08-03 11:13:04,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106051.0, ans=0.125 +2024-08-03 11:13:06,202 INFO [train.py:1114] (3/4) Epoch 8, batch 3450, loss[loss=0.2076, simple_loss=0.2958, pruned_loss=0.0597, over 12773.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3052, pruned_loss=0.07529, over 2629217.87 frames. ], batch size: 52, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:13:13,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=106087.66666666667, ans=0.0 +2024-08-03 11:13:25,172 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:13:30,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=106161.0, ans=0.125 +2024-08-03 11:13:31,930 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:13:32,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106161.0, ans=0.1 +2024-08-03 11:13:35,947 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.667e+01 1.192e+02 1.350e+02 1.586e+02 3.469e+02, threshold=2.701e+02, percent-clipped=1.0 +2024-08-03 11:13:38,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106197.66666666667, ans=0.0 +2024-08-03 11:13:54,442 INFO [train.py:1114] (3/4) Epoch 8, batch 3500, loss[loss=0.1919, simple_loss=0.2742, pruned_loss=0.05478, over 13541.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3035, pruned_loss=0.07454, over 2631572.68 frames. 
], batch size: 34, lr: 1.51e-02, grad_scale: 16.0 +2024-08-03 11:13:56,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=106271.0, ans=0.125 +2024-08-03 11:13:56,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=106271.0, ans=0.0 +2024-08-03 11:14:03,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=106307.66666666667, ans=0.125 +2024-08-03 11:14:10,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106307.66666666667, ans=0.1 +2024-08-03 11:14:15,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.72 vs. limit=15.0 +2024-08-03 11:14:20,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=106381.0, ans=0.0 +2024-08-03 11:14:42,315 INFO [train.py:1114] (3/4) Epoch 8, batch 3550, loss[loss=0.2714, simple_loss=0.3441, pruned_loss=0.0993, over 12520.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3052, pruned_loss=0.07566, over 2629606.45 frames. ], batch size: 58, lr: 1.50e-02, grad_scale: 16.0 +2024-08-03 11:14:48,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106454.33333333333, ans=0.1 +2024-08-03 11:15:12,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=106527.66666666667, ans=0.0 +2024-08-03 11:15:14,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=106527.66666666667, ans=0.2 +2024-08-03 11:15:26,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106564.33333333333, ans=0.125 +2024-08-03 11:15:26,476 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.312e+02 1.471e+02 1.710e+02 3.286e+02, threshold=2.943e+02, percent-clipped=3.0 +2024-08-03 11:15:27,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=106564.33333333333, ans=0.025 +2024-08-03 11:15:43,356 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 11:15:48,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0 +2024-08-03 11:15:48,880 INFO [train.py:1114] (3/4) Epoch 8, batch 3600, loss[loss=0.2856, simple_loss=0.3437, pruned_loss=0.1138, over 9138.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3113, pruned_loss=0.0814, over 2489762.46 frames. 
], batch size: 96, lr: 1.50e-02, grad_scale: 32.0
+2024-08-03 11:15:50,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106637.66666666667, ans=0.125
+2024-08-03 11:15:55,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=106637.66666666667, ans=0.0
+2024-08-03 11:16:06,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=106674.33333333333, ans=0.0
+2024-08-03 11:16:15,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=106711.0, ans=0.2
+2024-08-03 11:16:21,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=9.25 vs. limit=12.0
+2024-08-03 11:16:22,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=106747.66666666667, ans=0.125
+2024-08-03 11:16:24,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=106747.66666666667, ans=0.0
+2024-08-03 11:17:27,618 INFO [train.py:1114] (3/4) Epoch 9, batch 0, loss[loss=0.2007, simple_loss=0.281, pruned_loss=0.06021, over 13344.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.281, pruned_loss=0.06021, over 13344.00 frames. ], batch size: 33, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:17:27,618 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 11:17:39,636 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.1935, simple_loss=0.2948, pruned_loss=0.04614, over 944034.00 frames.
+2024-08-03 11:17:39,636 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 11:17:42,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.45 vs. limit=15.0
+2024-08-03 11:17:42,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=106788.0, ans=0.035
+2024-08-03 11:17:47,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=106788.0, ans=0.2
+2024-08-03 11:17:50,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=106824.66666666667, ans=0.125
+2024-08-03 11:18:03,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=106861.33333333333, ans=0.125
+2024-08-03 11:18:14,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=106898.0, ans=0.0
+2024-08-03 11:18:18,806 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.291e+02 1.434e+02 1.801e+02 3.339e+02, threshold=2.868e+02, percent-clipped=2.0
+2024-08-03 11:18:23,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=106934.66666666667, ans=0.05
+2024-08-03 11:18:27,055 INFO [train.py:1114] (3/4) Epoch 9, batch 50, loss[loss=0.1872, simple_loss=0.2669, pruned_loss=0.05375, over 13406.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3038, pruned_loss=0.07414, over 578707.01 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:18:34,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=106971.33333333333, ans=0.0
+2024-08-03 11:18:37,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=107008.0, ans=0.0
+2024-08-03 11:18:44,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107008.0, ans=0.1
+2024-08-03 11:18:56,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107081.33333333333, ans=0.125
+2024-08-03 11:19:09,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107118.0, ans=0.125
+2024-08-03 11:19:09,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=107118.0, ans=0.2
+2024-08-03 11:19:12,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107118.0, ans=0.1
+2024-08-03 11:19:16,449 INFO [train.py:1114] (3/4) Epoch 9, batch 100, loss[loss=0.1822, simple_loss=0.2635, pruned_loss=0.05043, over 13541.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.305, pruned_loss=0.07374, over 1025471.13 frames. ], batch size: 35, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:19:16,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=107154.66666666667, ans=0.025
+2024-08-03 11:19:17,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107154.66666666667, ans=0.1
+2024-08-03 11:19:21,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=107154.66666666667, ans=0.2
+2024-08-03 11:19:22,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=107154.66666666667, ans=0.2
+2024-08-03 11:19:30,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=107191.33333333333, ans=0.0
+2024-08-03 11:19:39,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107228.0, ans=0.125
+2024-08-03 11:19:43,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=107228.0, ans=0.2
+2024-08-03 11:19:45,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=107264.66666666667, ans=0.125
+2024-08-03 11:19:47,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=107264.66666666667, ans=0.2
+2024-08-03 11:19:54,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107301.33333333333, ans=0.1
+2024-08-03 11:19:54,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.233e+02 1.427e+02 1.918e+02 3.132e+02, threshold=2.853e+02, percent-clipped=1.0
+2024-08-03 11:20:04,705 INFO [train.py:1114] (3/4) Epoch 9, batch 150, loss[loss=0.2004, simple_loss=0.2725, pruned_loss=0.06418, over 13435.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3018, pruned_loss=0.07231, over 1386842.94 frames. ], batch size: 32, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:20:09,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=107338.0, ans=0.125
+2024-08-03 11:20:12,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107338.0, ans=0.125
+2024-08-03 11:20:19,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=107374.66666666667, ans=0.95
+2024-08-03 11:20:21,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=107374.66666666667, ans=0.0
+2024-08-03 11:20:21,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107411.33333333333, ans=0.125
+2024-08-03 11:20:25,028 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.39 vs. limit=15.0
+2024-08-03 11:20:46,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107484.66666666667, ans=0.125
+2024-08-03 11:20:49,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107484.66666666667, ans=0.1
+2024-08-03 11:20:51,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107484.66666666667, ans=0.1
+2024-08-03 11:20:52,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=107521.33333333333, ans=0.125
+2024-08-03 11:20:56,480 INFO [train.py:1114] (3/4) Epoch 9, batch 200, loss[loss=0.2383, simple_loss=0.3158, pruned_loss=0.08039, over 12591.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3019, pruned_loss=0.07271, over 1665951.83 frames. ], batch size: 58, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:21:00,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=107521.33333333333, ans=0.2
+2024-08-03 11:21:04,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=107521.33333333333, ans=0.125
+2024-08-03 11:21:16,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=107558.0, ans=0.0
+2024-08-03 11:21:27,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=107631.33333333333, ans=0.0
+2024-08-03 11:21:32,505 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.10 vs. limit=10.0
+2024-08-03 11:21:34,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0
+2024-08-03 11:21:39,288 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.622e+01 1.326e+02 1.622e+02 2.251e+02 3.498e+02, threshold=3.245e+02, percent-clipped=9.0
+2024-08-03 11:21:45,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=15.0
+2024-08-03 11:21:47,923 INFO [train.py:1114] (3/4) Epoch 9, batch 250, loss[loss=0.2611, simple_loss=0.335, pruned_loss=0.09356, over 13334.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3024, pruned_loss=0.07318, over 1884654.36 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:21:52,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=12.0
+2024-08-03 11:22:17,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107778.0, ans=0.125
+2024-08-03 11:22:20,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=107814.66666666667, ans=0.125
+2024-08-03 11:22:29,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107851.33333333333, ans=0.0
+2024-08-03 11:22:30,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0
+2024-08-03 11:22:32,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=107851.33333333333, ans=0.125
+2024-08-03 11:22:32,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107851.33333333333, ans=0.1
+2024-08-03 11:22:33,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=107851.33333333333, ans=10.0
+2024-08-03 11:22:34,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=107851.33333333333, ans=0.025
+2024-08-03 11:22:38,746 INFO [train.py:1114] (3/4) Epoch 9, batch 300, loss[loss=0.2505, simple_loss=0.3284, pruned_loss=0.08632, over 13445.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3017, pruned_loss=0.07263, over 2050482.55 frames. ], batch size: 42, lr: 1.42e-02, grad_scale: 32.0
+2024-08-03 11:22:42,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=107888.0, ans=0.025
+2024-08-03 11:22:44,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=107888.0, ans=0.025
+2024-08-03 11:22:45,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=107888.0, ans=0.07
+2024-08-03 11:22:45,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.11 vs. limit=15.0
+2024-08-03 11:22:52,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=107924.66666666667, ans=0.0
+2024-08-03 11:22:54,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107924.66666666667, ans=0.125
+2024-08-03 11:23:08,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=107998.0, ans=0.0
+2024-08-03 11:23:08,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=107998.0, ans=0.2
+2024-08-03 11:23:09,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107998.0, ans=0.125
+2024-08-03 11:23:18,265 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.922e+01 1.191e+02 1.365e+02 1.684e+02 2.533e+02, threshold=2.730e+02, percent-clipped=0.0
+2024-08-03 11:23:32,779 INFO [train.py:1114] (3/4) Epoch 9, batch 350, loss[loss=0.2069, simple_loss=0.2834, pruned_loss=0.06518, over 13585.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3017, pruned_loss=0.07238, over 2181095.60 frames. ], batch size: 33, lr: 1.41e-02, grad_scale: 32.0
+2024-08-03 12:24:12,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.60 vs. limit=22.5
+2024-08-03 11:24:14,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108108.0, ans=0.1
+2024-08-03 11:24:18,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.16 vs. limit=22.5
+2024-08-03 11:24:35,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=108218.0, ans=0.2
+2024-08-03 11:24:42,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.57 vs. limit=10.0
+2024-08-03 11:24:44,568 INFO [train.py:1114] (3/4) Epoch 9, batch 400, loss[loss=0.2271, simple_loss=0.3071, pruned_loss=0.07349, over 13370.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3026, pruned_loss=0.07302, over 2284624.68 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 32.0
+2024-08-03 11:25:29,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=108328.0, ans=0.2
+2024-08-03 11:25:45,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=108401.33333333333, ans=0.2
+2024-08-03 11:25:46,044 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.902e+01 1.178e+02 1.332e+02 1.607e+02 2.662e+02, threshold=2.664e+02, percent-clipped=0.0
+2024-08-03 11:25:47,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=108401.33333333333, ans=0.125
+2024-08-03 11:25:59,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=108438.0, ans=0.5
+2024-08-03 11:26:00,582 INFO [train.py:1114] (3/4) Epoch 9, batch 450, loss[loss=0.2482, simple_loss=0.3233, pruned_loss=0.08654, over 13562.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3025, pruned_loss=0.07275, over 2358419.85 frames. ], batch size: 38, lr: 1.41e-02, grad_scale: 32.0
+2024-08-03 11:26:02,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=108438.0, ans=0.0
+2024-08-03 11:26:02,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=108438.0, ans=15.0
+2024-08-03 11:26:16,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108474.66666666667, ans=0.125
+2024-08-03 11:26:17,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=108474.66666666667, ans=0.125
+2024-08-03 11:26:21,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=108511.33333333333, ans=0.025
+2024-08-03 11:26:27,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0
+2024-08-03 11:26:31,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.87 vs. limit=15.0
+2024-08-03 11:26:33,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108548.0, ans=0.1
+2024-08-03 11:26:38,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108584.66666666667, ans=0.125
+2024-08-03 11:26:42,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=108584.66666666667, ans=0.125
+2024-08-03 11:26:46,102 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 11:26:46,756 INFO [train.py:1114] (3/4) Epoch 9, batch 500, loss[loss=0.2751, simple_loss=0.3521, pruned_loss=0.09901, over 13416.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3017, pruned_loss=0.0723, over 2424435.17 frames. ], batch size: 43, lr: 1.41e-02, grad_scale: 32.0
+2024-08-03 11:26:57,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=108621.33333333333, ans=0.025
+2024-08-03 11:27:30,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.278e+01 1.224e+02 1.411e+02 1.818e+02 3.084e+02, threshold=2.822e+02, percent-clipped=2.0
+2024-08-03 11:27:37,717 INFO [train.py:1114] (3/4) Epoch 9, batch 550, loss[loss=0.2399, simple_loss=0.3167, pruned_loss=0.08152, over 13079.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3018, pruned_loss=0.07244, over 2466329.57 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 16.0
+2024-08-03 11:28:02,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=108841.33333333333, ans=0.0
+2024-08-03 11:28:08,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108841.33333333333, ans=0.1
+2024-08-03 11:28:28,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=108914.66666666667, ans=10.0
+2024-08-03 11:28:39,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=108951.33333333333, ans=0.025
+2024-08-03 11:28:43,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=108988.0, ans=0.2
+2024-08-03 11:28:43,874 INFO [train.py:1114] (3/4) Epoch 9, batch 600, loss[loss=0.2585, simple_loss=0.3344, pruned_loss=0.09133, over 13326.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3022, pruned_loss=0.07275, over 2506291.63 frames. ], batch size: 46, lr: 1.41e-02, grad_scale: 16.0
+2024-08-03 11:29:06,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=109061.33333333333, ans=0.0
+2024-08-03 11:29:15,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=109098.0, ans=0.125
+2024-08-03 11:29:17,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109098.0, ans=0.125
+2024-08-03 11:29:21,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.197e+02 1.407e+02 1.902e+02 4.020e+02, threshold=2.813e+02, percent-clipped=3.0
+2024-08-03 11:29:28,824 INFO [train.py:1114] (3/4) Epoch 9, batch 650, loss[loss=0.2053, simple_loss=0.2909, pruned_loss=0.05986, over 13548.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3018, pruned_loss=0.07222, over 2542183.95 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 16.0
+2024-08-03 11:29:35,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=109171.33333333333, ans=0.125
+2024-08-03 11:29:45,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=109244.66666666667, ans=0.125
+2024-08-03 11:29:54,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109244.66666666667, ans=0.125
+2024-08-03 11:29:55,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=109281.33333333333, ans=0.0
+2024-08-03 11:29:55,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0
+2024-08-03 11:30:15,753 INFO [train.py:1114] (3/4) Epoch 9, batch 700, loss[loss=0.2146, simple_loss=0.292, pruned_loss=0.0686, over 13522.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3018, pruned_loss=0.07238, over 2563852.69 frames. ], batch size: 35, lr: 1.41e-02, grad_scale: 8.0
+2024-08-03 11:30:22,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.50 vs. limit=10.0
+2024-08-03 11:30:25,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109391.33333333333, ans=0.125
+2024-08-03 11:30:37,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.41 vs. limit=15.0
+2024-08-03 11:30:47,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109464.66666666667, ans=0.1
+2024-08-03 11:30:56,866 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.372e+01 1.239e+02 1.392e+02 1.880e+02 3.301e+02, threshold=2.784e+02, percent-clipped=6.0
+2024-08-03 11:30:57,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=109501.33333333333, ans=0.0
+2024-08-03 11:31:02,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5
+2024-08-03 11:31:03,172 INFO [train.py:1114] (3/4) Epoch 9, batch 750, loss[loss=0.2111, simple_loss=0.3029, pruned_loss=0.05966, over 13361.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3009, pruned_loss=0.07209, over 2582236.13 frames. ], batch size: 37, lr: 1.41e-02, grad_scale: 8.0
+2024-08-03 11:31:21,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109574.66666666667, ans=0.1
+2024-08-03 11:31:53,695 INFO [train.py:1114] (3/4) Epoch 9, batch 800, loss[loss=0.1861, simple_loss=0.2701, pruned_loss=0.05103, over 13350.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3001, pruned_loss=0.07154, over 2596611.50 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:31:55,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=109721.33333333333, ans=0.0
+2024-08-03 11:31:57,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=109721.33333333333, ans=0.025
+2024-08-03 11:32:20,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109794.66666666667, ans=0.0
+2024-08-03 11:32:34,402 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.676e+01 1.153e+02 1.323e+02 1.787e+02 2.891e+02, threshold=2.646e+02, percent-clipped=1.0
+2024-08-03 11:32:37,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=109868.0, ans=0.0
+2024-08-03 11:32:40,794 INFO [train.py:1114] (3/4) Epoch 9, batch 850, loss[loss=0.2451, simple_loss=0.3251, pruned_loss=0.0826, over 13313.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2999, pruned_loss=0.07134, over 2609270.97 frames. ], batch size: 40, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:32:43,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109904.66666666667, ans=0.1
+2024-08-03 11:32:46,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=109904.66666666667, ans=0.0
+2024-08-03 11:32:54,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=109941.33333333333, ans=0.0
+2024-08-03 11:33:04,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109978.0, ans=0.125
+2024-08-03 11:33:26,730 INFO [train.py:1114] (3/4) Epoch 9, batch 900, loss[loss=0.1883, simple_loss=0.27, pruned_loss=0.05333, over 13342.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3001, pruned_loss=0.07112, over 2611425.81 frames. ], batch size: 33, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:33:45,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=110161.33333333333, ans=0.025
+2024-08-03 11:33:50,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=110161.33333333333, ans=0.2
+2024-08-03 11:33:51,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.04 vs. limit=15.0
+2024-08-03 11:33:59,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=110198.0, ans=0.015
+2024-08-03 11:34:05,779 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.797e+01 1.134e+02 1.308e+02 1.698e+02 3.183e+02, threshold=2.616e+02, percent-clipped=1.0
+2024-08-03 11:38:02,473 INFO [train.py:1114] (3/4) Epoch 9, batch 950, loss[loss=0.217, simple_loss=0.2835, pruned_loss=0.07522, over 13528.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2998, pruned_loss=0.07109, over 2612262.77 frames. ], batch size: 34, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:46:14,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.08 vs. limit=15.0
+2024-08-03 11:46:14,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.98 vs. limit=10.0
+2024-08-03 11:46:15,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=110271.33333333333, ans=0.0
+2024-08-03 11:46:17,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=110271.33333333333, ans=0.2
+2024-08-03 11:46:18,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=110308.0, ans=0.0
+2024-08-03 11:55:43,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=110308.0, ans=0.1
+2024-08-03 11:55:44,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=12.0
+2024-08-03 11:57:17,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=110418.0, ans=0.0
+2024-08-03 11:57:24,449 INFO [train.py:1114] (3/4) Epoch 9, batch 1000, loss[loss=0.2129, simple_loss=0.2947, pruned_loss=0.06552, over 13364.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3004, pruned_loss=0.07108, over 2611919.07 frames. ], batch size: 35, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 11:58:03,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=110454.66666666667, ans=0.125
+2024-08-03 11:58:05,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=110454.66666666667, ans=0.2
+2024-08-03 11:58:06,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.20 vs. limit=12.0
+2024-08-03 11:58:09,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=110491.33333333333, ans=0.04949747468305833
+2024-08-03 11:58:13,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110491.33333333333, ans=0.125
+2024-08-03 11:58:14,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110491.33333333333, ans=0.1
+2024-08-03 11:58:18,490 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=15.0
+2024-08-03 11:58:21,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110528.0, ans=0.1
+2024-08-03 11:59:08,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=110564.66666666667, ans=0.95
+2024-08-03 11:59:10,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.96 vs. limit=15.0
+2024-08-03 12:02:14,659 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.417e+01 1.320e+02 1.683e+02 2.294e+02 6.382e+02, threshold=3.366e+02, percent-clipped=18.0
+2024-08-03 12:02:43,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110601.33333333333, ans=0.0
+2024-08-03 12:02:43,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.64 vs. limit=15.0
+2024-08-03 12:02:46,937 INFO [train.py:1114] (3/4) Epoch 9, batch 1050, loss[loss=0.2325, simple_loss=0.3141, pruned_loss=0.07543, over 13581.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.299, pruned_loss=0.07059, over 2616282.07 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 12:03:31,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=110748.0, ans=0.125
+2024-08-03 12:03:33,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.17 vs. limit=15.0
+2024-08-03 12:03:40,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=110784.66666666667, ans=10.0
+2024-08-03 12:03:43,570 INFO [train.py:1114] (3/4) Epoch 9, batch 1100, loss[loss=0.2239, simple_loss=0.3036, pruned_loss=0.07214, over 13554.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2998, pruned_loss=0.07134, over 2619759.24 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 12:04:03,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.49 vs. limit=15.0
+2024-08-03 12:04:17,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=110931.33333333333, ans=0.07
+2024-08-03 12:04:23,167 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.596e+01 1.248e+02 1.447e+02 1.784e+02 2.947e+02, threshold=2.893e+02, percent-clipped=0.0
+2024-08-03 12:04:28,531 INFO [train.py:1114] (3/4) Epoch 9, batch 1150, loss[loss=0.2204, simple_loss=0.3011, pruned_loss=0.06985, over 13549.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2996, pruned_loss=0.07126, over 2619806.93 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 8.0
+2024-08-03 12:04:34,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=111004.66666666667, ans=0.2
+2024-08-03 12:04:40,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=111041.33333333333, ans=0.04949747468305833
+2024-08-03 12:04:46,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111041.33333333333, ans=0.1
+2024-08-03 12:04:51,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=111078.0, ans=0.125
+2024-08-03 12:05:10,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111151.33333333333, ans=0.125
+2024-08-03 12:05:17,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=111188.0, ans=0.125
+2024-08-03 12:05:17,650 INFO [train.py:1114] (3/4) Epoch 9, batch 1200, loss[loss=0.2014, simple_loss=0.2905, pruned_loss=0.05615, over 13572.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3002, pruned_loss=0.07142, over 2616298.96 frames. ], batch size: 39, lr: 1.40e-02, grad_scale: 16.0
+2024-08-03 12:05:27,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111224.66666666667, ans=0.125
+2024-08-03 12:05:29,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=111224.66666666667, ans=0.2
+2024-08-03 12:05:38,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.96 vs. limit=15.0
+2024-08-03 12:05:42,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111261.33333333333, ans=0.125
+2024-08-03 12:05:59,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=111334.66666666667, ans=0.2
+2024-08-03 12:05:59,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111334.66666666667, ans=0.125
+2024-08-03 12:05:59,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111334.66666666667, ans=0.1
+2024-08-03 12:06:01,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.516e+01 1.186e+02 1.386e+02 1.604e+02 2.506e+02, threshold=2.772e+02, percent-clipped=0.0
+2024-08-03 12:06:01,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=111334.66666666667, ans=0.125
+2024-08-03 12:06:06,354 INFO [train.py:1114] (3/4) Epoch 9, batch 1250, loss[loss=0.2306, simple_loss=0.3098, pruned_loss=0.07572, over 13459.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3011, pruned_loss=0.07169, over 2628440.73 frames. ], batch size: 42, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:06:36,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.78 vs. limit=15.0
+2024-08-03 12:06:50,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111518.0, ans=0.125
+2024-08-03 12:06:52,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111554.66666666667, ans=0.125
+2024-08-03 12:06:53,564 INFO [train.py:1114] (3/4) Epoch 9, batch 1300, loss[loss=0.2683, simple_loss=0.3449, pruned_loss=0.09582, over 12912.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3007, pruned_loss=0.07183, over 2632398.50 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:06:55,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.47 vs. limit=15.0
+2024-08-03 12:07:10,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111591.33333333333, ans=0.125
+2024-08-03 12:07:19,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=111628.0, ans=0.09899494936611666
+2024-08-03 12:07:24,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=111664.66666666667, ans=0.125
+2024-08-03 12:07:28,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111664.66666666667, ans=0.125
+2024-08-03 12:07:46,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=111664.66666666667, ans=0.0
+2024-08-03 12:07:48,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111701.33333333333, ans=0.125
+2024-08-03 12:07:52,285 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.616e+01 1.204e+02 1.402e+02 1.778e+02 3.607e+02, threshold=2.805e+02, percent-clipped=3.0
+2024-08-03 12:07:56,910 INFO [train.py:1114] (3/4) Epoch 9, batch 1350, loss[loss=0.2142, simple_loss=0.3068, pruned_loss=0.06084, over 13540.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3008, pruned_loss=0.07179, over 2640393.74 frames. ], batch size: 37, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:08:05,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.65 vs. limit=22.5
+2024-08-03 12:08:09,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111774.66666666667, ans=0.125
+2024-08-03 12:08:10,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111774.66666666667, ans=0.1
+2024-08-03 12:08:11,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=111774.66666666667, ans=0.0
+2024-08-03 12:08:17,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=111811.33333333333, ans=0.0
+2024-08-03 12:08:23,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=15.0
+2024-08-03 12:08:27,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=111848.0, ans=0.125
+2024-08-03 12:08:29,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=111848.0, ans=0.1
+2024-08-03 12:08:41,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=111884.66666666667, ans=0.2
+2024-08-03 12:08:43,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=111884.66666666667, ans=0.0
+2024-08-03 12:08:45,495 INFO [train.py:1114] (3/4) Epoch 9, batch 1400, loss[loss=0.2086, simple_loss=0.2735, pruned_loss=0.0719, over 13260.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2998, pruned_loss=0.07127, over 2643739.98 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:08:47,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=111921.33333333333, ans=0.125
+2024-08-03 12:08:55,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=111958.0, ans=0.2
+2024-08-03 12:09:02,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111958.0, ans=0.125
+2024-08-03 12:10:28,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=112068.0, ans=0.2
+2024-08-03 12:10:29,036 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.189e+02 1.393e+02 1.717e+02 2.790e+02, threshold=2.787e+02, percent-clipped=0.0
+2024-08-03 12:10:29,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112068.0, ans=0.125
+2024-08-03 12:10:45,203 INFO [train.py:1114] (3/4) Epoch 9, batch 1450, loss[loss=0.233, simple_loss=0.3161, pruned_loss=0.07493, over 13446.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3002, pruned_loss=0.07167, over 2643141.98 frames. ], batch size: 43, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:11:15,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=112141.33333333333, ans=0.0
+2024-08-03 12:11:46,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.22 vs. limit=15.0
+2024-08-03 12:12:11,606 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.55 vs. limit=10.0
+2024-08-03 12:12:28,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112288.0, ans=0.125
+2024-08-03 12:12:29,511 INFO [train.py:1114] (3/4) Epoch 9, batch 1500, loss[loss=0.2341, simple_loss=0.3117, pruned_loss=0.07821, over 13400.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3006, pruned_loss=0.07143, over 2641891.10 frames. ], batch size: 39, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:12:36,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112288.0, ans=0.125
+2024-08-03 12:13:00,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=112398.0, ans=0.0
+2024-08-03 12:13:09,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=112434.66666666667, ans=0.04949747468305833
+2024-08-03 12:13:09,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=112434.66666666667, ans=0.2
+2024-08-03 12:13:10,793 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.784e+01 1.196e+02 1.437e+02 1.780e+02 2.962e+02, threshold=2.875e+02, percent-clipped=1.0
+2024-08-03 12:13:13,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=112434.66666666667, ans=0.2
+2024-08-03 12:13:15,275 INFO [train.py:1114] (3/4) Epoch 9, batch 1550, loss[loss=0.2342, simple_loss=0.3182, pruned_loss=0.07512, over 13415.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3014, pruned_loss=0.07245, over 2631619.90 frames. ], batch size: 41, lr: 1.39e-02, grad_scale: 8.0
+2024-08-03 12:13:16,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.55 vs. limit=22.5
+2024-08-03 12:13:18,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=112471.33333333333, ans=0.2
+2024-08-03 12:13:23,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=112508.0, ans=0.125
+2024-08-03 12:13:31,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=112508.0, ans=0.035
+2024-08-03 12:13:32,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112544.66666666667, ans=0.125
+2024-08-03 12:13:35,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=112544.66666666667, ans=0.125
+2024-08-03 12:13:55,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112618.0, ans=0.125
+2024-08-03 12:14:00,204 INFO [train.py:1114] (3/4) Epoch 9, batch 1600, loss[loss=0.202, simple_loss=0.2874, pruned_loss=0.05825, over 13583.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3016, pruned_loss=0.07242, over 2625152.41 frames. ], batch size: 39, lr: 1.39e-02, grad_scale: 16.0
+2024-08-03 12:14:01,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=112654.66666666667, ans=0.125
+2024-08-03 12:14:43,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.525e+01 1.266e+02 1.505e+02 1.991e+02 3.418e+02, threshold=3.010e+02, percent-clipped=5.0
+2024-08-03 12:14:47,578 INFO [train.py:1114] (3/4) Epoch 9, batch 1650, loss[loss=0.1953, simple_loss=0.2846, pruned_loss=0.05296, over 13311.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3006, pruned_loss=0.07195, over 2623001.30 frames. ], batch size: 40, lr: 1.39e-02, grad_scale: 16.0
+2024-08-03 12:14:47,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112838.0, ans=0.1
+2024-08-03 12:14:55,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=112874.66666666667, ans=0.2
+2024-08-03 12:15:03,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=112874.66666666667, ans=0.0
+2024-08-03 12:15:03,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112874.66666666667, ans=0.1
+2024-08-03 12:15:11,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=112911.33333333333, ans=10.0
+2024-08-03 12:15:20,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=112948.0, ans=0.125
+2024-08-03 12:15:26,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112984.66666666667, ans=0.1
+2024-08-03 12:15:27,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0
+2024-08-03 12:15:30,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=112984.66666666667, ans=0.0
+2024-08-03 12:15:34,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=112984.66666666667, ans=0.0
+2024-08-03 12:15:37,671 INFO [train.py:1114] (3/4) Epoch 9, batch 1700, loss[loss=0.2103, simple_loss=0.2823, pruned_loss=0.06909, over 13254.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2996, pruned_loss=0.07106, over 2631525.50 frames. ], batch size: 31, lr: 1.39e-02, grad_scale: 16.0
+2024-08-03 12:15:46,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=113058.0, ans=0.125
+2024-08-03 12:15:51,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113058.0, ans=0.125
+2024-08-03 12:15:55,496 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:16:03,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.73 vs. limit=15.0
+2024-08-03 12:16:17,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=113168.0, ans=0.125
+2024-08-03 12:16:19,706 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.237e+01 1.197e+02 1.424e+02 1.856e+02 4.679e+02, threshold=2.848e+02, percent-clipped=5.0
+2024-08-03 12:16:21,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=113168.0, ans=0.09899494936611666
+2024-08-03 12:16:21,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=113168.0, ans=0.125
+2024-08-03 12:16:24,271 INFO [train.py:1114] (3/4) Epoch 9, batch 1750, loss[loss=0.183, simple_loss=0.2608, pruned_loss=0.05262, over 13525.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2988, pruned_loss=0.07076, over 2634921.97 frames. ], batch size: 31, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:16:28,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=113204.66666666667, ans=0.125
+2024-08-03 12:16:45,561 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:16:45,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=12.0
+2024-08-03 12:17:06,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=113351.33333333333, ans=0.0
+2024-08-03 12:17:09,924 INFO [train.py:1114] (3/4) Epoch 9, batch 1800, loss[loss=0.2396, simple_loss=0.3167, pruned_loss=0.08125, over 13546.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2992, pruned_loss=0.07082, over 2635218.40 frames. ], batch size: 38, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:17:31,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=113461.33333333333, ans=0.125
+2024-08-03 12:17:31,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=113461.33333333333, ans=0.0
+2024-08-03 12:17:45,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113534.66666666667, ans=0.125
+2024-08-03 12:17:50,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113534.66666666667, ans=0.125
+2024-08-03 12:17:50,713 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.294e+02 1.753e+02 2.320e+02 3.685e+02, threshold=3.507e+02, percent-clipped=11.0
+2024-08-03 12:17:55,241 INFO [train.py:1114] (3/4) Epoch 9, batch 1850, loss[loss=0.2218, simple_loss=0.313, pruned_loss=0.06527, over 13388.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2987, pruned_loss=0.07039, over 2637709.25 frames. ], batch size: 39, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:18:08,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113608.0, ans=0.1
+2024-08-03 12:18:10,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113608.0, ans=0.125
+2024-08-03 12:18:10,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.41 vs. limit=12.0
+2024-08-03 12:18:13,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=113608.0, ans=0.0
+2024-08-03 12:18:27,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=113681.33333333333, ans=0.0
+2024-08-03 12:18:32,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0
+2024-08-03 12:18:36,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=113718.0, ans=0.2
+2024-08-03 12:18:43,014 INFO [train.py:1114] (3/4) Epoch 9, batch 1900, loss[loss=0.2399, simple_loss=0.3243, pruned_loss=0.0778, over 13317.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3002, pruned_loss=0.07086, over 2640380.70 frames. ], batch size: 40, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:19:07,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=113828.0, ans=0.2
+2024-08-03 12:19:08,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=113828.0, ans=0.1
+2024-08-03 12:19:11,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=113828.0, ans=0.2
+2024-08-03 12:19:12,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113864.66666666667, ans=0.125
+2024-08-03 12:19:26,848 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.181e+02 1.342e+02 1.556e+02 3.723e+02, threshold=2.684e+02, percent-clipped=1.0
+2024-08-03 12:19:27,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113901.33333333333, ans=0.125
+2024-08-03 12:19:33,197 INFO [train.py:1114] (3/4) Epoch 9, batch 1950, loss[loss=0.2341, simple_loss=0.3128, pruned_loss=0.07769, over 13562.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3013, pruned_loss=0.07141, over 2646727.68 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 16.0
+2024-08-03 12:19:34,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113938.0, ans=0.125
+2024-08-03 12:19:34,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=113938.0, ans=0.0
+2024-08-03 12:19:35,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=113938.0, ans=0.125
+2024-08-03 12:19:58,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=114011.33333333333, ans=0.025
+2024-08-03 12:19:58,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0
+2024-08-03 12:19:59,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114011.33333333333, ans=0.125
+2024-08-03 12:20:00,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0
+2024-08-03 12:20:04,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=114011.33333333333, ans=0.125
+2024-08-03 12:20:04,059 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 12:20:06,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.00 vs. limit=22.5
+2024-08-03 12:20:08,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=114048.0, ans=0.125
+2024-08-03 12:20:16,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=114084.66666666667, ans=0.125
+2024-08-03 12:20:21,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=114084.66666666667, ans=0.0
+2024-08-03 12:20:23,612 INFO [train.py:1114] (3/4) Epoch 9, batch 2000, loss[loss=0.1963, simple_loss=0.2641, pruned_loss=0.0643, over 13520.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.301, pruned_loss=0.07125, over 2636666.97 frames. ], batch size: 31, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:20:34,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=114158.0, ans=0.025
+2024-08-03 12:20:42,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=114194.66666666667, ans=0.2
+2024-08-03 12:20:45,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=114194.66666666667, ans=0.125
+2024-08-03 12:21:05,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.389e+01 1.230e+02 1.563e+02 1.827e+02 3.181e+02, threshold=3.125e+02, percent-clipped=4.0
+2024-08-03 12:21:09,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.19 vs. limit=12.0
+2024-08-03 12:21:10,001 INFO [train.py:1114] (3/4) Epoch 9, batch 2050, loss[loss=0.211, simple_loss=0.2854, pruned_loss=0.06827, over 13398.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3002, pruned_loss=0.07125, over 2633298.38 frames. ], batch size: 32, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:21:24,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=114341.33333333333, ans=0.125
+2024-08-03 12:21:57,220 INFO [train.py:1114] (3/4) Epoch 9, batch 2100, loss[loss=0.2062, simple_loss=0.2843, pruned_loss=0.06403, over 13541.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2994, pruned_loss=0.07096, over 2638400.90 frames. ], batch size: 37, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:21:59,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114488.0, ans=0.125
+2024-08-03 12:22:09,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.35 vs. limit=15.0
+2024-08-03 12:22:10,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=114524.66666666667, ans=0.0
+2024-08-03 12:22:12,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0
+2024-08-03 12:22:25,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=114598.0, ans=0.125
+2024-08-03 12:22:42,440 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.66 vs. limit=15.0
+2024-08-03 12:22:46,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=114634.66666666667, ans=0.125
+2024-08-03 12:22:54,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=15.0
+2024-08-03 12:22:54,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.194e+02 1.363e+02 1.768e+02 4.718e+02, threshold=2.726e+02, percent-clipped=3.0
+2024-08-03 12:23:07,978 INFO [train.py:1114] (3/4) Epoch 9, batch 2150, loss[loss=0.198, simple_loss=0.2729, pruned_loss=0.06151, over 13567.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2978, pruned_loss=0.06994, over 2647424.16 frames. ], batch size: 36, lr: 1.38e-02, grad_scale: 32.0
+2024-08-03 12:23:20,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114671.33333333333, ans=0.125
+2024-08-03 12:23:25,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=114671.33333333333, ans=0.0
+2024-08-03 12:23:50,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114708.0, ans=0.125
+2024-08-03 12:24:08,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=114744.66666666667, ans=0.0
+2024-08-03 12:24:12,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114744.66666666667, ans=0.125
+2024-08-03 12:24:33,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=114781.33333333333, ans=0.0
+2024-08-03 12:24:43,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=114818.0, ans=0.0
+2024-08-03 12:24:44,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114818.0, ans=0.1
+2024-08-03 12:24:44,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=114818.0, ans=0.0
+2024-08-03 12:24:59,182 INFO [train.py:1114] (3/4) Epoch 9, batch 2200, loss[loss=0.1986, simple_loss=0.2926, pruned_loss=0.05233, over 13402.00 frames. ], tot_loss[loss=0.219, simple_loss=0.298, pruned_loss=0.06998, over 2645449.49 frames. ], batch size: 39, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:25:04,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=114854.66666666667, ans=0.125
+2024-08-03 12:25:47,878 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.835e+01 1.281e+02 1.616e+02 2.381e+02 3.635e+02, threshold=3.231e+02, percent-clipped=12.0
+2024-08-03 12:25:52,563 INFO [train.py:1114] (3/4) Epoch 9, batch 2250, loss[loss=0.2036, simple_loss=0.2842, pruned_loss=0.06155, over 13368.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2979, pruned_loss=0.07003, over 2643105.48 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:26:10,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=115074.66666666667, ans=0.125
+2024-08-03 12:26:11,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=115074.66666666667, ans=0.2
+2024-08-03 12:26:35,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115184.66666666667, ans=0.1
+2024-08-03 12:26:39,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.88 vs. limit=15.0
+2024-08-03 12:26:43,633 INFO [train.py:1114] (3/4) Epoch 9, batch 2300, loss[loss=0.1845, simple_loss=0.26, pruned_loss=0.05448, over 13579.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2967, pruned_loss=0.06972, over 2639361.37 frames. ], batch size: 33, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:26:50,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=115221.33333333333, ans=0.0
+2024-08-03 12:26:51,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=115221.33333333333, ans=0.0
+2024-08-03 12:27:13,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115331.33333333333, ans=0.1
+2024-08-03 12:27:25,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=115368.0, ans=0.0
+2024-08-03 12:27:28,234 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.909e+01 1.211e+02 1.428e+02 1.712e+02 2.709e+02, threshold=2.855e+02, percent-clipped=0.0
+2024-08-03 12:27:30,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=115368.0, ans=0.125
+2024-08-03 12:27:32,866 INFO [train.py:1114] (3/4) Epoch 9, batch 2350, loss[loss=0.2122, simple_loss=0.2956, pruned_loss=0.06441, over 13548.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2966, pruned_loss=0.06948, over 2641714.74 frames. ], batch size: 38, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:27:35,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=115404.66666666667, ans=0.1
+2024-08-03 12:27:37,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=115404.66666666667, ans=0.125
+2024-08-03 12:27:45,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=115441.33333333333, ans=0.125
+2024-08-03 12:27:50,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.13 vs. limit=15.0
+2024-08-03 12:27:51,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=115441.33333333333, ans=0.2
+2024-08-03 12:27:58,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115478.0, ans=0.1
+2024-08-03 12:28:03,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.60 vs. limit=15.0
+2024-08-03 12:28:21,740 INFO [train.py:1114] (3/4) Epoch 9, batch 2400, loss[loss=0.2053, simple_loss=0.2752, pruned_loss=0.06766, over 13558.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2975, pruned_loss=0.06964, over 2642492.39 frames. ], batch size: 35, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:28:30,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=115588.0, ans=0.125
+2024-08-03 12:28:46,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.83 vs. limit=22.5
+2024-08-03 12:28:48,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=115661.33333333333, ans=0.0
+2024-08-03 12:28:48,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=115661.33333333333, ans=0.125
+2024-08-03 12:29:03,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115734.66666666667, ans=0.0
+2024-08-03 12:29:04,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.274e+02 1.512e+02 2.104e+02 3.890e+02, threshold=3.023e+02, percent-clipped=4.0
+2024-08-03 12:29:06,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=115734.66666666667, ans=0.2
+2024-08-03 12:29:08,580 INFO [train.py:1114] (3/4) Epoch 9, batch 2450, loss[loss=0.2479, simple_loss=0.3297, pruned_loss=0.08309, over 13358.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2993, pruned_loss=0.07069, over 2631595.55 frames. ], batch size: 37, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:29:15,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=115771.33333333333, ans=0.07
+2024-08-03 12:29:36,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=15.0
+2024-08-03 12:29:45,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=115918.0, ans=0.04949747468305833
+2024-08-03 12:29:53,295 INFO [train.py:1114] (3/4) Epoch 9, batch 2500, loss[loss=0.2092, simple_loss=0.2952, pruned_loss=0.06162, over 13399.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2984, pruned_loss=0.06978, over 2635670.21 frames. ], batch size: 39, lr: 1.37e-02, grad_scale: 32.0
+2024-08-03 12:29:59,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.58 vs. limit=15.0
+2024-08-03 12:30:07,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0
+2024-08-03 12:30:21,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=116064.66666666667, ans=0.0
+2024-08-03 12:30:23,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116064.66666666667, ans=0.125
+2024-08-03 12:30:31,832 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.707e+01 1.175e+02 1.472e+02 1.847e+02 3.243e+02, threshold=2.944e+02, percent-clipped=1.0
+2024-08-03 12:30:34,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=116101.33333333333, ans=0.0
+2024-08-03 12:30:36,226 INFO [train.py:1114] (3/4) Epoch 9, batch 2550, loss[loss=0.1768, simple_loss=0.2516, pruned_loss=0.05101, over 13554.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2986, pruned_loss=0.06967, over 2638426.47 frames. 
], batch size: 31, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:31:14,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=116284.66666666667, ans=0.0 +2024-08-03 12:31:20,892 INFO [train.py:1114] (3/4) Epoch 9, batch 2600, loss[loss=0.2128, simple_loss=0.2948, pruned_loss=0.06545, over 13548.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2991, pruned_loss=0.06988, over 2637566.25 frames. ], batch size: 36, lr: 1.37e-02, grad_scale: 32.0 +2024-08-03 12:31:34,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116358.0, ans=0.0 +2024-08-03 12:31:38,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=116394.66666666667, ans=0.0 +2024-08-03 12:31:53,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=116431.33333333333, ans=0.0 +2024-08-03 12:32:00,488 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.327e+01 1.173e+02 1.340e+02 1.707e+02 3.211e+02, threshold=2.680e+02, percent-clipped=1.0 +2024-08-03 12:32:03,965 INFO [train.py:1114] (3/4) Epoch 9, batch 2650, loss[loss=0.2387, simple_loss=0.33, pruned_loss=0.07374, over 13440.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2996, pruned_loss=0.07017, over 2640688.53 frames. ], batch size: 46, lr: 1.37e-02, grad_scale: 16.0 +2024-08-03 12:32:16,123 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:32:21,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=116578.0, ans=0.125 +2024-08-03 12:32:35,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=116614.66666666667, ans=0.2 +2024-08-03 12:32:41,159 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.13 vs. limit=15.0 +2024-08-03 12:32:47,472 INFO [train.py:1114] (3/4) Epoch 9, batch 2700, loss[loss=0.2414, simple_loss=0.3238, pruned_loss=0.07949, over 13531.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3006, pruned_loss=0.07084, over 2637216.38 frames. 
], batch size: 40, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:33:06,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=116761.33333333333, ans=0.0 +2024-08-03 12:33:08,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=116761.33333333333, ans=0.025 +2024-08-03 12:33:09,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116761.33333333333, ans=0.125 +2024-08-03 12:33:14,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116798.0, ans=0.0 +2024-08-03 12:33:22,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=116834.66666666667, ans=0.125 +2024-08-03 12:33:26,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=116834.66666666667, ans=0.125 +2024-08-03 12:33:28,907 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.391e+01 1.337e+02 1.607e+02 2.047e+02 3.156e+02, threshold=3.214e+02, percent-clipped=11.0 +2024-08-03 12:33:32,431 INFO [train.py:1114] (3/4) Epoch 9, batch 2750, loss[loss=0.1977, simple_loss=0.2798, pruned_loss=0.05778, over 13330.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2994, pruned_loss=0.07043, over 2635128.78 frames. ], batch size: 34, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:33:43,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=116908.0, ans=0.0 +2024-08-03 12:33:46,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=116908.0, ans=0.125 +2024-08-03 12:33:50,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116944.66666666667, ans=0.1 +2024-08-03 12:34:11,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=117018.0, ans=0.0 +2024-08-03 12:34:16,694 INFO [train.py:1114] (3/4) Epoch 9, batch 2800, loss[loss=0.3115, simple_loss=0.3605, pruned_loss=0.1312, over 9021.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2996, pruned_loss=0.07101, over 2626788.45 frames. ], batch size: 97, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:34:17,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=117054.66666666667, ans=0.125 +2024-08-03 12:34:22,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=117054.66666666667, ans=0.125 +2024-08-03 12:34:58,829 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.896e+01 1.288e+02 1.462e+02 1.862e+02 3.632e+02, threshold=2.925e+02, percent-clipped=2.0 +2024-08-03 12:34:59,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=117201.33333333333, ans=0.2 +2024-08-03 12:35:00,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.36 vs. limit=15.0 +2024-08-03 12:35:01,471 INFO [train.py:1114] (3/4) Epoch 9, batch 2850, loss[loss=0.1907, simple_loss=0.28, pruned_loss=0.05067, over 13364.00 frames. 
], tot_loss[loss=0.2214, simple_loss=0.3003, pruned_loss=0.07132, over 2621072.01 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:35:21,212 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:35:44,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117311.33333333333, ans=0.1 +2024-08-03 12:35:45,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=117311.33333333333, ans=0.2 +2024-08-03 12:36:05,398 INFO [train.py:1114] (3/4) Epoch 9, batch 2900, loss[loss=0.2094, simple_loss=0.2913, pruned_loss=0.06374, over 13367.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3006, pruned_loss=0.07093, over 2632102.86 frames. ], batch size: 36, lr: 1.36e-02, grad_scale: 16.0 +2024-08-03 12:36:14,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=117458.0, ans=0.2 +2024-08-03 12:36:18,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=117458.0, ans=0.0 +2024-08-03 12:36:26,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=117494.66666666667, ans=0.2 +2024-08-03 12:36:41,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=117568.0, ans=0.125 +2024-08-03 12:36:47,220 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.425e+01 1.178e+02 1.305e+02 1.613e+02 2.693e+02, threshold=2.610e+02, percent-clipped=0.0 +2024-08-03 12:36:48,963 INFO [train.py:1114] (3/4) Epoch 9, batch 2950, loss[loss=0.1957, simple_loss=0.2736, pruned_loss=0.05891, over 13346.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2994, pruned_loss=0.07082, over 2631289.65 frames. ], batch size: 34, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:36:56,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117604.66666666667, ans=0.0 +2024-08-03 12:36:59,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=117641.33333333333, ans=0.0 +2024-08-03 12:37:09,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=117678.0, ans=0.125 +2024-08-03 12:37:25,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117751.33333333333, ans=0.1 +2024-08-03 12:37:32,406 INFO [train.py:1114] (3/4) Epoch 9, batch 3000, loss[loss=0.2135, simple_loss=0.2943, pruned_loss=0.0664, over 13553.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2996, pruned_loss=0.0712, over 2631052.92 frames. ], batch size: 37, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:37:32,407 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 12:37:49,957 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.1846, simple_loss=0.2849, pruned_loss=0.04217, over 944034.00 frames. 
+2024-08-03 12:37:49,957 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 12:38:04,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=117824.66666666667, ans=0.125 +2024-08-03 12:38:10,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=117861.33333333333, ans=0.07 +2024-08-03 12:38:26,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=117934.66666666667, ans=0.2 +2024-08-03 12:38:27,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.45 vs. limit=22.5 +2024-08-03 12:38:29,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=117934.66666666667, ans=0.125 +2024-08-03 12:38:30,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.68 vs. limit=15.0 +2024-08-03 12:38:31,057 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.539e+01 1.182e+02 1.340e+02 1.696e+02 3.056e+02, threshold=2.681e+02, percent-clipped=1.0 +2024-08-03 12:38:32,960 INFO [train.py:1114] (3/4) Epoch 9, batch 3050, loss[loss=0.2274, simple_loss=0.3031, pruned_loss=0.07582, over 13538.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2999, pruned_loss=0.07106, over 2627599.54 frames. ], batch size: 35, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:38:33,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=117971.33333333333, ans=10.0 +2024-08-03 12:38:34,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=117971.33333333333, ans=0.05 +2024-08-03 12:38:40,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117971.33333333333, ans=0.125 +2024-08-03 12:38:49,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118044.66666666667, ans=0.1 +2024-08-03 12:39:07,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=118118.0, ans=0.125 +2024-08-03 12:39:08,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=118118.0, ans=0.125 +2024-08-03 12:39:12,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=118118.0, ans=0.125 +2024-08-03 12:39:17,094 INFO [train.py:1114] (3/4) Epoch 9, batch 3100, loss[loss=0.2207, simple_loss=0.304, pruned_loss=0.06872, over 13306.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2991, pruned_loss=0.07057, over 2627757.61 frames. 
], batch size: 46, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:39:34,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=118228.0, ans=0.025 +2024-08-03 12:39:43,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118264.66666666667, ans=0.125 +2024-08-03 12:39:43,897 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.96 vs. limit=10.0 +2024-08-03 12:39:45,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=118264.66666666667, ans=0.125 +2024-08-03 12:39:47,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118264.66666666667, ans=0.125 +2024-08-03 12:39:58,237 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.498e+01 1.162e+02 1.416e+02 1.749e+02 3.223e+02, threshold=2.833e+02, percent-clipped=4.0 +2024-08-03 12:40:00,022 INFO [train.py:1114] (3/4) Epoch 9, batch 3150, loss[loss=0.2361, simple_loss=0.3141, pruned_loss=0.07902, over 13013.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2987, pruned_loss=0.07002, over 2629127.52 frames. ], batch size: 48, lr: 1.36e-02, grad_scale: 8.0 +2024-08-03 12:40:16,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118411.33333333333, ans=0.1 +2024-08-03 12:40:21,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=118411.33333333333, ans=0.2 +2024-08-03 12:40:24,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=118448.0, ans=0.125 +2024-08-03 12:42:07,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=118448.0, ans=0.0 +2024-08-03 12:42:18,392 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:42:18,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.13 vs. limit=22.5 +2024-08-03 12:42:20,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=118484.66666666667, ans=0.025 +2024-08-03 12:42:23,245 INFO [train.py:1114] (3/4) Epoch 9, batch 3200, loss[loss=0.2276, simple_loss=0.2983, pruned_loss=0.07845, over 13549.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2979, pruned_loss=0.06956, over 2635948.69 frames. 
], batch size: 37, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:42:42,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118594.66666666667, ans=0.1 +2024-08-03 12:42:58,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=118668.0, ans=0.2 +2024-08-03 12:43:04,100 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 12:43:06,442 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.961e+01 1.301e+02 1.590e+02 2.129e+02 3.021e+02, threshold=3.180e+02, percent-clipped=5.0 +2024-08-03 12:43:07,343 INFO [train.py:1114] (3/4) Epoch 9, batch 3250, loss[loss=0.2139, simple_loss=0.2986, pruned_loss=0.06464, over 13384.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2986, pruned_loss=0.06975, over 2640155.45 frames. ], batch size: 38, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:43:20,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=118741.33333333333, ans=0.0 +2024-08-03 12:43:30,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=118778.0, ans=10.0 +2024-08-03 12:43:41,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.64 vs. limit=22.5 +2024-08-03 12:43:55,825 INFO [train.py:1114] (3/4) Epoch 9, batch 3300, loss[loss=0.228, simple_loss=0.3182, pruned_loss=0.06893, over 12939.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2982, pruned_loss=0.06993, over 2641407.96 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:43:55,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118888.0, ans=0.125 +2024-08-03 12:44:00,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=118888.0, ans=0.2 +2024-08-03 12:44:08,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-08-03 12:44:10,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=118924.66666666667, ans=0.2 +2024-08-03 12:44:12,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=118924.66666666667, ans=0.0 +2024-08-03 12:44:16,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=118924.66666666667, ans=0.0 +2024-08-03 12:44:26,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=118998.0, ans=0.0 +2024-08-03 12:44:29,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=118998.0, ans=0.125 +2024-08-03 12:44:36,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.41 vs. 
limit=12.0 +2024-08-03 12:44:43,220 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.376e+01 1.208e+02 1.436e+02 1.732e+02 3.249e+02, threshold=2.873e+02, percent-clipped=1.0 +2024-08-03 12:44:44,074 INFO [train.py:1114] (3/4) Epoch 9, batch 3350, loss[loss=0.2353, simple_loss=0.309, pruned_loss=0.08083, over 13045.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2992, pruned_loss=0.07056, over 2630561.44 frames. ], batch size: 48, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:44:58,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.33 vs. limit=12.0 +2024-08-03 12:44:59,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=119108.0, ans=0.2 +2024-08-03 12:45:26,470 INFO [train.py:1114] (3/4) Epoch 9, batch 3400, loss[loss=0.2103, simple_loss=0.2814, pruned_loss=0.06964, over 13572.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2991, pruned_loss=0.07068, over 2625828.43 frames. ], batch size: 31, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:45:29,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=119254.66666666667, ans=0.1 +2024-08-03 12:45:33,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=119254.66666666667, ans=0.125 +2024-08-03 12:45:50,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=119328.0, ans=0.125 +2024-08-03 12:45:56,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.14 vs. limit=15.0 +2024-08-03 12:45:58,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-08-03 12:46:02,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=119401.33333333333, ans=0.125 +2024-08-03 12:46:17,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=119401.33333333333, ans=0.125 +2024-08-03 12:46:21,258 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.200e+02 1.400e+02 1.853e+02 3.003e+02, threshold=2.800e+02, percent-clipped=1.0 +2024-08-03 12:46:22,092 INFO [train.py:1114] (3/4) Epoch 9, batch 3450, loss[loss=0.2131, simple_loss=0.297, pruned_loss=0.06454, over 12911.00 frames. ], tot_loss[loss=0.22, simple_loss=0.299, pruned_loss=0.07051, over 2629359.43 frames. ], batch size: 52, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:46:28,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=119438.0, ans=0.025 +2024-08-03 12:46:35,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=15.0 +2024-08-03 12:46:36,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=119474.66666666667, ans=0.035 +2024-08-03 12:46:36,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.55 vs. 
limit=12.0 +2024-08-03 12:46:46,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-08-03 12:46:50,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=119548.0, ans=0.2 +2024-08-03 12:47:04,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=119584.66666666667, ans=0.2 +2024-08-03 12:47:06,135 INFO [train.py:1114] (3/4) Epoch 9, batch 3500, loss[loss=0.2037, simple_loss=0.2862, pruned_loss=0.06065, over 13542.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2982, pruned_loss=0.07026, over 2631472.30 frames. ], batch size: 34, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:47:07,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=119621.33333333333, ans=0.125 +2024-08-03 12:47:16,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.58 vs. limit=15.0 +2024-08-03 12:47:17,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119658.0, ans=0.1 +2024-08-03 12:47:24,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119694.66666666667, ans=0.1 +2024-08-03 12:47:35,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=119731.33333333333, ans=0.04949747468305833 +2024-08-03 12:47:38,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=119731.33333333333, ans=0.0 +2024-08-03 12:47:41,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=119768.0, ans=0.0 +2024-08-03 12:47:51,577 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.983e+01 1.270e+02 1.503e+02 1.772e+02 3.260e+02, threshold=3.007e+02, percent-clipped=1.0 +2024-08-03 12:47:52,440 INFO [train.py:1114] (3/4) Epoch 9, batch 3550, loss[loss=0.2668, simple_loss=0.3427, pruned_loss=0.09544, over 12437.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3009, pruned_loss=0.07189, over 2629518.90 frames. 
], batch size: 58, lr: 1.35e-02, grad_scale: 8.0 +2024-08-03 12:47:55,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=119804.66666666667, ans=0.07 +2024-08-03 12:48:03,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=119841.33333333333, ans=0.09899494936611666 +2024-08-03 12:48:12,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119878.0, ans=0.1 +2024-08-03 12:48:17,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=119878.0, ans=0.125 +2024-08-03 12:48:21,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119914.66666666667, ans=0.125 +2024-08-03 12:48:33,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=119951.33333333333, ans=0.025 +2024-08-03 12:48:37,780 INFO [train.py:1114] (3/4) Epoch 9, batch 3600, loss[loss=0.2846, simple_loss=0.3455, pruned_loss=0.1118, over 9207.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3058, pruned_loss=0.07641, over 2485202.61 frames. ], batch size: 97, lr: 1.35e-02, grad_scale: 16.0 +2024-08-03 12:48:38,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=119988.0, ans=0.125 +2024-08-03 12:48:42,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=119988.0, ans=0.2 +2024-08-03 12:48:44,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.43 vs. limit=22.5 +2024-08-03 12:48:53,062 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=8.162e-03 +2024-08-03 12:50:09,326 INFO [train.py:1114] (3/4) Epoch 10, batch 0, loss[loss=0.1988, simple_loss=0.2835, pruned_loss=0.05706, over 13352.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2835, pruned_loss=0.05706, over 13352.00 frames. ], batch size: 33, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:50:09,327 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 12:50:16,017 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.1847, 1.6109, 3.8238, 3.6852], device='cuda:3') +2024-08-03 12:50:19,384 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.1895, simple_loss=0.2901, pruned_loss=0.04443, over 944034.00 frames. 
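The recurring `WARNING [optim.py:487]` entries summarize gradient clipping: five grad-norm quartiles (min, 25%, median, 75%, max), a clipping threshold, and the percentage of recent batches that were clipped. In the entries above the threshold tracks `Clipping_scale=2.0` times the reported median (e.g. 2 x 1.503e+02 = 3.006e+02 against the reported threshold=3.007e+02). A rough sketch of that bookkeeping over a window of per-batch gradient norms; this only mimics the reported statistics, and the authoritative rule is whatever `optim.py` in this icefall setup implements:

```python
import numpy as np

# Rough sketch of the statistics behind the "WARNING [optim.py:487]" lines.
# Assumptions (not taken from icefall's source): the five numbers are the
# [0, 25, 50, 75, 100] percentiles of recently observed gradient norms, and
# the threshold is clipping_scale * median, which matches the values
# reported above (clipping_scale=2.0 in this run).
def clipping_report(grad_norms, clipping_scale=2.0):
    norms = np.asarray(grad_norms, dtype=float)
    quartiles = np.percentile(norms, [0, 25, 50, 75, 100])
    threshold = clipping_scale * quartiles[2]           # 2x the median
    percent_clipped = 100.0 * float(np.mean(norms > threshold))
    return quartiles, threshold, percent_clipped
```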
+2024-08-03 12:50:19,384 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 12:50:23,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=120134.66666666667, ans=0.125 +2024-08-03 12:50:27,512 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.231e+02 1.354e+02 1.561e+02 3.235e+02, threshold=2.709e+02, percent-clipped=1.0 +2024-08-03 12:50:42,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=120208.0, ans=0.125 +2024-08-03 12:50:52,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=120244.66666666667, ans=0.05 +2024-08-03 12:50:58,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120281.33333333333, ans=0.125 +2024-08-03 12:51:03,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.64 vs. limit=22.5 +2024-08-03 12:51:07,177 INFO [train.py:1114] (3/4) Epoch 10, batch 50, loss[loss=0.2034, simple_loss=0.2775, pruned_loss=0.06467, over 13415.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3011, pruned_loss=0.07146, over 577985.16 frames. ], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:51:34,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=120428.0, ans=10.0 +2024-08-03 12:51:34,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.29 vs. limit=15.0 +2024-08-03 12:51:37,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120428.0, ans=0.125 +2024-08-03 12:51:48,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=120464.66666666667, ans=0.2 +2024-08-03 12:51:49,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=120464.66666666667, ans=0.125 +2024-08-03 12:51:52,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.47 vs. limit=22.5 +2024-08-03 12:51:54,549 INFO [train.py:1114] (3/4) Epoch 10, batch 100, loss[loss=0.2027, simple_loss=0.2876, pruned_loss=0.05895, over 13527.00 frames. ], tot_loss[loss=0.221, simple_loss=0.301, pruned_loss=0.07049, over 1025623.10 frames. ], batch size: 35, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:52:02,524 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.137e+01 1.185e+02 1.436e+02 1.784e+02 2.704e+02, threshold=2.871e+02, percent-clipped=0.0 +2024-08-03 12:52:03,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0 +2024-08-03 12:52:04,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.19 vs. 
limit=6.0 +2024-08-03 12:52:17,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=120574.66666666667, ans=10.0 +2024-08-03 12:52:22,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=120611.33333333333, ans=0.125 +2024-08-03 12:52:25,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=120611.33333333333, ans=0.025 +2024-08-03 12:52:39,362 INFO [train.py:1114] (3/4) Epoch 10, batch 150, loss[loss=0.2057, simple_loss=0.2741, pruned_loss=0.06864, over 13441.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2979, pruned_loss=0.06894, over 1386541.51 frames. ], batch size: 32, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:12,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.51 vs. limit=15.0 +2024-08-03 12:53:24,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=120831.33333333333, ans=0.04949747468305833 +2024-08-03 12:53:26,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.22 vs. limit=12.0 +2024-08-03 12:53:27,625 INFO [train.py:1114] (3/4) Epoch 10, batch 200, loss[loss=0.2626, simple_loss=0.3392, pruned_loss=0.09307, over 12516.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2974, pruned_loss=0.06886, over 1665004.74 frames. ], batch size: 58, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:53:35,554 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.179e+02 1.388e+02 1.956e+02 3.362e+02, threshold=2.775e+02, percent-clipped=2.0 +2024-08-03 12:53:35,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=120904.66666666667, ans=0.025 +2024-08-03 12:53:54,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120941.33333333333, ans=0.1 +2024-08-03 12:53:55,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.21 vs. limit=15.0 +2024-08-03 12:53:55,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=120941.33333333333, ans=15.0 +2024-08-03 12:54:03,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=120978.0, ans=0.0 +2024-08-03 12:54:11,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.08 vs. limit=10.0 +2024-08-03 12:54:15,706 INFO [train.py:1114] (3/4) Epoch 10, batch 250, loss[loss=0.2363, simple_loss=0.3056, pruned_loss=0.08353, over 13305.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2976, pruned_loss=0.06907, over 1884160.92 frames. 
], batch size: 46, lr: 1.28e-02, grad_scale: 32.0 +2024-08-03 12:54:17,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=121051.33333333333, ans=0.125 +2024-08-03 12:54:25,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=121088.0, ans=0.125 +2024-08-03 12:54:25,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=121088.0, ans=0.0 +2024-08-03 12:54:28,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121088.0, ans=0.125 +2024-08-03 12:54:37,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=121124.66666666667, ans=0.125 +2024-08-03 12:54:49,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=121161.33333333333, ans=0.2 +2024-08-03 12:54:51,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=121161.33333333333, ans=0.07 +2024-08-03 12:55:03,436 INFO [train.py:1114] (3/4) Epoch 10, batch 300, loss[loss=0.243, simple_loss=0.3119, pruned_loss=0.08707, over 13470.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2974, pruned_loss=0.06901, over 2051260.06 frames. ], batch size: 42, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:06,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=121234.66666666667, ans=0.0 +2024-08-03 12:55:11,648 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.594e+01 1.264e+02 1.454e+02 1.818e+02 3.044e+02, threshold=2.909e+02, percent-clipped=3.0 +2024-08-03 12:55:41,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=15.0 +2024-08-03 12:55:46,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=121381.33333333333, ans=0.09899494936611666 +2024-08-03 12:55:51,198 INFO [train.py:1114] (3/4) Epoch 10, batch 350, loss[loss=0.1947, simple_loss=0.273, pruned_loss=0.05824, over 13586.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.2977, pruned_loss=0.06892, over 2182643.93 frames. ], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:55:58,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=121418.0, ans=0.0 +2024-08-03 12:56:02,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=121454.66666666667, ans=0.0 +2024-08-03 12:56:30,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.22 vs. limit=15.0 +2024-08-03 12:56:35,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=121601.33333333333, ans=0.07 +2024-08-03 12:56:35,970 INFO [train.py:1114] (3/4) Epoch 10, batch 400, loss[loss=0.2253, simple_loss=0.3079, pruned_loss=0.07134, over 13361.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2965, pruned_loss=0.06812, over 2286479.17 frames. 
], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:56:46,299 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.271e+02 1.420e+02 1.744e+02 2.813e+02, threshold=2.840e+02, percent-clipped=0.0 +2024-08-03 12:56:46,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=121638.0, ans=0.125 +2024-08-03 12:57:02,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.13 vs. limit=10.0 +2024-08-03 12:57:02,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=121674.66666666667, ans=0.025 +2024-08-03 12:57:07,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0 +2024-08-03 12:57:10,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=121711.33333333333, ans=0.1 +2024-08-03 12:57:14,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121748.0, ans=0.125 +2024-08-03 12:57:23,520 INFO [train.py:1114] (3/4) Epoch 10, batch 450, loss[loss=0.2286, simple_loss=0.3135, pruned_loss=0.07186, over 13555.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2968, pruned_loss=0.06831, over 2359332.88 frames. ], batch size: 38, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:57:43,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121858.0, ans=0.0 +2024-08-03 12:57:48,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=121858.0, ans=0.125 +2024-08-03 12:57:51,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=121894.66666666667, ans=0.0 +2024-08-03 12:58:00,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=121931.33333333333, ans=0.125 +2024-08-03 12:58:02,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=12.0 +2024-08-03 12:58:09,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=121968.0, ans=0.025 +2024-08-03 12:58:12,711 INFO [train.py:1114] (3/4) Epoch 10, batch 500, loss[loss=0.2242, simple_loss=0.3051, pruned_loss=0.07163, over 13417.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2951, pruned_loss=0.06735, over 2425456.54 frames. 
], batch size: 43, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:58:15,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=121968.0, ans=0.125 +2024-08-03 12:58:18,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121968.0, ans=0.0 +2024-08-03 12:58:20,912 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.605e+01 1.105e+02 1.306e+02 1.598e+02 3.062e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 12:58:30,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=15.0 +2024-08-03 12:58:37,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=122041.33333333333, ans=0.125 +2024-08-03 12:58:45,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122078.0, ans=0.1 +2024-08-03 12:58:47,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=122078.0, ans=0.0 +2024-08-03 12:58:49,887 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.27 vs. limit=15.0 +2024-08-03 12:58:51,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=122114.66666666667, ans=0.2 +2024-08-03 12:59:00,200 INFO [train.py:1114] (3/4) Epoch 10, batch 550, loss[loss=0.2269, simple_loss=0.3143, pruned_loss=0.06973, over 12994.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2948, pruned_loss=0.06735, over 2467935.67 frames. ], batch size: 48, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:02,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122151.33333333333, ans=0.1 +2024-08-03 12:59:12,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=122188.0, ans=0.0 +2024-08-03 12:59:16,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=122188.0, ans=0.2 +2024-08-03 12:59:19,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=122224.66666666667, ans=0.04949747468305833 +2024-08-03 12:59:24,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=122224.66666666667, ans=0.125 +2024-08-03 12:59:24,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=122224.66666666667, ans=0.2 +2024-08-03 12:59:28,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-08-03 12:59:41,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122298.0, ans=0.1 +2024-08-03 12:59:45,530 INFO [train.py:1114] (3/4) Epoch 10, batch 600, loss[loss=0.2483, simple_loss=0.323, pruned_loss=0.08686, over 13295.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2943, pruned_loss=0.06689, over 2508045.63 frames. 
], batch size: 46, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 12:59:52,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.27 vs. limit=22.5 +2024-08-03 12:59:53,556 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.160e+02 1.307e+02 1.564e+02 2.892e+02, threshold=2.615e+02, percent-clipped=4.0 +2024-08-03 13:00:13,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=122444.66666666667, ans=15.0 +2024-08-03 13:00:29,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0 +2024-08-03 13:00:33,101 INFO [train.py:1114] (3/4) Epoch 10, batch 650, loss[loss=0.181, simple_loss=0.2689, pruned_loss=0.04652, over 13546.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2933, pruned_loss=0.06639, over 2543619.40 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:00:40,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=122518.0, ans=0.125 +2024-08-03 13:00:48,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0 +2024-08-03 13:01:19,907 INFO [train.py:1114] (3/4) Epoch 10, batch 700, loss[loss=0.2048, simple_loss=0.2798, pruned_loss=0.06487, over 13537.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2938, pruned_loss=0.06635, over 2565240.20 frames. ], batch size: 35, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:01:20,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.89 vs. 
limit=15.0 +2024-08-03 13:01:26,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=122701.33333333333, ans=0.0 +2024-08-03 13:01:28,966 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.487e+01 1.261e+02 1.582e+02 2.111e+02 3.773e+02, threshold=3.165e+02, percent-clipped=11.0 +2024-08-03 13:01:34,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=122738.0, ans=0.125 +2024-08-03 13:01:41,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=122774.66666666667, ans=0.125 +2024-08-03 13:01:43,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122774.66666666667, ans=0.125 +2024-08-03 13:01:44,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122774.66666666667, ans=0.125 +2024-08-03 13:01:45,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122774.66666666667, ans=0.125 +2024-08-03 13:01:47,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=122811.33333333333, ans=0.0 +2024-08-03 13:01:57,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=122848.0, ans=0.2 +2024-08-03 13:02:06,740 INFO [train.py:1114] (3/4) Epoch 10, batch 750, loss[loss=0.2155, simple_loss=0.3019, pruned_loss=0.06451, over 13367.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2939, pruned_loss=0.06667, over 2582061.27 frames. ], batch size: 37, lr: 1.27e-02, grad_scale: 16.0 +2024-08-03 13:02:11,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=122884.66666666667, ans=0.0 +2024-08-03 13:02:31,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.40 vs. limit=15.0 +2024-08-03 13:02:46,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=123031.33333333333, ans=0.0 +2024-08-03 13:02:50,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123031.33333333333, ans=0.0 +2024-08-03 13:08:35,489 INFO [train.py:1114] (3/4) Epoch 10, batch 800, loss[loss=0.2232, simple_loss=0.2963, pruned_loss=0.07501, over 13334.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.294, pruned_loss=0.06689, over 2596838.87 frames. 
], batch size: 33, lr: 1.27e-02, grad_scale: 32.0 +2024-08-03 13:08:39,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=123068.0, ans=0.0 +2024-08-03 13:08:58,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123068.0, ans=0.1 +2024-08-03 13:08:59,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123068.0, ans=0.0 +2024-08-03 13:09:00,777 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.986e+01 1.304e+02 1.516e+02 1.968e+02 2.999e+02, threshold=3.032e+02, percent-clipped=0.0 +2024-08-03 13:09:05,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.05 vs. limit=10.0 +2024-08-03 13:10:07,626 INFO [train.py:1114] (3/4) Epoch 10, batch 850, loss[loss=0.2273, simple_loss=0.3083, pruned_loss=0.07318, over 13305.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2937, pruned_loss=0.06689, over 2609517.85 frames. ], batch size: 40, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:13,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0 +2024-08-03 13:10:16,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=123288.0, ans=0.0 +2024-08-03 13:10:25,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123288.0, ans=0.1 +2024-08-03 13:10:35,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=123324.66666666667, ans=0.0 +2024-08-03 13:10:36,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.41 vs. limit=22.5 +2024-08-03 13:10:44,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.19 vs. limit=15.0 +2024-08-03 13:10:53,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=123398.0, ans=0.0 +2024-08-03 13:10:53,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=123434.66666666667, ans=0.125 +2024-08-03 13:10:54,530 INFO [train.py:1114] (3/4) Epoch 10, batch 900, loss[loss=0.181, simple_loss=0.2589, pruned_loss=0.0516, over 13339.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2932, pruned_loss=0.06649, over 2611974.29 frames. 
], batch size: 33, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:10:57,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=123434.66666666667, ans=0.125 +2024-08-03 13:11:04,166 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.190e+01 1.156e+02 1.356e+02 1.629e+02 2.273e+02, threshold=2.713e+02, percent-clipped=0.0 +2024-08-03 13:11:13,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=123508.0, ans=0.0 +2024-08-03 13:11:37,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=123581.33333333333, ans=0.0 +2024-08-03 13:11:41,264 INFO [train.py:1114] (3/4) Epoch 10, batch 950, loss[loss=0.1845, simple_loss=0.2618, pruned_loss=0.05358, over 13529.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2934, pruned_loss=0.06656, over 2613878.99 frames. ], batch size: 34, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:11:47,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=123618.0, ans=0.0 +2024-08-03 13:11:49,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.58 vs. limit=15.0 +2024-08-03 13:12:00,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.30 vs. limit=6.0 +2024-08-03 13:12:03,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=6.0 +2024-08-03 13:12:15,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-08-03 13:12:20,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.67 vs. limit=22.5 +2024-08-03 13:12:22,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=123764.66666666667, ans=0.125 +2024-08-03 13:12:22,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=123764.66666666667, ans=0.025 +2024-08-03 13:12:25,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.16 vs. limit=6.0 +2024-08-03 13:12:28,929 INFO [train.py:1114] (3/4) Epoch 10, batch 1000, loss[loss=0.1861, simple_loss=0.2739, pruned_loss=0.04917, over 13364.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2947, pruned_loss=0.06728, over 2610462.80 frames. 
], batch size: 35, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:12:31,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=123801.33333333333, ans=0.125 +2024-08-03 13:12:40,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=123838.0, ans=0.125 +2024-08-03 13:12:41,054 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.614e+01 1.218e+02 1.406e+02 1.942e+02 3.222e+02, threshold=2.813e+02, percent-clipped=3.0 +2024-08-03 13:12:41,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.93 vs. limit=15.0 +2024-08-03 13:12:45,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=123838.0, ans=0.0 +2024-08-03 13:12:53,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.36 vs. limit=15.0 +2024-08-03 13:12:56,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=123874.66666666667, ans=0.0 +2024-08-03 13:13:05,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=123911.33333333333, ans=0.2 +2024-08-03 13:13:15,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=123948.0, ans=0.035 +2024-08-03 13:13:16,730 INFO [train.py:1114] (3/4) Epoch 10, batch 1050, loss[loss=0.2208, simple_loss=0.3003, pruned_loss=0.07068, over 13579.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2938, pruned_loss=0.06701, over 2614764.59 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:13:45,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.11 vs. limit=15.0 +2024-08-03 13:13:47,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124094.66666666667, ans=0.1 +2024-08-03 13:13:48,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=124094.66666666667, ans=0.2 +2024-08-03 13:13:50,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124094.66666666667, ans=0.1 +2024-08-03 13:13:59,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=124131.33333333333, ans=0.0 +2024-08-03 13:14:00,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=124131.33333333333, ans=0.025 +2024-08-03 13:14:05,015 INFO [train.py:1114] (3/4) Epoch 10, batch 1100, loss[loss=0.1978, simple_loss=0.2771, pruned_loss=0.05928, over 13551.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2943, pruned_loss=0.06747, over 2619535.63 frames. 
], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:06,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=124168.0, ans=0.0 +2024-08-03 13:14:14,709 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.380e+01 1.135e+02 1.251e+02 1.585e+02 3.709e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 13:14:19,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124204.66666666667, ans=0.1 +2024-08-03 13:14:19,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=124204.66666666667, ans=0.0 +2024-08-03 13:14:27,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=124241.33333333333, ans=0.025 +2024-08-03 13:14:51,718 INFO [train.py:1114] (3/4) Epoch 10, batch 1150, loss[loss=0.2253, simple_loss=0.2982, pruned_loss=0.0762, over 13565.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2942, pruned_loss=0.06732, over 2618190.43 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 16.0 +2024-08-03 13:14:58,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=124351.33333333333, ans=0.0 +2024-08-03 13:15:07,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0 +2024-08-03 13:15:25,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124461.33333333333, ans=0.125 +2024-08-03 13:15:33,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.54 vs. limit=22.5 +2024-08-03 13:15:35,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=124498.0, ans=0.2 +2024-08-03 13:15:38,762 INFO [train.py:1114] (3/4) Epoch 10, batch 1200, loss[loss=0.2386, simple_loss=0.3204, pruned_loss=0.07843, over 13586.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2959, pruned_loss=0.06815, over 2614978.55 frames. ], batch size: 39, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:15:48,571 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.153e+02 1.332e+02 1.610e+02 2.864e+02, threshold=2.663e+02, percent-clipped=2.0 +2024-08-03 13:15:58,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=124608.0, ans=0.125 +2024-08-03 13:15:59,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=124608.0, ans=0.0 +2024-08-03 13:16:01,723 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:16:03,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124608.0, ans=0.1 +2024-08-03 13:16:04,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.02 vs. 
limit=15.0 +2024-08-03 13:16:12,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.47 vs. limit=22.5 +2024-08-03 13:16:15,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=124644.66666666667, ans=0.125 +2024-08-03 13:16:22,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=124681.33333333333, ans=0.2 +2024-08-03 13:16:25,774 INFO [train.py:1114] (3/4) Epoch 10, batch 1250, loss[loss=0.2039, simple_loss=0.2936, pruned_loss=0.0571, over 13424.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2961, pruned_loss=0.06801, over 2627077.76 frames. ], batch size: 42, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:16:35,597 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.88 vs. limit=15.0 +2024-08-03 13:16:39,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5 +2024-08-03 13:16:49,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=124791.33333333333, ans=0.0 +2024-08-03 13:16:52,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=22.5 +2024-08-03 13:16:53,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.15 vs. limit=22.5 +2024-08-03 13:17:02,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=124864.66666666667, ans=0.125 +2024-08-03 13:17:04,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124864.66666666667, ans=0.125 +2024-08-03 13:17:11,320 INFO [train.py:1114] (3/4) Epoch 10, batch 1300, loss[loss=0.2279, simple_loss=0.3095, pruned_loss=0.07317, over 12953.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.295, pruned_loss=0.06737, over 2630027.03 frames. ], batch size: 52, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:17:21,176 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.141e+01 1.219e+02 1.549e+02 1.853e+02 2.795e+02, threshold=3.098e+02, percent-clipped=1.0 +2024-08-03 13:17:40,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=125011.33333333333, ans=0.025 +2024-08-03 13:17:45,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-08-03 13:17:58,636 INFO [train.py:1114] (3/4) Epoch 10, batch 1350, loss[loss=0.2171, simple_loss=0.2959, pruned_loss=0.06919, over 13528.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2943, pruned_loss=0.06682, over 2638358.80 frames. 
], batch size: 37, lr: 1.26e-02, grad_scale: 32.0 +2024-08-03 13:18:06,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125084.66666666667, ans=0.125 +2024-08-03 13:18:06,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=125121.33333333333, ans=0.0 +2024-08-03 13:18:13,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125121.33333333333, ans=0.1 +2024-08-03 13:18:23,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=125158.0, ans=0.125 +2024-08-03 13:18:24,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=125158.0, ans=0.0 +2024-08-03 13:18:24,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=125158.0, ans=0.5 +2024-08-03 13:18:26,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:30,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:32,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:33,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125194.66666666667, ans=0.125 +2024-08-03 13:18:33,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0 +2024-08-03 13:18:44,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=125268.0, ans=0.025 +2024-08-03 13:18:45,244 INFO [train.py:1114] (3/4) Epoch 10, batch 1400, loss[loss=0.1646, simple_loss=0.2477, pruned_loss=0.0408, over 13272.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2937, pruned_loss=0.06642, over 2642199.33 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:18:49,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=125268.0, ans=0.125 +2024-08-03 13:18:52,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=125268.0, ans=0.125 +2024-08-03 13:18:54,982 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.794e+01 1.134e+02 1.256e+02 1.534e+02 3.011e+02, threshold=2.513e+02, percent-clipped=0.0 +2024-08-03 13:18:55,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. 
limit=6.0 +2024-08-03 13:19:13,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=125378.0, ans=6.0 +2024-08-03 13:19:19,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=125378.0, ans=0.125 +2024-08-03 13:19:31,966 INFO [train.py:1114] (3/4) Epoch 10, batch 1450, loss[loss=0.2395, simple_loss=0.3232, pruned_loss=0.07786, over 13429.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2946, pruned_loss=0.06669, over 2641290.96 frames. ], batch size: 43, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:19:43,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=125488.0, ans=0.125 +2024-08-03 13:20:00,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=15.0 +2024-08-03 13:20:08,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=125561.33333333333, ans=0.0 +2024-08-03 13:20:19,156 INFO [train.py:1114] (3/4) Epoch 10, batch 1500, loss[loss=0.2242, simple_loss=0.3045, pruned_loss=0.07194, over 13403.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2949, pruned_loss=0.06654, over 2641670.47 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:20:19,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=12.0 +2024-08-03 13:20:23,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=125634.66666666667, ans=0.025 +2024-08-03 13:20:27,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=125671.33333333333, ans=0.0 +2024-08-03 13:20:29,214 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.867e+01 1.156e+02 1.320e+02 1.724e+02 3.764e+02, threshold=2.640e+02, percent-clipped=6.0 +2024-08-03 13:20:36,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.35 vs. limit=15.0 +2024-08-03 13:20:43,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=125708.0, ans=0.125 +2024-08-03 13:20:54,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.42 vs. limit=12.0 +2024-08-03 13:21:06,620 INFO [train.py:1114] (3/4) Epoch 10, batch 1550, loss[loss=0.2271, simple_loss=0.3091, pruned_loss=0.07255, over 13388.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2953, pruned_loss=0.06706, over 2632302.37 frames. ], batch size: 41, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:21:54,152 INFO [train.py:1114] (3/4) Epoch 10, batch 1600, loss[loss=0.2121, simple_loss=0.3011, pruned_loss=0.06151, over 13576.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2946, pruned_loss=0.06699, over 2624344.45 frames. 
], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:21:56,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126001.33333333333, ans=0.125 +2024-08-03 13:22:04,507 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.527e+01 1.158e+02 1.419e+02 1.742e+02 3.880e+02, threshold=2.837e+02, percent-clipped=3.0 +2024-08-03 13:22:31,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=126148.0, ans=0.025 +2024-08-03 13:22:41,774 INFO [train.py:1114] (3/4) Epoch 10, batch 1650, loss[loss=0.1959, simple_loss=0.2879, pruned_loss=0.05191, over 13320.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2948, pruned_loss=0.0675, over 2621778.64 frames. ], batch size: 40, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:22:59,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=126258.0, ans=0.2 +2024-08-03 13:23:04,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126258.0, ans=0.125 +2024-08-03 13:23:29,223 INFO [train.py:1114] (3/4) Epoch 10, batch 1700, loss[loss=0.1991, simple_loss=0.2703, pruned_loss=0.06402, over 13284.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2947, pruned_loss=0.06715, over 2630518.29 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:23:39,075 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.220e+01 1.201e+02 1.479e+02 1.994e+02 3.572e+02, threshold=2.957e+02, percent-clipped=7.0 +2024-08-03 13:23:47,910 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=12.0 +2024-08-03 13:24:01,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=126478.0, ans=0.125 +2024-08-03 13:24:02,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.18 vs. limit=22.5 +2024-08-03 13:24:14,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=126551.33333333333, ans=0.07 +2024-08-03 13:24:14,864 INFO [train.py:1114] (3/4) Epoch 10, batch 1750, loss[loss=0.2, simple_loss=0.2729, pruned_loss=0.06355, over 13561.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2948, pruned_loss=0.06729, over 2633502.95 frames. ], batch size: 31, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:24:25,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=15.0 +2024-08-03 13:24:37,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. 
limit=6.0 +2024-08-03 13:24:39,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126624.66666666667, ans=0.125 +2024-08-03 13:24:39,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=126624.66666666667, ans=0.2 +2024-08-03 13:24:40,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=126624.66666666667, ans=0.125 +2024-08-03 13:24:45,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126661.33333333333, ans=0.125 +2024-08-03 13:25:01,908 INFO [train.py:1114] (3/4) Epoch 10, batch 1800, loss[loss=0.2159, simple_loss=0.3033, pruned_loss=0.06424, over 13559.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.295, pruned_loss=0.06728, over 2634905.57 frames. ], batch size: 38, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:25:02,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=126734.66666666667, ans=0.025 +2024-08-03 13:25:11,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=126771.33333333333, ans=0.95 +2024-08-03 13:25:11,953 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.656e+01 1.185e+02 1.312e+02 1.554e+02 2.308e+02, threshold=2.624e+02, percent-clipped=0.0 +2024-08-03 13:25:34,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.31 vs. limit=15.0 +2024-08-03 13:25:36,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=126844.66666666667, ans=0.0 +2024-08-03 13:25:44,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=126881.33333333333, ans=0.2 +2024-08-03 13:25:45,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=12.0 +2024-08-03 13:25:49,079 INFO [train.py:1114] (3/4) Epoch 10, batch 1850, loss[loss=0.2203, simple_loss=0.3045, pruned_loss=0.06806, over 13404.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2943, pruned_loss=0.06683, over 2637486.81 frames. 
], batch size: 39, lr: 1.25e-02, grad_scale: 32.0 +2024-08-03 13:26:02,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=126954.66666666667, ans=0.125 +2024-08-03 13:26:13,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=126991.33333333333, ans=0.2 +2024-08-03 13:26:25,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=127028.0, ans=0.0 +2024-08-03 13:26:26,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=127064.66666666667, ans=0.125 +2024-08-03 13:26:27,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127064.66666666667, ans=0.125 +2024-08-03 13:26:32,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127064.66666666667, ans=0.1 +2024-08-03 13:26:34,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=127064.66666666667, ans=0.2 +2024-08-03 13:26:36,792 INFO [train.py:1114] (3/4) Epoch 10, batch 1900, loss[loss=0.2029, simple_loss=0.2884, pruned_loss=0.05874, over 13328.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2943, pruned_loss=0.06649, over 2639366.97 frames. ], batch size: 40, lr: 1.25e-02, grad_scale: 16.0 +2024-08-03 13:26:41,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127101.33333333333, ans=0.1 +2024-08-03 13:26:49,388 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.604e+01 1.255e+02 1.783e+02 2.547e+02 3.918e+02, threshold=3.565e+02, percent-clipped=23.0 +2024-08-03 13:26:53,552 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.76 vs. limit=22.5 +2024-08-03 13:26:53,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=127138.0, ans=0.0 +2024-08-03 13:27:02,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127174.66666666667, ans=0.125 +2024-08-03 13:27:04,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=127174.66666666667, ans=0.0 +2024-08-03 13:27:07,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.09 vs. limit=22.5 +2024-08-03 13:27:24,292 INFO [train.py:1114] (3/4) Epoch 10, batch 1950, loss[loss=0.2342, simple_loss=0.3071, pruned_loss=0.08063, over 13539.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2955, pruned_loss=0.06681, over 2646055.31 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:27:25,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=127284.66666666667, ans=0.05 +2024-08-03 13:27:27,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.39 vs. 
limit=15.0 +2024-08-03 13:27:36,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=127321.33333333333, ans=0.0 +2024-08-03 13:27:49,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.12 vs. limit=22.5 +2024-08-03 13:27:50,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=127358.0, ans=0.125 +2024-08-03 13:28:11,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.49 vs. limit=15.0 +2024-08-03 13:28:11,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0 +2024-08-03 13:28:11,431 INFO [train.py:1114] (3/4) Epoch 10, batch 2000, loss[loss=0.1796, simple_loss=0.2584, pruned_loss=0.05043, over 13526.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2959, pruned_loss=0.067, over 2636878.24 frames. ], batch size: 31, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:28:22,603 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.471e+01 1.146e+02 1.313e+02 1.617e+02 2.483e+02, threshold=2.626e+02, percent-clipped=0.0 +2024-08-03 13:28:25,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.20 vs. limit=15.0 +2024-08-03 13:28:34,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=127541.33333333333, ans=0.125 +2024-08-03 13:28:59,000 INFO [train.py:1114] (3/4) Epoch 10, batch 2050, loss[loss=0.1916, simple_loss=0.2621, pruned_loss=0.0605, over 13436.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2947, pruned_loss=0.06688, over 2634086.85 frames. ], batch size: 32, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:01,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.21 vs. limit=12.0 +2024-08-03 13:29:09,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127688.0, ans=0.125 +2024-08-03 13:29:18,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=127724.66666666667, ans=0.2 +2024-08-03 13:29:43,997 INFO [train.py:1114] (3/4) Epoch 10, batch 2100, loss[loss=0.2091, simple_loss=0.2902, pruned_loss=0.06401, over 13541.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2941, pruned_loss=0.06637, over 2639327.39 frames. 
], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:29:44,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=127834.66666666667, ans=0.125 +2024-08-03 13:29:44,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=127834.66666666667, ans=0.2 +2024-08-03 13:29:46,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=127834.66666666667, ans=0.125 +2024-08-03 13:29:56,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.808e+01 1.235e+02 1.463e+02 1.746e+02 3.043e+02, threshold=2.927e+02, percent-clipped=3.0 +2024-08-03 13:30:01,658 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:30:02,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.56 vs. limit=10.0 +2024-08-03 13:30:06,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.40 vs. limit=22.5 +2024-08-03 13:30:23,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127981.33333333333, ans=0.1 +2024-08-03 13:30:33,155 INFO [train.py:1114] (3/4) Epoch 10, batch 2150, loss[loss=0.185, simple_loss=0.2741, pruned_loss=0.04793, over 13554.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2936, pruned_loss=0.06614, over 2647725.58 frames. ], batch size: 36, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:30:35,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128018.0, ans=0.125 +2024-08-03 13:30:36,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128018.0, ans=0.1 +2024-08-03 13:30:39,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=128018.0, ans=0.125 +2024-08-03 13:30:42,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=128054.66666666667, ans=0.125 +2024-08-03 13:30:45,873 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:31:02,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=128128.0, ans=0.5 +2024-08-03 13:31:06,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-08-03 13:31:08,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.39 vs. 
limit=10.0 +2024-08-03 13:31:13,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128164.66666666667, ans=0.1 +2024-08-03 13:31:14,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128164.66666666667, ans=0.1 +2024-08-03 13:31:18,221 INFO [train.py:1114] (3/4) Epoch 10, batch 2200, loss[loss=0.2295, simple_loss=0.3181, pruned_loss=0.07048, over 13398.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2937, pruned_loss=0.06608, over 2645100.95 frames. ], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:31:20,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=128201.33333333333, ans=0.0 +2024-08-03 13:31:29,287 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.267e+02 1.540e+02 2.050e+02 4.140e+02, threshold=3.080e+02, percent-clipped=6.0 +2024-08-03 13:31:31,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.55 vs. limit=15.0 +2024-08-03 13:31:37,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=128274.66666666667, ans=0.125 +2024-08-03 13:31:50,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=128311.33333333333, ans=0.0 +2024-08-03 13:31:54,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=128311.33333333333, ans=0.0 +2024-08-03 13:32:04,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=128384.66666666667, ans=0.025 +2024-08-03 13:32:05,478 INFO [train.py:1114] (3/4) Epoch 10, batch 2250, loss[loss=0.2131, simple_loss=0.2955, pruned_loss=0.0653, over 13360.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2937, pruned_loss=0.06616, over 2642744.34 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:32:06,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=128384.66666666667, ans=0.2 +2024-08-03 13:32:12,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=128384.66666666667, ans=0.125 +2024-08-03 13:32:15,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=128421.33333333333, ans=0.2 +2024-08-03 13:32:19,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=128421.33333333333, ans=0.025 +2024-08-03 13:32:21,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=128421.33333333333, ans=0.125 +2024-08-03 13:32:27,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=128458.0, ans=0.125 +2024-08-03 13:32:47,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=128531.33333333333, ans=0.125 +2024-08-03 13:32:52,563 INFO [train.py:1114] (3/4) Epoch 10, batch 2300, loss[loss=0.2036, simple_loss=0.2737, pruned_loss=0.06669, over 13595.00 frames. 
], tot_loss[loss=0.2134, simple_loss=0.2934, pruned_loss=0.0667, over 2638441.11 frames. ], batch size: 33, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:33:04,019 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.537e+01 1.182e+02 1.366e+02 1.663e+02 2.762e+02, threshold=2.732e+02, percent-clipped=0.0 +2024-08-03 13:33:29,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.94 vs. limit=15.0 +2024-08-03 13:33:32,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=128714.66666666667, ans=0.125 +2024-08-03 13:33:39,782 INFO [train.py:1114] (3/4) Epoch 10, batch 2350, loss[loss=0.2159, simple_loss=0.3031, pruned_loss=0.06435, over 13549.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2933, pruned_loss=0.06647, over 2641312.37 frames. ], batch size: 38, lr: 1.24e-02, grad_scale: 16.0 +2024-08-03 13:33:48,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=128751.33333333333, ans=0.125 +2024-08-03 13:33:51,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=128788.0, ans=0.125 +2024-08-03 13:33:53,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=128788.0, ans=0.2 +2024-08-03 13:33:54,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128788.0, ans=0.0 +2024-08-03 13:34:07,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=128824.66666666667, ans=0.0 +2024-08-03 13:34:11,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=128861.33333333333, ans=0.125 +2024-08-03 13:34:27,032 INFO [train.py:1114] (3/4) Epoch 10, batch 2400, loss[loss=0.1981, simple_loss=0.2781, pruned_loss=0.05904, over 13531.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2938, pruned_loss=0.06667, over 2642334.03 frames. ], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:34:29,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=128934.66666666667, ans=0.125 +2024-08-03 13:34:39,020 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.606e+01 1.219e+02 1.512e+02 2.010e+02 3.572e+02, threshold=3.023e+02, percent-clipped=5.0 +2024-08-03 13:34:46,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129008.0, ans=0.0 +2024-08-03 13:34:49,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=129008.0, ans=0.2 +2024-08-03 13:34:54,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=129044.66666666667, ans=0.125 +2024-08-03 13:35:07,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129081.33333333333, ans=0.1 +2024-08-03 13:35:10,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.02 vs. 
limit=12.0 +2024-08-03 13:35:13,313 INFO [train.py:1114] (3/4) Epoch 10, batch 2450, loss[loss=0.2168, simple_loss=0.3071, pruned_loss=0.06325, over 13365.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.296, pruned_loss=0.06811, over 2632112.34 frames. ], batch size: 37, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:35:13,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129118.0, ans=0.1 +2024-08-03 13:35:25,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=129154.66666666667, ans=0.07 +2024-08-03 13:35:52,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=129264.66666666667, ans=0.125 +2024-08-03 13:35:53,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=129264.66666666667, ans=0.125 +2024-08-03 13:36:00,204 INFO [train.py:1114] (3/4) Epoch 10, batch 2500, loss[loss=0.2157, simple_loss=0.2971, pruned_loss=0.06715, over 13398.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2949, pruned_loss=0.06745, over 2636009.99 frames. ], batch size: 39, lr: 1.24e-02, grad_scale: 32.0 +2024-08-03 13:36:02,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.49 vs. limit=12.0 +2024-08-03 13:36:08,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=129338.0, ans=0.0 +2024-08-03 13:36:09,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=129338.0, ans=0.0 +2024-08-03 13:36:11,371 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.968e+01 1.203e+02 1.318e+02 1.532e+02 2.282e+02, threshold=2.635e+02, percent-clipped=0.0 +2024-08-03 13:36:16,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0 +2024-08-03 13:36:18,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129374.66666666667, ans=0.125 +2024-08-03 13:36:28,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=129411.33333333333, ans=0.125 +2024-08-03 13:36:38,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129448.0, ans=0.125 +2024-08-03 13:36:42,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=129448.0, ans=0.0 +2024-08-03 13:36:45,872 INFO [train.py:1114] (3/4) Epoch 10, batch 2550, loss[loss=0.1887, simple_loss=0.2623, pruned_loss=0.0575, over 13543.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2946, pruned_loss=0.06734, over 2637542.16 frames. 
], batch size: 31, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:36:48,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129484.66666666667, ans=0.125 +2024-08-03 13:36:54,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129521.33333333333, ans=0.125 +2024-08-03 13:36:56,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129521.33333333333, ans=0.1 +2024-08-03 13:36:58,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=129521.33333333333, ans=0.025 +2024-08-03 13:37:17,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=129594.66666666667, ans=0.5 +2024-08-03 13:37:29,295 INFO [train.py:1114] (3/4) Epoch 10, batch 2600, loss[loss=0.1852, simple_loss=0.2649, pruned_loss=0.05279, over 13555.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2951, pruned_loss=0.06736, over 2637505.95 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:37:31,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-08-03 13:37:37,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=129704.66666666667, ans=0.125 +2024-08-03 13:37:38,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=129704.66666666667, ans=0.0 +2024-08-03 13:37:40,628 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.809e+01 1.167e+02 1.488e+02 1.878e+02 3.119e+02, threshold=2.976e+02, percent-clipped=4.0 +2024-08-03 13:37:50,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=129741.33333333333, ans=0.0 +2024-08-03 13:37:50,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=129741.33333333333, ans=0.125 +2024-08-03 13:37:58,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129778.0, ans=0.1 +2024-08-03 13:38:01,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. limit=10.0 +2024-08-03 13:38:04,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=129814.66666666667, ans=0.0 +2024-08-03 13:38:13,143 INFO [train.py:1114] (3/4) Epoch 10, batch 2650, loss[loss=0.2377, simple_loss=0.3196, pruned_loss=0.0779, over 13297.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2952, pruned_loss=0.06723, over 2640393.46 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:38:31,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=129924.66666666667, ans=0.0 +2024-08-03 13:38:56,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.48 vs. 
limit=22.5 +2024-08-03 13:38:57,881 INFO [train.py:1114] (3/4) Epoch 10, batch 2700, loss[loss=0.217, simple_loss=0.3037, pruned_loss=0.06516, over 13530.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2953, pruned_loss=0.06696, over 2638281.30 frames. ], batch size: 40, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:39:10,502 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.234e+02 1.365e+02 1.791e+02 3.628e+02, threshold=2.731e+02, percent-clipped=1.0 +2024-08-03 13:39:35,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130181.33333333333, ans=0.125 +2024-08-03 13:39:42,470 INFO [train.py:1114] (3/4) Epoch 10, batch 2750, loss[loss=0.1898, simple_loss=0.2729, pruned_loss=0.05335, over 13346.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2941, pruned_loss=0.0662, over 2635983.99 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 16.0 +2024-08-03 13:39:42,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=130218.0, ans=0.09899494936611666 +2024-08-03 13:39:42,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130218.0, ans=0.1 +2024-08-03 13:39:43,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-08-03 13:39:58,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=130254.66666666667, ans=0.125 +2024-08-03 13:39:58,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.85 vs. limit=22.5 +2024-08-03 13:40:20,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=130364.66666666667, ans=0.0 +2024-08-03 13:40:20,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=130364.66666666667, ans=0.125 +2024-08-03 13:40:26,228 INFO [train.py:1114] (3/4) Epoch 10, batch 2800, loss[loss=0.268, simple_loss=0.3323, pruned_loss=0.1018, over 9316.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2941, pruned_loss=0.06645, over 2626796.39 frames. 
], batch size: 97, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:40:32,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=130401.33333333333, ans=0.2 +2024-08-03 13:40:36,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=130438.0, ans=0.125 +2024-08-03 13:40:38,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.179e+02 1.306e+02 1.650e+02 3.137e+02, threshold=2.611e+02, percent-clipped=1.0 +2024-08-03 13:40:43,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=130474.66666666667, ans=0.0 +2024-08-03 13:41:02,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=130548.0, ans=0.0 +2024-08-03 13:41:08,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130548.0, ans=0.125 +2024-08-03 13:41:09,748 INFO [train.py:1114] (3/4) Epoch 10, batch 2850, loss[loss=0.2048, simple_loss=0.2884, pruned_loss=0.06054, over 13370.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2949, pruned_loss=0.06691, over 2621258.71 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:41:16,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130584.66666666667, ans=0.1 +2024-08-03 13:41:17,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=130621.33333333333, ans=0.0 +2024-08-03 13:41:23,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.79 vs. limit=15.0 +2024-08-03 13:41:52,772 INFO [train.py:1114] (3/4) Epoch 10, batch 2900, loss[loss=0.2139, simple_loss=0.2945, pruned_loss=0.0666, over 13361.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2962, pruned_loss=0.06711, over 2631954.14 frames. ], batch size: 36, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:41:55,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.81 vs. limit=22.5 +2024-08-03 13:41:57,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.75 vs. limit=15.0 +2024-08-03 13:42:05,052 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.521e+01 1.253e+02 1.591e+02 2.001e+02 4.136e+02, threshold=3.182e+02, percent-clipped=6.0 +2024-08-03 13:42:36,449 INFO [train.py:1114] (3/4) Epoch 10, batch 2950, loss[loss=0.2133, simple_loss=0.2843, pruned_loss=0.07109, over 13326.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2948, pruned_loss=0.06681, over 2630145.70 frames. ], batch size: 34, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:42:37,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=130951.33333333333, ans=0.125 +2024-08-03 13:42:37,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. 
limit=15.0 +2024-08-03 13:42:40,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=15.0 +2024-08-03 13:42:46,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=130988.0, ans=10.0 +2024-08-03 13:42:49,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.62 vs. limit=15.0 +2024-08-03 13:43:09,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.84 vs. limit=15.0 +2024-08-03 13:43:10,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=131061.33333333333, ans=0.025 +2024-08-03 13:43:12,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0 +2024-08-03 13:43:37,103 INFO [train.py:1114] (3/4) Epoch 10, batch 3000, loss[loss=0.2306, simple_loss=0.3076, pruned_loss=0.07678, over 13545.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2942, pruned_loss=0.06679, over 2629567.17 frames. ], batch size: 37, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:43:37,104 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 13:43:49,245 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.1798, simple_loss=0.2807, pruned_loss=0.03945, over 944034.00 frames. +2024-08-03 13:43:49,246 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 13:43:50,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.55 vs. limit=15.0 +2024-08-03 13:44:00,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=131171.33333333334, ans=0.125 +2024-08-03 13:44:02,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.869e+01 1.161e+02 1.330e+02 1.562e+02 2.944e+02, threshold=2.661e+02, percent-clipped=0.0 +2024-08-03 13:44:04,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=131171.33333333334, ans=0.125 +2024-08-03 13:44:06,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=131171.33333333334, ans=0.0 +2024-08-03 13:44:34,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=131281.33333333334, ans=0.125 +2024-08-03 13:44:39,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=131281.33333333334, ans=0.125 +2024-08-03 13:44:43,933 INFO [train.py:1114] (3/4) Epoch 10, batch 3050, loss[loss=0.2029, simple_loss=0.2801, pruned_loss=0.06283, over 13544.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2951, pruned_loss=0.06734, over 2626498.65 frames. 
], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:44:57,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=131354.66666666666, ans=0.0 +2024-08-03 13:45:30,517 INFO [train.py:1114] (3/4) Epoch 10, batch 3100, loss[loss=0.268, simple_loss=0.3337, pruned_loss=0.1011, over 13390.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2946, pruned_loss=0.067, over 2626146.76 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 32.0 +2024-08-03 13:45:39,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131538.0, ans=0.1 +2024-08-03 13:45:39,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=131538.0, ans=0.1 +2024-08-03 13:45:43,072 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.582e+01 1.186e+02 1.379e+02 1.722e+02 2.702e+02, threshold=2.757e+02, percent-clipped=2.0 +2024-08-03 13:45:45,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=131538.0, ans=0.125 +2024-08-03 13:45:55,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=131611.33333333334, ans=0.025 +2024-08-03 13:46:03,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=131611.33333333334, ans=0.0 +2024-08-03 13:46:24,112 INFO [train.py:1114] (3/4) Epoch 10, batch 3150, loss[loss=0.2113, simple_loss=0.3019, pruned_loss=0.06041, over 13040.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2933, pruned_loss=0.06608, over 2626826.62 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:46:26,070 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:46:36,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131721.33333333334, ans=0.125 +2024-08-03 13:46:54,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=131794.66666666666, ans=0.0 +2024-08-03 13:46:59,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=131794.66666666666, ans=0.125 +2024-08-03 13:47:10,945 INFO [train.py:1114] (3/4) Epoch 10, batch 3200, loss[loss=0.2256, simple_loss=0.3055, pruned_loss=0.07286, over 13547.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.293, pruned_loss=0.066, over 2634237.23 frames. ], batch size: 37, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:47:15,723 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=12.0 +2024-08-03 13:47:20,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=131904.66666666666, ans=0.2 +2024-08-03 13:47:20,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.72 vs. 
limit=22.5 +2024-08-03 13:47:21,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=131904.66666666666, ans=0.07 +2024-08-03 13:47:22,879 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.174e+02 1.311e+02 1.747e+02 3.069e+02, threshold=2.622e+02, percent-clipped=2.0 +2024-08-03 13:47:55,717 INFO [train.py:1114] (3/4) Epoch 10, batch 3250, loss[loss=0.2621, simple_loss=0.3373, pruned_loss=0.09342, over 13386.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2942, pruned_loss=0.06639, over 2637966.82 frames. ], batch size: 38, lr: 1.22e-02, grad_scale: 32.0 +2024-08-03 13:47:59,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=15.0 +2024-08-03 13:48:12,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=132124.66666666666, ans=0.0 +2024-08-03 13:48:20,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=132161.33333333334, ans=0.125 +2024-08-03 13:48:21,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=132161.33333333334, ans=0.0 +2024-08-03 13:48:23,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=132161.33333333334, ans=0.09899494936611666 +2024-08-03 13:48:33,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.64 vs. limit=15.0 +2024-08-03 13:48:38,133 INFO [train.py:1114] (3/4) Epoch 10, batch 3300, loss[loss=0.2356, simple_loss=0.3116, pruned_loss=0.07984, over 12874.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2933, pruned_loss=0.06625, over 2639940.17 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:48:39,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=132234.66666666666, ans=0.0 +2024-08-03 13:48:46,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=132271.33333333334, ans=0.2 +2024-08-03 13:48:47,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=132271.33333333334, ans=0.1 +2024-08-03 13:48:48,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-08-03 13:48:50,933 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.252e+01 1.284e+02 1.634e+02 2.035e+02 3.075e+02, threshold=3.268e+02, percent-clipped=7.0 +2024-08-03 13:49:01,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=132308.0, ans=0.0 +2024-08-03 13:49:01,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=132308.0, ans=0.0 +2024-08-03 13:49:15,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=132381.33333333334, ans=0.0 +2024-08-03 13:49:21,121 INFO [train.py:1114] (3/4) Epoch 10, batch 3350, loss[loss=0.233, simple_loss=0.3078, pruned_loss=0.07909, over 12993.00 frames. 
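The recurring `optim.py` WARNING lines summarize recent gradient norms as five quantiles (apparently min, 25%, median, 75%, max) plus a clipping threshold. In every such line in this log the threshold equals `Clipping_scale` (2.0) times the logged median, e.g. 2 × 1.634e+02 = 3.268e+02 just above. A sketch of bookkeeping that reproduces these lines; the window length and the per-batch clipping rule are assumptions:

```python
import collections
import statistics

# Sketch of median-based gradient clipping as suggested by the WARNING
# lines: threshold = clipping_scale * median(recent grad norms). The
# window size and per-batch clipping decision are illustrative.
CLIPPING_SCALE = 2.0

class GradNormClipper:
    def __init__(self, window: int = 128):
        self.norms = collections.deque(maxlen=window)

    def update(self, grad_norm: float) -> float:
        """Record a new grad norm; return the factor to scale grads by."""
        self.norms.append(grad_norm)
        threshold = CLIPPING_SCALE * statistics.median(self.norms)
        return min(1.0, threshold / max(grad_norm, 1e-20))

    def report(self) -> str:
        qs = statistics.quantiles(self.norms, n=4)  # 25%, 50%, 75% cuts
        threshold = CLIPPING_SCALE * qs[1]
        clipped = sum(n > threshold for n in self.norms) / len(self.norms)
        return (f"grad-norm quartiles {min(self.norms):.3e} {qs[0]:.3e} "
                f"{qs[1]:.3e} {qs[2]:.3e} {max(self.norms):.3e}, "
                f"threshold={threshold:.3e}, "
                f"percent-clipped={100 * clipped:.1f}")
```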
], tot_loss[loss=0.2134, simple_loss=0.2939, pruned_loss=0.06649, over 2629046.50 frames. ], batch size: 48, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:49:29,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.75 vs. limit=15.0 +2024-08-03 13:49:50,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=132528.0, ans=0.125 +2024-08-03 13:50:00,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=132564.66666666666, ans=0.0 +2024-08-03 13:50:03,865 INFO [train.py:1114] (3/4) Epoch 10, batch 3400, loss[loss=0.1771, simple_loss=0.2574, pruned_loss=0.04844, over 13559.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2937, pruned_loss=0.06677, over 2625864.54 frames. ], batch size: 31, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:50:05,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=132601.33333333334, ans=0.04949747468305833 +2024-08-03 13:50:11,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.06 vs. limit=22.5 +2024-08-03 13:50:14,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=132638.0, ans=0.125 +2024-08-03 13:50:15,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=132638.0, ans=0.2 +2024-08-03 13:50:16,695 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.511e+01 1.188e+02 1.377e+02 1.704e+02 3.995e+02, threshold=2.755e+02, percent-clipped=1.0 +2024-08-03 13:50:31,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=12.0 +2024-08-03 13:50:45,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0 +2024-08-03 13:50:47,222 INFO [train.py:1114] (3/4) Epoch 10, batch 3450, loss[loss=0.2411, simple_loss=0.3217, pruned_loss=0.08018, over 12891.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2942, pruned_loss=0.06703, over 2628359.22 frames. ], batch size: 52, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:50:47,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=132784.66666666666, ans=0.125 +2024-08-03 13:50:54,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=132784.66666666666, ans=0.0 +2024-08-03 13:51:06,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=132858.0, ans=0.125 +2024-08-03 13:51:15,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=132894.66666666666, ans=0.125 +2024-08-03 13:51:30,954 INFO [train.py:1114] (3/4) Epoch 10, batch 3500, loss[loss=0.1896, simple_loss=0.2645, pruned_loss=0.05736, over 13516.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2938, pruned_loss=0.06693, over 2630214.64 frames. 
], batch size: 34, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:51:31,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=132968.0, ans=0.125 +2024-08-03 13:51:33,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=132968.0, ans=0.0 +2024-08-03 13:51:35,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=132968.0, ans=0.025 +2024-08-03 13:51:35,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0 +2024-08-03 13:51:38,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=132968.0, ans=0.0 +2024-08-03 13:51:38,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133004.66666666666, ans=0.125 +2024-08-03 13:51:43,628 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.579e+01 1.188e+02 1.502e+02 1.811e+02 2.689e+02, threshold=3.004e+02, percent-clipped=0.0 +2024-08-03 13:52:10,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=133114.66666666666, ans=0.0 +2024-08-03 13:52:13,033 INFO [train.py:1114] (3/4) Epoch 10, batch 3550, loss[loss=0.2124, simple_loss=0.3019, pruned_loss=0.06144, over 12366.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2966, pruned_loss=0.06848, over 2627522.23 frames. ], batch size: 58, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:52:23,342 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:52:28,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133188.0, ans=0.125 +2024-08-03 13:52:31,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=133224.66666666666, ans=0.2 +2024-08-03 13:52:34,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=133224.66666666666, ans=0.125 +2024-08-03 13:52:36,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.56 vs. limit=6.0 +2024-08-03 13:52:42,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=133261.33333333334, ans=0.09899494936611666 +2024-08-03 13:52:56,965 INFO [train.py:1114] (3/4) Epoch 10, batch 3600, loss[loss=0.2714, simple_loss=0.3363, pruned_loss=0.1033, over 8861.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3013, pruned_loss=0.07289, over 2483555.32 frames. 
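The `Whitening:` lines compare a per-module statistic against a limit; the statistic measures how far the channel covariance of an activation is from a multiple of the identity ("white"). One standard metric with exactly the logged behaviour, equal to 1 for perfectly white features and growing as the covariance becomes anisotropic, is d · Σᵢλᵢ² / (Σᵢλᵢ)² over the covariance eigenvalues λᵢ. The sketch below is this illustrative formulation (single group, for simplicity), not the literal `scaling.py` implementation:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """Illustrative whiteness metric for features x of shape (N, C).

    Equals 1.0 when the channel covariance is a multiple of the identity
    and grows as the covariance departs from white. Not the literal
    scaling.py code; a standard formulation with the same
    "metric vs. limit" behaviour seen in the log.
    """
    n, c = x.shape
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / n                 # (C, C) channel covariance
    frob_sq = (cov * cov).sum()         # = sum of squared eigenvalues
    trace = torch.diagonal(cov).sum()   # = sum of eigenvalues
    return c * frob_sq / (trace * trace + 1e-20)

x_white = torch.randn(10_000, 256)
x_skewed = x_white * torch.linspace(0.1, 3.0, 256)
print(whitening_metric(x_white))   # ~1.0
print(whitening_metric(x_skewed))  # noticeably > 1
```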
], batch size: 96, lr: 1.22e-02, grad_scale: 16.0 +2024-08-03 13:53:10,853 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.160e+02 1.272e+02 1.395e+02 1.858e+02, threshold=2.544e+02, percent-clipped=0.0 +2024-08-03 13:53:11,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133371.33333333334, ans=0.1 +2024-08-03 13:53:22,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=133444.66666666666, ans=0.125 +2024-08-03 13:53:26,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=133444.66666666666, ans=0.015 +2024-08-03 13:55:05,757 INFO [train.py:1114] (3/4) Epoch 11, batch 0, loss[loss=0.2017, simple_loss=0.2816, pruned_loss=0.0609, over 13328.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2816, pruned_loss=0.0609, over 13328.00 frames. ], batch size: 33, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:55:05,757 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 13:55:17,611 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1876, simple_loss=0.2878, pruned_loss=0.04367, over 944034.00 frames. +2024-08-03 13:55:17,612 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 13:55:30,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.45 vs. limit=22.5 +2024-08-03 13:55:53,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=133591.33333333334, ans=0.125 +2024-08-03 13:56:03,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.03 vs. limit=15.0 +2024-08-03 13:56:05,958 INFO [train.py:1114] (3/4) Epoch 11, batch 50, loss[loss=0.1971, simple_loss=0.2725, pruned_loss=0.06079, over 13426.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2947, pruned_loss=0.06725, over 579068.59 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:56:21,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=133701.33333333334, ans=0.0 +2024-08-03 13:56:24,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.67 vs. limit=15.0 +2024-08-03 13:56:31,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.198e+02 1.313e+02 1.584e+02 3.827e+02, threshold=2.627e+02, percent-clipped=3.0 +2024-08-03 13:56:33,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=133738.0, ans=0.95 +2024-08-03 13:56:42,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133774.66666666666, ans=0.1 +2024-08-03 13:56:53,171 INFO [train.py:1114] (3/4) Epoch 11, batch 100, loss[loss=0.2076, simple_loss=0.2851, pruned_loss=0.0651, over 13527.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2964, pruned_loss=0.06758, over 1026592.65 frames. 
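Each `Computing validation loss` block evaluates the model on a fixed dev set — note the frame count, 944034.00, is identical at every validation across epochs — and then reports peak device memory. A rough sketch of such a hook; `compute_loss` is a hypothetical method standing in for the real loss computation, while `torch.cuda.max_memory_allocated` is the actual PyTorch API:

```python
import torch

@torch.no_grad()
def log_validation(model, dev_loader, device) -> None:
    model.eval()
    total_loss, total_frames = 0.0, 0.0
    for batch in dev_loader:                 # fixed dev set, hence the
        loss, num_frames = model.compute_loss(batch)  # constant frame count
        total_loss += loss.item() * num_frames
        total_frames += num_frames
    model.train()
    print(f"validation: loss={total_loss / total_frames:.4f}, "
          f"over {total_frames:.2f} frames.")
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mb}MB")
```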
], batch size: 35, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:57:38,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=133884.66666666666, ans=0.2 +2024-08-03 13:57:43,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=133921.33333333334, ans=0.0 +2024-08-03 13:57:50,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=133921.33333333334, ans=0.0 +2024-08-03 13:58:13,267 INFO [train.py:1114] (3/4) Epoch 11, batch 150, loss[loss=0.1836, simple_loss=0.2661, pruned_loss=0.05053, over 13419.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2923, pruned_loss=0.06413, over 1388115.75 frames. ], batch size: 32, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 13:58:33,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=134104.66666666666, ans=0.2 +2024-08-03 13:58:36,662 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.130e+02 1.367e+02 1.649e+02 2.945e+02, threshold=2.733e+02, percent-clipped=2.0 +2024-08-03 13:58:37,793 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 13:58:54,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=134178.0, ans=0.125 +2024-08-03 13:58:58,429 INFO [train.py:1114] (3/4) Epoch 11, batch 200, loss[loss=0.2343, simple_loss=0.3159, pruned_loss=0.07635, over 12534.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2917, pruned_loss=0.06411, over 1666902.13 frames. ], batch size: 58, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:00:21,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0 +2024-08-03 14:00:34,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.59 vs. limit=10.0 +2024-08-03 14:00:37,212 INFO [train.py:1114] (3/4) Epoch 11, batch 250, loss[loss=0.2286, simple_loss=0.3079, pruned_loss=0.07463, over 13337.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2913, pruned_loss=0.06363, over 1886213.50 frames. 
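The dense `ScheduledFloat:` lines print the current value (`ans`) of per-module hyperparameters — dropout probabilities, skip rates, balancer bounds — as a function of `batch_count`. A piecewise-linear schedule over batch count reproduces this behaviour; the breakpoints below are illustrative, since the run's actual schedules are not recoverable from the log:

```python
import bisect

def scheduled_float(batch_count: float,
                    points: list[tuple[float, float]]) -> float:
    """Piecewise-linear schedule: `points` is [(batch_count, value), ...],
    sorted by batch_count; the value is clamped beyond the endpoints."""
    xs = [p[0] for p in points]
    i = bisect.bisect_right(xs, batch_count)
    if i == 0:
        return points[0][1]
    if i == len(points):
        return points[-1][1]
    (x0, y0), (x1, y1) = points[i - 1], points[i]
    return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# e.g. a dropout that decays from 0.3 to 0.1 over the first 20k batches:
dropout_schedule = [(0.0, 0.3), (20_000.0, 0.1)]
print(scheduled_float(131_061.33, dropout_schedule))  # -> 0.1 (clamped)
```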
], batch size: 46, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:00:40,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134398.0, ans=0.1 +2024-08-03 14:00:42,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=134398.0, ans=0.2 +2024-08-03 14:00:46,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=134434.66666666666, ans=0.125 +2024-08-03 14:00:48,327 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:00:49,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134434.66666666666, ans=0.0 +2024-08-03 14:00:51,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=134434.66666666666, ans=0.025 +2024-08-03 14:00:52,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=134434.66666666666, ans=0.0 +2024-08-03 14:00:57,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=134471.33333333334, ans=0.125 +2024-08-03 14:00:59,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.51 vs. limit=22.5 +2024-08-03 14:01:00,742 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.508e+01 1.185e+02 1.387e+02 1.656e+02 4.049e+02, threshold=2.774e+02, percent-clipped=1.0 +2024-08-03 14:01:19,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.97 vs. limit=10.0 +2024-08-03 14:01:25,007 INFO [train.py:1114] (3/4) Epoch 11, batch 300, loss[loss=0.2135, simple_loss=0.2964, pruned_loss=0.06537, over 13466.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2903, pruned_loss=0.06353, over 2051910.32 frames. ], batch size: 42, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:01:27,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=134581.33333333334, ans=0.0 +2024-08-03 14:01:32,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=134581.33333333334, ans=0.125 +2024-08-03 14:01:46,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=134654.66666666666, ans=0.2 +2024-08-03 14:01:49,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=134654.66666666666, ans=0.125 +2024-08-03 14:02:01,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=134728.0, ans=0.125 +2024-08-03 14:02:11,939 INFO [train.py:1114] (3/4) Epoch 11, batch 350, loss[loss=0.1653, simple_loss=0.2493, pruned_loss=0.0407, over 13576.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2912, pruned_loss=0.06384, over 2181748.60 frames. 
], batch size: 33, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:02:37,147 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.915e+01 1.230e+02 1.440e+02 1.763e+02 3.166e+02, threshold=2.879e+02, percent-clipped=2.0 +2024-08-03 14:02:52,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=134911.33333333334, ans=0.0 +2024-08-03 14:02:56,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=134911.33333333334, ans=0.2 +2024-08-03 14:02:58,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134948.0, ans=0.1 +2024-08-03 14:02:58,898 INFO [train.py:1114] (3/4) Epoch 11, batch 400, loss[loss=0.22, simple_loss=0.2967, pruned_loss=0.07164, over 13369.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2921, pruned_loss=0.06451, over 2286154.88 frames. ], batch size: 37, lr: 1.16e-02, grad_scale: 32.0 +2024-08-03 14:03:13,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=134984.66666666666, ans=0.125 +2024-08-03 14:03:15,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=134984.66666666666, ans=0.125 +2024-08-03 14:03:20,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=135021.33333333334, ans=0.0 +2024-08-03 14:03:21,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=135021.33333333334, ans=0.1 +2024-08-03 14:03:25,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=135058.0, ans=0.2 +2024-08-03 14:03:42,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=135131.33333333334, ans=0.125 +2024-08-03 14:03:43,649 INFO [train.py:1114] (3/4) Epoch 11, batch 450, loss[loss=0.2188, simple_loss=0.307, pruned_loss=0.06533, over 13538.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2925, pruned_loss=0.06476, over 2359138.91 frames. ], batch size: 38, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:03:56,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=135168.0, ans=0.5 +2024-08-03 14:04:02,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=135204.66666666666, ans=0.125 +2024-08-03 14:04:08,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.113e+02 1.253e+02 1.576e+02 3.089e+02, threshold=2.506e+02, percent-clipped=1.0 +2024-08-03 14:04:10,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=135204.66666666666, ans=0.0 +2024-08-03 14:04:13,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.80 vs. 
limit=15.0 +2024-08-03 14:04:18,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135241.33333333334, ans=0.1 +2024-08-03 14:04:23,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=12.0 +2024-08-03 14:04:30,244 INFO [train.py:1114] (3/4) Epoch 11, batch 500, loss[loss=0.2131, simple_loss=0.3059, pruned_loss=0.06021, over 13393.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2914, pruned_loss=0.06433, over 2424917.33 frames. ], batch size: 43, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:04:30,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=135314.66666666666, ans=0.2 +2024-08-03 14:04:48,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=135351.33333333334, ans=0.125 +2024-08-03 14:05:07,348 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.22 vs. limit=15.0 +2024-08-03 14:05:07,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=135461.33333333334, ans=0.125 +2024-08-03 14:05:17,624 INFO [train.py:1114] (3/4) Epoch 11, batch 550, loss[loss=0.2365, simple_loss=0.3123, pruned_loss=0.08033, over 13127.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2915, pruned_loss=0.06448, over 2467638.31 frames. ], batch size: 48, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:05:21,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.83 vs. limit=15.0 +2024-08-03 14:05:23,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=135498.0, ans=10.0 +2024-08-03 14:05:31,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=135534.66666666666, ans=0.0 +2024-08-03 14:05:33,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=135534.66666666666, ans=0.0 +2024-08-03 14:05:44,378 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.929e+01 1.236e+02 1.527e+02 1.937e+02 2.923e+02, threshold=3.054e+02, percent-clipped=2.0 +2024-08-03 14:05:49,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=135608.0, ans=0.125 +2024-08-03 14:05:57,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=135644.66666666666, ans=0.0 +2024-08-03 14:06:07,517 INFO [train.py:1114] (3/4) Epoch 11, batch 600, loss[loss=0.2436, simple_loss=0.3246, pruned_loss=0.08131, over 13271.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2918, pruned_loss=0.06435, over 2507280.81 frames. ], batch size: 46, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:06:11,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.49 vs. 
limit=15.0 +2024-08-03 14:06:20,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135718.0, ans=0.125 +2024-08-03 14:06:23,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=135718.0, ans=0.125 +2024-08-03 14:06:24,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.80 vs. limit=5.0 +2024-08-03 14:06:38,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135791.33333333334, ans=0.1 +2024-08-03 14:06:52,193 INFO [train.py:1114] (3/4) Epoch 11, batch 650, loss[loss=0.2088, simple_loss=0.2911, pruned_loss=0.06325, over 13551.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2911, pruned_loss=0.06421, over 2542741.91 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:07:09,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135938.0, ans=0.1 +2024-08-03 14:07:16,371 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.852e+01 1.178e+02 1.481e+02 2.104e+02 3.972e+02, threshold=2.962e+02, percent-clipped=10.0 +2024-08-03 14:07:38,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=136011.33333333334, ans=12.0 +2024-08-03 14:07:39,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.50 vs. limit=15.0 +2024-08-03 14:07:39,638 INFO [train.py:1114] (3/4) Epoch 11, batch 700, loss[loss=0.1964, simple_loss=0.2753, pruned_loss=0.05876, over 13547.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2906, pruned_loss=0.06359, over 2564900.09 frames. ], batch size: 35, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:07:41,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=136048.0, ans=0.0 +2024-08-03 14:07:50,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136084.66666666666, ans=0.125 +2024-08-03 14:07:57,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=136121.33333333334, ans=0.0 +2024-08-03 14:08:08,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=15.0 +2024-08-03 14:08:19,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=136194.66666666666, ans=0.2 +2024-08-03 14:08:26,759 INFO [train.py:1114] (3/4) Epoch 11, batch 750, loss[loss=0.1846, simple_loss=0.2824, pruned_loss=0.04343, over 13362.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2901, pruned_loss=0.06332, over 2582006.53 frames. ], batch size: 37, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:08:29,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=136231.33333333334, ans=0.2 +2024-08-03 14:08:31,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.23 vs. 
limit=22.5 +2024-08-03 14:08:34,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=136231.33333333334, ans=0.0 +2024-08-03 14:08:39,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.51 vs. limit=15.0 +2024-08-03 14:08:51,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.398e+01 1.179e+02 1.348e+02 1.712e+02 2.826e+02, threshold=2.695e+02, percent-clipped=0.0 +2024-08-03 14:08:53,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=136341.33333333334, ans=0.5 +2024-08-03 14:08:54,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=136341.33333333334, ans=0.125 +2024-08-03 14:08:55,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136341.33333333334, ans=0.125 +2024-08-03 14:08:56,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=136341.33333333334, ans=0.0 +2024-08-03 14:08:58,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136341.33333333334, ans=0.125 +2024-08-03 14:09:06,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=136378.0, ans=0.125 +2024-08-03 14:09:12,257 INFO [train.py:1114] (3/4) Epoch 11, batch 800, loss[loss=0.2144, simple_loss=0.2896, pruned_loss=0.06956, over 13347.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2901, pruned_loss=0.06318, over 2596364.87 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 32.0 +2024-08-03 14:09:12,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=136414.66666666666, ans=0.035 +2024-08-03 14:09:34,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=136488.0, ans=0.0 +2024-08-03 14:09:39,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.06 vs. limit=15.0 +2024-08-03 14:09:41,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.29 vs. limit=22.5 +2024-08-03 14:10:01,247 INFO [train.py:1114] (3/4) Epoch 11, batch 850, loss[loss=0.2047, simple_loss=0.2967, pruned_loss=0.05633, over 13333.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2895, pruned_loss=0.06317, over 2609366.89 frames. ], batch size: 40, lr: 1.15e-02, grad_scale: 16.0 +2024-08-03 14:10:09,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=136634.66666666666, ans=0.2 +2024-08-03 14:10:11,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.91 vs. limit=15.0 +2024-08-03 14:10:15,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.47 vs. 
limit=15.0 +2024-08-03 14:10:26,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.536e+01 1.181e+02 1.335e+02 1.644e+02 2.754e+02, threshold=2.669e+02, percent-clipped=1.0 +2024-08-03 14:10:26,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=136671.33333333334, ans=0.0 +2024-08-03 14:10:29,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=136708.0, ans=0.125 +2024-08-03 14:10:32,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=136708.0, ans=0.0 +2024-08-03 14:10:36,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=136744.66666666666, ans=0.09899494936611666 +2024-08-03 14:10:46,280 INFO [train.py:1114] (3/4) Epoch 11, batch 900, loss[loss=0.2013, simple_loss=0.2789, pruned_loss=0.06185, over 13363.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2895, pruned_loss=0.06321, over 2611867.92 frames. ], batch size: 33, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:10:59,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136818.0, ans=0.125 +2024-08-03 14:11:00,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=136818.0, ans=0.0 +2024-08-03 14:11:06,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=136854.66666666666, ans=0.125 +2024-08-03 14:11:28,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136928.0, ans=0.1 +2024-08-03 14:11:31,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=136928.0, ans=0.0 +2024-08-03 14:11:32,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=136964.66666666666, ans=0.125 +2024-08-03 14:11:33,344 INFO [train.py:1114] (3/4) Epoch 11, batch 950, loss[loss=0.1967, simple_loss=0.2782, pruned_loss=0.05763, over 13506.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2903, pruned_loss=0.06359, over 2612694.78 frames. ], batch size: 34, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:11:34,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=136964.66666666666, ans=0.125 +2024-08-03 14:11:51,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.08 vs. limit=15.0 +2024-08-03 14:12:02,581 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.938e+01 1.284e+02 1.520e+02 1.871e+02 3.091e+02, threshold=3.040e+02, percent-clipped=3.0 +2024-08-03 14:12:19,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=137111.33333333334, ans=0.125 +2024-08-03 14:12:20,789 INFO [train.py:1114] (3/4) Epoch 11, batch 1000, loss[loss=0.1756, simple_loss=0.2609, pruned_loss=0.04512, over 13361.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2909, pruned_loss=0.06411, over 2609635.41 frames. 
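The `grad_scale` column in the batch lines moves between 32.0, 16.0, and 8.0 (e.g. 32 → 16 → 8 across epoch 11 batches 800–900, then back up to 16 by batch 1200), which is the signature of dynamic loss scaling under mixed precision: halve the scale on overflow, grow it back after a run of clean steps. A sketch of that behaviour; the growth interval is an assumption, though the 0.5/2.0 factors match `torch.cuda.amp.GradScaler`'s defaults:

```python
# Sketch of the dynamic loss-scale behaviour implied by the grad_scale
# column (32 -> 16 -> 8 -> 16 ...). The growth interval is illustrative.
class LossScaler:
    def __init__(self, init_scale: float = 32.0,
                 growth_interval: int = 1000):
        self.scale = init_scale
        self.growth_interval = growth_interval
        self._good_steps = 0

    def update(self, found_inf: bool) -> None:
        if found_inf:
            self.scale *= 0.5        # back off; this step is skipped
            self._good_steps = 0
        else:
            self._good_steps += 1
            if self._good_steps == self.growth_interval:
                self.scale *= 2.0    # cautiously grow back
                self._good_steps = 0
```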
], batch size: 35, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:12:21,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.76 vs. limit=15.0 +2024-08-03 14:12:25,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=137148.0, ans=0.0 +2024-08-03 14:12:33,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=137184.66666666666, ans=0.125 +2024-08-03 14:12:40,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.74 vs. limit=22.5 +2024-08-03 14:12:42,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.58 vs. limit=15.0 +2024-08-03 14:12:44,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.38 vs. limit=15.0 +2024-08-03 14:12:55,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137258.0, ans=0.1 +2024-08-03 14:13:02,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=137294.66666666666, ans=0.0 +2024-08-03 14:13:06,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=137294.66666666666, ans=0.125 +2024-08-03 14:13:09,236 INFO [train.py:1114] (3/4) Epoch 11, batch 1050, loss[loss=0.2132, simple_loss=0.2985, pruned_loss=0.06392, over 13586.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2903, pruned_loss=0.06398, over 2614818.97 frames. ], batch size: 39, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:13:12,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=137331.33333333334, ans=0.0 +2024-08-03 14:13:24,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.79 vs. limit=15.0 +2024-08-03 14:13:35,770 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.48 vs. limit=15.0 +2024-08-03 14:13:36,245 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.744e+01 1.143e+02 1.275e+02 1.569e+02 2.169e+02, threshold=2.550e+02, percent-clipped=0.0 +2024-08-03 14:13:39,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=137441.33333333334, ans=0.05 +2024-08-03 14:13:39,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137441.33333333334, ans=0.125 +2024-08-03 14:13:41,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=137441.33333333334, ans=0.0 +2024-08-03 14:13:54,633 INFO [train.py:1114] (3/4) Epoch 11, batch 1100, loss[loss=0.2077, simple_loss=0.2861, pruned_loss=0.06466, over 13557.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2901, pruned_loss=0.06376, over 2619023.63 frames. 
], batch size: 36, lr: 1.15e-02, grad_scale: 8.0 +2024-08-03 14:13:58,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=137514.66666666666, ans=0.0 +2024-08-03 14:14:12,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=137588.0, ans=0.0 +2024-08-03 14:14:13,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=137588.0, ans=0.0 +2024-08-03 14:14:39,727 INFO [train.py:1114] (3/4) Epoch 11, batch 1150, loss[loss=0.1788, simple_loss=0.2662, pruned_loss=0.04569, over 13558.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2893, pruned_loss=0.06366, over 2618217.84 frames. ], batch size: 36, lr: 1.14e-02, grad_scale: 8.0 +2024-08-03 14:14:57,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=15.0 +2024-08-03 14:15:03,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137771.33333333334, ans=0.1 +2024-08-03 14:15:09,308 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.634e+01 1.163e+02 1.278e+02 1.596e+02 2.243e+02, threshold=2.555e+02, percent-clipped=0.0 +2024-08-03 14:15:13,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=137808.0, ans=0.0 +2024-08-03 14:15:16,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=137808.0, ans=0.0 +2024-08-03 14:15:22,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=137844.66666666666, ans=0.0 +2024-08-03 14:15:25,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.22 vs. limit=15.0 +2024-08-03 14:15:29,799 INFO [train.py:1114] (3/4) Epoch 11, batch 1200, loss[loss=0.2339, simple_loss=0.3153, pruned_loss=0.07624, over 13582.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2905, pruned_loss=0.064, over 2616163.41 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:15:36,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=137881.33333333334, ans=0.1 +2024-08-03 14:15:52,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=137954.66666666666, ans=0.0 +2024-08-03 14:15:56,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=137991.33333333334, ans=0.0 +2024-08-03 14:15:58,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.70 vs. limit=15.0 +2024-08-03 14:16:14,554 INFO [train.py:1114] (3/4) Epoch 11, batch 1250, loss[loss=0.2165, simple_loss=0.2989, pruned_loss=0.06701, over 13445.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2902, pruned_loss=0.06312, over 2628230.27 frames. 
], batch size: 42, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:16:14,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=138064.66666666666, ans=0.95 +2024-08-03 14:16:34,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0 +2024-08-03 14:16:42,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138138.0, ans=0.1 +2024-08-03 14:16:45,547 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.814e+01 1.117e+02 1.380e+02 1.651e+02 4.437e+02, threshold=2.760e+02, percent-clipped=2.0 +2024-08-03 14:16:54,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=6.0 +2024-08-03 14:17:03,525 INFO [train.py:1114] (3/4) Epoch 11, batch 1300, loss[loss=0.2371, simple_loss=0.3138, pruned_loss=0.08017, over 12848.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2897, pruned_loss=0.06304, over 2630774.37 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:17:27,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=138321.33333333334, ans=0.0 +2024-08-03 14:17:31,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=138358.0, ans=0.0 +2024-08-03 14:17:43,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=15.0 +2024-08-03 14:17:44,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=138394.66666666666, ans=0.0 +2024-08-03 14:17:48,833 INFO [train.py:1114] (3/4) Epoch 11, batch 1350, loss[loss=0.2099, simple_loss=0.2914, pruned_loss=0.06419, over 13558.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2898, pruned_loss=0.06323, over 2638903.07 frames. ], batch size: 37, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:17:49,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138431.33333333334, ans=0.1 +2024-08-03 14:17:55,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=138431.33333333334, ans=0.125 +2024-08-03 14:17:57,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=138468.0, ans=10.0 +2024-08-03 14:18:09,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=138504.66666666666, ans=0.025 +2024-08-03 14:18:09,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138504.66666666666, ans=0.1 +2024-08-03 14:18:11,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.65 vs. 
limit=15.0 +2024-08-03 14:18:15,978 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.205e+02 1.432e+02 1.737e+02 2.785e+02, threshold=2.864e+02, percent-clipped=2.0 +2024-08-03 14:18:25,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138541.33333333334, ans=0.1 +2024-08-03 14:18:36,198 INFO [train.py:1114] (3/4) Epoch 11, batch 1400, loss[loss=0.1934, simple_loss=0.2664, pruned_loss=0.0602, over 13263.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2906, pruned_loss=0.06385, over 2642519.69 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:18:41,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138614.66666666666, ans=0.1 +2024-08-03 14:18:50,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138651.33333333334, ans=0.1 +2024-08-03 14:18:55,956 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:19:00,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0 +2024-08-03 14:19:08,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=138724.66666666666, ans=10.0 +2024-08-03 14:19:22,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=138798.0, ans=0.0 +2024-08-03 14:19:23,023 INFO [train.py:1114] (3/4) Epoch 11, batch 1450, loss[loss=0.2254, simple_loss=0.3128, pruned_loss=0.06902, over 13414.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2908, pruned_loss=0.06391, over 2641673.28 frames. ], batch size: 43, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:19:34,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=138834.66666666666, ans=0.0 +2024-08-03 14:19:35,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=138834.66666666666, ans=0.125 +2024-08-03 14:19:40,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.73 vs. 
limit=15.0 +2024-08-03 14:19:49,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=138908.0, ans=0.0 +2024-08-03 14:19:49,932 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.153e+02 1.341e+02 1.677e+02 2.779e+02, threshold=2.682e+02, percent-clipped=0.0 +2024-08-03 14:19:50,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138908.0, ans=0.125 +2024-08-03 14:19:51,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=138908.0, ans=0.2 +2024-08-03 14:20:08,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=138944.66666666666, ans=0.5 +2024-08-03 14:20:10,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0 +2024-08-03 14:20:11,557 INFO [train.py:1114] (3/4) Epoch 11, batch 1500, loss[loss=0.1854, simple_loss=0.2782, pruned_loss=0.04635, over 13407.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2904, pruned_loss=0.06325, over 2642460.52 frames. ], batch size: 39, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:20:29,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=138981.33333333334, ans=0.035 +2024-08-03 14:20:33,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=138981.33333333334, ans=0.0 +2024-08-03 14:20:58,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=139091.33333333334, ans=0.025 +2024-08-03 14:21:01,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=139091.33333333334, ans=6.0 +2024-08-03 14:21:04,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.97 vs. limit=12.0 +2024-08-03 14:21:08,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=139128.0, ans=0.025 +2024-08-03 14:21:12,188 INFO [train.py:1114] (3/4) Epoch 11, batch 1550, loss[loss=0.2268, simple_loss=0.3135, pruned_loss=0.07002, over 13388.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2902, pruned_loss=0.06339, over 2631592.34 frames. ], batch size: 41, lr: 1.14e-02, grad_scale: 16.0 +2024-08-03 14:21:25,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=12.0 +2024-08-03 14:21:31,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=139238.0, ans=10.0 +2024-08-03 14:21:39,471 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.226e+01 1.184e+02 1.477e+02 1.893e+02 3.709e+02, threshold=2.955e+02, percent-clipped=6.0 +2024-08-03 14:21:57,700 INFO [train.py:1114] (3/4) Epoch 11, batch 1600, loss[loss=0.1768, simple_loss=0.2644, pruned_loss=0.04459, over 13566.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2902, pruned_loss=0.06359, over 2625506.40 frames. 
], batch size: 39, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:22:03,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.40 vs. limit=15.0 +2024-08-03 14:22:03,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139348.0, ans=0.1 +2024-08-03 14:22:03,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=139348.0, ans=0.125 +2024-08-03 14:22:04,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0 +2024-08-03 14:22:10,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0 +2024-08-03 14:22:11,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.69 vs. limit=10.0 +2024-08-03 14:22:20,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=139421.33333333334, ans=0.0 +2024-08-03 14:22:47,808 INFO [train.py:1114] (3/4) Epoch 11, batch 1650, loss[loss=0.2133, simple_loss=0.2996, pruned_loss=0.0635, over 13334.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2906, pruned_loss=0.06414, over 2622515.22 frames. ], batch size: 40, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:22:55,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=139531.33333333334, ans=0.5 +2024-08-03 14:22:55,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.65 vs. limit=10.0 +2024-08-03 14:23:14,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.757e+01 1.241e+02 1.421e+02 1.904e+02 3.771e+02, threshold=2.842e+02, percent-clipped=2.0 +2024-08-03 14:23:22,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=139641.33333333334, ans=0.125 +2024-08-03 14:23:24,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=139678.0, ans=0.125 +2024-08-03 14:23:29,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.68 vs. limit=6.0 +2024-08-03 14:23:32,951 INFO [train.py:1114] (3/4) Epoch 11, batch 1700, loss[loss=0.2198, simple_loss=0.2874, pruned_loss=0.0761, over 13254.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2901, pruned_loss=0.06349, over 2630542.49 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:23:39,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=139714.66666666666, ans=0.0 +2024-08-03 14:23:41,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=139751.33333333334, ans=0.05 +2024-08-03 14:24:11,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.51 vs. 
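The `lr` column decays both within an epoch (1.16e-02 → 1.13e-02 over epoch 11) and at epoch boundaries (1.22e-02 at the end of epoch 10, 1.16e-02 at the start of epoch 11), so the schedule depends on both the batch index and the epoch index. A sketch in the style of icefall's Eden scheduler, which has this shape; `base_lr`, `lr_batches`, and `lr_epochs` below are illustrative guesses, not values recovered from this run:

```python
# Eden-style schedule: smooth decay in both the global batch index and
# the epoch index, matching the within-epoch drift plus the drop at each
# epoch boundary seen in the log. All constants here are illustrative.
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2)
                    / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2)
                    / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(eden_lr(0.045, batch=137_000, epoch=11))
```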
limit=22.5 +2024-08-03 14:24:20,145 INFO [train.py:1114] (3/4) Epoch 11, batch 1750, loss[loss=0.1983, simple_loss=0.268, pruned_loss=0.06431, over 13544.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2896, pruned_loss=0.06339, over 2634436.47 frames. ], batch size: 31, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:24:28,506 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:24:39,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=139971.33333333334, ans=0.2 +2024-08-03 14:24:46,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.11 vs. limit=22.5 +2024-08-03 14:24:47,666 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.098e+01 1.175e+02 1.427e+02 2.048e+02 3.147e+02, threshold=2.855e+02, percent-clipped=3.0 +2024-08-03 14:25:06,039 INFO [train.py:1114] (3/4) Epoch 11, batch 1800, loss[loss=0.2247, simple_loss=0.307, pruned_loss=0.07115, over 13547.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2902, pruned_loss=0.06364, over 2635547.15 frames. ], batch size: 38, lr: 1.14e-02, grad_scale: 32.0 +2024-08-03 14:25:25,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140081.33333333334, ans=0.1 +2024-08-03 14:25:27,165 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.08 vs. limit=10.0 +2024-08-03 14:25:34,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140118.0, ans=0.0 +2024-08-03 14:25:48,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.81 vs. limit=22.5 +2024-08-03 14:25:52,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=140191.33333333334, ans=0.0 +2024-08-03 14:25:56,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140191.33333333334, ans=0.125 +2024-08-03 14:26:12,438 INFO [train.py:1114] (3/4) Epoch 11, batch 1850, loss[loss=0.2176, simple_loss=0.3074, pruned_loss=0.06388, over 13396.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2897, pruned_loss=0.06333, over 2638016.52 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:26:35,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.44 vs. limit=10.0 +2024-08-03 14:26:38,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140338.0, ans=0.125 +2024-08-03 14:26:40,995 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.650e+01 1.194e+02 1.468e+02 2.041e+02 3.479e+02, threshold=2.936e+02, percent-clipped=2.0 +2024-08-03 14:26:46,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.19 vs. 
limit=15.0 +2024-08-03 14:26:51,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=140374.66666666666, ans=0.125 +2024-08-03 14:26:54,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140411.33333333334, ans=0.125 +2024-08-03 14:27:02,736 INFO [train.py:1114] (3/4) Epoch 11, batch 1900, loss[loss=0.217, simple_loss=0.3054, pruned_loss=0.06434, over 13319.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.29, pruned_loss=0.06351, over 2640986.34 frames. ], batch size: 40, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:27:12,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=140484.66666666666, ans=0.2 +2024-08-03 14:27:19,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=140484.66666666666, ans=0.0 +2024-08-03 14:27:41,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=140594.66666666666, ans=0.125 +2024-08-03 14:27:51,874 INFO [train.py:1114] (3/4) Epoch 11, batch 1950, loss[loss=0.2125, simple_loss=0.2849, pruned_loss=0.07, over 13568.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2915, pruned_loss=0.06422, over 2647221.69 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:28:04,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=140668.0, ans=0.0 +2024-08-03 14:28:19,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.848e+01 1.152e+02 1.250e+02 1.577e+02 2.279e+02, threshold=2.500e+02, percent-clipped=0.0 +2024-08-03 14:28:35,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=140778.0, ans=0.07 +2024-08-03 14:28:37,495 INFO [train.py:1114] (3/4) Epoch 11, batch 2000, loss[loss=0.2021, simple_loss=0.2745, pruned_loss=0.06491, over 13543.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2922, pruned_loss=0.06487, over 2637712.31 frames. ], batch size: 31, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:28:53,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=140851.33333333334, ans=0.2 +2024-08-03 14:28:55,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140888.0, ans=0.125 +2024-08-03 14:28:57,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140888.0, ans=0.125 +2024-08-03 14:28:58,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140888.0, ans=0.1 +2024-08-03 14:29:00,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140888.0, ans=0.0 +2024-08-03 14:29:09,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. 
limit=6.0 +2024-08-03 14:29:17,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=140961.33333333334, ans=0.0 +2024-08-03 14:29:23,069 INFO [train.py:1114] (3/4) Epoch 11, batch 2050, loss[loss=0.1779, simple_loss=0.262, pruned_loss=0.04694, over 13417.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2914, pruned_loss=0.06442, over 2634555.25 frames. ], batch size: 32, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:29:36,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0 +2024-08-03 14:29:41,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=141071.33333333334, ans=0.125 +2024-08-03 14:29:43,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=141071.33333333334, ans=0.0 +2024-08-03 14:29:43,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141071.33333333334, ans=0.125 +2024-08-03 14:29:51,066 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:29:52,685 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.175e+02 1.416e+02 1.784e+02 2.828e+02, threshold=2.832e+02, percent-clipped=4.0 +2024-08-03 14:29:56,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=141108.0, ans=0.0 +2024-08-03 14:29:57,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=141108.0, ans=0.07 +2024-08-03 14:30:01,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141144.66666666666, ans=0.125 +2024-08-03 14:30:09,688 INFO [train.py:1114] (3/4) Epoch 11, batch 2100, loss[loss=0.2064, simple_loss=0.2835, pruned_loss=0.06465, over 13551.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2903, pruned_loss=0.06356, over 2639655.61 frames. 
], batch size: 37, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:30:20,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141218.0, ans=0.1 +2024-08-03 14:30:25,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=141218.0, ans=0.125 +2024-08-03 14:30:27,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=141218.0, ans=0.0 +2024-08-03 14:30:31,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=141254.66666666666, ans=0.025 +2024-08-03 14:30:34,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=141254.66666666666, ans=0.07 +2024-08-03 14:30:35,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=141254.66666666666, ans=0.025 +2024-08-03 14:30:36,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=141254.66666666666, ans=0.0 +2024-08-03 14:30:50,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=141328.0, ans=0.2 +2024-08-03 14:30:56,317 INFO [train.py:1114] (3/4) Epoch 11, batch 2150, loss[loss=0.2038, simple_loss=0.2879, pruned_loss=0.0599, over 13566.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2893, pruned_loss=0.06286, over 2648375.56 frames. ], batch size: 36, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:31:03,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-08-03 14:31:05,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=141401.33333333334, ans=0.125 +2024-08-03 14:31:05,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=141401.33333333334, ans=0.0 +2024-08-03 14:31:24,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=141438.0, ans=0.125 +2024-08-03 14:31:27,809 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.279e+01 1.207e+02 1.412e+02 1.929e+02 3.002e+02, threshold=2.825e+02, percent-clipped=1.0 +2024-08-03 14:31:29,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-08-03 14:31:45,117 INFO [train.py:1114] (3/4) Epoch 11, batch 2200, loss[loss=0.2157, simple_loss=0.3018, pruned_loss=0.06479, over 13399.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2893, pruned_loss=0.06288, over 2645936.10 frames. 
], batch size: 39, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:32:03,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141621.33333333334, ans=0.0 +2024-08-03 14:32:03,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=141621.33333333334, ans=0.025 +2024-08-03 14:32:08,429 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0 +2024-08-03 14:32:27,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=141694.66666666666, ans=0.125 +2024-08-03 14:32:30,722 INFO [train.py:1114] (3/4) Epoch 11, batch 2250, loss[loss=0.1874, simple_loss=0.2764, pruned_loss=0.04916, over 13359.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2893, pruned_loss=0.06302, over 2643505.52 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:32:32,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141731.33333333334, ans=0.125 +2024-08-03 14:32:36,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=141731.33333333334, ans=0.125 +2024-08-03 14:32:38,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=141731.33333333334, ans=0.125 +2024-08-03 14:32:40,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=141768.0, ans=0.125 +2024-08-03 14:32:47,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141804.66666666666, ans=0.1 +2024-08-03 14:32:48,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=141804.66666666666, ans=0.2 +2024-08-03 14:32:52,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=141804.66666666666, ans=0.125 +2024-08-03 14:32:58,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.445e+01 1.221e+02 1.460e+02 1.800e+02 3.358e+02, threshold=2.920e+02, percent-clipped=4.0 +2024-08-03 14:33:01,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141841.33333333334, ans=0.125 +2024-08-03 14:33:16,159 INFO [train.py:1114] (3/4) Epoch 11, batch 2300, loss[loss=0.1736, simple_loss=0.2581, pruned_loss=0.04458, over 13575.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2878, pruned_loss=0.06253, over 2639619.05 frames. ], batch size: 33, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:33:28,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.81 vs. limit=15.0 +2024-08-03 14:33:33,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.02 vs. 
limit=15.0 +2024-08-03 14:33:36,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=141988.0, ans=0.125 +2024-08-03 14:33:36,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=141988.0, ans=0.125 +2024-08-03 14:34:00,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142061.33333333334, ans=0.125 +2024-08-03 14:34:02,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=142061.33333333334, ans=0.125 +2024-08-03 14:34:04,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0 +2024-08-03 14:34:05,406 INFO [train.py:1114] (3/4) Epoch 11, batch 2350, loss[loss=0.2029, simple_loss=0.2932, pruned_loss=0.05624, over 13547.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2877, pruned_loss=0.06247, over 2641964.29 frames. ], batch size: 38, lr: 1.13e-02, grad_scale: 16.0 +2024-08-03 14:34:24,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.50 vs. limit=22.5 +2024-08-03 14:34:27,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.08 vs. limit=15.0 +2024-08-03 14:34:34,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.779e+01 1.150e+02 1.456e+02 1.792e+02 2.996e+02, threshold=2.912e+02, percent-clipped=1.0 +2024-08-03 14:34:35,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=142208.0, ans=0.125 +2024-08-03 14:34:48,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=142244.66666666666, ans=0.025 +2024-08-03 14:34:50,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=142244.66666666666, ans=0.0 +2024-08-03 14:34:55,295 INFO [train.py:1114] (3/4) Epoch 11, batch 2400, loss[loss=0.205, simple_loss=0.2832, pruned_loss=0.06344, over 13538.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2886, pruned_loss=0.06313, over 2642852.88 frames. ], batch size: 35, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:35:08,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=142318.0, ans=0.0 +2024-08-03 14:35:13,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=142354.66666666666, ans=0.0 +2024-08-03 14:35:15,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=12.0 +2024-08-03 14:35:18,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.29 vs. 
limit=6.0 +2024-08-03 14:35:22,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=142391.33333333334, ans=0.0 +2024-08-03 14:35:27,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=142391.33333333334, ans=0.025 +2024-08-03 14:35:40,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.53 vs. limit=12.0 +2024-08-03 14:35:40,828 INFO [train.py:1114] (3/4) Epoch 11, batch 2450, loss[loss=0.2082, simple_loss=0.2891, pruned_loss=0.06363, over 13367.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2895, pruned_loss=0.06361, over 2633108.60 frames. ], batch size: 37, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:35:40,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=142464.66666666666, ans=0.2 +2024-08-03 14:35:43,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=142464.66666666666, ans=0.125 +2024-08-03 14:35:45,680 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.28 vs. limit=15.0 +2024-08-03 14:36:04,407 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:36:08,680 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.765e+01 1.220e+02 1.473e+02 1.922e+02 3.559e+02, threshold=2.946e+02, percent-clipped=1.0 +2024-08-03 14:36:08,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=142574.66666666666, ans=0.025 +2024-08-03 14:36:25,802 INFO [train.py:1114] (3/4) Epoch 11, batch 2500, loss[loss=0.257, simple_loss=0.3345, pruned_loss=0.08975, over 13400.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2887, pruned_loss=0.06288, over 2636919.72 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 32.0 +2024-08-03 14:36:27,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=142648.0, ans=0.125 +2024-08-03 14:36:28,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.63 vs. limit=10.0 +2024-08-03 14:36:38,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=142684.66666666666, ans=0.125 +2024-08-03 14:36:50,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=15.0 +2024-08-03 14:37:04,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=142794.66666666666, ans=0.0 +2024-08-03 14:37:09,716 INFO [train.py:1114] (3/4) Epoch 11, batch 2550, loss[loss=0.1694, simple_loss=0.2448, pruned_loss=0.04706, over 13556.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2893, pruned_loss=0.06336, over 2638621.62 frames. 
], batch size: 31, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:37:33,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=142904.66666666666, ans=0.0 +2024-08-03 14:37:36,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.364e+01 1.185e+02 1.436e+02 1.900e+02 4.163e+02, threshold=2.872e+02, percent-clipped=5.0 +2024-08-03 14:37:40,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142941.33333333334, ans=0.1 +2024-08-03 14:37:54,711 INFO [train.py:1114] (3/4) Epoch 11, batch 2600, loss[loss=0.2264, simple_loss=0.2985, pruned_loss=0.07717, over 13554.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2897, pruned_loss=0.06328, over 2637308.53 frames. ], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:37:56,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=143014.66666666666, ans=0.2 +2024-08-03 14:37:56,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=143014.66666666666, ans=0.125 +2024-08-03 14:37:58,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143014.66666666666, ans=0.125 +2024-08-03 14:38:00,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.75 vs. limit=22.5 +2024-08-03 14:38:10,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=143051.33333333334, ans=0.1 +2024-08-03 14:38:11,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=143088.0, ans=0.09899494936611666 +2024-08-03 14:38:38,303 INFO [train.py:1114] (3/4) Epoch 11, batch 2650, loss[loss=0.2292, simple_loss=0.3109, pruned_loss=0.07369, over 13385.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2906, pruned_loss=0.06369, over 2640536.20 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:38:39,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=143198.0, ans=0.0 +2024-08-03 14:38:47,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143234.66666666666, ans=0.125 +2024-08-03 14:38:51,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.37 vs. 
limit=15.0 +2024-08-03 14:38:52,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143234.66666666666, ans=0.125 +2024-08-03 14:39:04,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=143271.33333333334, ans=0.125 +2024-08-03 14:39:06,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=143308.0, ans=0.125 +2024-08-03 14:39:07,485 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.944e+01 1.197e+02 1.327e+02 1.649e+02 2.749e+02, threshold=2.654e+02, percent-clipped=0.0 +2024-08-03 14:39:23,391 INFO [train.py:1114] (3/4) Epoch 11, batch 2700, loss[loss=0.1872, simple_loss=0.282, pruned_loss=0.04621, over 13538.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2912, pruned_loss=0.06409, over 2637497.30 frames. ], batch size: 40, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:39:33,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=143418.0, ans=0.125 +2024-08-03 14:39:45,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=143454.66666666666, ans=0.09899494936611666 +2024-08-03 14:39:51,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.91 vs. limit=22.5 +2024-08-03 14:39:52,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=143491.33333333334, ans=0.0 +2024-08-03 14:40:01,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=143528.0, ans=0.2 +2024-08-03 14:40:02,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=143528.0, ans=0.0 +2024-08-03 14:40:06,617 INFO [train.py:1114] (3/4) Epoch 11, batch 2750, loss[loss=0.1774, simple_loss=0.2628, pruned_loss=0.04595, over 13330.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.29, pruned_loss=0.06358, over 2636011.92 frames. 
], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:09,465 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:40:18,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=143601.33333333334, ans=0.0 +2024-08-03 14:40:18,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=143601.33333333334, ans=0.125 +2024-08-03 14:40:20,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=143601.33333333334, ans=0.125 +2024-08-03 14:40:25,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=143638.0, ans=0.0 +2024-08-03 14:40:34,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.875e+01 1.140e+02 1.438e+02 1.760e+02 3.626e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 14:40:39,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143674.66666666666, ans=0.1 +2024-08-03 14:40:43,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.73 vs. limit=15.0 +2024-08-03 14:40:51,366 INFO [train.py:1114] (3/4) Epoch 11, batch 2800, loss[loss=0.259, simple_loss=0.3253, pruned_loss=0.09635, over 8887.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2896, pruned_loss=0.0635, over 2627541.75 frames. ], batch size: 96, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:40:52,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=143748.0, ans=0.125 +2024-08-03 14:40:56,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.96 vs. limit=15.0 +2024-08-03 14:41:01,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=143784.66666666666, ans=0.2 +2024-08-03 14:41:03,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=143784.66666666666, ans=0.0 +2024-08-03 14:41:13,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143821.33333333334, ans=0.1 +2024-08-03 14:41:22,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.23 vs. limit=15.0 +2024-08-03 14:41:23,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=143858.0, ans=0.125 +2024-08-03 14:41:27,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=143894.66666666666, ans=0.0 +2024-08-03 14:41:36,421 INFO [train.py:1114] (3/4) Epoch 11, batch 2850, loss[loss=0.1838, simple_loss=0.2659, pruned_loss=0.0509, over 13358.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2906, pruned_loss=0.0644, over 2621535.33 frames. 
], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:41:39,889 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:41:53,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=144004.66666666666, ans=0.125 +2024-08-03 14:42:01,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144041.33333333334, ans=0.1 +2024-08-03 14:42:04,515 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.280e+01 1.169e+02 1.350e+02 1.770e+02 2.759e+02, threshold=2.700e+02, percent-clipped=0.0 +2024-08-03 14:42:07,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=144041.33333333334, ans=0.125 +2024-08-03 14:42:13,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.42 vs. limit=12.0 +2024-08-03 14:42:19,096 INFO [train.py:1114] (3/4) Epoch 11, batch 2900, loss[loss=0.1913, simple_loss=0.2737, pruned_loss=0.05444, over 13375.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2916, pruned_loss=0.06432, over 2632213.78 frames. ], batch size: 36, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:42:20,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144114.66666666666, ans=0.125 +2024-08-03 14:42:23,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144114.66666666666, ans=0.125 +2024-08-03 14:42:28,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=144151.33333333334, ans=0.09899494936611666 +2024-08-03 14:42:28,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.76 vs. limit=10.0 +2024-08-03 14:42:52,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=144224.66666666666, ans=0.0 +2024-08-03 14:42:52,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144224.66666666666, ans=0.125 +2024-08-03 14:42:57,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=144261.33333333334, ans=0.125 +2024-08-03 14:43:01,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=144261.33333333334, ans=0.2 +2024-08-03 14:43:02,606 INFO [train.py:1114] (3/4) Epoch 11, batch 2950, loss[loss=0.1977, simple_loss=0.2789, pruned_loss=0.05824, over 13324.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2906, pruned_loss=0.06399, over 2631136.04 frames. ], batch size: 34, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:03,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.55 vs. 
limit=15.0 +2024-08-03 14:43:03,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144298.0, ans=0.125 +2024-08-03 14:43:08,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=144298.0, ans=0.0 +2024-08-03 14:43:31,433 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.788e+01 1.243e+02 1.438e+02 2.009e+02 3.771e+02, threshold=2.877e+02, percent-clipped=8.0 +2024-08-03 14:43:32,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=144408.0, ans=0.0 +2024-08-03 14:43:46,222 INFO [train.py:1114] (3/4) Epoch 11, batch 3000, loss[loss=0.2087, simple_loss=0.2894, pruned_loss=0.06402, over 13545.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2903, pruned_loss=0.06388, over 2631215.09 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:43:46,223 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 14:43:51,606 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.2030, 2.7333, 5.0513, 4.7023], device='cuda:3') +2024-08-03 14:43:56,413 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1797, simple_loss=0.2796, pruned_loss=0.03992, over 944034.00 frames. +2024-08-03 14:43:56,414 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 14:43:59,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=144481.33333333334, ans=0.125 +2024-08-03 14:44:07,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=144518.0, ans=0.2 +2024-08-03 14:44:07,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=144518.0, ans=0.0 +2024-08-03 14:44:13,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144554.66666666666, ans=0.0 +2024-08-03 14:44:18,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=144554.66666666666, ans=0.125 +2024-08-03 14:44:21,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.31 vs. limit=12.0 +2024-08-03 14:44:30,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144628.0, ans=0.1 +2024-08-03 14:44:33,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144628.0, ans=0.125 +2024-08-03 14:44:39,761 INFO [train.py:1114] (3/4) Epoch 11, batch 3050, loss[loss=0.1852, simple_loss=0.272, pruned_loss=0.04924, over 13551.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2908, pruned_loss=0.06398, over 2627316.86 frames. ], batch size: 35, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:45:05,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.93 vs. 
limit=22.5 +2024-08-03 14:45:05,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144774.66666666666, ans=0.1 +2024-08-03 14:45:08,192 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.491e+01 1.118e+02 1.274e+02 1.524e+02 2.549e+02, threshold=2.548e+02, percent-clipped=0.0 +2024-08-03 14:45:11,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0 +2024-08-03 14:45:22,752 INFO [train.py:1114] (3/4) Epoch 11, batch 3100, loss[loss=0.2034, simple_loss=0.2979, pruned_loss=0.05447, over 13334.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2904, pruned_loss=0.06373, over 2626699.19 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:45:25,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=144848.0, ans=0.0 +2024-08-03 14:45:28,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=144848.0, ans=0.125 +2024-08-03 14:45:35,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=144884.66666666666, ans=0.07 +2024-08-03 14:45:47,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0 +2024-08-03 14:46:00,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=144994.66666666666, ans=0.125 +2024-08-03 14:46:04,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=144994.66666666666, ans=0.025 +2024-08-03 14:46:07,335 INFO [train.py:1114] (3/4) Epoch 11, batch 3150, loss[loss=0.2177, simple_loss=0.3007, pruned_loss=0.06733, over 13045.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2899, pruned_loss=0.06343, over 2629107.18 frames. ], batch size: 48, lr: 1.12e-02, grad_scale: 16.0 +2024-08-03 14:46:14,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145031.33333333334, ans=0.125 +2024-08-03 14:46:17,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=145068.0, ans=0.125 +2024-08-03 14:46:20,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-08-03 14:46:31,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=145104.66666666666, ans=0.0 +2024-08-03 14:46:32,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. 
limit=15.0 +2024-08-03 14:46:32,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=145141.33333333334, ans=0.025 +2024-08-03 14:46:36,203 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.294e+01 1.169e+02 1.363e+02 1.667e+02 3.402e+02, threshold=2.726e+02, percent-clipped=3.0 +2024-08-03 14:46:47,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=145178.0, ans=0.025 +2024-08-03 14:46:49,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.87 vs. limit=22.5 +2024-08-03 14:46:51,189 INFO [train.py:1114] (3/4) Epoch 11, batch 3200, loss[loss=0.1923, simple_loss=0.2774, pruned_loss=0.05366, over 13553.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2898, pruned_loss=0.06322, over 2635231.07 frames. ], batch size: 37, lr: 1.12e-02, grad_scale: 32.0 +2024-08-03 14:46:58,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-08-03 14:47:11,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=145288.0, ans=0.05 +2024-08-03 14:47:33,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.59 vs. limit=15.0 +2024-08-03 14:47:35,718 INFO [train.py:1114] (3/4) Epoch 11, batch 3250, loss[loss=0.2207, simple_loss=0.3031, pruned_loss=0.06912, over 13403.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2905, pruned_loss=0.06327, over 2638877.43 frames. ], batch size: 38, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:47:48,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145434.66666666666, ans=0.125 +2024-08-03 14:47:52,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0 +2024-08-03 14:47:59,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145471.33333333334, ans=0.125 +2024-08-03 14:48:02,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.43 vs. limit=15.0 +2024-08-03 14:48:04,048 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.409e+01 1.172e+02 1.394e+02 1.962e+02 3.481e+02, threshold=2.788e+02, percent-clipped=6.0 +2024-08-03 14:48:05,013 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 14:48:05,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=145508.0, ans=0.125 +2024-08-03 14:48:09,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=145544.66666666666, ans=0.0 +2024-08-03 14:48:15,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.24 vs. 
limit=15.0 +2024-08-03 14:48:18,819 INFO [train.py:1114] (3/4) Epoch 11, batch 3300, loss[loss=0.2324, simple_loss=0.3186, pruned_loss=0.07306, over 12905.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2889, pruned_loss=0.06265, over 2641045.78 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:48:21,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145581.33333333334, ans=0.1 +2024-08-03 14:48:23,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-08-03 14:48:28,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=145618.0, ans=0.125 +2024-08-03 14:48:31,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=145618.0, ans=0.0 +2024-08-03 14:48:38,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=145654.66666666666, ans=0.2 +2024-08-03 14:49:07,255 INFO [train.py:1114] (3/4) Epoch 11, batch 3350, loss[loss=0.2095, simple_loss=0.2932, pruned_loss=0.0629, over 13028.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2901, pruned_loss=0.06342, over 2630331.91 frames. ], batch size: 48, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:49:17,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145764.66666666666, ans=0.125 +2024-08-03 14:49:31,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=145801.33333333334, ans=0.2 +2024-08-03 14:49:34,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.48 vs. limit=15.0 +2024-08-03 14:49:36,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=145838.0, ans=0.125 +2024-08-03 14:49:48,379 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.672e+01 1.163e+02 1.273e+02 1.475e+02 2.297e+02, threshold=2.547e+02, percent-clipped=0.0 +2024-08-03 14:50:39,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.53 vs. limit=10.0 +2024-08-03 14:50:52,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=145911.33333333334, ans=0.0 +2024-08-03 14:50:56,100 INFO [train.py:1114] (3/4) Epoch 11, batch 3400, loss[loss=0.1912, simple_loss=0.2582, pruned_loss=0.0621, over 13560.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2895, pruned_loss=0.06311, over 2626134.88 frames. ], batch size: 31, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:51:09,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145984.66666666666, ans=0.1 +2024-08-03 14:51:41,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146058.0, ans=0.125 +2024-08-03 14:51:57,624 INFO [train.py:1114] (3/4) Epoch 11, batch 3450, loss[loss=0.2174, simple_loss=0.3066, pruned_loss=0.06408, over 12847.00 frames. 
], tot_loss[loss=0.2076, simple_loss=0.2895, pruned_loss=0.06287, over 2628635.00 frames. ], batch size: 52, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:51:59,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=146131.33333333334, ans=0.125 +2024-08-03 14:52:01,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.12 vs. limit=15.0 +2024-08-03 14:52:07,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=146131.33333333334, ans=0.2 +2024-08-03 14:52:13,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.23 vs. limit=22.5 +2024-08-03 14:52:22,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=146204.66666666666, ans=0.2 +2024-08-03 14:52:28,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.083e+01 1.190e+02 1.396e+02 1.805e+02 2.896e+02, threshold=2.793e+02, percent-clipped=1.0 +2024-08-03 14:52:44,388 INFO [train.py:1114] (3/4) Epoch 11, batch 3500, loss[loss=0.2107, simple_loss=0.2885, pruned_loss=0.06644, over 13527.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2889, pruned_loss=0.06304, over 2631306.74 frames. ], batch size: 34, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:52:57,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=146351.33333333334, ans=0.2 +2024-08-03 14:54:41,870 INFO [train.py:1114] (3/4) Epoch 11, batch 3550, loss[loss=0.2607, simple_loss=0.3342, pruned_loss=0.09361, over 12568.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2917, pruned_loss=0.06476, over 2629889.72 frames. ], batch size: 58, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:54:57,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=15.0 +2024-08-03 14:55:03,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=146571.33333333334, ans=0.09899494936611666 +2024-08-03 14:55:11,610 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.699e+01 1.206e+02 1.319e+02 1.556e+02 2.603e+02, threshold=2.638e+02, percent-clipped=0.0 +2024-08-03 14:55:16,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146608.0, ans=0.125 +2024-08-03 14:55:18,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=146644.66666666666, ans=0.2 +2024-08-03 14:55:25,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.49 vs. limit=6.0 +2024-08-03 14:55:28,469 INFO [train.py:1114] (3/4) Epoch 11, batch 3600, loss[loss=0.2861, simple_loss=0.3403, pruned_loss=0.116, over 8916.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2971, pruned_loss=0.06949, over 2485486.78 frames. 
], batch size: 96, lr: 1.11e-02, grad_scale: 32.0 +2024-08-03 14:55:32,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=146681.33333333334, ans=0.2 +2024-08-03 14:55:39,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.19 vs. limit=22.5 +2024-08-03 14:55:43,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=146718.0, ans=0.125 +2024-08-03 14:55:48,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=15.0 +2024-08-03 14:55:52,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=146754.66666666666, ans=0.125 +2024-08-03 14:55:55,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=146791.33333333334, ans=0.025 +2024-08-03 14:55:57,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=146791.33333333334, ans=0.1 +2024-08-03 14:56:01,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=146791.33333333334, ans=0.125 +2024-08-03 14:56:58,470 INFO [train.py:1114] (3/4) Epoch 12, batch 0, loss[loss=0.1843, simple_loss=0.264, pruned_loss=0.05231, over 13344.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.264, pruned_loss=0.05231, over 13344.00 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:56:58,535 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 14:57:05,804 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.4393, 2.7454, 2.5962, 1.9032], device='cuda:3') +2024-08-03 14:57:09,822 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.1815, simple_loss=0.2827, pruned_loss=0.04015, over 944034.00 frames. +2024-08-03 14:57:09,822 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 14:57:11,310 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-08-03 14:57:16,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146828.0, ans=0.1 +2024-08-03 14:57:29,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146901.33333333334, ans=0.125 +2024-08-03 14:57:31,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=146901.33333333334, ans=0.125 +2024-08-03 14:57:49,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.441e+01 1.252e+02 1.419e+02 1.568e+02 2.905e+02, threshold=2.838e+02, percent-clipped=2.0 +2024-08-03 14:57:55,161 INFO [train.py:1114] (3/4) Epoch 12, batch 50, loss[loss=0.1735, simple_loss=0.2588, pruned_loss=0.04416, over 13429.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2908, pruned_loss=0.06334, over 577617.88 frames. 
], batch size: 32, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:58:02,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.37 vs. limit=22.5 +2024-08-03 14:58:05,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147048.0, ans=0.1 +2024-08-03 14:58:08,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=147048.0, ans=0.125 +2024-08-03 14:58:10,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147048.0, ans=0.1 +2024-08-03 14:58:12,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=147084.66666666666, ans=0.125 +2024-08-03 14:58:16,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147084.66666666666, ans=0.1 +2024-08-03 14:58:24,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=147121.33333333334, ans=15.0 +2024-08-03 14:58:29,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=147121.33333333334, ans=0.025 +2024-08-03 14:58:40,615 INFO [train.py:1114] (3/4) Epoch 12, batch 100, loss[loss=0.2056, simple_loss=0.2803, pruned_loss=0.06544, over 13532.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2915, pruned_loss=0.06316, over 1025182.83 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:58:42,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=147194.66666666666, ans=0.0 +2024-08-03 14:58:44,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147194.66666666666, ans=0.1 +2024-08-03 14:58:54,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=147231.33333333334, ans=0.125 +2024-08-03 14:59:17,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147304.66666666666, ans=0.1 +2024-08-03 14:59:18,322 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=15.0 +2024-08-03 14:59:22,269 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.865e+01 1.262e+02 1.549e+02 1.868e+02 3.478e+02, threshold=3.099e+02, percent-clipped=1.0 +2024-08-03 14:59:27,475 INFO [train.py:1114] (3/4) Epoch 12, batch 150, loss[loss=0.1676, simple_loss=0.2474, pruned_loss=0.04394, over 13438.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2887, pruned_loss=0.06232, over 1386224.12 frames. ], batch size: 32, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 14:59:49,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.87 vs. 
limit=12.0 +2024-08-03 14:59:54,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=147451.33333333334, ans=0.125 +2024-08-03 15:00:17,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=147561.33333333334, ans=0.125 +2024-08-03 15:00:18,030 INFO [train.py:1114] (3/4) Epoch 12, batch 200, loss[loss=0.2017, simple_loss=0.2919, pruned_loss=0.05569, over 12442.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2872, pruned_loss=0.06182, over 1665068.71 frames. ], batch size: 58, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:00:20,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=147561.33333333334, ans=0.04949747468305833 +2024-08-03 15:00:25,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147561.33333333334, ans=0.1 +2024-08-03 15:00:43,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=147671.33333333334, ans=0.0 +2024-08-03 15:00:57,968 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.496e+01 1.132e+02 1.278e+02 1.609e+02 2.884e+02, threshold=2.557e+02, percent-clipped=0.0 +2024-08-03 15:00:59,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=147708.0, ans=0.125 +2024-08-03 15:01:00,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=147708.0, ans=0.0 +2024-08-03 15:01:01,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=147708.0, ans=15.0 +2024-08-03 15:01:02,538 INFO [train.py:1114] (3/4) Epoch 12, batch 250, loss[loss=0.2311, simple_loss=0.3186, pruned_loss=0.07175, over 13296.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2869, pruned_loss=0.06135, over 1884265.44 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:01:05,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=147744.66666666666, ans=0.0 +2024-08-03 15:01:12,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0 +2024-08-03 15:01:17,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=147781.33333333334, ans=0.0 +2024-08-03 15:01:23,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=147818.0, ans=0.2 +2024-08-03 15:01:23,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=147818.0, ans=0.125 +2024-08-03 15:01:28,802 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:01:48,777 INFO [train.py:1114] (3/4) Epoch 12, batch 300, loss[loss=0.2089, simple_loss=0.2952, pruned_loss=0.06124, over 13438.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.286, pruned_loss=0.0604, over 2051340.79 frames. 
], batch size: 42, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:02:15,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=148038.0, ans=0.2 +2024-08-03 15:02:19,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148038.0, ans=0.1 +2024-08-03 15:02:20,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=148038.0, ans=0.2 +2024-08-03 15:02:25,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=148074.66666666666, ans=0.0 +2024-08-03 15:02:29,620 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.695e+01 1.130e+02 1.266e+02 1.669e+02 3.180e+02, threshold=2.531e+02, percent-clipped=2.0 +2024-08-03 15:02:34,181 INFO [train.py:1114] (3/4) Epoch 12, batch 350, loss[loss=0.1957, simple_loss=0.2772, pruned_loss=0.05707, over 13591.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2876, pruned_loss=0.06119, over 2182320.29 frames. ], batch size: 33, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:02:51,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=148148.0, ans=0.125 +2024-08-03 15:03:14,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=148258.0, ans=0.2 +2024-08-03 15:03:15,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=148258.0, ans=0.0 +2024-08-03 15:03:23,958 INFO [train.py:1114] (3/4) Epoch 12, batch 400, loss[loss=0.2179, simple_loss=0.3058, pruned_loss=0.065, over 13346.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2866, pruned_loss=0.06059, over 2286301.99 frames. ], batch size: 37, lr: 1.06e-02, grad_scale: 32.0 +2024-08-03 15:03:28,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.88 vs. limit=22.5 +2024-08-03 15:04:10,395 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.063e+01 1.155e+02 1.349e+02 1.691e+02 2.771e+02, threshold=2.698e+02, percent-clipped=3.0 +2024-08-03 15:04:30,863 INFO [train.py:1114] (3/4) Epoch 12, batch 450, loss[loss=0.1859, simple_loss=0.2743, pruned_loss=0.04877, over 13557.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2875, pruned_loss=0.06147, over 2360201.48 frames. ], batch size: 38, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:04:33,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=148478.0, ans=0.035 +2024-08-03 15:04:36,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.49 vs. limit=22.5 +2024-08-03 15:04:54,681 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:05:16,124 INFO [train.py:1114] (3/4) Epoch 12, batch 500, loss[loss=0.2305, simple_loss=0.3136, pruned_loss=0.07372, over 13425.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2873, pruned_loss=0.06142, over 2425157.42 frames. 
], batch size: 43, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:05:20,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=148661.33333333334, ans=0.025 +2024-08-03 15:05:30,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148698.0, ans=0.1 +2024-08-03 15:05:35,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=148734.66666666666, ans=0.125 +2024-08-03 15:05:37,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=148734.66666666666, ans=0.0 +2024-08-03 15:05:40,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148734.66666666666, ans=0.125 +2024-08-03 15:05:54,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=148808.0, ans=0.0 +2024-08-03 15:05:59,426 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.174e+02 1.357e+02 1.973e+02 3.338e+02, threshold=2.713e+02, percent-clipped=6.0 +2024-08-03 15:06:02,944 INFO [train.py:1114] (3/4) Epoch 12, batch 550, loss[loss=0.1879, simple_loss=0.2761, pruned_loss=0.04983, over 13009.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2871, pruned_loss=0.0611, over 2466668.41 frames. ], batch size: 48, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:06:15,726 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:06:26,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.66 vs. limit=15.0 +2024-08-03 15:06:26,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.03 vs. limit=15.0 +2024-08-03 15:06:32,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-08-03 15:06:36,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=148954.66666666666, ans=0.0 +2024-08-03 15:06:50,050 INFO [train.py:1114] (3/4) Epoch 12, batch 600, loss[loss=0.247, simple_loss=0.3249, pruned_loss=0.08449, over 13310.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2868, pruned_loss=0.06089, over 2506878.84 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2024-08-03 15:06:51,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149028.0, ans=0.1 +2024-08-03 15:06:51,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=149028.0, ans=0.09899494936611666 +2024-08-03 15:06:56,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. 
limit=10.0 +2024-08-03 15:07:22,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149138.0, ans=0.1 +2024-08-03 15:07:22,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=149138.0, ans=0.0 +2024-08-03 15:07:33,068 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.645e+01 1.231e+02 1.471e+02 1.906e+02 4.499e+02, threshold=2.942e+02, percent-clipped=14.0 +2024-08-03 15:07:33,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149174.66666666666, ans=0.1 +2024-08-03 15:07:36,525 INFO [train.py:1114] (3/4) Epoch 12, batch 650, loss[loss=0.2017, simple_loss=0.2875, pruned_loss=0.05798, over 13549.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2861, pruned_loss=0.06036, over 2542548.99 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:07:43,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=149211.33333333334, ans=0.05 +2024-08-03 15:08:11,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=149321.33333333334, ans=0.0 +2024-08-03 15:08:19,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149358.0, ans=0.125 +2024-08-03 15:08:21,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149358.0, ans=0.1 +2024-08-03 15:08:24,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=149358.0, ans=0.07 +2024-08-03 15:08:25,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=149394.66666666666, ans=0.0 +2024-08-03 15:08:26,285 INFO [train.py:1114] (3/4) Epoch 12, batch 700, loss[loss=0.1912, simple_loss=0.2762, pruned_loss=0.05314, over 13516.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2874, pruned_loss=0.06101, over 2564746.94 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:08:38,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=12.0 +2024-08-03 15:09:06,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=149541.33333333334, ans=0.2 +2024-08-03 15:09:07,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.082e+01 1.154e+02 1.315e+02 1.690e+02 3.404e+02, threshold=2.630e+02, percent-clipped=2.0 +2024-08-03 15:09:08,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=149541.33333333334, ans=0.07 +2024-08-03 15:09:11,177 INFO [train.py:1114] (3/4) Epoch 12, batch 750, loss[loss=0.2213, simple_loss=0.3124, pruned_loss=0.06506, over 13353.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2859, pruned_loss=0.05997, over 2582116.13 frames. 
], batch size: 37, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:09:25,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149614.66666666666, ans=0.1 +2024-08-03 15:09:25,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=149614.66666666666, ans=0.125 +2024-08-03 15:09:26,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=149614.66666666666, ans=0.0 +2024-08-03 15:09:38,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=149688.0, ans=0.0 +2024-08-03 15:09:43,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=149688.0, ans=0.125 +2024-08-03 15:09:46,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=149724.66666666666, ans=0.125 +2024-08-03 15:09:53,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=149724.66666666666, ans=0.125 +2024-08-03 15:09:56,674 INFO [train.py:1114] (3/4) Epoch 12, batch 800, loss[loss=0.165, simple_loss=0.2492, pruned_loss=0.04042, over 13356.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2856, pruned_loss=0.06007, over 2597005.49 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:10:04,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=149761.33333333334, ans=0.125 +2024-08-03 15:10:06,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=149798.0, ans=0.125 +2024-08-03 15:10:06,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=149798.0, ans=0.125 +2024-08-03 15:10:11,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=149798.0, ans=0.2 +2024-08-03 15:10:18,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=149834.66666666666, ans=0.125 +2024-08-03 15:10:23,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=149834.66666666666, ans=0.125 +2024-08-03 15:10:41,752 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.822e+01 1.144e+02 1.285e+02 1.607e+02 2.448e+02, threshold=2.570e+02, percent-clipped=0.0 +2024-08-03 15:10:43,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=149908.0, ans=0.025 +2024-08-03 15:10:45,392 INFO [train.py:1114] (3/4) Epoch 12, batch 850, loss[loss=0.2064, simple_loss=0.3024, pruned_loss=0.05522, over 13334.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2853, pruned_loss=0.06003, over 2609435.31 frames. 
], batch size: 40, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:11:02,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=150018.0, ans=0.2 +2024-08-03 15:11:13,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=150054.66666666666, ans=0.125 +2024-08-03 15:11:27,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150091.33333333334, ans=0.0 +2024-08-03 15:11:34,312 INFO [train.py:1114] (3/4) Epoch 12, batch 900, loss[loss=0.1905, simple_loss=0.2697, pruned_loss=0.05569, over 13339.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2859, pruned_loss=0.06047, over 2612284.17 frames. ], batch size: 33, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:11:34,601 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=7.298e-01 +2024-08-03 15:11:36,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150128.0, ans=0.125 +2024-08-03 15:12:00,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=150238.0, ans=0.2 +2024-08-03 15:12:02,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.41 vs. limit=15.0 +2024-08-03 15:12:15,740 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.906e+01 1.165e+02 1.316e+02 1.823e+02 3.379e+02, threshold=2.632e+02, percent-clipped=3.0 +2024-08-03 15:12:19,481 INFO [train.py:1114] (3/4) Epoch 12, batch 950, loss[loss=0.1855, simple_loss=0.2671, pruned_loss=0.05201, over 13519.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2859, pruned_loss=0.06055, over 2612089.37 frames. ], batch size: 34, lr: 1.05e-02, grad_scale: 32.0 +2024-08-03 15:12:26,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=150311.33333333334, ans=0.025 +2024-08-03 15:12:28,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150348.0, ans=0.1 +2024-08-03 15:12:40,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150384.66666666666, ans=0.1 +2024-08-03 15:12:46,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.94 vs. limit=15.0 +2024-08-03 15:12:51,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.45 vs. 
limit=6.0 +2024-08-03 15:13:00,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=150458.0, ans=0.125 +2024-08-03 15:13:02,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=150458.0, ans=0.0 +2024-08-03 15:13:04,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150494.66666666666, ans=0.1 +2024-08-03 15:13:05,465 INFO [train.py:1114] (3/4) Epoch 12, batch 1000, loss[loss=0.203, simple_loss=0.2836, pruned_loss=0.06119, over 13364.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2873, pruned_loss=0.06122, over 2610722.99 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:13:16,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=150531.33333333334, ans=0.025 +2024-08-03 15:13:25,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=150568.0, ans=0.0 +2024-08-03 15:13:28,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=150568.0, ans=0.0 +2024-08-03 15:13:36,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-08-03 15:13:38,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=15.0 +2024-08-03 15:13:39,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.50 vs. limit=10.0 +2024-08-03 15:13:47,790 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.724e+01 1.124e+02 1.317e+02 1.509e+02 2.289e+02, threshold=2.634e+02, percent-clipped=0.0 +2024-08-03 15:13:52,465 INFO [train.py:1114] (3/4) Epoch 12, batch 1050, loss[loss=0.1835, simple_loss=0.2776, pruned_loss=0.04471, over 13581.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2869, pruned_loss=0.061, over 2615467.89 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:13:56,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=150678.0, ans=0.0 +2024-08-03 15:14:06,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=150714.66666666666, ans=10.0 +2024-08-03 15:14:18,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=12.0 +2024-08-03 15:14:20,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150788.0, ans=0.1 +2024-08-03 15:14:31,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=150824.66666666666, ans=0.2 +2024-08-03 15:14:39,483 INFO [train.py:1114] (3/4) Epoch 12, batch 1100, loss[loss=0.201, simple_loss=0.283, pruned_loss=0.05951, over 13557.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2872, pruned_loss=0.06153, over 2619633.54 frames. 
], batch size: 36, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:15:26,426 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.180e+02 1.359e+02 1.591e+02 2.320e+02, threshold=2.719e+02, percent-clipped=0.0 +2024-08-03 15:15:26,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=151008.0, ans=0.125 +2024-08-03 15:15:29,125 INFO [train.py:1114] (3/4) Epoch 12, batch 1150, loss[loss=0.1867, simple_loss=0.2686, pruned_loss=0.05241, over 13549.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.287, pruned_loss=0.06164, over 2618382.81 frames. ], batch size: 36, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:15:45,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=151081.33333333334, ans=0.0 +2024-08-03 15:15:50,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=151118.0, ans=0.125 +2024-08-03 15:15:56,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151154.66666666666, ans=0.1 +2024-08-03 15:16:05,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=151191.33333333334, ans=0.05 +2024-08-03 15:16:15,319 INFO [train.py:1114] (3/4) Epoch 12, batch 1200, loss[loss=0.2248, simple_loss=0.3064, pruned_loss=0.07158, over 13557.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2874, pruned_loss=0.06165, over 2616169.31 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:16:27,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.60 vs. limit=22.5 +2024-08-03 15:16:32,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0 +2024-08-03 15:16:40,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=151301.33333333334, ans=0.125 +2024-08-03 15:16:50,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=151374.66666666666, ans=0.2 +2024-08-03 15:16:58,394 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.940e+01 1.145e+02 1.336e+02 1.664e+02 3.085e+02, threshold=2.672e+02, percent-clipped=3.0 +2024-08-03 15:17:00,269 INFO [train.py:1114] (3/4) Epoch 12, batch 1250, loss[loss=0.2119, simple_loss=0.2961, pruned_loss=0.06387, over 13423.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2874, pruned_loss=0.06146, over 2628254.48 frames. 
], batch size: 42, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:17:00,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=151411.33333333334, ans=0.035 +2024-08-03 15:17:12,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=151448.0, ans=0.04949747468305833 +2024-08-03 15:17:18,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151484.66666666666, ans=0.125 +2024-08-03 15:17:19,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=151484.66666666666, ans=0.125 +2024-08-03 15:17:40,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.00 vs. limit=15.0 +2024-08-03 15:17:49,375 INFO [train.py:1114] (3/4) Epoch 12, batch 1300, loss[loss=0.215, simple_loss=0.2974, pruned_loss=0.06626, over 12813.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2867, pruned_loss=0.06121, over 2631263.07 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:17:53,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=151594.66666666666, ans=0.125 +2024-08-03 15:18:00,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=151631.33333333334, ans=0.025 +2024-08-03 15:18:01,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151631.33333333334, ans=0.125 +2024-08-03 15:18:03,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=151631.33333333334, ans=0.125 +2024-08-03 15:18:08,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151668.0, ans=0.125 +2024-08-03 15:18:14,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=151668.0, ans=0.2 +2024-08-03 15:18:24,801 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.09 vs. limit=10.0 +2024-08-03 15:18:34,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.477e+01 1.244e+02 1.482e+02 1.823e+02 3.057e+02, threshold=2.965e+02, percent-clipped=1.0 +2024-08-03 15:18:36,562 INFO [train.py:1114] (3/4) Epoch 12, batch 1350, loss[loss=0.2243, simple_loss=0.3032, pruned_loss=0.07271, over 13542.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2862, pruned_loss=0.06071, over 2637744.64 frames. ], batch size: 37, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:18:36,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=151778.0, ans=0.04949747468305833 +2024-08-03 15:18:48,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=151814.66666666666, ans=0.025 +2024-08-03 15:18:55,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.83 vs. 
limit=15.0 +2024-08-03 15:18:58,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=151851.33333333334, ans=0.125 +2024-08-03 15:19:13,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=151888.0, ans=0.125 +2024-08-03 15:19:25,330 INFO [train.py:1114] (3/4) Epoch 12, batch 1400, loss[loss=0.1934, simple_loss=0.2702, pruned_loss=0.05826, over 13255.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.286, pruned_loss=0.06067, over 2641732.64 frames. ], batch size: 31, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:19:25,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151961.33333333334, ans=0.1 +2024-08-03 15:19:30,884 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:19:31,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=151961.33333333334, ans=0.0 +2024-08-03 15:20:09,512 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.408e+01 1.177e+02 1.443e+02 1.884e+02 3.508e+02, threshold=2.887e+02, percent-clipped=1.0 +2024-08-03 15:20:11,338 INFO [train.py:1114] (3/4) Epoch 12, batch 1450, loss[loss=0.2389, simple_loss=0.3234, pruned_loss=0.07721, over 13427.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.287, pruned_loss=0.06114, over 2640287.10 frames. ], batch size: 43, lr: 1.05e-02, grad_scale: 16.0 +2024-08-03 15:20:19,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=152181.33333333334, ans=0.025 +2024-08-03 15:20:24,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=152181.33333333334, ans=0.2 +2024-08-03 15:20:31,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0 +2024-08-03 15:20:37,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.09 vs. limit=15.0 +2024-08-03 15:20:39,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0 +2024-08-03 15:20:47,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152291.33333333334, ans=0.1 +2024-08-03 15:20:50,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=152291.33333333334, ans=0.125 +2024-08-03 15:20:53,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-08-03 15:20:56,411 INFO [train.py:1114] (3/4) Epoch 12, batch 1500, loss[loss=0.1903, simple_loss=0.2748, pruned_loss=0.0529, over 13409.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.287, pruned_loss=0.06097, over 2640511.73 frames. 
], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:21:10,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152364.66666666666, ans=0.125 +2024-08-03 15:21:19,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=152401.33333333334, ans=0.025 +2024-08-03 15:21:24,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=152401.33333333334, ans=0.125 +2024-08-03 15:21:43,773 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.575e+01 1.302e+02 1.536e+02 1.991e+02 2.999e+02, threshold=3.072e+02, percent-clipped=1.0 +2024-08-03 15:21:47,466 INFO [train.py:1114] (3/4) Epoch 12, batch 1550, loss[loss=0.2225, simple_loss=0.304, pruned_loss=0.07049, over 13420.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2873, pruned_loss=0.06131, over 2631434.65 frames. ], batch size: 41, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:22:07,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152548.0, ans=0.1 +2024-08-03 15:22:12,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152584.66666666666, ans=0.1 +2024-08-03 15:22:14,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152584.66666666666, ans=0.0 +2024-08-03 15:22:17,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=152621.33333333334, ans=0.2 +2024-08-03 15:22:17,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=152621.33333333334, ans=0.0 +2024-08-03 15:22:26,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.18 vs. limit=22.5 +2024-08-03 15:22:32,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152658.0, ans=0.1 +2024-08-03 15:22:33,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152658.0, ans=0.1 +2024-08-03 15:22:38,650 INFO [train.py:1114] (3/4) Epoch 12, batch 1600, loss[loss=0.1972, simple_loss=0.2768, pruned_loss=0.05878, over 13583.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2872, pruned_loss=0.06143, over 2624977.40 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 32.0 +2024-08-03 15:22:44,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=152694.66666666666, ans=0.025 +2024-08-03 15:22:58,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=152768.0, ans=0.2 +2024-08-03 15:23:03,640 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.93 vs. 
limit=10.0 +2024-08-03 15:23:15,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152841.33333333334, ans=0.1 +2024-08-03 15:23:22,367 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.356e+01 1.191e+02 1.409e+02 1.604e+02 3.528e+02, threshold=2.818e+02, percent-clipped=1.0 +2024-08-03 15:23:24,254 INFO [train.py:1114] (3/4) Epoch 12, batch 1650, loss[loss=0.2045, simple_loss=0.2926, pruned_loss=0.05823, over 13292.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2869, pruned_loss=0.06142, over 2621071.38 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 32.0 +2024-08-03 15:23:31,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=152878.0, ans=0.2 +2024-08-03 15:23:40,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=152914.66666666666, ans=0.0 +2024-08-03 15:23:56,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=152988.0, ans=0.0 +2024-08-03 15:24:04,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=153024.66666666666, ans=0.125 +2024-08-03 15:24:09,368 INFO [train.py:1114] (3/4) Epoch 12, batch 1700, loss[loss=0.1997, simple_loss=0.2746, pruned_loss=0.06235, over 13253.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2869, pruned_loss=0.06128, over 2630198.88 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 32.0 +2024-08-03 15:24:14,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=153061.33333333334, ans=0.0 +2024-08-03 15:24:19,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=153098.0, ans=0.125 +2024-08-03 15:24:51,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=153208.0, ans=0.0 +2024-08-03 15:24:57,291 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.203e+01 1.265e+02 1.510e+02 1.884e+02 3.458e+02, threshold=3.019e+02, percent-clipped=4.0 +2024-08-03 15:24:58,242 INFO [train.py:1114] (3/4) Epoch 12, batch 1750, loss[loss=0.182, simple_loss=0.2588, pruned_loss=0.05258, over 13559.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2871, pruned_loss=0.06146, over 2633803.61 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:25:10,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=153281.33333333334, ans=10.0 +2024-08-03 15:25:12,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.12 vs. 
limit=15.0 +2024-08-03 15:25:21,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=153318.0, ans=0.0 +2024-08-03 15:25:32,375 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:25:37,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=153391.33333333334, ans=0.2 +2024-08-03 15:25:45,845 INFO [train.py:1114] (3/4) Epoch 12, batch 1800, loss[loss=0.1996, simple_loss=0.296, pruned_loss=0.05166, over 13554.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.288, pruned_loss=0.06181, over 2635318.32 frames. ], batch size: 38, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:25:50,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153428.0, ans=0.1 +2024-08-03 15:25:58,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=153464.66666666666, ans=0.0 +2024-08-03 15:25:59,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153464.66666666666, ans=0.1 +2024-08-03 15:26:14,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153538.0, ans=0.125 +2024-08-03 15:26:21,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=153538.0, ans=0.125 +2024-08-03 15:26:31,783 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.247e+01 1.215e+02 1.564e+02 1.986e+02 3.414e+02, threshold=3.127e+02, percent-clipped=2.0 +2024-08-03 15:26:32,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153611.33333333334, ans=0.1 +2024-08-03 15:26:32,684 INFO [train.py:1114] (3/4) Epoch 12, batch 1850, loss[loss=0.1865, simple_loss=0.2811, pruned_loss=0.04594, over 13403.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2877, pruned_loss=0.06151, over 2638386.77 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:26:32,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153611.33333333334, ans=0.125 +2024-08-03 15:26:33,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=153611.33333333334, ans=0.2 +2024-08-03 15:26:34,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-08-03 15:26:47,437 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:26:48,318 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.544e-03 +2024-08-03 15:26:57,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=22.5 +2024-08-03 15:27:17,698 INFO [train.py:1114] (3/4) Epoch 12, batch 1900, loss[loss=0.2065, simple_loss=0.2997, pruned_loss=0.0567, over 13308.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2885, pruned_loss=0.0617, over 2640484.88 frames. 
], batch size: 40, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:27:18,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153794.66666666666, ans=0.1 +2024-08-03 15:27:20,588 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:27:28,422 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.70 vs. limit=5.0 +2024-08-03 15:27:42,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=153868.0, ans=0.125 +2024-08-03 15:28:00,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=153941.33333333334, ans=0.05 +2024-08-03 15:28:02,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.66 vs. limit=15.0 +2024-08-03 15:28:02,827 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.586e+01 1.133e+02 1.329e+02 1.671e+02 4.322e+02, threshold=2.659e+02, percent-clipped=4.0 +2024-08-03 15:28:02,880 INFO [train.py:1114] (3/4) Epoch 12, batch 1950, loss[loss=0.1901, simple_loss=0.2725, pruned_loss=0.05383, over 13561.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2899, pruned_loss=0.06237, over 2647188.89 frames. ], batch size: 36, lr: 1.04e-02, grad_scale: 8.0 +2024-08-03 15:28:08,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.65 vs. limit=22.5 +2024-08-03 15:28:15,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=154014.66666666666, ans=0.125 +2024-08-03 15:28:19,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=154014.66666666666, ans=0.2 +2024-08-03 15:28:32,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154051.33333333334, ans=0.1 +2024-08-03 15:28:51,877 INFO [train.py:1114] (3/4) Epoch 12, batch 2000, loss[loss=0.1804, simple_loss=0.2564, pruned_loss=0.05216, over 13547.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2904, pruned_loss=0.06249, over 2637131.34 frames. ], batch size: 31, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:29:07,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154198.0, ans=0.1 +2024-08-03 15:29:08,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=15.0 +2024-08-03 15:29:40,805 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.317e+01 1.213e+02 1.428e+02 1.743e+02 2.865e+02, threshold=2.857e+02, percent-clipped=1.0 +2024-08-03 15:29:40,842 INFO [train.py:1114] (3/4) Epoch 12, batch 2050, loss[loss=0.1653, simple_loss=0.2425, pruned_loss=0.044, over 13420.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.289, pruned_loss=0.06204, over 2633764.21 frames. 
], batch size: 32, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:29:43,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154344.66666666666, ans=0.125 +2024-08-03 15:29:44,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=154344.66666666666, ans=0.0 +2024-08-03 15:29:50,876 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:29:58,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=154418.0, ans=0.0 +2024-08-03 15:30:00,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=154418.0, ans=0.0 +2024-08-03 15:30:19,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154491.33333333334, ans=0.125 +2024-08-03 15:30:20,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.60 vs. limit=15.0 +2024-08-03 15:30:25,477 INFO [train.py:1114] (3/4) Epoch 12, batch 2100, loss[loss=0.1981, simple_loss=0.2866, pruned_loss=0.05483, over 13534.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2877, pruned_loss=0.06132, over 2638843.65 frames. ], batch size: 37, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:30:30,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=15.0 +2024-08-03 15:30:46,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.38 vs. limit=22.5 +2024-08-03 15:30:51,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=41.41 vs. limit=15.0 +2024-08-03 15:30:58,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=154638.0, ans=0.125 +2024-08-03 15:31:07,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=154674.66666666666, ans=0.0 +2024-08-03 15:31:10,334 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.387e+01 1.104e+02 1.282e+02 1.725e+02 3.211e+02, threshold=2.564e+02, percent-clipped=3.0 +2024-08-03 15:31:10,387 INFO [train.py:1114] (3/4) Epoch 12, batch 2150, loss[loss=0.1892, simple_loss=0.2761, pruned_loss=0.0511, over 13568.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2864, pruned_loss=0.0607, over 2647621.18 frames. 
], batch size: 36, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:31:29,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=154784.66666666666, ans=0.0 +2024-08-03 15:31:38,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154821.33333333334, ans=0.125 +2024-08-03 15:31:42,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154821.33333333334, ans=0.125 +2024-08-03 15:31:59,005 INFO [train.py:1114] (3/4) Epoch 12, batch 2200, loss[loss=0.204, simple_loss=0.2868, pruned_loss=0.06061, over 13399.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2857, pruned_loss=0.06022, over 2645387.28 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:31:59,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=154894.66666666666, ans=0.025 +2024-08-03 15:31:59,282 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:32:06,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0 +2024-08-03 15:32:06,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0 +2024-08-03 15:32:09,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=154931.33333333334, ans=0.0 +2024-08-03 15:32:22,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=154968.0, ans=0.125 +2024-08-03 15:32:26,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-08-03 15:32:33,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155004.66666666666, ans=0.125 +2024-08-03 15:32:34,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=155041.33333333334, ans=0.125 +2024-08-03 15:32:35,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155041.33333333334, ans=0.1 +2024-08-03 15:32:46,541 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.202e+02 1.524e+02 1.986e+02 3.191e+02, threshold=3.048e+02, percent-clipped=7.0 +2024-08-03 15:32:46,579 INFO [train.py:1114] (3/4) Epoch 12, batch 2250, loss[loss=0.1877, simple_loss=0.2822, pruned_loss=0.04659, over 13341.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2854, pruned_loss=0.0602, over 2642495.80 frames. 
], batch size: 37, lr: 1.04e-02, grad_scale: 16.0 +2024-08-03 15:32:53,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=155078.0, ans=0.025 +2024-08-03 15:33:10,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=155151.33333333334, ans=0.125 +2024-08-03 15:33:13,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=155151.33333333334, ans=0.125 +2024-08-03 15:33:17,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0 +2024-08-03 15:33:28,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=155224.66666666666, ans=0.0 +2024-08-03 15:33:34,530 INFO [train.py:1114] (3/4) Epoch 12, batch 2300, loss[loss=0.1822, simple_loss=0.2627, pruned_loss=0.05083, over 13572.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2844, pruned_loss=0.06, over 2638420.53 frames. ], batch size: 33, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:33:38,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=155261.33333333334, ans=10.0 +2024-08-03 15:33:39,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.85 vs. limit=15.0 +2024-08-03 15:33:40,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155261.33333333334, ans=0.1 +2024-08-03 15:33:51,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=155298.0, ans=0.125 +2024-08-03 15:34:08,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=155371.33333333334, ans=0.2 +2024-08-03 15:34:19,893 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.892e+01 1.184e+02 1.344e+02 1.744e+02 3.184e+02, threshold=2.689e+02, percent-clipped=1.0 +2024-08-03 15:34:19,930 INFO [train.py:1114] (3/4) Epoch 12, batch 2350, loss[loss=0.1782, simple_loss=0.2659, pruned_loss=0.04521, over 13540.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2844, pruned_loss=0.05987, over 2641391.72 frames. ], batch size: 38, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:34:42,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=22.5 +2024-08-03 15:35:04,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=155628.0, ans=0.0 +2024-08-03 15:35:05,407 INFO [train.py:1114] (3/4) Epoch 12, batch 2400, loss[loss=0.2093, simple_loss=0.2845, pruned_loss=0.06703, over 13532.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2852, pruned_loss=0.06007, over 2642440.98 frames. 
], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-08-03 15:35:09,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155628.0, ans=0.0 +2024-08-03 15:35:10,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=155628.0, ans=0.07 +2024-08-03 15:35:13,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=155664.66666666666, ans=0.0 +2024-08-03 15:35:24,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=155664.66666666666, ans=0.1 +2024-08-03 15:35:34,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=155701.33333333334, ans=0.125 +2024-08-03 15:35:37,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=155738.0, ans=0.0 +2024-08-03 15:35:40,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=155738.0, ans=0.0 +2024-08-03 15:35:42,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=155738.0, ans=0.125 +2024-08-03 15:35:43,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0 +2024-08-03 15:35:49,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0 +2024-08-03 15:35:53,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=155811.33333333334, ans=0.0 +2024-08-03 15:35:54,436 INFO [train.py:1114] (3/4) Epoch 12, batch 2450, loss[loss=0.2273, simple_loss=0.2999, pruned_loss=0.07733, over 13366.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.286, pruned_loss=0.06043, over 2632795.21 frames. ], batch size: 37, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:35:55,289 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.374e+01 1.163e+02 1.350e+02 1.845e+02 2.920e+02, threshold=2.699e+02, percent-clipped=1.0 +2024-08-03 15:35:55,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=155811.33333333334, ans=0.0 +2024-08-03 15:35:56,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.19 vs. 
limit=15.0 +2024-08-03 15:36:01,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=155811.33333333334, ans=0.125 +2024-08-03 15:36:07,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=155848.0, ans=0.0 +2024-08-03 15:36:09,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155848.0, ans=0.125 +2024-08-03 15:36:09,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=155848.0, ans=0.125 +2024-08-03 15:36:23,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155921.33333333334, ans=0.125 +2024-08-03 15:36:25,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=155921.33333333334, ans=0.0 +2024-08-03 15:36:25,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=155921.33333333334, ans=0.125 +2024-08-03 15:36:30,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.49 vs. limit=22.5 +2024-08-03 15:36:41,778 INFO [train.py:1114] (3/4) Epoch 12, batch 2500, loss[loss=0.1877, simple_loss=0.2791, pruned_loss=0.04814, over 13410.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.286, pruned_loss=0.06019, over 2636177.24 frames. ], batch size: 39, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:36:49,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=156031.33333333334, ans=0.05 +2024-08-03 15:36:56,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156031.33333333334, ans=0.1 +2024-08-03 15:37:16,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=15.0 +2024-08-03 15:37:25,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=156141.33333333334, ans=0.125 +2024-08-03 15:37:27,539 INFO [train.py:1114] (3/4) Epoch 12, batch 2550, loss[loss=0.1806, simple_loss=0.2585, pruned_loss=0.05139, over 13550.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2866, pruned_loss=0.0606, over 2637098.96 frames. ], batch size: 31, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:37:28,352 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.903e+01 1.161e+02 1.332e+02 1.717e+02 3.575e+02, threshold=2.664e+02, percent-clipped=6.0 +2024-08-03 15:37:39,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.36 vs. limit=22.5 +2024-08-03 15:37:40,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=156214.66666666666, ans=0.0 +2024-08-03 15:37:41,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. 
limit=15.0 +2024-08-03 15:37:48,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=156251.33333333334, ans=0.0 +2024-08-03 15:38:04,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=156324.66666666666, ans=0.125 +2024-08-03 15:38:07,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=156324.66666666666, ans=0.125 +2024-08-03 15:38:12,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=156361.33333333334, ans=0.0 +2024-08-03 15:38:12,948 INFO [train.py:1114] (3/4) Epoch 12, batch 2600, loss[loss=0.1816, simple_loss=0.2691, pruned_loss=0.04709, over 13558.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2868, pruned_loss=0.06058, over 2636882.72 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:38:16,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=156361.33333333334, ans=0.125 +2024-08-03 15:38:26,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=156398.0, ans=0.0 +2024-08-03 15:38:38,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=156471.33333333334, ans=0.125 +2024-08-03 15:38:44,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156471.33333333334, ans=0.1 +2024-08-03 15:38:49,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=156508.0, ans=0.125 +2024-08-03 15:38:49,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156508.0, ans=0.125 +2024-08-03 15:38:55,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.03 vs. limit=22.5 +2024-08-03 15:38:56,424 INFO [train.py:1114] (3/4) Epoch 12, batch 2650, loss[loss=0.24, simple_loss=0.3187, pruned_loss=0.08068, over 13310.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2869, pruned_loss=0.0606, over 2639520.25 frames. 
], batch size: 46, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:38:57,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.176e+02 1.444e+02 1.768e+02 3.309e+02, threshold=2.888e+02, percent-clipped=8.0 +2024-08-03 15:39:03,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=156544.66666666666, ans=0.125 +2024-08-03 15:39:07,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=156581.33333333334, ans=0.125 +2024-08-03 15:39:08,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=156581.33333333334, ans=0.95 +2024-08-03 15:39:15,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=156618.0, ans=0.0 +2024-08-03 15:39:36,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156691.33333333334, ans=0.125 +2024-08-03 15:39:40,111 INFO [train.py:1114] (3/4) Epoch 12, batch 2700, loss[loss=0.2363, simple_loss=0.3138, pruned_loss=0.07942, over 13533.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2874, pruned_loss=0.06104, over 2636887.25 frames. ], batch size: 40, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:39:45,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=156728.0, ans=0.2 +2024-08-03 15:39:48,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=156764.66666666666, ans=0.0 +2024-08-03 15:40:08,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=156838.0, ans=0.125 +2024-08-03 15:40:12,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=156838.0, ans=0.2 +2024-08-03 15:40:15,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=156874.66666666666, ans=0.0 +2024-08-03 15:40:15,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-08-03 15:40:19,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=156874.66666666666, ans=0.125 +2024-08-03 15:40:20,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.57 vs. limit=22.5 +2024-08-03 15:40:23,014 INFO [train.py:1114] (3/4) Epoch 12, batch 2750, loss[loss=0.2082, simple_loss=0.2846, pruned_loss=0.06591, over 13335.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.286, pruned_loss=0.06063, over 2635165.85 frames. 
], batch size: 34, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:40:23,801 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.506e+01 1.130e+02 1.309e+02 1.569e+02 2.980e+02, threshold=2.619e+02, percent-clipped=1.0 +2024-08-03 15:40:27,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=156911.33333333334, ans=0.2 +2024-08-03 15:40:39,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-08-03 15:40:40,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=156984.66666666666, ans=0.125 +2024-08-03 15:41:05,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=157094.66666666666, ans=0.0 +2024-08-03 15:41:06,520 INFO [train.py:1114] (3/4) Epoch 12, batch 2800, loss[loss=0.2741, simple_loss=0.3342, pruned_loss=0.107, over 9076.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2868, pruned_loss=0.06109, over 2626292.63 frames. ], batch size: 97, lr: 1.03e-02, grad_scale: 32.0 +2024-08-03 15:41:31,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=157204.66666666666, ans=0.125 +2024-08-03 15:41:42,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157241.33333333334, ans=0.1 +2024-08-03 15:41:49,748 INFO [train.py:1114] (3/4) Epoch 12, batch 2850, loss[loss=0.1985, simple_loss=0.2884, pruned_loss=0.05428, over 13360.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2873, pruned_loss=0.06143, over 2620422.23 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:41:51,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.668e+01 1.162e+02 1.250e+02 1.511e+02 3.589e+02, threshold=2.501e+02, percent-clipped=1.0 +2024-08-03 15:42:07,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0 +2024-08-03 15:42:15,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157351.33333333334, ans=0.1 +2024-08-03 15:42:35,665 INFO [train.py:1114] (3/4) Epoch 12, batch 2900, loss[loss=0.2077, simple_loss=0.2886, pruned_loss=0.06342, over 13369.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2887, pruned_loss=0.06192, over 2631372.92 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:42:39,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=12.0 +2024-08-03 15:42:54,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=157534.66666666666, ans=0.2 +2024-08-03 15:43:18,732 INFO [train.py:1114] (3/4) Epoch 12, batch 2950, loss[loss=0.1929, simple_loss=0.2807, pruned_loss=0.05261, over 13348.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2871, pruned_loss=0.06127, over 2630370.38 frames. 
], batch size: 34, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:43:20,458 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.179e+02 1.423e+02 1.782e+02 2.994e+02, threshold=2.847e+02, percent-clipped=4.0 +2024-08-03 15:43:35,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=15.0 +2024-08-03 15:43:42,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157718.0, ans=0.1 +2024-08-03 15:43:45,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=157754.66666666666, ans=0.025 +2024-08-03 15:44:01,735 INFO [train.py:1114] (3/4) Epoch 12, batch 3000, loss[loss=0.1768, simple_loss=0.2636, pruned_loss=0.045, over 13540.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2863, pruned_loss=0.06089, over 2630586.39 frames. ], batch size: 37, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:44:01,735 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 15:44:07,507 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([1.4289, 2.2102, 1.7349, 1.9889, 1.9252, 2.2247, 0.9242, 1.6503], + device='cuda:3') +2024-08-03 15:44:11,714 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.178, simple_loss=0.2775, pruned_loss=0.03924, over 944034.00 frames. +2024-08-03 15:44:11,715 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 15:44:31,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157901.33333333334, ans=0.125 +2024-08-03 15:44:45,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=157974.66666666666, ans=0.0 +2024-08-03 15:44:55,167 INFO [train.py:1114] (3/4) Epoch 12, batch 3050, loss[loss=0.1937, simple_loss=0.2778, pruned_loss=0.05475, over 13532.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2871, pruned_loss=0.06118, over 2626850.38 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:44:56,893 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.316e+01 1.167e+02 1.304e+02 1.686e+02 2.790e+02, threshold=2.608e+02, percent-clipped=0.0 +2024-08-03 15:45:03,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.16 vs. limit=15.0 +2024-08-03 15:45:17,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158084.66666666666, ans=0.1 +2024-08-03 15:45:19,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=158084.66666666666, ans=0.0 +2024-08-03 15:45:32,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=158158.0, ans=0.2 +2024-08-03 15:45:34,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=158158.0, ans=0.2 +2024-08-03 15:45:39,540 INFO [train.py:1114] (3/4) Epoch 12, batch 3100, loss[loss=0.1926, simple_loss=0.2786, pruned_loss=0.05333, over 13313.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.287, pruned_loss=0.06112, over 2627476.83 frames. 
], batch size: 46, lr: 1.03e-02, grad_scale: 16.0 +2024-08-03 15:45:49,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158231.33333333334, ans=0.125 +2024-08-03 15:46:22,131 INFO [train.py:1114] (3/4) Epoch 12, batch 3150, loss[loss=0.2034, simple_loss=0.2852, pruned_loss=0.06077, over 13009.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2866, pruned_loss=0.06059, over 2629499.45 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 16.0 +2024-08-03 15:46:23,778 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.679e+01 1.169e+02 1.445e+02 1.962e+02 3.331e+02, threshold=2.890e+02, percent-clipped=6.0 +2024-08-03 15:46:25,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=158378.0, ans=0.125 +2024-08-03 15:46:31,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.26 vs. limit=12.0 +2024-08-03 15:46:33,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158414.66666666666, ans=0.125 +2024-08-03 15:46:56,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158524.66666666666, ans=0.1 +2024-08-03 15:46:58,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=158524.66666666666, ans=0.0 +2024-08-03 15:47:01,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0 +2024-08-03 15:47:01,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158524.66666666666, ans=0.125 +2024-08-03 15:47:05,159 INFO [train.py:1114] (3/4) Epoch 12, batch 3200, loss[loss=0.1727, simple_loss=0.2569, pruned_loss=0.04423, over 13548.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2857, pruned_loss=0.06034, over 2635485.84 frames. ], batch size: 37, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:47:05,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=158561.33333333334, ans=0.125 +2024-08-03 15:47:07,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=158561.33333333334, ans=0.125 +2024-08-03 15:47:11,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=158561.33333333334, ans=0.125 +2024-08-03 15:47:21,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.44 vs. limit=15.0 +2024-08-03 15:47:26,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.02 vs. limit=15.0 +2024-08-03 15:47:46,229 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:47:48,552 INFO [train.py:1114] (3/4) Epoch 12, batch 3250, loss[loss=0.2297, simple_loss=0.3123, pruned_loss=0.07353, over 13392.00 frames. 
], tot_loss[loss=0.2037, simple_loss=0.2866, pruned_loss=0.06039, over 2639396.64 frames. ], batch size: 38, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:47:50,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.188e+01 1.142e+02 1.340e+02 1.709e+02 3.212e+02, threshold=2.679e+02, percent-clipped=3.0 +2024-08-03 15:48:02,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.03 vs. limit=22.5 +2024-08-03 15:48:15,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158854.66666666666, ans=0.125 +2024-08-03 15:48:22,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=158854.66666666666, ans=0.0 +2024-08-03 15:48:32,141 INFO [train.py:1114] (3/4) Epoch 12, batch 3300, loss[loss=0.2107, simple_loss=0.2935, pruned_loss=0.06391, over 12860.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.286, pruned_loss=0.06052, over 2640729.53 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:48:35,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158928.0, ans=0.1 +2024-08-03 15:48:54,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.75 vs. limit=15.0 +2024-08-03 15:49:04,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.57 vs. limit=22.5 +2024-08-03 15:49:15,287 INFO [train.py:1114] (3/4) Epoch 12, batch 3350, loss[loss=0.2267, simple_loss=0.3065, pruned_loss=0.07344, over 13048.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2873, pruned_loss=0.06129, over 2629559.84 frames. ], batch size: 48, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:49:17,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.306e+01 1.184e+02 1.362e+02 1.748e+02 2.695e+02, threshold=2.725e+02, percent-clipped=2.0 +2024-08-03 15:49:43,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=159221.33333333334, ans=0.125 +2024-08-03 15:49:49,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=159258.0, ans=0.125 +2024-08-03 15:49:50,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0 +2024-08-03 15:49:59,051 INFO [train.py:1114] (3/4) Epoch 12, batch 3400, loss[loss=0.1606, simple_loss=0.2425, pruned_loss=0.03933, over 13532.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2868, pruned_loss=0.06109, over 2625052.85 frames. ], batch size: 31, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:50:00,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=159294.66666666666, ans=0.0 +2024-08-03 15:50:13,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=159331.33333333334, ans=0.09899494936611666 +2024-08-03 15:50:23,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. 
limit=6.0 +2024-08-03 15:50:24,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=15.0 +2024-08-03 15:50:25,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=159404.66666666666, ans=0.025 +2024-08-03 15:50:37,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.31 vs. limit=15.0 +2024-08-03 15:50:42,894 INFO [train.py:1114] (3/4) Epoch 12, batch 3450, loss[loss=0.2303, simple_loss=0.309, pruned_loss=0.07578, over 12923.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2868, pruned_loss=0.06081, over 2629071.06 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:50:44,516 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.325e+01 1.219e+02 1.408e+02 1.757e+02 3.423e+02, threshold=2.817e+02, percent-clipped=3.0 +2024-08-03 15:51:03,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=159551.33333333334, ans=0.0 +2024-08-03 15:51:16,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.71 vs. limit=15.0 +2024-08-03 15:51:24,781 INFO [train.py:1114] (3/4) Epoch 12, batch 3500, loss[loss=0.1817, simple_loss=0.2567, pruned_loss=0.0534, over 13550.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2864, pruned_loss=0.06103, over 2630789.51 frames. ], batch size: 34, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:51:29,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159661.33333333334, ans=0.125 +2024-08-03 15:51:41,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159734.66666666666, ans=0.125 +2024-08-03 15:51:50,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159771.33333333334, ans=0.1 +2024-08-03 15:52:01,868 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:52:07,854 INFO [train.py:1114] (3/4) Epoch 12, batch 3550, loss[loss=0.2332, simple_loss=0.307, pruned_loss=0.07964, over 12463.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2883, pruned_loss=0.06208, over 2628722.79 frames. 
], batch size: 58, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:52:07,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=159844.66666666666, ans=0.015 +2024-08-03 15:52:09,501 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.339e+01 1.127e+02 1.244e+02 1.565e+02 2.847e+02, threshold=2.489e+02, percent-clipped=1.0 +2024-08-03 15:52:14,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=159844.66666666666, ans=0.04949747468305833 +2024-08-03 15:52:15,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=159881.33333333334, ans=0.035 +2024-08-03 15:52:34,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=159954.66666666666, ans=0.07 +2024-08-03 15:52:38,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=159954.66666666666, ans=0.09899494936611666 +2024-08-03 15:52:44,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159991.33333333334, ans=0.1 +2024-08-03 15:52:48,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159991.33333333334, ans=0.0 +2024-08-03 15:52:51,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=5.68 vs. limit=12.0 +2024-08-03 15:52:53,376 INFO [train.py:1114] (3/4) Epoch 12, batch 3600, loss[loss=0.2573, simple_loss=0.3171, pruned_loss=0.09871, over 9195.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.293, pruned_loss=0.06666, over 2484339.91 frames. ], batch size: 96, lr: 1.02e-02, grad_scale: 32.0 +2024-08-03 15:52:59,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=160028.0, ans=0.125 +2024-08-03 15:53:06,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.76 vs. limit=22.5 +2024-08-03 15:53:07,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=160064.66666666666, ans=0.0 +2024-08-03 15:53:22,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160138.0, ans=0.0 +2024-08-03 15:53:22,975 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:54:13,945 INFO [train.py:1114] (3/4) Epoch 13, batch 0, loss[loss=0.1837, simple_loss=0.2623, pruned_loss=0.05253, over 13346.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2623, pruned_loss=0.05253, over 13346.00 frames. ], batch size: 33, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:54:13,946 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 15:54:23,997 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.179, simple_loss=0.2806, pruned_loss=0.03875, over 944034.00 frames. 
+2024-08-03 15:54:23,998 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 15:54:25,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=160174.66666666666, ans=0.0 +2024-08-03 15:54:30,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=160174.66666666666, ans=0.125 +2024-08-03 15:54:37,804 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.239e+02 1.394e+02 1.533e+02 2.538e+02, threshold=2.789e+02, percent-clipped=1.0 +2024-08-03 15:54:48,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=160248.0, ans=0.2 +2024-08-03 15:54:48,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=160248.0, ans=0.0 +2024-08-03 15:54:57,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=160284.66666666666, ans=0.0 +2024-08-03 15:55:00,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=160284.66666666666, ans=0.035 +2024-08-03 15:55:00,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=160284.66666666666, ans=0.0 +2024-08-03 15:55:11,490 INFO [train.py:1114] (3/4) Epoch 13, batch 50, loss[loss=0.1667, simple_loss=0.2564, pruned_loss=0.0385, over 13421.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2857, pruned_loss=0.05981, over 576634.38 frames. ], batch size: 32, lr: 9.79e-03, grad_scale: 32.0 +2024-08-03 15:55:32,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5 +2024-08-03 15:55:39,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=160468.0, ans=0.035 +2024-08-03 15:55:53,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=160504.66666666666, ans=0.025 +2024-08-03 15:55:58,990 INFO [train.py:1114] (3/4) Epoch 13, batch 100, loss[loss=0.1871, simple_loss=0.2732, pruned_loss=0.05055, over 13528.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2877, pruned_loss=0.06014, over 1023623.14 frames. 
], batch size: 35, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:01,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=160541.33333333334, ans=0.07 +2024-08-03 15:56:05,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160541.33333333334, ans=0.0 +2024-08-03 15:56:10,677 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.344e+01 1.098e+02 1.241e+02 1.448e+02 3.539e+02, threshold=2.482e+02, percent-clipped=1.0 +2024-08-03 15:56:12,723 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 15:56:14,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=160578.0, ans=0.125 +2024-08-03 15:56:30,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=160651.33333333334, ans=0.125 +2024-08-03 15:56:31,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160651.33333333334, ans=0.125 +2024-08-03 15:56:38,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=160688.0, ans=0.05 +2024-08-03 15:56:40,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=160688.0, ans=0.0 +2024-08-03 15:56:44,363 INFO [train.py:1114] (3/4) Epoch 13, batch 150, loss[loss=0.1719, simple_loss=0.2499, pruned_loss=0.04698, over 13426.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2842, pruned_loss=0.05896, over 1385088.98 frames. ], batch size: 32, lr: 9.78e-03, grad_scale: 32.0 +2024-08-03 15:56:44,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.90 vs. limit=22.5 +2024-08-03 15:56:47,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=160724.66666666666, ans=0.0 +2024-08-03 15:56:49,585 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-08-03 15:56:50,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160724.66666666666, ans=0.125 +2024-08-03 15:56:54,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.97 vs. limit=15.0 +2024-08-03 15:57:05,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=160798.0, ans=0.0 +2024-08-03 15:57:14,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160834.66666666666, ans=0.0 +2024-08-03 15:57:14,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=160834.66666666666, ans=0.125 +2024-08-03 15:57:23,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.97 vs. 
limit=22.5 +2024-08-03 15:57:33,252 INFO [train.py:1114] (3/4) Epoch 13, batch 200, loss[loss=0.2335, simple_loss=0.3104, pruned_loss=0.07825, over 12352.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2847, pruned_loss=0.05941, over 1663834.99 frames. ], batch size: 58, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:57:37,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160908.0, ans=0.125 +2024-08-03 15:57:45,686 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.389e+01 1.180e+02 1.439e+02 1.786e+02 2.514e+02, threshold=2.877e+02, percent-clipped=2.0 +2024-08-03 15:57:48,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=160944.66666666666, ans=0.025 +2024-08-03 15:57:49,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=160944.66666666666, ans=0.125 +2024-08-03 15:57:54,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=160981.33333333334, ans=0.0 +2024-08-03 15:58:01,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.69 vs. limit=15.0 +2024-08-03 15:58:01,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.49 vs. limit=15.0 +2024-08-03 15:58:17,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0 +2024-08-03 15:58:18,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=12.0 +2024-08-03 15:58:18,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.34 vs. limit=6.0 +2024-08-03 15:58:20,397 INFO [train.py:1114] (3/4) Epoch 13, batch 250, loss[loss=0.2305, simple_loss=0.3184, pruned_loss=0.07131, over 13328.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2841, pruned_loss=0.05902, over 1883121.32 frames. ], batch size: 46, lr: 9.77e-03, grad_scale: 16.0 +2024-08-03 15:58:33,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=161128.0, ans=0.125 +2024-08-03 15:58:46,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.35 vs. limit=15.0 +2024-08-03 15:58:46,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=161164.66666666666, ans=15.0 +2024-08-03 15:58:47,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=161201.33333333334, ans=0.125 +2024-08-03 15:58:56,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=161238.0, ans=0.0 +2024-08-03 15:59:00,304 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.21 vs. 
limit=15.0 +2024-08-03 15:59:02,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=161238.0, ans=0.0 +2024-08-03 15:59:05,441 INFO [train.py:1114] (3/4) Epoch 13, batch 300, loss[loss=0.2065, simple_loss=0.2944, pruned_loss=0.05929, over 13422.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2831, pruned_loss=0.05835, over 2050108.76 frames. ], batch size: 42, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 15:59:09,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=161274.66666666666, ans=0.125 +2024-08-03 15:59:18,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=161311.33333333334, ans=0.2 +2024-08-03 15:59:20,162 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.241e+01 1.100e+02 1.297e+02 1.682e+02 2.744e+02, threshold=2.594e+02, percent-clipped=0.0 +2024-08-03 15:59:35,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=161348.0, ans=0.2 +2024-08-03 16:00:00,503 INFO [train.py:1114] (3/4) Epoch 13, batch 350, loss[loss=0.1796, simple_loss=0.2569, pruned_loss=0.05109, over 13584.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2831, pruned_loss=0.05808, over 2181210.85 frames. ], batch size: 33, lr: 9.76e-03, grad_scale: 16.0 +2024-08-03 16:00:05,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.64 vs. limit=15.0 +2024-08-03 16:00:12,674 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:00:19,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=161531.33333333334, ans=0.09899494936611666 +2024-08-03 16:00:33,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=161568.0, ans=0.2 +2024-08-03 16:00:35,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=161604.66666666666, ans=0.035 +2024-08-03 16:00:45,704 INFO [train.py:1114] (3/4) Epoch 13, batch 400, loss[loss=0.2018, simple_loss=0.2863, pruned_loss=0.05865, over 13370.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2831, pruned_loss=0.05822, over 2285505.40 frames. 
], batch size: 37, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:00:45,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=161641.33333333334, ans=0.125 +2024-08-03 16:00:49,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161641.33333333334, ans=0.1 +2024-08-03 16:00:51,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=161641.33333333334, ans=0.09899494936611666 +2024-08-03 16:00:54,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161678.0, ans=0.1 +2024-08-03 16:00:54,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=161678.0, ans=0.0 +2024-08-03 16:00:56,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161678.0, ans=0.125 +2024-08-03 16:01:00,504 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.412e+01 1.099e+02 1.258e+02 1.504e+02 2.448e+02, threshold=2.516e+02, percent-clipped=0.0 +2024-08-03 16:01:05,091 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:01:18,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=15.0 +2024-08-03 16:01:21,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=161751.33333333334, ans=0.07 +2024-08-03 16:01:34,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=161788.0, ans=0.0 +2024-08-03 16:01:39,129 INFO [train.py:1114] (3/4) Epoch 13, batch 450, loss[loss=0.198, simple_loss=0.2888, pruned_loss=0.05356, over 13557.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2833, pruned_loss=0.05804, over 2359241.71 frames. ], batch size: 38, lr: 9.75e-03, grad_scale: 32.0 +2024-08-03 16:01:39,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=161824.66666666666, ans=0.0 +2024-08-03 16:02:28,143 INFO [train.py:1114] (3/4) Epoch 13, batch 500, loss[loss=0.2112, simple_loss=0.295, pruned_loss=0.06372, over 13415.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2824, pruned_loss=0.05783, over 2425096.79 frames. ], batch size: 43, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:02:34,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162008.0, ans=0.125 +2024-08-03 16:02:44,987 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.845e+01 1.131e+02 1.351e+02 1.618e+02 2.590e+02, threshold=2.702e+02, percent-clipped=1.0 +2024-08-03 16:02:53,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162081.33333333334, ans=0.0 +2024-08-03 16:03:11,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.18 vs. 
limit=12.0 +2024-08-03 16:03:13,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=162154.66666666666, ans=0.025 +2024-08-03 16:03:18,773 INFO [train.py:1114] (3/4) Epoch 13, batch 550, loss[loss=0.2181, simple_loss=0.2971, pruned_loss=0.06954, over 13332.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2826, pruned_loss=0.0582, over 2468193.73 frames. ], batch size: 49, lr: 9.74e-03, grad_scale: 16.0 +2024-08-03 16:03:37,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.13 vs. limit=22.5 +2024-08-03 16:03:48,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=162301.33333333334, ans=0.125 +2024-08-03 16:04:04,094 INFO [train.py:1114] (3/4) Epoch 13, batch 600, loss[loss=0.2426, simple_loss=0.3293, pruned_loss=0.07794, over 13288.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2838, pruned_loss=0.05898, over 2507708.44 frames. ], batch size: 46, lr: 9.73e-03, grad_scale: 16.0 +2024-08-03 16:04:04,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=162374.66666666666, ans=0.025 +2024-08-03 16:04:17,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.493e+01 1.180e+02 1.372e+02 1.849e+02 3.441e+02, threshold=2.744e+02, percent-clipped=2.0 +2024-08-03 16:04:26,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=162448.0, ans=0.025 +2024-08-03 16:04:29,093 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:04:32,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=162484.66666666666, ans=0.125 +2024-08-03 16:04:37,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=162484.66666666666, ans=0.2 +2024-08-03 16:04:38,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162484.66666666666, ans=0.1 +2024-08-03 16:04:51,064 INFO [train.py:1114] (3/4) Epoch 13, batch 650, loss[loss=0.2142, simple_loss=0.2958, pruned_loss=0.06632, over 13542.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2831, pruned_loss=0.05847, over 2543108.61 frames. ], batch size: 37, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:07,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=162594.66666666666, ans=0.2 +2024-08-03 16:05:13,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.27 vs. limit=12.0 +2024-08-03 16:05:19,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=162668.0, ans=0.125 +2024-08-03 16:05:25,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.47 vs. 
limit=22.5 +2024-08-03 16:05:32,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=162704.66666666666, ans=0.0 +2024-08-03 16:05:37,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=162704.66666666666, ans=0.125 +2024-08-03 16:05:39,758 INFO [train.py:1114] (3/4) Epoch 13, batch 700, loss[loss=0.1772, simple_loss=0.2621, pruned_loss=0.04608, over 13526.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2834, pruned_loss=0.05844, over 2566013.10 frames. ], batch size: 35, lr: 9.72e-03, grad_scale: 16.0 +2024-08-03 16:05:52,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=162778.0, ans=0.125 +2024-08-03 16:05:53,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.882e+01 1.139e+02 1.377e+02 1.797e+02 3.206e+02, threshold=2.754e+02, percent-clipped=4.0 +2024-08-03 16:05:55,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=162778.0, ans=0.125 +2024-08-03 16:05:55,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=162778.0, ans=0.125 +2024-08-03 16:05:57,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=162778.0, ans=0.125 +2024-08-03 16:05:58,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-08-03 16:06:16,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=162851.33333333334, ans=0.025 +2024-08-03 16:06:28,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162888.0, ans=0.1 +2024-08-03 16:06:31,546 INFO [train.py:1114] (3/4) Epoch 13, batch 750, loss[loss=0.2171, simple_loss=0.2986, pruned_loss=0.06785, over 13360.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2831, pruned_loss=0.05837, over 2583772.37 frames. ], batch size: 37, lr: 9.71e-03, grad_scale: 16.0 +2024-08-03 16:06:53,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-08-03 16:07:34,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163034.66666666666, ans=0.125 +2024-08-03 16:07:35,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=163034.66666666666, ans=0.2 +2024-08-03 16:07:40,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=163034.66666666666, ans=0.0 +2024-08-03 16:07:46,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=163071.33333333334, ans=0.125 +2024-08-03 16:07:46,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.94 vs. 
limit=10.0 +2024-08-03 16:07:47,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=163071.33333333334, ans=0.125 +2024-08-03 16:07:50,970 INFO [train.py:1114] (3/4) Epoch 13, batch 800, loss[loss=0.1639, simple_loss=0.2409, pruned_loss=0.04349, over 13335.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2829, pruned_loss=0.05801, over 2597862.84 frames. ], batch size: 33, lr: 9.71e-03, grad_scale: 32.0 +2024-08-03 16:07:54,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=163108.0, ans=0.2 +2024-08-03 16:07:58,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163108.0, ans=0.1 +2024-08-03 16:08:04,632 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.131e+01 1.126e+02 1.310e+02 1.667e+02 3.702e+02, threshold=2.620e+02, percent-clipped=3.0 +2024-08-03 16:08:29,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=163254.66666666666, ans=0.0 +2024-08-03 16:08:30,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=163254.66666666666, ans=0.07 +2024-08-03 16:08:35,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=163291.33333333334, ans=0.125 +2024-08-03 16:08:36,394 INFO [train.py:1114] (3/4) Epoch 13, batch 850, loss[loss=0.2283, simple_loss=0.3163, pruned_loss=0.07018, over 13341.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2831, pruned_loss=0.05841, over 2610501.97 frames. ], batch size: 40, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:08:49,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=163328.0, ans=0.025 +2024-08-03 16:09:25,549 INFO [train.py:1114] (3/4) Epoch 13, batch 900, loss[loss=0.1838, simple_loss=0.2633, pruned_loss=0.05215, over 13352.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2831, pruned_loss=0.05858, over 2613575.69 frames. ], batch size: 33, lr: 9.70e-03, grad_scale: 32.0 +2024-08-03 16:09:38,997 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.048e+01 1.164e+02 1.408e+02 1.726e+02 2.750e+02, threshold=2.816e+02, percent-clipped=1.0 +2024-08-03 16:09:41,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163511.33333333334, ans=0.125 +2024-08-03 16:09:42,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=163511.33333333334, ans=0.0 +2024-08-03 16:09:54,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163584.66666666666, ans=0.125 +2024-08-03 16:09:54,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=163584.66666666666, ans=0.025 +2024-08-03 16:10:00,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=163584.66666666666, ans=0.125 +2024-08-03 16:10:05,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=5.70 vs. 
limit=15.0 +2024-08-03 16:10:09,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=163621.33333333334, ans=15.0 +2024-08-03 16:10:12,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163658.0, ans=0.125 +2024-08-03 16:10:12,887 INFO [train.py:1114] (3/4) Epoch 13, batch 950, loss[loss=0.1614, simple_loss=0.2446, pruned_loss=0.03909, over 13526.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2832, pruned_loss=0.0586, over 2613752.54 frames. ], batch size: 34, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:10:27,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163694.66666666666, ans=0.125 +2024-08-03 16:10:29,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163694.66666666666, ans=0.1 +2024-08-03 16:10:52,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=163804.66666666666, ans=0.125 +2024-08-03 16:11:00,368 INFO [train.py:1114] (3/4) Epoch 13, batch 1000, loss[loss=0.191, simple_loss=0.2716, pruned_loss=0.05518, over 13365.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2841, pruned_loss=0.05912, over 2612419.69 frames. ], batch size: 35, lr: 9.69e-03, grad_scale: 32.0 +2024-08-03 16:11:00,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=163841.33333333334, ans=0.125 +2024-08-03 16:11:02,522 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:11:14,073 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.003e+01 1.149e+02 1.296e+02 1.618e+02 2.591e+02, threshold=2.593e+02, percent-clipped=0.0 +2024-08-03 16:11:30,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=163951.33333333334, ans=0.0 +2024-08-03 16:11:34,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.37 vs. limit=10.0 +2024-08-03 16:11:34,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=163951.33333333334, ans=0.125 +2024-08-03 16:11:35,331 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.12 vs. limit=22.5 +2024-08-03 16:11:45,697 INFO [train.py:1114] (3/4) Epoch 13, batch 1050, loss[loss=0.1908, simple_loss=0.28, pruned_loss=0.05084, over 13572.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2833, pruned_loss=0.05882, over 2617360.07 frames. ], batch size: 39, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:11:48,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. 
limit=6.0 +2024-08-03 16:12:04,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=164098.0, ans=0.0 +2024-08-03 16:12:18,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164134.66666666666, ans=0.1 +2024-08-03 16:12:32,464 INFO [train.py:1114] (3/4) Epoch 13, batch 1100, loss[loss=0.1977, simple_loss=0.2845, pruned_loss=0.0555, over 13577.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2827, pruned_loss=0.05832, over 2621738.68 frames. ], batch size: 36, lr: 9.68e-03, grad_scale: 32.0 +2024-08-03 16:12:45,863 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.230e+01 1.162e+02 1.483e+02 1.783e+02 2.652e+02, threshold=2.966e+02, percent-clipped=1.0 +2024-08-03 16:12:46,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=164244.66666666666, ans=0.125 +2024-08-03 16:13:18,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=164354.66666666666, ans=0.125 +2024-08-03 16:13:19,698 INFO [train.py:1114] (3/4) Epoch 13, batch 1150, loss[loss=0.2022, simple_loss=0.2767, pruned_loss=0.06388, over 13563.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2832, pruned_loss=0.05859, over 2620036.86 frames. ], batch size: 36, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:13:37,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=164428.0, ans=0.125 +2024-08-03 16:13:39,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=164464.66666666666, ans=0.0 +2024-08-03 16:13:41,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=164464.66666666666, ans=0.0 +2024-08-03 16:13:49,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=164501.33333333334, ans=0.125 +2024-08-03 16:14:05,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=164538.0, ans=0.09899494936611666 +2024-08-03 16:14:07,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=164574.66666666666, ans=0.125 +2024-08-03 16:14:07,671 INFO [train.py:1114] (3/4) Epoch 13, batch 1200, loss[loss=0.1863, simple_loss=0.2758, pruned_loss=0.04844, over 13560.00 frames. ], tot_loss[loss=0.201, simple_loss=0.284, pruned_loss=0.05895, over 2617791.37 frames. 
], batch size: 39, lr: 9.67e-03, grad_scale: 32.0 +2024-08-03 16:14:17,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164611.33333333334, ans=0.0 +2024-08-03 16:14:19,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=164611.33333333334, ans=0.125 +2024-08-03 16:14:21,157 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.721e+01 1.160e+02 1.448e+02 1.730e+02 2.788e+02, threshold=2.895e+02, percent-clipped=0.0 +2024-08-03 16:14:36,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=164684.66666666666, ans=0.125 +2024-08-03 16:14:44,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=164721.33333333334, ans=0.125 +2024-08-03 16:14:49,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164721.33333333334, ans=0.0 +2024-08-03 16:14:54,289 INFO [train.py:1114] (3/4) Epoch 13, batch 1250, loss[loss=0.2192, simple_loss=0.2943, pruned_loss=0.07204, over 13449.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2847, pruned_loss=0.05923, over 2629434.98 frames. ], batch size: 42, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:15:07,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164794.66666666666, ans=0.125 +2024-08-03 16:15:16,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=164831.33333333334, ans=0.025 +2024-08-03 16:15:28,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=164868.0, ans=0.125 +2024-08-03 16:15:39,442 INFO [train.py:1114] (3/4) Epoch 13, batch 1300, loss[loss=0.2204, simple_loss=0.3013, pruned_loss=0.06974, over 12907.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2838, pruned_loss=0.05873, over 2632224.12 frames. ], batch size: 52, lr: 9.66e-03, grad_scale: 32.0 +2024-08-03 16:15:44,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=164941.33333333334, ans=15.0 +2024-08-03 16:15:51,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=164978.0, ans=0.0 +2024-08-03 16:15:52,785 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.108e+01 1.112e+02 1.319e+02 1.683e+02 3.006e+02, threshold=2.638e+02, percent-clipped=1.0 +2024-08-03 16:15:53,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=164978.0, ans=0.2 +2024-08-03 16:16:27,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=165124.66666666666, ans=0.2 +2024-08-03 16:16:27,970 INFO [train.py:1114] (3/4) Epoch 13, batch 1350, loss[loss=0.1734, simple_loss=0.2578, pruned_loss=0.04448, over 13535.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2826, pruned_loss=0.05823, over 2639667.78 frames. 
], batch size: 37, lr: 9.65e-03, grad_scale: 32.0 +2024-08-03 16:16:34,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=165124.66666666666, ans=0.0 +2024-08-03 16:16:35,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=165124.66666666666, ans=0.2 +2024-08-03 16:16:44,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=165161.33333333334, ans=0.0 +2024-08-03 16:16:54,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.02 vs. limit=15.0 +2024-08-03 16:17:15,449 INFO [train.py:1114] (3/4) Epoch 13, batch 1400, loss[loss=0.1793, simple_loss=0.2501, pruned_loss=0.05425, over 13247.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2824, pruned_loss=0.0581, over 2643318.61 frames. ], batch size: 31, lr: 9.65e-03, grad_scale: 16.0 +2024-08-03 16:17:29,723 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.131e+02 1.241e+02 1.412e+02 2.386e+02, threshold=2.482e+02, percent-clipped=0.0 +2024-08-03 16:17:31,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=165344.66666666666, ans=0.2 +2024-08-03 16:17:45,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=165418.0, ans=0.125 +2024-08-03 16:17:49,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.75 vs. limit=10.0 +2024-08-03 16:17:56,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.12 vs. limit=15.0 +2024-08-03 16:18:01,117 INFO [train.py:1114] (3/4) Epoch 13, batch 1450, loss[loss=0.1896, simple_loss=0.2829, pruned_loss=0.04817, over 13443.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2827, pruned_loss=0.0581, over 2643713.70 frames. ], batch size: 43, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:18:11,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=165528.0, ans=0.125 +2024-08-03 16:18:12,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=165528.0, ans=0.0 +2024-08-03 16:18:19,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=165528.0, ans=0.04949747468305833 +2024-08-03 16:18:22,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=165564.66666666666, ans=0.95 +2024-08-03 16:18:32,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=165601.33333333334, ans=0.125 +2024-08-03 16:18:32,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=165601.33333333334, ans=0.2 +2024-08-03 16:18:42,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.80 vs. 
limit=15.0 +2024-08-03 16:18:44,520 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:18:47,875 INFO [train.py:1114] (3/4) Epoch 13, batch 1500, loss[loss=0.2475, simple_loss=0.3221, pruned_loss=0.0864, over 13424.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2836, pruned_loss=0.0585, over 2643034.89 frames. ], batch size: 39, lr: 9.64e-03, grad_scale: 16.0 +2024-08-03 16:19:02,696 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.158e+02 1.427e+02 1.728e+02 2.727e+02, threshold=2.854e+02, percent-clipped=3.0 +2024-08-03 16:19:11,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5 +2024-08-03 16:19:12,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=165748.0, ans=0.125 +2024-08-03 16:19:13,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.47 vs. limit=15.0 +2024-08-03 16:19:29,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=165821.33333333334, ans=0.025 +2024-08-03 16:19:35,424 INFO [train.py:1114] (3/4) Epoch 13, batch 1550, loss[loss=0.2049, simple_loss=0.2958, pruned_loss=0.05703, over 13378.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2838, pruned_loss=0.05851, over 2632516.09 frames. ], batch size: 41, lr: 9.63e-03, grad_scale: 16.0 +2024-08-03 16:19:47,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=165894.66666666666, ans=0.125 +2024-08-03 16:19:49,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=165894.66666666666, ans=0.125 +2024-08-03 16:20:18,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=166004.66666666666, ans=0.07 +2024-08-03 16:20:22,602 INFO [train.py:1114] (3/4) Epoch 13, batch 1600, loss[loss=0.1926, simple_loss=0.2864, pruned_loss=0.04944, over 13581.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2833, pruned_loss=0.05852, over 2625442.24 frames. ], batch size: 39, lr: 9.63e-03, grad_scale: 32.0 +2024-08-03 16:20:51,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166041.33333333334, ans=0.125 +2024-08-03 16:20:59,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.495e+01 1.166e+02 1.333e+02 1.673e+02 3.385e+02, threshold=2.665e+02, percent-clipped=4.0 +2024-08-03 16:21:21,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166188.0, ans=0.125 +2024-08-03 16:21:21,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=166188.0, ans=0.125 +2024-08-03 16:21:23,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=166188.0, ans=0.035 +2024-08-03 16:21:30,120 INFO [train.py:1114] (3/4) Epoch 13, batch 1650, loss[loss=0.1892, simple_loss=0.2843, pruned_loss=0.04704, over 13306.00 frames. 
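The grad_scale column moves in powers of two (32.0 -> 16.0 -> 32.0, and later down to 8.0), which is the classic dynamic loss-scaling pattern of mixed-precision training: halve the scale on an overflowing step, grow it again after a run of finite steps. Whether this run uses torch's GradScaler or a custom variant is an assumption; the sketch below shows the generic pattern with placeholder model/optimizer/batch names.

```python
import torch

# Dynamic loss scaling consistent with the grad_scale values logged above.
scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=2000)

def amp_step(model, optimizer, batch, loss_fn):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = loss_fn(model, batch)       # loss_fn is a placeholder name
    scaler.scale(loss).backward()
    scaler.step(optimizer)                 # skipped internally on overflow
    scaler.update()                        # scale is halved or grown here
    return loss.detach(), scaler.get_scale()
```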
], tot_loss[loss=0.2001, simple_loss=0.283, pruned_loss=0.05859, over 2621797.88 frames. ], batch size: 40, lr: 9.62e-03, grad_scale: 32.0 +2024-08-03 16:21:35,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.23 vs. limit=22.5 +2024-08-03 16:21:35,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=166224.66666666666, ans=0.125 +2024-08-03 16:22:02,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=166334.66666666666, ans=0.025 +2024-08-03 16:22:17,152 INFO [train.py:1114] (3/4) Epoch 13, batch 1700, loss[loss=0.1702, simple_loss=0.2475, pruned_loss=0.0464, over 13262.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2822, pruned_loss=0.05775, over 2630928.08 frames. ], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:22:17,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.67 vs. limit=10.0 +2024-08-03 16:22:27,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.92 vs. limit=15.0 +2024-08-03 16:22:30,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=166444.66666666666, ans=0.0 +2024-08-03 16:22:32,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.164e+02 1.401e+02 1.757e+02 2.684e+02, threshold=2.802e+02, percent-clipped=1.0 +2024-08-03 16:22:33,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=166444.66666666666, ans=0.125 +2024-08-03 16:22:43,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=166481.33333333334, ans=0.0 +2024-08-03 16:22:45,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166518.0, ans=0.1 +2024-08-03 16:22:45,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=166518.0, ans=0.025 +2024-08-03 16:22:49,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=166518.0, ans=0.125 +2024-08-03 16:22:52,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.88 vs. limit=15.0 +2024-08-03 16:23:03,320 INFO [train.py:1114] (3/4) Epoch 13, batch 1750, loss[loss=0.197, simple_loss=0.2719, pruned_loss=0.06103, over 13520.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2819, pruned_loss=0.05793, over 2634186.68 frames. ], batch size: 31, lr: 9.61e-03, grad_scale: 16.0 +2024-08-03 16:23:13,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=166628.0, ans=0.125 +2024-08-03 16:23:17,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166628.0, ans=0.1 +2024-08-03 16:23:29,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.22 vs. 
limit=15.0 +2024-08-03 16:23:40,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=166738.0, ans=0.125 +2024-08-03 16:23:45,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=166738.0, ans=0.125 +2024-08-03 16:23:46,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=166738.0, ans=0.125 +2024-08-03 16:23:50,886 INFO [train.py:1114] (3/4) Epoch 13, batch 1800, loss[loss=0.1871, simple_loss=0.2702, pruned_loss=0.05199, over 13560.00 frames. ], tot_loss[loss=0.199, simple_loss=0.282, pruned_loss=0.05801, over 2634799.66 frames. ], batch size: 38, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:23:55,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.14 vs. limit=12.0 +2024-08-03 16:24:08,451 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.884e+01 1.178e+02 1.346e+02 1.574e+02 2.406e+02, threshold=2.692e+02, percent-clipped=0.0 +2024-08-03 16:24:12,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=166848.0, ans=0.0 +2024-08-03 16:24:16,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166848.0, ans=0.0 +2024-08-03 16:24:27,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=166884.66666666666, ans=0.2 +2024-08-03 16:24:40,317 INFO [train.py:1114] (3/4) Epoch 13, batch 1850, loss[loss=0.2307, simple_loss=0.3071, pruned_loss=0.07718, over 13385.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2818, pruned_loss=0.05799, over 2636727.48 frames. ], batch size: 39, lr: 9.60e-03, grad_scale: 16.0 +2024-08-03 16:24:47,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166958.0, ans=0.1 +2024-08-03 16:24:56,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.23 vs. limit=15.0 +2024-08-03 16:25:15,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=167068.0, ans=0.125 +2024-08-03 16:25:17,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=167104.66666666666, ans=22.5 +2024-08-03 16:25:26,609 INFO [train.py:1114] (3/4) Epoch 13, batch 1900, loss[loss=0.2499, simple_loss=0.331, pruned_loss=0.08434, over 13329.00 frames. ], tot_loss[loss=0.199, simple_loss=0.282, pruned_loss=0.05803, over 2640121.78 frames. 
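The "Whitening: ... metric=M vs. limit=L" lines track how far a module's activations are from being whitened; a penalty only engages when the metric exceeds its limit. The exact statistic in scaling.py is not shown in this log, so the sketch below uses a simple covariance-anisotropy ratio as a stand-in, and omits the num_groups grouping seen in entries like whiten_keys (num_groups=4).

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """Stand-in for the statistic behind the Whitening log lines above
    (assumption: the real formula differs in detail). A perfectly whitened
    feature, whose channel covariance is a multiple of the identity,
    scores 1.0; larger values mean more anisotropy."""
    x = x.reshape(-1, x.shape[-1]).float()      # (frames, num_channels)
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / max(x.shape[0] - 1, 1)    # channel covariance
    eigs = torch.linalg.eigvalsh(cov)
    return float(eigs.max() / eigs.mean().clamp(min=1e-20))
```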
], batch size: 40, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:25:44,172 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.192e+01 1.122e+02 1.325e+02 1.918e+02 3.257e+02, threshold=2.651e+02, percent-clipped=9.0 +2024-08-03 16:25:53,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167214.66666666666, ans=0.125 +2024-08-03 16:25:57,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=167251.33333333334, ans=0.0 +2024-08-03 16:26:12,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=167288.0, ans=0.0 +2024-08-03 16:26:14,633 INFO [train.py:1114] (3/4) Epoch 13, batch 1950, loss[loss=0.2013, simple_loss=0.2826, pruned_loss=0.05996, over 13562.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2834, pruned_loss=0.05815, over 2646707.17 frames. ], batch size: 36, lr: 9.59e-03, grad_scale: 16.0 +2024-08-03 16:26:24,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=167361.33333333334, ans=0.0 +2024-08-03 16:26:30,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=15.0 +2024-08-03 16:26:31,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.21 vs. limit=10.0 +2024-08-03 16:26:33,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167398.0, ans=0.1 +2024-08-03 16:26:33,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=167398.0, ans=0.125 +2024-08-03 16:26:37,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=167398.0, ans=0.125 +2024-08-03 16:26:57,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167471.33333333334, ans=0.125 +2024-08-03 16:26:57,163 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:27:01,470 INFO [train.py:1114] (3/4) Epoch 13, batch 2000, loss[loss=0.181, simple_loss=0.2534, pruned_loss=0.05431, over 13556.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2841, pruned_loss=0.05822, over 2636212.42 frames. 
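Each train.py progress line reports a per-batch loss plus a tot_loss averaged over the cumulative frame count (~2.6M frames here). A simple frame-weighted running average reproduces that bookkeeping; whether the real tracker also applies exponential decay is not visible in the log, so this is a sketch under that assumption.

```python
# Frame-weighted running averages in the spirit of the train.py lines above.
class RunningLoss:
    def __init__(self):
        self.frames = 0.0
        self.sums = {}

    def update(self, metrics: dict, num_frames: float) -> dict:
        """Accumulate one batch and return the running frame-weighted means."""
        self.frames += num_frames
        for name, value in metrics.items():
            self.sums[name] = self.sums.get(name, 0.0) + value * num_frames
        return {name: s / self.frames for name, s in self.sums.items()}

tracker = RunningLoss()
# batch 2000 above: loss=0.181, simple_loss=0.2534, pruned_loss=0.05431, 13556 frames
print(tracker.update({"loss": 0.181, "simple_loss": 0.2534,
                      "pruned_loss": 0.05431}, 13556.0))
```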
], batch size: 31, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:17,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.733e+01 1.160e+02 1.429e+02 1.703e+02 2.821e+02, threshold=2.859e+02, percent-clipped=2.0 +2024-08-03 16:27:23,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=167581.33333333334, ans=0.125 +2024-08-03 16:27:26,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167581.33333333334, ans=0.125 +2024-08-03 16:27:36,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=167618.0, ans=0.025 +2024-08-03 16:27:38,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=167618.0, ans=0.125 +2024-08-03 16:27:43,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=167654.66666666666, ans=0.0 +2024-08-03 16:27:49,178 INFO [train.py:1114] (3/4) Epoch 13, batch 2050, loss[loss=0.2073, simple_loss=0.2816, pruned_loss=0.0665, over 13430.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2836, pruned_loss=0.05842, over 2633597.85 frames. ], batch size: 32, lr: 9.58e-03, grad_scale: 32.0 +2024-08-03 16:27:51,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=167691.33333333334, ans=0.125 +2024-08-03 16:27:58,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=167691.33333333334, ans=0.0 +2024-08-03 16:28:02,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=167728.0, ans=0.2 +2024-08-03 16:28:03,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=167728.0, ans=0.125 +2024-08-03 16:28:04,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167728.0, ans=0.0 +2024-08-03 16:28:05,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=15.0 +2024-08-03 16:28:17,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.81 vs. limit=15.0 +2024-08-03 16:28:26,946 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0 +2024-08-03 16:28:36,268 INFO [train.py:1114] (3/4) Epoch 13, batch 2100, loss[loss=0.1945, simple_loss=0.282, pruned_loss=0.05346, over 13528.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2833, pruned_loss=0.05809, over 2639342.88 frames. 
], batch size: 37, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:28:45,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=167911.33333333334, ans=0.125 +2024-08-03 16:28:51,610 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.366e+01 1.102e+02 1.273e+02 1.593e+02 3.536e+02, threshold=2.546e+02, percent-clipped=4.0 +2024-08-03 16:29:00,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.07 vs. limit=22.5 +2024-08-03 16:29:13,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168021.33333333334, ans=0.125 +2024-08-03 16:29:13,660 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.32 vs. limit=15.0 +2024-08-03 16:29:16,214 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:29:20,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=168021.33333333334, ans=0.025 +2024-08-03 16:29:20,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=168021.33333333334, ans=0.125 +2024-08-03 16:29:23,258 INFO [train.py:1114] (3/4) Epoch 13, batch 2150, loss[loss=0.1842, simple_loss=0.27, pruned_loss=0.04922, over 13571.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2823, pruned_loss=0.05758, over 2647944.07 frames. ], batch size: 36, lr: 9.57e-03, grad_scale: 32.0 +2024-08-03 16:29:27,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168058.0, ans=0.1 +2024-08-03 16:29:30,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=168058.0, ans=0.0 +2024-08-03 16:29:36,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168094.66666666666, ans=0.125 +2024-08-03 16:29:37,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-08-03 16:29:38,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=168094.66666666666, ans=0.0 +2024-08-03 16:29:40,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=168131.33333333334, ans=0.1 +2024-08-03 16:29:41,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168131.33333333334, ans=0.125 +2024-08-03 16:29:41,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.77 vs. limit=15.0 +2024-08-03 16:29:46,218 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.33 vs. limit=10.0 +2024-08-03 16:29:47,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. 
limit=15.0 +2024-08-03 16:30:08,431 INFO [train.py:1114] (3/4) Epoch 13, batch 2200, loss[loss=0.1762, simple_loss=0.2733, pruned_loss=0.03955, over 13401.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.282, pruned_loss=0.05763, over 2646096.69 frames. ], batch size: 39, lr: 9.56e-03, grad_scale: 32.0 +2024-08-03 16:30:19,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=168278.0, ans=15.0 +2024-08-03 16:30:23,845 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.382e+01 1.307e+02 1.724e+02 2.157e+02 3.326e+02, threshold=3.447e+02, percent-clipped=16.0 +2024-08-03 16:30:26,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168314.66666666666, ans=0.1 +2024-08-03 16:30:30,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.13 vs. limit=22.5 +2024-08-03 16:30:42,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=168351.33333333334, ans=0.04949747468305833 +2024-08-03 16:30:55,465 INFO [train.py:1114] (3/4) Epoch 13, batch 2250, loss[loss=0.1824, simple_loss=0.2791, pruned_loss=0.04284, over 13342.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2817, pruned_loss=0.05734, over 2644031.95 frames. ], batch size: 37, lr: 9.56e-03, grad_scale: 16.0 +2024-08-03 16:30:55,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168424.66666666666, ans=0.125 +2024-08-03 16:31:07,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=168461.33333333334, ans=0.125 +2024-08-03 16:31:23,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168534.66666666666, ans=0.125 +2024-08-03 16:31:24,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168534.66666666666, ans=0.1 +2024-08-03 16:31:28,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=168534.66666666666, ans=0.0 +2024-08-03 16:31:38,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=168571.33333333334, ans=0.0 +2024-08-03 16:31:41,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=168571.33333333334, ans=0.0 +2024-08-03 16:31:44,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=10.0 +2024-08-03 16:31:44,479 INFO [train.py:1114] (3/4) Epoch 13, batch 2300, loss[loss=0.1806, simple_loss=0.2519, pruned_loss=0.05463, over 13585.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2811, pruned_loss=0.05752, over 2639655.81 frames. ], batch size: 33, lr: 9.55e-03, grad_scale: 16.0 +2024-08-03 16:31:47,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.79 vs. 
limit=12.0 +2024-08-03 16:32:01,026 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.657e+01 1.154e+02 1.335e+02 1.728e+02 3.672e+02, threshold=2.670e+02, percent-clipped=1.0 +2024-08-03 16:32:11,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=168718.0, ans=0.125 +2024-08-03 16:32:22,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.31 vs. limit=15.0 +2024-08-03 16:32:29,791 INFO [train.py:1114] (3/4) Epoch 13, batch 2350, loss[loss=0.2005, simple_loss=0.2783, pruned_loss=0.06136, over 13559.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2814, pruned_loss=0.05732, over 2642181.70 frames. ], batch size: 38, lr: 9.55e-03, grad_scale: 16.0 +2024-08-03 16:32:56,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168864.66666666666, ans=0.125 +2024-08-03 16:33:01,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168901.33333333334, ans=0.125 +2024-08-03 16:33:03,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=168901.33333333334, ans=0.0 +2024-08-03 16:33:03,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168901.33333333334, ans=0.1 +2024-08-03 16:33:17,355 INFO [train.py:1114] (3/4) Epoch 13, batch 2400, loss[loss=0.209, simple_loss=0.2984, pruned_loss=0.05979, over 13545.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2827, pruned_loss=0.05771, over 2643370.29 frames. ], batch size: 35, lr: 9.54e-03, grad_scale: 32.0 +2024-08-03 16:33:20,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168974.66666666666, ans=0.1 +2024-08-03 16:33:26,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=169011.33333333334, ans=0.2 +2024-08-03 16:33:33,795 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.921e+01 1.143e+02 1.305e+02 1.687e+02 2.768e+02, threshold=2.610e+02, percent-clipped=2.0 +2024-08-03 16:34:03,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169158.0, ans=0.125 +2024-08-03 16:34:04,475 INFO [train.py:1114] (3/4) Epoch 13, batch 2450, loss[loss=0.1849, simple_loss=0.2803, pruned_loss=0.04473, over 13353.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2841, pruned_loss=0.05839, over 2633594.42 frames. ], batch size: 37, lr: 9.54e-03, grad_scale: 32.0 +2024-08-03 16:34:05,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.89 vs. 
limit=10.0 +2024-08-03 16:34:16,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=169194.66666666666, ans=0.125 +2024-08-03 16:34:28,181 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:34:34,757 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.527e-03 +2024-08-03 16:34:41,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=169304.66666666666, ans=0.2 +2024-08-03 16:34:43,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=169304.66666666666, ans=0.0 +2024-08-03 16:34:50,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=169304.66666666666, ans=15.0 +2024-08-03 16:34:51,558 INFO [train.py:1114] (3/4) Epoch 13, batch 2500, loss[loss=0.2019, simple_loss=0.2921, pruned_loss=0.05588, over 13386.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2839, pruned_loss=0.05811, over 2637707.38 frames. ], batch size: 39, lr: 9.53e-03, grad_scale: 32.0 +2024-08-03 16:35:06,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=169378.0, ans=0.125 +2024-08-03 16:35:08,288 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.139e+02 1.387e+02 1.623e+02 2.338e+02, threshold=2.774e+02, percent-clipped=0.0 +2024-08-03 16:35:08,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=169414.66666666666, ans=0.125 +2024-08-03 16:35:27,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=15.0 +2024-08-03 16:35:30,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=169488.0, ans=0.0 +2024-08-03 16:35:38,362 INFO [train.py:1114] (3/4) Epoch 13, batch 2550, loss[loss=0.1995, simple_loss=0.2696, pruned_loss=0.06464, over 13499.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2838, pruned_loss=0.05818, over 2639379.33 frames. ], batch size: 31, lr: 9.53e-03, grad_scale: 16.0 +2024-08-03 16:35:47,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=169561.33333333334, ans=0.0 +2024-08-03 16:35:49,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=169561.33333333334, ans=0.0 +2024-08-03 16:35:50,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169561.33333333334, ans=0.1 +2024-08-03 16:36:06,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=169634.66666666666, ans=0.0 +2024-08-03 16:36:12,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=169671.33333333334, ans=0.0 +2024-08-03 16:36:21,539 INFO [train.py:1114] (3/4) Epoch 13, batch 2600, loss[loss=0.1807, simple_loss=0.2618, pruned_loss=0.04978, over 13565.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2844, pruned_loss=0.05857, over 2638375.50 frames. 
], batch size: 36, lr: 9.52e-03, grad_scale: 16.0 +2024-08-03 16:36:39,618 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.885e+01 1.136e+02 1.295e+02 1.531e+02 3.554e+02, threshold=2.589e+02, percent-clipped=4.0 +2024-08-03 16:36:42,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169781.33333333334, ans=0.1 +2024-08-03 16:37:04,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=169854.66666666666, ans=0.125 +2024-08-03 16:37:06,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=169891.33333333334, ans=0.0 +2024-08-03 16:37:06,850 INFO [train.py:1114] (3/4) Epoch 13, batch 2650, loss[loss=0.2091, simple_loss=0.2927, pruned_loss=0.06277, over 13344.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2847, pruned_loss=0.05862, over 2641339.39 frames. ], batch size: 46, lr: 9.52e-03, grad_scale: 16.0 +2024-08-03 16:37:10,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=169891.33333333334, ans=0.125 +2024-08-03 16:37:18,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5 +2024-08-03 16:37:22,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=169928.0, ans=0.125 +2024-08-03 16:37:27,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=169964.66666666666, ans=0.025 +2024-08-03 16:37:34,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=170001.33333333334, ans=0.125 +2024-08-03 16:37:49,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=170038.0, ans=0.2 +2024-08-03 16:37:50,564 INFO [train.py:1114] (3/4) Epoch 13, batch 2700, loss[loss=0.2351, simple_loss=0.3184, pruned_loss=0.07585, over 13552.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2853, pruned_loss=0.05904, over 2638725.77 frames. ], batch size: 40, lr: 9.51e-03, grad_scale: 16.0 +2024-08-03 16:38:05,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.79 vs. limit=15.0 +2024-08-03 16:38:07,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.611e+01 1.174e+02 1.343e+02 1.652e+02 2.925e+02, threshold=2.686e+02, percent-clipped=2.0 +2024-08-03 16:38:18,062 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:38:18,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=170184.66666666666, ans=0.025 +2024-08-03 16:38:27,891 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:38:36,372 INFO [train.py:1114] (3/4) Epoch 13, batch 2750, loss[loss=0.1947, simple_loss=0.274, pruned_loss=0.05769, over 13341.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2838, pruned_loss=0.0586, over 2635686.68 frames. 
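Names like attention_skip_rate, conv_skip_rate, and bypass.skip_rate throughout this log read as stochastic-depth style gates whose probabilities follow the schedules sketched earlier. A generic way a training step might consume such a rate is below; this is illustrative, not the zipformer code.

```python
import torch

def residual_with_skip(residual: torch.Tensor, submodule_out: torch.Tensor,
                       skip_rate: float, training: bool) -> torch.Tensor:
    """Stochastically skip a submodule with probability skip_rate during
    training (assumed semantics of the *_skip_rate values logged above)."""
    if training and float(torch.rand(())) < skip_rate:
        return residual                     # drop the submodule this step
    return residual + submodule_out
```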
], batch size: 34, lr: 9.51e-03, grad_scale: 16.0 +2024-08-03 16:38:38,770 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.18 vs. limit=10.0 +2024-08-03 16:38:51,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170294.66666666666, ans=0.1 +2024-08-03 16:38:54,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=170331.33333333334, ans=0.2 +2024-08-03 16:38:54,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170331.33333333334, ans=0.1 +2024-08-03 16:38:56,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170331.33333333334, ans=0.125 +2024-08-03 16:39:20,225 INFO [train.py:1114] (3/4) Epoch 13, batch 2800, loss[loss=0.2477, simple_loss=0.3174, pruned_loss=0.08894, over 9246.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2836, pruned_loss=0.05876, over 2627723.14 frames. ], batch size: 96, lr: 9.50e-03, grad_scale: 32.0 +2024-08-03 16:39:27,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=170441.33333333334, ans=0.0 +2024-08-03 16:39:36,326 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.148e+02 1.326e+02 1.634e+02 2.406e+02, threshold=2.653e+02, percent-clipped=0.0 +2024-08-03 16:39:52,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=170551.33333333334, ans=0.125 +2024-08-03 16:39:53,978 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:40:03,088 INFO [train.py:1114] (3/4) Epoch 13, batch 2850, loss[loss=0.187, simple_loss=0.2703, pruned_loss=0.0518, over 13367.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2839, pruned_loss=0.05871, over 2621711.02 frames. ], batch size: 35, lr: 9.50e-03, grad_scale: 16.0 +2024-08-03 16:40:07,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=170624.66666666666, ans=0.95 +2024-08-03 16:40:14,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=170661.33333333334, ans=0.2 +2024-08-03 16:40:21,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170698.0, ans=0.1 +2024-08-03 16:40:46,449 INFO [train.py:1114] (3/4) Epoch 13, batch 2900, loss[loss=0.2136, simple_loss=0.2964, pruned_loss=0.06542, over 13359.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2844, pruned_loss=0.05849, over 2632081.83 frames. ], batch size: 36, lr: 9.49e-03, grad_scale: 16.0 +2024-08-03 16:41:03,990 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.894e+01 1.100e+02 1.263e+02 1.445e+02 2.759e+02, threshold=2.526e+02, percent-clipped=1.0 +2024-08-03 16:41:13,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=170918.0, ans=0.0 +2024-08-03 16:41:30,941 INFO [train.py:1114] (3/4) Epoch 13, batch 2950, loss[loss=0.1791, simple_loss=0.2655, pruned_loss=0.04635, over 13351.00 frames. 
], tot_loss[loss=0.1994, simple_loss=0.2828, pruned_loss=0.05802, over 2629760.76 frames. ], batch size: 34, lr: 9.49e-03, grad_scale: 16.0 +2024-08-03 16:41:36,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=170991.33333333334, ans=0.125 +2024-08-03 16:41:38,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.58 vs. limit=15.0 +2024-08-03 16:41:42,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=171028.0, ans=0.125 +2024-08-03 16:41:49,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=171064.66666666666, ans=0.0 +2024-08-03 16:41:53,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=171064.66666666666, ans=0.025 +2024-08-03 16:41:58,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=171101.33333333334, ans=0.0 +2024-08-03 16:42:14,322 INFO [train.py:1114] (3/4) Epoch 13, batch 3000, loss[loss=0.1848, simple_loss=0.2742, pruned_loss=0.04767, over 13529.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2826, pruned_loss=0.05789, over 2629792.66 frames. ], batch size: 37, lr: 9.48e-03, grad_scale: 16.0 +2024-08-03 16:42:14,322 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 16:42:28,565 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1746, simple_loss=0.2745, pruned_loss=0.03731, over 944034.00 frames. +2024-08-03 16:42:28,566 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 16:42:45,833 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.103e+01 1.098e+02 1.258e+02 1.464e+02 2.884e+02, threshold=2.515e+02, percent-clipped=2.0 +2024-08-03 16:42:49,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.81 vs. limit=15.0 +2024-08-03 16:42:53,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.20 vs. limit=15.0 +2024-08-03 16:43:12,983 INFO [train.py:1114] (3/4) Epoch 13, batch 3050, loss[loss=0.1799, simple_loss=0.2589, pruned_loss=0.05047, over 13544.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2833, pruned_loss=0.05824, over 2626649.46 frames. ], batch size: 35, lr: 9.48e-03, grad_scale: 16.0 +2024-08-03 16:43:17,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171358.0, ans=0.1 +2024-08-03 16:43:33,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=171431.33333333334, ans=0.0 +2024-08-03 16:43:39,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=171468.0, ans=0.125 +2024-08-03 16:43:48,369 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:43:53,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. 
limit=6.0 +2024-08-03 16:43:54,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0 +2024-08-03 16:43:57,530 INFO [train.py:1114] (3/4) Epoch 13, batch 3100, loss[loss=0.2396, simple_loss=0.3161, pruned_loss=0.08154, over 13251.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2823, pruned_loss=0.05768, over 2626611.52 frames. ], batch size: 46, lr: 9.47e-03, grad_scale: 16.0 +2024-08-03 16:43:59,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=171541.33333333334, ans=0.125 +2024-08-03 16:44:08,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171578.0, ans=0.1 +2024-08-03 16:44:10,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=171578.0, ans=0.2 +2024-08-03 16:44:13,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=171578.0, ans=0.125 +2024-08-03 16:44:14,828 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.222e+01 1.118e+02 1.244e+02 1.594e+02 3.299e+02, threshold=2.487e+02, percent-clipped=5.0 +2024-08-03 16:44:32,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171688.0, ans=0.0 +2024-08-03 16:44:40,359 INFO [train.py:1114] (3/4) Epoch 13, batch 3150, loss[loss=0.21, simple_loss=0.2954, pruned_loss=0.06225, over 13008.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2825, pruned_loss=0.05772, over 2628292.51 frames. ], batch size: 48, lr: 9.47e-03, grad_scale: 16.0 +2024-08-03 16:44:40,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=171724.66666666666, ans=0.2 +2024-08-03 16:44:57,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=171798.0, ans=0.125 +2024-08-03 16:45:24,089 INFO [train.py:1114] (3/4) Epoch 13, batch 3200, loss[loss=0.1891, simple_loss=0.2743, pruned_loss=0.05196, over 13540.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2813, pruned_loss=0.05692, over 2634725.32 frames. ], batch size: 37, lr: 9.46e-03, grad_scale: 32.0 +2024-08-03 16:45:36,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=12.0 +2024-08-03 16:45:41,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.110e+01 1.144e+02 1.329e+02 1.843e+02 2.975e+02, threshold=2.659e+02, percent-clipped=4.0 +2024-08-03 16:45:42,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=171981.33333333334, ans=0.2 +2024-08-03 16:45:45,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.32 vs. 
limit=15.0 +2024-08-03 16:45:56,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172018.0, ans=0.1 +2024-08-03 16:45:59,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172054.66666666666, ans=0.1 +2024-08-03 16:46:04,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=172054.66666666666, ans=0.1 +2024-08-03 16:46:06,426 INFO [train.py:1114] (3/4) Epoch 13, batch 3250, loss[loss=0.2055, simple_loss=0.2999, pruned_loss=0.05552, over 13394.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2819, pruned_loss=0.05672, over 2638864.02 frames. ], batch size: 38, lr: 9.46e-03, grad_scale: 16.0 +2024-08-03 16:46:08,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=172091.33333333334, ans=0.0 +2024-08-03 16:46:09,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=172091.33333333334, ans=0.0 +2024-08-03 16:46:13,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172091.33333333334, ans=0.1 +2024-08-03 16:46:22,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=172164.66666666666, ans=0.0 +2024-08-03 16:46:45,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=172238.0, ans=0.0 +2024-08-03 16:46:50,168 INFO [train.py:1114] (3/4) Epoch 13, batch 3300, loss[loss=0.217, simple_loss=0.2946, pruned_loss=0.06968, over 12862.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2817, pruned_loss=0.05708, over 2640569.40 frames. ], batch size: 52, lr: 9.45e-03, grad_scale: 16.0 +2024-08-03 16:46:51,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=172274.66666666666, ans=0.0 +2024-08-03 16:46:55,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=172274.66666666666, ans=0.125 +2024-08-03 16:46:59,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.82 vs. limit=12.0 +2024-08-03 16:47:08,242 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.464e+01 1.115e+02 1.316e+02 1.603e+02 3.409e+02, threshold=2.632e+02, percent-clipped=2.0 +2024-08-03 16:47:22,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=172384.66666666666, ans=0.125 +2024-08-03 16:47:29,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172421.33333333334, ans=0.125 +2024-08-03 16:47:32,850 INFO [train.py:1114] (3/4) Epoch 13, batch 3350, loss[loss=0.2123, simple_loss=0.297, pruned_loss=0.06377, over 13024.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2826, pruned_loss=0.05758, over 2629653.84 frames. 
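At batch 3000 above the loop pauses, computes a validation loss over the full 944034-frame validation set, and logs peak memory before resuming. A sketch of that cadence follows; valid_interval and compute_loss are hypothetical names, not the train.py API.

```python
import torch

def maybe_validate(model, valid_loader, compute_loss, batch_idx,
                   valid_interval=3000):
    """Run a frame-weighted validation pass every valid_interval batches
    (batch 0 of a fresh epoch also triggers it, as in the log above).
    compute_loss is a hypothetical helper returning (loss, num_frames)."""
    if batch_idx % valid_interval != 0:
        return None
    model.eval()
    tot, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)
            tot += float(loss) * num_frames
            frames += num_frames
    model.train()
    return tot / frames            # frame-weighted validation loss
```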
], batch size: 48, lr: 9.45e-03, grad_scale: 16.0 +2024-08-03 16:47:50,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=172531.33333333334, ans=0.0 +2024-08-03 16:47:50,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=172531.33333333334, ans=0.125 +2024-08-03 16:47:50,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=172531.33333333334, ans=0.0 +2024-08-03 16:47:58,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=172568.0, ans=0.125 +2024-08-03 16:48:15,668 INFO [train.py:1114] (3/4) Epoch 13, batch 3400, loss[loss=0.1679, simple_loss=0.2513, pruned_loss=0.04219, over 13537.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2827, pruned_loss=0.05778, over 2626154.03 frames. ], batch size: 31, lr: 9.44e-03, grad_scale: 8.0 +2024-08-03 16:48:22,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=172641.33333333334, ans=0.95 +2024-08-03 16:48:32,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.90 vs. limit=15.0 +2024-08-03 16:48:34,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.232e+01 1.137e+02 1.264e+02 1.560e+02 2.546e+02, threshold=2.528e+02, percent-clipped=0.0 +2024-08-03 16:48:54,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=172788.0, ans=0.125 +2024-08-03 16:49:00,082 INFO [train.py:1114] (3/4) Epoch 13, batch 3450, loss[loss=0.1973, simple_loss=0.2831, pruned_loss=0.05573, over 12995.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2828, pruned_loss=0.05801, over 2629845.87 frames. ], batch size: 52, lr: 9.44e-03, grad_scale: 8.0 +2024-08-03 16:49:01,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=172824.66666666666, ans=0.0 +2024-08-03 16:49:04,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=172824.66666666666, ans=0.025 +2024-08-03 16:49:06,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.58 vs. limit=10.0 +2024-08-03 16:49:08,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=172861.33333333334, ans=0.2 +2024-08-03 16:49:10,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172861.33333333334, ans=0.125 +2024-08-03 16:49:25,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.30 vs. limit=10.0 +2024-08-03 16:49:43,786 INFO [train.py:1114] (3/4) Epoch 13, batch 3500, loss[loss=0.1878, simple_loss=0.2605, pruned_loss=0.05754, over 13532.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2828, pruned_loss=0.05861, over 2631414.60 frames. 
], batch size: 34, lr: 9.43e-03, grad_scale: 8.0 +2024-08-03 16:49:48,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=173008.0, ans=0.2 +2024-08-03 16:49:56,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.71 vs. limit=15.0 +2024-08-03 16:50:02,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.86 vs. limit=22.5 +2024-08-03 16:50:03,125 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.363e+01 1.139e+02 1.315e+02 1.608e+02 2.660e+02, threshold=2.630e+02, percent-clipped=2.0 +2024-08-03 16:50:05,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173081.33333333334, ans=0.0 +2024-08-03 16:50:08,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=173081.33333333334, ans=22.5 +2024-08-03 16:50:22,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=173154.66666666666, ans=0.0 +2024-08-03 16:50:24,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.89 vs. limit=22.5 +2024-08-03 16:50:27,047 INFO [train.py:1114] (3/4) Epoch 13, batch 3550, loss[loss=0.194, simple_loss=0.2869, pruned_loss=0.05054, over 12488.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2847, pruned_loss=0.05942, over 2629528.87 frames. ], batch size: 58, lr: 9.43e-03, grad_scale: 8.0 +2024-08-03 16:50:30,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=173191.33333333334, ans=0.0 +2024-08-03 16:50:49,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=173264.66666666666, ans=0.2 +2024-08-03 16:51:01,455 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 16:51:01,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=173301.33333333334, ans=0.025 +2024-08-03 16:51:02,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.86 vs. limit=15.0 +2024-08-03 16:51:09,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=173338.0, ans=0.5 +2024-08-03 16:51:11,546 INFO [train.py:1114] (3/4) Epoch 13, batch 3600, loss[loss=0.2589, simple_loss=0.3246, pruned_loss=0.09661, over 9463.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2902, pruned_loss=0.06389, over 2486015.80 frames. 
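The learning rate decays smoothly within epoch 13 (9.68e-03 at batch 1100 down to 9.42e-03 by batch 3600) and, just below, drops discretely when epoch 14 begins (9.08e-03). A schedule with both a batch term and an epoch term produces exactly this shape; icefall's optim.py defines an Eden scheduler of this form, but the hyperparameters below are assumptions, not values read from this run.

```python
def eden_style_lr(base_lr: float, batch: float, epoch: float,
                  lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    """Eden-style learning rate: a slow polynomial decay in batch count plus
    a decay in epoch, matching the within-epoch drift and the drop at the
    epoch-13 -> 14 boundary seen above (hyperparameters are assumptions)."""
    return (base_lr
            * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
            * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.5)
```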
], batch size: 97, lr: 9.42e-03, grad_scale: 16.0 +2024-08-03 16:51:15,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173374.66666666666, ans=0.125 +2024-08-03 16:51:26,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=173411.33333333334, ans=0.125 +2024-08-03 16:51:31,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.213e+02 1.304e+02 1.372e+02 1.765e+02, threshold=2.609e+02, percent-clipped=0.0 +2024-08-03 16:51:39,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173484.66666666666, ans=0.0 +2024-08-03 16:52:47,633 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.1792, simple_loss=0.2599, pruned_loss=0.04927, over 13323.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2599, pruned_loss=0.04927, over 13323.00 frames. ], batch size: 33, lr: 9.08e-03, grad_scale: 32.0 +2024-08-03 16:52:47,633 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 16:52:54,674 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7107, 3.0869, 2.5350, 2.2631], device='cuda:3') +2024-08-03 16:53:02,075 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1773, simple_loss=0.2784, pruned_loss=0.03813, over 944034.00 frames. +2024-08-03 16:53:02,078 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 16:53:11,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173558.0, ans=0.125 +2024-08-03 16:53:36,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173631.33333333334, ans=0.1 +2024-08-03 16:53:36,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173631.33333333334, ans=0.1 +2024-08-03 16:53:45,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=173668.0, ans=0.0 +2024-08-03 16:53:49,579 INFO [train.py:1114] (3/4) Epoch 14, batch 50, loss[loss=0.1776, simple_loss=0.2611, pruned_loss=0.04708, over 13430.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2843, pruned_loss=0.05895, over 578935.71 frames. ], batch size: 32, lr: 9.07e-03, grad_scale: 32.0 +2024-08-03 16:54:15,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.05 vs. limit=15.0 +2024-08-03 16:54:27,224 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.175e+02 1.367e+02 1.781e+02 2.550e+02, threshold=2.735e+02, percent-clipped=0.0 +2024-08-03 16:54:43,518 INFO [train.py:1114] (3/4) Epoch 14, batch 100, loss[loss=0.1834, simple_loss=0.2668, pruned_loss=0.04997, over 13529.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2842, pruned_loss=0.05799, over 1026537.15 frames. 
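During the validation pass at epoch 14, batch 0, zipformer.py:1858 prints an attn_weights_entropy tensor with four values, plausibly one entropy per attention head. The sketch below computes such a diagnostic; the exact reduction axes in zipformer.py are an assumption.

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Per-head entropy of attention distributions, in the spirit of the
    zipformer.py:1858 diagnostic above (reduction axes are an assumption)."""
    # attn: (num_heads, num_queries, num_keys); each key row sums to 1
    p = attn.clamp(min=1e-20)
    ent = -(p * p.log()).sum(dim=-1)   # entropy per (head, query)
    return ent.mean(dim=-1)            # average over queries -> one per head

heads = torch.softmax(torch.randn(4, 10, 10), dim=-1)
print(attn_weights_entropy(heads))     # a length-4 tensor, one entropy per head
```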
], batch size: 35, lr: 9.07e-03, grad_scale: 32.0 +2024-08-03 16:54:44,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=173888.0, ans=0.125 +2024-08-03 16:54:50,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=173888.0, ans=0.125 +2024-08-03 16:55:15,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.05 vs. limit=22.5 +2024-08-03 16:55:17,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.84 vs. limit=10.0 +2024-08-03 16:55:23,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174034.66666666666, ans=0.1 +2024-08-03 16:55:30,121 INFO [train.py:1114] (3/4) Epoch 14, batch 150, loss[loss=0.1556, simple_loss=0.2333, pruned_loss=0.03898, over 13425.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2818, pruned_loss=0.0563, over 1387131.22 frames. ], batch size: 32, lr: 9.06e-03, grad_scale: 32.0 +2024-08-03 16:55:50,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.25 vs. limit=15.0 +2024-08-03 16:56:03,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.908e+01 1.123e+02 1.329e+02 1.786e+02 3.044e+02, threshold=2.658e+02, percent-clipped=1.0 +2024-08-03 16:56:08,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=174181.33333333334, ans=0.125 +2024-08-03 16:56:09,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.59 vs. limit=15.0 +2024-08-03 16:56:28,049 INFO [train.py:1114] (3/4) Epoch 14, batch 200, loss[loss=0.2026, simple_loss=0.2859, pruned_loss=0.05962, over 12685.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2813, pruned_loss=0.05607, over 1666455.10 frames. ], batch size: 59, lr: 9.06e-03, grad_scale: 32.0 +2024-08-03 16:56:31,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=174254.66666666666, ans=0.125 +2024-08-03 16:56:45,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.39 vs. limit=15.0 +2024-08-03 16:56:46,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=174328.0, ans=0.2 +2024-08-03 16:56:56,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=174364.66666666666, ans=0.025 +2024-08-03 16:57:14,953 INFO [train.py:1114] (3/4) Epoch 14, batch 250, loss[loss=0.2169, simple_loss=0.3082, pruned_loss=0.0628, over 13332.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2812, pruned_loss=0.05627, over 1885747.51 frames. 
], batch size: 46, lr: 9.05e-03, grad_scale: 32.0 +2024-08-03 16:57:15,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=174438.0, ans=0.125 +2024-08-03 16:57:23,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.59 vs. limit=15.0 +2024-08-03 16:57:24,331 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.46 vs. limit=10.0 +2024-08-03 16:57:41,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=174511.33333333334, ans=0.05 +2024-08-03 16:57:49,041 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.116e+01 1.178e+02 1.381e+02 1.725e+02 3.085e+02, threshold=2.762e+02, percent-clipped=4.0 +2024-08-03 16:58:03,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=174584.66666666666, ans=15.0 +2024-08-03 16:58:05,301 INFO [train.py:1114] (3/4) Epoch 14, batch 300, loss[loss=0.2187, simple_loss=0.3041, pruned_loss=0.06663, over 13442.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.28, pruned_loss=0.05607, over 2052811.64 frames. ], batch size: 42, lr: 9.05e-03, grad_scale: 32.0 +2024-08-03 16:58:32,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=174731.33333333334, ans=0.0 +2024-08-03 16:58:52,953 INFO [train.py:1114] (3/4) Epoch 14, batch 350, loss[loss=0.1806, simple_loss=0.2623, pruned_loss=0.04943, over 13555.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2815, pruned_loss=0.05675, over 2182795.71 frames. ], batch size: 33, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 16:59:27,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=15.0 +2024-08-03 16:59:33,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174878.0, ans=0.125 +2024-08-03 16:59:38,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.118e+02 1.275e+02 1.546e+02 2.611e+02, threshold=2.551e+02, percent-clipped=0.0 +2024-08-03 16:59:40,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=174914.66666666666, ans=0.1 +2024-08-03 16:59:56,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.27 vs. limit=15.0 +2024-08-03 16:59:57,449 INFO [train.py:1114] (3/4) Epoch 14, batch 400, loss[loss=0.1826, simple_loss=0.2682, pruned_loss=0.04845, over 13357.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2809, pruned_loss=0.05649, over 2286822.49 frames. 
], batch size: 37, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 16:59:57,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=174988.0, ans=0.125 +2024-08-03 17:00:11,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=175024.66666666666, ans=0.125 +2024-08-03 17:00:15,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=175024.66666666666, ans=0.025 +2024-08-03 17:00:16,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.26 vs. limit=10.0 +2024-08-03 17:00:19,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.02 vs. limit=12.0 +2024-08-03 17:00:40,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=175134.66666666666, ans=0.125 +2024-08-03 17:00:48,053 INFO [train.py:1114] (3/4) Epoch 14, batch 450, loss[loss=0.2042, simple_loss=0.2844, pruned_loss=0.06196, over 13559.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2811, pruned_loss=0.0566, over 2360091.69 frames. ], batch size: 38, lr: 9.04e-03, grad_scale: 32.0 +2024-08-03 17:00:59,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=175208.0, ans=0.125 +2024-08-03 17:01:18,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=6.0 +2024-08-03 17:01:18,914 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.283e+01 1.087e+02 1.281e+02 1.631e+02 3.461e+02, threshold=2.562e+02, percent-clipped=3.0 +2024-08-03 17:01:20,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.82 vs. limit=15.0 +2024-08-03 17:01:36,949 INFO [train.py:1114] (3/4) Epoch 14, batch 500, loss[loss=0.2329, simple_loss=0.3142, pruned_loss=0.07583, over 13422.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2807, pruned_loss=0.05648, over 2425770.64 frames. ], batch size: 43, lr: 9.03e-03, grad_scale: 32.0 +2024-08-03 17:01:41,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=175354.66666666666, ans=0.0 +2024-08-03 17:01:53,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175391.33333333334, ans=0.125 +2024-08-03 17:01:58,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=15.0 +2024-08-03 17:02:21,565 INFO [train.py:1114] (3/4) Epoch 14, batch 550, loss[loss=0.2095, simple_loss=0.2949, pruned_loss=0.06203, over 13063.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2805, pruned_loss=0.05613, over 2468538.46 frames. 
], batch size: 48, lr: 9.03e-03, grad_scale: 32.0 +2024-08-03 17:02:27,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=175538.0, ans=0.2 +2024-08-03 17:02:50,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175648.0, ans=0.125 +2024-08-03 17:02:53,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.358e+01 1.151e+02 1.294e+02 1.518e+02 2.416e+02, threshold=2.587e+02, percent-clipped=0.0 +2024-08-03 17:02:57,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=175648.0, ans=0.5 +2024-08-03 17:03:07,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=175684.66666666666, ans=0.0 +2024-08-03 17:03:08,918 INFO [train.py:1114] (3/4) Epoch 14, batch 600, loss[loss=0.2135, simple_loss=0.2967, pruned_loss=0.06514, over 13303.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2808, pruned_loss=0.05615, over 2508501.40 frames. ], batch size: 46, lr: 9.02e-03, grad_scale: 16.0 +2024-08-03 17:03:11,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=175721.33333333334, ans=0.09899494936611666 +2024-08-03 17:03:40,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175831.33333333334, ans=0.125 +2024-08-03 17:03:57,660 INFO [train.py:1114] (3/4) Epoch 14, batch 650, loss[loss=0.1866, simple_loss=0.2827, pruned_loss=0.04524, over 13547.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2803, pruned_loss=0.05619, over 2543716.35 frames. 
], batch size: 37, lr: 9.02e-03, grad_scale: 8.0 +2024-08-03 17:04:06,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=175941.33333333334, ans=0.125 +2024-08-03 17:04:11,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=175941.33333333334, ans=0.0 +2024-08-03 17:04:19,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=175978.0, ans=0.125 +2024-08-03 17:04:29,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=176014.66666666666, ans=0.125 +2024-08-03 17:04:29,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.160e+02 1.386e+02 1.901e+02 3.564e+02, threshold=2.772e+02, percent-clipped=5.0 +2024-08-03 17:04:31,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=176014.66666666666, ans=0.125 +2024-08-03 17:04:31,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=176014.66666666666, ans=0.0 +2024-08-03 17:04:37,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=176051.33333333334, ans=0.125 +2024-08-03 17:04:37,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=176051.33333333334, ans=0.0 +2024-08-03 17:04:41,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176051.33333333334, ans=0.125 +2024-08-03 17:04:46,195 INFO [train.py:1114] (3/4) Epoch 14, batch 700, loss[loss=0.1962, simple_loss=0.2772, pruned_loss=0.05761, over 13525.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2792, pruned_loss=0.05535, over 2565519.88 frames. ], batch size: 35, lr: 9.01e-03, grad_scale: 8.0 +2024-08-03 17:05:02,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176124.66666666666, ans=0.1 +2024-08-03 17:05:12,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=176198.0, ans=0.025 +2024-08-03 17:05:16,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=176198.0, ans=0.0 +2024-08-03 17:05:19,328 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:05:31,390 INFO [train.py:1114] (3/4) Epoch 14, batch 750, loss[loss=0.1863, simple_loss=0.2795, pruned_loss=0.04658, over 13356.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2787, pruned_loss=0.055, over 2583163.29 frames. 
], batch size: 37, lr: 9.01e-03, grad_scale: 8.0 +2024-08-03 17:05:32,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176271.33333333334, ans=0.1 +2024-08-03 17:05:35,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=176271.33333333334, ans=0.0 +2024-08-03 17:05:38,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=176271.33333333334, ans=0.0 +2024-08-03 17:05:38,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=176271.33333333334, ans=0.0 +2024-08-03 17:05:50,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176344.66666666666, ans=0.1 +2024-08-03 17:06:02,472 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.800e+01 1.140e+02 1.290e+02 1.721e+02 6.299e+02, threshold=2.581e+02, percent-clipped=4.0 +2024-08-03 17:06:17,180 INFO [train.py:1114] (3/4) Epoch 14, batch 800, loss[loss=0.1782, simple_loss=0.2591, pruned_loss=0.04866, over 13347.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2787, pruned_loss=0.0552, over 2597830.35 frames. ], batch size: 33, lr: 9.00e-03, grad_scale: 16.0 +2024-08-03 17:06:34,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.70 vs. limit=15.0 +2024-08-03 17:06:46,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.59 vs. limit=15.0 +2024-08-03 17:06:50,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176564.66666666666, ans=0.125 +2024-08-03 17:06:57,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=15.0 +2024-08-03 17:07:05,356 INFO [train.py:1114] (3/4) Epoch 14, batch 850, loss[loss=0.188, simple_loss=0.2842, pruned_loss=0.04591, over 13329.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2793, pruned_loss=0.0559, over 2610045.60 frames. ], batch size: 40, lr: 9.00e-03, grad_scale: 16.0 +2024-08-03 17:07:31,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=176711.33333333334, ans=0.125 +2024-08-03 17:07:31,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176711.33333333334, ans=0.1 +2024-08-03 17:07:40,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.443e+01 1.087e+02 1.211e+02 1.412e+02 2.074e+02, threshold=2.422e+02, percent-clipped=0.0 +2024-08-03 17:07:47,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=176784.66666666666, ans=0.0 +2024-08-03 17:07:47,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=176784.66666666666, ans=0.035 +2024-08-03 17:07:55,278 INFO [train.py:1114] (3/4) Epoch 14, batch 900, loss[loss=0.1915, simple_loss=0.2785, pruned_loss=0.05222, over 13345.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.28, pruned_loss=0.0563, over 2612902.31 frames. 
], batch size: 33, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:08:14,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176894.66666666666, ans=0.125 +2024-08-03 17:08:30,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=176931.33333333334, ans=0.0 +2024-08-03 17:08:31,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=176931.33333333334, ans=0.02 +2024-08-03 17:08:41,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=176968.0, ans=0.125 +2024-08-03 17:08:42,701 INFO [train.py:1114] (3/4) Epoch 14, batch 950, loss[loss=0.1821, simple_loss=0.2646, pruned_loss=0.04981, over 13532.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.28, pruned_loss=0.05608, over 2614165.26 frames. ], batch size: 34, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:08:45,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=177004.66666666666, ans=0.07 +2024-08-03 17:09:01,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.96 vs. limit=12.0 +2024-08-03 17:09:05,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177078.0, ans=0.125 +2024-08-03 17:09:06,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.48 vs. limit=15.0 +2024-08-03 17:09:15,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=177114.66666666666, ans=0.125 +2024-08-03 17:09:15,827 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.964e+01 1.158e+02 1.398e+02 1.727e+02 2.347e+02, threshold=2.796e+02, percent-clipped=0.0 +2024-08-03 17:09:30,713 INFO [train.py:1114] (3/4) Epoch 14, batch 1000, loss[loss=0.1756, simple_loss=0.2671, pruned_loss=0.04206, over 13351.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.281, pruned_loss=0.05642, over 2611640.60 frames. 
], batch size: 35, lr: 8.99e-03, grad_scale: 16.0 +2024-08-03 17:09:36,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=177188.0, ans=0.025 +2024-08-03 17:09:37,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177188.0, ans=0.1 +2024-08-03 17:09:40,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=177224.66666666666, ans=0.0 +2024-08-03 17:09:49,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=177261.33333333334, ans=0.05 +2024-08-03 17:09:52,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=177261.33333333334, ans=0.05 +2024-08-03 17:09:55,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177261.33333333334, ans=0.1 +2024-08-03 17:10:09,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=177334.66666666666, ans=0.2 +2024-08-03 17:10:18,995 INFO [train.py:1114] (3/4) Epoch 14, batch 1050, loss[loss=0.2019, simple_loss=0.2946, pruned_loss=0.05457, over 13579.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2801, pruned_loss=0.05595, over 2615252.13 frames. ], batch size: 39, lr: 8.98e-03, grad_scale: 16.0 +2024-08-03 17:10:20,570 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.07 vs. limit=10.0 +2024-08-03 17:10:21,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=177371.33333333334, ans=0.125 +2024-08-03 17:10:24,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=177371.33333333334, ans=0.025 +2024-08-03 17:10:34,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=177408.0, ans=0.125 +2024-08-03 17:10:51,918 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.165e+01 1.080e+02 1.235e+02 1.446e+02 2.124e+02, threshold=2.470e+02, percent-clipped=0.0 +2024-08-03 17:11:06,532 INFO [train.py:1114] (3/4) Epoch 14, batch 1100, loss[loss=0.1971, simple_loss=0.283, pruned_loss=0.05558, over 13552.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2802, pruned_loss=0.05596, over 2618900.70 frames. ], batch size: 36, lr: 8.98e-03, grad_scale: 16.0 +2024-08-03 17:11:08,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177554.66666666666, ans=0.125 +2024-08-03 17:11:10,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.27 vs. limit=15.0 +2024-08-03 17:11:32,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=177628.0, ans=0.125 +2024-08-03 17:11:32,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.33 vs. 
limit=15.0 +2024-08-03 17:11:32,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177628.0, ans=0.1 +2024-08-03 17:11:39,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177664.66666666666, ans=0.125 +2024-08-03 17:11:53,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=177701.33333333334, ans=0.0 +2024-08-03 17:11:54,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=177701.33333333334, ans=0.125 +2024-08-03 17:11:56,967 INFO [train.py:1114] (3/4) Epoch 14, batch 1150, loss[loss=0.1933, simple_loss=0.2783, pruned_loss=0.05415, over 13556.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2807, pruned_loss=0.05631, over 2617781.87 frames. ], batch size: 36, lr: 8.97e-03, grad_scale: 16.0 +2024-08-03 17:11:58,051 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:12:02,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177738.0, ans=0.125 +2024-08-03 17:12:06,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=177774.66666666666, ans=0.2 +2024-08-03 17:12:06,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=177774.66666666666, ans=0.0 +2024-08-03 17:12:13,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177774.66666666666, ans=0.1 +2024-08-03 17:12:14,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.89 vs. limit=10.0 +2024-08-03 17:12:28,538 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.383e+01 1.181e+02 1.323e+02 1.686e+02 3.018e+02, threshold=2.646e+02, percent-clipped=3.0 +2024-08-03 17:12:28,841 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:12:43,146 INFO [train.py:1114] (3/4) Epoch 14, batch 1200, loss[loss=0.1968, simple_loss=0.2835, pruned_loss=0.05504, over 13550.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2812, pruned_loss=0.05658, over 2615171.32 frames. ], batch size: 39, lr: 8.97e-03, grad_scale: 32.0 +2024-08-03 17:12:52,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.28 vs. limit=15.0 +2024-08-03 17:13:00,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=177994.66666666666, ans=0.2 +2024-08-03 17:13:01,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.45 vs. 
limit=22.5 +2024-08-03 17:13:02,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177994.66666666666, ans=0.125 +2024-08-03 17:13:03,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=177994.66666666666, ans=0.0 +2024-08-03 17:13:20,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.08 vs. limit=15.0 +2024-08-03 17:13:21,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=178068.0, ans=0.5 +2024-08-03 17:13:26,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=178068.0, ans=0.0 +2024-08-03 17:13:28,039 INFO [train.py:1114] (3/4) Epoch 14, batch 1250, loss[loss=0.2117, simple_loss=0.2984, pruned_loss=0.06252, over 13450.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2824, pruned_loss=0.05681, over 2627064.43 frames. ], batch size: 42, lr: 8.96e-03, grad_scale: 32.0 +2024-08-03 17:13:42,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178141.33333333334, ans=0.0 +2024-08-03 17:13:56,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=178214.66666666666, ans=0.2 +2024-08-03 17:13:58,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=178214.66666666666, ans=0.125 +2024-08-03 17:14:01,211 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.606e+01 1.145e+02 1.312e+02 1.553e+02 2.666e+02, threshold=2.625e+02, percent-clipped=1.0 +2024-08-03 17:14:03,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178214.66666666666, ans=0.0 +2024-08-03 17:14:07,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=178251.33333333334, ans=0.2 +2024-08-03 17:14:10,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=178251.33333333334, ans=0.0 +2024-08-03 17:14:15,835 INFO [train.py:1114] (3/4) Epoch 14, batch 1300, loss[loss=0.2315, simple_loss=0.3084, pruned_loss=0.07723, over 12883.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2814, pruned_loss=0.05642, over 2629301.22 frames. ], batch size: 52, lr: 8.96e-03, grad_scale: 32.0 +2024-08-03 17:14:25,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=178288.0, ans=0.025 +2024-08-03 17:14:26,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178324.66666666666, ans=0.125 +2024-08-03 17:14:36,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.27 vs. limit=15.0 +2024-08-03 17:14:44,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.32 vs. 
limit=6.0 +2024-08-03 17:15:05,134 INFO [train.py:1114] (3/4) Epoch 14, batch 1350, loss[loss=0.2031, simple_loss=0.2897, pruned_loss=0.05823, over 13550.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2809, pruned_loss=0.05587, over 2637125.28 frames. ], batch size: 37, lr: 8.95e-03, grad_scale: 32.0 +2024-08-03 17:15:11,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178471.33333333334, ans=0.125 +2024-08-03 17:15:12,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178471.33333333334, ans=0.1 +2024-08-03 17:15:13,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=178508.0, ans=0.2 +2024-08-03 17:15:14,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.96 vs. limit=22.5 +2024-08-03 17:15:15,362 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:15:37,517 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.701e+01 1.127e+02 1.257e+02 1.561e+02 2.635e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 17:15:38,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=178581.33333333334, ans=0.0 +2024-08-03 17:15:51,671 INFO [train.py:1114] (3/4) Epoch 14, batch 1400, loss[loss=0.1633, simple_loss=0.2441, pruned_loss=0.04121, over 13263.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2801, pruned_loss=0.05563, over 2641577.01 frames. ], batch size: 31, lr: 8.95e-03, grad_scale: 16.0 +2024-08-03 17:15:59,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178654.66666666666, ans=0.125 +2024-08-03 17:16:02,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.07 vs. limit=22.5 +2024-08-03 17:16:05,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178691.33333333334, ans=0.125 +2024-08-03 17:16:10,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=178728.0, ans=0.125 +2024-08-03 17:16:17,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=15.0 +2024-08-03 17:16:18,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=178764.66666666666, ans=0.0 +2024-08-03 17:16:28,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=178801.33333333334, ans=0.125 +2024-08-03 17:16:36,762 INFO [train.py:1114] (3/4) Epoch 14, batch 1450, loss[loss=0.1904, simple_loss=0.2738, pruned_loss=0.05352, over 13396.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2806, pruned_loss=0.05587, over 2640818.66 frames. 
], batch size: 43, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:16:52,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=178874.66666666666, ans=0.125 +2024-08-03 17:16:57,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178911.33333333334, ans=0.1 +2024-08-03 17:17:08,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.238e+01 1.149e+02 1.315e+02 1.594e+02 2.634e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 17:17:16,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=178984.66666666666, ans=0.125 +2024-08-03 17:17:24,002 INFO [train.py:1114] (3/4) Epoch 14, batch 1500, loss[loss=0.2371, simple_loss=0.3171, pruned_loss=0.07856, over 13405.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2804, pruned_loss=0.05587, over 2640548.55 frames. ], batch size: 39, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:17:28,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=179021.33333333334, ans=0.95 +2024-08-03 17:17:39,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.53 vs. limit=15.0 +2024-08-03 17:17:42,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=179094.66666666666, ans=0.125 +2024-08-03 17:17:58,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179131.33333333334, ans=0.125 +2024-08-03 17:18:02,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.99 vs. limit=12.0 +2024-08-03 17:18:11,858 INFO [train.py:1114] (3/4) Epoch 14, batch 1550, loss[loss=0.1811, simple_loss=0.2794, pruned_loss=0.04143, over 13421.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2806, pruned_loss=0.05613, over 2630630.65 frames. ], batch size: 41, lr: 8.94e-03, grad_scale: 16.0 +2024-08-03 17:18:28,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=12.0 +2024-08-03 17:18:34,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179278.0, ans=0.1 +2024-08-03 17:18:45,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.351e+01 1.093e+02 1.288e+02 1.698e+02 2.728e+02, threshold=2.576e+02, percent-clipped=2.0 +2024-08-03 17:18:49,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179314.66666666666, ans=0.125 +2024-08-03 17:18:52,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.76 vs. limit=15.0 +2024-08-03 17:18:59,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179388.0, ans=0.125 +2024-08-03 17:19:00,555 INFO [train.py:1114] (3/4) Epoch 14, batch 1600, loss[loss=0.1978, simple_loss=0.2861, pruned_loss=0.05478, over 13583.00 frames. 
], tot_loss[loss=0.1964, simple_loss=0.2805, pruned_loss=0.05616, over 2625122.93 frames. ], batch size: 39, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:05,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179388.0, ans=0.125 +2024-08-03 17:19:09,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179424.66666666666, ans=0.125 +2024-08-03 17:19:20,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179461.33333333334, ans=0.1 +2024-08-03 17:19:29,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=179498.0, ans=0.025 +2024-08-03 17:19:45,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179571.33333333334, ans=0.125 +2024-08-03 17:19:46,135 INFO [train.py:1114] (3/4) Epoch 14, batch 1650, loss[loss=0.1863, simple_loss=0.273, pruned_loss=0.04975, over 13331.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2804, pruned_loss=0.05611, over 2622035.58 frames. ], batch size: 40, lr: 8.93e-03, grad_scale: 32.0 +2024-08-03 17:19:52,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=179571.33333333334, ans=0.1 +2024-08-03 17:19:52,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.02 vs. limit=10.0 +2024-08-03 17:19:57,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179608.0, ans=0.0 +2024-08-03 17:20:14,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=179681.33333333334, ans=0.0 +2024-08-03 17:20:33,194 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.085e+01 1.145e+02 1.327e+02 1.825e+02 3.127e+02, threshold=2.655e+02, percent-clipped=5.0 +2024-08-03 17:20:39,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-08-03 17:20:44,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179718.0, ans=0.125 +2024-08-03 17:20:46,571 INFO [train.py:1114] (3/4) Epoch 14, batch 1700, loss[loss=0.1921, simple_loss=0.2642, pruned_loss=0.06002, over 13236.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2799, pruned_loss=0.05574, over 2630631.46 frames. 
], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:20:54,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=179754.66666666666, ans=15.0 +2024-08-03 17:20:55,035 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:21:04,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=179791.33333333334, ans=0.5 +2024-08-03 17:21:13,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=179828.0, ans=0.125 +2024-08-03 17:21:33,627 INFO [train.py:1114] (3/4) Epoch 14, batch 1750, loss[loss=0.1643, simple_loss=0.2458, pruned_loss=0.04137, over 13532.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2796, pruned_loss=0.05591, over 2633293.74 frames. ], batch size: 31, lr: 8.92e-03, grad_scale: 32.0 +2024-08-03 17:21:34,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=179938.0, ans=0.125 +2024-08-03 17:21:35,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=179938.0, ans=0.0 +2024-08-03 17:21:39,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=179938.0, ans=0.2 +2024-08-03 17:21:42,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=12.0 +2024-08-03 17:21:48,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179974.66666666666, ans=0.04949747468305833 +2024-08-03 17:21:55,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=180011.33333333334, ans=0.125 +2024-08-03 17:22:07,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.076e+01 1.125e+02 1.266e+02 1.724e+02 3.044e+02, threshold=2.532e+02, percent-clipped=5.0 +2024-08-03 17:22:22,979 INFO [train.py:1114] (3/4) Epoch 14, batch 1800, loss[loss=0.2007, simple_loss=0.2883, pruned_loss=0.05651, over 13536.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.28, pruned_loss=0.05613, over 2634401.29 frames. ], batch size: 38, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:22:23,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180121.33333333334, ans=0.1 +2024-08-03 17:22:40,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180158.0, ans=0.1 +2024-08-03 17:22:41,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.01 vs. 
limit=15.0 +2024-08-03 17:22:49,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180194.66666666666, ans=0.125 +2024-08-03 17:22:58,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180231.33333333334, ans=0.1 +2024-08-03 17:23:04,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=180268.0, ans=0.0 +2024-08-03 17:23:08,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=180268.0, ans=0.0 +2024-08-03 17:23:10,716 INFO [train.py:1114] (3/4) Epoch 14, batch 1850, loss[loss=0.1963, simple_loss=0.2927, pruned_loss=0.04992, over 13388.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2801, pruned_loss=0.056, over 2636998.87 frames. ], batch size: 39, lr: 8.91e-03, grad_scale: 32.0 +2024-08-03 17:23:17,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180304.66666666666, ans=0.125 +2024-08-03 17:23:21,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=180341.33333333334, ans=10.0 +2024-08-03 17:23:23,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=12.0 +2024-08-03 17:23:27,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180341.33333333334, ans=0.125 +2024-08-03 17:23:42,952 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.173e+01 1.187e+02 1.383e+02 1.867e+02 3.590e+02, threshold=2.765e+02, percent-clipped=8.0 +2024-08-03 17:23:48,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=180451.33333333334, ans=0.0 +2024-08-03 17:23:56,687 INFO [train.py:1114] (3/4) Epoch 14, batch 1900, loss[loss=0.1914, simple_loss=0.2871, pruned_loss=0.04782, over 13327.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.28, pruned_loss=0.0556, over 2638892.62 frames. ], batch size: 40, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:24:12,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.32 vs. limit=10.0 +2024-08-03 17:24:22,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-08-03 17:24:24,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180598.0, ans=0.1 +2024-08-03 17:24:43,812 INFO [train.py:1114] (3/4) Epoch 14, batch 1950, loss[loss=0.1942, simple_loss=0.2746, pruned_loss=0.05694, over 13557.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2809, pruned_loss=0.05564, over 2645804.81 frames. 
], batch size: 36, lr: 8.90e-03, grad_scale: 16.0 +2024-08-03 17:24:52,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180708.0, ans=0.125 +2024-08-03 17:24:56,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-03 17:24:58,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=180708.0, ans=0.125 +2024-08-03 17:25:00,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180708.0, ans=0.125 +2024-08-03 17:25:19,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.397e+01 1.089e+02 1.232e+02 1.473e+02 2.566e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 17:25:20,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=180781.33333333334, ans=0.125 +2024-08-03 17:25:26,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=180818.0, ans=0.0 +2024-08-03 17:25:28,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=180818.0, ans=0.0 +2024-08-03 17:25:32,198 INFO [train.py:1114] (3/4) Epoch 14, batch 2000, loss[loss=0.1603, simple_loss=0.2395, pruned_loss=0.04056, over 13541.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2816, pruned_loss=0.05602, over 2635494.12 frames. ], batch size: 31, lr: 8.90e-03, grad_scale: 32.0 +2024-08-03 17:25:45,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=180891.33333333334, ans=0.125 +2024-08-03 17:26:08,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180964.66666666666, ans=0.125 +2024-08-03 17:26:21,689 INFO [train.py:1114] (3/4) Epoch 14, batch 2050, loss[loss=0.2058, simple_loss=0.2739, pruned_loss=0.0688, over 13438.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2813, pruned_loss=0.05645, over 2632998.57 frames. ], batch size: 32, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:26:29,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181074.66666666666, ans=0.1 +2024-08-03 17:26:54,263 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.315e+01 1.117e+02 1.302e+02 1.562e+02 2.500e+02, threshold=2.604e+02, percent-clipped=1.0 +2024-08-03 17:26:57,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.61 vs. limit=15.0 +2024-08-03 17:27:06,805 INFO [train.py:1114] (3/4) Epoch 14, batch 2100, loss[loss=0.1668, simple_loss=0.2617, pruned_loss=0.03596, over 13551.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2806, pruned_loss=0.05567, over 2638409.85 frames. 
], batch size: 37, lr: 8.89e-03, grad_scale: 32.0 +2024-08-03 17:27:11,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=181221.33333333334, ans=0.125 +2024-08-03 17:27:13,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181221.33333333334, ans=0.125 +2024-08-03 17:27:28,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=181294.66666666666, ans=0.125 +2024-08-03 17:27:28,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=181294.66666666666, ans=0.125 +2024-08-03 17:27:28,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0 +2024-08-03 17:27:48,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=181368.0, ans=0.0 +2024-08-03 17:27:51,988 INFO [train.py:1114] (3/4) Epoch 14, batch 2150, loss[loss=0.1797, simple_loss=0.2626, pruned_loss=0.04836, over 13556.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2794, pruned_loss=0.05533, over 2647593.24 frames. ], batch size: 36, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:27:55,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=181404.66666666666, ans=0.0 +2024-08-03 17:27:56,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=181404.66666666666, ans=0.125 +2024-08-03 17:28:06,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0 +2024-08-03 17:28:06,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=181441.33333333334, ans=0.2 +2024-08-03 17:28:17,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=181478.0, ans=0.125 +2024-08-03 17:28:26,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.184e+01 1.154e+02 1.453e+02 1.954e+02 3.704e+02, threshold=2.907e+02, percent-clipped=11.0 +2024-08-03 17:28:32,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=181551.33333333334, ans=0.125 +2024-08-03 17:28:39,550 INFO [train.py:1114] (3/4) Epoch 14, batch 2200, loss[loss=0.2086, simple_loss=0.2939, pruned_loss=0.06163, over 13395.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2795, pruned_loss=0.05526, over 2645838.88 frames. 
], batch size: 39, lr: 8.88e-03, grad_scale: 32.0 +2024-08-03 17:28:44,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181588.0, ans=0.1 +2024-08-03 17:28:46,161 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=7.767e-03 +2024-08-03 17:28:48,937 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:28:56,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181624.66666666666, ans=0.125 +2024-08-03 17:29:07,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.08 vs. limit=10.0 +2024-08-03 17:29:19,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=181734.66666666666, ans=0.0 +2024-08-03 17:29:22,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181734.66666666666, ans=0.125 +2024-08-03 17:29:23,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=181734.66666666666, ans=0.0 +2024-08-03 17:29:26,903 INFO [train.py:1114] (3/4) Epoch 14, batch 2250, loss[loss=0.1867, simple_loss=0.2734, pruned_loss=0.04999, over 13362.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2797, pruned_loss=0.05541, over 2642860.29 frames. ], batch size: 37, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:29:27,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=181771.33333333334, ans=0.125 +2024-08-03 17:29:27,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.17 vs. limit=15.0 +2024-08-03 17:29:36,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181808.0, ans=0.1 +2024-08-03 17:29:39,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=181808.0, ans=0.125 +2024-08-03 17:29:52,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.23 vs. limit=15.0 +2024-08-03 17:29:59,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181881.33333333334, ans=0.1 +2024-08-03 17:30:03,290 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.261e+01 1.193e+02 1.520e+02 1.872e+02 2.993e+02, threshold=3.040e+02, percent-clipped=1.0 +2024-08-03 17:30:13,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=181918.0, ans=0.0 +2024-08-03 17:30:14,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.01 vs. limit=15.0 +2024-08-03 17:30:15,674 INFO [train.py:1114] (3/4) Epoch 14, batch 2300, loss[loss=0.1754, simple_loss=0.2555, pruned_loss=0.04768, over 13567.00 frames. 
], tot_loss[loss=0.1938, simple_loss=0.278, pruned_loss=0.0548, over 2638650.42 frames. ], batch size: 33, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:30:20,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=181954.66666666666, ans=0.125 +2024-08-03 17:30:24,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=181991.33333333334, ans=0.0 +2024-08-03 17:30:37,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=182028.0, ans=0.1 +2024-08-03 17:30:40,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=182028.0, ans=0.1 +2024-08-03 17:31:01,039 INFO [train.py:1114] (3/4) Epoch 14, batch 2350, loss[loss=0.2087, simple_loss=0.2977, pruned_loss=0.05988, over 13544.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2784, pruned_loss=0.055, over 2641064.07 frames. ], batch size: 38, lr: 8.87e-03, grad_scale: 32.0 +2024-08-03 17:31:10,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=182174.66666666666, ans=0.125 +2024-08-03 17:31:33,681 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.345e+01 1.136e+02 1.357e+02 1.723e+02 3.270e+02, threshold=2.715e+02, percent-clipped=1.0 +2024-08-03 17:31:36,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=182284.66666666666, ans=0.125 +2024-08-03 17:31:45,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182321.33333333334, ans=0.125 +2024-08-03 17:31:46,450 INFO [train.py:1114] (3/4) Epoch 14, batch 2400, loss[loss=0.1818, simple_loss=0.2625, pruned_loss=0.0506, over 13531.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2789, pruned_loss=0.05517, over 2642209.05 frames. ], batch size: 35, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:31:47,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=182321.33333333334, ans=0.07 +2024-08-03 17:31:47,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=182321.33333333334, ans=0.0 +2024-08-03 17:32:07,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=182394.66666666666, ans=0.125 +2024-08-03 17:32:36,580 INFO [train.py:1114] (3/4) Epoch 14, batch 2450, loss[loss=0.2001, simple_loss=0.2959, pruned_loss=0.05213, over 13346.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2801, pruned_loss=0.05571, over 2632017.87 frames. 
], batch size: 37, lr: 8.86e-03, grad_scale: 32.0 +2024-08-03 17:32:38,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=182504.66666666666, ans=0.2 +2024-08-03 17:33:01,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=182578.0, ans=0.0 +2024-08-03 17:33:11,020 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.955e+01 1.136e+02 1.286e+02 1.596e+02 2.665e+02, threshold=2.571e+02, percent-clipped=0.0 +2024-08-03 17:33:25,713 INFO [train.py:1114] (3/4) Epoch 14, batch 2500, loss[loss=0.2217, simple_loss=0.3063, pruned_loss=0.06853, over 13409.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2798, pruned_loss=0.0556, over 2636481.06 frames. ], batch size: 39, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:33:30,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182688.0, ans=0.1 +2024-08-03 17:33:30,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=182688.0, ans=0.09899494936611666 +2024-08-03 17:33:35,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=182724.66666666666, ans=0.025 +2024-08-03 17:33:40,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.99 vs. limit=10.0 +2024-08-03 17:33:41,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-08-03 17:33:43,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.81 vs. limit=22.5 +2024-08-03 17:33:55,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-08-03 17:34:05,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=182834.66666666666, ans=0.0 +2024-08-03 17:34:09,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=182871.33333333334, ans=0.0 +2024-08-03 17:34:09,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182871.33333333334, ans=0.1 +2024-08-03 17:34:09,941 INFO [train.py:1114] (3/4) Epoch 14, batch 2550, loss[loss=0.1778, simple_loss=0.2547, pruned_loss=0.05042, over 13523.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2793, pruned_loss=0.05533, over 2638092.42 frames. 
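The recurring `optim.py` WARNING lines report an adaptive gradient-clipping rule: the five logged numbers are the min/25%/median/75%/max of recent gradient norms, and the threshold tracks the median scaled by `Clipping_scale` (in the first warning above, 2.0 x 1.520e+02 gives exactly the logged threshold=3.040e+02). A hedged sketch of that bookkeeping follows; the window size and the exact statistic are assumptions inferred from the log values.

```python
# Sketch of quartile-based adaptive clipping, assuming threshold =
# clipping_scale * running median of recent total gradient norms.
from collections import deque
import torch

class QuartileClipper:
    def __init__(self, clipping_scale=2.0, window=1024):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)   # recent total grad norms

    def clip_(self, params):
        params = [p for p in params if p.grad is not None]
        total = torch.norm(torch.stack([p.grad.norm() for p in params]))
        self.norms.append(total.item())
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()      # scale * median
        if total.item() > threshold:              # "percent-clipped" cases
            for p in params:
                p.grad.mul_(threshold / total.item())
        return q, threshold

# usage: q, thr = QuartileClipper().clip_(model.parameters())
```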
], batch size: 31, lr: 8.85e-03, grad_scale: 32.0 +2024-08-03 17:34:41,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.643e+01 1.149e+02 1.432e+02 2.081e+02 4.007e+02, threshold=2.864e+02, percent-clipped=10.0 +2024-08-03 17:34:49,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=183018.0, ans=0.125 +2024-08-03 17:34:51,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183018.0, ans=0.1 +2024-08-03 17:34:53,669 INFO [train.py:1114] (3/4) Epoch 14, batch 2600, loss[loss=0.1853, simple_loss=0.2716, pruned_loss=0.0495, over 13555.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2796, pruned_loss=0.05558, over 2637897.49 frames. ], batch size: 36, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:35:04,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=183091.33333333334, ans=0.05 +2024-08-03 17:35:07,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=183091.33333333334, ans=0.0 +2024-08-03 17:35:25,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.02 vs. limit=22.5 +2024-08-03 17:35:32,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=183201.33333333334, ans=0.125 +2024-08-03 17:35:37,999 INFO [train.py:1114] (3/4) Epoch 14, batch 2650, loss[loss=0.2034, simple_loss=0.2873, pruned_loss=0.05978, over 13349.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2802, pruned_loss=0.05593, over 2640352.28 frames. ], batch size: 46, lr: 8.84e-03, grad_scale: 32.0 +2024-08-03 17:35:41,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.26 vs. limit=22.5 +2024-08-03 17:35:46,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=183274.66666666666, ans=0.125 +2024-08-03 17:35:46,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.46 vs. limit=15.0 +2024-08-03 17:35:49,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=183274.66666666666, ans=0.2 +2024-08-03 17:35:56,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.04 vs. 
limit=15.0 +2024-08-03 17:35:59,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183311.33333333334, ans=0.125 +2024-08-03 17:36:09,722 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.484e+01 1.156e+02 1.338e+02 1.561e+02 2.649e+02, threshold=2.677e+02, percent-clipped=0.0 +2024-08-03 17:36:20,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183384.66666666666, ans=0.1 +2024-08-03 17:36:47,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=183421.33333333334, ans=0.1 +2024-08-03 17:36:47,880 INFO [train.py:1114] (3/4) Epoch 14, batch 2700, loss[loss=0.2121, simple_loss=0.3044, pruned_loss=0.05987, over 13538.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2801, pruned_loss=0.0558, over 2637125.17 frames. ], batch size: 40, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:36:57,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183458.0, ans=0.1 +2024-08-03 17:37:02,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183458.0, ans=0.1 +2024-08-03 17:37:29,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=183568.0, ans=0.125 +2024-08-03 17:37:31,436 INFO [train.py:1114] (3/4) Epoch 14, batch 2750, loss[loss=0.1915, simple_loss=0.2708, pruned_loss=0.05615, over 13346.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2789, pruned_loss=0.05541, over 2635500.80 frames. ], batch size: 34, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:37:37,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=183604.66666666666, ans=0.025 +2024-08-03 17:37:41,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=183641.33333333334, ans=0.125 +2024-08-03 17:37:52,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183678.0, ans=0.125 +2024-08-03 17:37:58,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183714.66666666666, ans=0.125 +2024-08-03 17:38:00,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183714.66666666666, ans=0.125 +2024-08-03 17:38:02,650 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.576e+01 1.249e+02 1.500e+02 2.010e+02 3.327e+02, threshold=3.000e+02, percent-clipped=3.0 +2024-08-03 17:38:05,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0 +2024-08-03 17:38:15,244 INFO [train.py:1114] (3/4) Epoch 14, batch 2800, loss[loss=0.2273, simple_loss=0.3011, pruned_loss=0.07668, over 9597.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2803, pruned_loss=0.05628, over 2626867.87 frames. 
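The `Whitening` lines compare a per-module statistic against a limit (e.g. `metric=12.66 vs. limit=15.0` just above). One standard way to quantify how far activations are from "white" is the eigenvalue-spread ratio E[lambda^2] / (E[lambda])^2 of the channel covariance, which is 1.0 for perfectly white features and grows as activations collapse onto a few directions; whether `scaling.py` uses exactly this form is an assumption. A sketch:

```python
# Hedged sketch of a whitening metric: d * trace(C^2) / trace(C)^2,
# i.e. E[lambda^2] / (E[lambda])^2 over the covariance eigenvalues.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations
    x = x - x.mean(dim=0)
    c = (x.T @ x) / x.shape[0]          # channel covariance
    d = c.shape[0]
    return (d * (c * c).sum() / c.diag().sum() ** 2).item()

x = torch.randn(1000, 512)              # roughly white -> metric near 1
print(whitening_metric(x))
# training would only add a penalty when the metric exceeds the logged limit
```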
], batch size: 97, lr: 8.83e-03, grad_scale: 32.0 +2024-08-03 17:38:19,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=12.0 +2024-08-03 17:38:24,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=183824.66666666666, ans=0.125 +2024-08-03 17:38:27,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=183824.66666666666, ans=0.1 +2024-08-03 17:38:38,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=183861.33333333334, ans=0.2 +2024-08-03 17:38:51,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=15.0 +2024-08-03 17:38:58,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=183934.66666666666, ans=0.0 +2024-08-03 17:39:00,484 INFO [train.py:1114] (3/4) Epoch 14, batch 2850, loss[loss=0.1918, simple_loss=0.2705, pruned_loss=0.05649, over 13362.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2809, pruned_loss=0.05676, over 2620830.72 frames. ], batch size: 35, lr: 8.82e-03, grad_scale: 32.0 +2024-08-03 17:39:11,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=184008.0, ans=0.125 +2024-08-03 17:39:23,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=184044.66666666666, ans=0.125 +2024-08-03 17:39:24,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=184044.66666666666, ans=0.125 +2024-08-03 17:39:29,302 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.32 vs. limit=22.5 +2024-08-03 17:39:33,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.234e+01 1.082e+02 1.194e+02 1.402e+02 2.334e+02, threshold=2.389e+02, percent-clipped=0.0 +2024-08-03 17:39:38,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.70 vs. limit=15.0 +2024-08-03 17:39:42,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.79 vs. limit=15.0 +2024-08-03 17:39:45,198 INFO [train.py:1114] (3/4) Epoch 14, batch 2900, loss[loss=0.1991, simple_loss=0.2799, pruned_loss=0.0592, over 13366.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2821, pruned_loss=0.05705, over 2631535.00 frames. 
], batch size: 36, lr: 8.82e-03, grad_scale: 32.0
+2024-08-03 17:39:54,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=184191.33333333334, ans=0.2
+2024-08-03 17:40:05,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184228.0, ans=0.1
+2024-08-03 17:40:06,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=184228.0, ans=0.07
+2024-08-03 17:40:08,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=184228.0, ans=0.125
+2024-08-03 17:40:11,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=184264.66666666666, ans=0.025
+2024-08-03 17:40:15,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=184264.66666666666, ans=0.125
+2024-08-03 17:40:20,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184301.33333333334, ans=0.1
+2024-08-03 17:40:28,419 INFO [train.py:1114] (3/4) Epoch 14, batch 2950, loss[loss=0.1921, simple_loss=0.2799, pruned_loss=0.05212, over 13328.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2809, pruned_loss=0.0567, over 2629844.71 frames. ], batch size: 34, lr: 8.81e-03, grad_scale: 32.0
+2024-08-03 17:40:40,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.81 vs. limit=15.0
+2024-08-03 17:40:46,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=12.0
+2024-08-03 17:40:49,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184411.33333333334, ans=0.0
+2024-08-03 17:40:52,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=184411.33333333334, ans=0.0
+2024-08-03 17:40:54,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=184448.0, ans=0.0
+2024-08-03 17:41:00,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.120e+01 1.167e+02 1.400e+02 1.731e+02 2.660e+02, threshold=2.799e+02, percent-clipped=4.0
+2024-08-03 17:41:13,019 INFO [train.py:1114] (3/4) Epoch 14, batch 3000, loss[loss=0.1795, simple_loss=0.2734, pruned_loss=0.04282, over 13539.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2803, pruned_loss=0.05622, over 2629640.90 frames. ], batch size: 37, lr: 8.81e-03, grad_scale: 32.0
+2024-08-03 17:41:13,019 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 17:41:19,108 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([1.8575, 2.2043, 1.8451, 2.1932, 2.8405, 2.5969, 2.6156, 2.8593],
+ device='cuda:3')
+2024-08-03 17:41:23,008 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1738, simple_loss=0.2731, pruned_loss=0.03723, over 944034.00 frames.
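The `Computing validation loss` block above shows the shape of the periodic eval pass: a forward-only sweep over a held-out set, a per-head attention-entropy diagnostic, and a peak-memory report (first line below). A sketch with placeholder model and loader interfaces; the real `train.py` signatures are not shown in this log.

```python
# Hedged sketch of the validation pass. model(batch) returning
# (frame-average loss, num_frames) is an assumed interface.
import torch

def validate(model, dev_loader, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in dev_loader:
            loss, num_frames = model(batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    if torch.cuda.is_available():
        mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        print(f"Maximum memory allocated so far is {mem_mb}MB")
    return tot_loss / tot_frames

def attn_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # per-head mean entropy of (heads, tgt, src) attention distributions,
    # like the 8-value attn_weights_entropy tensor logged above
    return -(attn_weights * (attn_weights + 1e-20).log()).sum(-1).mean(-1)
```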
+2024-08-03 17:41:23,008 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 17:41:24,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=184521.33333333334, ans=0.125 +2024-08-03 17:42:00,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=184668.0, ans=0.125 +2024-08-03 17:42:07,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184668.0, ans=0.0 +2024-08-03 17:42:08,957 INFO [train.py:1114] (3/4) Epoch 14, batch 3050, loss[loss=0.17, simple_loss=0.2552, pruned_loss=0.04243, over 13547.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2813, pruned_loss=0.0567, over 2626747.04 frames. ], batch size: 35, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:42:21,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184741.33333333334, ans=0.125 +2024-08-03 17:42:30,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=184778.0, ans=0.0 +2024-08-03 17:42:32,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=184778.0, ans=0.2 +2024-08-03 17:42:41,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.416e+01 1.107e+02 1.206e+02 1.425e+02 2.070e+02, threshold=2.412e+02, percent-clipped=0.0 +2024-08-03 17:42:50,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184851.33333333334, ans=0.1 +2024-08-03 17:42:52,661 INFO [train.py:1114] (3/4) Epoch 14, batch 3100, loss[loss=0.2192, simple_loss=0.3116, pruned_loss=0.06336, over 13378.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2809, pruned_loss=0.05623, over 2626467.85 frames. ], batch size: 46, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:42:55,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=184888.0, ans=0.025 +2024-08-03 17:42:58,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=184888.0, ans=0.2 +2024-08-03 17:43:12,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184961.33333333334, ans=0.0 +2024-08-03 17:43:13,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.28 vs. limit=15.0 +2024-08-03 17:43:13,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184961.33333333334, ans=0.1 +2024-08-03 17:43:20,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=184998.0, ans=0.125 +2024-08-03 17:43:28,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.12 vs. limit=15.0 +2024-08-03 17:43:35,775 INFO [train.py:1114] (3/4) Epoch 14, batch 3150, loss[loss=0.2246, simple_loss=0.3047, pruned_loss=0.07225, over 13059.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2808, pruned_loss=0.05606, over 2627182.73 frames. 
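The `grad_scale` field in the batch lines is consistent with automatic mixed precision: it sits at 32.0 through most of epoch 14 and halves to 16.0 in the lines above (and halves again to 8.0 further on), which is what a loss scaler does after gradient overflows. A standard `torch.cuda.amp` step of that shape; the model call is a placeholder.

```python
# Sketch of an AMP training step whose scaler would produce a
# grad_scale trace like the one logged above.
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

def train_step(model, optimizer, batch):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(batch)            # assumed to return a scalar loss
    scaler.scale(loss).backward()      # scale loss to avoid fp16 underflow
    scaler.step(optimizer)             # unscales; skips the step on inf/nan
    scaler.update()                    # halves the scale after an overflow
    return loss.detach(), scaler.get_scale()
```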
], batch size: 48, lr: 8.80e-03, grad_scale: 16.0 +2024-08-03 17:43:45,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=185108.0, ans=0.0 +2024-08-03 17:43:50,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=185108.0, ans=0.0 +2024-08-03 17:43:58,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.23 vs. limit=10.0 +2024-08-03 17:44:03,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=185181.33333333334, ans=0.025 +2024-08-03 17:44:03,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=185181.33333333334, ans=0.125 +2024-08-03 17:44:06,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=185181.33333333334, ans=0.0 +2024-08-03 17:44:07,911 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:44:08,664 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.659e+01 1.212e+02 1.528e+02 2.079e+02 4.163e+02, threshold=3.057e+02, percent-clipped=18.0 +2024-08-03 17:44:18,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=185254.66666666666, ans=0.0 +2024-08-03 17:44:18,868 INFO [train.py:1114] (3/4) Epoch 14, batch 3200, loss[loss=0.1806, simple_loss=0.2752, pruned_loss=0.04296, over 13549.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2806, pruned_loss=0.05615, over 2633156.66 frames. ], batch size: 37, lr: 8.79e-03, grad_scale: 16.0 +2024-08-03 17:44:28,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=185291.33333333334, ans=0.125 +2024-08-03 17:44:39,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=185328.0, ans=0.2 +2024-08-03 17:44:40,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=185328.0, ans=0.0 +2024-08-03 17:45:01,559 INFO [train.py:1114] (3/4) Epoch 14, batch 3250, loss[loss=0.2052, simple_loss=0.2925, pruned_loss=0.059, over 13391.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2812, pruned_loss=0.05608, over 2637522.76 frames. ], batch size: 38, lr: 8.79e-03, grad_scale: 16.0 +2024-08-03 17:45:06,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=185438.0, ans=0.2 +2024-08-03 17:45:07,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.58 vs. limit=22.5 +2024-08-03 17:45:15,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.86 vs. 
limit=15.0 +2024-08-03 17:45:17,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=185474.66666666666, ans=0.125 +2024-08-03 17:45:18,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185474.66666666666, ans=0.1 +2024-08-03 17:45:24,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=185511.33333333334, ans=0.0 +2024-08-03 17:45:35,114 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.801e+01 1.165e+02 1.318e+02 1.646e+02 3.018e+02, threshold=2.636e+02, percent-clipped=0.0 +2024-08-03 17:45:37,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185584.66666666666, ans=0.1 +2024-08-03 17:45:37,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=185584.66666666666, ans=0.5 +2024-08-03 17:45:39,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=185584.66666666666, ans=0.0 +2024-08-03 17:45:40,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=185584.66666666666, ans=22.5 +2024-08-03 17:45:45,437 INFO [train.py:1114] (3/4) Epoch 14, batch 3300, loss[loss=0.2166, simple_loss=0.3009, pruned_loss=0.06617, over 12869.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2804, pruned_loss=0.05576, over 2640052.92 frames. ], batch size: 52, lr: 8.78e-03, grad_scale: 16.0 +2024-08-03 17:45:55,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=185658.0, ans=0.0 +2024-08-03 17:46:10,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.34 vs. limit=15.0 +2024-08-03 17:46:18,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185731.33333333334, ans=0.125 +2024-08-03 17:46:27,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=185768.0, ans=0.125 +2024-08-03 17:46:28,652 INFO [train.py:1114] (3/4) Epoch 14, batch 3350, loss[loss=0.2109, simple_loss=0.2984, pruned_loss=0.06167, over 13298.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2813, pruned_loss=0.05641, over 2630966.30 frames. 
], batch size: 49, lr: 8.78e-03, grad_scale: 8.0 +2024-08-03 17:46:28,803 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:46:30,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=185804.66666666666, ans=0.125 +2024-08-03 17:46:32,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=185804.66666666666, ans=0.125 +2024-08-03 17:46:36,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=185841.33333333334, ans=0.5 +2024-08-03 17:46:37,542 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:47:00,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=15.0 +2024-08-03 17:47:01,822 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.039e+01 1.138e+02 1.305e+02 1.515e+02 2.289e+02, threshold=2.609e+02, percent-clipped=0.0 +2024-08-03 17:47:11,855 INFO [train.py:1114] (3/4) Epoch 14, batch 3400, loss[loss=0.1665, simple_loss=0.2444, pruned_loss=0.0443, over 13561.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.28, pruned_loss=0.05584, over 2626646.26 frames. ], batch size: 31, lr: 8.78e-03, grad_scale: 8.0 +2024-08-03 17:47:14,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=185988.0, ans=0.125 +2024-08-03 17:47:15,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.52 vs. limit=6.0 +2024-08-03 17:47:24,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186024.66666666666, ans=0.1 +2024-08-03 17:47:29,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186061.33333333334, ans=0.0 +2024-08-03 17:47:35,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=186061.33333333334, ans=0.125 +2024-08-03 17:47:40,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186098.0, ans=0.1 +2024-08-03 17:47:43,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=186098.0, ans=0.0 +2024-08-03 17:47:55,943 INFO [train.py:1114] (3/4) Epoch 14, batch 3450, loss[loss=0.2257, simple_loss=0.3176, pruned_loss=0.06696, over 12870.00 frames. ], tot_loss[loss=0.196, simple_loss=0.28, pruned_loss=0.05597, over 2629303.87 frames. 
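Each batch line carries two reports: `loss[...]` for the current batch and `tot_loss[...]` averaged "over N frames" of recent data, with `simple_loss` and `pruned_loss` being the two terms of a k2-style pruned transducer objective. One plausible way to maintain such a frame-weighted running figure is sketched below; the decay factor is an assumption, not necessarily the tracker `train.py` actually uses.

```python
# Hedged sketch of a frame-weighted running loss like "tot_loss ... over
# 2626646.26 frames" above. The decay constant is illustrative.
class FrameWeightedLoss:
    def __init__(self, decay=0.999):
        self.decay = decay            # older batches fade out gradually
        self.loss_sum = 0.0
        self.frame_sum = 0.0

    def update(self, batch_loss: float, num_frames: float):
        self.loss_sum = self.decay * self.loss_sum + batch_loss * num_frames
        self.frame_sum = self.decay * self.frame_sum + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frame_sum, 1.0)

tracker = FrameWeightedLoss()
tracker.update(0.1665, 13561.0)       # numbers from the batch 3400 line above
print(tracker.value, tracker.frame_sum)
```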
], batch size: 52, lr: 8.77e-03, grad_scale: 8.0 +2024-08-03 17:48:04,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=186208.0, ans=0.2 +2024-08-03 17:48:14,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=186244.66666666666, ans=0.0 +2024-08-03 17:48:29,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.095e+01 1.141e+02 1.344e+02 1.531e+02 2.504e+02, threshold=2.687e+02, percent-clipped=0.0 +2024-08-03 17:48:29,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=186318.0, ans=0.125 +2024-08-03 17:48:38,413 INFO [train.py:1114] (3/4) Epoch 14, batch 3500, loss[loss=0.1824, simple_loss=0.2682, pruned_loss=0.04827, over 13517.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2801, pruned_loss=0.05629, over 2631429.67 frames. ], batch size: 34, lr: 8.77e-03, grad_scale: 8.0 +2024-08-03 17:48:38,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=186354.66666666666, ans=0.04949747468305833 +2024-08-03 17:48:47,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=186391.33333333334, ans=0.0 +2024-08-03 17:48:48,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=186391.33333333334, ans=0.0 +2024-08-03 17:48:56,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=186428.0, ans=0.04949747468305833 +2024-08-03 17:49:06,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=186464.66666666666, ans=0.125 +2024-08-03 17:49:21,422 INFO [train.py:1114] (3/4) Epoch 14, batch 3550, loss[loss=0.1826, simple_loss=0.2743, pruned_loss=0.0455, over 12374.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2822, pruned_loss=0.05714, over 2629505.02 frames. ], batch size: 58, lr: 8.76e-03, grad_scale: 8.0 +2024-08-03 17:49:35,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.87 vs. limit=15.0 +2024-08-03 17:49:43,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=186611.33333333334, ans=0.1 +2024-08-03 17:49:52,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.25 vs. limit=22.5 +2024-08-03 17:49:56,372 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.367e+01 1.245e+02 1.346e+02 1.510e+02 2.403e+02, threshold=2.693e+02, percent-clipped=0.0 +2024-08-03 17:50:02,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186684.66666666666, ans=0.1 +2024-08-03 17:50:06,479 INFO [train.py:1114] (3/4) Epoch 14, batch 3600, loss[loss=0.2313, simple_loss=0.3072, pruned_loss=0.07765, over 9064.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2871, pruned_loss=0.06151, over 2487190.78 frames. 
], batch size: 96, lr: 8.76e-03, grad_scale: 16.0 +2024-08-03 17:50:08,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.73 vs. limit=10.0 +2024-08-03 17:50:10,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0 +2024-08-03 17:50:13,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=186721.33333333334, ans=0.0 +2024-08-03 17:50:14,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186758.0, ans=0.1 +2024-08-03 17:50:17,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=186758.0, ans=0.0 +2024-08-03 17:50:18,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=186758.0, ans=0.2 +2024-08-03 17:50:24,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=186794.66666666666, ans=0.0 +2024-08-03 17:54:05,177 INFO [train.py:1114] (3/4) Epoch 15, batch 0, loss[loss=0.1764, simple_loss=0.2597, pruned_loss=0.04658, over 13350.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2597, pruned_loss=0.04658, over 13350.00 frames. ], batch size: 33, lr: 8.46e-03, grad_scale: 32.0 +2024-08-03 17:54:05,177 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 17:54:17,157 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1774, simple_loss=0.2778, pruned_loss=0.03851, over 944034.00 frames. +2024-08-03 17:54:17,157 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 17:54:18,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=186871.66666666666, ans=0.2 +2024-08-03 17:54:31,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.86 vs. limit=22.5 +2024-08-03 17:55:12,108 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.609e+01 1.155e+02 1.252e+02 1.382e+02 2.620e+02, threshold=2.503e+02, percent-clipped=0.0 +2024-08-03 17:55:13,965 INFO [train.py:1114] (3/4) Epoch 15, batch 50, loss[loss=0.1723, simple_loss=0.2551, pruned_loss=0.04478, over 13435.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2825, pruned_loss=0.05593, over 578454.31 frames. ], batch size: 32, lr: 8.45e-03, grad_scale: 32.0 +2024-08-03 17:55:21,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.30 vs. limit=12.0 +2024-08-03 17:55:28,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.83 vs. 
limit=22.5 +2024-08-03 17:55:30,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=187091.66666666666, ans=0.2 +2024-08-03 17:55:47,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187165.0, ans=0.125 +2024-08-03 17:56:04,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.02 vs. limit=10.0 +2024-08-03 17:56:05,326 INFO [train.py:1114] (3/4) Epoch 15, batch 100, loss[loss=0.1767, simple_loss=0.2541, pruned_loss=0.04964, over 13530.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2817, pruned_loss=0.05507, over 1027167.06 frames. ], batch size: 35, lr: 8.45e-03, grad_scale: 32.0 +2024-08-03 17:56:06,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=15.0 +2024-08-03 17:56:09,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=187238.33333333334, ans=0.125 +2024-08-03 17:56:13,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=12.0 +2024-08-03 17:56:14,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=187275.0, ans=0.5 +2024-08-03 17:56:17,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187275.0, ans=0.0 +2024-08-03 17:56:18,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=187275.0, ans=0.125 +2024-08-03 17:56:23,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=187311.66666666666, ans=0.125 +2024-08-03 17:56:31,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=187348.33333333334, ans=0.125 +2024-08-03 17:56:48,427 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.485e+01 1.136e+02 1.309e+02 1.649e+02 2.921e+02, threshold=2.617e+02, percent-clipped=2.0 +2024-08-03 17:56:50,231 INFO [train.py:1114] (3/4) Epoch 15, batch 150, loss[loss=0.1724, simple_loss=0.2503, pruned_loss=0.04726, over 13418.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2792, pruned_loss=0.05399, over 1387749.21 frames. ], batch size: 32, lr: 8.44e-03, grad_scale: 32.0 +2024-08-03 17:57:07,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=187458.33333333334, ans=0.07 +2024-08-03 17:57:14,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=187495.0, ans=0.125 +2024-08-03 17:57:30,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=187531.66666666666, ans=0.125 +2024-08-03 17:57:43,993 INFO [train.py:1114] (3/4) Epoch 15, batch 200, loss[loss=0.1952, simple_loss=0.2814, pruned_loss=0.05447, over 12515.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2782, pruned_loss=0.0541, over 1666462.15 frames. 
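The learning rate decays smoothly within an epoch (8.88e-03 down to 8.76e-03 across epoch 14) and then steps down at the epoch boundary (8.46e-03 at the start of epoch 15 above), which matches an Eden-style schedule with both a batch term and an epoch term. A sketch follows; `base_lr`, `lr_batches`, and `lr_epochs` are assumptions chosen only to land in the same ballpark as the logged values.

```python
# Hedged sketch of an Eden-style learning-rate rule. Parameter values
# are assumptions, not the recipe's actual configuration.
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# prints values near the logged 8.8e-03 (epoch 14) and 8.5e-03 (epoch 15)
for epoch, batch in [(14, 182000), (15, 187000)]:
    print(epoch, f"{eden_lr(0.11, batch, epoch):.2e}")
```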
], batch size: 58, lr: 8.44e-03, grad_scale: 32.0 +2024-08-03 17:58:03,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=187678.33333333334, ans=0.2 +2024-08-03 17:58:08,755 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 17:58:29,626 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.054e+01 1.128e+02 1.299e+02 1.749e+02 3.562e+02, threshold=2.599e+02, percent-clipped=4.0 +2024-08-03 17:58:31,514 INFO [train.py:1114] (3/4) Epoch 15, batch 250, loss[loss=0.2126, simple_loss=0.3035, pruned_loss=0.06084, over 13288.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2782, pruned_loss=0.05418, over 1885384.81 frames. ], batch size: 46, lr: 8.44e-03, grad_scale: 32.0 +2024-08-03 17:58:55,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=187861.66666666666, ans=0.035 +2024-08-03 17:59:05,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.44 vs. limit=22.5 +2024-08-03 17:59:14,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=187935.0, ans=0.125 +2024-08-03 17:59:19,929 INFO [train.py:1114] (3/4) Epoch 15, batch 300, loss[loss=0.2067, simple_loss=0.2911, pruned_loss=0.06115, over 13442.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2774, pruned_loss=0.05386, over 2051980.29 frames. ], batch size: 42, lr: 8.43e-03, grad_scale: 32.0 +2024-08-03 17:59:20,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=187971.66666666666, ans=0.125 +2024-08-03 17:59:21,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=187971.66666666666, ans=0.125 +2024-08-03 17:59:34,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=188008.33333333334, ans=0.0 +2024-08-03 17:59:40,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.91 vs. limit=15.0 +2024-08-03 17:59:47,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.28 vs. limit=10.0 +2024-08-03 18:00:01,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=188118.33333333334, ans=0.0 +2024-08-03 18:00:05,579 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.050e+01 1.089e+02 1.187e+02 1.395e+02 2.688e+02, threshold=2.374e+02, percent-clipped=1.0 +2024-08-03 18:00:07,392 INFO [train.py:1114] (3/4) Epoch 15, batch 350, loss[loss=0.168, simple_loss=0.2506, pruned_loss=0.04274, over 13584.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2778, pruned_loss=0.05399, over 2182972.87 frames. 
], batch size: 33, lr: 8.43e-03, grad_scale: 32.0 +2024-08-03 18:00:14,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=188155.0, ans=0.125 +2024-08-03 18:00:23,745 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:00:28,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.23 vs. limit=6.0 +2024-08-03 18:00:33,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.93 vs. limit=22.5 +2024-08-03 18:00:48,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188301.66666666666, ans=0.1 +2024-08-03 18:00:49,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=188301.66666666666, ans=0.2 +2024-08-03 18:00:54,397 INFO [train.py:1114] (3/4) Epoch 15, batch 400, loss[loss=0.1937, simple_loss=0.2734, pruned_loss=0.05703, over 13363.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.278, pruned_loss=0.05419, over 2287078.50 frames. ], batch size: 37, lr: 8.42e-03, grad_scale: 32.0 +2024-08-03 18:01:08,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=188375.0, ans=0.125 +2024-08-03 18:01:31,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188485.0, ans=0.1 +2024-08-03 18:01:38,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.296e+01 1.094e+02 1.225e+02 1.629e+02 4.007e+02, threshold=2.451e+02, percent-clipped=6.0 +2024-08-03 18:01:39,626 INFO [train.py:1114] (3/4) Epoch 15, batch 450, loss[loss=0.2133, simple_loss=0.2967, pruned_loss=0.06498, over 13540.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2779, pruned_loss=0.05395, over 2360673.18 frames. ], batch size: 38, lr: 8.42e-03, grad_scale: 32.0 +2024-08-03 18:01:56,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=188558.33333333334, ans=0.125 +2024-08-03 18:02:07,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=188595.0, ans=0.025 +2024-08-03 18:02:18,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=188668.33333333334, ans=0.0 +2024-08-03 18:02:18,969 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:02:26,765 INFO [train.py:1114] (3/4) Epoch 15, batch 500, loss[loss=0.1919, simple_loss=0.2782, pruned_loss=0.05282, over 13400.00 frames. ], tot_loss[loss=0.192, simple_loss=0.277, pruned_loss=0.05354, over 2426090.26 frames. 
], batch size: 43, lr: 8.42e-03, grad_scale: 32.0 +2024-08-03 18:02:27,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=188705.0, ans=0.125 +2024-08-03 18:02:37,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188741.66666666666, ans=0.1 +2024-08-03 18:02:45,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=188741.66666666666, ans=0.125 +2024-08-03 18:02:46,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=188778.33333333334, ans=0.125 +2024-08-03 18:02:51,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=188778.33333333334, ans=0.2 +2024-08-03 18:02:51,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=188778.33333333334, ans=0.125 +2024-08-03 18:03:05,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=16.30 vs. limit=15.0 +2024-08-03 18:03:11,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=188851.66666666666, ans=0.125 +2024-08-03 18:03:15,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.367e+01 1.124e+02 1.290e+02 1.584e+02 2.757e+02, threshold=2.579e+02, percent-clipped=2.0 +2024-08-03 18:03:16,223 INFO [train.py:1114] (3/4) Epoch 15, batch 550, loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05773, over 13072.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2768, pruned_loss=0.05348, over 2468189.45 frames. ], batch size: 48, lr: 8.41e-03, grad_scale: 32.0 +2024-08-03 18:03:36,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188961.66666666666, ans=0.0 +2024-08-03 18:03:36,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=188961.66666666666, ans=0.125 +2024-08-03 18:03:42,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188998.33333333334, ans=0.125 +2024-08-03 18:04:00,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=189071.66666666666, ans=0.125 +2024-08-03 18:04:01,259 INFO [train.py:1114] (3/4) Epoch 15, batch 600, loss[loss=0.1931, simple_loss=0.2882, pruned_loss=0.04902, over 13296.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2774, pruned_loss=0.0537, over 2508551.78 frames. 
], batch size: 46, lr: 8.41e-03, grad_scale: 32.0 +2024-08-03 18:04:03,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=189071.66666666666, ans=0.0 +2024-08-03 18:04:08,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=189071.66666666666, ans=0.125 +2024-08-03 18:04:19,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=189145.0, ans=0.0 +2024-08-03 18:04:20,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=189145.0, ans=0.0 +2024-08-03 18:04:25,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189145.0, ans=0.125 +2024-08-03 18:04:25,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=189145.0, ans=0.2 +2024-08-03 18:04:28,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=189145.0, ans=10.0 +2024-08-03 18:04:40,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189218.33333333334, ans=0.1 +2024-08-03 18:04:42,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.34 vs. limit=15.0 +2024-08-03 18:04:47,690 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.457e+01 1.170e+02 1.382e+02 2.012e+02 3.539e+02, threshold=2.764e+02, percent-clipped=13.0 +2024-08-03 18:04:48,659 INFO [train.py:1114] (3/4) Epoch 15, batch 650, loss[loss=0.1906, simple_loss=0.2841, pruned_loss=0.04856, over 13543.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2768, pruned_loss=0.05359, over 2543654.06 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 32.0 +2024-08-03 18:05:02,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=189291.66666666666, ans=0.125 +2024-08-03 18:05:17,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=189365.0, ans=0.07 +2024-08-03 18:05:18,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=189365.0, ans=0.5 +2024-08-03 18:05:19,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189365.0, ans=0.125 +2024-08-03 18:05:19,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=189365.0, ans=0.2 +2024-08-03 18:05:34,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=189401.66666666666, ans=0.125 +2024-08-03 18:05:35,785 INFO [train.py:1114] (3/4) Epoch 15, batch 700, loss[loss=0.1543, simple_loss=0.2437, pruned_loss=0.03245, over 13525.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.277, pruned_loss=0.05371, over 2566264.16 frames. 
], batch size: 35, lr: 8.40e-03, grad_scale: 32.0
+2024-08-03 18:05:47,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189475.0, ans=0.125
+2024-08-03 18:05:48,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.72 vs. limit=22.5
+2024-08-03 18:05:49,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=189475.0, ans=0.125
+2024-08-03 18:05:50,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.47 vs. limit=22.5
+2024-08-03 18:06:22,757 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.490e+01 1.100e+02 1.247e+02 1.589e+02 2.626e+02, threshold=2.494e+02, percent-clipped=0.0
+2024-08-03 18:06:22,794 INFO [train.py:1114] (3/4) Epoch 15, batch 750, loss[loss=0.1949, simple_loss=0.289, pruned_loss=0.05042, over 13358.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2763, pruned_loss=0.05325, over 2582519.36 frames. ], batch size: 37, lr: 8.40e-03, grad_scale: 16.0
+2024-08-03 18:06:26,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.84 vs. limit=22.5
+2024-08-03 18:06:28,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=189621.66666666666, ans=0.0
+2024-08-03 18:06:33,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=22.5
+2024-08-03 18:06:45,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=189695.0, ans=0.025
+2024-08-03 18:06:58,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=189731.66666666666, ans=0.2
+2024-08-03 18:06:59,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=189731.66666666666, ans=0.0
+2024-08-03 18:07:01,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=189731.66666666666, ans=0.2
+2024-08-03 18:07:03,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=189768.33333333334, ans=0.2
+2024-08-03 18:07:12,594 INFO [train.py:1114] (3/4) Epoch 15, batch 800, loss[loss=0.1757, simple_loss=0.2534, pruned_loss=0.04896, over 13335.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2759, pruned_loss=0.0533, over 2597198.57 frames. ], batch size: 33, lr: 8.39e-03, grad_scale: 32.0
+2024-08-03 18:07:22,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=189841.66666666666, ans=0.035
+2024-08-03 18:07:59,900 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.253e+01 1.110e+02 1.253e+02 1.558e+02 2.817e+02, threshold=2.505e+02, percent-clipped=1.0
+2024-08-03 18:07:59,937 INFO [train.py:1114] (3/4) Epoch 15, batch 850, loss[loss=0.1739, simple_loss=0.2704, pruned_loss=0.03871, over 13347.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2761, pruned_loss=0.0534, over 2609487.00 frames. ], batch size: 40, lr: 8.39e-03, grad_scale: 32.0
+2024-08-03 18:08:00,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=189988.33333333334, ans=0.0
+2024-08-03 18:08:01,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=189988.33333333334, ans=0.125
+2024-08-03 18:08:10,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190025.0, ans=0.125
+2024-08-03 18:08:44,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0
+2024-08-03 18:08:47,479 INFO [train.py:1114] (3/4) Epoch 15, batch 900, loss[loss=0.1846, simple_loss=0.2573, pruned_loss=0.05596, over 13354.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2765, pruned_loss=0.05381, over 2612096.75 frames. ], batch size: 33, lr: 8.38e-03, grad_scale: 32.0
+2024-08-03 18:08:57,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.70 vs. limit=15.0
+2024-08-03 18:09:03,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=15.0
+2024-08-03 18:09:18,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190281.66666666666, ans=0.1
+2024-08-03 18:09:26,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=190318.33333333334, ans=0.125
+2024-08-03 18:09:35,331 INFO [train.py:1114] (3/4) Epoch 15, batch 950, loss[loss=0.1807, simple_loss=0.2684, pruned_loss=0.04655, over 13557.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2766, pruned_loss=0.05361, over 2611884.53 frames. ], batch size: 34, lr: 8.38e-03, grad_scale: 16.0
+2024-08-03 18:09:36,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.147e+02 1.407e+02 1.582e+02 2.602e+02, threshold=2.813e+02, percent-clipped=2.0
+2024-08-03 18:09:36,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.54 vs. limit=15.0
+2024-08-03 18:09:49,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=190391.66666666666, ans=0.0
+2024-08-03 18:09:57,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=190428.33333333334, ans=0.125
+2024-08-03 18:10:14,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.15 vs. limit=22.5
+2024-08-03 18:10:16,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=190501.66666666666, ans=0.0
+2024-08-03 18:10:19,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190501.66666666666, ans=0.1
+2024-08-03 18:10:24,642 INFO [train.py:1114] (3/4) Epoch 15, batch 1000, loss[loss=0.1741, simple_loss=0.2567, pruned_loss=0.04568, over 13359.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2774, pruned_loss=0.05413, over 2610535.17 frames. ], batch size: 35, lr: 8.38e-03, grad_scale: 16.0
+2024-08-03 18:10:29,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0
+2024-08-03 18:10:30,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190538.33333333334, ans=0.125
+2024-08-03 18:10:39,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=190575.0, ans=0.1
+2024-08-03 18:11:12,155 INFO [train.py:1114] (3/4) Epoch 15, batch 1050, loss[loss=0.201, simple_loss=0.292, pruned_loss=0.05495, over 13575.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2766, pruned_loss=0.05366, over 2615025.78 frames. ], batch size: 39, lr: 8.37e-03, grad_scale: 16.0
+2024-08-03 18:11:12,982 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.909e+01 1.089e+02 1.310e+02 1.512e+02 2.407e+02, threshold=2.620e+02, percent-clipped=0.0
+2024-08-03 18:11:34,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=190795.0, ans=0.2
+2024-08-03 18:11:48,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=190831.66666666666, ans=0.125
+2024-08-03 18:11:59,048 INFO [train.py:1114] (3/4) Epoch 15, batch 1100, loss[loss=0.196, simple_loss=0.2814, pruned_loss=0.05529, over 13560.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.277, pruned_loss=0.0539, over 2619176.02 frames. ], batch size: 36, lr: 8.37e-03, grad_scale: 16.0
+2024-08-03 18:12:01,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=190905.0, ans=0.0
+2024-08-03 18:12:08,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=190941.66666666666, ans=0.0
+2024-08-03 18:12:19,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=190978.33333333334, ans=15.0
+2024-08-03 18:12:22,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190978.33333333334, ans=0.125
+2024-08-03 18:12:28,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191015.0, ans=0.0
+2024-08-03 18:12:45,861 INFO [train.py:1114] (3/4) Epoch 15, batch 1150, loss[loss=0.2027, simple_loss=0.2857, pruned_loss=0.05986, over 13555.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2768, pruned_loss=0.05386, over 2617772.34 frames. ], batch size: 36, lr: 8.36e-03, grad_scale: 8.0
+2024-08-03 18:12:47,596 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.143e+02 1.336e+02 1.684e+02 2.618e+02, threshold=2.671e+02, percent-clipped=0.0
+2024-08-03 18:12:57,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=191125.0, ans=0.2
+2024-08-03 18:12:57,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=191125.0, ans=0.05
+2024-08-03 18:12:58,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=191125.0, ans=0.125
+2024-08-03 18:13:02,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191125.0, ans=0.125
+2024-08-03 18:13:02,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=191125.0, ans=0.125
+2024-08-03 18:13:06,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191161.66666666666, ans=0.0
+2024-08-03 18:13:12,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=191161.66666666666, ans=0.2
+2024-08-03 18:13:12,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=191161.66666666666, ans=22.5
+2024-08-03 18:13:15,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=191198.33333333334, ans=0.025
+2024-08-03 18:13:40,337 INFO [train.py:1114] (3/4) Epoch 15, batch 1200, loss[loss=0.1809, simple_loss=0.2693, pruned_loss=0.04619, over 13564.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2776, pruned_loss=0.05397, over 2615430.52 frames. ], batch size: 39, lr: 8.36e-03, grad_scale: 16.0
+2024-08-03 18:13:41,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.50 vs. limit=15.0
+2024-08-03 18:13:47,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=191271.66666666666, ans=0.125
+2024-08-03 18:13:52,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=191308.33333333334, ans=0.0
+2024-08-03 18:14:03,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=191345.0, ans=0.0
+2024-08-03 18:14:04,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=191345.0, ans=0.2
+2024-08-03 18:14:09,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=191381.66666666666, ans=0.125
+2024-08-03 18:14:24,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191418.33333333334, ans=0.0
+2024-08-03 18:14:27,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=191418.33333333334, ans=0.1
+2024-08-03 18:14:27,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.74 vs. limit=15.0
+2024-08-03 18:14:28,836 INFO [train.py:1114] (3/4) Epoch 15, batch 1250, loss[loss=0.1882, simple_loss=0.2768, pruned_loss=0.04973, over 13443.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2782, pruned_loss=0.05412, over 2627572.25 frames. ], batch size: 42, lr: 8.36e-03, grad_scale: 16.0
+2024-08-03 18:14:30,570 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.500e+01 1.104e+02 1.280e+02 1.551e+02 2.607e+02, threshold=2.559e+02, percent-clipped=0.0
+2024-08-03 18:14:35,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.99 vs. limit=22.5
+2024-08-03 18:15:03,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=191565.0, ans=15.0
+2024-08-03 18:15:10,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=191601.66666666666, ans=0.2
+2024-08-03 18:15:12,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=15.0
+2024-08-03 18:15:14,042 INFO [train.py:1114] (3/4) Epoch 15, batch 1300, loss[loss=0.2163, simple_loss=0.303, pruned_loss=0.0648, over 12937.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2774, pruned_loss=0.05426, over 2629903.55 frames. ], batch size: 52, lr: 8.35e-03, grad_scale: 16.0
+2024-08-03 18:15:19,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191638.33333333334, ans=0.125
+2024-08-03 18:15:20,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0
+2024-08-03 18:15:33,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=191711.66666666666, ans=0.04949747468305833
+2024-08-03 18:15:39,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191711.66666666666, ans=0.125
+2024-08-03 18:15:39,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-08-03 18:15:47,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=191748.33333333334, ans=0.0
+2024-08-03 18:16:01,044 INFO [train.py:1114] (3/4) Epoch 15, batch 1350, loss[loss=0.2093, simple_loss=0.2946, pruned_loss=0.06203, over 13537.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2774, pruned_loss=0.05387, over 2636876.48 frames. ], batch size: 37, lr: 8.35e-03, grad_scale: 16.0
+2024-08-03 18:16:02,869 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.149e+02 1.398e+02 1.802e+02 2.548e+02, threshold=2.797e+02, percent-clipped=0.0
+2024-08-03 18:16:19,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=191895.0, ans=0.125
+2024-08-03 18:16:20,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=191895.0, ans=0.035
+2024-08-03 18:16:42,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191968.33333333334, ans=0.125
+2024-08-03 18:16:47,845 INFO [train.py:1114] (3/4) Epoch 15, batch 1400, loss[loss=0.1701, simple_loss=0.2483, pruned_loss=0.04596, over 13267.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2769, pruned_loss=0.05338, over 2641071.56 frames. ], batch size: 31, lr: 8.34e-03, grad_scale: 16.0
+2024-08-03 18:16:48,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192005.0, ans=0.1
+2024-08-03 18:17:08,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=192078.33333333334, ans=0.04949747468305833
+2024-08-03 18:17:34,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=192151.66666666666, ans=0.0
+2024-08-03 18:17:39,154 INFO [train.py:1114] (3/4) Epoch 15, batch 1450, loss[loss=0.1973, simple_loss=0.2923, pruned_loss=0.05118, over 13418.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2776, pruned_loss=0.05357, over 2640556.59 frames. ], batch size: 43, lr: 8.34e-03, grad_scale: 16.0
+2024-08-03 18:17:40,955 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.258e+01 1.126e+02 1.352e+02 1.648e+02 3.700e+02, threshold=2.704e+02, percent-clipped=1.0
+2024-08-03 18:17:44,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=192188.33333333334, ans=0.09899494936611666
+2024-08-03 18:18:07,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.46 vs. limit=15.0
+2024-08-03 18:18:10,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192298.33333333334, ans=0.0
+2024-08-03 18:18:12,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=192298.33333333334, ans=0.125
+2024-08-03 18:18:24,968 INFO [train.py:1114] (3/4) Epoch 15, batch 1500, loss[loss=0.2053, simple_loss=0.2965, pruned_loss=0.05707, over 13415.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.278, pruned_loss=0.05359, over 2640576.79 frames. ], batch size: 39, lr: 8.34e-03, grad_scale: 16.0
+2024-08-03 18:18:37,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=192408.33333333334, ans=0.125
+2024-08-03 18:18:41,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=192408.33333333334, ans=0.125
+2024-08-03 18:18:46,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192445.0, ans=0.1
+2024-08-03 18:18:46,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=192445.0, ans=0.0
+2024-08-03 18:19:12,258 INFO [train.py:1114] (3/4) Epoch 15, batch 1550, loss[loss=0.2162, simple_loss=0.2986, pruned_loss=0.06695, over 13405.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2784, pruned_loss=0.05418, over 2630840.73 frames. ], batch size: 41, lr: 8.33e-03, grad_scale: 16.0
+2024-08-03 18:19:14,134 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.182e+01 1.117e+02 1.315e+02 1.608e+02 2.647e+02, threshold=2.631e+02, percent-clipped=0.0
+2024-08-03 18:19:21,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.44 vs. limit=15.0
+2024-08-03 18:19:23,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.52 vs. limit=15.0
+2024-08-03 18:19:27,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.99 vs. limit=6.0
+2024-08-03 18:19:27,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192591.66666666666, ans=0.1
+2024-08-03 18:19:32,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192628.33333333334, ans=0.1
+2024-08-03 18:19:34,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=192628.33333333334, ans=0.125
+2024-08-03 18:19:34,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.21 vs. limit=22.5
+2024-08-03 18:19:44,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=192665.0, ans=0.0
+2024-08-03 18:19:49,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192701.66666666666, ans=0.1
+2024-08-03 18:19:50,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.07 vs. limit=6.0
+2024-08-03 18:19:59,556 INFO [train.py:1114] (3/4) Epoch 15, batch 1600, loss[loss=0.2147, simple_loss=0.2987, pruned_loss=0.06538, over 13572.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2781, pruned_loss=0.0539, over 2623325.08 frames. ], batch size: 39, lr: 8.33e-03, grad_scale: 32.0
+2024-08-03 18:20:01,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=192738.33333333334, ans=0.0
+2024-08-03 18:20:04,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=192738.33333333334, ans=0.0
+2024-08-03 18:20:05,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192738.33333333334, ans=0.0
+2024-08-03 18:20:05,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=192738.33333333334, ans=0.1
+2024-08-03 18:20:09,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=192775.0, ans=0.125
+2024-08-03 18:20:19,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=192811.66666666666, ans=0.125
+2024-08-03 18:20:21,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.87 vs. limit=15.0
+2024-08-03 18:20:30,604 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:20:36,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192885.0, ans=0.1
+2024-08-03 18:20:44,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=192885.0, ans=0.2
+2024-08-03 18:20:47,111 INFO [train.py:1114] (3/4) Epoch 15, batch 1650, loss[loss=0.1776, simple_loss=0.2678, pruned_loss=0.0437, over 13335.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2778, pruned_loss=0.05403, over 2621334.76 frames. ], batch size: 40, lr: 8.33e-03, grad_scale: 32.0
+2024-08-03 18:20:48,897 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.213e+01 1.145e+02 1.280e+02 1.838e+02 3.870e+02, threshold=2.560e+02, percent-clipped=5.0
+2024-08-03 18:20:59,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=192958.33333333334, ans=0.125
+2024-08-03 18:21:08,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=192995.0, ans=0.0
+2024-08-03 18:21:11,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-08-03 18:21:15,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193031.66666666666, ans=0.125
+2024-08-03 18:21:23,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=193031.66666666666, ans=0.035
+2024-08-03 18:21:28,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=193068.33333333334, ans=0.2
+2024-08-03 18:21:31,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=193068.33333333334, ans=0.125
+2024-08-03 18:21:33,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=193105.0, ans=0.125
+2024-08-03 18:21:34,182 INFO [train.py:1114] (3/4) Epoch 15, batch 1700, loss[loss=0.1858, simple_loss=0.2614, pruned_loss=0.05509, over 13258.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2776, pruned_loss=0.05394, over 2629931.63 frames. ], batch size: 31, lr: 8.32e-03, grad_scale: 32.0
+2024-08-03 18:21:37,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=193105.0, ans=0.125
+2024-08-03 18:21:39,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=193105.0, ans=0.07
+2024-08-03 18:21:42,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=193141.66666666666, ans=0.0
+2024-08-03 18:21:43,010 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.58 vs. limit=22.5
+2024-08-03 18:21:50,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=193141.66666666666, ans=0.0
+2024-08-03 18:21:50,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=193141.66666666666, ans=0.0
+2024-08-03 18:21:58,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=193178.33333333334, ans=0.125
+2024-08-03 18:22:04,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=193215.0, ans=0.0
+2024-08-03 18:22:14,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193251.66666666666, ans=0.125
+2024-08-03 18:22:19,611 INFO [train.py:1114] (3/4) Epoch 15, batch 1750, loss[loss=0.1659, simple_loss=0.2426, pruned_loss=0.04463, over 13558.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2775, pruned_loss=0.05387, over 2633273.35 frames. ], batch size: 31, lr: 8.32e-03, grad_scale: 32.0
+2024-08-03 18:22:21,329 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.690e+01 1.123e+02 1.340e+02 1.586e+02 3.403e+02, threshold=2.681e+02, percent-clipped=7.0
+2024-08-03 18:22:23,293 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:22:36,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=193325.0, ans=0.125
+2024-08-03 18:22:43,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=193361.66666666666, ans=0.0
+2024-08-03 18:22:58,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=193435.0, ans=0.0
+2024-08-03 18:23:00,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193435.0, ans=0.125
+2024-08-03 18:23:02,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=193435.0, ans=0.125
+2024-08-03 18:23:06,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193471.66666666666, ans=0.125
+2024-08-03 18:23:06,732 INFO [train.py:1114] (3/4) Epoch 15, batch 1800, loss[loss=0.1949, simple_loss=0.2801, pruned_loss=0.05487, over 13547.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2774, pruned_loss=0.05361, over 2634959.38 frames. ], batch size: 38, lr: 8.31e-03, grad_scale: 32.0
+2024-08-03 18:23:09,484 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:23:14,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=193471.66666666666, ans=0.025
+2024-08-03 18:23:14,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.29 vs. limit=15.0
+2024-08-03 18:23:21,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.40 vs. limit=15.0
+2024-08-03 18:23:26,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=193545.0, ans=0.0
+2024-08-03 18:23:26,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=193545.0, ans=0.0
+2024-08-03 18:23:29,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.51 vs. limit=22.5
+2024-08-03 18:23:42,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.03 vs. limit=12.0
+2024-08-03 18:23:55,686 INFO [train.py:1114] (3/4) Epoch 15, batch 1850, loss[loss=0.1844, simple_loss=0.2724, pruned_loss=0.04823, over 13398.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2771, pruned_loss=0.05344, over 2638030.45 frames. ], batch size: 39, lr: 8.31e-03, grad_scale: 32.0
+2024-08-03 18:23:57,398 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.035e+01 1.191e+02 1.556e+02 2.123e+02 2.973e+02, threshold=3.112e+02, percent-clipped=3.0
+2024-08-03 18:24:03,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=193655.0, ans=0.0
+2024-08-03 18:24:07,673 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:24:14,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=193728.33333333334, ans=0.125
+2024-08-03 18:24:26,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0
+2024-08-03 18:24:35,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.33 vs. limit=12.0
+2024-08-03 18:24:40,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=193801.66666666666, ans=0.1
+2024-08-03 18:24:44,747 INFO [train.py:1114] (3/4) Epoch 15, batch 1900, loss[loss=0.184, simple_loss=0.2861, pruned_loss=0.04097, over 13335.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2781, pruned_loss=0.05395, over 2640547.80 frames. ], batch size: 40, lr: 8.31e-03, grad_scale: 32.0
+2024-08-03 18:24:45,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=193838.33333333334, ans=0.025
+2024-08-03 18:24:53,746 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:24:55,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193875.0, ans=0.125
+2024-08-03 18:25:07,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=193911.66666666666, ans=0.0
+2024-08-03 18:25:16,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. limit=15.0
+2024-08-03 18:25:17,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193948.33333333334, ans=0.1
+2024-08-03 18:25:29,773 INFO [train.py:1114] (3/4) Epoch 15, batch 1950, loss[loss=0.1901, simple_loss=0.2746, pruned_loss=0.05283, over 13576.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2793, pruned_loss=0.05429, over 2646982.85 frames. ], batch size: 36, lr: 8.30e-03, grad_scale: 32.0
+2024-08-03 18:25:31,621 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.973e+01 1.188e+02 1.452e+02 1.828e+02 3.234e+02, threshold=2.903e+02, percent-clipped=1.0
+2024-08-03 18:25:33,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=15.0
+2024-08-03 18:25:40,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=194058.33333333334, ans=0.0
+2024-08-03 18:25:41,065 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:25:44,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.49 vs. limit=10.0
+2024-08-03 18:26:00,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=194131.66666666666, ans=0.025
+2024-08-03 18:26:02,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=194131.66666666666, ans=0.0
+2024-08-03 18:26:12,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=194168.33333333334, ans=0.1
+2024-08-03 18:26:15,852 INFO [train.py:1114] (3/4) Epoch 15, batch 2000, loss[loss=0.1994, simple_loss=0.2732, pruned_loss=0.0628, over 13574.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2796, pruned_loss=0.05434, over 2636915.57 frames. ], batch size: 31, lr: 8.30e-03, grad_scale: 32.0
+2024-08-03 18:26:16,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=194205.0, ans=0.0
+2024-08-03 18:26:21,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194205.0, ans=0.125
+2024-08-03 18:26:28,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.33 vs. limit=10.0
+2024-08-03 18:26:30,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=194241.66666666666, ans=0.125
+2024-08-03 18:26:33,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=194241.66666666666, ans=0.015
+2024-08-03 18:26:58,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194351.66666666666, ans=0.125
+2024-08-03 18:26:58,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.57 vs. limit=10.0
+2024-08-03 18:27:06,153 INFO [train.py:1114] (3/4) Epoch 15, batch 2050, loss[loss=0.213, simple_loss=0.2885, pruned_loss=0.06871, over 13415.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.279, pruned_loss=0.05439, over 2633269.00 frames. ], batch size: 32, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:27:07,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.454e+01 1.180e+02 1.343e+02 1.712e+02 4.642e+02, threshold=2.687e+02, percent-clipped=2.0
+2024-08-03 18:27:29,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=194461.66666666666, ans=0.0
+2024-08-03 18:27:48,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=194535.0, ans=0.125
+2024-08-03 18:27:51,123 INFO [train.py:1114] (3/4) Epoch 15, batch 2100, loss[loss=0.2072, simple_loss=0.285, pruned_loss=0.06469, over 13543.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2785, pruned_loss=0.05411, over 2638569.57 frames. ], batch size: 37, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:28:01,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=194608.33333333334, ans=0.125
+2024-08-03 18:28:13,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=194645.0, ans=0.1
+2024-08-03 18:28:35,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=194718.33333333334, ans=0.0
+2024-08-03 18:28:40,574 INFO [train.py:1114] (3/4) Epoch 15, batch 2150, loss[loss=0.1856, simple_loss=0.268, pruned_loss=0.05164, over 13554.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2774, pruned_loss=0.05391, over 2647091.54 frames. ], batch size: 36, lr: 8.29e-03, grad_scale: 32.0
+2024-08-03 18:28:42,290 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.112e+02 1.243e+02 1.782e+02 4.136e+02, threshold=2.485e+02, percent-clipped=5.0
+2024-08-03 18:28:42,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=194755.0, ans=0.0
+2024-08-03 18:28:58,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=194828.33333333334, ans=0.2
+2024-08-03 18:29:14,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=194865.0, ans=0.0
+2024-08-03 18:29:24,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=194938.33333333334, ans=0.125
+2024-08-03 18:29:25,520 INFO [train.py:1114] (3/4) Epoch 15, batch 2200, loss[loss=0.2023, simple_loss=0.2895, pruned_loss=0.0576, over 13408.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2771, pruned_loss=0.05356, over 2645646.74 frames. ], batch size: 39, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:29:28,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=194938.33333333334, ans=0.125
+2024-08-03 18:29:34,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=15.0
+2024-08-03 18:29:53,014 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:30:04,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=195085.0, ans=0.125
+2024-08-03 18:30:04,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0
+2024-08-03 18:30:13,224 INFO [train.py:1114] (3/4) Epoch 15, batch 2250, loss[loss=0.1674, simple_loss=0.2716, pruned_loss=0.03158, over 13353.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2769, pruned_loss=0.05321, over 2642012.96 frames. ], batch size: 37, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:30:15,094 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.620e+01 1.201e+02 1.486e+02 1.910e+02 3.582e+02, threshold=2.971e+02, percent-clipped=11.0
+2024-08-03 18:30:25,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=195158.33333333334, ans=0.125
+2024-08-03 18:30:31,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.48 vs. limit=6.0
+2024-08-03 18:30:33,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195195.0, ans=0.1
+2024-08-03 18:31:01,065 INFO [train.py:1114] (3/4) Epoch 15, batch 2300, loss[loss=0.1559, simple_loss=0.2394, pruned_loss=0.03617, over 13574.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2766, pruned_loss=0.05347, over 2637661.57 frames. ], batch size: 33, lr: 8.28e-03, grad_scale: 32.0
+2024-08-03 18:31:25,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=195378.33333333334, ans=0.125
+2024-08-03 18:31:27,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0
+2024-08-03 18:31:46,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=195451.66666666666, ans=0.5
+2024-08-03 18:31:53,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=195451.66666666666, ans=0.0
+2024-08-03 18:31:58,504 INFO [train.py:1114] (3/4) Epoch 15, batch 2350, loss[loss=0.2153, simple_loss=0.3038, pruned_loss=0.0634, over 13545.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2763, pruned_loss=0.05325, over 2640100.74 frames. ], batch size: 38, lr: 8.27e-03, grad_scale: 32.0
+2024-08-03 18:32:00,496 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.540e+01 1.095e+02 1.335e+02 1.545e+02 2.606e+02, threshold=2.670e+02, percent-clipped=0.0
+2024-08-03 18:32:09,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=195525.0, ans=0.0
+2024-08-03 18:32:35,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=195635.0, ans=0.125
+2024-08-03 18:32:36,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=195635.0, ans=0.0
+2024-08-03 18:32:38,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195635.0, ans=0.1
+2024-08-03 18:32:44,916 INFO [train.py:1114] (3/4) Epoch 15, batch 2400, loss[loss=0.18, simple_loss=0.26, pruned_loss=0.05001, over 13531.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2773, pruned_loss=0.05337, over 2641608.86 frames. ], batch size: 35, lr: 8.27e-03, grad_scale: 32.0
+2024-08-03 18:32:49,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=195671.66666666666, ans=0.0
+2024-08-03 18:32:59,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=195708.33333333334, ans=0.0
+2024-08-03 18:33:02,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=195745.0, ans=0.0
+2024-08-03 18:33:04,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0
+2024-08-03 18:33:18,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=195781.66666666666, ans=0.125
+2024-08-03 18:33:20,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=195818.33333333334, ans=0.0
+2024-08-03 18:33:28,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195818.33333333334, ans=0.125
+2024-08-03 18:33:28,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=195818.33333333334, ans=0.125
+2024-08-03 18:33:30,325 INFO [train.py:1114] (3/4) Epoch 15, batch 2450, loss[loss=0.1923, simple_loss=0.2809, pruned_loss=0.05179, over 13355.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.278, pruned_loss=0.05383, over 2631203.84 frames. ], batch size: 37, lr: 8.26e-03, grad_scale: 16.0
+2024-08-03 18:33:33,024 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.379e+01 1.105e+02 1.269e+02 1.556e+02 2.604e+02, threshold=2.537e+02, percent-clipped=0.0
+2024-08-03 18:33:33,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=195855.0, ans=0.0
+2024-08-03 18:33:34,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0
+2024-08-03 18:33:36,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195855.0, ans=0.1
+2024-08-03 18:33:44,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=195891.66666666666, ans=0.95
+2024-08-03 18:34:17,552 INFO [train.py:1114] (3/4) Epoch 15, batch 2500, loss[loss=0.1921, simple_loss=0.2853, pruned_loss=0.04946, over 13409.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2773, pruned_loss=0.05343, over 2636001.04 frames. ], batch size: 39, lr: 8.26e-03, grad_scale: 16.0
+2024-08-03 18:34:34,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=196111.66666666666, ans=0.125
+2024-08-03 18:34:36,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.00 vs. limit=15.0
+2024-08-03 18:34:38,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=196111.66666666666, ans=0.0
+2024-08-03 18:34:55,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.16 vs. limit=6.0
+2024-08-03 18:35:04,095 INFO [train.py:1114] (3/4) Epoch 15, batch 2550, loss[loss=0.1752, simple_loss=0.2533, pruned_loss=0.04853, over 13515.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2769, pruned_loss=0.05348, over 2637799.76 frames. ], batch size: 31, lr: 8.26e-03, grad_scale: 16.0
+2024-08-03 18:35:06,686 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.866e+01 1.097e+02 1.275e+02 1.738e+02 2.775e+02, threshold=2.550e+02, percent-clipped=2.0
+2024-08-03 18:35:17,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=196258.33333333334, ans=0.125
+2024-08-03 18:35:19,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=196258.33333333334, ans=0.0
+2024-08-03 18:35:19,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=196258.33333333334, ans=0.125
+2024-08-03 18:35:24,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=196295.0, ans=0.0
+2024-08-03 18:35:36,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196331.66666666666, ans=0.1
+2024-08-03 18:35:36,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196331.66666666666, ans=0.125
+2024-08-03 18:35:39,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=196368.33333333334, ans=0.2
+2024-08-03 18:35:47,211 INFO [train.py:1114] (3/4) Epoch 15, batch 2600, loss[loss=0.1956, simple_loss=0.278, pruned_loss=0.05663, over 13560.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2774, pruned_loss=0.05357, over 2636570.09 frames. ], batch size: 36, lr: 8.25e-03, grad_scale: 16.0
+2024-08-03 18:35:50,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=196405.0, ans=0.0
+2024-08-03 18:36:30,696 INFO [train.py:1114] (3/4) Epoch 15, batch 2650, loss[loss=0.2162, simple_loss=0.3008, pruned_loss=0.06578, over 13360.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2779, pruned_loss=0.05363, over 2639687.08 frames. ], batch size: 46, lr: 8.25e-03, grad_scale: 16.0
+2024-08-03 18:36:33,206 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.155e+01 1.086e+02 1.260e+02 1.535e+02 2.930e+02, threshold=2.521e+02, percent-clipped=3.0
+2024-08-03 18:36:33,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=196588.33333333334, ans=0.0
+2024-08-03 18:36:34,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0
+2024-08-03 18:36:36,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=196588.33333333334, ans=0.0
+2024-08-03 18:36:41,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=196625.0, ans=0.0
+2024-08-03 18:36:49,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=196661.66666666666, ans=0.0
+2024-08-03 18:37:00,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=196698.33333333334, ans=0.0
+2024-08-03 18:37:16,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0
+2024-08-03 18:37:17,630 INFO [train.py:1114] (3/4) Epoch 15, batch 2700, loss[loss=0.2138, simple_loss=0.3019, pruned_loss=0.06289, over 13544.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2786, pruned_loss=0.05406, over 2636683.23 frames. ], batch size: 40, lr: 8.24e-03, grad_scale: 16.0
+2024-08-03 18:37:25,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196808.33333333334, ans=0.1
+2024-08-03 18:37:35,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.20 vs. limit=15.0
+2024-08-03 18:37:42,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=196845.0, ans=0.125
+2024-08-03 18:37:42,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196881.66666666666, ans=0.125
+2024-08-03 18:37:48,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=196881.66666666666, ans=0.125
+2024-08-03 18:38:01,036 INFO [train.py:1114] (3/4) Epoch 15, batch 2750, loss[loss=0.1776, simple_loss=0.2637, pruned_loss=0.04573, over 13337.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2776, pruned_loss=0.05396, over 2634072.12 frames. ], batch size: 34, lr: 8.24e-03, grad_scale: 16.0
+2024-08-03 18:38:03,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0
+2024-08-03 18:38:03,556 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.526e+01 1.073e+02 1.243e+02 1.451e+02 2.224e+02, threshold=2.486e+02, percent-clipped=0.0
+2024-08-03 18:38:11,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=196991.66666666666, ans=0.0
+2024-08-03 18:38:12,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196991.66666666666, ans=0.1
+2024-08-03 18:38:19,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197028.33333333334, ans=0.125
+2024-08-03 18:38:22,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.40 vs. limit=6.0
+2024-08-03 18:38:26,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=197065.0, ans=0.125
+2024-08-03 18:38:44,475 INFO [train.py:1114] (3/4) Epoch 15, batch 2800, loss[loss=0.2957, simple_loss=0.3478, pruned_loss=0.1218, over 9185.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.278, pruned_loss=0.05426, over 2625586.53 frames. ], batch size: 96, lr: 8.24e-03, grad_scale: 32.0
+2024-08-03 18:38:54,436 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.35 vs. limit=15.0
+2024-08-03 18:38:54,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197175.0, ans=0.1
+2024-08-03 18:38:58,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.69 vs. limit=15.0
+2024-08-03 18:39:05,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=197211.66666666666, ans=0.2
+2024-08-03 18:39:27,894 INFO [train.py:1114] (3/4) Epoch 15, batch 2850, loss[loss=0.172, simple_loss=0.2634, pruned_loss=0.0403, over 13363.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2787, pruned_loss=0.05429, over 2620772.78 frames. ], batch size: 35, lr: 8.23e-03, grad_scale: 32.0
+2024-08-03 18:39:30,443 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.971e+01 1.137e+02 1.312e+02 1.532e+02 3.029e+02, threshold=2.624e+02, percent-clipped=2.0
+2024-08-03 18:39:31,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=197321.66666666666, ans=0.0
+2024-08-03 18:39:35,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0
+2024-08-03 18:39:39,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=197358.33333333334, ans=15.0
+2024-08-03 18:39:42,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.69 vs. limit=15.0
+2024-08-03 18:39:45,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=197395.0, ans=10.0
+2024-08-03 18:40:00,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=197431.66666666666, ans=0.125
+2024-08-03 18:40:11,207 INFO [train.py:1114] (3/4) Epoch 15, batch 2900, loss[loss=0.1811, simple_loss=0.2617, pruned_loss=0.05024, over 13355.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.279, pruned_loss=0.05444, over 2631744.38 frames. ], batch size: 36, lr: 8.23e-03, grad_scale: 32.0
+2024-08-03 18:40:24,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197541.66666666666, ans=0.1
+2024-08-03 18:40:32,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197578.33333333334, ans=0.125
+2024-08-03 18:40:33,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=197578.33333333334, ans=15.0
+2024-08-03 18:40:34,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=197578.33333333334, ans=0.125
+2024-08-03 18:40:36,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=197615.0, ans=0.0
+2024-08-03 18:40:36,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.08 vs. limit=15.0
+2024-08-03 18:40:39,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=197615.0, ans=0.0
+2024-08-03 18:40:46,846 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:40:48,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197651.66666666666, ans=0.1
+2024-08-03 18:40:52,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=197651.66666666666, ans=0.125
+2024-08-03 18:40:54,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.28 vs. limit=22.5
+2024-08-03 18:40:54,520 INFO [train.py:1114] (3/4) Epoch 15, batch 2950, loss[loss=0.1686, simple_loss=0.2579, pruned_loss=0.03968, over 13321.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2778, pruned_loss=0.05416, over 2630531.76 frames. ], batch size: 34, lr: 8.23e-03, grad_scale: 32.0
+2024-08-03 18:40:57,043 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.111e+02 1.238e+02 1.494e+02 2.430e+02, threshold=2.476e+02, percent-clipped=0.0
+2024-08-03 18:41:07,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.16 vs. limit=10.0
+2024-08-03 18:41:13,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197761.66666666666, ans=0.125
+2024-08-03 18:41:18,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=197761.66666666666, ans=0.0
+2024-08-03 18:41:26,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=197798.33333333334, ans=0.0
+2024-08-03 18:41:38,164 INFO [train.py:1114] (3/4) Epoch 15, batch 3000, loss[loss=0.1983, simple_loss=0.2834, pruned_loss=0.05663, over 13536.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2777, pruned_loss=0.05455, over 2631466.08 frames. ], batch size: 37, lr: 8.22e-03, grad_scale: 32.0
+2024-08-03 18:41:38,164 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 18:41:48,177 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2717, pruned_loss=0.03605, over 944034.00 frames.
+2024-08-03 18:41:48,177 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 18:41:54,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=197871.66666666666, ans=0.125
+2024-08-03 18:41:58,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=197908.33333333334, ans=0.125
+2024-08-03 18:41:58,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=197908.33333333334, ans=0.0
+2024-08-03 18:42:11,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=12.0
+2024-08-03 18:42:15,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.36 vs. limit=12.0
+2024-08-03 18:42:18,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=197981.66666666666, ans=0.0
+2024-08-03 18:42:25,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.78 vs. limit=15.0
+2024-08-03 18:42:26,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=198018.33333333334, ans=0.5
+2024-08-03 18:42:32,395 INFO [train.py:1114] (3/4) Epoch 15, batch 3050, loss[loss=0.1747, simple_loss=0.2604, pruned_loss=0.04449, over 13534.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2784, pruned_loss=0.05483, over 2628369.31 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 32.0
+2024-08-03 18:42:35,001 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.644e+01 1.092e+02 1.243e+02 1.494e+02 2.695e+02, threshold=2.487e+02, percent-clipped=3.0
+2024-08-03 18:42:39,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=198055.0, ans=0.125
+2024-08-03 18:42:40,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198091.66666666666, ans=0.125
+2024-08-03 18:42:43,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198091.66666666666, ans=0.1
+2024-08-03 18:43:10,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198201.66666666666, ans=0.1
+2024-08-03 18:43:15,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=198238.33333333334, ans=0.0
+2024-08-03 18:43:16,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.86 vs. limit=15.0
+2024-08-03 18:43:16,371 INFO [train.py:1114] (3/4) Epoch 15, batch 3100, loss[loss=0.2118, simple_loss=0.2947, pruned_loss=0.06442, over 13352.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2782, pruned_loss=0.05458, over 2628605.33 frames. ], batch size: 46, lr: 8.21e-03, grad_scale: 32.0
+2024-08-03 18:43:19,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=198238.33333333334, ans=0.07
+2024-08-03 18:43:30,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=198275.0, ans=0.0
+2024-08-03 18:43:56,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. limit=6.0
+2024-08-03 18:43:58,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.91 vs. limit=15.0
+2024-08-03 18:44:00,848 INFO [train.py:1114] (3/4) Epoch 15, batch 3150, loss[loss=0.1986, simple_loss=0.2857, pruned_loss=0.05578, over 13078.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2784, pruned_loss=0.05441, over 2630385.19 frames. ], batch size: 48, lr: 8.21e-03, grad_scale: 32.0
+2024-08-03 18:44:03,294 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.819e+01 1.142e+02 1.468e+02 1.823e+02 3.085e+02, threshold=2.937e+02, percent-clipped=3.0
+2024-08-03 18:44:13,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=198458.33333333334, ans=0.07
+2024-08-03 18:44:36,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=198495.0, ans=0.2
+2024-08-03 18:44:38,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=198531.66666666666, ans=0.0
+2024-08-03 18:44:38,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198531.66666666666, ans=0.125
+2024-08-03 18:44:40,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.63 vs. limit=22.5
+2024-08-03 18:44:53,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=198568.33333333334, ans=0.0
+2024-08-03 18:44:54,991 INFO [train.py:1114] (3/4) Epoch 15, batch 3200, loss[loss=0.2131, simple_loss=0.2921, pruned_loss=0.06703, over 13518.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2778, pruned_loss=0.05414, over 2636268.27 frames. ], batch size: 37, lr: 8.21e-03, grad_scale: 32.0
+2024-08-03 18:44:55,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198605.0, ans=0.125
+2024-08-03 18:44:56,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=198605.0, ans=0.07
+2024-08-03 18:45:05,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=198641.66666666666, ans=0.125
+2024-08-03 18:45:09,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=22.5
+2024-08-03 18:45:11,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198678.33333333334, ans=0.0
+2024-08-03 18:45:12,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=198678.33333333334, ans=0.125
+2024-08-03 18:45:21,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=198715.0, ans=0.125
+2024-08-03 18:45:23,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198715.0, ans=0.0
+2024-08-03 18:45:38,352 INFO [train.py:1114] (3/4) Epoch 15, batch 3250, loss[loss=0.2039, simple_loss=0.2894, pruned_loss=0.05923, over 13395.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2779, pruned_loss=0.054, over 2639425.98 frames. ], batch size: 38, lr: 8.20e-03, grad_scale: 32.0
+2024-08-03 18:45:40,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.293e+01 1.129e+02 1.312e+02 1.541e+02 2.254e+02, threshold=2.624e+02, percent-clipped=0.0
+2024-08-03 18:45:41,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198788.33333333334, ans=0.1
+2024-08-03 18:45:42,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.21 vs. limit=12.0
+2024-08-03 18:45:48,945 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:45:50,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=198825.0, ans=0.0
+2024-08-03 18:46:04,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.31 vs. limit=15.0
+2024-08-03 18:46:08,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=198898.33333333334, ans=0.0
+2024-08-03 18:46:21,423 INFO [train.py:1114] (3/4) Epoch 15, batch 3300, loss[loss=0.2013, simple_loss=0.2837, pruned_loss=0.05941, over 12813.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2764, pruned_loss=0.05364, over 2641136.49 frames. ], batch size: 52, lr: 8.20e-03, grad_scale: 16.0
+2024-08-03 18:46:38,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199045.0, ans=0.125
+2024-08-03 18:46:39,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=15.0
+2024-08-03 18:46:43,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.25 vs. limit=12.0
+2024-08-03 18:46:54,807 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 18:46:59,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199118.33333333334, ans=0.125
+2024-08-03 18:47:04,156 INFO [train.py:1114] (3/4) Epoch 15, batch 3350, loss[loss=0.2119, simple_loss=0.2982, pruned_loss=0.06279, over 13018.00 frames. 
], tot_loss[loss=0.1929, simple_loss=0.2776, pruned_loss=0.05411, over 2629977.33 frames. ], batch size: 48, lr: 8.20e-03, grad_scale: 16.0 +2024-08-03 18:47:07,477 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.754e+01 1.143e+02 1.314e+02 1.590e+02 2.231e+02, threshold=2.628e+02, percent-clipped=0.0 +2024-08-03 18:47:08,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=199155.0, ans=0.2 +2024-08-03 18:47:18,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=199191.66666666666, ans=0.025 +2024-08-03 18:47:40,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.40 vs. limit=22.5 +2024-08-03 18:47:42,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.83 vs. limit=15.0 +2024-08-03 18:47:47,284 INFO [train.py:1114] (3/4) Epoch 15, batch 3400, loss[loss=0.18, simple_loss=0.2544, pruned_loss=0.05276, over 13542.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2777, pruned_loss=0.05467, over 2626203.47 frames. ], batch size: 31, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:47:53,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=199338.33333333334, ans=0.09899494936611666 +2024-08-03 18:47:53,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=199338.33333333334, ans=0.125 +2024-08-03 18:47:55,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199375.0, ans=0.1 +2024-08-03 18:47:55,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=199375.0, ans=0.125 +2024-08-03 18:48:04,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=199411.66666666666, ans=0.05 +2024-08-03 18:48:06,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=199411.66666666666, ans=0.125 +2024-08-03 18:48:09,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=199411.66666666666, ans=0.0 +2024-08-03 18:48:10,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=199411.66666666666, ans=0.95 +2024-08-03 18:48:25,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.66 vs. limit=15.0 +2024-08-03 18:48:26,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=199485.0, ans=0.07 +2024-08-03 18:48:30,462 INFO [train.py:1114] (3/4) Epoch 15, batch 3450, loss[loss=0.186, simple_loss=0.2767, pruned_loss=0.04759, over 12851.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2774, pruned_loss=0.05434, over 2629182.26 frames. 
], batch size: 52, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:48:33,769 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.288e+01 1.166e+02 1.372e+02 1.679e+02 2.920e+02, threshold=2.743e+02, percent-clipped=3.0 +2024-08-03 18:48:36,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199521.66666666666, ans=0.1 +2024-08-03 18:48:39,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=199558.33333333334, ans=0.125 +2024-08-03 18:48:59,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199631.66666666666, ans=0.1 +2024-08-03 18:49:08,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=199668.33333333334, ans=0.125 +2024-08-03 18:49:13,502 INFO [train.py:1114] (3/4) Epoch 15, batch 3500, loss[loss=0.1754, simple_loss=0.2681, pruned_loss=0.04138, over 13511.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2764, pruned_loss=0.05404, over 2631413.88 frames. ], batch size: 34, lr: 8.19e-03, grad_scale: 16.0 +2024-08-03 18:49:55,965 INFO [train.py:1114] (3/4) Epoch 15, batch 3550, loss[loss=0.1934, simple_loss=0.28, pruned_loss=0.05339, over 12440.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2788, pruned_loss=0.05506, over 2629566.62 frames. ], batch size: 58, lr: 8.18e-03, grad_scale: 16.0 +2024-08-03 18:49:57,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199888.33333333334, ans=0.125 +2024-08-03 18:49:59,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.733e+01 1.107e+02 1.282e+02 1.546e+02 2.459e+02, threshold=2.565e+02, percent-clipped=0.0 +2024-08-03 18:50:02,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=199888.33333333334, ans=0.125 +2024-08-03 18:50:02,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=199888.33333333334, ans=0.125 +2024-08-03 18:50:17,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=199961.66666666666, ans=0.2 +2024-08-03 18:50:24,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=199998.33333333334, ans=0.2 +2024-08-03 18:50:32,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0 +2024-08-03 18:50:35,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=200035.0, ans=0.0 +2024-08-03 18:50:40,683 INFO [train.py:1114] (3/4) Epoch 15, batch 3600, loss[loss=0.2146, simple_loss=0.2961, pruned_loss=0.06649, over 9443.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2832, pruned_loss=0.05879, over 2488220.80 frames. ], batch size: 96, lr: 8.18e-03, grad_scale: 32.0 +2024-08-03 18:50:42,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.94 vs. 
limit=10.0 +2024-08-03 18:50:59,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200145.0, ans=0.1 +2024-08-03 18:51:11,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=200181.66666666666, ans=0.025 +2024-08-03 18:52:09,588 INFO [train.py:1114] (3/4) Epoch 16, batch 0, loss[loss=0.1663, simple_loss=0.2562, pruned_loss=0.03818, over 13331.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2562, pruned_loss=0.03818, over 13331.00 frames. ], batch size: 33, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:52:09,588 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 18:52:19,527 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1763, simple_loss=0.2767, pruned_loss=0.03798, over 944034.00 frames. +2024-08-03 18:52:19,528 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 18:52:24,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=200222.0, ans=0.0 +2024-08-03 18:52:33,152 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.265e+01 1.242e+02 1.434e+02 1.560e+02 1.878e+02, threshold=2.867e+02, percent-clipped=0.0 +2024-08-03 18:52:36,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200258.66666666666, ans=0.0 +2024-08-03 18:52:48,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200332.0, ans=0.1 +2024-08-03 18:53:00,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=200368.66666666666, ans=0.125 +2024-08-03 18:53:00,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=200368.66666666666, ans=0.125 +2024-08-03 18:53:01,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200368.66666666666, ans=0.1 +2024-08-03 18:53:02,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=200368.66666666666, ans=0.07 +2024-08-03 18:53:05,367 INFO [train.py:1114] (3/4) Epoch 16, batch 50, loss[loss=0.1825, simple_loss=0.2564, pruned_loss=0.0543, over 13417.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.279, pruned_loss=0.05521, over 578550.83 frames. ], batch size: 32, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:53:05,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=200405.33333333334, ans=0.0 +2024-08-03 18:53:07,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=200405.33333333334, ans=0.125 +2024-08-03 18:53:09,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=200405.33333333334, ans=0.125 +2024-08-03 18:53:29,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.49 vs. limit=22.5 +2024-08-03 18:53:50,756 INFO [train.py:1114] (3/4) Epoch 16, batch 100, loss[loss=0.1749, simple_loss=0.2578, pruned_loss=0.04598, over 13539.00 frames. 
], tot_loss[loss=0.1962, simple_loss=0.2812, pruned_loss=0.05559, over 1024977.53 frames. ], batch size: 35, lr: 7.91e-03, grad_scale: 16.0 +2024-08-03 18:54:01,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.86 vs. limit=15.0 +2024-08-03 18:54:06,122 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.720e+01 1.140e+02 1.333e+02 1.689e+02 2.611e+02, threshold=2.666e+02, percent-clipped=0.0 +2024-08-03 18:54:06,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=200625.33333333334, ans=0.125 +2024-08-03 18:54:18,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=200662.0, ans=0.2 +2024-08-03 18:54:27,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200698.66666666666, ans=0.125 +2024-08-03 18:54:43,434 INFO [train.py:1114] (3/4) Epoch 16, batch 150, loss[loss=0.1518, simple_loss=0.2387, pruned_loss=0.03247, over 13431.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2773, pruned_loss=0.05375, over 1386119.10 frames. ], batch size: 32, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:54:43,558 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:54:56,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=200808.66666666666, ans=0.0 +2024-08-03 18:55:29,204 INFO [train.py:1114] (3/4) Epoch 16, batch 200, loss[loss=0.194, simple_loss=0.2797, pruned_loss=0.05414, over 12297.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2766, pruned_loss=0.05347, over 1664609.67 frames. ], batch size: 58, lr: 7.90e-03, grad_scale: 16.0 +2024-08-03 18:55:36,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=200955.33333333334, ans=0.125 +2024-08-03 18:55:42,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.144e+01 1.093e+02 1.229e+02 1.620e+02 3.492e+02, threshold=2.459e+02, percent-clipped=5.0 +2024-08-03 18:55:43,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=200992.0, ans=0.0 +2024-08-03 18:55:53,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.62 vs. limit=15.0 +2024-08-03 18:55:54,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=201028.66666666666, ans=0.0 +2024-08-03 18:56:13,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0 +2024-08-03 18:56:18,890 INFO [train.py:1114] (3/4) Epoch 16, batch 250, loss[loss=0.1928, simple_loss=0.2807, pruned_loss=0.05241, over 13364.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2763, pruned_loss=0.05322, over 1883711.75 frames. 
], batch size: 46, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:56:25,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=201138.66666666666, ans=0.2 +2024-08-03 18:56:26,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=201138.66666666666, ans=0.0 +2024-08-03 18:56:27,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=201138.66666666666, ans=0.125 +2024-08-03 18:56:34,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=201175.33333333334, ans=0.0 +2024-08-03 18:56:40,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=201212.0, ans=0.125 +2024-08-03 18:56:41,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201212.0, ans=0.1 +2024-08-03 18:56:56,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201285.33333333334, ans=0.125 +2024-08-03 18:56:59,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201285.33333333334, ans=0.0 +2024-08-03 18:56:59,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=201285.33333333334, ans=0.05 +2024-08-03 18:57:03,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.57 vs. limit=15.0 +2024-08-03 18:57:05,282 INFO [train.py:1114] (3/4) Epoch 16, batch 300, loss[loss=0.2161, simple_loss=0.2998, pruned_loss=0.06615, over 13455.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2756, pruned_loss=0.05287, over 2050604.67 frames. ], batch size: 42, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:57:05,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201322.0, ans=0.125 +2024-08-03 18:57:15,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=201358.66666666666, ans=0.05 +2024-08-03 18:57:19,204 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.378e+01 1.118e+02 1.251e+02 1.604e+02 3.551e+02, threshold=2.502e+02, percent-clipped=3.0 +2024-08-03 18:57:27,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=201395.33333333334, ans=0.125 +2024-08-03 18:57:33,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201432.0, ans=0.1 +2024-08-03 18:57:58,321 INFO [train.py:1114] (3/4) Epoch 16, batch 350, loss[loss=0.1638, simple_loss=0.2416, pruned_loss=0.04305, over 13573.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.0527, over 2181665.55 frames. 
], batch size: 33, lr: 7.89e-03, grad_scale: 16.0 +2024-08-03 18:58:05,163 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 18:58:05,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.26 vs. limit=10.0 +2024-08-03 18:58:10,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.54 vs. limit=15.0 +2024-08-03 18:58:17,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=201542.0, ans=0.0 +2024-08-03 18:58:34,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=201615.33333333334, ans=0.025 +2024-08-03 18:58:48,031 INFO [train.py:1114] (3/4) Epoch 16, batch 400, loss[loss=0.1781, simple_loss=0.2614, pruned_loss=0.0474, over 13352.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2754, pruned_loss=0.0522, over 2285312.68 frames. ], batch size: 37, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:58:50,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=201688.66666666666, ans=0.5 +2024-08-03 18:58:51,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201688.66666666666, ans=0.125 +2024-08-03 18:59:01,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.809e+01 1.125e+02 1.305e+02 1.618e+02 2.689e+02, threshold=2.611e+02, percent-clipped=3.0 +2024-08-03 18:59:16,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201798.66666666666, ans=0.1 +2024-08-03 18:59:17,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=201798.66666666666, ans=0.0 +2024-08-03 18:59:32,889 INFO [train.py:1114] (3/4) Epoch 16, batch 450, loss[loss=0.2101, simple_loss=0.2907, pruned_loss=0.06472, over 13536.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2759, pruned_loss=0.05246, over 2359605.10 frames. ], batch size: 38, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 18:59:33,505 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.13 vs. 
limit=22.5 +2024-08-03 18:59:56,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201945.33333333334, ans=0.125 +2024-08-03 19:00:03,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=201982.0, ans=0.125 +2024-08-03 19:00:06,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=201982.0, ans=0.2 +2024-08-03 19:00:09,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=202018.66666666666, ans=0.125 +2024-08-03 19:00:48,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=202018.66666666666, ans=0.2 +2024-08-03 19:00:50,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=202018.66666666666, ans=0.125 +2024-08-03 19:00:52,723 INFO [train.py:1114] (3/4) Epoch 16, batch 500, loss[loss=0.2086, simple_loss=0.2899, pruned_loss=0.06364, over 13451.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2749, pruned_loss=0.05205, over 2425205.80 frames. ], batch size: 43, lr: 7.88e-03, grad_scale: 32.0 +2024-08-03 19:00:59,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=202055.33333333334, ans=0.125 +2024-08-03 19:01:06,063 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.045e+01 1.073e+02 1.280e+02 1.513e+02 2.984e+02, threshold=2.559e+02, percent-clipped=3.0 +2024-08-03 19:01:30,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=202202.0, ans=0.125 +2024-08-03 19:01:37,604 INFO [train.py:1114] (3/4) Epoch 16, batch 550, loss[loss=0.2122, simple_loss=0.2906, pruned_loss=0.06692, over 13060.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2745, pruned_loss=0.05208, over 2467548.15 frames. ], batch size: 48, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:01:42,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=202238.66666666666, ans=0.0 +2024-08-03 19:01:42,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=202238.66666666666, ans=0.09899494936611666 +2024-08-03 19:01:45,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.03 vs. limit=10.0 +2024-08-03 19:01:55,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=202275.33333333334, ans=0.125 +2024-08-03 19:02:02,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=202312.0, ans=0.0 +2024-08-03 19:02:09,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=202348.66666666666, ans=0.125 +2024-08-03 19:02:09,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.23 vs. limit=15.0 +2024-08-03 19:02:09,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.48 vs. 
limit=15.0 +2024-08-03 19:02:24,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.26 vs. limit=15.0 +2024-08-03 19:02:26,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=202385.33333333334, ans=0.2 +2024-08-03 19:02:30,468 INFO [train.py:1114] (3/4) Epoch 16, batch 600, loss[loss=0.1944, simple_loss=0.2845, pruned_loss=0.05213, over 13335.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2748, pruned_loss=0.05201, over 2506751.17 frames. ], batch size: 46, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:02:31,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=202422.0, ans=0.125 +2024-08-03 19:02:43,889 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.542e+01 1.053e+02 1.176e+02 1.457e+02 2.332e+02, threshold=2.351e+02, percent-clipped=0.0 +2024-08-03 19:03:15,350 INFO [train.py:1114] (3/4) Epoch 16, batch 650, loss[loss=0.1938, simple_loss=0.2729, pruned_loss=0.05735, over 13564.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.274, pruned_loss=0.05181, over 2542093.41 frames. ], batch size: 37, lr: 7.87e-03, grad_scale: 32.0 +2024-08-03 19:03:15,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=202605.33333333334, ans=0.0 +2024-08-03 19:03:16,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.18 vs. limit=15.0 +2024-08-03 19:03:32,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.43 vs. limit=15.0 +2024-08-03 19:03:48,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.51 vs. limit=15.0 +2024-08-03 19:03:48,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=202715.33333333334, ans=0.125 +2024-08-03 19:03:56,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-08-03 19:04:00,440 INFO [train.py:1114] (3/4) Epoch 16, batch 700, loss[loss=0.1942, simple_loss=0.2754, pruned_loss=0.05651, over 13528.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2749, pruned_loss=0.05228, over 2564220.63 frames. 
], batch size: 35, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:03,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=202788.66666666666, ans=0.025 +2024-08-03 19:04:13,873 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.401e+01 1.143e+02 1.370e+02 1.738e+02 3.116e+02, threshold=2.740e+02, percent-clipped=8.0 +2024-08-03 19:04:23,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=202862.0, ans=0.125 +2024-08-03 19:04:32,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=202898.66666666666, ans=0.2 +2024-08-03 19:04:39,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=202935.33333333334, ans=0.0 +2024-08-03 19:04:43,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=202935.33333333334, ans=0.125 +2024-08-03 19:04:46,006 INFO [train.py:1114] (3/4) Epoch 16, batch 750, loss[loss=0.1729, simple_loss=0.267, pruned_loss=0.0394, over 13359.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2742, pruned_loss=0.05189, over 2582348.18 frames. ], batch size: 37, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:04:50,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=202972.0, ans=0.95 +2024-08-03 19:04:52,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.52 vs. limit=15.0 +2024-08-03 19:04:55,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=203008.66666666666, ans=0.025 +2024-08-03 19:04:57,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=203008.66666666666, ans=0.02 +2024-08-03 19:05:02,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=203008.66666666666, ans=0.0 +2024-08-03 19:05:10,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=203045.33333333334, ans=0.125 +2024-08-03 19:05:21,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203118.66666666666, ans=0.125 +2024-08-03 19:05:33,762 INFO [train.py:1114] (3/4) Epoch 16, batch 800, loss[loss=0.1738, simple_loss=0.2528, pruned_loss=0.04739, over 13339.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2742, pruned_loss=0.05176, over 2597336.39 frames. ], batch size: 33, lr: 7.86e-03, grad_scale: 32.0 +2024-08-03 19:05:49,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.111e+01 1.103e+02 1.311e+02 1.683e+02 3.142e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 19:05:52,506 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.83 vs. 
limit=22.5 +2024-08-03 19:05:55,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=203228.66666666666, ans=0.125 +2024-08-03 19:06:05,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=203228.66666666666, ans=0.0 +2024-08-03 19:06:11,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203265.33333333334, ans=0.125 +2024-08-03 19:06:21,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=203302.0, ans=0.95 +2024-08-03 19:06:25,697 INFO [train.py:1114] (3/4) Epoch 16, batch 850, loss[loss=0.1969, simple_loss=0.2854, pruned_loss=0.05417, over 13310.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2743, pruned_loss=0.05172, over 2609419.93 frames. ], batch size: 40, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:06:25,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=203338.66666666666, ans=0.0 +2024-08-03 19:06:26,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.02 vs. limit=15.0 +2024-08-03 19:07:01,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=203485.33333333334, ans=0.025 +2024-08-03 19:07:02,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203485.33333333334, ans=0.1 +2024-08-03 19:07:10,791 INFO [train.py:1114] (3/4) Epoch 16, batch 900, loss[loss=0.1764, simple_loss=0.2527, pruned_loss=0.05003, over 13365.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2749, pruned_loss=0.05227, over 2611531.93 frames. ], batch size: 33, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:07:11,016 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:07:19,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=203558.66666666666, ans=0.0 +2024-08-03 19:07:21,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203558.66666666666, ans=0.1 +2024-08-03 19:07:24,142 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.183e+01 1.118e+02 1.381e+02 1.663e+02 2.638e+02, threshold=2.763e+02, percent-clipped=1.0 +2024-08-03 19:07:26,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=203558.66666666666, ans=0.025 +2024-08-03 19:07:38,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=203632.0, ans=0.2 +2024-08-03 19:07:47,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=203668.66666666666, ans=0.0 +2024-08-03 19:07:55,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=203705.33333333334, ans=0.125 +2024-08-03 19:07:55,958 INFO [train.py:1114] (3/4) Epoch 16, batch 950, loss[loss=0.1699, simple_loss=0.2559, pruned_loss=0.0419, over 13531.00 frames. 
], tot_loss[loss=0.1892, simple_loss=0.2748, pruned_loss=0.05178, over 2613154.81 frames. ], batch size: 34, lr: 7.85e-03, grad_scale: 32.0 +2024-08-03 19:08:05,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=203742.0, ans=0.2 +2024-08-03 19:08:32,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203852.0, ans=0.125 +2024-08-03 19:08:41,464 INFO [train.py:1114] (3/4) Epoch 16, batch 1000, loss[loss=0.1763, simple_loss=0.2654, pruned_loss=0.04363, over 13375.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2755, pruned_loss=0.05234, over 2611632.50 frames. ], batch size: 35, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:08:48,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.75 vs. limit=15.0 +2024-08-03 19:08:55,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.698e+01 1.080e+02 1.221e+02 1.447e+02 2.524e+02, threshold=2.442e+02, percent-clipped=0.0 +2024-08-03 19:09:00,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=203962.0, ans=0.125 +2024-08-03 19:09:17,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=203998.66666666666, ans=0.2 +2024-08-03 19:09:24,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=204035.33333333334, ans=0.125 +2024-08-03 19:09:30,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.97 vs. limit=15.0 +2024-08-03 19:09:34,129 INFO [train.py:1114] (3/4) Epoch 16, batch 1050, loss[loss=0.1951, simple_loss=0.2936, pruned_loss=0.04835, over 13566.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2748, pruned_loss=0.05206, over 2616256.65 frames. ], batch size: 39, lr: 7.84e-03, grad_scale: 32.0 +2024-08-03 19:09:49,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204108.66666666666, ans=0.1 +2024-08-03 19:09:49,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204108.66666666666, ans=0.1 +2024-08-03 19:10:01,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=204145.33333333334, ans=0.125 +2024-08-03 19:10:10,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=204182.0, ans=0.95 +2024-08-03 19:10:19,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0 +2024-08-03 19:10:21,099 INFO [train.py:1114] (3/4) Epoch 16, batch 1100, loss[loss=0.1906, simple_loss=0.2765, pruned_loss=0.05231, over 13555.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2745, pruned_loss=0.0522, over 2620488.83 frames. 
], batch size: 36, lr: 7.84e-03, grad_scale: 16.0 +2024-08-03 19:10:35,472 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.269e+01 1.085e+02 1.218e+02 1.448e+02 2.223e+02, threshold=2.436e+02, percent-clipped=0.0 +2024-08-03 19:10:37,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=204292.0, ans=0.0 +2024-08-03 19:10:46,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.74 vs. limit=15.0 +2024-08-03 19:10:51,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=204365.33333333334, ans=0.125 +2024-08-03 19:10:52,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=204365.33333333334, ans=0.125 +2024-08-03 19:11:06,565 INFO [train.py:1114] (3/4) Epoch 16, batch 1150, loss[loss=0.1947, simple_loss=0.27, pruned_loss=0.05968, over 13561.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2743, pruned_loss=0.05244, over 2619734.43 frames. ], batch size: 36, lr: 7.83e-03, grad_scale: 16.0 +2024-08-03 19:11:07,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=204438.66666666666, ans=0.025 +2024-08-03 19:11:16,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=204475.33333333334, ans=0.0 +2024-08-03 19:11:27,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.72 vs. limit=5.0 +2024-08-03 19:11:34,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=204548.66666666666, ans=0.125 +2024-08-03 19:11:42,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=204585.33333333334, ans=0.0 +2024-08-03 19:11:46,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=204585.33333333334, ans=0.2 +2024-08-03 19:11:52,578 INFO [train.py:1114] (3/4) Epoch 16, batch 1200, loss[loss=0.2308, simple_loss=0.3161, pruned_loss=0.07268, over 13577.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2756, pruned_loss=0.05269, over 2616604.13 frames. ], batch size: 39, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:12:06,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.149e+02 1.396e+02 1.741e+02 2.381e+02, threshold=2.791e+02, percent-clipped=0.0 +2024-08-03 19:12:15,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=204695.33333333334, ans=0.2 +2024-08-03 19:12:36,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=204768.66666666666, ans=0.2 +2024-08-03 19:12:37,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=204768.66666666666, ans=0.125 +2024-08-03 19:12:39,493 INFO [train.py:1114] (3/4) Epoch 16, batch 1250, loss[loss=0.1916, simple_loss=0.2777, pruned_loss=0.05273, over 13445.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2766, pruned_loss=0.0529, over 2628097.29 frames. 
], batch size: 42, lr: 7.83e-03, grad_scale: 32.0 +2024-08-03 19:13:27,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=204952.0, ans=0.07 +2024-08-03 19:13:27,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=204952.0, ans=0.025 +2024-08-03 19:13:29,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=204988.66666666666, ans=10.0 +2024-08-03 19:13:30,418 INFO [train.py:1114] (3/4) Epoch 16, batch 1300, loss[loss=0.1876, simple_loss=0.2731, pruned_loss=0.05099, over 12861.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2757, pruned_loss=0.05271, over 2630868.30 frames. ], batch size: 52, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:13:33,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=204988.66666666666, ans=0.125 +2024-08-03 19:13:38,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=205025.33333333334, ans=0.0 +2024-08-03 19:13:40,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=205025.33333333334, ans=0.125 +2024-08-03 19:13:40,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.56 vs. limit=12.0 +2024-08-03 19:13:44,839 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.037e+01 1.093e+02 1.262e+02 1.670e+02 2.902e+02, threshold=2.524e+02, percent-clipped=1.0 +2024-08-03 19:13:49,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=15.0 +2024-08-03 19:13:49,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.96 vs. limit=15.0 +2024-08-03 19:13:50,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=205062.0, ans=0.125 +2024-08-03 19:13:55,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.68 vs. limit=22.5 +2024-08-03 19:14:05,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=205098.66666666666, ans=0.0 +2024-08-03 19:14:15,800 INFO [train.py:1114] (3/4) Epoch 16, batch 1350, loss[loss=0.1939, simple_loss=0.282, pruned_loss=0.05291, over 13534.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2753, pruned_loss=0.05221, over 2639642.22 frames. ], batch size: 37, lr: 7.82e-03, grad_scale: 32.0 +2024-08-03 19:14:22,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.03 vs. limit=15.0 +2024-08-03 19:14:39,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.85 vs. 
limit=15.0 +2024-08-03 19:14:43,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=205282.0, ans=0.2 +2024-08-03 19:14:57,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205318.66666666666, ans=0.125 +2024-08-03 19:15:02,796 INFO [train.py:1114] (3/4) Epoch 16, batch 1400, loss[loss=0.1602, simple_loss=0.2376, pruned_loss=0.04136, over 13241.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2753, pruned_loss=0.05203, over 2643815.47 frames. ], batch size: 31, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:15:06,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=205355.33333333334, ans=0.125 +2024-08-03 19:15:17,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.642e+01 1.154e+02 1.285e+02 1.682e+02 2.521e+02, threshold=2.570e+02, percent-clipped=0.0 +2024-08-03 19:15:38,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=205502.0, ans=0.125 +2024-08-03 19:15:45,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=205502.0, ans=0.0 +2024-08-03 19:15:47,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.15 vs. limit=22.5 +2024-08-03 19:15:48,537 INFO [train.py:1114] (3/4) Epoch 16, batch 1450, loss[loss=0.2091, simple_loss=0.2979, pruned_loss=0.0601, over 13423.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2768, pruned_loss=0.05298, over 2642209.51 frames. ], batch size: 43, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:15:52,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=205538.66666666666, ans=0.125 +2024-08-03 19:16:15,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205612.0, ans=0.0 +2024-08-03 19:16:19,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=205648.66666666666, ans=0.0 +2024-08-03 19:16:21,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.34 vs. limit=15.0 +2024-08-03 19:16:24,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0 +2024-08-03 19:16:26,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=205685.33333333334, ans=0.125 +2024-08-03 19:16:37,159 INFO [train.py:1114] (3/4) Epoch 16, batch 1500, loss[loss=0.2133, simple_loss=0.2984, pruned_loss=0.06408, over 13407.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2775, pruned_loss=0.05315, over 2642166.84 frames. ], batch size: 39, lr: 7.81e-03, grad_scale: 32.0 +2024-08-03 19:16:38,910 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.42 vs. 
limit=8.0 +2024-08-03 19:16:41,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=205722.0, ans=0.1 +2024-08-03 19:16:50,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205758.66666666666, ans=0.125 +2024-08-03 19:16:56,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.141e+02 1.324e+02 1.628e+02 2.574e+02, threshold=2.648e+02, percent-clipped=1.0 +2024-08-03 19:17:09,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.67 vs. limit=15.0 +2024-08-03 19:17:20,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-08-03 19:17:27,188 INFO [train.py:1114] (3/4) Epoch 16, batch 1550, loss[loss=0.1973, simple_loss=0.2875, pruned_loss=0.05357, over 13400.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2777, pruned_loss=0.0534, over 2632021.36 frames. ], batch size: 41, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:17:29,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.10 vs. limit=22.5 +2024-08-03 19:17:31,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.54 vs. limit=15.0 +2024-08-03 19:17:41,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=205942.0, ans=0.0 +2024-08-03 19:18:12,965 INFO [train.py:1114] (3/4) Epoch 16, batch 1600, loss[loss=0.1794, simple_loss=0.2676, pruned_loss=0.04555, over 13569.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2777, pruned_loss=0.05383, over 2624740.76 frames. ], batch size: 39, lr: 7.80e-03, grad_scale: 32.0 +2024-08-03 19:18:27,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.061e+01 1.163e+02 1.376e+02 1.726e+02 3.125e+02, threshold=2.751e+02, percent-clipped=2.0 +2024-08-03 19:18:45,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206198.66666666666, ans=0.125 +2024-08-03 19:18:52,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=206235.33333333334, ans=10.0 +2024-08-03 19:18:53,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=206235.33333333334, ans=0.125 +2024-08-03 19:18:58,352 INFO [train.py:1114] (3/4) Epoch 16, batch 1650, loss[loss=0.1823, simple_loss=0.2823, pruned_loss=0.04114, over 13335.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2764, pruned_loss=0.05319, over 2621558.16 frames. 
], batch size: 40, lr: 7.80e-03, grad_scale: 16.0 +2024-08-03 19:19:09,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=206308.66666666666, ans=0.025 +2024-08-03 19:19:31,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206382.0, ans=0.0 +2024-08-03 19:19:32,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=206382.0, ans=0.0 +2024-08-03 19:19:45,353 INFO [train.py:1114] (3/4) Epoch 16, batch 1700, loss[loss=0.184, simple_loss=0.2522, pruned_loss=0.05789, over 13263.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2759, pruned_loss=0.05286, over 2630855.00 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:19:45,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=206455.33333333334, ans=0.125 +2024-08-03 19:19:50,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=206455.33333333334, ans=0.05 +2024-08-03 19:19:54,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.87 vs. limit=10.0 +2024-08-03 19:19:57,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=206492.0, ans=0.04949747468305833 +2024-08-03 19:20:02,481 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.148e+01 1.167e+02 1.345e+02 1.765e+02 2.775e+02, threshold=2.690e+02, percent-clipped=1.0 +2024-08-03 19:20:02,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=206492.0, ans=0.125 +2024-08-03 19:20:27,337 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:20:36,096 INFO [train.py:1114] (3/4) Epoch 16, batch 1750, loss[loss=0.1698, simple_loss=0.2488, pruned_loss=0.04536, over 13547.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2757, pruned_loss=0.05259, over 2634306.26 frames. ], batch size: 31, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:20:41,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=206638.66666666666, ans=0.0 +2024-08-03 19:20:42,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=206638.66666666666, ans=0.125 +2024-08-03 19:20:50,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.41 vs. limit=12.0 +2024-08-03 19:20:51,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=206675.33333333334, ans=0.125 +2024-08-03 19:20:53,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206712.0, ans=0.1 +2024-08-03 19:21:09,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=206748.66666666666, ans=0.125 +2024-08-03 19:21:21,428 INFO [train.py:1114] (3/4) Epoch 16, batch 1800, loss[loss=0.1828, simple_loss=0.269, pruned_loss=0.04827, over 13556.00 frames. 
], tot_loss[loss=0.1903, simple_loss=0.2754, pruned_loss=0.05254, over 2635104.68 frames. ], batch size: 38, lr: 7.79e-03, grad_scale: 16.0 +2024-08-03 19:21:29,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=206858.66666666666, ans=0.0 +2024-08-03 19:21:36,889 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.527e+01 1.147e+02 1.312e+02 1.685e+02 2.855e+02, threshold=2.624e+02, percent-clipped=1.0 +2024-08-03 19:21:45,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206895.33333333334, ans=0.0 +2024-08-03 19:21:57,994 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:22:06,972 INFO [train.py:1114] (3/4) Epoch 16, batch 1850, loss[loss=0.1901, simple_loss=0.2823, pruned_loss=0.04894, over 13401.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2751, pruned_loss=0.05209, over 2637756.57 frames. ], batch size: 39, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:22:09,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=207005.33333333334, ans=0.2 +2024-08-03 19:22:09,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.22 vs. limit=6.0 +2024-08-03 19:22:12,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.59 vs. limit=22.5 +2024-08-03 19:22:19,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.69 vs. limit=12.0 +2024-08-03 19:22:28,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207078.66666666666, ans=0.1 +2024-08-03 19:22:45,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=207115.33333333334, ans=0.0 +2024-08-03 19:22:49,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=207152.0, ans=0.125 +2024-08-03 19:22:54,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=207152.0, ans=0.0 +2024-08-03 19:22:58,926 INFO [train.py:1114] (3/4) Epoch 16, batch 1900, loss[loss=0.1799, simple_loss=0.2696, pruned_loss=0.04506, over 13311.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2748, pruned_loss=0.05177, over 2640443.81 frames. 
], batch size: 40, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:23:29,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.263e+01 1.127e+02 1.354e+02 1.894e+02 2.950e+02, threshold=2.708e+02, percent-clipped=4.0 +2024-08-03 19:23:34,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207262.0, ans=0.1 +2024-08-03 19:23:35,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207262.0, ans=0.1 +2024-08-03 19:23:48,744 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:24:02,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.97 vs. limit=15.0 +2024-08-03 19:24:03,031 INFO [train.py:1114] (3/4) Epoch 16, batch 1950, loss[loss=0.2083, simple_loss=0.2844, pruned_loss=0.06607, over 13572.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2763, pruned_loss=0.05238, over 2647013.08 frames. ], batch size: 36, lr: 7.78e-03, grad_scale: 16.0 +2024-08-03 19:24:17,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=207408.66666666666, ans=0.025 +2024-08-03 19:24:18,478 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:24:21,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207408.66666666666, ans=0.125 +2024-08-03 19:24:29,298 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:24:30,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=207445.33333333334, ans=0.125 +2024-08-03 19:24:41,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207482.0, ans=0.125 +2024-08-03 19:24:44,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=207518.66666666666, ans=0.0 +2024-08-03 19:24:49,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=207518.66666666666, ans=0.025 +2024-08-03 19:24:52,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=207518.66666666666, ans=0.125 +2024-08-03 19:24:53,825 INFO [train.py:1114] (3/4) Epoch 16, batch 2000, loss[loss=0.1847, simple_loss=0.2641, pruned_loss=0.05268, over 13534.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2774, pruned_loss=0.05281, over 2635644.13 frames. 
], batch size: 31, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:24:57,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=207555.33333333334, ans=0.125 +2024-08-03 19:25:05,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=207592.0, ans=0.0 +2024-08-03 19:25:07,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=207592.0, ans=0.125 +2024-08-03 19:25:09,399 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.745e+01 1.151e+02 1.402e+02 1.831e+02 3.066e+02, threshold=2.804e+02, percent-clipped=4.0 +2024-08-03 19:25:29,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.30 vs. limit=12.0 +2024-08-03 19:25:38,900 INFO [train.py:1114] (3/4) Epoch 16, batch 2050, loss[loss=0.1818, simple_loss=0.2624, pruned_loss=0.05062, over 13419.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2759, pruned_loss=0.05242, over 2632654.72 frames. ], batch size: 32, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:25:40,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.65 vs. limit=5.0 +2024-08-03 19:25:54,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=207775.33333333334, ans=0.025 +2024-08-03 19:25:58,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.91 vs. limit=22.5 +2024-08-03 19:26:14,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=207885.33333333334, ans=0.2 +2024-08-03 19:26:15,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=207885.33333333334, ans=0.0 +2024-08-03 19:26:15,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207885.33333333334, ans=0.125 +2024-08-03 19:26:20,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.94 vs. limit=10.0 +2024-08-03 19:26:23,758 INFO [train.py:1114] (3/4) Epoch 16, batch 2100, loss[loss=0.1958, simple_loss=0.2763, pruned_loss=0.05766, over 13548.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2747, pruned_loss=0.05195, over 2638401.63 frames. ], batch size: 37, lr: 7.77e-03, grad_scale: 32.0 +2024-08-03 19:26:26,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. 
limit=10.0 +2024-08-03 19:26:38,697 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.022e+01 1.073e+02 1.217e+02 1.568e+02 3.232e+02, threshold=2.433e+02, percent-clipped=1.0 +2024-08-03 19:26:43,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207995.33333333334, ans=0.125 +2024-08-03 19:26:46,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207995.33333333334, ans=0.1 +2024-08-03 19:26:52,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=208032.0, ans=0.125 +2024-08-03 19:27:08,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.01 vs. limit=12.0 +2024-08-03 19:27:10,324 INFO [train.py:1114] (3/4) Epoch 16, batch 2150, loss[loss=0.1765, simple_loss=0.2659, pruned_loss=0.04358, over 13549.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2747, pruned_loss=0.05208, over 2646999.73 frames. ], batch size: 36, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:27:27,763 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.63 vs. limit=15.0 +2024-08-03 19:27:40,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=208215.33333333334, ans=0.125 +2024-08-03 19:27:59,602 INFO [train.py:1114] (3/4) Epoch 16, batch 2200, loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.04955, over 13400.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.275, pruned_loss=0.05199, over 2644753.24 frames. ], batch size: 39, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:28:16,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.518e+01 1.187e+02 1.408e+02 1.826e+02 3.967e+02, threshold=2.817e+02, percent-clipped=9.0 +2024-08-03 19:28:25,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=12.0 +2024-08-03 19:28:46,817 INFO [train.py:1114] (3/4) Epoch 16, batch 2250, loss[loss=0.1702, simple_loss=0.2654, pruned_loss=0.03749, over 13355.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2753, pruned_loss=0.05208, over 2642302.56 frames. 
], batch size: 37, lr: 7.76e-03, grad_scale: 32.0 +2024-08-03 19:28:48,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=208472.0, ans=0.2 +2024-08-03 19:28:49,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=208472.0, ans=0.0 +2024-08-03 19:28:59,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208508.66666666666, ans=0.1 +2024-08-03 19:29:00,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=208508.66666666666, ans=0.2 +2024-08-03 19:29:10,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=208545.33333333334, ans=0.07 +2024-08-03 19:29:10,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=208545.33333333334, ans=0.125 +2024-08-03 19:29:27,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=208618.66666666666, ans=0.04949747468305833 +2024-08-03 19:29:29,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.92 vs. limit=22.5 +2024-08-03 19:29:31,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=208618.66666666666, ans=0.0 +2024-08-03 19:29:33,648 INFO [train.py:1114] (3/4) Epoch 16, batch 2300, loss[loss=0.1567, simple_loss=0.2381, pruned_loss=0.03771, over 13570.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2745, pruned_loss=0.05206, over 2639323.27 frames. ], batch size: 33, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:29:37,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=208655.33333333334, ans=0.09899494936611666 +2024-08-03 19:29:45,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. 
limit=6.0 +2024-08-03 19:29:48,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=208692.0, ans=0.125 +2024-08-03 19:29:54,059 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.353e+01 1.164e+02 1.344e+02 1.643e+02 2.956e+02, threshold=2.688e+02, percent-clipped=1.0 +2024-08-03 19:29:55,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=208692.0, ans=0.125 +2024-08-03 19:30:05,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=208765.33333333334, ans=0.5 +2024-08-03 19:30:07,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=208765.33333333334, ans=0.0 +2024-08-03 19:30:10,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=208765.33333333334, ans=0.125 +2024-08-03 19:30:12,152 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:30:14,872 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:30:19,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208802.0, ans=0.125 +2024-08-03 19:30:24,643 INFO [train.py:1114] (3/4) Epoch 16, batch 2350, loss[loss=0.2101, simple_loss=0.2979, pruned_loss=0.06111, over 13553.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2735, pruned_loss=0.05164, over 2641711.72 frames. ], batch size: 38, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:30:35,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=208875.33333333334, ans=0.04949747468305833 +2024-08-03 19:31:14,339 INFO [train.py:1114] (3/4) Epoch 16, batch 2400, loss[loss=0.1774, simple_loss=0.2698, pruned_loss=0.04252, over 13535.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2744, pruned_loss=0.05178, over 2642810.30 frames. 
], batch size: 35, lr: 7.75e-03, grad_scale: 32.0 +2024-08-03 19:31:24,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=209022.0, ans=0.2 +2024-08-03 19:31:29,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209022.0, ans=0.1 +2024-08-03 19:31:40,582 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.713e+01 1.195e+02 1.361e+02 1.735e+02 2.883e+02, threshold=2.722e+02, percent-clipped=1.0 +2024-08-03 19:31:56,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=209095.33333333334, ans=0.125 +2024-08-03 19:32:06,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=209132.0, ans=0.0 +2024-08-03 19:32:06,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209132.0, ans=0.125 +2024-08-03 19:32:09,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=209168.66666666666, ans=0.125 +2024-08-03 19:32:11,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.77 vs. limit=22.5 +2024-08-03 19:32:11,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.02 vs. limit=22.5 +2024-08-03 19:32:18,680 INFO [train.py:1114] (3/4) Epoch 16, batch 2450, loss[loss=0.1871, simple_loss=0.2831, pruned_loss=0.04561, over 13351.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2755, pruned_loss=0.05241, over 2633570.41 frames. ], batch size: 37, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:32:18,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=209205.33333333334, ans=0.125 +2024-08-03 19:32:32,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=209242.0, ans=0.125 +2024-08-03 19:32:45,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0 +2024-08-03 19:32:59,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.47 vs. limit=15.0 +2024-08-03 19:33:03,797 INFO [train.py:1114] (3/4) Epoch 16, batch 2500, loss[loss=0.1885, simple_loss=0.2797, pruned_loss=0.04863, over 13404.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2752, pruned_loss=0.05213, over 2637231.54 frames. 
], batch size: 39, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:33:10,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=209388.66666666666, ans=0.125 +2024-08-03 19:33:19,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=209425.33333333334, ans=0.0 +2024-08-03 19:33:21,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.166e+01 1.109e+02 1.260e+02 1.584e+02 2.146e+02, threshold=2.521e+02, percent-clipped=0.0 +2024-08-03 19:33:42,848 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:33:45,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=209535.33333333334, ans=10.0 +2024-08-03 19:33:50,677 INFO [train.py:1114] (3/4) Epoch 16, batch 2550, loss[loss=0.1719, simple_loss=0.2524, pruned_loss=0.04575, over 13535.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2752, pruned_loss=0.05201, over 2639108.41 frames. ], batch size: 31, lr: 7.74e-03, grad_scale: 32.0 +2024-08-03 19:34:19,063 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.93 vs. limit=15.0 +2024-08-03 19:34:23,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=209682.0, ans=0.125 +2024-08-03 19:34:30,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=209718.66666666666, ans=0.025 +2024-08-03 19:34:36,114 INFO [train.py:1114] (3/4) Epoch 16, batch 2600, loss[loss=0.1833, simple_loss=0.2736, pruned_loss=0.04655, over 13563.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2759, pruned_loss=0.05194, over 2637910.47 frames. ], batch size: 36, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:34:50,518 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.224e+01 1.145e+02 1.272e+02 1.680e+02 2.511e+02, threshold=2.545e+02, percent-clipped=0.0 +2024-08-03 19:35:06,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=209865.33333333334, ans=0.125 +2024-08-03 19:35:08,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=209865.33333333334, ans=0.0 +2024-08-03 19:35:14,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=209902.0, ans=0.125 +2024-08-03 19:35:18,989 INFO [train.py:1114] (3/4) Epoch 16, batch 2650, loss[loss=0.1995, simple_loss=0.2923, pruned_loss=0.0534, over 13291.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2762, pruned_loss=0.05205, over 2640928.09 frames. ], batch size: 46, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:35:28,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=209975.33333333334, ans=0.05 +2024-08-03 19:35:38,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.30 vs. 
limit=12.0 +2024-08-03 19:35:49,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=210048.66666666666, ans=0.025 +2024-08-03 19:36:02,101 INFO [train.py:1114] (3/4) Epoch 16, batch 2700, loss[loss=0.1964, simple_loss=0.2848, pruned_loss=0.05405, over 13551.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2763, pruned_loss=0.05218, over 2637451.03 frames. ], batch size: 40, lr: 7.73e-03, grad_scale: 32.0 +2024-08-03 19:36:07,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.16 vs. limit=8.0 +2024-08-03 19:36:16,696 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.859e+01 1.215e+02 1.396e+02 1.823e+02 2.794e+02, threshold=2.792e+02, percent-clipped=5.0 +2024-08-03 19:36:18,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.37 vs. limit=15.0 +2024-08-03 19:36:24,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=210195.33333333334, ans=0.0 +2024-08-03 19:36:24,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=15.0 +2024-08-03 19:36:27,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=210232.0, ans=0.0 +2024-08-03 19:36:30,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.40 vs. limit=15.0 +2024-08-03 19:36:34,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=210232.0, ans=0.125 +2024-08-03 19:36:38,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210268.66666666666, ans=0.125 +2024-08-03 19:36:44,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=210305.33333333334, ans=0.125 +2024-08-03 19:36:45,453 INFO [train.py:1114] (3/4) Epoch 16, batch 2750, loss[loss=0.196, simple_loss=0.2718, pruned_loss=0.06009, over 13337.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2755, pruned_loss=0.05218, over 2635566.14 frames. ], batch size: 34, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:36:59,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=210342.0, ans=0.125 +2024-08-03 19:37:06,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=210378.66666666666, ans=0.0 +2024-08-03 19:37:07,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=210378.66666666666, ans=0.125 +2024-08-03 19:37:12,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=210415.33333333334, ans=0.125 +2024-08-03 19:37:13,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.51 vs. 
limit=15.0 +2024-08-03 19:37:26,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=210452.0, ans=0.2 +2024-08-03 19:37:31,232 INFO [train.py:1114] (3/4) Epoch 16, batch 2800, loss[loss=0.2346, simple_loss=0.3101, pruned_loss=0.07956, over 9174.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2757, pruned_loss=0.05236, over 2627368.45 frames. ], batch size: 96, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:37:45,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.762e+01 1.112e+02 1.298e+02 1.652e+02 2.703e+02, threshold=2.596e+02, percent-clipped=0.0 +2024-08-03 19:37:48,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210562.0, ans=0.125 +2024-08-03 19:38:07,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=210635.33333333334, ans=0.5 +2024-08-03 19:38:15,873 INFO [train.py:1114] (3/4) Epoch 16, batch 2850, loss[loss=0.1694, simple_loss=0.2631, pruned_loss=0.03786, over 13364.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2762, pruned_loss=0.05297, over 2621288.31 frames. ], batch size: 35, lr: 7.72e-03, grad_scale: 32.0 +2024-08-03 19:38:16,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=210672.0, ans=0.125 +2024-08-03 19:38:20,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210672.0, ans=0.0 +2024-08-03 19:38:20,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=210672.0, ans=0.125 +2024-08-03 19:38:23,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=210708.66666666666, ans=0.0 +2024-08-03 19:38:31,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=210708.66666666666, ans=0.125 +2024-08-03 19:38:42,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=210782.0, ans=0.025 +2024-08-03 19:39:00,432 INFO [train.py:1114] (3/4) Epoch 16, batch 2900, loss[loss=0.1831, simple_loss=0.2724, pruned_loss=0.04693, over 13368.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2775, pruned_loss=0.05327, over 2631811.85 frames. ], batch size: 36, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:39:16,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.624e+01 1.079e+02 1.232e+02 1.534e+02 2.946e+02, threshold=2.465e+02, percent-clipped=2.0 +2024-08-03 19:39:24,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=210928.66666666666, ans=0.0 +2024-08-03 19:39:25,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.10 vs. limit=15.0 +2024-08-03 19:39:46,445 INFO [train.py:1114] (3/4) Epoch 16, batch 2950, loss[loss=0.1665, simple_loss=0.2517, pruned_loss=0.04066, over 13324.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2765, pruned_loss=0.05265, over 2629517.89 frames. 
], batch size: 34, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:40:07,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.91 vs. limit=6.0 +2024-08-03 19:40:17,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=211148.66666666666, ans=0.025 +2024-08-03 19:40:23,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=211185.33333333334, ans=0.125 +2024-08-03 19:40:24,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=211185.33333333334, ans=0.125 +2024-08-03 19:40:27,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211185.33333333334, ans=0.125 +2024-08-03 19:40:29,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=211222.0, ans=0.2 +2024-08-03 19:40:30,444 INFO [train.py:1114] (3/4) Epoch 16, batch 3000, loss[loss=0.1872, simple_loss=0.2711, pruned_loss=0.05167, over 13543.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2762, pruned_loss=0.05265, over 2628927.95 frames. ], batch size: 37, lr: 7.71e-03, grad_scale: 32.0 +2024-08-03 19:40:30,444 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 19:40:42,219 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1717, simple_loss=0.2708, pruned_loss=0.03625, over 944034.00 frames. +2024-08-03 19:40:42,220 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 19:40:50,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211258.66666666666, ans=0.125 +2024-08-03 19:40:51,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=211258.66666666666, ans=0.125 +2024-08-03 19:40:56,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=211258.66666666666, ans=0.125 +2024-08-03 19:40:56,648 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.281e+01 1.094e+02 1.222e+02 1.516e+02 2.979e+02, threshold=2.443e+02, percent-clipped=5.0 +2024-08-03 19:40:59,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=211295.33333333334, ans=0.07 +2024-08-03 19:41:15,520 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:41:25,993 INFO [train.py:1114] (3/4) Epoch 16, batch 3050, loss[loss=0.1443, simple_loss=0.225, pruned_loss=0.03179, over 13529.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2761, pruned_loss=0.0527, over 2625555.90 frames. ], batch size: 35, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:41:26,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.75 vs. limit=12.0 +2024-08-03 19:41:35,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.25 vs. 
limit=15.0 +2024-08-03 19:41:40,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=211442.0, ans=0.125 +2024-08-03 19:41:40,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211442.0, ans=0.125 +2024-08-03 19:41:41,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0 +2024-08-03 19:41:44,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=211478.66666666666, ans=0.0 +2024-08-03 19:41:45,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.98 vs. limit=10.0 +2024-08-03 19:41:53,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=211515.33333333334, ans=0.0 +2024-08-03 19:42:05,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211552.0, ans=0.1 +2024-08-03 19:42:09,453 INFO [train.py:1114] (3/4) Epoch 16, batch 3100, loss[loss=0.1908, simple_loss=0.2805, pruned_loss=0.0505, over 13309.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2754, pruned_loss=0.05229, over 2625572.16 frames. ], batch size: 46, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:42:14,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=211588.66666666666, ans=0.07 +2024-08-03 19:42:23,782 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.933e+01 1.091e+02 1.250e+02 1.567e+02 2.776e+02, threshold=2.501e+02, percent-clipped=2.0 +2024-08-03 19:42:24,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-08-03 19:42:25,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=211662.0, ans=0.09899494936611666 +2024-08-03 19:42:26,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.78 vs. limit=15.0 +2024-08-03 19:42:40,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=211698.66666666666, ans=0.0 +2024-08-03 19:42:44,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=211735.33333333334, ans=0.0 +2024-08-03 19:42:51,941 INFO [train.py:1114] (3/4) Epoch 16, batch 3150, loss[loss=0.1835, simple_loss=0.2734, pruned_loss=0.04682, over 13030.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2752, pruned_loss=0.05202, over 2627345.98 frames. 
], batch size: 48, lr: 7.70e-03, grad_scale: 32.0 +2024-08-03 19:42:59,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=211808.66666666666, ans=0.125 +2024-08-03 19:43:01,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=211808.66666666666, ans=0.0 +2024-08-03 19:43:26,302 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.82 vs. limit=12.0 +2024-08-03 19:43:28,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=211918.66666666666, ans=0.0 +2024-08-03 19:43:33,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=211918.66666666666, ans=0.125 +2024-08-03 19:43:35,468 INFO [train.py:1114] (3/4) Epoch 16, batch 3200, loss[loss=0.188, simple_loss=0.2772, pruned_loss=0.04942, over 13554.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2746, pruned_loss=0.0516, over 2632900.53 frames. ], batch size: 37, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:43:45,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=211992.0, ans=0.125 +2024-08-03 19:43:46,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=211992.0, ans=0.125 +2024-08-03 19:43:47,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.04 vs. limit=10.0 +2024-08-03 19:43:49,849 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.470e+01 1.178e+02 1.467e+02 1.849e+02 2.870e+02, threshold=2.934e+02, percent-clipped=4.0 +2024-08-03 19:43:51,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.54 vs. limit=15.0 +2024-08-03 19:44:16,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=212102.0, ans=0.125 +2024-08-03 19:44:18,731 INFO [train.py:1114] (3/4) Epoch 16, batch 3250, loss[loss=0.2065, simple_loss=0.2938, pruned_loss=0.05966, over 13396.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2752, pruned_loss=0.05172, over 2638133.36 frames. 
], batch size: 38, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:44:20,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=212138.66666666666, ans=0.0 +2024-08-03 19:44:24,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=212138.66666666666, ans=0.0 +2024-08-03 19:44:29,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=212175.33333333334, ans=0.04949747468305833 +2024-08-03 19:44:37,605 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:44:40,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=212212.0, ans=0.125 +2024-08-03 19:44:41,928 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:44:49,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=212248.66666666666, ans=0.0 +2024-08-03 19:44:53,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=12.0 +2024-08-03 19:45:02,686 INFO [train.py:1114] (3/4) Epoch 16, batch 3300, loss[loss=0.1874, simple_loss=0.2776, pruned_loss=0.0486, over 12754.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2737, pruned_loss=0.05131, over 2639896.56 frames. ], batch size: 52, lr: 7.69e-03, grad_scale: 32.0 +2024-08-03 19:45:14,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=212358.66666666666, ans=0.07 +2024-08-03 19:45:17,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.789e+01 1.171e+02 1.334e+02 1.762e+02 2.468e+02, threshold=2.668e+02, percent-clipped=0.0 +2024-08-03 19:45:25,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.80 vs. limit=15.0 +2024-08-03 19:45:30,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=212432.0, ans=0.05 +2024-08-03 19:45:32,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0 +2024-08-03 19:45:33,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212432.0, ans=0.125 +2024-08-03 19:45:39,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212468.66666666666, ans=0.125 +2024-08-03 19:45:44,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=212468.66666666666, ans=0.125 +2024-08-03 19:45:44,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=212468.66666666666, ans=0.2 +2024-08-03 19:45:45,688 INFO [train.py:1114] (3/4) Epoch 16, batch 3350, loss[loss=0.2028, simple_loss=0.2877, pruned_loss=0.05895, over 13114.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2746, pruned_loss=0.05157, over 2630399.42 frames. 
], batch size: 48, lr: 7.68e-03, grad_scale: 32.0 +2024-08-03 19:45:46,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=212505.33333333334, ans=0.09899494936611666 +2024-08-03 19:46:06,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=212578.66666666666, ans=0.025 +2024-08-03 19:46:26,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=212652.0, ans=0.025 +2024-08-03 19:46:29,937 INFO [train.py:1114] (3/4) Epoch 16, batch 3400, loss[loss=0.1548, simple_loss=0.238, pruned_loss=0.03581, over 13537.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2745, pruned_loss=0.05175, over 2626495.41 frames. ], batch size: 31, lr: 7.68e-03, grad_scale: 32.0 +2024-08-03 19:46:44,565 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.890e+01 1.091e+02 1.257e+02 1.485e+02 2.568e+02, threshold=2.513e+02, percent-clipped=0.0 +2024-08-03 19:46:44,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=212725.33333333334, ans=0.2 +2024-08-03 19:46:51,591 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:46:54,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=212798.66666666666, ans=0.0 +2024-08-03 19:47:00,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.88 vs. limit=12.0 +2024-08-03 19:47:07,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=212835.33333333334, ans=0.125 +2024-08-03 19:47:07,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.22 vs. limit=15.0 +2024-08-03 19:47:12,597 INFO [train.py:1114] (3/4) Epoch 16, batch 3450, loss[loss=0.2046, simple_loss=0.2839, pruned_loss=0.06262, over 12851.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2749, pruned_loss=0.05189, over 2629199.55 frames. ], batch size: 52, lr: 7.68e-03, grad_scale: 32.0 +2024-08-03 19:47:18,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=212872.0, ans=0.125 +2024-08-03 19:47:37,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=212982.0, ans=0.0 +2024-08-03 19:47:55,628 INFO [train.py:1114] (3/4) Epoch 16, batch 3500, loss[loss=0.1974, simple_loss=0.2757, pruned_loss=0.05952, over 13542.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2742, pruned_loss=0.05195, over 2632039.36 frames. 
], batch size: 34, lr: 7.67e-03, grad_scale: 32.0 +2024-08-03 19:47:55,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=213055.33333333334, ans=0.125 +2024-08-03 19:47:58,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=213055.33333333334, ans=10.0 +2024-08-03 19:48:10,733 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.160e+01 1.228e+02 1.407e+02 1.881e+02 3.021e+02, threshold=2.813e+02, percent-clipped=7.0 +2024-08-03 19:48:12,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.81 vs. limit=15.0 +2024-08-03 19:48:38,353 INFO [train.py:1114] (3/4) Epoch 16, batch 3550, loss[loss=0.2118, simple_loss=0.2944, pruned_loss=0.06463, over 12497.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2758, pruned_loss=0.05269, over 2629295.22 frames. ], batch size: 58, lr: 7.67e-03, grad_scale: 16.0 +2024-08-03 19:49:04,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=213348.66666666666, ans=0.0 +2024-08-03 19:49:14,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=213385.33333333334, ans=0.025 +2024-08-03 19:49:21,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=213385.33333333334, ans=0.125 +2024-08-03 19:49:21,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=213385.33333333334, ans=0.125 +2024-08-03 19:49:23,461 INFO [train.py:1114] (3/4) Epoch 16, batch 3600, loss[loss=0.2311, simple_loss=0.3032, pruned_loss=0.07951, over 9203.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2802, pruned_loss=0.05661, over 2487126.76 frames. ], batch size: 96, lr: 7.67e-03, grad_scale: 32.0 +2024-08-03 19:49:39,125 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.114e+02 1.218e+02 1.319e+02 1.769e+02, threshold=2.437e+02, percent-clipped=0.0 +2024-08-03 19:49:52,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=213532.0, ans=0.05 +2024-08-03 19:50:42,679 INFO [train.py:1114] (3/4) Epoch 17, batch 0, loss[loss=0.1629, simple_loss=0.2527, pruned_loss=0.03658, over 13353.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2527, pruned_loss=0.03658, over 13353.00 frames. ], batch size: 33, lr: 7.43e-03, grad_scale: 32.0 +2024-08-03 19:50:42,680 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 19:50:47,777 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5518, 2.9311, 2.5749, 2.7276], device='cuda:3') +2024-08-03 19:50:48,943 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.8595, 3.6003, 4.0228, 3.8412], device='cuda:3') +2024-08-03 19:50:49,262 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.0671, 3.4321, 3.4338, 1.7964], device='cuda:3') +2024-08-03 19:50:52,771 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.17, simple_loss=0.2717, pruned_loss=0.03416, over 944034.00 frames. 
+2024-08-03 19:50:52,772 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 19:51:06,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=213605.33333333334, ans=0.0 +2024-08-03 19:51:08,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.96 vs. limit=22.5 +2024-08-03 19:51:13,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=213642.0, ans=0.125 +2024-08-03 19:51:13,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=213642.0, ans=0.5 +2024-08-03 19:51:13,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=213642.0, ans=0.125 +2024-08-03 19:51:17,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.51 vs. limit=22.5 +2024-08-03 19:51:20,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=213642.0, ans=0.0 +2024-08-03 19:51:25,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=213678.66666666666, ans=0.125 +2024-08-03 19:51:36,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213715.33333333334, ans=0.125 +2024-08-03 19:51:40,097 INFO [train.py:1114] (3/4) Epoch 17, batch 50, loss[loss=0.1558, simple_loss=0.2369, pruned_loss=0.03735, over 13435.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2762, pruned_loss=0.05293, over 578763.94 frames. ], batch size: 32, lr: 7.43e-03, grad_scale: 32.0 +2024-08-03 19:51:54,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=213788.66666666666, ans=0.0 +2024-08-03 19:52:03,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=213825.33333333334, ans=0.125 +2024-08-03 19:52:05,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.139e+01 1.157e+02 1.306e+02 1.728e+02 3.229e+02, threshold=2.612e+02, percent-clipped=8.0 +2024-08-03 19:52:17,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=213898.66666666666, ans=0.5 +2024-08-03 19:52:25,589 INFO [train.py:1114] (3/4) Epoch 17, batch 100, loss[loss=0.1743, simple_loss=0.2649, pruned_loss=0.0418, over 13538.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2769, pruned_loss=0.0521, over 1026455.24 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0 +2024-08-03 19:52:27,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. 
limit=6.0 +2024-08-03 19:52:33,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213935.33333333334, ans=0.0 +2024-08-03 19:52:38,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213972.0, ans=0.0 +2024-08-03 19:52:47,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=214008.66666666666, ans=0.125 +2024-08-03 19:53:10,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214082.0, ans=0.2 +2024-08-03 19:53:13,863 INFO [train.py:1114] (3/4) Epoch 17, batch 150, loss[loss=0.174, simple_loss=0.2595, pruned_loss=0.0443, over 13411.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2753, pruned_loss=0.05184, over 1387783.67 frames. ], batch size: 32, lr: 7.42e-03, grad_scale: 32.0 +2024-08-03 19:53:39,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.782e+01 1.100e+02 1.230e+02 1.473e+02 3.065e+02, threshold=2.460e+02, percent-clipped=1.0 +2024-08-03 19:53:53,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=214265.33333333334, ans=0.0 +2024-08-03 19:53:56,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=214265.33333333334, ans=0.0 +2024-08-03 19:53:58,932 INFO [train.py:1114] (3/4) Epoch 17, batch 200, loss[loss=0.2181, simple_loss=0.2949, pruned_loss=0.0706, over 12442.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2748, pruned_loss=0.05155, over 1666138.82 frames. ], batch size: 58, lr: 7.42e-03, grad_scale: 32.0 +2024-08-03 19:54:02,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=15.0 +2024-08-03 19:54:15,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=214338.66666666666, ans=0.025 +2024-08-03 19:54:26,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=214375.33333333334, ans=0.125 +2024-08-03 19:54:41,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214448.66666666666, ans=0.125 +2024-08-03 19:54:47,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214448.66666666666, ans=0.1 +2024-08-03 19:54:48,690 INFO [train.py:1114] (3/4) Epoch 17, batch 250, loss[loss=0.1795, simple_loss=0.276, pruned_loss=0.04145, over 13345.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2746, pruned_loss=0.05108, over 1885902.41 frames. 
], batch size: 46, lr: 7.42e-03, grad_scale: 32.0 +2024-08-03 19:54:52,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214485.33333333334, ans=0.125 +2024-08-03 19:54:52,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=214485.33333333334, ans=0.0 +2024-08-03 19:55:07,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214558.66666666666, ans=0.125 +2024-08-03 19:55:14,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.099e+02 1.340e+02 1.709e+02 3.717e+02, threshold=2.680e+02, percent-clipped=7.0 +2024-08-03 19:55:18,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214595.33333333334, ans=0.1 +2024-08-03 19:55:21,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=214595.33333333334, ans=0.2 +2024-08-03 19:55:33,715 INFO [train.py:1114] (3/4) Epoch 17, batch 300, loss[loss=0.2292, simple_loss=0.3141, pruned_loss=0.07215, over 13444.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2744, pruned_loss=0.05128, over 2053383.46 frames. ], batch size: 42, lr: 7.42e-03, grad_scale: 16.0 +2024-08-03 19:55:56,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.40 vs. limit=15.0 +2024-08-03 19:56:13,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214815.33333333334, ans=0.125 +2024-08-03 19:56:19,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.18 vs. limit=15.0 +2024-08-03 19:56:20,683 INFO [train.py:1114] (3/4) Epoch 17, batch 350, loss[loss=0.1655, simple_loss=0.2461, pruned_loss=0.04248, over 13556.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2743, pruned_loss=0.05132, over 2183135.39 frames. ], batch size: 33, lr: 7.41e-03, grad_scale: 16.0 +2024-08-03 19:56:31,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=214888.66666666666, ans=0.125 +2024-08-03 19:56:49,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.060e+01 1.097e+02 1.266e+02 1.426e+02 2.641e+02, threshold=2.532e+02, percent-clipped=0.0 +2024-08-03 19:57:03,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=214998.66666666666, ans=0.0 +2024-08-03 19:57:03,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=214998.66666666666, ans=0.0 +2024-08-03 19:57:03,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=214998.66666666666, ans=0.2 +2024-08-03 19:57:08,521 INFO [train.py:1114] (3/4) Epoch 17, batch 400, loss[loss=0.1935, simple_loss=0.277, pruned_loss=0.05504, over 13351.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2744, pruned_loss=0.05161, over 2287065.19 frames. 
+2024-08-03 19:57:10,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=215035.33333333334, ans=0.02 +2024-08-03 19:57:15,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=215035.33333333334, ans=0.125 +2024-08-03 19:57:24,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=215072.0, ans=0.0 +2024-08-03 19:57:27,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=215108.66666666666, ans=0.0 +2024-08-03 19:57:27,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215108.66666666666, ans=0.125 +2024-08-03 19:57:32,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.43 vs. limit=10.0 +2024-08-03 19:57:36,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215145.33333333334, ans=0.1 +2024-08-03 19:57:38,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0 +2024-08-03 19:57:43,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215182.0, ans=0.0 +2024-08-03 19:57:46,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.09 vs. limit=22.5 +2024-08-03 19:57:51,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0 +2024-08-03 19:57:54,023 INFO [train.py:1114] (3/4) Epoch 17, batch 450, loss[loss=0.1956, simple_loss=0.278, pruned_loss=0.05656, over 13550.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2742, pruned_loss=0.05145, over 2360324.34 frames. ], batch size: 38, lr: 7.41e-03, grad_scale: 32.0 +2024-08-03 19:58:06,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=215255.33333333334, ans=0.0 +2024-08-03 19:58:06,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=215255.33333333334, ans=0.125 +2024-08-03 19:58:23,999 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.558e+01 1.110e+02 1.275e+02 1.603e+02 2.813e+02, threshold=2.549e+02, percent-clipped=2.0 +2024-08-03 19:58:24,152 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 19:58:35,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=215365.33333333334, ans=0.2 +2024-08-03 19:58:42,990 INFO [train.py:1114] (3/4) Epoch 17, batch 500, loss[loss=0.1888, simple_loss=0.2783, pruned_loss=0.04968, over 13416.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2735, pruned_loss=0.05109, over 2425873.72 frames. ], batch size: 43, lr: 7.40e-03, grad_scale: 32.0
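
The `ScheduledFloat` lines from `[scaling.py:214]` trace hyperparameters (skip rates, balancer probabilities, `dropout_p`, bypass scales) that are annealed as a function of `batch_count`, with `ans` being the value currently in effect; by this point in training most skip rates have settled at 0.0. One natural implementation, assumed here rather than read from scaling.py, is piecewise-linear interpolation over `(batch_count, value)` breakpoints:

```python
from bisect import bisect_right

def scheduled_float(points: list[tuple[float, float]], batch_count: float) -> float:
    """Piecewise-linear schedule over (batch_count, value) breakpoints."""
    xs = [x for x, _ in points]
    ys = [y for _, y in points]
    if batch_count <= xs[0]:
        return ys[0]
    if batch_count >= xs[-1]:
        return ys[-1]
    i = bisect_right(xs, batch_count) - 1
    frac = (batch_count - xs[i]) / (xs[i + 1] - xs[i])
    return ys[i] + frac * (ys[i + 1] - ys[i])

# Illustrative schedule: a skip rate decaying from 0.5 to 0.0 by batch 4000,
# then flat, consistent with the ans=0.0 readings this late in training.
print(scheduled_float([(0.0, 0.5), (4000.0, 0.0)], 215255.0))  # 0.0
```
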
+2024-08-03 19:59:00,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.20 vs. limit=15.0 +2024-08-03 19:59:01,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=215475.33333333334, ans=0.0 +2024-08-03 19:59:07,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=215475.33333333334, ans=0.0 +2024-08-03 19:59:18,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=215512.0, ans=0.125 +2024-08-03 19:59:28,227 INFO [train.py:1114] (3/4) Epoch 17, batch 550, loss[loss=0.1926, simple_loss=0.2842, pruned_loss=0.05052, over 13276.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2732, pruned_loss=0.05105, over 2467314.10 frames. ], batch size: 49, lr: 7.40e-03, grad_scale: 32.0 +2024-08-03 19:59:32,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=15.0 +2024-08-03 19:59:37,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215622.0, ans=0.125 +2024-08-03 19:59:57,002 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.283e+01 1.155e+02 1.328e+02 1.624e+02 2.790e+02, threshold=2.656e+02, percent-clipped=3.0 +2024-08-03 20:00:18,072 INFO [train.py:1114] (3/4) Epoch 17, batch 600, loss[loss=0.2172, simple_loss=0.3013, pruned_loss=0.06654, over 13302.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2728, pruned_loss=0.05098, over 2506897.39 frames. ], batch size: 46, lr: 7.40e-03, grad_scale: 32.0 +2024-08-03 20:00:23,073 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.97 vs. limit=15.0 +2024-08-03 20:00:26,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=215805.33333333334, ans=0.125 +2024-08-03 20:00:28,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215805.33333333334, ans=0.125 +2024-08-03 20:00:38,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215842.0, ans=0.0 +2024-08-03 20:00:39,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215842.0, ans=0.125 +2024-08-03 20:00:48,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=12.0 +2024-08-03 20:00:54,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.84 vs. limit=22.5 +2024-08-03 20:00:56,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=215915.33333333334, ans=0.125 +2024-08-03 20:01:02,927 INFO [train.py:1114] (3/4) Epoch 17, batch 650, loss[loss=0.1801, simple_loss=0.2756, pruned_loss=0.0423, over 13526.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2723, pruned_loss=0.05056, over 2542871.80 frames. ], batch size: 37, lr: 7.39e-03, grad_scale: 32.0
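
The `Whitening` lines from `[scaling.py:1024]` (and the `WithLoss ... loss-sum=0.000e+00` lines near them, which read as an auxiliary attention-weight penalty that is currently inactive) are activation-statistics diagnostics: each module logs a measured metric against the limit at which its corrective penalty presumably activates, so an entry like the earlier `metric=17.18 vs. limit=15.0` flags features that have drifted out of spec, while most entries sit safely below their limits. One assumed reading of such a whiteness metric, as the eigenvalue-dispersion ratio E[λ²]/E[λ]² of the feature covariance (1.0 when the covariance is isotropic; this formulation is illustrative, not code from scaling.py):

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels); eigenvalue dispersion of cov(x)."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    return float((eigs**2).mean() / eigs.mean() ** 2)

white = torch.randn(10000, 384)  # roughly isotropic features
skewed = white.clone()
skewed[:, :4] *= 30.0            # a few channels dominate the covariance
print(whitening_metric(white), whitening_metric(skewed))  # ~1.04 vs. ~79
```
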
+2024-08-03 20:01:06,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=215952.0, ans=0.2 +2024-08-03 20:01:08,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215952.0, ans=0.0 +2024-08-03 20:01:24,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0 +2024-08-03 20:01:29,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.069e+01 1.111e+02 1.221e+02 1.677e+02 3.173e+02, threshold=2.441e+02, percent-clipped=3.0 +2024-08-03 20:01:38,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=216062.0, ans=0.05 +2024-08-03 20:01:52,172 INFO [train.py:1114] (3/4) Epoch 17, batch 700, loss[loss=0.1736, simple_loss=0.2612, pruned_loss=0.04296, over 13526.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2732, pruned_loss=0.0508, over 2564312.48 frames. ], batch size: 35, lr: 7.39e-03, grad_scale: 16.0 +2024-08-03 20:02:08,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=216172.0, ans=0.125 +2024-08-03 20:02:11,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=216208.66666666666, ans=0.2 +2024-08-03 20:02:14,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.89 vs. limit=15.0 +2024-08-03 20:02:20,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.13 vs. limit=15.0 +2024-08-03 20:02:33,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=216282.0, ans=0.2 +2024-08-03 20:02:37,428 INFO [train.py:1114] (3/4) Epoch 17, batch 750, loss[loss=0.1758, simple_loss=0.2738, pruned_loss=0.0389, over 13362.00 frames. ], tot_loss[loss=0.186, simple_loss=0.272, pruned_loss=0.05003, over 2581974.11 frames. 
], batch size: 37, lr: 7.39e-03, grad_scale: 16.0 +2024-08-03 20:02:40,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216318.66666666666, ans=0.125 +2024-08-03 20:02:47,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=216355.33333333334, ans=0.0 +2024-08-03 20:02:52,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=216355.33333333334, ans=0.0 +2024-08-03 20:02:52,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=216355.33333333334, ans=0.125 +2024-08-03 20:03:05,016 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.455e+01 1.109e+02 1.251e+02 1.578e+02 2.500e+02, threshold=2.502e+02, percent-clipped=1.0 +2024-08-03 20:03:11,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=216428.66666666666, ans=0.0 +2024-08-03 20:03:15,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=216465.33333333334, ans=0.125 +2024-08-03 20:03:23,144 INFO [train.py:1114] (3/4) Epoch 17, batch 800, loss[loss=0.1547, simple_loss=0.2434, pruned_loss=0.03302, over 13353.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2719, pruned_loss=0.05009, over 2596864.75 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 32.0 +2024-08-03 20:03:29,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=216502.0, ans=0.2 +2024-08-03 20:03:31,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216538.66666666666, ans=0.125 +2024-08-03 20:03:47,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=216575.33333333334, ans=0.0 +2024-08-03 20:03:50,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=216575.33333333334, ans=0.2 +2024-08-03 20:03:53,735 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.82 vs. limit=6.0 +2024-08-03 20:04:13,172 INFO [train.py:1114] (3/4) Epoch 17, batch 850, loss[loss=0.1909, simple_loss=0.2825, pruned_loss=0.04967, over 13330.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.272, pruned_loss=0.05023, over 2609058.28 frames. 
], batch size: 40, lr: 7.38e-03, grad_scale: 16.0 +2024-08-03 20:04:16,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=216685.33333333334, ans=0.0 +2024-08-03 20:04:17,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216685.33333333334, ans=0.1 +2024-08-03 20:04:22,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=216722.0, ans=0.125 +2024-08-03 20:04:27,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216722.0, ans=0.125 +2024-08-03 20:04:38,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=216758.66666666666, ans=0.0 +2024-08-03 20:04:39,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.44 vs. limit=22.5 +2024-08-03 20:04:40,986 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.726e+01 1.085e+02 1.274e+02 1.570e+02 2.707e+02, threshold=2.548e+02, percent-clipped=2.0 +2024-08-03 20:04:42,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.93 vs. limit=15.0 +2024-08-03 20:04:44,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=12.0 +2024-08-03 20:04:48,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216832.0, ans=0.1 +2024-08-03 20:04:53,951 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:04:58,402 INFO [train.py:1114] (3/4) Epoch 17, batch 900, loss[loss=0.1611, simple_loss=0.252, pruned_loss=0.0351, over 13336.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2718, pruned_loss=0.05018, over 2611511.11 frames. ], batch size: 33, lr: 7.38e-03, grad_scale: 16.0 +2024-08-03 20:05:05,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=216868.66666666666, ans=0.125 +2024-08-03 20:05:10,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216905.33333333334, ans=0.125 +2024-08-03 20:05:21,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216942.0, ans=0.125 +2024-08-03 20:05:30,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=216978.66666666666, ans=10.0 +2024-08-03 20:05:34,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=216978.66666666666, ans=0.0 +2024-08-03 20:05:45,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=217015.33333333334, ans=0.0 +2024-08-03 20:05:47,252 INFO [train.py:1114] (3/4) Epoch 17, batch 950, loss[loss=0.1529, simple_loss=0.2392, pruned_loss=0.03331, over 13534.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2722, pruned_loss=0.0503, over 2612857.99 frames. 
], batch size: 34, lr: 7.38e-03, grad_scale: 16.0 +2024-08-03 20:06:04,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=217088.66666666666, ans=0.0 +2024-08-03 20:06:15,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=217162.0, ans=0.125 +2024-08-03 20:06:15,641 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.469e+01 1.104e+02 1.288e+02 1.565e+02 2.337e+02, threshold=2.575e+02, percent-clipped=0.0 +2024-08-03 20:06:25,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=217198.66666666666, ans=0.125 +2024-08-03 20:06:26,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=217198.66666666666, ans=0.0 +2024-08-03 20:06:27,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217198.66666666666, ans=0.125 +2024-08-03 20:06:33,332 INFO [train.py:1114] (3/4) Epoch 17, batch 1000, loss[loss=0.2066, simple_loss=0.2814, pruned_loss=0.06584, over 13378.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2734, pruned_loss=0.05096, over 2611162.98 frames. ], batch size: 35, lr: 7.37e-03, grad_scale: 8.0 +2024-08-03 20:06:42,059 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:06:45,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217272.0, ans=0.1 +2024-08-03 20:06:48,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=217272.0, ans=0.0 +2024-08-03 20:07:07,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217345.33333333334, ans=0.125 +2024-08-03 20:07:26,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.38 vs. limit=22.5 +2024-08-03 20:07:27,041 INFO [train.py:1114] (3/4) Epoch 17, batch 1050, loss[loss=0.168, simple_loss=0.2585, pruned_loss=0.03879, over 13577.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2728, pruned_loss=0.05082, over 2615371.57 frames. ], batch size: 39, lr: 7.37e-03, grad_scale: 8.0 +2024-08-03 20:07:42,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217455.33333333334, ans=0.125 +2024-08-03 20:07:43,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0 +2024-08-03 20:07:55,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.928e+01 1.069e+02 1.230e+02 1.488e+02 2.448e+02, threshold=2.459e+02, percent-clipped=0.0 +2024-08-03 20:08:01,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=217528.66666666666, ans=0.0 +2024-08-03 20:08:12,147 INFO [train.py:1114] (3/4) Epoch 17, batch 1100, loss[loss=0.168, simple_loss=0.2617, pruned_loss=0.03717, over 13572.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2729, pruned_loss=0.05076, over 2619305.83 frames. 
], batch size: 36, lr: 7.37e-03, grad_scale: 8.0 +2024-08-03 20:08:20,726 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:08:53,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217748.66666666666, ans=0.1 +2024-08-03 20:08:59,105 INFO [train.py:1114] (3/4) Epoch 17, batch 1150, loss[loss=0.173, simple_loss=0.2606, pruned_loss=0.04264, over 13545.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2732, pruned_loss=0.05095, over 2617921.89 frames. ], batch size: 36, lr: 7.36e-03, grad_scale: 8.0 +2024-08-03 20:09:03,190 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:09:06,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217785.33333333334, ans=0.125 +2024-08-03 20:09:09,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217822.0, ans=0.1 +2024-08-03 20:09:30,351 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.508e+01 1.111e+02 1.257e+02 1.521e+02 2.461e+02, threshold=2.515e+02, percent-clipped=1.0 +2024-08-03 20:09:34,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.27 vs. limit=15.0 +2024-08-03 20:09:42,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217932.0, ans=0.125 +2024-08-03 20:09:46,438 INFO [train.py:1114] (3/4) Epoch 17, batch 1200, loss[loss=0.1922, simple_loss=0.2823, pruned_loss=0.05099, over 13582.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2734, pruned_loss=0.05073, over 2615933.91 frames. ], batch size: 39, lr: 7.36e-03, grad_scale: 16.0 +2024-08-03 20:09:47,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.30 vs. limit=15.0 +2024-08-03 20:09:51,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0 +2024-08-03 20:09:55,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218005.33333333334, ans=0.0 +2024-08-03 20:10:06,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=218042.0, ans=0.125 +2024-08-03 20:10:13,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=218078.66666666666, ans=0.0 +2024-08-03 20:10:15,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=218078.66666666666, ans=0.0 +2024-08-03 20:10:32,171 INFO [train.py:1114] (3/4) Epoch 17, batch 1250, loss[loss=0.2081, simple_loss=0.2893, pruned_loss=0.06341, over 13431.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2744, pruned_loss=0.05108, over 2628158.83 frames. ], batch size: 42, lr: 7.36e-03, grad_scale: 16.0 +2024-08-03 20:11:00,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.32 vs. 
limit=15.0 +2024-08-03 20:11:05,409 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.654e+01 1.175e+02 1.463e+02 1.905e+02 2.984e+02, threshold=2.925e+02, percent-clipped=5.0 +2024-08-03 20:11:06,456 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:11:16,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-08-03 20:11:19,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=218298.66666666666, ans=0.2 +2024-08-03 20:11:21,612 INFO [train.py:1114] (3/4) Epoch 17, batch 1300, loss[loss=0.1997, simple_loss=0.2796, pruned_loss=0.05991, over 12828.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2733, pruned_loss=0.05059, over 2630134.52 frames. ], batch size: 52, lr: 7.35e-03, grad_scale: 16.0 +2024-08-03 20:11:23,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=218335.33333333334, ans=0.0 +2024-08-03 20:11:28,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218335.33333333334, ans=0.1 +2024-08-03 20:11:34,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.73 vs. limit=22.5 +2024-08-03 20:11:53,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=218445.33333333334, ans=0.2 +2024-08-03 20:12:06,805 INFO [train.py:1114] (3/4) Epoch 17, batch 1350, loss[loss=0.1843, simple_loss=0.2748, pruned_loss=0.04692, over 13555.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2737, pruned_loss=0.05092, over 2637717.51 frames. ], batch size: 37, lr: 7.35e-03, grad_scale: 16.0 +2024-08-03 20:12:08,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=218518.66666666666, ans=0.125 +2024-08-03 20:12:18,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.04 vs. limit=15.0 +2024-08-03 20:12:21,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=218555.33333333334, ans=0.125 +2024-08-03 20:12:22,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218555.33333333334, ans=0.1 +2024-08-03 20:12:33,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=218592.0, ans=0.125 +2024-08-03 20:12:37,640 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.168e+02 1.323e+02 1.597e+02 2.527e+02, threshold=2.645e+02, percent-clipped=0.0 +2024-08-03 20:12:43,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=218628.66666666666, ans=0.125 +2024-08-03 20:12:53,755 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.63 vs. 
limit=15.0 +2024-08-03 20:12:56,129 INFO [train.py:1114] (3/4) Epoch 17, batch 1400, loss[loss=0.1613, simple_loss=0.2333, pruned_loss=0.04466, over 13258.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2728, pruned_loss=0.05049, over 2642415.00 frames. ], batch size: 31, lr: 7.35e-03, grad_scale: 16.0 +2024-08-03 20:12:56,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=218702.0, ans=0.0 +2024-08-03 20:13:16,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218775.33333333334, ans=0.125 +2024-08-03 20:13:16,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=218775.33333333334, ans=15.0 +2024-08-03 20:13:32,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=218848.66666666666, ans=0.025 +2024-08-03 20:13:33,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=218848.66666666666, ans=0.0 +2024-08-03 20:13:36,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.86 vs. limit=22.5 +2024-08-03 20:13:41,318 INFO [train.py:1114] (3/4) Epoch 17, batch 1450, loss[loss=0.2017, simple_loss=0.2893, pruned_loss=0.05703, over 13403.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2731, pruned_loss=0.05039, over 2641773.94 frames. ], batch size: 43, lr: 7.34e-03, grad_scale: 16.0 +2024-08-03 20:13:52,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=218922.0, ans=0.125 +2024-08-03 20:13:58,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.48 vs. limit=15.0 +2024-08-03 20:13:59,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=218958.66666666666, ans=0.05 +2024-08-03 20:14:10,050 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.093e+02 1.261e+02 1.597e+02 2.531e+02, threshold=2.522e+02, percent-clipped=0.0 +2024-08-03 20:14:11,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=218995.33333333334, ans=0.125 +2024-08-03 20:14:14,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=218995.33333333334, ans=0.125 +2024-08-03 20:14:29,614 INFO [train.py:1114] (3/4) Epoch 17, batch 1500, loss[loss=0.214, simple_loss=0.307, pruned_loss=0.06049, over 13401.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2738, pruned_loss=0.05058, over 2641534.48 frames. 
], batch size: 39, lr: 7.34e-03, grad_scale: 16.0 +2024-08-03 20:14:34,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=219068.66666666666, ans=0.125 +2024-08-03 20:14:38,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=219105.33333333334, ans=0.125 +2024-08-03 20:14:42,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=219105.33333333334, ans=0.125 +2024-08-03 20:14:52,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=219142.0, ans=10.0 +2024-08-03 20:15:06,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.13 vs. limit=15.0 +2024-08-03 20:15:15,945 INFO [train.py:1114] (3/4) Epoch 17, batch 1550, loss[loss=0.21, simple_loss=0.3061, pruned_loss=0.05701, over 13400.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2732, pruned_loss=0.05064, over 2630713.76 frames. ], batch size: 41, lr: 7.34e-03, grad_scale: 16.0 +2024-08-03 20:15:18,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=219252.0, ans=0.0 +2024-08-03 20:15:24,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=12.0 +2024-08-03 20:15:26,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=219288.66666666666, ans=0.07 +2024-08-03 20:15:26,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=219288.66666666666, ans=0.125 +2024-08-03 20:15:29,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219288.66666666666, ans=0.125 +2024-08-03 20:15:45,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.134e+01 1.116e+02 1.285e+02 1.566e+02 3.410e+02, threshold=2.570e+02, percent-clipped=5.0 +2024-08-03 20:15:49,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219362.0, ans=0.125 +2024-08-03 20:15:53,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=219398.66666666666, ans=0.2 +2024-08-03 20:16:02,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219435.33333333334, ans=0.125 +2024-08-03 20:16:03,417 INFO [train.py:1114] (3/4) Epoch 17, batch 1600, loss[loss=0.1825, simple_loss=0.2729, pruned_loss=0.04605, over 13585.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2732, pruned_loss=0.05081, over 2623651.60 frames. ], batch size: 39, lr: 7.34e-03, grad_scale: 32.0 +2024-08-03 20:16:11,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219435.33333333334, ans=0.1 +2024-08-03 20:16:29,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. 
limit=15.0 +2024-08-03 20:16:30,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.63 vs. limit=12.0 +2024-08-03 20:16:48,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0 +2024-08-03 20:16:55,678 INFO [train.py:1114] (3/4) Epoch 17, batch 1650, loss[loss=0.1833, simple_loss=0.2834, pruned_loss=0.04163, over 13327.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2729, pruned_loss=0.05075, over 2620352.15 frames. ], batch size: 40, lr: 7.33e-03, grad_scale: 32.0 +2024-08-03 20:17:00,159 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.12 vs. limit=15.0 +2024-08-03 20:17:09,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=219655.33333333334, ans=0.0 +2024-08-03 20:17:19,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=219692.0, ans=0.125 +2024-08-03 20:17:24,617 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.842e+01 1.100e+02 1.247e+02 1.816e+02 3.503e+02, threshold=2.494e+02, percent-clipped=6.0 +2024-08-03 20:17:28,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=219728.66666666666, ans=0.125 +2024-08-03 20:17:41,129 INFO [train.py:1114] (3/4) Epoch 17, batch 1700, loss[loss=0.1682, simple_loss=0.2516, pruned_loss=0.04239, over 13301.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2727, pruned_loss=0.05049, over 2629968.98 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0 +2024-08-03 20:17:48,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=219802.0, ans=0.0 +2024-08-03 20:18:03,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=219875.33333333334, ans=0.2 +2024-08-03 20:18:28,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=219948.66666666666, ans=10.0 +2024-08-03 20:18:29,576 INFO [train.py:1114] (3/4) Epoch 17, batch 1750, loss[loss=0.1741, simple_loss=0.2489, pruned_loss=0.04966, over 13531.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2727, pruned_loss=0.05056, over 2633561.19 frames. ], batch size: 31, lr: 7.33e-03, grad_scale: 32.0 +2024-08-03 20:18:55,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=220058.66666666666, ans=0.125 +2024-08-03 20:19:00,577 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.113e+02 1.270e+02 1.558e+02 2.524e+02, threshold=2.540e+02, percent-clipped=1.0 +2024-08-03 20:19:02,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=220095.33333333334, ans=0.125 +2024-08-03 20:19:05,474 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:19:16,818 INFO [train.py:1114] (3/4) Epoch 17, batch 1800, loss[loss=0.1872, simple_loss=0.2782, pruned_loss=0.0481, over 13549.00 frames. 
], tot_loss[loss=0.187, simple_loss=0.2728, pruned_loss=0.05063, over 2635013.99 frames. ], batch size: 38, lr: 7.32e-03, grad_scale: 32.0 +2024-08-03 20:19:37,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220242.0, ans=0.1 +2024-08-03 20:19:45,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=220278.66666666666, ans=0.0 +2024-08-03 20:19:48,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220278.66666666666, ans=0.125 +2024-08-03 20:20:04,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=220315.33333333334, ans=0.0 +2024-08-03 20:20:06,479 INFO [train.py:1114] (3/4) Epoch 17, batch 1850, loss[loss=0.188, simple_loss=0.2752, pruned_loss=0.05036, over 13416.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2729, pruned_loss=0.05094, over 2637326.36 frames. ], batch size: 39, lr: 7.32e-03, grad_scale: 32.0 +2024-08-03 20:20:07,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=220352.0, ans=0.2 +2024-08-03 20:20:12,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220352.0, ans=0.1 +2024-08-03 20:20:24,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.88 vs. limit=15.0 +2024-08-03 20:20:33,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220462.0, ans=0.125 +2024-08-03 20:20:35,730 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.176e+01 1.159e+02 1.677e+02 2.408e+02 3.560e+02, threshold=3.354e+02, percent-clipped=19.0 +2024-08-03 20:20:45,079 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:20:52,110 INFO [train.py:1114] (3/4) Epoch 17, batch 1900, loss[loss=0.2079, simple_loss=0.2928, pruned_loss=0.06148, over 13305.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2739, pruned_loss=0.05121, over 2639869.53 frames. ], batch size: 40, lr: 7.32e-03, grad_scale: 32.0 +2024-08-03 20:21:07,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220572.0, ans=0.1 +2024-08-03 20:21:15,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0 +2024-08-03 20:21:32,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=220682.0, ans=0.125 +2024-08-03 20:21:33,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=220682.0, ans=0.0 +2024-08-03 20:21:36,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220682.0, ans=0.0 +2024-08-03 20:21:38,859 INFO [train.py:1114] (3/4) Epoch 17, batch 1950, loss[loss=0.1786, simple_loss=0.2679, pruned_loss=0.04465, over 13552.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.275, pruned_loss=0.05136, over 2646193.08 frames. 
], batch size: 36, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:21:40,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=220718.66666666666, ans=0.125 +2024-08-03 20:21:40,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=220718.66666666666, ans=0.125 +2024-08-03 20:21:41,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0 +2024-08-03 20:22:10,359 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.292e+01 1.160e+02 1.302e+02 1.581e+02 2.993e+02, threshold=2.604e+02, percent-clipped=0.0 +2024-08-03 20:22:26,602 INFO [train.py:1114] (3/4) Epoch 17, batch 2000, loss[loss=0.1935, simple_loss=0.2677, pruned_loss=0.05967, over 13536.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2752, pruned_loss=0.05141, over 2635712.30 frames. ], batch size: 31, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:22:27,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=220902.0, ans=0.025 +2024-08-03 20:22:35,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=220938.66666666666, ans=0.0 +2024-08-03 20:22:41,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=220938.66666666666, ans=0.2 +2024-08-03 20:22:44,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-08-03 20:22:49,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=220975.33333333334, ans=0.2 +2024-08-03 20:22:52,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.17 vs. limit=22.5 +2024-08-03 20:23:09,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=221048.66666666666, ans=0.0 +2024-08-03 20:23:10,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=221048.66666666666, ans=0.025 +2024-08-03 20:23:14,204 INFO [train.py:1114] (3/4) Epoch 17, batch 2050, loss[loss=0.1802, simple_loss=0.2607, pruned_loss=0.04986, over 13422.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2744, pruned_loss=0.05134, over 2632866.45 frames. 
], batch size: 32, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:23:15,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221085.33333333334, ans=0.125 +2024-08-03 20:23:15,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221085.33333333334, ans=0.1 +2024-08-03 20:23:45,004 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.513e+01 1.116e+02 1.230e+02 1.630e+02 2.618e+02, threshold=2.461e+02, percent-clipped=1.0 +2024-08-03 20:23:47,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=221195.33333333334, ans=0.125 +2024-08-03 20:24:01,282 INFO [train.py:1114] (3/4) Epoch 17, batch 2100, loss[loss=0.1769, simple_loss=0.262, pruned_loss=0.0459, over 13539.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2729, pruned_loss=0.05052, over 2638272.91 frames. ], batch size: 37, lr: 7.31e-03, grad_scale: 32.0 +2024-08-03 20:24:10,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=221305.33333333334, ans=0.0 +2024-08-03 20:24:17,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=221305.33333333334, ans=0.125 +2024-08-03 20:24:20,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221342.0, ans=0.0 +2024-08-03 20:24:23,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221342.0, ans=0.125 +2024-08-03 20:24:39,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-08-03 20:24:40,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=12.0 +2024-08-03 20:24:42,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221415.33333333334, ans=0.1 +2024-08-03 20:24:45,882 INFO [train.py:1114] (3/4) Epoch 17, batch 2150, loss[loss=0.1921, simple_loss=0.2743, pruned_loss=0.05493, over 13552.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2724, pruned_loss=0.05026, over 2647152.46 frames. ], batch size: 36, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:24:47,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221452.0, ans=0.1 +2024-08-03 20:25:16,727 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.666e+01 1.100e+02 1.262e+02 1.658e+02 2.819e+02, threshold=2.523e+02, percent-clipped=4.0 +2024-08-03 20:25:19,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=221562.0, ans=0.2 +2024-08-03 20:25:20,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.07 vs. 
limit=15.0 +2024-08-03 20:25:24,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=221562.0, ans=0.125 +2024-08-03 20:25:34,767 INFO [train.py:1114] (3/4) Epoch 17, batch 2200, loss[loss=0.1613, simple_loss=0.2577, pruned_loss=0.03243, over 13408.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2719, pruned_loss=0.0499, over 2645622.60 frames. ], batch size: 39, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:25:47,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0 +2024-08-03 20:25:50,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=221672.0, ans=0.2 +2024-08-03 20:26:14,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=221782.0, ans=0.125 +2024-08-03 20:26:18,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=221782.0, ans=0.125 +2024-08-03 20:26:20,148 INFO [train.py:1114] (3/4) Epoch 17, batch 2250, loss[loss=0.1868, simple_loss=0.2854, pruned_loss=0.04411, over 13355.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.272, pruned_loss=0.0498, over 2642458.44 frames. ], batch size: 37, lr: 7.30e-03, grad_scale: 32.0 +2024-08-03 20:26:23,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=221818.66666666666, ans=0.1 +2024-08-03 20:26:32,030 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:26:35,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.41 vs. limit=22.5 +2024-08-03 20:26:40,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=221892.0, ans=0.2 +2024-08-03 20:26:48,992 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.159e+02 1.389e+02 1.848e+02 3.074e+02, threshold=2.777e+02, percent-clipped=8.0 +2024-08-03 20:26:54,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.71 vs. limit=22.5 +2024-08-03 20:26:54,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221928.66666666666, ans=0.1 +2024-08-03 20:26:56,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=221928.66666666666, ans=0.125 +2024-08-03 20:26:57,378 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 20:27:06,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=221965.33333333334, ans=0.125 +2024-08-03 20:27:10,464 INFO [train.py:1114] (3/4) Epoch 17, batch 2300, loss[loss=0.2106, simple_loss=0.2866, pruned_loss=0.06728, over 13564.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2713, pruned_loss=0.04995, over 2638568.03 frames. 
], batch size: 33, lr: 7.29e-03, grad_scale: 32.0 +2024-08-03 20:27:11,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=222002.0, ans=0.5 +2024-08-03 20:27:16,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222002.0, ans=0.1 +2024-08-03 20:27:17,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222002.0, ans=0.125 +2024-08-03 20:27:36,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=222075.33333333334, ans=0.125 +2024-08-03 20:27:50,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-08-03 20:27:51,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=15.0 +2024-08-03 20:27:56,081 INFO [train.py:1114] (3/4) Epoch 17, batch 2350, loss[loss=0.1686, simple_loss=0.266, pruned_loss=0.03564, over 13557.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2713, pruned_loss=0.05, over 2641452.86 frames. ], batch size: 38, lr: 7.29e-03, grad_scale: 16.0 +2024-08-03 20:28:22,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.18 vs. limit=15.0 +2024-08-03 20:28:25,883 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.384e+01 1.095e+02 1.277e+02 1.611e+02 2.837e+02, threshold=2.555e+02, percent-clipped=1.0 +2024-08-03 20:28:26,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=222295.33333333334, ans=0.0 +2024-08-03 20:28:43,086 INFO [train.py:1114] (3/4) Epoch 17, batch 2400, loss[loss=0.1677, simple_loss=0.2575, pruned_loss=0.03895, over 13546.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2713, pruned_loss=0.04972, over 2642698.42 frames. ], batch size: 35, lr: 7.29e-03, grad_scale: 32.0 +2024-08-03 20:29:05,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=222442.0, ans=0.125 +2024-08-03 20:29:14,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=222478.66666666666, ans=0.125 +2024-08-03 20:29:18,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.09 vs. limit=15.0 +2024-08-03 20:29:24,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=222515.33333333334, ans=10.0 +2024-08-03 20:29:30,152 INFO [train.py:1114] (3/4) Epoch 17, batch 2450, loss[loss=0.1943, simple_loss=0.2806, pruned_loss=0.05401, over 13374.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2726, pruned_loss=0.05033, over 2632011.88 frames. 
], batch size: 37, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:29:33,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=222552.0, ans=0.07
+2024-08-03 20:29:45,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=222588.66666666666, ans=10.0
+2024-08-03 20:29:45,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=222588.66666666666, ans=0.0
+2024-08-03 20:29:53,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=222625.33333333334, ans=0.125
+2024-08-03 20:29:59,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.386e+01 1.131e+02 1.264e+02 1.537e+02 2.363e+02, threshold=2.529e+02, percent-clipped=0.0
+2024-08-03 20:30:15,151 INFO [train.py:1114] (3/4) Epoch 17, batch 2500, loss[loss=0.1861, simple_loss=0.2755, pruned_loss=0.04832, over 13402.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2725, pruned_loss=0.05012, over 2636579.24 frames. ], batch size: 39, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:30:18,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=222735.33333333334, ans=0.0
+2024-08-03 20:30:30,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=222772.0, ans=0.0
+2024-08-03 20:30:35,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=222808.66666666666, ans=0.0
+2024-08-03 20:30:41,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222845.33333333334, ans=0.125
+2024-08-03 20:31:00,465 INFO [train.py:1114] (3/4) Epoch 17, batch 2550, loss[loss=0.1852, simple_loss=0.2629, pruned_loss=0.05375, over 13541.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2729, pruned_loss=0.05046, over 2638663.32 frames. ], batch size: 31, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:31:08,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222918.66666666666, ans=0.125
+2024-08-03 20:31:08,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=222918.66666666666, ans=0.95
+2024-08-03 20:31:10,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=222955.33333333334, ans=0.125
+2024-08-03 20:31:13,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222955.33333333334, ans=0.125
+2024-08-03 20:31:13,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222955.33333333334, ans=0.1
+2024-08-03 20:31:14,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222955.33333333334, ans=0.1
+2024-08-03 20:31:23,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=222992.0, ans=0.0
+2024-08-03 20:31:27,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=222992.0, ans=0.04949747468305833
+2024-08-03 20:31:27,970 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:31:31,228 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.159e+02 1.553e+02 2.092e+02 3.686e+02, threshold=3.106e+02, percent-clipped=10.0
+2024-08-03 20:31:41,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=223065.33333333334, ans=0.125
+2024-08-03 20:31:46,271 INFO [train.py:1114] (3/4) Epoch 17, batch 2600, loss[loss=0.1751, simple_loss=0.2674, pruned_loss=0.04137, over 13552.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2734, pruned_loss=0.0507, over 2638170.99 frames. ], batch size: 36, lr: 7.28e-03, grad_scale: 32.0
+2024-08-03 20:31:49,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.40 vs. limit=10.0
+2024-08-03 20:31:55,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=223138.66666666666, ans=0.125
+2024-08-03 20:31:57,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223138.66666666666, ans=0.1
+2024-08-03 20:32:03,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223175.33333333334, ans=0.1
+2024-08-03 20:32:04,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223175.33333333334, ans=0.1
+2024-08-03 20:32:08,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.37 vs. limit=10.0
+2024-08-03 20:32:13,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=223212.0, ans=15.0
+2024-08-03 20:32:24,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223248.66666666666, ans=0.1
+2024-08-03 20:32:29,419 INFO [train.py:1114] (3/4) Epoch 17, batch 2650, loss[loss=0.2183, simple_loss=0.3072, pruned_loss=0.06468, over 13268.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2742, pruned_loss=0.05083, over 2641173.91 frames. ], batch size: 46, lr: 7.27e-03, grad_scale: 16.0
+2024-08-03 20:32:55,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=223395.33333333334, ans=0.125
+2024-08-03 20:32:55,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.75 vs. limit=15.0
+2024-08-03 20:32:58,625 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.512e+01 1.129e+02 1.471e+02 1.804e+02 3.189e+02, threshold=2.942e+02, percent-clipped=1.0
+2024-08-03 20:33:10,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.25 vs. limit=15.0
+2024-08-03 20:33:12,578 INFO [train.py:1114] (3/4) Epoch 17, batch 2700, loss[loss=0.1899, simple_loss=0.2872, pruned_loss=0.04629, over 13549.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2742, pruned_loss=0.05075, over 2638486.84 frames. ], batch size: 40, lr: 7.27e-03, grad_scale: 16.0
+2024-08-03 20:33:13,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.06 vs. limit=15.0
+2024-08-03 20:33:14,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=223468.66666666666, ans=0.0
+2024-08-03 20:33:17,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=223468.66666666666, ans=0.125
+2024-08-03 20:33:18,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=223468.66666666666, ans=0.0
+2024-08-03 20:33:20,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=223505.33333333334, ans=0.125
+2024-08-03 20:33:23,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=223505.33333333334, ans=0.2
+2024-08-03 20:33:34,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.95 vs. limit=15.0
+2024-08-03 20:33:36,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=223542.0, ans=0.5
+2024-08-03 20:33:36,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=223542.0, ans=0.125
+2024-08-03 20:33:44,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. limit=15.0
+2024-08-03 20:33:46,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223578.66666666666, ans=0.125
+2024-08-03 20:33:46,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=223615.33333333334, ans=0.0
+2024-08-03 20:33:51,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=223615.33333333334, ans=0.2
+2024-08-03 20:33:52,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=223615.33333333334, ans=10.0
+2024-08-03 20:33:56,039 INFO [train.py:1114] (3/4) Epoch 17, batch 2750, loss[loss=0.1611, simple_loss=0.2489, pruned_loss=0.03659, over 13347.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2727, pruned_loss=0.05043, over 2635188.42 frames. ], batch size: 34, lr: 7.27e-03, grad_scale: 16.0
+2024-08-03 20:34:00,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=223652.0, ans=0.04949747468305833
+2024-08-03 20:34:08,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=15.0
+2024-08-03 20:34:13,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223725.33333333334, ans=0.125
+2024-08-03 20:34:20,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=223725.33333333334, ans=0.125
+2024-08-03 20:34:26,194 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.377e+01 1.115e+02 1.294e+02 1.597e+02 2.305e+02, threshold=2.588e+02, percent-clipped=0.0
+2024-08-03 20:34:27,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=223762.0, ans=0.125
+2024-08-03 20:34:27,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=223762.0, ans=0.0
+2024-08-03 20:34:40,274 INFO [train.py:1114] (3/4) Epoch 17, batch 2800, loss[loss=0.2124, simple_loss=0.2936, pruned_loss=0.06562, over 9293.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2732, pruned_loss=0.05079, over 2627322.88 frames. ], batch size: 97, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:34:48,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=223835.33333333334, ans=0.125
+2024-08-03 20:34:55,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=223872.0, ans=0.2
+2024-08-03 20:35:04,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223908.66666666666, ans=0.1
+2024-08-03 20:35:13,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=223945.33333333334, ans=0.2
+2024-08-03 20:35:22,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=223982.0, ans=0.0
+2024-08-03 20:35:22,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=223982.0, ans=0.0
+2024-08-03 20:35:24,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=224018.66666666666, ans=0.125
+2024-08-03 20:35:25,233 INFO [train.py:1114] (3/4) Epoch 17, batch 2850, loss[loss=0.1707, simple_loss=0.267, pruned_loss=0.03722, over 13353.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2741, pruned_loss=0.05151, over 2621471.98 frames. ], batch size: 35, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:35:31,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224018.66666666666, ans=0.0
+2024-08-03 20:35:31,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.32 vs. limit=22.5
+2024-08-03 20:35:38,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.90 vs. limit=6.0
+2024-08-03 20:35:46,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=224092.0, ans=0.0
+2024-08-03 20:35:52,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224128.66666666666, ans=0.1
+2024-08-03 20:35:54,814 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.205e+02 1.427e+02 1.924e+02 3.362e+02, threshold=2.855e+02, percent-clipped=10.0
+2024-08-03 20:36:10,092 INFO [train.py:1114] (3/4) Epoch 17, batch 2900, loss[loss=0.1796, simple_loss=0.2666, pruned_loss=0.04636, over 13374.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.275, pruned_loss=0.05165, over 2632226.06 frames. ], batch size: 36, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:36:16,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=224202.0, ans=0.125
+2024-08-03 20:36:49,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=224348.66666666666, ans=0.04949747468305833
+2024-08-03 20:36:53,522 INFO [train.py:1114] (3/4) Epoch 17, batch 2950, loss[loss=0.186, simple_loss=0.2672, pruned_loss=0.05242, over 13344.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2739, pruned_loss=0.05165, over 2630171.43 frames. ], batch size: 34, lr: 7.26e-03, grad_scale: 32.0
+2024-08-03 20:36:56,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=15.0
+2024-08-03 20:37:00,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=224385.33333333334, ans=0.0
+2024-08-03 20:37:23,424 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.999e+01 1.109e+02 1.313e+02 1.570e+02 2.324e+02, threshold=2.625e+02, percent-clipped=1.0
+2024-08-03 20:37:24,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=224495.33333333334, ans=0.04949747468305833
+2024-08-03 20:37:34,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=224532.0, ans=0.0
+2024-08-03 20:37:37,368 INFO [train.py:1114] (3/4) Epoch 17, batch 3000, loss[loss=0.1809, simple_loss=0.2626, pruned_loss=0.04956, over 13541.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2738, pruned_loss=0.05152, over 2629769.29 frames. ], batch size: 37, lr: 7.25e-03, grad_scale: 32.0
+2024-08-03 20:37:37,675 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 20:37:47,666 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.1723, simple_loss=0.2712, pruned_loss=0.03676, over 944034.00 frames.
+2024-08-03 20:37:47,975 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 20:38:33,124 INFO [train.py:1114] (3/4) Epoch 17, batch 3050, loss[loss=0.1616, simple_loss=0.2534, pruned_loss=0.03486, over 13526.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2742, pruned_loss=0.05155, over 2626890.46 frames. ], batch size: 35, lr: 7.25e-03, grad_scale: 32.0
+2024-08-03 20:38:33,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=224752.0, ans=0.0
+2024-08-03 20:38:38,565 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:38:38,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.40 vs. limit=22.5
+2024-08-03 20:38:56,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224825.33333333334, ans=0.1
+2024-08-03 20:38:56,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224825.33333333334, ans=0.125
+2024-08-03 20:39:00,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=224862.0, ans=0.2
+2024-08-03 20:39:02,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=224862.0, ans=0.125
+2024-08-03 20:39:04,754 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.103e+02 1.242e+02 1.449e+02 2.712e+02, threshold=2.483e+02, percent-clipped=1.0
+2024-08-03 20:39:11,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=224898.66666666666, ans=0.0
+2024-08-03 20:39:18,549 INFO [train.py:1114] (3/4) Epoch 17, batch 3100, loss[loss=0.2033, simple_loss=0.2969, pruned_loss=0.05491, over 13329.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2742, pruned_loss=0.05172, over 2627420.03 frames. ], batch size: 46, lr: 7.25e-03, grad_scale: 32.0
+2024-08-03 20:39:22,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=224935.33333333334, ans=0.0
+2024-08-03 20:39:56,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=225082.0, ans=0.1
+2024-08-03 20:40:01,364 INFO [train.py:1114] (3/4) Epoch 17, batch 3150, loss[loss=0.2218, simple_loss=0.3026, pruned_loss=0.07054, over 13036.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2748, pruned_loss=0.05195, over 2628896.25 frames. ], batch size: 48, lr: 7.24e-03, grad_scale: 32.0
+2024-08-03 20:40:05,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=15.0
+2024-08-03 20:40:18,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=225192.0, ans=0.125
+2024-08-03 20:40:30,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.117e+02 1.300e+02 1.745e+02 2.777e+02, threshold=2.600e+02, percent-clipped=1.0
+2024-08-03 20:40:35,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=225265.33333333334, ans=0.2
+2024-08-03 20:40:43,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=225302.0, ans=0.125
+2024-08-03 20:40:44,311 INFO [train.py:1114] (3/4) Epoch 17, batch 3200, loss[loss=0.2096, simple_loss=0.2858, pruned_loss=0.06666, over 13535.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2747, pruned_loss=0.05179, over 2634594.51 frames. ], batch size: 37, lr: 7.24e-03, grad_scale: 32.0
+2024-08-03 20:40:53,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=225338.66666666666, ans=0.125
+2024-08-03 20:40:59,153 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:41:09,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225375.33333333334, ans=0.1
+2024-08-03 20:41:27,807 INFO [train.py:1114] (3/4) Epoch 17, batch 3250, loss[loss=0.1841, simple_loss=0.2776, pruned_loss=0.04528, over 13384.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2749, pruned_loss=0.05153, over 2637869.63 frames. ], batch size: 38, lr: 7.24e-03, grad_scale: 32.0
+2024-08-03 20:41:57,625 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.641e+01 1.154e+02 1.402e+02 1.667e+02 2.489e+02, threshold=2.804e+02, percent-clipped=0.0
+2024-08-03 20:42:04,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=225632.0, ans=0.0
+2024-08-03 20:42:06,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=225632.0, ans=0.2
+2024-08-03 20:42:11,278 INFO [train.py:1114] (3/4) Epoch 17, batch 3300, loss[loss=0.2098, simple_loss=0.2927, pruned_loss=0.06342, over 12872.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2734, pruned_loss=0.0509, over 2639754.70 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:42:17,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=225668.66666666666, ans=0.125
+2024-08-03 20:42:17,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=225668.66666666666, ans=0.125
+2024-08-03 20:42:30,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=225742.0, ans=0.0
+2024-08-03 20:42:33,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=225742.0, ans=0.125
+2024-08-03 20:42:53,697 INFO [train.py:1114] (3/4) Epoch 17, batch 3350, loss[loss=0.1954, simple_loss=0.2832, pruned_loss=0.05386, over 13031.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2739, pruned_loss=0.05111, over 2629161.74 frames. ], batch size: 48, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:42:56,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=225852.0, ans=0.0
+2024-08-03 20:42:59,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=225852.0, ans=0.025
+2024-08-03 20:43:07,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=225888.66666666666, ans=0.0
+2024-08-03 20:43:08,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=225888.66666666666, ans=0.0
+2024-08-03 20:43:21,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=225962.0, ans=0.0
+2024-08-03 20:43:22,997 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+01 1.128e+02 1.245e+02 1.447e+02 2.027e+02, threshold=2.490e+02, percent-clipped=0.0
+2024-08-03 20:43:27,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0
+2024-08-03 20:43:31,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.91 vs. limit=10.0
+2024-08-03 20:43:36,805 INFO [train.py:1114] (3/4) Epoch 17, batch 3400, loss[loss=0.168, simple_loss=0.2428, pruned_loss=0.04657, over 13535.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2738, pruned_loss=0.05111, over 2625137.32 frames. ], batch size: 31, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:43:36,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=226035.33333333334, ans=0.125
+2024-08-03 20:43:37,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=226035.33333333334, ans=0.2
+2024-08-03 20:43:38,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.14 vs. limit=22.5
+2024-08-03 20:44:06,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226145.33333333334, ans=0.0
+2024-08-03 20:44:13,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=226182.0, ans=10.0
+2024-08-03 20:44:20,074 INFO [train.py:1114] (3/4) Epoch 17, batch 3450, loss[loss=0.1918, simple_loss=0.2867, pruned_loss=0.04849, over 12887.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2743, pruned_loss=0.0515, over 2629214.10 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0
+2024-08-03 20:44:31,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226255.33333333334, ans=0.125
+2024-08-03 20:44:44,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.19 vs. limit=22.5
+2024-08-03 20:44:49,799 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.757e+01 1.151e+02 1.446e+02 1.763e+02 3.368e+02, threshold=2.892e+02, percent-clipped=3.0
+2024-08-03 20:44:56,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.50 vs. limit=15.0
+2024-08-03 20:45:00,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=226365.33333333334, ans=0.09899494936611666
+2024-08-03 20:45:02,687 INFO [train.py:1114] (3/4) Epoch 17, batch 3500, loss[loss=0.2116, simple_loss=0.2878, pruned_loss=0.0677, over 13530.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2736, pruned_loss=0.05139, over 2630304.46 frames. ], batch size: 34, lr: 7.22e-03, grad_scale: 16.0
+2024-08-03 20:45:11,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=226438.66666666666, ans=0.0
+2024-08-03 20:45:24,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=226475.33333333334, ans=0.5
+2024-08-03 20:45:26,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226475.33333333334, ans=0.1
+2024-08-03 20:45:28,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226512.0, ans=0.1
+2024-08-03 20:45:31,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226512.0, ans=0.125
+2024-08-03 20:45:40,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226548.66666666666, ans=0.125
+2024-08-03 20:45:44,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=226548.66666666666, ans=0.0
+2024-08-03 20:45:45,731 INFO [train.py:1114] (3/4) Epoch 17, batch 3550, loss[loss=0.194, simple_loss=0.2864, pruned_loss=0.05081, over 12465.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2757, pruned_loss=0.05211, over 2628649.15 frames. ], batch size: 58, lr: 7.22e-03, grad_scale: 16.0
+2024-08-03 20:45:51,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=226585.33333333334, ans=0.125
+2024-08-03 20:45:56,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226622.0, ans=0.125
+2024-08-03 20:46:09,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226658.66666666666, ans=0.1
+2024-08-03 20:46:12,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.65 vs. limit=22.5
+2024-08-03 20:46:15,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=226695.33333333334, ans=0.0
+2024-08-03 20:46:16,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0
+2024-08-03 20:46:17,976 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.551e+01 1.203e+02 1.370e+02 1.580e+02 2.866e+02, threshold=2.739e+02, percent-clipped=0.0
+2024-08-03 20:46:20,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=226695.33333333334, ans=0.0
+2024-08-03 20:46:28,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226732.0, ans=0.125
+2024-08-03 20:46:31,219 INFO [train.py:1114] (3/4) Epoch 17, batch 3600, loss[loss=0.2001, simple_loss=0.2778, pruned_loss=0.0612, over 9601.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2798, pruned_loss=0.05596, over 2487065.66 frames. ], batch size: 96, lr: 7.22e-03, grad_scale: 32.0
+2024-08-03 20:46:39,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=226805.33333333334, ans=0.0
+2024-08-03 20:46:39,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.65 vs. limit=10.0
+2024-08-03 20:46:43,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=226805.33333333334, ans=0.125
+2024-08-03 20:46:49,362 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:46:52,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.82 vs. limit=15.0
+2024-08-03 20:46:56,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=226842.0, ans=0.125
+2024-08-03 20:46:57,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0
+2024-08-03 20:46:59,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226878.66666666666, ans=0.1
+2024-08-03 20:47:02,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=15.0
+2024-08-03 20:47:48,361 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.1786, simple_loss=0.2601, pruned_loss=0.04854, over 13352.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2601, pruned_loss=0.04854, over 13352.00 frames. ], batch size: 33, lr: 7.01e-03, grad_scale: 32.0
+2024-08-03 20:47:48,362 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 20:47:54,885 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4894, 2.8274, 2.5086, 2.4996], device='cuda:3')
+2024-08-03 20:47:58,991 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1737, simple_loss=0.274, pruned_loss=0.03673, over 944034.00 frames.
+2024-08-03 20:47:58,991 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 20:48:00,993 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=1.363e-01
+2024-08-03 20:48:02,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226915.33333333334, ans=0.125
+2024-08-03 20:48:09,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226952.0, ans=0.125
+2024-08-03 20:48:17,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=226988.66666666666, ans=0.2
+2024-08-03 20:48:22,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=226988.66666666666, ans=0.125
+2024-08-03 20:48:31,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=227025.33333333334, ans=0.125
+2024-08-03 20:48:40,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.175e+01 1.169e+02 1.268e+02 1.393e+02 2.818e+02, threshold=2.535e+02, percent-clipped=2.0
+2024-08-03 20:48:43,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227062.0, ans=0.0
+2024-08-03 20:48:44,904 INFO [train.py:1114] (3/4) Epoch 18, batch 50, loss[loss=0.1566, simple_loss=0.2431, pruned_loss=0.03511, over 13423.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2766, pruned_loss=0.05216, over 579081.54 frames. ], batch size: 32, lr: 7.01e-03, grad_scale: 32.0
+2024-08-03 20:48:47,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227098.66666666666, ans=0.125
+2024-08-03 20:49:01,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=227135.33333333334, ans=0.0
+2024-08-03 20:49:05,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.56 vs. limit=6.0
+2024-08-03 20:49:12,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0
+2024-08-03 20:49:14,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=227208.66666666666, ans=0.125
+2024-08-03 20:49:16,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.05 vs. limit=15.0
+2024-08-03 20:49:28,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.07 vs. limit=22.5
+2024-08-03 20:49:31,441 INFO [train.py:1114] (3/4) Epoch 18, batch 100, loss[loss=0.1536, simple_loss=0.2445, pruned_loss=0.03137, over 13524.00 frames. ], tot_loss[loss=0.189, simple_loss=0.276, pruned_loss=0.05098, over 1026479.64 frames. ], batch size: 35, lr: 7.00e-03, grad_scale: 32.0
+2024-08-03 20:49:31,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=227282.0, ans=0.5
+2024-08-03 20:49:41,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227318.66666666666, ans=0.0
+2024-08-03 20:49:55,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.99 vs. limit=15.0
+2024-08-03 20:49:58,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227355.33333333334, ans=0.125
+2024-08-03 20:50:09,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227392.0, ans=0.1
+2024-08-03 20:50:16,427 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.987e+01 1.102e+02 1.290e+02 1.676e+02 3.343e+02, threshold=2.579e+02, percent-clipped=6.0
+2024-08-03 20:50:20,792 INFO [train.py:1114] (3/4) Epoch 18, batch 150, loss[loss=0.1637, simple_loss=0.2464, pruned_loss=0.04054, over 13436.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2733, pruned_loss=0.04993, over 1387659.35 frames. ], batch size: 32, lr: 7.00e-03, grad_scale: 32.0
+2024-08-03 20:50:33,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227502.0, ans=0.125
+2024-08-03 20:50:34,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227502.0, ans=0.1
+2024-08-03 20:50:35,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=227502.0, ans=0.0
+2024-08-03 20:50:37,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=227502.0, ans=0.2
+2024-08-03 20:50:45,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=227538.66666666666, ans=0.0
+2024-08-03 20:50:45,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=227538.66666666666, ans=0.125
+2024-08-03 20:50:55,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=227575.33333333334, ans=0.125
+2024-08-03 20:51:06,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=227612.0, ans=0.07
+2024-08-03 20:51:07,694 INFO [train.py:1114] (3/4) Epoch 18, batch 200, loss[loss=0.2027, simple_loss=0.2902, pruned_loss=0.05758, over 12433.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2718, pruned_loss=0.04942, over 1665467.31 frames. ], batch size: 58, lr: 7.00e-03, grad_scale: 32.0
+2024-08-03 20:51:07,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=227648.66666666666, ans=0.125
+2024-08-03 20:51:14,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=227648.66666666666, ans=0.2
+2024-08-03 20:51:16,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227685.33333333334, ans=0.125
+2024-08-03 20:51:20,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=227685.33333333334, ans=0.0
+2024-08-03 20:51:26,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.24 vs. limit=22.5
+2024-08-03 20:51:27,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=227722.0, ans=0.05
+2024-08-03 20:51:37,417 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:51:42,898 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=2.672e-03
+2024-08-03 20:51:43,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227795.33333333334, ans=0.1
+2024-08-03 20:51:48,064 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.522e+01 1.097e+02 1.244e+02 1.547e+02 2.709e+02, threshold=2.488e+02, percent-clipped=2.0
+2024-08-03 20:51:52,897 INFO [train.py:1114] (3/4) Epoch 18, batch 250, loss[loss=0.1948, simple_loss=0.2837, pruned_loss=0.05291, over 13339.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.272, pruned_loss=0.04947, over 1883511.11 frames. ], batch size: 46, lr: 7.00e-03, grad_scale: 32.0
+2024-08-03 20:51:55,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=227832.0, ans=0.015
+2024-08-03 20:52:12,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.05 vs. limit=15.0
+2024-08-03 20:52:26,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.78 vs. limit=15.0
+2024-08-03 20:52:38,230 INFO [train.py:1114] (3/4) Epoch 18, batch 300, loss[loss=0.2104, simple_loss=0.292, pruned_loss=0.06435, over 13438.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2713, pruned_loss=0.04931, over 2050194.75 frames. ], batch size: 42, lr: 6.99e-03, grad_scale: 32.0
+2024-08-03 20:52:46,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228052.0, ans=0.1
+2024-08-03 20:52:46,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.23 vs. limit=22.5
+2024-08-03 20:52:47,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228052.0, ans=0.125
+2024-08-03 20:52:48,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=228052.0, ans=0.025
+2024-08-03 20:52:59,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=228088.66666666666, ans=0.0
+2024-08-03 20:53:00,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=228088.66666666666, ans=0.04949747468305833
+2024-08-03 20:53:06,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=228125.33333333334, ans=0.025
+2024-08-03 20:53:11,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=228125.33333333334, ans=0.025
+2024-08-03 20:53:22,561 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.234e+01 1.098e+02 1.173e+02 1.552e+02 2.222e+02, threshold=2.347e+02, percent-clipped=0.0
+2024-08-03 20:53:27,233 INFO [train.py:1114] (3/4) Epoch 18, batch 350, loss[loss=0.189, simple_loss=0.2653, pruned_loss=0.05638, over 13590.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2716, pruned_loss=0.04916, over 2180903.35 frames. ], batch size: 33, lr: 6.99e-03, grad_scale: 32.0
+2024-08-03 20:53:35,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=228235.33333333334, ans=0.2
+2024-08-03 20:53:44,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=228235.33333333334, ans=0.125
+2024-08-03 20:53:53,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228272.0, ans=0.1
+2024-08-03 20:54:06,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.02 vs. limit=6.0
+2024-08-03 20:54:06,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=12.0
+2024-08-03 20:54:16,768 INFO [train.py:1114] (3/4) Epoch 18, batch 400, loss[loss=0.1849, simple_loss=0.2787, pruned_loss=0.04552, over 13363.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2709, pruned_loss=0.04885, over 2284708.38 frames. ], batch size: 37, lr: 6.99e-03, grad_scale: 32.0
+2024-08-03 20:54:20,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=228382.0, ans=0.125
+2024-08-03 20:54:37,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.08 vs. limit=15.0
+2024-08-03 20:54:47,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=228492.0, ans=0.125
+2024-08-03 20:54:53,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228528.66666666666, ans=0.1
+2024-08-03 20:54:53,373 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.99 vs. limit=15.0
+2024-08-03 20:54:57,269 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.985e+01 1.132e+02 1.280e+02 1.629e+02 3.189e+02, threshold=2.560e+02, percent-clipped=4.0
+2024-08-03 20:54:59,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.67 vs. limit=12.0
+2024-08-03 20:55:01,735 INFO [train.py:1114] (3/4) Epoch 18, batch 450, loss[loss=0.1653, simple_loss=0.254, pruned_loss=0.03833, over 13549.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2709, pruned_loss=0.04898, over 2358963.15 frames. ], batch size: 38, lr: 6.98e-03, grad_scale: 32.0
+2024-08-03 20:55:05,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228565.33333333334, ans=0.125
+2024-08-03 20:55:12,794 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=15.0
+2024-08-03 20:55:18,125 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.09 vs. limit=15.0
+2024-08-03 20:55:21,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=228638.66666666666, ans=0.0
+2024-08-03 20:55:24,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228638.66666666666, ans=0.1
+2024-08-03 20:55:28,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=228675.33333333334, ans=0.2
+2024-08-03 20:55:30,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228675.33333333334, ans=0.125
+2024-08-03 20:55:40,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=228712.0, ans=0.125
+2024-08-03 20:55:44,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=228712.0, ans=0.0
+2024-08-03 20:55:46,659 INFO [train.py:1114] (3/4) Epoch 18, batch 500, loss[loss=0.1933, simple_loss=0.2803, pruned_loss=0.0532, over 13425.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2702, pruned_loss=0.04867, over 2424998.96 frames. ], batch size: 43, lr: 6.98e-03, grad_scale: 32.0
+2024-08-03 20:55:51,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228748.66666666666, ans=0.1
+2024-08-03 20:55:56,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.12 vs. limit=15.0
+2024-08-03 20:56:02,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=228785.33333333334, ans=0.0
+2024-08-03 20:56:07,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=228822.0, ans=0.125
+2024-08-03 20:56:20,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228858.66666666666, ans=0.1
+2024-08-03 20:56:26,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=228895.33333333334, ans=0.0
+2024-08-03 20:56:27,953 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.074e+02 1.245e+02 1.559e+02 2.675e+02, threshold=2.490e+02, percent-clipped=1.0
+2024-08-03 20:56:29,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=228895.33333333334, ans=0.0
+2024-08-03 20:56:32,409 INFO [train.py:1114] (3/4) Epoch 18, batch 550, loss[loss=0.2178, simple_loss=0.3085, pruned_loss=0.06353, over 12969.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2706, pruned_loss=0.04888, over 2467556.84 frames. ], batch size: 48, lr: 6.98e-03, grad_scale: 32.0
+2024-08-03 20:56:44,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=228968.66666666666, ans=0.125
+2024-08-03 20:56:49,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=228968.66666666666, ans=0.05
+2024-08-03 20:57:20,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=229078.66666666666, ans=0.125
+2024-08-03 20:57:23,267 INFO [train.py:1114] (3/4) Epoch 18, batch 600, loss[loss=0.2184, simple_loss=0.3043, pruned_loss=0.06625, over 13298.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2711, pruned_loss=0.04901, over 2508319.42 frames. ], batch size: 46, lr: 6.98e-03, grad_scale: 16.0
+2024-08-03 20:57:32,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0
+2024-08-03 20:57:37,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=229152.0, ans=0.125
+2024-08-03 20:57:38,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=229152.0, ans=0.2
+2024-08-03 20:57:51,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=229225.33333333334, ans=0.035
+2024-08-03 20:57:52,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=229225.33333333334, ans=0.025
+2024-08-03 20:57:52,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229225.33333333334, ans=0.125
+2024-08-03 20:58:04,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=229262.0, ans=0.125
+2024-08-03 20:58:05,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.952e+01 1.114e+02 1.293e+02 1.855e+02 3.099e+02, threshold=2.585e+02, percent-clipped=2.0
+2024-08-03 20:58:09,361 INFO [train.py:1114] (3/4) Epoch 18, batch 650, loss[loss=0.1629, simple_loss=0.2528, pruned_loss=0.03646, over 13547.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2704, pruned_loss=0.04841, over 2543598.46 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0
+2024-08-03 20:58:10,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=229298.66666666666, ans=0.0
+2024-08-03 20:58:15,779 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:58:19,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=229335.33333333334, ans=0.125
+2024-08-03 20:58:25,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229335.33333333334, ans=0.125
+2024-08-03 20:58:31,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=229372.0, ans=0.95
+2024-08-03 20:58:37,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229408.66666666666, ans=0.1
+2024-08-03 20:58:42,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=229408.66666666666, ans=0.025
+2024-08-03 20:58:50,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=229445.33333333334, ans=0.125
+2024-08-03 20:58:54,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=229482.0, ans=0.125
+2024-08-03 20:58:54,985 INFO [train.py:1114] (3/4) Epoch 18, batch 700, loss[loss=0.1712, simple_loss=0.2554, pruned_loss=0.04349, over 13532.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2713, pruned_loss=0.04899, over 2565387.13 frames. ], batch size: 35, lr: 6.97e-03, grad_scale: 16.0
+2024-08-03 20:58:56,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=229482.0, ans=0.0
+2024-08-03 20:59:08,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=229518.66666666666, ans=0.125
+2024-08-03 20:59:26,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.92 vs. limit=15.0
+2024-08-03 20:59:39,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229628.66666666666, ans=0.125
+2024-08-03 20:59:41,522 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.312e+01 1.096e+02 1.191e+02 1.436e+02 2.621e+02, threshold=2.382e+02, percent-clipped=1.0
+2024-08-03 20:59:45,181 INFO [train.py:1114] (3/4) Epoch 18, batch 750, loss[loss=0.1664, simple_loss=0.2587, pruned_loss=0.03712, over 13347.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2705, pruned_loss=0.04873, over 2584187.06 frames. ], batch size: 37, lr: 6.97e-03, grad_scale: 16.0
+2024-08-03 20:59:48,009 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 20:59:48,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229665.33333333334, ans=0.0
+2024-08-03 20:59:56,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229702.0, ans=0.125
+2024-08-03 21:00:07,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=229738.66666666666, ans=0.05
+2024-08-03 21:00:10,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229738.66666666666, ans=0.125
+2024-08-03 21:00:11,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229775.33333333334, ans=0.1
+2024-08-03 21:00:12,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=229775.33333333334, ans=0.125
+2024-08-03 21:00:22,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=229812.0, ans=0.2
+2024-08-03 21:00:22,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.01 vs. limit=15.0
+2024-08-03 21:00:23,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=229812.0, ans=0.125
+2024-08-03 21:00:25,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=229812.0, ans=0.125
+2024-08-03 21:00:29,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.91 vs. limit=22.5
+2024-08-03 21:00:30,259 INFO [train.py:1114] (3/4) Epoch 18, batch 800, loss[loss=0.1615, simple_loss=0.2472, pruned_loss=0.03789, over 13355.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.0483, over 2598069.11 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 32.0
+2024-08-03 21:00:53,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=229885.33333333334, ans=0.125
+2024-08-03 21:01:08,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.51 vs. limit=6.0
+2024-08-03 21:01:12,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=229958.66666666666, ans=0.125
+2024-08-03 21:01:23,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.33 vs. limit=22.5
+2024-08-03 21:01:28,073 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.223e+01 1.040e+02 1.288e+02 1.609e+02 2.437e+02, threshold=2.577e+02, percent-clipped=2.0
+2024-08-03 21:01:30,765 INFO [train.py:1114] (3/4) Epoch 18, batch 850, loss[loss=0.1765, simple_loss=0.266, pruned_loss=0.04351, over 13312.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2695, pruned_loss=0.04814, over 2610501.69 frames. ], batch size: 40, lr: 6.96e-03, grad_scale: 16.0
+2024-08-03 21:01:41,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=230068.66666666666, ans=0.2
+2024-08-03 21:01:44,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=230068.66666666666, ans=0.0
+2024-08-03 21:01:53,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230105.33333333334, ans=0.125
+2024-08-03 21:01:56,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=12.0
+2024-08-03 21:02:02,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=230142.0, ans=0.125
+2024-08-03 21:02:05,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=230142.0, ans=0.125
+2024-08-03 21:02:15,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=230215.33333333334, ans=0.125
+2024-08-03 21:02:15,876 INFO [train.py:1114] (3/4) Epoch 18, batch 900, loss[loss=0.1433, simple_loss=0.2334, pruned_loss=0.02663, over 13344.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2692, pruned_loss=0.04798, over 2612799.04 frames. ], batch size: 33, lr: 6.96e-03, grad_scale: 16.0
+2024-08-03 21:02:27,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0
+2024-08-03 21:02:37,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=230288.66666666666, ans=0.125
+2024-08-03 21:02:44,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=230325.33333333334, ans=0.0
+2024-08-03 21:02:49,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=230325.33333333334, ans=0.025
+2024-08-03 21:02:59,124 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.753e+01 1.118e+02 1.313e+02 1.560e+02 2.225e+02, threshold=2.625e+02, percent-clipped=0.0
+2024-08-03 21:03:02,040 INFO [train.py:1114] (3/4) Epoch 18, batch 950, loss[loss=0.1757, simple_loss=0.2633, pruned_loss=0.04403, over 13535.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2695, pruned_loss=0.04818, over 2612711.84 frames. ], batch size: 34, lr: 6.96e-03, grad_scale: 16.0
+2024-08-03 21:03:22,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=230472.0, ans=0.2
+2024-08-03 21:03:23,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=230472.0, ans=0.125
+2024-08-03 21:03:27,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=230472.0, ans=0.2
+2024-08-03 21:03:29,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=230508.66666666666, ans=0.0
+2024-08-03 21:03:32,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=230508.66666666666, ans=0.125
+2024-08-03 21:03:34,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=230508.66666666666, ans=0.0
+2024-08-03 21:03:42,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=230545.33333333334, ans=0.125
+2024-08-03 21:03:48,150 INFO [train.py:1114] (3/4) Epoch 18, batch 1000, loss[loss=0.1808, simple_loss=0.267, pruned_loss=0.04732, over 13372.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2702, pruned_loss=0.04868, over 2610322.72 frames. ], batch size: 35, lr: 6.95e-03, grad_scale: 16.0
+2024-08-03 21:03:56,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230618.66666666666, ans=0.125
+2024-08-03 21:04:24,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=230692.0, ans=0.125
+2024-08-03 21:04:28,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230728.66666666666, ans=0.1
+2024-08-03 21:04:34,935 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.346e+01 1.082e+02 1.211e+02 1.465e+02 2.308e+02, threshold=2.421e+02, percent-clipped=0.0
+2024-08-03 21:04:39,858 INFO [train.py:1114] (3/4) Epoch 18, batch 1050, loss[loss=0.1794, simple_loss=0.2692, pruned_loss=0.04474, over 13578.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2699, pruned_loss=0.04862, over 2615248.21 frames. ], batch size: 39, lr: 6.95e-03, grad_scale: 16.0
+2024-08-03 21:04:45,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.68 vs. limit=22.5
+2024-08-03 21:04:48,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=230802.0, ans=0.04949747468305833
+2024-08-03 21:05:08,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=6.0
+2024-08-03 21:05:26,816 INFO [train.py:1114] (3/4) Epoch 18, batch 1100, loss[loss=0.1677, simple_loss=0.259, pruned_loss=0.03816, over 13575.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2694, pruned_loss=0.0485, over 2619288.89 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0
+2024-08-03 21:05:31,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=230948.66666666666, ans=0.0
+2024-08-03 21:05:35,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=230985.33333333334, ans=0.0
+2024-08-03 21:05:53,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=231058.66666666666, ans=0.125
+2024-08-03 21:06:09,958 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.017e+01 1.089e+02 1.225e+02 1.560e+02 2.576e+02, threshold=2.450e+02, percent-clipped=1.0
+2024-08-03 21:06:12,648 INFO [train.py:1114] (3/4) Epoch 18, batch 1150, loss[loss=0.1744, simple_loss=0.2576, pruned_loss=0.04558, over 13564.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2699, pruned_loss=0.04883, over 2618549.75 frames. ], batch size: 36, lr: 6.95e-03, grad_scale: 16.0
+2024-08-03 21:06:15,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231132.0, ans=0.125
+2024-08-03 21:06:21,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=231168.66666666666, ans=0.0
+2024-08-03 21:06:29,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=231168.66666666666, ans=0.2
+2024-08-03 21:06:48,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231278.66666666666, ans=0.1
+2024-08-03 21:06:58,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.09 vs. limit=22.5
+2024-08-03 21:06:58,353 INFO [train.py:1114] (3/4) Epoch 18, batch 1200, loss[loss=0.2151, simple_loss=0.3064, pruned_loss=0.06184, over 13571.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2711, pruned_loss=0.04903, over 2616305.62 frames. ], batch size: 39, lr: 6.94e-03, grad_scale: 32.0
+2024-08-03 21:07:03,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231315.33333333334, ans=0.1
+2024-08-03 21:07:19,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.60 vs. limit=10.0
+2024-08-03 21:07:25,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.20 vs. limit=22.5
+2024-08-03 21:07:26,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=231425.33333333334, ans=0.0
+2024-08-03 21:07:26,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=231425.33333333334, ans=0.125
+2024-08-03 21:07:26,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=231425.33333333334, ans=0.0
+2024-08-03 21:07:32,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.27 vs. limit=15.0
+2024-08-03 21:07:36,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=231462.0, ans=0.0
+2024-08-03 21:07:40,620 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.911e+01 1.076e+02 1.246e+02 1.591e+02 2.283e+02, threshold=2.493e+02, percent-clipped=0.0
+2024-08-03 21:07:43,363 INFO [train.py:1114] (3/4) Epoch 18, batch 1250, loss[loss=0.1929, simple_loss=0.2878, pruned_loss=0.04896, over 13463.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.271, pruned_loss=0.04888, over 2627790.04 frames. ], batch size: 42, lr: 6.94e-03, grad_scale: 32.0
+2024-08-03 21:07:58,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=231535.33333333334, ans=0.0
+2024-08-03 21:08:00,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=231535.33333333334, ans=10.0
+2024-08-03 21:08:08,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=231572.0, ans=0.0
+2024-08-03 21:08:13,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=231572.0, ans=0.05
+2024-08-03 21:08:23,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=231608.66666666666, ans=0.125
+2024-08-03 21:08:23,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231608.66666666666, ans=0.1
+2024-08-03 21:08:30,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.46 vs. limit=15.0
+2024-08-03 21:08:32,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.17 vs. limit=22.5
+2024-08-03 21:08:36,235 INFO [train.py:1114] (3/4) Epoch 18, batch 1300, loss[loss=0.1943, simple_loss=0.2842, pruned_loss=0.05215, over 12936.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2704, pruned_loss=0.04871, over 2629514.42 frames.
], batch size: 52, lr: 6.94e-03, grad_scale: 32.0 +2024-08-03 21:08:39,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=231682.0, ans=0.125 +2024-08-03 21:08:39,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=231682.0, ans=0.125 +2024-08-03 21:08:58,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=231755.33333333334, ans=0.125 +2024-08-03 21:09:02,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=231792.0, ans=0.025 +2024-08-03 21:09:03,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231792.0, ans=0.125 +2024-08-03 21:09:17,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231828.66666666666, ans=0.0 +2024-08-03 21:09:19,121 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.689e+01 1.086e+02 1.232e+02 1.465e+02 2.299e+02, threshold=2.463e+02, percent-clipped=0.0 +2024-08-03 21:09:21,776 INFO [train.py:1114] (3/4) Epoch 18, batch 1350, loss[loss=0.161, simple_loss=0.2534, pruned_loss=0.03428, over 13556.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2706, pruned_loss=0.04874, over 2637344.60 frames. ], batch size: 37, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:09:29,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0 +2024-08-03 21:10:07,017 INFO [train.py:1114] (3/4) Epoch 18, batch 1400, loss[loss=0.1843, simple_loss=0.2552, pruned_loss=0.05669, over 13247.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2707, pruned_loss=0.04886, over 2641244.09 frames. ], batch size: 31, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:13,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=15.0 +2024-08-03 21:10:17,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232085.33333333334, ans=0.1 +2024-08-03 21:10:18,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.56 vs. limit=22.5 +2024-08-03 21:10:34,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232158.66666666666, ans=0.1 +2024-08-03 21:10:47,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=232195.33333333334, ans=0.125 +2024-08-03 21:10:49,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=232195.33333333334, ans=0.0 +2024-08-03 21:10:49,663 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.779e+01 1.159e+02 1.340e+02 1.703e+02 2.344e+02, threshold=2.679e+02, percent-clipped=0.0 +2024-08-03 21:10:52,346 INFO [train.py:1114] (3/4) Epoch 18, batch 1450, loss[loss=0.2011, simple_loss=0.2922, pruned_loss=0.05497, over 13421.00 frames. 
], tot_loss[loss=0.1853, simple_loss=0.2718, pruned_loss=0.04938, over 2640453.62 frames. ], batch size: 43, lr: 6.93e-03, grad_scale: 32.0 +2024-08-03 21:10:52,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0 +2024-08-03 21:10:55,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=232232.0, ans=0.0 +2024-08-03 21:11:00,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.00 vs. limit=15.0 +2024-08-03 21:11:04,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=232268.66666666666, ans=0.125 +2024-08-03 21:11:07,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232268.66666666666, ans=0.125 +2024-08-03 21:11:09,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232305.33333333334, ans=0.125 +2024-08-03 21:11:19,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232342.0, ans=0.125 +2024-08-03 21:11:19,448 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:11:22,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=232342.0, ans=0.2 +2024-08-03 21:11:30,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=232378.66666666666, ans=0.125 +2024-08-03 21:11:38,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232378.66666666666, ans=0.1 +2024-08-03 21:11:39,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232378.66666666666, ans=0.1 +2024-08-03 21:11:39,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232378.66666666666, ans=0.125 +2024-08-03 21:11:39,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=232415.33333333334, ans=0.0 +2024-08-03 21:11:40,681 INFO [train.py:1114] (3/4) Epoch 18, batch 1500, loss[loss=0.1843, simple_loss=0.2736, pruned_loss=0.04746, over 13399.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2724, pruned_loss=0.04967, over 2640280.34 frames. ], batch size: 39, lr: 6.93e-03, grad_scale: 16.0 +2024-08-03 21:11:48,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=232415.33333333334, ans=0.2 +2024-08-03 21:12:06,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.26 vs. 
limit=15.0 +2024-08-03 21:12:15,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=232525.33333333334, ans=0.125 +2024-08-03 21:12:28,167 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.948e+01 1.098e+02 1.405e+02 1.700e+02 3.079e+02, threshold=2.810e+02, percent-clipped=1.0 +2024-08-03 21:12:29,962 INFO [train.py:1114] (3/4) Epoch 18, batch 1550, loss[loss=0.2025, simple_loss=0.2856, pruned_loss=0.05969, over 13396.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2728, pruned_loss=0.04985, over 2630537.49 frames. ], batch size: 41, lr: 6.92e-03, grad_scale: 16.0 +2024-08-03 21:12:32,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=232598.66666666666, ans=0.07 +2024-08-03 21:12:33,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=232598.66666666666, ans=0.0 +2024-08-03 21:12:40,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=232635.33333333334, ans=0.125 +2024-08-03 21:12:47,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232672.0, ans=0.125 +2024-08-03 21:13:02,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=232708.66666666666, ans=0.125 +2024-08-03 21:13:07,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=232745.33333333334, ans=0.07 +2024-08-03 21:13:09,300 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:13:11,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=232745.33333333334, ans=0.125 +2024-08-03 21:13:15,293 INFO [train.py:1114] (3/4) Epoch 18, batch 1600, loss[loss=0.204, simple_loss=0.287, pruned_loss=0.06044, over 13580.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2727, pruned_loss=0.04988, over 2624072.44 frames. ], batch size: 39, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:13:16,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232782.0, ans=0.125 +2024-08-03 21:13:19,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232782.0, ans=0.1 +2024-08-03 21:13:28,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=232818.66666666666, ans=0.025 +2024-08-03 21:13:58,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=232928.66666666666, ans=0.0 +2024-08-03 21:13:59,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=232928.66666666666, ans=0.125 +2024-08-03 21:14:01,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.392e+01 1.109e+02 1.247e+02 1.559e+02 3.003e+02, threshold=2.495e+02, percent-clipped=1.0 +2024-08-03 21:14:03,116 INFO [train.py:1114] (3/4) Epoch 18, batch 1650, loss[loss=0.186, simple_loss=0.2794, pruned_loss=0.04633, over 13336.00 frames. 
], tot_loss[loss=0.1857, simple_loss=0.272, pruned_loss=0.0497, over 2622193.01 frames. ], batch size: 40, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:14:26,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-08-03 21:14:48,614 INFO [train.py:1114] (3/4) Epoch 18, batch 1700, loss[loss=0.1646, simple_loss=0.2492, pruned_loss=0.04004, over 13250.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2715, pruned_loss=0.04934, over 2630935.12 frames. ], batch size: 31, lr: 6.92e-03, grad_scale: 32.0 +2024-08-03 21:14:52,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=15.0 +2024-08-03 21:14:59,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=233185.33333333334, ans=0.2 +2024-08-03 21:15:26,407 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:15:41,606 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.564e+01 1.087e+02 1.250e+02 1.627e+02 3.379e+02, threshold=2.500e+02, percent-clipped=6.0 +2024-08-03 21:15:42,574 INFO [train.py:1114] (3/4) Epoch 18, batch 1750, loss[loss=0.1636, simple_loss=0.2431, pruned_loss=0.04203, over 13536.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.27, pruned_loss=0.04863, over 2633839.58 frames. ], batch size: 31, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:15:45,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=233332.0, ans=0.0 +2024-08-03 21:15:45,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.28 vs. limit=10.0 +2024-08-03 21:15:52,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0 +2024-08-03 21:15:58,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=233368.66666666666, ans=0.07 +2024-08-03 21:16:02,490 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0 +2024-08-03 21:16:24,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233478.66666666666, ans=0.1 +2024-08-03 21:16:27,998 INFO [train.py:1114] (3/4) Epoch 18, batch 1800, loss[loss=0.1781, simple_loss=0.2699, pruned_loss=0.04311, over 13545.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2709, pruned_loss=0.04905, over 2635308.81 frames. 
], batch size: 38, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:16:31,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=233515.33333333334, ans=0.0 +2024-08-03 21:16:41,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233552.0, ans=0.1 +2024-08-03 21:16:42,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=233552.0, ans=0.5 +2024-08-03 21:16:44,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=233552.0, ans=0.09899494936611666 +2024-08-03 21:16:44,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.58 vs. limit=22.5 +2024-08-03 21:16:45,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=233588.66666666666, ans=0.0 +2024-08-03 21:16:46,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=233588.66666666666, ans=0.035 +2024-08-03 21:16:59,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=233625.33333333334, ans=0.0 +2024-08-03 21:17:02,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233625.33333333334, ans=0.125 +2024-08-03 21:17:12,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.69 vs. limit=10.0 +2024-08-03 21:17:12,816 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.303e+01 1.188e+02 1.406e+02 1.892e+02 3.223e+02, threshold=2.812e+02, percent-clipped=8.0 +2024-08-03 21:17:13,750 INFO [train.py:1114] (3/4) Epoch 18, batch 1850, loss[loss=0.1742, simple_loss=0.2691, pruned_loss=0.03963, over 13411.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2705, pruned_loss=0.04873, over 2637908.46 frames. ], batch size: 39, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:17:14,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=233698.66666666666, ans=0.0 +2024-08-03 21:17:30,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=233735.33333333334, ans=0.2 +2024-08-03 21:17:31,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=233772.0, ans=0.0 +2024-08-03 21:17:37,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=233772.0, ans=0.95 +2024-08-03 21:17:49,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.98 vs. limit=15.0 +2024-08-03 21:17:54,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=233845.33333333334, ans=0.2 +2024-08-03 21:17:59,006 INFO [train.py:1114] (3/4) Epoch 18, batch 1900, loss[loss=0.1941, simple_loss=0.2861, pruned_loss=0.05107, over 13329.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2717, pruned_loss=0.04923, over 2640840.75 frames. 
], batch size: 40, lr: 6.91e-03, grad_scale: 16.0 +2024-08-03 21:18:00,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=233882.0, ans=0.2 +2024-08-03 21:18:05,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233882.0, ans=0.1 +2024-08-03 21:18:18,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=233955.33333333334, ans=0.2 +2024-08-03 21:18:20,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.64 vs. limit=12.0 +2024-08-03 21:18:21,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=233955.33333333334, ans=0.0 +2024-08-03 21:18:23,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233955.33333333334, ans=0.125 +2024-08-03 21:18:30,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=233992.0, ans=0.0 +2024-08-03 21:18:48,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.661e+01 1.108e+02 1.336e+02 1.650e+02 2.713e+02, threshold=2.672e+02, percent-clipped=0.0 +2024-08-03 21:18:48,977 INFO [train.py:1114] (3/4) Epoch 18, batch 1950, loss[loss=0.1842, simple_loss=0.2756, pruned_loss=0.04643, over 13565.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2724, pruned_loss=0.04923, over 2647001.69 frames. ], batch size: 36, lr: 6.90e-03, grad_scale: 16.0 +2024-08-03 21:18:52,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=234065.33333333334, ans=0.0 +2024-08-03 21:18:59,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=234065.33333333334, ans=0.125 +2024-08-03 21:18:59,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.71 vs. limit=15.0 +2024-08-03 21:19:06,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=234102.0, ans=0.04949747468305833 +2024-08-03 21:19:26,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=234175.33333333334, ans=0.0 +2024-08-03 21:19:43,558 INFO [train.py:1114] (3/4) Epoch 18, batch 2000, loss[loss=0.1658, simple_loss=0.2472, pruned_loss=0.04216, over 13530.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.273, pruned_loss=0.04959, over 2636938.15 frames. 
], batch size: 31, lr: 6.90e-03, grad_scale: 32.0 +2024-08-03 21:19:48,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234248.66666666666, ans=0.125 +2024-08-03 21:20:00,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=234285.33333333334, ans=0.125 +2024-08-03 21:20:01,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=234322.0, ans=0.05 +2024-08-03 21:20:05,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234322.0, ans=0.1 +2024-08-03 21:20:20,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0 +2024-08-03 21:20:28,746 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.785e+01 1.141e+02 1.284e+02 1.591e+02 2.253e+02, threshold=2.569e+02, percent-clipped=0.0 +2024-08-03 21:20:29,736 INFO [train.py:1114] (3/4) Epoch 18, batch 2050, loss[loss=0.1809, simple_loss=0.2634, pruned_loss=0.0492, over 13430.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2716, pruned_loss=0.04937, over 2633364.89 frames. ], batch size: 32, lr: 6.90e-03, grad_scale: 32.0 +2024-08-03 21:20:33,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=234432.0, ans=0.125 +2024-08-03 21:20:47,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.67 vs. limit=15.0 +2024-08-03 21:20:57,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=234542.0, ans=0.04949747468305833 +2024-08-03 21:20:57,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234542.0, ans=0.125 +2024-08-03 21:21:10,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=234578.66666666666, ans=0.025 +2024-08-03 21:21:14,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=234615.33333333334, ans=0.0 +2024-08-03 21:21:14,908 INFO [train.py:1114] (3/4) Epoch 18, batch 2100, loss[loss=0.1797, simple_loss=0.2684, pruned_loss=0.0455, over 13546.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2709, pruned_loss=0.049, over 2638232.63 frames. ], batch size: 37, lr: 6.89e-03, grad_scale: 32.0 +2024-08-03 21:21:15,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234615.33333333334, ans=0.125 +2024-08-03 21:21:24,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.56 vs. 
limit=15.0 +2024-08-03 21:21:34,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234688.66666666666, ans=0.1 +2024-08-03 21:21:34,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234688.66666666666, ans=0.125 +2024-08-03 21:21:38,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234688.66666666666, ans=0.1 +2024-08-03 21:21:40,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.47 vs. limit=15.0 +2024-08-03 21:22:01,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.837e+01 1.081e+02 1.198e+02 1.435e+02 3.177e+02, threshold=2.396e+02, percent-clipped=1.0 +2024-08-03 21:22:02,384 INFO [train.py:1114] (3/4) Epoch 18, batch 2150, loss[loss=0.1859, simple_loss=0.264, pruned_loss=0.05385, over 13564.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2703, pruned_loss=0.04887, over 2647519.53 frames. ], batch size: 36, lr: 6.89e-03, grad_scale: 32.0 +2024-08-03 21:22:03,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234798.66666666666, ans=0.1 +2024-08-03 21:22:03,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=234798.66666666666, ans=0.025 +2024-08-03 21:22:28,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234872.0, ans=0.125 +2024-08-03 21:22:36,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.34 vs. limit=15.0 +2024-08-03 21:22:53,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.51 vs. limit=15.0 +2024-08-03 21:22:54,821 INFO [train.py:1114] (3/4) Epoch 18, batch 2200, loss[loss=0.2013, simple_loss=0.2896, pruned_loss=0.05655, over 13375.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2703, pruned_loss=0.04884, over 2645091.82 frames. ], batch size: 39, lr: 6.89e-03, grad_scale: 16.0 +2024-08-03 21:23:10,770 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.81 vs. limit=15.0 +2024-08-03 21:23:18,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=15.0 +2024-08-03 21:23:23,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=235092.0, ans=0.0 +2024-08-03 21:23:26,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.27 vs. limit=6.0 +2024-08-03 21:23:26,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=235092.0, ans=0.1 +2024-08-03 21:23:26,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.63 vs. 
limit=12.0 +2024-08-03 21:23:40,428 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.106e+01 1.127e+02 1.422e+02 1.837e+02 2.804e+02, threshold=2.844e+02, percent-clipped=6.0 +2024-08-03 21:23:40,465 INFO [train.py:1114] (3/4) Epoch 18, batch 2250, loss[loss=0.1768, simple_loss=0.2749, pruned_loss=0.03938, over 13355.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2702, pruned_loss=0.04866, over 2643160.09 frames. ], batch size: 37, lr: 6.89e-03, grad_scale: 16.0 +2024-08-03 21:23:42,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.36 vs. limit=10.0 +2024-08-03 21:24:05,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=235238.66666666666, ans=0.025 +2024-08-03 21:24:16,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=235312.0, ans=0.125 +2024-08-03 21:24:25,615 INFO [train.py:1114] (3/4) Epoch 18, batch 2300, loss[loss=0.1471, simple_loss=0.2297, pruned_loss=0.03223, over 13581.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2689, pruned_loss=0.04837, over 2639754.47 frames. ], batch size: 33, lr: 6.88e-03, grad_scale: 16.0 +2024-08-03 21:24:35,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=235385.33333333334, ans=0.2 +2024-08-03 21:24:41,908 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.72 vs. limit=22.5 +2024-08-03 21:24:44,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235422.0, ans=0.125 +2024-08-03 21:25:06,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=235495.33333333334, ans=0.0 +2024-08-03 21:25:06,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=235495.33333333334, ans=0.2 +2024-08-03 21:25:10,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.665e+01 1.085e+02 1.233e+02 1.641e+02 2.605e+02, threshold=2.466e+02, percent-clipped=0.0 +2024-08-03 21:25:10,902 INFO [train.py:1114] (3/4) Epoch 18, batch 2350, loss[loss=0.1724, simple_loss=0.265, pruned_loss=0.03984, over 13577.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2687, pruned_loss=0.04808, over 2642499.57 frames. ], batch size: 38, lr: 6.88e-03, grad_scale: 16.0 +2024-08-03 21:25:15,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.16 vs. 
limit=15.0 +2024-08-03 21:25:39,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=235642.0, ans=0.05 +2024-08-03 21:25:41,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235642.0, ans=0.125 +2024-08-03 21:25:42,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235642.0, ans=0.1 +2024-08-03 21:25:45,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=235642.0, ans=0.125 +2024-08-03 21:25:46,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.12 vs. limit=10.0 +2024-08-03 21:25:56,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=235678.66666666666, ans=0.125 +2024-08-03 21:25:57,747 INFO [train.py:1114] (3/4) Epoch 18, batch 2400, loss[loss=0.1619, simple_loss=0.2508, pruned_loss=0.03653, over 13517.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2689, pruned_loss=0.04807, over 2643753.76 frames. ], batch size: 35, lr: 6.88e-03, grad_scale: 32.0 +2024-08-03 21:25:58,000 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:26:04,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=235715.33333333334, ans=0.025 +2024-08-03 21:26:28,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=235788.66666666666, ans=0.2 +2024-08-03 21:26:36,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=235825.33333333334, ans=0.125 +2024-08-03 21:26:47,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=235862.0, ans=0.0 +2024-08-03 21:26:48,968 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.162e+01 1.095e+02 1.213e+02 1.558e+02 2.561e+02, threshold=2.426e+02, percent-clipped=1.0 +2024-08-03 21:26:49,006 INFO [train.py:1114] (3/4) Epoch 18, batch 2450, loss[loss=0.2043, simple_loss=0.287, pruned_loss=0.06084, over 13349.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2701, pruned_loss=0.04864, over 2633391.01 frames. ], batch size: 37, lr: 6.88e-03, grad_scale: 32.0 +2024-08-03 21:27:03,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235935.33333333334, ans=0.1 +2024-08-03 21:27:22,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.48 vs. limit=15.0 +2024-08-03 21:27:23,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.16 vs. 
limit=15.0 +2024-08-03 21:27:28,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=236045.33333333334, ans=0.025 +2024-08-03 21:27:30,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236045.33333333334, ans=0.1 +2024-08-03 21:27:34,609 INFO [train.py:1114] (3/4) Epoch 18, batch 2500, loss[loss=0.1841, simple_loss=0.2696, pruned_loss=0.04931, over 13395.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2701, pruned_loss=0.04861, over 2637522.29 frames. ], batch size: 39, lr: 6.87e-03, grad_scale: 32.0 +2024-08-03 21:27:55,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.70 vs. limit=22.5 +2024-08-03 21:28:18,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=236265.33333333334, ans=0.2 +2024-08-03 21:28:19,323 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.251e+01 1.154e+02 1.291e+02 1.813e+02 3.422e+02, threshold=2.583e+02, percent-clipped=8.0 +2024-08-03 21:28:19,360 INFO [train.py:1114] (3/4) Epoch 18, batch 2550, loss[loss=0.1697, simple_loss=0.2501, pruned_loss=0.04469, over 13555.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2701, pruned_loss=0.04858, over 2638602.69 frames. ], batch size: 31, lr: 6.87e-03, grad_scale: 32.0 +2024-08-03 21:28:23,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236265.33333333334, ans=0.125 +2024-08-03 21:28:26,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=236265.33333333334, ans=0.0 +2024-08-03 21:28:31,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=236302.0, ans=0.0 +2024-08-03 21:28:50,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236375.33333333334, ans=0.125 +2024-08-03 21:28:54,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=236412.0, ans=0.0 +2024-08-03 21:28:55,533 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.260e-02 +2024-08-03 21:29:01,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.26 vs. limit=15.0 +2024-08-03 21:29:03,096 INFO [train.py:1114] (3/4) Epoch 18, batch 2600, loss[loss=0.1814, simple_loss=0.2624, pruned_loss=0.05018, over 13546.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2706, pruned_loss=0.04892, over 2637670.02 frames. 
], batch size: 36, lr: 6.87e-03, grad_scale: 16.0 +2024-08-03 21:29:08,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=236448.66666666666, ans=0.125 +2024-08-03 21:29:12,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=236485.33333333334, ans=0.125 +2024-08-03 21:29:20,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=236522.0, ans=0.025 +2024-08-03 21:29:22,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236522.0, ans=0.1 +2024-08-03 21:29:30,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.61 vs. limit=10.0 +2024-08-03 21:29:37,910 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-08-03 21:29:43,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236595.33333333334, ans=0.125 +2024-08-03 21:29:47,192 INFO [train.py:1114] (3/4) Epoch 18, batch 2650, loss[loss=0.1877, simple_loss=0.2767, pruned_loss=0.04937, over 13309.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2711, pruned_loss=0.04901, over 2640258.43 frames. ], batch size: 46, lr: 6.87e-03, grad_scale: 16.0 +2024-08-03 21:29:48,040 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.186e+01 1.172e+02 1.315e+02 1.569e+02 3.387e+02, threshold=2.631e+02, percent-clipped=2.0 +2024-08-03 21:29:55,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236668.66666666666, ans=0.125 +2024-08-03 21:30:02,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=236668.66666666666, ans=0.125 +2024-08-03 21:30:04,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=236705.33333333334, ans=0.125 +2024-08-03 21:30:05,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=236705.33333333334, ans=0.0 +2024-08-03 21:30:15,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=236742.0, ans=0.125 +2024-08-03 21:30:30,355 INFO [train.py:1114] (3/4) Epoch 18, batch 2700, loss[loss=0.1653, simple_loss=0.2564, pruned_loss=0.03713, over 13545.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2714, pruned_loss=0.04901, over 2637786.47 frames. 
], batch size: 40, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:30:33,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236815.33333333334, ans=0.1 +2024-08-03 21:30:52,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=236888.66666666666, ans=0.025 +2024-08-03 21:31:13,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=236998.66666666666, ans=0.0 +2024-08-03 21:31:13,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=236998.66666666666, ans=0.125 +2024-08-03 21:31:13,964 INFO [train.py:1114] (3/4) Epoch 18, batch 2750, loss[loss=0.1825, simple_loss=0.2623, pruned_loss=0.05136, over 13336.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2708, pruned_loss=0.04889, over 2635656.19 frames. ], batch size: 34, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:31:14,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.742e+01 1.152e+02 1.311e+02 1.647e+02 2.709e+02, threshold=2.622e+02, percent-clipped=1.0 +2024-08-03 21:31:32,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.13 vs. limit=15.0 +2024-08-03 21:31:43,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten.whitening_limit, batch_count=237108.66666666666, ans=15.0 +2024-08-03 21:31:53,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=237145.33333333334, ans=0.05 +2024-08-03 21:31:57,332 INFO [train.py:1114] (3/4) Epoch 18, batch 2800, loss[loss=0.2133, simple_loss=0.2919, pruned_loss=0.06737, over 9343.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2715, pruned_loss=0.04933, over 2627042.03 frames. ], batch size: 96, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:32:02,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=3.94 vs. limit=12.0 +2024-08-03 21:32:16,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237255.33333333334, ans=0.1 +2024-08-03 21:32:20,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=237255.33333333334, ans=0.2 +2024-08-03 21:32:41,590 INFO [train.py:1114] (3/4) Epoch 18, batch 2850, loss[loss=0.1666, simple_loss=0.2547, pruned_loss=0.0392, over 13369.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2719, pruned_loss=0.0497, over 2620510.27 frames. 
], batch size: 35, lr: 6.86e-03, grad_scale: 16.0 +2024-08-03 21:32:41,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=237365.33333333334, ans=0.125 +2024-08-03 21:32:41,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=237365.33333333334, ans=0.0 +2024-08-03 21:32:43,241 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.041e+01 1.131e+02 1.338e+02 1.690e+02 3.058e+02, threshold=2.676e+02, percent-clipped=5.0 +2024-08-03 21:32:49,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=237365.33333333334, ans=0.0 +2024-08-03 21:32:51,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.38 vs. limit=15.0 +2024-08-03 21:33:01,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=237438.66666666666, ans=0.0 +2024-08-03 21:33:20,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237512.0, ans=0.1 +2024-08-03 21:33:28,456 INFO [train.py:1114] (3/4) Epoch 18, batch 2900, loss[loss=0.1764, simple_loss=0.2641, pruned_loss=0.04429, over 13373.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2726, pruned_loss=0.04977, over 2631380.00 frames. ], batch size: 36, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:33:28,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237548.66666666666, ans=0.125 +2024-08-03 21:33:43,834 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:33:50,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0 +2024-08-03 21:34:06,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237695.33333333334, ans=0.125 +2024-08-03 21:34:09,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237695.33333333334, ans=0.1 +2024-08-03 21:34:10,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=237695.33333333334, ans=0.125 +2024-08-03 21:34:11,741 INFO [train.py:1114] (3/4) Epoch 18, batch 2950, loss[loss=0.1799, simple_loss=0.2595, pruned_loss=0.05015, over 13331.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2711, pruned_loss=0.04945, over 2629476.69 frames. 
], batch size: 34, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:34:11,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237732.0, ans=0.125 +2024-08-03 21:34:13,341 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.218e+01 1.166e+02 1.489e+02 1.763e+02 2.783e+02, threshold=2.978e+02, percent-clipped=2.0 +2024-08-03 21:34:24,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237768.66666666666, ans=0.125 +2024-08-03 21:34:36,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237842.0, ans=0.1 +2024-08-03 21:34:39,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=237842.0, ans=0.025 +2024-08-03 21:34:49,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=237878.66666666666, ans=0.125 +2024-08-03 21:34:54,886 INFO [train.py:1114] (3/4) Epoch 18, batch 3000, loss[loss=0.1664, simple_loss=0.2621, pruned_loss=0.03537, over 13538.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2711, pruned_loss=0.04915, over 2630229.26 frames. ], batch size: 37, lr: 6.85e-03, grad_scale: 16.0 +2024-08-03 21:34:54,887 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 21:35:04,734 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1701, simple_loss=0.269, pruned_loss=0.03557, over 944034.00 frames. +2024-08-03 21:35:04,735 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 21:35:21,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=237988.66666666666, ans=0.025 +2024-08-03 21:35:29,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=237988.66666666666, ans=0.025 +2024-08-03 21:35:34,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238025.33333333334, ans=0.1 +2024-08-03 21:35:40,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=238062.0, ans=0.125 +2024-08-03 21:35:48,164 INFO [train.py:1114] (3/4) Epoch 18, batch 3050, loss[loss=0.1716, simple_loss=0.2548, pruned_loss=0.04424, over 13543.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2712, pruned_loss=0.04897, over 2626920.97 frames. ], batch size: 35, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:35:49,907 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.482e+01 1.049e+02 1.161e+02 1.346e+02 2.617e+02, threshold=2.322e+02, percent-clipped=0.0 +2024-08-03 21:36:15,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=238208.66666666666, ans=0.0 +2024-08-03 21:36:16,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238208.66666666666, ans=0.1 +2024-08-03 21:36:31,298 INFO [train.py:1114] (3/4) Epoch 18, batch 3100, loss[loss=0.2305, simple_loss=0.3122, pruned_loss=0.07441, over 13329.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2713, pruned_loss=0.049, over 2626882.05 frames. 
], batch size: 46, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:36:53,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=238355.33333333334, ans=0.025 +2024-08-03 21:36:56,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.96 vs. limit=10.0 +2024-08-03 21:36:56,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.20 vs. limit=15.0 +2024-08-03 21:37:03,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=238392.0, ans=0.1 +2024-08-03 21:37:06,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=238428.66666666666, ans=0.5 +2024-08-03 21:37:13,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=238428.66666666666, ans=0.2 +2024-08-03 21:37:15,537 INFO [train.py:1114] (3/4) Epoch 18, batch 3150, loss[loss=0.1941, simple_loss=0.2809, pruned_loss=0.05366, over 13324.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2717, pruned_loss=0.04906, over 2628728.78 frames. ], batch size: 49, lr: 6.84e-03, grad_scale: 16.0 +2024-08-03 21:37:17,262 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.157e+01 1.121e+02 1.376e+02 1.775e+02 3.223e+02, threshold=2.752e+02, percent-clipped=7.0 +2024-08-03 21:37:19,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.84 vs. limit=15.0 +2024-08-03 21:37:30,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=238502.0, ans=0.0 +2024-08-03 21:37:36,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.01 vs. limit=10.0 +2024-08-03 21:37:46,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=238575.33333333334, ans=0.0 +2024-08-03 21:37:57,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=238612.0, ans=0.0 +2024-08-03 21:37:58,751 INFO [train.py:1114] (3/4) Epoch 18, batch 3200, loss[loss=0.16, simple_loss=0.2502, pruned_loss=0.03487, over 13545.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.271, pruned_loss=0.04902, over 2634901.08 frames. ], batch size: 37, lr: 6.84e-03, grad_scale: 32.0 +2024-08-03 21:37:59,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.09 vs. limit=10.0 +2024-08-03 21:37:59,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.38 vs. 
limit=15.0 +2024-08-03 21:38:22,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=238722.0, ans=0.2 +2024-08-03 21:38:24,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=238758.66666666666, ans=0.2 +2024-08-03 21:38:40,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.51 vs. limit=15.0 +2024-08-03 21:38:43,450 INFO [train.py:1114] (3/4) Epoch 18, batch 3250, loss[loss=0.1887, simple_loss=0.2804, pruned_loss=0.04854, over 13377.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2717, pruned_loss=0.04922, over 2638992.12 frames. ], batch size: 38, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:38:45,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.730e+01 1.129e+02 1.271e+02 1.537e+02 2.545e+02, threshold=2.542e+02, percent-clipped=0.0 +2024-08-03 21:39:14,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238942.0, ans=0.125 +2024-08-03 21:39:21,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238978.66666666666, ans=0.125 +2024-08-03 21:39:27,933 INFO [train.py:1114] (3/4) Epoch 18, batch 3300, loss[loss=0.2147, simple_loss=0.3053, pruned_loss=0.06209, over 12863.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2706, pruned_loss=0.04899, over 2639978.06 frames. ], batch size: 52, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:39:37,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.99 vs. limit=22.5 +2024-08-03 21:39:44,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.12 vs. limit=15.0 +2024-08-03 21:39:47,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=239088.66666666666, ans=0.0 +2024-08-03 21:39:52,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239088.66666666666, ans=0.0 +2024-08-03 21:40:10,669 INFO [train.py:1114] (3/4) Epoch 18, batch 3350, loss[loss=0.1812, simple_loss=0.2676, pruned_loss=0.0474, over 12982.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2713, pruned_loss=0.04949, over 2629529.90 frames. ], batch size: 48, lr: 6.83e-03, grad_scale: 32.0 +2024-08-03 21:40:12,337 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.013e+01 1.097e+02 1.292e+02 1.574e+02 2.403e+02, threshold=2.585e+02, percent-clipped=0.0 +2024-08-03 21:40:33,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=239272.0, ans=0.125 +2024-08-03 21:40:38,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=239308.66666666666, ans=0.05 +2024-08-03 21:40:39,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=239308.66666666666, ans=0.125 +2024-08-03 21:40:40,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.92 vs. 
limit=12.0 +2024-08-03 21:40:49,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=239345.33333333334, ans=0.125 +2024-08-03 21:40:53,383 INFO [train.py:1114] (3/4) Epoch 18, batch 3400, loss[loss=0.189, simple_loss=0.2601, pruned_loss=0.05897, over 13517.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2707, pruned_loss=0.04935, over 2624311.77 frames. ], batch size: 31, lr: 6.83e-03, grad_scale: 16.0 +2024-08-03 21:40:57,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239382.0, ans=0.125 +2024-08-03 21:41:02,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239418.66666666666, ans=0.125 +2024-08-03 21:41:15,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=239455.33333333334, ans=0.125 +2024-08-03 21:41:19,942 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:41:31,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=239528.66666666666, ans=0.5 +2024-08-03 21:41:32,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=239528.66666666666, ans=0.0 +2024-08-03 21:41:36,157 INFO [train.py:1114] (3/4) Epoch 18, batch 3450, loss[loss=0.2021, simple_loss=0.2899, pruned_loss=0.05717, over 12901.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2716, pruned_loss=0.04947, over 2627907.14 frames. ], batch size: 52, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:41:36,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=239565.33333333334, ans=0.125 +2024-08-03 21:41:37,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239565.33333333334, ans=0.1 +2024-08-03 21:41:38,583 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.763e+01 1.067e+02 1.315e+02 1.546e+02 2.791e+02, threshold=2.630e+02, percent-clipped=1.0 +2024-08-03 21:41:42,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.47 vs. limit=15.0 +2024-08-03 21:41:56,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.85 vs. limit=22.5 +2024-08-03 21:41:56,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=239638.66666666666, ans=0.1 +2024-08-03 21:41:57,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=239638.66666666666, ans=0.125 +2024-08-03 21:42:05,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239675.33333333334, ans=0.125 +2024-08-03 21:42:11,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=239712.0, ans=0.125 +2024-08-03 21:42:18,552 INFO [train.py:1114] (3/4) Epoch 18, batch 3500, loss[loss=0.1731, simple_loss=0.2639, pruned_loss=0.0412, over 13523.00 frames. 
], tot_loss[loss=0.1849, simple_loss=0.2708, pruned_loss=0.0495, over 2630473.30 frames. ], batch size: 34, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:42:20,867 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.00 vs. limit=15.0 +2024-08-03 21:42:23,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239748.66666666666, ans=0.125 +2024-08-03 21:42:28,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=239785.33333333334, ans=0.0 +2024-08-03 21:42:37,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239822.0, ans=0.125 +2024-08-03 21:42:44,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=239858.66666666666, ans=0.125 +2024-08-03 21:42:46,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=239858.66666666666, ans=0.125 +2024-08-03 21:42:48,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239858.66666666666, ans=0.125 +2024-08-03 21:42:53,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=239895.33333333334, ans=0.125 +2024-08-03 21:42:55,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=239895.33333333334, ans=0.0 +2024-08-03 21:43:00,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=239895.33333333334, ans=0.025 +2024-08-03 21:43:02,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.56 vs. limit=15.0 +2024-08-03 21:43:02,818 INFO [train.py:1114] (3/4) Epoch 18, batch 3550, loss[loss=0.1761, simple_loss=0.2671, pruned_loss=0.04259, over 12466.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2731, pruned_loss=0.05037, over 2628497.77 frames. ], batch size: 58, lr: 6.82e-03, grad_scale: 16.0 +2024-08-03 21:43:05,285 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.515e+01 1.163e+02 1.331e+02 1.591e+02 2.731e+02, threshold=2.663e+02, percent-clipped=1.0 +2024-08-03 21:43:05,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239932.0, ans=0.125 +2024-08-03 21:43:08,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=15.0 +2024-08-03 21:43:18,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=239968.66666666666, ans=0.125 +2024-08-03 21:43:20,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.75 vs. limit=15.0 +2024-08-03 21:43:24,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.67 vs. 
limit=15.0 +2024-08-03 21:43:32,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=240042.0, ans=0.125 +2024-08-03 21:43:47,782 INFO [train.py:1114] (3/4) Epoch 18, batch 3600, loss[loss=0.2235, simple_loss=0.2977, pruned_loss=0.0747, over 9193.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2772, pruned_loss=0.05392, over 2486078.77 frames. ], batch size: 97, lr: 6.82e-03, grad_scale: 32.0 +2024-08-03 21:43:51,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=240115.33333333334, ans=0.0 +2024-08-03 21:43:57,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240152.0, ans=0.1 +2024-08-03 21:44:05,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=240188.66666666666, ans=0.2 +2024-08-03 21:44:07,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. limit=6.0 +2024-08-03 21:44:18,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.94 vs. limit=15.0 +2024-08-03 21:45:02,648 INFO [train.py:1114] (3/4) Epoch 19, batch 0, loss[loss=0.1635, simple_loss=0.2525, pruned_loss=0.0372, over 13343.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2525, pruned_loss=0.0372, over 13343.00 frames. ], batch size: 33, lr: 6.63e-03, grad_scale: 32.0 +2024-08-03 21:45:02,648 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 21:45:08,304 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.1335, 3.1625, 3.0291, 1.8909], device='cuda:3') +2024-08-03 21:45:13,117 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1699, simple_loss=0.2705, pruned_loss=0.03462, over 944034.00 frames. +2024-08-03 21:45:13,175 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 21:45:19,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=240262.0, ans=0.025 +2024-08-03 21:45:26,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.737e+01 1.210e+02 1.300e+02 1.388e+02 2.591e+02, threshold=2.600e+02, percent-clipped=0.0 +2024-08-03 21:45:27,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.38 vs. limit=15.0 +2024-08-03 21:45:40,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=240335.33333333334, ans=0.0 +2024-08-03 21:46:01,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=240408.66666666666, ans=0.125 +2024-08-03 21:46:05,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=240445.33333333334, ans=0.125 +2024-08-03 21:46:05,633 INFO [train.py:1114] (3/4) Epoch 19, batch 50, loss[loss=0.1672, simple_loss=0.2469, pruned_loss=0.04377, over 13406.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2738, pruned_loss=0.05061, over 579097.86 frames. 
], batch size: 32, lr: 6.63e-03, grad_scale: 32.0 +2024-08-03 21:46:07,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240445.33333333334, ans=0.125 +2024-08-03 21:46:09,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=240445.33333333334, ans=0.125 +2024-08-03 21:46:11,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=240445.33333333334, ans=0.2 +2024-08-03 21:46:13,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240482.0, ans=0.1 +2024-08-03 21:46:36,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=240555.33333333334, ans=0.09899494936611666 +2024-08-03 21:46:45,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240592.0, ans=0.1 +2024-08-03 21:46:46,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240592.0, ans=0.125 +2024-08-03 21:46:53,415 INFO [train.py:1114] (3/4) Epoch 19, batch 100, loss[loss=0.1804, simple_loss=0.2654, pruned_loss=0.04773, over 13524.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2744, pruned_loss=0.0501, over 1026533.37 frames. ], batch size: 35, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:46:57,333 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:47:01,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=240665.33333333334, ans=0.0 +2024-08-03 21:47:05,122 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.610e+01 1.117e+02 1.255e+02 1.420e+02 2.602e+02, threshold=2.511e+02, percent-clipped=1.0 +2024-08-03 21:47:09,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=240665.33333333334, ans=0.2 +2024-08-03 21:47:21,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240738.66666666666, ans=0.1 +2024-08-03 21:47:21,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=240738.66666666666, ans=0.1 +2024-08-03 21:47:36,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240775.33333333334, ans=0.125 +2024-08-03 21:47:38,536 INFO [train.py:1114] (3/4) Epoch 19, batch 150, loss[loss=0.1696, simple_loss=0.2563, pruned_loss=0.04144, over 13432.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2714, pruned_loss=0.04889, over 1387678.91 frames. ], batch size: 32, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:47:41,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. 
limit=15.0 +2024-08-03 21:47:56,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=240848.66666666666, ans=0.035 +2024-08-03 21:48:06,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240922.0, ans=0.125 +2024-08-03 21:48:09,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=240922.0, ans=0.2 +2024-08-03 21:48:14,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.23 vs. limit=22.5 +2024-08-03 21:48:17,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=240958.66666666666, ans=0.0 +2024-08-03 21:48:19,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=240958.66666666666, ans=0.0 +2024-08-03 21:48:23,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240958.66666666666, ans=0.125 +2024-08-03 21:48:24,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=240958.66666666666, ans=0.0 +2024-08-03 21:48:26,809 INFO [train.py:1114] (3/4) Epoch 19, batch 200, loss[loss=0.1882, simple_loss=0.2788, pruned_loss=0.04882, over 12475.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2691, pruned_loss=0.04733, over 1665555.59 frames. ], batch size: 58, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:48:29,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=240995.33333333334, ans=0.125 +2024-08-03 21:48:31,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.82 vs. limit=6.0 +2024-08-03 21:48:38,532 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.918e+01 1.063e+02 1.213e+02 1.459e+02 3.041e+02, threshold=2.427e+02, percent-clipped=0.0 +2024-08-03 21:48:43,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241032.0, ans=0.1 +2024-08-03 21:48:49,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.40 vs. limit=22.5 +2024-08-03 21:49:09,198 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:49:11,663 INFO [train.py:1114] (3/4) Epoch 19, batch 250, loss[loss=0.2185, simple_loss=0.3078, pruned_loss=0.06464, over 13319.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2699, pruned_loss=0.04808, over 1884733.78 frames. ], batch size: 46, lr: 6.62e-03, grad_scale: 32.0 +2024-08-03 21:49:18,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=241178.66666666666, ans=0.2 +2024-08-03 21:49:41,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.02 vs. 
limit=22.5 +2024-08-03 21:49:42,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241288.66666666666, ans=0.1 +2024-08-03 21:49:48,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241288.66666666666, ans=0.125 +2024-08-03 21:49:56,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=241325.33333333334, ans=0.025 +2024-08-03 21:49:58,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241362.0, ans=0.125 +2024-08-03 21:49:59,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0 +2024-08-03 21:49:59,656 INFO [train.py:1114] (3/4) Epoch 19, batch 300, loss[loss=0.1837, simple_loss=0.2749, pruned_loss=0.04621, over 13438.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2695, pruned_loss=0.0481, over 2051973.15 frames. ], batch size: 42, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:50:04,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241362.0, ans=0.1 +2024-08-03 21:50:09,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241398.66666666666, ans=0.125 +2024-08-03 21:50:13,615 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.068e+01 1.095e+02 1.222e+02 1.449e+02 2.776e+02, threshold=2.445e+02, percent-clipped=4.0 +2024-08-03 21:50:14,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=241398.66666666666, ans=0.0 +2024-08-03 21:50:35,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=241472.0, ans=0.125 +2024-08-03 21:50:52,429 INFO [train.py:1114] (3/4) Epoch 19, batch 350, loss[loss=0.1701, simple_loss=0.2517, pruned_loss=0.04421, over 13587.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.27, pruned_loss=0.04828, over 2182892.77 frames. ], batch size: 33, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:50:53,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=241545.33333333334, ans=0.125 +2024-08-03 21:51:05,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.58 vs. limit=22.5 +2024-08-03 21:51:08,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=241582.0, ans=0.0 +2024-08-03 21:51:12,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=241618.66666666666, ans=0.0 +2024-08-03 21:51:16,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.70 vs. 
limit=22.5 +2024-08-03 21:51:20,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=241655.33333333334, ans=0.2 +2024-08-03 21:51:20,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241655.33333333334, ans=0.0 +2024-08-03 21:51:22,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241655.33333333334, ans=0.125 +2024-08-03 21:51:29,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=241692.0, ans=0.0 +2024-08-03 21:51:37,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241692.0, ans=0.125 +2024-08-03 21:51:40,085 INFO [train.py:1114] (3/4) Epoch 19, batch 400, loss[loss=0.1877, simple_loss=0.2734, pruned_loss=0.05101, over 13342.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2698, pruned_loss=0.04819, over 2286211.21 frames. ], batch size: 37, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:51:43,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=15.0 +2024-08-03 21:51:48,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.43 vs. limit=22.5 +2024-08-03 21:51:52,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.083e+02 1.174e+02 1.521e+02 2.282e+02, threshold=2.347e+02, percent-clipped=0.0 +2024-08-03 21:51:55,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241765.33333333334, ans=0.1 +2024-08-03 21:52:04,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.64 vs. limit=22.5 +2024-08-03 21:52:06,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=241802.0, ans=0.125 +2024-08-03 21:52:12,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=241838.66666666666, ans=0.0 +2024-08-03 21:52:28,384 INFO [train.py:1114] (3/4) Epoch 19, batch 450, loss[loss=0.1755, simple_loss=0.2683, pruned_loss=0.04131, over 13561.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2704, pruned_loss=0.04864, over 2359694.96 frames. 
], batch size: 38, lr: 6.61e-03, grad_scale: 32.0 +2024-08-03 21:52:34,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=241912.0, ans=0.2 +2024-08-03 21:52:35,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=241912.0, ans=0.05 +2024-08-03 21:52:36,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241948.66666666666, ans=0.125 +2024-08-03 21:52:38,650 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:52:42,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=241948.66666666666, ans=0.0 +2024-08-03 21:52:43,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241948.66666666666, ans=0.125 +2024-08-03 21:53:15,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=242095.33333333334, ans=0.125 +2024-08-03 21:53:16,366 INFO [train.py:1114] (3/4) Epoch 19, batch 500, loss[loss=0.2266, simple_loss=0.3095, pruned_loss=0.07189, over 13411.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.27, pruned_loss=0.04858, over 2425690.93 frames. ], batch size: 43, lr: 6.60e-03, grad_scale: 32.0 +2024-08-03 21:53:19,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=242095.33333333334, ans=0.0 +2024-08-03 21:53:24,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=242132.0, ans=0.125 +2024-08-03 21:53:25,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-08-03 21:53:29,118 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.780e+01 1.159e+02 1.379e+02 1.825e+02 3.055e+02, threshold=2.757e+02, percent-clipped=7.0 +2024-08-03 21:53:29,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242132.0, ans=0.1 +2024-08-03 21:53:32,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=242132.0, ans=0.125 +2024-08-03 21:53:46,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=242205.33333333334, ans=0.2 +2024-08-03 21:54:03,951 INFO [train.py:1114] (3/4) Epoch 19, batch 550, loss[loss=0.219, simple_loss=0.3099, pruned_loss=0.06408, over 13013.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2702, pruned_loss=0.04849, over 2468495.28 frames. ], batch size: 48, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:54:04,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242278.66666666666, ans=0.1 +2024-08-03 21:54:14,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.74 vs. 
limit=22.5 +2024-08-03 21:54:18,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=242315.33333333334, ans=0.125 +2024-08-03 21:54:22,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242352.0, ans=0.1 +2024-08-03 21:54:24,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.14 vs. limit=10.0 +2024-08-03 21:54:31,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.28 vs. limit=15.0 +2024-08-03 21:54:38,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=242388.66666666666, ans=0.0 +2024-08-03 21:54:41,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.16 vs. limit=12.0 +2024-08-03 21:54:45,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=242425.33333333334, ans=0.125 +2024-08-03 21:54:48,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=242425.33333333334, ans=10.0 +2024-08-03 21:54:51,044 INFO [train.py:1114] (3/4) Epoch 19, batch 600, loss[loss=0.1815, simple_loss=0.2685, pruned_loss=0.04725, over 13348.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2702, pruned_loss=0.04853, over 2508491.81 frames. ], batch size: 46, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:54:52,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242462.0, ans=0.125 +2024-08-03 21:54:53,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242462.0, ans=0.1 +2024-08-03 21:55:03,305 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.902e+01 1.087e+02 1.240e+02 1.431e+02 2.352e+02, threshold=2.480e+02, percent-clipped=0.0 +2024-08-03 21:55:19,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=242572.0, ans=0.125 +2024-08-03 21:55:22,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=242572.0, ans=0.95 +2024-08-03 21:55:23,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.45 vs. limit=15.0 +2024-08-03 21:55:30,060 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 21:55:32,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=242608.66666666666, ans=0.125 +2024-08-03 21:55:37,872 INFO [train.py:1114] (3/4) Epoch 19, batch 650, loss[loss=0.1768, simple_loss=0.2731, pruned_loss=0.04024, over 13524.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2694, pruned_loss=0.04818, over 2543817.35 frames. 
], batch size: 37, lr: 6.60e-03, grad_scale: 16.0 +2024-08-03 21:56:00,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.85 vs. limit=22.5 +2024-08-03 21:56:03,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.60 vs. limit=12.0 +2024-08-03 21:56:03,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242718.66666666666, ans=0.0 +2024-08-03 21:56:05,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=12.0 +2024-08-03 21:56:13,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242755.33333333334, ans=0.1 +2024-08-03 21:56:19,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=242792.0, ans=0.125 +2024-08-03 21:56:25,346 INFO [train.py:1114] (3/4) Epoch 19, batch 700, loss[loss=0.1577, simple_loss=0.2444, pruned_loss=0.03552, over 13524.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2698, pruned_loss=0.04816, over 2565551.24 frames. ], batch size: 35, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:56:38,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.287e+01 1.160e+02 1.383e+02 1.887e+02 3.094e+02, threshold=2.766e+02, percent-clipped=5.0 +2024-08-03 21:57:01,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=242938.66666666666, ans=0.025 +2024-08-03 21:57:09,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242975.33333333334, ans=0.125 +2024-08-03 21:57:12,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-08-03 21:57:13,134 INFO [train.py:1114] (3/4) Epoch 19, batch 750, loss[loss=0.1821, simple_loss=0.2819, pruned_loss=0.04116, over 13365.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2689, pruned_loss=0.04761, over 2581585.39 frames. ], batch size: 37, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:57:39,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=243122.0, ans=0.0 +2024-08-03 21:58:00,336 INFO [train.py:1114] (3/4) Epoch 19, batch 800, loss[loss=0.1595, simple_loss=0.2424, pruned_loss=0.03826, over 13351.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2691, pruned_loss=0.04769, over 2596561.65 frames. ], batch size: 33, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:58:08,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=243232.0, ans=0.05 +2024-08-03 21:58:10,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.43 vs. 
limit=15.0 +2024-08-03 21:58:12,638 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.835e+01 1.100e+02 1.252e+02 1.465e+02 2.313e+02, threshold=2.504e+02, percent-clipped=0.0 +2024-08-03 21:58:30,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243305.33333333334, ans=0.0 +2024-08-03 21:58:47,350 INFO [train.py:1114] (3/4) Epoch 19, batch 850, loss[loss=0.1969, simple_loss=0.2856, pruned_loss=0.05409, over 13324.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.269, pruned_loss=0.0478, over 2609306.26 frames. ], batch size: 40, lr: 6.59e-03, grad_scale: 32.0 +2024-08-03 21:58:47,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.74 vs. limit=6.0 +2024-08-03 21:59:02,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=243415.33333333334, ans=0.0 +2024-08-03 21:59:07,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243452.0, ans=0.0 +2024-08-03 21:59:10,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=243452.0, ans=0.125 +2024-08-03 21:59:20,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243488.66666666666, ans=0.125 +2024-08-03 21:59:20,464 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=4.96 vs. limit=15.0 +2024-08-03 21:59:24,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243488.66666666666, ans=0.0 +2024-08-03 21:59:28,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=243525.33333333334, ans=0.125 +2024-08-03 21:59:29,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=243525.33333333334, ans=0.1 +2024-08-03 21:59:34,606 INFO [train.py:1114] (3/4) Epoch 19, batch 900, loss[loss=0.1787, simple_loss=0.2531, pruned_loss=0.05215, over 13339.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2692, pruned_loss=0.04799, over 2612071.05 frames. 
], batch size: 33, lr: 6.59e-03, grad_scale: 16.0 +2024-08-03 21:59:48,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.299e+01 1.094e+02 1.385e+02 1.717e+02 2.818e+02, threshold=2.769e+02, percent-clipped=4.0 +2024-08-03 22:00:00,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=243635.33333333334, ans=0.0 +2024-08-03 22:00:02,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243672.0, ans=0.1 +2024-08-03 22:00:02,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243672.0, ans=0.1 +2024-08-03 22:00:08,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=243672.0, ans=0.0 +2024-08-03 22:00:11,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=243708.66666666666, ans=0.125 +2024-08-03 22:00:22,265 INFO [train.py:1114] (3/4) Epoch 19, batch 950, loss[loss=0.1639, simple_loss=0.2511, pruned_loss=0.03834, over 13545.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2698, pruned_loss=0.04821, over 2611839.12 frames. ], batch size: 34, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:00:22,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243745.33333333334, ans=0.1 +2024-08-03 22:00:25,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=243745.33333333334, ans=0.125 +2024-08-03 22:00:30,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243782.0, ans=0.125 +2024-08-03 22:00:34,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243782.0, ans=0.1 +2024-08-03 22:00:38,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=243782.0, ans=0.125 +2024-08-03 22:01:00,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=243892.0, ans=0.04949747468305833 +2024-08-03 22:01:01,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.74 vs. limit=22.5 +2024-08-03 22:01:05,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=243892.0, ans=0.2 +2024-08-03 22:01:07,671 INFO [train.py:1114] (3/4) Epoch 19, batch 1000, loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04737, over 13366.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.271, pruned_loss=0.04878, over 2610847.06 frames. 
], batch size: 35, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:01:23,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.931e+01 1.118e+02 1.271e+02 1.540e+02 2.481e+02, threshold=2.543e+02, percent-clipped=0.0 +2024-08-03 22:01:28,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=244002.0, ans=0.0 +2024-08-03 22:01:35,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244002.0, ans=0.125 +2024-08-03 22:01:40,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.86 vs. limit=15.0 +2024-08-03 22:01:50,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244075.33333333334, ans=0.0 +2024-08-03 22:01:51,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244075.33333333334, ans=0.1 +2024-08-03 22:01:55,436 INFO [train.py:1114] (3/4) Epoch 19, batch 1050, loss[loss=0.1945, simple_loss=0.28, pruned_loss=0.05449, over 13583.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2702, pruned_loss=0.04856, over 2614553.28 frames. ], batch size: 39, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:01:55,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=244112.0, ans=0.1 +2024-08-03 22:02:01,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.21 vs. limit=15.0 +2024-08-03 22:02:28,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=7.02 vs. limit=15.0 +2024-08-03 22:02:42,814 INFO [train.py:1114] (3/4) Epoch 19, batch 1100, loss[loss=0.198, simple_loss=0.283, pruned_loss=0.05647, over 13558.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2701, pruned_loss=0.04853, over 2618844.89 frames. ], batch size: 36, lr: 6.58e-03, grad_scale: 16.0 +2024-08-03 22:02:51,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=244295.33333333334, ans=0.0 +2024-08-03 22:02:58,056 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.403e+01 1.088e+02 1.287e+02 1.607e+02 2.579e+02, threshold=2.574e+02, percent-clipped=1.0 +2024-08-03 22:03:08,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244368.66666666666, ans=0.125 +2024-08-03 22:03:20,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=244442.0, ans=0.125 +2024-08-03 22:03:26,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=244442.0, ans=0.2 +2024-08-03 22:03:29,661 INFO [train.py:1114] (3/4) Epoch 19, batch 1150, loss[loss=0.1855, simple_loss=0.269, pruned_loss=0.05097, over 13558.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.27, pruned_loss=0.04824, over 2617968.13 frames. 
], batch size: 36, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:03:33,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244478.66666666666, ans=0.125 +2024-08-03 22:03:35,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=244478.66666666666, ans=0.2 +2024-08-03 22:03:43,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=244515.33333333334, ans=0.0 +2024-08-03 22:03:59,173 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:04:05,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=244588.66666666666, ans=0.125 +2024-08-03 22:04:12,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244625.33333333334, ans=0.125 +2024-08-03 22:04:17,210 INFO [train.py:1114] (3/4) Epoch 19, batch 1200, loss[loss=0.193, simple_loss=0.2797, pruned_loss=0.05315, over 13576.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2701, pruned_loss=0.048, over 2615367.62 frames. ], batch size: 39, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:04:18,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=244662.0, ans=0.2 +2024-08-03 22:04:30,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.729e+01 1.075e+02 1.205e+02 1.408e+02 2.455e+02, threshold=2.410e+02, percent-clipped=0.0 +2024-08-03 22:04:34,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.96 vs. limit=8.0 +2024-08-03 22:05:01,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.14 vs. limit=15.0 +2024-08-03 22:05:04,355 INFO [train.py:1114] (3/4) Epoch 19, batch 1250, loss[loss=0.1982, simple_loss=0.2823, pruned_loss=0.05708, over 13429.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2702, pruned_loss=0.04794, over 2627333.56 frames. ], batch size: 42, lr: 6.57e-03, grad_scale: 32.0 +2024-08-03 22:05:26,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=244918.66666666666, ans=0.125 +2024-08-03 22:05:31,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=244955.33333333334, ans=0.125 +2024-08-03 22:05:36,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=244955.33333333334, ans=0.125 +2024-08-03 22:05:39,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.27 vs. limit=15.0 +2024-08-03 22:05:40,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-08-03 22:05:52,082 INFO [train.py:1114] (3/4) Epoch 19, batch 1300, loss[loss=0.2151, simple_loss=0.2954, pruned_loss=0.06738, over 12885.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2698, pruned_loss=0.04789, over 2630465.21 frames. 
], batch size: 52, lr: 6.57e-03, grad_scale: 16.0 +2024-08-03 22:05:54,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=245028.66666666666, ans=0.2 +2024-08-03 22:06:07,834 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.978e+01 1.097e+02 1.270e+02 1.535e+02 2.662e+02, threshold=2.541e+02, percent-clipped=5.0 +2024-08-03 22:06:10,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245102.0, ans=0.1 +2024-08-03 22:06:21,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=245138.66666666666, ans=0.125 +2024-08-03 22:06:28,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=245138.66666666666, ans=0.2 +2024-08-03 22:06:32,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-08-03 22:06:35,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=245175.33333333334, ans=0.0 +2024-08-03 22:06:38,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245175.33333333334, ans=0.1 +2024-08-03 22:06:40,450 INFO [train.py:1114] (3/4) Epoch 19, batch 1350, loss[loss=0.1826, simple_loss=0.2688, pruned_loss=0.04819, over 13534.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2693, pruned_loss=0.04761, over 2638142.44 frames. ], batch size: 37, lr: 6.56e-03, grad_scale: 16.0 +2024-08-03 22:07:16,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=245358.66666666666, ans=0.09899494936611666 +2024-08-03 22:07:26,183 INFO [train.py:1114] (3/4) Epoch 19, batch 1400, loss[loss=0.1731, simple_loss=0.2469, pruned_loss=0.04972, over 13252.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2692, pruned_loss=0.04762, over 2642274.76 frames. ], batch size: 31, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:07:39,880 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:07:44,089 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.615e+01 1.110e+02 1.328e+02 1.668e+02 3.835e+02, threshold=2.657e+02, percent-clipped=2.0 +2024-08-03 22:07:46,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=245468.66666666666, ans=0.125 +2024-08-03 22:07:52,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245468.66666666666, ans=0.0 +2024-08-03 22:07:57,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=245505.33333333334, ans=0.0 +2024-08-03 22:08:11,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=6.36 vs. limit=12.0 +2024-08-03 22:08:13,722 INFO [train.py:1114] (3/4) Epoch 19, batch 1450, loss[loss=0.1809, simple_loss=0.2665, pruned_loss=0.04761, over 13426.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2699, pruned_loss=0.0481, over 2641275.38 frames. 
], batch size: 43, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:08:15,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=245578.66666666666, ans=0.125 +2024-08-03 22:08:22,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.88 vs. limit=15.0 +2024-08-03 22:08:31,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.09 vs. limit=10.0 +2024-08-03 22:08:38,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.88 vs. limit=22.5 +2024-08-03 22:08:40,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=245652.0, ans=0.125 +2024-08-03 22:08:42,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=245688.66666666666, ans=0.2 +2024-08-03 22:08:48,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=245688.66666666666, ans=0.125 +2024-08-03 22:08:53,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=245725.33333333334, ans=0.05 +2024-08-03 22:08:53,949 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:08:56,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245725.33333333334, ans=0.0 +2024-08-03 22:09:01,148 INFO [train.py:1114] (3/4) Epoch 19, batch 1500, loss[loss=0.1921, simple_loss=0.2883, pruned_loss=0.048, over 13408.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2694, pruned_loss=0.04747, over 2640762.25 frames. ], batch size: 39, lr: 6.56e-03, grad_scale: 8.0 +2024-08-03 22:09:03,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245762.0, ans=0.0 +2024-08-03 22:09:10,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=245798.66666666666, ans=0.0 +2024-08-03 22:09:16,759 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.464e+01 1.116e+02 1.262e+02 1.580e+02 2.631e+02, threshold=2.524e+02, percent-clipped=0.0 +2024-08-03 22:09:29,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=245835.33333333334, ans=0.0 +2024-08-03 22:09:32,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245872.0, ans=0.1 +2024-08-03 22:09:44,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245908.66666666666, ans=0.125 +2024-08-03 22:09:49,003 INFO [train.py:1114] (3/4) Epoch 19, batch 1550, loss[loss=0.203, simple_loss=0.2943, pruned_loss=0.05589, over 13414.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2695, pruned_loss=0.04775, over 2630951.44 frames. 
], batch size: 41, lr: 6.55e-03, grad_scale: 8.0
+2024-08-03 22:09:49,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=245945.33333333334, ans=0.09899494936611666
+2024-08-03 22:09:53,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=245945.33333333334, ans=0.5
+2024-08-03 22:09:53,976 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.30 vs. limit=15.0
+2024-08-03 22:09:57,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=245982.0, ans=0.025
+2024-08-03 22:10:04,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=245982.0, ans=0.0
+2024-08-03 22:10:06,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=245982.0, ans=0.0
+2024-08-03 22:10:14,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0
+2024-08-03 22:10:19,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=246055.33333333334, ans=0.125
+2024-08-03 22:10:29,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=246092.0, ans=0.2
+2024-08-03 22:10:30,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=246092.0, ans=0.125
+2024-08-03 22:10:36,232 INFO [train.py:1114] (3/4) Epoch 19, batch 1600, loss[loss=0.1877, simple_loss=0.281, pruned_loss=0.04716, over 13586.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.269, pruned_loss=0.04756, over 2623756.64 frames. ], batch size: 39, lr: 6.55e-03, grad_scale: 16.0
+2024-08-03 22:10:45,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246165.33333333334, ans=0.125
+2024-08-03 22:10:47,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=246165.33333333334, ans=0.2
+2024-08-03 22:10:51,523 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.109e+02 1.298e+02 1.770e+02 3.045e+02, threshold=2.595e+02, percent-clipped=6.0
+2024-08-03 22:11:02,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246202.0, ans=0.1
+2024-08-03 22:11:11,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=246238.66666666666, ans=0.025
+2024-08-03 22:11:16,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.66 vs. limit=15.0
+2024-08-03 22:11:20,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246275.33333333334, ans=0.1
+2024-08-03 22:11:23,665 INFO [train.py:1114] (3/4) Epoch 19, batch 1650, loss[loss=0.1789, simple_loss=0.2803, pruned_loss=0.03877, over 13322.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2695, pruned_loss=0.04785, over 2620753.76 frames. ], batch size: 40, lr: 6.55e-03, grad_scale: 16.0
+2024-08-03 22:11:24,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=246312.0, ans=0.2
+2024-08-03 22:12:01,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=246458.66666666666, ans=0.125
+2024-08-03 22:12:02,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246458.66666666666, ans=0.125
+2024-08-03 22:12:11,720 INFO [train.py:1114] (3/4) Epoch 19, batch 1700, loss[loss=0.1676, simple_loss=0.2415, pruned_loss=0.04686, over 13254.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2691, pruned_loss=0.04772, over 2629819.28 frames. ], batch size: 31, lr: 6.55e-03, grad_scale: 16.0
+2024-08-03 22:12:11,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246495.33333333334, ans=0.125
+2024-08-03 22:12:19,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246495.33333333334, ans=0.125
+2024-08-03 22:12:27,025 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.554e+01 1.124e+02 1.376e+02 1.723e+02 2.933e+02, threshold=2.751e+02, percent-clipped=2.0
+2024-08-03 22:12:31,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246568.66666666666, ans=0.125
+2024-08-03 22:12:46,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=246605.33333333334, ans=0.0
+2024-08-03 22:12:59,206 INFO [train.py:1114] (3/4) Epoch 19, batch 1750, loss[loss=0.1649, simple_loss=0.2401, pruned_loss=0.04486, over 13558.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2688, pruned_loss=0.04766, over 2632206.80 frames. ], batch size: 31, lr: 6.54e-03, grad_scale: 16.0
+2024-08-03 22:13:02,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=246678.66666666666, ans=0.07
+2024-08-03 22:13:19,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246752.0, ans=0.125
+2024-08-03 22:13:29,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=12.0
+2024-08-03 22:13:37,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=246825.33333333334, ans=0.0
+2024-08-03 22:13:41,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.41 vs. limit=22.5
+2024-08-03 22:13:46,404 INFO [train.py:1114] (3/4) Epoch 19, batch 1800, loss[loss=0.2018, simple_loss=0.2916, pruned_loss=0.05602, over 13565.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2687, pruned_loss=0.04746, over 2633793.04 frames. ], batch size: 38, lr: 6.54e-03, grad_scale: 16.0
+2024-08-03 22:13:58,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.76 vs. limit=15.0
+2024-08-03 22:14:02,232 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.153e+02 1.366e+02 1.717e+02 2.450e+02, threshold=2.732e+02, percent-clipped=0.0
+2024-08-03 22:14:04,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.54 vs. limit=10.0
+2024-08-03 22:14:28,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.98 vs. limit=15.0
+2024-08-03 22:14:30,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=247008.66666666666, ans=0.125
+2024-08-03 22:14:32,247 INFO [train.py:1114] (3/4) Epoch 19, batch 1850, loss[loss=0.1815, simple_loss=0.2799, pruned_loss=0.04161, over 13397.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2689, pruned_loss=0.04754, over 2635756.12 frames. ], batch size: 39, lr: 6.54e-03, grad_scale: 16.0
+2024-08-03 22:14:43,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=247082.0, ans=0.07
+2024-08-03 22:14:44,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=247082.0, ans=0.125
+2024-08-03 22:15:05,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.95 vs. limit=15.0
+2024-08-03 22:15:12,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=247192.0, ans=0.2
+2024-08-03 22:15:19,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247228.66666666666, ans=0.1
+2024-08-03 22:15:19,801 INFO [train.py:1114] (3/4) Epoch 19, batch 1900, loss[loss=0.2038, simple_loss=0.295, pruned_loss=0.05627, over 13333.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.27, pruned_loss=0.04832, over 2638531.65 frames. ], batch size: 40, lr: 6.54e-03, grad_scale: 16.0
+2024-08-03 22:15:34,866 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.927e+01 1.140e+02 1.255e+02 1.731e+02 2.677e+02, threshold=2.509e+02, percent-clipped=0.0
+2024-08-03 22:15:38,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=247302.0, ans=0.125
+2024-08-03 22:15:41,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=247302.0, ans=0.0
+2024-08-03 22:15:45,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=247302.0, ans=0.1
+2024-08-03 22:15:56,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=247338.66666666666, ans=0.0
+2024-08-03 22:16:07,334 INFO [train.py:1114] (3/4) Epoch 19, batch 1950, loss[loss=0.1895, simple_loss=0.2742, pruned_loss=0.05242, over 13552.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2702, pruned_loss=0.04792, over 2645092.29 frames. ], batch size: 36, lr: 6.53e-03, grad_scale: 16.0
+2024-08-03 22:16:30,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247485.33333333334, ans=0.125
+2024-08-03 22:16:30,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247485.33333333334, ans=0.125
+2024-08-03 22:16:35,380 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:16:49,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=247558.66666666666, ans=0.015
+2024-08-03 22:16:55,383 INFO [train.py:1114] (3/4) Epoch 19, batch 2000, loss[loss=0.157, simple_loss=0.2416, pruned_loss=0.03626, over 13544.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.271, pruned_loss=0.04824, over 2635289.74 frames. ], batch size: 31, lr: 6.53e-03, grad_scale: 32.0
+2024-08-03 22:17:02,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247595.33333333334, ans=0.125
+2024-08-03 22:17:03,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=247595.33333333334, ans=0.2
+2024-08-03 22:17:05,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247632.0, ans=0.0
+2024-08-03 22:17:11,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=247632.0, ans=0.125
+2024-08-03 22:17:13,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.093e+02 1.300e+02 1.628e+02 2.543e+02, threshold=2.600e+02, percent-clipped=1.0
+2024-08-03 22:17:15,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=247668.66666666666, ans=0.0
+2024-08-03 22:17:21,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=247668.66666666666, ans=0.2
+2024-08-03 22:17:28,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=247705.33333333334, ans=0.0
+2024-08-03 22:17:42,957 INFO [train.py:1114] (3/4) Epoch 19, batch 2050, loss[loss=0.1473, simple_loss=0.2304, pruned_loss=0.03206, over 13422.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.27, pruned_loss=0.04813, over 2632227.42 frames. ], batch size: 32, lr: 6.53e-03, grad_scale: 32.0
+2024-08-03 22:17:45,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=247778.66666666666, ans=0.125
+2024-08-03 22:17:48,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=247778.66666666666, ans=0.07
+2024-08-03 22:17:59,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. limit=15.0
+2024-08-03 22:18:17,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=15.0
+2024-08-03 22:18:30,932 INFO [train.py:1114] (3/4) Epoch 19, batch 2100, loss[loss=0.19, simple_loss=0.2764, pruned_loss=0.05177, over 13545.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2689, pruned_loss=0.04767, over 2638319.85 frames. ], batch size: 37, lr: 6.53e-03, grad_scale: 32.0
+2024-08-03 22:18:32,331 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=4.46 vs. limit=15.0
+2024-08-03 22:18:32,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.00 vs. limit=15.0
+2024-08-03 22:18:32,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247962.0, ans=0.1
+2024-08-03 22:18:40,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.96 vs. limit=15.0
+2024-08-03 22:18:46,395 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.343e+01 1.123e+02 1.234e+02 1.440e+02 2.542e+02, threshold=2.468e+02, percent-clipped=0.0
+2024-08-03 22:18:46,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=247998.66666666666, ans=0.125
+2024-08-03 22:18:48,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248035.33333333334, ans=0.125
+2024-08-03 22:18:58,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248072.0, ans=0.1
+2024-08-03 22:18:59,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=248072.0, ans=0.2
+2024-08-03 22:19:10,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=248108.66666666666, ans=0.0
+2024-08-03 22:19:13,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0
+2024-08-03 22:19:16,683 INFO [train.py:1114] (3/4) Epoch 19, batch 2150, loss[loss=0.1796, simple_loss=0.2758, pruned_loss=0.04171, over 13571.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2681, pruned_loss=0.04731, over 2647137.18 frames. ], batch size: 36, lr: 6.52e-03, grad_scale: 32.0
+2024-08-03 22:19:27,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.14 vs. limit=10.0
+2024-08-03 22:19:34,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248182.0, ans=0.1
+2024-08-03 22:19:45,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=248218.66666666666, ans=0.0
+2024-08-03 22:19:58,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248292.0, ans=0.1
+2024-08-03 22:20:06,563 INFO [train.py:1114] (3/4) Epoch 19, batch 2200, loss[loss=0.1953, simple_loss=0.2845, pruned_loss=0.05307, over 13387.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2685, pruned_loss=0.04739, over 2645419.94 frames. ], batch size: 39, lr: 6.52e-03, grad_scale: 16.0
+2024-08-03 22:20:16,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=248365.33333333334, ans=0.5
+2024-08-03 22:20:22,937 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.876e+01 1.107e+02 1.433e+02 1.774e+02 2.441e+02, threshold=2.865e+02, percent-clipped=0.0
+2024-08-03 22:20:27,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=248402.0, ans=0.0
+2024-08-03 22:20:34,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=248438.66666666666, ans=0.0
+2024-08-03 22:20:38,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.95 vs. limit=6.0
+2024-08-03 22:20:53,613 INFO [train.py:1114] (3/4) Epoch 19, batch 2250, loss[loss=0.1931, simple_loss=0.2886, pruned_loss=0.04881, over 13359.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2685, pruned_loss=0.04728, over 2641866.42 frames. ], batch size: 37, lr: 6.52e-03, grad_scale: 16.0
+2024-08-03 22:21:03,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=248548.66666666666, ans=0.125
+2024-08-03 22:21:12,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=248585.33333333334, ans=6.0
+2024-08-03 22:21:19,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248585.33333333334, ans=0.125
+2024-08-03 22:21:39,615 INFO [train.py:1114] (3/4) Epoch 19, batch 2300, loss[loss=0.1661, simple_loss=0.2522, pruned_loss=0.04, over 13566.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2676, pruned_loss=0.0471, over 2637769.29 frames. ], batch size: 33, lr: 6.52e-03, grad_scale: 16.0
+2024-08-03 22:21:40,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=248695.33333333334, ans=0.125
+2024-08-03 22:21:49,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248732.0, ans=0.125
+2024-08-03 22:21:58,047 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.536e+01 1.062e+02 1.236e+02 1.586e+02 2.214e+02, threshold=2.472e+02, percent-clipped=0.0
+2024-08-03 22:21:58,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. limit=15.0
+2024-08-03 22:22:00,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.77 vs. limit=15.0
+2024-08-03 22:22:00,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.64 vs. limit=15.0
+2024-08-03 22:22:15,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=248805.33333333334, ans=0.125
+2024-08-03 22:22:20,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=248842.0, ans=0.125
+2024-08-03 22:22:27,299 INFO [train.py:1114] (3/4) Epoch 19, batch 2350, loss[loss=0.1891, simple_loss=0.2753, pruned_loss=0.05143, over 13544.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2678, pruned_loss=0.04715, over 2640844.07 frames. ], batch size: 38, lr: 6.52e-03, grad_scale: 16.0
+2024-08-03 22:22:37,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=248915.33333333334, ans=0.025
+2024-08-03 22:22:42,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=248915.33333333334, ans=0.0
+2024-08-03 22:22:52,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=248952.0, ans=0.2
+2024-08-03 22:22:55,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.88 vs. limit=6.0
+2024-08-03 22:22:56,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=248988.66666666666, ans=0.0
+2024-08-03 22:22:58,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248988.66666666666, ans=0.1
+2024-08-03 22:23:06,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0
+2024-08-03 22:23:08,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=249025.33333333334, ans=0.125
+2024-08-03 22:23:13,929 INFO [train.py:1114] (3/4) Epoch 19, batch 2400, loss[loss=0.164, simple_loss=0.2509, pruned_loss=0.0385, over 13530.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2682, pruned_loss=0.04723, over 2642673.73 frames. ], batch size: 35, lr: 6.51e-03, grad_scale: 32.0
+2024-08-03 22:23:19,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=249062.0, ans=0.0
+2024-08-03 22:23:23,241 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:23:23,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=249098.66666666666, ans=0.2
+2024-08-03 22:23:31,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.983e+01 1.101e+02 1.278e+02 1.688e+02 2.593e+02, threshold=2.556e+02, percent-clipped=1.0
+2024-08-03 22:23:50,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=249172.0, ans=0.07
+2024-08-03 22:24:01,859 INFO [train.py:1114] (3/4) Epoch 19, batch 2450, loss[loss=0.1932, simple_loss=0.2826, pruned_loss=0.05185, over 13359.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2691, pruned_loss=0.04757, over 2633696.86 frames. ], batch size: 37, lr: 6.51e-03, grad_scale: 16.0
+2024-08-03 22:24:19,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249318.66666666666, ans=0.0
+2024-08-03 22:24:20,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.14 vs. limit=22.5
+2024-08-03 22:24:37,735 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.80 vs. limit=15.0
+2024-08-03 22:24:49,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=249428.66666666666, ans=0.0
+2024-08-03 22:24:49,892 INFO [train.py:1114] (3/4) Epoch 19, batch 2500, loss[loss=0.1783, simple_loss=0.2757, pruned_loss=0.04039, over 13392.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2689, pruned_loss=0.04745, over 2637642.31 frames. ], batch size: 39, lr: 6.51e-03, grad_scale: 16.0
+2024-08-03 22:25:04,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=249465.33333333334, ans=0.2
+2024-08-03 22:25:06,708 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.566e+01 1.106e+02 1.263e+02 1.596e+02 2.870e+02, threshold=2.527e+02, percent-clipped=4.0
+2024-08-03 22:25:09,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=249502.0, ans=0.125
+2024-08-03 22:25:32,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=249575.33333333334, ans=0.125
+2024-08-03 22:25:34,330 INFO [train.py:1114] (3/4) Epoch 19, batch 2550, loss[loss=0.1619, simple_loss=0.2399, pruned_loss=0.04189, over 13509.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2691, pruned_loss=0.04762, over 2638735.24 frames. ], batch size: 31, lr: 6.51e-03, grad_scale: 16.0
+2024-08-03 22:25:57,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=249685.33333333334, ans=0.95
+2024-08-03 22:26:14,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.70 vs. limit=15.0
+2024-08-03 22:26:18,053 INFO [train.py:1114] (3/4) Epoch 19, batch 2600, loss[loss=0.1769, simple_loss=0.2621, pruned_loss=0.04587, over 13583.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2696, pruned_loss=0.04782, over 2637347.68 frames. ], batch size: 36, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:26:23,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=249795.33333333334, ans=0.0
+2024-08-03 22:26:29,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=249832.0, ans=0.125
+2024-08-03 22:26:34,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=249832.0, ans=0.0
+2024-08-03 22:26:36,436 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.660e+01 1.133e+02 1.412e+02 1.915e+02 3.004e+02, threshold=2.824e+02, percent-clipped=7.0
+2024-08-03 22:26:41,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=249868.66666666666, ans=0.5
+2024-08-03 22:26:42,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=249868.66666666666, ans=0.0
+2024-08-03 22:26:51,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.78 vs. limit=15.0
+2024-08-03 22:26:55,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.27 vs. limit=22.5
+2024-08-03 22:26:58,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.15 vs. limit=22.5
+2024-08-03 22:27:03,492 INFO [train.py:1114] (3/4) Epoch 19, batch 2650, loss[loss=0.1985, simple_loss=0.2839, pruned_loss=0.05652, over 13324.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2702, pruned_loss=0.04809, over 2640801.44 frames. ], batch size: 46, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:27:25,426 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:27:47,044 INFO [train.py:1114] (3/4) Epoch 19, batch 2700, loss[loss=0.1807, simple_loss=0.2731, pruned_loss=0.04418, over 13562.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2703, pruned_loss=0.04801, over 2638180.83 frames. ], batch size: 40, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:28:03,580 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.168e+01 1.091e+02 1.247e+02 1.559e+02 2.482e+02, threshold=2.495e+02, percent-clipped=0.0
+2024-08-03 22:28:14,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=250272.0, ans=0.2
+2024-08-03 22:28:26,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0
+2024-08-03 22:28:30,894 INFO [train.py:1114] (3/4) Epoch 19, batch 2750, loss[loss=0.1746, simple_loss=0.2557, pruned_loss=0.04674, over 13342.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2697, pruned_loss=0.04834, over 2635596.01 frames. ], batch size: 34, lr: 6.50e-03, grad_scale: 16.0
+2024-08-03 22:28:50,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250418.66666666666, ans=0.125
+2024-08-03 22:28:54,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250418.66666666666, ans=0.1
+2024-08-03 22:29:08,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=250492.0, ans=0.0
+2024-08-03 22:29:15,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250528.66666666666, ans=0.1
+2024-08-03 22:29:15,862 INFO [train.py:1114] (3/4) Epoch 19, batch 2800, loss[loss=0.2224, simple_loss=0.297, pruned_loss=0.07393, over 9106.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2701, pruned_loss=0.04856, over 2626906.86 frames. ], batch size: 97, lr: 6.49e-03, grad_scale: 32.0
+2024-08-03 22:29:17,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=15.0
+2024-08-03 22:29:20,868 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0
+2024-08-03 22:29:22,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.21 vs. limit=15.0
+2024-08-03 22:29:23,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=250528.66666666666, ans=0.125
+2024-08-03 22:29:27,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=250565.33333333334, ans=0.125
+2024-08-03 22:29:38,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.968e+01 1.091e+02 1.270e+02 1.499e+02 3.648e+02, threshold=2.541e+02, percent-clipped=3.0
+2024-08-03 22:29:41,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=250602.0, ans=0.125
+2024-08-03 22:30:07,681 INFO [train.py:1114] (3/4) Epoch 19, batch 2850, loss[loss=0.159, simple_loss=0.2538, pruned_loss=0.03215, over 13361.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2697, pruned_loss=0.04826, over 2621141.83 frames. ], batch size: 35, lr: 6.49e-03, grad_scale: 32.0
+2024-08-03 22:30:15,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=250748.66666666666, ans=0.2
+2024-08-03 22:30:16,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250748.66666666666, ans=0.125
+2024-08-03 22:30:20,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=250748.66666666666, ans=0.2
+2024-08-03 22:30:47,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250858.66666666666, ans=0.125
+2024-08-03 22:30:48,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250858.66666666666, ans=0.1
+2024-08-03 22:30:52,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=250858.66666666666, ans=0.2
+2024-08-03 22:30:54,439 INFO [train.py:1114] (3/4) Epoch 19, batch 2900, loss[loss=0.1774, simple_loss=0.2645, pruned_loss=0.04514, over 13363.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2707, pruned_loss=0.04837, over 2631604.34 frames. ], batch size: 36, lr: 6.49e-03, grad_scale: 32.0
+2024-08-03 22:31:11,724 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.871e+01 1.142e+02 1.452e+02 2.110e+02 3.268e+02, threshold=2.903e+02, percent-clipped=11.0
+2024-08-03 22:31:13,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.49 vs. limit=22.5
+2024-08-03 22:31:22,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=251005.33333333334, ans=0.2
+2024-08-03 22:31:35,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251042.0, ans=0.125
+2024-08-03 22:31:39,960 INFO [train.py:1114] (3/4) Epoch 19, batch 2950, loss[loss=0.1693, simple_loss=0.2527, pruned_loss=0.04293, over 13321.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2694, pruned_loss=0.04806, over 2629982.92 frames. ], batch size: 34, lr: 6.49e-03, grad_scale: 16.0
+2024-08-03 22:31:40,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=251078.66666666666, ans=0.2
+2024-08-03 22:31:42,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.00 vs. limit=15.0
+2024-08-03 22:31:48,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=251115.33333333334, ans=0.05
+2024-08-03 22:31:52,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. limit=15.0
+2024-08-03 22:31:53,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=251115.33333333334, ans=0.02
+2024-08-03 22:31:56,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=251152.0, ans=0.0
+2024-08-03 22:32:00,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=251152.0, ans=0.2
+2024-08-03 22:32:05,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.71 vs. limit=15.0
+2024-08-03 22:32:16,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=22.5
+2024-08-03 22:32:20,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251225.33333333334, ans=0.1
+2024-08-03 22:32:23,257 INFO [train.py:1114] (3/4) Epoch 19, batch 3000, loss[loss=0.1588, simple_loss=0.2542, pruned_loss=0.03173, over 13542.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2695, pruned_loss=0.04799, over 2630114.20 frames. ], batch size: 37, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:32:23,258 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 22:32:32,876 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.1928, 3.5116, 3.4594, 1.7860], device='cuda:3')
+2024-08-03 22:32:34,409 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.169, simple_loss=0.2683, pruned_loss=0.03491, over 944034.00 frames.
+2024-08-03 22:32:34,410 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 22:32:44,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=251298.66666666666, ans=0.04949747468305833
+2024-08-03 22:32:51,805 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.029e+01 1.088e+02 1.228e+02 1.356e+02 2.065e+02, threshold=2.455e+02, percent-clipped=0.0
+2024-08-03 22:32:51,909 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:32:57,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=251335.33333333334, ans=0.0
+2024-08-03 22:33:00,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=251372.0, ans=0.0
+2024-08-03 22:33:00,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=251372.0, ans=0.2
+2024-08-03 22:33:02,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.94 vs. limit=15.0
+2024-08-03 22:33:12,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=251408.66666666666, ans=0.125
+2024-08-03 22:33:17,983 INFO [train.py:1114] (3/4) Epoch 19, batch 3050, loss[loss=0.1774, simple_loss=0.2706, pruned_loss=0.04212, over 13540.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.27, pruned_loss=0.04818, over 2627341.08 frames. ], batch size: 35, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:33:35,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=251518.66666666666, ans=0.5
+2024-08-03 22:33:37,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=251518.66666666666, ans=0.125
+2024-08-03 22:33:49,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=251555.33333333334, ans=0.125
+2024-08-03 22:33:51,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=251555.33333333334, ans=0.5
+2024-08-03 22:33:58,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=251592.0, ans=0.2
+2024-08-03 22:33:59,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.57 vs. limit=22.5
+2024-08-03 22:34:01,090 INFO [train.py:1114] (3/4) Epoch 19, batch 3100, loss[loss=0.1919, simple_loss=0.289, pruned_loss=0.04742, over 13332.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2703, pruned_loss=0.04821, over 2627206.86 frames. ], batch size: 46, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:34:06,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251628.66666666666, ans=0.1
+2024-08-03 22:34:07,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=251628.66666666666, ans=0.125
+2024-08-03 22:34:08,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=251665.33333333334, ans=0.025
+2024-08-03 22:34:19,145 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.004e+01 1.074e+02 1.205e+02 1.545e+02 4.065e+02, threshold=2.411e+02, percent-clipped=2.0
+2024-08-03 22:34:46,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=251812.0, ans=0.125
+2024-08-03 22:34:46,861 INFO [train.py:1114] (3/4) Epoch 19, batch 3150, loss[loss=0.202, simple_loss=0.2867, pruned_loss=0.05866, over 13102.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.27, pruned_loss=0.04808, over 2629250.35 frames. ], batch size: 48, lr: 6.48e-03, grad_scale: 16.0
+2024-08-03 22:35:07,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=251885.33333333334, ans=0.125
+2024-08-03 22:35:14,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0
+2024-08-03 22:35:18,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=251922.0, ans=0.0
+2024-08-03 22:35:30,190 INFO [train.py:1114] (3/4) Epoch 19, batch 3200, loss[loss=0.1938, simple_loss=0.2858, pruned_loss=0.05088, over 13544.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2694, pruned_loss=0.04795, over 2636000.56 frames. ], batch size: 37, lr: 6.48e-03, grad_scale: 32.0
+2024-08-03 22:35:32,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=251995.33333333334, ans=0.0
+2024-08-03 22:35:47,280 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.593e+01 1.152e+02 1.475e+02 1.954e+02 2.995e+02, threshold=2.949e+02, percent-clipped=9.0
+2024-08-03 22:35:47,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=252068.66666666666, ans=0.025
+2024-08-03 22:35:54,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=252068.66666666666, ans=0.0
+2024-08-03 22:36:09,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=252142.0, ans=10.0
+2024-08-03 22:36:13,784 INFO [train.py:1114] (3/4) Epoch 19, batch 3250, loss[loss=0.186, simple_loss=0.2731, pruned_loss=0.04948, over 13386.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2701, pruned_loss=0.04817, over 2639598.32 frames. ], batch size: 38, lr: 6.47e-03, grad_scale: 32.0
+2024-08-03 22:36:16,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252178.66666666666, ans=0.125
+2024-08-03 22:36:17,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252178.66666666666, ans=0.125
+2024-08-03 22:36:26,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=252215.33333333334, ans=0.0
+2024-08-03 22:36:26,783 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:36:28,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=252215.33333333334, ans=0.07
+2024-08-03 22:36:34,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252252.0, ans=0.125
+2024-08-03 22:36:49,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0
+2024-08-03 22:36:57,954 INFO [train.py:1114] (3/4) Epoch 19, batch 3300, loss[loss=0.2013, simple_loss=0.2925, pruned_loss=0.05508, over 12820.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2691, pruned_loss=0.04792, over 2640611.59 frames. ], batch size: 52, lr: 6.47e-03, grad_scale: 32.0
+2024-08-03 22:37:14,956 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.912e+01 1.101e+02 1.274e+02 1.526e+02 2.579e+02, threshold=2.548e+02, percent-clipped=0.0
+2024-08-03 22:37:19,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.89 vs. limit=10.0
+2024-08-03 22:37:28,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=252472.0, ans=0.0
+2024-08-03 22:37:40,616 INFO [train.py:1114] (3/4) Epoch 19, batch 3350, loss[loss=0.1964, simple_loss=0.2805, pruned_loss=0.05619, over 13062.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2703, pruned_loss=0.04815, over 2630723.06 frames. ], batch size: 48, lr: 6.47e-03, grad_scale: 32.0
+2024-08-03 22:37:45,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=252545.33333333334, ans=0.0
+2024-08-03 22:37:45,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=252545.33333333334, ans=0.5
+2024-08-03 22:37:52,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=252582.0, ans=0.125
+2024-08-03 22:38:01,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=252618.66666666666, ans=0.025
+2024-08-03 22:38:09,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=252655.33333333334, ans=0.04949747468305833
+2024-08-03 22:38:23,950 INFO [train.py:1114] (3/4) Epoch 19, batch 3400, loss[loss=0.1697, simple_loss=0.247, pruned_loss=0.04613, over 13545.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.04826, over 2626261.10 frames. ], batch size: 31, lr: 6.47e-03, grad_scale: 16.0
+2024-08-03 22:38:30,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=252728.66666666666, ans=0.0
+2024-08-03 22:38:32,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=252765.33333333334, ans=0.035
+2024-08-03 22:38:39,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252765.33333333334, ans=0.125
+2024-08-03 22:38:41,985 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.435e+01 1.094e+02 1.256e+02 1.561e+02 2.442e+02, threshold=2.511e+02, percent-clipped=0.0
+2024-08-03 22:38:43,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252802.0, ans=0.125
+2024-08-03 22:38:45,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=252802.0, ans=0.0
+2024-08-03 22:39:07,296 INFO [train.py:1114] (3/4) Epoch 19, batch 3450, loss[loss=0.1946, simple_loss=0.2866, pruned_loss=0.05126, over 12876.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2695, pruned_loss=0.04805, over 2629191.69 frames. ], batch size: 52, lr: 6.46e-03, grad_scale: 16.0
+2024-08-03 22:39:09,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=252912.0, ans=0.125
+2024-08-03 22:39:12,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.92 vs. limit=6.0
+2024-08-03 22:39:14,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=252912.0, ans=0.0
+2024-08-03 22:39:17,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=252948.66666666666, ans=0.09899494936611666
+2024-08-03 22:39:21,183 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.67 vs. limit=22.5
+2024-08-03 22:39:21,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252948.66666666666, ans=0.125
+2024-08-03 22:39:33,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=253022.0, ans=0.2
+2024-08-03 22:39:37,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=253022.0, ans=0.0
+2024-08-03 22:39:46,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=253058.66666666666, ans=0.0
+2024-08-03 22:39:50,109 INFO [train.py:1114] (3/4) Epoch 19, batch 3500, loss[loss=0.1796, simple_loss=0.2578, pruned_loss=0.05067, over 13520.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2687, pruned_loss=0.04792, over 2630660.99 frames. ], batch size: 34, lr: 6.46e-03, grad_scale: 16.0
+2024-08-03 22:40:01,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.89 vs. limit=15.0
+2024-08-03 22:40:04,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253132.0, ans=0.125
+2024-08-03 22:40:07,937 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.416e+01 1.103e+02 1.254e+02 1.609e+02 3.004e+02, threshold=2.508e+02, percent-clipped=2.0
+2024-08-03 22:40:09,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253168.66666666666, ans=0.0
+2024-08-03 22:40:15,323 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.36 vs. limit=6.0
+2024-08-03 22:40:16,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=253205.33333333334, ans=0.125
+2024-08-03 22:40:19,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=253205.33333333334, ans=0.0
+2024-08-03 22:40:22,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=253205.33333333334, ans=0.015
+2024-08-03 22:40:28,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=253242.0, ans=0.125
+2024-08-03 22:40:33,553 INFO [train.py:1114] (3/4) Epoch 19, batch 3550, loss[loss=0.1908, simple_loss=0.2766, pruned_loss=0.05253, over 12706.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2707, pruned_loss=0.04891, over 2628902.62 frames. ], batch size: 59, lr: 6.46e-03, grad_scale: 16.0
+2024-08-03 22:40:48,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=253315.33333333334, ans=0.0
+2024-08-03 22:40:55,183 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.97 vs. limit=15.0
+2024-08-03 22:41:02,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=253388.66666666666, ans=0.025
+2024-08-03 22:41:17,601 INFO [train.py:1114] (3/4) Epoch 19, batch 3600, loss[loss=0.2247, simple_loss=0.2969, pruned_loss=0.0762, over 9683.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2753, pruned_loss=0.05258, over 2487989.46 frames. ], batch size: 97, lr: 6.46e-03, grad_scale: 32.0
+2024-08-03 22:41:25,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=253498.66666666666, ans=0.2
+2024-08-03 22:41:35,765 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.196e+02 1.277e+02 1.470e+02 2.167e+02, threshold=2.555e+02, percent-clipped=0.0
+2024-08-03 22:41:42,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253572.0, ans=0.1
+2024-08-03 22:41:46,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=253572.0, ans=0.0
+2024-08-03 22:42:35,193 INFO [train.py:1114] (3/4) Epoch 20, batch 0, loss[loss=0.1729, simple_loss=0.2589, pruned_loss=0.0434, over 13327.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2589, pruned_loss=0.0434, over 13327.00 frames. ], batch size: 33, lr: 6.29e-03, grad_scale: 32.0
+2024-08-03 22:42:35,194 INFO [train.py:1137] (3/4) Computing validation loss
+2024-08-03 22:42:40,122 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5925, 2.8491, 2.5881, 2.7736], device='cuda:3')
+2024-08-03 22:42:45,197 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1683, simple_loss=0.2688, pruned_loss=0.0339, over 944034.00 frames.
+2024-08-03 22:42:45,198 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB
+2024-08-03 22:42:55,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253649.0, ans=0.125
+2024-08-03 22:43:04,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253685.66666666666, ans=0.125
+2024-08-03 22:43:24,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253759.0, ans=0.0
+2024-08-03 22:43:30,968 INFO [train.py:1114] (3/4) Epoch 20, batch 50, loss[loss=0.1393, simple_loss=0.2273, pruned_loss=0.02565, over 13429.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2721, pruned_loss=0.04915, over 577871.88 frames. ], batch size: 32, lr: 6.29e-03, grad_scale: 32.0
+2024-08-03 22:43:38,502 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.44 vs. limit=6.0
+2024-08-03 22:43:39,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=253795.66666666666, ans=0.125
+2024-08-03 22:44:02,673 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.939e+01 1.083e+02 1.261e+02 1.490e+02 2.691e+02, threshold=2.522e+02, percent-clipped=1.0
+2024-08-03 22:44:09,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.78 vs. limit=15.0
+2024-08-03 22:44:12,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.14 vs. limit=15.0
+2024-08-03 22:44:19,864 INFO [train.py:1114] (3/4) Epoch 20, batch 100, loss[loss=0.1673, simple_loss=0.2554, pruned_loss=0.03962, over 13541.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.271, pruned_loss=0.04793, over 1024674.36 frames. ], batch size: 35, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:44:20,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=253979.0, ans=0.2
+2024-08-03 22:44:21,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.34 vs. limit=6.0
+2024-08-03 22:44:31,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=254015.66666666666, ans=0.0
+2024-08-03 22:44:42,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=254052.33333333334, ans=0.125
+2024-08-03 22:44:58,485 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:45:01,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=254125.66666666666, ans=0.025
+2024-08-03 22:45:06,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=254162.33333333334, ans=0.125
+2024-08-03 22:45:06,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=254162.33333333334, ans=0.125
+2024-08-03 22:45:07,151 INFO [train.py:1114] (3/4) Epoch 20, batch 150, loss[loss=0.1649, simple_loss=0.2391, pruned_loss=0.0454, over 13407.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2696, pruned_loss=0.04743, over 1386386.02 frames. ], batch size: 32, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:45:09,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.92 vs. limit=22.5
+2024-08-03 22:45:13,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=254162.33333333334, ans=0.125
+2024-08-03 22:45:23,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=254199.0, ans=0.125
+2024-08-03 22:45:29,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=254235.66666666666, ans=0.0
+2024-08-03 22:45:35,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.685e+01 1.074e+02 1.304e+02 1.730e+02 2.668e+02, threshold=2.608e+02, percent-clipped=1.0
+2024-08-03 22:45:36,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=254272.33333333334, ans=0.0
+2024-08-03 22:45:38,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.24 vs. limit=22.5
+2024-08-03 22:45:52,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=254309.0, ans=0.0
+2024-08-03 22:45:54,487 INFO [train.py:1114] (3/4) Epoch 20, batch 200, loss[loss=0.2068, simple_loss=0.2968, pruned_loss=0.05846, over 12570.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2682, pruned_loss=0.04689, over 1664334.03 frames. ], batch size: 58, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:46:09,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=254382.33333333334, ans=0.0
+2024-08-03 22:46:09,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.77 vs. limit=15.0
+2024-08-03 22:46:11,981 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:46:15,688 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-08-03 22:46:19,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254419.0, ans=0.1
+2024-08-03 22:46:40,095 INFO [train.py:1114] (3/4) Epoch 20, batch 250, loss[loss=0.1962, simple_loss=0.2829, pruned_loss=0.05477, over 13294.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2681, pruned_loss=0.04705, over 1883339.53 frames. ], batch size: 46, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:46:49,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254565.66666666666, ans=0.125
+2024-08-03 22:46:56,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254565.66666666666, ans=0.1
+2024-08-03 22:47:05,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=254602.33333333334, ans=0.1
+2024-08-03 22:47:10,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.021e+01 1.100e+02 1.239e+02 1.581e+02 3.543e+02, threshold=2.478e+02, percent-clipped=3.0
+2024-08-03 22:47:23,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254675.66666666666, ans=0.1
+2024-08-03 22:47:27,292 INFO [train.py:1114] (3/4) Epoch 20, batch 300, loss[loss=0.2036, simple_loss=0.2932, pruned_loss=0.05704, over 13434.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2675, pruned_loss=0.04683, over 2050018.49 frames. ], batch size: 42, lr: 6.28e-03, grad_scale: 16.0
+2024-08-03 22:47:48,786 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.95 vs. limit=22.5
+2024-08-03 22:48:00,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=254822.33333333334, ans=0.0
+2024-08-03 22:48:03,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=254822.33333333334, ans=0.0
+2024-08-03 22:48:05,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0
+2024-08-03 22:48:15,258 INFO [train.py:1114] (3/4) Epoch 20, batch 350, loss[loss=0.1567, simple_loss=0.2377, pruned_loss=0.03788, over 13589.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2679, pruned_loss=0.04659, over 2180344.17 frames. ], batch size: 33, lr: 6.27e-03, grad_scale: 16.0
+2024-08-03 22:48:17,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=254895.66666666666, ans=0.0
+2024-08-03 22:48:20,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=254895.66666666666, ans=0.2
+2024-08-03 22:48:33,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254932.33333333334, ans=0.0
+2024-08-03 22:48:46,586 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.302e+01 1.074e+02 1.204e+02 1.489e+02 2.516e+02, threshold=2.409e+02, percent-clipped=1.0
+2024-08-03 22:49:02,825 INFO [train.py:1114] (3/4) Epoch 20, batch 400, loss[loss=0.1673, simple_loss=0.2677, pruned_loss=0.03344, over 13372.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2674, pruned_loss=0.04628, over 2284370.02 frames. ], batch size: 37, lr: 6.27e-03, grad_scale: 32.0
+2024-08-03 22:49:03,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=4.51 vs. limit=15.0
+2024-08-03 22:49:09,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.65 vs. limit=15.0
+2024-08-03 22:49:22,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=255152.33333333334, ans=0.125
+2024-08-03 22:49:27,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=255152.33333333334, ans=0.2
+2024-08-03 22:49:29,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=255152.33333333334, ans=0.1
+2024-08-03 22:49:29,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.77 vs. limit=15.0
+2024-08-03 22:49:39,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=255189.0, ans=0.125
+2024-08-03 22:49:51,108 INFO [train.py:1114] (3/4) Epoch 20, batch 450, loss[loss=0.1976, simple_loss=0.2859, pruned_loss=0.0547, over 13548.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2682, pruned_loss=0.04687, over 2359396.39 frames. ], batch size: 38, lr: 6.27e-03, grad_scale: 32.0
+2024-08-03 22:49:58,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255262.33333333334, ans=0.125
+2024-08-03 22:50:02,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=255299.0, ans=0.125
+2024-08-03 22:50:20,453 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.506e+01 1.112e+02 1.300e+02 1.604e+02 2.595e+02, threshold=2.600e+02, percent-clipped=3.0
+2024-08-03 22:50:23,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.69 vs. limit=15.0
+2024-08-03 22:50:26,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=15.0
+2024-08-03 22:50:35,542 INFO [train.py:1114] (3/4) Epoch 20, batch 500, loss[loss=0.2035, simple_loss=0.2911, pruned_loss=0.05797, over 13454.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2673, pruned_loss=0.04639, over 2425118.67 frames. ], batch size: 43, lr: 6.27e-03, grad_scale: 16.0
+2024-08-03 22:51:25,361 INFO [train.py:1114] (3/4) Epoch 20, batch 550, loss[loss=0.1943, simple_loss=0.2906, pruned_loss=0.04901, over 13011.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2677, pruned_loss=0.04666, over 2467787.60 frames. ], batch size: 48, lr: 6.26e-03, grad_scale: 16.0
+2024-08-03 22:51:48,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.36 vs. limit=22.5
+2024-08-03 22:51:52,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=255739.0, ans=0.0
+2024-08-03 22:51:55,573 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.709e+01 1.108e+02 1.273e+02 1.483e+02 2.115e+02, threshold=2.547e+02, percent-clipped=0.0
+2024-08-03 22:52:01,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=255775.66666666666, ans=0.0
+2024-08-03 22:52:13,747 INFO [train.py:1114] (3/4) Epoch 20, batch 600, loss[loss=0.1986, simple_loss=0.2956, pruned_loss=0.05078, over 13363.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2676, pruned_loss=0.04674, over 2507755.95 frames. ], batch size: 46, lr: 6.26e-03, grad_scale: 16.0
+2024-08-03 22:52:14,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.41 vs. limit=6.0
+2024-08-03 22:52:21,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=255812.33333333334, ans=0.125
+2024-08-03 22:52:24,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=255849.0, ans=0.0
+2024-08-03 22:52:38,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=255885.66666666666, ans=0.2
+2024-08-03 22:52:42,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=255922.33333333334, ans=0.125
+2024-08-03 22:52:43,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.00 vs. limit=22.5
+2024-08-03 22:52:50,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=255959.0, ans=0.0
+2024-08-03 22:53:01,446 INFO [train.py:1114] (3/4) Epoch 20, batch 650, loss[loss=0.1833, simple_loss=0.271, pruned_loss=0.04773, over 13536.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2669, pruned_loss=0.04651, over 2542991.77 frames. ], batch size: 37, lr: 6.26e-03, grad_scale: 8.0
+2024-08-03 22:53:11,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256032.33333333334, ans=0.125
+2024-08-03 22:53:20,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256069.0, ans=0.1
+2024-08-03 22:53:31,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=256105.66666666666, ans=0.125
+2024-08-03 22:53:32,264 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.291e+01 1.089e+02 1.229e+02 1.482e+02 2.680e+02, threshold=2.459e+02, percent-clipped=1.0
+2024-08-03 22:53:36,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.21 vs. limit=12.0
+2024-08-03 22:53:37,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=15.0
+2024-08-03 22:53:37,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=256142.33333333334, ans=0.0
+2024-08-03 22:53:47,132 INFO [train.py:1114] (3/4) Epoch 20, batch 700, loss[loss=0.1677, simple_loss=0.254, pruned_loss=0.04071, over 13536.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2679, pruned_loss=0.04676, over 2564475.01 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 8.0
+2024-08-03 22:53:47,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=256179.0, ans=0.125
+2024-08-03 22:53:59,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=256215.66666666666, ans=0.0
+2024-08-03 22:53:59,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256215.66666666666, ans=0.125
+2024-08-03 22:54:00,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0
+2024-08-03 22:54:14,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256289.0, ans=0.1
+2024-08-03 22:54:16,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=256289.0, ans=0.2
+2024-08-03 22:54:28,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.61 vs. limit=22.5
+2024-08-03 22:54:29,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.48 vs. limit=15.0
+2024-08-03 22:54:34,714 INFO [train.py:1114] (3/4) Epoch 20, batch 750, loss[loss=0.1737, simple_loss=0.2673, pruned_loss=0.04006, over 13360.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2668, pruned_loss=0.04619, over 2581995.35 frames. ], batch size: 37, lr: 6.26e-03, grad_scale: 8.0
+2024-08-03 22:54:37,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.64 vs. 
limit=15.0 +2024-08-03 22:54:41,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=256362.33333333334, ans=0.025 +2024-08-03 22:54:41,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-08-03 22:54:44,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.84 vs. limit=10.0 +2024-08-03 22:54:47,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=256399.0, ans=0.0 +2024-08-03 22:55:08,788 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.791e+01 1.111e+02 1.273e+02 1.584e+02 2.450e+02, threshold=2.545e+02, percent-clipped=0.0 +2024-08-03 22:55:14,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=256509.0, ans=0.2 +2024-08-03 22:55:18,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256509.0, ans=0.0 +2024-08-03 22:55:23,262 INFO [train.py:1114] (3/4) Epoch 20, batch 800, loss[loss=0.1627, simple_loss=0.2486, pruned_loss=0.03839, over 13340.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2671, pruned_loss=0.04635, over 2596412.66 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:55:40,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=256619.0, ans=0.025 +2024-08-03 22:56:10,725 INFO [train.py:1114] (3/4) Epoch 20, batch 850, loss[loss=0.1915, simple_loss=0.2842, pruned_loss=0.04936, over 13322.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2668, pruned_loss=0.04612, over 2609235.01 frames. ], batch size: 40, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:56:19,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256765.66666666666, ans=0.1 +2024-08-03 22:56:25,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=256765.66666666666, ans=0.0 +2024-08-03 22:56:26,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=256765.66666666666, ans=0.0 +2024-08-03 22:56:36,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=4.51 vs. limit=15.0 +2024-08-03 22:56:41,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256839.0, ans=0.125 +2024-08-03 22:56:44,117 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.111e+01 1.084e+02 1.248e+02 1.708e+02 3.125e+02, threshold=2.496e+02, percent-clipped=3.0 +2024-08-03 22:56:53,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. 
limit=15.0 +2024-08-03 22:56:56,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=256875.66666666666, ans=0.025 +2024-08-03 22:56:58,775 INFO [train.py:1114] (3/4) Epoch 20, batch 900, loss[loss=0.1701, simple_loss=0.252, pruned_loss=0.04413, over 13331.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2675, pruned_loss=0.04679, over 2611135.81 frames. ], batch size: 33, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:57:10,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256949.0, ans=0.1 +2024-08-03 22:57:18,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=256985.66666666666, ans=0.0 +2024-08-03 22:57:19,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256985.66666666666, ans=0.125 +2024-08-03 22:57:20,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256985.66666666666, ans=0.125 +2024-08-03 22:57:30,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-08-03 22:57:41,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257059.0, ans=0.125 +2024-08-03 22:57:43,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=257095.66666666666, ans=0.125 +2024-08-03 22:57:43,695 INFO [train.py:1114] (3/4) Epoch 20, batch 950, loss[loss=0.1737, simple_loss=0.2586, pruned_loss=0.04437, over 13535.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04678, over 2612219.55 frames. ], batch size: 34, lr: 6.25e-03, grad_scale: 16.0 +2024-08-03 22:57:51,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257095.66666666666, ans=0.125 +2024-08-03 22:58:02,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=257169.0, ans=0.125 +2024-08-03 22:58:16,416 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.792e+01 1.096e+02 1.434e+02 1.875e+02 2.963e+02, threshold=2.868e+02, percent-clipped=4.0 +2024-08-03 22:58:21,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=257242.33333333334, ans=0.125 +2024-08-03 22:58:21,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257242.33333333334, ans=0.125 +2024-08-03 22:58:25,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-08-03 22:58:27,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.06 vs. limit=15.0 +2024-08-03 22:58:32,892 INFO [train.py:1114] (3/4) Epoch 20, batch 1000, loss[loss=0.1774, simple_loss=0.2582, pruned_loss=0.04827, over 13368.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2686, pruned_loss=0.04724, over 2610532.17 frames. 
], batch size: 35, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 22:58:35,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=257279.0, ans=0.1 +2024-08-03 22:58:54,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=257352.33333333334, ans=0.0 +2024-08-03 22:58:55,366 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:59:03,538 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 22:59:08,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.08 vs. limit=10.0 +2024-08-03 22:59:20,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=257462.33333333334, ans=0.2 +2024-08-03 22:59:20,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=12.0 +2024-08-03 22:59:21,357 INFO [train.py:1114] (3/4) Epoch 20, batch 1050, loss[loss=0.1713, simple_loss=0.2724, pruned_loss=0.03508, over 13573.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2677, pruned_loss=0.04693, over 2615054.84 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 22:59:25,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=257462.33333333334, ans=0.035 +2024-08-03 22:59:28,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=257462.33333333334, ans=10.0 +2024-08-03 22:59:33,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257499.0, ans=0.0 +2024-08-03 22:59:44,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257535.66666666666, ans=0.125 +2024-08-03 22:59:47,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=257535.66666666666, ans=0.125 +2024-08-03 22:59:52,247 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.716e+01 1.110e+02 1.251e+02 1.540e+02 2.508e+02, threshold=2.503e+02, percent-clipped=0.0 +2024-08-03 23:00:03,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.41 vs. limit=15.0 +2024-08-03 23:00:08,983 INFO [train.py:1114] (3/4) Epoch 20, batch 1100, loss[loss=0.1832, simple_loss=0.2705, pruned_loss=0.04795, over 13574.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.268, pruned_loss=0.04717, over 2619211.16 frames. 
], batch size: 36, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:00:09,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=257645.66666666666, ans=0.125 +2024-08-03 23:00:23,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257682.33333333334, ans=0.125 +2024-08-03 23:00:23,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=257682.33333333334, ans=0.0 +2024-08-03 23:00:33,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=257719.0, ans=0.07 +2024-08-03 23:00:54,180 INFO [train.py:1114] (3/4) Epoch 20, batch 1150, loss[loss=0.18, simple_loss=0.2694, pruned_loss=0.04531, over 13554.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2678, pruned_loss=0.0472, over 2618109.17 frames. ], batch size: 36, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:00:54,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=257829.0, ans=0.125 +2024-08-03 23:00:55,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=257829.0, ans=0.125 +2024-08-03 23:00:57,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=257829.0, ans=0.0 +2024-08-03 23:00:58,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.03 vs. limit=22.5 +2024-08-03 23:01:26,023 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.874e+01 1.110e+02 1.277e+02 1.674e+02 2.760e+02, threshold=2.554e+02, percent-clipped=1.0 +2024-08-03 23:01:26,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=257939.0, ans=0.2 +2024-08-03 23:01:31,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257975.66666666666, ans=0.125 +2024-08-03 23:01:39,717 INFO [train.py:1114] (3/4) Epoch 20, batch 1200, loss[loss=0.1859, simple_loss=0.2729, pruned_loss=0.04943, over 13578.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2688, pruned_loss=0.04744, over 2614938.59 frames. ], batch size: 39, lr: 6.24e-03, grad_scale: 16.0 +2024-08-03 23:01:43,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=258012.33333333334, ans=0.125 +2024-08-03 23:01:45,364 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:02:15,111 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:02:23,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=15.0 +2024-08-03 23:02:28,510 INFO [train.py:1114] (3/4) Epoch 20, batch 1250, loss[loss=0.1988, simple_loss=0.2828, pruned_loss=0.05741, over 13439.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2693, pruned_loss=0.04724, over 2627097.90 frames. 
], batch size: 42, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:02:28,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.37 vs. limit=10.0 +2024-08-03 23:02:48,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=258269.0, ans=0.125 +2024-08-03 23:03:01,938 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.994e+01 1.078e+02 1.315e+02 1.640e+02 2.788e+02, threshold=2.629e+02, percent-clipped=1.0 +2024-08-03 23:03:15,618 INFO [train.py:1114] (3/4) Epoch 20, batch 1300, loss[loss=0.1927, simple_loss=0.2806, pruned_loss=0.05243, over 12844.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.268, pruned_loss=0.0467, over 2631122.20 frames. ], batch size: 52, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:03:16,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=258379.0, ans=0.2 +2024-08-03 23:03:17,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=258379.0, ans=0.1 +2024-08-03 23:03:29,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=258415.66666666666, ans=0.2 +2024-08-03 23:03:29,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=258415.66666666666, ans=0.125 +2024-08-03 23:03:33,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=258452.33333333334, ans=0.0 +2024-08-03 23:03:41,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.85 vs. limit=15.0 +2024-08-03 23:03:42,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258452.33333333334, ans=0.125 +2024-08-03 23:03:43,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=258452.33333333334, ans=0.125 +2024-08-03 23:04:02,910 INFO [train.py:1114] (3/4) Epoch 20, batch 1350, loss[loss=0.1845, simple_loss=0.2772, pruned_loss=0.04583, over 13548.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.268, pruned_loss=0.04666, over 2637939.28 frames. ], batch size: 37, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:19,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258599.0, ans=0.125 +2024-08-03 23:04:34,293 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.766e+01 1.164e+02 1.354e+02 1.727e+02 2.558e+02, threshold=2.707e+02, percent-clipped=0.0 +2024-08-03 23:04:46,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=258709.0, ans=0.125 +2024-08-03 23:04:47,952 INFO [train.py:1114] (3/4) Epoch 20, batch 1400, loss[loss=0.1602, simple_loss=0.2367, pruned_loss=0.0419, over 13261.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2672, pruned_loss=0.04623, over 2641369.04 frames. 
], batch size: 31, lr: 6.23e-03, grad_scale: 16.0 +2024-08-03 23:04:54,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=258745.66666666666, ans=0.2 +2024-08-03 23:04:56,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.82 vs. limit=15.0 +2024-08-03 23:05:04,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=258782.33333333334, ans=0.0 +2024-08-03 23:05:18,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.96 vs. limit=22.5 +2024-08-03 23:05:31,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=258892.33333333334, ans=0.2 +2024-08-03 23:05:31,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=258892.33333333334, ans=0.07 +2024-08-03 23:05:32,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=258892.33333333334, ans=0.0 +2024-08-03 23:05:33,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258892.33333333334, ans=0.125 +2024-08-03 23:05:35,406 INFO [train.py:1114] (3/4) Epoch 20, batch 1450, loss[loss=0.1888, simple_loss=0.2782, pruned_loss=0.04966, over 13453.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2681, pruned_loss=0.04662, over 2640854.37 frames. ], batch size: 43, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:05:35,701 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:05:36,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=15.0 +2024-08-03 23:05:50,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=258965.66666666666, ans=0.125 +2024-08-03 23:06:00,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259002.33333333334, ans=0.125 +2024-08-03 23:06:04,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259039.0, ans=0.1 +2024-08-03 23:06:08,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=259039.0, ans=0.125 +2024-08-03 23:06:08,781 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.191e+01 1.099e+02 1.241e+02 1.472e+02 2.601e+02, threshold=2.481e+02, percent-clipped=0.0 +2024-08-03 23:06:10,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.30 vs. 
limit=15.0 +2024-08-03 23:06:10,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=259039.0, ans=0.2 +2024-08-03 23:06:19,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=259075.66666666666, ans=0.2 +2024-08-03 23:06:22,418 INFO [train.py:1114] (3/4) Epoch 20, batch 1500, loss[loss=0.1713, simple_loss=0.2649, pruned_loss=0.03882, over 13403.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2682, pruned_loss=0.04659, over 2640981.57 frames. ], batch size: 39, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:06:25,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=22.5 +2024-08-03 23:06:32,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259149.0, ans=0.125 +2024-08-03 23:06:57,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259222.33333333334, ans=0.125 +2024-08-03 23:07:00,866 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=12.0 +2024-08-03 23:07:01,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=259259.0, ans=0.025 +2024-08-03 23:07:12,335 INFO [train.py:1114] (3/4) Epoch 20, batch 1550, loss[loss=0.166, simple_loss=0.2682, pruned_loss=0.03185, over 13408.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2678, pruned_loss=0.04678, over 2630586.96 frames. ], batch size: 41, lr: 6.22e-03, grad_scale: 16.0 +2024-08-03 23:07:38,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=259369.0, ans=15.0 +2024-08-03 23:07:42,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=259405.66666666666, ans=0.125 +2024-08-03 23:07:43,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=259405.66666666666, ans=0.05 +2024-08-03 23:07:45,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=4.85 vs. limit=10.0 +2024-08-03 23:07:45,381 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.913e+01 1.100e+02 1.241e+02 1.619e+02 2.779e+02, threshold=2.482e+02, percent-clipped=4.0 +2024-08-03 23:07:58,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259479.0, ans=0.125 +2024-08-03 23:07:59,033 INFO [train.py:1114] (3/4) Epoch 20, batch 1600, loss[loss=0.1933, simple_loss=0.282, pruned_loss=0.05235, over 13573.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.268, pruned_loss=0.04713, over 2623320.58 frames. 
], batch size: 39, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:10,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259515.66666666666, ans=0.125 +2024-08-03 23:08:13,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=259515.66666666666, ans=0.125 +2024-08-03 23:08:38,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=259625.66666666666, ans=0.125 +2024-08-03 23:08:44,989 INFO [train.py:1114] (3/4) Epoch 20, batch 1650, loss[loss=0.195, simple_loss=0.2854, pruned_loss=0.05227, over 13332.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2678, pruned_loss=0.04707, over 2621374.40 frames. ], batch size: 40, lr: 6.22e-03, grad_scale: 32.0 +2024-08-03 23:08:50,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=259662.33333333334, ans=0.125 +2024-08-03 23:08:58,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259699.0, ans=0.125 +2024-08-03 23:09:01,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.42 vs. limit=15.0 +2024-08-03 23:09:07,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=259735.66666666666, ans=0.0 +2024-08-03 23:09:12,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.79 vs. limit=12.0 +2024-08-03 23:09:20,927 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.088e+02 1.223e+02 1.648e+02 3.559e+02, threshold=2.446e+02, percent-clipped=8.0 +2024-08-03 23:09:24,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=259772.33333333334, ans=0.125 +2024-08-03 23:09:34,467 INFO [train.py:1114] (3/4) Epoch 20, batch 1700, loss[loss=0.144, simple_loss=0.2183, pruned_loss=0.03479, over 13243.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2679, pruned_loss=0.04681, over 2630223.36 frames. ], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:09:45,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259882.33333333334, ans=0.125 +2024-08-03 23:09:48,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=259882.33333333334, ans=0.0 +2024-08-03 23:09:50,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=259882.33333333334, ans=0.0 +2024-08-03 23:09:51,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=259882.33333333334, ans=0.125 +2024-08-03 23:10:06,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=259955.66666666666, ans=0.0 +2024-08-03 23:10:09,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.42 vs. 
limit=10.0 +2024-08-03 23:10:21,422 INFO [train.py:1114] (3/4) Epoch 20, batch 1750, loss[loss=0.1571, simple_loss=0.237, pruned_loss=0.03861, over 13566.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2676, pruned_loss=0.04665, over 2634285.16 frames. ], batch size: 31, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:10:29,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260065.66666666666, ans=0.125 +2024-08-03 23:10:48,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=260102.33333333334, ans=0.125 +2024-08-03 23:10:52,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.39 vs. limit=12.0 +2024-08-03 23:10:54,928 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.991e+01 1.099e+02 1.220e+02 1.451e+02 2.480e+02, threshold=2.439e+02, percent-clipped=1.0 +2024-08-03 23:11:07,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260175.66666666666, ans=0.125 +2024-08-03 23:11:08,884 INFO [train.py:1114] (3/4) Epoch 20, batch 1800, loss[loss=0.1873, simple_loss=0.2762, pruned_loss=0.04923, over 13549.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2679, pruned_loss=0.04677, over 2635818.95 frames. ], batch size: 38, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:24,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=260249.0, ans=0.0 +2024-08-03 23:11:27,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=260285.66666666666, ans=0.0 +2024-08-03 23:11:34,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=260285.66666666666, ans=0.09899494936611666 +2024-08-03 23:11:39,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=260322.33333333334, ans=0.5 +2024-08-03 23:11:44,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.99 vs. limit=22.5 +2024-08-03 23:11:54,206 INFO [train.py:1114] (3/4) Epoch 20, batch 1850, loss[loss=0.1962, simple_loss=0.2855, pruned_loss=0.0535, over 13401.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2675, pruned_loss=0.04664, over 2637037.80 frames. ], batch size: 39, lr: 6.21e-03, grad_scale: 32.0 +2024-08-03 23:11:58,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=260395.66666666666, ans=0.04949747468305833 +2024-08-03 23:12:00,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=260395.66666666666, ans=0.125 +2024-08-03 23:12:00,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=260395.66666666666, ans=0.125 +2024-08-03 23:12:00,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=260395.66666666666, ans=0.2 +2024-08-03 23:12:09,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.61 vs. 
limit=15.0 +2024-08-03 23:12:18,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=260469.0, ans=0.025 +2024-08-03 23:12:22,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=260505.66666666666, ans=0.0 +2024-08-03 23:12:26,247 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.233e+01 1.108e+02 1.354e+02 1.892e+02 2.843e+02, threshold=2.709e+02, percent-clipped=7.0 +2024-08-03 23:12:26,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260505.66666666666, ans=0.125 +2024-08-03 23:12:46,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=260542.33333333334, ans=0.025 +2024-08-03 23:12:55,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260542.33333333334, ans=0.1 +2024-08-03 23:12:55,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260542.33333333334, ans=0.1 +2024-08-03 23:12:58,316 INFO [train.py:1114] (3/4) Epoch 20, batch 1900, loss[loss=0.1737, simple_loss=0.266, pruned_loss=0.04073, over 13325.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2679, pruned_loss=0.04672, over 2639900.68 frames. ], batch size: 40, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:05,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=260579.0, ans=0.0 +2024-08-03 23:13:10,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.75 vs. limit=10.0 +2024-08-03 23:13:11,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=260615.66666666666, ans=0.125 +2024-08-03 23:13:16,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=260615.66666666666, ans=0.125 +2024-08-03 23:13:18,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=260652.33333333334, ans=0.125 +2024-08-03 23:13:20,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-08-03 23:13:32,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=260689.0, ans=0.2 +2024-08-03 23:13:32,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=260689.0, ans=0.0 +2024-08-03 23:13:45,869 INFO [train.py:1114] (3/4) Epoch 20, batch 1950, loss[loss=0.1786, simple_loss=0.2644, pruned_loss=0.04644, over 13556.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2691, pruned_loss=0.04703, over 2646643.32 frames. 
], batch size: 36, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:13:50,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=260762.33333333334, ans=0.125 +2024-08-03 23:13:52,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=260762.33333333334, ans=0.125 +2024-08-03 23:13:56,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=260799.0, ans=0.2 +2024-08-03 23:14:00,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0 +2024-08-03 23:14:02,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.43 vs. limit=15.0 +2024-08-03 23:14:17,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=260872.33333333334, ans=0.0 +2024-08-03 23:14:19,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.342e+01 1.097e+02 1.231e+02 1.483e+02 2.195e+02, threshold=2.462e+02, percent-clipped=0.0 +2024-08-03 23:14:23,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260909.0, ans=0.125 +2024-08-03 23:14:24,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.24 vs. limit=22.5 +2024-08-03 23:14:35,083 INFO [train.py:1114] (3/4) Epoch 20, batch 2000, loss[loss=0.1578, simple_loss=0.2366, pruned_loss=0.03949, over 13531.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2697, pruned_loss=0.04733, over 2636890.39 frames. ], batch size: 31, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:14:39,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=260945.66666666666, ans=12.0 +2024-08-03 23:14:47,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260982.33333333334, ans=0.125 +2024-08-03 23:14:48,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260982.33333333334, ans=0.125 +2024-08-03 23:14:58,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261019.0, ans=0.1 +2024-08-03 23:15:14,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261092.33333333334, ans=0.125 +2024-08-03 23:15:18,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=261092.33333333334, ans=0.125 +2024-08-03 23:15:20,660 INFO [train.py:1114] (3/4) Epoch 20, batch 2050, loss[loss=0.1694, simple_loss=0.2445, pruned_loss=0.04715, over 13390.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.268, pruned_loss=0.04706, over 2633756.82 frames. ], batch size: 32, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:15:22,165 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.22 vs. 
limit=15.0 +2024-08-03 23:15:24,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=261129.0, ans=0.125 +2024-08-03 23:15:52,100 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.005e+01 1.074e+02 1.228e+02 1.423e+02 2.984e+02, threshold=2.455e+02, percent-clipped=1.0 +2024-08-03 23:16:05,626 INFO [train.py:1114] (3/4) Epoch 20, batch 2100, loss[loss=0.1688, simple_loss=0.2622, pruned_loss=0.0377, over 13549.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2674, pruned_loss=0.04622, over 2639488.75 frames. ], batch size: 37, lr: 6.20e-03, grad_scale: 32.0 +2024-08-03 23:16:08,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=261312.33333333334, ans=0.07 +2024-08-03 23:16:11,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=261312.33333333334, ans=0.0 +2024-08-03 23:16:11,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=261312.33333333334, ans=0.05 +2024-08-03 23:16:12,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=261312.33333333334, ans=0.125 +2024-08-03 23:16:14,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.40 vs. limit=15.0 +2024-08-03 23:16:20,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=261349.0, ans=0.125 +2024-08-03 23:16:21,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=261349.0, ans=0.025 +2024-08-03 23:16:24,889 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.144e-03 +2024-08-03 23:16:54,282 INFO [train.py:1114] (3/4) Epoch 20, batch 2150, loss[loss=0.1694, simple_loss=0.2587, pruned_loss=0.04008, over 13570.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2672, pruned_loss=0.04612, over 2647828.96 frames. 
], batch size: 36, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:16:57,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=261495.66666666666, ans=0.0 +2024-08-03 23:17:02,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=261532.33333333334, ans=0.125 +2024-08-03 23:17:07,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261532.33333333334, ans=0.1 +2024-08-03 23:17:13,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=261569.0, ans=0.125 +2024-08-03 23:17:25,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=261605.66666666666, ans=0.07 +2024-08-03 23:17:26,354 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.602e+01 1.138e+02 1.478e+02 2.029e+02 3.755e+02, threshold=2.955e+02, percent-clipped=14.0 +2024-08-03 23:17:36,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=261642.33333333334, ans=0.0 +2024-08-03 23:17:40,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261642.33333333334, ans=0.125 +2024-08-03 23:17:41,769 INFO [train.py:1114] (3/4) Epoch 20, batch 2200, loss[loss=0.1911, simple_loss=0.286, pruned_loss=0.04809, over 13397.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2673, pruned_loss=0.04598, over 2645774.18 frames. ], batch size: 39, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:17:42,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261679.0, ans=0.1 +2024-08-03 23:17:58,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=261715.66666666666, ans=0.125 +2024-08-03 23:17:58,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=261715.66666666666, ans=0.125 +2024-08-03 23:18:00,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=261752.33333333334, ans=0.09899494936611666 +2024-08-03 23:18:00,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261752.33333333334, ans=0.125 +2024-08-03 23:18:02,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=261752.33333333334, ans=0.0 +2024-08-03 23:18:04,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=261752.33333333334, ans=0.0 +2024-08-03 23:18:14,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=261789.0, ans=0.125 +2024-08-03 23:18:18,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=261789.0, ans=0.025 +2024-08-03 23:18:21,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261825.66666666666, ans=0.125 +2024-08-03 23:18:21,928 INFO [scaling.py:214] (3/4) ScheduledFloat: 
name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=261825.66666666666, ans=10.0 +2024-08-03 23:18:31,224 INFO [train.py:1114] (3/4) Epoch 20, batch 2250, loss[loss=0.1797, simple_loss=0.2679, pruned_loss=0.04577, over 13366.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2672, pruned_loss=0.04577, over 2643081.20 frames. ], batch size: 37, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:18:51,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-08-03 23:19:00,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=261972.33333333334, ans=0.125 +2024-08-03 23:19:02,744 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.914e+01 1.098e+02 1.238e+02 1.485e+02 2.172e+02, threshold=2.476e+02, percent-clipped=0.0 +2024-08-03 23:19:07,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=262009.0, ans=0.125 +2024-08-03 23:19:12,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=262009.0, ans=0.0 +2024-08-03 23:19:14,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=262009.0, ans=0.0 +2024-08-03 23:19:16,457 INFO [train.py:1114] (3/4) Epoch 20, batch 2300, loss[loss=0.1756, simple_loss=0.2569, pruned_loss=0.04717, over 13600.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2668, pruned_loss=0.04598, over 2638952.35 frames. ], batch size: 33, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:19:16,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=262045.66666666666, ans=0.1 +2024-08-03 23:19:17,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.44 vs. limit=22.5 +2024-08-03 23:19:18,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=262045.66666666666, ans=0.0 +2024-08-03 23:19:29,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=262082.33333333334, ans=0.2 +2024-08-03 23:19:44,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.94 vs. limit=12.0 +2024-08-03 23:19:47,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=262155.6666666667, ans=0.2 +2024-08-03 23:19:47,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262155.6666666667, ans=0.125 +2024-08-03 23:19:54,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262192.3333333333, ans=0.125 +2024-08-03 23:20:01,871 INFO [train.py:1114] (3/4) Epoch 20, batch 2350, loss[loss=0.19, simple_loss=0.2806, pruned_loss=0.04972, over 13570.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2666, pruned_loss=0.04619, over 2641117.11 frames. 
], batch size: 38, lr: 6.19e-03, grad_scale: 32.0 +2024-08-03 23:20:02,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262229.0, ans=0.1 +2024-08-03 23:20:05,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=262229.0, ans=0.0 +2024-08-03 23:20:11,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=262229.0, ans=0.2 +2024-08-03 23:20:24,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262302.3333333333, ans=0.125 +2024-08-03 23:20:33,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=262339.0, ans=0.0 +2024-08-03 23:20:38,438 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.532e+01 1.125e+02 1.356e+02 1.575e+02 2.756e+02, threshold=2.712e+02, percent-clipped=1.0 +2024-08-03 23:20:50,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=262375.6666666667, ans=0.125 +2024-08-03 23:20:52,267 INFO [train.py:1114] (3/4) Epoch 20, batch 2400, loss[loss=0.1619, simple_loss=0.2485, pruned_loss=0.03766, over 13525.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2677, pruned_loss=0.04672, over 2642423.79 frames. ], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:21:27,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=262522.3333333333, ans=0.09899494936611666 +2024-08-03 23:21:31,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=262559.0, ans=0.125 +2024-08-03 23:21:33,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=262559.0, ans=0.125 +2024-08-03 23:21:34,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262559.0, ans=0.125 +2024-08-03 23:21:41,783 INFO [train.py:1114] (3/4) Epoch 20, batch 2450, loss[loss=0.1886, simple_loss=0.2837, pruned_loss=0.04675, over 13361.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2681, pruned_loss=0.04666, over 2632644.51 frames. ], batch size: 37, lr: 6.18e-03, grad_scale: 32.0 +2024-08-03 23:21:41,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262595.6666666667, ans=0.1 +2024-08-03 23:21:57,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=262632.3333333333, ans=0.2 +2024-08-03 23:22:14,233 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.032e+01 1.119e+02 1.273e+02 1.496e+02 2.494e+02, threshold=2.546e+02, percent-clipped=0.0 +2024-08-03 23:22:22,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=262742.3333333333, ans=0.07 +2024-08-03 23:22:27,158 INFO [train.py:1114] (3/4) Epoch 20, batch 2500, loss[loss=0.2092, simple_loss=0.29, pruned_loss=0.06425, over 13397.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2677, pruned_loss=0.04648, over 2636801.88 frames. 
], batch size: 39, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:22:40,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=262815.6666666667, ans=0.0 +2024-08-03 23:22:50,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262852.3333333333, ans=0.125 +2024-08-03 23:22:51,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=262852.3333333333, ans=0.125 +2024-08-03 23:23:05,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262925.6666666667, ans=0.125 +2024-08-03 23:23:09,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=262925.6666666667, ans=0.0 +2024-08-03 23:23:11,101 INFO [train.py:1114] (3/4) Epoch 20, batch 2550, loss[loss=0.1721, simple_loss=0.2514, pruned_loss=0.04637, over 13537.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2675, pruned_loss=0.04637, over 2638613.79 frames. ], batch size: 31, lr: 6.18e-03, grad_scale: 16.0 +2024-08-03 23:23:12,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=262962.3333333333, ans=0.0 +2024-08-03 23:23:13,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262962.3333333333, ans=0.1 +2024-08-03 23:23:21,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=262999.0, ans=0.0 +2024-08-03 23:23:25,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=262999.0, ans=0.125 +2024-08-03 23:23:32,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=263035.6666666667, ans=0.0 +2024-08-03 23:23:34,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=263035.6666666667, ans=0.125 +2024-08-03 23:23:42,183 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.295e+01 1.081e+02 1.432e+02 1.962e+02 3.343e+02, threshold=2.864e+02, percent-clipped=8.0 +2024-08-03 23:23:48,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=263109.0, ans=0.125 +2024-08-03 23:23:50,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=263109.0, ans=0.0 +2024-08-03 23:23:54,450 INFO [train.py:1114] (3/4) Epoch 20, batch 2600, loss[loss=0.1646, simple_loss=0.2554, pruned_loss=0.03692, over 13548.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2673, pruned_loss=0.04613, over 2638229.17 frames. 
], batch size: 36, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:23:55,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263145.6666666667, ans=0.125 +2024-08-03 23:23:57,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=263145.6666666667, ans=0.0 +2024-08-03 23:24:04,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263182.3333333333, ans=0.1 +2024-08-03 23:24:12,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0 +2024-08-03 23:24:14,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=263219.0, ans=0.0 +2024-08-03 23:24:24,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.83 vs. limit=15.0 +2024-08-03 23:24:38,458 INFO [train.py:1114] (3/4) Epoch 20, batch 2650, loss[loss=0.1905, simple_loss=0.2791, pruned_loss=0.05091, over 13321.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2678, pruned_loss=0.04634, over 2641466.57 frames. ], batch size: 46, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:24:41,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=263329.0, ans=0.0 +2024-08-03 23:24:50,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=263365.6666666667, ans=0.025 +2024-08-03 23:24:51,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=263365.6666666667, ans=0.125 +2024-08-03 23:24:57,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263402.3333333333, ans=0.1 +2024-08-03 23:24:57,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=263402.3333333333, ans=10.0 +2024-08-03 23:24:59,801 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0 +2024-08-03 23:25:09,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.851e+01 1.115e+02 1.313e+02 1.651e+02 2.845e+02, threshold=2.627e+02, percent-clipped=0.0 +2024-08-03 23:25:21,922 INFO [train.py:1114] (3/4) Epoch 20, batch 2700, loss[loss=0.194, simple_loss=0.2778, pruned_loss=0.05516, over 13541.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2681, pruned_loss=0.04651, over 2638775.34 frames. 
], batch size: 40, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:25:25,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=263512.3333333333, ans=0.0 +2024-08-03 23:25:27,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=263512.3333333333, ans=0.125 +2024-08-03 23:25:36,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=263549.0, ans=0.125 +2024-08-03 23:25:48,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=263622.3333333333, ans=0.05 +2024-08-03 23:25:48,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=263622.3333333333, ans=0.2 +2024-08-03 23:26:08,056 INFO [train.py:1114] (3/4) Epoch 20, batch 2750, loss[loss=0.1822, simple_loss=0.2669, pruned_loss=0.04871, over 13331.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2669, pruned_loss=0.04631, over 2636998.04 frames. ], batch size: 34, lr: 6.17e-03, grad_scale: 16.0 +2024-08-03 23:26:20,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.87 vs. limit=10.0 +2024-08-03 23:26:31,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=263769.0, ans=0.5 +2024-08-03 23:26:35,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=263805.6666666667, ans=0.0 +2024-08-03 23:26:39,456 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.617e+01 1.130e+02 1.320e+02 1.634e+02 2.919e+02, threshold=2.640e+02, percent-clipped=4.0 +2024-08-03 23:26:42,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=263842.3333333333, ans=0.125 +2024-08-03 23:26:44,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263842.3333333333, ans=0.1 +2024-08-03 23:26:50,106 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:26:50,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=263842.3333333333, ans=0.0 +2024-08-03 23:26:51,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=263879.0, ans=0.125 +2024-08-03 23:26:51,628 INFO [train.py:1114] (3/4) Epoch 20, batch 2800, loss[loss=0.2554, simple_loss=0.3154, pruned_loss=0.09769, over 9191.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2672, pruned_loss=0.04675, over 2627828.43 frames. 
], batch size: 96, lr: 6.17e-03, grad_scale: 32.0 +2024-08-03 23:27:05,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=263915.6666666667, ans=0.025 +2024-08-03 23:27:07,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=263915.6666666667, ans=0.2 +2024-08-03 23:27:08,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263952.3333333333, ans=0.1 +2024-08-03 23:27:18,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=263989.0, ans=0.125 +2024-08-03 23:27:25,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263989.0, ans=0.1 +2024-08-03 23:27:33,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264025.6666666667, ans=0.125 +2024-08-03 23:27:37,563 INFO [train.py:1114] (3/4) Epoch 20, batch 2850, loss[loss=0.1671, simple_loss=0.2573, pruned_loss=0.03843, over 13357.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2673, pruned_loss=0.04656, over 2621346.16 frames. ], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:27:39,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. limit=6.0 +2024-08-03 23:27:40,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=264062.3333333333, ans=0.09899494936611666 +2024-08-03 23:27:59,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=15.0 +2024-08-03 23:28:00,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=264135.6666666667, ans=0.125 +2024-08-03 23:28:05,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.24 vs. limit=15.0 +2024-08-03 23:28:08,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=264172.3333333333, ans=0.0 +2024-08-03 23:28:08,784 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.256e+01 1.084e+02 1.260e+02 1.608e+02 3.133e+02, threshold=2.519e+02, percent-clipped=4.0 +2024-08-03 23:28:17,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=264209.0, ans=0.2 +2024-08-03 23:28:22,047 INFO [train.py:1114] (3/4) Epoch 20, batch 2900, loss[loss=0.1668, simple_loss=0.2481, pruned_loss=0.04278, over 13358.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2686, pruned_loss=0.04684, over 2632108.89 frames. 
], batch size: 36, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:28:23,095 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:28:24,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=264245.6666666667, ans=0.2 +2024-08-03 23:28:26,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0 +2024-08-03 23:28:29,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=264245.6666666667, ans=0.025 +2024-08-03 23:28:35,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=264282.3333333333, ans=0.2 +2024-08-03 23:28:42,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.30 vs. limit=15.0 +2024-08-03 23:28:43,061 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:28:50,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=264355.6666666667, ans=0.025 +2024-08-03 23:29:05,317 INFO [train.py:1114] (3/4) Epoch 20, batch 2950, loss[loss=0.1656, simple_loss=0.2556, pruned_loss=0.03778, over 13337.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2679, pruned_loss=0.04682, over 2629603.09 frames. ], batch size: 34, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:31,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=264502.3333333333, ans=0.0 +2024-08-03 23:29:33,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=15.0 +2024-08-03 23:29:37,840 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 7.731e+01 1.151e+02 1.380e+02 1.780e+02 2.510e+02, threshold=2.761e+02, percent-clipped=0.0 +2024-08-03 23:29:43,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=264575.6666666667, ans=0.2 +2024-08-03 23:29:45,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=264575.6666666667, ans=0.1 +2024-08-03 23:29:48,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=264575.6666666667, ans=0.035 +2024-08-03 23:29:49,937 INFO [train.py:1114] (3/4) Epoch 20, batch 3000, loss[loss=0.1713, simple_loss=0.2639, pruned_loss=0.03933, over 13550.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2674, pruned_loss=0.04692, over 2630513.15 frames. ], batch size: 37, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:29:49,938 INFO [train.py:1137] (3/4) Computing validation loss +2024-08-03 23:29:57,472 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.3189, 2.9593, 2.8462, 2.7207, 2.8539, 3.1117, 2.0901, 2.5407], + device='cuda:3') +2024-08-03 23:29:59,831 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1683, simple_loss=0.267, pruned_loss=0.03482, over 944034.00 frames. 
+2024-08-03 23:29:59,832 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 9968MB +2024-08-03 23:30:08,097 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.17 vs. limit=6.0 +2024-08-03 23:30:14,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=264649.0, ans=0.5 +2024-08-03 23:30:15,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=264649.0, ans=0.0 +2024-08-03 23:30:22,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=264685.6666666667, ans=0.125 +2024-08-03 23:30:29,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=264722.3333333333, ans=0.0 +2024-08-03 23:30:39,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264759.0, ans=0.125 +2024-08-03 23:30:43,189 INFO [train.py:1114] (3/4) Epoch 20, batch 3050, loss[loss=0.1892, simple_loss=0.2643, pruned_loss=0.05704, over 13519.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2683, pruned_loss=0.04724, over 2627668.03 frames. ], batch size: 35, lr: 6.16e-03, grad_scale: 32.0 +2024-08-03 23:30:46,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=264795.6666666667, ans=0.0 +2024-08-03 23:30:47,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.14 vs. limit=15.0 +2024-08-03 23:30:59,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264832.3333333333, ans=0.125 +2024-08-03 23:31:01,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264832.3333333333, ans=0.1 +2024-08-03 23:31:05,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.22 vs. limit=15.0 +2024-08-03 23:31:14,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=264869.0, ans=0.0 +2024-08-03 23:31:15,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=264869.0, ans=0.125 +2024-08-03 23:31:15,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.19 vs. limit=22.5 +2024-08-03 23:31:22,604 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.840e+01 1.051e+02 1.185e+02 1.395e+02 2.152e+02, threshold=2.371e+02, percent-clipped=0.0 +2024-08-03 23:31:32,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=264942.3333333333, ans=0.035 +2024-08-03 23:31:34,622 INFO [train.py:1114] (3/4) Epoch 20, batch 3100, loss[loss=0.1858, simple_loss=0.2753, pruned_loss=0.04812, over 13274.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2677, pruned_loss=0.04693, over 2627598.11 frames. 
], batch size: 46, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:31:39,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.47 vs. limit=22.5 +2024-08-03 23:31:47,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.74 vs. limit=22.5 +2024-08-03 23:32:05,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.11 vs. limit=15.0 +2024-08-03 23:32:15,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=265125.6666666667, ans=0.07 +2024-08-03 23:32:15,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265125.6666666667, ans=0.125 +2024-08-03 23:32:17,268 INFO [train.py:1114] (3/4) Epoch 20, batch 3150, loss[loss=0.2036, simple_loss=0.2877, pruned_loss=0.0598, over 13344.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2676, pruned_loss=0.04683, over 2628452.14 frames. ], batch size: 49, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:32:18,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265162.3333333333, ans=0.0 +2024-08-03 23:32:38,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=265235.6666666667, ans=0.0 +2024-08-03 23:32:48,873 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.568e+01 1.088e+02 1.255e+02 1.655e+02 2.829e+02, threshold=2.511e+02, percent-clipped=5.0 +2024-08-03 23:33:01,471 INFO [train.py:1114] (3/4) Epoch 20, batch 3200, loss[loss=0.1921, simple_loss=0.2739, pruned_loss=0.05511, over 13558.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.267, pruned_loss=0.04657, over 2634202.87 frames. ], batch size: 37, lr: 6.15e-03, grad_scale: 32.0 +2024-08-03 23:33:02,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.95 vs. limit=15.0 +2024-08-03 23:33:23,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.07 vs. limit=10.0 +2024-08-03 23:33:24,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=265419.0, ans=0.025 +2024-08-03 23:33:29,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265455.6666666667, ans=0.125 +2024-08-03 23:33:43,975 INFO [train.py:1114] (3/4) Epoch 20, batch 3250, loss[loss=0.1922, simple_loss=0.2801, pruned_loss=0.05214, over 13374.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2679, pruned_loss=0.04682, over 2638987.76 frames. 
], batch size: 38, lr: 6.15e-03, grad_scale: 16.0 +2024-08-03 23:33:51,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=265565.6666666667, ans=0.125 +2024-08-03 23:33:57,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265565.6666666667, ans=0.1 +2024-08-03 23:34:16,935 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.340e+01 1.119e+02 1.289e+02 1.600e+02 2.225e+02, threshold=2.578e+02, percent-clipped=0.0 +2024-08-03 23:34:27,185 INFO [train.py:1114] (3/4) Epoch 20, batch 3300, loss[loss=0.1775, simple_loss=0.27, pruned_loss=0.04247, over 12912.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.267, pruned_loss=0.04674, over 2641360.83 frames. ], batch size: 52, lr: 6.15e-03, grad_scale: 16.0 +2024-08-03 23:34:28,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265712.3333333333, ans=0.125 +2024-08-03 23:34:35,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265749.0, ans=0.1 +2024-08-03 23:34:40,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265749.0, ans=0.1 +2024-08-03 23:34:50,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=265785.6666666667, ans=0.125 +2024-08-03 23:34:50,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=265785.6666666667, ans=0.125 +2024-08-03 23:34:58,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=2.88 vs. limit=10.0 +2024-08-03 23:35:05,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=265859.0, ans=0.125 +2024-08-03 23:35:05,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=265859.0, ans=0.0 +2024-08-03 23:35:09,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=265895.6666666667, ans=0.125 +2024-08-03 23:35:10,001 INFO [train.py:1114] (3/4) Epoch 20, batch 3350, loss[loss=0.2142, simple_loss=0.2957, pruned_loss=0.06637, over 13059.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2676, pruned_loss=0.04673, over 2631181.00 frames. ], batch size: 48, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:35:11,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-08-03 23:35:13,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=7.48 vs. 
limit=15.0 +2024-08-03 23:35:22,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=265932.3333333333, ans=0.125 +2024-08-03 23:35:29,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265969.0, ans=0.1 +2024-08-03 23:35:32,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=265969.0, ans=0.125 +2024-08-03 23:35:35,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=266005.6666666667, ans=0.125 +2024-08-03 23:35:35,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=266005.6666666667, ans=0.125 +2024-08-03 23:35:38,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266005.6666666667, ans=0.1 +2024-08-03 23:35:40,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0 +2024-08-03 23:35:42,730 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.982e+01 1.153e+02 1.327e+02 1.503e+02 2.183e+02, threshold=2.655e+02, percent-clipped=0.0 +2024-08-03 23:35:53,120 INFO [train.py:1114] (3/4) Epoch 20, batch 3400, loss[loss=0.1778, simple_loss=0.2539, pruned_loss=0.05087, over 13523.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2676, pruned_loss=0.04685, over 2626607.65 frames. ], batch size: 31, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:35:54,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=266079.0, ans=0.2 +2024-08-03 23:35:58,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=266079.0, ans=0.04949747468305833 +2024-08-03 23:36:06,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=266115.6666666667, ans=0.0 +2024-08-03 23:36:15,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=266152.3333333333, ans=0.0 +2024-08-03 23:36:19,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.52 vs. limit=15.0 +2024-08-03 23:36:28,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=266225.6666666667, ans=0.05 +2024-08-03 23:36:35,524 INFO [train.py:1114] (3/4) Epoch 20, batch 3450, loss[loss=0.2018, simple_loss=0.2879, pruned_loss=0.05786, over 12956.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2678, pruned_loss=0.04678, over 2629146.23 frames. ], batch size: 52, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:36:36,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=266262.3333333333, ans=0.125 +2024-08-03 23:36:40,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=3.64 vs. 
limit=12.0 +2024-08-03 23:36:54,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=266335.6666666667, ans=0.2 +2024-08-03 23:37:00,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=266372.3333333333, ans=0.125 +2024-08-03 23:37:03,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=266372.3333333333, ans=0.2 +2024-08-03 23:37:08,062 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.153e+01 1.075e+02 1.228e+02 1.591e+02 2.797e+02, threshold=2.457e+02, percent-clipped=1.0 +2024-08-03 23:37:14,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=266409.0, ans=0.0 +2024-08-03 23:37:18,317 INFO [train.py:1114] (3/4) Epoch 20, batch 3500, loss[loss=0.1714, simple_loss=0.2541, pruned_loss=0.04441, over 13512.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2671, pruned_loss=0.04673, over 2630826.19 frames. ], batch size: 34, lr: 6.14e-03, grad_scale: 16.0 +2024-08-03 23:37:27,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=266482.3333333333, ans=0.0 +2024-08-03 23:37:32,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266482.3333333333, ans=0.1 +2024-08-03 23:37:32,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=266482.3333333333, ans=0.0 +2024-08-03 23:37:45,026 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-08-03 23:37:52,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=266592.3333333333, ans=0.125 +2024-08-03 23:38:02,726 INFO [train.py:1114] (3/4) Epoch 20, batch 3550, loss[loss=0.2114, simple_loss=0.2947, pruned_loss=0.06401, over 12616.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2691, pruned_loss=0.04766, over 2628564.89 frames. ], batch size: 58, lr: 6.13e-03, grad_scale: 16.0 +2024-08-03 23:38:10,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266629.0, ans=0.1 +2024-08-03 23:38:18,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266665.6666666667, ans=0.1 +2024-08-03 23:38:28,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=11.01 vs. 
limit=12.0 +2024-08-03 23:38:29,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=266739.0, ans=0.125 +2024-08-03 23:38:31,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=266739.0, ans=0.125 +2024-08-03 23:38:33,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=266739.0, ans=0.125 +2024-08-03 23:38:35,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=266739.0, ans=0.125 +2024-08-03 23:38:36,933 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 9.117e+01 1.134e+02 1.257e+02 1.418e+02 2.840e+02, threshold=2.514e+02, percent-clipped=1.0 +2024-08-03 23:38:40,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=266775.6666666667, ans=0.125 +2024-08-03 23:38:47,706 INFO [train.py:1114] (3/4) Epoch 20, batch 3600, loss[loss=0.2168, simple_loss=0.2939, pruned_loss=0.06982, over 9401.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.273, pruned_loss=0.05067, over 2487763.85 frames. ], batch size: 96, lr: 6.13e-03, grad_scale: 32.0 +2024-08-03 23:38:51,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=266812.3333333333, ans=0.0 +2024-08-03 23:39:22,577 INFO [train.py:1387] (3/4) Done! diff --git a/zipformer/pretrained/non_ctc/causal/exp/tensorboard/events.out.tfevents.1722666206.cdr2656.int.cedar.computecanada.ca.71.0 b/zipformer/pretrained/non_ctc/causal/exp/tensorboard/events.out.tfevents.1722666206.cdr2656.int.cedar.computecanada.ca.71.0 new file mode 100644 index 0000000000000000000000000000000000000000..a82653f5d07741842b6a07e82c848c63ebd10a65 --- /dev/null +++ b/zipformer/pretrained/non_ctc/causal/exp/tensorboard/events.out.tfevents.1722666206.cdr2656.int.cedar.computecanada.ca.71.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b920407a50535fd97f4a08b14d1dc835630942a59b606ba3e08fa6e4310e63bc +size 705034 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/best-train-loss.pt b/zipformer/pretrained/non_ctc/non_causal/exp/best-train-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..caff16bc9ae513b2e58b40b9c10d65aab7646ccf --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/best-train-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a77e9f0340d10962e2f6646581ee52bd601e5c118ee8bdea75d1ae8f11a455f6 +size 1049768014 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/best-valid-loss.pt b/zipformer/pretrained/non_ctc/non_causal/exp/best-valid-loss.pt new file mode 100644 index 0000000000000000000000000000000000000000..caff16bc9ae513b2e58b40b9c10d65aab7646ccf --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/best-valid-loss.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a77e9f0340d10962e2f6646581ee52bd601e5c118ee8bdea75d1ae8f11a455f6 +size 1049768014 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-100000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-100000.pt new file mode 100644 index 0000000000000000000000000000000000000000..b861283b53fe92ef261206f5211b220125cc2809 --- /dev/null +++ 
b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-100000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abe0dfc38ba7123b6c0dd4582404d0d80bf5231571489cc396fd4463a4c74008 +size 1049785219 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-104000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-104000.pt new file mode 100644 index 0000000000000000000000000000000000000000..3e21cf7915ada53172f26f95c38f90fe6f5e2173 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-104000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c61e1f5bdfee907d81c84c2fd58da7a569a39301b5b113d3052d79436daebfb +size 1049785219 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-108000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-108000.pt new file mode 100644 index 0000000000000000000000000000000000000000..2d0b7ccdbf3751b52a91a859980d335bfd78605a --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-108000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac027b995eaf4d881a4dd08076487176a5854c1bd031c595bdfaf7d96c9308b9 +size 1049785219 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-112000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-112000.pt new file mode 100644 index 0000000000000000000000000000000000000000..604fb810563f235ecf5135c0d303d87a4eec981f --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-112000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d415cc31a75d07b424011c6e3de0aa6ed5d841af5b83b67c6f295288a04226b8 +size 1049785219 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-116000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-116000.pt new file mode 100644 index 0000000000000000000000000000000000000000..0ceeaa26fe35b37d63dce8158853fcf1fb40a45b --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-116000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d847c8600780e86273b02b554c34d843a8f215a6a41dabcc57f3a7f1ab93164 +size 1049785283 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-120000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-120000.pt new file mode 100644 index 0000000000000000000000000000000000000000..c3d22e94dcfa1ed01b1f1d8a96d3ff74a9e2ce81 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-120000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c6cbad97e461fe0e3ff6f347e1bd46d7560b4346b36a44d0ceda9c595d9fac3 +size 1049785283 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-124000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-124000.pt new file mode 100644 index 0000000000000000000000000000000000000000..4617c3be9ba38117dd907654baddda3d15605539 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-124000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ccf544baebed500fb1c4f1977aa21ec6ba60f346cfa065e3d1d0fc8e7afe2cd7 +size 1049785347 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-128000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-128000.pt new file mode 100644 index 0000000000000000000000000000000000000000..3484d810d6c05c64783caf2fa641e3a7cd4c4738 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-128000.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:3d4207a4d458afd55e4b99cacf0aafb07cda2c8118ac3b980c40e1214e491608 +size 1049785347 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-132000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-132000.pt new file mode 100644 index 0000000000000000000000000000000000000000..a2e4cac107f132d89d8872332499ba794a32fc86 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-132000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a017711a15de6823d4db593c78039c0eeff7127a9cc4ce74b120886a1bb933a8 +size 1049785347 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-136000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-136000.pt new file mode 100644 index 0000000000000000000000000000000000000000..830d84e9a7d10d0cdf676b5d1d744544394e2c10 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-136000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc2bc3bca29419b040fd405e1a9c60b0df07450b7e2cf7acaf703b9098ef9938 +size 1049785411 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-140000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-140000.pt new file mode 100644 index 0000000000000000000000000000000000000000..fe67121bec298fdf527f08428f3630b5d33d787c --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-140000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:595674125198ae924b6ec28715bf5255473965793b8765fb752847ae5ea79a71 +size 1049785411 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-144000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-144000.pt new file mode 100644 index 0000000000000000000000000000000000000000..5d1c87a62134330f46a308bdf89bfc165c30e2f5 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-144000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3684642c0dc69230489fa41d0f6e420747d2c6f13afe8380ef8571b0f955474 +size 1049785411 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-148000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-148000.pt new file mode 100644 index 0000000000000000000000000000000000000000..1eb9b32d22a7321e6855b623424cb80b9d652ecb --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-148000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2122250344839bfdb78a9085e214fcae8375cb2b39eb3b1b7ca8344736dd912 +size 1049785411 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-152000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-152000.pt new file mode 100644 index 0000000000000000000000000000000000000000..1fd76fc621fe41e847338f4c2e70d01e50877e33 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-152000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71eed468154ec25672dafabe698f13ae11ce1374385d3612b7689c410b54768d +size 1049785411 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-156000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-156000.pt new file mode 100644 index 0000000000000000000000000000000000000000..00a408d4a59d6f1e3ddb33670424155c3b546bde --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-156000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:5e769fcea36b01654f7b582fc0a51699450e0afac9e11996ffb9ce5fac070033 +size 1049785475 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-160000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-160000.pt new file mode 100644 index 0000000000000000000000000000000000000000..ca0e328cdcae22bb694f76f027d7fdb01e88698e --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-160000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7738642b10e9e41beb1cad6559f9ef93221db4cda4239e23e9720a2a1e91f40b +size 1049785475 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-164000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-164000.pt new file mode 100644 index 0000000000000000000000000000000000000000..adef1e3b2eb2a893272679e3a068e283238a5db3 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-164000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be882262ee83be82a0cc8c469b3192aed618b62c7d24c703f3cc3af8ed13dd55 +size 1049785539 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-168000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-168000.pt new file mode 100644 index 0000000000000000000000000000000000000000..a06b711729f7996489451031e0d852ca69bb7490 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-168000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:34cb3a16adb462b08b26263381978d452e6ddf48c974bebf7a87baa0803d792e +size 1049785539 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-172000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-172000.pt new file mode 100644 index 0000000000000000000000000000000000000000..4fec431e86e3d3c571c01c972631b5f7eebc1a01 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-172000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d662958d82d04e06dcc8cb00d3ff264ec78e7c525e7adc374620655cf0f7e059 +size 1049785539 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-176000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-176000.pt new file mode 100644 index 0000000000000000000000000000000000000000..18eb8f9337992abd18a02cbc62c76fd63c04426a --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-176000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:445dbc4eed311c5652773ace8426876e196686e5835ae3ea445436e8d78cd76b +size 1049785603 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-180000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-180000.pt new file mode 100644 index 0000000000000000000000000000000000000000..aa6ca7ab79f7be4f67e19dd797d18d2ad9e9a27c --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-180000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed6020edc09cb7ec0b223914902bdbd54508e60bb1aae7326f1cce22c4c58005 +size 1049785603 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-184000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-184000.pt new file mode 100644 index 0000000000000000000000000000000000000000..9295f6db2ba69affa5f5f7e360d5cf01436e668e --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-184000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4cb11a34d8331a5739a62accc2595bd66f03a3f0db4cc6e8a965ddf4cb1a676 +size 1049785603 diff 
--git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-188000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-188000.pt new file mode 100644 index 0000000000000000000000000000000000000000..d3e74175ee88c4fc04284be100da498065a09d40 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-188000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:752f0ec091e0a7942410ae21485891ca8583be5c09421d959aad9852f534955e +size 1049785667 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-192000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-192000.pt new file mode 100644 index 0000000000000000000000000000000000000000..9018623d3558bd31dac62f0104c625509c7aa99a --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-192000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5999d8090fdb0e67d2a7dd50214b4ba6f5a2ffd4587ac55ecb471c916a78832e +size 1049785667 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-196000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-196000.pt new file mode 100644 index 0000000000000000000000000000000000000000..2ace0cfffe6bbed55fc69e16d3c613b26314c032 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-196000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0876cc6474439a9f60017365465330e03dfa9aa9fbdc0a06aa19cdcfed06fea0 +size 1049785667 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-200000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-200000.pt new file mode 100644 index 0000000000000000000000000000000000000000..6df41f01bcaaefeae2e8fd7f44e6acc1bef36534 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-200000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16cec78d2adc526a874faad7bc6a5b26e2b3cc384c982a71b91350cf5387100b +size 1049785667 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-204000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-204000.pt new file mode 100644 index 0000000000000000000000000000000000000000..a7d43c4fa408fc17b4c8af4f368290d009de7d9c --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-204000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aeedc8734eb7596ce04a71d5e520a7704932caea77af81cbb77f57b6c74373fb +size 1049785667 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-88000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-88000.pt new file mode 100644 index 0000000000000000000000000000000000000000..22e35b23103a2cd8f805d95634b1bf011c83fb7c --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-88000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff51df44cd93eee6a1b1966e3760c60e943269584497b0019e9d90c549b27620 +size 1049783222 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-92000.pt b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-92000.pt new file mode 100644 index 0000000000000000000000000000000000000000..a1611ef2c58848ecf2ea9701e0814f141d8dfa59 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-92000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31f496f619f1ccaa59c2283a4f8220ee25bc4c7ff239791f1e94d96707c441c4 +size 1049783222 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-96000.pt 
b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-96000.pt new file mode 100644 index 0000000000000000000000000000000000000000..eed0de472dd5c04021c20656a8f01884a94f0848 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/checkpoint-96000.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:73a16bf1f5ba7e4422808d375022dfe3bb65cf3e524dc9338f1fa2f9b94555cb +size 1049783286 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-1.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-1.pt new file mode 100644 index 0000000000000000000000000000000000000000..7db18bc08fa6d6747db73b23e052030d9ccff13d --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d86ff374ba1671e561209bd317df885e54b035dde5d5870e06a1d331bfbb3fb +size 1049764481 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-10.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-10.pt new file mode 100644 index 0000000000000000000000000000000000000000..aa56fff67ee02393e57597b12b6f613a3381a993 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-10.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10654ba49d9e24690bf3171feb3f2804154ed544d2df03665f69c92395fe6dda +size 1049767566 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-11.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-11.pt new file mode 100644 index 0000000000000000000000000000000000000000..f0a97f727f6a8273a2a8ea5e71e9c72909cfb7d2 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-11.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ad4ad5444e032b523f20b5876b77b9ab74654a3396c752f23fb2697378648c6 +size 1049767566 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-12.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-12.pt new file mode 100644 index 0000000000000000000000000000000000000000..241d4f6ade029c539331e42f645d3389f8180f3b --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-12.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b4fc05c5094e5a1e737ea9e3fd13d1baa9ced63580d247e9c220b703386859f +size 1049767630 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-13.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-13.pt new file mode 100644 index 0000000000000000000000000000000000000000..a964c065bf6f67ed615753bb080af06e724a98c1 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-13.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e807882364881fd59d45d35599ea1c61b9830e3158ffd95c26a74f39cb933c9 +size 1049767694 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-14.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-14.pt new file mode 100644 index 0000000000000000000000000000000000000000..a60e076c56980fae958f5befff92b8107c66221b --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-14.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba0691c45971cd561f4e32b841e673d4a0bde6dc3edd85c1f9af84a0f4fe08e1 +size 1049767758 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-15.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-15.pt new file mode 100644 index 0000000000000000000000000000000000000000..0a321d3016b7199c9fdf6fe768c5dc0d1fbfce20 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-15.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:de18d3d553aca335672fc55ba9db12fa6ff5052ac2380786148577ef73555cf5 +size 1049767758 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-16.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-16.pt new file mode 100644 index 0000000000000000000000000000000000000000..2454aff98774c63ad21b0b14441a8c4c48c77340 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-16.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7428fd7b70e742ea02c040049b8743841c1089000d24f461d2047254df916620 +size 1049767822 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-17.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-17.pt new file mode 100644 index 0000000000000000000000000000000000000000..c0df687ae023f6d51c791d73795a5db98c3e5c96 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-17.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d7f4b6f542137b31484347df1da9e076ec1b68d1ccba746da0644bd2937895f +size 1049767886 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-18.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-18.pt new file mode 100644 index 0000000000000000000000000000000000000000..18aeca828fd73baf0c41f7ec02a79be321542fdb --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-18.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14431544774e583da8282768ab27c6ac72b2aa08d251a898e5b0512b1a3dd041 +size 1049767950 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-19.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-19.pt new file mode 100644 index 0000000000000000000000000000000000000000..f79c9b30e97aa289cb2f3fc9619959a7f8dd9ec5 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-19.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a18d26155d030331ad7f2e369890642d836ec06cf7b0a83c2e5bfed28f767b7 +size 1049767950 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-2.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-2.pt new file mode 100644 index 0000000000000000000000000000000000000000..8612d91395e18b5a4c373585d5728b08a1da6903 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-2.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b71b3850e581c05d9976ae341abea040524ec318c7b6aea484cc080e0c40a416 +size 1049764545 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-20.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-20.pt new file mode 100644 index 0000000000000000000000000000000000000000..caff16bc9ae513b2e58b40b9c10d65aab7646ccf --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-20.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a77e9f0340d10962e2f6646581ee52bd601e5c118ee8bdea75d1ae8f11a455f6 +size 1049768014 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-3.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-3.pt new file mode 100644 index 0000000000000000000000000000000000000000..c243d4ce223fea66f1254448c5a349360bcc1bad --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-3.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18a2592a4a21b5248b38b180b775ea4f6eca44ba8aadfcac48d6f5d78dafebef +size 1049764545 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-4.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-4.pt new file mode 100644 index 
0000000000000000000000000000000000000000..8070ca87cc00803f0784ea38ed035cf40013fa41 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-4.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d583a8ac4166d5c336c64fc82c9e8dba5fea188c215f1ce6fe227feee32b7dd +size 1049764609 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-5.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-5.pt new file mode 100644 index 0000000000000000000000000000000000000000..1d6c7d9b507847bd961fb60edacfd37137f1f860 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-5.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f20b4ce232727812974d9078b800b322ea7c4ce319b6244e82483583e45b281 +size 1049764673 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-6.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-6.pt new file mode 100644 index 0000000000000000000000000000000000000000..ab8361925a401c8d03baa107e14786eb87fe6b15 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-6.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a752e303b10e81717e65433df1ce396c22ce574d3698fab9325d176e71423ba +size 1049764737 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-7.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-7.pt new file mode 100644 index 0000000000000000000000000000000000000000..06e1159be3de177833ec2f09a5affb9bde19caa0 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-7.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b572e0e7304a235f37e88a799ef6fd40ffeae6271fb3b7792472861105f0f0b9 +size 1049764929 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-8.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-8.pt new file mode 100644 index 0000000000000000000000000000000000000000..3b38767cd185ed1bd5b3e034a23cf72bb5fe1841 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-8.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:280f3ae2a565f609a321bbf8cd09a073a26fe6beaeb7eb9cc4f77fd6da583d73 +size 1049764993 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/epoch-9.pt b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-9.pt new file mode 100644 index 0000000000000000000000000000000000000000..59be5b8acd606191af861d905c217c48ee404b05 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/epoch-9.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c075a5366343bd653744e97a5be65efc3289934ee20b5a8f5abd412c1224fae +size 1049765057 diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-05-0 b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-05-0 new file mode 100644 index 0000000000000000000000000000000000000000..9bfa2b13f13563966fc76f2e0390ee5cc1e38da1 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-05-0 @@ -0,0 +1,28868 @@ +2024-07-27 09:10:05,022 INFO [train.py:1182] (0/4) Training started +2024-07-27 09:10:05,079 INFO [train.py:1192] (0/4) Device: cuda:0 +2024-07-27 09:10:05,172 INFO [train.py:1210] (0/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 
'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2658.int.cedar.computecanada.ca', 'IP address': '172.16.146.95'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('zipformer/libri/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 200.0, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-07-27 09:10:05,172 INFO [train.py:1212] (0/4) About to create model +2024-07-27 09:10:23,765 INFO [train.py:1216] (0/4) Number of model parameters: 65549011 +2024-07-27 09:10:25,010 INFO [train.py:1231] (0/4) Using DDP +2024-07-27 09:11:00,504 INFO [asr_datamodule.py:893] (0/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:696] (0/4) Disable MUSAN +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:714] (0/4) Enable SpecAugment +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:715] (0/4) Time warp factor: 80 +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:725] (0/4) Num frame mask: 10 +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:738] (0/4) About to create train dataset +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:765] (0/4) Using DynamicBucketingSampler. 
+2024-07-27 09:11:02,461 INFO [asr_datamodule.py:782] (0/4) About to create train dataloader +2024-07-27 09:11:02,462 INFO [asr_datamodule.py:910] (0/4) About to get dev-clean cuts +2024-07-27 09:11:02,593 INFO [asr_datamodule.py:917] (0/4) About to get dev-other cuts +2024-07-27 09:11:03,488 INFO [asr_datamodule.py:813] (0/4) About to create dev dataset +2024-07-27 09:11:03,817 INFO [asr_datamodule.py:830] (0/4) About to create dev dataloader +2024-07-27 09:11:03,818 INFO [train.py:1435] (0/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-07-27 09:17:48,889 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=192, metric=42.80 vs. limit=7.5 +2024-07-27 09:17:49,702 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 3353MB +2024-07-27 09:17:50,297 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 3353MB +2024-07-27 09:17:54,280 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 3353MB +2024-07-27 09:17:55,232 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 3353MB +2024-07-27 09:18:08,384 INFO [scaling.py:1024] (0/4) Whitening: name=None, num_groups=1, num_channels=288, metric=71.80 vs. limit=5.0 +2024-07-27 09:18:08,567 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 3353MB +2024-07-27 09:18:09,388 INFO [train.py:1463] (0/4) Maximum memory allocated so far is 3353MB +2024-07-27 09:18:51,935 INFO [train.py:1114] (0/4) Epoch 1, batch 0, loss[loss=7.689, simple_loss=7.004, pruned_loss=6.841, over 4846.00 frames. ], tot_loss[loss=7.689, simple_loss=7.004, pruned_loss=6.841, over 4846.00 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 2.0 +2024-07-27 09:18:51,937 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 09:19:27,483 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=7.631, simple_loss=6.945, pruned_loss=6.846, over 944034.00 frames. +2024-07-27 09:19:27,484 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 3371MB +2024-07-27 09:19:29,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=15.45 vs. limit=7.5 +2024-07-27 09:19:29,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=0.0, ans=0.1 +2024-07-27 09:19:29,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=13.60 vs. limit=7.5 +2024-07-27 09:19:31,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=3.0 +2024-07-27 09:19:38,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=31.34 vs. limit=7.5 +2024-07-27 09:19:41,228 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. limit=3.0 +2024-07-27 09:19:44,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=34.04 vs. 
limit=7.5 +2024-07-27 09:19:44,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=13.333333333333334, ans=0.1995 +2024-07-27 09:19:47,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=38.08 vs. limit=7.505 +2024-07-27 09:19:52,383 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+02 9.392e+02 1.009e+03 1.270e+03 1.305e+03, threshold=4.037e+03, percent-clipped=0.0 +2024-07-27 09:19:55,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=13.333333333333334, ans=5.008333333333334 +2024-07-27 09:20:02,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13.333333333333334, ans=0.29986666666666667 +2024-07-27 09:20:02,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=198.07 vs. limit=7.505 +2024-07-27 09:20:09,138 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 2.100e+02 8.784e+02 1.111e+03 1.403e+03, threshold=3.513e+03, percent-clipped=0.0 +2024-07-27 09:20:09,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=344.80 vs. limit=7.51 +2024-07-27 09:20:18,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=26.666666666666668, ans=0.2485 +2024-07-27 09:20:19,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=179.73 vs. limit=4.005333333333334 +2024-07-27 09:20:26,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=476.75 vs. limit=5.013333333333334 +2024-07-27 09:20:37,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=133.13 vs. limit=7.515 +2024-07-27 09:20:41,321 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 1.821e+02 2.209e+02 8.784e+02 1.403e+03, threshold=8.837e+02, percent-clipped=0.0 +2024-07-27 09:21:22,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=241.81 vs. limit=5.026666666666666 +2024-07-27 09:21:33,264 INFO [train.py:1114] (0/4) Epoch 1, batch 50, loss[loss=1.182, simple_loss=1.053, pruned_loss=1.161, over 4597.00 frames. ], tot_loss[loss=2.999, simple_loss=2.754, pruned_loss=2.384, over 206858.92 frames. ], batch size: 11, lr: 2.48e-02, grad_scale: 1.0 +2024-07-27 09:21:34,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=18.79 vs. 
limit=7.55 +2024-07-27 09:21:37,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66.66666666666667, ans=0.29933333333333334 +2024-07-27 09:21:46,805 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:21:50,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=373.09 vs. limit=5.04 +2024-07-27 09:21:51,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=156.26 vs. limit=7.53 +2024-07-27 09:21:52,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=391.02 vs. limit=7.53 +2024-07-27 09:21:58,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=215.15 vs. limit=7.53 +2024-07-27 09:22:00,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=205.59 vs. limit=5.046666666666667 +2024-07-27 09:22:01,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=93.33333333333333, ans=0.1965 +2024-07-27 09:22:02,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=93.33333333333333, ans=0.495625 +2024-07-27 09:22:18,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=106.66666666666667, ans=0.196 +2024-07-27 09:22:31,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=18.03 vs. limit=5.026666666666666 +2024-07-27 09:22:37,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120.0, ans=0.2988 +2024-07-27 09:22:38,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=120.0, ans=0.494375 +2024-07-27 09:22:54,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.76 vs. limit=3.018 +2024-07-27 09:23:02,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=156.38 vs. limit=7.545 +2024-07-27 09:23:03,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=48.66 vs. limit=4.048 +2024-07-27 09:23:04,779 INFO [train.py:1114] (0/4) Epoch 1, batch 100, loss[loss=1.205, simple_loss=1.044, pruned_loss=1.287, over 4637.00 frames. ], tot_loss[loss=2.043, simple_loss=1.848, pruned_loss=1.786, over 366321.42 frames. 
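Each per-batch line above reports three numbers: a simple (linear-encoder) transducer loss, the pruned transducer loss, and the combined `loss`. A minimal sketch of the combination, assuming the usual icefall pruned-transducer warmup convention (the exact code lives in the recipe's train.py): the simple-loss weight ramps down from 1.0 to `simple_loss_scale` over `warm_step` batches while the pruned-loss weight ramps up from 0.1 to 1.0. The logged values are consistent with this, as the assertions below check.

```python
# Sketch of how the logged `loss` is formed from `simple_loss` and
# `pruned_loss`; an assumption based on the usual icefall convention,
# with simple_loss_scale=0.5 and warm_step=2000 from the config above.
def combine_losses(simple_loss: float, pruned_loss: float,
                   batch_idx_train: int,
                   simple_loss_scale: float = 0.5,
                   warm_step: int = 2000) -> float:
    r = min(batch_idx_train / warm_step, 1.0)
    s = 1.0 - r * (1.0 - simple_loss_scale)  # 1.0 -> simple_loss_scale
    p = 0.1 + 0.9 * r                        # 0.1 -> 1.0
    return s * simple_loss + p * pruned_loss

# Batch 0 logs loss=7.689, simple_loss=7.004, pruned_loss=6.841:
assert abs(combine_losses(7.004, 6.841, 0) - 7.689) < 2e-3
# Batch 100 logs loss=1.205, simple_loss=1.044, pruned_loss=1.287:
assert abs(combine_losses(1.044, 1.287, 100) - 1.205) < 2e-3
```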
], batch size: 12, lr: 2.70e-02, grad_scale: 2.0 +2024-07-27 09:23:06,809 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.579e+01 2.513e+01 6.174e+01 1.938e+02 1.403e+03, threshold=1.235e+02, percent-clipped=0.0 +2024-07-27 09:23:12,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=35.25 vs. limit=5.033333333333333 +2024-07-27 09:23:13,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=50.14 vs. limit=7.55 +2024-07-27 09:23:13,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=27.70 vs. limit=5.033333333333333 +2024-07-27 09:23:19,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133.33333333333334, ans=0.48333333333333334 +2024-07-27 09:23:26,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=26.64 vs. limit=7.555 +2024-07-27 09:23:28,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=172.71 vs. limit=7.555 +2024-07-27 09:23:34,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=54.22 vs. limit=4.064 +2024-07-27 09:23:42,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=54.12 vs. limit=5.08 +2024-07-27 09:23:44,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=78.19 vs. limit=7.56 +2024-07-27 09:23:59,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=173.33333333333334, ans=0.491875 +2024-07-27 09:24:03,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=196.72 vs. limit=7.565 +2024-07-27 09:24:03,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=173.33333333333334, ans=0.491875 +2024-07-27 09:24:04,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173.33333333333334, ans=0.47833333333333333 +2024-07-27 09:24:05,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=46.14 vs. limit=7.565 +2024-07-27 09:24:11,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=45.43 vs. limit=7.57 +2024-07-27 09:24:17,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=32.09 vs. limit=5.046666666666667 +2024-07-27 09:24:19,849 INFO [train.py:1114] (0/4) Epoch 1, batch 150, loss[loss=1.016, simple_loss=0.8669, pruned_loss=1.084, over 4608.00 frames. ], tot_loss[loss=1.656, simple_loss=1.477, pruned_loss=1.537, over 494825.28 frames. 
], batch size: 11, lr: 2.93e-02, grad_scale: 2.0 +2024-07-27 09:24:21,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=102.23 vs. limit=7.575 +2024-07-27 09:24:37,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=24.09 vs. limit=4.085333333333334 +2024-07-27 09:24:37,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=3.032 +2024-07-27 09:24:46,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=268.90 vs. limit=7.585 +2024-07-27 09:24:47,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=226.66666666666666, ans=0.04929166666666667 +2024-07-27 09:25:04,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=30.99 vs. limit=7.68 +2024-07-27 09:25:04,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=99.27 vs. limit=7.59 +2024-07-27 09:25:08,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=43.72 vs. limit=7.595 +2024-07-27 09:25:12,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=222.55 vs. limit=7.595 +2024-07-27 09:25:23,005 INFO [train.py:1114] (0/4) Epoch 1, batch 200, loss[loss=1.069, simple_loss=0.9119, pruned_loss=1.064, over 4524.00 frames. ], tot_loss[loss=1.438, simple_loss=1.268, pruned_loss=1.372, over 594126.75 frames. ], batch size: 21, lr: 3.15e-02, grad_scale: 4.0 +2024-07-27 09:25:24,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.807e+01 2.398e+01 2.890e+01 3.614e+01 1.455e+02, threshold=5.780e+01, percent-clipped=1.0 +2024-07-27 09:25:26,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=117.01 vs. limit=7.6 +2024-07-27 09:25:29,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=266.6666666666667, ans=0.4875 +2024-07-27 09:25:49,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=19.44 vs. limit=7.605 +2024-07-27 09:25:50,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=280.0, ans=0.8902 +2024-07-27 09:25:52,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=280.0, ans=0.8902 +2024-07-27 09:25:53,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=40.37 vs. limit=7.71 +2024-07-27 09:25:56,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=14.04 vs. 
limit=5.07 +2024-07-27 09:25:58,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=65.84 vs. limit=7.61 +2024-07-27 09:26:06,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.20 vs. limit=7.72 +2024-07-27 09:26:20,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=38.28 vs. limit=5.153333333333333 +2024-07-27 09:26:23,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=467.97 vs. limit=7.615 +2024-07-27 09:26:33,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=320.0, ans=0.2968 +2024-07-27 09:26:46,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=67.01 vs. limit=7.625 +2024-07-27 09:26:46,525 INFO [train.py:1114] (0/4) Epoch 1, batch 250, loss[loss=1.066, simple_loss=0.8967, pruned_loss=1.059, over 4643.00 frames. ], tot_loss[loss=1.306, simple_loss=1.14, pruned_loss=1.261, over 670890.45 frames. ], batch size: 16, lr: 3.38e-02, grad_scale: 4.0 +2024-07-27 09:26:53,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=17.54 vs. limit=7.625 +2024-07-27 09:26:55,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=333.3333333333333, ans=0.4583333333333333 +2024-07-27 09:26:57,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=17.84 vs. limit=7.625 +2024-07-27 09:27:07,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=346.6666666666667, ans=0.0922 +2024-07-27 09:27:07,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=346.6666666666667, ans=0.8878666666666667 +2024-07-27 09:27:07,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=360.0, ans=0.1865 +2024-07-27 09:27:14,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=44.67 vs. limit=7.77 +2024-07-27 09:27:23,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=373.3333333333333, ans=0.2962666666666667 +2024-07-27 09:27:26,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=40.02 vs. 
limit=7.64 +2024-07-27 09:27:30,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=386.6666666666667, ans=0.0913 +2024-07-27 09:27:31,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=386.6666666666667, ans=0.481875 +2024-07-27 09:27:34,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=19.04 vs. limit=7.645 +2024-07-27 09:27:38,728 INFO [train.py:1114] (0/4) Epoch 1, batch 300, loss[loss=0.9742, simple_loss=0.8145, pruned_loss=0.9385, over 4787.00 frames. ], tot_loss[loss=1.217, simple_loss=1.052, pruned_loss=1.179, over 730416.43 frames. ], batch size: 15, lr: 3.60e-02, grad_scale: 8.0 +2024-07-27 09:27:40,100 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.723e+01 3.145e+01 3.570e+01 4.574e+01 1.008e+02, threshold=7.140e+01, percent-clipped=16.0 +2024-07-27 09:27:45,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=24.04 vs. limit=5.2 +2024-07-27 09:27:48,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=69.76 vs. limit=7.65 +2024-07-27 09:27:52,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=62.58 vs. limit=7.655 +2024-07-27 09:27:52,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=413.3333333333333, ans=0.29586666666666667 +2024-07-27 09:27:53,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=166.40 vs. limit=7.655 +2024-07-27 09:27:54,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=17.40 vs. limit=5.1033333333333335 +2024-07-27 09:27:54,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=184.57 vs. limit=5.206666666666667 +2024-07-27 09:27:58,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=413.3333333333333, ans=0.480625 +2024-07-27 09:27:59,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=413.3333333333333, ans=0.480625 +2024-07-27 09:28:02,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.56 vs. limit=4.1706666666666665 +2024-07-27 09:28:06,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=204.46 vs. limit=7.66 +2024-07-27 09:28:11,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=67.50 vs. limit=7.665 +2024-07-27 09:28:16,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=29.32 vs. 
limit=7.83 +2024-07-27 09:28:16,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=22.91 vs. limit=7.665 +2024-07-27 09:28:18,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=38.71 vs. limit=5.22 +2024-07-27 09:28:18,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=440.0, ans=0.479375 +2024-07-27 09:28:22,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=18.82 vs. limit=5.226666666666667 +2024-07-27 09:28:25,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=453.3333333333333, ans=0.47875 +2024-07-27 09:28:28,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=91.22 vs. limit=7.67 +2024-07-27 09:28:28,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=453.3333333333333, ans=0.183 +2024-07-27 09:28:31,020 INFO [train.py:1114] (0/4) Epoch 1, batch 350, loss[loss=0.9412, simple_loss=0.7758, pruned_loss=0.9051, over 4936.00 frames. ], tot_loss[loss=1.154, simple_loss=0.9887, pruned_loss=1.117, over 776030.67 frames. ], batch size: 12, lr: 3.83e-02, grad_scale: 8.0 +2024-07-27 09:28:31,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=466.6666666666667, ans=0.29533333333333334 +2024-07-27 09:28:41,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=480.0, ans=0.7548 +2024-07-27 09:28:44,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=37.42 vs. limit=7.86 +2024-07-27 09:28:45,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.87 vs. limit=5.24 +2024-07-27 09:28:47,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=57.44 vs. limit=7.68 +2024-07-27 09:28:57,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=493.3333333333333, ans=0.7549333333333333 +2024-07-27 09:29:04,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=506.6666666666667, ans=0.2076 +2024-07-27 09:29:22,935 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=3.284e+01 +2024-07-27 09:29:23,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=520.0, ans=0.1805 +2024-07-27 09:29:27,545 INFO [train.py:1114] (0/4) Epoch 1, batch 400, loss[loss=0.9504, simple_loss=0.7793, pruned_loss=0.8888, over 4698.00 frames. ], tot_loss[loss=1.108, simple_loss=0.9402, pruned_loss=1.067, over 813648.77 frames. 
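The `lr` printed with each batch line rises from 2.25e-02 at batch 0 to 4.49e-02 around batch 500 and then decays very slowly. This matches the Eden schedule from icefall's optim.py with base_lr=0.045, lr_batches=7500 and lr_epochs=3.5 from the config above. The sketch below is a simplification: the 500-batch linear warmup from half the base lr is inferred from the logged values rather than taken from optim.py, and the epoch counter is treated as 0 during the first epoch.

```python
# Simplified sketch of the Eden learning-rate schedule; the warmup form
# is inferred from the logged lr values, not copied from optim.py.
def eden_lr(batch: int, epoch: float, base_lr: float = 0.045,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5,
            warmup_batches: float = 500.0) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    warmup = min(0.5 + 0.5 * batch / warmup_batches, 1.0)
    return base_lr * batch_factor * epoch_factor * warmup

print(f"{eden_lr(0, 0):.3e}")    # 2.250e-02; batch 0 logs "lr: 2.25e-02"
print(f"{eden_lr(400, 0):.3e}")  # 4.047e-02; batch 400 logs "lr: 4.05e-02"
```

After the warmup saturates near batch 500 the schedule gives about 4.495e-02 (logged as 4.49e-02) and then decays gently, consistent with the 4.48e-02 logged later in the epoch.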
], batch size: 13, lr: 4.05e-02, grad_scale: 16.0 +2024-07-27 09:29:29,105 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.881e+01 3.675e+01 4.330e+01 5.451e+01 8.565e+01, threshold=8.660e+01, percent-clipped=3.0 +2024-07-27 09:29:31,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=533.3333333333334, ans=0.22 +2024-07-27 09:29:32,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.01 vs. limit=5.266666666666667 +2024-07-27 09:29:41,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=546.6666666666666, ans=0.474375 +2024-07-27 09:29:44,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.97 vs. limit=7.91 +2024-07-27 09:29:48,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=7.71 +2024-07-27 09:29:56,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.21 vs. limit=5.14 +2024-07-27 09:29:56,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=30.28 vs. limit=7.71 +2024-07-27 09:30:18,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=43.73 vs. limit=7.72 +2024-07-27 09:30:42,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.47 vs. limit=5.293333333333333 +2024-07-27 09:31:09,333 INFO [train.py:1114] (0/4) Epoch 1, batch 450, loss[loss=1.006, simple_loss=0.8203, pruned_loss=0.9184, over 4635.00 frames. ], tot_loss[loss=1.075, simple_loss=0.9052, pruned_loss=1.025, over 839209.90 frames. ], batch size: 13, lr: 4.28e-02, grad_scale: 16.0 +2024-07-27 09:31:13,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=20.20 vs. limit=5.3 +2024-07-27 09:31:16,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=600.0, ans=0.425 +2024-07-27 09:31:24,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=70.22 vs. limit=5.306666666666667 +2024-07-27 09:31:42,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.11 vs. limit=5.156666666666666 +2024-07-27 09:31:44,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=626.6666666666666, ans=0.29373333333333335 +2024-07-27 09:31:45,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=36.99 vs. 
limit=7.97 +2024-07-27 09:36:53,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=41.49 vs. limit=7.735 +2024-07-27 09:37:00,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=4.256 +2024-07-27 09:37:01,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=640.0, ans=0.47 +2024-07-27 09:37:03,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.93 vs. limit=4.256 +2024-07-27 09:37:07,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=7.04 vs. limit=4.256 +2024-07-27 09:37:09,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=640.0, ans=0.47 +2024-07-27 09:37:09,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=640.0, ans=0.08560000000000001 +2024-07-27 09:37:17,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=653.3333333333334, ans=0.5 +2024-07-27 09:37:21,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=666.6666666666666, ans=0.46875 +2024-07-27 09:37:22,255 INFO [train.py:1114] (0/4) Epoch 1, batch 500, loss[loss=1.079, simple_loss=0.8786, pruned_loss=0.9539, over 4693.00 frames. ], tot_loss[loss=1.049, simple_loss=0.8774, pruned_loss=0.9872, over 862070.27 frames. ], batch size: 15, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:37:25,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+01 3.795e+01 4.382e+01 5.151e+01 8.333e+01, threshold=8.764e+01, percent-clipped=0.0 +2024-07-27 09:37:39,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.73 vs. limit=7.75 +2024-07-27 09:37:42,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=24.26 vs. limit=7.75 +2024-07-27 09:37:52,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=680.0, ans=0.468125 +2024-07-27 09:38:05,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=19.23 vs. limit=7.755 +2024-07-27 09:38:05,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.62 vs. limit=7.755 +2024-07-27 09:38:06,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=34.39 vs. limit=7.755 +2024-07-27 09:38:17,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=31.83 vs. limit=7.76 +2024-07-27 09:38:29,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.25 vs. 
limit=8.03 +2024-07-27 09:38:30,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=34.39 vs. limit=7.765 +2024-07-27 09:38:33,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=31.48 vs. limit=7.765 +2024-07-27 09:38:34,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.40 vs. limit=7.765 +2024-07-27 09:38:49,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=720.0, ans=0.8748 +2024-07-27 09:38:51,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=733.3333333333334, ans=0.29266666666666663 +2024-07-27 09:38:51,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.00 vs. limit=7.775 +2024-07-27 09:38:51,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=16.48 vs. limit=7.775 +2024-07-27 09:38:55,244 INFO [train.py:1114] (0/4) Epoch 1, batch 550, loss[loss=0.9748, simple_loss=0.7922, pruned_loss=0.8383, over 4632.00 frames. ], tot_loss[loss=1.027, simple_loss=0.8537, pruned_loss=0.95, over 877716.60 frames. ], batch size: 17, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:39:00,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.08 vs. limit=7.775 +2024-07-27 09:39:00,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.45 vs. limit=5.183333333333334 +2024-07-27 09:39:06,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=746.6666666666666, ans=0.4066666666666667 +2024-07-27 09:39:07,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=7.78 +2024-07-27 09:39:07,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.17 vs. limit=4.298666666666667 +2024-07-27 09:39:08,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.94 vs. limit=5.1866666666666665 +2024-07-27 09:39:15,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=12.45 vs. limit=7.785 +2024-07-27 09:39:17,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=12.03 vs. limit=4.304 +2024-07-27 09:39:17,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.02 vs. 
limit=8.07 +2024-07-27 09:39:20,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=760.0, ans=0.464375 +2024-07-27 09:39:30,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=16.98 vs. limit=7.79 +2024-07-27 09:39:31,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=773.3333333333334, ans=0.5 +2024-07-27 09:39:35,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=786.6666666666666, ans=8.09 +2024-07-27 09:39:38,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.16 vs. limit=5.3933333333333335 +2024-07-27 09:39:42,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=800.0, ans=0.082 +2024-07-27 09:39:43,016 INFO [train.py:1114] (0/4) Epoch 1, batch 600, loss[loss=0.9472, simple_loss=0.7665, pruned_loss=0.7987, over 4646.00 frames. ], tot_loss[loss=1.014, simple_loss=0.8378, pruned_loss=0.9218, over 892655.70 frames. ], batch size: 16, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:39:43,872 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+01 6.137e+01 8.087e+01 1.069e+02 3.258e+02, threshold=1.617e+02, percent-clipped=41.0 +2024-07-27 09:39:44,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=16.40 vs. limit=7.8 +2024-07-27 09:39:51,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=17.43 vs. limit=7.805 +2024-07-27 09:39:59,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=7.74 vs. limit=4.325333333333333 +2024-07-27 09:40:02,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.80 vs. limit=8.11 +2024-07-27 09:40:03,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=826.6666666666666, ans=0.46125 +2024-07-27 09:40:20,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=853.3333333333334, ans=0.5 +2024-07-27 09:40:21,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=853.3333333333334, ans=0.168 +2024-07-27 09:40:22,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=7.50 vs. limit=4.341333333333333 +2024-07-27 09:40:24,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=853.3333333333334, ans=0.168 +2024-07-27 09:40:30,099 INFO [train.py:1114] (0/4) Epoch 1, batch 650, loss[loss=0.9993, simple_loss=0.7978, pruned_loss=0.8434, over 4759.00 frames. ], tot_loss[loss=1.003, simple_loss=0.8237, pruned_loss=0.8972, over 904352.44 frames. 
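The WARNING lines from optim.py come from icefall's ScaledAdam gradient clipping: the optimizer keeps a window of recent total gradient norms, periodically reports their quartiles (min, 25%, median, 75%, max), and clips against a threshold proportional to the median. In the warnings above the threshold equals clipping_scale times the median, e.g. 2.0 * 8.087e+01 is roughly the 1.617e+02 threshold in the batch-600 warning (the very first warnings use a larger initial threshold). The class below is a hypothetical, much-simplified illustration, not icefall's actual implementation.

```python
# Hypothetical, simplified sketch of quartile-based gradient clipping.
from collections import deque
import torch

class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 400):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent total grad norms

    def clip_(self, params) -> None:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # scale * median
        if norm > threshold:  # scale all gradients down to the threshold
            for g in grads:
                g.mul_(threshold / norm)
```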
], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:40:31,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=4.346666666666667 +2024-07-27 09:40:34,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=866.6666666666666, ans=0.459375 +2024-07-27 09:40:35,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.76 vs. limit=4.346666666666667 +2024-07-27 09:40:35,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=30.71 vs. limit=7.825 +2024-07-27 09:40:36,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=11.99 vs. limit=7.825 +2024-07-27 09:40:36,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=7.825 +2024-07-27 09:40:54,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=893.3333333333334, ans=0.3883333333333333 +2024-07-27 09:40:55,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=893.3333333333334, ans=0.24106666666666665 +2024-07-27 09:40:58,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.27 vs. limit=8.17 +2024-07-27 09:41:00,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=893.3333333333334, ans=0.458125 +2024-07-27 09:41:01,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=906.6666666666666, ans=7.84 +2024-07-27 09:41:02,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=906.6666666666666, ans=0.4575 +2024-07-27 09:41:03,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.81 vs. limit=4.362666666666667 +2024-07-27 09:41:36,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=920.0, ans=0.1655 +2024-07-27 09:41:40,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.24 vs. limit=5.466666666666667 +2024-07-27 09:41:40,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.91 vs. limit=7.85 +2024-07-27 09:41:40,666 INFO [train.py:1114] (0/4) Epoch 1, batch 700, loss[loss=0.9659, simple_loss=0.7757, pruned_loss=0.7851, over 4644.00 frames. ], tot_loss[loss=0.9966, simple_loss=0.8143, pruned_loss=0.8757, over 912193.23 frames. 
], batch size: 12, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:41:41,492 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.672e+01 9.322e+01 1.196e+02 1.686e+02 3.909e+02, threshold=2.392e+02, percent-clipped=30.0 +2024-07-27 09:41:52,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=34.40 vs. limit=7.85 +2024-07-27 09:42:02,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=4.378666666666667 +2024-07-27 09:42:17,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.05 vs. limit=5.236666666666666 +2024-07-27 09:42:17,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=44.32 vs. limit=7.86 +2024-07-27 09:42:18,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=960.0, ans=0.455 +2024-07-27 09:42:22,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=960.0, ans=7.86 +2024-07-27 09:42:23,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.78 vs. limit=7.86 +2024-07-27 09:42:25,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=960.0, ans=0.5 +2024-07-27 09:42:28,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=973.3333333333334, ans=5.608333333333333 +2024-07-27 09:42:35,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.81 vs. limit=8.24 +2024-07-27 09:42:42,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=33.23 vs. limit=8.24 +2024-07-27 09:42:44,030 INFO [train.py:1114] (0/4) Epoch 1, batch 750, loss[loss=1.043, simple_loss=0.8393, pruned_loss=0.8248, over 4684.00 frames. ], tot_loss[loss=0.9853, simple_loss=0.8027, pruned_loss=0.8482, over 918835.19 frames. ], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:42:53,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=1013.3333333333334, ans=0.8645333333333334 +2024-07-27 09:42:55,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=1013.3333333333334, ans=0.0772 +2024-07-27 09:42:57,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=1013.3333333333334, ans=0.162 +2024-07-27 09:43:01,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=1026.6666666666667, ans=0.1615 +2024-07-27 09:43:01,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.20 vs. 
limit=8.27 +2024-07-27 09:43:03,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=1026.6666666666667, ans=5.641666666666667 +2024-07-27 09:43:23,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=25.64 vs. limit=7.89 +2024-07-27 09:43:24,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=16.12 vs. limit=7.89 +2024-07-27 09:43:26,004 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:43:26,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=16.52 vs. limit=7.895 +2024-07-27 09:43:26,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=1053.3333333333333, ans=0.04670833333333334 +2024-07-27 09:43:28,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=30.76 vs. limit=8.29 +2024-07-27 09:43:35,562 INFO [train.py:1114] (0/4) Epoch 1, batch 800, loss[loss=0.9207, simple_loss=0.7505, pruned_loss=0.6962, over 4846.00 frames. ], tot_loss[loss=0.9709, simple_loss=0.79, pruned_loss=0.8174, over 924092.77 frames. ], batch size: 12, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:43:36,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 7.305e+01 9.106e+01 1.068e+02 1.961e+02, threshold=1.821e+02, percent-clipped=0.0 +2024-07-27 09:43:36,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1066.6666666666667, ans=0.28933333333333333 +2024-07-27 09:43:37,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=17.02 vs. limit=7.9 +2024-07-27 09:43:40,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=1066.6666666666667, ans=0.45 +2024-07-27 09:43:42,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.10 vs. limit=8.3 +2024-07-27 09:43:42,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.52 vs. limit=5.533333333333333 +2024-07-27 09:43:53,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.24 vs. 
limit=8.31 +2024-07-27 09:44:39,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=1093.3333333333333, ans=0.44875 +2024-07-27 09:44:43,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=1106.6666666666667, ans=0.1585 +2024-07-27 09:44:51,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=1120.0, ans=0.4475 +2024-07-27 09:44:54,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.16 vs. limit=5.5600000000000005 +2024-07-27 09:45:05,385 INFO [train.py:1114] (0/4) Epoch 1, batch 850, loss[loss=0.9624, simple_loss=0.7931, pruned_loss=0.7, over 4663.00 frames. ], tot_loss[loss=0.9531, simple_loss=0.7764, pruned_loss=0.7826, over 928339.07 frames. ], batch size: 14, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:45:50,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.55 vs. limit=8.35 +2024-07-27 09:46:03,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1133.3333333333333, ans=0.2886666666666667 +2024-07-27 09:46:10,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=1146.6666666666667, ans=0.8598666666666667 +2024-07-27 09:46:11,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.68 vs. limit=8.36 +2024-07-27 09:46:19,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=1160.0, ans=0.07390000000000001 +2024-07-27 09:46:43,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.80 vs. limit=7.935 +2024-07-27 09:46:47,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.60 vs. limit=7.94 +2024-07-27 09:46:48,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.92 vs. limit=8.38 +2024-07-27 09:46:49,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.19 vs. limit=7.94 +2024-07-27 09:46:52,020 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.36 vs. limit=3.176 +2024-07-27 09:46:58,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=1186.6666666666667, ans=5.741666666666667 +2024-07-27 09:46:59,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=7.945 +2024-07-27 09:47:01,774 INFO [train.py:1114] (0/4) Epoch 1, batch 900, loss[loss=0.7753, simple_loss=0.646, pruned_loss=0.5437, over 4853.00 frames. ], tot_loss[loss=0.9292, simple_loss=0.7591, pruned_loss=0.7437, over 928679.00 frames. 
], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:05,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.472e+01 6.615e+01 8.339e+01 1.626e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-27 09:47:10,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=1200.0, ans=5.75 +2024-07-27 09:47:16,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=1213.3333333333333, ans=0.443125 +2024-07-27 09:47:17,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.45 vs. limit=5.303333333333334 +2024-07-27 09:47:24,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.02 vs. limit=5.613333333333333 +2024-07-27 09:47:29,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.56 vs. limit=7.96 +2024-07-27 09:47:33,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=1240.0, ans=0.0721 +2024-07-27 09:47:38,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.77 vs. limit=8.44 +2024-07-27 09:47:45,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=1253.3333333333333, ans=0.8561333333333334 +2024-07-27 09:47:47,820 INFO [train.py:1114] (0/4) Epoch 1, batch 950, loss[loss=0.8425, simple_loss=0.7056, pruned_loss=0.577, over 4770.00 frames. ], tot_loss[loss=0.9048, simple_loss=0.742, pruned_loss=0.7053, over 930054.28 frames. ], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:51,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=1266.6666666666667, ans=0.04604166666666667 +2024-07-27 09:47:52,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.50 vs. limit=7.975 +2024-07-27 09:48:04,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=1293.3333333333333, ans=0.0709 +2024-07-27 09:48:10,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1293.3333333333333, ans=0.439375 +2024-07-27 09:48:10,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.39 vs. limit=5.6466666666666665 +2024-07-27 09:48:21,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.11 vs. limit=5.653333333333333 +2024-07-27 09:48:30,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.21 vs. 
limit=4.5280000000000005 +2024-07-27 09:48:33,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=1320.0, ans=0.045875 +2024-07-27 09:48:38,203 INFO [train.py:1114] (0/4) Epoch 1, batch 1000, loss[loss=0.7168, simple_loss=0.6082, pruned_loss=0.4738, over 4960.00 frames. ], tot_loss[loss=0.8775, simple_loss=0.7231, pruned_loss=0.6661, over 929660.02 frames. ], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:48:38,636 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=8.0 +2024-07-27 09:48:39,232 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.604e+01 7.221e+01 8.711e+01 1.557e+02, threshold=1.444e+02, percent-clipped=4.0 +2024-07-27 09:48:40,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.02 vs. limit=8.0 +2024-07-27 09:48:43,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=1333.3333333333333, ans=5.333333333333333 +2024-07-27 09:48:44,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=1333.3333333333333, ans=0.22 +2024-07-27 09:48:54,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.30 vs. limit=8.51 +2024-07-27 09:48:59,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.19 vs. limit=5.34 +2024-07-27 09:49:03,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.01 vs. limit=8.52 +2024-07-27 09:49:10,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=1373.3333333333333, ans=0.0691 +2024-07-27 09:49:13,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.89 vs. limit=5.346666666666667 +2024-07-27 09:49:15,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=1386.6666666666667, ans=0.435 +2024-07-27 09:49:16,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=7.37 vs. limit=4.554666666666667 +2024-07-27 09:49:17,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.17 vs. limit=8.54 +2024-07-27 09:49:19,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1386.6666666666667, ans=0.435 +2024-07-27 09:49:19,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=1386.6666666666667, ans=0.14800000000000002 +2024-07-27 09:49:21,062 INFO [train.py:1114] (0/4) Epoch 1, batch 1050, loss[loss=0.8076, simple_loss=0.6863, pruned_loss=0.5265, over 4872.00 frames. ], tot_loss[loss=0.8492, simple_loss=0.7036, pruned_loss=0.6278, over 932108.35 frames. 
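The many ScheduledFloat messages record hyperparameters (skip rates, balancer probabilities, whitening limits) that are piecewise-linear functions of batch_count, with "ans" being the current interpolated value. A small sketch follows; the breakpoints used for conv_skip_rate are an assumption, chosen because they reproduce the logged values, e.g. ans=0.168 at batch_count=853.33 and ans=0.148 at batch_count=1386.67 above.

```python
# Sketch of the piecewise-linear schedule behind the ScheduledFloat lines.
# The conv_skip_rate breakpoints are assumed, chosen to match the log.
import numpy as np

def scheduled_float(batch_count, points):
    xs, ys = zip(*points)  # (batch_count, value) breakpoints
    return float(np.interp(batch_count, xs, ys))

conv_skip_rate = [(0.0, 0.2), (4000.0, 0.05), (16000.0, 0.0)]
print(f"{scheduled_float(853.33, conv_skip_rate):.4f}")   # 0.1680, as logged
print(f"{scheduled_float(1386.67, conv_skip_rate):.4f}")  # 0.1480, as logged
```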
], batch size: 14, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:49:27,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=1400.0, ans=0.434375 +2024-07-27 09:49:27,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=1400.0, ans=0.434375 +2024-07-27 09:49:30,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.37 vs. limit=8.56 +2024-07-27 09:49:31,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=1413.3333333333333, ans=0.23586666666666667 +2024-07-27 09:49:34,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.69 vs. limit=5.3533333333333335 +2024-07-27 09:49:36,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=1413.3333333333333, ans=0.06820000000000001 +2024-07-27 09:49:52,308 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.38 vs. limit=5.3566666666666665 +2024-07-27 09:49:54,445 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:49:54,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=1426.6666666666667, ans=0.32166666666666666 +2024-07-27 09:49:58,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=1440.0, ans=0.22160000000000002 +2024-07-27 09:49:59,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.24 vs. limit=8.58 +2024-07-27 09:50:20,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=8.045 +2024-07-27 09:50:21,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=1453.3333333333333, ans=0.31833333333333336 +2024-07-27 09:50:22,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=1453.3333333333333, ans=0.09091666666666667 +2024-07-27 09:50:23,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.02 vs. limit=8.59 +2024-07-27 09:50:29,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.82 vs. limit=8.6 +2024-07-27 09:50:30,338 INFO [train.py:1114] (0/4) Epoch 1, batch 1100, loss[loss=0.6646, simple_loss=0.5762, pruned_loss=0.4148, over 4901.00 frames. ], tot_loss[loss=0.8198, simple_loss=0.6836, pruned_loss=0.5904, over 934412.58 frames. 
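], batch size: 13, lr: 4.48e-02, grad_scale: 32.0

Each optim.py WARNING above summarizes recent gradient norms as five quantile points (min, 25%, median, 75%, max) together with the active clipping threshold and the percentage of recent batches clipped. A hedged sketch of that bookkeeping; the median-times-Clipping_scale threshold rule is an assumption for illustration:

```python
import torch

def clip_and_report(parameters, recent_norms, clipping_scale=2.0):
    """Report gradient-norm statistics in the format of the optim.py
    WARNING lines above, then clip. Deriving the threshold as
    median * clipping_scale is an illustrative assumption."""
    norms = torch.tensor(recent_norms)
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = q[2].item() * clipping_scale
    percent_clipped = 100.0 * (norms > threshold).float().mean().item()
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          + " ".join(f"{v:.3e}" for v in q.tolist())
          + f", threshold={threshold:.3e}, percent-clipped={percent_clipped:.1f}")
    return torch.nn.utils.clip_grad_norm_(parameters, max_norm=threshold)
```
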
+2024-07-27 09:50:31,167 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+01 6.586e+01 7.875e+01 9.417e+01 1.858e+02, threshold=1.575e+02, percent-clipped=4.0 +2024-07-27 09:50:31,332 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:50:33,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=1466.6666666666667, ans=0.145 +2024-07-27 09:50:33,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=8.05 +2024-07-27 09:50:35,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.50 vs. limit=8.05 +2024-07-27 09:50:36,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.22 vs. limit=8.6 +2024-07-27 09:50:37,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=1466.6666666666667, ans=0.43125 +2024-07-27 09:50:48,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.64 vs. limit=8.61 +2024-07-27 09:50:50,926 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.72 vs. limit=8.06 +2024-07-27 09:50:51,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.08 vs. limit=8.620000000000001 +2024-07-27 09:50:57,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.93 vs. limit=8.629999999999999 +2024-07-27 09:50:58,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=1506.6666666666667, ans=0.429375 +2024-07-27 09:50:59,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=8.065 +2024-07-27 09:51:08,689 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=4.608 +2024-07-27 09:51:12,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.43 vs. limit=8.07 +2024-07-27 09:51:14,855 INFO [train.py:1114] (0/4) Epoch 1, batch 1150, loss[loss=0.6677, simple_loss=0.5754, pruned_loss=0.4173, over 4893.00 frames. ], tot_loss[loss=0.7948, simple_loss=0.6674, pruned_loss=0.5575, over 933881.46 frames. ], batch size: 13, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:51:38,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs.
limit=8.08 +2024-07-27 09:51:51,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1573.3333333333333, ans=0.42625 +2024-07-27 09:51:53,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=1573.3333333333333, ans=0.14100000000000001 +2024-07-27 09:51:57,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=1573.3333333333333, ans=5.983333333333333 +2024-07-27 09:52:01,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.62 vs. limit=5.3966666666666665 +2024-07-27 09:52:06,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.41 vs. limit=8.69 +2024-07-27 09:52:09,176 INFO [train.py:1114] (0/4) Epoch 1, batch 1200, loss[loss=0.6315, simple_loss=0.5608, pruned_loss=0.3732, over 4870.00 frames. ], tot_loss[loss=0.7737, simple_loss=0.6537, pruned_loss=0.5297, over 933084.92 frames. ], batch size: 14, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:10,010 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.977e+01 8.267e+01 1.004e+02 1.485e+02, threshold=1.653e+02, percent-clipped=0.0 +2024-07-27 09:52:13,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.56 vs. limit=8.7 +2024-07-27 09:52:22,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=1613.3333333333333, ans=0.424375 +2024-07-27 09:52:23,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=1613.3333333333333, ans=0.1395 +2024-07-27 09:52:25,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1613.3333333333333, ans=0.28386666666666666 +2024-07-27 09:52:26,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=1613.3333333333333, ans=0.1395 +2024-07-27 09:52:40,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1640.0, ans=0.423125 +2024-07-27 09:52:49,476 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.96 vs. limit=5.413333333333333 +2024-07-27 09:52:53,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1653.3333333333333, ans=0.4225 +2024-07-27 09:52:55,866 INFO [train.py:1114] (0/4) Epoch 1, batch 1250, loss[loss=0.71, simple_loss=0.6231, pruned_loss=0.425, over 4803.00 frames. ], tot_loss[loss=0.7512, simple_loss=0.6398, pruned_loss=0.5014, over 937184.59 frames. ], batch size: 15, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:57,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=8.125 +2024-07-27 09:53:09,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.51 vs. 
limit=8.76 +2024-07-27 09:53:09,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.81 vs. limit=5.42 +2024-07-27 09:53:16,544 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:53:21,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=1693.3333333333333, ans=0.044708333333333336 +2024-07-27 09:53:33,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.12 vs. limit=8.145 +2024-07-27 09:53:33,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.53 vs. limit=8.145 +2024-07-27 09:53:37,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.79 vs. limit=5.43 +2024-07-27 09:53:43,442 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.45 vs. limit=8.79 +2024-07-27 09:53:58,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=1733.3333333333333, ans=0.41875 +2024-07-27 09:53:58,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=1733.3333333333333, ans=0.41875 +2024-07-27 09:53:59,503 INFO [train.py:1114] (0/4) Epoch 1, batch 1300, loss[loss=0.6662, simple_loss=0.586, pruned_loss=0.395, over 4710.00 frames. ], tot_loss[loss=0.726, simple_loss=0.6227, pruned_loss=0.4739, over 938702.92 frames. ], batch size: 19, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:54:00,176 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.513e+01 6.459e+01 7.334e+01 8.641e+01 1.550e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 09:54:00,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=1733.3333333333333, ans=0.061000000000000006 +2024-07-27 09:54:05,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=5.433333333333334 +2024-07-27 09:54:11,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.07 vs. limit=5.433333333333334 +2024-07-27 09:54:18,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=1746.6666666666667, ans=0.41812499999999997 +2024-07-27 09:54:28,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=1760.0, ans=0.0604 +2024-07-27 09:55:29,130 INFO [train.py:1114] (0/4) Epoch 1, batch 1350, loss[loss=0.6023, simple_loss=0.5431, pruned_loss=0.3427, over 4761.00 frames. ], tot_loss[loss=0.7037, simple_loss=0.6083, pruned_loss=0.4492, over 940948.82 frames. ], batch size: 13, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:55:47,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.90 vs. 
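limit=8.85

The Whitening lines compare a per-module statistic against a scheduled limit; the metric grows when one direction of the feature covariance dominates, i.e. when activations drift away from being "white". A rough proxy, under the assumption that the metric measures covariance-spectrum spread; this is illustrative, not scaling.py verbatim:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Rough proxy for the 'metric' in the Whitening lines above: the mean
    squared eigenvalue of the feature covariance divided by the squared
    mean eigenvalue. It is 1.0 for perfectly white features and grows as
    one direction dominates. An illustrative assumption."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames  # per-group covariance
    eigs = torch.linalg.eigvalsh(cov)                      # (groups, channels)
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

x = torch.randn(1000, 384) @ torch.randn(384, 384)  # strongly correlated features
print(whitening_metric(x))  # far above 1, would exceed a limit like 8.76
```
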
+2024-07-27 09:55:51,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=1813.3333333333333, ans=0.415 +2024-07-27 09:55:56,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.99 vs. limit=8.870000000000001 +2024-07-27 09:56:07,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=1826.6666666666667, ans=0.04429166666666667 +2024-07-27 09:56:09,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=1826.6666666666667, ans=0.414375 +2024-07-27 09:56:15,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=1840.0, ans=0.8356 +2024-07-27 09:56:16,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=8.19 +2024-07-27 09:56:20,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.12 vs. limit=4.736 +2024-07-27 09:56:21,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.15 vs. limit=8.879999999999999 +2024-07-27 09:56:23,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1853.3333333333333, ans=0.28146666666666664 +2024-07-27 09:56:24,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=1853.3333333333333, ans=0.04949747468305833 +2024-07-27 09:56:25,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1853.3333333333333, ans=0.413125 +2024-07-27 09:56:30,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=1853.3333333333333, ans=0.1305 +2024-07-27 09:56:32,614 INFO [train.py:1114] (0/4) Epoch 1, batch 1400, loss[loss=0.5802, simple_loss=0.5291, pruned_loss=0.3237, over 4710.00 frames. ], tot_loss[loss=0.6857, simple_loss=0.5967, pruned_loss=0.429, over 942977.25 frames. ], batch size: 11, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:56:33,349 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 7.358e+01 8.189e+01 9.683e+01 1.850e+02, threshold=1.638e+02, percent-clipped=1.0 +2024-07-27 09:56:33,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.06 vs. limit=8.9 +2024-07-27 09:56:46,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=8.9 +2024-07-27 09:56:46,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.34 vs. limit=8.9 +2024-07-27 09:56:49,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.62 vs.
limit=8.91 +2024-07-27 09:56:53,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=1880.0, ans=0.14425 +2024-07-27 09:56:54,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.79 vs. limit=5.9399999999999995 +2024-07-27 09:57:01,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=4.757333333333333 +2024-07-27 09:57:08,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1906.6666666666667, ans=0.410625 +2024-07-27 09:57:16,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1920.0, ans=0.41000000000000003 +2024-07-27 09:57:17,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.09 vs. limit=5.96 +2024-07-27 09:57:24,386 INFO [train.py:1114] (0/4) Epoch 1, batch 1450, loss[loss=0.6674, simple_loss=0.5956, pruned_loss=0.3823, over 4684.00 frames. ], tot_loss[loss=0.6657, simple_loss=0.5835, pruned_loss=0.4085, over 942714.53 frames. ], batch size: 15, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:57:24,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=1933.3333333333333, ans=0.05650000000000001 +2024-07-27 09:57:31,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.30 vs. limit=8.95 +2024-07-27 09:57:32,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1946.6666666666667, ans=0.2805333333333333 +2024-07-27 09:57:55,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.19 vs. limit=8.98 +2024-07-27 09:57:59,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=1973.3333333333333, ans=6.233333333333333 +2024-07-27 09:58:00,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=3.296 +2024-07-27 09:58:00,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=1973.3333333333333, ans=0.23026666666666668 +2024-07-27 09:58:13,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1986.6666666666667, ans=0.406875 +2024-07-27 09:58:18,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=3.298 +2024-07-27 09:58:19,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=1986.6666666666667, ans=0.04966666666666667 +2024-07-27 09:58:19,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.14 vs. 
limit=8.99 +2024-07-27 09:58:22,741 INFO [train.py:1114] (0/4) Epoch 1, batch 1500, loss[loss=0.6258, simple_loss=0.5636, pruned_loss=0.353, over 4804.00 frames. ], tot_loss[loss=0.6531, simple_loss=0.5762, pruned_loss=0.3939, over 942327.17 frames. ], batch size: 14, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:58:23,587 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.537e+01 6.985e+01 7.625e+01 8.885e+01 1.224e+02, threshold=1.525e+02, percent-clipped=0.0 +2024-07-27 09:58:32,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=2013.3333333333333, ans=0.0547 +2024-07-27 09:58:32,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.76 vs. limit=5.503333333333333 +2024-07-27 09:58:47,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=2026.6666666666667, ans=0.136 +2024-07-27 09:58:54,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=8.265 +2024-07-27 09:59:05,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.00 vs. limit=5.513333333333334 +2024-07-27 09:59:07,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=2053.3333333333335, ans=0.40375 +2024-07-27 09:59:10,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.82 vs. limit=5.516666666666667 +2024-07-27 09:59:10,627 INFO [train.py:1114] (0/4) Epoch 1, batch 1550, loss[loss=0.6117, simple_loss=0.5743, pruned_loss=0.3259, over 4901.00 frames. ], tot_loss[loss=0.64, simple_loss=0.5675, pruned_loss=0.3803, over 938295.84 frames. ], batch size: 15, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 09:59:38,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=2080.0, ans=0.053200000000000004 +2024-07-27 09:59:39,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.20 vs. limit=9.07 +2024-07-27 09:59:43,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.62 vs. limit=5.523333333333333 +2024-07-27 09:59:54,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=2106.6666666666665, ans=0.12100000000000001 +2024-07-27 09:59:57,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=2120.0, ans=0.0523 +2024-07-27 10:00:02,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.83 vs. limit=5.53 +2024-07-27 10:00:04,779 INFO [train.py:1114] (0/4) Epoch 1, batch 1600, loss[loss=0.6727, simple_loss=0.6073, pruned_loss=0.3759, over 4879.00 frames. ], tot_loss[loss=0.6275, simple_loss=0.5596, pruned_loss=0.3676, over 937096.98 frames. 
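], batch size: 14, lr: 4.45e-02, grad_scale: 32.0

In the train.py lines, each batch reports its own loss next to tot_loss, a running average weighted by the number of frames seen so far, with the objective split into a simple and a pruned transducer term. A sketch of that accounting; the 0.5/1.0 weights and the class name are assumptions for illustration:

```python
# Sketch of the bookkeeping behind the train.py lines above: the per-batch
# loss combines a simple and a pruned transducer term, and tot_loss is a
# frame-weighted running average. The 0.5/1.0 weights are assumptions.
class LossTracker:
    def __init__(self, simple_scale=0.5, pruned_scale=1.0):
        self.simple_scale = simple_scale
        self.pruned_scale = pruned_scale
        self.sums = {"loss": 0.0, "simple_loss": 0.0, "pruned_loss": 0.0}
        self.frames = 0.0

    def update(self, simple_loss, pruned_loss, num_frames):
        loss = self.simple_scale * simple_loss + self.pruned_scale * pruned_loss
        for key, value in zip(self.sums, (loss, simple_loss, pruned_loss)):
            self.sums[key] += value * num_frames
        self.frames += num_frames
        return loss

    def tot_loss(self):
        return {k: s / self.frames for k, s in self.sums.items()}

tracker = LossTracker()
tracker.update(simple_loss=0.6073, pruned_loss=0.3759, num_frames=4879)
print(tracker.tot_loss())
```
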
+2024-07-27 10:00:05,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.018e+01 7.259e+01 8.235e+01 9.551e+01 1.793e+02, threshold=1.647e+02, percent-clipped=2.0 +2024-07-27 10:00:05,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=2133.3333333333335, ans=0.052 +2024-07-27 10:00:05,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=8.3 +2024-07-27 10:00:08,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=2133.3333333333335, ans=0.13 +2024-07-27 10:00:09,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.72 vs. limit=9.1 +2024-07-27 10:00:40,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=2160.0, ans=0.119 +2024-07-27 10:00:41,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.61 vs. limit=8.31 +2024-07-27 10:00:42,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.28 vs. limit=8.31 +2024-07-27 10:00:45,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.31 vs. limit=9.120000000000001 +2024-07-27 10:00:47,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=2173.3333333333335, ans=6.358333333333333 +2024-07-27 10:00:47,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=2173.3333333333335, ans=0.398125 +2024-07-27 10:01:02,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.34 vs. limit=9.14 +2024-07-27 10:01:04,973 INFO [train.py:1114] (0/4) Epoch 1, batch 1650, loss[loss=0.5509, simple_loss=0.5206, pruned_loss=0.2909, over 4674.00 frames. ], tot_loss[loss=0.6154, simple_loss=0.5519, pruned_loss=0.3558, over 936897.88 frames. ], batch size: 14, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 10:01:05,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.25 vs. limit=9.15 +2024-07-27 10:01:08,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=2200.0, ans=0.8230000000000001 +2024-07-27 10:01:09,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.03 vs.
limit=9.15 +2024-07-27 10:01:11,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=2200.0, ans=0.12625 +2024-07-27 10:01:14,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=2213.3333333333335, ans=0.23320000000000002 +2024-07-27 10:01:20,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=2213.3333333333335, ans=0.39625 +2024-07-27 10:01:28,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=4.890666666666666 +2024-07-27 10:01:30,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=2226.6666666666665, ans=0.1165 +2024-07-27 10:01:30,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.88 vs. limit=9.17 +2024-07-27 10:01:31,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=4.890666666666666 +2024-07-27 10:01:35,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2240.0, ans=0.395 +2024-07-27 10:01:48,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.02 vs. limit=4.9013333333333335 +2024-07-27 10:01:50,396 INFO [train.py:1114] (0/4) Epoch 1, batch 1700, loss[loss=0.5015, simple_loss=0.4687, pruned_loss=0.2682, over 4711.00 frames. ], tot_loss[loss=0.6033, simple_loss=0.5444, pruned_loss=0.3443, over 938968.38 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:01:51,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.631e+01 6.759e+01 7.966e+01 9.777e+01 1.760e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 10:01:53,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.23 vs. limit=9.2 +2024-07-27 10:01:54,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.26 vs. limit=9.2 +2024-07-27 10:01:58,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.35 vs. limit=4.456 +2024-07-27 10:02:04,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.43 vs. limit=5.57 +2024-07-27 10:02:08,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.47 vs. limit=6.1466666666666665 +2024-07-27 10:02:16,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=2293.3333333333335, ans=0.121 +2024-07-27 10:02:17,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.25 vs. 
limit=5.576666666666666 +2024-07-27 10:02:30,966 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:02:31,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.39 vs. limit=9.24 +2024-07-27 10:02:32,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=8.37 +2024-07-27 10:02:33,851 INFO [train.py:1114] (0/4) Epoch 1, batch 1750, loss[loss=0.4796, simple_loss=0.4453, pruned_loss=0.2581, over 4811.00 frames. ], tot_loss[loss=0.5905, simple_loss=0.5367, pruned_loss=0.3327, over 940035.88 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:02:37,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=2333.3333333333335, ans=6.166666666666667 +2024-07-27 10:02:57,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.11 vs. limit=9.27 +2024-07-27 10:03:05,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2373.3333333333335, ans=0.38875 +2024-07-27 10:03:10,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.32 vs. limit=9.28 +2024-07-27 10:03:12,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=2373.3333333333335, ans=0.8169333333333334 +2024-07-27 10:03:12,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.16 vs. limit=9.28 +2024-07-27 10:03:12,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=2373.3333333333335, ans=0.046599999999999996 +2024-07-27 10:03:13,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=2373.3333333333335, ans=0.27626666666666666 +2024-07-27 10:03:25,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=2400.0, ans=0.11 +2024-07-27 10:03:25,541 INFO [train.py:1114] (0/4) Epoch 1, batch 1800, loss[loss=0.5205, simple_loss=0.509, pruned_loss=0.2643, over 4640.00 frames. ], tot_loss[loss=0.5827, simple_loss=0.5325, pruned_loss=0.3248, over 940748.21 frames. ], batch size: 13, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:03:25,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.50 vs. limit=9.3 +2024-07-27 10:03:26,061 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.36 vs. limit=9.3 +2024-07-27 10:03:26,385 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.000e+01 7.252e+01 8.218e+01 9.576e+01 1.850e+02, threshold=1.644e+02, percent-clipped=1.0 +2024-07-27 10:03:31,800 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.63 vs. 
limit=5.6 +2024-07-27 10:03:32,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.47 vs. limit=9.3 +2024-07-27 10:03:33,070 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:03:38,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.64 vs. limit=8.405 +2024-07-27 10:03:51,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2440.0, ans=0.385625 +2024-07-27 10:03:53,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.05 vs. limit=5.61 +2024-07-27 10:04:06,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=2453.3333333333335, ans=0.1933333333333333 +2024-07-27 10:04:07,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=2453.3333333333335, ans=0.042333333333333334 +2024-07-27 10:04:07,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=2453.3333333333335, ans=0.385 +2024-07-27 10:04:13,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=2466.6666666666665, ans=0.1075 +2024-07-27 10:04:13,823 INFO [train.py:1114] (0/4) Epoch 1, batch 1850, loss[loss=0.5513, simple_loss=0.5206, pruned_loss=0.2911, over 4813.00 frames. ], tot_loss[loss=0.5733, simple_loss=0.5274, pruned_loss=0.3162, over 940823.18 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:04:14,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.26 vs. limit=9.35 +2024-07-27 10:04:23,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=2480.0, ans=0.2752 +2024-07-27 10:04:28,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.28 vs. limit=6.24 +2024-07-27 10:04:31,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.83 vs. limit=9.370000000000001 +2024-07-27 10:04:34,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.38 vs. limit=6.246666666666667 +2024-07-27 10:04:37,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=2506.6666666666665, ans=0.0436 +2024-07-27 10:04:42,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=2506.6666666666665, ans=0.5 +2024-07-27 10:04:45,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=3.376 +2024-07-27 10:04:46,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=2520.0, ans=0.381875 +2024-07-27 10:04:47,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.39 vs. limit=3.378 +2024-07-27 10:04:48,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.23 vs. limit=8.445 +2024-07-27 10:04:49,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=2520.0, ans=0.381875 +2024-07-27 10:04:54,825 INFO [train.py:1114] (0/4) Epoch 1, batch 1900, loss[loss=0.5628, simple_loss=0.5265, pruned_loss=0.2999, over 4670.00 frames. ], tot_loss[loss=0.5628, simple_loss=0.5218, pruned_loss=0.307, over 941950.10 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:04:55,618 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.340e+01 7.620e+01 8.335e+01 9.482e+01 1.510e+02, threshold=1.667e+02, percent-clipped=0.0 +2024-07-27 10:05:20,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=2546.6666666666665, ans=0.8108666666666667 +2024-07-27 10:05:22,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=2560.0, ans=0.8104 +2024-07-27 10:05:25,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.88 vs. limit=8.46 +2024-07-27 10:05:40,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.09 vs. limit=5.6466666666666665 +2024-07-27 10:05:42,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.13 vs. limit=8.47 +2024-07-27 10:05:42,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=2586.6666666666665, ans=0.103 +2024-07-27 10:05:44,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.22 vs. limit=9.44 +2024-07-27 10:05:44,775 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=5.034666666666666 +2024-07-27 10:05:46,625 INFO [train.py:1114] (0/4) Epoch 1, batch 1950, loss[loss=0.451, simple_loss=0.4452, pruned_loss=0.2279, over 4903.00 frames. ], tot_loss[loss=0.5556, simple_loss=0.5186, pruned_loss=0.3002, over 944009.16 frames. ], batch size: 13, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:05:54,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2613.3333333333335, ans=0.3775 +2024-07-27 10:05:56,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.32 vs. limit=6.306666666666667 +2024-07-27 10:06:12,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.96 vs. 
limit=9.47 +2024-07-27 10:06:13,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.39 vs. limit=6.3133333333333335 +2024-07-27 10:06:14,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.39 vs. limit=9.47 +2024-07-27 10:06:15,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.31 vs. limit=9.47 +2024-07-27 10:06:15,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. limit=8.485 +2024-07-27 10:06:17,862 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.75 vs. limit=9.47 +2024-07-27 10:06:25,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=2640.0, ans=0.035 +2024-07-27 10:06:25,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=2640.0, ans=6.65 +2024-07-27 10:06:31,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.91 vs. limit=5.663333333333333 +2024-07-27 10:06:42,512 INFO [train.py:1114] (0/4) Epoch 1, batch 2000, loss[loss=0.4773, simple_loss=0.447, pruned_loss=0.2538, over 4807.00 frames. ], tot_loss[loss=0.5497, simple_loss=0.5155, pruned_loss=0.295, over 941486.71 frames. ], batch size: 11, lr: 4.42e-02, grad_scale: 32.0 +2024-07-27 10:06:43,352 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.554e+01 8.059e+01 9.021e+01 3.573e+02, threshold=1.612e+02, percent-clipped=2.0 +2024-07-27 10:06:45,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-07-27 10:06:54,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.17 vs. limit=6.34 +2024-07-27 10:06:56,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.36 vs. limit=9.51 +2024-07-27 10:06:59,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=2680.0, ans=0.09925 +2024-07-27 10:07:01,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.61 vs. limit=6.34 +2024-07-27 10:07:18,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=2720.0, ans=0.3725 +2024-07-27 10:07:19,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.55 vs. 
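limit=9.54

Many ScheduledFloat names above end in balancer constraints such as min_positive, max_abs and prob, describing an activation balancer that, with probability prob, adjusts gradients so each channel keeps a minimum fraction of positive activations and a bounded magnitude. A toy version of the statistics involved (reporting only, not the gradient correction; the thresholds are illustrative):

```python
import torch

def balancer_stats(x: torch.Tensor, min_positive=0.05, max_abs=6.0):
    """Toy view of the statistics behind balancer names like
    'balancer1.min_positive' and 'balancer1.max_abs' above: per-channel
    fraction of positive activations and mean magnitude. The real balancer
    corrects gradients with some probability; this only reports.
    Thresholds here are illustrative assumptions."""
    pos_frac = (x > 0).float().mean(dim=0)  # fraction of positives per channel
    mean_abs = x.abs().mean(dim=0)          # mean |activation| per channel
    return {
        "channels_under_min_positive": int((pos_frac < min_positive).sum()),
        "channels_over_max_abs": int((mean_abs > max_abs).sum()),
    }

x = torch.randn(1000, 384) * 8.0  # inflated activations trip the max_abs bound
print(balancer_stats(x))
```
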
+2024-07-27 10:07:24,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2720.0, ans=0.2728 +2024-07-27 10:07:25,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=2720.0, ans=0.098 +2024-07-27 10:07:26,905 INFO [train.py:1114] (0/4) Epoch 1, batch 2050, loss[loss=0.5173, simple_loss=0.4833, pruned_loss=0.2756, over 4606.00 frames. ], tot_loss[loss=0.5402, simple_loss=0.5092, pruned_loss=0.288, over 939521.21 frames. ], batch size: 11, lr: 4.42e-02, grad_scale: 64.0 +2024-07-27 10:07:27,241 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.13 vs. limit=6.366666666666667 +2024-07-27 10:07:30,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.12 vs. limit=9.55 +2024-07-27 10:07:36,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.45 vs. limit=3.412 +2024-07-27 10:07:51,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=3.416 +2024-07-27 10:07:53,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=2773.3333333333335, ans=0.04949747468305833 +2024-07-27 10:08:04,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=2786.6666666666665, ans=0.27213333333333334 +2024-07-27 10:08:05,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.96 vs. limit=8.545 +2024-07-27 10:08:07,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.93 vs. limit=6.3933333333333335 +2024-07-27 10:08:08,458 INFO [train.py:1114] (0/4) Epoch 1, batch 2100, loss[loss=0.5524, simple_loss=0.5244, pruned_loss=0.2902, over 4759.00 frames. ], tot_loss[loss=0.5324, simple_loss=0.5046, pruned_loss=0.2819, over 941445.81 frames. ], batch size: 13, lr: 4.42e-02, grad_scale: 64.0 +2024-07-27 10:08:09,839 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.063e+01 7.785e+01 9.607e+01 1.091e+02 1.489e+02, threshold=1.921e+02, percent-clipped=0.0 +2024-07-27 10:08:10,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2800.0, ans=0.36875 +2024-07-27 10:08:25,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.12 vs. limit=8.555 +2024-07-27 10:08:27,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.11 vs. limit=9.620000000000001 +2024-07-27 10:08:34,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.30 vs.
limit=6.413333333333333 +2024-07-27 10:08:38,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=2840.0, ans=6.775 +2024-07-27 10:08:50,266 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.81 vs. limit=9.629999999999999 +2024-07-27 10:08:54,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=2853.3333333333335, ans=0.36624999999999996 +2024-07-27 10:08:59,130 INFO [train.py:1114] (0/4) Epoch 1, batch 2150, loss[loss=0.4358, simple_loss=0.448, pruned_loss=0.2118, over 4897.00 frames. ], tot_loss[loss=0.5242, simple_loss=0.5005, pruned_loss=0.2754, over 944375.11 frames. ], batch size: 13, lr: 4.41e-02, grad_scale: 64.0 +2024-07-27 10:09:05,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=2866.6666666666665, ans=0.365625 +2024-07-27 10:09:16,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.11 vs. limit=9.66 +2024-07-27 10:09:37,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.85 vs. limit=6.446666666666667 +2024-07-27 10:09:37,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.35 vs. limit=8.585 +2024-07-27 10:09:39,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.66 vs. limit=6.446666666666667 +2024-07-27 10:09:49,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.94 vs. limit=9.68 +2024-07-27 10:09:59,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=2920.0, ans=0.36312500000000003 +2024-07-27 10:10:01,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.47 vs. limit=9.69 +2024-07-27 10:10:02,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2920.0, ans=0.36312500000000003 +2024-07-27 10:10:05,527 INFO [train.py:1114] (0/4) Epoch 1, batch 2200, loss[loss=0.5599, simple_loss=0.536, pruned_loss=0.2919, over 4804.00 frames. ], tot_loss[loss=0.5164, simple_loss=0.496, pruned_loss=0.2695, over 943368.23 frames. ], batch size: 14, lr: 4.41e-02, grad_scale: 64.0 +2024-07-27 10:10:06,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.332e+01 7.672e+01 8.381e+01 9.351e+01 1.723e+02, threshold=1.676e+02, percent-clipped=0.0 +2024-07-27 10:10:06,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=8.6 +2024-07-27 10:10:14,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.46 vs. 
limit=3.44 +2024-07-27 10:10:16,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.24 vs. limit=8.605 +2024-07-27 10:10:34,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=2973.3333333333335, ans=0.360625 +2024-07-27 10:10:35,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=3.446 +2024-07-27 10:10:48,004 INFO [train.py:1114] (0/4) Epoch 1, batch 2250, loss[loss=0.4233, simple_loss=0.4363, pruned_loss=0.2052, over 4697.00 frames. ], tot_loss[loss=0.512, simple_loss=0.4941, pruned_loss=0.2658, over 941840.63 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:10:54,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=3000.0, ans=0.359375 +2024-07-27 10:10:55,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3013.3333333333335, ans=0.35875 +2024-07-27 10:10:56,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3013.3333333333335, ans=0.35875 +2024-07-27 10:11:05,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.13 vs. limit=8.629999999999999 +2024-07-27 10:11:24,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=3026.6666666666665, ans=0.09899494936611666 +2024-07-27 10:11:28,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=3026.6666666666665, ans=0.2697333333333333 +2024-07-27 10:11:34,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.02 vs. limit=5.76 +2024-07-27 10:11:39,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=3040.0, ans=0.35750000000000004 +2024-07-27 10:11:45,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.58 vs. limit=8.645 +2024-07-27 10:11:47,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3053.3333333333335, ans=0.356875 +2024-07-27 10:11:48,647 INFO [train.py:1114] (0/4) Epoch 1, batch 2300, loss[loss=0.4108, simple_loss=0.4232, pruned_loss=0.1992, over 4940.00 frames. ], tot_loss[loss=0.5016, simple_loss=0.4873, pruned_loss=0.2587, over 939634.84 frames. ], batch size: 12, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:11:49,438 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.418e+01 7.845e+01 8.717e+01 9.817e+01 1.762e+02, threshold=1.743e+02, percent-clipped=1.0 +2024-07-27 10:11:49,624 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:11:50,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.16 vs. 
limit=8.65 +2024-07-27 10:12:05,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.80 vs. limit=6.54 +2024-07-27 10:12:06,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=3080.0, ans=0.08449999999999999 +2024-07-27 10:12:07,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3093.3333333333335, ans=0.2690666666666667 +2024-07-27 10:12:09,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=3093.3333333333335, ans=0.355 +2024-07-27 10:12:12,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=8.66 +2024-07-27 10:12:13,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.23 vs. limit=6.546666666666667 +2024-07-27 10:12:14,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=3093.3333333333335, ans=6.546666666666667 +2024-07-27 10:12:22,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.93 vs. limit=5.776666666666666 +2024-07-27 10:12:32,079 INFO [train.py:1114] (0/4) Epoch 1, batch 2350, loss[loss=0.4626, simple_loss=0.4691, pruned_loss=0.2281, over 4638.00 frames. ], tot_loss[loss=0.4949, simple_loss=0.4836, pruned_loss=0.2536, over 941515.87 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:12:34,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.09 vs. limit=9.85 +2024-07-27 10:12:36,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=3133.3333333333335, ans=0.7903333333333333 +2024-07-27 10:12:39,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3133.3333333333335, ans=0.26866666666666666 +2024-07-27 10:12:45,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3146.6666666666665, ans=0.35250000000000004 +2024-07-27 10:12:47,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=3.472 +2024-07-27 10:12:51,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.61 vs. limit=9.870000000000001 +2024-07-27 10:13:03,583 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:13:16,259 INFO [train.py:1114] (0/4) Epoch 1, batch 2400, loss[loss=0.4895, simple_loss=0.4677, pruned_loss=0.2557, over 4649.00 frames. ], tot_loss[loss=0.493, simple_loss=0.4833, pruned_loss=0.2518, over 941210.40 frames. 
], batch size: 12, lr: 4.39e-02, grad_scale: 64.0 +2024-07-27 10:13:16,983 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.405e+01 7.978e+01 8.770e+01 1.032e+02 1.902e+02, threshold=1.754e+02, percent-clipped=2.0 +2024-07-27 10:13:22,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.99 vs. limit=5.8 +2024-07-27 10:13:31,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=3226.6666666666665, ans=0.079 +2024-07-27 10:13:38,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=5.806666666666667 +2024-07-27 10:13:43,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3240.0, ans=0.2676 +2024-07-27 10:13:49,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3253.3333333333335, ans=0.34750000000000003 +2024-07-27 10:13:50,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=3253.3333333333335, ans=7.033333333333333 +2024-07-27 10:13:51,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3253.3333333333335, ans=0.26746666666666663 +2024-07-27 10:13:53,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=8.72 +2024-07-27 10:13:55,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=3266.6666666666665, ans=0.346875 +2024-07-27 10:13:56,384 INFO [train.py:1114] (0/4) Epoch 1, batch 2450, loss[loss=0.5436, simple_loss=0.5367, pruned_loss=0.2752, over 4694.00 frames. ], tot_loss[loss=0.4925, simple_loss=0.484, pruned_loss=0.2509, over 936219.96 frames. ], batch size: 13, lr: 4.39e-02, grad_scale: 64.0 +2024-07-27 10:14:00,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=3266.6666666666665, ans=0.346875 +2024-07-27 10:14:01,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3266.6666666666665, ans=0.2673333333333333 +2024-07-27 10:14:01,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=3266.6666666666665, ans=0.0775 +2024-07-27 10:14:03,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=3266.6666666666665, ans=0.346875 +2024-07-27 10:14:19,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.13 vs. 
limit=9.96 +2024-07-27 10:14:30,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3293.3333333333335, ans=0.2670666666666667 +2024-07-27 10:14:39,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3306.6666666666665, ans=0.08666666666666667 +2024-07-27 10:14:41,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=3306.6666666666665, ans=0.7842666666666667 +2024-07-27 10:14:53,954 INFO [train.py:1114] (0/4) Epoch 1, batch 2500, loss[loss=0.529, simple_loss=0.5202, pruned_loss=0.2689, over 4804.00 frames. ], tot_loss[loss=0.4859, simple_loss=0.4796, pruned_loss=0.2464, over 938335.63 frames. ], batch size: 14, lr: 4.38e-02, grad_scale: 64.0 +2024-07-27 10:14:54,650 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.439e+01 7.441e+01 8.140e+01 9.225e+01 1.396e+02, threshold=1.628e+02, percent-clipped=0.0 +2024-07-27 10:15:12,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=3346.6666666666665, ans=0.343125 +2024-07-27 10:15:31,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=3360.0, ans=0.2504 +2024-07-27 10:15:38,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=3360.0, ans=7.1 +2024-07-27 10:15:42,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=3373.3333333333335, ans=0.34187500000000004 +2024-07-27 10:15:42,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.90 vs. limit=5.843333333333334 +2024-07-27 10:15:44,208 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.89 vs. limit=10.03 +2024-07-27 10:15:45,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=3373.3333333333335, ans=0.024099999999999996 +2024-07-27 10:15:53,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=3386.6666666666665, ans=0.34125 +2024-07-27 10:16:03,503 INFO [train.py:1114] (0/4) Epoch 1, batch 2550, loss[loss=0.3907, simple_loss=0.4137, pruned_loss=0.1838, over 4796.00 frames. ], tot_loss[loss=0.4817, simple_loss=0.4776, pruned_loss=0.2431, over 937888.59 frames. ], batch size: 11, lr: 4.38e-02, grad_scale: 64.0 +2024-07-27 10:16:11,504 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.86 vs. limit=5.8533333333333335 +2024-07-27 10:16:13,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=10.06 +2024-07-27 10:16:31,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.65 vs. 
limit=8.79 +2024-07-27 10:16:37,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=3440.0, ans=0.07099999999999998 +2024-07-27 10:16:42,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.29 vs. limit=10.09 +2024-07-27 10:16:44,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=3453.3333333333335, ans=0.338125 +2024-07-27 10:16:49,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.02 vs. limit=5.866666666666666 +2024-07-27 10:16:49,898 INFO [train.py:1114] (0/4) Epoch 1, batch 2600, loss[loss=0.4349, simple_loss=0.455, pruned_loss=0.2074, over 4900.00 frames. ], tot_loss[loss=0.4768, simple_loss=0.475, pruned_loss=0.2394, over 936989.77 frames. ], batch size: 13, lr: 4.37e-02, grad_scale: 64.0 +2024-07-27 10:16:50,622 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.396e+01 7.798e+01 8.275e+01 9.472e+01 1.752e+02, threshold=1.655e+02, percent-clipped=1.0 +2024-07-27 10:17:04,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=3480.0, ans=5.3919999999999995 +2024-07-27 10:17:08,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=3493.3333333333335, ans=0.05349999999999999 +2024-07-27 10:17:26,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.34 vs. limit=10.129999999999999 +2024-07-27 10:17:37,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.42 vs. limit=8.82 +2024-07-27 10:17:38,263 INFO [train.py:1114] (0/4) Epoch 1, batch 2650, loss[loss=0.4389, simple_loss=0.454, pruned_loss=0.2119, over 4617.00 frames. ], tot_loss[loss=0.4749, simple_loss=0.4741, pruned_loss=0.238, over 939326.96 frames. ], batch size: 16, lr: 4.37e-02, grad_scale: 64.0 +2024-07-27 10:17:43,418 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.98 vs. limit=10.15 +2024-07-27 10:17:44,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=3533.3333333333335, ans=0.07791666666666666 +2024-07-27 10:17:45,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3533.3333333333335, ans=0.26466666666666666 +2024-07-27 10:18:00,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.60 vs. limit=10.17 +2024-07-27 10:18:04,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=3560.0, ans=0.06649999999999998 +2024-07-27 10:18:05,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.96 vs. 
limit=5.8933333333333335 +2024-07-27 10:18:06,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=3573.3333333333335, ans=0.019599999999999992 +2024-07-27 10:18:10,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=3573.3333333333335, ans=0.03883333333333333 +2024-07-27 10:18:11,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.77 vs. limit=6.786666666666667 +2024-07-27 10:18:12,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=3573.3333333333335, ans=0.21426666666666666 +2024-07-27 10:18:40,839 INFO [train.py:1114] (0/4) Epoch 1, batch 2700, loss[loss=0.4625, simple_loss=0.4686, pruned_loss=0.2283, over 4729.00 frames. ], tot_loss[loss=0.4731, simple_loss=0.4732, pruned_loss=0.2366, over 939646.33 frames. ], batch size: 14, lr: 4.36e-02, grad_scale: 64.0 +2024-07-27 10:18:41,569 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.609e+01 7.664e+01 8.465e+01 9.239e+01 1.807e+02, threshold=1.693e+02, percent-clipped=1.0 +2024-07-27 10:18:46,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3600.0, ans=0.33125 +2024-07-27 10:18:54,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.67 vs. limit=10.21 +2024-07-27 10:18:59,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.02 vs. limit=10.21 +2024-07-27 10:19:06,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=3626.6666666666665, ans=0.04600000000000001 +2024-07-27 10:19:10,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.17 vs. limit=10.23 +2024-07-27 10:19:31,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=3653.3333333333335, ans=0.7721333333333333 +2024-07-27 10:19:40,886 INFO [train.py:1114] (0/4) Epoch 1, batch 2750, loss[loss=0.3874, simple_loss=0.4021, pruned_loss=0.1863, over 4707.00 frames. ], tot_loss[loss=0.4695, simple_loss=0.4703, pruned_loss=0.2344, over 939869.52 frames. ], batch size: 12, lr: 4.36e-02, grad_scale: 32.0 +2024-07-27 10:19:43,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3666.6666666666665, ans=0.2633333333333333 +2024-07-27 10:19:57,398 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:19:58,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3680.0, ans=0.03999999999999998 +2024-07-27 10:20:00,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.25 vs. 
limit=10.26 +2024-07-27 10:20:02,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3693.3333333333335, ans=0.26306666666666667 +2024-07-27 10:20:16,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=3706.6666666666665, ans=0.016600000000000004 +2024-07-27 10:20:21,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3720.0, ans=0.034999999999999976 +2024-07-27 10:20:22,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.34 vs. limit=6.86 +2024-07-27 10:20:27,077 INFO [train.py:1114] (0/4) Epoch 1, batch 2800, loss[loss=0.6072, simple_loss=0.547, pruned_loss=0.3337, over 3341.00 frames. ], tot_loss[loss=0.4687, simple_loss=0.4701, pruned_loss=0.2337, over 937828.30 frames. ], batch size: 35, lr: 4.36e-02, grad_scale: 32.0 +2024-07-27 10:20:28,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.137e+01 7.490e+01 8.370e+01 9.871e+01 2.286e+02, threshold=1.674e+02, percent-clipped=1.0 +2024-07-27 10:20:33,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.26 vs. limit=8.9 +2024-07-27 10:20:34,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.15 vs. limit=5.9366666666666665 +2024-07-27 10:20:37,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.55 vs. limit=10.31 +2024-07-27 10:20:41,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=3746.6666666666665, ans=0.7688666666666667 +2024-07-27 10:21:04,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.66 vs. limit=10.34 +2024-07-27 10:21:12,416 INFO [train.py:1114] (0/4) Epoch 1, batch 2850, loss[loss=0.4785, simple_loss=0.473, pruned_loss=0.2421, over 4959.00 frames. ], tot_loss[loss=0.4667, simple_loss=0.4689, pruned_loss=0.2323, over 936233.43 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0 +2024-07-27 10:21:12,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=3800.0, ans=0.767 +2024-07-27 10:21:14,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=3800.0, ans=0.321875 +2024-07-27 10:21:32,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=3826.6666666666665, ans=0.03475 +2024-07-27 10:21:37,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.31 vs. limit=10.379999999999999 +2024-07-27 10:21:43,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.14 vs. 
limit=10.379999999999999 +2024-07-27 10:21:54,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=3853.3333333333335, ans=0.25780000000000003 +2024-07-27 10:21:58,275 INFO [train.py:1114] (0/4) Epoch 1, batch 2900, loss[loss=0.3778, simple_loss=0.4064, pruned_loss=0.1746, over 4824.00 frames. ], tot_loss[loss=0.4633, simple_loss=0.4678, pruned_loss=0.2294, over 939971.73 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0 +2024-07-27 10:22:05,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=5.546666666666667 +2024-07-27 10:22:07,176 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.485e+01 7.711e+01 8.512e+01 9.288e+01 5.214e+02, threshold=1.702e+02, percent-clipped=1.0 +2024-07-27 10:22:07,280 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:22:14,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=10.41 +2024-07-27 10:22:17,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3880.0, ans=0.2612 +2024-07-27 10:22:20,362 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:22:30,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=3906.6666666666665, ans=0.316875 +2024-07-27 10:22:30,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3906.6666666666665, ans=0.011666666666666659 +2024-07-27 10:22:32,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.12 vs. limit=5.976666666666667 +2024-07-27 10:22:34,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=3906.6666666666665, ans=0.316875 +2024-07-27 10:22:40,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.15 vs. limit=5.98 +2024-07-27 10:22:45,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=3920.0, ans=0.05299999999999999 +2024-07-27 10:22:47,616 INFO [train.py:1114] (0/4) Epoch 1, batch 2950, loss[loss=0.4523, simple_loss=0.475, pruned_loss=0.2148, over 4705.00 frames. ], tot_loss[loss=0.4608, simple_loss=0.4659, pruned_loss=0.2278, over 938801.95 frames. ], batch size: 12, lr: 4.34e-02, grad_scale: 32.0 +2024-07-27 10:22:58,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=3933.3333333333335, ans=0.7623333333333333 +2024-07-27 10:23:00,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.28 vs. 
limit=10.45 +2024-07-27 10:23:03,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=3946.6666666666665, ans=0.315 +2024-07-27 10:23:04,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=3946.6666666666665, ans=0.011199999999999988 +2024-07-27 10:23:10,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=10.46 +2024-07-27 10:23:11,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=3960.0, ans=0.027249999999999996 +2024-07-27 10:23:13,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=3960.0, ans=0.31437499999999996 +2024-07-27 10:23:14,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=3960.0, ans=0.0050000000000000044 +2024-07-27 10:23:25,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=3973.3333333333335, ans=0.05099999999999999 +2024-07-27 10:23:26,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=3973.3333333333335, ans=0.05099999999999999 +2024-07-27 10:23:33,580 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:23:35,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.58 vs. limit=8.995000000000001 +2024-07-27 10:23:38,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=4000.0, ans=0.76 +2024-07-27 10:23:38,736 INFO [train.py:1114] (0/4) Epoch 1, batch 3000, loss[loss=0.3473, simple_loss=0.3954, pruned_loss=0.1496, over 4758.00 frames. ], tot_loss[loss=0.4561, simple_loss=0.4637, pruned_loss=0.2242, over 938569.48 frames. ], batch size: 13, lr: 4.34e-02, grad_scale: 32.0 +2024-07-27 10:23:38,737 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 10:23:46,708 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([4.3754, 4.2191, 4.5406, 4.3715], device='cuda:0') +2024-07-27 10:23:52,403 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=0.3584, simple_loss=0.4212, pruned_loss=0.1478, over 944034.00 frames. +2024-07-27 10:23:52,404 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4151MB +2024-07-27 10:23:52,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.52 vs. limit=7.0 +2024-07-27 10:23:54,442 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.536e+01 7.537e+01 8.350e+01 9.496e+01 1.510e+02, threshold=1.670e+02, percent-clipped=0.0 +2024-07-27 10:23:57,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.51 vs. 
limit=10.5 +2024-07-27 10:24:27,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4026.6666666666665, ans=0.31125 +2024-07-27 10:24:31,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=4026.6666666666665, ans=0.31125 +2024-07-27 10:24:44,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4053.3333333333335, ans=0.31 +2024-07-27 10:24:49,757 INFO [train.py:1114] (0/4) Epoch 1, batch 3050, loss[loss=0.4525, simple_loss=0.4595, pruned_loss=0.2228, over 4643.00 frames. ], tot_loss[loss=0.4554, simple_loss=0.4639, pruned_loss=0.2235, over 937225.12 frames. ], batch size: 12, lr: 4.33e-02, grad_scale: 32.0 +2024-07-27 10:24:55,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=4066.6666666666665, ans=0.309375 +2024-07-27 10:24:56,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=10.55 +2024-07-27 10:24:58,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=4080.0, ans=0.30874999999999997 +2024-07-27 10:25:04,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.66 vs. limit=7.04 +2024-07-27 10:25:13,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=4093.3333333333335, ans=0.7567333333333334 +2024-07-27 10:25:17,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=4106.666666666667, ans=0.3075 +2024-07-27 10:25:17,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.75 vs. limit=7.053333333333334 +2024-07-27 10:25:24,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=4106.666666666667, ans=0.049555555555555554 +2024-07-27 10:25:24,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4106.666666666667, ans=0.3075 +2024-07-27 10:25:31,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.63 vs. limit=10.59 +2024-07-27 10:25:33,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.66 vs. limit=10.59 +2024-07-27 10:25:34,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.69 vs. limit=9.045 +2024-07-27 10:25:36,753 INFO [train.py:1114] (0/4) Epoch 1, batch 3100, loss[loss=0.473, simple_loss=0.4883, pruned_loss=0.2289, over 4650.00 frames. ], tot_loss[loss=0.4547, simple_loss=0.4631, pruned_loss=0.2231, over 938001.62 frames. 
], batch size: 16, lr: 4.33e-02, grad_scale: 32.0 +2024-07-27 10:25:38,258 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.617e+01 7.727e+01 8.300e+01 9.366e+01 1.573e+02, threshold=1.660e+02, percent-clipped=0.0 +2024-07-27 10:25:40,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=4133.333333333333, ans=0.025 +2024-07-27 10:25:46,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=9.055 +2024-07-27 10:25:55,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=4146.666666666667, ans=0.30562500000000004 +2024-07-27 10:26:06,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4173.333333333333, ans=0.304375 +2024-07-27 10:26:08,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.03 vs. limit=10.629999999999999 +2024-07-27 10:26:14,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=4186.666666666667, ans=0.30374999999999996 +2024-07-27 10:26:14,551 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.46 vs. limit=3.628 +2024-07-27 10:26:24,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=3.63 +2024-07-27 10:26:25,184 INFO [train.py:1114] (0/4) Epoch 1, batch 3150, loss[loss=0.4663, simple_loss=0.467, pruned_loss=0.2328, over 4632.00 frames. ], tot_loss[loss=0.4498, simple_loss=0.4602, pruned_loss=0.2197, over 938083.22 frames. ], batch size: 17, lr: 4.32e-02, grad_scale: 32.0 +2024-07-27 10:26:29,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.63 vs. limit=10.65 +2024-07-27 10:26:30,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=10.65 +2024-07-27 10:26:34,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=4213.333333333333, ans=0.7525333333333334 +2024-07-27 10:26:38,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.34 vs. limit=10.66 +2024-07-27 10:26:44,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.05 vs. limit=6.056666666666667 +2024-07-27 10:26:46,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=10.67 +2024-07-27 10:26:47,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.88 vs. 
limit=10.67 +2024-07-27 10:26:48,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=4240.0, ans=0.009947826086956522 +2024-07-27 10:26:48,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=9.09 +2024-07-27 10:26:56,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4253.333333333333, ans=0.2574666666666667 +2024-07-27 10:26:57,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=4253.333333333333, ans=0.30062500000000003 +2024-07-27 10:27:05,655 INFO [train.py:1114] (0/4) Epoch 1, batch 3200, loss[loss=0.3782, simple_loss=0.4069, pruned_loss=0.1747, over 4823.00 frames. ], tot_loss[loss=0.4459, simple_loss=0.4575, pruned_loss=0.2171, over 939693.97 frames. ], batch size: 13, lr: 4.32e-02, grad_scale: 32.0 +2024-07-27 10:27:11,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.85 vs. limit=10.7 +2024-07-27 10:27:12,171 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.236e+01 7.498e+01 8.243e+01 8.897e+01 1.348e+02, threshold=1.649e+02, percent-clipped=0.0 +2024-07-27 10:27:42,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.42 vs. limit=10.71 +2024-07-27 10:27:52,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=9.11 +2024-07-27 10:27:53,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4293.333333333333, ans=0.29874999999999996 +2024-07-27 10:28:28,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=4333.333333333333, ans=0.296875 +2024-07-27 10:28:29,682 INFO [train.py:1114] (0/4) Epoch 1, batch 3250, loss[loss=0.4382, simple_loss=0.4602, pruned_loss=0.2081, over 4936.00 frames. ], tot_loss[loss=0.4443, simple_loss=0.4568, pruned_loss=0.2158, over 940937.29 frames. ], batch size: 14, lr: 4.31e-02, grad_scale: 32.0 +2024-07-27 10:28:33,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4333.333333333333, ans=0.25666666666666665 +2024-07-27 10:28:36,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=10.75 +2024-07-27 10:28:37,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=4346.666666666667, ans=0.00992463768115942 +2024-07-27 10:28:49,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=5.744 +2024-07-27 10:28:57,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=10.78 +2024-07-27 10:28:59,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.80 vs. 
limit=10.78 +2024-07-27 10:29:03,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=4373.333333333333, ans=0.09899494936611666 +2024-07-27 10:29:12,743 INFO [train.py:1114] (0/4) Epoch 1, batch 3300, loss[loss=0.4321, simple_loss=0.4427, pruned_loss=0.2107, over 4709.00 frames. ], tot_loss[loss=0.4402, simple_loss=0.454, pruned_loss=0.2132, over 941116.50 frames. ], batch size: 19, lr: 4.31e-02, grad_scale: 32.0 +2024-07-27 10:29:14,419 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.348e+01 7.414e+01 8.133e+01 9.480e+01 1.579e+02, threshold=1.627e+02, percent-clipped=0.0 +2024-07-27 10:29:14,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4400.0, ans=0.29375 +2024-07-27 10:29:16,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=10.8 +2024-07-27 10:29:37,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=4413.333333333333, ans=0.20586666666666667 +2024-07-27 10:29:55,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=10.81 +2024-07-27 10:29:59,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=9.155 +2024-07-27 10:30:00,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=4413.333333333333, ans=0.29312499999999997 +2024-07-27 10:30:07,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=4426.666666666667, ans=0.04822222222222222 +2024-07-27 10:30:48,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=5.781333333333333 +2024-07-27 10:30:53,655 INFO [train.py:1114] (0/4) Epoch 1, batch 3350, loss[loss=0.5409, simple_loss=0.519, pruned_loss=0.2814, over 4632.00 frames. ], tot_loss[loss=0.4423, simple_loss=0.456, pruned_loss=0.2143, over 938527.32 frames. ], batch size: 17, lr: 4.30e-02, grad_scale: 32.0 +2024-07-27 10:30:53,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=4466.666666666667, ans=0.00989855072463768 +2024-07-27 10:30:53,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4466.666666666667, ans=0.290625 +2024-07-27 10:31:14,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=4466.666666666667, ans=0.00989855072463768 +2024-07-27 10:31:18,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=4480.0, ans=0.009895652173913043 +2024-07-27 10:31:21,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.55 vs. limit=10.86 +2024-07-27 10:31:22,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.49 vs. 
limit=10.86 +2024-07-27 10:31:33,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4506.666666666667, ans=0.25493333333333335 +2024-07-27 10:31:34,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=10.879999999999999 +2024-07-27 10:31:41,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=10.879999999999999 +2024-07-27 10:31:46,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=4520.0, ans=0.00988695652173913 +2024-07-27 10:31:47,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4520.0, ans=0.25479999999999997 +2024-07-27 10:31:55,295 INFO [train.py:1114] (0/4) Epoch 1, batch 3400, loss[loss=0.3801, simple_loss=0.4206, pruned_loss=0.1698, over 4802.00 frames. ], tot_loss[loss=0.4396, simple_loss=0.4542, pruned_loss=0.2125, over 937624.98 frames. ], batch size: 11, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:31:56,822 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.521e+01 8.329e+01 9.335e+01 1.968e+02, threshold=1.666e+02, percent-clipped=1.0 +2024-07-27 10:31:57,872 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:32:14,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=4546.666666666667, ans=0.07158333333333333 +2024-07-27 10:32:14,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=4546.666666666667, ans=0.009881159420289855 +2024-07-27 10:32:54,912 INFO [train.py:1114] (0/4) Epoch 1, batch 3450, loss[loss=0.4538, simple_loss=0.4807, pruned_loss=0.2135, over 4705.00 frames. ], tot_loss[loss=0.438, simple_loss=0.4537, pruned_loss=0.2111, over 937627.08 frames. 
], batch size: 19, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:32:57,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=4600.0, ans=0.009869565217391305 +2024-07-27 10:33:03,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=4600.0, ans=0.269 +2024-07-27 10:33:10,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=4613.333333333333, ans=0.009866666666666668 +2024-07-27 10:33:13,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=4613.333333333333, ans=0.28375 +2024-07-27 10:33:14,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=4613.333333333333, ans=0.7385333333333334 +2024-07-27 10:33:18,240 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:33:25,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=4626.666666666667, ans=0.009863768115942029 +2024-07-27 10:33:28,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=4640.0, ans=0.2825 +2024-07-27 10:33:29,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=4640.0, ans=0.7376 +2024-07-27 10:33:35,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=4640.0, ans=0.04733333333333334 +2024-07-27 10:33:38,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=4640.0, ans=0.2825 +2024-07-27 10:33:53,137 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=7.421e-01 +2024-07-27 10:33:57,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=4653.333333333333, ans=0.009857971014492754 +2024-07-27 10:33:57,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.37 vs. limit=9.245000000000001 +2024-07-27 10:33:58,728 INFO [train.py:1114] (0/4) Epoch 1, batch 3500, loss[loss=0.3631, simple_loss=0.3896, pruned_loss=0.1683, over 4942.00 frames. ], tot_loss[loss=0.4345, simple_loss=0.4514, pruned_loss=0.2088, over 938450.64 frames. ], batch size: 12, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:34:00,613 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.699e+01 7.535e+01 8.121e+01 9.134e+01 1.279e+02, threshold=1.624e+02, percent-clipped=0.0 +2024-07-27 10:34:13,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4666.666666666667, ans=0.2533333333333333 +2024-07-27 10:34:13,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=4666.666666666667, ans=0.1 +2024-07-27 10:35:18,206 INFO [train.py:1114] (0/4) Epoch 1, batch 3550, loss[loss=0.3819, simple_loss=0.4089, pruned_loss=0.1775, over 4668.00 frames. ], tot_loss[loss=0.4322, simple_loss=0.4502, pruned_loss=0.2071, over 938877.26 frames. 
], batch size: 14, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:35:21,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=4733.333333333333, ans=0.009840579710144928 +2024-07-27 10:35:24,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=11.05 +2024-07-27 10:35:47,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4760.0, ans=0.276875 +2024-07-27 10:35:49,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=4773.333333333333, ans=0.27625 +2024-07-27 10:36:00,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=4786.666666666667, ans=0.275625 +2024-07-27 10:36:04,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=4786.666666666667, ans=0.04949747468305833 +2024-07-27 10:36:07,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=4786.666666666667, ans=0.7324666666666667 +2024-07-27 10:36:11,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=11.1 +2024-07-27 10:36:12,122 INFO [train.py:1114] (0/4) Epoch 1, batch 3600, loss[loss=0.3756, simple_loss=0.3982, pruned_loss=0.1765, over 4964.00 frames. ], tot_loss[loss=0.4324, simple_loss=0.45, pruned_loss=0.2074, over 940361.90 frames. ], batch size: 13, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:36:13,741 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.357e+01 7.358e+01 8.127e+01 9.443e+01 1.425e+02, threshold=1.625e+02, percent-clipped=0.0 +2024-07-27 10:36:17,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.21 vs. limit=6.2 +2024-07-27 10:36:20,374 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.41 vs. limit=9.305 +2024-07-27 10:36:38,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.12 vs. limit=7.42 +2024-07-27 10:36:39,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.79 vs. 
limit=9.315 +2024-07-27 10:36:40,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4840.0, ans=0.273125 +2024-07-27 10:36:42,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=4840.0, ans=0.04949747468305833 +2024-07-27 10:36:43,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4840.0, ans=0.273125 +2024-07-27 10:37:20,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=4853.333333333333, ans=0.0 +2024-07-27 10:37:20,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=4853.333333333333, ans=0.27249999999999996 +2024-07-27 10:37:22,219 INFO [train.py:1114] (0/4) Epoch 1, batch 3650, loss[loss=0.4343, simple_loss=0.4531, pruned_loss=0.2077, over 4885.00 frames. ], tot_loss[loss=0.4309, simple_loss=0.4493, pruned_loss=0.2063, over 940570.33 frames. ], batch size: 15, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:37:29,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=4866.666666666667, ans=0.7296666666666667 +2024-07-27 10:37:30,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=4866.666666666667, ans=0.271875 +2024-07-27 10:37:32,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=9.33 +2024-07-27 10:37:33,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=4880.0, ans=0.04633333333333334 +2024-07-27 10:37:34,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4880.0, ans=0.27125 +2024-07-27 10:37:57,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=4906.666666666667, ans=0.27 +2024-07-27 10:38:02,369 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:38:06,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=4920.0, ans=0.034625 +2024-07-27 10:38:08,641 INFO [train.py:1114] (0/4) Epoch 1, batch 3700, loss[loss=0.4103, simple_loss=0.4454, pruned_loss=0.1876, over 4932.00 frames. ], tot_loss[loss=0.4295, simple_loss=0.4487, pruned_loss=0.2052, over 941481.11 frames. ], batch size: 14, lr: 4.26e-02, grad_scale: 32.0 +2024-07-27 10:38:09,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4933.333333333333, ans=0.26875 +2024-07-27 10:38:10,095 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.394e+01 7.604e+01 8.315e+01 9.088e+01 1.291e+02, threshold=1.663e+02, percent-clipped=0.0 +2024-07-27 10:38:18,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.32 vs. 
limit=11.21 +2024-07-27 10:38:18,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=4946.666666666667, ans=0.04605555555555556 +2024-07-27 10:38:19,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4946.666666666667, ans=0.268125 +2024-07-27 10:38:19,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4946.666666666667, ans=0.25053333333333333 +2024-07-27 10:38:32,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.73 vs. limit=11.23 +2024-07-27 10:38:32,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=4973.333333333333, ans=0.04594444444444445 +2024-07-27 10:38:35,341 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=11.23 +2024-07-27 10:38:38,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.77 vs. limit=11.23 +2024-07-27 10:38:39,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=4986.666666666667, ans=0.07 +2024-07-27 10:38:41,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=4986.666666666667, ans=0.009785507246376812 +2024-07-27 10:38:44,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=4986.666666666667, ans=0.7254666666666667 +2024-07-27 10:38:46,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=5000.0, ans=0.025 +2024-07-27 10:38:46,999 INFO [train.py:1114] (0/4) Epoch 1, batch 3750, loss[loss=0.3847, simple_loss=0.4085, pruned_loss=0.1805, over 4797.00 frames. ], tot_loss[loss=0.4267, simple_loss=0.4467, pruned_loss=0.2034, over 943042.28 frames. ], batch size: 11, lr: 4.26e-02, grad_scale: 32.0 +2024-07-27 10:38:47,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=5000.0, ans=0.8 +2024-07-27 10:39:00,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.66 vs. limit=9.379999999999999 +2024-07-27 10:39:01,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.91 vs. limit=9.379999999999999 +2024-07-27 10:39:06,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5026.666666666667, ans=0.264375 +2024-07-27 10:39:08,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=5026.666666666667, ans=0.264375 +2024-07-27 10:39:09,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. 
limit=3.754 +2024-07-27 10:39:11,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=11.27 +2024-07-27 10:39:20,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5040.0, ans=0.26375000000000004 +2024-07-27 10:39:21,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=5053.333333333333, ans=0.2758 +2024-07-27 10:39:25,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=5053.333333333333, ans=0.06841666666666668 +2024-07-27 10:39:27,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.39 vs. limit=11.29 +2024-07-27 10:39:28,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=5053.333333333333, ans=8.158333333333333 +2024-07-27 10:39:29,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=9.4 +2024-07-27 10:39:29,994 INFO [train.py:1114] (0/4) Epoch 1, batch 3800, loss[loss=0.4378, simple_loss=0.4539, pruned_loss=0.2109, over 4802.00 frames. ], tot_loss[loss=0.4273, simple_loss=0.4466, pruned_loss=0.204, over 941649.73 frames. ], batch size: 14, lr: 4.25e-02, grad_scale: 32.0 +2024-07-27 10:39:30,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.16 vs. limit=7.533333333333333 +2024-07-27 10:39:31,471 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.397e+01 7.848e+01 8.926e+01 1.062e+02 1.659e+02, threshold=1.785e+02, percent-clipped=0.0 +2024-07-27 10:39:42,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=5080.0, ans=0.045500000000000006 +2024-07-27 10:39:59,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=5106.666666666667, ans=0.260625 +2024-07-27 10:40:00,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=5106.666666666667, ans=0.025 +2024-07-27 10:40:09,707 INFO [train.py:1114] (0/4) Epoch 1, batch 3850, loss[loss=0.4358, simple_loss=0.4787, pruned_loss=0.1965, over 4644.00 frames. ], tot_loss[loss=0.424, simple_loss=0.4448, pruned_loss=0.2016, over 942356.26 frames. ], batch size: 16, lr: 4.24e-02, grad_scale: 32.0 +2024-07-27 10:40:17,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5133.333333333333, ans=0.259375 +2024-07-27 10:40:17,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=5133.333333333333, ans=0.009753623188405797 +2024-07-27 10:40:18,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.86 vs. 
limit=9.425 +2024-07-27 10:40:18,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5133.333333333333, ans=0.259375 +2024-07-27 10:40:37,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.19 vs. limit=7.58 +2024-07-27 10:40:41,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5160.0, ans=0.258125 +2024-07-27 10:40:44,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5160.0, ans=0.24839999999999998 +2024-07-27 10:40:45,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.89 vs. limit=3.774 +2024-07-27 10:40:46,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=5173.333333333333, ans=0.045111111111111116 +2024-07-27 10:40:49,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5173.333333333333, ans=0.2575 +2024-07-27 10:41:02,120 INFO [train.py:1114] (0/4) Epoch 1, batch 3900, loss[loss=0.4052, simple_loss=0.4388, pruned_loss=0.1857, over 4816.00 frames. ], tot_loss[loss=0.4219, simple_loss=0.4434, pruned_loss=0.2002, over 942463.09 frames. ], batch size: 14, lr: 4.24e-02, grad_scale: 32.0 +2024-07-27 10:41:05,351 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 7.258e+01 7.897e+01 8.876e+01 1.354e+02, threshold=1.579e+02, percent-clipped=0.0 +2024-07-27 10:41:05,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=5200.0, ans=0.7180000000000001 +2024-07-27 10:41:33,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=5240.0, ans=0.254375 +2024-07-27 10:41:37,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5253.333333333333, ans=0.24746666666666667 +2024-07-27 10:41:40,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.37 vs. limit=6.3133333333333335 +2024-07-27 10:41:46,755 INFO [train.py:1114] (0/4) Epoch 1, batch 3950, loss[loss=0.4103, simple_loss=0.4368, pruned_loss=0.1919, over 4832.00 frames. ], tot_loss[loss=0.4211, simple_loss=0.4434, pruned_loss=0.1994, over 944499.18 frames. ], batch size: 16, lr: 4.23e-02, grad_scale: 32.0 +2024-07-27 10:41:47,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=11.45 +2024-07-27 10:41:47,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5266.666666666667, ans=0.253125 +2024-07-27 10:41:50,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=11.45 +2024-07-27 10:41:51,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.35 vs. 
limit=11.45 +2024-07-27 10:41:56,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5280.0, ans=0.2525 +2024-07-27 10:42:02,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.01 vs. limit=9.48 +2024-07-27 10:42:03,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5280.0, ans=0.2525 +2024-07-27 10:42:33,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=5306.666666666667, ans=0.7142666666666667 +2024-07-27 10:42:34,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=5306.666666666667, ans=0.25125 +2024-07-27 10:42:41,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.43 vs. limit=6.33 +2024-07-27 10:42:42,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=5320.0, ans=0.00971304347826087 +2024-07-27 10:42:48,597 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-4000.pt +2024-07-27 10:42:59,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=5333.333333333333, ans=8.333333333333332 +2024-07-27 10:43:00,274 INFO [train.py:1114] (0/4) Epoch 1, batch 4000, loss[loss=0.3331, simple_loss=0.3808, pruned_loss=0.1427, over 4770.00 frames. ], tot_loss[loss=0.4228, simple_loss=0.4444, pruned_loss=0.2005, over 941115.05 frames. 
], batch size: 12, lr: 4.23e-02, grad_scale: 32.0 +2024-07-27 10:43:01,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.271e+01 7.652e+01 8.472e+01 9.315e+01 2.163e+02, threshold=1.694e+02, percent-clipped=2.0 +2024-07-27 10:43:06,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5333.333333333333, ans=0.25 +2024-07-27 10:43:06,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=5333.333333333333, ans=0.7133333333333334 +2024-07-27 10:43:10,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=5333.333333333333, ans=0.24666666666666667 +2024-07-27 10:43:10,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=5333.333333333333, ans=0.7133333333333334 +2024-07-27 10:43:10,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5333.333333333333, ans=0.25 +2024-07-27 10:43:13,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5346.666666666667, ans=0.0 +2024-07-27 10:43:23,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=5360.0, ans=0.009704347826086956 +2024-07-27 10:43:26,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=5360.0, ans=0.07 +2024-07-27 10:43:34,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5373.333333333333, ans=0.24812499999999998 +2024-07-27 10:43:47,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=5386.666666666667, ans=0.2475 +2024-07-27 10:43:47,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=5386.666666666667, ans=0.044222222222222225 +2024-07-27 10:43:52,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=5386.666666666667, ans=0.009698550724637682 +2024-07-27 10:43:55,177 INFO [train.py:1114] (0/4) Epoch 1, batch 4050, loss[loss=0.5735, simple_loss=0.5332, pruned_loss=0.3069, over 3078.00 frames. ], tot_loss[loss=0.4209, simple_loss=0.4432, pruned_loss=0.1993, over 938916.36 frames. ], batch size: 35, lr: 4.22e-02, grad_scale: 32.0 +2024-07-27 10:43:55,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=5400.0, ans=0.246875 +2024-07-27 10:43:56,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=6.16 +2024-07-27 10:43:59,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=5400.0, ans=0.246875 +2024-07-27 10:44:01,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.87 vs. 
limit=11.55 +2024-07-27 10:44:12,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=5413.333333333333, ans=0.24625000000000002 +2024-07-27 10:44:27,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=5440.0, ans=0.044000000000000004 +2024-07-27 10:44:29,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.18 vs. limit=11.58 +2024-07-27 10:44:45,015 INFO [train.py:1114] (0/4) Epoch 1, batch 4100, loss[loss=0.4409, simple_loss=0.4597, pruned_loss=0.2111, over 4896.00 frames. ], tot_loss[loss=0.4207, simple_loss=0.4429, pruned_loss=0.1993, over 938168.23 frames. ], batch size: 15, lr: 4.22e-02, grad_scale: 32.0 +2024-07-27 10:44:46,471 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.345e+01 7.438e+01 7.964e+01 9.010e+01 1.753e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 10:44:47,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5466.666666666667, ans=0.24375000000000002 +2024-07-27 10:44:55,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=5480.0, ans=0.032875 +2024-07-27 10:44:55,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=9.555 +2024-07-27 10:44:59,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=11.61 +2024-07-27 10:45:00,246 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.19 vs. limit=11.61 +2024-07-27 10:45:21,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=5506.666666666667, ans=0.241875 +2024-07-27 10:45:25,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.62 vs. limit=6.376666666666667 +2024-07-27 10:45:31,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=11.64 +2024-07-27 10:45:41,346 INFO [train.py:1114] (0/4) Epoch 1, batch 4150, loss[loss=0.3759, simple_loss=0.4059, pruned_loss=0.173, over 4825.00 frames. ], tot_loss[loss=0.4198, simple_loss=0.4423, pruned_loss=0.1987, over 938542.61 frames. 
], batch size: 13, lr: 4.21e-02, grad_scale: 32.0 +2024-07-27 10:45:47,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5533.333333333333, ans=0.0 +2024-07-27 10:45:50,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=5546.666666666667, ans=0.24 +2024-07-27 10:46:04,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5560.0, ans=0.2444 +2024-07-27 10:46:23,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=5573.333333333333, ans=0.24426666666666666 +2024-07-27 10:46:29,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=5573.333333333333, ans=0.23875000000000002 +2024-07-27 10:46:30,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5586.666666666667, ans=0.23812499999999998 +2024-07-27 10:46:38,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5600.0, ans=0.2375 +2024-07-27 10:46:38,731 INFO [train.py:1114] (0/4) Epoch 1, batch 4200, loss[loss=0.381, simple_loss=0.4165, pruned_loss=0.1728, over 4903.00 frames. ], tot_loss[loss=0.418, simple_loss=0.4411, pruned_loss=0.1975, over 940181.05 frames. ], batch size: 15, lr: 4.20e-02, grad_scale: 32.0 +2024-07-27 10:46:40,172 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.767e+01 7.218e+01 8.164e+01 9.157e+01 1.293e+02, threshold=1.633e+02, percent-clipped=0.0 +2024-07-27 10:47:05,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5626.666666666667, ans=0.24373333333333333 +2024-07-27 10:47:08,141 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:47:11,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.59 vs. limit=11.719999999999999 +2024-07-27 10:47:16,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=5640.0, ans=0.2846 +2024-07-27 10:47:16,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5640.0, ans=0.23562499999999997 +2024-07-27 10:47:30,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=5653.333333333333, ans=0.043111111111111114 +2024-07-27 10:47:30,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=5653.333333333333, ans=0.009640579710144927 +2024-07-27 10:47:33,677 INFO [train.py:1114] (0/4) Epoch 1, batch 4250, loss[loss=0.4003, simple_loss=0.4481, pruned_loss=0.1763, over 4633.00 frames. ], tot_loss[loss=0.4176, simple_loss=0.441, pruned_loss=0.1971, over 940512.87 frames. 
], batch size: 12, lr: 4.20e-02, grad_scale: 32.0 +2024-07-27 10:47:35,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5666.666666666667, ans=0.234375 +2024-07-27 10:47:46,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5666.666666666667, ans=0.24333333333333332 +2024-07-27 10:47:49,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.04 vs. limit=6.416666666666667 +2024-07-27 10:48:05,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.78 vs. limit=7.846666666666666 +2024-07-27 10:48:09,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5706.666666666667, ans=0.23249999999999998 +2024-07-27 10:48:11,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=5706.666666666667, ans=0.8070666666666666 +2024-07-27 10:48:20,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=5720.0, ans=0.231875 +2024-07-27 10:48:25,320 INFO [train.py:1114] (0/4) Epoch 1, batch 4300, loss[loss=0.43, simple_loss=0.4456, pruned_loss=0.2073, over 4760.00 frames. ], tot_loss[loss=0.4193, simple_loss=0.4424, pruned_loss=0.1981, over 940434.58 frames. ], batch size: 13, lr: 4.19e-02, grad_scale: 32.0 +2024-07-27 10:48:26,781 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.009e+01 7.278e+01 8.201e+01 9.440e+01 2.695e+02, threshold=1.640e+02, percent-clipped=2.0 +2024-07-27 10:48:29,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.70 vs. limit=11.8 +2024-07-27 10:48:35,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=5746.666666666667, ans=11.809999999999999 +2024-07-27 10:48:39,932 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.76 vs. limit=9.66 +2024-07-27 10:48:41,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=5760.0, ans=0.6984 +2024-07-27 10:48:44,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=9.66 +2024-07-27 10:49:02,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5786.666666666667, ans=0.22875 +2024-07-27 10:49:15,556 INFO [train.py:1114] (0/4) Epoch 1, batch 4350, loss[loss=0.3443, simple_loss=0.3931, pruned_loss=0.1477, over 4757.00 frames. ], tot_loss[loss=0.4176, simple_loss=0.442, pruned_loss=0.1966, over 941091.53 frames. ], batch size: 13, lr: 4.19e-02, grad_scale: 32.0 +2024-07-27 10:49:15,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.45 vs. 
limit=11.85 +2024-07-27 10:49:26,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.45 vs. limit=6.45 +2024-07-27 10:49:26,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.01 vs. limit=3.87 +2024-07-27 10:49:26,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5813.333333333333, ans=0.0 +2024-07-27 10:49:53,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=5840.0, ans=0.04949747468305833 +2024-07-27 10:49:53,846 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=8.199e-01 +2024-07-27 10:49:58,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=5853.333333333333, ans=0.6951333333333334 +2024-07-27 10:50:12,278 INFO [train.py:1114] (0/4) Epoch 1, batch 4400, loss[loss=0.4736, simple_loss=0.4867, pruned_loss=0.2303, over 4796.00 frames. ], tot_loss[loss=0.4196, simple_loss=0.4438, pruned_loss=0.1977, over 940793.06 frames. ], batch size: 14, lr: 4.18e-02, grad_scale: 32.0 +2024-07-27 10:50:13,758 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.875e+01 7.282e+01 8.065e+01 8.793e+01 1.417e+02, threshold=1.613e+02, percent-clipped=0.0 +2024-07-27 10:50:14,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=6.346666666666667 +2024-07-27 10:50:18,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.13 vs. limit=6.466666666666667 +2024-07-27 10:50:26,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.51 vs. limit=6.47 +2024-07-27 10:50:30,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.53 vs. limit=11.92 +2024-07-27 10:50:56,598 INFO [train.py:1114] (0/4) Epoch 1, batch 4450, loss[loss=0.3798, simple_loss=0.408, pruned_loss=0.1758, over 4947.00 frames. ], tot_loss[loss=0.418, simple_loss=0.4424, pruned_loss=0.1968, over 939031.02 frames. ], batch size: 12, lr: 4.17e-02, grad_scale: 32.0 +2024-07-27 10:51:11,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.62 vs. limit=6.378666666666667 +2024-07-27 10:51:16,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5960.0, ans=0.2404 +2024-07-27 10:51:17,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=11.969999999999999 +2024-07-27 10:51:24,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.11 vs. 
limit=9.735 +2024-07-27 10:51:43,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=5973.333333333333, ans=8.733333333333333 +2024-07-27 10:51:47,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5973.333333333333, ans=0.21999999999999997 +2024-07-27 10:52:02,921 INFO [train.py:1114] (0/4) Epoch 1, batch 4500, loss[loss=0.402, simple_loss=0.45, pruned_loss=0.177, over 4744.00 frames. ], tot_loss[loss=0.4173, simple_loss=0.4425, pruned_loss=0.196, over 938201.87 frames. ], batch size: 14, lr: 4.17e-02, grad_scale: 32.0 +2024-07-27 10:52:04,386 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.515e+01 7.518e+01 8.133e+01 8.921e+01 1.342e+02, threshold=1.627e+02, percent-clipped=0.0 +2024-07-27 10:52:15,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=6013.333333333333, ans=0.218125 +2024-07-27 10:52:31,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=6040.0, ans=0.0415 +2024-07-27 10:52:38,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=6053.333333333333, ans=0.04144444444444445 +2024-07-27 10:52:41,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=6053.333333333333, ans=0.025 +2024-07-27 10:52:42,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=6053.333333333333, ans=0.04144444444444445 +2024-07-27 10:52:47,951 INFO [train.py:1114] (0/4) Epoch 1, batch 4550, loss[loss=0.4656, simple_loss=0.4753, pruned_loss=0.2279, over 4896.00 frames. ], tot_loss[loss=0.4148, simple_loss=0.4406, pruned_loss=0.1945, over 940096.04 frames. ], batch size: 13, lr: 4.16e-02, grad_scale: 32.0 +2024-07-27 10:52:51,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6066.666666666667, ans=0.215625 +2024-07-27 10:53:48,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=6080.0, ans=0.009547826086956522 +2024-07-27 10:55:19,951 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:55:31,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=6120.0, ans=0.213125 +2024-07-27 10:55:36,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.37 vs. limit=6.448 +2024-07-27 10:55:36,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.58 vs. limit=12.09 +2024-07-27 10:55:40,600 INFO [train.py:1114] (0/4) Epoch 1, batch 4600, loss[loss=0.4447, simple_loss=0.4623, pruned_loss=0.2135, over 4527.00 frames. ], tot_loss[loss=0.4143, simple_loss=0.4401, pruned_loss=0.1943, over 938128.86 frames. 
], batch size: 21, lr: 4.15e-02, grad_scale: 32.0 +2024-07-27 10:55:47,211 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.044e+01 7.331e+01 8.005e+01 8.983e+01 1.431e+02, threshold=1.601e+02, percent-clipped=0.0 +2024-07-27 10:55:51,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6133.333333333333, ans=0.21250000000000002 +2024-07-27 10:56:15,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=6173.333333333333, ans=0.210625 +2024-07-27 10:56:24,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=6186.666666666667, ans=0.6834666666666667 +2024-07-27 10:56:28,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=6200.0, ans=0.20937499999999998 +2024-07-27 10:56:29,197 INFO [train.py:1114] (0/4) Epoch 1, batch 4650, loss[loss=0.4451, simple_loss=0.4681, pruned_loss=0.211, over 4840.00 frames. ], tot_loss[loss=0.4143, simple_loss=0.4405, pruned_loss=0.1941, over 939917.04 frames. ], batch size: 16, lr: 4.15e-02, grad_scale: 32.0 +2024-07-27 10:56:32,965 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:56:34,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.38 vs. limit=12.15 +2024-07-27 10:56:45,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.07 vs. limit=9.835 +2024-07-27 10:57:01,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6253.333333333333, ans=0.20687499999999998 +2024-07-27 10:57:05,912 INFO [train.py:1114] (0/4) Epoch 1, batch 4700, loss[loss=0.3762, simple_loss=0.3943, pruned_loss=0.179, over 4713.00 frames. ], tot_loss[loss=0.4103, simple_loss=0.4371, pruned_loss=0.1918, over 937555.28 frames. ], batch size: 11, lr: 4.14e-02, grad_scale: 32.0 +2024-07-27 10:57:07,357 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.557e+01 7.394e+01 8.015e+01 9.109e+01 1.664e+02, threshold=1.603e+02, percent-clipped=1.0 +2024-07-27 10:57:08,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6266.666666666667, ans=0.20625 +2024-07-27 10:57:14,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=6280.0, ans=0.205625 +2024-07-27 10:57:16,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.60 vs. limit=12.21 +2024-07-27 10:57:18,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=6.5120000000000005 +2024-07-27 10:57:32,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=6306.666666666667, ans=0.6792666666666667 +2024-07-27 10:57:36,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. 
limit=3.948 +2024-07-27 10:57:40,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=6320.0, ans=0.009495652173913044 +2024-07-27 10:57:40,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=6320.0, ans=0.20375 +2024-07-27 10:57:40,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6320.0, ans=0.0 +2024-07-27 10:57:44,067 INFO [train.py:1114] (0/4) Epoch 1, batch 4750, loss[loss=0.5366, simple_loss=0.5317, pruned_loss=0.2707, over 4642.00 frames. ], tot_loss[loss=0.4126, simple_loss=0.4384, pruned_loss=0.1934, over 935833.75 frames. ], batch size: 22, lr: 4.14e-02, grad_scale: 64.0 +2024-07-27 10:57:49,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. limit=9.875 +2024-07-27 10:57:51,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=6333.333333333333, ans=0.06041666666666667 +2024-07-27 10:57:55,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6346.666666666667, ans=0.2025 +2024-07-27 10:58:00,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.38 vs. limit=12.26 +2024-07-27 10:58:13,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=6373.333333333333, ans=0.6769333333333334 +2024-07-27 10:58:13,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=6373.333333333333, ans=0.8137333333333333 +2024-07-27 10:58:16,932 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.59 vs. limit=12.280000000000001 +2024-07-27 10:58:25,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.45 vs. limit=9.895 +2024-07-27 10:58:26,654 INFO [train.py:1114] (0/4) Epoch 1, batch 4800, loss[loss=0.4153, simple_loss=0.4464, pruned_loss=0.1921, over 4702.00 frames. ], tot_loss[loss=0.4107, simple_loss=0.4369, pruned_loss=0.1923, over 933319.85 frames. ], batch size: 13, lr: 4.13e-02, grad_scale: 64.0 +2024-07-27 10:58:28,212 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.395e+01 7.298e+01 7.833e+01 8.734e+01 1.995e+02, threshold=1.567e+02, percent-clipped=2.0 +2024-07-27 10:58:29,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6400.0, ans=0.2 +2024-07-27 10:58:29,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=3.96 +2024-07-27 10:58:30,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6400.0, ans=0.2 +2024-07-27 10:58:33,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. 
limit=9.9 +2024-07-27 10:58:34,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.66 vs. limit=12.309999999999999 +2024-07-27 10:58:35,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=6413.333333333333, ans=0.19937500000000002 +2024-07-27 10:58:41,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=6413.333333333333, ans=9.905 +2024-07-27 10:58:46,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.41 vs. limit=8.213333333333333 +2024-07-27 10:59:05,501 INFO [train.py:1114] (0/4) Epoch 1, batch 4850, loss[loss=0.3611, simple_loss=0.4087, pruned_loss=0.1568, over 4745.00 frames. ], tot_loss[loss=0.4107, simple_loss=0.4371, pruned_loss=0.1922, over 932797.15 frames. ], batch size: 14, lr: 4.12e-02, grad_scale: 64.0 +2024-07-27 10:59:14,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=3.972 +2024-07-27 10:59:23,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.71 vs. limit=6.623333333333333 +2024-07-27 10:59:36,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=6520.0, ans=9.945 +2024-07-27 10:59:43,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=6520.0, ans=0.6718000000000001 +2024-07-27 10:59:44,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.59 vs. limit=12.4 +2024-07-27 10:59:44,802 INFO [train.py:1114] (0/4) Epoch 1, batch 4900, loss[loss=0.3911, simple_loss=0.4189, pruned_loss=0.1817, over 4769.00 frames. ], tot_loss[loss=0.4068, simple_loss=0.434, pruned_loss=0.1898, over 934584.78 frames. ], batch size: 13, lr: 4.12e-02, grad_scale: 64.0 +2024-07-27 10:59:45,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=6533.333333333333, ans=0.19374999999999998 +2024-07-27 10:59:46,289 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.298e+01 7.338e+01 8.038e+01 8.614e+01 1.106e+02, threshold=1.608e+02, percent-clipped=0.0 +2024-07-27 10:59:46,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.20 vs. 
limit=9.95 +2024-07-27 10:59:50,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=6533.333333333333, ans=0.13956666666666667 +2024-07-27 10:59:51,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=6533.333333333333, ans=0.035 +2024-07-27 10:59:56,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=6546.666666666667, ans=0.193125 +2024-07-27 11:00:02,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6560.0, ans=0.0 +2024-07-27 11:00:04,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6560.0, ans=0.1925 +2024-07-27 11:00:11,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.17 vs. limit=9.965 +2024-07-27 11:00:12,526 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.12 vs. limit=8.286666666666667 +2024-07-27 11:00:23,809 INFO [train.py:1114] (0/4) Epoch 1, batch 4950, loss[loss=0.5059, simple_loss=0.4866, pruned_loss=0.2626, over 3274.00 frames. ], tot_loss[loss=0.4084, simple_loss=0.435, pruned_loss=0.1909, over 931683.93 frames. ], batch size: 35, lr: 4.11e-02, grad_scale: 64.0 +2024-07-27 11:00:29,865 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:00:38,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.55 vs. limit=12.469999999999999 +2024-07-27 11:00:40,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=6626.666666666667, ans=9.141666666666666 +2024-07-27 11:00:43,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=6.650666666666667 +2024-07-27 11:00:51,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=6640.0, ans=0.18874999999999997 +2024-07-27 11:00:52,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=6653.333333333333, ans=0.6671333333333334 +2024-07-27 11:00:54,852 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=2.935e-03 +2024-07-27 11:01:00,650 INFO [train.py:1114] (0/4) Epoch 1, batch 5000, loss[loss=0.3979, simple_loss=0.4353, pruned_loss=0.1803, over 4665.00 frames. ], tot_loss[loss=0.4064, simple_loss=0.4341, pruned_loss=0.1893, over 935486.81 frames. ], batch size: 14, lr: 4.10e-02, grad_scale: 64.0 +2024-07-27 11:01:01,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.00 vs. 
limit=4.0 +2024-07-27 11:01:02,002 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.106e+01 7.393e+01 8.012e+01 9.177e+01 1.350e+02, threshold=1.602e+02, percent-clipped=0.0 +2024-07-27 11:01:03,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=12.5 +2024-07-27 11:01:07,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=6680.0, ans=0.13821 +2024-07-27 11:01:07,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=6680.0, ans=0.186875 +2024-07-27 11:01:15,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=6693.333333333333, ans=0.03877777777777778 +2024-07-27 11:01:26,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=6706.666666666667, ans=0.18562499999999998 +2024-07-27 11:01:26,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.58 vs. limit=12.530000000000001 +2024-07-27 11:01:30,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=6720.0, ans=0.185 +2024-07-27 11:01:35,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=6720.0, ans=0.009408695652173914 +2024-07-27 11:01:36,541 INFO [train.py:1114] (0/4) Epoch 1, batch 5050, loss[loss=0.3397, simple_loss=0.3743, pruned_loss=0.1525, over 4849.00 frames. ], tot_loss[loss=0.4054, simple_loss=0.4339, pruned_loss=0.1884, over 937689.48 frames. ], batch size: 12, lr: 4.10e-02, grad_scale: 64.0 +2024-07-27 11:01:37,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=6733.333333333333, ans=0.6643333333333333 +2024-07-27 11:01:38,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6733.333333333333, ans=0.23266666666666666 +2024-07-27 11:01:47,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=6746.666666666667, ans=0.23253333333333331 +2024-07-27 11:01:47,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. limit=4.0120000000000005 +2024-07-27 11:01:48,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=6746.666666666667, ans=0.0 +2024-07-27 11:01:48,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=6746.666666666667, ans=0.23253333333333331 +2024-07-27 11:01:49,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6746.666666666667, ans=0.23253333333333331 +2024-07-27 11:01:53,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.63 vs. 
limit=10.035 +2024-07-27 11:01:57,261 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=21.70 vs. limit=10.035 +2024-07-27 11:02:00,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=6773.333333333333, ans=0.1825 +2024-07-27 11:02:12,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=6786.666666666667, ans=0.181875 +2024-07-27 11:02:14,733 INFO [train.py:1114] (0/4) Epoch 1, batch 5100, loss[loss=0.346, simple_loss=0.3869, pruned_loss=0.1526, over 4775.00 frames. ], tot_loss[loss=0.4061, simple_loss=0.4348, pruned_loss=0.1887, over 935077.61 frames. ], batch size: 12, lr: 4.09e-02, grad_scale: 64.0 +2024-07-27 11:02:16,198 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.973e+01 7.191e+01 7.778e+01 8.421e+01 1.083e+02, threshold=1.556e+02, percent-clipped=0.0 +2024-07-27 11:02:23,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=6813.333333333333, ans=0.6615333333333333 +2024-07-27 11:02:27,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=6813.333333333333, ans=0.18062499999999998 +2024-07-27 11:02:42,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=12.629999999999999 +2024-07-27 11:02:49,258 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:02:51,282 INFO [train.py:1114] (0/4) Epoch 1, batch 5150, loss[loss=0.4482, simple_loss=0.4657, pruned_loss=0.2153, over 4842.00 frames. ], tot_loss[loss=0.4073, simple_loss=0.4358, pruned_loss=0.1894, over 935869.44 frames. ], batch size: 16, lr: 4.09e-02, grad_scale: 64.0 +2024-07-27 11:03:06,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.23 vs. limit=12.67 +2024-07-27 11:03:09,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=6893.333333333333, ans=0.176875 +2024-07-27 11:03:15,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=6906.666666666667, ans=0.03788888888888889 +2024-07-27 11:03:19,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.70 vs. limit=6.726666666666667 +2024-07-27 11:03:26,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=6920.0, ans=0.3038 +2024-07-27 11:03:33,713 INFO [train.py:1114] (0/4) Epoch 1, batch 5200, loss[loss=0.4165, simple_loss=0.4603, pruned_loss=0.1863, over 4669.00 frames. ], tot_loss[loss=0.4035, simple_loss=0.4333, pruned_loss=0.1868, over 935830.56 frames. 
], batch size: 14, lr: 4.08e-02, grad_scale: 64.0 +2024-07-27 11:03:35,264 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.869e+01 7.238e+01 8.043e+01 8.705e+01 1.237e+02, threshold=1.609e+02, percent-clipped=0.0 +2024-07-27 11:03:53,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=6960.0, ans=0.009356521739130435 +2024-07-27 11:03:54,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=6960.0, ans=0.17375000000000002 +2024-07-27 11:04:00,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=6973.333333333333, ans=0.0 +2024-07-27 11:04:00,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6973.333333333333, ans=0.23026666666666668 +2024-07-27 11:04:06,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=6986.666666666667, ans=0.1725 +2024-07-27 11:04:11,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=6986.666666666667, ans=0.025 +2024-07-27 11:04:13,862 INFO [train.py:1114] (0/4) Epoch 1, batch 5250, loss[loss=0.3571, simple_loss=0.397, pruned_loss=0.1586, over 4901.00 frames. ], tot_loss[loss=0.4007, simple_loss=0.4311, pruned_loss=0.1852, over 935319.44 frames. ], batch size: 13, lr: 4.07e-02, grad_scale: 64.0 +2024-07-27 11:04:18,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.03 vs. limit=8.5 +2024-07-27 11:04:40,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=7026.666666666667, ans=10.135 +2024-07-27 11:04:42,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.92 vs. limit=8.513333333333334 +2024-07-27 11:04:47,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.33 vs. limit=4.056 +2024-07-27 11:04:55,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=7053.333333333333, ans=0.8205333333333333 +2024-07-27 11:04:55,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.10 vs. limit=12.79 +2024-07-27 11:04:58,106 INFO [train.py:1114] (0/4) Epoch 1, batch 5300, loss[loss=0.4715, simple_loss=0.4885, pruned_loss=0.2273, over 4636.00 frames. ], tot_loss[loss=0.401, simple_loss=0.4311, pruned_loss=0.1854, over 933723.95 frames. ], batch size: 16, lr: 4.07e-02, grad_scale: 64.0 +2024-07-27 11:05:04,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+01 7.230e+01 7.839e+01 8.733e+01 1.218e+02, threshold=1.568e+02, percent-clipped=0.0 +2024-07-27 11:05:10,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.43 vs. 
limit=12.809999999999999 +2024-07-27 11:05:14,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=12.809999999999999 +2024-07-27 11:05:15,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7080.0, ans=0.22920000000000001 +2024-07-27 11:05:25,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=7106.666666666667, ans=0.03705555555555556 +2024-07-27 11:05:25,088 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:05:28,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=7106.666666666667, ans=0.09899494936611666 +2024-07-27 11:05:29,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7106.666666666667, ans=0.22893333333333332 +2024-07-27 11:05:35,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=7120.0, ans=0.16625 +2024-07-27 11:05:40,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7120.0, ans=0.16625 +2024-07-27 11:05:41,456 INFO [train.py:1114] (0/4) Epoch 1, batch 5350, loss[loss=0.3842, simple_loss=0.3972, pruned_loss=0.1856, over 4514.00 frames. ], tot_loss[loss=0.4015, simple_loss=0.432, pruned_loss=0.1855, over 935671.21 frames. ], batch size: 10, lr: 4.06e-02, grad_scale: 64.0 +2024-07-27 11:05:51,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7146.666666666667, ans=0.2285333333333333 +2024-07-27 11:05:55,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=7146.666666666667, ans=0.16499999999999998 +2024-07-27 11:06:16,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=7186.666666666667, ans=9.491666666666667 +2024-07-27 11:06:20,220 INFO [train.py:1114] (0/4) Epoch 1, batch 5400, loss[loss=0.4787, simple_loss=0.4786, pruned_loss=0.2394, over 4353.00 frames. ], tot_loss[loss=0.4034, simple_loss=0.4331, pruned_loss=0.1868, over 929814.32 frames. ], batch size: 25, lr: 4.05e-02, grad_scale: 64.0 +2024-07-27 11:06:21,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7200.0, ans=0.16249999999999998 +2024-07-27 11:06:21,687 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.213e+01 7.171e+01 7.909e+01 8.696e+01 2.349e+02, threshold=1.582e+02, percent-clipped=3.0 +2024-07-27 11:06:21,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7200.0, ans=0.16249999999999998 +2024-07-27 11:06:22,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.25 vs. 
limit=8.6 +2024-07-27 11:06:50,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=7253.333333333333, ans=0.15999999999999998 +2024-07-27 11:06:55,850 INFO [train.py:1114] (0/4) Epoch 1, batch 5450, loss[loss=0.4485, simple_loss=0.4719, pruned_loss=0.2126, over 4691.00 frames. ], tot_loss[loss=0.4008, simple_loss=0.4316, pruned_loss=0.185, over 932481.44 frames. ], batch size: 11, lr: 4.05e-02, grad_scale: 64.0 +2024-07-27 11:06:55,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=7266.666666666667, ans=0.159375 +2024-07-27 11:06:56,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7266.666666666667, ans=0.159375 +2024-07-27 11:06:59,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=7266.666666666667, ans=0.6456666666666666 +2024-07-27 11:07:04,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7280.0, ans=0.15875 +2024-07-27 11:07:07,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=7280.0, ans=0.15875 +2024-07-27 11:07:16,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=7293.333333333333, ans=0.025 +2024-07-27 11:07:18,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=7293.333333333333, ans=0.036277777777777784 +2024-07-27 11:07:28,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=7306.666666666667, ans=8.653333333333334 +2024-07-27 11:07:29,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=7320.0, ans=0.0 +2024-07-27 11:07:31,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7320.0, ans=0.0 +2024-07-27 11:07:36,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=7333.333333333333, ans=0.22666666666666668 +2024-07-27 11:07:37,166 INFO [train.py:1114] (0/4) Epoch 1, batch 5500, loss[loss=0.5166, simple_loss=0.4983, pruned_loss=0.2675, over 4255.00 frames. ], tot_loss[loss=0.4001, simple_loss=0.4305, pruned_loss=0.1848, over 930257.10 frames. ], batch size: 25, lr: 4.04e-02, grad_scale: 64.0 +2024-07-27 11:07:38,664 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.895e+01 7.344e+01 7.791e+01 8.854e+01 1.594e+02, threshold=1.558e+02, percent-clipped=1.0 +2024-07-27 11:07:42,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=7333.333333333333, ans=0.15625 +2024-07-27 11:07:48,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.41 vs. 
limit=13.01
+2024-07-27 11:08:03,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=7373.333333333333, ans=0.15437499999999998
+2024-07-27 11:08:07,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=7386.666666666667, ans=0.15375
+2024-07-27 11:08:10,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=7386.666666666667, ans=0.6414666666666666
+2024-07-27 11:08:14,262 INFO [train.py:1114] (0/4) Epoch 1, batch 5550, loss[loss=0.3625, simple_loss=0.4024, pruned_loss=0.1613, over 4714.00 frames. ], tot_loss[loss=0.4005, simple_loss=0.4312, pruned_loss=0.1849, over 932750.04 frames. ], batch size: 12, lr: 4.03e-02, grad_scale: 64.0
+2024-07-27 11:08:16,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.29 vs. limit=10.275
+2024-07-27 11:08:25,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7413.333333333333, ans=0.22586666666666666
+2024-07-27 11:08:27,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.37 vs. limit=13.059999999999999
+2024-07-27 11:08:29,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=7426.666666666667, ans=0.07
+2024-07-27 11:08:31,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=7426.666666666667, ans=0.15187499999999998
+2024-07-27 11:08:37,241 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=4.116
+2024-07-27 11:08:49,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=7453.333333333333, ans=0.150625
+2024-07-27 11:08:50,831 INFO [train.py:1114] (0/4) Epoch 1, batch 5600, loss[loss=0.4132, simple_loss=0.4368, pruned_loss=0.1948, over 4751.00 frames. ], tot_loss[loss=0.4002, simple_loss=0.4311, pruned_loss=0.1847, over 934170.72 frames. ], batch size: 14, lr: 4.03e-02, grad_scale: 64.0
+2024-07-27 11:08:52,373 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.970e+01 7.181e+01 7.813e+01 8.583e+01 1.892e+02, threshold=1.563e+02, percent-clipped=1.0
+2024-07-27 11:08:57,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.09 vs. limit=10.3
+2024-07-27 11:09:02,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7480.0, ans=0.035500000000000004
+2024-07-27 11:09:08,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=10.31
+2024-07-27 11:09:13,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.86 vs. limit=6.876666666666667
+2024-07-27 11:09:17,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=7506.666666666667, ans=0.22493333333333332
+2024-07-27 11:09:20,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.67 vs. limit=8.76
+2024-07-27 11:09:29,089 INFO [train.py:1114] (0/4) Epoch 1, batch 5650, loss[loss=0.3893, simple_loss=0.4239, pruned_loss=0.1773, over 4574.00 frames. ], tot_loss[loss=0.3989, simple_loss=0.4295, pruned_loss=0.1842, over 936711.22 frames. ], batch size: 21, lr: 4.02e-02, grad_scale: 64.0
+2024-07-27 11:09:30,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=7533.333333333333, ans=0.035
+2024-07-27 11:09:37,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=7546.666666666667, ans=0.14625
+2024-07-27 11:09:42,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=7546.666666666667, ans=0.04949747468305833
+2024-07-27 11:09:48,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7560.0, ans=0.145625
+2024-07-27 11:09:53,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=7573.333333333333, ans=0.14500000000000002
+2024-07-27 11:09:54,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7573.333333333333, ans=0.14500000000000002
+2024-07-27 11:09:58,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7586.666666666667, ans=0.14437499999999998
+2024-07-27 11:10:03,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=7586.666666666667, ans=0.035055555555555555
+2024-07-27 11:10:05,073 INFO [train.py:1114] (0/4) Epoch 1, batch 5700, loss[loss=0.3897, simple_loss=0.4214, pruned_loss=0.179, over 4689.00 frames. ], tot_loss[loss=0.398, simple_loss=0.429, pruned_loss=0.1835, over 937944.83 frames. ], batch size: 13, lr: 4.02e-02, grad_scale: 64.0
+2024-07-27 11:10:06,393 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.074e+01 7.227e+01 8.129e+01 9.173e+01 1.333e+02, threshold=1.626e+02, percent-clipped=0.0
+2024-07-27 11:10:09,067 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=10.35
+2024-07-27 11:10:27,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.01 vs. limit=13.23
+2024-07-27 11:10:29,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7640.0, ans=0.22360000000000002
+2024-07-27 11:10:32,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=7640.0, ans=0.034833333333333334
+2024-07-27 11:10:41,771 INFO [train.py:1114] (0/4) Epoch 1, batch 5750, loss[loss=0.3996, simple_loss=0.4503, pruned_loss=0.1745, over 4727.00 frames. ], tot_loss[loss=0.3988, simple_loss=0.4301, pruned_loss=0.1838, over 938393.88 frames. ], batch size: 19, lr: 4.01e-02, grad_scale: 64.0
+2024-07-27 11:10:46,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=7666.666666666667, ans=0.140625
+2024-07-27 11:10:48,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=7680.0, ans=0.03466666666666667
+2024-07-27 11:10:50,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=7680.0, ans=0.14
+2024-07-27 11:11:01,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.95 vs. limit=13.27
+2024-07-27 11:11:25,453 INFO [train.py:1114] (0/4) Epoch 1, batch 5800, loss[loss=0.4511, simple_loss=0.4768, pruned_loss=0.2127, over 4764.00 frames. ], tot_loss[loss=0.4012, simple_loss=0.4322, pruned_loss=0.1851, over 937517.44 frames. ], batch size: 19, lr: 4.00e-02, grad_scale: 64.0
+2024-07-27 11:11:26,802 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.680e+01 7.353e+01 8.081e+01 9.227e+01 1.347e+02, threshold=1.616e+02, percent-clipped=0.0
+2024-07-27 11:11:34,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.54 vs. limit=8.873333333333333
+2024-07-27 11:11:40,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.36 vs. limit=8.879999999999999
+2024-07-27 11:11:48,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.97 vs. limit=13.33
+2024-07-27 11:11:48,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.23 vs. limit=13.33
+2024-07-27 11:11:58,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=7786.666666666667, ans=0.135
+2024-07-27 11:12:04,117 INFO [train.py:1114] (0/4) Epoch 1, batch 5850, loss[loss=0.4793, simple_loss=0.4912, pruned_loss=0.2337, over 4424.00 frames. ], tot_loss[loss=0.3995, simple_loss=0.4308, pruned_loss=0.184, over 938001.21 frames. ], batch size: 21, lr: 4.00e-02, grad_scale: 64.0
+2024-07-27 11:12:13,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=7813.333333333333, ans=0.03411111111111112
+2024-07-27 11:12:17,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=7826.666666666667, ans=0.034055555555555554
+2024-07-27 11:12:18,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. limit=10.435
+2024-07-27 11:12:20,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=7826.666666666667, ans=0.133125
+2024-07-27 11:12:32,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.89 vs. limit=13.39
+2024-07-27 11:12:39,508 INFO [train.py:1114] (0/4) Epoch 1, batch 5900, loss[loss=0.4748, simple_loss=0.484, pruned_loss=0.2328, over 4689.00 frames. ], tot_loss[loss=0.3993, simple_loss=0.4305, pruned_loss=0.184, over 938366.96 frames. ], batch size: 15, lr: 3.99e-02, grad_scale: 64.0
+2024-07-27 11:12:40,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=7866.666666666667, ans=0.033888888888888885
+2024-07-27 11:12:40,946 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.265e+01 7.754e+01 8.488e+01 1.052e+02, threshold=1.551e+02, percent-clipped=0.0
+2024-07-27 11:12:45,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=10.45
+2024-07-27 11:12:47,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=7880.0, ans=0.8288
+2024-07-27 11:12:49,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=7880.0, ans=0.0
+2024-07-27 11:13:01,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7906.666666666667, ans=0.0
+2024-07-27 11:13:10,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=7920.0, ans=0.6228
+2024-07-27 11:13:14,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=7933.333333333333, ans=0.128125
+2024-07-27 11:13:14,896 INFO [train.py:1114] (0/4) Epoch 1, batch 5950, loss[loss=0.3948, simple_loss=0.4254, pruned_loss=0.1821, over 4692.00 frames. ], tot_loss[loss=0.3966, simple_loss=0.4291, pruned_loss=0.1821, over 940221.50 frames. ], batch size: 15, lr: 3.98e-02, grad_scale: 64.0
+2024-07-27 11:13:23,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.45 vs. limit=6.986666666666666
+2024-07-27 11:13:27,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.54 vs. limit=10.48
+2024-07-27 11:13:28,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.81 vs. limit=13.46
+2024-07-27 11:13:37,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=7973.333333333333, ans=0.12624999999999997
+2024-07-27 11:13:40,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=7973.333333333333, ans=0.6209333333333333
+2024-07-27 11:13:50,888 INFO [train.py:1114] (0/4) Epoch 1, batch 6000, loss[loss=0.4042, simple_loss=0.4483, pruned_loss=0.1801, over 4291.00 frames. ], tot_loss[loss=0.3949, simple_loss=0.4276, pruned_loss=0.1811, over 936825.57 frames. ], batch size: 25, lr: 3.98e-02, grad_scale: 64.0
+2024-07-27 11:13:50,889 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 11:14:06,849 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.6903, 3.2736, 2.9638, 3.1275, 3.1659, 3.2774, 2.7095, 3.0499],
+       device='cuda:0')
+2024-07-27 11:14:12,395 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.7262, 2.5874, 1.9991, 1.8820, 1.9105, 2.1007, 2.1921, 1.5520],
+       device='cuda:0')
+2024-07-27 11:14:16,112 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=0.3082, simple_loss=0.3886, pruned_loss=0.1139, over 944034.00 frames.
+2024-07-27 11:14:16,113 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 11:14:17,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.690e+01 7.303e+01 7.945e+01 8.512e+01 1.515e+02, threshold=1.589e+02, percent-clipped=0.0
+2024-07-27 11:14:21,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=8000.0, ans=0.125
+2024-07-27 11:14:32,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.06 vs. limit=13.52
+2024-07-27 11:14:40,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8040.0, ans=0.21960000000000002
+2024-07-27 11:14:41,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.85 vs. limit=7.01
+2024-07-27 11:14:41,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.03 vs. limit=7.01
+2024-07-27 11:14:50,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=8053.333333333333, ans=0.125
+2024-07-27 11:14:52,562 INFO [train.py:1114] (0/4) Epoch 1, batch 6050, loss[loss=0.387, simple_loss=0.4193, pruned_loss=0.1773, over 4780.00 frames. ], tot_loss[loss=0.3926, simple_loss=0.4256, pruned_loss=0.1799, over 938066.45 frames. ], batch size: 12, lr: 3.97e-02, grad_scale: 64.0
+2024-07-27 11:14:53,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8066.666666666667, ans=0.125
+2024-07-27 11:14:53,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.15 vs. limit=13.55
+2024-07-27 11:14:56,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8066.666666666667, ans=0.21933333333333332
+2024-07-27 11:14:58,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.45 vs. limit=13.55
+2024-07-27 11:15:05,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=13.559999999999999
+2024-07-27 11:15:14,473 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:15:20,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=8106.666666666667, ans=0.3216
+2024-07-27 11:15:28,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.66 vs. limit=10.545
+2024-07-27 11:15:30,043 INFO [train.py:1114] (0/4) Epoch 1, batch 6100, loss[loss=0.4019, simple_loss=0.4412, pruned_loss=0.1813, over 4678.00 frames. ], tot_loss[loss=0.3911, simple_loss=0.4246, pruned_loss=0.1788, over 937431.92 frames. ], batch size: 15, lr: 3.96e-02, grad_scale: 64.0
+2024-07-27 11:15:31,488 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.445e+01 6.771e+01 7.517e+01 8.445e+01 1.300e+02, threshold=1.503e+02, percent-clipped=0.0
+2024-07-27 11:15:36,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.41 vs. limit=4.22
+2024-07-27 11:15:38,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=8146.666666666667, ans=10.0
+2024-07-27 11:15:42,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=8146.666666666667, ans=0.09899494936611666
+2024-07-27 11:15:48,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=8160.0, ans=0.03266666666666667
+2024-07-27 11:15:51,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.31 vs. limit=9.086666666666666
+2024-07-27 11:16:02,650 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:16:05,351 INFO [train.py:1114] (0/4) Epoch 1, batch 6150, loss[loss=0.4917, simple_loss=0.4778, pruned_loss=0.2529, over 3647.00 frames. ], tot_loss[loss=0.3895, simple_loss=0.4239, pruned_loss=0.1775, over 936805.88 frames. ], batch size: 36, lr: 3.96e-02, grad_scale: 64.0
+2024-07-27 11:16:10,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=8200.0, ans=0.07
+2024-07-27 11:16:32,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=8240.0, ans=0.3236
+2024-07-27 11:16:38,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8253.333333333334, ans=0.125
+2024-07-27 11:16:43,761 INFO [train.py:1114] (0/4) Epoch 1, batch 6200, loss[loss=0.4144, simple_loss=0.4278, pruned_loss=0.2005, over 4746.00 frames. ], tot_loss[loss=0.3904, simple_loss=0.4246, pruned_loss=0.1782, over 936324.34 frames. ], batch size: 14, lr: 3.95e-02, grad_scale: 64.0
+2024-07-27 11:16:45,337 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+01 7.091e+01 7.789e+01 8.708e+01 1.298e+02, threshold=1.558e+02, percent-clipped=0.0
+2024-07-27 11:17:06,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=8280.0, ans=0.125
+2024-07-27 11:17:18,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.57 vs. limit=13.72
+2024-07-27 11:17:31,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=8320.0, ans=0.09899494936611666
+2024-07-27 11:17:33,942 INFO [train.py:1114] (0/4) Epoch 1, batch 6250, loss[loss=0.414, simple_loss=0.4497, pruned_loss=0.1892, over 4807.00 frames. ], tot_loss[loss=0.3922, simple_loss=0.4254, pruned_loss=0.1795, over 932871.97 frames. ], batch size: 14, lr: 3.94e-02, grad_scale: 64.0
+2024-07-27 11:17:36,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.14 vs. limit=13.75
+2024-07-27 11:17:41,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=8346.666666666666, ans=0.025
+2024-07-27 11:17:42,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=8346.666666666666, ans=0.125
+2024-07-27 11:17:43,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.00 vs. limit=13.759999999999998
+2024-07-27 11:17:44,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.28 vs. limit=7.086666666666666
+2024-07-27 11:17:54,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=8373.333333333334, ans=0.125
+2024-07-27 11:18:03,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=8386.666666666666, ans=0.07
+2024-07-27 11:18:09,417 INFO [train.py:1114] (0/4) Epoch 1, batch 6300, loss[loss=0.3213, simple_loss=0.3726, pruned_loss=0.135, over 4514.00 frames. ], tot_loss[loss=0.3928, simple_loss=0.4256, pruned_loss=0.18, over 929404.73 frames. ], batch size: 10, lr: 3.94e-02, grad_scale: 64.0
+2024-07-27 11:18:10,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.72 vs. limit=7.359999999999999
+2024-07-27 11:18:10,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.284e+01 7.148e+01 7.847e+01 8.773e+01 1.332e+02, threshold=1.569e+02, percent-clipped=0.0
+2024-07-27 11:18:14,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.55 vs. limit=7.359999999999999
+2024-07-27 11:18:43,785 INFO [train.py:1114] (0/4) Epoch 1, batch 6350, loss[loss=0.4201, simple_loss=0.4565, pruned_loss=0.1919, over 4506.00 frames. ], tot_loss[loss=0.3921, simple_loss=0.4255, pruned_loss=0.1794, over 933657.50 frames. ], batch size: 21, lr: 3.93e-02, grad_scale: 64.0
+2024-07-27 11:18:49,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=8466.666666666666, ans=0.125
+2024-07-27 11:18:55,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.14 vs. limit=10.68
+2024-07-27 11:19:13,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.68 vs. limit=10.695
+2024-07-27 11:19:13,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=8520.0, ans=0.009017391304347826
+2024-07-27 11:19:19,051 INFO [train.py:1114] (0/4) Epoch 1, batch 6400, loss[loss=0.4277, simple_loss=0.4531, pruned_loss=0.2011, over 4637.00 frames. ], tot_loss[loss=0.3932, simple_loss=0.4266, pruned_loss=0.1799, over 935395.45 frames. ], batch size: 13, lr: 3.92e-02, grad_scale: 64.0
+2024-07-27 11:19:20,481 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.135e+01 7.649e+01 8.994e+01 1.161e+02, threshold=1.530e+02, percent-clipped=0.0
+2024-07-27 11:19:24,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=8533.333333333334, ans=0.21466666666666667
+2024-07-27 11:19:28,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.83 vs. limit=10.705
+2024-07-27 11:19:37,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=10.705
+2024-07-27 11:19:39,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=8546.666666666666, ans=0.125
+2024-07-27 11:19:40,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=4.282
+2024-07-27 11:19:41,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.80 vs. limit=5.712
+2024-07-27 11:19:47,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8573.333333333334, ans=0.21426666666666666
+2024-07-27 11:19:49,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.89 vs. limit=9.286666666666667
+2024-07-27 11:19:54,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=8586.666666666666, ans=0.009002898550724638
+2024-07-27 11:20:00,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8586.666666666666, ans=0.0
+2024-07-27 11:20:02,091 INFO [train.py:1114] (0/4) Epoch 1, batch 6450, loss[loss=0.4523, simple_loss=0.4645, pruned_loss=0.2201, over 4485.00 frames. ], tot_loss[loss=0.3927, simple_loss=0.4266, pruned_loss=0.1794, over 938942.93 frames. ], batch size: 21, lr: 3.92e-02, grad_scale: 64.0
+2024-07-27 11:20:08,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.54 vs. limit=13.95
+2024-07-27 11:20:15,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.09 vs. limit=10.735
+2024-07-27 11:20:17,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=8626.666666666666, ans=0.030722222222222227
+2024-07-27 11:20:31,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=8653.333333333334, ans=0.008988405797101449
+2024-07-27 11:20:36,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.11 vs. limit=9.326666666666668
+2024-07-27 11:20:37,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=8666.666666666666, ans=0.008985507246376812
+2024-07-27 11:20:38,021 INFO [train.py:1114] (0/4) Epoch 1, batch 6500, loss[loss=0.584, simple_loss=0.5434, pruned_loss=0.3123, over 3289.00 frames. ], tot_loss[loss=0.3911, simple_loss=0.4252, pruned_loss=0.1785, over 940012.21 frames. ], batch size: 35, lr: 3.91e-02, grad_scale: 64.0
+2024-07-27 11:20:39,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.881e+01 7.078e+01 7.610e+01 8.619e+01 1.357e+02, threshold=1.522e+02, percent-clipped=0.0
+2024-07-27 11:20:42,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.73 vs. limit=14.0
+2024-07-27 11:20:54,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8693.333333333334, ans=0.21306666666666665
+2024-07-27 11:20:56,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.63 vs. limit=14.02
+2024-07-27 11:21:03,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.90 vs. limit=14.03
+2024-07-27 11:21:03,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=8706.666666666666, ans=0.125
+2024-07-27 11:21:14,508 INFO [train.py:1114] (0/4) Epoch 1, batch 6550, loss[loss=0.2992, simple_loss=0.336, pruned_loss=0.1311, over 4807.00 frames. ], tot_loss[loss=0.3857, simple_loss=0.4215, pruned_loss=0.175, over 942912.47 frames. ], batch size: 11, lr: 3.91e-02, grad_scale: 64.0
+2024-07-27 11:21:15,388 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:21:18,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=8733.333333333334, ans=0.5943333333333334
+2024-07-27 11:21:28,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.03 vs. limit=14.07
+2024-07-27 11:21:45,228 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.02 vs. limit=7.196666666666666
+2024-07-27 11:21:49,211 INFO [train.py:1114] (0/4) Epoch 1, batch 6600, loss[loss=0.3434, simple_loss=0.3827, pruned_loss=0.1521, over 4930.00 frames. ], tot_loss[loss=0.3854, simple_loss=0.421, pruned_loss=0.175, over 944844.08 frames. ], batch size: 14, lr: 3.90e-02, grad_scale: 64.0
+2024-07-27 11:21:50,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.093e+01 7.005e+01 7.535e+01 8.213e+01 1.214e+02, threshold=1.507e+02, percent-clipped=0.0
+2024-07-27 11:21:51,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=8800.0, ans=0.025
+2024-07-27 11:22:01,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=8813.333333333334, ans=0.5915333333333334
+2024-07-27 11:22:02,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8826.666666666666, ans=0.125
+2024-07-27 11:22:08,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=8826.666666666666, ans=0.029888888888888892
+2024-07-27 11:22:15,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8840.0, ans=0.2116
+2024-07-27 11:22:25,950 INFO [train.py:1114] (0/4) Epoch 1, batch 6650, loss[loss=0.4127, simple_loss=0.4499, pruned_loss=0.1877, over 4589.00 frames. ], tot_loss[loss=0.3879, simple_loss=0.4227, pruned_loss=0.1765, over 943339.47 frames. ], batch size: 17, lr: 3.89e-02, grad_scale: 64.0
+2024-07-27 11:22:27,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.62 vs. limit=7.216666666666667
+2024-07-27 11:22:29,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.28 vs. limit=14.15
+2024-07-27 11:22:32,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=8880.0, ans=0.125
+2024-07-27 11:22:35,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=8880.0, ans=0.5892
+2024-07-27 11:22:48,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.15 vs. limit=10.835
+2024-07-27 11:22:56,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=8920.0, ans=0.008930434782608696
+2024-07-27 11:22:59,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.44 vs. limit=14.190000000000001
+2024-07-27 11:23:05,441 INFO [train.py:1114] (0/4) Epoch 1, batch 6700, loss[loss=0.4066, simple_loss=0.456, pruned_loss=0.1787, over 4680.00 frames. ], tot_loss[loss=0.3849, simple_loss=0.4211, pruned_loss=0.1743, over 942014.20 frames. ], batch size: 19, lr: 3.89e-02, grad_scale: 64.0
+2024-07-27 11:23:06,706 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.382e+01 7.413e+01 7.948e+01 9.118e+01 1.138e+02, threshold=1.590e+02, percent-clipped=0.0
+2024-07-27 11:23:13,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=8946.666666666666, ans=0.029388888888888895
+2024-07-27 11:23:23,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=8960.0, ans=0.5864
+2024-07-27 11:23:27,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=8973.333333333334, ans=0.125
+2024-07-27 11:23:37,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8986.666666666666, ans=0.21013333333333334
+2024-07-27 11:23:44,421 INFO [train.py:1114] (0/4) Epoch 1, batch 6750, loss[loss=0.3882, simple_loss=0.4159, pruned_loss=0.1803, over 4271.00 frames. ], tot_loss[loss=0.3853, simple_loss=0.4214, pruned_loss=0.1746, over 940262.72 frames. ], batch size: 25, lr: 3.88e-02, grad_scale: 128.0
+2024-07-27 11:23:49,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=9000.0, ans=0.585
+2024-07-27 11:23:52,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9013.333333333334, ans=0.20986666666666665
+2024-07-27 11:23:52,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9013.333333333334, ans=0.20986666666666665
+2024-07-27 11:23:56,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=9013.333333333334, ans=0.125
+2024-07-27 11:24:15,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=9053.333333333334, ans=0.028944444444444443
+2024-07-27 11:24:17,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=7.621333333333334
+2024-07-27 11:24:19,784 INFO [train.py:1114] (0/4) Epoch 1, batch 6800, loss[loss=0.4173, simple_loss=0.4539, pruned_loss=0.1904, over 4633.00 frames. ], tot_loss[loss=0.384, simple_loss=0.4203, pruned_loss=0.1738, over 938647.44 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 128.0
+2024-07-27 11:24:21,070 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.897e+01 7.261e+01 7.946e+01 8.901e+01 1.743e+02, threshold=1.589e+02, percent-clipped=1.0
+2024-07-27 11:24:21,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9066.666666666666, ans=0.125
+2024-07-27 11:24:23,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=9066.666666666666, ans=0.5826666666666667
+2024-07-27 11:24:28,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.95 vs. limit=10.905
+2024-07-27 11:24:32,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=9080.0, ans=0.05
+2024-07-27 11:24:33,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=10.91
+2024-07-27 11:24:43,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.40 vs. limit=4.366
+2024-07-27 11:24:44,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.13 vs. limit=14.33
+2024-07-27 11:24:51,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=9120.0, ans=0.5808
+2024-07-27 11:24:51,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.45 vs. limit=10.92
+2024-07-27 11:24:53,713 INFO [train.py:1114] (0/4) Epoch 1, batch 6850, loss[loss=0.4344, simple_loss=0.4646, pruned_loss=0.2021, over 4696.00 frames. ], tot_loss[loss=0.385, simple_loss=0.4208, pruned_loss=0.1746, over 940254.69 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 64.0
+2024-07-27 11:25:00,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=4.372
+2024-07-27 11:25:06,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9160.0, ans=0.125
+2024-07-27 11:25:16,712 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.83 vs. limit=10.94
+2024-07-27 11:25:28,393 INFO [train.py:1114] (0/4) Epoch 1, batch 6900, loss[loss=0.3414, simple_loss=0.3738, pruned_loss=0.1546, over 4961.00 frames. ], tot_loss[loss=0.3847, simple_loss=0.4203, pruned_loss=0.1746, over 942447.69 frames. ], batch size: 13, lr: 3.86e-02, grad_scale: 64.0
+2024-07-27 11:25:30,375 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.982e+01 7.530e+01 8.620e+01 1.386e+02, threshold=1.506e+02, percent-clipped=0.0
+2024-07-27 11:25:34,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.81 vs. limit=7.3
+2024-07-27 11:25:39,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9213.333333333334, ans=0.0
+2024-07-27 11:25:44,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=9226.666666666666, ans=0.125
+2024-07-27 11:25:54,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9240.0, ans=0.125
+2024-07-27 11:26:01,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9253.333333333334, ans=0.125
+2024-07-27 11:26:02,697 INFO [train.py:1114] (0/4) Epoch 1, batch 6950, loss[loss=0.3577, simple_loss=0.3996, pruned_loss=0.1579, over 4533.00 frames. ], tot_loss[loss=0.3849, simple_loss=0.4206, pruned_loss=0.1746, over 940036.60 frames. ], batch size: 10, lr: 3.85e-02, grad_scale: 64.0
+2024-07-27 11:26:15,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=9280.0, ans=0.125
+2024-07-27 11:26:18,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=9293.333333333334, ans=0.027944444444444445
+2024-07-27 11:26:18,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.82 vs. limit=14.469999999999999
+2024-07-27 11:26:22,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=9306.666666666666, ans=0.2
+2024-07-27 11:26:26,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9306.666666666666, ans=0.20693333333333336
+2024-07-27 11:26:31,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=9320.0, ans=0.125
+2024-07-27 11:26:36,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.52 vs. limit=14.49
+2024-07-27 11:26:37,597 INFO [train.py:1114] (0/4) Epoch 1, batch 7000, loss[loss=0.4207, simple_loss=0.4489, pruned_loss=0.1962, over 4620.00 frames. ], tot_loss[loss=0.3838, simple_loss=0.4198, pruned_loss=0.1738, over 938209.74 frames. ], batch size: 17, lr: 3.85e-02, grad_scale: 64.0
+2024-07-27 11:26:39,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.819e+01 7.301e+01 8.158e+01 9.084e+01 2.160e+02, threshold=1.632e+02, percent-clipped=1.0
+2024-07-27 11:26:45,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=9346.666666666666, ans=0.00883768115942029
+2024-07-27 11:26:53,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.80 vs. limit=14.52
+2024-07-27 11:27:03,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=9360.0, ans=0.125
+2024-07-27 11:27:23,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=9386.666666666666, ans=0.125
+2024-07-27 11:27:28,094 INFO [train.py:1114] (0/4) Epoch 1, batch 7050, loss[loss=0.3667, simple_loss=0.4276, pruned_loss=0.1529, over 4718.00 frames. ], tot_loss[loss=0.3811, simple_loss=0.4187, pruned_loss=0.1718, over 941364.11 frames. ], batch size: 19, lr: 3.84e-02, grad_scale: 64.0
+2024-07-27 11:27:45,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=9426.666666666666, ans=0.125
+2024-07-27 11:27:45,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=9426.666666666666, ans=0.125
+2024-07-27 11:27:53,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.26 vs. limit=14.57
+2024-07-27 11:28:02,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.55 vs. limit=14.58
+2024-07-27 11:28:04,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9440.0, ans=0.125
+2024-07-27 11:28:09,595 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:28:11,405 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:28:14,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=9466.666666666666, ans=0.125
+2024-07-27 11:28:15,358 INFO [train.py:1114] (0/4) Epoch 1, batch 7100, loss[loss=0.4054, simple_loss=0.4415, pruned_loss=0.1846, over 4795.00 frames. ], tot_loss[loss=0.3815, simple_loss=0.4186, pruned_loss=0.1722, over 936026.93 frames. ], batch size: 15, lr: 3.83e-02, grad_scale: 64.0
+2024-07-27 11:28:17,414 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.989e+01 7.688e+01 8.481e+01 1.289e+02, threshold=1.538e+02, percent-clipped=0.0
+2024-07-27 11:28:20,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=9466.666666666666, ans=0.125
+2024-07-27 11:28:21,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.23 vs. limit=14.6
+2024-07-27 11:28:24,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9480.0, ans=0.0
+2024-07-27 11:28:26,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=9480.0, ans=0.02716666666666667
+2024-07-27 11:28:30,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=9493.333333333334, ans=0.5677333333333334
+2024-07-27 11:28:32,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.06 vs. limit=4.4239999999999995
+2024-07-27 11:28:37,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.56 vs. limit=4.426
+2024-07-27 11:28:41,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=9506.666666666666, ans=0.125
+2024-07-27 11:28:44,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.65 vs. limit=11.07
+2024-07-27 11:28:49,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=9533.333333333334, ans=0.025
+2024-07-27 11:28:50,208 INFO [train.py:1114] (0/4) Epoch 1, batch 7150, loss[loss=0.4955, simple_loss=0.4952, pruned_loss=0.2479, over 4487.00 frames. ], tot_loss[loss=0.3797, simple_loss=0.4169, pruned_loss=0.1712, over 937223.85 frames. ], batch size: 21, lr: 3.83e-02, grad_scale: 64.0
+2024-07-27 11:28:51,007 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.698e-01
+2024-07-27 11:29:00,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.77 vs. limit=14.66
+2024-07-27 11:29:01,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9546.666666666666, ans=0.20453333333333334
+2024-07-27 11:29:23,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.14 vs. limit=11.095
+2024-07-27 11:29:26,407 INFO [train.py:1114] (0/4) Epoch 1, batch 7200, loss[loss=0.3602, simple_loss=0.4114, pruned_loss=0.1545, over 4801.00 frames. ], tot_loss[loss=0.3777, simple_loss=0.4152, pruned_loss=0.17, over 937772.43 frames. ], batch size: 15, lr: 3.82e-02, grad_scale: 64.0
+2024-07-27 11:29:28,268 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.182e+01 6.919e+01 7.589e+01 8.160e+01 1.329e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 11:29:35,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.50 vs. limit=11.105
+2024-07-27 11:29:35,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.32 vs. limit=7.403333333333334
+2024-07-27 11:29:40,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9626.666666666666, ans=0.20373333333333332
+2024-07-27 11:30:03,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=9640.0, ans=0.025
+2024-07-27 11:30:55,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9653.333333333334, ans=0.20346666666666666
+2024-07-27 11:30:56,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=9653.333333333334, ans=0.026444444444444444
+2024-07-27 11:30:56,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=9653.333333333334, ans=0.04949747468305833
+2024-07-27 11:30:58,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=9653.333333333334, ans=0.04949747468305833
+2024-07-27 11:31:01,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=9666.666666666666, ans=0.008768115942028986
+2024-07-27 11:31:01,722 INFO [train.py:1114] (0/4) Epoch 1, batch 7250, loss[loss=0.3331, simple_loss=0.3765, pruned_loss=0.1448, over 4844.00 frames. ], tot_loss[loss=0.3761, simple_loss=0.414, pruned_loss=0.1691, over 939163.62 frames. ], batch size: 12, lr: 3.82e-02, grad_scale: 64.0
+2024-07-27 11:31:03,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=9666.666666666666, ans=0.125
+2024-07-27 11:31:05,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=9666.666666666666, ans=0.05
+2024-07-27 11:31:17,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=9693.333333333334, ans=0.0
+2024-07-27 11:31:18,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.44 vs. limit=9.846666666666668
+2024-07-27 11:31:22,344 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:31:25,838 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:31:28,043 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:31:28,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=9720.0, ans=0.125
+2024-07-27 11:31:29,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=9720.0, ans=0.0
+2024-07-27 11:31:32,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.38 vs. limit=11.145
+2024-07-27 11:31:37,467 INFO [train.py:1114] (0/4) Epoch 1, batch 7300, loss[loss=0.3196, simple_loss=0.3734, pruned_loss=0.1329, over 4864.00 frames. ], tot_loss[loss=0.375, simple_loss=0.414, pruned_loss=0.168, over 938901.43 frames. ], batch size: 12, lr: 3.81e-02, grad_scale: 64.0
+2024-07-27 11:31:37,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=9733.333333333334, ans=0.125
+2024-07-27 11:31:38,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.50 vs. limit=11.15
+2024-07-27 11:31:39,747 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.899e+01 6.987e+01 7.392e+01 8.309e+01 1.190e+02, threshold=1.478e+02, percent-clipped=0.0
+2024-07-27 11:31:39,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=9733.333333333334, ans=0.026111111111111113
+2024-07-27 11:31:52,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=9760.0, ans=0.026000000000000002
+2024-07-27 11:31:56,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.71 vs. limit=11.16
+2024-07-27 11:31:57,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.41 vs. limit=11.16
+2024-07-27 11:32:08,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9773.333333333334, ans=0.20226666666666665
+2024-07-27 11:32:17,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.06 vs. limit=14.84
+2024-07-27 11:32:17,899 INFO [train.py:1114] (0/4) Epoch 1, batch 7350, loss[loss=0.351, simple_loss=0.4078, pruned_loss=0.1471, over 4653.00 frames. ], tot_loss[loss=0.3753, simple_loss=0.4145, pruned_loss=0.168, over 938249.65 frames. ], batch size: 12, lr: 3.80e-02, grad_scale: 64.0
+2024-07-27 11:32:24,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=9813.333333333334, ans=0.008736231884057971
+2024-07-27 11:32:35,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=9826.666666666666, ans=0.125
+2024-07-27 11:32:55,347 INFO [train.py:1114] (0/4) Epoch 1, batch 7400, loss[loss=0.3767, simple_loss=0.4035, pruned_loss=0.175, over 4693.00 frames. ], tot_loss[loss=0.3739, simple_loss=0.4138, pruned_loss=0.167, over 939563.77 frames. ], batch size: 13, lr: 3.80e-02, grad_scale: 64.0
+2024-07-27 11:32:57,415 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.927e+01 7.410e+01 8.183e+01 1.194e+02, threshold=1.482e+02, percent-clipped=0.0
+2024-07-27 11:33:01,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=9866.666666666666, ans=7.466666666666667
+2024-07-27 11:33:12,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.71 vs. limit=7.473333333333334
+2024-07-27 11:33:17,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=9906.666666666666, ans=0.09899494936611666
+2024-07-27 11:33:31,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.40 vs. limit=14.940000000000001
+2024-07-27 11:33:31,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=9920.0, ans=0.07
+2024-07-27 11:33:37,719 INFO [train.py:1114] (0/4) Epoch 1, batch 7450, loss[loss=0.3316, simple_loss=0.377, pruned_loss=0.1431, over 4605.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4145, pruned_loss=0.1681, over 936987.40 frames. ], batch size: 11, lr: 3.79e-02, grad_scale: 64.0
+2024-07-27 11:33:49,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=9946.666666666666, ans=0.125
+2024-07-27 11:33:51,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=9960.0, ans=0.125
+2024-07-27 11:33:59,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=9973.333333333334, ans=0.00870144927536232
+2024-07-27 11:34:03,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9986.666666666666, ans=0.0
+2024-07-27 11:34:38,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=4.498
+2024-07-27 11:34:39,722 INFO [train.py:1114] (0/4) Epoch 1, batch 7500, loss[loss=0.4999, simple_loss=0.4913, pruned_loss=0.2542, over 3429.00 frames. ], tot_loss[loss=0.3762, simple_loss=0.4149, pruned_loss=0.1687, over 935383.13 frames. ], batch size: 36, lr: 3.78e-02, grad_scale: 64.0
+2024-07-27 11:34:41,635 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.970e+01 7.592e+01 8.473e+01 1.449e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 11:34:46,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.78 vs. limit=15.01
+2024-07-27 11:34:47,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=10013.333333333334, ans=0.125
+2024-07-27 11:34:55,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.54 vs. limit=11.26
+2024-07-27 11:35:15,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=10040.0, ans=0.125
+2024-07-27 11:35:15,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.75 vs. limit=15.030000000000001
+2024-07-27 11:35:18,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=10040.0, ans=0.125
+2024-07-27 11:35:20,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10040.0, ans=0.0
+2024-07-27 11:35:20,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.97 vs. limit=15.030000000000001
+2024-07-27 11:35:23,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10053.333333333334, ans=0.19946666666666668
+2024-07-27 11:35:25,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10053.333333333334, ans=0.19946666666666668
+2024-07-27 11:35:29,212 INFO [train.py:1114] (0/4) Epoch 1, batch 7550, loss[loss=0.3871, simple_loss=0.4318, pruned_loss=0.1713, over 4605.00 frames. ], tot_loss[loss=0.3777, simple_loss=0.4164, pruned_loss=0.1695, over 935337.80 frames. ], batch size: 17, lr: 3.78e-02, grad_scale: 64.0
+2024-07-27 11:35:29,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.86 vs. limit=15.05
+2024-07-27 11:35:32,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=10066.666666666666, ans=0.125
+2024-07-27 11:35:32,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.14 vs. limit=7.516666666666667
+2024-07-27 11:35:44,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=4.514
+2024-07-27 11:35:46,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10093.333333333334, ans=0.19906666666666667
+2024-07-27 11:36:08,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=10120.0, ans=0.5458000000000001
+2024-07-27 11:36:10,594 INFO [train.py:1114] (0/4) Epoch 1, batch 7600, loss[loss=0.3709, simple_loss=0.4265, pruned_loss=0.1576, over 4818.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.4168, pruned_loss=0.1694, over 937504.09 frames. ], batch size: 14, lr: 3.77e-02, grad_scale: 64.0
+2024-07-27 11:36:12,582 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.409e+01 6.929e+01 7.591e+01 8.810e+01 1.172e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 11:36:41,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=10160.0, ans=0.125
+2024-07-27 11:36:43,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.09 vs. limit=7.54
+2024-07-27 11:36:44,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=10160.0, ans=0.125
+2024-07-27 11:36:48,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10160.0, ans=0.125
+2024-07-27 11:36:59,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=10186.666666666666, ans=0.008655072463768116
+2024-07-27 11:37:01,260 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:37:02,966 INFO [train.py:1114] (0/4) Epoch 1, batch 7650, loss[loss=0.3444, simple_loss=0.3847, pruned_loss=0.152, over 4948.00 frames. ], tot_loss[loss=0.3795, simple_loss=0.4178, pruned_loss=0.1706, over 936551.26 frames. ], batch size: 12, lr: 3.77e-02, grad_scale: 64.0
+2024-07-27 11:37:04,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=10200.0, ans=0.025
+2024-07-27 11:37:09,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10213.333333333334, ans=0.125
+2024-07-27 11:37:14,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=10213.333333333334, ans=0.125
+2024-07-27 11:37:22,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=10226.666666666666, ans=0.05
+2024-07-27 11:37:27,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=11.34
+2024-07-27 11:37:36,577 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.78 vs. limit=15.190000000000001
+2024-07-27 11:37:37,478 INFO [train.py:1114] (0/4) Epoch 1, batch 7700, loss[loss=0.3812, simple_loss=0.418, pruned_loss=0.1722, over 4694.00 frames. ], tot_loss[loss=0.3807, simple_loss=0.4187, pruned_loss=0.1713, over 933839.74 frames. ], batch size: 13, lr: 3.76e-02, grad_scale: 64.0
+2024-07-27 11:37:39,437 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.134e+01 7.002e+01 7.732e+01 8.804e+01 1.160e+02, threshold=1.546e+02, percent-clipped=0.0
+2024-07-27 11:37:40,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.57 vs. limit=15.2
+2024-07-27 11:37:47,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.80 vs. limit=15.21
+2024-07-27 11:37:51,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=10293.333333333334, ans=0.023777777777777773
+2024-07-27 11:37:58,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=10306.666666666666, ans=0.5392666666666668
+2024-07-27 11:38:10,885 INFO [train.py:1114] (0/4) Epoch 1, batch 7750, loss[loss=0.4096, simple_loss=0.45, pruned_loss=0.1846, over 4939.00 frames. ], tot_loss[loss=0.3818, simple_loss=0.4205, pruned_loss=0.1716, over 934896.10 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0
+2024-07-27 11:38:20,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=10346.666666666666, ans=10.0
+2024-07-27 11:38:23,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10346.666666666666, ans=0.0
+2024-07-27 11:38:32,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=10373.333333333334, ans=0.125
+2024-07-27 11:38:39,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=10386.666666666666, ans=0.023388888888888893
+2024-07-27 11:38:42,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=10386.666666666666, ans=0.025
+2024-07-27 11:38:43,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.21 vs. limit=7.596666666666666
+2024-07-27 11:38:45,469 INFO [train.py:1114] (0/4) Epoch 1, batch 7800, loss[loss=0.3456, simple_loss=0.3904, pruned_loss=0.1503, over 4653.00 frames. ], tot_loss[loss=0.3792, simple_loss=0.4189, pruned_loss=0.1698, over 936806.31 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0
+2024-07-27 11:38:47,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.890e+01 7.293e+01 8.300e+01 1.085e+02, threshold=1.459e+02, percent-clipped=0.0
+2024-07-27 11:38:52,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=10413.333333333334, ans=0.125
+2024-07-27 11:38:59,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=25.10 vs. limit=11.405
+2024-07-27 11:38:59,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=10426.666666666666, ans=0.05
+2024-07-27 11:39:00,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=10426.666666666666, ans=0.5350666666666668
+2024-07-27 11:39:03,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=10426.666666666666, ans=0.125
+2024-07-27 11:39:05,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.43 vs. limit=15.32
+2024-07-27 11:39:08,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=10440.0, ans=0.09899494936611666
+2024-07-27 11:39:13,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=10453.333333333334, ans=0.125
+2024-07-27 11:39:13,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.82 vs. limit=15.34
+2024-07-27 11:39:14,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10453.333333333334, ans=0.19546666666666668
+2024-07-27 11:39:15,196 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:39:15,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=10453.333333333334, ans=0.125
+2024-07-27 11:39:19,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.57 vs. limit=4.568
+2024-07-27 11:39:20,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.76 vs. limit=15.35
+2024-07-27 11:39:20,618 INFO [train.py:1114] (0/4) Epoch 1, batch 7850, loss[loss=0.3169, simple_loss=0.3537, pruned_loss=0.1401, over 4516.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.4192, pruned_loss=0.1706, over 936278.04 frames. ], batch size: 10, lr: 3.74e-02, grad_scale: 64.0
+2024-07-27 11:39:33,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.23 vs. limit=7.623333333333333
+2024-07-27 11:39:34,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=10493.333333333334, ans=0.125
+2024-07-27 11:39:37,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=10493.333333333334, ans=0.125
+2024-07-27 11:39:38,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.18 vs. limit=15.370000000000001
+2024-07-27 11:39:43,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=10506.666666666666, ans=0.5322666666666667
+2024-07-27 11:39:47,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=10520.0, ans=0.125
+2024-07-27 11:39:48,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=10520.0, ans=10.0
+2024-07-27 11:39:51,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=10520.0, ans=0.125
+2024-07-27 11:39:53,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=10520.0, ans=0.022833333333333337
+2024-07-27 11:39:54,400 INFO [train.py:1114] (0/4) Epoch 1, batch 7900, loss[loss=0.4192, simple_loss=0.4547, pruned_loss=0.1919, over 4863.00 frames. ], tot_loss[loss=0.384, simple_loss=0.4224, pruned_loss=0.1728, over 933606.65 frames. ], batch size: 14, lr: 3.73e-02, grad_scale: 64.0
+2024-07-27 11:39:56,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.42 vs. 
limit=11.45 +2024-07-27 11:39:56,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.883e+01 7.101e+01 7.733e+01 8.610e+01 1.628e+02, threshold=1.547e+02, percent-clipped=1.0 +2024-07-27 11:39:59,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=10533.333333333334, ans=0.125 +2024-07-27 11:40:00,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=10546.666666666666, ans=0.5308666666666667 +2024-07-27 11:40:17,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10573.333333333334, ans=0.0 +2024-07-27 11:40:18,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.80 vs. limit=11.465 +2024-07-27 11:40:18,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=10573.333333333334, ans=0.125 +2024-07-27 11:40:20,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=4.586 +2024-07-27 11:40:27,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=11.47 +2024-07-27 11:40:28,140 INFO [train.py:1114] (0/4) Epoch 1, batch 7950, loss[loss=0.4186, simple_loss=0.4196, pruned_loss=0.2088, over 3257.00 frames. ], tot_loss[loss=0.3805, simple_loss=0.4199, pruned_loss=0.1705, over 935590.39 frames. ], batch size: 35, lr: 3.73e-02, grad_scale: 64.0 +2024-07-27 11:40:28,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=11.475 +2024-07-27 11:40:32,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=10600.0, ans=0.125 +2024-07-27 11:40:37,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=10613.333333333334, ans=0.02244444444444444 +2024-07-27 11:40:49,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10640.0, ans=0.1936 +2024-07-27 11:40:57,249 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:41:03,394 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-8000.pt +2024-07-27 11:41:44,677 INFO [train.py:1114] (0/4) Epoch 1, batch 8000, loss[loss=0.3974, simple_loss=0.419, pruned_loss=0.1879, over 4626.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4152, pruned_loss=0.1678, over 934755.96 frames. ], batch size: 11, lr: 3.72e-02, grad_scale: 64.0 +2024-07-27 11:41:45,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.30 vs. 
limit=15.5 +2024-07-27 11:41:46,753 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.949e+01 6.868e+01 7.730e+01 8.687e+01 2.055e+02, threshold=1.546e+02, percent-clipped=1.0 +2024-07-27 11:41:57,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=10693.333333333334, ans=0.025 +2024-07-27 11:41:59,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=15.52 +2024-07-27 11:42:00,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=10693.333333333334, ans=0.125 +2024-07-27 11:42:18,028 INFO [train.py:1114] (0/4) Epoch 1, batch 8050, loss[loss=0.4242, simple_loss=0.4618, pruned_loss=0.1933, over 4816.00 frames. ], tot_loss[loss=0.3755, simple_loss=0.4154, pruned_loss=0.1678, over 934322.66 frames. ], batch size: 14, lr: 3.72e-02, grad_scale: 64.0 +2024-07-27 11:42:22,764 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.664e-02 +2024-07-27 11:42:25,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.88 vs. limit=11.525 +2024-07-27 11:42:26,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=10746.666666666666, ans=0.5238666666666667 +2024-07-27 11:42:29,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=10746.666666666666, ans=11.530000000000001 +2024-07-27 11:42:36,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.86 vs. limit=8.304 +2024-07-27 11:42:39,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=11.54 +2024-07-27 11:42:41,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.30 vs. limit=10.386666666666667 +2024-07-27 11:42:46,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10786.666666666666, ans=0.19213333333333332 +2024-07-27 11:42:49,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=10786.666666666666, ans=0.09899494936611666 +2024-07-27 11:42:52,713 INFO [train.py:1114] (0/4) Epoch 1, batch 8100, loss[loss=0.4662, simple_loss=0.4888, pruned_loss=0.2219, over 4799.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4158, pruned_loss=0.1675, over 934211.06 frames. ], batch size: 15, lr: 3.71e-02, grad_scale: 64.0 +2024-07-27 11:42:54,757 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.005e+01 7.921e+01 8.722e+01 1.648e+02, threshold=1.584e+02, percent-clipped=1.0 +2024-07-27 11:43:10,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=10826.666666666666, ans=0.5210666666666668 +2024-07-27 11:43:19,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. 
limit=11.57 +2024-07-27 11:43:22,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.06 vs. limit=15.64 +2024-07-27 11:43:26,147 INFO [train.py:1114] (0/4) Epoch 1, batch 8150, loss[loss=0.3951, simple_loss=0.4301, pruned_loss=0.18, over 4795.00 frames. ], tot_loss[loss=0.3728, simple_loss=0.4136, pruned_loss=0.166, over 937749.69 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0 +2024-07-27 11:43:33,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. limit=7.720000000000001 +2024-07-27 11:43:41,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=10893.333333333334, ans=0.125 +2024-07-27 11:43:42,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=10893.333333333334, ans=0.00850144927536232 +2024-07-27 11:44:06,733 INFO [train.py:1114] (0/4) Epoch 1, batch 8200, loss[loss=0.4178, simple_loss=0.4479, pruned_loss=0.1939, over 4806.00 frames. ], tot_loss[loss=0.3734, simple_loss=0.4144, pruned_loss=0.1662, over 938841.66 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0 +2024-07-27 11:44:08,823 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 6.954e+01 7.394e+01 8.427e+01 2.023e+02, threshold=1.479e+02, percent-clipped=1.0 +2024-07-27 11:44:11,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=10933.333333333334, ans=0.125 +2024-07-27 11:44:15,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=10946.666666666666, ans=0.05 +2024-07-27 11:44:32,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=10973.333333333334, ans=0.008484057971014492 +2024-07-27 11:44:35,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=10986.666666666666, ans=0.020888888888888894 +2024-07-27 11:44:37,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=10986.666666666666, ans=0.125 +2024-07-27 11:44:40,414 INFO [train.py:1114] (0/4) Epoch 1, batch 8250, loss[loss=0.323, simple_loss=0.3792, pruned_loss=0.1333, over 4888.00 frames. ], tot_loss[loss=0.3739, simple_loss=0.4151, pruned_loss=0.1664, over 938616.02 frames. ], batch size: 13, lr: 3.69e-02, grad_scale: 64.0 +2024-07-27 11:44:42,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.97 vs. limit=15.75 +2024-07-27 11:45:01,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.08 vs. 
limit=10.52 +2024-07-27 11:45:03,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=11040.0, ans=0.008469565217391305 +2024-07-27 11:45:05,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=11040.0, ans=0.125 +2024-07-27 11:45:08,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.53 vs. limit=11.645 +2024-07-27 11:46:40,231 INFO [train.py:1114] (0/4) Epoch 1, batch 8300, loss[loss=0.3757, simple_loss=0.4237, pruned_loss=0.1638, over 4894.00 frames. ], tot_loss[loss=0.3741, simple_loss=0.4154, pruned_loss=0.1664, over 938310.28 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0 +2024-07-27 11:46:41,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=11066.666666666666, ans=0.02055555555555556 +2024-07-27 11:46:42,166 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 6.941e+01 7.717e+01 8.510e+01 1.243e+02, threshold=1.543e+02, percent-clipped=0.0 +2024-07-27 11:46:45,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11066.666666666666, ans=0.125 +2024-07-27 11:46:56,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=11080.0, ans=0.36619999999999997 +2024-07-27 11:47:18,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=11106.666666666666, ans=0.125 +2024-07-27 11:47:18,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=11106.666666666666, ans=0.008455072463768117 +2024-07-27 11:47:18,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11106.666666666666, ans=0.125 +2024-07-27 11:47:24,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=11120.0, ans=0.5108000000000001 +2024-07-27 11:47:28,607 INFO [train.py:1114] (0/4) Epoch 1, batch 8350, loss[loss=0.3654, simple_loss=0.426, pruned_loss=0.1524, over 4789.00 frames. ], tot_loss[loss=0.3712, simple_loss=0.4128, pruned_loss=0.1648, over 941020.36 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0 +2024-07-27 11:47:36,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=11146.666666666666, ans=0.020222222222222228 +2024-07-27 11:47:37,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.62 vs. limit=11.68 +2024-07-27 11:47:48,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.86 vs. limit=15.86 +2024-07-27 11:47:48,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.63 vs. 
limit=15.86 +2024-07-27 11:47:53,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=11160.0, ans=15.87 +2024-07-27 11:48:11,883 INFO [train.py:1114] (0/4) Epoch 1, batch 8400, loss[loss=0.3246, simple_loss=0.3731, pruned_loss=0.138, over 4786.00 frames. ], tot_loss[loss=0.3717, simple_loss=0.4134, pruned_loss=0.165, over 940017.63 frames. ], batch size: 12, lr: 3.67e-02, grad_scale: 64.0 +2024-07-27 11:48:13,816 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.876e+01 7.080e+01 7.641e+01 8.587e+01 1.412e+02, threshold=1.528e+02, percent-clipped=0.0 +2024-07-27 11:48:15,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.44 vs. limit=15.9 +2024-07-27 11:48:16,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=11200.0, ans=0.138 +2024-07-27 11:48:24,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=11226.666666666666, ans=0.125 +2024-07-27 11:48:35,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=11240.0, ans=0.00842608695652174 +2024-07-27 11:48:45,022 INFO [train.py:1114] (0/4) Epoch 1, batch 8450, loss[loss=0.358, simple_loss=0.4103, pruned_loss=0.1529, over 4809.00 frames. ], tot_loss[loss=0.373, simple_loss=0.4148, pruned_loss=0.1656, over 939219.64 frames. ], batch size: 15, lr: 3.67e-02, grad_scale: 64.0 +2024-07-27 11:48:46,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.47 vs. limit=7.816666666666666 +2024-07-27 11:48:47,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=11266.666666666666, ans=0.5056666666666667 +2024-07-27 11:48:59,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=11293.333333333334, ans=0.0 +2024-07-27 11:49:07,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.81 vs. limit=10.653333333333332 +2024-07-27 11:49:08,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.50 vs. limit=15.98 +2024-07-27 11:49:20,001 INFO [train.py:1114] (0/4) Epoch 1, batch 8500, loss[loss=0.3195, simple_loss=0.3783, pruned_loss=0.1303, over 4625.00 frames. ], tot_loss[loss=0.3736, simple_loss=0.4155, pruned_loss=0.1658, over 938559.98 frames. ], batch size: 11, lr: 3.66e-02, grad_scale: 64.0 +2024-07-27 11:49:21,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. 
limit=11.75 +2024-07-27 11:49:21,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.800e+01 6.867e+01 7.338e+01 8.262e+01 1.317e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 11:49:27,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11346.666666666666, ans=0.019388888888888893 +2024-07-27 11:49:28,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=11346.666666666666, ans=0.008402898550724638 +2024-07-27 11:49:31,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.03 vs. limit=16.009999999999998 +2024-07-27 11:49:42,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11373.333333333334, ans=0.125 +2024-07-27 11:49:44,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=11373.333333333334, ans=0.019277777777777776 +2024-07-27 11:49:54,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11400.0, ans=0.186 +2024-07-27 11:49:54,525 INFO [train.py:1114] (0/4) Epoch 1, batch 8550, loss[loss=0.3237, simple_loss=0.3672, pruned_loss=0.1401, over 4797.00 frames. ], tot_loss[loss=0.372, simple_loss=0.4139, pruned_loss=0.1651, over 939474.76 frames. ], batch size: 11, lr: 3.65e-02, grad_scale: 64.0 +2024-07-27 11:49:57,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11400.0, ans=0.01916666666666667 +2024-07-27 11:49:57,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=11400.0, ans=0.01916666666666667 +2024-07-27 11:50:00,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=11413.333333333334, ans=0.125 +2024-07-27 11:50:06,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=16.060000000000002 +2024-07-27 11:50:08,600 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:50:16,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.24 vs. limit=10.719999999999999 +2024-07-27 11:50:20,643 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:50:22,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=11453.333333333334, ans=0.125 +2024-07-27 11:50:22,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.64 vs. limit=16.09 +2024-07-27 11:50:24,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11453.333333333334, ans=0.18546666666666667 +2024-07-27 11:50:28,072 INFO [train.py:1114] (0/4) Epoch 1, batch 8600, loss[loss=0.3747, simple_loss=0.4122, pruned_loss=0.1686, over 4799.00 frames. 
], tot_loss[loss=0.3715, simple_loss=0.4129, pruned_loss=0.1651, over 938896.21 frames. ], batch size: 15, lr: 3.65e-02, grad_scale: 64.0 +2024-07-27 11:50:31,116 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.520e+01 6.717e+01 7.221e+01 8.025e+01 1.285e+02, threshold=1.444e+02, percent-clipped=0.0 +2024-07-27 11:50:31,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11466.666666666666, ans=0.18533333333333335 +2024-07-27 11:50:38,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=11480.0, ans=0.008373913043478261 +2024-07-27 11:50:48,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=11493.333333333334, ans=0.125 +2024-07-27 11:50:51,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=11506.666666666666, ans=0.018722222222222223 +2024-07-27 11:50:56,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=11520.0, ans=0.0 +2024-07-27 11:51:02,953 INFO [train.py:1114] (0/4) Epoch 1, batch 8650, loss[loss=0.3594, simple_loss=0.4093, pruned_loss=0.1548, over 4894.00 frames. ], tot_loss[loss=0.3701, simple_loss=0.4117, pruned_loss=0.1642, over 940379.62 frames. ], batch size: 15, lr: 3.64e-02, grad_scale: 64.0 +2024-07-27 11:51:09,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=11533.333333333334, ans=0.025 +2024-07-27 11:51:10,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=11533.333333333334, ans=0.018611111111111106 +2024-07-27 11:51:13,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=11546.666666666666, ans=0.49586666666666673 +2024-07-27 11:51:21,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=11560.0, ans=0.49540000000000006 +2024-07-27 11:51:23,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.32 vs. limit=16.17 +2024-07-27 11:51:23,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=11560.0, ans=0.025 +2024-07-27 11:51:29,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=11573.333333333334, ans=0.125 +2024-07-27 11:51:44,972 INFO [train.py:1114] (0/4) Epoch 1, batch 8700, loss[loss=0.3237, simple_loss=0.3709, pruned_loss=0.1382, over 4762.00 frames. ], tot_loss[loss=0.3686, simple_loss=0.4108, pruned_loss=0.1632, over 938045.82 frames. 
], batch size: 13, lr: 3.64e-02, grad_scale: 64.0 +2024-07-27 11:51:46,840 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.502e+01 6.768e+01 7.395e+01 8.572e+01 1.594e+02, threshold=1.479e+02, percent-clipped=2.0 +2024-07-27 11:51:53,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=11600.0, ans=0.125 +2024-07-27 11:51:56,881 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.53 vs. limit=11.855 +2024-07-27 11:52:01,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=11626.666666666666, ans=0.125 +2024-07-27 11:52:03,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.86 vs. limit=16.22 +2024-07-27 11:52:20,374 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=11.870000000000001 +2024-07-27 11:52:21,401 INFO [train.py:1114] (0/4) Epoch 1, batch 8750, loss[loss=0.3638, simple_loss=0.4215, pruned_loss=0.1531, over 4679.00 frames. ], tot_loss[loss=0.3689, simple_loss=0.4108, pruned_loss=0.1635, over 936725.76 frames. ], batch size: 15, lr: 3.63e-02, grad_scale: 64.0 +2024-07-27 11:52:22,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=11666.666666666666, ans=0.01805555555555556 +2024-07-27 11:52:26,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=11666.666666666666, ans=0.01805555555555556 +2024-07-27 11:52:31,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=11680.0, ans=0.018000000000000002 +2024-07-27 11:52:35,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11693.333333333334, ans=0.18306666666666666 +2024-07-27 11:52:38,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=11693.333333333334, ans=0.025 +2024-07-27 11:52:38,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=11693.333333333334, ans=0.0 +2024-07-27 11:52:46,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=11706.666666666666, ans=10.0 +2024-07-27 11:52:55,474 INFO [train.py:1114] (0/4) Epoch 1, batch 8800, loss[loss=0.3456, simple_loss=0.4006, pruned_loss=0.1453, over 4932.00 frames. ], tot_loss[loss=0.3689, simple_loss=0.4106, pruned_loss=0.1636, over 937280.30 frames. ], batch size: 14, lr: 3.62e-02, grad_scale: 64.0 +2024-07-27 11:52:56,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.47 vs. 
limit=11.9 +2024-07-27 11:52:57,644 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 7.237e+01 7.954e+01 8.853e+01 1.433e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 11:53:12,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=11760.0, ans=0.125 +2024-07-27 11:53:12,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=11760.0, ans=0.07 +2024-07-27 11:53:14,403 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:53:18,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=11773.333333333334, ans=0.125 +2024-07-27 11:53:19,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=11773.333333333334, ans=0.025 +2024-07-27 11:53:29,050 INFO [train.py:1114] (0/4) Epoch 1, batch 8850, loss[loss=0.4105, simple_loss=0.4459, pruned_loss=0.1875, over 4532.00 frames. ], tot_loss[loss=0.3705, simple_loss=0.4115, pruned_loss=0.1648, over 931290.32 frames. ], batch size: 21, lr: 3.62e-02, grad_scale: 128.0 +2024-07-27 11:53:30,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11800.0, ans=0.182 +2024-07-27 11:53:32,914 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.19 vs. limit=10.9 +2024-07-27 11:53:33,982 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:53:48,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=11840.0, ans=0.125 +2024-07-27 11:53:55,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=11853.333333333334, ans=0.4851333333333333 +2024-07-27 11:53:58,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11853.333333333334, ans=0.18146666666666667 +2024-07-27 11:54:02,398 INFO [train.py:1114] (0/4) Epoch 1, batch 8900, loss[loss=0.3437, simple_loss=0.3805, pruned_loss=0.1534, over 4939.00 frames. ], tot_loss[loss=0.3709, simple_loss=0.4115, pruned_loss=0.1652, over 929561.48 frames. ], batch size: 12, lr: 3.61e-02, grad_scale: 128.0 +2024-07-27 11:54:04,471 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.705e+01 6.769e+01 7.408e+01 8.026e+01 1.011e+02, threshold=1.482e+02, percent-clipped=0.0 +2024-07-27 11:54:05,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11866.666666666666, ans=0.18133333333333335 +2024-07-27 11:54:07,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=11866.666666666666, ans=0.48466666666666675 +2024-07-27 11:54:14,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=12.06 vs. 
limit=10.940000000000001 +2024-07-27 11:54:28,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=11893.333333333334, ans=0.48373333333333335 +2024-07-27 11:54:30,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=8.757333333333333 +2024-07-27 11:54:33,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=11893.333333333334, ans=0.017111111111111105 +2024-07-27 11:54:38,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=11906.666666666666, ans=0.008281159420289855 +2024-07-27 11:54:42,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=11920.0, ans=0.09899494936611666 +2024-07-27 11:54:47,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=11.969999999999999 +2024-07-27 11:54:48,601 INFO [train.py:1114] (0/4) Epoch 1, batch 8950, loss[loss=0.3346, simple_loss=0.3851, pruned_loss=0.142, over 4488.00 frames. ], tot_loss[loss=0.3697, simple_loss=0.4109, pruned_loss=0.1642, over 930415.21 frames. ], batch size: 21, lr: 3.61e-02, grad_scale: 128.0 +2024-07-27 11:54:49,893 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=11.975 +2024-07-27 11:55:14,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=11960.0, ans=0.4814 +2024-07-27 11:55:14,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=11960.0, ans=0.008269565217391304 +2024-07-27 11:55:17,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=11973.333333333334, ans=0.07 +2024-07-27 11:55:17,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=11973.333333333334, ans=0.016777777777777773 +2024-07-27 11:55:30,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=12000.0, ans=0.125 +2024-07-27 11:55:31,945 INFO [train.py:1114] (0/4) Epoch 1, batch 9000, loss[loss=0.3569, simple_loss=0.3961, pruned_loss=0.1588, over 4641.00 frames. ], tot_loss[loss=0.3694, simple_loss=0.4111, pruned_loss=0.1638, over 933511.84 frames. ], batch size: 12, lr: 3.60e-02, grad_scale: 64.0 +2024-07-27 11:55:31,946 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 11:55:38,857 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0634, 4.7968, 4.5234, 4.6165], device='cuda:0') +2024-07-27 11:55:45,437 INFO [train.py:1146] (0/4) Epoch 1, validation: loss=0.2917, simple_loss=0.3779, pruned_loss=0.1028, over 944034.00 frames. 
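The validation entry above (`validation: loss=0.2917, simple_loss=0.3779, pruned_loss=0.1028, over 944034.00 frames.`), like the `tot_loss[...]` entries throughout this log, reports losses averaged over the total number of acoustic frames seen rather than over batches. The sketch below shows one way such frame-weighted running averages can be maintained; `LossTracker` is a hypothetical stand-in for illustration only, not the tracker `train.py` actually uses, and the example numbers are copied from batch 9000 and batch 9100 entries in this log.

```python
# Minimal sketch (hypothetical `LossTracker`, not the actual training code) of how
# per-batch entries like `loss[loss=0.3569, ..., over 4641.00 frames.]` can be
# combined into the frame-weighted averages reported as `tot_loss[...]` and in
# the `validation: loss=..., over N frames.` line.

class LossTracker:
    def __init__(self):
        self.sums = {}      # metric name -> accumulated (loss * frames)
        self.frames = 0.0   # total number of frames seen so far

    def update(self, metrics: dict, frames: float) -> None:
        """Accumulate one batch worth of frame-weighted loss values."""
        for name, value in metrics.items():
            self.sums[name] = self.sums.get(name, 0.0) + value * frames
        self.frames += frames

    def averages(self) -> dict:
        """Frame-weighted averages, matching the `over N frames.` convention."""
        return {name: s / self.frames for name, s in self.sums.items()}


tracker = LossTracker()
# Two illustrative batches, with values taken from entries visible in this log.
tracker.update({"loss": 0.3569, "simple_loss": 0.3961, "pruned_loss": 0.1588},
               frames=4641.0)
tracker.update({"loss": 0.3872, "simple_loss": 0.4053, "pruned_loss": 0.1846},
               frames=4933.0)
print(tracker.averages(), f"over {tracker.frames} frames")
```

Weighting by frames rather than by batch keeps the reported averages comparable across batches of different durations (note the varying `batch size` and frame counts in the entries above), so a short utterance batch does not skew `tot_loss` the way a plain per-batch mean would.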
+2024-07-27 11:55:45,438 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 11:55:48,956 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.414e+01 6.571e+01 7.230e+01 7.907e+01 1.156e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 11:55:50,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=12000.0, ans=0.025 +2024-07-27 11:55:51,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=12000.0, ans=0.025 +2024-07-27 11:55:54,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=12013.333333333334, ans=0.04949747468305833 +2024-07-27 11:55:56,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.35 vs. limit=16.509999999999998 +2024-07-27 11:56:10,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.33 vs. limit=16.53 +2024-07-27 11:56:18,495 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:56:18,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.33 vs. limit=12.02 +2024-07-27 11:56:21,461 INFO [train.py:1114] (0/4) Epoch 1, batch 9050, loss[loss=0.347, simple_loss=0.3781, pruned_loss=0.158, over 4508.00 frames. ], tot_loss[loss=0.3684, simple_loss=0.41, pruned_loss=0.1633, over 934124.82 frames. ], batch size: 10, lr: 3.59e-02, grad_scale: 64.0 +2024-07-27 11:56:31,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12080.0, ans=0.125 +2024-07-27 11:56:32,112 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=9.023e+00 +2024-07-27 11:56:39,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=12093.333333333334, ans=0.125 +2024-07-27 11:56:42,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=4.816 +2024-07-27 11:56:45,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=12106.666666666666, ans=0.07 +2024-07-27 11:56:47,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=12120.0, ans=0.008234782608695652 +2024-07-27 11:56:49,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=12120.0, ans=0.125 +2024-07-27 11:56:49,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=12120.0, ans=0.025 +2024-07-27 11:56:54,149 INFO [train.py:1114] (0/4) Epoch 1, batch 9100, loss[loss=0.3872, simple_loss=0.4053, pruned_loss=0.1846, over 4933.00 frames. ], tot_loss[loss=0.3669, simple_loss=0.4091, pruned_loss=0.1623, over 936840.61 frames. 
], batch size: 14, lr: 3.59e-02, grad_scale: 64.0 +2024-07-27 11:57:00,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=12133.333333333334, ans=0.025 +2024-07-27 11:57:01,197 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.077e+01 6.999e+01 7.542e+01 8.527e+01 1.258e+02, threshold=1.508e+02, percent-clipped=0.0 +2024-07-27 11:57:07,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=12146.666666666666, ans=0.025 +2024-07-27 11:57:08,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12146.666666666666, ans=0.125 +2024-07-27 11:57:11,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=12160.0, ans=0.125 +2024-07-27 11:57:19,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=12173.333333333334, ans=0.125 +2024-07-27 11:57:27,866 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:57:31,649 INFO [train.py:1114] (0/4) Epoch 1, batch 9150, loss[loss=0.324, simple_loss=0.3873, pruned_loss=0.1304, over 4816.00 frames. ], tot_loss[loss=0.3686, simple_loss=0.4108, pruned_loss=0.1633, over 935417.10 frames. ], batch size: 14, lr: 3.58e-02, grad_scale: 64.0 +2024-07-27 11:57:35,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=12200.0, ans=0.125 +2024-07-27 11:57:38,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12213.333333333334, ans=0.17786666666666667 +2024-07-27 11:57:56,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=12240.0, ans=0.008208695652173914 +2024-07-27 11:58:04,617 INFO [train.py:1114] (0/4) Epoch 1, batch 9200, loss[loss=0.2675, simple_loss=0.3258, pruned_loss=0.1046, over 4860.00 frames. ], tot_loss[loss=0.3663, simple_loss=0.4087, pruned_loss=0.162, over 937773.28 frames. ], batch size: 12, lr: 3.58e-02, grad_scale: 64.0 +2024-07-27 11:58:07,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.834e+01 6.731e+01 7.265e+01 8.123e+01 1.608e+02, threshold=1.453e+02, percent-clipped=1.0 +2024-07-27 11:58:14,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=12280.0, ans=0.4702 +2024-07-27 11:58:16,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12280.0, ans=0.125 +2024-07-27 11:58:17,332 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:58:20,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.67 vs. 
limit=16.72 +2024-07-27 11:58:22,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=12293.333333333334, ans=0.015444444444444441 +2024-07-27 11:58:28,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=12306.666666666666, ans=0.125 +2024-07-27 11:58:32,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12306.666666666666, ans=0.125 +2024-07-27 11:58:40,694 INFO [train.py:1114] (0/4) Epoch 1, batch 9250, loss[loss=0.3848, simple_loss=0.4379, pruned_loss=0.1658, over 4635.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.4076, pruned_loss=0.1615, over 938468.32 frames. ], batch size: 13, lr: 3.57e-02, grad_scale: 64.0 +2024-07-27 11:58:47,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=12346.666666666666, ans=0.02 +2024-07-27 11:58:52,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=12346.666666666666, ans=0.4678666666666667 +2024-07-27 11:58:57,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.12 vs. limit=11.18 +2024-07-27 11:59:00,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12373.333333333334, ans=0.17626666666666668 +2024-07-27 11:59:05,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.75 vs. limit=8.093333333333334 +2024-07-27 11:59:14,659 INFO [train.py:1114] (0/4) Epoch 1, batch 9300, loss[loss=0.4051, simple_loss=0.4334, pruned_loss=0.1884, over 4778.00 frames. ], tot_loss[loss=0.3657, simple_loss=0.4081, pruned_loss=0.1616, over 938673.43 frames. ], batch size: 12, lr: 3.57e-02, grad_scale: 64.0 +2024-07-27 11:59:17,486 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.284e+01 6.919e+01 7.820e+01 8.678e+01 1.247e+02, threshold=1.564e+02, percent-clipped=0.0 +2024-07-27 11:59:23,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=12413.333333333334, ans=4.862 +2024-07-27 11:59:25,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=12.155000000000001 +2024-07-27 11:59:29,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.53 vs. 
limit=11.213333333333333 +2024-07-27 11:59:32,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=12426.666666666666, ans=0.008168115942028986 +2024-07-27 11:59:37,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12440.0, ans=0.17559999999999998 +2024-07-27 11:59:39,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12440.0, ans=0.17559999999999998 +2024-07-27 11:59:42,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=12453.333333333334, ans=0.00816231884057971 +2024-07-27 11:59:45,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.16 vs. limit=12.17 +2024-07-27 11:59:48,179 INFO [train.py:1114] (0/4) Epoch 1, batch 9350, loss[loss=0.3263, simple_loss=0.3688, pruned_loss=0.1419, over 4796.00 frames. ], tot_loss[loss=0.3662, simple_loss=0.4087, pruned_loss=0.1618, over 935783.88 frames. ], batch size: 11, lr: 3.56e-02, grad_scale: 64.0 +2024-07-27 11:59:49,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=12466.666666666666, ans=0.014722222222222227 +2024-07-27 11:59:50,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=12466.666666666666, ans=0.125 +2024-07-27 12:00:14,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=12506.666666666666, ans=0.125 +2024-07-27 12:00:14,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=12506.666666666666, ans=0.125 +2024-07-27 12:00:22,082 INFO [train.py:1114] (0/4) Epoch 1, batch 9400, loss[loss=0.3134, simple_loss=0.3861, pruned_loss=0.1204, over 4687.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.4074, pruned_loss=0.1615, over 933960.51 frames. ], batch size: 13, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:00:24,566 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.848e+01 6.575e+01 7.346e+01 8.658e+01 2.018e+02, threshold=1.469e+02, percent-clipped=2.0 +2024-07-27 12:00:25,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=12533.333333333334, ans=0.125 +2024-07-27 12:00:25,913 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:00:26,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=12533.333333333334, ans=0.125 +2024-07-27 12:00:28,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.65 vs. 
limit=16.91 +2024-07-27 12:00:30,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=12546.666666666666, ans=0.125 +2024-07-27 12:00:36,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=12546.666666666666, ans=0.008142028985507246 +2024-07-27 12:00:40,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=12560.0, ans=0.46040000000000003 +2024-07-27 12:00:42,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=12560.0, ans=0.008139130434782609 +2024-07-27 12:00:43,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.43 vs. limit=16.92 +2024-07-27 12:00:45,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=12573.333333333334, ans=0.014277777777777771 +2024-07-27 12:00:57,889 INFO [train.py:1114] (0/4) Epoch 1, batch 9450, loss[loss=0.3373, simple_loss=0.3783, pruned_loss=0.1482, over 4795.00 frames. ], tot_loss[loss=0.3644, simple_loss=0.407, pruned_loss=0.1609, over 933032.01 frames. ], batch size: 11, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:01:09,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=12613.333333333334, ans=0.45853333333333335 +2024-07-27 12:01:16,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=12.235 +2024-07-27 12:01:16,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=12626.666666666666, ans=0.125 +2024-07-27 12:01:30,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.45 vs. limit=12.245000000000001 +2024-07-27 12:01:33,383 INFO [train.py:1114] (0/4) Epoch 1, batch 9500, loss[loss=0.3058, simple_loss=0.3617, pruned_loss=0.125, over 4700.00 frames. ], tot_loss[loss=0.3638, simple_loss=0.407, pruned_loss=0.1602, over 935234.09 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:01:35,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=12666.666666666666, ans=0.4566666666666667 +2024-07-27 12:01:35,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.605e+01 6.746e+01 7.341e+01 8.122e+01 1.206e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 12:01:40,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12680.0, ans=0.1732 +2024-07-27 12:01:46,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.53 vs. limit=17.009999999999998 +2024-07-27 12:01:52,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.40 vs. 
limit=17.02 +2024-07-27 12:02:02,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=12693.333333333334, ans=0.125 +2024-07-27 12:02:14,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.51 vs. limit=4.906 +2024-07-27 12:02:31,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12720.0, ans=0.17279999999999998 +2024-07-27 12:02:42,665 INFO [train.py:1114] (0/4) Epoch 1, batch 9550, loss[loss=0.3475, simple_loss=0.3892, pruned_loss=0.1529, over 4778.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.4066, pruned_loss=0.1596, over 932213.88 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:02:42,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=12733.333333333334, ans=0.125 +2024-07-27 12:02:49,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=12733.333333333334, ans=0.013611111111111109 +2024-07-27 12:03:01,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=12746.666666666666, ans=0.4538666666666667 +2024-07-27 12:03:08,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=12.285 +2024-07-27 12:03:13,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=12773.333333333334, ans=0.125 +2024-07-27 12:03:19,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=12786.666666666666, ans=0.008089855072463768 +2024-07-27 12:03:22,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.63 vs. limit=11.393333333333333 +2024-07-27 12:03:24,201 INFO [train.py:1114] (0/4) Epoch 1, batch 9600, loss[loss=0.4961, simple_loss=0.4858, pruned_loss=0.2533, over 3143.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.4067, pruned_loss=0.1597, over 930985.58 frames. ], batch size: 35, lr: 3.53e-02, grad_scale: 64.0 +2024-07-27 12:03:30,716 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.771e+01 7.099e+01 8.382e+01 1.458e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 12:03:32,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12800.0, ans=0.172 +2024-07-27 12:03:35,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.56 vs. 
+2024-07-27 12:04:11,707 INFO [train.py:1114] (0/4) Epoch 1, batch 9650, loss[loss=0.4209, simple_loss=0.4695, pruned_loss=0.1861, over 4833.00 frames. ], tot_loss[loss=0.3646, simple_loss=0.4083, pruned_loss=0.1605, over 926939.59 frames. ], batch size: 16, lr: 3.53e-02, grad_scale: 64.0
+2024-07-27 12:04:49,096 INFO [train.py:1114] (0/4) Epoch 1, batch 9700, loss[loss=0.4022, simple_loss=0.4526, pruned_loss=0.1759, over 4243.00 frames. ], tot_loss[loss=0.3647, simple_loss=0.4084, pruned_loss=0.1605, over 924831.02 frames. ], batch size: 25, lr: 3.52e-02, grad_scale: 64.0
+2024-07-27 12:04:52,734 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.794e+01 6.661e+01 7.332e+01 8.273e+01 1.352e+02, threshold=1.466e+02, percent-clipped=0.0
+2024-07-27 12:05:57,821 INFO [train.py:1114] (0/4) Epoch 1, batch 9750, loss[loss=0.3872, simple_loss=0.4129, pruned_loss=0.1808, over 4689.00 frames. ], tot_loss[loss=0.3624, simple_loss=0.4064, pruned_loss=0.1592, over 925554.55 frames. ], batch size: 15, lr: 3.51e-02, grad_scale: 64.0
+2024-07-27 12:06:48,517 INFO [train.py:1114] (0/4) Epoch 1, batch 9800, loss[loss=0.3467, simple_loss=0.3821, pruned_loss=0.1556, over 4711.00 frames. ], tot_loss[loss=0.3606, simple_loss=0.4046, pruned_loss=0.1583, over 925457.42 frames. ], batch size: 12, lr: 3.51e-02, grad_scale: 64.0
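Each train.py summary line reports two figures: loss over the current batch's frames, and tot_loss, a running aggregate over recent batches weighted by frame count. A sketch of one way to maintain such a frame-weighted running average is below; the exponential-forgetting constant is an assumption, not a value taken from this run:

```python
class FrameWeightedLoss:
    """Running loss average weighted by frame counts, with exponential
    forgetting so recent batches dominate. Sketch only; the decay value
    and the exact aggregation in train.py are assumptions."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.weighted_sum = 0.0  # sum of loss * frames, decayed
        self.frames = 0.0        # sum of frames, decayed

    def update(self, loss: float, num_frames: float) -> None:
        self.weighted_sum = self.decay * self.weighted_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def tot_loss(self) -> float:
        return self.weighted_sum / max(self.frames, 1.0)

tracker = FrameWeightedLoss()
tracker.update(0.3475, 4778.0)  # one batch, as in the log lines above
print(f"tot_loss={tracker.tot_loss:.4f} over {tracker.frames:.2f} frames")
```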
+2024-07-27 12:06:51,243 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.855e+01 7.493e+01 8.291e+01 1.245e+02, threshold=1.499e+02, percent-clipped=0.0
+2024-07-27 12:07:31,388 INFO [train.py:1114] (0/4) Epoch 1, batch 9850, loss[loss=0.389, simple_loss=0.4372, pruned_loss=0.1704, over 4904.00 frames. ], tot_loss[loss=0.3603, simple_loss=0.4041, pruned_loss=0.1582, over 927423.34 frames. ], batch size: 15, lr: 3.50e-02, grad_scale: 64.0
+2024-07-27 12:08:15,401 INFO [train.py:1114] (0/4) Epoch 1, batch 9900, loss[loss=0.4464, simple_loss=0.4721, pruned_loss=0.2104, over 4851.00 frames. ], tot_loss[loss=0.3633, simple_loss=0.4067, pruned_loss=0.1599, over 926731.60 frames. ], batch size: 16, lr: 3.50e-02, grad_scale: 64.0
+2024-07-27 12:08:17,904 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.801e+01 7.469e+01 8.450e+01 1.233e+02, threshold=1.494e+02, percent-clipped=0.0
+2024-07-27 12:08:54,199 INFO [train.py:1114] (0/4) Epoch 1, batch 9950, loss[loss=0.2779, simple_loss=0.3267, pruned_loss=0.1146, over 4794.00 frames. ], tot_loss[loss=0.3657, simple_loss=0.4086, pruned_loss=0.1614, over 929180.87 frames. ], batch size: 11, lr: 3.49e-02, grad_scale: 64.0
+2024-07-27 12:09:27,231 INFO [train.py:1114] (0/4) Epoch 1, batch 10000, loss[loss=0.3653, simple_loss=0.4206, pruned_loss=0.155, over 4655.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4116, pruned_loss=0.1622, over 926963.82 frames. ], batch size: 16, lr: 3.49e-02, grad_scale: 64.0
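The recurring optim.py WARNING lines summarize the recent distribution of gradient norms as five quantiles, together with the clipping threshold and the share of batches clipped; in each line the threshold is close to Clipping_scale (2.0) times the middle quartile. A hedged sketch of how such statistics could be collected; the window size, quantile choices, and threshold rule here are assumptions:

```python
import numpy as np

class GradNormMonitor:
    """Collects recent gradient norms and derives a clipping threshold
    from their median. Sketch; window size, quantiles, and the exact
    rule in icefall's optim.py are assumptions."""

    def __init__(self, window: int = 128, clipping_scale: float = 2.0):
        self.norms: list[float] = []
        self.window = window
        self.clipping_scale = clipping_scale
        self.clipped = 0
        self.total = 0

    def observe(self, grad_norm: float) -> float:
        self.norms = (self.norms + [grad_norm])[-self.window:]
        threshold = self.clipping_scale * float(np.median(self.norms))
        self.total += 1
        self.clipped += grad_norm > threshold
        return threshold

    def report(self) -> str:
        q = np.percentile(self.norms, [5, 25, 50, 75, 100])  # assumed quantiles
        pct = 100.0 * self.clipped / max(self.total, 1)
        quartiles = " ".join(f"{v:.3e}" for v in q)
        return (f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
                f"{quartiles}, threshold={self.clipping_scale * q[2]:.3e}, "
                f"percent-clipped={pct:.1f}")
```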
+2024-07-27 12:09:29,743 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.713e+01 6.862e+01 7.247e+01 8.214e+01 1.240e+02, threshold=1.449e+02, percent-clipped=0.0
+2024-07-27 12:10:01,314 INFO [train.py:1114] (0/4) Epoch 1, batch 10050, loss[loss=0.5169, simple_loss=0.4894, pruned_loss=0.2722, over 3312.00 frames. ], tot_loss[loss=0.3728, simple_loss=0.4153, pruned_loss=0.1651, over 915949.70 frames. ], batch size: 35, lr: 3.48e-02, grad_scale: 64.0
+2024-07-27 12:10:46,728 INFO [train.py:1114] (0/4) Epoch 1, batch 10100, loss[loss=0.4564, simple_loss=0.4629, pruned_loss=0.225, over 3280.00 frames. ], tot_loss[loss=0.3876, simple_loss=0.4237, pruned_loss=0.1757, over 864431.26 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 64.0
+2024-07-27 12:10:49,394 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.041e+01 6.990e+01 7.547e+01 8.268e+01 1.617e+02, threshold=1.509e+02, percent-clipped=1.0
+2024-07-27 12:11:23,859 INFO [train.py:1114] (0/4) Epoch 1, batch 10150, loss[loss=0.4143, simple_loss=0.4252, pruned_loss=0.2017, over 3558.00 frames. ], tot_loss[loss=0.3972, simple_loss=0.4292, pruned_loss=0.1826, over 822181.85 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 64.0
+2024-07-27 12:12:02,428 INFO [train.py:1114] (0/4) Epoch 1, batch 10200, loss[loss=0.4167, simple_loss=0.4349, pruned_loss=0.1993, over 3342.00 frames. ], tot_loss[loss=0.4035, simple_loss=0.4324, pruned_loss=0.1873, over 790105.16 frames. ], batch size: 35, lr: 3.46e-02, grad_scale: 64.0
+2024-07-27 12:12:04,946 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.695e+01 6.612e+01 7.159e+01 7.876e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0
+2024-07-27 12:12:16,792 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-1.pt
+2024-07-27 12:13:02,325 INFO [train.py:1114] (0/4) Epoch 2, batch 0, loss[loss=0.3168, simple_loss=0.3636, pruned_loss=0.135, over 4856.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3636, pruned_loss=0.135, over 4856.00 frames. ], batch size: 12, lr: 3.39e-02, grad_scale: 64.0
+2024-07-27 12:13:02,325 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 12:13:11,221 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.7291, 5.4575, 5.5191, 5.5490], device='cuda:0')
+2024-07-27 12:13:13,917 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.3005, simple_loss=0.3865, pruned_loss=0.1073, over 944034.00 frames.
+2024-07-27 12:13:13,918 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 12:13:49,241 INFO [train.py:1114] (0/4) Epoch 2, batch 50, loss[loss=0.3226, simple_loss=0.3785, pruned_loss=0.1334, over 4626.00 frames. ], tot_loss[loss=0.3676, simple_loss=0.41, pruned_loss=0.1626, over 207002.17 frames. ], batch size: 11, lr: 3.39e-02, grad_scale: 64.0
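At the epoch boundary the run saves epoch-1.pt and then computes a validation loss before resuming ('Computing validation loss' followed by 'Epoch 2, validation: loss=0.3005 ...'), also reporting peak GPU memory. A minimal sketch of that sequence; the function name and the compute_loss helper are illustrative stand-ins, not the actual train.py API:

```python
import torch

def save_and_validate(model, optimizer, valid_loader, exp_dir, epoch, device):
    """Save an epoch checkpoint, then run a no-grad validation pass.
    Sketch only; icefall's checkpoint.py and train.py do more bookkeeping,
    and compute_loss here is an assumed helper, not a real API."""
    torch.save(
        {"model": model.state_dict(), "optimizer": optimizer.state_dict()},
        f"{exp_dir}/epoch-{epoch}.pt",
    )
    model.eval()
    loss_sum, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch, device)  # assumed helper
            loss_sum += loss.item() * num_frames
            frames += num_frames
    model.train()
    print(f"Epoch {epoch + 1}, validation: loss={loss_sum / frames:.4f}, "
          f"over {frames:.2f} frames.")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated(device) // 2**20}MB")
```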
+2024-07-27 12:14:16,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.870e+01 6.791e+01 7.517e+01 8.543e+01 1.783e+02, threshold=1.503e+02, percent-clipped=1.0
+2024-07-27 12:14:29,111 INFO [train.py:1114] (0/4) Epoch 2, batch 100, loss[loss=0.4075, simple_loss=0.4199, pruned_loss=0.1976, over 4641.00 frames. ], tot_loss[loss=0.367, simple_loss=0.4102, pruned_loss=0.1619, over 365627.14 frames. ], batch size: 12, lr: 3.38e-02, grad_scale: 64.0
+2024-07-27 12:15:04,113 INFO [train.py:1114] (0/4) Epoch 2, batch 150, loss[loss=0.3075, simple_loss=0.3723, pruned_loss=0.1214, over 4615.00 frames. ], tot_loss[loss=0.3568, simple_loss=0.4029, pruned_loss=0.1554, over 494184.05 frames. ], batch size: 11, lr: 3.38e-02, grad_scale: 64.0
+2024-07-27 12:15:26,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.728e+01 6.633e+01 7.371e+01 8.455e+01 1.546e+02, threshold=1.474e+02, percent-clipped=1.0
+2024-07-27 12:15:38,909 INFO [train.py:1114] (0/4) Epoch 2, batch 200, loss[loss=0.4159, simple_loss=0.446, pruned_loss=0.1929, over 4534.00 frames. ], tot_loss[loss=0.3549, simple_loss=0.4011, pruned_loss=0.1544, over 593633.27 frames. ], batch size: 21, lr: 3.37e-02, grad_scale: 64.0
+2024-07-27 12:16:15,252 INFO [train.py:1114] (0/4) Epoch 2, batch 250, loss[loss=0.3785, simple_loss=0.427, pruned_loss=0.165, over 4626.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.4007, pruned_loss=0.1535, over 670402.54 frames. ], batch size: 16, lr: 3.37e-02, grad_scale: 64.0
+2024-07-27 12:16:41,803 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.968e+01 6.499e+01 7.152e+01 7.948e+01 1.053e+02, threshold=1.430e+02, percent-clipped=0.0
+2024-07-27 12:16:56,573 INFO [train.py:1114] (0/4) Epoch 2, batch 300, loss[loss=0.3473, simple_loss=0.4051, pruned_loss=0.1448, over 4800.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3997, pruned_loss=0.1525, over 729953.27 frames. ], batch size: 15, lr: 3.36e-02, grad_scale: 64.0
+2024-07-27 12:17:01,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=12.761
+2024-07-27 12:17:31,955 INFO [train.py:1114] (0/4) Epoch 2, batch 350, loss[loss=0.3556, simple_loss=0.3951, pruned_loss=0.158, over 4939.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3992, pruned_loss=0.1516, over 775913.99 frames. ], batch size: 12, lr: 3.36e-02, grad_scale: 64.0
+2024-07-27 12:17:57,630 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.955e+01 6.715e+01 7.349e+01 8.005e+01 1.409e+02, threshold=1.470e+02, percent-clipped=0.0
+2024-07-27 12:18:10,163 INFO [train.py:1114] (0/4) Epoch 2, batch 400, loss[loss=0.3108, simple_loss=0.3694, pruned_loss=0.1261, over 4700.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3984, pruned_loss=0.1511, over 813447.50 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0
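The 'Whitening: ... metric=X vs. limit=Y' lines compare a covariance statistic of a module's activations against a scheduled limit that itself relaxes over training (12.761 above). One plausible reading of the metric, as the ratio between the arithmetic and geometric mean of the per-group covariance eigenvalues (1.0 for perfectly whitened activations), is sketched below; this is an interpretation, not a verbatim copy of scaling.py:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """How far activations are from 'white' (identity covariance):
    ratio of arithmetic to geometric mean of per-group covariance
    eigenvalues, 1.0 when perfectly whitened. Interpretation sketch,
    not a verbatim copy of scaling.py."""
    x = x.reshape(-1, x.shape[-1])                   # (frames, channels)
    frames, channels = x.shape
    x = x.reshape(frames, num_groups, channels // num_groups).permute(1, 0, 2)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / frames             # per-group covariance
    eigs = torch.linalg.eigvalsh(cov).clamp(min=1e-20)
    arith = eigs.mean(dim=-1)
    geo = eigs.log().mean(dim=-1).exp()
    return (arith / geo).mean().item()

# far-from-white activations score well above 1.0, as in the log:
x = torch.randn(2000, 384) @ torch.randn(384, 384)   # correlated channels
print(whitening_metric(x, num_groups=1))
```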
+2024-07-27 12:18:45,088 INFO [train.py:1114] (0/4) Epoch 2, batch 450, loss[loss=0.3638, simple_loss=0.4098, pruned_loss=0.1589, over 4645.00 frames. ], tot_loss[loss=0.3535, simple_loss=0.4008, pruned_loss=0.1531, over 838173.83 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0
+2024-07-27 12:19:07,278 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+01 6.535e+01 7.099e+01 8.060e+01 1.224e+02, threshold=1.420e+02, percent-clipped=0.0
+2024-07-27 12:19:19,771 INFO [train.py:1114] (0/4) Epoch 2, batch 500, loss[loss=0.4029, simple_loss=0.4433, pruned_loss=0.1813, over 4682.00 frames. ], tot_loss[loss=0.3512, simple_loss=0.3991, pruned_loss=0.1517, over 860752.65 frames. ], batch size: 15, lr: 3.34e-02, grad_scale: 64.0
+2024-07-27 12:20:00,826 INFO [train.py:1114] (0/4) Epoch 2, batch 550, loss[loss=0.4095, simple_loss=0.4454, pruned_loss=0.1868, over 4591.00 frames. ], tot_loss[loss=0.3516, simple_loss=0.3998, pruned_loss=0.1517, over 876984.70 frames. ], batch size: 17, lr: 3.34e-02, grad_scale: 64.0
+2024-07-27 12:20:27,924 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.749e+01 6.667e+01 7.400e+01 8.349e+01 1.588e+02, threshold=1.480e+02, percent-clipped=3.0
+2024-07-27 12:20:40,335 INFO [train.py:1114] (0/4) Epoch 2, batch 600, loss[loss=0.3577, simple_loss=0.4111, pruned_loss=0.1521, over 4673.00 frames. ], tot_loss[loss=0.3499, simple_loss=0.399, pruned_loss=0.1504, over 891623.70 frames. ], batch size: 16, lr: 3.33e-02, grad_scale: 64.0
+2024-07-27 12:21:14,602 INFO [train.py:1114] (0/4) Epoch 2, batch 650, loss[loss=0.3757, simple_loss=0.4043, pruned_loss=0.1735, over 4757.00 frames. ], tot_loss[loss=0.3493, simple_loss=0.3979, pruned_loss=0.1504, over 903163.68 frames. ], batch size: 13, lr: 3.33e-02, grad_scale: 64.0
+2024-07-27 12:21:36,643 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.647e+01 7.177e+01 7.899e+01 1.481e+02, threshold=1.435e+02, percent-clipped=1.0
+2024-07-27 12:21:49,057 INFO [train.py:1114] (0/4) Epoch 2, batch 700, loss[loss=0.3334, simple_loss=0.402, pruned_loss=0.1324, over 4643.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3966, pruned_loss=0.1495, over 911160.08 frames. ], batch size: 12, lr: 3.32e-02, grad_scale: 64.0
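For reading trends out of a log like this, the train.py summary lines are regular enough to parse mechanically. The snippet below extracts (epoch, batch, tot_loss, lr) tuples; the regex is written against the line format shown above and is best-effort:

```python
import re

SUMMARY = re.compile(
    r"\[train\.py:\d+\].*?Epoch (\d+), batch (\d+),.*?"
    r"tot_loss\[loss=([\d.]+).*?lr: ([\d.e-]+)"
)

def parse_log(path: str):
    """Yield (epoch, batch, tot_loss, lr) from an icefall-style train log."""
    with open(path) as f:
        for line in f:
            m = SUMMARY.search(line)
            if m:
                yield (int(m.group(1)), int(m.group(2)),
                       float(m.group(3)), float(m.group(4)))

# e.g. tot_loss drifts from 0.3644 (Epoch 1, batch 9450) to ~0.34 above:
# for epoch, batch, tot_loss, lr in parse_log("log-train"): ...
```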
+2024-07-27 12:22:29,319 INFO [train.py:1114] (0/4) Epoch 2, batch 750, loss[loss=0.3404, simple_loss=0.4058, pruned_loss=0.1375, over 4694.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3949, pruned_loss=0.1479, over 917922.60 frames. ], batch size: 13, lr: 3.31e-02, grad_scale: 64.0
+2024-07-27 12:22:53,332 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.645e+01 6.839e+01 7.355e+01 8.149e+01 1.440e+02, threshold=1.471e+02, percent-clipped=1.0
+2024-07-27 12:23:05,830 INFO [train.py:1114] (0/4) Epoch 2, batch 800, loss[loss=0.4032, simple_loss=0.4286, pruned_loss=0.1889, over 4856.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.396, pruned_loss=0.1498, over 923503.01 frames. ], batch size: 12, lr: 3.31e-02, grad_scale: 128.0
+2024-07-27 12:23:40,054 INFO [train.py:1114] (0/4) Epoch 2, batch 850, loss[loss=0.3416, simple_loss=0.4016, pruned_loss=0.1408, over 4650.00 frames. ], tot_loss[loss=0.3463, simple_loss=0.395, pruned_loss=0.1487, over 927445.46 frames. ], batch size: 14, lr: 3.30e-02, grad_scale: 64.0
+2024-07-27 12:24:02,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.532e+01 6.555e+01 7.139e+01 7.731e+01 1.156e+02, threshold=1.428e+02, percent-clipped=0.0
+2024-07-27 12:24:15,202 INFO [train.py:1114] (0/4) Epoch 2, batch 900, loss[loss=0.3576, simple_loss=0.4047, pruned_loss=0.1552, over 4869.00 frames. ], tot_loss[loss=0.3451, simple_loss=0.3944, pruned_loss=0.1479, over 928162.66 frames. ], batch size: 12, lr: 3.30e-02, grad_scale: 64.0
+2024-07-27 12:24:50,047 INFO [train.py:1114] (0/4) Epoch 2, batch 950, loss[loss=0.3458, simple_loss=0.3868, pruned_loss=0.1524, over 4768.00 frames. ], tot_loss[loss=0.3456, simple_loss=0.395, pruned_loss=0.1481, over 930148.62 frames. ], batch size: 12, lr: 3.29e-02, grad_scale: 64.0
+2024-07-27 12:25:13,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+01 6.513e+01 7.102e+01 8.226e+01 2.101e+02, threshold=1.420e+02, percent-clipped=1.0
+2024-07-27 12:25:25,597 INFO [train.py:1114] (0/4) Epoch 2, batch 1000, loss[loss=0.3527, simple_loss=0.3934, pruned_loss=0.156, over 4957.00 frames. ], tot_loss[loss=0.3465, simple_loss=0.3961, pruned_loss=0.1484, over 929767.86 frames. ], batch size: 13, lr: 3.29e-02, grad_scale: 64.0
+2024-07-27 12:26:00,224 INFO [train.py:1114] (0/4) Epoch 2, batch 1050, loss[loss=0.3222, simple_loss=0.3663, pruned_loss=0.139, over 4881.00 frames. ], tot_loss[loss=0.3442, simple_loss=0.3939, pruned_loss=0.1473, over 932009.30 frames. ], batch size: 14, lr: 3.28e-02, grad_scale: 64.0
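grad_scale in these lines is the dynamic loss scale used for mixed-precision training; it sits at 64.0 through most of the excerpt, doubles to 128.0 at Epoch 2, batch 800, and is back at 64.0 by batch 850, consistent with an overflow halving it again. The standard PyTorch mechanism looks roughly like this; the growth settings shown are assumptions, not values read from this run:

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=64.0,       # matches the grad_scale seen through most of the log
    growth_factor=2.0,     # doubling step, e.g. 64.0 -> 128.0
    backoff_factor=0.5,    # halved again after an inf/nan step
    growth_interval=2000,  # assumed; successful steps required before growing
)

def train_step(model, optimizer, batch, device):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(batch.to(device)).mean()  # placeholder loss computation
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # skips the update if gradients overflowed
    scaler.update()         # grows or backs off the scale, i.e. grad_scale
    return loss.detach()
```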
+2024-07-27 12:26:22,937 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.480e+01 6.937e+01 7.724e+01 1.151e+02, threshold=1.387e+02, percent-clipped=0.0
+2024-07-27 12:26:35,192 INFO [train.py:1114] (0/4) Epoch 2, batch 1100, loss[loss=0.3398, simple_loss=0.3904, pruned_loss=0.1446, over 4893.00 frames. ], tot_loss[loss=0.3429, simple_loss=0.3928, pruned_loss=0.1465, over 933985.46 frames. ], batch size: 13, lr: 3.28e-02, grad_scale: 64.0
+2024-07-27 12:27:09,691 INFO [train.py:1114] (0/4) Epoch 2, batch 1150, loss[loss=0.3682, simple_loss=0.4154, pruned_loss=0.1605, over 4897.00 frames. ], tot_loss[loss=0.3433, simple_loss=0.3934, pruned_loss=0.1466, over 934042.63 frames. ], batch size: 13, lr: 3.27e-02, grad_scale: 64.0
+2024-07-27 12:27:35,088 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.945e+01 6.616e+01 7.321e+01 8.237e+01 1.316e+02, threshold=1.464e+02, percent-clipped=0.0
+2024-07-27 12:27:48,586 INFO [train.py:1114] (0/4) Epoch 2, batch 1200, loss[loss=0.3383, simple_loss=0.3904, pruned_loss=0.1431, over 4871.00 frames. ], tot_loss[loss=0.3466, simple_loss=0.396, pruned_loss=0.1485, over 932906.62 frames. ], batch size: 14, lr: 3.27e-02, grad_scale: 64.0
+2024-07-27 12:28:26,768 INFO [train.py:1114] (0/4) Epoch 2, batch 1250, loss[loss=0.3107, simple_loss=0.3723, pruned_loss=0.1246, over 4799.00 frames. ], tot_loss[loss=0.3444, simple_loss=0.3948, pruned_loss=0.147, over 937060.21 frames. ], batch size: 15, lr: 3.26e-02, grad_scale: 64.0
+2024-07-27 12:28:49,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.573e+01 7.173e+01 8.198e+01 1.375e+02, threshold=1.435e+02, percent-clipped=0.0
+2024-07-27 12:29:01,020 INFO [train.py:1114] (0/4) Epoch 2, batch 1300, loss[loss=0.3957, simple_loss=0.4287, pruned_loss=0.1814, over 4687.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3923, pruned_loss=0.1457, over 939038.05 frames. ], batch size: 19, lr: 3.26e-02, grad_scale: 64.0
+2024-07-27 12:29:35,583 INFO [train.py:1114] (0/4) Epoch 2, batch 1350, loss[loss=0.3517, simple_loss=0.3919, pruned_loss=0.1558, over 4757.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3905, pruned_loss=0.1442, over 941078.66 frames. ], batch size: 13, lr: 3.25e-02, grad_scale: 64.0
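The learning rate in these summaries decays smoothly both within and across epochs (3.55e-02 at Epoch 1, batch 9450 down to 3.25e-02 here). That shape resembles icefall's Eden scheduler; the sketch below follows Eden's general form from memory, and the base_lr and schedule constants are assumptions, so the printed values are illustrative rather than a reproduction of this run:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-style learning rate, decaying smoothly in both the batch and
    the epoch index. General shape only; the constants and base_lr of
    this particular run are unknown."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.5
    return base_lr * batch_factor * epoch_factor

for batch, epoch in [(9450, 1), (12000, 2), (15500, 2)]:
    print(f"batch {batch}: lr={eden_lr(0.05, batch, epoch):.2e}")
```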
limit=13.286 +2024-07-27 12:29:38,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=15429.333333333334, ans=0.35997333333333337 +2024-07-27 12:29:38,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.56 vs. limit=13.286 +2024-07-27 12:29:51,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.21 vs. limit=12.728 +2024-07-27 12:29:52,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=15456.0, ans=0.125 +2024-07-27 12:29:58,235 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:29:58,812 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.395e+01 7.183e+01 7.821e+01 1.561e+02, threshold=1.437e+02, percent-clipped=1.0 +2024-07-27 12:30:06,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15482.666666666666, ans=0.125 +2024-07-27 12:30:10,898 INFO [train.py:1114] (0/4) Epoch 2, batch 1400, loss[loss=0.295, simple_loss=0.3419, pruned_loss=0.124, over 4717.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3913, pruned_loss=0.1456, over 942774.42 frames. ], batch size: 11, lr: 3.25e-02, grad_scale: 64.0 +2024-07-27 12:30:23,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=15509.333333333334, ans=0.002044444444444446 +2024-07-27 12:30:27,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.56 vs. limit=13.321 +2024-07-27 12:30:27,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=15522.666666666666, ans=0.125 +2024-07-27 12:30:29,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=15522.666666666666, ans=0.125 +2024-07-27 12:30:36,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=15536.0, ans=0.3562400000000001 +2024-07-27 12:30:43,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=15549.333333333334, ans=0.3557733333333334 +2024-07-27 12:30:46,222 INFO [train.py:1114] (0/4) Epoch 2, batch 1450, loss[loss=0.4073, simple_loss=0.4485, pruned_loss=0.1831, over 4691.00 frames. ], tot_loss[loss=0.3427, simple_loss=0.3926, pruned_loss=0.1464, over 942546.74 frames. 
], batch size: 15, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:30:50,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=15562.666666666666, ans=0.007486376811594203 +2024-07-27 12:30:55,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=15576.0, ans=0.09899494936611666 +2024-07-27 12:30:56,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=15576.0, ans=0.125 +2024-07-27 12:31:01,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=15589.333333333334, ans=0.35437333333333343 +2024-07-27 12:31:01,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=15589.333333333334, ans=0.0017111111111111146 +2024-07-27 12:31:08,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.669e+01 6.624e+01 7.283e+01 7.925e+01 1.878e+02, threshold=1.457e+02, percent-clipped=2.0 +2024-07-27 12:31:15,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:16,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:19,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:19,377 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.72 vs. limit=13.356 +2024-07-27 12:31:20,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:23,979 INFO [train.py:1114] (0/4) Epoch 2, batch 1500, loss[loss=0.3418, simple_loss=0.395, pruned_loss=0.1443, over 4814.00 frames. ], tot_loss[loss=0.3434, simple_loss=0.3936, pruned_loss=0.1466, over 942422.12 frames. ], batch size: 14, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:31:28,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.55 vs. limit=12.814666666666668 +2024-07-27 12:31:31,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15642.666666666666, ans=0.0 +2024-07-27 12:31:36,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=15642.666666666666, ans=0.0014888888888888938 +2024-07-27 12:31:37,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=15656.0, ans=0.125 +2024-07-27 12:31:37,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15656.0, ans=0.14343999999999998 +2024-07-27 12:31:38,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=15656.0, ans=0.125 +2024-07-27 12:31:42,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.27 vs. 
limit=19.242 +2024-07-27 12:31:47,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15669.333333333334, ans=0.14330666666666667 +2024-07-27 12:31:48,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15669.333333333334, ans=0.14330666666666667 +2024-07-27 12:31:48,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=15669.333333333334, ans=0.0 +2024-07-27 12:31:51,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.88 vs. limit=13.381 +2024-07-27 12:31:52,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=15682.666666666666, ans=0.125 +2024-07-27 12:31:57,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=15682.666666666666, ans=0.007460289855072464 +2024-07-27 12:31:58,877 INFO [train.py:1114] (0/4) Epoch 2, batch 1550, loss[loss=0.2996, simple_loss=0.3831, pruned_loss=0.1081, over 4913.00 frames. ], tot_loss[loss=0.3457, simple_loss=0.3951, pruned_loss=0.1481, over 938812.82 frames. ], batch size: 15, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:13,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=15722.666666666666, ans=0.125 +2024-07-27 12:32:13,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=15722.666666666666, ans=0.007451594202898551 +2024-07-27 12:32:15,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=13.396 +2024-07-27 12:32:21,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.47 vs. limit=12.868 +2024-07-27 12:32:22,802 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+01 6.571e+01 7.346e+01 8.400e+01 2.303e+02, threshold=1.469e+02, percent-clipped=1.0 +2024-07-27 12:32:26,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=15736.0, ans=0.125 +2024-07-27 12:32:26,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. limit=13.401 +2024-07-27 12:32:31,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.60 vs. limit=13.405999999999999 +2024-07-27 12:32:34,714 INFO [train.py:1114] (0/4) Epoch 2, batch 1600, loss[loss=0.3688, simple_loss=0.4247, pruned_loss=0.1565, over 4868.00 frames. ], tot_loss[loss=0.3461, simple_loss=0.3955, pruned_loss=0.1483, over 936554.25 frames. 
], batch size: 14, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:39,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15762.666666666666, ans=0.125 +2024-07-27 12:32:45,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=15776.0, ans=0.34784000000000004 +2024-07-27 12:33:00,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=15802.666666666666, ans=0.125 +2024-07-27 12:33:02,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15802.666666666666, ans=0.0 +2024-07-27 12:33:13,426 INFO [train.py:1114] (0/4) Epoch 2, batch 1650, loss[loss=0.3494, simple_loss=0.4054, pruned_loss=0.1468, over 4670.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3963, pruned_loss=0.1494, over 937097.19 frames. ], batch size: 14, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:33:15,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=15829.333333333334, ans=0.125 +2024-07-27 12:33:36,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=15856.0, ans=0.007422608695652174 +2024-07-27 12:33:36,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=15856.0, ans=0.025 +2024-07-27 12:33:42,228 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.520e+01 7.164e+01 7.874e+01 1.221e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 12:33:45,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15869.333333333334, ans=0.14130666666666666 +2024-07-27 12:33:59,012 INFO [train.py:1114] (0/4) Epoch 2, batch 1700, loss[loss=0.2759, simple_loss=0.3336, pruned_loss=0.1091, over 4697.00 frames. ], tot_loss[loss=0.3448, simple_loss=0.3946, pruned_loss=0.1475, over 938746.16 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:01,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=15896.0, ans=0.07 +2024-07-27 12:34:06,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=15909.333333333334, ans=0.125 +2024-07-27 12:34:23,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=13.475999999999999 +2024-07-27 12:34:24,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.48 vs. limit=19.451999999999998 +2024-07-27 12:34:25,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=15936.0, ans=0.125 +2024-07-27 12:34:25,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=15936.0, ans=0.07 +2024-07-27 12:34:33,941 INFO [train.py:1114] (0/4) Epoch 2, batch 1750, loss[loss=0.3524, simple_loss=0.3831, pruned_loss=0.1609, over 4810.00 frames. 
], tot_loss[loss=0.3436, simple_loss=0.3934, pruned_loss=0.1469, over 940007.99 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:43,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.68 vs. limit=13.491 +2024-07-27 12:34:47,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=15989.333333333334, ans=0.125 +2024-07-27 12:34:48,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.04 vs. limit=19.492 +2024-07-27 12:34:53,357 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-12000.pt +2024-07-27 12:34:59,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.489e+01 6.792e+01 7.364e+01 8.042e+01 2.018e+02, threshold=1.473e+02, percent-clipped=1.0 +2024-07-27 12:35:00,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=16002.666666666666, ans=0.0 +2024-07-27 12:35:01,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=16002.666666666666, ans=0.0 +2024-07-27 12:35:04,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=16016.0, ans=0.0 +2024-07-27 12:35:06,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.33 vs. limit=13.008 +2024-07-27 12:35:07,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=16016.0, ans=0.125 +2024-07-27 12:35:09,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16016.0, ans=0.125 +2024-07-27 12:35:10,728 INFO [train.py:1114] (0/4) Epoch 2, batch 1800, loss[loss=0.3402, simple_loss=0.3858, pruned_loss=0.1473, over 4631.00 frames. ], tot_loss[loss=0.3432, simple_loss=0.393, pruned_loss=0.1467, over 940491.44 frames. ], batch size: 13, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:11,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16029.333333333334, ans=0.13970666666666667 +2024-07-27 12:35:21,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=16042.666666666666, ans=0.125 +2024-07-27 12:35:29,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=16056.0, ans=0.125 +2024-07-27 12:35:43,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16082.666666666666, ans=0.125 +2024-07-27 12:35:45,373 INFO [train.py:1114] (0/4) Epoch 2, batch 1850, loss[loss=0.3638, simple_loss=0.4049, pruned_loss=0.1613, over 4808.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3912, pruned_loss=0.1456, over 940442.98 frames. 
], batch size: 14, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:46,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=16096.0, ans=0.09899494936611666 +2024-07-27 12:36:07,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=16122.666666666666, ans=0.125 +2024-07-27 12:36:09,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16122.666666666666, ans=0.125 +2024-07-27 12:36:15,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.06 vs. limit=13.551 +2024-07-27 12:36:17,282 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.413e+01 7.038e+01 7.663e+01 1.052e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 12:36:24,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=16149.333333333334, ans=0.33477333333333337 +2024-07-27 12:36:28,685 INFO [train.py:1114] (0/4) Epoch 2, batch 1900, loss[loss=0.366, simple_loss=0.4015, pruned_loss=0.1652, over 4664.00 frames. ], tot_loss[loss=0.34, simple_loss=0.3908, pruned_loss=0.1446, over 942123.10 frames. ], batch size: 14, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:36:44,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16189.333333333334, ans=0.13810666666666668 +2024-07-27 12:36:50,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=16189.333333333334, ans=0.3333733333333334 +2024-07-27 12:36:53,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.69 vs. limit=10.481066666666667 +2024-07-27 12:37:05,422 INFO [train.py:1114] (0/4) Epoch 2, batch 1950, loss[loss=0.2711, simple_loss=0.3277, pruned_loss=0.1072, over 4897.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3919, pruned_loss=0.1446, over 944093.99 frames. ], batch size: 13, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:37:28,264 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.625e+01 6.630e+01 7.143e+01 8.194e+01 1.176e+02, threshold=1.429e+02, percent-clipped=0.0 +2024-07-27 12:38:00,619 INFO [train.py:1114] (0/4) Epoch 2, batch 2000, loss[loss=0.3357, simple_loss=0.3792, pruned_loss=0.1461, over 4793.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3945, pruned_loss=0.1466, over 940958.65 frames. 
], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:05,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=16296.0, ans=0.08703999999999998 +2024-07-27 12:38:09,714 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:38:17,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=16322.666666666666, ans=0.9132266666666666 +2024-07-27 12:38:18,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16322.666666666666, ans=0.125 +2024-07-27 12:38:21,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16336.0, ans=0.0 +2024-07-27 12:38:27,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=16336.0, ans=0.007318260869565218 +2024-07-27 12:38:35,083 INFO [train.py:1114] (0/4) Epoch 2, batch 2050, loss[loss=0.2755, simple_loss=0.3404, pruned_loss=0.1054, over 4633.00 frames. ], tot_loss[loss=0.3403, simple_loss=0.3916, pruned_loss=0.1445, over 938691.53 frames. ], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:35,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.06 vs. limit=19.772 +2024-07-27 12:38:38,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=16362.666666666666, ans=0.44543999999999995 +2024-07-27 12:38:49,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.29 vs. limit=10.555733333333333 +2024-07-27 12:38:58,885 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.444e+01 7.138e+01 8.017e+01 1.723e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 12:39:08,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16402.666666666668, ans=0.125 +2024-07-27 12:39:09,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.38 vs. limit=13.201333333333334 +2024-07-27 12:39:19,481 INFO [train.py:1114] (0/4) Epoch 2, batch 2100, loss[loss=0.2679, simple_loss=0.344, pruned_loss=0.09591, over 4754.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3906, pruned_loss=0.1435, over 940622.79 frames. 
], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:39:34,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=16442.666666666668, ans=0.0 +2024-07-27 12:39:35,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16442.666666666668, ans=0.125 +2024-07-27 12:39:51,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16469.333333333332, ans=0.0 +2024-07-27 12:39:51,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=16469.333333333332, ans=0.0 +2024-07-27 12:40:06,949 INFO [train.py:1114] (0/4) Epoch 2, batch 2150, loss[loss=0.3094, simple_loss=0.3663, pruned_loss=0.1263, over 4891.00 frames. ], tot_loss[loss=0.3363, simple_loss=0.3886, pruned_loss=0.142, over 943766.75 frames. ], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:40:19,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16509.333333333332, ans=0.13490666666666667 +2024-07-27 12:40:28,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.78 vs. limit=5.478400000000001 +2024-07-27 12:40:33,589 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.279e+01 6.440e+01 7.313e+01 8.077e+01 1.347e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 12:40:35,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=16536.0, ans=0.125 +2024-07-27 12:40:36,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=16536.0, ans=0.125 +2024-07-27 12:40:40,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=16549.333333333332, ans=0.125 +2024-07-27 12:40:44,973 INFO [train.py:1114] (0/4) Epoch 2, batch 2200, loss[loss=0.3214, simple_loss=0.3888, pruned_loss=0.127, over 4812.00 frames. ], tot_loss[loss=0.3367, simple_loss=0.3889, pruned_loss=0.1422, over 943424.35 frames. 
], batch size: 14, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:40:45,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=16562.666666666668, ans=0.025 +2024-07-27 12:40:48,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=16562.666666666668, ans=0.125 +2024-07-27 12:40:52,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=16576.0, ans=0.0 +2024-07-27 12:40:54,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=16576.0, ans=10.0 +2024-07-27 12:41:03,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=16576.0, ans=0.125 +2024-07-27 12:41:04,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=16576.0, ans=0.0 +2024-07-27 12:41:04,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=16576.0, ans=0.125 +2024-07-27 12:41:04,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=16589.333333333332, ans=0.125 +2024-07-27 12:41:10,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=16589.333333333332, ans=0.3193733333333335 +2024-07-27 12:41:11,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=16589.333333333332, ans=0.0 +2024-07-27 12:41:11,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.00 vs. limit=13.721 +2024-07-27 12:41:14,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=16602.666666666668, ans=0.125 +2024-07-27 12:41:18,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16602.666666666668, ans=0.13397333333333333 +2024-07-27 12:41:18,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=16602.666666666668, ans=0.007260289855072464 +2024-07-27 12:41:18,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.93 vs. limit=5.4904 +2024-07-27 12:41:20,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=16616.0, ans=0.125 +2024-07-27 12:41:21,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.15 vs. limit=13.308 +2024-07-27 12:41:26,219 INFO [train.py:1114] (0/4) Epoch 2, batch 2250, loss[loss=0.3284, simple_loss=0.3966, pruned_loss=0.1301, over 4696.00 frames. ], tot_loss[loss=0.337, simple_loss=0.3889, pruned_loss=0.1425, over 942202.86 frames. ], batch size: 13, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:41:31,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.48 vs. 
limit=5.494400000000001 +2024-07-27 12:41:41,412 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:41:48,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.411e+01 7.130e+01 8.285e+01 1.332e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 12:41:50,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16669.333333333332, ans=0.0 +2024-07-27 12:42:00,436 INFO [train.py:1114] (0/4) Epoch 2, batch 2300, loss[loss=0.2796, simple_loss=0.3254, pruned_loss=0.1169, over 4947.00 frames. ], tot_loss[loss=0.3344, simple_loss=0.3867, pruned_loss=0.141, over 939123.04 frames. ], batch size: 12, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:15,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.09 vs. limit=20.031999999999996 +2024-07-27 12:42:18,159 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:42:42,011 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:42:47,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.90 vs. limit=13.786000000000001 +2024-07-27 12:42:47,925 INFO [train.py:1114] (0/4) Epoch 2, batch 2350, loss[loss=0.3668, simple_loss=0.425, pruned_loss=0.1544, over 4641.00 frames. ], tot_loss[loss=0.3338, simple_loss=0.3866, pruned_loss=0.1405, over 941403.27 frames. ], batch size: 13, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:55,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=16776.0, ans=0.3128400000000001 +2024-07-27 12:42:56,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.59 vs. limit=20.082 +2024-07-27 12:43:09,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=16789.333333333332, ans=0.007219710144927536 +2024-07-27 12:43:13,075 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.842e+01 6.484e+01 7.035e+01 7.953e+01 1.463e+02, threshold=1.407e+02, percent-clipped=1.0 +2024-07-27 12:43:15,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=16802.666666666668, ans=0.125 +2024-07-27 12:43:18,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16816.0, ans=0.13183999999999998 +2024-07-27 12:43:20,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=16816.0, ans=0.125 +2024-07-27 12:43:24,691 INFO [train.py:1114] (0/4) Epoch 2, batch 2400, loss[loss=0.348, simple_loss=0.397, pruned_loss=0.1495, over 4634.00 frames. ], tot_loss[loss=0.3332, simple_loss=0.3861, pruned_loss=0.1402, over 941205.38 frames. 
], batch size: 12, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:43:30,322 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:43:38,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=16842.666666666668, ans=0.125 +2024-07-27 12:43:40,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.81 vs. limit=20.132 +2024-07-27 12:43:58,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=16856.0, ans=0.125 +2024-07-27 12:44:14,595 INFO [train.py:1114] (0/4) Epoch 2, batch 2450, loss[loss=0.3215, simple_loss=0.3815, pruned_loss=0.1308, over 4694.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3885, pruned_loss=0.1415, over 937069.53 frames. ], batch size: 13, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:44:21,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=16896.0, ans=0.125 +2024-07-27 12:44:24,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.38 vs. limit=13.836 +2024-07-27 12:44:43,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.416e+01 7.061e+01 7.801e+01 1.253e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 12:44:46,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=13.850999999999999 +2024-07-27 12:44:48,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=16949.333333333332, ans=0.007184927536231884 +2024-07-27 12:44:57,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=16949.333333333332, ans=0.05 +2024-07-27 12:44:57,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=16949.333333333332, ans=0.125 +2024-07-27 12:44:58,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=16949.333333333332, ans=0.30677333333333345 +2024-07-27 12:45:00,023 INFO [train.py:1114] (0/4) Epoch 2, batch 2500, loss[loss=0.368, simple_loss=0.4217, pruned_loss=0.1572, over 4815.00 frames. ], tot_loss[loss=0.3354, simple_loss=0.3882, pruned_loss=0.1413, over 939142.75 frames. ], batch size: 14, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:04,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=16962.666666666668, ans=0.0 +2024-07-27 12:45:05,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.98 vs. limit=13.481333333333334 +2024-07-27 12:45:13,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.71 vs. limit=13.870999999999999 +2024-07-27 12:45:16,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.83 vs. 
limit=13.870999999999999 +2024-07-27 12:45:26,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=17002.666666666668, ans=5.5504 +2024-07-27 12:45:30,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=17002.666666666668, ans=0.125 +2024-07-27 12:45:37,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=17016.0, ans=0.0 +2024-07-27 12:45:37,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=17016.0, ans=0.007170434782608696 +2024-07-27 12:45:38,909 INFO [train.py:1114] (0/4) Epoch 2, batch 2550, loss[loss=0.3054, simple_loss=0.3561, pruned_loss=0.1273, over 4806.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3869, pruned_loss=0.1404, over 938791.56 frames. ], batch size: 11, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:42,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=17029.333333333332, ans=0.125 +2024-07-27 12:45:44,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=17029.333333333332, ans=0.125 +2024-07-27 12:45:47,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=17042.666666666668, ans=0.0 +2024-07-27 12:45:58,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=17042.666666666668, ans=0.125 +2024-07-27 12:46:01,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=13.891000000000002 +2024-07-27 12:46:02,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=17042.666666666668, ans=0.0 +2024-07-27 12:46:02,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=17042.666666666668, ans=0.00716463768115942 +2024-07-27 12:46:13,109 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.481e+01 6.949e+01 7.902e+01 1.029e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 12:46:15,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=17069.333333333332, ans=0.125 +2024-07-27 12:46:16,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17069.333333333332, ans=0.12930666666666668 +2024-07-27 12:46:18,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=17082.666666666668, ans=0.007155942028985507 +2024-07-27 12:46:18,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=17082.666666666668, ans=0.125 +2024-07-27 12:46:26,463 INFO [train.py:1114] (0/4) Epoch 2, batch 2600, loss[loss=0.3104, simple_loss=0.3608, pruned_loss=0.13, over 4894.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.388, pruned_loss=0.1409, over 937943.18 frames. 
], batch size: 13, lr: 3.14e-02, grad_scale: 32.0 +2024-07-27 12:46:28,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=17096.0, ans=0.0 +2024-07-27 12:46:39,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=17109.333333333332, ans=0.025 +2024-07-27 12:46:42,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.48 vs. limit=5.5684000000000005 +2024-07-27 12:46:46,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=17122.666666666668, ans=13.561333333333334 +2024-07-27 12:46:56,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=17136.0, ans=0.007144347826086956 +2024-07-27 12:46:59,184 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.19 vs. limit=20.352 +2024-07-27 12:47:07,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=17149.333333333332, ans=0.125 +2024-07-27 12:47:09,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=17149.333333333332, ans=0.0 +2024-07-27 12:47:21,229 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:47:23,230 INFO [train.py:1114] (0/4) Epoch 2, batch 2650, loss[loss=0.3034, simple_loss=0.3759, pruned_loss=0.1154, over 4648.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3874, pruned_loss=0.1404, over 939979.73 frames. ], batch size: 16, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:47:37,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.02 vs. limit=13.940999999999999 +2024-07-27 12:47:51,453 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.937e+01 6.612e+01 7.199e+01 8.016e+01 1.169e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 12:47:57,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.39 vs. limit=13.956 +2024-07-27 12:47:59,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=17216.0, ans=0.0 +2024-07-27 12:48:02,493 INFO [train.py:1114] (0/4) Epoch 2, batch 2700, loss[loss=0.3966, simple_loss=0.4346, pruned_loss=0.1793, over 4734.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.387, pruned_loss=0.1404, over 939690.78 frames. ], batch size: 14, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:48:03,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=17229.333333333332, ans=0.0 +2024-07-27 12:48:27,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.94 vs. 
limit=13.634666666666666 +2024-07-27 12:48:34,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=17282.666666666668, ans=0.007112463768115942 +2024-07-27 12:48:36,980 INFO [train.py:1114] (0/4) Epoch 2, batch 2750, loss[loss=0.3408, simple_loss=0.3932, pruned_loss=0.1442, over 4704.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3873, pruned_loss=0.141, over 939581.76 frames. ], batch size: 12, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:48:44,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17296.0, ans=0.125 +2024-07-27 12:48:52,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=17322.666666666668, ans=0.0 +2024-07-27 12:48:54,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=17322.666666666668, ans=0.07 +2024-07-27 12:48:56,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=17322.666666666668, ans=0.125 +2024-07-27 12:48:56,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=17322.666666666668, ans=0.29370666666666667 +2024-07-27 12:49:02,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.446e+01 6.464e+01 7.074e+01 8.489e+01 1.052e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-27 12:49:13,976 INFO [train.py:1114] (0/4) Epoch 2, batch 2800, loss[loss=0.5118, simple_loss=0.4879, pruned_loss=0.2679, over 3607.00 frames. ], tot_loss[loss=0.3368, simple_loss=0.3888, pruned_loss=0.1424, over 938003.98 frames. ], batch size: 35, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:49:14,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.54 vs. limit=20.522000000000002 +2024-07-27 12:49:25,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=17376.0, ans=0.125 +2024-07-27 12:49:48,560 INFO [train.py:1114] (0/4) Epoch 2, batch 2850, loss[loss=0.2989, simple_loss=0.3718, pruned_loss=0.113, over 4961.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3887, pruned_loss=0.142, over 936103.14 frames. 
], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:49:50,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17429.333333333332, ans=0.125 +2024-07-27 12:49:52,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=17429.333333333332, ans=0.0 +2024-07-27 12:50:11,762 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.590e+01 7.080e+01 8.267e+01 4.948e+02, threshold=1.416e+02, percent-clipped=1.0 +2024-07-27 12:50:12,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=17469.333333333332, ans=0.28857333333333346 +2024-07-27 12:50:30,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=17482.666666666668, ans=0.125 +2024-07-27 12:50:32,061 INFO [train.py:1114] (0/4) Epoch 2, batch 2900, loss[loss=0.3336, simple_loss=0.4011, pruned_loss=0.133, over 4831.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.3887, pruned_loss=0.1413, over 939858.95 frames. ], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:50:32,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=17496.0, ans=0.125 +2024-07-27 12:50:50,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=17522.666666666668, ans=0.0 +2024-07-27 12:50:50,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.77 vs. limit=14.071 +2024-07-27 12:50:57,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=17536.0, ans=0.125 +2024-07-27 12:50:58,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=17536.0, ans=0.125 +2024-07-27 12:51:03,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.64 vs. limit=13.774666666666665 +2024-07-27 12:51:10,547 INFO [train.py:1114] (0/4) Epoch 2, batch 2950, loss[loss=0.3393, simple_loss=0.3936, pruned_loss=0.1425, over 4698.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3874, pruned_loss=0.1412, over 938768.62 frames. ], batch size: 12, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:29,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=17589.333333333332, ans=0.125 +2024-07-27 12:51:38,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.523e+01 7.161e+01 8.021e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 12:51:40,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.41 vs. limit=14.100999999999999 +2024-07-27 12:51:40,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.39 vs. limit=14.100999999999999 +2024-07-27 12:51:49,462 INFO [train.py:1114] (0/4) Epoch 2, batch 3000, loss[loss=0.3228, simple_loss=0.3714, pruned_loss=0.1371, over 4764.00 frames. 
], tot_loss[loss=0.3342, simple_loss=0.3871, pruned_loss=0.1407, over 938501.60 frames. ], batch size: 13, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:49,463 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 12:52:02,763 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.2667, simple_loss=0.3583, pruned_loss=0.0876, over 944034.00 frames. +2024-07-27 12:52:02,765 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 12:52:20,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=17642.666666666668, ans=0.0 +2024-07-27 12:52:28,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.07 vs. limit=14.116 +2024-07-27 12:52:52,635 INFO [train.py:1114] (0/4) Epoch 2, batch 3050, loss[loss=0.2758, simple_loss=0.3381, pruned_loss=0.1068, over 4640.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.3873, pruned_loss=0.1411, over 937322.19 frames. ], batch size: 12, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:52:54,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=17696.0, ans=0.07 +2024-07-27 12:52:57,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=17696.0, ans=0.125 +2024-07-27 12:53:05,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=17709.333333333332, ans=0.2801733333333335 +2024-07-27 12:53:06,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=17709.333333333332, ans=0.025 +2024-07-27 12:53:16,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.68 vs. limit=13.868 +2024-07-27 12:53:18,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.442e+01 7.179e+01 7.661e+01 1.033e+02, threshold=1.436e+02, percent-clipped=0.0 +2024-07-27 12:53:20,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=17736.0, ans=0.46604 +2024-07-27 12:53:28,098 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.90 vs. limit=9.437333333333333 +2024-07-27 12:53:29,166 INFO [train.py:1114] (0/4) Epoch 2, batch 3100, loss[loss=0.3347, simple_loss=0.3847, pruned_loss=0.1424, over 4628.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3863, pruned_loss=0.1407, over 937921.10 frames. ], batch size: 16, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:53:35,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17776.0, ans=0.125 +2024-07-27 12:53:36,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=17776.0, ans=0.46664 +2024-07-27 12:53:36,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=17776.0, ans=0.125 +2024-07-27 12:53:44,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. 
limit=5.6684 +2024-07-27 12:53:53,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17802.666666666668, ans=0.125 +2024-07-27 12:54:03,850 INFO [train.py:1114] (0/4) Epoch 2, batch 3150, loss[loss=0.3561, simple_loss=0.4066, pruned_loss=0.1527, over 4621.00 frames. ], tot_loss[loss=0.332, simple_loss=0.3851, pruned_loss=0.1395, over 937921.54 frames. ], batch size: 17, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:54:06,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=17829.333333333332, ans=0.125 +2024-07-27 12:54:06,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=17829.333333333332, ans=0.07 +2024-07-27 12:54:13,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=17842.666666666668, ans=0.125 +2024-07-27 12:54:22,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=17856.0, ans=0.125 +2024-07-27 12:54:27,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.385e+01 6.845e+01 7.954e+01 1.765e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-27 12:54:27,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=17869.333333333332, ans=0.025 +2024-07-27 12:54:28,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=17869.333333333332, ans=0.125 +2024-07-27 12:54:38,115 INFO [train.py:1114] (0/4) Epoch 2, batch 3200, loss[loss=0.2842, simple_loss=0.3528, pruned_loss=0.1078, over 4832.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.3848, pruned_loss=0.139, over 939397.32 frames. ], batch size: 13, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:54:48,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=17896.0, ans=0.125 +2024-07-27 12:54:59,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17922.666666666668, ans=0.125 +2024-07-27 12:55:08,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=17936.0, ans=0.006970434782608696 +2024-07-27 12:55:12,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=17949.333333333332, ans=0.9294933333333333 +2024-07-27 12:55:16,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17962.666666666668, ans=0.12037333333333333 +2024-07-27 12:55:16,929 INFO [train.py:1114] (0/4) Epoch 2, batch 3250, loss[loss=0.35, simple_loss=0.401, pruned_loss=0.1495, over 4929.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3843, pruned_loss=0.1386, over 940348.13 frames. 
], batch size: 14, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:55:37,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=17962.666666666668, ans=0.125 +2024-07-27 12:55:43,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=17976.0, ans=0.125 +2024-07-27 12:55:49,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17976.0, ans=0.125 +2024-07-27 12:55:51,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=17989.333333333332, ans=0.006958840579710145 +2024-07-27 12:55:58,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.20 vs. limit=21.002000000000002 +2024-07-27 12:56:00,162 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.105e+01 6.706e+01 7.327e+01 8.227e+01 1.129e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 12:56:11,172 INFO [train.py:1114] (0/4) Epoch 2, batch 3300, loss[loss=0.3728, simple_loss=0.4248, pruned_loss=0.1604, over 4718.00 frames. ], tot_loss[loss=0.3303, simple_loss=0.3841, pruned_loss=0.1383, over 940665.53 frames. ], batch size: 19, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:56:14,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18029.333333333332, ans=0.125 +2024-07-27 12:56:16,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=18029.333333333332, ans=0.0 +2024-07-27 12:56:19,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=18042.666666666668, ans=0.125 +2024-07-27 12:56:28,756 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.57 vs. limit=14.028 +2024-07-27 12:56:36,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=18069.333333333332, ans=0.0 +2024-07-27 12:56:39,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=18082.666666666668, ans=0.2671066666666667 +2024-07-27 12:56:47,388 INFO [train.py:1114] (0/4) Epoch 2, batch 3350, loss[loss=0.4016, simple_loss=0.4477, pruned_loss=0.1778, over 4637.00 frames. ], tot_loss[loss=0.3324, simple_loss=0.3858, pruned_loss=0.1395, over 938716.03 frames. ], batch size: 17, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:57:01,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18122.666666666668, ans=0.11877333333333331 +2024-07-27 12:57:10,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=18136.0, ans=0.0 +2024-07-27 12:57:10,889 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.417e+01 6.714e+01 7.318e+01 8.136e+01 2.148e+02, threshold=1.464e+02, percent-clipped=2.0 +2024-07-27 12:57:22,069 INFO [train.py:1114] (0/4) Epoch 2, batch 3400, loss[loss=0.2952, simple_loss=0.354, pruned_loss=0.1182, over 4809.00 frames. ], tot_loss[loss=0.3304, simple_loss=0.3838, pruned_loss=0.1385, over 937164.28 frames. 
], batch size: 11, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:57:31,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=18176.0, ans=0.125 +2024-07-27 12:58:03,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18216.0, ans=0.0 +2024-07-27 12:58:07,981 INFO [train.py:1114] (0/4) Epoch 2, batch 3450, loss[loss=0.3527, simple_loss=0.41, pruned_loss=0.1477, over 4707.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3845, pruned_loss=0.1385, over 937677.63 frames. ], batch size: 19, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:58:23,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=18256.0, ans=21.192 +2024-07-27 12:58:31,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=18269.333333333332, ans=0.26057333333333343 +2024-07-27 12:58:35,300 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.586e+01 6.989e+01 7.796e+01 1.302e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 12:58:36,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=18269.333333333332, ans=0.125 +2024-07-27 12:58:50,976 INFO [train.py:1114] (0/4) Epoch 2, batch 3500, loss[loss=0.3068, simple_loss=0.3612, pruned_loss=0.1262, over 4932.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3836, pruned_loss=0.1377, over 938222.40 frames. ], batch size: 12, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:59:14,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.63 vs. limit=21.242 +2024-07-27 12:59:21,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=18336.0, ans=0.125 +2024-07-27 12:59:25,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18349.333333333332, ans=0.11650666666666668 +2024-07-27 12:59:29,547 INFO [train.py:1114] (0/4) Epoch 2, batch 3550, loss[loss=0.3776, simple_loss=0.4231, pruned_loss=0.166, over 4665.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.383, pruned_loss=0.137, over 938430.00 frames. ], batch size: 14, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 12:59:38,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.51 vs. 
limit=9.594000000000001 +2024-07-27 12:59:53,862 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+01 6.416e+01 6.884e+01 7.445e+01 1.050e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 12:59:56,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=18402.666666666668, ans=0.2559066666666667 +2024-07-27 12:59:56,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=18402.666666666668, ans=0.2559066666666667 +2024-07-27 12:59:58,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18416.0, ans=0.11584 +2024-07-27 12:59:58,504 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.62 vs. limit=5.7623999999999995 +2024-07-27 13:00:03,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=18416.0, ans=0.2554400000000001 +2024-07-27 13:00:04,806 INFO [train.py:1114] (0/4) Epoch 2, batch 3600, loss[loss=0.3099, simple_loss=0.3695, pruned_loss=0.1252, over 4964.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.382, pruned_loss=0.1359, over 940166.42 frames. ], batch size: 13, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 13:00:18,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.78 vs. limit=11.3824 +2024-07-27 13:00:25,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.28 vs. limit=14.426 +2024-07-27 13:00:30,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=18469.333333333332, ans=0.125 +2024-07-27 13:00:35,495 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:00:46,939 INFO [train.py:1114] (0/4) Epoch 2, batch 3650, loss[loss=0.316, simple_loss=0.3796, pruned_loss=0.1262, over 4910.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3816, pruned_loss=0.1358, over 940869.93 frames. ], batch size: 15, lr: 3.04e-02, grad_scale: 32.0 +2024-07-27 13:01:18,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.55 vs. limit=9.634 +2024-07-27 13:01:20,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.247e+01 6.612e+01 7.129e+01 7.786e+01 1.024e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 13:01:23,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18536.0, ans=0.125 +2024-07-27 13:01:29,140 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:01:32,268 INFO [train.py:1114] (0/4) Epoch 2, batch 3700, loss[loss=0.3023, simple_loss=0.3555, pruned_loss=0.1245, over 4930.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3815, pruned_loss=0.1355, over 941940.44 frames. 
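+
+The optim.py WARNING lines report adaptive gradient clipping: the five grad-norm quartile values (min, 25%, median, 75%, max) over recent batches, the clipping threshold, and the percentage of batches clipped. A hedged sketch of the mechanism, assuming the threshold is Clipping_scale times a robust statistic of a sliding window of norms; the exact optim.py rule may differ:
+
+    import torch
+
+    def clip_with_diagnostics(params, recent_norms, clipping_scale=2.0):
+        grads = [p.grad.detach().flatten() for p in params if p.grad is not None]
+        norm = torch.cat(grads).norm()
+        recent_norms.append(norm.item())
+        del recent_norms[:-128]  # keep a sliding window of recent norms
+        q = torch.quantile(torch.tensor(recent_norms),
+                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
+        threshold = clipping_scale * q[2]  # scale times the median norm
+        if norm > threshold:
+            for p in params:
+                if p.grad is not None:
+                    p.grad.mul_(threshold / norm)
+        return q, threshold
+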
], batch size: 14, lr: 3.04e-02, grad_scale: 32.0 +2024-07-27 13:01:33,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=18562.666666666668, ans=0.0 +2024-07-27 13:01:33,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=18562.666666666668, ans=0.125 +2024-07-27 13:01:42,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=18576.0, ans=0.02 +2024-07-27 13:01:44,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18576.0, ans=0.125 +2024-07-27 13:01:46,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=18589.333333333332, ans=0.125 +2024-07-27 13:01:46,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18589.333333333332, ans=0.125 +2024-07-27 13:01:55,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18602.666666666668, ans=0.0 +2024-07-27 13:02:11,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=18602.666666666668, ans=0.125 +2024-07-27 13:02:25,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18629.333333333332, ans=0.11370666666666668 +2024-07-27 13:02:25,808 INFO [train.py:1114] (0/4) Epoch 2, batch 3750, loss[loss=0.2911, simple_loss=0.3383, pruned_loss=0.122, over 4798.00 frames. ], tot_loss[loss=0.3272, simple_loss=0.382, pruned_loss=0.1362, over 943415.14 frames. ], batch size: 11, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:03:00,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.29 vs. limit=5.7984 +2024-07-27 13:03:38,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.486e+01 7.051e+01 7.963e+01 1.237e+02, threshold=1.410e+02, percent-clipped=0.0 +2024-07-27 13:03:40,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=18669.333333333332, ans=0.125 +2024-07-27 13:03:42,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18682.666666666668, ans=0.11317333333333332 +2024-07-27 13:03:44,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=5.8024000000000004 +2024-07-27 13:04:17,945 INFO [train.py:1114] (0/4) Epoch 2, batch 3800, loss[loss=0.2992, simple_loss=0.3661, pruned_loss=0.1162, over 4807.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3814, pruned_loss=0.1364, over 941694.64 frames. ], batch size: 14, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:04:23,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.31 vs. 
limit=21.522 +2024-07-27 13:04:26,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=18709.333333333332, ans=0.125 +2024-07-27 13:05:11,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=18722.666666666668, ans=0.24470666666666663 +2024-07-27 13:05:35,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=27.88 vs. limit=14.526 +2024-07-27 13:06:49,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=18736.0, ans=0.24424000000000012 +2024-07-27 13:07:02,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.51 vs. limit=9.687333333333333 +2024-07-27 13:07:03,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.58 vs. limit=9.687333333333333 +2024-07-27 13:07:10,270 INFO [train.py:1114] (0/4) Epoch 2, batch 3850, loss[loss=0.3387, simple_loss=0.403, pruned_loss=0.1372, over 4860.00 frames. ], tot_loss[loss=0.3273, simple_loss=0.3821, pruned_loss=0.1362, over 942471.99 frames. ], batch size: 17, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:07:47,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=18776.0, ans=0.125 +2024-07-27 13:08:14,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18789.333333333332, ans=0.0 +2024-07-27 13:08:17,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.36 vs. limit=14.546 +2024-07-27 13:08:23,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.367e+01 6.538e+01 7.102e+01 7.754e+01 1.153e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 13:08:30,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=18802.666666666668, ans=0.006782028985507246 +2024-07-27 13:08:36,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=18816.0, ans=0.125 +2024-07-27 13:08:38,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=18816.0, ans=0.0 +2024-07-27 13:08:41,622 INFO [train.py:1114] (0/4) Epoch 2, batch 3900, loss[loss=0.3451, simple_loss=0.4168, pruned_loss=0.1367, over 4806.00 frames. ], tot_loss[loss=0.3282, simple_loss=0.3831, pruned_loss=0.1366, over 942783.93 frames. ], batch size: 14, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:08:47,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=18829.333333333332, ans=0.0 +2024-07-27 13:08:59,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18842.666666666668, ans=0.125 +2024-07-27 13:09:32,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=26.05 vs. 
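+
+The Whitening lines compare a per-module metric against a scheduled limit: the metric measures how far the channel covariance of an activation is from isotropic (1.0 when the covariance is white, larger when variance concentrates in a few directions), and a corrective gradient is applied only when it exceeds the limit. A sketch of one plausible form of the metric, written as an assumption rather than the exact scaling.py code:
+
+    import torch
+
+    def whitening_metric(x: torch.Tensor) -> float:
+        """x: (num_frames, num_channels) activations for one group."""
+        x = x - x.mean(dim=0)
+        cov = (x.T @ x) / x.shape[0]
+        num_channels = x.shape[1]
+        # trace(cov @ cov) * C / trace(cov)**2 == 1 iff all eigenvalues are equal
+        return (torch.trace(cov @ cov) * num_channels / torch.trace(cov) ** 2).item()
+
+    whitening_metric(torch.randn(1000, 512))  # close to 1.0 for white noise
+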
limit=14.576 +2024-07-27 13:09:34,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.47 vs. limit=14.576 +2024-07-27 13:09:42,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18869.333333333332, ans=0.11130666666666666 +2024-07-27 13:09:44,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.50 vs. limit=21.652 +2024-07-27 13:09:52,411 INFO [train.py:1114] (0/4) Epoch 2, batch 3950, loss[loss=0.363, simple_loss=0.4166, pruned_loss=0.1548, over 4825.00 frames. ], tot_loss[loss=0.3262, simple_loss=0.3814, pruned_loss=0.1354, over 944613.98 frames. ], batch size: 16, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:09:54,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=18896.0, ans=0.125 +2024-07-27 13:10:06,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=18909.333333333332, ans=0.006758840579710145 +2024-07-27 13:10:07,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=18909.333333333332, ans=0.125 +2024-07-27 13:10:22,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18922.666666666668, ans=0.125 +2024-07-27 13:10:27,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.74 vs. limit=14.600999999999999 +2024-07-27 13:10:28,274 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.314e+01 6.596e+01 7.241e+01 7.988e+01 1.615e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 13:10:35,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=18949.333333333332, ans=0.025 +2024-07-27 13:11:04,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18962.666666666668, ans=0.125 +2024-07-27 13:11:05,246 INFO [train.py:1114] (0/4) Epoch 2, batch 4000, loss[loss=0.2765, simple_loss=0.3323, pruned_loss=0.1103, over 4784.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3819, pruned_loss=0.1359, over 940717.22 frames. 
], batch size: 12, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:11:17,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=18976.0, ans=0.006744347826086957 +2024-07-27 13:11:23,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=18989.333333333332, ans=0.09899494936611666 +2024-07-27 13:11:32,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=19002.666666666668, ans=0.025 +2024-07-27 13:11:33,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=19002.666666666668, ans=0.006738550724637681 +2024-07-27 13:11:35,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=19016.0, ans=0.125 +2024-07-27 13:12:02,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=14.631 +2024-07-27 13:12:03,971 INFO [train.py:1114] (0/4) Epoch 2, batch 4050, loss[loss=0.4149, simple_loss=0.4349, pruned_loss=0.1975, over 3437.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3814, pruned_loss=0.1358, over 939355.49 frames. ], batch size: 35, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:11,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=19042.666666666668, ans=0.125 +2024-07-27 13:12:16,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=19042.666666666668, ans=0.125 +2024-07-27 13:12:16,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=19056.0, ans=0.0 +2024-07-27 13:12:17,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=19056.0, ans=0.025 +2024-07-27 13:12:19,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=19056.0, ans=0.94056 +2024-07-27 13:12:21,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.63 vs. limit=14.528 +2024-07-27 13:12:21,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=19056.0, ans=14.646 +2024-07-27 13:12:26,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=14.651 +2024-07-27 13:12:27,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.071e+01 6.599e+01 7.309e+01 8.116e+01 1.221e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 13:12:33,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=14.655999999999999 +2024-07-27 13:12:39,185 INFO [train.py:1114] (0/4) Epoch 2, batch 4100, loss[loss=0.3293, simple_loss=0.384, pruned_loss=0.1373, over 4903.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.382, pruned_loss=0.1367, over 938346.22 frames. 
], batch size: 15, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:49,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=19109.333333333332, ans=0.48663999999999996 +2024-07-27 13:13:01,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=19136.0, ans=0.125 +2024-07-27 13:13:12,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.01 vs. limit=9.787333333333333 +2024-07-27 13:13:14,989 INFO [train.py:1114] (0/4) Epoch 2, batch 4150, loss[loss=0.2988, simple_loss=0.3641, pruned_loss=0.1168, over 4831.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3808, pruned_loss=0.1351, over 937831.56 frames. ], batch size: 13, lr: 3.00e-02, grad_scale: 32.0 +2024-07-27 13:13:19,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=19162.666666666668, ans=0.0 +2024-07-27 13:13:30,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.01 vs. limit=21.892 +2024-07-27 13:13:32,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=19189.333333333332, ans=0.05 +2024-07-27 13:13:44,561 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.337e+01 6.945e+01 7.844e+01 2.237e+02, threshold=1.389e+02, percent-clipped=1.0 +2024-07-27 13:13:53,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=19216.0, ans=0.04949747468305833 +2024-07-27 13:13:57,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=19216.0, ans=0.2 +2024-07-27 13:13:58,216 INFO [train.py:1114] (0/4) Epoch 2, batch 4200, loss[loss=0.3007, simple_loss=0.3714, pruned_loss=0.115, over 4911.00 frames. ], tot_loss[loss=0.3264, simple_loss=0.3819, pruned_loss=0.1354, over 939320.05 frames. ], batch size: 15, lr: 3.00e-02, grad_scale: 32.0 +2024-07-27 13:14:01,237 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:14:02,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=19229.333333333332, ans=0.125 +2024-07-27 13:14:06,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=14.716000000000001 +2024-07-27 13:14:06,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=19242.666666666668, ans=0.0 +2024-07-27 13:14:32,757 INFO [train.py:1114] (0/4) Epoch 2, batch 4250, loss[loss=0.2959, simple_loss=0.3467, pruned_loss=0.1226, over 4633.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3815, pruned_loss=0.1354, over 940734.37 frames. 
], batch size: 12, lr: 2.99e-02, grad_scale: 32.0 +2024-07-27 13:14:32,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=19296.0, ans=0.006674782608695652 +2024-07-27 13:14:49,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19322.666666666668, ans=0.125 +2024-07-27 13:14:55,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.180e+01 6.301e+01 6.853e+01 7.797e+01 1.151e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-27 13:15:06,781 INFO [train.py:1114] (0/4) Epoch 2, batch 4300, loss[loss=0.2669, simple_loss=0.3419, pruned_loss=0.09597, over 4767.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3813, pruned_loss=0.1355, over 940435.31 frames. ], batch size: 13, lr: 2.99e-02, grad_scale: 32.0 +2024-07-27 13:15:10,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=19362.666666666668, ans=0.125 +2024-07-27 13:15:22,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.64 vs. limit=14.766 +2024-07-27 13:15:31,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=19402.666666666668, ans=0.125 +2024-07-27 13:15:33,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=19402.666666666668, ans=0.125 +2024-07-27 13:15:36,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19402.666666666668, ans=0.125 +2024-07-27 13:15:42,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=19416.0, ans=0.035 +2024-07-27 13:15:45,767 INFO [train.py:1114] (0/4) Epoch 2, batch 4350, loss[loss=0.2985, simple_loss=0.349, pruned_loss=0.124, over 4765.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.3825, pruned_loss=0.1364, over 941472.26 frames. ], batch size: 13, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:15:51,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=19429.333333333332, ans=0.006645797101449276 +2024-07-27 13:15:53,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=7.888533333333333 +2024-07-27 13:15:54,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19442.666666666668, ans=0.0 +2024-07-27 13:15:54,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=19442.666666666668, ans=0.0 +2024-07-27 13:15:54,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19442.666666666668, ans=0.125 +2024-07-27 13:16:03,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.22 vs. 
limit=14.796 +2024-07-27 13:16:07,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19469.333333333332, ans=0.125 +2024-07-27 13:16:08,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.72 vs. limit=11.787733333333332 +2024-07-27 13:16:09,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.368e+01 6.866e+01 7.654e+01 1.225e+02, threshold=1.373e+02, percent-clipped=0.0 +2024-07-27 13:16:13,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=19482.666666666668, ans=0.125 +2024-07-27 13:16:21,979 INFO [train.py:1114] (0/4) Epoch 2, batch 4400, loss[loss=0.2947, simple_loss=0.3677, pruned_loss=0.1109, over 4810.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3822, pruned_loss=0.136, over 940988.66 frames. ], batch size: 14, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:16:23,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=19496.0, ans=0.125 +2024-07-27 13:16:25,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=19496.0, ans=0.125 +2024-07-27 13:16:28,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=14.815999999999999 +2024-07-27 13:16:30,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=19509.333333333332, ans=0.125 +2024-07-27 13:16:31,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.07 vs. limit=5.9264 +2024-07-27 13:16:34,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=19509.333333333332, ans=0.9450933333333332 +2024-07-27 13:16:35,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=14.821 +2024-07-27 13:16:38,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=19522.666666666668, ans=0.07 +2024-07-27 13:16:51,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=19549.333333333332, ans=0.0 +2024-07-27 13:16:55,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=19549.333333333332, ans=0.125 +2024-07-27 13:16:58,311 INFO [train.py:1114] (0/4) Epoch 2, batch 4450, loss[loss=0.2774, simple_loss=0.334, pruned_loss=0.1105, over 4941.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3815, pruned_loss=0.1362, over 939054.46 frames. ], batch size: 12, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:17:07,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=19576.0, ans=0.21484000000000003 +2024-07-27 13:17:10,188 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.32 vs. 
limit=7.9152000000000005 +2024-07-27 13:17:14,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19589.333333333332, ans=0.125 +2024-07-27 13:17:15,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=19589.333333333332, ans=0.21437333333333342 +2024-07-27 13:17:20,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.64 vs. limit=14.846 +2024-07-27 13:17:20,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=19589.333333333332, ans=0.125 +2024-07-27 13:17:31,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=19602.666666666668, ans=0.2139066666666667 +2024-07-27 13:17:31,925 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.227e+01 6.384e+01 6.851e+01 7.779e+01 1.148e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-27 13:17:34,207 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:17:37,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=19616.0, ans=0.125 +2024-07-27 13:17:38,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=19616.0, ans=0.006605217391304348 +2024-07-27 13:17:39,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=19616.0, ans=0.0 +2024-07-27 13:17:42,968 INFO [train.py:1114] (0/4) Epoch 2, batch 4500, loss[loss=0.3322, simple_loss=0.4041, pruned_loss=0.1301, over 4748.00 frames. ], tot_loss[loss=0.328, simple_loss=0.3829, pruned_loss=0.1365, over 938460.43 frames. ], batch size: 14, lr: 2.97e-02, grad_scale: 32.0 +2024-07-27 13:17:51,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19642.666666666668, ans=0.10357333333333332 +2024-07-27 13:18:00,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19656.0, ans=0.125 +2024-07-27 13:18:09,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=19682.666666666668, ans=0.006590724637681159 +2024-07-27 13:18:17,182 INFO [train.py:1114] (0/4) Epoch 2, batch 4550, loss[loss=0.3194, simple_loss=0.3939, pruned_loss=0.1224, over 4890.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.3823, pruned_loss=0.1363, over 940232.72 frames. 
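+
+Each train.py record reports two losses: loss[...] for the current batch alone and tot_loss[...] as a running, frame-weighted average over recent batches, which is why its "over N frames" count hovers near the full ~940k-frame window. A minimal sketch of such a decayed, frame-weighted average; the decay constant here is an assumption:
+
+    def update_tot_loss(tot, batch, decay=0.99):
+        """tot, batch: dicts with keys "loss" and "frames"."""
+        frames = decay * tot["frames"] + batch["frames"]
+        loss = (decay * tot["loss"] * tot["frames"]
+                + batch["loss"] * batch["frames"]) / frames
+        return {"loss": loss, "frames": frames}
+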
], batch size: 13, lr: 2.97e-02, grad_scale: 32.0 +2024-07-27 13:18:21,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19696.0, ans=0.10303999999999999 +2024-07-27 13:18:26,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=19709.333333333332, ans=0.21017333333333343 +2024-07-27 13:18:26,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=19709.333333333332, ans=0.21017333333333343 +2024-07-27 13:18:29,876 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:18:38,041 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:18:43,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 6.563e+01 7.303e+01 8.334e+01 1.051e+02, threshold=1.461e+02, percent-clipped=0.0 +2024-07-27 13:18:50,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.90 vs. limit=14.905999999999999 +2024-07-27 13:18:57,898 INFO [train.py:1114] (0/4) Epoch 2, batch 4600, loss[loss=0.3349, simple_loss=0.3887, pruned_loss=0.1405, over 4585.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3817, pruned_loss=0.1356, over 938794.53 frames. ], batch size: 21, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:19:02,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=19762.666666666668, ans=0.10237333333333332 +2024-07-27 13:19:03,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19762.666666666668, ans=0.0 +2024-07-27 13:19:08,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19776.0, ans=0.125 +2024-07-27 13:19:09,921 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.28 vs. limit=22.332 +2024-07-27 13:19:15,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=19789.333333333332, ans=0.0 +2024-07-27 13:19:18,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19802.666666666668, ans=0.125 +2024-07-27 13:19:23,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=19802.666666666668, ans=0.125 +2024-07-27 13:19:31,774 INFO [train.py:1114] (0/4) Epoch 2, batch 4650, loss[loss=0.3381, simple_loss=0.397, pruned_loss=0.1396, over 4841.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3815, pruned_loss=0.1348, over 940524.86 frames. 
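+
+Note that grad_scale doubles from 32.0 to 64.0 at batch 4600 above: under mixed-precision training the loss is multiplied by a scale that grows after a run of overflow-free steps and shrinks on overflow. With stock PyTorch AMP this behavior comes from torch.cuda.amp.GradScaler; a usage sketch, though the training loop here does not necessarily use these exact settings:
+
+    import torch
+
+    scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_factor=2.0,
+                                       growth_interval=2000)
+    # per step: scaler.scale(loss).backward()
+    #           scaler.step(optimizer); scaler.update()
+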
], batch size: 16, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:19:35,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=19829.333333333332, ans=0.125 +2024-07-27 13:19:39,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=19842.666666666668, ans=0.20550666666666673 +2024-07-27 13:19:40,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=19842.666666666668, ans=0.0 +2024-07-27 13:19:42,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=5.9764 +2024-07-27 13:19:42,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=19842.666666666668, ans=0.125 +2024-07-27 13:19:50,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=19856.0, ans=0.125 +2024-07-27 13:19:51,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=19856.0, ans=0.125 +2024-07-27 13:19:53,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=19869.333333333332, ans=0.0 +2024-07-27 13:19:54,897 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+01 6.585e+01 7.200e+01 8.002e+01 1.335e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 13:20:04,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=19882.666666666668, ans=0.006547246376811594 +2024-07-27 13:20:06,198 INFO [train.py:1114] (0/4) Epoch 2, batch 4700, loss[loss=0.2898, simple_loss=0.3422, pruned_loss=0.1187, over 4699.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.381, pruned_loss=0.1338, over 937540.29 frames. ], batch size: 11, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:20:09,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=19896.0, ans=0.0065443478260869565 +2024-07-27 13:20:10,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19896.0, ans=0.125 +2024-07-27 13:20:45,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=19936.0, ans=0.0 +2024-07-27 13:20:46,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=19936.0, ans=0.07 +2024-07-27 13:20:47,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=11.9744 +2024-07-27 13:20:55,561 INFO [train.py:1114] (0/4) Epoch 2, batch 4750, loss[loss=0.403, simple_loss=0.4382, pruned_loss=0.1839, over 4516.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3817, pruned_loss=0.1346, over 935523.68 frames. ], batch size: 21, lr: 2.95e-02, grad_scale: 64.0 +2024-07-27 13:20:58,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.69 vs. 
limit=14.986 +2024-07-27 13:21:10,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.94 vs. limit=11.995733333333334 +2024-07-27 13:21:18,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=20002.666666666668, ans=22.5 +2024-07-27 13:21:19,841 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.354e+01 6.910e+01 7.839e+01 1.849e+02, threshold=1.382e+02, percent-clipped=1.0 +2024-07-27 13:21:20,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=20002.666666666668, ans=0.025 +2024-07-27 13:21:29,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.21 vs. limit=12.0 +2024-07-27 13:21:31,074 INFO [train.py:1114] (0/4) Epoch 2, batch 4800, loss[loss=0.3203, simple_loss=0.3957, pruned_loss=0.1224, over 4695.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3814, pruned_loss=0.1356, over 932597.98 frames. ], batch size: 13, lr: 2.95e-02, grad_scale: 64.0 +2024-07-27 13:21:40,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=20042.666666666668, ans=0.006512463768115942 +2024-07-27 13:21:53,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=20069.333333333332, ans=0.0065066666666666675 +2024-07-27 13:21:56,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.50 vs. limit=15.0 +2024-07-27 13:21:59,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=20082.666666666668, ans=0.006503768115942029 +2024-07-27 13:22:05,293 INFO [train.py:1114] (0/4) Epoch 2, batch 4850, loss[loss=0.3472, simple_loss=0.4022, pruned_loss=0.1461, over 4745.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3814, pruned_loss=0.1358, over 932250.70 frames. ], batch size: 14, lr: 2.95e-02, grad_scale: 64.0 +2024-07-27 13:22:06,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20096.0, ans=0.1 +2024-07-27 13:22:09,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20096.0, ans=0.125 +2024-07-27 13:22:17,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20109.333333333332, ans=0.125 +2024-07-27 13:22:33,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.340e+01 6.424e+01 6.890e+01 7.552e+01 1.246e+02, threshold=1.378e+02, percent-clipped=0.0 +2024-07-27 13:22:42,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=20149.333333333332, ans=0.125 +2024-07-27 13:22:45,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=20149.333333333332, ans=0.125 +2024-07-27 13:22:46,562 INFO [train.py:1114] (0/4) Epoch 2, batch 4900, loss[loss=0.3431, simple_loss=0.3834, pruned_loss=0.1514, over 4758.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.381, pruned_loss=0.135, over 934047.99 frames. 
], batch size: 13, lr: 2.94e-02, grad_scale: 64.0 +2024-07-27 13:22:51,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.60 vs. limit=10.0 +2024-07-27 13:22:55,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20176.0, ans=0.1 +2024-07-27 13:23:12,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20202.666666666668, ans=0.125 +2024-07-27 13:23:15,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20202.666666666668, ans=0.125 +2024-07-27 13:23:27,089 INFO [train.py:1114] (0/4) Epoch 2, batch 4950, loss[loss=0.3546, simple_loss=0.3902, pruned_loss=0.1595, over 3479.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3816, pruned_loss=0.136, over 931386.18 frames. ], batch size: 36, lr: 2.94e-02, grad_scale: 64.0 +2024-07-27 13:23:27,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=20229.333333333332, ans=0.0 +2024-07-27 13:23:31,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=20229.333333333332, ans=0.125 +2024-07-27 13:23:40,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=20256.0, ans=0.025 +2024-07-27 13:23:50,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.55 vs. limit=22.5 +2024-07-27 13:23:51,058 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.541e+01 7.146e+01 7.949e+01 1.013e+02, threshold=1.429e+02, percent-clipped=0.0 +2024-07-27 13:23:56,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20282.666666666668, ans=0.125 +2024-07-27 13:23:56,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20282.666666666668, ans=0.125 +2024-07-27 13:24:02,002 INFO [train.py:1114] (0/4) Epoch 2, batch 5000, loss[loss=0.3351, simple_loss=0.3993, pruned_loss=0.1354, over 4665.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.3814, pruned_loss=0.135, over 935179.57 frames. ], batch size: 14, lr: 2.93e-02, grad_scale: 64.0 +2024-07-27 13:24:08,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=20309.333333333332, ans=0.95 +2024-07-27 13:24:13,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=20309.333333333332, ans=0.125 +2024-07-27 13:24:15,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=20322.666666666668, ans=0.125 +2024-07-27 13:24:25,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.90 vs. 
limit=15.0 +2024-07-27 13:24:26,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=20336.0, ans=0.125 +2024-07-27 13:24:36,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.53 vs. limit=12.0 +2024-07-27 13:24:36,497 INFO [train.py:1114] (0/4) Epoch 2, batch 5050, loss[loss=0.2783, simple_loss=0.343, pruned_loss=0.1068, over 4860.00 frames. ], tot_loss[loss=0.323, simple_loss=0.3789, pruned_loss=0.1336, over 937937.39 frames. ], batch size: 12, lr: 2.93e-02, grad_scale: 64.0 +2024-07-27 13:24:48,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.15 vs. limit=22.5 +2024-07-27 13:24:53,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.55 vs. limit=12.0 +2024-07-27 13:25:00,638 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.416e+01 7.117e+01 7.818e+01 1.344e+02, threshold=1.423e+02, percent-clipped=0.0 +2024-07-27 13:25:00,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=20402.666666666668, ans=0.0 +2024-07-27 13:25:00,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.34 vs. limit=22.5 +2024-07-27 13:25:03,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0 +2024-07-27 13:25:06,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=20416.0, ans=0.125 +2024-07-27 13:25:11,755 INFO [train.py:1114] (0/4) Epoch 2, batch 5100, loss[loss=0.3303, simple_loss=0.383, pruned_loss=0.1388, over 4776.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3798, pruned_loss=0.1345, over 935518.19 frames. 
], batch size: 12, lr: 2.93e-02, grad_scale: 64.0 +2024-07-27 13:25:14,802 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:25:30,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=20456.0, ans=0.125 +2024-07-27 13:25:31,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=20456.0, ans=0.125 +2024-07-27 13:25:38,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=20469.333333333332, ans=0.2 +2024-07-27 13:25:39,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20482.666666666668, ans=0.125 +2024-07-27 13:25:40,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20482.666666666668, ans=0.1 +2024-07-27 13:25:41,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20482.666666666668, ans=0.1 +2024-07-27 13:25:42,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=20482.666666666668, ans=0.125 +2024-07-27 13:25:45,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=20482.666666666668, ans=0.0 +2024-07-27 13:25:46,245 INFO [train.py:1114] (0/4) Epoch 2, batch 5150, loss[loss=0.3335, simple_loss=0.3913, pruned_loss=0.1378, over 4864.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3814, pruned_loss=0.1358, over 936386.50 frames. ], batch size: 16, lr: 2.92e-02, grad_scale: 64.0 +2024-07-27 13:25:51,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=20496.0, ans=0.125 +2024-07-27 13:26:00,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=20522.666666666668, ans=0.125 +2024-07-27 13:26:09,649 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.469e+01 6.536e+01 7.424e+01 8.253e+01 1.032e+02, threshold=1.485e+02, percent-clipped=0.0 +2024-07-27 13:26:09,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=20536.0, ans=0.006405217391304348 +2024-07-27 13:26:16,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20549.333333333332, ans=0.125 +2024-07-27 13:26:19,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=20549.333333333332, ans=0.006402318840579711 +2024-07-27 13:26:21,129 INFO [train.py:1114] (0/4) Epoch 2, batch 5200, loss[loss=0.3712, simple_loss=0.412, pruned_loss=0.1652, over 4670.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3803, pruned_loss=0.1349, over 936347.91 frames. 
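+
+The scaling.py WithLoss lines attach an auxiliary penalty to a named tensor (here various self_attn_weights) and log its accumulated value; loss-sum=0.000e+00 means the penalty is currently inactive. A generic, assumed sketch of the pattern, not the exact scaling.py implementation: identity in the forward pass, with the penalty's gradient folded in during backward:
+
+    import torch
+
+    class WithAuxLoss(torch.autograd.Function):
+        """Pass x through unchanged; add aux_grad to its gradient on backward."""
+        @staticmethod
+        def forward(ctx, x, aux_grad):
+            ctx.save_for_backward(aux_grad)
+            return x
+
+        @staticmethod
+        def backward(ctx, grad_out):
+            (aux_grad,) = ctx.saved_tensors
+            return grad_out + aux_grad, None
+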
], batch size: 14, lr: 2.92e-02, grad_scale: 64.0 +2024-07-27 13:26:21,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=20562.666666666668, ans=0.006399420289855072 +2024-07-27 13:26:21,253 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:26:23,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=20562.666666666668, ans=0.006399420289855072 +2024-07-27 13:26:33,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=20576.0, ans=0.0 +2024-07-27 13:26:34,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=20589.333333333332, ans=0.006393623188405797 +2024-07-27 13:26:34,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20589.333333333332, ans=0.125 +2024-07-27 13:26:40,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20589.333333333332, ans=0.125 +2024-07-27 13:26:47,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=20602.666666666668, ans=0.125 +2024-07-27 13:26:48,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=20616.0, ans=0.125 +2024-07-27 13:27:00,349 INFO [train.py:1114] (0/4) Epoch 2, batch 5250, loss[loss=0.319, simple_loss=0.3968, pruned_loss=0.1206, over 4901.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.3777, pruned_loss=0.1327, over 935699.54 frames. ], batch size: 13, lr: 2.91e-02, grad_scale: 64.0 +2024-07-27 13:27:06,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.01 vs. limit=22.5 +2024-07-27 13:27:09,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=20642.666666666668, ans=0.0 +2024-07-27 13:27:09,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=20642.666666666668, ans=0.025 +2024-07-27 13:27:17,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=20656.0, ans=22.5 +2024-07-27 13:27:23,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=20669.333333333332, ans=0.125 +2024-07-27 13:27:27,953 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 6.506e+01 7.005e+01 7.765e+01 1.418e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 13:27:29,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=20669.333333333332, ans=0.025 +2024-07-27 13:27:39,816 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.53 vs. limit=15.0 +2024-07-27 13:27:40,044 INFO [train.py:1114] (0/4) Epoch 2, batch 5300, loss[loss=0.3486, simple_loss=0.4228, pruned_loss=0.1372, over 4645.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3795, pruned_loss=0.1338, over 934943.87 frames. 
], batch size: 16, lr: 2.91e-02, grad_scale: 64.0 +2024-07-27 13:27:42,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=20696.0, ans=0.125 +2024-07-27 13:27:44,874 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:27:45,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=20696.0, ans=0.0 +2024-07-27 13:27:55,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20722.666666666668, ans=0.0 +2024-07-27 13:28:15,730 INFO [train.py:1114] (0/4) Epoch 2, batch 5350, loss[loss=0.2902, simple_loss=0.3526, pruned_loss=0.1139, over 4566.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3792, pruned_loss=0.1329, over 936884.71 frames. ], batch size: 10, lr: 2.91e-02, grad_scale: 64.0 +2024-07-27 13:28:15,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=20762.666666666668, ans=0.2 +2024-07-27 13:28:20,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=20762.666666666668, ans=0.025 +2024-07-27 13:28:24,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.99 vs. limit=22.5 +2024-07-27 13:28:27,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.66 vs. limit=15.0 +2024-07-27 13:28:41,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.375e+01 6.982e+01 7.841e+01 1.512e+02, threshold=1.396e+02, percent-clipped=1.0 +2024-07-27 13:28:51,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20816.0, ans=0.0 +2024-07-27 13:28:54,564 INFO [train.py:1114] (0/4) Epoch 2, batch 5400, loss[loss=0.3173, simple_loss=0.3896, pruned_loss=0.1225, over 4247.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3813, pruned_loss=0.1352, over 931599.32 frames. ], batch size: 26, lr: 2.90e-02, grad_scale: 64.0 +2024-07-27 13:28:58,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=20829.333333333332, ans=0.0 +2024-07-27 13:28:58,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.68 vs. limit=5.0 +2024-07-27 13:29:22,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.73 vs. limit=15.0 +2024-07-27 13:29:25,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=20882.666666666668, ans=0.006329855072463768 +2024-07-27 13:29:26,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. 
limit=6.0 +2024-07-27 13:29:27,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20882.666666666668, ans=0.1 +2024-07-27 13:29:30,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.18 vs. limit=15.0 +2024-07-27 13:29:31,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=20896.0, ans=0.125 +2024-07-27 13:29:31,780 INFO [train.py:1114] (0/4) Epoch 2, batch 5450, loss[loss=0.3266, simple_loss=0.3697, pruned_loss=0.1418, over 4705.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3799, pruned_loss=0.1344, over 934653.36 frames. ], batch size: 11, lr: 2.90e-02, grad_scale: 64.0 +2024-07-27 13:29:31,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=20896.0, ans=0.125 +2024-07-27 13:29:38,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=20909.333333333332, ans=0.125 +2024-07-27 13:29:49,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20922.666666666668, ans=0.1 +2024-07-27 13:29:50,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.51 vs. limit=15.0 +2024-07-27 13:29:55,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.377e+01 6.925e+01 7.766e+01 1.521e+02, threshold=1.385e+02, percent-clipped=1.0 +2024-07-27 13:30:06,879 INFO [train.py:1114] (0/4) Epoch 2, batch 5500, loss[loss=0.423, simple_loss=0.4488, pruned_loss=0.1986, over 4215.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3791, pruned_loss=0.1342, over 932336.62 frames. ], batch size: 25, lr: 2.90e-02, grad_scale: 64.0 +2024-07-27 13:30:07,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=20962.666666666668, ans=0.125 +2024-07-27 13:30:08,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.13 vs. 
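Each `Whitening` line compares a per-module whitening metric against a limit (e.g. `metric=16.01 vs. limit=22.5`); the limit is the level above which the module is presumed to push the activations back toward an isotropic distribution. A sketch of one plausible metric is below: perfectly "white" features (identity covariance) score 1.0, and a few dominant directions inflate the score. The exact formula in scaling.py may differ; this only shows the general idea.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """Spread of the feature covariance spectrum, per channel group:
    1.0 for isotropic features, larger when directions dominate."""
    n, c = x.shape                                      # (frames, channels)
    d = c // num_groups
    x = x.reshape(n, num_groups, d).permute(1, 0, 2)    # (groups, frames, d)
    cov = torch.matmul(x.transpose(1, 2), x) / n        # (groups, d, d)
    trace = cov.diagonal(dim1=1, dim2=2).sum(-1)        # sum of eigenvalues
    frob_sq = (cov * cov).sum(dim=(1, 2))               # sum of squared eigenvalues
    # ratio of mean squared eigenvalue to squared mean eigenvalue:
    return (frob_sq * d / trace.clamp(min=1e-20) ** 2).mean()

x = torch.randn(1000, 256)                  # roughly white features
print(whitening_metric(x, num_groups=1))    # close to 1.0; large values
                                            # would trigger the penalty
```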
limit=15.0 +2024-07-27 13:30:09,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=20962.666666666668, ans=0.0 +2024-07-27 13:30:13,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=20976.0, ans=0.0 +2024-07-27 13:30:17,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20976.0, ans=0.0 +2024-07-27 13:30:18,240 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:30:25,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=20989.333333333332, ans=0.125 +2024-07-27 13:30:30,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21002.666666666668, ans=0.125 +2024-07-27 13:30:38,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=21016.0, ans=0.2 +2024-07-27 13:30:39,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21016.0, ans=0.125 +2024-07-27 13:30:39,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=21016.0, ans=0.125 +2024-07-27 13:30:41,387 INFO [train.py:1114] (0/4) Epoch 2, batch 5550, loss[loss=0.265, simple_loss=0.3355, pruned_loss=0.09726, over 4701.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3794, pruned_loss=0.1345, over 934443.19 frames. ], batch size: 12, lr: 2.89e-02, grad_scale: 64.0 +2024-07-27 13:30:41,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=21029.333333333332, ans=0.125 +2024-07-27 13:30:46,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=15.0 +2024-07-27 13:30:48,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.38 vs. limit=6.0 +2024-07-27 13:30:50,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.32 vs. limit=5.0 +2024-07-27 13:31:00,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21056.0, ans=0.125 +2024-07-27 13:31:04,814 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.613e+01 7.499e+01 8.477e+01 2.130e+02, threshold=1.500e+02, percent-clipped=3.0 +2024-07-27 13:31:07,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21069.333333333332, ans=0.1 +2024-07-27 13:31:12,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=21.77 vs. 
limit=15.0 +2024-07-27 13:31:15,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=21082.666666666668, ans=15.0 +2024-07-27 13:31:15,950 INFO [train.py:1114] (0/4) Epoch 2, batch 5600, loss[loss=0.3824, simple_loss=0.4309, pruned_loss=0.1669, over 4747.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3785, pruned_loss=0.134, over 935207.66 frames. ], batch size: 14, lr: 2.89e-02, grad_scale: 64.0 +2024-07-27 13:31:23,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=21109.333333333332, ans=0.006280579710144928 +2024-07-27 13:31:27,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=21109.333333333332, ans=0.0 +2024-07-27 13:31:33,787 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.606e+00 +2024-07-27 13:31:37,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21136.0, ans=0.125 +2024-07-27 13:31:42,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=21136.0, ans=0.2 +2024-07-27 13:31:42,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=21136.0, ans=0.125 +2024-07-27 13:31:50,638 INFO [train.py:1114] (0/4) Epoch 2, batch 5650, loss[loss=0.2868, simple_loss=0.3504, pruned_loss=0.1115, over 4613.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.376, pruned_loss=0.1322, over 937972.17 frames. ], batch size: 21, lr: 2.88e-02, grad_scale: 64.0 +2024-07-27 13:31:55,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-27 13:31:57,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=21176.0, ans=0.006266086956521739 +2024-07-27 13:32:01,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=21176.0, ans=0.006266086956521739 +2024-07-27 13:32:02,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21176.0, ans=0.125 +2024-07-27 13:32:08,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.85 vs. 
limit=15.0 +2024-07-27 13:32:14,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.325e+01 6.816e+01 7.626e+01 1.168e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 13:32:14,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=21202.666666666668, ans=0.125 +2024-07-27 13:32:18,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=21216.0, ans=0.2 +2024-07-27 13:32:18,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21216.0, ans=0.1 +2024-07-27 13:32:24,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21216.0, ans=0.0 +2024-07-27 13:32:25,398 INFO [train.py:1114] (0/4) Epoch 2, batch 5700, loss[loss=0.3323, simple_loss=0.3792, pruned_loss=0.1427, over 4692.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3764, pruned_loss=0.1325, over 939126.62 frames. ], batch size: 13, lr: 2.88e-02, grad_scale: 64.0 +2024-07-27 13:32:27,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21229.333333333332, ans=0.1 +2024-07-27 13:32:31,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21229.333333333332, ans=0.125 +2024-07-27 13:32:38,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=21242.666666666668, ans=0.125 +2024-07-27 13:32:39,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0 +2024-07-27 13:32:45,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=21256.0, ans=0.125 +2024-07-27 13:32:52,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21269.333333333332, ans=0.1 +2024-07-27 13:33:05,787 INFO [train.py:1114] (0/4) Epoch 2, batch 5750, loss[loss=0.3302, simple_loss=0.3925, pruned_loss=0.1339, over 4734.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3773, pruned_loss=0.1324, over 938979.23 frames. ], batch size: 19, lr: 2.88e-02, grad_scale: 64.0 +2024-07-27 13:33:06,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21296.0, ans=0.1 +2024-07-27 13:33:09,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21296.0, ans=0.1 +2024-07-27 13:33:10,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.76 vs. 
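In the `Clipping_scale` warnings, the threshold is consistently the clipping scale times the median of the recent gradient-norm quartiles (just above, 2.0 × 6.816e+01 ≈ 1.363e+02), and `percent-clipped` is the share of recent batches whose norm exceeded it. The sketch below is reverse-engineered from those numbers rather than taken from optim.py, so details of the history window are assumptions.

```python
import torch
from collections import deque

class QuartileClipper:
    """Clip gradients to clipping_scale x the median of recent grad norms."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 1024):
        self.scale = clipping_scale
        self.norms = deque(maxlen=history)   # recent total gradient norms

    def clip_(self, params) -> float:
        params = [p for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.scale * median
        if norm > threshold:                 # scale the whole gradient down
            for p in params:
                p.grad.mul_(threshold / norm)
        return norm
```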
limit=10.0 +2024-07-27 13:33:14,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21309.333333333332, ans=0.125 +2024-07-27 13:33:15,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=21309.333333333332, ans=0.125 +2024-07-27 13:33:17,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0 +2024-07-27 13:33:24,502 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-16000.pt +2024-07-27 13:33:33,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21336.0, ans=0.1 +2024-07-27 13:33:34,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.471e+01 6.485e+01 7.135e+01 7.978e+01 1.224e+02, threshold=1.427e+02, percent-clipped=0.0 +2024-07-27 13:33:45,496 INFO [train.py:1114] (0/4) Epoch 2, batch 5800, loss[loss=0.371, simple_loss=0.4239, pruned_loss=0.159, over 4743.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3775, pruned_loss=0.1321, over 938218.28 frames. ], batch size: 19, lr: 2.87e-02, grad_scale: 64.0 +2024-07-27 13:33:49,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=21362.666666666668, ans=0.2 +2024-07-27 13:33:55,065 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:33:56,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=21376.0, ans=0.2 +2024-07-27 13:34:05,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=21389.333333333332, ans=0.125 +2024-07-27 13:34:24,440 INFO [train.py:1114] (0/4) Epoch 2, batch 5850, loss[loss=0.3566, simple_loss=0.4087, pruned_loss=0.1522, over 4453.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3771, pruned_loss=0.1313, over 938404.11 frames. ], batch size: 21, lr: 2.87e-02, grad_scale: 64.0 +2024-07-27 13:34:40,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.27 vs. limit=12.0 +2024-07-27 13:34:46,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.98 vs. limit=15.0 +2024-07-27 13:34:49,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 6.249e+01 6.870e+01 7.832e+01 1.003e+02, threshold=1.374e+02, percent-clipped=0.0 +2024-07-27 13:34:54,454 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.12 vs. limit=15.0 +2024-07-27 13:35:01,031 INFO [train.py:1114] (0/4) Epoch 2, batch 5900, loss[loss=0.3541, simple_loss=0.4134, pruned_loss=0.1474, over 4678.00 frames. ], tot_loss[loss=0.3195, simple_loss=0.3769, pruned_loss=0.131, over 938747.06 frames. ], batch size: 15, lr: 2.87e-02, grad_scale: 64.0 +2024-07-27 13:35:03,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.96 vs. 
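The `checkpoint.py` line above saves intermediate state to `zipformer/libri/exp/checkpoint-16000.pt`, presumably at a fixed interval of batches counted across epochs, in addition to the usual per-epoch checkpoints. A minimal stand-in for what such a helper stores is sketched below; the actual icefall helper also keeps sampler state and other bookkeeping.

```python
import torch

def save_checkpoint(filename, model, optimizer, scheduler, scaler, params):
    # Bundle everything needed to resume training mid-epoch.
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "grad_scaler": scaler.state_dict(),
            "params": params,
        },
        filename,
    )
```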
limit=15.0 +2024-07-27 13:35:09,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21509.333333333332, ans=0.1 +2024-07-27 13:35:13,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21509.333333333332, ans=0.1 +2024-07-27 13:35:13,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.36 vs. limit=10.0 +2024-07-27 13:35:25,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21536.0, ans=0.1 +2024-07-27 13:35:32,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=21549.333333333332, ans=0.09899494936611666 +2024-07-27 13:35:35,455 INFO [train.py:1114] (0/4) Epoch 2, batch 5950, loss[loss=0.3809, simple_loss=0.4248, pruned_loss=0.1685, over 4688.00 frames. ], tot_loss[loss=0.3187, simple_loss=0.3756, pruned_loss=0.1308, over 940654.32 frames. ], batch size: 15, lr: 2.86e-02, grad_scale: 64.0 +2024-07-27 13:35:42,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=21576.0, ans=0.125 +2024-07-27 13:35:46,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=21576.0, ans=0.1 +2024-07-27 13:35:59,414 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.345e+01 7.134e+01 8.050e+01 1.843e+02, threshold=1.427e+02, percent-clipped=1.0 +2024-07-27 13:36:19,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=21629.333333333332, ans=0.125 +2024-07-27 13:36:20,609 INFO [train.py:1114] (0/4) Epoch 2, batch 6000, loss[loss=0.3577, simple_loss=0.4104, pruned_loss=0.1524, over 4252.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3756, pruned_loss=0.1304, over 938240.19 frames. ], batch size: 25, lr: 2.86e-02, grad_scale: 64.0 +2024-07-27 13:36:20,610 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 13:36:33,367 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.7364, 3.5918, 3.7596, 4.1166], device='cuda:0') +2024-07-27 13:36:34,281 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.5945, 5.4668, 5.1062, 3.4493], device='cuda:0') +2024-07-27 13:36:36,192 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.2564, simple_loss=0.3503, pruned_loss=0.08121, over 944034.00 frames. +2024-07-27 13:36:36,192 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 13:36:40,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.83 vs. limit=15.0 +2024-07-27 13:36:40,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.11 vs. 
limit=5.0 +2024-07-27 13:36:43,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=21642.666666666668, ans=0.95 +2024-07-27 13:36:43,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=21642.666666666668, ans=0.0 +2024-07-27 13:36:46,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21642.666666666668, ans=0.125 +2024-07-27 13:36:50,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=21656.0, ans=0.025 +2024-07-27 13:36:59,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21669.333333333332, ans=0.1 +2024-07-27 13:37:01,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.28 vs. limit=6.0 +2024-07-27 13:37:01,706 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:37:05,374 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.23 vs. limit=22.5 +2024-07-27 13:37:05,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=21682.666666666668, ans=0.0 +2024-07-27 13:37:05,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.31 vs. limit=22.5 +2024-07-27 13:37:10,636 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:37:10,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.23 vs. limit=15.0 +2024-07-27 13:37:11,085 INFO [train.py:1114] (0/4) Epoch 2, batch 6050, loss[loss=0.2499, simple_loss=0.3218, pruned_loss=0.08904, over 4765.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3743, pruned_loss=0.1291, over 939460.28 frames. ], batch size: 12, lr: 2.85e-02, grad_scale: 64.0 +2024-07-27 13:37:16,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=21696.0, ans=0.0 +2024-07-27 13:37:29,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.97 vs. 
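At batch 6000 the loop pauses to compute a validation loss over the full validation set (944034 frames here), logs a few attention-weight entropy tensors from inside zipformer.py, and reports the peak GPU memory allocated so far. A sketch of that pass follows; the `compute_loss` callable and its `(summed loss, num_frames)` return are an assumed interface, not the actual train.py API.

```python
import torch

@torch.no_grad()
def validate(model, valid_dl, compute_loss, device) -> float:
    # compute_loss(model, batch, device) -> (summed loss tensor, num_frames)
    model.eval()
    loss_sum, frame_sum = 0.0, 0
    for batch in valid_dl:
        loss, num_frames = compute_loss(model, batch, device)
        loss_sum += loss.item()
        frame_sum += num_frames
    model.train()
    mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={loss_sum / max(frame_sum, 1):.4f} "
          f"over {frame_sum} frames; max memory {mem_mb}MB")
    return loss_sum / max(frame_sum, 1)
```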
limit=15.0 +2024-07-27 13:37:31,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=21736.0, ans=0.025 +2024-07-27 13:37:31,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=21736.0, ans=0.0 +2024-07-27 13:37:34,508 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.397e+01 6.133e+01 6.810e+01 7.852e+01 1.499e+02, threshold=1.362e+02, percent-clipped=2.0 +2024-07-27 13:37:36,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=21736.0, ans=0.125 +2024-07-27 13:37:42,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=21749.333333333332, ans=0.2 +2024-07-27 13:37:43,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=21749.333333333332, ans=0.0 +2024-07-27 13:37:45,502 INFO [train.py:1114] (0/4) Epoch 2, batch 6100, loss[loss=0.3397, simple_loss=0.3973, pruned_loss=0.141, over 4683.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3745, pruned_loss=0.1287, over 938427.94 frames. ], batch size: 15, lr: 2.85e-02, grad_scale: 64.0 +2024-07-27 13:37:54,914 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0 +2024-07-27 13:38:00,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=21789.333333333332, ans=10.0 +2024-07-27 13:38:03,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21789.333333333332, ans=0.1 +2024-07-27 13:38:20,159 INFO [train.py:1114] (0/4) Epoch 2, batch 6150, loss[loss=0.4495, simple_loss=0.4604, pruned_loss=0.2193, over 3415.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3751, pruned_loss=0.129, over 937062.27 frames. ], batch size: 35, lr: 2.85e-02, grad_scale: 64.0 +2024-07-27 13:38:21,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=21829.333333333332, ans=0.125 +2024-07-27 13:38:38,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.39 vs. limit=10.0 +2024-07-27 13:38:39,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.59 vs. limit=15.0 +2024-07-27 13:38:41,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21869.333333333332, ans=0.1 +2024-07-27 13:38:44,317 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.693e+01 6.434e+01 7.098e+01 7.748e+01 1.262e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 13:38:51,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.53 vs. limit=10.0 +2024-07-27 13:38:56,897 INFO [train.py:1114] (0/4) Epoch 2, batch 6200, loss[loss=0.299, simple_loss=0.3611, pruned_loss=0.1184, over 4738.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3761, pruned_loss=0.1297, over 936368.16 frames. 
], batch size: 14, lr: 2.84e-02, grad_scale: 64.0 +2024-07-27 13:39:01,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=21896.0, ans=0.006109565217391305 +2024-07-27 13:39:09,146 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:39:17,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=21922.666666666668, ans=0.125 +2024-07-27 13:39:22,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=21936.0, ans=0.125 +2024-07-27 13:39:27,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0 +2024-07-27 13:39:30,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21949.333333333332, ans=0.125 +2024-07-27 13:39:38,449 INFO [train.py:1114] (0/4) Epoch 2, batch 6250, loss[loss=0.34, simple_loss=0.4078, pruned_loss=0.1361, over 4805.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3771, pruned_loss=0.1304, over 932385.22 frames. ], batch size: 14, lr: 2.84e-02, grad_scale: 64.0 +2024-07-27 13:39:43,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=21962.666666666668, ans=0.125 +2024-07-27 13:39:48,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21976.0, ans=0.1 +2024-07-27 13:39:49,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.09 vs. limit=15.0 +2024-07-27 13:39:55,923 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:40:04,082 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.093e+01 6.216e+01 6.990e+01 7.888e+01 1.132e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 13:40:10,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=22016.0, ans=0.2 +2024-07-27 13:40:11,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=22016.0, ans=0.5 +2024-07-27 13:40:15,191 INFO [train.py:1114] (0/4) Epoch 2, batch 6300, loss[loss=0.2548, simple_loss=0.3216, pruned_loss=0.09403, over 4911.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.376, pruned_loss=0.1302, over 929771.83 frames. ], batch size: 11, lr: 2.84e-02, grad_scale: 64.0 +2024-07-27 13:40:18,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.94 vs. limit=22.5 +2024-07-27 13:40:19,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.43 vs. 
limit=22.5 +2024-07-27 13:40:24,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=22042.666666666668, ans=0.025 +2024-07-27 13:40:28,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=22042.666666666668, ans=0.125 +2024-07-27 13:40:31,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=22056.0, ans=0.006074782608695652 +2024-07-27 13:40:32,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=22056.0, ans=0.07 +2024-07-27 13:40:58,405 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.75 vs. limit=15.0 +2024-07-27 13:41:02,175 INFO [train.py:1114] (0/4) Epoch 2, batch 6350, loss[loss=0.3138, simple_loss=0.3713, pruned_loss=0.1282, over 4519.00 frames. ], tot_loss[loss=0.318, simple_loss=0.376, pruned_loss=0.13, over 933831.08 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0 +2024-07-27 13:41:03,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.71 vs. limit=15.0 +2024-07-27 13:41:05,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=22096.0, ans=0.05 +2024-07-27 13:41:12,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=22109.333333333332, ans=0.07 +2024-07-27 13:41:25,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.585e+01 6.300e+01 6.631e+01 7.435e+01 1.313e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 13:41:27,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.85 vs. limit=22.5 +2024-07-27 13:41:29,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=15.0 +2024-07-27 13:41:36,417 INFO [train.py:1114] (0/4) Epoch 2, batch 6400, loss[loss=0.3389, simple_loss=0.4103, pruned_loss=0.1338, over 4635.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3748, pruned_loss=0.1294, over 935595.49 frames. ], batch size: 13, lr: 2.83e-02, grad_scale: 64.0 +2024-07-27 13:41:38,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22162.666666666668, ans=0.125 +2024-07-27 13:41:51,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.88 vs. limit=15.0 +2024-07-27 13:41:53,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22189.333333333332, ans=0.125 +2024-07-27 13:42:09,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=22216.0, ans=0.0 +2024-07-27 13:42:10,973 INFO [train.py:1114] (0/4) Epoch 2, batch 6450, loss[loss=0.3362, simple_loss=0.3857, pruned_loss=0.1434, over 4532.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3748, pruned_loss=0.1287, over 939128.15 frames. 
], batch size: 21, lr: 2.83e-02, grad_scale: 64.0 +2024-07-27 13:42:15,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.09 vs. limit=22.5 +2024-07-27 13:42:16,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.19 vs. limit=15.0 +2024-07-27 13:42:22,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=22242.666666666668, ans=0.125 +2024-07-27 13:42:34,025 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.221e+01 6.785e+01 7.657e+01 1.359e+02, threshold=1.357e+02, percent-clipped=1.0 +2024-07-27 13:42:41,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=22282.666666666668, ans=0.5 +2024-07-27 13:42:45,070 INFO [train.py:1114] (0/4) Epoch 2, batch 6500, loss[loss=0.5683, simple_loss=0.5461, pruned_loss=0.2952, over 3191.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3756, pruned_loss=0.1296, over 940081.46 frames. ], batch size: 36, lr: 2.82e-02, grad_scale: 64.0 +2024-07-27 13:42:45,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=22296.0, ans=0.0 +2024-07-27 13:43:01,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=22322.666666666668, ans=22.5 +2024-07-27 13:43:08,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=22336.0, ans=0.025 +2024-07-27 13:43:11,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=22349.333333333332, ans=0.1 +2024-07-27 13:43:16,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=22349.333333333332, ans=0.125 +2024-07-27 13:43:19,929 INFO [train.py:1114] (0/4) Epoch 2, batch 6550, loss[loss=0.2724, simple_loss=0.3192, pruned_loss=0.1128, over 4805.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.3745, pruned_loss=0.1284, over 942914.54 frames. ], batch size: 11, lr: 2.82e-02, grad_scale: 64.0 +2024-07-27 13:43:24,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=22362.666666666668, ans=0.125 +2024-07-27 13:43:32,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=22389.333333333332, ans=0.125 +2024-07-27 13:43:33,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=22389.333333333332, ans=0.125 +2024-07-27 13:43:33,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.34 vs. 
limit=10.0 +2024-07-27 13:43:40,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=22402.666666666668, ans=0.0 +2024-07-27 13:43:43,156 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.181e+01 6.196e+01 6.780e+01 7.401e+01 1.122e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 13:43:44,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=12.0 +2024-07-27 13:43:49,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=22416.0, ans=0.07 +2024-07-27 13:43:51,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.36 vs. limit=8.0 +2024-07-27 13:43:56,108 INFO [train.py:1114] (0/4) Epoch 2, batch 6600, loss[loss=0.3116, simple_loss=0.3846, pruned_loss=0.1193, over 4931.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3745, pruned_loss=0.1289, over 944951.34 frames. ], batch size: 14, lr: 2.82e-02, grad_scale: 128.0 +2024-07-27 13:44:02,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22442.666666666668, ans=0.1 +2024-07-27 13:44:02,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.48 vs. limit=22.5 +2024-07-27 13:44:24,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.13 vs. limit=15.0 +2024-07-27 13:44:27,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=22482.666666666668, ans=0.0 +2024-07-27 13:44:31,235 INFO [train.py:1114] (0/4) Epoch 2, batch 6650, loss[loss=0.3518, simple_loss=0.3979, pruned_loss=0.1529, over 4655.00 frames. ], tot_loss[loss=0.3151, simple_loss=0.3737, pruned_loss=0.1282, over 943607.81 frames. ], batch size: 17, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:44:38,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.12 vs. limit=10.0 +2024-07-27 13:44:53,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.46 vs. limit=15.0 +2024-07-27 13:44:55,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22536.0, ans=0.1 +2024-07-27 13:44:55,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=22536.0, ans=0.0 +2024-07-27 13:44:58,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.864e+01 6.602e+01 7.128e+01 7.971e+01 1.702e+02, threshold=1.426e+02, percent-clipped=1.0 +2024-07-27 13:45:10,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.97 vs. 
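The `grad_scale` column is the mixed-precision loss scale; it doubles from 64.0 to 128.0 at batch 6600 after a long stretch of overflow-free steps, consistent with PyTorch's `GradScaler` growth rule. A sketch of the corresponding configuration is below; the growth interval is an assumption, and the recipe may use its own scaler rather than the stock one.

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=64.0,        # matches the grad_scale seen earlier in the log
    growth_factor=2.0,      # doubles the scale (64 -> 128 at batch 6600)
    backoff_factor=0.5,     # halves it when an overflow is detected
    growth_interval=2000,   # overflow-free steps before growing (assumed)
)

# standard mixed-precision step:
# with torch.cuda.amp.autocast():
#     loss = compute_loss(model, batch)
# scaler.scale(loss).backward()
# scaler.step(optimizer)
# scaler.update()
```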
limit=15.0 +2024-07-27 13:45:14,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=22549.333333333332, ans=0.125 +2024-07-27 13:45:17,345 INFO [train.py:1114] (0/4) Epoch 2, batch 6700, loss[loss=0.3272, simple_loss=0.3944, pruned_loss=0.13, over 4717.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3736, pruned_loss=0.1279, over 942689.42 frames. ], batch size: 19, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:45:17,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=22562.666666666668, ans=0.2 +2024-07-27 13:45:22,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=22562.666666666668, ans=0.05 +2024-07-27 13:45:23,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22576.0, ans=0.1 +2024-07-27 13:45:32,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=22589.333333333332, ans=0.07 +2024-07-27 13:45:37,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=22589.333333333332, ans=0.05 +2024-07-27 13:45:40,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=22602.666666666668, ans=0.5 +2024-07-27 13:45:44,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=22602.666666666668, ans=0.125 +2024-07-27 13:45:53,895 INFO [train.py:1114] (0/4) Epoch 2, batch 6750, loss[loss=0.3333, simple_loss=0.3844, pruned_loss=0.1411, over 4422.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.374, pruned_loss=0.1287, over 940999.73 frames. ], batch size: 26, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:46:02,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=22642.666666666668, ans=0.125 +2024-07-27 13:46:04,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=22642.666666666668, ans=0.125 +2024-07-27 13:46:04,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=22642.666666666668, ans=0.2 +2024-07-27 13:46:05,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22642.666666666668, ans=0.1 +2024-07-27 13:46:13,640 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:46:19,139 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 6.419e+01 6.907e+01 8.025e+01 1.154e+02, threshold=1.381e+02, percent-clipped=0.0 +2024-07-27 13:46:24,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=22.5 +2024-07-27 13:46:27,583 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.18 vs. 
limit=22.5 +2024-07-27 13:46:29,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22682.666666666668, ans=0.125 +2024-07-27 13:46:31,952 INFO [train.py:1114] (0/4) Epoch 2, batch 6800, loss[loss=0.3074, simple_loss=0.3813, pruned_loss=0.1167, over 4630.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3759, pruned_loss=0.1299, over 939460.34 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0 +2024-07-27 13:46:36,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=22696.0, ans=0.2 +2024-07-27 13:46:37,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=22696.0, ans=0.2 +2024-07-27 13:46:38,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=22709.333333333332, ans=0.025 +2024-07-27 13:46:58,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0 +2024-07-27 13:47:04,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=22749.333333333332, ans=0.125 +2024-07-27 13:47:06,015 INFO [train.py:1114] (0/4) Epoch 2, batch 6850, loss[loss=0.249, simple_loss=0.3181, pruned_loss=0.08999, over 4698.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3755, pruned_loss=0.1295, over 941074.43 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0 +2024-07-27 13:47:07,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=22762.666666666668, ans=10.0 +2024-07-27 13:47:11,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22762.666666666668, ans=0.1 +2024-07-27 13:47:12,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=22776.0, ans=0.2 +2024-07-27 13:47:27,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22789.333333333332, ans=0.0 +2024-07-27 13:47:33,807 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 6.353e+01 6.914e+01 7.942e+01 1.137e+02, threshold=1.383e+02, percent-clipped=0.0 +2024-07-27 13:47:42,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=22816.0, ans=0.025 +2024-07-27 13:47:46,071 INFO [train.py:1114] (0/4) Epoch 2, batch 6900, loss[loss=0.2483, simple_loss=0.3163, pruned_loss=0.09019, over 4962.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3748, pruned_loss=0.1287, over 943247.25 frames. 
], batch size: 13, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:47:49,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=22829.333333333332, ans=0.125 +2024-07-27 13:47:51,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=22829.333333333332, ans=0.1 +2024-07-27 13:47:54,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=22842.666666666668, ans=0.025 +2024-07-27 13:47:55,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22842.666666666668, ans=0.1 +2024-07-27 13:47:56,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=22842.666666666668, ans=0.5 +2024-07-27 13:48:02,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=22856.0, ans=0.2 +2024-07-27 13:48:03,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=22856.0, ans=0.0 +2024-07-27 13:48:09,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22869.333333333332, ans=0.1 +2024-07-27 13:48:16,903 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:48:19,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=22882.666666666668, ans=0.025 +2024-07-27 13:48:21,669 INFO [train.py:1114] (0/4) Epoch 2, batch 6950, loss[loss=0.2927, simple_loss=0.3315, pruned_loss=0.127, over 4479.00 frames. ], tot_loss[loss=0.3159, simple_loss=0.3737, pruned_loss=0.129, over 940922.77 frames. ], batch size: 10, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:48:44,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22922.666666666668, ans=0.1 +2024-07-27 13:48:52,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=22936.0, ans=0.95 +2024-07-27 13:48:54,775 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.446e+01 7.112e+01 7.644e+01 1.059e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 13:49:01,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0 +2024-07-27 13:49:01,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=22949.333333333332, ans=0.0 +2024-07-27 13:49:02,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=22949.333333333332, ans=0.5 +2024-07-27 13:49:05,672 INFO [train.py:1114] (0/4) Epoch 2, batch 7000, loss[loss=0.3442, simple_loss=0.4025, pruned_loss=0.143, over 4659.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3718, pruned_loss=0.1277, over 938892.71 frames. 
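Note that the `tot_loss` frame totals are fractional (e.g. `over 938892.71 frames`), which suggests a decayed, frame-weighted running average rather than a plain sum over the epoch. One way to produce such numbers, offered as a guess at the bookkeeping rather than the actual MetricsTracker logic:

```python
class RunningLoss:
    """Frame-weighted running average with exponential forgetting."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0    # decays toward a fractional total

    def update(self, batch_loss_sum: float, batch_frames: int) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss_sum
        self.frames = self.decay * self.frames + batch_frames

    @property
    def avg(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```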
], batch size: 17, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:49:11,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=22976.0, ans=0.005874782608695652 +2024-07-27 13:49:28,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=22989.333333333332, ans=0.125 +2024-07-27 13:49:38,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=23016.0, ans=0.2 +2024-07-27 13:49:38,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=23016.0, ans=0.0 +2024-07-27 13:49:39,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.31 vs. limit=12.0 +2024-07-27 13:49:42,731 INFO [train.py:1114] (0/4) Epoch 2, batch 7050, loss[loss=0.3175, simple_loss=0.3819, pruned_loss=0.1265, over 4719.00 frames. ], tot_loss[loss=0.3133, simple_loss=0.3715, pruned_loss=0.1275, over 942666.69 frames. ], batch size: 19, lr: 2.78e-02, grad_scale: 128.0 +2024-07-27 13:50:13,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=23069.333333333332, ans=0.05 +2024-07-27 13:50:17,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+01 6.903e+01 7.811e+01 8.989e+01 1.248e+02, threshold=1.562e+02, percent-clipped=0.0 +2024-07-27 13:50:18,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.88 vs. limit=10.0 +2024-07-27 13:50:22,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.67 vs. limit=12.0 +2024-07-27 13:50:27,356 INFO [train.py:1114] (0/4) Epoch 2, batch 7100, loss[loss=0.2902, simple_loss=0.3551, pruned_loss=0.1127, over 4810.00 frames. ], tot_loss[loss=0.3148, simple_loss=0.3726, pruned_loss=0.1285, over 936981.74 frames. ], batch size: 15, lr: 2.78e-02, grad_scale: 64.0 +2024-07-27 13:50:33,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0 +2024-07-27 13:50:34,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=23109.333333333332, ans=10.0 +2024-07-27 13:50:37,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.97 vs. limit=8.0 +2024-07-27 13:50:53,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=23122.666666666668, ans=0.125 +2024-07-27 13:50:56,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.55 vs. limit=15.0 +2024-07-27 13:50:58,923 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.46 vs. 
limit=22.5 +2024-07-27 13:51:07,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=23162.666666666668, ans=0.2 +2024-07-27 13:51:07,821 INFO [train.py:1114] (0/4) Epoch 2, batch 7150, loss[loss=0.425, simple_loss=0.4698, pruned_loss=0.1901, over 4499.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3718, pruned_loss=0.1286, over 937861.48 frames. ], batch size: 21, lr: 2.78e-02, grad_scale: 64.0 +2024-07-27 13:51:12,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0 +2024-07-27 13:51:22,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=23176.0, ans=0.125 +2024-07-27 13:51:24,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.32 vs. limit=15.0 +2024-07-27 13:51:41,527 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.182e+01 6.431e+01 7.159e+01 7.939e+01 1.328e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 13:51:44,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=23202.666666666668, ans=0.005825507246376812 +2024-07-27 13:51:48,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=23216.0, ans=0.005822608695652174 +2024-07-27 13:51:48,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23216.0, ans=0.125 +2024-07-27 13:51:51,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 13:51:53,962 INFO [train.py:1114] (0/4) Epoch 2, batch 7200, loss[loss=0.2729, simple_loss=0.357, pruned_loss=0.09446, over 4789.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.3724, pruned_loss=0.1283, over 937993.83 frames. ], batch size: 15, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:51:57,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.18 vs. limit=22.5 +2024-07-27 13:51:58,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=23229.333333333332, ans=0.125 +2024-07-27 13:51:59,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=23229.333333333332, ans=0.0 +2024-07-27 13:51:59,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=23229.333333333332, ans=0.125 +2024-07-27 13:52:11,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.04 vs. 
limit=12.0 +2024-07-27 13:52:19,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=23269.333333333332, ans=0.2 +2024-07-27 13:52:21,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=23282.666666666668, ans=0.2 +2024-07-27 13:52:28,624 INFO [train.py:1114] (0/4) Epoch 2, batch 7250, loss[loss=0.2986, simple_loss=0.3505, pruned_loss=0.1233, over 4852.00 frames. ], tot_loss[loss=0.3132, simple_loss=0.3713, pruned_loss=0.1276, over 939750.81 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:52:33,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=12.0 +2024-07-27 13:52:42,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=23309.333333333332, ans=0.125 +2024-07-27 13:52:50,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.37 vs. limit=6.0 +2024-07-27 13:52:54,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.237e+01 6.919e+01 7.525e+01 1.117e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 13:53:06,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=23349.333333333332, ans=0.125 +2024-07-27 13:53:10,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=23349.333333333332, ans=0.125 +2024-07-27 13:53:14,180 INFO [train.py:1114] (0/4) Epoch 2, batch 7300, loss[loss=0.2563, simple_loss=0.3206, pruned_loss=0.09597, over 4853.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3707, pruned_loss=0.1273, over 939592.82 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:53:14,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=23362.666666666668, ans=0.0 +2024-07-27 13:53:21,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=15.0 +2024-07-27 13:53:28,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=23389.333333333332, ans=0.125 +2024-07-27 13:53:31,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=23389.333333333332, ans=0.2 +2024-07-27 13:53:49,019 INFO [train.py:1114] (0/4) Epoch 2, batch 7350, loss[loss=0.2964, simple_loss=0.3585, pruned_loss=0.1172, over 4645.00 frames. ], tot_loss[loss=0.3111, simple_loss=0.3699, pruned_loss=0.1261, over 938776.66 frames. 
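The lr column drifts down slowly even within the epoch (2.92e-02 around batch 5250, 2.74e-02 by batch 7650), consistent with an Eden-style schedule that decays smoothly in both batch count and epoch, as used in icefall recipes. A sketch with the usual recipe defaults follows; the formula shape and the constants are assumptions here, not read from this run's optim.py.

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Rate decays as a -0.25 power in both batch count and epoch.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```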
], batch size: 12, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:53:53,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23429.333333333332, ans=0.1 +2024-07-27 13:53:56,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23429.333333333332, ans=0.1 +2024-07-27 13:54:01,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=23442.666666666668, ans=0.025 +2024-07-27 13:54:06,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=23456.0, ans=0.1 +2024-07-27 13:54:14,582 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.547e+01 7.387e+01 8.600e+01 1.543e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 13:54:20,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=23469.333333333332, ans=0.005767536231884058 +2024-07-27 13:54:22,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23482.666666666668, ans=0.1 +2024-07-27 13:54:26,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=23482.666666666668, ans=0.125 +2024-07-27 13:54:45,262 INFO [train.py:1114] (0/4) Epoch 2, batch 7400, loss[loss=0.3585, simple_loss=0.4067, pruned_loss=0.1551, over 4694.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3715, pruned_loss=0.1263, over 940061.30 frames. ], batch size: 13, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:54:59,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.08 vs. limit=22.5 +2024-07-27 13:55:18,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=23549.333333333332, ans=0.2 +2024-07-27 13:55:19,703 INFO [train.py:1114] (0/4) Epoch 2, batch 7450, loss[loss=0.324, simple_loss=0.3657, pruned_loss=0.1412, over 4606.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3713, pruned_loss=0.1269, over 937781.44 frames. 
], batch size: 11, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:55:22,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23562.666666666668, ans=0.125 +2024-07-27 13:55:31,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=23576.0, ans=0.125 +2024-07-27 13:55:35,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=23589.333333333332, ans=0.125 +2024-07-27 13:55:47,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=23602.666666666668, ans=0.0 +2024-07-27 13:55:47,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.335e+01 6.373e+01 7.113e+01 7.806e+01 1.283e+02, threshold=1.423e+02, percent-clipped=0.0 +2024-07-27 13:55:47,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=23602.666666666668, ans=0.125 +2024-07-27 13:55:50,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=23616.0, ans=0.05 +2024-07-27 13:55:57,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=15.0 +2024-07-27 13:55:58,077 INFO [train.py:1114] (0/4) Epoch 2, batch 7500, loss[loss=0.4116, simple_loss=0.4321, pruned_loss=0.1955, over 3590.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3715, pruned_loss=0.1278, over 936625.50 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:56:12,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23629.333333333332, ans=0.125 +2024-07-27 13:56:29,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.01 vs. limit=6.0 +2024-07-27 13:56:36,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.80 vs. limit=10.0 +2024-07-27 13:56:36,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=23669.333333333332, ans=0.025 +2024-07-27 13:56:40,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23682.666666666668, ans=0.125 +2024-07-27 13:56:47,501 INFO [train.py:1114] (0/4) Epoch 2, batch 7550, loss[loss=0.3646, simple_loss=0.4103, pruned_loss=0.1595, over 4635.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3755, pruned_loss=0.1301, over 936090.35 frames. ], batch size: 17, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:56:52,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.67 vs. limit=22.5 +2024-07-27 13:56:58,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.94 vs. 
limit=15.0 +2024-07-27 13:57:02,173 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:57:04,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=23722.666666666668, ans=0.125 +2024-07-27 13:57:05,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=23722.666666666668, ans=0.125 +2024-07-27 13:57:15,319 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.494e+01 6.851e+01 7.705e+01 1.471e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-27 13:57:22,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.88 vs. limit=15.0 +2024-07-27 13:57:25,096 INFO [train.py:1114] (0/4) Epoch 2, batch 7600, loss[loss=0.3336, simple_loss=0.3823, pruned_loss=0.1424, over 4806.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3741, pruned_loss=0.129, over 937882.62 frames. ], batch size: 14, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:57:29,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=23762.666666666668, ans=0.005703768115942028 +2024-07-27 13:57:33,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23776.0, ans=0.125 +2024-07-27 13:57:37,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23789.333333333332, ans=0.1 +2024-07-27 13:57:44,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=15.0 +2024-07-27 13:57:54,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=23816.0, ans=0.0056921739130434785 +2024-07-27 13:57:58,727 INFO [train.py:1114] (0/4) Epoch 2, batch 7650, loss[loss=0.2976, simple_loss=0.3478, pruned_loss=0.1237, over 4935.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3749, pruned_loss=0.1298, over 936858.94 frames. ], batch size: 12, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 13:59:48,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.93 vs. limit=22.5 +2024-07-27 13:59:49,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=23869.333333333332, ans=0.125 +2024-07-27 13:59:50,981 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.430e+01 6.487e+01 6.980e+01 8.234e+01 1.140e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 13:59:51,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=23869.333333333332, ans=0.07 +2024-07-27 14:00:00,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.91 vs. limit=15.0 +2024-07-27 14:00:01,165 INFO [train.py:1114] (0/4) Epoch 2, batch 7700, loss[loss=0.2936, simple_loss=0.3694, pruned_loss=0.1089, over 4688.00 frames. ], tot_loss[loss=0.3184, simple_loss=0.376, pruned_loss=0.1304, over 934319.56 frames. 
], batch size: 13, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:05,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23896.0, ans=0.1 +2024-07-27 14:00:08,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=23909.333333333332, ans=0.05 +2024-07-27 14:00:11,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.34 vs. limit=22.5 +2024-07-27 14:00:13,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-07-27 14:00:15,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=23922.666666666668, ans=0.2 +2024-07-27 14:00:24,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=23936.0, ans=0.2 +2024-07-27 14:00:31,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.33 vs. limit=6.0 +2024-07-27 14:00:34,012 INFO [train.py:1114] (0/4) Epoch 2, batch 7750, loss[loss=0.2909, simple_loss=0.3496, pruned_loss=0.1161, over 4938.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.3761, pruned_loss=0.1296, over 935570.62 frames. ], batch size: 14, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:36,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=23962.666666666668, ans=0.1 +2024-07-27 14:00:57,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=23989.333333333332, ans=0.2 +2024-07-27 14:00:57,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.39 vs. limit=22.5 +2024-07-27 14:00:59,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=24002.666666666668, ans=0.005651594202898551 +2024-07-27 14:01:01,471 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.212e+01 6.407e+01 7.069e+01 7.682e+01 1.137e+02, threshold=1.414e+02, percent-clipped=0.0 +2024-07-27 14:01:03,139 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:01:07,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=24016.0, ans=0.005648695652173913 +2024-07-27 14:01:11,458 INFO [train.py:1114] (0/4) Epoch 2, batch 7800, loss[loss=0.3111, simple_loss=0.3723, pruned_loss=0.1249, over 4670.00 frames. ], tot_loss[loss=0.3159, simple_loss=0.3753, pruned_loss=0.1283, over 937117.69 frames. 
], batch size: 14, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:01:19,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=24042.666666666668, ans=0.025 +2024-07-27 14:01:32,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=24069.333333333332, ans=0.0 +2024-07-27 14:01:33,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.22 vs. limit=22.5 +2024-07-27 14:01:36,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=24069.333333333332, ans=0.0 +2024-07-27 14:01:37,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=24082.666666666668, ans=0.125 +2024-07-27 14:01:41,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=24082.666666666668, ans=0.0 +2024-07-27 14:01:44,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.06 vs. limit=6.0 +2024-07-27 14:01:44,897 INFO [train.py:1114] (0/4) Epoch 2, batch 7850, loss[loss=0.2519, simple_loss=0.3188, pruned_loss=0.09251, over 4489.00 frames. ], tot_loss[loss=0.3149, simple_loss=0.3742, pruned_loss=0.1278, over 935975.11 frames. ], batch size: 10, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:01:48,545 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=12.58 vs. limit=15.0 +2024-07-27 14:01:49,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.51 vs. limit=15.0 +2024-07-27 14:01:53,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=24109.333333333332, ans=0.09899494936611666 +2024-07-27 14:01:57,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=24122.666666666668, ans=0.125 +2024-07-27 14:02:09,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=24136.0, ans=0.0 +2024-07-27 14:02:10,322 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+01 6.475e+01 7.021e+01 7.812e+01 1.156e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 14:02:13,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=24149.333333333332, ans=0.125 +2024-07-27 14:02:20,256 INFO [train.py:1114] (0/4) Epoch 2, batch 7900, loss[loss=0.3492, simple_loss=0.3925, pruned_loss=0.153, over 4864.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3748, pruned_loss=0.1287, over 932755.42 frames. 
], batch size: 14, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:02:21,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=24162.666666666668, ans=0.05 +2024-07-27 14:02:23,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=24162.666666666668, ans=0.2 +2024-07-27 14:02:25,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.22 vs. limit=15.0 +2024-07-27 14:02:28,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24176.0, ans=0.1 +2024-07-27 14:02:32,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24176.0, ans=0.1 +2024-07-27 14:02:32,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=24176.0, ans=0.025 +2024-07-27 14:02:36,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24189.333333333332, ans=0.0 +2024-07-27 14:02:41,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=24202.666666666668, ans=0.0056081159420289855 +2024-07-27 14:02:49,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.79 vs. limit=10.0 +2024-07-27 14:02:53,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24229.333333333332, ans=0.125 +2024-07-27 14:02:53,707 INFO [train.py:1114] (0/4) Epoch 2, batch 7950, loss[loss=0.4463, simple_loss=0.4431, pruned_loss=0.2248, over 3477.00 frames. ], tot_loss[loss=0.3144, simple_loss=0.3738, pruned_loss=0.1275, over 934985.49 frames. ], batch size: 35, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:02:58,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=24229.333333333332, ans=0.95 +2024-07-27 14:03:03,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.91 vs. limit=15.0 +2024-07-27 14:03:03,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.73 vs. 
limit=22.5 +2024-07-27 14:03:07,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=24256.0, ans=0.2 +2024-07-27 14:03:08,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=24256.0, ans=0.125 +2024-07-27 14:03:11,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=24256.0, ans=0.5 +2024-07-27 14:03:17,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.361e+01 6.433e+01 7.086e+01 8.045e+01 1.490e+02, threshold=1.417e+02, percent-clipped=1.0 +2024-07-27 14:03:19,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=24269.333333333332, ans=0.125 +2024-07-27 14:03:25,129 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:03:26,900 INFO [train.py:1114] (0/4) Epoch 2, batch 8000, loss[loss=0.2849, simple_loss=0.3473, pruned_loss=0.1113, over 4596.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3702, pruned_loss=0.1256, over 934155.31 frames. ], batch size: 11, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:03:30,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24296.0, ans=0.1 +2024-07-27 14:03:31,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=24296.0, ans=0.125 +2024-07-27 14:03:31,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24296.0, ans=0.125 +2024-07-27 14:03:33,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=24309.333333333332, ans=0.0 +2024-07-27 14:03:34,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=24309.333333333332, ans=0.09899494936611666 +2024-07-27 14:03:40,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=24322.666666666668, ans=0.125 +2024-07-27 14:04:00,345 INFO [train.py:1114] (0/4) Epoch 2, batch 8050, loss[loss=0.3041, simple_loss=0.3694, pruned_loss=0.1194, over 4814.00 frames. ], tot_loss[loss=0.3101, simple_loss=0.3696, pruned_loss=0.1253, over 933473.23 frames. ], batch size: 14, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:04:09,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.61 vs. limit=15.0 +2024-07-27 14:04:20,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.92 vs. 
limit=15.0 +2024-07-27 14:04:23,842 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.126e+01 6.809e+01 7.483e+01 1.319e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-27 14:04:24,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=24402.666666666668, ans=0.00556463768115942 +2024-07-27 14:04:25,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=24402.666666666668, ans=0.00556463768115942 +2024-07-27 14:04:27,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24416.0, ans=0.1 +2024-07-27 14:04:28,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24416.0, ans=0.125 +2024-07-27 14:04:29,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=24416.0, ans=0.125 +2024-07-27 14:04:33,618 INFO [train.py:1114] (0/4) Epoch 2, batch 8100, loss[loss=0.3848, simple_loss=0.4231, pruned_loss=0.1733, over 4807.00 frames. ], tot_loss[loss=0.3111, simple_loss=0.3702, pruned_loss=0.126, over 933528.62 frames. ], batch size: 15, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:05:15,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24456.0, ans=0.125 +2024-07-27 14:05:58,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=24482.666666666668, ans=0.125 +2024-07-27 14:05:59,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=24482.666666666668, ans=0.2 +2024-07-27 14:05:59,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=18.14 vs. limit=15.0 +2024-07-27 14:06:02,876 INFO [train.py:1114] (0/4) Epoch 2, batch 8150, loss[loss=0.2987, simple_loss=0.3894, pruned_loss=0.104, over 4799.00 frames. ], tot_loss[loss=0.31, simple_loss=0.3693, pruned_loss=0.1253, over 936955.88 frames. 
], batch size: 15, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:06:03,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24496.0, ans=0.1 +2024-07-27 14:06:20,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=24509.333333333332, ans=0.05 +2024-07-27 14:06:21,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24509.333333333332, ans=0.1 +2024-07-27 14:06:23,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24509.333333333332, ans=0.1 +2024-07-27 14:06:27,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=24522.666666666668, ans=0.125 +2024-07-27 14:06:50,927 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.617e+01 6.763e+01 7.386e+01 8.088e+01 1.261e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 14:07:00,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24549.333333333332, ans=0.0 +2024-07-27 14:07:01,511 INFO [train.py:1114] (0/4) Epoch 2, batch 8200, loss[loss=0.3619, simple_loss=0.4138, pruned_loss=0.1551, over 4812.00 frames. ], tot_loss[loss=0.3105, simple_loss=0.3704, pruned_loss=0.1253, over 938312.56 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:07:02,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=24562.666666666668, ans=0.125 +2024-07-27 14:07:08,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=24576.0, ans=0.125 +2024-07-27 14:07:17,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.39 vs. limit=10.0 +2024-07-27 14:07:18,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=24589.333333333332, ans=0.005524057971014493 +2024-07-27 14:07:19,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24589.333333333332, ans=0.1 +2024-07-27 14:07:27,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=24602.666666666668, ans=0.2 +2024-07-27 14:07:33,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=24616.0, ans=0.025 +2024-07-27 14:07:39,093 INFO [train.py:1114] (0/4) Epoch 2, batch 8250, loss[loss=0.2905, simple_loss=0.3551, pruned_loss=0.113, over 4897.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3717, pruned_loss=0.1262, over 938835.02 frames. 
], batch size: 13, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:07:40,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24629.333333333332, ans=0.1 +2024-07-27 14:07:44,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=24629.333333333332, ans=0.125 +2024-07-27 14:07:44,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.82 vs. limit=15.0 +2024-07-27 14:07:45,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=24642.666666666668, ans=0.125 +2024-07-27 14:07:51,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=24642.666666666668, ans=0.2 +2024-07-27 14:07:59,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.37 vs. limit=15.0 +2024-07-27 14:08:00,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24656.0, ans=0.0 +2024-07-27 14:08:01,523 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=7.651e+00 +2024-07-27 14:08:04,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.72 vs. limit=15.0 +2024-07-27 14:08:05,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.51 vs. limit=12.0 +2024-07-27 14:08:07,824 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.272e+01 7.057e+01 7.926e+01 1.070e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 14:08:08,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-27 14:08:32,474 INFO [train.py:1114] (0/4) Epoch 2, batch 8300, loss[loss=0.3133, simple_loss=0.3813, pruned_loss=0.1226, over 4910.00 frames. ], tot_loss[loss=0.3138, simple_loss=0.3731, pruned_loss=0.1273, over 938996.03 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:08:42,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.56 vs. limit=15.0 +2024-07-27 14:08:47,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=24722.666666666668, ans=0.125 +2024-07-27 14:08:50,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=24722.666666666668, ans=0.125 +2024-07-27 14:08:52,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24722.666666666668, ans=0.1 +2024-07-27 14:09:06,238 INFO [train.py:1114] (0/4) Epoch 2, batch 8350, loss[loss=0.3834, simple_loss=0.4359, pruned_loss=0.1654, over 4796.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3726, pruned_loss=0.1268, over 941746.72 frames. 
], batch size: 15, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:09:13,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0 +2024-07-27 14:09:17,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.36 vs. limit=15.0 +2024-07-27 14:09:18,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.80 vs. limit=15.0 +2024-07-27 14:09:19,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=24789.333333333332, ans=0.0 +2024-07-27 14:09:21,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=24789.333333333332, ans=0.125 +2024-07-27 14:09:34,719 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.372e+01 7.103e+01 7.786e+01 1.162e+02, threshold=1.421e+02, percent-clipped=0.0 +2024-07-27 14:09:37,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=24816.0, ans=0.125 +2024-07-27 14:09:38,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=24816.0, ans=0.125 +2024-07-27 14:09:41,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=24816.0, ans=0.0 +2024-07-27 14:09:44,948 INFO [train.py:1114] (0/4) Epoch 2, batch 8400, loss[loss=0.2744, simple_loss=0.3392, pruned_loss=0.1048, over 4778.00 frames. ], tot_loss[loss=0.314, simple_loss=0.3736, pruned_loss=0.1272, over 940083.44 frames. ], batch size: 12, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:09:46,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=24829.333333333332, ans=10.0 +2024-07-27 14:09:53,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=24842.666666666668, ans=0.0 +2024-07-27 14:09:54,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. 
limit=6.0 +2024-07-27 14:09:55,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=24842.666666666668, ans=0.125 +2024-07-27 14:09:58,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=24856.0, ans=0.125 +2024-07-27 14:09:59,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=24856.0, ans=0.0 +2024-07-27 14:10:05,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=24869.333333333332, ans=0.2 +2024-07-27 14:10:05,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=24869.333333333332, ans=0.0 +2024-07-27 14:10:06,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24869.333333333332, ans=0.0 +2024-07-27 14:10:15,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=24882.666666666668, ans=10.0 +2024-07-27 14:10:17,808 INFO [train.py:1114] (0/4) Epoch 2, batch 8450, loss[loss=0.2999, simple_loss=0.3621, pruned_loss=0.1189, over 4794.00 frames. ], tot_loss[loss=0.3148, simple_loss=0.3744, pruned_loss=0.1276, over 939248.42 frames. ], batch size: 15, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:10:25,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=12.0 +2024-07-27 14:10:30,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=24909.333333333332, ans=0.04949747468305833 +2024-07-27 14:10:40,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.32 vs. limit=15.0 +2024-07-27 14:10:51,594 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.225e+01 6.363e+01 6.808e+01 7.563e+01 1.440e+02, threshold=1.362e+02, percent-clipped=1.0 +2024-07-27 14:11:01,357 INFO [train.py:1114] (0/4) Epoch 2, batch 8500, loss[loss=0.2339, simple_loss=0.3043, pruned_loss=0.08178, over 4610.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3723, pruned_loss=0.1268, over 938668.72 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:11:04,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24962.666666666668, ans=0.125 +2024-07-27 14:11:04,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.69 vs. limit=22.5 +2024-07-27 14:11:12,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=15.0 +2024-07-27 14:11:17,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0 +2024-07-27 14:11:23,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. 
limit=6.0 +2024-07-27 14:11:30,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=25016.0, ans=0.0 +2024-07-27 14:11:30,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.86 vs. limit=10.0 +2024-07-27 14:11:34,219 INFO [train.py:1114] (0/4) Epoch 2, batch 8550, loss[loss=0.3028, simple_loss=0.3486, pruned_loss=0.1285, over 4806.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.3731, pruned_loss=0.1277, over 939536.56 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:11:44,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25042.666666666668, ans=0.1 +2024-07-27 14:11:45,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=25042.666666666668, ans=0.1 +2024-07-27 14:11:46,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=25042.666666666668, ans=0.125 +2024-07-27 14:11:52,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=25056.0, ans=0.2 +2024-07-27 14:11:56,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25056.0, ans=0.1 +2024-07-27 14:12:01,368 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.174e+01 6.782e+01 7.598e+01 1.715e+02, threshold=1.356e+02, percent-clipped=1.0 +2024-07-27 14:12:02,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=25069.333333333332, ans=0.2 +2024-07-27 14:12:12,548 INFO [train.py:1114] (0/4) Epoch 2, batch 8600, loss[loss=0.3021, simple_loss=0.3703, pruned_loss=0.1169, over 4806.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3718, pruned_loss=0.1265, over 939097.13 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:12:15,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0 +2024-07-27 14:12:24,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=25109.333333333332, ans=0.95 +2024-07-27 14:12:24,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25109.333333333332, ans=0.125 +2024-07-27 14:12:27,442 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=12.0 +2024-07-27 14:12:29,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.27 vs. 
limit=10.0 +2024-07-27 14:12:31,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=25122.666666666668, ans=0.0 +2024-07-27 14:12:36,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25136.0, ans=0.125 +2024-07-27 14:12:41,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25149.333333333332, ans=0.125 +2024-07-27 14:12:47,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=25149.333333333332, ans=0.025 +2024-07-27 14:12:50,763 INFO [train.py:1114] (0/4) Epoch 2, batch 8650, loss[loss=0.3095, simple_loss=0.3676, pruned_loss=0.1257, over 4897.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.3705, pruned_loss=0.1253, over 940399.56 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:13:00,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=25176.0, ans=0.04949747468305833 +2024-07-27 14:13:02,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=25176.0, ans=0.125 +2024-07-27 14:13:03,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=25176.0, ans=0.005396521739130435 +2024-07-27 14:13:04,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.52 vs. limit=15.0 +2024-07-27 14:13:13,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.621e+01 7.393e+01 8.155e+01 1.216e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 14:13:14,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=25202.666666666668, ans=0.125 +2024-07-27 14:13:21,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0 +2024-07-27 14:13:23,797 INFO [train.py:1114] (0/4) Epoch 2, batch 8700, loss[loss=0.2686, simple_loss=0.347, pruned_loss=0.09506, over 4761.00 frames. ], tot_loss[loss=0.3113, simple_loss=0.3714, pruned_loss=0.1256, over 938161.63 frames. ], batch size: 13, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:13:25,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=25229.333333333332, ans=0.2 +2024-07-27 14:13:31,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=25242.666666666668, ans=0.005382028985507247 +2024-07-27 14:13:32,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=25242.666666666668, ans=0.0 +2024-07-27 14:13:46,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25269.333333333332, ans=0.1 +2024-07-27 14:14:00,736 INFO [train.py:1114] (0/4) Epoch 2, batch 8750, loss[loss=0.3166, simple_loss=0.3744, pruned_loss=0.1294, over 4694.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3712, pruned_loss=0.1253, over 936538.83 frames. 
], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:14:14,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=25296.0, ans=0.0 +2024-07-27 14:14:24,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=25309.333333333332, ans=10.0 +2024-07-27 14:14:24,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.36 vs. limit=10.0 +2024-07-27 14:14:43,964 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.590e+01 6.384e+01 6.883e+01 7.910e+01 1.074e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 14:14:50,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=25349.333333333332, ans=0.09899494936611666 +2024-07-27 14:14:50,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=25349.333333333332, ans=0.125 +2024-07-27 14:14:50,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=25349.333333333332, ans=10.0 +2024-07-27 14:14:53,575 INFO [train.py:1114] (0/4) Epoch 2, batch 8800, loss[loss=0.3498, simple_loss=0.4265, pruned_loss=0.1365, over 4938.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3718, pruned_loss=0.1255, over 937561.19 frames. ], batch size: 14, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:14:57,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25362.666666666668, ans=0.125 +2024-07-27 14:15:08,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=25389.333333333332, ans=0.0 +2024-07-27 14:15:09,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=25389.333333333332, ans=0.2 +2024-07-27 14:15:18,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=25402.666666666668, ans=0.125 +2024-07-27 14:15:19,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25402.666666666668, ans=0.1 +2024-07-27 14:15:25,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=25416.0, ans=0.005344347826086957 +2024-07-27 14:15:27,661 INFO [train.py:1114] (0/4) Epoch 2, batch 8850, loss[loss=0.3433, simple_loss=0.3995, pruned_loss=0.1435, over 4402.00 frames. ], tot_loss[loss=0.3113, simple_loss=0.3715, pruned_loss=0.1256, over 931473.55 frames. ], batch size: 21, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:15:28,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.95 vs. 
limit=12.0 +2024-07-27 14:15:40,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=25456.0, ans=10.0 +2024-07-27 14:15:45,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=25456.0, ans=0.1 +2024-07-27 14:15:47,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-07-27 14:15:56,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=25456.0, ans=0.125 +2024-07-27 14:16:01,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.518e+01 6.996e+01 7.988e+01 1.039e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 14:16:01,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25469.333333333332, ans=0.1 +2024-07-27 14:16:11,055 INFO [train.py:1114] (0/4) Epoch 2, batch 8900, loss[loss=0.291, simple_loss=0.3513, pruned_loss=0.1154, over 4934.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3724, pruned_loss=0.1263, over 929826.28 frames. ], batch size: 12, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:16:33,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=25536.0, ans=0.025 +2024-07-27 14:16:44,042 INFO [train.py:1114] (0/4) Epoch 2, batch 8950, loss[loss=0.3362, simple_loss=0.3784, pruned_loss=0.147, over 4493.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3723, pruned_loss=0.1263, over 931028.97 frames. ], batch size: 21, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:16:53,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=25576.0, ans=0.005309565217391305 +2024-07-27 14:16:59,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=25589.333333333332, ans=0.2 +2024-07-27 14:17:00,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=25589.333333333332, ans=0.125 +2024-07-27 14:17:02,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25589.333333333332, ans=0.1 +2024-07-27 14:17:05,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25602.666666666668, ans=0.125 +2024-07-27 14:17:08,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.427e+01 6.847e+01 7.354e+01 1.255e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-27 14:17:13,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=25616.0, ans=0.2 +2024-07-27 14:17:18,098 INFO [train.py:1114] (0/4) Epoch 2, batch 9000, loss[loss=0.284, simple_loss=0.3555, pruned_loss=0.1063, over 4643.00 frames. ], tot_loss[loss=0.3106, simple_loss=0.3705, pruned_loss=0.1253, over 933666.41 frames. 
], batch size: 12, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:17:18,098 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 14:17:22,267 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0463, 4.6287, 4.7638, 4.6190], device='cuda:0') +2024-07-27 14:17:33,659 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.5118, 2.4776, 2.8411, 3.2490, 3.1154, 2.8855, 3.1004, 1.8885], + device='cuda:0') +2024-07-27 14:17:37,005 INFO [train.py:1146] (0/4) Epoch 2, validation: loss=0.2471, simple_loss=0.3424, pruned_loss=0.07587, over 944034.00 frames. +2024-07-27 14:17:37,069 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 14:17:45,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.80 vs. limit=15.0 +2024-07-27 14:17:53,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25656.0, ans=0.125 +2024-07-27 14:17:54,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=25656.0, ans=0.005292173913043478 +2024-07-27 14:18:10,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=25669.333333333332, ans=0.0 +2024-07-27 14:18:14,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=25682.666666666668, ans=0.0 +2024-07-27 14:18:18,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=25696.0, ans=0.005283478260869565 +2024-07-27 14:18:18,627 INFO [train.py:1114] (0/4) Epoch 2, batch 9050, loss[loss=0.3059, simple_loss=0.356, pruned_loss=0.1279, over 4499.00 frames. ], tot_loss[loss=0.3091, simple_loss=0.3691, pruned_loss=0.1245, over 934111.21 frames. ], batch size: 10, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:18:20,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=25696.0, ans=0.0 +2024-07-27 14:18:22,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=25696.0, ans=0.025 +2024-07-27 14:18:23,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=25696.0, ans=0.125 +2024-07-27 14:18:48,206 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.394e+01 6.425e+01 6.926e+01 7.624e+01 1.076e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 14:18:50,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25736.0, ans=0.0 +2024-07-27 14:18:53,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=25749.333333333332, ans=0.125 +2024-07-27 14:18:58,024 INFO [train.py:1114] (0/4) Epoch 2, batch 9100, loss[loss=0.2891, simple_loss=0.3646, pruned_loss=0.1068, over 4920.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.3688, pruned_loss=0.1242, over 936773.95 frames. 
], batch size: 14, lr: 2.65e-02, grad_scale: 128.0 +2024-07-27 14:19:05,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=25776.0, ans=0.125 +2024-07-27 14:19:19,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=25802.666666666668, ans=0.125 +2024-07-27 14:19:20,472 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.200e+01 +2024-07-27 14:19:24,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25802.666666666668, ans=0.1 +2024-07-27 14:19:32,217 INFO [train.py:1114] (0/4) Epoch 2, batch 9150, loss[loss=0.3693, simple_loss=0.4238, pruned_loss=0.1574, over 4811.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3695, pruned_loss=0.1247, over 935482.10 frames. ], batch size: 14, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:19:41,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.99 vs. limit=15.0 +2024-07-27 14:19:50,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.76 vs. limit=22.5 +2024-07-27 14:19:53,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=25856.0, ans=0.005248695652173913 +2024-07-27 14:19:55,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=25856.0, ans=0.125 +2024-07-27 14:20:00,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=25869.333333333332, ans=0.125 +2024-07-27 14:20:02,452 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.426e+01 6.682e+01 7.261e+01 8.100e+01 1.344e+02, threshold=1.452e+02, percent-clipped=0.0 +2024-07-27 14:20:05,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=25882.666666666668, ans=0.125 +2024-07-27 14:20:06,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25882.666666666668, ans=0.125 +2024-07-27 14:20:11,596 INFO [train.py:1114] (0/4) Epoch 2, batch 9200, loss[loss=0.2693, simple_loss=0.3389, pruned_loss=0.09981, over 4843.00 frames. ], tot_loss[loss=0.3094, simple_loss=0.3691, pruned_loss=0.1248, over 937527.64 frames. ], batch size: 12, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:20:15,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=25896.0, ans=0.0 +2024-07-27 14:20:18,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=25909.333333333332, ans=0.035 +2024-07-27 14:20:35,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.30 vs. limit=15.0 +2024-07-27 14:20:36,429 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:20:44,400 INFO [train.py:1114] (0/4) Epoch 2, batch 9250, loss[loss=0.3544, simple_loss=0.3882, pruned_loss=0.1603, over 4637.00 frames. 
], tot_loss[loss=0.3082, simple_loss=0.3683, pruned_loss=0.1241, over 938107.05 frames. ], batch size: 13, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:20:45,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25962.666666666668, ans=0.1 +2024-07-27 14:20:45,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=25962.666666666668, ans=0.125 +2024-07-27 14:20:47,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.57 vs. limit=22.5 +2024-07-27 14:20:49,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25962.666666666668, ans=0.125 +2024-07-27 14:20:57,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=22.5 +2024-07-27 14:20:59,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=25989.333333333332, ans=0.125 +2024-07-27 14:21:04,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=26002.666666666668, ans=0.125 +2024-07-27 14:21:07,929 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.326e+01 6.900e+01 7.743e+01 1.339e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-27 14:21:17,299 INFO [train.py:1114] (0/4) Epoch 2, batch 9300, loss[loss=0.2663, simple_loss=0.3341, pruned_loss=0.0993, over 4780.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3678, pruned_loss=0.1238, over 937984.24 frames. ], batch size: 12, lr: 2.64e-02, grad_scale: 64.0 +2024-07-27 14:21:19,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.91 vs. limit=15.0 +2024-07-27 14:21:22,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=26029.333333333332, ans=0.0 +2024-07-27 14:21:28,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=26042.666666666668, ans=0.05 +2024-07-27 14:21:30,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=26056.0, ans=0.2 +2024-07-27 14:21:41,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.58 vs. limit=10.0 +2024-07-27 14:21:49,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=26096.0, ans=22.5 +2024-07-27 14:21:49,586 INFO [train.py:1114] (0/4) Epoch 2, batch 9350, loss[loss=0.3325, simple_loss=0.3598, pruned_loss=0.1526, over 4803.00 frames. ], tot_loss[loss=0.3094, simple_loss=0.3689, pruned_loss=0.1249, over 934586.99 frames. 
], batch size: 11, lr: 2.64e-02, grad_scale: 64.0 +2024-07-27 14:21:56,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=26096.0, ans=0.125 +2024-07-27 14:22:02,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=26109.333333333332, ans=0.2 +2024-07-27 14:22:05,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26122.666666666668, ans=0.1 +2024-07-27 14:22:13,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=26136.0, ans=0.025 +2024-07-27 14:22:16,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.512e+01 7.104e+01 8.363e+01 3.070e+02, threshold=1.421e+02, percent-clipped=1.0 +2024-07-27 14:22:26,800 INFO [train.py:1114] (0/4) Epoch 2, batch 9400, loss[loss=0.3676, simple_loss=0.4247, pruned_loss=0.1552, over 4702.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3695, pruned_loss=0.1255, over 932898.45 frames. ], batch size: 13, lr: 2.64e-02, grad_scale: 64.0 +2024-07-27 14:22:27,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=26162.666666666668, ans=0.0 +2024-07-27 14:22:30,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-07-27 14:22:33,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26176.0, ans=0.1 +2024-07-27 14:22:40,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=26189.333333333332, ans=0.0 +2024-07-27 14:22:58,566 INFO [train.py:1114] (0/4) Epoch 2, batch 9450, loss[loss=0.2356, simple_loss=0.2963, pruned_loss=0.08746, over 4794.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3696, pruned_loss=0.125, over 932391.59 frames. ], batch size: 11, lr: 2.63e-02, grad_scale: 64.0 +2024-07-27 14:23:01,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26229.333333333332, ans=0.1 +2024-07-27 14:23:03,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=26229.333333333332, ans=0.1 +2024-07-27 14:23:20,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=23.42 vs. 
limit=22.5 +2024-07-27 14:23:21,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26269.333333333332, ans=0.1 +2024-07-27 14:23:23,993 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.999e+01 6.119e+01 6.627e+01 7.680e+01 1.096e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-27 14:23:24,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=26269.333333333332, ans=0.125 +2024-07-27 14:23:24,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=26269.333333333332, ans=0.125 +2024-07-27 14:23:32,699 INFO [train.py:1114] (0/4) Epoch 2, batch 9500, loss[loss=0.278, simple_loss=0.3337, pruned_loss=0.1111, over 4711.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3678, pruned_loss=0.123, over 934765.58 frames. ], batch size: 12, lr: 2.63e-02, grad_scale: 64.0 +2024-07-27 14:23:36,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=26296.0, ans=0.00515304347826087 +2024-07-27 14:23:38,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=26309.333333333332, ans=0.125 +2024-07-27 14:23:46,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=26322.666666666668, ans=0.025 +2024-07-27 14:23:49,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=26322.666666666668, ans=0.125 +2024-07-27 14:23:54,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=15.0 +2024-07-27 14:23:56,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26336.0, ans=0.125 +2024-07-27 14:24:00,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26349.333333333332, ans=0.125 +2024-07-27 14:24:01,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=26349.333333333332, ans=0.0 +2024-07-27 14:24:05,368 INFO [train.py:1114] (0/4) Epoch 2, batch 9550, loss[loss=0.2926, simple_loss=0.353, pruned_loss=0.1161, over 4781.00 frames. ], tot_loss[loss=0.3067, simple_loss=0.3676, pruned_loss=0.1229, over 931962.09 frames. ], batch size: 12, lr: 2.63e-02, grad_scale: 64.0 +2024-07-27 14:24:16,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=26362.666666666668, ans=0.125 +2024-07-27 14:24:23,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.85 vs. limit=15.0 +2024-07-27 14:24:37,641 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.234e+01 6.396e+01 7.111e+01 8.222e+01 1.095e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 14:24:50,618 INFO [train.py:1114] (0/4) Epoch 2, batch 9600, loss[loss=0.4107, simple_loss=0.4419, pruned_loss=0.1898, over 3378.00 frames. 
], tot_loss[loss=0.3074, simple_loss=0.3685, pruned_loss=0.1232, over 930660.89 frames. ], batch size: 36, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:25:05,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. limit=15.0 +2024-07-27 14:25:08,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=26442.666666666668, ans=0.125 +2024-07-27 14:25:13,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=26456.0, ans=0.025 +2024-07-27 14:25:14,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=26456.0, ans=0.0 +2024-07-27 14:25:29,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26469.333333333332, ans=0.1 +2024-07-27 14:25:34,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.86 vs. limit=15.0 +2024-07-27 14:25:40,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.87 vs. limit=15.0 +2024-07-27 14:25:48,571 INFO [train.py:1114] (0/4) Epoch 2, batch 9650, loss[loss=0.2936, simple_loss=0.3535, pruned_loss=0.1168, over 4835.00 frames. ], tot_loss[loss=0.3092, simple_loss=0.3699, pruned_loss=0.1243, over 926581.88 frames. ], batch size: 16, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:26:11,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=26536.0, ans=0.125 +2024-07-27 14:26:14,867 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.044e+01 6.379e+01 6.976e+01 8.027e+01 1.621e+02, threshold=1.395e+02, percent-clipped=2.0 +2024-07-27 14:26:25,380 INFO [train.py:1114] (0/4) Epoch 2, batch 9700, loss[loss=0.3404, simple_loss=0.4024, pruned_loss=0.1391, over 4319.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.3693, pruned_loss=0.1238, over 924617.53 frames. ], batch size: 26, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:26:28,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=26562.666666666668, ans=0.2 +2024-07-27 14:26:37,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=26576.0, ans=0.2 +2024-07-27 14:26:57,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26616.0, ans=0.125 +2024-07-27 14:27:00,094 INFO [train.py:1114] (0/4) Epoch 2, batch 9750, loss[loss=0.3883, simple_loss=0.4266, pruned_loss=0.175, over 4687.00 frames. ], tot_loss[loss=0.3083, simple_loss=0.369, pruned_loss=0.1238, over 925212.28 frames. ], batch size: 15, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:27:06,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.19 vs. 
limit=15.0 +2024-07-27 14:27:09,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=26642.666666666668, ans=0.09899494936611666 +2024-07-27 14:27:10,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=26642.666666666668, ans=0.0 +2024-07-27 14:27:15,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.50 vs. limit=22.5 +2024-07-27 14:27:16,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=26656.0, ans=0.025 +2024-07-27 14:27:17,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=26656.0, ans=0.1 +2024-07-27 14:27:18,671 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-20000.pt +2024-07-27 14:27:34,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=26669.333333333332, ans=0.2 +2024-07-27 14:27:37,705 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.232e+01 6.802e+01 7.534e+01 1.606e+02, threshold=1.360e+02, percent-clipped=1.0 +2024-07-27 14:27:39,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=26682.666666666668, ans=0.025 +2024-07-27 14:27:42,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=26682.666666666668, ans=0.005068985507246377 +2024-07-27 14:27:42,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=26682.666666666668, ans=0.005068985507246377 +2024-07-27 14:27:46,367 INFO [train.py:1114] (0/4) Epoch 2, batch 9800, loss[loss=0.2796, simple_loss=0.3441, pruned_loss=0.1075, over 4718.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3672, pruned_loss=0.1232, over 924957.17 frames. ], batch size: 12, lr: 2.61e-02, grad_scale: 64.0 +2024-07-27 14:27:55,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.45 vs. limit=6.0 +2024-07-27 14:27:58,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=26709.333333333332, ans=0.125 +2024-07-27 14:27:58,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=26722.666666666668, ans=0.125 +2024-07-27 14:28:08,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=26736.0, ans=0.025 +2024-07-27 14:28:11,980 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:28:17,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=26749.333333333332, ans=0.125 +2024-07-27 14:28:18,193 INFO [train.py:1114] (0/4) Epoch 2, batch 9850, loss[loss=0.3469, simple_loss=0.4067, pruned_loss=0.1435, over 4890.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3677, pruned_loss=0.1231, over 927188.88 frames. 
], batch size: 15, lr: 2.61e-02, grad_scale: 64.0 +2024-07-27 14:28:18,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=26762.666666666668, ans=0.07 +2024-07-27 14:28:31,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=26776.0, ans=0.125 +2024-07-27 14:28:37,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=26789.333333333332, ans=0.07 +2024-07-27 14:28:43,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=26802.666666666668, ans=0.0 +2024-07-27 14:28:48,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.332e+01 6.564e+01 7.229e+01 8.186e+01 1.183e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 14:28:57,016 INFO [train.py:1114] (0/4) Epoch 2, batch 9900, loss[loss=0.3347, simple_loss=0.3902, pruned_loss=0.1396, over 4838.00 frames. ], tot_loss[loss=0.3082, simple_loss=0.3685, pruned_loss=0.1239, over 926617.38 frames. ], batch size: 16, lr: 2.61e-02, grad_scale: 64.0 +2024-07-27 14:29:09,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=26856.0, ans=0.07 +2024-07-27 14:29:10,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=26856.0, ans=0.125 +2024-07-27 14:29:23,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.21 vs. limit=22.5 +2024-07-27 14:29:27,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=26882.666666666668, ans=0.2 +2024-07-27 14:29:28,778 INFO [train.py:1114] (0/4) Epoch 2, batch 9950, loss[loss=0.2752, simple_loss=0.3292, pruned_loss=0.1107, over 4809.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.3687, pruned_loss=0.1243, over 929328.69 frames. ], batch size: 11, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:29:32,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26896.0, ans=0.1 +2024-07-27 14:29:35,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=26896.0, ans=0.125 +2024-07-27 14:29:38,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26909.333333333332, ans=0.125 +2024-07-27 14:30:19,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=26922.666666666668, ans=0.2 +2024-07-27 14:30:27,311 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.388e+01 7.080e+01 7.845e+01 1.130e+02, threshold=1.416e+02, percent-clipped=0.0 +2024-07-27 14:30:36,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=26949.333333333332, ans=0.125 +2024-07-27 14:30:40,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. 
limit=15.0 +2024-07-27 14:30:41,810 INFO [train.py:1114] (0/4) Epoch 2, batch 10000, loss[loss=0.3144, simple_loss=0.3812, pruned_loss=0.1238, over 4632.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3725, pruned_loss=0.1258, over 926351.54 frames. ], batch size: 16, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:30:57,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=26976.0, ans=0.125 +2024-07-27 14:30:59,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.07 vs. limit=22.5 +2024-07-27 14:31:00,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=26989.333333333332, ans=0.125 +2024-07-27 14:31:01,589 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:31:05,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=27002.666666666668, ans=0.125 +2024-07-27 14:31:09,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=27002.666666666668, ans=0.125 +2024-07-27 14:31:10,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27002.666666666668, ans=0.1 +2024-07-27 14:31:11,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.88 vs. limit=15.0 +2024-07-27 14:31:24,565 INFO [train.py:1114] (0/4) Epoch 2, batch 10050, loss[loss=0.3965, simple_loss=0.4165, pruned_loss=0.1882, over 3632.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.3767, pruned_loss=0.1294, over 916443.35 frames. ], batch size: 35, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:31:32,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27029.333333333332, ans=0.125 +2024-07-27 14:31:34,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=27042.666666666668, ans=0.0 +2024-07-27 14:31:37,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.30 vs. limit=15.0 +2024-07-27 14:31:42,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.62 vs. limit=15.0 +2024-07-27 14:31:47,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27069.333333333332, ans=0.125 +2024-07-27 14:31:50,392 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.657e+01 6.782e+01 7.547e+01 8.673e+01 1.246e+02, threshold=1.509e+02, percent-clipped=0.0 +2024-07-27 14:31:51,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=27069.333333333332, ans=0.0 +2024-07-27 14:31:59,882 INFO [train.py:1114] (0/4) Epoch 2, batch 10100, loss[loss=0.4458, simple_loss=0.4519, pruned_loss=0.2199, over 3611.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.3849, pruned_loss=0.139, over 860931.83 frames. 
], batch size: 36, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:32:14,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=27122.666666666668, ans=0.0 +2024-07-27 14:32:18,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.68 vs. limit=15.0 +2024-07-27 14:32:26,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=15.0 +2024-07-27 14:32:30,033 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:32:33,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.33 vs. limit=6.0 +2024-07-27 14:32:36,641 INFO [train.py:1114] (0/4) Epoch 2, batch 10150, loss[loss=0.3141, simple_loss=0.3748, pruned_loss=0.1267, over 3492.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3902, pruned_loss=0.1451, over 820382.21 frames. ], batch size: 35, lr: 2.59e-02, grad_scale: 64.0 +2024-07-27 14:32:40,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=27162.666666666668, ans=0.0 +2024-07-27 14:32:40,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27162.666666666668, ans=0.1 +2024-07-27 14:32:42,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=27162.666666666668, ans=0.09899494936611666 +2024-07-27 14:32:44,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=27162.666666666668, ans=0.2 +2024-07-27 14:32:48,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27176.0, ans=0.125 +2024-07-27 14:32:50,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=27176.0, ans=0.125 +2024-07-27 14:33:01,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.77 vs. limit=5.0 +2024-07-27 14:33:07,434 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.821e+01 6.551e+01 7.013e+01 7.617e+01 1.384e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-27 14:33:11,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=11.09 vs. limit=10.0 +2024-07-27 14:33:13,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=27216.0, ans=0.0 +2024-07-27 14:33:15,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=27229.333333333332, ans=0.125 +2024-07-27 14:33:16,418 INFO [train.py:1114] (0/4) Epoch 2, batch 10200, loss[loss=0.4238, simple_loss=0.447, pruned_loss=0.2003, over 3807.00 frames. ], tot_loss[loss=0.3463, simple_loss=0.3933, pruned_loss=0.1496, over 789717.16 frames. 
], batch size: 36, lr: 2.59e-02, grad_scale: 64.0 +2024-07-27 14:33:21,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=27229.333333333332, ans=0.125 +2024-07-27 14:33:22,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=27242.666666666668, ans=0.0 +2024-07-27 14:33:30,804 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-2.pt +2024-07-27 14:34:23,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=27260.0, ans=0.0 +2024-07-27 14:34:23,836 INFO [train.py:1114] (0/4) Epoch 3, batch 0, loss[loss=0.2689, simple_loss=0.3296, pruned_loss=0.1041, over 4838.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3296, pruned_loss=0.1041, over 4838.00 frames. ], batch size: 12, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:34:23,837 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 14:34:35,399 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2558, simple_loss=0.3526, pruned_loss=0.07947, over 944034.00 frames. +2024-07-27 14:34:35,401 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 14:34:35,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.01 vs. limit=22.5 +2024-07-27 14:35:01,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.64 vs. limit=10.0 +2024-07-27 14:35:08,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=27286.666666666668, ans=0.0 +2024-07-27 14:35:12,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=27300.0, ans=0.025 +2024-07-27 14:35:13,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=27300.0, ans=0.125 +2024-07-27 14:35:15,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=27300.0, ans=0.2 +2024-07-27 14:35:18,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. limit=15.0 +2024-07-27 14:35:23,879 INFO [train.py:1114] (0/4) Epoch 3, batch 50, loss[loss=0.2502, simple_loss=0.3157, pruned_loss=0.09234, over 4631.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3765, pruned_loss=0.1278, over 206565.71 frames. ], batch size: 11, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:35:30,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=23.70 vs. 
limit=22.5 +2024-07-27 14:35:33,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.475e+01 6.508e+01 7.033e+01 7.791e+01 1.183e+02, threshold=1.407e+02, percent-clipped=0.0 +2024-07-27 14:35:37,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27340.0, ans=0.125 +2024-07-27 14:35:37,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=27340.0, ans=6.0 +2024-07-27 14:35:39,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=27340.0, ans=0.2 +2024-07-27 14:35:39,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=27353.333333333332, ans=0.5 +2024-07-27 14:35:48,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.35 vs. limit=15.0 +2024-07-27 14:35:55,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27380.0, ans=0.125 +2024-07-27 14:36:02,383 INFO [train.py:1114] (0/4) Epoch 3, batch 100, loss[loss=0.3083, simple_loss=0.37, pruned_loss=0.1233, over 4650.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3745, pruned_loss=0.1241, over 365600.37 frames. ], batch size: 12, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:36:11,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27406.666666666668, ans=0.1 +2024-07-27 14:36:12,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=27406.666666666668, ans=0.00491159420289855 +2024-07-27 14:36:18,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=27420.0, ans=0.125 +2024-07-27 14:36:33,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.96 vs. limit=15.0 +2024-07-27 14:36:37,621 INFO [train.py:1114] (0/4) Epoch 3, batch 150, loss[loss=0.2361, simple_loss=0.2939, pruned_loss=0.08909, over 4608.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3705, pruned_loss=0.1216, over 494267.99 frames. ], batch size: 11, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:36:47,463 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.350e+01 6.333e+01 7.071e+01 8.102e+01 1.073e+02, threshold=1.414e+02, percent-clipped=0.0 +2024-07-27 14:36:59,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=27486.666666666668, ans=0.025 +2024-07-27 14:37:02,342 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:37:06,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27500.0, ans=0.1 +2024-07-27 14:37:14,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=27526.666666666668, ans=0.125 +2024-07-27 14:37:14,976 INFO [train.py:1114] (0/4) Epoch 3, batch 200, loss[loss=0.3134, simple_loss=0.3747, pruned_loss=0.1261, over 4523.00 frames. 
], tot_loss[loss=0.3051, simple_loss=0.3686, pruned_loss=0.1208, over 593792.11 frames. ], batch size: 21, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:37:20,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27526.666666666668, ans=0.1 +2024-07-27 14:37:27,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=27540.0, ans=0.004882608695652174 +2024-07-27 14:37:40,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=27566.666666666668, ans=0.0 +2024-07-27 14:37:45,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.69 vs. limit=22.5 +2024-07-27 14:37:48,686 INFO [train.py:1114] (0/4) Epoch 3, batch 250, loss[loss=0.2743, simple_loss=0.3468, pruned_loss=0.1009, over 4644.00 frames. ], tot_loss[loss=0.3045, simple_loss=0.3677, pruned_loss=0.1207, over 670794.06 frames. ], batch size: 16, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:37:52,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=27593.333333333332, ans=0.125 +2024-07-27 14:37:57,411 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.405e+01 7.025e+01 7.906e+01 1.155e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-27 14:38:00,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27606.666666666668, ans=0.125 +2024-07-27 14:38:10,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=27633.333333333332, ans=0.125 +2024-07-27 14:38:16,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.73 vs. limit=15.0 +2024-07-27 14:38:22,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27646.666666666668, ans=0.125 +2024-07-27 14:38:24,174 INFO [train.py:1114] (0/4) Epoch 3, batch 300, loss[loss=0.3544, simple_loss=0.4046, pruned_loss=0.1521, over 4802.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.3669, pruned_loss=0.1202, over 730422.55 frames. 
], batch size: 15, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:38:24,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=27660.0, ans=0.004856521739130435 +2024-07-27 14:38:28,201 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:38:30,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=27673.333333333332, ans=0.025 +2024-07-27 14:38:52,919 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=9.825e+00 +2024-07-27 14:38:54,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=27700.0, ans=0.0 +2024-07-27 14:38:54,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27700.0, ans=0.1 +2024-07-27 14:39:09,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=27713.333333333332, ans=0.125 +2024-07-27 14:39:11,991 INFO [train.py:1114] (0/4) Epoch 3, batch 350, loss[loss=0.2605, simple_loss=0.3214, pruned_loss=0.09983, over 4938.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3668, pruned_loss=0.1198, over 776532.84 frames. ], batch size: 12, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:39:21,248 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.509e+01 6.194e+01 6.978e+01 7.817e+01 1.142e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 14:39:22,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.37 vs. limit=22.5 +2024-07-27 14:39:26,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=27753.333333333332, ans=0.2 +2024-07-27 14:39:27,769 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0 +2024-07-27 14:39:43,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=27780.0, ans=0.0 +2024-07-27 14:39:44,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 14:39:44,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-27 14:39:53,820 INFO [train.py:1114] (0/4) Epoch 3, batch 400, loss[loss=0.2933, simple_loss=0.3718, pruned_loss=0.1074, over 4693.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3652, pruned_loss=0.1187, over 813801.82 frames. ], batch size: 13, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:39:54,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.12 vs. 
limit=15.0 +2024-07-27 14:39:56,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=27793.333333333332, ans=0.125 +2024-07-27 14:40:02,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27806.666666666668, ans=0.1 +2024-07-27 14:40:04,967 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:40:19,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=27833.333333333332, ans=0.025 +2024-07-27 14:40:29,610 INFO [train.py:1114] (0/4) Epoch 3, batch 450, loss[loss=0.2796, simple_loss=0.3472, pruned_loss=0.106, over 4638.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3649, pruned_loss=0.1189, over 839154.03 frames. ], batch size: 13, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:40:39,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27873.333333333332, ans=0.1 +2024-07-27 14:40:40,712 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.175e+01 6.156e+01 6.961e+01 7.854e+01 1.209e+02, threshold=1.392e+02, percent-clipped=0.0 +2024-07-27 14:40:40,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=27873.333333333332, ans=0.125 +2024-07-27 14:40:50,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=27886.666666666668, ans=0.125 +2024-07-27 14:40:56,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=27900.0, ans=15.0 +2024-07-27 14:40:58,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-27 14:40:59,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27900.0, ans=0.125 +2024-07-27 14:41:05,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=27913.333333333332, ans=0.125 +2024-07-27 14:41:09,145 INFO [train.py:1114] (0/4) Epoch 3, batch 500, loss[loss=0.3582, simple_loss=0.424, pruned_loss=0.1462, over 4684.00 frames. ], tot_loss[loss=0.2999, simple_loss=0.3633, pruned_loss=0.1182, over 861573.81 frames. ], batch size: 15, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:41:12,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.02 vs. limit=15.0 +2024-07-27 14:41:14,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.35 vs. limit=15.0 +2024-07-27 14:41:21,474 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:41:40,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=27980.0, ans=0.025 +2024-07-27 14:41:48,917 INFO [train.py:1114] (0/4) Epoch 3, batch 550, loss[loss=0.2883, simple_loss=0.3658, pruned_loss=0.1054, over 4583.00 frames. 
], tot_loss[loss=0.2978, simple_loss=0.3623, pruned_loss=0.1167, over 877747.82 frames. ], batch size: 17, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:41:59,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.181e+01 6.683e+01 7.809e+01 1.184e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-27 14:42:31,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.29 vs. limit=15.0 +2024-07-27 14:42:34,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=28033.333333333332, ans=0.0 +2024-07-27 14:42:35,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=28046.666666666668, ans=0.125 +2024-07-27 14:42:40,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=28046.666666666668, ans=0.004772463768115942 +2024-07-27 14:42:40,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.54 vs. limit=22.5 +2024-07-27 14:42:46,419 INFO [train.py:1114] (0/4) Epoch 3, batch 600, loss[loss=0.3317, simple_loss=0.3897, pruned_loss=0.1369, over 4621.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3615, pruned_loss=0.116, over 892419.82 frames. ], batch size: 16, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:42:50,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28060.0, ans=0.1 +2024-07-27 14:43:04,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=28073.333333333332, ans=0.0 +2024-07-27 14:43:12,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=28086.666666666668, ans=15.0 +2024-07-27 14:43:19,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=28100.0, ans=0.0 +2024-07-27 14:43:25,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=12.0 +2024-07-27 14:43:30,830 INFO [train.py:1114] (0/4) Epoch 3, batch 650, loss[loss=0.2689, simple_loss=0.3476, pruned_loss=0.0951, over 4762.00 frames. ], tot_loss[loss=0.2964, simple_loss=0.3604, pruned_loss=0.1161, over 903890.30 frames. 
], batch size: 13, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:43:38,049 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:43:39,817 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.211e+01 6.879e+01 7.737e+01 1.031e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 14:43:41,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28140.0, ans=0.125 +2024-07-27 14:43:52,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=28153.333333333332, ans=0.004749275362318841 +2024-07-27 14:43:52,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=28153.333333333332, ans=0.07 +2024-07-27 14:43:54,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=28166.666666666668, ans=0.125 +2024-07-27 14:44:03,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=28166.666666666668, ans=0.125 +2024-07-27 14:44:20,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28180.0, ans=0.1 +2024-07-27 14:44:22,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=28180.0, ans=0.125 +2024-07-27 14:44:23,649 INFO [train.py:1114] (0/4) Epoch 3, batch 700, loss[loss=0.3104, simple_loss=0.3606, pruned_loss=0.1301, over 4640.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3599, pruned_loss=0.1158, over 911983.45 frames. ], batch size: 12, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:44:34,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=28206.666666666668, ans=0.2 +2024-07-27 14:44:45,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=28206.666666666668, ans=10.0 +2024-07-27 14:45:10,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=28220.0, ans=0.125 +2024-07-27 14:45:10,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=28220.0, ans=0.0 +2024-07-27 14:45:24,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=28246.666666666668, ans=0.004728985507246377 +2024-07-27 14:45:42,574 INFO [train.py:1114] (0/4) Epoch 3, batch 750, loss[loss=0.3515, simple_loss=0.4093, pruned_loss=0.1468, over 4694.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3594, pruned_loss=0.1153, over 918263.40 frames. 
], batch size: 13, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:45:59,772 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:46:02,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28260.0, ans=0.0 +2024-07-27 14:46:08,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.231e+01 6.433e+01 7.255e+01 8.187e+01 1.605e+02, threshold=1.451e+02, percent-clipped=1.0 +2024-07-27 14:46:17,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28286.666666666668, ans=0.1 +2024-07-27 14:46:19,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28286.666666666668, ans=0.1 +2024-07-27 14:46:19,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=28286.666666666668, ans=0.125 +2024-07-27 14:46:21,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=28286.666666666668, ans=0.0 +2024-07-27 14:46:39,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.32 vs. limit=22.5 +2024-07-27 14:46:49,688 INFO [train.py:1114] (0/4) Epoch 3, batch 800, loss[loss=0.3457, simple_loss=0.3933, pruned_loss=0.149, over 4854.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3611, pruned_loss=0.1171, over 923410.39 frames. ], batch size: 12, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:47:07,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=28326.666666666668, ans=0.004711594202898551 +2024-07-27 14:47:13,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=28340.0, ans=0.0047086956521739136 +2024-07-27 14:47:51,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28366.666666666668, ans=0.1 +2024-07-27 14:48:04,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28380.0, ans=0.1 +2024-07-27 14:48:09,324 INFO [train.py:1114] (0/4) Epoch 3, batch 850, loss[loss=0.2838, simple_loss=0.3593, pruned_loss=0.1042, over 4671.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.362, pruned_loss=0.1182, over 927391.62 frames. ], batch size: 14, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:48:21,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.419e+01 6.895e+01 7.641e+01 1.957e+02, threshold=1.379e+02, percent-clipped=1.0 +2024-07-27 14:48:29,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.05 vs. limit=15.0 +2024-07-27 14:48:50,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=28446.666666666668, ans=0.2 +2024-07-27 14:48:52,060 INFO [train.py:1114] (0/4) Epoch 3, batch 900, loss[loss=0.24, simple_loss=0.3042, pruned_loss=0.08786, over 4857.00 frames. ], tot_loss[loss=0.2981, simple_loss=0.3613, pruned_loss=0.1174, over 928015.48 frames. 
], batch size: 12, lr: 2.41e-02, grad_scale: 64.0 +2024-07-27 14:49:30,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0 +2024-07-27 14:49:34,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=28486.666666666668, ans=0.09899494936611666 +2024-07-27 14:49:53,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.81 vs. limit=15.0 +2024-07-27 14:50:08,971 INFO [train.py:1114] (0/4) Epoch 3, batch 950, loss[loss=0.2401, simple_loss=0.3162, pruned_loss=0.08205, over 4773.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3603, pruned_loss=0.1172, over 929427.21 frames. ], batch size: 12, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:50:11,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=28526.666666666668, ans=0.0 +2024-07-27 14:50:21,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28540.0, ans=0.1 +2024-07-27 14:50:23,821 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.236e+01 6.152e+01 6.859e+01 7.763e+01 1.125e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 14:50:31,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28553.333333333332, ans=0.1 +2024-07-27 14:50:36,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=28566.666666666668, ans=0.07 +2024-07-27 14:50:37,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=28566.666666666668, ans=0.2 +2024-07-27 14:50:41,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.86 vs. limit=15.0 +2024-07-27 14:51:03,513 INFO [train.py:1114] (0/4) Epoch 3, batch 1000, loss[loss=0.2563, simple_loss=0.333, pruned_loss=0.08975, over 4967.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3617, pruned_loss=0.1176, over 929008.38 frames. ], batch size: 13, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:51:07,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.26 vs. limit=12.0 +2024-07-27 14:51:23,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=28606.666666666668, ans=0.125 +2024-07-27 14:51:28,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28620.0, ans=0.1 +2024-07-27 14:51:33,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=28633.333333333332, ans=0.125 +2024-07-27 14:51:54,882 INFO [train.py:1114] (0/4) Epoch 3, batch 1050, loss[loss=0.3401, simple_loss=0.4055, pruned_loss=0.1373, over 4871.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3627, pruned_loss=0.1178, over 931544.73 frames. 
], batch size: 14, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:52:03,820 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:52:06,686 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.090e+01 6.458e+01 7.095e+01 7.722e+01 9.914e+01, threshold=1.419e+02, percent-clipped=0.0 +2024-07-27 14:52:07,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28673.333333333332, ans=0.1 +2024-07-27 14:52:14,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=28686.666666666668, ans=0.2 +2024-07-27 14:52:25,388 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.709e-02 +2024-07-27 14:52:25,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=28686.666666666668, ans=0.125 +2024-07-27 14:52:28,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=28686.666666666668, ans=0.004633333333333333 +2024-07-27 14:52:30,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.94 vs. limit=22.5 +2024-07-27 14:52:53,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.64 vs. limit=22.5 +2024-07-27 14:52:53,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28726.666666666668, ans=0.1 +2024-07-27 14:52:54,289 INFO [train.py:1114] (0/4) Epoch 3, batch 1100, loss[loss=0.3315, simple_loss=0.3866, pruned_loss=0.1382, over 4903.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3614, pruned_loss=0.1177, over 934051.89 frames. ], batch size: 13, lr: 2.40e-02, grad_scale: 128.0 +2024-07-27 14:53:19,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=28740.0, ans=0.125 +2024-07-27 14:54:04,140 INFO [train.py:1114] (0/4) Epoch 3, batch 1150, loss[loss=0.2962, simple_loss=0.364, pruned_loss=0.1142, over 4898.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3614, pruned_loss=0.1179, over 933814.33 frames. ], batch size: 13, lr: 2.40e-02, grad_scale: 128.0 +2024-07-27 14:54:08,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=28793.333333333332, ans=0.2 +2024-07-27 14:54:14,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.388e+01 6.303e+01 6.956e+01 7.734e+01 1.852e+02, threshold=1.391e+02, percent-clipped=1.0 +2024-07-27 14:54:18,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.48 vs. limit=15.0 +2024-07-27 14:54:20,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28820.0, ans=0.1 +2024-07-27 14:54:40,676 INFO [train.py:1114] (0/4) Epoch 3, batch 1200, loss[loss=0.2355, simple_loss=0.316, pruned_loss=0.0775, over 4876.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3623, pruned_loss=0.1178, over 933038.15 frames. 
], batch size: 14, lr: 2.40e-02, grad_scale: 64.0 +2024-07-27 14:54:53,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=28886.666666666668, ans=0.2 +2024-07-27 14:55:07,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=28900.0, ans=0.0 +2024-07-27 14:55:32,479 INFO [train.py:1114] (0/4) Epoch 3, batch 1250, loss[loss=0.3295, simple_loss=0.3828, pruned_loss=0.1381, over 4804.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.362, pruned_loss=0.1176, over 937259.41 frames. ], batch size: 15, lr: 2.40e-02, grad_scale: 64.0 +2024-07-27 14:55:36,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28926.666666666668, ans=0.1 +2024-07-27 14:55:36,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.76 vs. limit=12.0 +2024-07-27 14:55:39,398 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.042e+01 +2024-07-27 14:55:41,891 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 6.356e+01 6.895e+01 7.489e+01 1.286e+02, threshold=1.379e+02, percent-clipped=0.0 +2024-07-27 14:56:02,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-07-27 14:56:04,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=28966.666666666668, ans=0.125 +2024-07-27 14:56:07,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=28980.0, ans=0.004569565217391304 +2024-07-27 14:56:12,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.76 vs. limit=22.5 +2024-07-27 14:56:15,701 INFO [train.py:1114] (0/4) Epoch 3, batch 1300, loss[loss=0.3438, simple_loss=0.393, pruned_loss=0.1473, over 4705.00 frames. ], tot_loss[loss=0.296, simple_loss=0.36, pruned_loss=0.116, over 939197.13 frames. ], batch size: 19, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:56:31,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.74 vs. limit=15.0 +2024-07-27 14:56:56,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=29033.333333333332, ans=0.025 +2024-07-27 14:57:05,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=29046.666666666668, ans=0.004555072463768115 +2024-07-27 14:57:16,157 INFO [train.py:1114] (0/4) Epoch 3, batch 1350, loss[loss=0.2846, simple_loss=0.3663, pruned_loss=0.1014, over 4755.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3596, pruned_loss=0.1157, over 941073.61 frames. ], batch size: 13, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:57:16,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.56 vs. 
limit=15.0 +2024-07-27 14:57:18,476 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-07-27 14:57:20,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=29060.0, ans=0.125 +2024-07-27 14:57:32,073 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.171e+01 6.881e+01 8.115e+01 1.166e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 14:57:45,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=29086.666666666668, ans=0.125 +2024-07-27 14:58:01,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=29113.333333333332, ans=0.125 +2024-07-27 14:58:06,123 INFO [train.py:1114] (0/4) Epoch 3, batch 1400, loss[loss=0.2867, simple_loss=0.3342, pruned_loss=0.1196, over 4699.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3595, pruned_loss=0.1159, over 943078.63 frames. ], batch size: 11, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:58:08,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=29126.666666666668, ans=0.2 +2024-07-27 14:58:12,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=29126.666666666668, ans=0.2 +2024-07-27 14:58:18,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=29140.0, ans=0.004534782608695652 +2024-07-27 14:58:18,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=29140.0, ans=0.125 +2024-07-27 14:58:27,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29153.333333333332, ans=0.125 +2024-07-27 14:58:30,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=29153.333333333332, ans=0.025 +2024-07-27 14:58:30,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=29153.333333333332, ans=0.0 +2024-07-27 14:58:33,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0 +2024-07-27 14:58:35,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=29153.333333333332, ans=0.0 +2024-07-27 14:58:40,833 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:59:02,331 INFO [train.py:1114] (0/4) Epoch 3, batch 1450, loss[loss=0.3148, simple_loss=0.3906, pruned_loss=0.1195, over 4692.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3598, pruned_loss=0.1156, over 943291.66 frames. 
], batch size: 15, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:59:03,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=29193.333333333332, ans=0.004523188405797102 +2024-07-27 14:59:18,813 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.399e+01 7.002e+01 7.900e+01 1.035e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 14:59:51,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=29246.666666666668, ans=0.05 +2024-07-27 14:59:53,880 INFO [train.py:1114] (0/4) Epoch 3, batch 1500, loss[loss=0.2899, simple_loss=0.3727, pruned_loss=0.1036, over 4806.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3602, pruned_loss=0.1159, over 942718.87 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 64.0 +2024-07-27 14:59:58,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.81 vs. limit=15.0 +2024-07-27 14:59:59,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.00 vs. limit=22.5 +2024-07-27 15:00:09,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=12.0 +2024-07-27 15:00:11,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=29286.666666666668, ans=0.004502898550724638 +2024-07-27 15:00:29,946 INFO [train.py:1114] (0/4) Epoch 3, batch 1550, loss[loss=0.3061, simple_loss=0.378, pruned_loss=0.1171, over 4893.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3596, pruned_loss=0.1158, over 939114.07 frames. ], batch size: 15, lr: 2.38e-02, grad_scale: 64.0 +2024-07-27 15:00:41,579 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.380e+01 6.188e+01 6.996e+01 8.008e+01 1.128e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 15:00:47,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=29353.333333333332, ans=0.125 +2024-07-27 15:01:23,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=29366.666666666668, ans=0.95 +2024-07-27 15:01:50,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=29380.0, ans=0.0 +2024-07-27 15:01:53,199 INFO [train.py:1114] (0/4) Epoch 3, batch 1600, loss[loss=0.273, simple_loss=0.3528, pruned_loss=0.09666, over 4870.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3589, pruned_loss=0.1148, over 937870.55 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 32.0 +2024-07-27 15:02:13,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.25 vs. limit=15.0 +2024-07-27 15:02:20,310 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.04 vs. limit=15.0 +2024-07-27 15:02:30,084 INFO [train.py:1114] (0/4) Epoch 3, batch 1650, loss[loss=0.2822, simple_loss=0.3633, pruned_loss=0.1005, over 4657.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.3595, pruned_loss=0.1156, over 937461.77 frames. 
], batch size: 14, lr: 2.38e-02, grad_scale: 32.0 +2024-07-27 15:02:30,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=29460.0, ans=0.1 +2024-07-27 15:02:32,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=29460.0, ans=0.95 +2024-07-27 15:02:40,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.917e+01 6.286e+01 6.723e+01 7.368e+01 1.143e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-27 15:03:05,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29486.666666666668, ans=0.1 +2024-07-27 15:03:05,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=29486.666666666668, ans=0.0 +2024-07-27 15:03:06,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=29486.666666666668, ans=0.2 +2024-07-27 15:03:27,878 INFO [train.py:1114] (0/4) Epoch 3, batch 1700, loss[loss=0.3007, simple_loss=0.3556, pruned_loss=0.1229, over 4691.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3586, pruned_loss=0.1146, over 939025.32 frames. ], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:03:35,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.69 vs. limit=15.0 +2024-07-27 15:03:36,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=29526.666666666668, ans=0.2 +2024-07-27 15:03:44,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=29553.333333333332, ans=0.0 +2024-07-27 15:03:47,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=29553.333333333332, ans=0.07 +2024-07-27 15:04:00,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=29566.666666666668, ans=0.2 +2024-07-27 15:04:14,458 INFO [train.py:1114] (0/4) Epoch 3, batch 1750, loss[loss=0.2785, simple_loss=0.3389, pruned_loss=0.1091, over 4801.00 frames. ], tot_loss[loss=0.2937, simple_loss=0.3585, pruned_loss=0.1144, over 939995.91 frames. 
], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:04:25,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=29593.333333333332, ans=0.025 +2024-07-27 15:04:27,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=29606.666666666668, ans=0.125 +2024-07-27 15:04:28,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29606.666666666668, ans=0.125 +2024-07-27 15:04:30,955 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.973e+01 6.164e+01 6.649e+01 7.575e+01 1.168e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 15:04:31,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=29606.666666666668, ans=0.125 +2024-07-27 15:04:32,869 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.91 vs. limit=22.5 +2024-07-27 15:05:15,343 INFO [train.py:1114] (0/4) Epoch 3, batch 1800, loss[loss=0.3342, simple_loss=0.3949, pruned_loss=0.1368, over 4636.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3589, pruned_loss=0.1145, over 940516.84 frames. ], batch size: 13, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:05:20,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=29660.0, ans=0.125 +2024-07-27 15:05:26,714 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:05:42,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=29713.333333333332, ans=0.125 +2024-07-27 15:05:47,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=29713.333333333332, ans=0.0 +2024-07-27 15:05:49,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.61 vs. limit=10.0 +2024-07-27 15:05:49,946 INFO [train.py:1114] (0/4) Epoch 3, batch 1850, loss[loss=0.314, simple_loss=0.3869, pruned_loss=0.1205, over 4808.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3577, pruned_loss=0.1135, over 940460.03 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:06:10,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=29740.0, ans=0.2 +2024-07-27 15:06:13,610 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 6.411e+01 6.989e+01 8.311e+01 1.252e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 15:06:20,910 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:06:24,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=29753.333333333332, ans=0.2 +2024-07-27 15:07:02,454 INFO [train.py:1114] (0/4) Epoch 3, batch 1900, loss[loss=0.2496, simple_loss=0.3244, pruned_loss=0.08734, over 4659.00 frames. ], tot_loss[loss=0.2919, simple_loss=0.3577, pruned_loss=0.113, over 941606.50 frames. 
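The many `INFO [scaling.py:214] ScheduledFloat: name=..., batch_count=..., ans=...` lines report hyperparameters (dropout probabilities, skip rates, bypass scale minima) that are functions of how many batches have been processed rather than constants. A plausible minimal reconstruction is piecewise-linear interpolation over (batch_count, value) breakpoints, held constant past the last one; the breakpoints in the example are made up for illustration, not the schedule used in this run.

```python
import bisect


class ScheduledFloat:
    """A float hyperparameter scheduled on batch_count (sketch).

    Assumes piecewise-linear interpolation between (batch_count, value)
    breakpoints, clamped outside the first and last points.
    """

    def __init__(self, *points):
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def value(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)


# e.g. a skip rate decaying from 0.5 to 0.0 over the first 20k batches
# (hypothetical numbers) would log ans=0.0 at the batch counts seen here:
conv_skip_rate = ScheduledFloat((0.0, 0.5), (20000.0, 0.0))
print(conv_skip_rate.value(28900.0))  # -> 0.0
```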
], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:08:04,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29846.666666666668, ans=0.125 +2024-07-27 15:08:05,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29846.666666666668, ans=0.1 +2024-07-27 15:08:06,620 INFO [train.py:1114] (0/4) Epoch 3, batch 1950, loss[loss=0.2991, simple_loss=0.3585, pruned_loss=0.1199, over 4894.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3593, pruned_loss=0.1137, over 943497.08 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:08:08,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29860.0, ans=0.1 +2024-07-27 15:08:18,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.35 vs. limit=15.0 +2024-07-27 15:08:18,972 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.436e+01 6.844e+01 7.392e+01 3.834e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-27 15:08:28,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29873.333333333332, ans=0.0 +2024-07-27 15:08:47,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29900.0, ans=0.1 +2024-07-27 15:08:56,183 INFO [train.py:1114] (0/4) Epoch 3, batch 2000, loss[loss=0.2406, simple_loss=0.3046, pruned_loss=0.08827, over 4791.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3598, pruned_loss=0.1143, over 940905.04 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:08:57,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=29926.666666666668, ans=0.0 +2024-07-27 15:09:13,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=29953.333333333332, ans=0.125 +2024-07-27 15:09:19,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=29966.666666666668, ans=0.0 +2024-07-27 15:09:23,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=29966.666666666668, ans=0.0 +2024-07-27 15:09:39,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=29980.0, ans=10.0 +2024-07-27 15:09:42,732 INFO [train.py:1114] (0/4) Epoch 3, batch 2050, loss[loss=0.2832, simple_loss=0.3482, pruned_loss=0.1091, over 4615.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3588, pruned_loss=0.1145, over 938966.03 frames. 
], batch size: 11, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:09:42,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29993.333333333332, ans=0.125 +2024-07-27 15:09:44,965 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.760e-02 +2024-07-27 15:09:52,893 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.283e+01 7.092e+01 8.463e+01 1.553e+02, threshold=1.418e+02, percent-clipped=1.0 +2024-07-27 15:10:07,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=30033.333333333332, ans=0.0 +2024-07-27 15:10:13,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=30046.666666666668, ans=0.0 +2024-07-27 15:10:14,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=30046.666666666668, ans=0.125 +2024-07-27 15:10:15,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30060.0, ans=0.0 +2024-07-27 15:10:16,508 INFO [train.py:1114] (0/4) Epoch 3, batch 2100, loss[loss=0.2505, simple_loss=0.321, pruned_loss=0.08996, over 4764.00 frames. ], tot_loss[loss=0.2924, simple_loss=0.3581, pruned_loss=0.1133, over 940962.40 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:10:18,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=30060.0, ans=0.125 +2024-07-27 15:10:20,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.76 vs. limit=15.0 +2024-07-27 15:10:31,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=30086.666666666668, ans=0.025 +2024-07-27 15:10:32,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.79 vs. limit=15.0 +2024-07-27 15:10:35,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=30086.666666666668, ans=0.2 +2024-07-27 15:10:50,457 INFO [train.py:1114] (0/4) Epoch 3, batch 2150, loss[loss=0.2321, simple_loss=0.3025, pruned_loss=0.08084, over 4900.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3573, pruned_loss=0.1131, over 944146.01 frames. ], batch size: 13, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:10:56,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=30126.666666666668, ans=0.07 +2024-07-27 15:11:01,325 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+01 6.161e+01 6.566e+01 7.305e+01 9.854e+01, threshold=1.313e+02, percent-clipped=0.0 +2024-07-27 15:11:08,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.10 vs. 
limit=22.5 +2024-07-27 15:11:22,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=30180.0, ans=0.125 +2024-07-27 15:11:24,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=30180.0, ans=0.125 +2024-07-27 15:11:26,655 INFO [train.py:1114] (0/4) Epoch 3, batch 2200, loss[loss=0.329, simple_loss=0.3927, pruned_loss=0.1326, over 4815.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3575, pruned_loss=0.1135, over 943217.83 frames. ], batch size: 14, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:11:30,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=30193.333333333332, ans=0.2 +2024-07-27 15:11:31,628 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:11:35,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0 +2024-07-27 15:11:36,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=30206.666666666668, ans=0.125 +2024-07-27 15:11:37,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30206.666666666668, ans=0.125 +2024-07-27 15:11:51,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=30233.333333333332, ans=0.004297101449275363 +2024-07-27 15:11:51,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=30233.333333333332, ans=0.125 +2024-07-27 15:11:56,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.76 vs. limit=15.0 +2024-07-27 15:12:05,474 INFO [train.py:1114] (0/4) Epoch 3, batch 2250, loss[loss=0.3384, simple_loss=0.3828, pruned_loss=0.147, over 4693.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3563, pruned_loss=0.1129, over 942213.03 frames. ], batch size: 13, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:12:16,113 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.175e+01 6.906e+01 7.852e+01 1.345e+02, threshold=1.381e+02, percent-clipped=1.0 +2024-07-27 15:12:16,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30273.333333333332, ans=0.0 +2024-07-27 15:12:35,103 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:12:37,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.83 vs. limit=15.0 +2024-07-27 15:12:46,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=30313.333333333332, ans=0.125 +2024-07-27 15:12:50,203 INFO [train.py:1114] (0/4) Epoch 3, batch 2300, loss[loss=0.2196, simple_loss=0.2963, pruned_loss=0.07145, over 4954.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3553, pruned_loss=0.1121, over 939781.77 frames. 
], batch size: 12, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:12:52,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.03 vs. limit=15.0 +2024-07-27 15:13:00,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=30340.0, ans=0.0 +2024-07-27 15:13:02,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.54 vs. limit=22.5 +2024-07-27 15:13:13,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=30366.666666666668, ans=0.0042681159420289855 +2024-07-27 15:13:16,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=30366.666666666668, ans=0.025 +2024-07-27 15:13:35,755 INFO [train.py:1114] (0/4) Epoch 3, batch 2350, loss[loss=0.248, simple_loss=0.3246, pruned_loss=0.08573, over 4640.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3556, pruned_loss=0.1124, over 942090.20 frames. ], batch size: 13, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:13:53,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=30406.666666666668, ans=0.09899494936611666 +2024-07-27 15:13:54,303 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+01 6.422e+01 7.140e+01 8.022e+01 1.675e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 15:13:57,851 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:14:05,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=30433.333333333332, ans=0.004253623188405798 +2024-07-27 15:14:18,295 INFO [train.py:1114] (0/4) Epoch 3, batch 2400, loss[loss=0.2697, simple_loss=0.3439, pruned_loss=0.09773, over 4646.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3571, pruned_loss=0.1135, over 941754.05 frames. ], batch size: 12, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:14:31,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=12.0 +2024-07-27 15:14:32,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=30460.0, ans=0.125 +2024-07-27 15:14:35,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=30473.333333333332, ans=0.125 +2024-07-27 15:14:35,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=30473.333333333332, ans=0.0 +2024-07-27 15:14:35,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=15.0 +2024-07-27 15:14:38,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30473.333333333332, ans=0.0 +2024-07-27 15:15:04,219 INFO [train.py:1114] (0/4) Epoch 3, batch 2450, loss[loss=0.2815, simple_loss=0.349, pruned_loss=0.107, over 4693.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3579, pruned_loss=0.1142, over 937272.96 frames. 
], batch size: 13, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:15:22,370 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.262e+01 6.488e+01 7.535e+01 9.077e+01 1.631e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 15:15:36,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=30566.666666666668, ans=0.125 +2024-07-27 15:15:46,319 INFO [train.py:1114] (0/4) Epoch 3, batch 2500, loss[loss=0.2929, simple_loss=0.3703, pruned_loss=0.1077, over 4808.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3581, pruned_loss=0.1144, over 939038.63 frames. ], batch size: 14, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:15:46,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30593.333333333332, ans=0.125 +2024-07-27 15:16:17,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=30646.666666666668, ans=15.0 +2024-07-27 15:16:23,933 INFO [train.py:1114] (0/4) Epoch 3, batch 2550, loss[loss=0.3111, simple_loss=0.3664, pruned_loss=0.1279, over 4802.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3573, pruned_loss=0.1136, over 938800.06 frames. ], batch size: 11, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:16:56,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=15.0 +2024-07-27 15:16:59,985 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.098e+01 6.217e+01 6.996e+01 7.708e+01 1.283e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 15:17:20,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=15.0 +2024-07-27 15:17:30,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30713.333333333332, ans=0.1 +2024-07-27 15:17:31,978 INFO [train.py:1114] (0/4) Epoch 3, batch 2600, loss[loss=0.257, simple_loss=0.3338, pruned_loss=0.09003, over 4900.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3578, pruned_loss=0.1134, over 937503.86 frames. ], batch size: 13, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:17:54,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=30766.666666666668, ans=0.125 +2024-07-27 15:18:00,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=30766.666666666668, ans=0.125 +2024-07-27 15:18:00,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.90 vs. limit=22.5 +2024-07-27 15:18:02,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=30780.0, ans=0.004178260869565217 +2024-07-27 15:18:03,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=30780.0, ans=12.0 +2024-07-27 15:18:08,654 INFO [train.py:1114] (0/4) Epoch 3, batch 2650, loss[loss=0.3868, simple_loss=0.4241, pruned_loss=0.1747, over 4643.00 frames. ], tot_loss[loss=0.2927, simple_loss=0.3583, pruned_loss=0.1135, over 939413.46 frames. 
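The `Whitening: name=..., num_groups=..., num_channels=..., metric=... vs. limit=...` lines track how far a module's activations are from having a white (isotropic) covariance; a penalty applies only while the metric exceeds the limit. One plausible formulation of such a metric, offered here as a reconstruction rather than the exact `scaling.py` computation, is the ratio of the mean squared eigenvalue of the per-group feature covariance to the squared mean eigenvalue: it equals 1.0 for perfectly white features and grows as variance concentrates in a few directions.

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Whiteness of the feature covariance, >= 1.0, == 1.0 when isotropic.

    Sketch: metric = E[eig^2] / (E[eig])^2 per group, computed via traces
    so no eigendecomposition is needed. A reconstruction of the quantity
    in the "metric=... vs. limit=..." lines, not the verbatim formula.
    """
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngi,ngj->gij", x, x) / num_frames  # (groups, d, d)
    d = cov.shape[-1]
    mean_eig = torch.diagonal(cov, dim1=-2, dim2=-1).sum(-1) / d  # tr(C)/d
    mean_eig_sq = (cov * cov).sum(dim=(-2, -1)) / d               # tr(C^2)/d
    return (mean_eig_sq / mean_eig.pow(2)).mean().item()


x = torch.randn(10000, 384)
print(whitening_metric(x))  # close to 1.0 for (near-)white features
print(whitening_metric(x * torch.linspace(0.1, 3.0, 384)))  # clearly larger
```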
], batch size: 16, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:18:12,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.50 vs. limit=22.5 +2024-07-27 15:18:28,833 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.212e+01 6.736e+01 7.183e+01 9.052e+01, threshold=1.347e+02, percent-clipped=0.0 +2024-07-27 15:18:53,033 INFO [train.py:1114] (0/4) Epoch 3, batch 2700, loss[loss=0.3108, simple_loss=0.3805, pruned_loss=0.1206, over 4741.00 frames. ], tot_loss[loss=0.2937, simple_loss=0.3594, pruned_loss=0.114, over 939794.21 frames. ], batch size: 14, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:19:01,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.37 vs. limit=15.0 +2024-07-27 15:19:11,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=30873.333333333332, ans=0.0 +2024-07-27 15:19:31,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30913.333333333332, ans=0.1 +2024-07-27 15:19:34,127 INFO [train.py:1114] (0/4) Epoch 3, batch 2750, loss[loss=0.2665, simple_loss=0.3277, pruned_loss=0.1027, over 4709.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3576, pruned_loss=0.1135, over 939414.88 frames. ], batch size: 12, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:19:36,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.22 vs. limit=15.0 +2024-07-27 15:19:37,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=30926.666666666668, ans=0.2 +2024-07-27 15:19:37,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30926.666666666668, ans=0.1 +2024-07-27 15:19:43,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=30940.0, ans=0.2 +2024-07-27 15:19:44,301 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 6.371e+01 6.868e+01 7.779e+01 1.190e+02, threshold=1.374e+02, percent-clipped=0.0 +2024-07-27 15:19:49,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=12.0 +2024-07-27 15:19:51,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30953.333333333332, ans=0.1 +2024-07-27 15:20:02,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.19 vs. limit=22.5 +2024-07-27 15:20:03,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=30980.0, ans=0.0 +2024-07-27 15:20:06,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.47 vs. limit=15.0 +2024-07-27 15:20:08,702 INFO [train.py:1114] (0/4) Epoch 3, batch 2800, loss[loss=0.426, simple_loss=0.4396, pruned_loss=0.2062, over 3360.00 frames. 
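Each `train.py:1114` summary reports three numbers: `simple_loss` (a cheap full-sum transducer loss over a simplified joiner), `pruned_loss` (the exact transducer loss evaluated only inside a pruned band of the alignment lattice), and `loss`, their weighted combination. The printed values here satisfy loss ≈ 0.5 × simple_loss + pruned_loss, e.g. for batch 2800 just above: 0.5 × 0.4396 + 0.2062 = 0.4260, matching loss=0.426. The inferred scales of 0.5 and 1.0 are read off these numbers, not from the training config (in icefall the simple-loss scale typically decays during warm-up):

```python
# Relationship observed in these summaries (scales inferred, not configured):
SIMPLE_LOSS_SCALE = 0.5
PRUNED_LOSS_SCALE = 1.0

def combined_loss(simple_loss: float, pruned_loss: float) -> float:
    return SIMPLE_LOSS_SCALE * simple_loss + PRUNED_LOSS_SCALE * pruned_loss

# batch 2800: loss=0.426, simple_loss=0.4396, pruned_loss=0.2062
assert abs(combined_loss(0.4396, 0.2062) - 0.426) < 1e-3
```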
], tot_loss[loss=0.2942, simple_loss=0.3589, pruned_loss=0.1147, over 937467.00 frames. ], batch size: 35, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:20:10,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.15 vs. limit=15.0 +2024-07-27 15:20:19,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31006.666666666668, ans=0.1 +2024-07-27 15:20:20,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=31006.666666666668, ans=6.0 +2024-07-27 15:20:26,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=31020.0, ans=0.07 +2024-07-27 15:20:40,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=31046.666666666668, ans=0.125 +2024-07-27 15:20:49,902 INFO [train.py:1114] (0/4) Epoch 3, batch 2850, loss[loss=0.3204, simple_loss=0.3723, pruned_loss=0.1343, over 4964.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3599, pruned_loss=0.1158, over 935921.16 frames. ], batch size: 13, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:20:53,573 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:21:00,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=31073.333333333332, ans=0.125 +2024-07-27 15:21:01,747 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.366e+01 6.414e+01 6.981e+01 8.121e+01 1.632e+02, threshold=1.396e+02, percent-clipped=1.0 +2024-07-27 15:21:10,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.68 vs. limit=12.0 +2024-07-27 15:21:11,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.08 vs. limit=22.5 +2024-07-27 15:21:43,083 INFO [train.py:1114] (0/4) Epoch 3, batch 2900, loss[loss=0.3, simple_loss=0.3609, pruned_loss=0.1196, over 4840.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.3604, pruned_loss=0.1151, over 939824.98 frames. ], batch size: 13, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:21:51,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=31126.666666666668, ans=0.2 +2024-07-27 15:21:56,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=31140.0, ans=0.0 +2024-07-27 15:21:58,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.76 vs. limit=15.0 +2024-07-27 15:22:04,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=31153.333333333332, ans=0.125 +2024-07-27 15:22:05,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-07-27 15:22:05,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. 
limit=6.0 +2024-07-27 15:22:09,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=31153.333333333332, ans=0.125 +2024-07-27 15:22:10,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=31153.333333333332, ans=10.0 +2024-07-27 15:22:35,884 INFO [train.py:1114] (0/4) Epoch 3, batch 2950, loss[loss=0.3022, simple_loss=0.3719, pruned_loss=0.1163, over 4706.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3586, pruned_loss=0.1147, over 938949.53 frames. ], batch size: 12, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:22:47,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0 +2024-07-27 15:22:48,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31206.666666666668, ans=0.1 +2024-07-27 15:22:48,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.00 vs. limit=15.0 +2024-07-27 15:22:49,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.20 vs. limit=10.0 +2024-07-27 15:22:52,192 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.239e+01 6.722e+01 7.619e+01 1.818e+02, threshold=1.344e+02, percent-clipped=1.0 +2024-07-27 15:22:55,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=31220.0, ans=0.125 +2024-07-27 15:23:05,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=31233.333333333332, ans=0.125 +2024-07-27 15:23:16,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=31246.666666666668, ans=0.125 +2024-07-27 15:23:18,256 INFO [train.py:1114] (0/4) Epoch 3, batch 3000, loss[loss=0.2832, simple_loss=0.343, pruned_loss=0.1117, over 4763.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.3574, pruned_loss=0.1139, over 938696.74 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:23:18,257 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 15:23:30,289 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.2606, 3.0044, 2.9696, 2.5970, 2.6921, 2.6627, 2.7474, 2.7098], + device='cuda:0') +2024-07-27 15:23:33,137 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2358, simple_loss=0.3336, pruned_loss=0.06904, over 944034.00 frames. +2024-07-27 15:23:33,138 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 15:23:34,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=31260.0, ans=6.0 +2024-07-27 15:23:55,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=31300.0, ans=0.125 +2024-07-27 15:23:57,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.12 vs. 
limit=15.0 +2024-07-27 15:24:08,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=31326.666666666668, ans=0.125 +2024-07-27 15:24:09,342 INFO [train.py:1114] (0/4) Epoch 3, batch 3050, loss[loss=0.2753, simple_loss=0.3467, pruned_loss=0.102, over 4638.00 frames. ], tot_loss[loss=0.2925, simple_loss=0.3578, pruned_loss=0.1136, over 937643.37 frames. ], batch size: 12, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:24:19,675 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 6.122e+01 6.753e+01 7.490e+01 1.166e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-27 15:24:31,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=31366.666666666668, ans=0.125 +2024-07-27 15:24:35,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=31366.666666666668, ans=0.125 +2024-07-27 15:24:37,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=31366.666666666668, ans=0.0 +2024-07-27 15:24:43,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=31380.0, ans=0.004047826086956522 +2024-07-27 15:24:44,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0 +2024-07-27 15:24:45,610 INFO [train.py:1114] (0/4) Epoch 3, batch 3100, loss[loss=0.313, simple_loss=0.381, pruned_loss=0.1225, over 4629.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3571, pruned_loss=0.1135, over 938170.21 frames. ], batch size: 16, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:24:51,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=31406.666666666668, ans=0.125 +2024-07-27 15:24:54,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.89 vs. limit=5.0 +2024-07-27 15:25:01,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=31420.0, ans=0.125 +2024-07-27 15:25:21,432 INFO [train.py:1114] (0/4) Epoch 3, batch 3150, loss[loss=0.2936, simple_loss=0.3656, pruned_loss=0.1108, over 4651.00 frames. ], tot_loss[loss=0.292, simple_loss=0.3572, pruned_loss=0.1134, over 938169.13 frames. ], batch size: 17, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:25:21,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=31460.0, ans=0.125 +2024-07-27 15:25:23,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.47 vs. 
limit=12.0 +2024-07-27 15:25:28,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=31473.333333333332, ans=0.0 +2024-07-27 15:25:31,677 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.198e+01 6.919e+01 7.574e+01 1.132e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 15:25:47,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=31500.0, ans=0.2 +2024-07-27 15:25:56,851 INFO [train.py:1114] (0/4) Epoch 3, batch 3200, loss[loss=0.3059, simple_loss=0.3698, pruned_loss=0.121, over 4822.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3557, pruned_loss=0.1126, over 939694.99 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:25:58,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=31526.666666666668, ans=0.0 +2024-07-27 15:26:00,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=31526.666666666668, ans=0.0040159420289855065 +2024-07-27 15:26:23,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=31553.333333333332, ans=0.125 +2024-07-27 15:26:24,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=15.0 +2024-07-27 15:26:26,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=31566.666666666668, ans=0.025 +2024-07-27 15:26:34,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=8.0 +2024-07-27 15:26:42,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=31580.0, ans=0.125 +2024-07-27 15:26:42,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=31580.0, ans=0.125 +2024-07-27 15:26:43,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=31580.0, ans=0.0 +2024-07-27 15:26:49,775 INFO [train.py:1114] (0/4) Epoch 3, batch 3250, loss[loss=0.3221, simple_loss=0.3887, pruned_loss=0.1278, over 4933.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3561, pruned_loss=0.1122, over 940960.77 frames. ], batch size: 14, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:26:51,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=31593.333333333332, ans=0.0 +2024-07-27 15:26:52,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=31593.333333333332, ans=0.004001449275362319 +2024-07-27 15:26:53,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=31593.333333333332, ans=0.2 +2024-07-27 15:27:01,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. 
limit=6.0 +2024-07-27 15:27:01,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.545e+01 6.278e+01 6.797e+01 7.554e+01 1.103e+02, threshold=1.359e+02, percent-clipped=0.0 +2024-07-27 15:27:02,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.17 vs. limit=22.5 +2024-07-27 15:27:05,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31620.0, ans=0.1 +2024-07-27 15:27:07,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.63 vs. limit=22.5 +2024-07-27 15:27:12,369 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.14 vs. limit=22.5 +2024-07-27 15:27:13,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=31633.333333333332, ans=0.125 +2024-07-27 15:27:30,589 INFO [train.py:1114] (0/4) Epoch 3, batch 3300, loss[loss=0.3194, simple_loss=0.3786, pruned_loss=0.1301, over 4716.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3537, pruned_loss=0.1113, over 941229.41 frames. ], batch size: 19, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:27:38,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=31660.0, ans=0.125 +2024-07-27 15:27:40,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=31660.0, ans=0.125 +2024-07-27 15:27:46,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=31673.333333333332, ans=0.125 +2024-07-27 15:28:06,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=31713.333333333332, ans=0.2 +2024-07-27 15:28:07,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0 +2024-07-27 15:28:08,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=31713.333333333332, ans=0.0 +2024-07-27 15:28:13,926 INFO [train.py:1114] (0/4) Epoch 3, batch 3350, loss[loss=0.3153, simple_loss=0.3719, pruned_loss=0.1294, over 4606.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3554, pruned_loss=0.1124, over 939709.06 frames. 
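During the validation pass logged above, `zipformer.py:1858` prints an `attn_weights_entropy` tensor with one value per attention head: the average entropy (in nats) of that head's attention distribution, a quick check that no head has collapsed onto a single position (entropy near 0) or gone uniform (entropy near log of the key length). A sketch of such a diagnostic, under the assumption that the weights have shape (batch, heads, query, key):

```python
import torch


def attn_weights_entropy(attn_weights: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    """Per-head mean entropy (nats) of attention distributions.

    Assumes attn_weights of shape (batch, num_heads, query_len, key_len)
    with rows summing to 1.
    """
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return entropy.mean(dim=(0, 2))  # average over batch and query positions


weights = torch.softmax(torch.randn(2, 8, 50, 50), dim=-1)
print(attn_weights_entropy(weights))  # one value per head, ~3 nats here
```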
], batch size: 17, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:28:17,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=31726.666666666668, ans=0.025 +2024-07-27 15:28:18,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=31726.666666666668, ans=0.003972463768115941 +2024-07-27 15:28:22,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=31740.0, ans=0.125 +2024-07-27 15:28:24,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.313e+01 6.716e+01 7.505e+01 1.231e+02, threshold=1.343e+02, percent-clipped=0.0 +2024-07-27 15:28:31,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31753.333333333332, ans=0.1 +2024-07-27 15:28:39,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=31766.666666666668, ans=0.003963768115942029 +2024-07-27 15:28:43,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31780.0, ans=0.1 +2024-07-27 15:28:46,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=31780.0, ans=0.07 +2024-07-27 15:28:50,164 INFO [train.py:1114] (0/4) Epoch 3, batch 3400, loss[loss=0.2848, simple_loss=0.3456, pruned_loss=0.1119, over 4808.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3555, pruned_loss=0.1125, over 938355.60 frames. ], batch size: 11, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:28:56,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.42 vs. limit=10.0 +2024-07-27 15:28:58,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=31793.333333333332, ans=0.1 +2024-07-27 15:29:00,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31793.333333333332, ans=0.1 +2024-07-27 15:29:05,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=31806.666666666668, ans=0.125 +2024-07-27 15:29:15,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.70 vs. limit=15.0 +2024-07-27 15:29:30,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=31846.666666666668, ans=0.2 +2024-07-27 15:29:35,520 INFO [train.py:1114] (0/4) Epoch 3, batch 3450, loss[loss=0.3223, simple_loss=0.3919, pruned_loss=0.1264, over 4649.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3553, pruned_loss=0.1119, over 938489.20 frames. 
], batch size: 19, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:29:49,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=31860.0, ans=0.125 +2024-07-27 15:29:55,350 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 6.313e+01 6.956e+01 7.933e+01 1.220e+02, threshold=1.391e+02, percent-clipped=0.0 +2024-07-27 15:30:01,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=31886.666666666668, ans=0.125 +2024-07-27 15:30:02,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31886.666666666668, ans=0.1 +2024-07-27 15:30:20,230 INFO [train.py:1114] (0/4) Epoch 3, batch 3500, loss[loss=0.296, simple_loss=0.3572, pruned_loss=0.1174, over 4946.00 frames. ], tot_loss[loss=0.2891, simple_loss=0.3549, pruned_loss=0.1117, over 938451.12 frames. ], batch size: 12, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:30:20,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=31926.666666666668, ans=0.003928985507246376 +2024-07-27 15:30:25,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=31926.666666666668, ans=0.0 +2024-07-27 15:30:30,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31940.0, ans=0.1 +2024-07-27 15:30:43,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=31966.666666666668, ans=0.07 +2024-07-27 15:30:55,156 INFO [train.py:1114] (0/4) Epoch 3, batch 3550, loss[loss=0.2631, simple_loss=0.3468, pruned_loss=0.08967, over 4666.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3548, pruned_loss=0.1114, over 938928.69 frames. ], batch size: 14, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:31:01,012 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-24000.pt +2024-07-27 15:31:09,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=32006.666666666668, ans=0.0 +2024-07-27 15:31:10,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 6.203e+01 6.849e+01 7.664e+01 1.472e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-27 15:31:19,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=32020.0, ans=0.125 +2024-07-27 15:31:31,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.00 vs. limit=22.5 +2024-07-27 15:31:34,516 INFO [train.py:1114] (0/4) Epoch 3, batch 3600, loss[loss=0.2461, simple_loss=0.3169, pruned_loss=0.08772, over 4967.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.353, pruned_loss=0.1096, over 941035.32 frames. 
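The `grad_scale` field in the batch summaries is the dynamic loss-scaling factor of mixed-precision training: it is halved when a scaled step overflows (64.0 drops to 32.0 around batch 1600 above) and doubled back after a long run of stable steps (32.0 returns to 64.0 in the batch 3600 summary that follows, roughly 2000 batches later, consistent with a growth interval of about 2000). The standard PyTorch mechanism is `torch.cuda.amp.GradScaler`; whether this run uses it directly or through a wrapper is an assumption, and the model/optimizer names below are placeholders.

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=64.0,      # matches the first grad_scale reported here
    backoff_factor=0.5,   # halve on overflow: 64.0 -> 32.0
    growth_factor=2.0,    # double after growth_interval clean steps
    growth_interval=2000,
)

def train_step(model, optimizer, batch, criterion):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = criterion(model(batch["inputs"]), batch["targets"])
    scaler.scale(loss).backward()  # backward on the scaled loss
    scaler.step(optimizer)         # unscales grads, skips step on inf/nan
    scaler.update()                # adjusts grad_scale as seen in the logs
    return loss.detach()
```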
], batch size: 13, lr: 2.29e-02, grad_scale: 64.0 +2024-07-27 15:31:36,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=32060.0, ans=0.0 +2024-07-27 15:31:42,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32073.333333333332, ans=0.125 +2024-07-27 15:31:43,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=32073.333333333332, ans=0.025 +2024-07-27 15:31:45,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.48 vs. limit=15.0 +2024-07-27 15:31:59,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32100.0, ans=0.0 +2024-07-27 15:32:10,745 INFO [train.py:1114] (0/4) Epoch 3, batch 3650, loss[loss=0.2846, simple_loss=0.3692, pruned_loss=0.09999, over 4898.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3531, pruned_loss=0.1094, over 941263.64 frames. ], batch size: 15, lr: 2.29e-02, grad_scale: 64.0 +2024-07-27 15:32:10,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32126.666666666668, ans=0.125 +2024-07-27 15:32:14,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=32126.666666666668, ans=0.003885507246376811 +2024-07-27 15:32:21,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.09 vs. limit=15.0 +2024-07-27 15:32:21,263 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.293e+01 6.817e+01 8.019e+01 9.949e+01 1.573e+02, threshold=1.604e+02, percent-clipped=3.0 +2024-07-27 15:32:32,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0 +2024-07-27 15:32:34,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=32166.666666666668, ans=0.0038768115942028987 +2024-07-27 15:32:37,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=32166.666666666668, ans=22.5 +2024-07-27 15:32:40,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=32180.0, ans=0.2 +2024-07-27 15:32:45,101 INFO [train.py:1114] (0/4) Epoch 3, batch 3700, loss[loss=0.2926, simple_loss=0.3614, pruned_loss=0.1119, over 4941.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3546, pruned_loss=0.1102, over 942277.15 frames. 
], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:32:45,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=32193.333333333332, ans=0.125 +2024-07-27 15:32:46,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=32193.333333333332, ans=0.125 +2024-07-27 15:32:49,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32193.333333333332, ans=0.125 +2024-07-27 15:32:57,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=32206.666666666668, ans=0.003868115942028986 +2024-07-27 15:33:00,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=32220.0, ans=0.125 +2024-07-27 15:33:20,703 INFO [train.py:1114] (0/4) Epoch 3, batch 3750, loss[loss=0.265, simple_loss=0.3221, pruned_loss=0.104, over 4803.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3533, pruned_loss=0.1094, over 943401.51 frames. ], batch size: 11, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:33:29,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=32273.333333333332, ans=0.125 +2024-07-27 15:33:31,095 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.187e+01 6.972e+01 7.768e+01 2.543e+02, threshold=1.394e+02, percent-clipped=1.0 +2024-07-27 15:33:47,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=32300.0, ans=0.2 +2024-07-27 15:33:49,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=32300.0, ans=0.0 +2024-07-27 15:33:51,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=32300.0, ans=0.125 +2024-07-27 15:34:01,626 INFO [train.py:1114] (0/4) Epoch 3, batch 3800, loss[loss=0.3465, simple_loss=0.4146, pruned_loss=0.1392, over 4813.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3542, pruned_loss=0.1108, over 941409.30 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:34:02,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.78 vs. 
limit=22.5 +2024-07-27 15:34:04,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=32326.666666666668, ans=0.125 +2024-07-27 15:34:06,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32326.666666666668, ans=0.1 +2024-07-27 15:34:08,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=32340.0, ans=0.125 +2024-07-27 15:34:15,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=32353.333333333332, ans=0.0038362318840579716 +2024-07-27 15:34:15,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32353.333333333332, ans=0.125 +2024-07-27 15:34:23,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.40 vs. limit=22.5 +2024-07-27 15:34:30,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.50 vs. limit=15.0 +2024-07-27 15:34:36,455 INFO [train.py:1114] (0/4) Epoch 3, batch 3850, loss[loss=0.2862, simple_loss=0.3644, pruned_loss=0.104, over 4640.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3542, pruned_loss=0.1106, over 942110.82 frames. ], batch size: 16, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:34:45,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.98 vs. limit=15.0 +2024-07-27 15:34:46,752 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.304e+01 7.059e+01 8.148e+01 1.168e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 15:34:46,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32406.666666666668, ans=0.1 +2024-07-27 15:34:48,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32406.666666666668, ans=0.1 +2024-07-27 15:35:07,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=32446.666666666668, ans=0.05 +2024-07-27 15:35:12,210 INFO [train.py:1114] (0/4) Epoch 3, batch 3900, loss[loss=0.348, simple_loss=0.4055, pruned_loss=0.1453, over 4812.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3559, pruned_loss=0.1118, over 942702.43 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:35:14,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=32460.0, ans=0.025 +2024-07-27 15:35:24,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=32473.333333333332, ans=0.125 +2024-07-27 15:35:43,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=32513.333333333332, ans=0.025 +2024-07-27 15:35:46,894 INFO [train.py:1114] (0/4) Epoch 3, batch 3950, loss[loss=0.2809, simple_loss=0.3623, pruned_loss=0.09978, over 4831.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3561, pruned_loss=0.1117, over 944722.65 frames. 
], batch size: 16, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:35:47,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0 +2024-07-27 15:35:51,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=32526.666666666668, ans=0.07 +2024-07-27 15:35:58,552 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 6.366e+01 6.864e+01 8.017e+01 1.947e+02, threshold=1.373e+02, percent-clipped=1.0 +2024-07-27 15:36:02,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.30 vs. limit=10.0 +2024-07-27 15:36:05,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=32553.333333333332, ans=0.07 +2024-07-27 15:36:07,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=32553.333333333332, ans=0.07 +2024-07-27 15:36:22,687 INFO [train.py:1114] (0/4) Epoch 3, batch 4000, loss[loss=0.2567, simple_loss=0.3306, pruned_loss=0.09139, over 4779.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3543, pruned_loss=0.111, over 941007.37 frames. ], batch size: 12, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:36:25,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=32593.333333333332, ans=0.003784057971014493 +2024-07-27 15:36:30,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=32606.666666666668, ans=0.003781159420289855 +2024-07-27 15:36:32,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=32606.666666666668, ans=0.2 +2024-07-27 15:36:35,341 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.91 vs. limit=10.0 +2024-07-27 15:36:59,109 INFO [train.py:1114] (0/4) Epoch 3, batch 4050, loss[loss=0.4251, simple_loss=0.4418, pruned_loss=0.2042, over 3332.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3549, pruned_loss=0.1113, over 939452.45 frames. ], batch size: 37, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:36:59,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=32660.0, ans=0.035 +2024-07-27 15:37:00,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=32660.0, ans=0.0 +2024-07-27 15:37:01,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. 
limit=15.0 +2024-07-27 15:37:08,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=32673.333333333332, ans=0.125 +2024-07-27 15:37:11,294 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.463e+01 6.459e+01 6.983e+01 7.697e+01 1.084e+02, threshold=1.397e+02, percent-clipped=0.0 +2024-07-27 15:37:14,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=32686.666666666668, ans=0.2 +2024-07-27 15:37:21,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=32686.666666666668, ans=0.125 +2024-07-27 15:37:37,935 INFO [train.py:1114] (0/4) Epoch 3, batch 4100, loss[loss=0.3097, simple_loss=0.3646, pruned_loss=0.1274, over 4907.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.3552, pruned_loss=0.1116, over 938334.15 frames. ], batch size: 15, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:37:38,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.32 vs. limit=15.0 +2024-07-27 15:37:42,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=32726.666666666668, ans=0.125 +2024-07-27 15:38:04,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=32766.666666666668, ans=0.2 +2024-07-27 15:38:05,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=32766.666666666668, ans=0.04949747468305833 +2024-07-27 15:38:15,381 INFO [train.py:1114] (0/4) Epoch 3, batch 4150, loss[loss=0.2417, simple_loss=0.3099, pruned_loss=0.08674, over 4833.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3529, pruned_loss=0.1098, over 937815.65 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:38:17,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32793.333333333336, ans=0.1 +2024-07-27 15:38:20,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=32793.333333333336, ans=0.0 +2024-07-27 15:38:23,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.06 vs. limit=15.0 +2024-07-27 15:38:24,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=32806.666666666664, ans=10.0 +2024-07-27 15:38:25,905 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+01 6.227e+01 6.781e+01 8.028e+01 1.229e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 15:38:37,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32833.333333333336, ans=0.125 +2024-07-27 15:38:45,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=32846.666666666664, ans=0.0 +2024-07-27 15:38:54,829 INFO [train.py:1114] (0/4) Epoch 3, batch 4200, loss[loss=0.2997, simple_loss=0.3614, pruned_loss=0.1191, over 4915.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3536, pruned_loss=0.1101, over 939127.72 frames. 
], batch size: 15, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:38:56,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.66 vs. limit=15.0 +2024-07-27 15:39:05,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0 +2024-07-27 15:39:11,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=32886.666666666664, ans=0.2 +2024-07-27 15:39:14,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0 +2024-07-27 15:39:31,212 INFO [train.py:1114] (0/4) Epoch 3, batch 4250, loss[loss=0.3021, simple_loss=0.3644, pruned_loss=0.1199, over 4643.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3542, pruned_loss=0.1105, over 940541.74 frames. ], batch size: 12, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:39:32,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=32926.666666666664, ans=0.125 +2024-07-27 15:39:38,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=32940.0, ans=0.125 +2024-07-27 15:39:41,108 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.439e+01 6.186e+01 6.763e+01 7.704e+01 1.140e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-27 15:39:45,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32953.333333333336, ans=0.1 +2024-07-27 15:39:51,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=32953.333333333336, ans=0.0037057971014492746 +2024-07-27 15:39:55,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.78 vs. limit=10.0 +2024-07-27 15:40:00,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32980.0, ans=0.125 +2024-07-27 15:40:01,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=32980.0, ans=0.2 +2024-07-27 15:40:03,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.01 vs. limit=10.0 +2024-07-27 15:40:06,766 INFO [train.py:1114] (0/4) Epoch 3, batch 4300, loss[loss=0.2965, simple_loss=0.3617, pruned_loss=0.1157, over 4756.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3544, pruned_loss=0.1113, over 940222.75 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:40:10,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32993.333333333336, ans=0.1 +2024-07-27 15:40:18,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.83 vs. limit=15.0 +2024-07-27 15:40:20,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.26 vs. 
limit=12.0 +2024-07-27 15:40:21,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=33020.0, ans=0.05 +2024-07-27 15:40:29,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.49 vs. limit=12.0 +2024-07-27 15:40:32,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33033.333333333336, ans=0.1 +2024-07-27 15:40:41,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=33046.666666666664, ans=0.0 +2024-07-27 15:40:42,786 INFO [train.py:1114] (0/4) Epoch 3, batch 4350, loss[loss=0.3035, simple_loss=0.3646, pruned_loss=0.1213, over 4751.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3546, pruned_loss=0.111, over 941346.88 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:40:43,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=33060.0, ans=0.0036826086956521734 +2024-07-27 15:40:49,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.77 vs. limit=15.0 +2024-07-27 15:40:54,686 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.277e+01 6.236e+01 6.804e+01 7.780e+01 1.356e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-27 15:41:09,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.04 vs. limit=15.0 +2024-07-27 15:41:13,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=33113.333333333336, ans=0.05 +2024-07-27 15:41:19,011 INFO [train.py:1114] (0/4) Epoch 3, batch 4400, loss[loss=0.3091, simple_loss=0.3749, pruned_loss=0.1217, over 4819.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3557, pruned_loss=0.1117, over 941075.65 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:41:19,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0 +2024-07-27 15:41:19,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33126.666666666664, ans=0.125 +2024-07-27 15:41:19,977 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:41:20,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.52 vs. limit=15.0 +2024-07-27 15:41:27,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=33140.0, ans=0.2 +2024-07-27 15:41:33,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.12 vs. 
limit=6.0 +2024-07-27 15:41:35,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=33153.333333333336, ans=0.0 +2024-07-27 15:41:38,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=33153.333333333336, ans=0.0036623188405797095 +2024-07-27 15:41:56,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=33180.0, ans=0.125 +2024-07-27 15:41:58,385 INFO [train.py:1114] (0/4) Epoch 3, batch 4450, loss[loss=0.276, simple_loss=0.3396, pruned_loss=0.1062, over 4944.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3564, pruned_loss=0.1127, over 938997.19 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:42:00,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.90 vs. limit=22.5 +2024-07-27 15:42:02,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=12.0 +2024-07-27 15:42:08,444 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 6.763e+01 7.448e+01 8.954e+01 1.362e+02, threshold=1.490e+02, percent-clipped=1.0 +2024-07-27 15:42:17,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. limit=6.0 +2024-07-27 15:42:20,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=33233.333333333336, ans=0.125 +2024-07-27 15:42:22,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=33233.333333333336, ans=0.0036449275362318836 +2024-07-27 15:42:44,699 INFO [train.py:1114] (0/4) Epoch 3, batch 4500, loss[loss=0.3319, simple_loss=0.3902, pruned_loss=0.1368, over 4735.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3566, pruned_loss=0.1124, over 938288.39 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:42:53,012 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.867e+01 +2024-07-27 15:42:55,690 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.071e+01 +2024-07-27 15:43:00,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33286.666666666664, ans=0.0 +2024-07-27 15:43:01,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33286.666666666664, ans=0.1 +2024-07-27 15:43:04,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=33286.666666666664, ans=0.003633333333333333 +2024-07-27 15:43:18,869 INFO [train.py:1114] (0/4) Epoch 3, batch 4550, loss[loss=0.2762, simple_loss=0.345, pruned_loss=0.1037, over 4900.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.356, pruned_loss=0.1118, over 940201.33 frames. ], batch size: 13, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:43:19,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.51 vs. 
limit=22.5 +2024-07-27 15:43:22,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=33326.666666666664, ans=0.0 +2024-07-27 15:43:27,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=33340.0, ans=0.2 +2024-07-27 15:43:31,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.619e+01 7.429e+01 8.895e+01 1.429e+02, threshold=1.486e+02, percent-clipped=0.0 +2024-07-27 15:43:34,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.24 vs. limit=22.5 +2024-07-27 15:43:40,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=33353.333333333336, ans=0.125 +2024-07-27 15:43:41,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0 +2024-07-27 15:43:53,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=33380.0, ans=0.025 +2024-07-27 15:43:55,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=33380.0, ans=0.125 +2024-07-27 15:43:56,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=33380.0, ans=0.00361304347826087 +2024-07-27 15:43:57,986 INFO [train.py:1114] (0/4) Epoch 3, batch 4600, loss[loss=0.3046, simple_loss=0.3725, pruned_loss=0.1184, over 4432.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3541, pruned_loss=0.1107, over 938908.69 frames. ], batch size: 21, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:44:04,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=33406.666666666664, ans=0.1 +2024-07-27 15:44:13,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=33420.0, ans=0.0 +2024-07-27 15:44:21,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=33433.333333333336, ans=0.025 +2024-07-27 15:44:27,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=33446.666666666664, ans=0.125 +2024-07-27 15:44:32,078 INFO [train.py:1114] (0/4) Epoch 3, batch 4650, loss[loss=0.2644, simple_loss=0.3511, pruned_loss=0.08888, over 4829.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.355, pruned_loss=0.1106, over 940811.94 frames. 
], batch size: 16, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:44:34,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=33460.0, ans=0.07 +2024-07-27 15:44:42,689 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.996e+01 6.580e+01 7.328e+01 8.938e+01 2.315e+02, threshold=1.466e+02, percent-clipped=1.0 +2024-07-27 15:44:44,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=33473.333333333336, ans=0.125 +2024-07-27 15:44:49,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33486.666666666664, ans=0.1 +2024-07-27 15:45:00,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=33500.0, ans=0.125 +2024-07-27 15:45:08,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-07-27 15:45:08,309 INFO [train.py:1114] (0/4) Epoch 3, batch 4700, loss[loss=0.2605, simple_loss=0.3241, pruned_loss=0.09841, over 4716.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3545, pruned_loss=0.1106, over 938174.08 frames. ], batch size: 11, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:21,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.93 vs. limit=15.0 +2024-07-27 15:45:26,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33553.333333333336, ans=0.1 +2024-07-27 15:45:27,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=33566.666666666664, ans=0.0 +2024-07-27 15:45:29,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.50 vs. limit=10.0 +2024-07-27 15:45:43,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.70 vs. limit=22.5 +2024-07-27 15:45:46,787 INFO [train.py:1114] (0/4) Epoch 3, batch 4750, loss[loss=0.3092, simple_loss=0.3689, pruned_loss=0.1248, over 4506.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3541, pruned_loss=0.1106, over 935916.30 frames. ], batch size: 21, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:47,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=33593.333333333336, ans=10.0 +2024-07-27 15:45:56,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=33606.666666666664, ans=0.0 +2024-07-27 15:45:57,481 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.473e+01 7.371e+01 8.571e+01 1.233e+02, threshold=1.474e+02, percent-clipped=0.0 +2024-07-27 15:46:21,154 INFO [train.py:1114] (0/4) Epoch 3, batch 4800, loss[loss=0.3243, simple_loss=0.3844, pruned_loss=0.1321, over 4706.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3541, pruned_loss=0.1109, over 933015.32 frames. 
], batch size: 13, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:46:21,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=33660.0, ans=0.025 +2024-07-27 15:46:31,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33673.333333333336, ans=0.125 +2024-07-27 15:46:32,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=33673.333333333336, ans=0.125 +2024-07-27 15:46:34,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-27 15:46:43,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=33700.0, ans=0.125 +2024-07-27 15:46:53,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.93 vs. limit=15.0 +2024-07-27 15:46:53,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=33713.333333333336, ans=0.2 +2024-07-27 15:46:56,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.36 vs. limit=15.0 +2024-07-27 15:46:56,857 INFO [train.py:1114] (0/4) Epoch 3, batch 4850, loss[loss=0.332, simple_loss=0.3951, pruned_loss=0.1345, over 4740.00 frames. ], tot_loss[loss=0.2902, simple_loss=0.3562, pruned_loss=0.1121, over 932313.21 frames. ], batch size: 14, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:46:57,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=33726.666666666664, ans=0.025 +2024-07-27 15:46:58,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=33726.666666666664, ans=0.125 +2024-07-27 15:47:00,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.06 vs. limit=15.0 +2024-07-27 15:47:02,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.62 vs. limit=15.0 +2024-07-27 15:47:07,319 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.462e+01 7.308e+01 8.577e+01 1.443e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 15:47:08,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33740.0, ans=0.0 +2024-07-27 15:47:08,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33740.0, ans=0.1 +2024-07-27 15:47:09,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=33740.0, ans=0.0035347826086956514 +2024-07-27 15:47:21,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33766.666666666664, ans=0.1 +2024-07-27 15:47:31,285 INFO [train.py:1114] (0/4) Epoch 3, batch 4900, loss[loss=0.3086, simple_loss=0.3707, pruned_loss=0.1233, over 4755.00 frames. 
], tot_loss[loss=0.2899, simple_loss=0.3556, pruned_loss=0.1121, over 933852.90 frames. ], batch size: 13, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:47:38,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.26 vs. limit=15.0 +2024-07-27 15:47:41,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=33806.666666666664, ans=0.125 +2024-07-27 15:47:58,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33833.333333333336, ans=0.1 +2024-07-27 15:48:06,517 INFO [train.py:1114] (0/4) Epoch 3, batch 4950, loss[loss=0.3804, simple_loss=0.4034, pruned_loss=0.1786, over 3335.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3577, pruned_loss=0.1135, over 931006.45 frames. ], batch size: 35, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:15,276 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.32 vs. limit=15.0 +2024-07-27 15:48:16,794 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.224e+01 6.458e+01 7.350e+01 8.583e+01 1.982e+02, threshold=1.470e+02, percent-clipped=1.0 +2024-07-27 15:48:27,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.79 vs. limit=12.0 +2024-07-27 15:48:28,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=33900.0, ans=0.125 +2024-07-27 15:48:41,077 INFO [train.py:1114] (0/4) Epoch 3, batch 5000, loss[loss=0.311, simple_loss=0.3832, pruned_loss=0.1194, over 4664.00 frames. ], tot_loss[loss=0.2904, simple_loss=0.3564, pruned_loss=0.1121, over 934932.92 frames. ], batch size: 14, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:41,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=33926.666666666664, ans=0.2 +2024-07-27 15:49:00,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.69 vs. limit=15.0 +2024-07-27 15:49:12,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=33980.0, ans=0.0 +2024-07-27 15:49:13,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33980.0, ans=0.1 +2024-07-27 15:49:19,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33980.0, ans=0.0 +2024-07-27 15:49:21,817 INFO [train.py:1114] (0/4) Epoch 3, batch 5050, loss[loss=0.3065, simple_loss=0.3472, pruned_loss=0.1329, over 4864.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3551, pruned_loss=0.1113, over 937518.36 frames. ], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:49:37,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.65 vs. 
limit=22.5 +2024-07-27 15:49:40,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.504e+01 6.490e+01 6.878e+01 7.828e+01 1.247e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 15:49:42,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34006.666666666664, ans=0.125 +2024-07-27 15:50:04,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=34060.0, ans=0.0 +2024-07-27 15:50:05,319 INFO [train.py:1114] (0/4) Epoch 3, batch 5100, loss[loss=0.2659, simple_loss=0.333, pruned_loss=0.09935, over 4774.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3563, pruned_loss=0.1119, over 934632.09 frames. ], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:50:12,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=34060.0, ans=0.0 +2024-07-27 15:50:16,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.52 vs. limit=15.0 +2024-07-27 15:50:19,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=34073.333333333336, ans=0.2 +2024-07-27 15:50:25,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=34086.666666666664, ans=0.0 +2024-07-27 15:50:29,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34086.666666666664, ans=0.1 +2024-07-27 15:50:34,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=34100.0, ans=0.125 +2024-07-27 15:50:40,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=34113.333333333336, ans=0.0 +2024-07-27 15:50:42,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34113.333333333336, ans=0.125 +2024-07-27 15:50:45,817 INFO [train.py:1114] (0/4) Epoch 3, batch 5150, loss[loss=0.2932, simple_loss=0.3617, pruned_loss=0.1124, over 4828.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3566, pruned_loss=0.1122, over 935735.70 frames. 
], batch size: 16, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:50:49,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34126.666666666664, ans=0.1 +2024-07-27 15:50:50,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=34126.666666666664, ans=0.125 +2024-07-27 15:50:52,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34140.0, ans=0.125 +2024-07-27 15:50:56,019 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.638e+01 7.768e+01 8.989e+01 1.373e+02, threshold=1.554e+02, percent-clipped=0.0 +2024-07-27 15:50:56,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=34140.0, ans=0.0 +2024-07-27 15:51:06,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=34166.666666666664, ans=0.125 +2024-07-27 15:51:06,731 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:51:07,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.09 vs. limit=6.0 +2024-07-27 15:51:15,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=34180.0, ans=0.04949747468305833 +2024-07-27 15:51:15,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=34180.0, ans=0.2 +2024-07-27 15:51:15,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=34180.0, ans=0.0034391304347826087 +2024-07-27 15:51:22,074 INFO [train.py:1114] (0/4) Epoch 3, batch 5200, loss[loss=0.2785, simple_loss=0.3611, pruned_loss=0.09793, over 4654.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3553, pruned_loss=0.11, over 936001.54 frames. ], batch size: 14, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:51:45,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=34220.0, ans=0.0 +2024-07-27 15:51:51,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=34233.333333333336, ans=0.125 +2024-07-27 15:52:05,549 INFO [train.py:1114] (0/4) Epoch 3, batch 5250, loss[loss=0.2673, simple_loss=0.341, pruned_loss=0.09679, over 4894.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3543, pruned_loss=0.1094, over 936001.34 frames. 
], batch size: 13, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:52:17,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=34273.333333333336, ans=0.125 +2024-07-27 15:52:18,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.312e+01 6.590e+01 7.442e+01 8.415e+01 1.347e+02, threshold=1.488e+02, percent-clipped=0.0 +2024-07-27 15:52:23,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=34286.666666666664, ans=0.0034159420289855075 +2024-07-27 15:52:32,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=34300.0, ans=0.125 +2024-07-27 15:52:33,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=34300.0, ans=0.125 +2024-07-27 15:52:34,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=34300.0, ans=0.5 +2024-07-27 15:52:38,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34313.333333333336, ans=0.1 +2024-07-27 15:52:45,556 INFO [train.py:1114] (0/4) Epoch 3, batch 5300, loss[loss=0.3192, simple_loss=0.377, pruned_loss=0.1307, over 4640.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3544, pruned_loss=0.11, over 934638.44 frames. ], batch size: 16, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:52:46,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=34326.666666666664, ans=0.125 +2024-07-27 15:52:50,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=34326.666666666664, ans=0.2 +2024-07-27 15:52:54,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=34340.0, ans=0.0 +2024-07-27 15:52:55,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=34340.0, ans=0.035 +2024-07-27 15:52:58,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.09 vs. limit=22.5 +2024-07-27 15:53:12,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34366.666666666664, ans=0.1 +2024-07-27 15:53:12,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=34366.666666666664, ans=0.0033985507246376825 +2024-07-27 15:53:13,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.78 vs. limit=12.0 +2024-07-27 15:53:15,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34380.0, ans=0.1 +2024-07-27 15:53:17,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=34380.0, ans=0.07 +2024-07-27 15:53:21,099 INFO [train.py:1114] (0/4) Epoch 3, batch 5350, loss[loss=0.2484, simple_loss=0.3206, pruned_loss=0.08805, over 4559.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.354, pruned_loss=0.1093, over 936559.18 frames. 
], batch size: 10, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:53:31,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.35 vs. limit=8.0 +2024-07-27 15:53:32,207 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.392e+01 6.457e+01 7.092e+01 8.534e+01 1.457e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 15:53:32,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34406.666666666664, ans=0.125 +2024-07-27 15:53:34,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.92 vs. limit=22.5 +2024-07-27 15:53:36,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=34420.0, ans=0.125 +2024-07-27 15:53:38,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=34420.0, ans=0.0 +2024-07-27 15:53:48,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.27 vs. limit=10.0 +2024-07-27 15:53:50,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=34446.666666666664, ans=0.0 +2024-07-27 15:53:56,294 INFO [train.py:1114] (0/4) Epoch 3, batch 5400, loss[loss=0.3501, simple_loss=0.3985, pruned_loss=0.1509, over 4200.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3558, pruned_loss=0.1109, over 929932.07 frames. ], batch size: 25, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:27,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34513.333333333336, ans=0.1 +2024-07-27 15:54:30,055 INFO [train.py:1114] (0/4) Epoch 3, batch 5450, loss[loss=0.2784, simple_loss=0.3319, pruned_loss=0.1125, over 4701.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3544, pruned_loss=0.1095, over 932808.56 frames. ], batch size: 11, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:30,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=34526.666666666664, ans=0.125 +2024-07-27 15:54:31,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=34526.666666666664, ans=0.0 +2024-07-27 15:54:32,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=34526.666666666664, ans=0.125 +2024-07-27 15:54:33,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=34526.666666666664, ans=0.125 +2024-07-27 15:54:33,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=34526.666666666664, ans=0.025 +2024-07-27 15:54:34,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. 
limit=15.0 +2024-07-27 15:54:40,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=34540.0, ans=0.125 +2024-07-27 15:54:40,912 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.485e+01 6.790e+01 7.649e+01 9.479e+01 1.674e+02, threshold=1.530e+02, percent-clipped=4.0 +2024-07-27 15:55:04,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=34580.0, ans=0.125 +2024-07-27 15:55:09,476 INFO [train.py:1114] (0/4) Epoch 3, batch 5500, loss[loss=0.3845, simple_loss=0.427, pruned_loss=0.171, over 4304.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3554, pruned_loss=0.1111, over 930822.80 frames. ], batch size: 25, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:55:32,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=34633.333333333336, ans=0.125 +2024-07-27 15:55:34,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=34633.333333333336, ans=0.125 +2024-07-27 15:55:42,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.17 vs. limit=22.5 +2024-07-27 15:55:45,422 INFO [train.py:1114] (0/4) Epoch 3, batch 5550, loss[loss=0.2362, simple_loss=0.3139, pruned_loss=0.07923, over 4706.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3549, pruned_loss=0.1111, over 932712.93 frames. ], batch size: 12, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:55:55,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=34660.0, ans=0.0033347826086956517 +2024-07-27 15:56:01,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=34673.333333333336, ans=0.125 +2024-07-27 15:56:04,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34673.333333333336, ans=0.125 +2024-07-27 15:56:05,371 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.078e+01 6.918e+01 7.816e+01 8.981e+01 2.239e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 15:56:05,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34673.333333333336, ans=0.0 +2024-07-27 15:56:07,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=34686.666666666664, ans=0.125 +2024-07-27 15:56:11,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34686.666666666664, ans=0.1 +2024-07-27 15:56:13,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=34686.666666666664, ans=0.125 +2024-07-27 15:56:14,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=34686.666666666664, ans=0.025 +2024-07-27 15:56:19,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.90 vs. 
limit=12.0 +2024-07-27 15:56:23,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.50 vs. limit=15.0 +2024-07-27 15:56:23,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=34713.333333333336, ans=0.2 +2024-07-27 15:56:28,898 INFO [train.py:1114] (0/4) Epoch 3, batch 5600, loss[loss=0.2826, simple_loss=0.3508, pruned_loss=0.1072, over 4735.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3544, pruned_loss=0.1104, over 933610.92 frames. ], batch size: 14, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:56:29,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=34726.666666666664, ans=0.125 +2024-07-27 15:56:31,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34726.666666666664, ans=0.125 +2024-07-27 15:56:38,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34740.0, ans=0.125 +2024-07-27 15:56:40,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=34740.0, ans=0.125 +2024-07-27 15:56:53,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-27 15:56:57,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=34780.0, ans=0.04949747468305833 +2024-07-27 15:56:58,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=34780.0, ans=0.0 +2024-07-27 15:56:58,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=34780.0, ans=0.2 +2024-07-27 15:57:01,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=34780.0, ans=0.025 +2024-07-27 15:57:04,512 INFO [train.py:1114] (0/4) Epoch 3, batch 5650, loss[loss=0.2802, simple_loss=0.3443, pruned_loss=0.1081, over 4578.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3521, pruned_loss=0.109, over 936387.41 frames. 
], batch size: 21, lr: 2.20e-02, grad_scale: 32.0
+2024-07-27 15:57:10,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34793.333333333336, ans=0.0
+2024-07-27 15:57:19,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+01 6.421e+01 6.946e+01 8.141e+01 1.354e+02, threshold=1.389e+02, percent-clipped=0.0
+2024-07-27 15:57:33,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=34833.333333333336, ans=0.125
+2024-07-27 15:57:35,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=34833.333333333336, ans=0.125
+2024-07-27 15:57:41,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34846.666666666664, ans=0.1
+2024-07-27 15:57:43,166 INFO [train.py:1114] (0/4) Epoch 3, batch 5700, loss[loss=0.2696, simple_loss=0.3341, pruned_loss=0.1025, over 4691.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3517, pruned_loss=0.109, over 937726.52 frames. ], batch size: 13, lr: 2.20e-02, grad_scale: 32.0
+2024-07-27 15:57:46,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=34860.0, ans=0.2
+2024-07-27 15:57:55,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=34873.333333333336, ans=0.07
+2024-07-27 15:57:58,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=34886.666666666664, ans=0.015
+2024-07-27 15:58:01,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=34886.666666666664, ans=0.125
+2024-07-27 15:58:05,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=34900.0, ans=0.125
+2024-07-27 15:58:05,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. limit=15.0
+2024-07-27 15:58:14,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=34913.333333333336, ans=0.05
+2024-07-27 15:58:17,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=34913.333333333336, ans=0.07
+2024-07-27 15:58:19,543 INFO [train.py:1114] (0/4) Epoch 3, batch 5750, loss[loss=0.3288, simple_loss=0.3884, pruned_loss=0.1346, over 4677.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3513, pruned_loss=0.1092, over 937703.16 frames. ], batch size: 19, lr: 2.20e-02, grad_scale: 32.0
+2024-07-27 15:58:21,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34926.666666666664, ans=0.125
+2024-07-27 15:58:26,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.06 vs. limit=22.5
+2024-07-27 15:58:28,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=34940.0, ans=0.125
+2024-07-27 15:58:29,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.30 vs. limit=15.0
+2024-07-27 15:58:30,751 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.375e+01 6.773e+01 7.385e+01 8.434e+01 1.352e+02, threshold=1.477e+02, percent-clipped=0.0
+2024-07-27 15:58:39,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=15.0
+2024-07-27 15:58:46,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=34966.666666666664, ans=0.125
+2024-07-27 15:58:55,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0
+2024-07-27 15:58:56,498 INFO [train.py:1114] (0/4) Epoch 3, batch 5800, loss[loss=0.3423, simple_loss=0.3922, pruned_loss=0.1462, over 4663.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3527, pruned_loss=0.1096, over 937082.66 frames. ], batch size: 19, lr: 2.20e-02, grad_scale: 32.0
+2024-07-27 15:58:58,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=34993.333333333336, ans=0.2
+2024-07-27 15:59:06,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=35006.666666666664, ans=0.2
+2024-07-27 15:59:14,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35020.0, ans=0.1
+2024-07-27 15:59:25,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=35046.666666666664, ans=0.0
+2024-07-27 15:59:29,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=35046.666666666664, ans=0.125
+2024-07-27 15:59:30,545 INFO [train.py:1114] (0/4) Epoch 3, batch 5850, loss[loss=0.3257, simple_loss=0.3877, pruned_loss=0.1318, over 4483.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3521, pruned_loss=0.1091, over 937405.01 frames. ], batch size: 21, lr: 2.20e-02, grad_scale: 32.0
+2024-07-27 15:59:45,122 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.773e+01 7.644e+01 9.466e+01 1.883e+02, threshold=1.529e+02, percent-clipped=1.0
+2024-07-27 15:59:45,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=35073.333333333336, ans=0.2
+2024-07-27 15:59:49,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=35086.666666666664, ans=0.125
+2024-07-27 15:59:50,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.98 vs. limit=22.5
+2024-07-27 15:59:56,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=35100.0, ans=0.2
+2024-07-27 15:59:56,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=35100.0, ans=0.0
+2024-07-27 16:00:02,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35113.333333333336, ans=0.1
+2024-07-27 16:00:04,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.55 vs. limit=15.0
+2024-07-27 16:00:12,756 INFO [train.py:1114] (0/4) Epoch 3, batch 5900, loss[loss=0.3157, simple_loss=0.3817, pruned_loss=0.1248, over 4668.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3524, pruned_loss=0.1095, over 937494.80 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 16.0
+2024-07-27 16:00:12,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35126.666666666664, ans=0.1
+2024-07-27 16:00:18,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35126.666666666664, ans=0.125
+2024-07-27 16:00:21,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35140.0, ans=0.125
+2024-07-27 16:00:25,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35153.333333333336, ans=0.0
+2024-07-27 16:00:27,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=35153.333333333336, ans=0.05
+2024-07-27 16:00:29,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35153.333333333336, ans=0.125
+2024-07-27 16:00:52,911 INFO [train.py:1114] (0/4) Epoch 3, batch 5950, loss[loss=0.292, simple_loss=0.3507, pruned_loss=0.1166, over 4693.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3525, pruned_loss=0.11, over 939644.74 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 16.0
+2024-07-27 16:00:56,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=35193.333333333336, ans=0.025
+2024-07-27 16:01:06,207 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.498e+01 6.928e+01 7.896e+01 9.145e+01 1.429e+02, threshold=1.579e+02, percent-clipped=0.0
+2024-07-27 16:01:11,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=35220.0, ans=0.125
+2024-07-27 16:01:15,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.73 vs. limit=22.5
+2024-07-27 16:01:16,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=35233.333333333336, ans=0.5
+2024-07-27 16:01:27,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.01 vs. limit=22.5
+2024-07-27 16:01:29,011 INFO [train.py:1114] (0/4) Epoch 3, batch 6000, loss[loss=0.3187, simple_loss=0.3847, pruned_loss=0.1264, over 4210.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.352, pruned_loss=0.1097, over 936734.09 frames. ], batch size: 25, lr: 2.19e-02, grad_scale: 32.0
+2024-07-27 16:01:29,012 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 16:01:40,728 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2286, simple_loss=0.328, pruned_loss=0.06459, over 944034.00 frames.
+2024-07-27 16:01:40,729 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 16:01:41,893 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.58 vs. limit=8.0
+2024-07-27 16:01:56,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=35286.666666666664, ans=0.125
+2024-07-27 16:02:01,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=35300.0, ans=0.2
+2024-07-27 16:02:08,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=35300.0, ans=0.125
+2024-07-27 16:02:12,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=35313.333333333336, ans=0.003192753623188405
+2024-07-27 16:02:17,517 INFO [train.py:1114] (0/4) Epoch 3, batch 6050, loss[loss=0.2532, simple_loss=0.327, pruned_loss=0.08972, over 4777.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3514, pruned_loss=0.109, over 938176.69 frames. ], batch size: 12, lr: 2.19e-02, grad_scale: 32.0
+2024-07-27 16:02:17,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=35326.666666666664, ans=0.125
+2024-07-27 16:02:29,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.303e+01 6.741e+01 7.557e+01 8.762e+01 1.550e+02, threshold=1.511e+02, percent-clipped=0.0
+2024-07-27 16:02:29,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=35340.0, ans=0.125
+2024-07-27 16:02:31,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=35353.333333333336, ans=0.125
+2024-07-27 16:02:58,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=35393.333333333336, ans=0.125
+2024-07-27 16:02:59,386 INFO [train.py:1114] (0/4) Epoch 3, batch 6100, loss[loss=0.2859, simple_loss=0.3674, pruned_loss=0.1022, over 4675.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3497, pruned_loss=0.1076, over 937383.44 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 32.0
+2024-07-27 16:03:07,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.82 vs. limit=15.0
+2024-07-27 16:03:12,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=35420.0, ans=0.07
+2024-07-27 16:03:12,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35420.0, ans=0.0
+2024-07-27 16:03:15,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=35420.0, ans=0.1
+2024-07-27 16:03:15,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=35420.0, ans=0.05
+2024-07-27 16:03:17,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35420.0, ans=0.125
+2024-07-27 16:03:20,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.13 vs. limit=15.0
+2024-07-27 16:03:33,284 INFO [train.py:1114] (0/4) Epoch 3, batch 6150, loss[loss=0.4115, simple_loss=0.4343, pruned_loss=0.1944, over 3326.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.351, pruned_loss=0.108, over 936315.74 frames. ], batch size: 36, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:03:41,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=35473.333333333336, ans=0.125
+2024-07-27 16:03:46,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.689e+01 7.561e+01 9.895e+01 1.847e+02, threshold=1.512e+02, percent-clipped=5.0
+2024-07-27 16:04:00,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35500.0, ans=0.1
+2024-07-27 16:04:09,276 INFO [train.py:1114] (0/4) Epoch 3, batch 6200, loss[loss=0.258, simple_loss=0.3341, pruned_loss=0.09092, over 4745.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3508, pruned_loss=0.1075, over 936413.78 frames. ], batch size: 14, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:04:10,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35526.666666666664, ans=0.125
+2024-07-27 16:04:11,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.60 vs. limit=15.0
+2024-07-27 16:04:12,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=35526.666666666664, ans=0.0031463768115942035
+2024-07-27 16:04:15,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=35540.0, ans=0.0
+2024-07-27 16:04:23,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=35553.333333333336, ans=0.0
+2024-07-27 16:04:24,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35553.333333333336, ans=0.125
+2024-07-27 16:04:25,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=6.0
+2024-07-27 16:04:38,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=35580.0, ans=0.125
+2024-07-27 16:04:38,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=35580.0, ans=0.125
+2024-07-27 16:04:43,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=35593.333333333336, ans=0.003131884057971015
+2024-07-27 16:04:43,494 INFO [train.py:1114] (0/4) Epoch 3, batch 6250, loss[loss=0.3181, simple_loss=0.3846, pruned_loss=0.1258, over 4798.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3506, pruned_loss=0.1072, over 932544.52 frames. ], batch size: 14, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:04:49,853 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:04:51,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=35606.666666666664, ans=0.125
+2024-07-27 16:04:53,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=35606.666666666664, ans=0.025
+2024-07-27 16:04:54,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.517e+01 6.327e+01 7.433e+01 8.878e+01 1.317e+02, threshold=1.487e+02, percent-clipped=0.0
+2024-07-27 16:04:57,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=35606.666666666664, ans=0.09899494936611666
+2024-07-27 16:05:03,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0
+2024-07-27 16:05:08,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35633.333333333336, ans=0.1
+2024-07-27 16:05:19,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.23 vs. limit=22.5
+2024-07-27 16:05:23,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=35646.666666666664, ans=0.025
+2024-07-27 16:05:27,329 INFO [train.py:1114] (0/4) Epoch 3, batch 6300, loss[loss=0.2345, simple_loss=0.3141, pruned_loss=0.0775, over 4605.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3505, pruned_loss=0.1077, over 929535.30 frames. ], batch size: 10, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:05:30,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=35660.0, ans=0.2
+2024-07-27 16:05:34,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=35673.333333333336, ans=0.125
+2024-07-27 16:05:45,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0
+2024-07-27 16:05:50,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=35700.0, ans=0.0
+2024-07-27 16:05:52,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.46 vs. limit=15.0
+2024-07-27 16:06:00,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35726.666666666664, ans=0.125
+2024-07-27 16:06:00,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35726.666666666664, ans=0.125
+2024-07-27 16:06:00,823 INFO [train.py:1114] (0/4) Epoch 3, batch 6350, loss[loss=0.3311, simple_loss=0.3846, pruned_loss=0.1388, over 4565.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.351, pruned_loss=0.1082, over 933767.77 frames. ], batch size: 21, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:06:07,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=35740.0, ans=0.125
+2024-07-27 16:06:12,647 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.099e+01 7.124e+01 7.949e+01 9.215e+01 1.375e+02, threshold=1.590e+02, percent-clipped=0.0
+2024-07-27 16:06:16,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35753.333333333336, ans=0.1
+2024-07-27 16:06:22,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=35766.666666666664, ans=0.003094202898550726
+2024-07-27 16:06:24,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=35766.666666666664, ans=0.125
+2024-07-27 16:06:24,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=35766.666666666664, ans=0.125
+2024-07-27 16:06:26,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=35766.666666666664, ans=0.025
+2024-07-27 16:06:36,558 INFO [train.py:1114] (0/4) Epoch 3, batch 6400, loss[loss=0.2684, simple_loss=0.3238, pruned_loss=0.1065, over 4630.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.351, pruned_loss=0.1086, over 934962.09 frames. ], batch size: 13, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:06:45,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35806.666666666664, ans=0.125
+2024-07-27 16:06:48,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=35806.666666666664, ans=0.125
+2024-07-27 16:07:02,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=35833.333333333336, ans=0.125
+2024-07-27 16:07:03,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=35833.333333333336, ans=0.0
+2024-07-27 16:07:12,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=35846.666666666664, ans=0.0
+2024-07-27 16:07:15,632 INFO [train.py:1114] (0/4) Epoch 3, batch 6450, loss[loss=0.2664, simple_loss=0.3464, pruned_loss=0.09319, over 4538.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3514, pruned_loss=0.1077, over 938602.63 frames. ], batch size: 21, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:07:19,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=35860.0, ans=0.125
+2024-07-27 16:07:32,079 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 7.043e+01 8.051e+01 9.807e+01 1.613e+02, threshold=1.610e+02, percent-clipped=2.0
+2024-07-27 16:07:43,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35900.0, ans=0.125
+2024-07-27 16:07:49,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=35913.333333333336, ans=0.0
+2024-07-27 16:07:53,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=15.0
+2024-07-27 16:07:55,529 INFO [train.py:1114] (0/4) Epoch 3, batch 6500, loss[loss=0.3942, simple_loss=0.408, pruned_loss=0.1902, over 3441.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3503, pruned_loss=0.1073, over 939849.84 frames. ], batch size: 35, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:08:05,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=35940.0, ans=0.125
+2024-07-27 16:08:07,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=35940.0, ans=0.125
+2024-07-27 16:08:18,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=35966.666666666664, ans=0.0
+2024-07-27 16:08:19,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=35966.666666666664, ans=0.0
+2024-07-27 16:08:29,003 INFO [train.py:1114] (0/4) Epoch 3, batch 6550, loss[loss=0.2392, simple_loss=0.3136, pruned_loss=0.08237, over 4793.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3494, pruned_loss=0.1066, over 942722.66 frames. ], batch size: 11, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:08:37,223 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.25 vs. limit=12.0
+2024-07-27 16:08:42,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=36006.666666666664, ans=0.1
+2024-07-27 16:08:44,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=36006.666666666664, ans=0.0
+2024-07-27 16:08:44,816 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.305e+01 6.734e+01 7.453e+01 8.745e+01 1.645e+02, threshold=1.491e+02, percent-clipped=1.0
+2024-07-27 16:08:44,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36006.666666666664, ans=0.125
+2024-07-27 16:08:53,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=36033.333333333336, ans=0.05
+2024-07-27 16:09:03,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.14 vs. limit=22.5
+2024-07-27 16:09:07,347 INFO [train.py:1114] (0/4) Epoch 3, batch 6600, loss[loss=0.2966, simple_loss=0.355, pruned_loss=0.1191, over 4931.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3507, pruned_loss=0.1074, over 944636.91 frames. ], batch size: 14, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:09:08,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=36060.0, ans=0.2
+2024-07-27 16:09:09,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=36060.0, ans=0.025
+2024-07-27 16:09:18,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=22.5
+2024-07-27 16:09:20,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36073.333333333336, ans=0.1
+2024-07-27 16:09:21,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=36086.666666666664, ans=0.125
+2024-07-27 16:09:24,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=36086.666666666664, ans=0.125
+2024-07-27 16:09:25,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36086.666666666664, ans=0.125
+2024-07-27 16:09:25,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.81 vs. limit=15.0
+2024-07-27 16:09:35,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=36113.333333333336, ans=0.0
+2024-07-27 16:09:43,532 INFO [train.py:1114] (0/4) Epoch 3, batch 6650, loss[loss=0.3133, simple_loss=0.3697, pruned_loss=0.1285, over 4613.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3504, pruned_loss=0.1076, over 943416.33 frames. ], batch size: 17, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:09:45,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=36126.666666666664, ans=0.125
+2024-07-27 16:09:45,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=36126.666666666664, ans=0.025
+2024-07-27 16:09:47,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36126.666666666664, ans=0.0
+2024-07-27 16:09:54,994 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.831e+01 8.168e+01 1.025e+02 1.593e+02, threshold=1.634e+02, percent-clipped=2.0
+2024-07-27 16:09:55,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0
+2024-07-27 16:10:03,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.37 vs. limit=15.0
+2024-07-27 16:10:10,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36166.666666666664, ans=0.125
+2024-07-27 16:10:21,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=36180.0, ans=0.0
+2024-07-27 16:10:23,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=36193.333333333336, ans=0.125
+2024-07-27 16:10:24,376 INFO [train.py:1114] (0/4) Epoch 3, batch 6700, loss[loss=0.2744, simple_loss=0.3569, pruned_loss=0.09599, over 4680.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3514, pruned_loss=0.1082, over 942083.32 frames. ], batch size: 19, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:10:27,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36193.333333333336, ans=0.125
+2024-07-27 16:10:27,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=36193.333333333336, ans=0.125
+2024-07-27 16:10:35,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.41 vs. limit=6.0
+2024-07-27 16:10:36,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=36206.666666666664, ans=0.125
+2024-07-27 16:10:40,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.77 vs. limit=15.0
+2024-07-27 16:10:43,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36220.0, ans=0.1
+2024-07-27 16:10:45,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36233.333333333336, ans=0.125
+2024-07-27 16:10:54,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36246.666666666664, ans=0.1
+2024-07-27 16:10:58,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36246.666666666664, ans=0.125
+2024-07-27 16:10:59,181 INFO [train.py:1114] (0/4) Epoch 3, batch 6750, loss[loss=0.3025, simple_loss=0.363, pruned_loss=0.121, over 4367.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3504, pruned_loss=0.1076, over 940349.86 frames. ], batch size: 26, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:11:10,907 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.207e+01 6.852e+01 7.798e+01 8.780e+01 1.253e+02, threshold=1.560e+02, percent-clipped=0.0
+2024-07-27 16:11:19,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=36300.0, ans=0.09899494936611666
+2024-07-27 16:11:33,070 INFO [train.py:1114] (0/4) Epoch 3, batch 6800, loss[loss=0.272, simple_loss=0.3407, pruned_loss=0.1017, over 4638.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.352, pruned_loss=0.1086, over 938798.28 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:11:36,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=36326.666666666664, ans=0.025
+2024-07-27 16:11:39,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=36326.666666666664, ans=0.125
+2024-07-27 16:11:43,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=36340.0, ans=0.125
+2024-07-27 16:11:47,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36340.0, ans=0.1
+2024-07-27 16:11:59,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=36366.666666666664, ans=0.0
+2024-07-27 16:12:00,684 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.87 vs. limit=15.0
+2024-07-27 16:12:04,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.41 vs. limit=15.0
+2024-07-27 16:12:08,460 INFO [train.py:1114] (0/4) Epoch 3, batch 6850, loss[loss=0.3156, simple_loss=0.3839, pruned_loss=0.1236, over 4692.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3519, pruned_loss=0.1084, over 940611.52 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:12:11,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36393.333333333336, ans=0.1
+2024-07-27 16:12:13,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36393.333333333336, ans=0.125
+2024-07-27 16:12:17,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.18 vs. limit=10.0
+2024-07-27 16:12:19,974 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.863e+01 7.550e+01 8.711e+01 1.509e+02, threshold=1.510e+02, percent-clipped=0.0
+2024-07-27 16:12:24,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=36420.0, ans=0.125
+2024-07-27 16:12:27,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=36420.0, ans=0.125
+2024-07-27 16:12:44,073 INFO [train.py:1114] (0/4) Epoch 3, batch 6900, loss[loss=0.2738, simple_loss=0.3388, pruned_loss=0.1044, over 4958.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3507, pruned_loss=0.1074, over 942603.27 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:12:54,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.99 vs. limit=15.0
+2024-07-27 16:12:54,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0
+2024-07-27 16:12:56,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=36473.333333333336, ans=0.0029405797101449277
+2024-07-27 16:12:58,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=36486.666666666664, ans=0.125
+2024-07-27 16:13:04,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36500.0, ans=0.1
+2024-07-27 16:13:16,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=36513.333333333336, ans=0.2
+2024-07-27 16:13:18,508 INFO [train.py:1114] (0/4) Epoch 3, batch 6950, loss[loss=0.2288, simple_loss=0.2968, pruned_loss=0.08037, over 4537.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3504, pruned_loss=0.1076, over 939553.37 frames. ], batch size: 10, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:13:31,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.70 vs. limit=22.5
+2024-07-27 16:13:32,733 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.730e+01 8.056e+01 9.531e+01 1.380e+02, threshold=1.611e+02, percent-clipped=0.0
+2024-07-27 16:13:36,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=36553.333333333336, ans=0.125
+2024-07-27 16:13:38,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=36553.333333333336, ans=0.125
+2024-07-27 16:13:38,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36553.333333333336, ans=0.125
+2024-07-27 16:13:54,240 INFO [train.py:1114] (0/4) Epoch 3, batch 7000, loss[loss=0.3013, simple_loss=0.3692, pruned_loss=0.1167, over 4592.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3487, pruned_loss=0.1066, over 938298.94 frames. ], batch size: 17, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:13:54,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=36593.333333333336, ans=0.125
+2024-07-27 16:14:00,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=36606.666666666664, ans=0.0
+2024-07-27 16:14:13,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=36620.0, ans=0.125
+2024-07-27 16:14:28,485 INFO [train.py:1114] (0/4) Epoch 3, batch 7050, loss[loss=0.3627, simple_loss=0.4227, pruned_loss=0.1513, over 4738.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3493, pruned_loss=0.1064, over 941788.35 frames. ], batch size: 19, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:14:33,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.32 vs. limit=22.5
+2024-07-27 16:14:50,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.466e+01 6.926e+01 7.603e+01 8.954e+01 1.226e+02, threshold=1.521e+02, percent-clipped=0.0
+2024-07-27 16:14:52,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.76 vs. limit=22.5
+2024-07-27 16:15:01,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36700.0, ans=0.125
+2024-07-27 16:15:02,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.97 vs. limit=6.0
+2024-07-27 16:15:04,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=36700.0, ans=10.0
+2024-07-27 16:15:09,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36713.333333333336, ans=0.1
+2024-07-27 16:15:11,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=36713.333333333336, ans=0.125
+2024-07-27 16:15:12,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36713.333333333336, ans=0.1
+2024-07-27 16:15:13,272 INFO [train.py:1114] (0/4) Epoch 3, batch 7100, loss[loss=0.3087, simple_loss=0.3806, pruned_loss=0.1183, over 4800.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3504, pruned_loss=0.107, over 937326.55 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:15:17,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.39 vs. limit=15.0
+2024-07-27 16:15:35,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.75 vs. limit=15.0
+2024-07-27 16:15:44,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=36780.0, ans=0.125
+2024-07-27 16:15:51,805 INFO [train.py:1114] (0/4) Epoch 3, batch 7150, loss[loss=0.2904, simple_loss=0.3631, pruned_loss=0.1089, over 4410.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3478, pruned_loss=0.1058, over 938372.18 frames. ], batch size: 21, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:16:07,965 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.541e+01 6.708e+01 7.597e+01 9.458e+01 1.380e+02, threshold=1.519e+02, percent-clipped=0.0
+2024-07-27 16:16:09,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.37 vs. limit=6.0
+2024-07-27 16:16:11,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=36820.0, ans=0.125
+2024-07-27 16:16:12,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.58 vs. limit=22.5
+2024-07-27 16:16:26,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.73 vs. limit=22.5
+2024-07-27 16:16:29,422 INFO [train.py:1114] (0/4) Epoch 3, batch 7200, loss[loss=0.3097, simple_loss=0.3766, pruned_loss=0.1214, over 4799.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3493, pruned_loss=0.1062, over 938484.85 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 32.0
+2024-07-27 16:16:31,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=36860.0, ans=0.125
+2024-07-27 16:16:36,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.11 vs. limit=6.0
+2024-07-27 16:16:50,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36886.666666666664, ans=0.1
+2024-07-27 16:16:52,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=36900.0, ans=0.125
+2024-07-27 16:17:06,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0
+2024-07-27 16:17:11,428 INFO [train.py:1114] (0/4) Epoch 3, batch 7250, loss[loss=0.2452, simple_loss=0.313, pruned_loss=0.08867, over 4856.00 frames. ], tot_loss[loss=0.2786, simple_loss=0.3473, pruned_loss=0.1049, over 940106.03 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:20,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=36940.0, ans=0.5
+2024-07-27 16:17:23,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.548e+01 7.607e+01 9.272e+01 1.593e+02, threshold=1.521e+02, percent-clipped=2.0
+2024-07-27 16:17:33,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36966.666666666664, ans=0.125
+2024-07-27 16:17:35,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=36966.666666666664, ans=0.125
+2024-07-27 16:17:42,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=36980.0, ans=0.0
+2024-07-27 16:17:42,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36980.0, ans=0.125
+2024-07-27 16:17:42,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=36980.0, ans=0.0028304347826086955
+2024-07-27 16:17:44,584 INFO [train.py:1114] (0/4) Epoch 3, batch 7300, loss[loss=0.2593, simple_loss=0.3311, pruned_loss=0.09372, over 4850.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3479, pruned_loss=0.1054, over 940412.05 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:47,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=36993.333333333336, ans=0.0
+2024-07-27 16:17:49,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=36993.333333333336, ans=0.025
+2024-07-27 16:17:49,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=36993.333333333336, ans=0.125
+2024-07-27 16:17:56,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=37006.666666666664, ans=0.125
+2024-07-27 16:17:56,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.08 vs. limit=15.0
+2024-07-27 16:17:58,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=37020.0, ans=0.002821739130434783
+2024-07-27 16:18:05,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=37033.333333333336, ans=0.125
+2024-07-27 16:18:13,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=37046.666666666664, ans=0.125
+2024-07-27 16:18:17,363 INFO [train.py:1114] (0/4) Epoch 3, batch 7350, loss[loss=0.23, simple_loss=0.2937, pruned_loss=0.08311, over 4644.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3473, pruned_loss=0.1048, over 939740.54 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:18:19,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.75 vs. limit=6.0
+2024-07-27 16:18:20,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37060.0, ans=0.0
+2024-07-27 16:18:23,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.40 vs. limit=15.0
+2024-07-27 16:18:27,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0
+2024-07-27 16:18:29,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.542e+01 6.884e+01 7.906e+01 1.038e+02 1.585e+02, threshold=1.581e+02, percent-clipped=4.0
+2024-07-27 16:18:42,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=37100.0, ans=0.04949747468305833
+2024-07-27 16:18:45,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=37113.333333333336, ans=0.2
+2024-07-27 16:18:48,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.28 vs. limit=15.0
+2024-07-27 16:18:49,396 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:18:49,932 INFO [train.py:1114] (0/4) Epoch 3, batch 7400, loss[loss=0.3435, simple_loss=0.4013, pruned_loss=0.1429, over 4704.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3489, pruned_loss=0.1056, over 940824.60 frames. ], batch size: 13, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:19:04,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37153.333333333336, ans=0.125
+2024-07-27 16:19:10,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=37166.666666666664, ans=0.125
+2024-07-27 16:19:19,678 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:19:22,929 INFO [train.py:1114] (0/4) Epoch 3, batch 7450, loss[loss=0.2587, simple_loss=0.3166, pruned_loss=0.1003, over 4621.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3488, pruned_loss=0.1059, over 938200.73 frames. ], batch size: 11, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:19:34,466 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.772e+01 7.758e+01 9.808e+01 2.086e+02, threshold=1.552e+02, percent-clipped=2.0
+2024-07-27 16:19:55,859 INFO [train.py:1114] (0/4) Epoch 3, batch 7500, loss[loss=0.3616, simple_loss=0.4, pruned_loss=0.1616, over 3464.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3498, pruned_loss=0.1067, over 936421.14 frames. ], batch size: 35, lr: 2.13e-02, grad_scale: 16.0
+2024-07-27 16:20:00,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.30 vs. limit=15.0
+2024-07-27 16:20:00,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.62 vs. limit=12.0
+2024-07-27 16:20:01,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.72 vs. limit=12.0
+2024-07-27 16:20:06,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=37273.333333333336, ans=0.125
+2024-07-27 16:20:06,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=37273.333333333336, ans=0.125
+2024-07-27 16:20:16,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.32 vs. limit=22.5
+2024-07-27 16:20:16,391 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.11 vs. limit=10.0
+2024-07-27 16:20:18,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.47 vs. limit=15.0
+2024-07-27 16:20:24,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=37313.333333333336, ans=0.0
+2024-07-27 16:20:25,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=37313.333333333336, ans=0.2
+2024-07-27 16:20:29,156 INFO [train.py:1114] (0/4) Epoch 3, batch 7550, loss[loss=0.304, simple_loss=0.358, pruned_loss=0.125, over 4611.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3512, pruned_loss=0.1074, over 936371.81 frames. ], batch size: 17, lr: 2.13e-02, grad_scale: 16.0
+2024-07-27 16:20:31,968 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-28000.pt
+2024-07-27 16:21:37,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=37340.0, ans=0.125
+2024-07-27 16:21:39,450 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.630e+01 6.808e+01 7.761e+01 9.046e+01 1.679e+02, threshold=1.552e+02, percent-clipped=1.0
+2024-07-27 16:21:41,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.17 vs. limit=22.5
+2024-07-27 16:21:46,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37366.666666666664, ans=0.1
+2024-07-27 16:21:47,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=37366.666666666664, ans=0.0
+2024-07-27 16:21:58,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.18 vs. limit=12.0
+2024-07-27 16:21:59,521 INFO [train.py:1114] (0/4) Epoch 3, batch 7600, loss[loss=0.2903, simple_loss=0.357, pruned_loss=0.1118, over 4822.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3504, pruned_loss=0.1072, over 938421.32 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:22:00,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.84 vs. limit=10.0
+2024-07-27 16:22:05,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=37406.666666666664, ans=0.0
+2024-07-27 16:22:08,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0
+2024-07-27 16:22:09,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.51 vs. limit=15.0
+2024-07-27 16:22:12,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=37406.666666666664, ans=0.0
+2024-07-27 16:22:17,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37420.0, ans=0.1
+2024-07-27 16:22:24,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.51 vs. limit=10.0
+2024-07-27 16:22:27,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=37433.333333333336, ans=0.125
+2024-07-27 16:22:30,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.65 vs. limit=22.5
+2024-07-27 16:22:39,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.03 vs. limit=15.0
+2024-07-27 16:22:40,227 INFO [train.py:1114] (0/4) Epoch 3, batch 7650, loss[loss=0.2821, simple_loss=0.3474, pruned_loss=0.1084, over 4937.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.3517, pruned_loss=0.1082, over 937180.33 frames. ], batch size: 12, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:22:54,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=37460.0, ans=0.125
+2024-07-27 16:23:02,043 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.597e+01 7.183e+01 8.812e+01 1.036e+02 1.540e+02, threshold=1.762e+02, percent-clipped=0.0
+2024-07-27 16:23:20,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=37500.0, ans=0.2
+2024-07-27 16:23:30,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0
+2024-07-27 16:23:35,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37513.333333333336, ans=0.125
+2024-07-27 16:23:39,144 INFO [train.py:1114] (0/4) Epoch 3, batch 7700, loss[loss=0.2437, simple_loss=0.3288, pruned_loss=0.07934, over 4700.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3526, pruned_loss=0.1087, over 934286.67 frames. ], batch size: 13, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:23:50,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37526.666666666664, ans=0.125
+2024-07-27 16:23:54,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37540.0, ans=0.125
+2024-07-27 16:24:21,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=37580.0, ans=0.125
+2024-07-27 16:24:22,454 INFO [train.py:1114] (0/4) Epoch 3, batch 7750, loss[loss=0.2791, simple_loss=0.3461, pruned_loss=0.106, over 4939.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3538, pruned_loss=0.1093, over 935819.61 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:24:42,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=37593.333333333336, ans=0.125
+2024-07-27 16:24:52,950 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.584e+01 7.302e+01 8.614e+01 1.487e+02, threshold=1.460e+02, percent-clipped=0.0
+2024-07-27 16:24:59,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37620.0, ans=0.1
+2024-07-27 16:25:07,073 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:25:07,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37633.333333333336, ans=0.125
+2024-07-27 16:25:09,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=37633.333333333336, ans=0.2
+2024-07-27 16:25:10,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0
+2024-07-27 16:25:18,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37646.666666666664, ans=0.1
+2024-07-27 16:25:24,479 INFO [train.py:1114] (0/4) Epoch 3, batch 7800, loss[loss=0.2629, simple_loss=0.3341, pruned_loss=0.09585, over 4662.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3528, pruned_loss=0.108, over 937632.94 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:25:26,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.29 vs. limit=15.0
+2024-07-27 16:25:27,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0
+2024-07-27 16:25:33,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=37673.333333333336, ans=0.025
+2024-07-27 16:25:36,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37673.333333333336, ans=0.1
+2024-07-27 16:25:39,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.75 vs. limit=12.0
+2024-07-27 16:25:55,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=37713.333333333336, ans=0.125
+2024-07-27 16:26:00,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=37713.333333333336, ans=0.125
+2024-07-27 16:26:06,467 INFO [train.py:1114] (0/4) Epoch 3, batch 7850, loss[loss=0.2446, simple_loss=0.3194, pruned_loss=0.0849, over 4921.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3517, pruned_loss=0.1074, over 936816.28 frames. ], batch size: 11, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:26:19,927 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+01 6.634e+01 7.796e+01 9.040e+01 1.354e+02, threshold=1.559e+02, percent-clipped=0.0
+2024-07-27 16:26:31,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=37753.333333333336, ans=0.125
+2024-07-27 16:26:35,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=37766.666666666664, ans=0.025
+2024-07-27 16:26:38,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=37766.666666666664, ans=0.5
+2024-07-27 16:26:44,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0
+2024-07-27 16:26:47,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=37780.0, ans=0.0
+2024-07-27 16:26:51,031 INFO [train.py:1114] (0/4) Epoch 3, batch 7900, loss[loss=0.2742, simple_loss=0.3493, pruned_loss=0.09952, over 4867.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3529, pruned_loss=0.108, over 933694.32 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:26:52,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37793.333333333336, ans=0.1
+2024-07-27 16:27:14,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.75 vs. limit=10.0
+2024-07-27 16:27:16,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=37806.666666666664, ans=0.125
+2024-07-27 16:27:28,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.67 vs. limit=15.0
+2024-07-27 16:27:45,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37846.666666666664, ans=0.1
+2024-07-27 16:27:55,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=37846.666666666664, ans=0.125
+2024-07-27 16:27:58,729 INFO [train.py:1114] (0/4) Epoch 3, batch 7950, loss[loss=0.3762, simple_loss=0.4054, pruned_loss=0.1735, over 3449.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3518, pruned_loss=0.1071, over 935903.65 frames. ], batch size: 35, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:28:14,334 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.890e+01 6.860e+01 7.518e+01 9.206e+01 1.306e+02, threshold=1.504e+02, percent-clipped=0.0
+2024-07-27 16:28:23,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=37900.0, ans=0.2
+2024-07-27 16:28:51,959 INFO [train.py:1114] (0/4) Epoch 3, batch 8000, loss[loss=0.267, simple_loss=0.3215, pruned_loss=0.1062, over 4612.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3486, pruned_loss=0.1054, over 935450.89 frames. ], batch size: 11, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:30:39,456 INFO [train.py:1114] (0/4) Epoch 3, batch 8050, loss[loss=0.2528, simple_loss=0.341, pruned_loss=0.08228, over 4808.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3496, pruned_loss=0.1059, over 935250.18 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:30:44,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=37993.333333333336, ans=0.125
+2024-07-27 16:30:46,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=37993.333333333336, ans=0.09899494936611666
+2024-07-27 16:30:49,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37993.333333333336, ans=0.125
+2024-07-27 16:31:03,282 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.744e+01 7.005e+01 8.059e+01 9.966e+01 1.848e+02, threshold=1.612e+02, percent-clipped=3.0
+2024-07-27 16:31:08,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=38020.0, ans=0.0
+2024-07-27 16:31:08,665 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:31:41,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=38033.333333333336, ans=0.07
+2024-07-27 16:32:22,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38046.666666666664, ans=0.125
+2024-07-27 16:32:24,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=38046.666666666664, ans=0.125
+2024-07-27 16:32:37,830 INFO [train.py:1114] (0/4) Epoch 3, batch 8100, loss[loss=0.341, simple_loss=0.3996, pruned_loss=0.1412, over 4801.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3518, pruned_loss=0.1069, over 934747.92 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:32:54,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=38060.0, ans=0.2
+2024-07-27 16:33:12,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=38073.333333333336, ans=10.0
+2024-07-27 16:33:25,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38086.666666666664, ans=0.125
+2024-07-27 16:33:30,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38100.0, ans=0.125
+2024-07-27 16:33:47,674 INFO [train.py:1114] (0/4) Epoch 3, batch 8150, loss[loss=0.3379, simple_loss=0.402, pruned_loss=0.1369, over 4803.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3496, pruned_loss=0.1054, over 938135.10 frames. 
], batch size: 15, lr: 2.11e-02, grad_scale: 32.0 +2024-07-27 16:34:17,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=38140.0, ans=0.0025782608695652165 +2024-07-27 16:34:19,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.808e+01 6.778e+01 7.869e+01 9.669e+01 1.901e+02, threshold=1.574e+02, percent-clipped=1.0 +2024-07-27 16:34:37,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=38166.666666666664, ans=0.0 +2024-07-27 16:34:48,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=38166.666666666664, ans=0.04949747468305833 +2024-07-27 16:34:56,442 INFO [train.py:1114] (0/4) Epoch 3, batch 8200, loss[loss=0.3106, simple_loss=0.3793, pruned_loss=0.1209, over 4814.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3498, pruned_loss=0.1054, over 939074.22 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0 +2024-07-27 16:35:00,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=38193.333333333336, ans=0.125 +2024-07-27 16:35:02,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=38193.333333333336, ans=0.2 +2024-07-27 16:35:06,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. limit=6.0 +2024-07-27 16:35:14,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=38206.666666666664, ans=0.2 +2024-07-27 16:35:16,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=38220.0, ans=0.07 +2024-07-27 16:35:19,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.30 vs. limit=10.0 +2024-07-27 16:35:24,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.35 vs. limit=15.0 +2024-07-27 16:35:26,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.91 vs. limit=15.0 +2024-07-27 16:35:35,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=38233.333333333336, ans=0.2 +2024-07-27 16:35:37,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. limit=6.0 +2024-07-27 16:35:45,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=38260.0, ans=0.1 +2024-07-27 16:35:45,644 INFO [train.py:1114] (0/4) Epoch 3, batch 8250, loss[loss=0.2514, simple_loss=0.3308, pruned_loss=0.08605, over 4898.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3493, pruned_loss=0.1056, over 939088.85 frames. 
], batch size: 13, lr: 2.11e-02, grad_scale: 16.0 +2024-07-27 16:35:55,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=38260.0, ans=0.2 +2024-07-27 16:35:56,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-07-27 16:36:04,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.777e+01 7.463e+01 9.374e+01 1.482e+02, threshold=1.493e+02, percent-clipped=0.0 +2024-07-27 16:36:12,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=38300.0, ans=0.2 +2024-07-27 16:36:15,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=38300.0, ans=0.07 +2024-07-27 16:36:16,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=38300.0, ans=0.0025434782608695656 +2024-07-27 16:36:21,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.44 vs. limit=15.0 +2024-07-27 16:36:24,704 INFO [train.py:1114] (0/4) Epoch 3, batch 8300, loss[loss=0.2739, simple_loss=0.3518, pruned_loss=0.09803, over 4907.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3506, pruned_loss=0.1061, over 939271.76 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 16.0 +2024-07-27 16:36:35,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=38340.0, ans=0.0 +2024-07-27 16:36:42,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=38353.333333333336, ans=0.0 +2024-07-27 16:36:57,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=38380.0, ans=0.125 +2024-07-27 16:37:01,106 INFO [train.py:1114] (0/4) Epoch 3, batch 8350, loss[loss=0.2918, simple_loss=0.3602, pruned_loss=0.1117, over 4803.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3494, pruned_loss=0.1053, over 941927.00 frames. 
], batch size: 15, lr: 2.11e-02, grad_scale: 16.0 +2024-07-27 16:37:07,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=38393.333333333336, ans=0.125 +2024-07-27 16:37:07,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=38393.333333333336, ans=0.125 +2024-07-27 16:37:11,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=38393.333333333336, ans=0.2 +2024-07-27 16:38:23,646 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.458e+01 6.838e+01 7.813e+01 8.986e+01 1.214e+02, threshold=1.563e+02, percent-clipped=0.0 +2024-07-27 16:38:42,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38420.0, ans=0.0 +2024-07-27 16:38:46,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=38433.333333333336, ans=0.2 +2024-07-27 16:38:52,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=38446.666666666664, ans=0.125 +2024-07-27 16:39:01,334 INFO [train.py:1114] (0/4) Epoch 3, batch 8400, loss[loss=0.243, simple_loss=0.3204, pruned_loss=0.08277, over 4768.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3489, pruned_loss=0.1052, over 940181.99 frames. ], batch size: 12, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:39:01,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=38460.0, ans=10.0 +2024-07-27 16:39:01,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=38460.0, ans=0.025 +2024-07-27 16:39:08,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38460.0, ans=0.125 +2024-07-27 16:39:09,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=38460.0, ans=0.025 +2024-07-27 16:39:13,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=38473.333333333336, ans=0.0 +2024-07-27 16:39:15,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=38473.333333333336, ans=0.0 +2024-07-27 16:39:16,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.93 vs. limit=15.0 +2024-07-27 16:39:16,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=38473.333333333336, ans=0.002505797101449275 +2024-07-27 16:39:19,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=38486.666666666664, ans=0.0025028985507246377 +2024-07-27 16:39:22,492 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.22 vs. 
limit=22.5 +2024-07-27 16:39:23,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=38486.666666666664, ans=0.0 +2024-07-27 16:39:27,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=38500.0, ans=0.125 +2024-07-27 16:39:32,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38513.333333333336, ans=0.125 +2024-07-27 16:39:46,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=38526.666666666664, ans=22.5 +2024-07-27 16:39:46,499 INFO [train.py:1114] (0/4) Epoch 3, batch 8450, loss[loss=0.2825, simple_loss=0.3693, pruned_loss=0.09784, over 4799.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3501, pruned_loss=0.1056, over 938992.96 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:39:56,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=38526.666666666664, ans=0.95 +2024-07-27 16:39:59,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38540.0, ans=0.1 +2024-07-27 16:40:02,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=38540.0, ans=0.2 +2024-07-27 16:40:03,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.960e+01 7.996e+01 9.204e+01 1.346e+02, threshold=1.599e+02, percent-clipped=0.0 +2024-07-27 16:40:12,404 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:40:15,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38566.666666666664, ans=0.1 +2024-07-27 16:40:18,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=38580.0, ans=0.2 +2024-07-27 16:40:19,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38580.0, ans=0.1 +2024-07-27 16:40:34,593 INFO [train.py:1114] (0/4) Epoch 3, batch 8500, loss[loss=0.2349, simple_loss=0.3077, pruned_loss=0.08107, over 4608.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.349, pruned_loss=0.1051, over 939037.13 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:40:50,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=38620.0, ans=0.0 +2024-07-27 16:40:53,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=38620.0, ans=0.0024739130434782612 +2024-07-27 16:40:54,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=38620.0, ans=0.0 +2024-07-27 16:41:19,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=38646.666666666664, ans=0.2 +2024-07-27 16:41:22,948 INFO [train.py:1114] (0/4) Epoch 3, batch 8550, loss[loss=0.2431, simple_loss=0.319, pruned_loss=0.08361, over 4812.00 frames. 
], tot_loss[loss=0.2789, simple_loss=0.349, pruned_loss=0.1045, over 939718.32 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:41:58,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=38673.333333333336, ans=0.125 +2024-07-27 16:41:59,819 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.027e+01 6.868e+01 7.768e+01 9.567e+01 1.448e+02, threshold=1.554e+02, percent-clipped=0.0 +2024-07-27 16:42:00,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=38686.666666666664, ans=0.0 +2024-07-27 16:42:07,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=38700.0, ans=0.125 +2024-07-27 16:42:19,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38713.333333333336, ans=0.1 +2024-07-27 16:42:37,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=38713.333333333336, ans=0.125 +2024-07-27 16:42:37,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=38713.333333333336, ans=0.125 +2024-07-27 16:42:40,837 INFO [train.py:1114] (0/4) Epoch 3, batch 8600, loss[loss=0.2779, simple_loss=0.3582, pruned_loss=0.09886, over 4812.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3478, pruned_loss=0.1045, over 938998.26 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:42:54,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38753.333333333336, ans=0.125 +2024-07-27 16:42:55,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=38753.333333333336, ans=0.0 +2024-07-27 16:42:56,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.97 vs. limit=15.0 +2024-07-27 16:42:56,249 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.70 vs. limit=15.0 +2024-07-27 16:43:00,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.86 vs. limit=10.0 +2024-07-27 16:43:03,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=38766.666666666664, ans=0.125 +2024-07-27 16:43:13,339 INFO [train.py:1114] (0/4) Epoch 3, batch 8650, loss[loss=0.2843, simple_loss=0.3542, pruned_loss=0.1072, over 4897.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3482, pruned_loss=0.1046, over 940370.39 frames. 
], batch size: 15, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:43:37,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=38793.333333333336, ans=0.09899494936611666 +2024-07-27 16:43:48,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=38793.333333333336, ans=0.125 +2024-07-27 16:43:50,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=15.0 +2024-07-27 16:43:52,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38806.666666666664, ans=0.1 +2024-07-27 16:44:00,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.909e+01 7.732e+01 9.254e+01 1.585e+02, threshold=1.546e+02, percent-clipped=1.0 +2024-07-27 16:44:00,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=38820.0, ans=0.002430434782608696 +2024-07-27 16:44:03,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.03 vs. limit=15.0 +2024-07-27 16:44:05,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=38820.0, ans=0.09899494936611666 +2024-07-27 16:44:05,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38820.0, ans=0.125 +2024-07-27 16:44:09,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=38833.333333333336, ans=0.0 +2024-07-27 16:44:27,632 INFO [train.py:1114] (0/4) Epoch 3, batch 8700, loss[loss=0.2586, simple_loss=0.3351, pruned_loss=0.09108, over 4756.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3503, pruned_loss=0.1063, over 937827.19 frames. ], batch size: 13, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:44:36,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.98 vs. limit=12.0 +2024-07-27 16:44:37,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.51 vs. limit=15.0 +2024-07-27 16:44:40,158 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:45:12,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38913.333333333336, ans=0.1 +2024-07-27 16:45:15,412 INFO [train.py:1114] (0/4) Epoch 3, batch 8750, loss[loss=0.2606, simple_loss=0.3228, pruned_loss=0.09919, over 4670.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3488, pruned_loss=0.1055, over 936226.92 frames. ], batch size: 15, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:45:17,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=38926.666666666664, ans=0.125 +2024-07-27 16:45:23,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.90 vs. 
limit=15.0 +2024-07-27 16:45:28,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38940.0, ans=0.1 +2024-07-27 16:45:35,868 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.721e+01 7.628e+01 9.057e+01 1.548e+02, threshold=1.526e+02, percent-clipped=1.0 +2024-07-27 16:45:36,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38953.333333333336, ans=0.1 +2024-07-27 16:45:42,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.75 vs. limit=15.0 +2024-07-27 16:45:57,590 INFO [train.py:1114] (0/4) Epoch 3, batch 8800, loss[loss=0.25, simple_loss=0.3365, pruned_loss=0.0818, over 4929.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3496, pruned_loss=0.1054, over 936999.18 frames. ], batch size: 14, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:46:08,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=38993.333333333336, ans=0.2 +2024-07-27 16:46:27,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=39006.666666666664, ans=0.2 +2024-07-27 16:46:37,069 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:46:41,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=39006.666666666664, ans=0.125 +2024-07-27 16:46:42,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=39020.0, ans=0.025 +2024-07-27 16:46:43,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=39020.0, ans=0.002386956521739131 +2024-07-27 16:46:45,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=15.0 +2024-07-27 16:47:12,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39046.666666666664, ans=0.1 +2024-07-27 16:47:16,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=39046.666666666664, ans=0.125 +2024-07-27 16:47:18,790 INFO [train.py:1114] (0/4) Epoch 3, batch 8850, loss[loss=0.2915, simple_loss=0.3643, pruned_loss=0.1094, over 4484.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3495, pruned_loss=0.1058, over 931538.18 frames. 
], batch size: 21, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:47:19,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=39060.0, ans=0.2 +2024-07-27 16:47:26,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39073.333333333336, ans=0.1 +2024-07-27 16:47:27,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39073.333333333336, ans=0.125 +2024-07-27 16:47:28,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-07-27 16:47:30,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=39073.333333333336, ans=0.09899494936611666 +2024-07-27 16:47:32,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.718e+01 6.978e+01 8.333e+01 9.846e+01 2.201e+02, threshold=1.667e+02, percent-clipped=2.0 +2024-07-27 16:48:11,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39113.333333333336, ans=0.125 +2024-07-27 16:48:14,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=39113.333333333336, ans=0.025 +2024-07-27 16:48:17,153 INFO [train.py:1114] (0/4) Epoch 3, batch 8900, loss[loss=0.2453, simple_loss=0.2992, pruned_loss=0.09576, over 4930.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3507, pruned_loss=0.1071, over 930445.54 frames. ], batch size: 12, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:48:18,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.04 vs. limit=10.0 +2024-07-27 16:48:24,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=39140.0, ans=0.0 +2024-07-27 16:48:24,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0 +2024-07-27 16:48:39,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=39166.666666666664, ans=0.0 +2024-07-27 16:48:39,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39166.666666666664, ans=0.125 +2024-07-27 16:49:00,629 INFO [train.py:1114] (0/4) Epoch 3, batch 8950, loss[loss=0.3312, simple_loss=0.3957, pruned_loss=0.1333, over 4506.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.35, pruned_loss=0.1064, over 931020.59 frames. ], batch size: 21, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:49:23,808 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.972e+01 6.809e+01 7.609e+01 8.972e+01 1.358e+02, threshold=1.522e+02, percent-clipped=0.0 +2024-07-27 16:49:51,657 INFO [train.py:1114] (0/4) Epoch 3, batch 9000, loss[loss=0.312, simple_loss=0.38, pruned_loss=0.122, over 4640.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3488, pruned_loss=0.1058, over 933958.60 frames. 
], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:49:51,658 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 16:50:05,982 INFO [train.py:1146] (0/4) Epoch 3, validation: loss=0.2254, simple_loss=0.3252, pruned_loss=0.06281, over 944034.00 frames. +2024-07-27 16:50:05,983 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 16:50:06,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39260.0, ans=0.125 +2024-07-27 16:50:07,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=39260.0, ans=0.125 +2024-07-27 16:50:13,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=39260.0, ans=0.2 +2024-07-27 16:50:14,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.90 vs. limit=15.0 +2024-07-27 16:50:23,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 16:50:31,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.84 vs. limit=15.0 +2024-07-27 16:50:40,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=12.0 +2024-07-27 16:50:52,324 INFO [train.py:1114] (0/4) Epoch 3, batch 9050, loss[loss=0.266, simple_loss=0.3237, pruned_loss=0.1041, over 4556.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3476, pruned_loss=0.1052, over 934324.15 frames. ], batch size: 10, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:50:55,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=39326.666666666664, ans=0.025 +2024-07-27 16:50:57,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=39326.666666666664, ans=0.125 +2024-07-27 16:51:04,892 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.807e+01 7.856e+01 8.861e+01 3.440e+02, threshold=1.571e+02, percent-clipped=1.0 +2024-07-27 16:51:06,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39353.333333333336, ans=0.125 +2024-07-27 16:51:11,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=39366.666666666664, ans=0.2 +2024-07-27 16:51:16,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=39366.666666666664, ans=0.0023115942028985514 +2024-07-27 16:51:16,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=39366.666666666664, ans=0.125 +2024-07-27 16:52:04,332 INFO [train.py:1114] (0/4) Epoch 3, batch 9100, loss[loss=0.303, simple_loss=0.3781, pruned_loss=0.114, over 4929.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.347, pruned_loss=0.1044, over 936995.34 frames. 
], batch size: 14, lr: 2.08e-02, grad_scale: 16.0 +2024-07-27 16:52:32,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39420.0, ans=0.125 +2024-07-27 16:52:36,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39420.0, ans=0.1 +2024-07-27 16:52:38,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39433.333333333336, ans=0.125 +2024-07-27 16:52:39,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=39433.333333333336, ans=0.125 +2024-07-27 16:52:45,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=39446.666666666664, ans=0.125 +2024-07-27 16:53:04,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39446.666666666664, ans=0.125 +2024-07-27 16:53:05,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=39460.0, ans=0.125 +2024-07-27 16:53:06,197 INFO [train.py:1114] (0/4) Epoch 3, batch 9150, loss[loss=0.2742, simple_loss=0.3574, pruned_loss=0.09546, over 4816.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3482, pruned_loss=0.1051, over 935852.55 frames. ], batch size: 14, lr: 2.08e-02, grad_scale: 16.0 +2024-07-27 16:53:30,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=39473.333333333336, ans=0.2 +2024-07-27 16:53:32,476 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.23 vs. limit=10.0 +2024-07-27 16:53:38,856 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.526e+01 6.919e+01 8.427e+01 9.572e+01 1.552e+02, threshold=1.685e+02, percent-clipped=0.0 +2024-07-27 16:53:40,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=39486.666666666664, ans=0.2 +2024-07-27 16:54:00,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.86 vs. limit=15.0 +2024-07-27 16:54:03,875 INFO [train.py:1114] (0/4) Epoch 3, batch 9200, loss[loss=0.271, simple_loss=0.3403, pruned_loss=0.1008, over 4860.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3477, pruned_loss=0.1053, over 937584.71 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:54:07,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39526.666666666664, ans=0.1 +2024-07-27 16:54:11,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39540.0, ans=0.125 +2024-07-27 16:54:11,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.06 vs. 
limit=22.5 +2024-07-27 16:54:21,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=39553.333333333336, ans=0.125 +2024-07-27 16:54:31,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=39580.0, ans=0.0 +2024-07-27 16:54:32,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=39580.0, ans=0.0022652173913043474 +2024-07-27 16:54:36,184 INFO [train.py:1114] (0/4) Epoch 3, batch 9250, loss[loss=0.2752, simple_loss=0.3462, pruned_loss=0.1021, over 4631.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3472, pruned_loss=0.1047, over 938294.36 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:54:44,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=39606.666666666664, ans=0.125 +2024-07-27 16:54:45,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=39606.666666666664, ans=0.125 +2024-07-27 16:54:49,930 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.796e+01 6.391e+01 6.941e+01 8.054e+01 1.289e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 16:54:56,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=39633.333333333336, ans=0.125 +2024-07-27 16:55:08,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=39646.666666666664, ans=0.125 +2024-07-27 16:55:10,138 INFO [train.py:1114] (0/4) Epoch 3, batch 9300, loss[loss=0.2394, simple_loss=0.3214, pruned_loss=0.0787, over 4770.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3461, pruned_loss=0.1037, over 938474.36 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:55:35,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.94 vs. limit=10.0 +2024-07-27 16:55:50,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39713.333333333336, ans=0.1 +2024-07-27 16:56:05,256 INFO [train.py:1114] (0/4) Epoch 3, batch 9350, loss[loss=0.2665, simple_loss=0.3237, pruned_loss=0.1046, over 4813.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3476, pruned_loss=0.1051, over 935602.84 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:05,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=39726.666666666664, ans=0.125 +2024-07-27 16:56:06,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-27 16:56:14,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39740.0, ans=0.1 +2024-07-27 16:56:18,144 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.37 vs. 
limit=22.5 +2024-07-27 16:56:20,307 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.575e+01 6.744e+01 7.337e+01 8.957e+01 1.228e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 16:56:24,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.10 vs. limit=10.0 +2024-07-27 16:56:32,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=39780.0, ans=0.2 +2024-07-27 16:56:34,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=39780.0, ans=0.0 +2024-07-27 16:56:39,581 INFO [train.py:1114] (0/4) Epoch 3, batch 9400, loss[loss=0.2738, simple_loss=0.3478, pruned_loss=0.0999, over 4685.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3472, pruned_loss=0.1053, over 933520.95 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:48,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=39806.666666666664, ans=0.2 +2024-07-27 16:56:48,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=39806.666666666664, ans=0.2 +2024-07-27 16:57:00,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39833.333333333336, ans=0.125 +2024-07-27 16:57:05,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0 +2024-07-27 16:57:08,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=39846.666666666664, ans=0.125 +2024-07-27 16:57:09,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=39846.666666666664, ans=0.025 +2024-07-27 16:57:11,996 INFO [train.py:1114] (0/4) Epoch 3, batch 9450, loss[loss=0.2369, simple_loss=0.3007, pruned_loss=0.08655, over 4794.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3466, pruned_loss=0.1052, over 933122.55 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:57:25,851 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.734e+01 7.503e+01 8.983e+01 1.272e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 16:57:30,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39886.666666666664, ans=0.1 +2024-07-27 16:57:42,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=39913.333333333336, ans=0.2 +2024-07-27 16:58:03,383 INFO [train.py:1114] (0/4) Epoch 3, batch 9500, loss[loss=0.2538, simple_loss=0.3089, pruned_loss=0.09937, over 4704.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.347, pruned_loss=0.1049, over 935452.05 frames. 
], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:58:03,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=39926.666666666664, ans=0.2 +2024-07-27 16:58:20,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=39926.666666666664, ans=0.125 +2024-07-27 16:58:25,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=39940.0, ans=0.0 +2024-07-27 16:58:29,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=39940.0, ans=0.2 +2024-07-27 16:58:36,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=39953.333333333336, ans=0.0021840579710144924 +2024-07-27 16:58:38,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. limit=6.0 +2024-07-27 16:58:57,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=39966.666666666664, ans=0.0 +2024-07-27 16:59:09,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=39980.0, ans=0.125 +2024-07-27 16:59:12,963 INFO [train.py:1114] (0/4) Epoch 3, batch 9550, loss[loss=0.3085, simple_loss=0.3652, pruned_loss=0.1259, over 4779.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3465, pruned_loss=0.1049, over 932849.76 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:59:14,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.68 vs. limit=6.0 +2024-07-27 16:59:26,044 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.187e+01 6.649e+01 7.565e+01 8.321e+01 1.560e+02, threshold=1.513e+02, percent-clipped=2.0 +2024-07-27 16:59:34,188 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.41 vs. limit=15.0 +2024-07-27 16:59:37,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=40033.333333333336, ans=0.125 +2024-07-27 16:59:44,502 INFO [train.py:1114] (0/4) Epoch 3, batch 9600, loss[loss=0.4434, simple_loss=0.4559, pruned_loss=0.2155, over 3376.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3476, pruned_loss=0.1054, over 931759.58 frames. 
], batch size: 35, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 16:59:54,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40073.333333333336, ans=0.1 +2024-07-27 17:00:02,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=40086.666666666664, ans=0.0 +2024-07-27 17:00:03,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40086.666666666664, ans=0.1 +2024-07-27 17:00:17,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40100.0, ans=0.125 +2024-07-27 17:00:26,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=40113.333333333336, ans=0.0 +2024-07-27 17:00:30,244 INFO [train.py:1114] (0/4) Epoch 3, batch 9650, loss[loss=0.2924, simple_loss=0.3542, pruned_loss=0.1153, over 4846.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3495, pruned_loss=0.1064, over 927912.95 frames. ], batch size: 16, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:00:44,537 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.272e+01 6.641e+01 7.549e+01 8.923e+01 1.361e+02, threshold=1.510e+02, percent-clipped=0.0 +2024-07-27 17:00:45,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.95 vs. limit=15.0 +2024-07-27 17:00:47,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40153.333333333336, ans=0.1 +2024-07-27 17:00:47,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.56 vs. limit=15.0 +2024-07-27 17:00:50,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=40153.333333333336, ans=0.125 +2024-07-27 17:00:56,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40166.666666666664, ans=0.125 +2024-07-27 17:00:57,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40166.666666666664, ans=0.0 +2024-07-27 17:00:59,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40180.0, ans=0.1 +2024-07-27 17:01:00,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=40180.0, ans=0.125 +2024-07-27 17:01:04,420 INFO [train.py:1114] (0/4) Epoch 3, batch 9700, loss[loss=0.3192, simple_loss=0.3713, pruned_loss=0.1336, over 4174.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3493, pruned_loss=0.1061, over 925651.80 frames. ], batch size: 25, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:09,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=40193.333333333336, ans=0.04949747468305833 +2024-07-27 17:01:20,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.84 vs. 
limit=15.0 +2024-07-27 17:01:23,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-07-27 17:01:24,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=40220.0, ans=0.125 +2024-07-27 17:01:28,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.79 vs. limit=15.0 +2024-07-27 17:01:39,683 INFO [train.py:1114] (0/4) Epoch 3, batch 9750, loss[loss=0.3049, simple_loss=0.3621, pruned_loss=0.1239, over 4692.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3469, pruned_loss=0.1051, over 925608.70 frames. ], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:52,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=15.0 +2024-07-27 17:01:53,660 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.325e+01 6.567e+01 7.224e+01 8.540e+01 1.142e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 17:01:53,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=40286.666666666664, ans=0.125 +2024-07-27 17:01:55,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=40286.666666666664, ans=0.1 +2024-07-27 17:02:00,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40300.0, ans=0.125 +2024-07-27 17:02:01,380 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:02:05,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=40313.333333333336, ans=0.125 +2024-07-27 17:02:11,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.51 vs. limit=10.0 +2024-07-27 17:02:12,029 INFO [train.py:1114] (0/4) Epoch 3, batch 9800, loss[loss=0.2253, simple_loss=0.3092, pruned_loss=0.07064, over 4705.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3454, pruned_loss=0.1041, over 924914.98 frames. ], batch size: 12, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:14,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40326.666666666664, ans=0.125 +2024-07-27 17:02:19,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=40340.0, ans=0.125 +2024-07-27 17:02:41,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=40366.666666666664, ans=0.025 +2024-07-27 17:02:44,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=40380.0, ans=0.0 +2024-07-27 17:02:51,228 INFO [train.py:1114] (0/4) Epoch 3, batch 9850, loss[loss=0.3156, simple_loss=0.3921, pruned_loss=0.1195, over 4906.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3466, pruned_loss=0.1045, over 927286.48 frames. 
], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:03:15,228 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.644e+01 7.357e+01 1.003e+02 1.564e+02, threshold=1.471e+02, percent-clipped=2.0 +2024-07-27 17:03:15,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40420.0, ans=0.125 +2024-07-27 17:03:16,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=40420.0, ans=10.0 +2024-07-27 17:03:26,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=40446.666666666664, ans=0.0020768115942029 +2024-07-27 17:03:32,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=40446.666666666664, ans=0.125 +2024-07-27 17:03:33,907 INFO [train.py:1114] (0/4) Epoch 3, batch 9900, loss[loss=0.3379, simple_loss=0.394, pruned_loss=0.1408, over 4825.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3475, pruned_loss=0.1053, over 927216.01 frames. ], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:03:34,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.12 vs. limit=22.5 +2024-07-27 17:03:35,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40460.0, ans=0.125 +2024-07-27 17:04:14,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=40500.0, ans=0.025 +2024-07-27 17:04:18,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40513.333333333336, ans=0.1 +2024-07-27 17:04:20,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=40513.333333333336, ans=0.2 +2024-07-27 17:04:22,313 INFO [train.py:1114] (0/4) Epoch 3, batch 9950, loss[loss=0.2194, simple_loss=0.2891, pruned_loss=0.07488, over 4798.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3469, pruned_loss=0.1053, over 929740.61 frames. ], batch size: 11, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:04:35,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=40540.0, ans=0.5 +2024-07-27 17:04:36,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.37 vs. limit=15.0 +2024-07-27 17:04:42,172 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.432e+01 7.065e+01 7.952e+01 9.840e+01 1.527e+02, threshold=1.590e+02, percent-clipped=1.0 +2024-07-27 17:04:43,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.13 vs. limit=22.5 +2024-07-27 17:05:00,008 INFO [train.py:1114] (0/4) Epoch 3, batch 10000, loss[loss=0.315, simple_loss=0.3719, pruned_loss=0.1291, over 4641.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3502, pruned_loss=0.1068, over 926908.77 frames. 
], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:05,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=40593.333333333336, ans=0.2 +2024-07-27 17:05:11,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40606.666666666664, ans=0.125 +2024-07-27 17:05:11,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40620.0, ans=0.125 +2024-07-27 17:05:13,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=40620.0, ans=0.025 +2024-07-27 17:05:18,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=40633.333333333336, ans=0.0 +2024-07-27 17:05:32,259 INFO [train.py:1114] (0/4) Epoch 3, batch 10050, loss[loss=0.3367, simple_loss=0.3816, pruned_loss=0.1459, over 3407.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3547, pruned_loss=0.1098, over 916071.20 frames. ], batch size: 35, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:40,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=40673.333333333336, ans=0.0 +2024-07-27 17:05:47,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.930e+01 7.073e+01 7.900e+01 8.546e+01 1.194e+02, threshold=1.580e+02, percent-clipped=0.0 +2024-07-27 17:05:55,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40700.0, ans=0.125 +2024-07-27 17:06:07,151 INFO [train.py:1114] (0/4) Epoch 3, batch 10100, loss[loss=0.3795, simple_loss=0.4091, pruned_loss=0.1749, over 3365.00 frames. ], tot_loss[loss=0.299, simple_loss=0.362, pruned_loss=0.118, over 863078.39 frames. ], batch size: 37, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:22,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=40753.333333333336, ans=0.125 +2024-07-27 17:06:27,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-27 17:06:34,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40780.0, ans=0.125 +2024-07-27 17:06:39,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=40780.0, ans=0.002004347826086956 +2024-07-27 17:06:39,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0 +2024-07-27 17:06:41,363 INFO [train.py:1114] (0/4) Epoch 3, batch 10150, loss[loss=0.3512, simple_loss=0.3985, pruned_loss=0.152, over 3565.00 frames. ], tot_loss[loss=0.3088, simple_loss=0.368, pruned_loss=0.1248, over 819196.45 frames. 
], batch size: 36, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:41,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=40793.333333333336, ans=10.0 +2024-07-27 17:07:01,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40806.666666666664, ans=0.125 +2024-07-27 17:07:09,197 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.948e+01 6.999e+01 7.537e+01 8.281e+01 1.738e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 17:07:23,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.74 vs. limit=15.0 +2024-07-27 17:07:26,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=40846.666666666664, ans=10.0 +2024-07-27 17:07:30,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=40846.666666666664, ans=0.95 +2024-07-27 17:07:32,449 INFO [train.py:1114] (0/4) Epoch 3, batch 10200, loss[loss=0.3868, simple_loss=0.4086, pruned_loss=0.1825, over 3151.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3717, pruned_loss=0.1293, over 787089.32 frames. ], batch size: 35, lr: 2.04e-02, grad_scale: 16.0 +2024-07-27 17:07:33,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=40860.0, ans=0.025 +2024-07-27 17:07:35,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=40860.0, ans=0.025 +2024-07-27 17:07:38,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=40873.333333333336, ans=0.025 +2024-07-27 17:08:03,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40873.333333333336, ans=0.1 +2024-07-27 17:08:03,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=40873.333333333336, ans=0.025 +2024-07-27 17:08:07,291 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-3.pt +2024-07-27 17:08:50,790 INFO [train.py:1114] (0/4) Epoch 4, batch 0, loss[loss=0.2432, simple_loss=0.3209, pruned_loss=0.08278, over 4848.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3209, pruned_loss=0.08278, over 4848.00 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:08:50,791 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 17:09:02,630 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2303, simple_loss=0.3319, pruned_loss=0.06433, over 944034.00 frames. +2024-07-27 17:09:02,631 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 17:09:17,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.12 vs. limit=10.0 +2024-07-27 17:09:23,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.41 vs. 
limit=10.0 +2024-07-27 17:09:25,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=40917.333333333336, ans=0.0 +2024-07-27 17:09:28,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40917.333333333336, ans=0.125 +2024-07-27 17:09:33,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.16 vs. limit=22.5 +2024-07-27 17:09:37,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.90 vs. limit=15.0 +2024-07-27 17:09:45,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40944.0, ans=0.125 +2024-07-27 17:09:47,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40944.0, ans=0.125 +2024-07-27 17:09:56,709 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.703e+01 7.240e+01 7.919e+01 1.564e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 17:09:57,432 INFO [train.py:1114] (0/4) Epoch 4, batch 50, loss[loss=0.2301, simple_loss=0.3158, pruned_loss=0.0722, over 4632.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3528, pruned_loss=0.1094, over 206442.74 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:04,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40970.666666666664, ans=0.1 +2024-07-27 17:10:09,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40970.666666666664, ans=0.0 +2024-07-27 17:10:10,218 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:10:14,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40984.0, ans=0.0 +2024-07-27 17:10:23,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40997.333333333336, ans=0.1 +2024-07-27 17:10:27,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=41010.666666666664, ans=0.125 +2024-07-27 17:10:30,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=41024.0, ans=0.0 +2024-07-27 17:10:31,386 INFO [train.py:1114] (0/4) Epoch 4, batch 100, loss[loss=0.2598, simple_loss=0.3248, pruned_loss=0.09738, over 4635.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3523, pruned_loss=0.1074, over 364807.72 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:32,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41024.0, ans=0.125 +2024-07-27 17:10:38,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.80 vs. 
limit=15.0 +2024-07-27 17:10:38,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41037.333333333336, ans=0.125 +2024-07-27 17:10:42,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=41037.333333333336, ans=0.0 +2024-07-27 17:10:45,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=41050.666666666664, ans=0.125 +2024-07-27 17:10:55,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=41064.0, ans=0.05 +2024-07-27 17:10:56,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.12 vs. limit=15.0 +2024-07-27 17:11:02,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.01 vs. limit=15.0 +2024-07-27 17:11:03,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=41077.333333333336, ans=10.0 +2024-07-27 17:11:04,705 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.318e+01 6.667e+01 8.145e+01 9.581e+01 1.407e+02, threshold=1.629e+02, percent-clipped=0.0 +2024-07-27 17:15:47,216 INFO [train.py:1114] (0/4) Epoch 4, batch 150, loss[loss=0.2049, simple_loss=0.275, pruned_loss=0.06739, over 4620.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3472, pruned_loss=0.1037, over 493580.97 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:15:53,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=12.0 +2024-07-27 17:16:04,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=41090.666666666664, ans=10.0 +2024-07-27 17:16:08,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=41104.0, ans=0.125 +2024-07-27 17:16:11,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=41104.0, ans=0.125 +2024-07-27 17:16:13,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=41117.333333333336, ans=0.5 +2024-07-27 17:16:35,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=41157.333333333336, ans=0.0 +2024-07-27 17:16:35,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=41157.333333333336, ans=0.0 +2024-07-27 17:16:36,407 INFO [train.py:1114] (0/4) Epoch 4, batch 200, loss[loss=0.2817, simple_loss=0.3418, pruned_loss=0.1108, over 4608.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3458, pruned_loss=0.103, over 593470.15 frames. ], batch size: 21, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:36,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=17.12 vs. 
limit=15.0 +2024-07-27 17:16:48,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=41170.666666666664, ans=0.125 +2024-07-27 17:16:56,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=41197.333333333336, ans=0.05 +2024-07-27 17:17:09,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+01 6.403e+01 7.504e+01 8.893e+01 1.315e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 17:17:09,909 INFO [train.py:1114] (0/4) Epoch 4, batch 250, loss[loss=0.2582, simple_loss=0.3466, pruned_loss=0.08492, over 4682.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3455, pruned_loss=0.1031, over 670048.87 frames. ], batch size: 16, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:17:15,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41224.0, ans=0.125 +2024-07-27 17:17:18,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=41237.333333333336, ans=0.125 +2024-07-27 17:17:41,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-07-27 17:17:43,539 INFO [train.py:1114] (0/4) Epoch 4, batch 300, loss[loss=0.2963, simple_loss=0.3523, pruned_loss=0.1201, over 4807.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3462, pruned_loss=0.1034, over 729600.99 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:17:53,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=41304.0, ans=0.2 +2024-07-27 17:17:56,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.09 vs. limit=15.0 +2024-07-27 17:17:57,121 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:18:02,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.07 vs. limit=15.0 +2024-07-27 17:18:11,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=41330.666666666664, ans=0.125 +2024-07-27 17:18:17,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=41344.0, ans=0.2 +2024-07-27 17:18:18,359 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.327e+01 6.726e+01 7.955e+01 9.020e+01 1.256e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 17:18:19,051 INFO [train.py:1114] (0/4) Epoch 4, batch 350, loss[loss=0.1983, simple_loss=0.2712, pruned_loss=0.06266, over 4949.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3452, pruned_loss=0.102, over 776035.41 frames. ], batch size: 12, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:20,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.12 vs. 
limit=6.0 +2024-07-27 17:18:23,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=41357.333333333336, ans=0.0 +2024-07-27 17:18:24,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41357.333333333336, ans=0.125 +2024-07-27 17:18:52,195 INFO [train.py:1114] (0/4) Epoch 4, batch 400, loss[loss=0.2792, simple_loss=0.357, pruned_loss=0.1007, over 4687.00 frames. ], tot_loss[loss=0.2736, simple_loss=0.3444, pruned_loss=0.1014, over 813305.98 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:56,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41424.0, ans=0.125 +2024-07-27 17:18:56,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=41424.0, ans=0.125 +2024-07-27 17:19:07,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.95 vs. limit=10.0 +2024-07-27 17:19:11,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=41464.0, ans=0.125 +2024-07-27 17:19:13,616 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:19:14,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.93 vs. limit=15.0 +2024-07-27 17:19:18,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=15.0 +2024-07-27 17:19:24,666 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.564e+01 7.397e+01 8.870e+01 1.499e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 17:19:25,329 INFO [train.py:1114] (0/4) Epoch 4, batch 450, loss[loss=0.238, simple_loss=0.3108, pruned_loss=0.08258, over 4629.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.3437, pruned_loss=0.1011, over 838691.28 frames. 
], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:19:26,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=41490.666666666664, ans=0.0 +2024-07-27 17:19:28,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=41490.666666666664, ans=0.125 +2024-07-27 17:19:31,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=41490.666666666664, ans=0.125 +2024-07-27 17:19:38,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41504.0, ans=0.125 +2024-07-27 17:19:43,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=41517.333333333336, ans=0.2 +2024-07-27 17:19:44,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=41517.333333333336, ans=0.125 +2024-07-27 17:19:46,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41530.666666666664, ans=0.1 +2024-07-27 17:19:48,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=41530.666666666664, ans=0.0 +2024-07-27 17:19:55,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41530.666666666664, ans=0.125 +2024-07-27 17:19:58,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0 +2024-07-27 17:20:09,639 INFO [train.py:1114] (0/4) Epoch 4, batch 500, loss[loss=0.2781, simple_loss=0.3518, pruned_loss=0.1022, over 4680.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3419, pruned_loss=0.0993, over 861278.87 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:20,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=41570.666666666664, ans=0.035 +2024-07-27 17:20:38,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=41610.666666666664, ans=0.125 +2024-07-27 17:20:45,337 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.267e+01 7.385e+01 9.027e+01 1.460e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 17:20:46,097 INFO [train.py:1114] (0/4) Epoch 4, batch 550, loss[loss=0.3083, simple_loss=0.379, pruned_loss=0.1188, over 4649.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3407, pruned_loss=0.09853, over 877265.98 frames. ], batch size: 17, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:46,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=41624.0, ans=0.2 +2024-07-27 17:20:47,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=41624.0, ans=0.0 +2024-07-27 17:21:00,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. 
limit=6.0 +2024-07-27 17:21:02,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=41650.666666666664, ans=0.125 +2024-07-27 17:21:03,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=41650.666666666664, ans=0.125 +2024-07-27 17:21:12,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.90 vs. limit=10.0 +2024-07-27 17:21:12,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=41677.333333333336, ans=0.0 +2024-07-27 17:21:21,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=41690.666666666664, ans=0.125 +2024-07-27 17:21:21,576 INFO [train.py:1114] (0/4) Epoch 4, batch 600, loss[loss=0.2836, simple_loss=0.3559, pruned_loss=0.1056, over 4619.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3415, pruned_loss=0.09863, over 891954.57 frames. ], batch size: 16, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:21:22,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.98 vs. limit=22.5 +2024-07-27 17:21:23,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=41690.666666666664, ans=10.0 +2024-07-27 17:21:39,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41717.333333333336, ans=0.125 +2024-07-27 17:21:42,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=41730.666666666664, ans=0.0 +2024-07-27 17:21:53,702 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.519e+01 6.252e+01 7.090e+01 7.980e+01 1.452e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 17:21:54,417 INFO [train.py:1114] (0/4) Epoch 4, batch 650, loss[loss=0.2861, simple_loss=0.3442, pruned_loss=0.114, over 4759.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3406, pruned_loss=0.0988, over 903792.82 frames. ], batch size: 13, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:21:57,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=41757.333333333336, ans=0.125 +2024-07-27 17:22:03,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.10 vs. limit=15.0 +2024-07-27 17:22:12,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=41784.0, ans=0.125 +2024-07-27 17:22:12,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=41784.0, ans=0.025 +2024-07-27 17:22:14,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=41797.333333333336, ans=0.0 +2024-07-27 17:22:25,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.69 vs. 
limit=22.5 +2024-07-27 17:22:27,770 INFO [train.py:1114] (0/4) Epoch 4, batch 700, loss[loss=0.2283, simple_loss=0.3058, pruned_loss=0.07544, over 4646.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3411, pruned_loss=0.09895, over 911832.73 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:22:27,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=41824.0, ans=0.0017773913043478261 +2024-07-27 17:22:28,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41824.0, ans=0.125 +2024-07-27 17:22:40,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41837.333333333336, ans=0.1 +2024-07-27 17:22:48,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=41864.0, ans=0.09899494936611666 +2024-07-27 17:22:48,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=41864.0, ans=0.0 +2024-07-27 17:22:52,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=41864.0, ans=0.125 +2024-07-27 17:22:52,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=41864.0, ans=0.2 +2024-07-27 17:22:53,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41864.0, ans=0.1 +2024-07-27 17:23:02,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.371e+01 6.772e+01 7.672e+01 9.334e+01 1.432e+02, threshold=1.534e+02, percent-clipped=1.0 +2024-07-27 17:23:02,186 INFO [train.py:1114] (0/4) Epoch 4, batch 750, loss[loss=0.2859, simple_loss=0.3543, pruned_loss=0.1088, over 4689.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3411, pruned_loss=0.09878, over 918357.25 frames. ], batch size: 13, lr: 1.89e-02, grad_scale: 16.0 +2024-07-27 17:23:08,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=41904.0, ans=0.09899494936611666 +2024-07-27 17:23:11,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=41904.0, ans=0.5 +2024-07-27 17:23:19,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41917.333333333336, ans=0.1 +2024-07-27 17:23:25,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.79 vs. limit=15.0 +2024-07-27 17:23:25,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41930.666666666664, ans=0.125 +2024-07-27 17:23:37,987 INFO [train.py:1114] (0/4) Epoch 4, batch 800, loss[loss=0.2196, simple_loss=0.2854, pruned_loss=0.07688, over 4862.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3406, pruned_loss=0.09919, over 923476.94 frames. 
], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:23:42,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=41957.333333333336, ans=0.2 +2024-07-27 17:23:46,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=41970.666666666664, ans=0.025 +2024-07-27 17:23:51,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41970.666666666664, ans=0.1 +2024-07-27 17:24:07,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=41997.333333333336, ans=0.0 +2024-07-27 17:24:17,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0 +2024-07-27 17:24:18,035 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.573e+01 6.422e+01 7.236e+01 8.133e+01 1.458e+02, threshold=1.447e+02, percent-clipped=0.0 +2024-07-27 17:24:18,104 INFO [train.py:1114] (0/4) Epoch 4, batch 850, loss[loss=0.3167, simple_loss=0.39, pruned_loss=0.1217, over 4655.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3407, pruned_loss=0.09946, over 927252.42 frames. ], batch size: 14, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:24:32,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.28 vs. limit=15.0 +2024-07-27 17:24:33,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=42037.333333333336, ans=0.0 +2024-07-27 17:24:36,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=42037.333333333336, ans=0.2 +2024-07-27 17:24:37,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42037.333333333336, ans=0.125 +2024-07-27 17:24:53,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42077.333333333336, ans=0.125 +2024-07-27 17:24:58,779 INFO [train.py:1114] (0/4) Epoch 4, batch 900, loss[loss=0.2323, simple_loss=0.3051, pruned_loss=0.07975, over 4856.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3414, pruned_loss=0.1003, over 928117.92 frames. 
], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:24:59,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=42090.666666666664, ans=0.025 +2024-07-27 17:24:59,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42090.666666666664, ans=0.1 +2024-07-27 17:25:00,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=42090.666666666664, ans=0.125 +2024-07-27 17:25:00,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42090.666666666664, ans=0.125 +2024-07-27 17:25:05,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=42104.0, ans=0.0 +2024-07-27 17:25:11,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.05 vs. limit=12.0 +2024-07-27 17:25:19,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=42130.666666666664, ans=0.0017107246376811599 +2024-07-27 17:25:34,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0 +2024-07-27 17:25:34,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.294e+01 6.285e+01 6.831e+01 7.468e+01 1.764e+02, threshold=1.366e+02, percent-clipped=2.0 +2024-07-27 17:25:34,737 INFO [train.py:1114] (0/4) Epoch 4, batch 950, loss[loss=0.2593, simple_loss=0.3413, pruned_loss=0.08871, over 4768.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3408, pruned_loss=0.09937, over 929581.45 frames. ], batch size: 12, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:25:39,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.57 vs. limit=10.0 +2024-07-27 17:25:46,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=42170.666666666664, ans=0.0 +2024-07-27 17:25:51,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=42184.0, ans=0.95 +2024-07-27 17:25:58,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.94 vs. limit=15.0 +2024-07-27 17:26:01,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=42197.333333333336, ans=0.125 +2024-07-27 17:26:04,682 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.50 vs. limit=22.5 +2024-07-27 17:26:08,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=42210.666666666664, ans=0.0 +2024-07-27 17:26:12,278 INFO [train.py:1114] (0/4) Epoch 4, batch 1000, loss[loss=0.2683, simple_loss=0.3223, pruned_loss=0.1072, over 4951.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3408, pruned_loss=0.09932, over 929253.10 frames. 
], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:26:13,186 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:26:41,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=42264.0, ans=0.025 +2024-07-27 17:26:51,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42290.666666666664, ans=0.1 +2024-07-27 17:26:52,353 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.274e+01 6.992e+01 7.907e+01 1.150e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 17:26:52,386 INFO [train.py:1114] (0/4) Epoch 4, batch 1050, loss[loss=0.252, simple_loss=0.3381, pruned_loss=0.08297, over 4880.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3393, pruned_loss=0.09866, over 931820.94 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:04,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=42290.666666666664, ans=0.0 +2024-07-27 17:27:22,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=42317.333333333336, ans=0.2 +2024-07-27 17:27:30,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42330.666666666664, ans=0.125 +2024-07-27 17:27:35,426 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:27:38,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=42344.0, ans=0.0 +2024-07-27 17:27:44,214 INFO [train.py:1114] (0/4) Epoch 4, batch 1100, loss[loss=0.2702, simple_loss=0.3388, pruned_loss=0.1008, over 4903.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3405, pruned_loss=0.0993, over 934441.11 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:57,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.72 vs. limit=12.0 +2024-07-27 17:28:12,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42397.333333333336, ans=0.125 +2024-07-27 17:28:16,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=42410.666666666664, ans=0.125 +2024-07-27 17:28:22,184 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.208e+01 6.982e+01 7.743e+01 1.395e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 17:28:22,218 INFO [train.py:1114] (0/4) Epoch 4, batch 1150, loss[loss=0.2396, simple_loss=0.3178, pruned_loss=0.08064, over 4899.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.341, pruned_loss=0.09961, over 934224.24 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:28:23,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.27 vs. 
limit=6.0 +2024-07-27 17:29:02,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=42450.666666666664, ans=0.125 +2024-07-27 17:29:11,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=42477.333333333336, ans=0.09899494936611666 +2024-07-27 17:29:24,967 INFO [train.py:1114] (0/4) Epoch 4, batch 1200, loss[loss=0.2805, simple_loss=0.3577, pruned_loss=0.1016, over 4866.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3417, pruned_loss=0.09991, over 933335.95 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:29:29,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=42490.666666666664, ans=0.2 +2024-07-27 17:29:29,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.02 vs. limit=15.0 +2024-07-27 17:29:44,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=42517.333333333336, ans=0.125 +2024-07-27 17:29:44,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=42517.333333333336, ans=0.125 +2024-07-27 17:29:45,972 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:29:57,097 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:30:09,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=42544.0, ans=0.0 +2024-07-27 17:30:09,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42544.0, ans=0.1 +2024-07-27 17:30:12,124 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.401e+01 6.877e+01 7.526e+01 8.642e+01 1.436e+02, threshold=1.505e+02, percent-clipped=1.0 +2024-07-27 17:30:12,157 INFO [train.py:1114] (0/4) Epoch 4, batch 1250, loss[loss=0.2823, simple_loss=0.3409, pruned_loss=0.1119, over 4784.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3412, pruned_loss=0.09893, over 937354.13 frames. ], batch size: 15, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:30:15,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42557.333333333336, ans=0.125 +2024-07-27 17:30:17,255 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:30:18,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.45 vs. 
limit=15.0 +2024-07-27 17:30:19,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=42570.666666666664, ans=0.125 +2024-07-27 17:30:20,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=42570.666666666664, ans=0.125 +2024-07-27 17:30:21,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42570.666666666664, ans=0.125 +2024-07-27 17:30:21,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. limit=10.0 +2024-07-27 17:30:41,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42597.333333333336, ans=0.1 +2024-07-27 17:30:53,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=42610.666666666664, ans=0.0016063768115942047 +2024-07-27 17:30:57,769 INFO [train.py:1114] (0/4) Epoch 4, batch 1300, loss[loss=0.3064, simple_loss=0.3729, pruned_loss=0.1199, over 4723.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3409, pruned_loss=0.09853, over 938674.59 frames. ], batch size: 19, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:31:34,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42664.0, ans=0.125 +2024-07-27 17:31:35,578 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-32000.pt +2024-07-27 17:31:42,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42664.0, ans=0.1 +2024-07-27 17:31:52,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=42677.333333333336, ans=0.0015918840579710134 +2024-07-27 17:31:58,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=42677.333333333336, ans=0.0 +2024-07-27 17:31:59,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=12.0 +2024-07-27 17:31:59,667 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.286e+01 6.475e+01 6.974e+01 8.075e+01 1.412e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 17:31:59,715 INFO [train.py:1114] (0/4) Epoch 4, batch 1350, loss[loss=0.239, simple_loss=0.3231, pruned_loss=0.07739, over 4762.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3397, pruned_loss=0.09745, over 940500.66 frames. ], batch size: 13, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:32:37,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=42730.666666666664, ans=0.0 +2024-07-27 17:32:44,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0 +2024-07-27 17:32:45,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=42744.0, ans=0.125 +2024-07-27 17:32:49,785 INFO [train.py:1114] (0/4) Epoch 4, batch 1400, loss[loss=0.2504, simple_loss=0.3206, pruned_loss=0.09015, over 4685.00 frames. 
], tot_loss[loss=0.2666, simple_loss=0.3389, pruned_loss=0.09711, over 942581.80 frames. ], batch size: 11, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:33:51,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=42810.666666666664, ans=0.125 +2024-07-27 17:33:59,442 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.546e+01 6.502e+01 7.039e+01 8.275e+01 1.312e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 17:33:59,489 INFO [train.py:1114] (0/4) Epoch 4, batch 1450, loss[loss=0.2508, simple_loss=0.3334, pruned_loss=0.08408, over 4693.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3391, pruned_loss=0.09704, over 942774.45 frames. ], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:34:16,816 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:34:20,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=42850.666666666664, ans=0.0 +2024-07-27 17:34:31,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=42864.0, ans=0.125 +2024-07-27 17:34:54,674 INFO [train.py:1114] (0/4) Epoch 4, batch 1500, loss[loss=0.2977, simple_loss=0.3702, pruned_loss=0.1126, over 4806.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3396, pruned_loss=0.09771, over 942611.91 frames. ], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:35:00,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=42890.666666666664, ans=0.125 +2024-07-27 17:35:02,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=42890.666666666664, ans=0.125 +2024-07-27 17:35:06,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42904.0, ans=0.125 +2024-07-27 17:35:37,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42930.666666666664, ans=0.125 +2024-07-27 17:35:42,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=16.69 vs. limit=15.0 +2024-07-27 17:35:46,840 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.518e+01 6.513e+01 7.459e+01 8.473e+01 1.359e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 17:35:46,873 INFO [train.py:1114] (0/4) Epoch 4, batch 1550, loss[loss=0.2513, simple_loss=0.3356, pruned_loss=0.08349, over 4898.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3403, pruned_loss=0.0989, over 939140.39 frames. 
], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:36:05,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42984.0, ans=0.1 +2024-07-27 17:36:07,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42984.0, ans=0.1 +2024-07-27 17:36:07,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=42984.0, ans=0.125 +2024-07-27 17:36:07,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42984.0, ans=0.1 +2024-07-27 17:36:12,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=42997.333333333336, ans=0.125 +2024-07-27 17:36:17,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-27 17:36:18,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=43010.666666666664, ans=0.0 +2024-07-27 17:36:19,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=43010.666666666664, ans=0.125 +2024-07-27 17:36:23,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43010.666666666664, ans=0.125 +2024-07-27 17:36:25,516 INFO [train.py:1114] (0/4) Epoch 4, batch 1600, loss[loss=0.275, simple_loss=0.3506, pruned_loss=0.09966, over 4871.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3395, pruned_loss=0.09825, over 938190.72 frames. ], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:36:29,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-07-27 17:36:30,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.88 vs. limit=22.5 +2024-07-27 17:36:36,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43037.333333333336, ans=0.125 +2024-07-27 17:36:46,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=43050.666666666664, ans=0.09899494936611666 +2024-07-27 17:36:58,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 17:37:04,627 INFO [train.py:1114] (0/4) Epoch 4, batch 1650, loss[loss=0.2513, simple_loss=0.3245, pruned_loss=0.089, over 4676.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.34, pruned_loss=0.09888, over 937831.95 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:05,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.309e+01 6.450e+01 7.502e+01 9.535e+01 1.419e+02, threshold=1.500e+02, percent-clipped=0.0 +2024-07-27 17:37:09,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.85 vs. 
limit=15.0 +2024-07-27 17:37:37,803 INFO [train.py:1114] (0/4) Epoch 4, batch 1700, loss[loss=0.2903, simple_loss=0.3402, pruned_loss=0.1202, over 4701.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3395, pruned_loss=0.09865, over 939860.23 frames. ], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:37,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43157.333333333336, ans=0.125 +2024-07-27 17:37:38,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=43157.333333333336, ans=0.0 +2024-07-27 17:37:44,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=43157.333333333336, ans=0.125 +2024-07-27 17:37:54,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=22.5 +2024-07-27 17:37:56,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43184.0, ans=0.125 +2024-07-27 17:37:56,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43184.0, ans=0.0 +2024-07-27 17:38:09,881 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.06 vs. limit=15.0 +2024-07-27 17:38:12,077 INFO [train.py:1114] (0/4) Epoch 4, batch 1750, loss[loss=0.2303, simple_loss=0.3024, pruned_loss=0.07905, over 4809.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3399, pruned_loss=0.09829, over 940561.73 frames. ], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:12,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.524e+01 6.769e+01 7.815e+01 9.643e+01 1.575e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 17:38:15,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43224.0, ans=0.125 +2024-07-27 17:38:25,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=43237.333333333336, ans=0.0014701449275362315 +2024-07-27 17:38:40,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=43264.0, ans=0.125 +2024-07-27 17:38:43,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.30 vs. limit=15.0 +2024-07-27 17:38:43,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.48 vs. limit=8.0 +2024-07-27 17:38:44,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43277.333333333336, ans=0.1 +2024-07-27 17:38:49,157 INFO [train.py:1114] (0/4) Epoch 4, batch 1800, loss[loss=0.2491, simple_loss=0.3241, pruned_loss=0.08708, over 4636.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3387, pruned_loss=0.09767, over 940966.02 frames. 
], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:58,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43304.0, ans=0.1 +2024-07-27 17:39:03,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=43317.333333333336, ans=0.125 +2024-07-27 17:39:03,816 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.96 vs. limit=15.0 +2024-07-27 17:39:04,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=43317.333333333336, ans=0.125 +2024-07-27 17:39:16,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43344.0, ans=0.125 +2024-07-27 17:39:18,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0 +2024-07-27 17:39:18,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.97 vs. limit=15.0 +2024-07-27 17:39:23,346 INFO [train.py:1114] (0/4) Epoch 4, batch 1850, loss[loss=0.2417, simple_loss=0.3282, pruned_loss=0.07755, over 4811.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3386, pruned_loss=0.09756, over 940732.81 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:39:23,919 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.740e+01 7.721e+01 9.480e+01 1.911e+02, threshold=1.544e+02, percent-clipped=3.0 +2024-07-27 17:39:39,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43384.0, ans=0.1 +2024-07-27 17:39:40,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=43384.0, ans=0.125 +2024-07-27 17:39:41,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=43384.0, ans=0.0 +2024-07-27 17:39:44,764 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:39:58,474 INFO [train.py:1114] (0/4) Epoch 4, batch 1900, loss[loss=0.3048, simple_loss=0.3708, pruned_loss=0.1194, over 4666.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3396, pruned_loss=0.09816, over 941837.96 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:26,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43477.333333333336, ans=0.1 +2024-07-27 17:40:33,681 INFO [train.py:1114] (0/4) Epoch 4, batch 1950, loss[loss=0.2215, simple_loss=0.3073, pruned_loss=0.06786, over 4893.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3402, pruned_loss=0.09792, over 943826.14 frames. 
], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:34,317 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 6.498e+01 7.387e+01 8.650e+01 1.667e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 17:40:34,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=43490.666666666664, ans=0.125 +2024-07-27 17:40:41,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=43490.666666666664, ans=0.2 +2024-07-27 17:40:48,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=12.0 +2024-07-27 17:40:56,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=43517.333333333336, ans=0.025 +2024-07-27 17:41:04,677 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:41:09,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=43544.0, ans=0.015 +2024-07-27 17:41:17,142 INFO [train.py:1114] (0/4) Epoch 4, batch 2000, loss[loss=0.2152, simple_loss=0.2842, pruned_loss=0.07309, over 4791.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3412, pruned_loss=0.09875, over 940659.78 frames. ], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:19,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43557.333333333336, ans=0.1 +2024-07-27 17:41:30,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=43584.0, ans=0.0 +2024-07-27 17:41:33,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=43584.0, ans=0.0013947826086956518 +2024-07-27 17:41:36,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.16 vs. limit=22.5 +2024-07-27 17:41:39,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=43597.333333333336, ans=0.035 +2024-07-27 17:41:41,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=43597.333333333336, ans=0.0 +2024-07-27 17:41:42,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=43597.333333333336, ans=0.0 +2024-07-27 17:41:50,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=43610.666666666664, ans=0.2 +2024-07-27 17:41:52,689 INFO [train.py:1114] (0/4) Epoch 4, batch 2050, loss[loss=0.2408, simple_loss=0.3119, pruned_loss=0.08482, over 4623.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3409, pruned_loss=0.09886, over 938539.11 frames. 
], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:53,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.522e+01 6.397e+01 6.971e+01 8.145e+01 1.317e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-27 17:42:05,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=43650.666666666664, ans=10.0 +2024-07-27 17:42:10,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=43650.666666666664, ans=0.2 +2024-07-27 17:42:11,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=43650.666666666664, ans=0.125 +2024-07-27 17:42:12,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=43664.0, ans=0.09899494936611666 +2024-07-27 17:42:13,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=43664.0, ans=0.1 +2024-07-27 17:42:13,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.62 vs. limit=10.0 +2024-07-27 17:42:22,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=43677.333333333336, ans=0.0 +2024-07-27 17:42:25,799 INFO [train.py:1114] (0/4) Epoch 4, batch 2100, loss[loss=0.2836, simple_loss=0.3585, pruned_loss=0.1043, over 4751.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3397, pruned_loss=0.0979, over 940673.37 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:42:27,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=43690.666666666664, ans=0.125 +2024-07-27 17:42:31,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43690.666666666664, ans=0.1 +2024-07-27 17:42:41,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43717.333333333336, ans=0.125 +2024-07-27 17:42:46,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=43730.666666666664, ans=0.0 +2024-07-27 17:42:47,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=43730.666666666664, ans=0.0013628985507246373 +2024-07-27 17:42:49,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=43730.666666666664, ans=0.2 +2024-07-27 17:42:53,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43744.0, ans=0.1 +2024-07-27 17:42:58,955 INFO [train.py:1114] (0/4) Epoch 4, batch 2150, loss[loss=0.2382, simple_loss=0.3187, pruned_loss=0.07884, over 4899.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3384, pruned_loss=0.09675, over 944125.00 frames. 
], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:42:59,570 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.474e+01 6.533e+01 7.336e+01 8.956e+01 1.647e+02, threshold=1.467e+02, percent-clipped=5.0 +2024-07-27 17:43:10,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.40 vs. limit=15.0 +2024-07-27 17:43:16,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.14 vs. limit=15.0 +2024-07-27 17:43:25,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43810.666666666664, ans=0.125 +2024-07-27 17:43:32,616 INFO [train.py:1114] (0/4) Epoch 4, batch 2200, loss[loss=0.3065, simple_loss=0.378, pruned_loss=0.1175, over 4805.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3381, pruned_loss=0.09673, over 943159.47 frames. ], batch size: 14, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:43:47,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.13 vs. limit=15.0 +2024-07-27 17:44:03,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.78 vs. limit=22.5 +2024-07-27 17:44:09,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.53 vs. limit=15.0 +2024-07-27 17:44:14,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=43877.333333333336, ans=0.125 +2024-07-27 17:44:16,791 INFO [train.py:1114] (0/4) Epoch 4, batch 2250, loss[loss=0.2996, simple_loss=0.3774, pruned_loss=0.1109, over 4699.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3371, pruned_loss=0.09603, over 941548.18 frames. 
], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:44:17,410 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.403e+01 7.459e+01 9.142e+01 2.382e+02, threshold=1.492e+02, percent-clipped=1.0 +2024-07-27 17:44:17,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43890.666666666664, ans=0.125 +2024-07-27 17:44:19,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=43890.666666666664, ans=0.025 +2024-07-27 17:44:21,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=43890.666666666664, ans=0.2 +2024-07-27 17:44:47,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=43930.666666666664, ans=0.125 +2024-07-27 17:44:52,713 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:44:55,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=43944.0, ans=22.5 +2024-07-27 17:44:56,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=43944.0, ans=0.125 +2024-07-27 17:44:57,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43944.0, ans=0.125 +2024-07-27 17:44:57,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=43944.0, ans=0.2 +2024-07-27 17:44:58,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=43944.0, ans=0.05 +2024-07-27 17:44:59,228 INFO [train.py:1114] (0/4) Epoch 4, batch 2300, loss[loss=0.2236, simple_loss=0.2949, pruned_loss=0.07611, over 4933.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.336, pruned_loss=0.09587, over 939160.18 frames. ], batch size: 12, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:45:17,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43970.666666666664, ans=0.0 +2024-07-27 17:45:28,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43997.333333333336, ans=0.1 +2024-07-27 17:45:30,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43997.333333333336, ans=0.1 +2024-07-27 17:45:34,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=44010.666666666664, ans=0.0013020289855072472 +2024-07-27 17:45:41,389 INFO [train.py:1114] (0/4) Epoch 4, batch 2350, loss[loss=0.2735, simple_loss=0.3426, pruned_loss=0.1022, over 4635.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3367, pruned_loss=0.09596, over 941165.95 frames. 
], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:45:41,993 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 6.786e+01 8.508e+01 1.044e+02 1.776e+02, threshold=1.702e+02, percent-clipped=2.0 +2024-07-27 17:45:42,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=44024.0, ans=0.2 +2024-07-27 17:45:57,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=44050.666666666664, ans=0.2 +2024-07-27 17:46:03,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=44050.666666666664, ans=0.0012933333333333338 +2024-07-27 17:46:07,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=44064.0, ans=0.0012904347826086966 +2024-07-27 17:46:08,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.80 vs. limit=15.0 +2024-07-27 17:46:12,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=44077.333333333336, ans=0.0 +2024-07-27 17:46:15,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=44077.333333333336, ans=0.04949747468305833 +2024-07-27 17:46:26,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=44077.333333333336, ans=0.125 +2024-07-27 17:46:28,018 INFO [train.py:1114] (0/4) Epoch 4, batch 2400, loss[loss=0.2696, simple_loss=0.3344, pruned_loss=0.1024, over 4642.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.337, pruned_loss=0.09584, over 940888.98 frames. ], batch size: 12, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:46:40,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44104.0, ans=0.125 +2024-07-27 17:46:44,026 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:46:50,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=44130.666666666664, ans=0.125 +2024-07-27 17:46:57,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=44144.0, ans=0.125 +2024-07-27 17:46:57,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44144.0, ans=0.1 +2024-07-27 17:46:58,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.74 vs. limit=15.0 +2024-07-27 17:47:01,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44157.333333333336, ans=0.0 +2024-07-27 17:47:01,679 INFO [train.py:1114] (0/4) Epoch 4, batch 2450, loss[loss=0.2522, simple_loss=0.3336, pruned_loss=0.0854, over 4693.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3383, pruned_loss=0.09687, over 936908.88 frames. 
], batch size: 13, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:47:02,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.348e+01 7.314e+01 8.641e+01 1.426e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 17:47:03,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=44157.333333333336, ans=0.05 +2024-07-27 17:47:12,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44170.666666666664, ans=0.1 +2024-07-27 17:47:12,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=44170.666666666664, ans=0.0012672463768115955 +2024-07-27 17:47:16,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.26 vs. limit=15.0 +2024-07-27 17:47:18,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=44170.666666666664, ans=0.025 +2024-07-27 17:47:26,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.35 vs. limit=10.0 +2024-07-27 17:47:26,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=44197.333333333336, ans=0.125 +2024-07-27 17:47:36,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44210.666666666664, ans=0.125 +2024-07-27 17:47:37,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=44210.666666666664, ans=0.125 +2024-07-27 17:47:38,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=44210.666666666664, ans=0.2 +2024-07-27 17:47:39,894 INFO [train.py:1114] (0/4) Epoch 4, batch 2500, loss[loss=0.2955, simple_loss=0.3735, pruned_loss=0.1088, over 4817.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3387, pruned_loss=0.0973, over 938897.30 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:47:46,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.76 vs. limit=22.5 +2024-07-27 17:47:47,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=44237.333333333336, ans=0.125 +2024-07-27 17:48:11,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=44277.333333333336, ans=0.0 +2024-07-27 17:48:13,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=44277.333333333336, ans=0.125 +2024-07-27 17:48:14,558 INFO [train.py:1114] (0/4) Epoch 4, batch 2550, loss[loss=0.211, simple_loss=0.2766, pruned_loss=0.07275, over 4812.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.338, pruned_loss=0.09715, over 938675.51 frames. 
], batch size: 11, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:48:15,143 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.441e+01 6.325e+01 6.836e+01 7.764e+01 1.443e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 17:48:17,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-07-27 17:48:25,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=44304.0, ans=0.125 +2024-07-27 17:48:29,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44317.333333333336, ans=0.1 +2024-07-27 17:48:30,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44317.333333333336, ans=0.1 +2024-07-27 17:48:34,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=44317.333333333336, ans=0.025 +2024-07-27 17:48:48,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=44344.0, ans=0.2 +2024-07-27 17:48:49,562 INFO [train.py:1114] (0/4) Epoch 4, batch 2600, loss[loss=0.2649, simple_loss=0.3386, pruned_loss=0.09557, over 4892.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3379, pruned_loss=0.09699, over 937763.58 frames. ], batch size: 13, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:48:51,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=44357.333333333336, ans=0.125 +2024-07-27 17:48:53,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.12 vs. limit=15.0 +2024-07-27 17:48:54,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.09 vs. limit=10.0 +2024-07-27 17:49:06,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44384.0, ans=0.125 +2024-07-27 17:49:18,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44410.666666666664, ans=0.125 +2024-07-27 17:49:18,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=44410.666666666664, ans=0.125 +2024-07-27 17:49:24,957 INFO [train.py:1114] (0/4) Epoch 4, batch 2650, loss[loss=0.275, simple_loss=0.3374, pruned_loss=0.1063, over 4633.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3386, pruned_loss=0.09739, over 939687.96 frames. ], batch size: 16, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:49:25,616 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.678e+01 7.695e+01 9.100e+01 1.480e+02, threshold=1.539e+02, percent-clipped=3.0 +2024-07-27 17:49:29,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=44424.0, ans=0.2 +2024-07-27 17:49:31,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.04 vs. 
limit=22.5 +2024-07-27 17:49:38,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=44437.333333333336, ans=0.2 +2024-07-27 17:49:40,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=44437.333333333336, ans=0.125 +2024-07-27 17:49:49,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=44464.0, ans=0.2 +2024-07-27 17:50:00,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=44477.333333333336, ans=0.2 +2024-07-27 17:50:06,085 INFO [train.py:1114] (0/4) Epoch 4, batch 2700, loss[loss=0.2338, simple_loss=0.3187, pruned_loss=0.07442, over 4735.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3388, pruned_loss=0.098, over 939823.39 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:50:12,488 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:50:22,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=44517.333333333336, ans=0.0 +2024-07-27 17:50:26,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.45 vs. limit=15.0 +2024-07-27 17:50:41,740 INFO [train.py:1114] (0/4) Epoch 4, batch 2750, loss[loss=0.2228, simple_loss=0.2926, pruned_loss=0.07651, over 4702.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3375, pruned_loss=0.09707, over 939700.22 frames. ], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:50:42,303 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.612e+01 7.573e+01 9.586e+01 1.480e+02, threshold=1.515e+02, percent-clipped=0.0 +2024-07-27 17:50:43,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44557.333333333336, ans=0.1 +2024-07-27 17:50:46,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=44557.333333333336, ans=0.125 +2024-07-27 17:50:48,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-07-27 17:51:18,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44610.666666666664, ans=0.125 +2024-07-27 17:51:19,272 INFO [train.py:1114] (0/4) Epoch 4, batch 2800, loss[loss=0.3922, simple_loss=0.4219, pruned_loss=0.1813, over 3410.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3383, pruned_loss=0.09777, over 937915.71 frames. ], batch size: 35, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:51:21,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=44624.0, ans=0.0 +2024-07-27 17:51:32,228 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.45 vs. 
limit=15.0 +2024-07-27 17:51:52,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=44677.333333333336, ans=0.035 +2024-07-27 17:51:54,591 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.12 vs. limit=15.0 +2024-07-27 17:51:55,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=44677.333333333336, ans=0.125 +2024-07-27 17:51:57,066 INFO [train.py:1114] (0/4) Epoch 4, batch 2850, loss[loss=0.2343, simple_loss=0.3113, pruned_loss=0.07866, over 4962.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.339, pruned_loss=0.09857, over 936203.72 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:51:57,805 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.785e+01 7.509e+01 8.652e+01 1.296e+02, threshold=1.502e+02, percent-clipped=0.0 +2024-07-27 17:52:01,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.61 vs. limit=15.0 +2024-07-27 17:52:01,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.99 vs. limit=22.5 +2024-07-27 17:52:04,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=44704.0, ans=0.5 +2024-07-27 17:52:24,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0 +2024-07-27 17:52:33,425 INFO [train.py:1114] (0/4) Epoch 4, batch 2900, loss[loss=0.2937, simple_loss=0.3588, pruned_loss=0.1143, over 4830.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3408, pruned_loss=0.09899, over 939977.97 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:52:42,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=44770.666666666664, ans=0.09899494936611666 +2024-07-27 17:52:44,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=44770.666666666664, ans=0.2 +2024-07-27 17:52:50,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=44784.0, ans=22.5 +2024-07-27 17:52:59,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=44797.333333333336, ans=0.125 +2024-07-27 17:53:05,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-27 17:53:06,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44824.0, ans=0.125 +2024-07-27 17:53:07,378 INFO [train.py:1114] (0/4) Epoch 4, batch 2950, loss[loss=0.2555, simple_loss=0.3332, pruned_loss=0.08891, over 4705.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3397, pruned_loss=0.09885, over 938842.09 frames. 
], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:07,995 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 6.448e+01 7.326e+01 8.943e+01 1.391e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 17:53:14,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=44837.333333333336, ans=0.0011223188405797089 +2024-07-27 17:53:14,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.42 vs. limit=15.0 +2024-07-27 17:53:17,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-27 17:53:36,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=44877.333333333336, ans=0.2 +2024-07-27 17:53:40,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.60 vs. limit=15.0 +2024-07-27 17:53:41,134 INFO [train.py:1114] (0/4) Epoch 4, batch 3000, loss[loss=0.2458, simple_loss=0.3237, pruned_loss=0.08393, over 4762.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3388, pruned_loss=0.09753, over 938214.46 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:41,135 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 17:53:44,988 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.1545, 5.3509, 5.3141, 5.9057], device='cuda:0') +2024-07-27 17:53:45,692 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.8105, 3.2786, 3.2045, 3.1233], device='cuda:0') +2024-07-27 17:53:50,710 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.6580, 2.3757, 3.0606, 3.4802, 3.5213, 2.9370, 3.1421, 2.1714], + device='cuda:0') +2024-07-27 17:53:52,966 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2168, simple_loss=0.3177, pruned_loss=0.05793, over 944034.00 frames. +2024-07-27 17:53:52,966 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 17:54:00,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44904.0, ans=0.125 +2024-07-27 17:54:22,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44930.666666666664, ans=0.1 +2024-07-27 17:54:30,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=44944.0, ans=0.0 +2024-07-27 17:54:34,896 INFO [train.py:1114] (0/4) Epoch 4, batch 3050, loss[loss=0.2266, simple_loss=0.3123, pruned_loss=0.07042, over 4637.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3408, pruned_loss=0.09886, over 937283.29 frames. 
], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:54:42,782 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.188e+01 6.571e+01 7.374e+01 8.801e+01 1.359e+02, threshold=1.475e+02, percent-clipped=0.0 +2024-07-27 17:57:16,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=44984.0, ans=0.0 +2024-07-27 17:57:28,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=45010.666666666664, ans=0.125 +2024-07-27 17:57:42,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=45024.0, ans=0.125 +2024-07-27 17:57:42,493 INFO [train.py:1114] (0/4) Epoch 4, batch 3100, loss[loss=0.3201, simple_loss=0.3944, pruned_loss=0.1229, over 4667.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3402, pruned_loss=0.09859, over 937927.45 frames. ], batch size: 16, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:57:46,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=45024.0, ans=0.2 +2024-07-27 17:57:46,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45024.0, ans=0.1 +2024-07-27 17:58:05,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=45050.666666666664, ans=0.125 +2024-07-27 17:58:06,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=45050.666666666664, ans=0.025 +2024-07-27 17:58:47,043 INFO [train.py:1114] (0/4) Epoch 4, batch 3150, loss[loss=0.3144, simple_loss=0.3736, pruned_loss=0.1276, over 4614.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3396, pruned_loss=0.09774, over 938336.40 frames. ], batch size: 17, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 17:58:47,646 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+01 6.605e+01 7.303e+01 8.284e+01 1.349e+02, threshold=1.461e+02, percent-clipped=0.0 +2024-07-27 17:58:51,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=45090.666666666664, ans=0.2 +2024-07-27 17:58:55,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=45090.666666666664, ans=0.05 +2024-07-27 17:58:57,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45104.0, ans=0.125 +2024-07-27 17:59:01,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.94 vs. limit=22.5 +2024-07-27 17:59:17,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.02 vs. limit=22.5 +2024-07-27 17:59:27,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=45144.0, ans=0.125 +2024-07-27 17:59:31,926 INFO [train.py:1114] (0/4) Epoch 4, batch 3200, loss[loss=0.2658, simple_loss=0.3358, pruned_loss=0.09791, over 4824.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3392, pruned_loss=0.09714, over 939602.17 frames. 
], batch size: 13, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 17:59:32,127 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:59:40,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=45170.666666666664, ans=0.0 +2024-07-27 17:59:41,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.20 vs. limit=15.0 +2024-07-27 18:00:03,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45184.0, ans=0.1 +2024-07-27 18:01:03,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=45210.666666666664, ans=0.2 +2024-07-27 18:01:03,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.70 vs. limit=22.5 +2024-07-27 18:01:12,029 INFO [train.py:1114] (0/4) Epoch 4, batch 3250, loss[loss=0.3289, simple_loss=0.3906, pruned_loss=0.1336, over 4929.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3388, pruned_loss=0.09716, over 940700.72 frames. ], batch size: 14, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:01:12,742 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.665e+01 7.646e+01 9.547e+01 1.516e+02, threshold=1.529e+02, percent-clipped=1.0 +2024-07-27 18:01:17,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=45224.0, ans=0.0010382608695652176 +2024-07-27 18:01:29,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=45250.666666666664, ans=0.0010324637681159432 +2024-07-27 18:01:32,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0 +2024-07-27 18:01:35,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=45250.666666666664, ans=0.125 +2024-07-27 18:01:41,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=45264.0, ans=0.0 +2024-07-27 18:01:44,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=45277.333333333336, ans=0.2 +2024-07-27 18:01:50,299 INFO [train.py:1114] (0/4) Epoch 4, batch 3300, loss[loss=0.3253, simple_loss=0.3864, pruned_loss=0.1321, over 4695.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3382, pruned_loss=0.09709, over 940806.59 frames. ], batch size: 19, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:01:53,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45290.666666666664, ans=0.125 +2024-07-27 18:02:08,158 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.93 vs. limit=10.0 +2024-07-27 18:02:24,053 INFO [train.py:1114] (0/4) Epoch 4, batch 3350, loss[loss=0.2574, simple_loss=0.3312, pruned_loss=0.09181, over 4636.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3378, pruned_loss=0.09638, over 938689.11 frames. 
], batch size: 17, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:02:24,711 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.140e+01 6.495e+01 7.490e+01 8.565e+01 1.368e+02, threshold=1.498e+02, percent-clipped=0.0 +2024-07-27 18:02:24,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=45357.333333333336, ans=0.125 +2024-07-27 18:02:30,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45370.666666666664, ans=0.125 +2024-07-27 18:02:40,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.61 vs. limit=10.0 +2024-07-27 18:02:41,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=45384.0, ans=0.0 +2024-07-27 18:02:42,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=45384.0, ans=0.0010034782608695642 +2024-07-27 18:02:56,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=45410.666666666664, ans=0.07 +2024-07-27 18:02:56,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45410.666666666664, ans=0.125 +2024-07-27 18:02:57,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=45424.0, ans=0.125 +2024-07-27 18:02:57,869 INFO [train.py:1114] (0/4) Epoch 4, batch 3400, loss[loss=0.1865, simple_loss=0.2653, pruned_loss=0.05387, over 4813.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3378, pruned_loss=0.09667, over 937413.55 frames. ], batch size: 11, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:03:12,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=45437.333333333336, ans=0.2 +2024-07-27 18:03:13,954 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:03:18,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=45450.666666666664, ans=0.125 +2024-07-27 18:03:19,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45450.666666666664, ans=0.125 +2024-07-27 18:03:25,933 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:03:34,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=45477.333333333336, ans=0.0 +2024-07-27 18:03:42,187 INFO [train.py:1114] (0/4) Epoch 4, batch 3450, loss[loss=0.2927, simple_loss=0.3614, pruned_loss=0.112, over 4702.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.338, pruned_loss=0.09668, over 938040.00 frames. 
], batch size: 19, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:03:42,785 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.545e+01 7.401e+01 8.660e+01 1.564e+02, threshold=1.480e+02, percent-clipped=3.0 +2024-07-27 18:03:46,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.34 vs. limit=15.0 +2024-07-27 18:03:46,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=45490.666666666664, ans=0.0 +2024-07-27 18:03:49,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=45504.0, ans=0.2 +2024-07-27 18:03:51,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=45504.0, ans=0.0 +2024-07-27 18:04:02,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=45517.333333333336, ans=0.125 +2024-07-27 18:04:17,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45544.0, ans=0.1 +2024-07-27 18:04:21,781 INFO [train.py:1114] (0/4) Epoch 4, batch 3500, loss[loss=0.2551, simple_loss=0.3202, pruned_loss=0.095, over 4940.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3368, pruned_loss=0.09552, over 938237.50 frames. ], batch size: 12, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:04:22,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.44 vs. limit=15.0 +2024-07-27 18:04:36,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=45584.0, ans=0.125 +2024-07-27 18:04:36,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=45584.0, ans=0.0 +2024-07-27 18:04:42,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45597.333333333336, ans=0.1 +2024-07-27 18:04:48,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.94 vs. limit=12.0 +2024-07-27 18:04:49,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=45610.666666666664, ans=0.125 +2024-07-27 18:04:49,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.09 vs. limit=15.0 +2024-07-27 18:04:55,619 INFO [train.py:1114] (0/4) Epoch 4, batch 3550, loss[loss=0.2617, simple_loss=0.3433, pruned_loss=0.09008, over 4660.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3366, pruned_loss=0.09523, over 938949.21 frames. 
], batch size: 14, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:04:56,228 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.373e+01 7.017e+01 7.924e+01 1.305e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-27 18:05:22,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=45664.0, ans=0.125 +2024-07-27 18:05:25,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=45664.0, ans=0.0 +2024-07-27 18:05:28,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.75 vs. limit=15.0 +2024-07-27 18:05:32,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.48 vs. limit=22.5 +2024-07-27 18:05:34,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=45677.333333333336, ans=0.0 +2024-07-27 18:05:35,458 INFO [train.py:1114] (0/4) Epoch 4, batch 3600, loss[loss=0.2193, simple_loss=0.2798, pruned_loss=0.07936, over 4965.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3353, pruned_loss=0.09443, over 940651.09 frames. ], batch size: 13, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:05:36,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=19.76 vs. limit=22.5 +2024-07-27 18:05:38,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=45690.666666666664, ans=0.125 +2024-07-27 18:05:51,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=45717.333333333336, ans=0.0009310144927536235 +2024-07-27 18:05:51,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=45717.333333333336, ans=12.0 +2024-07-27 18:06:11,516 INFO [train.py:1114] (0/4) Epoch 4, batch 3650, loss[loss=0.279, simple_loss=0.3443, pruned_loss=0.1068, over 4893.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3349, pruned_loss=0.09477, over 941206.54 frames. ], batch size: 15, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:06:12,150 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.653e+01 7.624e+01 9.000e+01 1.438e+02, threshold=1.525e+02, percent-clipped=1.0 +2024-07-27 18:06:15,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-07-27 18:06:20,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=45770.666666666664, ans=0.125 +2024-07-27 18:06:23,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=45770.666666666664, ans=0.025 +2024-07-27 18:06:28,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.52 vs. 
limit=15.0 +2024-07-27 18:06:30,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=45784.0, ans=0.2 +2024-07-27 18:06:30,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.52 vs. limit=22.5 +2024-07-27 18:06:33,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 18:06:37,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45797.333333333336, ans=0.125 +2024-07-27 18:06:40,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=45810.666666666664, ans=0.0 +2024-07-27 18:06:44,770 INFO [train.py:1114] (0/4) Epoch 4, batch 3700, loss[loss=0.3127, simple_loss=0.3985, pruned_loss=0.1135, over 4939.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3347, pruned_loss=0.09442, over 942043.57 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:06:58,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=45850.666666666664, ans=0.0 +2024-07-27 18:07:12,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=45864.0, ans=0.0 +2024-07-27 18:07:21,642 INFO [train.py:1114] (0/4) Epoch 4, batch 3750, loss[loss=0.2105, simple_loss=0.2877, pruned_loss=0.06666, over 4808.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3346, pruned_loss=0.0943, over 943573.96 frames. ], batch size: 11, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:07:22,326 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.296e+01 6.507e+01 7.242e+01 8.300e+01 1.182e+02, threshold=1.448e+02, percent-clipped=0.0 +2024-07-27 18:07:25,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=45890.666666666664, ans=0.125 +2024-07-27 18:07:33,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.36 vs. limit=15.0 +2024-07-27 18:07:44,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=45930.666666666664, ans=0.125 +2024-07-27 18:07:54,669 INFO [train.py:1114] (0/4) Epoch 4, batch 3800, loss[loss=0.2385, simple_loss=0.3213, pruned_loss=0.0779, over 4809.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3345, pruned_loss=0.09437, over 941920.02 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:07:58,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.20 vs. limit=10.0 +2024-07-27 18:08:02,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=45970.666666666664, ans=0.07 +2024-07-27 18:08:11,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=45984.0, ans=0.025 +2024-07-27 18:08:28,700 INFO [train.py:1114] (0/4) Epoch 4, batch 3850, loss[loss=0.2903, simple_loss=0.3614, pruned_loss=0.1096, over 4604.00 frames. 
], tot_loss[loss=0.2616, simple_loss=0.3346, pruned_loss=0.09432, over 942219.78 frames. ], batch size: 16, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:08:30,035 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.355e+01 6.600e+01 7.617e+01 8.935e+01 1.540e+02, threshold=1.523e+02, percent-clipped=1.0 +2024-07-27 18:08:39,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46037.333333333336, ans=0.125 +2024-07-27 18:08:43,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=46050.666666666664, ans=0.0 +2024-07-27 18:08:46,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46050.666666666664, ans=0.125 +2024-07-27 18:08:55,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=46064.0, ans=0.125 +2024-07-27 18:08:55,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-27 18:09:03,320 INFO [train.py:1114] (0/4) Epoch 4, batch 3900, loss[loss=0.2994, simple_loss=0.3733, pruned_loss=0.1128, over 4804.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3351, pruned_loss=0.09436, over 942678.41 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:09:03,413 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:09:12,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=46104.0, ans=0.0 +2024-07-27 18:09:22,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46117.333333333336, ans=0.1 +2024-07-27 18:09:36,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46144.0, ans=0.125 +2024-07-27 18:09:42,265 INFO [train.py:1114] (0/4) Epoch 4, batch 3950, loss[loss=0.2599, simple_loss=0.3392, pruned_loss=0.09029, over 4853.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3351, pruned_loss=0.09482, over 944595.26 frames. ], batch size: 16, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:09:43,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=46157.333333333336, ans=0.025 +2024-07-27 18:09:44,109 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.796e+01 7.722e+01 1.006e+02 1.504e+02, threshold=1.544e+02, percent-clipped=0.0 +2024-07-27 18:09:53,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=46170.666666666664, ans=0.125 +2024-07-27 18:10:27,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46210.666666666664, ans=0.1 +2024-07-27 18:10:28,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=46210.666666666664, ans=0.125 +2024-07-27 18:10:30,481 INFO [train.py:1114] (0/4) Epoch 4, batch 4000, loss[loss=0.2418, simple_loss=0.3136, pruned_loss=0.08497, over 4775.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3356, pruned_loss=0.09517, over 940838.94 frames. 
], batch size: 12, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:10:33,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=46224.0, ans=0.95 +2024-07-27 18:10:33,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=46224.0, ans=0.2 +2024-07-27 18:10:36,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46237.333333333336, ans=0.125 +2024-07-27 18:10:39,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=46237.333333333336, ans=0.125 +2024-07-27 18:10:53,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46264.0, ans=0.125 +2024-07-27 18:10:57,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=46264.0, ans=0.2 +2024-07-27 18:11:06,065 INFO [train.py:1114] (0/4) Epoch 4, batch 4050, loss[loss=0.3639, simple_loss=0.4078, pruned_loss=0.16, over 3432.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3349, pruned_loss=0.09493, over 939583.83 frames. ], batch size: 35, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:11:07,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.344e+01 6.516e+01 7.339e+01 8.508e+01 1.190e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 18:11:37,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=46330.666666666664, ans=0.04949747468305833 +2024-07-27 18:11:47,353 INFO [train.py:1114] (0/4) Epoch 4, batch 4100, loss[loss=0.2985, simple_loss=0.3707, pruned_loss=0.1132, over 4913.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3362, pruned_loss=0.09551, over 938522.90 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:11:48,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46357.333333333336, ans=0.1 +2024-07-27 18:11:53,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=46370.666666666664, ans=0.0 +2024-07-27 18:11:57,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46370.666666666664, ans=0.125 +2024-07-27 18:12:01,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=46384.0, ans=0.0007860869565217386 +2024-07-27 18:12:13,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0 +2024-07-27 18:12:17,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=46410.666666666664, ans=0.0 +2024-07-27 18:12:19,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.42 vs. limit=6.0 +2024-07-27 18:12:21,675 INFO [train.py:1114] (0/4) Epoch 4, batch 4150, loss[loss=0.2533, simple_loss=0.3371, pruned_loss=0.08477, over 4837.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.335, pruned_loss=0.09463, over 938148.27 frames. 
], batch size: 13, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:12:22,994 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.950e+01 8.086e+01 1.014e+02 1.411e+02, threshold=1.617e+02, percent-clipped=0.0 +2024-07-27 18:12:23,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46424.0, ans=0.125 +2024-07-27 18:12:31,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=46437.333333333336, ans=0.125 +2024-07-27 18:12:34,727 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:12:40,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=46450.666666666664, ans=0.0007715942028985508 +2024-07-27 18:12:55,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=46477.333333333336, ans=0.5 +2024-07-27 18:12:56,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=46477.333333333336, ans=0.05 +2024-07-27 18:12:58,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46477.333333333336, ans=0.125 +2024-07-27 18:13:03,901 INFO [train.py:1114] (0/4) Epoch 4, batch 4200, loss[loss=0.2333, simple_loss=0.3114, pruned_loss=0.07756, over 4876.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3352, pruned_loss=0.09448, over 939738.30 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:13:05,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.49 vs. limit=22.5 +2024-07-27 18:13:05,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-27 18:13:08,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46490.666666666664, ans=0.1 +2024-07-27 18:13:33,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=46544.0, ans=0.125 +2024-07-27 18:13:39,599 INFO [train.py:1114] (0/4) Epoch 4, batch 4250, loss[loss=0.2552, simple_loss=0.3122, pruned_loss=0.09909, over 4645.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3359, pruned_loss=0.09517, over 941082.38 frames. 
], batch size: 12, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:13:40,933 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.597e+01 7.169e+01 7.931e+01 1.247e+02, threshold=1.434e+02, percent-clipped=0.0 +2024-07-27 18:14:02,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=46570.666666666664, ans=0.2 +2024-07-27 18:14:04,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=46584.0, ans=0.0 +2024-07-27 18:14:04,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=46584.0, ans=0.125 +2024-07-27 18:14:08,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.46 vs. limit=15.0 +2024-07-27 18:14:10,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=46597.333333333336, ans=0.125 +2024-07-27 18:14:16,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=46597.333333333336, ans=0.09899494936611666 +2024-07-27 18:14:17,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46610.666666666664, ans=0.1 +2024-07-27 18:14:24,133 INFO [train.py:1114] (0/4) Epoch 4, batch 4300, loss[loss=0.2359, simple_loss=0.3232, pruned_loss=0.07432, over 4764.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3364, pruned_loss=0.09565, over 940799.84 frames. ], batch size: 13, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:14:30,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=46637.333333333336, ans=0.0 +2024-07-27 18:14:32,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=46637.333333333336, ans=0.125 +2024-07-27 18:14:35,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=15.0 +2024-07-27 18:14:36,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46637.333333333336, ans=0.125 +2024-07-27 18:14:36,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46637.333333333336, ans=0.1 +2024-07-27 18:14:46,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=46664.0, ans=0.125 +2024-07-27 18:14:47,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.98 vs. limit=12.0 +2024-07-27 18:14:48,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46664.0, ans=0.1 +2024-07-27 18:14:57,358 INFO [train.py:1114] (0/4) Epoch 4, batch 4350, loss[loss=0.2725, simple_loss=0.3534, pruned_loss=0.09578, over 4754.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3365, pruned_loss=0.09503, over 941585.94 frames. 
], batch size: 13, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:14:58,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.647e+01 7.749e+01 8.957e+01 1.514e+02, threshold=1.550e+02, percent-clipped=2.0 +2024-07-27 18:15:18,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=46730.666666666664, ans=0.125 +2024-07-27 18:15:20,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=46730.666666666664, ans=0.125 +2024-07-27 18:15:26,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=46744.0, ans=0.025 +2024-07-27 18:15:28,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=46744.0, ans=0.125 +2024-07-27 18:15:30,925 INFO [train.py:1114] (0/4) Epoch 4, batch 4400, loss[loss=0.2271, simple_loss=0.3214, pruned_loss=0.06637, over 4804.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3364, pruned_loss=0.09509, over 940868.90 frames. ], batch size: 14, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:15:42,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.92 vs. limit=22.5 +2024-07-27 18:15:52,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=46797.333333333336, ans=0.125 +2024-07-27 18:15:55,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.08 vs. limit=6.0 +2024-07-27 18:16:04,521 INFO [train.py:1114] (0/4) Epoch 4, batch 4450, loss[loss=0.2189, simple_loss=0.2883, pruned_loss=0.07474, over 4943.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3378, pruned_loss=0.09634, over 938900.31 frames. ], batch size: 12, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:16:05,840 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.574e+01 7.932e+01 1.004e+02 1.651e+02, threshold=1.586e+02, percent-clipped=3.0 +2024-07-27 18:16:09,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=46824.0, ans=0.0 +2024-07-27 18:16:09,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=46824.0, ans=0.035 +2024-07-27 18:16:12,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.86 vs. limit=22.5 +2024-07-27 18:16:19,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=46850.666666666664, ans=0.125 +2024-07-27 18:16:21,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46850.666666666664, ans=0.1 +2024-07-27 18:16:25,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.99 vs. limit=22.5 +2024-07-27 18:16:38,377 INFO [train.py:1114] (0/4) Epoch 4, batch 4500, loss[loss=0.2542, simple_loss=0.3346, pruned_loss=0.08688, over 4744.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3392, pruned_loss=0.09724, over 938021.84 frames. 
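The scaling.py:214 lines trace hyperparameters — dropout probabilities, balancer limits, bypass and skip rates — that are not constants but schedules evaluated at the current batch_count. A minimal sketch of such a batch-count-keyed piecewise-linear schedule (the real ScheduledFloat lives in icefall's scaling.py; this is an illustration, not that implementation):

```python
from bisect import bisect_right

class ScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by ascending batch_count,
        # e.g. (0.0, 0.3), (20000.0, 0.1); values interpolate linearly between
        # breakpoints and clamp at the ends.
        self.xs = [p[0] for p in points]
        self.ys = [p[1] for p in points]

    def value(self, batch_count: float) -> float:
        i = bisect_right(self.xs, batch_count)
        if i == 0:
            return self.ys[0]
        if i == len(self.xs):
            return self.ys[-1]
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

dropout = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout.value(46424.0))  # past the last breakpoint -> 0.1
```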
], batch size: 14, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:16:42,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=46890.666666666664, ans=0.125 +2024-07-27 18:16:52,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46917.333333333336, ans=0.125 +2024-07-27 18:17:03,473 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0 +2024-07-27 18:17:11,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=46944.0, ans=0.125 +2024-07-27 18:17:12,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46944.0, ans=0.1 +2024-07-27 18:17:13,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46944.0, ans=0.125 +2024-07-27 18:17:15,580 INFO [train.py:1114] (0/4) Epoch 4, batch 4550, loss[loss=0.2622, simple_loss=0.3433, pruned_loss=0.09056, over 4901.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3384, pruned_loss=0.09638, over 940111.83 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 16.0 +2024-07-27 18:17:16,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.82 vs. limit=6.0 +2024-07-27 18:17:17,509 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.369e+01 6.640e+01 7.268e+01 8.274e+01 1.292e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-27 18:17:26,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46970.666666666664, ans=0.1 +2024-07-27 18:17:32,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=46984.0, ans=0.125 +2024-07-27 18:17:37,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46997.333333333336, ans=0.125 +2024-07-27 18:17:48,042 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:17:49,272 INFO [train.py:1114] (0/4) Epoch 4, batch 4600, loss[loss=0.2663, simple_loss=0.3431, pruned_loss=0.09469, over 4518.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3377, pruned_loss=0.09619, over 938703.92 frames. ], batch size: 21, lr: 1.79e-02, grad_scale: 16.0 +2024-07-27 18:17:49,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.36 vs. limit=22.5 +2024-07-27 18:17:52,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=47024.0, ans=0.125 +2024-07-27 18:18:09,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=47064.0, ans=0.05 +2024-07-27 18:18:12,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.30 vs. 
limit=15.0 +2024-07-27 18:18:22,638 INFO [train.py:1114] (0/4) Epoch 4, batch 4650, loss[loss=0.3317, simple_loss=0.4002, pruned_loss=0.1316, over 4829.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3392, pruned_loss=0.0969, over 940369.02 frames. ], batch size: 16, lr: 1.79e-02, grad_scale: 8.0 +2024-07-27 18:18:25,328 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.570e+01 7.431e+01 9.301e+01 1.835e+02, threshold=1.486e+02, percent-clipped=1.0 +2024-07-27 18:18:36,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=47090.666666666664, ans=0.2 +2024-07-27 18:18:57,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47130.666666666664, ans=0.125 +2024-07-27 18:19:06,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=47144.0, ans=0.125 +2024-07-27 18:19:10,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47157.333333333336, ans=0.1 +2024-07-27 18:19:10,949 INFO [train.py:1114] (0/4) Epoch 4, batch 4700, loss[loss=0.2535, simple_loss=0.3103, pruned_loss=0.0984, over 4709.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3382, pruned_loss=0.09628, over 938036.00 frames. ], batch size: 11, lr: 1.79e-02, grad_scale: 8.0 +2024-07-27 18:19:11,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=47157.333333333336, ans=0.125 +2024-07-27 18:19:15,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.70 vs. limit=5.0 +2024-07-27 18:19:18,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47170.666666666664, ans=0.125 +2024-07-27 18:19:20,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=47170.666666666664, ans=0.2 +2024-07-27 18:19:23,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=47184.0, ans=0.125 +2024-07-27 18:19:24,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47184.0, ans=0.125 +2024-07-27 18:19:31,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.12 vs. limit=8.0 +2024-07-27 18:19:32,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=47197.333333333336, ans=0.000609275362318841 +2024-07-27 18:19:32,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47197.333333333336, ans=0.1 +2024-07-27 18:19:32,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=47197.333333333336, ans=0.0 +2024-07-27 18:19:38,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.45 vs. 
limit=10.0 +2024-07-27 18:19:40,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=47210.666666666664, ans=0.025 +2024-07-27 18:19:45,065 INFO [train.py:1114] (0/4) Epoch 4, batch 4750, loss[loss=0.2769, simple_loss=0.3463, pruned_loss=0.1038, over 4515.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.339, pruned_loss=0.09656, over 935929.18 frames. ], batch size: 21, lr: 1.78e-02, grad_scale: 8.0 +2024-07-27 18:19:46,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.16 vs. limit=15.0 +2024-07-27 18:19:47,742 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.439e+01 7.166e+01 9.768e+01 1.474e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 18:19:53,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47237.333333333336, ans=0.125 +2024-07-27 18:19:58,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=47250.666666666664, ans=0.125 +2024-07-27 18:20:08,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=47264.0, ans=0.125 +2024-07-27 18:20:13,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=47277.333333333336, ans=0.04949747468305833 +2024-07-27 18:20:13,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=47277.333333333336, ans=0.1 +2024-07-27 18:20:17,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=47277.333333333336, ans=0.125 +2024-07-27 18:20:17,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.60 vs. limit=22.5 +2024-07-27 18:20:19,570 INFO [train.py:1114] (0/4) Epoch 4, batch 4800, loss[loss=0.2934, simple_loss=0.3711, pruned_loss=0.1079, over 4688.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3395, pruned_loss=0.09744, over 932568.40 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:20:22,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=47290.666666666664, ans=0.125 +2024-07-27 18:20:31,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=47304.0, ans=0.125 +2024-07-27 18:20:39,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47317.333333333336, ans=0.125 +2024-07-27 18:20:46,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47330.666666666664, ans=0.125 +2024-07-27 18:20:55,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47344.0, ans=0.1 +2024-07-27 18:21:03,391 INFO [train.py:1114] (0/4) Epoch 4, batch 4850, loss[loss=0.2489, simple_loss=0.3356, pruned_loss=0.08113, over 4743.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.339, pruned_loss=0.09721, over 932058.72 frames. 
], batch size: 14, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:21:03,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=47357.333333333336, ans=0.025 +2024-07-27 18:21:05,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=47357.333333333336, ans=0.0 +2024-07-27 18:21:06,068 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.442e+01 7.162e+01 7.877e+01 1.649e+02, threshold=1.432e+02, percent-clipped=2.0 +2024-07-27 18:21:23,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=47397.333333333336, ans=10.0 +2024-07-27 18:21:37,092 INFO [train.py:1114] (0/4) Epoch 4, batch 4900, loss[loss=0.2285, simple_loss=0.3049, pruned_loss=0.07607, over 4767.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3372, pruned_loss=0.09583, over 933817.48 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:21:49,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=47437.333333333336, ans=0.125 +2024-07-27 18:21:56,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.72 vs. limit=22.5 +2024-07-27 18:22:04,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47477.333333333336, ans=0.125 +2024-07-27 18:22:11,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=47477.333333333336, ans=0.2 +2024-07-27 18:22:14,196 INFO [train.py:1114] (0/4) Epoch 4, batch 4950, loss[loss=0.3571, simple_loss=0.381, pruned_loss=0.1666, over 3364.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3387, pruned_loss=0.09711, over 931369.82 frames. ], batch size: 35, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:22:16,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 6.647e+01 7.619e+01 9.936e+01 1.671e+02, threshold=1.524e+02, percent-clipped=3.0 +2024-07-27 18:22:34,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=47530.666666666664, ans=0.125 +2024-07-27 18:22:51,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.23 vs. limit=15.0 +2024-07-27 18:22:52,602 INFO [train.py:1114] (0/4) Epoch 4, batch 5000, loss[loss=0.2784, simple_loss=0.3502, pruned_loss=0.1033, over 4660.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3377, pruned_loss=0.0963, over 935242.59 frames. ], batch size: 14, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:23:02,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.46 vs. limit=15.0 +2024-07-27 18:23:19,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.45 vs. 
limit=15.0 +2024-07-27 18:23:19,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47610.666666666664, ans=0.125 +2024-07-27 18:23:23,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=12.0 +2024-07-27 18:23:23,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=47610.666666666664, ans=0.5 +2024-07-27 18:23:26,406 INFO [train.py:1114] (0/4) Epoch 4, batch 5050, loss[loss=0.2138, simple_loss=0.2936, pruned_loss=0.06697, over 4844.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3348, pruned_loss=0.09455, over 937711.12 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:23:29,110 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.671e+01 7.390e+01 9.030e+01 1.584e+02, threshold=1.478e+02, percent-clipped=1.0 +2024-07-27 18:23:29,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=47624.0, ans=0.0005165217391304346 +2024-07-27 18:23:36,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=47637.333333333336, ans=0.2 +2024-07-27 18:23:37,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=47637.333333333336, ans=0.2 +2024-07-27 18:23:38,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=47637.333333333336, ans=0.04949747468305833 +2024-07-27 18:23:59,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=47677.333333333336, ans=0.125 +2024-07-27 18:24:01,860 INFO [train.py:1114] (0/4) Epoch 4, batch 5100, loss[loss=0.2324, simple_loss=0.301, pruned_loss=0.08185, over 4780.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3361, pruned_loss=0.09528, over 935471.18 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:24:05,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.62 vs. limit=6.0 +2024-07-27 18:24:12,889 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.71 vs. limit=6.0 +2024-07-27 18:24:37,615 INFO [train.py:1114] (0/4) Epoch 4, batch 5150, loss[loss=0.2729, simple_loss=0.3523, pruned_loss=0.09677, over 4842.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3379, pruned_loss=0.09624, over 936220.69 frames. 
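The scaling.py:1024 Whitening lines compare a statistic of the grouped activation covariance against a limit ("metric=5.62 vs. limit=6.0" above), flagging layers whose channels drift far from being decorrelated. One natural metric of this kind — an assumption, not necessarily the exact scaling.py formula — is d · ‖C‖_F² / tr(C)², which equals 1.0 exactly when the per-group covariance C is a multiple of the identity and grows with the eigenvalue spread:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels); channels are split into num_groups groups."""
    n, c = x.shape
    d = c // num_groups
    xg = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, frames, d)
    cov = torch.matmul(xg.transpose(1, 2), xg) / n     # per-group covariance
    frob_sq = (cov ** 2).sum(dim=(1, 2))               # ||C||_F^2 per group
    trace_sq = torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1) ** 2
    return (d * frob_sq / trace_sq).mean().item()      # 1.0 iff C = c * I

x = torch.randn(1000, 128)
print(whitening_metric(x, num_groups=4))  # near-white input => close to 1.0
```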
], batch size: 16, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:24:40,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.251e+01 6.747e+01 7.591e+01 8.914e+01 1.388e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 18:24:42,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=47757.333333333336, ans=0.0 +2024-07-27 18:24:51,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47770.666666666664, ans=0.1 +2024-07-27 18:25:04,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=47797.333333333336, ans=0.125 +2024-07-27 18:25:13,266 INFO [train.py:1114] (0/4) Epoch 4, batch 5200, loss[loss=0.2498, simple_loss=0.3357, pruned_loss=0.08199, over 4657.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.338, pruned_loss=0.09641, over 936423.60 frames. ], batch size: 14, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:25:25,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=47837.333333333336, ans=0.125 +2024-07-27 18:25:25,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.47 vs. limit=15.0 +2024-07-27 18:25:31,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=47850.666666666664, ans=0.0004672463768115951 +2024-07-27 18:25:46,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=47877.333333333336, ans=0.0 +2024-07-27 18:25:47,806 INFO [train.py:1114] (0/4) Epoch 4, batch 5250, loss[loss=0.221, simple_loss=0.2972, pruned_loss=0.07239, over 4893.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3369, pruned_loss=0.09567, over 936229.98 frames. ], batch size: 13, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:25:50,443 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.549e+01 7.419e+01 9.087e+01 1.892e+02, threshold=1.484e+02, percent-clipped=1.0 +2024-07-27 18:25:54,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0 +2024-07-27 18:25:58,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=47904.0, ans=0.0 +2024-07-27 18:26:02,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=47917.333333333336, ans=0.125 +2024-07-27 18:26:03,882 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-07-27 18:26:06,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=47917.333333333336, ans=0.95 +2024-07-27 18:26:15,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.89 vs. limit=22.5 +2024-07-27 18:26:21,664 INFO [train.py:1114] (0/4) Epoch 4, batch 5300, loss[loss=0.2355, simple_loss=0.3072, pruned_loss=0.08187, over 4589.00 frames. 
], tot_loss[loss=0.2635, simple_loss=0.3361, pruned_loss=0.09545, over 934808.15 frames. ], batch size: 16, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:26:33,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=47970.666666666664, ans=0.2 +2024-07-27 18:26:38,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=47984.0, ans=0.09899494936611666 +2024-07-27 18:26:39,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=47984.0, ans=0.125 +2024-07-27 18:26:42,611 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-36000.pt +2024-07-27 18:26:53,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=48010.666666666664, ans=0.00043246376811594336 +2024-07-27 18:26:57,410 INFO [train.py:1114] (0/4) Epoch 4, batch 5350, loss[loss=0.2338, simple_loss=0.2934, pruned_loss=0.08705, over 4498.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3369, pruned_loss=0.09563, over 937156.71 frames. ], batch size: 10, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:27:00,013 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.313e+01 6.419e+01 7.171e+01 7.752e+01 1.208e+02, threshold=1.434e+02, percent-clipped=0.0 +2024-07-27 18:27:00,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=48024.0, ans=0.125 +2024-07-27 18:27:01,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.01 vs. limit=15.0 +2024-07-27 18:27:06,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=15.0 +2024-07-27 18:27:12,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.78 vs. limit=15.0 +2024-07-27 18:27:13,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48050.666666666664, ans=0.125 +2024-07-27 18:27:13,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48050.666666666664, ans=0.1 +2024-07-27 18:27:31,398 INFO [train.py:1114] (0/4) Epoch 4, batch 5400, loss[loss=0.2981, simple_loss=0.3718, pruned_loss=0.1122, over 4173.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3387, pruned_loss=0.09697, over 931513.38 frames. 
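Mid-epoch, rank 0 writes a resumable checkpoint ("Saving checkpoint to zipformer/libri/exp/checkpoint-36000.pt" above). A minimal sketch of that kind of step-keyed checkpointing, with typical fields — not a copy of icefall's checkpoint.py:

```python
import torch

def save_checkpoint(filename, model, optimizer, scheduler, batch_idx):
    """Save everything needed to resume training from this optimizer step."""
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "batch_idx_train": batch_idx,
        },
        filename,
    )

# e.g. on rank 0 only, every N optimizer steps:
# save_checkpoint("exp/checkpoint-36000.pt", model, optimizer, scheduler, 36000)
```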
], batch size: 25, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:27:45,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=48117.333333333336, ans=0.2 +2024-07-27 18:27:46,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=48117.333333333336, ans=0.0 +2024-07-27 18:27:55,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=48130.666666666664, ans=0.125 +2024-07-27 18:28:01,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=48144.0, ans=0.05 +2024-07-27 18:28:02,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=48144.0, ans=0.2 +2024-07-27 18:28:05,840 INFO [train.py:1114] (0/4) Epoch 4, batch 5450, loss[loss=0.234, simple_loss=0.3064, pruned_loss=0.08074, over 4702.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3368, pruned_loss=0.09598, over 934332.80 frames. ], batch size: 11, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:28:06,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=48157.333333333336, ans=0.125 +2024-07-27 18:28:14,670 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.320e+01 7.105e+01 8.639e+01 1.249e+02, threshold=1.421e+02, percent-clipped=0.0 +2024-07-27 18:28:31,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48184.0, ans=0.0 +2024-07-27 18:28:39,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=48197.333333333336, ans=0.125 +2024-07-27 18:28:50,524 INFO [train.py:1114] (0/4) Epoch 4, batch 5500, loss[loss=0.3008, simple_loss=0.3548, pruned_loss=0.1234, over 4204.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3363, pruned_loss=0.09582, over 931213.84 frames. ], batch size: 25, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:29:04,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48250.666666666664, ans=0.1 +2024-07-27 18:29:06,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=48250.666666666664, ans=0.0 +2024-07-27 18:29:17,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=48277.333333333336, ans=0.125 +2024-07-27 18:29:24,408 INFO [train.py:1114] (0/4) Epoch 4, batch 5550, loss[loss=0.2444, simple_loss=0.3205, pruned_loss=0.08417, over 4716.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3354, pruned_loss=0.09515, over 933151.67 frames. 
], batch size: 12, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:29:27,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.976e+01 8.822e+01 1.148e+02 2.032e+02, threshold=1.764e+02, percent-clipped=8.0 +2024-07-27 18:29:37,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=48317.333333333336, ans=0.0 +2024-07-27 18:29:52,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48330.666666666664, ans=0.1 +2024-07-27 18:29:53,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48330.666666666664, ans=0.125 +2024-07-27 18:29:56,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=48344.0, ans=0.1 +2024-07-27 18:29:58,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=48344.0, ans=0.0 +2024-07-27 18:30:02,115 INFO [train.py:1114] (0/4) Epoch 4, batch 5600, loss[loss=0.3051, simple_loss=0.3657, pruned_loss=0.1222, over 4748.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3365, pruned_loss=0.09575, over 934239.00 frames. ], batch size: 14, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:30:09,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.76 vs. limit=15.0 +2024-07-27 18:30:16,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=48384.0, ans=0.125 +2024-07-27 18:30:24,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.15 vs. limit=22.5 +2024-07-27 18:30:28,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48397.333333333336, ans=0.0 +2024-07-27 18:30:28,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=48397.333333333336, ans=0.125 +2024-07-27 18:30:31,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=48410.666666666664, ans=0.025 +2024-07-27 18:30:38,214 INFO [train.py:1114] (0/4) Epoch 4, batch 5650, loss[loss=0.296, simple_loss=0.3597, pruned_loss=0.1162, over 4441.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3347, pruned_loss=0.09484, over 936588.84 frames. 
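The lr column decays slowly with training (1.80e-02 near batch 4050, 1.76e-02 by batch 5650 above), which is consistent with an Eden-style scheduler as used in zipformer recipes. In the sketch below, base_lr, lr_batches, lr_epochs and the step/epoch counting are all assumed values chosen to roughly reproduce the logged numbers, not values read from this log:

```python
def eden_lr(base_lr, step, epoch, lr_batches=7500.0, lr_epochs=3.5):
    """Eden-style lr: decays smoothly in both optimizer steps and epochs."""
    batch_factor = ((step**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Assumed numbers: ~36000 optimizer steps (matching the checkpoint name) and
# epoch index 3 (if "Epoch 4" is counted from 1) give lr close to the log:
print(eden_lr(base_lr=0.045, step=36000, epoch=3))  # ~1.77e-02
```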
], batch size: 21, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:30:41,022 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.980e+01 6.257e+01 6.942e+01 8.186e+01 1.408e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 18:30:49,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=48437.333333333336, ans=0.0 +2024-07-27 18:30:50,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=48437.333333333336, ans=0.125 +2024-07-27 18:30:54,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48450.666666666664, ans=0.1 +2024-07-27 18:31:01,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48464.0, ans=0.125 +2024-07-27 18:31:02,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.12 vs. limit=15.0 +2024-07-27 18:31:05,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=48477.333333333336, ans=0.125 +2024-07-27 18:31:11,757 INFO [train.py:1114] (0/4) Epoch 4, batch 5700, loss[loss=0.2066, simple_loss=0.3001, pruned_loss=0.05659, over 4688.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3347, pruned_loss=0.09417, over 938168.57 frames. ], batch size: 13, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:31:21,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.68 vs. limit=15.0 +2024-07-27 18:31:21,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=48504.0, ans=0.0 +2024-07-27 18:31:28,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-07-27 18:31:37,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=48530.666666666664, ans=0.2 +2024-07-27 18:31:45,710 INFO [train.py:1114] (0/4) Epoch 4, batch 5750, loss[loss=0.2378, simple_loss=0.3125, pruned_loss=0.08152, over 4781.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3352, pruned_loss=0.09417, over 938320.07 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:31:51,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.612e+01 7.726e+01 1.001e+02 1.887e+02, threshold=1.545e+02, percent-clipped=6.0 +2024-07-27 18:32:06,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=48584.0, ans=0.5 +2024-07-27 18:32:16,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=48610.666666666664, ans=0.5 +2024-07-27 18:32:22,626 INFO [train.py:1114] (0/4) Epoch 4, batch 5800, loss[loss=0.2717, simple_loss=0.3472, pruned_loss=0.09804, over 4715.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3376, pruned_loss=0.09571, over 937277.73 frames. 
], batch size: 19, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:32:28,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.91 vs. limit=15.0 +2024-07-27 18:32:30,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=48637.333333333336, ans=0.0 +2024-07-27 18:32:45,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=48664.0, ans=0.125 +2024-07-27 18:32:47,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=48664.0, ans=0.0 +2024-07-27 18:32:52,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48677.333333333336, ans=0.125 +2024-07-27 18:32:53,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48677.333333333336, ans=0.0 +2024-07-27 18:32:56,530 INFO [train.py:1114] (0/4) Epoch 4, batch 5850, loss[loss=0.2897, simple_loss=0.3572, pruned_loss=0.1111, over 4545.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.337, pruned_loss=0.09578, over 938072.65 frames. ], batch size: 21, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:32:58,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-07-27 18:32:59,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48690.666666666664, ans=0.125 +2024-07-27 18:32:59,842 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 6.444e+01 7.225e+01 8.494e+01 1.330e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 18:33:10,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48704.0, ans=0.125 +2024-07-27 18:33:10,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=48704.0, ans=0.125 +2024-07-27 18:33:27,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=48744.0, ans=0.025 +2024-07-27 18:33:34,384 INFO [train.py:1114] (0/4) Epoch 4, batch 5900, loss[loss=0.2885, simple_loss=0.3641, pruned_loss=0.1065, over 4679.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3366, pruned_loss=0.09572, over 938449.97 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:33:38,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.0 +2024-07-27 18:34:07,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=48810.666666666664, ans=0.2 +2024-07-27 18:34:13,798 INFO [train.py:1114] (0/4) Epoch 4, batch 5950, loss[loss=0.3294, simple_loss=0.3824, pruned_loss=0.1382, over 4681.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3365, pruned_loss=0.09539, over 940504.28 frames. 
], batch size: 15, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:34:17,269 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.376e+01 6.577e+01 7.476e+01 8.958e+01 1.675e+02, threshold=1.495e+02, percent-clipped=2.0 +2024-07-27 18:34:20,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48837.333333333336, ans=0.125 +2024-07-27 18:34:29,624 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.61 vs. limit=22.5 +2024-07-27 18:34:38,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48864.0, ans=0.125 +2024-07-27 18:34:47,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=48890.666666666664, ans=0.125 +2024-07-27 18:34:47,644 INFO [train.py:1114] (0/4) Epoch 4, batch 6000, loss[loss=0.2978, simple_loss=0.3694, pruned_loss=0.1131, over 4173.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3354, pruned_loss=0.09485, over 938172.02 frames. ], batch size: 25, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:34:47,645 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 18:35:03,513 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2107, simple_loss=0.3128, pruned_loss=0.05435, over 944034.00 frames. +2024-07-27 18:35:03,513 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 18:35:10,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=48904.0, ans=0.0 +2024-07-27 18:35:23,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48930.666666666664, ans=0.125 +2024-07-27 18:35:23,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.61 vs. limit=5.0 +2024-07-27 18:35:37,416 INFO [train.py:1114] (0/4) Epoch 4, batch 6050, loss[loss=0.2526, simple_loss=0.3247, pruned_loss=0.09023, over 4776.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3353, pruned_loss=0.09488, over 939321.09 frames. ], batch size: 12, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:35:42,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=48957.333333333336, ans=0.125 +2024-07-27 18:35:42,517 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.393e+01 7.329e+01 8.400e+01 1.158e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 18:35:53,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48984.0, ans=0.1 +2024-07-27 18:35:55,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.81 vs. 
limit=15.0 +2024-07-27 18:35:57,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=48984.0, ans=0.2 +2024-07-27 18:35:58,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=48984.0, ans=0.125 +2024-07-27 18:36:02,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=48997.333333333336, ans=0.0 +2024-07-27 18:36:06,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=49010.666666666664, ans=0.00021507246376811784 +2024-07-27 18:36:09,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=49010.666666666664, ans=0.125 +2024-07-27 18:36:12,767 INFO [train.py:1114] (0/4) Epoch 4, batch 6100, loss[loss=0.3541, simple_loss=0.412, pruned_loss=0.148, over 4669.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3342, pruned_loss=0.09398, over 938371.17 frames. ], batch size: 15, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:36:18,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=49037.333333333336, ans=0.2 +2024-07-27 18:36:23,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=49037.333333333336, ans=0.125 +2024-07-27 18:36:25,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49037.333333333336, ans=0.1 +2024-07-27 18:36:36,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=49064.0, ans=0.00020347826086956552 +2024-07-27 18:36:36,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.57 vs. limit=15.0 +2024-07-27 18:36:38,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49064.0, ans=0.125 +2024-07-27 18:36:46,651 INFO [train.py:1114] (0/4) Epoch 4, batch 6150, loss[loss=0.3304, simple_loss=0.3756, pruned_loss=0.1426, over 3453.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3342, pruned_loss=0.09386, over 937338.25 frames. ], batch size: 35, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:36:50,109 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.101e+01 6.312e+01 7.204e+01 8.554e+01 1.450e+02, threshold=1.441e+02, percent-clipped=0.0 +2024-07-27 18:36:55,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=49104.0, ans=0.2 +2024-07-27 18:37:01,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.88 vs. limit=15.0 +2024-07-27 18:37:11,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.83 vs. limit=15.0 +2024-07-27 18:37:14,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.09 vs. limit=15.0 +2024-07-27 18:37:20,703 INFO [train.py:1114] (0/4) Epoch 4, batch 6200, loss[loss=0.2704, simple_loss=0.3529, pruned_loss=0.0939, over 4739.00 frames. 
], tot_loss[loss=0.2613, simple_loss=0.3347, pruned_loss=0.09398, over 937019.00 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:37:27,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=49170.666666666664, ans=0.125 +2024-07-27 18:37:44,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=49184.0, ans=0.1 +2024-07-27 18:37:56,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49197.333333333336, ans=0.1 +2024-07-27 18:37:59,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=49210.666666666664, ans=0.000171594202898551 +2024-07-27 18:38:04,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=49210.666666666664, ans=0.1 +2024-07-27 18:38:05,390 INFO [train.py:1114] (0/4) Epoch 4, batch 6250, loss[loss=0.2485, simple_loss=0.3372, pruned_loss=0.07988, over 4821.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3339, pruned_loss=0.0939, over 933471.36 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:38:08,832 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+01 6.578e+01 7.418e+01 8.909e+01 1.704e+02, threshold=1.484e+02, percent-clipped=3.0 +2024-07-27 18:39:02,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=49277.333333333336, ans=0.125 +2024-07-27 18:39:03,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49290.666666666664, ans=0.125 +2024-07-27 18:39:03,626 INFO [train.py:1114] (0/4) Epoch 4, batch 6300, loss[loss=0.1884, simple_loss=0.261, pruned_loss=0.05786, over 4497.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3335, pruned_loss=0.09343, over 930004.74 frames. ], batch size: 10, lr: 1.75e-02, grad_scale: 16.0 +2024-07-27 18:39:03,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=49290.666666666664, ans=0.125 +2024-07-27 18:39:17,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=49317.333333333336, ans=0.125 +2024-07-27 18:39:20,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=49317.333333333336, ans=0.2 +2024-07-27 18:39:26,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=49330.666666666664, ans=0.125 +2024-07-27 18:39:28,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49330.666666666664, ans=0.125 +2024-07-27 18:39:49,734 INFO [train.py:1114] (0/4) Epoch 4, batch 6350, loss[loss=0.281, simple_loss=0.3529, pruned_loss=0.1045, over 4541.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3326, pruned_loss=0.09259, over 933888.57 frames. 
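At batch 6000 (logged a little above), training pauses to compute a validation loss over the full validation set ("validation: loss=0.2107 ... over 944034.00 frames") and reports peak GPU memory. A hedged sketch of such a pass — loss_fn and valid_loader are placeholders, not icefall APIs:

```python
import torch

@torch.no_grad()
def compute_validation_loss(model, valid_loader, loss_fn, device):
    """Frame-weighted average loss over the validation set, plus peak memory."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in valid_loader:
        # loss_fn is assumed to return the summed loss and frame count per batch
        batch_loss, num_frames = loss_fn(model, batch, device)
        tot_loss += batch_loss.item()
        tot_frames += num_frames
    model.train()
    mem_mb = (
        torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        if torch.cuda.is_available()
        else 0
    )
    return tot_loss / tot_frames, mem_mb
```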
], batch size: 21, lr: 1.75e-02, grad_scale: 16.0 +2024-07-27 18:39:49,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=49357.333333333336, ans=0.07 +2024-07-27 18:40:02,482 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.143e+01 6.766e+01 7.753e+01 2.111e+02, threshold=1.353e+02, percent-clipped=1.0 +2024-07-27 18:40:14,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=49370.666666666664, ans=0.00013681159420289926 +2024-07-27 18:40:17,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49370.666666666664, ans=0.125 +2024-07-27 18:41:01,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=49410.666666666664, ans=0.2 +2024-07-27 18:41:02,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49410.666666666664, ans=0.1 +2024-07-27 18:41:03,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=49410.666666666664, ans=0.125 +2024-07-27 18:41:05,494 INFO [train.py:1114] (0/4) Epoch 4, batch 6400, loss[loss=0.264, simple_loss=0.3433, pruned_loss=0.0923, over 4629.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3327, pruned_loss=0.09314, over 935066.56 frames. ], batch size: 13, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:41:13,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.79 vs. limit=10.0 +2024-07-27 18:41:33,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=49477.333333333336, ans=0.00011362318840579637 +2024-07-27 18:41:35,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49477.333333333336, ans=0.125 +2024-07-27 18:41:39,067 INFO [train.py:1114] (0/4) Epoch 4, batch 6450, loss[loss=0.2774, simple_loss=0.3405, pruned_loss=0.1072, over 4547.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3333, pruned_loss=0.09279, over 938690.88 frames. 
], batch size: 21, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:41:41,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=49490.666666666664, ans=0.125 +2024-07-27 18:41:42,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.447e+01 6.416e+01 7.153e+01 7.876e+01 1.277e+02, threshold=1.431e+02, percent-clipped=0.0 +2024-07-27 18:41:48,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49504.0, ans=0.1 +2024-07-27 18:41:57,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=49517.333333333336, ans=0.125 +2024-07-27 18:42:08,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49530.666666666664, ans=0.1 +2024-07-27 18:42:13,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49544.0, ans=0.125 +2024-07-27 18:42:19,514 INFO [train.py:1114] (0/4) Epoch 4, batch 6500, loss[loss=0.3572, simple_loss=0.3929, pruned_loss=0.1608, over 3298.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.332, pruned_loss=0.09208, over 939827.08 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:42:42,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=49584.0, ans=0.125 +2024-07-27 18:42:45,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=49584.0, ans=0.0 +2024-07-27 18:42:50,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.85 vs. limit=15.0 +2024-07-27 18:43:14,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.78 vs. limit=15.0 +2024-07-27 18:43:17,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=49610.666666666664, ans=0.2 +2024-07-27 18:43:19,876 INFO [train.py:1114] (0/4) Epoch 4, batch 6550, loss[loss=0.2332, simple_loss=0.3077, pruned_loss=0.07937, over 4820.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3316, pruned_loss=0.09183, over 942890.62 frames. ], batch size: 11, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:43:23,935 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.133e+01 6.247e+01 6.814e+01 7.966e+01 1.482e+02, threshold=1.363e+02, percent-clipped=1.0 +2024-07-27 18:43:27,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.18 vs. 
limit=12.0 +2024-07-27 18:43:29,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=49637.333333333336, ans=0.025 +2024-07-27 18:43:37,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=49650.666666666664, ans=0.025 +2024-07-27 18:43:53,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49664.0, ans=0.1 +2024-07-27 18:43:58,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=49677.333333333336, ans=0.125 +2024-07-27 18:44:02,855 INFO [train.py:1114] (0/4) Epoch 4, batch 6600, loss[loss=0.284, simple_loss=0.3519, pruned_loss=0.1081, over 4928.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3315, pruned_loss=0.09196, over 944796.78 frames. ], batch size: 14, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:44:11,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=49704.0, ans=0.2 +2024-07-27 18:44:29,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=49717.333333333336, ans=0.0 +2024-07-27 18:44:44,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=49730.666666666664, ans=0.05 +2024-07-27 18:44:47,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49744.0, ans=0.125 +2024-07-27 18:44:51,843 INFO [train.py:1114] (0/4) Epoch 4, batch 6650, loss[loss=0.2927, simple_loss=0.3632, pruned_loss=0.1111, over 4646.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3326, pruned_loss=0.09304, over 943396.95 frames. ], batch size: 17, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:45:01,621 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.604e+01 6.574e+01 7.387e+01 9.385e+01 1.471e+02, threshold=1.477e+02, percent-clipped=2.0 +2024-07-27 18:45:13,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49784.0, ans=0.1 +2024-07-27 18:45:18,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=49797.333333333336, ans=0.125 +2024-07-27 18:45:29,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=49797.333333333336, ans=0.125 +2024-07-27 18:45:37,465 INFO [train.py:1114] (0/4) Epoch 4, batch 6700, loss[loss=0.2515, simple_loss=0.3355, pruned_loss=0.0837, over 4756.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3319, pruned_loss=0.09293, over 942129.93 frames. 
], batch size: 19, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:45:41,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=49824.0, ans=0.125 +2024-07-27 18:45:55,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=49850.666666666664, ans=0.2 +2024-07-27 18:45:56,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=49850.666666666664, ans=0.025 +2024-07-27 18:46:19,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=49877.333333333336, ans=0.0 +2024-07-27 18:46:20,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=49877.333333333336, ans=0.0 +2024-07-27 18:46:20,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49877.333333333336, ans=0.1 +2024-07-27 18:46:24,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49877.333333333336, ans=0.125 +2024-07-27 18:46:26,259 INFO [train.py:1114] (0/4) Epoch 4, batch 6750, loss[loss=0.2412, simple_loss=0.3161, pruned_loss=0.08318, over 4360.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3324, pruned_loss=0.09344, over 940334.42 frames. ], batch size: 26, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:46:30,209 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.238e+01 6.545e+01 7.445e+01 9.250e+01 1.508e+02, threshold=1.489e+02, percent-clipped=1.0 +2024-07-27 18:46:37,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49904.0, ans=0.125 +2024-07-27 18:46:44,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49917.333333333336, ans=0.1 +2024-07-27 18:47:01,369 INFO [train.py:1114] (0/4) Epoch 4, batch 6800, loss[loss=0.2443, simple_loss=0.3223, pruned_loss=0.08314, over 4641.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.333, pruned_loss=0.09406, over 938716.69 frames. ], batch size: 13, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:47:33,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=49970.666666666664, ans=0.0 +2024-07-27 18:48:02,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. limit=6.0 +2024-07-27 18:48:05,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=50010.666666666664, ans=0.025 +2024-07-27 18:48:37,372 INFO [train.py:1114] (0/4) Epoch 4, batch 6850, loss[loss=0.226, simple_loss=0.3106, pruned_loss=0.07071, over 4694.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3323, pruned_loss=0.09367, over 940469.34 frames. 
], batch size: 13, lr: 1.74e-02, grad_scale: 16.0 +2024-07-27 18:48:42,390 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.490e+01 7.044e+01 8.185e+01 1.640e+02, threshold=1.409e+02, percent-clipped=3.0 +2024-07-27 18:48:42,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=50024.0, ans=0.05 +2024-07-27 18:48:44,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50024.0, ans=0.1 +2024-07-27 18:48:45,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=50037.333333333336, ans=0.125 +2024-07-27 18:48:46,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=12.0 +2024-07-27 18:49:07,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=50050.666666666664, ans=0.125 +2024-07-27 18:49:30,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.52 vs. limit=22.5 +2024-07-27 18:49:46,662 INFO [train.py:1114] (0/4) Epoch 4, batch 6900, loss[loss=0.2536, simple_loss=0.3241, pruned_loss=0.0916, over 4957.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3331, pruned_loss=0.0939, over 943026.37 frames. ], batch size: 13, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:49:53,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0 +2024-07-27 18:49:53,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=50104.0, ans=0.0 +2024-07-27 18:49:55,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=50104.0, ans=0.5 +2024-07-27 18:49:56,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=50104.0, ans=0.125 +2024-07-27 18:50:02,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=12.0 +2024-07-27 18:50:42,715 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:50:43,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=50130.666666666664, ans=0.015 +2024-07-27 18:50:45,246 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.60 vs. limit=15.0 +2024-07-27 18:50:56,898 INFO [train.py:1114] (0/4) Epoch 4, batch 6950, loss[loss=0.2283, simple_loss=0.3023, pruned_loss=0.07711, over 4528.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3327, pruned_loss=0.09358, over 940645.69 frames. 
], batch size: 10, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:50:58,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50157.333333333336, ans=0.1 +2024-07-27 18:51:01,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=50157.333333333336, ans=0.0 +2024-07-27 18:51:01,456 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.625e+01 7.241e+01 8.326e+01 1.274e+02, threshold=1.448e+02, percent-clipped=0.0 +2024-07-27 18:51:28,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50210.666666666664, ans=0.1 +2024-07-27 18:51:31,303 INFO [train.py:1114] (0/4) Epoch 4, batch 7000, loss[loss=0.3044, simple_loss=0.3784, pruned_loss=0.1152, over 4596.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3329, pruned_loss=0.09371, over 939204.15 frames. ], batch size: 17, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:51:48,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50250.666666666664, ans=0.125 +2024-07-27 18:51:52,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50264.0, ans=0.125 +2024-07-27 18:52:05,899 INFO [train.py:1114] (0/4) Epoch 4, batch 7050, loss[loss=0.2906, simple_loss=0.3694, pruned_loss=0.1059, over 4768.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3332, pruned_loss=0.09372, over 942264.86 frames. ], batch size: 19, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:52:10,848 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.665e+01 7.548e+01 9.503e+01 1.584e+02, threshold=1.510e+02, percent-clipped=1.0 +2024-07-27 18:52:20,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50317.333333333336, ans=0.125 +2024-07-27 18:52:41,455 INFO [train.py:1114] (0/4) Epoch 4, batch 7100, loss[loss=0.2773, simple_loss=0.3435, pruned_loss=0.1056, over 4807.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3342, pruned_loss=0.09461, over 936569.96 frames. ], batch size: 15, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:52:49,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.09 vs. limit=22.5 +2024-07-27 18:53:03,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=50397.333333333336, ans=0.0 +2024-07-27 18:53:13,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=50410.666666666664, ans=0.025 +2024-07-27 18:53:14,537 INFO [train.py:1114] (0/4) Epoch 4, batch 7150, loss[loss=0.265, simple_loss=0.3484, pruned_loss=0.09078, over 4729.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3319, pruned_loss=0.09318, over 937600.66 frames. ], batch size: 22, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:53:18,911 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 6.686e+01 7.675e+01 9.181e+01 1.338e+02, threshold=1.535e+02, percent-clipped=0.0 +2024-07-27 18:53:19,263 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.86 vs. 
limit=15.0 +2024-07-27 18:53:26,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=50437.333333333336, ans=0.125 +2024-07-27 18:53:37,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=50464.0, ans=0.0 +2024-07-27 18:53:40,020 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.60 vs. limit=15.0 +2024-07-27 18:53:47,339 INFO [train.py:1114] (0/4) Epoch 4, batch 7200, loss[loss=0.2578, simple_loss=0.3458, pruned_loss=0.08494, over 4802.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3319, pruned_loss=0.09291, over 938166.23 frames. ], batch size: 15, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:53:51,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=50490.666666666664, ans=0.125 +2024-07-27 18:53:58,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=50504.0, ans=0.125 +2024-07-27 18:54:00,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.50 vs. limit=22.5 +2024-07-27 18:54:07,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-07-27 18:54:12,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.31 vs. limit=15.0 +2024-07-27 18:54:13,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50544.0, ans=0.1 +2024-07-27 18:54:20,059 INFO [train.py:1114] (0/4) Epoch 4, batch 7250, loss[loss=0.2125, simple_loss=0.3034, pruned_loss=0.06081, over 4841.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3309, pruned_loss=0.09247, over 939673.48 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:54:24,526 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.173e+01 6.374e+01 7.128e+01 8.077e+01 1.230e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 18:54:29,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50570.666666666664, ans=0.125 +2024-07-27 18:54:42,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=15.0 +2024-07-27 18:54:43,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=15.0 +2024-07-27 18:54:44,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.80 vs. limit=15.0 +2024-07-27 18:54:48,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=50610.666666666664, ans=0.0 +2024-07-27 18:54:52,633 INFO [train.py:1114] (0/4) Epoch 4, batch 7300, loss[loss=0.2077, simple_loss=0.2825, pruned_loss=0.06642, over 4852.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3309, pruned_loss=0.09273, over 940113.98 frames. 
], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:54:55,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50624.0, ans=0.125 +2024-07-27 18:55:00,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=50637.333333333336, ans=0.125 +2024-07-27 18:55:00,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50637.333333333336, ans=0.125 +2024-07-27 18:55:08,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=50650.666666666664, ans=0.125 +2024-07-27 18:55:22,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-07-27 18:55:25,718 INFO [train.py:1114] (0/4) Epoch 4, batch 7350, loss[loss=0.2514, simple_loss=0.3153, pruned_loss=0.09374, over 4641.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3312, pruned_loss=0.09242, over 938988.81 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:55:30,232 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.562e+01 7.152e+01 9.266e+01 1.352e+02, threshold=1.430e+02, percent-clipped=0.0 +2024-07-27 18:55:39,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=50717.333333333336, ans=0.125 +2024-07-27 18:55:45,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=50730.666666666664, ans=0.1 +2024-07-27 18:55:46,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-27 18:55:58,344 INFO [train.py:1114] (0/4) Epoch 4, batch 7400, loss[loss=0.2352, simple_loss=0.3184, pruned_loss=0.076, over 4695.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3321, pruned_loss=0.09216, over 940486.85 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:56:08,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=50770.666666666664, ans=0.125 +2024-07-27 18:56:14,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50784.0, ans=0.125 +2024-07-27 18:56:28,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=50810.666666666664, ans=0.125 +2024-07-27 18:56:32,545 INFO [train.py:1114] (0/4) Epoch 4, batch 7450, loss[loss=0.2514, simple_loss=0.3271, pruned_loss=0.08781, over 4624.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3319, pruned_loss=0.09299, over 937912.96 frames. 
], batch size: 11, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:56:37,133 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.488e+01 6.489e+01 7.278e+01 8.154e+01 1.203e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 18:56:38,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=50837.333333333336, ans=0.0 +2024-07-27 18:56:48,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50850.666666666664, ans=0.1 +2024-07-27 18:56:54,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.07 vs. limit=15.0 +2024-07-27 18:56:55,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=50864.0, ans=0.1 +2024-07-27 18:56:57,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=50864.0, ans=0.0 +2024-07-27 18:56:57,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=15.0 +2024-07-27 18:56:57,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.27 vs. limit=22.5 +2024-07-27 18:56:57,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=50864.0, ans=0.2 +2024-07-27 18:57:01,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=50877.333333333336, ans=0.0 +2024-07-27 18:57:06,517 INFO [train.py:1114] (0/4) Epoch 4, batch 7500, loss[loss=0.3715, simple_loss=0.3859, pruned_loss=0.1785, over 3326.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.333, pruned_loss=0.09369, over 936546.76 frames. ], batch size: 36, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:57:10,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=50890.666666666664, ans=0.1 +2024-07-27 18:57:17,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=50904.0, ans=0.0 +2024-07-27 18:57:21,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=50917.333333333336, ans=0.125 +2024-07-27 18:57:30,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=50930.666666666664, ans=0.2 +2024-07-27 18:57:32,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=50944.0, ans=0.025 +2024-07-27 18:57:37,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50944.0, ans=0.1 +2024-07-27 18:57:38,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=50957.333333333336, ans=0.2 +2024-07-27 18:57:39,286 INFO [train.py:1114] (0/4) Epoch 4, batch 7550, loss[loss=0.3122, simple_loss=0.3722, pruned_loss=0.1261, over 4651.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3347, pruned_loss=0.09428, over 936274.86 frames. 
], batch size: 17, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:57:46,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=50957.333333333336, ans=0.125 +2024-07-27 18:57:46,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.472e+01 6.522e+01 7.335e+01 8.635e+01 1.380e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 18:58:13,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.69 vs. limit=22.5 +2024-07-27 18:58:14,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=50997.333333333336, ans=0.025 +2024-07-27 18:58:16,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.43 vs. limit=22.5 +2024-07-27 18:58:20,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=51010.666666666664, ans=0.125 +2024-07-27 18:58:20,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.20 vs. limit=12.0 +2024-07-27 18:58:22,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=51010.666666666664, ans=0.0 +2024-07-27 18:58:24,745 INFO [train.py:1114] (0/4) Epoch 4, batch 7600, loss[loss=0.2785, simple_loss=0.3615, pruned_loss=0.09769, over 4811.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3346, pruned_loss=0.09441, over 937900.36 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:58:36,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51037.333333333336, ans=0.125 +2024-07-27 18:58:37,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51037.333333333336, ans=0.125 +2024-07-27 18:58:39,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=51037.333333333336, ans=0.025 +2024-07-27 18:58:52,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=51050.666666666664, ans=12.0 +2024-07-27 18:58:58,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51064.0, ans=0.1 +2024-07-27 18:59:06,973 INFO [train.py:1114] (0/4) Epoch 4, batch 7650, loss[loss=0.2471, simple_loss=0.3135, pruned_loss=0.09036, over 4933.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3329, pruned_loss=0.09281, over 936900.87 frames. ], batch size: 12, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:59:13,315 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.235e+01 6.494e+01 7.893e+01 8.811e+01 1.540e+02, threshold=1.579e+02, percent-clipped=3.0 +2024-07-27 18:59:14,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=51090.666666666664, ans=0.125 +2024-07-27 18:59:29,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.78 vs. 
limit=10.0 +2024-07-27 18:59:34,368 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:59:34,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=51130.666666666664, ans=0.025 +2024-07-27 18:59:38,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=51144.0, ans=0.04949747468305833 +2024-07-27 18:59:44,098 INFO [train.py:1114] (0/4) Epoch 4, batch 7700, loss[loss=0.2391, simple_loss=0.3159, pruned_loss=0.08109, over 4704.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3342, pruned_loss=0.0937, over 934117.86 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:59:47,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=51157.333333333336, ans=0.025 +2024-07-27 18:59:52,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.89 vs. limit=15.0 +2024-07-27 19:00:05,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=51197.333333333336, ans=0.125 +2024-07-27 19:00:19,722 INFO [train.py:1114] (0/4) Epoch 4, batch 7750, loss[loss=0.2595, simple_loss=0.3379, pruned_loss=0.09057, over 4935.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3348, pruned_loss=0.09396, over 935051.39 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 19:00:19,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=51224.0, ans=0.0 +2024-07-27 19:00:24,700 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.378e+01 6.531e+01 7.452e+01 8.452e+01 1.344e+02, threshold=1.490e+02, percent-clipped=0.0 +2024-07-27 19:00:50,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.83 vs. limit=15.0 +2024-07-27 19:00:54,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=51264.0, ans=0.0 +2024-07-27 19:01:09,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=51264.0, ans=0.0 +2024-07-27 19:01:10,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=51277.333333333336, ans=0.2 +2024-07-27 19:01:11,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.81 vs. limit=22.5 +2024-07-27 19:01:21,758 INFO [train.py:1114] (0/4) Epoch 4, batch 7800, loss[loss=0.3066, simple_loss=0.3799, pruned_loss=0.1167, over 4659.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3349, pruned_loss=0.09377, over 936626.54 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 19:01:29,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.51 vs. 
limit=15.0 +2024-07-27 19:01:34,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51317.333333333336, ans=0.1 +2024-07-27 19:01:37,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=51317.333333333336, ans=0.0 +2024-07-27 19:01:43,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=51330.666666666664, ans=0.0 +2024-07-27 19:01:53,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=51344.0, ans=0.0 +2024-07-27 19:01:59,122 INFO [train.py:1114] (0/4) Epoch 4, batch 7850, loss[loss=0.241, simple_loss=0.3165, pruned_loss=0.08273, over 4529.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3348, pruned_loss=0.09369, over 935945.55 frames. ], batch size: 10, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:02:04,740 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.164e+01 6.243e+01 7.019e+01 7.976e+01 1.332e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 19:02:22,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=51384.0, ans=0.125 +2024-07-27 19:02:29,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=51397.333333333336, ans=0.125 +2024-07-27 19:02:29,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.63 vs. limit=15.0 +2024-07-27 19:02:30,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=51397.333333333336, ans=0.07 +2024-07-27 19:02:43,543 INFO [train.py:1114] (0/4) Epoch 4, batch 7900, loss[loss=0.2308, simple_loss=0.3084, pruned_loss=0.07658, over 4878.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3361, pruned_loss=0.09425, over 933479.95 frames. ], batch size: 14, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:02:55,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=51437.333333333336, ans=0.2 +2024-07-27 19:03:20,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51477.333333333336, ans=0.1 +2024-07-27 19:03:22,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51477.333333333336, ans=0.0 +2024-07-27 19:03:25,630 INFO [train.py:1114] (0/4) Epoch 4, batch 7950, loss[loss=0.3248, simple_loss=0.3772, pruned_loss=0.1362, over 3577.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3345, pruned_loss=0.09324, over 935981.48 frames. 
], batch size: 35, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:03:27,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=51490.666666666664, ans=0.0 +2024-07-27 19:03:28,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=51490.666666666664, ans=0.0 +2024-07-27 19:03:30,100 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.275e+01 6.617e+01 8.169e+01 1.040e+02 2.019e+02, threshold=1.634e+02, percent-clipped=10.0 +2024-07-27 19:03:34,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=51504.0, ans=0.0 +2024-07-27 19:03:36,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-07-27 19:03:40,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=51517.333333333336, ans=0.2 +2024-07-27 19:03:45,179 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:03:47,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=51530.666666666664, ans=0.0 +2024-07-27 19:03:48,657 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-27 19:04:01,620 INFO [train.py:1114] (0/4) Epoch 4, batch 8000, loss[loss=0.1872, simple_loss=0.2674, pruned_loss=0.05349, over 4620.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3332, pruned_loss=0.09263, over 935091.69 frames. ], batch size: 11, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:04:04,430 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=8.340e-01 +2024-07-27 19:04:15,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=51584.0, ans=0.2 +2024-07-27 19:04:29,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=51597.333333333336, ans=0.125 +2024-07-27 19:04:39,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=51610.666666666664, ans=0.125 +2024-07-27 19:04:43,561 INFO [train.py:1114] (0/4) Epoch 4, batch 8050, loss[loss=0.2802, simple_loss=0.3685, pruned_loss=0.09598, over 4815.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3327, pruned_loss=0.09211, over 935054.11 frames. ], batch size: 14, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:04:48,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.358e+01 7.394e+01 8.578e+01 1.528e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 19:04:49,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.28 vs. 
limit=10.0 +2024-07-27 19:05:00,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51650.666666666664, ans=0.1 +2024-07-27 19:05:03,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51664.0, ans=0.1 +2024-07-27 19:05:04,612 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.60 vs. limit=15.0 +2024-07-27 19:05:08,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51677.333333333336, ans=0.125 +2024-07-27 19:05:16,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51677.333333333336, ans=0.125 +2024-07-27 19:05:17,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=51677.333333333336, ans=0.0 +2024-07-27 19:05:20,033 INFO [train.py:1114] (0/4) Epoch 4, batch 8100, loss[loss=0.3309, simple_loss=0.3972, pruned_loss=0.1323, over 4789.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3343, pruned_loss=0.09201, over 934112.12 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:05:22,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=51690.666666666664, ans=10.0 +2024-07-27 19:05:35,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.90 vs. limit=10.0 +2024-07-27 19:05:56,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-07-27 19:05:57,499 INFO [train.py:1114] (0/4) Epoch 4, batch 8150, loss[loss=0.2677, simple_loss=0.3515, pruned_loss=0.09192, over 4810.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.334, pruned_loss=0.0923, over 937701.52 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:05:57,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=51757.333333333336, ans=0.125 +2024-07-27 19:05:58,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=51757.333333333336, ans=0.2 +2024-07-27 19:05:59,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51757.333333333336, ans=0.0 +2024-07-27 19:06:02,179 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.306e+01 6.341e+01 7.110e+01 7.968e+01 1.215e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 19:06:11,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.80 vs. 
limit=15.0 +2024-07-27 19:06:14,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=51784.0, ans=0.0 +2024-07-27 19:06:17,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51797.333333333336, ans=0.1 +2024-07-27 19:06:17,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51797.333333333336, ans=0.125 +2024-07-27 19:06:22,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51797.333333333336, ans=0.0 +2024-07-27 19:06:32,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.98 vs. limit=15.0 +2024-07-27 19:06:32,397 INFO [train.py:1114] (0/4) Epoch 4, batch 8200, loss[loss=0.2957, simple_loss=0.363, pruned_loss=0.1142, over 4794.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3355, pruned_loss=0.09309, over 938741.73 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:06:41,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.64 vs. limit=15.0 +2024-07-27 19:06:42,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51837.333333333336, ans=0.0 +2024-07-27 19:06:43,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=51837.333333333336, ans=0.07 +2024-07-27 19:06:48,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51850.666666666664, ans=0.125 +2024-07-27 19:06:48,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51850.666666666664, ans=0.125 +2024-07-27 19:06:49,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=51850.666666666664, ans=0.125 +2024-07-27 19:06:53,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=51864.0, ans=0.0 +2024-07-27 19:07:01,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=15.0 +2024-07-27 19:07:04,510 INFO [train.py:1114] (0/4) Epoch 4, batch 8250, loss[loss=0.2271, simple_loss=0.3013, pruned_loss=0.07646, over 4896.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3334, pruned_loss=0.09212, over 938931.48 frames. 
], batch size: 13, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:07:06,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51890.666666666664, ans=0.1 +2024-07-27 19:07:08,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51890.666666666664, ans=0.125 +2024-07-27 19:07:09,007 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.190e+01 7.037e+01 8.392e+01 1.258e+02, threshold=1.407e+02, percent-clipped=0.0 +2024-07-27 19:07:15,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0 +2024-07-27 19:07:26,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51930.666666666664, ans=0.125 +2024-07-27 19:07:37,258 INFO [train.py:1114] (0/4) Epoch 4, batch 8300, loss[loss=0.2988, simple_loss=0.3615, pruned_loss=0.1181, over 4894.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3346, pruned_loss=0.09232, over 938791.05 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:07:38,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=51957.333333333336, ans=0.0 +2024-07-27 19:07:39,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=51957.333333333336, ans=0.0 +2024-07-27 19:07:42,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=51957.333333333336, ans=0.0 +2024-07-27 19:07:46,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=51970.666666666664, ans=0.125 +2024-07-27 19:08:01,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=51997.333333333336, ans=0.125 +2024-07-27 19:08:05,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52010.666666666664, ans=0.1 +2024-07-27 19:08:11,101 INFO [train.py:1114] (0/4) Epoch 4, batch 8350, loss[loss=0.2572, simple_loss=0.3291, pruned_loss=0.09266, over 4803.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3342, pruned_loss=0.09217, over 941501.57 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:08:15,728 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 6.472e+01 7.036e+01 8.315e+01 1.538e+02, threshold=1.407e+02, percent-clipped=2.0 +2024-07-27 19:08:16,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=52024.0, ans=0.2 +2024-07-27 19:08:25,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=52050.666666666664, ans=0.025 +2024-07-27 19:08:28,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=52050.666666666664, ans=10.0 +2024-07-27 19:08:28,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.03 vs. 
limit=22.5 +2024-07-27 19:08:30,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=22.5 +2024-07-27 19:08:33,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=52064.0, ans=0.0 +2024-07-27 19:08:35,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=52064.0, ans=0.2 +2024-07-27 19:08:38,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=52077.333333333336, ans=0.125 +2024-07-27 19:08:40,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=52077.333333333336, ans=0.125 +2024-07-27 19:08:44,803 INFO [train.py:1114] (0/4) Epoch 4, batch 8400, loss[loss=0.2274, simple_loss=0.3116, pruned_loss=0.07161, over 4767.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3336, pruned_loss=0.09193, over 940688.81 frames. ], batch size: 12, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:08:53,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=52104.0, ans=0.125 +2024-07-27 19:09:02,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=52117.333333333336, ans=0.95 +2024-07-27 19:09:02,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=52117.333333333336, ans=0.0 +2024-07-27 19:09:07,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52130.666666666664, ans=0.125 +2024-07-27 19:09:13,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=52144.0, ans=6.0 +2024-07-27 19:09:19,594 INFO [train.py:1114] (0/4) Epoch 4, batch 8450, loss[loss=0.2891, simple_loss=0.3593, pruned_loss=0.1094, over 4816.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3336, pruned_loss=0.09177, over 939569.26 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:09:24,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.588e+01 7.394e+01 8.228e+01 1.463e+02, threshold=1.479e+02, percent-clipped=1.0 +2024-07-27 19:09:44,869 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:09:46,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=52210.666666666664, ans=0.0 +2024-07-27 19:09:47,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=52210.666666666664, ans=0.05 +2024-07-27 19:09:51,744 INFO [train.py:1114] (0/4) Epoch 4, batch 8500, loss[loss=0.1952, simple_loss=0.2727, pruned_loss=0.0588, over 4627.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3334, pruned_loss=0.09224, over 939426.06 frames. 
], batch size: 11, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:09:57,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52237.333333333336, ans=0.125 +2024-07-27 19:10:02,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=52237.333333333336, ans=0.09899494936611666 +2024-07-27 19:10:16,608 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.120e+00 +2024-07-27 19:10:35,619 INFO [train.py:1114] (0/4) Epoch 4, batch 8550, loss[loss=0.1953, simple_loss=0.2717, pruned_loss=0.05947, over 4801.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3316, pruned_loss=0.09114, over 939936.14 frames. ], batch size: 11, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:10:40,186 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.270e+01 6.908e+01 7.613e+01 1.129e+02, threshold=1.382e+02, percent-clipped=0.0 +2024-07-27 19:10:40,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52290.666666666664, ans=0.125 +2024-07-27 19:10:42,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.04 vs. limit=15.0 +2024-07-27 19:10:56,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=52330.666666666664, ans=0.0 +2024-07-27 19:11:02,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=52344.0, ans=0.05 +2024-07-27 19:11:03,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=52344.0, ans=0.125 +2024-07-27 19:11:07,982 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:11:09,052 INFO [train.py:1114] (0/4) Epoch 4, batch 8600, loss[loss=0.3136, simple_loss=0.387, pruned_loss=0.1201, over 4799.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3305, pruned_loss=0.09087, over 939174.10 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:11:12,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.05 vs. limit=12.0 +2024-07-27 19:11:13,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.53 vs. limit=15.0 +2024-07-27 19:11:27,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0 +2024-07-27 19:11:35,336 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.64 vs. limit=15.0 +2024-07-27 19:11:43,014 INFO [train.py:1114] (0/4) Epoch 4, batch 8650, loss[loss=0.285, simple_loss=0.3581, pruned_loss=0.1059, over 4901.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3306, pruned_loss=0.09102, over 940222.73 frames. 
], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:11:54,068 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.219e+01 6.477e+01 7.280e+01 8.362e+01 1.223e+02, threshold=1.456e+02, percent-clipped=0.0
+2024-07-27 19:11:55,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52424.0, ans=0.125
+2024-07-27 19:12:02,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=52437.333333333336, ans=0.125
+2024-07-27 19:12:05,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=52450.666666666664, ans=0.025
+2024-07-27 19:12:07,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=52450.666666666664, ans=10.0
+2024-07-27 19:12:07,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=52450.666666666664, ans=0.125
+2024-07-27 19:12:12,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=52464.0, ans=0.125
+2024-07-27 19:12:24,873 INFO [train.py:1114] (0/4) Epoch 4, batch 8700, loss[loss=0.2603, simple_loss=0.339, pruned_loss=0.09077, over 4769.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3316, pruned_loss=0.09167, over 937636.60 frames. ], batch size: 13, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:12:24,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=52490.666666666664, ans=0.125
+2024-07-27 19:12:31,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=52504.0, ans=0.2
+2024-07-27 19:12:38,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=52517.333333333336, ans=0.2
+2024-07-27 19:12:38,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=52517.333333333336, ans=0.025
+2024-07-27 19:12:41,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52517.333333333336, ans=0.1
+2024-07-27 19:12:46,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52530.666666666664, ans=0.1
+2024-07-27 19:12:50,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=52530.666666666664, ans=0.2
+2024-07-27 19:13:03,653 INFO [train.py:1114] (0/4) Epoch 4, batch 8750, loss[loss=0.2781, simple_loss=0.3612, pruned_loss=0.09744, over 4680.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3331, pruned_loss=0.09205, over 936175.65 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:13:05,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0
+2024-07-27 19:16:51,372 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.508e+01 7.367e+01 8.337e+01 1.242e+02, threshold=1.473e+02, percent-clipped=0.0
+2024-07-27 19:16:51,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52557.333333333336, ans=0.1
+2024-07-27 19:16:53,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=52570.666666666664, ans=0.025
+2024-07-27 19:17:26,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=52597.333333333336, ans=0.2
+2024-07-27 19:17:39,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52597.333333333336, ans=0.125
+2024-07-27 19:17:41,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.36 vs. limit=15.0
+2024-07-27 19:17:48,813 INFO [train.py:1114] (0/4) Epoch 4, batch 8800, loss[loss=0.2359, simple_loss=0.3264, pruned_loss=0.07263, over 4937.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3336, pruned_loss=0.09209, over 937089.99 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 32.0
+2024-07-27 19:17:54,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.04 vs. limit=22.5
+2024-07-27 19:18:23,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=52664.0, ans=0.05
+2024-07-27 19:18:26,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=52664.0, ans=0.125
+2024-07-27 19:18:34,294 INFO [train.py:1114] (0/4) Epoch 4, batch 8850, loss[loss=0.2489, simple_loss=0.3324, pruned_loss=0.08273, over 4523.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3347, pruned_loss=0.0935, over 931701.94 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:18:34,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=52690.666666666664, ans=0.125
+2024-07-27 19:18:36,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=52690.666666666664, ans=0.2
+2024-07-27 19:18:38,867 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.151e+01 6.204e+01 6.999e+01 8.264e+01 1.249e+02, threshold=1.400e+02, percent-clipped=0.0
+2024-07-27 19:18:41,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=52704.0, ans=0.2
+2024-07-27 19:18:58,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52717.333333333336, ans=0.1
+2024-07-27 19:19:05,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52730.666666666664, ans=0.125
+2024-07-27 19:19:09,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=52744.0, ans=0.125
+2024-07-27 19:19:15,475 INFO [train.py:1114] (0/4) Epoch 4, batch 8900, loss[loss=0.2168, simple_loss=0.2926, pruned_loss=0.07054, over 4943.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3345, pruned_loss=0.09343, over 930367.66 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:19:15,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52757.333333333336, ans=0.1
+2024-07-27 19:19:21,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=52757.333333333336, ans=0.0
+2024-07-27 19:19:28,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.18 vs. limit=15.0
+2024-07-27 19:19:29,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=52784.0, ans=0.125
+2024-07-27 19:19:30,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.04 vs. limit=22.5
+2024-07-27 19:19:32,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=52784.0, ans=0.0
+2024-07-27 19:19:49,277 INFO [train.py:1114] (0/4) Epoch 4, batch 8950, loss[loss=0.2638, simple_loss=0.3502, pruned_loss=0.0887, over 4504.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3345, pruned_loss=0.09345, over 930813.51 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:19:49,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=52824.0, ans=0.125
+2024-07-27 19:19:52,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=12.0
+2024-07-27 19:19:59,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.556e+01 6.546e+01 7.266e+01 8.543e+01 1.301e+02, threshold=1.453e+02, percent-clipped=0.0
+2024-07-27 19:20:02,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=52837.333333333336, ans=0.0
+2024-07-27 19:20:04,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=52837.333333333336, ans=0.0
+2024-07-27 19:20:09,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=52850.666666666664, ans=0.125
+2024-07-27 19:20:20,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=52864.0, ans=0.2
+2024-07-27 19:20:26,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.92 vs. limit=15.0
+2024-07-27 19:20:27,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.42 vs. limit=15.0
+2024-07-27 19:20:29,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0
+2024-07-27 19:20:29,464 INFO [train.py:1114] (0/4) Epoch 4, batch 9000, loss[loss=0.2166, simple_loss=0.2837, pruned_loss=0.07472, over 4641.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3337, pruned_loss=0.09325, over 933777.74 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:20:29,465 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 19:20:33,798 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.3900, 5.2584, 4.8515, 4.9419], device='cuda:0')
+2024-07-27 19:20:36,940 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.7378, 4.3056, 3.6157, 4.3070], device='cuda:0')
+2024-07-27 19:20:38,701 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.7758, 4.8528, 4.8706, 5.5417], device='cuda:0')
+2024-07-27 19:20:48,915 INFO [train.py:1146] (0/4) Epoch 4, validation: loss=0.2088, simple_loss=0.3114, pruned_loss=0.05305, over 944034.00 frames.
+2024-07-27 19:20:48,916 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 19:20:49,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=52890.666666666664, ans=0.125
+2024-07-27 19:20:56,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=52904.0, ans=0.125
+2024-07-27 19:20:56,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52904.0, ans=0.125
+2024-07-27 19:20:59,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=15.0
+2024-07-27 19:21:00,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=52904.0, ans=0.025
+2024-07-27 19:21:02,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=52917.333333333336, ans=0.0
+2024-07-27 19:21:05,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.99 vs. limit=22.5
+2024-07-27 19:21:24,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52930.666666666664, ans=0.1
+2024-07-27 19:21:27,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=52944.0, ans=0.05
+2024-07-27 19:21:27,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=52944.0, ans=0.025
+2024-07-27 19:21:41,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=52957.333333333336, ans=0.125
+2024-07-27 19:21:41,735 INFO [train.py:1114] (0/4) Epoch 4, batch 9050, loss[loss=0.2553, simple_loss=0.3203, pruned_loss=0.09515, over 4490.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3326, pruned_loss=0.09236, over 933737.95 frames. ], batch size: 10, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:21:46,109 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 6.460e+01 7.493e+01 8.562e+01 1.240e+02, threshold=1.499e+02, percent-clipped=0.0
+2024-07-27 19:21:51,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=52970.666666666664, ans=0.125
+2024-07-27 19:21:51,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.03 vs. limit=22.5
+2024-07-27 19:21:57,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=52984.0, ans=0.025
+2024-07-27 19:22:12,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=53010.666666666664, ans=0.0
+2024-07-27 19:22:16,425 INFO [train.py:1114] (0/4) Epoch 4, batch 9100, loss[loss=0.235, simple_loss=0.3202, pruned_loss=0.0749, over 4934.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3311, pruned_loss=0.09138, over 936628.69 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:22:21,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53024.0, ans=0.125
+2024-07-27 19:22:24,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=53037.333333333336, ans=0.125
+2024-07-27 19:22:26,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53037.333333333336, ans=0.1
+2024-07-27 19:22:51,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=53064.0, ans=0.125
+2024-07-27 19:22:53,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=53064.0, ans=0.2
+2024-07-27 19:22:54,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=53064.0, ans=0.0
+2024-07-27 19:23:04,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.85 vs. limit=22.5
+2024-07-27 19:23:04,832 INFO [train.py:1114] (0/4) Epoch 4, batch 9150, loss[loss=0.2355, simple_loss=0.3161, pruned_loss=0.07746, over 4814.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3323, pruned_loss=0.09214, over 935096.43 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:23:11,026 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.280e+01 7.131e+01 8.307e+01 1.469e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 19:23:12,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=53104.0, ans=0.0
+2024-07-27 19:23:23,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53117.333333333336, ans=0.1
+2024-07-27 19:23:23,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=15.0
+2024-07-27 19:23:25,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=53130.666666666664, ans=0.0
+2024-07-27 19:23:25,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=53130.666666666664, ans=0.2
+2024-07-27 19:23:29,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=53130.666666666664, ans=0.2
+2024-07-27 19:23:34,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=53144.0, ans=0.2
+2024-07-27 19:23:35,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.70 vs. limit=15.0
+2024-07-27 19:23:38,852 INFO [train.py:1114] (0/4) Epoch 4, batch 9200, loss[loss=0.262, simple_loss=0.3261, pruned_loss=0.09893, over 4852.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3319, pruned_loss=0.09169, over 936990.09 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:23:40,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53157.333333333336, ans=0.125
+2024-07-27 19:23:47,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.50 vs. limit=15.0
+2024-07-27 19:23:52,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=53184.0, ans=0.0
+2024-07-27 19:23:56,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=53184.0, ans=10.0
+2024-07-27 19:23:57,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=15.0
+2024-07-27 19:23:59,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=53197.333333333336, ans=0.025
+2024-07-27 19:24:00,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=53197.333333333336, ans=0.125
+2024-07-27 19:24:03,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=53197.333333333336, ans=0.125
+2024-07-27 19:24:10,828 INFO [train.py:1114] (0/4) Epoch 4, batch 9250, loss[loss=0.2439, simple_loss=0.3243, pruned_loss=0.08173, over 4639.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3315, pruned_loss=0.0914, over 937782.23 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:24:15,283 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.609e+01 7.603e+01 9.259e+01 1.699e+02, threshold=1.521e+02, percent-clipped=1.0
+2024-07-27 19:24:35,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=53250.666666666664, ans=0.125
+2024-07-27 19:24:39,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=53250.666666666664, ans=0.125
+2024-07-27 19:24:40,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53264.0, ans=0.125
+2024-07-27 19:24:43,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53264.0, ans=0.125
+2024-07-27 19:24:44,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=53264.0, ans=0.0
+2024-07-27 19:24:49,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=53277.333333333336, ans=0.125
+2024-07-27 19:24:50,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=53277.333333333336, ans=0.2
+2024-07-27 19:24:52,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.31 vs. limit=15.0
+2024-07-27 19:24:53,726 INFO [train.py:1114] (0/4) Epoch 4, batch 9300, loss[loss=0.2461, simple_loss=0.3214, pruned_loss=0.08538, over 4773.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3315, pruned_loss=0.09146, over 938003.36 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:25:01,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53304.0, ans=0.125
+2024-07-27 19:25:06,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=53317.333333333336, ans=0.125
+2024-07-27 19:25:07,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=53317.333333333336, ans=0.0
+2024-07-27 19:25:10,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=53317.333333333336, ans=0.04949747468305833
+2024-07-27 19:25:10,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=53317.333333333336, ans=0.025
+2024-07-27 19:25:13,668 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-40000.pt
+2024-07-27 19:25:27,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=53344.0, ans=0.2
+2024-07-27 19:25:38,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53357.333333333336, ans=0.1
+2024-07-27 19:25:39,001 INFO [train.py:1114] (0/4) Epoch 4, batch 9350, loss[loss=0.2625, simple_loss=0.3281, pruned_loss=0.09839, over 4802.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3321, pruned_loss=0.09185, over 935067.74 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:25:40,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=53357.333333333336, ans=0.125
+2024-07-27 19:25:43,196 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.072e+01 6.213e+01 6.915e+01 8.745e+01 1.555e+02, threshold=1.383e+02, percent-clipped=1.0
+2024-07-27 19:25:47,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=53370.666666666664, ans=0.0
+2024-07-27 19:25:55,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=53384.0, ans=0.2
+2024-07-27 19:25:56,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.00 vs. limit=12.0
+2024-07-27 19:25:57,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53384.0, ans=0.1
+2024-07-27 19:26:14,206 INFO [train.py:1114] (0/4) Epoch 4, batch 9400, loss[loss=0.239, simple_loss=0.3289, pruned_loss=0.0745, over 4693.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3305, pruned_loss=0.0904, over 933306.97 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:26:18,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=53424.0, ans=0.0
+2024-07-27 19:26:18,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53424.0, ans=0.125
+2024-07-27 19:26:21,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=53437.333333333336, ans=0.125
+2024-07-27 19:26:37,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53464.0, ans=0.1
+2024-07-27 19:26:39,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=53464.0, ans=0.0
+2024-07-27 19:26:46,990 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:26:47,551 INFO [train.py:1114] (0/4) Epoch 4, batch 9450, loss[loss=0.2105, simple_loss=0.2774, pruned_loss=0.07184, over 4808.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3309, pruned_loss=0.09088, over 932648.43 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:26:53,651 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.978e+01 6.050e+01 6.667e+01 7.624e+01 1.196e+02, threshold=1.333e+02, percent-clipped=0.0
+2024-07-27 19:27:11,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=53530.666666666664, ans=0.95
+2024-07-27 19:27:17,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=53544.0, ans=0.0
+2024-07-27 19:27:21,641 INFO [train.py:1114] (0/4) Epoch 4, batch 9500, loss[loss=0.2101, simple_loss=0.291, pruned_loss=0.0646, over 4706.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3307, pruned_loss=0.09027, over 935060.19 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:27:25,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53557.333333333336, ans=0.1
+2024-07-27 19:27:29,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=53570.666666666664, ans=0.0
+2024-07-27 19:27:34,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=53584.0, ans=0.125
+2024-07-27 19:27:47,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53610.666666666664, ans=0.1
+2024-07-27 19:27:54,668 INFO [train.py:1114] (0/4) Epoch 4, batch 9550, loss[loss=0.2476, simple_loss=0.3245, pruned_loss=0.08532, over 4776.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3305, pruned_loss=0.09062, over 932218.76 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:27:54,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=53624.0, ans=0.025
+2024-07-27 19:27:58,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 6.641e+01 7.346e+01 8.353e+01 1.240e+02, threshold=1.469e+02, percent-clipped=0.0
+2024-07-27 19:28:02,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0
+2024-07-27 19:28:18,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=53650.666666666664, ans=0.0
+2024-07-27 19:28:18,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.12 vs. limit=22.5
+2024-07-27 19:28:24,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53664.0, ans=0.1
+2024-07-27 19:28:25,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=53664.0, ans=0.0
+2024-07-27 19:28:39,654 INFO [train.py:1114] (0/4) Epoch 4, batch 9600, loss[loss=0.3698, simple_loss=0.4261, pruned_loss=0.1568, over 3238.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3315, pruned_loss=0.09068, over 931086.04 frames. ], batch size: 35, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:28:45,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=53704.0, ans=0.125
+2024-07-27 19:29:04,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.85 vs. limit=22.5
+2024-07-27 19:29:05,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.52 vs. limit=15.0
+2024-07-27 19:29:11,821 INFO [train.py:1114] (0/4) Epoch 4, batch 9650, loss[loss=0.2948, simple_loss=0.3724, pruned_loss=0.1086, over 4841.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3319, pruned_loss=0.0918, over 926860.75 frames. ], batch size: 16, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:29:17,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=53757.333333333336, ans=10.0
+2024-07-27 19:29:19,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.349e+01 7.028e+01 7.935e+01 1.425e+02, threshold=1.406e+02, percent-clipped=0.0
+2024-07-27 19:29:40,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53784.0, ans=0.125
+2024-07-27 19:29:41,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53784.0, ans=0.125
+2024-07-27 19:29:49,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=53797.333333333336, ans=0.125
+2024-07-27 19:29:49,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=53797.333333333336, ans=0.0
+2024-07-27 19:29:57,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=53824.0, ans=0.125
+2024-07-27 19:29:57,617 INFO [train.py:1114] (0/4) Epoch 4, batch 9700, loss[loss=0.2995, simple_loss=0.3627, pruned_loss=0.1182, over 4155.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.331, pruned_loss=0.09138, over 924793.97 frames. ], batch size: 25, lr: 1.68e-02, grad_scale: 32.0
+2024-07-27 19:30:04,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53824.0, ans=0.1
+2024-07-27 19:30:05,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=53824.0, ans=0.07
+2024-07-27 19:30:14,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=53850.666666666664, ans=0.125
+2024-07-27 19:30:24,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=53864.0, ans=0.0
+2024-07-27 19:30:25,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53877.333333333336, ans=0.0
+2024-07-27 19:30:27,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53877.333333333336, ans=0.1
+2024-07-27 19:30:33,424 INFO [train.py:1114] (0/4) Epoch 4, batch 9750, loss[loss=0.301, simple_loss=0.3811, pruned_loss=0.1104, over 4676.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3306, pruned_loss=0.09115, over 925810.44 frames. ], batch size: 15, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:30:42,346 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.435e+01 7.103e+01 8.018e+01 1.499e+02, threshold=1.421e+02, percent-clipped=1.0
+2024-07-27 19:30:57,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=53930.666666666664, ans=0.125
+2024-07-27 19:30:58,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=53930.666666666664, ans=0.125
+2024-07-27 19:30:59,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=53930.666666666664, ans=0.125
+2024-07-27 19:31:06,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53944.0, ans=0.125
+2024-07-27 19:31:06,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0
+2024-07-27 19:31:09,171 INFO [train.py:1114] (0/4) Epoch 4, batch 9800, loss[loss=0.2525, simple_loss=0.3398, pruned_loss=0.08258, over 4704.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3308, pruned_loss=0.0916, over 925397.60 frames. ], batch size: 12, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:31:10,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=53957.333333333336, ans=0.2
+2024-07-27 19:31:23,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=53984.0, ans=0.025
+2024-07-27 19:31:25,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=53984.0, ans=0.125
+2024-07-27 19:31:27,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=22.5
+2024-07-27 19:31:40,689 INFO [train.py:1114] (0/4) Epoch 4, batch 9850, loss[loss=0.2329, simple_loss=0.3171, pruned_loss=0.07436, over 4896.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3313, pruned_loss=0.09184, over 927138.26 frames. ], batch size: 15, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:31:46,042 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.505e+01 6.777e+01 7.966e+01 9.401e+01 1.769e+02, threshold=1.593e+02, percent-clipped=1.0
+2024-07-27 19:31:58,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=54050.666666666664, ans=0.0
+2024-07-27 19:32:03,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=54050.666666666664, ans=0.0
+2024-07-27 19:32:14,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54077.333333333336, ans=0.125
+2024-07-27 19:32:17,530 INFO [train.py:1114] (0/4) Epoch 4, batch 9900, loss[loss=0.3449, simple_loss=0.4021, pruned_loss=0.1439, over 4851.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3325, pruned_loss=0.09288, over 926445.33 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 16.0
+2024-07-27 19:32:28,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=54104.0, ans=0.0
+2024-07-27 19:32:28,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=54104.0, ans=0.0
+2024-07-27 19:32:32,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=54104.0, ans=0.07
+2024-07-27 19:32:47,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=54130.666666666664, ans=0.07
+2024-07-27 19:33:02,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=54144.0, ans=0.125
+2024-07-27 19:33:11,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54144.0, ans=0.1
+2024-07-27 19:33:13,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=54144.0, ans=0.125
+2024-07-27 19:33:15,473 INFO [train.py:1114] (0/4) Epoch 4, batch 9950, loss[loss=0.2563, simple_loss=0.3186, pruned_loss=0.09705, over 4821.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3328, pruned_loss=0.09324, over 929465.74 frames. ], batch size: 11, lr: 1.67e-02, grad_scale: 16.0
+2024-07-27 19:33:16,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54157.333333333336, ans=0.125
+2024-07-27 19:33:20,991 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.075e+01 6.568e+01 7.447e+01 8.780e+01 1.338e+02, threshold=1.489e+02, percent-clipped=0.0
+2024-07-27 19:33:50,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=54210.666666666664, ans=0.125
+2024-07-27 19:33:51,662 INFO [train.py:1114] (0/4) Epoch 4, batch 10000, loss[loss=0.2514, simple_loss=0.3428, pruned_loss=0.07997, over 4594.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3348, pruned_loss=0.09417, over 927103.95 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:33:51,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54224.0, ans=0.0
+2024-07-27 19:33:56,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54224.0, ans=0.1
+2024-07-27 19:33:59,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54224.0, ans=0.125
+2024-07-27 19:34:06,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=54237.333333333336, ans=0.0
+2024-07-27 19:34:06,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=54237.333333333336, ans=0.2
+2024-07-27 19:34:07,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=54237.333333333336, ans=0.025
+2024-07-27 19:34:20,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=54250.666666666664, ans=0.025
+2024-07-27 19:34:35,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=15.0
+2024-07-27 19:34:38,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=54290.666666666664, ans=0.125
+2024-07-27 19:34:39,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=20.51 vs. limit=15.0
+2024-07-27 19:34:39,361 INFO [train.py:1114] (0/4) Epoch 4, batch 10050, loss[loss=0.3561, simple_loss=0.391, pruned_loss=0.1606, over 3366.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3389, pruned_loss=0.09664, over 916346.51 frames. ], batch size: 38, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:34:45,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.23 vs. limit=15.0
+2024-07-27 19:34:45,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 6.968e+01 7.682e+01 9.310e+01 1.537e+02, threshold=1.536e+02, percent-clipped=1.0
+2024-07-27 19:35:06,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=54330.666666666664, ans=0.125
+2024-07-27 19:35:14,141 INFO [train.py:1114] (0/4) Epoch 4, batch 10100, loss[loss=0.3259, simple_loss=0.3792, pruned_loss=0.1363, over 3512.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3465, pruned_loss=0.1051, over 863687.74 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:35:17,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=54357.333333333336, ans=15.0
+2024-07-27 19:35:24,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=54370.666666666664, ans=0.2
+2024-07-27 19:35:36,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54397.333333333336, ans=0.1
+2024-07-27 19:35:37,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=54397.333333333336, ans=0.125
+2024-07-27 19:35:38,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=54397.333333333336, ans=0.125
+2024-07-27 19:35:42,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=54410.666666666664, ans=0.125
+2024-07-27 19:35:45,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.99 vs. limit=22.5
+2024-07-27 19:35:45,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=54410.666666666664, ans=0.125
+2024-07-27 19:35:45,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=54410.666666666664, ans=0.0
+2024-07-27 19:35:48,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=54410.666666666664, ans=0.125
+2024-07-27 19:35:49,260 INFO [train.py:1114] (0/4) Epoch 4, batch 10150, loss[loss=0.3324, simple_loss=0.3791, pruned_loss=0.1429, over 3358.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3528, pruned_loss=0.1114, over 820975.02 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:35:51,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54424.0, ans=0.125
+2024-07-27 19:36:02,458 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.057e+01 7.252e+01 7.644e+01 8.757e+01 1.198e+02, threshold=1.529e+02, percent-clipped=0.0
+2024-07-27 19:36:11,434 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:36:13,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=54450.666666666664, ans=0.0
+2024-07-27 19:36:13,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=54450.666666666664, ans=0.125
+2024-07-27 19:36:25,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=54477.333333333336, ans=0.2
+2024-07-27 19:36:28,252 INFO [train.py:1114] (0/4) Epoch 4, batch 10200, loss[loss=0.3035, simple_loss=0.3571, pruned_loss=0.125, over 3416.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3576, pruned_loss=0.1167, over 788903.01 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0
+2024-07-27 19:36:29,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54490.666666666664, ans=0.125
+2024-07-27 19:36:39,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0
+2024-07-27 19:36:44,877 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-4.pt
+2024-07-27 19:37:32,054 INFO [train.py:1114] (0/4) Epoch 5, batch 0, loss[loss=0.1955, simple_loss=0.2857, pruned_loss=0.05267, over 4854.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2857, pruned_loss=0.05267, over 4854.00 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:37:32,055 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 19:37:43,747 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.2167, simple_loss=0.3194, pruned_loss=0.05704, over 944034.00 frames.
+2024-07-27 19:37:43,747 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 19:37:50,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.93 vs. limit=15.0
+2024-07-27 19:37:53,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=54534.666666666664, ans=0.0
+2024-07-27 19:38:01,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0
+2024-07-27 19:38:03,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=54561.333333333336, ans=0.015
+2024-07-27 19:38:05,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=54561.333333333336, ans=0.125
+2024-07-27 19:38:08,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.667e+01 7.198e+01 8.159e+01 1.101e+02, threshold=1.440e+02, percent-clipped=0.0
+2024-07-27 19:43:25,077 INFO [train.py:1114] (0/4) Epoch 5, batch 50, loss[loss=0.217, simple_loss=0.2873, pruned_loss=0.07334, over 4607.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3349, pruned_loss=0.09387, over 207052.99 frames. ], batch size: 11, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:43:25,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54588.0, ans=0.125
+2024-07-27 19:43:55,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54601.333333333336, ans=0.1
+2024-07-27 19:44:15,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=54628.0, ans=0.125
+2024-07-27 19:44:18,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=54628.0, ans=0.125
+2024-07-27 19:44:19,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=54628.0, ans=0.125
+2024-07-27 19:44:38,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=54641.333333333336, ans=0.125
+2024-07-27 19:44:42,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=54641.333333333336, ans=0.125
+2024-07-27 19:44:42,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=12.0
+2024-07-27 19:44:43,621 INFO [train.py:1114] (0/4) Epoch 5, batch 100, loss[loss=0.1895, simple_loss=0.2555, pruned_loss=0.06171, over 4646.00 frames. ], tot_loss[loss=0.259, simple_loss=0.334, pruned_loss=0.09205, over 365648.64 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:45:28,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=54668.0, ans=0.125
+2024-07-27 19:45:30,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=54668.0, ans=0.125
+2024-07-27 19:45:46,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=54681.333333333336, ans=0.125
+2024-07-27 19:45:49,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54694.666666666664, ans=0.1
+2024-07-27 19:45:50,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54694.666666666664, ans=0.125
+2024-07-27 19:45:52,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 6.028e+01 6.816e+01 7.937e+01 1.219e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-27 19:46:11,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=12.0
+2024-07-27 19:46:20,043 INFO [train.py:1114] (0/4) Epoch 5, batch 150, loss[loss=0.2078, simple_loss=0.2914, pruned_loss=0.06208, over 4612.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3323, pruned_loss=0.0909, over 494170.32 frames. ], batch size: 11, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:46:30,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=54721.333333333336, ans=0.125
+2024-07-27 19:46:48,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=54734.666666666664, ans=0.125
+2024-07-27 19:47:05,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=54748.0, ans=0.1
+2024-07-27 19:47:07,945 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:47:20,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=54761.333333333336, ans=0.125
+2024-07-27 19:47:20,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.52 vs. limit=15.0
+2024-07-27 19:47:21,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=54761.333333333336, ans=0.125
+2024-07-27 19:47:50,699 INFO [train.py:1114] (0/4) Epoch 5, batch 200, loss[loss=0.2787, simple_loss=0.3456, pruned_loss=0.1059, over 4452.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3311, pruned_loss=0.09044, over 593745.03 frames. ], batch size: 21, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:47:51,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=54788.0, ans=0.125
+2024-07-27 19:47:56,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=54788.0, ans=0.0
+2024-07-27 19:48:14,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=54801.333333333336, ans=0.2
+2024-07-27 19:48:27,163 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:48:28,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.98 vs. limit=15.0
+2024-07-27 19:48:41,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=54828.0, ans=0.025
+2024-07-27 19:48:41,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.041e+01 6.282e+01 6.864e+01 7.827e+01 1.211e+02, threshold=1.373e+02, percent-clipped=0.0
+2024-07-27 19:49:02,710 INFO [train.py:1114] (0/4) Epoch 5, batch 250, loss[loss=0.2887, simple_loss=0.3633, pruned_loss=0.1071, over 4637.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.33, pruned_loss=0.08948, over 670659.47 frames. ], batch size: 16, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:49:18,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=54881.333333333336, ans=0.09899494936611666
+2024-07-27 19:49:24,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=54894.666666666664, ans=0.125
+2024-07-27 19:49:26,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=54894.666666666664, ans=0.0
+2024-07-27 19:49:40,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=15.0
+2024-07-27 19:49:50,295 INFO [train.py:1114] (0/4) Epoch 5, batch 300, loss[loss=0.2426, simple_loss=0.3268, pruned_loss=0.07918, over 4815.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3286, pruned_loss=0.08849, over 730327.62 frames. ], batch size: 15, lr: 1.55e-02, grad_scale: 32.0
+2024-07-27 19:50:16,106 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.053e+01 6.343e+01 7.108e+01 8.248e+01 1.263e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-27 19:50:18,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=54974.666666666664, ans=0.025
+2024-07-27 19:50:27,364 INFO [train.py:1114] (0/4) Epoch 5, batch 350, loss[loss=0.2623, simple_loss=0.3302, pruned_loss=0.09721, over 4954.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3284, pruned_loss=0.08768, over 776341.13 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:50:34,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=54988.0, ans=0.125
+2024-07-27 19:50:38,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55001.333333333336, ans=0.1
+2024-07-27 19:50:39,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0
+2024-07-27 19:51:12,113 INFO [train.py:1114] (0/4) Epoch 5, batch 400, loss[loss=0.2537, simple_loss=0.3416, pruned_loss=0.08287, over 4683.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3287, pruned_loss=0.08801, over 814142.22 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:51:16,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=55054.666666666664, ans=0.0
+2024-07-27 19:51:17,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=55054.666666666664, ans=0.0
+2024-07-27 19:51:37,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=55081.333333333336, ans=0.125
+2024-07-27 19:51:42,307 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:51:42,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=55094.666666666664, ans=0.125
+2024-07-27 19:51:43,477 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.118e+01 6.055e+01 6.518e+01 7.484e+01 1.056e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-27 19:51:46,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=55108.0, ans=0.0
+2024-07-27 19:51:50,407 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:51:50,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.64 vs. limit=15.0
+2024-07-27 19:52:02,022 INFO [train.py:1114] (0/4) Epoch 5, batch 450, loss[loss=0.2786, simple_loss=0.3572, pruned_loss=0.09995, over 4631.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3297, pruned_loss=0.08925, over 839645.42 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:52:10,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=55134.666666666664, ans=0.0
+2024-07-27 19:52:12,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=55134.666666666664, ans=0.125
+2024-07-27 19:52:15,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=55134.666666666664, ans=0.0
+2024-07-27 19:52:16,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=55134.666666666664, ans=0.0
+2024-07-27 19:52:18,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=55148.0, ans=0.2
+2024-07-27 19:52:23,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0
+2024-07-27 19:52:28,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=55161.333333333336, ans=0.2
+2024-07-27 19:52:30,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=55161.333333333336, ans=0.0
+2024-07-27 19:52:33,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=55174.666666666664, ans=0.125
+2024-07-27 19:52:39,057 INFO [train.py:1114] (0/4) Epoch 5, batch 500, loss[loss=0.2996, simple_loss=0.3705, pruned_loss=0.1144, over 4696.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3287, pruned_loss=0.08846, over 862136.61 frames. ], batch size: 15, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:52:42,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=55188.0, ans=10.0
+2024-07-27 19:52:42,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=55188.0, ans=0.0
+2024-07-27 19:52:52,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=55214.666666666664, ans=0.09899494936611666
+2024-07-27 19:52:58,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55214.666666666664, ans=0.1
+2024-07-27 19:53:02,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=55228.0, ans=0.5
+2024-07-27 19:53:04,123 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.118e+01 6.781e+01 7.848e+01 1.133e+02, threshold=1.356e+02, percent-clipped=0.0
+2024-07-27 19:53:10,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=55241.333333333336, ans=0.0
+2024-07-27 19:53:17,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=55241.333333333336, ans=0.0
+2024-07-27 19:53:19,194 INFO [train.py:1114] (0/4) Epoch 5, batch 550, loss[loss=0.262, simple_loss=0.3443, pruned_loss=0.08987, over 4669.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3269, pruned_loss=0.08733, over 878159.44 frames. ], batch size: 17, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:53:23,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=55254.666666666664, ans=0.0
+2024-07-27 19:53:45,353 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0
+2024-07-27 19:53:47,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=55294.666666666664, ans=0.04949747468305833
+2024-07-27 19:53:57,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55308.0, ans=0.125
+2024-07-27 19:53:59,355 INFO [train.py:1114] (0/4) Epoch 5, batch 600, loss[loss=0.2622, simple_loss=0.333, pruned_loss=0.09572, over 4628.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3267, pruned_loss=0.08702, over 892741.82 frames. ], batch size: 16, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:54:06,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0
+2024-07-27 19:54:14,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55348.0, ans=0.1
+2024-07-27 19:54:16,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0
+2024-07-27 19:54:17,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=55348.0, ans=0.125
+2024-07-27 19:54:22,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55361.333333333336, ans=0.1
+2024-07-27 19:54:22,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=55361.333333333336, ans=0.125
+2024-07-27 19:54:23,111 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.126e+01 6.489e+01 7.020e+01 8.216e+01 1.209e+02, threshold=1.404e+02, percent-clipped=0.0
+2024-07-27 19:54:27,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55374.666666666664, ans=0.125
+2024-07-27 19:54:32,213 INFO [train.py:1114] (0/4) Epoch 5, batch 650, loss[loss=0.275, simple_loss=0.346, pruned_loss=0.102, over 4760.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3251, pruned_loss=0.08662, over 904143.75 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:54:36,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=55388.0, ans=0.0
+2024-07-27 19:54:37,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55388.0, ans=0.125
+2024-07-27 19:54:50,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=55414.666666666664, ans=0.0
+2024-07-27 19:55:01,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55441.333333333336, ans=0.0
+2024-07-27 19:55:09,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.60 vs. limit=15.0
+2024-07-27 19:55:10,124 INFO [train.py:1114] (0/4) Epoch 5, batch 700, loss[loss=0.2331, simple_loss=0.3079, pruned_loss=0.07918, over 4636.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3249, pruned_loss=0.08669, over 911953.86 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:55:24,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=55468.0, ans=0.125
+2024-07-27 19:55:28,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=55481.333333333336, ans=0.125
+2024-07-27 19:55:37,845 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.381e+01 6.482e+01 7.754e+01 9.297e+01 1.843e+02, threshold=1.551e+02, percent-clipped=6.0
+2024-07-27 19:55:41,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=55508.0, ans=0.2
+2024-07-27 19:55:47,635 INFO [train.py:1114] (0/4) Epoch 5, batch 750, loss[loss=0.2262, simple_loss=0.3065, pruned_loss=0.07296, over 4690.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3253, pruned_loss=0.08622, over 918400.74 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:55:52,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.88 vs. limit=12.0
+2024-07-27 19:55:56,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55534.666666666664, ans=0.0
+2024-07-27 19:56:07,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=55548.0, ans=0.04949747468305833
+2024-07-27 19:56:24,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=55561.333333333336, ans=0.025
+2024-07-27 19:56:26,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=55561.333333333336, ans=0.125
+2024-07-27 19:56:37,371 INFO [train.py:1114] (0/4) Epoch 5, batch 800, loss[loss=0.2386, simple_loss=0.3128, pruned_loss=0.08226, over 4862.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3251, pruned_loss=0.08611, over 923308.96 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:56:58,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.60 vs. limit=15.0
+2024-07-27 19:56:58,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=55614.666666666664, ans=0.125
+2024-07-27 19:57:05,530 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.583e+01 6.253e+01 7.054e+01 8.487e+01 1.181e+02, threshold=1.411e+02, percent-clipped=0.0
+2024-07-27 19:57:13,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=55641.333333333336, ans=0.1
+2024-07-27 19:57:17,155 INFO [train.py:1114] (0/4) Epoch 5, batch 850, loss[loss=0.3168, simple_loss=0.378, pruned_loss=0.1278, over 4657.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3261, pruned_loss=0.08677, over 927649.88 frames. ], batch size: 14, lr: 1.54e-02, grad_scale: 32.0
+2024-07-27 19:57:27,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.41 vs. limit=10.0
+2024-07-27 19:57:35,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=55681.333333333336, ans=0.125
+2024-07-27 19:57:45,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.60 vs. limit=10.0
+2024-07-27 19:57:49,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=55708.0, ans=0.025
+2024-07-27 19:57:56,343 INFO [train.py:1114] (0/4) Epoch 5, batch 900, loss[loss=0.2385, simple_loss=0.3114, pruned_loss=0.08284, over 4845.00 frames. ], tot_loss[loss=0.252, simple_loss=0.328, pruned_loss=0.08801, over 928190.48 frames. ], batch size: 12, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 19:58:02,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=55721.333333333336, ans=0.0
+2024-07-27 19:58:10,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=55748.0, ans=0.0
+2024-07-27 19:58:27,265 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:58:32,500 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.369e+01 7.320e+01 8.500e+01 1.312e+02, threshold=1.464e+02, percent-clipped=0.0
+2024-07-27 19:58:53,868 INFO [train.py:1114] (0/4) Epoch 5, batch 950, loss[loss=0.2369, simple_loss=0.3019, pruned_loss=0.08595, over 4778.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3277, pruned_loss=0.08782, over 929981.81 frames. ], batch size: 12, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 19:58:54,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=55788.0, ans=0.0
+2024-07-27 19:58:56,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=55788.0, ans=0.1
+2024-07-27 19:58:59,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55788.0, ans=0.1
+2024-07-27 19:59:00,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=55801.333333333336, ans=0.125
+2024-07-27 19:59:14,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=55814.666666666664, ans=0.5
+2024-07-27 19:59:15,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=55814.666666666664, ans=0.1
+2024-07-27 19:59:21,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=55828.0, ans=0.125
+2024-07-27 19:59:23,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=55828.0, ans=0.125
+2024-07-27 19:59:34,235 INFO [train.py:1114] (0/4) Epoch 5, batch 1000, loss[loss=0.2504, simple_loss=0.3252, pruned_loss=0.08782, over 4967.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3291, pruned_loss=0.08857, over 929807.37 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 19:59:35,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55854.666666666664, ans=0.125
+2024-07-27 20:00:03,956 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.099e+01 6.760e+01 7.878e+01 1.806e+02, threshold=1.352e+02, percent-clipped=1.0
+2024-07-27 20:00:13,328 INFO [train.py:1114] (0/4) Epoch 5, batch 1050, loss[loss=0.2509, simple_loss=0.3299, pruned_loss=0.08598, over 4881.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3281, pruned_loss=0.08826, over 931826.91 frames.
], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:14,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55921.333333333336, ans=0.125 +2024-07-27 20:00:16,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=55921.333333333336, ans=0.125 +2024-07-27 20:00:17,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=55921.333333333336, ans=0.025 +2024-07-27 20:00:25,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=55934.666666666664, ans=0.07 +2024-07-27 20:00:34,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=12.0 +2024-07-27 20:00:43,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55974.666666666664, ans=0.125 +2024-07-27 20:00:47,882 INFO [train.py:1114] (0/4) Epoch 5, batch 1100, loss[loss=0.2619, simple_loss=0.3403, pruned_loss=0.09171, over 4893.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3291, pruned_loss=0.08863, over 933999.28 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:51,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=15.0 +2024-07-27 20:00:53,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.34 vs. limit=15.0 +2024-07-27 20:00:55,612 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:00:55,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=56001.333333333336, ans=0.125 +2024-07-27 20:01:03,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=56014.666666666664, ans=0.1 +2024-07-27 20:01:04,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=56014.666666666664, ans=0.5 +2024-07-27 20:01:07,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=56028.0, ans=0.025 +2024-07-27 20:01:10,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=56028.0, ans=0.125 +2024-07-27 20:01:11,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=56028.0, ans=0.0 +2024-07-27 20:01:11,708 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.284e+01 6.917e+01 8.137e+01 1.279e+02, threshold=1.383e+02, percent-clipped=0.0 +2024-07-27 20:01:17,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=56041.333333333336, ans=0.125 +2024-07-27 20:01:22,328 INFO [train.py:1114] (0/4) Epoch 5, batch 1150, loss[loss=0.2328, simple_loss=0.3129, pruned_loss=0.07639, over 4900.00 frames. 
], tot_loss[loss=0.2535, simple_loss=0.3292, pruned_loss=0.08889, over 933793.87 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:01:32,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=56068.0, ans=0.2 +2024-07-27 20:01:34,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.36 vs. limit=6.0 +2024-07-27 20:01:36,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=56081.333333333336, ans=0.125 +2024-07-27 20:01:44,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=56094.666666666664, ans=0.125 +2024-07-27 20:01:50,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. limit=6.0 +2024-07-27 20:01:52,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 20:01:53,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=56108.0, ans=0.125 +2024-07-27 20:01:57,031 INFO [train.py:1114] (0/4) Epoch 5, batch 1200, loss[loss=0.279, simple_loss=0.3554, pruned_loss=0.1013, over 4881.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3282, pruned_loss=0.08798, over 933030.22 frames. ], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:02:00,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=56121.333333333336, ans=0.125 +2024-07-27 20:02:20,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=56134.666666666664, ans=0.0 +2024-07-27 20:02:29,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=56148.0, ans=0.125 +2024-07-27 20:02:33,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56148.0, ans=0.125 +2024-07-27 20:02:33,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=56161.333333333336, ans=0.125 +2024-07-27 20:02:40,437 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.476e+01 6.643e+01 8.181e+01 1.020e+02 1.586e+02, threshold=1.636e+02, percent-clipped=2.0 +2024-07-27 20:02:51,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.52 vs. limit=15.0 +2024-07-27 20:02:53,265 INFO [train.py:1114] (0/4) Epoch 5, batch 1250, loss[loss=0.2655, simple_loss=0.3425, pruned_loss=0.09426, over 4792.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3277, pruned_loss=0.08697, over 937023.62 frames. 
], batch size: 15, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:02:55,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=56188.0, ans=0.09899494936611666 +2024-07-27 20:03:06,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=56201.333333333336, ans=0.0 +2024-07-27 20:03:21,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=56241.333333333336, ans=0.0 +2024-07-27 20:03:29,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56254.666666666664, ans=0.1 +2024-07-27 20:03:30,286 INFO [train.py:1114] (0/4) Epoch 5, batch 1300, loss[loss=0.2981, simple_loss=0.3675, pruned_loss=0.1143, over 4677.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3271, pruned_loss=0.0871, over 938628.64 frames. ], batch size: 19, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:03:30,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56254.666666666664, ans=0.1 +2024-07-27 20:03:37,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56254.666666666664, ans=0.1 +2024-07-27 20:03:50,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=56281.333333333336, ans=0.1 +2024-07-27 20:03:52,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=56281.333333333336, ans=0.2 +2024-07-27 20:04:00,102 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.264e+01 7.458e+01 8.643e+01 1.456e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 20:04:10,190 INFO [train.py:1114] (0/4) Epoch 5, batch 1350, loss[loss=0.2437, simple_loss=0.3267, pruned_loss=0.08035, over 4755.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3266, pruned_loss=0.08666, over 940659.87 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:04:10,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=56321.333333333336, ans=0.125 +2024-07-27 20:04:13,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=56321.333333333336, ans=0.125 +2024-07-27 20:04:19,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.28 vs. 
limit=15.0 +2024-07-27 20:04:20,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=56334.666666666664, ans=0.125 +2024-07-27 20:04:21,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=56334.666666666664, ans=0.125 +2024-07-27 20:04:36,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=56361.333333333336, ans=0.125 +2024-07-27 20:04:38,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=56361.333333333336, ans=0.125 +2024-07-27 20:04:41,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=56374.666666666664, ans=0.2 +2024-07-27 20:04:47,685 INFO [train.py:1114] (0/4) Epoch 5, batch 1400, loss[loss=0.2484, simple_loss=0.3072, pruned_loss=0.09479, over 4704.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3253, pruned_loss=0.08603, over 942897.91 frames. ], batch size: 11, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:04:49,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.47 vs. limit=15.0 +2024-07-27 20:04:58,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=56401.333333333336, ans=0.05 +2024-07-27 20:04:58,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56401.333333333336, ans=0.125 +2024-07-27 20:05:07,072 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.578e+00 +2024-07-27 20:05:48,349 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.394e+01 7.108e+01 8.417e+01 1.153e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 20:05:57,874 INFO [train.py:1114] (0/4) Epoch 5, batch 1450, loss[loss=0.2495, simple_loss=0.332, pruned_loss=0.08345, over 4679.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3248, pruned_loss=0.08612, over 942902.92 frames. ], batch size: 15, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:06:01,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0 +2024-07-27 20:06:06,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=56468.0, ans=0.025 +2024-07-27 20:06:07,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=56468.0, ans=10.0 +2024-07-27 20:06:09,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=56468.0, ans=0.015 +2024-07-27 20:06:32,436 INFO [train.py:1114] (0/4) Epoch 5, batch 1500, loss[loss=0.2208, simple_loss=0.2947, pruned_loss=0.07351, over 4815.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3256, pruned_loss=0.08596, over 942942.89 frames. 
], batch size: 14, lr: 1.52e-02, grad_scale: 32.0 +2024-07-27 20:06:41,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=56534.666666666664, ans=0.125 +2024-07-27 20:06:42,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=56534.666666666664, ans=0.0 +2024-07-27 20:06:56,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.97 vs. limit=22.5 +2024-07-27 20:06:59,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.351e+01 6.555e+01 7.313e+01 8.345e+01 1.115e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 20:07:03,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=56574.666666666664, ans=0.125 +2024-07-27 20:07:08,559 INFO [train.py:1114] (0/4) Epoch 5, batch 1550, loss[loss=0.2379, simple_loss=0.3227, pruned_loss=0.07655, over 4907.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3253, pruned_loss=0.08591, over 939234.61 frames. ], batch size: 15, lr: 1.52e-02, grad_scale: 32.0 +2024-07-27 20:07:09,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=56588.0, ans=0.2 +2024-07-27 20:07:17,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=56601.333333333336, ans=0.125 +2024-07-27 20:07:30,692 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:07:42,165 INFO [train.py:1114] (0/4) Epoch 5, batch 1600, loss[loss=0.2746, simple_loss=0.3514, pruned_loss=0.09888, over 4870.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3261, pruned_loss=0.08622, over 937495.31 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0 +2024-07-27 20:07:45,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=56654.666666666664, ans=0.125 +2024-07-27 20:07:47,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.81 vs. limit=12.0 +2024-07-27 20:07:48,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=56668.0, ans=0.1 +2024-07-27 20:07:56,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=56681.333333333336, ans=0.125 +2024-07-27 20:08:06,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.974e+01 6.291e+01 7.006e+01 7.974e+01 1.110e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 20:08:08,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=56708.0, ans=0.125 +2024-07-27 20:08:08,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. 
limit=12.0 +2024-07-27 20:08:10,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56708.0, ans=0.1 +2024-07-27 20:08:11,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=56708.0, ans=0.125 +2024-07-27 20:08:12,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=56708.0, ans=0.0 +2024-07-27 20:08:14,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56721.333333333336, ans=0.125 +2024-07-27 20:08:15,476 INFO [train.py:1114] (0/4) Epoch 5, batch 1650, loss[loss=0.2401, simple_loss=0.314, pruned_loss=0.08312, over 4672.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3272, pruned_loss=0.08723, over 937284.27 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:08:32,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=56748.0, ans=0.0 +2024-07-27 20:08:33,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=56748.0, ans=0.125 +2024-07-27 20:08:36,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.90 vs. limit=10.0 +2024-07-27 20:08:41,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=56774.666666666664, ans=0.125 +2024-07-27 20:08:44,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. limit=15.0 +2024-07-27 20:08:49,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-07-27 20:08:50,076 INFO [train.py:1114] (0/4) Epoch 5, batch 1700, loss[loss=0.2616, simple_loss=0.3199, pruned_loss=0.1017, over 4706.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3265, pruned_loss=0.08739, over 939171.74 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:08:58,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=56801.333333333336, ans=0.07 +2024-07-27 20:09:16,485 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.475e+01 7.223e+01 8.445e+01 1.275e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 20:09:23,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=56841.333333333336, ans=0.5 +2024-07-27 20:09:25,645 INFO [train.py:1114] (0/4) Epoch 5, batch 1750, loss[loss=0.1922, simple_loss=0.278, pruned_loss=0.05313, over 4806.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3247, pruned_loss=0.08635, over 940053.86 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:09:39,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=56868.0, ans=0.0 +2024-07-27 20:09:42,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=26.51 vs. 
limit=22.5 +2024-07-27 20:09:46,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.36 vs. limit=15.0 +2024-07-27 20:09:54,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=56894.666666666664, ans=0.0 +2024-07-27 20:09:59,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56908.0, ans=0.1 +2024-07-27 20:10:02,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=15.0 +2024-07-27 20:10:02,665 INFO [train.py:1114] (0/4) Epoch 5, batch 1800, loss[loss=0.219, simple_loss=0.3121, pruned_loss=0.06295, over 4631.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3244, pruned_loss=0.08623, over 940542.81 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:10:11,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=56934.666666666664, ans=0.2 +2024-07-27 20:10:20,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=56948.0, ans=0.09899494936611666 +2024-07-27 20:10:26,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=56961.333333333336, ans=0.125 +2024-07-27 20:10:26,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.038e+01 6.233e+01 6.949e+01 8.152e+01 1.410e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 20:10:28,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=56974.666666666664, ans=0.0 +2024-07-27 20:10:29,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=56974.666666666664, ans=0.025 +2024-07-27 20:10:29,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=56974.666666666664, ans=0.025 +2024-07-27 20:10:35,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=56974.666666666664, ans=0.125 +2024-07-27 20:10:36,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0 +2024-07-27 20:10:37,642 INFO [train.py:1114] (0/4) Epoch 5, batch 1850, loss[loss=0.2428, simple_loss=0.3265, pruned_loss=0.07952, over 4810.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3233, pruned_loss=0.08556, over 940423.78 frames. 
], batch size: 14, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:10:43,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56988.0, ans=0.125 +2024-07-27 20:10:44,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=57001.333333333336, ans=0.125 +2024-07-27 20:10:51,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=57014.666666666664, ans=0.125 +2024-07-27 20:10:55,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.88 vs. limit=22.5 +2024-07-27 20:11:03,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57028.0, ans=0.125 +2024-07-27 20:11:11,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57041.333333333336, ans=0.125 +2024-07-27 20:11:12,480 INFO [train.py:1114] (0/4) Epoch 5, batch 1900, loss[loss=0.3012, simple_loss=0.3699, pruned_loss=0.1163, over 4669.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3235, pruned_loss=0.08533, over 941586.42 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:11:17,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.11 vs. limit=15.0 +2024-07-27 20:11:22,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.62 vs. limit=22.5 +2024-07-27 20:11:23,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=22.5 +2024-07-27 20:11:35,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=57094.666666666664, ans=0.125 +2024-07-27 20:11:36,826 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.851e+01 6.078e+01 6.608e+01 7.914e+01 1.166e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-27 20:11:46,535 INFO [train.py:1114] (0/4) Epoch 5, batch 1950, loss[loss=0.2677, simple_loss=0.3323, pruned_loss=0.1016, over 4899.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3265, pruned_loss=0.08663, over 943564.71 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:11:47,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=57121.333333333336, ans=0.0 +2024-07-27 20:11:48,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=57121.333333333336, ans=0.5 +2024-07-27 20:11:48,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57121.333333333336, ans=0.125 +2024-07-27 20:12:04,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57148.0, ans=0.0 +2024-07-27 20:12:07,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.15 vs. 
limit=15.0 +2024-07-27 20:12:13,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57161.333333333336, ans=0.125 +2024-07-27 20:12:22,155 INFO [train.py:1114] (0/4) Epoch 5, batch 2000, loss[loss=0.2278, simple_loss=0.2944, pruned_loss=0.08066, over 4808.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3273, pruned_loss=0.0865, over 940841.61 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:12:31,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=57201.333333333336, ans=0.2 +2024-07-27 20:12:35,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57214.666666666664, ans=0.125 +2024-07-27 20:12:35,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=57214.666666666664, ans=0.025 +2024-07-27 20:12:36,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.96 vs. limit=15.0 +2024-07-27 20:12:37,317 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:12:40,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=57214.666666666664, ans=0.0 +2024-07-27 20:12:41,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=57214.666666666664, ans=0.0 +2024-07-27 20:12:46,751 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.356e+01 7.460e+01 8.642e+01 1.315e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 20:12:54,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=57241.333333333336, ans=0.07 +2024-07-27 20:12:56,123 INFO [train.py:1114] (0/4) Epoch 5, batch 2050, loss[loss=0.2111, simple_loss=0.2868, pruned_loss=0.06773, over 4617.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.325, pruned_loss=0.08565, over 938809.75 frames. ], batch size: 11, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:12:56,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.15 vs. limit=22.5 +2024-07-27 20:12:57,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.30 vs. limit=15.0 +2024-07-27 20:13:21,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57294.666666666664, ans=0.1 +2024-07-27 20:13:27,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=57308.0, ans=0.2 +2024-07-27 20:13:27,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57308.0, ans=0.125 +2024-07-27 20:13:28,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.01 vs. 
limit=15.0 +2024-07-27 20:13:28,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=57308.0, ans=0.1 +2024-07-27 20:13:29,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.22 vs. limit=15.0 +2024-07-27 20:13:29,980 INFO [train.py:1114] (0/4) Epoch 5, batch 2100, loss[loss=0.2967, simple_loss=0.3667, pruned_loss=0.1133, over 4759.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3242, pruned_loss=0.08505, over 940604.38 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:13:33,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=57321.333333333336, ans=0.0 +2024-07-27 20:13:34,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57321.333333333336, ans=0.125 +2024-07-27 20:13:39,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.66 vs. limit=15.0 +2024-07-27 20:13:41,144 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.55 vs. limit=10.0 +2024-07-27 20:13:41,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.07 vs. limit=22.5 +2024-07-27 20:13:45,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.01 vs. limit=15.0 +2024-07-27 20:13:53,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.52 vs. limit=10.0 +2024-07-27 20:13:53,936 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.957e+01 6.234e+01 6.918e+01 8.302e+01 1.274e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 20:13:54,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57361.333333333336, ans=0.1 +2024-07-27 20:13:59,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=57374.666666666664, ans=0.05 +2024-07-27 20:14:00,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57374.666666666664, ans=0.1 +2024-07-27 20:14:03,261 INFO [train.py:1114] (0/4) Epoch 5, batch 2150, loss[loss=0.2389, simple_loss=0.3239, pruned_loss=0.07697, over 4890.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3228, pruned_loss=0.08444, over 943747.31 frames. 
], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:14:09,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=57401.333333333336, ans=0.125 +2024-07-27 20:14:34,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57441.333333333336, ans=0.1 +2024-07-27 20:14:34,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57441.333333333336, ans=0.1 +2024-07-27 20:14:38,783 INFO [train.py:1114] (0/4) Epoch 5, batch 2200, loss[loss=0.2818, simple_loss=0.3541, pruned_loss=0.1048, over 4804.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3225, pruned_loss=0.08398, over 943058.47 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:14:49,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=57468.0, ans=0.0 +2024-07-27 20:14:49,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.81 vs. limit=15.0 +2024-07-27 20:15:03,285 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.386e+01 7.473e+01 9.024e+01 1.169e+02, threshold=1.495e+02, percent-clipped=0.0 +2024-07-27 20:15:10,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57508.0, ans=0.1 +2024-07-27 20:15:10,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=57508.0, ans=0.2 +2024-07-27 20:15:14,990 INFO [train.py:1114] (0/4) Epoch 5, batch 2250, loss[loss=0.2519, simple_loss=0.3299, pruned_loss=0.08693, over 4688.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3234, pruned_loss=0.08464, over 941372.90 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:15:15,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=57521.333333333336, ans=0.125 +2024-07-27 20:15:21,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=57534.666666666664, ans=0.0 +2024-07-27 20:15:30,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.66 vs. limit=12.0 +2024-07-27 20:15:42,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.11 vs. limit=22.5 +2024-07-27 20:15:50,311 INFO [train.py:1114] (0/4) Epoch 5, batch 2300, loss[loss=0.2247, simple_loss=0.303, pruned_loss=0.07324, over 4938.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3221, pruned_loss=0.08406, over 939102.45 frames. 
], batch size: 12, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:15:53,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=57588.0, ans=0.0 +2024-07-27 20:16:00,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=57601.333333333336, ans=0.125 +2024-07-27 20:16:13,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57628.0, ans=0.1 +2024-07-27 20:16:16,288 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.130e+01 6.014e+01 6.647e+01 7.772e+01 1.123e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-27 20:16:18,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.81 vs. limit=15.0 +2024-07-27 20:16:21,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57641.333333333336, ans=0.125 +2024-07-27 20:16:29,318 INFO [train.py:1114] (0/4) Epoch 5, batch 2350, loss[loss=0.2549, simple_loss=0.3178, pruned_loss=0.09598, over 4635.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3224, pruned_loss=0.08473, over 941060.93 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:16:32,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57654.666666666664, ans=0.1 +2024-07-27 20:16:33,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57654.666666666664, ans=0.125 +2024-07-27 20:16:48,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57681.333333333336, ans=0.125 +2024-07-27 20:16:49,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57694.666666666664, ans=0.125 +2024-07-27 20:17:00,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.93 vs. limit=15.0 +2024-07-27 20:17:03,204 INFO [train.py:1114] (0/4) Epoch 5, batch 2400, loss[loss=0.2367, simple_loss=0.3141, pruned_loss=0.07966, over 4627.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.323, pruned_loss=0.08477, over 940752.52 frames. ], batch size: 12, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:17:06,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.79 vs. limit=15.0 +2024-07-27 20:17:27,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.252e+01 6.682e+01 7.735e+01 1.071e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-27 20:17:33,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=57774.666666666664, ans=0.5 +2024-07-27 20:17:36,683 INFO [train.py:1114] (0/4) Epoch 5, batch 2450, loss[loss=0.237, simple_loss=0.3157, pruned_loss=0.0791, over 4688.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3251, pruned_loss=0.08572, over 936645.66 frames. 
], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:17:36,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=57788.0, ans=0.0 +2024-07-27 20:17:38,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-27 20:17:50,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=57801.333333333336, ans=0.2 +2024-07-27 20:17:52,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=57814.666666666664, ans=0.125 +2024-07-27 20:17:55,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=57814.666666666664, ans=15.0 +2024-07-27 20:17:55,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.65 vs. limit=22.5 +2024-07-27 20:18:06,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.58 vs. limit=22.5 +2024-07-27 20:18:06,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=57828.0, ans=0.125 +2024-07-27 20:18:20,692 INFO [train.py:1114] (0/4) Epoch 5, batch 2500, loss[loss=0.2533, simple_loss=0.3416, pruned_loss=0.08251, over 4812.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.325, pruned_loss=0.08568, over 938653.32 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:18:22,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.49 vs. limit=15.0 +2024-07-27 20:18:32,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=57868.0, ans=0.0 +2024-07-27 20:18:32,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.11 vs. limit=15.0 +2024-07-27 20:18:39,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57881.333333333336, ans=0.1 +2024-07-27 20:18:51,012 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 6.442e+01 7.418e+01 9.024e+01 1.336e+02, threshold=1.484e+02, percent-clipped=0.0 +2024-07-27 20:18:57,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=57908.0, ans=0.125 +2024-07-27 20:19:01,025 INFO [train.py:1114] (0/4) Epoch 5, batch 2550, loss[loss=0.1949, simple_loss=0.2806, pruned_loss=0.05465, over 4796.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3244, pruned_loss=0.08537, over 938134.13 frames. 
], batch size: 11, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:19:01,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57921.333333333336, ans=0.125 +2024-07-27 20:19:12,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=57934.666666666664, ans=0.025 +2024-07-27 20:19:23,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=57961.333333333336, ans=0.2 +2024-07-27 20:19:34,682 INFO [train.py:1114] (0/4) Epoch 5, batch 2600, loss[loss=0.2218, simple_loss=0.2979, pruned_loss=0.07284, over 4898.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3248, pruned_loss=0.08599, over 936974.48 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:19:35,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=57988.0, ans=15.0 +2024-07-27 20:19:39,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57988.0, ans=0.1 +2024-07-27 20:19:41,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=58001.333333333336, ans=0.2 +2024-07-27 20:19:45,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=58001.333333333336, ans=0.125 +2024-07-27 20:19:49,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.62 vs. limit=15.0 +2024-07-27 20:19:58,988 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 6.417e+01 7.272e+01 8.306e+01 1.432e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-27 20:20:11,729 INFO [train.py:1114] (0/4) Epoch 5, batch 2650, loss[loss=0.2311, simple_loss=0.3012, pruned_loss=0.08051, over 4648.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3242, pruned_loss=0.086, over 939698.10 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:20:24,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=58054.666666666664, ans=0.2 +2024-07-27 20:20:27,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=58068.0, ans=0.0 +2024-07-27 20:20:32,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=12.0 +2024-07-27 20:20:37,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.05 vs. limit=15.0 +2024-07-27 20:20:47,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58108.0, ans=0.1 +2024-07-27 20:20:59,496 INFO [train.py:1114] (0/4) Epoch 5, batch 2700, loss[loss=0.2436, simple_loss=0.3367, pruned_loss=0.07527, over 4744.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3244, pruned_loss=0.0861, over 939708.77 frames. 
], batch size: 14, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:20:59,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58121.333333333336, ans=0.1
+2024-07-27 20:21:27,049 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.186e+01 6.835e+01 7.719e+01 1.191e+02, threshold=1.367e+02, percent-clipped=0.0
+2024-07-27 20:21:30,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=58174.666666666664, ans=0.0
+2024-07-27 20:21:38,104 INFO [train.py:1114] (0/4) Epoch 5, batch 2750, loss[loss=0.2242, simple_loss=0.3088, pruned_loss=0.0698, over 4706.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3233, pruned_loss=0.08578, over 939681.70 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:21:40,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=58188.0, ans=15.0
+2024-07-27 20:21:43,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=58188.0, ans=15.0
+2024-07-27 20:21:51,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. limit=6.0
+2024-07-27 20:21:52,808 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:22:00,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=58228.0, ans=0.0
+2024-07-27 20:22:04,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=58228.0, ans=0.125
+2024-07-27 20:22:08,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=58228.0, ans=0.2
+2024-07-27 20:22:15,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.83 vs. limit=22.5
+2024-07-27 20:22:15,876 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:22:19,869 INFO [train.py:1114] (0/4) Epoch 5, batch 2800, loss[loss=0.365, simple_loss=0.4019, pruned_loss=0.1641, over 3374.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3228, pruned_loss=0.08575, over 937554.71 frames. ], batch size: 36, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:22:25,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=58254.666666666664, ans=0.2
+2024-07-27 20:22:27,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=58268.0, ans=0.0
+2024-07-27 20:22:30,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=58268.0, ans=0.0
+2024-07-27 20:22:30,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=58268.0, ans=0.125
+2024-07-27 20:22:32,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=58281.333333333336, ans=0.0
+2024-07-27 20:22:38,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=58281.333333333336, ans=0.0
+2024-07-27 20:22:40,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=58294.666666666664, ans=0.0
+2024-07-27 20:22:44,011 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 6.174e+01 6.624e+01 7.261e+01 1.719e+02, threshold=1.325e+02, percent-clipped=1.0
+2024-07-27 20:22:45,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=58294.666666666664, ans=0.05
+2024-07-27 20:22:53,233 INFO [train.py:1114] (0/4) Epoch 5, batch 2850, loss[loss=0.2297, simple_loss=0.306, pruned_loss=0.0767, over 4955.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3238, pruned_loss=0.08654, over 936428.37 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:23:00,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=58334.666666666664, ans=0.2
+2024-07-27 20:23:07,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=58348.0, ans=0.125
+2024-07-27 20:23:13,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=58361.333333333336, ans=0.125
+2024-07-27 20:23:20,118 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=12.0
+2024-07-27 20:23:23,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=58374.666666666664, ans=0.125
+2024-07-27 20:23:26,075 INFO [train.py:1114] (0/4) Epoch 5, batch 2900, loss[loss=0.2114, simple_loss=0.2937, pruned_loss=0.06452, over 4836.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3245, pruned_loss=0.08608, over 940123.48 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:23:37,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=58401.333333333336, ans=0.125
+2024-07-27 20:23:41,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0
+2024-07-27 20:23:43,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58414.666666666664, ans=0.1
+2024-07-27 20:23:44,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=58414.666666666664, ans=0.125
+2024-07-27 20:23:51,051 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.205e+01 6.873e+01 7.885e+01 1.448e+02, threshold=1.375e+02, percent-clipped=1.0
+2024-07-27 20:24:01,530 INFO [train.py:1114] (0/4) Epoch 5, batch 2950, loss[loss=0.2579, simple_loss=0.3245, pruned_loss=0.09568, over 4703.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3233, pruned_loss=0.08549, over 939257.51 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:24:05,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=58454.666666666664, ans=0.2
+2024-07-27 20:24:07,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0
+2024-07-27 20:24:30,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0
+2024-07-27 20:24:35,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=58508.0, ans=0.2
+2024-07-27 20:24:35,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.07 vs. limit=15.0
+2024-07-27 20:24:39,827 INFO [train.py:1114] (0/4) Epoch 5, batch 3000, loss[loss=0.2183, simple_loss=0.3001, pruned_loss=0.06828, over 4758.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3217, pruned_loss=0.08425, over 938699.89 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:24:39,828 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 20:25:07,124 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.2018, simple_loss=0.3051, pruned_loss=0.04931, over 944034.00 frames.
+2024-07-27 20:25:07,228 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 20:25:09,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.60 vs. limit=22.5
+2024-07-27 20:25:11,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=58521.333333333336, ans=0.2
+2024-07-27 20:25:17,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=22.5
+2024-07-27 20:25:17,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=58534.666666666664, ans=0.125
+2024-07-27 20:25:19,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58534.666666666664, ans=0.1
+2024-07-27 20:25:50,555 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:25:51,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 6.112e+01 6.899e+01 7.724e+01 1.072e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-27 20:26:01,546 INFO [train.py:1114] (0/4) Epoch 5, batch 3050, loss[loss=0.275, simple_loss=0.3543, pruned_loss=0.09788, over 4644.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3233, pruned_loss=0.08514, over 937730.04 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:26:04,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=58588.0, ans=0.0
+2024-07-27 20:26:11,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=58601.333333333336, ans=0.0
+2024-07-27 20:26:30,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58628.0, ans=0.1
+2024-07-27 20:26:31,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=58628.0, ans=0.125
+2024-07-27 20:26:32,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58628.0, ans=0.0
+2024-07-27 20:26:35,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58641.333333333336, ans=0.125
+2024-07-27 20:26:40,770 INFO [train.py:1114] (0/4) Epoch 5, batch 3100, loss[loss=0.273, simple_loss=0.3486, pruned_loss=0.09873, over 4646.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3241, pruned_loss=0.08582, over 938412.41 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 16.0
+2024-07-27 20:26:50,390 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-44000.pt
+2024-07-27 20:29:00,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58694.666666666664, ans=0.1
+2024-07-27 20:29:03,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 6.229e+01 6.955e+01 7.996e+01 1.498e+02, threshold=1.391e+02, percent-clipped=1.0
+2024-07-27 20:29:30,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=58708.0, ans=0.125
+2024-07-27 20:29:36,746 INFO [train.py:1114] (0/4) Epoch 5, batch 3150, loss[loss=0.2874, simple_loss=0.3647, pruned_loss=0.1051, over 4633.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3245, pruned_loss=0.08587, over 938397.70 frames. ], batch size: 17, lr: 1.50e-02, grad_scale: 16.0
+2024-07-27 20:29:37,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.94 vs. limit=15.0
+2024-07-27 20:29:38,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=58721.333333333336, ans=0.025
+2024-07-27 20:29:58,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0
+2024-07-27 20:30:01,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=58761.333333333336, ans=8.0
+2024-07-27 20:30:01,341 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:30:20,996 INFO [train.py:1114] (0/4) Epoch 5, batch 3200, loss[loss=0.2144, simple_loss=0.3033, pruned_loss=0.06271, over 4821.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.324, pruned_loss=0.08535, over 939795.12 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:30:22,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=58788.0, ans=0.0
+2024-07-27 20:30:23,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=58788.0, ans=0.125
+2024-07-27 20:30:40,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=58801.333333333336, ans=0.125
+2024-07-27 20:30:59,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=58814.666666666664, ans=0.125
+2024-07-27 20:31:09,396 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 6.414e+01 7.232e+01 8.731e+01 1.300e+02, threshold=1.446e+02, percent-clipped=0.0
+2024-07-27 20:31:13,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=58841.333333333336, ans=0.0
+2024-07-27 20:31:16,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=58854.666666666664, ans=0.0
+2024-07-27 20:31:17,447 INFO [train.py:1114] (0/4) Epoch 5, batch 3250, loss[loss=0.2284, simple_loss=0.3136, pruned_loss=0.0716, over 4935.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3248, pruned_loss=0.08533, over 940949.25 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:31:18,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0
+2024-07-27 20:31:18,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.62 vs. limit=10.0
+2024-07-27 20:31:19,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58854.666666666664, ans=0.0
+2024-07-27 20:31:19,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58854.666666666664, ans=0.1
+2024-07-27 20:31:21,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.42 vs. limit=12.0
+2024-07-27 20:31:23,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=58868.0, ans=0.2
+2024-07-27 20:31:26,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=58868.0, ans=0.0
+2024-07-27 20:31:28,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=58868.0, ans=0.0
+2024-07-27 20:31:51,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=58894.666666666664, ans=0.125
+2024-07-27 20:31:52,996 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:32:06,920 INFO [train.py:1114] (0/4) Epoch 5, batch 3300, loss[loss=0.2693, simple_loss=0.3412, pruned_loss=0.09873, over 4779.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3232, pruned_loss=0.08547, over 941317.45 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:32:25,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=58934.666666666664, ans=0.125
+2024-07-27 20:32:49,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=58961.333333333336, ans=0.0
+2024-07-27 20:32:52,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=58961.333333333336, ans=0.0
+2024-07-27 20:32:54,500 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 6.806e+01 7.832e+01 9.289e+01 1.732e+02, threshold=1.566e+02, percent-clipped=1.0
+2024-07-27 20:32:57,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.64 vs. limit=22.5
+2024-07-27 20:33:05,629 INFO [train.py:1114] (0/4) Epoch 5, batch 3350, loss[loss=0.2855, simple_loss=0.3627, pruned_loss=0.1041, over 4615.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3257, pruned_loss=0.08666, over 939414.06 frames. ], batch size: 17, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:33:05,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=58988.0, ans=0.125
+2024-07-27 20:33:11,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58988.0, ans=0.1
+2024-07-27 20:33:11,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59001.333333333336, ans=0.1
+2024-07-27 20:33:23,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=59014.666666666664, ans=0.0
+2024-07-27 20:33:24,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59014.666666666664, ans=0.1
+2024-07-27 20:33:27,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.09 vs. limit=15.0
+2024-07-27 20:33:34,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=59041.333333333336, ans=0.025
+2024-07-27 20:33:39,078 INFO [train.py:1114] (0/4) Epoch 5, batch 3400, loss[loss=0.2296, simple_loss=0.3007, pruned_loss=0.07923, over 4817.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3254, pruned_loss=0.08735, over 938630.39 frames. ], batch size: 11, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:33:49,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.82 vs. limit=15.0
+2024-07-27 20:33:51,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=59068.0, ans=0.2
+2024-07-27 20:33:52,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59081.333333333336, ans=0.125
+2024-07-27 20:33:55,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=59081.333333333336, ans=0.125
+2024-07-27 20:33:59,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.80 vs. limit=22.5
+2024-07-27 20:34:04,618 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.358e+01 7.066e+01 8.502e+01 1.252e+02, threshold=1.413e+02, percent-clipped=0.0
+2024-07-27 20:34:07,864 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.45 vs. limit=15.0
+2024-07-27 20:34:08,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.00 vs. limit=22.5
+2024-07-27 20:34:10,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=59108.0, ans=0.0
+2024-07-27 20:34:12,604 INFO [train.py:1114] (0/4) Epoch 5, batch 3450, loss[loss=0.2843, simple_loss=0.3553, pruned_loss=0.1067, over 4798.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3259, pruned_loss=0.08779, over 939314.03 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:34:14,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59121.333333333336, ans=0.2
+2024-07-27 20:34:18,606 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:34:18,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=59134.666666666664, ans=0.0
+2024-07-27 20:34:19,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=59134.666666666664, ans=0.0
+2024-07-27 20:34:25,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=59148.0, ans=0.125
+2024-07-27 20:34:32,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59161.333333333336, ans=0.125
+2024-07-27 20:34:39,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59174.666666666664, ans=0.125
+2024-07-27 20:34:40,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=59174.666666666664, ans=0.125
+2024-07-27 20:34:46,091 INFO [train.py:1114] (0/4) Epoch 5, batch 3500, loss[loss=0.1881, simple_loss=0.2741, pruned_loss=0.05106, over 4941.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3249, pruned_loss=0.08694, over 939756.59 frames. ], batch size: 12, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:34:53,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59201.333333333336, ans=0.125
+2024-07-27 20:34:55,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=59201.333333333336, ans=0.125
+2024-07-27 20:34:59,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=59214.666666666664, ans=0.0
+2024-07-27 20:35:02,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59214.666666666664, ans=0.125
+2024-07-27 20:35:07,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=59228.0, ans=0.0
+2024-07-27 20:35:13,413 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.758e+01 6.287e+01 6.647e+01 7.437e+01 1.544e+02, threshold=1.329e+02, percent-clipped=1.0
+2024-07-27 20:35:14,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.whiten.whitening_limit, batch_count=59228.0, ans=12.0
+2024-07-27 20:35:21,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.83 vs. limit=22.5
+2024-07-27 20:35:21,565 INFO [train.py:1114] (0/4) Epoch 5, batch 3550, loss[loss=0.2332, simple_loss=0.3205, pruned_loss=0.07296, over 4669.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3241, pruned_loss=0.08584, over 939900.02 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:35:51,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=59308.0, ans=0.0
+2024-07-27 20:35:55,990 INFO [train.py:1114] (0/4) Epoch 5, batch 3600, loss[loss=0.2263, simple_loss=0.3157, pruned_loss=0.06842, over 4958.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3248, pruned_loss=0.08579, over 941301.06 frames. ], batch size: 13, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:36:00,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=59321.333333333336, ans=0.0
+2024-07-27 20:36:05,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.75 vs. limit=15.0
+2024-07-27 20:36:24,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=59361.333333333336, ans=0.0
+2024-07-27 20:36:26,682 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.226e+01 6.920e+01 7.848e+01 1.341e+02, threshold=1.384e+02, percent-clipped=1.0
+2024-07-27 20:36:28,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=59374.666666666664, ans=0.2
+2024-07-27 20:36:35,259 INFO [train.py:1114] (0/4) Epoch 5, batch 3650, loss[loss=0.2636, simple_loss=0.3444, pruned_loss=0.09141, over 4896.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3247, pruned_loss=0.08579, over 941216.97 frames. ], batch size: 15, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:36:43,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=59401.333333333336, ans=0.125
+2024-07-27 20:36:54,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.33 vs. limit=15.0
+2024-07-27 20:37:02,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.52 vs. limit=10.0
+2024-07-27 20:37:05,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59441.333333333336, ans=0.1
+2024-07-27 20:37:11,925 INFO [train.py:1114] (0/4) Epoch 5, batch 3700, loss[loss=0.2667, simple_loss=0.3412, pruned_loss=0.09609, over 4930.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3249, pruned_loss=0.08537, over 942173.07 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:37:21,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=59468.0, ans=0.2
+2024-07-27 20:37:28,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59481.333333333336, ans=0.125
+2024-07-27 20:37:33,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=59494.666666666664, ans=0.125
+2024-07-27 20:37:35,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=59494.666666666664, ans=0.0
+2024-07-27 20:37:37,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.383e+01 7.266e+01 8.369e+01 1.200e+02, threshold=1.453e+02, percent-clipped=0.0
+2024-07-27 20:37:45,363 INFO [train.py:1114] (0/4) Epoch 5, batch 3750, loss[loss=0.2122, simple_loss=0.2763, pruned_loss=0.07402, over 4803.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3238, pruned_loss=0.0852, over 943490.12 frames. ], batch size: 11, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:38:09,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=15.0
+2024-07-27 20:38:14,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.54 vs. limit=15.0
+2024-07-27 20:38:18,263 INFO [train.py:1114] (0/4) Epoch 5, batch 3800, loss[loss=0.2265, simple_loss=0.3106, pruned_loss=0.07123, over 4802.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3234, pruned_loss=0.0851, over 941920.53 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:38:19,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=59588.0, ans=0.125
+2024-07-27 20:38:20,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.27 vs. limit=22.5
+2024-07-27 20:38:22,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=59588.0, ans=0.125
+2024-07-27 20:38:31,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=59614.666666666664, ans=0.0
+2024-07-27 20:38:35,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59614.666666666664, ans=0.1
+2024-07-27 20:38:38,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.55 vs. limit=22.5
+2024-07-27 20:38:43,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=59628.0, ans=0.0
+2024-07-27 20:38:44,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 6.382e+01 7.291e+01 8.683e+01 1.605e+02, threshold=1.458e+02, percent-clipped=1.0
+2024-07-27 20:38:51,385 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:38:52,565 INFO [train.py:1114] (0/4) Epoch 5, batch 3850, loss[loss=0.2745, simple_loss=0.3539, pruned_loss=0.09751, over 4604.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3221, pruned_loss=0.08415, over 942563.65 frames. ], batch size: 16, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:38:58,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59668.0, ans=0.1
+2024-07-27 20:39:02,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0
+2024-07-27 20:39:08,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=59681.333333333336, ans=10.0
+2024-07-27 20:39:09,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59681.333333333336, ans=0.1
+2024-07-27 20:39:13,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=59694.666666666664, ans=0.2
+2024-07-27 20:39:13,936 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0
+2024-07-27 20:39:15,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59694.666666666664, ans=0.1
+2024-07-27 20:39:22,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=59708.0, ans=0.125
+2024-07-27 20:39:26,344 INFO [train.py:1114] (0/4) Epoch 5, batch 3900, loss[loss=0.2297, simple_loss=0.3036, pruned_loss=0.07788, over 4813.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3219, pruned_loss=0.08384, over 942704.47 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:39:28,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=59721.333333333336, ans=0.125
+2024-07-27 20:39:42,509 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.07 vs. limit=15.0
+2024-07-27 20:39:44,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=59748.0, ans=0.125
+2024-07-27 20:39:45,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=59761.333333333336, ans=0.04949747468305833
+2024-07-27 20:39:49,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0
+2024-07-27 20:39:51,227 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.368e+01 7.161e+01 8.539e+01 1.176e+02, threshold=1.432e+02, percent-clipped=0.0
+2024-07-27 20:39:52,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=59774.666666666664, ans=0.125
+2024-07-27 20:39:59,296 INFO [train.py:1114] (0/4) Epoch 5, batch 3950, loss[loss=0.2561, simple_loss=0.3468, pruned_loss=0.08273, over 4825.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3211, pruned_loss=0.0828, over 944624.81 frames. ], batch size: 16, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:40:05,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0
+2024-07-27 20:40:10,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=59801.333333333336, ans=0.09899494936611666
+2024-07-27 20:40:10,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=59801.333333333336, ans=0.125
+2024-07-27 20:40:11,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59801.333333333336, ans=0.125
+2024-07-27 20:40:13,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=59814.666666666664, ans=0.125
+2024-07-27 20:40:21,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=59828.0, ans=0.0
+2024-07-27 20:40:21,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59828.0, ans=0.125
+2024-07-27 20:40:32,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=27.41 vs. limit=15.0
+2024-07-27 20:40:33,149 INFO [train.py:1114] (0/4) Epoch 5, batch 4000, loss[loss=0.1878, simple_loss=0.2606, pruned_loss=0.05748, over 4768.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3216, pruned_loss=0.08337, over 941017.78 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:40:33,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=59854.666666666664, ans=0.125
+2024-07-27 20:40:47,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.89 vs. limit=22.5
+2024-07-27 20:40:48,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.97 vs. limit=15.0
+2024-07-27 20:40:52,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=59881.333333333336, ans=0.125
+2024-07-27 20:40:53,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=59881.333333333336, ans=0.0
+2024-07-27 20:40:53,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=59881.333333333336, ans=0.125
+2024-07-27 20:40:55,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=59894.666666666664, ans=0.025
+2024-07-27 20:41:01,639 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.270e+01 7.255e+01 8.485e+01 1.075e+02, threshold=1.451e+02, percent-clipped=0.0
+2024-07-27 20:41:09,820 INFO [train.py:1114] (0/4) Epoch 5, batch 4050, loss[loss=0.3597, simple_loss=0.4008, pruned_loss=0.1593, over 3392.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3233, pruned_loss=0.08453, over 939560.39 frames. ], batch size: 35, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:41:30,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=59961.333333333336, ans=22.5
+2024-07-27 20:41:34,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=59961.333333333336, ans=0.125
+2024-07-27 20:41:40,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=59974.666666666664, ans=0.125
+2024-07-27 20:41:45,358 INFO [train.py:1114] (0/4) Epoch 5, batch 4100, loss[loss=0.2456, simple_loss=0.3369, pruned_loss=0.07711, over 4903.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3239, pruned_loss=0.08497, over 938663.12 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:41:46,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=59988.0, ans=0.0
+2024-07-27 20:42:01,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=60014.666666666664, ans=0.2
+2024-07-27 20:42:05,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.17 vs. limit=22.5
+2024-07-27 20:42:06,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60028.0, ans=0.0
+2024-07-27 20:42:12,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.593e+01 8.156e+01 1.046e+02 1.897e+02, threshold=1.631e+02, percent-clipped=3.0
+2024-07-27 20:42:14,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=60041.333333333336, ans=0.04949747468305833
+2024-07-27 20:42:15,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=60041.333333333336, ans=0.0
+2024-07-27 20:42:18,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=60041.333333333336, ans=22.5
+2024-07-27 20:42:19,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=60041.333333333336, ans=0.025
+2024-07-27 20:42:20,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60054.666666666664, ans=0.125
+2024-07-27 20:42:20,646 INFO [train.py:1114] (0/4) Epoch 5, batch 4150, loss[loss=0.2067, simple_loss=0.3004, pruned_loss=0.05645, over 4831.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3228, pruned_loss=0.08448, over 938310.52 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:42:23,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60054.666666666664, ans=0.1
+2024-07-27 20:42:26,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.74 vs. limit=10.0
+2024-07-27 20:42:30,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.44 vs. limit=15.0
+2024-07-27 20:42:55,757 INFO [train.py:1114] (0/4) Epoch 5, batch 4200, loss[loss=0.2607, simple_loss=0.3349, pruned_loss=0.0932, over 4906.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3231, pruned_loss=0.08492, over 939660.51 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:42:56,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.80 vs. limit=22.5
+2024-07-27 20:43:01,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=60121.333333333336, ans=0.2
+2024-07-27 20:43:05,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.84 vs. limit=12.0
+2024-07-27 20:43:05,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=60134.666666666664, ans=0.125
+2024-07-27 20:43:13,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=60148.0, ans=0.0
+2024-07-27 20:43:20,328 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.148e+01 7.735e+01 9.943e+01 1.461e+02, threshold=1.547e+02, percent-clipped=0.0
+2024-07-27 20:43:28,474 INFO [train.py:1114] (0/4) Epoch 5, batch 4250, loss[loss=0.2564, simple_loss=0.32, pruned_loss=0.09636, over 4643.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3247, pruned_loss=0.08575, over 940526.08 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:43:51,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=60228.0, ans=0.025
+2024-07-27 20:43:54,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=60241.333333333336, ans=0.05
+2024-07-27 20:44:01,220 INFO [train.py:1114] (0/4) Epoch 5, batch 4300, loss[loss=0.2692, simple_loss=0.3438, pruned_loss=0.09728, over 4750.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.325, pruned_loss=0.08613, over 939868.80 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:44:07,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=60268.0, ans=0.125
+2024-07-27 20:44:08,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.62 vs. limit=22.5
+2024-07-27 20:44:12,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60268.0, ans=0.1
+2024-07-27 20:44:14,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=60281.333333333336, ans=0.2
+2024-07-27 20:44:15,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60281.333333333336, ans=0.125
+2024-07-27 20:44:26,303 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.170e+01 6.762e+01 7.364e+01 1.372e+02, threshold=1.352e+02, percent-clipped=0.0
+2024-07-27 20:44:34,438 INFO [train.py:1114] (0/4) Epoch 5, batch 4350, loss[loss=0.2419, simple_loss=0.3108, pruned_loss=0.08654, over 4761.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3249, pruned_loss=0.08587, over 940964.29 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:44:35,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=60321.333333333336, ans=0.125
+2024-07-27 20:44:36,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0
+2024-07-27 20:44:47,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60348.0, ans=0.125
+2024-07-27 20:44:50,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=60348.0, ans=0.125
+2024-07-27 20:44:52,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60348.0, ans=0.1
+2024-07-27 20:44:54,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=60361.333333333336, ans=0.0
+2024-07-27 20:44:57,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60361.333333333336, ans=0.125
+2024-07-27 20:44:58,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60361.333333333336, ans=0.125
+2024-07-27 20:45:03,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60374.666666666664, ans=0.125
+2024-07-27 20:45:05,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=60374.666666666664, ans=0.125
+2024-07-27 20:45:06,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.95 vs. limit=15.0
+2024-07-27 20:45:08,034 INFO [train.py:1114] (0/4) Epoch 5, batch 4400, loss[loss=0.2327, simple_loss=0.314, pruned_loss=0.07571, over 4819.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3247, pruned_loss=0.08582, over 940718.39 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:45:16,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.75 vs. limit=22.5
+2024-07-27 20:45:17,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=60401.333333333336, ans=0.0
+2024-07-27 20:45:22,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.69 vs. limit=15.0
+2024-07-27 20:45:25,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=60414.666666666664, ans=0.125
+2024-07-27 20:45:32,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=60428.0, ans=0.0
+2024-07-27 20:45:32,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60428.0, ans=0.1
+2024-07-27 20:45:33,392 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.353e+01 6.947e+01 8.100e+01 1.220e+02, threshold=1.389e+02, percent-clipped=0.0
+2024-07-27 20:45:38,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60441.333333333336, ans=0.125
+2024-07-27 20:45:41,684 INFO [train.py:1114] (0/4) Epoch 5, batch 4450, loss[loss=0.2268, simple_loss=0.2919, pruned_loss=0.08088, over 4948.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3249, pruned_loss=0.08599, over 939328.76 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0
+2024-07-27 20:45:44,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.21 vs. limit=10.0
+2024-07-27 20:45:47,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=60468.0, ans=0.025
+2024-07-27 20:45:58,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=60481.333333333336, ans=0.5
+2024-07-27 20:46:01,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=60494.666666666664, ans=0.2
+2024-07-27 20:46:07,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=60494.666666666664, ans=0.2
+2024-07-27 20:46:13,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60508.0, ans=0.1
+2024-07-27 20:46:16,935 INFO [train.py:1114] (0/4) Epoch 5, batch 4500, loss[loss=0.2598, simple_loss=0.3425, pruned_loss=0.08861, over 4738.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3249, pruned_loss=0.08584, over 938432.48 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:46:21,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=60521.333333333336, ans=0.2
+2024-07-27 20:46:41,955 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.144e+01 7.215e+01 8.358e+01 1.180e+02, threshold=1.443e+02, percent-clipped=0.0
+2024-07-27 20:46:49,960 INFO [train.py:1114] (0/4) Epoch 5, batch 4550, loss[loss=0.2479, simple_loss=0.3231, pruned_loss=0.08637, over 4897.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3248, pruned_loss=0.08567, over 940184.77 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:46:50,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=60588.0, ans=0.0
+2024-07-27 20:46:52,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=60588.0, ans=0.95
+2024-07-27 20:46:55,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=60588.0, ans=0.5
+2024-07-27 20:46:59,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.53 vs. limit=10.0
+2024-07-27 20:47:13,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=60628.0, ans=0.125
+2024-07-27 20:47:25,727 INFO [train.py:1114] (0/4) Epoch 5, batch 4600, loss[loss=0.2632, simple_loss=0.3323, pruned_loss=0.09702, over 4514.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3249, pruned_loss=0.08603, over 938491.97 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:47:31,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=60654.666666666664, ans=0.125
+2024-07-27 20:47:36,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=60668.0, ans=0.04949747468305833
+2024-07-27 20:47:47,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=60694.666666666664, ans=0.125
+2024-07-27 20:47:53,339 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 6.426e+01 7.546e+01 8.603e+01 1.273e+02, threshold=1.509e+02, percent-clipped=0.0
+2024-07-27 20:48:03,070 INFO [train.py:1114] (0/4) Epoch 5, batch 4650, loss[loss=0.2821, simple_loss=0.3607, pruned_loss=0.1017, over 4831.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.325, pruned_loss=0.08576, over 940139.70 frames. ], batch size: 16, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:48:09,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.55 vs. limit=22.5
+2024-07-27 20:48:12,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5
+2024-07-27 20:48:16,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60748.0, ans=0.125
+2024-07-27 20:48:23,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60761.333333333336, ans=0.1
+2024-07-27 20:48:25,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=60761.333333333336, ans=0.125
+2024-07-27 20:48:28,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=60761.333333333336, ans=0.025
+2024-07-27 20:48:36,509 INFO [train.py:1114] (0/4) Epoch 5, batch 4700, loss[loss=0.2138, simple_loss=0.2744, pruned_loss=0.0766, over 4707.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3233, pruned_loss=0.08484, over 937293.46 frames. ], batch size: 11, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:48:38,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60788.0, ans=0.125
+2024-07-27 20:49:01,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=60828.0, ans=0.0
+2024-07-27 20:49:02,022 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+01 6.344e+01 7.380e+01 9.406e+01 1.591e+02, threshold=1.476e+02, percent-clipped=1.0
+2024-07-27 20:49:02,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=60828.0, ans=0.2
+2024-07-27 20:49:04,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=60841.333333333336, ans=0.09899494936611666
+2024-07-27 20:49:10,642 INFO [train.py:1114] (0/4) Epoch 5, batch 4750, loss[loss=0.2839, simple_loss=0.3526, pruned_loss=0.1076, over 4502.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3238, pruned_loss=0.08565, over 935185.38 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:49:10,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=60854.666666666664, ans=0.0
+2024-07-27 20:49:15,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=60854.666666666664, ans=0.0
+2024-07-27 20:49:22,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60868.0, ans=0.125
+2024-07-27 20:49:29,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.02 vs. limit=15.0
+2024-07-27 20:49:44,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60908.0, ans=0.125
+2024-07-27 20:49:44,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0
+2024-07-27 20:49:45,209 INFO [train.py:1114] (0/4) Epoch 5, batch 4800, loss[loss=0.2395, simple_loss=0.303, pruned_loss=0.08798, over 4692.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3232, pruned_loss=0.0856, over 933119.43 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:50:10,635 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.152e+01 6.705e+01 7.633e+01 9.767e+01, threshold=1.341e+02, percent-clipped=0.0
+2024-07-27 20:50:16,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0
+2024-07-27 20:50:18,673 INFO [train.py:1114] (0/4) Epoch 5, batch 4850, loss[loss=0.237, simple_loss=0.3207, pruned_loss=0.07664, over 4733.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3225, pruned_loss=0.08498, over 932457.54 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:50:22,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=60988.0, ans=0.125
+2024-07-27 20:50:26,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.78 vs. limit=15.0
+2024-07-27 20:50:27,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=61001.333333333336, ans=15.0
+2024-07-27 20:50:33,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61014.666666666664, ans=0.1
+2024-07-27 20:50:51,589 INFO [train.py:1114] (0/4) Epoch 5, batch 4900, loss[loss=0.2783, simple_loss=0.3431, pruned_loss=0.1068, over 4762.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3214, pruned_loss=0.08501, over 934041.37 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:50:59,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=61068.0, ans=0.0
+2024-07-27 20:51:01,301 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.06 vs. limit=12.0
+2024-07-27 20:51:06,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=61081.333333333336, ans=15.0
+2024-07-27 20:51:20,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.112e+01 6.910e+01 8.321e+01 1.535e+02, threshold=1.382e+02, percent-clipped=5.0
+2024-07-27 20:51:20,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=61094.666666666664, ans=0.0
+2024-07-27 20:51:27,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0
+2024-07-27 20:51:35,800 INFO [train.py:1114] (0/4) Epoch 5, batch 4950, loss[loss=0.3122, simple_loss=0.3679, pruned_loss=0.1282, over 3428.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.324, pruned_loss=0.0865, over 931629.68 frames. ], batch size: 35, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:51:37,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61121.333333333336, ans=0.1
+2024-07-27 20:51:53,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.93 vs. limit=15.0
+2024-07-27 20:52:09,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=61161.333333333336, ans=0.05
+2024-07-27 20:52:15,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.19 vs. limit=22.5
+2024-07-27 20:52:18,394 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:52:18,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=61188.0, ans=0.125
+2024-07-27 20:52:19,454 INFO [train.py:1114] (0/4) Epoch 5, batch 5000, loss[loss=0.2728, simple_loss=0.3507, pruned_loss=0.09745, over 4661.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3227, pruned_loss=0.08529, over 935338.45 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:52:24,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=61188.0, ans=0.04949747468305833
+2024-07-27 20:52:29,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=61201.333333333336, ans=0.0
+2024-07-27 20:52:36,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=15.0
+2024-07-27 20:52:41,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61228.0, ans=0.0
+2024-07-27 20:52:48,115 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.055e+01 6.404e+01 7.517e+01 8.761e+01 1.608e+02, threshold=1.503e+02, percent-clipped=2.0
+2024-07-27 20:53:06,826 INFO [train.py:1114] (0/4) Epoch 5, batch 5050, loss[loss=0.1958, simple_loss=0.2667, pruned_loss=0.06248, over 4846.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3227, pruned_loss=0.08501, over 937709.59 frames. ], batch size: 12, lr: 1.47e-02, grad_scale: 32.0
+2024-07-27 20:53:07,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=61254.666666666664, ans=0.125
+2024-07-27 20:53:08,268 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.806e-01
+2024-07-27 20:53:11,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=12.0
+2024-07-27 20:53:11,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61254.666666666664, ans=0.1
+2024-07-27 20:53:13,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=61268.0, ans=0.125
+2024-07-27 20:53:22,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61281.333333333336, ans=0.125
+2024-07-27 20:53:26,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=61281.333333333336, ans=0.2
+2024-07-27 20:53:38,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61294.666666666664, ans=0.125
+2024-07-27 20:53:48,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=61308.0, ans=0.0
+2024-07-27 20:53:50,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=61308.0, ans=0.125
+2024-07-27 20:53:53,199 INFO [train.py:1114] (0/4) Epoch 5, batch 5100, loss[loss=0.2136, simple_loss=0.2875, pruned_loss=0.06982, over 4777.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3233, pruned_loss=0.08509, over 934881.84 frames. ], batch size: 12, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 20:54:19,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=61361.333333333336, ans=0.2
+2024-07-27 20:54:20,585 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+01 6.137e+01 6.842e+01 8.040e+01 3.164e+02, threshold=1.368e+02, percent-clipped=1.0
+2024-07-27 20:54:21,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=61361.333333333336, ans=0.2
+2024-07-27 20:54:24,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61374.666666666664, ans=0.1
+2024-07-27 20:54:25,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=61374.666666666664, ans=0.125
+2024-07-27 20:54:29,769 INFO [train.py:1114] (0/4) Epoch 5, batch 5150, loss[loss=0.2393, simple_loss=0.3145, pruned_loss=0.08201, over 4840.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3242, pruned_loss=0.08595, over 935729.92 frames. ], batch size: 16, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 20:54:30,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=61388.0, ans=0.2
+2024-07-27 20:54:48,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=61401.333333333336, ans=0.125
+2024-07-27 20:54:49,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=61414.666666666664, ans=0.1
+2024-07-27 20:55:06,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.49 vs. limit=15.0
+2024-07-27 20:55:10,110 INFO [train.py:1114] (0/4) Epoch 5, batch 5200, loss[loss=0.2192, simple_loss=0.3044, pruned_loss=0.06704, over 4669.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3226, pruned_loss=0.08444, over 936299.00 frames. ], batch size: 14, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 20:55:10,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=61454.666666666664, ans=0.0
+2024-07-27 20:55:22,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=61468.0, ans=0.125
+2024-07-27 20:55:23,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=61468.0, ans=0.125
+2024-07-27 20:55:29,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=61481.333333333336, ans=0.1
+2024-07-27 20:55:38,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61494.666666666664, ans=0.125
+2024-07-27 20:55:42,001 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.018e+01 6.430e+01 7.385e+01 8.844e+01 1.293e+02, threshold=1.477e+02, percent-clipped=0.0
+2024-07-27 20:55:45,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=61508.0, ans=0.05
+2024-07-27 20:55:49,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0
+2024-07-27 20:55:50,197 INFO [train.py:1114] (0/4) Epoch 5, batch 5250, loss[loss=0.2152, simple_loss=0.2877, pruned_loss=0.07131, over 4901.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3214, pruned_loss=0.08389, over 936091.03 frames. ], batch size: 13, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 20:55:53,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=61521.333333333336, ans=0.0
+2024-07-27 20:55:58,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=61534.666666666664, ans=0.125
+2024-07-27 20:55:58,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=61534.666666666664, ans=0.2
+2024-07-27 20:56:24,404 INFO [train.py:1114] (0/4) Epoch 5, batch 5300, loss[loss=0.2704, simple_loss=0.3555, pruned_loss=0.09264, over 4644.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3216, pruned_loss=0.08392, over 934762.15 frames. ], batch size: 16, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 20:56:24,638 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:56:35,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=61601.333333333336, ans=0.125
+2024-07-27 20:56:35,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=61601.333333333336, ans=0.2
+2024-07-27 20:56:36,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=15.0
+2024-07-27 20:56:40,223 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.52 vs.
limit=15.0 +2024-07-27 20:56:43,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=61614.666666666664, ans=0.05 +2024-07-27 20:56:45,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=61614.666666666664, ans=0.125 +2024-07-27 20:56:54,056 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.200e+01 6.732e+01 7.536e+01 1.097e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-27 20:56:54,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=61628.0, ans=0.2 +2024-07-27 20:57:02,190 INFO [train.py:1114] (0/4) Epoch 5, batch 5350, loss[loss=0.2102, simple_loss=0.2769, pruned_loss=0.0718, over 4552.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3211, pruned_loss=0.08362, over 936601.75 frames. ], batch size: 10, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:57:04,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61654.666666666664, ans=0.125 +2024-07-27 20:57:05,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=61654.666666666664, ans=0.0 +2024-07-27 20:57:22,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=61694.666666666664, ans=0.2 +2024-07-27 20:57:37,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=61708.0, ans=0.125 +2024-07-27 20:57:41,135 INFO [train.py:1114] (0/4) Epoch 5, batch 5400, loss[loss=0.2948, simple_loss=0.3654, pruned_loss=0.112, over 4303.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3229, pruned_loss=0.0847, over 931044.06 frames. ], batch size: 26, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:57:45,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61721.333333333336, ans=0.1 +2024-07-27 20:57:45,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.25 vs. 
limit=22.5 +2024-07-27 20:57:53,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=61734.666666666664, ans=0.025 +2024-07-27 20:58:03,147 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:58:03,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=61761.333333333336, ans=0.125 +2024-07-27 20:58:07,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61761.333333333336, ans=0.125 +2024-07-27 20:58:07,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=61761.333333333336, ans=0.125 +2024-07-27 20:58:08,958 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.299e+01 6.991e+01 7.974e+01 1.272e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 20:58:09,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=61774.666666666664, ans=0.0 +2024-07-27 20:58:22,863 INFO [train.py:1114] (0/4) Epoch 5, batch 5450, loss[loss=0.2616, simple_loss=0.3222, pruned_loss=0.1005, over 4700.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3215, pruned_loss=0.08382, over 933711.97 frames. ], batch size: 11, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:58:23,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.73 vs. limit=22.5 +2024-07-27 20:58:33,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=61801.333333333336, ans=0.125 +2024-07-27 20:58:42,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=61828.0, ans=0.0 +2024-07-27 20:58:43,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=61828.0, ans=0.125 +2024-07-27 20:58:50,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.27 vs. limit=15.0 +2024-07-27 20:58:58,517 INFO [train.py:1114] (0/4) Epoch 5, batch 5500, loss[loss=0.2694, simple_loss=0.338, pruned_loss=0.1004, over 4410.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3217, pruned_loss=0.08406, over 931306.89 frames. 
], batch size: 26, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:59:00,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=61854.666666666664, ans=0.125 +2024-07-27 20:59:02,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=61854.666666666664, ans=0.0 +2024-07-27 20:59:10,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61868.0, ans=0.125 +2024-07-27 20:59:14,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=61881.333333333336, ans=22.5 +2024-07-27 20:59:21,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=61894.666666666664, ans=0.0 +2024-07-27 20:59:22,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=61894.666666666664, ans=0.125 +2024-07-27 20:59:24,622 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.189e+01 6.179e+01 6.952e+01 7.770e+01 1.227e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 20:59:26,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=61908.0, ans=0.125 +2024-07-27 20:59:32,511 INFO [train.py:1114] (0/4) Epoch 5, batch 5550, loss[loss=0.2109, simple_loss=0.2937, pruned_loss=0.06406, over 4699.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3221, pruned_loss=0.08461, over 933579.38 frames. ], batch size: 12, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:59:39,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=12.0 +2024-07-27 20:59:39,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61921.333333333336, ans=0.1 +2024-07-27 20:59:42,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.77 vs. limit=15.0 +2024-07-27 20:59:46,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.44 vs. limit=6.0 +2024-07-27 21:00:04,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61974.666666666664, ans=0.1 +2024-07-27 21:00:06,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=61974.666666666664, ans=0.125 +2024-07-27 21:00:07,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61974.666666666664, ans=0.0 +2024-07-27 21:00:08,737 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:00:09,915 INFO [train.py:1114] (0/4) Epoch 5, batch 5600, loss[loss=0.2849, simple_loss=0.3552, pruned_loss=0.1073, over 4745.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3233, pruned_loss=0.08515, over 934373.24 frames. 
], batch size: 14, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 21:00:19,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61988.0, ans=0.1 +2024-07-27 21:00:53,099 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.040e+01 6.647e+01 7.605e+01 1.041e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-27 21:00:53,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=62028.0, ans=0.125 +2024-07-27 21:00:57,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.61 vs. limit=22.5 +2024-07-27 21:01:01,047 INFO [train.py:1114] (0/4) Epoch 5, batch 5650, loss[loss=0.2616, simple_loss=0.3327, pruned_loss=0.09527, over 4577.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3224, pruned_loss=0.08463, over 936946.58 frames. ], batch size: 21, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 21:01:06,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=62054.666666666664, ans=0.125 +2024-07-27 21:01:07,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=62068.0, ans=0.09899494936611666 +2024-07-27 21:01:09,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=62068.0, ans=0.125 +2024-07-27 21:01:19,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=62068.0, ans=0.025 +2024-07-27 21:01:20,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=62068.0, ans=0.125 +2024-07-27 21:01:23,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=62081.333333333336, ans=0.09899494936611666 +2024-07-27 21:01:27,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=62081.333333333336, ans=0.125 +2024-07-27 21:01:36,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=62108.0, ans=0.0 +2024-07-27 21:01:42,139 INFO [train.py:1114] (0/4) Epoch 5, batch 5700, loss[loss=0.2716, simple_loss=0.3395, pruned_loss=0.1018, over 4695.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3227, pruned_loss=0.08449, over 938109.71 frames. 
], batch size: 13, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 21:01:52,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=62134.666666666664, ans=0.0 +2024-07-27 21:01:57,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=62148.0, ans=0.125 +2024-07-27 21:02:07,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=62161.333333333336, ans=0.0 +2024-07-27 21:02:07,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.365e+01 6.985e+01 7.849e+01 1.267e+02, threshold=1.397e+02, percent-clipped=0.0 +2024-07-27 21:02:10,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62161.333333333336, ans=0.1 +2024-07-27 21:02:10,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=62161.333333333336, ans=0.125 +2024-07-27 21:02:18,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=62174.666666666664, ans=0.125 +2024-07-27 21:02:19,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=62174.666666666664, ans=0.035 +2024-07-27 21:02:20,524 INFO [train.py:1114] (0/4) Epoch 5, batch 5750, loss[loss=0.217, simple_loss=0.2948, pruned_loss=0.06961, over 4702.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3229, pruned_loss=0.08441, over 938056.46 frames. ], batch size: 19, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:02:24,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-07-27 21:02:37,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-27 21:02:52,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=62214.666666666664, ans=0.2 +2024-07-27 21:02:55,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=15.0 +2024-07-27 21:03:03,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=15.0 +2024-07-27 21:03:03,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=62241.333333333336, ans=0.0 +2024-07-27 21:03:06,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=62241.333333333336, ans=0.035 +2024-07-27 21:03:09,033 INFO [train.py:1114] (0/4) Epoch 5, batch 5800, loss[loss=0.2833, simple_loss=0.3535, pruned_loss=0.1066, over 4739.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3238, pruned_loss=0.08487, over 937005.94 frames. ], batch size: 19, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:03:14,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. 
limit=15.0 +2024-07-27 21:03:17,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62254.666666666664, ans=0.125 +2024-07-27 21:03:19,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=62268.0, ans=0.015 +2024-07-27 21:03:40,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.90 vs. limit=15.0 +2024-07-27 21:03:44,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=62294.666666666664, ans=0.125 +2024-07-27 21:03:46,566 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.301e+01 6.477e+01 7.083e+01 8.928e+01 1.486e+02, threshold=1.417e+02, percent-clipped=3.0 +2024-07-27 21:03:57,647 INFO [train.py:1114] (0/4) Epoch 5, batch 5850, loss[loss=0.2955, simple_loss=0.3708, pruned_loss=0.1101, over 4452.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3233, pruned_loss=0.08472, over 937714.63 frames. ], batch size: 21, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:04:08,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=62321.333333333336, ans=0.125 +2024-07-27 21:04:13,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=62334.666666666664, ans=0.0 +2024-07-27 21:04:14,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=62334.666666666664, ans=0.125 +2024-07-27 21:04:14,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62334.666666666664, ans=0.125 +2024-07-27 21:04:21,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=62348.0, ans=0.2 +2024-07-27 21:04:36,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=62348.0, ans=0.125 +2024-07-27 21:04:47,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=62361.333333333336, ans=0.0 +2024-07-27 21:05:00,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=62374.666666666664, ans=0.025 +2024-07-27 21:05:03,538 INFO [train.py:1114] (0/4) Epoch 5, batch 5900, loss[loss=0.264, simple_loss=0.3386, pruned_loss=0.09472, over 4686.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3224, pruned_loss=0.08457, over 938064.79 frames. 
], batch size: 15, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:05:07,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62388.0, ans=0.125 +2024-07-27 21:05:19,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=62414.666666666664, ans=0.025 +2024-07-27 21:05:29,072 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.135e+01 6.365e+01 7.384e+01 8.628e+01 1.400e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 21:05:33,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=62441.333333333336, ans=0.0 +2024-07-27 21:05:37,086 INFO [train.py:1114] (0/4) Epoch 5, batch 5950, loss[loss=0.2396, simple_loss=0.3252, pruned_loss=0.07696, over 4684.00 frames. ], tot_loss[loss=0.245, simple_loss=0.322, pruned_loss=0.08402, over 939863.84 frames. ], batch size: 15, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:05:44,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0 +2024-07-27 21:05:44,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=62468.0, ans=0.5 +2024-07-27 21:05:48,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=62468.0, ans=0.015 +2024-07-27 21:05:57,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62481.333333333336, ans=0.125 +2024-07-27 21:05:58,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=62481.333333333336, ans=15.0 +2024-07-27 21:06:13,074 INFO [train.py:1114] (0/4) Epoch 5, batch 6000, loss[loss=0.2954, simple_loss=0.3547, pruned_loss=0.118, over 4294.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3218, pruned_loss=0.08364, over 936844.34 frames. ], batch size: 26, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:06:13,075 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 21:06:55,292 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.6452, 2.9186, 4.2927, 2.1534], device='cuda:0') +2024-07-27 21:07:20,649 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.0177, 2.2640, 1.6468, 2.6824, 1.4909, 1.6265, 3.4859, 2.8781], + device='cuda:0') +2024-07-27 21:07:25,585 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.1984, simple_loss=0.3025, pruned_loss=0.04714, over 944034.00 frames. 
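The `train.py:1114` records above report the running training loss per batch (`tot_loss[loss=...]`), and the `train.py:1146` record just above reports the epoch-5 validation loss (loss=0.1984). To turn a log like this one into train/validation loss curves, a minimal sketch follows; `parse_log` and both regular expressions are hypothetical helpers inferred only from the line formats visible in this log, not part of the training code:

```python
import re

# Hypothetical helpers (assumed names, not from train.py): the patterns
# rely only on the "Epoch N, batch M, ... tot_loss[loss=...]" and
# "Epoch N, validation: loss=..." line formats seen in this log.
TRAIN_RE = re.compile(r"Epoch (\d+), batch (\d+), .*tot_loss\[loss=([\d.]+),")
VALID_RE = re.compile(r"Epoch (\d+), validation: loss=([\d.]+),")

def parse_log(path):
    """Collect (epoch, batch, tot_loss) and (epoch, valid_loss) points."""
    train_points, valid_points = [], []
    with open(path) as f:
        for line in f:
            m = TRAIN_RE.search(line)
            if m:
                train_points.append(
                    (int(m.group(1)), int(m.group(2)), float(m.group(3))))
                continue
            m = VALID_RE.search(line)
            if m:
                valid_points.append((int(m.group(1)), float(m.group(2))))
    return train_points, valid_points
```

On the records above, this would yield, for example, (5, 5950, 0.245) from the `batch 5950` line and (5, 0.1984) from the validation line.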
+2024-07-27 21:07:25,586 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 21:07:25,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=62521.333333333336, ans=0.0 +2024-07-27 21:07:27,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=62521.333333333336, ans=0.2 +2024-07-27 21:07:27,131 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:07:49,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62561.333333333336, ans=0.1 +2024-07-27 21:07:55,034 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 6.382e+01 7.254e+01 8.565e+01 1.652e+02, threshold=1.451e+02, percent-clipped=1.0 +2024-07-27 21:07:55,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=62561.333333333336, ans=0.0 +2024-07-27 21:08:01,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.80 vs. limit=15.0 +2024-07-27 21:08:03,173 INFO [train.py:1114] (0/4) Epoch 5, batch 6050, loss[loss=0.2179, simple_loss=0.3016, pruned_loss=0.06709, over 4774.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3195, pruned_loss=0.08253, over 938132.57 frames. ], batch size: 12, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:08:06,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=62588.0, ans=0.025 +2024-07-27 21:08:08,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62588.0, ans=0.1 +2024-07-27 21:08:16,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=62601.333333333336, ans=0.0 +2024-07-27 21:08:16,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-27 21:08:22,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=62614.666666666664, ans=0.0 +2024-07-27 21:08:38,507 INFO [train.py:1114] (0/4) Epoch 5, batch 6100, loss[loss=0.2565, simple_loss=0.3267, pruned_loss=0.09313, over 4672.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3191, pruned_loss=0.08213, over 937614.31 frames. 
], batch size: 15, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:08:41,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=62654.666666666664, ans=0.95 +2024-07-27 21:08:47,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62668.0, ans=0.125 +2024-07-27 21:08:58,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62694.666666666664, ans=0.0 +2024-07-27 21:09:04,973 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 6.274e+01 6.891e+01 8.796e+01 1.456e+02, threshold=1.378e+02, percent-clipped=1.0 +2024-07-27 21:09:11,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=62708.0, ans=0.07 +2024-07-27 21:09:13,063 INFO [train.py:1114] (0/4) Epoch 5, batch 6150, loss[loss=0.3103, simple_loss=0.3619, pruned_loss=0.1293, over 3328.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3194, pruned_loss=0.08243, over 936316.79 frames. ], batch size: 36, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:09:15,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62721.333333333336, ans=0.0 +2024-07-27 21:09:51,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=62774.666666666664, ans=0.025 +2024-07-27 21:09:53,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62774.666666666664, ans=0.1 +2024-07-27 21:09:54,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62774.666666666664, ans=0.1 +2024-07-27 21:09:57,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.78 vs. limit=22.5 +2024-07-27 21:09:58,596 INFO [train.py:1114] (0/4) Epoch 5, batch 6200, loss[loss=0.2162, simple_loss=0.3121, pruned_loss=0.06012, over 4750.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3199, pruned_loss=0.08313, over 936096.79 frames. ], batch size: 14, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:10:01,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.09 vs. 
limit=15.0 +2024-07-27 21:10:02,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=62788.0, ans=0.0 +2024-07-27 21:10:07,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62801.333333333336, ans=0.1 +2024-07-27 21:10:13,750 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:10:25,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.230e+01 6.498e+01 7.693e+01 9.750e+01 1.653e+02, threshold=1.539e+02, percent-clipped=3.0 +2024-07-27 21:10:26,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=62841.333333333336, ans=0.125 +2024-07-27 21:10:28,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62841.333333333336, ans=0.125 +2024-07-27 21:10:30,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=62841.333333333336, ans=0.0 +2024-07-27 21:10:33,757 INFO [train.py:1114] (0/4) Epoch 5, batch 6250, loss[loss=0.2481, simple_loss=0.3252, pruned_loss=0.08546, over 4813.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3202, pruned_loss=0.08345, over 932794.54 frames. ], batch size: 14, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:10:46,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=62868.0, ans=0.2 +2024-07-27 21:10:51,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62881.333333333336, ans=0.125 +2024-07-27 21:10:59,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=62894.666666666664, ans=0.035 +2024-07-27 21:11:09,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=6.0 +2024-07-27 21:11:09,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.06 vs. limit=15.0 +2024-07-27 21:11:10,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=62908.0, ans=0.2 +2024-07-27 21:11:12,026 INFO [train.py:1114] (0/4) Epoch 5, batch 6300, loss[loss=0.2387, simple_loss=0.3153, pruned_loss=0.0811, over 4498.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3202, pruned_loss=0.0835, over 929584.01 frames. ], batch size: 10, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:11:17,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=62921.333333333336, ans=0.125 +2024-07-27 21:11:19,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.73 vs. limit=5.0 +2024-07-27 21:11:25,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.05 vs. 
limit=22.5 +2024-07-27 21:11:31,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=62961.333333333336, ans=0.125 +2024-07-27 21:11:37,090 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.292e+01 6.261e+01 7.099e+01 7.903e+01 1.165e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 21:11:37,169 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:11:37,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62961.333333333336, ans=0.1 +2024-07-27 21:11:40,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.23 vs. limit=15.0 +2024-07-27 21:11:40,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=62974.666666666664, ans=0.0 +2024-07-27 21:11:44,970 INFO [train.py:1114] (0/4) Epoch 5, batch 6350, loss[loss=0.2531, simple_loss=0.3369, pruned_loss=0.08467, over 4551.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3204, pruned_loss=0.08292, over 933700.71 frames. ], batch size: 21, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:11:48,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62988.0, ans=0.1 +2024-07-27 21:11:53,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=63001.333333333336, ans=0.0 +2024-07-27 21:12:00,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=63014.666666666664, ans=0.1 +2024-07-27 21:12:01,260 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:12:07,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.98 vs. limit=10.0 +2024-07-27 21:12:09,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=63028.0, ans=0.125 +2024-07-27 21:12:20,324 INFO [train.py:1114] (0/4) Epoch 5, batch 6400, loss[loss=0.2302, simple_loss=0.3038, pruned_loss=0.07832, over 4633.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.32, pruned_loss=0.08339, over 934853.41 frames. ], batch size: 13, lr: 1.45e-02, grad_scale: 32.0 +2024-07-27 21:12:22,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63054.666666666664, ans=0.125 +2024-07-27 21:12:47,233 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.719e+01 7.795e+01 8.869e+01 1.661e+02, threshold=1.559e+02, percent-clipped=1.0 +2024-07-27 21:12:50,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=63108.0, ans=0.07 +2024-07-27 21:12:52,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=63108.0, ans=0.125 +2024-07-27 21:12:53,959 INFO [train.py:1114] (0/4) Epoch 5, batch 6450, loss[loss=0.2423, simple_loss=0.3264, pruned_loss=0.0791, over 4517.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.32, pruned_loss=0.08294, over 938681.37 frames. 
], batch size: 21, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:12:56,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=63121.333333333336, ans=0.2 +2024-07-27 21:12:57,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=63121.333333333336, ans=0.025 +2024-07-27 21:13:06,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.13 vs. limit=22.5 +2024-07-27 21:13:20,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63161.333333333336, ans=0.125 +2024-07-27 21:13:30,344 INFO [train.py:1114] (0/4) Epoch 5, batch 6500, loss[loss=0.303, simple_loss=0.3548, pruned_loss=0.1256, over 3362.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.32, pruned_loss=0.08259, over 939747.61 frames. ], batch size: 35, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:13:32,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63188.0, ans=0.125 +2024-07-27 21:13:46,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.89 vs. limit=22.5 +2024-07-27 21:13:48,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63214.666666666664, ans=0.1 +2024-07-27 21:13:54,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=63228.0, ans=0.025 +2024-07-27 21:13:57,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.095e+01 6.739e+01 7.533e+01 1.080e+02, threshold=1.348e+02, percent-clipped=0.0 +2024-07-27 21:14:03,824 INFO [train.py:1114] (0/4) Epoch 5, batch 6550, loss[loss=0.2257, simple_loss=0.2885, pruned_loss=0.0814, over 4816.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3199, pruned_loss=0.08234, over 942690.86 frames. ], batch size: 11, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:14:20,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.43 vs. limit=22.5 +2024-07-27 21:14:26,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.32 vs. limit=22.5 +2024-07-27 21:14:27,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.15 vs. limit=22.5 +2024-07-27 21:14:36,529 INFO [train.py:1114] (0/4) Epoch 5, batch 6600, loss[loss=0.2254, simple_loss=0.3043, pruned_loss=0.07322, over 4930.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3209, pruned_loss=0.08303, over 944640.22 frames. ], batch size: 14, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:14:43,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=63334.666666666664, ans=0.125 +2024-07-27 21:14:44,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.77 vs. 
limit=10.0 +2024-07-27 21:14:52,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=63348.0, ans=15.0 +2024-07-27 21:14:56,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=63361.333333333336, ans=0.0 +2024-07-27 21:15:03,306 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.369e+01 7.181e+01 8.583e+01 1.412e+02, threshold=1.436e+02, percent-clipped=2.0 +2024-07-27 21:15:09,890 INFO [train.py:1114] (0/4) Epoch 5, batch 6650, loss[loss=0.3042, simple_loss=0.3804, pruned_loss=0.114, over 4632.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3215, pruned_loss=0.08345, over 943228.04 frames. ], batch size: 17, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:15:12,286 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.47 vs. limit=22.5 +2024-07-27 21:15:28,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=63414.666666666664, ans=0.07 +2024-07-27 21:15:32,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=12.0 +2024-07-27 21:15:43,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.02 vs. limit=10.0 +2024-07-27 21:15:43,874 INFO [train.py:1114] (0/4) Epoch 5, batch 6700, loss[loss=0.2524, simple_loss=0.3237, pruned_loss=0.09058, over 4715.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3225, pruned_loss=0.08389, over 942176.41 frames. ], batch size: 19, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:15:50,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.99 vs. limit=15.0 +2024-07-27 21:16:03,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=63481.333333333336, ans=0.125 +2024-07-27 21:16:06,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=63494.666666666664, ans=0.125 +2024-07-27 21:16:07,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=63494.666666666664, ans=0.125 +2024-07-27 21:16:11,138 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.250e+01 6.326e+01 7.074e+01 8.168e+01 1.305e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-27 21:16:13,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=63508.0, ans=0.125 +2024-07-27 21:16:18,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=63521.333333333336, ans=0.125 +2024-07-27 21:16:19,123 INFO [train.py:1114] (0/4) Epoch 5, batch 6750, loss[loss=0.257, simple_loss=0.3224, pruned_loss=0.09581, over 4339.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3219, pruned_loss=0.08394, over 940485.17 frames. 
], batch size: 25, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:16:41,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=63561.333333333336, ans=0.025 +2024-07-27 21:16:47,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=63574.666666666664, ans=10.0 +2024-07-27 21:16:50,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=63574.666666666664, ans=0.125 +2024-07-27 21:16:54,787 INFO [train.py:1114] (0/4) Epoch 5, batch 6800, loss[loss=0.2505, simple_loss=0.329, pruned_loss=0.08603, over 4629.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3216, pruned_loss=0.08381, over 938750.72 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:16:57,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.15 vs. limit=15.0 +2024-07-27 21:16:58,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=63588.0, ans=0.0 +2024-07-27 21:17:06,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63601.333333333336, ans=0.1 +2024-07-27 21:17:07,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-07-27 21:17:18,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63628.0, ans=0.1 +2024-07-27 21:17:19,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.64 vs. limit=15.0 +2024-07-27 21:17:21,276 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.069e+01 6.010e+01 6.782e+01 8.396e+01 1.269e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 21:17:28,002 INFO [train.py:1114] (0/4) Epoch 5, batch 6850, loss[loss=0.2168, simple_loss=0.3101, pruned_loss=0.06174, over 4702.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3229, pruned_loss=0.0846, over 940393.95 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:17:29,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63654.666666666664, ans=0.125 +2024-07-27 21:17:30,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=63654.666666666664, ans=0.0 +2024-07-27 21:17:35,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.29 vs. limit=15.0 +2024-07-27 21:17:45,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=63681.333333333336, ans=0.0 +2024-07-27 21:17:49,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=63694.666666666664, ans=0.125 +2024-07-27 21:18:02,829 INFO [train.py:1114] (0/4) Epoch 5, batch 6900, loss[loss=0.2043, simple_loss=0.2823, pruned_loss=0.0632, over 4964.00 frames. 
], tot_loss[loss=0.2447, simple_loss=0.322, pruned_loss=0.08368, over 942721.66 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:18:28,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.78 vs. limit=15.0 +2024-07-27 21:18:30,462 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.148e+01 6.563e+01 7.062e+01 8.255e+01 1.155e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 21:18:33,395 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:18:36,457 INFO [train.py:1114] (0/4) Epoch 5, batch 6950, loss[loss=0.2183, simple_loss=0.2809, pruned_loss=0.07782, over 4485.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3227, pruned_loss=0.0846, over 940008.49 frames. ], batch size: 10, lr: 1.44e-02, grad_scale: 16.0 +2024-07-27 21:18:52,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=63801.333333333336, ans=0.0 +2024-07-27 21:18:55,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=63814.666666666664, ans=0.0 +2024-07-27 21:19:10,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=63841.333333333336, ans=0.125 +2024-07-27 21:19:13,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=63854.666666666664, ans=0.0 +2024-07-27 21:19:13,741 INFO [train.py:1114] (0/4) Epoch 5, batch 7000, loss[loss=0.2752, simple_loss=0.3477, pruned_loss=0.1014, over 4655.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.322, pruned_loss=0.08452, over 938814.01 frames. ], batch size: 17, lr: 1.44e-02, grad_scale: 16.0 +2024-07-27 21:19:13,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=63854.666666666664, ans=0.125 +2024-07-27 21:19:15,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=63854.666666666664, ans=0.04949747468305833 +2024-07-27 21:19:27,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.73 vs. limit=22.5 +2024-07-27 21:19:28,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0 +2024-07-27 21:19:30,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=63881.333333333336, ans=0.125 +2024-07-27 21:19:40,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.209e+01 6.125e+01 6.883e+01 8.000e+01 1.166e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 21:19:42,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=63908.0, ans=0.125 +2024-07-27 21:19:46,898 INFO [train.py:1114] (0/4) Epoch 5, batch 7050, loss[loss=0.2534, simple_loss=0.3303, pruned_loss=0.08822, over 4671.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3224, pruned_loss=0.08437, over 942004.19 frames. 
], batch size: 19, lr: 1.44e-02, grad_scale: 16.0
+2024-07-27 21:19:47,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=63921.333333333336, ans=0.0
+2024-07-27 21:19:53,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=63934.666666666664, ans=0.2
+2024-07-27 21:20:00,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63948.0, ans=0.125
+2024-07-27 21:20:00,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=63948.0, ans=0.0
+2024-07-27 21:20:05,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=63948.0, ans=0.0
+2024-07-27 21:20:20,298 INFO [train.py:1114] (0/4) Epoch 5, batch 7100, loss[loss=0.2699, simple_loss=0.3505, pruned_loss=0.0946, over 4797.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3223, pruned_loss=0.08484, over 936726.09 frames. ], batch size: 15, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:20:24,864 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:20:25,611 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-48000.pt
+2024-07-27 21:20:38,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=64001.333333333336, ans=10.0
+2024-07-27 21:20:44,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.83 vs. limit=15.0
+2024-07-27 21:20:46,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=64028.0, ans=0.125
+2024-07-27 21:20:49,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=64028.0, ans=0.0
+2024-07-27 21:20:53,657 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.110e+01 6.595e+01 7.846e+01 1.344e+02, threshold=1.319e+02, percent-clipped=0.0
+2024-07-27 21:20:55,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=15.0
+2024-07-27 21:20:58,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=64041.333333333336, ans=0.125
+2024-07-27 21:20:59,501 INFO [train.py:1114] (0/4) Epoch 5, batch 7150, loss[loss=0.2413, simple_loss=0.3172, pruned_loss=0.08273, over 4465.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.32, pruned_loss=0.08371, over 937235.16 frames. ], batch size: 21, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:21:08,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=64068.0, ans=0.0
+2024-07-27 21:21:11,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=64068.0, ans=0.125
+2024-07-27 21:21:22,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=64094.666666666664, ans=0.125
+2024-07-27 21:21:32,466 INFO [train.py:1114] (0/4) Epoch 5, batch 7200, loss[loss=0.2924, simple_loss=0.3739, pruned_loss=0.1055, over 4795.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3209, pruned_loss=0.08396, over 937791.20 frames. ], batch size: 15, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:21:36,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.11 vs. limit=10.0
+2024-07-27 21:21:41,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.71 vs. limit=22.5
+2024-07-27 21:21:41,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=64134.666666666664, ans=0.0
+2024-07-27 21:21:43,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0
+2024-07-27 21:21:45,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.93 vs. limit=15.0
+2024-07-27 21:21:51,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=64161.333333333336, ans=0.125
+2024-07-27 21:21:56,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=64161.333333333336, ans=0.0
+2024-07-27 21:21:58,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64174.666666666664, ans=0.1
+2024-07-27 21:21:59,178 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.508e+01 6.414e+01 7.135e+01 8.390e+01 1.273e+02, threshold=1.427e+02, percent-clipped=0.0
+2024-07-27 21:22:04,942 INFO [train.py:1114] (0/4) Epoch 5, batch 7250, loss[loss=0.2293, simple_loss=0.3055, pruned_loss=0.07657, over 4860.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3203, pruned_loss=0.08352, over 939090.56 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:22:15,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=64201.333333333336, ans=0.1
+2024-07-27 21:22:16,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=64201.333333333336, ans=0.2
+2024-07-27 21:22:31,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=64228.0, ans=0.0
+2024-07-27 21:22:34,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=64228.0, ans=0.0
+2024-07-27 21:22:40,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64241.333333333336, ans=0.1
+2024-07-27 21:22:41,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.85 vs. limit=22.5
+2024-07-27 21:22:43,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=64241.333333333336, ans=0.125
+2024-07-27 21:22:44,625 INFO [train.py:1114] (0/4) Epoch 5, batch 7300, loss[loss=0.1886, simple_loss=0.2632, pruned_loss=0.05702, over 4849.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3194, pruned_loss=0.08253, over 939629.98 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:22:44,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=64254.666666666664, ans=0.0
+2024-07-27 21:22:55,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=64268.0, ans=0.1
+2024-07-27 21:23:14,908 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+01 6.188e+01 7.170e+01 8.494e+01 1.437e+02, threshold=1.434e+02, percent-clipped=1.0
+2024-07-27 21:23:20,883 INFO [train.py:1114] (0/4) Epoch 5, batch 7350, loss[loss=0.255, simple_loss=0.3279, pruned_loss=0.09106, over 4642.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3197, pruned_loss=0.08274, over 939087.44 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:23:23,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64321.333333333336, ans=0.125
+2024-07-27 21:23:26,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=64334.666666666664, ans=0.025
+2024-07-27 21:23:41,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64361.333333333336, ans=0.125
+2024-07-27 21:23:46,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64374.666666666664, ans=0.1
+2024-07-27 21:23:46,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=64374.666666666664, ans=15.0
+2024-07-27 21:23:56,166 INFO [train.py:1114] (0/4) Epoch 5, batch 7400, loss[loss=0.2294, simple_loss=0.3139, pruned_loss=0.07244, over 4696.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3199, pruned_loss=0.08263, over 940404.51 frames. ], batch size: 13, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:23:59,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=64388.0, ans=0.02
+2024-07-27 21:24:03,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.84 vs. limit=15.0
+2024-07-27 21:24:07,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=15.0
+2024-07-27 21:24:11,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=64414.666666666664, ans=0.125
+2024-07-27 21:24:19,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=64428.0, ans=0.05
+2024-07-27 21:24:23,435 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 6.697e+01 7.956e+01 9.233e+01 1.549e+02, threshold=1.591e+02, percent-clipped=1.0
+2024-07-27 21:24:29,312 INFO [train.py:1114] (0/4) Epoch 5, batch 7450, loss[loss=0.2343, simple_loss=0.304, pruned_loss=0.08233, over 4621.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3185, pruned_loss=0.08227, over 938157.32 frames. ], batch size: 11, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:24:39,951 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0
+2024-07-27 21:24:40,526 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.33 vs. limit=15.0
+2024-07-27 21:25:03,948 INFO [train.py:1114] (0/4) Epoch 5, batch 7500, loss[loss=0.2927, simple_loss=0.3555, pruned_loss=0.1149, over 3361.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3191, pruned_loss=0.08272, over 936086.57 frames. ], batch size: 35, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:25:11,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.52 vs. limit=22.5
+2024-07-27 21:25:21,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=64548.0, ans=0.0
+2024-07-27 21:25:23,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=64548.0, ans=0.0
+2024-07-27 21:25:26,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64561.333333333336, ans=0.1
+2024-07-27 21:25:32,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.88 vs. limit=6.0
+2024-07-27 21:25:33,073 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 6.039e+01 6.623e+01 7.552e+01 1.223e+02, threshold=1.325e+02, percent-clipped=0.0
+2024-07-27 21:25:38,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=64574.666666666664, ans=0.0
+2024-07-27 21:25:39,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=64574.666666666664, ans=0.125
+2024-07-27 21:25:42,034 INFO [train.py:1114] (0/4) Epoch 5, batch 7550, loss[loss=0.2766, simple_loss=0.3607, pruned_loss=0.09629, over 4632.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3217, pruned_loss=0.08393, over 936238.13 frames. ], batch size: 17, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:25:46,439 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.92 vs. limit=15.0
+2024-07-27 21:25:55,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=64601.333333333336, ans=0.0
+2024-07-27 21:26:11,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=64628.0, ans=0.025
+2024-07-27 21:26:15,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.86 vs. limit=15.0
+2024-07-27 21:26:27,399 INFO [train.py:1114] (0/4) Epoch 5, batch 7600, loss[loss=0.2519, simple_loss=0.3335, pruned_loss=0.08514, over 4812.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3215, pruned_loss=0.08354, over 938005.06 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:26:37,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.49 vs. limit=15.0
+2024-07-27 21:26:38,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=64668.0, ans=0.0
+2024-07-27 21:26:43,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64681.333333333336, ans=0.125
+2024-07-27 21:26:57,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=64694.666666666664, ans=0.125
+2024-07-27 21:26:57,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64694.666666666664, ans=0.1
+2024-07-27 21:27:00,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=64708.0, ans=0.0
+2024-07-27 21:27:02,474 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.190e+01 6.111e+01 6.673e+01 8.200e+01 1.239e+02, threshold=1.335e+02, percent-clipped=0.0
+2024-07-27 21:27:07,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=64721.333333333336, ans=0.2
+2024-07-27 21:27:07,740 INFO [train.py:1114] (0/4) Epoch 5, batch 7650, loss[loss=0.2346, simple_loss=0.2965, pruned_loss=0.08637, over 4937.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3211, pruned_loss=0.08388, over 937087.84 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:27:20,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=64734.666666666664, ans=0.125
+2024-07-27 21:27:24,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=64748.0, ans=0.125
+2024-07-27 21:27:25,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=64748.0, ans=0.0
+2024-07-27 21:27:35,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=64761.333333333336, ans=0.2
+2024-07-27 21:27:37,526 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.08 vs. limit=22.5
+2024-07-27 21:27:38,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.10 vs. limit=10.0
+2024-07-27 21:27:39,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=64774.666666666664, ans=0.025
+2024-07-27 21:27:43,709 INFO [train.py:1114] (0/4) Epoch 5, batch 7700, loss[loss=0.2092, simple_loss=0.3087, pruned_loss=0.05485, over 4694.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3228, pruned_loss=0.08425, over 934246.42 frames. ], batch size: 13, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:28:02,778 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:28:03,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.99 vs. limit=15.0
+2024-07-27 21:28:08,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=64814.666666666664, ans=0.5
+2024-07-27 21:28:14,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=64828.0, ans=0.0
+2024-07-27 21:28:15,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=64828.0, ans=0.125
+2024-07-27 21:28:17,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.64 vs. limit=15.0
+2024-07-27 21:28:20,007 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.292e+01 7.097e+01 8.458e+01 1.099e+02, threshold=1.419e+02, percent-clipped=0.0
+2024-07-27 21:28:21,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=64841.333333333336, ans=0.125
+2024-07-27 21:28:25,211 INFO [train.py:1114] (0/4) Epoch 5, batch 7750, loss[loss=0.2528, simple_loss=0.3248, pruned_loss=0.09043, over 4935.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3234, pruned_loss=0.08436, over 935197.28 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:28:31,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.02 vs. limit=15.0
+2024-07-27 21:28:34,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64868.0, ans=0.1
+2024-07-27 21:28:34,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=64868.0, ans=0.125
+2024-07-27 21:28:38,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=64868.0, ans=0.0
+2024-07-27 21:28:49,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=64894.666666666664, ans=0.125
+2024-07-27 21:28:50,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=64894.666666666664, ans=0.125
+2024-07-27 21:28:54,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0
+2024-07-27 21:29:02,998 INFO [train.py:1114] (0/4) Epoch 5, batch 7800, loss[loss=0.2266, simple_loss=0.3065, pruned_loss=0.07336, over 4661.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.323, pruned_loss=0.08428, over 936909.79 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:29:09,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=64934.666666666664, ans=0.125
+2024-07-27 21:29:20,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=64948.0, ans=0.125
+2024-07-27 21:29:30,977 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.312e+01 7.129e+01 8.364e+01 1.154e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 21:29:33,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=64974.666666666664, ans=0.0
+2024-07-27 21:29:36,508 INFO [train.py:1114] (0/4) Epoch 5, batch 7850, loss[loss=0.2319, simple_loss=0.2956, pruned_loss=0.08408, over 4485.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3226, pruned_loss=0.08409, over 935611.52 frames. ], batch size: 10, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:29:37,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=64988.0, ans=0.125
+2024-07-27 21:29:38,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=64988.0, ans=0.0
+2024-07-27 21:30:08,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=65014.666666666664, ans=0.0
+2024-07-27 21:30:15,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.40 vs. limit=22.5
+2024-07-27 21:30:16,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=65041.333333333336, ans=0.125
+2024-07-27 21:30:18,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=65041.333333333336, ans=0.05
+2024-07-27 21:30:21,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=65041.333333333336, ans=0.125
+2024-07-27 21:30:22,624 INFO [train.py:1114] (0/4) Epoch 5, batch 7900, loss[loss=0.2305, simple_loss=0.3265, pruned_loss=0.06719, over 4873.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3235, pruned_loss=0.08441, over 932715.77 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:30:22,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=65054.666666666664, ans=0.0
+2024-07-27 21:30:24,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=65054.666666666664, ans=0.2
+2024-07-27 21:30:28,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=65054.666666666664, ans=0.125
+2024-07-27 21:30:31,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.50 vs. limit=15.0
+2024-07-27 21:30:48,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=65094.666666666664, ans=0.0
+2024-07-27 21:30:50,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=65094.666666666664, ans=0.0
+2024-07-27 21:30:55,205 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.363e+01 7.228e+01 8.012e+01 1.089e+02, threshold=1.446e+02, percent-clipped=0.0
+2024-07-27 21:30:58,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=65108.0, ans=0.0
+2024-07-27 21:31:03,081 INFO [train.py:1114] (0/4) Epoch 5, batch 7950, loss[loss=0.2844, simple_loss=0.3552, pruned_loss=0.1068, over 3331.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3233, pruned_loss=0.08393, over 935029.10 frames. ], batch size: 35, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:31:06,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=65121.333333333336, ans=0.125
+2024-07-27 21:31:12,215 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:31:12,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=65134.666666666664, ans=0.125
+2024-07-27 21:31:14,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65134.666666666664, ans=0.125
+2024-07-27 21:31:21,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65148.0, ans=0.0
+2024-07-27 21:31:26,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=65148.0, ans=0.0
+2024-07-27 21:31:26,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=65148.0, ans=0.125
+2024-07-27 21:31:32,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.00 vs. limit=12.0
+2024-07-27 21:31:40,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65174.666666666664, ans=0.125
+2024-07-27 21:31:43,112 INFO [train.py:1114] (0/4) Epoch 5, batch 8000, loss[loss=0.233, simple_loss=0.3015, pruned_loss=0.08228, over 4622.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3204, pruned_loss=0.0829, over 934143.66 frames. ], batch size: 11, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:31:44,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=65188.0, ans=0.0
+2024-07-27 21:31:45,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=65188.0, ans=0.0
+2024-07-27 21:31:55,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=65201.333333333336, ans=0.125
+2024-07-27 21:32:03,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65214.666666666664, ans=0.1
+2024-07-27 21:32:09,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=65228.0, ans=0.07
+2024-07-27 21:32:11,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65228.0, ans=0.125
+2024-07-27 21:32:17,001 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 6.447e+01 7.209e+01 8.816e+01 1.330e+02, threshold=1.442e+02, percent-clipped=0.0
+2024-07-27 21:32:19,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=65241.333333333336, ans=0.125
+2024-07-27 21:32:23,332 INFO [train.py:1114] (0/4) Epoch 5, batch 8050, loss[loss=0.2385, simple_loss=0.316, pruned_loss=0.08046, over 4811.00 frames. ], tot_loss[loss=0.2429, simple_loss=0.32, pruned_loss=0.08292, over 934062.12 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:32:25,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0
+2024-07-27 21:32:28,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=65254.666666666664, ans=0.95
+2024-07-27 21:32:31,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=65268.0, ans=0.0
+2024-07-27 21:32:49,875 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:33:02,306 INFO [train.py:1114] (0/4) Epoch 5, batch 8100, loss[loss=0.2996, simple_loss=0.3544, pruned_loss=0.1224, over 4798.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3217, pruned_loss=0.08415, over 933859.92 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:33:07,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=65321.333333333336, ans=0.02
+2024-07-27 21:33:17,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=65348.0, ans=0.125
+2024-07-27 21:33:21,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65361.333333333336, ans=0.1
+2024-07-27 21:33:23,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0
+2024-07-27 21:33:24,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=65361.333333333336, ans=0.0
+2024-07-27 21:33:28,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=65374.666666666664, ans=0.125
+2024-07-27 21:33:30,393 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 6.276e+01 6.776e+01 7.896e+01 1.142e+02, threshold=1.355e+02, percent-clipped=0.0
+2024-07-27 21:33:35,667 INFO [train.py:1114] (0/4) Epoch 5, batch 8150, loss[loss=0.2096, simple_loss=0.3129, pruned_loss=0.05322, over 4811.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3207, pruned_loss=0.08374, over 937507.40 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:33:37,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=65388.0, ans=0.0
+2024-07-27 21:33:39,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=65388.0, ans=0.0
+2024-07-27 21:33:49,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=65401.333333333336, ans=0.025
+2024-07-27 21:33:52,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=65414.666666666664, ans=0.0
+2024-07-27 21:33:53,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=65414.666666666664, ans=0.125
+2024-07-27 21:34:08,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0
+2024-07-27 21:34:11,729 INFO [train.py:1114] (0/4) Epoch 5, batch 8200, loss[loss=0.233, simple_loss=0.3235, pruned_loss=0.07118, over 4806.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.322, pruned_loss=0.08378, over 938704.31 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:34:11,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=65454.666666666664, ans=0.2
+2024-07-27 21:34:13,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0
+2024-07-27 21:34:17,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=65468.0, ans=0.125
+2024-07-27 21:34:25,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.43 vs. limit=10.0
+2024-07-27 21:34:38,384 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:34:38,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.33 vs. limit=15.0
+2024-07-27 21:34:45,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.34 vs. limit=22.5
+2024-07-27 21:34:53,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65494.666666666664, ans=0.1
+2024-07-27 21:34:59,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65508.0, ans=0.125
+2024-07-27 21:35:01,183 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.058e+01 6.820e+01 7.758e+01 1.671e+02, threshold=1.364e+02, percent-clipped=1.0
+2024-07-27 21:35:06,471 INFO [train.py:1114] (0/4) Epoch 5, batch 8250, loss[loss=0.2518, simple_loss=0.3244, pruned_loss=0.08958, over 4892.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3215, pruned_loss=0.08318, over 939099.43 frames. ], batch size: 13, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:35:42,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=65561.33333333333, ans=0.125
+2024-07-27 21:35:44,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=65561.33333333333, ans=0.07
+2024-07-27 21:35:51,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.58 vs. limit=15.0
+2024-07-27 21:35:56,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=65574.66666666667, ans=10.0
+2024-07-27 21:36:00,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=65588.0, ans=0.125
+2024-07-27 21:36:00,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=65588.0, ans=10.0
+2024-07-27 21:36:00,922 INFO [train.py:1114] (0/4) Epoch 5, batch 8300, loss[loss=0.2568, simple_loss=0.3314, pruned_loss=0.09112, over 4901.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3218, pruned_loss=0.08315, over 938846.10 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:36:02,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65588.0, ans=0.1
+2024-07-27 21:36:02,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=65588.0, ans=0.125
+2024-07-27 21:36:13,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65601.33333333333, ans=0.0
+2024-07-27 21:36:18,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65614.66666666667, ans=0.0
+2024-07-27 21:36:27,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=65628.0, ans=0.025
+2024-07-27 21:36:32,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.81 vs. limit=15.0
+2024-07-27 21:36:32,860 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.223e+01 6.833e+01 7.614e+01 1.184e+02, threshold=1.367e+02, percent-clipped=0.0
+2024-07-27 21:36:40,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=65641.33333333333, ans=0.125
+2024-07-27 21:36:44,506 INFO [train.py:1114] (0/4) Epoch 5, batch 8350, loss[loss=0.2463, simple_loss=0.3244, pruned_loss=0.08406, over 4798.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3203, pruned_loss=0.08238, over 941613.23 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:36:51,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=65668.0, ans=0.035
+2024-07-27 21:36:52,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=65668.0, ans=0.05
+2024-07-27 21:37:11,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=15.0
+2024-07-27 21:37:24,227 INFO [train.py:1114] (0/4) Epoch 5, batch 8400, loss[loss=0.2515, simple_loss=0.3175, pruned_loss=0.09276, over 4776.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3209, pruned_loss=0.0827, over 940271.39 frames. ], batch size: 12, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:37:30,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=65734.66666666667, ans=0.125
+2024-07-27 21:37:33,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.89 vs. limit=12.0
+2024-07-27 21:37:39,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=65748.0, ans=0.2
+2024-07-27 21:37:57,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=65761.33333333333, ans=0.125
+2024-07-27 21:37:58,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65761.33333333333, ans=0.1
+2024-07-27 21:38:02,265 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.300e+01 6.390e+01 7.817e+01 9.094e+01 1.508e+02, threshold=1.563e+02, percent-clipped=1.0
+2024-07-27 21:38:07,397 INFO [train.py:1114] (0/4) Epoch 5, batch 8450, loss[loss=0.2451, simple_loss=0.3206, pruned_loss=0.08482, over 4800.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3211, pruned_loss=0.08298, over 939204.67 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:38:10,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=65788.0, ans=0.0
+2024-07-27 21:38:18,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=65801.33333333333, ans=0.0
+2024-07-27 21:38:19,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.69 vs. limit=22.5
+2024-07-27 21:38:22,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.48 vs. limit=15.0
+2024-07-27 21:38:29,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.87 vs. limit=10.0
+2024-07-27 21:38:41,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=65841.33333333333, ans=0.125
+2024-07-27 21:38:43,064 INFO [train.py:1114] (0/4) Epoch 5, batch 8500, loss[loss=0.2162, simple_loss=0.2945, pruned_loss=0.06898, over 4622.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3208, pruned_loss=0.08273, over 938976.11 frames. ], batch size: 11, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:38:47,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=65854.66666666667, ans=0.0
+2024-07-27 21:38:47,687 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:38:53,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=65868.0, ans=0.2
+2024-07-27 21:38:58,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.23 vs. limit=5.0
+2024-07-27 21:39:03,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.82 vs. limit=12.0
+2024-07-27 21:39:14,114 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 6.095e+01 6.601e+01 7.527e+01 1.077e+02, threshold=1.320e+02, percent-clipped=0.0
+2024-07-27 21:39:19,306 INFO [train.py:1114] (0/4) Epoch 5, batch 8550, loss[loss=0.2561, simple_loss=0.3322, pruned_loss=0.09005, over 4816.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3192, pruned_loss=0.08208, over 940195.04 frames. ], batch size: 11, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:39:35,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=65948.0, ans=0.0
+2024-07-27 21:39:36,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=65948.0, ans=0.125
+2024-07-27 21:39:43,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=65961.33333333333, ans=0.2
+2024-07-27 21:39:53,178 INFO [train.py:1114] (0/4) Epoch 5, batch 8600, loss[loss=0.2433, simple_loss=0.3241, pruned_loss=0.08118, over 4807.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.319, pruned_loss=0.08206, over 939899.38 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:39:57,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=65988.0, ans=0.125
+2024-07-27 21:40:10,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=66014.66666666667, ans=0.025
+2024-07-27 21:40:11,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=66014.66666666667, ans=0.0
+2024-07-27 21:40:13,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0
+2024-07-27 21:40:17,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.81 vs. limit=15.0
+2024-07-27 21:40:19,962 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.202e+01 6.597e+01 7.756e+01 9.469e+01 1.243e+02, threshold=1.551e+02, percent-clipped=0.0
+2024-07-27 21:40:25,141 INFO [train.py:1114] (0/4) Epoch 5, batch 8650, loss[loss=0.2473, simple_loss=0.3185, pruned_loss=0.08804, over 4896.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3196, pruned_loss=0.08244, over 941096.46 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:40:31,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66068.0, ans=0.1
+2024-07-27 21:40:34,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66068.0, ans=0.1
+2024-07-27 21:40:46,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=66094.66666666667, ans=0.0
+2024-07-27 21:40:46,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=66094.66666666667, ans=0.0
+2024-07-27 21:40:47,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66094.66666666667, ans=0.1
+2024-07-27 21:40:52,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66108.0, ans=0.0
+2024-07-27 21:40:58,367 INFO [train.py:1114] (0/4) Epoch 5, batch 8700, loss[loss=0.2689, simple_loss=0.3424, pruned_loss=0.09771, over 4752.00 frames. ], tot_loss[loss=0.245, simple_loss=0.322, pruned_loss=0.08403, over 938397.07 frames. ], batch size: 13, lr: 1.41e-02, grad_scale: 16.0
+2024-07-27 21:41:08,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66134.66666666667, ans=0.1
+2024-07-27 21:41:10,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=66134.66666666667, ans=0.95
+2024-07-27 21:41:17,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=66148.0, ans=0.0
+2024-07-27 21:41:18,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. limit=15.0
+2024-07-27 21:41:18,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=66161.33333333333, ans=0.125
+2024-07-27 21:41:23,358 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:41:24,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=66161.33333333333, ans=0.2
+2024-07-27 21:41:27,648 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 6.098e+01 6.655e+01 7.583e+01 1.149e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 21:41:28,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.63 vs. limit=10.0
+2024-07-27 21:41:31,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66188.0, ans=0.1
+2024-07-27 21:41:31,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66188.0, ans=0.1
+2024-07-27 21:41:32,113 INFO [train.py:1114] (0/4) Epoch 5, batch 8750, loss[loss=0.2871, simple_loss=0.3671, pruned_loss=0.1036, over 4679.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3209, pruned_loss=0.08343, over 937041.93 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 16.0
+2024-07-27 21:41:32,857 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:41:32,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66188.0, ans=0.125
+2024-07-27 21:41:37,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=66188.0, ans=0.125
+2024-07-27 21:41:38,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.53 vs. limit=10.0
+2024-07-27 21:41:49,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=66201.33333333333, ans=0.025
+2024-07-27 21:41:54,334 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.50 vs. limit=15.0
+2024-07-27 21:42:05,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=66241.33333333333, ans=0.125
+2024-07-27 21:42:13,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66241.33333333333, ans=0.1
+2024-07-27 21:42:21,957 INFO [train.py:1114] (0/4) Epoch 5, batch 8800, loss[loss=0.2546, simple_loss=0.337, pruned_loss=0.08615, over 4926.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3216, pruned_loss=0.0833, over 937970.79 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:42:22,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0
+2024-07-27 21:42:26,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66254.66666666667, ans=0.125
+2024-07-27 21:42:36,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=66268.0, ans=0.2
+2024-07-27 21:42:37,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=66268.0, ans=0.0
+2024-07-27 21:42:47,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66294.66666666667, ans=0.1
+2024-07-27 21:42:49,299 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0
+2024-07-27 21:42:53,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=66308.0, ans=0.125
+2024-07-27 21:42:54,233 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.254e+01 7.129e+01 8.198e+01 1.307e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 21:42:57,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=66308.0, ans=0.2
+2024-07-27 21:42:58,861 INFO [train.py:1114] (0/4) Epoch 5, batch 8850, loss[loss=0.2427, simple_loss=0.3198, pruned_loss=0.08283, over 4480.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3227, pruned_loss=0.08421, over 932272.85 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:43:00,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=66321.33333333333, ans=0.0
+2024-07-27 21:43:02,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.00 vs. limit=12.0
+2024-07-27 21:43:04,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=66321.33333333333, ans=0.025
+2024-07-27 21:43:06,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=66334.66666666667, ans=0.125
+2024-07-27 21:43:11,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=66334.66666666667, ans=0.0
+2024-07-27 21:43:12,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=66348.0, ans=0.125
+2024-07-27 21:43:12,772 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.38 vs. limit=22.5
+2024-07-27 21:43:18,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.09 vs. limit=15.0
+2024-07-27 21:43:30,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=66374.66666666667, ans=0.0
+2024-07-27 21:43:33,890 INFO [train.py:1114] (0/4) Epoch 5, batch 8900, loss[loss=0.2409, simple_loss=0.3224, pruned_loss=0.07972, over 4930.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3224, pruned_loss=0.08372, over 930463.28 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:43:59,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=66428.0, ans=0.125
+2024-07-27 21:44:03,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 6.558e+01 7.585e+01 9.378e+01 1.606e+02, threshold=1.517e+02, percent-clipped=2.0
+2024-07-27 21:44:08,146 INFO [train.py:1114] (0/4) Epoch 5, batch 8950, loss[loss=0.2605, simple_loss=0.3498, pruned_loss=0.0856, over 4474.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.322, pruned_loss=0.08347, over 930751.25 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:44:17,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=66468.0, ans=0.0
+2024-07-27 21:44:19,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66468.0, ans=0.125
+2024-07-27 21:44:20,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.27 vs. limit=22.5
+2024-07-27 21:44:25,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=66481.33333333333, ans=0.0
+2024-07-27 21:44:43,396 INFO [train.py:1114] (0/4) Epoch 5, batch 9000, loss[loss=0.1983, simple_loss=0.2767, pruned_loss=0.05994, over 4640.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.321, pruned_loss=0.08294, over 933646.77 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:44:43,397 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-27 21:44:52,373 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.5822, 4.1066, 4.2277, 3.8968, 3.6474, 3.8860, 4.3360, 3.7249],
+       device='cuda:0')
+2024-07-27 21:44:55,830 INFO [train.py:1146] (0/4) Epoch 5, validation: loss=0.197, simple_loss=0.3006, pruned_loss=0.04666, over 944034.00 frames.
+2024-07-27 21:44:55,831 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-27 21:44:57,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=66521.33333333333, ans=0.125
+2024-07-27 21:44:58,882 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0
+2024-07-27 21:45:05,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=66534.66666666667, ans=0.125
+2024-07-27 21:45:05,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=66534.66666666667, ans=0.0
+2024-07-27 21:45:10,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=66548.0, ans=0.2
+2024-07-27 21:45:35,913 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.66 vs. limit=5.0
+2024-07-27 21:45:36,643 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.268e+01 6.311e+01 7.147e+01 8.276e+01 1.860e+02, threshold=1.429e+02, percent-clipped=1.0
+2024-07-27 21:45:41,882 INFO [train.py:1114] (0/4) Epoch 5, batch 9050, loss[loss=0.2631, simple_loss=0.3193, pruned_loss=0.1035, over 4512.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3198, pruned_loss=0.08215, over 934205.62 frames. ], batch size: 10, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:45:48,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5
+2024-07-27 21:45:52,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.27 vs. limit=22.5
+2024-07-27 21:45:54,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=66601.33333333333, ans=0.0
+2024-07-27 21:46:02,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=66614.66666666667, ans=0.125
+2024-07-27 21:46:03,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=66628.0, ans=0.0
+2024-07-27 21:46:03,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=66628.0, ans=0.125
+2024-07-27 21:46:11,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=66641.33333333333, ans=0.125
+2024-07-27 21:46:16,351 INFO [train.py:1114] (0/4) Epoch 5, batch 9100, loss[loss=0.2352, simple_loss=0.3123, pruned_loss=0.07909, over 4931.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3188, pruned_loss=0.08168, over 936934.19 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:46:17,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=66654.66666666667, ans=0.125
+2024-07-27 21:46:22,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=66668.0, ans=0.125
+2024-07-27 21:46:44,172 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.191e+01 6.256e+01 6.952e+01 8.323e+01 1.113e+02, threshold=1.390e+02, percent-clipped=0.0
+2024-07-27 21:46:48,048 INFO [train.py:1114] (0/4) Epoch 5, batch 9150, loss[loss=0.2084, simple_loss=0.3044, pruned_loss=0.05625, over 4801.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3201, pruned_loss=0.0825, over 935950.94 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 16.0
+2024-07-27 21:46:51,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66721.33333333333, ans=0.125
+2024-07-27 21:46:52,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=66721.33333333333, ans=0.125
+2024-07-27 21:46:57,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=66734.66666666667, ans=0.0
+2024-07-27 21:46:59,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=66734.66666666667, ans=0.125
+2024-07-27 21:47:04,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=66748.0, ans=0.0
+2024-07-27 21:47:08,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0
+2024-07-27 21:47:17,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.68 vs. limit=15.0
+2024-07-27 21:47:18,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66774.66666666667, ans=0.1
+2024-07-27 21:47:22,558 INFO [train.py:1114] (0/4) Epoch 5, batch 9200, loss[loss=0.1839, simple_loss=0.2619, pruned_loss=0.05294, over 4867.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3177, pruned_loss=0.08115, over 937767.15 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0
+2024-07-27 21:47:31,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66801.33333333333, ans=0.0
+2024-07-27 21:47:31,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=66801.33333333333, ans=0.0
+2024-07-27 21:47:39,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=66814.66666666667, ans=15.0
+2024-07-27 21:47:45,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=66828.0, ans=0.05
+2024-07-27 21:47:47,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=66828.0, ans=0.125
+2024-07-27 21:47:48,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-07-27 21:47:50,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=66841.33333333333, ans=0.0
+2024-07-27 21:47:50,770 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 6.022e+01 6.976e+01 8.483e+01 1.676e+02, threshold=1.395e+02, percent-clipped=4.0
+2024-07-27 21:47:54,598 INFO [train.py:1114] (0/4) Epoch 5, batch 9250, loss[loss=0.2264, simple_loss=0.3128, pruned_loss=0.07002, over 4635.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3177, pruned_loss=0.08118, over 938598.19 frames. ], batch size: 13, lr: 1.40e-02, grad_scale: 32.0
+2024-07-27 21:47:56,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=66854.66666666667, ans=0.125
+2024-07-27 21:47:59,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66854.66666666667, ans=0.125
+2024-07-27 21:48:01,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66868.0, ans=0.1
+2024-07-27 21:48:01,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.38 vs. limit=15.0
+2024-07-27 21:48:09,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=66881.33333333333, ans=0.125
+2024-07-27 21:48:15,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66894.66666666667, ans=0.125
+2024-07-27 21:48:20,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=15.0
+2024-07-27 21:48:26,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=66921.33333333333, ans=0.2
+2024-07-27 21:48:26,555 INFO [train.py:1114] (0/4) Epoch 5, batch 9300, loss[loss=0.2286, simple_loss=0.3006, pruned_loss=0.07837, over 4774.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3177, pruned_loss=0.08169, over 938474.94 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0
+2024-07-27 21:48:33,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=66934.66666666667, ans=0.125
+2024-07-27 21:48:43,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.69 vs. limit=15.0
+2024-07-27 21:48:56,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=66974.66666666667, ans=0.125
+2024-07-27 21:48:59,169 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.113e+01 6.261e+01 7.073e+01 8.470e+01 1.590e+02, threshold=1.415e+02, percent-clipped=1.0
+2024-07-27 21:49:02,796 INFO [train.py:1114] (0/4) Epoch 5, batch 9350, loss[loss=0.2349, simple_loss=0.3137, pruned_loss=0.078, over 4805.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.3177, pruned_loss=0.08146, over 935486.46 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0
+2024-07-27 21:49:04,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.15 vs. limit=15.0
+2024-07-27 21:49:09,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=67001.33333333333, ans=0.2
+2024-07-27 21:49:10,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.14 vs. limit=15.0
+2024-07-27 21:49:11,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67001.33333333333, ans=0.125
+2024-07-27 21:49:21,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=67014.66666666667, ans=0.125
+2024-07-27 21:49:23,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=67028.0, ans=0.125
+2024-07-27 21:49:28,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=67041.33333333333, ans=0.025
+2024-07-27 21:49:32,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67041.33333333333, ans=0.125
+2024-07-27 21:49:33,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=67041.33333333333, ans=0.2
+2024-07-27 21:49:35,453 INFO [train.py:1114] (0/4) Epoch 5, batch 9400, loss[loss=0.2686, simple_loss=0.3403, pruned_loss=0.09848, over 4685.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3181, pruned_loss=0.08209, over 933118.43 frames. ], batch size: 13, lr: 1.40e-02, grad_scale: 32.0
+2024-07-27 21:49:35,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=67054.66666666667, ans=0.95
+2024-07-27 21:49:37,545 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.92 vs. limit=15.0
+2024-07-27 21:49:47,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=67081.33333333333, ans=0.125
+2024-07-27 21:49:48,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=67081.33333333333, ans=0.2
+2024-07-27 21:49:49,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=67081.33333333333, ans=0.125
+2024-07-27 21:49:51,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-07-27 21:49:51,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.23 vs. limit=22.5
+2024-07-27 21:49:59,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=67094.66666666667, ans=0.0
+2024-07-27 21:50:03,179 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.980e+01 6.533e+01 7.095e+01 1.005e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-27 21:50:03,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0
+2024-07-27 21:50:07,151 INFO [train.py:1114] (0/4) Epoch 5, batch 9450, loss[loss=0.2432, simple_loss=0.3048, pruned_loss=0.09086, over 4794.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3181, pruned_loss=0.08169, over 932227.13 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0
+2024-07-27 21:50:19,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=67148.0, ans=0.0
+2024-07-27 21:50:24,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=67148.0, ans=0.125
+2024-07-27 21:50:32,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=67174.66666666667, ans=0.07
+2024-07-27 21:50:37,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=67188.0, ans=0.125
+2024-07-27 21:50:39,298 INFO [train.py:1114] (0/4) Epoch 5, batch 9500, loss[loss=0.23, simple_loss=0.3153, pruned_loss=0.07229, over 4689.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3193, pruned_loss=0.0822, over 934647.62 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0
+2024-07-27 21:50:40,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=67188.0, ans=0.125
+2024-07-27 21:50:42,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=67188.0, ans=0.2
+2024-07-27 21:50:42,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=67188.0, ans=0.0
+2024-07-27 21:50:45,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.76 vs. 
limit=15.0 +2024-07-27 21:50:49,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67201.33333333333, ans=0.125 +2024-07-27 21:50:55,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=67214.66666666667, ans=0.0 +2024-07-27 21:50:57,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=15.0 +2024-07-27 21:50:59,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=67228.0, ans=0.0 +2024-07-27 21:51:08,214 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.271e+01 6.540e+01 7.330e+01 8.472e+01 1.165e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 21:51:11,983 INFO [train.py:1114] (0/4) Epoch 5, batch 9550, loss[loss=0.2582, simple_loss=0.3242, pruned_loss=0.09607, over 4778.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3183, pruned_loss=0.08142, over 932076.45 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:51:36,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=67281.33333333333, ans=0.125 +2024-07-27 21:51:49,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=67294.66666666667, ans=0.125 +2024-07-27 21:51:57,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=67321.33333333333, ans=0.025 +2024-07-27 21:51:58,332 INFO [train.py:1114] (0/4) Epoch 5, batch 9600, loss[loss=0.374, simple_loss=0.4021, pruned_loss=0.173, over 3174.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3191, pruned_loss=0.08162, over 930869.81 frames. ], batch size: 35, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:05,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=67321.33333333333, ans=0.0 +2024-07-27 21:52:08,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=67321.33333333333, ans=0.125 +2024-07-27 21:52:11,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-07-27 21:52:15,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67334.66666666667, ans=0.125 +2024-07-27 21:52:18,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67348.0, ans=0.125 +2024-07-27 21:52:21,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=67348.0, ans=0.015 +2024-07-27 21:52:24,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67361.33333333333, ans=0.1 +2024-07-27 21:52:27,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. 
limit=15.0 +2024-07-27 21:52:32,044 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.609e+01 6.845e+01 7.921e+01 9.332e+01 1.441e+02, threshold=1.584e+02, percent-clipped=0.0 +2024-07-27 21:52:35,910 INFO [train.py:1114] (0/4) Epoch 5, batch 9650, loss[loss=0.2223, simple_loss=0.3234, pruned_loss=0.06056, over 4862.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3195, pruned_loss=0.08241, over 927152.58 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:37,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=67388.0, ans=0.0 +2024-07-27 21:52:37,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67388.0, ans=0.125 +2024-07-27 21:52:39,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=67388.0, ans=0.0 +2024-07-27 21:52:41,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=67401.33333333333, ans=0.035 +2024-07-27 21:52:47,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=67401.33333333333, ans=0.0 +2024-07-27 21:52:47,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=67414.66666666667, ans=0.025 +2024-07-27 21:52:52,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=67414.66666666667, ans=0.2 +2024-07-27 21:53:00,049 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:53:05,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=67441.33333333333, ans=0.125 +2024-07-27 21:53:09,957 INFO [train.py:1114] (0/4) Epoch 5, batch 9700, loss[loss=0.2504, simple_loss=0.3323, pruned_loss=0.08426, over 4171.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.319, pruned_loss=0.08177, over 925427.58 frames. ], batch size: 25, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:17,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=67468.0, ans=0.025 +2024-07-27 21:53:20,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.05 vs. limit=15.0 +2024-07-27 21:53:22,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.49 vs. limit=15.0 +2024-07-27 21:53:26,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.46 vs. 
limit=15.0 +2024-07-27 21:53:28,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=67494.66666666667, ans=0.025 +2024-07-27 21:53:31,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=67494.66666666667, ans=0.125 +2024-07-27 21:53:37,139 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.756e+01 6.215e+01 7.059e+01 7.921e+01 1.151e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 21:53:38,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67508.0, ans=0.125 +2024-07-27 21:53:41,359 INFO [train.py:1114] (0/4) Epoch 5, batch 9750, loss[loss=0.2238, simple_loss=0.3093, pruned_loss=0.06921, over 4677.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3191, pruned_loss=0.08204, over 925905.03 frames. ], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:44,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=67521.33333333333, ans=0.2 +2024-07-27 21:53:49,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.46 vs. limit=10.0 +2024-07-27 21:54:01,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=67561.33333333333, ans=0.125 +2024-07-27 21:54:12,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=67574.66666666667, ans=0.125 +2024-07-27 21:54:13,386 INFO [train.py:1114] (0/4) Epoch 5, batch 9800, loss[loss=0.1978, simple_loss=0.2736, pruned_loss=0.06095, over 4708.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3188, pruned_loss=0.08216, over 925120.80 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:16,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=67588.0, ans=0.0 +2024-07-27 21:54:28,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=67614.66666666667, ans=0.125 +2024-07-27 21:54:31,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=67614.66666666667, ans=0.125 +2024-07-27 21:54:38,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=67641.33333333333, ans=0.125 +2024-07-27 21:54:40,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 6.437e+01 7.516e+01 8.874e+01 1.109e+02, threshold=1.503e+02, percent-clipped=0.0 +2024-07-27 21:54:41,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=67641.33333333333, ans=0.125 +2024-07-27 21:54:44,117 INFO [train.py:1114] (0/4) Epoch 5, batch 9850, loss[loss=0.2517, simple_loss=0.3241, pruned_loss=0.08959, over 4899.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3196, pruned_loss=0.08278, over 927413.08 frames. 
], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:47,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=67654.66666666667, ans=0.1 +2024-07-27 21:54:48,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.68 vs. limit=22.5 +2024-07-27 21:54:49,935 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.83 vs. limit=15.0 +2024-07-27 21:54:56,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67681.33333333333, ans=0.1 +2024-07-27 21:55:00,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=67681.33333333333, ans=10.0 +2024-07-27 21:55:15,333 INFO [train.py:1114] (0/4) Epoch 5, batch 9900, loss[loss=0.2416, simple_loss=0.3233, pruned_loss=0.07995, over 4842.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3206, pruned_loss=0.08351, over 926394.24 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:55:18,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=67721.33333333333, ans=0.125 +2024-07-27 21:55:26,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67734.66666666667, ans=0.125 +2024-07-27 21:55:38,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=67761.33333333333, ans=0.2 +2024-07-27 21:55:41,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=67774.66666666667, ans=0.0 +2024-07-27 21:55:43,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=67774.66666666667, ans=0.125 +2024-07-27 21:55:44,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.621e+01 7.499e+01 8.431e+01 1.516e+02, threshold=1.500e+02, percent-clipped=1.0 +2024-07-27 21:55:47,767 INFO [train.py:1114] (0/4) Epoch 5, batch 9950, loss[loss=0.1898, simple_loss=0.2731, pruned_loss=0.05318, over 4804.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3198, pruned_loss=0.08262, over 929521.93 frames. ], batch size: 11, lr: 1.39e-02, grad_scale: 16.0 +2024-07-27 21:55:50,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=67788.0, ans=0.0 +2024-07-27 21:55:50,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.96 vs. 
limit=15.0 +2024-07-27 21:55:58,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=67801.33333333333, ans=0.2 +2024-07-27 21:56:15,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=67828.0, ans=0.0 +2024-07-27 21:56:18,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=67841.33333333333, ans=0.125 +2024-07-27 21:56:19,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=12.0 +2024-07-27 21:56:22,654 INFO [train.py:1114] (0/4) Epoch 5, batch 10000, loss[loss=0.2727, simple_loss=0.3336, pruned_loss=0.1059, over 4619.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.323, pruned_loss=0.08392, over 926502.97 frames. ], batch size: 16, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:56:26,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67854.66666666667, ans=0.0 +2024-07-27 21:56:33,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.75 vs. limit=15.0 +2024-07-27 21:56:41,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=67881.33333333333, ans=0.125 +2024-07-27 21:56:42,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=67881.33333333333, ans=0.0 +2024-07-27 21:56:46,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=67894.66666666667, ans=10.0 +2024-07-27 21:56:53,470 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.323e+01 6.858e+01 7.699e+01 1.357e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 21:56:57,393 INFO [train.py:1114] (0/4) Epoch 5, batch 10050, loss[loss=0.309, simple_loss=0.3646, pruned_loss=0.1267, over 3575.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3282, pruned_loss=0.08732, over 915388.82 frames. ], batch size: 36, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:56:59,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=67921.33333333333, ans=0.0 +2024-07-27 21:57:02,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0 +2024-07-27 21:57:09,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=67934.66666666667, ans=10.0 +2024-07-27 21:57:14,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.36 vs. limit=15.0 +2024-07-27 21:57:18,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.43 vs. 
limit=15.0 +2024-07-27 21:57:26,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67974.66666666667, ans=0.125 +2024-07-27 21:57:30,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=67988.0, ans=0.125 +2024-07-27 21:57:31,364 INFO [train.py:1114] (0/4) Epoch 5, batch 10100, loss[loss=0.2705, simple_loss=0.3486, pruned_loss=0.09621, over 3808.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3354, pruned_loss=0.09514, over 863445.30 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:57:33,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=67988.0, ans=0.0 +2024-07-27 21:57:34,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.10 vs. limit=22.5 +2024-07-27 21:57:38,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.51 vs. limit=15.0 +2024-07-27 21:57:48,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=68014.66666666667, ans=10.0 +2024-07-27 21:57:55,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=68028.0, ans=0.125 +2024-07-27 21:58:00,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=68041.33333333333, ans=0.5 +2024-07-27 21:58:01,140 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.071e+01 6.809e+01 7.405e+01 8.060e+01 1.302e+02, threshold=1.481e+02, percent-clipped=0.0 +2024-07-27 21:58:02,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.34 vs. limit=15.0 +2024-07-27 21:58:04,318 INFO [train.py:1114] (0/4) Epoch 5, batch 10150, loss[loss=0.302, simple_loss=0.3599, pruned_loss=0.122, over 3070.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3395, pruned_loss=0.1002, over 820124.92 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:05,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=68054.66666666667, ans=0.0 +2024-07-27 21:58:26,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68094.66666666667, ans=0.125 +2024-07-27 21:58:35,562 INFO [train.py:1114] (0/4) Epoch 5, batch 10200, loss[loss=0.2811, simple_loss=0.3369, pruned_loss=0.1127, over 3487.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3441, pruned_loss=0.1049, over 788285.80 frames. 
], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:38,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=68121.33333333333, ans=0.09899494936611666 +2024-07-27 21:58:39,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=68121.33333333333, ans=0.1 +2024-07-27 21:58:42,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68134.66666666667, ans=0.0 +2024-07-27 21:58:45,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=68134.66666666667, ans=0.025 +2024-07-27 21:58:47,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=68148.0, ans=0.035 +2024-07-27 21:58:48,967 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-5.pt +2024-07-27 21:59:31,829 INFO [train.py:1114] (0/4) Epoch 6, batch 0, loss[loss=0.194, simple_loss=0.2849, pruned_loss=0.05155, over 4851.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2849, pruned_loss=0.05155, over 4851.00 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 21:59:31,829 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 21:59:43,348 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.203, simple_loss=0.3084, pruned_loss=0.04884, over 944034.00 frames. +2024-07-27 21:59:43,349 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 21:59:51,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=68164.0, ans=0.0 +2024-07-27 21:59:58,962 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.074e+01 6.594e+01 7.055e+01 7.805e+01 1.292e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 22:00:00,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=68177.33333333333, ans=10.0 +2024-07-27 22:00:02,795 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.80 vs. limit=10.0 +2024-07-27 22:00:05,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0 +2024-07-27 22:00:11,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68204.0, ans=0.1 +2024-07-27 22:00:14,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=68204.0, ans=0.125 +2024-07-27 22:00:16,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.73 vs. limit=10.0 +2024-07-27 22:00:18,875 INFO [train.py:1114] (0/4) Epoch 6, batch 50, loss[loss=0.2323, simple_loss=0.2924, pruned_loss=0.08612, over 4620.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3219, pruned_loss=0.08438, over 206443.90 frames. 
], batch size: 11, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:00:19,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=68217.33333333333, ans=0.125 +2024-07-27 22:00:24,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68217.33333333333, ans=0.1 +2024-07-27 22:00:25,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.83 vs. limit=15.0 +2024-07-27 22:00:31,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=68230.66666666667, ans=0.0 +2024-07-27 22:00:31,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=68230.66666666667, ans=0.0 +2024-07-27 22:00:43,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=68257.33333333333, ans=0.0 +2024-07-27 22:00:46,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=68270.66666666667, ans=0.5 +2024-07-27 22:00:48,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=68270.66666666667, ans=0.125 +2024-07-27 22:00:52,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=68284.0, ans=0.0 +2024-07-27 22:00:52,634 INFO [train.py:1114] (0/4) Epoch 6, batch 100, loss[loss=0.2025, simple_loss=0.2855, pruned_loss=0.05978, over 4647.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3249, pruned_loss=0.08446, over 365343.77 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:01:09,851 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.212e+01 6.939e+01 8.250e+01 1.265e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 22:01:25,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=68337.33333333333, ans=0.125 +2024-07-27 22:01:27,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.52 vs. limit=15.0 +2024-07-27 22:01:27,683 INFO [train.py:1114] (0/4) Epoch 6, batch 150, loss[loss=0.1872, simple_loss=0.278, pruned_loss=0.04817, over 4607.00 frames. ], tot_loss[loss=0.2417, simple_loss=0.3203, pruned_loss=0.0816, over 494176.86 frames. ], batch size: 11, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:01:46,551 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.86 vs. limit=15.0 +2024-07-27 22:02:05,170 INFO [train.py:1114] (0/4) Epoch 6, batch 200, loss[loss=0.2958, simple_loss=0.3656, pruned_loss=0.113, over 4575.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.319, pruned_loss=0.08112, over 593755.36 frames. ], batch size: 21, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:02:08,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.50 vs. 
limit=22.5 +2024-07-27 22:02:20,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.270e+01 7.736e+01 9.618e+01 1.930e+02, threshold=1.547e+02, percent-clipped=5.0 +2024-07-27 22:02:23,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=68444.0, ans=0.1 +2024-07-27 22:02:32,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68470.66666666667, ans=0.0 +2024-07-27 22:02:32,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=68470.66666666667, ans=0.0 +2024-07-27 22:02:38,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=68484.0, ans=0.125 +2024-07-27 22:02:38,692 INFO [train.py:1114] (0/4) Epoch 6, batch 250, loss[loss=0.2622, simple_loss=0.3433, pruned_loss=0.09049, over 4666.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3178, pruned_loss=0.08022, over 670425.81 frames. ], batch size: 16, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:02:42,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=68484.0, ans=0.125 +2024-07-27 22:02:56,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=68510.66666666667, ans=0.0 +2024-07-27 22:03:00,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=68524.0, ans=0.2 +2024-07-27 22:03:03,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=68524.0, ans=0.025 +2024-07-27 22:03:09,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=68537.33333333333, ans=0.125 +2024-07-27 22:03:10,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.64 vs. limit=22.5 +2024-07-27 22:03:12,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.95 vs. limit=12.0 +2024-07-27 22:03:14,323 INFO [train.py:1114] (0/4) Epoch 6, batch 300, loss[loss=0.3045, simple_loss=0.3741, pruned_loss=0.1175, over 4786.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3164, pruned_loss=0.07988, over 730338.40 frames. ], batch size: 15, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:03:14,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68550.66666666667, ans=0.1 +2024-07-27 22:03:29,802 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.141e+01 6.927e+01 8.037e+01 1.226e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 22:03:32,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.98 vs. 
limit=15.0 +2024-07-27 22:03:33,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=68577.33333333333, ans=0.0 +2024-07-27 22:03:37,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=68590.66666666667, ans=0.125 +2024-07-27 22:03:41,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=68604.0, ans=0.2 +2024-07-27 22:03:46,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68604.0, ans=0.1 +2024-07-27 22:03:49,905 INFO [train.py:1114] (0/4) Epoch 6, batch 350, loss[loss=0.231, simple_loss=0.3064, pruned_loss=0.07785, over 4949.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.3179, pruned_loss=0.08042, over 776056.13 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:03:58,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=68630.66666666667, ans=0.0 +2024-07-27 22:04:07,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68644.0, ans=0.1 +2024-07-27 22:04:07,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68644.0, ans=0.125 +2024-07-27 22:04:11,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68657.33333333333, ans=0.0 +2024-07-27 22:04:13,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=68657.33333333333, ans=0.025 +2024-07-27 22:04:16,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=68670.66666666667, ans=0.125 +2024-07-27 22:04:23,112 INFO [train.py:1114] (0/4) Epoch 6, batch 400, loss[loss=0.2449, simple_loss=0.3222, pruned_loss=0.08381, over 4690.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.317, pruned_loss=0.0794, over 813722.22 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:04:28,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. limit=15.0 +2024-07-27 22:04:33,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=68697.33333333333, ans=0.0 +2024-07-27 22:04:42,395 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 6.252e+01 7.226e+01 8.425e+01 1.439e+02, threshold=1.445e+02, percent-clipped=1.0 +2024-07-27 22:04:43,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68710.66666666667, ans=0.1 +2024-07-27 22:05:00,653 INFO [train.py:1114] (0/4) Epoch 6, batch 450, loss[loss=0.2518, simple_loss=0.3298, pruned_loss=0.0869, over 4639.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3177, pruned_loss=0.07977, over 839406.77 frames. 
], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:05:14,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=68777.33333333333, ans=0.1 +2024-07-27 22:05:18,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=68777.33333333333, ans=0.2 +2024-07-27 22:05:28,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=68804.0, ans=0.125 +2024-07-27 22:05:33,936 INFO [train.py:1114] (0/4) Epoch 6, batch 500, loss[loss=0.2833, simple_loss=0.3681, pruned_loss=0.09926, over 4679.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.316, pruned_loss=0.07894, over 861827.24 frames. ], batch size: 15, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:05:46,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.12 vs. limit=15.0 +2024-07-27 22:05:51,020 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 6.191e+01 6.809e+01 7.735e+01 1.328e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-27 22:06:03,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=68870.66666666667, ans=0.0 +2024-07-27 22:06:09,504 INFO [train.py:1114] (0/4) Epoch 6, batch 550, loss[loss=0.2486, simple_loss=0.3249, pruned_loss=0.08614, over 4663.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3163, pruned_loss=0.07892, over 877421.66 frames. ], batch size: 17, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:06:19,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=68897.33333333333, ans=10.0 +2024-07-27 22:06:43,641 INFO [train.py:1114] (0/4) Epoch 6, batch 600, loss[loss=0.2307, simple_loss=0.3181, pruned_loss=0.07169, over 4662.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3165, pruned_loss=0.07923, over 892100.44 frames. 
], batch size: 16, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:06:49,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=68950.66666666667, ans=0.125 +2024-07-27 22:06:49,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=68950.66666666667, ans=0.125 +2024-07-27 22:06:50,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=68950.66666666667, ans=0.0 +2024-07-27 22:06:58,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=68977.33333333333, ans=0.125 +2024-07-27 22:06:58,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=68977.33333333333, ans=0.2 +2024-07-27 22:07:00,815 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.226e+01 6.771e+01 7.767e+01 1.130e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-27 22:07:08,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=68990.66666666667, ans=0.5 +2024-07-27 22:07:12,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=69004.0, ans=0.125 +2024-07-27 22:07:18,981 INFO [train.py:1114] (0/4) Epoch 6, batch 650, loss[loss=0.2082, simple_loss=0.2953, pruned_loss=0.06054, over 4759.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3165, pruned_loss=0.07915, over 903732.32 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:07:21,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=69017.33333333333, ans=0.0 +2024-07-27 22:07:35,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=69044.0, ans=0.0 +2024-07-27 22:07:36,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=69044.0, ans=0.2 +2024-07-27 22:07:45,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=69070.66666666667, ans=0.2 +2024-07-27 22:07:48,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.91 vs. limit=22.5 +2024-07-27 22:07:50,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=69070.66666666667, ans=0.125 +2024-07-27 22:07:52,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.30 vs. limit=15.0 +2024-07-27 22:07:52,609 INFO [train.py:1114] (0/4) Epoch 6, batch 700, loss[loss=0.1787, simple_loss=0.2661, pruned_loss=0.0456, over 4638.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3161, pruned_loss=0.07877, over 911583.03 frames. 
], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:08:04,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69097.33333333333, ans=0.1 +2024-07-27 22:08:04,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69097.33333333333, ans=0.125 +2024-07-27 22:08:07,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.900e+01 6.634e+01 8.042e+01 1.194e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-27 22:08:10,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.66 vs. limit=15.0 +2024-07-27 22:08:14,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=69124.0, ans=0.2 +2024-07-27 22:08:15,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.19 vs. limit=10.0 +2024-07-27 22:08:19,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69137.33333333333, ans=0.1 +2024-07-27 22:08:24,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.73 vs. limit=10.0 +2024-07-27 22:08:27,977 INFO [train.py:1114] (0/4) Epoch 6, batch 750, loss[loss=0.2355, simple_loss=0.3198, pruned_loss=0.07562, over 4692.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3153, pruned_loss=0.07847, over 918252.40 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:08:50,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=12.0 +2024-07-27 22:08:51,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.38 vs. limit=15.0 +2024-07-27 22:09:00,970 INFO [train.py:1114] (0/4) Epoch 6, batch 800, loss[loss=0.2579, simple_loss=0.3125, pruned_loss=0.1017, over 4852.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3157, pruned_loss=0.07912, over 923147.91 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:09:03,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=69217.33333333333, ans=0.125 +2024-07-27 22:09:04,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=69217.33333333333, ans=10.0 +2024-07-27 22:09:15,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69244.0, ans=0.125 +2024-07-27 22:09:18,018 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.094e+01 6.022e+01 6.607e+01 7.761e+01 1.209e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-27 22:09:20,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=69244.0, ans=0.125 +2024-07-27 22:09:37,855 INFO [train.py:1114] (0/4) Epoch 6, batch 850, loss[loss=0.211, simple_loss=0.2951, pruned_loss=0.06339, over 4660.00 frames. 
], tot_loss[loss=0.237, simple_loss=0.3156, pruned_loss=0.07916, over 927235.08 frames. ], batch size: 14, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:09:49,186 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.374e-01 +2024-07-27 22:10:00,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69324.0, ans=0.1 +2024-07-27 22:10:04,069 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-52000.pt +2024-07-27 22:10:07,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=69324.0, ans=0.0 +2024-07-27 22:10:09,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=69337.33333333333, ans=0.05 +2024-07-27 22:10:13,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=69337.33333333333, ans=0.0 +2024-07-27 22:10:14,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0 +2024-07-27 22:10:15,124 INFO [train.py:1114] (0/4) Epoch 6, batch 900, loss[loss=0.1987, simple_loss=0.2865, pruned_loss=0.05543, over 4847.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3162, pruned_loss=0.07952, over 927858.52 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:10:21,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69364.0, ans=0.1 +2024-07-27 22:10:27,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69364.0, ans=0.1 +2024-07-27 22:10:29,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69377.33333333333, ans=0.125 +2024-07-27 22:10:30,385 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.354e+01 7.008e+01 8.406e+01 1.301e+02, threshold=1.402e+02, percent-clipped=0.0 +2024-07-27 22:10:31,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=69377.33333333333, ans=0.5 +2024-07-27 22:10:43,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=69404.0, ans=0.125 +2024-07-27 22:10:48,698 INFO [train.py:1114] (0/4) Epoch 6, batch 950, loss[loss=0.2178, simple_loss=0.2963, pruned_loss=0.06963, over 4772.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3164, pruned_loss=0.07938, over 930284.27 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:10:57,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.58 vs. 
limit=15.0 +2024-07-27 22:10:58,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=69430.66666666667, ans=0.05 +2024-07-27 22:11:02,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=69444.0, ans=0.0 +2024-07-27 22:11:03,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=69444.0, ans=0.0 +2024-07-27 22:11:04,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=69444.0, ans=0.2 +2024-07-27 22:11:07,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.10 vs. limit=15.0 +2024-07-27 22:11:13,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69457.33333333333, ans=0.1 +2024-07-27 22:11:13,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69457.33333333333, ans=0.1 +2024-07-27 22:11:19,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.89 vs. limit=15.0 +2024-07-27 22:11:21,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69470.66666666667, ans=0.1 +2024-07-27 22:11:23,636 INFO [train.py:1114] (0/4) Epoch 6, batch 1000, loss[loss=0.2291, simple_loss=0.2993, pruned_loss=0.07945, over 4955.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.317, pruned_loss=0.07988, over 929708.44 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:11:30,733 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:11:35,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=69497.33333333333, ans=0.0 +2024-07-27 22:11:39,439 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+01 6.209e+01 6.779e+01 8.145e+01 1.211e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 22:11:42,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69510.66666666667, ans=0.1 +2024-07-27 22:11:44,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.57 vs. limit=15.0 +2024-07-27 22:11:46,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=69524.0, ans=0.025 +2024-07-27 22:11:52,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=69537.33333333333, ans=0.125 +2024-07-27 22:11:55,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.70 vs. limit=12.0 +2024-07-27 22:11:57,382 INFO [train.py:1114] (0/4) Epoch 6, batch 1050, loss[loss=0.2411, simple_loss=0.3311, pruned_loss=0.07555, over 4870.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3164, pruned_loss=0.0799, over 931917.81 frames. 
], batch size: 14, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:11:58,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=69550.66666666667, ans=0.125 +2024-07-27 22:11:58,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=69550.66666666667, ans=0.025 +2024-07-27 22:12:14,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=69577.33333333333, ans=0.0 +2024-07-27 22:12:22,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69590.66666666667, ans=0.125 +2024-07-27 22:12:28,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=69604.0, ans=0.0 +2024-07-27 22:12:29,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.63 vs. limit=15.0 +2024-07-27 22:12:32,805 INFO [train.py:1114] (0/4) Epoch 6, batch 1100, loss[loss=0.2276, simple_loss=0.3102, pruned_loss=0.07246, over 4904.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.315, pruned_loss=0.07869, over 934280.25 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:12:35,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.08 vs. limit=22.5 +2024-07-27 22:12:39,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=69630.66666666667, ans=0.125 +2024-07-27 22:12:48,097 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.350e+01 6.961e+01 9.139e+01, threshold=1.270e+02, percent-clipped=0.0 +2024-07-27 22:12:53,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0 +2024-07-27 22:12:55,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=69657.33333333333, ans=0.0 +2024-07-27 22:12:56,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=69657.33333333333, ans=10.0 +2024-07-27 22:13:05,920 INFO [train.py:1114] (0/4) Epoch 6, batch 1150, loss[loss=0.2495, simple_loss=0.3273, pruned_loss=0.08582, over 4892.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3149, pruned_loss=0.07817, over 933953.76 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:13:15,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=69697.33333333333, ans=0.025 +2024-07-27 22:13:47,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69737.33333333333, ans=0.1 +2024-07-27 22:13:50,299 INFO [train.py:1114] (0/4) Epoch 6, batch 1200, loss[loss=0.2288, simple_loss=0.3141, pruned_loss=0.07174, over 4875.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3153, pruned_loss=0.07889, over 932507.79 frames. 
], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:14:04,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=69764.0, ans=0.07 +2024-07-27 22:14:05,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=69777.33333333333, ans=0.125 +2024-07-27 22:14:07,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.960e+01 6.565e+01 7.380e+01 1.067e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-27 22:14:10,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=69777.33333333333, ans=0.025 +2024-07-27 22:14:25,335 INFO [train.py:1114] (0/4) Epoch 6, batch 1250, loss[loss=0.2491, simple_loss=0.3425, pruned_loss=0.07785, over 4783.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3148, pruned_loss=0.07793, over 936847.93 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:14:28,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.59 vs. limit=15.0 +2024-07-27 22:14:41,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69844.0, ans=0.1 +2024-07-27 22:14:58,340 INFO [train.py:1114] (0/4) Epoch 6, batch 1300, loss[loss=0.2426, simple_loss=0.3247, pruned_loss=0.0802, over 4693.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.314, pruned_loss=0.07808, over 938440.12 frames. ], batch size: 19, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:15:10,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=69897.33333333333, ans=0.0 +2024-07-27 22:15:15,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.914e+01 6.589e+01 7.357e+01 1.015e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-27 22:15:16,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=69910.66666666667, ans=0.125 +2024-07-27 22:15:17,471 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:15:18,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=69910.66666666667, ans=0.2 +2024-07-27 22:15:20,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=69924.0, ans=0.2 +2024-07-27 22:15:23,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=69924.0, ans=0.5 +2024-07-27 22:15:33,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69950.66666666667, ans=0.1 +2024-07-27 22:15:33,792 INFO [train.py:1114] (0/4) Epoch 6, batch 1350, loss[loss=0.2408, simple_loss=0.3305, pruned_loss=0.0756, over 4755.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3144, pruned_loss=0.07797, over 940885.58 frames. 
], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:15:38,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=69950.66666666667, ans=0.0 +2024-07-27 22:15:41,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=69964.0, ans=0.0 +2024-07-27 22:15:44,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69964.0, ans=0.1 +2024-07-27 22:15:46,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=69964.0, ans=0.125 +2024-07-27 22:15:48,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=69977.33333333333, ans=0.125 +2024-07-27 22:15:56,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=69990.66666666667, ans=0.1 +2024-07-27 22:15:57,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=69990.66666666667, ans=0.125 +2024-07-27 22:16:04,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=70004.0, ans=0.0 +2024-07-27 22:16:07,350 INFO [train.py:1114] (0/4) Epoch 6, batch 1400, loss[loss=0.1824, simple_loss=0.2503, pruned_loss=0.05731, over 4708.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3151, pruned_loss=0.07875, over 942492.09 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:16:17,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=70030.66666666667, ans=0.0 +2024-07-27 22:16:22,914 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 6.116e+01 6.900e+01 7.787e+01 1.307e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-27 22:16:23,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=70044.0, ans=0.125 +2024-07-27 22:17:18,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=70057.33333333333, ans=0.025 +2024-07-27 22:17:25,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=70070.66666666667, ans=0.05 +2024-07-27 22:17:26,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=70070.66666666667, ans=0.125 +2024-07-27 22:17:29,168 INFO [train.py:1114] (0/4) Epoch 6, batch 1450, loss[loss=0.2559, simple_loss=0.3379, pruned_loss=0.08695, over 4689.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.316, pruned_loss=0.07868, over 942176.49 frames. 
], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:17:39,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=70097.33333333333, ans=0.125 +2024-07-27 22:17:51,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=70124.0, ans=0.05 +2024-07-27 22:18:02,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=70137.33333333333, ans=0.04949747468305833 +2024-07-27 22:18:04,324 INFO [train.py:1114] (0/4) Epoch 6, batch 1500, loss[loss=0.2237, simple_loss=0.3202, pruned_loss=0.06354, over 4819.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3165, pruned_loss=0.07918, over 941959.49 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:18:11,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=70164.0, ans=0.0 +2024-07-27 22:18:12,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=70164.0, ans=0.125 +2024-07-27 22:18:20,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=70177.33333333333, ans=0.025 +2024-07-27 22:18:20,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.883e+01 6.851e+01 7.584e+01 1.194e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-27 22:18:23,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70177.33333333333, ans=0.1 +2024-07-27 22:18:24,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.26 vs. limit=22.5 +2024-07-27 22:18:29,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=70190.66666666667, ans=0.025 +2024-07-27 22:18:37,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.19 vs. limit=22.5 +2024-07-27 22:18:40,401 INFO [train.py:1114] (0/4) Epoch 6, batch 1550, loss[loss=0.2026, simple_loss=0.3031, pruned_loss=0.05104, over 4910.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3176, pruned_loss=0.07989, over 937961.22 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:18:55,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=70244.0, ans=0.125 +2024-07-27 22:19:02,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.11 vs. limit=10.0 +2024-07-27 22:19:10,189 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.60 vs. limit=22.5 +2024-07-27 22:19:11,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=70270.66666666667, ans=0.0 +2024-07-27 22:19:13,505 INFO [train.py:1114] (0/4) Epoch 6, batch 1600, loss[loss=0.2382, simple_loss=0.3246, pruned_loss=0.0759, over 4870.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3173, pruned_loss=0.07982, over 936167.41 frames. 
], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:19:15,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=70284.0, ans=0.125 +2024-07-27 22:19:18,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=70284.0, ans=0.0 +2024-07-27 22:19:21,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=70284.0, ans=0.125 +2024-07-27 22:19:31,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.952e+01 6.615e+01 7.870e+01 9.186e+01 1.944e+02, threshold=1.574e+02, percent-clipped=2.0 +2024-07-27 22:19:33,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=70310.66666666667, ans=0.125 +2024-07-27 22:19:40,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=70324.0, ans=0.0 +2024-07-27 22:19:40,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=70324.0, ans=0.1 +2024-07-27 22:19:41,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=70324.0, ans=0.0 +2024-07-27 22:19:49,401 INFO [train.py:1114] (0/4) Epoch 6, batch 1650, loss[loss=0.2671, simple_loss=0.3531, pruned_loss=0.09059, over 4668.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3168, pruned_loss=0.07954, over 936402.45 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:19:55,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70364.0, ans=0.1 +2024-07-27 22:20:08,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-07-27 22:20:16,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=70390.66666666667, ans=0.125 +2024-07-27 22:20:24,473 INFO [train.py:1114] (0/4) Epoch 6, batch 1700, loss[loss=0.1893, simple_loss=0.2643, pruned_loss=0.05717, over 4705.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3152, pruned_loss=0.07856, over 938271.46 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:20:26,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=70417.33333333333, ans=0.0 +2024-07-27 22:20:29,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.09 vs. 
limit=22.5 +2024-07-27 22:20:33,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=70430.66666666667, ans=0.125 +2024-07-27 22:20:39,546 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 6.237e+01 7.615e+01 9.161e+01 1.409e+02, threshold=1.523e+02, percent-clipped=0.0 +2024-07-27 22:20:41,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=70444.0, ans=0.0 +2024-07-27 22:20:46,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70457.33333333333, ans=0.125 +2024-07-27 22:20:49,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=70457.33333333333, ans=0.09899494936611666 +2024-07-27 22:20:50,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=70457.33333333333, ans=15.0 +2024-07-27 22:20:51,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.74 vs. limit=10.0 +2024-07-27 22:20:54,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=70470.66666666667, ans=0.125 +2024-07-27 22:20:58,123 INFO [train.py:1114] (0/4) Epoch 6, batch 1750, loss[loss=0.1929, simple_loss=0.2632, pruned_loss=0.0613, over 4806.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3138, pruned_loss=0.07798, over 939796.99 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 64.0 +2024-07-27 22:21:20,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=70510.66666666667, ans=0.07 +2024-07-27 22:21:38,394 INFO [train.py:1114] (0/4) Epoch 6, batch 1800, loss[loss=0.2387, simple_loss=0.3166, pruned_loss=0.08044, over 4644.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3133, pruned_loss=0.07801, over 940270.13 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:21:38,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.30 vs. limit=22.5 +2024-07-27 22:21:39,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=70550.66666666667, ans=15.0 +2024-07-27 22:21:42,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=70550.66666666667, ans=0.0 +2024-07-27 22:21:44,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=70550.66666666667, ans=0.2 +2024-07-27 22:21:54,696 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 6.220e+01 7.110e+01 8.756e+01 1.676e+02, threshold=1.422e+02, percent-clipped=1.0 +2024-07-27 22:21:58,466 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.19 vs. 
limit=15.0 +2024-07-27 22:22:00,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=70590.66666666667, ans=0.05 +2024-07-27 22:22:12,103 INFO [train.py:1114] (0/4) Epoch 6, batch 1850, loss[loss=0.2507, simple_loss=0.323, pruned_loss=0.08919, over 4814.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3121, pruned_loss=0.07752, over 940231.95 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:22:14,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=70617.33333333333, ans=0.125 +2024-07-27 22:22:16,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70617.33333333333, ans=0.0 +2024-07-27 22:22:22,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=70630.66666666667, ans=0.0 +2024-07-27 22:22:25,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70644.0, ans=0.125 +2024-07-27 22:22:29,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=70644.0, ans=15.0 +2024-07-27 22:22:35,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70657.33333333333, ans=0.125 +2024-07-27 22:22:41,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=70670.66666666667, ans=0.2 +2024-07-27 22:22:45,974 INFO [train.py:1114] (0/4) Epoch 6, batch 1900, loss[loss=0.2518, simple_loss=0.3471, pruned_loss=0.07828, over 4661.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3137, pruned_loss=0.0776, over 941409.21 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:22:49,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=70684.0, ans=0.04949747468305833 +2024-07-27 22:22:49,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=70684.0, ans=0.2 +2024-07-27 22:22:57,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.58 vs. limit=22.5 +2024-07-27 22:23:01,916 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.167e+01 7.357e+01 8.960e+01 1.368e+02, threshold=1.471e+02, percent-clipped=0.0 +2024-07-27 22:23:19,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=70737.33333333333, ans=0.125 +2024-07-27 22:23:23,084 INFO [train.py:1114] (0/4) Epoch 6, batch 1950, loss[loss=0.2367, simple_loss=0.3143, pruned_loss=0.07956, over 4884.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3156, pruned_loss=0.0782, over 943310.18 frames. 
], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:23:24,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=70750.66666666667, ans=0.125 +2024-07-27 22:23:26,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70750.66666666667, ans=0.125 +2024-07-27 22:23:30,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=70764.0, ans=0.0 +2024-07-27 22:23:30,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=70764.0, ans=0.0 +2024-07-27 22:23:30,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=70764.0, ans=0.0 +2024-07-27 22:23:33,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=70764.0, ans=0.125 +2024-07-27 22:23:41,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=70777.33333333333, ans=0.125 +2024-07-27 22:23:56,794 INFO [train.py:1114] (0/4) Epoch 6, batch 2000, loss[loss=0.1966, simple_loss=0.2574, pruned_loss=0.06795, over 4811.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3154, pruned_loss=0.07806, over 940523.05 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:23:56,979 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:23:57,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.83 vs. limit=22.5 +2024-07-27 22:24:04,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=70830.66666666667, ans=0.0 +2024-07-27 22:24:15,302 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.102e+01 6.803e+01 8.517e+01 1.833e+02, threshold=1.361e+02, percent-clipped=3.0 +2024-07-27 22:24:25,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=70857.33333333333, ans=0.2 +2024-07-27 22:24:28,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=70870.66666666667, ans=0.0 +2024-07-27 22:24:32,966 INFO [train.py:1114] (0/4) Epoch 6, batch 2050, loss[loss=0.1795, simple_loss=0.2534, pruned_loss=0.05282, over 4600.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3144, pruned_loss=0.0777, over 938726.29 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:24:42,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=70897.33333333333, ans=0.0 +2024-07-27 22:25:07,041 INFO [train.py:1114] (0/4) Epoch 6, batch 2100, loss[loss=0.2139, simple_loss=0.2897, pruned_loss=0.06905, over 4757.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3144, pruned_loss=0.07747, over 940619.18 frames. 
], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:25:07,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70950.66666666667, ans=0.125 +2024-07-27 22:25:07,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=70950.66666666667, ans=0.2 +2024-07-27 22:25:12,806 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.80 vs. limit=15.0 +2024-07-27 22:25:14,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.03 vs. limit=22.5 +2024-07-27 22:25:17,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.79 vs. limit=15.0 +2024-07-27 22:25:18,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=70964.0, ans=0.2 +2024-07-27 22:25:23,160 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 6.024e+01 6.945e+01 8.681e+01 1.626e+02, threshold=1.389e+02, percent-clipped=3.0 +2024-07-27 22:25:23,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=70977.33333333333, ans=0.0 +2024-07-27 22:25:31,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=70990.66666666667, ans=0.0 +2024-07-27 22:25:40,272 INFO [train.py:1114] (0/4) Epoch 6, batch 2150, loss[loss=0.221, simple_loss=0.3058, pruned_loss=0.06814, over 4904.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3128, pruned_loss=0.07696, over 943843.09 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:25:49,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.02 vs. limit=12.0 +2024-07-27 22:25:56,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71044.0, ans=0.0 +2024-07-27 22:26:00,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=71044.0, ans=0.025 +2024-07-27 22:26:05,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=71057.33333333333, ans=0.2 +2024-07-27 22:26:06,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71057.33333333333, ans=0.0 +2024-07-27 22:26:14,877 INFO [train.py:1114] (0/4) Epoch 6, batch 2200, loss[loss=0.2633, simple_loss=0.3356, pruned_loss=0.09552, over 4806.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3117, pruned_loss=0.07637, over 943516.66 frames. 
], batch size: 14, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:26:19,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=71084.0, ans=0.125 +2024-07-27 22:26:22,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=71097.33333333333, ans=0.2 +2024-07-27 22:26:22,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=71097.33333333333, ans=0.2 +2024-07-27 22:26:29,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=71110.66666666667, ans=0.0 +2024-07-27 22:26:29,779 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:26:30,849 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.032e+01 5.977e+01 6.533e+01 7.474e+01 1.096e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-27 22:26:41,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=71137.33333333333, ans=0.07 +2024-07-27 22:26:42,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=71137.33333333333, ans=0.125 +2024-07-27 22:26:45,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71137.33333333333, ans=0.1 +2024-07-27 22:26:47,963 INFO [train.py:1114] (0/4) Epoch 6, batch 2250, loss[loss=0.2263, simple_loss=0.3062, pruned_loss=0.07325, over 4699.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3119, pruned_loss=0.0764, over 941950.26 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:26:50,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=71150.66666666667, ans=0.025 +2024-07-27 22:26:57,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.19 vs. limit=15.0 +2024-07-27 22:27:02,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.88 vs. limit=22.5 +2024-07-27 22:27:06,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71177.33333333333, ans=0.1 +2024-07-27 22:27:06,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71177.33333333333, ans=0.0 +2024-07-27 22:27:24,049 INFO [train.py:1114] (0/4) Epoch 6, batch 2300, loss[loss=0.223, simple_loss=0.2904, pruned_loss=0.07783, over 4937.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.311, pruned_loss=0.07637, over 939439.00 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:27:29,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.17 vs. 
limit=15.0 +2024-07-27 22:27:39,872 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.045e+01 6.668e+01 7.674e+01 1.080e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-27 22:27:52,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=71270.66666666667, ans=0.0 +2024-07-27 22:27:56,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-07-27 22:27:57,316 INFO [train.py:1114] (0/4) Epoch 6, batch 2350, loss[loss=0.2201, simple_loss=0.3016, pruned_loss=0.0693, over 4630.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3109, pruned_loss=0.07583, over 941516.61 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:28:00,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=71284.0, ans=0.125 +2024-07-27 22:28:02,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=71284.0, ans=0.025 +2024-07-27 22:28:20,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71324.0, ans=0.125 +2024-07-27 22:28:23,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.75 vs. limit=22.5 +2024-07-27 22:28:29,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.78 vs. limit=10.0 +2024-07-27 22:28:30,534 INFO [train.py:1114] (0/4) Epoch 6, batch 2400, loss[loss=0.1918, simple_loss=0.2747, pruned_loss=0.05451, over 4638.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3134, pruned_loss=0.07743, over 940880.39 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:28:30,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=71350.66666666667, ans=0.025 +2024-07-27 22:28:39,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.00 vs. limit=15.0 +2024-07-27 22:28:48,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.375e+01 7.289e+01 8.298e+01 1.037e+02, threshold=1.458e+02, percent-clipped=0.0 +2024-07-27 22:28:57,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=71390.66666666667, ans=0.125 +2024-07-27 22:29:05,475 INFO [train.py:1114] (0/4) Epoch 6, batch 2450, loss[loss=0.268, simple_loss=0.3513, pruned_loss=0.09237, over 4695.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3151, pruned_loss=0.07836, over 936517.89 frames. 
], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:29:05,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=71417.33333333333, ans=0.125 +2024-07-27 22:29:06,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=71417.33333333333, ans=0.0 +2024-07-27 22:29:13,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=71430.66666666667, ans=0.0 +2024-07-27 22:29:20,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71444.0, ans=0.125 +2024-07-27 22:29:30,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=71457.33333333333, ans=0.125 +2024-07-27 22:29:30,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.49 vs. limit=10.0 +2024-07-27 22:29:37,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71470.66666666667, ans=0.1 +2024-07-27 22:29:40,987 INFO [train.py:1114] (0/4) Epoch 6, batch 2500, loss[loss=0.2061, simple_loss=0.2929, pruned_loss=0.05965, over 4808.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3141, pruned_loss=0.0772, over 938773.23 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:29:43,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=71484.0, ans=0.125 +2024-07-27 22:29:43,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=71484.0, ans=0.0 +2024-07-27 22:29:56,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.200e+01 6.677e+01 7.747e+01 1.498e+02, threshold=1.335e+02, percent-clipped=1.0 +2024-07-27 22:30:07,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=71537.33333333333, ans=0.2 +2024-07-27 22:30:14,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=71550.66666666667, ans=0.125 +2024-07-27 22:30:14,581 INFO [train.py:1114] (0/4) Epoch 6, batch 2550, loss[loss=0.2031, simple_loss=0.2885, pruned_loss=0.05887, over 4808.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3144, pruned_loss=0.07715, over 938108.52 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:30:18,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=71550.66666666667, ans=22.5 +2024-07-27 22:30:27,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=71577.33333333333, ans=0.125 +2024-07-27 22:30:35,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71590.66666666667, ans=0.1 +2024-07-27 22:30:47,949 INFO [train.py:1114] (0/4) Epoch 6, batch 2600, loss[loss=0.2133, simple_loss=0.3082, pruned_loss=0.05924, over 4904.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3152, pruned_loss=0.07765, over 937236.89 frames. 
], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:31:03,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=71644.0, ans=0.125 +2024-07-27 22:31:05,411 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 6.119e+01 7.086e+01 8.200e+01 1.372e+02, threshold=1.417e+02, percent-clipped=1.0 +2024-07-27 22:31:07,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-07-27 22:31:09,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.97 vs. limit=10.0 +2024-07-27 22:31:11,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=71657.33333333333, ans=0.125 +2024-07-27 22:31:21,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=71670.66666666667, ans=0.025 +2024-07-27 22:31:22,666 INFO [train.py:1114] (0/4) Epoch 6, batch 2650, loss[loss=0.2453, simple_loss=0.32, pruned_loss=0.08527, over 4622.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3149, pruned_loss=0.0773, over 939504.18 frames. ], batch size: 16, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:31:24,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71684.0, ans=0.0 +2024-07-27 22:31:30,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0 +2024-07-27 22:31:30,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=71697.33333333333, ans=0.2 +2024-07-27 22:31:40,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.77 vs. limit=22.5 +2024-07-27 22:31:50,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=71737.33333333333, ans=0.0 +2024-07-27 22:31:56,617 INFO [train.py:1114] (0/4) Epoch 6, batch 2700, loss[loss=0.2623, simple_loss=0.3478, pruned_loss=0.08844, over 4740.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3149, pruned_loss=0.07742, over 939127.14 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:31:57,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=15.0 +2024-07-27 22:32:06,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=71764.0, ans=0.125 +2024-07-27 22:32:12,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.944e+01 6.253e+01 7.142e+01 8.501e+01 1.377e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 22:32:15,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=71790.66666666667, ans=0.125 +2024-07-27 22:32:18,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.80 vs. 
limit=15.0 +2024-07-27 22:32:23,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.01 vs. limit=22.5 +2024-07-27 22:32:31,805 INFO [train.py:1114] (0/4) Epoch 6, batch 2750, loss[loss=0.2335, simple_loss=0.3083, pruned_loss=0.07932, over 4714.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3142, pruned_loss=0.07778, over 939246.99 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 16.0 +2024-07-27 22:32:37,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.47 vs. limit=22.5 +2024-07-27 22:32:47,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71844.0, ans=0.125 +2024-07-27 22:32:49,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=71844.0, ans=0.0 +2024-07-27 22:32:52,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=71857.33333333333, ans=0.04949747468305833 +2024-07-27 22:33:05,115 INFO [train.py:1114] (0/4) Epoch 6, batch 2800, loss[loss=0.3231, simple_loss=0.3654, pruned_loss=0.1404, over 3463.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3148, pruned_loss=0.07909, over 937110.49 frames. ], batch size: 35, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:33:06,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=71884.0, ans=0.0 +2024-07-27 22:33:17,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.95 vs. limit=22.5 +2024-07-27 22:33:22,311 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.346e+01 7.274e+01 8.194e+01 1.245e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-27 22:33:32,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71937.33333333333, ans=0.0 +2024-07-27 22:33:38,921 INFO [train.py:1114] (0/4) Epoch 6, batch 2850, loss[loss=0.2121, simple_loss=0.2867, pruned_loss=0.06874, over 4965.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.315, pruned_loss=0.07858, over 935853.49 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:33:45,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=71964.0, ans=0.07 +2024-07-27 22:33:47,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=71964.0, ans=0.0 +2024-07-27 22:33:52,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=71977.33333333333, ans=0.125 +2024-07-27 22:33:52,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.18 vs. limit=22.5 +2024-07-27 22:33:59,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=71990.66666666667, ans=10.0 +2024-07-27 22:34:06,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.72 vs. 
limit=15.0 +2024-07-27 22:34:13,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=72017.33333333333, ans=0.0 +2024-07-27 22:34:14,078 INFO [train.py:1114] (0/4) Epoch 6, batch 2900, loss[loss=0.2183, simple_loss=0.3055, pruned_loss=0.0656, over 4821.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.314, pruned_loss=0.07746, over 939713.13 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:34:19,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=72017.33333333333, ans=0.125 +2024-07-27 22:34:31,191 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.900e+01 6.392e+01 7.089e+01 1.311e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-27 22:34:32,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=72044.0, ans=0.2 +2024-07-27 22:34:34,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=72057.33333333333, ans=0.0 +2024-07-27 22:34:39,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72057.33333333333, ans=0.125 +2024-07-27 22:34:42,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.89 vs. limit=10.0 +2024-07-27 22:34:44,575 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:34:44,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=72070.66666666667, ans=0.2 +2024-07-27 22:34:47,755 INFO [train.py:1114] (0/4) Epoch 6, batch 2950, loss[loss=0.2236, simple_loss=0.3019, pruned_loss=0.07264, over 4708.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.313, pruned_loss=0.07727, over 938854.22 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:34:48,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.whiten.whitening_limit, batch_count=72084.0, ans=15.0 +2024-07-27 22:34:57,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=72097.33333333333, ans=0.0 +2024-07-27 22:35:05,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72110.66666666667, ans=0.125 +2024-07-27 22:35:07,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=72110.66666666667, ans=0.2 +2024-07-27 22:35:09,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72124.0, ans=0.125 +2024-07-27 22:35:10,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=72124.0, ans=0.0 +2024-07-27 22:35:10,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=72124.0, ans=0.125 +2024-07-27 22:35:13,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.60 vs. 
limit=10.0 +2024-07-27 22:35:23,020 INFO [train.py:1114] (0/4) Epoch 6, batch 3000, loss[loss=0.22, simple_loss=0.3026, pruned_loss=0.06866, over 4752.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3122, pruned_loss=0.07694, over 938704.87 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:35:23,021 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 22:35:28,875 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([4.1237, 4.5750, 4.7528, 5.1186], device='cuda:0') +2024-07-27 22:35:32,306 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.8172, 2.4867, 3.2010, 3.5734, 3.6073, 2.9864, 3.5092, 2.3213], + device='cuda:0') +2024-07-27 22:35:35,849 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.194, simple_loss=0.2973, pruned_loss=0.04533, over 944034.00 frames. +2024-07-27 22:35:35,849 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 22:35:43,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0 +2024-07-27 22:35:45,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=72164.0, ans=0.2 +2024-07-27 22:35:51,196 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:35:53,037 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.896e+01 6.493e+01 7.360e+01 1.026e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-27 22:35:56,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.86 vs. limit=22.5 +2024-07-27 22:35:58,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72190.66666666667, ans=0.1 +2024-07-27 22:36:04,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72204.0, ans=0.1 +2024-07-27 22:36:08,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72204.0, ans=0.1 +2024-07-27 22:36:13,123 INFO [train.py:1114] (0/4) Epoch 6, batch 3050, loss[loss=0.2315, simple_loss=0.3075, pruned_loss=0.07773, over 4641.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3131, pruned_loss=0.07763, over 937282.03 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:36:16,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72217.33333333333, ans=0.125 +2024-07-27 22:36:30,913 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0 +2024-07-27 22:36:33,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.13 vs. 
limit=15.0 +2024-07-27 22:36:33,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=72257.33333333333, ans=15.0 +2024-07-27 22:36:41,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72270.66666666667, ans=0.1 +2024-07-27 22:36:43,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=72270.66666666667, ans=0.2 +2024-07-27 22:36:46,894 INFO [train.py:1114] (0/4) Epoch 6, batch 3100, loss[loss=0.2182, simple_loss=0.3029, pruned_loss=0.06672, over 4639.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3146, pruned_loss=0.07861, over 938101.80 frames. ], batch size: 16, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:36:51,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=72284.0, ans=0.0 +2024-07-27 22:37:00,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0 +2024-07-27 22:37:03,365 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+01 6.089e+01 6.786e+01 8.344e+01 1.227e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-27 22:37:04,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72310.66666666667, ans=0.1 +2024-07-27 22:37:04,895 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=7.110e-02 +2024-07-27 22:37:07,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=72324.0, ans=0.04949747468305833 +2024-07-27 22:37:10,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=72324.0, ans=0.125 +2024-07-27 22:37:19,943 INFO [train.py:1114] (0/4) Epoch 6, batch 3150, loss[loss=0.223, simple_loss=0.316, pruned_loss=0.06498, over 4598.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.314, pruned_loss=0.07772, over 938071.14 frames. ], batch size: 17, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:37:20,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=72350.66666666667, ans=0.2 +2024-07-27 22:37:24,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=72350.66666666667, ans=0.125 +2024-07-27 22:37:26,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=72364.0, ans=0.125 +2024-07-27 22:37:32,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=72364.0, ans=0.125 +2024-07-27 22:37:41,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=72390.66666666667, ans=0.125 +2024-07-27 22:37:42,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.62 vs. 
limit=15.0 +2024-07-27 22:37:50,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=72404.0, ans=0.125 +2024-07-27 22:37:55,005 INFO [train.py:1114] (0/4) Epoch 6, batch 3200, loss[loss=0.2365, simple_loss=0.3242, pruned_loss=0.07446, over 4830.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3131, pruned_loss=0.07654, over 939688.97 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:38:02,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=72430.66666666667, ans=0.0 +2024-07-27 22:38:07,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=72444.0, ans=0.125 +2024-07-27 22:38:11,656 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.899e+01 6.448e+01 7.393e+01 1.095e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-27 22:38:12,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.51 vs. limit=22.5 +2024-07-27 22:38:17,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.70 vs. limit=22.5 +2024-07-27 22:38:18,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.66 vs. limit=10.0 +2024-07-27 22:38:28,301 INFO [train.py:1114] (0/4) Epoch 6, batch 3250, loss[loss=0.2473, simple_loss=0.3385, pruned_loss=0.07805, over 4935.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3138, pruned_loss=0.07633, over 940808.72 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:38:41,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.86 vs. limit=15.0 +2024-07-27 22:38:42,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.75 vs. limit=15.0 +2024-07-27 22:38:43,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.97 vs. limit=15.0 +2024-07-27 22:39:00,893 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.85 vs. limit=12.0 +2024-07-27 22:39:01,869 INFO [train.py:1114] (0/4) Epoch 6, batch 3300, loss[loss=0.3132, simple_loss=0.3797, pruned_loss=0.1234, over 4752.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3129, pruned_loss=0.07632, over 941150.76 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:39:06,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72550.66666666667, ans=0.125 +2024-07-27 22:39:06,912 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.23 vs. 
limit=12.0 +2024-07-27 22:39:15,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=72564.0, ans=0.0 +2024-07-27 22:39:17,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=72577.33333333333, ans=0.125 +2024-07-27 22:39:20,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.037e+01 6.381e+01 7.466e+01 1.307e+02, threshold=1.276e+02, percent-clipped=1.0 +2024-07-27 22:39:26,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=72590.66666666667, ans=0.0 +2024-07-27 22:39:29,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0 +2024-07-27 22:39:30,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.87 vs. limit=10.0 +2024-07-27 22:39:31,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72604.0, ans=0.125 +2024-07-27 22:39:33,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-07-27 22:39:36,879 INFO [train.py:1114] (0/4) Epoch 6, batch 3350, loss[loss=0.2872, simple_loss=0.3586, pruned_loss=0.1079, over 4836.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3152, pruned_loss=0.07809, over 938985.08 frames. ], batch size: 18, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:40:06,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72670.66666666667, ans=0.125 +2024-07-27 22:40:11,972 INFO [train.py:1114] (0/4) Epoch 6, batch 3400, loss[loss=0.1679, simple_loss=0.2385, pruned_loss=0.04868, over 4801.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3147, pruned_loss=0.07813, over 937626.44 frames. ], batch size: 11, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:40:22,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=72697.33333333333, ans=0.0 +2024-07-27 22:40:28,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.919e+01 6.608e+01 7.688e+01 1.157e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-27 22:40:29,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-07-27 22:40:31,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.19 vs. 
+2024-07-27 22:40:34,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=72724.0, ans=0.0
+2024-07-27 22:40:34,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=72724.0, ans=0.05
+2024-07-27 22:40:34,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=72724.0, ans=0.1
+2024-07-27 22:40:39,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72737.33333333333, ans=0.125
+2024-07-27 22:40:42,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0
+2024-07-27 22:40:44,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=72750.66666666667, ans=0.025
+2024-07-27 22:40:44,752 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:40:45,211 INFO [train.py:1114] (0/4) Epoch 6, batch 3450, loss[loss=0.2643, simple_loss=0.328, pruned_loss=0.1003, over 4764.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3157, pruned_loss=0.07845, over 938159.07 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:40:48,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72750.66666666667, ans=0.0
+2024-07-27 22:40:51,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72764.0, ans=0.0
+2024-07-27 22:40:56,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=72764.0, ans=0.125
+2024-07-27 22:41:11,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=72790.66666666667, ans=0.125
+2024-07-27 22:41:18,861 INFO [train.py:1114] (0/4) Epoch 6, batch 3500, loss[loss=0.1833, simple_loss=0.2721, pruned_loss=0.04719, over 4925.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3144, pruned_loss=0.07731, over 938548.87 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:41:20,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72817.33333333333, ans=0.125
+2024-07-27 22:41:27,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=72830.66666666667, ans=0.125
+2024-07-27 22:41:37,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0
+2024-07-27 22:41:37,349 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.111e+01 6.112e+01 6.695e+01 8.214e+01 1.239e+02, threshold=1.339e+02, percent-clipped=0.0
+2024-07-27 22:41:46,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=12.0
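Nearly every `scaling.py:214` line records a `ScheduledFloat`: a regularization constant (balancer probabilities, skip rates, dropout probabilities, attention rates) whose current value `ans` is a function of `batch_count` rather than a fixed hyperparameter. A minimal sketch of such a piecewise-linear schedule (the breakpoints below are invented for illustration; the real `ScheduledFloat` in icefall's scaling.py carries additional machinery):

```python
class PiecewiseLinearFloat:
    """Minimal sketch of a ScheduledFloat-style schedule: interpolate
    linearly between (batch_count, value) breakpoints and hold the last
    value afterwards. Illustrative only."""

    def __init__(self, *points: tuple[float, float]):
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
        return pts[-1][1]

# e.g. a skip rate that decays from 0.3 to 0.0 over the first 20k batches
# (made-up breakpoints, not the recipe's actual schedule):
skip_rate = PiecewiseLinearFloat((0.0, 0.3), (20000.0, 0.0))
print(skip_rate(72724.0))  # -> 0.0, constant after the last breakpoint
```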
+2024-07-27 22:41:48,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=72870.66666666667, ans=0.125
+2024-07-27 22:41:58,738 INFO [train.py:1114] (0/4) Epoch 6, batch 3550, loss[loss=0.245, simple_loss=0.3326, pruned_loss=0.07871, over 4660.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3133, pruned_loss=0.07642, over 939299.28 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:42:02,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=72884.0, ans=0.1
+2024-07-27 22:42:05,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0
+2024-07-27 22:42:10,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=72897.33333333333, ans=0.125
+2024-07-27 22:42:11,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.05 vs. limit=15.0
+2024-07-27 22:42:19,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=72924.0, ans=0.95
+2024-07-27 22:42:20,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=72924.0, ans=0.025
+2024-07-27 22:42:22,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=72924.0, ans=0.05
+2024-07-27 22:42:24,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72924.0, ans=0.1
+2024-07-27 22:42:25,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=72937.33333333333, ans=0.125
+2024-07-27 22:42:29,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.75 vs. limit=10.0
+2024-07-27 22:42:30,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=72937.33333333333, ans=0.0
+2024-07-27 22:42:31,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72950.66666666667, ans=0.1
+2024-07-27 22:42:31,735 INFO [train.py:1114] (0/4) Epoch 6, batch 3600, loss[loss=0.1989, simple_loss=0.2896, pruned_loss=0.05408, over 4968.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.313, pruned_loss=0.0759, over 940880.85 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:42:32,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=72950.66666666667, ans=22.5
+2024-07-27 22:42:35,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=12.0
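The `scaling.py:1024` lines are diagnostics from the model's whitening modules: each computes a scalar "whiteness" metric from the covariance of a module's activations and compares it against a limit, with a corrective penalty engaging only once the metric exceeds the limit; the `...self_attn1.whiten.whitening_limit, ans=22.5` entry just above shows that the limit itself follows a schedule. One plausible form for such a metric (an assumption for illustration, not necessarily the exact scaling.py formula) is the mean squared eigenvalue of the covariance divided by the squared mean eigenvalue, which equals 1.0 for perfectly white features and grows as variance concentrates in a few directions:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """Illustrative whiteness metric (assumed form): mean squared eigenvalue
    of the feature covariance over the squared mean eigenvalue. 1.0 means the
    covariance is a multiple of the identity ("white" activations)."""
    x = x.reshape(-1, x.shape[-1])   # (frames, num_channels)
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]     # (C, C) covariance
    d = cov.shape[0]
    # (cov * cov).sum() == trace(cov @ cov) == sum of squared eigenvalues
    return ((cov * cov).sum() / d / (cov.diag().sum() / d) ** 2).item()

white = torch.randn(10000, 384)
print(whitening_metric(white))  # close to 1.0 for white Gaussian features
```

Entries where the metric exceeds its limit mark modules whose activations have drifted too far from white and are being pushed back.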
+2024-07-27 22:42:42,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=72964.0, ans=0.125
+2024-07-27 22:42:43,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=72964.0, ans=0.125
+2024-07-27 22:42:48,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.822e+01 6.148e+01 6.891e+01 7.768e+01 1.144e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 22:42:50,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=72990.66666666667, ans=0.0
+2024-07-27 22:43:07,005 INFO [train.py:1114] (0/4) Epoch 6, batch 3650, loss[loss=0.2455, simple_loss=0.3463, pruned_loss=0.07238, over 4905.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3114, pruned_loss=0.07526, over 941528.87 frames. ], batch size: 15, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:43:13,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.53 vs. limit=6.0
+2024-07-27 22:43:40,183 INFO [train.py:1114] (0/4) Epoch 6, batch 3700, loss[loss=0.2161, simple_loss=0.3048, pruned_loss=0.06368, over 4934.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3107, pruned_loss=0.07476, over 942077.95 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:43:41,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=73084.0, ans=0.125
+2024-07-27 22:43:42,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=73084.0, ans=0.2
+2024-07-27 22:43:53,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=73110.66666666667, ans=0.125
+2024-07-27 22:43:55,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=73110.66666666667, ans=0.125
+2024-07-27 22:43:56,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=73110.66666666667, ans=0.0
+2024-07-27 22:43:56,985 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.084e+01 6.656e+01 7.917e+01 1.226e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 22:44:04,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=73124.0, ans=0.04949747468305833
+2024-07-27 22:44:12,684 INFO [train.py:1114] (0/4) Epoch 6, batch 3750, loss[loss=0.2214, simple_loss=0.2943, pruned_loss=0.07425, over 4799.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3115, pruned_loss=0.07492, over 943760.60 frames. ], batch size: 11, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:44:19,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=73164.0, ans=0.0
+2024-07-27 22:44:22,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=73164.0, ans=0.0
+2024-07-27 22:44:33,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.87 vs. 
limit=15.0 +2024-07-27 22:44:35,551 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0 +2024-07-27 22:44:38,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.28 vs. limit=15.0 +2024-07-27 22:44:47,690 INFO [train.py:1114] (0/4) Epoch 6, batch 3800, loss[loss=0.215, simple_loss=0.307, pruned_loss=0.0615, over 4813.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3109, pruned_loss=0.07542, over 941842.64 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:44:47,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=73217.33333333333, ans=0.0 +2024-07-27 22:44:50,569 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=12.0 +2024-07-27 22:44:52,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-07-27 22:44:54,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=73230.66666666667, ans=0.125 +2024-07-27 22:44:58,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=73230.66666666667, ans=0.125 +2024-07-27 22:45:02,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=73244.0, ans=0.125 +2024-07-27 22:45:04,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=73244.0, ans=0.025 +2024-07-27 22:45:04,710 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 6.031e+01 6.654e+01 7.619e+01 1.236e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 22:45:13,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=73270.66666666667, ans=0.125 +2024-07-27 22:45:14,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=73270.66666666667, ans=0.125 +2024-07-27 22:45:14,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73270.66666666667, ans=0.125 +2024-07-27 22:45:20,991 INFO [train.py:1114] (0/4) Epoch 6, batch 3850, loss[loss=0.2063, simple_loss=0.2905, pruned_loss=0.06105, over 4900.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3107, pruned_loss=0.07562, over 942980.31 frames. 
], batch size: 17, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:45:26,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=73284.0, ans=0.0 +2024-07-27 22:45:31,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=73297.33333333333, ans=0.0 +2024-07-27 22:45:34,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73310.66666666667, ans=0.1 +2024-07-27 22:45:34,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73310.66666666667, ans=0.125 +2024-07-27 22:45:35,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=73310.66666666667, ans=0.125 +2024-07-27 22:45:37,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=73310.66666666667, ans=0.125 +2024-07-27 22:45:45,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=73324.0, ans=0.0 +2024-07-27 22:45:56,571 INFO [train.py:1114] (0/4) Epoch 6, batch 3900, loss[loss=0.2376, simple_loss=0.3286, pruned_loss=0.07326, over 4802.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3114, pruned_loss=0.07543, over 942953.21 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:45:59,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.51 vs. limit=6.0 +2024-07-27 22:46:04,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.21 vs. limit=15.0 +2024-07-27 22:46:12,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.89 vs. limit=6.0 +2024-07-27 22:46:13,366 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.979e+01 6.121e+01 6.587e+01 7.635e+01 1.146e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-27 22:46:14,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=73377.33333333333, ans=0.0 +2024-07-27 22:46:19,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=73390.66666666667, ans=0.125 +2024-07-27 22:46:26,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=73404.0, ans=10.0 +2024-07-27 22:46:29,900 INFO [train.py:1114] (0/4) Epoch 6, batch 3950, loss[loss=0.2108, simple_loss=0.2979, pruned_loss=0.06184, over 4840.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3107, pruned_loss=0.07503, over 945023.66 frames. 
], batch size: 16, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:46:33,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=73417.33333333333, ans=0.125 +2024-07-27 22:46:34,625 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:46:38,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=73430.66666666667, ans=0.0 +2024-07-27 22:46:45,245 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:46:56,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=73470.66666666667, ans=0.0 +2024-07-27 22:47:04,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=16.95 vs. limit=15.0 +2024-07-27 22:47:05,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.61 vs. limit=15.0 +2024-07-27 22:47:05,386 INFO [train.py:1114] (0/4) Epoch 6, batch 4000, loss[loss=0.201, simple_loss=0.2841, pruned_loss=0.05891, over 4777.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3116, pruned_loss=0.07611, over 941421.83 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:47:09,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=73484.0, ans=0.125 +2024-07-27 22:47:12,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73497.33333333333, ans=0.1 +2024-07-27 22:47:16,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=73497.33333333333, ans=0.125 +2024-07-27 22:47:20,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=73510.66666666667, ans=0.0 +2024-07-27 22:47:21,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=73510.66666666667, ans=0.0 +2024-07-27 22:47:21,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=73510.66666666667, ans=0.0 +2024-07-27 22:47:22,994 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 6.236e+01 6.803e+01 7.982e+01 1.360e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-27 22:47:32,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=73537.33333333333, ans=0.125 +2024-07-27 22:47:35,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=73537.33333333333, ans=0.2 +2024-07-27 22:47:39,851 INFO [train.py:1114] (0/4) Epoch 6, batch 4050, loss[loss=0.321, simple_loss=0.3559, pruned_loss=0.143, over 3211.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3117, pruned_loss=0.07673, over 939751.13 frames. 
], batch size: 35, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:47:46,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=73564.0, ans=0.125 +2024-07-27 22:47:59,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-07-27 22:48:14,299 INFO [train.py:1114] (0/4) Epoch 6, batch 4100, loss[loss=0.2671, simple_loss=0.3436, pruned_loss=0.09535, over 4911.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3125, pruned_loss=0.07724, over 938655.03 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:48:17,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73617.33333333333, ans=0.125 +2024-07-27 22:48:20,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.83 vs. limit=22.5 +2024-07-27 22:48:31,602 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 6.159e+01 6.782e+01 8.525e+01 1.477e+02, threshold=1.356e+02, percent-clipped=2.0 +2024-07-27 22:48:39,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73657.33333333333, ans=0.1 +2024-07-27 22:48:49,530 INFO [train.py:1114] (0/4) Epoch 6, batch 4150, loss[loss=0.1977, simple_loss=0.2921, pruned_loss=0.0517, over 4816.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3109, pruned_loss=0.07612, over 938295.03 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:48:49,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=73684.0, ans=0.125 +2024-07-27 22:48:58,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73697.33333333333, ans=0.125 +2024-07-27 22:49:01,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.95 vs. limit=15.0 +2024-07-27 22:49:02,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=73710.66666666667, ans=0.125 +2024-07-27 22:49:22,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73737.33333333333, ans=0.125 +2024-07-27 22:49:23,828 INFO [train.py:1114] (0/4) Epoch 6, batch 4200, loss[loss=0.2582, simple_loss=0.3419, pruned_loss=0.08724, over 4904.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3105, pruned_loss=0.07599, over 939832.93 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:49:34,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.76 vs. limit=15.0 +2024-07-27 22:49:39,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.13 vs. 
limit=22.5 +2024-07-27 22:49:40,725 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.760e+01 6.554e+01 7.096e+01 1.149e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-27 22:49:47,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=73790.66666666667, ans=0.125 +2024-07-27 22:49:53,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=73804.0, ans=0.125 +2024-07-27 22:49:57,280 INFO [train.py:1114] (0/4) Epoch 6, batch 4250, loss[loss=0.2241, simple_loss=0.314, pruned_loss=0.06714, over 4639.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3108, pruned_loss=0.07595, over 940971.75 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:49:59,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73817.33333333333, ans=0.1 +2024-07-27 22:50:03,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=73830.66666666667, ans=0.125 +2024-07-27 22:50:09,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.82 vs. limit=15.0 +2024-07-27 22:50:22,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=73857.33333333333, ans=22.5 +2024-07-27 22:50:32,500 INFO [train.py:1114] (0/4) Epoch 6, batch 4300, loss[loss=0.2316, simple_loss=0.32, pruned_loss=0.07163, over 4758.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3116, pruned_loss=0.0761, over 940078.37 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:50:37,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=73884.0, ans=0.125 +2024-07-27 22:50:42,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=73897.33333333333, ans=0.125 +2024-07-27 22:50:46,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0 +2024-07-27 22:50:46,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.10 vs. limit=6.0 +2024-07-27 22:50:49,612 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.051e+01 7.094e+01 8.613e+01 1.493e+02, threshold=1.419e+02, percent-clipped=5.0 +2024-07-27 22:51:08,576 INFO [train.py:1114] (0/4) Epoch 6, batch 4350, loss[loss=0.2535, simple_loss=0.332, pruned_loss=0.08748, over 4762.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3126, pruned_loss=0.07631, over 940453.48 frames. 
], batch size: 13, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:51:17,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=73964.0, ans=0.2 +2024-07-27 22:51:21,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73964.0, ans=0.1 +2024-07-27 22:51:31,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=73990.66666666667, ans=0.0 +2024-07-27 22:51:43,407 INFO [train.py:1114] (0/4) Epoch 6, batch 4400, loss[loss=0.2836, simple_loss=0.3582, pruned_loss=0.1045, over 4811.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3125, pruned_loss=0.07643, over 940363.65 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:51:53,312 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-07-27 22:52:00,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 5.949e+01 6.629e+01 7.705e+01 1.284e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 22:52:01,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=74044.0, ans=0.125 +2024-07-27 22:52:06,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=74057.33333333333, ans=0.125 +2024-07-27 22:52:09,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=74070.66666666667, ans=0.125 +2024-07-27 22:52:10,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74070.66666666667, ans=0.1 +2024-07-27 22:52:17,043 INFO [train.py:1114] (0/4) Epoch 6, batch 4450, loss[loss=0.2161, simple_loss=0.2836, pruned_loss=0.07431, over 4944.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3125, pruned_loss=0.07675, over 938266.67 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:52:18,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=74084.0, ans=0.05 +2024-07-27 22:52:21,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.44 vs. limit=15.0 +2024-07-27 22:52:30,441 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.34 vs. limit=15.0 +2024-07-27 22:52:36,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74110.66666666667, ans=0.0 +2024-07-27 22:52:43,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.07 vs. limit=22.5 +2024-07-27 22:52:50,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74137.33333333333, ans=0.1 +2024-07-27 22:52:51,879 INFO [train.py:1114] (0/4) Epoch 6, batch 4500, loss[loss=0.2315, simple_loss=0.3196, pruned_loss=0.07165, over 4739.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3129, pruned_loss=0.07677, over 937647.53 frames. 
], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:52:54,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.55 vs. limit=22.5 +2024-07-27 22:52:56,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=74150.66666666667, ans=0.125 +2024-07-27 22:52:57,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74164.0, ans=0.0 +2024-07-27 22:52:59,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=74164.0, ans=0.05 +2024-07-27 22:53:05,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=74177.33333333333, ans=0.125 +2024-07-27 22:53:08,911 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.811e+01 6.353e+01 6.989e+01 9.336e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-27 22:53:18,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=74204.0, ans=0.2 +2024-07-27 22:53:24,773 INFO [train.py:1114] (0/4) Epoch 6, batch 4550, loss[loss=0.2606, simple_loss=0.3492, pruned_loss=0.08601, over 4894.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3124, pruned_loss=0.07628, over 939737.01 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:53:28,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=74217.33333333333, ans=0.125 +2024-07-27 22:53:30,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0 +2024-07-27 22:53:51,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=74270.66666666667, ans=0.125 +2024-07-27 22:53:53,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74270.66666666667, ans=0.125 +2024-07-27 22:53:53,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74270.66666666667, ans=0.125 +2024-07-27 22:53:58,220 INFO [train.py:1114] (0/4) Epoch 6, batch 4600, loss[loss=0.2663, simple_loss=0.3598, pruned_loss=0.08638, over 4482.00 frames. ], tot_loss[loss=0.232, simple_loss=0.312, pruned_loss=0.07593, over 937972.15 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:54:12,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.66 vs. 
limit=22.5 +2024-07-27 22:54:18,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 6.152e+01 6.770e+01 8.392e+01 1.380e+02, threshold=1.354e+02, percent-clipped=1.0 +2024-07-27 22:54:21,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=74324.0, ans=0.125 +2024-07-27 22:54:25,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=74324.0, ans=0.125 +2024-07-27 22:54:28,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=74337.33333333333, ans=0.125 +2024-07-27 22:54:34,076 INFO [train.py:1114] (0/4) Epoch 6, batch 4650, loss[loss=0.222, simple_loss=0.3093, pruned_loss=0.0674, over 4854.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3125, pruned_loss=0.07619, over 939813.73 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:54:34,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.94 vs. limit=6.0 +2024-07-27 22:54:34,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74350.66666666667, ans=0.1 +2024-07-27 22:54:44,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=74364.0, ans=0.125 +2024-07-27 22:54:48,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74377.33333333333, ans=0.1 +2024-07-27 22:55:09,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=74390.66666666667, ans=0.2 +2024-07-27 22:55:09,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=74390.66666666667, ans=0.0 +2024-07-27 22:55:10,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.67 vs. limit=22.5 +2024-07-27 22:55:38,802 INFO [train.py:1114] (0/4) Epoch 6, batch 4700, loss[loss=0.2161, simple_loss=0.2889, pruned_loss=0.07166, over 4705.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3116, pruned_loss=0.07637, over 937726.15 frames. 
], batch size: 11, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:55:38,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=74417.33333333333, ans=0.0 +2024-07-27 22:55:41,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=74417.33333333333, ans=0.0 +2024-07-27 22:55:44,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=74417.33333333333, ans=0.125 +2024-07-27 22:55:48,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=74430.66666666667, ans=0.125 +2024-07-27 22:55:48,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74430.66666666667, ans=0.1 +2024-07-27 22:55:52,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=74444.0, ans=0.125 +2024-07-27 22:55:57,184 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.070e+01 6.903e+01 7.937e+01 1.102e+02, threshold=1.381e+02, percent-clipped=0.0 +2024-07-27 22:55:58,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74444.0, ans=0.0 +2024-07-27 22:56:24,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.27 vs. limit=10.0 +2024-07-27 22:56:43,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=74470.66666666667, ans=0.0 +2024-07-27 22:57:01,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.68 vs. limit=22.5 +2024-07-27 22:57:01,164 INFO [train.py:1114] (0/4) Epoch 6, batch 4750, loss[loss=0.2747, simple_loss=0.3471, pruned_loss=0.1011, over 4452.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3124, pruned_loss=0.07654, over 935392.14 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:57:05,067 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0 +2024-07-27 22:57:06,310 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.02 vs. limit=22.5 +2024-07-27 22:57:06,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74484.0, ans=0.1 +2024-07-27 22:57:07,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74497.33333333333, ans=0.1 +2024-07-27 22:57:49,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=12.0 +2024-07-27 22:57:58,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74537.33333333333, ans=0.1 +2024-07-27 22:58:00,094 INFO [train.py:1114] (0/4) Epoch 6, batch 4800, loss[loss=0.2324, simple_loss=0.3235, pruned_loss=0.0706, over 4697.00 frames. 
], tot_loss[loss=0.2316, simple_loss=0.3112, pruned_loss=0.07595, over 932833.28 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:58:08,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=74564.0, ans=0.125 +2024-07-27 22:58:10,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=15.0 +2024-07-27 22:58:27,445 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.139e+01 5.953e+01 6.774e+01 8.357e+01 1.268e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-27 22:58:27,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=74577.33333333333, ans=0.125 +2024-07-27 22:58:36,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=74604.0, ans=0.04949747468305833 +2024-07-27 22:58:38,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.43 vs. limit=15.0 +2024-07-27 22:58:43,681 INFO [train.py:1114] (0/4) Epoch 6, batch 4850, loss[loss=0.2593, simple_loss=0.339, pruned_loss=0.08976, over 4748.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3122, pruned_loss=0.07667, over 931712.80 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:58:44,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=15.0 +2024-07-27 22:59:08,169 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-56000.pt +2024-07-27 22:59:22,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=74670.66666666667, ans=0.125 +2024-07-27 22:59:28,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.36 vs. limit=15.0 +2024-07-27 22:59:29,834 INFO [train.py:1114] (0/4) Epoch 6, batch 4900, loss[loss=0.2172, simple_loss=0.2927, pruned_loss=0.07088, over 4762.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3104, pruned_loss=0.07565, over 933637.76 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:59:30,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.03 vs. 
limit=15.0 +2024-07-27 22:59:39,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=74697.33333333333, ans=0.0 +2024-07-27 22:59:44,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=74710.66666666667, ans=0.09899494936611666 +2024-07-27 22:59:48,778 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.911e+01 6.095e+01 6.974e+01 8.315e+01 1.441e+02, threshold=1.395e+02, percent-clipped=3.0 +2024-07-27 22:59:56,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74724.0, ans=0.1 +2024-07-27 22:59:57,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=74724.0, ans=0.125 +2024-07-27 23:00:07,417 INFO [train.py:1114] (0/4) Epoch 6, batch 4950, loss[loss=0.3458, simple_loss=0.3888, pruned_loss=0.1514, over 3432.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3129, pruned_loss=0.0773, over 930488.81 frames. ], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:00:28,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=74777.33333333333, ans=0.2 +2024-07-27 23:00:47,378 INFO [train.py:1114] (0/4) Epoch 6, batch 5000, loss[loss=0.2395, simple_loss=0.3236, pruned_loss=0.07773, over 4667.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3128, pruned_loss=0.07691, over 934458.71 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:00:55,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74830.66666666667, ans=0.1 +2024-07-27 23:00:57,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=74830.66666666667, ans=0.125 +2024-07-27 23:01:05,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.345e+01 7.582e+01 9.212e+01 1.315e+02, threshold=1.516e+02, percent-clipped=0.0 +2024-07-27 23:01:17,719 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.93 vs. limit=15.0 +2024-07-27 23:01:24,713 INFO [train.py:1114] (0/4) Epoch 6, batch 5050, loss[loss=0.1782, simple_loss=0.2611, pruned_loss=0.04766, over 4857.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3111, pruned_loss=0.0757, over 936907.13 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:01:29,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=74884.0, ans=0.125 +2024-07-27 23:01:32,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74897.33333333333, ans=0.125 +2024-07-27 23:01:38,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.79 vs. limit=5.0 +2024-07-27 23:01:39,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=74910.66666666667, ans=0.0 +2024-07-27 23:01:53,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.99 vs. 
limit=15.0 +2024-07-27 23:02:00,474 INFO [train.py:1114] (0/4) Epoch 6, batch 5100, loss[loss=0.2066, simple_loss=0.2888, pruned_loss=0.06218, over 4785.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3122, pruned_loss=0.07704, over 934747.09 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:02:12,534 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:02:13,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=74964.0, ans=0.0 +2024-07-27 23:02:23,239 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.910e+01 5.981e+01 6.894e+01 7.665e+01 1.178e+02, threshold=1.379e+02, percent-clipped=0.0 +2024-07-27 23:02:36,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=75004.0, ans=0.0 +2024-07-27 23:02:38,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=75004.0, ans=0.025 +2024-07-27 23:02:38,921 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:02:39,310 INFO [train.py:1114] (0/4) Epoch 6, batch 5150, loss[loss=0.1929, simple_loss=0.286, pruned_loss=0.04989, over 4848.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3137, pruned_loss=0.07752, over 935616.02 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:02:42,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=75017.33333333333, ans=0.95 +2024-07-27 23:02:57,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=15.0 +2024-07-27 23:03:04,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-27 23:03:07,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.41 vs. limit=22.5 +2024-07-27 23:03:12,931 INFO [train.py:1114] (0/4) Epoch 6, batch 5200, loss[loss=0.2426, simple_loss=0.3298, pruned_loss=0.07771, over 4670.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3135, pruned_loss=0.07742, over 936032.03 frames. 
], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:03:25,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=75097.33333333333, ans=0.0 +2024-07-27 23:03:27,336 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:03:31,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=75110.66666666667, ans=0.0 +2024-07-27 23:03:31,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=75110.66666666667, ans=0.2 +2024-07-27 23:03:32,620 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.053e+01 6.671e+01 7.847e+01 1.456e+02, threshold=1.334e+02, percent-clipped=1.0 +2024-07-27 23:03:38,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.89 vs. limit=15.0 +2024-07-27 23:03:39,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=75124.0, ans=0.0 +2024-07-27 23:03:46,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75137.33333333333, ans=0.125 +2024-07-27 23:03:48,751 INFO [train.py:1114] (0/4) Epoch 6, batch 5250, loss[loss=0.1915, simple_loss=0.2761, pruned_loss=0.05348, over 4890.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3116, pruned_loss=0.07649, over 935482.81 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:03:53,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=12.0 +2024-07-27 23:04:14,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=75190.66666666667, ans=0.07 +2024-07-27 23:04:15,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75190.66666666667, ans=0.125 +2024-07-27 23:04:22,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=75204.0, ans=0.125 +2024-07-27 23:04:24,655 INFO [train.py:1114] (0/4) Epoch 6, batch 5300, loss[loss=0.2451, simple_loss=0.3282, pruned_loss=0.08102, over 4632.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3115, pruned_loss=0.07713, over 934020.41 frames. 
], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:04:34,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=75230.66666666667, ans=0.0 +2024-07-27 23:04:40,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75244.0, ans=0.125 +2024-07-27 23:04:41,938 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.955e+01 6.651e+01 7.573e+01 1.282e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 23:04:47,350 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:04:47,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75257.33333333333, ans=0.1 +2024-07-27 23:04:57,855 INFO [train.py:1114] (0/4) Epoch 6, batch 5350, loss[loss=0.176, simple_loss=0.2527, pruned_loss=0.04962, over 4531.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3111, pruned_loss=0.07614, over 936073.26 frames. ], batch size: 10, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:05:01,887 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:05:05,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75297.33333333333, ans=0.1 +2024-07-27 23:05:12,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=12.0 +2024-07-27 23:05:27,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=75337.33333333333, ans=0.2 +2024-07-27 23:05:33,179 INFO [train.py:1114] (0/4) Epoch 6, batch 5400, loss[loss=0.2853, simple_loss=0.3502, pruned_loss=0.1102, over 4264.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3132, pruned_loss=0.07767, over 930419.52 frames. ], batch size: 25, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:05:50,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.962e+01 6.573e+01 7.607e+01 1.590e+02, threshold=1.315e+02, percent-clipped=1.0 +2024-07-27 23:05:51,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=75377.33333333333, ans=0.125 +2024-07-27 23:05:53,113 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:06:08,281 INFO [train.py:1114] (0/4) Epoch 6, batch 5450, loss[loss=0.197, simple_loss=0.2782, pruned_loss=0.05792, over 4703.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.312, pruned_loss=0.07674, over 933403.04 frames. ], batch size: 11, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:06:21,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=75444.0, ans=0.0 +2024-07-27 23:06:28,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.65 vs. 
limit=22.5 +2024-07-27 23:06:30,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=75457.33333333333, ans=0.2 +2024-07-27 23:06:36,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=75470.66666666667, ans=0.125 +2024-07-27 23:06:37,029 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:06:37,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75470.66666666667, ans=0.1 +2024-07-27 23:06:42,321 INFO [train.py:1114] (0/4) Epoch 6, batch 5500, loss[loss=0.2816, simple_loss=0.348, pruned_loss=0.1076, over 4392.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3114, pruned_loss=0.07647, over 931221.79 frames. ], batch size: 26, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:06:43,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75484.0, ans=0.1 +2024-07-27 23:06:46,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=75484.0, ans=0.125 +2024-07-27 23:06:59,686 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 6.092e+01 6.682e+01 7.913e+01 1.212e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-27 23:07:07,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75524.0, ans=0.125 +2024-07-27 23:07:09,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=75537.33333333333, ans=0.0 +2024-07-27 23:07:20,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=75537.33333333333, ans=0.95 +2024-07-27 23:07:21,739 INFO [train.py:1114] (0/4) Epoch 6, batch 5550, loss[loss=0.2051, simple_loss=0.2989, pruned_loss=0.05561, over 4707.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3103, pruned_loss=0.07572, over 933501.28 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:07:27,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=75564.0, ans=0.0 +2024-07-27 23:07:32,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=75564.0, ans=0.0 +2024-07-27 23:07:35,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=75577.33333333333, ans=0.0 +2024-07-27 23:07:54,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75617.33333333333, ans=0.125 +2024-07-27 23:07:54,754 INFO [train.py:1114] (0/4) Epoch 6, batch 5600, loss[loss=0.2347, simple_loss=0.3332, pruned_loss=0.0681, over 4744.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3109, pruned_loss=0.07601, over 934542.04 frames. 
], batch size: 14, lr: 1.23e-02, grad_scale: 64.0 +2024-07-27 23:07:54,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=75617.33333333333, ans=0.125 +2024-07-27 23:07:56,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75617.33333333333, ans=0.1 +2024-07-27 23:07:56,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=75617.33333333333, ans=0.0 +2024-07-27 23:08:08,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=15.0 +2024-07-27 23:08:13,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=75644.0, ans=0.0 +2024-07-27 23:08:16,469 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.881e+01 6.141e+01 6.729e+01 7.455e+01 1.025e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-27 23:08:22,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=75657.33333333333, ans=0.2 +2024-07-27 23:08:23,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=75657.33333333333, ans=0.0 +2024-07-27 23:08:25,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=75670.66666666667, ans=0.0 +2024-07-27 23:08:28,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=75670.66666666667, ans=0.125 +2024-07-27 23:08:28,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=75670.66666666667, ans=0.0 +2024-07-27 23:08:32,422 INFO [train.py:1114] (0/4) Epoch 6, batch 5650, loss[loss=0.2564, simple_loss=0.3356, pruned_loss=0.08862, over 4503.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3098, pruned_loss=0.07529, over 936783.78 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 64.0 +2024-07-27 23:08:36,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75684.0, ans=0.125 +2024-07-27 23:08:37,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=75684.0, ans=0.2 +2024-07-27 23:08:39,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=75697.33333333333, ans=0.125 +2024-07-27 23:08:42,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75697.33333333333, ans=0.125 +2024-07-27 23:08:43,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=75697.33333333333, ans=0.2 +2024-07-27 23:08:45,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75710.66666666667, ans=0.1 +2024-07-27 23:09:05,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.89 vs. 
limit=15.0 +2024-07-27 23:09:06,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=75737.33333333333, ans=0.0 +2024-07-27 23:09:08,124 INFO [train.py:1114] (0/4) Epoch 6, batch 5700, loss[loss=0.289, simple_loss=0.3492, pruned_loss=0.1144, over 4692.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3103, pruned_loss=0.07564, over 937805.92 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:09:26,486 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.584e+01 7.686e+01 8.929e+01 1.310e+02, threshold=1.537e+02, percent-clipped=0.0 +2024-07-27 23:09:30,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=75790.66666666667, ans=0.0 +2024-07-27 23:09:32,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=75790.66666666667, ans=0.025 +2024-07-27 23:09:37,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=75804.0, ans=0.125 +2024-07-27 23:09:40,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=75804.0, ans=0.125 +2024-07-27 23:09:41,525 INFO [train.py:1114] (0/4) Epoch 6, batch 5750, loss[loss=0.2303, simple_loss=0.3147, pruned_loss=0.07295, over 4703.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.31, pruned_loss=0.07511, over 937914.67 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:09:41,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0 +2024-07-27 23:09:53,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=75830.66666666667, ans=0.07 +2024-07-27 23:09:58,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=75844.0, ans=0.0 +2024-07-27 23:10:15,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=75870.66666666667, ans=0.2 +2024-07-27 23:10:16,846 INFO [train.py:1114] (0/4) Epoch 6, batch 5800, loss[loss=0.2626, simple_loss=0.3386, pruned_loss=0.09331, over 4734.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3115, pruned_loss=0.0758, over 937349.64 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:10:20,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=75884.0, ans=0.0 +2024-07-27 23:10:20,545 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.85 vs. limit=10.0 +2024-07-27 23:10:34,692 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.114e+01 6.081e+01 6.996e+01 7.790e+01 1.543e+02, threshold=1.399e+02, percent-clipped=1.0 +2024-07-27 23:10:36,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=75910.66666666667, ans=0.2 +2024-07-27 23:10:38,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. 
limit=6.0 +2024-07-27 23:10:39,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=22.5 +2024-07-27 23:10:41,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.24 vs. limit=10.0 +2024-07-27 23:10:50,811 INFO [train.py:1114] (0/4) Epoch 6, batch 5850, loss[loss=0.1775, simple_loss=0.26, pruned_loss=0.0475, over 4533.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3121, pruned_loss=0.07602, over 938100.11 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:10:53,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=75950.66666666667, ans=0.0 +2024-07-27 23:10:56,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=75950.66666666667, ans=0.0 +2024-07-27 23:11:08,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=15.0 +2024-07-27 23:11:14,194 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.72 vs. limit=10.0 +2024-07-27 23:11:16,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=75990.66666666667, ans=0.125 +2024-07-27 23:11:21,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.83 vs. limit=22.5 +2024-07-27 23:11:30,102 INFO [train.py:1114] (0/4) Epoch 6, batch 5900, loss[loss=0.2355, simple_loss=0.3195, pruned_loss=0.07573, over 4663.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3119, pruned_loss=0.07614, over 938542.62 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:11:38,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=76030.66666666667, ans=0.2 +2024-07-27 23:11:48,180 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 6.028e+01 6.783e+01 7.450e+01 1.132e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-27 23:11:48,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0 +2024-07-27 23:12:01,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76070.66666666667, ans=0.1 +2024-07-27 23:12:03,454 INFO [train.py:1114] (0/4) Epoch 6, batch 5950, loss[loss=0.2858, simple_loss=0.365, pruned_loss=0.1033, over 4683.00 frames. ], tot_loss[loss=0.231, simple_loss=0.311, pruned_loss=0.07546, over 940353.21 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:12:17,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76110.66666666667, ans=0.1 +2024-07-27 23:12:18,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.64 vs. 
limit=15.0 +2024-07-27 23:12:19,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0 +2024-07-27 23:12:20,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.46 vs. limit=22.5 +2024-07-27 23:12:20,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.50 vs. limit=15.0 +2024-07-27 23:12:28,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.21 vs. limit=15.0 +2024-07-27 23:12:36,365 INFO [train.py:1114] (0/4) Epoch 6, batch 6000, loss[loss=0.276, simple_loss=0.35, pruned_loss=0.101, over 4235.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3105, pruned_loss=0.07546, over 937381.01 frames. ], batch size: 25, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:12:36,366 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 23:12:50,097 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.1905, simple_loss=0.2947, pruned_loss=0.04318, over 944034.00 frames. +2024-07-27 23:12:50,098 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 23:13:06,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76177.33333333333, ans=0.1 +2024-07-27 23:13:07,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 6.230e+01 7.142e+01 8.647e+01 1.308e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 23:13:12,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76190.66666666667, ans=0.1 +2024-07-27 23:13:16,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.61 vs. limit=10.0 +2024-07-27 23:13:24,112 INFO [train.py:1114] (0/4) Epoch 6, batch 6050, loss[loss=0.178, simple_loss=0.2631, pruned_loss=0.04645, over 4768.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3104, pruned_loss=0.07549, over 938607.58 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:13:25,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=76217.33333333333, ans=0.0 +2024-07-27 23:13:46,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=76257.33333333333, ans=0.125 +2024-07-27 23:13:50,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.59 vs. limit=15.0 +2024-07-27 23:13:54,400 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.60 vs. limit=22.5 +2024-07-27 23:13:57,319 INFO [train.py:1114] (0/4) Epoch 6, batch 6100, loss[loss=0.2585, simple_loss=0.336, pruned_loss=0.09055, over 4689.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3109, pruned_loss=0.07575, over 938499.85 frames. 
], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:14:08,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=76297.33333333333, ans=0.0 +2024-07-27 23:14:17,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 6.126e+01 6.655e+01 7.850e+01 1.418e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 23:14:19,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0 +2024-07-27 23:14:22,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=76324.0, ans=0.025 +2024-07-27 23:14:27,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=76337.33333333333, ans=0.2 +2024-07-27 23:14:32,347 INFO [train.py:1114] (0/4) Epoch 6, batch 6150, loss[loss=0.2862, simple_loss=0.3344, pruned_loss=0.119, over 3283.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3109, pruned_loss=0.07577, over 937213.89 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:14:34,333 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:14:38,226 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:14:42,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.61 vs. limit=15.0 +2024-07-27 23:14:48,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=76377.33333333333, ans=0.125 +2024-07-27 23:14:50,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.19 vs. limit=15.0 +2024-07-27 23:15:07,851 INFO [train.py:1114] (0/4) Epoch 6, batch 6200, loss[loss=0.2412, simple_loss=0.3138, pruned_loss=0.08429, over 4737.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3115, pruned_loss=0.07627, over 936568.64 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:15:12,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=76417.33333333333, ans=0.0 +2024-07-27 23:15:22,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=76430.66666666667, ans=0.07 +2024-07-27 23:15:30,099 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.920e+01 6.889e+01 8.181e+01 1.186e+02, threshold=1.378e+02, percent-clipped=0.0 +2024-07-27 23:15:40,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=76470.66666666667, ans=0.015 +2024-07-27 23:15:43,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76470.66666666667, ans=0.0 +2024-07-27 23:15:45,608 INFO [train.py:1114] (0/4) Epoch 6, batch 6250, loss[loss=0.2141, simple_loss=0.3, pruned_loss=0.0641, over 4808.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3122, pruned_loss=0.07706, over 933437.57 frames. 
], batch size: 14, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:15:48,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=76484.0, ans=0.125 +2024-07-27 23:15:58,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.06 vs. limit=15.0 +2024-07-27 23:16:00,276 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.03 vs. limit=15.0 +2024-07-27 23:16:00,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=76510.66666666667, ans=0.125 +2024-07-27 23:16:01,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=76510.66666666667, ans=0.125 +2024-07-27 23:16:06,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=76510.66666666667, ans=0.125 +2024-07-27 23:16:13,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=12.0 +2024-07-27 23:16:21,079 INFO [train.py:1114] (0/4) Epoch 6, batch 6300, loss[loss=0.2205, simple_loss=0.2943, pruned_loss=0.07336, over 4535.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.313, pruned_loss=0.07732, over 930187.08 frames. ], batch size: 10, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:16:26,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76550.66666666667, ans=0.125 +2024-07-27 23:16:29,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76564.0, ans=0.0 +2024-07-27 23:16:37,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76577.33333333333, ans=0.125 +2024-07-27 23:16:38,839 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.917e+01 6.519e+01 7.440e+01 1.686e+02, threshold=1.304e+02, percent-clipped=1.0 +2024-07-27 23:16:52,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.00 vs. limit=15.0 +2024-07-27 23:16:53,955 INFO [train.py:1114] (0/4) Epoch 6, batch 6350, loss[loss=0.311, simple_loss=0.3728, pruned_loss=0.1246, over 4420.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3138, pruned_loss=0.07782, over 934158.26 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:16:57,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.44 vs. 
limit=22.5 +2024-07-27 23:16:58,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=76617.33333333333, ans=0.125 +2024-07-27 23:17:05,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=76630.66666666667, ans=0.125 +2024-07-27 23:17:14,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=76657.33333333333, ans=0.125 +2024-07-27 23:17:24,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=76670.66666666667, ans=0.0 +2024-07-27 23:17:27,175 INFO [train.py:1114] (0/4) Epoch 6, batch 6400, loss[loss=0.2197, simple_loss=0.3076, pruned_loss=0.06595, over 4637.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3134, pruned_loss=0.07749, over 935279.41 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:17:30,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=15.0 +2024-07-27 23:17:34,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76697.33333333333, ans=0.1 +2024-07-27 23:17:44,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 6.138e+01 6.927e+01 7.775e+01 1.168e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 23:17:49,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76724.0, ans=0.1 +2024-07-27 23:17:50,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.92 vs. limit=15.0 +2024-07-27 23:18:00,370 INFO [train.py:1114] (0/4) Epoch 6, batch 6450, loss[loss=0.2643, simple_loss=0.3598, pruned_loss=0.08439, over 4523.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3126, pruned_loss=0.07656, over 938903.92 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:18:00,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=76750.66666666667, ans=0.125 +2024-07-27 23:18:07,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=76750.66666666667, ans=0.5 +2024-07-27 23:18:13,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=76764.0, ans=0.125 +2024-07-27 23:18:23,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=76777.33333333333, ans=0.2 +2024-07-27 23:18:27,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=76790.66666666667, ans=0.0 +2024-07-27 23:18:40,318 INFO [train.py:1114] (0/4) Epoch 6, batch 6500, loss[loss=0.2607, simple_loss=0.3334, pruned_loss=0.09401, over 3208.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3114, pruned_loss=0.07575, over 940376.03 frames. 
], batch size: 35, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:18:42,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=76817.33333333333, ans=0.0 +2024-07-27 23:18:58,074 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.177e+01 7.054e+01 8.466e+01 1.519e+02, threshold=1.411e+02, percent-clipped=2.0 +2024-07-27 23:19:01,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=76857.33333333333, ans=0.125 +2024-07-27 23:19:09,622 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:19:13,480 INFO [train.py:1114] (0/4) Epoch 6, batch 6550, loss[loss=0.2077, simple_loss=0.2772, pruned_loss=0.06906, over 4804.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3098, pruned_loss=0.07449, over 943167.60 frames. ], batch size: 11, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:19:19,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76897.33333333333, ans=0.1 +2024-07-27 23:19:23,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=76897.33333333333, ans=0.015 +2024-07-27 23:19:24,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76897.33333333333, ans=0.0 +2024-07-27 23:19:44,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. limit=15.0 +2024-07-27 23:19:47,205 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:19:47,589 INFO [train.py:1114] (0/4) Epoch 6, batch 6600, loss[loss=0.2423, simple_loss=0.3253, pruned_loss=0.07967, over 4932.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3108, pruned_loss=0.07491, over 945046.36 frames. ], batch size: 14, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:20:05,789 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 6.034e+01 7.063e+01 8.869e+01 1.315e+02, threshold=1.413e+02, percent-clipped=0.0 +2024-07-27 23:20:06,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=76977.33333333333, ans=0.125 +2024-07-27 23:20:10,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76990.66666666667, ans=0.1 +2024-07-27 23:20:15,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=77004.0, ans=0.125 +2024-07-27 23:20:21,208 INFO [train.py:1114] (0/4) Epoch 6, batch 6650, loss[loss=0.2286, simple_loss=0.3029, pruned_loss=0.07716, over 4595.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3107, pruned_loss=0.0748, over 943767.57 frames. 
], batch size: 17, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:20:22,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=77017.33333333333, ans=0.07 +2024-07-27 23:20:28,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=77030.66666666667, ans=0.125 +2024-07-27 23:20:52,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=77070.66666666667, ans=0.0 +2024-07-27 23:20:57,069 INFO [train.py:1114] (0/4) Epoch 6, batch 6700, loss[loss=0.228, simple_loss=0.3028, pruned_loss=0.0766, over 4722.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3119, pruned_loss=0.07525, over 942402.64 frames. ], batch size: 19, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:20:57,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=77084.0, ans=15.0 +2024-07-27 23:20:59,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=77084.0, ans=0.04949747468305833 +2024-07-27 23:21:01,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=77084.0, ans=0.2 +2024-07-27 23:21:15,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.982e+01 6.173e+01 6.934e+01 8.423e+01 1.268e+02, threshold=1.387e+02, percent-clipped=0.0 +2024-07-27 23:21:26,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=77124.0, ans=0.125 +2024-07-27 23:21:27,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77124.0, ans=0.1 +2024-07-27 23:21:29,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=77124.0, ans=0.5 +2024-07-27 23:21:41,297 INFO [train.py:1114] (0/4) Epoch 6, batch 6750, loss[loss=0.2303, simple_loss=0.3036, pruned_loss=0.0785, over 4432.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3119, pruned_loss=0.07588, over 940693.97 frames. ], batch size: 26, lr: 1.22e-02, grad_scale: 16.0 +2024-07-27 23:21:46,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=77150.66666666667, ans=0.125 +2024-07-27 23:21:52,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77164.0, ans=0.1 +2024-07-27 23:22:02,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=77190.66666666667, ans=0.2 +2024-07-27 23:22:14,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=77204.0, ans=0.125 +2024-07-27 23:22:16,816 INFO [train.py:1114] (0/4) Epoch 6, batch 6800, loss[loss=0.2505, simple_loss=0.3276, pruned_loss=0.08673, over 4638.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3121, pruned_loss=0.07633, over 939706.61 frames. 
], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:23:06,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=77230.66666666667, ans=0.0 +2024-07-27 23:23:07,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77230.66666666667, ans=0.1 +2024-07-27 23:23:14,540 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 5.858e+01 6.351e+01 7.283e+01 1.199e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-27 23:23:18,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=77257.33333333333, ans=10.0 +2024-07-27 23:23:22,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77270.66666666667, ans=0.1 +2024-07-27 23:23:23,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=77270.66666666667, ans=0.125 +2024-07-27 23:23:23,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=77270.66666666667, ans=0.2 +2024-07-27 23:23:27,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=77270.66666666667, ans=0.2 +2024-07-27 23:23:29,493 INFO [train.py:1114] (0/4) Epoch 6, batch 6850, loss[loss=0.2305, simple_loss=0.3248, pruned_loss=0.06806, over 4691.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3116, pruned_loss=0.07592, over 941096.28 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:23:29,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=77284.0, ans=0.2 +2024-07-27 23:23:33,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.42 vs. limit=15.0 +2024-07-27 23:23:49,906 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:23:52,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=77324.0, ans=0.0 +2024-07-27 23:23:52,082 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:23:58,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=15.0 +2024-07-27 23:24:03,369 INFO [train.py:1114] (0/4) Epoch 6, batch 6900, loss[loss=0.2117, simple_loss=0.2925, pruned_loss=0.06548, over 4971.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3129, pruned_loss=0.07612, over 943210.08 frames. 
], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:24:10,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=77364.0, ans=0.125 +2024-07-27 23:24:14,778 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:24:21,829 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.966e+01 6.630e+01 7.138e+01 1.259e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 23:24:26,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=77390.66666666667, ans=0.0 +2024-07-27 23:24:29,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.02 vs. limit=15.0 +2024-07-27 23:24:38,561 INFO [train.py:1114] (0/4) Epoch 6, batch 6950, loss[loss=0.2224, simple_loss=0.2953, pruned_loss=0.07475, over 4530.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3126, pruned_loss=0.07601, over 940343.56 frames. ], batch size: 10, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:24:45,455 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:24:52,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=77444.0, ans=0.125 +2024-07-27 23:24:56,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=77444.0, ans=0.025 +2024-07-27 23:25:12,296 INFO [train.py:1114] (0/4) Epoch 6, batch 7000, loss[loss=0.2639, simple_loss=0.3496, pruned_loss=0.08905, over 4573.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3127, pruned_loss=0.07655, over 938917.46 frames. ], batch size: 17, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:25:14,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=77484.0, ans=0.125 +2024-07-27 23:25:26,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=15.0 +2024-07-27 23:25:30,332 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.154e+01 6.184e+01 7.015e+01 8.119e+01 1.355e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-27 23:25:39,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=77537.33333333333, ans=0.0 +2024-07-27 23:25:39,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=77537.33333333333, ans=0.125 +2024-07-27 23:25:44,782 INFO [train.py:1114] (0/4) Epoch 6, batch 7050, loss[loss=0.2161, simple_loss=0.3096, pruned_loss=0.0613, over 4651.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3126, pruned_loss=0.07623, over 942016.16 frames. ], batch size: 19, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:25:53,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-07-27 23:25:58,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.89 vs. 
limit=22.5 +2024-07-27 23:26:03,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=77577.33333333333, ans=0.2 +2024-07-27 23:26:04,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=77590.66666666667, ans=0.2 +2024-07-27 23:26:07,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=77590.66666666667, ans=0.125 +2024-07-27 23:26:11,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=8.0 +2024-07-27 23:26:18,251 INFO [train.py:1114] (0/4) Epoch 6, batch 7100, loss[loss=0.2017, simple_loss=0.2992, pruned_loss=0.05207, over 4812.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3128, pruned_loss=0.07629, over 936776.79 frames. ], batch size: 15, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:26:23,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=77617.33333333333, ans=0.2 +2024-07-27 23:26:27,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=77617.33333333333, ans=0.125 +2024-07-27 23:26:40,775 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.696e+01 6.046e+01 6.711e+01 7.848e+01 1.418e+02, threshold=1.342e+02, percent-clipped=1.0 +2024-07-27 23:26:44,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.03 vs. limit=12.0 +2024-07-27 23:26:55,161 INFO [train.py:1114] (0/4) Epoch 6, batch 7150, loss[loss=0.223, simple_loss=0.3076, pruned_loss=0.06919, over 4502.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3113, pruned_loss=0.07565, over 937730.89 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:27:04,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77697.33333333333, ans=0.0 +2024-07-27 23:27:12,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.61 vs. limit=15.0 +2024-07-27 23:27:13,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=77710.66666666667, ans=0.0 +2024-07-27 23:27:14,144 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.92 vs. limit=22.5 +2024-07-27 23:27:26,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=77737.33333333333, ans=0.04949747468305833 +2024-07-27 23:27:29,629 INFO [train.py:1114] (0/4) Epoch 6, batch 7200, loss[loss=0.2937, simple_loss=0.3564, pruned_loss=0.1155, over 4807.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3114, pruned_loss=0.07576, over 938053.17 frames. 
], batch size: 15, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:27:39,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=77764.0, ans=0.0 +2024-07-27 23:27:47,987 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 6.035e+01 6.773e+01 8.115e+01 1.390e+02, threshold=1.355e+02, percent-clipped=1.0 +2024-07-27 23:27:48,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77777.33333333333, ans=0.0 +2024-07-27 23:27:54,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=77790.66666666667, ans=0.125 +2024-07-27 23:27:56,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77804.0, ans=0.125 +2024-07-27 23:27:57,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=77804.0, ans=0.125 +2024-07-27 23:28:01,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=12.0 +2024-07-27 23:28:02,493 INFO [train.py:1114] (0/4) Epoch 6, batch 7250, loss[loss=0.2115, simple_loss=0.2901, pruned_loss=0.06641, over 4852.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3102, pruned_loss=0.07505, over 939542.87 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:28:03,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=77817.33333333333, ans=0.125 +2024-07-27 23:28:06,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=77817.33333333333, ans=0.0 +2024-07-27 23:28:09,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=77830.66666666667, ans=0.0 +2024-07-27 23:28:12,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=77830.66666666667, ans=0.0 +2024-07-27 23:28:17,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=77844.0, ans=0.125 +2024-07-27 23:28:19,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=77844.0, ans=0.125 +2024-07-27 23:28:26,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=77857.33333333333, ans=0.2 +2024-07-27 23:28:32,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0 +2024-07-27 23:28:36,960 INFO [train.py:1114] (0/4) Epoch 6, batch 7300, loss[loss=0.1801, simple_loss=0.2667, pruned_loss=0.04671, over 4843.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3093, pruned_loss=0.07448, over 939807.41 frames. 
], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:28:37,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77884.0, ans=0.0 +2024-07-27 23:28:42,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77884.0, ans=0.1 +2024-07-27 23:28:45,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.58 vs. limit=22.5 +2024-07-27 23:28:52,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=77910.66666666667, ans=0.125 +2024-07-27 23:28:55,455 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.159e+01 6.187e+01 6.781e+01 8.208e+01 1.800e+02, threshold=1.356e+02, percent-clipped=4.0 +2024-07-27 23:29:00,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=77924.0, ans=0.2 +2024-07-27 23:29:02,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77937.33333333333, ans=0.1 +2024-07-27 23:29:04,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=77937.33333333333, ans=0.09899494936611666 +2024-07-27 23:29:09,750 INFO [train.py:1114] (0/4) Epoch 6, batch 7350, loss[loss=0.2327, simple_loss=0.3098, pruned_loss=0.0778, over 4638.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3101, pruned_loss=0.07471, over 939076.56 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:29:10,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=77950.66666666667, ans=0.2 +2024-07-27 23:29:11,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.28 vs. limit=15.0 +2024-07-27 23:29:15,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=77964.0, ans=0.125 +2024-07-27 23:29:19,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.86 vs. limit=6.0 +2024-07-27 23:29:27,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77977.33333333333, ans=0.0 +2024-07-27 23:29:31,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=77990.66666666667, ans=0.0 +2024-07-27 23:29:35,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=78004.0, ans=0.0 +2024-07-27 23:29:42,397 INFO [train.py:1114] (0/4) Epoch 6, batch 7400, loss[loss=0.2129, simple_loss=0.3043, pruned_loss=0.06076, over 4700.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3084, pruned_loss=0.07391, over 940437.50 frames. 
], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:29:42,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78017.33333333333, ans=0.125 +2024-07-27 23:29:44,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=78017.33333333333, ans=0.0 +2024-07-27 23:29:49,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=78030.66666666667, ans=0.125 +2024-07-27 23:30:00,718 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 6.318e+01 7.281e+01 8.792e+01 1.336e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 23:30:27,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=78070.66666666667, ans=0.125 +2024-07-27 23:30:29,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=78070.66666666667, ans=0.0 +2024-07-27 23:30:31,754 INFO [train.py:1114] (0/4) Epoch 6, batch 7450, loss[loss=0.2138, simple_loss=0.2809, pruned_loss=0.07336, over 4619.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.3081, pruned_loss=0.07425, over 937563.36 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:30:38,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=78097.33333333333, ans=0.0 +2024-07-27 23:30:51,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78124.0, ans=0.0 +2024-07-27 23:30:52,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=78124.0, ans=0.95 +2024-07-27 23:31:02,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=78137.33333333333, ans=0.125 +2024-07-27 23:31:04,796 INFO [train.py:1114] (0/4) Epoch 6, batch 7500, loss[loss=0.314, simple_loss=0.3663, pruned_loss=0.1309, over 3470.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3097, pruned_loss=0.07548, over 936139.92 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:31:10,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.38 vs. limit=22.5 +2024-07-27 23:31:12,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=78164.0, ans=10.0 +2024-07-27 23:31:13,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78164.0, ans=0.1 +2024-07-27 23:31:24,019 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.209e+01 6.853e+01 7.670e+01 1.087e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-27 23:31:38,294 INFO [train.py:1114] (0/4) Epoch 6, batch 7550, loss[loss=0.2612, simple_loss=0.337, pruned_loss=0.09273, over 4653.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3123, pruned_loss=0.07682, over 936169.07 frames. 
], batch size: 17, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:31:50,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=78230.66666666667, ans=0.125 +2024-07-27 23:32:02,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=78257.33333333333, ans=0.125 +2024-07-27 23:32:08,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.07 vs. limit=15.0 +2024-07-27 23:32:11,965 INFO [train.py:1114] (0/4) Epoch 6, batch 7600, loss[loss=0.2409, simple_loss=0.324, pruned_loss=0.07895, over 4802.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.311, pruned_loss=0.07596, over 937989.94 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:32:14,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=78284.0, ans=0.125 +2024-07-27 23:32:18,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.88 vs. limit=15.0 +2024-07-27 23:32:24,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=78297.33333333333, ans=0.0 +2024-07-27 23:32:31,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.17 vs. limit=6.0 +2024-07-27 23:32:31,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.91 vs. limit=15.0 +2024-07-27 23:32:32,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78310.66666666667, ans=0.125 +2024-07-27 23:32:33,864 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.092e+01 6.628e+01 7.251e+01 1.124e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 23:32:52,077 INFO [train.py:1114] (0/4) Epoch 6, batch 7650, loss[loss=0.199, simple_loss=0.277, pruned_loss=0.06055, over 4940.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3107, pruned_loss=0.07621, over 937087.94 frames. ], batch size: 12, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:33:01,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78350.66666666667, ans=0.1 +2024-07-27 23:33:03,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=78350.66666666667, ans=0.125 +2024-07-27 23:33:03,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.50 vs. limit=15.0 +2024-07-27 23:33:19,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.77 vs. 
limit=15.0 +2024-07-27 23:33:25,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=78404.0, ans=0.125 +2024-07-27 23:33:33,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78404.0, ans=0.125 +2024-07-27 23:33:33,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=78404.0, ans=0.2 +2024-07-27 23:33:37,390 INFO [train.py:1114] (0/4) Epoch 6, batch 7700, loss[loss=0.1897, simple_loss=0.2893, pruned_loss=0.04508, over 4693.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.311, pruned_loss=0.0759, over 934116.21 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:33:51,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=78417.33333333333, ans=0.125 +2024-07-27 23:33:51,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=78417.33333333333, ans=0.125 +2024-07-27 23:33:53,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=78430.66666666667, ans=0.2 +2024-07-27 23:34:11,043 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 6.189e+01 6.836e+01 7.774e+01 1.390e+02, threshold=1.367e+02, percent-clipped=1.0 +2024-07-27 23:34:16,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=78457.33333333333, ans=0.2 +2024-07-27 23:34:25,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=78470.66666666667, ans=0.125 +2024-07-27 23:34:25,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78470.66666666667, ans=0.1 +2024-07-27 23:34:28,153 INFO [train.py:1114] (0/4) Epoch 6, batch 7750, loss[loss=0.2411, simple_loss=0.3185, pruned_loss=0.08184, over 4923.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3124, pruned_loss=0.07652, over 935331.45 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:34:30,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.30 vs. limit=22.5 +2024-07-27 23:34:41,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=78510.66666666667, ans=0.125 +2024-07-27 23:34:42,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78510.66666666667, ans=0.125 +2024-07-27 23:34:44,869 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.99 vs. 
limit=15.0 +2024-07-27 23:34:54,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=78537.33333333333, ans=0.035 +2024-07-27 23:35:01,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=78537.33333333333, ans=0.125 +2024-07-27 23:35:03,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.90 vs. limit=15.0 +2024-07-27 23:35:04,353 INFO [train.py:1114] (0/4) Epoch 6, batch 7800, loss[loss=0.2364, simple_loss=0.319, pruned_loss=0.07684, over 4661.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3131, pruned_loss=0.07622, over 936986.67 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:35:13,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.78 vs. limit=10.0 +2024-07-27 23:35:14,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=78564.0, ans=0.5 +2024-07-27 23:35:17,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78577.33333333333, ans=0.1 +2024-07-27 23:35:18,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78577.33333333333, ans=0.1 +2024-07-27 23:35:22,307 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.050e+01 6.523e+01 7.521e+01 9.871e+01, threshold=1.305e+02, percent-clipped=0.0 +2024-07-27 23:35:27,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78590.66666666667, ans=0.1 +2024-07-27 23:35:27,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78590.66666666667, ans=0.125 +2024-07-27 23:35:34,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=78604.0, ans=0.0 +2024-07-27 23:35:36,949 INFO [train.py:1114] (0/4) Epoch 6, batch 7850, loss[loss=0.2361, simple_loss=0.2984, pruned_loss=0.08697, over 4543.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3123, pruned_loss=0.07626, over 935965.19 frames. ], batch size: 10, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:35:40,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=78617.33333333333, ans=0.125 +2024-07-27 23:35:54,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=78644.0, ans=0.0 +2024-07-27 23:36:00,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.10 vs. limit=15.0 +2024-07-27 23:36:11,567 INFO [train.py:1114] (0/4) Epoch 6, batch 7900, loss[loss=0.2308, simple_loss=0.3196, pruned_loss=0.07099, over 4872.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.315, pruned_loss=0.07739, over 932596.97 frames. 
], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:36:28,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78710.66666666667, ans=0.1 +2024-07-27 23:36:29,746 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.141e+01 6.160e+01 7.004e+01 8.333e+01 1.233e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 23:36:36,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=78724.0, ans=0.2 +2024-07-27 23:36:38,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=78737.33333333333, ans=0.025 +2024-07-27 23:36:44,058 INFO [train.py:1114] (0/4) Epoch 6, batch 7950, loss[loss=0.2912, simple_loss=0.3379, pruned_loss=0.1223, over 3460.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3143, pruned_loss=0.07701, over 935081.37 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:36:48,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=78750.66666666667, ans=0.125 +2024-07-27 23:37:01,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78777.33333333333, ans=0.1 +2024-07-27 23:37:04,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=78790.66666666667, ans=0.125 +2024-07-27 23:37:07,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=78790.66666666667, ans=0.125 +2024-07-27 23:37:09,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=78804.0, ans=0.125 +2024-07-27 23:37:16,326 INFO [train.py:1114] (0/4) Epoch 6, batch 8000, loss[loss=0.2247, simple_loss=0.2952, pruned_loss=0.07711, over 4629.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3128, pruned_loss=0.07638, over 934336.85 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:37:21,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=78817.33333333333, ans=0.125 +2024-07-27 23:37:23,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=78830.66666666667, ans=0.125 +2024-07-27 23:37:30,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.34 vs. limit=15.0 +2024-07-27 23:37:34,332 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.441e+01 5.938e+01 6.564e+01 7.603e+01 1.476e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-27 23:37:35,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=15.0 +2024-07-27 23:37:43,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=78870.66666666667, ans=0.0 +2024-07-27 23:37:48,715 INFO [train.py:1114] (0/4) Epoch 6, batch 8050, loss[loss=0.2485, simple_loss=0.3355, pruned_loss=0.08072, over 4810.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3132, pruned_loss=0.07635, over 934311.96 frames. 
], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:37:51,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=78884.0, ans=0.0 +2024-07-27 23:37:55,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=78897.33333333333, ans=0.0 +2024-07-27 23:37:56,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=78897.33333333333, ans=0.125 +2024-07-27 23:37:57,109 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:38:19,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.18 vs. limit=10.0 +2024-07-27 23:38:22,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=78937.33333333333, ans=0.0 +2024-07-27 23:38:23,500 INFO [train.py:1114] (0/4) Epoch 6, batch 8100, loss[loss=0.2526, simple_loss=0.336, pruned_loss=0.0846, over 4787.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3126, pruned_loss=0.07544, over 933869.76 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:38:32,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.46 vs. limit=15.0 +2024-07-27 23:38:34,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=78964.0, ans=0.125 +2024-07-27 23:38:41,460 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.051e+01 5.969e+01 6.429e+01 6.997e+01 9.390e+01, threshold=1.286e+02, percent-clipped=0.0 +2024-07-27 23:38:41,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78977.33333333333, ans=0.0 +2024-07-27 23:38:42,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78990.66666666667, ans=0.1 +2024-07-27 23:38:43,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=78990.66666666667, ans=0.07 +2024-07-27 23:38:44,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=78990.66666666667, ans=15.0 +2024-07-27 23:38:55,587 INFO [train.py:1114] (0/4) Epoch 6, batch 8150, loss[loss=0.2391, simple_loss=0.3322, pruned_loss=0.07305, over 4796.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3118, pruned_loss=0.07568, over 937273.25 frames. 
], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:38:56,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=79017.33333333333, ans=0.125 +2024-07-27 23:38:58,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=79017.33333333333, ans=0.025 +2024-07-27 23:39:02,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=79030.66666666667, ans=0.125 +2024-07-27 23:39:05,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=79030.66666666667, ans=0.0 +2024-07-27 23:39:28,469 INFO [train.py:1114] (0/4) Epoch 6, batch 8200, loss[loss=0.2477, simple_loss=0.3313, pruned_loss=0.08209, over 4799.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3116, pruned_loss=0.07531, over 938222.98 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:39:37,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=79097.33333333333, ans=0.0 +2024-07-27 23:39:47,315 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 5.934e+01 6.554e+01 7.415e+01 1.580e+02, threshold=1.311e+02, percent-clipped=1.0 +2024-07-27 23:40:01,188 INFO [train.py:1114] (0/4) Epoch 6, batch 8250, loss[loss=0.1989, simple_loss=0.2824, pruned_loss=0.05763, over 4886.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3114, pruned_loss=0.07555, over 938495.24 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:40:09,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=79164.0, ans=0.025 +2024-07-27 23:40:23,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79190.66666666667, ans=0.125 +2024-07-27 23:40:33,939 INFO [train.py:1114] (0/4) Epoch 6, batch 8300, loss[loss=0.2474, simple_loss=0.3221, pruned_loss=0.08636, over 4903.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3127, pruned_loss=0.07577, over 938663.66 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:40:44,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.59 vs. limit=22.5 +2024-07-27 23:40:54,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.146e+01 5.976e+01 6.704e+01 7.897e+01 1.175e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-27 23:40:55,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=79257.33333333333, ans=0.125 +2024-07-27 23:40:59,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.64 vs. limit=12.0 +2024-07-27 23:41:07,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-27 23:41:08,390 INFO [train.py:1114] (0/4) Epoch 6, batch 8350, loss[loss=0.2222, simple_loss=0.3101, pruned_loss=0.06715, over 4800.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.312, pruned_loss=0.07515, over 941397.63 frames. 
], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:41:09,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=79284.0, ans=0.0 +2024-07-27 23:41:19,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=15.0 +2024-07-27 23:41:25,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79310.66666666667, ans=0.125 +2024-07-27 23:41:28,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79324.0, ans=0.125 +2024-07-27 23:41:29,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=79324.0, ans=0.2 +2024-07-27 23:41:31,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=79324.0, ans=0.0 +2024-07-27 23:41:38,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=79337.33333333333, ans=0.0 +2024-07-27 23:41:39,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.99 vs. limit=22.5 +2024-07-27 23:41:40,609 INFO [train.py:1114] (0/4) Epoch 6, batch 8400, loss[loss=0.1908, simple_loss=0.2771, pruned_loss=0.05223, over 4770.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3128, pruned_loss=0.07584, over 939745.95 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:41:41,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79350.66666666667, ans=0.125 +2024-07-27 23:41:41,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.41 vs. limit=15.0 +2024-07-27 23:41:56,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79377.33333333333, ans=0.125 +2024-07-27 23:41:58,575 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 6.271e+01 7.007e+01 8.306e+01 1.253e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 23:42:01,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=79390.66666666667, ans=0.2 +2024-07-27 23:42:01,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=79390.66666666667, ans=0.0 +2024-07-27 23:42:10,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=79404.0, ans=0.2 +2024-07-27 23:42:12,519 INFO [train.py:1114] (0/4) Epoch 6, batch 8450, loss[loss=0.2288, simple_loss=0.3098, pruned_loss=0.07389, over 4807.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3137, pruned_loss=0.07576, over 938718.82 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:42:12,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=79417.33333333333, ans=0.07 +2024-07-27 23:42:18,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.92 vs. 
limit=15.0 +2024-07-27 23:42:22,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=79430.66666666667, ans=0.025 +2024-07-27 23:42:27,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=79444.0, ans=0.0 +2024-07-27 23:42:28,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=79444.0, ans=0.125 +2024-07-27 23:42:36,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79457.33333333333, ans=0.1 +2024-07-27 23:42:36,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=79457.33333333333, ans=0.2 +2024-07-27 23:42:45,635 INFO [train.py:1114] (0/4) Epoch 6, batch 8500, loss[loss=0.1821, simple_loss=0.2586, pruned_loss=0.0528, over 4610.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3109, pruned_loss=0.0745, over 938475.53 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:42:46,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=79484.0, ans=0.0 +2024-07-27 23:42:52,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79497.33333333333, ans=0.125 +2024-07-27 23:43:04,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.862e+01 6.704e+01 7.850e+01 1.312e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-27 23:43:11,602 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.87 vs. limit=15.0 +2024-07-27 23:43:15,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=79537.33333333333, ans=0.0 +2024-07-27 23:43:17,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79537.33333333333, ans=0.125 +2024-07-27 23:43:19,073 INFO [train.py:1114] (0/4) Epoch 6, batch 8550, loss[loss=0.1805, simple_loss=0.2558, pruned_loss=0.05256, over 4802.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.311, pruned_loss=0.07489, over 939284.12 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:43:21,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=79550.66666666667, ans=0.0 +2024-07-27 23:43:22,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=79550.66666666667, ans=0.125 +2024-07-27 23:43:25,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.66 vs. 
limit=22.5 +2024-07-27 23:43:27,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=79564.0, ans=0.0 +2024-07-27 23:43:33,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=79577.33333333333, ans=0.2 +2024-07-27 23:43:39,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=79590.66666666667, ans=0.125 +2024-07-27 23:43:50,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=79604.0, ans=0.2 +2024-07-27 23:43:51,126 INFO [train.py:1114] (0/4) Epoch 6, batch 8600, loss[loss=0.2572, simple_loss=0.3426, pruned_loss=0.08586, over 4789.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3102, pruned_loss=0.07467, over 939139.50 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:43:59,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=79630.66666666667, ans=0.125 +2024-07-27 23:44:01,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79630.66666666667, ans=0.1 +2024-07-27 23:44:09,903 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.199e+01 6.001e+01 6.460e+01 7.651e+01 1.281e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-27 23:44:16,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79657.33333333333, ans=0.1 +2024-07-27 23:44:19,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=79670.66666666667, ans=0.2 +2024-07-27 23:44:24,676 INFO [train.py:1114] (0/4) Epoch 6, batch 8650, loss[loss=0.2204, simple_loss=0.3111, pruned_loss=0.06484, over 4897.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3101, pruned_loss=0.07441, over 940343.32 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:44:37,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=79710.66666666667, ans=0.125 +2024-07-27 23:44:37,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=79710.66666666667, ans=0.125 +2024-07-27 23:44:40,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.56 vs. limit=22.5 +2024-07-27 23:44:41,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.96 vs. limit=22.5 +2024-07-27 23:44:53,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.11 vs. limit=15.0 +2024-07-27 23:44:56,641 INFO [train.py:1114] (0/4) Epoch 6, batch 8700, loss[loss=0.2599, simple_loss=0.3377, pruned_loss=0.09104, over 4769.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3109, pruned_loss=0.07479, over 937506.27 frames. 
], batch size: 13, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:45:00,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=79750.66666666667, ans=10.0 +2024-07-27 23:45:04,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79764.0, ans=0.125 +2024-07-27 23:45:11,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=79777.33333333333, ans=0.125 +2024-07-27 23:45:14,460 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.110e+01 6.862e+01 8.564e+01 1.344e+02, threshold=1.372e+02, percent-clipped=1.0 +2024-07-27 23:45:20,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=79790.66666666667, ans=0.0 +2024-07-27 23:45:24,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.51 vs. limit=15.0 +2024-07-27 23:45:28,859 INFO [train.py:1114] (0/4) Epoch 6, batch 8750, loss[loss=0.2484, simple_loss=0.3284, pruned_loss=0.08416, over 4687.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3103, pruned_loss=0.07467, over 936420.39 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:45:30,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=79817.33333333333, ans=0.2 +2024-07-27 23:45:32,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79817.33333333333, ans=0.125 +2024-07-27 23:45:33,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79817.33333333333, ans=0.1 +2024-07-27 23:45:39,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=79830.66666666667, ans=0.0 +2024-07-27 23:46:00,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=79870.66666666667, ans=0.125 +2024-07-27 23:46:01,309 INFO [train.py:1114] (0/4) Epoch 6, batch 8800, loss[loss=0.229, simple_loss=0.3117, pruned_loss=0.07316, over 4930.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3117, pruned_loss=0.07555, over 937691.56 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:46:02,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0 +2024-07-27 23:46:03,880 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-07-27 23:46:09,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-27 23:46:16,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=79910.66666666667, ans=0.0 +2024-07-27 23:46:17,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.15 vs. 
limit=15.0 +2024-07-27 23:46:19,210 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.815e+01 6.538e+01 7.322e+01 9.632e+01, threshold=1.308e+02, percent-clipped=0.0 +2024-07-27 23:46:24,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=79924.0, ans=0.2 +2024-07-27 23:46:33,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=79950.66666666667, ans=0.02 +2024-07-27 23:46:33,566 INFO [train.py:1114] (0/4) Epoch 6, batch 8850, loss[loss=0.2727, simple_loss=0.3518, pruned_loss=0.09675, over 4519.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3108, pruned_loss=0.07545, over 932551.45 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:46:39,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-07-27 23:46:55,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.92 vs. limit=22.5 +2024-07-27 23:47:07,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79990.66666666667, ans=0.125 +2024-07-27 23:47:09,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=15.0 +2024-07-27 23:47:11,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0 +2024-07-27 23:47:12,325 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-60000.pt +2024-07-27 23:47:34,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=80017.33333333333, ans=0.125 +2024-07-27 23:47:35,383 INFO [train.py:1114] (0/4) Epoch 6, batch 8900, loss[loss=0.2028, simple_loss=0.2804, pruned_loss=0.06261, over 4933.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3114, pruned_loss=0.0755, over 930422.48 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:47:43,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80030.66666666667, ans=0.1 +2024-07-27 23:47:48,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.27 vs. limit=22.5 +2024-07-27 23:47:53,366 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 6.167e+01 6.816e+01 7.855e+01 1.273e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 23:48:00,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=80070.66666666667, ans=0.0 +2024-07-27 23:48:07,383 INFO [train.py:1114] (0/4) Epoch 6, batch 8950, loss[loss=0.2222, simple_loss=0.3045, pruned_loss=0.06991, over 4552.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3112, pruned_loss=0.07546, over 931093.40 frames. 
], batch size: 21, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:48:22,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80110.66666666667, ans=0.125 +2024-07-27 23:48:25,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80110.66666666667, ans=0.125 +2024-07-27 23:48:40,046 INFO [train.py:1114] (0/4) Epoch 6, batch 9000, loss[loss=0.1913, simple_loss=0.2736, pruned_loss=0.05445, over 4641.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3105, pruned_loss=0.07487, over 933916.04 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:48:40,047 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-27 23:48:46,150 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.6618, 2.1772, 3.0130, 3.5420, 3.3903, 2.8376, 3.3986, 2.1992], + device='cuda:0') +2024-07-27 23:48:52,409 INFO [train.py:1146] (0/4) Epoch 6, validation: loss=0.1898, simple_loss=0.2938, pruned_loss=0.0429, over 944034.00 frames. +2024-07-27 23:48:52,410 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-27 23:48:58,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=80164.0, ans=0.95 +2024-07-27 23:49:10,599 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.150e+01 6.230e+01 7.342e+01 8.976e+01 1.203e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 23:49:11,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80190.66666666667, ans=0.1 +2024-07-27 23:49:15,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.75 vs. limit=10.0 +2024-07-27 23:49:25,587 INFO [train.py:1114] (0/4) Epoch 6, batch 9050, loss[loss=0.1604, simple_loss=0.2447, pruned_loss=0.0381, over 4548.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3093, pruned_loss=0.07449, over 933967.79 frames. ], batch size: 10, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:49:28,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=80217.33333333333, ans=0.0 +2024-07-27 23:49:30,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=80217.33333333333, ans=0.0 +2024-07-27 23:49:35,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80230.66666666667, ans=0.1 +2024-07-27 23:49:35,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80230.66666666667, ans=0.1 +2024-07-27 23:49:52,995 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.33 vs. limit=22.5 +2024-07-27 23:49:56,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=80257.33333333333, ans=0.2 +2024-07-27 23:49:57,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. 
limit=15.0 +2024-07-27 23:49:58,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80270.66666666667, ans=0.125 +2024-07-27 23:50:00,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=80270.66666666667, ans=0.0 +2024-07-27 23:50:03,564 INFO [train.py:1114] (0/4) Epoch 6, batch 9100, loss[loss=0.2062, simple_loss=0.2899, pruned_loss=0.06128, over 4930.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3081, pruned_loss=0.07409, over 936569.36 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:50:09,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80297.33333333333, ans=0.125 +2024-07-27 23:50:16,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=22.5 +2024-07-27 23:50:21,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.154e+01 7.130e+01 8.632e+01 1.081e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 23:50:28,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-27 23:50:35,538 INFO [train.py:1114] (0/4) Epoch 6, batch 9150, loss[loss=0.2617, simple_loss=0.3397, pruned_loss=0.09184, over 4800.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3092, pruned_loss=0.07459, over 935517.27 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:50:36,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80350.66666666667, ans=0.1 +2024-07-27 23:50:48,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=80377.33333333333, ans=0.125 +2024-07-27 23:50:48,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80377.33333333333, ans=0.125 +2024-07-27 23:50:58,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80390.66666666667, ans=0.1 +2024-07-27 23:51:16,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=80404.0, ans=0.0 +2024-07-27 23:51:18,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80417.33333333333, ans=0.1 +2024-07-27 23:51:18,717 INFO [train.py:1114] (0/4) Epoch 6, batch 9200, loss[loss=0.1822, simple_loss=0.2688, pruned_loss=0.04777, over 4856.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3083, pruned_loss=0.0739, over 937433.62 frames. 
], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:51:19,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=80417.33333333333, ans=0.125 +2024-07-27 23:51:27,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=80430.66666666667, ans=0.125 +2024-07-27 23:51:40,367 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.086e+01 6.690e+01 8.259e+01 1.289e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-27 23:51:40,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=80444.0, ans=0.025 +2024-07-27 23:51:54,188 INFO [train.py:1114] (0/4) Epoch 6, batch 9250, loss[loss=0.2559, simple_loss=0.3399, pruned_loss=0.08601, over 4636.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3089, pruned_loss=0.0742, over 938186.52 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:52:00,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=80497.33333333333, ans=0.125 +2024-07-27 23:52:06,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=12.0 +2024-07-27 23:52:14,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=80524.0, ans=0.125 +2024-07-27 23:52:22,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=80537.33333333333, ans=0.125 +2024-07-27 23:52:26,147 INFO [train.py:1114] (0/4) Epoch 6, batch 9300, loss[loss=0.2, simple_loss=0.2769, pruned_loss=0.06155, over 4775.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3094, pruned_loss=0.07457, over 937908.88 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:52:31,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=80550.66666666667, ans=0.125 +2024-07-27 23:52:33,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=80564.0, ans=0.125 +2024-07-27 23:52:35,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80564.0, ans=0.1 +2024-07-27 23:52:38,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.79 vs. 
limit=15.0 +2024-07-27 23:52:40,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=80577.33333333333, ans=0.0 +2024-07-27 23:52:43,891 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.063e+01 5.901e+01 6.419e+01 7.368e+01 1.271e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-27 23:52:50,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=80590.66666666667, ans=0.04949747468305833 +2024-07-27 23:52:50,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80604.0, ans=0.125 +2024-07-27 23:52:53,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=80604.0, ans=0.0 +2024-07-27 23:52:58,705 INFO [train.py:1114] (0/4) Epoch 6, batch 9350, loss[loss=0.2961, simple_loss=0.3439, pruned_loss=0.1242, over 4817.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3101, pruned_loss=0.07462, over 935397.51 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:53:03,738 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=6.0 +2024-07-27 23:53:09,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=80630.66666666667, ans=0.0 +2024-07-27 23:53:15,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=80644.0, ans=0.125 +2024-07-27 23:53:21,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=80657.33333333333, ans=0.025 +2024-07-27 23:53:23,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=80670.66666666667, ans=0.125 +2024-07-27 23:53:31,202 INFO [train.py:1114] (0/4) Epoch 6, batch 9400, loss[loss=0.2484, simple_loss=0.3264, pruned_loss=0.08523, over 4695.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3098, pruned_loss=0.07458, over 933164.12 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:53:34,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=80684.0, ans=0.125 +2024-07-27 23:53:41,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=80697.33333333333, ans=0.0 +2024-07-27 23:53:43,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80710.66666666667, ans=0.1 +2024-07-27 23:53:46,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=80710.66666666667, ans=0.0 +2024-07-27 23:53:49,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.83 vs. 
limit=15.0 +2024-07-27 23:53:49,498 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.055e+01 7.065e+01 8.211e+01 1.397e+02, threshold=1.413e+02, percent-clipped=1.0 +2024-07-27 23:53:50,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=15.0 +2024-07-27 23:53:51,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=80724.0, ans=0.125 +2024-07-27 23:53:52,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=80724.0, ans=10.0 +2024-07-27 23:54:02,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.07 vs. limit=15.0 +2024-07-27 23:54:02,800 INFO [train.py:1114] (0/4) Epoch 6, batch 9450, loss[loss=0.1737, simple_loss=0.2534, pruned_loss=0.04698, over 4796.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3104, pruned_loss=0.07492, over 932539.41 frames. ], batch size: 11, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:54:09,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=80764.0, ans=0.2 +2024-07-27 23:54:10,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=80764.0, ans=0.0 +2024-07-27 23:54:21,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80790.66666666667, ans=0.125 +2024-07-27 23:54:23,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=80790.66666666667, ans=0.125 +2024-07-27 23:54:30,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=80804.0, ans=0.2 +2024-07-27 23:54:32,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=80804.0, ans=0.125 +2024-07-27 23:54:32,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80804.0, ans=0.0 +2024-07-27 23:54:34,480 INFO [train.py:1114] (0/4) Epoch 6, batch 9500, loss[loss=0.2571, simple_loss=0.3309, pruned_loss=0.09168, over 4719.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.311, pruned_loss=0.07508, over 934844.38 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:54:34,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.42 vs. 
limit=15.0 +2024-07-27 23:54:40,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=80830.66666666667, ans=0.2 +2024-07-27 23:54:43,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=80830.66666666667, ans=0.125 +2024-07-27 23:54:52,213 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 6.030e+01 6.974e+01 8.015e+01 1.181e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 23:54:52,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=80857.33333333333, ans=10.0 +2024-07-27 23:54:53,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-07-27 23:54:54,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=80857.33333333333, ans=0.025 +2024-07-27 23:55:01,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=80870.66666666667, ans=0.125 +2024-07-27 23:55:05,207 INFO [train.py:1114] (0/4) Epoch 6, batch 9550, loss[loss=0.1984, simple_loss=0.2816, pruned_loss=0.05765, over 4780.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3109, pruned_loss=0.07529, over 932011.03 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:55:05,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=80884.0, ans=0.05 +2024-07-27 23:55:20,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=80910.66666666667, ans=0.125 +2024-07-27 23:55:25,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=80924.0, ans=0.0 +2024-07-27 23:55:32,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=80937.33333333333, ans=0.2 +2024-07-27 23:55:34,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.15 vs. limit=12.0 +2024-07-27 23:55:38,078 INFO [train.py:1114] (0/4) Epoch 6, batch 9600, loss[loss=0.3227, simple_loss=0.369, pruned_loss=0.1381, over 3348.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3114, pruned_loss=0.07542, over 930752.20 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:55:56,590 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.004e+01 6.228e+01 7.001e+01 7.870e+01 1.117e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 23:56:00,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=80990.66666666667, ans=0.0 +2024-07-27 23:56:01,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=80990.66666666667, ans=0.125 +2024-07-27 23:58:43,520 INFO [train.py:1114] (0/4) Epoch 6, batch 9650, loss[loss=0.2563, simple_loss=0.3405, pruned_loss=0.08603, over 4840.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3131, pruned_loss=0.07695, over 926705.20 frames. 
], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:58:54,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81030.66666666667, ans=0.125 +2024-07-27 23:58:54,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=81030.66666666667, ans=0.025 +2024-07-27 23:58:58,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81044.0, ans=0.125 +2024-07-27 23:59:04,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=81057.33333333333, ans=0.125 +2024-07-27 23:59:07,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.99 vs. limit=22.5 +2024-07-27 23:59:15,273 INFO [train.py:1114] (0/4) Epoch 6, batch 9700, loss[loss=0.2095, simple_loss=0.292, pruned_loss=0.06353, over 4237.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3134, pruned_loss=0.07721, over 925110.17 frames. ], batch size: 25, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:59:16,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=81084.0, ans=0.125 +2024-07-27 23:59:33,228 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 6.355e+01 7.161e+01 8.228e+01 1.300e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 23:59:37,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81124.0, ans=0.1 +2024-07-27 23:59:38,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=81124.0, ans=0.0 +2024-07-27 23:59:40,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=81137.33333333333, ans=0.125 +2024-07-27 23:59:46,568 INFO [train.py:1114] (0/4) Epoch 6, batch 9750, loss[loss=0.2269, simple_loss=0.3217, pruned_loss=0.06602, over 4675.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3125, pruned_loss=0.07684, over 925359.76 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:59:47,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=81150.66666666667, ans=0.025 +2024-07-28 00:00:09,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=12.0 +2024-07-28 00:00:11,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.88 vs. limit=12.0 +2024-07-28 00:00:13,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=81164.0, ans=0.025 +2024-07-28 00:00:24,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.96 vs. 
limit=15.0 +2024-07-28 00:00:28,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81204.0, ans=0.1 +2024-07-28 00:00:30,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=81204.0, ans=0.0 +2024-07-28 00:00:34,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.61 vs. limit=22.5 +2024-07-28 00:00:35,015 INFO [train.py:1114] (0/4) Epoch 6, batch 9800, loss[loss=0.2047, simple_loss=0.2913, pruned_loss=0.05899, over 4703.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3109, pruned_loss=0.07596, over 924927.08 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:00:35,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81217.33333333333, ans=0.1 +2024-07-28 00:00:44,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81230.66666666667, ans=0.125 +2024-07-28 00:00:52,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.096e+01 6.416e+01 7.275e+01 8.758e+01 1.346e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-28 00:00:54,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.43 vs. limit=15.0 +2024-07-28 00:00:54,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81257.33333333333, ans=0.125 +2024-07-28 00:00:58,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=81270.66666666667, ans=0.95 +2024-07-28 00:01:01,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=81270.66666666667, ans=0.1 +2024-07-28 00:01:02,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81270.66666666667, ans=0.125 +2024-07-28 00:01:04,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81270.66666666667, ans=0.125 +2024-07-28 00:01:05,424 INFO [train.py:1114] (0/4) Epoch 6, batch 9850, loss[loss=0.2278, simple_loss=0.3348, pruned_loss=0.06045, over 4912.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3116, pruned_loss=0.07629, over 927014.96 frames. 
], batch size: 15, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:01:06,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81284.0, ans=0.1 +2024-07-28 00:01:09,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=81284.0, ans=0.0 +2024-07-28 00:01:12,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=81297.33333333333, ans=0.125 +2024-07-28 00:01:13,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=81297.33333333333, ans=0.125 +2024-07-28 00:01:30,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=81337.33333333333, ans=0.125 +2024-07-28 00:01:37,305 INFO [train.py:1114] (0/4) Epoch 6, batch 9900, loss[loss=0.2509, simple_loss=0.3378, pruned_loss=0.08202, over 4846.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3125, pruned_loss=0.07667, over 926300.80 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:01:45,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.15 vs. limit=15.0 +2024-07-28 00:01:54,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.847e+01 6.249e+01 6.784e+01 7.688e+01 1.136e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-28 00:02:00,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81390.66666666667, ans=0.125 +2024-07-28 00:02:06,690 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:02:07,914 INFO [train.py:1114] (0/4) Epoch 6, batch 9950, loss[loss=0.197, simple_loss=0.2697, pruned_loss=0.06214, over 4790.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3128, pruned_loss=0.07698, over 928752.87 frames. ], batch size: 11, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:02:10,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81417.33333333333, ans=0.1 +2024-07-28 00:02:15,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81430.66666666667, ans=0.1 +2024-07-28 00:02:15,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=81430.66666666667, ans=0.0 +2024-07-28 00:02:18,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=81430.66666666667, ans=0.125 +2024-07-28 00:02:27,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81457.33333333333, ans=0.1 +2024-07-28 00:02:31,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=81457.33333333333, ans=0.0 +2024-07-28 00:02:36,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.35 vs. 
limit=15.0 +2024-07-28 00:02:39,509 INFO [train.py:1114] (0/4) Epoch 6, batch 10000, loss[loss=0.2662, simple_loss=0.3311, pruned_loss=0.1007, over 4657.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.315, pruned_loss=0.07764, over 926511.52 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:02:47,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81497.33333333333, ans=0.1 +2024-07-28 00:02:50,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=81497.33333333333, ans=0.0 +2024-07-28 00:02:57,857 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.998e+01 6.471e+01 7.600e+01 1.218e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 00:02:59,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=81524.0, ans=0.125 +2024-07-28 00:03:04,825 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:03:07,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=81537.33333333333, ans=0.125 +2024-07-28 00:03:11,408 INFO [train.py:1114] (0/4) Epoch 6, batch 10050, loss[loss=0.3311, simple_loss=0.3865, pruned_loss=0.1379, over 3500.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3197, pruned_loss=0.08073, over 915624.45 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:03:13,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=15.0 +2024-07-28 00:03:16,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=81550.66666666667, ans=0.125 +2024-07-28 00:03:20,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81564.0, ans=0.1 +2024-07-28 00:03:22,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=81564.0, ans=0.025 +2024-07-28 00:03:27,335 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:03:33,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=81590.66666666667, ans=0.0 +2024-07-28 00:03:34,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=81590.66666666667, ans=0.125 +2024-07-28 00:03:35,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=81590.66666666667, ans=0.125 +2024-07-28 00:03:38,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=81604.0, ans=0.95 +2024-07-28 00:03:45,250 INFO [train.py:1114] (0/4) Epoch 6, batch 10100, loss[loss=0.2099, simple_loss=0.2949, pruned_loss=0.06242, over 3424.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3251, pruned_loss=0.08691, over 863560.96 frames. 
], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:03:50,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=81617.33333333333, ans=0.125 +2024-07-28 00:03:51,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=81630.66666666667, ans=0.125 +2024-07-28 00:03:57,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=81644.0, ans=0.025 +2024-07-28 00:04:04,050 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.601e+01 6.841e+01 7.276e+01 7.854e+01 1.337e+02, threshold=1.455e+02, percent-clipped=1.0 +2024-07-28 00:04:12,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=81670.66666666667, ans=0.025 +2024-07-28 00:04:16,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=81670.66666666667, ans=0.125 +2024-07-28 00:04:17,439 INFO [train.py:1114] (0/4) Epoch 6, batch 10150, loss[loss=0.2979, simple_loss=0.3482, pruned_loss=0.1238, over 3465.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3295, pruned_loss=0.09206, over 821716.45 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:04:18,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=81684.0, ans=0.2 +2024-07-28 00:04:18,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=81684.0, ans=0.0 +2024-07-28 00:04:18,951 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.14 vs. limit=10.0 +2024-07-28 00:04:25,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=81697.33333333333, ans=0.0 +2024-07-28 00:04:30,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=81710.66666666667, ans=0.125 +2024-07-28 00:04:35,188 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.12 vs. limit=15.0 +2024-07-28 00:04:39,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=81724.0, ans=0.2 +2024-07-28 00:04:39,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=81724.0, ans=0.0 +2024-07-28 00:04:42,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81737.33333333333, ans=0.1 +2024-07-28 00:04:43,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.12 vs. limit=22.5 +2024-07-28 00:04:48,202 INFO [train.py:1114] (0/4) Epoch 6, batch 10200, loss[loss=0.3026, simple_loss=0.3439, pruned_loss=0.1307, over 3381.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3331, pruned_loss=0.09641, over 789460.69 frames. 
+2024-07-28 00:05:01,801 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-6.pt
+2024-07-28 00:05:45,930 INFO [train.py:1114] (0/4) Epoch 7, batch 0, loss[loss=0.1888, simple_loss=0.2725, pruned_loss=0.0526, over 4862.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2725, pruned_loss=0.0526, over 4862.00 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:05:45,930 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 00:05:57,486 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.1928, simple_loss=0.2981, pruned_loss=0.04372, over 944034.00 frames.
+2024-07-28 00:05:57,487 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 00:05:59,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.15 vs. limit=10.0
+2024-07-28 00:06:04,569 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.706e+01 6.568e+01 7.074e+01 7.483e+01 1.038e+02, threshold=1.415e+02, percent-clipped=0.0
+2024-07-28 00:06:08,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=81793.33333333333, ans=0.125
+2024-07-28 00:06:14,815 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.63 vs. limit=22.5
+2024-07-28 00:06:22,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=81820.0, ans=0.125
+2024-07-28 00:06:32,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.33 vs. limit=15.0
+2024-07-28 00:06:33,933 INFO [train.py:1114] (0/4) Epoch 7, batch 50, loss[loss=0.2317, simple_loss=0.3124, pruned_loss=0.07549, over 4610.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3116, pruned_loss=0.07492, over 206254.30 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:06:38,628 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:06:52,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.01 vs. limit=15.0
+2024-07-28 00:06:53,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=81886.66666666667, ans=0.125
+2024-07-28 00:07:07,525 INFO [train.py:1114] (0/4) Epoch 7, batch 100, loss[loss=0.2343, simple_loss=0.3054, pruned_loss=0.08159, over 4654.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3144, pruned_loss=0.0756, over 365314.86 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:07:08,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0
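At the epoch boundary above, rank 0 writes `epoch-6.pt` and then computes a validation loss before epoch 7 training resumes; both training and validation losses are frame-weighted averages (hence `over 944034.00 frames`). A minimal sketch of those two steps follows; the function names and the `compute_loss` callable are assumptions for illustration, not the actual train.py code.

```python
# Illustrative sketch of the epoch boundary: save a resumable checkpoint,
# then evaluate a frame-weighted validation loss with gradients disabled.
import torch

def save_checkpoint(filename, model, optimizer, scheduler, epoch):
    # One self-contained dict so training can resume from this file.
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "epoch": epoch,
        },
        filename,  # e.g. "zipformer/libri/exp/epoch-6.pt"
    )

@torch.no_grad()
def compute_validation_loss(model, valid_loader, compute_loss):
    # compute_loss(model, batch) -> (loss tensor, num_frames) is assumed.
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in valid_loader:
        loss, num_frames = compute_loss(model, batch)
        tot_loss += loss.item() * num_frames  # frame-weighted sum
        tot_frames += num_frames
    model.train()
    # Frame-weighted average, cf. "validation: loss=0.1928 ... over 944034.00 frames"
    return tot_loss / tot_frames
```

The same frame weighting explains the `tot_loss[... over N frames]` column in the training lines: the running average restarts at each epoch (N is only 206254.30 at batch 50) and grows toward the full window as the epoch proceeds.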
+2024-07-28 00:07:12,079 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.914e+01 6.777e+01 7.920e+01 1.192e+02, threshold=1.355e+02, percent-clipped=0.0
+2024-07-28 00:07:14,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=81926.66666666667, ans=0.125
+2024-07-28 00:07:30,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=81953.33333333333, ans=0.125
+2024-07-28 00:07:35,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=81966.66666666667, ans=0.125
+2024-07-28 00:07:40,135 INFO [train.py:1114] (0/4) Epoch 7, batch 150, loss[loss=0.2407, simple_loss=0.3265, pruned_loss=0.07749, over 4611.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3096, pruned_loss=0.07264, over 493908.79 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:07:42,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.90 vs. limit=15.0
+2024-07-28 00:07:44,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81980.0, ans=0.0
+2024-07-28 00:07:49,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=15.0
+2024-07-28 00:07:56,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0
+2024-07-28 00:07:57,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=82006.66666666667, ans=0.0
+2024-07-28 00:08:06,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=82033.33333333333, ans=0.125
+2024-07-28 00:08:10,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=82033.33333333333, ans=0.025
+2024-07-28 00:08:11,328 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.28 vs. limit=15.0
+2024-07-28 00:08:12,769 INFO [train.py:1114] (0/4) Epoch 7, batch 200, loss[loss=0.2121, simple_loss=0.2957, pruned_loss=0.06424, over 4522.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3069, pruned_loss=0.07193, over 593589.56 frames. ], batch size: 21, lr: 1.11e-02, grad_scale: 32.0
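The `scaling.py:1024` lines compare a per-module whitening `metric` against a `limit`; a penalty is applied only when the metric exceeds the limit, which is why most lines are purely informational. A plausible reconstruction is sketched below, under the assumption (consistent with the logged ranges, but not read from scaling.py) that the metric is `num_channels * sum(eig**2) / sum(eig)**2` over the eigenvalues of the feature covariance: 1.0 for perfectly "white" (isotropic) features, approaching `num_channels` when one direction dominates.

```python
# Sketch of a whitening metric; an assumption, not the scaling.py code.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels) activations for one whitening group."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]        # (C, C) feature covariance
    eigs = torch.linalg.eigvalsh(cov)   # real eigenvalues, ascending
    num_channels = x.shape[1]
    # In [1, num_channels]: 1.0 iff all eigenvalues are equal (white).
    return (num_channels * (eigs ** 2).sum() / eigs.sum() ** 2).item()

white = torch.randn(1000, 384)                   # roughly isotropic features
collapsed = torch.randn(1000, 1).repeat(1, 384)  # rank-1, fully correlated
print(whitening_metric(white))      # close to 1
print(whitening_metric(collapsed))  # close to 384
```

Read this way, `metric=2.64 vs. limit=15.0` means the module's features are already close to white, while `metric=24.27 vs. limit=22.5` (further down) is a case where the limit is exceeded and a corrective gradient would be applied.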
+2024-07-28 00:08:12,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=82046.66666666667, ans=0.025
+2024-07-28 00:08:12,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=82046.66666666667, ans=0.0
+2024-07-28 00:08:17,415 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.956e+01 6.544e+01 7.409e+01 1.468e+02, threshold=1.309e+02, percent-clipped=1.0
+2024-07-28 00:08:17,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=82046.66666666667, ans=0.0
+2024-07-28 00:08:29,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=82073.33333333333, ans=0.95
+2024-07-28 00:08:31,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=82073.33333333333, ans=0.125
+2024-07-28 00:08:34,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.01 vs. limit=15.0
+2024-07-28 00:08:39,785 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:08:46,316 INFO [train.py:1114] (0/4) Epoch 7, batch 250, loss[loss=0.2114, simple_loss=0.2961, pruned_loss=0.06332, over 4635.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3065, pruned_loss=0.07162, over 670286.64 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:08:48,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=15.0
+2024-07-28 00:08:49,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=82113.33333333333, ans=0.125
+2024-07-28 00:08:50,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=82113.33333333333, ans=0.125
+2024-07-28 00:09:03,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=82140.0, ans=0.0
+2024-07-28 00:09:04,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82140.0, ans=0.125
+2024-07-28 00:09:05,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82140.0, ans=0.125
+2024-07-28 00:09:09,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-28 00:09:09,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=82153.33333333333, ans=0.125
+2024-07-28 00:09:12,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.93 vs. limit=22.5
+2024-07-28 00:09:19,498 INFO [train.py:1114] (0/4) Epoch 7, batch 300, loss[loss=0.1914, simple_loss=0.2987, pruned_loss=0.04202, over 4811.00 frames.
], tot_loss[loss=0.2241, simple_loss=0.3059, pruned_loss=0.07114, over 729953.51 frames. ], batch size: 15, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:09:21,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.46 vs. limit=10.0 +2024-07-28 00:09:23,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=82180.0, ans=0.0 +2024-07-28 00:09:24,037 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.988e+01 6.705e+01 7.891e+01 1.591e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 00:09:45,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82220.0, ans=0.1 +2024-07-28 00:09:47,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=82233.33333333333, ans=0.0 +2024-07-28 00:09:54,229 INFO [train.py:1114] (0/4) Epoch 7, batch 350, loss[loss=0.1869, simple_loss=0.2583, pruned_loss=0.05778, over 4939.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3053, pruned_loss=0.0706, over 776448.59 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:09:56,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=82246.66666666667, ans=0.125 +2024-07-28 00:10:24,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=82300.0, ans=0.125 +2024-07-28 00:10:29,114 INFO [train.py:1114] (0/4) Epoch 7, batch 400, loss[loss=0.223, simple_loss=0.3033, pruned_loss=0.07134, over 4692.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3051, pruned_loss=0.07043, over 813903.44 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:10:31,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82313.33333333333, ans=0.1 +2024-07-28 00:10:33,743 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.839e+01 6.182e+01 6.903e+01 9.738e+01, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 00:10:34,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=82313.33333333333, ans=0.0 +2024-07-28 00:10:36,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=82326.66666666667, ans=0.09899494936611666 +2024-07-28 00:10:40,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.89 vs. 
limit=15.0 +2024-07-28 00:10:47,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=82340.0, ans=0.0 +2024-07-28 00:10:48,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=82340.0, ans=0.0 +2024-07-28 00:10:58,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=82366.66666666667, ans=0.2 +2024-07-28 00:11:01,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=82366.66666666667, ans=0.0 +2024-07-28 00:11:02,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82366.66666666667, ans=0.125 +2024-07-28 00:11:04,408 INFO [train.py:1114] (0/4) Epoch 7, batch 450, loss[loss=0.2026, simple_loss=0.2925, pruned_loss=0.05638, over 4634.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3056, pruned_loss=0.0709, over 839037.97 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:11:10,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=82393.33333333333, ans=0.025 +2024-07-28 00:11:14,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82393.33333333333, ans=0.1 +2024-07-28 00:11:14,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82393.33333333333, ans=0.125 +2024-07-28 00:11:16,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.86 vs. limit=22.5 +2024-07-28 00:11:25,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.15 vs. limit=12.0 +2024-07-28 00:11:31,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=82420.0, ans=0.2 +2024-07-28 00:11:32,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=82433.33333333333, ans=0.05 +2024-07-28 00:11:36,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82433.33333333333, ans=0.1 +2024-07-28 00:11:39,245 INFO [train.py:1114] (0/4) Epoch 7, batch 500, loss[loss=0.2812, simple_loss=0.3493, pruned_loss=0.1066, over 4682.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3049, pruned_loss=0.07059, over 861530.84 frames. 
], batch size: 15, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:11:39,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=82446.66666666667, ans=0.125
+2024-07-28 00:11:44,367 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.805e+01 6.520e+01 7.491e+01 1.046e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 00:11:44,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82446.66666666667, ans=0.1
+2024-07-28 00:11:54,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=82473.33333333333, ans=0.0
+2024-07-28 00:12:00,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82486.66666666667, ans=0.1
+2024-07-28 00:12:00,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=82486.66666666667, ans=0.125
+2024-07-28 00:12:08,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=82500.0, ans=0.2
+2024-07-28 00:12:09,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.86 vs. limit=15.0
+2024-07-28 00:12:11,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=82500.0, ans=0.125
+2024-07-28 00:12:12,268 INFO [train.py:1114] (0/4) Epoch 7, batch 550, loss[loss=0.2761, simple_loss=0.3599, pruned_loss=0.09609, over 4596.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3051, pruned_loss=0.07085, over 877588.65 frames. ], batch size: 17, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:12:26,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82526.66666666667, ans=0.125
+2024-07-28 00:12:31,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=22.5
+2024-07-28 00:12:33,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.28 vs. limit=10.0
+2024-07-28 00:12:37,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=82553.33333333333, ans=0.0
+2024-07-28 00:12:41,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=82566.66666666667, ans=0.0
+2024-07-28 00:12:43,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=82566.66666666667, ans=0.0
+2024-07-28 00:12:44,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=82566.66666666667, ans=0.0
+2024-07-28 00:12:45,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=82566.66666666667, ans=0.0
+2024-07-28 00:12:45,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=12.0
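The `grad_scale` field in the train.py lines is the mixed-precision loss scale: it drops from 32.0 to 16.0 at batch 550 above (a step with inf/nan gradients halves it) and is back at 32.0 by batch 800 further down (a sufficient run of overflow-free steps grows it again). A typical `torch.cuda.amp` loop that produces this bookkeeping is sketched below; the loop structure, `compute_loss`, and the scaler constructor arguments are illustrative assumptions, not the exact train.py code.

```python
# Illustrative AMP training step: the GradScaler's current scale is what the
# log reports as grad_scale (halved on overflow, grown after clean steps).
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)  # init_scale is assumed

def train_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():        # forward in reduced precision
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()          # backward on the scaled loss
    scaler.step(optimizer)                 # skips the step if grads overflowed
    scaler.update()                        # halve scale on overflow, else grow
    return loss.detach(), scaler.get_scale()  # get_scale() -> e.g. 16.0
```

Skipping the occasional overflowed step costs almost nothing; the scale just settles at the largest value the current gradient magnitudes can tolerate in float16.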
+2024-07-28 00:12:52,464 INFO [train.py:1114] (0/4) Epoch 7, batch 600, loss[loss=0.2255, simple_loss=0.3113, pruned_loss=0.06981, over 4646.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3054, pruned_loss=0.07092, over 891877.79 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:13:01,197 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.825e+01 6.471e+01 7.822e+01 1.372e+02, threshold=1.294e+02, percent-clipped=1.0
+2024-07-28 00:13:07,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82593.33333333333, ans=0.1
+2024-07-28 00:13:12,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=82606.66666666667, ans=0.125
+2024-07-28 00:13:28,956 INFO [train.py:1114] (0/4) Epoch 7, batch 650, loss[loss=0.2402, simple_loss=0.321, pruned_loss=0.0797, over 4762.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3059, pruned_loss=0.07116, over 903717.57 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:13:36,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0
+2024-07-28 00:13:39,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=82660.0, ans=0.0
+2024-07-28 00:13:45,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.27 vs. limit=22.5
+2024-07-28 00:13:56,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.56 vs. limit=22.5
+2024-07-28 00:14:02,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82713.33333333333, ans=0.1
+2024-07-28 00:14:02,617 INFO [train.py:1114] (0/4) Epoch 7, batch 700, loss[loss=0.2113, simple_loss=0.3024, pruned_loss=0.06012, over 4639.00 frames. ], tot_loss[loss=0.224, simple_loss=0.306, pruned_loss=0.07104, over 911823.40 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:14:06,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=82713.33333333333, ans=15.0
+2024-07-28 00:14:07,881 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.955e+01 6.627e+01 7.908e+01 1.237e+02, threshold=1.325e+02, percent-clipped=0.0
+2024-07-28 00:14:20,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=82740.0, ans=0.07
+2024-07-28 00:14:27,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82753.33333333333, ans=0.1
+2024-07-28 00:14:30,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=82766.66666666667, ans=0.125
+2024-07-28 00:14:34,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=82766.66666666667, ans=0.0
+2024-07-28 00:14:36,838 INFO [train.py:1114] (0/4) Epoch 7, batch 750, loss[loss=0.2075, simple_loss=0.3067, pruned_loss=0.05409, over 4697.00 frames.
], tot_loss[loss=0.2245, simple_loss=0.3065, pruned_loss=0.07128, over 918626.56 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:37,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=82780.0, ans=0.0 +2024-07-28 00:14:55,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=82806.66666666667, ans=0.2 +2024-07-28 00:15:01,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=82820.0, ans=0.125 +2024-07-28 00:15:03,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=82833.33333333333, ans=0.025 +2024-07-28 00:15:06,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=82833.33333333333, ans=0.2 +2024-07-28 00:15:09,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=82833.33333333333, ans=0.125 +2024-07-28 00:15:10,149 INFO [train.py:1114] (0/4) Epoch 7, batch 800, loss[loss=0.2436, simple_loss=0.3144, pruned_loss=0.0864, over 4853.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3069, pruned_loss=0.07206, over 923733.62 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:17,232 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.902e+01 6.465e+01 7.413e+01 1.020e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 00:15:22,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=82860.0, ans=0.125 +2024-07-28 00:15:25,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. limit=6.0 +2024-07-28 00:15:27,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=82873.33333333333, ans=0.2 +2024-07-28 00:15:29,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82873.33333333333, ans=0.125 +2024-07-28 00:15:31,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82886.66666666667, ans=0.125 +2024-07-28 00:15:34,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.59 vs. limit=15.0 +2024-07-28 00:15:36,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=82886.66666666667, ans=0.125 +2024-07-28 00:15:46,842 INFO [train.py:1114] (0/4) Epoch 7, batch 850, loss[loss=0.2231, simple_loss=0.3009, pruned_loss=0.0726, over 4655.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3053, pruned_loss=0.07113, over 927785.55 frames. 
], batch size: 14, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:48,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82913.33333333333, ans=0.1 +2024-07-28 00:15:50,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=82913.33333333333, ans=0.2 +2024-07-28 00:15:51,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.12 vs. limit=15.0 +2024-07-28 00:15:55,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=82926.66666666667, ans=0.025 +2024-07-28 00:15:59,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=82940.0, ans=0.2 +2024-07-28 00:16:01,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82940.0, ans=0.125 +2024-07-28 00:16:03,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82940.0, ans=0.1 +2024-07-28 00:16:04,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=82940.0, ans=0.0 +2024-07-28 00:16:05,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=82940.0, ans=0.0 +2024-07-28 00:16:06,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=82953.33333333333, ans=0.0 +2024-07-28 00:16:09,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=82953.33333333333, ans=0.125 +2024-07-28 00:16:11,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=82953.33333333333, ans=0.0 +2024-07-28 00:16:18,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.98 vs. limit=22.5 +2024-07-28 00:16:22,244 INFO [train.py:1114] (0/4) Epoch 7, batch 900, loss[loss=0.2122, simple_loss=0.2889, pruned_loss=0.06773, over 4847.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3052, pruned_loss=0.07145, over 928298.36 frames. ], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:16:27,221 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. 
limit=6.0 +2024-07-28 00:16:27,461 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.904e+01 6.297e+01 6.765e+01 1.145e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 00:16:34,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=82993.33333333333, ans=0.0 +2024-07-28 00:16:34,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=82993.33333333333, ans=0.125 +2024-07-28 00:16:46,066 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=5.110e-03 +2024-07-28 00:16:47,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=83020.0, ans=0.125 +2024-07-28 00:16:51,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.15 vs. limit=15.0 +2024-07-28 00:16:57,781 INFO [train.py:1114] (0/4) Epoch 7, batch 950, loss[loss=0.2187, simple_loss=0.2977, pruned_loss=0.06979, over 4776.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3067, pruned_loss=0.07216, over 930547.88 frames. ], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:13,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=83073.33333333333, ans=0.2 +2024-07-28 00:17:31,243 INFO [train.py:1114] (0/4) Epoch 7, batch 1000, loss[loss=0.2334, simple_loss=0.3194, pruned_loss=0.07372, over 4962.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.3077, pruned_loss=0.07254, over 929721.14 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:33,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=83113.33333333333, ans=0.0 +2024-07-28 00:17:36,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=83113.33333333333, ans=0.125 +2024-07-28 00:17:36,727 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+01 6.185e+01 7.251e+01 8.642e+01 1.358e+02, threshold=1.450e+02, percent-clipped=3.0 +2024-07-28 00:17:54,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=83153.33333333333, ans=0.125 +2024-07-28 00:17:59,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83166.66666666667, ans=0.1 +2024-07-28 00:18:03,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=83166.66666666667, ans=0.125 +2024-07-28 00:18:05,097 INFO [train.py:1114] (0/4) Epoch 7, batch 1050, loss[loss=0.2456, simple_loss=0.3314, pruned_loss=0.0799, over 4875.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3069, pruned_loss=0.07212, over 932323.27 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:25,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=83193.33333333333, ans=0.2 +2024-07-28 00:18:41,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83220.0, ans=0.125 +2024-07-28 00:18:44,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83233.33333333333, ans=0.1 +2024-07-28 00:18:46,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=83233.33333333333, ans=0.0 +2024-07-28 00:18:47,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=83233.33333333333, ans=0.0 +2024-07-28 00:18:48,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=83233.33333333333, ans=0.1 +2024-07-28 00:18:50,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=83246.66666666667, ans=0.1 +2024-07-28 00:18:50,763 INFO [train.py:1114] (0/4) Epoch 7, batch 1100, loss[loss=0.211, simple_loss=0.2975, pruned_loss=0.06228, over 4901.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3065, pruned_loss=0.07198, over 934638.89 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:51,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:18:52,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83246.66666666667, ans=0.1 +2024-07-28 00:18:56,151 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.849e+01 5.958e+01 6.479e+01 7.755e+01 1.091e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:19:03,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=83273.33333333333, ans=0.125 +2024-07-28 00:19:12,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=83273.33333333333, ans=0.025 +2024-07-28 00:19:26,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=83300.0, ans=0.0 +2024-07-28 00:19:28,118 INFO [train.py:1114] (0/4) Epoch 7, batch 1150, loss[loss=0.226, simple_loss=0.3068, pruned_loss=0.07264, over 4887.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3064, pruned_loss=0.07219, over 934408.69 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:19:31,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=83313.33333333333, ans=0.0 +2024-07-28 00:19:36,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=83326.66666666667, ans=0.2 +2024-07-28 00:19:40,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. 
limit=6.0 +2024-07-28 00:19:46,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83340.0, ans=0.1 +2024-07-28 00:19:51,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 00:19:52,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.44 vs. limit=15.0 +2024-07-28 00:19:59,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=83366.66666666667, ans=0.0 +2024-07-28 00:20:05,238 INFO [train.py:1114] (0/4) Epoch 7, batch 1200, loss[loss=0.2356, simple_loss=0.3213, pruned_loss=0.07492, over 4877.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3071, pruned_loss=0.07234, over 933798.14 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:20:10,454 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.660e+01 6.364e+01 7.390e+01 1.227e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 00:20:17,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=83393.33333333333, ans=0.0 +2024-07-28 00:20:21,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=83406.66666666667, ans=10.0 +2024-07-28 00:20:25,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.54 vs. limit=10.0 +2024-07-28 00:20:36,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83433.33333333333, ans=0.1 +2024-07-28 00:20:37,998 INFO [train.py:1114] (0/4) Epoch 7, batch 1250, loss[loss=0.2489, simple_loss=0.3341, pruned_loss=0.0818, over 4799.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3071, pruned_loss=0.07177, over 937640.45 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:20:41,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=15.0 +2024-07-28 00:20:46,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=83460.0, ans=0.2 +2024-07-28 00:20:50,448 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:21:19,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=83500.0, ans=0.125 +2024-07-28 00:21:21,958 INFO [train.py:1114] (0/4) Epoch 7, batch 1300, loss[loss=0.2709, simple_loss=0.3439, pruned_loss=0.09898, over 4714.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3076, pruned_loss=0.07201, over 938852.62 frames. 
], batch size: 19, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:25,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83513.33333333333, ans=0.125 +2024-07-28 00:21:26,945 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.788e+01 6.480e+01 7.663e+01 1.256e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:21:28,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=83526.66666666667, ans=0.125 +2024-07-28 00:21:51,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83566.66666666667, ans=0.125 +2024-07-28 00:21:55,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=83566.66666666667, ans=0.125 +2024-07-28 00:21:56,817 INFO [train.py:1114] (0/4) Epoch 7, batch 1350, loss[loss=0.2314, simple_loss=0.3068, pruned_loss=0.07795, over 4761.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3056, pruned_loss=0.07104, over 940999.98 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:22:05,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.24 vs. limit=22.5 +2024-07-28 00:22:09,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=83606.66666666667, ans=0.2 +2024-07-28 00:22:15,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=83606.66666666667, ans=0.015 +2024-07-28 00:22:22,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=83620.0, ans=0.125 +2024-07-28 00:22:27,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=83633.33333333333, ans=0.5 +2024-07-28 00:22:31,662 INFO [train.py:1114] (0/4) Epoch 7, batch 1400, loss[loss=0.2066, simple_loss=0.2787, pruned_loss=0.06723, over 4707.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3066, pruned_loss=0.07158, over 943016.83 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:22:35,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0 +2024-07-28 00:22:36,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.900e+01 5.949e+01 6.637e+01 7.853e+01 1.145e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 00:22:36,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83646.66666666667, ans=0.0 +2024-07-28 00:22:44,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-07-28 00:22:44,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.52 vs. 
limit=15.0 +2024-07-28 00:22:57,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=83686.66666666667, ans=0.0 +2024-07-28 00:23:00,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83700.0, ans=0.1 +2024-07-28 00:23:03,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=83700.0, ans=0.025 +2024-07-28 00:23:05,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.58 vs. limit=22.5 +2024-07-28 00:23:06,315 INFO [train.py:1114] (0/4) Epoch 7, batch 1450, loss[loss=0.2479, simple_loss=0.3373, pruned_loss=0.07926, over 4666.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3079, pruned_loss=0.07232, over 943016.67 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:06,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=83713.33333333333, ans=0.125 +2024-07-28 00:23:14,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0 +2024-07-28 00:23:20,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83740.0, ans=0.0 +2024-07-28 00:23:35,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=83766.66666666667, ans=0.125 +2024-07-28 00:23:39,638 INFO [train.py:1114] (0/4) Epoch 7, batch 1500, loss[loss=0.2432, simple_loss=0.3173, pruned_loss=0.08457, over 4800.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3088, pruned_loss=0.07261, over 943136.89 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:43,473 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:23:45,190 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.882e+01 6.521e+01 7.412e+01 1.092e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:23:53,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.20 vs. limit=22.5 +2024-07-28 00:24:04,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=83820.0, ans=0.09899494936611666 +2024-07-28 00:24:09,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=15.0 +2024-07-28 00:24:11,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=83833.33333333333, ans=0.125 +2024-07-28 00:24:15,242 INFO [train.py:1114] (0/4) Epoch 7, batch 1550, loss[loss=0.237, simple_loss=0.3246, pruned_loss=0.07474, over 4898.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3095, pruned_loss=0.07363, over 938987.17 frames. 
], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:24:18,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=83846.66666666667, ans=0.0 +2024-07-28 00:24:20,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=83846.66666666667, ans=0.025 +2024-07-28 00:24:43,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.94 vs. limit=22.5 +2024-07-28 00:24:49,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=83873.33333333333, ans=0.2 +2024-07-28 00:24:50,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=83873.33333333333, ans=0.0 +2024-07-28 00:24:58,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.09 vs. limit=15.0 +2024-07-28 00:25:05,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=83900.0, ans=0.125 +2024-07-28 00:25:09,031 INFO [train.py:1114] (0/4) Epoch 7, batch 1600, loss[loss=0.1881, simple_loss=0.2735, pruned_loss=0.05141, over 4862.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3087, pruned_loss=0.07294, over 937574.59 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:09,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=83913.33333333333, ans=0.125 +2024-07-28 00:25:09,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0 +2024-07-28 00:25:13,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=83913.33333333333, ans=0.125 +2024-07-28 00:25:17,691 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.865e+01 6.513e+01 7.777e+01 1.353e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 00:25:26,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.15 vs. limit=22.5 +2024-07-28 00:25:34,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=83953.33333333333, ans=0.125 +2024-07-28 00:25:35,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=83953.33333333333, ans=0.2 +2024-07-28 00:25:40,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=83966.66666666667, ans=0.125 +2024-07-28 00:25:45,977 INFO [train.py:1114] (0/4) Epoch 7, batch 1650, loss[loss=0.2086, simple_loss=0.3088, pruned_loss=0.05421, over 4670.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3086, pruned_loss=0.07332, over 937910.12 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:54,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=83993.33333333333, ans=0.07 +2024-07-28 00:25:55,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=83993.33333333333, ans=0.125 +2024-07-28 00:25:56,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83993.33333333333, ans=0.125 +2024-07-28 00:26:04,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=84006.66666666667, ans=0.125 +2024-07-28 00:26:08,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=84020.0, ans=0.05 +2024-07-28 00:26:12,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=84020.0, ans=0.2 +2024-07-28 00:26:19,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=84033.33333333333, ans=0.125 +2024-07-28 00:26:21,424 INFO [train.py:1114] (0/4) Epoch 7, batch 1700, loss[loss=0.1914, simple_loss=0.2745, pruned_loss=0.05414, over 4705.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3081, pruned_loss=0.07317, over 939576.03 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:24,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=84046.66666666667, ans=0.0 +2024-07-28 00:26:26,716 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.250e+01 6.932e+01 8.047e+01 1.262e+02, threshold=1.386e+02, percent-clipped=0.0 +2024-07-28 00:26:34,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=84073.33333333333, ans=0.0 +2024-07-28 00:26:47,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=84100.0, ans=0.2 +2024-07-28 00:26:54,563 INFO [train.py:1114] (0/4) Epoch 7, batch 1750, loss[loss=0.1908, simple_loss=0.2666, pruned_loss=0.0575, over 4811.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3066, pruned_loss=0.07193, over 940751.25 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:27:03,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=84126.66666666667, ans=10.0 +2024-07-28 00:27:04,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84126.66666666667, ans=0.125 +2024-07-28 00:27:06,514 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:27:26,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84166.66666666667, ans=0.1 +2024-07-28 00:27:27,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. 
limit=6.0 +2024-07-28 00:27:29,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=84166.66666666667, ans=0.0 +2024-07-28 00:27:32,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=84166.66666666667, ans=0.125 +2024-07-28 00:27:35,801 INFO [train.py:1114] (0/4) Epoch 7, batch 1800, loss[loss=0.2451, simple_loss=0.326, pruned_loss=0.08205, over 4644.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3056, pruned_loss=0.07135, over 941082.98 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:27:38,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84180.0, ans=0.1 +2024-07-28 00:27:39,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84180.0, ans=0.1 +2024-07-28 00:27:41,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 5.927e+01 6.951e+01 8.175e+01 1.232e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-28 00:27:56,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84206.66666666667, ans=0.125 +2024-07-28 00:28:01,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=84220.0, ans=0.025 +2024-07-28 00:28:02,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84220.0, ans=0.1 +2024-07-28 00:28:09,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84233.33333333333, ans=0.0 +2024-07-28 00:28:11,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=84233.33333333333, ans=0.0 +2024-07-28 00:28:14,957 INFO [train.py:1114] (0/4) Epoch 7, batch 1850, loss[loss=0.2586, simple_loss=0.3366, pruned_loss=0.09034, over 4812.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3054, pruned_loss=0.07124, over 940879.86 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:43,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=84300.0, ans=0.0 +2024-07-28 00:28:47,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=84300.0, ans=0.0 +2024-07-28 00:28:50,151 INFO [train.py:1114] (0/4) Epoch 7, batch 1900, loss[loss=0.2055, simple_loss=0.2994, pruned_loss=0.05578, over 4659.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3048, pruned_loss=0.07026, over 942230.05 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0
+2024-07-28 00:28:55,326 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.987e+01 6.515e+01 7.725e+01 1.148e+02, threshold=1.303e+02, percent-clipped=0.0
+2024-07-28 00:28:55,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84313.33333333333, ans=0.125
+2024-07-28 00:29:05,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=84340.0, ans=0.0
+2024-07-28 00:29:05,491 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:29:22,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=84380.0, ans=0.2
+2024-07-28 00:29:22,792 INFO [train.py:1114] (0/4) Epoch 7, batch 1950, loss[loss=0.2133, simple_loss=0.2903, pruned_loss=0.06818, over 4902.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3059, pruned_loss=0.07097, over 943941.90 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0
+2024-07-28 00:29:22,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=84380.0, ans=0.0
+2024-07-28 00:29:26,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.29 vs. limit=15.0
+2024-07-28 00:29:36,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84406.66666666667, ans=0.0
+2024-07-28 00:29:42,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=84420.0, ans=0.125
+2024-07-28 00:29:42,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84420.0, ans=0.1
+2024-07-28 00:29:48,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=84420.0, ans=0.0
+2024-07-28 00:29:56,352 INFO [train.py:1114] (0/4) Epoch 7, batch 2000, loss[loss=0.186, simple_loss=0.2617, pruned_loss=0.05513, over 4808.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3072, pruned_loss=0.0718, over 941657.03 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0
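The `lr` column decays smoothly with both batch count and epoch: 1.19e-02 in epoch 6, 1.11e-02 at the start of epoch 7, 1.10e-02 by mid-epoch, and 1.09e-02 at batch 2050 just below. The sketch that follows is an Eden-style schedule of the kind used with these recipes, where the base rate is damped by batch- and epoch-dependent factors; the exact formula and the `lr_batches`/`lr_epochs` constants are assumptions, not values read from this log.

```python
# Sketch of an Eden-style learning-rate schedule (formula and defaults are
# assumptions for illustration): lr falls off as ~batch^-0.5 and ~epoch^-0.5
# once past the lr_batches / lr_epochs knees, and is nearly flat before them.
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# At these batch counts the decay is very slow, matching the log, where the
# printed lr moves only in the third significant digit across ~2000 batches:
for batch in (84313, 84500):
    print(f"batch {batch}: lr = {eden_lr(0.05, batch, epoch=7):.2e}")
```

A schedule of this shape avoids a separate warmup phase: the `(x^2 + k^2)/k^2` form is flat near zero and transitions smoothly into a power-law decay.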
+2024-07-28 00:29:57,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=84446.66666666667, ans=0.125
+2024-07-28 00:30:01,596 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 6.163e+01 6.683e+01 7.706e+01 1.195e+02, threshold=1.337e+02, percent-clipped=0.0
+2024-07-28 00:30:05,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=84460.0, ans=0.0
+2024-07-28 00:30:06,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84460.0, ans=0.125
+2024-07-28 00:30:07,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=84460.0, ans=0.0
+2024-07-28 00:30:13,057 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:30:13,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=84473.33333333333, ans=10.0
+2024-07-28 00:30:15,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.99 vs. limit=12.0
+2024-07-28 00:30:23,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.97 vs. limit=15.0
+2024-07-28 00:30:23,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=84486.66666666667, ans=0.0
+2024-07-28 00:30:23,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.13 vs. limit=22.5
+2024-07-28 00:30:28,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=84486.66666666667, ans=0.0
+2024-07-28 00:30:35,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=84500.0, ans=0.09899494936611666
+2024-07-28 00:30:36,708 INFO [train.py:1114] (0/4) Epoch 7, batch 2050, loss[loss=0.2033, simple_loss=0.2856, pruned_loss=0.06048, over 4616.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3062, pruned_loss=0.07161, over 939598.51 frames. ], batch size: 11, lr: 1.09e-02, grad_scale: 32.0
+2024-07-28 00:30:37,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=84513.33333333333, ans=0.05
+2024-07-28 00:30:40,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=84513.33333333333, ans=0.125
+2024-07-28 00:30:44,473 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.12 vs.
limit=15.0 +2024-07-28 00:30:45,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=84526.66666666667, ans=0.025 +2024-07-28 00:30:48,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=84526.66666666667, ans=0.125 +2024-07-28 00:30:56,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=84553.33333333333, ans=0.0 +2024-07-28 00:30:57,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=84553.33333333333, ans=0.0 +2024-07-28 00:31:00,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=84553.33333333333, ans=0.0 +2024-07-28 00:31:03,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=84566.66666666667, ans=0.1 +2024-07-28 00:31:11,095 INFO [train.py:1114] (0/4) Epoch 7, batch 2100, loss[loss=0.1904, simple_loss=0.2899, pruned_loss=0.04548, over 4743.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3055, pruned_loss=0.07117, over 941322.75 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:16,343 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.891e+01 6.506e+01 7.465e+01 1.283e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 00:31:22,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=84593.33333333333, ans=0.2 +2024-07-28 00:31:30,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=84620.0, ans=0.125 +2024-07-28 00:31:32,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=84620.0, ans=0.2 +2024-07-28 00:31:33,650 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:31:34,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84620.0, ans=0.125 +2024-07-28 00:31:34,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=84620.0, ans=0.125 +2024-07-28 00:31:34,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=84620.0, ans=0.025 +2024-07-28 00:31:36,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=84633.33333333333, ans=0.025 +2024-07-28 00:31:43,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84633.33333333333, ans=0.125 +2024-07-28 00:31:44,344 INFO [train.py:1114] (0/4) Epoch 7, batch 2150, loss[loss=0.2172, simple_loss=0.3083, pruned_loss=0.06299, over 4893.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3033, pruned_loss=0.07021, over 944347.05 frames. 
], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:50,545 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:31:56,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.46 vs. limit=22.5 +2024-07-28 00:31:57,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84673.33333333333, ans=0.1 +2024-07-28 00:32:05,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=84686.66666666667, ans=0.0 +2024-07-28 00:32:14,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=84700.0, ans=0.04949747468305833 +2024-07-28 00:32:16,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=84700.0, ans=0.0 +2024-07-28 00:32:16,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=84713.33333333333, ans=0.0 +2024-07-28 00:32:17,326 INFO [train.py:1114] (0/4) Epoch 7, batch 2200, loss[loss=0.2144, simple_loss=0.2942, pruned_loss=0.0673, over 4808.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3043, pruned_loss=0.07077, over 943645.36 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:32:22,573 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.863e+01 5.835e+01 6.281e+01 7.163e+01 1.109e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 00:32:29,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=84726.66666666667, ans=0.0 +2024-07-28 00:32:48,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84766.66666666667, ans=0.1 +2024-07-28 00:32:52,138 INFO [train.py:1114] (0/4) Epoch 7, batch 2250, loss[loss=0.1962, simple_loss=0.2835, pruned_loss=0.05447, over 4693.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3037, pruned_loss=0.07023, over 942277.03 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:32:56,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=84780.0, ans=0.125 +2024-07-28 00:33:15,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.77 vs. limit=22.5 +2024-07-28 00:33:20,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=84833.33333333333, ans=0.125 +2024-07-28 00:33:22,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=84833.33333333333, ans=0.0 +2024-07-28 00:33:26,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=84833.33333333333, ans=0.025 +2024-07-28 00:33:27,433 INFO [train.py:1114] (0/4) Epoch 7, batch 2300, loss[loss=0.2067, simple_loss=0.2832, pruned_loss=0.06513, over 4929.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3033, pruned_loss=0.07028, over 939730.13 frames. 
], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:33:31,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84846.66666666667, ans=0.1 +2024-07-28 00:33:32,863 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 5.907e+01 7.082e+01 8.177e+01 1.156e+02, threshold=1.416e+02, percent-clipped=0.0 +2024-07-28 00:33:34,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.30 vs. limit=15.0 +2024-07-28 00:33:37,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=84860.0, ans=0.125 +2024-07-28 00:33:48,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=84873.33333333333, ans=0.0 +2024-07-28 00:33:57,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84900.0, ans=0.125 +2024-07-28 00:34:01,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=15.0 +2024-07-28 00:34:03,248 INFO [train.py:1114] (0/4) Epoch 7, batch 2350, loss[loss=0.2432, simple_loss=0.3267, pruned_loss=0.07988, over 4641.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3038, pruned_loss=0.07073, over 941653.87 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:20,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=84940.0, ans=0.125 +2024-07-28 00:34:23,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.40 vs. limit=15.0 +2024-07-28 00:34:26,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.32 vs. limit=15.0 +2024-07-28 00:34:38,000 INFO [train.py:1114] (0/4) Epoch 7, batch 2400, loss[loss=0.1983, simple_loss=0.2853, pruned_loss=0.05569, over 4638.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3045, pruned_loss=0.07043, over 941407.87 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:40,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=84980.0, ans=0.2 +2024-07-28 00:34:42,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=84980.0, ans=0.125 +2024-07-28 00:34:43,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.26 vs. 
limit=15.0 +2024-07-28 00:34:43,151 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 6.102e+01 6.788e+01 7.615e+01 1.111e+02, threshold=1.358e+02, percent-clipped=0.0 +2024-07-28 00:34:43,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=84980.0, ans=0.125 +2024-07-28 00:35:02,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85020.0, ans=0.1 +2024-07-28 00:35:07,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=85033.33333333333, ans=0.125 +2024-07-28 00:35:11,411 INFO [train.py:1114] (0/4) Epoch 7, batch 2450, loss[loss=0.2402, simple_loss=0.3283, pruned_loss=0.07607, over 4691.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3063, pruned_loss=0.07151, over 937730.50 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:15,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.07 vs. limit=15.0 +2024-07-28 00:35:20,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=85060.0, ans=0.125 +2024-07-28 00:35:32,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85086.66666666667, ans=0.125 +2024-07-28 00:35:36,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=85086.66666666667, ans=0.2 +2024-07-28 00:35:44,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.29 vs. limit=15.0 +2024-07-28 00:35:44,968 INFO [train.py:1114] (0/4) Epoch 7, batch 2500, loss[loss=0.2423, simple_loss=0.3248, pruned_loss=0.07988, over 4810.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3063, pruned_loss=0.07128, over 939396.44 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:48,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=85113.33333333333, ans=0.125 +2024-07-28 00:35:50,129 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.036e+01 6.265e+01 6.846e+01 8.137e+01 1.168e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 00:35:55,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=85126.66666666667, ans=0.0 +2024-07-28 00:35:59,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=85140.0, ans=0.0 +2024-07-28 00:36:01,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85140.0, ans=0.125 +2024-07-28 00:36:06,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.55 vs. 
limit=22.5 +2024-07-28 00:36:07,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85153.33333333333, ans=0.1 +2024-07-28 00:36:14,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=85166.66666666667, ans=0.035 +2024-07-28 00:36:17,890 INFO [train.py:1114] (0/4) Epoch 7, batch 2550, loss[loss=0.2289, simple_loss=0.3082, pruned_loss=0.07477, over 4808.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3057, pruned_loss=0.07098, over 938769.75 frames. ], batch size: 11, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:23,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85180.0, ans=0.1 +2024-07-28 00:36:37,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85220.0, ans=0.1 +2024-07-28 00:36:38,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.23 vs. limit=22.5 +2024-07-28 00:36:39,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.68 vs. limit=10.0 +2024-07-28 00:36:51,103 INFO [train.py:1114] (0/4) Epoch 7, batch 2600, loss[loss=0.2147, simple_loss=0.2981, pruned_loss=0.0656, over 4907.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3063, pruned_loss=0.0712, over 937581.81 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:56,537 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+01 5.684e+01 6.063e+01 6.727e+01 1.050e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 00:37:01,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=85260.0, ans=0.0 +2024-07-28 00:37:02,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=85260.0, ans=0.125 +2024-07-28 00:37:09,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.39 vs. limit=22.5 +2024-07-28 00:37:10,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=85273.33333333333, ans=0.04949747468305833 +2024-07-28 00:37:21,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=85300.0, ans=0.2 +2024-07-28 00:37:24,502 INFO [train.py:1114] (0/4) Epoch 7, batch 2650, loss[loss=0.2727, simple_loss=0.3433, pruned_loss=0.101, over 4640.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3076, pruned_loss=0.07175, over 939423.13 frames. 
], batch size: 16, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:37:33,903 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-64000.pt +2024-07-28 00:37:37,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=85326.66666666667, ans=0.0 +2024-07-28 00:37:49,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=85353.33333333333, ans=0.0 +2024-07-28 00:37:53,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=85366.66666666667, ans=0.0 +2024-07-28 00:37:54,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.75 vs. limit=10.0 +2024-07-28 00:37:59,894 INFO [train.py:1114] (0/4) Epoch 7, batch 2700, loss[loss=0.2157, simple_loss=0.301, pruned_loss=0.06523, over 4741.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.307, pruned_loss=0.07165, over 939708.14 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:00,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=85380.0, ans=22.5 +2024-07-28 00:38:02,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=85380.0, ans=0.0 +2024-07-28 00:38:05,156 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 5.828e+01 6.522e+01 7.194e+01 9.710e+01, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:38:27,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=85420.0, ans=0.0 +2024-07-28 00:38:33,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=12.0 +2024-07-28 00:38:38,343 INFO [train.py:1114] (0/4) Epoch 7, batch 2750, loss[loss=0.2159, simple_loss=0.292, pruned_loss=0.0699, over 4717.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3061, pruned_loss=0.07123, over 939648.72 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:42,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=85446.66666666667, ans=0.07 +2024-07-28 00:38:56,814 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:39:07,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85500.0, ans=0.0 +2024-07-28 00:39:13,501 INFO [train.py:1114] (0/4) Epoch 7, batch 2800, loss[loss=0.3475, simple_loss=0.3856, pruned_loss=0.1547, over 3611.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3069, pruned_loss=0.07211, over 937899.06 frames. 
], batch size: 35, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:18,796 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.969e+01 6.581e+01 7.409e+01 1.159e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 00:39:19,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=85513.33333333333, ans=0.2 +2024-07-28 00:39:23,702 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:39:29,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85540.0, ans=0.1 +2024-07-28 00:39:39,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.28 vs. limit=10.0 +2024-07-28 00:39:49,477 INFO [train.py:1114] (0/4) Epoch 7, batch 2850, loss[loss=0.2087, simple_loss=0.2852, pruned_loss=0.06613, over 4960.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3066, pruned_loss=0.07208, over 935674.67 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:57,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.83 vs. limit=15.0 +2024-07-28 00:40:01,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.54 vs. limit=5.0 +2024-07-28 00:40:05,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=85606.66666666667, ans=0.0 +2024-07-28 00:40:20,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=85633.33333333333, ans=0.0 +2024-07-28 00:40:22,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=85633.33333333333, ans=0.125 +2024-07-28 00:40:23,172 INFO [train.py:1114] (0/4) Epoch 7, batch 2900, loss[loss=0.2176, simple_loss=0.2949, pruned_loss=0.07015, over 4818.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3071, pruned_loss=0.072, over 939647.45 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:40:24,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0 +2024-07-28 00:40:28,616 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.206e+01 7.013e+01 8.326e+01 1.461e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-28 00:40:32,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.55 vs. limit=6.0 +2024-07-28 00:40:32,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.16 vs. 
limit=15.0 +2024-07-28 00:40:33,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=85660.0, ans=0.0 +2024-07-28 00:40:34,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=85660.0, ans=0.07 +2024-07-28 00:40:35,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=85660.0, ans=0.125 +2024-07-28 00:40:37,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=85673.33333333333, ans=0.025 +2024-07-28 00:40:45,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85686.66666666667, ans=0.125 +2024-07-28 00:40:53,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.47 vs. limit=10.0 +2024-07-28 00:41:00,423 INFO [train.py:1114] (0/4) Epoch 7, batch 2950, loss[loss=0.2106, simple_loss=0.29, pruned_loss=0.06562, over 4708.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3043, pruned_loss=0.07102, over 938595.45 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:41:12,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.70 vs. limit=22.5 +2024-07-28 00:41:13,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.26 vs. limit=22.5 +2024-07-28 00:41:28,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85766.66666666667, ans=0.1 +2024-07-28 00:41:33,778 INFO [train.py:1114] (0/4) Epoch 7, batch 3000, loss[loss=0.2541, simple_loss=0.3411, pruned_loss=0.0836, over 4768.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3046, pruned_loss=0.07096, over 938257.16 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:41:33,779 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 00:41:46,481 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2896, pruned_loss=0.04088, over 944034.00 frames. 
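
Two recurring patterns in the log above are worth unpacking. The `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ...` lines summarize the distribution of recent gradient norms: the five numbers are the 0/25/50/75/100% quantiles, and in every such entry here the reported threshold matches Clipping_scale times the median to rounding (e.g. 2.0 × 6.515e+01 = 1.303e+02), with percent-clipped counting how many norms exceeded it. The sketch below reproduces that bookkeeping under those assumptions; it is a minimal illustration, and its names and signature are not the actual `optim.py` code.

```python
import torch

def clipping_summary(recent_grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    """Summarize a window of recent per-batch gradient norms the way the
    "grad-norm quartiles ... threshold=..., percent-clipped=..." lines do.
    Illustrative sketch only; not the optim.py implementation."""
    # 0%, 25%, 50%, 75%, 100% points of the recent grad-norm distribution.
    q = torch.quantile(recent_grad_norms,
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # scale times the median norm
    percent_clipped = 100.0 * (recent_grad_norms > threshold).float().mean().item()
    return q.tolist(), threshold, percent_clipped

# Synthetic norms shaped like the quartiles logged above (4.93e+01 ... 1.15e+02):
norms = torch.tensor([49.3, 59.9, 65.2, 77.3, 114.8])
print(clipping_summary(norms))  # threshold ~130.4, percent clipped 0.0
```

The equally frequent `ScheduledFloat: name=..., batch_count=..., ans=...` lines trace hyperparameters (skip rates, dropout probabilities, balancer probabilities) whose value follows a schedule over the global batch count, with `ans` being the value currently in effect; the real class lives in `scaling.py`. A piecewise-linear schedule clamped at its endpoints, shown here with made-up breakpoints rather than the real ones, captures the behaviour these lines suggest:

```python
def scheduled_float(batch_count: float, points: list[tuple[float, float]]) -> float:
    """Piecewise-linear schedule over batch_count, clamped outside the
    breakpoints. A sketch of the behaviour, not the scaling.py class."""
    points = sorted(points)
    if batch_count <= points[0][0]:
        return points[0][1]
    if batch_count >= points[-1][0]:
        return points[-1][1]
    for (x0, y0), (x1, y1) in zip(points, points[1:]):
        if x0 <= batch_count <= x1:
            # Linear interpolation between the two surrounding breakpoints.
            t = (batch_count - x0) / (x1 - x0)
            return y0 + t * (y1 - y0)

# A dropout-like value decaying from 0.3 to 0.1 over the first 20k batches,
# fully decayed by the batch counts (~84k-88k) seen in this stretch of the log:
print(scheduled_float(84313.33, [(0.0, 0.3), (20000.0, 0.1)]))  # -> 0.1
```
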
+2024-07-28 00:41:46,482 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 00:41:49,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=85780.0, ans=0.025 +2024-07-28 00:41:51,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 6.009e+01 6.936e+01 8.242e+01 1.252e+02, threshold=1.387e+02, percent-clipped=0.0 +2024-07-28 00:41:53,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=85793.33333333333, ans=0.1 +2024-07-28 00:41:59,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=85806.66666666667, ans=0.04949747468305833 +2024-07-28 00:42:10,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=85820.0, ans=0.125 +2024-07-28 00:42:18,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85833.33333333333, ans=0.1 +2024-07-28 00:42:20,774 INFO [train.py:1114] (0/4) Epoch 7, batch 3050, loss[loss=0.1933, simple_loss=0.27, pruned_loss=0.05835, over 4648.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3052, pruned_loss=0.07117, over 937115.73 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:42:43,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=85886.66666666667, ans=0.0 +2024-07-28 00:42:48,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85900.0, ans=0.1 +2024-07-28 00:42:54,055 INFO [train.py:1114] (0/4) Epoch 7, batch 3100, loss[loss=0.2465, simple_loss=0.3262, pruned_loss=0.0834, over 4616.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3044, pruned_loss=0.07169, over 937923.14 frames. ], batch size: 16, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:42:56,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=85913.33333333333, ans=0.1 +2024-07-28 00:42:59,248 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.759e+01 6.343e+01 7.086e+01 1.226e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 00:43:02,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-07-28 00:43:11,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=85940.0, ans=0.125 +2024-07-28 00:43:15,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=85953.33333333333, ans=0.0 +2024-07-28 00:43:32,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=85980.0, ans=0.0 +2024-07-28 00:43:32,991 INFO [train.py:1114] (0/4) Epoch 7, batch 3150, loss[loss=0.2294, simple_loss=0.3112, pruned_loss=0.07382, over 4615.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3055, pruned_loss=0.07209, over 938076.66 frames. 
], batch size: 17, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:43:39,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=85993.33333333333, ans=0.125 +2024-07-28 00:43:57,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=86020.0, ans=0.125 +2024-07-28 00:43:59,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=86020.0, ans=0.125 +2024-07-28 00:44:08,164 INFO [train.py:1114] (0/4) Epoch 7, batch 3200, loss[loss=0.2184, simple_loss=0.3005, pruned_loss=0.06817, over 4830.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.304, pruned_loss=0.07106, over 939833.69 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:44:09,102 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=15.0 +2024-07-28 00:44:11,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=86046.66666666667, ans=0.035 +2024-07-28 00:44:13,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 6.085e+01 7.068e+01 8.225e+01 1.298e+02, threshold=1.414e+02, percent-clipped=1.0 +2024-07-28 00:44:13,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=86046.66666666667, ans=0.2 +2024-07-28 00:44:16,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.15 vs. limit=22.5 +2024-07-28 00:44:16,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=86060.0, ans=0.125 +2024-07-28 00:44:31,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=86086.66666666667, ans=0.0 +2024-07-28 00:44:39,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86086.66666666667, ans=0.1 +2024-07-28 00:44:45,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=86100.0, ans=0.07 +2024-07-28 00:44:47,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=86100.0, ans=0.2 +2024-07-28 00:44:50,728 INFO [train.py:1114] (0/4) Epoch 7, batch 3250, loss[loss=0.2277, simple_loss=0.3113, pruned_loss=0.07202, over 4926.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3054, pruned_loss=0.07127, over 940628.13 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:44:54,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.35 vs. limit=10.0 +2024-07-28 00:45:11,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.81 vs. limit=22.5 +2024-07-28 00:45:13,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.48 vs. 
limit=10.0 +2024-07-28 00:45:24,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=86180.0, ans=0.125 +2024-07-28 00:45:24,582 INFO [train.py:1114] (0/4) Epoch 7, batch 3300, loss[loss=0.2523, simple_loss=0.3297, pruned_loss=0.08748, over 4708.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.304, pruned_loss=0.07073, over 940839.30 frames. ], batch size: 19, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:45:30,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.736e+01 6.420e+01 6.992e+01 1.033e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 00:45:31,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=86193.33333333333, ans=0.2 +2024-07-28 00:45:33,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=86193.33333333333, ans=0.0 +2024-07-28 00:45:50,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=86220.0, ans=0.0 +2024-07-28 00:45:57,981 INFO [train.py:1114] (0/4) Epoch 7, batch 3350, loss[loss=0.2401, simple_loss=0.3113, pruned_loss=0.08451, over 4595.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3043, pruned_loss=0.0709, over 938835.50 frames. ], batch size: 17, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:45:58,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=86246.66666666667, ans=0.125 +2024-07-28 00:46:02,958 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:46:05,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=86260.0, ans=0.0 +2024-07-28 00:46:14,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=86273.33333333333, ans=0.125 +2024-07-28 00:46:28,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=86300.0, ans=0.125 +2024-07-28 00:46:31,264 INFO [train.py:1114] (0/4) Epoch 7, batch 3400, loss[loss=0.1846, simple_loss=0.2621, pruned_loss=0.05354, over 4796.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3041, pruned_loss=0.07073, over 937369.86 frames. ], batch size: 11, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:46:33,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=86313.33333333333, ans=0.07 +2024-07-28 00:46:33,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86313.33333333333, ans=0.1 +2024-07-28 00:46:37,182 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 5.874e+01 6.654e+01 7.588e+01 1.124e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-28 00:47:03,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=86366.66666666667, ans=0.125 +2024-07-28 00:47:04,936 INFO [train.py:1114] (0/4) Epoch 7, batch 3450, loss[loss=0.2346, simple_loss=0.3151, pruned_loss=0.07705, over 4699.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3045, pruned_loss=0.07079, over 937649.13 frames. 
], batch size: 19, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:47:05,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.02 vs. limit=15.0 +2024-07-28 00:47:12,048 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.54 vs. limit=15.0 +2024-07-28 00:47:17,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=86406.66666666667, ans=0.0 +2024-07-28 00:47:24,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.86 vs. limit=15.0 +2024-07-28 00:47:26,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=86420.0, ans=0.125 +2024-07-28 00:47:38,454 INFO [train.py:1114] (0/4) Epoch 7, batch 3500, loss[loss=0.2033, simple_loss=0.2791, pruned_loss=0.06372, over 4938.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3022, pruned_loss=0.06992, over 938385.89 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:47:44,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.841e+01 6.535e+01 7.195e+01 1.031e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 00:47:46,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=86460.0, ans=0.125 +2024-07-28 00:47:51,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.57 vs. limit=22.5 +2024-07-28 00:47:52,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=86473.33333333333, ans=0.0 +2024-07-28 00:47:52,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-28 00:47:57,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=86473.33333333333, ans=0.125 +2024-07-28 00:47:58,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=15.0 +2024-07-28 00:48:07,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.51 vs. limit=22.5 +2024-07-28 00:48:13,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86500.0, ans=0.1 +2024-07-28 00:48:16,929 INFO [train.py:1114] (0/4) Epoch 7, batch 3550, loss[loss=0.2468, simple_loss=0.35, pruned_loss=0.07177, over 4664.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3028, pruned_loss=0.07006, over 938644.78 frames. 
], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:48:17,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=86513.33333333333, ans=0.125 +2024-07-28 00:48:26,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=86526.66666666667, ans=0.2 +2024-07-28 00:48:30,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=86540.0, ans=0.125 +2024-07-28 00:48:42,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=15.0 +2024-07-28 00:48:49,727 INFO [train.py:1114] (0/4) Epoch 7, batch 3600, loss[loss=0.1983, simple_loss=0.2702, pruned_loss=0.06317, over 4964.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3029, pruned_loss=0.07024, over 940151.48 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:48:52,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86580.0, ans=0.125 +2024-07-28 00:48:55,677 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.014e+01 6.005e+01 6.689e+01 7.700e+01 1.084e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-28 00:48:56,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=86593.33333333333, ans=0.2 +2024-07-28 00:48:58,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.45 vs. limit=22.5 +2024-07-28 00:49:08,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=86606.66666666667, ans=0.125 +2024-07-28 00:49:11,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=86620.0, ans=0.125 +2024-07-28 00:49:26,879 INFO [train.py:1114] (0/4) Epoch 7, batch 3650, loss[loss=0.2392, simple_loss=0.3243, pruned_loss=0.07708, over 4915.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3027, pruned_loss=0.07017, over 940632.16 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:49:39,192 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:50:02,201 INFO [train.py:1114] (0/4) Epoch 7, batch 3700, loss[loss=0.251, simple_loss=0.3359, pruned_loss=0.08305, over 4931.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3023, pruned_loss=0.0693, over 941692.40 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:50:04,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86713.33333333333, ans=0.1 +2024-07-28 00:50:07,940 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.091e+01 5.984e+01 6.849e+01 8.141e+01 1.285e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-28 00:50:08,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.85 vs. 
limit=15.0 +2024-07-28 00:50:19,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0 +2024-07-28 00:50:20,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.95 vs. limit=22.5 +2024-07-28 00:50:30,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=86766.66666666667, ans=0.2 +2024-07-28 00:50:36,834 INFO [train.py:1114] (0/4) Epoch 7, batch 3750, loss[loss=0.1818, simple_loss=0.2625, pruned_loss=0.0506, over 4806.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.302, pruned_loss=0.06897, over 943161.32 frames. ], batch size: 11, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:50:53,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=86806.66666666667, ans=0.125 +2024-07-28 00:51:07,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.81 vs. limit=12.0 +2024-07-28 00:51:10,325 INFO [train.py:1114] (0/4) Epoch 7, batch 3800, loss[loss=0.2286, simple_loss=0.3187, pruned_loss=0.06928, over 4806.00 frames. ], tot_loss[loss=0.219, simple_loss=0.3006, pruned_loss=0.06865, over 941789.79 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:51:15,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=86846.66666666667, ans=0.05 +2024-07-28 00:51:16,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.938e+01 6.490e+01 7.260e+01 1.083e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 00:51:16,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=86860.0, ans=0.125 +2024-07-28 00:51:17,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=86860.0, ans=0.125 +2024-07-28 00:51:26,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=86873.33333333333, ans=0.125 +2024-07-28 00:51:26,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=86873.33333333333, ans=0.025 +2024-07-28 00:51:43,451 INFO [train.py:1114] (0/4) Epoch 7, batch 3850, loss[loss=0.2083, simple_loss=0.2829, pruned_loss=0.06679, over 4604.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.3, pruned_loss=0.06811, over 942155.18 frames. ], batch size: 16, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:51:43,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86913.33333333333, ans=0.1 +2024-07-28 00:51:50,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=86926.66666666667, ans=0.125 +2024-07-28 00:51:51,655 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.74 vs. 
limit=15.0 +2024-07-28 00:51:52,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=86926.66666666667, ans=0.1 +2024-07-28 00:51:55,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=86926.66666666667, ans=0.125 +2024-07-28 00:51:57,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=86940.0, ans=0.125 +2024-07-28 00:51:59,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=86940.0, ans=0.0 +2024-07-28 00:52:04,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.12 vs. limit=10.0 +2024-07-28 00:52:06,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=86953.33333333333, ans=0.2 +2024-07-28 00:52:13,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86966.66666666667, ans=0.1 +2024-07-28 00:52:17,054 INFO [train.py:1114] (0/4) Epoch 7, batch 3900, loss[loss=0.2297, simple_loss=0.3157, pruned_loss=0.07182, over 4803.00 frames. ], tot_loss[loss=0.219, simple_loss=0.3012, pruned_loss=0.06845, over 942510.50 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:52:22,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.069e+01 5.781e+01 6.376e+01 7.079e+01 1.169e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 00:52:28,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=86993.33333333333, ans=0.125 +2024-07-28 00:52:45,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87033.33333333333, ans=0.1 +2024-07-28 00:52:49,964 INFO [train.py:1114] (0/4) Epoch 7, batch 3950, loss[loss=0.2241, simple_loss=0.3096, pruned_loss=0.06929, over 4853.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3017, pruned_loss=0.06861, over 944775.48 frames. ], batch size: 16, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:52:52,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=87046.66666666667, ans=10.0 +2024-07-28 00:52:55,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.14 vs. limit=22.5 +2024-07-28 00:52:59,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87060.0, ans=0.125 +2024-07-28 00:53:12,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=87086.66666666667, ans=0.0 +2024-07-28 00:53:15,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.53 vs. 
limit=15.0 +2024-07-28 00:53:20,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87100.0, ans=0.1 +2024-07-28 00:53:23,286 INFO [train.py:1114] (0/4) Epoch 7, batch 4000, loss[loss=0.2077, simple_loss=0.2904, pruned_loss=0.06245, over 4779.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3025, pruned_loss=0.06992, over 941937.73 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:53:29,164 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 5.981e+01 6.594e+01 7.315e+01 1.099e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 00:53:30,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87126.66666666667, ans=0.125 +2024-07-28 00:53:38,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=87140.0, ans=0.125 +2024-07-28 00:53:40,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=87140.0, ans=0.125 +2024-07-28 00:53:54,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0 +2024-07-28 00:53:57,314 INFO [train.py:1114] (0/4) Epoch 7, batch 4050, loss[loss=0.2922, simple_loss=0.3466, pruned_loss=0.1189, over 3692.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3015, pruned_loss=0.06938, over 940742.29 frames. ], batch size: 36, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:54:10,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.67 vs. limit=10.0 +2024-07-28 00:54:26,376 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:54:26,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.53 vs. limit=15.0 +2024-07-28 00:54:29,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87233.33333333333, ans=0.125 +2024-07-28 00:54:32,977 INFO [train.py:1114] (0/4) Epoch 7, batch 4100, loss[loss=0.2584, simple_loss=0.3475, pruned_loss=0.08467, over 4902.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3028, pruned_loss=0.07008, over 939323.35 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:54:33,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=87246.66666666667, ans=0.0 +2024-07-28 00:54:33,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.14 vs. 
limit=6.0 +2024-07-28 00:54:39,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.802e+01 6.541e+01 7.841e+01 1.191e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 00:54:40,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=87260.0, ans=0.07 +2024-07-28 00:54:45,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=87273.33333333333, ans=0.0 +2024-07-28 00:54:49,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=87273.33333333333, ans=0.2 +2024-07-28 00:54:50,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=87273.33333333333, ans=10.0 +2024-07-28 00:54:50,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=87273.33333333333, ans=0.125 +2024-07-28 00:54:54,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87286.66666666667, ans=0.0 +2024-07-28 00:54:54,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0 +2024-07-28 00:54:56,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=87286.66666666667, ans=0.02 +2024-07-28 00:55:01,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=87300.0, ans=0.125 +2024-07-28 00:55:08,151 INFO [train.py:1114] (0/4) Epoch 7, batch 4150, loss[loss=0.202, simple_loss=0.2879, pruned_loss=0.058, over 4821.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3022, pruned_loss=0.06979, over 938514.27 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:55:19,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=87326.66666666667, ans=0.0 +2024-07-28 00:55:23,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87340.0, ans=0.0 +2024-07-28 00:55:24,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=87340.0, ans=0.2 +2024-07-28 00:55:24,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=87340.0, ans=0.02 +2024-07-28 00:55:28,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87340.0, ans=0.1 +2024-07-28 00:55:36,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=87366.66666666667, ans=0.2 +2024-07-28 00:55:36,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=87366.66666666667, ans=0.125 +2024-07-28 00:55:44,758 INFO [train.py:1114] (0/4) Epoch 7, batch 4200, loss[loss=0.2438, simple_loss=0.3199, pruned_loss=0.08385, over 4901.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3035, pruned_loss=0.07031, over 940067.24 frames. 
], batch size: 15, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:55:47,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=87380.0, ans=0.5
+2024-07-28 00:55:48,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=87380.0, ans=0.0
+2024-07-28 00:55:50,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.692e+01 6.166e+01 6.641e+01 1.038e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 00:55:52,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=87393.33333333333, ans=0.125
+2024-07-28 00:56:17,900 INFO [train.py:1114] (0/4) Epoch 7, batch 4250, loss[loss=0.2436, simple_loss=0.3078, pruned_loss=0.08968, over 4639.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3047, pruned_loss=0.07054, over 941448.21 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:56:20,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87446.66666666667, ans=0.125
+2024-07-28 00:56:21,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=87446.66666666667, ans=0.125
+2024-07-28 00:56:22,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=87446.66666666667, ans=0.125
+2024-07-28 00:56:23,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87460.0, ans=0.125
+2024-07-28 00:56:24,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.31 vs. limit=22.5
+2024-07-28 00:56:41,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=87486.66666666667, ans=0.125
+2024-07-28 00:56:51,154 INFO [train.py:1114] (0/4) Epoch 7, batch 4300, loss[loss=0.2112, simple_loss=0.3065, pruned_loss=0.05792, over 4759.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3045, pruned_loss=0.07036, over 940284.10 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:56:52,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=87513.33333333333, ans=0.125
+2024-07-28 00:56:57,164 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 6.079e+01 6.780e+01 8.042e+01 1.237e+02, threshold=1.356e+02, percent-clipped=1.0
+2024-07-28 00:56:59,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=87526.66666666667, ans=0.0
+2024-07-28 00:57:01,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=87526.66666666667, ans=0.125
+2024-07-28 00:57:02,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87526.66666666667, ans=0.125
+2024-07-28 00:57:08,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=87540.0, ans=0.125
+2024-07-28 00:57:10,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=87540.0, ans=0.2
+2024-07-28 00:57:24,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=87580.0, ans=0.0
+2024-07-28 00:57:24,604 INFO [train.py:1114] (0/4) Epoch 7, batch 4350, loss[loss=0.2338, simple_loss=0.3003, pruned_loss=0.08362, over 4760.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3041, pruned_loss=0.06973, over 941094.19 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:57:25,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87580.0, ans=0.125
+2024-07-28 00:57:32,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87593.33333333333, ans=0.1
+2024-07-28 00:57:35,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=87593.33333333333, ans=0.125
+2024-07-28 00:57:37,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87606.66666666667, ans=0.0
+2024-07-28 00:57:47,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87620.0, ans=0.1
+2024-07-28 00:57:50,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87633.33333333333, ans=0.1
+2024-07-28 00:57:58,014 INFO [train.py:1114] (0/4) Epoch 7, batch 4400, loss[loss=0.2372, simple_loss=0.319, pruned_loss=0.07773, over 4813.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.304, pruned_loss=0.06979, over 940615.26 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:58:04,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.991e+01 6.337e+01 7.130e+01 1.070e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-28 00:58:10,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.47 vs. limit=15.0
+2024-07-28 00:58:12,223 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:58:13,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=87673.33333333333, ans=0.125
+2024-07-28 00:58:23,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=87686.66666666667, ans=0.2
+2024-07-28 00:58:26,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=15.0
+2024-07-28 00:58:30,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=87700.0, ans=0.125
+2024-07-28 00:58:31,605 INFO [train.py:1114] (0/4) Epoch 7, batch 4450, loss[loss=0.2, simple_loss=0.2834, pruned_loss=0.05831, over 4939.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3051, pruned_loss=0.07093, over 938959.99 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:58:38,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=87726.66666666667, ans=0.125
+2024-07-28 00:58:39,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=87726.66666666667, ans=0.125
+2024-07-28 00:58:41,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=87726.66666666667, ans=0.125
+2024-07-28 00:58:43,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=87726.66666666667, ans=0.1
+2024-07-28 00:58:55,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=87753.33333333333, ans=0.125
+2024-07-28 00:58:56,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=87753.33333333333, ans=0.125
+2024-07-28 00:59:05,081 INFO [train.py:1114] (0/4) Epoch 7, batch 4500, loss[loss=0.2007, simple_loss=0.2922, pruned_loss=0.05463, over 4748.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3053, pruned_loss=0.07055, over 938149.78 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 00:59:05,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=87780.0, ans=0.025
+2024-07-28 00:59:06,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87780.0, ans=0.1
+2024-07-28 00:59:10,922 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.694e+01 6.393e+01 7.700e+01 1.282e+02, threshold=1.279e+02, percent-clipped=1.0
+2024-07-28 00:59:17,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=87793.33333333333, ans=22.5
+2024-07-28 00:59:20,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87806.66666666667, ans=0.125
+2024-07-28 00:59:24,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=87820.0, ans=0.125
+2024-07-28 00:59:38,263 INFO [train.py:1114] (0/4) Epoch 7, batch 4550, loss[loss=0.2039, simple_loss=0.2778, pruned_loss=0.06498, over 4911.00 frames. ], tot_loss[loss=0.222, simple_loss=0.304, pruned_loss=0.07, over 940223.50 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 00:59:41,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=87846.66666666667, ans=0.0
+2024-07-28 00:59:47,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=87860.0, ans=0.2
+2024-07-28 00:59:51,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=87873.33333333333, ans=0.2
+2024-07-28 00:59:53,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87873.33333333333, ans=0.1
+2024-07-28 00:59:54,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=87873.33333333333, ans=0.0
+2024-07-28 01:00:10,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=87900.0, ans=0.125
+2024-07-28 01:00:13,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=87900.0, ans=0.125
+2024-07-28 01:00:15,711 INFO [train.py:1114] (0/4) Epoch 7, batch 4600, loss[loss=0.2352, simple_loss=0.2978, pruned_loss=0.08626, over 4558.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.303, pruned_loss=0.06977, over 938741.29 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:00:21,777 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.990e+01 6.916e+01 8.662e+01 1.306e+02, threshold=1.383e+02, percent-clipped=1.0
+2024-07-28 01:00:21,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=87926.66666666667, ans=0.0
+2024-07-28 01:00:30,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=87940.0, ans=0.125
+2024-07-28 01:00:30,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=87940.0, ans=0.5
+2024-07-28 01:00:31,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=87940.0, ans=0.0
+2024-07-28 01:00:43,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=87966.66666666667, ans=0.125
+2024-07-28 01:00:46,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=87966.66666666667, ans=0.0
+2024-07-28 01:00:50,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87980.0, ans=0.1
+2024-07-28 01:00:50,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0
+2024-07-28 01:00:50,828 INFO [train.py:1114] (0/4) Epoch 7, batch 4650, loss[loss=0.2634, simple_loss=0.3416, pruned_loss=0.09257, over 4848.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.304, pruned_loss=0.07026, over 940095.42 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:01:16,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=15.0
+2024-07-28 01:01:26,360 INFO [train.py:1114] (0/4) Epoch 7, batch 4700, loss[loss=0.168, simple_loss=0.2519, pruned_loss=0.04208, over 4705.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3045, pruned_loss=0.07085, over 937004.43 frames. ], batch size: 11, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:01:32,304 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.909e+01 6.693e+01 7.629e+01 1.851e+02, threshold=1.339e+02, percent-clipped=2.0
+2024-07-28 01:01:39,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88073.33333333333, ans=0.1
+2024-07-28 01:01:50,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88086.66666666667, ans=0.1
+2024-07-28 01:01:52,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=88100.0, ans=0.125
+2024-07-28 01:01:59,573 INFO [train.py:1114] (0/4) Epoch 7, batch 4750, loss[loss=0.245, simple_loss=0.3388, pruned_loss=0.07564, over 4402.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3061, pruned_loss=0.07218, over 935194.28 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:02:00,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=88113.33333333333, ans=0.015
+2024-07-28 01:02:00,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=88113.33333333333, ans=0.125
+2024-07-28 01:02:05,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=88113.33333333333, ans=0.025
+2024-07-28 01:02:13,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=88140.0, ans=0.2
+2024-07-28 01:02:14,536 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=22.5
+2024-07-28 01:02:15,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88140.0, ans=0.125
+2024-07-28 01:02:16,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=88140.0, ans=0.125
+2024-07-28 01:02:22,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88153.33333333333, ans=0.125
+2024-07-28 01:02:32,548 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.82 vs. limit=15.0
+2024-07-28 01:02:33,455 INFO [train.py:1114] (0/4) Epoch 7, batch 4800, loss[loss=0.2059, simple_loss=0.299, pruned_loss=0.05638, over 4690.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3051, pruned_loss=0.07201, over 932734.95 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:02:33,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=88180.0, ans=0.0
+2024-07-28 01:02:39,289 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.971e+01 6.574e+01 7.583e+01 1.047e+02, threshold=1.315e+02, percent-clipped=0.0
+2024-07-28 01:02:42,037 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:02:42,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.53 vs. limit=15.0
+2024-07-28 01:02:46,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88206.66666666667, ans=0.125
+2024-07-28 01:02:51,954 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:02:53,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88220.0, ans=0.0
+2024-07-28 01:02:58,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.44 vs. limit=15.0
+2024-07-28 01:03:01,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. limit=6.0
+2024-07-28 01:03:04,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=88233.33333333333, ans=0.5
+2024-07-28 01:03:06,448 INFO [train.py:1114] (0/4) Epoch 7, batch 4850, loss[loss=0.2213, simple_loss=0.3079, pruned_loss=0.06736, over 4749.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3052, pruned_loss=0.07199, over 932269.81 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:03:39,970 INFO [train.py:1114] (0/4) Epoch 7, batch 4900, loss[loss=0.25, simple_loss=0.3236, pruned_loss=0.08823, over 4754.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3034, pruned_loss=0.07075, over 934135.68 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:03:46,982 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.185e+01 5.896e+01 6.545e+01 7.673e+01 1.105e+02, threshold=1.309e+02, percent-clipped=0.0
+2024-07-28 01:03:47,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88326.66666666667, ans=0.125
+2024-07-28 01:03:54,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=88340.0, ans=0.0
+2024-07-28 01:03:57,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=88340.0, ans=0.2
+2024-07-28 01:04:01,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=88353.33333333333, ans=0.2
+2024-07-28 01:04:13,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=88380.0, ans=0.2
+2024-07-28 01:04:13,956 INFO [train.py:1114] (0/4) Epoch 7, batch 4950, loss[loss=0.2781, simple_loss=0.3569, pruned_loss=0.09969, over 3473.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3055, pruned_loss=0.07194, over 931029.40 frames. ], batch size: 35, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:04:20,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=88393.33333333333, ans=0.125
+2024-07-28 01:04:26,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.84 vs. limit=15.0
+2024-07-28 01:04:34,383 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.99 vs. limit=15.0
+2024-07-28 01:04:39,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=88420.0, ans=0.0
+2024-07-28 01:04:48,682 INFO [train.py:1114] (0/4) Epoch 7, batch 5000, loss[loss=0.228, simple_loss=0.3147, pruned_loss=0.07071, over 4667.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3047, pruned_loss=0.0711, over 934923.68 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:04:55,129 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.044e+01 6.033e+01 7.025e+01 8.348e+01 1.303e+02, threshold=1.405e+02, percent-clipped=0.0
+2024-07-28 01:05:08,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88486.66666666667, ans=0.125
+2024-07-28 01:05:16,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=15.0
+2024-07-28 01:05:16,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=88500.0, ans=0.025
+2024-07-28 01:05:20,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.20 vs. limit=22.5
+2024-07-28 01:05:21,759 INFO [train.py:1114] (0/4) Epoch 7, batch 5050, loss[loss=0.1841, simple_loss=0.27, pruned_loss=0.04904, over 4865.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3031, pruned_loss=0.06995, over 937495.54 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:05:38,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88540.0, ans=0.1
+2024-07-28 01:05:48,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=88553.33333333333, ans=0.1
+2024-07-28 01:05:54,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.17 vs. limit=15.0
+2024-07-28 01:05:59,502 INFO [train.py:1114] (0/4) Epoch 7, batch 5100, loss[loss=0.2156, simple_loss=0.3006, pruned_loss=0.06533, over 4781.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3045, pruned_loss=0.07101, over 935223.19 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:06:04,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=88580.0, ans=0.1
+2024-07-28 01:06:06,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.884e+01 6.519e+01 7.454e+01 1.176e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 01:06:24,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88620.0, ans=0.125
+2024-07-28 01:06:26,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=22.5
+2024-07-28 01:06:26,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=88620.0, ans=0.2
+2024-07-28 01:06:27,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=88620.0, ans=0.025
+2024-07-28 01:06:35,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=88633.33333333333, ans=0.125
+2024-07-28 01:06:35,777 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=15.0
+2024-07-28 01:06:37,948 INFO [train.py:1114] (0/4) Epoch 7, batch 5150, loss[loss=0.2721, simple_loss=0.3453, pruned_loss=0.09946, over 4828.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3055, pruned_loss=0.07178, over 936121.33 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:06:47,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=88660.0, ans=0.125
+2024-07-28 01:06:52,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.89 vs. limit=22.5
+2024-07-28 01:06:53,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=88673.33333333333, ans=0.0
+2024-07-28 01:06:53,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=88673.33333333333, ans=0.125
+2024-07-28 01:06:55,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.76 vs. limit=15.0
+2024-07-28 01:06:59,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=88686.66666666667, ans=0.0
+2024-07-28 01:07:03,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=88686.66666666667, ans=0.1
+2024-07-28 01:07:10,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.69 vs. limit=22.5
+2024-07-28 01:07:11,378 INFO [train.py:1114] (0/4) Epoch 7, batch 5200, loss[loss=0.2222, simple_loss=0.3172, pruned_loss=0.06361, over 4660.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3051, pruned_loss=0.07113, over 935884.74 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:07:11,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=88713.33333333333, ans=0.0
+2024-07-28 01:07:13,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=88713.33333333333, ans=0.125
+2024-07-28 01:07:13,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=88713.33333333333, ans=0.125
+2024-07-28 01:07:14,198 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:07:14,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=88713.33333333333, ans=0.125
+2024-07-28 01:07:18,345 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.071e+01 6.603e+01 7.061e+01 1.007e+02, threshold=1.321e+02, percent-clipped=0.0
+2024-07-28 01:07:36,726 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:07:38,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=88766.66666666667, ans=0.125
+2024-07-28 01:07:39,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.97 vs. limit=15.0
+2024-07-28 01:07:44,919 INFO [train.py:1114] (0/4) Epoch 7, batch 5250, loss[loss=0.1982, simple_loss=0.2832, pruned_loss=0.05658, over 4895.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3045, pruned_loss=0.071, over 935544.76 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:07:48,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=88780.0, ans=0.0
+2024-07-28 01:07:51,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=88793.33333333333, ans=0.0
+2024-07-28 01:07:58,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88806.66666666667, ans=0.1
+2024-07-28 01:08:04,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=88820.0, ans=0.025
+2024-07-28 01:08:06,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=88820.0, ans=0.0
+2024-07-28 01:08:06,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=88820.0, ans=0.07
+2024-07-28 01:08:17,097 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=22.5
+2024-07-28 01:08:18,695 INFO [train.py:1114] (0/4) Epoch 7, batch 5300, loss[loss=0.2853, simple_loss=0.3545, pruned_loss=0.108, over 4640.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3032, pruned_loss=0.07067, over 934156.76 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:08:25,198 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.926e+01 6.505e+01 7.271e+01 1.034e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 01:08:44,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=88900.0, ans=0.2
+2024-07-28 01:08:51,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=88913.33333333333, ans=0.125
+2024-07-28 01:08:51,802 INFO [train.py:1114] (0/4) Epoch 7, batch 5350, loss[loss=0.186, simple_loss=0.2721, pruned_loss=0.04994, over 4523.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.304, pruned_loss=0.0707, over 936202.08 frames. ], batch size: 10, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:08:52,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88913.33333333333, ans=0.1
+2024-07-28 01:08:53,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88913.33333333333, ans=0.125
+2024-07-28 01:08:59,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=88926.66666666667, ans=0.125
+2024-07-28 01:09:06,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=88940.0, ans=0.125
+2024-07-28 01:09:10,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88940.0, ans=0.125
+2024-07-28 01:09:22,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=88953.33333333333, ans=0.125
+2024-07-28 01:09:30,893 INFO [train.py:1114] (0/4) Epoch 7, batch 5400, loss[loss=0.2215, simple_loss=0.3012, pruned_loss=0.07091, over 4152.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3045, pruned_loss=0.07095, over 930835.17 frames. ], batch size: 25, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:09:34,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=88980.0, ans=0.0
+2024-07-28 01:09:34,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=88980.0, ans=0.0
+2024-07-28 01:09:35,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=88980.0, ans=0.5
+2024-07-28 01:09:35,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=88980.0, ans=0.025
+2024-07-28 01:09:37,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.891e+01 6.586e+01 7.274e+01 1.067e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-28 01:09:39,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=88993.33333333333, ans=0.125
+2024-07-28 01:09:43,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=88993.33333333333, ans=0.125
+2024-07-28 01:09:48,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=89006.66666666667, ans=0.025
+2024-07-28 01:09:54,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=89020.0, ans=0.2
+2024-07-28 01:10:00,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.40 vs. limit=15.0
+2024-07-28 01:10:04,291 INFO [train.py:1114] (0/4) Epoch 7, batch 5450, loss[loss=0.1787, simple_loss=0.25, pruned_loss=0.05372, over 4697.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3038, pruned_loss=0.0708, over 933389.02 frames. ], batch size: 11, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:10:07,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=89046.66666666667, ans=0.025
+2024-07-28 01:10:09,825 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:10:25,774 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:10:45,383 INFO [train.py:1114] (0/4) Epoch 7, batch 5500, loss[loss=0.2215, simple_loss=0.3044, pruned_loss=0.06932, over 4089.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3045, pruned_loss=0.07166, over 930992.13 frames. ], batch size: 25, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:11:09,584 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 6.102e+01 6.764e+01 7.655e+01 1.015e+02, threshold=1.353e+02, percent-clipped=0.0
+2024-07-28 01:12:14,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=89153.33333333333, ans=0.0
+2024-07-28 01:12:17,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=89166.66666666667, ans=0.0
+2024-07-28 01:12:17,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-28 01:12:22,823 INFO [train.py:1114] (0/4) Epoch 7, batch 5550, loss[loss=0.2725, simple_loss=0.3448, pruned_loss=0.1002, over 4705.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3049, pruned_loss=0.07184, over 933408.45 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:12:25,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.64 vs. limit=15.0
+2024-07-28 01:12:30,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=89193.33333333333, ans=0.0
+2024-07-28 01:12:39,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.73 vs. limit=22.5
+2024-07-28 01:12:53,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=89220.0, ans=0.2
+2024-07-28 01:13:07,181 INFO [train.py:1114] (0/4) Epoch 7, batch 5600, loss[loss=0.2493, simple_loss=0.3297, pruned_loss=0.08445, over 4744.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3052, pruned_loss=0.07169, over 934651.01 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:13:14,833 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.099e+01 6.958e+01 8.233e+01 1.047e+02, threshold=1.392e+02, percent-clipped=0.0
+2024-07-28 01:13:42,967 INFO [train.py:1114] (0/4) Epoch 7, batch 5650, loss[loss=0.2306, simple_loss=0.2995, pruned_loss=0.08091, over 4412.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3043, pruned_loss=0.07118, over 937215.02 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:13:43,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89313.33333333333, ans=0.125
+2024-07-28 01:14:03,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=89353.33333333333, ans=0.5
+2024-07-28 01:14:11,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.87 vs. limit=12.0
+2024-07-28 01:14:12,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=89366.66666666667, ans=0.09899494936611666
+2024-07-28 01:14:16,566 INFO [train.py:1114] (0/4) Epoch 7, batch 5700, loss[loss=0.2709, simple_loss=0.3545, pruned_loss=0.09364, over 4706.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3039, pruned_loss=0.07044, over 938454.70 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:14:16,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.96 vs. limit=6.0
+2024-07-28 01:14:23,478 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.798e+01 6.210e+01 7.158e+01 1.197e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 01:14:43,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=89433.33333333333, ans=12.0
+2024-07-28 01:14:43,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=89433.33333333333, ans=0.2
+2024-07-28 01:14:46,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=89433.33333333333, ans=0.125
+2024-07-28 01:14:47,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.90 vs. limit=15.0
+2024-07-28 01:14:50,079 INFO [train.py:1114] (0/4) Epoch 7, batch 5750, loss[loss=0.2291, simple_loss=0.3107, pruned_loss=0.07375, over 4737.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3055, pruned_loss=0.07079, over 938388.63 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:14:54,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89446.66666666667, ans=0.1
+2024-07-28 01:14:55,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=89446.66666666667, ans=0.125
+2024-07-28 01:14:59,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=15.0
+2024-07-28 01:15:11,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=89486.66666666667, ans=0.125
+2024-07-28 01:15:12,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0
+2024-07-28 01:15:23,591 INFO [train.py:1114] (0/4) Epoch 7, batch 5800, loss[loss=0.2481, simple_loss=0.3239, pruned_loss=0.08608, over 4784.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3069, pruned_loss=0.07186, over 937394.34 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:15:31,068 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.066e+01 6.586e+01 7.704e+01 1.621e+02, threshold=1.317e+02, percent-clipped=1.0
+2024-07-28 01:15:46,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=89553.33333333333, ans=0.0
+2024-07-28 01:15:54,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=89566.66666666667, ans=0.2
+2024-07-28 01:15:57,444 INFO [train.py:1114] (0/4) Epoch 7, batch 5850, loss[loss=0.2144, simple_loss=0.2967, pruned_loss=0.06598, over 4527.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3069, pruned_loss=0.0719, over 938380.69 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:15:59,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=89580.0, ans=0.125
+2024-07-28 01:16:05,941 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-07-28 01:16:27,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=89633.33333333333, ans=0.125
+2024-07-28 01:16:29,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89633.33333333333, ans=0.1
+2024-07-28 01:16:32,072 INFO [train.py:1114] (0/4) Epoch 7, batch 5900, loss[loss=0.2515, simple_loss=0.3136, pruned_loss=0.09477, over 4678.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3058, pruned_loss=0.07156, over 938488.32 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:16:39,540 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 6.380e+01 7.370e+01 9.045e+01 1.525e+02, threshold=1.474e+02, percent-clipped=5.0
+2024-07-28 01:16:42,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=89660.0, ans=0.0
+2024-07-28 01:16:43,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=89660.0, ans=0.0
+2024-07-28 01:16:51,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=89686.66666666667, ans=0.1
+2024-07-28 01:16:52,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.39 vs. limit=22.5
+2024-07-28 01:16:55,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.25 vs. limit=15.0
+2024-07-28 01:16:57,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=89686.66666666667, ans=0.2
+2024-07-28 01:17:03,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=89700.0, ans=0.2
+2024-07-28 01:17:07,242 INFO [train.py:1114] (0/4) Epoch 7, batch 5950, loss[loss=0.2611, simple_loss=0.3389, pruned_loss=0.09162, over 4679.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.305, pruned_loss=0.07085, over 940470.60 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:17:12,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=89713.33333333333, ans=0.0
+2024-07-28 01:17:27,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=89753.33333333333, ans=0.2
+2024-07-28 01:17:31,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=89753.33333333333, ans=0.0
+2024-07-28 01:17:42,403 INFO [train.py:1114] (0/4) Epoch 7, batch 6000, loss[loss=0.2337, simple_loss=0.3299, pruned_loss=0.06873, over 4225.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3033, pruned_loss=0.06958, over 937478.92 frames. ], batch size: 25, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:17:42,404 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 01:17:54,532 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2893, pruned_loss=0.04109, over 944034.00 frames.
+2024-07-28 01:17:54,533 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 01:18:03,853 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.859e+01 6.415e+01 7.407e+01 1.156e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-28 01:18:09,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89793.33333333333, ans=0.125
+2024-07-28 01:18:12,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89806.66666666667, ans=0.1
+2024-07-28 01:18:14,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89806.66666666667, ans=0.125
+2024-07-28 01:18:22,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=89820.0, ans=0.0
+2024-07-28 01:18:26,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=89833.33333333333, ans=0.0
+2024-07-28 01:18:31,891 INFO [train.py:1114] (0/4) Epoch 7, batch 6050, loss[loss=0.2153, simple_loss=0.2961, pruned_loss=0.06724, over 4777.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3033, pruned_loss=0.06967, over 938680.95 frames. ], batch size: 12, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:18:42,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=89860.0, ans=0.125
+2024-07-28 01:18:45,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=89873.33333333333, ans=0.0
+2024-07-28 01:18:46,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=89873.33333333333, ans=0.2
+2024-07-28 01:18:49,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=89873.33333333333, ans=22.5
+2024-07-28 01:18:50,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=89873.33333333333, ans=0.0
+2024-07-28 01:18:56,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0
+2024-07-28 01:18:56,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=89886.66666666667, ans=0.2
+2024-07-28 01:18:59,493 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:19:05,215 INFO [train.py:1114] (0/4) Epoch 7, batch 6100, loss[loss=0.2208, simple_loss=0.3098, pruned_loss=0.06594, over 4678.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3028, pruned_loss=0.06899, over 938072.69 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:19:12,442 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.958e+01 6.611e+01 7.776e+01 1.081e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-28 01:19:23,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=89940.0, ans=0.125
+2024-07-28 01:19:24,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=89940.0, ans=0.2
+2024-07-28 01:19:24,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.75 vs. limit=15.0
+2024-07-28 01:19:26,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=89953.33333333333, ans=0.0
+2024-07-28 01:19:34,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=89966.66666666667, ans=0.0
+2024-07-28 01:19:38,550 INFO [train.py:1114] (0/4) Epoch 7, batch 6150, loss[loss=0.3229, simple_loss=0.3637, pruned_loss=0.1411, over 3254.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3028, pruned_loss=0.06897, over 936962.17 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:19:55,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=90006.66666666667, ans=0.125
+2024-07-28 01:19:58,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=15.0
+2024-07-28 01:20:05,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90020.0, ans=0.1
+2024-07-28 01:20:15,020 INFO [train.py:1114] (0/4) Epoch 7, batch 6200, loss[loss=0.2071, simple_loss=0.3128, pruned_loss=0.05065, over 4734.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3036, pruned_loss=0.06919, over 936294.99 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:20:22,606 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.692e+01 6.027e+01 6.497e+01 7.393e+01 1.206e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-28 01:20:28,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90073.33333333333, ans=0.125
+2024-07-28 01:20:32,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90073.33333333333, ans=0.125
+2024-07-28 01:20:36,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=90086.66666666667, ans=0.125
+2024-07-28 01:20:38,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=90086.66666666667, ans=0.125
+2024-07-28 01:20:48,961 INFO [train.py:1114] (0/4) Epoch 7, batch 6250, loss[loss=0.2271, simple_loss=0.3144, pruned_loss=0.06992, over 4817.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3041, pruned_loss=0.06972, over 932636.60 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:20:49,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=90113.33333333333, ans=0.95
+2024-07-28 01:20:51,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=90113.33333333333, ans=0.0
+2024-07-28 01:21:09,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90153.33333333333, ans=0.0
+2024-07-28 01:21:14,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.60 vs. limit=15.0
+2024-07-28 01:21:21,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=90166.66666666667, ans=0.125
+2024-07-28 01:21:22,815 INFO [train.py:1114] (0/4) Epoch 7, batch 6300, loss[loss=0.1639, simple_loss=0.2454, pruned_loss=0.04123, over 4510.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3025, pruned_loss=0.06943, over 928580.08 frames. ], batch size: 10, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:21:26,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=90180.0, ans=0.0
+2024-07-28 01:21:29,895 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.979e+01 7.188e+01 8.735e+01 1.314e+02, threshold=1.438e+02, percent-clipped=1.0
+2024-07-28 01:21:35,996 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:21:43,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=90220.0, ans=0.0
+2024-07-28 01:21:44,235 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0
+2024-07-28 01:21:45,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=90220.0, ans=0.0
+2024-07-28 01:21:55,881 INFO [train.py:1114] (0/4) Epoch 7, batch 6350, loss[loss=0.2841, simple_loss=0.3491, pruned_loss=0.1096, over 4570.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.3015, pruned_loss=0.06862, over 932944.35 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:22:05,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=90260.0, ans=0.0
+2024-07-28 01:22:06,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-28 01:22:14,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=90273.33333333333, ans=0.05
+2024-07-28 01:22:25,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.30 vs. limit=15.0
+2024-07-28 01:22:29,294 INFO [train.py:1114] (0/4) Epoch 7, batch 6400, loss[loss=0.1862, simple_loss=0.2866, pruned_loss=0.04289, over 4639.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3018, pruned_loss=0.06934, over 934448.99 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:22:32,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-28 01:22:33,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=90313.33333333333, ans=0.125
+2024-07-28 01:22:34,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90313.33333333333, ans=0.1
+2024-07-28 01:22:36,608 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.990e+01 6.724e+01 8.012e+01 1.042e+02, threshold=1.345e+02, percent-clipped=0.0
+2024-07-28 01:22:40,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=90326.66666666667, ans=0.5
+2024-07-28 01:22:42,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=90340.0, ans=0.0
+2024-07-28 01:22:47,116 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.50 vs. limit=15.0
+2024-07-28 01:22:48,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=90353.33333333333, ans=0.125
+2024-07-28 01:22:50,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90353.33333333333, ans=0.1
+2024-07-28 01:23:00,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.78 vs. limit=15.0
+2024-07-28 01:23:01,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=90366.66666666667, ans=0.125
+2024-07-28 01:23:06,107 INFO [train.py:1114] (0/4) Epoch 7, batch 6450, loss[loss=0.2198, simple_loss=0.3127, pruned_loss=0.06342, over 4531.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3022, pruned_loss=0.06911, over 938246.44 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:23:08,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0
+2024-07-28 01:23:15,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90393.33333333333, ans=0.1
+2024-07-28 01:23:22,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90406.66666666667, ans=0.1
+2024-07-28 01:23:27,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=90420.0, ans=0.125
+2024-07-28 01:23:31,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90420.0, ans=0.125
+2024-07-28 01:23:32,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90420.0, ans=0.125
+2024-07-28 01:23:33,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90420.0, ans=0.1
+2024-07-28 01:23:36,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=90433.33333333333, ans=0.125
+2024-07-28 01:23:42,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=90433.33333333333, ans=0.0
+2024-07-28 01:23:45,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.58 vs. limit=15.0
+2024-07-28 01:23:46,208 INFO [train.py:1114] (0/4) Epoch 7, batch 6500, loss[loss=0.3031, simple_loss=0.3723, pruned_loss=0.117, over 3302.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.3018, pruned_loss=0.06849, over 939629.48 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:23:57,756 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.820e+01 6.453e+01 7.206e+01 1.081e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-28 01:24:03,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0
+2024-07-28 01:24:14,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90473.33333333333, ans=0.125
+2024-07-28 01:24:14,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=90473.33333333333, ans=0.125
+2024-07-28 01:24:32,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90500.0, ans=0.1
+2024-07-28 01:24:38,510 INFO [train.py:1114] (0/4) Epoch 7, batch 6550, loss[loss=0.1853, simple_loss=0.2626, pruned_loss=0.05396, over 4802.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3025, pruned_loss=0.06905, over 942726.10 frames. ], batch size: 11, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:24:52,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-07-28 01:24:54,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=90540.0, ans=0.2
+2024-07-28 01:25:01,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90553.33333333333, ans=0.125
+2024-07-28 01:25:21,634 INFO [train.py:1114] (0/4) Epoch 7, batch 6600, loss[loss=0.2148, simple_loss=0.3016, pruned_loss=0.06396, over 4929.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3029, pruned_loss=0.06971, over 944680.83 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:25:29,263 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 6.007e+01 7.132e+01 8.613e+01 1.294e+02, threshold=1.426e+02, percent-clipped=1.0
+2024-07-28 01:25:41,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=90606.66666666667, ans=0.125
+2024-07-28 01:25:43,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.23 vs. limit=22.5
+2024-07-28 01:25:57,361 INFO [train.py:1114] (0/4) Epoch 7, batch 6650, loss[loss=0.2523, simple_loss=0.3449, pruned_loss=0.07984, over 4680.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3041, pruned_loss=0.07047, over 943759.46 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:26:06,970 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-68000.pt
+2024-07-28 01:26:09,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.19 vs. limit=6.0
+2024-07-28 01:26:12,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.31 vs. limit=15.0
+2024-07-28 01:26:12,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.00 vs. limit=15.0
+2024-07-28 01:26:19,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90686.66666666667, ans=0.125
+2024-07-28 01:26:27,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=15.0
+2024-07-28 01:26:31,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=90700.0, ans=0.125
+2024-07-28 01:26:33,326 INFO [train.py:1114] (0/4) Epoch 7, batch 6700, loss[loss=0.222, simple_loss=0.3111, pruned_loss=0.06641, over 4727.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3039, pruned_loss=0.07057, over 942468.96 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:26:40,646 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 5.978e+01 6.873e+01 8.305e+01 1.151e+02, threshold=1.375e+02, percent-clipped=0.0
+2024-07-28 01:26:48,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=90740.0, ans=0.2
+2024-07-28 01:26:54,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=90753.33333333333, ans=0.2
+2024-07-28 01:27:07,205 INFO [train.py:1114] (0/4) Epoch 7, batch 6750, loss[loss=0.2515, simple_loss=0.3381, pruned_loss=0.08245, over 4130.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3043, pruned_loss=0.07028, over 940106.45 frames. ], batch size: 25, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:27:07,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90780.0, ans=0.1
+2024-07-28 01:27:07,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=90780.0, ans=0.125
+2024-07-28 01:27:10,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=90780.0, ans=0.09899494936611666
+2024-07-28 01:27:12,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=90780.0, ans=0.125
+2024-07-28 01:27:21,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=90806.66666666667, ans=0.2
+2024-07-28 01:27:23,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=90806.66666666667, ans=0.09899494936611666
+2024-07-28 01:27:30,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.25 vs. limit=15.0
+2024-07-28 01:27:40,995 INFO [train.py:1114] (0/4) Epoch 7, batch 6800, loss[loss=0.2238, simple_loss=0.3129, pruned_loss=0.06733, over 4635.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3049, pruned_loss=0.07078, over 938134.58 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:27:41,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90846.66666666667, ans=0.1
+2024-07-28 01:27:43,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=90846.66666666667, ans=0.125
+2024-07-28 01:27:48,116 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.741e+01 6.354e+01 7.079e+01 9.743e+01, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 01:27:52,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=90860.0, ans=0.125
+2024-07-28 01:27:53,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.01 vs. limit=12.0
+2024-07-28 01:27:56,765 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:28:09,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=90900.0, ans=0.125
+2024-07-28 01:28:12,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.80 vs. limit=10.0
+2024-07-28 01:28:14,115 INFO [train.py:1114] (0/4) Epoch 7, batch 6850, loss[loss=0.2164, simple_loss=0.3125, pruned_loss=0.06019, over 4703.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3047, pruned_loss=0.0704, over 940133.78 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:28:16,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=90913.33333333333, ans=0.2
+2024-07-28 01:28:18,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.50 vs. limit=15.0
+2024-07-28 01:28:18,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=90913.33333333333, ans=0.0
+2024-07-28 01:28:22,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=90926.66666666667, ans=0.2
+2024-07-28 01:28:32,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=90940.0, ans=0.0
+2024-07-28 01:28:36,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0
+2024-07-28 01:28:45,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90966.66666666667, ans=0.1
+2024-07-28 01:28:46,095 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.79 vs. limit=15.0
+2024-07-28 01:28:48,233 INFO [train.py:1114] (0/4) Epoch 7, batch 6900, loss[loss=0.1853, simple_loss=0.2764, pruned_loss=0.04714, over 4962.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3044, pruned_loss=0.0702, over 942738.98 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:28:50,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=90980.0, ans=0.125
+2024-07-28 01:28:52,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.79 vs. limit=22.5
+2024-07-28 01:28:55,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=90980.0, ans=0.04949747468305833
+2024-07-28 01:28:56,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.99 vs.
limit=15.0 +2024-07-28 01:28:57,324 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.878e+01 5.899e+01 6.510e+01 7.129e+01 1.062e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 01:29:18,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=91033.33333333333, ans=6.0 +2024-07-28 01:29:24,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=91033.33333333333, ans=10.0 +2024-07-28 01:29:25,539 INFO [train.py:1114] (0/4) Epoch 7, batch 6950, loss[loss=0.1874, simple_loss=0.2691, pruned_loss=0.05286, over 4514.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3042, pruned_loss=0.06972, over 939720.08 frames. ], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:29:54,714 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.609e-01 +2024-07-28 01:29:56,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=91100.0, ans=0.125 +2024-07-28 01:29:58,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.23 vs. limit=15.0 +2024-07-28 01:29:59,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=91100.0, ans=10.0 +2024-07-28 01:30:02,401 INFO [train.py:1114] (0/4) Epoch 7, batch 7000, loss[loss=0.2293, simple_loss=0.3147, pruned_loss=0.07192, over 4618.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3029, pruned_loss=0.06873, over 937809.46 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:30:07,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=91113.33333333333, ans=0.025 +2024-07-28 01:30:09,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.04 vs. limit=10.0 +2024-07-28 01:30:09,590 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.067e+01 5.877e+01 6.787e+01 8.210e+01 1.500e+02, threshold=1.357e+02, percent-clipped=1.0 +2024-07-28 01:30:12,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91126.66666666667, ans=0.125 +2024-07-28 01:30:16,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.82 vs. limit=10.0 +2024-07-28 01:30:18,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91140.0, ans=0.125 +2024-07-28 01:30:21,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-28 01:30:35,475 INFO [train.py:1114] (0/4) Epoch 7, batch 7050, loss[loss=0.2497, simple_loss=0.3279, pruned_loss=0.08575, over 4760.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.303, pruned_loss=0.06929, over 941163.85 frames. 
], batch size: 19, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:30:39,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.87 vs. limit=15.0 +2024-07-28 01:30:41,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=15.0 +2024-07-28 01:30:43,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91193.33333333333, ans=0.125 +2024-07-28 01:30:46,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=91193.33333333333, ans=0.0 +2024-07-28 01:30:56,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91220.0, ans=0.125 +2024-07-28 01:31:09,045 INFO [train.py:1114] (0/4) Epoch 7, batch 7100, loss[loss=0.2183, simple_loss=0.298, pruned_loss=0.06933, over 4792.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3034, pruned_loss=0.06965, over 935694.97 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:31:09,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=91246.66666666667, ans=0.2 +2024-07-28 01:31:16,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.674e+01 6.634e+01 7.600e+01 1.129e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 01:31:19,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=91260.0, ans=0.2 +2024-07-28 01:31:41,791 INFO [train.py:1114] (0/4) Epoch 7, batch 7150, loss[loss=0.2594, simple_loss=0.3378, pruned_loss=0.09054, over 4522.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.3017, pruned_loss=0.06876, over 936833.75 frames. ], batch size: 21, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:31:41,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=91313.33333333333, ans=0.2 +2024-07-28 01:31:47,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=91313.33333333333, ans=0.0 +2024-07-28 01:31:49,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91326.66666666667, ans=0.125 +2024-07-28 01:31:51,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=91326.66666666667, ans=0.0 +2024-07-28 01:32:03,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=91353.33333333333, ans=0.125 +2024-07-28 01:32:14,587 INFO [train.py:1114] (0/4) Epoch 7, batch 7200, loss[loss=0.2055, simple_loss=0.2994, pruned_loss=0.05576, over 4806.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3023, pruned_loss=0.06895, over 937019.61 frames. 
], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:32:22,364 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 5.919e+01 6.755e+01 7.806e+01 1.038e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 01:32:25,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=91393.33333333333, ans=0.0 +2024-07-28 01:32:35,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=91406.66666666667, ans=0.125 +2024-07-28 01:32:37,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=15.0 +2024-07-28 01:32:43,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=91433.33333333333, ans=0.09899494936611666 +2024-07-28 01:32:48,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=91446.66666666667, ans=0.0 +2024-07-28 01:32:49,498 INFO [train.py:1114] (0/4) Epoch 7, batch 7250, loss[loss=0.2039, simple_loss=0.2902, pruned_loss=0.05881, over 4844.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3022, pruned_loss=0.06883, over 938383.21 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:32:52,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=91446.66666666667, ans=0.035 +2024-07-28 01:32:53,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=91446.66666666667, ans=0.0 +2024-07-28 01:32:57,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=91460.0, ans=0.025 +2024-07-28 01:32:58,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=91460.0, ans=0.025 +2024-07-28 01:33:15,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=91500.0, ans=0.2 +2024-07-28 01:33:20,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.70 vs. limit=10.0 +2024-07-28 01:33:22,426 INFO [train.py:1114] (0/4) Epoch 7, batch 7300, loss[loss=0.186, simple_loss=0.272, pruned_loss=0.04998, over 4847.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3027, pruned_loss=0.06913, over 939206.14 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:33:30,223 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.315e+01 6.274e+01 7.077e+01 8.324e+01 1.199e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 01:33:41,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=91553.33333333333, ans=0.0 +2024-07-28 01:33:43,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=91553.33333333333, ans=0.2 +2024-07-28 01:33:44,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.29 vs. 
limit=10.0 +2024-07-28 01:33:45,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=91553.33333333333, ans=0.2 +2024-07-28 01:33:45,771 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.176e-01 +2024-07-28 01:33:46,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=91553.33333333333, ans=0.2 +2024-07-28 01:33:54,591 INFO [train.py:1114] (0/4) Epoch 7, batch 7350, loss[loss=0.2234, simple_loss=0.306, pruned_loss=0.07041, over 4640.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.3017, pruned_loss=0.06829, over 938975.65 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:33:57,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=91580.0, ans=0.025 +2024-07-28 01:33:58,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 01:34:00,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=91593.33333333333, ans=0.0 +2024-07-28 01:34:10,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91606.66666666667, ans=0.125 +2024-07-28 01:34:12,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=91606.66666666667, ans=0.125 +2024-07-28 01:34:23,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.04 vs. limit=12.0 +2024-07-28 01:34:27,593 INFO [train.py:1114] (0/4) Epoch 7, batch 7400, loss[loss=0.2092, simple_loss=0.312, pruned_loss=0.05315, over 4685.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.302, pruned_loss=0.06834, over 940173.28 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:34:32,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=91646.66666666667, ans=0.125 +2024-07-28 01:34:35,816 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.824e+01 5.881e+01 6.822e+01 8.435e+01 1.377e+02, threshold=1.364e+02, percent-clipped=0.0 +2024-07-28 01:34:38,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.41 vs. limit=15.0 +2024-07-28 01:34:57,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91700.0, ans=0.125 +2024-07-28 01:35:01,086 INFO [train.py:1114] (0/4) Epoch 7, batch 7450, loss[loss=0.201, simple_loss=0.2721, pruned_loss=0.06493, over 4592.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.3006, pruned_loss=0.06847, over 937674.10 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:35:01,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91713.33333333333, ans=0.125 +2024-07-28 01:35:02,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. 
limit=6.0 +2024-07-28 01:35:03,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91713.33333333333, ans=0.125 +2024-07-28 01:35:17,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=91740.0, ans=0.2 +2024-07-28 01:35:19,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=91740.0, ans=0.1 +2024-07-28 01:35:22,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=91753.33333333333, ans=0.05 +2024-07-28 01:35:22,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=15.0 +2024-07-28 01:35:29,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=91766.66666666667, ans=0.025 +2024-07-28 01:35:34,037 INFO [train.py:1114] (0/4) Epoch 7, batch 7500, loss[loss=0.2614, simple_loss=0.3369, pruned_loss=0.093, over 3233.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3019, pruned_loss=0.06908, over 935561.66 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:35:36,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=91780.0, ans=0.0 +2024-07-28 01:35:41,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.899e+01 6.430e+01 7.635e+01 1.398e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 01:36:04,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=91833.33333333333, ans=0.125 +2024-07-28 01:36:07,906 INFO [train.py:1114] (0/4) Epoch 7, batch 7550, loss[loss=0.2607, simple_loss=0.3427, pruned_loss=0.08939, over 4611.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.304, pruned_loss=0.07048, over 935693.04 frames. ], batch size: 17, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:36:09,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=91846.66666666667, ans=0.0 +2024-07-28 01:36:11,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91846.66666666667, ans=0.1 +2024-07-28 01:36:13,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.32 vs. limit=22.5 +2024-07-28 01:36:28,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=91886.66666666667, ans=0.1 +2024-07-28 01:36:31,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=91886.66666666667, ans=0.125 +2024-07-28 01:36:38,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=91900.0, ans=0.2 +2024-07-28 01:36:40,632 INFO [train.py:1114] (0/4) Epoch 7, batch 7600, loss[loss=0.252, simple_loss=0.3458, pruned_loss=0.07913, over 4809.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3041, pruned_loss=0.06945, over 937752.00 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:36:50,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-07-28 01:36:50,218 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.825e+01 5.658e+01 6.042e+01 7.178e+01 9.793e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 01:36:57,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=91940.0, ans=0.0 +2024-07-28 01:36:59,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.23 vs. limit=22.5 +2024-07-28 01:37:06,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=12.0 +2024-07-28 01:37:15,265 INFO [train.py:1114] (0/4) Epoch 7, batch 7650, loss[loss=0.2361, simple_loss=0.2981, pruned_loss=0.08709, over 4946.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3043, pruned_loss=0.06976, over 936708.11 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:37:16,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91980.0, ans=0.1 +2024-07-28 01:37:33,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0 +2024-07-28 01:37:33,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.09 vs. limit=22.5 +2024-07-28 01:37:34,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=92006.66666666667, ans=0.125 +2024-07-28 01:37:48,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-07-28 01:37:49,599 INFO [train.py:1114] (0/4) Epoch 7, batch 7700, loss[loss=0.2414, simple_loss=0.3297, pruned_loss=0.0765, over 4692.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3038, pruned_loss=0.06907, over 934520.35 frames. 
], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:37:53,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=92046.66666666667, ans=0.125 +2024-07-28 01:37:56,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=92060.0, ans=0.125 +2024-07-28 01:37:57,207 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.877e+01 6.503e+01 7.905e+01 1.085e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 01:38:00,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=92060.0, ans=0.0 +2024-07-28 01:38:02,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=92073.33333333333, ans=0.0 +2024-07-28 01:38:02,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=92073.33333333333, ans=0.0 +2024-07-28 01:38:04,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.61 vs. limit=15.0 +2024-07-28 01:38:06,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=92073.33333333333, ans=0.0 +2024-07-28 01:38:19,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92100.0, ans=0.1 +2024-07-28 01:38:21,609 INFO [train.py:1114] (0/4) Epoch 7, batch 7750, loss[loss=0.2342, simple_loss=0.32, pruned_loss=0.07425, over 4929.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3051, pruned_loss=0.06943, over 935583.45 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:38:28,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=92113.33333333333, ans=0.125 +2024-07-28 01:38:29,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.44 vs. limit=15.0 +2024-07-28 01:38:29,778 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=8.379e-02 +2024-07-28 01:38:40,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.59 vs. limit=15.0 +2024-07-28 01:38:48,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. limit=12.0 +2024-07-28 01:38:59,297 INFO [train.py:1114] (0/4) Epoch 7, batch 7800, loss[loss=0.2511, simple_loss=0.3381, pruned_loss=0.08203, over 4667.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3045, pruned_loss=0.06903, over 937144.43 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:39:03,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=92180.0, ans=0.07 +2024-07-28 01:39:06,959 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.790e+01 6.287e+01 7.177e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 01:39:07,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=92193.33333333333, ans=0.125 +2024-07-28 01:39:09,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=92193.33333333333, ans=0.125 +2024-07-28 01:39:18,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.24 vs. limit=15.0 +2024-07-28 01:39:22,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92206.66666666667, ans=0.125 +2024-07-28 01:39:26,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=92220.0, ans=0.0 +2024-07-28 01:39:27,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=92220.0, ans=0.2 +2024-07-28 01:39:45,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92233.33333333333, ans=0.1 +2024-07-28 01:39:51,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=92233.33333333333, ans=0.125 +2024-07-28 01:39:52,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=92233.33333333333, ans=0.05 +2024-07-28 01:39:53,513 INFO [train.py:1114] (0/4) Epoch 7, batch 7850, loss[loss=0.178, simple_loss=0.2534, pruned_loss=0.05131, over 4519.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3044, pruned_loss=0.06956, over 936008.26 frames. ], batch size: 10, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:40:14,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=92286.66666666667, ans=0.025 +2024-07-28 01:40:14,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=92286.66666666667, ans=0.0 +2024-07-28 01:40:23,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=92300.0, ans=0.125 +2024-07-28 01:40:31,971 INFO [train.py:1114] (0/4) Epoch 7, batch 7900, loss[loss=0.2489, simple_loss=0.3267, pruned_loss=0.08552, over 4872.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3058, pruned_loss=0.07022, over 933736.37 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:40:35,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=92313.33333333333, ans=0.025 +2024-07-28 01:40:36,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=92313.33333333333, ans=0.125 +2024-07-28 01:40:39,610 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 6.059e+01 6.486e+01 7.471e+01 1.043e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-28 01:40:44,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=92340.0, ans=0.125 +2024-07-28 01:41:14,312 INFO [train.py:1114] (0/4) Epoch 7, batch 7950, loss[loss=0.3384, simple_loss=0.406, pruned_loss=0.1354, over 3322.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3046, pruned_loss=0.06944, over 935859.86 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:41:22,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92393.33333333333, ans=0.125 +2024-07-28 01:41:24,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=92393.33333333333, ans=0.0 +2024-07-28 01:41:34,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=92420.0, ans=0.2 +2024-07-28 01:41:38,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=92420.0, ans=0.2 +2024-07-28 01:41:42,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=92433.33333333333, ans=0.0 +2024-07-28 01:41:47,118 INFO [train.py:1114] (0/4) Epoch 7, batch 8000, loss[loss=0.174, simple_loss=0.2503, pruned_loss=0.04883, over 4607.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3027, pruned_loss=0.06881, over 934661.10 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:41:47,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=92446.66666666667, ans=0.0 +2024-07-28 01:41:47,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=92446.66666666667, ans=0.125 +2024-07-28 01:41:48,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=92446.66666666667, ans=0.0 +2024-07-28 01:41:55,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.079e+01 6.641e+01 7.975e+01 1.086e+02, threshold=1.328e+02, percent-clipped=0.0 +2024-07-28 01:42:01,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0 +2024-07-28 01:42:12,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=15.0 +2024-07-28 01:42:20,869 INFO [train.py:1114] (0/4) Epoch 7, batch 8050, loss[loss=0.2168, simple_loss=0.3093, pruned_loss=0.06213, over 4818.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3036, pruned_loss=0.06915, over 934179.59 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:42:25,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92513.33333333333, ans=0.125 +2024-07-28 01:42:26,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92513.33333333333, ans=0.1 +2024-07-28 01:42:32,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=92526.66666666667, ans=0.0 +2024-07-28 01:42:47,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.30 vs. limit=22.5 +2024-07-28 01:42:50,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=92566.66666666667, ans=0.0 +2024-07-28 01:42:53,527 INFO [train.py:1114] (0/4) Epoch 7, batch 8100, loss[loss=0.258, simple_loss=0.3466, pruned_loss=0.08475, over 4807.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3043, pruned_loss=0.06934, over 933885.17 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:43:00,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92593.33333333333, ans=0.0 +2024-07-28 01:43:00,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=92593.33333333333, ans=0.0 +2024-07-28 01:43:01,844 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 5.903e+01 6.479e+01 7.411e+01 1.026e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 01:43:05,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=92593.33333333333, ans=0.0 +2024-07-28 01:43:23,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=92633.33333333333, ans=0.025 +2024-07-28 01:43:27,001 INFO [train.py:1114] (0/4) Epoch 7, batch 8150, loss[loss=0.1933, simple_loss=0.2748, pruned_loss=0.05587, over 4812.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3034, pruned_loss=0.06924, over 937514.07 frames. 
], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:43:32,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92646.66666666667, ans=0.1 +2024-07-28 01:43:39,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=92673.33333333333, ans=0.025 +2024-07-28 01:43:43,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92673.33333333333, ans=0.125 +2024-07-28 01:43:45,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=92673.33333333333, ans=0.09899494936611666 +2024-07-28 01:43:49,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=92686.66666666667, ans=0.125 +2024-07-28 01:43:49,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92686.66666666667, ans=0.1 +2024-07-28 01:44:00,491 INFO [train.py:1114] (0/4) Epoch 7, batch 8200, loss[loss=0.2281, simple_loss=0.3069, pruned_loss=0.07465, over 4804.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.3023, pruned_loss=0.06822, over 938707.28 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:44:01,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=92713.33333333333, ans=0.2 +2024-07-28 01:44:04,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=92713.33333333333, ans=0.125 +2024-07-28 01:44:05,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0 +2024-07-28 01:44:08,979 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 6.053e+01 7.008e+01 8.416e+01 1.296e+02, threshold=1.402e+02, percent-clipped=1.0 +2024-07-28 01:44:09,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=92726.66666666667, ans=0.0 +2024-07-28 01:44:11,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=92726.66666666667, ans=0.125 +2024-07-28 01:44:13,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=92726.66666666667, ans=0.0 +2024-07-28 01:44:15,092 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.06 vs. limit=12.0 +2024-07-28 01:44:27,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=92766.66666666667, ans=0.125 +2024-07-28 01:44:32,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=92766.66666666667, ans=0.2 +2024-07-28 01:44:34,218 INFO [train.py:1114] (0/4) Epoch 7, batch 8250, loss[loss=0.2303, simple_loss=0.3187, pruned_loss=0.07098, over 4899.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.303, pruned_loss=0.06886, over 938749.09 frames. 
], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:44:35,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92780.0, ans=0.125 +2024-07-28 01:44:41,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92793.33333333333, ans=0.125 +2024-07-28 01:44:51,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=92806.66666666667, ans=0.5 +2024-07-28 01:44:54,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.23 vs. limit=22.5 +2024-07-28 01:44:54,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=92820.0, ans=0.0 +2024-07-28 01:44:58,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92820.0, ans=0.125 +2024-07-28 01:45:00,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=92833.33333333333, ans=0.0 +2024-07-28 01:45:05,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.44 vs. limit=22.5 +2024-07-28 01:45:06,709 INFO [train.py:1114] (0/4) Epoch 7, batch 8300, loss[loss=0.2116, simple_loss=0.3037, pruned_loss=0.05979, over 4893.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3048, pruned_loss=0.06982, over 939069.98 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:45:15,037 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 6.122e+01 6.815e+01 8.383e+01 1.214e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-28 01:45:16,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=92860.0, ans=0.2 +2024-07-28 01:45:18,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=92860.0, ans=0.125 +2024-07-28 01:45:25,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92886.66666666667, ans=0.1 +2024-07-28 01:45:27,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=92886.66666666667, ans=0.0 +2024-07-28 01:45:35,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=92900.0, ans=0.125 +2024-07-28 01:45:38,738 INFO [train.py:1114] (0/4) Epoch 7, batch 8350, loss[loss=0.2018, simple_loss=0.2864, pruned_loss=0.05862, over 4806.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3036, pruned_loss=0.06921, over 941576.13 frames. 
], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:45:39,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=92913.33333333333, ans=0.2 +2024-07-28 01:45:39,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=92913.33333333333, ans=0.025 +2024-07-28 01:45:43,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.13 vs. limit=15.0 +2024-07-28 01:46:03,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92953.33333333333, ans=0.1 +2024-07-28 01:46:11,417 INFO [train.py:1114] (0/4) Epoch 7, batch 8400, loss[loss=0.1824, simple_loss=0.2663, pruned_loss=0.04929, over 4781.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3037, pruned_loss=0.06927, over 940222.68 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:46:20,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=92993.33333333333, ans=0.05 +2024-07-28 01:46:20,649 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 5.850e+01 6.401e+01 7.146e+01 1.045e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 01:46:40,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=93033.33333333333, ans=0.0 +2024-07-28 01:46:43,901 INFO [train.py:1114] (0/4) Epoch 7, batch 8450, loss[loss=0.2179, simple_loss=0.3061, pruned_loss=0.06488, over 4809.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3051, pruned_loss=0.07023, over 938763.31 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:46:50,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=93060.0, ans=0.2 +2024-07-28 01:46:50,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.94 vs. limit=22.5 +2024-07-28 01:47:03,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.24 vs. limit=15.0 +2024-07-28 01:47:12,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=93100.0, ans=0.125 +2024-07-28 01:47:18,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=93100.0, ans=0.09899494936611666 +2024-07-28 01:47:19,787 INFO [train.py:1114] (0/4) Epoch 7, batch 8500, loss[loss=0.2185, simple_loss=0.2851, pruned_loss=0.07595, over 4605.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3031, pruned_loss=0.0688, over 938711.79 frames. 
], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:47:26,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93126.66666666667, ans=0.1 +2024-07-28 01:47:29,191 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 5.964e+01 6.473e+01 7.597e+01 1.017e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-28 01:47:37,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=93140.0, ans=0.2 +2024-07-28 01:47:53,749 INFO [train.py:1114] (0/4) Epoch 7, batch 8550, loss[loss=0.1976, simple_loss=0.2737, pruned_loss=0.06077, over 4802.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.3017, pruned_loss=0.06827, over 939655.89 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:47:54,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=93180.0, ans=0.125 +2024-07-28 01:47:54,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.72 vs. limit=15.0 +2024-07-28 01:47:55,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=93180.0, ans=0.125 +2024-07-28 01:48:03,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0 +2024-07-28 01:48:04,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93193.33333333333, ans=0.125 +2024-07-28 01:48:05,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=93193.33333333333, ans=0.0 +2024-07-28 01:48:09,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=93206.66666666667, ans=0.125 +2024-07-28 01:48:11,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.56 vs. limit=22.5 +2024-07-28 01:48:13,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.80 vs. limit=15.0 +2024-07-28 01:48:15,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93220.0, ans=0.0 +2024-07-28 01:48:16,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=93220.0, ans=0.0 +2024-07-28 01:48:25,479 INFO [train.py:1114] (0/4) Epoch 7, batch 8600, loss[loss=0.2198, simple_loss=0.3013, pruned_loss=0.06909, over 4801.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3018, pruned_loss=0.06859, over 939160.36 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:48:31,565 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.75 vs. 
limit=15.0 +2024-07-28 01:48:33,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=93260.0, ans=0.125 +2024-07-28 01:48:35,626 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.089e+01 7.126e+01 9.182e+01 1.339e+02, threshold=1.425e+02, percent-clipped=2.0 +2024-07-28 01:48:35,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=93260.0, ans=0.025 +2024-07-28 01:48:37,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.44 vs. limit=15.0 +2024-07-28 01:48:40,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=93273.33333333333, ans=0.125 +2024-07-28 01:48:56,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0 +2024-07-28 01:49:00,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=93300.0, ans=0.0 +2024-07-28 01:49:04,767 INFO [train.py:1114] (0/4) Epoch 7, batch 8650, loss[loss=0.2254, simple_loss=0.297, pruned_loss=0.07691, over 4895.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.3017, pruned_loss=0.06854, over 939957.01 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:49:07,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=93313.33333333333, ans=0.125 +2024-07-28 01:49:08,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.67 vs. limit=22.5 +2024-07-28 01:49:09,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=93313.33333333333, ans=0.0 +2024-07-28 01:49:19,325 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.87 vs. limit=15.0 +2024-07-28 01:49:21,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=93326.66666666667, ans=0.0 +2024-07-28 01:49:23,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=93340.0, ans=0.0 +2024-07-28 01:49:46,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=93380.0, ans=0.0 +2024-07-28 01:49:46,860 INFO [train.py:1114] (0/4) Epoch 7, batch 8700, loss[loss=0.2179, simple_loss=0.3104, pruned_loss=0.06269, over 4770.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3027, pruned_loss=0.06853, over 937773.18 frames. 
], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:49:58,260 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.695e+01 6.363e+01 6.862e+01 1.009e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 01:50:03,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=93406.66666666667, ans=0.125 +2024-07-28 01:50:04,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93406.66666666667, ans=0.125 +2024-07-28 01:50:06,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=93406.66666666667, ans=0.2 +2024-07-28 01:50:12,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=93420.0, ans=0.0 +2024-07-28 01:50:17,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=93433.33333333333, ans=0.0 +2024-07-28 01:50:21,429 INFO [train.py:1114] (0/4) Epoch 7, batch 8750, loss[loss=0.2098, simple_loss=0.296, pruned_loss=0.06179, over 4699.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3019, pruned_loss=0.06832, over 936434.72 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:50:24,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93446.66666666667, ans=0.1 +2024-07-28 01:50:31,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=93460.0, ans=0.0 +2024-07-28 01:50:47,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93486.66666666667, ans=0.0 +2024-07-28 01:50:48,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=93500.0, ans=15.0 +2024-07-28 01:50:54,987 INFO [train.py:1114] (0/4) Epoch 7, batch 8800, loss[loss=0.2409, simple_loss=0.3328, pruned_loss=0.07448, over 4934.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3025, pruned_loss=0.0689, over 937490.56 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:50:59,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=93513.33333333333, ans=0.1 +2024-07-28 01:50:59,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93513.33333333333, ans=0.0 +2024-07-28 01:51:00,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93526.66666666667, ans=0.125 +2024-07-28 01:51:04,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.68 vs. 
limit=5.0 +2024-07-28 01:51:05,111 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.962e+01 6.661e+01 7.820e+01 1.016e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 01:51:10,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=93540.0, ans=0.2 +2024-07-28 01:51:10,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=93540.0, ans=0.125 +2024-07-28 01:51:12,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93540.0, ans=0.125 +2024-07-28 01:51:17,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=93553.33333333333, ans=0.125 +2024-07-28 01:51:18,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=93553.33333333333, ans=0.2 +2024-07-28 01:51:23,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=93566.66666666667, ans=0.125 +2024-07-28 01:51:28,560 INFO [train.py:1114] (0/4) Epoch 7, batch 8850, loss[loss=0.2492, simple_loss=0.3299, pruned_loss=0.08419, over 4501.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.3012, pruned_loss=0.06809, over 932437.58 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:51:29,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=93580.0, ans=0.125 +2024-07-28 01:51:30,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.27 vs. limit=15.0 +2024-07-28 01:51:35,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=93593.33333333333, ans=0.2 +2024-07-28 01:51:47,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=93620.0, ans=0.125 +2024-07-28 01:51:47,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.60 vs. limit=22.5 +2024-07-28 01:51:57,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=93633.33333333333, ans=0.0 +2024-07-28 01:52:00,217 INFO [train.py:1114] (0/4) Epoch 7, batch 8900, loss[loss=0.2196, simple_loss=0.3024, pruned_loss=0.06838, over 4941.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.3014, pruned_loss=0.06844, over 930345.08 frames. 
], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:52:01,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93646.66666666667, ans=0.1 +2024-07-28 01:52:09,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 6.236e+01 6.887e+01 8.483e+01 1.202e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 01:52:19,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=93686.66666666667, ans=10.0 +2024-07-28 01:52:22,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=93686.66666666667, ans=0.125 +2024-07-28 01:52:26,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-07-28 01:52:32,127 INFO [train.py:1114] (0/4) Epoch 7, batch 8950, loss[loss=0.2412, simple_loss=0.3213, pruned_loss=0.08058, over 4520.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.3016, pruned_loss=0.06861, over 930812.23 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:52:44,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=93740.0, ans=0.125 +2024-07-28 01:52:47,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.81 vs. limit=22.5 +2024-07-28 01:52:51,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93753.33333333333, ans=0.125 +2024-07-28 01:52:51,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=93753.33333333333, ans=0.125 +2024-07-28 01:52:53,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93753.33333333333, ans=0.1 +2024-07-28 01:53:03,461 INFO [train.py:1114] (0/4) Epoch 7, batch 9000, loss[loss=0.2082, simple_loss=0.2858, pruned_loss=0.06531, over 4648.00 frames. ], tot_loss[loss=0.218, simple_loss=0.3001, pruned_loss=0.06799, over 933658.25 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:53:03,462 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 01:53:10,602 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.4033, 4.0218, 4.6204, 3.3006], device='cuda:0') +2024-07-28 01:53:15,550 INFO [train.py:1146] (0/4) Epoch 7, validation: loss=0.1831, simple_loss=0.2876, pruned_loss=0.03931, over 944034.00 frames. +2024-07-28 01:53:15,551 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 01:53:16,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0 +2024-07-28 01:53:17,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=93780.0, ans=0.0 +2024-07-28 01:53:23,881 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. 
limit=15.0 +2024-07-28 01:53:25,274 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.776e+01 6.458e+01 7.441e+01 1.035e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 01:53:39,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.88 vs. limit=22.5 +2024-07-28 01:53:40,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=93806.66666666667, ans=0.05 +2024-07-28 01:53:42,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=93806.66666666667, ans=0.2 +2024-07-28 01:53:56,181 INFO [train.py:1114] (0/4) Epoch 7, batch 9050, loss[loss=0.1852, simple_loss=0.2622, pruned_loss=0.05406, over 4496.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2995, pruned_loss=0.06758, over 934194.33 frames. ], batch size: 10, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:53:56,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=93846.66666666667, ans=0.1 +2024-07-28 01:53:57,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=93846.66666666667, ans=0.0 +2024-07-28 01:53:58,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=93846.66666666667, ans=0.025 +2024-07-28 01:54:06,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=93860.0, ans=0.125 +2024-07-28 01:54:10,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=93873.33333333333, ans=22.5 +2024-07-28 01:54:14,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93886.66666666667, ans=0.1 +2024-07-28 01:54:18,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=93886.66666666667, ans=0.125 +2024-07-28 01:54:21,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=93900.0, ans=0.125 +2024-07-28 01:54:22,016 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:54:24,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=93900.0, ans=0.125 +2024-07-28 01:54:28,087 INFO [train.py:1114] (0/4) Epoch 7, batch 9100, loss[loss=0.205, simple_loss=0.293, pruned_loss=0.05845, over 4939.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2993, pruned_loss=0.06695, over 937329.10 frames. 
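
The `Computing validation loss ... validation: loss=0.1831 ... Maximum memory allocated so far is 4178MB` sequence a few lines above interleaves a full validation pass into training. A hedged sketch of that hook is below, with `model`, `valid_loader`, and `compute_loss` as assumed stand-ins; it shows the plausible shape of the code, not the actual `train.py`:

```python
# Hedged sketch of the mid-training validation pass logged above.
# `compute_loss` is assumed to return (summed loss tensor, frame count).
import logging
import torch

def validate(model, valid_loader, compute_loss, device="cuda:0"):
    logging.info("Computing validation loss")
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item()
            tot_frames += num_frames
    model.train()
    logging.info(f"validation: loss={tot_loss / tot_frames:.4f}, "
                 f"over {tot_frames:.2f} frames.")
    mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    logging.info(f"Maximum memory allocated so far is {mem_mb}MB")
```
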
], batch size: 14, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:28,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=93913.33333333333, ans=0.125 +2024-07-28 01:54:37,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+01 5.684e+01 6.462e+01 7.112e+01 1.033e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 01:54:42,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=93940.0, ans=0.0 +2024-07-28 01:54:42,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=93940.0, ans=0.07 +2024-07-28 01:54:43,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=93940.0, ans=0.025 +2024-07-28 01:54:44,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=93940.0, ans=0.025 +2024-07-28 01:54:44,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=93940.0, ans=0.2 +2024-07-28 01:54:45,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93940.0, ans=0.125 +2024-07-28 01:54:59,392 INFO [train.py:1114] (0/4) Epoch 7, batch 9150, loss[loss=0.242, simple_loss=0.3201, pruned_loss=0.08196, over 4821.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.3009, pruned_loss=0.06776, over 936231.26 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:59,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=93980.0, ans=0.125 +2024-07-28 01:55:15,425 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:55:16,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94006.66666666667, ans=0.1 +2024-07-28 01:55:18,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. limit=10.0 +2024-07-28 01:55:24,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=94020.0, ans=0.125 +2024-07-28 01:55:28,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=94033.33333333333, ans=0.0 +2024-07-28 01:55:32,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94033.33333333333, ans=0.125 +2024-07-28 01:55:33,134 INFO [train.py:1114] (0/4) Epoch 7, batch 9200, loss[loss=0.2097, simple_loss=0.2808, pruned_loss=0.06927, over 4851.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.3006, pruned_loss=0.06807, over 937916.45 frames. 
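
Most `INFO [scaling.py:214]` lines in this log are `ScheduledFloat` values (`ans=...`) sampled at the current `batch_count`: dropout probabilities, skip rates, and bypass scales that anneal as training progresses. A minimal piecewise-linear sketch of that idea follows; the breakpoints are invented for illustration and the real `scaling.py` may interpolate differently:

```python
# Minimal sketch of a piecewise-linear "ScheduledFloat": the logged `ans`
# is the value interpolated at the current `batch_count`.
class ScheduledFloat:
    def __init__(self, *points):
        self.points = sorted(points)   # (batch_count, value) pairs
        self.batch_count = 0.0

    def __float__(self):
        x, pts = self.batch_count, self.points
        if x <= pts[0][0]:
            return float(pts[0][1])
        if x >= pts[-1][0]:
            return float(pts[-1][1])
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= x <= x1:
                return y0 + (y1 - y0) * (x - x0) / (x1 - x0)

dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
dropout_p.batch_count = 93940.0
print(f"ans={float(dropout_p)}")   # -> ans=0.1 once past the last breakpoint
```
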
], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:55:35,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94046.66666666667, ans=0.125 +2024-07-28 01:55:42,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.866e+01 6.542e+01 7.562e+01 1.078e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 01:55:47,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=94073.33333333333, ans=0.125 +2024-07-28 01:55:49,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94073.33333333333, ans=0.1 +2024-07-28 01:56:00,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0 +2024-07-28 01:56:04,045 INFO [train.py:1114] (0/4) Epoch 7, batch 9250, loss[loss=0.2066, simple_loss=0.2852, pruned_loss=0.06397, over 4646.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.3004, pruned_loss=0.06806, over 938339.59 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:56:05,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.0 +2024-07-28 01:56:18,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94140.0, ans=0.125 +2024-07-28 01:56:24,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=94153.33333333333, ans=0.125 +2024-07-28 01:56:31,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=94166.66666666667, ans=0.0 +2024-07-28 01:56:35,493 INFO [train.py:1114] (0/4) Epoch 7, batch 9300, loss[loss=0.2243, simple_loss=0.3068, pruned_loss=0.07086, over 4778.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.3003, pruned_loss=0.06789, over 938419.00 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:56:44,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=94193.33333333333, ans=0.05 +2024-07-28 01:56:44,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.679e+01 6.402e+01 7.728e+01 1.178e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 01:56:45,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=94193.33333333333, ans=0.125 +2024-07-28 01:56:48,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=94206.66666666667, ans=0.0 +2024-07-28 01:56:48,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94206.66666666667, ans=0.125 +2024-07-28 01:56:49,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.68 vs. 
limit=15.0 +2024-07-28 01:56:50,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=94206.66666666667, ans=0.2 +2024-07-28 01:56:51,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=94206.66666666667, ans=0.05 +2024-07-28 01:56:52,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=94206.66666666667, ans=0.2 +2024-07-28 01:56:57,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=94220.0, ans=0.125 +2024-07-28 01:57:07,390 INFO [train.py:1114] (0/4) Epoch 7, batch 9350, loss[loss=0.1765, simple_loss=0.255, pruned_loss=0.04899, over 4793.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3014, pruned_loss=0.06857, over 935736.57 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:57:12,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.77 vs. limit=15.0 +2024-07-28 01:57:27,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=94286.66666666667, ans=0.025 +2024-07-28 01:57:38,765 INFO [train.py:1114] (0/4) Epoch 7, batch 9400, loss[loss=0.2226, simple_loss=0.3168, pruned_loss=0.06422, over 4695.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.3012, pruned_loss=0.06859, over 933297.49 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:57:45,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94326.66666666667, ans=0.125 +2024-07-28 01:57:47,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=94326.66666666667, ans=0.0 +2024-07-28 01:57:48,113 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 5.906e+01 6.522e+01 7.564e+01 1.110e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 01:58:01,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=94353.33333333333, ans=0.1 +2024-07-28 01:58:09,837 INFO [train.py:1114] (0/4) Epoch 7, batch 9450, loss[loss=0.2149, simple_loss=0.2948, pruned_loss=0.06752, over 4790.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.3012, pruned_loss=0.06809, over 932731.79 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:58:23,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94406.66666666667, ans=0.1 +2024-07-28 01:58:25,556 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:58:27,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.78 vs. limit=15.0 +2024-07-28 01:58:27,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=94406.66666666667, ans=0.0 +2024-07-28 01:58:40,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.81 vs. 
limit=22.5 +2024-07-28 01:58:47,079 INFO [train.py:1114] (0/4) Epoch 7, batch 9500, loss[loss=0.2364, simple_loss=0.3157, pruned_loss=0.07852, over 4711.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.3013, pruned_loss=0.06751, over 935025.28 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:58:56,884 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 5.944e+01 6.483e+01 7.199e+01 9.045e+01, threshold=1.297e+02, percent-clipped=0.0 +2024-07-28 01:58:58,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94460.0, ans=0.125 +2024-07-28 01:59:18,777 INFO [train.py:1114] (0/4) Epoch 7, batch 9550, loss[loss=0.2122, simple_loss=0.2871, pruned_loss=0.06869, over 4778.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.3023, pruned_loss=0.06864, over 931714.08 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:59:34,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.11 vs. limit=22.5 +2024-07-28 01:59:36,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=94553.33333333333, ans=0.2 +2024-07-28 01:59:38,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94553.33333333333, ans=0.125 +2024-07-28 01:59:49,821 INFO [train.py:1114] (0/4) Epoch 7, batch 9600, loss[loss=0.2985, simple_loss=0.3535, pruned_loss=0.1217, over 3487.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.3015, pruned_loss=0.06816, over 931288.93 frames. ], batch size: 35, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:59:59,434 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.136e+01 6.787e+01 7.890e+01 1.161e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-28 01:59:59,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0 +2024-07-28 02:00:02,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=94606.66666666667, ans=0.2 +2024-07-28 02:00:05,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=94606.66666666667, ans=0.0 +2024-07-28 02:00:13,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=94620.0, ans=0.125 +2024-07-28 02:00:17,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=94633.33333333333, ans=0.0 +2024-07-28 02:00:21,666 INFO [train.py:1114] (0/4) Epoch 7, batch 9650, loss[loss=0.2411, simple_loss=0.3394, pruned_loss=0.07142, over 4841.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.303, pruned_loss=0.06939, over 927399.16 frames. 
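
The `Whitening: ... metric=21.81 vs. limit=22.5` lines above compare a per-module whitening statistic against its limit; a corrective penalty applies when the metric exceeds the limit, hence the "vs." phrasing. Below is one plausible formulation, reconstructed under assumptions: it equals 1.0 for perfectly decorrelated channels and grows as the covariance departs from a scaled identity. This is a sketch, not the project's `scaling.py`:

```python
# Assumed formulation of a "whitening metric" over feature channels.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    n, c = x.shape
    cg = c // num_groups                                   # channels per group
    g = x.reshape(n, num_groups, cg).permute(1, 0, 2)      # (groups, frames, cg)
    cov = g.transpose(1, 2) @ g / n                        # per-group covariance
    diag_mean = cov.diagonal(dim1=1, dim2=2).mean(dim=1)   # mean channel variance
    # >= 1.0, with equality iff cov is a multiple of the identity.
    metric = (cov ** 2).sum(dim=(1, 2)) / (diag_mean ** 2 * cg)
    return metric.mean().item()

feats = torch.randn(1000, 128)
print(f"metric={whitening_metric(feats, num_groups=4):.2f} vs. limit=6.0")
```
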
], batch size: 16, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 02:00:24,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=94646.66666666667, ans=0.0 +2024-07-28 02:00:36,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=94673.33333333333, ans=0.125 +2024-07-28 02:00:39,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=94673.33333333333, ans=0.2 +2024-07-28 02:00:44,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=94686.66666666667, ans=0.1 +2024-07-28 02:00:52,825 INFO [train.py:1114] (0/4) Epoch 7, batch 9700, loss[loss=0.2302, simple_loss=0.3127, pruned_loss=0.07382, over 4237.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3052, pruned_loss=0.07086, over 925888.37 frames. ], batch size: 25, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 02:00:54,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.61 vs. limit=15.0 +2024-07-28 02:01:02,008 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+01 6.194e+01 6.881e+01 8.155e+01 1.257e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 02:01:04,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=94726.66666666667, ans=0.0 +2024-07-28 02:01:06,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-07-28 02:01:10,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94740.0, ans=0.125 +2024-07-28 02:01:13,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=94753.33333333333, ans=0.0 +2024-07-28 02:01:13,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=94753.33333333333, ans=0.0 +2024-07-28 02:01:19,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94766.66666666667, ans=0.125 +2024-07-28 02:01:24,027 INFO [train.py:1114] (0/4) Epoch 7, batch 9750, loss[loss=0.229, simple_loss=0.3176, pruned_loss=0.07021, over 4669.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3051, pruned_loss=0.0707, over 926537.72 frames. 
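
Each `train.py:1114` record pairs the current batch (`loss[... over 4669.00 frames.]`) with a running aggregate (`tot_loss[... over 926537.72 frames.]`). The aggregate behaves like a frame-weighted moving average; the sketch below uses an assumed geometric decay, which may differ from what `train.py` actually does:

```python
# Hedged sketch of the frame-weighted running loss behind "tot_loss[...]".
# `decay` is an assumed smoothing constant, not taken from train.py.
class RunningLoss:
    def __init__(self, decay: float = 0.99):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float):
        # Old statistics decay geometrically, so recent batches dominate.
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / self.frames

tot = RunningLoss()
for loss, frames in [(0.229, 4669.0), (0.221, 4700.0)]:
    tot.update(loss, frames)
print(f"tot_loss[loss={tot.value:.4f}, over {tot.frames:.2f} frames.]")
```
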
], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:01:33,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=94793.33333333333, ans=0.015 +2024-07-28 02:01:45,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=94820.0, ans=0.09899494936611666 +2024-07-28 02:01:50,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=94833.33333333333, ans=0.125 +2024-07-28 02:01:53,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=94833.33333333333, ans=0.025 +2024-07-28 02:01:54,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=94833.33333333333, ans=0.125 +2024-07-28 02:01:56,351 INFO [train.py:1114] (0/4) Epoch 7, batch 9800, loss[loss=0.2378, simple_loss=0.3211, pruned_loss=0.07722, over 4700.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3031, pruned_loss=0.06951, over 925124.17 frames. ], batch size: 12, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:00,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94846.66666666667, ans=0.125 +2024-07-28 02:02:05,941 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.759e+01 6.678e+01 8.256e+01 1.240e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:02:19,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94886.66666666667, ans=0.0 +2024-07-28 02:02:23,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94900.0, ans=0.1 +2024-07-28 02:02:26,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0 +2024-07-28 02:02:27,276 INFO [train.py:1114] (0/4) Epoch 7, batch 9850, loss[loss=0.2696, simple_loss=0.3457, pruned_loss=0.09671, over 4905.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3028, pruned_loss=0.06945, over 927161.07 frames. ], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:31,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94913.33333333333, ans=0.125 +2024-07-28 02:02:32,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94913.33333333333, ans=0.1 +2024-07-28 02:02:34,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94926.66666666667, ans=0.125 +2024-07-28 02:02:45,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=94940.0, ans=0.1 +2024-07-28 02:02:49,087 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-07-28 02:02:58,782 INFO [train.py:1114] (0/4) Epoch 7, batch 9900, loss[loss=0.2362, simple_loss=0.32, pruned_loss=0.07621, over 4845.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3034, pruned_loss=0.07016, over 926230.18 frames. 
], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:00,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=94980.0, ans=0.1 +2024-07-28 02:03:01,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94980.0, ans=0.1 +2024-07-28 02:03:01,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=94980.0, ans=0.125 +2024-07-28 02:03:06,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.78 vs. limit=10.0 +2024-07-28 02:03:08,106 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.950e+01 6.593e+01 7.492e+01 1.029e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:03:08,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=94993.33333333333, ans=0.0 +2024-07-28 02:03:10,357 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.69 vs. limit=12.0 +2024-07-28 02:03:20,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95020.0, ans=0.1 +2024-07-28 02:03:20,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95020.0, ans=0.125 +2024-07-28 02:03:27,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0 +2024-07-28 02:03:29,235 INFO [train.py:1114] (0/4) Epoch 7, batch 9950, loss[loss=0.1839, simple_loss=0.2637, pruned_loss=0.05203, over 4778.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3036, pruned_loss=0.07017, over 928825.14 frames. ], batch size: 11, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:42,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95073.33333333333, ans=0.125 +2024-07-28 02:03:43,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.99 vs. limit=15.0 +2024-07-28 02:03:45,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95073.33333333333, ans=0.125 +2024-07-28 02:03:49,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=95086.66666666667, ans=0.2 +2024-07-28 02:03:55,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95100.0, ans=0.1 +2024-07-28 02:04:00,297 INFO [train.py:1114] (0/4) Epoch 7, batch 10000, loss[loss=0.2007, simple_loss=0.289, pruned_loss=0.05617, over 4609.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3063, pruned_loss=0.07165, over 926382.97 frames. 
], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:02,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=95113.33333333333, ans=0.0 +2024-07-28 02:04:03,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.77 vs. limit=15.0 +2024-07-28 02:04:03,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0 +2024-07-28 02:04:06,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.71 vs. limit=15.0 +2024-07-28 02:04:06,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=95126.66666666667, ans=0.125 +2024-07-28 02:04:06,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=95126.66666666667, ans=0.05 +2024-07-28 02:04:08,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=95126.66666666667, ans=0.0 +2024-07-28 02:04:09,354 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 5.825e+01 6.191e+01 6.916e+01 9.527e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:04:25,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95166.66666666667, ans=0.0 +2024-07-28 02:04:30,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=95166.66666666667, ans=0.125 +2024-07-28 02:04:32,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.72 vs. limit=15.0 +2024-07-28 02:04:32,330 INFO [train.py:1114] (0/4) Epoch 7, batch 10050, loss[loss=0.2302, simple_loss=0.2993, pruned_loss=0.08055, over 3395.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3106, pruned_loss=0.07417, over 914259.30 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:33,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=95180.0, ans=0.035 +2024-07-28 02:04:35,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.27 vs. limit=10.0 +2024-07-28 02:04:36,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.06 vs. 
limit=6.0 +2024-07-28 02:04:47,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95193.33333333333, ans=0.1 +2024-07-28 02:04:57,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=95220.0, ans=0.0 +2024-07-28 02:05:00,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=95220.0, ans=0.0 +2024-07-28 02:05:06,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=95233.33333333333, ans=0.025 +2024-07-28 02:05:07,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=95246.66666666667, ans=0.125 +2024-07-28 02:05:08,135 INFO [train.py:1114] (0/4) Epoch 7, batch 10100, loss[loss=0.2902, simple_loss=0.3466, pruned_loss=0.1169, over 3309.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3167, pruned_loss=0.08068, over 862387.10 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:10,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=95246.66666666667, ans=0.04949747468305833 +2024-07-28 02:05:16,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=95260.0, ans=0.125 +2024-07-28 02:05:17,905 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.232e+01 6.813e+01 7.421e+01 7.882e+01 1.006e+02, threshold=1.484e+02, percent-clipped=0.0 +2024-07-28 02:05:18,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=95260.0, ans=0.09899494936611666 +2024-07-28 02:05:21,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=95273.33333333333, ans=0.125 +2024-07-28 02:05:24,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.63 vs. limit=15.0 +2024-07-28 02:05:38,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95300.0, ans=0.125 +2024-07-28 02:05:40,579 INFO [train.py:1114] (0/4) Epoch 7, batch 10150, loss[loss=0.3057, simple_loss=0.3585, pruned_loss=0.1264, over 3211.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3203, pruned_loss=0.08474, over 821249.51 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:43,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=95313.33333333333, ans=0.2 +2024-07-28 02:05:47,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=95326.66666666667, ans=0.0 +2024-07-28 02:06:06,517 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=9.26 vs. 
limit=15.0 +2024-07-28 02:06:09,420 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:06:12,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=95366.66666666667, ans=0.125 +2024-07-28 02:06:14,371 INFO [train.py:1114] (0/4) Epoch 7, batch 10200, loss[loss=0.2445, simple_loss=0.3126, pruned_loss=0.08819, over 3313.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3233, pruned_loss=0.08837, over 788412.24 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:06:16,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.33 vs. limit=15.0 +2024-07-28 02:06:24,493 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.931e+01 6.733e+01 7.121e+01 8.042e+01 1.219e+02, threshold=1.424e+02, percent-clipped=0.0 +2024-07-28 02:06:28,846 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-7.pt +2024-07-28 02:07:12,013 INFO [train.py:1114] (0/4) Epoch 8, batch 0, loss[loss=0.1856, simple_loss=0.2802, pruned_loss=0.04546, over 4850.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2802, pruned_loss=0.04546, over 4850.00 frames. ], batch size: 12, lr: 9.72e-03, grad_scale: 32.0 +2024-07-28 02:07:12,013 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 02:07:22,484 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.1061, 3.8067, 2.6994, 2.6326], device='cuda:0') +2024-07-28 02:07:23,602 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.1876, simple_loss=0.2932, pruned_loss=0.04099, over 944034.00 frames. +2024-07-28 02:07:23,603 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 02:07:35,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=95424.0, ans=0.125 +2024-07-28 02:08:07,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=95464.0, ans=0.025 +2024-07-28 02:08:13,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=95464.0, ans=0.05 +2024-07-28 02:08:13,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=95464.0, ans=0.125 +2024-07-28 02:08:23,646 INFO [train.py:1114] (0/4) Epoch 8, batch 50, loss[loss=0.1692, simple_loss=0.2522, pruned_loss=0.04311, over 4609.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.309, pruned_loss=0.07017, over 206624.85 frames. ], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:08:32,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.40 vs. 
limit=6.0 +2024-07-28 02:08:47,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95517.33333333333, ans=0.125 +2024-07-28 02:08:54,401 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.904e+01 6.447e+01 7.403e+01 1.012e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 02:08:59,041 INFO [train.py:1114] (0/4) Epoch 8, batch 100, loss[loss=0.2078, simple_loss=0.2959, pruned_loss=0.05987, over 4635.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3075, pruned_loss=0.06978, over 365843.39 frames. ], batch size: 12, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:04,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.24 vs. limit=10.0 +2024-07-28 02:09:14,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=95570.66666666667, ans=0.2 +2024-07-28 02:09:22,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=95584.0, ans=0.125 +2024-07-28 02:09:26,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=95597.33333333333, ans=0.025 +2024-07-28 02:09:28,932 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.07 vs. limit=22.5 +2024-07-28 02:09:30,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95597.33333333333, ans=0.125 +2024-07-28 02:09:31,873 INFO [train.py:1114] (0/4) Epoch 8, batch 150, loss[loss=0.1894, simple_loss=0.2794, pruned_loss=0.04973, over 4618.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.3011, pruned_loss=0.06684, over 494450.50 frames. ], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:36,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=95610.66666666667, ans=0.2 +2024-07-28 02:09:43,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.42 vs. limit=15.0 +2024-07-28 02:09:45,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.07 vs. 
limit=15.0 +2024-07-28 02:09:45,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95637.33333333333, ans=0.125 +2024-07-28 02:09:53,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=95650.66666666667, ans=0.125 +2024-07-28 02:09:58,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=95664.0, ans=0.125 +2024-07-28 02:10:00,295 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.617e+01 5.653e+01 6.192e+01 6.799e+01 9.993e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:10:00,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95664.0, ans=0.1 +2024-07-28 02:10:05,087 INFO [train.py:1114] (0/4) Epoch 8, batch 200, loss[loss=0.2717, simple_loss=0.3418, pruned_loss=0.1008, over 4580.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.299, pruned_loss=0.06557, over 594114.33 frames. ], batch size: 21, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:11,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=95690.66666666667, ans=0.0 +2024-07-28 02:10:14,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.89 vs. limit=10.0 +2024-07-28 02:10:17,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=95690.66666666667, ans=0.0 +2024-07-28 02:10:20,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=95704.0, ans=0.025 +2024-07-28 02:10:22,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=95704.0, ans=0.2 +2024-07-28 02:10:25,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95717.33333333333, ans=0.125 +2024-07-28 02:10:25,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.12 vs. limit=15.0 +2024-07-28 02:10:29,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=95717.33333333333, ans=0.0 +2024-07-28 02:10:38,232 INFO [train.py:1114] (0/4) Epoch 8, batch 250, loss[loss=0.2861, simple_loss=0.3644, pruned_loss=0.1039, over 4617.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.299, pruned_loss=0.06595, over 671025.38 frames. ], batch size: 16, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:44,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.60 vs. limit=15.0 +2024-07-28 02:10:45,369 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. 
limit=15.0 +2024-07-28 02:10:52,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=95757.33333333333, ans=8.0 +2024-07-28 02:10:57,652 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:11:02,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0 +2024-07-28 02:11:06,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=95797.33333333333, ans=0.025 +2024-07-28 02:11:08,828 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.464e+01 5.845e+01 7.036e+01 8.606e+01 1.725e+02, threshold=1.407e+02, percent-clipped=4.0 +2024-07-28 02:11:11,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=95797.33333333333, ans=0.2 +2024-07-28 02:11:13,557 INFO [train.py:1114] (0/4) Epoch 8, batch 300, loss[loss=0.2402, simple_loss=0.3209, pruned_loss=0.07971, over 4785.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.299, pruned_loss=0.06565, over 730483.39 frames. ], batch size: 15, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:11:16,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.10 vs. limit=10.0 +2024-07-28 02:11:24,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=95824.0, ans=0.025 +2024-07-28 02:11:28,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95837.33333333333, ans=0.125 +2024-07-28 02:11:33,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=95850.66666666667, ans=0.025 +2024-07-28 02:11:34,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=95850.66666666667, ans=0.125 +2024-07-28 02:11:43,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=95864.0, ans=0.0 +2024-07-28 02:11:46,905 INFO [train.py:1114] (0/4) Epoch 8, batch 350, loss[loss=0.2024, simple_loss=0.2739, pruned_loss=0.06543, over 4928.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2991, pruned_loss=0.06634, over 776539.77 frames. ], batch size: 12, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:12:02,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=95904.0, ans=0.125 +2024-07-28 02:12:07,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=95904.0, ans=0.0 +2024-07-28 02:12:18,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.562e+01 6.059e+01 7.082e+01 1.101e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 02:12:23,502 INFO [train.py:1114] (0/4) Epoch 8, batch 400, loss[loss=0.2278, simple_loss=0.3118, pruned_loss=0.07191, over 4697.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2975, pruned_loss=0.06527, over 814038.95 frames. 
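
The learning rate in these logs steps down at the epoch boundary (1.03e-02 late in epoch 7, 9.72e-03 at the start of epoch 8) while also creeping down within an epoch as batches accumulate. That pattern is consistent with a scheduler that discounts on both the batch count and the epoch count, sketched Eden-style below; the constants are assumptions, not values read from this experiment:

```python
# Hedged sketch of an Eden-style LR rule: lr decays smoothly in both the
# batch index and the epoch index. base_lr / lr_batches / lr_epochs are
# illustrative values, not this experiment's configuration.
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"lr: {eden_lr(0.05, batch=95000, epoch=8):.2e}")
```
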
], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:12:26,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=95944.0, ans=0.05 +2024-07-28 02:12:33,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95957.33333333333, ans=0.125 +2024-07-28 02:12:34,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.38 vs. limit=22.5 +2024-07-28 02:12:41,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=95970.66666666667, ans=0.2 +2024-07-28 02:12:46,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=95984.0, ans=0.0 +2024-07-28 02:12:51,422 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-72000.pt +2024-07-28 02:13:06,693 INFO [train.py:1114] (0/4) Epoch 8, batch 450, loss[loss=0.2007, simple_loss=0.3022, pruned_loss=0.04955, over 4648.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2981, pruned_loss=0.06563, over 839123.12 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:13:08,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.10 vs. limit=22.5 +2024-07-28 02:13:11,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=96010.66666666667, ans=0.025 +2024-07-28 02:13:13,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=96024.0, ans=0.0 +2024-07-28 02:13:14,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=96024.0, ans=0.0 +2024-07-28 02:13:14,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0 +2024-07-28 02:13:18,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0 +2024-07-28 02:13:18,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=96024.0, ans=0.0 +2024-07-28 02:13:22,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=96037.33333333333, ans=0.125 +2024-07-28 02:13:34,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.69 vs. limit=15.0 +2024-07-28 02:13:38,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=96064.0, ans=0.2 +2024-07-28 02:13:38,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=96064.0, ans=0.125 +2024-07-28 02:13:38,588 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.894e+01 6.679e+01 8.075e+01 1.208e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:13:43,460 INFO [train.py:1114] (0/4) Epoch 8, batch 500, loss[loss=0.2512, simple_loss=0.3393, pruned_loss=0.08151, over 4675.00 frames. 
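
`checkpoint.py:75` appears twice nearby, saving an end-of-epoch file (`epoch-7.pt`) and a periodic batch file (`checkpoint-72000.pt`). A minimal sketch of that dual cadence, assuming a plain `torch.save` of model and optimizer state (the exp dir and interval are illustrative):

```python
# Minimal checkpointing sketch matching the two cadences in this log:
# per-epoch files (epoch-N.pt) and every-K-batches files (checkpoint-N.pt).
import logging
from pathlib import Path
import torch

def save_checkpoint(path: Path, model, optimizer, epoch: int, batch_idx: int):
    logging.info(f"Saving checkpoint to {path}")
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "epoch": epoch,
            "batch_idx": batch_idx,
        },
        path,
    )

exp_dir = Path("zipformer/libri/exp")
# at the end of each epoch:  save_checkpoint(exp_dir / f"epoch-{epoch}.pt", ...)
# every K training batches:  save_checkpoint(exp_dir / f"checkpoint-{tot_batches}.pt", ...)
```
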
], tot_loss[loss=0.2146, simple_loss=0.2981, pruned_loss=0.06553, over 861503.85 frames. ], batch size: 15, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:13:46,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96077.33333333333, ans=0.125 +2024-07-28 02:13:55,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=96090.66666666667, ans=0.0 +2024-07-28 02:14:05,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-07-28 02:14:07,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=96117.33333333333, ans=15.0 +2024-07-28 02:14:13,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.60 vs. limit=12.0 +2024-07-28 02:14:14,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=96130.66666666667, ans=0.125 +2024-07-28 02:14:16,595 INFO [train.py:1114] (0/4) Epoch 8, batch 550, loss[loss=0.2283, simple_loss=0.3122, pruned_loss=0.07215, over 4610.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2979, pruned_loss=0.06546, over 877314.25 frames. ], batch size: 17, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:25,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=96157.33333333333, ans=0.025 +2024-07-28 02:14:26,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=6.0 +2024-07-28 02:14:38,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=96184.0, ans=0.04949747468305833 +2024-07-28 02:14:43,173 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.69 vs. limit=15.0 +2024-07-28 02:14:47,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.729e+01 6.322e+01 7.437e+01 1.078e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 02:14:52,224 INFO [train.py:1114] (0/4) Epoch 8, batch 600, loss[loss=0.2255, simple_loss=0.3092, pruned_loss=0.07091, over 4599.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2976, pruned_loss=0.06508, over 891876.60 frames. ], batch size: 16, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:57,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.82 vs. limit=12.0 +2024-07-28 02:15:12,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=96250.66666666667, ans=0.125 +2024-07-28 02:15:25,413 INFO [train.py:1114] (0/4) Epoch 8, batch 650, loss[loss=0.2515, simple_loss=0.3354, pruned_loss=0.08376, over 4757.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2986, pruned_loss=0.06566, over 903444.11 frames. 
], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:15:25,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96277.33333333333, ans=0.1 +2024-07-28 02:15:27,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=96277.33333333333, ans=0.0 +2024-07-28 02:15:37,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=96304.0, ans=0.0 +2024-07-28 02:15:41,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=96304.0, ans=0.125 +2024-07-28 02:15:42,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=96304.0, ans=0.0 +2024-07-28 02:15:53,644 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.095e+01 6.758e+01 8.122e+01 1.148e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 02:15:54,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.19 vs. limit=22.5 +2024-07-28 02:15:58,398 INFO [train.py:1114] (0/4) Epoch 8, batch 700, loss[loss=0.1829, simple_loss=0.2736, pruned_loss=0.04613, over 4639.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2993, pruned_loss=0.06541, over 911308.26 frames. ], batch size: 12, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:00,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=96344.0, ans=0.125 +2024-07-28 02:16:03,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=96344.0, ans=0.125 +2024-07-28 02:16:03,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=96344.0, ans=0.1 +2024-07-28 02:16:17,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.47 vs. limit=22.5 +2024-07-28 02:16:20,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.98 vs. limit=15.0 +2024-07-28 02:16:33,896 INFO [train.py:1114] (0/4) Epoch 8, batch 750, loss[loss=0.2084, simple_loss=0.31, pruned_loss=0.05336, over 4692.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.299, pruned_loss=0.06562, over 918010.83 frames. ], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:38,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.45 vs. limit=15.0 +2024-07-28 02:17:02,892 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.806e+01 6.357e+01 7.174e+01 1.221e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 02:17:07,449 INFO [train.py:1114] (0/4) Epoch 8, batch 800, loss[loss=0.2009, simple_loss=0.2798, pruned_loss=0.06096, over 4844.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.299, pruned_loss=0.06591, over 923282.06 frames. 
], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:17:10,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=96477.33333333333, ans=0.0 +2024-07-28 02:17:12,163 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:17:21,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=96504.0, ans=0.125 +2024-07-28 02:17:25,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96504.0, ans=0.1 +2024-07-28 02:17:30,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.36 vs. limit=15.0 +2024-07-28 02:17:55,600 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.43 vs. limit=22.5 +2024-07-28 02:18:02,119 INFO [train.py:1114] (0/4) Epoch 8, batch 850, loss[loss=0.1819, simple_loss=0.2783, pruned_loss=0.04273, over 4665.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2986, pruned_loss=0.06601, over 927649.23 frames. ], batch size: 14, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:05,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96544.0, ans=0.125 +2024-07-28 02:18:06,313 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:18:09,008 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:18:11,676 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:18:17,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=96570.66666666667, ans=0.0 +2024-07-28 02:18:25,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=96584.0, ans=0.125 +2024-07-28 02:18:26,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=96584.0, ans=0.0 +2024-07-28 02:18:26,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96584.0, ans=0.125 +2024-07-28 02:18:29,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=96584.0, ans=0.0 +2024-07-28 02:18:32,786 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.816e+01 6.612e+01 7.766e+01 1.010e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:18:33,566 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:18:36,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=96610.66666666667, ans=0.2 +2024-07-28 02:18:37,392 INFO [train.py:1114] (0/4) Epoch 8, batch 900, loss[loss=0.1705, simple_loss=0.2518, pruned_loss=0.04465, over 4843.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2989, pruned_loss=0.06622, over 928253.76 frames. 
], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:38,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=96610.66666666667, ans=0.125 +2024-07-28 02:18:43,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=96624.0, ans=0.125 +2024-07-28 02:18:46,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=96624.0, ans=0.125 +2024-07-28 02:18:56,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=96637.33333333333, ans=0.07 +2024-07-28 02:19:00,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96650.66666666667, ans=0.125 +2024-07-28 02:19:02,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=96650.66666666667, ans=0.025 +2024-07-28 02:19:06,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.19 vs. limit=15.0 +2024-07-28 02:19:11,048 INFO [train.py:1114] (0/4) Epoch 8, batch 950, loss[loss=0.214, simple_loss=0.2931, pruned_loss=0.06741, over 4775.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2986, pruned_loss=0.06626, over 930036.79 frames. ], batch size: 12, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:13,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=96677.33333333333, ans=0.2 +2024-07-28 02:19:18,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=96690.66666666667, ans=0.0 +2024-07-28 02:19:22,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=96690.66666666667, ans=0.125 +2024-07-28 02:19:28,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.82 vs. limit=10.0 +2024-07-28 02:19:36,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=96717.33333333333, ans=0.125 +2024-07-28 02:19:39,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=96730.66666666667, ans=0.2 +2024-07-28 02:19:40,116 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.894e+01 6.010e+01 6.768e+01 8.162e+01 1.047e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-28 02:19:44,742 INFO [train.py:1114] (0/4) Epoch 8, batch 1000, loss[loss=0.1998, simple_loss=0.2797, pruned_loss=0.05991, over 4966.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2994, pruned_loss=0.06652, over 929412.10 frames. 
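
The periodic `WARNING [optim.py:487]` lines summarize the recent gradient-norm distribution as five values (min, quartiles, max), a clipping threshold, and the fraction of updates clipped; in the numbers above the threshold consistently equals `Clipping_scale` (2.0) times the median. A hedged sketch of that bookkeeping, assuming a sliding window of norms (the exact rule in icefall's optim.py may differ in detail):

```python
from collections import deque
import statistics

class GradNormTracker:
    """Sliding-window gradient-norm statistics with quantile-based clipping.

    Illustrative only: reproduces the shape of the logged
    'grad-norm quartiles ... threshold ... percent-clipped' warnings,
    not the exact rule used by icefall's optim.py.
    """

    def __init__(self, window: int = 1000, clipping_scale: float = 2.0):
        self.norms = deque(maxlen=window)
        self.clipping_scale = clipping_scale
        self.num_seen = 0
        self.num_clipped = 0

    def update(self, grad_norm: float) -> float:
        """Record a norm; return the factor (<= 1.0) to scale gradients by."""
        self.norms.append(grad_norm)
        self.num_seen += 1
        if len(self.norms) < 4:
            return 1.0
        median = statistics.quantiles(self.norms, n=4)[1]
        threshold = self.clipping_scale * median
        if grad_norm > threshold:
            self.num_clipped += 1
            return threshold / grad_norm
        return 1.0

    def summary(self) -> str:
        q1, med, q3 = statistics.quantiles(self.norms, n=4)
        pct = 100.0 * self.num_clipped / max(self.num_seen, 1)
        return (f"grad-norm quartiles {min(self.norms):.3e} {q1:.3e} "
                f"{med:.3e} {q3:.3e} {max(self.norms):.3e}, "
                f"threshold={self.clipping_scale * med:.3e}, "
                f"percent-clipped={pct:.1f}")
```
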
], batch size: 13, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:54,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96757.33333333333, ans=0.125 +2024-07-28 02:19:57,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=96770.66666666667, ans=0.035 +2024-07-28 02:20:00,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=96770.66666666667, ans=0.1 +2024-07-28 02:20:12,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=96797.33333333333, ans=0.2 +2024-07-28 02:20:18,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=96810.66666666667, ans=0.035 +2024-07-28 02:20:19,453 INFO [train.py:1114] (0/4) Epoch 8, batch 1050, loss[loss=0.2087, simple_loss=0.3033, pruned_loss=0.05708, over 4874.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2982, pruned_loss=0.06598, over 932433.91 frames. ], batch size: 14, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:20:40,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96837.33333333333, ans=0.125 +2024-07-28 02:20:57,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=96850.66666666667, ans=0.0 +2024-07-28 02:21:00,908 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.947e+01 5.815e+01 6.423e+01 7.080e+01 9.595e+01, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:21:07,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96877.33333333333, ans=0.125 +2024-07-28 02:21:28,305 INFO [train.py:1114] (0/4) Epoch 8, batch 1100, loss[loss=0.2103, simple_loss=0.2923, pruned_loss=0.0642, over 4900.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2991, pruned_loss=0.06613, over 934520.67 frames. ], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:25:27,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96877.33333333333, ans=0.1 +2024-07-28 02:25:34,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=96890.66666666667, ans=0.025 +2024-07-28 02:25:39,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=96904.0, ans=0.125 +2024-07-28 02:25:45,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=96917.33333333333, ans=0.0 +2024-07-28 02:25:47,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=96917.33333333333, ans=0.0 +2024-07-28 02:25:53,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=96930.66666666667, ans=0.125 +2024-07-28 02:25:56,436 INFO [train.py:1114] (0/4) Epoch 8, batch 1150, loss[loss=0.1839, simple_loss=0.2715, pruned_loss=0.04817, over 4894.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2979, pruned_loss=0.06546, over 934472.34 frames. 
], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:07,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=96944.0, ans=0.0 +2024-07-28 02:26:07,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0 +2024-07-28 02:26:23,693 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:26:30,187 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.022e+01 6.608e+01 7.492e+01 1.273e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:26:39,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=96997.33333333333, ans=0.025 +2024-07-28 02:26:41,675 INFO [train.py:1114] (0/4) Epoch 8, batch 1200, loss[loss=0.2244, simple_loss=0.3178, pruned_loss=0.06551, over 4876.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2992, pruned_loss=0.06612, over 933292.29 frames. ], batch size: 14, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:42,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.93 vs. limit=15.0 +2024-07-28 02:26:54,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=97024.0, ans=0.05 +2024-07-28 02:27:08,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=97050.66666666667, ans=0.0 +2024-07-28 02:27:10,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=15.0 +2024-07-28 02:27:13,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=97064.0, ans=0.125 +2024-07-28 02:27:16,985 INFO [train.py:1114] (0/4) Epoch 8, batch 1250, loss[loss=0.2401, simple_loss=0.3106, pruned_loss=0.0848, over 4797.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2984, pruned_loss=0.0654, over 937214.74 frames. ], batch size: 15, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:27:17,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=97077.33333333333, ans=0.125 +2024-07-28 02:27:31,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=12.0 +2024-07-28 02:27:37,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-07-28 02:27:47,570 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.611e+01 6.251e+01 6.902e+01 9.769e+01, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 02:27:48,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=97130.66666666667, ans=0.04949747468305833 +2024-07-28 02:27:52,338 INFO [train.py:1114] (0/4) Epoch 8, batch 1300, loss[loss=0.2244, simple_loss=0.3116, pruned_loss=0.06859, over 4725.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2982, pruned_loss=0.06557, over 938435.24 frames. 
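
Each `train.py:1114` line pairs the current batch's loss with `tot_loss`, a running aggregate weighted by frame count, which is why the `over N frames` figure grows through the epoch; the fractional frame counts (e.g. `903444.11 frames`) suggest older batches are gently down-weighted. A minimal sketch under that assumption (not the actual icefall bookkeeping):

```python
class RunningLoss:
    """Frame-weighted running average in the spirit of the tot_loss[...] lines."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay        # assumption: older batches decay gently
        self.loss_sum = 0.0
        self.frame_sum = 0.0

    def update(self, batch_loss: float, num_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * num_frames
        self.frame_sum = self.decay * self.frame_sum + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frame_sum, 1.0)

tracker = RunningLoss()
tracker.update(batch_loss=0.2283, num_frames=4610.0)
print(f"tot_loss={tracker.value:.4f}, over {tracker.frame_sum:.2f} frames")
```
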
], batch size: 19, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:28:17,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=97170.66666666667, ans=0.025 +2024-07-28 02:28:33,054 INFO [train.py:1114] (0/4) Epoch 8, batch 1350, loss[loss=0.2245, simple_loss=0.3114, pruned_loss=0.06877, over 4757.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2968, pruned_loss=0.06443, over 940610.85 frames. ], batch size: 13, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:28:37,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=97210.66666666667, ans=0.09899494936611666 +2024-07-28 02:28:39,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97210.66666666667, ans=0.125 +2024-07-28 02:28:49,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97237.33333333333, ans=0.0 +2024-07-28 02:28:49,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=97237.33333333333, ans=0.0 +2024-07-28 02:28:57,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=97250.66666666667, ans=0.09899494936611666 +2024-07-28 02:29:04,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.671e+01 8.189e+01 1.142e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:29:05,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=97264.0, ans=10.0 +2024-07-28 02:29:07,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=97264.0, ans=0.125 +2024-07-28 02:29:09,285 INFO [train.py:1114] (0/4) Epoch 8, batch 1400, loss[loss=0.1649, simple_loss=0.2469, pruned_loss=0.04145, over 4710.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2968, pruned_loss=0.06431, over 942378.56 frames. ], batch size: 11, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:24,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=97304.0, ans=0.95 +2024-07-28 02:29:31,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=97317.33333333333, ans=0.0 +2024-07-28 02:29:31,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=97317.33333333333, ans=0.025 +2024-07-28 02:29:35,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=97330.66666666667, ans=0.0 +2024-07-28 02:29:37,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=97330.66666666667, ans=0.025 +2024-07-28 02:29:43,465 INFO [train.py:1114] (0/4) Epoch 8, batch 1450, loss[loss=0.2297, simple_loss=0.3316, pruned_loss=0.0639, over 4683.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2976, pruned_loss=0.06441, over 942439.82 frames. ], batch size: 15, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:48,801 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.39 vs. 
limit=15.0 +2024-07-28 02:29:52,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=97357.33333333333, ans=0.025 +2024-07-28 02:30:02,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=97384.0, ans=0.2 +2024-07-28 02:30:12,512 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.678e+01 6.336e+01 6.902e+01 9.292e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 02:30:16,533 INFO [train.py:1114] (0/4) Epoch 8, batch 1500, loss[loss=0.2254, simple_loss=0.3062, pruned_loss=0.0723, over 4818.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2983, pruned_loss=0.06506, over 941865.00 frames. ], batch size: 14, lr: 9.62e-03, grad_scale: 16.0 +2024-07-28 02:30:27,236 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:30:36,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=97437.33333333333, ans=0.0 +2024-07-28 02:30:51,713 INFO [train.py:1114] (0/4) Epoch 8, batch 1550, loss[loss=0.1977, simple_loss=0.2803, pruned_loss=0.05754, over 4899.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2986, pruned_loss=0.06554, over 938292.79 frames. ], batch size: 15, lr: 9.61e-03, grad_scale: 16.0 +2024-07-28 02:30:57,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.57 vs. limit=22.5 +2024-07-28 02:31:01,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=97490.66666666667, ans=0.125 +2024-07-28 02:31:13,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=8.0 +2024-07-28 02:31:19,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=97530.66666666667, ans=0.2 +2024-07-28 02:31:21,194 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.890e+01 6.503e+01 7.700e+01 2.674e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-28 02:31:25,131 INFO [train.py:1114] (0/4) Epoch 8, batch 1600, loss[loss=0.213, simple_loss=0.3123, pruned_loss=0.05687, over 4878.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2984, pruned_loss=0.06511, over 937514.82 frames. 
], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:31:29,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=97544.0, ans=15.0 +2024-07-28 02:31:35,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=97557.33333333333, ans=0.125 +2024-07-28 02:31:38,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97570.66666666667, ans=0.1 +2024-07-28 02:31:51,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=97584.0, ans=0.125 +2024-07-28 02:31:55,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97597.33333333333, ans=0.125 +2024-07-28 02:31:57,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97597.33333333333, ans=0.1 +2024-07-28 02:31:59,464 INFO [train.py:1114] (0/4) Epoch 8, batch 1650, loss[loss=0.2128, simple_loss=0.2976, pruned_loss=0.06404, over 4673.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2988, pruned_loss=0.06531, over 937105.56 frames. ], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:32:05,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97624.0, ans=0.0 +2024-07-28 02:32:08,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=97624.0, ans=0.125 +2024-07-28 02:32:13,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=97637.33333333333, ans=0.0 +2024-07-28 02:32:13,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0 +2024-07-28 02:32:13,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=97637.33333333333, ans=0.125 +2024-07-28 02:32:19,280 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0 +2024-07-28 02:32:30,781 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.779e+01 6.597e+01 7.631e+01 1.276e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:32:34,722 INFO [train.py:1114] (0/4) Epoch 8, batch 1700, loss[loss=0.1769, simple_loss=0.2597, pruned_loss=0.04705, over 4709.00 frames. ], tot_loss[loss=0.215, simple_loss=0.299, pruned_loss=0.06555, over 938903.05 frames. 
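
The `grad_scale` value reported with every batch is the dynamic loss scale used for mixed-precision training; it sits at 32.0 for long stretches and halves to 16.0 when an overflow is detected, which matches the behaviour of PyTorch's `torch.cuda.amp.GradScaler`. A sketch of a training step wired that way (the `model` and `batch` handling are placeholders):

```python
import torch

def train_step(model, optimizer, scaler, batch, device):
    """One AMP training step; placeholder model/batch handling."""
    optimizer.zero_grad()
    with torch.autocast(device_type="cuda", dtype=torch.float16):
        loss = model(batch.to(device)).mean()  # stand-in for the real loss
    scaler.scale(loss).backward()
    scaler.step(optimizer)   # silently skips the step if grads hit inf/nan
    scaler.update()          # halves the scale on overflow, regrows it later
    return loss.detach(), scaler.get_scale()

# Matches the logged behaviour: scale parked at 32.0, dropping to 16.0
# after an overflow, then growing back once updates are stable again.
scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)
```
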
], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:32:41,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97677.33333333333, ans=0.125 +2024-07-28 02:32:43,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=97690.66666666667, ans=0.1 +2024-07-28 02:32:48,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=97704.0, ans=0.125 +2024-07-28 02:32:51,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=97704.0, ans=0.0 +2024-07-28 02:33:03,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0 +2024-07-28 02:33:09,509 INFO [train.py:1114] (0/4) Epoch 8, batch 1750, loss[loss=0.1845, simple_loss=0.2772, pruned_loss=0.04589, over 4813.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2985, pruned_loss=0.06556, over 939821.39 frames. ], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:35,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=97784.0, ans=0.0 +2024-07-28 02:33:46,449 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.043e+01 5.863e+01 6.439e+01 7.161e+01 1.257e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 02:33:47,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=97797.33333333333, ans=0.125 +2024-07-28 02:33:51,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=97810.66666666667, ans=0.125 +2024-07-28 02:33:52,192 INFO [train.py:1114] (0/4) Epoch 8, batch 1800, loss[loss=0.2455, simple_loss=0.3241, pruned_loss=0.08343, over 4631.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2987, pruned_loss=0.06584, over 940559.25 frames. ], batch size: 13, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:34:19,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.36 vs. limit=10.0 +2024-07-28 02:34:19,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.78 vs. limit=22.5 +2024-07-28 02:34:22,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=97850.66666666667, ans=0.125 +2024-07-28 02:34:25,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=97850.66666666667, ans=0.2 +2024-07-28 02:34:30,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=97864.0, ans=0.125 +2024-07-28 02:34:34,877 INFO [train.py:1114] (0/4) Epoch 8, batch 1850, loss[loss=0.2662, simple_loss=0.3451, pruned_loss=0.09363, over 4824.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2991, pruned_loss=0.06606, over 940463.82 frames. 
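
Many of the scheduled values above belong to balancer modules (`balancer1.prob`, `min_positive`, `max_abs`, ...), which keep per-channel activation statistics inside target ranges. One plausible reading of those parameters, expressed as an explicit penalty for illustration; the actual Balancer in icefall's scaling.py instead injects corrective gradients, and only with the scheduled probability `prob`:

```python
import torch

def balancer_penalty(x: torch.Tensor,
                     min_positive: float = 0.05, max_positive: float = 0.95,
                     min_abs: float = 0.2, max_abs: float = 10.0) -> torch.Tensor:
    """Zero while per-channel statistics stay inside their target ranges.

    min_positive/max_positive bound the fraction of positive values per
    channel; min_abs/max_abs bound the mean absolute value. Defaults echo
    values seen in the log; the prob=0.125 lines are the probability of
    applying the correction, not modelled here.
    """
    frac_positive = (x > 0).float().mean(dim=0)
    mean_abs = x.abs().mean(dim=0)
    penalty = ((min_positive - frac_positive).clamp(min=0.0)
               + (frac_positive - max_positive).clamp(min=0.0)
               + (min_abs - mean_abs).clamp(min=0.0)
               + (mean_abs - max_abs).clamp(min=0.0))
    return penalty.sum()

x = torch.randn(1000, 512)
print(balancer_penalty(x))  # ~0 for roughly symmetric, unit-scale features
```
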
], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:34:38,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97877.33333333333, ans=0.1 +2024-07-28 02:34:39,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.90 vs. limit=10.0 +2024-07-28 02:34:39,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97877.33333333333, ans=0.1 +2024-07-28 02:35:01,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.43 vs. limit=22.5 +2024-07-28 02:35:01,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=97930.66666666667, ans=0.125 +2024-07-28 02:35:04,090 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.869e+01 6.668e+01 7.730e+01 1.207e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:35:05,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.79 vs. limit=15.0 +2024-07-28 02:35:08,097 INFO [train.py:1114] (0/4) Epoch 8, batch 1900, loss[loss=0.2189, simple_loss=0.3236, pruned_loss=0.05714, over 4656.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.3, pruned_loss=0.06594, over 941889.55 frames. ], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:14,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-07-28 02:35:23,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=97970.66666666667, ans=0.02 +2024-07-28 02:35:25,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97970.66666666667, ans=0.125 +2024-07-28 02:35:26,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=97970.66666666667, ans=0.2 +2024-07-28 02:35:32,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=97984.0, ans=0.125 +2024-07-28 02:35:33,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=97984.0, ans=0.125 +2024-07-28 02:35:41,417 INFO [train.py:1114] (0/4) Epoch 8, batch 1950, loss[loss=0.2294, simple_loss=0.3214, pruned_loss=0.06869, over 4901.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.3016, pruned_loss=0.06657, over 943877.72 frames. ], batch size: 13, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:42,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98010.66666666667, ans=0.125 +2024-07-28 02:35:49,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.47 vs. 
limit=15.0 +2024-07-28 02:35:49,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=98024.0, ans=0.125 +2024-07-28 02:36:02,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=98037.33333333333, ans=0.2 +2024-07-28 02:36:05,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98050.66666666667, ans=0.1 +2024-07-28 02:36:06,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=98050.66666666667, ans=0.0 +2024-07-28 02:36:12,625 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+01 5.706e+01 6.313e+01 6.898e+01 1.010e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 02:36:16,771 INFO [train.py:1114] (0/4) Epoch 8, batch 2000, loss[loss=0.2056, simple_loss=0.266, pruned_loss=0.07263, over 4805.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.3017, pruned_loss=0.06691, over 941265.51 frames. ], batch size: 11, lr: 9.58e-03, grad_scale: 32.0 +2024-07-28 02:36:17,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=98077.33333333333, ans=0.125 +2024-07-28 02:36:19,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-28 02:36:27,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=98090.66666666667, ans=0.0 +2024-07-28 02:36:31,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=98104.0, ans=0.2 +2024-07-28 02:36:31,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=98104.0, ans=0.0 +2024-07-28 02:36:49,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.82 vs. limit=22.5 +2024-07-28 02:36:50,176 INFO [train.py:1114] (0/4) Epoch 8, batch 2050, loss[loss=0.1978, simple_loss=0.2723, pruned_loss=0.06167, over 4615.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2991, pruned_loss=0.06568, over 939101.31 frames. 
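
The `Whitening` lines compare a per-module statistic against a limit (e.g. `metric=13.91 vs. limit=15.0`); when the metric exceeds its limit, the module nudges activations back toward a whiter, more isotropic covariance. A guess at such a metric, assuming it measures eigenvalue spread of the feature covariance (the exact definition lives in icefall's scaling.py):

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """How anisotropic the feature covariance is: 1.0 when perfectly white.

    Assumed definition for illustration: d * E[lambda^2] / (E[lambda])^2
    over the eigenvalues lambda of the covariance, computed per channel
    group and averaged; grows as variance concentrates in few directions.
    """
    n, d = x.shape
    assert d % num_groups == 0
    size = d // num_groups
    metrics = []
    for g in range(num_groups):
        xg = x[:, g * size:(g + 1) * size]
        xg = xg - xg.mean(dim=0, keepdim=True)
        cov = (xg.T @ xg) / n
        # trace(C @ C) = sum(lambda^2); trace(C) = sum(lambda)
        metrics.append(size * (cov @ cov).trace()
                       / cov.trace().clamp(min=1e-20) ** 2)
    return torch.stack(metrics).mean().item()

x = torch.randn(10000, 128)      # near-white features
print(whitening_metric(x))       # ~1.0; strongly correlated features score higher
```
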
], batch size: 11, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:36:57,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98157.33333333333, ans=0.1 +2024-07-28 02:37:04,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98170.66666666667, ans=0.1 +2024-07-28 02:37:11,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=98184.0, ans=0.2 +2024-07-28 02:37:16,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98197.33333333333, ans=0.1 +2024-07-28 02:37:19,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98197.33333333333, ans=0.125 +2024-07-28 02:37:20,052 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 6.128e+01 6.881e+01 8.380e+01 1.718e+02, threshold=1.376e+02, percent-clipped=3.0 +2024-07-28 02:37:22,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=98197.33333333333, ans=0.2 +2024-07-28 02:37:23,355 INFO [train.py:1114] (0/4) Epoch 8, batch 2100, loss[loss=0.188, simple_loss=0.2769, pruned_loss=0.04954, over 4767.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2984, pruned_loss=0.06569, over 940933.88 frames. ], batch size: 13, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:37:26,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98210.66666666667, ans=0.1 +2024-07-28 02:37:37,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=98237.33333333333, ans=0.125 +2024-07-28 02:37:46,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.54 vs. limit=15.0 +2024-07-28 02:37:52,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=98264.0, ans=0.0 +2024-07-28 02:37:56,376 INFO [train.py:1114] (0/4) Epoch 8, batch 2150, loss[loss=0.2112, simple_loss=0.3008, pruned_loss=0.06081, over 4893.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2976, pruned_loss=0.06566, over 943950.32 frames. ], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:37:59,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=98277.33333333333, ans=0.125 +2024-07-28 02:38:03,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.84 vs. limit=15.0 +2024-07-28 02:38:06,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.52 vs. 
limit=15.0 +2024-07-28 02:38:10,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98290.66666666667, ans=0.1 +2024-07-28 02:38:21,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=98317.33333333333, ans=0.125 +2024-07-28 02:38:22,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=98317.33333333333, ans=0.125 +2024-07-28 02:38:28,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.621e+01 6.298e+01 7.456e+01 1.063e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 02:38:30,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=98344.0, ans=0.0 +2024-07-28 02:38:31,437 INFO [train.py:1114] (0/4) Epoch 8, batch 2200, loss[loss=0.2179, simple_loss=0.2985, pruned_loss=0.06871, over 4805.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.299, pruned_loss=0.06643, over 943425.70 frames. ], batch size: 14, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:38:53,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=98384.0, ans=0.125 +2024-07-28 02:39:06,688 INFO [train.py:1114] (0/4) Epoch 8, batch 2250, loss[loss=0.1936, simple_loss=0.2628, pruned_loss=0.06224, over 4687.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2988, pruned_loss=0.06649, over 941439.43 frames. ], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:08,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=98410.66666666667, ans=0.0 +2024-07-28 02:39:18,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=98424.0, ans=0.2 +2024-07-28 02:39:19,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=98437.33333333333, ans=0.2 +2024-07-28 02:39:24,583 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.29 vs. limit=22.5 +2024-07-28 02:39:35,839 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.847e+01 6.592e+01 7.483e+01 1.040e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 02:39:37,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=98464.0, ans=0.125 +2024-07-28 02:39:39,152 INFO [train.py:1114] (0/4) Epoch 8, batch 2300, loss[loss=0.2223, simple_loss=0.2921, pruned_loss=0.0763, over 4925.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2982, pruned_loss=0.06645, over 939242.67 frames. ], batch size: 12, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:42,830 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.22 vs. 
limit=22.5 +2024-07-28 02:39:47,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=98490.66666666667, ans=0.09899494936611666 +2024-07-28 02:41:03,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=98504.0, ans=0.125 +2024-07-28 02:41:18,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=98530.66666666667, ans=0.125 +2024-07-28 02:41:20,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98530.66666666667, ans=0.1 +2024-07-28 02:41:20,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=98530.66666666667, ans=0.125 +2024-07-28 02:41:22,930 INFO [train.py:1114] (0/4) Epoch 8, batch 2350, loss[loss=0.2154, simple_loss=0.2946, pruned_loss=0.06808, over 4634.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.297, pruned_loss=0.06543, over 941346.70 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 16.0 +2024-07-28 02:41:27,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=98544.0, ans=0.125 +2024-07-28 02:41:29,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=98557.33333333333, ans=0.0 +2024-07-28 02:41:29,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.75 vs. limit=22.5 +2024-07-28 02:41:53,306 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.834e+01 6.332e+01 7.540e+01 1.064e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 02:41:56,606 INFO [train.py:1114] (0/4) Epoch 8, batch 2400, loss[loss=0.23, simple_loss=0.3138, pruned_loss=0.07314, over 4645.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2985, pruned_loss=0.06626, over 941239.41 frames. ], batch size: 12, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:07,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0 +2024-07-28 02:42:20,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=98637.33333333333, ans=0.125 +2024-07-28 02:42:24,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=98650.66666666667, ans=0.04949747468305833 +2024-07-28 02:42:30,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=98664.0, ans=0.2 +2024-07-28 02:42:38,021 INFO [train.py:1114] (0/4) Epoch 8, batch 2450, loss[loss=0.197, simple_loss=0.298, pruned_loss=0.04794, over 4699.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2996, pruned_loss=0.06677, over 937335.47 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:51,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98690.66666666667, ans=0.1 +2024-07-28 02:42:52,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.09 vs. 
limit=10.0 +2024-07-28 02:43:21,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98717.33333333333, ans=0.125 +2024-07-28 02:43:25,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.058e+01 6.704e+01 7.921e+01 1.237e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 02:43:28,903 INFO [train.py:1114] (0/4) Epoch 8, batch 2500, loss[loss=0.1968, simple_loss=0.2926, pruned_loss=0.05047, over 4797.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2984, pruned_loss=0.06622, over 939268.04 frames. ], batch size: 14, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:43:40,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=98757.33333333333, ans=0.95 +2024-07-28 02:43:42,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=98770.66666666667, ans=0.125 +2024-07-28 02:43:45,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=98770.66666666667, ans=10.0 +2024-07-28 02:44:15,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=98797.33333333333, ans=0.2 +2024-07-28 02:44:21,370 INFO [train.py:1114] (0/4) Epoch 8, batch 2550, loss[loss=0.1953, simple_loss=0.2657, pruned_loss=0.06248, over 4801.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2979, pruned_loss=0.06603, over 938879.72 frames. ], batch size: 11, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:44:27,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=98824.0, ans=0.0 +2024-07-28 02:44:51,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=98850.66666666667, ans=0.0 +2024-07-28 02:44:58,733 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.841e+01 6.423e+01 7.700e+01 1.142e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:45:02,064 INFO [train.py:1114] (0/4) Epoch 8, batch 2600, loss[loss=0.2169, simple_loss=0.2948, pruned_loss=0.06949, over 4901.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2987, pruned_loss=0.06614, over 937935.38 frames. ], batch size: 13, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:45:07,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=15.0 +2024-07-28 02:45:07,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=98877.33333333333, ans=0.025 +2024-07-28 02:45:07,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=98877.33333333333, ans=0.125 +2024-07-28 02:45:10,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=98890.66666666667, ans=0.0 +2024-07-28 02:45:12,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. 
limit=6.0 +2024-07-28 02:45:33,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=98930.66666666667, ans=0.0 +2024-07-28 02:45:36,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=98944.0, ans=0.0 +2024-07-28 02:45:37,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.76 vs. limit=22.5 +2024-07-28 02:45:37,245 INFO [train.py:1114] (0/4) Epoch 8, batch 2650, loss[loss=0.2224, simple_loss=0.3075, pruned_loss=0.06864, over 4664.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2994, pruned_loss=0.06607, over 940143.60 frames. ], batch size: 16, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:45:39,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=98944.0, ans=0.035 +2024-07-28 02:45:41,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=98944.0, ans=0.125 +2024-07-28 02:46:02,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=98984.0, ans=0.125 +2024-07-28 02:46:10,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.07 vs. limit=12.0 +2024-07-28 02:46:10,562 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.54 vs. limit=22.5 +2024-07-28 02:46:12,210 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.757e+01 6.469e+01 7.162e+01 1.151e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 02:46:18,803 INFO [train.py:1114] (0/4) Epoch 8, batch 2700, loss[loss=0.2414, simple_loss=0.3156, pruned_loss=0.08365, over 4746.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2995, pruned_loss=0.06599, over 939968.79 frames. ], batch size: 14, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:18,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=99010.66666666667, ans=0.0 +2024-07-28 02:46:27,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.12 vs. 
limit=12.0 +2024-07-28 02:46:27,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=99024.0, ans=10.0 +2024-07-28 02:46:35,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=99037.33333333333, ans=0.5 +2024-07-28 02:46:35,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=99037.33333333333, ans=0.2 +2024-07-28 02:46:37,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=99037.33333333333, ans=0.0 +2024-07-28 02:46:39,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=99037.33333333333, ans=0.125 +2024-07-28 02:46:47,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=99064.0, ans=0.125 +2024-07-28 02:46:54,476 INFO [train.py:1114] (0/4) Epoch 8, batch 2750, loss[loss=0.1765, simple_loss=0.2588, pruned_loss=0.04712, over 4698.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2977, pruned_loss=0.06512, over 939379.76 frames. ], batch size: 12, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:56,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.23 vs. limit=22.5 +2024-07-28 02:47:02,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99077.33333333333, ans=0.1 +2024-07-28 02:47:06,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=99090.66666666667, ans=0.125 +2024-07-28 02:47:06,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=99090.66666666667, ans=0.125 +2024-07-28 02:47:15,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=99104.0, ans=0.125 +2024-07-28 02:47:21,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=99117.33333333333, ans=0.0 +2024-07-28 02:47:31,848 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 6.000e+01 6.844e+01 8.152e+01 1.229e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 02:47:40,075 INFO [train.py:1114] (0/4) Epoch 8, batch 2800, loss[loss=0.3383, simple_loss=0.3852, pruned_loss=0.1457, over 3385.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.298, pruned_loss=0.06552, over 937301.55 frames. ], batch size: 35, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:47:41,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.32 vs. 
limit=15.0 +2024-07-28 02:47:46,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99157.33333333333, ans=0.1 +2024-07-28 02:47:46,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99157.33333333333, ans=0.1 +2024-07-28 02:47:48,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=99157.33333333333, ans=0.05 +2024-07-28 02:48:08,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=99184.0, ans=0.125 +2024-07-28 02:48:09,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=99184.0, ans=0.125 +2024-07-28 02:48:10,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.53 vs. limit=10.0 +2024-07-28 02:48:13,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=99197.33333333333, ans=0.125 +2024-07-28 02:48:22,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=99197.33333333333, ans=0.2 +2024-07-28 02:48:22,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.64 vs. limit=15.0 +2024-07-28 02:48:23,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=99197.33333333333, ans=0.125 +2024-07-28 02:48:24,709 INFO [train.py:1114] (0/4) Epoch 8, batch 2850, loss[loss=0.1979, simple_loss=0.2778, pruned_loss=0.05902, over 4964.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.299, pruned_loss=0.06637, over 936260.11 frames. ], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:30,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=99210.66666666667, ans=0.025 +2024-07-28 02:48:36,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=99224.0, ans=0.2 +2024-07-28 02:48:38,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=99237.33333333333, ans=0.0 +2024-07-28 02:48:47,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0 +2024-07-28 02:48:54,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.619e+01 6.304e+01 7.225e+01 1.077e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 02:48:55,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=99264.0, ans=0.2 +2024-07-28 02:48:57,343 INFO [train.py:1114] (0/4) Epoch 8, batch 2900, loss[loss=0.1855, simple_loss=0.2798, pruned_loss=0.04561, over 4831.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.299, pruned_loss=0.06601, over 939891.46 frames. 
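
The various `*_skip_rate` schedules above (`attention_skip_rate`, `conv_skip_rate`, `bypass.skip_rate`, ...) control stochastic bypassing of sub-modules during training, with the scheduled rate decaying toward 0.0 (as seen in the log) so the mechanism is disabled by late training. A minimal illustrative version of that mechanism:

```python
import torch

def maybe_bypass(module, x: torch.Tensor, skip_rate: float,
                 training: bool) -> torch.Tensor:
    """Stochastically drop a sub-module's contribution for a whole batch.

    Illustrative version of the *_skip_rate mechanism: skipping
    regularizes training and saves compute early on; a rate of 0.0
    always takes the normal residual path.
    """
    if training and torch.rand(()).item() < skip_rate:
        return x              # identity: sub-module bypassed this batch
    return x + module(x)      # normal residual path
```
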
], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:57,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=99277.33333333333, ans=0.0 +2024-07-28 02:49:03,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=99277.33333333333, ans=0.125 +2024-07-28 02:49:05,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99290.66666666667, ans=0.125 +2024-07-28 02:49:25,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=15.0 +2024-07-28 02:49:33,159 INFO [train.py:1114] (0/4) Epoch 8, batch 2950, loss[loss=0.2339, simple_loss=0.3191, pruned_loss=0.07429, over 4712.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2976, pruned_loss=0.06551, over 938659.57 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:49:40,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=99357.33333333333, ans=0.125 +2024-07-28 02:49:43,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=99357.33333333333, ans=0.0 +2024-07-28 02:49:48,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.91 vs. limit=15.0 +2024-07-28 02:50:04,312 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.988e+01 6.681e+01 8.290e+01 1.259e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:50:07,618 INFO [train.py:1114] (0/4) Epoch 8, batch 3000, loss[loss=0.1867, simple_loss=0.2895, pruned_loss=0.04201, over 4758.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2982, pruned_loss=0.06524, over 938474.57 frames. ], batch size: 13, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:50:07,619 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 02:50:54,530 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.1802, simple_loss=0.2848, pruned_loss=0.03781, over 944034.00 frames. +2024-07-28 02:50:54,530 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 02:50:56,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=99410.66666666667, ans=0.125 +2024-07-28 02:50:58,824 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:51:06,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=99424.0, ans=0.0 +2024-07-28 02:51:08,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.99 vs. limit=22.5 +2024-07-28 02:51:10,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. 
limit=15.0 +2024-07-28 02:51:18,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=99450.66666666667, ans=0.0 +2024-07-28 02:51:20,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.43 vs. limit=22.5 +2024-07-28 02:51:29,233 INFO [train.py:1114] (0/4) Epoch 8, batch 3050, loss[loss=0.1982, simple_loss=0.2781, pruned_loss=0.05917, over 4640.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2993, pruned_loss=0.06599, over 937017.65 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:51:36,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=99490.66666666667, ans=0.2 +2024-07-28 02:52:04,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=99490.66666666667, ans=0.2 +2024-07-28 02:52:19,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=99530.66666666667, ans=0.0 +2024-07-28 02:52:22,252 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.732e+01 6.156e+01 7.183e+01 1.083e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 02:52:25,460 INFO [train.py:1114] (0/4) Epoch 8, batch 3100, loss[loss=0.2368, simple_loss=0.3269, pruned_loss=0.07337, over 4615.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2982, pruned_loss=0.06558, over 937746.64 frames. ], batch size: 16, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:52:27,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.59 vs. limit=15.0 +2024-07-28 02:52:40,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=99570.66666666667, ans=0.2 +2024-07-28 02:52:41,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=99570.66666666667, ans=0.125 +2024-07-28 02:52:42,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=99570.66666666667, ans=0.5 +2024-07-28 02:53:00,754 INFO [train.py:1114] (0/4) Epoch 8, batch 3150, loss[loss=0.2334, simple_loss=0.3182, pruned_loss=0.07427, over 4618.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2983, pruned_loss=0.06524, over 937883.80 frames. ], batch size: 17, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:09,079 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0 +2024-07-28 02:53:16,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=99637.33333333333, ans=0.0 +2024-07-28 02:53:33,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+01 5.840e+01 6.506e+01 7.424e+01 1.196e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 02:53:36,727 INFO [train.py:1114] (0/4) Epoch 8, batch 3200, loss[loss=0.2204, simple_loss=0.3027, pruned_loss=0.06903, over 4835.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2972, pruned_loss=0.06459, over 939601.59 frames. 
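
At batch 3000 the log briefly switches to evaluation (`Computing validation loss` ... `Maximum memory allocated so far is 4178MB`). A generic sketch of that kind of validation pass, including the CUDA peak-memory report; `model.loss` and the batch format are assumptions, and the real routine in icefall's train.py tracks several loss components (`loss`, `simple_loss`, `pruned_loss`):

```python
import logging
import torch

def compute_validation_loss(model, valid_loader, device) -> float:
    """Average validation loss over frames, plus a peak-memory report.

    Assumes each batch is (features, targets, num_frames) and that
    model.loss(...) returns a summed loss over the batch.
    """
    model.eval()
    loss_sum, frame_sum = 0.0, 0
    with torch.no_grad():
        for features, targets, num_frames in valid_loader:
            loss = model.loss(features.to(device), targets.to(device))
            loss_sum += loss.item()
            frame_sum += num_frames
    model.train()
    logging.info(f"validation: loss={loss_sum / max(frame_sum, 1):.4f}, "
                 f"over {frame_sum} frames")
    if torch.cuda.is_available():
        mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        logging.info(f"Maximum memory allocated so far is {mb}MB")
    return loss_sum / max(frame_sum, 1)
```
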
+2024-07-28 02:54:11,602 INFO [train.py:1114] (0/4) Epoch 8, batch 3250, loss[loss=0.2278, simple_loss=0.3176, pruned_loss=0.06896, over 4937.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2978, pruned_loss=0.06479, over 940838.66 frames. ], batch size: 14, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:11,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=99744.0, ans=0.125 +2024-07-28 02:54:13,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99744.0, ans=0.1 +2024-07-28 02:54:15,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=15.0 +2024-07-28 02:54:16,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=99744.0, ans=0.2 +2024-07-28 02:54:21,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=99757.33333333333, ans=0.125 +2024-07-28 02:54:22,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=99757.33333333333, ans=0.2 +2024-07-28 02:54:24,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0 +2024-07-28 02:54:41,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.887e+01 6.598e+01 7.799e+01 2.167e+02, threshold=1.320e+02, percent-clipped=1.0 +2024-07-28 02:54:42,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=99797.33333333333, ans=0.2 +2024-07-28 02:54:45,044 INFO [train.py:1114] (0/4) Epoch 8, batch 3300, loss[loss=0.2267, simple_loss=0.3093, pruned_loss=0.07206, over 4683.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2973, pruned_loss=0.0652, over 941210.25 frames. ], batch size: 19, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:49,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=99810.66666666667, ans=0.0 +2024-07-28 02:54:49,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=99810.66666666667, ans=0.0 +2024-07-28 02:54:51,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99824.0, ans=0.1 +2024-07-28 02:55:00,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=99837.33333333333, ans=0.125 +2024-07-28 02:55:09,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=99850.66666666667, ans=0.125 +2024-07-28 02:55:14,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=99864.0, ans=0.125 +2024-07-28 02:55:18,685 INFO [train.py:1114] (0/4) Epoch 8, batch 3350, loss[loss=0.2624, simple_loss=0.3181, pruned_loss=0.1034, over 4634.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2977, pruned_loss=0.06561, over 939147.44 frames.
], batch size: 17, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:19,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99877.33333333333, ans=0.125 +2024-07-28 02:55:26,794 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:55:50,388 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.817e+01 6.427e+01 7.197e+01 1.127e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:55:53,732 INFO [train.py:1114] (0/4) Epoch 8, batch 3400, loss[loss=0.201, simple_loss=0.2742, pruned_loss=0.06394, over 4796.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2983, pruned_loss=0.06585, over 937535.48 frames. ], batch size: 11, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:58,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99944.0, ans=0.1 +2024-07-28 02:56:00,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=99957.33333333333, ans=0.0 +2024-07-28 02:56:22,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=99997.33333333333, ans=0.2 +2024-07-28 02:56:23,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=99997.33333333333, ans=0.5 +2024-07-28 02:56:24,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=99997.33333333333, ans=0.125 +2024-07-28 02:56:27,976 INFO [train.py:1114] (0/4) Epoch 8, batch 3450, loss[loss=0.2726, simple_loss=0.3518, pruned_loss=0.09673, over 4764.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2996, pruned_loss=0.06609, over 937866.08 frames. ], batch size: 19, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:56:30,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100010.66666666667, ans=0.125 +2024-07-28 02:56:42,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=100037.33333333333, ans=0.025 +2024-07-28 02:56:45,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=100037.33333333333, ans=0.09899494936611666 +2024-07-28 02:56:47,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=100050.66666666667, ans=0.0 +2024-07-28 02:56:48,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=100050.66666666667, ans=0.125 +2024-07-28 02:56:50,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100050.66666666667, ans=0.125 +2024-07-28 02:56:58,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.830e+01 6.643e+01 7.875e+01 1.454e+02, threshold=1.329e+02, percent-clipped=3.0 +2024-07-28 02:57:01,717 INFO [train.py:1114] (0/4) Epoch 8, batch 3500, loss[loss=0.1937, simple_loss=0.2692, pruned_loss=0.05907, over 4936.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2981, pruned_loss=0.06548, over 938574.51 frames. 
], batch size: 12, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:07,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=100090.66666666667, ans=0.0 +2024-07-28 02:57:14,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=100090.66666666667, ans=0.025 +2024-07-28 02:57:20,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=100104.0, ans=0.2 +2024-07-28 02:57:35,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=100130.66666666667, ans=0.0 +2024-07-28 02:57:37,824 INFO [train.py:1114] (0/4) Epoch 8, batch 3550, loss[loss=0.2375, simple_loss=0.3275, pruned_loss=0.07381, over 4663.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2978, pruned_loss=0.06507, over 939195.50 frames. ], batch size: 14, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:50,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=100170.66666666667, ans=0.0 +2024-07-28 02:58:07,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.623e+01 5.767e+01 6.398e+01 7.244e+01 1.008e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 02:58:07,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100197.33333333333, ans=0.1 +2024-07-28 02:58:24,513 INFO [train.py:1114] (0/4) Epoch 8, batch 3600, loss[loss=0.2009, simple_loss=0.2873, pruned_loss=0.05728, over 4974.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2972, pruned_loss=0.06477, over 940768.69 frames. ], batch size: 13, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:59:03,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=100237.33333333333, ans=0.0 +2024-07-28 02:59:09,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=100237.33333333333, ans=0.0 +2024-07-28 02:59:15,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=100250.66666666667, ans=0.0 +2024-07-28 02:59:21,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=100264.0, ans=0.0 +2024-07-28 02:59:25,328 INFO [train.py:1114] (0/4) Epoch 8, batch 3650, loss[loss=0.2009, simple_loss=0.2805, pruned_loss=0.06066, over 4904.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2972, pruned_loss=0.06517, over 941051.28 frames. ], batch size: 15, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:59:30,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=100277.33333333333, ans=0.07 +2024-07-28 02:59:37,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=100290.66666666667, ans=0.125 +2024-07-28 02:59:38,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. 
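limit=15.0

The `Whitening: ... metric=M vs. limit=L` lines report a per-module statistic of how far the activations' channel covariance is from being white (proportional to the identity); a corrective penalty applies only when the metric exceeds its scheduled limit, so entries like metric=2.86 vs. limit=15.0 are no-ops. One standard such statistic, sketched here as a stand-in (the exact formula in scaling.py may differ), is the ratio E[lambda^2] / E[lambda]^2 over the covariance eigenvalues, which is 1.0 for perfectly whitened features and grows as a few directions dominate:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """x: (num_frames, num_channels) activations. Returns
    E[lambda^2] / E[lambda]^2 over the eigenvalues of the channel
    covariance; equals 1.0 iff the covariance is a multiple of the
    identity. Illustrative stand-in, not scaling.py's exact formula."""
    x = x - x.mean(dim=0)                           # center each channel
    cov = (x.T @ x) / x.shape[0]                    # (C, C) covariance
    mean_eig = torch.diagonal(cov).mean()           # trace(cov)/C   == E[lambda]
    mean_eig_sq = torch.diagonal(cov @ cov).mean()  # trace(cov^2)/C == E[lambda^2]
    return mean_eig_sq / mean_eig.clamp(min=1e-20) ** 2
```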
+2024-07-28 02:59:54,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=100330.66666666667, ans=0.2 +2024-07-28 02:59:55,976 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.900e+01 6.500e+01 7.963e+01 1.457e+02, threshold=1.300e+02, percent-clipped=1.0 +2024-07-28 02:59:57,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=100330.66666666667, ans=0.0 +2024-07-28 02:59:57,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=100330.66666666667, ans=0.09899494936611666 +2024-07-28 02:59:59,598 INFO [train.py:1114] (0/4) Epoch 8, batch 3700, loss[loss=0.2079, simple_loss=0.2999, pruned_loss=0.05795, over 4933.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2978, pruned_loss=0.06553, over 942204.08 frames. ], batch size: 14, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 03:00:06,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=100344.0, ans=0.2 +2024-07-28 03:00:14,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100370.66666666667, ans=0.125 +2024-07-28 03:00:16,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=100370.66666666667, ans=0.07 +2024-07-28 03:00:20,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=100370.66666666667, ans=0.0 +2024-07-28 03:00:30,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-07-28 03:00:31,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0 +2024-07-28 03:00:32,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100397.33333333333, ans=0.0 +2024-07-28 03:00:34,764 INFO [train.py:1114] (0/4) Epoch 8, batch 3750, loss[loss=0.1754, simple_loss=0.2494, pruned_loss=0.05073, over 4812.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2965, pruned_loss=0.06458, over 943320.21 frames. ], batch size: 11, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:00:34,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100410.66666666667, ans=0.1 +2024-07-28 03:00:36,174 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:00:43,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.43 vs. limit=12.0 +2024-07-28 03:00:43,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=9.30 vs.
limit=12.0 +2024-07-28 03:00:47,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100437.33333333333, ans=0.1 +2024-07-28 03:00:47,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=100437.33333333333, ans=0.2 +2024-07-28 03:00:56,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=100450.66666666667, ans=0.2 +2024-07-28 03:01:02,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=100464.0, ans=10.0 +2024-07-28 03:01:05,677 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.636e+01 6.396e+01 7.360e+01 1.035e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 03:01:08,368 INFO [train.py:1114] (0/4) Epoch 8, batch 3800, loss[loss=0.2482, simple_loss=0.3326, pruned_loss=0.08189, over 4815.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2967, pruned_loss=0.06521, over 940808.39 frames. ], batch size: 14, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:01:20,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=100504.0, ans=0.0 +2024-07-28 03:01:43,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=100517.33333333333, ans=0.125 +2024-07-28 03:01:45,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100517.33333333333, ans=0.1 +2024-07-28 03:01:45,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=100517.33333333333, ans=0.125 +2024-07-28 03:01:50,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=100530.66666666667, ans=0.025 +2024-07-28 03:01:55,212 INFO [train.py:1114] (0/4) Epoch 8, batch 3850, loss[loss=0.2337, simple_loss=0.3141, pruned_loss=0.07664, over 4634.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2968, pruned_loss=0.06548, over 941534.31 frames. ], batch size: 16, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:01:56,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=100544.0, ans=0.2 +2024-07-28 03:02:03,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100557.33333333333, ans=0.1 +2024-07-28 03:02:05,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100557.33333333333, ans=0.125 +2024-07-28 03:02:13,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.79 vs. 
limit=22.5 +2024-07-28 03:02:14,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=100584.0, ans=0.125 +2024-07-28 03:02:18,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100584.0, ans=0.125 +2024-07-28 03:02:19,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100584.0, ans=0.125 +2024-07-28 03:02:29,297 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.806e+01 6.374e+01 7.296e+01 1.382e+02, threshold=1.275e+02, percent-clipped=1.0 +2024-07-28 03:02:36,117 INFO [train.py:1114] (0/4) Epoch 8, batch 3900, loss[loss=0.2328, simple_loss=0.3193, pruned_loss=0.07313, over 4817.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2967, pruned_loss=0.06526, over 942022.77 frames. ], batch size: 14, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:02:42,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=100624.0, ans=0.125 +2024-07-28 03:02:49,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.92 vs. limit=15.0 +2024-07-28 03:02:57,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=100637.33333333333, ans=0.0 +2024-07-28 03:02:59,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.81 vs. limit=5.0 +2024-07-28 03:03:14,045 INFO [train.py:1114] (0/4) Epoch 8, batch 3950, loss[loss=0.2568, simple_loss=0.3521, pruned_loss=0.08071, over 4827.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2973, pruned_loss=0.06538, over 944154.52 frames. ], batch size: 16, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:03:44,904 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.725e+01 6.427e+01 7.427e+01 2.052e+02, threshold=1.285e+02, percent-clipped=1.0 +2024-07-28 03:04:03,406 INFO [train.py:1114] (0/4) Epoch 8, batch 4000, loss[loss=0.2113, simple_loss=0.2895, pruned_loss=0.06655, over 4774.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2979, pruned_loss=0.06598, over 940851.51 frames. ], batch size: 12, lr: 9.46e-03, grad_scale: 32.0 +2024-07-28 03:04:03,545 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:04:11,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=100757.33333333333, ans=0.125 +2024-07-28 03:04:14,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.47 vs. limit=15.0 +2024-07-28 03:04:22,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100757.33333333333, ans=0.125 +2024-07-28 03:04:26,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.28 vs. 
limit=22.5 +2024-07-28 03:04:31,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=100784.0, ans=0.2 +2024-07-28 03:04:36,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=100784.0, ans=0.2 +2024-07-28 03:04:36,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=100797.33333333333, ans=0.125 +2024-07-28 03:04:38,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100797.33333333333, ans=0.125 +2024-07-28 03:04:42,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100797.33333333333, ans=0.125 +2024-07-28 03:04:44,249 INFO [train.py:1114] (0/4) Epoch 8, batch 4050, loss[loss=0.2447, simple_loss=0.3169, pruned_loss=0.08625, over 3403.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2973, pruned_loss=0.06542, over 939645.69 frames. ], batch size: 35, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:04:45,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=100810.66666666667, ans=0.125 +2024-07-28 03:05:17,166 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.984e+01 6.561e+01 7.849e+01 1.305e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-28 03:05:19,923 INFO [train.py:1114] (0/4) Epoch 8, batch 4100, loss[loss=0.215, simple_loss=0.3161, pruned_loss=0.057, over 4902.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2975, pruned_loss=0.06575, over 938161.29 frames. ], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:05:22,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100877.33333333333, ans=0.1 +2024-07-28 03:05:22,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=100877.33333333333, ans=0.5 +2024-07-28 03:05:24,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=100877.33333333333, ans=0.0 +2024-07-28 03:05:33,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100904.0, ans=0.125 +2024-07-28 03:05:47,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=100917.33333333333, ans=0.1 +2024-07-28 03:06:46,140 INFO [train.py:1114] (0/4) Epoch 8, batch 4150, loss[loss=0.1967, simple_loss=0.2856, pruned_loss=0.05385, over 4838.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2964, pruned_loss=0.06541, over 938213.22 frames. ], batch size: 13, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:06:48,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. 
limit=6.0 +2024-07-28 03:07:04,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=100970.66666666667, ans=0.2 +2024-07-28 03:07:18,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=100984.0, ans=0.1 +2024-07-28 03:07:29,979 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 6.007e+01 6.703e+01 7.835e+01 1.474e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 03:07:30,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=100997.33333333333, ans=0.05 +2024-07-28 03:07:52,663 INFO [train.py:1114] (0/4) Epoch 8, batch 4200, loss[loss=0.2638, simple_loss=0.3468, pruned_loss=0.09039, over 4908.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2964, pruned_loss=0.0652, over 939090.30 frames. ], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:08:01,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.04 vs. limit=15.0 +2024-07-28 03:08:04,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.16 vs. limit=15.0 +2024-07-28 03:09:17,407 INFO [train.py:1114] (0/4) Epoch 8, batch 4250, loss[loss=0.1873, simple_loss=0.269, pruned_loss=0.05281, over 4641.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2975, pruned_loss=0.06588, over 939809.53 frames. ], batch size: 12, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:19,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=101077.33333333333, ans=0.125 +2024-07-28 03:09:38,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101117.33333333333, ans=0.1 +2024-07-28 03:09:48,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101130.66666666667, ans=0.125 +2024-07-28 03:09:49,646 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.910e+01 6.569e+01 7.778e+01 1.465e+02, threshold=1.314e+02, percent-clipped=1.0 +2024-07-28 03:09:52,213 INFO [train.py:1114] (0/4) Epoch 8, batch 4300, loss[loss=0.2351, simple_loss=0.3241, pruned_loss=0.07301, over 4758.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2988, pruned_loss=0.06668, over 939380.99 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:57,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=101144.0, ans=0.0 +2024-07-28 03:10:04,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101157.33333333333, ans=0.125 +2024-07-28 03:10:07,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=101170.66666666667, ans=0.125 +2024-07-28 03:10:17,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=101184.0, ans=0.0 +2024-07-28 03:10:25,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.62 vs. 
limit=22.5 +2024-07-28 03:10:27,455 INFO [train.py:1114] (0/4) Epoch 8, batch 4350, loss[loss=0.2016, simple_loss=0.2839, pruned_loss=0.05962, over 4763.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2981, pruned_loss=0.06589, over 940565.85 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:10:45,178 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:10:55,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101264.0, ans=0.125 +2024-07-28 03:10:55,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=101264.0, ans=0.125 +2024-07-28 03:10:58,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+01 5.773e+01 6.336e+01 7.369e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 03:11:01,089 INFO [train.py:1114] (0/4) Epoch 8, batch 4400, loss[loss=0.1783, simple_loss=0.2727, pruned_loss=0.04195, over 4818.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2983, pruned_loss=0.06567, over 940459.74 frames. ], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:03,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101277.33333333333, ans=0.125 +2024-07-28 03:11:08,589 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.816e-02 +2024-07-28 03:11:20,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=101317.33333333333, ans=0.0 +2024-07-28 03:11:24,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=101317.33333333333, ans=0.025 +2024-07-28 03:11:29,049 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-76000.pt +2024-07-28 03:11:36,968 INFO [train.py:1114] (0/4) Epoch 8, batch 4450, loss[loss=0.2139, simple_loss=0.2869, pruned_loss=0.07048, over 4936.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2982, pruned_loss=0.06547, over 938750.22 frames. ], batch size: 12, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:39,087 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:11:53,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=101370.66666666667, ans=0.025 +2024-07-28 03:11:55,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=101370.66666666667, ans=0.025 +2024-07-28 03:11:55,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=101370.66666666667, ans=0.125 +2024-07-28 03:12:09,298 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.991e+01 6.173e+01 7.006e+01 8.907e+01 1.361e+02, threshold=1.401e+02, percent-clipped=3.0 +2024-07-28 03:12:12,342 INFO [train.py:1114] (0/4) Epoch 8, batch 4500, loss[loss=0.243, simple_loss=0.3327, pruned_loss=0.07658, over 4741.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2991, pruned_loss=0.06559, over 938538.02 frames. 
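], batch size: 14, lr: 9.43e-03, grad_scale: 32.0

Most of the volume in this log is `ScheduledFloat` traces: named hyper-parameters (dropout_p, the various skip_rate and prob values, scale_min, ...) are not constants but functions of the global batch_count, which is why the same name keeps reappearing with a drifting batch_count and an `ans=` value. A minimal sketch of the idea, assuming piecewise-linear interpolation between (batch_count, value) breakpoints (the class name matches the logs; the internals are guessed):

```python
class ScheduledFloat:
    """A float hyper-parameter scheduled on the global batch count.
    Sketch only; the scaling.py version carries more machinery."""

    def __init__(self, *points):
        self.points = sorted(points)  # (batch_count, value) breakpoints

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:  # linear interpolation on this segment
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# e.g. a dropout rate decaying from 0.3 to 0.1 over the first 20k batches
# (hypothetical breakpoints) has long since flattened out at this point:
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(101424.0))  # -> 0.1, matching the ans=0.1 dropout_p lines above
```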
+2024-07-28 03:12:13,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101410.66666666667, ans=0.125 +2024-07-28 03:12:14,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=101410.66666666667, ans=0.025 +2024-07-28 03:12:22,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101424.0, ans=0.125 +2024-07-28 03:12:29,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=101437.33333333333, ans=0.2 +2024-07-28 03:12:37,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=101450.66666666667, ans=0.2 +2024-07-28 03:12:38,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=101450.66666666667, ans=0.2 +2024-07-28 03:12:46,612 INFO [train.py:1114] (0/4) Epoch 8, batch 4550, loss[loss=0.2078, simple_loss=0.2884, pruned_loss=0.06356, over 4905.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2981, pruned_loss=0.06524, over 940503.00 frames. ], batch size: 13, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:12:49,434 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:12:54,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=101477.33333333333, ans=10.0 +2024-07-28 03:12:55,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0 +2024-07-28 03:12:55,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=101490.66666666667, ans=0.0 +2024-07-28 03:12:56,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=101490.66666666667, ans=0.125 +2024-07-28 03:12:58,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=101490.66666666667, ans=0.0 +2024-07-28 03:13:00,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=101490.66666666667, ans=0.125 +2024-07-28 03:13:03,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.08 vs. limit=10.0 +2024-07-28 03:13:09,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=101517.33333333333, ans=0.0 +2024-07-28 03:13:19,424 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.796e+01 6.389e+01 7.358e+01 1.083e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 03:13:22,140 INFO [train.py:1114] (0/4) Epoch 8, batch 4600, loss[loss=0.2518, simple_loss=0.3309, pruned_loss=0.08633, over 4519.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2972, pruned_loss=0.0653, over 939105.09 frames.
], batch size: 21, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:23,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=101544.0, ans=0.0 +2024-07-28 03:13:33,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-07-28 03:13:35,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.84 vs. limit=15.0 +2024-07-28 03:13:38,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101570.66666666667, ans=0.125 +2024-07-28 03:13:51,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=101597.33333333333, ans=0.07 +2024-07-28 03:13:55,118 INFO [train.py:1114] (0/4) Epoch 8, batch 4650, loss[loss=0.1937, simple_loss=0.2885, pruned_loss=0.04952, over 4812.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2979, pruned_loss=0.06557, over 940687.78 frames. ], batch size: 16, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:59,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=101610.66666666667, ans=22.5 +2024-07-28 03:14:04,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.79 vs. limit=15.0 +2024-07-28 03:14:17,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=101650.66666666667, ans=0.2 +2024-07-28 03:14:18,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=101650.66666666667, ans=0.0 +2024-07-28 03:14:18,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=101650.66666666667, ans=0.125 +2024-07-28 03:14:27,598 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.774e+01 6.444e+01 7.624e+01 1.056e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:14:29,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=101677.33333333333, ans=0.2 +2024-07-28 03:14:30,295 INFO [train.py:1114] (0/4) Epoch 8, batch 4700, loss[loss=0.1855, simple_loss=0.261, pruned_loss=0.05501, over 4706.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2978, pruned_loss=0.06561, over 937672.94 frames. 
], batch size: 11, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:14:33,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101677.33333333333, ans=0.125 +2024-07-28 03:14:33,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=101677.33333333333, ans=0.125 +2024-07-28 03:14:35,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101677.33333333333, ans=0.1 +2024-07-28 03:14:36,479 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:14:54,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=101717.33333333333, ans=0.07 +2024-07-28 03:15:02,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=101730.66666666667, ans=0.125 +2024-07-28 03:15:04,163 INFO [train.py:1114] (0/4) Epoch 8, batch 4750, loss[loss=0.2493, simple_loss=0.3281, pruned_loss=0.08522, over 4509.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2979, pruned_loss=0.066, over 935665.12 frames. ], batch size: 21, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:09,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.36 vs. limit=15.0 +2024-07-28 03:15:10,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101757.33333333333, ans=0.125 +2024-07-28 03:15:17,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=101770.66666666667, ans=0.125 +2024-07-28 03:15:30,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.68 vs. limit=15.0 +2024-07-28 03:15:37,282 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.718e+01 6.515e+01 7.341e+01 9.928e+01, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 03:15:40,386 INFO [train.py:1114] (0/4) Epoch 8, batch 4800, loss[loss=0.1936, simple_loss=0.3048, pruned_loss=0.04119, over 4695.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2979, pruned_loss=0.06578, over 933465.01 frames. 
], batch size: 13, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:48,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101824.0, ans=0.1 +2024-07-28 03:15:57,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=101837.33333333333, ans=0.1 +2024-07-28 03:16:02,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=101850.66666666667, ans=0.0 +2024-07-28 03:16:06,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=101850.66666666667, ans=0.0 +2024-07-28 03:16:11,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=101864.0, ans=0.025 +2024-07-28 03:16:17,649 INFO [train.py:1114] (0/4) Epoch 8, batch 4850, loss[loss=0.2189, simple_loss=0.3151, pruned_loss=0.06133, over 4738.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2984, pruned_loss=0.0662, over 932940.23 frames. ], batch size: 14, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:16:19,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=101877.33333333333, ans=0.035 +2024-07-28 03:16:29,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=101890.66666666667, ans=0.2 +2024-07-28 03:16:46,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.14 vs. limit=15.0 +2024-07-28 03:16:51,213 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.836e+01 5.718e+01 6.267e+01 6.950e+01 1.595e+02, threshold=1.253e+02, percent-clipped=1.0 +2024-07-28 03:17:01,612 INFO [train.py:1114] (0/4) Epoch 8, batch 4900, loss[loss=0.1853, simple_loss=0.283, pruned_loss=0.04375, over 4755.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2972, pruned_loss=0.06556, over 934896.08 frames. ], batch size: 13, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:07,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=101944.0, ans=0.07 +2024-07-28 03:17:08,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.95 vs. limit=22.5 +2024-07-28 03:17:11,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=101957.33333333333, ans=0.0 +2024-07-28 03:17:14,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=101957.33333333333, ans=0.5 +2024-07-28 03:17:17,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.11 vs. 
limit=10.0 +2024-07-28 03:17:19,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=101970.66666666667, ans=0.2 +2024-07-28 03:17:22,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=101984.0, ans=0.125 +2024-07-28 03:17:22,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101984.0, ans=0.125 +2024-07-28 03:17:25,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=101984.0, ans=0.125 +2024-07-28 03:17:31,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=101997.33333333333, ans=0.2 +2024-07-28 03:17:31,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.39 vs. limit=15.0 +2024-07-28 03:17:35,649 INFO [train.py:1114] (0/4) Epoch 8, batch 4950, loss[loss=0.3094, simple_loss=0.3662, pruned_loss=0.1263, over 3460.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2988, pruned_loss=0.06618, over 931838.63 frames. ], batch size: 35, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:44,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=102024.0, ans=0.0 +2024-07-28 03:17:44,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=102024.0, ans=0.125 +2024-07-28 03:17:58,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102050.66666666667, ans=0.125 +2024-07-28 03:18:07,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.77 vs. limit=22.5 +2024-07-28 03:18:08,034 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 5.855e+01 6.357e+01 7.218e+01 9.647e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 03:18:09,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102064.0, ans=0.1 +2024-07-28 03:18:10,643 INFO [train.py:1114] (0/4) Epoch 8, batch 5000, loss[loss=0.2035, simple_loss=0.3152, pruned_loss=0.04596, over 4670.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2982, pruned_loss=0.06557, over 935774.63 frames. ], batch size: 14, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:18:18,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=102090.66666666667, ans=0.125 +2024-07-28 03:18:18,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102090.66666666667, ans=0.125 +2024-07-28 03:18:35,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=102117.33333333333, ans=10.0 +2024-07-28 03:18:37,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102117.33333333333, ans=0.125 +2024-07-28 03:18:45,826 INFO [train.py:1114] (0/4) Epoch 8, batch 5050, loss[loss=0.173, simple_loss=0.2617, pruned_loss=0.04217, over 4859.00 frames. 
], tot_loss[loss=0.2133, simple_loss=0.2965, pruned_loss=0.06507, over 938071.65 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:00,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.24 vs. limit=22.5 +2024-07-28 03:19:04,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102170.66666666667, ans=0.0 +2024-07-28 03:19:08,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102184.0, ans=0.0 +2024-07-28 03:19:17,815 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.666e+01 5.803e+01 6.269e+01 7.279e+01 1.149e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 03:19:18,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=102197.33333333333, ans=0.125 +2024-07-28 03:19:19,901 INFO [train.py:1114] (0/4) Epoch 8, batch 5100, loss[loss=0.1936, simple_loss=0.2762, pruned_loss=0.05557, over 4783.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2973, pruned_loss=0.06542, over 935179.57 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:22,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=102210.66666666667, ans=0.2 +2024-07-28 03:19:28,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=102224.0, ans=0.025 +2024-07-28 03:19:29,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102224.0, ans=0.0 +2024-07-28 03:19:30,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.87 vs. limit=15.0 +2024-07-28 03:19:32,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=102237.33333333333, ans=0.0 +2024-07-28 03:19:36,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102237.33333333333, ans=0.125 +2024-07-28 03:19:42,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=102250.66666666667, ans=0.125 +2024-07-28 03:19:44,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=6.0 +2024-07-28 03:19:45,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=102250.66666666667, ans=0.0 +2024-07-28 03:19:54,810 INFO [train.py:1114] (0/4) Epoch 8, batch 5150, loss[loss=0.2222, simple_loss=0.3178, pruned_loss=0.06332, over 4847.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2985, pruned_loss=0.06572, over 936423.75 frames. ], batch size: 16, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:20:00,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. 
limit=15.0 +2024-07-28 03:20:02,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102290.66666666667, ans=0.0 +2024-07-28 03:20:06,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=102290.66666666667, ans=0.2 +2024-07-28 03:20:09,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=102304.0, ans=0.125 +2024-07-28 03:20:12,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0 +2024-07-28 03:20:26,082 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.815e+01 6.319e+01 7.025e+01 9.950e+01, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 03:20:30,183 INFO [train.py:1114] (0/4) Epoch 8, batch 5200, loss[loss=0.2154, simple_loss=0.2956, pruned_loss=0.0676, over 4660.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2974, pruned_loss=0.06511, over 936375.66 frames. ], batch size: 14, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:20:43,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=12.0 +2024-07-28 03:21:05,406 INFO [train.py:1114] (0/4) Epoch 8, batch 5250, loss[loss=0.1713, simple_loss=0.2706, pruned_loss=0.03603, over 4901.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2959, pruned_loss=0.06444, over 936067.20 frames. ], batch size: 13, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:13,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.75 vs. limit=10.0 +2024-07-28 03:21:13,539 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.337e-02 +2024-07-28 03:21:20,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=102437.33333333333, ans=0.025 +2024-07-28 03:21:22,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=102437.33333333333, ans=0.2 +2024-07-28 03:21:31,682 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.57 vs. limit=22.5 +2024-07-28 03:21:36,703 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.809e+01 6.446e+01 7.224e+01 1.154e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:21:38,683 INFO [train.py:1114] (0/4) Epoch 8, batch 5300, loss[loss=0.2128, simple_loss=0.2961, pruned_loss=0.06476, over 4626.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2961, pruned_loss=0.06454, over 934562.92 frames. 
], batch size: 16, lr: 9.38e-03, grad_scale: 32.0
+2024-07-28 03:21:47,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=102490.66666666667, ans=0.0
+2024-07-28 03:21:52,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=102504.0, ans=0.0
+2024-07-28 03:21:54,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=102504.0, ans=0.04949747468305833
+2024-07-28 03:22:05,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=102530.66666666667, ans=0.05
+2024-07-28 03:22:11,897 INFO [train.py:1114] (0/4) Epoch 8, batch 5350, loss[loss=0.1636, simple_loss=0.2604, pruned_loss=0.03339, over 4540.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2972, pruned_loss=0.06481, over 936532.96 frames. ], batch size: 10, lr: 9.38e-03, grad_scale: 32.0
+2024-07-28 03:22:25,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=102570.66666666667, ans=0.025
+2024-07-28 03:22:26,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=102570.66666666667, ans=0.125
+2024-07-28 03:22:31,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=102584.0, ans=0.0
+2024-07-28 03:22:36,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=102584.0, ans=0.125
+2024-07-28 03:22:43,563 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 6.073e+01 6.739e+01 7.548e+01 1.442e+02, threshold=1.348e+02, percent-clipped=1.0
+2024-07-28 03:22:44,332 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:22:45,665 INFO [train.py:1114] (0/4) Epoch 8, batch 5400, loss[loss=0.2379, simple_loss=0.3247, pruned_loss=0.07553, over 4211.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2983, pruned_loss=0.06594, over 930754.33 frames. ], batch size: 25, lr: 9.37e-03, grad_scale: 32.0
+2024-07-28 03:22:47,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102610.66666666667, ans=0.125
+2024-07-28 03:22:51,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102624.0, ans=0.1
+2024-07-28 03:22:53,526 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.97 vs. limit=15.0
+2024-07-28 03:22:54,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102624.0, ans=0.1
+2024-07-28 03:22:56,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=102624.0, ans=0.125
+2024-07-28 03:23:08,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102650.66666666667, ans=0.1
+2024-07-28 03:23:09,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=102650.66666666667, ans=0.2
+2024-07-28 03:23:11,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=102650.66666666667, ans=0.07
+2024-07-28 03:23:13,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=102664.0, ans=0.04949747468305833
+2024-07-28 03:23:14,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=102664.0, ans=0.025
+2024-07-28 03:23:20,678 INFO [train.py:1114] (0/4) Epoch 8, batch 5450, loss[loss=0.1996, simple_loss=0.2746, pruned_loss=0.06231, over 4711.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2974, pruned_loss=0.06568, over 933374.37 frames. ], batch size: 11, lr: 9.37e-03, grad_scale: 32.0
+2024-07-28 03:23:27,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=102690.66666666667, ans=0.0
+2024-07-28 03:23:35,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=102704.0, ans=0.2
+2024-07-28 03:23:36,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=102704.0, ans=0.1
+2024-07-28 03:23:39,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=15.0
+2024-07-28 03:23:43,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.32 vs. limit=15.0
+2024-07-28 03:23:54,561 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.203e+01 6.756e+01 7.672e+01 1.108e+02, threshold=1.351e+02, percent-clipped=0.0
+2024-07-28 03:23:56,654 INFO [train.py:1114] (0/4) Epoch 8, batch 5500, loss[loss=0.2199, simple_loss=0.3079, pruned_loss=0.0659, over 4211.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2973, pruned_loss=0.06618, over 930787.43 frames. ], batch size: 25, lr: 9.37e-03, grad_scale: 32.0
+2024-07-28 03:23:57,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102744.0, ans=0.125
+2024-07-28 03:23:57,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=102744.0, ans=0.0
+2024-07-28 03:24:11,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=102770.66666666667, ans=0.04949747468305833
+2024-07-28 03:24:16,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5
+2024-07-28 03:24:18,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=102784.0, ans=0.2
+2024-07-28 03:24:29,648 INFO [train.py:1114] (0/4) Epoch 8, batch 5550, loss[loss=0.1971, simple_loss=0.2845, pruned_loss=0.05489, over 4711.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2977, pruned_loss=0.0665, over 933110.66 frames. ], batch size: 12, lr: 9.36e-03, grad_scale: 32.0
+2024-07-28 03:24:34,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.72 vs. limit=22.5
+2024-07-28 03:24:41,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.07 vs. limit=15.0
+2024-07-28 03:24:42,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102837.33333333333, ans=0.125
+2024-07-28 03:24:43,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102837.33333333333, ans=0.125
+2024-07-28 03:25:01,405 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.969e+01 5.947e+01 6.604e+01 7.771e+01 1.160e+02, threshold=1.321e+02, percent-clipped=0.0
+2024-07-28 03:25:03,445 INFO [train.py:1114] (0/4) Epoch 8, batch 5600, loss[loss=0.2015, simple_loss=0.2915, pruned_loss=0.05572, over 4734.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2985, pruned_loss=0.06635, over 934855.69 frames. ], batch size: 14, lr: 9.36e-03, grad_scale: 32.0
+2024-07-28 03:25:08,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=102877.33333333333, ans=0.0
+2024-07-28 03:25:10,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102877.33333333333, ans=0.125
+2024-07-28 03:25:20,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=102904.0, ans=0.125
+2024-07-28 03:25:20,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=102904.0, ans=0.125
+2024-07-28 03:25:28,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=102917.33333333333, ans=15.0
+2024-07-28 03:25:38,383 INFO [train.py:1114] (0/4) Epoch 8, batch 5650, loss[loss=0.2087, simple_loss=0.3017, pruned_loss=0.0578, over 4479.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2973, pruned_loss=0.06556, over 937273.65 frames. ], batch size: 21, lr: 9.36e-03, grad_scale: 32.0
+2024-07-28 03:25:46,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102957.33333333333, ans=0.125
+2024-07-28 03:25:52,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=102970.66666666667, ans=0.0
+2024-07-28 03:26:09,445 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.666e+01 6.096e+01 6.693e+01 9.432e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-28 03:26:11,539 INFO [train.py:1114] (0/4) Epoch 8, batch 5700, loss[loss=0.1853, simple_loss=0.2612, pruned_loss=0.0547, over 4693.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2966, pruned_loss=0.06464, over 938416.58 frames. ], batch size: 13, lr: 9.35e-03, grad_scale: 32.0
+2024-07-28 03:26:14,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103010.66666666667, ans=0.125
+2024-07-28 03:26:17,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=103010.66666666667, ans=0.125
+2024-07-28 03:26:17,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=103010.66666666667, ans=0.125
+2024-07-28 03:26:18,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.75 vs. limit=6.0
+2024-07-28 03:26:38,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103050.66666666667, ans=0.1
+2024-07-28 03:26:44,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=103064.0, ans=0.0
+2024-07-28 03:26:46,892 INFO [train.py:1114] (0/4) Epoch 8, batch 5750, loss[loss=0.261, simple_loss=0.3356, pruned_loss=0.09318, over 4747.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2979, pruned_loss=0.06525, over 938343.46 frames. ], batch size: 19, lr: 9.35e-03, grad_scale: 32.0
+2024-07-28 03:26:53,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=103090.66666666667, ans=0.025
+2024-07-28 03:26:53,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103090.66666666667, ans=0.125
+2024-07-28 03:27:09,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=103117.33333333333, ans=0.0
+2024-07-28 03:27:18,490 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.742e+01 5.884e+01 6.600e+01 7.288e+01 1.127e+02, threshold=1.320e+02, percent-clipped=0.0
+2024-07-28 03:27:20,738 INFO [train.py:1114] (0/4) Epoch 8, batch 5800, loss[loss=0.242, simple_loss=0.3268, pruned_loss=0.07855, over 4763.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2973, pruned_loss=0.06486, over 937706.56 frames. ], batch size: 19, lr: 9.35e-03, grad_scale: 32.0
+2024-07-28 03:27:31,043 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0
+2024-07-28 03:27:31,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103144.0, ans=0.125
+2024-07-28 03:27:46,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=103184.0, ans=0.04949747468305833
+2024-07-28 03:27:47,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=103184.0, ans=0.125
+2024-07-28 03:27:50,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.11 vs. limit=6.0
+2024-07-28 03:27:51,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103184.0, ans=0.1
+2024-07-28 03:27:52,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103197.33333333333, ans=0.1
+2024-07-28 03:27:59,761 INFO [train.py:1114] (0/4) Epoch 8, batch 5850, loss[loss=0.2302, simple_loss=0.3067, pruned_loss=0.0768, over 4492.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2972, pruned_loss=0.06515, over 937921.87 frames. ], batch size: 21, lr: 9.35e-03, grad_scale: 32.0
+2024-07-28 03:28:11,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=103224.0, ans=0.0
+2024-07-28 03:28:15,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0
+2024-07-28 03:28:20,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.77 vs. limit=22.5
+2024-07-28 03:28:21,188 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=15.0
+2024-07-28 03:28:30,672 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.776e+01 6.352e+01 7.126e+01 1.312e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-28 03:28:30,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=103264.0, ans=0.2
+2024-07-28 03:28:38,759 INFO [train.py:1114] (0/4) Epoch 8, batch 5900, loss[loss=0.2089, simple_loss=0.3013, pruned_loss=0.05826, over 4684.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2968, pruned_loss=0.06522, over 938224.27 frames. ], batch size: 15, lr: 9.34e-03, grad_scale: 32.0
+2024-07-28 03:28:46,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=103290.66666666667, ans=0.025
+2024-07-28 03:28:47,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=103290.66666666667, ans=0.2
+2024-07-28 03:28:50,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=103290.66666666667, ans=0.0
+2024-07-28 03:28:50,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103290.66666666667, ans=0.0
+2024-07-28 03:29:03,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=103317.33333333333, ans=0.125
+2024-07-28 03:29:14,063 INFO [train.py:1114] (0/4) Epoch 8, batch 5950, loss[loss=0.2398, simple_loss=0.313, pruned_loss=0.08329, over 4690.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2966, pruned_loss=0.06466, over 940049.03 frames. ], batch size: 15, lr: 9.34e-03, grad_scale: 32.0
+2024-07-28 03:29:20,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=103357.33333333333, ans=0.2
+2024-07-28 03:29:22,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103357.33333333333, ans=0.125
+2024-07-28 03:29:29,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103370.66666666667, ans=0.125
+2024-07-28 03:29:29,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0
+2024-07-28 03:29:40,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.42 vs. limit=15.0
+2024-07-28 03:29:45,387 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+01 5.778e+01 6.625e+01 7.689e+01 1.053e+02, threshold=1.325e+02, percent-clipped=0.0
+2024-07-28 03:29:47,509 INFO [train.py:1114] (0/4) Epoch 8, batch 6000, loss[loss=0.2472, simple_loss=0.3204, pruned_loss=0.08703, over 4308.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2972, pruned_loss=0.06529, over 937508.86 frames. ], batch size: 25, lr: 9.34e-03, grad_scale: 32.0
+2024-07-28 03:29:47,510 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 03:30:06,861 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.1796, simple_loss=0.2837, pruned_loss=0.03775, over 944034.00 frames.
+2024-07-28 03:30:06,862 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 03:30:11,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=103410.66666666667, ans=0.125
+2024-07-28 03:30:12,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103424.0, ans=0.1
+2024-07-28 03:30:19,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103437.33333333333, ans=0.1
+2024-07-28 03:30:27,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103450.66666666667, ans=0.0
+2024-07-28 03:30:28,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.60 vs. limit=10.0
+2024-07-28 03:30:28,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5
+2024-07-28 03:30:36,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=103464.0, ans=0.125
+2024-07-28 03:30:42,205 INFO [train.py:1114] (0/4) Epoch 8, batch 6050, loss[loss=0.2113, simple_loss=0.2798, pruned_loss=0.07146, over 4779.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2952, pruned_loss=0.06462, over 938588.73 frames. ], batch size: 12, lr: 9.33e-03, grad_scale: 32.0
+2024-07-28 03:31:01,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=103517.33333333333, ans=0.09899494936611666
+2024-07-28 03:31:14,073 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.635e+01 6.111e+01 6.957e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 03:31:16,061 INFO [train.py:1114] (0/4) Epoch 8, batch 6100, loss[loss=0.242, simple_loss=0.3358, pruned_loss=0.07408, over 4677.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2937, pruned_loss=0.06363, over 938290.10 frames. ], batch size: 15, lr: 9.33e-03, grad_scale: 32.0
+2024-07-28 03:31:23,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.57 vs. limit=15.0
+2024-07-28 03:31:40,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=103584.0, ans=0.025
+2024-07-28 03:31:54,423 INFO [train.py:1114] (0/4) Epoch 8, batch 6150, loss[loss=0.2714, simple_loss=0.3284, pruned_loss=0.1072, over 3409.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2943, pruned_loss=0.06379, over 937125.02 frames. ], batch size: 35, lr: 9.33e-03, grad_scale: 32.0
+2024-07-28 03:31:56,093 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.01 vs. limit=22.5
+2024-07-28 03:32:00,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103624.0, ans=0.125
+2024-07-28 03:32:22,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.13 vs. limit=15.0
+2024-07-28 03:32:27,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=103664.0, ans=0.025
+2024-07-28 03:32:29,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=103664.0, ans=0.125
+2024-07-28 03:32:30,316 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.158e+01 5.994e+01 6.634e+01 7.988e+01 1.219e+02, threshold=1.327e+02, percent-clipped=0.0
+2024-07-28 03:32:31,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103664.0, ans=0.125
+2024-07-28 03:32:32,385 INFO [train.py:1114] (0/4) Epoch 8, batch 6200, loss[loss=0.2138, simple_loss=0.313, pruned_loss=0.05729, over 4739.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2956, pruned_loss=0.06448, over 936429.33 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:32:32,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0
+2024-07-28 03:32:34,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=103677.33333333333, ans=15.0
+2024-07-28 03:32:37,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0
+2024-07-28 03:32:42,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103690.66666666667, ans=0.0
+2024-07-28 03:33:02,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.19 vs. limit=15.0
+2024-07-28 03:33:06,920 INFO [train.py:1114] (0/4) Epoch 8, batch 6250, loss[loss=0.2369, simple_loss=0.3198, pruned_loss=0.07695, over 4816.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2952, pruned_loss=0.06469, over 933194.28 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:34:05,281 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.622e+01 6.181e+01 7.164e+01 1.267e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 03:34:07,335 INFO [train.py:1114] (0/4) Epoch 8, batch 6300, loss[loss=0.1902, simple_loss=0.2624, pruned_loss=0.05904, over 4529.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2962, pruned_loss=0.06545, over 930095.58 frames. ], batch size: 10, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:34:07,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=103810.66666666667, ans=0.035
+2024-07-28 03:34:12,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=103810.66666666667, ans=0.125
+2024-07-28 03:34:13,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103810.66666666667, ans=0.125
+2024-07-28 03:34:16,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=103824.0, ans=0.2
+2024-07-28 03:34:19,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=103824.0, ans=0.2
+2024-07-28 03:34:23,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103837.33333333333, ans=0.125
+2024-07-28 03:34:29,661 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:34:30,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=103850.66666666667, ans=0.2
+2024-07-28 03:34:48,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=103864.0, ans=0.125
+2024-07-28 03:34:50,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=103864.0, ans=0.2
+2024-07-28 03:34:51,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=103864.0, ans=10.0
+2024-07-28 03:34:53,140 INFO [train.py:1114] (0/4) Epoch 8, batch 6350, loss[loss=0.2487, simple_loss=0.3268, pruned_loss=0.0853, over 4550.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2957, pruned_loss=0.06484, over 933955.96 frames. ], batch size: 21, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:35:03,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=103890.66666666667, ans=0.125
+2024-07-28 03:35:11,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.40 vs. limit=15.0
+2024-07-28 03:35:20,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=103917.33333333333, ans=0.0
+2024-07-28 03:35:25,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=103930.66666666667, ans=0.125
+2024-07-28 03:35:37,033 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.697e+01 6.431e+01 7.734e+01 1.122e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 03:35:38,995 INFO [train.py:1114] (0/4) Epoch 8, batch 6400, loss[loss=0.2231, simple_loss=0.3178, pruned_loss=0.06415, over 4634.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2964, pruned_loss=0.06519, over 934745.03 frames. ], batch size: 13, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:35:46,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103957.33333333333, ans=0.1
+2024-07-28 03:35:48,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=12.0
+2024-07-28 03:36:07,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103984.0, ans=0.125
+2024-07-28 03:36:19,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=104010.66666666667, ans=0.125
+2024-07-28 03:36:20,354 INFO [train.py:1114] (0/4) Epoch 8, batch 6450, loss[loss=0.2269, simple_loss=0.3075, pruned_loss=0.07321, over 4671.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2968, pruned_loss=0.06503, over 938615.96 frames. ], batch size: 22, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:36:35,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.37 vs. limit=22.5
+2024-07-28 03:36:39,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=104050.66666666667, ans=0.125
+2024-07-28 03:36:39,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=104050.66666666667, ans=0.125
+2024-07-28 03:36:46,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.49 vs. limit=15.0
+2024-07-28 03:36:48,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=104064.0, ans=10.0
+2024-07-28 03:36:52,542 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.901e+01 6.090e+01 6.968e+01 8.127e+01 1.259e+02, threshold=1.394e+02, percent-clipped=0.0
+2024-07-28 03:36:54,643 INFO [train.py:1114] (0/4) Epoch 8, batch 6500, loss[loss=0.3284, simple_loss=0.3723, pruned_loss=0.1423, over 3769.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2959, pruned_loss=0.06415, over 940188.51 frames. ], batch size: 36, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:36:57,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104077.33333333333, ans=0.1
+2024-07-28 03:36:57,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104077.33333333333, ans=0.1
+2024-07-28 03:37:01,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104090.66666666667, ans=0.1
+2024-07-28 03:37:05,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.63 vs. limit=15.0
+2024-07-28 03:37:06,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=104090.66666666667, ans=0.2
+2024-07-28 03:37:08,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=104090.66666666667, ans=0.1
+2024-07-28 03:37:10,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=104104.0, ans=0.125
+2024-07-28 03:37:25,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.57 vs. limit=15.0
+2024-07-28 03:37:30,146 INFO [train.py:1114] (0/4) Epoch 8, batch 6550, loss[loss=0.1727, simple_loss=0.2497, pruned_loss=0.04787, over 4810.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2963, pruned_loss=0.06386, over 943136.03 frames. ], batch size: 11, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:37:31,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=104144.0, ans=0.0
+2024-07-28 03:37:40,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104157.33333333333, ans=0.125
+2024-07-28 03:37:48,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=104170.66666666667, ans=0.1
+2024-07-28 03:37:52,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104184.0, ans=0.1
+2024-07-28 03:37:54,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=104184.0, ans=0.125
+2024-07-28 03:37:58,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=104197.33333333333, ans=0.025
+2024-07-28 03:38:01,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=104197.33333333333, ans=0.125
+2024-07-28 03:38:02,246 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.695e+01 6.284e+01 7.396e+01 1.281e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 03:38:04,218 INFO [train.py:1114] (0/4) Epoch 8, batch 6600, loss[loss=0.2137, simple_loss=0.2941, pruned_loss=0.06659, over 4931.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2966, pruned_loss=0.06438, over 945047.27 frames. ], batch size: 14, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:38:08,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104210.66666666667, ans=0.1
+2024-07-28 03:38:51,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=104264.0, ans=0.0
+2024-07-28 03:38:53,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.36 vs. limit=10.0
+2024-07-28 03:38:55,423 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:38:57,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=104264.0, ans=0.125
+2024-07-28 03:38:58,612 INFO [train.py:1114] (0/4) Epoch 8, batch 6650, loss[loss=0.2435, simple_loss=0.3221, pruned_loss=0.08246, over 4598.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2972, pruned_loss=0.06465, over 943564.69 frames. ], batch size: 17, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:39:00,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=104277.33333333333, ans=0.125
+2024-07-28 03:39:00,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=104277.33333333333, ans=0.025
+2024-07-28 03:39:02,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=104277.33333333333, ans=0.125
+2024-07-28 03:39:07,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=104290.66666666667, ans=0.0
+2024-07-28 03:39:13,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.45 vs. limit=15.0
+2024-07-28 03:39:34,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=104330.66666666667, ans=0.125
+2024-07-28 03:39:34,790 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+01 5.792e+01 6.278e+01 6.949e+01 1.059e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 03:39:44,724 INFO [train.py:1114] (0/4) Epoch 8, batch 6700, loss[loss=0.2076, simple_loss=0.2857, pruned_loss=0.06479, over 4710.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2971, pruned_loss=0.06479, over 942595.69 frames. ], batch size: 19, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:39:48,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=104344.0, ans=0.125
+2024-07-28 03:40:01,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=104357.33333333333, ans=0.0
+2024-07-28 03:40:05,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104370.66666666667, ans=0.1
+2024-07-28 03:40:22,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=104397.33333333333, ans=0.125
+2024-07-28 03:40:24,513 INFO [train.py:1114] (0/4) Epoch 8, batch 6750, loss[loss=0.2122, simple_loss=0.2896, pruned_loss=0.06737, over 4281.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2975, pruned_loss=0.06489, over 940652.85 frames. ], batch size: 25, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:40:37,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104410.66666666667, ans=0.125
+2024-07-28 03:40:43,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104424.0, ans=0.1
+2024-07-28 03:40:43,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104424.0, ans=0.1
+2024-07-28 03:40:55,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104450.66666666667, ans=0.0
+2024-07-28 03:41:04,953 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.597e+01 6.085e+01 6.894e+01 1.207e+02, threshold=1.217e+02, percent-clipped=0.0
+2024-07-28 03:41:07,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104464.0, ans=0.1
+2024-07-28 03:41:10,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=104477.33333333333, ans=0.125
+2024-07-28 03:41:14,445 INFO [train.py:1114] (0/4) Epoch 8, batch 6800, loss[loss=0.2178, simple_loss=0.3037, pruned_loss=0.06601, over 4637.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2975, pruned_loss=0.0646, over 938865.57 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:41:22,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=104490.66666666667, ans=0.2
+2024-07-28 03:41:23,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.47 vs. limit=15.0
+2024-07-28 03:41:25,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=104490.66666666667, ans=0.0
+2024-07-28 03:41:29,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=104504.0, ans=0.2
+2024-07-28 03:41:35,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104517.33333333333, ans=0.125
+2024-07-28 03:41:44,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.05 vs. limit=15.0
+2024-07-28 03:41:50,267 INFO [train.py:1114] (0/4) Epoch 8, batch 6850, loss[loss=0.2051, simple_loss=0.2975, pruned_loss=0.05633, over 4695.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2973, pruned_loss=0.06485, over 940551.98 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:41:57,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104557.33333333333, ans=0.125
+2024-07-28 03:41:59,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=104557.33333333333, ans=0.2
+2024-07-28 03:42:14,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.66 vs. limit=22.5
+2024-07-28 03:42:14,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.88 vs. limit=15.0
+2024-07-28 03:42:21,581 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.042e+01 6.902e+01 8.247e+01 1.133e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-28 03:42:22,923 INFO [train.py:1114] (0/4) Epoch 8, batch 6900, loss[loss=0.209, simple_loss=0.283, pruned_loss=0.06751, over 4965.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.297, pruned_loss=0.06459, over 942587.95 frames. ], batch size: 13, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:42:25,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=104610.66666666667, ans=0.125
+2024-07-28 03:42:25,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104610.66666666667, ans=0.125
+2024-07-28 03:42:36,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=104637.33333333333, ans=0.0
+2024-07-28 03:42:54,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=104664.0, ans=0.2
+2024-07-28 03:42:55,938 INFO [train.py:1114] (0/4) Epoch 8, batch 6950, loss[loss=0.1988, simple_loss=0.2727, pruned_loss=0.06251, over 4552.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2974, pruned_loss=0.06494, over 940044.26 frames. ], batch size: 10, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:42:56,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=104677.33333333333, ans=0.0
+2024-07-28 03:43:10,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=104704.0, ans=0.2
+2024-07-28 03:43:10,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.80 vs. limit=22.5
+2024-07-28 03:43:23,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104730.66666666667, ans=0.1
+2024-07-28 03:43:28,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.937e+01 5.764e+01 6.206e+01 6.980e+01 1.236e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 03:43:29,736 INFO [train.py:1114] (0/4) Epoch 8, batch 7000, loss[loss=0.2449, simple_loss=0.318, pruned_loss=0.08583, over 4596.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2969, pruned_loss=0.06492, over 938304.83 frames. ], batch size: 17, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:43:34,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=104744.0, ans=0.0
+2024-07-28 03:43:37,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=104757.33333333333, ans=0.09899494936611666
+2024-07-28 03:43:50,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=104784.0, ans=0.125
+2024-07-28 03:43:52,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=104784.0, ans=0.0
+2024-07-28 03:44:04,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0
+2024-07-28 03:44:05,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.15 vs. limit=15.0
+2024-07-28 03:44:07,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=104797.33333333333, ans=0.125
+2024-07-28 03:44:09,222 INFO [train.py:1114] (0/4) Epoch 8, batch 7050, loss[loss=0.2069, simple_loss=0.3005, pruned_loss=0.05666, over 4752.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2978, pruned_loss=0.065, over 941531.60 frames. ], batch size: 19, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:44:11,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104810.66666666667, ans=0.125
+2024-07-28 03:44:12,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=104810.66666666667, ans=0.025
+2024-07-28 03:44:14,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=104810.66666666667, ans=0.2
+2024-07-28 03:44:41,369 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.673e+01 6.225e+01 7.440e+01 1.294e+02, threshold=1.245e+02, percent-clipped=1.0
+2024-07-28 03:44:42,666 INFO [train.py:1114] (0/4) Epoch 8, batch 7100, loss[loss=0.2575, simple_loss=0.3205, pruned_loss=0.09732, over 4806.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2989, pruned_loss=0.06616, over 936590.92 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:44:50,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=104890.66666666667, ans=0.125
+2024-07-28 03:45:08,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=104930.66666666667, ans=0.025
+2024-07-28 03:45:08,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=104930.66666666667, ans=0.125
+2024-07-28 03:45:15,088 INFO [train.py:1114] (0/4) Epoch 8, batch 7150, loss[loss=0.2421, simple_loss=0.3065, pruned_loss=0.08887, over 4551.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2958, pruned_loss=0.06465, over 937329.10 frames. ], batch size: 21, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:45:20,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104944.0, ans=0.125
+2024-07-28 03:45:48,076 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.987e+01 7.110e+01 8.384e+01 1.191e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-28 03:45:49,424 INFO [train.py:1114] (0/4) Epoch 8, batch 7200, loss[loss=0.2444, simple_loss=0.3205, pruned_loss=0.08413, over 4807.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2969, pruned_loss=0.06504, over 937708.08 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 32.0
+2024-07-28 03:45:50,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105010.66666666667, ans=0.125
+2024-07-28 03:45:55,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=105024.0, ans=0.0
+2024-07-28 03:45:57,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105024.0, ans=0.1
+2024-07-28 03:46:05,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=105037.33333333333, ans=0.125
+2024-07-28 03:46:07,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=105037.33333333333, ans=0.0
+2024-07-28 03:46:11,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=105050.66666666667, ans=0.2
+2024-07-28 03:46:21,906 INFO [train.py:1114] (0/4) Epoch 8, batch 7250, loss[loss=0.1699, simple_loss=0.2493, pruned_loss=0.04519, over 4864.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2964, pruned_loss=0.06465, over 939117.25 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:46:27,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=105077.33333333333, ans=0.0
+2024-07-28 03:46:29,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=105090.66666666667, ans=0.125
+2024-07-28 03:46:38,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=105104.0, ans=0.09899494936611666
+2024-07-28 03:46:41,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=105117.33333333333, ans=0.125
+2024-07-28 03:46:47,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105130.66666666667, ans=0.125
+2024-07-28 03:46:48,276 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. limit=6.0
+2024-07-28 03:46:53,137 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.010e+01 6.491e+01 7.289e+01 9.989e+01, threshold=1.298e+02, percent-clipped=0.0
+2024-07-28 03:46:53,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.91 vs. limit=12.0
+2024-07-28 03:46:54,423 INFO [train.py:1114] (0/4) Epoch 8, batch 7300, loss[loss=0.1971, simple_loss=0.2776, pruned_loss=0.05834, over 4862.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2958, pruned_loss=0.06452, over 939766.74 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:47:08,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=105170.66666666667, ans=15.0
+2024-07-28 03:47:15,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105184.0, ans=0.125
+2024-07-28 03:47:15,179 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:47:22,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105197.33333333333, ans=0.125
+2024-07-28 03:47:27,054 INFO [train.py:1114] (0/4) Epoch 8, batch 7350, loss[loss=0.2213, simple_loss=0.2983, pruned_loss=0.07218, over 4644.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2947, pruned_loss=0.06334, over 939163.36 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:47:36,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=105224.0, ans=0.125
+2024-07-28 03:47:41,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105237.33333333333, ans=0.1
+2024-07-28 03:47:42,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=105237.33333333333, ans=0.125
+2024-07-28 03:47:44,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105237.33333333333, ans=0.125
+2024-07-28 03:47:45,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.56 vs. limit=22.5
+2024-07-28 03:47:50,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.38 vs. limit=15.0
+2024-07-28 03:47:57,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105264.0, ans=0.1
+2024-07-28 03:47:58,204 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+01 5.796e+01 6.371e+01 7.883e+01 1.311e+02, threshold=1.274e+02, percent-clipped=1.0
+2024-07-28 03:47:59,458 INFO [train.py:1114] (0/4) Epoch 8, batch 7400, loss[loss=0.2317, simple_loss=0.3056, pruned_loss=0.07892, over 4691.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2954, pruned_loss=0.06346, over 940302.26 frames. ], batch size: 13, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:48:11,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105290.66666666667, ans=0.125
+2024-07-28 03:48:13,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105304.0, ans=0.125
+2024-07-28 03:48:17,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=105304.0, ans=0.125
+2024-07-28 03:48:21,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=105317.33333333333, ans=0.2
+2024-07-28 03:48:32,451 INFO [train.py:1114] (0/4) Epoch 8, batch 7450, loss[loss=0.2178, simple_loss=0.3035, pruned_loss=0.06603, over 4615.00 frames. ], tot_loss[loss=0.21, simple_loss=0.294, pruned_loss=0.06298, over 937863.82 frames. ], batch size: 11, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:48:35,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=105344.0, ans=0.0
+2024-07-28 03:48:39,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=105357.33333333333, ans=0.0
+2024-07-28 03:48:47,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105370.66666666667, ans=0.125
+2024-07-28 03:48:48,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=105370.66666666667, ans=0.0
+2024-07-28 03:48:54,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=15.0
+2024-07-28 03:48:55,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=105384.0, ans=0.09899494936611666
+2024-07-28 03:48:57,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=105397.33333333333, ans=0.125
+2024-07-28 03:49:03,908 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.959e+01 6.584e+01 7.550e+01 1.203e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-28 03:49:05,227 INFO [train.py:1114] (0/4) Epoch 8, batch 7500, loss[loss=0.3033, simple_loss=0.3584, pruned_loss=0.1241, over 3002.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2951, pruned_loss=0.0637, over 936352.21 frames. ], batch size: 36, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:49:14,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105424.0, ans=0.0
+2024-07-28 03:49:15,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=105424.0, ans=0.125
+2024-07-28 03:49:19,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=105437.33333333333, ans=0.0
+2024-07-28 03:49:36,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=105464.0, ans=0.125
+2024-07-28 03:49:39,661 INFO [train.py:1114] (0/4) Epoch 8, batch 7550, loss[loss=0.2324, simple_loss=0.3125, pruned_loss=0.07618, over 4599.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2969, pruned_loss=0.06512, over 936557.91 frames. ], batch size: 17, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:49:42,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=105477.33333333333, ans=0.2
+2024-07-28 03:49:46,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. limit=15.0
+2024-07-28 03:49:50,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=105490.66666666667, ans=0.0
+2024-07-28 03:50:10,694 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 5.717e+01 6.338e+01 7.144e+01 8.798e+01, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 03:50:12,727 INFO [train.py:1114] (0/4) Epoch 8, batch 7600, loss[loss=0.1839, simple_loss=0.28, pruned_loss=0.04385, over 4803.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2968, pruned_loss=0.0648, over 938359.42 frames. ], batch size: 14, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:50:17,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=105544.0, ans=0.125
+2024-07-28 03:50:18,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=105557.33333333333, ans=0.025
+2024-07-28 03:50:21,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.90 vs. limit=22.5
+2024-07-28 03:50:35,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=105584.0, ans=0.2
+2024-07-28 03:50:44,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.44 vs. limit=6.0
+2024-07-28 03:50:45,951 INFO [train.py:1114] (0/4) Epoch 8, batch 7650, loss[loss=0.167, simple_loss=0.2498, pruned_loss=0.04204, over 4944.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2962, pruned_loss=0.06464, over 937415.80 frames. ], batch size: 12, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:50:50,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=105610.66666666667, ans=0.5
+2024-07-28 03:51:00,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=105637.33333333333, ans=0.125
+2024-07-28 03:51:08,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.69 vs. limit=15.0
+2024-07-28 03:51:09,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=105650.66666666667, ans=0.0
+2024-07-28 03:51:10,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=105650.66666666667, ans=0.2
+2024-07-28 03:51:17,764 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.951e+01 6.499e+01 7.761e+01 1.442e+02, threshold=1.300e+02, percent-clipped=1.0
+2024-07-28 03:51:19,096 INFO [train.py:1114] (0/4) Epoch 8, batch 7700, loss[loss=0.2245, simple_loss=0.3169, pruned_loss=0.06603, over 4693.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2964, pruned_loss=0.06421, over 934592.71 frames. ], batch size: 13, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:51:42,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105717.33333333333, ans=0.125
+2024-07-28 03:51:47,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=105730.66666666667, ans=0.1
+2024-07-28 03:51:51,755 INFO [train.py:1114] (0/4) Epoch 8, batch 7750, loss[loss=0.1989, simple_loss=0.2885, pruned_loss=0.05462, over 4935.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2981, pruned_loss=0.06489, over 935718.10 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:51:51,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105744.0, ans=0.09899494936611666
+2024-07-28 03:51:55,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=105744.0, ans=0.2
+2024-07-28 03:52:00,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=105757.33333333333, ans=0.2
+2024-07-28 03:52:06,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105770.66666666667, ans=0.125
+2024-07-28 03:52:10,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=105770.66666666667, ans=0.0
+2024-07-28 03:52:12,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105784.0, ans=0.125
+2024-07-28 03:52:23,023 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.861e+01 6.500e+01 7.436e+01 9.708e+01, threshold=1.300e+02, percent-clipped=0.0
+2024-07-28 03:52:24,794 INFO [train.py:1114] (0/4) Epoch 8, batch 7800, loss[loss=0.2146, simple_loss=0.3167, pruned_loss=0.05623, over 4659.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2979, pruned_loss=0.06455, over 937770.66 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:52:25,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=105810.66666666667, ans=0.0
+2024-07-28 03:52:26,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.43 vs. limit=22.5
+2024-07-28 03:52:31,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105824.0, ans=0.1
+2024-07-28 03:52:33,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.27 vs. limit=22.5
+2024-07-28 03:52:33,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=105824.0, ans=0.0
+2024-07-28 03:52:51,712 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.89 vs. limit=15.0
+2024-07-28 03:52:53,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105864.0, ans=0.1
+2024-07-28 03:52:55,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=105864.0, ans=0.1
+2024-07-28 03:52:59,043 INFO [train.py:1114] (0/4) Epoch 8, batch 7850, loss[loss=0.1923, simple_loss=0.2653, pruned_loss=0.05963, over 4503.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2987, pruned_loss=0.06531, over 936587.58 frames. ], batch size: 10, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:53:15,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.71 vs. limit=15.0
+2024-07-28 03:53:17,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=105904.0, ans=0.125
+2024-07-28 03:53:21,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105917.33333333333, ans=0.125
+2024-07-28 03:53:21,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=105917.33333333333, ans=0.025
+2024-07-28 03:53:22,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=105917.33333333333, ans=0.0
+2024-07-28 03:53:28,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=105930.66666666667, ans=0.125
+2024-07-28 03:53:30,109 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.073e+01 5.939e+01 6.689e+01 8.282e+01 1.225e+02, threshold=1.338e+02, percent-clipped=0.0
+2024-07-28 03:53:30,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.02 vs. limit=15.0
+2024-07-28 03:53:31,392 INFO [train.py:1114] (0/4) Epoch 8, batch 7900, loss[loss=0.2321, simple_loss=0.3245, pruned_loss=0.06984, over 4867.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2995, pruned_loss=0.06565, over 933880.74 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:53:38,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=105957.33333333333, ans=0.0
+2024-07-28 03:53:48,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105970.66666666667, ans=0.125
+2024-07-28 03:53:55,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=105984.0, ans=0.2
+2024-07-28 03:54:00,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=105997.33333333333, ans=0.0
+2024-07-28 03:54:04,734 INFO [train.py:1114] (0/4) Epoch 8, batch 7950, loss[loss=0.2713, simple_loss=0.3356, pruned_loss=0.1035, over 3292.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2993, pruned_loss=0.06541, over 935972.30 frames. ], batch size: 35, lr: 9.22e-03, grad_scale: 16.0
+2024-07-28 03:54:30,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=106050.66666666667, ans=0.125
+2024-07-28 03:54:39,427 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.772e+01 6.445e+01 7.166e+01 9.685e+01, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 03:54:39,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106077.33333333333, ans=0.0
+2024-07-28 03:54:40,311 INFO [train.py:1114] (0/4) Epoch 8, batch 8000, loss[loss=0.1743, simple_loss=0.2566, pruned_loss=0.046, over 4618.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2966, pruned_loss=0.06475, over 935151.28 frames. ], batch size: 11, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:54:53,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=106104.0, ans=0.125
+2024-07-28 03:54:56,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=106104.0, ans=0.0
+2024-07-28 03:54:57,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=106104.0, ans=10.0
+2024-07-28 03:55:10,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.19 vs. limit=12.0
+2024-07-28 03:55:14,569 INFO [train.py:1114] (0/4) Epoch 8, batch 8050, loss[loss=0.2087, simple_loss=0.3046, pruned_loss=0.05638, over 4816.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2953, pruned_loss=0.06401, over 934655.21 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:55:15,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0
+2024-07-28 03:55:17,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0
+2024-07-28 03:55:18,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106144.0, ans=0.1
+2024-07-28 03:55:32,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=106170.66666666667, ans=0.125
+2024-07-28 03:55:36,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=106184.0, ans=0.0
+2024-07-28 03:55:43,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=106197.33333333333, ans=0.0
+2024-07-28 03:55:47,754 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.934e+01 5.620e+01 6.153e+01 6.973e+01 1.002e+02, threshold=1.231e+02, percent-clipped=0.0
+2024-07-28 03:55:48,433 INFO [train.py:1114] (0/4) Epoch 8, batch 8100, loss[loss=0.2277, simple_loss=0.3201, pruned_loss=0.06768, over 4809.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2957, pruned_loss=0.06354, over 934172.39 frames. 
], batch size: 15, lr: 9.21e-03, grad_scale: 32.0 +2024-07-28 03:55:57,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=106224.0, ans=0.125 +2024-07-28 03:56:02,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106237.33333333333, ans=0.125 +2024-07-28 03:56:09,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106250.66666666667, ans=0.125 +2024-07-28 03:56:12,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106250.66666666667, ans=0.0 +2024-07-28 03:56:13,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=106264.0, ans=0.125 +2024-07-28 03:56:17,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.09 vs. limit=10.0 +2024-07-28 03:56:20,979 INFO [train.py:1114] (0/4) Epoch 8, batch 8150, loss[loss=0.225, simple_loss=0.3135, pruned_loss=0.06828, over 4798.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2947, pruned_loss=0.06328, over 937604.64 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0 +2024-07-28 03:56:23,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106277.33333333333, ans=0.125 +2024-07-28 03:56:25,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106277.33333333333, ans=0.125 +2024-07-28 03:56:26,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=106277.33333333333, ans=0.2 +2024-07-28 03:56:26,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106277.33333333333, ans=0.1 +2024-07-28 03:56:33,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=106304.0, ans=0.0 +2024-07-28 03:56:34,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106304.0, ans=0.125 +2024-07-28 03:56:35,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=106304.0, ans=0.0 +2024-07-28 03:56:42,513 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:56:46,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106330.66666666667, ans=0.1 +2024-07-28 03:56:52,815 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.056e+01 5.810e+01 6.420e+01 7.411e+01 1.127e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 03:56:53,463 INFO [train.py:1114] (0/4) Epoch 8, batch 8200, loss[loss=0.2165, simple_loss=0.3103, pruned_loss=0.06135, over 4798.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2949, pruned_loss=0.06302, over 938314.70 frames. 
], batch size: 15, lr: 9.21e-03, grad_scale: 32.0 +2024-07-28 03:56:59,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=106357.33333333333, ans=0.025 +2024-07-28 03:57:04,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.56 vs. limit=22.5 +2024-07-28 03:57:14,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=106384.0, ans=0.0 +2024-07-28 03:57:19,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0 +2024-07-28 03:57:26,358 INFO [train.py:1114] (0/4) Epoch 8, batch 8250, loss[loss=0.2192, simple_loss=0.3055, pruned_loss=0.06646, over 4889.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2961, pruned_loss=0.06398, over 938795.23 frames. ], batch size: 13, lr: 9.20e-03, grad_scale: 32.0 +2024-07-28 03:57:30,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=106410.66666666667, ans=0.0 +2024-07-28 03:57:30,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=106410.66666666667, ans=0.125 +2024-07-28 03:57:34,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=106424.0, ans=0.0 +2024-07-28 03:57:42,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106437.33333333333, ans=0.125 +2024-07-28 03:57:56,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106464.0, ans=0.125 +2024-07-28 03:57:57,943 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.787e+01 6.260e+01 6.993e+01 1.105e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 03:57:58,642 INFO [train.py:1114] (0/4) Epoch 8, batch 8300, loss[loss=0.2095, simple_loss=0.3046, pruned_loss=0.05716, over 4898.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2973, pruned_loss=0.06421, over 938715.87 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0 +2024-07-28 03:57:58,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=106477.33333333333, ans=0.125 +2024-07-28 03:57:59,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106477.33333333333, ans=0.125 +2024-07-28 03:58:11,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=106490.66666666667, ans=0.2 +2024-07-28 03:58:26,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.72 vs. limit=10.0 +2024-07-28 03:58:30,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. 
limit=6.0 +2024-07-28 03:58:34,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=106517.33333333333, ans=0.125 +2024-07-28 03:58:41,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.92 vs. limit=15.0 +2024-07-28 03:58:45,428 INFO [train.py:1114] (0/4) Epoch 8, batch 8350, loss[loss=0.2418, simple_loss=0.3406, pruned_loss=0.07149, over 4792.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2972, pruned_loss=0.06408, over 941462.12 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0 +2024-07-28 03:58:55,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0 +2024-07-28 03:59:13,756 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.34 vs. limit=10.0 +2024-07-28 03:59:17,900 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.040e+01 6.810e+01 8.092e+01 1.142e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-28 03:59:18,631 INFO [train.py:1114] (0/4) Epoch 8, batch 8400, loss[loss=0.2001, simple_loss=0.2855, pruned_loss=0.05737, over 4777.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2982, pruned_loss=0.06493, over 940334.66 frames. ], batch size: 12, lr: 9.20e-03, grad_scale: 32.0 +2024-07-28 03:59:21,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106610.66666666667, ans=0.1 +2024-07-28 03:59:21,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106610.66666666667, ans=0.125 +2024-07-28 03:59:22,080 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=15.0 +2024-07-28 03:59:28,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.36 vs. limit=15.0 +2024-07-28 03:59:43,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=106650.66666666667, ans=0.025 +2024-07-28 03:59:45,160 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-80000.pt +2024-07-28 03:59:52,390 INFO [train.py:1114] (0/4) Epoch 8, batch 8450, loss[loss=0.2065, simple_loss=0.291, pruned_loss=0.06105, over 4806.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.299, pruned_loss=0.06512, over 939356.39 frames. 
], batch size: 15, lr: 9.19e-03, grad_scale: 32.0 +2024-07-28 03:59:53,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=106677.33333333333, ans=0.025 +2024-07-28 03:59:59,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106690.66666666667, ans=0.125 +2024-07-28 04:00:00,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=106690.66666666667, ans=0.0 +2024-07-28 04:00:11,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106717.33333333333, ans=0.0 +2024-07-28 04:00:13,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.42 vs. limit=15.0 +2024-07-28 04:00:25,072 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.985e+01 6.391e+01 7.364e+01 1.076e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 04:00:25,733 INFO [train.py:1114] (0/4) Epoch 8, batch 8500, loss[loss=0.2379, simple_loss=0.2965, pruned_loss=0.0896, over 4608.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2973, pruned_loss=0.06495, over 939023.15 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0 +2024-07-28 04:00:29,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=106744.0, ans=0.125 +2024-07-28 04:00:32,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=106757.33333333333, ans=0.07 +2024-07-28 04:00:34,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106757.33333333333, ans=0.125 +2024-07-28 04:00:36,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=106757.33333333333, ans=0.0 +2024-07-28 04:00:48,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106784.0, ans=0.125 +2024-07-28 04:00:52,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=106797.33333333333, ans=0.0 +2024-07-28 04:00:58,736 INFO [train.py:1114] (0/4) Epoch 8, batch 8550, loss[loss=0.1808, simple_loss=0.2473, pruned_loss=0.05709, over 4808.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2958, pruned_loss=0.06404, over 939933.42 frames. 
], batch size: 11, lr: 9.19e-03, grad_scale: 32.0 +2024-07-28 04:01:00,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=106810.66666666667, ans=0.0 +2024-07-28 04:01:07,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=106824.0, ans=0.125 +2024-07-28 04:01:14,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=106837.33333333333, ans=0.0 +2024-07-28 04:01:31,412 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.840e+01 6.821e+01 7.770e+01 1.284e+02, threshold=1.364e+02, percent-clipped=1.0 +2024-07-28 04:01:31,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=106877.33333333333, ans=0.125 +2024-07-28 04:01:32,070 INFO [train.py:1114] (0/4) Epoch 8, batch 8600, loss[loss=0.2332, simple_loss=0.3105, pruned_loss=0.07794, over 4793.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2961, pruned_loss=0.06435, over 939683.71 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0 +2024-07-28 04:01:56,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106917.33333333333, ans=0.125 +2024-07-28 04:01:58,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.07 vs. limit=15.0 +2024-07-28 04:01:59,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.50 vs. limit=15.0 +2024-07-28 04:02:04,643 INFO [train.py:1114] (0/4) Epoch 8, batch 8650, loss[loss=0.2125, simple_loss=0.3057, pruned_loss=0.05962, over 4913.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2957, pruned_loss=0.06398, over 940792.99 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0 +2024-07-28 04:02:15,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=106957.33333333333, ans=0.125 +2024-07-28 04:02:28,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=106984.0, ans=0.2 +2024-07-28 04:02:33,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.17 vs. limit=15.0 +2024-07-28 04:02:34,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106997.33333333333, ans=0.1 +2024-07-28 04:02:37,626 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.079e+01 7.020e+01 8.285e+01 1.215e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-28 04:02:38,305 INFO [train.py:1114] (0/4) Epoch 8, batch 8700, loss[loss=0.2, simple_loss=0.2843, pruned_loss=0.05782, over 4764.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2971, pruned_loss=0.06463, over 938341.79 frames. 
], batch size: 13, lr: 9.18e-03, grad_scale: 32.0 +2024-07-28 04:02:41,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107010.66666666667, ans=0.125 +2024-07-28 04:02:55,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=107037.33333333333, ans=0.125 +2024-07-28 04:03:04,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=107050.66666666667, ans=0.0 +2024-07-28 04:03:06,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. limit=10.0 +2024-07-28 04:03:09,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107064.0, ans=0.125 +2024-07-28 04:03:13,389 INFO [train.py:1114] (0/4) Epoch 8, batch 8750, loss[loss=0.2419, simple_loss=0.3184, pruned_loss=0.08267, over 4672.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2972, pruned_loss=0.06451, over 936519.20 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0 +2024-07-28 04:03:16,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=107077.33333333333, ans=0.07 +2024-07-28 04:03:29,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=107104.0, ans=0.0 +2024-07-28 04:03:30,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=107104.0, ans=0.0 +2024-07-28 04:03:36,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=107117.33333333333, ans=0.04949747468305833 +2024-07-28 04:03:45,027 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:03:46,703 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.010e+01 6.887e+01 8.098e+01 1.294e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 04:03:47,333 INFO [train.py:1114] (0/4) Epoch 8, batch 8800, loss[loss=0.2056, simple_loss=0.2859, pruned_loss=0.0627, over 4934.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2974, pruned_loss=0.06429, over 937390.65 frames. ], batch size: 14, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:03:48,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=107144.0, ans=0.125 +2024-07-28 04:03:55,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107157.33333333333, ans=0.125 +2024-07-28 04:03:57,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=107157.33333333333, ans=0.125 +2024-07-28 04:04:08,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107184.0, ans=0.1 +2024-07-28 04:04:10,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=107184.0, ans=0.2 +2024-07-28 04:04:20,454 INFO [train.py:1114] (0/4) Epoch 8, batch 8850, loss[loss=0.2345, simple_loss=0.3213, pruned_loss=0.07385, over 4413.00 frames. 
], tot_loss[loss=0.2128, simple_loss=0.2968, pruned_loss=0.06439, over 931683.49 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:04:21,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=107210.66666666667, ans=0.0 +2024-07-28 04:04:22,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107210.66666666667, ans=0.0 +2024-07-28 04:04:41,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107250.66666666667, ans=0.1 +2024-07-28 04:04:46,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107250.66666666667, ans=0.1 +2024-07-28 04:04:53,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.819e+01 6.564e+01 7.832e+01 1.170e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 04:04:53,916 INFO [train.py:1114] (0/4) Epoch 8, batch 8900, loss[loss=0.1735, simple_loss=0.2626, pruned_loss=0.04214, over 4947.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2979, pruned_loss=0.06436, over 930014.69 frames. ], batch size: 12, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:04:59,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.86 vs. limit=15.0 +2024-07-28 04:05:03,719 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:05:25,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-07-28 04:05:26,156 INFO [train.py:1114] (0/4) Epoch 8, batch 8950, loss[loss=0.2726, simple_loss=0.3365, pruned_loss=0.1044, over 4507.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2984, pruned_loss=0.06465, over 930840.60 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:05:29,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=107344.0, ans=0.0 +2024-07-28 04:05:32,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=107357.33333333333, ans=0.125 +2024-07-28 04:05:35,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107357.33333333333, ans=0.125 +2024-07-28 04:05:47,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=107384.0, ans=15.0 +2024-07-28 04:05:48,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107384.0, ans=0.125 +2024-07-28 04:05:50,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.67 vs. 
limit=12.0 +2024-07-28 04:05:52,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107384.0, ans=0.125 +2024-07-28 04:05:54,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=107397.33333333333, ans=0.2 +2024-07-28 04:05:59,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.334e+01 5.823e+01 6.551e+01 7.649e+01 1.257e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 04:06:00,699 INFO [train.py:1114] (0/4) Epoch 8, batch 9000, loss[loss=0.1863, simple_loss=0.2757, pruned_loss=0.04847, over 4641.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2968, pruned_loss=0.064, over 933665.84 frames. ], batch size: 12, lr: 9.16e-03, grad_scale: 32.0 +2024-07-28 04:06:00,700 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 04:06:05,029 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.3556, 4.1441, 3.6685, 3.8589], device='cuda:0') +2024-07-28 04:06:12,615 INFO [train.py:1146] (0/4) Epoch 8, validation: loss=0.1781, simple_loss=0.2826, pruned_loss=0.03685, over 944034.00 frames. +2024-07-28 04:06:12,615 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 04:06:25,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=107437.33333333333, ans=0.0 +2024-07-28 04:06:33,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0 +2024-07-28 04:06:39,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.89 vs. limit=6.0 +2024-07-28 04:06:43,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=107464.0, ans=0.0 +2024-07-28 04:06:45,398 INFO [train.py:1114] (0/4) Epoch 8, batch 9050, loss[loss=0.18, simple_loss=0.2596, pruned_loss=0.05018, over 4489.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2961, pruned_loss=0.0639, over 933841.21 frames. 
], batch size: 10, lr: 9.16e-03, grad_scale: 32.0 +2024-07-28 04:06:47,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107477.33333333333, ans=0.1 +2024-07-28 04:06:49,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=107477.33333333333, ans=0.0 +2024-07-28 04:06:49,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=107477.33333333333, ans=0.125 +2024-07-28 04:06:54,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=107490.66666666667, ans=10.0 +2024-07-28 04:06:56,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=107490.66666666667, ans=0.0 +2024-07-28 04:07:09,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107517.33333333333, ans=0.125 +2024-07-28 04:07:11,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=107530.66666666667, ans=0.2 +2024-07-28 04:07:11,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=107530.66666666667, ans=0.125 +2024-07-28 04:07:16,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107530.66666666667, ans=0.0 +2024-07-28 04:07:16,883 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.793e+01 6.353e+01 7.194e+01 9.869e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 04:07:17,510 INFO [train.py:1114] (0/4) Epoch 8, batch 9100, loss[loss=0.2001, simple_loss=0.2906, pruned_loss=0.05476, over 4930.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.295, pruned_loss=0.06339, over 936558.33 frames. ], batch size: 14, lr: 9.16e-03, grad_scale: 32.0 +2024-07-28 04:07:31,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0 +2024-07-28 04:07:37,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.17 vs. limit=22.5 +2024-07-28 04:07:48,970 INFO [train.py:1114] (0/4) Epoch 8, batch 9150, loss[loss=0.2068, simple_loss=0.2937, pruned_loss=0.05988, over 4809.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2962, pruned_loss=0.06392, over 935108.98 frames. ], batch size: 14, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:07:51,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=107610.66666666667, ans=0.0 +2024-07-28 04:07:53,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=107610.66666666667, ans=10.0 +2024-07-28 04:07:57,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. 
limit=15.0 +2024-07-28 04:08:00,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107637.33333333333, ans=0.125 +2024-07-28 04:08:13,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=107664.0, ans=0.125 +2024-07-28 04:08:19,829 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.934e+01 6.513e+01 7.499e+01 1.086e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 04:08:20,458 INFO [train.py:1114] (0/4) Epoch 8, batch 9200, loss[loss=0.172, simple_loss=0.264, pruned_loss=0.04001, over 4860.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2953, pruned_loss=0.06366, over 937347.24 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:08:21,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=15.0 +2024-07-28 04:08:25,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.98 vs. limit=6.0 +2024-07-28 04:08:28,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107690.66666666667, ans=0.1 +2024-07-28 04:08:28,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.46 vs. limit=15.0 +2024-07-28 04:08:51,994 INFO [train.py:1114] (0/4) Epoch 8, batch 9250, loss[loss=0.2325, simple_loss=0.317, pruned_loss=0.07404, over 4638.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2955, pruned_loss=0.06353, over 937863.36 frames. 
], batch size: 13, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:08:54,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=107744.0, ans=0.95 +2024-07-28 04:08:55,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107744.0, ans=0.0 +2024-07-28 04:08:55,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107744.0, ans=0.125 +2024-07-28 04:08:59,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107757.33333333333, ans=0.125 +2024-07-28 04:09:08,518 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:09:11,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107784.0, ans=0.125 +2024-07-28 04:09:16,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107797.33333333333, ans=0.1 +2024-07-28 04:09:19,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=107797.33333333333, ans=0.125 +2024-07-28 04:09:22,901 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.836e+01 6.420e+01 7.069e+01 1.211e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 04:09:23,527 INFO [train.py:1114] (0/4) Epoch 8, batch 9300, loss[loss=0.2008, simple_loss=0.2832, pruned_loss=0.05922, over 4774.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.296, pruned_loss=0.06461, over 938317.28 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:09:53,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107864.0, ans=0.125 +2024-07-28 04:09:55,750 INFO [train.py:1114] (0/4) Epoch 8, batch 9350, loss[loss=0.1552, simple_loss=0.2325, pruned_loss=0.03891, over 4790.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2957, pruned_loss=0.06391, over 935266.18 frames. 
], batch size: 11, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:10:04,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=107890.66666666667, ans=0.07 +2024-07-28 04:10:11,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=107904.0, ans=0.0 +2024-07-28 04:10:14,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=107904.0, ans=0.125 +2024-07-28 04:10:15,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=107917.33333333333, ans=0.125 +2024-07-28 04:10:19,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107917.33333333333, ans=0.125 +2024-07-28 04:10:23,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=107930.66666666667, ans=0.0 +2024-07-28 04:10:23,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=107930.66666666667, ans=0.07 +2024-07-28 04:10:27,331 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.454e+01 5.997e+01 6.849e+01 9.161e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 04:10:27,990 INFO [train.py:1114] (0/4) Epoch 8, batch 9400, loss[loss=0.2296, simple_loss=0.3161, pruned_loss=0.07161, over 4692.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2954, pruned_loss=0.06383, over 933190.93 frames. ], batch size: 13, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:10:33,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107957.33333333333, ans=0.125 +2024-07-28 04:10:53,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0 +2024-07-28 04:10:56,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=107997.33333333333, ans=0.0 +2024-07-28 04:10:59,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=107997.33333333333, ans=0.025 +2024-07-28 04:11:03,447 INFO [train.py:1114] (0/4) Epoch 8, batch 9450, loss[loss=0.1749, simple_loss=0.2519, pruned_loss=0.04888, over 4813.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2964, pruned_loss=0.06434, over 932425.13 frames. 
], batch size: 11, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:11:04,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=108010.66666666667, ans=0.0 +2024-07-28 04:11:07,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=108010.66666666667, ans=0.0 +2024-07-28 04:11:21,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108050.66666666667, ans=0.1 +2024-07-28 04:11:27,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=108050.66666666667, ans=0.125 +2024-07-28 04:11:34,693 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 5.745e+01 6.311e+01 7.517e+01 1.007e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 04:11:35,348 INFO [train.py:1114] (0/4) Epoch 8, batch 9500, loss[loss=0.1889, simple_loss=0.2711, pruned_loss=0.05339, over 4716.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2955, pruned_loss=0.06352, over 934824.96 frames. ], batch size: 12, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:11:55,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0 +2024-07-28 04:11:55,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108117.33333333333, ans=0.0 +2024-07-28 04:12:02,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108130.66666666667, ans=0.0 +2024-07-28 04:12:03,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=108130.66666666667, ans=0.04949747468305833 +2024-07-28 04:12:05,116 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.80 vs. limit=12.0 +2024-07-28 04:12:06,641 INFO [train.py:1114] (0/4) Epoch 8, batch 9550, loss[loss=0.2044, simple_loss=0.2807, pruned_loss=0.06404, over 4776.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2965, pruned_loss=0.06444, over 932055.66 frames. ], batch size: 12, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:12:06,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=108144.0, ans=0.125 +2024-07-28 04:12:08,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108144.0, ans=0.1 +2024-07-28 04:12:12,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.16 vs. 
limit=15.0 +2024-07-28 04:12:23,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=108170.66666666667, ans=0.125 +2024-07-28 04:12:24,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108170.66666666667, ans=0.125 +2024-07-28 04:12:25,288 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:12:37,462 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 5.899e+01 6.832e+01 8.942e+01 1.153e+02, threshold=1.366e+02, percent-clipped=0.0 +2024-07-28 04:12:38,142 INFO [train.py:1114] (0/4) Epoch 8, batch 9600, loss[loss=0.3026, simple_loss=0.3735, pruned_loss=0.1159, over 3633.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2951, pruned_loss=0.06411, over 931351.02 frames. ], batch size: 35, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:12:40,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=108210.66666666667, ans=0.125 +2024-07-28 04:12:40,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=108210.66666666667, ans=0.0 +2024-07-28 04:12:41,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108210.66666666667, ans=0.125 +2024-07-28 04:12:46,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=108224.0, ans=0.125 +2024-07-28 04:12:47,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=108224.0, ans=0.0 +2024-07-28 04:12:48,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108224.0, ans=0.125 +2024-07-28 04:12:53,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108237.33333333333, ans=0.1 +2024-07-28 04:12:55,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=108237.33333333333, ans=0.0 +2024-07-28 04:12:58,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=108250.66666666667, ans=0.2 +2024-07-28 04:13:05,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.51 vs. limit=22.5 +2024-07-28 04:13:07,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=108264.0, ans=0.0 +2024-07-28 04:13:10,089 INFO [train.py:1114] (0/4) Epoch 8, batch 9650, loss[loss=0.227, simple_loss=0.3113, pruned_loss=0.07137, over 4840.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2961, pruned_loss=0.06466, over 927035.09 frames. ], batch size: 16, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:13:12,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. 
limit=6.0 +2024-07-28 04:13:12,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=108277.33333333333, ans=0.2 +2024-07-28 04:13:13,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0 +2024-07-28 04:13:14,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=108277.33333333333, ans=0.0 +2024-07-28 04:13:20,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.06 vs. limit=22.5 +2024-07-28 04:13:23,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.76 vs. limit=22.5 +2024-07-28 04:13:31,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=108317.33333333333, ans=0.125 +2024-07-28 04:13:40,238 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.895e+01 5.890e+01 6.394e+01 7.383e+01 1.171e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 04:13:40,925 INFO [train.py:1114] (0/4) Epoch 8, batch 9700, loss[loss=0.2359, simple_loss=0.3153, pruned_loss=0.07822, over 4257.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2959, pruned_loss=0.06431, over 925530.25 frames. ], batch size: 25, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:13:43,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108344.0, ans=0.0 +2024-07-28 04:13:44,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108344.0, ans=0.125 +2024-07-28 04:14:01,441 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:14:10,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.00 vs. limit=15.0 +2024-07-28 04:14:11,935 INFO [train.py:1114] (0/4) Epoch 8, batch 9750, loss[loss=0.2278, simple_loss=0.3233, pruned_loss=0.06619, over 4682.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2956, pruned_loss=0.06416, over 926445.65 frames. ], batch size: 15, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:15,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108410.66666666667, ans=0.125 +2024-07-28 04:14:18,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=108424.0, ans=0.0 +2024-07-28 04:14:27,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108437.33333333333, ans=0.0 +2024-07-28 04:14:39,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.72 vs. limit=15.0 +2024-07-28 04:14:42,359 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 6.068e+01 7.067e+01 8.452e+01 1.289e+02, threshold=1.413e+02, percent-clipped=1.0 +2024-07-28 04:14:42,998 INFO [train.py:1114] (0/4) Epoch 8, batch 9800, loss[loss=0.2081, simple_loss=0.2973, pruned_loss=0.05944, over 4707.00 frames. 
], tot_loss[loss=0.2114, simple_loss=0.295, pruned_loss=0.06389, over 925724.61 frames. ], batch size: 12, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:48,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=108490.66666666667, ans=0.09899494936611666 +2024-07-28 04:15:00,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=108517.33333333333, ans=0.0 +2024-07-28 04:15:02,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108517.33333333333, ans=0.125 +2024-07-28 04:15:13,597 INFO [train.py:1114] (0/4) Epoch 8, batch 9850, loss[loss=0.1903, simple_loss=0.2765, pruned_loss=0.05208, over 4904.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2961, pruned_loss=0.06452, over 927769.25 frames. ], batch size: 15, lr: 9.11e-03, grad_scale: 32.0 +2024-07-28 04:15:23,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.89 vs. limit=22.5 +2024-07-28 04:15:29,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=108570.66666666667, ans=0.125 +2024-07-28 04:15:37,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.65 vs. limit=15.0 +2024-07-28 04:15:37,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108584.0, ans=0.1 +2024-07-28 04:15:44,372 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.942e+01 6.515e+01 7.439e+01 1.419e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 04:15:45,043 INFO [train.py:1114] (0/4) Epoch 8, batch 9900, loss[loss=0.232, simple_loss=0.3239, pruned_loss=0.07009, over 4838.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2979, pruned_loss=0.06574, over 926742.11 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 32.0 +2024-07-28 04:15:51,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108624.0, ans=0.125 +2024-07-28 04:15:57,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108637.33333333333, ans=0.0 +2024-07-28 04:16:01,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=108637.33333333333, ans=0.0 +2024-07-28 04:16:15,818 INFO [train.py:1114] (0/4) Epoch 8, batch 9950, loss[loss=0.1715, simple_loss=0.2589, pruned_loss=0.042, over 4798.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2981, pruned_loss=0.06621, over 929549.10 frames. 
], batch size: 11, lr: 9.11e-03, grad_scale: 64.0 +2024-07-28 04:16:17,217 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:16:17,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108677.33333333333, ans=0.125 +2024-07-28 04:16:19,520 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:16:19,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=108677.33333333333, ans=10.0 +2024-07-28 04:16:21,196 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.19 vs. limit=15.0 +2024-07-28 04:16:22,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=108690.66666666667, ans=0.025 +2024-07-28 04:16:26,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=108690.66666666667, ans=0.5 +2024-07-28 04:16:27,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=108704.0, ans=0.125 +2024-07-28 04:16:30,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108704.0, ans=0.0 +2024-07-28 04:16:33,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0 +2024-07-28 04:16:45,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=108730.66666666667, ans=10.0 +2024-07-28 04:16:46,387 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.881e+01 6.237e+01 7.241e+01 1.097e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 04:16:46,978 INFO [train.py:1114] (0/4) Epoch 8, batch 10000, loss[loss=0.2243, simple_loss=0.3044, pruned_loss=0.07207, over 4611.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2995, pruned_loss=0.06671, over 927044.48 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 64.0 +2024-07-28 04:16:48,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108744.0, ans=0.0 +2024-07-28 04:16:52,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108757.33333333333, ans=0.1 +2024-07-28 04:17:02,200 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0 +2024-07-28 04:17:04,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=108784.0, ans=0.0 +2024-07-28 04:17:16,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=108797.33333333333, ans=0.2 +2024-07-28 04:17:20,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.31 vs. 
limit=15.0 +2024-07-28 04:17:20,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=108797.33333333333, ans=15.0 +2024-07-28 04:17:21,340 INFO [train.py:1114] (0/4) Epoch 8, batch 10050, loss[loss=0.2626, simple_loss=0.3332, pruned_loss=0.09596, over 3220.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3032, pruned_loss=0.06838, over 914665.07 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 64.0 +2024-07-28 04:17:23,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=108810.66666666667, ans=0.025 +2024-07-28 04:17:23,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=108810.66666666667, ans=0.1 +2024-07-28 04:17:24,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=108810.66666666667, ans=0.125 +2024-07-28 04:17:36,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.54 vs. limit=15.0 +2024-07-28 04:17:38,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.27 vs. limit=15.0 +2024-07-28 04:17:40,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108837.33333333333, ans=0.1 +2024-07-28 04:17:55,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.508e+01 7.175e+01 7.748e+01 1.103e+02, threshold=1.435e+02, percent-clipped=0.0 +2024-07-28 04:17:55,236 INFO [train.py:1114] (0/4) Epoch 8, batch 10100, loss[loss=0.2253, simple_loss=0.3062, pruned_loss=0.07216, over 3603.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3089, pruned_loss=0.07427, over 861970.45 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:02,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=15.0 +2024-07-28 04:18:19,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=108917.33333333333, ans=0.05 +2024-07-28 04:18:27,891 INFO [train.py:1114] (0/4) Epoch 8, batch 10150, loss[loss=0.2588, simple_loss=0.3385, pruned_loss=0.08961, over 3543.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3131, pruned_loss=0.07832, over 820137.55 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:30,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108944.0, ans=0.125 +2024-07-28 04:18:36,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5 +2024-07-28 04:18:44,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=108970.66666666667, ans=0.0 +2024-07-28 04:18:47,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=10.64 vs. 
limit=12.0 +2024-07-28 04:18:50,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=108984.0, ans=0.02 +2024-07-28 04:18:52,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=108984.0, ans=0.09899494936611666 +2024-07-28 04:18:59,458 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.833e+01 6.827e+01 7.269e+01 7.768e+01 1.197e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-28 04:18:59,491 INFO [train.py:1114] (0/4) Epoch 8, batch 10200, loss[loss=0.2383, simple_loss=0.3162, pruned_loss=0.0802, over 3523.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3161, pruned_loss=0.08159, over 786836.41 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:59,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109010.66666666667, ans=0.1 +2024-07-28 04:19:05,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=109024.0, ans=0.0 +2024-07-28 04:19:07,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=109024.0, ans=0.025 +2024-07-28 04:19:07,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.13 vs. limit=15.0 +2024-07-28 04:19:08,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=109024.0, ans=0.0 +2024-07-28 04:19:10,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109024.0, ans=0.125 +2024-07-28 04:19:13,261 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-8.pt +2024-07-28 04:19:58,655 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:19:59,131 INFO [train.py:1114] (0/4) Epoch 9, batch 0, loss[loss=0.1999, simple_loss=0.2857, pruned_loss=0.05709, over 4863.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2857, pruned_loss=0.05709, over 4863.00 frames. ], batch size: 12, lr: 8.61e-03, grad_scale: 32.0 +2024-07-28 04:19:59,132 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 04:20:10,795 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.1818, simple_loss=0.2877, pruned_loss=0.03795, over 944034.00 frames. 
+2024-07-28 04:20:10,796 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 04:20:12,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109040.0, ans=0.1 +2024-07-28 04:20:20,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109053.33333333333, ans=0.125 +2024-07-28 04:20:28,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=109066.66666666667, ans=0.125 +2024-07-28 04:20:29,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109066.66666666667, ans=0.125 +2024-07-28 04:20:41,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=109093.33333333333, ans=0.05 +2024-07-28 04:20:45,145 INFO [train.py:1114] (0/4) Epoch 9, batch 50, loss[loss=0.1767, simple_loss=0.257, pruned_loss=0.04821, over 4615.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2987, pruned_loss=0.06486, over 206124.89 frames. ], batch size: 11, lr: 8.61e-03, grad_scale: 32.0 +2024-07-28 04:20:48,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=109106.66666666667, ans=0.0 +2024-07-28 04:20:56,980 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:21:05,390 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 5.804e+01 6.519e+01 7.318e+01 1.022e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 04:21:05,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=109133.33333333333, ans=0.07 +2024-07-28 04:21:10,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=109146.66666666667, ans=0.2 +2024-07-28 04:21:15,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=109160.0, ans=0.2 +2024-07-28 04:21:15,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109160.0, ans=0.125 +2024-07-28 04:21:21,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0 +2024-07-28 04:21:22,091 INFO [train.py:1114] (0/4) Epoch 9, batch 100, loss[loss=0.1963, simple_loss=0.2748, pruned_loss=0.05893, over 4645.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2976, pruned_loss=0.06403, over 365298.10 frames. 
], batch size: 12, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:21:24,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=109173.33333333333, ans=0.125 +2024-07-28 04:21:31,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109186.66666666667, ans=0.125 +2024-07-28 04:21:35,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109200.0, ans=0.125 +2024-07-28 04:21:36,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=109200.0, ans=0.0 +2024-07-28 04:21:40,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=109200.0, ans=0.125 +2024-07-28 04:21:40,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=109200.0, ans=0.07 +2024-07-28 04:21:42,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=109200.0, ans=0.2 +2024-07-28 04:21:52,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 04:21:59,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=109240.0, ans=0.125 +2024-07-28 04:21:59,551 INFO [train.py:1114] (0/4) Epoch 9, batch 150, loss[loss=0.1546, simple_loss=0.239, pruned_loss=0.03512, over 4609.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2937, pruned_loss=0.06253, over 493985.53 frames. ], batch size: 11, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:22:02,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109240.0, ans=0.125 +2024-07-28 04:22:08,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=109253.33333333333, ans=0.0 +2024-07-28 04:22:09,535 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:22:17,903 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.760e+01 6.227e+01 6.826e+01 1.008e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 04:22:25,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.39 vs. limit=15.0 +2024-07-28 04:22:25,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=109293.33333333333, ans=0.2 +2024-07-28 04:22:32,675 INFO [train.py:1114] (0/4) Epoch 9, batch 200, loss[loss=0.2194, simple_loss=0.309, pruned_loss=0.0649, over 4491.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2939, pruned_loss=0.06241, over 593783.64 frames. 
], batch size: 21, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:22:34,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=109306.66666666667, ans=0.125 +2024-07-28 04:22:49,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=109333.33333333333, ans=0.2 +2024-07-28 04:22:50,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=109333.33333333333, ans=0.0 +2024-07-28 04:22:51,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109333.33333333333, ans=0.125 +2024-07-28 04:23:02,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=109360.0, ans=0.0 +2024-07-28 04:23:05,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=109373.33333333333, ans=0.125 +2024-07-28 04:23:05,836 INFO [train.py:1114] (0/4) Epoch 9, batch 250, loss[loss=0.2142, simple_loss=0.3026, pruned_loss=0.06289, over 4609.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2943, pruned_loss=0.06261, over 670793.79 frames. ], batch size: 16, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:23:08,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=109373.33333333333, ans=0.125 +2024-07-28 04:23:14,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=109386.66666666667, ans=0.0 +2024-07-28 04:23:26,175 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.084e+01 6.743e+01 8.358e+01 1.381e+02, threshold=1.349e+02, percent-clipped=2.0 +2024-07-28 04:23:34,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109426.66666666667, ans=0.0 +2024-07-28 04:23:35,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=109426.66666666667, ans=0.0 +2024-07-28 04:23:40,734 INFO [train.py:1114] (0/4) Epoch 9, batch 300, loss[loss=0.235, simple_loss=0.3133, pruned_loss=0.07832, over 4798.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2943, pruned_loss=0.06291, over 730713.74 frames. 
], batch size: 15, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:23:45,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=109440.0, ans=0.0 +2024-07-28 04:23:50,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=109453.33333333333, ans=0.125 +2024-07-28 04:23:52,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=109453.33333333333, ans=0.125 +2024-07-28 04:23:57,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=109466.66666666667, ans=0.0 +2024-07-28 04:24:02,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109480.0, ans=0.1 +2024-07-28 04:24:02,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=109480.0, ans=0.2 +2024-07-28 04:24:02,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=109480.0, ans=0.0 +2024-07-28 04:24:02,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=109480.0, ans=0.025 +2024-07-28 04:24:02,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109480.0, ans=0.1 +2024-07-28 04:24:09,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109493.33333333333, ans=0.0 +2024-07-28 04:24:14,376 INFO [train.py:1114] (0/4) Epoch 9, batch 350, loss[loss=0.1935, simple_loss=0.2733, pruned_loss=0.05687, over 4943.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2953, pruned_loss=0.06272, over 776481.63 frames. ], batch size: 12, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:24:16,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=109506.66666666667, ans=0.015 +2024-07-28 04:24:19,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=109506.66666666667, ans=0.025 +2024-07-28 04:24:27,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.59 vs. 
limit=15.0 +2024-07-28 04:24:32,534 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.878e+01 6.356e+01 6.901e+01 1.235e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 04:24:35,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109546.66666666667, ans=0.125 +2024-07-28 04:24:41,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=109560.0, ans=6.0 +2024-07-28 04:24:42,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109560.0, ans=0.125 +2024-07-28 04:24:46,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=109573.33333333333, ans=0.025 +2024-07-28 04:24:47,207 INFO [train.py:1114] (0/4) Epoch 9, batch 400, loss[loss=0.2538, simple_loss=0.3305, pruned_loss=0.08857, over 4688.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2943, pruned_loss=0.06244, over 813933.97 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:24:50,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109573.33333333333, ans=0.125 +2024-07-28 04:24:54,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=109586.66666666667, ans=0.125 +2024-07-28 04:25:04,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=109600.0, ans=0.2 +2024-07-28 04:25:13,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109626.66666666667, ans=0.1 +2024-07-28 04:25:20,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=109640.0, ans=0.125 +2024-07-28 04:25:21,008 INFO [train.py:1114] (0/4) Epoch 9, batch 450, loss[loss=0.2131, simple_loss=0.307, pruned_loss=0.05957, over 4631.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2936, pruned_loss=0.06174, over 839407.77 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:25:22,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=109640.0, ans=0.0 +2024-07-28 04:25:22,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=109640.0, ans=0.125 +2024-07-28 04:25:39,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.801e+01 6.257e+01 7.055e+01 9.311e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 04:25:40,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=109680.0, ans=0.125 +2024-07-28 04:25:46,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=109693.33333333333, ans=0.0 +2024-07-28 04:25:47,881 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.66 vs. 
limit=15.0 +2024-07-28 04:25:48,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=109693.33333333333, ans=0.2 +2024-07-28 04:25:53,420 INFO [train.py:1114] (0/4) Epoch 9, batch 500, loss[loss=0.2778, simple_loss=0.3538, pruned_loss=0.1009, over 4693.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2929, pruned_loss=0.06185, over 861610.12 frames. ], batch size: 15, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:26:12,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.11 vs. limit=22.5 +2024-07-28 04:26:14,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=109706.66666666667, ans=0.2 +2024-07-28 04:26:26,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109733.33333333333, ans=0.1 +2024-07-28 04:26:41,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109746.66666666667, ans=0.1 +2024-07-28 04:26:50,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109760.0, ans=0.125 +2024-07-28 04:26:54,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-07-28 04:26:55,085 INFO [train.py:1114] (0/4) Epoch 9, batch 550, loss[loss=0.1989, simple_loss=0.2882, pruned_loss=0.05481, over 4609.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2923, pruned_loss=0.06131, over 877645.07 frames. ], batch size: 17, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:27:03,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.73 vs. limit=10.0 +2024-07-28 04:27:10,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=109800.0, ans=0.0 +2024-07-28 04:27:14,726 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:27:15,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109800.0, ans=0.1 +2024-07-28 04:27:15,812 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.880e+01 6.464e+01 7.237e+01 1.061e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 04:27:25,201 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:27:26,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=109826.66666666667, ans=0.025 +2024-07-28 04:27:46,993 INFO [train.py:1114] (0/4) Epoch 9, batch 600, loss[loss=0.2321, simple_loss=0.3184, pruned_loss=0.07292, over 4658.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2922, pruned_loss=0.06117, over 892287.33 frames. 
], batch size: 16, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:28:05,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=109866.66666666667, ans=0.125 +2024-07-28 04:28:09,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=109880.0, ans=0.2 +2024-07-28 04:28:15,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109893.33333333333, ans=0.1 +2024-07-28 04:28:20,224 INFO [train.py:1114] (0/4) Epoch 9, batch 650, loss[loss=0.2012, simple_loss=0.2873, pruned_loss=0.05756, over 4762.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2924, pruned_loss=0.06147, over 903796.76 frames. ], batch size: 13, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:28:30,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=109920.0, ans=0.2 +2024-07-28 04:28:38,935 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.739e+01 6.277e+01 6.982e+01 1.071e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 04:28:39,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=109946.66666666667, ans=0.0 +2024-07-28 04:28:47,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109960.0, ans=0.1 +2024-07-28 04:28:51,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.44 vs. limit=15.0 +2024-07-28 04:28:53,379 INFO [train.py:1114] (0/4) Epoch 9, batch 700, loss[loss=0.2024, simple_loss=0.2869, pruned_loss=0.05896, over 4633.00 frames. ], tot_loss[loss=0.208, simple_loss=0.293, pruned_loss=0.06145, over 911810.30 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:28:57,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109973.33333333333, ans=0.0 +2024-07-28 04:29:01,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=109986.66666666667, ans=0.0 +2024-07-28 04:29:20,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110026.66666666667, ans=0.125 +2024-07-28 04:29:24,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=110026.66666666667, ans=0.125 +2024-07-28 04:29:27,314 INFO [train.py:1114] (0/4) Epoch 9, batch 750, loss[loss=0.2206, simple_loss=0.2951, pruned_loss=0.07304, over 4689.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2923, pruned_loss=0.06131, over 918706.72 frames. ], batch size: 13, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:29:46,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.66 vs. 
limit=12.0 +2024-07-28 04:29:47,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=110066.66666666667, ans=0.025 +2024-07-28 04:29:48,615 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.674e+01 6.132e+01 7.146e+01 1.139e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 04:29:54,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=110080.0, ans=0.2 +2024-07-28 04:30:03,386 INFO [train.py:1114] (0/4) Epoch 9, batch 800, loss[loss=0.1977, simple_loss=0.2865, pruned_loss=0.05447, over 4857.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2921, pruned_loss=0.06146, over 923442.65 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:30:05,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=110106.66666666667, ans=0.0 +2024-07-28 04:30:05,559 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:30:17,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.32 vs. limit=22.5 +2024-07-28 04:30:27,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110146.66666666667, ans=0.125 +2024-07-28 04:30:31,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=48.56 vs. limit=15.0 +2024-07-28 04:30:36,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=110173.33333333333, ans=0.2 +2024-07-28 04:30:37,053 INFO [train.py:1114] (0/4) Epoch 9, batch 850, loss[loss=0.2429, simple_loss=0.3355, pruned_loss=0.07512, over 4658.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2926, pruned_loss=0.06176, over 927344.02 frames. ], batch size: 14, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:30:44,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=110186.66666666667, ans=0.125 +2024-07-28 04:30:49,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110200.0, ans=0.125 +2024-07-28 04:30:55,614 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.250e+01 5.670e+01 6.591e+01 7.214e+01 1.079e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 04:31:10,559 INFO [train.py:1114] (0/4) Epoch 9, batch 900, loss[loss=0.1998, simple_loss=0.2862, pruned_loss=0.05672, over 4863.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2929, pruned_loss=0.06214, over 928250.60 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:31:39,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.42 vs. limit=15.0 +2024-07-28 04:31:41,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.27 vs. 
limit=10.0 +2024-07-28 04:31:42,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=110293.33333333333, ans=0.125 +2024-07-28 04:31:42,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110293.33333333333, ans=0.125 +2024-07-28 04:31:44,100 INFO [train.py:1114] (0/4) Epoch 9, batch 950, loss[loss=0.1754, simple_loss=0.2732, pruned_loss=0.03878, over 4775.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2927, pruned_loss=0.06171, over 929831.36 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:31:44,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=110306.66666666667, ans=10.0 +2024-07-28 04:31:48,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=110306.66666666667, ans=0.2 +2024-07-28 04:31:54,882 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:32:02,679 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.804e+01 6.637e+01 7.593e+01 9.914e+01, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 04:32:03,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.49 vs. limit=15.0 +2024-07-28 04:32:13,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.96 vs. limit=15.0 +2024-07-28 04:32:16,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=110360.0, ans=0.0 +2024-07-28 04:32:17,283 INFO [train.py:1114] (0/4) Epoch 9, batch 1000, loss[loss=0.192, simple_loss=0.2864, pruned_loss=0.04882, over 4958.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2925, pruned_loss=0.06149, over 929106.42 frames. ], batch size: 13, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:32:38,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-28 04:32:39,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110413.33333333333, ans=0.0 +2024-07-28 04:32:41,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=110413.33333333333, ans=0.0 +2024-07-28 04:32:52,774 INFO [train.py:1114] (0/4) Epoch 9, batch 1050, loss[loss=0.2282, simple_loss=0.3188, pruned_loss=0.06885, over 4877.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2918, pruned_loss=0.0611, over 931568.21 frames. 
], batch size: 14, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:32:57,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=110440.0, ans=0.1 +2024-07-28 04:33:21,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=110453.33333333333, ans=0.2 +2024-07-28 04:33:24,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=110466.66666666667, ans=0.0 +2024-07-28 04:33:37,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 5.646e+01 6.301e+01 7.018e+01 9.967e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 04:33:40,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=110480.0, ans=0.0 +2024-07-28 04:33:41,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0 +2024-07-28 04:33:41,982 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:33:50,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110493.33333333333, ans=0.125 +2024-07-28 04:33:51,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=110493.33333333333, ans=0.125 +2024-07-28 04:33:51,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=110493.33333333333, ans=0.125 +2024-07-28 04:33:58,452 INFO [train.py:1114] (0/4) Epoch 9, batch 1100, loss[loss=0.2179, simple_loss=0.3103, pruned_loss=0.0628, over 4894.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2919, pruned_loss=0.06122, over 933989.13 frames. ], batch size: 13, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:34:22,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.43 vs. limit=15.0 +2024-07-28 04:34:30,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=110560.0, ans=0.2 +2024-07-28 04:34:31,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=110573.33333333333, ans=0.2 +2024-07-28 04:34:32,509 INFO [train.py:1114] (0/4) Epoch 9, batch 1150, loss[loss=0.1827, simple_loss=0.2703, pruned_loss=0.04755, over 4891.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2916, pruned_loss=0.0612, over 934158.81 frames. 
], batch size: 13, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:34:36,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=110573.33333333333, ans=0.125 +2024-07-28 04:34:48,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=110600.0, ans=0.0 +2024-07-28 04:34:49,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=110600.0, ans=0.2 +2024-07-28 04:34:51,452 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.662e+01 6.289e+01 6.921e+01 1.035e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 04:34:55,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=15.0 +2024-07-28 04:35:02,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110626.66666666667, ans=0.1 +2024-07-28 04:35:06,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110640.0, ans=0.125 +2024-07-28 04:35:07,037 INFO [train.py:1114] (0/4) Epoch 9, batch 1200, loss[loss=0.25, simple_loss=0.35, pruned_loss=0.07496, over 4873.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2929, pruned_loss=0.06219, over 933706.45 frames. ], batch size: 14, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:35:08,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=110640.0, ans=0.0 +2024-07-28 04:35:19,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=110653.33333333333, ans=0.2 +2024-07-28 04:35:30,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110680.0, ans=0.125 +2024-07-28 04:35:38,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=110693.33333333333, ans=0.125 +2024-07-28 04:35:43,336 INFO [train.py:1114] (0/4) Epoch 9, batch 1250, loss[loss=0.2217, simple_loss=0.3123, pruned_loss=0.06559, over 4795.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2935, pruned_loss=0.06156, over 937436.52 frames. 
], batch size: 15, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:36:02,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110733.33333333333, ans=0.125 +2024-07-28 04:36:06,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110733.33333333333, ans=0.125 +2024-07-28 04:36:07,227 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.896e+01 5.807e+01 6.256e+01 7.154e+01 1.109e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 04:36:13,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=110746.66666666667, ans=0.0 +2024-07-28 04:36:15,990 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:36:18,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=110760.0, ans=0.2 +2024-07-28 04:36:18,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.94 vs. limit=15.0 +2024-07-28 04:36:21,788 INFO [train.py:1114] (0/4) Epoch 9, batch 1300, loss[loss=0.2339, simple_loss=0.3072, pruned_loss=0.08025, over 4710.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2929, pruned_loss=0.0613, over 938868.47 frames. ], batch size: 19, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:36:33,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=110786.66666666667, ans=0.0 +2024-07-28 04:37:23,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=110826.66666666667, ans=0.125 +2024-07-28 04:37:23,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=110826.66666666667, ans=0.125 +2024-07-28 04:37:25,172 INFO [train.py:1114] (0/4) Epoch 9, batch 1350, loss[loss=0.2115, simple_loss=0.2965, pruned_loss=0.06326, over 4762.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2924, pruned_loss=0.06135, over 940908.05 frames. 
], batch size: 13, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:37:26,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110840.0, ans=0.125 +2024-07-28 04:37:29,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=110840.0, ans=0.125 +2024-07-28 04:37:30,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110840.0, ans=0.1 +2024-07-28 04:37:36,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=110853.33333333333, ans=0.125 +2024-07-28 04:37:36,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110853.33333333333, ans=0.0 +2024-07-28 04:37:40,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110866.66666666667, ans=0.1 +2024-07-28 04:37:43,875 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.724e+01 6.443e+01 7.516e+01 1.167e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 04:37:52,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=110880.0, ans=0.125 +2024-07-28 04:37:54,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.44 vs. limit=15.0 +2024-07-28 04:38:02,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=110906.66666666667, ans=0.025 +2024-07-28 04:38:02,854 INFO [train.py:1114] (0/4) Epoch 9, batch 1400, loss[loss=0.1681, simple_loss=0.2553, pruned_loss=0.04046, over 4717.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2921, pruned_loss=0.06175, over 942664.85 frames. ], batch size: 11, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:38:02,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=110906.66666666667, ans=0.0 +2024-07-28 04:38:23,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110920.0, ans=0.1 +2024-07-28 04:38:24,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-07-28 04:38:37,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=110960.0, ans=0.0 +2024-07-28 04:38:42,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=110960.0, ans=0.0 +2024-07-28 04:38:42,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=110960.0, ans=0.125 +2024-07-28 04:38:44,497 INFO [train.py:1114] (0/4) Epoch 9, batch 1450, loss[loss=0.2201, simple_loss=0.3161, pruned_loss=0.06206, over 4683.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.293, pruned_loss=0.0617, over 942396.41 frames. 
], batch size: 15, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:38:59,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=111000.0, ans=0.0 +2024-07-28 04:39:03,122 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 5.882e+01 6.432e+01 7.495e+01 9.959e+01, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 04:39:04,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=111013.33333333333, ans=0.0 +2024-07-28 04:39:07,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111013.33333333333, ans=0.125 +2024-07-28 04:39:14,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.64 vs. limit=22.5 +2024-07-28 04:39:16,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111026.66666666667, ans=0.125 +2024-07-28 04:39:19,213 INFO [train.py:1114] (0/4) Epoch 9, batch 1500, loss[loss=0.2195, simple_loss=0.3192, pruned_loss=0.05989, over 4810.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2934, pruned_loss=0.06184, over 941832.98 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:39:26,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=111053.33333333333, ans=0.0 +2024-07-28 04:39:29,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-28 04:39:35,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111066.66666666667, ans=0.125 +2024-07-28 04:39:39,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=111080.0, ans=0.035 +2024-07-28 04:39:39,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=111080.0, ans=0.0 +2024-07-28 04:39:50,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.05 vs. limit=15.0 +2024-07-28 04:39:56,249 INFO [train.py:1114] (0/4) Epoch 9, batch 1550, loss[loss=0.2294, simple_loss=0.3253, pruned_loss=0.06669, over 4899.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2947, pruned_loss=0.06276, over 938073.03 frames. ], batch size: 15, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:39:58,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111106.66666666667, ans=0.125 +2024-07-28 04:40:02,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=111120.0, ans=0.125 +2024-07-28 04:40:03,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=111120.0, ans=0.0 +2024-07-28 04:40:11,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. 
limit=15.0 +2024-07-28 04:40:14,630 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.781e+01 6.614e+01 7.335e+01 1.076e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-28 04:40:20,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111146.66666666667, ans=0.1 +2024-07-28 04:40:21,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=111146.66666666667, ans=22.5 +2024-07-28 04:40:29,184 INFO [train.py:1114] (0/4) Epoch 9, batch 1600, loss[loss=0.2046, simple_loss=0.2965, pruned_loss=0.05635, over 4867.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2943, pruned_loss=0.06297, over 936460.79 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:40:32,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.81 vs. limit=22.5 +2024-07-28 04:40:35,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=111186.66666666667, ans=0.09899494936611666 +2024-07-28 04:40:41,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=111186.66666666667, ans=0.125 +2024-07-28 04:40:59,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=111226.66666666667, ans=0.2 +2024-07-28 04:41:03,419 INFO [train.py:1114] (0/4) Epoch 9, batch 1650, loss[loss=0.2468, simple_loss=0.3381, pruned_loss=0.0777, over 4665.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.294, pruned_loss=0.06293, over 936470.12 frames. ], batch size: 14, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:41:14,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111253.33333333333, ans=0.1 +2024-07-28 04:41:18,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=111266.66666666667, ans=0.09899494936611666 +2024-07-28 04:41:21,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.57 vs. limit=15.0 +2024-07-28 04:41:21,886 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.810e+01 6.591e+01 7.411e+01 1.241e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 04:41:24,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111280.0, ans=0.1 +2024-07-28 04:41:30,398 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0 +2024-07-28 04:41:36,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=111293.33333333333, ans=0.0 +2024-07-28 04:41:38,637 INFO [train.py:1114] (0/4) Epoch 9, batch 1700, loss[loss=0.1979, simple_loss=0.2675, pruned_loss=0.06418, over 4716.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2931, pruned_loss=0.0624, over 938697.92 frames. 
], batch size: 11, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:41:38,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111306.66666666667, ans=0.125 +2024-07-28 04:41:41,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=111306.66666666667, ans=0.025 +2024-07-28 04:41:41,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111306.66666666667, ans=0.125 +2024-07-28 04:41:46,962 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.52 vs. limit=22.5 +2024-07-28 04:41:57,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=111333.33333333333, ans=0.125 +2024-07-28 04:42:01,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=111346.66666666667, ans=0.125 +2024-07-28 04:42:01,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111346.66666666667, ans=0.1 +2024-07-28 04:42:05,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=111346.66666666667, ans=0.0 +2024-07-28 04:42:11,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=111360.0, ans=0.125 +2024-07-28 04:42:13,649 INFO [train.py:1114] (0/4) Epoch 9, batch 1750, loss[loss=0.172, simple_loss=0.2453, pruned_loss=0.04939, over 4813.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2931, pruned_loss=0.062, over 939840.41 frames. ], batch size: 11, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:42:16,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=111373.33333333333, ans=0.125 +2024-07-28 04:42:21,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=111386.66666666667, ans=0.2 +2024-07-28 04:42:27,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.19 vs. limit=10.0 +2024-07-28 04:42:28,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111400.0, ans=0.125 +2024-07-28 04:42:29,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=111400.0, ans=0.025 +2024-07-28 04:42:32,286 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.540e+01 6.107e+01 6.918e+01 9.511e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 04:42:43,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.69 vs. 
limit=22.5 +2024-07-28 04:42:44,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=111426.66666666667, ans=0.0 +2024-07-28 04:42:44,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=111426.66666666667, ans=0.125 +2024-07-28 04:42:47,084 INFO [train.py:1114] (0/4) Epoch 9, batch 1800, loss[loss=0.2243, simple_loss=0.305, pruned_loss=0.07181, over 4641.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2943, pruned_loss=0.06255, over 940058.73 frames. ], batch size: 13, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:42:47,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111440.0, ans=0.1 +2024-07-28 04:42:51,288 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:42:54,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=111453.33333333333, ans=0.125 +2024-07-28 04:43:02,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=111466.66666666667, ans=0.0 +2024-07-28 04:43:18,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=111493.33333333333, ans=0.0 +2024-07-28 04:43:22,194 INFO [train.py:1114] (0/4) Epoch 9, batch 1850, loss[loss=0.2082, simple_loss=0.3024, pruned_loss=0.05707, over 4811.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2937, pruned_loss=0.06213, over 940204.96 frames. ], batch size: 14, lr: 8.51e-03, grad_scale: 32.0 +2024-07-28 04:43:27,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=111506.66666666667, ans=0.2 +2024-07-28 04:43:30,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111520.0, ans=0.125 +2024-07-28 04:43:41,632 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.789e+01 6.622e+01 8.000e+01 1.293e+02, threshold=1.324e+02, percent-clipped=1.0 +2024-07-28 04:43:51,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111560.0, ans=0.125 +2024-07-28 04:43:52,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=12.0 +2024-07-28 04:43:54,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=15.0 +2024-07-28 04:43:56,532 INFO [train.py:1114] (0/4) Epoch 9, batch 1900, loss[loss=0.2093, simple_loss=0.2939, pruned_loss=0.06235, over 4659.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.294, pruned_loss=0.0623, over 941814.82 frames. 
], batch size: 14, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:44:03,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111586.66666666667, ans=0.1 +2024-07-28 04:44:10,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=111600.0, ans=0.0 +2024-07-28 04:44:11,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111600.0, ans=0.125 +2024-07-28 04:44:18,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=15.0 +2024-07-28 04:44:19,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=111613.33333333333, ans=0.125 +2024-07-28 04:44:24,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=12.0 +2024-07-28 04:44:24,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=111626.66666666667, ans=0.0 +2024-07-28 04:44:29,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=111640.0, ans=0.125 +2024-07-28 04:44:29,956 INFO [train.py:1114] (0/4) Epoch 9, batch 1950, loss[loss=0.2357, simple_loss=0.3154, pruned_loss=0.07805, over 4896.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.295, pruned_loss=0.06237, over 943871.88 frames. ], batch size: 13, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:44:32,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111640.0, ans=0.1 +2024-07-28 04:44:33,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=111640.0, ans=0.125 +2024-07-28 04:44:37,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=111653.33333333333, ans=10.0 +2024-07-28 04:44:46,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=111666.66666666667, ans=0.0 +2024-07-28 04:44:47,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=111666.66666666667, ans=0.0 +2024-07-28 04:44:50,740 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 5.906e+01 6.292e+01 6.984e+01 1.022e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 04:44:56,534 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.25 vs. limit=15.0 +2024-07-28 04:45:01,507 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:45:03,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.75 vs. limit=15.0 +2024-07-28 04:45:05,314 INFO [train.py:1114] (0/4) Epoch 9, batch 2000, loss[loss=0.1658, simple_loss=0.2353, pruned_loss=0.04815, over 4805.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2957, pruned_loss=0.06286, over 941006.00 frames. 
], batch size: 11, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:45:05,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111706.66666666667, ans=0.0 +2024-07-28 04:45:06,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111706.66666666667, ans=0.1 +2024-07-28 04:45:23,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111733.33333333333, ans=0.125 +2024-07-28 04:45:30,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111746.66666666667, ans=0.125 +2024-07-28 04:45:33,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111746.66666666667, ans=0.1 +2024-07-28 04:45:38,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=111760.0, ans=0.2 +2024-07-28 04:45:42,829 INFO [train.py:1114] (0/4) Epoch 9, batch 2050, loss[loss=0.1837, simple_loss=0.2732, pruned_loss=0.04716, over 4596.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2945, pruned_loss=0.0623, over 939018.97 frames. ], batch size: 11, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:45:51,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.92 vs. limit=15.0 +2024-07-28 04:45:57,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.64 vs. limit=6.0 +2024-07-28 04:46:01,609 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.743e+01 6.420e+01 7.803e+01 1.541e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-28 04:46:15,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111840.0, ans=0.125 +2024-07-28 04:46:16,243 INFO [train.py:1114] (0/4) Epoch 9, batch 2100, loss[loss=0.2114, simple_loss=0.3089, pruned_loss=0.05695, over 4755.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2936, pruned_loss=0.06201, over 940968.68 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:46:27,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.19 vs. 
limit=22.5 +2024-07-28 04:46:32,391 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:46:33,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=111866.66666666667, ans=15.0 +2024-07-28 04:46:36,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=111866.66666666667, ans=0.2 +2024-07-28 04:46:37,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111880.0, ans=0.125 +2024-07-28 04:46:39,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111880.0, ans=0.125 +2024-07-28 04:46:41,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.76 vs. limit=12.0 +2024-07-28 04:46:43,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=111880.0, ans=0.125 +2024-07-28 04:46:43,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111880.0, ans=0.125 +2024-07-28 04:46:44,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=111893.33333333333, ans=0.2 +2024-07-28 04:46:44,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=111893.33333333333, ans=0.2 +2024-07-28 04:46:50,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=111906.66666666667, ans=10.0 +2024-07-28 04:46:51,240 INFO [train.py:1114] (0/4) Epoch 9, batch 2150, loss[loss=0.1859, simple_loss=0.2769, pruned_loss=0.04742, over 4889.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2932, pruned_loss=0.06202, over 944087.05 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:46:51,415 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:47:03,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111920.0, ans=0.1 +2024-07-28 04:47:08,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=111933.33333333333, ans=0.125 +2024-07-28 04:47:12,510 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.843e+01 7.038e+01 8.009e+01 1.104e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-28 04:47:15,379 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:47:18,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=111946.66666666667, ans=0.2 +2024-07-28 04:47:26,783 INFO [train.py:1114] (0/4) Epoch 9, batch 2200, loss[loss=0.1893, simple_loss=0.2834, pruned_loss=0.04756, over 4817.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2932, pruned_loss=0.0619, over 943412.93 frames. 
], batch size: 14, lr: 8.50e-03, grad_scale: 32.0 +2024-07-28 04:47:36,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=111986.66666666667, ans=0.1 +2024-07-28 04:47:39,555 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-84000.pt +2024-07-28 04:47:42,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112000.0, ans=0.0 +2024-07-28 04:47:56,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=112026.66666666667, ans=0.0 +2024-07-28 04:48:00,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=112026.66666666667, ans=0.2 +2024-07-28 04:48:00,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112026.66666666667, ans=0.125 +2024-07-28 04:48:00,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=12.0 +2024-07-28 04:48:01,903 INFO [train.py:1114] (0/4) Epoch 9, batch 2250, loss[loss=0.1976, simple_loss=0.29, pruned_loss=0.05261, over 4702.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2942, pruned_loss=0.06232, over 941981.78 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:48:02,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=22.5 +2024-07-28 04:48:13,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=112053.33333333333, ans=0.5 +2024-07-28 04:48:16,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0 +2024-07-28 04:48:20,921 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 6.028e+01 6.824e+01 8.191e+01 1.096e+02, threshold=1.365e+02, percent-clipped=0.0 +2024-07-28 04:48:34,841 INFO [train.py:1114] (0/4) Epoch 9, batch 2300, loss[loss=0.1889, simple_loss=0.2697, pruned_loss=0.054, over 4954.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2917, pruned_loss=0.0616, over 939610.53 frames. ], batch size: 12, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:48:36,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=112106.66666666667, ans=0.0 +2024-07-28 04:48:37,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=112106.66666666667, ans=0.025 +2024-07-28 04:48:48,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=112133.33333333333, ans=0.125 +2024-07-28 04:48:51,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.84 vs. 
limit=10.0 +2024-07-28 04:49:37,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112146.66666666667, ans=0.1 +2024-07-28 04:49:47,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.24 vs. limit=22.5 +2024-07-28 04:49:52,131 INFO [train.py:1114] (0/4) Epoch 9, batch 2350, loss[loss=0.233, simple_loss=0.3225, pruned_loss=0.07171, over 4638.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2911, pruned_loss=0.06137, over 941632.87 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:49:55,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=112173.33333333333, ans=0.125 +2024-07-28 04:49:56,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=112173.33333333333, ans=0.0 +2024-07-28 04:49:56,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=112173.33333333333, ans=0.125 +2024-07-28 04:49:59,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=112186.66666666667, ans=0.125 +2024-07-28 04:50:06,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112200.0, ans=0.1 +2024-07-28 04:50:07,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=112200.0, ans=0.0 +2024-07-28 04:50:11,751 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 5.463e+01 6.108e+01 6.939e+01 1.035e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 04:50:12,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=112213.33333333333, ans=0.125 +2024-07-28 04:50:15,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=112213.33333333333, ans=0.2 +2024-07-28 04:50:20,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112226.66666666667, ans=0.125 +2024-07-28 04:50:21,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=112226.66666666667, ans=0.125 +2024-07-28 04:50:25,460 INFO [train.py:1114] (0/4) Epoch 9, batch 2400, loss[loss=0.1781, simple_loss=0.262, pruned_loss=0.04712, over 4642.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2913, pruned_loss=0.06091, over 941490.10 frames. ], batch size: 12, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:50:35,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0 +2024-07-28 04:50:39,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. 
limit=15.0 +2024-07-28 04:50:56,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112280.0, ans=0.1 +2024-07-28 04:50:59,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=112293.33333333333, ans=0.125 +2024-07-28 04:51:05,891 INFO [train.py:1114] (0/4) Epoch 9, batch 2450, loss[loss=0.2184, simple_loss=0.3094, pruned_loss=0.06371, over 4689.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2926, pruned_loss=0.06127, over 937043.22 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:51:06,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=112306.66666666667, ans=0.2 +2024-07-28 04:51:07,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112306.66666666667, ans=0.0 +2024-07-28 04:51:11,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=112306.66666666667, ans=0.07 +2024-07-28 04:51:11,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.60 vs. limit=22.5 +2024-07-28 04:51:20,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112333.33333333333, ans=0.1 +2024-07-28 04:51:22,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112333.33333333333, ans=0.0 +2024-07-28 04:51:25,466 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.551e+01 5.862e+01 6.434e+01 7.688e+01 1.164e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 04:51:40,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112360.0, ans=0.125 +2024-07-28 04:51:43,135 INFO [train.py:1114] (0/4) Epoch 9, batch 2500, loss[loss=0.221, simple_loss=0.3061, pruned_loss=0.06793, over 4813.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2923, pruned_loss=0.06082, over 938927.04 frames. ], batch size: 14, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:51:46,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=112373.33333333333, ans=0.0 +2024-07-28 04:51:48,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=112373.33333333333, ans=0.125 +2024-07-28 04:51:50,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=112386.66666666667, ans=0.0 +2024-07-28 04:51:53,617 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.08 vs. 
limit=15.0 +2024-07-28 04:51:59,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=112400.0, ans=0.5 +2024-07-28 04:52:03,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112413.33333333333, ans=0.125 +2024-07-28 04:52:10,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112426.66666666667, ans=0.1 +2024-07-28 04:52:13,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.94 vs. limit=22.5 +2024-07-28 04:52:14,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=112426.66666666667, ans=0.025 +2024-07-28 04:52:15,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.59 vs. limit=22.5 +2024-07-28 04:52:16,762 INFO [train.py:1114] (0/4) Epoch 9, batch 2550, loss[loss=0.175, simple_loss=0.2546, pruned_loss=0.04765, over 4808.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2922, pruned_loss=0.06059, over 938614.78 frames. ], batch size: 11, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:52:17,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112440.0, ans=0.125 +2024-07-28 04:52:18,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=112440.0, ans=0.125 +2024-07-28 04:52:28,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112453.33333333333, ans=0.1 +2024-07-28 04:52:36,353 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.845e+01 6.430e+01 7.273e+01 1.102e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 04:52:36,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=112480.0, ans=0.0 +2024-07-28 04:52:38,775 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.37 vs. limit=15.0 +2024-07-28 04:52:50,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=112506.66666666667, ans=0.035 +2024-07-28 04:52:50,644 INFO [train.py:1114] (0/4) Epoch 9, batch 2600, loss[loss=0.1835, simple_loss=0.2699, pruned_loss=0.04858, over 4896.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.293, pruned_loss=0.0609, over 938285.48 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:52:52,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.36 vs. 
limit=15.0 +2024-07-28 04:52:55,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112506.66666666667, ans=0.125 +2024-07-28 04:53:02,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=112520.0, ans=0.2 +2024-07-28 04:53:24,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=112560.0, ans=0.025 +2024-07-28 04:53:25,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=112573.33333333333, ans=0.2 +2024-07-28 04:53:25,598 INFO [train.py:1114] (0/4) Epoch 9, batch 2650, loss[loss=0.2336, simple_loss=0.3158, pruned_loss=0.07574, over 4633.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.293, pruned_loss=0.06069, over 940436.35 frames. ], batch size: 16, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:53:35,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=112586.66666666667, ans=0.05 +2024-07-28 04:53:37,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=112586.66666666667, ans=0.125 +2024-07-28 04:53:42,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=112600.0, ans=0.025 +2024-07-28 04:53:42,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=112600.0, ans=0.07 +2024-07-28 04:53:44,514 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.626e+01 6.228e+01 7.272e+01 1.238e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 04:53:47,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112613.33333333333, ans=0.125 +2024-07-28 04:53:50,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=112613.33333333333, ans=0.2 +2024-07-28 04:53:58,567 INFO [train.py:1114] (0/4) Epoch 9, batch 2700, loss[loss=0.2161, simple_loss=0.3093, pruned_loss=0.06145, over 4744.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2939, pruned_loss=0.06146, over 940618.71 frames. ], batch size: 14, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:53:59,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112640.0, ans=0.125 +2024-07-28 04:54:08,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=112653.33333333333, ans=0.07 +2024-07-28 04:54:11,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=112666.66666666667, ans=0.5 +2024-07-28 04:54:24,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=112680.0, ans=0.125 +2024-07-28 04:54:32,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112706.66666666667, ans=0.125 +2024-07-28 04:54:32,614 INFO [train.py:1114] (0/4) Epoch 9, batch 2750, loss[loss=0.1773, simple_loss=0.2643, pruned_loss=0.04513, over 4698.00 frames. 
], tot_loss[loss=0.2081, simple_loss=0.2934, pruned_loss=0.0614, over 940396.61 frames. ], batch size: 12, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:54:41,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=112720.0, ans=0.125 +2024-07-28 04:54:44,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112720.0, ans=0.125 +2024-07-28 04:54:51,679 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 6.173e+01 6.885e+01 7.984e+01 1.102e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 04:55:01,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=112760.0, ans=0.2 +2024-07-28 04:55:05,838 INFO [train.py:1114] (0/4) Epoch 9, batch 2800, loss[loss=0.3045, simple_loss=0.3533, pruned_loss=0.1278, over 3234.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2937, pruned_loss=0.06172, over 938060.43 frames. ], batch size: 36, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:55:07,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=112773.33333333333, ans=0.125 +2024-07-28 04:55:10,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.93 vs. limit=15.0 +2024-07-28 04:55:23,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=112800.0, ans=0.025 +2024-07-28 04:55:25,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.00 vs. limit=15.0 +2024-07-28 04:55:31,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=112813.33333333333, ans=0.125 +2024-07-28 04:55:39,213 INFO [train.py:1114] (0/4) Epoch 9, batch 2850, loss[loss=0.2319, simple_loss=0.312, pruned_loss=0.07588, over 4956.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2943, pruned_loss=0.06241, over 935977.60 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:55:39,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112840.0, ans=0.1 +2024-07-28 04:55:58,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.781e+01 6.339e+01 7.378e+01 1.144e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 04:55:59,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.74 vs. 
limit=10.0 +2024-07-28 04:56:00,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=112880.0, ans=0.125 +2024-07-28 04:56:04,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=112880.0, ans=0.025 +2024-07-28 04:56:06,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=112893.33333333333, ans=0.0 +2024-07-28 04:56:07,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112893.33333333333, ans=0.125 +2024-07-28 04:56:08,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=112893.33333333333, ans=0.125 +2024-07-28 04:56:10,069 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.61 vs. limit=15.0 +2024-07-28 04:56:12,161 INFO [train.py:1114] (0/4) Epoch 9, batch 2900, loss[loss=0.2358, simple_loss=0.3196, pruned_loss=0.07597, over 4827.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2944, pruned_loss=0.06165, over 940088.25 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:12,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=112906.66666666667, ans=0.125 +2024-07-28 04:56:34,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=112946.66666666667, ans=0.0 +2024-07-28 04:56:37,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=112946.66666666667, ans=0.2 +2024-07-28 04:56:37,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.71 vs. limit=15.0 +2024-07-28 04:56:47,509 INFO [train.py:1114] (0/4) Epoch 9, batch 2950, loss[loss=0.2151, simple_loss=0.2918, pruned_loss=0.06921, over 4700.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.293, pruned_loss=0.06205, over 938869.99 frames. ], batch size: 12, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:54,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=112973.33333333333, ans=0.125 +2024-07-28 04:57:00,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=112986.66666666667, ans=0.125 +2024-07-28 04:57:10,754 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.914e+01 6.880e+01 7.946e+01 1.236e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 04:57:21,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=113026.66666666667, ans=0.125 +2024-07-28 04:57:24,700 INFO [train.py:1114] (0/4) Epoch 9, batch 3000, loss[loss=0.1889, simple_loss=0.2656, pruned_loss=0.0561, over 4753.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2924, pruned_loss=0.06164, over 938111.62 frames. 
], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:57:24,700 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 04:57:37,296 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.1766, simple_loss=0.2807, pruned_loss=0.03626, over 944034.00 frames. +2024-07-28 04:57:37,297 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 04:58:00,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113080.0, ans=0.125 +2024-07-28 04:58:04,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=113093.33333333333, ans=0.04949747468305833 +2024-07-28 04:58:05,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113093.33333333333, ans=0.2 +2024-07-28 04:58:06,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-28 04:58:11,102 INFO [train.py:1114] (0/4) Epoch 9, batch 3050, loss[loss=0.1687, simple_loss=0.264, pruned_loss=0.03673, over 4643.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2929, pruned_loss=0.06158, over 937069.45 frames. ], batch size: 12, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:18,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=113120.0, ans=0.2 +2024-07-28 04:58:21,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=113120.0, ans=0.125 +2024-07-28 04:58:25,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=113133.33333333333, ans=0.0 +2024-07-28 04:58:26,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.01 vs. limit=15.0 +2024-07-28 04:58:28,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113133.33333333333, ans=0.125 +2024-07-28 04:58:32,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.66 vs. limit=15.0 +2024-07-28 04:58:32,583 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.528e+01 6.161e+01 6.934e+01 1.105e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 04:58:35,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. limit=15.0 +2024-07-28 04:58:39,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=113160.0, ans=0.0 +2024-07-28 04:58:47,586 INFO [train.py:1114] (0/4) Epoch 9, batch 3100, loss[loss=0.2156, simple_loss=0.3019, pruned_loss=0.06468, over 4621.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2916, pruned_loss=0.06124, over 937928.51 frames. 
], batch size: 16, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:04,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=113200.0, ans=0.125 +2024-07-28 04:59:04,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=113200.0, ans=0.0 +2024-07-28 04:59:22,194 INFO [train.py:1114] (0/4) Epoch 9, batch 3150, loss[loss=0.2387, simple_loss=0.3132, pruned_loss=0.08205, over 4643.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2921, pruned_loss=0.06142, over 937978.17 frames. ], batch size: 17, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:32,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113253.33333333333, ans=0.0 +2024-07-28 04:59:37,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=113266.66666666667, ans=0.125 +2024-07-28 04:59:41,417 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.794e+01 6.244e+01 6.965e+01 1.084e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 04:59:55,527 INFO [train.py:1114] (0/4) Epoch 9, batch 3200, loss[loss=0.2413, simple_loss=0.3187, pruned_loss=0.08191, over 4839.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2912, pruned_loss=0.06098, over 939750.61 frames. ], batch size: 13, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:59,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113306.66666666667, ans=0.1 +2024-07-28 05:00:05,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.58 vs. limit=12.0 +2024-07-28 05:00:07,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.23 vs. limit=15.0 +2024-07-28 05:00:11,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=113333.33333333333, ans=0.125 +2024-07-28 05:00:32,287 INFO [train.py:1114] (0/4) Epoch 9, batch 3250, loss[loss=0.1788, simple_loss=0.2748, pruned_loss=0.04142, over 4941.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2914, pruned_loss=0.06101, over 940712.16 frames. ], batch size: 14, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:00:35,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=113373.33333333333, ans=0.5 +2024-07-28 05:00:40,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113386.66666666667, ans=0.125 +2024-07-28 05:00:47,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=113400.0, ans=0.0 +2024-07-28 05:00:51,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.809e+01 6.527e+01 7.156e+01 1.090e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 05:00:54,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=113413.33333333333, ans=0.125 +2024-07-28 05:01:00,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.85 vs. 
limit=6.0 +2024-07-28 05:01:01,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113426.66666666667, ans=0.1 +2024-07-28 05:01:03,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.20 vs. limit=15.0 +2024-07-28 05:01:03,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113426.66666666667, ans=0.0 +2024-07-28 05:01:05,601 INFO [train.py:1114] (0/4) Epoch 9, batch 3300, loss[loss=0.23, simple_loss=0.3099, pruned_loss=0.07507, over 4703.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2916, pruned_loss=0.06153, over 941328.58 frames. ], batch size: 19, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:08,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=113440.0, ans=0.125 +2024-07-28 05:01:08,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.85 vs. limit=6.0 +2024-07-28 05:01:14,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.93 vs. limit=10.0 +2024-07-28 05:01:17,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=113453.33333333333, ans=0.5 +2024-07-28 05:01:19,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=113466.66666666667, ans=0.125 +2024-07-28 05:01:26,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=113480.0, ans=0.0 +2024-07-28 05:01:26,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=113480.0, ans=15.0 +2024-07-28 05:01:35,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=113493.33333333333, ans=0.2 +2024-07-28 05:01:40,562 INFO [train.py:1114] (0/4) Epoch 9, batch 3350, loss[loss=0.1994, simple_loss=0.2817, pruned_loss=0.05852, over 4609.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.292, pruned_loss=0.06147, over 938997.05 frames. ], batch size: 17, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:45,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113506.66666666667, ans=0.125 +2024-07-28 05:01:51,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=113520.0, ans=0.125 +2024-07-28 05:01:52,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.06 vs. limit=10.0 +2024-07-28 05:01:54,646 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.13 vs. 
limit=22.5 +2024-07-28 05:02:00,185 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.762e+01 6.208e+01 6.963e+01 1.151e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 05:02:06,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=113546.66666666667, ans=0.125 +2024-07-28 05:02:08,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113546.66666666667, ans=0.1 +2024-07-28 05:02:19,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113560.0, ans=0.0 +2024-07-28 05:02:21,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=113560.0, ans=0.125 +2024-07-28 05:02:21,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=113560.0, ans=0.025 +2024-07-28 05:02:22,812 INFO [train.py:1114] (0/4) Epoch 9, batch 3400, loss[loss=0.1934, simple_loss=0.2684, pruned_loss=0.05916, over 4795.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2918, pruned_loss=0.06136, over 937796.58 frames. ], batch size: 11, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:02:26,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113573.33333333333, ans=0.2 +2024-07-28 05:02:28,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=113586.66666666667, ans=0.025 +2024-07-28 05:02:30,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113586.66666666667, ans=0.2 +2024-07-28 05:02:31,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113586.66666666667, ans=0.125 +2024-07-28 05:02:56,811 INFO [train.py:1114] (0/4) Epoch 9, batch 3450, loss[loss=0.1977, simple_loss=0.2744, pruned_loss=0.06046, over 4669.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2922, pruned_loss=0.06118, over 938145.05 frames. ], batch size: 19, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:05,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=113653.33333333333, ans=0.025 +2024-07-28 05:03:16,039 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 6.050e+01 6.762e+01 7.636e+01 1.132e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 05:03:19,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113680.0, ans=0.125 +2024-07-28 05:03:20,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=113680.0, ans=0.0 +2024-07-28 05:03:20,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=113680.0, ans=0.125 +2024-07-28 05:03:29,918 INFO [train.py:1114] (0/4) Epoch 9, batch 3500, loss[loss=0.1888, simple_loss=0.273, pruned_loss=0.05226, over 4946.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2917, pruned_loss=0.06085, over 938767.41 frames. 
], batch size: 12, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:32,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113706.66666666667, ans=0.125 +2024-07-28 05:03:33,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.93 vs. limit=22.5 +2024-07-28 05:03:35,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=113706.66666666667, ans=0.025 +2024-07-28 05:03:55,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.85 vs. limit=12.0 +2024-07-28 05:04:01,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.26 vs. limit=10.0 +2024-07-28 05:04:06,622 INFO [train.py:1114] (0/4) Epoch 9, batch 3550, loss[loss=0.2117, simple_loss=0.3059, pruned_loss=0.05873, over 4667.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2916, pruned_loss=0.06058, over 939086.04 frames. ], batch size: 14, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:07,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=113773.33333333333, ans=0.0 +2024-07-28 05:04:20,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=113800.0, ans=0.125 +2024-07-28 05:04:25,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=113800.0, ans=0.025 +2024-07-28 05:04:26,153 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.609e+01 6.345e+01 7.145e+01 1.049e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 05:04:40,131 INFO [train.py:1114] (0/4) Epoch 9, batch 3600, loss[loss=0.2186, simple_loss=0.3082, pruned_loss=0.06451, over 4972.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.292, pruned_loss=0.06092, over 941133.12 frames. ], batch size: 13, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:45,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.75 vs. limit=12.0 +2024-07-28 05:04:59,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.45 vs. limit=15.0 +2024-07-28 05:04:59,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113880.0, ans=0.125 +2024-07-28 05:05:00,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=113880.0, ans=0.0 +2024-07-28 05:05:02,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=113880.0, ans=0.0 +2024-07-28 05:05:12,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=113893.33333333333, ans=0.125 +2024-07-28 05:05:13,673 INFO [train.py:1114] (0/4) Epoch 9, batch 3650, loss[loss=0.2084, simple_loss=0.3041, pruned_loss=0.05636, over 4910.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2907, pruned_loss=0.06023, over 942200.58 frames. 
], batch size: 15, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:05:32,935 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.862e+01 6.678e+01 8.090e+01 1.321e+02, threshold=1.336e+02, percent-clipped=1.0 +2024-07-28 05:05:44,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113960.0, ans=0.1 +2024-07-28 05:05:46,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.00 vs. limit=15.0 +2024-07-28 05:05:47,151 INFO [train.py:1114] (0/4) Epoch 9, batch 3700, loss[loss=0.1786, simple_loss=0.2795, pruned_loss=0.03883, over 4941.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2908, pruned_loss=0.05992, over 942622.11 frames. ], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:05:55,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113986.66666666667, ans=0.1 +2024-07-28 05:05:58,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113986.66666666667, ans=0.1 +2024-07-28 05:06:07,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.70 vs. limit=15.0 +2024-07-28 05:06:20,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=114026.66666666667, ans=0.09899494936611666 +2024-07-28 05:06:24,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.59 vs. limit=15.0 +2024-07-28 05:06:24,422 INFO [train.py:1114] (0/4) Epoch 9, batch 3750, loss[loss=0.2128, simple_loss=0.2929, pruned_loss=0.06631, over 4815.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2907, pruned_loss=0.05975, over 943892.68 frames. ], batch size: 11, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:06:27,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=114040.0, ans=0.125 +2024-07-28 05:06:29,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-07-28 05:06:44,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.778e+01 6.645e+01 7.408e+01 1.039e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:06:49,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=114080.0, ans=0.025 +2024-07-28 05:06:51,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=114093.33333333333, ans=0.125 +2024-07-28 05:06:58,247 INFO [train.py:1114] (0/4) Epoch 9, batch 3800, loss[loss=0.226, simple_loss=0.3263, pruned_loss=0.06288, over 4809.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2908, pruned_loss=0.05993, over 942035.02 frames. 
], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:10,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=114120.0, ans=0.125 +2024-07-28 05:07:12,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0 +2024-07-28 05:07:16,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0 +2024-07-28 05:07:16,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114133.33333333333, ans=0.125 +2024-07-28 05:07:17,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.15 vs. limit=15.0 +2024-07-28 05:07:19,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0 +2024-07-28 05:07:21,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=114146.66666666667, ans=0.2 +2024-07-28 05:07:24,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114146.66666666667, ans=0.1 +2024-07-28 05:07:33,873 INFO [train.py:1114] (0/4) Epoch 9, batch 3850, loss[loss=0.1799, simple_loss=0.2744, pruned_loss=0.0427, over 4639.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2902, pruned_loss=0.06014, over 942378.62 frames. ], batch size: 16, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:37,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=114173.33333333333, ans=0.5 +2024-07-28 05:07:53,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=114200.0, ans=0.125 +2024-07-28 05:07:57,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.654e+01 6.534e+01 7.463e+01 1.189e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 05:08:05,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=114226.66666666667, ans=0.125 +2024-07-28 05:08:11,622 INFO [train.py:1114] (0/4) Epoch 9, batch 3900, loss[loss=0.2695, simple_loss=0.3421, pruned_loss=0.09845, over 4811.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.292, pruned_loss=0.06067, over 942640.99 frames. 
], batch size: 14, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:11,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=114240.0, ans=0.0 +2024-07-28 05:08:17,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=114240.0, ans=0.0 +2024-07-28 05:08:19,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=114253.33333333333, ans=0.125 +2024-07-28 05:08:32,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114280.0, ans=0.125 +2024-07-28 05:08:34,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=114280.0, ans=0.05 +2024-07-28 05:08:45,044 INFO [train.py:1114] (0/4) Epoch 9, batch 3950, loss[loss=0.1943, simple_loss=0.2838, pruned_loss=0.05244, over 4853.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2925, pruned_loss=0.06098, over 944626.14 frames. ], batch size: 16, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:48,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0 +2024-07-28 05:08:49,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=114306.66666666667, ans=0.125 +2024-07-28 05:08:53,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=15.0 +2024-07-28 05:08:56,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=114320.0, ans=0.07 +2024-07-28 05:08:59,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=114333.33333333333, ans=0.125 +2024-07-28 05:09:00,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114333.33333333333, ans=0.125 +2024-07-28 05:09:04,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.786e+01 6.190e+01 6.950e+01 9.125e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 05:09:05,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=114346.66666666667, ans=0.0 +2024-07-28 05:09:12,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=114360.0, ans=0.0 +2024-07-28 05:09:16,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=114360.0, ans=0.09899494936611666 +2024-07-28 05:09:17,988 INFO [train.py:1114] (0/4) Epoch 9, batch 4000, loss[loss=0.1697, simple_loss=0.2483, pruned_loss=0.04552, over 4784.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2921, pruned_loss=0.06109, over 941173.59 frames. 
], batch size: 12, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:29,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=114386.66666666667, ans=0.125 +2024-07-28 05:09:32,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=114400.0, ans=0.125 +2024-07-28 05:09:38,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114400.0, ans=0.1 +2024-07-28 05:09:53,448 INFO [train.py:1114] (0/4) Epoch 9, batch 4050, loss[loss=0.3098, simple_loss=0.3695, pruned_loss=0.125, over 3321.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2908, pruned_loss=0.06013, over 939552.12 frames. ], batch size: 35, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:10:00,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=114453.33333333333, ans=0.2 +2024-07-28 05:10:06,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.77 vs. limit=12.0 +2024-07-28 05:10:12,635 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.800e+01 6.025e+01 6.921e+01 7.969e+01 1.217e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 05:10:18,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=114480.0, ans=0.125 +2024-07-28 05:10:18,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=114480.0, ans=0.07 +2024-07-28 05:10:18,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=114480.0, ans=0.125 +2024-07-28 05:10:26,733 INFO [train.py:1114] (0/4) Epoch 9, batch 4100, loss[loss=0.2215, simple_loss=0.3082, pruned_loss=0.06738, over 4908.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2915, pruned_loss=0.06067, over 938983.32 frames. ], batch size: 15, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:10:31,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=114506.66666666667, ans=0.04949747468305833 +2024-07-28 05:10:32,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=114506.66666666667, ans=0.0 +2024-07-28 05:11:11,976 INFO [train.py:1114] (0/4) Epoch 9, batch 4150, loss[loss=0.2109, simple_loss=0.2895, pruned_loss=0.06622, over 4820.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2907, pruned_loss=0.06027, over 938471.18 frames. ], batch size: 13, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:11:12,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.21 vs. 
limit=8.0 +2024-07-28 05:11:17,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=114573.33333333333, ans=0.125 +2024-07-28 05:11:18,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=114586.66666666667, ans=0.0 +2024-07-28 05:11:25,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=114600.0, ans=0.125 +2024-07-28 05:11:30,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=114600.0, ans=0.125 +2024-07-28 05:11:31,352 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.561e+01 6.118e+01 6.990e+01 1.145e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 05:11:32,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=114613.33333333333, ans=0.0 +2024-07-28 05:11:32,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.06 vs. limit=12.0 +2024-07-28 05:12:04,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=114640.0, ans=0.125 +2024-07-28 05:12:05,258 INFO [train.py:1114] (0/4) Epoch 9, batch 4200, loss[loss=0.1957, simple_loss=0.283, pruned_loss=0.05421, over 4889.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2895, pruned_loss=0.05941, over 940231.90 frames. ], batch size: 15, lr: 8.40e-03, grad_scale: 64.0 +2024-07-28 05:12:30,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114640.0, ans=0.1 +2024-07-28 05:12:32,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=114653.33333333333, ans=0.0 +2024-07-28 05:12:32,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=114653.33333333333, ans=0.125 +2024-07-28 05:12:33,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=114653.33333333333, ans=0.0 +2024-07-28 05:12:38,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.46 vs. limit=15.0 +2024-07-28 05:12:52,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=114693.33333333333, ans=0.0 +2024-07-28 05:12:58,964 INFO [train.py:1114] (0/4) Epoch 9, batch 4250, loss[loss=0.1951, simple_loss=0.2772, pruned_loss=0.0565, over 4636.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2894, pruned_loss=0.05906, over 940947.07 frames. ], batch size: 12, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:13:30,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=114733.33333333333, ans=0.0 +2024-07-28 05:13:45,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.49 vs. 
limit=12.0 +2024-07-28 05:13:48,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=114733.33333333333, ans=0.125 +2024-07-28 05:13:49,188 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.532e+01 6.240e+01 7.121e+01 1.493e+02, threshold=1.248e+02, percent-clipped=1.0 +2024-07-28 05:13:52,203 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:13:52,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=114746.66666666667, ans=0.2 +2024-07-28 05:14:04,279 INFO [train.py:1114] (0/4) Epoch 9, batch 4300, loss[loss=0.1853, simple_loss=0.2784, pruned_loss=0.04612, over 4759.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2896, pruned_loss=0.05926, over 939918.91 frames. ], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:17,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=114800.0, ans=0.0 +2024-07-28 05:14:19,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0 +2024-07-28 05:14:22,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=114800.0, ans=0.0 +2024-07-28 05:14:27,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=114813.33333333333, ans=0.2 +2024-07-28 05:14:35,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=114826.66666666667, ans=0.125 +2024-07-28 05:14:39,645 INFO [train.py:1114] (0/4) Epoch 9, batch 4350, loss[loss=0.2332, simple_loss=0.3137, pruned_loss=0.0764, over 4760.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2896, pruned_loss=0.05879, over 940771.10 frames. ], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:51,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. limit=15.0 +2024-07-28 05:15:01,417 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.655e+01 6.124e+01 6.925e+01 1.522e+02, threshold=1.225e+02, percent-clipped=1.0 +2024-07-28 05:15:14,999 INFO [train.py:1114] (0/4) Epoch 9, batch 4400, loss[loss=0.1896, simple_loss=0.2782, pruned_loss=0.05054, over 4809.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2898, pruned_loss=0.05913, over 940616.51 frames. ], batch size: 14, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:19,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=114906.66666666667, ans=0.125 +2024-07-28 05:15:30,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.17 vs. 
limit=15.0 +2024-07-28 05:15:42,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=114960.0, ans=0.2 +2024-07-28 05:15:45,426 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:15:48,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=114960.0, ans=0.025 +2024-07-28 05:15:49,341 INFO [train.py:1114] (0/4) Epoch 9, batch 4450, loss[loss=0.2106, simple_loss=0.2864, pruned_loss=0.06743, over 4931.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2908, pruned_loss=0.06006, over 938792.23 frames. ], batch size: 12, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:54,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=114973.33333333333, ans=0.0 +2024-07-28 05:16:02,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=115000.0, ans=0.025 +2024-07-28 05:16:05,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=115000.0, ans=0.0 +2024-07-28 05:16:07,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.18 vs. limit=15.0 +2024-07-28 05:16:09,214 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.739e+01 6.410e+01 7.552e+01 1.027e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:16:24,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 05:16:24,279 INFO [train.py:1114] (0/4) Epoch 9, batch 4500, loss[loss=0.1997, simple_loss=0.2958, pruned_loss=0.05179, over 4742.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2926, pruned_loss=0.06055, over 938305.60 frames. ], batch size: 14, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:27,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=115040.0, ans=0.025 +2024-07-28 05:16:31,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=115053.33333333333, ans=0.0 +2024-07-28 05:16:39,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=115066.66666666667, ans=0.125 +2024-07-28 05:16:41,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.24 vs. limit=10.0 +2024-07-28 05:16:54,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=115093.33333333333, ans=0.125 +2024-07-28 05:16:57,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=115106.66666666667, ans=0.025 +2024-07-28 05:16:58,150 INFO [train.py:1114] (0/4) Epoch 9, batch 4550, loss[loss=0.2105, simple_loss=0.3028, pruned_loss=0.05907, over 4898.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2915, pruned_loss=0.05984, over 940269.50 frames. 
], batch size: 13, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:17:00,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=115106.66666666667, ans=0.2 +2024-07-28 05:17:04,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115106.66666666667, ans=0.1 +2024-07-28 05:17:10,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=115120.0, ans=0.125 +2024-07-28 05:17:36,231 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:17:37,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115133.33333333333, ans=0.1 +2024-07-28 05:17:38,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=115146.66666666667, ans=0.125 +2024-07-28 05:17:38,912 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.875e+01 5.713e+01 6.359e+01 7.183e+01 1.180e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 05:17:52,566 INFO [train.py:1114] (0/4) Epoch 9, batch 4600, loss[loss=0.2466, simple_loss=0.3375, pruned_loss=0.07788, over 4410.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2917, pruned_loss=0.05995, over 938214.70 frames. ], batch size: 21, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:17:55,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=115173.33333333333, ans=0.125 +2024-07-28 05:17:59,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=115186.66666666667, ans=0.125 +2024-07-28 05:18:01,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=115186.66666666667, ans=0.025 +2024-07-28 05:18:08,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=115200.0, ans=0.0 +2024-07-28 05:18:35,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=115213.33333333333, ans=0.0 +2024-07-28 05:18:36,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115213.33333333333, ans=0.125 +2024-07-28 05:19:44,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=115226.66666666667, ans=0.0 +2024-07-28 05:19:46,529 INFO [train.py:1114] (0/4) Epoch 9, batch 4650, loss[loss=0.2326, simple_loss=0.317, pruned_loss=0.07413, over 4856.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2922, pruned_loss=0.06012, over 939816.43 frames. 
], batch size: 16, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:19:47,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115240.0, ans=0.1 +2024-07-28 05:19:50,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=115240.0, ans=0.125 +2024-07-28 05:20:19,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115266.66666666667, ans=0.1 +2024-07-28 05:20:20,855 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.631e+01 6.409e+01 7.272e+01 9.674e+01, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:20:31,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=115293.33333333333, ans=0.0 +2024-07-28 05:20:39,144 INFO [train.py:1114] (0/4) Epoch 9, batch 4700, loss[loss=0.1674, simple_loss=0.2516, pruned_loss=0.04159, over 4687.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2913, pruned_loss=0.05978, over 937433.06 frames. ], batch size: 11, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:20:50,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115320.0, ans=0.125 +2024-07-28 05:20:59,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=115346.66666666667, ans=0.125 +2024-07-28 05:21:12,803 INFO [train.py:1114] (0/4) Epoch 9, batch 4750, loss[loss=0.2134, simple_loss=0.2981, pruned_loss=0.06433, over 4566.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2916, pruned_loss=0.06005, over 935418.26 frames. ], batch size: 21, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:20,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=115373.33333333333, ans=0.0 +2024-07-28 05:21:34,808 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.742e+01 6.606e+01 7.346e+01 1.206e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 05:21:39,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115413.33333333333, ans=0.125 +2024-07-28 05:21:50,047 INFO [train.py:1114] (0/4) Epoch 9, batch 4800, loss[loss=0.2264, simple_loss=0.3089, pruned_loss=0.07189, over 4693.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2915, pruned_loss=0.06026, over 932572.06 frames. ], batch size: 13, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:52,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=115440.0, ans=0.125 +2024-07-28 05:22:08,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=115453.33333333333, ans=0.125 +2024-07-28 05:22:09,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115453.33333333333, ans=0.1 +2024-07-28 05:22:11,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.53 vs. 
limit=10.0 +2024-07-28 05:22:26,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=115480.0, ans=0.0 +2024-07-28 05:22:29,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.94 vs. limit=22.5 +2024-07-28 05:22:37,896 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.52 vs. limit=10.0 +2024-07-28 05:22:39,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115493.33333333333, ans=0.125 +2024-07-28 05:22:41,490 INFO [train.py:1114] (0/4) Epoch 9, batch 4850, loss[loss=0.186, simple_loss=0.2705, pruned_loss=0.05075, over 4740.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2907, pruned_loss=0.05952, over 932698.95 frames. ], batch size: 14, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:22:47,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.68 vs. limit=15.0 +2024-07-28 05:22:48,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.92 vs. limit=12.0 +2024-07-28 05:23:02,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=115533.33333333333, ans=0.125 +2024-07-28 05:23:07,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.778e+01 6.431e+01 7.298e+01 1.043e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 05:23:14,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=115560.0, ans=0.025 +2024-07-28 05:23:14,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=115560.0, ans=0.05 +2024-07-28 05:23:17,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=115560.0, ans=0.125 +2024-07-28 05:23:18,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=115560.0, ans=0.025 +2024-07-28 05:23:20,762 INFO [train.py:1114] (0/4) Epoch 9, batch 4900, loss[loss=0.1977, simple_loss=0.2756, pruned_loss=0.05986, over 4750.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2904, pruned_loss=0.05965, over 934465.85 frames. 
], batch size: 13, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:23:37,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=115586.66666666667, ans=0.125 +2024-07-28 05:23:48,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=115613.33333333333, ans=0.125 +2024-07-28 05:23:49,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=115613.33333333333, ans=0.025 +2024-07-28 05:23:54,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115613.33333333333, ans=0.1 +2024-07-28 05:23:55,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.41 vs. limit=15.0 +2024-07-28 05:24:08,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.35 vs. limit=22.5 +2024-07-28 05:24:12,917 INFO [train.py:1114] (0/4) Epoch 9, batch 4950, loss[loss=0.2622, simple_loss=0.3217, pruned_loss=0.1014, over 3526.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2925, pruned_loss=0.06129, over 931699.73 frames. ], batch size: 35, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:24:21,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=115653.33333333333, ans=0.125 +2024-07-28 05:24:27,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=115653.33333333333, ans=0.07 +2024-07-28 05:24:31,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115666.66666666667, ans=0.1 +2024-07-28 05:24:35,822 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+01 5.657e+01 6.231e+01 6.947e+01 1.249e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 05:24:43,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=115680.0, ans=0.0 +2024-07-28 05:25:13,963 INFO [train.py:1114] (0/4) Epoch 9, batch 5000, loss[loss=0.169, simple_loss=0.2673, pruned_loss=0.03534, over 4666.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2915, pruned_loss=0.06051, over 935550.49 frames. ], batch size: 14, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:25:25,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=115706.66666666667, ans=0.0 +2024-07-28 05:25:49,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=115720.0, ans=0.2 +2024-07-28 05:25:49,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.33 vs. limit=10.0 +2024-07-28 05:25:52,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.99 vs. limit=12.0 +2024-07-28 05:26:04,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. 
limit=6.0 +2024-07-28 05:26:07,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115746.66666666667, ans=0.125 +2024-07-28 05:26:11,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=12.0 +2024-07-28 05:26:44,477 INFO [train.py:1114] (0/4) Epoch 9, batch 5050, loss[loss=0.1887, simple_loss=0.2644, pruned_loss=0.05643, over 4862.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.0606, over 937909.14 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:27:01,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=115773.33333333333, ans=0.2 +2024-07-28 05:27:04,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115786.66666666667, ans=0.1 +2024-07-28 05:27:05,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=115786.66666666667, ans=0.125 +2024-07-28 05:27:05,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.54 vs. limit=22.5 +2024-07-28 05:27:15,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=115800.0, ans=0.2 +2024-07-28 05:27:27,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=115800.0, ans=0.07 +2024-07-28 05:27:29,887 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 5.915e+01 6.647e+01 7.788e+01 1.077e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:27:48,484 INFO [train.py:1114] (0/4) Epoch 9, batch 5100, loss[loss=0.169, simple_loss=0.2591, pruned_loss=0.03946, over 4776.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2931, pruned_loss=0.06142, over 935357.58 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:28:11,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.62 vs. limit=15.0 +2024-07-28 05:28:34,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0 +2024-07-28 05:28:34,405 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=22.5 +2024-07-28 05:28:34,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.49 vs. limit=22.5 +2024-07-28 05:28:44,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.77 vs. limit=12.0 +2024-07-28 05:28:47,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=115893.33333333333, ans=0.0 +2024-07-28 05:28:59,104 INFO [train.py:1114] (0/4) Epoch 9, batch 5150, loss[loss=0.2186, simple_loss=0.309, pruned_loss=0.06412, over 4841.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2936, pruned_loss=0.06151, over 936054.34 frames. 
], batch size: 16, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:28:59,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=115906.66666666667, ans=0.2 +2024-07-28 05:29:20,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=115920.0, ans=0.125 +2024-07-28 05:29:37,795 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 5.668e+01 6.329e+01 7.486e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 05:29:46,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.68 vs. limit=15.0 +2024-07-28 05:29:50,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115946.66666666667, ans=0.125 +2024-07-28 05:29:53,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115960.0, ans=0.1 +2024-07-28 05:30:01,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=115973.33333333333, ans=0.05 +2024-07-28 05:30:01,860 INFO [train.py:1114] (0/4) Epoch 9, batch 5200, loss[loss=0.2229, simple_loss=0.3203, pruned_loss=0.06277, over 4667.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2925, pruned_loss=0.06084, over 936134.04 frames. ], batch size: 14, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:30:02,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=115973.33333333333, ans=0.2 +2024-07-28 05:30:02,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=115973.33333333333, ans=0.05 +2024-07-28 05:30:02,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=115973.33333333333, ans=0.2 +2024-07-28 05:30:14,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115986.66666666667, ans=0.125 +2024-07-28 05:30:14,684 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:30:36,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116013.33333333333, ans=0.125 +2024-07-28 05:30:38,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=116013.33333333333, ans=0.0 +2024-07-28 05:30:47,193 INFO [train.py:1114] (0/4) Epoch 9, batch 5250, loss[loss=0.1992, simple_loss=0.2884, pruned_loss=0.05497, over 4898.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2914, pruned_loss=0.06006, over 936299.09 frames. 
], batch size: 13, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:31:05,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=116053.33333333333, ans=0.0 +2024-07-28 05:31:20,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=116066.66666666667, ans=0.125 +2024-07-28 05:31:22,042 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.23 vs. limit=15.0 +2024-07-28 05:31:22,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=12.0 +2024-07-28 05:31:25,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116066.66666666667, ans=0.1 +2024-07-28 05:31:31,296 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+01 5.667e+01 6.856e+01 8.237e+01 1.145e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-28 05:31:36,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=116080.0, ans=0.0 +2024-07-28 05:31:47,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116093.33333333333, ans=0.1 +2024-07-28 05:31:47,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.56 vs. limit=15.0 +2024-07-28 05:32:01,305 INFO [train.py:1114] (0/4) Epoch 9, batch 5300, loss[loss=0.2413, simple_loss=0.3207, pruned_loss=0.08089, over 4636.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2901, pruned_loss=0.05951, over 934862.22 frames. ], batch size: 16, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:32:02,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=116106.66666666667, ans=0.025 +2024-07-28 05:32:11,261 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-07-28 05:32:20,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=116133.33333333333, ans=0.125 +2024-07-28 05:32:22,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=116133.33333333333, ans=0.125 +2024-07-28 05:32:31,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116146.66666666667, ans=0.0 +2024-07-28 05:32:46,275 INFO [train.py:1114] (0/4) Epoch 9, batch 5350, loss[loss=0.1654, simple_loss=0.2513, pruned_loss=0.03972, over 4557.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2905, pruned_loss=0.05951, over 937175.10 frames. 
], batch size: 10, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:32:51,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=116173.33333333333, ans=0.015 +2024-07-28 05:33:03,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=116200.0, ans=0.125 +2024-07-28 05:33:14,381 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.909e+01 6.357e+01 7.144e+01 1.044e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 05:33:14,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.93 vs. limit=15.0 +2024-07-28 05:33:28,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.11 vs. limit=10.0 +2024-07-28 05:33:36,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.05 vs. limit=12.0 +2024-07-28 05:33:36,866 INFO [train.py:1114] (0/4) Epoch 9, batch 5400, loss[loss=0.2325, simple_loss=0.3196, pruned_loss=0.07273, over 4266.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2925, pruned_loss=0.06064, over 931304.16 frames. ], batch size: 25, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:33:58,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.20 vs. limit=22.5 +2024-07-28 05:34:22,028 INFO [train.py:1114] (0/4) Epoch 9, batch 5450, loss[loss=0.1769, simple_loss=0.2621, pruned_loss=0.04585, over 4716.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2925, pruned_loss=0.06022, over 933934.90 frames. ], batch size: 11, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:34:23,936 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.29 vs. limit=10.0 +2024-07-28 05:34:51,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=116333.33333333333, ans=0.125 +2024-07-28 05:34:56,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=116333.33333333333, ans=0.0 +2024-07-28 05:34:58,376 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.949e+01 6.805e+01 7.625e+01 9.971e+01, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 05:35:22,096 INFO [train.py:1114] (0/4) Epoch 9, batch 5500, loss[loss=0.2402, simple_loss=0.3196, pruned_loss=0.08044, over 4222.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2919, pruned_loss=0.06024, over 931082.33 frames. ], batch size: 26, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:35:24,687 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.03 vs. 
limit=12.0 +2024-07-28 05:35:26,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=116373.33333333333, ans=0.125 +2024-07-28 05:35:36,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=116400.0, ans=0.1 +2024-07-28 05:35:56,289 INFO [train.py:1114] (0/4) Epoch 9, batch 5550, loss[loss=0.1735, simple_loss=0.2567, pruned_loss=0.04509, over 4719.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2905, pruned_loss=0.05985, over 933145.99 frames. ], batch size: 12, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:36:17,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.683e+01 6.413e+01 7.380e+01 1.098e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 05:36:21,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=116480.0, ans=0.2 +2024-07-28 05:36:24,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116493.33333333333, ans=0.125 +2024-07-28 05:36:35,079 INFO [train.py:1114] (0/4) Epoch 9, batch 5600, loss[loss=0.2002, simple_loss=0.2864, pruned_loss=0.05697, over 4749.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2905, pruned_loss=0.06005, over 933961.81 frames. ], batch size: 14, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:36:37,404 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:36:43,599 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:36:47,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-28 05:36:52,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=116533.33333333333, ans=0.0 +2024-07-28 05:37:08,981 INFO [train.py:1114] (0/4) Epoch 9, batch 5650, loss[loss=0.2441, simple_loss=0.3302, pruned_loss=0.07896, over 4488.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2897, pruned_loss=0.06031, over 936708.86 frames. ], batch size: 21, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:37:14,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.95 vs. limit=15.0 +2024-07-28 05:37:18,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=116586.66666666667, ans=0.0 +2024-07-28 05:37:28,754 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+01 5.748e+01 6.232e+01 7.231e+01 1.019e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 05:37:38,896 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.66 vs. limit=15.0 +2024-07-28 05:37:43,668 INFO [train.py:1114] (0/4) Epoch 9, batch 5700, loss[loss=0.2129, simple_loss=0.2917, pruned_loss=0.06707, over 4689.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2895, pruned_loss=0.06017, over 937901.51 frames. 
], batch size: 13, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:37:57,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116666.66666666667, ans=0.1 +2024-07-28 05:38:12,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=116693.33333333333, ans=0.2 +2024-07-28 05:38:12,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.40 vs. limit=10.0 +2024-07-28 05:38:16,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=116693.33333333333, ans=0.0 +2024-07-28 05:38:18,379 INFO [train.py:1114] (0/4) Epoch 9, batch 5750, loss[loss=0.2408, simple_loss=0.3251, pruned_loss=0.0783, over 4762.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2896, pruned_loss=0.05989, over 938404.51 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:38:34,475 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.83 vs. limit=15.0 +2024-07-28 05:38:47,077 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.95 vs. limit=15.0 +2024-07-28 05:38:47,981 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.019e+01 5.728e+01 6.287e+01 7.264e+01 1.232e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 05:39:17,866 INFO [train.py:1114] (0/4) Epoch 9, batch 5800, loss[loss=0.2512, simple_loss=0.3356, pruned_loss=0.08338, over 4730.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2915, pruned_loss=0.06079, over 937680.56 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:39:21,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=116773.33333333333, ans=0.125 +2024-07-28 05:39:22,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=116773.33333333333, ans=0.125 +2024-07-28 05:39:23,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=116773.33333333333, ans=0.025 +2024-07-28 05:39:40,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=116800.0, ans=0.05 +2024-07-28 05:40:02,740 INFO [train.py:1114] (0/4) Epoch 9, batch 5850, loss[loss=0.1995, simple_loss=0.2984, pruned_loss=0.05035, over 4513.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2919, pruned_loss=0.06092, over 938392.69 frames. ], batch size: 21, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:40:03,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=116840.0, ans=0.07 +2024-07-28 05:40:07,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=116840.0, ans=0.05 +2024-07-28 05:40:11,734 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.63 vs. 
limit=15.0 +2024-07-28 05:40:25,339 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.305e+01 6.200e+01 6.975e+01 8.009e+01 1.394e+02, threshold=1.395e+02, percent-clipped=2.0 +2024-07-28 05:40:34,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=5.84 vs. limit=15.0 +2024-07-28 05:40:38,643 INFO [train.py:1114] (0/4) Epoch 9, batch 5900, loss[loss=0.2303, simple_loss=0.3227, pruned_loss=0.06898, over 4678.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2921, pruned_loss=0.06078, over 938549.51 frames. ], batch size: 15, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:40:46,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116906.66666666667, ans=0.0 +2024-07-28 05:40:52,070 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.01 vs. limit=22.5 +2024-07-28 05:41:08,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116946.66666666667, ans=0.0 +2024-07-28 05:41:11,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=116960.0, ans=0.025 +2024-07-28 05:41:12,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=116960.0, ans=0.0 +2024-07-28 05:41:17,906 INFO [train.py:1114] (0/4) Epoch 9, batch 5950, loss[loss=0.2558, simple_loss=0.3416, pruned_loss=0.08495, over 4673.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.292, pruned_loss=0.06086, over 940809.04 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:41:35,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=117000.0, ans=0.2 +2024-07-28 05:41:37,800 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.843e+01 6.530e+01 7.569e+01 1.342e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 05:41:44,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.22 vs. limit=22.5 +2024-07-28 05:41:44,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=117026.66666666667, ans=0.0 +2024-07-28 05:41:51,486 INFO [train.py:1114] (0/4) Epoch 9, batch 6000, loss[loss=0.2337, simple_loss=0.3124, pruned_loss=0.07746, over 4271.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2918, pruned_loss=0.06106, over 937705.77 frames. ], batch size: 25, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:41:51,486 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 05:42:02,093 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.4839, 3.2644, 5.1149, 2.7762], device='cuda:0') +2024-07-28 05:42:05,184 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.175, simple_loss=0.2796, pruned_loss=0.03521, over 944034.00 frames. +2024-07-28 05:42:05,185 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 05:42:07,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.22 vs. 
limit=15.0 +2024-07-28 05:42:09,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=117040.0, ans=0.5 +2024-07-28 05:42:10,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=117040.0, ans=0.5 +2024-07-28 05:42:12,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=117053.33333333333, ans=0.125 +2024-07-28 05:42:12,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.22 vs. limit=15.0 +2024-07-28 05:42:38,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=117093.33333333333, ans=0.015 +2024-07-28 05:42:38,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=117093.33333333333, ans=22.5 +2024-07-28 05:42:39,950 INFO [train.py:1114] (0/4) Epoch 9, batch 6050, loss[loss=0.1583, simple_loss=0.2541, pruned_loss=0.03126, over 4776.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2914, pruned_loss=0.06096, over 938978.10 frames. ], batch size: 12, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:42:40,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117106.66666666667, ans=0.1 +2024-07-28 05:42:58,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117133.33333333333, ans=0.1 +2024-07-28 05:43:04,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=117133.33333333333, ans=0.125 +2024-07-28 05:43:05,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.753e+01 6.307e+01 7.312e+01 1.282e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 05:43:18,569 INFO [train.py:1114] (0/4) Epoch 9, batch 6100, loss[loss=0.2258, simple_loss=0.309, pruned_loss=0.07132, over 4665.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2904, pruned_loss=0.06066, over 938279.47 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:43:28,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=117186.66666666667, ans=0.0 +2024-07-28 05:43:42,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=117213.33333333333, ans=0.125 +2024-07-28 05:43:50,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=117226.66666666667, ans=0.0 +2024-07-28 05:43:53,993 INFO [train.py:1114] (0/4) Epoch 9, batch 6150, loss[loss=0.293, simple_loss=0.359, pruned_loss=0.1135, over 3581.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.292, pruned_loss=0.06103, over 937093.95 frames. ], batch size: 35, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:44:03,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.73 vs. 
limit=15.0 +2024-07-28 05:44:05,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=117253.33333333333, ans=0.1 +2024-07-28 05:44:06,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117253.33333333333, ans=0.1 +2024-07-28 05:44:07,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=15.0 +2024-07-28 05:44:08,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117253.33333333333, ans=0.0 +2024-07-28 05:44:10,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117266.66666666667, ans=0.1 +2024-07-28 05:44:13,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=117266.66666666667, ans=0.125 +2024-07-28 05:44:15,649 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.523e+01 6.022e+01 6.962e+01 1.002e+02, threshold=1.204e+02, percent-clipped=1.0 +2024-07-28 05:44:17,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117280.0, ans=0.125 +2024-07-28 05:44:32,089 INFO [train.py:1114] (0/4) Epoch 9, batch 6200, loss[loss=0.2715, simple_loss=0.3404, pruned_loss=0.1014, over 4732.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2925, pruned_loss=0.06101, over 936444.02 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 32.0 +2024-07-28 05:44:35,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=117306.66666666667, ans=0.5 +2024-07-28 05:44:35,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=117306.66666666667, ans=0.2 +2024-07-28 05:44:39,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117306.66666666667, ans=0.1 +2024-07-28 05:44:51,805 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-88000.pt +2024-07-28 05:45:20,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=117360.0, ans=0.0 +2024-07-28 05:45:22,594 INFO [train.py:1114] (0/4) Epoch 9, batch 6250, loss[loss=0.2404, simple_loss=0.3155, pruned_loss=0.08268, over 4812.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2923, pruned_loss=0.06114, over 932626.24 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:45:32,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.12 vs. 
limit=15.0 +2024-07-28 05:45:52,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=117400.0, ans=0.5 +2024-07-28 05:45:57,916 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.368e+01 5.807e+01 6.495e+01 7.426e+01 1.051e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 05:46:00,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=117413.33333333333, ans=0.125 +2024-07-28 05:46:11,018 INFO [train.py:1114] (0/4) Epoch 9, batch 6300, loss[loss=0.155, simple_loss=0.244, pruned_loss=0.033, over 4563.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2929, pruned_loss=0.06163, over 929412.92 frames. ], batch size: 10, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:46:12,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0 +2024-07-28 05:46:15,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-07-28 05:46:19,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=117453.33333333333, ans=0.125 +2024-07-28 05:46:27,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=117466.66666666667, ans=0.0 +2024-07-28 05:46:43,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117493.33333333333, ans=0.1 +2024-07-28 05:46:45,023 INFO [train.py:1114] (0/4) Epoch 9, batch 6350, loss[loss=0.1996, simple_loss=0.2925, pruned_loss=0.05331, over 4547.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.06067, over 933575.88 frames. ], batch size: 21, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:46:53,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=117520.0, ans=0.2 +2024-07-28 05:47:05,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.641e+01 6.337e+01 7.331e+01 1.035e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 05:47:13,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=117560.0, ans=0.0 +2024-07-28 05:47:18,813 INFO [train.py:1114] (0/4) Epoch 9, batch 6400, loss[loss=0.2147, simple_loss=0.2992, pruned_loss=0.06509, over 4631.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2913, pruned_loss=0.06059, over 935075.97 frames. ], batch size: 13, lr: 8.29e-03, grad_scale: 64.0 +2024-07-28 05:47:22,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0 +2024-07-28 05:47:43,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.06 vs. limit=15.0 +2024-07-28 05:47:48,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=117626.66666666667, ans=0.2 +2024-07-28 05:47:51,885 INFO [train.py:1114] (0/4) Epoch 9, batch 6450, loss[loss=0.238, simple_loss=0.3253, pruned_loss=0.07541, over 4539.00 frames. 
], tot_loss[loss=0.2066, simple_loss=0.2923, pruned_loss=0.06047, over 938770.01 frames. ], batch size: 21, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:47:58,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=117653.33333333333, ans=0.0 +2024-07-28 05:48:01,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=117653.33333333333, ans=0.125 +2024-07-28 05:48:04,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=117653.33333333333, ans=0.125 +2024-07-28 05:48:04,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=117666.66666666667, ans=0.0 +2024-07-28 05:48:10,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.82 vs. limit=10.0 +2024-07-28 05:48:11,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.16 vs. limit=15.0 +2024-07-28 05:48:12,203 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.777e+01 6.265e+01 7.458e+01 1.073e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 05:48:14,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.50 vs. limit=15.0 +2024-07-28 05:48:22,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=117693.33333333333, ans=0.125 +2024-07-28 05:48:24,565 INFO [train.py:1114] (0/4) Epoch 9, batch 6500, loss[loss=0.2864, simple_loss=0.342, pruned_loss=0.1155, over 3287.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2912, pruned_loss=0.05965, over 939993.27 frames. ], batch size: 37, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:48:41,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=117733.33333333333, ans=0.05 +2024-07-28 05:48:50,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=117746.66666666667, ans=0.125 +2024-07-28 05:49:02,989 INFO [train.py:1114] (0/4) Epoch 9, batch 6550, loss[loss=0.2171, simple_loss=0.2737, pruned_loss=0.08023, over 4797.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.29, pruned_loss=0.05933, over 943255.02 frames. 
], batch size: 11, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:49:16,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=117800.0, ans=0.025 +2024-07-28 05:49:17,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=117800.0, ans=0.0 +2024-07-28 05:49:20,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=117800.0, ans=0.125 +2024-07-28 05:49:22,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=117813.33333333333, ans=0.125 +2024-07-28 05:49:23,504 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.652e+01 6.284e+01 7.270e+01 1.094e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 05:49:29,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=117826.66666666667, ans=0.0 +2024-07-28 05:49:29,282 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. limit=6.0 +2024-07-28 05:49:30,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=117826.66666666667, ans=0.125 +2024-07-28 05:49:32,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=117826.66666666667, ans=0.0 +2024-07-28 05:49:35,907 INFO [train.py:1114] (0/4) Epoch 9, batch 6600, loss[loss=0.2018, simple_loss=0.2843, pruned_loss=0.05965, over 4932.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2913, pruned_loss=0.06017, over 944964.49 frames. ], batch size: 14, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:49:38,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=117840.0, ans=0.0 +2024-07-28 05:49:43,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=117853.33333333333, ans=0.125 +2024-07-28 05:49:44,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117853.33333333333, ans=0.1 +2024-07-28 05:50:12,818 INFO [train.py:1114] (0/4) Epoch 9, batch 6650, loss[loss=0.2356, simple_loss=0.3143, pruned_loss=0.07843, over 4628.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2914, pruned_loss=0.06012, over 944245.34 frames. ], batch size: 17, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:50:18,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=117920.0, ans=0.125 +2024-07-28 05:50:25,606 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:50:35,490 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.735e+01 6.176e+01 7.286e+01 9.615e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 05:50:47,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=117960.0, ans=0.0 +2024-07-28 05:50:48,351 INFO [train.py:1114] (0/4) Epoch 9, batch 6700, loss[loss=0.2267, simple_loss=0.302, pruned_loss=0.07571, over 4692.00 frames. 
], tot_loss[loss=0.2056, simple_loss=0.2914, pruned_loss=0.05987, over 943001.11 frames. ], batch size: 19, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:50:48,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=117973.33333333333, ans=0.125 +2024-07-28 05:51:03,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=118000.0, ans=0.0 +2024-07-28 05:51:07,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118013.33333333333, ans=0.0 +2024-07-28 05:51:09,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=118013.33333333333, ans=0.0 +2024-07-28 05:51:13,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=118013.33333333333, ans=0.0 +2024-07-28 05:51:17,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=118026.66666666667, ans=0.125 +2024-07-28 05:51:20,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=118026.66666666667, ans=10.0 +2024-07-28 05:51:22,048 INFO [train.py:1114] (0/4) Epoch 9, batch 6750, loss[loss=0.2083, simple_loss=0.2952, pruned_loss=0.06071, over 4275.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2914, pruned_loss=0.05989, over 940911.73 frames. ], batch size: 26, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:51:23,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=118040.0, ans=0.0 +2024-07-28 05:51:26,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. limit=15.0 +2024-07-28 05:51:30,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=118053.33333333333, ans=0.0 +2024-07-28 05:51:30,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=118053.33333333333, ans=0.0 +2024-07-28 05:51:34,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118066.66666666667, ans=0.1 +2024-07-28 05:51:42,735 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.833e+01 6.338e+01 7.124e+01 1.183e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 05:51:44,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=118080.0, ans=0.125 +2024-07-28 05:51:48,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.31 vs. limit=15.0 +2024-07-28 05:51:51,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=118093.33333333333, ans=0.125 +2024-07-28 05:51:55,805 INFO [train.py:1114] (0/4) Epoch 9, batch 6800, loss[loss=0.1902, simple_loss=0.2808, pruned_loss=0.04983, over 4635.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2918, pruned_loss=0.06023, over 939162.35 frames. 
], batch size: 13, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:51:56,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118106.66666666667, ans=0.125 +2024-07-28 05:52:06,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0 +2024-07-28 05:52:07,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=118120.0, ans=0.125 +2024-07-28 05:52:11,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.45 vs. limit=22.5 +2024-07-28 05:52:12,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.53 vs. limit=22.5 +2024-07-28 05:52:13,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=118133.33333333333, ans=0.0 +2024-07-28 05:52:16,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=118133.33333333333, ans=0.2 +2024-07-28 05:52:26,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=118160.0, ans=0.0 +2024-07-28 05:52:32,162 INFO [train.py:1114] (0/4) Epoch 9, batch 6850, loss[loss=0.2038, simple_loss=0.2876, pruned_loss=0.06004, over 4691.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2901, pruned_loss=0.05938, over 940699.52 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:52:34,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=118173.33333333333, ans=0.125 +2024-07-28 05:52:39,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=118186.66666666667, ans=0.0 +2024-07-28 05:52:53,786 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.745e+01 6.443e+01 7.368e+01 1.069e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 05:52:55,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.10 vs. limit=22.5 +2024-07-28 05:52:58,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.50 vs. 
limit=22.5 +2024-07-28 05:53:05,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=118226.66666666667, ans=0.2 +2024-07-28 05:53:08,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=118226.66666666667, ans=0.125 +2024-07-28 05:53:08,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=118226.66666666667, ans=0.05 +2024-07-28 05:53:10,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118240.0, ans=0.125 +2024-07-28 05:53:10,650 INFO [train.py:1114] (0/4) Epoch 9, batch 6900, loss[loss=0.2049, simple_loss=0.2798, pruned_loss=0.06498, over 4967.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2904, pruned_loss=0.05954, over 942824.95 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:53:10,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=118240.0, ans=10.0 +2024-07-28 05:53:14,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=118240.0, ans=0.0 +2024-07-28 05:53:25,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=118266.66666666667, ans=0.025 +2024-07-28 05:53:34,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118280.0, ans=0.125 +2024-07-28 05:53:35,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=118280.0, ans=0.125 +2024-07-28 05:53:36,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.33 vs. limit=22.5 +2024-07-28 05:53:46,414 INFO [train.py:1114] (0/4) Epoch 9, batch 6950, loss[loss=0.1557, simple_loss=0.2414, pruned_loss=0.03506, over 4527.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2895, pruned_loss=0.05917, over 939925.06 frames. ], batch size: 10, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:53:46,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=118306.66666666667, ans=0.125 +2024-07-28 05:54:05,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118346.66666666667, ans=0.125 +2024-07-28 05:54:06,834 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.744e+01 6.460e+01 7.316e+01 1.273e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 05:54:19,638 INFO [train.py:1114] (0/4) Epoch 9, batch 7000, loss[loss=0.2423, simple_loss=0.3309, pruned_loss=0.07684, over 4580.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2891, pruned_loss=0.05916, over 938370.20 frames. 
], batch size: 17, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:54:20,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=118373.33333333333, ans=0.2 +2024-07-28 05:54:21,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=118373.33333333333, ans=0.2 +2024-07-28 05:54:40,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=118413.33333333333, ans=0.125 +2024-07-28 05:54:44,226 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:54:53,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-07-28 05:54:53,281 INFO [train.py:1114] (0/4) Epoch 9, batch 7050, loss[loss=0.2194, simple_loss=0.3048, pruned_loss=0.06698, over 4727.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2893, pruned_loss=0.05886, over 941628.42 frames. ], batch size: 19, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:54:56,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=118440.0, ans=0.0 +2024-07-28 05:55:07,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=118466.66666666667, ans=0.025 +2024-07-28 05:55:07,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=118466.66666666667, ans=0.125 +2024-07-28 05:55:10,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=12.0 +2024-07-28 05:55:14,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.222e+01 6.949e+01 1.042e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 05:55:16,097 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=12.0 +2024-07-28 05:55:21,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=118493.33333333333, ans=0.09899494936611666 +2024-07-28 05:55:21,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=118493.33333333333, ans=0.125 +2024-07-28 05:55:21,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=118493.33333333333, ans=0.125 +2024-07-28 05:55:26,728 INFO [train.py:1114] (0/4) Epoch 9, batch 7100, loss[loss=0.2059, simple_loss=0.3032, pruned_loss=0.05425, over 4794.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2908, pruned_loss=0.06017, over 936422.97 frames. 
], batch size: 15, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:55:26,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=118506.66666666667, ans=0.0 +2024-07-28 05:55:30,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=118506.66666666667, ans=0.0 +2024-07-28 05:55:31,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=118506.66666666667, ans=0.125 +2024-07-28 05:55:41,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. limit=10.0 +2024-07-28 05:55:42,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=118533.33333333333, ans=0.0 +2024-07-28 05:55:48,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.63 vs. limit=22.5 +2024-07-28 05:55:59,506 INFO [train.py:1114] (0/4) Epoch 9, batch 7150, loss[loss=0.2055, simple_loss=0.2941, pruned_loss=0.0585, over 4518.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2895, pruned_loss=0.0593, over 937413.64 frames. ], batch size: 21, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:56:19,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118613.33333333333, ans=0.125 +2024-07-28 05:56:19,955 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.871e+01 5.611e+01 6.289e+01 7.655e+01 1.013e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 05:56:31,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=118626.66666666667, ans=0.125 +2024-07-28 05:56:32,681 INFO [train.py:1114] (0/4) Epoch 9, batch 7200, loss[loss=0.2387, simple_loss=0.3267, pruned_loss=0.07533, over 4809.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2897, pruned_loss=0.05922, over 937658.23 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:56:32,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=118640.0, ans=0.0 +2024-07-28 05:56:39,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118653.33333333333, ans=0.125 +2024-07-28 05:57:00,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=118693.33333333333, ans=0.0 +2024-07-28 05:57:05,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=118693.33333333333, ans=0.025 +2024-07-28 05:57:06,754 INFO [train.py:1114] (0/4) Epoch 9, batch 7250, loss[loss=0.1703, simple_loss=0.2469, pruned_loss=0.04683, over 4849.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2894, pruned_loss=0.05904, over 939549.50 frames. 
], batch size: 12, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:57:09,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=118706.66666666667, ans=0.2 +2024-07-28 05:57:10,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=118706.66666666667, ans=0.0 +2024-07-28 05:57:25,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=118733.33333333333, ans=0.0 +2024-07-28 05:57:26,912 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.398e+01 5.726e+01 6.433e+01 7.236e+01 9.812e+01, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 05:57:34,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118760.0, ans=0.125 +2024-07-28 05:57:37,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=118760.0, ans=0.035 +2024-07-28 05:57:37,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=118760.0, ans=0.2 +2024-07-28 05:57:39,731 INFO [train.py:1114] (0/4) Epoch 9, batch 7300, loss[loss=0.1807, simple_loss=0.2659, pruned_loss=0.04772, over 4859.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.29, pruned_loss=0.05951, over 940468.13 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:57:39,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=118773.33333333333, ans=0.0 +2024-07-28 05:57:44,304 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:57:47,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0 +2024-07-28 05:57:55,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=118800.0, ans=0.125 +2024-07-28 05:58:03,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=118813.33333333333, ans=0.125 +2024-07-28 05:58:13,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118826.66666666667, ans=0.125 +2024-07-28 05:58:16,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118840.0, ans=0.125 +2024-07-28 05:58:16,989 INFO [train.py:1114] (0/4) Epoch 9, batch 7350, loss[loss=0.1858, simple_loss=0.2698, pruned_loss=0.05087, over 4639.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.291, pruned_loss=0.05998, over 939476.80 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:58:22,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.06 vs. 
limit=15.0 +2024-07-28 05:58:28,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=118853.33333333333, ans=0.125 +2024-07-28 05:58:28,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=118853.33333333333, ans=0.125 +2024-07-28 05:58:37,730 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+01 5.606e+01 6.103e+01 6.789e+01 9.069e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 05:58:40,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=118880.0, ans=22.5 +2024-07-28 05:58:50,085 INFO [train.py:1114] (0/4) Epoch 9, batch 7400, loss[loss=0.1953, simple_loss=0.2826, pruned_loss=0.054, over 4697.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2913, pruned_loss=0.06008, over 940607.83 frames. ], batch size: 13, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:58:54,939 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0 +2024-07-28 05:59:04,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=118933.33333333333, ans=0.04949747468305833 +2024-07-28 05:59:13,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=118946.66666666667, ans=0.125 +2024-07-28 05:59:18,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118960.0, ans=0.125 +2024-07-28 05:59:22,861 INFO [train.py:1114] (0/4) Epoch 9, batch 7450, loss[loss=0.1798, simple_loss=0.2657, pruned_loss=0.04697, over 4620.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2904, pruned_loss=0.06023, over 938067.23 frames. ], batch size: 11, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:59:36,347 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:59:46,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.870e+01 5.815e+01 6.542e+01 7.746e+01 1.541e+02, threshold=1.308e+02, percent-clipped=5.0 +2024-07-28 05:59:57,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=119026.66666666667, ans=0.125 +2024-07-28 05:59:59,607 INFO [train.py:1114] (0/4) Epoch 9, batch 7500, loss[loss=0.2983, simple_loss=0.3529, pruned_loss=0.1219, over 3064.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2904, pruned_loss=0.06042, over 935821.80 frames. ], batch size: 35, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:00:02,297 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:00:02,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.88 vs. limit=22.5 +2024-07-28 06:00:16,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.44 vs. 
limit=15.0 +2024-07-28 06:00:23,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=119080.0, ans=0.0 +2024-07-28 06:00:23,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.54 vs. limit=10.0 +2024-07-28 06:00:35,137 INFO [train.py:1114] (0/4) Epoch 9, batch 7550, loss[loss=0.2048, simple_loss=0.2941, pruned_loss=0.05776, over 4607.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.06067, over 935533.82 frames. ], batch size: 17, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:00:38,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=119106.66666666667, ans=0.2 +2024-07-28 06:00:39,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=119106.66666666667, ans=0.09899494936611666 +2024-07-28 06:00:40,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=119106.66666666667, ans=0.125 +2024-07-28 06:00:55,466 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.364e+01 5.884e+01 6.441e+01 7.385e+01 1.107e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 06:01:01,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=119160.0, ans=0.125 +2024-07-28 06:01:03,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=119160.0, ans=0.025 +2024-07-28 06:01:03,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=119160.0, ans=0.125 +2024-07-28 06:01:04,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=15.0 +2024-07-28 06:01:07,558 INFO [train.py:1114] (0/4) Epoch 9, batch 7600, loss[loss=0.2244, simple_loss=0.3057, pruned_loss=0.07153, over 4805.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.291, pruned_loss=0.06024, over 937567.80 frames. ], batch size: 14, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:01:10,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=119173.33333333333, ans=0.2 +2024-07-28 06:01:11,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=119173.33333333333, ans=0.125 +2024-07-28 06:01:17,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=119186.66666666667, ans=0.125 +2024-07-28 06:01:17,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=119186.66666666667, ans=0.125 +2024-07-28 06:01:17,745 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:01:23,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=119200.0, ans=0.125 +2024-07-28 06:01:41,965 INFO [train.py:1114] (0/4) Epoch 9, batch 7650, loss[loss=0.1653, simple_loss=0.2508, pruned_loss=0.03994, over 4931.00 frames. 
], tot_loss[loss=0.2062, simple_loss=0.2914, pruned_loss=0.0605, over 936403.48 frames. ], batch size: 12, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:01:42,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=119240.0, ans=0.0 +2024-07-28 06:01:45,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119240.0, ans=0.1 +2024-07-28 06:01:51,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=119253.33333333333, ans=0.125 +2024-07-28 06:01:52,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=119253.33333333333, ans=0.0 +2024-07-28 06:02:04,514 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.140e+01 5.757e+01 6.386e+01 7.107e+01 1.097e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 06:02:13,719 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.03 vs. limit=6.0 +2024-07-28 06:02:16,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=119293.33333333333, ans=0.125 +2024-07-28 06:02:18,635 INFO [train.py:1114] (0/4) Epoch 9, batch 7700, loss[loss=0.2049, simple_loss=0.2944, pruned_loss=0.05771, over 4690.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.06047, over 933833.89 frames. ], batch size: 13, lr: 8.23e-03, grad_scale: 16.0 +2024-07-28 06:02:19,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.48 vs. limit=15.0 +2024-07-28 06:02:23,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=119306.66666666667, ans=0.2 +2024-07-28 06:02:32,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=119320.0, ans=0.125 +2024-07-28 06:02:54,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=119346.66666666667, ans=0.0 +2024-07-28 06:02:55,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0 +2024-07-28 06:03:16,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=119360.0, ans=0.09899494936611666 +2024-07-28 06:03:18,523 INFO [train.py:1114] (0/4) Epoch 9, batch 7750, loss[loss=0.1901, simple_loss=0.2795, pruned_loss=0.05039, over 4926.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2919, pruned_loss=0.0605, over 935368.46 frames. 
], batch size: 14, lr: 8.23e-03, grad_scale: 16.0 +2024-07-28 06:03:25,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=119373.33333333333, ans=0.0 +2024-07-28 06:03:30,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=119373.33333333333, ans=22.5 +2024-07-28 06:03:32,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0 +2024-07-28 06:03:51,270 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.505e+01 6.110e+01 6.941e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 06:04:02,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=119440.0, ans=0.125 +2024-07-28 06:04:03,701 INFO [train.py:1114] (0/4) Epoch 9, batch 7800, loss[loss=0.2159, simple_loss=0.3124, pruned_loss=0.05971, over 4671.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2924, pruned_loss=0.06054, over 937003.04 frames. ], batch size: 14, lr: 8.23e-03, grad_scale: 8.0 +2024-07-28 06:04:04,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-28 06:04:05,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=119440.0, ans=0.125 +2024-07-28 06:04:30,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.65 vs. limit=15.0 +2024-07-28 06:04:42,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.59 vs. limit=10.0 +2024-07-28 06:04:43,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119453.33333333333, ans=0.1 +2024-07-28 06:05:06,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=119480.0, ans=0.2 +2024-07-28 06:05:09,144 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.11 vs. limit=10.0 +2024-07-28 06:05:20,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.01 vs. limit=15.0 +2024-07-28 06:05:20,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=119493.33333333333, ans=0.125 +2024-07-28 06:05:26,843 INFO [train.py:1114] (0/4) Epoch 9, batch 7850, loss[loss=0.1794, simple_loss=0.2452, pruned_loss=0.05682, over 4504.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.291, pruned_loss=0.06035, over 936029.62 frames. 
], batch size: 10, lr: 8.23e-03, grad_scale: 8.0 +2024-07-28 06:05:55,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=119520.0, ans=0.125 +2024-07-28 06:05:56,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=119520.0, ans=0.05 +2024-07-28 06:05:56,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=119520.0, ans=0.5 +2024-07-28 06:06:09,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=15.0 +2024-07-28 06:06:26,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=119533.33333333333, ans=0.0 +2024-07-28 06:06:27,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=119533.33333333333, ans=0.025 +2024-07-28 06:06:31,969 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:06:33,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.78 vs. limit=10.0 +2024-07-28 06:06:33,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 5.758e+01 6.163e+01 6.826e+01 1.029e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 06:07:13,094 INFO [train.py:1114] (0/4) Epoch 9, batch 7900, loss[loss=0.2214, simple_loss=0.2963, pruned_loss=0.07326, over 4871.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2912, pruned_loss=0.05996, over 932889.38 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 8.0 +2024-07-28 06:07:46,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119600.0, ans=0.1 +2024-07-28 06:08:03,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=119613.33333333333, ans=0.125 +2024-07-28 06:08:16,887 INFO [train.py:1114] (0/4) Epoch 9, batch 7950, loss[loss=0.2268, simple_loss=0.3113, pruned_loss=0.07115, over 3337.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2909, pruned_loss=0.05978, over 934839.70 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 8.0 +2024-07-28 06:08:20,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=119640.0, ans=0.125 +2024-07-28 06:08:41,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=119653.33333333333, ans=0.2 +2024-07-28 06:08:41,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.97 vs. 
limit=22.5 +2024-07-28 06:08:43,903 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:08:45,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=119666.66666666667, ans=0.125 +2024-07-28 06:08:46,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=119666.66666666667, ans=0.0 +2024-07-28 06:08:51,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=119666.66666666667, ans=0.0 +2024-07-28 06:08:54,867 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 5.779e+01 6.459e+01 7.298e+01 1.141e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 06:08:55,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=119680.0, ans=0.025 +2024-07-28 06:09:20,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.81 vs. limit=10.0 +2024-07-28 06:09:21,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=119693.33333333333, ans=0.125 +2024-07-28 06:09:21,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.63 vs. limit=15.0 +2024-07-28 06:09:24,263 INFO [train.py:1114] (0/4) Epoch 9, batch 8000, loss[loss=0.176, simple_loss=0.2567, pruned_loss=0.04761, over 4614.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2898, pruned_loss=0.05967, over 934172.80 frames. ], batch size: 11, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:09:47,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=119733.33333333333, ans=0.125 +2024-07-28 06:10:12,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119746.66666666667, ans=0.125 +2024-07-28 06:10:13,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=119746.66666666667, ans=0.125 +2024-07-28 06:10:28,778 INFO [train.py:1114] (0/4) Epoch 9, batch 8050, loss[loss=0.2083, simple_loss=0.2914, pruned_loss=0.06262, over 4808.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2915, pruned_loss=0.06077, over 933704.10 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:10:28,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=119773.33333333333, ans=0.0 +2024-07-28 06:10:31,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0 +2024-07-28 06:10:43,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119800.0, ans=0.125 +2024-07-28 06:10:49,921 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.138e+01 7.014e+01 8.220e+01 1.277e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-28 06:10:50,997 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.72 vs. 
limit=12.0 +2024-07-28 06:10:54,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=119826.66666666667, ans=0.125 +2024-07-28 06:10:56,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=119826.66666666667, ans=15.0 +2024-07-28 06:10:57,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=119826.66666666667, ans=0.125 +2024-07-28 06:11:03,036 INFO [train.py:1114] (0/4) Epoch 9, batch 8100, loss[loss=0.2224, simple_loss=0.3141, pruned_loss=0.06531, over 4804.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2926, pruned_loss=0.06149, over 933558.44 frames. ], batch size: 15, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:11:19,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=119866.66666666667, ans=0.0 +2024-07-28 06:11:23,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=15.0 +2024-07-28 06:11:35,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=119893.33333333333, ans=0.2 +2024-07-28 06:11:35,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=119893.33333333333, ans=0.0 +2024-07-28 06:11:36,516 INFO [train.py:1114] (0/4) Epoch 9, batch 8150, loss[loss=0.2233, simple_loss=0.3176, pruned_loss=0.06449, over 4806.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2921, pruned_loss=0.06101, over 937018.07 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:11:37,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=119906.66666666667, ans=0.0 +2024-07-28 06:11:45,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=119920.0, ans=0.0 +2024-07-28 06:11:49,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119933.33333333333, ans=0.125 +2024-07-28 06:11:57,274 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.730e+01 6.295e+01 7.311e+01 1.625e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 06:12:08,486 INFO [train.py:1114] (0/4) Epoch 9, batch 8200, loss[loss=0.2199, simple_loss=0.3046, pruned_loss=0.06758, over 4810.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2922, pruned_loss=0.06062, over 938746.74 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:12:23,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=120000.0, ans=0.2 +2024-07-28 06:12:37,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=120013.33333333333, ans=0.0 +2024-07-28 06:12:47,800 INFO [train.py:1114] (0/4) Epoch 9, batch 8250, loss[loss=0.1899, simple_loss=0.2908, pruned_loss=0.04447, over 4904.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2923, pruned_loss=0.06023, over 939180.29 frames. 
], batch size: 13, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:12:59,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=120053.33333333333, ans=0.02 +2024-07-28 06:13:07,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=120066.66666666667, ans=0.125 +2024-07-28 06:13:09,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=120066.66666666667, ans=0.2 +2024-07-28 06:13:10,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120080.0, ans=0.125 +2024-07-28 06:13:12,021 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.840e+01 6.472e+01 7.401e+01 1.114e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 06:13:14,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=120080.0, ans=0.0 +2024-07-28 06:13:31,411 INFO [train.py:1114] (0/4) Epoch 9, batch 8300, loss[loss=0.2317, simple_loss=0.3153, pruned_loss=0.0741, over 4893.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2927, pruned_loss=0.0604, over 939366.85 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:13:37,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=120106.66666666667, ans=0.125 +2024-07-28 06:13:45,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=120133.33333333333, ans=0.0 +2024-07-28 06:13:46,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=120133.33333333333, ans=0.0 +2024-07-28 06:13:46,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=120133.33333333333, ans=0.2 +2024-07-28 06:13:53,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=120146.66666666667, ans=0.0 +2024-07-28 06:14:01,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.52 vs. limit=12.0 +2024-07-28 06:14:06,567 INFO [train.py:1114] (0/4) Epoch 9, batch 8350, loss[loss=0.2212, simple_loss=0.3043, pruned_loss=0.06906, over 4792.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2919, pruned_loss=0.06008, over 942120.45 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 16.0 +2024-07-28 06:14:09,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=120173.33333333333, ans=0.95 +2024-07-28 06:14:23,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.69 vs. 
limit=15.0 +2024-07-28 06:14:26,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=120200.0, ans=0.0 +2024-07-28 06:14:26,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=120200.0, ans=0.125 +2024-07-28 06:14:32,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.725e+01 6.523e+01 7.692e+01 9.570e+01, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 06:14:38,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=120226.66666666667, ans=0.0 +2024-07-28 06:14:39,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=120226.66666666667, ans=0.125 +2024-07-28 06:14:42,959 INFO [train.py:1114] (0/4) Epoch 9, batch 8400, loss[loss=0.1849, simple_loss=0.2649, pruned_loss=0.05242, over 4777.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2917, pruned_loss=0.06025, over 940576.11 frames. ], batch size: 12, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:14:47,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=120240.0, ans=0.05 +2024-07-28 06:14:48,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=120240.0, ans=0.2 +2024-07-28 06:15:01,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=120266.66666666667, ans=0.125 +2024-07-28 06:15:14,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=120293.33333333333, ans=0.025 +2024-07-28 06:15:15,485 INFO [train.py:1114] (0/4) Epoch 9, batch 8450, loss[loss=0.2297, simple_loss=0.3054, pruned_loss=0.07698, over 4801.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2919, pruned_loss=0.06051, over 939307.65 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:15:26,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=120320.0, ans=0.125 +2024-07-28 06:15:37,873 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=27.61 vs. limit=15.0 +2024-07-28 06:15:37,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=12.0 +2024-07-28 06:15:38,206 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.798e+01 6.423e+01 7.347e+01 1.111e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 06:15:43,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=120360.0, ans=0.125 +2024-07-28 06:15:44,801 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:15:49,324 INFO [train.py:1114] (0/4) Epoch 9, batch 8500, loss[loss=0.1579, simple_loss=0.2572, pruned_loss=0.02935, over 4617.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.291, pruned_loss=0.06017, over 939001.49 frames. 
], batch size: 11, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:15:55,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=120386.66666666667, ans=0.0 +2024-07-28 06:16:06,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=120400.0, ans=0.2 +2024-07-28 06:16:17,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=120426.66666666667, ans=0.0 +2024-07-28 06:16:17,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=120426.66666666667, ans=0.125 +2024-07-28 06:16:18,479 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:16:19,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.79 vs. limit=15.0 +2024-07-28 06:16:21,542 INFO [train.py:1114] (0/4) Epoch 9, batch 8550, loss[loss=0.1935, simple_loss=0.271, pruned_loss=0.05805, over 4808.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2908, pruned_loss=0.05989, over 939766.84 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 16.0 +2024-07-28 06:16:28,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=120453.33333333333, ans=0.025 +2024-07-28 06:16:36,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=120466.66666666667, ans=0.0 +2024-07-28 06:16:43,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.723e+01 6.764e+01 8.281e+01 1.171e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-28 06:16:44,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.90 vs. limit=15.0 +2024-07-28 06:16:54,057 INFO [train.py:1114] (0/4) Epoch 9, batch 8600, loss[loss=0.2595, simple_loss=0.3427, pruned_loss=0.08813, over 4806.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.06063, over 939457.79 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:16:56,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120506.66666666667, ans=0.1 +2024-07-28 06:17:00,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=120520.0, ans=0.0 +2024-07-28 06:17:26,333 INFO [train.py:1114] (0/4) Epoch 9, batch 8650, loss[loss=0.2285, simple_loss=0.311, pruned_loss=0.07304, over 4900.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2909, pruned_loss=0.06053, over 940777.29 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:17:29,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 06:17:40,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.51 vs. 
limit=15.0 +2024-07-28 06:17:41,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=120600.0, ans=0.025 +2024-07-28 06:17:45,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=120613.33333333333, ans=0.125 +2024-07-28 06:17:47,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=120613.33333333333, ans=0.125 +2024-07-28 06:17:48,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.743e+01 6.194e+01 7.423e+01 1.120e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 06:17:59,200 INFO [train.py:1114] (0/4) Epoch 9, batch 8700, loss[loss=0.2386, simple_loss=0.3151, pruned_loss=0.08104, over 4757.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2921, pruned_loss=0.06155, over 938297.80 frames. ], batch size: 13, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:18:13,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120666.66666666667, ans=0.125 +2024-07-28 06:18:24,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=120680.0, ans=0.07 +2024-07-28 06:18:34,231 INFO [train.py:1114] (0/4) Epoch 9, batch 8750, loss[loss=0.2365, simple_loss=0.3186, pruned_loss=0.07715, over 4679.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2916, pruned_loss=0.06085, over 936602.53 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:18:39,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.82 vs. limit=15.0 +2024-07-28 06:18:40,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=120720.0, ans=0.2 +2024-07-28 06:18:56,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.820e+01 6.301e+01 7.114e+01 1.037e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 06:19:00,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=120760.0, ans=0.125 +2024-07-28 06:19:06,415 INFO [train.py:1114] (0/4) Epoch 9, batch 8800, loss[loss=0.1714, simple_loss=0.2611, pruned_loss=0.0408, over 4935.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2917, pruned_loss=0.06095, over 937938.49 frames. ], batch size: 14, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:19:08,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 06:19:29,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=120813.33333333333, ans=0.125 +2024-07-28 06:19:36,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=120826.66666666667, ans=0.0 +2024-07-28 06:19:39,915 INFO [train.py:1114] (0/4) Epoch 9, batch 8850, loss[loss=0.2276, simple_loss=0.3217, pruned_loss=0.06671, over 4452.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2917, pruned_loss=0.06103, over 932120.06 frames. 
], batch size: 21, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:19:40,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=120840.0, ans=22.5 +2024-07-28 06:19:52,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.51 vs. limit=15.0 +2024-07-28 06:20:02,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.678e+01 6.367e+01 7.332e+01 1.676e+02, threshold=1.273e+02, percent-clipped=2.0 +2024-07-28 06:20:05,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=120893.33333333333, ans=0.125 +2024-07-28 06:20:06,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=120893.33333333333, ans=0.025 +2024-07-28 06:20:13,226 INFO [train.py:1114] (0/4) Epoch 9, batch 8900, loss[loss=0.1797, simple_loss=0.2556, pruned_loss=0.05188, over 4951.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2925, pruned_loss=0.06155, over 930466.41 frames. ], batch size: 12, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:20:18,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=120906.66666666667, ans=0.2 +2024-07-28 06:20:19,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=120920.0, ans=0.125 +2024-07-28 06:20:58,028 INFO [train.py:1114] (0/4) Epoch 9, batch 8950, loss[loss=0.2026, simple_loss=0.2852, pruned_loss=0.05997, over 4430.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2917, pruned_loss=0.0616, over 930926.17 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:21:09,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=120986.66666666667, ans=0.2 +2024-07-28 06:21:19,652 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.816e+01 6.215e+01 7.468e+01 1.036e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 06:21:20,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-07-28 06:21:24,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=121026.66666666667, ans=0.0 +2024-07-28 06:21:29,727 INFO [train.py:1114] (0/4) Epoch 9, batch 9000, loss[loss=0.1716, simple_loss=0.2684, pruned_loss=0.03746, over 4643.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2904, pruned_loss=0.06095, over 933745.74 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:21:29,728 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 06:21:45,147 INFO [train.py:1146] (0/4) Epoch 9, validation: loss=0.1749, simple_loss=0.2792, pruned_loss=0.03531, over 944034.00 frames. +2024-07-28 06:21:45,148 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 06:22:26,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.78 vs. 
limit=22.5 +2024-07-28 06:22:31,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121106.66666666667, ans=0.1 +2024-07-28 06:22:35,214 INFO [train.py:1114] (0/4) Epoch 9, batch 9050, loss[loss=0.1567, simple_loss=0.2344, pruned_loss=0.03951, over 4538.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2907, pruned_loss=0.06061, over 934073.40 frames. ], batch size: 10, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:22:35,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=121106.66666666667, ans=0.125 +2024-07-28 06:22:39,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=121106.66666666667, ans=0.125 +2024-07-28 06:22:45,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121120.0, ans=0.125 +2024-07-28 06:22:47,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.21 vs. limit=22.5 +2024-07-28 06:23:15,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.675e+01 6.570e+01 7.797e+01 1.121e+02, threshold=1.314e+02, percent-clipped=0.0 +2024-07-28 06:23:17,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121146.66666666667, ans=0.125 +2024-07-28 06:23:24,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121160.0, ans=0.125 +2024-07-28 06:23:30,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=121160.0, ans=0.0 +2024-07-28 06:23:30,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.24 vs. limit=22.5 +2024-07-28 06:23:32,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121173.33333333333, ans=0.125 +2024-07-28 06:23:32,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=121173.33333333333, ans=0.1 +2024-07-28 06:23:32,985 INFO [train.py:1114] (0/4) Epoch 9, batch 9100, loss[loss=0.1786, simple_loss=0.2707, pruned_loss=0.04327, over 4928.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2903, pruned_loss=0.06042, over 936936.05 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:23:54,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=121200.0, ans=0.035 +2024-07-28 06:24:00,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.89 vs. 
limit=15.0 +2024-07-28 06:24:04,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121213.33333333333, ans=0.1 +2024-07-28 06:24:05,248 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:24:12,197 INFO [train.py:1114] (0/4) Epoch 9, batch 9150, loss[loss=0.2503, simple_loss=0.3367, pruned_loss=0.08198, over 4813.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2905, pruned_loss=0.06035, over 935441.81 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:24:17,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=121240.0, ans=0.07 +2024-07-28 06:24:47,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=121266.66666666667, ans=0.2 +2024-07-28 06:24:53,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=121266.66666666667, ans=0.025 +2024-07-28 06:24:58,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121280.0, ans=0.1 +2024-07-28 06:24:59,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.215e+01 7.054e+01 1.564e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 06:25:10,703 INFO [train.py:1114] (0/4) Epoch 9, batch 9200, loss[loss=0.1715, simple_loss=0.255, pruned_loss=0.04398, over 4859.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2891, pruned_loss=0.05934, over 937154.04 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:25:24,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121306.66666666667, ans=0.1 +2024-07-28 06:25:29,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=121320.0, ans=0.2 +2024-07-28 06:25:31,516 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.99 vs. limit=10.0 +2024-07-28 06:25:42,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=121346.66666666667, ans=0.2 +2024-07-28 06:25:54,954 INFO [train.py:1114] (0/4) Epoch 9, batch 9250, loss[loss=0.2016, simple_loss=0.303, pruned_loss=0.05015, over 4633.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2902, pruned_loss=0.05977, over 937780.92 frames. 
], batch size: 13, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:25:56,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121373.33333333333, ans=0.125 +2024-07-28 06:26:05,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=121386.66666666667, ans=0.025 +2024-07-28 06:26:12,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=121400.0, ans=0.125 +2024-07-28 06:26:13,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=121413.33333333333, ans=0.025 +2024-07-28 06:26:16,176 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.765e+01 6.275e+01 7.273e+01 1.016e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 06:26:19,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=121413.33333333333, ans=0.2 +2024-07-28 06:26:22,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=121426.66666666667, ans=0.125 +2024-07-28 06:26:26,999 INFO [train.py:1114] (0/4) Epoch 9, batch 9300, loss[loss=0.2376, simple_loss=0.3038, pruned_loss=0.08573, over 4772.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2901, pruned_loss=0.06014, over 937389.94 frames. ], batch size: 12, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:26:27,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0 +2024-07-28 06:26:28,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=121440.0, ans=0.0 +2024-07-28 06:26:29,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=121440.0, ans=0.2 +2024-07-28 06:26:31,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.57 vs. limit=22.5 +2024-07-28 06:27:29,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-07-28 06:27:34,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121506.66666666667, ans=0.0 +2024-07-28 06:27:34,653 INFO [train.py:1114] (0/4) Epoch 9, batch 9350, loss[loss=0.1654, simple_loss=0.2435, pruned_loss=0.04362, over 4799.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2902, pruned_loss=0.06062, over 934400.18 frames. ], batch size: 11, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:28:00,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121546.66666666667, ans=0.125 +2024-07-28 06:28:01,024 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.677e+01 6.203e+01 7.268e+01 1.059e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 06:28:21,364 INFO [train.py:1114] (0/4) Epoch 9, batch 9400, loss[loss=0.2078, simple_loss=0.2965, pruned_loss=0.05954, over 4689.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.29, pruned_loss=0.06053, over 931867.76 frames. 
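The periodic `WARNING [optim.py:487]` lines summarize the distribution of recent gradient norms (their quartiles), the clipping threshold currently in force, and the share of recent batches whose gradients were clipped. Below is a hedged sketch of one such adaptive rule, assuming the threshold is `clipping_scale` times a running median of recent norms; the exact rule in the repo's `optim.py` may differ, and all names here are illustrative.

```python
# Hedged sketch of grad-norm clipping driven by recent-norm statistics, the
# kind of rule behind "Clipping_scale=2.0, grad-norm quartiles ...
# threshold=... percent-clipped=...". The median-based threshold is an
# assumption, not necessarily the actual rule in icefall's optim.py.
from collections import deque
from statistics import median
import torch

class AdaptiveClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total gradient norms
        self.num_clipped = 0
        self.num_seen = 0

    def clip_(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        norm = float(torch.sqrt(sum((g ** 2).sum() for g in grads)))
        self.norms.append(norm)
        threshold = self.scale * median(self.norms)
        self.num_seen += 1
        if norm > threshold:
            self.num_clipped += 1
            for g in grads:
                g.mul_(threshold / norm)
        # statistics.quantiles(self.norms, n=4) would give the logged
        # quartiles; percent-clipped is 100 * num_clipped / num_seen.
        return threshold
```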
], batch size: 13, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:28:27,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=121586.66666666667, ans=0.2 +2024-07-28 06:28:27,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=121586.66666666667, ans=0.125 +2024-07-28 06:28:33,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=121586.66666666667, ans=0.0 +2024-07-28 06:28:34,923 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:28:37,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=121600.0, ans=0.125 +2024-07-28 06:28:52,889 INFO [train.py:1114] (0/4) Epoch 9, batch 9450, loss[loss=0.1773, simple_loss=0.2419, pruned_loss=0.05629, over 4811.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2899, pruned_loss=0.06006, over 931458.68 frames. ], batch size: 11, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:28:58,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=121653.33333333333, ans=0.125 +2024-07-28 06:29:01,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=121653.33333333333, ans=0.125 +2024-07-28 06:29:04,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=121666.66666666667, ans=0.125 +2024-07-28 06:29:10,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121666.66666666667, ans=0.125 +2024-07-28 06:29:13,875 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.626e+01 6.016e+01 7.236e+01 1.280e+02, threshold=1.203e+02, percent-clipped=1.0 +2024-07-28 06:29:22,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=121693.33333333333, ans=0.02 +2024-07-28 06:29:24,030 INFO [train.py:1114] (0/4) Epoch 9, batch 9500, loss[loss=0.1957, simple_loss=0.288, pruned_loss=0.05175, over 4706.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2903, pruned_loss=0.05941, over 934424.98 frames. ], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:29:31,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=121720.0, ans=0.125 +2024-07-28 06:29:40,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=121733.33333333333, ans=0.0 +2024-07-28 06:29:42,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121733.33333333333, ans=0.1 +2024-07-28 06:29:45,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=121746.66666666667, ans=0.025 +2024-07-28 06:29:48,120 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.26 vs. 
limit=15.0 +2024-07-28 06:29:48,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=15.0 +2024-07-28 06:29:49,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5 +2024-07-28 06:29:55,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.74 vs. limit=15.0 +2024-07-28 06:30:02,243 INFO [train.py:1114] (0/4) Epoch 9, batch 9550, loss[loss=0.1792, simple_loss=0.2672, pruned_loss=0.04559, over 4769.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2903, pruned_loss=0.05939, over 932264.18 frames. ], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:10,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=121786.66666666667, ans=0.0 +2024-07-28 06:30:15,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=121800.0, ans=0.0 +2024-07-28 06:30:20,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=121800.0, ans=0.125 +2024-07-28 06:30:23,657 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.774e+01 6.473e+01 7.553e+01 1.235e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 06:30:35,251 INFO [train.py:1114] (0/4) Epoch 9, batch 9600, loss[loss=0.2965, simple_loss=0.3459, pruned_loss=0.1236, over 3444.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2905, pruned_loss=0.05921, over 931408.58 frames. ], batch size: 35, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:35,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.18 vs. limit=15.0 +2024-07-28 06:30:41,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121853.33333333333, ans=0.1 +2024-07-28 06:30:47,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=121853.33333333333, ans=0.2 +2024-07-28 06:31:07,158 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.44 vs. limit=15.0 +2024-07-28 06:31:08,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=121880.0, ans=0.125 +2024-07-28 06:31:10,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=121880.0, ans=0.09899494936611666 +2024-07-28 06:31:15,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=121893.33333333333, ans=0.125 +2024-07-28 06:31:18,862 INFO [train.py:1114] (0/4) Epoch 9, batch 9650, loss[loss=0.2114, simple_loss=0.3006, pruned_loss=0.06104, over 4858.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2915, pruned_loss=0.06009, over 927664.97 frames. 
], batch size: 16, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:31:33,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=121920.0, ans=0.0 +2024-07-28 06:31:34,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121920.0, ans=0.0 +2024-07-28 06:31:34,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-07-28 06:31:36,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=121920.0, ans=0.125 +2024-07-28 06:31:37,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121920.0, ans=0.1 +2024-07-28 06:31:43,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121933.33333333333, ans=0.1 +2024-07-28 06:31:48,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=121933.33333333333, ans=0.025 +2024-07-28 06:31:53,659 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.799e+01 6.411e+01 7.286e+01 1.019e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 06:31:53,761 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:31:56,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=121946.66666666667, ans=0.125 +2024-07-28 06:32:15,509 INFO [train.py:1114] (0/4) Epoch 9, batch 9700, loss[loss=0.2409, simple_loss=0.3135, pruned_loss=0.08414, over 4118.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2918, pruned_loss=0.06066, over 925537.72 frames. ], batch size: 25, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:17,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=121973.33333333333, ans=0.5 +2024-07-28 06:32:18,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=121973.33333333333, ans=0.125 +2024-07-28 06:32:24,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=121986.66666666667, ans=0.025 +2024-07-28 06:32:28,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=122000.0, ans=0.125 +2024-07-28 06:32:44,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=122026.66666666667, ans=0.125 +2024-07-28 06:32:51,305 INFO [train.py:1114] (0/4) Epoch 9, batch 9750, loss[loss=0.2419, simple_loss=0.3341, pruned_loss=0.07484, over 4681.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2908, pruned_loss=0.05982, over 926138.79 frames. ], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:56,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.78 vs. 
limit=22.5 +2024-07-28 06:32:57,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.68 vs. limit=15.0 +2024-07-28 06:33:06,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=122053.33333333333, ans=0.125 +2024-07-28 06:33:06,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=122053.33333333333, ans=0.125 +2024-07-28 06:33:13,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=122066.66666666667, ans=0.125 +2024-07-28 06:33:13,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=122066.66666666667, ans=0.0 +2024-07-28 06:33:18,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=122080.0, ans=0.125 +2024-07-28 06:33:18,531 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.645e+01 5.595e+01 6.071e+01 7.420e+01 1.003e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 06:33:26,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=122093.33333333333, ans=0.025 +2024-07-28 06:33:27,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122093.33333333333, ans=0.125 +2024-07-28 06:33:28,424 INFO [train.py:1114] (0/4) Epoch 9, batch 9800, loss[loss=0.1767, simple_loss=0.2736, pruned_loss=0.03994, over 4695.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2894, pruned_loss=0.05934, over 925566.00 frames. ], batch size: 12, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:33:33,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=122106.66666666667, ans=0.125 +2024-07-28 06:33:35,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.91 vs. limit=12.0 +2024-07-28 06:33:39,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=122120.0, ans=0.125 +2024-07-28 06:33:40,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=122120.0, ans=0.0 +2024-07-28 06:33:56,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=122146.66666666667, ans=0.2 +2024-07-28 06:34:10,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=15.0 +2024-07-28 06:34:10,938 INFO [train.py:1114] (0/4) Epoch 9, batch 9850, loss[loss=0.1908, simple_loss=0.2772, pruned_loss=0.05221, over 4901.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2894, pruned_loss=0.05939, over 928073.75 frames. 
], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:34:12,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=122173.33333333333, ans=0.125 +2024-07-28 06:34:32,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.834e+01 6.555e+01 7.421e+01 1.036e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-28 06:34:33,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=122213.33333333333, ans=0.2 +2024-07-28 06:34:42,487 INFO [train.py:1114] (0/4) Epoch 9, batch 9900, loss[loss=0.2035, simple_loss=0.2931, pruned_loss=0.05695, over 4828.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2903, pruned_loss=0.06011, over 927410.23 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:34:47,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=122240.0, ans=0.0 +2024-07-28 06:34:47,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=122240.0, ans=0.0 +2024-07-28 06:34:52,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=122253.33333333333, ans=0.0 +2024-07-28 06:34:54,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=122266.66666666667, ans=0.125 +2024-07-28 06:34:54,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.73 vs. limit=15.0 +2024-07-28 06:34:55,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=122266.66666666667, ans=0.125 +2024-07-28 06:34:59,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=15.0 +2024-07-28 06:35:00,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122266.66666666667, ans=0.0 +2024-07-28 06:35:07,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=122293.33333333333, ans=0.07 +2024-07-28 06:35:25,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122293.33333333333, ans=0.0 +2024-07-28 06:35:26,859 INFO [train.py:1114] (0/4) Epoch 9, batch 9950, loss[loss=0.1661, simple_loss=0.249, pruned_loss=0.04161, over 4811.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2919, pruned_loss=0.06108, over 930196.91 frames. 
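The `Whitening` entries compare a per-module statistic against a limit (`metric=... vs. limit=...`); when the metric exceeds its limit, the module nudges activations toward a whiter, less correlated covariance. One plausible proxy for such a metric is the eigenvalue-dispersion ratio of the feature covariance, sketched below under the assumption that 1.0 means perfectly white; the exact formula in `scaling.py` may differ.

```python
# Hedged sketch of a "how white are these features" statistic of the kind
# the Whitening log lines compare against a limit. The eigenvalue-ratio
# proxy below is an assumption, not necessarily the repo's exact formula.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns 1.0 for perfectly white
    (isotropic covariance) features; larger when few directions dominate."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]       # (C, C) feature covariance
    eigs = torch.linalg.eigvalsh(cov)  # real eigenvalues, ascending
    return float((eigs ** 2).mean() / eigs.mean() ** 2)

feats = torch.randn(1000, 256)  # near-white -> metric close to 1.0
print(whitening_metric(feats))
```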
], batch size: 11, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:35:33,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122306.66666666667, ans=0.0 +2024-07-28 06:35:39,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122320.0, ans=0.125 +2024-07-28 06:35:41,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=122333.33333333333, ans=0.0 +2024-07-28 06:35:47,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122346.66666666667, ans=0.125 +2024-07-28 06:35:49,834 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.887e+01 6.567e+01 7.886e+01 1.035e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 06:35:52,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=122346.66666666667, ans=0.0 +2024-07-28 06:36:00,353 INFO [train.py:1114] (0/4) Epoch 9, batch 10000, loss[loss=0.2599, simple_loss=0.3294, pruned_loss=0.09523, over 4624.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2941, pruned_loss=0.06228, over 927846.43 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:00,543 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:36:12,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-07-28 06:36:20,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.75 vs. limit=15.0 +2024-07-28 06:36:22,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=122400.0, ans=0.05 +2024-07-28 06:36:33,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=122413.33333333333, ans=0.0 +2024-07-28 06:36:42,702 INFO [train.py:1114] (0/4) Epoch 9, batch 10050, loss[loss=0.2527, simple_loss=0.3316, pruned_loss=0.08694, over 3526.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2971, pruned_loss=0.06392, over 914891.15 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:44,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-28 06:36:46,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=122440.0, ans=0.125 +2024-07-28 06:36:48,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.50 vs. limit=15.0 +2024-07-28 06:36:55,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=10.27 vs. 
limit=15.0 +2024-07-28 06:36:57,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=122466.66666666667, ans=0.0 +2024-07-28 06:37:02,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0 +2024-07-28 06:37:06,684 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.481e+01 7.122e+01 8.299e+01 1.409e+02, threshold=1.424e+02, percent-clipped=1.0 +2024-07-28 06:37:10,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.90 vs. limit=22.5 +2024-07-28 06:37:17,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=122493.33333333333, ans=0.0 +2024-07-28 06:37:18,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=122506.66666666667, ans=0.125 +2024-07-28 06:37:18,581 INFO [train.py:1114] (0/4) Epoch 9, batch 10100, loss[loss=0.2189, simple_loss=0.2927, pruned_loss=0.07257, over 3236.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3025, pruned_loss=0.06954, over 861222.76 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:37:24,199 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:37:26,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=122520.0, ans=0.0 +2024-07-28 06:37:28,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122520.0, ans=0.1 +2024-07-28 06:37:39,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=122546.66666666667, ans=0.0 +2024-07-28 06:37:41,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=122546.66666666667, ans=0.125 +2024-07-28 06:37:41,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=122546.66666666667, ans=0.2 +2024-07-28 06:37:42,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=122546.66666666667, ans=0.0 +2024-07-28 06:37:42,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122546.66666666667, ans=0.125 +2024-07-28 06:37:47,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=122560.0, ans=0.125 +2024-07-28 06:37:52,192 INFO [train.py:1114] (0/4) Epoch 9, batch 10150, loss[loss=0.2568, simple_loss=0.3211, pruned_loss=0.09627, over 3544.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3069, pruned_loss=0.07422, over 819702.84 frames. 
], batch size: 35, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:38:04,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122586.66666666667, ans=0.0 +2024-07-28 06:38:07,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.13 vs. limit=12.0 +2024-07-28 06:38:19,560 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.853e+01 6.506e+01 6.978e+01 7.406e+01 9.051e+01, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 06:38:21,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=122613.33333333333, ans=0.025 +2024-07-28 06:38:26,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=122626.66666666667, ans=0.125 +2024-07-28 06:39:22,380 INFO [train.py:1114] (0/4) Epoch 9, batch 10200, loss[loss=0.2792, simple_loss=0.3401, pruned_loss=0.1091, over 3481.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3106, pruned_loss=0.07826, over 787317.29 frames. ], batch size: 37, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:39:24,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=122640.0, ans=0.0 +2024-07-28 06:39:27,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.07 vs. limit=22.5 +2024-07-28 06:39:34,238 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-92000.pt +2024-07-28 06:39:49,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-28 06:39:51,041 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-9.pt +2024-07-28 06:41:02,957 INFO [train.py:1114] (0/4) Epoch 10, batch 0, loss[loss=0.1503, simple_loss=0.2333, pruned_loss=0.0336, over 4849.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2333, pruned_loss=0.0336, over 4849.00 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:41:02,959 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 06:41:14,705 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.1773, simple_loss=0.2829, pruned_loss=0.03584, over 944034.00 frames. +2024-07-28 06:41:14,706 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 06:41:21,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=122682.66666666667, ans=0.0 +2024-07-28 06:41:30,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=122696.0, ans=0.0 +2024-07-28 06:41:52,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=122722.66666666667, ans=0.125 +2024-07-28 06:41:58,960 INFO [train.py:1114] (0/4) Epoch 10, batch 50, loss[loss=0.1756, simple_loss=0.2655, pruned_loss=0.04283, over 4617.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2944, pruned_loss=0.0614, over 206521.85 frames. 
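At fixed intervals the trainer switches to evaluation mode, reports a validation loss over the held-out frames, and logs peak GPU memory (`Maximum memory allocated so far is 4178MB`, via PyTorch's `torch.cuda.max_memory_allocated`). A minimal sketch of that step follows; `compute_loss` and `valid_dl` are stand-ins for the repo's own helpers, not its actual API.

```python
# Minimal sketch of the periodic validation + memory reporting seen in the
# log ("Computing validation loss", "validation: loss=...", "Maximum memory
# allocated so far is ...MB"). compute_loss / valid_dl are stand-ins.
import logging
import torch

def validate(model, valid_dl, compute_loss, device) -> float:
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = compute_loss(model, batch, device)
            tot_loss += float(loss)
            tot_frames += num_frames
    model.train()
    logging.info(
        f"validation: loss={tot_loss / tot_frames:.4f}, "
        f"over {tot_frames:.2f} frames"
    )
    if torch.cuda.is_available():
        mb = torch.cuda.max_memory_allocated(device) // 1024 // 1024
        logging.info(f"Maximum memory allocated so far is {mb}MB")
    return tot_loss / tot_frames
```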
], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:04,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=122736.0, ans=0.125 +2024-07-28 06:42:06,812 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.950e+01 6.646e+01 7.258e+01 1.106e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 06:42:09,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=122749.33333333333, ans=0.07 +2024-07-28 06:42:09,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122749.33333333333, ans=0.125 +2024-07-28 06:42:21,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122749.33333333333, ans=0.1 +2024-07-28 06:42:26,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122762.66666666667, ans=0.125 +2024-07-28 06:42:33,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 06:42:46,653 INFO [train.py:1114] (0/4) Epoch 10, batch 100, loss[loss=0.1925, simple_loss=0.2683, pruned_loss=0.05836, over 4652.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.295, pruned_loss=0.06084, over 365285.33 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:47,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122802.66666666667, ans=0.1 +2024-07-28 06:42:59,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=122802.66666666667, ans=22.5 +2024-07-28 06:43:06,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.19 vs. limit=15.0 +2024-07-28 06:43:16,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=122842.66666666667, ans=0.125 +2024-07-28 06:43:27,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=122842.66666666667, ans=0.125 +2024-07-28 06:43:40,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122856.0, ans=0.125 +2024-07-28 06:43:42,707 INFO [train.py:1114] (0/4) Epoch 10, batch 150, loss[loss=0.1582, simple_loss=0.2489, pruned_loss=0.03375, over 4613.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2909, pruned_loss=0.05843, over 493815.15 frames. 
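The logged learning rate decays smoothly with batch count within an epoch (8.20e-03 early in epoch 9 down to 8.12e-03 by its end) and steps down at the epoch boundary (7.72e-03 at the start of epoch 10), consistent with a schedule that depends on both the batch and epoch counters. The Eden-style two-factor formula below is an assumption about the exact rule, shown only to illustrate that shape; the base rate and constants are illustrative.

```python
# Hedged sketch of a two-factor lr rule consistent with the logged values:
# smooth decay with batch count inside an epoch, plus a step down at each
# epoch boundary. The exact formula and constants are assumptions.
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# ~6.8e-03: the same order of magnitude as the lr logged around batch 120200
print(eden_lr(0.045, batch=120200, epoch=9))
```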
], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:43:46,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122869.33333333333, ans=0.125 +2024-07-28 06:43:51,034 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.790e+01 6.360e+01 7.461e+01 1.069e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:44:32,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122922.66666666667, ans=0.125 +2024-07-28 06:44:33,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.88 vs. limit=15.0 +2024-07-28 06:44:37,870 INFO [train.py:1114] (0/4) Epoch 10, batch 200, loss[loss=0.214, simple_loss=0.2975, pruned_loss=0.06527, over 4532.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2885, pruned_loss=0.05785, over 593235.06 frames. ], batch size: 21, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:44:49,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=122936.0, ans=0.125 +2024-07-28 06:44:56,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.74 vs. limit=8.0 +2024-07-28 06:45:00,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-07-28 06:45:16,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=122976.0, ans=0.125 +2024-07-28 06:45:18,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=122976.0, ans=0.125 +2024-07-28 06:45:22,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=122989.33333333333, ans=0.2 +2024-07-28 06:45:24,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=122989.33333333333, ans=0.125 +2024-07-28 06:45:27,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=122989.33333333333, ans=10.0 +2024-07-28 06:45:28,935 INFO [train.py:1114] (0/4) Epoch 10, batch 250, loss[loss=0.2292, simple_loss=0.3308, pruned_loss=0.06378, over 4637.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2887, pruned_loss=0.05781, over 670075.51 frames. 
], batch size: 16, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:45:29,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=123002.66666666667, ans=0.125 +2024-07-28 06:45:30,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=123002.66666666667, ans=0.0 +2024-07-28 06:45:38,191 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.690e+01 6.559e+01 7.773e+01 1.314e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 06:45:42,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=123016.0, ans=0.5 +2024-07-28 06:45:42,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=123016.0, ans=0.125 +2024-07-28 06:45:45,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=123029.33333333333, ans=0.125 +2024-07-28 06:45:48,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=123029.33333333333, ans=0.125 +2024-07-28 06:45:53,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.09 vs. limit=15.0 +2024-07-28 06:45:54,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123042.66666666667, ans=0.0 +2024-07-28 06:45:54,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=123042.66666666667, ans=0.2 +2024-07-28 06:46:08,655 INFO [train.py:1114] (0/4) Epoch 10, batch 300, loss[loss=0.2567, simple_loss=0.3416, pruned_loss=0.08594, over 4801.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2878, pruned_loss=0.05734, over 729697.93 frames. ], batch size: 15, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:46:14,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=123069.33333333333, ans=0.125 +2024-07-28 06:46:27,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=123096.0, ans=0.125 +2024-07-28 06:46:32,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123109.33333333333, ans=0.1 +2024-07-28 06:46:41,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=123122.66666666667, ans=0.07 +2024-07-28 06:46:42,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=123122.66666666667, ans=0.125 +2024-07-28 06:46:48,125 INFO [train.py:1114] (0/4) Epoch 10, batch 350, loss[loss=0.1601, simple_loss=0.2472, pruned_loss=0.03653, over 4932.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2886, pruned_loss=0.05763, over 775822.35 frames. 
], batch size: 12, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:46:52,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=123136.0, ans=0.125 +2024-07-28 06:47:00,719 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.536e+01 6.033e+01 6.929e+01 1.043e+02, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 06:47:49,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=123162.66666666667, ans=0.2 +2024-07-28 06:50:15,423 INFO [train.py:1114] (0/4) Epoch 10, batch 400, loss[loss=0.1868, simple_loss=0.2809, pruned_loss=0.04636, over 4695.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2883, pruned_loss=0.05826, over 813247.60 frames. ], batch size: 13, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:50:17,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=123202.66666666667, ans=0.125 +2024-07-28 06:50:27,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123202.66666666667, ans=0.0 +2024-07-28 06:50:27,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=123202.66666666667, ans=0.125 +2024-07-28 06:50:29,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=123202.66666666667, ans=0.09899494936611666 +2024-07-28 06:50:29,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=123202.66666666667, ans=0.2 +2024-07-28 06:50:45,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123216.0, ans=0.1 +2024-07-28 06:50:53,377 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.46 vs. limit=15.0 +2024-07-28 06:51:02,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=123229.33333333333, ans=0.125 +2024-07-28 06:51:15,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=123242.66666666667, ans=0.125 +2024-07-28 06:52:20,998 INFO [train.py:1114] (0/4) Epoch 10, batch 450, loss[loss=0.218, simple_loss=0.3073, pruned_loss=0.06434, over 4638.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2891, pruned_loss=0.05899, over 838535.03 frames. 
], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:52:46,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.561e+01 6.292e+01 7.345e+01 1.157e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 06:53:18,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=123296.0, ans=0.125 +2024-07-28 06:53:19,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=123296.0, ans=0.2 +2024-07-28 06:53:23,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=123296.0, ans=10.0 +2024-07-28 06:54:42,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=123322.66666666667, ans=0.0 +2024-07-28 06:54:48,128 INFO [train.py:1114] (0/4) Epoch 10, batch 500, loss[loss=0.2166, simple_loss=0.2916, pruned_loss=0.07079, over 4698.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2887, pruned_loss=0.05863, over 861034.39 frames. ], batch size: 15, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:54:48,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=123336.0, ans=0.0 +2024-07-28 06:54:50,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-07-28 06:54:54,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.19 vs. limit=15.0 +2024-07-28 06:54:56,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123336.0, ans=0.125 +2024-07-28 06:55:17,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=123349.33333333333, ans=0.125 +2024-07-28 06:55:47,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123362.66666666667, ans=0.1 +2024-07-28 06:55:49,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=123376.0, ans=0.0 +2024-07-28 06:55:59,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=123376.0, ans=0.125 +2024-07-28 06:56:00,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=123376.0, ans=0.125 +2024-07-28 06:56:05,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123389.33333333333, ans=0.1 +2024-07-28 06:56:07,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=123389.33333333333, ans=0.125 +2024-07-28 06:56:13,169 INFO [train.py:1114] (0/4) Epoch 10, batch 550, loss[loss=0.1868, simple_loss=0.2783, pruned_loss=0.0477, over 4593.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2884, pruned_loss=0.05867, over 877083.90 frames. 
], batch size: 17, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:56:15,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=123402.66666666667, ans=0.125 +2024-07-28 06:56:16,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=123402.66666666667, ans=0.125 +2024-07-28 06:56:18,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123402.66666666667, ans=0.0 +2024-07-28 06:56:22,001 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.657e+01 6.359e+01 7.249e+01 1.002e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:56:26,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=123416.0, ans=0.125 +2024-07-28 06:56:31,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-07-28 06:56:41,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0 +2024-07-28 06:56:53,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.74 vs. limit=15.0 +2024-07-28 06:56:55,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=123456.0, ans=0.0 +2024-07-28 06:56:59,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123456.0, ans=0.125 +2024-07-28 06:57:03,612 INFO [train.py:1114] (0/4) Epoch 10, batch 600, loss[loss=0.261, simple_loss=0.3365, pruned_loss=0.09272, over 4633.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2896, pruned_loss=0.05892, over 891524.57 frames. ], batch size: 16, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:16,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=123482.66666666667, ans=0.025 +2024-07-28 06:57:26,532 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-28 06:57:28,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.76 vs. limit=10.0 +2024-07-28 06:57:29,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=123509.33333333333, ans=0.0 +2024-07-28 06:57:30,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=123509.33333333333, ans=0.0 +2024-07-28 06:57:41,775 INFO [train.py:1114] (0/4) Epoch 10, batch 650, loss[loss=0.1927, simple_loss=0.2773, pruned_loss=0.05402, over 4752.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2884, pruned_loss=0.05829, over 903650.43 frames. 
], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:45,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123536.0, ans=0.1 +2024-07-28 06:57:47,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=123536.0, ans=0.0 +2024-07-28 06:57:49,745 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.335e+01 5.819e+01 6.416e+01 7.118e+01 9.444e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 06:58:18,673 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:58:24,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=123576.0, ans=0.125 +2024-07-28 06:58:25,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.43 vs. limit=10.0 +2024-07-28 06:58:37,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=123602.66666666667, ans=0.2 +2024-07-28 06:58:37,439 INFO [train.py:1114] (0/4) Epoch 10, batch 700, loss[loss=0.198, simple_loss=0.2863, pruned_loss=0.05486, over 4649.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2883, pruned_loss=0.05866, over 911855.14 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 06:58:50,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123616.0, ans=0.1 +2024-07-28 06:59:21,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=123629.33333333333, ans=0.125 +2024-07-28 06:59:22,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=123629.33333333333, ans=0.05 +2024-07-28 06:59:25,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=123642.66666666667, ans=0.0 +2024-07-28 06:59:38,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.06 vs. limit=15.0 +2024-07-28 06:59:57,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.38 vs. limit=12.0 +2024-07-28 06:59:59,344 INFO [train.py:1114] (0/4) Epoch 10, batch 750, loss[loss=0.2178, simple_loss=0.3082, pruned_loss=0.06366, over 4685.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2884, pruned_loss=0.0585, over 918184.72 frames. ], batch size: 13, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:00:02,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123669.33333333333, ans=0.1 +2024-07-28 07:00:07,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.598e+01 6.088e+01 6.743e+01 1.006e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:01:01,968 INFO [train.py:1114] (0/4) Epoch 10, batch 800, loss[loss=0.2371, simple_loss=0.293, pruned_loss=0.0906, over 4857.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2888, pruned_loss=0.05887, over 922980.35 frames. 
], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:03,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=123736.0, ans=0.0 +2024-07-28 07:01:04,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=123736.0, ans=0.125 +2024-07-28 07:01:19,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=123762.66666666667, ans=0.0 +2024-07-28 07:01:19,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=123762.66666666667, ans=0.0 +2024-07-28 07:01:30,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123776.0, ans=0.125 +2024-07-28 07:01:38,555 INFO [train.py:1114] (0/4) Epoch 10, batch 850, loss[loss=0.1876, simple_loss=0.2891, pruned_loss=0.04302, over 4667.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.289, pruned_loss=0.05892, over 926997.77 frames. ], batch size: 14, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:44,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.16 vs. limit=15.0 +2024-07-28 07:01:45,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.29 vs. limit=15.0 +2024-07-28 07:01:47,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=123816.0, ans=0.0 +2024-07-28 07:01:48,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.695e+01 6.333e+01 6.870e+01 1.740e+02, threshold=1.267e+02, percent-clipped=1.0 +2024-07-28 07:01:50,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=12.0 +2024-07-28 07:01:50,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=123816.0, ans=0.125 +2024-07-28 07:01:57,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.40 vs. limit=15.0 +2024-07-28 07:01:58,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.76 vs. limit=10.0 +2024-07-28 07:02:00,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.76 vs. limit=22.5 +2024-07-28 07:02:03,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=15.0 +2024-07-28 07:02:05,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=15.0 +2024-07-28 07:02:21,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123869.33333333333, ans=0.0 +2024-07-28 07:02:22,328 INFO [train.py:1114] (0/4) Epoch 10, batch 900, loss[loss=0.2129, simple_loss=0.2864, pruned_loss=0.06972, over 4857.00 frames. 
], tot_loss[loss=0.2034, simple_loss=0.2892, pruned_loss=0.05879, over 928266.88 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:02:33,548 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:02:35,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=123896.0, ans=0.125 +2024-07-28 07:02:42,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=123909.33333333333, ans=0.025 +2024-07-28 07:02:44,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=123909.33333333333, ans=0.04949747468305833 +2024-07-28 07:02:53,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=123922.66666666667, ans=0.0 +2024-07-28 07:02:56,034 INFO [train.py:1114] (0/4) Epoch 10, batch 950, loss[loss=0.1607, simple_loss=0.2414, pruned_loss=0.04002, over 4772.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2884, pruned_loss=0.05825, over 929897.35 frames. ], batch size: 12, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:02:58,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.14 vs. limit=15.0 +2024-07-28 07:03:00,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.90 vs. limit=22.5 +2024-07-28 07:03:04,121 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.603e+01 6.108e+01 6.683e+01 9.503e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 07:03:14,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=123962.66666666667, ans=0.125 +2024-07-28 07:03:24,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123989.33333333333, ans=0.125 +2024-07-28 07:03:29,721 INFO [train.py:1114] (0/4) Epoch 10, batch 1000, loss[loss=0.214, simple_loss=0.3014, pruned_loss=0.06332, over 4956.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2893, pruned_loss=0.05893, over 929502.19 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:03:51,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=124002.66666666667, ans=12.0 +2024-07-28 07:04:04,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=124029.33333333333, ans=0.0 +2024-07-28 07:04:13,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=124042.66666666667, ans=10.0 +2024-07-28 07:04:20,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=124056.0, ans=0.125 +2024-07-28 07:04:22,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. 
limit=15.0 +2024-07-28 07:04:23,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=124056.0, ans=0.0 +2024-07-28 07:04:24,879 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:04:25,385 INFO [train.py:1114] (0/4) Epoch 10, batch 1050, loss[loss=0.1927, simple_loss=0.285, pruned_loss=0.05016, over 4882.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2878, pruned_loss=0.05845, over 932189.05 frames. ], batch size: 14, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:04:26,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124069.33333333333, ans=0.1 +2024-07-28 07:04:55,555 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.602e+01 6.129e+01 7.252e+01 1.285e+02, threshold=1.226e+02, percent-clipped=1.0 +2024-07-28 07:05:08,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=124109.33333333333, ans=0.125 +2024-07-28 07:05:10,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=6.0 +2024-07-28 07:05:12,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=124109.33333333333, ans=0.125 +2024-07-28 07:05:28,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=124122.66666666667, ans=0.0 +2024-07-28 07:05:39,275 INFO [train.py:1114] (0/4) Epoch 10, batch 1100, loss[loss=0.2189, simple_loss=0.298, pruned_loss=0.0699, over 4909.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2876, pruned_loss=0.05822, over 934437.62 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 32.0 +2024-07-28 07:05:46,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=124149.33333333333, ans=0.2 +2024-07-28 07:05:50,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.74 vs. limit=10.0 +2024-07-28 07:06:05,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.24 vs. limit=22.5 +2024-07-28 07:06:06,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.89 vs. limit=22.5 +2024-07-28 07:06:08,470 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.75 vs. limit=22.5 +2024-07-28 07:06:22,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=124189.33333333333, ans=0.0 +2024-07-28 07:06:23,649 INFO [train.py:1114] (0/4) Epoch 10, batch 1150, loss[loss=0.1799, simple_loss=0.2664, pruned_loss=0.04667, over 4903.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2867, pruned_loss=0.05776, over 933913.79 frames. 
], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:06:57,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=124216.0, ans=0.125 +2024-07-28 07:07:00,534 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.669e+01 6.088e+01 6.784e+01 1.007e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:07:03,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=124216.0, ans=10.0 +2024-07-28 07:07:48,873 INFO [train.py:1114] (0/4) Epoch 10, batch 1200, loss[loss=0.2245, simple_loss=0.3185, pruned_loss=0.06524, over 4876.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2886, pruned_loss=0.05857, over 933046.56 frames. ], batch size: 14, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:08:02,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=124282.66666666667, ans=0.0 +2024-07-28 07:08:02,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.24 vs. limit=22.5 +2024-07-28 07:08:21,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=124296.0, ans=0.125 +2024-07-28 07:08:31,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124322.66666666667, ans=0.0 +2024-07-28 07:08:31,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124322.66666666667, ans=0.0 +2024-07-28 07:08:37,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-07-28 07:08:41,844 INFO [train.py:1114] (0/4) Epoch 10, batch 1250, loss[loss=0.2063, simple_loss=0.2961, pruned_loss=0.05822, over 4808.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2903, pruned_loss=0.05889, over 937432.94 frames. ], batch size: 15, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:08:50,579 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.912e+01 6.433e+01 7.478e+01 1.098e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 07:09:04,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=124376.0, ans=0.125 +2024-07-28 07:09:13,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=22.5 +2024-07-28 07:09:15,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=124389.33333333333, ans=10.0 +2024-07-28 07:09:17,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=124389.33333333333, ans=0.125 +2024-07-28 07:09:20,452 INFO [train.py:1114] (0/4) Epoch 10, batch 1300, loss[loss=0.2251, simple_loss=0.3046, pruned_loss=0.07278, over 4679.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2877, pruned_loss=0.05763, over 939021.17 frames. 
], batch size: 19, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:09:22,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124402.66666666667, ans=0.125 +2024-07-28 07:09:37,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124416.0, ans=0.125 +2024-07-28 07:10:08,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.52 vs. limit=22.5 +2024-07-28 07:10:15,028 INFO [train.py:1114] (0/4) Epoch 10, batch 1350, loss[loss=0.1804, simple_loss=0.2694, pruned_loss=0.04573, over 4758.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2878, pruned_loss=0.05761, over 940832.21 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:10:23,757 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.516e+01 6.216e+01 7.014e+01 1.025e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 07:10:23,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=124482.66666666667, ans=0.035 +2024-07-28 07:10:29,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=124496.0, ans=0.125 +2024-07-28 07:10:29,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124496.0, ans=0.1 +2024-07-28 07:10:39,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124509.33333333333, ans=0.125 +2024-07-28 07:10:43,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124522.66666666667, ans=0.1 +2024-07-28 07:10:48,572 INFO [train.py:1114] (0/4) Epoch 10, batch 1400, loss[loss=0.2131, simple_loss=0.277, pruned_loss=0.07465, over 4710.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2859, pruned_loss=0.05668, over 943026.27 frames. ], batch size: 11, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:10:56,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=124549.33333333333, ans=0.125 +2024-07-28 07:10:59,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.36 vs. 
limit=22.5 +2024-07-28 07:11:09,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124576.0, ans=0.125 +2024-07-28 07:11:11,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124576.0, ans=0.125 +2024-07-28 07:11:17,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=124589.33333333333, ans=0.125 +2024-07-28 07:11:17,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=124589.33333333333, ans=0.0 +2024-07-28 07:11:21,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=124602.66666666667, ans=15.0 +2024-07-28 07:11:22,018 INFO [train.py:1114] (0/4) Epoch 10, batch 1450, loss[loss=0.1941, simple_loss=0.2879, pruned_loss=0.05014, over 4689.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2871, pruned_loss=0.05738, over 942862.36 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:11:30,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.685e+01 6.213e+01 7.325e+01 1.109e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 07:11:40,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=124629.33333333333, ans=0.09899494936611666 +2024-07-28 07:11:45,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=124642.66666666667, ans=0.025 +2024-07-28 07:11:50,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=124656.0, ans=0.0 +2024-07-28 07:11:57,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=124656.0, ans=0.125 +2024-07-28 07:11:59,750 INFO [train.py:1114] (0/4) Epoch 10, batch 1500, loss[loss=0.1815, simple_loss=0.2749, pruned_loss=0.04401, over 4805.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2877, pruned_loss=0.05765, over 942656.01 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:06,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.65 vs. limit=15.0 +2024-07-28 07:12:16,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=124696.0, ans=0.2 +2024-07-28 07:12:17,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=124696.0, ans=0.125 +2024-07-28 07:12:21,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=124709.33333333333, ans=0.0 +2024-07-28 07:12:33,109 INFO [train.py:1114] (0/4) Epoch 10, batch 1550, loss[loss=0.2411, simple_loss=0.3229, pruned_loss=0.07967, over 4902.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2876, pruned_loss=0.05755, over 938992.69 frames. 
], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:41,761 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.520e+01 6.164e+01 6.899e+01 9.824e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:13:00,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=124789.33333333333, ans=0.0 +2024-07-28 07:13:00,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124789.33333333333, ans=0.125 +2024-07-28 07:13:00,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.10 vs. limit=15.0 +2024-07-28 07:13:06,576 INFO [train.py:1114] (0/4) Epoch 10, batch 1600, loss[loss=0.2479, simple_loss=0.3444, pruned_loss=0.07574, over 4868.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2882, pruned_loss=0.05798, over 937499.37 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:13:13,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=124816.0, ans=0.2 +2024-07-28 07:13:19,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=124829.33333333333, ans=0.125 +2024-07-28 07:13:22,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124829.33333333333, ans=0.1 +2024-07-28 07:13:23,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124829.33333333333, ans=0.125 +2024-07-28 07:13:32,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=124842.66666666667, ans=0.125 +2024-07-28 07:13:40,191 INFO [train.py:1114] (0/4) Epoch 10, batch 1650, loss[loss=0.2509, simple_loss=0.3274, pruned_loss=0.08717, over 4665.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2889, pruned_loss=0.05848, over 937308.77 frames. 
], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:13:48,849 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.790e+01 6.415e+01 7.555e+01 1.180e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 07:13:49,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=124882.66666666667, ans=0.0 +2024-07-28 07:13:50,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=124882.66666666667, ans=0.0 +2024-07-28 07:13:51,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124882.66666666667, ans=0.0 +2024-07-28 07:13:55,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=124896.0, ans=0.125 +2024-07-28 07:13:57,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=124896.0, ans=0.5 +2024-07-28 07:13:59,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124909.33333333333, ans=0.1 +2024-07-28 07:14:05,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=22.5 +2024-07-28 07:14:16,907 INFO [train.py:1114] (0/4) Epoch 10, batch 1700, loss[loss=0.2011, simple_loss=0.272, pruned_loss=0.0651, over 4709.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2888, pruned_loss=0.05811, over 939607.22 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:14:25,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.99 vs. limit=22.5 +2024-07-28 07:14:29,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=124949.33333333333, ans=0.0 +2024-07-28 07:14:31,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124949.33333333333, ans=0.1 +2024-07-28 07:14:42,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124976.0, ans=0.125 +2024-07-28 07:14:48,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=124989.33333333333, ans=15.0 +2024-07-28 07:14:53,773 INFO [train.py:1114] (0/4) Epoch 10, batch 1750, loss[loss=0.2018, simple_loss=0.2912, pruned_loss=0.05623, over 4797.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2886, pruned_loss=0.05798, over 940510.46 frames. 
], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:15:04,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.615e+01 6.197e+01 6.752e+01 9.322e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 07:15:16,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125042.66666666667, ans=0.125 +2024-07-28 07:15:28,303 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:15:31,047 INFO [train.py:1114] (0/4) Epoch 10, batch 1800, loss[loss=0.1978, simple_loss=0.2999, pruned_loss=0.04779, over 4641.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2895, pruned_loss=0.0585, over 940776.45 frames. ], batch size: 13, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:15:35,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=125069.33333333333, ans=0.0 +2024-07-28 07:15:42,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=125082.66666666667, ans=10.0 +2024-07-28 07:16:00,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=125122.66666666667, ans=0.0 +2024-07-28 07:16:01,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.12 vs. limit=15.0 +2024-07-28 07:16:05,131 INFO [train.py:1114] (0/4) Epoch 10, batch 1850, loss[loss=0.2528, simple_loss=0.3262, pruned_loss=0.08976, over 4808.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2889, pruned_loss=0.05827, over 940804.44 frames. ], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:16:10,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=125136.0, ans=0.0 +2024-07-28 07:16:14,683 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+01 5.824e+01 6.671e+01 8.109e+01 1.121e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 07:16:19,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=125162.66666666667, ans=0.05 +2024-07-28 07:16:19,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.83 vs. limit=10.0 +2024-07-28 07:16:20,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=125162.66666666667, ans=0.125 +2024-07-28 07:16:40,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=125189.33333333333, ans=0.0 +2024-07-28 07:16:41,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125189.33333333333, ans=0.1 +2024-07-28 07:16:42,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=125189.33333333333, ans=0.125 +2024-07-28 07:16:45,548 INFO [train.py:1114] (0/4) Epoch 10, batch 1900, loss[loss=0.1689, simple_loss=0.2826, pruned_loss=0.02758, over 4656.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2883, pruned_loss=0.05761, over 941961.71 frames. 
], batch size: 14, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:17:02,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=125216.0, ans=0.0 +2024-07-28 07:17:11,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=125229.33333333333, ans=0.2 +2024-07-28 07:17:16,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=125242.66666666667, ans=0.125 +2024-07-28 07:17:18,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=125242.66666666667, ans=0.125 +2024-07-28 07:17:20,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.36 vs. limit=6.0 +2024-07-28 07:17:28,545 INFO [train.py:1114] (0/4) Epoch 10, batch 1950, loss[loss=0.1664, simple_loss=0.2645, pruned_loss=0.03417, over 4888.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2901, pruned_loss=0.05843, over 943842.88 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:17:37,346 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.662e+01 6.185e+01 7.189e+01 1.102e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 07:17:40,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=125282.66666666667, ans=0.2 +2024-07-28 07:17:46,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=125296.0, ans=0.125 +2024-07-28 07:17:57,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.66 vs. limit=15.0 +2024-07-28 07:18:03,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.43 vs. limit=15.0 +2024-07-28 07:18:04,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=125336.0, ans=0.0 +2024-07-28 07:18:04,794 INFO [train.py:1114] (0/4) Epoch 10, batch 2000, loss[loss=0.1807, simple_loss=0.2585, pruned_loss=0.05144, over 4799.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2903, pruned_loss=0.05822, over 940952.62 frames. ], batch size: 11, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:18:11,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=125349.33333333333, ans=0.125 +2024-07-28 07:18:14,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.87 vs. limit=10.0 +2024-07-28 07:18:15,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=125349.33333333333, ans=0.125 +2024-07-28 07:18:28,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.87 vs. limit=12.0 +2024-07-28 07:18:38,318 INFO [train.py:1114] (0/4) Epoch 10, batch 2050, loss[loss=0.1596, simple_loss=0.2375, pruned_loss=0.04089, over 4617.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2893, pruned_loss=0.05782, over 939298.55 frames. 
], batch size: 11, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:18:39,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=125402.66666666667, ans=0.125 +2024-07-28 07:18:44,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.73 vs. limit=15.0 +2024-07-28 07:18:47,047 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+01 5.685e+01 6.326e+01 7.286e+01 1.205e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 07:18:57,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=125429.33333333333, ans=0.125 +2024-07-28 07:19:13,196 INFO [train.py:1114] (0/4) Epoch 10, batch 2100, loss[loss=0.2035, simple_loss=0.3014, pruned_loss=0.05286, over 4760.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2882, pruned_loss=0.05757, over 941149.87 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:19:21,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=125482.66666666667, ans=0.0 +2024-07-28 07:19:24,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=125482.66666666667, ans=0.125 +2024-07-28 07:19:29,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.17 vs. limit=15.0 +2024-07-28 07:19:32,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. limit=15.0 +2024-07-28 07:19:34,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=125509.33333333333, ans=0.07 +2024-07-28 07:19:37,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=125509.33333333333, ans=0.125 +2024-07-28 07:19:44,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=125522.66666666667, ans=0.1 +2024-07-28 07:19:47,874 INFO [train.py:1114] (0/4) Epoch 10, batch 2150, loss[loss=0.2371, simple_loss=0.3194, pruned_loss=0.07736, over 4908.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2876, pruned_loss=0.05769, over 944221.02 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:19:53,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=125536.0, ans=0.0 +2024-07-28 07:19:54,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125549.33333333333, ans=0.125 +2024-07-28 07:19:56,669 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.831e+01 5.697e+01 6.227e+01 7.381e+01 1.023e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:20:01,009 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.01 vs. 
limit=22.5 +2024-07-28 07:20:05,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=125562.66666666667, ans=0.2 +2024-07-28 07:20:06,055 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:20:09,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=125576.0, ans=0.125 +2024-07-28 07:20:21,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125576.0, ans=0.1 +2024-07-28 07:20:22,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=125576.0, ans=0.0 +2024-07-28 07:20:23,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=125589.33333333333, ans=0.2 +2024-07-28 07:20:24,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=125589.33333333333, ans=0.2 +2024-07-28 07:20:25,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.64 vs. limit=15.0 +2024-07-28 07:20:30,862 INFO [train.py:1114] (0/4) Epoch 10, batch 2200, loss[loss=0.2094, simple_loss=0.3104, pruned_loss=0.05419, over 4799.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2876, pruned_loss=0.05745, over 943218.21 frames. ], batch size: 14, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:20:49,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0 +2024-07-28 07:21:07,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125629.33333333333, ans=0.1 +2024-07-28 07:21:17,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.26 vs. limit=6.0 +2024-07-28 07:21:19,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125656.0, ans=0.125 +2024-07-28 07:21:22,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=125656.0, ans=0.2 +2024-07-28 07:21:23,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=125656.0, ans=0.0 +2024-07-28 07:21:24,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=125656.0, ans=0.0 +2024-07-28 07:21:26,428 INFO [train.py:1114] (0/4) Epoch 10, batch 2250, loss[loss=0.1661, simple_loss=0.2586, pruned_loss=0.03677, over 4694.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2868, pruned_loss=0.05675, over 941832.13 frames. 
], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:21:28,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125669.33333333333, ans=0.1 +2024-07-28 07:21:35,181 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.513e+01 5.590e+01 6.237e+01 6.942e+01 1.306e+02, threshold=1.247e+02, percent-clipped=1.0 +2024-07-28 07:21:44,052 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:22:07,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125722.66666666667, ans=0.125 +2024-07-28 07:22:09,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=125736.0, ans=0.1 +2024-07-28 07:22:10,012 INFO [train.py:1114] (0/4) Epoch 10, batch 2300, loss[loss=0.1637, simple_loss=0.2479, pruned_loss=0.03979, over 4935.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2867, pruned_loss=0.05718, over 939592.98 frames. ], batch size: 12, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:27,284 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-07-28 07:22:35,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=125776.0, ans=0.125 +2024-07-28 07:22:47,908 INFO [train.py:1114] (0/4) Epoch 10, batch 2350, loss[loss=0.2027, simple_loss=0.2862, pruned_loss=0.05964, over 4642.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2853, pruned_loss=0.05606, over 941607.61 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:50,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=125802.66666666667, ans=0.125 +2024-07-28 07:22:56,458 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.493e+01 6.004e+01 6.754e+01 1.065e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 07:22:58,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.10 vs. limit=15.0 +2024-07-28 07:23:00,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=125829.33333333333, ans=0.125 +2024-07-28 07:23:02,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=125829.33333333333, ans=0.1 +2024-07-28 07:23:09,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=125842.66666666667, ans=0.025 +2024-07-28 07:23:19,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=15.0 +2024-07-28 07:23:20,987 INFO [train.py:1114] (0/4) Epoch 10, batch 2400, loss[loss=0.1969, simple_loss=0.2844, pruned_loss=0.05468, over 4638.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2856, pruned_loss=0.0567, over 941220.54 frames. 
], batch size: 12, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:23:21,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=125869.33333333333, ans=0.125 +2024-07-28 07:23:21,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=125869.33333333333, ans=0.0 +2024-07-28 07:23:54,377 INFO [train.py:1114] (0/4) Epoch 10, batch 2450, loss[loss=0.1988, simple_loss=0.3002, pruned_loss=0.04869, over 4697.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2875, pruned_loss=0.05768, over 937085.53 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:24:01,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125949.33333333333, ans=0.125 +2024-07-28 07:24:02,997 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.824e+01 6.375e+01 7.344e+01 1.011e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 07:24:08,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=125962.66666666667, ans=0.0 +2024-07-28 07:24:09,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=125962.66666666667, ans=0.125 +2024-07-28 07:24:17,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-07-28 07:24:24,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=125989.33333333333, ans=0.2 +2024-07-28 07:24:25,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-28 07:24:27,188 INFO [train.py:1114] (0/4) Epoch 10, batch 2500, loss[loss=0.2165, simple_loss=0.305, pruned_loss=0.06396, over 4814.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2875, pruned_loss=0.05779, over 938806.29 frames. ], batch size: 14, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:24:34,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=126002.66666666667, ans=0.0 +2024-07-28 07:24:36,241 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:24:39,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.92 vs. limit=12.0 +2024-07-28 07:24:41,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=126016.0, ans=0.125 +2024-07-28 07:24:49,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126029.33333333333, ans=0.125 +2024-07-28 07:25:05,719 INFO [train.py:1114] (0/4) Epoch 10, batch 2550, loss[loss=0.1929, simple_loss=0.2712, pruned_loss=0.05725, over 4778.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2874, pruned_loss=0.05743, over 938578.26 frames. 
], batch size: 11, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:09,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=126069.33333333333, ans=0.0 +2024-07-28 07:25:13,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.49 vs. limit=12.0 +2024-07-28 07:25:14,258 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.571e+01 6.137e+01 7.112e+01 1.171e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:25:16,617 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.60 vs. limit=15.0 +2024-07-28 07:25:18,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=126096.0, ans=0.125 +2024-07-28 07:25:22,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.31 vs. limit=15.0 +2024-07-28 07:25:23,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-07-28 07:25:33,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.24 vs. limit=15.0 +2024-07-28 07:25:34,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=126122.66666666667, ans=0.125 +2024-07-28 07:25:38,944 INFO [train.py:1114] (0/4) Epoch 10, batch 2600, loss[loss=0.2006, simple_loss=0.3057, pruned_loss=0.04777, over 4897.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2885, pruned_loss=0.05795, over 937706.08 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:44,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=126136.0, ans=0.125 +2024-07-28 07:25:47,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126149.33333333333, ans=0.1 +2024-07-28 07:25:48,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126149.33333333333, ans=0.125 +2024-07-28 07:26:03,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=126176.0, ans=0.0 +2024-07-28 07:26:07,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=126189.33333333333, ans=0.0 +2024-07-28 07:26:07,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126189.33333333333, ans=0.125 +2024-07-28 07:26:08,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=15.0 +2024-07-28 07:26:08,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.48 vs. 
limit=15.0 +2024-07-28 07:26:09,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.93 vs. limit=10.0 +2024-07-28 07:26:15,510 INFO [train.py:1114] (0/4) Epoch 10, batch 2650, loss[loss=0.2298, simple_loss=0.3176, pruned_loss=0.071, over 4659.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2896, pruned_loss=0.05791, over 939631.75 frames. ], batch size: 16, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:23,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=126216.0, ans=0.0 +2024-07-28 07:26:25,849 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.752e+01 6.121e+01 6.935e+01 9.272e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 07:26:36,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126242.66666666667, ans=0.1 +2024-07-28 07:26:41,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=126242.66666666667, ans=0.04949747468305833 +2024-07-28 07:26:43,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126242.66666666667, ans=0.1 +2024-07-28 07:26:44,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126256.0, ans=0.1 +2024-07-28 07:26:46,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=126256.0, ans=0.2 +2024-07-28 07:26:51,009 INFO [train.py:1114] (0/4) Epoch 10, batch 2700, loss[loss=0.2327, simple_loss=0.3252, pruned_loss=0.0701, over 4728.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2903, pruned_loss=0.05846, over 939624.42 frames. ], batch size: 14, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:27:02,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.71 vs. limit=22.5 +2024-07-28 07:27:06,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. limit=6.0 +2024-07-28 07:27:12,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=126296.0, ans=0.125 +2024-07-28 07:27:16,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=126309.33333333333, ans=0.0 +2024-07-28 07:27:28,565 INFO [train.py:1114] (0/4) Epoch 10, batch 2750, loss[loss=0.1917, simple_loss=0.2677, pruned_loss=0.05787, over 4709.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2895, pruned_loss=0.05864, over 939601.62 frames. 
], batch size: 12, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:27:29,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=126336.0, ans=0.125 +2024-07-28 07:27:31,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=126336.0, ans=0.1 +2024-07-28 07:27:33,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=126336.0, ans=0.0 +2024-07-28 07:27:37,076 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.293e+01 5.804e+01 6.361e+01 7.427e+01 1.283e+02, threshold=1.272e+02, percent-clipped=1.0 +2024-07-28 07:27:37,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126349.33333333333, ans=0.1 +2024-07-28 07:27:41,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=126362.66666666667, ans=0.1 +2024-07-28 07:27:47,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=126362.66666666667, ans=0.2 +2024-07-28 07:27:47,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126362.66666666667, ans=0.1 +2024-07-28 07:27:48,001 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:27:53,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=126376.0, ans=0.125 +2024-07-28 07:28:00,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126389.33333333333, ans=0.125 +2024-07-28 07:28:02,089 INFO [train.py:1114] (0/4) Epoch 10, batch 2800, loss[loss=0.2306, simple_loss=0.3034, pruned_loss=0.07884, over 3510.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2896, pruned_loss=0.05882, over 937783.59 frames. ], batch size: 36, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:11,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=126416.0, ans=0.07 +2024-07-28 07:28:12,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. limit=15.0 +2024-07-28 07:28:13,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.98 vs. limit=15.0 +2024-07-28 07:28:19,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=126429.33333333333, ans=0.04949747468305833 +2024-07-28 07:28:20,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=126429.33333333333, ans=0.05 +2024-07-28 07:28:23,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.30 vs. limit=22.5 +2024-07-28 07:28:35,567 INFO [train.py:1114] (0/4) Epoch 10, batch 2850, loss[loss=0.2221, simple_loss=0.3094, pruned_loss=0.0674, over 4959.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2901, pruned_loss=0.05905, over 935739.57 frames. 
], batch size: 13, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:39,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 07:28:44,288 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.758e+01 6.530e+01 7.801e+01 1.215e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 07:28:45,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=126482.66666666667, ans=0.025 +2024-07-28 07:28:48,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126496.0, ans=0.1 +2024-07-28 07:28:49,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126496.0, ans=0.125 +2024-07-28 07:29:02,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=126522.66666666667, ans=0.125 +2024-07-28 07:29:05,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126522.66666666667, ans=0.125 +2024-07-28 07:29:08,563 INFO [train.py:1114] (0/4) Epoch 10, batch 2900, loss[loss=0.1998, simple_loss=0.2838, pruned_loss=0.05792, over 4829.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2903, pruned_loss=0.05897, over 939957.10 frames. ], batch size: 13, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:13,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126536.0, ans=0.125 +2024-07-28 07:29:16,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126549.33333333333, ans=0.125 +2024-07-28 07:29:25,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126562.66666666667, ans=0.0 +2024-07-28 07:29:32,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=126576.0, ans=10.0 +2024-07-28 07:29:33,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0 +2024-07-28 07:29:35,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=126589.33333333333, ans=0.0 +2024-07-28 07:29:42,667 INFO [train.py:1114] (0/4) Epoch 10, batch 2950, loss[loss=0.1769, simple_loss=0.2615, pruned_loss=0.04609, over 4704.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2886, pruned_loss=0.05875, over 938833.23 frames. 
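The optim.py:487 warnings report the 0/25/50/75/100th percentiles of recently observed gradient norms, plus the clipping threshold and the share of batches clipped. The thresholds in this log track twice the median (e.g. threshold=1.306e+02 is 2 x the 6.530e+01 median above), matching the logged Clipping_scale=2.0. A hedged sketch of that bookkeeping, not the ScaledAdam implementation itself:

```python
import torch

def clipping_stats(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    # recent_norms: a rolling buffer of per-batch gradient norms (float32).
    q = torch.quantile(recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]  # 2x the median, matching the log
    percent_clipped = 100.0 * (recent_norms > threshold).float().mean()
    return q, threshold, percent_clipped
```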
], batch size: 12, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:44,047 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:29:45,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=126602.66666666667, ans=0.1 +2024-07-28 07:29:51,649 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.710e+01 6.450e+01 7.485e+01 1.036e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 07:29:55,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-07-28 07:29:59,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=126629.33333333333, ans=0.125 +2024-07-28 07:30:01,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=126629.33333333333, ans=0.125 +2024-07-28 07:30:03,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=126642.66666666667, ans=0.125 +2024-07-28 07:30:04,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=126642.66666666667, ans=0.125 +2024-07-28 07:30:24,805 INFO [train.py:1114] (0/4) Epoch 10, batch 3000, loss[loss=0.2015, simple_loss=0.2915, pruned_loss=0.05572, over 4762.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2877, pruned_loss=0.05775, over 938081.10 frames. ], batch size: 13, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:30:24,806 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 07:30:42,397 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.173, simple_loss=0.277, pruned_loss=0.03444, over 944034.00 frames. +2024-07-28 07:30:42,398 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 07:30:43,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=126669.33333333333, ans=0.025 +2024-07-28 07:30:53,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.29 vs. limit=22.5 +2024-07-28 07:30:56,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=126696.0, ans=0.125 +2024-07-28 07:31:00,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=126696.0, ans=0.125 +2024-07-28 07:31:11,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126722.66666666667, ans=0.1 +2024-07-28 07:31:14,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=126722.66666666667, ans=0.2 +2024-07-28 07:31:15,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=126722.66666666667, ans=0.05 +2024-07-28 07:31:17,846 INFO [train.py:1114] (0/4) Epoch 10, batch 3050, loss[loss=0.1873, simple_loss=0.2634, pruned_loss=0.05562, over 4642.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2875, pruned_loss=0.05754, over 936952.38 frames. 
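The train.py:1137-1147 block above switches to the validation set mid-epoch, reports an averaged validation loss, and prints the CUDA memory high-water mark. A schematic of that step; `compute_loss` is a hypothetical stand-in for the recipe's loss function:

```python
import logging
import torch

@torch.no_grad()
def run_validation(model, valid_loader, device) -> None:
    model.eval()
    loss_sum, frames = 0.0, 0.0
    for batch in valid_loader:
        # compute_loss is a hypothetical helper returning
        # (loss tensor, number of frames in the batch).
        loss, num_frames = compute_loss(model, batch, device)
        loss_sum += loss.item() * num_frames
        frames += num_frames
    model.train()
    logging.info(f"validation: loss={loss_sum / frames:.4f}")
    max_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    logging.info(f"Maximum memory allocated so far is {max_mb}MB")
```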
], batch size: 12, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:31:19,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126736.0, ans=0.125 +2024-07-28 07:31:26,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=126749.33333333333, ans=10.0 +2024-07-28 07:31:38,384 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.667e+01 6.279e+01 7.137e+01 1.004e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 07:31:43,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0 +2024-07-28 07:31:53,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=126762.66666666667, ans=0.07 +2024-07-28 07:31:53,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=126762.66666666667, ans=0.05 +2024-07-28 07:31:53,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=126762.66666666667, ans=0.2 +2024-07-28 07:32:05,775 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:32:05,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=126776.0, ans=0.025 +2024-07-28 07:32:09,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=126776.0, ans=0.0 +2024-07-28 07:32:09,621 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:32:17,189 INFO [train.py:1114] (0/4) Epoch 10, batch 3100, loss[loss=0.225, simple_loss=0.3147, pruned_loss=0.06763, over 4618.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2872, pruned_loss=0.05812, over 937963.25 frames. ], batch size: 16, lr: 7.60e-03, grad_scale: 64.0 +2024-07-28 07:32:37,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=126842.66666666667, ans=0.05 +2024-07-28 07:32:46,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=126856.0, ans=0.0 +2024-07-28 07:32:47,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=126856.0, ans=0.0 +2024-07-28 07:32:49,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=126856.0, ans=0.125 +2024-07-28 07:32:52,394 INFO [train.py:1114] (0/4) Epoch 10, batch 3150, loss[loss=0.2461, simple_loss=0.3226, pruned_loss=0.08475, over 4581.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2883, pruned_loss=0.05853, over 937962.47 frames. 
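Several of the schedules above are `bypass.skip_rate` values. In rough terms these control how often a block's contribution is dropped during training so the residual path carries the signal alone. The sketch below shows only that stochastic-skip idea; a full bypass module would typically blend input and output with a constrained learned scale rather than skipping outright:

```python
import torch

def maybe_bypass(module, x: torch.Tensor, skip_rate: float,
                 training: bool) -> torch.Tensor:
    # With probability skip_rate during training, return the input unchanged
    # so the block is skipped for this step; otherwise run it normally.
    if training and torch.rand(()).item() < skip_rate:
        return x
    return module(x)
```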
], batch size: 17, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:32:52,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=126869.33333333333, ans=0.0 +2024-07-28 07:33:01,120 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.563e+01 5.962e+01 7.006e+01 9.323e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 07:33:13,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=126909.33333333333, ans=0.05 +2024-07-28 07:33:17,698 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:33:19,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=126909.33333333333, ans=15.0 +2024-07-28 07:33:29,755 INFO [train.py:1114] (0/4) Epoch 10, batch 3200, loss[loss=0.2444, simple_loss=0.331, pruned_loss=0.07893, over 4835.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2873, pruned_loss=0.05781, over 939427.60 frames. ], batch size: 13, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:02,608 INFO [train.py:1114] (0/4) Epoch 10, batch 3250, loss[loss=0.1901, simple_loss=0.2791, pruned_loss=0.05057, over 4934.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2872, pruned_loss=0.0575, over 940529.92 frames. ], batch size: 14, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:03,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.07 vs. limit=15.0 +2024-07-28 07:34:11,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 5.496e+01 6.167e+01 6.993e+01 1.063e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:34:16,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0 +2024-07-28 07:34:18,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=127029.33333333333, ans=0.2 +2024-07-28 07:34:18,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-28 07:34:28,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.08 vs. limit=15.0 +2024-07-28 07:34:34,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=127056.0, ans=0.2 +2024-07-28 07:34:36,030 INFO [train.py:1114] (0/4) Epoch 10, batch 3300, loss[loss=0.1928, simple_loss=0.2785, pruned_loss=0.05351, over 4693.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2857, pruned_loss=0.05698, over 940967.13 frames. 
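The scaling.py:1024 `Whitening` lines compare a measured statistic of a module's output against a limit: the statistic is 1.0 when the feature covariance is a multiple of the identity and grows as the eigenvalue spread widens, with a penalty applied only past the limit. A sketch of one reasonable formulation, assuming zero-mean features; the recipe's version works per channel group:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (num_frames, num_channels), assumed zero-mean for simplicity.
    cov = x.T @ x / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    # Equals 1.0 when cov is a multiple of the identity ("white" features)
    # and grows as the eigenvalue spread widens.
    return (eigs ** 2).mean() / eigs.mean() ** 2
```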
], batch size: 19, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:41,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=127069.33333333333, ans=0.0 +2024-07-28 07:34:43,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=127082.66666666667, ans=0.0 +2024-07-28 07:34:45,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127082.66666666667, ans=0.125 +2024-07-28 07:34:51,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.48 vs. limit=15.0 +2024-07-28 07:34:54,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=127096.0, ans=0.1 +2024-07-28 07:34:57,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127109.33333333333, ans=0.1 +2024-07-28 07:35:09,150 INFO [train.py:1114] (0/4) Epoch 10, batch 3350, loss[loss=0.2377, simple_loss=0.3269, pruned_loss=0.07427, over 4652.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2867, pruned_loss=0.05729, over 939035.19 frames. ], batch size: 17, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:35:17,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+01 5.618e+01 6.272e+01 7.252e+01 1.069e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 07:35:21,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=127162.66666666667, ans=0.05 +2024-07-28 07:35:21,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127162.66666666667, ans=0.1 +2024-07-28 07:35:23,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=127162.66666666667, ans=6.0 +2024-07-28 07:35:30,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127176.0, ans=0.125 +2024-07-28 07:35:42,746 INFO [train.py:1114] (0/4) Epoch 10, batch 3400, loss[loss=0.1419, simple_loss=0.2314, pruned_loss=0.02624, over 4816.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.287, pruned_loss=0.05706, over 937918.86 frames. ], batch size: 11, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:35:46,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.24 vs. limit=22.5 +2024-07-28 07:35:46,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=127202.66666666667, ans=0.09899494936611666 +2024-07-28 07:35:47,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=127202.66666666667, ans=0.125 +2024-07-28 07:35:53,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-28 07:35:54,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.91 vs. 
limit=10.0 +2024-07-28 07:35:55,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=127229.33333333333, ans=0.125 +2024-07-28 07:36:06,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=127242.66666666667, ans=0.125 +2024-07-28 07:36:16,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=127269.33333333333, ans=0.125 +2024-07-28 07:36:16,682 INFO [train.py:1114] (0/4) Epoch 10, batch 3450, loss[loss=0.183, simple_loss=0.2751, pruned_loss=0.04545, over 4761.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2874, pruned_loss=0.05702, over 937860.88 frames. ], batch size: 19, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:36:25,817 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.619e+01 6.055e+01 6.552e+01 2.053e+02, threshold=1.211e+02, percent-clipped=1.0 +2024-07-28 07:36:26,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0 +2024-07-28 07:36:38,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=127309.33333333333, ans=0.125 +2024-07-28 07:36:41,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127309.33333333333, ans=0.125 +2024-07-28 07:36:42,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=127322.66666666667, ans=0.0 +2024-07-28 07:36:56,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.32 vs. limit=15.0 +2024-07-28 07:36:56,169 INFO [train.py:1114] (0/4) Epoch 10, batch 3500, loss[loss=0.215, simple_loss=0.2969, pruned_loss=0.06652, over 4934.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2857, pruned_loss=0.05617, over 938134.76 frames. ], batch size: 12, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:36:57,143 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0 +2024-07-28 07:37:05,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=127336.0, ans=0.0 +2024-07-28 07:37:21,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=127362.66666666667, ans=0.2 +2024-07-28 07:37:27,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=127362.66666666667, ans=0.2 +2024-07-28 07:37:34,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=127376.0, ans=0.0 +2024-07-28 07:37:35,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=127376.0, ans=0.0 +2024-07-28 07:37:36,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=127389.33333333333, ans=0.125 +2024-07-28 07:37:47,036 INFO [train.py:1114] (0/4) Epoch 10, batch 3550, loss[loss=0.2029, simple_loss=0.2907, pruned_loss=0.05759, over 4661.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2866, pruned_loss=0.05678, over 938486.47 frames. 
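The `tot_loss[... over N frames]` figures hover near 940k frames for hundreds of batches, which is what a decayed frame-weighted average produces: at roughly 4,700 frames per batch, a per-batch decay of 0.995 settles near 4700 / 0.005 = 940,000. A schematic accumulator along those lines; the decay constant is inferred from the numbers, not read from train.py:

```python
class RunningLoss:
    def __init__(self, decay: float = 0.995) -> None:
        # decay=0.995 is an inference: at ~4,700 frames/batch the frame
        # count settles near 4700 / (1 - 0.995) = 940,000, as in the log.
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def value(self) -> float:
        # This is what a "tot_loss[loss=...]" style figure would report.
        return self.loss_sum / max(self.frames, 1.0)
```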
], batch size: 14, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:37:48,899 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0 +2024-07-28 07:38:09,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=127416.0, ans=0.04949747468305833 +2024-07-28 07:38:10,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.63 vs. limit=15.0 +2024-07-28 07:38:16,060 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.635e+01 6.291e+01 7.462e+01 1.218e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-28 07:38:22,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=127429.33333333333, ans=0.0 +2024-07-28 07:38:28,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=127442.66666666667, ans=0.0 +2024-07-28 07:38:30,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127442.66666666667, ans=0.125 +2024-07-28 07:38:34,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=127456.0, ans=0.0 +2024-07-28 07:38:40,237 INFO [train.py:1114] (0/4) Epoch 10, batch 3600, loss[loss=0.1815, simple_loss=0.2734, pruned_loss=0.04478, over 4969.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2868, pruned_loss=0.05667, over 940577.40 frames. ], batch size: 13, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:38:43,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=127469.33333333333, ans=0.02 +2024-07-28 07:38:45,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=127469.33333333333, ans=0.125 +2024-07-28 07:38:48,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=127482.66666666667, ans=0.5 +2024-07-28 07:38:52,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=127482.66666666667, ans=0.125 +2024-07-28 07:38:53,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=127482.66666666667, ans=0.2 +2024-07-28 07:38:54,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127496.0, ans=0.125 +2024-07-28 07:39:10,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=127509.33333333333, ans=0.0 +2024-07-28 07:39:10,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=127509.33333333333, ans=0.0 +2024-07-28 07:39:15,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.79 vs. 
limit=15.0 +2024-07-28 07:39:24,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=127522.66666666667, ans=0.2 +2024-07-28 07:39:25,740 INFO [train.py:1114] (0/4) Epoch 10, batch 3650, loss[loss=0.2264, simple_loss=0.3078, pruned_loss=0.07249, over 4903.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2854, pruned_loss=0.05637, over 941342.82 frames. ], batch size: 15, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:39:38,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=127536.0, ans=0.125 +2024-07-28 07:39:41,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=127536.0, ans=0.125 +2024-07-28 07:39:51,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.725e+01 6.100e+01 7.132e+01 1.043e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 07:39:52,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127549.33333333333, ans=0.125 +2024-07-28 07:39:54,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.37 vs. limit=15.0 +2024-07-28 07:40:03,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=127562.66666666667, ans=10.0 +2024-07-28 07:40:50,427 INFO [train.py:1114] (0/4) Epoch 10, batch 3700, loss[loss=0.2001, simple_loss=0.2894, pruned_loss=0.05542, over 4941.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2857, pruned_loss=0.05671, over 942095.95 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:40:55,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=127602.66666666667, ans=0.0 +2024-07-28 07:41:02,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=127616.0, ans=0.04949747468305833 +2024-07-28 07:41:04,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=127616.0, ans=0.2 +2024-07-28 07:41:06,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=127616.0, ans=0.0 +2024-07-28 07:41:10,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=127629.33333333333, ans=0.125 +2024-07-28 07:41:27,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=127656.0, ans=0.125 +2024-07-28 07:41:28,287 INFO [train.py:1114] (0/4) Epoch 10, batch 3750, loss[loss=0.174, simple_loss=0.2604, pruned_loss=0.04377, over 4786.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.286, pruned_loss=0.0571, over 943685.40 frames. ], batch size: 11, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:41:30,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.29 vs. 
limit=15.0 +2024-07-28 07:41:38,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127669.33333333333, ans=0.1 +2024-07-28 07:41:51,643 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+01 5.968e+01 6.692e+01 7.910e+01 1.742e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-28 07:41:57,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=127696.0, ans=0.125 +2024-07-28 07:42:05,213 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0 +2024-07-28 07:42:11,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=127722.66666666667, ans=0.2 +2024-07-28 07:42:15,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=127722.66666666667, ans=0.125 +2024-07-28 07:42:22,846 INFO [train.py:1114] (0/4) Epoch 10, batch 3800, loss[loss=0.1901, simple_loss=0.2804, pruned_loss=0.04988, over 4812.00 frames. ], tot_loss[loss=0.199, simple_loss=0.285, pruned_loss=0.05647, over 941689.81 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:42:25,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=127736.0, ans=0.015 +2024-07-28 07:42:31,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=127736.0, ans=0.125 +2024-07-28 07:42:38,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.38 vs. limit=15.0 +2024-07-28 07:42:41,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=127749.33333333333, ans=0.125 +2024-07-28 07:42:41,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.70 vs. limit=22.5 +2024-07-28 07:42:49,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0 +2024-07-28 07:43:07,377 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0 +2024-07-28 07:43:10,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=127789.33333333333, ans=10.0 +2024-07-28 07:43:11,588 INFO [train.py:1114] (0/4) Epoch 10, batch 3850, loss[loss=0.2102, simple_loss=0.3145, pruned_loss=0.05293, over 4883.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2853, pruned_loss=0.05645, over 942617.83 frames. ], batch size: 17, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:43:14,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=127802.66666666667, ans=0.125 +2024-07-28 07:43:17,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.67 vs. 
limit=10.0 +2024-07-28 07:43:18,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=127816.0, ans=0.025 +2024-07-28 07:43:21,993 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.662e+01 6.521e+01 7.617e+01 1.192e+02, threshold=1.304e+02, percent-clipped=1.0 +2024-07-28 07:43:24,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=127816.0, ans=10.0 +2024-07-28 07:43:28,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=127829.33333333333, ans=0.125 +2024-07-28 07:43:38,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=127842.66666666667, ans=0.5 +2024-07-28 07:43:49,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127856.0, ans=0.125 +2024-07-28 07:43:52,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.66 vs. limit=15.0 +2024-07-28 07:43:52,479 INFO [train.py:1114] (0/4) Epoch 10, batch 3900, loss[loss=0.1992, simple_loss=0.2994, pruned_loss=0.0495, over 4800.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2846, pruned_loss=0.0559, over 942838.48 frames. ], batch size: 14, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:43:55,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127869.33333333333, ans=0.1 +2024-07-28 07:43:55,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=127869.33333333333, ans=0.125 +2024-07-28 07:43:59,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=127882.66666666667, ans=0.2 +2024-07-28 07:44:23,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=127909.33333333333, ans=0.2 +2024-07-28 07:44:25,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=127909.33333333333, ans=0.0 +2024-07-28 07:44:30,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127922.66666666667, ans=0.1 +2024-07-28 07:44:33,688 INFO [train.py:1114] (0/4) Epoch 10, batch 3950, loss[loss=0.1686, simple_loss=0.2457, pruned_loss=0.04572, over 4832.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.284, pruned_loss=0.05567, over 945106.60 frames. 
], batch size: 16, lr: 7.56e-03, grad_scale: 16.0 +2024-07-28 07:44:36,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=127936.0, ans=0.0 +2024-07-28 07:44:37,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=127936.0, ans=0.04949747468305833 +2024-07-28 07:44:50,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127936.0, ans=0.125 +2024-07-28 07:44:57,181 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.715e+01 6.133e+01 6.852e+01 1.045e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:45:04,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=127962.66666666667, ans=0.0 +2024-07-28 07:45:15,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127989.33333333333, ans=0.1 +2024-07-28 07:45:19,971 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-96000.pt +2024-07-28 07:45:23,682 INFO [train.py:1114] (0/4) Epoch 10, batch 4000, loss[loss=0.163, simple_loss=0.2573, pruned_loss=0.03438, over 4781.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2844, pruned_loss=0.05573, over 941085.18 frames. ], batch size: 12, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:45:26,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.33 vs. limit=15.0 +2024-07-28 07:45:28,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0 +2024-07-28 07:45:55,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=128056.0, ans=0.05 +2024-07-28 07:45:56,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.45 vs. limit=6.0 +2024-07-28 07:46:07,107 INFO [train.py:1114] (0/4) Epoch 10, batch 4050, loss[loss=0.3011, simple_loss=0.3489, pruned_loss=0.1267, over 3369.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2849, pruned_loss=0.05621, over 939608.55 frames. 
], batch size: 35, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:46:08,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=128069.33333333333, ans=0.05 +2024-07-28 07:46:17,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.919e+01 6.572e+01 7.473e+01 1.130e+02, threshold=1.314e+02, percent-clipped=0.0 +2024-07-28 07:46:28,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=128109.33333333333, ans=0.5 +2024-07-28 07:46:28,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128109.33333333333, ans=0.125 +2024-07-28 07:46:41,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=128122.66666666667, ans=0.125 +2024-07-28 07:46:42,596 INFO [train.py:1114] (0/4) Epoch 10, batch 4100, loss[loss=0.2097, simple_loss=0.3285, pruned_loss=0.04543, over 4900.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2865, pruned_loss=0.05695, over 938486.37 frames. ], batch size: 15, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:46:49,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=128149.33333333333, ans=0.125 +2024-07-28 07:46:56,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128162.66666666667, ans=0.125 +2024-07-28 07:47:04,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128176.0, ans=0.125 +2024-07-28 07:47:17,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=128202.66666666667, ans=0.0 +2024-07-28 07:47:18,148 INFO [train.py:1114] (0/4) Epoch 10, batch 4150, loss[loss=0.2093, simple_loss=0.2902, pruned_loss=0.0642, over 4830.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2856, pruned_loss=0.05655, over 938329.31 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:47:18,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=128202.66666666667, ans=0.0 +2024-07-28 07:47:28,111 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.846e+01 6.728e+01 7.607e+01 1.158e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-28 07:47:29,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=128216.0, ans=0.0 +2024-07-28 07:47:34,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.22 vs. limit=15.0 +2024-07-28 07:47:47,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.26 vs. limit=22.5 +2024-07-28 07:47:51,039 INFO [train.py:1114] (0/4) Epoch 10, batch 4200, loss[loss=0.2332, simple_loss=0.3136, pruned_loss=0.07645, over 4900.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2858, pruned_loss=0.05672, over 939728.74 frames. 
], batch size: 15, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:48:02,176 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.35 vs. limit=22.5 +2024-07-28 07:48:03,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=128296.0, ans=0.2 +2024-07-28 07:48:05,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=128296.0, ans=0.125 +2024-07-28 07:48:16,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=128322.66666666667, ans=0.07 +2024-07-28 07:48:17,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128322.66666666667, ans=0.1 +2024-07-28 07:48:21,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128322.66666666667, ans=0.0 +2024-07-28 07:48:23,682 INFO [train.py:1114] (0/4) Epoch 10, batch 4250, loss[loss=0.1992, simple_loss=0.2889, pruned_loss=0.05471, over 4634.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2866, pruned_loss=0.05707, over 940756.33 frames. ], batch size: 12, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:48:33,347 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.567e+01 6.071e+01 6.705e+01 1.236e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 07:48:35,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=128349.33333333333, ans=0.125 +2024-07-28 07:48:37,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0 +2024-07-28 07:48:38,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=128362.66666666667, ans=0.125 +2024-07-28 07:48:45,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=128376.0, ans=0.125 +2024-07-28 07:48:49,345 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:48:49,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-07-28 07:48:51,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=128389.33333333333, ans=0.0 +2024-07-28 07:48:57,123 INFO [train.py:1114] (0/4) Epoch 10, batch 4300, loss[loss=0.1882, simple_loss=0.271, pruned_loss=0.0527, over 4765.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2864, pruned_loss=0.05712, over 940097.15 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:49:12,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. 
limit=6.0 +2024-07-28 07:49:12,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=128429.33333333333, ans=0.125 +2024-07-28 07:49:14,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128429.33333333333, ans=0.1 +2024-07-28 07:49:25,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=128456.0, ans=0.125 +2024-07-28 07:49:30,482 INFO [train.py:1114] (0/4) Epoch 10, batch 4350, loss[loss=0.2084, simple_loss=0.3012, pruned_loss=0.05778, over 4756.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2869, pruned_loss=0.05686, over 940992.54 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:49:31,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=128469.33333333333, ans=0.0 +2024-07-28 07:49:40,759 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.507e+01 6.201e+01 7.013e+01 1.119e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 07:49:47,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128496.0, ans=0.125 +2024-07-28 07:49:55,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128509.33333333333, ans=0.125 +2024-07-28 07:50:04,256 INFO [train.py:1114] (0/4) Epoch 10, batch 4400, loss[loss=0.1982, simple_loss=0.2897, pruned_loss=0.05334, over 4808.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2867, pruned_loss=0.05688, over 940227.53 frames. ], batch size: 14, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:50:08,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=128536.0, ans=0.125 +2024-07-28 07:50:14,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=128549.33333333333, ans=0.125 +2024-07-28 07:50:21,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=128562.66666666667, ans=0.0 +2024-07-28 07:50:30,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=128589.33333333333, ans=0.035 +2024-07-28 07:50:37,980 INFO [train.py:1114] (0/4) Epoch 10, batch 4450, loss[loss=0.1728, simple_loss=0.2589, pruned_loss=0.0433, over 4945.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2864, pruned_loss=0.05682, over 938321.14 frames. ], batch size: 12, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:50:41,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=128602.66666666667, ans=0.125 +2024-07-28 07:50:45,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=128616.0, ans=0.125 +2024-07-28 07:50:47,725 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.603e+01 6.224e+01 7.010e+01 9.776e+01, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:51:12,777 INFO [train.py:1114] (0/4) Epoch 10, batch 4500, loss[loss=0.2014, simple_loss=0.2889, pruned_loss=0.05695, over 4750.00 frames. 
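Many schedules above belong to balancers (`balancer1.prob`, `min_positive`, `max_positive=0.95`, `max_abs=10.0`, `min_abs`). These watch per-channel activation statistics and, with the scheduled probability, adjust gradients to pull the statistics back inside the configured bounds. A toy function computing the two statistics being constrained; the gradient-side machinery is omitted:

```python
import torch

def channel_stats(x: torch.Tensor):
    # x: (num_frames, num_channels)
    frac_positive = (x > 0).float().mean(dim=0)  # vs. min_positive/max_positive
    mean_abs = x.abs().mean(dim=0)               # vs. min_abs/max_abs
    return frac_positive, mean_abs
```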
], tot_loss[loss=0.1998, simple_loss=0.2864, pruned_loss=0.05658, over 937859.86 frames. ], batch size: 14, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:51:14,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=128669.33333333333, ans=0.0 +2024-07-28 07:51:16,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=128669.33333333333, ans=0.0 +2024-07-28 07:51:26,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=128682.66666666667, ans=0.125 +2024-07-28 07:51:27,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=128696.0, ans=0.025 +2024-07-28 07:51:29,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=128696.0, ans=0.025 +2024-07-28 07:51:31,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=128696.0, ans=0.125 +2024-07-28 07:51:38,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=128709.33333333333, ans=0.125 +2024-07-28 07:51:38,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=128709.33333333333, ans=0.0 +2024-07-28 07:51:47,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=128736.0, ans=0.0 +2024-07-28 07:51:47,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=128736.0, ans=0.0 +2024-07-28 07:51:47,648 INFO [train.py:1114] (0/4) Epoch 10, batch 4550, loss[loss=0.1747, simple_loss=0.2619, pruned_loss=0.0438, over 4895.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2862, pruned_loss=0.05617, over 939703.45 frames. ], batch size: 13, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:51:57,765 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.839e+01 6.410e+01 7.232e+01 1.296e+02, threshold=1.282e+02, percent-clipped=2.0 +2024-07-28 07:52:11,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128776.0, ans=0.1 +2024-07-28 07:52:24,748 INFO [train.py:1114] (0/4) Epoch 10, batch 4600, loss[loss=0.2081, simple_loss=0.3048, pruned_loss=0.05564, over 4527.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2853, pruned_loss=0.05582, over 938043.38 frames. ], batch size: 21, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:52:31,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=128816.0, ans=0.125 +2024-07-28 07:52:32,231 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. 
limit=6.0 +2024-07-28 07:52:34,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=128816.0, ans=0.0 +2024-07-28 07:52:36,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=128816.0, ans=0.2 +2024-07-28 07:52:39,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128829.33333333333, ans=0.0 +2024-07-28 07:52:40,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=128829.33333333333, ans=0.125 +2024-07-28 07:52:40,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0 +2024-07-28 07:52:41,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.21 vs. limit=22.5 +2024-07-28 07:52:45,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128842.66666666667, ans=0.1 +2024-07-28 07:52:49,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.65 vs. limit=15.0 +2024-07-28 07:52:49,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128842.66666666667, ans=0.1 +2024-07-28 07:52:57,432 INFO [train.py:1114] (0/4) Epoch 10, batch 4650, loss[loss=0.2104, simple_loss=0.2925, pruned_loss=0.06413, over 4851.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2854, pruned_loss=0.05597, over 940087.14 frames. ], batch size: 16, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:52:57,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.95 vs. limit=12.0 +2024-07-28 07:53:00,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.61 vs. limit=15.0 +2024-07-28 07:53:07,570 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.595e+01 6.179e+01 7.275e+01 1.134e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 07:53:18,879 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0 +2024-07-28 07:53:30,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0 +2024-07-28 07:53:30,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=128936.0, ans=0.025 +2024-07-28 07:53:31,136 INFO [train.py:1114] (0/4) Epoch 10, batch 4700, loss[loss=0.184, simple_loss=0.2722, pruned_loss=0.04791, over 4709.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.285, pruned_loss=0.05614, over 937192.55 frames. 
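For anyone mining this log, the per-batch summary lines are regular enough to parse mechanically. A small extractor for (epoch, batch, running loss, lr) tuples from the train.py:1114 entries; the pattern is written against the exact format shown above:

```python
import re

PATTERN = re.compile(
    r"Epoch (\d+), batch (\d+),.*?tot_loss\[loss=([\d.]+).*?\].*?lr: ([\d.e+-]+)"
)

def parse_line(line: str):
    m = PATTERN.search(line)
    if m is None:
        return None
    epoch, batch, loss, lr = m.groups()
    return int(epoch), int(batch), float(loss), float(lr)

# parse_line("... Epoch 10, batch 3000, loss[...], tot_loss[loss=0.2016, ...,"
#            " over 938081.10 frames. ], batch size: 13, lr: 7.60e-03, ...")
# -> (10, 3000, 0.2016, 0.0076)
```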
], batch size: 11, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:53:44,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=128949.33333333333, ans=0.125 +2024-07-28 07:53:51,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128976.0, ans=0.1 +2024-07-28 07:53:51,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128976.0, ans=0.1 +2024-07-28 07:54:00,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.58 vs. limit=10.0 +2024-07-28 07:54:04,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=128989.33333333333, ans=15.0 +2024-07-28 07:54:05,546 INFO [train.py:1114] (0/4) Epoch 10, batch 4750, loss[loss=0.2254, simple_loss=0.3058, pruned_loss=0.07256, over 4462.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2855, pruned_loss=0.057, over 935697.09 frames. ], batch size: 21, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:54:15,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.636e+01 6.177e+01 7.080e+01 9.506e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 07:54:33,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129056.0, ans=0.125 +2024-07-28 07:54:37,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=129056.0, ans=0.125 +2024-07-28 07:54:39,982 INFO [train.py:1114] (0/4) Epoch 10, batch 4800, loss[loss=0.1876, simple_loss=0.2752, pruned_loss=0.05002, over 4698.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2848, pruned_loss=0.05694, over 932512.48 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:54:44,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=129069.33333333333, ans=10.0 +2024-07-28 07:55:11,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=129122.66666666667, ans=0.0 +2024-07-28 07:55:11,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129122.66666666667, ans=0.1 +2024-07-28 07:55:13,176 INFO [train.py:1114] (0/4) Epoch 10, batch 4850, loss[loss=0.2118, simple_loss=0.3129, pruned_loss=0.05536, over 4750.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2857, pruned_loss=0.05699, over 932639.30 frames. 
], batch size: 14, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:55:15,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129136.0, ans=0.1 +2024-07-28 07:55:22,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129149.33333333333, ans=0.125 +2024-07-28 07:55:23,229 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.570e+01 6.105e+01 6.787e+01 9.790e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 07:55:24,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=129149.33333333333, ans=0.09899494936611666 +2024-07-28 07:55:27,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=129162.66666666667, ans=0.0 +2024-07-28 07:55:27,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=129162.66666666667, ans=0.125 +2024-07-28 07:55:32,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0 +2024-07-28 07:55:45,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=129189.33333333333, ans=0.0 +2024-07-28 07:55:46,408 INFO [train.py:1114] (0/4) Epoch 10, batch 4900, loss[loss=0.2125, simple_loss=0.3094, pruned_loss=0.05785, over 4761.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2858, pruned_loss=0.05699, over 934378.32 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:55:50,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=129202.66666666667, ans=0.05 +2024-07-28 07:55:56,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=129216.0, ans=0.125 +2024-07-28 07:55:57,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=129216.0, ans=0.0 +2024-07-28 07:56:02,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129229.33333333333, ans=0.125 +2024-07-28 07:56:08,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=129242.66666666667, ans=0.125 +2024-07-28 07:56:20,955 INFO [train.py:1114] (0/4) Epoch 10, batch 4950, loss[loss=0.2324, simple_loss=0.3032, pruned_loss=0.08075, over 3366.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2875, pruned_loss=0.05798, over 931663.22 frames. 
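The scaling.py:1120 `WithLoss` lines appear to report the accumulated auxiliary penalty attached to a module's attention weights, and `loss-sum=0.000e+00` throughout this excerpt suggests those weights stayed within their constraints. A schematic tracker for that logging pattern only; the actual autograd hookup in scaling.py is not reproduced here:

```python
import torch

class AuxLossTracker:
    def __init__(self, name: str) -> None:
        self.name = name
        self.loss_sum = 0.0

    def attach(self, x: torch.Tensor, penalty: torch.Tensor) -> torch.Tensor:
        # Forward value is unchanged; only the penalty sum is recorded
        # for the next log line.
        self.loss_sum += float(penalty.detach())
        return x

    def report(self) -> str:
        msg = f"WithLoss: name={self.name}, loss-sum={self.loss_sum:.3e}"
        self.loss_sum = 0.0
        return msg
```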
], batch size: 36, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:56:30,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=129282.66666666667, ans=0.5 +2024-07-28 07:56:33,106 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.675e+01 6.169e+01 7.226e+01 1.073e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 07:56:44,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=129309.33333333333, ans=0.0 +2024-07-28 07:56:46,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=129309.33333333333, ans=0.2 +2024-07-28 07:57:01,489 INFO [train.py:1114] (0/4) Epoch 10, batch 5000, loss[loss=0.1827, simple_loss=0.2733, pruned_loss=0.04608, over 4659.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.287, pruned_loss=0.05732, over 935294.25 frames. ], batch size: 14, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:57:02,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=129336.0, ans=0.125 +2024-07-28 07:57:04,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129336.0, ans=0.125 +2024-07-28 07:57:04,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.04 vs. limit=22.5 +2024-07-28 07:57:07,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.81 vs. limit=15.0 +2024-07-28 07:57:13,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=129349.33333333333, ans=0.0 +2024-07-28 07:57:14,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129349.33333333333, ans=0.125 +2024-07-28 07:57:23,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=129376.0, ans=0.125 +2024-07-28 07:57:24,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=129376.0, ans=0.025 +2024-07-28 07:57:25,839 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:57:37,483 INFO [train.py:1114] (0/4) Epoch 10, batch 5050, loss[loss=0.2021, simple_loss=0.2848, pruned_loss=0.05965, over 4848.00 frames. ], tot_loss[loss=0.2, simple_loss=0.286, pruned_loss=0.05702, over 937664.46 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:57:41,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=129402.66666666667, ans=0.1 +2024-07-28 07:57:41,871 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:57:42,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=129402.66666666667, ans=0.0 +2024-07-28 07:57:42,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. 
limit=6.0 +2024-07-28 07:57:47,630 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+01 5.711e+01 6.360e+01 7.128e+01 1.073e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 07:57:51,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=129416.0, ans=0.125 +2024-07-28 07:57:56,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=129429.33333333333, ans=0.025 +2024-07-28 07:57:59,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=129442.66666666667, ans=0.95 +2024-07-28 07:58:05,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=129442.66666666667, ans=0.2 +2024-07-28 07:58:11,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129456.0, ans=0.1 +2024-07-28 07:58:13,502 INFO [train.py:1114] (0/4) Epoch 10, batch 5100, loss[loss=0.1975, simple_loss=0.2707, pruned_loss=0.06216, over 4778.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2867, pruned_loss=0.0576, over 935304.33 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:58:17,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129469.33333333333, ans=0.125 +2024-07-28 07:58:19,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=129469.33333333333, ans=0.125 +2024-07-28 07:58:20,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=129482.66666666667, ans=0.0 +2024-07-28 07:58:32,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=129509.33333333333, ans=0.025 +2024-07-28 07:58:39,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=129522.66666666667, ans=0.125 +2024-07-28 07:58:46,344 INFO [train.py:1114] (0/4) Epoch 10, batch 5150, loss[loss=0.1963, simple_loss=0.2663, pruned_loss=0.06311, over 4845.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.288, pruned_loss=0.05823, over 936289.03 frames. 
], batch size: 16, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:58:52,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=129549.33333333333, ans=0.125 +2024-07-28 07:58:52,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=129549.33333333333, ans=0.05 +2024-07-28 07:58:56,278 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.874e+01 5.650e+01 6.455e+01 7.114e+01 1.167e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 07:59:03,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=129562.66666666667, ans=0.0 +2024-07-28 07:59:08,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=129576.0, ans=0.04949747468305833 +2024-07-28 07:59:19,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=129602.66666666667, ans=10.0 +2024-07-28 07:59:20,214 INFO [train.py:1114] (0/4) Epoch 10, batch 5200, loss[loss=0.1937, simple_loss=0.2813, pruned_loss=0.05301, over 4666.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05775, over 936329.21 frames. ], batch size: 14, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 07:59:24,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=129602.66666666667, ans=0.2 +2024-07-28 07:59:25,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-07-28 07:59:42,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=129642.66666666667, ans=0.025 +2024-07-28 07:59:45,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129642.66666666667, ans=0.125 +2024-07-28 07:59:53,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129669.33333333333, ans=0.125 +2024-07-28 07:59:53,601 INFO [train.py:1114] (0/4) Epoch 10, batch 5250, loss[loss=0.1837, simple_loss=0.2658, pruned_loss=0.05084, over 4885.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2854, pruned_loss=0.05648, over 936025.00 frames. ], batch size: 13, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 07:59:55,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129669.33333333333, ans=0.1 +2024-07-28 07:59:59,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=129682.66666666667, ans=0.125 +2024-07-28 08:00:01,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129682.66666666667, ans=0.1 +2024-07-28 08:00:03,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 5.858e+01 6.971e+01 8.204e+01 1.196e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-28 08:00:27,513 INFO [train.py:1114] (0/4) Epoch 10, batch 5300, loss[loss=0.2424, simple_loss=0.3176, pruned_loss=0.0836, over 4611.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2863, pruned_loss=0.05732, over 934290.16 frames. 
], batch size: 16, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:00:28,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.31 vs. limit=15.0 +2024-07-28 08:00:34,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=129749.33333333333, ans=0.1 +2024-07-28 08:00:47,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129776.0, ans=0.125 +2024-07-28 08:00:49,900 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:00:56,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=129789.33333333333, ans=0.0 +2024-07-28 08:00:58,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129789.33333333333, ans=0.125 +2024-07-28 08:00:59,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=129789.33333333333, ans=0.025 +2024-07-28 08:01:00,900 INFO [train.py:1114] (0/4) Epoch 10, batch 5350, loss[loss=0.182, simple_loss=0.2632, pruned_loss=0.05037, over 4515.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2881, pruned_loss=0.05768, over 936316.84 frames. ], batch size: 10, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:01:02,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=129802.66666666667, ans=0.0 +2024-07-28 08:01:05,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129802.66666666667, ans=0.125 +2024-07-28 08:01:06,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.14 vs. limit=15.0 +2024-07-28 08:01:11,098 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.483e+01 5.986e+01 6.738e+01 1.016e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 08:01:19,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=129829.33333333333, ans=0.125 +2024-07-28 08:01:30,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=129856.0, ans=0.015 +2024-07-28 08:01:35,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129856.0, ans=0.0 +2024-07-28 08:01:36,312 INFO [train.py:1114] (0/4) Epoch 10, batch 5400, loss[loss=0.2279, simple_loss=0.3054, pruned_loss=0.07521, over 4385.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2877, pruned_loss=0.05793, over 930479.98 frames. ], batch size: 26, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:01:38,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.44 vs. 
limit=15.0 +2024-07-28 08:01:41,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=129869.33333333333, ans=0.125 +2024-07-28 08:01:46,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129882.66666666667, ans=0.0 +2024-07-28 08:01:57,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=129896.0, ans=0.1 +2024-07-28 08:02:01,079 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:02:01,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=129909.33333333333, ans=0.125 +2024-07-28 08:02:02,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.07 vs. limit=6.0 +2024-07-28 08:02:08,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129922.66666666667, ans=0.1 +2024-07-28 08:02:10,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=129936.0, ans=0.125 +2024-07-28 08:02:12,884 INFO [train.py:1114] (0/4) Epoch 10, batch 5450, loss[loss=0.1624, simple_loss=0.2439, pruned_loss=0.04048, over 4702.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05772, over 933459.51 frames. ], batch size: 11, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:02:13,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=129936.0, ans=0.0 +2024-07-28 08:02:17,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=129936.0, ans=15.0 +2024-07-28 08:02:19,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 08:02:24,705 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.695e+01 6.364e+01 7.750e+01 1.165e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 08:02:33,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.38 vs. limit=22.5 +2024-07-28 08:02:41,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=129989.33333333333, ans=0.0 +2024-07-28 08:02:41,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129989.33333333333, ans=0.1 +2024-07-28 08:02:48,474 INFO [train.py:1114] (0/4) Epoch 10, batch 5500, loss[loss=0.242, simple_loss=0.3128, pruned_loss=0.08561, over 4275.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2868, pruned_loss=0.05787, over 930817.60 frames. 
], batch size: 25, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:02:53,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=130002.66666666667, ans=0.125 +2024-07-28 08:02:57,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=130016.0, ans=0.125 +2024-07-28 08:02:59,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130016.0, ans=0.125 +2024-07-28 08:03:05,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130029.33333333333, ans=0.1 +2024-07-28 08:03:22,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=130056.0, ans=0.2 +2024-07-28 08:03:26,107 INFO [train.py:1114] (0/4) Epoch 10, batch 5550, loss[loss=0.1647, simple_loss=0.2549, pruned_loss=0.03725, over 4710.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2859, pruned_loss=0.0576, over 933122.98 frames. ], batch size: 12, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:03:26,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=130069.33333333333, ans=0.125 +2024-07-28 08:03:35,926 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.857e+01 6.242e+01 7.417e+01 1.070e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 08:03:38,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130082.66666666667, ans=0.1 +2024-07-28 08:03:44,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=130096.0, ans=0.95 +2024-07-28 08:03:45,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=130109.33333333333, ans=0.025 +2024-07-28 08:03:51,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.10 vs. limit=22.5 +2024-07-28 08:03:59,470 INFO [train.py:1114] (0/4) Epoch 10, batch 5600, loss[loss=0.2034, simple_loss=0.2848, pruned_loss=0.06102, over 4738.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.286, pruned_loss=0.05746, over 934302.21 frames. ], batch size: 14, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:01,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=130136.0, ans=0.2 +2024-07-28 08:04:01,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.15 vs. limit=22.5 +2024-07-28 08:04:03,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.88 vs. 
limit=10.0 +2024-07-28 08:04:21,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=130162.66666666667, ans=0.0 +2024-07-28 08:04:24,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=130162.66666666667, ans=0.0 +2024-07-28 08:04:32,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=130176.0, ans=0.0 +2024-07-28 08:04:36,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=130189.33333333333, ans=0.07 +2024-07-28 08:04:40,141 INFO [train.py:1114] (0/4) Epoch 10, batch 5650, loss[loss=0.2082, simple_loss=0.2996, pruned_loss=0.05841, over 4512.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2852, pruned_loss=0.05658, over 936870.41 frames. ], batch size: 21, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:41,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=130202.66666666667, ans=0.07 +2024-07-28 08:04:48,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.21 vs. limit=22.5 +2024-07-28 08:04:49,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=130216.0, ans=0.0 +2024-07-28 08:04:50,450 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.620e+01 6.091e+01 7.074e+01 1.306e+02, threshold=1.218e+02, percent-clipped=1.0 +2024-07-28 08:04:53,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=130229.33333333333, ans=0.07 +2024-07-28 08:05:00,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130242.66666666667, ans=0.1 +2024-07-28 08:05:09,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=130256.0, ans=0.015 +2024-07-28 08:05:11,891 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:05:13,711 INFO [train.py:1114] (0/4) Epoch 10, batch 5700, loss[loss=0.2276, simple_loss=0.3106, pruned_loss=0.07234, over 4697.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2862, pruned_loss=0.05712, over 938057.77 frames. ], batch size: 13, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:05:13,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=15.0 +2024-07-28 08:05:24,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=130282.66666666667, ans=0.0 +2024-07-28 08:05:27,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=130282.66666666667, ans=0.125 +2024-07-28 08:05:44,120 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:05:50,250 INFO [train.py:1114] (0/4) Epoch 10, batch 5750, loss[loss=0.2629, simple_loss=0.3382, pruned_loss=0.0938, over 4731.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2872, pruned_loss=0.05782, over 937906.46 frames. 
], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:05:59,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.13 vs. limit=15.0 +2024-07-28 08:06:00,018 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.735e+01 6.185e+01 6.687e+01 9.991e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 08:06:15,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=130376.0, ans=0.125 +2024-07-28 08:06:19,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=130389.33333333333, ans=0.0 +2024-07-28 08:06:24,191 INFO [train.py:1114] (0/4) Epoch 10, batch 5800, loss[loss=0.2461, simple_loss=0.3309, pruned_loss=0.08071, over 4750.00 frames. ], tot_loss[loss=0.202, simple_loss=0.288, pruned_loss=0.05805, over 937100.14 frames. ], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:06:35,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.57 vs. limit=10.0 +2024-07-28 08:06:46,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=130442.66666666667, ans=0.125 +2024-07-28 08:06:47,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130442.66666666667, ans=0.1 +2024-07-28 08:06:47,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.17 vs. limit=8.0 +2024-07-28 08:06:49,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=130442.66666666667, ans=0.035 +2024-07-28 08:06:59,058 INFO [train.py:1114] (0/4) Epoch 10, batch 5850, loss[loss=0.2551, simple_loss=0.3319, pruned_loss=0.08909, over 4557.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.288, pruned_loss=0.05824, over 937581.72 frames. ], batch size: 21, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:05,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.72 vs. limit=22.5 +2024-07-28 08:07:09,044 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.762e+01 6.655e+01 7.927e+01 1.283e+02, threshold=1.331e+02, percent-clipped=2.0 +2024-07-28 08:07:11,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=130482.66666666667, ans=0.125 +2024-07-28 08:07:14,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=130496.0, ans=0.0 +2024-07-28 08:07:20,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.72 vs. 
limit=15.0 +2024-07-28 08:07:22,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=130509.33333333333, ans=0.125 +2024-07-28 08:07:26,432 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:07:30,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=130522.66666666667, ans=0.0 +2024-07-28 08:07:34,310 INFO [train.py:1114] (0/4) Epoch 10, batch 5900, loss[loss=0.2444, simple_loss=0.3037, pruned_loss=0.09251, over 4691.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2875, pruned_loss=0.05806, over 937476.98 frames. ], batch size: 15, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:39,366 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=15.0 +2024-07-28 08:07:39,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130536.0, ans=0.1 +2024-07-28 08:07:42,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=130549.33333333333, ans=0.125 +2024-07-28 08:07:48,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130562.66666666667, ans=0.125 +2024-07-28 08:07:56,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130576.0, ans=0.125 +2024-07-28 08:07:57,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=130576.0, ans=0.125 +2024-07-28 08:08:00,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=130576.0, ans=0.0 +2024-07-28 08:08:02,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=130589.33333333333, ans=0.2 +2024-07-28 08:08:10,392 INFO [train.py:1114] (0/4) Epoch 10, batch 5950, loss[loss=0.1988, simple_loss=0.2928, pruned_loss=0.05241, over 4677.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2871, pruned_loss=0.05779, over 939973.46 frames. ], batch size: 15, lr: 7.49e-03, grad_scale: 64.0 +2024-07-28 08:08:14,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=130602.66666666667, ans=0.0 +2024-07-28 08:08:20,239 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.913e+01 5.602e+01 6.081e+01 6.794e+01 9.729e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 08:08:45,953 INFO [train.py:1114] (0/4) Epoch 10, batch 6000, loss[loss=0.223, simple_loss=0.3098, pruned_loss=0.06807, over 4187.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2879, pruned_loss=0.05816, over 937607.74 frames. 
], batch size: 25, lr: 7.48e-03, grad_scale: 64.0 +2024-07-28 08:08:45,954 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 08:08:51,642 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9263, 3.6033, 3.2719, 3.6407], device='cuda:0') +2024-07-28 08:08:58,310 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.1713, simple_loss=0.2758, pruned_loss=0.03335, over 944034.00 frames. +2024-07-28 08:08:58,311 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 08:09:02,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=130669.33333333333, ans=0.0 +2024-07-28 08:09:22,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130709.33333333333, ans=0.125 +2024-07-28 08:09:26,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=130722.66666666667, ans=0.0 +2024-07-28 08:09:32,066 INFO [train.py:1114] (0/4) Epoch 10, batch 6050, loss[loss=0.175, simple_loss=0.2585, pruned_loss=0.04576, over 4781.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2874, pruned_loss=0.05788, over 938788.87 frames. ], batch size: 12, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:09:32,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130736.0, ans=0.125 +2024-07-28 08:09:42,578 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.819e+01 6.565e+01 7.638e+01 1.917e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-28 08:09:52,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130776.0, ans=0.1 +2024-07-28 08:09:57,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130776.0, ans=0.1 +2024-07-28 08:10:04,910 INFO [train.py:1114] (0/4) Epoch 10, batch 6100, loss[loss=0.2167, simple_loss=0.3004, pruned_loss=0.06643, over 4692.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2871, pruned_loss=0.05781, over 938401.50 frames. ], batch size: 15, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:11,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=130816.0, ans=0.2 +2024-07-28 08:10:12,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130816.0, ans=0.0 +2024-07-28 08:10:26,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=130842.66666666667, ans=0.0 +2024-07-28 08:10:38,548 INFO [train.py:1114] (0/4) Epoch 10, batch 6150, loss[loss=0.2634, simple_loss=0.3285, pruned_loss=0.09912, over 3516.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2871, pruned_loss=0.05786, over 936823.06 frames. 
], batch size: 35, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:45,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=130882.66666666667, ans=0.0 +2024-07-28 08:10:49,586 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.804e+01 6.352e+01 7.086e+01 1.134e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 08:10:57,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130896.0, ans=0.125 +2024-07-28 08:11:12,362 INFO [train.py:1114] (0/4) Epoch 10, batch 6200, loss[loss=0.1983, simple_loss=0.2907, pruned_loss=0.05299, over 4745.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2871, pruned_loss=0.05778, over 936603.33 frames. ], batch size: 14, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:11:19,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=130949.33333333333, ans=0.125 +2024-07-28 08:11:24,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=130949.33333333333, ans=0.125 +2024-07-28 08:11:25,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=130962.66666666667, ans=0.0 +2024-07-28 08:11:32,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=130976.0, ans=0.025 +2024-07-28 08:11:36,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=130976.0, ans=0.95 +2024-07-28 08:11:41,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=130989.33333333333, ans=0.125 +2024-07-28 08:11:45,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=130989.33333333333, ans=0.2 +2024-07-28 08:11:46,549 INFO [train.py:1114] (0/4) Epoch 10, batch 6250, loss[loss=0.2327, simple_loss=0.3073, pruned_loss=0.07902, over 4809.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2877, pruned_loss=0.05806, over 933045.27 frames. ], batch size: 14, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:11:47,562 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.16 vs. limit=22.5 +2024-07-28 08:11:48,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=131002.66666666667, ans=0.0 +2024-07-28 08:11:57,347 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.378e+01 5.979e+01 6.836e+01 8.576e+01 1.211e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 08:12:05,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=131029.33333333333, ans=0.2 +2024-07-28 08:12:05,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.62 vs. 
limit=15.0 +2024-07-28 08:12:07,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131042.66666666667, ans=0.1 +2024-07-28 08:12:15,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=131056.0, ans=0.125 +2024-07-28 08:12:19,847 INFO [train.py:1114] (0/4) Epoch 10, batch 6300, loss[loss=0.1741, simple_loss=0.2516, pruned_loss=0.04824, over 4519.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2877, pruned_loss=0.05828, over 929345.98 frames. ], batch size: 10, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:33,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131082.66666666666, ans=0.125 +2024-07-28 08:12:35,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=131096.0, ans=0.02 +2024-07-28 08:12:36,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=15.0 +2024-07-28 08:12:37,905 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:12:54,184 INFO [train.py:1114] (0/4) Epoch 10, batch 6350, loss[loss=0.1903, simple_loss=0.2873, pruned_loss=0.04672, over 4566.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2872, pruned_loss=0.05754, over 933365.63 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:57,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=131136.0, ans=0.0 +2024-07-28 08:13:07,204 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.570e+01 6.150e+01 7.348e+01 9.033e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 08:13:07,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131149.33333333334, ans=0.125 +2024-07-28 08:13:08,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.56 vs. limit=22.5 +2024-07-28 08:13:16,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=131176.0, ans=0.0 +2024-07-28 08:13:23,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131189.33333333334, ans=0.1 +2024-07-28 08:13:29,595 INFO [train.py:1114] (0/4) Epoch 10, batch 6400, loss[loss=0.218, simple_loss=0.3072, pruned_loss=0.06444, over 4634.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2866, pruned_loss=0.05754, over 934658.89 frames. 
], batch size: 13, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:13:35,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=131202.66666666666, ans=0.0 +2024-07-28 08:13:36,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131202.66666666666, ans=0.125 +2024-07-28 08:13:40,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131216.0, ans=0.1 +2024-07-28 08:13:49,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131229.33333333334, ans=0.1 +2024-07-28 08:14:06,251 INFO [train.py:1114] (0/4) Epoch 10, batch 6450, loss[loss=0.2241, simple_loss=0.3075, pruned_loss=0.07039, over 4499.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2863, pruned_loss=0.05662, over 938583.59 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:14:16,718 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.892e+01 6.683e+01 7.805e+01 1.062e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 08:14:16,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131282.66666666666, ans=0.125 +2024-07-28 08:14:17,733 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-07-28 08:14:20,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=131296.0, ans=0.2 +2024-07-28 08:14:21,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.64 vs. limit=15.0 +2024-07-28 08:14:38,914 INFO [train.py:1114] (0/4) Epoch 10, batch 6500, loss[loss=0.2805, simple_loss=0.3467, pruned_loss=0.1072, over 3316.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2858, pruned_loss=0.0567, over 939875.93 frames. ], batch size: 35, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:14:41,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131336.0, ans=0.125 +2024-07-28 08:14:44,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0 +2024-07-28 08:14:45,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=131349.33333333334, ans=0.125 +2024-07-28 08:14:46,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131349.33333333334, ans=0.1 +2024-07-28 08:15:11,954 INFO [train.py:1114] (0/4) Epoch 10, batch 6550, loss[loss=0.1905, simple_loss=0.2724, pruned_loss=0.05432, over 4792.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2851, pruned_loss=0.05579, over 942692.49 frames. ], batch size: 11, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:15:13,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.53 vs. 
limit=15.0 +2024-07-28 08:15:14,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=131402.66666666666, ans=0.0 +2024-07-28 08:15:32,287 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.455e+01 5.898e+01 6.813e+01 1.235e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 08:15:32,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131416.0, ans=0.125 +2024-07-28 08:15:43,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=131429.33333333334, ans=0.0 +2024-07-28 08:15:46,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=131442.66666666666, ans=0.025 +2024-07-28 08:15:49,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=131442.66666666666, ans=0.0 +2024-07-28 08:15:52,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131442.66666666666, ans=0.125 +2024-07-28 08:15:57,212 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.34 vs. limit=15.0 +2024-07-28 08:16:02,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=131469.33333333334, ans=0.04949747468305833 +2024-07-28 08:16:03,318 INFO [train.py:1114] (0/4) Epoch 10, batch 6600, loss[loss=0.1814, simple_loss=0.2779, pruned_loss=0.04242, over 4940.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2847, pruned_loss=0.0556, over 944673.31 frames. ], batch size: 14, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:08,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=131469.33333333334, ans=0.025 +2024-07-28 08:16:08,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131469.33333333334, ans=0.125 +2024-07-28 08:16:25,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131509.33333333334, ans=0.1 +2024-07-28 08:16:30,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=131522.66666666666, ans=0.125 +2024-07-28 08:16:37,437 INFO [train.py:1114] (0/4) Epoch 10, batch 6650, loss[loss=0.1912, simple_loss=0.2802, pruned_loss=0.05112, over 4632.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2845, pruned_loss=0.05575, over 943379.66 frames. ], batch size: 17, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:45,865 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:16:48,316 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.716e+01 6.391e+01 7.041e+01 1.048e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 08:16:53,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.35 vs. 
limit=15.0 +2024-07-28 08:17:00,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131576.0, ans=0.1 +2024-07-28 08:17:02,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=131576.0, ans=0.125 +2024-07-28 08:17:08,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.52 vs. limit=22.5 +2024-07-28 08:17:11,432 INFO [train.py:1114] (0/4) Epoch 10, batch 6700, loss[loss=0.1997, simple_loss=0.3002, pruned_loss=0.04957, over 4701.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2851, pruned_loss=0.0562, over 942040.23 frames. ], batch size: 19, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:17:14,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=131602.66666666666, ans=0.125 +2024-07-28 08:17:34,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=131629.33333333334, ans=0.125 +2024-07-28 08:17:34,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=131629.33333333334, ans=0.0 +2024-07-28 08:17:51,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=131656.0, ans=0.95 +2024-07-28 08:17:54,051 INFO [train.py:1114] (0/4) Epoch 10, batch 6750, loss[loss=0.2058, simple_loss=0.2953, pruned_loss=0.05819, over 4324.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2856, pruned_loss=0.05628, over 940451.41 frames. ], batch size: 26, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:18:04,621 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.726e+01 6.534e+01 7.091e+01 1.095e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 08:18:13,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=131709.33333333334, ans=0.1 +2024-07-28 08:18:14,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=15.0 +2024-07-28 08:18:20,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.75 vs. limit=22.5 +2024-07-28 08:18:22,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-07-28 08:18:29,031 INFO [train.py:1114] (0/4) Epoch 10, batch 6800, loss[loss=0.2043, simple_loss=0.3014, pruned_loss=0.05364, over 4631.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2852, pruned_loss=0.05603, over 938807.92 frames. 
], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:18:32,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=131736.0, ans=0.125 +2024-07-28 08:18:38,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=131749.33333333334, ans=0.0 +2024-07-28 08:18:40,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=131749.33333333334, ans=0.125 +2024-07-28 08:18:51,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=131776.0, ans=0.0 +2024-07-28 08:19:03,573 INFO [train.py:1114] (0/4) Epoch 10, batch 6850, loss[loss=0.176, simple_loss=0.2709, pruned_loss=0.04057, over 4692.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2838, pruned_loss=0.05569, over 940540.30 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:08,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=131802.66666666666, ans=0.125 +2024-07-28 08:19:13,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.736e+01 6.428e+01 7.691e+01 1.005e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 08:19:17,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=131829.33333333334, ans=0.125 +2024-07-28 08:19:18,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131829.33333333334, ans=0.125 +2024-07-28 08:19:18,119 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:19:29,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=131842.66666666666, ans=0.04949747468305833 +2024-07-28 08:19:29,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=131842.66666666666, ans=0.2 +2024-07-28 08:19:31,129 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:19:38,322 INFO [train.py:1114] (0/4) Epoch 10, batch 6900, loss[loss=0.1862, simple_loss=0.2807, pruned_loss=0.04581, over 4967.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2856, pruned_loss=0.0563, over 942971.68 frames. 
], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:40,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131869.33333333334, ans=0.1 +2024-07-28 08:19:45,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=131882.66666666666, ans=0.025 +2024-07-28 08:19:46,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=131882.66666666666, ans=0.125 +2024-07-28 08:19:47,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=131882.66666666666, ans=0.07 +2024-07-28 08:19:52,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=131882.66666666666, ans=0.025 +2024-07-28 08:19:59,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=131909.33333333334, ans=0.2 +2024-07-28 08:20:07,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=131922.66666666666, ans=0.125 +2024-07-28 08:20:13,659 INFO [train.py:1114] (0/4) Epoch 10, batch 6950, loss[loss=0.1557, simple_loss=0.2382, pruned_loss=0.03659, over 4504.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2852, pruned_loss=0.05628, over 940008.79 frames. ], batch size: 10, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:15,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=131936.0, ans=0.2 +2024-07-28 08:20:21,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.63 vs. limit=22.5 +2024-07-28 08:20:24,310 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+01 5.671e+01 6.271e+01 7.214e+01 1.195e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 08:20:24,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=131949.33333333334, ans=0.0 +2024-07-28 08:20:31,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=131962.66666666666, ans=0.125 +2024-07-28 08:20:33,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131976.0, ans=0.1 +2024-07-28 08:20:38,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-07-28 08:20:46,962 INFO [train.py:1114] (0/4) Epoch 10, batch 7000, loss[loss=0.2622, simple_loss=0.3428, pruned_loss=0.09084, over 4617.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2847, pruned_loss=0.05616, over 938500.10 frames. ], batch size: 17, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:54,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=132016.0, ans=0.125 +2024-07-28 08:21:00,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.76 vs. 
limit=15.0 +2024-07-28 08:21:04,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=132029.33333333334, ans=0.0 +2024-07-28 08:21:19,393 INFO [train.py:1114] (0/4) Epoch 10, batch 7050, loss[loss=0.1822, simple_loss=0.2725, pruned_loss=0.04595, over 4702.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2861, pruned_loss=0.05667, over 941855.81 frames. ], batch size: 19, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:22,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=132069.33333333334, ans=0.125 +2024-07-28 08:21:30,143 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.787e+01 6.450e+01 7.707e+01 1.222e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 08:21:30,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=132082.66666666666, ans=0.125 +2024-07-28 08:21:33,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132096.0, ans=0.1 +2024-07-28 08:21:34,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=132096.0, ans=0.025 +2024-07-28 08:21:42,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=132109.33333333334, ans=0.0 +2024-07-28 08:21:48,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.02 vs. limit=15.0 +2024-07-28 08:21:52,628 INFO [train.py:1114] (0/4) Epoch 10, batch 7100, loss[loss=0.1869, simple_loss=0.2863, pruned_loss=0.04369, over 4792.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2884, pruned_loss=0.05796, over 936483.16 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:54,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=132136.0, ans=0.04949747468305833 +2024-07-28 08:22:10,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0 +2024-07-28 08:22:25,472 INFO [train.py:1114] (0/4) Epoch 10, batch 7150, loss[loss=0.2456, simple_loss=0.3305, pruned_loss=0.08038, over 4531.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2859, pruned_loss=0.05691, over 938123.85 frames. 
], batch size: 21, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:22:35,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=132216.0, ans=0.125 +2024-07-28 08:22:35,936 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.833e+01 6.423e+01 7.127e+01 1.033e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 08:22:46,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132242.66666666666, ans=0.1 +2024-07-28 08:22:48,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=132242.66666666666, ans=0.125 +2024-07-28 08:22:53,689 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:22:58,298 INFO [train.py:1114] (0/4) Epoch 10, batch 7200, loss[loss=0.2132, simple_loss=0.3158, pruned_loss=0.05533, over 4785.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2875, pruned_loss=0.05718, over 938055.90 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:23:14,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=132282.66666666666, ans=10.0 +2024-07-28 08:23:15,470 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:23:17,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=132296.0, ans=0.125 +2024-07-28 08:23:18,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=132296.0, ans=0.2 +2024-07-28 08:23:26,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=132296.0, ans=0.0 +2024-07-28 08:23:42,261 INFO [train.py:1114] (0/4) Epoch 10, batch 7250, loss[loss=0.1478, simple_loss=0.2308, pruned_loss=0.03236, over 4861.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2859, pruned_loss=0.05685, over 939604.32 frames. ], batch size: 12, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:23:45,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=132336.0, ans=0.125 +2024-07-28 08:23:52,709 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.603e+01 6.257e+01 7.383e+01 1.105e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 08:24:01,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=132376.0, ans=0.04949747468305833 +2024-07-28 08:24:08,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=132389.33333333334, ans=0.125 +2024-07-28 08:24:14,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=132389.33333333334, ans=0.125 +2024-07-28 08:24:15,231 INFO [train.py:1114] (0/4) Epoch 10, batch 7300, loss[loss=0.1945, simple_loss=0.2725, pruned_loss=0.05824, over 4844.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2866, pruned_loss=0.05702, over 939889.82 frames. 
], batch size: 12, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:24:21,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=132416.0, ans=0.95 +2024-07-28 08:24:36,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=132442.66666666666, ans=0.125 +2024-07-28 08:24:37,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.89 vs. limit=22.5 +2024-07-28 08:24:41,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=132456.0, ans=0.0 +2024-07-28 08:24:45,804 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:24:48,318 INFO [train.py:1114] (0/4) Epoch 10, batch 7350, loss[loss=0.192, simple_loss=0.2719, pruned_loss=0.05607, over 4643.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2867, pruned_loss=0.05697, over 939374.29 frames. ], batch size: 12, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:24:51,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=132469.33333333334, ans=0.125 +2024-07-28 08:24:54,135 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:24:58,623 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.561e+01 5.989e+01 6.823e+01 9.799e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 08:25:02,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132496.0, ans=0.125 +2024-07-28 08:25:09,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132509.33333333334, ans=0.1 +2024-07-28 08:25:18,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132522.66666666666, ans=0.125 +2024-07-28 08:25:20,771 INFO [train.py:1114] (0/4) Epoch 10, batch 7400, loss[loss=0.1846, simple_loss=0.2773, pruned_loss=0.04591, over 4689.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.287, pruned_loss=0.0571, over 940644.87 frames. ], batch size: 13, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:25:26,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=132549.33333333334, ans=0.125 +2024-07-28 08:25:41,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=132576.0, ans=10.0 +2024-07-28 08:25:43,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=132576.0, ans=0.125 +2024-07-28 08:25:43,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=132576.0, ans=0.0 +2024-07-28 08:25:53,302 INFO [train.py:1114] (0/4) Epoch 10, batch 7450, loss[loss=0.1588, simple_loss=0.2428, pruned_loss=0.03736, over 4606.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2866, pruned_loss=0.05733, over 938212.68 frames. 
], batch size: 11, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:26:02,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=7.67 vs. limit=12.0 +2024-07-28 08:26:04,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=132616.0, ans=0.0 +2024-07-28 08:26:05,095 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.617e+01 6.160e+01 7.093e+01 9.986e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 08:26:10,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=132629.33333333334, ans=0.125 +2024-07-28 08:26:11,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=132629.33333333334, ans=0.2 +2024-07-28 08:26:26,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=132656.0, ans=0.0 +2024-07-28 08:26:27,359 INFO [train.py:1114] (0/4) Epoch 10, batch 7500, loss[loss=0.2732, simple_loss=0.3316, pruned_loss=0.1074, over 3207.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2884, pruned_loss=0.05821, over 935802.57 frames. ], batch size: 36, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:26:35,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.97 vs. limit=15.0 +2024-07-28 08:26:46,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132696.0, ans=0.125 +2024-07-28 08:26:48,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=132709.33333333334, ans=0.025 +2024-07-28 08:26:50,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=132709.33333333334, ans=0.125 +2024-07-28 08:26:58,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132722.66666666666, ans=0.125 +2024-07-28 08:27:00,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=15.0 +2024-07-28 08:27:00,955 INFO [train.py:1114] (0/4) Epoch 10, batch 7550, loss[loss=0.2242, simple_loss=0.3157, pruned_loss=0.06635, over 4621.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.29, pruned_loss=0.05875, over 935906.97 frames. ], batch size: 17, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:27:11,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.843e+01 6.500e+01 7.580e+01 1.303e+02, threshold=1.300e+02, percent-clipped=2.0 +2024-07-28 08:27:11,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=132749.33333333334, ans=0.125 +2024-07-28 08:27:13,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=132762.66666666666, ans=0.125 +2024-07-28 08:27:13,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. 
limit=6.0 +2024-07-28 08:27:20,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132776.0, ans=0.125 +2024-07-28 08:27:45,626 INFO [train.py:1114] (0/4) Epoch 10, batch 7600, loss[loss=0.1916, simple_loss=0.2964, pruned_loss=0.04344, over 4799.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2885, pruned_loss=0.05773, over 937827.62 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:27:52,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=132816.0, ans=15.0 +2024-07-28 08:28:16,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=132856.0, ans=0.0 +2024-07-28 08:28:19,617 INFO [train.py:1114] (0/4) Epoch 10, batch 7650, loss[loss=0.1835, simple_loss=0.2676, pruned_loss=0.04974, over 4935.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2871, pruned_loss=0.05719, over 937158.22 frames. ], batch size: 12, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:28:27,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132882.66666666666, ans=0.1 +2024-07-28 08:28:29,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132882.66666666666, ans=0.1 +2024-07-28 08:28:30,224 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.574e+01 6.113e+01 7.353e+01 1.031e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 08:28:31,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.27 vs. limit=22.5 +2024-07-28 08:28:32,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.36 vs. limit=10.0 +2024-07-28 08:28:33,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132896.0, ans=0.0 +2024-07-28 08:28:40,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=132909.33333333334, ans=0.035 +2024-07-28 08:28:42,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=132909.33333333334, ans=0.0 +2024-07-28 08:28:58,489 INFO [train.py:1114] (0/4) Epoch 10, batch 7700, loss[loss=0.1906, simple_loss=0.2845, pruned_loss=0.04833, over 4696.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2868, pruned_loss=0.05693, over 934665.67 frames. ], batch size: 13, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:29:01,989 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. 
limit=12.0 +2024-07-28 08:29:04,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=132949.33333333334, ans=0.2 +2024-07-28 08:29:08,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=132949.33333333334, ans=0.2 +2024-07-28 08:29:14,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132949.33333333334, ans=0.1 +2024-07-28 08:29:32,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=132989.33333333334, ans=0.125 +2024-07-28 08:29:35,843 INFO [train.py:1114] (0/4) Epoch 10, batch 7750, loss[loss=0.1815, simple_loss=0.2733, pruned_loss=0.04482, over 4935.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2883, pruned_loss=0.05732, over 935799.12 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:29:39,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=133002.66666666666, ans=0.0 +2024-07-28 08:29:46,087 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.644e+01 6.328e+01 7.366e+01 9.654e+01, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 08:29:48,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=133029.33333333334, ans=0.2 +2024-07-28 08:29:50,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=133029.33333333334, ans=0.125 +2024-07-28 08:29:58,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=133042.66666666666, ans=0.125 +2024-07-28 08:29:59,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=133042.66666666666, ans=0.125 +2024-07-28 08:30:02,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.39 vs. limit=22.5 +2024-07-28 08:30:03,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133056.0, ans=0.125 +2024-07-28 08:30:08,493 INFO [train.py:1114] (0/4) Epoch 10, batch 7800, loss[loss=0.2164, simple_loss=0.297, pruned_loss=0.06792, over 4661.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2871, pruned_loss=0.05707, over 937268.71 frames. 
], batch size: 14, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:30:09,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=133069.33333333334, ans=0.0 +2024-07-28 08:30:18,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=133082.66666666666, ans=0.0 +2024-07-28 08:30:21,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=133096.0, ans=0.0 +2024-07-28 08:30:33,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133109.33333333334, ans=0.0 +2024-07-28 08:30:46,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133122.66666666666, ans=0.125 +2024-07-28 08:30:51,399 INFO [train.py:1114] (0/4) Epoch 10, batch 7850, loss[loss=0.196, simple_loss=0.2722, pruned_loss=0.05983, over 4525.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2867, pruned_loss=0.05687, over 936193.32 frames. ], batch size: 10, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:31:07,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133136.0, ans=0.1 +2024-07-28 08:31:10,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133136.0, ans=0.1 +2024-07-28 08:31:16,880 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.726e+01 6.171e+01 6.913e+01 1.107e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 08:31:17,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=133149.33333333334, ans=0.2 +2024-07-28 08:31:18,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=133149.33333333334, ans=0.125 +2024-07-28 08:31:30,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=133176.0, ans=0.125 +2024-07-28 08:31:38,792 INFO [train.py:1114] (0/4) Epoch 10, batch 7900, loss[loss=0.2058, simple_loss=0.2898, pruned_loss=0.06094, over 4869.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.288, pruned_loss=0.05757, over 933225.85 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:31:50,876 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-07-28 08:31:59,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. 
limit=10.0 +2024-07-28 08:32:10,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133229.33333333334, ans=0.125 +2024-07-28 08:32:11,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133229.33333333334, ans=0.1 +2024-07-28 08:32:12,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=133229.33333333334, ans=0.125 +2024-07-28 08:32:13,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133242.66666666666, ans=0.1 +2024-07-28 08:32:18,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=133242.66666666666, ans=0.2 +2024-07-28 08:32:24,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133256.0, ans=0.0 +2024-07-28 08:32:26,516 INFO [train.py:1114] (0/4) Epoch 10, batch 7950, loss[loss=0.3132, simple_loss=0.3642, pruned_loss=0.1311, over 3287.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2872, pruned_loss=0.05715, over 935296.32 frames. ], batch size: 36, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:32:34,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=133282.66666666666, ans=0.125 +2024-07-28 08:32:37,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.725e+01 6.303e+01 6.935e+01 1.035e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 08:32:38,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133282.66666666666, ans=0.1 +2024-07-28 08:32:38,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=12.0 +2024-07-28 08:32:43,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=133296.0, ans=0.125 +2024-07-28 08:32:51,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=133322.66666666666, ans=0.125 +2024-07-28 08:32:55,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=133322.66666666666, ans=0.2 +2024-07-28 08:32:57,090 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-100000.pt +2024-07-28 08:33:00,943 INFO [train.py:1114] (0/4) Epoch 10, batch 8000, loss[loss=0.1922, simple_loss=0.2708, pruned_loss=0.05683, over 4611.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2864, pruned_loss=0.05733, over 934448.03 frames. ], batch size: 11, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:33:01,044 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:33:01,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.04 vs. 
limit=15.0 +2024-07-28 08:33:07,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133349.33333333334, ans=0.1 +2024-07-28 08:33:08,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=133349.33333333334, ans=0.125 +2024-07-28 08:33:10,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=133349.33333333334, ans=0.125 +2024-07-28 08:33:13,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=133362.66666666666, ans=0.025 +2024-07-28 08:33:19,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=133376.0, ans=0.125 +2024-07-28 08:33:28,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=133389.33333333334, ans=0.09899494936611666 +2024-07-28 08:33:29,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133389.33333333334, ans=0.125 +2024-07-28 08:33:32,912 INFO [train.py:1114] (0/4) Epoch 10, batch 8050, loss[loss=0.2141, simple_loss=0.3068, pruned_loss=0.06073, over 4809.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2866, pruned_loss=0.05737, over 934308.69 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 64.0 +2024-07-28 08:33:33,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=12.0 +2024-07-28 08:33:38,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133402.66666666666, ans=0.0 +2024-07-28 08:33:40,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133416.0, ans=0.125 +2024-07-28 08:33:40,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=133416.0, ans=0.1 +2024-07-28 08:33:43,163 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.682e+01 6.216e+01 7.101e+01 1.040e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 08:33:44,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=133416.0, ans=0.0 +2024-07-28 08:33:49,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=133429.33333333334, ans=0.025 +2024-07-28 08:33:52,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=133429.33333333334, ans=0.2 +2024-07-28 08:34:06,150 INFO [train.py:1114] (0/4) Epoch 10, batch 8100, loss[loss=0.1943, simple_loss=0.2837, pruned_loss=0.05243, over 4800.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.287, pruned_loss=0.05714, over 933550.49 frames. 
], batch size: 15, lr: 7.41e-03, grad_scale: 64.0 +2024-07-28 08:34:06,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=133469.33333333334, ans=0.0 +2024-07-28 08:34:12,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=133482.66666666666, ans=0.2 +2024-07-28 08:34:21,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133496.0, ans=0.1 +2024-07-28 08:34:24,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133509.33333333334, ans=0.1 +2024-07-28 08:34:25,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=133509.33333333334, ans=0.0 +2024-07-28 08:34:26,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133509.33333333334, ans=0.125 +2024-07-28 08:34:33,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133522.66666666666, ans=0.1 +2024-07-28 08:34:38,580 INFO [train.py:1114] (0/4) Epoch 10, batch 8150, loss[loss=0.2092, simple_loss=0.2922, pruned_loss=0.06306, over 4790.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2863, pruned_loss=0.05727, over 936897.77 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0 +2024-07-28 08:34:48,789 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.775e+01 6.372e+01 7.050e+01 1.046e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 08:34:54,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=133562.66666666666, ans=0.09899494936611666 +2024-07-28 08:35:04,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.20 vs. limit=15.0 +2024-07-28 08:35:07,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=133589.33333333334, ans=0.125 +2024-07-28 08:35:09,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=133589.33333333334, ans=0.125 +2024-07-28 08:35:11,083 INFO [train.py:1114] (0/4) Epoch 10, batch 8200, loss[loss=0.2192, simple_loss=0.3056, pruned_loss=0.0664, over 4798.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2864, pruned_loss=0.05688, over 938288.48 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0 +2024-07-28 08:35:16,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=133602.66666666666, ans=0.0 +2024-07-28 08:35:25,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=133629.33333333334, ans=0.0 +2024-07-28 08:35:26,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=133629.33333333334, ans=0.0 +2024-07-28 08:35:26,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. 
limit=15.0 +2024-07-28 08:35:28,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133629.33333333334, ans=0.1 +2024-07-28 08:35:32,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=133642.66666666666, ans=0.125 +2024-07-28 08:35:53,143 INFO [train.py:1114] (0/4) Epoch 10, batch 8250, loss[loss=0.1987, simple_loss=0.2871, pruned_loss=0.05512, over 4901.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2861, pruned_loss=0.05664, over 938627.69 frames. ], batch size: 13, lr: 7.40e-03, grad_scale: 64.0 +2024-07-28 08:36:10,114 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.694e+01 6.323e+01 7.329e+01 1.024e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 08:36:22,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=133709.33333333334, ans=0.125 +2024-07-28 08:36:23,092 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.28 vs. limit=15.0 +2024-07-28 08:36:25,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.49 vs. limit=5.0 +2024-07-28 08:36:30,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=133722.66666666666, ans=0.2 +2024-07-28 08:36:30,869 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.57 vs. limit=15.0 +2024-07-28 08:36:31,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=133736.0, ans=0.125 +2024-07-28 08:36:32,392 INFO [train.py:1114] (0/4) Epoch 10, batch 8300, loss[loss=0.2317, simple_loss=0.3253, pruned_loss=0.06905, over 4883.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2878, pruned_loss=0.05732, over 938744.62 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0 +2024-07-28 08:36:39,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-07-28 08:36:47,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.98 vs. limit=15.0 +2024-07-28 08:36:50,114 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.127e-03 +2024-07-28 08:36:50,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133762.66666666666, ans=0.125 +2024-07-28 08:36:53,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=133776.0, ans=0.125 +2024-07-28 08:36:55,905 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:37:04,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133789.33333333334, ans=0.125 +2024-07-28 08:37:05,224 INFO [train.py:1114] (0/4) Epoch 10, batch 8350, loss[loss=0.2154, simple_loss=0.2975, pruned_loss=0.06661, over 4796.00 frames. 
], tot_loss[loss=0.2, simple_loss=0.2866, pruned_loss=0.05676, over 941558.86 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0 +2024-07-28 08:37:06,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.47 vs. limit=15.0 +2024-07-28 08:37:14,517 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.43 vs. limit=15.0 +2024-07-28 08:37:15,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.62 vs. limit=10.0 +2024-07-28 08:37:17,108 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.721e+01 6.167e+01 6.839e+01 1.069e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 08:37:26,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133842.66666666666, ans=0.125 +2024-07-28 08:37:32,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133842.66666666666, ans=0.1 +2024-07-28 08:37:43,975 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.43 vs. limit=15.0 +2024-07-28 08:37:44,296 INFO [train.py:1114] (0/4) Epoch 10, batch 8400, loss[loss=0.1684, simple_loss=0.2483, pruned_loss=0.04421, over 4785.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2869, pruned_loss=0.05712, over 939891.24 frames. ], batch size: 12, lr: 7.39e-03, grad_scale: 64.0 +2024-07-28 08:37:51,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=133882.66666666666, ans=0.0 +2024-07-28 08:37:52,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=133882.66666666666, ans=0.125 +2024-07-28 08:37:54,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=133882.66666666666, ans=0.0 +2024-07-28 08:38:04,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133909.33333333334, ans=0.1 +2024-07-28 08:38:06,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133909.33333333334, ans=0.0 +2024-07-28 08:38:08,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133909.33333333334, ans=0.125 +2024-07-28 08:38:18,209 INFO [train.py:1114] (0/4) Epoch 10, batch 8450, loss[loss=0.2356, simple_loss=0.3187, pruned_loss=0.07626, over 4801.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2877, pruned_loss=0.05716, over 938375.22 frames. 
], batch size: 15, lr: 7.39e-03, grad_scale: 64.0 +2024-07-28 08:38:21,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133936.0, ans=0.0 +2024-07-28 08:38:27,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=133949.33333333334, ans=0.125 +2024-07-28 08:38:28,303 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.805e+01 6.479e+01 7.666e+01 1.044e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 08:38:32,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133962.66666666666, ans=0.1 +2024-07-28 08:38:35,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133962.66666666666, ans=0.1 +2024-07-28 08:38:47,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=133989.33333333334, ans=0.07 +2024-07-28 08:38:47,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-07-28 08:38:55,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-07-28 08:38:58,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133989.33333333334, ans=0.125 +2024-07-28 08:39:02,052 INFO [train.py:1114] (0/4) Epoch 10, batch 8500, loss[loss=0.1965, simple_loss=0.2805, pruned_loss=0.05624, over 4608.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2863, pruned_loss=0.05659, over 938706.54 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0 +2024-07-28 08:39:11,922 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:39:23,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134016.0, ans=0.125 +2024-07-28 08:39:28,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134016.0, ans=0.125 +2024-07-28 08:39:57,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-28 08:40:15,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134042.66666666666, ans=0.125 +2024-07-28 08:40:24,359 INFO [train.py:1114] (0/4) Epoch 10, batch 8550, loss[loss=0.2119, simple_loss=0.2832, pruned_loss=0.07034, over 4805.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2866, pruned_loss=0.0568, over 939549.43 frames. 
], batch size: 11, lr: 7.39e-03, grad_scale: 64.0 +2024-07-28 08:40:38,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=134069.33333333334, ans=0.125 +2024-07-28 08:40:44,217 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.946e+01 6.615e+01 7.789e+01 1.197e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-28 08:40:47,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=134096.0, ans=0.025 +2024-07-28 08:40:47,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-07-28 08:40:50,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=134096.0, ans=0.125 +2024-07-28 08:40:52,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-07-28 08:40:54,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.61 vs. limit=22.5 +2024-07-28 08:41:14,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=134122.66666666666, ans=0.0 +2024-07-28 08:41:19,435 INFO [train.py:1114] (0/4) Epoch 10, batch 8600, loss[loss=0.2168, simple_loss=0.308, pruned_loss=0.06281, over 4798.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2864, pruned_loss=0.05651, over 939588.11 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0 +2024-07-28 08:41:21,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=134136.0, ans=0.07 +2024-07-28 08:41:42,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.56 vs. limit=15.0 +2024-07-28 08:41:43,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=134176.0, ans=0.125 +2024-07-28 08:41:59,312 INFO [train.py:1114] (0/4) Epoch 10, batch 8650, loss[loss=0.2113, simple_loss=0.3081, pruned_loss=0.05726, over 4891.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2859, pruned_loss=0.05618, over 941025.19 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0 +2024-07-28 08:42:09,612 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.967e+01 5.912e+01 6.591e+01 7.425e+01 1.041e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 08:42:09,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=134216.0, ans=0.05 +2024-07-28 08:42:10,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=134216.0, ans=0.125 +2024-07-28 08:42:16,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=134229.33333333334, ans=0.5 +2024-07-28 08:42:18,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. 
limit=12.0 +2024-07-28 08:42:33,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=134256.0, ans=0.09899494936611666 +2024-07-28 08:42:42,728 INFO [train.py:1114] (0/4) Epoch 10, batch 8700, loss[loss=0.1968, simple_loss=0.2893, pruned_loss=0.05212, over 4757.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2875, pruned_loss=0.0571, over 938734.20 frames. ], batch size: 13, lr: 7.38e-03, grad_scale: 64.0 +2024-07-28 08:42:45,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=134269.33333333334, ans=10.0 +2024-07-28 08:42:46,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0 +2024-07-28 08:42:47,359 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:42:51,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=134282.66666666666, ans=0.0 +2024-07-28 08:42:51,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=134282.66666666666, ans=0.2 +2024-07-28 08:42:57,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.01 vs. limit=15.0 +2024-07-28 08:43:02,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134309.33333333334, ans=0.0 +2024-07-28 08:43:05,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134309.33333333334, ans=0.0 +2024-07-28 08:43:05,685 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.63 vs. limit=12.0 +2024-07-28 08:43:05,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134309.33333333334, ans=0.125 +2024-07-28 08:43:14,618 INFO [train.py:1114] (0/4) Epoch 10, batch 8750, loss[loss=0.2089, simple_loss=0.2905, pruned_loss=0.0636, over 4683.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2872, pruned_loss=0.05687, over 936837.69 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0 +2024-07-28 08:43:24,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.734e+01 6.452e+01 7.346e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 08:43:24,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=134349.33333333334, ans=0.1 +2024-07-28 08:43:27,320 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. 
limit=15.0 +2024-07-28 08:43:30,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134362.66666666666, ans=0.1 +2024-07-28 08:43:34,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=134376.0, ans=0.125 +2024-07-28 08:43:39,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=134376.0, ans=0.05 +2024-07-28 08:43:45,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=134389.33333333334, ans=0.035 +2024-07-28 08:43:48,709 INFO [train.py:1114] (0/4) Epoch 10, batch 8800, loss[loss=0.2087, simple_loss=0.3056, pruned_loss=0.05591, over 4931.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2873, pruned_loss=0.05695, over 937411.97 frames. ], batch size: 14, lr: 7.38e-03, grad_scale: 64.0 +2024-07-28 08:43:48,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=134402.66666666666, ans=0.125 +2024-07-28 08:44:14,704 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0 +2024-07-28 08:44:16,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=134456.0, ans=0.125 +2024-07-28 08:44:19,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=134456.0, ans=0.07 +2024-07-28 08:44:22,029 INFO [train.py:1114] (0/4) Epoch 10, batch 8850, loss[loss=0.2625, simple_loss=0.3446, pruned_loss=0.09022, over 4490.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2875, pruned_loss=0.05784, over 932979.39 frames. ], batch size: 21, lr: 7.38e-03, grad_scale: 64.0 +2024-07-28 08:44:26,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=134469.33333333334, ans=0.125 +2024-07-28 08:44:32,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.67 vs. limit=6.0 +2024-07-28 08:44:34,672 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.576e+01 6.226e+01 7.150e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 08:44:36,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=134496.0, ans=0.125 +2024-07-28 08:44:51,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=134522.66666666666, ans=0.125 +2024-07-28 08:44:56,776 INFO [train.py:1114] (0/4) Epoch 10, batch 8900, loss[loss=0.1838, simple_loss=0.2631, pruned_loss=0.05227, over 4939.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2869, pruned_loss=0.05762, over 931487.65 frames. 
], batch size: 12, lr: 7.38e-03, grad_scale: 64.0 +2024-07-28 08:45:08,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=134536.0, ans=0.2 +2024-07-28 08:45:12,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=134549.33333333334, ans=0.2 +2024-07-28 08:45:29,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=134562.66666666666, ans=0.035 +2024-07-28 08:45:29,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134576.0, ans=0.125 +2024-07-28 08:45:41,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134589.33333333334, ans=0.125 +2024-07-28 08:45:43,820 INFO [train.py:1114] (0/4) Epoch 10, batch 8950, loss[loss=0.23, simple_loss=0.3099, pruned_loss=0.0751, over 4477.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2865, pruned_loss=0.05753, over 931625.47 frames. ], batch size: 21, lr: 7.37e-03, grad_scale: 64.0 +2024-07-28 08:45:50,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=134616.0, ans=0.0 +2024-07-28 08:45:53,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.674e+01 5.627e+01 6.283e+01 7.444e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 08:45:59,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=134629.33333333334, ans=0.125 +2024-07-28 08:46:02,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=134642.66666666666, ans=0.04949747468305833 +2024-07-28 08:46:03,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134642.66666666666, ans=0.1 +2024-07-28 08:46:12,695 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.64 vs. limit=15.0 +2024-07-28 08:46:15,716 INFO [train.py:1114] (0/4) Epoch 10, batch 9000, loss[loss=0.1959, simple_loss=0.29, pruned_loss=0.05086, over 4641.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2856, pruned_loss=0.05684, over 934595.94 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0 +2024-07-28 08:46:15,717 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 08:46:28,323 INFO [train.py:1146] (0/4) Epoch 10, validation: loss=0.1719, simple_loss=0.2766, pruned_loss=0.0336, over 944034.00 frames. +2024-07-28 08:46:28,324 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 08:46:39,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=134682.66666666666, ans=0.125 +2024-07-28 08:46:50,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.28 vs. 
limit=22.5 +2024-07-28 08:46:51,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=134709.33333333334, ans=0.125 +2024-07-28 08:46:58,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=134722.66666666666, ans=0.0 +2024-07-28 08:47:00,437 INFO [train.py:1114] (0/4) Epoch 10, batch 9050, loss[loss=0.1708, simple_loss=0.2516, pruned_loss=0.04499, over 4505.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2851, pruned_loss=0.05659, over 935053.08 frames. ], batch size: 10, lr: 7.37e-03, grad_scale: 64.0 +2024-07-28 08:47:02,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=134736.0, ans=0.5 +2024-07-28 08:47:10,435 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 6.275e+01 7.546e+01 8.998e+01 1.332e+02, threshold=1.509e+02, percent-clipped=1.0 +2024-07-28 08:47:15,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=134762.66666666666, ans=0.0 +2024-07-28 08:47:26,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=134789.33333333334, ans=0.025 +2024-07-28 08:47:32,992 INFO [train.py:1114] (0/4) Epoch 10, batch 9100, loss[loss=0.2313, simple_loss=0.3058, pruned_loss=0.07834, over 4932.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2851, pruned_loss=0.05618, over 937474.88 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0 +2024-07-28 08:47:35,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-07-28 08:47:36,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.75 vs. limit=12.0 +2024-07-28 08:47:38,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134802.66666666666, ans=0.1 +2024-07-28 08:47:39,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=134816.0, ans=0.025 +2024-07-28 08:47:42,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=134816.0, ans=0.2 +2024-07-28 08:47:42,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=134816.0, ans=0.0 +2024-07-28 08:48:00,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=134856.0, ans=0.0 +2024-07-28 08:48:06,630 INFO [train.py:1114] (0/4) Epoch 10, batch 9150, loss[loss=0.1748, simple_loss=0.2694, pruned_loss=0.04013, over 4811.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2864, pruned_loss=0.05663, over 936760.32 frames. 
], batch size: 14, lr: 7.37e-03, grad_scale: 64.0 +2024-07-28 08:48:12,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=134882.66666666666, ans=0.0 +2024-07-28 08:48:16,565 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.926e+01 6.660e+01 7.545e+01 1.146e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 08:48:22,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=134896.0, ans=0.0 +2024-07-28 08:48:23,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=134896.0, ans=0.125 +2024-07-28 08:48:24,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.99 vs. limit=15.0 +2024-07-28 08:48:33,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-28 08:48:38,471 INFO [train.py:1114] (0/4) Epoch 10, batch 9200, loss[loss=0.1884, simple_loss=0.2715, pruned_loss=0.05259, over 4851.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2859, pruned_loss=0.05661, over 938421.05 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0 +2024-07-28 08:49:01,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=134976.0, ans=0.125 +2024-07-28 08:49:11,330 INFO [train.py:1114] (0/4) Epoch 10, batch 9250, loss[loss=0.2134, simple_loss=0.3081, pruned_loss=0.05937, over 4636.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2853, pruned_loss=0.05652, over 938844.67 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0 +2024-07-28 08:49:15,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=135002.66666666666, ans=0.125 +2024-07-28 08:49:18,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=135016.0, ans=0.1 +2024-07-28 08:49:21,509 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.719e+01 6.236e+01 6.936e+01 9.849e+01, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 08:49:30,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=135029.33333333334, ans=0.125 +2024-07-28 08:49:31,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.51 vs. limit=15.0 +2024-07-28 08:49:34,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135042.66666666666, ans=0.0 +2024-07-28 08:49:35,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=20.25 vs. 
limit=22.5 +2024-07-28 08:49:36,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135042.66666666666, ans=0.1 +2024-07-28 08:49:44,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135056.0, ans=0.1 +2024-07-28 08:49:46,103 INFO [train.py:1114] (0/4) Epoch 10, batch 9300, loss[loss=0.1766, simple_loss=0.2634, pruned_loss=0.04489, over 4772.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2857, pruned_loss=0.05659, over 938482.58 frames. ], batch size: 12, lr: 7.36e-03, grad_scale: 64.0 +2024-07-28 08:49:55,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=135082.66666666666, ans=0.0 +2024-07-28 08:50:03,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135096.0, ans=0.125 +2024-07-28 08:50:10,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135109.33333333334, ans=0.0 +2024-07-28 08:50:13,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=135122.66666666666, ans=0.04949747468305833 +2024-07-28 08:50:17,998 INFO [train.py:1114] (0/4) Epoch 10, batch 9350, loss[loss=0.1584, simple_loss=0.2372, pruned_loss=0.03977, over 4823.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2854, pruned_loss=0.05655, over 935845.50 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 64.0 +2024-07-28 08:50:34,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=135149.33333333334, ans=0.04949747468305833 +2024-07-28 08:50:36,253 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.628e+01 6.269e+01 7.143e+01 1.097e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 08:50:36,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=135149.33333333334, ans=0.0 +2024-07-28 08:50:52,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.96 vs. limit=15.0 +2024-07-28 08:50:58,077 INFO [train.py:1114] (0/4) Epoch 10, batch 9400, loss[loss=0.225, simple_loss=0.3242, pruned_loss=0.06288, over 4702.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2852, pruned_loss=0.05677, over 933851.95 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0 +2024-07-28 08:51:00,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135202.66666666666, ans=0.125 +2024-07-28 08:51:06,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=135216.0, ans=0.05 +2024-07-28 08:51:11,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.64 vs. 
limit=15.0 +2024-07-28 08:51:16,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=135242.66666666666, ans=0.0 +2024-07-28 08:51:23,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=135256.0, ans=0.0 +2024-07-28 08:51:27,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=135256.0, ans=0.125 +2024-07-28 08:51:29,452 INFO [train.py:1114] (0/4) Epoch 10, batch 9450, loss[loss=0.1799, simple_loss=0.2516, pruned_loss=0.05412, over 4806.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2854, pruned_loss=0.0569, over 933202.78 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 32.0 +2024-07-28 08:51:33,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135269.33333333334, ans=0.125 +2024-07-28 08:51:34,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=12.0 +2024-07-28 08:51:39,983 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.423e+01 5.630e+01 6.223e+01 7.000e+01 1.011e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 08:51:49,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135309.33333333334, ans=0.1 +2024-07-28 08:51:54,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=135309.33333333334, ans=12.0 +2024-07-28 08:51:55,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=135322.66666666666, ans=12.0 +2024-07-28 08:51:58,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=12.0 +2024-07-28 08:51:58,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135322.66666666666, ans=0.1 +2024-07-28 08:52:01,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135336.0, ans=0.125 +2024-07-28 08:52:01,558 INFO [train.py:1114] (0/4) Epoch 10, batch 9500, loss[loss=0.1684, simple_loss=0.2481, pruned_loss=0.04434, over 4705.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2853, pruned_loss=0.05662, over 935594.88 frames. 
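
The recurring optim.py warnings above summarize gradient clipping: the five numbers are the min/25%/median/75%/max of recently observed gradient norms, and the logged threshold consistently equals Clipping_scale times the logged median (e.g. 1.224e+02 = 2.0 × 6.121e+01 just above), which suggests a step is clipped when its norm exceeds twice the recent median. A minimal sketch of that bookkeeping, with an assumed sliding window; the helper name and window size are illustrative, not icefall's actual optim.py code:

```python
import torch

def clip_gradients(params, norm_history, clipping_scale=2.0, window=128):
    """Sketch of quartile-tracked gradient clipping (illustrative only).

    params: a list of parameters; norm_history: a list mutated in place.
    """
    grads = [p.grad.detach().flatten() for p in params if p.grad is not None]
    total_norm = torch.cat(grads).norm().item()
    norm_history.append(total_norm)
    del norm_history[:-window]  # keep a sliding window of recent norms

    # The five logged values: min / 25% / median / 75% / max of the window.
    quartiles = torch.tensor(norm_history).quantile(
        torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])
    )
    # Observed in the log: threshold == clipping_scale * median.
    threshold = clipping_scale * quartiles[2].item()
    clipped = total_norm > threshold
    if clipped:
        for p in params:
            if p.grad is not None:
                p.grad.mul_(threshold / total_norm)
    return quartiles.tolist(), threshold, clipped
```

percent-clipped sits at 0.0 through most of this stretch because even the maximum recent norm stays below the threshold; it ticks up to 1.0 only on the rare batches whose norm crosses it.
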
], batch size: 12, lr: 7.35e-03, grad_scale: 32.0 +2024-07-28 08:52:01,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=135336.0, ans=0.0 +2024-07-28 08:52:01,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135336.0, ans=0.1 +2024-07-28 08:52:03,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=135336.0, ans=0.0 +2024-07-28 08:52:14,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135349.33333333334, ans=0.125 +2024-07-28 08:52:24,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135376.0, ans=0.1 +2024-07-28 08:52:30,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135389.33333333334, ans=0.1 +2024-07-28 08:52:34,232 INFO [train.py:1114] (0/4) Epoch 10, batch 9550, loss[loss=0.1919, simple_loss=0.2763, pruned_loss=0.0538, over 4786.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2848, pruned_loss=0.05636, over 932270.08 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 16.0 +2024-07-28 08:52:36,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=135402.66666666666, ans=0.0 +2024-07-28 08:52:48,491 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.659e+01 6.121e+01 6.852e+01 1.035e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 08:52:53,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=135429.33333333334, ans=0.0 +2024-07-28 08:52:57,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=135442.66666666666, ans=0.125 +2024-07-28 08:53:06,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=135456.0, ans=0.2 +2024-07-28 08:53:09,497 INFO [train.py:1114] (0/4) Epoch 10, batch 9600, loss[loss=0.3124, simple_loss=0.3562, pruned_loss=0.1343, over 3228.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2859, pruned_loss=0.05695, over 931203.77 frames. ], batch size: 35, lr: 7.35e-03, grad_scale: 32.0 +2024-07-28 08:53:14,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=135469.33333333334, ans=0.2 +2024-07-28 08:53:29,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=135496.0, ans=0.0 +2024-07-28 08:53:33,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135509.33333333334, ans=0.1 +2024-07-28 08:53:35,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. limit=6.0 +2024-07-28 08:53:43,567 INFO [train.py:1114] (0/4) Epoch 10, batch 9650, loss[loss=0.2559, simple_loss=0.3472, pruned_loss=0.08231, over 4837.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05769, over 927854.81 frames. 
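
Each scaling.py ScheduledFloat line reports a hyperparameter (a dropout rate, a skip probability, a balancer prob, a whitening limit, ...) whose current value `ans` is a function of the global `batch_count`. A plausible minimal reading is a piecewise-linear schedule over batch count; the breakpoints below are invented for illustration and are not the recipe's actual schedules:

```python
class ScheduledFloat:
    """Sketch: a float hyperparameter interpolated piecewise-linearly in the
    global batch count (the real scaling.py class has more machinery)."""

    def __init__(self, *points):
        self.points = sorted(points)  # (batch_count, value) pairs

    def __call__(self, batch_count):
        (x0, y0), *rest = self.points
        if batch_count <= x0:
            return y0
        for x1, y1 in rest:
            if batch_count <= x1:
                # Linear interpolation between the two surrounding breakpoints.
                return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
            x0, y0 = x1, y1
        return y0  # flat after the last breakpoint

# Made-up breakpoints, for illustration only:
conv_skip_rate = ScheduledFloat((0.0, 0.2), (20000.0, 0.0))
print(conv_skip_rate(134882.67))  # -> 0.0 once past the final breakpoint
```

By batch_count ≈ 1.35e5 such schedules would long since have flattened at their final values, which would explain why the same `ans` repeats line after line for a given name.
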
], batch size: 16, lr: 7.35e-03, grad_scale: 32.0 +2024-07-28 08:53:45,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=135536.0, ans=0.05 +2024-07-28 08:53:46,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135536.0, ans=0.1 +2024-07-28 08:53:54,685 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.653e+01 6.117e+01 7.383e+01 9.422e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 08:53:57,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.18 vs. limit=22.5 +2024-07-28 08:54:01,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135576.0, ans=0.125 +2024-07-28 08:54:04,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=135576.0, ans=0.125 +2024-07-28 08:54:05,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135576.0, ans=0.125 +2024-07-28 08:54:15,222 INFO [train.py:1114] (0/4) Epoch 10, batch 9700, loss[loss=0.2711, simple_loss=0.3441, pruned_loss=0.09906, over 4193.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2875, pruned_loss=0.0574, over 925166.58 frames. ], batch size: 25, lr: 7.35e-03, grad_scale: 32.0 +2024-07-28 08:54:21,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135616.0, ans=0.1 +2024-07-28 08:54:25,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135616.0, ans=0.1 +2024-07-28 08:54:31,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=135629.33333333334, ans=0.125 +2024-07-28 08:54:35,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=135642.66666666666, ans=0.2 +2024-07-28 08:54:36,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135642.66666666666, ans=0.125 +2024-07-28 08:54:38,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=135642.66666666666, ans=0.09899494936611666 +2024-07-28 08:54:43,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=135656.0, ans=0.07 +2024-07-28 08:54:54,893 INFO [train.py:1114] (0/4) Epoch 10, batch 9750, loss[loss=0.2385, simple_loss=0.321, pruned_loss=0.07803, over 4692.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.287, pruned_loss=0.0571, over 925473.09 frames. 
], batch size: 15, lr: 7.35e-03, grad_scale: 32.0 +2024-07-28 08:54:56,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135669.33333333334, ans=0.1 +2024-07-28 08:54:59,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135669.33333333334, ans=0.125 +2024-07-28 08:55:02,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=135682.66666666666, ans=0.07 +2024-07-28 08:55:07,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.42 vs. limit=15.0 +2024-07-28 08:55:09,164 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.808e+01 6.506e+01 7.716e+01 1.140e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 08:55:12,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.75 vs. limit=22.5 +2024-07-28 08:55:16,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135709.33333333334, ans=0.0 +2024-07-28 08:55:27,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-07-28 08:55:28,968 INFO [train.py:1114] (0/4) Epoch 10, batch 9800, loss[loss=0.1922, simple_loss=0.2783, pruned_loss=0.05306, over 4710.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2855, pruned_loss=0.05666, over 925345.56 frames. ], batch size: 12, lr: 7.34e-03, grad_scale: 32.0 +2024-07-28 08:55:29,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=135736.0, ans=0.125 +2024-07-28 08:55:40,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=135749.33333333334, ans=0.2 +2024-07-28 08:55:46,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135762.66666666666, ans=0.125 +2024-07-28 08:55:49,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=135776.0, ans=0.0 +2024-07-28 08:55:59,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135789.33333333334, ans=0.1 +2024-07-28 08:56:01,766 INFO [train.py:1114] (0/4) Epoch 10, batch 9850, loss[loss=0.2265, simple_loss=0.3146, pruned_loss=0.06925, over 4897.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2861, pruned_loss=0.05707, over 927206.82 frames. 
], batch size: 15, lr: 7.34e-03, grad_scale: 32.0 +2024-07-28 08:56:12,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=135816.0, ans=0.125 +2024-07-28 08:56:14,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.916e+01 6.813e+01 8.007e+01 1.183e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-28 08:56:19,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=135829.33333333334, ans=0.0 +2024-07-28 08:56:20,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135829.33333333334, ans=0.125 +2024-07-28 08:56:25,172 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:56:34,478 INFO [train.py:1114] (0/4) Epoch 10, batch 9900, loss[loss=0.1999, simple_loss=0.2899, pruned_loss=0.05498, over 4838.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05776, over 926934.83 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0 +2024-07-28 08:56:34,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135869.33333333334, ans=0.125 +2024-07-28 08:56:51,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=135896.0, ans=0.0 +2024-07-28 08:56:52,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135896.0, ans=0.1 +2024-07-28 08:56:55,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=135909.33333333334, ans=0.125 +2024-07-28 08:57:05,769 INFO [train.py:1114] (0/4) Epoch 10, batch 9950, loss[loss=0.161, simple_loss=0.2317, pruned_loss=0.04513, over 4802.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2882, pruned_loss=0.05885, over 929342.54 frames. ], batch size: 11, lr: 7.34e-03, grad_scale: 32.0 +2024-07-28 08:57:10,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=135936.0, ans=0.1 +2024-07-28 08:57:12,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=135949.33333333334, ans=0.125 +2024-07-28 08:57:15,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=135949.33333333334, ans=0.2 +2024-07-28 08:57:17,162 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 6.153e+01 7.007e+01 8.060e+01 1.036e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-28 08:57:37,851 INFO [train.py:1114] (0/4) Epoch 10, batch 10000, loss[loss=0.2454, simple_loss=0.3177, pruned_loss=0.08659, over 4647.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2909, pruned_loss=0.06022, over 926651.59 frames. 
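
Each train.py line pairs the current batch's loss[...] with tot_loss[...], a frame-weighted running average. The steady-state count of roughly 9.3e5 frames (with ~4,700-4,900-frame batches), and the way the count rebuilds from scratch at the start of epoch 11 further down (≈2.07e5 frames after 50 batches, ≈3.65e5 after 100), both fit an exponentially decayed running sum with a decay near 0.995. A sketch under that assumption; the decay value is inferred from the logged frame counts, not taken from train.py:

```python
def update_tot_loss(tot_loss, tot_frames, batch_loss, batch_frames, decay=0.995):
    """Sketch of the 'tot_loss ... over N frames' bookkeeping:
    exponentially decayed, frame-weighted running sums."""
    tot_loss = decay * tot_loss + batch_loss * batch_frames
    tot_frames = decay * tot_frames + batch_frames
    return tot_loss, tot_frames, tot_loss / tot_frames  # last value is reported

# Rebuilding from zero with ~4700-frame batches approaches the ~9.3e5
# steady-state frame count seen in the log:
tot_l = tot_f = 0.0
for _ in range(2000):
    tot_l, tot_f, avg = update_tot_loss(tot_l, tot_f, 0.2, 4700.0)
print(round(tot_f))  # ~= 4700 / (1 - 0.995) = 940000
```
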
], batch size: 16, lr: 7.34e-03, grad_scale: 32.0 +2024-07-28 08:57:38,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=136002.66666666666, ans=0.2 +2024-07-28 08:57:42,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=136002.66666666666, ans=0.0 +2024-07-28 08:57:46,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=136016.0, ans=0.0 +2024-07-28 08:58:03,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=136056.0, ans=0.0 +2024-07-28 08:58:09,794 INFO [train.py:1114] (0/4) Epoch 10, batch 10050, loss[loss=0.2715, simple_loss=0.3333, pruned_loss=0.1048, over 3255.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2934, pruned_loss=0.0614, over 914538.25 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:58:15,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=136069.33333333334, ans=0.125 +2024-07-28 08:58:19,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136082.66666666666, ans=0.125 +2024-07-28 08:58:22,178 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.772e+01 6.455e+01 7.428e+01 1.276e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 08:58:30,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.96 vs. limit=15.0 +2024-07-28 08:58:40,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=136122.66666666666, ans=0.125 +2024-07-28 08:58:42,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=136122.66666666666, ans=0.2 +2024-07-28 08:58:43,270 INFO [train.py:1114] (0/4) Epoch 10, batch 10100, loss[loss=0.2356, simple_loss=0.3077, pruned_loss=0.08169, over 3452.00 frames. ], tot_loss[loss=0.217, simple_loss=0.299, pruned_loss=0.06751, over 861059.46 frames. ], batch size: 36, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:59:02,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=136176.0, ans=0.2 +2024-07-28 08:59:15,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=136202.66666666666, ans=0.015 +2024-07-28 08:59:15,617 INFO [train.py:1114] (0/4) Epoch 10, batch 10150, loss[loss=0.2452, simple_loss=0.3172, pruned_loss=0.08656, over 3149.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3034, pruned_loss=0.07189, over 822196.61 frames. 
], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:59:20,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=136202.66666666666, ans=0.125 +2024-07-28 08:59:27,022 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.570e+01 6.618e+01 7.074e+01 7.488e+01 9.490e+01, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 08:59:33,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136229.33333333334, ans=0.125 +2024-07-28 08:59:44,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=136256.0, ans=15.0 +2024-07-28 08:59:47,627 INFO [train.py:1114] (0/4) Epoch 10, batch 10200, loss[loss=0.2362, simple_loss=0.3228, pruned_loss=0.07484, over 3330.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3062, pruned_loss=0.07494, over 789053.10 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 09:00:01,508 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-10.pt +2024-07-28 09:00:46,217 INFO [train.py:1114] (0/4) Epoch 11, batch 0, loss[loss=0.1777, simple_loss=0.2705, pruned_loss=0.0425, over 4861.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2705, pruned_loss=0.0425, over 4861.00 frames. ], batch size: 12, lr: 7.00e-03, grad_scale: 32.0 +2024-07-28 09:00:46,218 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 09:00:54,813 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7737, 4.1820, 4.3964, 4.3805], device='cuda:0') +2024-07-28 09:00:57,972 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1737, simple_loss=0.279, pruned_loss=0.03421, over 944034.00 frames. +2024-07-28 09:00:57,973 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 09:01:07,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.43 vs. limit=6.0 +2024-07-28 09:01:11,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:12,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.04 vs. 
limit=5.0 +2024-07-28 09:01:12,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:15,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:16,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:24,170 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:01:25,417 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:01:29,221 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 6.307e+01 6.804e+01 7.464e+01 1.172e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:01:32,019 INFO [train.py:1114] (0/4) Epoch 11, batch 50, loss[loss=0.1921, simple_loss=0.2757, pruned_loss=0.05422, over 4629.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2889, pruned_loss=0.05804, over 206592.10 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:01:34,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136365.33333333334, ans=0.125 +2024-07-28 09:01:34,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136365.33333333334, ans=0.1 +2024-07-28 09:01:34,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=136365.33333333334, ans=0.0 +2024-07-28 09:01:37,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136378.66666666666, ans=0.125 +2024-07-28 09:01:39,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=12.0 +2024-07-28 09:01:42,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=136378.66666666666, ans=0.125 +2024-07-28 09:01:44,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136378.66666666666, ans=0.1 +2024-07-28 09:01:46,517 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=15.0 +2024-07-28 09:01:49,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=136392.0, ans=0.07 +2024-07-28 09:01:55,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0 +2024-07-28 09:01:56,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=136405.33333333334, ans=0.2 +2024-07-28 09:01:59,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.68 vs. 
limit=12.0 +2024-07-28 09:02:05,894 INFO [train.py:1114] (0/4) Epoch 11, batch 100, loss[loss=0.1715, simple_loss=0.2663, pruned_loss=0.03831, over 4643.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.29, pruned_loss=0.0576, over 365292.71 frames. ], batch size: 12, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:02:25,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=136472.0, ans=0.0 +2024-07-28 09:02:36,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.385e+01 5.958e+01 6.972e+01 1.024e+02, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:02:36,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=136485.33333333334, ans=0.125 +2024-07-28 09:02:39,383 INFO [train.py:1114] (0/4) Epoch 11, batch 150, loss[loss=0.1547, simple_loss=0.2551, pruned_loss=0.02716, over 4608.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2869, pruned_loss=0.05596, over 493940.67 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:02:56,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=136525.33333333334, ans=0.2 +2024-07-28 09:02:58,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136538.66666666666, ans=0.125 +2024-07-28 09:03:04,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=136538.66666666666, ans=0.025 +2024-07-28 09:03:04,223 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=12.0 +2024-07-28 09:03:08,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136552.0, ans=0.125 +2024-07-28 09:03:14,116 INFO [train.py:1114] (0/4) Epoch 11, batch 200, loss[loss=0.1847, simple_loss=0.2795, pruned_loss=0.04495, over 4624.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2855, pruned_loss=0.05561, over 593526.07 frames. ], batch size: 21, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:03:24,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136578.66666666666, ans=0.1 +2024-07-28 09:03:25,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.40 vs. limit=15.0 +2024-07-28 09:03:28,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136592.0, ans=0.1 +2024-07-28 09:03:43,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=136618.66666666666, ans=0.0 +2024-07-28 09:03:45,021 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.746e+01 6.330e+01 7.204e+01 1.314e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 09:03:46,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=136618.66666666666, ans=0.0 +2024-07-28 09:03:47,790 INFO [train.py:1114] (0/4) Epoch 11, batch 250, loss[loss=0.2174, simple_loss=0.3156, pruned_loss=0.05957, over 4643.00 frames. 
], tot_loss[loss=0.1988, simple_loss=0.2854, pruned_loss=0.0561, over 670309.52 frames. ], batch size: 16, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:03:51,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.48 vs. limit=22.5 +2024-07-28 09:03:54,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=136645.33333333334, ans=0.125 +2024-07-28 09:03:55,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=136645.33333333334, ans=0.125 +2024-07-28 09:03:56,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=136645.33333333334, ans=0.125 +2024-07-28 09:03:57,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=136645.33333333334, ans=0.2 +2024-07-28 09:03:58,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=136645.33333333334, ans=0.125 +2024-07-28 09:04:03,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.35 vs. limit=22.5 +2024-07-28 09:04:04,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=15.0 +2024-07-28 09:04:20,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.84 vs. limit=10.0 +2024-07-28 09:04:25,281 INFO [train.py:1114] (0/4) Epoch 11, batch 300, loss[loss=0.187, simple_loss=0.2807, pruned_loss=0.04666, over 4794.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2843, pruned_loss=0.05557, over 730068.13 frames. ], batch size: 15, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:04:31,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=136698.66666666666, ans=0.125 +2024-07-28 09:04:34,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136712.0, ans=0.125 +2024-07-28 09:04:36,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.66 vs. limit=5.0 +2024-07-28 09:04:39,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=136725.33333333334, ans=0.04949747468305833 +2024-07-28 09:04:55,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.88 vs. limit=10.0 +2024-07-28 09:04:56,689 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.546e+01 5.956e+01 6.746e+01 1.009e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 09:04:59,432 INFO [train.py:1114] (0/4) Epoch 11, batch 350, loss[loss=0.1895, simple_loss=0.2712, pruned_loss=0.05388, over 4935.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2845, pruned_loss=0.05513, over 776294.01 frames. 
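
The epoch boundary a short way above shows the end-of-epoch routine: save zipformer/libri/exp/epoch-10.pt, compute the validation loss over the full validation set (loss=0.1737 over 944034.00 frames), and report peak CUDA memory (4178MB). Roughly, and with illustrative function names rather than icefall's actual API:

```python
import torch

def end_of_epoch(model, epoch, valid_loader, compute_loss,
                 exp_dir="zipformer/libri/exp"):
    """Sketch of the epoch-boundary steps visible in the log."""
    torch.save({"model": model.state_dict()}, f"{exp_dir}/epoch-{epoch}.pt")

    model.eval()
    tot_loss = tot_frames = 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, frames = compute_loss(model, batch)  # assumed signature
            tot_loss += loss * frames
            tot_frames += frames
    model.train()

    print(f"validation: loss={tot_loss / tot_frames:.4f}, "
          f"over {tot_frames:.2f} frames")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // 2**20}MB")
```
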
], batch size: 12, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:05:01,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136765.33333333334, ans=0.125 +2024-07-28 09:05:05,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.95 vs. limit=22.5 +2024-07-28 09:05:06,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=136778.66666666666, ans=0.125 +2024-07-28 09:05:10,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=136778.66666666666, ans=0.125 +2024-07-28 09:05:22,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=136805.33333333334, ans=0.0 +2024-07-28 09:05:24,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=136805.33333333334, ans=0.0 +2024-07-28 09:05:28,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136818.66666666666, ans=0.125 +2024-07-28 09:05:32,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=136832.0, ans=0.0 +2024-07-28 09:05:33,228 INFO [train.py:1114] (0/4) Epoch 11, batch 400, loss[loss=0.1828, simple_loss=0.2701, pruned_loss=0.04778, over 4697.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2852, pruned_loss=0.0558, over 813588.49 frames. ], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:05:39,184 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-07-28 09:05:43,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=136832.0, ans=0.2 +2024-07-28 09:05:52,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.92 vs. limit=15.0 +2024-07-28 09:05:54,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.20 vs. limit=22.5 +2024-07-28 09:05:57,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136858.66666666666, ans=0.125 +2024-07-28 09:05:57,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136858.66666666666, ans=0.125 +2024-07-28 09:06:12,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.682e+01 6.253e+01 7.367e+01 1.050e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:06:12,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=136885.33333333334, ans=0.05 +2024-07-28 09:06:15,186 INFO [train.py:1114] (0/4) Epoch 11, batch 450, loss[loss=0.2269, simple_loss=0.3151, pruned_loss=0.06931, over 4642.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2848, pruned_loss=0.05566, over 839200.00 frames. 
], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:06:16,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.69 vs. limit=5.0 +2024-07-28 09:06:16,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.61 vs. limit=15.0 +2024-07-28 09:06:40,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=22.5 +2024-07-28 09:06:53,471 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:06:55,261 INFO [train.py:1114] (0/4) Epoch 11, batch 500, loss[loss=0.2036, simple_loss=0.2908, pruned_loss=0.05821, over 4695.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2842, pruned_loss=0.055, over 861755.47 frames. ], batch size: 15, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:07:00,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=136965.33333333334, ans=0.125 +2024-07-28 09:07:16,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=137005.33333333334, ans=0.125 +2024-07-28 09:07:21,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=137018.66666666666, ans=0.125 +2024-07-28 09:07:22,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=137018.66666666666, ans=0.025 +2024-07-28 09:07:25,824 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.492e+01 6.007e+01 6.943e+01 8.543e+01, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 09:07:26,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.55 vs. limit=15.0 +2024-07-28 09:07:27,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=137018.66666666666, ans=0.125 +2024-07-28 09:07:28,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=137032.0, ans=0.025 +2024-07-28 09:07:28,435 INFO [train.py:1114] (0/4) Epoch 11, batch 550, loss[loss=0.2096, simple_loss=0.3021, pruned_loss=0.05851, over 4632.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2842, pruned_loss=0.05525, over 878013.49 frames. ], batch size: 17, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:07:29,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=137032.0, ans=10.0 +2024-07-28 09:07:30,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=137032.0, ans=0.125 +2024-07-28 09:07:33,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=137032.0, ans=0.125 +2024-07-28 09:07:36,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.48 vs. 
limit=15.0 +2024-07-28 09:07:38,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.12 vs. limit=15.0 +2024-07-28 09:07:44,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137058.66666666666, ans=0.1 +2024-07-28 09:08:02,907 INFO [train.py:1114] (0/4) Epoch 11, batch 600, loss[loss=0.2014, simple_loss=0.2893, pruned_loss=0.05672, over 4629.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2833, pruned_loss=0.05481, over 892157.38 frames. ], batch size: 16, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:08:12,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=137112.0, ans=0.025 +2024-07-28 09:08:13,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=137112.0, ans=0.125 +2024-07-28 09:08:14,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.93 vs. limit=22.5 +2024-07-28 09:08:33,007 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.574e+01 6.202e+01 6.752e+01 1.007e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 09:08:35,666 INFO [train.py:1114] (0/4) Epoch 11, batch 650, loss[loss=0.2363, simple_loss=0.3058, pruned_loss=0.08335, over 4771.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2832, pruned_loss=0.0553, over 903845.62 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:08:36,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137165.33333333334, ans=0.1 +2024-07-28 09:08:37,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=137165.33333333334, ans=0.2 +2024-07-28 09:08:50,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=137192.0, ans=0.025 +2024-07-28 09:08:52,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=137192.0, ans=0.05 +2024-07-28 09:09:09,626 INFO [train.py:1114] (0/4) Epoch 11, batch 700, loss[loss=0.1928, simple_loss=0.2728, pruned_loss=0.0564, over 4642.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2833, pruned_loss=0.05506, over 911870.11 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:16,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=137245.33333333334, ans=0.0 +2024-07-28 09:09:18,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=137245.33333333334, ans=0.2 +2024-07-28 09:09:22,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.88 vs. 
limit=22.5 +2024-07-28 09:09:34,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137272.0, ans=0.1 +2024-07-28 09:09:34,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137272.0, ans=0.1 +2024-07-28 09:09:35,498 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:09:40,611 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+01 5.602e+01 6.234e+01 6.972e+01 9.125e+01, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 09:09:45,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0 +2024-07-28 09:09:45,971 INFO [train.py:1114] (0/4) Epoch 11, batch 750, loss[loss=0.2225, simple_loss=0.3038, pruned_loss=0.07055, over 4691.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2827, pruned_loss=0.05507, over 918638.21 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:47,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=137298.66666666666, ans=0.0 +2024-07-28 09:09:54,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=137312.0, ans=0.0 +2024-07-28 09:09:55,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=137312.0, ans=0.0 +2024-07-28 09:10:00,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=137325.33333333334, ans=0.0 +2024-07-28 09:10:12,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=137338.66666666666, ans=0.125 +2024-07-28 09:10:15,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137352.0, ans=0.125 +2024-07-28 09:10:21,530 INFO [train.py:1114] (0/4) Epoch 11, batch 800, loss[loss=0.1839, simple_loss=0.2596, pruned_loss=0.05409, over 4856.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2828, pruned_loss=0.05477, over 924136.08 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:10:26,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137365.33333333334, ans=0.1 +2024-07-28 09:10:58,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=137418.66666666666, ans=0.0 +2024-07-28 09:11:00,610 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.597e+01 6.070e+01 6.795e+01 9.040e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 09:11:03,311 INFO [train.py:1114] (0/4) Epoch 11, batch 850, loss[loss=0.162, simple_loss=0.2587, pruned_loss=0.03262, over 4672.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2827, pruned_loss=0.05478, over 927988.08 frames. 
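
The scaling.py Whitening lines compare a whiteness metric of some module's activation covariance against a scheduled limit, and the metric only occasionally crosses it (e.g. metric=15.48 vs. limit=15.0 earlier in this epoch), presumably the point at which a corrective penalty engages. One metric with the right behavior is the ratio of the covariance's mean squared eigenvalue to its squared mean eigenvalue: it is 1.0 exactly when the covariance is a multiple of the identity and grows as the spectrum becomes lopsided. That scaling.py computes exactly this is an assumption:

```python
import torch

def whitening_metric(x):
    """Sketch of a whiteness metric: mean squared eigenvalue of the feature
    covariance divided by the squared mean eigenvalue (>= 1.0, with equality
    iff the covariance is proportional to the identity)."""
    x = x.reshape(-1, x.shape[-1]).float()   # (frames, channels)
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]             # channel covariance
    d = cov.shape[0]
    mean_eig = torch.diagonal(cov).mean()    # trace(C) / d
    mean_sq_eig = (cov * cov).sum() / d      # trace(C @ C) / d, C symmetric
    return (mean_sq_eig / (mean_eig ** 2 + 1e-20)).item()

print(whitening_metric(torch.randn(2000, 384)))  # near 1 for white noise
```
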
], batch size: 14, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:06,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=137432.0, ans=0.125 +2024-07-28 09:11:07,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137432.0, ans=0.2 +2024-07-28 09:11:11,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=137445.33333333334, ans=0.125 +2024-07-28 09:11:14,624 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.18 vs. limit=10.0 +2024-07-28 09:11:17,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=137458.66666666666, ans=0.0 +2024-07-28 09:11:31,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=137485.33333333334, ans=0.125 +2024-07-28 09:11:32,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=6.0 +2024-07-28 09:11:36,221 INFO [train.py:1114] (0/4) Epoch 11, batch 900, loss[loss=0.1694, simple_loss=0.2563, pruned_loss=0.04122, over 4854.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2829, pruned_loss=0.05474, over 928404.67 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:51,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=137525.33333333334, ans=0.025 +2024-07-28 09:11:51,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=137525.33333333334, ans=0.025 +2024-07-28 09:11:52,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=137525.33333333334, ans=0.0 +2024-07-28 09:11:56,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=137538.66666666666, ans=0.125 +2024-07-28 09:12:09,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.732e+01 6.294e+01 7.433e+01 1.155e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 09:12:10,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.14 vs. limit=15.0 +2024-07-28 09:12:11,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.15 vs. limit=15.0 +2024-07-28 09:12:12,063 INFO [train.py:1114] (0/4) Epoch 11, batch 950, loss[loss=0.19, simple_loss=0.2865, pruned_loss=0.0467, over 4774.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2835, pruned_loss=0.05443, over 930135.34 frames. ], batch size: 12, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:19,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.07 vs. 
limit=22.5 +2024-07-28 09:12:30,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=137592.0, ans=0.125 +2024-07-28 09:12:39,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=137618.66666666666, ans=0.0 +2024-07-28 09:12:46,508 INFO [train.py:1114] (0/4) Epoch 11, batch 1000, loss[loss=0.181, simple_loss=0.2655, pruned_loss=0.04823, over 4955.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2843, pruned_loss=0.05525, over 929808.30 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:49,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137632.0, ans=0.1 +2024-07-28 09:12:56,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=137645.33333333334, ans=0.0 +2024-07-28 09:13:03,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137658.66666666666, ans=0.125 +2024-07-28 09:13:06,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=137672.0, ans=0.025 +2024-07-28 09:13:18,718 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.919e+01 5.562e+01 6.150e+01 7.152e+01 9.857e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:13:18,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=137685.33333333334, ans=0.2 +2024-07-28 09:13:21,482 INFO [train.py:1114] (0/4) Epoch 11, batch 1050, loss[loss=0.1879, simple_loss=0.2867, pruned_loss=0.04457, over 4876.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2833, pruned_loss=0.0547, over 931950.04 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:13:27,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=137698.66666666666, ans=0.125 +2024-07-28 09:13:28,790 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:13:29,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137698.66666666666, ans=0.125 +2024-07-28 09:13:32,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137712.0, ans=0.1 +2024-07-28 09:13:56,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=137752.0, ans=0.07 +2024-07-28 09:13:58,577 INFO [train.py:1114] (0/4) Epoch 11, batch 1100, loss[loss=0.2054, simple_loss=0.2935, pruned_loss=0.05865, over 4907.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2842, pruned_loss=0.05522, over 934407.07 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:14:05,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=137778.66666666666, ans=0.025 +2024-07-28 09:14:13,684 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. 
limit=6.0 +2024-07-28 09:14:18,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=137792.0, ans=0.125 +2024-07-28 09:14:33,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0 +2024-07-28 09:14:40,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.240e+01 5.580e+01 6.119e+01 6.842e+01 1.423e+02, threshold=1.224e+02, percent-clipped=1.0 +2024-07-28 09:14:42,888 INFO [train.py:1114] (0/4) Epoch 11, batch 1150, loss[loss=0.2006, simple_loss=0.2891, pruned_loss=0.05607, over 4900.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2833, pruned_loss=0.05488, over 934067.22 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:14:51,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137845.33333333334, ans=0.1 +2024-07-28 09:15:09,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-07-28 09:15:14,851 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-07-28 09:15:16,444 INFO [train.py:1114] (0/4) Epoch 11, batch 1200, loss[loss=0.2114, simple_loss=0.3093, pruned_loss=0.0567, over 4871.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.284, pruned_loss=0.0551, over 933764.49 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:15:16,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137898.66666666666, ans=0.0 +2024-07-28 09:15:21,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137898.66666666666, ans=0.1 +2024-07-28 09:15:24,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=137912.0, ans=0.125 +2024-07-28 09:15:31,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0 +2024-07-28 09:15:33,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=137925.33333333334, ans=0.1 +2024-07-28 09:15:50,319 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.564e+01 6.259e+01 7.036e+01 9.371e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 09:15:53,032 INFO [train.py:1114] (0/4) Epoch 11, batch 1250, loss[loss=0.1912, simple_loss=0.2898, pruned_loss=0.04628, over 4795.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.285, pruned_loss=0.05516, over 937706.38 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:15:58,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.48 vs. 
limit=15.0 +2024-07-28 09:16:01,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=137978.66666666666, ans=0.0 +2024-07-28 09:16:05,759 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:16:11,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.17 vs. limit=15.0 +2024-07-28 09:16:26,246 INFO [train.py:1114] (0/4) Epoch 11, batch 1300, loss[loss=0.2063, simple_loss=0.3058, pruned_loss=0.05343, over 4736.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2847, pruned_loss=0.05533, over 939319.21 frames. ], batch size: 19, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:16:37,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138045.33333333334, ans=0.125 +2024-07-28 09:16:37,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=138045.33333333334, ans=0.125 +2024-07-28 09:16:38,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=138058.66666666666, ans=0.0 +2024-07-28 09:16:42,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=138058.66666666666, ans=0.2 +2024-07-28 09:16:51,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=138072.0, ans=0.025 +2024-07-28 09:16:57,014 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.412e+01 5.624e+01 6.382e+01 7.662e+01 1.173e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-28 09:16:59,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=138098.66666666666, ans=0.125 +2024-07-28 09:16:59,898 INFO [train.py:1114] (0/4) Epoch 11, batch 1350, loss[loss=0.1647, simple_loss=0.261, pruned_loss=0.03418, over 4766.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2841, pruned_loss=0.05483, over 941223.39 frames. ], batch size: 13, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:04,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.22 vs. limit=15.0 +2024-07-28 09:17:14,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=138125.33333333334, ans=0.015 +2024-07-28 09:17:16,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.64 vs. 
limit=22.5 +2024-07-28 09:17:24,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138138.66666666666, ans=0.125 +2024-07-28 09:17:25,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=138138.66666666666, ans=10.0 +2024-07-28 09:17:25,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=138138.66666666666, ans=0.125 +2024-07-28 09:17:26,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=138152.0, ans=0.5 +2024-07-28 09:17:27,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=138152.0, ans=0.125 +2024-07-28 09:17:29,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=138152.0, ans=0.0 +2024-07-28 09:17:33,277 INFO [train.py:1114] (0/4) Epoch 11, batch 1400, loss[loss=0.1693, simple_loss=0.2492, pruned_loss=0.04466, over 4713.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2838, pruned_loss=0.05493, over 942954.75 frames. ], batch size: 11, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:33,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=138165.33333333334, ans=0.025 +2024-07-28 09:17:44,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138178.66666666666, ans=0.125 +2024-07-28 09:17:50,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=138192.0, ans=0.0 +2024-07-28 09:17:55,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=138205.33333333334, ans=0.0 +2024-07-28 09:18:04,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.16 vs. limit=15.0 +2024-07-28 09:18:06,177 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.725e+01 6.807e+01 7.781e+01 1.138e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:18:08,987 INFO [train.py:1114] (0/4) Epoch 11, batch 1450, loss[loss=0.2049, simple_loss=0.2837, pruned_loss=0.06301, over 4673.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2842, pruned_loss=0.05508, over 943092.62 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:14,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.83 vs. limit=5.0 +2024-07-28 09:18:25,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.79 vs. limit=15.0 +2024-07-28 09:18:27,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=12.0 +2024-07-28 09:18:28,943 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.14 vs. limit=8.0 +2024-07-28 09:18:29,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.80 vs. 
limit=22.5 +2024-07-28 09:18:35,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=138272.0, ans=0.1 +2024-07-28 09:18:50,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=138285.33333333334, ans=0.125 +2024-07-28 09:18:52,730 INFO [train.py:1114] (0/4) Epoch 11, batch 1500, loss[loss=0.1812, simple_loss=0.2795, pruned_loss=0.04149, over 4815.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2852, pruned_loss=0.05546, over 942779.75 frames. ], batch size: 14, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:53,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.28 vs. limit=15.0 +2024-07-28 09:19:04,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138312.0, ans=0.1 +2024-07-28 09:19:14,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=138325.33333333334, ans=0.025 +2024-07-28 09:19:25,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=138338.66666666666, ans=0.125 +2024-07-28 09:19:31,301 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.052e+01 5.776e+01 6.231e+01 7.086e+01 9.841e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:19:33,314 INFO [train.py:1114] (0/4) Epoch 11, batch 1550, loss[loss=0.2324, simple_loss=0.3244, pruned_loss=0.0702, over 4908.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2847, pruned_loss=0.05519, over 938480.58 frames. ], batch size: 15, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:19:38,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=138365.33333333334, ans=0.125 +2024-07-28 09:19:55,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-07-28 09:19:59,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=138418.66666666666, ans=0.125 +2024-07-28 09:20:09,042 INFO [train.py:1114] (0/4) Epoch 11, batch 1600, loss[loss=0.2055, simple_loss=0.3033, pruned_loss=0.0538, over 4868.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2842, pruned_loss=0.05489, over 937142.58 frames. ], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:24,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-28 09:20:46,013 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.538e+01 5.961e+01 6.813e+01 9.879e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:20:47,970 INFO [train.py:1114] (0/4) Epoch 11, batch 1650, loss[loss=0.1776, simple_loss=0.2713, pruned_loss=0.04197, over 4664.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2835, pruned_loss=0.05504, over 937205.83 frames. 
], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:59,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=138512.0, ans=0.0 +2024-07-28 09:21:27,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=138552.0, ans=0.125 +2024-07-28 09:21:29,810 INFO [train.py:1114] (0/4) Epoch 11, batch 1700, loss[loss=0.1679, simple_loss=0.2497, pruned_loss=0.04302, over 4698.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2833, pruned_loss=0.05476, over 938772.42 frames. ], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:21:30,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138565.33333333334, ans=0.125 +2024-07-28 09:21:56,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=138605.33333333334, ans=0.125 +2024-07-28 09:22:06,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.454e+01 5.772e+01 6.333e+01 7.541e+01 1.576e+02, threshold=1.267e+02, percent-clipped=2.0 +2024-07-28 09:22:07,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=22.5 +2024-07-28 09:22:08,865 INFO [train.py:1114] (0/4) Epoch 11, batch 1750, loss[loss=0.1818, simple_loss=0.2576, pruned_loss=0.05297, over 4794.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2828, pruned_loss=0.05421, over 939761.30 frames. ], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:15,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.99 vs. limit=22.5 +2024-07-28 09:22:24,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.57 vs. limit=15.0 +2024-07-28 09:22:31,041 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-104000.pt +2024-07-28 09:22:34,235 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0 +2024-07-28 09:22:42,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138672.0, ans=0.0 +2024-07-28 09:22:47,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=138685.33333333334, ans=0.125 +2024-07-28 09:22:50,675 INFO [train.py:1114] (0/4) Epoch 11, batch 1800, loss[loss=0.2169, simple_loss=0.3033, pruned_loss=0.0653, over 4635.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2835, pruned_loss=0.05469, over 940378.40 frames. 
], batch size: 13, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:50,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=138698.66666666666, ans=0.125 +2024-07-28 09:23:00,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138712.0, ans=0.1 +2024-07-28 09:23:05,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138725.33333333334, ans=0.125 +2024-07-28 09:23:09,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=138725.33333333334, ans=0.07 +2024-07-28 09:23:09,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=22.5 +2024-07-28 09:23:23,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.44 vs. limit=10.0 +2024-07-28 09:23:23,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138752.0, ans=0.125 +2024-07-28 09:23:24,315 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+01 5.944e+01 6.989e+01 8.458e+01 1.208e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-28 09:23:28,456 INFO [train.py:1114] (0/4) Epoch 11, batch 1850, loss[loss=0.1893, simple_loss=0.2811, pruned_loss=0.0488, over 4816.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2822, pruned_loss=0.05394, over 940134.05 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:23:35,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.20 vs. limit=15.0 +2024-07-28 09:23:49,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=138792.0, ans=0.125 +2024-07-28 09:23:53,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=138805.33333333334, ans=0.07 +2024-07-28 09:24:07,730 INFO [train.py:1114] (0/4) Epoch 11, batch 1900, loss[loss=0.1874, simple_loss=0.2827, pruned_loss=0.046, over 4664.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.284, pruned_loss=0.05448, over 941774.81 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:07,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=138832.0, ans=0.2 +2024-07-28 09:24:23,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=138858.66666666666, ans=15.0 +2024-07-28 09:24:24,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.66 vs. 
limit=10.0 +2024-07-28 09:24:26,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=138858.66666666666, ans=0.2 +2024-07-28 09:24:32,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=138872.0, ans=0.125 +2024-07-28 09:24:39,503 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.648e+01 6.210e+01 7.045e+01 1.018e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 09:24:41,146 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:24:41,645 INFO [train.py:1114] (0/4) Epoch 11, batch 1950, loss[loss=0.2087, simple_loss=0.2915, pruned_loss=0.06296, over 4900.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2848, pruned_loss=0.05469, over 943771.16 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:51,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-07-28 09:25:03,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=138938.66666666666, ans=0.0 +2024-07-28 09:25:15,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=138952.0, ans=0.125 +2024-07-28 09:25:16,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138952.0, ans=0.125 +2024-07-28 09:25:19,167 INFO [train.py:1114] (0/4) Epoch 11, batch 2000, loss[loss=0.148, simple_loss=0.225, pruned_loss=0.03553, over 4802.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2854, pruned_loss=0.05489, over 941352.73 frames. ], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:32:28,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=138978.66666666666, ans=0.2 +2024-07-28 09:32:34,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=138992.0, ans=0.0 +2024-07-28 09:32:36,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=138992.0, ans=0.0 +2024-07-28 09:32:38,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138992.0, ans=0.125 +2024-07-28 09:32:43,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=139005.33333333334, ans=0.0 +2024-07-28 09:32:46,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=139018.66666666666, ans=0.0 +2024-07-28 09:32:47,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139018.66666666666, ans=0.125 +2024-07-28 09:32:51,189 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.827e+01 6.350e+01 7.381e+01 1.146e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:32:53,267 INFO [train.py:1114] (0/4) Epoch 11, batch 2050, loss[loss=0.169, simple_loss=0.2467, pruned_loss=0.04566, over 4611.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2838, pruned_loss=0.05463, over 939254.08 frames. 
], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:33:00,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139045.33333333334, ans=0.1 +2024-07-28 09:33:11,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=139058.66666666666, ans=0.0 +2024-07-28 09:33:20,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=139072.0, ans=0.125 +2024-07-28 09:33:20,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=139072.0, ans=0.2 +2024-07-28 09:33:25,874 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:33:28,461 INFO [train.py:1114] (0/4) Epoch 11, batch 2100, loss[loss=0.1985, simple_loss=0.2855, pruned_loss=0.05582, over 4746.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2831, pruned_loss=0.05428, over 941073.38 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:33:39,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=139112.0, ans=0.2 +2024-07-28 09:33:40,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=139112.0, ans=0.0 +2024-07-28 09:33:49,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=139138.66666666666, ans=0.0 +2024-07-28 09:34:05,605 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.652e+01 6.255e+01 7.375e+01 9.920e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:34:06,923 INFO [train.py:1114] (0/4) Epoch 11, batch 2150, loss[loss=0.1774, simple_loss=0.2794, pruned_loss=0.03767, over 4892.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2836, pruned_loss=0.05468, over 944163.73 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:34:55,030 INFO [train.py:1114] (0/4) Epoch 11, batch 2200, loss[loss=0.219, simple_loss=0.3129, pruned_loss=0.06257, over 4817.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2843, pruned_loss=0.05492, over 943370.20 frames. ], batch size: 14, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:15,599 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:35:25,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=139285.33333333334, ans=0.125 +2024-07-28 09:35:27,064 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.559e+01 6.152e+01 7.200e+01 1.019e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:35:28,423 INFO [train.py:1114] (0/4) Epoch 11, batch 2250, loss[loss=0.189, simple_loss=0.2739, pruned_loss=0.05201, over 4688.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2841, pruned_loss=0.05479, over 942042.06 frames. 
], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:51,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=139338.66666666666, ans=0.125 +2024-07-28 09:36:14,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=139352.0, ans=0.0 +2024-07-28 09:36:14,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=139352.0, ans=0.125 +2024-07-28 09:36:15,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=6.0 +2024-07-28 09:36:27,472 INFO [train.py:1114] (0/4) Epoch 11, batch 2300, loss[loss=0.1923, simple_loss=0.2754, pruned_loss=0.05463, over 4944.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2827, pruned_loss=0.0546, over 939849.73 frames. ], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:36:36,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=139378.66666666666, ans=0.2 +2024-07-28 09:36:44,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139392.0, ans=0.125 +2024-07-28 09:36:44,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=15.0 +2024-07-28 09:36:54,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.41 vs. limit=22.5 +2024-07-28 09:36:59,892 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.544e+01 6.088e+01 7.000e+01 1.026e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 09:37:01,160 INFO [train.py:1114] (0/4) Epoch 11, batch 2350, loss[loss=0.1837, simple_loss=0.2796, pruned_loss=0.0439, over 4633.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2829, pruned_loss=0.0546, over 941840.18 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:01,517 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-07-28 09:37:02,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.79 vs. limit=15.0 +2024-07-28 09:37:08,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=139445.33333333334, ans=0.09899494936611666 +2024-07-28 09:37:15,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=139458.66666666666, ans=0.025 +2024-07-28 09:37:35,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=139485.33333333334, ans=0.04949747468305833 +2024-07-28 09:37:36,621 INFO [train.py:1114] (0/4) Epoch 11, batch 2400, loss[loss=0.1612, simple_loss=0.2424, pruned_loss=0.03999, over 4637.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2827, pruned_loss=0.05414, over 941506.40 frames. 
], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:39,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=139498.66666666666, ans=0.125 +2024-07-28 09:37:49,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=139512.0, ans=0.2 +2024-07-28 09:37:51,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139512.0, ans=0.1 +2024-07-28 09:37:53,376 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:37:56,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=139525.33333333334, ans=0.125 +2024-07-28 09:38:00,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139525.33333333334, ans=0.125 +2024-07-28 09:38:03,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=139538.66666666666, ans=0.0 +2024-07-28 09:38:03,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=15.0 +2024-07-28 09:38:07,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=139538.66666666666, ans=0.0 +2024-07-28 09:38:18,422 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.707e+01 6.350e+01 6.927e+01 1.167e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:38:19,091 INFO [train.py:1114] (0/4) Epoch 11, batch 2450, loss[loss=0.19, simple_loss=0.2837, pruned_loss=0.04809, over 4692.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.284, pruned_loss=0.05477, over 937717.56 frames. ], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:38:29,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=139578.66666666666, ans=0.2 +2024-07-28 09:38:30,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=139578.66666666666, ans=0.125 +2024-07-28 09:38:31,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139578.66666666666, ans=0.1 +2024-07-28 09:38:34,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=139592.0, ans=0.0 +2024-07-28 09:38:54,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=139618.66666666666, ans=10.0 +2024-07-28 09:38:57,023 INFO [train.py:1114] (0/4) Epoch 11, batch 2500, loss[loss=0.2194, simple_loss=0.3114, pruned_loss=0.06375, over 4803.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2842, pruned_loss=0.05518, over 939562.23 frames. 
], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:03,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=139645.33333333334, ans=0.0 +2024-07-28 09:39:16,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=139672.0, ans=0.125 +2024-07-28 09:39:30,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0 +2024-07-28 09:39:32,200 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+01 5.568e+01 6.165e+01 6.885e+01 1.396e+02, threshold=1.233e+02, percent-clipped=2.0 +2024-07-28 09:39:33,011 INFO [train.py:1114] (0/4) Epoch 11, batch 2550, loss[loss=0.1526, simple_loss=0.2342, pruned_loss=0.03549, over 4810.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2838, pruned_loss=0.05474, over 939190.13 frames. ], batch size: 11, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:38,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139698.66666666666, ans=0.1 +2024-07-28 09:39:41,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-07-28 09:39:43,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=139712.0, ans=0.125 +2024-07-28 09:39:53,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139738.66666666666, ans=0.1 +2024-07-28 09:39:55,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=139738.66666666666, ans=0.125 +2024-07-28 09:40:07,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=139765.33333333334, ans=0.125 +2024-07-28 09:40:08,252 INFO [train.py:1114] (0/4) Epoch 11, batch 2600, loss[loss=0.1721, simple_loss=0.2494, pruned_loss=0.04745, over 4902.00 frames. ], tot_loss[loss=0.196, simple_loss=0.283, pruned_loss=0.05453, over 937752.90 frames. ], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:25,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=139792.0, ans=0.125 +2024-07-28 09:40:32,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=139805.33333333334, ans=0.125 +2024-07-28 09:40:44,011 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.632e+01 6.432e+01 7.757e+01 1.315e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 09:40:44,715 INFO [train.py:1114] (0/4) Epoch 11, batch 2650, loss[loss=0.2178, simple_loss=0.3055, pruned_loss=0.06503, over 4614.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2837, pruned_loss=0.05474, over 939747.88 frames. ], batch size: 16, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:56,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.69 vs. 
limit=15.0 +2024-07-28 09:41:10,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=139858.66666666666, ans=0.0 +2024-07-28 09:41:28,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=139898.66666666666, ans=0.09899494936611666 +2024-07-28 09:41:28,862 INFO [train.py:1114] (0/4) Epoch 11, batch 2700, loss[loss=0.2216, simple_loss=0.309, pruned_loss=0.06713, over 4745.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2838, pruned_loss=0.05493, over 939447.52 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:41:29,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.43 vs. limit=10.0 +2024-07-28 09:41:48,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=139912.0, ans=0.015 +2024-07-28 09:41:50,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.68 vs. limit=15.0 +2024-07-28 09:41:56,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=139925.33333333334, ans=0.025 +2024-07-28 09:41:59,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-28 09:42:10,947 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.187e+01 5.691e+01 6.358e+01 7.173e+01 9.845e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 09:42:11,616 INFO [train.py:1114] (0/4) Epoch 11, batch 2750, loss[loss=0.1953, simple_loss=0.2787, pruned_loss=0.05601, over 4706.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2832, pruned_loss=0.05484, over 939739.39 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 16.0 +2024-07-28 09:42:14,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=139965.33333333334, ans=0.125 +2024-07-28 09:42:17,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=139978.66666666666, ans=0.025 +2024-07-28 09:42:18,262 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:44:58,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139978.66666666666, ans=0.1 +2024-07-28 09:44:58,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.81 vs. limit=22.5 +2024-07-28 09:45:15,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=139992.0, ans=0.0 +2024-07-28 09:45:25,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=140005.33333333334, ans=0.0 +2024-07-28 09:45:25,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=140018.66666666666, ans=0.0 +2024-07-28 09:45:33,291 INFO [train.py:1114] (0/4) Epoch 11, batch 2800, loss[loss=0.2625, simple_loss=0.3369, pruned_loss=0.094, over 3491.00 frames. 
], tot_loss[loss=0.1969, simple_loss=0.2837, pruned_loss=0.05508, over 937293.27 frames. ], batch size: 36, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:45:33,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=140032.0, ans=0.05 +2024-07-28 09:45:33,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.47 vs. limit=22.5 +2024-07-28 09:45:36,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=140032.0, ans=0.125 +2024-07-28 09:45:47,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=140058.66666666666, ans=0.025 +2024-07-28 09:45:54,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=140072.0, ans=0.125 +2024-07-28 09:45:54,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=140072.0, ans=0.2 +2024-07-28 09:46:00,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.73 vs. limit=15.0 +2024-07-28 09:46:07,730 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.521e+01 6.232e+01 7.025e+01 9.705e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:46:08,483 INFO [train.py:1114] (0/4) Epoch 11, batch 2850, loss[loss=0.229, simple_loss=0.2975, pruned_loss=0.08026, over 4968.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2836, pruned_loss=0.05485, over 936008.19 frames. ], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:10,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=140098.66666666666, ans=0.125 +2024-07-28 09:46:15,359 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:46:24,496 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:46:27,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.32 vs. limit=22.5 +2024-07-28 09:46:32,912 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=12.0 +2024-07-28 09:46:39,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=140152.0, ans=0.125 +2024-07-28 09:46:40,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140152.0, ans=0.125 +2024-07-28 09:46:42,411 INFO [train.py:1114] (0/4) Epoch 11, batch 2900, loss[loss=0.2003, simple_loss=0.293, pruned_loss=0.05376, over 4825.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2847, pruned_loss=0.05472, over 939659.21 frames. 
], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:47:02,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140192.0, ans=0.1 +2024-07-28 09:47:09,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=140205.33333333334, ans=0.125 +2024-07-28 09:47:21,188 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.615e+01 6.138e+01 7.226e+01 1.097e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 09:47:22,505 INFO [train.py:1114] (0/4) Epoch 11, batch 2950, loss[loss=0.1501, simple_loss=0.2384, pruned_loss=0.03094, over 4708.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2823, pruned_loss=0.05382, over 938688.66 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:47:25,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=140232.0, ans=0.0 +2024-07-28 09:47:30,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=140245.33333333334, ans=0.0 +2024-07-28 09:47:51,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=140272.0, ans=0.125 +2024-07-28 09:47:58,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140285.33333333334, ans=0.0 +2024-07-28 09:48:00,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=140285.33333333334, ans=0.0 +2024-07-28 09:48:00,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.85 vs. limit=5.0 +2024-07-28 09:48:05,771 INFO [train.py:1114] (0/4) Epoch 11, batch 3000, loss[loss=0.1986, simple_loss=0.2817, pruned_loss=0.0578, over 4762.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2823, pruned_loss=0.05399, over 938430.25 frames. ], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:48:05,772 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 09:48:12,593 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.9065, 2.7951, 3.5653, 2.5547], device='cuda:0') +2024-07-28 09:48:12,803 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.1382, 4.0957, 4.3960, 4.2125], device='cuda:0') +2024-07-28 09:48:13,060 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.3322, 1.9071, 3.3137, 2.9599, 3.2961, 3.3220, 2.8916, 1.8804], + device='cuda:0') +2024-07-28 09:48:19,193 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1714, simple_loss=0.2749, pruned_loss=0.03396, over 944034.00 frames. 
+2024-07-28 09:48:19,193 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 09:48:19,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=140298.66666666666, ans=0.2 +2024-07-28 09:48:25,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=140298.66666666666, ans=0.125 +2024-07-28 09:48:28,657 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:48:36,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=140325.33333333334, ans=0.125 +2024-07-28 09:48:38,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140325.33333333334, ans=0.1 +2024-07-28 09:48:53,332 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.535e+01 6.032e+01 6.917e+01 1.051e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 09:48:54,497 INFO [train.py:1114] (0/4) Epoch 11, batch 3050, loss[loss=0.1735, simple_loss=0.2504, pruned_loss=0.04833, over 4638.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2828, pruned_loss=0.05463, over 937139.33 frames. ], batch size: 12, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:48:56,424 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.66 vs. limit=12.0 +2024-07-28 09:48:58,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=140365.33333333334, ans=0.04949747468305833 +2024-07-28 09:48:58,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=140365.33333333334, ans=0.125 +2024-07-28 09:48:59,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=140365.33333333334, ans=0.1 +2024-07-28 09:49:07,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=140392.0, ans=0.0 +2024-07-28 09:49:09,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=140392.0, ans=0.0 +2024-07-28 09:49:17,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=140405.33333333334, ans=15.0 +2024-07-28 09:49:32,123 INFO [train.py:1114] (0/4) Epoch 11, batch 3100, loss[loss=0.2355, simple_loss=0.3234, pruned_loss=0.07376, over 4618.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2822, pruned_loss=0.05446, over 937898.78 frames. 
], batch size: 16, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:49:34,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=140432.0, ans=0.125 +2024-07-28 09:49:40,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=140445.33333333334, ans=0.0 +2024-07-28 09:49:42,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=140445.33333333334, ans=0.0 +2024-07-28 09:49:43,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=140445.33333333334, ans=0.07 +2024-07-28 09:49:45,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0 +2024-07-28 09:49:51,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=140458.66666666666, ans=0.0 +2024-07-28 09:49:55,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140472.0, ans=0.1 +2024-07-28 09:49:56,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.13 vs. limit=15.0 +2024-07-28 09:50:01,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=140485.33333333334, ans=0.1 +2024-07-28 09:50:07,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.405e+01 6.178e+01 7.390e+01 1.037e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 09:50:07,895 INFO [train.py:1114] (0/4) Epoch 11, batch 3150, loss[loss=0.1924, simple_loss=0.2851, pruned_loss=0.04983, over 4643.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.282, pruned_loss=0.05418, over 938345.33 frames. ], batch size: 17, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:07,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=140498.66666666666, ans=0.0 +2024-07-28 09:50:10,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.67 vs. limit=15.0 +2024-07-28 09:50:12,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=140498.66666666666, ans=0.0 +2024-07-28 09:50:18,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=140512.0, ans=0.0 +2024-07-28 09:50:18,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.42 vs. limit=15.0 +2024-07-28 09:50:18,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140512.0, ans=0.125 +2024-07-28 09:50:32,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=140538.66666666666, ans=0.125 +2024-07-28 09:50:43,698 INFO [train.py:1114] (0/4) Epoch 11, batch 3200, loss[loss=0.1915, simple_loss=0.2764, pruned_loss=0.05327, over 4814.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2822, pruned_loss=0.05438, over 939943.21 frames. 
], batch size: 13, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:46,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140565.33333333334, ans=0.1 +2024-07-28 09:50:46,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=140565.33333333334, ans=0.05 +2024-07-28 09:50:57,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140578.66666666666, ans=0.1 +2024-07-28 09:51:15,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=140592.0, ans=0.0 +2024-07-28 09:51:31,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140605.33333333334, ans=0.125 +2024-07-28 09:51:36,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140618.66666666666, ans=0.0 +2024-07-28 09:51:49,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140618.66666666666, ans=0.1 +2024-07-28 09:52:00,438 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.714e+01 6.190e+01 6.678e+01 8.069e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 09:52:01,132 INFO [train.py:1114] (0/4) Epoch 11, batch 3250, loss[loss=0.2054, simple_loss=0.297, pruned_loss=0.05687, over 4932.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2833, pruned_loss=0.05456, over 940979.79 frames. ], batch size: 14, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:52:20,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=140632.0, ans=0.025 +2024-07-28 09:52:47,155 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:52:55,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140658.66666666666, ans=0.0 +2024-07-28 09:52:59,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.84 vs. limit=15.0 +2024-07-28 09:53:22,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140672.0, ans=0.125 +2024-07-28 09:53:28,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.84 vs. limit=15.0 +2024-07-28 09:53:38,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140685.33333333334, ans=0.0 +2024-07-28 09:53:46,908 INFO [train.py:1114] (0/4) Epoch 11, batch 3300, loss[loss=0.2226, simple_loss=0.3059, pruned_loss=0.0696, over 4725.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2818, pruned_loss=0.05408, over 941082.31 frames. 
], batch size: 19, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:53:57,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=140698.66666666666, ans=0.5 +2024-07-28 09:54:22,103 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:54:37,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140738.66666666666, ans=0.125 +2024-07-28 09:54:52,386 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.701e+01 6.395e+01 7.330e+01 1.076e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 09:54:53,124 INFO [train.py:1114] (0/4) Epoch 11, batch 3350, loss[loss=0.233, simple_loss=0.3183, pruned_loss=0.0739, over 4665.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2835, pruned_loss=0.05503, over 939139.46 frames. ], batch size: 17, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:54:56,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140765.33333333334, ans=0.1 +2024-07-28 09:54:58,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=140765.33333333334, ans=0.025 +2024-07-28 09:55:07,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=140792.0, ans=0.0 +2024-07-28 09:55:12,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0 +2024-07-28 09:55:18,937 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:55:26,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140818.66666666666, ans=0.125 +2024-07-28 09:55:29,089 INFO [train.py:1114] (0/4) Epoch 11, batch 3400, loss[loss=0.1682, simple_loss=0.2491, pruned_loss=0.04361, over 4801.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2845, pruned_loss=0.05598, over 938011.44 frames. ], batch size: 11, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:55:37,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140845.33333333334, ans=0.125 +2024-07-28 09:55:52,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140872.0, ans=0.0 +2024-07-28 09:56:04,029 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.505e+01 5.604e+01 6.128e+01 6.821e+01 1.006e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 09:56:04,704 INFO [train.py:1114] (0/4) Epoch 11, batch 3450, loss[loss=0.1759, simple_loss=0.2716, pruned_loss=0.04007, over 4711.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.284, pruned_loss=0.0556, over 938157.44 frames. ], batch size: 19, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:10,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140912.0, ans=0.1 +2024-07-28 09:56:12,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. 
limit=15.0 +2024-07-28 09:56:18,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=140925.33333333334, ans=0.2 +2024-07-28 09:56:30,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=140952.0, ans=0.025 +2024-07-28 09:56:31,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=140952.0, ans=0.0 +2024-07-28 09:56:31,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.98 vs. limit=15.0 +2024-07-28 09:56:38,868 INFO [train.py:1114] (0/4) Epoch 11, batch 3500, loss[loss=0.1789, simple_loss=0.2493, pruned_loss=0.05422, over 4934.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2823, pruned_loss=0.0549, over 938776.54 frames. ], batch size: 12, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:42,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=140965.33333333334, ans=0.125 +2024-07-28 09:56:52,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=140978.66666666666, ans=0.0 +2024-07-28 09:57:05,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141005.33333333334, ans=0.125 +2024-07-28 09:57:16,580 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+01 5.451e+01 6.238e+01 7.293e+01 9.971e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 09:57:17,308 INFO [train.py:1114] (0/4) Epoch 11, batch 3550, loss[loss=0.1807, simple_loss=0.2689, pruned_loss=0.04628, over 4653.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2834, pruned_loss=0.05528, over 939038.85 frames. ], batch size: 14, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:57:20,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=141032.0, ans=0.2 +2024-07-28 09:57:24,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.34 vs. limit=15.0 +2024-07-28 09:57:33,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.88 vs. limit=15.0 +2024-07-28 09:57:34,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=141058.66666666666, ans=0.125 +2024-07-28 09:57:57,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=141085.33333333334, ans=0.0 +2024-07-28 09:57:59,198 INFO [train.py:1114] (0/4) Epoch 11, batch 3600, loss[loss=0.1833, simple_loss=0.2686, pruned_loss=0.04903, over 4971.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.283, pruned_loss=0.05476, over 940968.84 frames. ], batch size: 13, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:11,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=141112.0, ans=0.125 +2024-07-28 09:58:24,090 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.68 vs. 
limit=22.5 +2024-07-28 09:58:34,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141152.0, ans=0.1 +2024-07-28 09:58:35,673 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.527e+01 6.114e+01 7.370e+01 1.148e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 09:58:37,571 INFO [train.py:1114] (0/4) Epoch 11, batch 3650, loss[loss=0.1859, simple_loss=0.268, pruned_loss=0.05187, over 4910.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2811, pruned_loss=0.05388, over 941225.73 frames. ], batch size: 15, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:49,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=141178.66666666666, ans=0.0 +2024-07-28 09:58:56,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141192.0, ans=0.1 +2024-07-28 09:59:04,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=141218.66666666666, ans=0.125 +2024-07-28 09:59:10,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=141232.0, ans=0.09899494936611666 +2024-07-28 09:59:11,486 INFO [train.py:1114] (0/4) Epoch 11, batch 3700, loss[loss=0.1905, simple_loss=0.2767, pruned_loss=0.0522, over 4932.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2802, pruned_loss=0.0529, over 942142.83 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:18,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=141245.33333333334, ans=0.125 +2024-07-28 09:59:22,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=141245.33333333334, ans=0.2 +2024-07-28 09:59:48,187 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.59 vs. limit=15.0 +2024-07-28 09:59:50,247 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.396e+01 5.987e+01 6.537e+01 9.206e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 09:59:50,930 INFO [train.py:1114] (0/4) Epoch 11, batch 3750, loss[loss=0.1643, simple_loss=0.2527, pruned_loss=0.03791, over 4814.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2796, pruned_loss=0.05203, over 943679.50 frames. ], batch size: 11, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:55,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.82 vs. limit=15.0 +2024-07-28 09:59:59,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141298.66666666666, ans=0.0 +2024-07-28 10:00:33,295 INFO [train.py:1114] (0/4) Epoch 11, batch 3800, loss[loss=0.1853, simple_loss=0.2841, pruned_loss=0.04323, over 4808.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2797, pruned_loss=0.05221, over 941938.33 frames. 
], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:00:36,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=141365.33333333334, ans=0.0 +2024-07-28 10:00:42,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=141378.66666666666, ans=0.0 +2024-07-28 10:00:55,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141405.33333333334, ans=0.1 +2024-07-28 10:00:57,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=141405.33333333334, ans=0.125 +2024-07-28 10:01:04,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=141418.66666666666, ans=0.2 +2024-07-28 10:01:04,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=141418.66666666666, ans=0.0 +2024-07-28 10:01:07,864 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.570e+01 6.150e+01 7.131e+01 1.072e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:01:08,501 INFO [train.py:1114] (0/4) Epoch 11, batch 3850, loss[loss=0.1857, simple_loss=0.2744, pruned_loss=0.04852, over 4631.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2808, pruned_loss=0.05278, over 942454.24 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:13,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-07-28 10:01:14,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.27 vs. limit=22.5 +2024-07-28 10:01:21,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=141458.66666666666, ans=0.0 +2024-07-28 10:01:23,650 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.44 vs. limit=15.0 +2024-07-28 10:01:32,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=141472.0, ans=0.125 +2024-07-28 10:01:34,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141472.0, ans=0.1 +2024-07-28 10:01:36,315 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-07-28 10:01:42,059 INFO [train.py:1114] (0/4) Epoch 11, batch 3900, loss[loss=0.2053, simple_loss=0.2875, pruned_loss=0.06158, over 4809.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2819, pruned_loss=0.05323, over 942784.84 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:44,840 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:01:53,899 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.57 vs. 
limit=15.0 +2024-07-28 10:02:01,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=141525.33333333334, ans=0.5 +2024-07-28 10:02:16,441 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.644e+01 6.231e+01 6.992e+01 1.031e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 10:02:17,225 INFO [train.py:1114] (0/4) Epoch 11, batch 3950, loss[loss=0.2294, simple_loss=0.3189, pruned_loss=0.0699, over 4842.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2834, pruned_loss=0.05392, over 944566.74 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:02:25,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=141578.66666666666, ans=0.0 +2024-07-28 10:02:25,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=141578.66666666666, ans=0.125 +2024-07-28 10:02:27,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=141578.66666666666, ans=0.125 +2024-07-28 10:02:29,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141578.66666666666, ans=0.1 +2024-07-28 10:02:31,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.55 vs. limit=8.0 +2024-07-28 10:02:33,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0 +2024-07-28 10:02:38,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=141605.33333333334, ans=0.125 +2024-07-28 10:02:43,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=141618.66666666666, ans=0.2 +2024-07-28 10:02:47,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.59 vs. limit=15.0 +2024-07-28 10:02:51,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=141618.66666666666, ans=0.125 +2024-07-28 10:02:51,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141618.66666666666, ans=0.125 +2024-07-28 10:02:54,338 INFO [train.py:1114] (0/4) Epoch 11, batch 4000, loss[loss=0.1705, simple_loss=0.2606, pruned_loss=0.04022, over 4776.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2837, pruned_loss=0.05397, over 941168.15 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:03:15,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.90 vs. 
limit=15.0 +2024-07-28 10:03:18,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141658.66666666666, ans=0.1 +2024-07-28 10:03:38,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141672.0, ans=0.125 +2024-07-28 10:03:43,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-07-28 10:03:46,922 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0 +2024-07-28 10:03:49,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=141685.33333333334, ans=0.0 +2024-07-28 10:03:53,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.441e+01 6.028e+01 6.961e+01 9.604e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 10:03:53,944 INFO [train.py:1114] (0/4) Epoch 11, batch 4050, loss[loss=0.2687, simple_loss=0.3477, pruned_loss=0.09489, over 3307.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2832, pruned_loss=0.05422, over 939612.23 frames. ], batch size: 35, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:03,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=141698.66666666666, ans=0.125 +2024-07-28 10:04:04,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=141698.66666666666, ans=0.125 +2024-07-28 10:04:08,574 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.15 vs. limit=10.0 +2024-07-28 10:04:20,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=141725.33333333334, ans=0.09899494936611666 +2024-07-28 10:04:24,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=141738.66666666666, ans=0.125 +2024-07-28 10:04:25,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141738.66666666666, ans=0.1 +2024-07-28 10:04:34,746 INFO [train.py:1114] (0/4) Epoch 11, batch 4100, loss[loss=0.219, simple_loss=0.3174, pruned_loss=0.06032, over 4907.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2829, pruned_loss=0.05456, over 938760.66 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:35,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141765.33333333334, ans=0.1 +2024-07-28 10:04:45,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.69 vs. 
limit=10.0 +2024-07-28 10:04:59,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=141792.0, ans=0.125 +2024-07-28 10:05:04,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=141792.0, ans=0.07 +2024-07-28 10:05:15,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.33 vs. limit=15.0 +2024-07-28 10:05:36,368 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.886e+01 6.549e+01 7.693e+01 1.193e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 10:05:37,307 INFO [train.py:1114] (0/4) Epoch 11, batch 4150, loss[loss=0.1679, simple_loss=0.2696, pruned_loss=0.03315, over 4828.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2826, pruned_loss=0.05431, over 938225.55 frames. ], batch size: 13, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:06:02,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=141832.0, ans=0.025 +2024-07-28 10:06:02,445 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:06:05,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=141845.33333333334, ans=0.125 +2024-07-28 10:06:42,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=141858.66666666666, ans=0.2 +2024-07-28 10:06:59,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=141872.0, ans=0.125 +2024-07-28 10:07:01,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=141872.0, ans=0.02 +2024-07-28 10:07:08,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=141885.33333333334, ans=10.0 +2024-07-28 10:07:08,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=141885.33333333334, ans=0.0 +2024-07-28 10:07:11,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=141885.33333333334, ans=0.5 +2024-07-28 10:07:12,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=141885.33333333334, ans=0.125 +2024-07-28 10:07:15,449 INFO [train.py:1114] (0/4) Epoch 11, batch 4200, loss[loss=0.2073, simple_loss=0.3087, pruned_loss=0.05299, over 4901.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2829, pruned_loss=0.05434, over 939832.91 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:07:20,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=141898.66666666666, ans=0.125 +2024-07-28 10:07:40,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=141912.0, ans=0.2 +2024-07-28 10:07:45,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.40 vs. 
limit=15.0 +2024-07-28 10:08:41,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.645e+01 6.237e+01 6.874e+01 1.098e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 10:08:41,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=141965.33333333334, ans=0.0 +2024-07-28 10:08:41,708 INFO [train.py:1114] (0/4) Epoch 11, batch 4250, loss[loss=0.1686, simple_loss=0.2559, pruned_loss=0.04069, over 4644.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.284, pruned_loss=0.05511, over 940752.92 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:08:45,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=141965.33333333334, ans=0.0 +2024-07-28 10:08:53,401 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:09:03,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=141992.0, ans=0.0 +2024-07-28 10:09:07,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=141992.0, ans=0.125 +2024-07-28 10:09:11,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=142005.33333333334, ans=0.0 +2024-07-28 10:09:21,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=142005.33333333334, ans=0.0 +2024-07-28 10:09:24,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.40 vs. limit=6.0 +2024-07-28 10:09:27,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=142018.66666666666, ans=0.2 +2024-07-28 10:09:31,297 INFO [train.py:1114] (0/4) Epoch 11, batch 4300, loss[loss=0.2211, simple_loss=0.3212, pruned_loss=0.06052, over 4758.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2848, pruned_loss=0.05539, over 940448.58 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:09:45,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=142045.33333333334, ans=0.125 +2024-07-28 10:09:53,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=142045.33333333334, ans=0.125 +2024-07-28 10:10:06,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=142058.66666666666, ans=0.025 +2024-07-28 10:10:22,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=142085.33333333334, ans=0.0 +2024-07-28 10:10:27,598 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.309e+01 5.443e+01 5.948e+01 6.522e+01 9.090e+01, threshold=1.190e+02, percent-clipped=0.0 +2024-07-28 10:10:28,343 INFO [train.py:1114] (0/4) Epoch 11, batch 4350, loss[loss=0.2064, simple_loss=0.3013, pruned_loss=0.05576, over 4752.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2843, pruned_loss=0.05504, over 941195.74 frames. 
], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:10:29,141 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:10:30,008 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.04 vs. limit=15.0 +2024-07-28 10:10:30,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=142098.66666666666, ans=0.0 +2024-07-28 10:10:31,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142098.66666666666, ans=0.1 +2024-07-28 10:10:37,451 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.35 vs. limit=15.0 +2024-07-28 10:10:40,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-28 10:10:41,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=142112.0, ans=0.0 +2024-07-28 10:10:45,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=142125.33333333334, ans=10.0 +2024-07-28 10:11:01,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.86 vs. limit=10.0 +2024-07-28 10:11:05,768 INFO [train.py:1114] (0/4) Epoch 11, batch 4400, loss[loss=0.1879, simple_loss=0.2835, pruned_loss=0.04612, over 4818.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.284, pruned_loss=0.05474, over 940912.94 frames. ], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:11:29,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.79 vs. limit=15.0 +2024-07-28 10:11:38,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.755e+01 6.372e+01 7.291e+01 1.018e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 10:11:39,292 INFO [train.py:1114] (0/4) Epoch 11, batch 4450, loss[loss=0.1849, simple_loss=0.2676, pruned_loss=0.05108, over 4940.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2841, pruned_loss=0.05489, over 939716.80 frames. ], batch size: 12, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:03,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=15.0 +2024-07-28 10:12:17,606 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:12:20,688 INFO [train.py:1114] (0/4) Epoch 11, batch 4500, loss[loss=0.2015, simple_loss=0.2967, pruned_loss=0.05315, over 4741.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2848, pruned_loss=0.05502, over 938686.79 frames. 
], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:34,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=142312.0, ans=0.04949747468305833 +2024-07-28 10:12:34,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142312.0, ans=0.1 +2024-07-28 10:12:36,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=142325.33333333334, ans=0.0 +2024-07-28 10:12:42,653 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.48 vs. limit=6.0 +2024-07-28 10:12:43,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142325.33333333334, ans=0.1 +2024-07-28 10:12:47,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142338.66666666666, ans=0.125 +2024-07-28 10:12:47,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.46 vs. limit=15.0 +2024-07-28 10:12:48,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=142338.66666666666, ans=0.0 +2024-07-28 10:12:51,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=142352.0, ans=0.2 +2024-07-28 10:12:57,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.457e+01 5.934e+01 6.532e+01 9.481e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 10:12:57,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142365.33333333334, ans=0.125 +2024-07-28 10:12:58,140 INFO [train.py:1114] (0/4) Epoch 11, batch 4550, loss[loss=0.1675, simple_loss=0.245, pruned_loss=0.04499, over 4893.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.284, pruned_loss=0.05475, over 940482.14 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:13:00,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=142365.33333333334, ans=0.125 +2024-07-28 10:13:01,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-07-28 10:13:07,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=142378.66666666666, ans=0.025 +2024-07-28 10:13:09,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=142378.66666666666, ans=0.0 +2024-07-28 10:13:33,067 INFO [train.py:1114] (0/4) Epoch 11, batch 4600, loss[loss=0.2113, simple_loss=0.3032, pruned_loss=0.05971, over 4505.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2827, pruned_loss=0.05431, over 938349.53 frames. 
], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:13:44,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=142445.33333333334, ans=10.0 +2024-07-28 10:13:54,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0 +2024-07-28 10:14:16,902 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.751e+01 6.441e+01 7.092e+01 1.186e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 10:14:17,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=142498.66666666666, ans=0.0 +2024-07-28 10:14:22,141 INFO [train.py:1114] (0/4) Epoch 11, batch 4650, loss[loss=0.2301, simple_loss=0.3102, pruned_loss=0.07503, over 4840.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2835, pruned_loss=0.05409, over 940174.90 frames. ], batch size: 16, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:14:32,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=142512.0, ans=0.0 +2024-07-28 10:14:36,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=142512.0, ans=0.0 +2024-07-28 10:14:40,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=142525.33333333334, ans=0.125 +2024-07-28 10:14:48,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142538.66666666666, ans=0.1 +2024-07-28 10:14:49,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=142538.66666666666, ans=0.125 +2024-07-28 10:15:02,951 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.62 vs. limit=22.5 +2024-07-28 10:15:09,646 INFO [train.py:1114] (0/4) Epoch 11, batch 4700, loss[loss=0.185, simple_loss=0.2763, pruned_loss=0.0468, over 4700.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2838, pruned_loss=0.05456, over 937695.94 frames. ], batch size: 11, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:15:20,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142578.66666666666, ans=0.125 +2024-07-28 10:15:21,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=142578.66666666666, ans=0.125 +2024-07-28 10:15:32,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=142592.0, ans=0.0 +2024-07-28 10:15:33,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=12.0 +2024-07-28 10:15:34,442 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.18 vs. 
limit=15.0 +2024-07-28 10:15:38,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=142605.33333333334, ans=0.125 +2024-07-28 10:15:38,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142605.33333333334, ans=0.125 +2024-07-28 10:15:49,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.868e+01 6.350e+01 7.061e+01 1.022e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 10:15:50,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=142632.0, ans=6.0 +2024-07-28 10:15:50,109 INFO [train.py:1114] (0/4) Epoch 11, batch 4750, loss[loss=0.2183, simple_loss=0.3029, pruned_loss=0.06683, over 4563.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2847, pruned_loss=0.05488, over 935774.43 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:16:05,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=142645.33333333334, ans=0.95 +2024-07-28 10:16:10,617 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=15.0 +2024-07-28 10:16:26,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.91 vs. limit=12.0 +2024-07-28 10:16:26,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=142685.33333333334, ans=0.0 +2024-07-28 10:16:42,918 INFO [train.py:1114] (0/4) Epoch 11, batch 4800, loss[loss=0.192, simple_loss=0.2943, pruned_loss=0.04481, over 4691.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2838, pruned_loss=0.05446, over 932730.33 frames. ], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:16:43,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.56 vs. limit=6.0 +2024-07-28 10:16:45,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142698.66666666666, ans=0.1 +2024-07-28 10:16:59,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=142712.0, ans=0.0 +2024-07-28 10:17:00,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142712.0, ans=0.1 +2024-07-28 10:17:01,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. 
limit=6.0 +2024-07-28 10:17:01,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=142712.0, ans=0.125 +2024-07-28 10:17:02,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142712.0, ans=0.125 +2024-07-28 10:17:11,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=142725.33333333334, ans=0.0 +2024-07-28 10:17:12,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.06 vs. limit=15.0 +2024-07-28 10:17:20,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=142752.0, ans=0.125 +2024-07-28 10:17:25,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142752.0, ans=0.0 +2024-07-28 10:17:27,021 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.658e+01 6.076e+01 6.872e+01 9.188e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 10:17:35,886 INFO [train.py:1114] (0/4) Epoch 11, batch 4850, loss[loss=0.2082, simple_loss=0.2981, pruned_loss=0.05919, over 4741.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2844, pruned_loss=0.05461, over 932684.67 frames. ], batch size: 14, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:18:01,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=142805.33333333334, ans=0.125 +2024-07-28 10:18:01,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=142805.33333333334, ans=0.0 +2024-07-28 10:18:02,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=142805.33333333334, ans=0.125 +2024-07-28 10:18:05,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.95 vs. limit=15.0 +2024-07-28 10:18:15,432 INFO [train.py:1114] (0/4) Epoch 11, batch 4900, loss[loss=0.1703, simple_loss=0.2714, pruned_loss=0.03462, over 4761.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.284, pruned_loss=0.05445, over 934549.90 frames. ], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:18:40,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=142872.0, ans=0.125 +2024-07-28 10:18:43,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142872.0, ans=0.1 +2024-07-28 10:18:46,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142872.0, ans=0.125 +2024-07-28 10:18:46,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. 
limit=10.0 +2024-07-28 10:18:54,120 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.557e+01 6.177e+01 6.945e+01 1.051e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 10:18:54,883 INFO [train.py:1114] (0/4) Epoch 11, batch 4950, loss[loss=0.2911, simple_loss=0.3571, pruned_loss=0.1125, over 3382.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2842, pruned_loss=0.05469, over 931873.02 frames. ], batch size: 35, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:19:02,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.45 vs. limit=15.0 +2024-07-28 10:19:03,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142912.0, ans=0.1 +2024-07-28 10:19:09,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142925.33333333334, ans=0.125 +2024-07-28 10:19:20,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=142938.66666666666, ans=0.0 +2024-07-28 10:19:22,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=142938.66666666666, ans=0.125 +2024-07-28 10:19:32,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.76 vs. limit=22.5 +2024-07-28 10:19:32,823 INFO [train.py:1114] (0/4) Epoch 11, batch 5000, loss[loss=0.1956, simple_loss=0.2985, pruned_loss=0.04637, over 4665.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2841, pruned_loss=0.05442, over 935719.96 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:19:32,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=142965.33333333334, ans=0.125 +2024-07-28 10:19:42,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=142965.33333333334, ans=0.0 +2024-07-28 10:19:48,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142978.66666666666, ans=0.1 +2024-07-28 10:19:50,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.44 vs. limit=22.5 +2024-07-28 10:20:17,712 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.560e+01 5.974e+01 6.425e+01 8.960e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 10:20:18,280 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.46 vs. limit=15.0 +2024-07-28 10:20:18,551 INFO [train.py:1114] (0/4) Epoch 11, batch 5050, loss[loss=0.1628, simple_loss=0.2415, pruned_loss=0.04205, over 4842.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2827, pruned_loss=0.05417, over 938060.62 frames. 
], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:20:20,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=143032.0, ans=0.125 +2024-07-28 10:20:20,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=143032.0, ans=10.0 +2024-07-28 10:20:37,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143058.66666666666, ans=0.1 +2024-07-28 10:20:44,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=143072.0, ans=0.125 +2024-07-28 10:20:45,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.81 vs. limit=15.0 +2024-07-28 10:20:53,979 INFO [train.py:1114] (0/4) Epoch 11, batch 5100, loss[loss=0.1912, simple_loss=0.2724, pruned_loss=0.05501, over 4772.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2835, pruned_loss=0.0545, over 935372.05 frames. ], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:21:01,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=143112.0, ans=0.05 +2024-07-28 10:21:08,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=15.0 +2024-07-28 10:21:10,127 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0 +2024-07-28 10:21:11,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143125.33333333334, ans=0.1 +2024-07-28 10:21:11,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143125.33333333334, ans=0.125 +2024-07-28 10:21:15,977 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:21:40,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=143152.0, ans=0.0 +2024-07-28 10:21:46,390 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.691e+01 6.335e+01 6.758e+01 9.887e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 10:21:46,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.71 vs. limit=15.0 +2024-07-28 10:21:47,044 INFO [train.py:1114] (0/4) Epoch 11, batch 5150, loss[loss=0.1806, simple_loss=0.269, pruned_loss=0.04606, over 4849.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2839, pruned_loss=0.05493, over 936483.56 frames. 
], batch size: 16, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:21:58,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143178.66666666666, ans=0.1 +2024-07-28 10:22:09,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143205.33333333334, ans=0.1 +2024-07-28 10:22:13,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0 +2024-07-28 10:22:18,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=143218.66666666666, ans=0.1 +2024-07-28 10:22:18,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=143218.66666666666, ans=0.025 +2024-07-28 10:22:18,732 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:22:19,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.00 vs. limit=15.0 +2024-07-28 10:22:22,785 INFO [train.py:1114] (0/4) Epoch 11, batch 5200, loss[loss=0.2075, simple_loss=0.3058, pruned_loss=0.05465, over 4674.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2836, pruned_loss=0.05446, over 936561.99 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:22:44,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143272.0, ans=0.125 +2024-07-28 10:22:56,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.779e+01 6.416e+01 7.170e+01 1.127e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 10:22:57,078 INFO [train.py:1114] (0/4) Epoch 11, batch 5250, loss[loss=0.1921, simple_loss=0.291, pruned_loss=0.04664, over 4891.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2834, pruned_loss=0.05433, over 936064.23 frames. ], batch size: 13, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:00,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.31 vs. limit=10.0 +2024-07-28 10:23:10,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=143325.33333333334, ans=0.125 +2024-07-28 10:23:15,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.01 vs. limit=15.0 +2024-07-28 10:23:18,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143338.66666666666, ans=0.1 +2024-07-28 10:23:18,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.14 vs. limit=12.0 +2024-07-28 10:23:21,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.87 vs. limit=22.5 +2024-07-28 10:23:30,538 INFO [train.py:1114] (0/4) Epoch 11, batch 5300, loss[loss=0.2109, simple_loss=0.2963, pruned_loss=0.06269, over 4594.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.283, pruned_loss=0.05439, over 934345.48 frames. 
], batch size: 16, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:34,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=143365.33333333334, ans=0.125 +2024-07-28 10:23:44,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=143378.66666666666, ans=0.1 +2024-07-28 10:23:46,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=143392.0, ans=0.07 +2024-07-28 10:24:05,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=143405.33333333334, ans=15.0 +2024-07-28 10:24:06,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=143405.33333333334, ans=0.125 +2024-07-28 10:24:07,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=143418.66666666666, ans=0.125 +2024-07-28 10:24:13,410 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.558e+01 6.072e+01 7.139e+01 1.045e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 10:24:14,200 INFO [train.py:1114] (0/4) Epoch 11, batch 5350, loss[loss=0.1718, simple_loss=0.2465, pruned_loss=0.04855, over 4490.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2838, pruned_loss=0.05478, over 936456.25 frames. ], batch size: 10, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:18,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=143432.0, ans=0.0 +2024-07-28 10:24:24,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=143445.33333333334, ans=0.0 +2024-07-28 10:24:37,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=143472.0, ans=0.025 +2024-07-28 10:24:46,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=143485.33333333334, ans=0.125 +2024-07-28 10:24:48,736 INFO [train.py:1114] (0/4) Epoch 11, batch 5400, loss[loss=0.2171, simple_loss=0.309, pruned_loss=0.06259, over 4292.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2848, pruned_loss=0.05525, over 930892.31 frames. ], batch size: 26, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:48,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=143498.66666666666, ans=0.025 +2024-07-28 10:24:50,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=143498.66666666666, ans=0.0 +2024-07-28 10:24:56,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.71 vs. 
limit=22.5 +2024-07-28 10:25:00,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=143512.0, ans=15.0 +2024-07-28 10:25:01,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=143525.33333333334, ans=0.125 +2024-07-28 10:25:23,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143538.66666666666, ans=0.125 +2024-07-28 10:25:24,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=143538.66666666666, ans=0.0 +2024-07-28 10:25:26,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=143538.66666666666, ans=0.125 +2024-07-28 10:25:32,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=143538.66666666666, ans=0.0 +2024-07-28 10:25:37,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143552.0, ans=0.1 +2024-07-28 10:25:40,459 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.583e+01 6.179e+01 6.977e+01 1.082e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 10:25:40,492 INFO [train.py:1114] (0/4) Epoch 11, batch 5450, loss[loss=0.1922, simple_loss=0.2609, pruned_loss=0.06174, over 4696.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2834, pruned_loss=0.05422, over 933685.84 frames. ], batch size: 11, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:25:41,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=143565.33333333334, ans=0.2 +2024-07-28 10:25:42,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143565.33333333334, ans=0.125 +2024-07-28 10:25:56,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=12.0 +2024-07-28 10:26:03,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143592.0, ans=0.1 +2024-07-28 10:26:13,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=143618.66666666666, ans=0.2 +2024-07-28 10:26:19,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=143618.66666666666, ans=0.125 +2024-07-28 10:26:23,931 INFO [train.py:1114] (0/4) Epoch 11, batch 5500, loss[loss=0.2033, simple_loss=0.2864, pruned_loss=0.06008, over 4313.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2828, pruned_loss=0.05486, over 931501.52 frames. 
], batch size: 25, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:26:36,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143658.66666666666, ans=0.125 +2024-07-28 10:26:39,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=143658.66666666666, ans=0.125 +2024-07-28 10:26:41,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143658.66666666666, ans=0.1 +2024-07-28 10:32:04,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=143672.0, ans=0.0 +2024-07-28 10:32:15,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=143685.33333333334, ans=0.125 +2024-07-28 10:32:22,305 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.760e+01 6.498e+01 7.825e+01 1.226e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 10:32:22,340 INFO [train.py:1114] (0/4) Epoch 11, batch 5550, loss[loss=0.1513, simple_loss=0.242, pruned_loss=0.03028, over 4703.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2818, pruned_loss=0.05433, over 933721.70 frames. ], batch size: 12, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:32:31,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=143712.0, ans=0.0 +2024-07-28 10:32:34,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143712.0, ans=0.125 +2024-07-28 10:32:38,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=143725.33333333334, ans=0.0 +2024-07-28 10:32:39,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143725.33333333334, ans=0.125 +2024-07-28 10:32:45,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.70 vs. limit=12.0 +2024-07-28 10:32:45,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=143738.66666666666, ans=0.0 +2024-07-28 10:32:57,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=143752.0, ans=0.125 +2024-07-28 10:32:58,941 INFO [train.py:1114] (0/4) Epoch 11, batch 5600, loss[loss=0.1828, simple_loss=0.2789, pruned_loss=0.04332, over 4731.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2827, pruned_loss=0.05466, over 934784.02 frames. 
], batch size: 14, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:32:59,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=143765.33333333334, ans=0.125 +2024-07-28 10:33:05,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=143778.66666666666, ans=0.125 +2024-07-28 10:33:08,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=143778.66666666666, ans=0.2 +2024-07-28 10:33:09,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143778.66666666666, ans=0.1 +2024-07-28 10:33:13,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=143792.0, ans=0.125 +2024-07-28 10:33:34,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=143805.33333333334, ans=0.0 +2024-07-28 10:33:43,359 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.077e+01 6.890e+01 8.236e+01 1.387e+02, threshold=1.378e+02, percent-clipped=1.0 +2024-07-28 10:33:44,775 INFO [train.py:1114] (0/4) Epoch 11, batch 5650, loss[loss=0.2292, simple_loss=0.3188, pruned_loss=0.06986, over 4419.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2834, pruned_loss=0.05493, over 937100.95 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:33:51,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143845.33333333334, ans=0.125 +2024-07-28 10:33:58,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=143845.33333333334, ans=0.2 +2024-07-28 10:34:00,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=143858.66666666666, ans=0.2 +2024-07-28 10:34:05,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143858.66666666666, ans=0.125 +2024-07-28 10:34:13,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=143872.0, ans=0.125 +2024-07-28 10:34:13,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=143885.33333333334, ans=0.2 +2024-07-28 10:34:14,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.59 vs. limit=15.0 +2024-07-28 10:34:16,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=143885.33333333334, ans=0.0 +2024-07-28 10:34:21,287 INFO [train.py:1114] (0/4) Epoch 11, batch 5700, loss[loss=0.2193, simple_loss=0.3046, pruned_loss=0.06698, over 4694.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2841, pruned_loss=0.05512, over 938069.11 frames. 
], batch size: 13, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:34:22,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=143898.66666666666, ans=0.125 +2024-07-28 10:34:23,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=143898.66666666666, ans=0.07 +2024-07-28 10:34:25,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143898.66666666666, ans=0.1 +2024-07-28 10:34:55,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=143965.33333333334, ans=0.0 +2024-07-28 10:34:56,152 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.558e+01 6.017e+01 6.629e+01 9.464e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 10:34:56,185 INFO [train.py:1114] (0/4) Epoch 11, batch 5750, loss[loss=0.2244, simple_loss=0.3223, pruned_loss=0.06324, over 4730.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2849, pruned_loss=0.05532, over 938022.31 frames. ], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:35:03,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=143978.66666666666, ans=0.125 +2024-07-28 10:35:14,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=143992.0, ans=0.0 +2024-07-28 10:35:16,988 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-108000.pt +2024-07-28 10:35:24,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=144005.33333333334, ans=0.125 +2024-07-28 10:35:25,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=144005.33333333334, ans=10.0 +2024-07-28 10:35:25,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144005.33333333334, ans=0.125 +2024-07-28 10:35:37,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0 +2024-07-28 10:35:37,647 INFO [train.py:1114] (0/4) Epoch 11, batch 5800, loss[loss=0.2046, simple_loss=0.2843, pruned_loss=0.06244, over 4717.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2857, pruned_loss=0.05553, over 937222.57 frames. ], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:35:42,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=144032.0, ans=0.0 +2024-07-28 10:36:07,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.54 vs. 
limit=15.0 +2024-07-28 10:36:11,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=144098.66666666666, ans=0.0 +2024-07-28 10:36:11,529 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.791e+01 6.490e+01 7.663e+01 1.100e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 10:36:11,562 INFO [train.py:1114] (0/4) Epoch 11, batch 5850, loss[loss=0.2121, simple_loss=0.2927, pruned_loss=0.06577, over 4410.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.286, pruned_loss=0.05546, over 937968.72 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:36:18,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.21 vs. limit=10.0 +2024-07-28 10:36:35,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=144138.66666666666, ans=0.125 +2024-07-28 10:36:44,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=144165.33333333334, ans=0.125 +2024-07-28 10:36:45,268 INFO [train.py:1114] (0/4) Epoch 11, batch 5900, loss[loss=0.2086, simple_loss=0.2966, pruned_loss=0.06032, over 4689.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2844, pruned_loss=0.05535, over 938443.15 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:36:52,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=144165.33333333334, ans=0.0 +2024-07-28 10:36:56,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.03 vs. limit=22.5 +2024-07-28 10:37:29,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144205.33333333334, ans=0.125 +2024-07-28 10:37:34,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=144218.66666666666, ans=0.1 +2024-07-28 10:37:40,539 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.784e+01 6.286e+01 7.070e+01 1.230e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 10:37:40,587 INFO [train.py:1114] (0/4) Epoch 11, batch 5950, loss[loss=0.2212, simple_loss=0.3005, pruned_loss=0.07089, over 4682.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2839, pruned_loss=0.05495, over 940618.47 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:37:55,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=144245.33333333334, ans=0.025 +2024-07-28 10:38:25,047 INFO [train.py:1114] (0/4) Epoch 11, batch 6000, loss[loss=0.2309, simple_loss=0.3133, pruned_loss=0.07428, over 4246.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2826, pruned_loss=0.05467, over 937356.06 frames. ], batch size: 25, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:38:25,048 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 10:39:08,754 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1692, simple_loss=0.2732, pruned_loss=0.03262, over 944034.00 frames. 
+2024-07-28 10:39:08,754 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 10:39:13,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=144298.66666666666, ans=0.0 +2024-07-28 10:39:13,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.09 vs. limit=15.0 +2024-07-28 10:39:19,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0 +2024-07-28 10:39:26,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=144325.33333333334, ans=0.125 +2024-07-28 10:39:26,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=144325.33333333334, ans=0.125 +2024-07-28 10:39:30,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=144338.66666666666, ans=0.2 +2024-07-28 10:39:35,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=144352.0, ans=0.125 +2024-07-28 10:39:43,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144352.0, ans=0.1 +2024-07-28 10:39:44,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.41 vs. limit=22.5 +2024-07-28 10:39:45,806 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.724e+01 6.626e+01 8.238e+01 1.220e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 10:39:45,840 INFO [train.py:1114] (0/4) Epoch 11, batch 6050, loss[loss=0.1601, simple_loss=0.2441, pruned_loss=0.03802, over 4771.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2823, pruned_loss=0.05462, over 938530.40 frames. ], batch size: 12, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:39:48,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0 +2024-07-28 10:39:53,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144378.66666666666, ans=0.125 +2024-07-28 10:40:03,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=144378.66666666666, ans=0.0 +2024-07-28 10:40:05,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=144392.0, ans=0.125 +2024-07-28 10:40:10,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.23 vs. limit=15.0 +2024-07-28 10:40:11,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=144392.0, ans=0.2 +2024-07-28 10:40:17,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.59 vs. 
limit=22.5 +2024-07-28 10:40:20,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=144418.66666666666, ans=0.125 +2024-07-28 10:40:28,017 INFO [train.py:1114] (0/4) Epoch 11, batch 6100, loss[loss=0.1952, simple_loss=0.2996, pruned_loss=0.04544, over 4687.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2811, pruned_loss=0.054, over 938024.61 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:40:28,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144432.0, ans=0.1 +2024-07-28 10:40:34,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.02 vs. limit=15.0 +2024-07-28 10:40:36,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.08 vs. limit=15.0 +2024-07-28 10:40:45,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=144458.66666666666, ans=0.125 +2024-07-28 10:40:46,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.78 vs. limit=15.0 +2024-07-28 10:40:47,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=144472.0, ans=0.05 +2024-07-28 10:40:49,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. limit=10.0 +2024-07-28 10:41:01,439 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.304e+01 5.350e+01 6.027e+01 7.047e+01 1.301e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 10:41:01,487 INFO [train.py:1114] (0/4) Epoch 11, batch 6150, loss[loss=0.2618, simple_loss=0.3263, pruned_loss=0.09866, over 3525.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2817, pruned_loss=0.05383, over 936747.80 frames. ], batch size: 35, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:41:17,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144512.0, ans=0.125 +2024-07-28 10:41:38,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=144552.0, ans=0.125 +2024-07-28 10:41:39,943 INFO [train.py:1114] (0/4) Epoch 11, batch 6200, loss[loss=0.1717, simple_loss=0.2586, pruned_loss=0.04234, over 4739.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2827, pruned_loss=0.0544, over 936534.57 frames. 
], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:41:51,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=144578.66666666666, ans=0.0 +2024-07-28 10:42:01,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=144605.33333333334, ans=0.125 +2024-07-28 10:42:06,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=144605.33333333334, ans=0.125 +2024-07-28 10:42:15,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.697e+01 6.150e+01 7.002e+01 1.067e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:42:15,912 INFO [train.py:1114] (0/4) Epoch 11, batch 6250, loss[loss=0.1973, simple_loss=0.2872, pruned_loss=0.05373, over 4806.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2834, pruned_loss=0.05494, over 932859.72 frames. ], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:25,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=144645.33333333334, ans=0.125 +2024-07-28 10:42:29,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=144658.66666666666, ans=0.2 +2024-07-28 10:42:29,657 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.97 vs. limit=6.0 +2024-07-28 10:42:39,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=144658.66666666666, ans=0.125 +2024-07-28 10:42:46,617 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=15.0 +2024-07-28 10:42:47,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=144685.33333333334, ans=0.0 +2024-07-28 10:42:51,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=144685.33333333334, ans=0.125 +2024-07-28 10:42:58,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=144698.66666666666, ans=0.125 +2024-07-28 10:42:58,533 INFO [train.py:1114] (0/4) Epoch 11, batch 6300, loss[loss=0.1552, simple_loss=0.2526, pruned_loss=0.02896, over 4515.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2837, pruned_loss=0.05505, over 929790.45 frames. 
], batch size: 10, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:43:05,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=144698.66666666666, ans=0.125 +2024-07-28 10:43:05,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144698.66666666666, ans=0.0 +2024-07-28 10:43:12,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=144712.0, ans=0.025 +2024-07-28 10:43:13,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=144725.33333333334, ans=0.125 +2024-07-28 10:43:21,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=144738.66666666666, ans=0.0 +2024-07-28 10:43:28,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=144752.0, ans=0.0 +2024-07-28 10:43:30,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=144752.0, ans=0.0 +2024-07-28 10:43:36,752 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.336e+01 5.612e+01 6.120e+01 6.711e+01 9.743e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 10:43:36,800 INFO [train.py:1114] (0/4) Epoch 11, batch 6350, loss[loss=0.2158, simple_loss=0.3013, pruned_loss=0.06515, over 4540.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2823, pruned_loss=0.05405, over 933699.44 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:43:39,745 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:43:41,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=144765.33333333334, ans=0.125 +2024-07-28 10:43:41,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=144765.33333333334, ans=0.125 +2024-07-28 10:43:46,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=144778.66666666666, ans=0.125 +2024-07-28 10:44:01,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=144805.33333333334, ans=0.125 +2024-07-28 10:44:01,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=144805.33333333334, ans=0.125 +2024-07-28 10:44:05,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=15.0 +2024-07-28 10:44:06,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144818.66666666666, ans=0.0 +2024-07-28 10:44:11,945 INFO [train.py:1114] (0/4) Epoch 11, batch 6400, loss[loss=0.2031, simple_loss=0.2995, pruned_loss=0.05341, over 4636.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2828, pruned_loss=0.05467, over 935457.20 frames. 
], batch size: 13, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:18,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=144845.33333333334, ans=0.125 +2024-07-28 10:44:44,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=144898.66666666666, ans=0.125 +2024-07-28 10:44:45,106 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.883e+01 6.533e+01 7.974e+01 1.055e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 10:44:45,155 INFO [train.py:1114] (0/4) Epoch 11, batch 6450, loss[loss=0.1914, simple_loss=0.2702, pruned_loss=0.05627, over 4613.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.284, pruned_loss=0.05483, over 939062.56 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:46,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=144898.66666666666, ans=0.125 +2024-07-28 10:45:03,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=144925.33333333334, ans=0.125 +2024-07-28 10:45:08,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=144938.66666666666, ans=0.125 +2024-07-28 10:45:18,220 INFO [train.py:1114] (0/4) Epoch 11, batch 6500, loss[loss=0.2617, simple_loss=0.3422, pruned_loss=0.09062, over 3650.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2827, pruned_loss=0.05433, over 940422.37 frames. ], batch size: 37, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:45:22,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.37 vs. limit=15.0 +2024-07-28 10:45:23,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144965.33333333334, ans=0.125 +2024-07-28 10:45:28,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.88 vs. limit=15.0 +2024-07-28 10:45:34,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144992.0, ans=0.125 +2024-07-28 10:45:35,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144992.0, ans=0.0 +2024-07-28 10:45:43,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=145005.33333333334, ans=0.125 +2024-07-28 10:45:51,639 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.465e+01 5.571e+01 6.263e+01 7.370e+01 1.165e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 10:45:51,672 INFO [train.py:1114] (0/4) Epoch 11, batch 6550, loss[loss=0.1815, simple_loss=0.2546, pruned_loss=0.05426, over 4793.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2827, pruned_loss=0.05449, over 943286.67 frames. 
], batch size: 11, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:45:53,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=145032.0, ans=0.0 +2024-07-28 10:45:56,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=145032.0, ans=0.0 +2024-07-28 10:46:11,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=145058.66666666666, ans=0.025 +2024-07-28 10:46:13,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=145072.0, ans=0.125 +2024-07-28 10:46:17,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=145072.0, ans=0.025 +2024-07-28 10:46:21,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145085.33333333334, ans=0.125 +2024-07-28 10:46:25,932 INFO [train.py:1114] (0/4) Epoch 11, batch 6600, loss[loss=0.2138, simple_loss=0.3091, pruned_loss=0.05919, over 4934.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2833, pruned_loss=0.05446, over 945083.86 frames. ], batch size: 14, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:46:34,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=145112.0, ans=0.125 +2024-07-28 10:46:35,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=145112.0, ans=0.0 +2024-07-28 10:46:36,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145112.0, ans=0.1 +2024-07-28 10:46:44,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=145125.33333333334, ans=0.125 +2024-07-28 10:46:50,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=145138.66666666666, ans=0.125 +2024-07-28 10:46:52,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=145152.0, ans=0.0 +2024-07-28 10:46:56,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=145152.0, ans=0.125 +2024-07-28 10:46:59,239 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.651e+01 6.150e+01 6.851e+01 8.170e+01 1.263e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-28 10:46:59,287 INFO [train.py:1114] (0/4) Epoch 11, batch 6650, loss[loss=0.2416, simple_loss=0.3243, pruned_loss=0.07946, over 4612.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2827, pruned_loss=0.05461, over 943975.32 frames. ], batch size: 17, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:47:05,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=145165.33333333334, ans=0.125 +2024-07-28 10:47:08,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=145178.66666666666, ans=0.125 +2024-07-28 10:47:31,689 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.69 vs. 
limit=15.0 +2024-07-28 10:47:40,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=145218.66666666666, ans=0.07 +2024-07-28 10:47:41,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=145218.66666666666, ans=0.0 +2024-07-28 10:47:42,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=145218.66666666666, ans=0.0 +2024-07-28 10:47:43,302 INFO [train.py:1114] (0/4) Epoch 11, batch 6700, loss[loss=0.1961, simple_loss=0.2904, pruned_loss=0.05092, over 4689.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2833, pruned_loss=0.05486, over 942322.31 frames. ], batch size: 19, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:48:00,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=145258.66666666666, ans=0.125 +2024-07-28 10:48:03,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=145272.0, ans=0.125 +2024-07-28 10:48:15,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145285.33333333334, ans=0.125 +2024-07-28 10:48:16,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=145285.33333333334, ans=0.125 +2024-07-28 10:48:21,936 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+01 5.801e+01 6.276e+01 7.380e+01 1.183e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 10:48:21,970 INFO [train.py:1114] (0/4) Epoch 11, batch 6750, loss[loss=0.2491, simple_loss=0.3264, pruned_loss=0.08592, over 4312.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2836, pruned_loss=0.05478, over 940691.18 frames. ], batch size: 26, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:48:34,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-07-28 10:48:54,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=145352.0, ans=0.0 +2024-07-28 10:49:01,862 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-07-28 10:49:06,061 INFO [train.py:1114] (0/4) Epoch 11, batch 6800, loss[loss=0.2254, simple_loss=0.3098, pruned_loss=0.07054, over 4633.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2841, pruned_loss=0.0544, over 939062.54 frames. ], batch size: 13, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:49:06,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145365.33333333334, ans=0.1 +2024-07-28 10:49:08,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=145365.33333333334, ans=0.0 +2024-07-28 10:49:15,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.79 vs. 
limit=15.0 +2024-07-28 10:49:24,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=145392.0, ans=0.0 +2024-07-28 10:49:38,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=145418.66666666666, ans=0.2 +2024-07-28 10:49:43,880 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.530e+01 6.115e+01 7.020e+01 1.132e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 10:49:43,913 INFO [train.py:1114] (0/4) Epoch 11, batch 6850, loss[loss=0.1808, simple_loss=0.2776, pruned_loss=0.042, over 4701.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2827, pruned_loss=0.05356, over 940612.17 frames. ], batch size: 13, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:49:44,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=145432.0, ans=0.125 +2024-07-28 10:49:48,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=145432.0, ans=0.0 +2024-07-28 10:49:59,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=145445.33333333334, ans=0.07 +2024-07-28 10:50:03,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=145458.66666666666, ans=0.2 +2024-07-28 10:50:22,660 INFO [train.py:1114] (0/4) Epoch 11, batch 6900, loss[loss=0.2016, simple_loss=0.2926, pruned_loss=0.05529, over 4976.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2825, pruned_loss=0.05357, over 942696.15 frames. ], batch size: 13, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:50:32,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=145512.0, ans=0.0 +2024-07-28 10:50:39,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=145525.33333333334, ans=0.0 +2024-07-28 10:50:50,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=145552.0, ans=0.2 +2024-07-28 10:50:57,130 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+01 5.560e+01 6.281e+01 7.160e+01 1.002e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 10:50:57,163 INFO [train.py:1114] (0/4) Epoch 11, batch 6950, loss[loss=0.1678, simple_loss=0.2477, pruned_loss=0.04391, over 4495.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2822, pruned_loss=0.05343, over 939757.67 frames. ], batch size: 10, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:50:59,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.32 vs. 
limit=15.0 +2024-07-28 10:51:04,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=145578.66666666666, ans=0.125 +2024-07-28 10:51:06,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145578.66666666666, ans=0.1 +2024-07-28 10:51:18,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=145605.33333333334, ans=0.2 +2024-07-28 10:51:26,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=145618.66666666666, ans=0.125 +2024-07-28 10:51:27,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=145618.66666666666, ans=0.0 +2024-07-28 10:51:31,243 INFO [train.py:1114] (0/4) Epoch 11, batch 7000, loss[loss=0.2141, simple_loss=0.3033, pruned_loss=0.06246, over 4601.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2818, pruned_loss=0.05335, over 938496.43 frames. ], batch size: 17, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:51:33,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=145632.0, ans=0.025 +2024-07-28 10:51:41,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=145645.33333333334, ans=0.025 +2024-07-28 10:51:47,757 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:52:03,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.703e+01 6.345e+01 7.288e+01 1.132e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 10:52:03,942 INFO [train.py:1114] (0/4) Epoch 11, batch 7050, loss[loss=0.2243, simple_loss=0.3142, pruned_loss=0.06724, over 4739.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2819, pruned_loss=0.05309, over 941953.96 frames. ], batch size: 19, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:52:05,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=145698.66666666666, ans=0.09899494936611666 +2024-07-28 10:52:07,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=145698.66666666666, ans=0.0 +2024-07-28 10:52:09,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0 +2024-07-28 10:52:16,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=145712.0, ans=0.125 +2024-07-28 10:52:35,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=145752.0, ans=0.025 +2024-07-28 10:52:38,906 INFO [train.py:1114] (0/4) Epoch 11, batch 7100, loss[loss=0.2182, simple_loss=0.3215, pruned_loss=0.05745, over 4804.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.283, pruned_loss=0.05398, over 936931.65 frames. 
], batch size: 15, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:52:39,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145765.33333333334, ans=0.1 +2024-07-28 10:52:40,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145765.33333333334, ans=0.1 +2024-07-28 10:52:52,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=145792.0, ans=0.125 +2024-07-28 10:53:04,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=145818.66666666666, ans=0.05 +2024-07-28 10:53:06,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0 +2024-07-28 10:53:11,357 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.413e+01 6.227e+01 7.503e+01 1.030e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 10:53:11,391 INFO [train.py:1114] (0/4) Epoch 11, batch 7150, loss[loss=0.2055, simple_loss=0.2976, pruned_loss=0.05675, over 4462.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.281, pruned_loss=0.05282, over 937944.50 frames. ], batch size: 21, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:53:23,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=145845.33333333334, ans=0.125 +2024-07-28 10:53:28,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=145858.66666666666, ans=0.0 +2024-07-28 10:53:32,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=145872.0, ans=0.04949747468305833 +2024-07-28 10:53:44,220 INFO [train.py:1114] (0/4) Epoch 11, batch 7200, loss[loss=0.2061, simple_loss=0.2921, pruned_loss=0.06005, over 4790.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2831, pruned_loss=0.05357, over 938465.44 frames. ], batch size: 15, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:53:51,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=145912.0, ans=0.125 +2024-07-28 10:54:02,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145925.33333333334, ans=0.125 +2024-07-28 10:54:04,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145938.66666666666, ans=0.1 +2024-07-28 10:54:06,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.73 vs. 
limit=12.0 +2024-07-28 10:54:09,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=145938.66666666666, ans=0.125 +2024-07-28 10:54:13,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=145952.0, ans=0.1 +2024-07-28 10:54:15,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=145952.0, ans=0.125 +2024-07-28 10:54:23,288 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.502e+01 5.961e+01 6.542e+01 9.167e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 10:54:23,322 INFO [train.py:1114] (0/4) Epoch 11, batch 7250, loss[loss=0.1929, simple_loss=0.2677, pruned_loss=0.05904, over 4869.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2825, pruned_loss=0.05367, over 940126.26 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:55:12,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=146018.66666666666, ans=0.07 +2024-07-28 10:55:13,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=146032.0, ans=0.0 +2024-07-28 10:55:14,507 INFO [train.py:1114] (0/4) Epoch 11, batch 7300, loss[loss=0.1752, simple_loss=0.2588, pruned_loss=0.04584, over 4847.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2821, pruned_loss=0.05347, over 940440.09 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:55:48,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=146098.66666666666, ans=0.125 +2024-07-28 10:55:49,213 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.651e+01 6.063e+01 6.776e+01 1.053e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 10:55:49,246 INFO [train.py:1114] (0/4) Epoch 11, batch 7350, loss[loss=0.1661, simple_loss=0.2554, pruned_loss=0.03837, over 4631.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.282, pruned_loss=0.05356, over 939587.01 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:56:03,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.88 vs. limit=10.0 +2024-07-28 10:56:16,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146125.33333333334, ans=0.1 +2024-07-28 10:56:20,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=146125.33333333334, ans=0.125 +2024-07-28 10:56:36,378 INFO [train.py:1114] (0/4) Epoch 11, batch 7400, loss[loss=0.2429, simple_loss=0.3429, pruned_loss=0.07146, over 4696.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2823, pruned_loss=0.05358, over 940475.59 frames. ], batch size: 13, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:56:46,803 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.28 vs. 
limit=22.5 +2024-07-28 10:57:11,532 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.675e+01 6.306e+01 7.270e+01 1.053e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 10:57:11,579 INFO [train.py:1114] (0/4) Epoch 11, batch 7450, loss[loss=0.1763, simple_loss=0.2606, pruned_loss=0.04602, over 4612.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2814, pruned_loss=0.05342, over 938014.43 frames. ], batch size: 11, lr: 6.76e-03, grad_scale: 64.0 +2024-07-28 10:57:19,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=146245.33333333334, ans=0.125 +2024-07-28 10:57:19,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146245.33333333334, ans=0.1 +2024-07-28 10:57:25,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.51 vs. limit=15.0 +2024-07-28 10:57:27,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=146258.66666666666, ans=0.0 +2024-07-28 10:57:29,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146258.66666666666, ans=0.125 +2024-07-28 10:57:41,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=146285.33333333334, ans=0.125 +2024-07-28 10:57:45,282 INFO [train.py:1114] (0/4) Epoch 11, batch 7500, loss[loss=0.2374, simple_loss=0.3084, pruned_loss=0.08314, over 3403.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2824, pruned_loss=0.05413, over 936342.98 frames. ], batch size: 35, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:57:52,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=146312.0, ans=0.2 +2024-07-28 10:57:52,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=146312.0, ans=0.0 +2024-07-28 10:57:59,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146312.0, ans=0.125 +2024-07-28 10:58:36,009 INFO [train.py:1114] (0/4) Epoch 11, batch 7550, loss[loss=0.2277, simple_loss=0.3096, pruned_loss=0.07297, over 4597.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2846, pruned_loss=0.05504, over 935854.86 frames. ], batch size: 17, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:58:37,334 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.706e+01 6.227e+01 6.985e+01 1.230e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 10:58:38,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.89 vs. 
limit=15.0 +2024-07-28 10:58:56,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=146392.0, ans=0.2 +2024-07-28 10:59:02,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=146405.33333333334, ans=0.125 +2024-07-28 10:59:06,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=146418.66666666666, ans=0.125 +2024-07-28 10:59:09,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146418.66666666666, ans=0.1 +2024-07-28 10:59:10,209 INFO [train.py:1114] (0/4) Epoch 11, batch 7600, loss[loss=0.1945, simple_loss=0.2823, pruned_loss=0.05336, over 4811.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2835, pruned_loss=0.05431, over 938063.22 frames. ], batch size: 14, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:59:13,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.89 vs. limit=15.0 +2024-07-28 10:59:22,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.44 vs. limit=15.0 +2024-07-28 10:59:33,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=12.0 +2024-07-28 10:59:42,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=146472.0, ans=0.0 +2024-07-28 10:59:45,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=146472.0, ans=0.2 +2024-07-28 10:59:56,750 INFO [train.py:1114] (0/4) Epoch 11, batch 7650, loss[loss=0.1857, simple_loss=0.2653, pruned_loss=0.05305, over 4950.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2829, pruned_loss=0.05404, over 936810.97 frames. ], batch size: 12, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:59:57,329 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.694e+01 6.162e+01 7.312e+01 1.050e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 10:59:59,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=146498.66666666666, ans=0.0 +2024-07-28 11:00:09,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=146512.0, ans=0.125 +2024-07-28 11:00:11,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=146512.0, ans=0.125 +2024-07-28 11:00:19,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=146525.33333333334, ans=0.125 +2024-07-28 11:00:19,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-07-28 11:00:52,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.31 vs. 
limit=22.5 +2024-07-28 11:00:52,404 INFO [train.py:1114] (0/4) Epoch 11, batch 7700, loss[loss=0.1634, simple_loss=0.2555, pruned_loss=0.03563, over 4693.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2835, pruned_loss=0.05423, over 934332.16 frames. ], batch size: 13, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 11:00:56,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=146565.33333333334, ans=0.125 +2024-07-28 11:00:57,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=146565.33333333334, ans=0.025 +2024-07-28 11:00:59,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=146578.66666666666, ans=0.125 +2024-07-28 11:01:04,310 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.63 vs. limit=22.5 +2024-07-28 11:01:09,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=146592.0, ans=0.125 +2024-07-28 11:01:09,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=146592.0, ans=0.125 +2024-07-28 11:01:09,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=146592.0, ans=0.0 +2024-07-28 11:01:12,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=12.0 +2024-07-28 11:01:15,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146605.33333333334, ans=0.125 +2024-07-28 11:01:18,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=146618.66666666666, ans=0.0 +2024-07-28 11:01:20,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.55 vs. limit=15.0 +2024-07-28 11:01:20,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=146618.66666666666, ans=0.125 +2024-07-28 11:01:24,525 INFO [train.py:1114] (0/4) Epoch 11, batch 7750, loss[loss=0.1883, simple_loss=0.2764, pruned_loss=0.05008, over 4931.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2842, pruned_loss=0.05453, over 935716.15 frames. ], batch size: 14, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 11:01:25,065 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.502e+01 5.839e+01 6.536e+01 9.660e+01, threshold=1.168e+02, percent-clipped=0.0 +2024-07-28 11:01:25,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=146632.0, ans=0.0 +2024-07-28 11:01:26,813 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.30 vs. 
limit=15.0 +2024-07-28 11:01:29,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=146632.0, ans=0.0 +2024-07-28 11:01:32,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=146645.33333333334, ans=0.0 +2024-07-28 11:01:46,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=146672.0, ans=0.125 +2024-07-28 11:01:57,059 INFO [train.py:1114] (0/4) Epoch 11, batch 7800, loss[loss=0.2013, simple_loss=0.2942, pruned_loss=0.05427, over 4667.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2839, pruned_loss=0.05432, over 937730.16 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:02:02,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=146712.0, ans=0.95 +2024-07-28 11:02:06,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146712.0, ans=0.125 +2024-07-28 11:02:10,140 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:02:12,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=146725.33333333334, ans=0.2 +2024-07-28 11:02:15,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146738.66666666666, ans=0.0 +2024-07-28 11:02:16,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146738.66666666666, ans=0.125 +2024-07-28 11:02:17,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=146738.66666666666, ans=0.2 +2024-07-28 11:02:18,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=146738.66666666666, ans=0.025 +2024-07-28 11:02:25,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=146752.0, ans=0.125 +2024-07-28 11:02:25,278 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:02:28,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=146752.0, ans=0.2 +2024-07-28 11:02:29,928 INFO [train.py:1114] (0/4) Epoch 11, batch 7850, loss[loss=0.1694, simple_loss=0.2527, pruned_loss=0.04309, over 4588.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2834, pruned_loss=0.05424, over 936744.90 frames. 
], batch size: 10, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:02:30,521 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.817e+01 6.561e+01 7.399e+01 1.277e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 11:02:33,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146765.33333333334, ans=0.1 +2024-07-28 11:02:39,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=146778.66666666666, ans=0.125 +2024-07-28 11:02:42,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146778.66666666666, ans=0.125 +2024-07-28 11:02:46,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.55 vs. limit=10.0 +2024-07-28 11:02:47,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=146792.0, ans=0.125 +2024-07-28 11:03:01,003 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:03:02,642 INFO [train.py:1114] (0/4) Epoch 11, batch 7900, loss[loss=0.1838, simple_loss=0.2756, pruned_loss=0.04595, over 4871.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2843, pruned_loss=0.05506, over 933748.28 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:03:05,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=146832.0, ans=0.015 +2024-07-28 11:03:06,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=146832.0, ans=0.0 +2024-07-28 11:03:26,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=146858.66666666666, ans=0.07 +2024-07-28 11:03:31,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=146872.0, ans=0.0 +2024-07-28 11:03:34,822 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.78 vs. limit=10.0 +2024-07-28 11:03:39,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0 +2024-07-28 11:03:43,573 INFO [train.py:1114] (0/4) Epoch 11, batch 7950, loss[loss=0.2693, simple_loss=0.3473, pruned_loss=0.09566, over 3411.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2842, pruned_loss=0.05473, over 935785.22 frames. 
], batch size: 35, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:03:44,164 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.704e+01 6.229e+01 6.685e+01 9.610e+01, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 11:03:53,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=146912.0, ans=0.125
+2024-07-28 11:04:01,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146925.33333333334, ans=0.125
+2024-07-28 11:04:15,403 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:04:20,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=146952.0, ans=0.2
+2024-07-28 11:04:24,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146965.33333333334, ans=0.1
+2024-07-28 11:04:24,560 INFO [train.py:1114] (0/4) Epoch 11, batch 8000, loss[loss=0.1458, simple_loss=0.2242, pruned_loss=0.03369, over 4634.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2817, pruned_loss=0.05387, over 934643.34 frames. ], batch size: 11, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:04:24,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=146965.33333333334, ans=0.035
+2024-07-28 11:04:29,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0
+2024-07-28 11:04:38,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146992.0, ans=0.1
+2024-07-28 11:04:48,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.65 vs. limit=10.0
+2024-07-28 11:04:49,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=147018.66666666666, ans=0.5
+2024-07-28 11:04:54,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147018.66666666666, ans=0.125
+2024-07-28 11:04:56,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=147032.0, ans=0.0
+2024-07-28 11:04:57,133 INFO [train.py:1114] (0/4) Epoch 11, batch 8050, loss[loss=0.1735, simple_loss=0.2687, pruned_loss=0.03916, over 4814.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2812, pruned_loss=0.05392, over 934236.72 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:04:57,751 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.054e+01 5.507e+01 6.263e+01 7.215e+01 1.111e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 11:05:06,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147045.33333333334, ans=0.1
+2024-07-28 11:05:07,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=147045.33333333334, ans=0.0
+2024-07-28 11:05:28,976 INFO [train.py:1114] (0/4) Epoch 11, batch 8100, loss[loss=0.2199, simple_loss=0.3033, pruned_loss=0.06823, over 4803.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2827, pruned_loss=0.05414, over 934064.94 frames. ], batch size: 15, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:05:41,387 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:06:01,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147125.33333333334, ans=0.1
+2024-07-28 11:06:01,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=147125.33333333334, ans=0.125
+2024-07-28 11:06:02,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=147125.33333333334, ans=0.125
+2024-07-28 11:06:19,699 INFO [train.py:1114] (0/4) Epoch 11, batch 8150, loss[loss=0.2034, simple_loss=0.2883, pruned_loss=0.05925, over 4804.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2824, pruned_loss=0.05443, over 937439.38 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:06:19,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=147165.33333333334, ans=0.125
+2024-07-28 11:06:20,274 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.553e+01 6.182e+01 6.972e+01 1.059e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 11:06:29,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=147178.66666666666, ans=0.0
+2024-07-28 11:06:33,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=147192.0, ans=0.125
+2024-07-28 11:06:41,259 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=15.0
+2024-07-28 11:06:46,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147205.33333333334, ans=0.125
+2024-07-28 11:06:54,225 INFO [train.py:1114] (0/4) Epoch 11, batch 8200, loss[loss=0.2183, simple_loss=0.3057, pruned_loss=0.06544, over 4799.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2837, pruned_loss=0.05467, over 938300.88 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:06:57,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=147232.0, ans=0.125
+2024-07-28 11:06:58,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=147232.0, ans=0.0
+2024-07-28 11:06:59,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=147232.0, ans=0.0
+2024-07-28 11:07:00,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=147245.33333333334, ans=0.0
+2024-07-28 11:07:01,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=147245.33333333334, ans=0.125
+2024-07-28 11:07:20,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=147258.66666666666, ans=0.125
+2024-07-28 11:07:24,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=147272.0, ans=0.2
+2024-07-28 11:07:25,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=147272.0, ans=0.2
+2024-07-28 11:07:32,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=15.0
+2024-07-28 11:07:36,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=147285.33333333334, ans=0.125
+2024-07-28 11:07:38,667 INFO [train.py:1114] (0/4) Epoch 11, batch 8250, loss[loss=0.1603, simple_loss=0.2465, pruned_loss=0.03707, over 4895.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.283, pruned_loss=0.05467, over 938449.55 frames. ], batch size: 13, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:07:39,319 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.575e+01 5.968e+01 7.239e+01 1.462e+02, threshold=1.194e+02, percent-clipped=1.0
+2024-07-28 11:07:40,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=147298.66666666666, ans=0.09899494936611666
+2024-07-28 11:08:02,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147338.66666666666, ans=0.125
+2024-07-28 11:08:09,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=147338.66666666666, ans=0.0
+2024-07-28 11:08:14,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=147352.0, ans=0.0
+2024-07-28 11:08:14,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.07 vs. limit=10.0
+2024-07-28 11:08:17,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=147365.33333333334, ans=0.125
+2024-07-28 11:08:17,920 INFO [train.py:1114] (0/4) Epoch 11, batch 8300, loss[loss=0.2073, simple_loss=0.3012, pruned_loss=0.05671, over 4897.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2838, pruned_loss=0.05506, over 938545.42 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:08:29,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=147378.66666666666, ans=0.025
+2024-07-28 11:08:31,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=147392.0, ans=0.95
+2024-07-28 11:08:44,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.22 vs. limit=15.0
+2024-07-28 11:08:51,046 INFO [train.py:1114] (0/4) Epoch 11, batch 8350, loss[loss=0.2216, simple_loss=0.3146, pruned_loss=0.06425, over 4796.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2824, pruned_loss=0.05433, over 941250.64 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:08:51,650 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.686e+01 6.163e+01 6.949e+01 9.683e+01, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 11:08:58,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=147445.33333333334, ans=0.125
+2024-07-28 11:08:59,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=147445.33333333334, ans=0.125
+2024-07-28 11:09:05,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.10 vs. limit=15.0
+2024-07-28 11:09:09,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147458.66666666666, ans=0.1
+2024-07-28 11:09:16,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=147472.0, ans=0.125
+2024-07-28 11:09:25,412 INFO [train.py:1114] (0/4) Epoch 11, batch 8400, loss[loss=0.1689, simple_loss=0.253, pruned_loss=0.04246, over 4781.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2819, pruned_loss=0.05429, over 939816.87 frames. ], batch size: 12, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:09:27,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0
+2024-07-28 11:09:30,899 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=32.39 vs. limit=22.5
+2024-07-28 11:09:34,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=147512.0, ans=0.125
+2024-07-28 11:09:53,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=147552.0, ans=0.0
+2024-07-28 11:09:56,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=147552.0, ans=0.125
+2024-07-28 11:09:57,683 INFO [train.py:1114] (0/4) Epoch 11, batch 8450, loss[loss=0.21, simple_loss=0.2908, pruned_loss=0.06465, over 4809.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2826, pruned_loss=0.05437, over 938952.43 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:09:58,224 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.711e+01 6.250e+01 7.138e+01 1.059e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-28 11:09:59,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147565.33333333334, ans=0.1
+2024-07-28 11:10:00,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0
+2024-07-28 11:10:12,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=147592.0, ans=0.5
+2024-07-28 11:10:13,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147592.0, ans=0.125
+2024-07-28 11:10:14,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=147592.0, ans=0.1
+2024-07-28 11:10:24,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=147618.66666666666, ans=0.125
+2024-07-28 11:10:25,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.17 vs. limit=15.0
+2024-07-28 11:10:29,747 INFO [train.py:1114] (0/4) Epoch 11, batch 8500, loss[loss=0.1685, simple_loss=0.248, pruned_loss=0.04452, over 4613.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2809, pruned_loss=0.05301, over 938541.70 frames. ], batch size: 11, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:10:33,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147632.0, ans=0.125
+2024-07-28 11:10:33,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.73 vs. limit=10.0
+2024-07-28 11:10:49,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=147672.0, ans=0.2
+2024-07-28 11:10:55,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=147685.33333333334, ans=0.5
+2024-07-28 11:10:58,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.60 vs. limit=6.0
+2024-07-28 11:10:59,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147685.33333333334, ans=0.125
+2024-07-28 11:11:01,924 INFO [train.py:1114] (0/4) Epoch 11, batch 8550, loss[loss=0.1792, simple_loss=0.2643, pruned_loss=0.0471, over 4796.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2807, pruned_loss=0.05319, over 939704.78 frames. ], batch size: 11, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:11:03,205 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.482e+01 7.355e+01 1.079e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 11:11:06,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.50 vs. limit=15.0
+2024-07-28 11:11:08,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=147712.0, ans=0.2
+2024-07-28 11:11:10,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=147712.0, ans=0.125
+2024-07-28 11:11:12,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=147712.0, ans=0.2
+2024-07-28 11:11:14,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147725.33333333334, ans=0.125
+2024-07-28 11:11:15,336 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:11:30,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.56 vs. limit=10.0
+2024-07-28 11:11:34,228 INFO [train.py:1114] (0/4) Epoch 11, batch 8600, loss[loss=0.2014, simple_loss=0.2944, pruned_loss=0.05418, over 4805.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2821, pruned_loss=0.05384, over 939155.56 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:11:37,985 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:11:38,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=147765.33333333334, ans=15.0
+2024-07-28 11:11:43,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=147778.66666666666, ans=0.125
+2024-07-28 11:11:47,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=147792.0, ans=0.125
+2024-07-28 11:11:49,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=147792.0, ans=0.0
+2024-07-28 11:12:02,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=147818.66666666666, ans=0.04949747468305833
+2024-07-28 11:12:05,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0
+2024-07-28 11:12:05,723 INFO [train.py:1114] (0/4) Epoch 11, batch 8650, loss[loss=0.2171, simple_loss=0.308, pruned_loss=0.06305, over 4900.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2813, pruned_loss=0.05309, over 940348.75 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:12:06,994 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.868e+01 5.716e+01 6.623e+01 8.030e+01 1.303e+02, threshold=1.325e+02, percent-clipped=1.0
+2024-07-28 11:12:22,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=147858.66666666666, ans=0.125
+2024-07-28 11:12:39,080 INFO [train.py:1114] (0/4) Epoch 11, batch 8700, loss[loss=0.1649, simple_loss=0.252, pruned_loss=0.03889, over 4761.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2826, pruned_loss=0.05362, over 938248.94 frames. ], batch size: 13, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:13:04,128 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:13:04,174 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:13:04,415 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0
+2024-07-28 11:13:11,400 INFO [train.py:1114] (0/4) Epoch 11, batch 8750, loss[loss=0.1886, simple_loss=0.2725, pruned_loss=0.05238, over 4663.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2824, pruned_loss=0.05392, over 936665.60 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:13:12,645 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.668e+01 6.375e+01 7.547e+01 1.367e+02, threshold=1.275e+02, percent-clipped=1.0
+2024-07-28 11:13:27,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=147992.0, ans=0.0
+2024-07-28 11:13:31,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=148005.33333333334, ans=0.025
+2024-07-28 11:13:51,465 INFO [train.py:1114] (0/4) Epoch 11, batch 8800, loss[loss=0.2189, simple_loss=0.3029, pruned_loss=0.06749, over 4939.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2835, pruned_loss=0.05384, over 937310.04 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:13:57,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148045.33333333334, ans=0.125
+2024-07-28 11:13:57,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0
+2024-07-28 11:14:30,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=148085.33333333334, ans=0.125
+2024-07-28 11:14:32,903 INFO [train.py:1114] (0/4) Epoch 11, batch 8850, loss[loss=0.2241, simple_loss=0.3194, pruned_loss=0.06442, over 4537.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2837, pruned_loss=0.05421, over 932102.33 frames. ], batch size: 21, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:14:34,134 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.683e+01 6.364e+01 7.220e+01 1.136e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-28 11:14:34,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=148098.66666666666, ans=10.0
+2024-07-28 11:14:41,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=148112.0, ans=0.2
+2024-07-28 11:14:46,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=148125.33333333334, ans=0.125
+2024-07-28 11:14:55,583 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=15.0
+2024-07-28 11:14:58,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=148152.0, ans=0.0
+2024-07-28 11:15:05,036 INFO [train.py:1114] (0/4) Epoch 11, batch 8900, loss[loss=0.1578, simple_loss=0.2432, pruned_loss=0.03625, over 4939.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2831, pruned_loss=0.05383, over 930035.62 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:15:15,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=148178.66666666666, ans=0.07
+2024-07-28 11:15:20,092 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.94 vs. limit=10.0
+2024-07-28 11:15:26,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=148205.33333333334, ans=0.125
+2024-07-28 11:15:33,389 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.18 vs. limit=22.5
+2024-07-28 11:15:37,404 INFO [train.py:1114] (0/4) Epoch 11, batch 8950, loss[loss=0.2375, simple_loss=0.3161, pruned_loss=0.07949, over 4553.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.283, pruned_loss=0.05399, over 931287.36 frames. ], batch size: 21, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:15:38,590 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+01 5.642e+01 6.194e+01 7.205e+01 1.181e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 11:15:43,273 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:15:54,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.72 vs. limit=10.0
+2024-07-28 11:15:58,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=148272.0, ans=0.0
+2024-07-28 11:16:01,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=148272.0, ans=0.125
+2024-07-28 11:16:02,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=148272.0, ans=0.125
+2024-07-28 11:16:05,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=148285.33333333334, ans=0.035
+2024-07-28 11:16:06,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=148285.33333333334, ans=0.125
+2024-07-28 11:16:07,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=148285.33333333334, ans=0.0
+2024-07-28 11:16:08,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148298.66666666666, ans=0.1
+2024-07-28 11:16:09,511 INFO [train.py:1114] (0/4) Epoch 11, batch 9000, loss[loss=0.1911, simple_loss=0.2766, pruned_loss=0.05285, over 4643.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2817, pruned_loss=0.05366, over 933946.06 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:16:09,512 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 11:16:15,194 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.9450, 6.5072, 6.7321, 6.5699], device='cuda:0')
+2024-07-28 11:16:15,273 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.5027, 4.7926, 4.8557, 4.8533], device='cuda:0')
+2024-07-28 11:16:18,725 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.2911, 3.0721, 1.7754, 3.3553, 2.6915, 3.0975, 3.3931, 3.2748],
+ device='cuda:0')
+2024-07-28 11:16:21,142 INFO [train.py:1146] (0/4) Epoch 11, validation: loss=0.1703, simple_loss=0.274, pruned_loss=0.03325, over 944034.00 frames.
+2024-07-28 11:16:21,143 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 11:16:29,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=148312.0, ans=0.2
+2024-07-28 11:16:41,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=148338.66666666666, ans=0.025
+2024-07-28 11:16:41,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=148338.66666666666, ans=0.025
+2024-07-28 11:16:46,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5
+2024-07-28 11:16:51,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148352.0, ans=0.1
+2024-07-28 11:16:53,498 INFO [train.py:1114] (0/4) Epoch 11, batch 9050, loss[loss=0.1792, simple_loss=0.2629, pruned_loss=0.04773, over 4531.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2803, pruned_loss=0.0529, over 934225.08 frames. ], batch size: 10, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:16:54,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.677e+01 6.450e+01 7.430e+01 1.132e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-28 11:17:07,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0
+2024-07-28 11:17:14,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=148405.33333333334, ans=0.1
+2024-07-28 11:17:14,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.23 vs. limit=15.0
+2024-07-28 11:17:15,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148405.33333333334, ans=0.125
+2024-07-28 11:17:20,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.75 vs. limit=22.5
+2024-07-28 11:17:21,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=148418.66666666666, ans=0.0
+2024-07-28 11:17:22,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=148418.66666666666, ans=0.125
+2024-07-28 11:17:22,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=148418.66666666666, ans=0.125
+2024-07-28 11:17:23,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=148418.66666666666, ans=0.0
+2024-07-28 11:17:25,679 INFO [train.py:1114] (0/4) Epoch 11, batch 9100, loss[loss=0.2174, simple_loss=0.3138, pruned_loss=0.06053, over 4926.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2804, pruned_loss=0.05285, over 936716.65 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:17:26,384 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:17:35,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=148445.33333333334, ans=0.02
+2024-07-28 11:17:37,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=148445.33333333334, ans=0.125
+2024-07-28 11:17:39,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=148458.66666666666, ans=0.0
+2024-07-28 11:17:41,006 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.19 vs. limit=15.0
+2024-07-28 11:17:43,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0
+2024-07-28 11:17:49,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=148472.0, ans=0.125
+2024-07-28 11:17:54,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148485.33333333334, ans=0.1
+2024-07-28 11:17:57,375 INFO [train.py:1114] (0/4) Epoch 11, batch 9150, loss[loss=0.2187, simple_loss=0.3164, pruned_loss=0.06054, over 4816.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.282, pruned_loss=0.05346, over 935514.08 frames. ], batch size: 14, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:17:58,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.452e+01 6.035e+01 6.657e+01 8.728e+01, threshold=1.207e+02, percent-clipped=0.0
+2024-07-28 11:18:04,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=148512.0, ans=0.0
+2024-07-28 11:18:09,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=148512.0, ans=0.125
+2024-07-28 11:18:12,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0
+2024-07-28 11:18:15,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148525.33333333334, ans=0.125
+2024-07-28 11:18:16,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.96 vs. limit=10.0
+2024-07-28 11:18:17,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.39 vs. limit=15.0
+2024-07-28 11:18:23,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=148538.66666666666, ans=15.0
+2024-07-28 11:18:24,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=148538.66666666666, ans=0.1
+2024-07-28 11:18:30,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=148552.0, ans=0.0
+2024-07-28 11:18:32,074 INFO [train.py:1114] (0/4) Epoch 11, batch 9200, loss[loss=0.159, simple_loss=0.2444, pruned_loss=0.03682, over 4851.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2812, pruned_loss=0.05377, over 937523.69 frames. ], batch size: 12, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:18:33,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-28 11:18:33,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=148565.33333333334, ans=0.025
+2024-07-28 11:18:50,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=148592.0, ans=0.2
+2024-07-28 11:18:52,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=148592.0, ans=0.0
+2024-07-28 11:18:54,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=148592.0, ans=0.0
+2024-07-28 11:18:59,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148605.33333333334, ans=0.1
+2024-07-28 11:19:00,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148605.33333333334, ans=0.125
+2024-07-28 11:19:03,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=148618.66666666666, ans=0.125
+2024-07-28 11:19:04,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=148618.66666666666, ans=0.2
+2024-07-28 11:19:04,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148618.66666666666, ans=0.125
+2024-07-28 11:19:10,189 INFO [train.py:1114] (0/4) Epoch 11, batch 9250, loss[loss=0.1921, simple_loss=0.2879, pruned_loss=0.04821, over 4635.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2812, pruned_loss=0.05339, over 938114.03 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:19:11,552 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.571e+01 5.944e+01 7.071e+01 9.935e+01, threshold=1.189e+02, percent-clipped=0.0
+2024-07-28 11:19:26,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.04 vs. limit=15.0
+2024-07-28 11:19:26,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.71 vs. limit=10.0
+2024-07-28 11:19:33,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148672.0, ans=0.125
+2024-07-28 11:19:37,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=15.0
+2024-07-28 11:19:38,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=148685.33333333334, ans=22.5
+2024-07-28 11:19:42,436 INFO [train.py:1114] (0/4) Epoch 11, batch 9300, loss[loss=0.183, simple_loss=0.2569, pruned_loss=0.05449, over 4784.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2804, pruned_loss=0.05331, over 937682.67 frames. ], batch size: 12, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:19:50,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148712.0, ans=0.1
+2024-07-28 11:19:53,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0
+2024-07-28 11:19:58,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=148725.33333333334, ans=0.125
+2024-07-28 11:20:14,421 INFO [train.py:1114] (0/4) Epoch 11, batch 9350, loss[loss=0.1753, simple_loss=0.2544, pruned_loss=0.04809, over 4806.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2804, pruned_loss=0.05302, over 934877.63 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:20:15,615 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.532e+01 6.030e+01 6.752e+01 9.117e+01, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 11:20:16,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148765.33333333334, ans=0.125
+2024-07-28 11:20:20,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148778.66666666666, ans=0.1
+2024-07-28 11:20:27,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148792.0, ans=0.125
+2024-07-28 11:20:45,893 INFO [train.py:1114] (0/4) Epoch 11, batch 9400, loss[loss=0.1891, simple_loss=0.2833, pruned_loss=0.04742, over 4694.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2806, pruned_loss=0.05309, over 933043.04 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:20:54,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148845.33333333334, ans=0.125
+2024-07-28 11:20:57,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=148858.66666666666, ans=0.125
+2024-07-28 11:20:58,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0
+2024-07-28 11:21:00,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=148858.66666666666, ans=0.025
+2024-07-28 11:21:04,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=148872.0, ans=0.125
+2024-07-28 11:21:13,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=148885.33333333334, ans=0.125
+2024-07-28 11:21:16,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=148885.33333333334, ans=0.125
+2024-07-28 11:21:18,899 INFO [train.py:1114] (0/4) Epoch 11, batch 9450, loss[loss=0.1444, simple_loss=0.2265, pruned_loss=0.03113, over 4807.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2799, pruned_loss=0.05258, over 931985.12 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0
+2024-07-28 11:21:20,123 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.492e+01 5.833e+01 6.605e+01 9.079e+01, threshold=1.167e+02, percent-clipped=0.0
+2024-07-28 11:21:22,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=148898.66666666666, ans=0.125
+2024-07-28 11:21:25,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.07 vs. limit=10.0
+2024-07-28 11:21:28,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.84 vs. limit=22.5
+2024-07-28 11:21:37,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=148938.66666666666, ans=0.125
+2024-07-28 11:21:38,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=148938.66666666666, ans=0.025
+2024-07-28 11:21:38,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=148938.66666666666, ans=0.07
+2024-07-28 11:21:45,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.31 vs. limit=10.0
+2024-07-28 11:21:47,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=148952.0, ans=0.125
+2024-07-28 11:21:50,264 INFO [train.py:1114] (0/4) Epoch 11, batch 9500, loss[loss=0.1732, simple_loss=0.2588, pruned_loss=0.0438, over 4700.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2811, pruned_loss=0.05309, over 934553.16 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0
+2024-07-28 11:21:55,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148965.33333333334, ans=0.125
+2024-07-28 11:21:55,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.79 vs. limit=15.0
+2024-07-28 11:21:57,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=148978.66666666666, ans=0.125
+2024-07-28 11:22:04,044 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0
+2024-07-28 11:22:06,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=148992.0, ans=0.2
+2024-07-28 11:22:09,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149005.33333333334, ans=0.125
+2024-07-28 11:22:16,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149018.66666666666, ans=0.125
+2024-07-28 11:22:20,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=149018.66666666666, ans=0.0
+2024-07-28 11:22:21,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=149018.66666666666, ans=0.0
+2024-07-28 11:22:21,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149018.66666666666, ans=0.125
+2024-07-28 11:22:22,556 INFO [train.py:1114] (0/4) Epoch 11, batch 9550, loss[loss=0.2247, simple_loss=0.2949, pruned_loss=0.07724, over 4778.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2815, pruned_loss=0.05364, over 931926.77 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0
+2024-07-28 11:22:23,735 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.584e+01 6.073e+01 6.801e+01 9.660e+01, threshold=1.215e+02, percent-clipped=0.0
+2024-07-28 11:22:31,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.24 vs. limit=15.0
+2024-07-28 11:22:36,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=149058.66666666666, ans=0.125
+2024-07-28 11:22:46,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=15.0
+2024-07-28 11:22:53,755 INFO [train.py:1114] (0/4) Epoch 11, batch 9600, loss[loss=0.2442, simple_loss=0.3063, pruned_loss=0.09103, over 3285.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2832, pruned_loss=0.05444, over 930818.63 frames. ], batch size: 35, lr: 6.69e-03, grad_scale: 32.0
+2024-07-28 11:23:01,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.50 vs. limit=15.0
+2024-07-28 11:23:06,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0
+2024-07-28 11:23:07,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.80 vs. limit=15.0
+2024-07-28 11:23:08,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=149125.33333333334, ans=0.0
+2024-07-28 11:23:11,543 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:23:11,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=149125.33333333334, ans=0.125
+2024-07-28 11:23:17,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=149138.66666666666, ans=0.0
+2024-07-28 11:23:23,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=149152.0, ans=0.125
+2024-07-28 11:23:25,444 INFO [train.py:1114] (0/4) Epoch 11, batch 9650, loss[loss=0.2075, simple_loss=0.2974, pruned_loss=0.05877, over 4852.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2832, pruned_loss=0.0548, over 926740.07 frames. ], batch size: 16, lr: 6.69e-03, grad_scale: 32.0
+2024-07-28 11:23:26,661 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.812e+01 6.472e+01 7.420e+01 1.092e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-28 11:23:26,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=149165.33333333334, ans=0.0
+2024-07-28 11:23:29,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=149165.33333333334, ans=10.0
+2024-07-28 11:23:32,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149178.66666666666, ans=0.125
+2024-07-28 11:23:34,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149178.66666666666, ans=0.0
+2024-07-28 11:23:36,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=149178.66666666666, ans=0.125
+2024-07-28 11:23:48,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=12.0
+2024-07-28 11:24:08,278 INFO [train.py:1114] (0/4) Epoch 11, batch 9700, loss[loss=0.2185, simple_loss=0.3101, pruned_loss=0.0634, over 4229.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2838, pruned_loss=0.05498, over 925340.30 frames. ], batch size: 25, lr: 6.69e-03, grad_scale: 32.0
+2024-07-28 11:24:10,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=149232.0, ans=0.2
+2024-07-28 11:24:10,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149232.0, ans=0.1
+2024-07-28 11:24:33,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149285.33333333334, ans=0.1
+2024-07-28 11:24:39,672 INFO [train.py:1114] (0/4) Epoch 11, batch 9750, loss[loss=0.1975, simple_loss=0.2901, pruned_loss=0.05246, over 4695.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2827, pruned_loss=0.05454, over 925462.41 frames. ], batch size: 15, lr: 6.69e-03, grad_scale: 32.0
+2024-07-28 11:24:40,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=149298.66666666666, ans=0.2
+2024-07-28 11:24:40,895 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.600e+01 6.430e+01 7.398e+01 1.191e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 11:24:47,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=149312.0, ans=0.07
+2024-07-28 11:24:55,958 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-112000.pt
+2024-07-28 11:25:00,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=149338.66666666666, ans=0.09899494936611666
+2024-07-28 11:25:02,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=149338.66666666666, ans=0.1
+2024-07-28 11:25:07,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149352.0, ans=0.1
+2024-07-28 11:25:13,061 INFO [train.py:1114] (0/4) Epoch 11, batch 9800, loss[loss=0.1647, simple_loss=0.2524, pruned_loss=0.0385, over 4708.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2828, pruned_loss=0.05485, over 925259.09 frames. ], batch size: 12, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:25:16,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149365.33333333334, ans=0.125
+2024-07-28 11:25:16,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=149365.33333333334, ans=0.125
+2024-07-28 11:25:17,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0
+2024-07-28 11:25:23,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=12.0
+2024-07-28 11:25:24,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=149378.66666666666, ans=0.125
+2024-07-28 11:25:30,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149392.0, ans=0.125
+2024-07-28 11:25:30,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=149405.33333333334, ans=0.2
+2024-07-28 11:25:33,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149405.33333333334, ans=0.125
+2024-07-28 11:25:34,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149405.33333333334, ans=0.1
+2024-07-28 11:25:37,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.77 vs. limit=15.0
+2024-07-28 11:25:38,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=149418.66666666666, ans=0.025
+2024-07-28 11:25:43,724 INFO [train.py:1114] (0/4) Epoch 11, batch 9850, loss[loss=0.2084, simple_loss=0.2874, pruned_loss=0.06473, over 4907.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2828, pruned_loss=0.05485, over 927715.62 frames. ], batch size: 15, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:25:44,880 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.760e+01 6.754e+01 7.559e+01 1.117e+02, threshold=1.351e+02, percent-clipped=0.0
+2024-07-28 11:25:48,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=149432.0, ans=0.2
+2024-07-28 11:25:58,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=149458.66666666666, ans=0.0
+2024-07-28 11:26:00,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=149458.66666666666, ans=0.0
+2024-07-28 11:26:02,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149472.0, ans=0.125
+2024-07-28 11:26:14,539 INFO [train.py:1114] (0/4) Epoch 11, batch 9900, loss[loss=0.2122, simple_loss=0.303, pruned_loss=0.06076, over 4836.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2837, pruned_loss=0.05499, over 927133.85 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:26:15,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.80 vs. limit=22.5
+2024-07-28 11:26:15,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=149498.66666666666, ans=0.0
+2024-07-28 11:26:19,887 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.11 vs. limit=15.0
+2024-07-28 11:26:22,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149512.0, ans=0.125
+2024-07-28 11:26:26,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149525.33333333334, ans=0.125
+2024-07-28 11:26:45,834 INFO [train.py:1114] (0/4) Epoch 11, batch 9950, loss[loss=0.1412, simple_loss=0.2192, pruned_loss=0.03161, over 4808.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2831, pruned_loss=0.05499, over 930048.58 frames. ], batch size: 11, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:26:47,373 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.848e+01 6.460e+01 7.731e+01 1.083e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 11:27:07,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=149605.33333333334, ans=0.2
+2024-07-28 11:27:08,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=149605.33333333334, ans=0.2
+2024-07-28 11:27:12,669 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:27:15,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149618.66666666666, ans=0.1
+2024-07-28 11:27:18,176 INFO [train.py:1114] (0/4) Epoch 11, batch 10000, loss[loss=0.1925, simple_loss=0.2956, pruned_loss=0.04471, over 4634.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2865, pruned_loss=0.05619, over 927128.18 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:27:37,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149672.0, ans=0.1
+2024-07-28 11:27:37,313 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.11 vs. limit=22.5
+2024-07-28 11:27:50,545 INFO [train.py:1114] (0/4) Epoch 11, batch 10050, loss[loss=0.2799, simple_loss=0.3442, pruned_loss=0.1078, over 3364.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2905, pruned_loss=0.05866, over 915372.90 frames. ], batch size: 35, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:27:51,878 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.828e+01 6.328e+01 6.971e+01 1.016e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 11:27:56,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=149698.66666666666, ans=0.125
+2024-07-28 11:28:02,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=149712.0, ans=0.0
+2024-07-28 11:28:13,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=149738.66666666666, ans=0.0
+2024-07-28 11:28:14,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=149738.66666666666, ans=0.125
+2024-07-28 11:28:20,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=149752.0, ans=0.0
+2024-07-28 11:28:22,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.88 vs. limit=15.0
+2024-07-28 11:28:24,934 INFO [train.py:1114] (0/4) Epoch 11, batch 10100, loss[loss=0.2327, simple_loss=0.3088, pruned_loss=0.07835, over 3238.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.296, pruned_loss=0.06387, over 863663.51 frames. ], batch size: 35, lr: 6.68e-03, grad_scale: 32.0
+2024-07-28 11:28:25,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=149765.33333333334, ans=0.125
+2024-07-28 11:28:34,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=149778.66666666666, ans=0.125
+2024-07-28 11:28:35,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=149778.66666666666, ans=0.125
+2024-07-28 11:28:41,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.06 vs. limit=15.0
+2024-07-28 11:28:42,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.86 vs. limit=15.0
+2024-07-28 11:28:46,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149805.33333333334, ans=0.125
+2024-07-28 11:28:54,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149818.66666666666, ans=0.125
+2024-07-28 11:28:57,732 INFO [train.py:1114] (0/4) Epoch 11, batch 10150, loss[loss=0.2401, simple_loss=0.3137, pruned_loss=0.08322, over 3422.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2992, pruned_loss=0.06721, over 822285.57 frames. ], batch size: 35, lr: 6.67e-03, grad_scale: 32.0
+2024-07-28 11:28:58,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.659e+01 6.801e+01 7.178e+01 7.670e+01 2.138e+02, threshold=1.436e+02, percent-clipped=2.0
+2024-07-28 11:29:05,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=149845.33333333334, ans=0.125
+2024-07-28 11:29:11,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149858.66666666666, ans=0.1
+2024-07-28 11:29:12,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=149858.66666666666, ans=0.07
+2024-07-28 11:29:14,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=149858.66666666666, ans=0.07
+2024-07-28 11:29:14,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149858.66666666666, ans=0.125
+2024-07-28 11:29:29,439 INFO [train.py:1114] (0/4) Epoch 11, batch 10200, loss[loss=0.2058, simple_loss=0.2961, pruned_loss=0.0577, over 3403.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3018, pruned_loss=0.07028, over 792063.44 frames. ], batch size: 35, lr: 6.67e-03, grad_scale: 32.0
+2024-07-28 11:29:39,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149912.0, ans=0.1
+2024-07-28 11:29:43,129 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-11.pt
+2024-07-28 11:30:30,084 INFO [train.py:1114] (0/4) Epoch 12, batch 0, loss[loss=0.1577, simple_loss=0.2442, pruned_loss=0.03556, over 4863.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2442, pruned_loss=0.03556, over 4863.00 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0
+2024-07-28 11:30:30,084 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 11:30:35,058 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8487, 5.1855, 5.0767, 5.6314], device='cuda:0')
+2024-07-28 11:30:38,352 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.8408, 4.6937, 4.3213, 4.4810], device='cuda:0')
+2024-07-28 11:30:49,829 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.171, simple_loss=0.2765, pruned_loss=0.03276, over 944034.00 frames.
+2024-07-28 11:30:49,830 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 11:31:22,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=149954.66666666666, ans=0.0
+2024-07-28 11:31:27,531 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 6.296e+01 6.981e+01 7.560e+01 1.062e+02, threshold=1.396e+02, percent-clipped=0.0
+2024-07-28 11:31:30,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.36 vs. limit=12.0
+2024-07-28 11:31:31,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=149968.0, ans=0.0
+2024-07-28 11:31:32,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=149968.0, ans=0.125
+2024-07-28 11:31:35,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149981.33333333334, ans=0.1
+2024-07-28 11:31:39,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=149981.33333333334, ans=0.0
+2024-07-28 11:31:41,004 INFO [train.py:1114] (0/4) Epoch 12, batch 50, loss[loss=0.1898, simple_loss=0.2602, pruned_loss=0.05969, over 4621.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2861, pruned_loss=0.05515, over 205968.76 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0
+2024-07-28 11:31:42,490 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:31:47,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=149994.66666666666, ans=0.0
+2024-07-28 11:31:50,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=150008.0, ans=0.125
+2024-07-28 11:32:01,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.33 vs. limit=10.0
+2024-07-28 11:32:02,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=150021.33333333334, ans=0.0
+2024-07-28 11:32:03,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=150034.66666666666, ans=0.0
+2024-07-28 11:32:14,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.09 vs. limit=10.0
+2024-07-28 11:32:17,182 INFO [train.py:1114] (0/4) Epoch 12, batch 100, loss[loss=0.166, simple_loss=0.2584, pruned_loss=0.03687, over 4640.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2847, pruned_loss=0.05385, over 364727.79 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0
+2024-07-28 11:32:24,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150074.66666666666, ans=0.0
+2024-07-28 11:32:35,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150088.0, ans=0.1
+2024-07-28 11:32:39,037 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+01 5.482e+01 5.996e+01 6.450e+01 1.001e+02, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 11:32:43,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=150101.33333333334, ans=0.0
+2024-07-28 11:32:43,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=150101.33333333334, ans=0.125
+2024-07-28 11:32:51,993 INFO [train.py:1114] (0/4) Epoch 12, batch 150, loss[loss=0.1464, simple_loss=0.2472, pruned_loss=0.02282, over 4617.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2815, pruned_loss=0.05197, over 493681.17 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0
+2024-07-28 11:33:10,847 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:33:13,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=150168.0, ans=0.0
+2024-07-28 11:33:22,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150181.33333333334, ans=0.1
+2024-07-28 11:33:29,116 INFO [train.py:1114] (0/4) Epoch 12, batch 200, loss[loss=0.1924, simple_loss=0.2821, pruned_loss=0.0514, over 4519.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.281, pruned_loss=0.05242, over 593364.03 frames. ], batch size: 21, lr: 6.39e-03, grad_scale: 32.0
+2024-07-28 11:33:29,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=150194.66666666666, ans=0.125
+2024-07-28 11:33:48,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=150234.66666666666, ans=0.0
+2024-07-28 11:33:48,814 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.805e+01 6.723e+01 7.880e+01 1.326e+02, threshold=1.345e+02, percent-clipped=1.0
+2024-07-28 11:34:02,288 INFO [train.py:1114] (0/4) Epoch 12, batch 250, loss[loss=0.2237, simple_loss=0.3243, pruned_loss=0.06157, over 4651.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2808, pruned_loss=0.05248, over 670440.42 frames.
], batch size: 16, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:34:19,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=150288.0, ans=0.125 +2024-07-28 11:34:21,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150288.0, ans=0.1 +2024-07-28 11:34:25,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.16 vs. limit=15.0 +2024-07-28 11:34:28,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150301.33333333334, ans=0.1 +2024-07-28 11:34:28,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=150301.33333333334, ans=0.0 +2024-07-28 11:34:35,857 INFO [train.py:1114] (0/4) Epoch 12, batch 300, loss[loss=0.1981, simple_loss=0.2893, pruned_loss=0.05349, over 4798.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2803, pruned_loss=0.05218, over 729929.77 frames. ], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:34:36,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.42 vs. limit=22.5 +2024-07-28 11:34:37,609 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:34:55,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=150368.0, ans=0.025 +2024-07-28 11:34:56,210 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.570e+01 6.129e+01 6.973e+01 1.064e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 11:35:09,575 INFO [train.py:1114] (0/4) Epoch 12, batch 350, loss[loss=0.1901, simple_loss=0.2686, pruned_loss=0.0558, over 4942.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.281, pruned_loss=0.05244, over 776085.95 frames. ], batch size: 12, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:35:28,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=150434.66666666666, ans=0.035 +2024-07-28 11:35:42,764 INFO [train.py:1114] (0/4) Epoch 12, batch 400, loss[loss=0.1984, simple_loss=0.2991, pruned_loss=0.04885, over 4693.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2809, pruned_loss=0.05246, over 813520.42 frames. ], batch size: 13, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:01,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 11:36:02,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150488.0, ans=0.125 +2024-07-28 11:36:04,837 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.661e+01 6.256e+01 7.189e+01 1.032e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 11:36:13,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=150514.66666666666, ans=0.0 +2024-07-28 11:36:18,206 INFO [train.py:1114] (0/4) Epoch 12, batch 450, loss[loss=0.2018, simple_loss=0.3008, pruned_loss=0.05139, over 4636.00 frames. 
], tot_loss[loss=0.1931, simple_loss=0.281, pruned_loss=0.05262, over 839207.89 frames. ], batch size: 13, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:23,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=150528.0, ans=0.0 +2024-07-28 11:36:41,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=150568.0, ans=0.125 +2024-07-28 11:36:44,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=150581.33333333334, ans=0.025 +2024-07-28 11:36:46,844 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-07-28 11:36:48,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=150581.33333333334, ans=0.2 +2024-07-28 11:36:51,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=150594.66666666666, ans=0.2 +2024-07-28 11:36:51,730 INFO [train.py:1114] (0/4) Epoch 12, batch 500, loss[loss=0.2192, simple_loss=0.3045, pruned_loss=0.06693, over 4666.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2809, pruned_loss=0.05257, over 861859.97 frames. ], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:51,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150594.66666666666, ans=0.125 +2024-07-28 11:37:09,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.54 vs. limit=22.5 +2024-07-28 11:37:12,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.57 vs. limit=15.0 +2024-07-28 11:37:13,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=150634.66666666666, ans=0.0 +2024-07-28 11:37:14,895 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.527e+01 6.124e+01 7.195e+01 1.120e+02, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 11:37:19,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=150634.66666666666, ans=0.125 +2024-07-28 11:37:30,019 INFO [train.py:1114] (0/4) Epoch 12, batch 550, loss[loss=0.1911, simple_loss=0.2608, pruned_loss=0.06067, over 4621.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2808, pruned_loss=0.05271, over 877508.70 frames. 
], batch size: 17, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:37:34,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=150661.33333333334, ans=0.125 +2024-07-28 11:37:34,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=150661.33333333334, ans=0.0 +2024-07-28 11:37:40,225 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:37:41,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=150674.66666666666, ans=0.0 +2024-07-28 11:37:56,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=150714.66666666666, ans=0.0 +2024-07-28 11:37:58,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=150714.66666666666, ans=10.0 +2024-07-28 11:37:59,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=150714.66666666666, ans=0.04949747468305833 +2024-07-28 11:38:08,496 INFO [train.py:1114] (0/4) Epoch 12, batch 600, loss[loss=0.1983, simple_loss=0.2909, pruned_loss=0.05284, over 4612.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.28, pruned_loss=0.05253, over 891871.51 frames. ], batch size: 16, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:38:30,305 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.714e+01 6.286e+01 7.173e+01 1.255e+02, threshold=1.257e+02, percent-clipped=1.0 +2024-07-28 11:38:44,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.29 vs. limit=6.0 +2024-07-28 11:38:49,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150781.33333333334, ans=0.125 +2024-07-28 11:38:53,096 INFO [train.py:1114] (0/4) Epoch 12, batch 650, loss[loss=0.1727, simple_loss=0.2703, pruned_loss=0.0376, over 4748.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2789, pruned_loss=0.05223, over 903668.67 frames. 
], batch size: 13, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:38:53,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150794.66666666666, ans=0.0 +2024-07-28 11:39:00,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150794.66666666666, ans=0.125 +2024-07-28 11:39:16,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=150821.33333333334, ans=0.2 +2024-07-28 11:39:18,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150821.33333333334, ans=0.125 +2024-07-28 11:39:21,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150834.66666666666, ans=0.1 +2024-07-28 11:39:22,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=150834.66666666666, ans=0.125 +2024-07-28 11:39:29,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=150834.66666666666, ans=0.125 +2024-07-28 11:39:29,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=150834.66666666666, ans=0.025 +2024-07-28 11:39:31,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150848.0, ans=0.125 +2024-07-28 11:39:31,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=150848.0, ans=0.125 +2024-07-28 11:39:40,060 INFO [train.py:1114] (0/4) Epoch 12, batch 700, loss[loss=0.1659, simple_loss=0.2564, pruned_loss=0.03767, over 4636.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2795, pruned_loss=0.05223, over 911737.76 frames. 
], batch size: 12, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:39:44,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=150861.33333333334, ans=0.125 +2024-07-28 11:39:44,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=150861.33333333334, ans=0.0 +2024-07-28 11:39:48,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=150874.66666666666, ans=0.07 +2024-07-28 11:39:53,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=150888.0, ans=0.0 +2024-07-28 11:39:53,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150888.0, ans=0.1 +2024-07-28 11:40:00,048 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+01 5.630e+01 6.208e+01 7.148e+01 1.083e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 11:40:03,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=150901.33333333334, ans=0.125 +2024-07-28 11:40:05,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=150901.33333333334, ans=0.2 +2024-07-28 11:40:09,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=150914.66666666666, ans=0.2 +2024-07-28 11:40:13,768 INFO [train.py:1114] (0/4) Epoch 12, batch 750, loss[loss=0.1917, simple_loss=0.2892, pruned_loss=0.04709, over 4688.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2791, pruned_loss=0.05173, over 918487.34 frames. ], batch size: 13, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:40:16,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150928.0, ans=0.1 +2024-07-28 11:40:21,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=150941.33333333334, ans=0.0 +2024-07-28 11:40:28,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.51 vs. limit=12.0 +2024-07-28 11:40:30,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=150954.66666666666, ans=0.125 +2024-07-28 11:40:32,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=150954.66666666666, ans=0.0 +2024-07-28 11:40:45,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150981.33333333334, ans=0.1 +2024-07-28 11:40:47,019 INFO [train.py:1114] (0/4) Epoch 12, batch 800, loss[loss=0.1625, simple_loss=0.2497, pruned_loss=0.03768, over 4854.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2793, pruned_loss=0.05165, over 923852.73 frames. 
], batch size: 12, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:40:47,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=150994.66666666666, ans=0.125 +2024-07-28 11:40:51,851 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:40:55,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=151008.0, ans=0.025 +2024-07-28 11:40:55,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=151008.0, ans=0.0 +2024-07-28 11:41:06,756 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.726e+01 6.208e+01 6.822e+01 1.017e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 11:41:07,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.93 vs. limit=10.0 +2024-07-28 11:41:12,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151034.66666666666, ans=0.125 +2024-07-28 11:41:18,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151048.0, ans=0.1 +2024-07-28 11:41:20,183 INFO [train.py:1114] (0/4) Epoch 12, batch 850, loss[loss=0.1824, simple_loss=0.2758, pruned_loss=0.04449, over 4671.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2793, pruned_loss=0.05183, over 927976.91 frames. ], batch size: 14, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:41:20,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=151061.33333333334, ans=0.125 +2024-07-28 11:41:29,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=151074.66666666666, ans=0.125 +2024-07-28 11:41:35,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=151088.0, ans=0.2 +2024-07-28 11:41:40,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.73 vs. limit=22.5 +2024-07-28 11:41:55,112 INFO [train.py:1114] (0/4) Epoch 12, batch 900, loss[loss=0.1804, simple_loss=0.2597, pruned_loss=0.05056, over 4851.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2785, pruned_loss=0.05158, over 929101.07 frames. 
], batch size: 12, lr: 6.37e-03, grad_scale: 32.0 +2024-07-28 11:42:07,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=151141.33333333334, ans=0.2 +2024-07-28 11:42:08,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151154.66666666666, ans=0.125 +2024-07-28 11:42:15,735 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.655e+01 6.355e+01 7.195e+01 9.950e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 11:42:21,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=151181.33333333334, ans=0.2 +2024-07-28 11:42:24,864 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.77 vs. limit=15.0 +2024-07-28 11:42:25,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=151181.33333333334, ans=0.2 +2024-07-28 11:42:26,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=151181.33333333334, ans=0.125 +2024-07-28 11:42:28,552 INFO [train.py:1114] (0/4) Epoch 12, batch 950, loss[loss=0.1874, simple_loss=0.2753, pruned_loss=0.04976, over 4787.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2793, pruned_loss=0.05182, over 930595.01 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0 +2024-07-28 11:42:28,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=151194.66666666666, ans=0.05 +2024-07-28 11:42:31,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0 +2024-07-28 11:42:45,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=151221.33333333334, ans=0.2 +2024-07-28 11:42:53,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=151234.66666666666, ans=0.125 +2024-07-28 11:43:02,379 INFO [train.py:1114] (0/4) Epoch 12, batch 1000, loss[loss=0.1977, simple_loss=0.2803, pruned_loss=0.05756, over 4965.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2801, pruned_loss=0.05223, over 929922.04 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:43:02,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151261.33333333334, ans=0.125 +2024-07-28 11:43:08,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151274.66666666666, ans=0.1 +2024-07-28 11:43:25,154 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.535e+01 6.224e+01 7.277e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 11:43:32,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151314.66666666666, ans=0.0 +2024-07-28 11:43:37,737 INFO [train.py:1114] (0/4) Epoch 12, batch 1050, loss[loss=0.1829, simple_loss=0.2872, pruned_loss=0.03926, over 4871.00 frames. ], tot_loss[loss=0.192, simple_loss=0.28, pruned_loss=0.05202, over 932290.98 frames. 
], batch size: 14, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:43:45,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=151341.33333333334, ans=0.2 +2024-07-28 11:43:46,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151341.33333333334, ans=0.125 +2024-07-28 11:43:49,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=151341.33333333334, ans=0.0 +2024-07-28 11:43:51,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=151341.33333333334, ans=0.0 +2024-07-28 11:43:51,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=151341.33333333334, ans=0.125 +2024-07-28 11:43:55,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=15.0 +2024-07-28 11:44:00,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=151354.66666666666, ans=0.0 +2024-07-28 11:44:00,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151354.66666666666, ans=0.1 +2024-07-28 11:44:15,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=151381.33333333334, ans=0.0 +2024-07-28 11:44:17,165 INFO [train.py:1114] (0/4) Epoch 12, batch 1100, loss[loss=0.1741, simple_loss=0.2687, pruned_loss=0.03978, over 4897.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.281, pruned_loss=0.05307, over 934644.58 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:44:17,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=151394.66666666666, ans=0.125 +2024-07-28 11:44:21,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=151394.66666666666, ans=0.2 +2024-07-28 11:44:28,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=151408.0, ans=0.0 +2024-07-28 11:44:37,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=151434.66666666666, ans=0.125 +2024-07-28 11:44:38,705 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.539e+01 6.009e+01 6.753e+01 8.123e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 11:44:46,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-07-28 11:44:49,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.80 vs. 
limit=15.0 +2024-07-28 11:44:52,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=151448.0, ans=0.125 +2024-07-28 11:44:53,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151461.33333333334, ans=0.0 +2024-07-28 11:44:53,940 INFO [train.py:1114] (0/4) Epoch 12, batch 1150, loss[loss=0.1913, simple_loss=0.2784, pruned_loss=0.05215, over 4898.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2798, pruned_loss=0.05247, over 934199.58 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:45:00,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151461.33333333334, ans=0.125 +2024-07-28 11:45:01,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0 +2024-07-28 11:45:02,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=151474.66666666666, ans=0.025 +2024-07-28 11:45:03,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=151474.66666666666, ans=0.125 +2024-07-28 11:45:03,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151474.66666666666, ans=0.1 +2024-07-28 11:45:05,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=151474.66666666666, ans=22.5 +2024-07-28 11:45:36,583 INFO [train.py:1114] (0/4) Epoch 12, batch 1200, loss[loss=0.1774, simple_loss=0.2725, pruned_loss=0.04115, over 4866.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2814, pruned_loss=0.05325, over 933350.04 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:45:39,985 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:45:45,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.40 vs. limit=15.0 +2024-07-28 11:45:47,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151541.33333333334, ans=0.1 +2024-07-28 11:45:54,928 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0 +2024-07-28 11:45:57,708 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.539e+01 6.207e+01 7.047e+01 1.080e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 11:46:04,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.57 vs. limit=15.0 +2024-07-28 11:46:08,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151581.33333333334, ans=0.125 +2024-07-28 11:46:10,290 INFO [train.py:1114] (0/4) Epoch 12, batch 1250, loss[loss=0.1994, simple_loss=0.2814, pruned_loss=0.0587, over 4795.00 frames. 
], tot_loss[loss=0.1928, simple_loss=0.2806, pruned_loss=0.05245, over 937397.74 frames. ], batch size: 15, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:46:43,308 INFO [train.py:1114] (0/4) Epoch 12, batch 1300, loss[loss=0.2158, simple_loss=0.3083, pruned_loss=0.06164, over 4737.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2805, pruned_loss=0.05214, over 939073.98 frames. ], batch size: 19, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:47:03,819 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.669e+01 6.218e+01 7.134e+01 9.799e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 11:47:13,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=151714.66666666666, ans=0.0 +2024-07-28 11:47:16,368 INFO [train.py:1114] (0/4) Epoch 12, batch 1350, loss[loss=0.2021, simple_loss=0.2953, pruned_loss=0.05447, over 4763.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2809, pruned_loss=0.05227, over 941196.00 frames. ], batch size: 13, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:47:25,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=151741.33333333334, ans=0.0 +2024-07-28 11:47:31,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=151741.33333333334, ans=0.0 +2024-07-28 11:47:36,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=151754.66666666666, ans=0.0 +2024-07-28 11:47:48,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=151781.33333333334, ans=0.125 +2024-07-28 11:47:54,335 INFO [train.py:1114] (0/4) Epoch 12, batch 1400, loss[loss=0.2047, simple_loss=0.2786, pruned_loss=0.06535, over 4706.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2794, pruned_loss=0.0518, over 943194.17 frames. ], batch size: 11, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:48:02,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=12.0 +2024-07-28 11:48:15,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=151834.66666666666, ans=0.125 +2024-07-28 11:48:16,692 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.713e+01 6.249e+01 7.424e+01 1.107e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 11:48:29,490 INFO [train.py:1114] (0/4) Epoch 12, batch 1450, loss[loss=0.1756, simple_loss=0.2703, pruned_loss=0.04049, over 4677.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2795, pruned_loss=0.05191, over 943534.27 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:48:36,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-07-28 11:48:45,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=151888.0, ans=0.0 +2024-07-28 11:48:49,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=151901.33333333334, ans=0.0 +2024-07-28 11:49:04,475 INFO [train.py:1114] (0/4) Epoch 12, batch 1500, loss[loss=0.1465, simple_loss=0.2533, pruned_loss=0.01982, over 4813.00 frames. 
], tot_loss[loss=0.1915, simple_loss=0.2798, pruned_loss=0.05164, over 942770.68 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:49:04,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=151928.0, ans=0.07 +2024-07-28 11:49:08,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=151928.0, ans=10.0 +2024-07-28 11:49:09,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=151928.0, ans=0.125 +2024-07-28 11:49:10,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=151928.0, ans=0.125 +2024-07-28 11:49:26,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=151954.66666666666, ans=0.125 +2024-07-28 11:49:29,095 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.582e+01 5.945e+01 6.654e+01 9.521e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 11:49:33,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151968.0, ans=0.1 +2024-07-28 11:49:59,646 INFO [train.py:1114] (0/4) Epoch 12, batch 1550, loss[loss=0.1927, simple_loss=0.2779, pruned_loss=0.05376, over 4901.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2798, pruned_loss=0.05184, over 939001.29 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 16.0 +2024-07-28 11:50:06,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=152008.0, ans=0.125 +2024-07-28 11:50:06,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=152008.0, ans=0.0 +2024-07-28 11:50:07,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=152008.0, ans=0.125 +2024-07-28 11:50:12,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152008.0, ans=0.1 +2024-07-28 11:50:22,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152034.66666666666, ans=0.125 +2024-07-28 11:50:30,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=152048.0, ans=0.125 +2024-07-28 11:50:35,399 INFO [train.py:1114] (0/4) Epoch 12, batch 1600, loss[loss=0.18, simple_loss=0.2616, pruned_loss=0.04918, over 4873.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2797, pruned_loss=0.05181, over 937794.37 frames. 
], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:50:46,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=152074.66666666666, ans=0.125 +2024-07-28 11:50:52,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152088.0, ans=0.125 +2024-07-28 11:50:56,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152088.0, ans=0.1 +2024-07-28 11:50:59,244 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.721e+01 6.309e+01 7.092e+01 1.066e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 11:51:04,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=152114.66666666666, ans=0.125 +2024-07-28 11:51:13,730 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=12.0 +2024-07-28 11:51:16,036 INFO [train.py:1114] (0/4) Epoch 12, batch 1650, loss[loss=0.2043, simple_loss=0.2952, pruned_loss=0.05676, over 4669.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2798, pruned_loss=0.05262, over 937401.27 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:51:19,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=152128.0, ans=0.0 +2024-07-28 11:51:46,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=152154.66666666666, ans=0.0 +2024-07-28 11:51:50,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=152154.66666666666, ans=15.0 +2024-07-28 11:51:58,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152181.33333333334, ans=0.125 +2024-07-28 11:51:58,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.24 vs. limit=8.0 +2024-07-28 11:52:05,264 INFO [train.py:1114] (0/4) Epoch 12, batch 1700, loss[loss=0.185, simple_loss=0.2619, pruned_loss=0.05409, over 4695.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2786, pruned_loss=0.05211, over 938934.15 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:52:07,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.whiten.whitening_limit, batch_count=152194.66666666666, ans=15.0 +2024-07-28 11:52:11,014 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-28 11:52:14,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.16 vs. limit=15.0 +2024-07-28 11:52:21,067 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.51 vs. 
limit=22.5 +2024-07-28 11:52:22,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=152221.33333333334, ans=0.125 +2024-07-28 11:52:26,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+01 5.663e+01 6.309e+01 7.408e+01 1.033e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 11:52:33,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=152248.0, ans=0.125 +2024-07-28 11:52:36,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=152248.0, ans=0.0 +2024-07-28 11:52:38,691 INFO [train.py:1114] (0/4) Epoch 12, batch 1750, loss[loss=0.1638, simple_loss=0.2553, pruned_loss=0.03618, over 4786.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2781, pruned_loss=0.05147, over 940060.73 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:52:42,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=152261.33333333334, ans=0.125 +2024-07-28 11:52:43,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-28 11:52:51,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.98 vs. limit=15.0 +2024-07-28 11:53:01,744 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:53:04,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-07-28 11:53:12,209 INFO [train.py:1114] (0/4) Epoch 12, batch 1800, loss[loss=0.2378, simple_loss=0.3206, pruned_loss=0.07744, over 4642.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2782, pruned_loss=0.05184, over 940525.31 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:53:15,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152328.0, ans=0.1 +2024-07-28 11:53:27,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=152354.66666666666, ans=0.0 +2024-07-28 11:53:27,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152354.66666666666, ans=0.125 +2024-07-28 11:53:27,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.96 vs. limit=22.5 +2024-07-28 11:53:35,391 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+01 5.693e+01 6.293e+01 7.294e+01 9.358e+01, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 11:53:37,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152368.0, ans=0.125 +2024-07-28 11:53:49,308 INFO [train.py:1114] (0/4) Epoch 12, batch 1850, loss[loss=0.2082, simple_loss=0.2994, pruned_loss=0.05844, over 4816.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2782, pruned_loss=0.05151, over 940403.22 frames. 
], batch size: 14, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:53:58,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=152408.0, ans=0.0 +2024-07-28 11:54:13,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=152434.66666666666, ans=0.125 +2024-07-28 11:54:23,282 INFO [train.py:1114] (0/4) Epoch 12, batch 1900, loss[loss=0.221, simple_loss=0.3205, pruned_loss=0.06076, over 4663.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2793, pruned_loss=0.05219, over 941535.27 frames. ], batch size: 14, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:54:32,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=152474.66666666666, ans=0.0 +2024-07-28 11:54:37,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=152488.0, ans=0.125 +2024-07-28 11:54:44,565 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.599e+01 6.321e+01 7.441e+01 1.076e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 11:54:50,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152514.66666666666, ans=0.1 +2024-07-28 11:54:52,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152514.66666666666, ans=0.125 +2024-07-28 11:54:53,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=152514.66666666666, ans=0.04949747468305833 +2024-07-28 11:54:55,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152528.0, ans=0.1 +2024-07-28 11:54:56,318 INFO [train.py:1114] (0/4) Epoch 12, batch 1950, loss[loss=0.1736, simple_loss=0.267, pruned_loss=0.04009, over 4890.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2808, pruned_loss=0.05248, over 943498.92 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:55:05,668 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.94 vs. limit=15.0 +2024-07-28 11:55:08,080 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:55:26,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=152581.33333333334, ans=0.2 +2024-07-28 11:55:31,682 INFO [train.py:1114] (0/4) Epoch 12, batch 2000, loss[loss=0.1646, simple_loss=0.2383, pruned_loss=0.04541, over 4804.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2814, pruned_loss=0.05275, over 940268.15 frames. 
], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:55:52,942 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.820e+01 6.521e+01 7.809e+01 1.085e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 11:55:55,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=152634.66666666666, ans=0.125 +2024-07-28 11:55:56,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=152634.66666666666, ans=0.125 +2024-07-28 11:56:03,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=152648.0, ans=0.125 +2024-07-28 11:56:07,052 INFO [train.py:1114] (0/4) Epoch 12, batch 2050, loss[loss=0.1642, simple_loss=0.2519, pruned_loss=0.03824, over 4613.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2799, pruned_loss=0.05233, over 938903.03 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:56:24,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=152688.0, ans=0.2 +2024-07-28 11:56:24,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=152688.0, ans=0.95 +2024-07-28 11:56:26,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152688.0, ans=0.125 +2024-07-28 11:56:35,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=152714.66666666666, ans=0.125 +2024-07-28 11:56:41,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=152728.0, ans=0.0 +2024-07-28 11:56:42,397 INFO [train.py:1114] (0/4) Epoch 12, batch 2100, loss[loss=0.1755, simple_loss=0.2633, pruned_loss=0.0439, over 4752.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2797, pruned_loss=0.05207, over 940738.71 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:56:49,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152741.33333333334, ans=0.1 +2024-07-28 11:57:00,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=152754.66666666666, ans=0.2 +2024-07-28 11:57:03,674 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.175e+01 5.578e+01 6.172e+01 6.931e+01 1.014e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 11:57:12,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=12.0 +2024-07-28 11:57:17,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=152781.33333333334, ans=0.2 +2024-07-28 11:57:19,569 INFO [train.py:1114] (0/4) Epoch 12, batch 2150, loss[loss=0.1887, simple_loss=0.2896, pruned_loss=0.0439, over 4892.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2795, pruned_loss=0.05157, over 943783.85 frames. 
], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:57:19,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152794.66666666666, ans=0.125 +2024-07-28 11:57:20,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.46 vs. limit=22.5 +2024-07-28 11:57:25,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152808.0, ans=0.1 +2024-07-28 11:57:39,106 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:57:45,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=152848.0, ans=0.0 +2024-07-28 11:57:52,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152861.33333333334, ans=0.1 +2024-07-28 11:57:52,755 INFO [train.py:1114] (0/4) Epoch 12, batch 2200, loss[loss=0.2139, simple_loss=0.2994, pruned_loss=0.06418, over 4811.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2794, pruned_loss=0.05144, over 943500.65 frames. ], batch size: 14, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:57:52,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=152861.33333333334, ans=0.0 +2024-07-28 11:57:54,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=152861.33333333334, ans=0.125 +2024-07-28 11:57:56,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=152861.33333333334, ans=0.125 +2024-07-28 11:58:02,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=152874.66666666666, ans=0.025 +2024-07-28 11:58:05,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=152874.66666666666, ans=0.0 +2024-07-28 11:58:05,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=152888.0, ans=0.2 +2024-07-28 11:58:11,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=152888.0, ans=0.025 +2024-07-28 11:58:14,226 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.791e+01 6.232e+01 7.216e+01 1.117e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 11:58:14,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=152901.33333333334, ans=0.125 +2024-07-28 11:58:23,153 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:58:23,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=152914.66666666666, ans=0.125 +2024-07-28 11:58:25,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=152914.66666666666, ans=0.2 +2024-07-28 11:58:27,785 INFO [train.py:1114] (0/4) Epoch 12, batch 2250, loss[loss=0.2047, simple_loss=0.3024, pruned_loss=0.05345, over 4695.00 frames. 
], tot_loss[loss=0.1913, simple_loss=0.2794, pruned_loss=0.05163, over 942063.79 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:58:34,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=152941.33333333334, ans=0.125 +2024-07-28 11:59:01,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152981.33333333334, ans=0.125 +2024-07-28 11:59:03,210 INFO [train.py:1114] (0/4) Epoch 12, batch 2300, loss[loss=0.1835, simple_loss=0.2685, pruned_loss=0.04927, over 4943.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2792, pruned_loss=0.05217, over 939838.26 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:59:19,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.91 vs. limit=10.0 +2024-07-28 11:59:24,486 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.617e+01 6.257e+01 7.219e+01 1.104e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 11:59:24,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=153034.66666666666, ans=0.0 +2024-07-28 11:59:34,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=153048.0, ans=0.2 +2024-07-28 11:59:36,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.52 vs. limit=22.5 +2024-07-28 11:59:36,826 INFO [train.py:1114] (0/4) Epoch 12, batch 2350, loss[loss=0.1818, simple_loss=0.2696, pruned_loss=0.04699, over 4631.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2791, pruned_loss=0.05163, over 941845.86 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:59:41,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=153061.33333333334, ans=0.2 +2024-07-28 11:59:47,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=153074.66666666666, ans=0.125 +2024-07-28 11:59:53,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153088.0, ans=0.125 +2024-07-28 11:59:58,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=153101.33333333334, ans=0.125 +2024-07-28 12:00:01,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=153101.33333333334, ans=0.0 +2024-07-28 12:00:10,575 INFO [train.py:1114] (0/4) Epoch 12, batch 2400, loss[loss=0.1903, simple_loss=0.2817, pruned_loss=0.04938, over 4644.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2795, pruned_loss=0.05159, over 941458.85 frames. 
], batch size: 12, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 12:00:23,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153154.66666666666, ans=0.1 +2024-07-28 12:00:25,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=153154.66666666666, ans=0.125 +2024-07-28 12:00:31,898 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.703e+01 6.200e+01 6.966e+01 9.820e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 12:00:43,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153181.33333333334, ans=0.125 +2024-07-28 12:00:45,550 INFO [train.py:1114] (0/4) Epoch 12, batch 2450, loss[loss=0.1931, simple_loss=0.2902, pruned_loss=0.04797, over 4697.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2805, pruned_loss=0.05254, over 937276.18 frames. ], batch size: 13, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:00:47,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=153194.66666666666, ans=0.07 +2024-07-28 12:00:49,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153194.66666666666, ans=0.1 +2024-07-28 12:00:59,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0 +2024-07-28 12:01:01,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.60 vs. limit=12.0 +2024-07-28 12:01:03,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=153221.33333333334, ans=0.0 +2024-07-28 12:01:09,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=153234.66666666666, ans=0.025 +2024-07-28 12:01:11,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=153248.0, ans=0.125 +2024-07-28 12:01:14,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153248.0, ans=0.125 +2024-07-28 12:01:18,861 INFO [train.py:1114] (0/4) Epoch 12, batch 2500, loss[loss=0.2014, simple_loss=0.2907, pruned_loss=0.0561, over 4809.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2804, pruned_loss=0.0526, over 939532.40 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:01:19,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=153261.33333333334, ans=0.0 +2024-07-28 12:01:20,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.50 vs. 
limit=22.5 +2024-07-28 12:01:21,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=153261.33333333334, ans=10.0 +2024-07-28 12:01:30,430 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:01:39,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=153288.0, ans=0.1 +2024-07-28 12:01:41,949 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.651e+01 6.166e+01 7.013e+01 1.450e+02, threshold=1.233e+02, percent-clipped=1.0 +2024-07-28 12:01:54,208 INFO [train.py:1114] (0/4) Epoch 12, batch 2550, loss[loss=0.168, simple_loss=0.2429, pruned_loss=0.04657, over 4810.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2799, pruned_loss=0.05204, over 938611.66 frames. ], batch size: 11, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:02:09,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-07-28 12:02:17,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.97 vs. limit=22.5 +2024-07-28 12:02:17,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=153368.0, ans=0.125 +2024-07-28 12:02:24,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=153381.33333333334, ans=0.2 +2024-07-28 12:02:26,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=153381.33333333334, ans=0.125 +2024-07-28 12:02:31,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.47 vs. limit=15.0 +2024-07-28 12:02:32,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.83 vs. limit=15.0 +2024-07-28 12:02:32,216 INFO [train.py:1114] (0/4) Epoch 12, batch 2600, loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03946, over 4901.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2803, pruned_loss=0.05213, over 937419.12 frames. ], batch size: 13, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:02:52,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153408.0, ans=0.1 +2024-07-28 12:02:54,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153408.0, ans=0.125 +2024-07-28 12:03:07,396 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.682e+01 6.373e+01 7.145e+01 1.030e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:03:19,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=153448.0, ans=0.0 +2024-07-28 12:03:23,911 INFO [train.py:1114] (0/4) Epoch 12, batch 2650, loss[loss=0.2028, simple_loss=0.2823, pruned_loss=0.06168, over 4638.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2816, pruned_loss=0.05252, over 939861.76 frames. 
], batch size: 16, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:03:33,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=153474.66666666666, ans=0.125 +2024-07-28 12:03:39,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=153488.0, ans=0.2 +2024-07-28 12:03:42,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=153488.0, ans=0.125 +2024-07-28 12:03:43,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153501.33333333334, ans=0.1 +2024-07-28 12:03:48,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=153501.33333333334, ans=0.125 +2024-07-28 12:03:51,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=153514.66666666666, ans=0.5 +2024-07-28 12:03:57,537 INFO [train.py:1114] (0/4) Epoch 12, batch 2700, loss[loss=0.2018, simple_loss=0.2904, pruned_loss=0.05665, over 4735.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2814, pruned_loss=0.05264, over 940059.94 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:04:01,807 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:04:07,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=153541.33333333334, ans=0.2 +2024-07-28 12:04:11,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=153554.66666666666, ans=0.125 +2024-07-28 12:04:22,342 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 5.642e+01 6.087e+01 6.756e+01 9.576e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 12:04:34,301 INFO [train.py:1114] (0/4) Epoch 12, batch 2750, loss[loss=0.1657, simple_loss=0.2534, pruned_loss=0.03907, over 4707.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2797, pruned_loss=0.05187, over 940114.24 frames. ], batch size: 12, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:04:38,947 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.41 vs. limit=15.0 +2024-07-28 12:04:46,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=153608.0, ans=0.025 +2024-07-28 12:04:46,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=153608.0, ans=0.125 +2024-07-28 12:04:48,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=153608.0, ans=0.0 +2024-07-28 12:04:50,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=153621.33333333334, ans=0.125 +2024-07-28 12:04:50,937 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:05:10,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.40 vs. 
limit=10.0 +2024-07-28 12:05:10,509 INFO [train.py:1114] (0/4) Epoch 12, batch 2800, loss[loss=0.3188, simple_loss=0.3825, pruned_loss=0.1276, over 3452.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2808, pruned_loss=0.0526, over 938365.77 frames. ], batch size: 35, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:05:22,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=15.0 +2024-07-28 12:05:31,814 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.464e+01 6.125e+01 7.070e+01 1.105e+02, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:05:37,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=153714.66666666666, ans=0.025 +2024-07-28 12:05:40,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=153714.66666666666, ans=0.025 +2024-07-28 12:05:43,877 INFO [train.py:1114] (0/4) Epoch 12, batch 2850, loss[loss=0.1672, simple_loss=0.2579, pruned_loss=0.03827, over 4974.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2811, pruned_loss=0.05296, over 936577.34 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:06:16,604 INFO [train.py:1114] (0/4) Epoch 12, batch 2900, loss[loss=0.1817, simple_loss=0.2777, pruned_loss=0.0428, over 4827.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2818, pruned_loss=0.05262, over 940128.52 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:06:23,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-28 12:06:26,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.62 vs. limit=22.5 +2024-07-28 12:06:31,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.71 vs. limit=15.0 +2024-07-28 12:06:33,613 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:06:38,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=153834.66666666666, ans=0.0 +2024-07-28 12:06:39,902 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.688e+01 6.255e+01 7.399e+01 1.060e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 12:06:41,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=153834.66666666666, ans=0.0 +2024-07-28 12:06:46,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153848.0, ans=0.125 +2024-07-28 12:06:46,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=153848.0, ans=0.2 +2024-07-28 12:06:46,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.33 vs. limit=22.5 +2024-07-28 12:06:51,618 INFO [train.py:1114] (0/4) Epoch 12, batch 2950, loss[loss=0.1692, simple_loss=0.2482, pruned_loss=0.04508, over 4701.00 frames. 
], tot_loss[loss=0.192, simple_loss=0.2802, pruned_loss=0.05191, over 938907.92 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:07:14,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=12.0 +2024-07-28 12:07:25,338 INFO [train.py:1114] (0/4) Epoch 12, batch 3000, loss[loss=0.1726, simple_loss=0.2573, pruned_loss=0.04392, over 4756.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2791, pruned_loss=0.05131, over 938147.66 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:07:25,339 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 12:07:35,280 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.3445, 2.6274, 3.3366, 3.3442], device='cuda:0') +2024-07-28 12:07:36,617 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.4394, 2.8531, 1.5055, 3.0818, 2.5167, 2.7828, 3.1466, 2.7774], + device='cuda:0') +2024-07-28 12:07:37,194 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.9715, 2.6345, 2.6886, 2.2079, 2.7239, 2.7390, 2.8052, 2.5522], + device='cuda:0') +2024-07-28 12:07:46,424 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.1682, simple_loss=0.272, pruned_loss=0.03224, over 944034.00 frames. +2024-07-28 12:07:46,425 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 12:07:46,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153928.0, ans=0.1 +2024-07-28 12:07:54,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=153941.33333333334, ans=0.125 +2024-07-28 12:07:55,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=153941.33333333334, ans=0.2 +2024-07-28 12:07:58,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=153941.33333333334, ans=0.025 +2024-07-28 12:08:08,182 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.599e+01 6.354e+01 7.168e+01 1.019e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 12:08:17,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153981.33333333334, ans=0.1 +2024-07-28 12:08:18,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=153981.33333333334, ans=0.0 +2024-07-28 12:08:20,419 INFO [train.py:1114] (0/4) Epoch 12, batch 3050, loss[loss=0.1943, simple_loss=0.2806, pruned_loss=0.05405, over 4644.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2804, pruned_loss=0.0522, over 936448.55 frames. 
], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:29,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154008.0, ans=0.125 +2024-07-28 12:08:42,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=154034.66666666666, ans=0.2 +2024-07-28 12:08:43,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=154034.66666666666, ans=0.0 +2024-07-28 12:08:48,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.51 vs. limit=15.0 +2024-07-28 12:08:53,644 INFO [train.py:1114] (0/4) Epoch 12, batch 3100, loss[loss=0.2102, simple_loss=0.3025, pruned_loss=0.05897, over 4626.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2801, pruned_loss=0.05234, over 937292.34 frames. ], batch size: 16, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:09:00,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=154074.66666666666, ans=0.0 +2024-07-28 12:09:08,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=154088.0, ans=0.125 +2024-07-28 12:09:14,834 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.719e+01 6.483e+01 7.749e+01 1.294e+02, threshold=1.297e+02, percent-clipped=1.0 +2024-07-28 12:09:18,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=154101.33333333334, ans=0.0 +2024-07-28 12:09:19,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=154114.66666666666, ans=0.125 +2024-07-28 12:09:25,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154114.66666666666, ans=0.0 +2024-07-28 12:09:26,893 INFO [train.py:1114] (0/4) Epoch 12, batch 3150, loss[loss=0.2146, simple_loss=0.3083, pruned_loss=0.0605, over 4643.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2799, pruned_loss=0.05187, over 937492.12 frames. ], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:09:29,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=154128.0, ans=0.0 +2024-07-28 12:09:29,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.66 vs. limit=15.0 +2024-07-28 12:09:30,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.58 vs. limit=10.0 +2024-07-28 12:09:32,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=154128.0, ans=0.05 +2024-07-28 12:09:36,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=154141.33333333334, ans=0.125 +2024-07-28 12:09:41,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.17 vs. 
limit=15.0 +2024-07-28 12:09:45,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=154154.66666666666, ans=0.0 +2024-07-28 12:09:53,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=154168.0, ans=0.0 +2024-07-28 12:10:02,188 INFO [train.py:1114] (0/4) Epoch 12, batch 3200, loss[loss=0.1509, simple_loss=0.2497, pruned_loss=0.02607, over 4823.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2803, pruned_loss=0.05188, over 939434.53 frames. ], batch size: 13, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:04,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=12.0 +2024-07-28 12:10:05,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=154194.66666666666, ans=0.125 +2024-07-28 12:10:09,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=154208.0, ans=0.09899494936611666 +2024-07-28 12:10:09,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=154208.0, ans=0.125 +2024-07-28 12:10:15,590 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:10:21,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=15.0 +2024-07-28 12:10:23,361 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.738e+01 6.146e+01 6.845e+01 1.156e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 12:10:23,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=154234.66666666666, ans=0.125 +2024-07-28 12:10:24,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=154234.66666666666, ans=0.0 +2024-07-28 12:10:30,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=154248.0, ans=0.0 +2024-07-28 12:10:35,321 INFO [train.py:1114] (0/4) Epoch 12, batch 3250, loss[loss=0.2216, simple_loss=0.3136, pruned_loss=0.06479, over 4934.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.281, pruned_loss=0.05223, over 940501.68 frames. 
], batch size: 14, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:36,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=154261.33333333334, ans=0.2 +2024-07-28 12:10:44,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=154274.66666666666, ans=0.05 +2024-07-28 12:10:45,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=154274.66666666666, ans=0.2 +2024-07-28 12:10:52,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154288.0, ans=0.0 +2024-07-28 12:10:56,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=154301.33333333334, ans=0.0 +2024-07-28 12:10:58,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=154301.33333333334, ans=0.09899494936611666 +2024-07-28 12:11:09,164 INFO [train.py:1114] (0/4) Epoch 12, batch 3300, loss[loss=0.1986, simple_loss=0.2944, pruned_loss=0.05142, over 4763.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2809, pruned_loss=0.05266, over 940686.19 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:12,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=154328.0, ans=0.125 +2024-07-28 12:11:18,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=154341.33333333334, ans=0.125 +2024-07-28 12:11:31,600 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.622e+01 6.140e+01 6.825e+01 9.627e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 12:11:32,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=154368.0, ans=0.125 +2024-07-28 12:11:33,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=154368.0, ans=0.125 +2024-07-28 12:11:45,547 INFO [train.py:1114] (0/4) Epoch 12, batch 3350, loss[loss=0.1892, simple_loss=0.2941, pruned_loss=0.04212, over 4621.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2811, pruned_loss=0.05293, over 938757.71 frames. ], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:48,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154394.66666666666, ans=0.125 +2024-07-28 12:11:49,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=154394.66666666666, ans=0.0 +2024-07-28 12:11:53,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=154408.0, ans=0.125 +2024-07-28 12:12:11,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.20 vs. 
limit=15.0 +2024-07-28 12:12:23,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154448.0, ans=0.1 +2024-07-28 12:12:28,961 INFO [train.py:1114] (0/4) Epoch 12, batch 3400, loss[loss=0.1346, simple_loss=0.2224, pruned_loss=0.0234, over 4801.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2816, pruned_loss=0.05323, over 937409.97 frames. ], batch size: 11, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:12:30,345 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:12:33,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.21 vs. limit=15.0 +2024-07-28 12:12:41,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=154474.66666666666, ans=0.125 +2024-07-28 12:12:43,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=154488.0, ans=0.025 +2024-07-28 12:12:50,515 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.633e+01 6.152e+01 6.788e+01 1.015e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:12:56,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0 +2024-07-28 12:12:58,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.29 vs. limit=22.5 +2024-07-28 12:13:04,217 INFO [train.py:1114] (0/4) Epoch 12, batch 3450, loss[loss=0.2235, simple_loss=0.3031, pruned_loss=0.07194, over 4735.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2821, pruned_loss=0.05336, over 937488.50 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:07,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=154528.0, ans=0.0 +2024-07-28 12:13:09,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=154528.0, ans=0.025 +2024-07-28 12:13:21,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154554.66666666666, ans=0.0 +2024-07-28 12:13:33,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=154581.33333333334, ans=0.125 +2024-07-28 12:13:39,606 INFO [train.py:1114] (0/4) Epoch 12, batch 3500, loss[loss=0.1633, simple_loss=0.2502, pruned_loss=0.03821, over 4940.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2803, pruned_loss=0.05249, over 938068.57 frames. ], batch size: 12, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:14:01,205 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+01 5.546e+01 6.148e+01 6.737e+01 9.893e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:14:03,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=154634.66666666666, ans=0.025 +2024-07-28 12:14:11,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.15 vs. 
limit=22.5 +2024-07-28 12:14:12,942 INFO [train.py:1114] (0/4) Epoch 12, batch 3550, loss[loss=0.2051, simple_loss=0.3134, pruned_loss=0.04836, over 4673.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2811, pruned_loss=0.05264, over 938359.87 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 64.0 +2024-07-28 12:14:15,201 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-116000.pt +2024-07-28 12:14:25,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=154661.33333333334, ans=0.025 +2024-07-28 12:14:31,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=154674.66666666666, ans=0.125 +2024-07-28 12:14:50,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154714.66666666666, ans=0.1 +2024-07-28 12:14:54,228 INFO [train.py:1114] (0/4) Epoch 12, batch 3600, loss[loss=0.181, simple_loss=0.2769, pruned_loss=0.0425, over 4960.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2798, pruned_loss=0.05232, over 940208.23 frames. ], batch size: 13, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:14:59,949 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.92 vs. limit=15.0 +2024-07-28 12:15:09,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154754.66666666666, ans=0.125 +2024-07-28 12:15:13,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=154768.0, ans=0.125 +2024-07-28 12:15:16,132 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.873e+01 6.627e+01 7.814e+01 1.281e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-28 12:15:16,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.24 vs. limit=15.0 +2024-07-28 12:15:18,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=154768.0, ans=0.0 +2024-07-28 12:15:19,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=154768.0, ans=0.125 +2024-07-28 12:15:21,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.03 vs. limit=15.0 +2024-07-28 12:15:27,739 INFO [train.py:1114] (0/4) Epoch 12, batch 3650, loss[loss=0.2103, simple_loss=0.2848, pruned_loss=0.06794, over 4918.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2788, pruned_loss=0.05205, over 940652.01 frames. 
], batch size: 15, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:15:34,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=154794.66666666666, ans=0.125 +2024-07-28 12:15:39,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154808.0, ans=0.125 +2024-07-28 12:15:43,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=154821.33333333334, ans=0.125 +2024-07-28 12:15:45,255 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.62 vs. limit=15.0 +2024-07-28 12:15:56,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=154848.0, ans=0.0 +2024-07-28 12:16:02,902 INFO [train.py:1114] (0/4) Epoch 12, batch 3700, loss[loss=0.2026, simple_loss=0.3025, pruned_loss=0.05129, over 4930.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2782, pruned_loss=0.05141, over 941541.39 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:05,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=154861.33333333334, ans=0.125 +2024-07-28 12:16:15,817 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-07-28 12:16:19,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154888.0, ans=0.125 +2024-07-28 12:16:21,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154888.0, ans=0.125 +2024-07-28 12:16:23,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. limit=15.0 +2024-07-28 12:16:24,633 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.998e+01 5.499e+01 5.998e+01 6.974e+01 1.210e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-28 12:16:35,615 INFO [train.py:1114] (0/4) Epoch 12, batch 3750, loss[loss=0.1641, simple_loss=0.2528, pruned_loss=0.03773, over 4806.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2775, pruned_loss=0.05137, over 943217.45 frames. ], batch size: 11, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:44,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-28 12:16:49,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=154954.66666666666, ans=0.125 +2024-07-28 12:16:56,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.07 vs. 
limit=22.5 +2024-07-28 12:16:56,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154968.0, ans=0.125 +2024-07-28 12:17:07,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154981.33333333334, ans=0.125 +2024-07-28 12:17:09,273 INFO [train.py:1114] (0/4) Epoch 12, batch 3800, loss[loss=0.166, simple_loss=0.2623, pruned_loss=0.03486, over 4813.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2773, pruned_loss=0.05106, over 941116.39 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:09,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=154994.66666666666, ans=0.2 +2024-07-28 12:17:09,585 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-07-28 12:17:24,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=155021.33333333334, ans=0.125 +2024-07-28 12:17:24,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=155021.33333333334, ans=0.0 +2024-07-28 12:17:30,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=155034.66666666666, ans=0.025 +2024-07-28 12:17:32,841 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.749e+01 6.230e+01 7.169e+01 2.120e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-28 12:17:41,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=155048.0, ans=0.0 +2024-07-28 12:17:48,229 INFO [train.py:1114] (0/4) Epoch 12, batch 3850, loss[loss=0.1688, simple_loss=0.2625, pruned_loss=0.03759, over 4642.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2767, pruned_loss=0.05048, over 941759.15 frames. ], batch size: 16, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:59,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155061.33333333334, ans=0.125 +2024-07-28 12:18:04,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.68 vs. limit=15.0 +2024-07-28 12:18:06,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155074.66666666666, ans=0.125 +2024-07-28 12:18:07,898 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:18:13,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.58 vs. 
limit=22.5 +2024-07-28 12:18:16,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155101.33333333334, ans=0.1 +2024-07-28 12:18:17,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=155101.33333333334, ans=0.0 +2024-07-28 12:18:18,024 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=15.0 +2024-07-28 12:18:19,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.89 vs. limit=10.0 +2024-07-28 12:18:22,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=155114.66666666666, ans=0.125 +2024-07-28 12:18:26,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=155114.66666666666, ans=0.125 +2024-07-28 12:18:28,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=155114.66666666666, ans=0.125 +2024-07-28 12:18:29,376 INFO [train.py:1114] (0/4) Epoch 12, batch 3900, loss[loss=0.1804, simple_loss=0.2829, pruned_loss=0.03895, over 4818.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2775, pruned_loss=0.05057, over 942359.08 frames. ], batch size: 14, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:18:36,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=155141.33333333334, ans=0.025 +2024-07-28 12:18:44,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155154.66666666666, ans=0.1 +2024-07-28 12:18:46,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=15.0 +2024-07-28 12:18:48,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155154.66666666666, ans=0.1 +2024-07-28 12:18:48,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=155154.66666666666, ans=0.0 +2024-07-28 12:18:50,293 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. 
limit=6.0 +2024-07-28 12:18:52,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=155168.0, ans=0.0 +2024-07-28 12:18:53,187 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+01 5.496e+01 6.124e+01 6.680e+01 9.090e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:18:58,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=155181.33333333334, ans=0.025 +2024-07-28 12:19:00,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=155181.33333333334, ans=0.07 +2024-07-28 12:19:00,906 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:19:07,238 INFO [train.py:1114] (0/4) Epoch 12, batch 3950, loss[loss=0.2087, simple_loss=0.2961, pruned_loss=0.06064, over 4836.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2772, pruned_loss=0.05054, over 944426.05 frames. ], batch size: 16, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:08,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=155194.66666666666, ans=0.125 +2024-07-28 12:19:15,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.54 vs. limit=15.0 +2024-07-28 12:19:18,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=155208.0, ans=0.2 +2024-07-28 12:19:29,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=155234.66666666666, ans=0.0 +2024-07-28 12:19:33,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=155248.0, ans=0.0 +2024-07-28 12:19:41,093 INFO [train.py:1114] (0/4) Epoch 12, batch 4000, loss[loss=0.1724, simple_loss=0.2582, pruned_loss=0.04332, over 4779.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2782, pruned_loss=0.05095, over 940721.01 frames. ], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:43,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=155261.33333333334, ans=0.2 +2024-07-28 12:19:43,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=155261.33333333334, ans=0.04949747468305833 +2024-07-28 12:19:52,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=155274.66666666666, ans=0.2 +2024-07-28 12:20:02,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=155301.33333333334, ans=0.0 +2024-07-28 12:20:02,784 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.540e+01 6.444e+01 7.146e+01 1.519e+02, threshold=1.289e+02, percent-clipped=1.0 +2024-07-28 12:20:14,416 INFO [train.py:1114] (0/4) Epoch 12, batch 4050, loss[loss=0.2288, simple_loss=0.3102, pruned_loss=0.07372, over 3464.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2778, pruned_loss=0.051, over 939914.53 frames. 
], batch size: 35, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:17,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=155328.0, ans=0.0 +2024-07-28 12:20:18,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155328.0, ans=0.125 +2024-07-28 12:20:23,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=155341.33333333334, ans=0.125 +2024-07-28 12:20:48,505 INFO [train.py:1114] (0/4) Epoch 12, batch 4100, loss[loss=0.2204, simple_loss=0.3077, pruned_loss=0.06649, over 4898.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2777, pruned_loss=0.051, over 938604.44 frames. ], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:04,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=155421.33333333334, ans=0.125 +2024-07-28 12:21:09,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.52 vs. limit=6.0 +2024-07-28 12:21:12,002 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=15.0 +2024-07-28 12:21:12,177 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.731e+01 6.585e+01 8.286e+01 1.195e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 12:21:13,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155434.66666666666, ans=0.125 +2024-07-28 12:21:17,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.96 vs. limit=15.0 +2024-07-28 12:21:23,669 INFO [train.py:1114] (0/4) Epoch 12, batch 4150, loss[loss=0.1706, simple_loss=0.2588, pruned_loss=0.04121, over 4826.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2781, pruned_loss=0.05139, over 938630.42 frames. ], batch size: 13, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:27,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=155461.33333333334, ans=0.2 +2024-07-28 12:21:39,185 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:21:49,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=155501.33333333334, ans=0.125 +2024-07-28 12:21:50,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=155514.66666666666, ans=0.125 +2024-07-28 12:21:55,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=22.5 +2024-07-28 12:21:56,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=155528.0, ans=0.2 +2024-07-28 12:21:57,094 INFO [train.py:1114] (0/4) Epoch 12, batch 4200, loss[loss=0.2523, simple_loss=0.3293, pruned_loss=0.08762, over 4904.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2787, pruned_loss=0.05176, over 940128.84 frames. 
], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:58,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=155528.0, ans=0.04949747468305833 +2024-07-28 12:21:58,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=155528.0, ans=0.125 +2024-07-28 12:22:10,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=155554.66666666666, ans=0.125 +2024-07-28 12:22:11,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=155554.66666666666, ans=0.2 +2024-07-28 12:22:14,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=155554.66666666666, ans=0.125 +2024-07-28 12:22:15,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=155554.66666666666, ans=0.0 +2024-07-28 12:22:18,891 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.397e+01 5.594e+01 6.177e+01 7.434e+01 1.256e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 12:22:26,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=155581.33333333334, ans=0.05 +2024-07-28 12:22:30,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.34 vs. limit=15.0 +2024-07-28 12:22:30,568 INFO [train.py:1114] (0/4) Epoch 12, batch 4250, loss[loss=0.1579, simple_loss=0.2417, pruned_loss=0.03703, over 4646.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.278, pruned_loss=0.05169, over 941062.60 frames. ], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:22:31,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=9.86 vs. limit=12.0 +2024-07-28 12:22:53,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155634.66666666666, ans=0.0 +2024-07-28 12:23:06,442 INFO [train.py:1114] (0/4) Epoch 12, batch 4300, loss[loss=0.1548, simple_loss=0.2446, pruned_loss=0.03248, over 4765.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2778, pruned_loss=0.05149, over 940038.35 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:08,113 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.74 vs. limit=10.0 +2024-07-28 12:23:10,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.45 vs. 
limit=12.0 +2024-07-28 12:23:19,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=155688.0, ans=0.125 +2024-07-28 12:23:20,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=155688.0, ans=0.125 +2024-07-28 12:23:22,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=155688.0, ans=0.125 +2024-07-28 12:23:24,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155688.0, ans=0.1 +2024-07-28 12:23:24,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.53 vs. limit=22.5 +2024-07-28 12:23:27,974 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.634e+01 6.193e+01 6.969e+01 9.578e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 12:23:39,248 INFO [train.py:1114] (0/4) Epoch 12, batch 4350, loss[loss=0.199, simple_loss=0.2944, pruned_loss=0.05185, over 4766.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2786, pruned_loss=0.05169, over 940630.29 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:45,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=155741.33333333334, ans=0.0 +2024-07-28 12:23:49,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=155741.33333333334, ans=0.125 +2024-07-28 12:23:59,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=155768.0, ans=0.025 +2024-07-28 12:24:28,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=155794.66666666666, ans=0.125 +2024-07-28 12:24:28,622 INFO [train.py:1114] (0/4) Epoch 12, batch 4400, loss[loss=0.1701, simple_loss=0.2629, pruned_loss=0.03867, over 4812.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2791, pruned_loss=0.05193, over 940588.95 frames. ], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:24:38,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155808.0, ans=0.0 +2024-07-28 12:24:42,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=155808.0, ans=0.0 +2024-07-28 12:24:51,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=155834.66666666666, ans=0.125 +2024-07-28 12:24:52,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=155834.66666666666, ans=0.0 +2024-07-28 12:24:52,798 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.483e+01 6.215e+01 6.857e+01 1.527e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 12:24:58,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.75 vs. 
limit=15.0 +2024-07-28 12:25:06,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155848.0, ans=0.1 +2024-07-28 12:25:07,429 INFO [train.py:1114] (0/4) Epoch 12, batch 4450, loss[loss=0.1681, simple_loss=0.2439, pruned_loss=0.04615, over 4945.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2792, pruned_loss=0.05213, over 938979.68 frames. ], batch size: 12, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:18,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=155861.33333333334, ans=0.0 +2024-07-28 12:25:21,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0 +2024-07-28 12:25:40,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=6.0 +2024-07-28 12:25:46,669 INFO [train.py:1114] (0/4) Epoch 12, batch 4500, loss[loss=0.1791, simple_loss=0.2743, pruned_loss=0.04192, over 4744.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2791, pruned_loss=0.05126, over 938281.07 frames. ], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:49,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155928.0, ans=0.1 +2024-07-28 12:26:06,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=155968.0, ans=0.125 +2024-07-28 12:26:08,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.62 vs. limit=22.5 +2024-07-28 12:26:08,393 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.459e+01 6.375e+01 7.469e+01 1.021e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:26:11,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=155968.0, ans=0.04949747468305833 +2024-07-28 12:26:13,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=155981.33333333334, ans=0.125 +2024-07-28 12:26:16,478 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:26:19,640 INFO [train.py:1114] (0/4) Epoch 12, batch 4550, loss[loss=0.1419, simple_loss=0.2244, pruned_loss=0.02969, over 4886.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2787, pruned_loss=0.05114, over 940389.73 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:28,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=156008.0, ans=0.125 +2024-07-28 12:26:29,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. 
limit=15.0 +2024-07-28 12:26:48,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=156048.0, ans=0.125 +2024-07-28 12:26:50,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=156048.0, ans=0.0 +2024-07-28 12:26:55,005 INFO [train.py:1114] (0/4) Epoch 12, batch 4600, loss[loss=0.1986, simple_loss=0.2998, pruned_loss=0.0487, over 4521.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2787, pruned_loss=0.05134, over 938405.87 frames. ], batch size: 21, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:27:07,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=156074.66666666666, ans=15.0 +2024-07-28 12:27:11,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156088.0, ans=0.1 +2024-07-28 12:27:13,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=156088.0, ans=0.0 +2024-07-28 12:27:16,923 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.723e+01 6.384e+01 7.730e+01 1.121e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 12:27:19,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=156101.33333333334, ans=0.125 +2024-07-28 12:27:25,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=156114.66666666666, ans=0.125 +2024-07-28 12:27:26,651 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.21 vs. limit=15.0 +2024-07-28 12:27:28,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.07 vs. limit=15.0 +2024-07-28 12:27:28,346 INFO [train.py:1114] (0/4) Epoch 12, batch 4650, loss[loss=0.1954, simple_loss=0.2772, pruned_loss=0.05678, over 4858.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2798, pruned_loss=0.05194, over 939830.24 frames. ], batch size: 16, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:27:39,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=156141.33333333334, ans=0.025 +2024-07-28 12:27:42,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=156154.66666666666, ans=0.025 +2024-07-28 12:27:55,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=156181.33333333334, ans=0.125 +2024-07-28 12:28:01,581 INFO [train.py:1114] (0/4) Epoch 12, batch 4700, loss[loss=0.1851, simple_loss=0.2649, pruned_loss=0.0527, over 4694.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2794, pruned_loss=0.05193, over 937555.35 frames. 
], batch size: 11, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:13,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=156208.0, ans=0.0 +2024-07-28 12:28:23,596 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.553e+01 6.034e+01 6.597e+01 9.759e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 12:28:27,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=156234.66666666666, ans=0.95 +2024-07-28 12:28:32,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=156248.0, ans=0.0 +2024-07-28 12:28:35,483 INFO [train.py:1114] (0/4) Epoch 12, batch 4750, loss[loss=0.1854, simple_loss=0.2691, pruned_loss=0.05091, over 4504.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2802, pruned_loss=0.05244, over 935506.28 frames. ], batch size: 21, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:37,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=15.0 +2024-07-28 12:28:58,748 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:29:11,599 INFO [train.py:1114] (0/4) Epoch 12, batch 4800, loss[loss=0.1586, simple_loss=0.2509, pruned_loss=0.03314, over 4690.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2798, pruned_loss=0.05268, over 933075.55 frames. ], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:14,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=156328.0, ans=0.125 +2024-07-28 12:29:17,637 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:29:32,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=156368.0, ans=0.125 +2024-07-28 12:29:35,339 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.583e+01 6.047e+01 7.018e+01 9.420e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 12:29:46,705 INFO [train.py:1114] (0/4) Epoch 12, batch 4850, loss[loss=0.1803, simple_loss=0.2693, pruned_loss=0.04566, over 4735.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2797, pruned_loss=0.05225, over 932171.06 frames. ], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:30:03,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156421.33333333334, ans=0.0 +2024-07-28 12:30:17,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=156434.66666666666, ans=0.125 +2024-07-28 12:30:20,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=156448.0, ans=0.0 +2024-07-28 12:30:23,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=156448.0, ans=0.02 +2024-07-28 12:30:25,568 INFO [train.py:1114] (0/4) Epoch 12, batch 4900, loss[loss=0.1665, simple_loss=0.2471, pruned_loss=0.04298, over 4753.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.278, pruned_loss=0.05134, over 934051.89 frames. 
], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:30:28,185 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.33 vs. limit=15.0 +2024-07-28 12:30:36,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156474.66666666666, ans=0.125 +2024-07-28 12:30:37,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.50 vs. limit=12.0 +2024-07-28 12:30:39,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=156488.0, ans=0.125 +2024-07-28 12:30:48,383 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.179e+01 5.700e+01 6.377e+01 7.192e+01 1.081e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:30:51,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156501.33333333334, ans=0.125 +2024-07-28 12:30:52,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0 +2024-07-28 12:30:57,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=156514.66666666666, ans=0.0 +2024-07-28 12:30:59,635 INFO [train.py:1114] (0/4) Epoch 12, batch 4950, loss[loss=0.2332, simple_loss=0.307, pruned_loss=0.07963, over 3511.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2795, pruned_loss=0.05259, over 931545.60 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:00,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=156528.0, ans=0.0 +2024-07-28 12:31:05,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156541.33333333334, ans=0.125 +2024-07-28 12:31:07,982 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-07-28 12:31:19,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=156568.0, ans=0.125 +2024-07-28 12:31:25,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=156568.0, ans=0.125 +2024-07-28 12:31:33,222 INFO [train.py:1114] (0/4) Epoch 12, batch 5000, loss[loss=0.1604, simple_loss=0.2534, pruned_loss=0.03369, over 4666.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2785, pruned_loss=0.05198, over 935359.60 frames. ], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:59,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156634.66666666666, ans=0.125 +2024-07-28 12:32:00,477 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.699e+01 6.190e+01 6.580e+01 9.599e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 12:32:01,584 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.30 vs. 
limit=15.0 +2024-07-28 12:32:11,986 INFO [train.py:1114] (0/4) Epoch 12, batch 5050, loss[loss=0.1573, simple_loss=0.2459, pruned_loss=0.03435, over 4852.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2788, pruned_loss=0.05214, over 937850.91 frames. ], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:18,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.02 vs. limit=15.0 +2024-07-28 12:32:19,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=156674.66666666666, ans=0.125 +2024-07-28 12:32:21,358 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-07-28 12:32:44,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=156714.66666666666, ans=0.5 +2024-07-28 12:32:48,321 INFO [train.py:1114] (0/4) Epoch 12, batch 5100, loss[loss=0.163, simple_loss=0.2468, pruned_loss=0.03956, over 4769.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2793, pruned_loss=0.05258, over 935258.60 frames. ], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:49,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=156728.0, ans=0.125 +2024-07-28 12:32:51,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=156728.0, ans=0.2 +2024-07-28 12:32:52,719 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.90 vs. limit=15.0 +2024-07-28 12:33:13,056 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.09 vs. limit=12.0 +2024-07-28 12:33:18,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156768.0, ans=0.1 +2024-07-28 12:33:19,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.805e+01 5.671e+01 6.012e+01 6.981e+01 1.009e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 12:33:20,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=156768.0, ans=0.125 +2024-07-28 12:33:20,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=15.0 +2024-07-28 12:33:29,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=156781.33333333334, ans=0.125 +2024-07-28 12:33:36,876 INFO [train.py:1114] (0/4) Epoch 12, batch 5150, loss[loss=0.2212, simple_loss=0.3083, pruned_loss=0.06704, over 4830.00 frames. ], tot_loss[loss=0.193, simple_loss=0.28, pruned_loss=0.05302, over 935908.68 frames. 
], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:33:39,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=156794.66666666666, ans=0.125 +2024-07-28 12:33:45,960 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:33:54,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156821.33333333334, ans=0.125 +2024-07-28 12:33:57,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=156821.33333333334, ans=0.125 +2024-07-28 12:34:04,463 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.43 vs. limit=12.0 +2024-07-28 12:34:06,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=156848.0, ans=0.2 +2024-07-28 12:34:07,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=156848.0, ans=0.025 +2024-07-28 12:34:12,707 INFO [train.py:1114] (0/4) Epoch 12, batch 5200, loss[loss=0.1787, simple_loss=0.2885, pruned_loss=0.03448, over 4653.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2793, pruned_loss=0.0527, over 936321.50 frames. ], batch size: 14, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:23,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156874.66666666666, ans=0.1 +2024-07-28 12:34:29,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=156888.0, ans=0.125 +2024-07-28 12:34:39,943 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+01 5.675e+01 6.398e+01 7.446e+01 1.094e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 12:34:46,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=156914.66666666666, ans=0.2 +2024-07-28 12:34:51,289 INFO [train.py:1114] (0/4) Epoch 12, batch 5250, loss[loss=0.1683, simple_loss=0.265, pruned_loss=0.0358, over 4894.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2787, pruned_loss=0.05188, over 935876.56 frames. ], batch size: 13, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:55,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.32 vs. 
limit=12.0 +2024-07-28 12:34:56,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=156928.0, ans=0.0 +2024-07-28 12:35:01,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=156941.33333333334, ans=0.0 +2024-07-28 12:35:06,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=156954.66666666666, ans=0.125 +2024-07-28 12:35:15,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=156968.0, ans=0.0 +2024-07-28 12:35:19,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=156981.33333333334, ans=0.025 +2024-07-28 12:35:20,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.58 vs. limit=10.0 +2024-07-28 12:35:22,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=156981.33333333334, ans=0.0 +2024-07-28 12:35:24,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.94 vs. limit=22.5 +2024-07-28 12:35:24,840 INFO [train.py:1114] (0/4) Epoch 12, batch 5300, loss[loss=0.2094, simple_loss=0.3041, pruned_loss=0.05738, over 4635.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.279, pruned_loss=0.05233, over 934278.58 frames. ], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:35:29,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156994.66666666666, ans=0.125 +2024-07-28 12:35:32,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.06 vs. limit=15.0 +2024-07-28 12:35:32,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157008.0, ans=0.125 +2024-07-28 12:35:38,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=157021.33333333334, ans=0.0 +2024-07-28 12:35:46,893 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.552e+01 6.428e+01 7.649e+01 1.141e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 12:36:00,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=157061.33333333334, ans=0.125 +2024-07-28 12:36:00,607 INFO [train.py:1114] (0/4) Epoch 12, batch 5350, loss[loss=0.2148, simple_loss=0.2824, pruned_loss=0.07362, over 4504.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2807, pruned_loss=0.05311, over 936086.06 frames. 
], batch size: 10, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:36:01,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=157061.33333333334, ans=0.125 +2024-07-28 12:36:03,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=157061.33333333334, ans=0.5 +2024-07-28 12:36:08,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=157074.66666666666, ans=0.1 +2024-07-28 12:36:08,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=157074.66666666666, ans=0.0 +2024-07-28 12:36:09,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=157074.66666666666, ans=0.025 +2024-07-28 12:36:15,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=157088.0, ans=0.125 +2024-07-28 12:36:18,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.07 vs. limit=10.0 +2024-07-28 12:36:23,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=157101.33333333334, ans=0.125 +2024-07-28 12:36:27,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=157101.33333333334, ans=0.125 +2024-07-28 12:36:36,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=157128.0, ans=0.025 +2024-07-28 12:36:36,749 INFO [train.py:1114] (0/4) Epoch 12, batch 5400, loss[loss=0.2762, simple_loss=0.352, pruned_loss=0.1001, over 4334.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.282, pruned_loss=0.05365, over 930641.62 frames. ], batch size: 26, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:36:46,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=157141.33333333334, ans=0.0 +2024-07-28 12:36:55,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.86 vs. limit=15.0 +2024-07-28 12:36:58,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.20 vs. limit=15.0 +2024-07-28 12:36:58,608 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 6.108e+01 6.944e+01 7.812e+01 1.147e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-28 12:37:04,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157181.33333333334, ans=0.1 +2024-07-28 12:37:09,794 INFO [train.py:1114] (0/4) Epoch 12, batch 5450, loss[loss=0.1411, simple_loss=0.2161, pruned_loss=0.03305, over 4709.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2808, pruned_loss=0.05272, over 933469.08 frames. 
], batch size: 11, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:37:11,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=157194.66666666666, ans=0.0 +2024-07-28 12:37:17,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=157208.0, ans=0.125 +2024-07-28 12:37:40,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.40 vs. limit=10.0 +2024-07-28 12:37:43,607 INFO [train.py:1114] (0/4) Epoch 12, batch 5500, loss[loss=0.1942, simple_loss=0.2756, pruned_loss=0.05643, over 4272.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2802, pruned_loss=0.05272, over 930980.72 frames. ], batch size: 26, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:37:46,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=157261.33333333334, ans=0.0 +2024-07-28 12:37:55,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=12.0 +2024-07-28 12:38:06,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157301.33333333334, ans=0.125 +2024-07-28 12:38:07,910 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.744e+01 6.392e+01 7.523e+01 1.431e+02, threshold=1.278e+02, percent-clipped=1.0 +2024-07-28 12:38:11,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5 +2024-07-28 12:38:19,220 INFO [train.py:1114] (0/4) Epoch 12, batch 5550, loss[loss=0.1934, simple_loss=0.274, pruned_loss=0.05638, over 4714.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2794, pruned_loss=0.05263, over 933057.65 frames. ], batch size: 12, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:38:29,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=157341.33333333334, ans=0.0 +2024-07-28 12:38:34,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157354.66666666666, ans=0.125 +2024-07-28 12:38:42,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.96 vs. limit=15.0 +2024-07-28 12:38:43,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=157368.0, ans=0.125 +2024-07-28 12:38:49,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157381.33333333334, ans=0.1 +2024-07-28 12:38:52,994 INFO [train.py:1114] (0/4) Epoch 12, batch 5600, loss[loss=0.1894, simple_loss=0.2909, pruned_loss=0.04393, over 4743.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2793, pruned_loss=0.05228, over 933701.35 frames. 
], batch size: 14, lr: 6.24e-03, grad_scale: 64.0 +2024-07-28 12:39:06,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157421.33333333334, ans=0.1 +2024-07-28 12:39:13,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157434.66666666666, ans=0.125 +2024-07-28 12:39:15,774 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.521e+01 6.342e+01 7.244e+01 1.033e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 12:39:23,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157448.0, ans=0.1 +2024-07-28 12:39:26,402 INFO [train.py:1114] (0/4) Epoch 12, batch 5650, loss[loss=0.1873, simple_loss=0.28, pruned_loss=0.0473, over 4582.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2783, pruned_loss=0.05159, over 936518.36 frames. ], batch size: 21, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:39:27,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157461.33333333334, ans=0.125 +2024-07-28 12:39:28,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=157461.33333333334, ans=0.125 +2024-07-28 12:39:32,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=157461.33333333334, ans=0.0 +2024-07-28 12:39:32,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.46 vs. limit=15.0 +2024-07-28 12:39:35,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=157474.66666666666, ans=0.0 +2024-07-28 12:39:35,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0 +2024-07-28 12:39:58,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=157514.66666666666, ans=0.0 +2024-07-28 12:40:01,571 INFO [train.py:1114] (0/4) Epoch 12, batch 5700, loss[loss=0.1753, simple_loss=0.2693, pruned_loss=0.04066, over 4692.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2779, pruned_loss=0.05124, over 937486.04 frames. 
], batch size: 13, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:40:01,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=157528.0, ans=0.125 +2024-07-28 12:40:04,815 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:40:04,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=157528.0, ans=0.2 +2024-07-28 12:40:21,552 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:40:23,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=157568.0, ans=0.125 +2024-07-28 12:40:26,154 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.907e+01 5.849e+01 6.761e+01 7.551e+01 1.061e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 12:40:27,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=157568.0, ans=0.07 +2024-07-28 12:40:36,678 INFO [train.py:1114] (0/4) Epoch 12, batch 5750, loss[loss=0.1867, simple_loss=0.2794, pruned_loss=0.04701, over 4738.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2794, pruned_loss=0.05151, over 937647.15 frames. ], batch size: 19, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:41:01,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=157621.33333333334, ans=0.0 +2024-07-28 12:41:14,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157648.0, ans=0.1 +2024-07-28 12:41:16,559 INFO [train.py:1114] (0/4) Epoch 12, batch 5800, loss[loss=0.2225, simple_loss=0.3222, pruned_loss=0.06142, over 4719.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2807, pruned_loss=0.05246, over 936629.30 frames. ], batch size: 19, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:41:20,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=157661.33333333334, ans=0.125 +2024-07-28 12:41:21,504 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.15 vs. limit=22.5 +2024-07-28 12:41:38,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.70 vs. limit=15.0 +2024-07-28 12:41:42,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-28 12:41:43,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.542e+01 6.072e+01 7.218e+01 1.008e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 12:41:47,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.91 vs. limit=22.5 +2024-07-28 12:41:53,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=157714.66666666666, ans=0.05 +2024-07-28 12:41:59,704 INFO [train.py:1114] (0/4) Epoch 12, batch 5850, loss[loss=0.2196, simple_loss=0.3034, pruned_loss=0.06789, over 4404.00 frames. 
], tot_loss[loss=0.1924, simple_loss=0.2803, pruned_loss=0.05228, over 937388.69 frames. ], batch size: 21, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:42:11,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=157741.33333333334, ans=0.125 +2024-07-28 12:42:29,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-07-28 12:42:33,080 INFO [train.py:1114] (0/4) Epoch 12, batch 5900, loss[loss=0.2233, simple_loss=0.3151, pruned_loss=0.06572, over 4685.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2796, pruned_loss=0.05198, over 937576.06 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:42:36,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=157794.66666666666, ans=0.2 +2024-07-28 12:42:45,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=157808.0, ans=0.0 +2024-07-28 12:42:45,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=157808.0, ans=0.125 +2024-07-28 12:42:56,394 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.671e+01 6.327e+01 7.319e+01 1.125e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 12:43:00,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=157848.0, ans=0.0 +2024-07-28 12:43:07,225 INFO [train.py:1114] (0/4) Epoch 12, batch 5950, loss[loss=0.2357, simple_loss=0.3256, pruned_loss=0.07292, over 4700.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2798, pruned_loss=0.05193, over 939453.69 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:43:08,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=157861.33333333334, ans=0.09899494936611666 +2024-07-28 12:43:14,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=157874.66666666666, ans=0.0 +2024-07-28 12:43:42,927 INFO [train.py:1114] (0/4) Epoch 12, batch 6000, loss[loss=0.1953, simple_loss=0.2807, pruned_loss=0.05495, over 4272.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2799, pruned_loss=0.05237, over 937245.76 frames. ], batch size: 26, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:43:42,928 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 12:43:52,209 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.2918, 5.0576, 4.4605, 4.8229], device='cuda:0') +2024-07-28 12:43:54,398 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.1672, simple_loss=0.2713, pruned_loss=0.03161, over 944034.00 frames. +2024-07-28 12:43:54,399 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 12:44:01,159 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.55 vs. 
limit=8.0 +2024-07-28 12:44:17,591 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.694e+01 6.318e+01 7.255e+01 1.160e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 12:44:27,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=157981.33333333334, ans=0.125 +2024-07-28 12:44:28,536 INFO [train.py:1114] (0/4) Epoch 12, batch 6050, loss[loss=0.1747, simple_loss=0.2643, pruned_loss=0.04257, over 4773.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2799, pruned_loss=0.05227, over 938267.53 frames. ], batch size: 12, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:44:31,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=157994.66666666666, ans=0.0 +2024-07-28 12:44:34,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=157994.66666666666, ans=0.125 +2024-07-28 12:44:39,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=158008.0, ans=0.0 +2024-07-28 12:44:49,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=158034.66666666666, ans=0.125 +2024-07-28 12:44:55,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=158048.0, ans=0.0 +2024-07-28 12:45:01,895 INFO [train.py:1114] (0/4) Epoch 12, batch 6100, loss[loss=0.1803, simple_loss=0.2791, pruned_loss=0.04079, over 4683.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2791, pruned_loss=0.05145, over 937550.99 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:45:23,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=158101.33333333334, ans=0.0 +2024-07-28 12:45:26,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.720e+01 6.414e+01 7.144e+01 1.177e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 12:45:35,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.32 vs. limit=22.5 +2024-07-28 12:45:36,975 INFO [train.py:1114] (0/4) Epoch 12, batch 6150, loss[loss=0.2364, simple_loss=0.3047, pruned_loss=0.08411, over 3145.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2793, pruned_loss=0.05153, over 935993.23 frames. ], batch size: 35, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:45:40,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=158128.0, ans=0.0 +2024-07-28 12:45:41,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=158128.0, ans=0.015 +2024-07-28 12:45:51,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.59 vs. limit=15.0 +2024-07-28 12:45:55,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=158154.66666666666, ans=0.0 +2024-07-28 12:46:00,224 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. 
limit=15.0 +2024-07-28 12:46:02,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=158168.0, ans=0.125 +2024-07-28 12:46:02,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=158168.0, ans=0.0 +2024-07-28 12:46:02,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-07-28 12:46:04,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=158181.33333333334, ans=0.09899494936611666 +2024-07-28 12:46:05,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158181.33333333334, ans=0.1 +2024-07-28 12:46:10,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=158194.66666666666, ans=0.0 +2024-07-28 12:46:11,266 INFO [train.py:1114] (0/4) Epoch 12, batch 6200, loss[loss=0.2127, simple_loss=0.3059, pruned_loss=0.05976, over 4740.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2793, pruned_loss=0.0516, over 935767.95 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:46:11,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.83 vs. limit=15.0 +2024-07-28 12:46:27,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=158221.33333333334, ans=0.125 +2024-07-28 12:46:27,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=158221.33333333334, ans=0.0 +2024-07-28 12:46:27,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 12:46:33,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=158234.66666666666, ans=0.0 +2024-07-28 12:46:34,432 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.111e+01 6.861e+01 1.032e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 12:46:40,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=158248.0, ans=0.125 +2024-07-28 12:46:47,154 INFO [train.py:1114] (0/4) Epoch 12, batch 6250, loss[loss=0.2001, simple_loss=0.2922, pruned_loss=0.05399, over 4795.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2792, pruned_loss=0.05208, over 932289.76 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:47:07,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=158288.0, ans=0.0 +2024-07-28 12:47:22,585 INFO [train.py:1114] (0/4) Epoch 12, batch 6300, loss[loss=0.1423, simple_loss=0.2322, pruned_loss=0.02616, over 4532.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2796, pruned_loss=0.05217, over 929166.69 frames. ], batch size: 10, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:47:22,911 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. 
limit=15.0 +2024-07-28 12:47:26,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.83 vs. limit=15.0 +2024-07-28 12:47:31,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=158341.33333333334, ans=0.125 +2024-07-28 12:47:32,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=158341.33333333334, ans=0.025 +2024-07-28 12:47:38,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.08 vs. limit=22.5 +2024-07-28 12:47:44,774 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.769e+01 6.275e+01 7.297e+01 9.885e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 12:47:45,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=158368.0, ans=0.0 +2024-07-28 12:47:46,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=158368.0, ans=0.125 +2024-07-28 12:47:47,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=158368.0, ans=0.0 +2024-07-28 12:47:49,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=158381.33333333334, ans=0.125 +2024-07-28 12:47:50,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=158381.33333333334, ans=0.125 +2024-07-28 12:47:55,499 INFO [train.py:1114] (0/4) Epoch 12, batch 6350, loss[loss=0.1904, simple_loss=0.2839, pruned_loss=0.04842, over 4519.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2781, pruned_loss=0.05123, over 933328.27 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:47:57,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158394.66666666666, ans=0.125 +2024-07-28 12:47:58,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.53 vs. limit=22.5 +2024-07-28 12:48:02,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=158408.0, ans=0.0 +2024-07-28 12:48:06,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=158408.0, ans=0.125 +2024-07-28 12:48:08,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=158421.33333333334, ans=0.0 +2024-07-28 12:48:16,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=158434.66666666666, ans=0.125 +2024-07-28 12:48:18,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158434.66666666666, ans=0.125 +2024-07-28 12:48:29,205 INFO [train.py:1114] (0/4) Epoch 12, batch 6400, loss[loss=0.2248, simple_loss=0.3135, pruned_loss=0.06808, over 4632.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2782, pruned_loss=0.05173, over 934859.96 frames. 
], batch size: 13, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:48:30,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=158461.33333333334, ans=0.04949747468305833 +2024-07-28 12:48:34,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=12.0 +2024-07-28 12:48:38,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=158474.66666666666, ans=0.125 +2024-07-28 12:48:44,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.62 vs. limit=10.0 +2024-07-28 12:48:48,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0 +2024-07-28 12:48:50,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.34 vs. limit=22.5 +2024-07-28 12:48:51,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158501.33333333334, ans=0.125 +2024-07-28 12:48:51,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=158501.33333333334, ans=0.125 +2024-07-28 12:48:53,536 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.709e+01 6.303e+01 7.389e+01 1.106e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 12:49:04,231 INFO [train.py:1114] (0/4) Epoch 12, batch 6450, loss[loss=0.2064, simple_loss=0.2862, pruned_loss=0.06336, over 4630.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2794, pruned_loss=0.05152, over 938774.50 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:49:05,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158528.0, ans=0.1 +2024-07-28 12:49:12,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158541.33333333334, ans=0.1 +2024-07-28 12:49:18,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=158554.66666666666, ans=0.95 +2024-07-28 12:49:21,086 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=15.0 +2024-07-28 12:49:30,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=158581.33333333334, ans=0.125 +2024-07-28 12:49:36,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=158581.33333333334, ans=0.5 +2024-07-28 12:49:37,246 INFO [train.py:1114] (0/4) Epoch 12, batch 6500, loss[loss=0.3091, simple_loss=0.3506, pruned_loss=0.1338, over 2981.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2786, pruned_loss=0.05112, over 939595.56 frames. 
], batch size: 36, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:49:45,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158608.0, ans=0.1 +2024-07-28 12:49:50,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158621.33333333334, ans=0.1 +2024-07-28 12:49:56,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=158634.66666666666, ans=0.125 +2024-07-28 12:49:57,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=158634.66666666666, ans=0.125 +2024-07-28 12:49:59,311 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.679e+01 6.205e+01 7.346e+01 1.316e+02, threshold=1.241e+02, percent-clipped=1.0 +2024-07-28 12:50:04,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=158648.0, ans=0.025 +2024-07-28 12:50:06,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=158648.0, ans=0.1 +2024-07-28 12:50:10,243 INFO [train.py:1114] (0/4) Epoch 12, batch 6550, loss[loss=0.1808, simple_loss=0.2658, pruned_loss=0.04792, over 4794.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.278, pruned_loss=0.05059, over 942555.25 frames. ], batch size: 11, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:50:20,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.72 vs. limit=22.5 +2024-07-28 12:50:33,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=158701.33333333334, ans=0.125 +2024-07-28 12:50:35,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.11 vs. limit=22.5 +2024-07-28 12:50:36,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158714.66666666666, ans=0.1 +2024-07-28 12:50:43,173 INFO [train.py:1114] (0/4) Epoch 12, batch 6600, loss[loss=0.1602, simple_loss=0.2606, pruned_loss=0.02986, over 4932.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2784, pruned_loss=0.05086, over 944586.14 frames. ], batch size: 14, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:50:56,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=158741.33333333334, ans=0.125 +2024-07-28 12:50:57,786 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.19 vs. 
limit=15.0 +2024-07-28 12:51:06,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=158768.0, ans=0.125 +2024-07-28 12:51:07,914 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.716e+01 6.452e+01 7.100e+01 1.307e+02, threshold=1.290e+02, percent-clipped=2.0 +2024-07-28 12:51:10,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158768.0, ans=0.1 +2024-07-28 12:51:10,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.80 vs. limit=15.0 +2024-07-28 12:51:22,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=158794.66666666666, ans=0.0 +2024-07-28 12:51:23,533 INFO [train.py:1114] (0/4) Epoch 12, batch 6650, loss[loss=0.205, simple_loss=0.3005, pruned_loss=0.05477, over 4601.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2777, pruned_loss=0.05049, over 943486.70 frames. ], batch size: 17, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:51:50,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158821.33333333334, ans=0.125 +2024-07-28 12:52:04,768 INFO [train.py:1114] (0/4) Epoch 12, batch 6700, loss[loss=0.1883, simple_loss=0.2812, pruned_loss=0.04766, over 4723.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2777, pruned_loss=0.05076, over 942304.39 frames. ], batch size: 19, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:52:17,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0 +2024-07-28 12:52:22,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=158888.0, ans=0.2 +2024-07-28 12:52:30,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 12:52:31,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 5.714e+01 6.445e+01 7.279e+01 1.274e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 12:52:37,749 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:52:42,415 INFO [train.py:1114] (0/4) Epoch 12, batch 6750, loss[loss=0.2367, simple_loss=0.3168, pruned_loss=0.07823, over 4186.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2785, pruned_loss=0.05121, over 940291.87 frames. ], batch size: 25, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:52:43,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.92 vs. limit=22.5 +2024-07-28 12:52:46,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. 
limit=15.0 +2024-07-28 12:52:50,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=158941.33333333334, ans=0.0 +2024-07-28 12:52:53,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=158941.33333333334, ans=0.125 +2024-07-28 12:52:54,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=158941.33333333334, ans=0.2 +2024-07-28 12:52:57,762 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.34 vs. limit=22.5 +2024-07-28 12:53:03,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=158968.0, ans=0.0 +2024-07-28 12:53:04,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.01 vs. limit=22.5 +2024-07-28 12:53:13,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=158981.33333333334, ans=0.05 +2024-07-28 12:53:15,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=158994.66666666666, ans=0.0 +2024-07-28 12:53:16,209 INFO [train.py:1114] (0/4) Epoch 12, batch 6800, loss[loss=0.1717, simple_loss=0.2688, pruned_loss=0.03733, over 4635.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2792, pruned_loss=0.05127, over 938504.22 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:53:16,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158994.66666666666, ans=0.1 +2024-07-28 12:53:22,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=159008.0, ans=0.025 +2024-07-28 12:53:27,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159008.0, ans=0.1 +2024-07-28 12:53:30,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=159021.33333333334, ans=0.025 +2024-07-28 12:53:32,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=159021.33333333334, ans=0.2 +2024-07-28 12:53:32,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0 +2024-07-28 12:53:38,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.632e+01 6.105e+01 6.995e+01 1.094e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 12:53:49,461 INFO [train.py:1114] (0/4) Epoch 12, batch 6850, loss[loss=0.2168, simple_loss=0.3117, pruned_loss=0.06098, over 4689.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2789, pruned_loss=0.05135, over 940208.67 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:53:52,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. 
limit=6.0 +2024-07-28 12:54:03,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=159088.0, ans=0.025 +2024-07-28 12:54:08,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=159088.0, ans=0.2 +2024-07-28 12:54:24,191 INFO [train.py:1114] (0/4) Epoch 12, batch 6900, loss[loss=0.1907, simple_loss=0.2671, pruned_loss=0.05715, over 4958.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2787, pruned_loss=0.05117, over 942399.10 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:54:33,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.11 vs. limit=15.0 +2024-07-28 12:54:34,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=159141.33333333334, ans=0.0 +2024-07-28 12:54:35,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.99 vs. limit=22.5 +2024-07-28 12:54:37,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-07-28 12:54:44,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=159168.0, ans=15.0 +2024-07-28 12:54:46,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159168.0, ans=0.1 +2024-07-28 12:54:46,744 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.739e+01 5.528e+01 6.156e+01 7.028e+01 9.720e+01, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 12:54:48,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=159168.0, ans=0.0 +2024-07-28 12:54:50,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=159181.33333333334, ans=0.2 +2024-07-28 12:54:54,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159181.33333333334, ans=0.125 +2024-07-28 12:54:57,575 INFO [train.py:1114] (0/4) Epoch 12, batch 6950, loss[loss=0.1889, simple_loss=0.2572, pruned_loss=0.0603, over 4486.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.279, pruned_loss=0.05157, over 939851.69 frames. ], batch size: 10, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:54:58,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=159194.66666666666, ans=0.0 +2024-07-28 12:54:58,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=159194.66666666666, ans=0.0 +2024-07-28 12:55:00,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.73 vs. limit=15.0 +2024-07-28 12:55:04,117 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.63 vs. 
limit=15.0 +2024-07-28 12:55:16,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=159221.33333333334, ans=0.0 +2024-07-28 12:55:21,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159234.66666666666, ans=0.0 +2024-07-28 12:55:24,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159248.0, ans=0.125 +2024-07-28 12:55:30,920 INFO [train.py:1114] (0/4) Epoch 12, batch 7000, loss[loss=0.203, simple_loss=0.2932, pruned_loss=0.05643, over 4572.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2789, pruned_loss=0.0516, over 938068.31 frames. ], batch size: 17, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:55:36,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=159274.66666666666, ans=0.0 +2024-07-28 12:55:37,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.37 vs. limit=22.5 +2024-07-28 12:55:40,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=159274.66666666666, ans=0.2 +2024-07-28 12:55:53,275 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.935e+01 5.641e+01 6.482e+01 7.445e+01 1.063e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 12:56:02,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=159314.66666666666, ans=0.2 +2024-07-28 12:56:07,653 INFO [train.py:1114] (0/4) Epoch 12, batch 7050, loss[loss=0.2325, simple_loss=0.3082, pruned_loss=0.07842, over 4785.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2805, pruned_loss=0.05244, over 941490.79 frames. ], batch size: 19, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:56:10,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=159328.0, ans=0.1 +2024-07-28 12:56:14,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-28 12:56:15,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=159341.33333333334, ans=0.0 +2024-07-28 12:56:21,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=159354.66666666666, ans=0.5 +2024-07-28 12:56:33,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=159368.0, ans=0.125 +2024-07-28 12:56:33,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=159368.0, ans=0.0 +2024-07-28 12:56:34,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=159368.0, ans=0.0 +2024-07-28 12:56:39,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=159381.33333333334, ans=0.0 +2024-07-28 12:56:42,931 INFO [train.py:1114] (0/4) Epoch 12, batch 7100, loss[loss=0.2007, simple_loss=0.2964, pruned_loss=0.05249, over 4792.00 frames. 
], tot_loss[loss=0.1927, simple_loss=0.2804, pruned_loss=0.05244, over 936303.59 frames. ], batch size: 15, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:56:45,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=159394.66666666666, ans=0.02 +2024-07-28 12:56:48,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=159394.66666666666, ans=0.05 +2024-07-28 12:56:49,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=159394.66666666666, ans=0.0 +2024-07-28 12:56:50,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159408.0, ans=0.125 +2024-07-28 12:57:06,527 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.638e+01 6.257e+01 7.591e+01 1.588e+02, threshold=1.251e+02, percent-clipped=2.0 +2024-07-28 12:57:07,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=159434.66666666666, ans=0.0 +2024-07-28 12:57:11,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159448.0, ans=0.1 +2024-07-28 12:57:16,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=159461.33333333334, ans=0.07 +2024-07-28 12:57:16,941 INFO [train.py:1114] (0/4) Epoch 12, batch 7150, loss[loss=0.1941, simple_loss=0.2885, pruned_loss=0.04986, over 4545.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2791, pruned_loss=0.05207, over 937268.63 frames. ], batch size: 21, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:57:24,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=159474.66666666666, ans=0.0 +2024-07-28 12:57:24,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159474.66666666666, ans=0.1 +2024-07-28 12:57:26,508 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-07-28 12:57:34,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.87 vs. limit=10.0 +2024-07-28 12:57:37,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=159501.33333333334, ans=0.125 +2024-07-28 12:57:50,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=159514.66666666666, ans=0.0 +2024-07-28 12:57:51,496 INFO [train.py:1114] (0/4) Epoch 12, batch 7200, loss[loss=0.2151, simple_loss=0.2932, pruned_loss=0.06853, over 4801.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2806, pruned_loss=0.05276, over 937665.40 frames. 
], batch size: 15, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:57:55,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=159528.0, ans=10.0 +2024-07-28 12:57:57,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=159541.33333333334, ans=0.0 +2024-07-28 12:58:00,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=159541.33333333334, ans=0.0 +2024-07-28 12:58:05,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=159541.33333333334, ans=0.125 +2024-07-28 12:58:12,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.35 vs. limit=15.0 +2024-07-28 12:58:13,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=159554.66666666666, ans=0.2 +2024-07-28 12:58:13,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=159554.66666666666, ans=0.07 +2024-07-28 12:58:16,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=159568.0, ans=0.125 +2024-07-28 12:58:17,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159568.0, ans=0.125 +2024-07-28 12:58:18,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=159568.0, ans=0.2 +2024-07-28 12:58:18,802 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.813e+01 6.361e+01 7.395e+01 9.715e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 12:58:19,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=159568.0, ans=0.0 +2024-07-28 12:58:27,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.64 vs. limit=15.0 +2024-07-28 12:58:29,574 INFO [train.py:1114] (0/4) Epoch 12, batch 7250, loss[loss=0.1753, simple_loss=0.2604, pruned_loss=0.04511, over 4850.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2797, pruned_loss=0.05224, over 939263.98 frames. ], batch size: 12, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:58:40,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159608.0, ans=0.125 +2024-07-28 12:58:41,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=159621.33333333334, ans=0.0 +2024-07-28 12:58:53,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159634.66666666666, ans=0.125 +2024-07-28 12:59:01,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=159648.0, ans=0.125 +2024-07-28 12:59:02,435 INFO [train.py:1114] (0/4) Epoch 12, batch 7300, loss[loss=0.1725, simple_loss=0.2633, pruned_loss=0.04084, over 4854.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2788, pruned_loss=0.0515, over 939388.83 frames. 
], batch size: 12, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:59:03,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=159661.33333333334, ans=0.0 +2024-07-28 12:59:10,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159674.66666666666, ans=0.125 +2024-07-28 12:59:16,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.26 vs. limit=22.5 +2024-07-28 12:59:20,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=159688.0, ans=0.0 +2024-07-28 12:59:23,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159701.33333333334, ans=0.0 +2024-07-28 12:59:27,310 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.393e+01 5.789e+01 6.409e+01 1.096e+02, threshold=1.158e+02, percent-clipped=0.0 +2024-07-28 12:59:30,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=159714.66666666666, ans=0.0 +2024-07-28 12:59:36,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=159714.66666666666, ans=10.0 +2024-07-28 12:59:37,714 INFO [train.py:1114] (0/4) Epoch 12, batch 7350, loss[loss=0.1891, simple_loss=0.2834, pruned_loss=0.0474, over 4642.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2793, pruned_loss=0.05138, over 938857.02 frames. ], batch size: 12, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 12:59:48,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=159728.0, ans=0.125 +2024-07-28 12:59:49,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159728.0, ans=0.125 +2024-07-28 12:59:49,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=159728.0, ans=0.125 +2024-07-28 12:59:49,695 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.20 vs. limit=15.0 +2024-07-28 12:59:50,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=159741.33333333334, ans=0.125 +2024-07-28 13:00:08,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=159768.0, ans=0.2 +2024-07-28 13:00:19,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=159781.33333333334, ans=0.125 +2024-07-28 13:00:23,178 INFO [train.py:1114] (0/4) Epoch 12, batch 7400, loss[loss=0.2292, simple_loss=0.3162, pruned_loss=0.0711, over 4691.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2787, pruned_loss=0.05067, over 940252.55 frames. 
], batch size: 13, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:00:41,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159808.0, ans=0.1 +2024-07-28 13:00:47,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.03 vs. limit=15.0 +2024-07-28 13:00:55,285 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.910e+01 6.477e+01 7.704e+01 1.281e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 13:00:56,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159834.66666666666, ans=0.125 +2024-07-28 13:01:05,628 INFO [train.py:1114] (0/4) Epoch 12, batch 7450, loss[loss=0.1873, simple_loss=0.2683, pruned_loss=0.05312, over 4614.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2787, pruned_loss=0.05126, over 937806.01 frames. ], batch size: 11, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:01:13,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=159874.66666666666, ans=0.125 +2024-07-28 13:01:26,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=159901.33333333334, ans=0.125 +2024-07-28 13:01:27,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.83 vs. limit=15.0 +2024-07-28 13:01:29,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=159901.33333333334, ans=0.07 +2024-07-28 13:01:37,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=159928.0, ans=0.125 +2024-07-28 13:01:38,266 INFO [train.py:1114] (0/4) Epoch 12, batch 7500, loss[loss=0.2177, simple_loss=0.3083, pruned_loss=0.06355, over 3279.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2785, pruned_loss=0.05107, over 935998.99 frames. ], batch size: 35, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:01:42,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=159928.0, ans=0.0 +2024-07-28 13:01:46,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=159928.0, ans=0.0 +2024-07-28 13:01:57,772 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.84 vs. limit=15.0 +2024-07-28 13:02:07,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159941.33333333334, ans=0.125 +2024-07-28 13:02:08,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=159941.33333333334, ans=0.125 +2024-07-28 13:02:11,887 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. 
limit=6.0 +2024-07-28 13:02:13,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=159954.66666666666, ans=0.125 +2024-07-28 13:02:14,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159954.66666666666, ans=0.125 +2024-07-28 13:05:16,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.752e+01 6.223e+01 6.904e+01 1.181e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 13:05:21,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159981.33333333334, ans=0.125 +2024-07-28 13:05:24,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=159981.33333333334, ans=0.0 +2024-07-28 13:05:26,949 INFO [train.py:1114] (0/4) Epoch 12, batch 7550, loss[loss=0.2153, simple_loss=0.3009, pruned_loss=0.06486, over 4657.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2793, pruned_loss=0.05156, over 936115.99 frames. ], batch size: 17, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:05:28,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.71 vs. limit=15.0 +2024-07-28 13:05:29,197 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-120000.pt +2024-07-28 13:05:35,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.87 vs. limit=5.0 +2024-07-28 13:05:45,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160021.33333333334, ans=0.1 +2024-07-28 13:05:47,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=160021.33333333334, ans=0.125 +2024-07-28 13:05:54,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160034.66666666666, ans=0.125 +2024-07-28 13:05:59,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=15.0 +2024-07-28 13:06:03,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=160048.0, ans=0.125 +2024-07-28 13:06:04,265 INFO [train.py:1114] (0/4) Epoch 12, batch 7600, loss[loss=0.2067, simple_loss=0.2907, pruned_loss=0.06133, over 4811.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2793, pruned_loss=0.05162, over 937825.49 frames. ], batch size: 14, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:06:05,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.21 vs. 
limit=15.0 +2024-07-28 13:06:08,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=160061.33333333334, ans=0.2 +2024-07-28 13:06:14,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=160074.66666666666, ans=0.0 +2024-07-28 13:06:17,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.74 vs. limit=15.0 +2024-07-28 13:06:26,387 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.580e+01 6.028e+01 7.060e+01 1.012e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 13:06:36,749 INFO [train.py:1114] (0/4) Epoch 12, batch 7650, loss[loss=0.1653, simple_loss=0.2478, pruned_loss=0.04138, over 4937.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2794, pruned_loss=0.05184, over 936534.39 frames. ], batch size: 12, lr: 6.19e-03, grad_scale: 64.0 +2024-07-28 13:06:42,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160128.0, ans=0.1 +2024-07-28 13:06:45,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=160141.33333333334, ans=0.125 +2024-07-28 13:06:46,220 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.65 vs. limit=22.5 +2024-07-28 13:06:52,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160154.66666666666, ans=0.125 +2024-07-28 13:07:01,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160168.0, ans=0.1 +2024-07-28 13:07:03,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=160168.0, ans=0.0 +2024-07-28 13:07:03,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=160181.33333333334, ans=0.0 +2024-07-28 13:07:08,586 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:07:10,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.96 vs. limit=15.0 +2024-07-28 13:07:11,010 INFO [train.py:1114] (0/4) Epoch 12, batch 7700, loss[loss=0.2046, simple_loss=0.2859, pruned_loss=0.06166, over 4694.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.28, pruned_loss=0.05203, over 934443.41 frames. ], batch size: 13, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:07:11,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.23 vs. limit=6.0 +2024-07-28 13:07:12,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. 
limit=15.0 +2024-07-28 13:07:32,959 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.585e+01 6.116e+01 6.946e+01 9.555e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 13:07:34,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0 +2024-07-28 13:07:40,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=160248.0, ans=0.0 +2024-07-28 13:07:43,367 INFO [train.py:1114] (0/4) Epoch 12, batch 7750, loss[loss=0.1839, simple_loss=0.2875, pruned_loss=0.0401, over 4926.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.281, pruned_loss=0.05201, over 935677.47 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:07:49,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=160274.66666666666, ans=0.025 +2024-07-28 13:08:03,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160301.33333333334, ans=0.125 +2024-07-28 13:08:16,058 INFO [train.py:1114] (0/4) Epoch 12, batch 7800, loss[loss=0.1895, simple_loss=0.2958, pruned_loss=0.04156, over 4654.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2817, pruned_loss=0.05251, over 937288.77 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:08:21,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=160341.33333333334, ans=0.125 +2024-07-28 13:08:38,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.545e+01 6.012e+01 6.981e+01 9.442e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 13:08:44,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.83 vs. limit=22.5 +2024-07-28 13:08:48,374 INFO [train.py:1114] (0/4) Epoch 12, batch 7850, loss[loss=0.1846, simple_loss=0.2591, pruned_loss=0.05508, over 4518.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2814, pruned_loss=0.05225, over 936181.05 frames. ], batch size: 10, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:08:49,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=160394.66666666666, ans=0.2 +2024-07-28 13:08:51,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=15.0 +2024-07-28 13:08:53,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=160394.66666666666, ans=0.0 +2024-07-28 13:08:53,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.86 vs. 
limit=15.0 +2024-07-28 13:08:54,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=160408.0, ans=0.2 +2024-07-28 13:08:55,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=160408.0, ans=0.0 +2024-07-28 13:09:12,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=160434.66666666666, ans=0.0 +2024-07-28 13:09:12,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.40 vs. limit=22.5 +2024-07-28 13:09:20,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160461.33333333334, ans=0.125 +2024-07-28 13:09:21,154 INFO [train.py:1114] (0/4) Epoch 12, batch 7900, loss[loss=0.196, simple_loss=0.2877, pruned_loss=0.05211, over 4875.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2826, pruned_loss=0.05282, over 933456.31 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:30,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=160474.66666666666, ans=0.0 +2024-07-28 13:09:37,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.96 vs. limit=15.0 +2024-07-28 13:09:41,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=160501.33333333334, ans=0.125 +2024-07-28 13:09:43,188 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.515e+01 6.026e+01 6.730e+01 9.606e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 13:09:45,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160514.66666666666, ans=0.1 +2024-07-28 13:09:53,236 INFO [train.py:1114] (0/4) Epoch 12, batch 7950, loss[loss=0.2484, simple_loss=0.3126, pruned_loss=0.09209, over 3041.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2809, pruned_loss=0.05205, over 935396.03 frames. ], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:53,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=160528.0, ans=0.0 +2024-07-28 13:10:09,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160554.66666666666, ans=0.0 +2024-07-28 13:10:09,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-07-28 13:10:13,184 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-07-28 13:10:17,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=160568.0, ans=0.0 +2024-07-28 13:10:25,952 INFO [train.py:1114] (0/4) Epoch 12, batch 8000, loss[loss=0.1856, simple_loss=0.264, pruned_loss=0.05356, over 4614.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2795, pruned_loss=0.05177, over 934185.00 frames. 
], batch size: 11, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:10:26,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-07-28 13:10:27,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160594.66666666666, ans=0.1 +2024-07-28 13:10:32,584 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:10:34,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=160608.0, ans=0.125 +2024-07-28 13:10:36,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.18 vs. limit=15.0 +2024-07-28 13:10:50,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.959e+01 6.918e+01 8.297e+01 1.204e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 13:10:51,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160634.66666666666, ans=0.125 +2024-07-28 13:10:53,263 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0 +2024-07-28 13:10:58,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=160648.0, ans=0.125 +2024-07-28 13:11:00,332 INFO [train.py:1114] (0/4) Epoch 12, batch 8050, loss[loss=0.1485, simple_loss=0.2353, pruned_loss=0.03082, over 4808.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2792, pruned_loss=0.05155, over 933928.18 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:11:01,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=160661.33333333334, ans=0.125 +2024-07-28 13:11:13,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=160688.0, ans=0.0 +2024-07-28 13:11:18,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=160688.0, ans=0.0 +2024-07-28 13:11:22,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=15.0 +2024-07-28 13:11:32,801 INFO [train.py:1114] (0/4) Epoch 12, batch 8100, loss[loss=0.2253, simple_loss=0.321, pruned_loss=0.06485, over 4806.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2798, pruned_loss=0.05158, over 933326.96 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:11:38,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=160741.33333333334, ans=0.125 +2024-07-28 13:11:42,194 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.17 vs. limit=22.5 +2024-07-28 13:11:42,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.82 vs. 
limit=22.5 +2024-07-28 13:11:45,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160754.66666666666, ans=0.125 +2024-07-28 13:11:55,388 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.710e+01 6.581e+01 7.221e+01 1.063e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 13:12:06,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.38 vs. limit=22.5 +2024-07-28 13:12:06,479 INFO [train.py:1114] (0/4) Epoch 12, batch 8150, loss[loss=0.2474, simple_loss=0.3232, pruned_loss=0.08573, over 4799.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2791, pruned_loss=0.05163, over 937145.77 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:07,401 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=15.0 +2024-07-28 13:12:10,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=160794.66666666666, ans=0.05 +2024-07-28 13:12:16,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-07-28 13:12:19,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160821.33333333334, ans=0.125 +2024-07-28 13:12:20,888 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:12:34,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=160848.0, ans=0.0 +2024-07-28 13:12:36,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=160848.0, ans=0.125 +2024-07-28 13:12:36,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160848.0, ans=0.125 +2024-07-28 13:12:38,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=160848.0, ans=0.0 +2024-07-28 13:12:39,520 INFO [train.py:1114] (0/4) Epoch 12, batch 8200, loss[loss=0.2064, simple_loss=0.3, pruned_loss=0.05643, over 4806.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2799, pruned_loss=0.05162, over 938331.83 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:44,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.65 vs. 
limit=15.0 +2024-07-28 13:12:45,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160874.66666666666, ans=0.0 +2024-07-28 13:13:03,304 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.545e+01 6.398e+01 7.080e+01 1.151e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:13:06,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=160914.66666666666, ans=0.0 +2024-07-28 13:13:12,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=160928.0, ans=0.0 +2024-07-28 13:13:12,844 INFO [train.py:1114] (0/4) Epoch 12, batch 8250, loss[loss=0.1979, simple_loss=0.2932, pruned_loss=0.05133, over 4900.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2802, pruned_loss=0.05173, over 938756.41 frames. ], batch size: 13, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:16,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160928.0, ans=0.1 +2024-07-28 13:13:18,339 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:13:21,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160941.33333333334, ans=0.1 +2024-07-28 13:13:29,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=160954.66666666666, ans=0.05 +2024-07-28 13:13:32,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160968.0, ans=0.125 +2024-07-28 13:13:35,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=160968.0, ans=0.125 +2024-07-28 13:13:37,810 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:13:45,317 INFO [train.py:1114] (0/4) Epoch 12, batch 8300, loss[loss=0.2007, simple_loss=0.2829, pruned_loss=0.05928, over 4897.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2807, pruned_loss=0.05202, over 938503.17 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:48,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=160994.66666666666, ans=0.2 +2024-07-28 13:13:48,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=160994.66666666666, ans=0.1 +2024-07-28 13:13:50,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=15.0 +2024-07-28 13:13:51,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=161008.0, ans=0.125 +2024-07-28 13:13:53,961 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.33 vs. 
limit=15.0 +2024-07-28 13:14:07,776 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.767e+01 6.201e+01 7.053e+01 1.187e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 13:14:14,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=161048.0, ans=0.95 +2024-07-28 13:14:17,543 INFO [train.py:1114] (0/4) Epoch 12, batch 8350, loss[loss=0.2125, simple_loss=0.3029, pruned_loss=0.06102, over 4807.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2797, pruned_loss=0.05131, over 941286.25 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:17,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=161061.33333333334, ans=0.1 +2024-07-28 13:14:27,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-28 13:14:29,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161074.66666666666, ans=0.1 +2024-07-28 13:14:30,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=161088.0, ans=0.2 +2024-07-28 13:14:32,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=161088.0, ans=0.1 +2024-07-28 13:14:33,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=161088.0, ans=0.0 +2024-07-28 13:14:34,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=161088.0, ans=0.125 +2024-07-28 13:14:39,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161101.33333333334, ans=0.0 +2024-07-28 13:14:41,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=161101.33333333334, ans=0.125 +2024-07-28 13:14:48,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161114.66666666666, ans=0.1 +2024-07-28 13:14:50,099 INFO [train.py:1114] (0/4) Epoch 12, batch 8400, loss[loss=0.1536, simple_loss=0.2359, pruned_loss=0.03568, over 4773.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2804, pruned_loss=0.05181, over 939662.89 frames. ], batch size: 12, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:15:05,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161154.66666666666, ans=0.1 +2024-07-28 13:15:11,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=161168.0, ans=0.0 +2024-07-28 13:15:12,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=161168.0, ans=0.0 +2024-07-28 13:15:13,422 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.899e+01 6.443e+01 7.292e+01 9.298e+01, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 13:15:23,163 INFO [train.py:1114] (0/4) Epoch 12, batch 8450, loss[loss=0.2194, simple_loss=0.3034, pruned_loss=0.06769, over 4800.00 frames. 
], tot_loss[loss=0.1921, simple_loss=0.2804, pruned_loss=0.05187, over 938320.22 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:15:23,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.98 vs. limit=15.0 +2024-07-28 13:15:30,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=161208.0, ans=0.125 +2024-07-28 13:15:34,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.36 vs. limit=22.5 +2024-07-28 13:15:35,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=16.22 vs. limit=15.0 +2024-07-28 13:15:36,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161221.33333333334, ans=0.1 +2024-07-28 13:15:55,156 INFO [train.py:1114] (0/4) Epoch 12, batch 8500, loss[loss=0.1755, simple_loss=0.249, pruned_loss=0.05103, over 4615.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2786, pruned_loss=0.05114, over 938514.58 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:02,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=161274.66666666666, ans=0.125 +2024-07-28 13:16:17,529 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.769e+01 6.331e+01 7.345e+01 1.019e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 13:16:19,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=161301.33333333334, ans=0.0 +2024-07-28 13:16:24,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=161314.66666666666, ans=0.0 +2024-07-28 13:16:26,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=161314.66666666666, ans=0.05 +2024-07-28 13:16:26,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.14 vs. limit=15.0 +2024-07-28 13:16:27,213 INFO [train.py:1114] (0/4) Epoch 12, batch 8550, loss[loss=0.1727, simple_loss=0.2612, pruned_loss=0.04213, over 4805.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2782, pruned_loss=0.05087, over 939439.95 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:28,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=161328.0, ans=0.125 +2024-07-28 13:16:31,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0 +2024-07-28 13:16:56,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.77 vs. limit=15.0 +2024-07-28 13:16:57,834 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.16 vs. 
limit=15.0 +2024-07-28 13:17:00,039 INFO [train.py:1114] (0/4) Epoch 12, batch 8600, loss[loss=0.2071, simple_loss=0.2934, pruned_loss=0.06042, over 4805.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2783, pruned_loss=0.05085, over 939242.50 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:04,688 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:17:07,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=161408.0, ans=0.2 +2024-07-28 13:17:10,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0 +2024-07-28 13:17:16,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=161421.33333333334, ans=0.0 +2024-07-28 13:17:21,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=161434.66666666666, ans=10.0 +2024-07-28 13:17:22,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=161434.66666666666, ans=10.0 +2024-07-28 13:17:23,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.04 vs. limit=22.5 +2024-07-28 13:17:23,226 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.734e+01 6.405e+01 7.244e+01 9.929e+01, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 13:17:25,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161448.0, ans=0.125 +2024-07-28 13:17:32,679 INFO [train.py:1114] (0/4) Epoch 12, batch 8650, loss[loss=0.2201, simple_loss=0.3153, pruned_loss=0.06244, over 4907.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2778, pruned_loss=0.05077, over 940550.69 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:38,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=161461.33333333334, ans=0.025 +2024-07-28 13:17:39,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161461.33333333334, ans=0.1 +2024-07-28 13:17:43,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=161474.66666666666, ans=0.0 +2024-07-28 13:17:45,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=161474.66666666666, ans=0.0 +2024-07-28 13:17:59,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=161501.33333333334, ans=0.2 +2024-07-28 13:18:01,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161501.33333333334, ans=0.1 +2024-07-28 13:18:10,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=161514.66666666666, ans=0.0 +2024-07-28 13:18:11,211 INFO [train.py:1114] (0/4) Epoch 12, batch 8700, loss[loss=0.1891, simple_loss=0.2838, pruned_loss=0.0472, over 4753.00 frames. 
], tot_loss[loss=0.1907, simple_loss=0.2786, pruned_loss=0.05138, over 938045.07 frames. ], batch size: 13, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:18:11,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=161528.0, ans=0.2 +2024-07-28 13:18:19,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161541.33333333334, ans=0.1 +2024-07-28 13:18:21,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161541.33333333334, ans=0.125 +2024-07-28 13:18:22,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161541.33333333334, ans=0.1 +2024-07-28 13:18:26,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=161554.66666666666, ans=0.0 +2024-07-28 13:18:30,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.80 vs. limit=22.5 +2024-07-28 13:18:30,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=161568.0, ans=0.2 +2024-07-28 13:18:34,022 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.645e+01 6.105e+01 7.078e+01 1.033e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 13:18:43,671 INFO [train.py:1114] (0/4) Epoch 12, batch 8750, loss[loss=0.2002, simple_loss=0.284, pruned_loss=0.05821, over 4682.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2779, pruned_loss=0.05116, over 936541.64 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:00,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=161621.33333333334, ans=0.125 +2024-07-28 13:19:00,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=161621.33333333334, ans=0.2 +2024-07-28 13:19:08,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=161634.66666666666, ans=0.125 +2024-07-28 13:19:09,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.68 vs. limit=15.0 +2024-07-28 13:19:16,564 INFO [train.py:1114] (0/4) Epoch 12, batch 8800, loss[loss=0.1789, simple_loss=0.2566, pruned_loss=0.05062, over 4934.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2778, pruned_loss=0.05125, over 937329.47 frames. ], batch size: 14, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:17,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.59 vs. 
limit=22.5 +2024-07-28 13:19:19,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=161661.33333333334, ans=0.2 +2024-07-28 13:19:21,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161661.33333333334, ans=0.1 +2024-07-28 13:19:28,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=161674.66666666666, ans=0.0 +2024-07-28 13:19:37,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=161701.33333333334, ans=0.07 +2024-07-28 13:19:40,343 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.675e+01 6.216e+01 7.145e+01 9.386e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:19:48,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=161714.66666666666, ans=0.0 +2024-07-28 13:19:49,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161728.0, ans=0.125 +2024-07-28 13:19:50,055 INFO [train.py:1114] (0/4) Epoch 12, batch 8850, loss[loss=0.1906, simple_loss=0.2875, pruned_loss=0.04686, over 4502.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2779, pruned_loss=0.05145, over 931984.84 frames. ], batch size: 21, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:56,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=161741.33333333334, ans=0.2 +2024-07-28 13:19:59,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.49 vs. limit=15.0 +2024-07-28 13:20:00,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=161741.33333333334, ans=0.125 +2024-07-28 13:20:10,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-07-28 13:20:10,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.11 vs. limit=15.0 +2024-07-28 13:20:13,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=161768.0, ans=0.0 +2024-07-28 13:20:14,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161768.0, ans=0.125 +2024-07-28 13:20:17,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=161781.33333333334, ans=0.0 +2024-07-28 13:20:23,565 INFO [train.py:1114] (0/4) Epoch 12, batch 8900, loss[loss=0.1719, simple_loss=0.2537, pruned_loss=0.04503, over 4938.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2782, pruned_loss=0.05143, over 930525.99 frames. 
], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:20:25,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161794.66666666666, ans=0.125 +2024-07-28 13:20:34,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=161808.0, ans=0.125 +2024-07-28 13:20:43,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161834.66666666666, ans=0.125 +2024-07-28 13:20:45,942 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 5.749e+01 6.497e+01 7.319e+01 1.057e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 13:20:55,369 INFO [train.py:1114] (0/4) Epoch 12, batch 8950, loss[loss=0.1995, simple_loss=0.2849, pruned_loss=0.05701, over 4442.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2774, pruned_loss=0.05086, over 931761.16 frames. ], batch size: 21, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:20:56,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=161861.33333333334, ans=0.0 +2024-07-28 13:20:58,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161861.33333333334, ans=0.125 +2024-07-28 13:21:05,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.33 vs. limit=22.5 +2024-07-28 13:21:11,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=161888.0, ans=0.04949747468305833 +2024-07-28 13:21:25,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161914.66666666666, ans=0.125 +2024-07-28 13:21:26,981 INFO [train.py:1114] (0/4) Epoch 12, batch 9000, loss[loss=0.2035, simple_loss=0.2749, pruned_loss=0.066, over 4631.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2765, pruned_loss=0.05086, over 934352.42 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:21:26,981 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 13:21:39,259 INFO [train.py:1146] (0/4) Epoch 12, validation: loss=0.1673, simple_loss=0.2713, pruned_loss=0.03166, over 944034.00 frames. 
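
The lines above show the training loop pausing at a fixed batch interval (batch 9000 of epoch 12) to run a full validation pass, while `tot_loss` tracks a decayed running average of the training loss — the fractional frame counts like "over 934352.42 frames" are the giveaway that an exponential decay is applied to the accumulated statistics. A minimal sketch of that pattern follows; it assumes a standard PyTorch setup, and every name in it (`DecayedAverage`, `run_validation`, the `model(feats, targets)` loss signature) is an illustrative placeholder, not icefall's actual API.

```python
# Minimal sketch, assuming a standard PyTorch loop; names are illustrative.
import torch
import torch.nn as nn


class DecayedAverage:
    """Running loss average with exponential decay — one way to produce the
    fractional 'over N frames' counts seen in the tot_loss log lines."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: int) -> None:
        # Old statistics are shrunk by `decay` before the new batch is added,
        # so the average favors recent batches.
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)


@torch.no_grad()
def run_validation(model: nn.Module, valid_loader, device) -> float:
    """Full pass over the validation set, as in 'Computing validation loss'."""
    model.eval()
    total, frames = 0.0, 0
    for feats, targets, num_frames in valid_loader:
        loss = model(feats.to(device), targets.to(device))  # assumed signature
        total += loss.item() * num_frames
        frames += num_frames
    model.train()  # resume training mode before the next batch
    return total / max(frames, 1)
```

In the log above, a pass of this kind produced `validation: loss=0.1673 ... over 944034.00 frames` before training resumed at batch 9000.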
+2024-07-28 13:21:39,260 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 13:21:43,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=161928.0, ans=0.2 +2024-07-28 13:21:47,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161941.33333333334, ans=0.1 +2024-07-28 13:21:59,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=161968.0, ans=0.125 +2024-07-28 13:22:02,101 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.644e+01 6.027e+01 6.782e+01 9.850e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 13:22:02,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=161968.0, ans=0.2 +2024-07-28 13:22:04,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=161981.33333333334, ans=0.07 +2024-07-28 13:22:05,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=161981.33333333334, ans=0.125 +2024-07-28 13:22:11,777 INFO [train.py:1114] (0/4) Epoch 12, batch 9050, loss[loss=0.1875, simple_loss=0.2625, pruned_loss=0.05621, over 4530.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2771, pruned_loss=0.0515, over 934685.79 frames. ], batch size: 10, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:22:20,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162008.0, ans=0.1 +2024-07-28 13:22:37,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=162048.0, ans=0.125 +2024-07-28 13:22:40,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=162048.0, ans=0.2 +2024-07-28 13:22:43,279 INFO [train.py:1114] (0/4) Epoch 12, batch 9100, loss[loss=0.1949, simple_loss=0.287, pruned_loss=0.0514, over 4930.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2762, pruned_loss=0.05083, over 937157.51 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:22:43,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=162061.33333333334, ans=0.0 +2024-07-28 13:22:45,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=162061.33333333334, ans=0.125 +2024-07-28 13:22:50,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=162074.66666666666, ans=0.125 +2024-07-28 13:22:52,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=162074.66666666666, ans=0.125 +2024-07-28 13:23:06,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.681e+01 6.344e+01 7.391e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 13:23:10,798 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.77 vs. 
limit=15.0 +2024-07-28 13:23:15,581 INFO [train.py:1114] (0/4) Epoch 12, batch 9150, loss[loss=0.1611, simple_loss=0.2621, pruned_loss=0.03004, over 4813.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2778, pruned_loss=0.05122, over 936223.67 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:23:18,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=162128.0, ans=0.0 +2024-07-28 13:23:25,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=162141.33333333334, ans=0.0 +2024-07-28 13:23:27,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=162154.66666666666, ans=0.125 +2024-07-28 13:23:32,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=162154.66666666666, ans=0.125 +2024-07-28 13:23:35,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=162168.0, ans=0.0 +2024-07-28 13:23:41,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=162181.33333333334, ans=0.2 +2024-07-28 13:23:47,234 INFO [train.py:1114] (0/4) Epoch 12, batch 9200, loss[loss=0.204, simple_loss=0.2793, pruned_loss=0.06431, over 4842.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.278, pruned_loss=0.05141, over 937838.66 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:23:49,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=162194.66666666666, ans=0.0 +2024-07-28 13:23:54,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=162208.0, ans=0.025 +2024-07-28 13:23:54,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0 +2024-07-28 13:24:03,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=162221.33333333334, ans=0.125 +2024-07-28 13:24:05,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162221.33333333334, ans=0.1 +2024-07-28 13:24:09,770 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.530e+01 6.301e+01 7.507e+01 1.119e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 13:24:15,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=162248.0, ans=0.125 +2024-07-28 13:24:19,316 INFO [train.py:1114] (0/4) Epoch 12, batch 9250, loss[loss=0.1886, simple_loss=0.2773, pruned_loss=0.0499, over 4634.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2776, pruned_loss=0.05115, over 938415.48 frames. ], batch size: 13, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:24:33,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=162288.0, ans=0.125 +2024-07-28 13:24:36,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. 
limit=6.0 +2024-07-28 13:24:43,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=162301.33333333334, ans=0.125 +2024-07-28 13:24:43,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=162301.33333333334, ans=0.1 +2024-07-28 13:24:51,047 INFO [train.py:1114] (0/4) Epoch 12, batch 9300, loss[loss=0.2048, simple_loss=0.2774, pruned_loss=0.0661, over 4779.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2781, pruned_loss=0.05162, over 938372.69 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:24:56,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=162341.33333333334, ans=12.0 +2024-07-28 13:25:00,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.94 vs. limit=15.0 +2024-07-28 13:25:01,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162341.33333333334, ans=0.1 +2024-07-28 13:25:05,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.71 vs. limit=12.0 +2024-07-28 13:25:10,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=162368.0, ans=0.125 +2024-07-28 13:25:13,005 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+01 5.656e+01 6.395e+01 7.099e+01 1.199e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 13:25:13,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=162368.0, ans=0.125 +2024-07-28 13:25:15,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.63 vs. limit=15.0 +2024-07-28 13:25:16,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=162381.33333333334, ans=0.2 +2024-07-28 13:25:21,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162381.33333333334, ans=0.125 +2024-07-28 13:25:21,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=162381.33333333334, ans=0.125 +2024-07-28 13:25:21,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=162381.33333333334, ans=0.0 +2024-07-28 13:25:22,436 INFO [train.py:1114] (0/4) Epoch 12, batch 9350, loss[loss=0.151, simple_loss=0.2391, pruned_loss=0.03151, over 4788.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2784, pruned_loss=0.05138, over 934820.06 frames. ], batch size: 11, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:25:26,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.37 vs. 
limit=15.0 +2024-07-28 13:25:28,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162408.0, ans=0.1 +2024-07-28 13:25:29,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-07-28 13:25:30,912 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-07-28 13:25:32,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.43 vs. limit=22.5 +2024-07-28 13:25:32,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=162408.0, ans=0.2 +2024-07-28 13:25:35,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=162421.33333333334, ans=0.125 +2024-07-28 13:25:41,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=162434.66666666666, ans=0.125 +2024-07-28 13:25:51,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.13 vs. limit=22.5 +2024-07-28 13:25:53,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.03 vs. limit=12.0 +2024-07-28 13:25:53,667 INFO [train.py:1114] (0/4) Epoch 12, batch 9400, loss[loss=0.1966, simple_loss=0.2798, pruned_loss=0.05671, over 4694.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2781, pruned_loss=0.0513, over 932707.61 frames. ], batch size: 13, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:26:05,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=162488.0, ans=0.0 +2024-07-28 13:26:08,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=162488.0, ans=0.0 +2024-07-28 13:26:10,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162488.0, ans=0.1 +2024-07-28 13:26:13,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162501.33333333334, ans=0.1 +2024-07-28 13:26:15,660 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.567e+01 6.093e+01 7.292e+01 1.222e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 13:26:17,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162501.33333333334, ans=0.1 +2024-07-28 13:26:19,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=162514.66666666666, ans=0.125 +2024-07-28 13:26:22,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=162514.66666666666, ans=0.0 +2024-07-28 13:26:25,587 INFO [train.py:1114] (0/4) Epoch 12, batch 9450, loss[loss=0.1514, simple_loss=0.234, pruned_loss=0.03443, over 4809.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2775, pruned_loss=0.05061, over 932548.03 frames. 
], batch size: 11, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:26:34,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=162541.33333333334, ans=0.04949747468305833 +2024-07-28 13:26:44,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=162568.0, ans=0.125 +2024-07-28 13:26:50,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.84 vs. limit=15.0 +2024-07-28 13:26:55,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=162581.33333333334, ans=0.125 +2024-07-28 13:26:56,291 INFO [train.py:1114] (0/4) Epoch 12, batch 9500, loss[loss=0.1753, simple_loss=0.2686, pruned_loss=0.04102, over 4722.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2784, pruned_loss=0.0502, over 935064.80 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:27:00,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=162594.66666666666, ans=0.0 +2024-07-28 13:27:05,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162608.0, ans=0.1 +2024-07-28 13:27:09,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162621.33333333334, ans=0.1 +2024-07-28 13:27:13,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=162621.33333333334, ans=0.2 +2024-07-28 13:27:14,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.76 vs. limit=22.5 +2024-07-28 13:27:16,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162634.66666666666, ans=0.1 +2024-07-28 13:27:16,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=162634.66666666666, ans=0.0 +2024-07-28 13:27:17,795 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.541e+01 6.151e+01 7.043e+01 9.368e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 13:27:20,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=162648.0, ans=0.125 +2024-07-28 13:27:25,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=162648.0, ans=0.2 +2024-07-28 13:27:27,266 INFO [train.py:1114] (0/4) Epoch 12, batch 9550, loss[loss=0.1721, simple_loss=0.2595, pruned_loss=0.04229, over 4778.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2785, pruned_loss=0.05045, over 932206.56 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:27:30,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162661.33333333334, ans=0.1 +2024-07-28 13:27:36,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.87 vs. 
limit=22.5 +2024-07-28 13:27:38,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162674.66666666666, ans=0.125 +2024-07-28 13:27:46,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=162701.33333333334, ans=0.125 +2024-07-28 13:27:57,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.24 vs. limit=15.0 +2024-07-28 13:27:57,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=162714.66666666666, ans=0.125 +2024-07-28 13:27:59,607 INFO [train.py:1114] (0/4) Epoch 12, batch 9600, loss[loss=0.2402, simple_loss=0.3129, pruned_loss=0.08376, over 3636.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2792, pruned_loss=0.05092, over 931529.26 frames. ], batch size: 35, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:28:07,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=162741.33333333334, ans=0.0 +2024-07-28 13:28:07,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=162741.33333333334, ans=0.025 +2024-07-28 13:28:11,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162741.33333333334, ans=0.0 +2024-07-28 13:28:16,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=162754.66666666666, ans=0.025 +2024-07-28 13:28:16,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162754.66666666666, ans=0.1 +2024-07-28 13:28:22,489 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.832e+01 6.811e+01 8.204e+01 1.211e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-28 13:28:23,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162768.0, ans=0.1 +2024-07-28 13:28:27,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=162781.33333333334, ans=0.015 +2024-07-28 13:28:28,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=162781.33333333334, ans=0.125 +2024-07-28 13:28:31,982 INFO [train.py:1114] (0/4) Epoch 12, batch 9650, loss[loss=0.1821, simple_loss=0.2687, pruned_loss=0.04776, over 4852.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2795, pruned_loss=0.05113, over 927347.95 frames. 
], batch size: 16, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:28:41,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=162808.0, ans=0.125 +2024-07-28 13:28:41,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=162808.0, ans=0.0 +2024-07-28 13:28:44,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162821.33333333334, ans=0.1 +2024-07-28 13:28:51,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.49 vs. limit=10.0 +2024-07-28 13:28:57,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=162848.0, ans=0.125 +2024-07-28 13:28:58,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=162848.0, ans=0.125 +2024-07-28 13:28:59,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=162848.0, ans=0.2 +2024-07-28 13:29:02,809 INFO [train.py:1114] (0/4) Epoch 12, batch 9700, loss[loss=0.2276, simple_loss=0.3114, pruned_loss=0.07192, over 4179.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.28, pruned_loss=0.05176, over 925439.48 frames. ], batch size: 25, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:29:04,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=162861.33333333334, ans=0.125 +2024-07-28 13:29:07,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=162861.33333333334, ans=0.0 +2024-07-28 13:29:08,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=162861.33333333334, ans=0.125 +2024-07-28 13:29:25,016 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.692e+01 6.242e+01 7.537e+01 1.052e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 13:29:25,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=162901.33333333334, ans=0.2 +2024-07-28 13:29:27,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0 +2024-07-28 13:29:34,180 INFO [train.py:1114] (0/4) Epoch 12, batch 9750, loss[loss=0.208, simple_loss=0.2894, pruned_loss=0.06334, over 4684.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.28, pruned_loss=0.05214, over 925779.92 frames. 
], batch size: 15, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:29:49,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=162954.66666666666, ans=0.0 +2024-07-28 13:29:50,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=162954.66666666666, ans=0.0 +2024-07-28 13:29:56,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=162968.0, ans=0.04949747468305833 +2024-07-28 13:30:01,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.10 vs. limit=15.0 +2024-07-28 13:30:05,285 INFO [train.py:1114] (0/4) Epoch 12, batch 9800, loss[loss=0.1694, simple_loss=0.2579, pruned_loss=0.0404, over 4695.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2796, pruned_loss=0.05224, over 925296.74 frames. ], batch size: 12, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:30:06,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162994.66666666666, ans=0.1 +2024-07-28 13:30:20,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=163021.33333333334, ans=0.025 +2024-07-28 13:30:23,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=163034.66666666666, ans=0.2 +2024-07-28 13:30:26,611 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.828e+01 6.429e+01 7.275e+01 1.013e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 13:30:35,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163061.33333333334, ans=0.125 +2024-07-28 13:30:36,213 INFO [train.py:1114] (0/4) Epoch 12, batch 9850, loss[loss=0.1996, simple_loss=0.2819, pruned_loss=0.05869, over 4893.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2797, pruned_loss=0.05234, over 927499.88 frames. ], batch size: 15, lr: 6.13e-03, grad_scale: 64.0 +2024-07-28 13:30:38,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=163061.33333333334, ans=0.125 +2024-07-28 13:30:38,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163061.33333333334, ans=0.125 +2024-07-28 13:30:38,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163061.33333333334, ans=0.1 +2024-07-28 13:30:43,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.19 vs. 
limit=22.5 +2024-07-28 13:30:51,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=163088.0, ans=0.0 +2024-07-28 13:30:59,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=163101.33333333334, ans=0.0 +2024-07-28 13:31:02,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=163114.66666666666, ans=0.125 +2024-07-28 13:31:06,867 INFO [train.py:1114] (0/4) Epoch 12, batch 9900, loss[loss=0.208, simple_loss=0.2983, pruned_loss=0.05882, over 4830.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2805, pruned_loss=0.05291, over 926720.91 frames. ], batch size: 16, lr: 6.13e-03, grad_scale: 64.0 +2024-07-28 13:31:25,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=163168.0, ans=0.125 +2024-07-28 13:31:29,352 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.809e+01 6.400e+01 7.583e+01 1.176e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:31:31,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=163181.33333333334, ans=0.0 +2024-07-28 13:31:36,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163181.33333333334, ans=0.125 +2024-07-28 13:31:38,446 INFO [train.py:1114] (0/4) Epoch 12, batch 9950, loss[loss=0.153, simple_loss=0.2431, pruned_loss=0.03147, over 4800.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2809, pruned_loss=0.05312, over 929401.44 frames. ], batch size: 11, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:31:49,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-07-28 13:31:51,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=163221.33333333334, ans=0.125 +2024-07-28 13:31:52,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=163221.33333333334, ans=0.125 +2024-07-28 13:31:55,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.74 vs. limit=15.0 +2024-07-28 13:32:00,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163234.66666666666, ans=0.125 +2024-07-28 13:32:01,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=163234.66666666666, ans=0.0 +2024-07-28 13:32:01,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163234.66666666666, ans=0.1 +2024-07-28 13:32:02,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=163234.66666666666, ans=0.125 +2024-07-28 13:32:09,642 INFO [train.py:1114] (0/4) Epoch 12, batch 10000, loss[loss=0.1967, simple_loss=0.2876, pruned_loss=0.05294, over 4662.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2838, pruned_loss=0.05416, over 926382.17 frames. 
], batch size: 16, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:32:14,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=163261.33333333334, ans=0.0 +2024-07-28 13:32:15,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163274.66666666666, ans=0.1 +2024-07-28 13:32:28,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. limit=15.0 +2024-07-28 13:32:30,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=163301.33333333334, ans=0.125 +2024-07-28 13:32:31,313 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.999e+01 5.883e+01 6.345e+01 7.076e+01 8.600e+01, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 13:32:40,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=163314.66666666666, ans=0.125 +2024-07-28 13:32:41,183 INFO [train.py:1114] (0/4) Epoch 12, batch 10050, loss[loss=0.2153, simple_loss=0.2875, pruned_loss=0.07154, over 3411.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2871, pruned_loss=0.05633, over 914872.98 frames. ], batch size: 35, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:32:41,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163328.0, ans=0.125 +2024-07-28 13:32:47,054 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.63 vs. limit=22.5 +2024-07-28 13:32:50,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=163341.33333333334, ans=0.04949747468305833 +2024-07-28 13:32:56,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=163354.66666666666, ans=0.125 +2024-07-28 13:33:01,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=163368.0, ans=0.0 +2024-07-28 13:33:09,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0 +2024-07-28 13:33:10,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163381.33333333334, ans=0.125 +2024-07-28 13:33:12,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.64 vs. limit=15.0 +2024-07-28 13:33:14,567 INFO [train.py:1114] (0/4) Epoch 12, batch 10100, loss[loss=0.2448, simple_loss=0.3114, pruned_loss=0.08908, over 3363.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2911, pruned_loss=0.06092, over 861520.39 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:33:21,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=163408.0, ans=0.125 +2024-07-28 13:33:22,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.57 vs. 
limit=15.0 +2024-07-28 13:33:25,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163408.0, ans=0.125 +2024-07-28 13:33:29,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=163421.33333333334, ans=0.2 +2024-07-28 13:33:37,793 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.859e+01 6.715e+01 7.355e+01 7.791e+01 1.006e+02, threshold=1.471e+02, percent-clipped=0.0 +2024-07-28 13:33:46,748 INFO [train.py:1114] (0/4) Epoch 12, batch 10150, loss[loss=0.2041, simple_loss=0.2845, pruned_loss=0.06188, over 3378.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.295, pruned_loss=0.06522, over 820339.06 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:33:54,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=163474.66666666666, ans=0.5 +2024-07-28 13:33:55,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=163474.66666666666, ans=0.0 +2024-07-28 13:33:57,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163474.66666666666, ans=0.0 +2024-07-28 13:33:59,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=163488.0, ans=0.125 +2024-07-28 13:34:16,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=163514.66666666666, ans=0.0 +2024-07-28 13:34:18,694 INFO [train.py:1114] (0/4) Epoch 12, batch 10200, loss[loss=0.2285, simple_loss=0.3167, pruned_loss=0.07017, over 3329.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2978, pruned_loss=0.06842, over 788676.91 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:34:32,353 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-12.pt +2024-07-28 13:35:14,754 INFO [train.py:1114] (0/4) Epoch 13, batch 0, loss[loss=0.1607, simple_loss=0.2528, pruned_loss=0.03428, over 4857.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2528, pruned_loss=0.03428, over 4857.00 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:35:14,755 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 13:35:20,155 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8357, 5.1748, 5.0006, 5.5870], device='cuda:0') +2024-07-28 13:35:26,208 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1689, simple_loss=0.2745, pruned_loss=0.03167, over 944034.00 frames. 
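
Around the epoch boundary above (checkpoint saved to `zipformer/libri/exp/epoch-12.pt`, then epoch 13 starting from batch 0), the recurring `WARNING [optim.py:487]` lines report five grad-norm quartiles, a clipping threshold, and `percent-clipped`. The exact rule lives in icefall's `optim.py`; the sketch below only reimplements the apparent idea — clip at `clipping_scale` times a statistic of recent gradient norms — and `QuartileClipper`, its history window, and the median-based threshold are assumptions for illustration, not the real `ScaledAdam` implementation.

```python
# Hedged sketch of quartile-based gradient clipping; all names illustrative.
import collections
import torch


class QuartileClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 1000):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=history)  # recent grad norms

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        # Total 2-norm of the gradient across all parameters.
        norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])
        ).item()
        self.norms.append(norm)
        history = torch.tensor(list(self.norms))
        # The five values logged as "grad-norm quartiles": min/25%/50%/75%/max.
        q = torch.quantile(history, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # scale times the median
        if norm > threshold:
            for p in params:
                p.grad.mul_(threshold / norm)
        # "percent-clipped" would be the share of recent steps above threshold:
        # 100.0 * (history > threshold).float().mean().item()
        return norm
```

That `percent-clipped=0.0` holds throughout this section means no recent step's gradient norm exceeded the logged threshold, so clipping never actually fired here.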
+2024-07-28 13:35:26,209 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 13:35:26,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:26,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:27,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:27,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.94 vs. limit=10.0 +2024-07-28 13:35:28,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163557.33333333334, ans=0.1 +2024-07-28 13:35:33,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=163570.66666666666, ans=0.04949747468305833 +2024-07-28 13:35:35,871 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.887e+01 6.365e+01 6.777e+01 7.332e+01 9.562e+01, threshold=1.355e+02, percent-clipped=0.0 +2024-07-28 13:35:43,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=163584.0, ans=0.125 +2024-07-28 13:35:44,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163584.0, ans=0.1 +2024-07-28 13:35:50,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=163597.33333333334, ans=0.125 +2024-07-28 13:36:00,571 INFO [train.py:1114] (0/4) Epoch 13, batch 50, loss[loss=0.1688, simple_loss=0.2555, pruned_loss=0.04107, over 4611.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2782, pruned_loss=0.05141, over 205820.49 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:36:14,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=163650.66666666666, ans=0.09899494936611666 +2024-07-28 13:36:16,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163650.66666666666, ans=0.0 +2024-07-28 13:36:36,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163677.33333333334, ans=0.125 +2024-07-28 13:36:40,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=163677.33333333334, ans=0.025 +2024-07-28 13:36:42,395 INFO [train.py:1114] (0/4) Epoch 13, batch 100, loss[loss=0.1763, simple_loss=0.2604, pruned_loss=0.04612, over 4634.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.278, pruned_loss=0.05016, over 365314.87 frames. 
], batch size: 12, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:36:43,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=163690.66666666666, ans=0.125 +2024-07-28 13:36:45,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163690.66666666666, ans=0.125 +2024-07-28 13:36:45,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=163690.66666666666, ans=0.09899494936611666 +2024-07-28 13:36:46,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163690.66666666666, ans=0.125 +2024-07-28 13:36:46,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0 +2024-07-28 13:36:48,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=163690.66666666666, ans=0.0 +2024-07-28 13:36:49,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=163704.0, ans=0.125 +2024-07-28 13:36:51,941 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.407e+01 6.133e+01 6.720e+01 8.973e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 13:36:54,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=163704.0, ans=0.125 +2024-07-28 13:36:54,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=163704.0, ans=0.2 +2024-07-28 13:37:07,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-07-28 13:37:17,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163730.66666666666, ans=0.125 +2024-07-28 13:37:20,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=163744.0, ans=0.125 +2024-07-28 13:37:29,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=163744.0, ans=0.2 +2024-07-28 13:37:31,315 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.65 vs. limit=15.0 +2024-07-28 13:37:32,113 INFO [train.py:1114] (0/4) Epoch 13, batch 150, loss[loss=0.1743, simple_loss=0.265, pruned_loss=0.04187, over 4616.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2757, pruned_loss=0.04932, over 493995.23 frames. 
], batch size: 11, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:37:32,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=163757.33333333334, ans=0.0 +2024-07-28 13:37:33,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=163757.33333333334, ans=0.2 +2024-07-28 13:37:37,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=163757.33333333334, ans=0.125 +2024-07-28 13:37:49,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=163784.0, ans=0.07 +2024-07-28 13:37:49,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=163784.0, ans=0.125 +2024-07-28 13:37:57,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=163797.33333333334, ans=0.025 +2024-07-28 13:38:08,722 INFO [train.py:1114] (0/4) Epoch 13, batch 200, loss[loss=0.2166, simple_loss=0.3003, pruned_loss=0.06649, over 4543.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2753, pruned_loss=0.04922, over 593909.20 frames. ], batch size: 21, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:38:08,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=163824.0, ans=0.2 +2024-07-28 13:38:12,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=163824.0, ans=0.2 +2024-07-28 13:38:16,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163837.33333333334, ans=0.125 +2024-07-28 13:38:18,038 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.615e+01 6.251e+01 7.683e+01 1.063e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 13:38:20,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=163837.33333333334, ans=0.09899494936611666 +2024-07-28 13:38:29,320 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:38:42,110 INFO [train.py:1114] (0/4) Epoch 13, batch 250, loss[loss=0.2075, simple_loss=0.3043, pruned_loss=0.05532, over 4625.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2768, pruned_loss=0.05004, over 670214.25 frames. ], batch size: 16, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:38:43,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=163890.66666666666, ans=0.0 +2024-07-28 13:38:46,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=163890.66666666666, ans=0.125 +2024-07-28 13:38:47,369 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.52 vs. limit=15.0 +2024-07-28 13:38:53,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.74 vs. 
limit=5.0 +2024-07-28 13:38:54,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=163917.33333333334, ans=0.2 +2024-07-28 13:38:59,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=15.0 +2024-07-28 13:39:01,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=163917.33333333334, ans=0.0 +2024-07-28 13:39:02,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=163930.66666666666, ans=0.0 +2024-07-28 13:39:04,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163930.66666666666, ans=0.1 +2024-07-28 13:39:16,246 INFO [train.py:1114] (0/4) Epoch 13, batch 300, loss[loss=0.1805, simple_loss=0.2812, pruned_loss=0.03993, over 4782.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2769, pruned_loss=0.05014, over 729716.57 frames. ], batch size: 15, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:39:18,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=163957.33333333334, ans=0.0 +2024-07-28 13:39:18,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163957.33333333334, ans=0.1 +2024-07-28 13:39:25,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.595e+01 6.354e+01 7.540e+01 1.026e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 13:39:26,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=163970.66666666666, ans=0.2 +2024-07-28 13:39:27,429 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.44 vs. limit=15.0 +2024-07-28 13:39:28,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. limit=6.0 +2024-07-28 13:39:47,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=164010.66666666666, ans=0.125 +2024-07-28 13:39:48,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0 +2024-07-28 13:39:49,687 INFO [train.py:1114] (0/4) Epoch 13, batch 350, loss[loss=0.1711, simple_loss=0.2615, pruned_loss=0.04037, over 4941.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2769, pruned_loss=0.04999, over 775917.27 frames. ], batch size: 12, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:39:51,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.12 vs. 
limit=12.0 +2024-07-28 13:39:59,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=164037.33333333334, ans=0.125 +2024-07-28 13:40:16,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=164064.0, ans=0.2 +2024-07-28 13:40:19,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=164077.33333333334, ans=0.0 +2024-07-28 13:40:24,320 INFO [train.py:1114] (0/4) Epoch 13, batch 400, loss[loss=0.187, simple_loss=0.274, pruned_loss=0.05003, over 4686.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2749, pruned_loss=0.04901, over 813500.12 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:40:29,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=164090.66666666666, ans=0.2 +2024-07-28 13:40:29,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=164090.66666666666, ans=0.125 +2024-07-28 13:40:31,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=164104.0, ans=0.125 +2024-07-28 13:40:35,599 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.430e+01 5.754e+01 6.889e+01 9.909e+01, threshold=1.151e+02, percent-clipped=0.0 +2024-07-28 13:40:48,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=164130.66666666666, ans=0.125 +2024-07-28 13:40:56,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164144.0, ans=0.1 +2024-07-28 13:40:59,516 INFO [train.py:1114] (0/4) Epoch 13, batch 450, loss[loss=0.2011, simple_loss=0.2891, pruned_loss=0.05658, over 4640.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2763, pruned_loss=0.0498, over 838923.90 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:41:04,667 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 13:41:13,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=15.0 +2024-07-28 13:41:16,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=164184.0, ans=0.125 +2024-07-28 13:41:20,175 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:41:23,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.66 vs. limit=15.0 +2024-07-28 13:41:32,445 INFO [train.py:1114] (0/4) Epoch 13, batch 500, loss[loss=0.255, simple_loss=0.3445, pruned_loss=0.0827, over 4665.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.276, pruned_loss=0.04913, over 861274.88 frames. 
], batch size: 15, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:41:37,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=164224.0, ans=0.125 +2024-07-28 13:41:41,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.521e+01 6.089e+01 6.841e+01 9.670e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 13:41:44,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=164237.33333333334, ans=10.0 +2024-07-28 13:41:50,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=164250.66666666666, ans=0.0 +2024-07-28 13:41:58,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0 +2024-07-28 13:41:59,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=164277.33333333334, ans=0.0 +2024-07-28 13:42:02,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=164277.33333333334, ans=0.09899494936611666 +2024-07-28 13:42:02,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164277.33333333334, ans=0.1 +2024-07-28 13:42:03,744 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:42:04,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=164277.33333333334, ans=0.07 +2024-07-28 13:42:06,193 INFO [train.py:1114] (0/4) Epoch 13, batch 550, loss[loss=0.2363, simple_loss=0.3239, pruned_loss=0.07437, over 4646.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2758, pruned_loss=0.04943, over 877212.26 frames. ], batch size: 17, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:42:08,571 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.87 vs. limit=15.0 +2024-07-28 13:42:20,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164317.33333333334, ans=0.1 +2024-07-28 13:42:21,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.55 vs. limit=15.0 +2024-07-28 13:42:21,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=164317.33333333334, ans=0.2 +2024-07-28 13:42:30,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=164330.66666666666, ans=0.025 +2024-07-28 13:42:30,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164330.66666666666, ans=0.0 +2024-07-28 13:42:39,328 INFO [train.py:1114] (0/4) Epoch 13, batch 600, loss[loss=0.1913, simple_loss=0.2784, pruned_loss=0.05216, over 4624.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2761, pruned_loss=0.04964, over 892043.14 frames. 
], batch size: 16, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:42:42,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=164357.33333333334, ans=0.2 +2024-07-28 13:42:48,633 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.528e+01 6.337e+01 7.273e+01 1.055e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 13:42:48,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=164370.66666666666, ans=0.0 +2024-07-28 13:42:50,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=164370.66666666666, ans=0.0 +2024-07-28 13:42:53,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164384.0, ans=0.1 +2024-07-28 13:42:55,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=164384.0, ans=0.125 +2024-07-28 13:42:58,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164397.33333333334, ans=0.1 +2024-07-28 13:42:58,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164397.33333333334, ans=0.1 +2024-07-28 13:43:09,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164397.33333333334, ans=0.1 +2024-07-28 13:43:19,287 INFO [train.py:1114] (0/4) Epoch 13, batch 650, loss[loss=0.1559, simple_loss=0.2536, pruned_loss=0.02905, over 4758.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2751, pruned_loss=0.04886, over 903926.64 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:43:19,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=164424.0, ans=0.04949747468305833 +2024-07-28 13:43:23,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=164424.0, ans=0.0 +2024-07-28 13:43:39,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=164450.66666666666, ans=0.025 +2024-07-28 13:43:43,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=164464.0, ans=0.2 +2024-07-28 13:43:50,151 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:43:53,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=15.0 +2024-07-28 13:43:54,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=164477.33333333334, ans=15.0 +2024-07-28 13:43:55,022 INFO [train.py:1114] (0/4) Epoch 13, batch 700, loss[loss=0.1672, simple_loss=0.2534, pruned_loss=0.04051, over 4635.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2755, pruned_loss=0.0488, over 911695.25 frames. 
], batch size: 12, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:43:57,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164490.66666666666, ans=0.125 +2024-07-28 13:44:04,379 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.621e+01 6.058e+01 7.095e+01 1.199e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:44:22,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=164517.33333333334, ans=0.1 +2024-07-28 13:44:23,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=164517.33333333334, ans=0.125 +2024-07-28 13:44:38,250 INFO [train.py:1114] (0/4) Epoch 13, batch 750, loss[loss=0.1843, simple_loss=0.28, pruned_loss=0.04424, over 4694.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2764, pruned_loss=0.04967, over 918162.56 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:44:43,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=164557.33333333334, ans=0.0 +2024-07-28 13:44:58,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164584.0, ans=0.1 +2024-07-28 13:45:00,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=164597.33333333334, ans=0.0 +2024-07-28 13:45:05,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=164597.33333333334, ans=0.04949747468305833 +2024-07-28 13:45:07,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=164610.66666666666, ans=0.0 +2024-07-28 13:45:08,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.45 vs. limit=6.0 +2024-07-28 13:45:13,493 INFO [train.py:1114] (0/4) Epoch 13, batch 800, loss[loss=0.1806, simple_loss=0.2571, pruned_loss=0.05206, over 4855.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2772, pruned_loss=0.05019, over 923080.71 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:45:18,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=164624.0, ans=0.0 +2024-07-28 13:45:19,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=164637.33333333334, ans=0.125 +2024-07-28 13:45:22,567 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+01 5.509e+01 5.892e+01 6.560e+01 1.053e+02, threshold=1.178e+02, percent-clipped=0.0 +2024-07-28 13:45:45,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.18 vs. 
limit=10.0 +2024-07-28 13:45:46,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164664.0, ans=0.0 +2024-07-28 13:45:53,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=164664.0, ans=0.0 +2024-07-28 13:45:59,380 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 13:46:00,991 INFO [train.py:1114] (0/4) Epoch 13, batch 850, loss[loss=0.1777, simple_loss=0.268, pruned_loss=0.04366, over 4655.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2768, pruned_loss=0.04966, over 926918.23 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:46:05,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164690.66666666666, ans=0.125 +2024-07-28 13:46:06,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.31 vs. limit=6.0 +2024-07-28 13:46:09,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=164704.0, ans=0.1 +2024-07-28 13:46:51,613 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:46:51,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164730.66666666666, ans=0.125 +2024-07-28 13:47:03,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164744.0, ans=0.0 +2024-07-28 13:47:04,292 INFO [train.py:1114] (0/4) Epoch 13, batch 900, loss[loss=0.1847, simple_loss=0.263, pruned_loss=0.05323, over 4837.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2768, pruned_loss=0.04978, over 927441.50 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:47:07,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.85 vs. limit=10.0 +2024-07-28 13:47:10,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=164770.66666666666, ans=0.2 +2024-07-28 13:47:13,467 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.792e+01 6.438e+01 7.268e+01 1.084e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 13:47:21,814 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:47:28,917 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.70 vs. limit=15.0 +2024-07-28 13:47:29,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=164797.33333333334, ans=0.125 +2024-07-28 13:47:29,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=164797.33333333334, ans=0.125 +2024-07-28 13:47:38,017 INFO [train.py:1114] (0/4) Epoch 13, batch 950, loss[loss=0.1712, simple_loss=0.2545, pruned_loss=0.04392, over 4777.00 frames. 
], tot_loss[loss=0.1873, simple_loss=0.2764, pruned_loss=0.04907, over 929211.97 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:47:54,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164850.66666666666, ans=0.1 +2024-07-28 13:47:57,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=164864.0, ans=0.0 +2024-07-28 13:48:03,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.27 vs. limit=15.0 +2024-07-28 13:48:08,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=164877.33333333334, ans=0.2 +2024-07-28 13:48:11,275 INFO [train.py:1114] (0/4) Epoch 13, batch 1000, loss[loss=0.1708, simple_loss=0.2685, pruned_loss=0.03657, over 4965.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2769, pruned_loss=0.04959, over 929756.41 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:48:14,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164890.66666666666, ans=0.125 +2024-07-28 13:48:20,577 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.622e+01 6.136e+01 7.218e+01 8.877e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 13:48:20,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164904.0, ans=0.125 +2024-07-28 13:48:44,471 INFO [train.py:1114] (0/4) Epoch 13, batch 1050, loss[loss=0.1797, simple_loss=0.2803, pruned_loss=0.03956, over 4869.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04932, over 931822.91 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:48:44,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164957.33333333334, ans=0.125 +2024-07-28 13:48:51,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164957.33333333334, ans=0.0 +2024-07-28 13:49:00,953 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.69 vs. 
limit=15.0 +2024-07-28 13:49:04,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=164984.0, ans=0.0 +2024-07-28 13:49:04,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=164984.0, ans=0.125 +2024-07-28 13:49:04,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=164984.0, ans=0.0 +2024-07-28 13:49:09,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=164997.33333333334, ans=22.5 +2024-07-28 13:49:11,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=164997.33333333334, ans=0.125 +2024-07-28 13:49:12,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-28 13:49:23,110 INFO [train.py:1114] (0/4) Epoch 13, batch 1100, loss[loss=0.1964, simple_loss=0.2739, pruned_loss=0.05946, over 4898.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04933, over 934342.65 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:49:50,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+01 5.557e+01 6.150e+01 6.948e+01 9.915e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 13:50:16,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.44 vs. limit=22.5 +2024-07-28 13:50:38,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=165077.33333333334, ans=0.5 +2024-07-28 13:50:46,036 INFO [train.py:1114] (0/4) Epoch 13, batch 1150, loss[loss=0.1487, simple_loss=0.2349, pruned_loss=0.03129, over 4895.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2763, pruned_loss=0.04957, over 934145.38 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:50:47,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=165090.66666666666, ans=0.125 +2024-07-28 13:51:01,947 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.43 vs. limit=15.0 +2024-07-28 13:51:02,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=165117.33333333334, ans=0.125 +2024-07-28 13:51:07,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=165130.66666666666, ans=0.2 +2024-07-28 13:51:07,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=165130.66666666666, ans=0.125 +2024-07-28 13:51:09,778 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.76 vs. 
limit=15.0 +2024-07-28 13:51:15,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=165144.0, ans=0.0 +2024-07-28 13:51:20,620 INFO [train.py:1114] (0/4) Epoch 13, batch 1200, loss[loss=0.2519, simple_loss=0.3196, pruned_loss=0.09215, over 4872.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2766, pruned_loss=0.04957, over 933331.06 frames. ], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:51:23,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=165157.33333333334, ans=0.125 +2024-07-28 13:51:30,263 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.602e+01 6.215e+01 7.036e+01 9.353e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:51:34,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0 +2024-07-28 13:51:54,631 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:51:55,921 INFO [train.py:1114] (0/4) Epoch 13, batch 1250, loss[loss=0.2179, simple_loss=0.2974, pruned_loss=0.06921, over 4812.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2776, pruned_loss=0.04994, over 937377.04 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:51:58,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=165224.0, ans=0.125 +2024-07-28 13:52:02,817 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:52:13,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=165250.66666666666, ans=0.125 +2024-07-28 13:52:26,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=165277.33333333334, ans=0.07 +2024-07-28 13:52:35,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=15.0 +2024-07-28 13:52:37,847 INFO [train.py:1114] (0/4) Epoch 13, batch 1300, loss[loss=0.2284, simple_loss=0.3119, pruned_loss=0.07248, over 4702.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2766, pruned_loss=0.04936, over 938885.37 frames. ], batch size: 19, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:52:40,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.86 vs. limit=15.0 +2024-07-28 13:52:48,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.215e+01 5.537e+01 6.038e+01 6.682e+01 9.542e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 13:52:58,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=165317.33333333334, ans=0.0 +2024-07-28 13:53:04,481 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-124000.pt +2024-07-28 13:53:19,172 INFO [train.py:1114] (0/4) Epoch 13, batch 1350, loss[loss=0.183, simple_loss=0.277, pruned_loss=0.04453, over 4758.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2755, pruned_loss=0.04891, over 940866.02 frames. 
], batch size: 13, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:53:23,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=165357.33333333334, ans=0.125 +2024-07-28 13:53:32,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=165370.66666666666, ans=0.125 +2024-07-28 13:53:37,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165384.0, ans=0.125 +2024-07-28 13:53:42,068 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:53:54,749 INFO [train.py:1114] (0/4) Epoch 13, batch 1400, loss[loss=0.1645, simple_loss=0.2436, pruned_loss=0.04264, over 4702.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2758, pruned_loss=0.04906, over 942829.56 frames. ], batch size: 11, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:53:59,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=165424.0, ans=0.5 +2024-07-28 13:54:03,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.75 vs. limit=10.0 +2024-07-28 13:54:04,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.870e+01 6.563e+01 8.092e+01 1.108e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 13:54:11,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=165450.66666666666, ans=0.125 +2024-07-28 13:54:22,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=165477.33333333334, ans=0.2 +2024-07-28 13:54:28,181 INFO [train.py:1114] (0/4) Epoch 13, batch 1450, loss[loss=0.209, simple_loss=0.3033, pruned_loss=0.0573, over 4658.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2763, pruned_loss=0.04922, over 942872.65 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:54:30,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=165490.66666666666, ans=0.125 +2024-07-28 13:54:34,660 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.84 vs. limit=22.5 +2024-07-28 13:54:55,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165544.0, ans=0.1 +2024-07-28 13:54:58,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=165544.0, ans=0.125 +2024-07-28 13:55:01,066 INFO [train.py:1114] (0/4) Epoch 13, batch 1500, loss[loss=0.1713, simple_loss=0.2651, pruned_loss=0.03876, over 4807.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2762, pruned_loss=0.0492, over 942348.78 frames. 
], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:10,418 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.997e+01 5.672e+01 6.060e+01 6.827e+01 9.493e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:55:10,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165570.66666666666, ans=0.1 +2024-07-28 13:55:31,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=165610.66666666666, ans=0.0 +2024-07-28 13:55:32,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=165610.66666666666, ans=0.125 +2024-07-28 13:55:34,845 INFO [train.py:1114] (0/4) Epoch 13, batch 1550, loss[loss=0.1783, simple_loss=0.2756, pruned_loss=0.04055, over 4903.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2766, pruned_loss=0.04955, over 938714.65 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:45,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=165637.33333333334, ans=0.0 +2024-07-28 13:56:01,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-07-28 13:56:03,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5 +2024-07-28 13:56:08,015 INFO [train.py:1114] (0/4) Epoch 13, batch 1600, loss[loss=0.1925, simple_loss=0.2898, pruned_loss=0.04762, over 4875.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2768, pruned_loss=0.04992, over 937406.51 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:56:12,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=165690.66666666666, ans=0.0 +2024-07-28 13:56:12,788 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:56:19,117 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.268e+01 7.174e+01 9.497e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:56:29,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.58 vs. limit=22.5 +2024-07-28 13:56:33,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=12.0 +2024-07-28 13:56:41,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=165744.0, ans=0.025 +2024-07-28 13:56:42,677 INFO [train.py:1114] (0/4) Epoch 13, batch 1650, loss[loss=0.1975, simple_loss=0.3102, pruned_loss=0.04246, over 4661.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.277, pruned_loss=0.05026, over 937553.83 frames. 
], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:56:54,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=165770.66666666666, ans=0.125 +2024-07-28 13:56:55,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=165784.0, ans=0.2 +2024-07-28 13:57:05,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.10 vs. limit=22.5 +2024-07-28 13:57:13,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=165810.66666666666, ans=0.125 +2024-07-28 13:57:15,597 INFO [train.py:1114] (0/4) Epoch 13, batch 1700, loss[loss=0.1899, simple_loss=0.2697, pruned_loss=0.05504, over 4710.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2778, pruned_loss=0.05028, over 939380.33 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:26,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0 +2024-07-28 13:57:26,864 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.803e+01 6.268e+01 7.328e+01 1.138e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:57:29,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=165837.33333333334, ans=0.125 +2024-07-28 13:57:29,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.55 vs. limit=22.5 +2024-07-28 13:57:47,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=165877.33333333334, ans=0.125 +2024-07-28 13:57:51,047 INFO [train.py:1114] (0/4) Epoch 13, batch 1750, loss[loss=0.1507, simple_loss=0.24, pruned_loss=0.03073, over 4801.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2774, pruned_loss=0.05005, over 940424.26 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:52,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=165890.66666666666, ans=0.125 +2024-07-28 13:57:53,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=165890.66666666666, ans=0.5 +2024-07-28 13:58:01,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=165904.0, ans=0.0 +2024-07-28 13:58:12,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.98 vs. limit=6.0 +2024-07-28 13:58:16,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=165930.66666666666, ans=0.09899494936611666 +2024-07-28 13:58:19,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.87 vs. limit=22.5 +2024-07-28 13:58:26,047 INFO [train.py:1114] (0/4) Epoch 13, batch 1800, loss[loss=0.1763, simple_loss=0.2652, pruned_loss=0.04375, over 4629.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2773, pruned_loss=0.05031, over 940686.87 frames. 
], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:58:28,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=165957.33333333334, ans=0.2 +2024-07-28 13:58:35,520 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.623e+01 6.283e+01 7.470e+01 1.047e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 13:58:40,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165984.0, ans=0.125 +2024-07-28 13:58:45,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=165997.33333333334, ans=0.2 +2024-07-28 13:58:55,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=166010.66666666666, ans=0.125 +2024-07-28 13:59:01,620 INFO [train.py:1114] (0/4) Epoch 13, batch 1850, loss[loss=0.1678, simple_loss=0.2632, pruned_loss=0.03617, over 4804.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2768, pruned_loss=0.05028, over 940851.89 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:06,167 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=12.0 +2024-07-28 13:59:10,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166037.33333333334, ans=0.125 +2024-07-28 13:59:12,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166037.33333333334, ans=0.1 +2024-07-28 13:59:15,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=166050.66666666666, ans=0.2 +2024-07-28 13:59:26,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.29 vs. limit=15.0 +2024-07-28 13:59:29,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.53 vs. limit=22.5 +2024-07-28 13:59:35,543 INFO [train.py:1114] (0/4) Epoch 13, batch 1900, loss[loss=0.2054, simple_loss=0.2949, pruned_loss=0.05792, over 4660.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2773, pruned_loss=0.05063, over 941891.94 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:39,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166090.66666666666, ans=0.125 +2024-07-28 13:59:44,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.588e+01 6.157e+01 7.144e+01 1.104e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 13:59:55,480 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.65 vs. 
limit=15.0 +2024-07-28 13:59:59,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=166130.66666666666, ans=0.0 +2024-07-28 14:00:02,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=166144.0, ans=0.0 +2024-07-28 14:00:04,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=166144.0, ans=0.025 +2024-07-28 14:00:08,403 INFO [train.py:1114] (0/4) Epoch 13, batch 1950, loss[loss=0.1763, simple_loss=0.2648, pruned_loss=0.04393, over 4899.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2786, pruned_loss=0.05076, over 943831.61 frames. ], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:11,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=166157.33333333334, ans=0.2 +2024-07-28 14:00:25,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166184.0, ans=0.0 +2024-07-28 14:00:26,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166184.0, ans=0.1 +2024-07-28 14:00:26,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=166184.0, ans=0.125 +2024-07-28 14:00:35,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=166210.66666666666, ans=0.0 +2024-07-28 14:00:42,310 INFO [train.py:1114] (0/4) Epoch 13, batch 2000, loss[loss=0.169, simple_loss=0.2458, pruned_loss=0.04607, over 4800.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.279, pruned_loss=0.05074, over 941432.31 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:46,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=166224.0, ans=0.125 +2024-07-28 14:00:47,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166224.0, ans=0.1 +2024-07-28 14:00:47,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166224.0, ans=0.1 +2024-07-28 14:00:49,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166237.33333333334, ans=0.1 +2024-07-28 14:00:51,856 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.856e+01 6.495e+01 7.461e+01 1.148e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 14:00:56,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.97 vs. limit=15.0 +2024-07-28 14:01:16,502 INFO [train.py:1114] (0/4) Epoch 13, batch 2050, loss[loss=0.1575, simple_loss=0.2438, pruned_loss=0.0356, over 4630.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2777, pruned_loss=0.05026, over 939703.70 frames. 
], batch size: 11, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:01:19,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=166290.66666666666, ans=0.0 +2024-07-28 14:01:21,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166290.66666666666, ans=0.125 +2024-07-28 14:01:39,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=166330.66666666666, ans=0.125 +2024-07-28 14:01:51,280 INFO [train.py:1114] (0/4) Epoch 13, batch 2100, loss[loss=0.1672, simple_loss=0.2547, pruned_loss=0.03984, over 4767.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2769, pruned_loss=0.04979, over 941366.11 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:00,482 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.528e+01 6.162e+01 7.061e+01 9.278e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 14:02:07,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=166384.0, ans=0.2 +2024-07-28 14:02:18,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.83 vs. limit=15.0 +2024-07-28 14:02:19,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=166410.66666666666, ans=0.125 +2024-07-28 14:02:24,094 INFO [train.py:1114] (0/4) Epoch 13, batch 2150, loss[loss=0.163, simple_loss=0.262, pruned_loss=0.03196, over 4902.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2759, pruned_loss=0.04957, over 944443.13 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:28,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=166424.0, ans=0.2 +2024-07-28 14:02:46,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=166464.0, ans=0.2 +2024-07-28 14:02:48,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=166464.0, ans=0.025 +2024-07-28 14:02:50,095 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.58 vs. limit=15.0 +2024-07-28 14:02:50,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.81 vs. limit=10.0 +2024-07-28 14:02:59,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=166490.66666666666, ans=0.0 +2024-07-28 14:02:59,555 INFO [train.py:1114] (0/4) Epoch 13, batch 2200, loss[loss=0.1711, simple_loss=0.2697, pruned_loss=0.03624, over 4813.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.04933, over 943813.26 frames. 
], batch size: 14, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:01,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=166490.66666666666, ans=0.125 +2024-07-28 14:03:02,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=166490.66666666666, ans=0.125 +2024-07-28 14:03:08,843 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.698e+01 6.654e+01 7.833e+01 2.383e+02, threshold=1.331e+02, percent-clipped=1.0 +2024-07-28 14:03:08,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=166504.0, ans=0.125 +2024-07-28 14:03:24,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=166530.66666666666, ans=0.125 +2024-07-28 14:03:25,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166544.0, ans=0.0 +2024-07-28 14:03:27,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166544.0, ans=0.1 +2024-07-28 14:03:30,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=166544.0, ans=0.05 +2024-07-28 14:03:32,662 INFO [train.py:1114] (0/4) Epoch 13, batch 2250, loss[loss=0.1715, simple_loss=0.2572, pruned_loss=0.04289, over 4704.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2756, pruned_loss=0.04945, over 942377.23 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:04:07,911 INFO [train.py:1114] (0/4) Epoch 13, batch 2300, loss[loss=0.1635, simple_loss=0.2471, pruned_loss=0.03995, over 4931.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2738, pruned_loss=0.04901, over 940136.13 frames. ], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:17,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=166637.33333333334, ans=0.125 +2024-07-28 14:04:19,879 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.399e+01 5.798e+01 6.898e+01 9.306e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-28 14:04:35,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=166664.0, ans=0.125 +2024-07-28 14:04:35,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166664.0, ans=0.125 +2024-07-28 14:04:40,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0 +2024-07-28 14:04:44,034 INFO [train.py:1114] (0/4) Epoch 13, batch 2350, loss[loss=0.2141, simple_loss=0.299, pruned_loss=0.06459, over 4636.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2741, pruned_loss=0.04936, over 942049.65 frames. 
], batch size: 13, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:48,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166690.66666666666, ans=0.0 +2024-07-28 14:05:13,289 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=20.55 vs. limit=22.5 +2024-07-28 14:05:17,415 INFO [train.py:1114] (0/4) Epoch 13, batch 2400, loss[loss=0.1615, simple_loss=0.2505, pruned_loss=0.03626, over 4647.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2746, pruned_loss=0.04884, over 941639.78 frames. ], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:05:27,492 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.555e+01 6.337e+01 7.554e+01 1.093e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 14:05:28,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=166770.66666666666, ans=0.0 +2024-07-28 14:05:33,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=166784.0, ans=0.0 +2024-07-28 14:05:40,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166797.33333333334, ans=0.125 +2024-07-28 14:05:44,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=166810.66666666666, ans=0.0 +2024-07-28 14:05:45,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=166810.66666666666, ans=0.125 +2024-07-28 14:05:48,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.39 vs. limit=22.5 +2024-07-28 14:05:49,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=12.0 +2024-07-28 14:05:50,688 INFO [train.py:1114] (0/4) Epoch 13, batch 2450, loss[loss=0.173, simple_loss=0.2742, pruned_loss=0.03588, over 4690.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2751, pruned_loss=0.04917, over 936862.47 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:05:57,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=166837.33333333334, ans=0.025 +2024-07-28 14:06:06,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166850.66666666666, ans=0.1 +2024-07-28 14:06:09,148 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.66 vs. 
limit=22.5 +2024-07-28 14:06:13,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166864.0, ans=0.1 +2024-07-28 14:06:13,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=166864.0, ans=0.125 +2024-07-28 14:06:17,495 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:06:23,975 INFO [train.py:1114] (0/4) Epoch 13, batch 2500, loss[loss=0.1924, simple_loss=0.2796, pruned_loss=0.05264, over 4808.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2749, pruned_loss=0.04873, over 938654.91 frames. ], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:06:33,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.445e+01 5.909e+01 6.665e+01 1.016e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-28 14:06:35,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=166904.0, ans=0.0 +2024-07-28 14:06:43,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=166930.66666666666, ans=0.125 +2024-07-28 14:06:54,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=166944.0, ans=0.0 +2024-07-28 14:06:57,786 INFO [train.py:1114] (0/4) Epoch 13, batch 2550, loss[loss=0.1635, simple_loss=0.242, pruned_loss=0.04252, over 4809.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2747, pruned_loss=0.049, over 938386.23 frames. ], batch size: 11, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:06:57,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=166957.33333333334, ans=0.05 +2024-07-28 14:06:58,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=166957.33333333334, ans=0.125 +2024-07-28 14:07:08,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=166970.66666666666, ans=0.125 +2024-07-28 14:07:32,593 INFO [train.py:1114] (0/4) Epoch 13, batch 2600, loss[loss=0.1841, simple_loss=0.2721, pruned_loss=0.04803, over 4892.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2754, pruned_loss=0.04926, over 937143.41 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:32,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. 
limit=15.0 +2024-07-28 14:07:42,338 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.608e+01 6.313e+01 7.050e+01 1.090e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 14:07:47,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=167050.66666666666, ans=0.05 +2024-07-28 14:07:47,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=167050.66666666666, ans=0.125 +2024-07-28 14:07:54,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167064.0, ans=0.1 +2024-07-28 14:08:04,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=167077.33333333334, ans=0.2 +2024-07-28 14:08:07,482 INFO [train.py:1114] (0/4) Epoch 13, batch 2650, loss[loss=0.1798, simple_loss=0.2717, pruned_loss=0.04395, over 4627.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2761, pruned_loss=0.04964, over 939356.17 frames. ], batch size: 16, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:10,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167090.66666666666, ans=0.0 +2024-07-28 14:08:17,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.75 vs. limit=10.0 +2024-07-28 14:08:23,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167117.33333333334, ans=0.125 +2024-07-28 14:08:24,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=167117.33333333334, ans=0.5 +2024-07-28 14:08:29,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=167130.66666666666, ans=0.07 +2024-07-28 14:08:30,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=167130.66666666666, ans=0.07 +2024-07-28 14:08:41,022 INFO [train.py:1114] (0/4) Epoch 13, batch 2700, loss[loss=0.1776, simple_loss=0.2775, pruned_loss=0.03886, over 4742.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2756, pruned_loss=0.04913, over 939034.35 frames. 
], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:47,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=167170.66666666666, ans=0.125 +2024-07-28 14:08:49,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=167170.66666666666, ans=0.125 +2024-07-28 14:08:51,012 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.482e+01 5.925e+01 6.824e+01 1.004e+02, threshold=1.185e+02, percent-clipped=0.0 +2024-07-28 14:08:53,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=167184.0, ans=0.125 +2024-07-28 14:09:04,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167197.33333333334, ans=0.1 +2024-07-28 14:09:04,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=167197.33333333334, ans=0.125 +2024-07-28 14:09:11,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=167210.66666666666, ans=0.2 +2024-07-28 14:09:17,204 INFO [train.py:1114] (0/4) Epoch 13, batch 2750, loss[loss=0.1697, simple_loss=0.2601, pruned_loss=0.03964, over 4711.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.274, pruned_loss=0.04875, over 939350.81 frames. ], batch size: 12, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:19,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167224.0, ans=0.0 +2024-07-28 14:09:24,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=167237.33333333334, ans=0.2 +2024-07-28 14:09:29,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=167250.66666666666, ans=0.125 +2024-07-28 14:09:47,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167277.33333333334, ans=0.125 +2024-07-28 14:09:52,620 INFO [train.py:1114] (0/4) Epoch 13, batch 2800, loss[loss=0.2547, simple_loss=0.3158, pruned_loss=0.09674, over 3503.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2752, pruned_loss=0.04956, over 937201.76 frames. ], batch size: 37, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:53,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. 
limit=6.0 +2024-07-28 14:09:55,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167290.66666666666, ans=0.1 +2024-07-28 14:09:57,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167290.66666666666, ans=0.1 +2024-07-28 14:09:58,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=167290.66666666666, ans=0.125 +2024-07-28 14:09:58,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=167304.0, ans=0.05 +2024-07-28 14:10:01,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.59 vs. limit=15.0 +2024-07-28 14:10:02,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.817e+01 5.664e+01 6.211e+01 7.205e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 14:10:08,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167317.33333333334, ans=0.1 +2024-07-28 14:10:10,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167317.33333333334, ans=0.125 +2024-07-28 14:10:18,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167330.66666666666, ans=0.1 +2024-07-28 14:10:26,045 INFO [train.py:1114] (0/4) Epoch 13, batch 2850, loss[loss=0.21, simple_loss=0.2921, pruned_loss=0.06398, over 4969.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2757, pruned_loss=0.04986, over 935721.75 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:10:38,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=167370.66666666666, ans=0.125 +2024-07-28 14:10:42,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=167384.0, ans=0.125 +2024-07-28 14:10:46,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167397.33333333334, ans=0.0 +2024-07-28 14:10:50,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167397.33333333334, ans=0.125 +2024-07-28 14:10:53,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167410.66666666666, ans=0.125 +2024-07-28 14:10:59,332 INFO [train.py:1114] (0/4) Epoch 13, batch 2900, loss[loss=0.1929, simple_loss=0.2762, pruned_loss=0.05481, over 4824.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2769, pruned_loss=0.04959, over 939677.70 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:05,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.13 vs. 
limit=22.5 +2024-07-28 14:11:09,570 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.851e+01 6.550e+01 7.504e+01 1.142e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 14:11:13,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167450.66666666666, ans=0.1 +2024-07-28 14:11:23,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=167464.0, ans=0.0 +2024-07-28 14:11:33,209 INFO [train.py:1114] (0/4) Epoch 13, batch 2950, loss[loss=0.2021, simple_loss=0.2928, pruned_loss=0.05568, over 4706.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2755, pruned_loss=0.04944, over 938519.73 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:35,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=167490.66666666666, ans=0.125 +2024-07-28 14:11:46,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=167517.33333333334, ans=0.125 +2024-07-28 14:11:50,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=167517.33333333334, ans=0.0 +2024-07-28 14:11:55,652 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.82 vs. limit=22.5 +2024-07-28 14:11:57,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=167530.66666666666, ans=0.125 +2024-07-28 14:12:02,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=167544.0, ans=0.125 +2024-07-28 14:12:04,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=167544.0, ans=0.025 +2024-07-28 14:12:06,772 INFO [train.py:1114] (0/4) Epoch 13, batch 3000, loss[loss=0.1959, simple_loss=0.284, pruned_loss=0.05393, over 4757.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2753, pruned_loss=0.049, over 938549.53 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:06,773 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 14:12:18,644 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1663, simple_loss=0.2701, pruned_loss=0.0312, over 944034.00 frames. +2024-07-28 14:12:18,645 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 14:12:21,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=167557.33333333334, ans=0.2 +2024-07-28 14:12:23,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=167557.33333333334, ans=0.0 +2024-07-28 14:12:29,075 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.635e+01 6.154e+01 7.337e+01 1.248e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 14:12:35,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.60 vs. 
limit=22.5 +2024-07-28 14:12:46,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=167610.66666666666, ans=0.2 +2024-07-28 14:12:51,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=167610.66666666666, ans=0.0 +2024-07-28 14:12:52,952 INFO [train.py:1114] (0/4) Epoch 13, batch 3050, loss[loss=0.2082, simple_loss=0.2955, pruned_loss=0.0605, over 4642.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2761, pruned_loss=0.04905, over 937272.32 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:53,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=167624.0, ans=0.125 +2024-07-28 14:12:55,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=167624.0, ans=0.0 +2024-07-28 14:12:59,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=167637.33333333334, ans=0.125 +2024-07-28 14:13:00,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=167637.33333333334, ans=0.035 +2024-07-28 14:13:00,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=167637.33333333334, ans=0.125 +2024-07-28 14:13:03,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=167637.33333333334, ans=0.025 +2024-07-28 14:13:10,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.66 vs. limit=10.0 +2024-07-28 14:13:27,862 INFO [train.py:1114] (0/4) Epoch 13, batch 3100, loss[loss=0.2105, simple_loss=0.2978, pruned_loss=0.06164, over 4618.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2762, pruned_loss=0.04947, over 938234.70 frames. 
], batch size: 16, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:13:32,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=167690.66666666666, ans=0.125 +2024-07-28 14:13:35,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=167704.0, ans=0.04949747468305833 +2024-07-28 14:13:37,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=167704.0, ans=0.125 +2024-07-28 14:13:37,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.158e+01 5.544e+01 6.108e+01 7.072e+01 9.683e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 14:13:40,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167717.33333333334, ans=0.1 +2024-07-28 14:13:44,620 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:13:48,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=167730.66666666666, ans=0.04949747468305833 +2024-07-28 14:13:48,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=167730.66666666666, ans=0.125 +2024-07-28 14:13:50,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=167730.66666666666, ans=0.125 +2024-07-28 14:14:01,410 INFO [train.py:1114] (0/4) Epoch 13, batch 3150, loss[loss=0.1963, simple_loss=0.2995, pruned_loss=0.04656, over 4617.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2764, pruned_loss=0.04974, over 938599.03 frames. ], batch size: 17, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:26,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167797.33333333334, ans=0.125 +2024-07-28 14:14:37,496 INFO [train.py:1114] (0/4) Epoch 13, batch 3200, loss[loss=0.2062, simple_loss=0.2938, pruned_loss=0.05927, over 4825.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.04933, over 939676.10 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:47,195 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+01 5.665e+01 6.377e+01 7.022e+01 9.065e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:15:01,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167850.66666666666, ans=0.1 +2024-07-28 14:15:12,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=6.0 +2024-07-28 14:15:18,617 INFO [train.py:1114] (0/4) Epoch 13, batch 3250, loss[loss=0.1988, simple_loss=0.2937, pruned_loss=0.05196, over 4938.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2757, pruned_loss=0.04955, over 940605.92 frames. 
], batch size: 14, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:24,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167890.66666666666, ans=0.0 +2024-07-28 14:15:31,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167917.33333333334, ans=0.125 +2024-07-28 14:15:48,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=167944.0, ans=0.5 +2024-07-28 14:15:52,607 INFO [train.py:1114] (0/4) Epoch 13, batch 3300, loss[loss=0.2064, simple_loss=0.2947, pruned_loss=0.05909, over 4740.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2743, pruned_loss=0.0489, over 941256.52 frames. ], batch size: 19, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:53,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=167957.33333333334, ans=0.0 +2024-07-28 14:15:54,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=167957.33333333334, ans=0.125 +2024-07-28 14:15:54,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=167957.33333333334, ans=0.125 +2024-07-28 14:15:55,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167957.33333333334, ans=0.1 +2024-07-28 14:15:58,091 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=167957.33333333334, ans=0.025 +2024-07-28 14:16:02,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.327e+01 5.938e+01 6.571e+01 1.063e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 14:16:05,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.81 vs. limit=15.0 +2024-07-28 14:16:12,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167997.33333333334, ans=0.125 +2024-07-28 14:16:26,186 INFO [train.py:1114] (0/4) Epoch 13, batch 3350, loss[loss=0.1824, simple_loss=0.2786, pruned_loss=0.04313, over 4642.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2755, pruned_loss=0.04979, over 939467.96 frames. ], batch size: 17, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:16:27,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=168024.0, ans=0.07 +2024-07-28 14:16:27,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168024.0, ans=0.125 +2024-07-28 14:16:55,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168077.33333333334, ans=0.125 +2024-07-28 14:16:57,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168077.33333333334, ans=0.125 +2024-07-28 14:17:00,174 INFO [train.py:1114] (0/4) Epoch 13, batch 3400, loss[loss=0.2027, simple_loss=0.2665, pruned_loss=0.06947, over 4812.00 frames. 
], tot_loss[loss=0.1872, simple_loss=0.275, pruned_loss=0.04976, over 937902.03 frames. ], batch size: 11, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:10,167 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.911e+01 6.407e+01 7.548e+01 1.179e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 14:17:14,150 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.01 vs. limit=15.0 +2024-07-28 14:17:15,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=168117.33333333334, ans=0.125 +2024-07-28 14:17:16,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=168117.33333333334, ans=0.2 +2024-07-28 14:17:33,661 INFO [train.py:1114] (0/4) Epoch 13, batch 3450, loss[loss=0.2112, simple_loss=0.3055, pruned_loss=0.0585, over 4684.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2757, pruned_loss=0.04961, over 937850.86 frames. ], batch size: 19, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:38,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=168157.33333333334, ans=0.2 +2024-07-28 14:17:47,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168170.66666666666, ans=0.125 +2024-07-28 14:18:08,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=168224.0, ans=0.125 +2024-07-28 14:18:08,645 INFO [train.py:1114] (0/4) Epoch 13, batch 3500, loss[loss=0.1702, simple_loss=0.2504, pruned_loss=0.04499, over 4958.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2743, pruned_loss=0.04888, over 938308.44 frames. ], batch size: 12, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:18:13,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=168224.0, ans=0.125 +2024-07-28 14:18:18,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+01 5.616e+01 6.376e+01 7.329e+01 9.586e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:18:29,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168264.0, ans=0.0 +2024-07-28 14:18:38,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=168277.33333333334, ans=0.0 +2024-07-28 14:18:43,306 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.19 vs. limit=10.0 +2024-07-28 14:18:44,205 INFO [train.py:1114] (0/4) Epoch 13, batch 3550, loss[loss=0.2028, simple_loss=0.3013, pruned_loss=0.05218, over 4674.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2748, pruned_loss=0.0493, over 939060.74 frames. 
], batch size: 14, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:18:53,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=168304.0, ans=0.2 +2024-07-28 14:18:55,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168304.0, ans=0.1 +2024-07-28 14:19:00,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=168317.33333333334, ans=0.2 +2024-07-28 14:19:13,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=168344.0, ans=0.025 +2024-07-28 14:19:17,387 INFO [train.py:1114] (0/4) Epoch 13, batch 3600, loss[loss=0.1801, simple_loss=0.2842, pruned_loss=0.03796, over 4965.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2753, pruned_loss=0.04928, over 940914.46 frames. ], batch size: 13, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:19:27,241 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.903e+01 6.553e+01 7.584e+01 1.363e+02, threshold=1.311e+02, percent-clipped=1.0 +2024-07-28 14:19:42,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=168397.33333333334, ans=0.0 +2024-07-28 14:19:48,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=168410.66666666666, ans=0.04949747468305833 +2024-07-28 14:19:50,591 INFO [train.py:1114] (0/4) Epoch 13, batch 3650, loss[loss=0.1762, simple_loss=0.2759, pruned_loss=0.03821, over 4900.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2754, pruned_loss=0.04934, over 941437.66 frames. ], batch size: 15, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:20:01,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=168437.33333333334, ans=0.125 +2024-07-28 14:20:03,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=168437.33333333334, ans=0.125 +2024-07-28 14:20:08,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168450.66666666666, ans=0.125 +2024-07-28 14:20:21,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.74 vs. limit=15.0 +2024-07-28 14:20:27,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=168490.66666666666, ans=0.2 +2024-07-28 14:20:27,679 INFO [train.py:1114] (0/4) Epoch 13, batch 3700, loss[loss=0.2015, simple_loss=0.2905, pruned_loss=0.05628, over 4934.00 frames. ], tot_loss[loss=0.187, simple_loss=0.276, pruned_loss=0.04897, over 942303.84 frames. 
], batch size: 14, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:20:30,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=168490.66666666666, ans=0.0 +2024-07-28 14:20:35,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=168504.0, ans=0.0 +2024-07-28 14:20:37,609 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.274e+01 5.551e+01 6.034e+01 6.765e+01 1.404e+02, threshold=1.207e+02, percent-clipped=1.0 +2024-07-28 14:20:42,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.05 vs. limit=15.0 +2024-07-28 14:20:45,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168517.33333333334, ans=0.1 +2024-07-28 14:20:55,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=168544.0, ans=0.125 +2024-07-28 14:21:01,045 INFO [train.py:1114] (0/4) Epoch 13, batch 3750, loss[loss=0.1665, simple_loss=0.2522, pruned_loss=0.04039, over 4799.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2746, pruned_loss=0.04825, over 944011.06 frames. ], batch size: 11, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:21:01,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=168557.33333333334, ans=0.2 +2024-07-28 14:21:04,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=168557.33333333334, ans=0.125 +2024-07-28 14:21:17,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=168584.0, ans=0.95 +2024-07-28 14:21:30,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=168610.66666666666, ans=0.125 +2024-07-28 14:21:34,572 INFO [train.py:1114] (0/4) Epoch 13, batch 3800, loss[loss=0.2038, simple_loss=0.3011, pruned_loss=0.05325, over 4804.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2757, pruned_loss=0.04896, over 942224.63 frames. ], batch size: 14, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:21:38,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=168624.0, ans=0.125 +2024-07-28 14:21:40,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.94 vs. limit=22.5 +2024-07-28 14:21:44,602 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.768e+01 5.664e+01 6.425e+01 7.356e+01 1.029e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 14:21:46,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=168637.33333333334, ans=0.2 +2024-07-28 14:22:02,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=168677.33333333334, ans=0.125 +2024-07-28 14:22:08,474 INFO [train.py:1114] (0/4) Epoch 13, batch 3850, loss[loss=0.2037, simple_loss=0.3065, pruned_loss=0.05048, over 4642.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2751, pruned_loss=0.04852, over 942744.88 frames. 
], batch size: 16, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:22:08,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.64 vs. limit=15.0 +2024-07-28 14:22:12,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168690.66666666666, ans=0.1 +2024-07-28 14:22:19,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=168704.0, ans=0.0 +2024-07-28 14:22:22,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=168717.33333333334, ans=0.125 +2024-07-28 14:22:41,774 INFO [train.py:1114] (0/4) Epoch 13, batch 3900, loss[loss=0.2079, simple_loss=0.2977, pruned_loss=0.05904, over 4811.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2758, pruned_loss=0.04867, over 942918.01 frames. ], batch size: 14, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:22:48,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168770.66666666666, ans=0.1 +2024-07-28 14:22:51,519 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.611e+01 6.115e+01 6.716e+01 9.720e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 14:22:53,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0 +2024-07-28 14:23:00,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=168784.0, ans=0.125 +2024-07-28 14:23:01,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=168797.33333333334, ans=0.2 +2024-07-28 14:23:17,165 INFO [train.py:1114] (0/4) Epoch 13, batch 3950, loss[loss=0.2264, simple_loss=0.3108, pruned_loss=0.07101, over 4843.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2757, pruned_loss=0.04881, over 944768.53 frames. ], batch size: 16, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:23:23,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168837.33333333334, ans=0.1 +2024-07-28 14:23:27,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168837.33333333334, ans=0.125 +2024-07-28 14:23:31,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.40 vs. 
limit=10.0 +2024-07-28 14:23:33,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=168850.66666666666, ans=0.04949747468305833 +2024-07-28 14:23:37,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=168864.0, ans=0.5 +2024-07-28 14:23:38,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=168864.0, ans=0.125 +2024-07-28 14:23:45,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=168877.33333333334, ans=0.0 +2024-07-28 14:23:50,572 INFO [train.py:1114] (0/4) Epoch 13, batch 4000, loss[loss=0.1915, simple_loss=0.271, pruned_loss=0.05599, over 4777.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2757, pruned_loss=0.04926, over 941507.26 frames. ], batch size: 12, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:23:59,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=168904.0, ans=0.5 +2024-07-28 14:24:00,473 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.269e+01 5.777e+01 6.304e+01 7.103e+01 1.026e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 14:24:05,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0 +2024-07-28 14:24:06,903 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:24:09,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=168917.33333333334, ans=0.025 +2024-07-28 14:24:10,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=168917.33333333334, ans=0.0 +2024-07-28 14:24:12,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168930.66666666666, ans=0.1 +2024-07-28 14:24:13,181 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.39 vs. limit=22.5 +2024-07-28 14:24:24,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.90 vs. limit=15.0 +2024-07-28 14:24:25,725 INFO [train.py:1114] (0/4) Epoch 13, batch 4050, loss[loss=0.206, simple_loss=0.2994, pruned_loss=0.05634, over 3409.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2757, pruned_loss=0.04937, over 939731.61 frames. ], batch size: 35, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:24:28,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.19 vs. limit=15.0 +2024-07-28 14:24:48,184 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.32 vs. 
limit=22.5 +2024-07-28 14:24:54,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169010.66666666666, ans=0.0 +2024-07-28 14:24:59,993 INFO [train.py:1114] (0/4) Epoch 13, batch 4100, loss[loss=0.2046, simple_loss=0.3042, pruned_loss=0.0525, over 4904.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2759, pruned_loss=0.04939, over 938621.18 frames. ], batch size: 15, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:25:00,356 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. limit=6.0 +2024-07-28 14:25:00,525 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.13 vs. limit=15.0 +2024-07-28 14:25:08,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 14:25:09,985 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.994e+01 6.398e+01 7.649e+01 1.244e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 14:25:19,102 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0 +2024-07-28 14:25:29,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=169077.33333333334, ans=0.0 +2024-07-28 14:25:29,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=169077.33333333334, ans=0.025 +2024-07-28 14:25:33,414 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=12.0 +2024-07-28 14:25:35,750 INFO [train.py:1114] (0/4) Epoch 13, batch 4150, loss[loss=0.1816, simple_loss=0.2699, pruned_loss=0.04661, over 4827.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2754, pruned_loss=0.04916, over 938134.50 frames. ], batch size: 13, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:25:37,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169090.66666666666, ans=0.1 +2024-07-28 14:25:40,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169090.66666666666, ans=0.1 +2024-07-28 14:25:43,605 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.84 vs. limit=10.0 +2024-07-28 14:25:48,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=169104.0, ans=0.0 +2024-07-28 14:26:11,042 INFO [train.py:1114] (0/4) Epoch 13, batch 4200, loss[loss=0.1951, simple_loss=0.2864, pruned_loss=0.05186, over 4908.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2753, pruned_loss=0.04855, over 939149.84 frames. 
], batch size: 15, lr: 5.78e-03, grad_scale: 32.0 +2024-07-28 14:26:20,878 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.568e+01 6.158e+01 7.068e+01 9.655e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 14:26:21,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=169170.66666666666, ans=0.125 +2024-07-28 14:26:22,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=169170.66666666666, ans=0.0 +2024-07-28 14:26:25,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=169184.0, ans=0.125 +2024-07-28 14:26:26,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=169184.0, ans=0.0 +2024-07-28 14:26:31,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=169197.33333333334, ans=0.125 +2024-07-28 14:26:41,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=169210.66666666666, ans=0.025 +2024-07-28 14:26:43,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=169210.66666666666, ans=0.0 +2024-07-28 14:26:43,949 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:26:44,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.00 vs. limit=15.0 +2024-07-28 14:26:44,498 INFO [train.py:1114] (0/4) Epoch 13, batch 4250, loss[loss=0.1632, simple_loss=0.2529, pruned_loss=0.03672, over 4644.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2763, pruned_loss=0.04889, over 940221.37 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 32.0 +2024-07-28 14:26:44,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=169224.0, ans=0.0 +2024-07-28 14:26:45,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=15.0 +2024-07-28 14:26:48,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=169224.0, ans=0.125 +2024-07-28 14:26:51,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.99 vs. 
limit=15.0 +2024-07-28 14:26:59,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169250.66666666666, ans=0.1 +2024-07-28 14:27:05,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169264.0, ans=0.1 +2024-07-28 14:27:07,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169264.0, ans=0.1 +2024-07-28 14:27:10,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=169277.33333333334, ans=0.125 +2024-07-28 14:27:12,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169277.33333333334, ans=0.125 +2024-07-28 14:27:17,704 INFO [train.py:1114] (0/4) Epoch 13, batch 4300, loss[loss=0.1821, simple_loss=0.269, pruned_loss=0.04757, over 4769.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2764, pruned_loss=0.04911, over 939455.92 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:27:27,629 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.556e+01 6.095e+01 6.767e+01 1.249e+02, threshold=1.219e+02, percent-clipped=1.0 +2024-07-28 14:27:41,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=169330.66666666666, ans=0.2 +2024-07-28 14:27:45,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=169344.0, ans=0.025 +2024-07-28 14:27:50,995 INFO [train.py:1114] (0/4) Epoch 13, batch 4350, loss[loss=0.1712, simple_loss=0.2634, pruned_loss=0.03952, over 4757.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2767, pruned_loss=0.04892, over 940306.51 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:28:18,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.07 vs. limit=15.0 +2024-07-28 14:28:24,268 INFO [train.py:1114] (0/4) Epoch 13, batch 4400, loss[loss=0.2222, simple_loss=0.3071, pruned_loss=0.06861, over 4807.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2766, pruned_loss=0.04874, over 940249.95 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:28:27,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=169424.0, ans=0.125 +2024-07-28 14:28:28,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=169424.0, ans=0.125 +2024-07-28 14:28:30,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. 
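limit=10.0

The `Whitening` lines throughout this log come from the `Whiten` modules in `scaling.py`: each reports a whiteness metric for a group of activations against the limit above which a corrective loss kicks in (here `metric=4.07 vs. limit=10.0`, so no correction is applied). A minimal sketch of one plausible such metric, assuming the standard eigenvalue-spread measure `d * sum(eigvals**2) / sum(eigvals)**2` (equal to 1.0 for perfectly white features); icefall's exact implementation may differ:

```python
import torch

# Hedged sketch of a whiteness metric for activations x of shape (N, C):
#   metric = C * trace(cov @ cov) / trace(cov)**2
#          = C * sum(eigvals**2) / sum(eigvals)**2,
# which is 1.0 when all covariance eigenvalues are equal ("white")
# and grows as the spectrum becomes more uneven.
def whitening_metric(x: torch.Tensor) -> float:
    x = x - x.mean(dim=0)             # center the features
    cov = (x.T @ x) / x.shape[0]      # (C, C) covariance estimate
    d = cov.shape[0]
    return float(d * (cov * cov).sum() / cov.trace() ** 2)

# Roughly white features score near the ideal 1.0 (inflated slightly,
# to about 1.3-1.4 here, by sampling noise in the covariance estimate);
# a log line like "metric=4.07 vs. limit=10.0" means the spectrum is
# moderately uneven but still under the limit.
x = torch.randn(1000, 384)
print(whitening_metric(x))
```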
+2024-07-28 14:28:30,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=169437.33333333334, ans=0.05 +2024-07-28 14:28:35,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=169437.33333333334, ans=0.95 +2024-07-28 14:28:36,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 5.545e+01 6.054e+01 6.710e+01 1.195e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 14:28:36,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169437.33333333334, ans=0.1 +2024-07-28 14:29:00,265 INFO [train.py:1114] (0/4) Epoch 13, batch 4450, loss[loss=0.176, simple_loss=0.2573, pruned_loss=0.04739, over 4933.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2765, pruned_loss=0.04914, over 938916.35 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:29:03,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=169490.66666666666, ans=0.025 +2024-07-28 14:29:17,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169517.33333333334, ans=0.1 +2024-07-28 14:29:26,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=169530.66666666666, ans=0.125 +2024-07-28 14:29:27,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=169530.66666666666, ans=0.125 +2024-07-28 14:29:32,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=169544.0, ans=0.0 +2024-07-28 14:29:34,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=169544.0, ans=0.0 +2024-07-28 14:29:35,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=169544.0, ans=0.09899494936611666 +2024-07-28 14:29:37,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.38 vs. limit=22.5 +2024-07-28 14:29:38,978 INFO [train.py:1114] (0/4) Epoch 13, batch 4500, loss[loss=0.1797, simple_loss=0.27, pruned_loss=0.04469, over 4732.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2763, pruned_loss=0.0487, over 937516.00 frames. 
], batch size: 14, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:29:44,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=169557.33333333334, ans=0.125 +2024-07-28 14:29:46,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=169570.66666666666, ans=0.2 +2024-07-28 14:29:48,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=169570.66666666666, ans=0.125 +2024-07-28 14:29:48,688 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.617e+01 6.099e+01 7.289e+01 9.992e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 14:29:52,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=169584.0, ans=0.0 +2024-07-28 14:31:55,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=169610.66666666666, ans=0.0 +2024-07-28 14:31:57,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=169610.66666666666, ans=0.5 +2024-07-28 14:31:57,938 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=12.0 +2024-07-28 14:31:59,574 INFO [train.py:1114] (0/4) Epoch 13, batch 4550, loss[loss=0.1839, simple_loss=0.2674, pruned_loss=0.05023, over 4904.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2758, pruned_loss=0.04806, over 939571.00 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:32:18,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=169650.66666666666, ans=0.2 +2024-07-28 14:32:19,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=169650.66666666666, ans=0.0 +2024-07-28 14:32:26,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=169664.0, ans=0.125 +2024-07-28 14:32:34,427 INFO [train.py:1114] (0/4) Epoch 13, batch 4600, loss[loss=0.225, simple_loss=0.3194, pruned_loss=0.06534, over 4524.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.275, pruned_loss=0.04841, over 937922.92 frames. ], batch size: 21, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:32:35,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=169690.66666666666, ans=0.2 +2024-07-28 14:32:48,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.248e+01 5.789e+01 6.719e+01 7.977e+01 1.194e+02, threshold=1.344e+02, percent-clipped=0.0 +2024-07-28 14:33:05,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=169730.66666666666, ans=0.0 +2024-07-28 14:33:12,839 INFO [train.py:1114] (0/4) Epoch 13, batch 4650, loss[loss=0.2129, simple_loss=0.304, pruned_loss=0.06088, over 4853.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2761, pruned_loss=0.04856, over 939497.81 frames. ], batch size: 16, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:33:20,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.63 vs. 
limit=15.0 +2024-07-28 14:33:29,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=169784.0, ans=0.07 +2024-07-28 14:33:30,635 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:33:42,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=169810.66666666666, ans=0.04949747468305833 +2024-07-28 14:33:44,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=169810.66666666666, ans=0.125 +2024-07-28 14:33:46,620 INFO [train.py:1114] (0/4) Epoch 13, batch 4700, loss[loss=0.1801, simple_loss=0.2533, pruned_loss=0.05342, over 4706.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.0494, over 936651.56 frames. ], batch size: 11, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:33:48,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=169824.0, ans=0.0 +2024-07-28 14:33:50,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=169824.0, ans=0.025 +2024-07-28 14:33:56,557 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.422e+01 6.008e+01 7.035e+01 1.017e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 14:33:56,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=169837.33333333334, ans=0.125 +2024-07-28 14:34:09,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.21 vs. limit=6.0 +2024-07-28 14:34:12,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169877.33333333334, ans=0.1 +2024-07-28 14:34:14,515 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.76 vs. limit=15.0 +2024-07-28 14:34:20,195 INFO [train.py:1114] (0/4) Epoch 13, batch 4750, loss[loss=0.1913, simple_loss=0.2905, pruned_loss=0.04612, over 4573.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2771, pruned_loss=0.05033, over 934505.60 frames. ], batch size: 21, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:34:21,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=169890.66666666666, ans=0.2 +2024-07-28 14:34:40,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=169930.66666666666, ans=0.0 +2024-07-28 14:34:42,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.77 vs. limit=15.0 +2024-07-28 14:34:44,748 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:34:50,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=169944.0, ans=0.0 +2024-07-28 14:34:53,980 INFO [train.py:1114] (0/4) Epoch 13, batch 4800, loss[loss=0.1686, simple_loss=0.2587, pruned_loss=0.03931, over 4685.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2768, pruned_loss=0.05005, over 931551.01 frames. 
], batch size: 13, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:34:56,239 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:35:03,985 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.668e+01 6.259e+01 7.420e+01 1.160e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 14:35:13,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=169984.0, ans=0.0 +2024-07-28 14:35:22,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.50 vs. limit=15.0 +2024-07-28 14:35:22,730 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:35:31,452 INFO [train.py:1114] (0/4) Epoch 13, batch 4850, loss[loss=0.1527, simple_loss=0.2355, pruned_loss=0.03497, over 4739.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2774, pruned_loss=0.05051, over 931348.54 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:35:41,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=170037.33333333334, ans=0.0 +2024-07-28 14:35:42,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170037.33333333334, ans=0.0 +2024-07-28 14:35:43,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=170037.33333333334, ans=0.0 +2024-07-28 14:35:56,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170064.0, ans=0.0 +2024-07-28 14:36:12,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=170077.33333333334, ans=0.125 +2024-07-28 14:36:14,119 INFO [train.py:1114] (0/4) Epoch 13, batch 4900, loss[loss=0.2029, simple_loss=0.2949, pruned_loss=0.0555, over 4763.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2767, pruned_loss=0.05005, over 933344.44 frames. 
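], batch size: 13, lr: 5.77e-03, grad_scale: 64.0

The `ScheduledFloat` lines that dominate this log (from `scaling.py:214`) track regularisation hyperparameters (dropout rates, balancer probabilities, skip rates) that are annealed as a piecewise-linear function of the global `batch_count`; `ans` is the value in effect for that batch. A minimal sketch of that behaviour, assuming a plain sorted list of `(batch_count, value)` breakpoints rather than icefall's exact class:

```python
# Hedged sketch of a piecewise-linear hyperparameter schedule, in the spirit
# of lines like "ScheduledFloat: name=...dropout_p, batch_count=170104.0, ans=0.1".
# `schedule` is assumed to be sorted by batch_count.

def scheduled_float(batch_count: float, schedule: list) -> float:
    if batch_count <= schedule[0][0]:
        return schedule[0][1]        # hold the first value before the schedule starts
    if batch_count >= schedule[-1][0]:
        return schedule[-1][1]       # hold the final value once annealing is done
    for (x0, y0), (x1, y1) in zip(schedule, schedule[1:]):
        if x0 <= batch_count <= x1:  # linear interpolation between breakpoints
            t = (batch_count - x0) / (x1 - x0)
            return y0 + t * (y1 - y0)
    raise AssertionError("unreachable for a sorted schedule")

# Example: a dropout probability annealed from 0.3 to 0.1 over the first
# 20k batches has long since reached its floor by batch_count ~ 170k,
# which is why the dropout_p lines above all read ans=0.1.
assert scheduled_float(170104.0, [(0.0, 0.3), (20000.0, 0.1)]) == 0.1
```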
+2024-07-28 14:36:32,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=170090.66666666666, ans=0.0 +2024-07-28 14:36:34,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=170104.0, ans=0.025 +2024-07-28 14:36:37,689 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.628e+01 6.419e+01 7.139e+01 1.048e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 14:36:38,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=170104.0, ans=0.2 +2024-07-28 14:36:44,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=170117.33333333334, ans=0.125 +2024-07-28 14:36:51,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170130.66666666666, ans=0.1 +2024-07-28 14:36:55,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170144.0, ans=0.1 +2024-07-28 14:37:04,297 INFO [train.py:1114] (0/4) Epoch 13, batch 4950, loss[loss=0.27, simple_loss=0.3305, pruned_loss=0.1047, over 3173.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2782, pruned_loss=0.05111, over 930401.79 frames. ], batch size: 35, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:37:10,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=22.5 +2024-07-28 14:37:11,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170157.33333333334, ans=0.1 +2024-07-28 14:37:17,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170170.66666666666, ans=0.125 +2024-07-28 14:37:20,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=170184.0, ans=0.07 +2024-07-28 14:37:20,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170184.0, ans=0.125 +2024-07-28 14:37:32,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170197.33333333334, ans=0.1 +2024-07-28 14:37:36,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=170210.66666666666, ans=0.0 +2024-07-28 14:37:40,473 INFO [train.py:1114] (0/4) Epoch 13, batch 5000, loss[loss=0.201, simple_loss=0.3092, pruned_loss=0.04637, over 4662.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2778, pruned_loss=0.05047, over 934367.41 frames. 
], batch size: 14, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:37:52,212 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.707e+01 6.178e+01 6.994e+01 1.058e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 14:37:52,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=170237.33333333334, ans=0.2 +2024-07-28 14:37:56,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=170250.66666666666, ans=0.125 +2024-07-28 14:38:11,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170277.33333333334, ans=0.0 +2024-07-28 14:38:13,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. limit=10.0 +2024-07-28 14:38:13,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.66 vs. limit=22.5 +2024-07-28 14:38:15,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=15.0 +2024-07-28 14:38:15,732 INFO [train.py:1114] (0/4) Epoch 13, batch 5050, loss[loss=0.1464, simple_loss=0.241, pruned_loss=0.02596, over 4858.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2772, pruned_loss=0.04983, over 936841.27 frames. ], batch size: 12, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:38:17,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170290.66666666666, ans=0.125 +2024-07-28 14:38:34,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170317.33333333334, ans=0.1 +2024-07-28 14:38:44,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=170344.0, ans=0.125 +2024-07-28 14:38:50,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=170357.33333333334, ans=0.125 +2024-07-28 14:38:51,512 INFO [train.py:1114] (0/4) Epoch 13, batch 5100, loss[loss=0.1769, simple_loss=0.2588, pruned_loss=0.04747, over 4787.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.277, pruned_loss=0.04996, over 934190.99 frames. ], batch size: 12, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:38:56,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=170357.33333333334, ans=0.025 +2024-07-28 14:39:03,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170370.66666666666, ans=0.1 +2024-07-28 14:39:04,406 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.671e+01 6.468e+01 7.600e+01 1.076e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 14:39:05,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. 
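limit=15.0

The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ...` lines summarise the recent distribution of gradient norms (min, 25%, median, 75%, max), the clipping threshold derived from that history, and how many recent batches were actually clipped (`percent-clipped`, mostly 0.0 here, so the threshold is rarely binding). A hedged sketch of the idea, assuming a simple rolling history and a threshold of `clipping_scale` times the recent median rather than icefall's exact bookkeeping:

```python
import statistics
from collections import deque

import torch

CLIPPING_SCALE = 2.0              # matches "Clipping_scale=2.0" in the log
recent_norms = deque(maxlen=100)  # rolling history of total gradient norms

def clip_gradients(model: torch.nn.Module) -> float:
    # max_norm=inf measures the total norm without modifying any gradient.
    norm = float(torch.nn.utils.clip_grad_norm_(model.parameters(), float("inf")))
    recent_norms.append(norm)
    threshold = CLIPPING_SCALE * statistics.median(recent_norms)
    if norm > threshold:          # such a batch counts toward percent-clipped
        for p in model.parameters():
            if p.grad is not None:
                p.grad.mul_(threshold / norm)  # scale total norm down to threshold
    return norm
```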
+2024-07-28 14:39:13,801 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:39:15,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=170397.33333333334, ans=0.2 +2024-07-28 14:39:20,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=170397.33333333334, ans=0.07 +2024-07-28 14:39:23,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=170410.66666666666, ans=0.0 +2024-07-28 14:39:24,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170410.66666666666, ans=0.1 +2024-07-28 14:39:25,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.39 vs. limit=6.0 +2024-07-28 14:39:25,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=170410.66666666666, ans=0.125 +2024-07-28 14:39:27,937 INFO [train.py:1114] (0/4) Epoch 13, batch 5150, loss[loss=0.2176, simple_loss=0.3137, pruned_loss=0.06076, over 4858.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2783, pruned_loss=0.05076, over 935380.36 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:39:34,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170437.33333333334, ans=0.125 +2024-07-28 14:39:38,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=170437.33333333334, ans=0.125 +2024-07-28 14:39:43,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=170450.66666666666, ans=15.0 +2024-07-28 14:39:56,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=170477.33333333334, ans=0.125 +2024-07-28 14:40:01,713 INFO [train.py:1114] (0/4) Epoch 13, batch 5200, loss[loss=0.1908, simple_loss=0.2757, pruned_loss=0.05292, over 4673.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2774, pruned_loss=0.05012, over 935483.82 frames. ], batch size: 14, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:40:07,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=170504.0, ans=0.0 +2024-07-28 14:40:10,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=170504.0, ans=0.0 +2024-07-28 14:40:10,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=170504.0, ans=0.125 +2024-07-28 14:40:11,854 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.593e+01 6.249e+01 7.313e+01 1.397e+02, threshold=1.250e+02, percent-clipped=1.0 +2024-07-28 14:40:17,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.54 vs. 
limit=15.0 +2024-07-28 14:40:34,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=170544.0, ans=0.025 +2024-07-28 14:40:35,398 INFO [train.py:1114] (0/4) Epoch 13, batch 5250, loss[loss=0.1754, simple_loss=0.2629, pruned_loss=0.04399, over 4893.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2771, pruned_loss=0.05008, over 935324.77 frames. ], batch size: 13, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:40:36,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=170557.33333333334, ans=0.125 +2024-07-28 14:40:43,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=170570.66666666666, ans=0.0 +2024-07-28 14:40:46,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=170570.66666666666, ans=0.125 +2024-07-28 14:40:48,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=170570.66666666666, ans=0.2 +2024-07-28 14:40:56,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=170597.33333333334, ans=0.0 +2024-07-28 14:41:01,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=170610.66666666666, ans=0.125 +2024-07-28 14:41:09,238 INFO [train.py:1114] (0/4) Epoch 13, batch 5300, loss[loss=0.2126, simple_loss=0.2895, pruned_loss=0.06784, over 4643.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.276, pruned_loss=0.04975, over 934097.79 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:41:10,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=170624.0, ans=0.125 +2024-07-28 14:41:19,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.756e+01 6.384e+01 7.054e+01 9.587e+01, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 14:41:20,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=170637.33333333334, ans=0.0 +2024-07-28 14:41:29,929 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-128000.pt +2024-07-28 14:41:47,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170690.66666666666, ans=0.125 +2024-07-28 14:41:47,977 INFO [train.py:1114] (0/4) Epoch 13, batch 5350, loss[loss=0.1557, simple_loss=0.2351, pruned_loss=0.03813, over 4523.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2768, pruned_loss=0.04979, over 936093.54 frames. ], batch size: 10, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:41:54,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.93 vs. limit=22.5 +2024-07-28 14:42:04,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=170717.33333333334, ans=0.125 +2024-07-28 14:42:21,589 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. 
limit=15.0 +2024-07-28 14:42:33,632 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=170744.0, ans=0.09899494936611666 +2024-07-28 14:42:34,781 INFO [train.py:1114] (0/4) Epoch 13, batch 5400, loss[loss=0.204, simple_loss=0.2856, pruned_loss=0.06116, over 4310.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2776, pruned_loss=0.05031, over 929994.16 frames. ], batch size: 26, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:42:37,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170757.33333333334, ans=0.1 +2024-07-28 14:42:47,129 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.692e+01 6.413e+01 7.093e+01 1.081e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 14:43:05,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=170810.66666666666, ans=0.0 +2024-07-28 14:43:06,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170810.66666666666, ans=0.1 +2024-07-28 14:43:06,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=170810.66666666666, ans=0.125 +2024-07-28 14:43:09,619 INFO [train.py:1114] (0/4) Epoch 13, batch 5450, loss[loss=0.1806, simple_loss=0.2572, pruned_loss=0.052, over 4704.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2772, pruned_loss=0.05018, over 932976.81 frames. ], batch size: 11, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:43:11,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=170824.0, ans=0.2 +2024-07-28 14:43:16,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=170824.0, ans=0.025 +2024-07-28 14:43:22,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=170837.33333333334, ans=0.125 +2024-07-28 14:43:23,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170837.33333333334, ans=0.0 +2024-07-28 14:43:31,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=170850.66666666666, ans=0.0 +2024-07-28 14:43:34,197 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-07-28 14:43:41,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=170877.33333333334, ans=0.125 +2024-07-28 14:43:46,380 INFO [train.py:1114] (0/4) Epoch 13, batch 5500, loss[loss=0.1692, simple_loss=0.2564, pruned_loss=0.04103, over 4188.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2763, pruned_loss=0.04985, over 930590.91 frames. 
], batch size: 25, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:43:57,658 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.576e+01 6.394e+01 7.172e+01 9.673e+01, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 14:43:57,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=170904.0, ans=0.0 +2024-07-28 14:43:59,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=170917.33333333334, ans=0.125 +2024-07-28 14:44:08,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.60 vs. limit=15.0 +2024-07-28 14:44:09,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=170930.66666666666, ans=0.04949747468305833 +2024-07-28 14:44:42,515 INFO [train.py:1114] (0/4) Epoch 13, batch 5550, loss[loss=0.1857, simple_loss=0.2702, pruned_loss=0.05062, over 4708.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2762, pruned_loss=0.04998, over 932876.79 frames. ], batch size: 12, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:44:56,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=170957.33333333334, ans=0.125 +2024-07-28 14:45:03,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=170984.0, ans=0.0 +2024-07-28 14:45:05,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=170984.0, ans=0.125 +2024-07-28 14:45:08,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=170984.0, ans=0.2 +2024-07-28 14:45:10,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170984.0, ans=0.125 +2024-07-28 14:45:34,089 INFO [train.py:1114] (0/4) Epoch 13, batch 5600, loss[loss=0.196, simple_loss=0.2881, pruned_loss=0.05195, over 4740.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2766, pruned_loss=0.05003, over 933850.49 frames. ], batch size: 14, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:45:42,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171037.33333333334, ans=0.1 +2024-07-28 14:45:44,818 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.519e+01 5.953e+01 6.683e+01 8.989e+01, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 14:46:07,399 INFO [train.py:1114] (0/4) Epoch 13, batch 5650, loss[loss=0.1887, simple_loss=0.2889, pruned_loss=0.04429, over 4574.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2752, pruned_loss=0.04939, over 936724.22 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:46:27,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=171117.33333333334, ans=0.125 +2024-07-28 14:46:42,396 INFO [train.py:1114] (0/4) Epoch 13, batch 5700, loss[loss=0.1785, simple_loss=0.2734, pruned_loss=0.04183, over 4689.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2773, pruned_loss=0.05043, over 937947.60 frames. 
], batch size: 13, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:46:44,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=171157.33333333334, ans=0.125 +2024-07-28 14:46:58,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=171170.66666666666, ans=0.0 +2024-07-28 14:46:58,589 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 5.340e+01 5.994e+01 6.863e+01 1.115e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 14:47:06,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=171184.0, ans=0.0 +2024-07-28 14:47:07,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=171197.33333333334, ans=0.125 +2024-07-28 14:47:17,702 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:47:23,376 INFO [train.py:1114] (0/4) Epoch 13, batch 5750, loss[loss=0.2032, simple_loss=0.2917, pruned_loss=0.05733, over 4700.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2775, pruned_loss=0.05051, over 938597.35 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:47:28,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=171224.0, ans=0.025 +2024-07-28 14:47:38,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=171250.66666666666, ans=0.0 +2024-07-28 14:47:47,274 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.08 vs. limit=15.0 +2024-07-28 14:47:49,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=171277.33333333334, ans=0.0 +2024-07-28 14:47:53,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=171277.33333333334, ans=0.025 +2024-07-28 14:47:56,852 INFO [train.py:1114] (0/4) Epoch 13, batch 5800, loss[loss=0.2208, simple_loss=0.3083, pruned_loss=0.06669, over 4714.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2776, pruned_loss=0.05023, over 937173.85 frames. 
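[Note on the `optim.py` warnings above: the five numbers after `grad-norm quartiles` are the min/25%/median/75%/max of recent gradient norms, and in each instance the logged `threshold` equals `Clipping_scale` times the median (e.g. 2.0 x 5.994e+01 = 1.199e+02 in the warning just above). A hedged sketch of that bookkeeping follows; the class name and history length are assumptions, not the actual optimizer code.]

```python
from collections import deque
import torch

class QuartileClipper:
    """Sketch: clip gradients at clipping_scale * median of recent norms."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 1024):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)          # recent total grad norms

    def clip_(self, params):
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()   # scale * median norm
        if norm > threshold:             # batches here feed "percent-clipped"
            for g in grads:
                g.mul_(threshold / norm)
        return q.tolist(), threshold
```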
], batch size: 19, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:47:58,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171290.66666666666, ans=0.1 +2024-07-28 14:48:01,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=171290.66666666666, ans=0.0 +2024-07-28 14:48:07,664 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.852e+01 6.546e+01 7.322e+01 1.389e+02, threshold=1.309e+02, percent-clipped=1.0 +2024-07-28 14:48:20,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=171330.66666666666, ans=0.1 +2024-07-28 14:48:26,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=171344.0, ans=0.04949747468305833 +2024-07-28 14:48:28,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171344.0, ans=0.1 +2024-07-28 14:48:33,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=171344.0, ans=0.125 +2024-07-28 14:48:35,975 INFO [train.py:1114] (0/4) Epoch 13, batch 5850, loss[loss=0.1874, simple_loss=0.2842, pruned_loss=0.04532, over 4460.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2763, pruned_loss=0.04998, over 937482.40 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:48:38,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.92 vs. limit=15.0 +2024-07-28 14:48:38,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.41 vs. limit=6.0 +2024-07-28 14:48:42,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.09 vs. limit=10.0 +2024-07-28 14:48:54,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=171384.0, ans=0.125 +2024-07-28 14:49:07,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171410.66666666666, ans=0.1 +2024-07-28 14:49:12,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=171424.0, ans=0.125 +2024-07-28 14:49:12,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=171424.0, ans=10.0 +2024-07-28 14:49:13,162 INFO [train.py:1114] (0/4) Epoch 13, batch 5900, loss[loss=0.1977, simple_loss=0.2832, pruned_loss=0.05609, over 4705.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2758, pruned_loss=0.04944, over 937803.80 frames. 
], batch size: 15, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:49:46,004 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.643e+01 6.441e+01 7.134e+01 1.016e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 14:49:49,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=171450.66666666666, ans=0.125 +2024-07-28 14:49:57,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171464.0, ans=0.125 +2024-07-28 14:50:01,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=171464.0, ans=0.125 +2024-07-28 14:50:08,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=171490.66666666666, ans=0.125 +2024-07-28 14:50:09,032 INFO [train.py:1114] (0/4) Epoch 13, batch 5950, loss[loss=0.1953, simple_loss=0.2951, pruned_loss=0.04778, over 4687.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2763, pruned_loss=0.04926, over 940334.02 frames. ], batch size: 15, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:50:12,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=171490.66666666666, ans=0.2 +2024-07-28 14:50:12,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=171490.66666666666, ans=0.0 +2024-07-28 14:50:18,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=171490.66666666666, ans=0.2 +2024-07-28 14:50:23,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=171504.0, ans=0.125 +2024-07-28 14:50:30,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=171517.33333333334, ans=0.0 +2024-07-28 14:50:31,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=171517.33333333334, ans=0.0 +2024-07-28 14:50:33,257 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=15.0 +2024-07-28 14:50:47,140 INFO [train.py:1114] (0/4) Epoch 13, batch 6000, loss[loss=0.2018, simple_loss=0.3043, pruned_loss=0.04962, over 4121.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2761, pruned_loss=0.04956, over 937153.72 frames. ], batch size: 25, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:50:47,141 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 14:51:12,227 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1644, simple_loss=0.2689, pruned_loss=0.02993, over 944034.00 frames. +2024-07-28 14:51:12,228 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 14:51:18,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171557.33333333334, ans=0.1 +2024-07-28 14:51:18,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.45 vs. 
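[Note on the validation block above (`Computing validation loss` ... `Maximum memory allocated so far is 4178MB`): it is emitted periodically during training, and the memory figure is CUDA's peak-allocation high-water mark for this rank (the `(0/4)` prefix on every line is rank 0 of 4). A small sketch of that reporting; the helper name is an assumption.]

```python
import torch

def report_peak_memory(device: int = 0) -> str:
    # torch.cuda.max_memory_allocated tracks the peak allocation in bytes
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return f"Maximum memory allocated so far is {peak_mb}MB"
```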
limit=15.0 +2024-07-28 14:51:24,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171570.66666666666, ans=0.125 +2024-07-28 14:51:25,694 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.656e+01 6.363e+01 7.172e+01 1.139e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 14:51:39,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=171597.33333333334, ans=0.125 +2024-07-28 14:51:56,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=171610.66666666666, ans=0.0 +2024-07-28 14:51:57,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.79 vs. limit=10.0 +2024-07-28 14:52:00,245 INFO [train.py:1114] (0/4) Epoch 13, batch 6050, loss[loss=0.2045, simple_loss=0.2967, pruned_loss=0.05615, over 4774.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2761, pruned_loss=0.05007, over 938471.25 frames. ], batch size: 12, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:52:02,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=171624.0, ans=0.2 +2024-07-28 14:52:04,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171624.0, ans=0.1 +2024-07-28 14:52:07,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=171624.0, ans=0.0 +2024-07-28 14:52:19,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=171650.66666666666, ans=0.125 +2024-07-28 14:52:35,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=171677.33333333334, ans=0.125 +2024-07-28 14:52:36,823 INFO [train.py:1114] (0/4) Epoch 13, batch 6100, loss[loss=0.2054, simple_loss=0.3044, pruned_loss=0.05323, over 4679.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2769, pruned_loss=0.05052, over 937910.32 frames. ], batch size: 15, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:52:38,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171690.66666666666, ans=0.0 +2024-07-28 14:52:38,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=171690.66666666666, ans=0.0 +2024-07-28 14:52:41,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.99 vs. 
limit=15.0 +2024-07-28 14:52:48,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171704.0, ans=0.1 +2024-07-28 14:52:51,824 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.506e+01 6.070e+01 6.932e+01 1.254e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 14:52:53,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=171704.0, ans=0.125 +2024-07-28 14:52:54,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=171717.33333333334, ans=0.5 +2024-07-28 14:53:20,068 INFO [train.py:1114] (0/4) Epoch 13, batch 6150, loss[loss=0.2169, simple_loss=0.3089, pruned_loss=0.06243, over 3422.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2766, pruned_loss=0.05011, over 936409.25 frames. ], batch size: 35, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:53:20,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=171757.33333333334, ans=0.125 +2024-07-28 14:53:34,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=171784.0, ans=0.125 +2024-07-28 14:53:34,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=171784.0, ans=0.0 +2024-07-28 14:53:53,562 INFO [train.py:1114] (0/4) Epoch 13, batch 6200, loss[loss=0.1617, simple_loss=0.2577, pruned_loss=0.03286, over 4739.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2765, pruned_loss=0.05023, over 936420.42 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:54:07,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.672e+01 6.206e+01 7.275e+01 9.803e+01, threshold=1.241e+02, percent-clipped=1.0 +2024-07-28 14:54:25,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=171877.33333333334, ans=0.2 +2024-07-28 14:54:29,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.80 vs. limit=15.0 +2024-07-28 14:54:35,959 INFO [train.py:1114] (0/4) Epoch 13, batch 6250, loss[loss=0.1721, simple_loss=0.2644, pruned_loss=0.03993, over 4810.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2763, pruned_loss=0.05025, over 933124.33 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:54:36,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=171890.66666666666, ans=0.125 +2024-07-28 14:54:46,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=171904.0, ans=0.125 +2024-07-28 14:54:49,445 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. 
limit=6.0 +2024-07-28 14:54:52,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171917.33333333334, ans=0.1 +2024-07-28 14:54:52,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=171917.33333333334, ans=0.125 +2024-07-28 14:54:59,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=171930.66666666666, ans=0.125 +2024-07-28 14:55:05,084 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.61 vs. limit=15.0 +2024-07-28 14:55:06,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=171944.0, ans=0.125 +2024-07-28 14:55:10,132 INFO [train.py:1114] (0/4) Epoch 13, batch 6300, loss[loss=0.1618, simple_loss=0.2477, pruned_loss=0.03794, over 4558.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2762, pruned_loss=0.05036, over 929109.11 frames. ], batch size: 10, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:55:10,657 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-28 14:55:11,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=171957.33333333334, ans=0.0 +2024-07-28 14:55:16,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=171957.33333333334, ans=0.125 +2024-07-28 14:55:22,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171970.66666666666, ans=0.1 +2024-07-28 14:55:26,618 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.859e+01 6.673e+01 7.738e+01 1.141e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-28 14:55:27,741 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0 +2024-07-28 14:55:29,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171984.0, ans=0.1 +2024-07-28 14:55:33,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-28 14:55:40,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=171997.33333333334, ans=0.0 +2024-07-28 14:55:44,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=172010.66666666666, ans=0.1 +2024-07-28 14:55:46,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172010.66666666666, ans=0.1 +2024-07-28 14:55:49,027 INFO [train.py:1114] (0/4) Epoch 13, batch 6350, loss[loss=0.184, simple_loss=0.2781, pruned_loss=0.045, over 4605.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2753, pruned_loss=0.04933, over 933383.36 frames. 
], batch size: 21, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:55:49,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=172024.0, ans=0.05 +2024-07-28 14:56:01,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=172037.33333333334, ans=0.0 +2024-07-28 14:56:06,935 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 14:56:07,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0 +2024-07-28 14:56:12,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=172064.0, ans=0.0 +2024-07-28 14:56:14,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=172064.0, ans=0.125 +2024-07-28 14:56:20,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172077.33333333334, ans=0.125 +2024-07-28 14:56:22,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172077.33333333334, ans=0.1 +2024-07-28 14:56:26,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172090.66666666666, ans=0.125 +2024-07-28 14:56:26,803 INFO [train.py:1114] (0/4) Epoch 13, batch 6400, loss[loss=0.2006, simple_loss=0.2959, pruned_loss=0.05269, over 4632.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2764, pruned_loss=0.04972, over 934720.32 frames. ], batch size: 13, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:56:37,088 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+01 5.588e+01 6.261e+01 7.317e+01 1.038e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 14:56:45,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=172117.33333333334, ans=0.2 +2024-07-28 14:56:48,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=172130.66666666666, ans=0.125 +2024-07-28 14:57:00,241 INFO [train.py:1114] (0/4) Epoch 13, batch 6450, loss[loss=0.1962, simple_loss=0.287, pruned_loss=0.0527, over 4437.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2769, pruned_loss=0.05012, over 938316.53 frames. 
], batch size: 21, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:57:01,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=172157.33333333334, ans=0.07 +2024-07-28 14:57:04,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=172157.33333333334, ans=0.07 +2024-07-28 14:57:06,080 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:57:14,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=172184.0, ans=0.125 +2024-07-28 14:57:30,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=172197.33333333334, ans=0.0 +2024-07-28 14:57:39,252 INFO [train.py:1114] (0/4) Epoch 13, batch 6500, loss[loss=0.2584, simple_loss=0.3192, pruned_loss=0.0988, over 3475.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2762, pruned_loss=0.04963, over 939665.53 frames. ], batch size: 36, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:57:43,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=15.0 +2024-07-28 14:57:49,793 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.677e+01 6.560e+01 8.086e+01 1.120e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-28 14:57:50,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=172237.33333333334, ans=0.2 +2024-07-28 14:58:02,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=172264.0, ans=0.1 +2024-07-28 14:58:04,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172264.0, ans=0.125 +2024-07-28 14:58:14,171 INFO [train.py:1114] (0/4) Epoch 13, batch 6550, loss[loss=0.1567, simple_loss=0.2357, pruned_loss=0.03889, over 4801.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2748, pruned_loss=0.04907, over 942625.20 frames. ], batch size: 11, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:58:15,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0 +2024-07-28 14:58:16,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172290.66666666666, ans=0.125 +2024-07-28 14:58:22,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.86 vs. limit=15.0 +2024-07-28 14:58:40,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172344.0, ans=0.1 +2024-07-28 14:58:47,984 INFO [train.py:1114] (0/4) Epoch 13, batch 6600, loss[loss=0.2062, simple_loss=0.2909, pruned_loss=0.06079, over 4930.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2752, pruned_loss=0.0495, over 944818.23 frames. 
], batch size: 14, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:58:54,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=172370.66666666666, ans=0.125 +2024-07-28 14:58:58,755 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.699e+01 6.105e+01 6.926e+01 1.138e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 14:59:04,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=172384.0, ans=0.05 +2024-07-28 14:59:06,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172384.0, ans=0.1 +2024-07-28 14:59:09,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.92 vs. limit=22.5 +2024-07-28 14:59:10,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=172397.33333333334, ans=10.0 +2024-07-28 14:59:22,867 INFO [train.py:1114] (0/4) Epoch 13, batch 6650, loss[loss=0.1937, simple_loss=0.2762, pruned_loss=0.05558, over 4605.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04931, over 943790.25 frames. ], batch size: 17, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:59:33,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=172437.33333333334, ans=0.025 +2024-07-28 14:59:35,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=172450.66666666666, ans=0.07 +2024-07-28 14:59:42,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=172450.66666666666, ans=0.025 +2024-07-28 14:59:42,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-07-28 14:59:56,820 INFO [train.py:1114] (0/4) Epoch 13, batch 6700, loss[loss=0.2283, simple_loss=0.3178, pruned_loss=0.06939, over 4690.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2758, pruned_loss=0.04959, over 942084.23 frames. ], batch size: 19, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:00:06,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=172504.0, ans=0.2 +2024-07-28 15:00:07,466 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.630e+01 6.292e+01 7.000e+01 1.303e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-28 15:00:25,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=172544.0, ans=0.0 +2024-07-28 15:00:32,526 INFO [train.py:1114] (0/4) Epoch 13, batch 6750, loss[loss=0.2053, simple_loss=0.2936, pruned_loss=0.05856, over 4161.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2758, pruned_loss=0.04937, over 939993.91 frames. 
], batch size: 25, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:00:33,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=172557.33333333334, ans=0.95 +2024-07-28 15:00:36,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=172557.33333333334, ans=0.0 +2024-07-28 15:00:40,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172570.66666666666, ans=0.125 +2024-07-28 15:00:41,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=172570.66666666666, ans=0.0 +2024-07-28 15:00:47,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172584.0, ans=0.1 +2024-07-28 15:00:49,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=172584.0, ans=0.125 +2024-07-28 15:01:04,564 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=6.0 +2024-07-28 15:01:08,824 INFO [train.py:1114] (0/4) Epoch 13, batch 6800, loss[loss=0.1918, simple_loss=0.2762, pruned_loss=0.05374, over 4634.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2758, pruned_loss=0.04911, over 938730.70 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:01:19,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.597e+01 6.324e+01 7.266e+01 1.591e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-28 15:01:41,583 INFO [train.py:1114] (0/4) Epoch 13, batch 6850, loss[loss=0.2001, simple_loss=0.2965, pruned_loss=0.0519, over 4687.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2747, pruned_loss=0.04904, over 940309.27 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:01:48,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=172690.66666666666, ans=0.125 +2024-07-28 15:02:01,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=172704.0, ans=0.2 +2024-07-28 15:02:08,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=172717.33333333334, ans=0.125 +2024-07-28 15:02:09,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=172717.33333333334, ans=0.025 +2024-07-28 15:02:11,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=172717.33333333334, ans=0.125 +2024-07-28 15:02:17,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=172730.66666666666, ans=10.0 +2024-07-28 15:02:27,238 INFO [train.py:1114] (0/4) Epoch 13, batch 6900, loss[loss=0.1846, simple_loss=0.2651, pruned_loss=0.05202, over 4962.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2749, pruned_loss=0.04908, over 942549.55 frames. 
], batch size: 13, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:02:35,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172770.66666666666, ans=0.125 +2024-07-28 15:02:36,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172770.66666666666, ans=0.125 +2024-07-28 15:02:38,173 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.650e+01 5.997e+01 6.576e+01 8.900e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 15:02:44,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.09 vs. limit=22.5 +2024-07-28 15:03:01,354 INFO [train.py:1114] (0/4) Epoch 13, batch 6950, loss[loss=0.1545, simple_loss=0.2418, pruned_loss=0.03359, over 4540.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2746, pruned_loss=0.04902, over 939801.76 frames. ], batch size: 10, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:03:13,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=172837.33333333334, ans=0.125 +2024-07-28 15:03:15,298 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.04 vs. limit=15.0 +2024-07-28 15:03:15,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=172837.33333333334, ans=0.0 +2024-07-28 15:03:17,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172850.66666666666, ans=0.1 +2024-07-28 15:03:21,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=172850.66666666666, ans=0.125 +2024-07-28 15:03:21,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172850.66666666666, ans=0.125 +2024-07-28 15:03:28,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=172864.0, ans=0.125 +2024-07-28 15:03:28,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. limit=6.0 +2024-07-28 15:03:32,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172877.33333333334, ans=0.125 +2024-07-28 15:03:33,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172877.33333333334, ans=0.1 +2024-07-28 15:03:38,283 INFO [train.py:1114] (0/4) Epoch 13, batch 7000, loss[loss=0.1786, simple_loss=0.2757, pruned_loss=0.04073, over 4642.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2743, pruned_loss=0.04857, over 937668.78 frames. 
], batch size: 17, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:03:48,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=172904.0, ans=0.0 +2024-07-28 15:03:48,578 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.631e+01 6.423e+01 7.992e+01 1.097e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 15:03:49,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172904.0, ans=0.125 +2024-07-28 15:04:02,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=172930.66666666666, ans=0.0 +2024-07-28 15:04:09,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172944.0, ans=0.1 +2024-07-28 15:04:10,953 INFO [train.py:1114] (0/4) Epoch 13, batch 7050, loss[loss=0.2124, simple_loss=0.2918, pruned_loss=0.06652, over 4699.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2744, pruned_loss=0.04854, over 941249.61 frames. ], batch size: 19, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:04:16,461 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=15.0 +2024-07-28 15:04:17,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=172970.66666666666, ans=0.0 +2024-07-28 15:04:23,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172970.66666666666, ans=0.1 +2024-07-28 15:04:28,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.83 vs. limit=6.0 +2024-07-28 15:04:34,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0 +2024-07-28 15:04:37,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=173010.66666666666, ans=0.1 +2024-07-28 15:04:42,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=173010.66666666666, ans=0.125 +2024-07-28 15:04:44,107 INFO [train.py:1114] (0/4) Epoch 13, batch 7100, loss[loss=0.2034, simple_loss=0.3034, pruned_loss=0.05166, over 4806.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2766, pruned_loss=0.04962, over 935611.35 frames. ], batch size: 15, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:04:49,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.11 vs. 
limit=15.0 +2024-07-28 15:04:54,286 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.692e+01 6.139e+01 7.289e+01 1.294e+02, threshold=1.228e+02, percent-clipped=1.0 +2024-07-28 15:05:01,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173050.66666666666, ans=0.125 +2024-07-28 15:05:05,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173064.0, ans=0.125 +2024-07-28 15:05:08,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173064.0, ans=0.1 +2024-07-28 15:05:16,983 INFO [train.py:1114] (0/4) Epoch 13, batch 7150, loss[loss=0.2012, simple_loss=0.3035, pruned_loss=0.04941, over 4604.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2752, pruned_loss=0.04893, over 936595.61 frames. ], batch size: 21, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:05:17,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=173090.66666666666, ans=0.2 +2024-07-28 15:05:37,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=173130.66666666666, ans=0.0 +2024-07-28 15:05:50,029 INFO [train.py:1114] (0/4) Epoch 13, batch 7200, loss[loss=0.1895, simple_loss=0.2782, pruned_loss=0.05045, over 4798.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.04957, over 937414.40 frames. ], batch size: 15, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:06:00,400 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.641e+01 6.340e+01 7.110e+01 1.006e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 15:06:03,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173184.0, ans=0.1 +2024-07-28 15:06:09,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=173197.33333333334, ans=0.125 +2024-07-28 15:06:09,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=173197.33333333334, ans=0.2 +2024-07-28 15:06:17,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=173210.66666666666, ans=0.125 +2024-07-28 15:06:20,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173210.66666666666, ans=0.125 +2024-07-28 15:06:22,770 INFO [train.py:1114] (0/4) Epoch 13, batch 7250, loss[loss=0.2092, simple_loss=0.2809, pruned_loss=0.0688, over 4850.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2754, pruned_loss=0.04931, over 938817.15 frames. ], batch size: 12, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:06:23,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.42 vs. 
limit=15.0 +2024-07-28 15:06:24,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=173224.0, ans=0.025 +2024-07-28 15:06:31,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=173237.33333333334, ans=0.5 +2024-07-28 15:06:35,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173250.66666666666, ans=0.0 +2024-07-28 15:06:39,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=173250.66666666666, ans=0.0 +2024-07-28 15:06:47,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=173264.0, ans=0.2 +2024-07-28 15:06:55,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=173290.66666666666, ans=0.1 +2024-07-28 15:06:55,551 INFO [train.py:1114] (0/4) Epoch 13, batch 7300, loss[loss=0.1427, simple_loss=0.2308, pruned_loss=0.02732, over 4849.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2747, pruned_loss=0.04892, over 938954.32 frames. ], batch size: 12, lr: 5.72e-03, grad_scale: 64.0 +2024-07-28 15:06:56,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.60 vs. limit=10.0 +2024-07-28 15:06:58,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=173290.66666666666, ans=0.125 +2024-07-28 15:07:01,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=173304.0, ans=0.0 +2024-07-28 15:07:03,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.55 vs. limit=15.0 +2024-07-28 15:07:05,992 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.468e+01 5.985e+01 6.770e+01 9.344e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 15:07:07,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=173304.0, ans=0.2 +2024-07-28 15:07:13,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173317.33333333334, ans=0.125 +2024-07-28 15:07:18,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=173330.66666666666, ans=0.05 +2024-07-28 15:07:22,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.23 vs. limit=22.5 +2024-07-28 15:07:28,289 INFO [train.py:1114] (0/4) Epoch 13, batch 7350, loss[loss=0.1594, simple_loss=0.2507, pruned_loss=0.03411, over 4635.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.274, pruned_loss=0.04857, over 938309.60 frames. 
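[Note on the `Whitening` lines above: each logs a metric against a limit, where the metric measures how far a group's feature covariance is from a multiple of the identity; it is 1.0 for perfectly "white" features and grows with the eigenvalue spread, and a penalty applies only past the limit. A hedged reconstruction of such a metric follows; it is illustrative, not `scaling.py`'s exact code.]

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """mean(eig^2) / mean(eig)^2 of each group's covariance; >= 1 by
    Cauchy-Schwarz, with equality iff the covariance is c * identity."""
    num_channels = x.shape[-1]
    cpg = num_channels // num_groups                    # channels per group
    x = x.reshape(-1, num_groups, cpg).transpose(0, 1)  # (groups, frames, cpg)
    covar = x.transpose(1, 2) @ x                       # (groups, cpg, cpg)
    mean_eig = covar.diagonal(dim1=1, dim2=2).mean(dim=1)               # tr(C)/cpg
    mean_eig_sq = (covar @ covar).diagonal(dim1=1, dim2=2).mean(dim=1)  # tr(C^2)/cpg
    return (mean_eig_sq / (mean_eig ** 2 + 1e-20)).mean()

x = torch.randn(50000, 64)                 # abundant white Gaussian features
print(whitening_metric(x, num_groups=4))   # ~1.0; correlated features score higher
```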
], batch size: 12, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:07:31,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=173357.33333333334, ans=0.125 +2024-07-28 15:07:33,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0 +2024-07-28 15:07:39,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.15 vs. limit=15.0 +2024-07-28 15:07:40,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.22 vs. limit=10.0 +2024-07-28 15:07:42,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173384.0, ans=0.1 +2024-07-28 15:07:44,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=173384.0, ans=0.05 +2024-07-28 15:07:44,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.13 vs. limit=6.0 +2024-07-28 15:07:52,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=173397.33333333334, ans=0.2 +2024-07-28 15:07:59,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.83 vs. limit=22.5 +2024-07-28 15:08:01,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=173410.66666666666, ans=0.2 +2024-07-28 15:08:02,238 INFO [train.py:1114] (0/4) Epoch 13, batch 7400, loss[loss=0.1822, simple_loss=0.271, pruned_loss=0.04668, over 4690.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2739, pruned_loss=0.04875, over 939608.77 frames. ], batch size: 13, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:08:12,824 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+01 5.640e+01 6.317e+01 7.601e+01 1.154e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 15:08:13,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0 +2024-07-28 15:08:33,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=173477.33333333334, ans=0.2 +2024-07-28 15:08:36,730 INFO [train.py:1114] (0/4) Epoch 13, batch 7450, loss[loss=0.1819, simple_loss=0.2586, pruned_loss=0.05255, over 4618.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2739, pruned_loss=0.04883, over 937402.49 frames. ], batch size: 11, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:08:42,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=173504.0, ans=0.125 +2024-07-28 15:08:49,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=173517.33333333334, ans=0.07 +2024-07-28 15:08:56,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.94 vs. 
limit=15.0 +2024-07-28 15:09:00,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173530.66666666666, ans=0.125 +2024-07-28 15:09:03,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=173544.0, ans=15.0 +2024-07-28 15:09:05,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173544.0, ans=0.125 +2024-07-28 15:09:06,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=173544.0, ans=0.2 +2024-07-28 15:09:06,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=173544.0, ans=0.0 +2024-07-28 15:09:08,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173544.0, ans=0.1 +2024-07-28 15:09:09,563 INFO [train.py:1114] (0/4) Epoch 13, batch 7500, loss[loss=0.2533, simple_loss=0.3418, pruned_loss=0.08237, over 3413.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2746, pruned_loss=0.04916, over 936044.63 frames. ], batch size: 36, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:09:20,250 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.712e+01 6.192e+01 7.126e+01 1.284e+02, threshold=1.238e+02, percent-clipped=1.0 +2024-07-28 15:09:22,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=173584.0, ans=0.0 +2024-07-28 15:09:31,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=173584.0, ans=0.0 +2024-07-28 15:09:32,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173584.0, ans=0.125 +2024-07-28 15:09:39,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=173597.33333333334, ans=0.0 +2024-07-28 15:09:47,457 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.29 vs. limit=12.0 +2024-07-28 15:09:50,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=173610.66666666666, ans=0.125 +2024-07-28 15:09:51,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=173610.66666666666, ans=0.0 +2024-07-28 15:09:56,878 INFO [train.py:1114] (0/4) Epoch 13, batch 7550, loss[loss=0.2037, simple_loss=0.2857, pruned_loss=0.06087, over 4637.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2759, pruned_loss=0.04975, over 935662.66 frames. ], batch size: 17, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:10:21,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=173637.33333333334, ans=0.125 +2024-07-28 15:10:22,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.33 vs. limit=15.0 +2024-07-28 15:10:28,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.68 vs. 
limit=10.0 +2024-07-28 15:10:29,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=173650.66666666666, ans=0.0 +2024-07-28 15:10:57,608 INFO [train.py:1114] (0/4) Epoch 13, batch 7600, loss[loss=0.1757, simple_loss=0.2675, pruned_loss=0.04196, over 4799.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2749, pruned_loss=0.04912, over 937784.86 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:10:59,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173690.66666666666, ans=0.1 +2024-07-28 15:11:04,797 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=173704.0, ans=0.0 +2024-07-28 15:11:08,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.498e+01 5.988e+01 6.691e+01 9.239e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 15:11:14,875 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.48 vs. limit=22.5 +2024-07-28 15:11:19,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173717.33333333334, ans=0.0 +2024-07-28 15:11:57,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173730.66666666666, ans=0.125 +2024-07-28 15:12:01,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173744.0, ans=0.125 +2024-07-28 15:12:03,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=173744.0, ans=0.125 +2024-07-28 15:12:06,084 INFO [train.py:1114] (0/4) Epoch 13, batch 7650, loss[loss=0.1726, simple_loss=0.26, pruned_loss=0.04261, over 4937.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.275, pruned_loss=0.04911, over 937564.03 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:12:28,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=173797.33333333334, ans=0.025 +2024-07-28 15:12:41,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=173824.0, ans=0.125 +2024-07-28 15:12:41,865 INFO [train.py:1114] (0/4) Epoch 13, batch 7700, loss[loss=0.1864, simple_loss=0.2751, pruned_loss=0.04889, over 4701.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2755, pruned_loss=0.04922, over 934770.44 frames. 
], batch size: 13, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:12:43,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=173824.0, ans=0.0 +2024-07-28 15:12:43,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=173824.0, ans=0.0 +2024-07-28 15:12:52,766 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.534e+01 6.118e+01 6.663e+01 8.734e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 15:13:06,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173864.0, ans=0.1 +2024-07-28 15:13:14,218 INFO [train.py:1114] (0/4) Epoch 13, batch 7750, loss[loss=0.189, simple_loss=0.2824, pruned_loss=0.0478, over 4934.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2757, pruned_loss=0.04921, over 935869.21 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 32.0 +2024-07-28 15:13:15,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=173890.66666666666, ans=0.125 +2024-07-28 15:13:22,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=173904.0, ans=0.0 +2024-07-28 15:13:23,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0 +2024-07-28 15:13:32,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=173917.33333333334, ans=0.0 +2024-07-28 15:13:46,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173944.0, ans=0.125 +2024-07-28 15:13:49,767 INFO [train.py:1114] (0/4) Epoch 13, batch 7800, loss[loss=0.1608, simple_loss=0.2506, pruned_loss=0.03548, over 4665.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2754, pruned_loss=0.04893, over 937699.25 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:13:53,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173957.33333333334, ans=0.125 +2024-07-28 15:14:01,090 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.796e+01 5.555e+01 6.069e+01 6.471e+01 9.594e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 15:14:12,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=15.0 +2024-07-28 15:14:35,068 INFO [train.py:1114] (0/4) Epoch 13, batch 7850, loss[loss=0.1405, simple_loss=0.2274, pruned_loss=0.02687, over 4567.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2759, pruned_loss=0.04931, over 936350.21 frames. ], batch size: 10, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:14:38,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=174024.0, ans=0.0 +2024-07-28 15:14:42,122 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.27 vs. 
limit=22.5 +2024-07-28 15:14:45,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=174037.33333333334, ans=0.125 +2024-07-28 15:14:47,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174037.33333333334, ans=0.1 +2024-07-28 15:14:59,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174064.0, ans=0.1 +2024-07-28 15:15:00,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=174064.0, ans=0.0 +2024-07-28 15:15:06,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.62 vs. limit=15.0 +2024-07-28 15:15:10,000 INFO [train.py:1114] (0/4) Epoch 13, batch 7900, loss[loss=0.1975, simple_loss=0.2936, pruned_loss=0.05066, over 4871.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.278, pruned_loss=0.04968, over 933365.53 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:15:15,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-28 15:15:20,616 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.632e+01 6.110e+01 7.084e+01 9.814e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 15:15:21,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=12.0 +2024-07-28 15:15:31,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=174130.66666666666, ans=0.0 +2024-07-28 15:15:36,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=174144.0, ans=0.0 +2024-07-28 15:15:38,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=174144.0, ans=0.09899494936611666 +2024-07-28 15:15:41,921 INFO [train.py:1114] (0/4) Epoch 13, batch 7950, loss[loss=0.2128, simple_loss=0.2887, pruned_loss=0.06845, over 3639.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2768, pruned_loss=0.04868, over 935773.40 frames. ], batch size: 35, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:15:47,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=174170.66666666666, ans=0.125 +2024-07-28 15:15:51,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174170.66666666666, ans=0.1 +2024-07-28 15:15:53,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=10.0 +2024-07-28 15:15:55,731 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:16:14,622 INFO [train.py:1114] (0/4) Epoch 13, batch 8000, loss[loss=0.1862, simple_loss=0.263, pruned_loss=0.05475, over 4613.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2754, pruned_loss=0.04839, over 935214.79 frames. 
], batch size: 11, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:16:22,184 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.28 vs. limit=22.5 +2024-07-28 15:16:22,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=174237.33333333334, ans=0.125 +2024-07-28 15:16:25,590 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.751e+01 6.184e+01 6.866e+01 1.059e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 15:16:28,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=174250.66666666666, ans=0.025 +2024-07-28 15:16:31,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=174250.66666666666, ans=0.025 +2024-07-28 15:16:36,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=174264.0, ans=0.09899494936611666 +2024-07-28 15:16:42,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=174277.33333333334, ans=0.025 +2024-07-28 15:16:47,971 INFO [train.py:1114] (0/4) Epoch 13, batch 8050, loss[loss=0.1852, simple_loss=0.2757, pruned_loss=0.04738, over 4807.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2757, pruned_loss=0.04861, over 935173.30 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:16:50,193 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:16:52,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=174290.66666666666, ans=0.125 +2024-07-28 15:16:54,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=174304.0, ans=0.015 +2024-07-28 15:17:00,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-07-28 15:17:04,818 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.48 vs. limit=15.0 +2024-07-28 15:17:08,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=12.0 +2024-07-28 15:17:15,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=174330.66666666666, ans=0.05 +2024-07-28 15:17:19,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=174344.0, ans=0.125 +2024-07-28 15:17:23,899 INFO [train.py:1114] (0/4) Epoch 13, batch 8100, loss[loss=0.2398, simple_loss=0.3186, pruned_loss=0.08047, over 4806.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2766, pruned_loss=0.04915, over 935259.57 frames. 
], batch size: 15, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:17:25,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=174357.33333333334, ans=0.0 +2024-07-28 15:17:26,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174357.33333333334, ans=0.0 +2024-07-28 15:17:27,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=174357.33333333334, ans=0.0 +2024-07-28 15:17:28,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-07-28 15:17:30,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174370.66666666666, ans=0.0 +2024-07-28 15:17:34,671 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.712e+01 6.251e+01 7.311e+01 9.756e+01, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 15:17:38,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=174384.0, ans=0.2 +2024-07-28 15:17:45,133 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.75 vs. limit=15.0 +2024-07-28 15:18:14,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174410.66666666666, ans=0.1 +2024-07-28 15:18:25,034 INFO [train.py:1114] (0/4) Epoch 13, batch 8150, loss[loss=0.1605, simple_loss=0.2637, pruned_loss=0.02864, over 4800.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2751, pruned_loss=0.04863, over 938439.81 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:18:29,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.03 vs. limit=22.5 +2024-07-28 15:18:29,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.36 vs. limit=22.5 +2024-07-28 15:18:40,999 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.68 vs. limit=10.0 +2024-07-28 15:18:42,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=174437.33333333334, ans=0.04949747468305833 +2024-07-28 15:21:31,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174477.33333333334, ans=0.1 +2024-07-28 15:21:31,668 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:21:34,238 INFO [train.py:1114] (0/4) Epoch 13, batch 8200, loss[loss=0.1667, simple_loss=0.2617, pruned_loss=0.03583, over 4803.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2756, pruned_loss=0.04846, over 939540.15 frames. 
], batch size: 15, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:21:57,446 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 5.624e+01 6.115e+01 7.227e+01 1.322e+02, threshold=1.223e+02, percent-clipped=1.0 +2024-07-28 15:21:59,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.19 vs. limit=15.0 +2024-07-28 15:22:01,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174517.33333333334, ans=0.0 +2024-07-28 15:22:07,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=15.0 +2024-07-28 15:22:23,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=174530.66666666666, ans=0.125 +2024-07-28 15:23:17,784 INFO [train.py:1114] (0/4) Epoch 13, batch 8250, loss[loss=0.1839, simple_loss=0.2715, pruned_loss=0.04811, over 4894.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2751, pruned_loss=0.04815, over 939479.15 frames. ], batch size: 13, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:23:29,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=174557.33333333334, ans=0.0 +2024-07-28 15:23:30,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.82 vs. limit=12.0 +2024-07-28 15:23:30,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=174557.33333333334, ans=0.125 +2024-07-28 15:23:34,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=174570.66666666666, ans=0.5 +2024-07-28 15:23:42,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=174584.0, ans=0.125 +2024-07-28 15:23:45,870 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.03 vs. limit=15.0 +2024-07-28 15:23:47,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=174597.33333333334, ans=0.125 +2024-07-28 15:23:52,471 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:23:57,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=174610.66666666666, ans=0.2 +2024-07-28 15:24:00,770 INFO [train.py:1114] (0/4) Epoch 13, batch 8300, loss[loss=0.191, simple_loss=0.2817, pruned_loss=0.05013, over 4893.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2765, pruned_loss=0.04886, over 939240.72 frames. 
], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:24:09,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174637.33333333334, ans=0.125 +2024-07-28 15:24:11,821 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.640e+01 5.984e+01 6.893e+01 9.803e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 15:24:14,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=174650.66666666666, ans=0.0 +2024-07-28 15:24:21,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174664.0, ans=0.1 +2024-07-28 15:24:35,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=174677.33333333334, ans=0.2 +2024-07-28 15:24:38,267 INFO [train.py:1114] (0/4) Epoch 13, batch 8350, loss[loss=0.1917, simple_loss=0.2762, pruned_loss=0.05361, over 4802.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2767, pruned_loss=0.04903, over 941850.31 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:24:38,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174690.66666666666, ans=0.125 +2024-07-28 15:24:41,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=174690.66666666666, ans=0.0 +2024-07-28 15:24:48,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=174704.0, ans=0.0 +2024-07-28 15:24:51,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174717.33333333334, ans=0.1 +2024-07-28 15:25:00,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=174730.66666666666, ans=0.125 +2024-07-28 15:25:06,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=174744.0, ans=0.0 +2024-07-28 15:25:12,898 INFO [train.py:1114] (0/4) Epoch 13, batch 8400, loss[loss=0.173, simple_loss=0.252, pruned_loss=0.04695, over 4788.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2767, pruned_loss=0.04945, over 940527.35 frames. ], batch size: 12, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:25:23,771 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.833e+01 6.092e+01 7.413e+01 1.221e+02, threshold=1.218e+02, percent-clipped=1.0 +2024-07-28 15:25:29,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=174784.0, ans=0.125 +2024-07-28 15:25:33,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=174784.0, ans=0.0 +2024-07-28 15:25:37,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.36 vs. 
limit=15.0 +2024-07-28 15:25:45,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=174810.66666666666, ans=0.125 +2024-07-28 15:25:51,145 INFO [train.py:1114] (0/4) Epoch 13, batch 8450, loss[loss=0.1733, simple_loss=0.2684, pruned_loss=0.03915, over 4799.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2773, pruned_loss=0.04949, over 938997.01 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:26:03,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=174850.66666666666, ans=0.0 +2024-07-28 15:26:25,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.92 vs. limit=10.0 +2024-07-28 15:26:29,351 INFO [train.py:1114] (0/4) Epoch 13, batch 8500, loss[loss=0.1582, simple_loss=0.2452, pruned_loss=0.03557, over 4611.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2758, pruned_loss=0.04893, over 938798.87 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:29:35,088 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.221e+01 5.689e+01 6.230e+01 7.373e+01 1.057e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 15:29:35,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=174904.0, ans=0.0 +2024-07-28 15:29:38,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=174917.33333333334, ans=0.2 +2024-07-28 15:29:51,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174930.66666666666, ans=0.1 +2024-07-28 15:30:03,277 INFO [train.py:1114] (0/4) Epoch 13, batch 8550, loss[loss=0.1843, simple_loss=0.2628, pruned_loss=0.0529, over 4793.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2755, pruned_loss=0.04886, over 939625.07 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:30:26,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=174997.33333333334, ans=0.07 +2024-07-28 15:30:41,173 INFO [train.py:1114] (0/4) Epoch 13, batch 8600, loss[loss=0.1798, simple_loss=0.2659, pruned_loss=0.04682, over 4811.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2745, pruned_loss=0.04835, over 938963.14 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:30:51,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=175037.33333333334, ans=0.1 +2024-07-28 15:30:51,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=175037.33333333334, ans=0.0 +2024-07-28 15:30:52,889 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0 +2024-07-28 15:30:54,282 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.590e+01 5.714e+01 6.617e+01 7.604e+01 1.022e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-28 15:30:58,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.78 vs. 
limit=15.0 +2024-07-28 15:31:08,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=175077.33333333334, ans=0.2 +2024-07-28 15:31:14,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175077.33333333334, ans=0.1 +2024-07-28 15:31:16,120 INFO [train.py:1114] (0/4) Epoch 13, batch 8650, loss[loss=0.1926, simple_loss=0.2857, pruned_loss=0.04975, over 4889.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2741, pruned_loss=0.04802, over 940380.45 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:31:17,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=175090.66666666666, ans=0.125 +2024-07-28 15:36:13,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=175104.0, ans=0.125 +2024-07-28 15:36:24,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=175117.33333333334, ans=0.125 +2024-07-28 15:36:28,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=175117.33333333334, ans=0.2 +2024-07-28 15:36:39,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=175144.0, ans=10.0 +2024-07-28 15:36:43,600 INFO [train.py:1114] (0/4) Epoch 13, batch 8700, loss[loss=0.2019, simple_loss=0.2911, pruned_loss=0.05631, over 4749.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2755, pruned_loss=0.04882, over 938123.96 frames. ], batch size: 13, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:36:52,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=175170.66666666666, ans=0.0 +2024-07-28 15:36:59,762 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.561e+01 6.137e+01 6.917e+01 9.151e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 15:37:34,720 INFO [train.py:1114] (0/4) Epoch 13, batch 8750, loss[loss=0.2156, simple_loss=0.2988, pruned_loss=0.0662, over 4683.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2746, pruned_loss=0.04877, over 936577.02 frames. ], batch size: 15, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:37:39,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175224.0, ans=0.1 +2024-07-28 15:37:46,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.59 vs. limit=10.0 +2024-07-28 15:37:52,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.94 vs. 
limit=22.5 +2024-07-28 15:38:00,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=175264.0, ans=0.125 +2024-07-28 15:38:03,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=175277.33333333334, ans=0.0 +2024-07-28 15:38:04,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=175277.33333333334, ans=0.07 +2024-07-28 15:38:09,910 INFO [train.py:1114] (0/4) Epoch 13, batch 8800, loss[loss=0.1747, simple_loss=0.2694, pruned_loss=0.04001, over 4933.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2751, pruned_loss=0.04853, over 937353.52 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:38:12,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=175290.66666666666, ans=0.125 +2024-07-28 15:38:15,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=175290.66666666666, ans=0.125 +2024-07-28 15:38:15,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.02 vs. limit=22.5 +2024-07-28 15:38:21,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.841e+01 6.340e+01 7.291e+01 9.820e+01, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 15:38:32,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.75 vs. limit=15.0 +2024-07-28 15:38:41,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=175344.0, ans=0.125 +2024-07-28 15:38:43,106 INFO [train.py:1114] (0/4) Epoch 13, batch 8850, loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.04412, over 4539.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2744, pruned_loss=0.04868, over 931842.82 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:38:49,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.35 vs. limit=15.0 +2024-07-28 15:39:04,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=175397.33333333334, ans=0.125 +2024-07-28 15:39:07,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=175397.33333333334, ans=0.125 +2024-07-28 15:39:12,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-28 15:39:15,617 INFO [train.py:1114] (0/4) Epoch 13, batch 8900, loss[loss=0.1598, simple_loss=0.2478, pruned_loss=0.03588, over 4936.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04943, over 929433.22 frames. 
], batch size: 12, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:39:17,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=175424.0, ans=0.125 +2024-07-28 15:39:32,293 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.752e+01 6.427e+01 7.462e+01 1.101e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 15:39:36,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175450.66666666666, ans=0.1 +2024-07-28 15:39:37,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.78 vs. limit=15.0 +2024-07-28 15:39:41,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-28 15:39:44,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=175464.0, ans=10.0 +2024-07-28 15:39:53,988 INFO [train.py:1114] (0/4) Epoch 13, batch 8950, loss[loss=0.2184, simple_loss=0.2962, pruned_loss=0.07035, over 4490.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2763, pruned_loss=0.04933, over 930077.98 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:39:57,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=175490.66666666666, ans=0.125 +2024-07-28 15:40:08,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=175517.33333333334, ans=0.0 +2024-07-28 15:40:20,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175530.66666666666, ans=0.125 +2024-07-28 15:40:23,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=175530.66666666666, ans=0.2 +2024-07-28 15:40:24,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=175530.66666666666, ans=0.125 +2024-07-28 15:40:32,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=175544.0, ans=0.0 +2024-07-28 15:40:32,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175544.0, ans=0.125 +2024-07-28 15:40:38,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.40 vs. limit=15.0 +2024-07-28 15:40:38,281 INFO [train.py:1114] (0/4) Epoch 13, batch 9000, loss[loss=0.1689, simple_loss=0.2591, pruned_loss=0.03932, over 4641.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2758, pruned_loss=0.04951, over 933250.05 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:40:38,282 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 15:42:46,194 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.9987, 3.0317, 3.7355, 3.7774], device='cuda:0') +2024-07-28 15:42:48,985 INFO [train.py:1146] (0/4) Epoch 13, validation: loss=0.1657, simple_loss=0.2696, pruned_loss=0.03096, over 944034.00 frames. 
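The recurring `WARNING [optim.py:487] ... Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...` lines summarize the distribution of recent total-gradient norms: the five numbers are the min / 25% / 50% / 75% / max, and the printed threshold is consistently `clipping_scale` times the median (e.g. 2.0 × 6.317e+01 = 1.263e+02 in the first such line above). A minimal sketch of that bookkeeping in PyTorch; the helper name and signature are illustrative assumptions, not the `optim.py` API:

```python
import torch

def grad_norm_clipping_stats(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    """Summarize recent gradient norms the way the optim.py WARNING lines do.

    `grad_norms` is assumed to hold one total-gradient norm per recent batch;
    this helper is a sketch, not the repository's implementation.
    """
    # min / 25% / 50% / 75% / max, matching "grad-norm quartiles a b c d e"
    quartiles = torch.quantile(
        grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])
    )
    # threshold = clipping_scale * median, e.g. 2.0 * 6.317e+01 = 1.263e+02
    threshold = clipping_scale * quartiles[2]
    # fraction of recent batches whose gradient norm exceeded the threshold,
    # matching the "percent-clipped" field
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return quartiles, threshold, percent_clipped


# toy usage: norms around the magnitudes seen in the log above
norms = torch.tensor([43.7, 56.4, 63.2, 76.0, 115.4, 58.9, 61.0, 70.3])
q, thr, pct = grad_norm_clipping_stats(norms)
print(q.tolist(), thr.item(), pct.item())
```

The `ScheduledFloat: name=..., batch_count=..., ans=...` lines are analogous instrumentation: each appears to print the current value (`ans`) of a hyper-parameter that is scheduled against the global batch count, which is why the same `name` recurs with slowly changing `ans` as `batch_count` grows.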
+2024-07-28 15:44:57,367 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 15:44:59,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=175557.33333333334, ans=0.0 +2024-07-28 15:45:12,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.84 vs. limit=22.5 +2024-07-28 15:45:14,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.54 vs. limit=22.5 +2024-07-28 15:45:15,800 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.562e+01 6.322e+01 7.112e+01 1.143e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 15:45:19,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=175584.0, ans=0.125 +2024-07-28 15:45:20,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0 +2024-07-28 15:45:27,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175597.33333333334, ans=0.1 +2024-07-28 15:45:31,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=175597.33333333334, ans=0.125 +2024-07-28 15:45:39,136 INFO [train.py:1114] (0/4) Epoch 13, batch 9050, loss[loss=0.1689, simple_loss=0.2501, pruned_loss=0.04385, over 4508.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2752, pruned_loss=0.04945, over 934179.56 frames. ], batch size: 10, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:45:47,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=175637.33333333334, ans=0.125 +2024-07-28 15:45:52,229 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:46:00,523 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.32 vs. limit=15.0 +2024-07-28 15:46:02,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=175664.0, ans=0.5 +2024-07-28 15:46:06,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.05 vs. limit=10.0 +2024-07-28 15:46:10,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.27 vs. limit=15.0 +2024-07-28 15:46:12,003 INFO [train.py:1114] (0/4) Epoch 13, batch 9100, loss[loss=0.1977, simple_loss=0.2905, pruned_loss=0.05246, over 4929.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2745, pruned_loss=0.0488, over 936742.20 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:46:13,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175690.66666666666, ans=0.125 +2024-07-28 15:46:16,694 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. 
limit=15.0 +2024-07-28 15:46:22,483 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.613e+01 6.012e+01 6.953e+01 8.806e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 15:46:28,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=175717.33333333334, ans=0.125 +2024-07-28 15:46:59,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175744.0, ans=0.1 +2024-07-28 15:47:19,830 INFO [train.py:1114] (0/4) Epoch 13, batch 9150, loss[loss=0.1623, simple_loss=0.2506, pruned_loss=0.03695, over 4810.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2759, pruned_loss=0.04908, over 935423.96 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:47:43,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175770.66666666666, ans=0.125 +2024-07-28 15:47:43,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=175770.66666666666, ans=0.0 +2024-07-28 15:48:24,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=175784.0, ans=0.0 +2024-07-28 15:48:25,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=175784.0, ans=0.04949747468305833 +2024-07-28 15:48:29,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=175797.33333333334, ans=0.125 +2024-07-28 15:48:30,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=175797.33333333334, ans=0.035 +2024-07-28 15:48:31,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=175797.33333333334, ans=0.07 +2024-07-28 15:48:41,160 INFO [train.py:1114] (0/4) Epoch 13, batch 9200, loss[loss=0.2057, simple_loss=0.2915, pruned_loss=0.05993, over 4855.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.275, pruned_loss=0.04889, over 937521.80 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:48:41,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=175824.0, ans=0.125 +2024-07-28 15:48:49,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=175837.33333333334, ans=0.0 +2024-07-28 15:48:50,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=175837.33333333334, ans=0.0 +2024-07-28 15:48:51,968 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.167e+01 6.927e+01 1.004e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 15:48:54,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=175850.66666666666, ans=0.0 +2024-07-28 15:49:11,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175877.33333333334, ans=0.1 +2024-07-28 15:49:12,536 INFO [train.py:1114] (0/4) Epoch 13, batch 9250, loss[loss=0.1876, simple_loss=0.2774, pruned_loss=0.04885, over 4634.00 frames. 
], tot_loss[loss=0.1858, simple_loss=0.2747, pruned_loss=0.04843, over 938546.94 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:51:55,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175917.33333333334, ans=0.1 +2024-07-28 15:52:37,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.64 vs. limit=12.0 +2024-07-28 15:52:38,250 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=12.0 +2024-07-28 15:52:46,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175930.66666666666, ans=0.0 +2024-07-28 15:52:53,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=175944.0, ans=0.125 +2024-07-28 15:52:54,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175944.0, ans=0.125 +2024-07-28 15:52:56,077 INFO [train.py:1114] (0/4) Epoch 13, batch 9300, loss[loss=0.1557, simple_loss=0.253, pruned_loss=0.02924, over 4784.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2743, pruned_loss=0.04858, over 938176.80 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:53:05,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175970.66666666666, ans=0.125 +2024-07-28 15:53:06,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.420e+01 5.839e+01 6.596e+01 1.003e+02, threshold=1.168e+02, percent-clipped=0.0 +2024-07-28 15:53:08,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=175984.0, ans=0.125 +2024-07-28 15:53:15,787 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-132000.pt +2024-07-28 15:53:24,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=175997.33333333334, ans=0.0 +2024-07-28 15:53:25,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175997.33333333334, ans=0.1 +2024-07-28 15:53:29,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=176010.66666666666, ans=0.125 +2024-07-28 15:53:35,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.81 vs. limit=12.0 +2024-07-28 15:53:36,026 INFO [train.py:1114] (0/4) Epoch 13, batch 9350, loss[loss=0.1511, simple_loss=0.246, pruned_loss=0.02809, over 4801.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2751, pruned_loss=0.04887, over 935524.14 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:53:40,170 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=15.0 +2024-07-28 15:53:51,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. 
limit=15.0 +2024-07-28 15:53:58,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176064.0, ans=0.0 +2024-07-28 15:54:05,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176077.33333333334, ans=0.1 +2024-07-28 15:54:07,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=176077.33333333334, ans=0.07 +2024-07-28 15:54:08,585 INFO [train.py:1114] (0/4) Epoch 13, batch 9400, loss[loss=0.1458, simple_loss=0.2449, pruned_loss=0.02332, over 4691.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.275, pruned_loss=0.04865, over 933366.40 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:54:09,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=176090.66666666666, ans=0.0 +2024-07-28 15:54:09,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=176090.66666666666, ans=0.125 +2024-07-28 15:54:19,734 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.585e+01 5.549e+01 6.208e+01 6.780e+01 1.030e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 15:54:20,146 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.95 vs. limit=15.0 +2024-07-28 15:54:23,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176117.33333333334, ans=0.125 +2024-07-28 15:54:25,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 15:54:36,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=176144.0, ans=0.125 +2024-07-28 15:54:37,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176144.0, ans=0.1 +2024-07-28 15:54:40,627 INFO [train.py:1114] (0/4) Epoch 13, batch 9450, loss[loss=0.165, simple_loss=0.2347, pruned_loss=0.04768, over 4810.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2748, pruned_loss=0.0484, over 932668.62 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:54:55,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176184.0, ans=0.125 +2024-07-28 15:54:57,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=176184.0, ans=0.04949747468305833 +2024-07-28 15:55:02,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176197.33333333334, ans=0.1 +2024-07-28 15:55:11,689 INFO [train.py:1114] (0/4) Epoch 13, batch 9500, loss[loss=0.2178, simple_loss=0.3147, pruned_loss=0.06048, over 4704.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.275, pruned_loss=0.04802, over 934915.30 frames. 
], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:55:11,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176224.0, ans=0.1 +2024-07-28 15:55:20,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.38 vs. limit=22.5 +2024-07-28 15:55:22,350 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+01 5.491e+01 5.977e+01 6.811e+01 8.816e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 15:55:25,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176250.66666666666, ans=0.1 +2024-07-28 15:55:25,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.99 vs. limit=12.0 +2024-07-28 15:55:25,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176250.66666666666, ans=0.0 +2024-07-28 15:55:39,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=176277.33333333334, ans=0.025 +2024-07-28 15:55:43,368 INFO [train.py:1114] (0/4) Epoch 13, batch 9550, loss[loss=0.1763, simple_loss=0.2683, pruned_loss=0.04217, over 4772.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2763, pruned_loss=0.04904, over 932230.25 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:55:57,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=176317.33333333334, ans=15.0 +2024-07-28 15:55:59,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.00 vs. limit=15.0 +2024-07-28 15:56:05,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=176330.66666666666, ans=0.125 +2024-07-28 15:56:15,427 INFO [train.py:1114] (0/4) Epoch 13, batch 9600, loss[loss=0.2344, simple_loss=0.3069, pruned_loss=0.08093, over 3425.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2761, pruned_loss=0.04874, over 930905.30 frames. ], batch size: 35, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:56:17,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=176357.33333333334, ans=0.2 +2024-07-28 15:56:23,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=176370.66666666666, ans=0.125 +2024-07-28 15:56:26,140 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+01 5.951e+01 6.565e+01 7.484e+01 1.008e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 15:56:28,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=176384.0, ans=6.0 +2024-07-28 15:56:39,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=176397.33333333334, ans=0.2 +2024-07-28 15:56:47,956 INFO [train.py:1114] (0/4) Epoch 13, batch 9650, loss[loss=0.2128, simple_loss=0.2943, pruned_loss=0.06562, over 4837.00 frames. 
], tot_loss[loss=0.1878, simple_loss=0.2771, pruned_loss=0.04932, over 927067.68 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 15:56:50,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.96 vs. limit=22.5 +2024-07-28 15:56:52,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=176424.0, ans=0.07 +2024-07-28 15:56:54,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=176437.33333333334, ans=0.125 +2024-07-28 15:56:59,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176450.66666666666, ans=0.0 +2024-07-28 15:56:59,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=176450.66666666666, ans=0.0 +2024-07-28 15:56:59,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=176450.66666666666, ans=0.0 +2024-07-28 15:57:02,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176450.66666666666, ans=0.1 +2024-07-28 15:57:12,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.60 vs. limit=15.0 +2024-07-28 15:57:15,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=176464.0, ans=0.125 +2024-07-28 15:57:24,274 INFO [train.py:1114] (0/4) Epoch 13, batch 9700, loss[loss=0.2114, simple_loss=0.3064, pruned_loss=0.05821, over 4196.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2777, pruned_loss=0.04981, over 925889.87 frames. ], batch size: 25, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 15:57:24,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176490.66666666666, ans=0.1 +2024-07-28 15:57:34,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.912e+01 5.596e+01 6.037e+01 6.865e+01 8.980e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 15:57:40,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=176517.33333333334, ans=0.125 +2024-07-28 15:57:41,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=176517.33333333334, ans=0.125 +2024-07-28 15:57:50,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176544.0, ans=0.125 +2024-07-28 15:57:58,939 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0 +2024-07-28 15:57:59,697 INFO [train.py:1114] (0/4) Epoch 13, batch 9750, loss[loss=0.1978, simple_loss=0.2863, pruned_loss=0.05462, over 4690.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.277, pruned_loss=0.04964, over 926000.36 frames. 
], batch size: 15, lr: 5.66e-03, grad_scale: 64.0 +2024-07-28 15:58:02,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=176557.33333333334, ans=0.2 +2024-07-28 15:58:03,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=176557.33333333334, ans=0.2 +2024-07-28 15:58:05,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.72 vs. limit=15.0 +2024-07-28 16:00:27,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=176584.0, ans=0.95 +2024-07-28 16:01:18,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=176597.33333333334, ans=0.0 +2024-07-28 16:01:18,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=176597.33333333334, ans=0.0 +2024-07-28 16:01:28,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=176597.33333333334, ans=0.125 +2024-07-28 16:01:45,808 INFO [train.py:1114] (0/4) Epoch 13, batch 9800, loss[loss=0.204, simple_loss=0.2938, pruned_loss=0.05711, over 4707.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2751, pruned_loss=0.04924, over 925648.75 frames. ], batch size: 12, lr: 5.66e-03, grad_scale: 64.0 +2024-07-28 16:01:46,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=176624.0, ans=0.125 +2024-07-28 16:01:46,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=176624.0, ans=0.125 +2024-07-28 16:02:08,118 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.529e+01 5.638e+01 6.459e+01 7.664e+01 1.106e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 16:02:29,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=176650.66666666666, ans=0.0 +2024-07-28 16:02:30,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=176650.66666666666, ans=0.0 +2024-07-28 16:02:34,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=176650.66666666666, ans=0.0 +2024-07-28 16:02:42,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176664.0, ans=0.1 +2024-07-28 16:03:00,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=176677.33333333334, ans=0.125 +2024-07-28 16:03:21,593 INFO [train.py:1114] (0/4) Epoch 13, batch 9850, loss[loss=0.2048, simple_loss=0.2923, pruned_loss=0.0586, over 4909.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.276, pruned_loss=0.04933, over 927509.23 frames. 
], batch size: 15, lr: 5.66e-03, grad_scale: 64.0 +2024-07-28 16:03:23,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176690.66666666666, ans=0.1 +2024-07-28 16:03:49,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=176704.0, ans=10.0 +2024-07-28 16:05:19,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.55 vs. limit=15.0 +2024-07-28 16:05:23,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=176744.0, ans=0.2 +2024-07-28 16:05:24,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=176744.0, ans=0.0 +2024-07-28 16:05:30,411 INFO [train.py:1114] (0/4) Epoch 13, batch 9900, loss[loss=0.2261, simple_loss=0.3018, pruned_loss=0.07523, over 4824.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2764, pruned_loss=0.04974, over 926976.41 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:06:10,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=176770.66666666666, ans=0.0 +2024-07-28 16:06:26,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176770.66666666666, ans=0.125 +2024-07-28 16:06:49,265 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+01 5.715e+01 6.519e+01 7.339e+01 1.147e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 16:06:49,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176770.66666666666, ans=0.0 +2024-07-28 16:06:49,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=176784.0, ans=0.125 +2024-07-28 16:07:03,340 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-07-28 16:07:11,626 INFO [train.py:1114] (0/4) Epoch 13, batch 9950, loss[loss=0.1898, simple_loss=0.267, pruned_loss=0.05632, over 4782.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2767, pruned_loss=0.05014, over 929348.45 frames. ], batch size: 11, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:07:19,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=176824.0, ans=0.0 +2024-07-28 16:07:26,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.80 vs. 
limit=22.5 +2024-07-28 16:07:30,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=176837.33333333334, ans=0.125 +2024-07-28 16:07:32,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=176850.66666666666, ans=0.125 +2024-07-28 16:07:38,046 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:07:50,061 INFO [train.py:1114] (0/4) Epoch 13, batch 10000, loss[loss=0.1696, simple_loss=0.2615, pruned_loss=0.03889, over 4594.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2787, pruned_loss=0.05086, over 926686.31 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:07:58,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.38 vs. limit=22.5 +2024-07-28 16:08:01,259 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.743e+01 6.303e+01 7.198e+01 1.105e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:08:07,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.94 vs. limit=15.0 +2024-07-28 16:08:19,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=176944.0, ans=0.2 +2024-07-28 16:08:22,357 INFO [train.py:1114] (0/4) Epoch 13, batch 10050, loss[loss=0.2604, simple_loss=0.3251, pruned_loss=0.09786, over 3400.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2833, pruned_loss=0.05332, over 915341.55 frames. ], batch size: 38, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:08:23,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=176957.33333333334, ans=0.2 +2024-07-28 16:08:24,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=176957.33333333334, ans=0.025 +2024-07-28 16:08:36,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=20.27 vs. limit=15.0 +2024-07-28 16:08:39,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=176984.0, ans=0.09899494936611666 +2024-07-28 16:08:55,891 INFO [train.py:1114] (0/4) Epoch 13, batch 10100, loss[loss=0.2257, simple_loss=0.2984, pruned_loss=0.07653, over 3329.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2881, pruned_loss=0.05836, over 861463.70 frames. ], batch size: 35, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:08:55,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=177024.0, ans=0.125 +2024-07-28 16:09:07,526 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+01 6.562e+01 7.156e+01 7.782e+01 1.093e+02, threshold=1.431e+02, percent-clipped=0.0 +2024-07-28 16:09:09,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=177050.66666666666, ans=0.0 +2024-07-28 16:09:28,338 INFO [train.py:1114] (0/4) Epoch 13, batch 10150, loss[loss=0.2064, simple_loss=0.2864, pruned_loss=0.06318, over 3113.00 frames. 
], tot_loss[loss=0.2082, simple_loss=0.2918, pruned_loss=0.06229, over 819509.57 frames. ], batch size: 36, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:09:29,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177090.66666666666, ans=0.125 +2024-07-28 16:09:31,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=177090.66666666666, ans=0.125 +2024-07-28 16:09:39,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=177104.0, ans=0.0 +2024-07-28 16:09:45,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=177117.33333333334, ans=0.2 +2024-07-28 16:09:51,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=177130.66666666666, ans=0.0 +2024-07-28 16:10:15,121 INFO [train.py:1114] (0/4) Epoch 13, batch 10200, loss[loss=0.2354, simple_loss=0.3107, pruned_loss=0.08004, over 3172.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2944, pruned_loss=0.0651, over 787440.90 frames. ], batch size: 36, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:10:15,562 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.37 vs. limit=15.0 +2024-07-28 16:10:17,371 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-07-28 16:10:18,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=177157.33333333334, ans=0.0 +2024-07-28 16:10:23,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177170.66666666666, ans=0.125 +2024-07-28 16:10:24,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=177170.66666666666, ans=0.0 +2024-07-28 16:10:26,160 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.992e+01 6.629e+01 7.003e+01 7.390e+01 9.064e+01, threshold=1.401e+02, percent-clipped=0.0 +2024-07-28 16:10:26,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=177184.0, ans=0.025 +2024-07-28 16:10:28,807 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-13.pt +2024-07-28 16:11:16,421 INFO [train.py:1114] (0/4) Epoch 14, batch 0, loss[loss=0.1554, simple_loss=0.2484, pruned_loss=0.03121, over 4856.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2484, pruned_loss=0.03121, over 4856.00 frames. ], batch size: 12, lr: 5.45e-03, grad_scale: 32.0 +2024-07-28 16:11:16,421 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 16:14:50,843 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.2961, 4.6968, 4.6419, 5.0804], device='cuda:0') +2024-07-28 16:14:55,768 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1673, simple_loss=0.2724, pruned_loss=0.03104, over 944034.00 frames. 
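The per-batch "tot_loss[..., over N frames.]" entries and the epoch-level validation summary just above read as frame-weighted averages: each batch contributes its loss in proportion to its frame count, so long utterances weigh more than short ones. Below is a minimal sketch of that bookkeeping, assuming plain accumulation (train.py may instead keep a decaying running average; all names are illustrative, not icefall's):

    # Hedged sketch: frame-weighted loss averaging, as suggested by the
    # "tot_loss[..., over N frames.]" entries. Assumes plain summation; the
    # real train.py may use a decaying running average instead.
    def frame_weighted_average(batches):
        """batches: iterable of (loss, num_frames) pairs."""
        loss_sum = 0.0
        frame_sum = 0.0
        for loss, frames in batches:
            loss_sum += loss * frames  # heavier batches count for more
            frame_sum += frames
        return loss_sum / frame_sum, frame_sum

    # Hypothetical batch losses, not taken from the log above:
    avg, frames = frame_weighted_average([(0.1554, 4856.0), (0.1456, 4618.0)])
    print(f"validation: loss={avg:.4f}, over {frames:.2f} frames.")

Similarly, the dense "ScheduledFloat: name=..., batch_count=..., ans=..." entries record hyperparameters (skip rates, dropout probabilities, balancer probabilities) that are scheduled as functions of batch_count. One minimal way to realize such a value is a piecewise-linear schedule; the sketch below is an assumption for illustration, not the actual icefall ScheduledFloat class:

    # Hedged sketch: a value scheduled piecewise-linearly on batch_count, in
    # the spirit of the "ScheduledFloat: ... batch_count=..., ans=..." lines.
    class PiecewiseLinear:
        def __init__(self, *points):
            # points: (batch_count, value) pairs; kept sorted by batch_count.
            self.points = sorted(points)

        def __call__(self, batch_count):
            pts = self.points
            if batch_count <= pts[0][0]:
                return pts[0][1]
            if batch_count >= pts[-1][0]:
                return pts[-1][1]
            for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
                if x0 <= batch_count <= x1:
                    frac = (batch_count - x0) / (x1 - x0)
                    return y0 + frac * (y1 - y0)

    # Hypothetical schedule: a skip rate decaying from 0.07 to 0.0.
    skip_rate = PiecewiseLinear((0.0, 0.07), (200000.0, 0.0))
    print(skip_rate(176424.0))  # ~0.008 this far into training

Frame weighting matters here because speech batches vary widely in total duration; averaging per batch instead of per frame would let many short utterances dominate the reported loss.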
+2024-07-28 16:14:55,769 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 16:15:07,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177186.66666666666, ans=0.1 +2024-07-28 16:15:11,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=177200.0, ans=0.125 +2024-07-28 16:15:19,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=177213.33333333334, ans=0.125 +2024-07-28 16:15:24,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177226.66666666666, ans=0.125 +2024-07-28 16:15:32,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177240.0, ans=0.125 +2024-07-28 16:15:35,830 INFO [train.py:1114] (0/4) Epoch 14, batch 50, loss[loss=0.1456, simple_loss=0.2251, pruned_loss=0.03306, over 4618.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2742, pruned_loss=0.04898, over 206341.96 frames. ], batch size: 11, lr: 5.45e-03, grad_scale: 32.0 +2024-07-28 16:15:45,985 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:15:54,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=177280.0, ans=0.0 +2024-07-28 16:16:06,632 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0 +2024-07-28 16:16:11,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=177293.33333333334, ans=0.125 +2024-07-28 16:16:13,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=177306.66666666666, ans=0.2 +2024-07-28 16:16:17,039 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.504e+01 5.430e+01 5.954e+01 6.690e+01 1.022e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 16:16:19,819 INFO [train.py:1114] (0/4) Epoch 14, batch 100, loss[loss=0.1976, simple_loss=0.2897, pruned_loss=0.05275, over 4638.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2804, pruned_loss=0.05059, over 365243.20 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:17:11,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=177373.33333333334, ans=0.125 +2024-07-28 16:17:13,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=177373.33333333334, ans=0.125 +2024-07-28 16:17:14,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=177386.66666666666, ans=0.2 +2024-07-28 16:17:14,952 INFO [train.py:1114] (0/4) Epoch 14, batch 150, loss[loss=0.1397, simple_loss=0.2304, pruned_loss=0.0245, over 4603.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2772, pruned_loss=0.04933, over 493804.70 frames. 
], batch size: 11, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:17:25,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=177400.0, ans=0.04949747468305833 +2024-07-28 16:17:26,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=177400.0, ans=0.025 +2024-07-28 16:17:32,715 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.66 vs. limit=22.5 +2024-07-28 16:17:36,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=177426.66666666666, ans=0.125 +2024-07-28 16:17:45,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=177426.66666666666, ans=0.07 +2024-07-28 16:17:47,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=177440.0, ans=0.0 +2024-07-28 16:17:50,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=177440.0, ans=0.0 +2024-07-28 16:17:52,154 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.424e+01 5.956e+01 7.040e+01 1.129e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 16:17:57,488 INFO [train.py:1114] (0/4) Epoch 14, batch 200, loss[loss=0.183, simple_loss=0.2698, pruned_loss=0.04806, over 4461.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2763, pruned_loss=0.04904, over 592915.58 frames. ], batch size: 21, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:17:59,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177453.33333333334, ans=0.125 +2024-07-28 16:18:01,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=177453.33333333334, ans=0.125 +2024-07-28 16:18:04,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177466.66666666666, ans=0.1 +2024-07-28 16:18:19,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177493.33333333334, ans=0.1 +2024-07-28 16:18:37,117 INFO [train.py:1114] (0/4) Epoch 14, batch 250, loss[loss=0.1831, simple_loss=0.2831, pruned_loss=0.04149, over 4645.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.274, pruned_loss=0.04758, over 669735.27 frames. ], batch size: 16, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:18:38,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=177520.0, ans=0.0 +2024-07-28 16:18:45,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.12 vs. 
limit=12.0 +2024-07-28 16:18:49,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=177533.33333333334, ans=0.125 +2024-07-28 16:18:54,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=177546.66666666666, ans=0.025 +2024-07-28 16:18:55,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=177546.66666666666, ans=0.125 +2024-07-28 16:18:59,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=177560.0, ans=0.125 +2024-07-28 16:19:02,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=177560.0, ans=0.125 +2024-07-28 16:19:06,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177573.33333333334, ans=0.0 +2024-07-28 16:19:08,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.662e+01 6.232e+01 7.449e+01 1.133e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 16:19:11,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=177586.66666666666, ans=0.5 +2024-07-28 16:19:11,378 INFO [train.py:1114] (0/4) Epoch 14, batch 300, loss[loss=0.1957, simple_loss=0.2971, pruned_loss=0.0471, over 4807.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2728, pruned_loss=0.04686, over 729455.73 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:25,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.03 vs. limit=15.0 +2024-07-28 16:19:31,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0 +2024-07-28 16:19:40,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=177626.66666666666, ans=0.2 +2024-07-28 16:19:42,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=177640.0, ans=0.025 +2024-07-28 16:19:48,699 INFO [train.py:1114] (0/4) Epoch 14, batch 350, loss[loss=0.1787, simple_loss=0.2583, pruned_loss=0.04961, over 4925.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2736, pruned_loss=0.04721, over 776054.15 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:20:25,101 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=177666.66666666666, ans=0.125 +2024-07-28 16:20:29,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.21 vs. limit=6.0 +2024-07-28 16:20:32,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177680.0, ans=0.1 +2024-07-28 16:20:37,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.59 vs. 
limit=22.5 +2024-07-28 16:20:44,595 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.533e+01 5.994e+01 6.845e+01 1.570e+02, threshold=1.199e+02, percent-clipped=1.0 +2024-07-28 16:20:47,239 INFO [train.py:1114] (0/4) Epoch 14, batch 400, loss[loss=0.1617, simple_loss=0.2504, pruned_loss=0.03645, over 4696.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2732, pruned_loss=0.04673, over 813420.82 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:20:54,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177733.33333333334, ans=0.1 +2024-07-28 16:21:00,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=177746.66666666666, ans=0.125 +2024-07-28 16:21:03,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=177746.66666666666, ans=0.125 +2024-07-28 16:21:11,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177760.0, ans=0.125 +2024-07-28 16:21:12,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=177760.0, ans=0.125 +2024-07-28 16:21:13,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.24 vs. limit=15.0 +2024-07-28 16:21:22,172 INFO [train.py:1114] (0/4) Epoch 14, batch 450, loss[loss=0.1841, simple_loss=0.2743, pruned_loss=0.04691, over 4645.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2746, pruned_loss=0.04765, over 838638.87 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:36,941 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:21:44,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=177826.66666666666, ans=0.125 +2024-07-28 16:21:51,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. limit=6.0 +2024-07-28 16:21:52,670 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.509e+01 6.147e+01 6.796e+01 9.434e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 16:21:55,366 INFO [train.py:1114] (0/4) Epoch 14, batch 500, loss[loss=0.1855, simple_loss=0.2719, pruned_loss=0.04953, over 4681.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2735, pruned_loss=0.04743, over 861120.34 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:58,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=177853.33333333334, ans=0.0 +2024-07-28 16:21:59,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.73 vs. limit=22.5 +2024-07-28 16:22:22,004 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5 +2024-07-28 16:22:28,997 INFO [train.py:1114] (0/4) Epoch 14, batch 550, loss[loss=0.2121, simple_loss=0.3048, pruned_loss=0.05968, over 4593.00 frames. 
], tot_loss[loss=0.1842, simple_loss=0.2732, pruned_loss=0.04761, over 877013.48 frames. ], batch size: 17, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:22:34,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=177920.0, ans=15.0 +2024-07-28 16:22:34,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=177920.0, ans=0.05 +2024-07-28 16:22:34,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=177920.0, ans=0.05 +2024-07-28 16:22:55,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=177973.33333333334, ans=0.95 +2024-07-28 16:22:59,816 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.791e+01 6.101e+01 6.506e+01 8.521e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 16:23:00,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=177973.33333333334, ans=0.025 +2024-07-28 16:23:02,706 INFO [train.py:1114] (0/4) Epoch 14, batch 600, loss[loss=0.1902, simple_loss=0.2808, pruned_loss=0.04981, over 4636.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2745, pruned_loss=0.04794, over 891671.79 frames. ], batch size: 16, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:23:08,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=178000.0, ans=0.0 +2024-07-28 16:23:09,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178000.0, ans=0.125 +2024-07-28 16:23:53,564 INFO [train.py:1114] (0/4) Epoch 14, batch 650, loss[loss=0.1704, simple_loss=0.2651, pruned_loss=0.03787, over 4758.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2734, pruned_loss=0.04715, over 903644.22 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:03,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.22 vs. limit=10.0 +2024-07-28 16:24:08,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178066.66666666666, ans=0.1 +2024-07-28 16:24:21,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=178093.33333333334, ans=0.0 +2024-07-28 16:24:24,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=178106.66666666666, ans=0.04949747468305833 +2024-07-28 16:24:27,860 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.659e+01 6.159e+01 7.127e+01 1.309e+02, threshold=1.232e+02, percent-clipped=1.0 +2024-07-28 16:24:28,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=178106.66666666666, ans=0.2 +2024-07-28 16:24:30,538 INFO [train.py:1114] (0/4) Epoch 14, batch 700, loss[loss=0.214, simple_loss=0.2856, pruned_loss=0.07124, over 4646.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2733, pruned_loss=0.0468, over 911414.09 frames. 
], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:30,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178120.0, ans=0.1 +2024-07-28 16:24:38,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178133.33333333334, ans=0.125 +2024-07-28 16:24:42,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=178133.33333333334, ans=0.0 +2024-07-28 16:24:44,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178133.33333333334, ans=0.0 +2024-07-28 16:24:45,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178146.66666666666, ans=0.125 +2024-07-28 16:24:50,097 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:24:53,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=178160.0, ans=0.0 +2024-07-28 16:26:35,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=178173.33333333334, ans=0.125 +2024-07-28 16:26:48,782 INFO [train.py:1114] (0/4) Epoch 14, batch 750, loss[loss=0.17, simple_loss=0.2668, pruned_loss=0.03656, over 4700.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2726, pruned_loss=0.0464, over 918288.14 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:26:55,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178200.0, ans=0.1 +2024-07-28 16:26:56,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0 +2024-07-28 16:27:05,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-07-28 16:27:20,238 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.515e+01 6.007e+01 6.700e+01 1.144e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:27:22,840 INFO [train.py:1114] (0/4) Epoch 14, batch 800, loss[loss=0.1488, simple_loss=0.2329, pruned_loss=0.03235, over 4853.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2717, pruned_loss=0.04647, over 922868.43 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:47,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=178293.33333333334, ans=0.2 +2024-07-28 16:27:56,280 INFO [train.py:1114] (0/4) Epoch 14, batch 850, loss[loss=0.1763, simple_loss=0.2738, pruned_loss=0.03945, over 4658.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2717, pruned_loss=0.04656, over 926936.37 frames. 
], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:28:06,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178333.33333333334, ans=0.125 +2024-07-28 16:28:09,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=178346.66666666666, ans=0.0 +2024-07-28 16:28:14,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178346.66666666666, ans=0.0 +2024-07-28 16:28:16,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=178360.0, ans=0.025 +2024-07-28 16:28:25,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=178373.33333333334, ans=0.125 +2024-07-28 16:28:27,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.522e+01 6.221e+01 7.119e+01 8.769e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 16:28:30,525 INFO [train.py:1114] (0/4) Epoch 14, batch 900, loss[loss=0.1633, simple_loss=0.2439, pruned_loss=0.04138, over 4852.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2721, pruned_loss=0.04719, over 928355.03 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:28:34,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=178386.66666666666, ans=0.125 +2024-07-28 16:28:36,705 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:28:42,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.11 vs. limit=15.0 +2024-07-28 16:28:53,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=178426.66666666666, ans=0.035 +2024-07-28 16:28:57,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=178426.66666666666, ans=0.025 +2024-07-28 16:28:57,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=178426.66666666666, ans=0.2 +2024-07-28 16:28:57,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178426.66666666666, ans=0.125 +2024-07-28 16:28:58,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=178426.66666666666, ans=0.2 +2024-07-28 16:28:59,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=178440.0, ans=0.02 +2024-07-28 16:29:06,328 INFO [train.py:1114] (0/4) Epoch 14, batch 950, loss[loss=0.1463, simple_loss=0.2259, pruned_loss=0.0333, over 4783.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2723, pruned_loss=0.04694, over 929784.48 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:09,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=178453.33333333334, ans=0.07 +2024-07-28 16:29:09,384 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.38 vs. 
limit=22.5 +2024-07-28 16:29:26,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=178493.33333333334, ans=0.125 +2024-07-28 16:29:30,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178493.33333333334, ans=0.125 +2024-07-28 16:29:37,156 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.573e+01 6.236e+01 6.880e+01 1.050e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 16:29:39,838 INFO [train.py:1114] (0/4) Epoch 14, batch 1000, loss[loss=0.1815, simple_loss=0.268, pruned_loss=0.04752, over 4964.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2737, pruned_loss=0.04751, over 929402.78 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:41,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=178520.0, ans=0.0 +2024-07-28 16:29:45,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178520.0, ans=0.125 +2024-07-28 16:29:54,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=178546.66666666666, ans=0.125 +2024-07-28 16:29:54,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=178546.66666666666, ans=0.05 +2024-07-28 16:30:01,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=178560.0, ans=0.0 +2024-07-28 16:30:03,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=178560.0, ans=0.0 +2024-07-28 16:30:03,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=178560.0, ans=0.125 +2024-07-28 16:30:06,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.06 vs. limit=12.0 +2024-07-28 16:30:16,083 INFO [train.py:1114] (0/4) Epoch 14, batch 1050, loss[loss=0.2053, simple_loss=0.2895, pruned_loss=0.06054, over 4869.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2729, pruned_loss=0.04705, over 931714.38 frames. 
], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:30:26,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=178600.0, ans=0.07 +2024-07-28 16:30:36,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=178613.33333333334, ans=0.125 +2024-07-28 16:30:38,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178626.66666666666, ans=0.125 +2024-07-28 16:30:40,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=178626.66666666666, ans=0.0 +2024-07-28 16:30:40,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178626.66666666666, ans=0.125 +2024-07-28 16:30:48,748 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.778e+01 6.259e+01 7.627e+01 1.146e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 16:30:53,943 INFO [train.py:1114] (0/4) Epoch 14, batch 1100, loss[loss=0.1898, simple_loss=0.2848, pruned_loss=0.04744, over 4890.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2732, pruned_loss=0.04739, over 933994.20 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:30:57,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=178653.33333333334, ans=0.0 +2024-07-28 16:30:57,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.71 vs. limit=15.0 +2024-07-28 16:31:00,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.91 vs. limit=15.0 +2024-07-28 16:31:03,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=178666.66666666666, ans=0.0 +2024-07-28 16:31:12,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.31 vs. limit=6.0 +2024-07-28 16:31:19,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0 +2024-07-28 16:31:23,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=178693.33333333334, ans=0.0 +2024-07-28 16:31:31,679 INFO [train.py:1114] (0/4) Epoch 14, batch 1150, loss[loss=0.1996, simple_loss=0.281, pruned_loss=0.05909, over 4893.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2735, pruned_loss=0.04764, over 934057.21 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:31:33,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.08 vs. 
limit=22.5 +2024-07-28 16:31:34,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=178720.0, ans=0.1 +2024-07-28 16:31:36,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178720.0, ans=0.1 +2024-07-28 16:31:40,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=178733.33333333334, ans=0.025 +2024-07-28 16:32:02,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.551e+01 6.026e+01 6.659e+01 1.121e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 16:32:05,613 INFO [train.py:1114] (0/4) Epoch 14, batch 1200, loss[loss=0.1679, simple_loss=0.2581, pruned_loss=0.03889, over 4877.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2738, pruned_loss=0.0479, over 933331.07 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:19,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178813.33333333334, ans=0.125 +2024-07-28 16:32:21,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0 +2024-07-28 16:32:26,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=178826.66666666666, ans=0.125 +2024-07-28 16:32:38,596 INFO [train.py:1114] (0/4) Epoch 14, batch 1250, loss[loss=0.2126, simple_loss=0.3044, pruned_loss=0.06042, over 4806.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2739, pruned_loss=0.04756, over 937642.31 frames. 
], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:42,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=178853.33333333334, ans=0.025 +2024-07-28 16:32:45,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=178866.66666666666, ans=0.0 +2024-07-28 16:32:47,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=178866.66666666666, ans=0.125 +2024-07-28 16:32:58,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=178893.33333333334, ans=0.2 +2024-07-28 16:32:59,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=178893.33333333334, ans=0.125 +2024-07-28 16:33:02,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=178893.33333333334, ans=0.0 +2024-07-28 16:33:03,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=178893.33333333334, ans=0.125 +2024-07-28 16:33:05,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=178906.66666666666, ans=0.0 +2024-07-28 16:33:09,547 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.596e+01 6.109e+01 6.927e+01 8.665e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:33:10,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=178906.66666666666, ans=0.5 +2024-07-28 16:33:12,231 INFO [train.py:1114] (0/4) Epoch 14, batch 1300, loss[loss=0.1817, simple_loss=0.2765, pruned_loss=0.04345, over 4754.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.04759, over 938799.10 frames. 
], batch size: 19, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:12,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=178920.0, ans=10.0 +2024-07-28 16:33:12,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178920.0, ans=0.125 +2024-07-28 16:33:15,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=178920.0, ans=0.025 +2024-07-28 16:33:22,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=178933.33333333334, ans=0.0 +2024-07-28 16:33:22,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=178933.33333333334, ans=0.0 +2024-07-28 16:33:34,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178960.0, ans=0.1 +2024-07-28 16:33:44,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=178986.66666666666, ans=0.125 +2024-07-28 16:33:44,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=178986.66666666666, ans=0.07 +2024-07-28 16:33:45,471 INFO [train.py:1114] (0/4) Epoch 14, batch 1350, loss[loss=0.1667, simple_loss=0.263, pruned_loss=0.03516, over 4763.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2726, pruned_loss=0.04714, over 940800.75 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:49,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=178986.66666666666, ans=0.0 +2024-07-28 16:33:50,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178986.66666666666, ans=0.0 +2024-07-28 16:33:55,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=179000.0, ans=0.0 +2024-07-28 16:34:02,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179013.33333333334, ans=0.125 +2024-07-28 16:34:18,367 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.737e+01 5.767e+01 6.518e+01 7.803e+01 1.206e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 16:34:21,091 INFO [train.py:1114] (0/4) Epoch 14, batch 1400, loss[loss=0.1531, simple_loss=0.2349, pruned_loss=0.03565, over 4705.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.273, pruned_loss=0.04739, over 942846.28 frames. 
], batch size: 11, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:28,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=179066.66666666666, ans=0.125 +2024-07-28 16:34:33,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=179066.66666666666, ans=0.025 +2024-07-28 16:34:47,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179106.66666666666, ans=0.125 +2024-07-28 16:34:49,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=179106.66666666666, ans=0.2 +2024-07-28 16:34:54,874 INFO [train.py:1114] (0/4) Epoch 14, batch 1450, loss[loss=0.2288, simple_loss=0.3114, pruned_loss=0.0731, over 4674.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.274, pruned_loss=0.04779, over 942376.46 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:59,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.31 vs. limit=6.0 +2024-07-28 16:35:00,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.81 vs. limit=15.0 +2024-07-28 16:35:11,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179146.66666666666, ans=0.1 +2024-07-28 16:35:17,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=179160.0, ans=0.025 +2024-07-28 16:35:25,332 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+01 5.717e+01 6.158e+01 6.700e+01 8.649e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 16:35:26,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=179173.33333333334, ans=0.2 +2024-07-28 16:35:28,175 INFO [train.py:1114] (0/4) Epoch 14, batch 1500, loss[loss=0.1676, simple_loss=0.2607, pruned_loss=0.03728, over 4809.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2743, pruned_loss=0.04763, over 942141.93 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:35:28,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0 +2024-07-28 16:35:31,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=179186.66666666666, ans=0.125 +2024-07-28 16:35:32,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=179186.66666666666, ans=0.125 +2024-07-28 16:35:47,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.93 vs. limit=22.5 +2024-07-28 16:35:54,612 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.28 vs. 
limit=15.0 +2024-07-28 16:36:00,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=179240.0, ans=0.025 +2024-07-28 16:36:00,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.10 vs. limit=22.5 +2024-07-28 16:36:02,059 INFO [train.py:1114] (0/4) Epoch 14, batch 1550, loss[loss=0.1977, simple_loss=0.2766, pruned_loss=0.0594, over 4902.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2737, pruned_loss=0.04733, over 938670.29 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:36:05,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.01 vs. limit=22.5 +2024-07-28 16:36:06,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.27 vs. limit=15.0 +2024-07-28 16:36:14,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179280.0, ans=0.125 +2024-07-28 16:36:26,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.06 vs. limit=22.5 +2024-07-28 16:36:28,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=179293.33333333334, ans=0.0 +2024-07-28 16:36:30,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=179306.66666666666, ans=0.0 +2024-07-28 16:36:30,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179306.66666666666, ans=0.0 +2024-07-28 16:36:33,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=179306.66666666666, ans=0.125 +2024-07-28 16:36:34,742 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.675e+01 6.307e+01 6.776e+01 1.138e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:36:39,313 INFO [train.py:1114] (0/4) Epoch 14, batch 1600, loss[loss=0.1892, simple_loss=0.2856, pruned_loss=0.0464, over 4872.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2746, pruned_loss=0.04778, over 937919.13 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 32.0 +2024-07-28 16:36:54,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=179346.66666666666, ans=0.0 +2024-07-28 16:36:56,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179346.66666666666, ans=0.125 +2024-07-28 16:37:03,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=15.0 +2024-07-28 16:37:14,629 INFO [train.py:1114] (0/4) Epoch 14, batch 1650, loss[loss=0.1921, simple_loss=0.2703, pruned_loss=0.05691, over 4671.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2742, pruned_loss=0.04836, over 937974.33 frames. 
], batch size: 14, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:37:20,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=179386.66666666666, ans=0.0
+2024-07-28 16:37:23,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=179400.0, ans=0.125
+2024-07-28 16:37:29,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179413.33333333334, ans=0.125
+2024-07-28 16:37:31,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=179413.33333333334, ans=0.0
+2024-07-28 16:37:36,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=12.0
+2024-07-28 16:37:44,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=179440.0, ans=0.125
+2024-07-28 16:37:45,304 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.723e+01 6.070e+01 6.636e+01 1.142e+02, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 16:37:46,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=179440.0, ans=0.125
+2024-07-28 16:37:47,999 INFO [train.py:1114] (0/4) Epoch 14, batch 1700, loss[loss=0.132, simple_loss=0.2185, pruned_loss=0.02281, over 4708.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2738, pruned_loss=0.04806, over 939939.81 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:37:58,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=179466.66666666666, ans=0.125
+2024-07-28 16:38:03,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=179480.0, ans=0.04949747468305833
+2024-07-28 16:38:17,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=179506.66666666666, ans=0.125
+2024-07-28 16:38:18,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=179506.66666666666, ans=0.125
+2024-07-28 16:38:20,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=179520.0, ans=0.125
+2024-07-28 16:38:21,306 INFO [train.py:1114] (0/4) Epoch 14, batch 1750, loss[loss=0.1752, simple_loss=0.2567, pruned_loss=0.04689, over 4815.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.273, pruned_loss=0.04755, over 940740.40 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:38:24,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=179520.0, ans=0.125
+2024-07-28 16:38:25,603 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0
+2024-07-28 16:38:32,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=179533.33333333334, ans=0.04949747468305833
+2024-07-28 16:38:46,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179560.0, ans=0.125
+2024-07-28 16:38:48,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=179573.33333333334, ans=0.125
+2024-07-28 16:38:52,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.501e+01 6.182e+01 7.069e+01 1.179e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 16:38:55,469 INFO [train.py:1114] (0/4) Epoch 14, batch 1800, loss[loss=0.1914, simple_loss=0.2814, pruned_loss=0.05066, over 4641.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2739, pruned_loss=0.04753, over 941349.19 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:39:17,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179626.66666666666, ans=0.1
+2024-07-28 16:39:18,310 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=15.0
+2024-07-28 16:39:25,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179640.0, ans=0.125
+2024-07-28 16:39:29,096 INFO [train.py:1114] (0/4) Epoch 14, batch 1850, loss[loss=0.1814, simple_loss=0.2802, pruned_loss=0.04134, over 4809.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2737, pruned_loss=0.04769, over 941055.47 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:39:30,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0
+2024-07-28 16:39:31,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179653.33333333334, ans=0.125
+2024-07-28 16:39:49,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0
+2024-07-28 16:39:56,302 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.81 vs. limit=15.0
+2024-07-28 16:40:01,781 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.632e+01 6.282e+01 7.683e+01 1.282e+02, threshold=1.256e+02, percent-clipped=1.0
+2024-07-28 16:42:22,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.97 vs. limit=10.0
+2024-07-28 16:42:23,048 INFO [train.py:1114] (0/4) Epoch 14, batch 1900, loss[loss=0.1659, simple_loss=0.2678, pruned_loss=0.03201, over 4662.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2741, pruned_loss=0.04777, over 941989.88 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:42:25,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.13 vs. limit=22.5
+2024-07-28 16:42:44,499 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=22.5
+2024-07-28 16:42:55,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=179733.33333333334, ans=0.2
+2024-07-28 16:43:08,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.20 vs. limit=6.0
+2024-07-28 16:43:09,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179760.0, ans=0.1
+2024-07-28 16:43:09,764 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:43:10,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179760.0, ans=0.125
+2024-07-28 16:43:14,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179760.0, ans=0.0
+2024-07-28 16:43:22,188 INFO [train.py:1114] (0/4) Epoch 14, batch 1950, loss[loss=0.1632, simple_loss=0.2624, pruned_loss=0.03199, over 4901.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2742, pruned_loss=0.04744, over 944041.81 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:43:28,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=179800.0, ans=0.09899494936611666
+2024-07-28 16:43:40,698 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0
+2024-07-28 16:43:43,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179826.66666666666, ans=0.125
+2024-07-28 16:43:47,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.86 vs. limit=22.5
+2024-07-28 16:43:50,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=179840.0, ans=0.125
+2024-07-28 16:43:52,884 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.659e+01 6.185e+01 6.876e+01 9.171e+01, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 16:43:54,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0
+2024-07-28 16:43:55,614 INFO [train.py:1114] (0/4) Epoch 14, batch 2000, loss[loss=0.1594, simple_loss=0.2532, pruned_loss=0.03279, over 4796.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2752, pruned_loss=0.04793, over 941814.48 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:44:09,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=179880.0, ans=0.025
+2024-07-28 16:44:39,858 INFO [train.py:1114] (0/4) Epoch 14, batch 2050, loss[loss=0.1657, simple_loss=0.2425, pruned_loss=0.04441, over 4606.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2743, pruned_loss=0.04809, over 939708.43 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0
+2024-07-28 16:44:47,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=179933.33333333334, ans=0.025
+2024-07-28 16:44:53,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.20 vs. limit=22.5
+2024-07-28 16:44:57,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=179946.66666666666, ans=0.0
+2024-07-28 16:45:00,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.58 vs. limit=10.0
+2024-07-28 16:45:12,985 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:45:14,166 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.698e+01 6.586e+01 7.906e+01 1.162e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-28 16:45:16,950 INFO [train.py:1114] (0/4) Epoch 14, batch 2100, loss[loss=0.1612, simple_loss=0.2479, pruned_loss=0.03727, over 4760.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2739, pruned_loss=0.0475, over 941286.35 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 17:45:24,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=180000.0, ans=0.025
+2024-07-28 16:45:28,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=180000.0, ans=0.0
+2024-07-28 16:45:30,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=180013.33333333334, ans=0.125
+2024-07-28 16:45:42,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=180026.66666666666, ans=0.0
+2024-07-28 16:45:50,454 INFO [train.py:1114] (0/4) Epoch 14, batch 2150, loss[loss=0.1706, simple_loss=0.2677, pruned_loss=0.03672, over 4897.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2731, pruned_loss=0.04733, over 944421.08 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:45:58,465 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.76 vs. limit=15.0
+2024-07-28 16:46:00,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180066.66666666666, ans=0.1
+2024-07-28 16:46:17,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=180093.33333333334, ans=0.0
+2024-07-28 16:46:19,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=180106.66666666666, ans=0.0
+2024-07-28 16:46:22,333 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.458e+01 6.111e+01 6.832e+01 1.017e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 16:46:36,577 INFO [train.py:1114] (0/4) Epoch 14, batch 2200, loss[loss=0.152, simple_loss=0.2441, pruned_loss=0.02998, over 4808.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2729, pruned_loss=0.04715, over 943482.72 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:47:02,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=180146.66666666666, ans=0.125
+2024-07-28 16:47:03,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=180160.0, ans=0.125
+2024-07-28 16:47:15,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.27 vs. limit=15.0
+2024-07-28 16:47:17,927 INFO [train.py:1114] (0/4) Epoch 14, batch 2250, loss[loss=0.2017, simple_loss=0.3022, pruned_loss=0.0506, over 4686.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.04753, over 941886.66 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:47:21,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=180186.66666666666, ans=0.125
+2024-07-28 16:47:26,690 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:47:27,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.42 vs. limit=22.5
+2024-07-28 16:47:43,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180226.66666666666, ans=0.125
+2024-07-28 16:47:45,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180226.66666666666, ans=0.1
+2024-07-28 16:48:09,499 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.321e+01 5.481e+01 6.034e+01 6.798e+01 1.360e+02, threshold=1.207e+02, percent-clipped=1.0
+2024-07-28 16:48:20,572 INFO [train.py:1114] (0/4) Epoch 14, batch 2300, loss[loss=0.144, simple_loss=0.2298, pruned_loss=0.0291, over 4935.00 frames. ], tot_loss[loss=0.183, simple_loss=0.272, pruned_loss=0.04695, over 939307.07 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:49:46,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=180306.66666666666, ans=0.05
+2024-07-28 16:49:49,260 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.58 vs. limit=15.0
+2024-07-28 16:49:52,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180306.66666666666, ans=0.0
+2024-07-28 16:49:52,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180306.66666666666, ans=0.1
+2024-07-28 16:49:54,922 INFO [train.py:1114] (0/4) Epoch 14, batch 2350, loss[loss=0.205, simple_loss=0.2921, pruned_loss=0.05897, over 4637.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2731, pruned_loss=0.04717, over 941552.14 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:49:55,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=180320.0, ans=0.2
+2024-07-28 16:50:05,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=180333.33333333334, ans=0.0
+2024-07-28 16:50:11,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=180346.66666666666, ans=0.125
+2024-07-28 16:50:26,411 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.752e+01 6.311e+01 7.505e+01 9.885e+01, threshold=1.262e+02, percent-clipped=0.0
+2024-07-28 16:50:29,085 INFO [train.py:1114] (0/4) Epoch 14, batch 2400, loss[loss=0.192, simple_loss=0.2704, pruned_loss=0.05679, over 4641.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2739, pruned_loss=0.04759, over 941285.59 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:50:31,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=180386.66666666666, ans=0.0
+2024-07-28 16:50:38,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.12 vs. limit=10.0
+2024-07-28 16:50:45,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=180413.33333333334, ans=0.0
+2024-07-28 16:50:56,276 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. limit=6.0
+2024-07-28 16:50:56,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=6.0
+2024-07-28 16:50:57,773 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.62 vs. limit=15.0
+2024-07-28 16:50:58,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=12.0
+2024-07-28 16:51:03,420 INFO [train.py:1114] (0/4) Epoch 14, batch 2450, loss[loss=0.1616, simple_loss=0.2561, pruned_loss=0.03353, over 4689.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2739, pruned_loss=0.04787, over 936749.28 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:51:11,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=180466.66666666666, ans=0.2
+2024-07-28 16:51:18,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=180480.0, ans=0.125
+2024-07-28 16:51:34,058 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.381e+01 5.494e+01 6.004e+01 6.734e+01 1.227e+02, threshold=1.201e+02, percent-clipped=0.0
+2024-07-28 16:51:35,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0
+2024-07-28 16:51:36,698 INFO [train.py:1114] (0/4) Epoch 14, batch 2500, loss[loss=0.2005, simple_loss=0.2944, pruned_loss=0.05333, over 4807.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2744, pruned_loss=0.04804, over 938879.92 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:51:53,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=180546.66666666666, ans=0.125
+2024-07-28 16:51:58,755 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:52:00,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=180560.0, ans=0.025
+2024-07-28 16:52:07,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.85 vs. limit=22.5
+2024-07-28 16:52:08,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=180573.33333333334, ans=0.0
+2024-07-28 16:52:15,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=180573.33333333334, ans=0.125
+2024-07-28 16:52:16,696 INFO [train.py:1114] (0/4) Epoch 14, batch 2550, loss[loss=0.1538, simple_loss=0.2309, pruned_loss=0.03835, over 4792.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2739, pruned_loss=0.04778, over 938430.09 frames. ], batch size: 11, lr: 5.40e-03, grad_scale: 64.0
+2024-07-28 16:52:18,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=180586.66666666666, ans=0.025
+2024-07-28 16:52:19,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=180586.66666666666, ans=0.025
+2024-07-28 16:52:23,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=180600.0, ans=10.0
+2024-07-28 16:52:29,856 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.61 vs. limit=10.0
+2024-07-28 16:52:29,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-28 16:52:48,648 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.545e+01 6.227e+01 6.776e+01 1.046e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 16:52:48,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180640.0, ans=0.125
+2024-07-28 16:52:51,356 INFO [train.py:1114] (0/4) Epoch 14, batch 2600, loss[loss=0.1749, simple_loss=0.2684, pruned_loss=0.04068, over 4894.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2744, pruned_loss=0.04815, over 937695.59 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:53:00,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180666.66666666666, ans=0.1
+2024-07-28 16:53:03,750 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:53:08,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.48 vs. limit=10.0
+2024-07-28 16:53:08,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=180680.0, ans=0.125
+2024-07-28 16:53:16,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=180693.33333333334, ans=0.125
+2024-07-28 16:53:17,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=180693.33333333334, ans=0.0
+2024-07-28 16:53:19,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=180706.66666666666, ans=0.125
+2024-07-28 16:53:25,230 INFO [train.py:1114] (0/4) Epoch 14, batch 2650, loss[loss=0.1851, simple_loss=0.2795, pruned_loss=0.04538, over 4664.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.275, pruned_loss=0.04776, over 939903.49 frames. ], batch size: 16, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:53:25,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180720.0, ans=0.1
+2024-07-28 16:53:36,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=180733.33333333334, ans=0.02
+2024-07-28 16:53:43,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=180746.66666666666, ans=0.125
+2024-07-28 16:53:51,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=180773.33333333334, ans=0.125
+2024-07-28 16:53:52,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=180773.33333333334, ans=0.125
+2024-07-28 16:53:53,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=180773.33333333334, ans=0.0
+2024-07-28 16:53:56,284 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.582e+01 5.983e+01 6.716e+01 1.150e+02, threshold=1.197e+02, percent-clipped=0.0
+2024-07-28 16:53:58,983 INFO [train.py:1114] (0/4) Epoch 14, batch 2700, loss[loss=0.199, simple_loss=0.2977, pruned_loss=0.0502, over 4733.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2754, pruned_loss=0.04826, over 939733.49 frames. ], batch size: 14, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:54:16,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=180813.33333333334, ans=0.125
+2024-07-28 16:54:16,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=180813.33333333334, ans=0.125
+2024-07-28 16:54:20,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180826.66666666666, ans=0.1
+2024-07-28 16:54:23,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=180826.66666666666, ans=0.125
+2024-07-28 16:54:32,656 INFO [train.py:1114] (0/4) Epoch 14, batch 2750, loss[loss=0.2278, simple_loss=0.3073, pruned_loss=0.07412, over 4713.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2745, pruned_loss=0.04812, over 939498.18 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:54:43,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=180866.66666666666, ans=0.025
+2024-07-28 16:54:46,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=180880.0, ans=0.05
+2024-07-28 16:55:03,434 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.739e+01 6.573e+01 7.646e+01 1.098e+02, threshold=1.315e+02, percent-clipped=0.0
+2024-07-28 16:55:03,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=180906.66666666666, ans=0.07
+2024-07-28 16:55:06,153 INFO [train.py:1114] (0/4) Epoch 14, batch 2800, loss[loss=0.292, simple_loss=0.3548, pruned_loss=0.1146, over 3638.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2739, pruned_loss=0.04833, over 937731.93 frames. ], batch size: 35, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:55:24,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0
+2024-07-28 16:55:25,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=180946.66666666666, ans=0.0
+2024-07-28 16:55:26,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180946.66666666666, ans=0.1
+2024-07-28 16:55:32,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=180960.0, ans=0.125
+2024-07-28 16:55:42,084 INFO [train.py:1114] (0/4) Epoch 14, batch 2850, loss[loss=0.1758, simple_loss=0.2541, pruned_loss=0.04875, over 4965.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2752, pruned_loss=0.04924, over 935967.73 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:55:42,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=180986.66666666666, ans=0.0
+2024-07-28 16:55:46,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180986.66666666666, ans=0.0
+2024-07-28 16:55:49,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.50 vs. limit=22.5
+2024-07-28 16:55:49,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181000.0, ans=0.125
+2024-07-28 16:55:49,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=181000.0, ans=0.125
+2024-07-28 16:55:55,541 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0
+2024-07-28 16:55:58,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=181013.33333333334, ans=0.1
+2024-07-28 16:56:15,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=181040.0, ans=0.125
+2024-07-28 16:56:15,625 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.822e+01 6.351e+01 7.357e+01 1.031e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-28 16:56:17,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.29 vs. limit=15.0
+2024-07-28 16:56:18,145 INFO [train.py:1114] (0/4) Epoch 14, batch 2900, loss[loss=0.1532, simple_loss=0.2375, pruned_loss=0.03445, over 4831.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2753, pruned_loss=0.04875, over 939948.22 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:56:19,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=181053.33333333334, ans=0.2
+2024-07-28 16:56:19,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.69 vs. limit=10.0
+2024-07-28 16:56:21,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181053.33333333334, ans=0.1
+2024-07-28 16:56:29,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.88 vs. limit=15.0
+2024-07-28 16:56:38,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=181080.0, ans=0.125
+2024-07-28 16:56:40,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=181080.0, ans=0.125
+2024-07-28 16:57:03,409 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0
+2024-07-28 16:57:04,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181120.0, ans=0.125
+2024-07-28 16:57:04,994 INFO [train.py:1114] (0/4) Epoch 14, batch 2950, loss[loss=0.1842, simple_loss=0.2682, pruned_loss=0.05011, over 4712.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2735, pruned_loss=0.04842, over 938835.08 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:57:15,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=181120.0, ans=0.04949747468305833
+2024-07-28 16:57:31,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.58 vs. limit=15.0
+2024-07-28 16:57:40,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=181160.0, ans=0.125
+2024-07-28 16:57:53,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=181173.33333333334, ans=0.0
+2024-07-28 16:57:55,635 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.574e+01 6.305e+01 7.129e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 16:57:58,354 INFO [train.py:1114] (0/4) Epoch 14, batch 3000, loss[loss=0.1985, simple_loss=0.2831, pruned_loss=0.05698, over 4760.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2738, pruned_loss=0.04867, over 938530.51 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:57:58,355 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 16:58:17,165 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1652, simple_loss=0.2685, pruned_loss=0.03098, over 944034.00 frames.
+2024-07-28 16:58:17,165 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 16:58:18,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181186.66666666666, ans=0.125
+2024-07-28 16:58:26,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=181200.0, ans=0.025
+2024-07-28 16:58:36,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=181213.33333333334, ans=0.0
+2024-07-28 16:58:41,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=181226.66666666666, ans=0.1
+2024-07-28 16:58:42,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=181226.66666666666, ans=0.125
+2024-07-28 16:58:44,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181226.66666666666, ans=0.125
+2024-07-28 16:58:58,656 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:58:58,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=181240.0, ans=0.125
+2024-07-28 16:59:04,086 INFO [train.py:1114] (0/4) Epoch 14, batch 3050, loss[loss=0.1696, simple_loss=0.2504, pruned_loss=0.04443, over 4640.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.274, pruned_loss=0.04846, over 937109.17 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0
+2024-07-28 16:59:05,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=181253.33333333334, ans=0.0
+2024-07-28 16:59:20,761 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:59:37,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=181306.66666666666, ans=0.0
+2024-07-28 16:59:39,406 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.56 vs. limit=22.5
+2024-07-28 16:59:41,675 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.765e+01 6.488e+01 7.325e+01 1.172e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-28 16:59:48,334 INFO [train.py:1114] (0/4) Epoch 14, batch 3100, loss[loss=0.2016, simple_loss=0.2925, pruned_loss=0.05531, over 4642.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2739, pruned_loss=0.04837, over 937920.37 frames. ], batch size: 16, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:00:08,429 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-136000.pt
+2024-07-28 17:00:49,980 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.41 vs. limit=6.0
+2024-07-28 17:00:55,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=181346.66666666666, ans=0.125
+2024-07-28 17:01:01,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181360.0, ans=0.125
+2024-07-28 17:01:06,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0
+2024-07-28 17:01:07,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=181373.33333333334, ans=0.125
+2024-07-28 17:01:10,553 INFO [train.py:1114] (0/4) Epoch 14, batch 3150, loss[loss=0.2186, simple_loss=0.3058, pruned_loss=0.06575, over 4628.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2743, pruned_loss=0.04813, over 938483.93 frames. ], batch size: 17, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:01:35,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=181413.33333333334, ans=0.125
+2024-07-28 17:01:37,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=15.0
+2024-07-28 17:01:38,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.68 vs. limit=15.0
+2024-07-28 17:01:39,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=181426.66666666666, ans=0.125
+2024-07-28 17:01:39,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=181426.66666666666, ans=15.0
+2024-07-28 17:01:45,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=181426.66666666666, ans=0.0
+2024-07-28 17:01:50,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.18 vs. limit=22.5
+2024-07-28 17:01:50,901 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.747e+01 6.201e+01 6.953e+01 1.061e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 17:01:51,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.11 vs. limit=6.0
+2024-07-28 17:01:53,631 INFO [train.py:1114] (0/4) Epoch 14, batch 3200, loss[loss=0.1814, simple_loss=0.2701, pruned_loss=0.04631, over 4827.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2732, pruned_loss=0.04756, over 940025.62 frames. ], batch size: 13, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:01:53,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=181453.33333333334, ans=0.04949747468305833
+2024-07-28 17:01:55,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=181453.33333333334, ans=0.2
+2024-07-28 17:02:00,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=181466.66666666666, ans=0.0
+2024-07-28 17:02:00,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181466.66666666666, ans=0.1
+2024-07-28 17:02:28,102 INFO [train.py:1114] (0/4) Epoch 14, batch 3250, loss[loss=0.1812, simple_loss=0.2744, pruned_loss=0.04396, over 4938.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.274, pruned_loss=0.04758, over 941122.19 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:02:38,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=181533.33333333334, ans=0.0
+2024-07-28 17:02:41,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0
+2024-07-28 17:03:00,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=181560.0, ans=0.125
+2024-07-28 17:03:00,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181560.0, ans=0.1
+2024-07-28 17:03:15,225 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=15.0
+2024-07-28 17:03:17,420 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.561e+01 6.069e+01 6.754e+01 1.054e+02, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 17:04:11,134 INFO [train.py:1114] (0/4) Epoch 14, batch 3300, loss[loss=0.1844, simple_loss=0.2731, pruned_loss=0.04785, over 4740.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04778, over 941283.29 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:04:12,758 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 17:04:13,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=181586.66666666666, ans=0.2
+2024-07-28 17:04:14,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0
+2024-07-28 17:04:15,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=181586.66666666666, ans=0.2
+2024-07-28 17:04:33,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=181613.33333333334, ans=0.2
+2024-07-28 17:04:36,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=181613.33333333334, ans=0.125
+2024-07-28 17:04:54,350 INFO [train.py:1114] (0/4) Epoch 14, batch 3350, loss[loss=0.2098, simple_loss=0.2954, pruned_loss=0.06203, over 4639.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2746, pruned_loss=0.04833, over 938394.41 frames. ], batch size: 17, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:04:55,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=181653.33333333334, ans=0.125
+2024-07-28 17:04:58,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=181653.33333333334, ans=0.04949747468305833
+2024-07-28 17:05:01,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=181666.66666666666, ans=0.025
+2024-07-28 17:05:05,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181666.66666666666, ans=0.0
+2024-07-28 17:05:10,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=181680.0, ans=0.125
+2024-07-28 17:05:14,845 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=6.0
+2024-07-28 17:05:25,881 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.702e+01 6.286e+01 7.207e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 17:05:26,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=181706.66666666666, ans=0.125
+2024-07-28 17:05:29,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181720.0, ans=0.125
+2024-07-28 17:05:29,881 INFO [train.py:1114] (0/4) Epoch 14, batch 3400, loss[loss=0.1589, simple_loss=0.2403, pruned_loss=0.03871, over 4803.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.275, pruned_loss=0.04881, over 937255.30 frames. ], batch size: 11, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:05:34,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=181720.0, ans=0.2
+2024-07-28 17:05:48,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=181746.66666666666, ans=0.125
+2024-07-28 17:05:51,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.94 vs. limit=22.5
+2024-07-28 17:05:53,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=181760.0, ans=0.125
+2024-07-28 17:06:07,194 INFO [train.py:1114] (0/4) Epoch 14, batch 3450, loss[loss=0.226, simple_loss=0.3068, pruned_loss=0.07264, over 4706.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2755, pruned_loss=0.04876, over 937145.81 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:06:09,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=181786.66666666666, ans=0.07
+2024-07-28 17:06:14,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=181800.0, ans=0.125
+2024-07-28 17:06:14,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=181800.0, ans=0.0
+2024-07-28 17:06:21,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.25 vs. limit=15.0
+2024-07-28 17:06:25,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181813.33333333334, ans=0.1
+2024-07-28 17:06:26,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=181813.33333333334, ans=0.025
+2024-07-28 17:06:34,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181826.66666666666, ans=0.125
+2024-07-28 17:06:41,514 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.614e+01 6.099e+01 6.810e+01 1.220e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 17:06:44,287 INFO [train.py:1114] (0/4) Epoch 14, batch 3500, loss[loss=0.1526, simple_loss=0.2374, pruned_loss=0.03386, over 4941.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.04753, over 938157.63 frames. ], batch size: 12, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:06:46,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.72 vs. limit=15.0
+2024-07-28 17:06:46,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181853.33333333334, ans=0.1
+2024-07-28 17:06:47,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181853.33333333334, ans=0.0
+2024-07-28 17:06:57,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.07 vs. limit=15.0
+2024-07-28 17:06:58,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=181880.0, ans=0.0
+2024-07-28 17:06:58,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=181880.0, ans=0.05
+2024-07-28 17:07:01,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=181880.0, ans=0.125
+2024-07-28 17:07:02,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181880.0, ans=0.125
+2024-07-28 17:07:03,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181880.0, ans=0.1
+2024-07-28 17:07:06,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.63 vs. limit=15.0
+2024-07-28 17:07:12,249 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.25 vs. limit=22.5
+2024-07-28 17:07:17,721 INFO [train.py:1114] (0/4) Epoch 14, batch 3550, loss[loss=0.2008, simple_loss=0.2959, pruned_loss=0.05283, over 4670.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2735, pruned_loss=0.04761, over 938801.55 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0
+2024-07-28 17:07:23,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=181933.33333333334, ans=0.2
+2024-07-28 17:07:36,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=181960.0, ans=0.2
+2024-07-28 17:07:42,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=181960.0, ans=0.2
+2024-07-28 17:07:47,928 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.433e+01 6.095e+01 6.753e+01 1.044e+02, threshold=1.219e+02, percent-clipped=0.0
+2024-07-28 17:07:50,533 INFO [train.py:1114] (0/4) Epoch 14, batch 3600, loss[loss=0.1798, simple_loss=0.2575, pruned_loss=0.05105, over 4954.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.274, pruned_loss=0.04774, over 940481.37 frames. ], batch size: 13, lr: 5.37e-03, grad_scale: 64.0
+2024-07-28 17:07:55,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=181986.66666666666, ans=0.2
+2024-07-28 17:08:03,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182013.33333333334, ans=0.1
+2024-07-28 17:08:04,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.12 vs. limit=22.5
+2024-07-28 17:08:06,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=182013.33333333334, ans=0.125
+2024-07-28 17:08:10,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=182026.66666666666, ans=0.2
+2024-07-28 17:08:18,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=182040.0, ans=0.125
+2024-07-28 17:08:26,157 INFO [train.py:1114] (0/4) Epoch 14, batch 3650, loss[loss=0.2107, simple_loss=0.3095, pruned_loss=0.05596, over 4905.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2732, pruned_loss=0.04717, over 940781.73 frames. ], batch size: 15, lr: 5.37e-03, grad_scale: 64.0
+2024-07-28 17:08:27,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182053.33333333334, ans=0.125
+2024-07-28 17:08:29,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=182053.33333333334, ans=0.0
+2024-07-28 17:08:29,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182053.33333333334, ans=0.0
+2024-07-28 17:08:30,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.43 vs. limit=10.0
+2024-07-28 17:08:31,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.48 vs. limit=22.5
+2024-07-28 17:08:45,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=182093.33333333334, ans=0.125
+2024-07-28 17:08:57,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.86 vs. limit=15.0
+2024-07-28 17:08:57,483 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.626e+01 5.717e+01 6.299e+01 7.471e+01 1.089e+02, threshold=1.260e+02, percent-clipped=0.0
+2024-07-28 17:08:59,560 INFO [train.py:1114] (0/4) Epoch 14, batch 3700, loss[loss=0.199, simple_loss=0.2857, pruned_loss=0.05616, over 4934.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2718, pruned_loss=0.04634, over 941886.54 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 64.0
+2024-07-28 17:09:18,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.26 vs. limit=12.0
+2024-07-28 17:09:19,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=182133.33333333334, ans=0.125
+2024-07-28 17:09:25,373 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.24 vs. limit=15.0
+2024-07-28 17:09:31,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=15.0
+2024-07-28 17:09:43,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=182173.33333333334, ans=0.025
+2024-07-28 17:09:48,833 INFO [train.py:1114] (0/4) Epoch 14, batch 3750, loss[loss=0.2116, simple_loss=0.2881, pruned_loss=0.06759, over 4794.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2713, pruned_loss=0.04611, over 943123.31 frames. ], batch size: 11, lr: 5.37e-03, grad_scale: 64.0
+2024-07-28 17:10:01,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=182213.33333333334, ans=0.0
+2024-07-28 17:10:02,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=182213.33333333334, ans=0.0
+2024-07-28 17:10:09,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.79 vs. limit=15.0
+2024-07-28 17:10:19,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=182240.0, ans=0.0
+2024-07-28 17:10:20,840 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.520e+01 5.513e+01 6.095e+01 6.820e+01 9.830e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-28 17:10:22,243 INFO [train.py:1114] (0/4) Epoch 14, batch 3800, loss[loss=0.1923, simple_loss=0.2821, pruned_loss=0.05124, over 4807.00 frames. ], tot_loss[loss=0.182, simple_loss=0.271, pruned_loss=0.04651, over 941581.05 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0
+2024-07-28 17:10:25,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=182253.33333333334, ans=0.0
+2024-07-28 17:10:28,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.67 vs. limit=15.0
+2024-07-28 17:10:36,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=182280.0, ans=0.125
+2024-07-28 17:10:43,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=182293.33333333334, ans=10.0
+2024-07-28 17:10:46,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=182293.33333333334, ans=0.0
+2024-07-28 17:10:54,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=182320.0, ans=0.0
+2024-07-28 17:10:55,063 INFO [train.py:1114] (0/4) Epoch 14, batch 3850, loss[loss=0.1855, simple_loss=0.2865, pruned_loss=0.04228, over 4636.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2714, pruned_loss=0.04645, over 942273.12 frames. ], batch size: 16, lr: 5.37e-03, grad_scale: 32.0
+2024-07-28 17:11:02,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=182333.33333333334, ans=0.125
+2024-07-28 17:11:05,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=182333.33333333334, ans=0.0
+2024-07-28 17:11:05,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=182333.33333333334, ans=0.125
+2024-07-28 17:11:12,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182346.66666666666, ans=0.125
+2024-07-28 17:11:13,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=182346.66666666666, ans=0.125
+2024-07-28 17:11:15,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182360.0, ans=0.1
+2024-07-28 17:11:28,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182373.33333333334, ans=0.125
+2024-07-28 17:11:29,000 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.612e+01 6.160e+01 6.955e+01 1.058e+02, threshold=1.232e+02, percent-clipped=0.0
+2024-07-28 17:11:30,371 INFO [train.py:1114] (0/4) Epoch 14, batch 3900, loss[loss=0.2166, simple_loss=0.3039, pruned_loss=0.06461, over 4802.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2726, pruned_loss=0.04661, over 942841.73 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0
+2024-07-28 17:11:39,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=182400.0, ans=0.035
+2024-07-28 17:11:40,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182400.0, ans=0.125
+2024-07-28 17:11:41,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=182400.0, ans=0.0
+2024-07-28 17:11:58,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=182440.0, ans=0.2
+2024-07-28 17:12:04,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=182440.0, ans=0.0
+2024-07-28 17:12:06,199 INFO [train.py:1114] (0/4) Epoch 14, batch 3950, loss[loss=0.2152, simple_loss=0.3058, pruned_loss=0.06234, over 4822.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2726, pruned_loss=0.04691, over 944607.98 frames. ], batch size: 16, lr: 5.37e-03, grad_scale: 32.0
+2024-07-28 17:12:16,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=182466.66666666666, ans=0.2
+2024-07-28 17:12:17,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=12.0
+2024-07-28 17:12:18,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=182466.66666666666, ans=0.125
+2024-07-28 17:12:24,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=182480.0, ans=0.05
+2024-07-28 17:12:27,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.29 vs. limit=22.5
+2024-07-28 17:12:34,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182493.33333333334, ans=0.1
+2024-07-28 17:12:43,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.67 vs. limit=22.5
+2024-07-28 17:12:44,489 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.506e+01 6.184e+01 7.058e+01 1.004e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 17:12:54,549 INFO [train.py:1114] (0/4) Epoch 14, batch 4000, loss[loss=0.1691, simple_loss=0.2592, pruned_loss=0.03946, over 4768.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2741, pruned_loss=0.04805, over 941286.35 frames. ], batch size: 12, lr: 5.37e-03, grad_scale: 32.0
+2024-07-28 17:13:05,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182520.0, ans=0.125
+2024-07-28 17:13:17,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182546.66666666666, ans=0.1
+2024-07-28 17:14:10,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=182560.0, ans=0.04949747468305833
+2024-07-28 17:14:14,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=182573.33333333334, ans=0.0
+2024-07-28 17:15:01,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182573.33333333334, ans=0.1
+2024-07-28 17:15:02,340 INFO [train.py:1114] (0/4) Epoch 14, batch 4050, loss[loss=0.2556, simple_loss=0.3267, pruned_loss=0.09222, over 3464.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2742, pruned_loss=0.04764, over 939217.99 frames. ], batch size: 36, lr: 5.37e-03, grad_scale: 32.0
+2024-07-28 17:15:30,886 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0
+2024-07-28 17:15:46,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182613.33333333334, ans=0.125
+2024-07-28 17:15:49,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=182613.33333333334, ans=0.0
+2024-07-28 17:15:51,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=182613.33333333334, ans=0.0
+2024-07-28 17:15:52,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=182613.33333333334, ans=0.2
+2024-07-28 17:15:59,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=182626.66666666666, ans=0.125
+2024-07-28 17:16:01,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=182640.0, ans=0.125
+2024-07-28 17:16:07,187 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.211e+01 7.334e+01 1.251e+02, threshold=1.242e+02, percent-clipped=2.0
+2024-07-28 17:16:08,551 INFO [train.py:1114] (0/4) Epoch 14, batch 4100, loss[loss=0.1876, simple_loss=0.286, pruned_loss=0.04464, over 4906.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2743, pruned_loss=0.04765, over 938587.38 frames. ], batch size: 15, lr: 5.36e-03, grad_scale: 32.0
+2024-07-28 17:16:11,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0
+2024-07-28 17:16:21,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=182666.66666666666, ans=0.125
+2024-07-28 17:16:22,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=182680.0, ans=0.0
+2024-07-28 17:16:24,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=182680.0, ans=10.0
+2024-07-28 17:16:34,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=182693.33333333334, ans=0.04949747468305833
+2024-07-28 17:16:36,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=182693.33333333334, ans=0.125
+2024-07-28 17:16:37,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=182693.33333333334, ans=0.125
+2024-07-28 17:16:46,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=182706.66666666666, ans=0.125
+2024-07-28 17:16:47,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=182706.66666666666, ans=0.125
+2024-07-28 17:16:49,325 INFO [train.py:1114] (0/4) Epoch 14, batch 4150, loss[loss=0.1437, simple_loss=0.2424, pruned_loss=0.02245, over 4812.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2736, pruned_loss=0.04757, over 937915.06 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0
+2024-07-28 17:16:59,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.23 vs. limit=15.0
+2024-07-28 17:17:17,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=182733.33333333334, ans=0.0
+2024-07-28 17:17:18,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=182733.33333333334, ans=0.125
+2024-07-28 17:17:45,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=182733.33333333334, ans=0.025
+2024-07-28 17:18:30,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=182760.0, ans=0.125
+2024-07-28 17:18:45,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=182773.33333333334, ans=0.0
+2024-07-28 17:18:48,504 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.631e+01 6.207e+01 7.543e+01 1.114e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 17:18:49,845 INFO [train.py:1114] (0/4) Epoch 14, batch 4200, loss[loss=0.2034, simple_loss=0.2932, pruned_loss=0.05681, over 4900.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2732, pruned_loss=0.04732, over 939500.25 frames. ], batch size: 15, lr: 5.36e-03, grad_scale: 32.0
+2024-07-28 17:19:31,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.50 vs. limit=22.5
+2024-07-28 17:19:48,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=182840.0, ans=0.125
+2024-07-28 17:19:52,369 INFO [train.py:1114] (0/4) Epoch 14, batch 4250, loss[loss=0.1862, simple_loss=0.2639, pruned_loss=0.05428, over 4641.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2734, pruned_loss=0.04726, over 940922.23 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0
+2024-07-28 17:20:00,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=182853.33333333334, ans=0.125
+2024-07-28 17:20:01,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=182853.33333333334, ans=0.0
+2024-07-28 17:20:08,995 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 17:20:25,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182893.33333333334, ans=0.1
+2024-07-28 17:20:33,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.602e+01 6.327e+01 7.435e+01 1.299e+02, threshold=1.265e+02, percent-clipped=1.0
+2024-07-28 17:20:34,462 INFO [train.py:1114] (0/4) Epoch 14, batch 4300, loss[loss=0.1692, simple_loss=0.2613, pruned_loss=0.03859, over 4756.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2737, pruned_loss=0.04782, over 940469.13 frames.
], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:20:40,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=182933.33333333334, ans=0.05 +2024-07-28 17:21:02,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182973.33333333334, ans=0.0 +2024-07-28 17:21:09,284 INFO [train.py:1114] (0/4) Epoch 14, batch 4350, loss[loss=0.1525, simple_loss=0.2323, pruned_loss=0.03629, over 4757.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2735, pruned_loss=0.04724, over 941319.00 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:10,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=182986.66666666666, ans=0.0 +2024-07-28 17:21:19,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=183000.0, ans=0.0 +2024-07-28 17:21:20,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0 +2024-07-28 17:21:24,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=183013.33333333334, ans=0.125 +2024-07-28 17:21:27,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=183013.33333333334, ans=0.025 +2024-07-28 17:21:30,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=183026.66666666666, ans=0.0 +2024-07-28 17:21:30,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.47 vs. limit=12.0 +2024-07-28 17:21:37,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183040.0, ans=0.1 +2024-07-28 17:21:44,666 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.657e+01 6.269e+01 7.008e+01 1.088e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:21:46,160 INFO [train.py:1114] (0/4) Epoch 14, batch 4400, loss[loss=0.183, simple_loss=0.2799, pruned_loss=0.04303, over 4819.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2747, pruned_loss=0.04794, over 941318.72 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:54,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.81 vs. limit=22.5 +2024-07-28 17:21:58,124 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:22:08,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=183093.33333333334, ans=0.125 +2024-07-28 17:22:10,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=183093.33333333334, ans=0.125 +2024-07-28 17:22:14,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=183106.66666666666, ans=0.2 +2024-07-28 17:22:22,140 INFO [train.py:1114] (0/4) Epoch 14, batch 4450, loss[loss=0.1581, simple_loss=0.2392, pruned_loss=0.03847, over 4935.00 frames. 
], tot_loss[loss=0.1851, simple_loss=0.2744, pruned_loss=0.04788, over 939404.57 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:22:38,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0 +2024-07-28 17:22:40,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=183146.66666666666, ans=0.2 +2024-07-28 17:22:42,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=183160.0, ans=0.125 +2024-07-28 17:26:47,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.39 vs. limit=8.0 +2024-07-28 17:26:48,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.470e+01 5.943e+01 6.622e+01 1.092e+02, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 17:26:49,859 INFO [train.py:1114] (0/4) Epoch 14, batch 4500, loss[loss=0.1625, simple_loss=0.2526, pruned_loss=0.03615, over 4741.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2743, pruned_loss=0.04753, over 938277.46 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:27:01,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=183200.0, ans=0.05 +2024-07-28 17:27:06,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=183200.0, ans=0.0 +2024-07-28 17:27:07,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=183200.0, ans=0.2 +2024-07-28 17:27:07,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=183200.0, ans=0.125 +2024-07-28 17:27:07,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=183200.0, ans=0.125 +2024-07-28 17:27:09,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=183213.33333333334, ans=0.0 +2024-07-28 17:27:15,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=183226.66666666666, ans=0.125 +2024-07-28 17:27:23,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0 +2024-07-28 17:27:28,601 INFO [train.py:1114] (0/4) Epoch 14, batch 4550, loss[loss=0.1826, simple_loss=0.2755, pruned_loss=0.04492, over 4892.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2738, pruned_loss=0.04721, over 940241.68 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:27:29,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=183253.33333333334, ans=0.125 +2024-07-28 17:27:32,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.18 vs. 
limit=8.0 +2024-07-28 17:27:33,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=183253.33333333334, ans=0.2 +2024-07-28 17:27:42,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.11 vs. limit=15.0 +2024-07-28 17:27:44,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-07-28 17:27:45,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=183280.0, ans=0.0 +2024-07-28 17:27:48,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=183293.33333333334, ans=0.2 +2024-07-28 17:27:56,882 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.22 vs. limit=15.0 +2024-07-28 17:27:57,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.84 vs. limit=15.0 +2024-07-28 17:27:57,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183306.66666666666, ans=0.125 +2024-07-28 17:27:58,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183306.66666666666, ans=0.1 +2024-07-28 17:28:01,173 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.634e+01 6.361e+01 7.770e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 17:28:01,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.81 vs. limit=15.0 +2024-07-28 17:28:02,564 INFO [train.py:1114] (0/4) Epoch 14, batch 4600, loss[loss=0.2063, simple_loss=0.2916, pruned_loss=0.06054, over 4457.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2725, pruned_loss=0.04657, over 938504.85 frames. ], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:21,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=183360.0, ans=0.125 +2024-07-28 17:28:28,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=183373.33333333334, ans=0.0 +2024-07-28 17:28:30,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=183373.33333333334, ans=0.2 +2024-07-28 17:28:32,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.65 vs. limit=12.0 +2024-07-28 17:28:35,619 INFO [train.py:1114] (0/4) Epoch 14, batch 4650, loss[loss=0.1865, simple_loss=0.2772, pruned_loss=0.04787, over 4836.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2729, pruned_loss=0.04656, over 940448.76 frames. 
], batch size: 16, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:42,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=183400.0, ans=0.2 +2024-07-28 17:28:43,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=183400.0, ans=0.09899494936611666 +2024-07-28 17:28:45,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183400.0, ans=0.1 +2024-07-28 17:28:45,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=183400.0, ans=0.125 +2024-07-28 17:28:46,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=183400.0, ans=0.125 +2024-07-28 17:28:49,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=183413.33333333334, ans=0.1 +2024-07-28 17:28:50,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=183413.33333333334, ans=0.125 +2024-07-28 17:28:56,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=183426.66666666666, ans=0.2 +2024-07-28 17:28:56,971 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.50 vs. limit=10.0 +2024-07-28 17:29:08,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183440.0, ans=0.1 +2024-07-28 17:29:08,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=183440.0, ans=0.025 +2024-07-28 17:29:09,680 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.803e+01 6.288e+01 7.232e+01 1.102e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 17:29:10,996 INFO [train.py:1114] (0/4) Epoch 14, batch 4700, loss[loss=0.1648, simple_loss=0.2569, pruned_loss=0.03635, over 4700.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2723, pruned_loss=0.04701, over 937875.48 frames. ], batch size: 11, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:16,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=183453.33333333334, ans=0.125 +2024-07-28 17:29:24,315 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-28 17:29:40,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=183506.66666666666, ans=0.0 +2024-07-28 17:29:45,319 INFO [train.py:1114] (0/4) Epoch 14, batch 4750, loss[loss=0.21, simple_loss=0.2969, pruned_loss=0.06149, over 4528.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2736, pruned_loss=0.04754, over 935884.49 frames. ], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:46,578 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.23 vs. 
limit=22.5 +2024-07-28 17:29:52,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=183533.33333333334, ans=0.125 +2024-07-28 17:30:05,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=183560.0, ans=0.125 +2024-07-28 17:30:07,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=183560.0, ans=0.05 +2024-07-28 17:30:07,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=183560.0, ans=0.125 +2024-07-28 17:30:17,965 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.592e+01 6.256e+01 7.365e+01 1.010e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 17:30:19,279 INFO [train.py:1114] (0/4) Epoch 14, batch 4800, loss[loss=0.2071, simple_loss=0.2936, pruned_loss=0.06034, over 4699.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2731, pruned_loss=0.0475, over 933049.80 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:30:23,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.20 vs. limit=22.5 +2024-07-28 17:30:28,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=183600.0, ans=0.125 +2024-07-28 17:30:35,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=183613.33333333334, ans=0.125 +2024-07-28 17:30:37,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.83 vs. limit=10.0 +2024-07-28 17:30:39,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-07-28 17:30:39,864 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.21 vs. limit=10.0 +2024-07-28 17:30:41,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=183626.66666666666, ans=0.125 +2024-07-28 17:30:41,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0 +2024-07-28 17:30:45,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=183640.0, ans=0.0 +2024-07-28 17:30:46,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.60 vs. limit=22.5 +2024-07-28 17:30:53,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=183640.0, ans=0.0 +2024-07-28 17:30:54,676 INFO [train.py:1114] (0/4) Epoch 14, batch 4850, loss[loss=0.1953, simple_loss=0.2866, pruned_loss=0.05203, over 4737.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2736, pruned_loss=0.04776, over 932578.34 frames. 
], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:17,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=183693.33333333334, ans=0.0 +2024-07-28 17:31:22,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183706.66666666666, ans=0.125 +2024-07-28 17:31:24,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=183706.66666666666, ans=0.025 +2024-07-28 17:31:30,326 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.421e+01 5.850e+01 6.499e+01 1.354e+02, threshold=1.170e+02, percent-clipped=1.0 +2024-07-28 17:31:31,709 INFO [train.py:1114] (0/4) Epoch 14, batch 4900, loss[loss=0.1956, simple_loss=0.2889, pruned_loss=0.05117, over 4761.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2731, pruned_loss=0.04741, over 934431.00 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:50,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=183746.66666666666, ans=0.025 +2024-07-28 17:31:59,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.40 vs. limit=15.0 +2024-07-28 17:32:02,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183773.33333333334, ans=0.125 +2024-07-28 17:32:06,237 INFO [train.py:1114] (0/4) Epoch 14, batch 4950, loss[loss=0.2162, simple_loss=0.287, pruned_loss=0.07276, over 3255.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2746, pruned_loss=0.04847, over 930961.41 frames. ], batch size: 37, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:12,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0 +2024-07-28 17:32:15,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=183800.0, ans=0.0 +2024-07-28 17:32:17,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=183800.0, ans=0.125 +2024-07-28 17:32:21,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183813.33333333334, ans=0.1 +2024-07-28 17:32:21,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183813.33333333334, ans=0.125 +2024-07-28 17:32:24,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. 
limit=15.0 +2024-07-28 17:32:25,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=183813.33333333334, ans=0.025 +2024-07-28 17:32:26,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=183826.66666666666, ans=0.0 +2024-07-28 17:32:35,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=183840.0, ans=0.125 +2024-07-28 17:32:38,292 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+01 5.530e+01 6.017e+01 6.862e+01 9.810e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 17:32:39,711 INFO [train.py:1114] (0/4) Epoch 14, batch 5000, loss[loss=0.202, simple_loss=0.2955, pruned_loss=0.05426, over 4667.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2739, pruned_loss=0.04815, over 934739.04 frames. ], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:45,612 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.12 vs. limit=15.0 +2024-07-28 17:32:49,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=183866.66666666666, ans=0.0 +2024-07-28 17:32:51,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=15.0 +2024-07-28 17:32:56,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183880.0, ans=0.125 +2024-07-28 17:32:57,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183880.0, ans=0.1 +2024-07-28 17:32:58,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=183880.0, ans=0.125 +2024-07-28 17:33:00,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=183893.33333333334, ans=0.125 +2024-07-28 17:33:03,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=183893.33333333334, ans=0.0 +2024-07-28 17:33:12,767 INFO [train.py:1114] (0/4) Epoch 14, batch 5050, loss[loss=0.1776, simple_loss=0.2637, pruned_loss=0.0458, over 4853.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2737, pruned_loss=0.04819, over 937155.70 frames. 
], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:15,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=183920.0, ans=0.125 +2024-07-28 17:33:17,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=183920.0, ans=0.125 +2024-07-28 17:33:17,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183920.0, ans=0.125 +2024-07-28 17:33:22,517 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:33:45,600 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.607e+01 6.225e+01 6.953e+01 1.020e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 17:33:47,325 INFO [train.py:1114] (0/4) Epoch 14, batch 5100, loss[loss=0.1711, simple_loss=0.2632, pruned_loss=0.03951, over 4772.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2732, pruned_loss=0.04817, over 934784.72 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:47,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=183986.66666666666, ans=0.0 +2024-07-28 17:33:49,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.79 vs. limit=12.0 +2024-07-28 17:33:51,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183986.66666666666, ans=0.125 +2024-07-28 17:33:51,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=183986.66666666666, ans=0.125 +2024-07-28 17:33:54,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184000.0, ans=0.1 +2024-07-28 17:33:56,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=184000.0, ans=0.125 +2024-07-28 17:33:59,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.30 vs. limit=10.0 +2024-07-28 17:34:16,539 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:34:16,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184040.0, ans=0.125 +2024-07-28 17:34:21,091 INFO [train.py:1114] (0/4) Epoch 14, batch 5150, loss[loss=0.2078, simple_loss=0.2852, pruned_loss=0.06523, over 4854.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2738, pruned_loss=0.04843, over 935854.33 frames. 
], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:34:32,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=184066.66666666666, ans=0.0 +2024-07-28 17:34:47,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184093.33333333334, ans=0.0 +2024-07-28 17:34:54,938 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.666e+01 6.187e+01 7.169e+01 1.415e+02, threshold=1.237e+02, percent-clipped=1.0 +2024-07-28 17:34:55,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=184120.0, ans=0.0 +2024-07-28 17:34:55,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184120.0, ans=0.0 +2024-07-28 17:34:56,335 INFO [train.py:1114] (0/4) Epoch 14, batch 5200, loss[loss=0.2037, simple_loss=0.2992, pruned_loss=0.05412, over 4657.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2735, pruned_loss=0.04785, over 936020.07 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:35:13,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=184146.66666666666, ans=0.05 +2024-07-28 17:35:17,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=184160.0, ans=0.125 +2024-07-28 17:35:20,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=184160.0, ans=12.0 +2024-07-28 17:35:24,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=184160.0, ans=0.05 +2024-07-28 17:35:24,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=184160.0, ans=0.5 +2024-07-28 17:35:29,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=184173.33333333334, ans=0.125 +2024-07-28 17:35:42,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=184173.33333333334, ans=0.025 +2024-07-28 17:35:44,102 INFO [train.py:1114] (0/4) Epoch 14, batch 5250, loss[loss=0.1879, simple_loss=0.2792, pruned_loss=0.04829, over 4902.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2718, pruned_loss=0.04682, over 935745.50 frames. ], batch size: 13, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:36:04,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=184186.66666666666, ans=0.0 +2024-07-28 17:36:17,676 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. 
limit=15.0 +2024-07-28 17:36:36,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=184213.33333333334, ans=0.2 +2024-07-28 17:36:38,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184226.66666666666, ans=0.125 +2024-07-28 17:37:42,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=184240.0, ans=0.125 +2024-07-28 17:37:42,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.26 vs. limit=6.0 +2024-07-28 17:37:46,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=184240.0, ans=0.1 +2024-07-28 17:37:48,424 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.478e+01 6.210e+01 7.367e+01 1.027e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 17:37:49,750 INFO [train.py:1114] (0/4) Epoch 14, batch 5300, loss[loss=0.1849, simple_loss=0.2851, pruned_loss=0.04234, over 4903.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2718, pruned_loss=0.04677, over 934243.83 frames. ], batch size: 17, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:37:49,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=184253.33333333334, ans=0.025 +2024-07-28 17:37:50,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=184253.33333333334, ans=0.125 +2024-07-28 17:37:59,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.29 vs. limit=12.0 +2024-07-28 17:38:06,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-07-28 17:38:08,582 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0 +2024-07-28 17:38:19,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=184306.66666666666, ans=0.0 +2024-07-28 17:38:25,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=184306.66666666666, ans=0.0 +2024-07-28 17:38:26,461 INFO [train.py:1114] (0/4) Epoch 14, batch 5350, loss[loss=0.1717, simple_loss=0.2511, pruned_loss=0.04609, over 4511.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2726, pruned_loss=0.04701, over 936284.52 frames. ], batch size: 10, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:38:36,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=184333.33333333334, ans=0.0 +2024-07-28 17:38:36,648 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. 
limit=15.0 +2024-07-28 17:38:41,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184346.66666666666, ans=0.1 +2024-07-28 17:38:42,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=184346.66666666666, ans=0.125 +2024-07-28 17:38:43,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=184346.66666666666, ans=0.125 +2024-07-28 17:38:46,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.58 vs. limit=15.0 +2024-07-28 17:38:57,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184373.33333333334, ans=0.1 +2024-07-28 17:38:58,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=184373.33333333334, ans=0.2 +2024-07-28 17:38:59,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=184373.33333333334, ans=0.2 +2024-07-28 17:39:01,378 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.641e+01 6.338e+01 7.374e+01 1.167e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 17:39:02,103 INFO [train.py:1114] (0/4) Epoch 14, batch 5400, loss[loss=0.1985, simple_loss=0.2951, pruned_loss=0.05098, over 4389.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2737, pruned_loss=0.04767, over 930550.93 frames. ], batch size: 26, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:08,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=184400.0, ans=0.0 +2024-07-28 17:39:10,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=184400.0, ans=0.0 +2024-07-28 17:39:21,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.63 vs. limit=15.0 +2024-07-28 17:39:24,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=184426.66666666666, ans=0.04949747468305833 +2024-07-28 17:39:29,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=184440.0, ans=0.0 +2024-07-28 17:39:30,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=184440.0, ans=0.5 +2024-07-28 17:39:35,482 INFO [train.py:1114] (0/4) Epoch 14, batch 5450, loss[loss=0.1692, simple_loss=0.2488, pruned_loss=0.04475, over 4698.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04774, over 933341.24 frames. 
], batch size: 11, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:37,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=184453.33333333334, ans=0.0 +2024-07-28 17:39:41,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184466.66666666666, ans=0.125 +2024-07-28 17:39:43,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=184466.66666666666, ans=0.0 +2024-07-28 17:39:45,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184466.66666666666, ans=0.125 +2024-07-28 17:39:50,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184480.0, ans=0.0 +2024-07-28 17:39:51,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=184480.0, ans=10.0 +2024-07-28 17:39:59,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=184493.33333333334, ans=0.0 +2024-07-28 17:40:01,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184493.33333333334, ans=0.125 +2024-07-28 17:40:06,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=184506.66666666666, ans=0.95 +2024-07-28 17:40:08,545 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.352e+01 5.587e+01 6.313e+01 7.261e+01 1.072e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 17:40:09,232 INFO [train.py:1114] (0/4) Epoch 14, batch 5500, loss[loss=0.213, simple_loss=0.2999, pruned_loss=0.06309, over 4422.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2734, pruned_loss=0.04806, over 931588.74 frames. ], batch size: 26, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:10,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=184520.0, ans=0.125 +2024-07-28 17:40:15,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.53 vs. limit=15.0 +2024-07-28 17:40:18,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184533.33333333334, ans=0.125 +2024-07-28 17:40:39,931 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:40:45,304 INFO [train.py:1114] (0/4) Epoch 14, batch 5550, loss[loss=0.1672, simple_loss=0.2593, pruned_loss=0.03756, over 4709.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2743, pruned_loss=0.04858, over 933681.03 frames. 
], batch size: 12, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:51,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=184600.0, ans=0.2 +2024-07-28 17:41:03,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=184613.33333333334, ans=0.125 +2024-07-28 17:41:04,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=184613.33333333334, ans=0.125 +2024-07-28 17:41:07,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184626.66666666666, ans=0.125 +2024-07-28 17:41:09,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184626.66666666666, ans=0.125 +2024-07-28 17:41:18,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184640.0, ans=0.1 +2024-07-28 17:41:19,033 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 6.017e+01 6.994e+01 8.294e+01 1.224e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-28 17:41:19,711 INFO [train.py:1114] (0/4) Epoch 14, batch 5600, loss[loss=0.2087, simple_loss=0.2955, pruned_loss=0.06097, over 4748.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2746, pruned_loss=0.04855, over 934480.51 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:41:24,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184653.33333333334, ans=0.125 +2024-07-28 17:41:24,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=12.0 +2024-07-28 17:41:26,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=184666.66666666666, ans=0.0 +2024-07-28 17:41:27,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=184666.66666666666, ans=0.0 +2024-07-28 17:41:30,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184666.66666666666, ans=0.125 +2024-07-28 17:41:36,726 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.71 vs. limit=10.0 +2024-07-28 17:41:54,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=184706.66666666666, ans=0.2 +2024-07-28 17:42:53,660 INFO [train.py:1114] (0/4) Epoch 14, batch 5650, loss[loss=0.1795, simple_loss=0.2793, pruned_loss=0.03982, over 4484.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2729, pruned_loss=0.04815, over 937107.92 frames. 
], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:43:17,722 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:43:35,142 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.575e+01 6.312e+01 7.151e+01 9.820e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 17:43:35,910 INFO [train.py:1114] (0/4) Epoch 14, batch 5700, loss[loss=0.2036, simple_loss=0.2988, pruned_loss=0.05417, over 4692.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2732, pruned_loss=0.04783, over 938249.84 frames. ], batch size: 13, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:43:45,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=184800.0, ans=0.0 +2024-07-28 17:43:59,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.52 vs. limit=15.0 +2024-07-28 17:44:06,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=184840.0, ans=0.05 +2024-07-28 17:44:12,487 INFO [train.py:1114] (0/4) Epoch 14, batch 5750, loss[loss=0.2247, simple_loss=0.3118, pruned_loss=0.06882, over 4747.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2739, pruned_loss=0.04802, over 938345.43 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:44:14,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184853.33333333334, ans=0.1 +2024-07-28 17:44:22,244 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0 +2024-07-28 17:44:56,657 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.552e+01 6.040e+01 6.826e+01 9.653e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 17:44:57,336 INFO [train.py:1114] (0/4) Epoch 14, batch 5800, loss[loss=0.2161, simple_loss=0.3066, pruned_loss=0.06284, over 4710.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2747, pruned_loss=0.04838, over 937422.16 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:44:58,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=184920.0, ans=0.0 +2024-07-28 17:45:11,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=184946.66666666666, ans=0.2 +2024-07-28 17:45:15,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.38 vs. 
limit=15.0 +2024-07-28 17:45:18,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=184960.0, ans=0.125 +2024-07-28 17:45:19,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=184960.0, ans=0.025 +2024-07-28 17:45:25,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=184973.33333333334, ans=0.0 +2024-07-28 17:45:29,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=184973.33333333334, ans=10.0 +2024-07-28 17:45:29,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=184973.33333333334, ans=0.2 +2024-07-28 17:45:29,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=184973.33333333334, ans=0.2 +2024-07-28 17:45:32,352 INFO [train.py:1114] (0/4) Epoch 14, batch 5850, loss[loss=0.2012, simple_loss=0.3047, pruned_loss=0.04881, over 4549.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2754, pruned_loss=0.04853, over 937948.76 frames. ], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:41,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=185000.0, ans=0.125 +2024-07-28 17:45:42,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=185000.0, ans=0.04949747468305833 +2024-07-28 17:45:55,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=185026.66666666666, ans=0.2 +2024-07-28 17:46:05,088 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.675e+01 6.318e+01 7.157e+01 1.040e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 17:46:05,819 INFO [train.py:1114] (0/4) Epoch 14, batch 5900, loss[loss=0.1955, simple_loss=0.2863, pruned_loss=0.05236, over 4668.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2744, pruned_loss=0.04793, over 937944.85 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:08,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.21 vs. limit=6.0 +2024-07-28 17:46:10,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=185053.33333333334, ans=0.025 +2024-07-28 17:46:12,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185053.33333333334, ans=0.0 +2024-07-28 17:46:13,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.24 vs. limit=12.0 +2024-07-28 17:46:36,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.35 vs. limit=15.0 +2024-07-28 17:46:39,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=185120.0, ans=0.07 +2024-07-28 17:46:40,164 INFO [train.py:1114] (0/4) Epoch 14, batch 5950, loss[loss=0.2027, simple_loss=0.2913, pruned_loss=0.05708, over 4672.00 frames. 
], tot_loss[loss=0.1852, simple_loss=0.2743, pruned_loss=0.04807, over 939711.11 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:43,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=185120.0, ans=0.125 +2024-07-28 17:46:45,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=185120.0, ans=0.2 +2024-07-28 17:46:58,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=185146.66666666666, ans=0.0 +2024-07-28 17:47:01,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.28 vs. limit=22.5 +2024-07-28 17:47:02,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=185160.0, ans=10.0 +2024-07-28 17:47:02,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185160.0, ans=0.1 +2024-07-28 17:47:03,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=185160.0, ans=0.0 +2024-07-28 17:47:17,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=185173.33333333334, ans=0.0 +2024-07-28 17:47:18,298 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.153e+01 5.664e+01 6.270e+01 7.000e+01 1.010e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:47:19,013 INFO [train.py:1114] (0/4) Epoch 14, batch 6000, loss[loss=0.2259, simple_loss=0.3067, pruned_loss=0.07251, over 4175.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2742, pruned_loss=0.04819, over 937000.95 frames. ], batch size: 25, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:47:19,013 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 17:49:13,226 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.8744, 3.4634, 3.8525, 4.2069], device='cuda:0') +2024-07-28 17:49:17,897 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1656, simple_loss=0.2686, pruned_loss=0.03133, over 944034.00 frames. +2024-07-28 17:49:17,898 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 17:49:24,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185200.0, ans=0.0 +2024-07-28 17:49:31,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185213.33333333334, ans=0.125 +2024-07-28 17:49:32,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=15.0 +2024-07-28 17:49:34,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=185213.33333333334, ans=15.0 +2024-07-28 17:49:35,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. 
limit=15.0 +2024-07-28 17:49:47,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185240.0, ans=0.125 +2024-07-28 17:49:52,015 INFO [train.py:1114] (0/4) Epoch 14, batch 6050, loss[loss=0.164, simple_loss=0.2539, pruned_loss=0.03703, over 4772.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.274, pruned_loss=0.04834, over 938083.30 frames. ], batch size: 12, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:50:04,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=185266.66666666666, ans=0.125 +2024-07-28 17:50:14,121 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=15.0 +2024-07-28 17:50:14,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=185293.33333333334, ans=0.0 +2024-07-28 17:50:14,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185293.33333333334, ans=0.1 +2024-07-28 17:50:21,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=185306.66666666666, ans=0.2 +2024-07-28 17:50:26,099 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.501e+01 6.141e+01 7.204e+01 9.755e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 17:50:26,758 INFO [train.py:1114] (0/4) Epoch 14, batch 6100, loss[loss=0.193, simple_loss=0.2849, pruned_loss=0.05055, over 4688.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2734, pruned_loss=0.04771, over 937972.94 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:50:34,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=185333.33333333334, ans=0.125 +2024-07-28 17:50:39,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.03 vs. limit=12.0 +2024-07-28 17:50:54,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185373.33333333334, ans=0.1 +2024-07-28 17:50:55,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185373.33333333334, ans=0.125 +2024-07-28 17:50:57,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185373.33333333334, ans=0.125 +2024-07-28 17:51:00,654 INFO [train.py:1114] (0/4) Epoch 14, batch 6150, loss[loss=0.2299, simple_loss=0.3024, pruned_loss=0.07866, over 3254.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2733, pruned_loss=0.0476, over 936368.71 frames. 
], batch size: 35, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:51:07,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=185400.0, ans=0.2 +2024-07-28 17:51:16,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=185413.33333333334, ans=0.125 +2024-07-28 17:51:20,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=185426.66666666666, ans=0.125 +2024-07-28 17:51:33,270 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.520e+01 6.468e+01 7.669e+01 1.156e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 17:51:33,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.80 vs. limit=15.0 +2024-07-28 17:51:33,903 INFO [train.py:1114] (0/4) Epoch 14, batch 6200, loss[loss=0.2052, simple_loss=0.304, pruned_loss=0.05321, over 4742.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2738, pruned_loss=0.04762, over 936565.98 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:51:38,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=185453.33333333334, ans=0.0 +2024-07-28 17:51:54,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=22.5 +2024-07-28 17:51:57,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185493.33333333334, ans=0.125 +2024-07-28 17:52:05,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=185506.66666666666, ans=0.125 +2024-07-28 17:52:06,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=185506.66666666666, ans=0.025 +2024-07-28 17:52:11,471 INFO [train.py:1114] (0/4) Epoch 14, batch 6250, loss[loss=0.1777, simple_loss=0.2811, pruned_loss=0.0372, over 4802.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.04793, over 933306.75 frames. 
], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:18,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=185533.33333333334, ans=0.0 +2024-07-28 17:52:19,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185533.33333333334, ans=0.125 +2024-07-28 17:52:24,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=185533.33333333334, ans=0.05 +2024-07-28 17:52:31,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=185546.66666666666, ans=10.0 +2024-07-28 17:52:31,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=185546.66666666666, ans=0.09899494936611666 +2024-07-28 17:52:34,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=185560.0, ans=0.0 +2024-07-28 17:52:35,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=185560.0, ans=0.2 +2024-07-28 17:52:40,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=185573.33333333334, ans=0.125 +2024-07-28 17:52:44,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.71 vs. limit=22.5 +2024-07-28 17:52:45,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=185573.33333333334, ans=0.0 +2024-07-28 17:52:46,206 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.580e+01 6.337e+01 7.212e+01 1.101e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 17:52:46,903 INFO [train.py:1114] (0/4) Epoch 14, batch 6300, loss[loss=0.1497, simple_loss=0.2357, pruned_loss=0.03186, over 4550.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2742, pruned_loss=0.04844, over 930131.78 frames. ], batch size: 10, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:52,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=185586.66666666666, ans=0.0 +2024-07-28 17:52:54,401 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:53:01,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=185613.33333333334, ans=0.0 +2024-07-28 17:53:09,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=185626.66666666666, ans=0.0 +2024-07-28 17:53:10,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.55 vs. limit=15.0 +2024-07-28 17:53:17,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185640.0, ans=0.0 +2024-07-28 17:53:19,477 INFO [train.py:1114] (0/4) Epoch 14, batch 6350, loss[loss=0.191, simple_loss=0.2966, pruned_loss=0.04273, over 4647.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2749, pruned_loss=0.04861, over 934376.26 frames. 
], batch size: 22, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:53:40,824 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:53:45,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=185706.66666666666, ans=0.0 +2024-07-28 17:53:47,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=185706.66666666666, ans=0.125 +2024-07-28 17:53:49,493 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:54:02,939 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.778e+01 6.430e+01 7.550e+01 1.026e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 17:54:03,632 INFO [train.py:1114] (0/4) Epoch 14, batch 6400, loss[loss=0.1704, simple_loss=0.2584, pruned_loss=0.04126, over 4636.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2754, pruned_loss=0.04928, over 935955.15 frames. ], batch size: 13, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:54:18,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.14 vs. limit=15.0 +2024-07-28 17:54:24,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=185746.66666666666, ans=0.0 +2024-07-28 17:54:27,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=185760.0, ans=0.025 +2024-07-28 17:54:39,356 INFO [train.py:1114] (0/4) Epoch 14, batch 6450, loss[loss=0.2119, simple_loss=0.3029, pruned_loss=0.06044, over 4473.00 frames. ], tot_loss[loss=0.186, simple_loss=0.275, pruned_loss=0.04851, over 939450.12 frames. ], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:54:45,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185800.0, ans=0.1 +2024-07-28 17:54:53,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=185813.33333333334, ans=0.125 +2024-07-28 17:55:11,355 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.736e+01 6.499e+01 7.740e+01 1.076e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 17:55:12,097 INFO [train.py:1114] (0/4) Epoch 14, batch 6500, loss[loss=0.2557, simple_loss=0.3345, pruned_loss=0.08843, over 3456.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.274, pruned_loss=0.04831, over 940711.71 frames. 
], batch size: 37, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:12,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=185853.33333333334, ans=0.09899494936611666 +2024-07-28 17:55:16,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185853.33333333334, ans=0.125 +2024-07-28 17:55:19,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185866.66666666666, ans=0.1 +2024-07-28 17:55:22,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185866.66666666666, ans=0.1 +2024-07-28 17:55:47,170 INFO [train.py:1114] (0/4) Epoch 14, batch 6550, loss[loss=0.1507, simple_loss=0.2278, pruned_loss=0.03679, over 4796.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.272, pruned_loss=0.04741, over 943492.54 frames. ], batch size: 11, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:53,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=15.0 +2024-07-28 17:55:53,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185933.33333333334, ans=0.125 +2024-07-28 17:55:56,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=185933.33333333334, ans=0.125 +2024-07-28 17:55:59,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185946.66666666666, ans=0.0 +2024-07-28 17:56:00,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=22.5 +2024-07-28 17:56:16,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=185973.33333333334, ans=0.125 +2024-07-28 17:56:19,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.641e+01 6.098e+01 6.852e+01 1.074e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:56:20,143 INFO [train.py:1114] (0/4) Epoch 14, batch 6600, loss[loss=0.1746, simple_loss=0.2658, pruned_loss=0.04171, over 4934.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2712, pruned_loss=0.04679, over 945352.11 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:26,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186000.0, ans=0.125 +2024-07-28 17:56:52,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=186040.0, ans=0.0 +2024-07-28 17:56:52,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=186040.0, ans=0.125 +2024-07-28 17:56:53,739 INFO [train.py:1114] (0/4) Epoch 14, batch 6650, loss[loss=0.1793, simple_loss=0.273, pruned_loss=0.04279, over 4632.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2703, pruned_loss=0.04637, over 944108.72 frames. 
], batch size: 17, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:59,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=186066.66666666666, ans=0.0 +2024-07-28 17:57:30,080 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 5.766e+01 6.384e+01 7.192e+01 1.160e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 17:57:30,788 INFO [train.py:1114] (0/4) Epoch 14, batch 6700, loss[loss=0.2115, simple_loss=0.2954, pruned_loss=0.06382, over 4704.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.272, pruned_loss=0.04677, over 942798.28 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:57:38,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.57 vs. limit=22.5 +2024-07-28 17:57:42,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186133.33333333334, ans=0.1 +2024-07-28 17:57:51,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=186160.0, ans=0.125 +2024-07-28 17:57:56,098 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.40 vs. limit=15.0 +2024-07-28 17:58:00,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186173.33333333334, ans=0.125 +2024-07-28 17:58:06,968 INFO [train.py:1114] (0/4) Epoch 14, batch 6750, loss[loss=0.2103, simple_loss=0.3018, pruned_loss=0.05938, over 4197.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2728, pruned_loss=0.04719, over 940775.69 frames. ], batch size: 25, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:09,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-07-28 17:58:28,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=186226.66666666666, ans=0.125 +2024-07-28 17:58:29,036 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.17 vs. limit=6.0 +2024-07-28 17:58:38,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-28 17:58:40,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.569e+01 6.146e+01 7.020e+01 9.338e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 17:58:41,117 INFO [train.py:1114] (0/4) Epoch 14, batch 6800, loss[loss=0.2169, simple_loss=0.308, pruned_loss=0.06288, over 4637.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2739, pruned_loss=0.04773, over 938999.16 frames. 
], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:47,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186266.66666666666, ans=0.1 +2024-07-28 17:58:48,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=186266.66666666666, ans=0.0 +2024-07-28 17:58:49,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.67 vs. limit=22.5 +2024-07-28 17:58:53,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=186280.0, ans=0.125 +2024-07-28 17:58:56,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=186280.0, ans=0.0 +2024-07-28 17:58:57,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=186280.0, ans=0.95 +2024-07-28 17:59:00,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=186293.33333333334, ans=0.2 +2024-07-28 17:59:09,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186306.66666666666, ans=0.1 +2024-07-28 17:59:14,770 INFO [train.py:1114] (0/4) Epoch 14, batch 6850, loss[loss=0.1624, simple_loss=0.2541, pruned_loss=0.03538, over 4698.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04733, over 940658.27 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:59:16,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=186320.0, ans=0.125 +2024-07-28 17:59:26,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186333.33333333334, ans=0.1 +2024-07-28 17:59:26,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=12.0 +2024-07-28 17:59:38,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=186360.0, ans=0.125 +2024-07-28 17:59:43,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0 +2024-07-28 17:59:47,792 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.718e+01 6.199e+01 6.949e+01 1.067e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 17:59:48,472 INFO [train.py:1114] (0/4) Epoch 14, batch 6900, loss[loss=0.1723, simple_loss=0.2605, pruned_loss=0.04204, over 4955.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2717, pruned_loss=0.04721, over 942797.07 frames. 
], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:59:59,156 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:00:10,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=186426.66666666666, ans=0.09899494936611666 +2024-07-28 18:00:12,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=186426.66666666666, ans=0.0 +2024-07-28 18:00:22,486 INFO [train.py:1114] (0/4) Epoch 14, batch 6950, loss[loss=0.1431, simple_loss=0.2219, pruned_loss=0.03215, over 4476.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2714, pruned_loss=0.04717, over 939749.01 frames. ], batch size: 10, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:23,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=186453.33333333334, ans=0.125 +2024-07-28 18:00:26,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.32 vs. limit=15.0 +2024-07-28 18:00:33,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=15.0 +2024-07-28 18:00:38,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=186480.0, ans=0.125 +2024-07-28 18:00:40,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=186480.0, ans=0.09899494936611666 +2024-07-28 18:00:47,947 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.77 vs. limit=15.0 +2024-07-28 18:00:54,801 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.637e+01 6.195e+01 7.111e+01 9.946e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 18:00:55,482 INFO [train.py:1114] (0/4) Epoch 14, batch 7000, loss[loss=0.1887, simple_loss=0.2835, pruned_loss=0.04697, over 4613.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2714, pruned_loss=0.04692, over 938717.28 frames. 
], batch size: 17, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:56,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186520.0, ans=0.1 +2024-07-28 18:00:56,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=186520.0, ans=0.0 +2024-07-28 18:01:00,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=186520.0, ans=0.2 +2024-07-28 18:01:00,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=186520.0, ans=0.125 +2024-07-28 18:01:02,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186533.33333333334, ans=0.125 +2024-07-28 18:01:09,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=186546.66666666666, ans=0.0 +2024-07-28 18:01:28,541 INFO [train.py:1114] (0/4) Epoch 14, batch 7050, loss[loss=0.1715, simple_loss=0.263, pruned_loss=0.04005, over 4722.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2721, pruned_loss=0.04705, over 941907.25 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:01:30,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186586.66666666666, ans=0.1 +2024-07-28 18:01:32,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=186586.66666666666, ans=0.125 +2024-07-28 18:01:34,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186586.66666666666, ans=0.1 +2024-07-28 18:01:41,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=186600.0, ans=0.125 +2024-07-28 18:02:03,021 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.702e+01 6.224e+01 7.168e+01 1.076e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 18:02:03,697 INFO [train.py:1114] (0/4) Epoch 14, batch 7100, loss[loss=0.215, simple_loss=0.3134, pruned_loss=0.05824, over 4795.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2739, pruned_loss=0.04802, over 936265.46 frames. ], batch size: 15, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:02:09,648 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-140000.pt +2024-07-28 18:02:24,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.74 vs. limit=15.0 +2024-07-28 18:02:29,939 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.28 vs. limit=15.0 +2024-07-28 18:02:35,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186693.33333333334, ans=0.125 +2024-07-28 18:02:46,791 INFO [train.py:1114] (0/4) Epoch 14, batch 7150, loss[loss=0.2059, simple_loss=0.304, pruned_loss=0.05391, over 4377.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2727, pruned_loss=0.04766, over 937075.04 frames. 
], batch size: 21, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:03:26,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=186733.33333333334, ans=0.125 +2024-07-28 18:03:57,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=186746.66666666666, ans=0.2 +2024-07-28 18:03:59,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=186760.0, ans=0.0 +2024-07-28 18:04:09,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186760.0, ans=0.125 +2024-07-28 18:04:16,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=186773.33333333334, ans=0.2 +2024-07-28 18:04:16,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186773.33333333334, ans=0.0 +2024-07-28 18:04:20,576 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.638e+01 6.134e+01 6.924e+01 9.250e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 18:04:21,223 INFO [train.py:1114] (0/4) Epoch 14, batch 7200, loss[loss=0.1928, simple_loss=0.2846, pruned_loss=0.05055, over 4809.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2727, pruned_loss=0.04768, over 937712.62 frames. ], batch size: 15, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:04:28,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=186786.66666666666, ans=0.0 +2024-07-28 18:04:45,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=186800.0, ans=0.125 +2024-07-28 18:04:45,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=186800.0, ans=0.125 +2024-07-28 18:04:57,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.80 vs. limit=10.0 +2024-07-28 18:04:59,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=186826.66666666666, ans=0.125 +2024-07-28 18:05:00,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=186826.66666666666, ans=0.125 +2024-07-28 18:05:06,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186826.66666666666, ans=0.1 +2024-07-28 18:05:17,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=186840.0, ans=0.125 +2024-07-28 18:05:24,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=186853.33333333334, ans=0.04949747468305833 +2024-07-28 18:05:25,088 INFO [train.py:1114] (0/4) Epoch 14, batch 7250, loss[loss=0.1771, simple_loss=0.2614, pruned_loss=0.04635, over 4838.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2718, pruned_loss=0.0473, over 939387.05 frames. 
], batch size: 12, lr: 5.30e-03, grad_scale: 32.0 +2024-07-28 18:05:28,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=186853.33333333334, ans=0.125 +2024-07-28 18:05:37,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=186866.66666666666, ans=0.2 +2024-07-28 18:05:47,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.09 vs. limit=15.0 +2024-07-28 18:05:53,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=186880.0, ans=0.0 +2024-07-28 18:05:56,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=186893.33333333334, ans=0.5 +2024-07-28 18:05:56,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.04 vs. limit=15.0 +2024-07-28 18:06:09,772 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-28 18:06:13,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.624e+01 6.211e+01 6.890e+01 1.048e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 18:06:14,142 INFO [train.py:1114] (0/4) Epoch 14, batch 7300, loss[loss=0.1754, simple_loss=0.2605, pruned_loss=0.04519, over 4854.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2717, pruned_loss=0.04745, over 939372.30 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 32.0 +2024-07-28 18:06:14,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=186920.0, ans=0.07 +2024-07-28 18:06:20,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=186933.33333333334, ans=0.0 +2024-07-28 18:06:20,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.92 vs. limit=22.5 +2024-07-28 18:06:45,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186973.33333333334, ans=0.1 +2024-07-28 18:06:46,876 INFO [train.py:1114] (0/4) Epoch 14, batch 7350, loss[loss=0.1494, simple_loss=0.2426, pruned_loss=0.02809, over 4642.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2722, pruned_loss=0.04745, over 938638.70 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:06:57,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=187000.0, ans=0.125 +2024-07-28 18:06:57,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187000.0, ans=0.125 +2024-07-28 18:07:12,266 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.00 vs. 
limit=12.0 +2024-07-28 18:07:21,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.480e+01 5.943e+01 6.743e+01 9.456e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 18:07:22,464 INFO [train.py:1114] (0/4) Epoch 14, batch 7400, loss[loss=0.1709, simple_loss=0.2729, pruned_loss=0.03448, over 4683.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2716, pruned_loss=0.04671, over 940029.85 frames. ], batch size: 13, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:07:23,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=187053.33333333334, ans=0.0 +2024-07-28 18:07:23,909 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:07:28,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0 +2024-07-28 18:07:32,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.04 vs. limit=22.5 +2024-07-28 18:07:34,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=187066.66666666666, ans=0.125 +2024-07-28 18:07:47,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=187106.66666666666, ans=0.125 +2024-07-28 18:07:54,562 INFO [train.py:1114] (0/4) Epoch 14, batch 7450, loss[loss=0.1702, simple_loss=0.2565, pruned_loss=0.04191, over 4608.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2714, pruned_loss=0.04678, over 937657.54 frames. ], batch size: 11, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:07:55,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=187120.0, ans=0.5 +2024-07-28 18:08:01,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=187120.0, ans=0.125 +2024-07-28 18:08:05,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0 +2024-07-28 18:08:14,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=187146.66666666666, ans=0.0 +2024-07-28 18:08:21,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=187160.0, ans=0.07 +2024-07-28 18:08:28,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.589e+01 6.200e+01 6.910e+01 1.220e+02, threshold=1.240e+02, percent-clipped=1.0 +2024-07-28 18:08:29,227 INFO [train.py:1114] (0/4) Epoch 14, batch 7500, loss[loss=0.2078, simple_loss=0.2967, pruned_loss=0.05942, over 3502.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.272, pruned_loss=0.04708, over 935852.87 frames. 
], batch size: 35, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:08:29,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=187186.66666666666, ans=0.0 +2024-07-28 18:08:30,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=187186.66666666666, ans=0.0 +2024-07-28 18:08:32,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187186.66666666666, ans=0.125 +2024-07-28 18:08:38,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=187200.0, ans=0.125 +2024-07-28 18:09:15,737 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:09:19,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=187240.0, ans=0.125 +2024-07-28 18:09:20,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=187240.0, ans=0.025 +2024-07-28 18:09:23,919 INFO [train.py:1114] (0/4) Epoch 14, batch 7550, loss[loss=0.184, simple_loss=0.2815, pruned_loss=0.04326, over 4898.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2734, pruned_loss=0.04733, over 936127.70 frames. ], batch size: 18, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:09:24,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=187253.33333333334, ans=0.125 +2024-07-28 18:09:26,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=187253.33333333334, ans=0.125 +2024-07-28 18:09:27,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=187253.33333333334, ans=0.0 +2024-07-28 18:09:29,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-07-28 18:09:33,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.99 vs. limit=10.0 +2024-07-28 18:09:36,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187280.0, ans=0.125 +2024-07-28 18:09:37,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=187280.0, ans=0.125 +2024-07-28 18:09:40,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=187280.0, ans=0.0 +2024-07-28 18:09:51,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.20 vs. 
limit=12.0 +2024-07-28 18:09:55,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=187306.66666666666, ans=0.125 +2024-07-28 18:09:56,066 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.167e+01 5.585e+01 6.021e+01 6.660e+01 1.005e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 18:09:56,727 INFO [train.py:1114] (0/4) Epoch 14, batch 7600, loss[loss=0.1618, simple_loss=0.2569, pruned_loss=0.03334, over 4807.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2727, pruned_loss=0.04743, over 938593.54 frames. ], batch size: 14, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:10:00,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.27 vs. limit=22.5 +2024-07-28 18:10:03,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=187333.33333333334, ans=0.0 +2024-07-28 18:10:17,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=187360.0, ans=0.125 +2024-07-28 18:10:21,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=187360.0, ans=0.5 +2024-07-28 18:10:29,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=187386.66666666666, ans=0.125 +2024-07-28 18:10:29,864 INFO [train.py:1114] (0/4) Epoch 14, batch 7650, loss[loss=0.1891, simple_loss=0.2677, pruned_loss=0.05526, over 4943.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2724, pruned_loss=0.04748, over 937155.58 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:10:44,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=187413.33333333334, ans=0.125 +2024-07-28 18:11:02,124 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.638e+01 6.341e+01 7.114e+01 1.063e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 18:11:02,777 INFO [train.py:1114] (0/4) Epoch 14, batch 7700, loss[loss=0.2183, simple_loss=0.2993, pruned_loss=0.06861, over 4703.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2721, pruned_loss=0.0472, over 934141.80 frames. ], batch size: 13, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:11:09,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=187453.33333333334, ans=0.2 +2024-07-28 18:11:10,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=187466.66666666666, ans=0.1 +2024-07-28 18:11:24,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. limit=10.0 +2024-07-28 18:11:25,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.28 vs. limit=15.0 +2024-07-28 18:11:48,688 INFO [train.py:1114] (0/4) Epoch 14, batch 7750, loss[loss=0.1606, simple_loss=0.2513, pruned_loss=0.03496, over 4936.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2739, pruned_loss=0.04789, over 935408.86 frames. 
], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:12:05,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=187546.66666666666, ans=0.125 +2024-07-28 18:12:22,670 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 5.694e+01 6.124e+01 6.801e+01 8.564e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 18:12:24,028 INFO [train.py:1114] (0/4) Epoch 14, batch 7800, loss[loss=0.2278, simple_loss=0.3178, pruned_loss=0.0689, over 4649.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2752, pruned_loss=0.04866, over 937040.78 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:12:33,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=187600.0, ans=0.2 +2024-07-28 18:12:34,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=187600.0, ans=0.0 +2024-07-28 18:12:40,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=187613.33333333334, ans=0.125 +2024-07-28 18:12:45,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=187626.66666666666, ans=0.0 +2024-07-28 18:13:14,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.99 vs. limit=15.0 +2024-07-28 18:13:18,105 INFO [train.py:1114] (0/4) Epoch 14, batch 7850, loss[loss=0.1783, simple_loss=0.2581, pruned_loss=0.04932, over 4526.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2748, pruned_loss=0.04836, over 936204.47 frames. ], batch size: 10, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:13:20,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.63 vs. limit=15.0 +2024-07-28 18:13:22,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=187653.33333333334, ans=0.0 +2024-07-28 18:13:24,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=187666.66666666666, ans=0.125 +2024-07-28 18:13:27,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=187666.66666666666, ans=0.125 +2024-07-28 18:13:38,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=187693.33333333334, ans=0.05 +2024-07-28 18:13:40,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=187693.33333333334, ans=0.125 +2024-07-28 18:13:51,293 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.677e+01 6.181e+01 6.848e+01 9.012e+01, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 18:13:51,963 INFO [train.py:1114] (0/4) Epoch 14, batch 7900, loss[loss=0.2022, simple_loss=0.3031, pruned_loss=0.05069, over 4877.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2756, pruned_loss=0.0483, over 932746.29 frames. 
], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:13:57,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=187720.0, ans=0.0 +2024-07-28 18:14:04,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=187746.66666666666, ans=0.2 +2024-07-28 18:14:19,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=187773.33333333334, ans=0.125 +2024-07-28 18:14:25,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=187786.66666666666, ans=0.5 +2024-07-28 18:14:25,545 INFO [train.py:1114] (0/4) Epoch 14, batch 7950, loss[loss=0.2432, simple_loss=0.3126, pruned_loss=0.08693, over 3234.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2749, pruned_loss=0.04769, over 934794.25 frames. ], batch size: 35, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:14:27,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=187786.66666666666, ans=0.125 +2024-07-28 18:14:40,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=187813.33333333334, ans=0.125 +2024-07-28 18:14:42,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=187813.33333333334, ans=0.2 +2024-07-28 18:14:47,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=187826.66666666666, ans=0.5 +2024-07-28 18:14:52,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=187840.0, ans=10.0 +2024-07-28 18:14:53,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=187840.0, ans=0.0 +2024-07-28 18:15:01,006 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.795e+01 6.761e+01 7.899e+01 1.107e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 18:15:01,040 INFO [train.py:1114] (0/4) Epoch 14, batch 8000, loss[loss=0.1573, simple_loss=0.2324, pruned_loss=0.04112, over 4634.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2734, pruned_loss=0.04715, over 933911.29 frames. ], batch size: 11, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:15:12,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=187866.66666666666, ans=0.0 +2024-07-28 18:15:15,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.29 vs. 
limit=10.0 +2024-07-28 18:15:16,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187880.0, ans=0.125 +2024-07-28 18:15:22,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=187880.0, ans=0.125 +2024-07-28 18:15:26,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=187893.33333333334, ans=0.0 +2024-07-28 18:16:09,703 INFO [train.py:1114] (0/4) Epoch 14, batch 8050, loss[loss=0.2241, simple_loss=0.3059, pruned_loss=0.07114, over 4812.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.273, pruned_loss=0.04689, over 934400.69 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:16:13,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=187920.0, ans=0.125 +2024-07-28 18:16:13,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=187920.0, ans=0.125 +2024-07-28 18:16:24,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=187946.66666666666, ans=0.0 +2024-07-28 18:16:31,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.97 vs. limit=22.5 +2024-07-28 18:16:39,665 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. limit=15.0 +2024-07-28 18:16:41,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187986.66666666666, ans=0.0 +2024-07-28 18:16:41,831 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.834e+01 6.714e+01 7.875e+01 1.229e+02, threshold=1.343e+02, percent-clipped=0.0 +2024-07-28 18:16:41,864 INFO [train.py:1114] (0/4) Epoch 14, batch 8100, loss[loss=0.2083, simple_loss=0.2938, pruned_loss=0.06133, over 4806.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2748, pruned_loss=0.04787, over 934080.57 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:16:52,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=188000.0, ans=0.07 +2024-07-28 18:17:00,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=188013.33333333334, ans=0.125 +2024-07-28 18:17:02,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188026.66666666666, ans=0.1 +2024-07-28 18:17:04,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.80 vs. limit=15.0 +2024-07-28 18:17:04,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.80 vs. 
limit=15.0 +2024-07-28 18:17:14,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=188053.33333333334, ans=0.125 +2024-07-28 18:17:14,593 INFO [train.py:1114] (0/4) Epoch 14, batch 8150, loss[loss=0.1829, simple_loss=0.2697, pruned_loss=0.04809, over 4801.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2729, pruned_loss=0.04754, over 937271.06 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:17:17,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188053.33333333334, ans=0.125 +2024-07-28 18:17:22,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=188066.66666666666, ans=0.0 +2024-07-28 18:17:39,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=188093.33333333334, ans=0.125 +2024-07-28 18:17:43,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=188106.66666666666, ans=0.0 +2024-07-28 18:17:48,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=188106.66666666666, ans=0.0 +2024-07-28 18:17:48,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188106.66666666666, ans=0.125 +2024-07-28 18:17:49,791 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.661e+01 6.103e+01 6.886e+01 9.464e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 18:17:49,824 INFO [train.py:1114] (0/4) Epoch 14, batch 8200, loss[loss=0.2014, simple_loss=0.2902, pruned_loss=0.05628, over 4796.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.273, pruned_loss=0.04688, over 938515.72 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:17:52,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=188120.0, ans=0.125 +2024-07-28 18:17:52,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=188120.0, ans=0.5 +2024-07-28 18:17:56,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=188133.33333333334, ans=0.0 +2024-07-28 18:18:03,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=188146.66666666666, ans=0.125 +2024-07-28 18:18:03,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=188146.66666666666, ans=0.125 +2024-07-28 18:18:03,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=188146.66666666666, ans=0.025 +2024-07-28 18:18:20,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=188173.33333333334, ans=0.125 +2024-07-28 18:18:22,383 INFO [train.py:1114] (0/4) Epoch 14, batch 8250, loss[loss=0.1644, simple_loss=0.2554, pruned_loss=0.03671, over 4889.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2732, pruned_loss=0.04649, over 938209.63 frames. 
], batch size: 13, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:18:33,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=188200.0, ans=0.09899494936611666 +2024-07-28 18:18:34,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=188200.0, ans=0.0 +2024-07-28 18:18:47,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=188226.66666666666, ans=0.0 +2024-07-28 18:18:50,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=188240.0, ans=0.04949747468305833 +2024-07-28 18:18:52,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=188240.0, ans=0.2 +2024-07-28 18:18:54,605 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.616e+01 6.253e+01 7.427e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 18:18:54,652 INFO [train.py:1114] (0/4) Epoch 14, batch 8300, loss[loss=0.2396, simple_loss=0.3095, pruned_loss=0.08487, over 4907.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2749, pruned_loss=0.04753, over 938473.50 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:18:56,263 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.37 vs. limit=15.0 +2024-07-28 18:19:01,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=188266.66666666666, ans=0.025 +2024-07-28 18:19:06,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=188266.66666666666, ans=0.125 +2024-07-28 18:19:07,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.08 vs. limit=6.0 +2024-07-28 18:19:11,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=15.0 +2024-07-28 18:19:16,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-07-28 18:19:28,346 INFO [train.py:1114] (0/4) Epoch 14, batch 8350, loss[loss=0.2369, simple_loss=0.3244, pruned_loss=0.07472, over 4807.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2756, pruned_loss=0.04729, over 941315.86 frames. 
], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:19:28,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188320.0, ans=0.125 +2024-07-28 18:19:30,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=188320.0, ans=0.125 +2024-07-28 18:19:43,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188346.66666666666, ans=0.1 +2024-07-28 18:19:55,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=188373.33333333334, ans=0.0 +2024-07-28 18:19:57,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=188373.33333333334, ans=0.0 +2024-07-28 18:20:00,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188386.66666666666, ans=0.0 +2024-07-28 18:20:00,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 5.977e+01 6.680e+01 9.102e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 18:20:00,942 INFO [train.py:1114] (0/4) Epoch 14, batch 8400, loss[loss=0.1632, simple_loss=0.2454, pruned_loss=0.04054, over 4778.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2747, pruned_loss=0.04722, over 939972.68 frames. ], batch size: 12, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:20:08,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=188400.0, ans=0.0 +2024-07-28 18:20:13,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=188413.33333333334, ans=0.125 +2024-07-28 18:20:21,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=188426.66666666666, ans=0.0 +2024-07-28 18:20:28,061 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:20:33,569 INFO [train.py:1114] (0/4) Epoch 14, batch 8450, loss[loss=0.2354, simple_loss=0.3225, pruned_loss=0.07412, over 4790.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2746, pruned_loss=0.04705, over 938664.49 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:20:33,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188453.33333333334, ans=0.1 +2024-07-28 18:20:38,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=188453.33333333334, ans=0.025 +2024-07-28 18:20:39,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=188466.66666666666, ans=0.125 +2024-07-28 18:20:52,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.34 vs. 
limit=15.0 +2024-07-28 18:20:54,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188493.33333333334, ans=0.125 +2024-07-28 18:20:58,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0 +2024-07-28 18:21:01,483 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.53 vs. limit=10.0 +2024-07-28 18:21:05,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=188520.0, ans=0.125 +2024-07-28 18:21:05,570 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 6.107e+01 6.939e+01 8.222e+01 1.191e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-28 18:21:05,603 INFO [train.py:1114] (0/4) Epoch 14, batch 8500, loss[loss=0.1362, simple_loss=0.2158, pruned_loss=0.02828, over 4621.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2728, pruned_loss=0.04652, over 938332.35 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:21:06,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188520.0, ans=0.1 +2024-07-28 18:21:26,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=188560.0, ans=0.0 +2024-07-28 18:21:28,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.30 vs. limit=10.0 +2024-07-28 18:21:28,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=15.0 +2024-07-28 18:21:30,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188560.0, ans=0.1 +2024-07-28 18:21:30,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=188560.0, ans=0.125 +2024-07-28 18:21:33,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.14 vs. limit=22.5 +2024-07-28 18:21:38,572 INFO [train.py:1114] (0/4) Epoch 14, batch 8550, loss[loss=0.1627, simple_loss=0.2436, pruned_loss=0.04087, over 4795.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2725, pruned_loss=0.04656, over 939223.21 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:21:52,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=188613.33333333334, ans=0.0 +2024-07-28 18:22:10,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.17 vs. 
limit=15.0 +2024-07-28 18:22:11,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=188613.33333333334, ans=0.2 +2024-07-28 18:22:18,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=188626.66666666666, ans=0.0 +2024-07-28 18:22:26,141 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.708e+01 6.188e+01 7.242e+01 1.269e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 18:22:26,174 INFO [train.py:1114] (0/4) Epoch 14, batch 8600, loss[loss=0.2294, simple_loss=0.3165, pruned_loss=0.07115, over 4811.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2724, pruned_loss=0.04709, over 939097.61 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:22:30,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=188653.33333333334, ans=0.0 +2024-07-28 18:22:35,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188666.66666666666, ans=0.125 +2024-07-28 18:22:50,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=188680.0, ans=0.2 +2024-07-28 18:22:53,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=188693.33333333334, ans=0.0 +2024-07-28 18:22:58,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=188693.33333333334, ans=0.125 +2024-07-28 18:22:59,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=188706.66666666666, ans=0.04949747468305833 +2024-07-28 18:23:06,729 INFO [train.py:1114] (0/4) Epoch 14, batch 8650, loss[loss=0.1936, simple_loss=0.2881, pruned_loss=0.0495, over 4904.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2718, pruned_loss=0.0469, over 940234.08 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:23:10,017 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:23:15,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=188733.33333333334, ans=0.09899494936611666 +2024-07-28 18:23:37,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=188773.33333333334, ans=0.125 +2024-07-28 18:23:38,960 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.747e+01 6.354e+01 7.150e+01 1.051e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 18:23:38,993 INFO [train.py:1114] (0/4) Epoch 14, batch 8700, loss[loss=0.1728, simple_loss=0.2703, pruned_loss=0.03763, over 4765.00 frames. ], tot_loss[loss=0.183, simple_loss=0.272, pruned_loss=0.04697, over 937867.99 frames. 
], batch size: 13, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:23:42,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188786.66666666666, ans=0.125 +2024-07-28 18:23:43,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=188786.66666666666, ans=0.125 +2024-07-28 18:23:55,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188813.33333333334, ans=0.125 +2024-07-28 18:23:58,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-07-28 18:24:08,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=188840.0, ans=0.125 +2024-07-28 18:24:13,572 INFO [train.py:1114] (0/4) Epoch 14, batch 8750, loss[loss=0.2172, simple_loss=0.2969, pruned_loss=0.0687, over 4697.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2724, pruned_loss=0.04713, over 936320.56 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:24:22,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=188866.66666666666, ans=0.125 +2024-07-28 18:24:30,323 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.03 vs. limit=22.5 +2024-07-28 18:24:31,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=188880.0, ans=0.2 +2024-07-28 18:24:32,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=188893.33333333334, ans=0.2 +2024-07-28 18:24:35,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188893.33333333334, ans=0.125 +2024-07-28 18:24:40,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=188906.66666666666, ans=0.125 +2024-07-28 18:24:45,712 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.401e+01 5.577e+01 5.996e+01 6.718e+01 9.459e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 18:24:45,745 INFO [train.py:1114] (0/4) Epoch 14, batch 8800, loss[loss=0.1959, simple_loss=0.2941, pruned_loss=0.04888, over 4939.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2731, pruned_loss=0.04735, over 937251.68 frames. ], batch size: 14, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:24:46,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188920.0, ans=0.125 +2024-07-28 18:24:47,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=188920.0, ans=0.125 +2024-07-28 18:24:53,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.40 vs. 
limit=6.0 +2024-07-28 18:24:59,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=188946.66666666666, ans=0.125 +2024-07-28 18:25:08,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=188960.0, ans=0.125 +2024-07-28 18:25:18,783 INFO [train.py:1114] (0/4) Epoch 14, batch 8850, loss[loss=0.2213, simple_loss=0.2973, pruned_loss=0.07263, over 4554.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2734, pruned_loss=0.04759, over 931828.82 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:25:21,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.29 vs. limit=22.5 +2024-07-28 18:25:25,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=189000.0, ans=0.0 +2024-07-28 18:25:37,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189013.33333333334, ans=0.1 +2024-07-28 18:25:51,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.385e+01 5.694e+01 6.232e+01 7.298e+01 9.650e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 18:25:51,440 INFO [train.py:1114] (0/4) Epoch 14, batch 8900, loss[loss=0.154, simple_loss=0.2303, pruned_loss=0.03888, over 4946.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2738, pruned_loss=0.04781, over 929626.28 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:26:19,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=189106.66666666666, ans=15.0 +2024-07-28 18:26:22,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189106.66666666666, ans=0.1 +2024-07-28 18:26:24,557 INFO [train.py:1114] (0/4) Epoch 14, batch 8950, loss[loss=0.2061, simple_loss=0.2979, pruned_loss=0.05716, over 4607.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.04758, over 930130.19 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:26:35,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189133.33333333334, ans=0.125 +2024-07-28 18:26:40,419 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.00 vs. limit=10.0 +2024-07-28 18:26:49,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.86 vs. limit=15.0 +2024-07-28 18:26:54,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.24 vs. limit=15.0 +2024-07-28 18:26:57,059 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.595e+01 6.149e+01 7.157e+01 9.804e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 18:26:57,092 INFO [train.py:1114] (0/4) Epoch 14, batch 9000, loss[loss=0.1897, simple_loss=0.2768, pruned_loss=0.05133, over 4646.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.273, pruned_loss=0.04731, over 933237.79 frames. 
], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:26:57,093 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 18:27:02,492 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.5061, 2.6299, 3.2953, 3.3171], device='cuda:0') +2024-07-28 18:27:02,619 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4900, 4.2828, 3.2700, 3.0946], device='cuda:0') +2024-07-28 18:27:09,002 INFO [train.py:1146] (0/4) Epoch 14, validation: loss=0.1644, simple_loss=0.2676, pruned_loss=0.03058, over 944034.00 frames. +2024-07-28 18:27:09,004 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 18:27:25,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=189213.33333333334, ans=0.2 +2024-07-28 18:27:30,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=189226.66666666666, ans=0.125 +2024-07-28 18:27:31,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=189226.66666666666, ans=0.95 +2024-07-28 18:27:37,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=189240.0, ans=0.0 +2024-07-28 18:27:38,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=189240.0, ans=0.035 +2024-07-28 18:27:41,985 INFO [train.py:1114] (0/4) Epoch 14, batch 9050, loss[loss=0.1595, simple_loss=0.2384, pruned_loss=0.04027, over 4520.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2732, pruned_loss=0.04748, over 933920.40 frames. ], batch size: 10, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:27:42,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=189253.33333333334, ans=0.125 +2024-07-28 18:27:42,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=189253.33333333334, ans=0.0 +2024-07-28 18:27:46,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=189253.33333333334, ans=0.125 +2024-07-28 18:27:49,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=189266.66666666666, ans=0.125 +2024-07-28 18:27:51,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189266.66666666666, ans=0.125 +2024-07-28 18:28:00,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.87 vs. limit=15.0 +2024-07-28 18:28:02,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.28 vs. 
limit=15.0 +2024-07-28 18:28:14,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=189320.0, ans=0.125 +2024-07-28 18:28:14,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.696e+01 6.239e+01 6.974e+01 1.014e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 18:28:14,860 INFO [train.py:1114] (0/4) Epoch 14, batch 9100, loss[loss=0.177, simple_loss=0.2722, pruned_loss=0.04089, over 4942.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2726, pruned_loss=0.04699, over 936702.13 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:28:14,918 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:28:16,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=189320.0, ans=0.0 +2024-07-28 18:28:23,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=189333.33333333334, ans=0.125 +2024-07-28 18:28:37,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=189346.66666666666, ans=0.2 +2024-07-28 18:28:54,515 INFO [train.py:1114] (0/4) Epoch 14, batch 9150, loss[loss=0.1738, simple_loss=0.2698, pruned_loss=0.03884, over 4807.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2727, pruned_loss=0.04703, over 936175.48 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:28:56,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=189386.66666666666, ans=0.2 +2024-07-28 18:29:00,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189386.66666666666, ans=0.125 +2024-07-28 18:29:02,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=12.0 +2024-07-28 18:29:19,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189426.66666666666, ans=0.125 +2024-07-28 18:29:22,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=189440.0, ans=0.025 +2024-07-28 18:29:25,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=189440.0, ans=0.025 +2024-07-28 18:29:27,776 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.765e+01 6.391e+01 7.042e+01 1.009e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 18:29:27,809 INFO [train.py:1114] (0/4) Epoch 14, batch 9200, loss[loss=0.1691, simple_loss=0.2513, pruned_loss=0.04346, over 4858.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2726, pruned_loss=0.04732, over 937847.11 frames. 
], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:29:33,941 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:29:36,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=189466.66666666666, ans=0.125 +2024-07-28 18:29:43,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.40 vs. limit=15.0 +2024-07-28 18:29:44,869 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.77 vs. limit=12.0 +2024-07-28 18:29:51,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0 +2024-07-28 18:29:51,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.60 vs. limit=10.0 +2024-07-28 18:29:55,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=189506.66666666666, ans=0.2 +2024-07-28 18:29:56,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=189506.66666666666, ans=0.2 +2024-07-28 18:30:02,096 INFO [train.py:1114] (0/4) Epoch 14, batch 9250, loss[loss=0.2075, simple_loss=0.3003, pruned_loss=0.05735, over 4640.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2737, pruned_loss=0.04742, over 938328.61 frames. ], batch size: 13, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:30:17,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=189546.66666666666, ans=0.0 +2024-07-28 18:30:21,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=189560.0, ans=0.125 +2024-07-28 18:30:23,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=15.0 +2024-07-28 18:30:31,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=189573.33333333334, ans=0.0 +2024-07-28 18:30:34,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.13 vs. limit=15.0 +2024-07-28 18:30:37,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189573.33333333334, ans=0.125 +2024-07-28 18:30:38,951 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.722e+01 6.128e+01 6.836e+01 1.013e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 18:30:38,984 INFO [train.py:1114] (0/4) Epoch 14, batch 9300, loss[loss=0.1747, simple_loss=0.2537, pruned_loss=0.04789, over 4786.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2725, pruned_loss=0.04752, over 937914.05 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:30:52,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.72 vs. 
limit=10.0 +2024-07-28 18:30:53,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=189613.33333333334, ans=0.0 +2024-07-28 18:30:53,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189613.33333333334, ans=0.125 +2024-07-28 18:30:57,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=189626.66666666666, ans=0.2 +2024-07-28 18:31:02,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.80 vs. limit=15.0 +2024-07-28 18:35:03,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=189640.0, ans=0.125 +2024-07-28 18:35:05,528 INFO [train.py:1114] (0/4) Epoch 14, batch 9350, loss[loss=0.181, simple_loss=0.2578, pruned_loss=0.05213, over 4783.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.273, pruned_loss=0.04766, over 934667.18 frames. ], batch size: 11, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:35:23,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=189680.0, ans=0.125 +2024-07-28 18:35:23,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=189680.0, ans=0.125 +2024-07-28 18:35:31,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=189706.66666666666, ans=0.1 +2024-07-28 18:35:33,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189706.66666666666, ans=0.0 +2024-07-28 18:35:38,443 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.700e+01 6.300e+01 7.033e+01 1.050e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 18:35:39,330 INFO [train.py:1114] (0/4) Epoch 14, batch 9400, loss[loss=0.193, simple_loss=0.2776, pruned_loss=0.05427, over 4702.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.274, pruned_loss=0.04817, over 933025.72 frames. ], batch size: 13, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:35:48,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189733.33333333334, ans=0.125 +2024-07-28 18:35:50,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=189733.33333333334, ans=0.125 +2024-07-28 18:35:51,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=189746.66666666666, ans=0.0 +2024-07-28 18:35:56,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=189746.66666666666, ans=0.04949747468305833 +2024-07-28 18:36:12,486 INFO [train.py:1114] (0/4) Epoch 14, batch 9450, loss[loss=0.1747, simple_loss=0.2601, pruned_loss=0.04461, over 4806.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2747, pruned_loss=0.04797, over 932179.31 frames. 
], batch size: 11, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:36:14,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=189786.66666666666, ans=0.2 +2024-07-28 18:36:14,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189786.66666666666, ans=0.125 +2024-07-28 18:36:29,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189813.33333333334, ans=0.125 +2024-07-28 18:36:35,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=189826.66666666666, ans=0.025 +2024-07-28 18:36:43,733 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.560e+01 6.240e+01 6.918e+01 1.034e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 18:36:43,766 INFO [train.py:1114] (0/4) Epoch 14, batch 9500, loss[loss=0.1676, simple_loss=0.2536, pruned_loss=0.04085, over 4707.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2743, pruned_loss=0.04781, over 934777.19 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:36:56,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189880.0, ans=0.0 +2024-07-28 18:36:58,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=189880.0, ans=0.125 +2024-07-28 18:36:58,878 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:37:00,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0 +2024-07-28 18:37:06,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0 +2024-07-28 18:37:09,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=189906.66666666666, ans=0.0 +2024-07-28 18:37:15,315 INFO [train.py:1114] (0/4) Epoch 14, batch 9550, loss[loss=0.1531, simple_loss=0.2293, pruned_loss=0.03848, over 4770.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2738, pruned_loss=0.04745, over 931911.54 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:37:22,583 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.63 vs. limit=10.0 +2024-07-28 18:37:32,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189946.66666666666, ans=0.125 +2024-07-28 18:37:33,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=189946.66666666666, ans=0.0 +2024-07-28 18:37:34,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=189960.0, ans=0.0 +2024-07-28 18:37:35,174 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.82 vs. 
limit=15.0 +2024-07-28 18:37:35,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189960.0, ans=0.1 +2024-07-28 18:37:41,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=189973.33333333334, ans=0.2 +2024-07-28 18:37:46,546 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+01 5.773e+01 6.422e+01 7.521e+01 1.253e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-28 18:37:46,579 INFO [train.py:1114] (0/4) Epoch 14, batch 9600, loss[loss=0.2234, simple_loss=0.2919, pruned_loss=0.07746, over 3551.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2742, pruned_loss=0.04812, over 931273.13 frames. ], batch size: 37, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:37:50,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=189986.66666666666, ans=0.125 +2024-07-28 18:38:00,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=190013.33333333334, ans=0.125 +2024-07-28 18:38:03,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=190013.33333333334, ans=10.0 +2024-07-28 18:38:09,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190026.66666666666, ans=0.125 +2024-07-28 18:38:11,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=190040.0, ans=0.0 +2024-07-28 18:38:13,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=190040.0, ans=0.05 +2024-07-28 18:38:14,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190040.0, ans=0.0 +2024-07-28 18:38:15,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=190040.0, ans=0.0 +2024-07-28 18:38:15,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=190040.0, ans=0.0 +2024-07-28 18:38:17,649 INFO [train.py:1114] (0/4) Epoch 14, batch 9650, loss[loss=0.2047, simple_loss=0.2976, pruned_loss=0.0559, over 4829.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2753, pruned_loss=0.04854, over 926793.37 frames. ], batch size: 16, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:38:31,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=190080.0, ans=0.05 +2024-07-28 18:38:45,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190106.66666666666, ans=0.1 +2024-07-28 18:38:46,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=190106.66666666666, ans=0.125 +2024-07-28 18:38:49,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.80 vs. 
limit=15.0 +2024-07-28 18:38:49,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.692e+01 6.329e+01 7.195e+01 1.065e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 18:38:49,798 INFO [train.py:1114] (0/4) Epoch 14, batch 9700, loss[loss=0.1767, simple_loss=0.2686, pruned_loss=0.0424, over 4166.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2749, pruned_loss=0.04855, over 925667.21 frames. ], batch size: 25, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:38:59,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=190133.33333333334, ans=0.0 +2024-07-28 18:38:59,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=190133.33333333334, ans=0.07 +2024-07-28 18:39:03,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.90 vs. limit=15.0 +2024-07-28 18:39:14,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190173.33333333334, ans=0.1 +2024-07-28 18:39:15,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=190173.33333333334, ans=0.0 +2024-07-28 18:39:20,728 INFO [train.py:1114] (0/4) Epoch 14, batch 9750, loss[loss=0.1717, simple_loss=0.267, pruned_loss=0.03814, over 4689.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2743, pruned_loss=0.0483, over 926057.53 frames. ], batch size: 15, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:39:39,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190226.66666666666, ans=0.1 +2024-07-28 18:39:40,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=190226.66666666666, ans=0.04949747468305833 +2024-07-28 18:39:42,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190226.66666666666, ans=0.0 +2024-07-28 18:39:48,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=190240.0, ans=0.0 +2024-07-28 18:39:51,829 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.577e+01 6.285e+01 7.276e+01 9.873e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 18:39:51,877 INFO [train.py:1114] (0/4) Epoch 14, batch 9800, loss[loss=0.1804, simple_loss=0.2676, pruned_loss=0.04657, over 4698.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2729, pruned_loss=0.04787, over 926337.91 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:39:57,175 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.17 vs. 
limit=15.0 +2024-07-28 18:40:00,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190266.66666666666, ans=0.1 +2024-07-28 18:40:01,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190266.66666666666, ans=0.125 +2024-07-28 18:40:07,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190280.0, ans=0.1 +2024-07-28 18:40:11,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=190293.33333333334, ans=0.0 +2024-07-28 18:40:13,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=190293.33333333334, ans=0.2 +2024-07-28 18:40:23,776 INFO [train.py:1114] (0/4) Epoch 14, batch 9850, loss[loss=0.2017, simple_loss=0.3008, pruned_loss=0.05128, over 4892.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2735, pruned_loss=0.04818, over 928357.92 frames. ], batch size: 15, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:40:24,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190320.0, ans=0.1 +2024-07-28 18:40:29,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=190333.33333333334, ans=0.04949747468305833 +2024-07-28 18:40:33,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.92 vs. limit=15.0 +2024-07-28 18:40:45,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.34 vs. limit=10.0 +2024-07-28 18:40:46,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=190360.0, ans=0.125 +2024-07-28 18:40:46,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=190360.0, ans=0.2 +2024-07-28 18:40:54,575 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.802e+01 6.503e+01 7.443e+01 1.103e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 18:40:54,622 INFO [train.py:1114] (0/4) Epoch 14, batch 9900, loss[loss=0.1928, simple_loss=0.2817, pruned_loss=0.05192, over 4852.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2755, pruned_loss=0.04905, over 927092.97 frames. 
], batch size: 16, lr: 5.25e-03, grad_scale: 32.0 +2024-07-28 18:40:54,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190386.66666666666, ans=0.125 +2024-07-28 18:40:57,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=190386.66666666666, ans=0.0 +2024-07-28 18:41:04,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=190400.0, ans=0.0 +2024-07-28 18:41:06,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=190413.33333333334, ans=0.125 +2024-07-28 18:41:10,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=190413.33333333334, ans=0.05 +2024-07-28 18:41:10,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0 +2024-07-28 18:41:20,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=190440.0, ans=0.125 +2024-07-28 18:41:22,870 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=190440.0, ans=0.07 +2024-07-28 18:41:23,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. limit=15.0 +2024-07-28 18:41:25,150 INFO [train.py:1114] (0/4) Epoch 14, batch 9950, loss[loss=0.17, simple_loss=0.2524, pruned_loss=0.04384, over 4803.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2747, pruned_loss=0.04885, over 929611.19 frames. ], batch size: 11, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:41:31,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=190466.66666666666, ans=0.0 +2024-07-28 18:41:46,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=190493.33333333334, ans=0.0 +2024-07-28 18:41:55,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=190520.0, ans=0.2 +2024-07-28 18:41:56,004 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 6.024e+01 6.660e+01 7.558e+01 1.184e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 18:41:56,051 INFO [train.py:1114] (0/4) Epoch 14, batch 10000, loss[loss=0.1969, simple_loss=0.278, pruned_loss=0.05785, over 4649.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2773, pruned_loss=0.04974, over 926895.81 frames. ], batch size: 16, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:42:05,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190533.33333333334, ans=0.125 +2024-07-28 18:42:22,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190573.33333333334, ans=0.125 +2024-07-28 18:42:28,095 INFO [train.py:1114] (0/4) Epoch 14, batch 10050, loss[loss=0.2169, simple_loss=0.3, pruned_loss=0.06689, over 3083.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.281, pruned_loss=0.05139, over 913872.58 frames. 
], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:42:29,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=190586.66666666666, ans=0.125 +2024-07-28 18:42:34,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190600.0, ans=0.125 +2024-07-28 18:42:41,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=190613.33333333334, ans=0.125 +2024-07-28 18:42:50,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=190626.66666666666, ans=0.1 +2024-07-28 18:43:02,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.109e+01 6.099e+01 7.025e+01 7.577e+01 1.043e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-28 18:43:02,302 INFO [train.py:1114] (0/4) Epoch 14, batch 10100, loss[loss=0.192, simple_loss=0.2788, pruned_loss=0.05258, over 3577.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2855, pruned_loss=0.05619, over 861899.12 frames. ], batch size: 36, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:43:06,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=190653.33333333334, ans=0.0 +2024-07-28 18:43:07,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=190653.33333333334, ans=0.2 +2024-07-28 18:43:09,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=190666.66666666666, ans=0.0 +2024-07-28 18:43:18,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=190680.0, ans=0.125 +2024-07-28 18:43:33,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=190706.66666666666, ans=0.09899494936611666 +2024-07-28 18:43:33,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=190706.66666666666, ans=0.2 +2024-07-28 18:43:33,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=190706.66666666666, ans=0.2 +2024-07-28 18:43:34,827 INFO [train.py:1114] (0/4) Epoch 14, batch 10150, loss[loss=0.2391, simple_loss=0.3123, pruned_loss=0.08292, over 3185.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2884, pruned_loss=0.05949, over 821644.39 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:43:35,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=190720.0, ans=0.125 +2024-07-28 18:43:38,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190720.0, ans=0.1 +2024-07-28 18:44:06,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.761e+01 6.519e+01 6.916e+01 7.399e+01 9.914e+01, threshold=1.383e+02, percent-clipped=0.0 +2024-07-28 18:44:06,166 INFO [train.py:1114] (0/4) Epoch 14, batch 10200, loss[loss=0.2023, simple_loss=0.2893, pruned_loss=0.05761, over 3483.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2915, pruned_loss=0.06286, over 787090.64 frames. 
], batch size: 37, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:44:06,617 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-07-28 18:44:20,201 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-14.pt +2024-07-28 18:45:00,516 INFO [train.py:1114] (0/4) Epoch 15, batch 0, loss[loss=0.1575, simple_loss=0.2521, pruned_loss=0.0314, over 4847.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2521, pruned_loss=0.0314, over 4847.00 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:45:00,517 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 18:45:12,054 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1655, simple_loss=0.2703, pruned_loss=0.03031, over 944034.00 frames. +2024-07-28 18:45:12,054 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 18:45:12,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=190816.0, ans=0.2 +2024-07-28 18:45:13,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=190816.0, ans=0.2 +2024-07-28 18:45:17,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190816.0, ans=0.125 +2024-07-28 18:45:41,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=190869.33333333334, ans=0.0 +2024-07-28 18:45:41,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.99 vs. limit=12.0 +2024-07-28 18:45:46,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190869.33333333334, ans=0.125 +2024-07-28 18:45:49,224 INFO [train.py:1114] (0/4) Epoch 15, batch 50, loss[loss=0.1688, simple_loss=0.2536, pruned_loss=0.04197, over 4613.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2786, pruned_loss=0.05035, over 205955.74 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:45:50,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=190882.66666666666, ans=0.125 +2024-07-28 18:45:51,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 18:45:53,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=190882.66666666666, ans=0.125 +2024-07-28 18:45:56,834 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0 +2024-07-28 18:46:00,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=190896.0, ans=0.0 +2024-07-28 18:46:01,960 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:46:05,042 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.19 vs. 
limit=15.0 +2024-07-28 18:46:07,820 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.683e+01 6.465e+01 7.180e+01 1.067e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 18:46:20,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=190922.66666666666, ans=0.125 +2024-07-28 18:46:30,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=190936.0, ans=0.125 +2024-07-28 18:46:33,569 INFO [train.py:1114] (0/4) Epoch 15, batch 100, loss[loss=0.1731, simple_loss=0.2664, pruned_loss=0.03992, over 4636.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2765, pruned_loss=0.04885, over 364572.25 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:46:35,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190949.33333333334, ans=0.125 +2024-07-28 18:46:40,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=190962.66666666666, ans=0.2 +2024-07-28 18:46:42,001 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.52 vs. limit=22.5 +2024-07-28 18:47:07,596 INFO [train.py:1114] (0/4) Epoch 15, batch 150, loss[loss=0.1494, simple_loss=0.2343, pruned_loss=0.03231, over 4614.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2748, pruned_loss=0.0475, over 493703.68 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:47:08,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=191016.0, ans=0.125 +2024-07-28 18:47:16,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191029.33333333334, ans=0.125 +2024-07-28 18:47:27,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191042.66666666666, ans=0.1 +2024-07-28 18:47:28,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.436e+01 5.988e+01 6.579e+01 9.241e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 18:47:30,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=191056.0, ans=0.0 +2024-07-28 18:47:39,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=191069.33333333334, ans=0.2 +2024-07-28 18:47:42,918 INFO [train.py:1114] (0/4) Epoch 15, batch 200, loss[loss=0.1787, simple_loss=0.2761, pruned_loss=0.0407, over 4501.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2736, pruned_loss=0.04694, over 593598.25 frames. ], batch size: 21, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:47:44,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=191082.66666666666, ans=0.0 +2024-07-28 18:47:54,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=191096.0, ans=0.0 +2024-07-28 18:48:11,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.31 vs. 
limit=15.0 +2024-07-28 18:48:14,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191136.0, ans=0.125 +2024-07-28 18:48:17,637 INFO [train.py:1114] (0/4) Epoch 15, batch 250, loss[loss=0.2025, simple_loss=0.2935, pruned_loss=0.05577, over 4636.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2733, pruned_loss=0.04715, over 670325.59 frames. ], batch size: 16, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:48:21,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=191149.33333333334, ans=0.125 +2024-07-28 18:48:32,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-07-28 18:48:36,569 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.893e+01 6.659e+01 7.264e+01 1.310e+02, threshold=1.332e+02, percent-clipped=1.0 +2024-07-28 18:48:38,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=191189.33333333334, ans=0.015 +2024-07-28 18:48:41,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=191189.33333333334, ans=0.0 +2024-07-28 18:48:51,301 INFO [train.py:1114] (0/4) Epoch 15, batch 300, loss[loss=0.1917, simple_loss=0.2797, pruned_loss=0.0518, over 4806.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2723, pruned_loss=0.04632, over 729953.21 frames. ], batch size: 15, lr: 5.06e-03, grad_scale: 64.0 +2024-07-28 18:48:51,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=191216.0, ans=0.125 +2024-07-28 18:48:51,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.36 vs. limit=15.0 +2024-07-28 18:49:12,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=191256.0, ans=10.0 +2024-07-28 18:49:26,577 INFO [train.py:1114] (0/4) Epoch 15, batch 350, loss[loss=0.1684, simple_loss=0.2468, pruned_loss=0.04498, over 4935.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2727, pruned_loss=0.04655, over 776179.41 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 64.0 +2024-07-28 18:49:29,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.48 vs. limit=15.0 +2024-07-28 18:49:45,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=191309.33333333334, ans=0.125 +2024-07-28 18:49:47,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.65 vs. 
limit=15.0 +2024-07-28 18:49:47,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=191309.33333333334, ans=0.0 +2024-07-28 18:49:47,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=191309.33333333334, ans=0.125 +2024-07-28 18:49:56,401 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.550e+01 6.008e+01 7.215e+01 1.087e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 18:49:57,327 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:50:04,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=191336.0, ans=0.125 +2024-07-28 18:50:10,471 INFO [train.py:1114] (0/4) Epoch 15, batch 400, loss[loss=0.2027, simple_loss=0.2991, pruned_loss=0.05314, over 4688.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2729, pruned_loss=0.04645, over 813907.09 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:50:13,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=191349.33333333334, ans=0.125 +2024-07-28 18:50:39,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=191402.66666666666, ans=0.125 +2024-07-28 18:50:44,702 INFO [train.py:1114] (0/4) Epoch 15, batch 450, loss[loss=0.1906, simple_loss=0.2691, pruned_loss=0.05605, over 4634.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2736, pruned_loss=0.04683, over 839529.97 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:50:45,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=191416.0, ans=0.05 +2024-07-28 18:50:55,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.40 vs. limit=15.0 +2024-07-28 18:50:55,871 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.48 vs. limit=15.0 +2024-07-28 18:50:57,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.56 vs. limit=15.0 +2024-07-28 18:51:03,919 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.595e+01 6.045e+01 6.958e+01 9.344e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 18:51:10,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=191456.0, ans=0.2 +2024-07-28 18:51:18,076 INFO [train.py:1114] (0/4) Epoch 15, batch 500, loss[loss=0.2303, simple_loss=0.3101, pruned_loss=0.07524, over 4680.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2719, pruned_loss=0.04595, over 861788.24 frames. 
], batch size: 15, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:51:28,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=191496.0, ans=0.125 +2024-07-28 18:51:30,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=191496.0, ans=0.125 +2024-07-28 18:51:35,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=191509.33333333334, ans=0.125 +2024-07-28 18:51:41,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191522.66666666666, ans=0.0 +2024-07-28 18:51:43,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191522.66666666666, ans=0.125 +2024-07-28 18:51:44,716 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.92 vs. limit=15.0 +2024-07-28 18:51:45,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=191522.66666666666, ans=0.0 +2024-07-28 18:51:48,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191536.0, ans=0.125 +2024-07-28 18:51:53,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=191536.0, ans=0.0 +2024-07-28 18:51:54,373 INFO [train.py:1114] (0/4) Epoch 15, batch 550, loss[loss=0.192, simple_loss=0.3026, pruned_loss=0.04065, over 4643.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2717, pruned_loss=0.04574, over 878201.80 frames. ], batch size: 17, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:52:02,196 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.72 vs. limit=15.0 +2024-07-28 18:52:04,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=191562.66666666666, ans=0.125 +2024-07-28 18:52:08,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.68 vs. limit=5.0 +2024-07-28 18:52:12,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191576.0, ans=0.125 +2024-07-28 18:52:13,590 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.498e+01 5.942e+01 6.485e+01 9.965e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 18:52:20,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=191602.66666666666, ans=0.125 +2024-07-28 18:52:27,566 INFO [train.py:1114] (0/4) Epoch 15, batch 600, loss[loss=0.1947, simple_loss=0.2873, pruned_loss=0.05101, over 4626.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2722, pruned_loss=0.04589, over 892655.54 frames. 
], batch size: 16, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:52:33,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=191616.0, ans=0.125 +2024-07-28 18:52:34,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191629.33333333334, ans=0.125 +2024-07-28 18:52:36,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=191629.33333333334, ans=0.2 +2024-07-28 18:52:37,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=191629.33333333334, ans=0.125 +2024-07-28 18:52:49,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.35 vs. limit=12.0 +2024-07-28 18:52:57,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=191669.33333333334, ans=0.0 +2024-07-28 18:52:58,510 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:53:03,044 INFO [train.py:1114] (0/4) Epoch 15, batch 650, loss[loss=0.1779, simple_loss=0.2657, pruned_loss=0.045, over 4762.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2709, pruned_loss=0.04575, over 904349.80 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:53:12,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=191696.0, ans=0.125 +2024-07-28 18:53:16,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=191709.33333333334, ans=0.125 +2024-07-28 18:53:22,451 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.431e+01 6.039e+01 6.829e+01 9.137e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 18:53:23,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=191722.66666666666, ans=0.125 +2024-07-28 18:53:24,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=22.5 +2024-07-28 18:53:38,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=191749.33333333334, ans=0.07 +2024-07-28 18:53:38,718 INFO [train.py:1114] (0/4) Epoch 15, batch 700, loss[loss=0.1548, simple_loss=0.244, pruned_loss=0.03283, over 4644.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2718, pruned_loss=0.04619, over 911987.36 frames. 
], batch size: 12, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:53:40,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191749.33333333334, ans=0.1 +2024-07-28 18:53:40,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191749.33333333334, ans=0.1 +2024-07-28 18:53:44,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=191749.33333333334, ans=0.125 +2024-07-28 18:53:48,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=191762.66666666666, ans=0.5 +2024-07-28 18:53:51,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=191776.0, ans=0.125 +2024-07-28 18:53:57,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=191776.0, ans=0.125 +2024-07-28 18:54:06,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=191802.66666666666, ans=0.125 +2024-07-28 18:54:11,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=191816.0, ans=0.125 +2024-07-28 18:54:11,849 INFO [train.py:1114] (0/4) Epoch 15, batch 750, loss[loss=0.1759, simple_loss=0.2601, pruned_loss=0.04589, over 4685.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2705, pruned_loss=0.04588, over 918118.83 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:54:12,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.81 vs. limit=22.5 +2024-07-28 18:54:24,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191842.66666666666, ans=0.1 +2024-07-28 18:54:25,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=191842.66666666666, ans=0.125 +2024-07-28 18:54:31,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.222e+01 5.661e+01 6.305e+01 7.556e+01 1.211e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 18:54:42,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.72 vs. limit=15.0 +2024-07-28 18:54:43,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=191869.33333333334, ans=0.0 +2024-07-28 18:54:48,289 INFO [train.py:1114] (0/4) Epoch 15, batch 800, loss[loss=0.1653, simple_loss=0.2529, pruned_loss=0.03891, over 4844.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2711, pruned_loss=0.04605, over 923335.75 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:55:07,530 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. 
limit=6.0 +2024-07-28 18:55:18,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191936.0, ans=0.1 +2024-07-28 18:55:23,963 INFO [train.py:1114] (0/4) Epoch 15, batch 850, loss[loss=0.1749, simple_loss=0.279, pruned_loss=0.03539, over 4664.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2708, pruned_loss=0.04582, over 927365.01 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 18:55:24,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=191949.33333333334, ans=0.2 +2024-07-28 18:55:37,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191976.0, ans=0.125 +2024-07-28 18:55:43,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.257e+01 5.494e+01 6.079e+01 6.525e+01 1.058e+02, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 18:55:48,962 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-144000.pt +2024-07-28 19:01:39,953 INFO [train.py:1114] (0/4) Epoch 15, batch 900, loss[loss=0.183, simple_loss=0.2622, pruned_loss=0.05191, over 4853.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2711, pruned_loss=0.0461, over 928245.61 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:01:47,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192029.33333333334, ans=0.125 +2024-07-28 19:02:05,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192056.0, ans=0.1 +2024-07-28 19:02:05,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=192069.33333333334, ans=0.025 +2024-07-28 19:02:13,183 INFO [train.py:1114] (0/4) Epoch 15, batch 950, loss[loss=0.1852, simple_loss=0.2814, pruned_loss=0.04446, over 4778.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.272, pruned_loss=0.04622, over 930426.99 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:23,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=192096.0, ans=0.125 +2024-07-28 19:02:23,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=192096.0, ans=0.125 +2024-07-28 19:02:24,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.88 vs. limit=22.5 +2024-07-28 19:02:27,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=192109.33333333334, ans=0.125 +2024-07-28 19:02:34,370 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.740e+01 6.341e+01 7.414e+01 2.683e+02, threshold=1.268e+02, percent-clipped=1.0 +2024-07-28 19:02:39,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. 
limit=15.0 +2024-07-28 19:02:42,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=192136.0, ans=0.2 +2024-07-28 19:02:45,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=192136.0, ans=0.05 +2024-07-28 19:02:48,487 INFO [train.py:1114] (0/4) Epoch 15, batch 1000, loss[loss=0.1822, simple_loss=0.272, pruned_loss=0.04624, over 4970.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2726, pruned_loss=0.04642, over 930239.99 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:56,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=192162.66666666666, ans=0.0 +2024-07-28 19:03:03,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192176.0, ans=0.1 +2024-07-28 19:03:04,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=192176.0, ans=0.125 +2024-07-28 19:03:14,734 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:03:22,057 INFO [train.py:1114] (0/4) Epoch 15, batch 1050, loss[loss=0.2338, simple_loss=0.3267, pruned_loss=0.07044, over 4878.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2709, pruned_loss=0.04613, over 932524.96 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:26,393 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.34 vs. limit=22.5 +2024-07-28 19:03:30,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=192229.33333333334, ans=0.0 +2024-07-28 19:03:30,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=192229.33333333334, ans=0.95 +2024-07-28 19:03:31,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=192229.33333333334, ans=0.125 +2024-07-28 19:03:32,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=192229.33333333334, ans=0.125 +2024-07-28 19:03:32,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=192229.33333333334, ans=0.125 +2024-07-28 19:03:34,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=192229.33333333334, ans=0.04949747468305833 +2024-07-28 19:03:41,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.47 vs. limit=22.5 +2024-07-28 19:03:41,200 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.436e+01 6.028e+01 6.736e+01 8.653e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:03:46,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=192256.0, ans=0.125 +2024-07-28 19:03:47,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.51 vs. 
limit=22.5 +2024-07-28 19:03:51,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=192269.33333333334, ans=0.0 +2024-07-28 19:03:54,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=192282.66666666666, ans=0.125 +2024-07-28 19:03:55,284 INFO [train.py:1114] (0/4) Epoch 15, batch 1100, loss[loss=0.1833, simple_loss=0.2857, pruned_loss=0.04041, over 4894.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04581, over 934704.97 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:57,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=12.0 +2024-07-28 19:03:58,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=192282.66666666666, ans=0.0 +2024-07-28 19:04:02,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=192296.0, ans=0.125 +2024-07-28 19:04:21,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=192322.66666666666, ans=0.0 +2024-07-28 19:04:21,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=192322.66666666666, ans=0.04949747468305833 +2024-07-28 19:04:25,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=192336.0, ans=0.2 +2024-07-28 19:04:30,241 INFO [train.py:1114] (0/4) Epoch 15, batch 1150, loss[loss=0.1987, simple_loss=0.295, pruned_loss=0.05118, over 4892.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2712, pruned_loss=0.04585, over 934401.43 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:04:33,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=192349.33333333334, ans=0.125 +2024-07-28 19:04:33,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=192349.33333333334, ans=0.0 +2024-07-28 19:04:36,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.07 vs. limit=22.5 +2024-07-28 19:04:38,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.45 vs. limit=15.0 +2024-07-28 19:04:51,450 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.625e+01 6.208e+01 7.192e+01 1.002e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 19:04:59,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.82 vs. limit=15.0 +2024-07-28 19:05:04,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=192402.66666666666, ans=0.125 +2024-07-28 19:05:05,556 INFO [train.py:1114] (0/4) Epoch 15, batch 1200, loss[loss=0.1972, simple_loss=0.2969, pruned_loss=0.04875, over 4866.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2726, pruned_loss=0.04667, over 933518.05 frames. 
], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:11,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=192429.33333333334, ans=0.125 +2024-07-28 19:05:18,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=192442.66666666666, ans=0.125 +2024-07-28 19:05:20,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=192442.66666666666, ans=0.125 +2024-07-28 19:05:24,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=192456.0, ans=0.035 +2024-07-28 19:05:33,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=192469.33333333334, ans=0.0 +2024-07-28 19:05:38,624 INFO [train.py:1114] (0/4) Epoch 15, batch 1250, loss[loss=0.2068, simple_loss=0.2934, pruned_loss=0.06011, over 4806.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2727, pruned_loss=0.04634, over 937406.18 frames. ], batch size: 15, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:44,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=192496.0, ans=0.125 +2024-07-28 19:05:57,776 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.598e+01 6.184e+01 7.240e+01 1.147e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 19:06:02,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.01 vs. limit=15.0 +2024-07-28 19:06:05,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=192536.0, ans=0.125 +2024-07-28 19:06:08,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.37 vs. limit=22.5 +2024-07-28 19:06:10,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=192536.0, ans=0.07 +2024-07-28 19:06:14,162 INFO [train.py:1114] (0/4) Epoch 15, batch 1300, loss[loss=0.1942, simple_loss=0.2736, pruned_loss=0.05744, over 4800.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2708, pruned_loss=0.04582, over 938918.98 frames. ], batch size: 19, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:14,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=192549.33333333334, ans=0.025 +2024-07-28 19:06:36,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=192589.33333333334, ans=0.125 +2024-07-28 19:06:43,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=192602.66666666666, ans=0.125 +2024-07-28 19:06:47,803 INFO [train.py:1114] (0/4) Epoch 15, batch 1350, loss[loss=0.1926, simple_loss=0.2927, pruned_loss=0.04621, over 4767.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2705, pruned_loss=0.04578, over 940787.21 frames. 
], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:54,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192629.33333333334, ans=0.1 +2024-07-28 19:07:03,257 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:07:03,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=192642.66666666666, ans=0.025 +2024-07-28 19:07:07,207 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.559e+01 6.171e+01 7.538e+01 1.379e+02, threshold=1.234e+02, percent-clipped=1.0 +2024-07-28 19:07:15,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=192669.33333333334, ans=0.025 +2024-07-28 19:07:16,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=192669.33333333334, ans=0.025 +2024-07-28 19:07:21,209 INFO [train.py:1114] (0/4) Epoch 15, batch 1400, loss[loss=0.133, simple_loss=0.2157, pruned_loss=0.02511, over 4692.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.27, pruned_loss=0.04542, over 942719.05 frames. ], batch size: 11, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:07:21,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=192682.66666666666, ans=0.015 +2024-07-28 19:07:38,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=192709.33333333334, ans=0.0 +2024-07-28 19:07:47,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=192736.0, ans=0.125 +2024-07-28 19:07:56,458 INFO [train.py:1114] (0/4) Epoch 15, batch 1450, loss[loss=0.1655, simple_loss=0.2614, pruned_loss=0.03481, over 4694.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2703, pruned_loss=0.04541, over 943003.18 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:07:58,573 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:07:59,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192749.33333333334, ans=0.1 +2024-07-28 19:08:02,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192762.66666666666, ans=0.125 +2024-07-28 19:08:15,576 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+01 5.633e+01 5.995e+01 6.598e+01 8.860e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 19:08:15,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.22 vs. limit=6.0 +2024-07-28 19:08:25,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=192802.66666666666, ans=0.0 +2024-07-28 19:08:29,709 INFO [train.py:1114] (0/4) Epoch 15, batch 1500, loss[loss=0.162, simple_loss=0.265, pruned_loss=0.02955, over 4809.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2717, pruned_loss=0.04564, over 942588.72 frames. 
], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:08:31,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192816.0, ans=0.1 +2024-07-28 19:08:35,664 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.12 vs. limit=6.0 +2024-07-28 19:08:37,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192829.33333333334, ans=0.1 +2024-07-28 19:08:38,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=192829.33333333334, ans=0.0 +2024-07-28 19:08:52,766 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.99 vs. limit=15.0 +2024-07-28 19:08:57,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=192869.33333333334, ans=0.025 +2024-07-28 19:09:03,670 INFO [train.py:1114] (0/4) Epoch 15, batch 1550, loss[loss=0.205, simple_loss=0.2944, pruned_loss=0.05782, over 4892.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2718, pruned_loss=0.04578, over 938708.04 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:08,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=192882.66666666666, ans=10.0 +2024-07-28 19:09:11,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192896.0, ans=0.1 +2024-07-28 19:09:20,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=192909.33333333334, ans=0.0 +2024-07-28 19:09:21,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=192909.33333333334, ans=0.125 +2024-07-28 19:09:23,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.711e+01 6.284e+01 7.073e+01 1.043e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 19:09:39,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.82 vs. limit=12.0 +2024-07-28 19:09:39,421 INFO [train.py:1114] (0/4) Epoch 15, batch 1600, loss[loss=0.1707, simple_loss=0.2692, pruned_loss=0.03607, over 4875.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04592, over 937842.78 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:46,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=192962.66666666666, ans=0.125 +2024-07-28 19:09:48,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0 +2024-07-28 19:09:59,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=192989.33333333334, ans=0.0 +2024-07-28 19:10:14,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.05 vs. 
limit=15.0 +2024-07-28 19:10:14,726 INFO [train.py:1114] (0/4) Epoch 15, batch 1650, loss[loss=0.1832, simple_loss=0.2773, pruned_loss=0.04457, over 4662.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2707, pruned_loss=0.04577, over 937897.77 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:10:18,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=193016.0, ans=0.0 +2024-07-28 19:10:21,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=193029.33333333334, ans=0.125 +2024-07-28 19:10:21,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193029.33333333334, ans=0.125 +2024-07-28 19:10:24,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=193029.33333333334, ans=12.0 +2024-07-28 19:10:33,852 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 5.642e+01 6.032e+01 7.016e+01 1.079e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:10:47,878 INFO [train.py:1114] (0/4) Epoch 15, batch 1700, loss[loss=0.144, simple_loss=0.234, pruned_loss=0.02704, over 4719.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2705, pruned_loss=0.04544, over 939631.52 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:10:52,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.76 vs. limit=10.0 +2024-07-28 19:10:55,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=193096.0, ans=0.125 +2024-07-28 19:11:04,217 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0 +2024-07-28 19:11:05,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193109.33333333334, ans=0.0 +2024-07-28 19:11:10,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=193122.66666666666, ans=0.125 +2024-07-28 19:11:14,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-07-28 19:11:18,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=193136.0, ans=0.125 +2024-07-28 19:11:21,436 INFO [train.py:1114] (0/4) Epoch 15, batch 1750, loss[loss=0.2047, simple_loss=0.2881, pruned_loss=0.06064, over 4805.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2704, pruned_loss=0.04557, over 940311.96 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:21,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.70 vs. 
limit=15.0 +2024-07-28 19:11:25,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=193149.33333333334, ans=0.125 +2024-07-28 19:11:39,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=193176.0, ans=0.125 +2024-07-28 19:11:42,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193176.0, ans=0.0 +2024-07-28 19:11:42,913 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.964e+01 5.710e+01 6.387e+01 7.487e+01 1.072e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 19:11:43,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193189.33333333334, ans=0.125 +2024-07-28 19:11:45,077 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:11:47,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193189.33333333334, ans=0.1 +2024-07-28 19:11:48,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193189.33333333334, ans=0.125 +2024-07-28 19:11:49,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193189.33333333334, ans=0.125 +2024-07-28 19:11:53,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=193202.66666666666, ans=0.125 +2024-07-28 19:11:56,802 INFO [train.py:1114] (0/4) Epoch 15, batch 1800, loss[loss=0.1523, simple_loss=0.2335, pruned_loss=0.03553, over 4638.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2701, pruned_loss=0.04532, over 940903.61 frames. ], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:57,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0 +2024-07-28 19:12:07,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=193229.33333333334, ans=0.125 +2024-07-28 19:12:16,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=193256.0, ans=0.125 +2024-07-28 19:12:26,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=193269.33333333334, ans=0.125 +2024-07-28 19:12:30,439 INFO [train.py:1114] (0/4) Epoch 15, batch 1850, loss[loss=0.2122, simple_loss=0.303, pruned_loss=0.06073, over 4800.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04566, over 940763.13 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:12:39,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=193296.0, ans=0.2 +2024-07-28 19:12:41,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.85 vs. 
limit=22.5 +2024-07-28 19:12:49,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=193309.33333333334, ans=0.2 +2024-07-28 19:12:50,209 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.579e+01 6.374e+01 7.062e+01 1.422e+02, threshold=1.275e+02, percent-clipped=2.0 +2024-07-28 19:13:26,500 INFO [train.py:1114] (0/4) Epoch 15, batch 1900, loss[loss=0.2031, simple_loss=0.3021, pruned_loss=0.05205, over 4665.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2717, pruned_loss=0.04605, over 942236.78 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:13:33,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=193362.66666666666, ans=0.2 +2024-07-28 19:13:45,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=193376.0, ans=0.125 +2024-07-28 19:14:01,207 INFO [train.py:1114] (0/4) Epoch 15, batch 1950, loss[loss=0.1569, simple_loss=0.2554, pruned_loss=0.02916, over 4894.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2723, pruned_loss=0.04562, over 944045.16 frames. ], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:14:10,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=193429.33333333334, ans=0.125 +2024-07-28 19:14:11,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=193429.33333333334, ans=0.0 +2024-07-28 19:14:19,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=12.0 +2024-07-28 19:14:20,683 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.675e+01 6.115e+01 6.828e+01 9.814e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 19:14:31,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=193469.33333333334, ans=0.125 +2024-07-28 19:14:34,640 INFO [train.py:1114] (0/4) Epoch 15, batch 2000, loss[loss=0.1511, simple_loss=0.2303, pruned_loss=0.03595, over 4791.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2723, pruned_loss=0.04566, over 941374.80 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:14:34,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=193482.66666666666, ans=0.0 +2024-07-28 19:14:49,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=193509.33333333334, ans=0.2 +2024-07-28 19:14:50,807 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.00 vs. 
limit=10.0 +2024-07-28 19:14:58,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=193522.66666666666, ans=0.125 +2024-07-28 19:15:03,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=193536.0, ans=0.125 +2024-07-28 19:15:08,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=193549.33333333334, ans=0.125 +2024-07-28 19:15:08,815 INFO [train.py:1114] (0/4) Epoch 15, batch 2050, loss[loss=0.1784, simple_loss=0.2615, pruned_loss=0.04767, over 4611.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2713, pruned_loss=0.04546, over 939563.54 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:20,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=193562.66666666666, ans=0.0 +2024-07-28 19:15:26,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=193576.0, ans=0.07 +2024-07-28 19:15:31,321 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.698e+01 6.282e+01 7.137e+01 1.040e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 19:15:45,953 INFO [train.py:1114] (0/4) Epoch 15, batch 2100, loss[loss=0.1847, simple_loss=0.2926, pruned_loss=0.03837, over 4762.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2714, pruned_loss=0.04515, over 941427.99 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:50,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=193616.0, ans=0.0 +2024-07-28 19:15:52,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=193629.33333333334, ans=0.125 +2024-07-28 19:15:56,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=193629.33333333334, ans=0.07 +2024-07-28 19:16:11,314 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.68 vs. limit=15.0 +2024-07-28 19:16:17,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193669.33333333334, ans=0.1 +2024-07-28 19:16:19,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=193669.33333333334, ans=0.125 +2024-07-28 19:16:20,839 INFO [train.py:1114] (0/4) Epoch 15, batch 2150, loss[loss=0.2123, simple_loss=0.3025, pruned_loss=0.06109, over 4906.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.272, pruned_loss=0.04566, over 944581.63 frames. 
], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:22,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193682.66666666666, ans=0.0 +2024-07-28 19:16:24,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=193682.66666666666, ans=0.0 +2024-07-28 19:16:26,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193682.66666666666, ans=0.1 +2024-07-28 19:16:31,963 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-07-28 19:16:33,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.49 vs. limit=22.5 +2024-07-28 19:16:40,102 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.589e+01 6.207e+01 7.234e+01 9.865e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 19:16:42,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=193722.66666666666, ans=0.05 +2024-07-28 19:16:48,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=193736.0, ans=0.125 +2024-07-28 19:16:52,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=193736.0, ans=0.125 +2024-07-28 19:16:54,162 INFO [train.py:1114] (0/4) Epoch 15, batch 2200, loss[loss=0.2108, simple_loss=0.3004, pruned_loss=0.0606, over 4802.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2719, pruned_loss=0.04558, over 943975.15 frames. ], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:17:01,123 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.58 vs. limit=15.0 +2024-07-28 19:17:06,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193776.0, ans=0.125 +2024-07-28 19:17:07,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=193776.0, ans=0.125 +2024-07-28 19:17:40,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193802.66666666666, ans=0.125 +2024-07-28 19:17:43,140 INFO [train.py:1114] (0/4) Epoch 15, batch 2250, loss[loss=0.1814, simple_loss=0.2622, pruned_loss=0.05027, over 4705.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2712, pruned_loss=0.04574, over 942606.55 frames. 
], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:17:45,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193816.0, ans=0.125 +2024-07-28 19:17:56,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=193842.66666666666, ans=0.2 +2024-07-28 19:17:56,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=193842.66666666666, ans=0.0 +2024-07-28 19:17:57,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=193842.66666666666, ans=0.125 +2024-07-28 19:18:00,067 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:18:00,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.15 vs. limit=15.0 +2024-07-28 19:18:02,476 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.434e+01 5.887e+01 6.714e+01 1.189e+02, threshold=1.177e+02, percent-clipped=0.0 +2024-07-28 19:18:02,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=193856.0, ans=0.07 +2024-07-28 19:18:08,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=193856.0, ans=0.125 +2024-07-28 19:18:09,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=193869.33333333334, ans=0.0 +2024-07-28 19:18:10,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193869.33333333334, ans=0.125 +2024-07-28 19:18:16,463 INFO [train.py:1114] (0/4) Epoch 15, batch 2300, loss[loss=0.1681, simple_loss=0.2577, pruned_loss=0.0393, over 4940.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2706, pruned_loss=0.04618, over 940300.09 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:18:17,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=193882.66666666666, ans=0.5 +2024-07-28 19:18:19,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193882.66666666666, ans=0.125 +2024-07-28 19:18:19,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.12 vs. limit=15.0 +2024-07-28 19:18:20,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=193882.66666666666, ans=0.125 +2024-07-28 19:18:23,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=193896.0, ans=0.07 +2024-07-28 19:18:29,523 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:18:46,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=193936.0, ans=0.2 +2024-07-28 19:18:49,534 INFO [train.py:1114] (0/4) Epoch 15, batch 2350, loss[loss=0.2104, simple_loss=0.2973, pruned_loss=0.06176, over 4634.00 frames. 
], tot_loss[loss=0.1813, simple_loss=0.2708, pruned_loss=0.0459, over 941909.96 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:08,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.04 vs. limit=15.0 +2024-07-28 19:19:13,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.729e+01 6.304e+01 7.186e+01 9.939e+01, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 19:19:15,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=193989.33333333334, ans=0.125 +2024-07-28 19:19:27,931 INFO [train.py:1114] (0/4) Epoch 15, batch 2400, loss[loss=0.1762, simple_loss=0.2629, pruned_loss=0.04474, over 4639.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2707, pruned_loss=0.04563, over 941801.36 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:34,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=194029.33333333334, ans=0.125 +2024-07-28 19:19:40,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=194029.33333333334, ans=0.125 +2024-07-28 19:19:42,201 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:19:49,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=194056.0, ans=0.0 +2024-07-28 19:20:01,299 INFO [train.py:1114] (0/4) Epoch 15, batch 2450, loss[loss=0.1863, simple_loss=0.2819, pruned_loss=0.04538, over 4696.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2716, pruned_loss=0.04609, over 936954.57 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:04,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0 +2024-07-28 19:20:12,156 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:20:19,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=194109.33333333334, ans=0.125 +2024-07-28 19:20:21,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.395e+01 5.722e+01 6.164e+01 6.844e+01 9.609e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 19:20:30,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=194136.0, ans=0.025 +2024-07-28 19:20:32,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194136.0, ans=0.125 +2024-07-28 19:20:35,243 INFO [train.py:1114] (0/4) Epoch 15, batch 2500, loss[loss=0.1871, simple_loss=0.2833, pruned_loss=0.04541, over 4813.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2703, pruned_loss=0.04505, over 939054.73 frames. 
], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:36,675 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:20:41,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194162.66666666666, ans=0.1 +2024-07-28 19:20:43,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194162.66666666666, ans=0.0 +2024-07-28 19:20:47,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.14 vs. limit=6.0 +2024-07-28 19:20:49,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=194176.0, ans=0.125 +2024-07-28 19:20:54,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=194189.33333333334, ans=0.0 +2024-07-28 19:21:08,118 INFO [train.py:1114] (0/4) Epoch 15, batch 2550, loss[loss=0.1606, simple_loss=0.2441, pruned_loss=0.03853, over 4802.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2699, pruned_loss=0.04453, over 938912.25 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:21:11,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=194216.0, ans=0.025 +2024-07-28 19:21:14,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=194216.0, ans=0.025 +2024-07-28 19:21:16,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=194229.33333333334, ans=0.0 +2024-07-28 19:21:16,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=194229.33333333334, ans=0.0 +2024-07-28 19:21:27,773 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:21:29,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=194242.66666666666, ans=0.125 +2024-07-28 19:21:30,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.497e+01 6.084e+01 7.068e+01 9.259e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 19:21:32,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=194256.0, ans=0.125 +2024-07-28 19:21:41,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.33 vs. limit=22.5 +2024-07-28 19:21:45,688 INFO [train.py:1114] (0/4) Epoch 15, batch 2600, loss[loss=0.1803, simple_loss=0.2664, pruned_loss=0.04709, over 4905.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.271, pruned_loss=0.04537, over 937696.19 frames. 
], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:21:49,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194282.66666666666, ans=0.125 +2024-07-28 19:22:09,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=194322.66666666666, ans=0.125 +2024-07-28 19:22:12,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=194336.0, ans=0.2 +2024-07-28 19:22:13,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=194336.0, ans=0.125 +2024-07-28 19:22:14,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=194336.0, ans=0.5 +2024-07-28 19:22:19,324 INFO [train.py:1114] (0/4) Epoch 15, batch 2650, loss[loss=0.1905, simple_loss=0.2801, pruned_loss=0.05047, over 4653.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2722, pruned_loss=0.04589, over 939831.12 frames. ], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:22:21,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=194349.33333333334, ans=0.0 +2024-07-28 19:22:28,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=194362.66666666666, ans=0.0 +2024-07-28 19:22:29,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=194362.66666666666, ans=0.125 +2024-07-28 19:22:33,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194376.0, ans=0.1 +2024-07-28 19:22:39,768 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=12.0 +2024-07-28 19:22:42,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=194389.33333333334, ans=0.2 +2024-07-28 19:22:43,429 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.437e+01 6.145e+01 6.904e+01 9.658e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 19:22:53,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=194402.66666666666, ans=0.0 +2024-07-28 19:23:00,531 INFO [train.py:1114] (0/4) Epoch 15, batch 2700, loss[loss=0.2234, simple_loss=0.3215, pruned_loss=0.06262, over 4748.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.272, pruned_loss=0.04582, over 939689.49 frames. ], batch size: 14, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:06,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=194416.0, ans=0.05 +2024-07-28 19:23:06,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=194416.0, ans=0.025 +2024-07-28 19:23:10,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=194429.33333333334, ans=0.0 +2024-07-28 19:23:16,227 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.88 vs. 
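The periodic `optim.py:487` warnings describe adaptive gradient clipping: the five numbers are the min/25%/median/75%/max of recent gradient norms, and the printed threshold tracks `Clipping_scale` times the median (just above, threshold=1.229e+02 is exactly 2.0 x the 6.145e+01 median), while `percent-clipped=0.0` says no batch in the window actually exceeded it. A simplified stand-in for that bookkeeping, which in icefall lives inside the ScaledAdam optimizer; window size and structure here are assumptions:

```python
import collections

import torch

class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=window)  # recent grad norms

    def clip_(self, parameters) -> float:
        grads = [p.grad for p in parameters if p.grad is not None]
        norm = torch.linalg.vector_norm(
            torch.stack([torch.linalg.vector_norm(g) for g in grads])
        ).item()
        self.norms.append(norm)
        quartiles = torch.quantile(
            torch.tensor(list(self.norms)),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * quartiles[2].item()  # scale * median
        if norm > threshold:  # with percent-clipped=0.0, this never fired
            for g in grads:
                g.mul_(threshold / norm)
        return threshold
```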
limit=15.0 +2024-07-28 19:23:23,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0 +2024-07-28 19:23:24,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194456.0, ans=0.125 +2024-07-28 19:23:25,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=194456.0, ans=0.125 +2024-07-28 19:23:32,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194469.33333333334, ans=0.1 +2024-07-28 19:23:35,795 INFO [train.py:1114] (0/4) Epoch 15, batch 2750, loss[loss=0.159, simple_loss=0.2546, pruned_loss=0.03171, over 4705.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2707, pruned_loss=0.04557, over 940097.91 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:35,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=194482.66666666666, ans=0.125 +2024-07-28 19:23:43,011 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=12.0 +2024-07-28 19:23:56,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.701e+01 6.363e+01 7.329e+01 1.129e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 19:23:56,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-07-28 19:24:06,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=194536.0, ans=0.025 +2024-07-28 19:24:09,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=194549.33333333334, ans=0.025 +2024-07-28 19:24:10,231 INFO [train.py:1114] (0/4) Epoch 15, batch 2800, loss[loss=0.2639, simple_loss=0.3329, pruned_loss=0.09741, over 3614.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.04587, over 938082.24 frames. ], batch size: 35, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:16,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=194562.66666666666, ans=0.125 +2024-07-28 19:24:21,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.85 vs. limit=15.0 +2024-07-28 19:24:23,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.71 vs. 
limit=15.0 +2024-07-28 19:24:39,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194589.33333333334, ans=0.1 +2024-07-28 19:24:44,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=194602.66666666666, ans=0.125 +2024-07-28 19:24:47,816 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:24:48,362 INFO [train.py:1114] (0/4) Epoch 15, batch 2850, loss[loss=0.1477, simple_loss=0.2433, pruned_loss=0.02601, over 4959.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2703, pruned_loss=0.04634, over 936424.18 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:56,608 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.25 vs. limit=22.5 +2024-07-28 19:25:09,896 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.952e+01 6.499e+01 7.318e+01 1.007e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 19:25:11,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.52 vs. limit=22.5 +2024-07-28 19:25:18,539 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.63 vs. limit=22.5 +2024-07-28 19:25:23,281 INFO [train.py:1114] (0/4) Epoch 15, batch 2900, loss[loss=0.2083, simple_loss=0.2971, pruned_loss=0.0598, over 4834.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2719, pruned_loss=0.04637, over 940144.53 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:25:23,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=194682.66666666666, ans=0.125 +2024-07-28 19:25:25,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=194682.66666666666, ans=0.125 +2024-07-28 19:25:29,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194696.0, ans=0.1 +2024-07-28 19:25:30,239 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:25:36,387 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0 +2024-07-28 19:25:39,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=194709.33333333334, ans=10.0 +2024-07-28 19:25:45,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=194722.66666666666, ans=0.025 +2024-07-28 19:25:57,104 INFO [train.py:1114] (0/4) Epoch 15, batch 2950, loss[loss=0.1532, simple_loss=0.2407, pruned_loss=0.03283, over 4709.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2717, pruned_loss=0.04677, over 939232.63 frames. 
], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:26:02,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=194749.33333333334, ans=0.1 +2024-07-28 19:26:04,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=194762.66666666666, ans=0.2 +2024-07-28 19:26:08,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=194762.66666666666, ans=0.0 +2024-07-28 19:26:17,155 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+01 5.428e+01 5.966e+01 6.720e+01 8.904e+01, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:27:06,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=194789.33333333334, ans=0.125 +2024-07-28 19:27:10,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=194802.66666666666, ans=0.125 +2024-07-28 19:27:15,593 INFO [train.py:1114] (0/4) Epoch 15, batch 3000, loss[loss=0.1759, simple_loss=0.2708, pruned_loss=0.04046, over 4760.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.271, pruned_loss=0.04608, over 938059.84 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:27:15,593 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 19:27:27,986 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1635, simple_loss=0.2667, pruned_loss=0.03013, over 944034.00 frames. +2024-07-28 19:27:27,987 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 19:27:33,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=194816.0, ans=0.125 +2024-07-28 19:27:33,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=194816.0, ans=0.125 +2024-07-28 19:27:35,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=194829.33333333334, ans=0.025 +2024-07-28 19:27:43,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=194842.66666666666, ans=0.125 +2024-07-28 19:28:04,548 INFO [train.py:1114] (0/4) Epoch 15, batch 3050, loss[loss=0.1558, simple_loss=0.2427, pruned_loss=0.03447, over 4650.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2714, pruned_loss=0.04617, over 936944.41 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:09,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=194882.66666666666, ans=15.0 +2024-07-28 19:28:11,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.68 vs. 
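Batch 3000 is a validation point: the loop logs `Computing validation loss`, then a single frame-weighted summary (`loss=0.1635 ... over 944034.00 frames`) and the peak-memory line before resuming. A sketch of such a periodic validation pass, under stated assumptions; `compute_loss` and `valid_dl` are hypothetical stand-ins for the recipe's real helpers:

```python
import torch

def compute_loss(model, batch):
    """Hypothetical stand-in for the recipe's loss helper."""
    loss = model(batch["inputs"]).mean()
    return loss, float(batch["num_frames"])

def maybe_validate(model, valid_dl, batch_idx: int, valid_interval: int = 3000):
    """Every `valid_interval` batches, average the loss over the whole
    validation set, weighting each batch by its number of frames."""
    if batch_idx == 0 or batch_idx % valid_interval != 0:
        return None
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    return tot_loss / tot_frames  # e.g. 0.1635 over 944034 frames
```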
limit=15.0 +2024-07-28 19:28:14,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194896.0, ans=0.125 +2024-07-28 19:28:17,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=194909.33333333334, ans=0.2 +2024-07-28 19:28:24,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.642e+01 6.422e+01 7.764e+01 9.574e+01, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 19:28:32,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=194936.0, ans=0.125 +2024-07-28 19:28:36,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=194936.0, ans=0.04949747468305833 +2024-07-28 19:28:41,495 INFO [train.py:1114] (0/4) Epoch 15, batch 3100, loss[loss=0.2132, simple_loss=0.3092, pruned_loss=0.05858, over 4616.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2718, pruned_loss=0.0464, over 937655.49 frames. ], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:42,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194949.33333333334, ans=0.1 +2024-07-28 19:28:44,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=194949.33333333334, ans=0.125 +2024-07-28 19:28:55,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-07-28 19:29:06,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=194989.33333333334, ans=0.125 +2024-07-28 19:29:09,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=12.0 +2024-07-28 19:29:11,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195002.66666666666, ans=0.1 +2024-07-28 19:29:11,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=195002.66666666666, ans=0.125 +2024-07-28 19:29:17,392 INFO [train.py:1114] (0/4) Epoch 15, batch 3150, loss[loss=0.209, simple_loss=0.3059, pruned_loss=0.056, over 4639.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04597, over 938035.49 frames. 
], batch size: 17, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:29:22,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=195016.0, ans=0.0 +2024-07-28 19:29:23,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=195016.0, ans=0.2 +2024-07-28 19:29:44,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=195042.66666666666, ans=0.07 +2024-07-28 19:29:47,865 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.711e+01 6.349e+01 7.434e+01 1.242e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 19:29:50,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195056.0, ans=0.1 +2024-07-28 19:29:52,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=195056.0, ans=0.07 +2024-07-28 19:29:53,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195056.0, ans=0.125 +2024-07-28 19:30:01,492 INFO [train.py:1114] (0/4) Epoch 15, batch 3200, loss[loss=0.1844, simple_loss=0.2903, pruned_loss=0.03925, over 4828.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2703, pruned_loss=0.04519, over 939602.50 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:01,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195082.66666666666, ans=0.1 +2024-07-28 19:30:01,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=195082.66666666666, ans=0.0 +2024-07-28 19:30:02,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=195082.66666666666, ans=0.0 +2024-07-28 19:30:15,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=195109.33333333334, ans=0.125 +2024-07-28 19:30:27,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195122.66666666666, ans=0.1 +2024-07-28 19:30:35,969 INFO [train.py:1114] (0/4) Epoch 15, batch 3250, loss[loss=0.2305, simple_loss=0.3223, pruned_loss=0.06938, over 4934.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2705, pruned_loss=0.04514, over 940666.85 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:36,792 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:30:46,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=195162.66666666666, ans=0.0 +2024-07-28 19:30:50,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=195176.0, ans=0.125 +2024-07-28 19:30:56,194 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.478e+01 5.941e+01 6.673e+01 9.852e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 19:31:10,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.67 vs. 
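Each `train.py:1114` line pairs a one-batch `loss[... over ~4,700 frames]` with a `tot_loss[...]` whose frame count hovers near 940,000 no matter how long training runs; that is the signature of an exponentially decayed, frame-weighted running average (a decay of 0.995 with ~4,700 frames per batch settles near 4700 / 0.005 = 940,000 frames). The 0.995 is inferred from these numbers, not read out of train.py. A minimal sketch of that bookkeeping:

```python
class RunningLoss:
    """Frame-weighted exponential moving average of the training loss."""

    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0  # decayed sum of loss * frames
        self.frames = 0.0    # decayed frame count; settles near
                             # frames_per_batch / (1 - decay)

    def update(self, loss: float, num_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```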
limit=22.5 +2024-07-28 19:31:11,862 INFO [train.py:1114] (0/4) Epoch 15, batch 3300, loss[loss=0.1894, simple_loss=0.2749, pruned_loss=0.05198, over 4695.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2703, pruned_loss=0.04522, over 940876.91 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:13,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=195216.0, ans=0.125 +2024-07-28 19:31:17,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195216.0, ans=0.0 +2024-07-28 19:31:30,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=195242.66666666666, ans=0.0 +2024-07-28 19:31:32,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=195256.0, ans=0.125 +2024-07-28 19:31:40,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=195269.33333333334, ans=0.125 +2024-07-28 19:31:44,970 INFO [train.py:1114] (0/4) Epoch 15, batch 3350, loss[loss=0.1871, simple_loss=0.2797, pruned_loss=0.04728, over 4588.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2715, pruned_loss=0.04602, over 938819.41 frames. ], batch size: 17, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:45,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=195282.66666666666, ans=0.0 +2024-07-28 19:31:50,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=12.0 +2024-07-28 19:31:56,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=195296.0, ans=0.1 +2024-07-28 19:32:15,182 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:32:16,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0 +2024-07-28 19:32:19,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.70 vs. limit=15.0 +2024-07-28 19:32:19,704 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.781e+01 6.264e+01 6.966e+01 9.522e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 19:32:29,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=195336.0, ans=0.07 +2024-07-28 19:32:29,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.05 vs. limit=22.5 +2024-07-28 19:32:30,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195336.0, ans=0.1 +2024-07-28 19:32:34,326 INFO [train.py:1114] (0/4) Epoch 15, batch 3400, loss[loss=0.1725, simple_loss=0.2499, pruned_loss=0.04759, over 4810.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2721, pruned_loss=0.04673, over 937557.96 frames. 
], batch size: 11, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:32:35,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=195349.33333333334, ans=0.0 +2024-07-28 19:32:36,005 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.14 vs. limit=22.5 +2024-07-28 19:32:37,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=195349.33333333334, ans=0.125 +2024-07-28 19:32:47,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=195376.0, ans=0.125 +2024-07-28 19:32:52,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=195376.0, ans=0.125 +2024-07-28 19:32:54,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=195389.33333333334, ans=0.125 +2024-07-28 19:33:04,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.09 vs. limit=22.5 +2024-07-28 19:33:07,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=195402.66666666666, ans=0.125 +2024-07-28 19:33:11,753 INFO [train.py:1114] (0/4) Epoch 15, batch 3450, loss[loss=0.1978, simple_loss=0.2861, pruned_loss=0.05471, over 4678.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.272, pruned_loss=0.04648, over 937374.60 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:33,581 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.667e+01 6.148e+01 6.825e+01 9.914e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 19:33:33,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=195456.0, ans=0.0 +2024-07-28 19:33:49,011 INFO [train.py:1114] (0/4) Epoch 15, batch 3500, loss[loss=0.1795, simple_loss=0.2672, pruned_loss=0.04587, over 4947.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2716, pruned_loss=0.04618, over 937941.60 frames. ], batch size: 12, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:53,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=195482.66666666666, ans=0.125 +2024-07-28 19:33:58,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=195496.0, ans=0.0 +2024-07-28 19:33:59,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=195496.0, ans=0.0 +2024-07-28 19:34:07,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.53 vs. 
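The `scaling.py:1024` `Whitening:` lines track how far a layer's channel covariance is from a multiple of the identity: the metric is about 1.0 for fully decorrelated channels and climbs toward num_channels as the covariance collapses toward rank one, and a corrective penalty only kicks in past the printed limit (several entries above run close, e.g. metric=22.14 vs. limit=22.5). An approximate reimplementation of the metric; the real module is `Whiten` in icefall's `scaling.py`:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns ~1.0 for white features,
    up to num_channels for a rank-one covariance (approximate sketch)."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]  # (C, C) channel covariance
    c = cov.shape[0]
    # For symmetric cov: sum of squared eigenvalues == (cov ** 2).sum() and
    # sum of eigenvalues == cov.trace(), so this is c * E[eig^2] / (E[eig])^2,
    # a measure of eigenvalue spread.
    return (c * (cov ** 2).sum() / cov.trace() ** 2).item()

print(whitening_metric(torch.randn(4000, 512)))                # ~1.1: white
print(whitening_metric(torch.randn(4000, 1).expand(-1, 512)))  # ~512: rank one
```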
limit=12.0 +2024-07-28 19:34:09,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=195522.66666666666, ans=0.125 +2024-07-28 19:34:21,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=195536.0, ans=0.125 +2024-07-28 19:34:21,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195536.0, ans=0.1 +2024-07-28 19:34:23,087 INFO [train.py:1114] (0/4) Epoch 15, batch 3550, loss[loss=0.1678, simple_loss=0.275, pruned_loss=0.03034, over 4663.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2716, pruned_loss=0.04621, over 938519.82 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:34:30,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.97 vs. limit=15.0 +2024-07-28 19:34:34,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195562.66666666666, ans=0.125 +2024-07-28 19:34:43,328 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.567e+01 6.296e+01 7.208e+01 1.241e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 19:34:43,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=195589.33333333334, ans=0.0 +2024-07-28 19:34:52,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-07-28 19:34:56,868 INFO [train.py:1114] (0/4) Epoch 15, batch 3600, loss[loss=0.1838, simple_loss=0.2713, pruned_loss=0.04814, over 4962.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2702, pruned_loss=0.04528, over 940673.58 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:16,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=195642.66666666666, ans=0.0 +2024-07-28 19:35:20,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=195642.66666666666, ans=0.2 +2024-07-28 19:35:21,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.19 vs. limit=22.5 +2024-07-28 19:35:30,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=195669.33333333334, ans=0.125 +2024-07-28 19:35:35,846 INFO [train.py:1114] (0/4) Epoch 15, batch 3650, loss[loss=0.1893, simple_loss=0.2872, pruned_loss=0.04577, over 4904.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2691, pruned_loss=0.04521, over 941196.40 frames. 
], batch size: 15, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:51,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=195682.66666666666, ans=0.1 +2024-07-28 19:35:57,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=195696.0, ans=0.125 +2024-07-28 19:35:58,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=195696.0, ans=0.0 +2024-07-28 19:36:06,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=195722.66666666666, ans=0.125 +2024-07-28 19:36:07,036 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.599e+01 5.794e+01 6.353e+01 7.232e+01 1.193e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 19:36:16,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.57 vs. limit=15.0 +2024-07-28 19:36:21,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=195736.0, ans=0.125 +2024-07-28 19:36:22,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=18.83 vs. limit=22.5 +2024-07-28 19:36:23,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=195736.0, ans=0.125 +2024-07-28 19:36:26,520 INFO [train.py:1114] (0/4) Epoch 15, batch 3700, loss[loss=0.1669, simple_loss=0.2616, pruned_loss=0.03611, over 4924.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2699, pruned_loss=0.0454, over 942137.52 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:36:37,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=195762.66666666666, ans=0.025 +2024-07-28 19:36:45,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195776.0, ans=0.1 +2024-07-28 19:36:49,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=195789.33333333334, ans=0.125 +2024-07-28 19:36:56,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=195789.33333333334, ans=0.0 +2024-07-28 19:37:12,712 INFO [train.py:1114] (0/4) Epoch 15, batch 3750, loss[loss=0.1506, simple_loss=0.2345, pruned_loss=0.0333, over 4798.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.269, pruned_loss=0.04459, over 943715.02 frames. 
], batch size: 11, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:37:35,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=195829.33333333334, ans=10.0 +2024-07-28 19:37:45,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=195842.66666666666, ans=0.125 +2024-07-28 19:37:47,143 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.420e+01 5.433e+01 6.069e+01 6.768e+01 1.859e+02, threshold=1.214e+02, percent-clipped=1.0 +2024-07-28 19:37:48,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=195856.0, ans=0.0 +2024-07-28 19:37:49,513 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.92 vs. limit=6.0 +2024-07-28 19:37:50,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195856.0, ans=0.125 +2024-07-28 19:37:52,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=195856.0, ans=0.0 +2024-07-28 19:38:00,623 INFO [train.py:1114] (0/4) Epoch 15, batch 3800, loss[loss=0.1772, simple_loss=0.28, pruned_loss=0.03723, over 4800.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2694, pruned_loss=0.04488, over 942006.02 frames. ], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:03,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=195882.66666666666, ans=0.125 +2024-07-28 19:38:06,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=195882.66666666666, ans=0.2 +2024-07-28 19:38:11,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=195896.0, ans=0.125 +2024-07-28 19:38:12,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=195896.0, ans=0.2 +2024-07-28 19:38:17,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=195909.33333333334, ans=0.2 +2024-07-28 19:38:17,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=195909.33333333334, ans=0.07 +2024-07-28 19:38:22,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195922.66666666666, ans=0.1 +2024-07-28 19:38:25,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=195922.66666666666, ans=0.125 +2024-07-28 19:38:29,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=195936.0, ans=0.0 +2024-07-28 19:38:29,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=195936.0, ans=0.0 +2024-07-28 19:38:35,870 INFO [train.py:1114] (0/4) Epoch 15, batch 3850, loss[loss=0.1999, simple_loss=0.2717, pruned_loss=0.06402, over 4635.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2696, pruned_loss=0.04529, over 942626.73 frames. 
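The learning rate printed on the train lines creeps from 5.03e-03 down to 4.99e-03 across these ~2,500 batches, i.e. the schedule is deep in its flat tail. icefall's Zipformer recipes use the Eden schedule, which decays as a power law in both the batch and epoch counters; the sketch below uses that functional form with placeholder constants (`base_lr`, `lr_batches`, `lr_epochs` are assumptions, not values read from this run):

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    """Eden-style schedule: smooth ^-0.25 power-law decay in batch and epoch.
    All constants here are placeholders, not this run's configuration."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Deep into training the curve is nearly flat, like the small drift above:
print(eden_lr(0.05, batch=194000, epoch=15))  # ~5e-03
print(eden_lr(0.05, batch=196500, epoch=15))  # marginally smaller
```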
], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:48,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=195949.33333333334, ans=0.125 +2024-07-28 19:38:56,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195962.66666666666, ans=0.125 +2024-07-28 19:38:57,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=195976.0, ans=0.07 +2024-07-28 19:38:58,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=195976.0, ans=0.0 +2024-07-28 19:39:02,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=195976.0, ans=0.0 +2024-07-28 19:39:04,046 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.600e+01 6.154e+01 6.941e+01 1.032e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 19:39:17,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=196002.66666666666, ans=0.0 +2024-07-28 19:39:18,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=196002.66666666666, ans=0.2 +2024-07-28 19:39:55,347 INFO [train.py:1114] (0/4) Epoch 15, batch 3900, loss[loss=0.1782, simple_loss=0.273, pruned_loss=0.04165, over 4816.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2702, pruned_loss=0.04572, over 942593.31 frames. ], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:39:57,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196016.0, ans=0.125 +2024-07-28 19:39:58,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=196016.0, ans=0.0 +2024-07-28 19:40:00,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196016.0, ans=0.1 +2024-07-28 19:40:00,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=196016.0, ans=0.0 +2024-07-28 19:42:23,013 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=22.5 +2024-07-28 19:42:26,988 INFO [train.py:1114] (0/4) Epoch 15, batch 3950, loss[loss=0.181, simple_loss=0.2771, pruned_loss=0.04251, over 4857.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2712, pruned_loss=0.0463, over 944513.23 frames. 
], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:42:27,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=196082.66666666666, ans=0.0 +2024-07-28 19:42:28,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196082.66666666666, ans=0.125 +2024-07-28 19:42:31,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=196082.66666666666, ans=0.0 +2024-07-28 19:42:50,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=196109.33333333334, ans=0.2 +2024-07-28 19:42:56,266 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.595e+01 6.291e+01 6.996e+01 9.236e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 19:43:04,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=196122.66666666666, ans=0.2 +2024-07-28 19:44:10,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.46 vs. limit=15.0 +2024-07-28 19:44:13,281 INFO [train.py:1114] (0/4) Epoch 15, batch 4000, loss[loss=0.157, simple_loss=0.2289, pruned_loss=0.04253, over 4777.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2714, pruned_loss=0.04668, over 940535.84 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:44:16,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=196149.33333333334, ans=0.0 +2024-07-28 19:44:18,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=196149.33333333334, ans=0.125 +2024-07-28 19:44:18,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=196149.33333333334, ans=0.0 +2024-07-28 19:44:25,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=196162.66666666666, ans=0.2 +2024-07-28 19:44:25,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=196162.66666666666, ans=0.0 +2024-07-28 19:44:26,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196162.66666666666, ans=0.125 +2024-07-28 19:44:35,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=196189.33333333334, ans=0.125 +2024-07-28 19:44:39,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196189.33333333334, ans=0.0 +2024-07-28 19:44:41,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=196189.33333333334, ans=0.025 +2024-07-28 19:44:44,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196202.66666666666, ans=0.1 +2024-07-28 19:44:46,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=196202.66666666666, ans=0.025 +2024-07-28 19:44:47,705 INFO [scaling.py:214] (0/4) ScheduledFloat: 
name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196202.66666666666, ans=0.125 +2024-07-28 19:44:47,901 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.17 vs. limit=15.0 +2024-07-28 19:44:49,523 INFO [train.py:1114] (0/4) Epoch 15, batch 4050, loss[loss=0.2177, simple_loss=0.3096, pruned_loss=0.06288, over 3434.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2712, pruned_loss=0.04662, over 939337.99 frames. ], batch size: 38, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:45:00,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=196216.0, ans=0.1 +2024-07-28 19:45:05,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=196229.33333333334, ans=0.125 +2024-07-28 19:45:06,626 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.32 vs. limit=15.0 +2024-07-28 19:47:41,095 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=12.0 +2024-07-28 19:47:42,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196242.66666666666, ans=0.125 +2024-07-28 19:48:20,683 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.678e+01 6.345e+01 7.266e+01 1.118e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 19:48:22,239 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:48:25,020 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:48:39,039 INFO [train.py:1114] (0/4) Epoch 15, batch 4100, loss[loss=0.2076, simple_loss=0.2957, pruned_loss=0.05981, over 4904.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2707, pruned_loss=0.04621, over 938091.00 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:48:39,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196282.66666666666, ans=0.0 +2024-07-28 19:48:39,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196282.66666666666, ans=0.0 +2024-07-28 19:48:51,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196296.0, ans=0.125 +2024-07-28 19:49:03,428 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:49:22,850 INFO [train.py:1114] (0/4) Epoch 15, batch 4150, loss[loss=0.1974, simple_loss=0.2824, pruned_loss=0.05618, over 4822.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2717, pruned_loss=0.04667, over 937723.78 frames. 
], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:49:46,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=196376.0, ans=0.125 +2024-07-28 19:49:46,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196376.0, ans=0.125 +2024-07-28 19:49:51,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=12.0 +2024-07-28 19:49:52,760 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+01 5.670e+01 6.330e+01 7.256e+01 1.542e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 19:49:58,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=196389.33333333334, ans=0.05 +2024-07-28 19:49:59,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196389.33333333334, ans=0.1 +2024-07-28 19:50:04,909 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.38 vs. limit=15.0 +2024-07-28 19:50:10,364 INFO [train.py:1114] (0/4) Epoch 15, batch 4200, loss[loss=0.1947, simple_loss=0.2768, pruned_loss=0.05626, over 4908.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2706, pruned_loss=0.04611, over 939524.38 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:50:13,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=196416.0, ans=0.0 +2024-07-28 19:50:40,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=196442.66666666666, ans=0.125 +2024-07-28 19:50:52,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=196456.0, ans=0.125 +2024-07-28 19:50:57,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=196469.33333333334, ans=0.0 +2024-07-28 19:51:03,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=196482.66666666666, ans=0.125 +2024-07-28 19:51:03,996 INFO [train.py:1114] (0/4) Epoch 15, batch 4250, loss[loss=0.1679, simple_loss=0.2594, pruned_loss=0.03821, over 4645.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.271, pruned_loss=0.04597, over 940671.16 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:12,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.62 vs. 
limit=12.0 +2024-07-28 19:51:12,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=196496.0, ans=0.025 +2024-07-28 19:51:20,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=196509.33333333334, ans=0.2 +2024-07-28 19:51:23,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.654e+01 5.709e+01 6.318e+01 7.581e+01 1.158e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 19:51:24,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=196522.66666666666, ans=0.04949747468305833 +2024-07-28 19:51:31,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 19:51:33,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=196536.0, ans=0.0 +2024-07-28 19:51:37,889 INFO [train.py:1114] (0/4) Epoch 15, batch 4300, loss[loss=0.1858, simple_loss=0.279, pruned_loss=0.04633, over 4755.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2706, pruned_loss=0.04609, over 939998.04 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:43,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=196562.66666666666, ans=0.02 +2024-07-28 19:52:04,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=196589.33333333334, ans=0.125 +2024-07-28 19:52:07,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=196602.66666666666, ans=0.0 +2024-07-28 19:52:13,582 INFO [train.py:1114] (0/4) Epoch 15, batch 4350, loss[loss=0.1737, simple_loss=0.272, pruned_loss=0.03766, over 4756.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2706, pruned_loss=0.04587, over 940717.89 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:52:21,567 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-07-28 19:52:34,520 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.717e+01 6.359e+01 7.024e+01 1.032e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 19:52:35,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=196656.0, ans=0.125 +2024-07-28 19:52:38,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.98 vs. limit=6.0 +2024-07-28 19:52:46,042 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:52:46,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=196669.33333333334, ans=0.125 +2024-07-28 19:52:46,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196669.33333333334, ans=0.1 +2024-07-28 19:52:48,067 INFO [train.py:1114] (0/4) Epoch 15, batch 4400, loss[loss=0.1789, simple_loss=0.2812, pruned_loss=0.03826, over 4813.00 frames. 
], tot_loss[loss=0.1809, simple_loss=0.2709, pruned_loss=0.04549, over 940511.21 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0
+2024-07-28 19:53:16,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=196736.0, ans=0.0
+2024-07-28 19:53:18,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=196736.0, ans=0.0
+2024-07-28 19:53:25,064 INFO [train.py:1114] (0/4) Epoch 15, batch 4450, loss[loss=0.1783, simple_loss=0.2628, pruned_loss=0.04689, over 4954.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2708, pruned_loss=0.04554, over 938818.62 frames. ], batch size: 12, lr: 4.99e-03, grad_scale: 64.0
+2024-07-28 19:53:25,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196749.33333333334, ans=0.1
+2024-07-28 19:53:33,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=196749.33333333334, ans=0.0
+2024-07-28 19:55:27,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.28 vs. limit=22.5
+2024-07-28 19:55:28,386 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0
+2024-07-28 19:55:45,667 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.676e+01 6.225e+01 6.763e+01 9.651e+01, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 19:55:47,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=196789.33333333334, ans=0.05
+2024-07-28 19:55:53,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196802.66666666666, ans=0.125
+2024-07-28 19:55:59,349 INFO [train.py:1114] (0/4) Epoch 15, batch 4500, loss[loss=0.2072, simple_loss=0.3105, pruned_loss=0.05197, over 4732.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2712, pruned_loss=0.04551, over 937839.30 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0
+2024-07-28 19:56:11,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=196829.33333333334, ans=0.07
+2024-07-28 19:56:24,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196856.0, ans=0.1
+2024-07-28 19:56:31,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=196869.33333333334, ans=0.125
+2024-07-28 19:56:32,361 INFO [train.py:1114] (0/4) Epoch 15, batch 4550, loss[loss=0.1963, simple_loss=0.2898, pruned_loss=0.05141, over 4891.00 frames. ], tot_loss[loss=0.181, simple_loss=0.271, pruned_loss=0.04549, over 939833.80 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 64.0
+2024-07-28 19:56:42,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0
+2024-07-28 19:56:45,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.84 vs. limit=22.5
+2024-07-28 19:56:54,579 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.447e+01 5.965e+01 6.710e+01 1.037e+02, threshold=1.193e+02, percent-clipped=0.0
+2024-07-28 19:56:56,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=196922.66666666666, ans=0.125
+2024-07-28 19:56:58,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=196922.66666666666, ans=0.04949747468305833
+2024-07-28 19:57:06,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=196936.0, ans=0.125
+2024-07-28 19:57:07,879 INFO [train.py:1114] (0/4) Epoch 15, batch 4600, loss[loss=0.2001, simple_loss=0.3004, pruned_loss=0.04986, over 4494.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2693, pruned_loss=0.04516, over 937852.38 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 64.0
+2024-07-28 19:57:10,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=196949.33333333334, ans=0.125
+2024-07-28 19:57:21,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196976.0, ans=0.1
+2024-07-28 19:57:39,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=197002.66666666666, ans=0.125
+2024-07-28 19:57:41,453 INFO [train.py:1114] (0/4) Epoch 15, batch 4650, loss[loss=0.2148, simple_loss=0.2995, pruned_loss=0.06498, over 4830.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2705, pruned_loss=0.04548, over 939346.35 frames. ], batch size: 16, lr: 4.99e-03, grad_scale: 32.0
+2024-07-28 19:57:47,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=197016.0, ans=22.5
+2024-07-28 19:57:50,963 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 19:57:55,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=197042.66666666666, ans=0.125
+2024-07-28 19:57:57,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=197042.66666666666, ans=0.0
+2024-07-28 19:57:58,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=197042.66666666666, ans=0.125
+2024-07-28 19:58:01,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=197042.66666666666, ans=0.125
+2024-07-28 19:58:03,995 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.604e+01 6.309e+01 7.191e+01 9.740e+01, threshold=1.262e+02, percent-clipped=0.0
+2024-07-28 19:58:07,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=197056.0, ans=0.025
+2024-07-28 19:58:14,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=197069.33333333334, ans=0.0
+2024-07-28 19:58:18,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=12.0
+2024-07-28 19:58:23,514 INFO [train.py:1114] (0/4) Epoch 15, batch 4700, loss[loss=0.1535, simple_loss=0.2285, pruned_loss=0.03923, over 4714.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2699, pruned_loss=0.04546, over 936482.20 frames. ], batch size: 11, lr: 4.99e-03, grad_scale: 32.0
+2024-07-28 19:58:35,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=197096.0, ans=0.125
+2024-07-28 19:58:35,994 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.77 vs. limit=15.0
+2024-07-28 19:58:39,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=197109.33333333334, ans=0.0
+2024-07-28 19:58:47,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=197122.66666666666, ans=0.035
+2024-07-28 19:58:50,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197136.0, ans=0.1
+2024-07-28 19:58:57,041 INFO [train.py:1114] (0/4) Epoch 15, batch 4750, loss[loss=0.1807, simple_loss=0.2767, pruned_loss=0.04231, over 4564.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2703, pruned_loss=0.04621, over 935024.65 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 32.0
+2024-07-28 19:58:57,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=197149.33333333334, ans=0.07
+2024-07-28 19:59:02,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.43 vs. limit=12.0
+2024-07-28 19:59:06,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=197162.66666666666, ans=0.125
+2024-07-28 19:59:13,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197176.0, ans=0.1
+2024-07-28 19:59:14,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=197176.0, ans=0.125
+2024-07-28 19:59:17,880 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.663e+01 6.511e+01 7.507e+01 1.082e+02, threshold=1.302e+02, percent-clipped=0.0
+2024-07-28 19:59:22,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.56 vs. limit=12.0
+2024-07-28 19:59:28,367 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=197202.66666666666, ans=0.0
+2024-07-28 19:59:30,785 INFO [train.py:1114] (0/4) Epoch 15, batch 4800, loss[loss=0.2038, simple_loss=0.2972, pruned_loss=0.05516, over 4698.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2701, pruned_loss=0.04622, over 932643.58 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0
+2024-07-28 19:59:32,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197216.0, ans=0.125
+2024-07-28 19:59:34,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197216.0, ans=0.125
+2024-07-28 19:59:43,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=197229.33333333334, ans=0.0
+2024-07-28 19:59:53,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197256.0, ans=0.125
+2024-07-28 19:59:58,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=197269.33333333334, ans=0.0
+2024-07-28 20:00:01,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197269.33333333334, ans=0.1
+2024-07-28 20:00:02,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197269.33333333334, ans=0.125
+2024-07-28 20:00:04,000 INFO [train.py:1114] (0/4) Epoch 15, batch 4850, loss[loss=0.1566, simple_loss=0.2599, pruned_loss=0.0266, over 4742.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2702, pruned_loss=0.04621, over 932566.21 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 32.0
+2024-07-28 20:00:04,310 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.50 vs. limit=15.0
+2024-07-28 20:00:12,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=197282.66666666666, ans=0.1
+2024-07-28 20:00:16,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=197282.66666666666, ans=0.125
+2024-07-28 20:00:17,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=197296.0, ans=0.04949747468305833
+2024-07-28 20:00:22,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0
+2024-07-28 20:00:31,030 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197322.66666666666, ans=0.125
+2024-07-28 20:00:31,447 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.598e+01 6.100e+01 6.871e+01 9.023e+01, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 20:00:33,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197322.66666666666, ans=0.1
+2024-07-28 20:00:35,617 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-148000.pt
+2024-07-28 20:00:39,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.91 vs. limit=15.0
+2024-07-28 20:00:43,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=197336.0, ans=0.125
+2024-07-28 20:00:46,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=197336.0, ans=0.0
+2024-07-28 20:00:48,034 INFO [train.py:1114] (0/4) Epoch 15, batch 4900, loss[loss=0.1669, simple_loss=0.2665, pruned_loss=0.03366, over 4768.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2696, pruned_loss=0.04591, over 934286.51 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0
+2024-07-28 20:00:49,920 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=15.0
+2024-07-28 20:00:56,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197349.33333333334, ans=0.1
+2024-07-28 20:01:00,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0
+2024-07-28 20:01:07,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=197376.0, ans=0.125
+2024-07-28 20:01:25,309 INFO [train.py:1114] (0/4) Epoch 15, batch 4950, loss[loss=0.2348, simple_loss=0.3012, pruned_loss=0.08418, over 3511.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.271, pruned_loss=0.04645, over 931423.42 frames. ], batch size: 35, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:01:26,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=197416.0, ans=0.0
+2024-07-28 20:01:28,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=197416.0, ans=0.125
+2024-07-28 20:01:34,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.19 vs. limit=15.0
+2024-07-28 20:01:40,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=197442.66666666666, ans=0.025
+2024-07-28 20:01:41,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.02 vs. limit=22.5
+2024-07-28 20:01:42,772 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:01:45,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=197456.0, ans=0.025
+2024-07-28 20:01:45,881 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.531e+01 5.983e+01 6.546e+01 1.015e+02, threshold=1.197e+02, percent-clipped=0.0
+2024-07-28 20:01:54,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197469.33333333334, ans=0.125
+2024-07-28 20:01:58,984 INFO [train.py:1114] (0/4) Epoch 15, batch 5000, loss[loss=0.1931, simple_loss=0.2893, pruned_loss=0.04851, over 4670.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2709, pruned_loss=0.04598, over 935350.29 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:02:06,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197496.0, ans=0.125
+2024-07-28 20:02:11,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=12.0
+2024-07-28 20:02:12,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=197509.33333333334, ans=0.05
+2024-07-28 20:02:22,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=197522.66666666666, ans=0.025
+2024-07-28 20:02:33,778 INFO [train.py:1114] (0/4) Epoch 15, batch 5050, loss[loss=0.168, simple_loss=0.2572, pruned_loss=0.03945, over 4849.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2703, pruned_loss=0.04535, over 937731.15 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:02:34,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.00 vs. limit=15.0
+2024-07-28 20:02:35,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197549.33333333334, ans=0.125
+2024-07-28 20:02:36,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=197549.33333333334, ans=0.0
+2024-07-28 20:02:40,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=197562.66666666666, ans=0.125
+2024-07-28 20:02:40,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=197562.66666666666, ans=0.125
+2024-07-28 20:02:41,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197562.66666666666, ans=0.1
+2024-07-28 20:02:47,379 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:02:49,498 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=197576.0, ans=0.0
+2024-07-28 20:02:50,878 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:02:52,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=197576.0, ans=0.125
+2024-07-28 20:02:54,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197589.33333333334, ans=0.125
+2024-07-28 20:02:54,787 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+01 5.690e+01 6.527e+01 7.473e+01 1.062e+02, threshold=1.305e+02, percent-clipped=0.0
+2024-07-28 20:02:55,335 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.28 vs. limit=15.0
+2024-07-28 20:02:59,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=197589.33333333334, ans=0.125
+2024-07-28 20:03:05,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=197602.66666666666, ans=0.04949747468305833
+2024-07-28 20:03:07,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=197616.0, ans=0.125
+2024-07-28 20:03:08,388 INFO [train.py:1114] (0/4) Epoch 15, batch 5100, loss[loss=0.1608, simple_loss=0.2426, pruned_loss=0.03946, over 4776.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2708, pruned_loss=0.04569, over 935377.30 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:03:12,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.96 vs. limit=15.0
+2024-07-28 20:03:16,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=197629.33333333334, ans=0.125
+2024-07-28 20:03:24,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197642.66666666666, ans=0.125
+2024-07-28 20:03:25,776 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=15.0
+2024-07-28 20:03:41,946 INFO [train.py:1114] (0/4) Epoch 15, batch 5150, loss[loss=0.2015, simple_loss=0.2867, pruned_loss=0.05817, over 4864.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2722, pruned_loss=0.0464, over 936765.25 frames. ], batch size: 16, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:03:43,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=197682.66666666666, ans=0.0
+2024-07-28 20:03:51,646 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:03:51,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.11 vs. limit=10.0
+2024-07-28 20:03:56,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=197709.33333333334, ans=0.025
+2024-07-28 20:04:04,746 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.460e+01 5.595e+01 6.018e+01 6.676e+01 9.613e+01, threshold=1.204e+02, percent-clipped=0.0
+2024-07-28 20:04:16,833 INFO [train.py:1114] (0/4) Epoch 15, batch 5200, loss[loss=0.1558, simple_loss=0.2509, pruned_loss=0.03037, over 4667.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2714, pruned_loss=0.04551, over 936928.72 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:04:28,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.88 vs. limit=22.5
+2024-07-28 20:04:31,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=197776.0, ans=0.125
+2024-07-28 20:04:47,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=197802.66666666666, ans=0.0
+2024-07-28 20:04:50,332 INFO [train.py:1114] (0/4) Epoch 15, batch 5250, loss[loss=0.1782, simple_loss=0.2755, pruned_loss=0.04045, over 4905.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2706, pruned_loss=0.04513, over 936615.20 frames. ], batch size: 13, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:04:55,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=197816.0, ans=0.125
+2024-07-28 20:05:01,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197829.33333333334, ans=0.125
+2024-07-28 20:05:08,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197842.66666666666, ans=0.125
+2024-07-28 20:05:10,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=197856.0, ans=0.125
+2024-07-28 20:05:11,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197856.0, ans=0.125
+2024-07-28 20:05:12,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.761e+01 6.623e+01 7.609e+01 1.184e+02, threshold=1.325e+02, percent-clipped=0.0
+2024-07-28 20:05:24,611 INFO [train.py:1114] (0/4) Epoch 15, batch 5300, loss[loss=0.1629, simple_loss=0.2525, pruned_loss=0.0366, over 4617.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2702, pruned_loss=0.04534, over 934877.16 frames. ], batch size: 16, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:05:26,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.69 vs. limit=15.0
+2024-07-28 20:05:30,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.07 vs. limit=15.0
+2024-07-28 20:05:41,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=197896.0, ans=0.2
+2024-07-28 20:05:41,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=197896.0, ans=0.025
+2024-07-28 20:05:43,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=197896.0, ans=0.025
+2024-07-28 20:05:59,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197922.66666666666, ans=0.125
+2024-07-28 20:06:07,670 INFO [train.py:1114] (0/4) Epoch 15, batch 5350, loss[loss=0.1491, simple_loss=0.2465, pruned_loss=0.02588, over 4581.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2711, pruned_loss=0.04597, over 937129.80 frames. ], batch size: 10, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:06:10,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=197949.33333333334, ans=0.125
+2024-07-28 20:06:18,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=15.0
+2024-07-28 20:06:19,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.75 vs. limit=15.0
+2024-07-28 20:06:30,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=197976.0, ans=0.025
+2024-07-28 20:06:31,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=197989.33333333334, ans=0.09899494936611666
+2024-07-28 20:06:33,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.458e+01 6.203e+01 7.042e+01 1.086e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 20:06:33,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=197989.33333333334, ans=0.2
+2024-07-28 20:06:34,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.45 vs. limit=22.5
+2024-07-28 20:06:37,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.69 vs. limit=15.0
+2024-07-28 20:06:45,669 INFO [train.py:1114] (0/4) Epoch 15, batch 5400, loss[loss=0.189, simple_loss=0.2795, pruned_loss=0.04929, over 4237.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2724, pruned_loss=0.04678, over 930462.64 frames. ], batch size: 25, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:06:46,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198016.0, ans=0.125
+2024-07-28 20:06:53,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=198029.33333333334, ans=0.0
+2024-07-28 20:06:54,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0
+2024-07-28 20:06:56,463 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-07-28 20:07:13,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=198069.33333333334, ans=0.125
+2024-07-28 20:07:15,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.18 vs. limit=10.0
+2024-07-28 20:07:18,648 INFO [train.py:1114] (0/4) Epoch 15, batch 5450, loss[loss=0.1482, simple_loss=0.2319, pruned_loss=0.0322, over 4696.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2713, pruned_loss=0.04606, over 933221.18 frames. ], batch size: 11, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:07:20,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198082.66666666666, ans=0.125
+2024-07-28 20:07:22,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198082.66666666666, ans=0.0
+2024-07-28 20:07:23,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198082.66666666666, ans=0.1
+2024-07-28 20:07:35,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=198109.33333333334, ans=0.125
+2024-07-28 20:07:40,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.624e+01 6.275e+01 7.403e+01 1.039e+02, threshold=1.255e+02, percent-clipped=0.0
+2024-07-28 20:07:41,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=198122.66666666666, ans=0.2
+2024-07-28 20:07:54,577 INFO [train.py:1114] (0/4) Epoch 15, batch 5500, loss[loss=0.1981, simple_loss=0.2844, pruned_loss=0.05591, over 4384.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.271, pruned_loss=0.04613, over 931014.10 frames. ], batch size: 25, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:08:04,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198162.66666666666, ans=0.1
+2024-07-28 20:08:05,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=198162.66666666666, ans=0.125
+2024-07-28 20:08:11,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=198176.0, ans=0.07
+2024-07-28 20:08:14,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=198176.0, ans=0.125
+2024-07-28 20:08:29,856 INFO [train.py:1114] (0/4) Epoch 15, batch 5550, loss[loss=0.1633, simple_loss=0.2475, pruned_loss=0.03958, over 4705.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2706, pruned_loss=0.04593, over 933101.79 frames. ], batch size: 12, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:08:39,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.80 vs. limit=10.0
+2024-07-28 20:08:51,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.553e+01 5.843e+01 6.570e+01 7.982e+01 1.258e+02, threshold=1.314e+02, percent-clipped=1.0
+2024-07-28 20:08:55,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198256.0, ans=0.1
+2024-07-28 20:08:55,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=198256.0, ans=10.0
+2024-07-28 20:09:03,379 INFO [train.py:1114] (0/4) Epoch 15, batch 5600, loss[loss=0.1787, simple_loss=0.2694, pruned_loss=0.04404, over 4746.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2711, pruned_loss=0.04622, over 934089.27 frames. ], batch size: 14, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:09:08,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198282.66666666666, ans=0.125
+2024-07-28 20:09:14,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=198296.0, ans=0.0
+2024-07-28 20:09:38,923 INFO [train.py:1114] (0/4) Epoch 15, batch 5650, loss[loss=0.1954, simple_loss=0.2893, pruned_loss=0.05079, over 4508.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2704, pruned_loss=0.04596, over 937036.94 frames. ], batch size: 21, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:09:44,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=198349.33333333334, ans=0.125
+2024-07-28 20:09:52,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198376.0, ans=0.125
+2024-07-28 20:09:57,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=198376.0, ans=0.5
+2024-07-28 20:10:00,144 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.802e+01 6.478e+01 7.454e+01 1.007e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 20:10:11,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198416.0, ans=0.1
+2024-07-28 20:10:12,529 INFO [train.py:1114] (0/4) Epoch 15, batch 5700, loss[loss=0.2071, simple_loss=0.3096, pruned_loss=0.05225, over 4690.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2716, pruned_loss=0.04626, over 938341.25 frames. ], batch size: 13, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:10:17,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=198416.0, ans=0.0
+2024-07-28 20:10:17,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=198416.0, ans=0.125
+2024-07-28 20:10:30,968 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0
+2024-07-28 20:10:33,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198456.0, ans=0.0
+2024-07-28 20:10:33,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.99 vs. limit=15.0
+2024-07-28 20:10:46,740 INFO [train.py:1114] (0/4) Epoch 15, batch 5750, loss[loss=0.1583, simple_loss=0.2505, pruned_loss=0.03299, over 4684.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2714, pruned_loss=0.04596, over 938202.21 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:10:48,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=198482.66666666666, ans=0.025
+2024-07-28 20:10:50,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198482.66666666666, ans=0.125
+2024-07-28 20:10:56,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=198496.0, ans=0.125
+2024-07-28 20:10:59,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.49 vs. limit=12.0
+2024-07-28 20:11:08,235 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.533e+01 6.199e+01 7.119e+01 1.016e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 20:11:09,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=198522.66666666666, ans=0.125
+2024-07-28 20:11:10,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=198522.66666666666, ans=0.05
+2024-07-28 20:11:11,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198522.66666666666, ans=0.1
+2024-07-28 20:11:13,459 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.50 vs. limit=22.5
+2024-07-28 20:11:20,280 INFO [train.py:1114] (0/4) Epoch 15, batch 5800, loss[loss=0.1859, simple_loss=0.2844, pruned_loss=0.04369, over 4744.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2722, pruned_loss=0.04634, over 937350.87 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:11:26,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198562.66666666666, ans=0.0
+2024-07-28 20:11:32,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=198562.66666666666, ans=0.0
+2024-07-28 20:11:33,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=198576.0, ans=0.2
+2024-07-28 20:11:35,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198576.0, ans=0.1
+2024-07-28 20:11:36,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198576.0, ans=0.0
+2024-07-28 20:11:55,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=198616.0, ans=0.1
+2024-07-28 20:11:55,691 INFO [train.py:1114] (0/4) Epoch 15, batch 5850, loss[loss=0.1906, simple_loss=0.2886, pruned_loss=0.04635, over 4474.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2726, pruned_loss=0.04654, over 937798.70 frames. ], batch size: 21, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:12:11,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.06 vs. limit=15.0
+2024-07-28 20:12:17,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198656.0, ans=0.0
+2024-07-28 20:12:19,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 5.725e+01 6.353e+01 6.909e+01 1.131e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 20:12:21,561 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0
+2024-07-28 20:12:22,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=198656.0, ans=0.0
+2024-07-28 20:12:28,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=198669.33333333334, ans=0.125
+2024-07-28 20:12:31,563 INFO [train.py:1114] (0/4) Epoch 15, batch 5900, loss[loss=0.2074, simple_loss=0.2938, pruned_loss=0.06046, over 4693.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2724, pruned_loss=0.04648, over 938101.20 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:12:34,639 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. limit=15.0
+2024-07-28 20:12:43,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=198696.0, ans=0.0
+2024-07-28 20:12:43,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198696.0, ans=0.125
+2024-07-28 20:13:11,358 INFO [train.py:1114] (0/4) Epoch 15, batch 5950, loss[loss=0.207, simple_loss=0.2838, pruned_loss=0.06506, over 4680.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2718, pruned_loss=0.04603, over 940052.73 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:13:22,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=198762.66666666666, ans=0.2
+2024-07-28 20:13:31,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=198776.0, ans=0.125
+2024-07-28 20:13:34,735 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.687e+01 6.210e+01 6.868e+01 1.023e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 20:13:44,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=198802.66666666666, ans=0.125
+2024-07-28 20:13:45,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.06 vs. limit=15.0
+2024-07-28 20:13:47,279 INFO [train.py:1114] (0/4) Epoch 15, batch 6000, loss[loss=0.1649, simple_loss=0.2553, pruned_loss=0.03728, over 4295.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04578, over 937160.57 frames. ], batch size: 25, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:13:50,112 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 20:14:05,301 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.9033, 3.0857, 1.7316, 3.2162, 2.8431, 2.9077, 3.2764, 3.1837],
+ device='cuda:0')
+2024-07-28 20:14:06,506 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4248, 2.7961, 4.1235, 3.5549, 4.2087, 4.0649, 3.2049, 2.7941],
+ device='cuda:0')
+2024-07-28 20:14:09,824 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.1637, simple_loss=0.2666, pruned_loss=0.03037, over 944034.00 frames.
+2024-07-28 20:14:09,825 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 20:14:17,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0
+2024-07-28 20:14:20,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=198829.33333333334, ans=0.0
+2024-07-28 20:14:22,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=198829.33333333334, ans=0.035
+2024-07-28 20:14:42,060 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.69 vs. limit=22.5
+2024-07-28 20:14:42,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.80 vs. limit=15.0
+2024-07-28 20:14:43,684 INFO [train.py:1114] (0/4) Epoch 15, batch 6050, loss[loss=0.1608, simple_loss=0.2509, pruned_loss=0.03535, over 4773.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2709, pruned_loss=0.04573, over 938285.03 frames. ], batch size: 12, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:14:45,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=198882.66666666666, ans=0.125
+2024-07-28 20:14:52,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.30 vs. limit=15.0
+2024-07-28 20:14:54,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=198896.0, ans=0.125
+2024-07-28 20:14:55,056 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=198896.0, ans=0.025
+2024-07-28 20:15:03,708 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=198909.33333333334, ans=0.05
+2024-07-28 20:15:06,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.577e+01 6.176e+01 7.301e+01 1.116e+02, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 20:15:18,931 INFO [train.py:1114] (0/4) Epoch 15, batch 6100, loss[loss=0.1658, simple_loss=0.2587, pruned_loss=0.0364, over 4673.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2707, pruned_loss=0.04564, over 938356.23 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:15:19,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=198949.33333333334, ans=0.2
+2024-07-28 20:15:22,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=198949.33333333334, ans=0.2
+2024-07-28 20:15:26,034 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0
+2024-07-28 20:15:30,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.96 vs. limit=22.5
+2024-07-28 20:15:37,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=198976.0, ans=0.125
+2024-07-28 20:15:42,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=198989.33333333334, ans=0.0
+2024-07-28 20:15:42,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=12.0
+2024-07-28 20:15:46,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=199002.66666666666, ans=0.0
+2024-07-28 20:15:46,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=199002.66666666666, ans=0.2
+2024-07-28 20:15:52,712 INFO [train.py:1114] (0/4) Epoch 15, batch 6150, loss[loss=0.2409, simple_loss=0.3076, pruned_loss=0.08706, over 3258.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2707, pruned_loss=0.04571, over 936804.94 frames. ], batch size: 35, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:15:58,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=199016.0, ans=22.5
+2024-07-28 20:16:12,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=199056.0, ans=0.0
+2024-07-28 20:16:14,488 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.430e+01 6.165e+01 7.118e+01 1.181e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 20:16:21,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=199069.33333333334, ans=0.0
+2024-07-28 20:16:25,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.68 vs. limit=15.0
+2024-07-28 20:16:26,484 INFO [train.py:1114] (0/4) Epoch 15, batch 6200, loss[loss=0.1837, simple_loss=0.2834, pruned_loss=0.04198, over 4744.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.271, pruned_loss=0.04577, over 936498.18 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:16:42,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=199109.33333333334, ans=0.125
+2024-07-28 20:16:45,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=199109.33333333334, ans=0.125
+2024-07-28 20:16:45,706 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.32 vs. limit=22.5
+2024-07-28 20:16:56,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=199136.0, ans=0.0
+2024-07-28 20:17:00,525 INFO [train.py:1114] (0/4) Epoch 15, batch 6250, loss[loss=0.2085, simple_loss=0.2904, pruned_loss=0.06331, over 4808.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2707, pruned_loss=0.04635, over 933841.67 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:17:12,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=199162.66666666666, ans=0.125
+2024-07-28 20:17:12,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=199162.66666666666, ans=0.0
+2024-07-28 20:17:25,819 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.601e+01 6.121e+01 7.200e+01 1.148e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 20:17:26,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=199189.33333333334, ans=0.0
+2024-07-28 20:17:36,992 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:17:38,324 INFO [train.py:1114] (0/4) Epoch 15, batch 6300, loss[loss=0.1429, simple_loss=0.2312, pruned_loss=0.02729, over 4550.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2703, pruned_loss=0.04626, over 929758.41 frames. ], batch size: 10, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:17:39,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=199216.0, ans=0.025
+2024-07-28 20:17:47,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=199229.33333333334, ans=0.0
+2024-07-28 20:17:48,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=199229.33333333334, ans=0.0
+2024-07-28 20:17:52,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=199242.66666666666, ans=0.125
+2024-07-28 20:17:58,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199256.0, ans=0.125
+2024-07-28 20:18:03,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=199256.0, ans=0.125
+2024-07-28 20:18:11,385 INFO [train.py:1114] (0/4) Epoch 15, batch 6350, loss[loss=0.2157, simple_loss=0.3047, pruned_loss=0.06338, over 4547.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2699, pruned_loss=0.04599, over 933744.80 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:18:13,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=199282.66666666666, ans=0.95
+2024-07-28 20:18:13,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.40 vs. limit=15.0
+2024-07-28 20:18:20,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.78 vs. limit=8.0
+2024-07-28 20:18:21,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=199296.0, ans=0.125
+2024-07-28 20:18:26,779 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.45 vs. limit=10.0
+2024-07-28 20:18:29,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=199309.33333333334, ans=0.0
+2024-07-28 20:18:33,052 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.687e+01 6.281e+01 7.134e+01 1.278e+02, threshold=1.256e+02, percent-clipped=1.0
+2024-07-28 20:18:40,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199336.0, ans=0.1
+2024-07-28 20:18:44,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0
+2024-07-28 20:18:44,871 INFO [train.py:1114] (0/4) Epoch 15, batch 6400, loss[loss=0.1935, simple_loss=0.2825, pruned_loss=0.05225, over 4635.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2693, pruned_loss=0.04593, over 935255.61 frames. ], batch size: 13, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:18:56,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.74 vs. limit=10.0
+2024-07-28 20:18:58,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.32 vs. limit=22.5
+2024-07-28 20:19:00,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=199376.0, ans=0.04949747468305833
+2024-07-28 20:19:02,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=199376.0, ans=0.07
+2024-07-28 20:19:20,033 INFO [train.py:1114] (0/4) Epoch 15, batch 6450, loss[loss=0.2205, simple_loss=0.3129, pruned_loss=0.06408, over 4446.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2705, pruned_loss=0.0461, over 938669.06 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:19:22,026 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:19:26,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=199429.33333333334, ans=0.125
+2024-07-28 20:19:34,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=199442.66666666666, ans=0.125
+2024-07-28 20:19:41,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0
+2024-07-28 20:19:41,416 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.703e+01 6.605e+01 7.565e+01 1.204e+02, threshold=1.321e+02, percent-clipped=0.0
+2024-07-28 20:19:49,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=12.0
+2024-07-28 20:19:49,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=199469.33333333334, ans=0.05
+2024-07-28 20:19:53,866 INFO [train.py:1114] (0/4) Epoch 15, batch 6500, loss[loss=0.2523, simple_loss=0.3106, pruned_loss=0.09701, over 3513.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2703, pruned_loss=0.0458, over 940060.36 frames. ], batch size: 35, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:20:02,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=199496.0, ans=0.0
+2024-07-28 20:20:03,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=199496.0, ans=0.2
+2024-07-28 20:20:05,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.39 vs. limit=15.0
+2024-07-28 20:20:22,540 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=15.0
+2024-07-28 20:20:29,565 INFO [train.py:1114] (0/4) Epoch 15, batch 6550, loss[loss=0.155, simple_loss=0.2362, pruned_loss=0.03688, over 4809.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2698, pruned_loss=0.04523, over 942881.90 frames. ], batch size: 11, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:20:31,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199549.33333333334, ans=0.1
+2024-07-28 20:20:31,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199549.33333333334, ans=0.125
+2024-07-28 20:20:33,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=199549.33333333334, ans=0.05
+2024-07-28 20:20:36,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=199562.66666666666, ans=0.2
+2024-07-28 20:20:51,052 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.602e+01 6.242e+01 7.548e+01 1.165e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 20:20:51,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199589.33333333334, ans=0.125
+2024-07-28 20:21:03,267 INFO [train.py:1114] (0/4) Epoch 15, batch 6600, loss[loss=0.1651, simple_loss=0.2701, pruned_loss=0.03007, over 4935.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2697, pruned_loss=0.0453, over 944805.23 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:21:17,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.84 vs. limit=15.0
+2024-07-28 20:21:19,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=199642.66666666666, ans=0.125
+2024-07-28 20:21:20,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=199642.66666666666, ans=0.0
+2024-07-28 20:21:34,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=199669.33333333334, ans=0.0
+2024-07-28 20:21:37,088 INFO [train.py:1114] (0/4) Epoch 15, batch 6650, loss[loss=0.1952, simple_loss=0.2971, pruned_loss=0.04669, over 4578.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2699, pruned_loss=0.04546, over 943311.68 frames. ], batch size: 17, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:21:40,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.05 vs. limit=15.0
+2024-07-28 20:21:42,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=199682.66666666666, ans=0.0
+2024-07-28 20:21:51,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199709.33333333334, ans=0.1
+2024-07-28 20:21:53,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=199709.33333333334, ans=0.125
+2024-07-28 20:21:58,838 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.657e+01 6.507e+01 7.358e+01 9.845e+01, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 20:22:00,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=199722.66666666666, ans=0.0
+2024-07-28 20:22:03,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=199722.66666666666, ans=0.2
+2024-07-28 20:22:03,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=199736.0, ans=0.025
+2024-07-28 20:22:04,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0
+2024-07-28 20:22:11,047 INFO [train.py:1114] (0/4) Epoch 15, batch 6700, loss[loss=0.1736, simple_loss=0.2687, pruned_loss=0.03922, over 4774.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.04593, over 942060.53 frames. ], batch size: 19, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:22:11,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=199749.33333333334, ans=10.0
+2024-07-28 20:22:30,137 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:22:35,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199789.33333333334, ans=0.1
+2024-07-28 20:22:41,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=199802.66666666666, ans=15.0
+2024-07-28 20:22:46,721 INFO [train.py:1114] (0/4) Epoch 15, batch 6750, loss[loss=0.1985, simple_loss=0.2833, pruned_loss=0.0568, over 4259.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2706, pruned_loss=0.04608, over 940541.55 frames. ], batch size: 25, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:22:47,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=6.0
+2024-07-28 20:22:51,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.74 vs. limit=22.5
+2024-07-28 20:23:09,784 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.776e+01 6.215e+01 6.945e+01 1.166e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 20:23:11,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199856.0, ans=0.1
+2024-07-28 20:23:12,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=199856.0, ans=0.0
+2024-07-28 20:23:21,874 INFO [train.py:1114] (0/4) Epoch 15, batch 6800, loss[loss=0.2016, simple_loss=0.2951, pruned_loss=0.05404, over 4636.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2712, pruned_loss=0.04617, over 938567.01 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:23:30,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=199896.0, ans=0.0
+2024-07-28 20:23:42,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=199922.66666666666, ans=0.025
+2024-07-28 20:23:46,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.59 vs. limit=10.0
+2024-07-28 20:23:47,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199922.66666666666, ans=0.125
+2024-07-28 20:23:48,907 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.83 vs. limit=22.5
+2024-07-28 20:23:53,144 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.09 vs. limit=15.0
+2024-07-28 20:23:55,354 INFO [train.py:1114] (0/4) Epoch 15, batch 6850, loss[loss=0.1698, simple_loss=0.2675, pruned_loss=0.03605, over 4698.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.271, pruned_loss=0.04603, over 940117.28 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:24:08,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=199976.0, ans=0.0
+2024-07-28 20:24:16,996 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 5.729e+01 6.369e+01 7.119e+01 1.032e+02, threshold=1.274e+02, percent-clipped=0.0
+2024-07-28 20:24:27,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200002.66666666666, ans=0.1
+2024-07-28 20:24:31,051 INFO [train.py:1114] (0/4) Epoch 15, batch 6900, loss[loss=0.1969, simple_loss=0.2928, pruned_loss=0.05044, over 4954.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2704, pruned_loss=0.04595, over 942165.52 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:24:32,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=200016.0, ans=0.125
+2024-07-28 20:24:37,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=200029.33333333334, ans=0.125
+2024-07-28 20:24:43,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200042.66666666666, ans=0.1
+2024-07-28 20:24:48,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=12.0
+2024-07-28 20:25:01,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200069.33333333334, ans=0.1
+2024-07-28 20:25:04,286 INFO [train.py:1114] (0/4) Epoch 15, batch 6950, loss[loss=0.1498, simple_loss=0.2281, pruned_loss=0.03569, over 4567.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2701, pruned_loss=0.04574, over 939415.29 frames. ], batch size: 10, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:25:13,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=200096.0, ans=0.0
+2024-07-28 20:25:25,324 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.615e+01 6.056e+01 6.911e+01 1.034e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 20:25:34,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=200136.0, ans=0.125
+2024-07-28 20:25:37,535 INFO [train.py:1114] (0/4) Epoch 15, batch 7000, loss[loss=0.1693, simple_loss=0.2621, pruned_loss=0.03827, over 4621.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2696, pruned_loss=0.0455, over 938218.16 frames. ], batch size: 17, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:25:39,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=200149.33333333334, ans=0.0
+2024-07-28 20:25:41,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=200149.33333333334, ans=0.0
+2024-07-28 20:25:59,862 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.69 vs. limit=22.5
+2024-07-28 20:26:01,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.81 vs. limit=15.0
+2024-07-28 20:26:09,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=200202.66666666666, ans=0.125
+2024-07-28 20:26:12,401 INFO [train.py:1114] (0/4) Epoch 15, batch 7050, loss[loss=0.1645, simple_loss=0.2576, pruned_loss=0.03565, over 4698.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2697, pruned_loss=0.04527, over 941457.38 frames. ], batch size: 19, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:26:13,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200216.0, ans=0.1
+2024-07-28 20:26:23,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.19 vs. limit=15.0
+2024-07-28 20:26:24,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=200229.33333333334, ans=0.95
+2024-07-28 20:26:25,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=200229.33333333334, ans=0.125
+2024-07-28 20:26:35,602 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.674e+01 6.340e+01 7.118e+01 1.081e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 20:26:35,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200256.0, ans=0.125
+2024-07-28 20:26:35,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200256.0, ans=0.1
+2024-07-28 20:26:36,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.47 vs. limit=15.0
+2024-07-28 20:26:37,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=200256.0, ans=0.0
+2024-07-28 20:26:38,413 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=200256.0, ans=0.125
+2024-07-28 20:26:47,620 INFO [train.py:1114] (0/4) Epoch 15, batch 7100, loss[loss=0.1422, simple_loss=0.2423, pruned_loss=0.02106, over 4813.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2709, pruned_loss=0.04616, over 936701.80 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:26:51,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=200282.66666666666, ans=0.125
+2024-07-28 20:26:54,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=200296.0, ans=0.025
+2024-07-28 20:26:54,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=200296.0, ans=0.125
+2024-07-28 20:27:00,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=200309.33333333334, ans=0.125
+2024-07-28 20:27:01,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=200309.33333333334, ans=0.0
+2024-07-28 20:27:10,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.37 vs. limit=12.0
+2024-07-28 20:27:15,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=200336.0, ans=0.025
+2024-07-28 20:27:20,271 INFO [train.py:1114] (0/4) Epoch 15, batch 7150, loss[loss=0.2175, simple_loss=0.297, pruned_loss=0.06903, over 4565.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2693, pruned_loss=0.04533, over 937667.20 frames. 
], batch size: 21, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:27:27,632 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:27:31,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=200362.66666666666, ans=0.1 +2024-07-28 20:27:40,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=200389.33333333334, ans=0.2 +2024-07-28 20:27:41,588 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.495e+01 6.100e+01 6.664e+01 1.254e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 20:27:53,621 INFO [train.py:1114] (0/4) Epoch 15, batch 7200, loss[loss=0.2238, simple_loss=0.3132, pruned_loss=0.06718, over 4795.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04601, over 938128.69 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 64.0 +2024-07-28 20:27:57,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=200416.0, ans=0.125 +2024-07-28 20:27:59,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200429.33333333334, ans=0.0 +2024-07-28 20:28:07,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.08 vs. limit=10.0 +2024-07-28 20:28:18,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=200456.0, ans=0.025 +2024-07-28 20:28:21,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200469.33333333334, ans=0.1 +2024-07-28 20:28:23,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=200469.33333333334, ans=0.125 +2024-07-28 20:28:26,771 INFO [train.py:1114] (0/4) Epoch 15, batch 7250, loss[loss=0.2034, simple_loss=0.2875, pruned_loss=0.05968, over 4847.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2696, pruned_loss=0.04575, over 939766.01 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 64.0 +2024-07-28 20:28:27,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=200482.66666666666, ans=0.5 +2024-07-28 20:28:30,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=200482.66666666666, ans=0.125 +2024-07-28 20:28:42,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=200509.33333333334, ans=0.125 +2024-07-28 20:28:47,959 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.542e+01 5.960e+01 6.678e+01 9.539e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 20:28:48,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=200522.66666666666, ans=0.0 +2024-07-28 20:28:53,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.60 vs. 
limit=15.0 +2024-07-28 20:28:59,496 INFO [train.py:1114] (0/4) Epoch 15, batch 7300, loss[loss=0.1793, simple_loss=0.2683, pruned_loss=0.04517, over 4855.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2701, pruned_loss=0.04549, over 939786.10 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:29:01,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=200549.33333333334, ans=0.125 +2024-07-28 20:29:02,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=15.0 +2024-07-28 20:29:32,749 INFO [train.py:1114] (0/4) Epoch 15, batch 7350, loss[loss=0.1683, simple_loss=0.2643, pruned_loss=0.03616, over 4635.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2706, pruned_loss=0.04557, over 939301.44 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:29:33,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200616.0, ans=0.1 +2024-07-28 20:29:52,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200656.0, ans=0.1 +2024-07-28 20:29:53,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=200656.0, ans=0.125 +2024-07-28 20:29:54,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.678e+01 6.177e+01 7.167e+01 1.153e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 20:29:56,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=200656.0, ans=0.125 +2024-07-28 20:30:02,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=200669.33333333334, ans=0.125 +2024-07-28 20:30:05,203 INFO [train.py:1114] (0/4) Epoch 15, batch 7400, loss[loss=0.1948, simple_loss=0.2858, pruned_loss=0.05188, over 4696.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2711, pruned_loss=0.04532, over 940534.78 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:30:05,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=200682.66666666666, ans=0.0 +2024-07-28 20:30:09,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200682.66666666666, ans=0.1 +2024-07-28 20:30:18,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=200709.33333333334, ans=0.125 +2024-07-28 20:30:19,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=200709.33333333334, ans=0.0 +2024-07-28 20:30:26,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=200722.66666666666, ans=0.125 +2024-07-28 20:30:26,247 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.90 vs. 
limit=12.0 +2024-07-28 20:30:30,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=200722.66666666666, ans=0.0 +2024-07-28 20:30:31,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=200736.0, ans=0.0 +2024-07-28 20:30:34,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.75 vs. limit=22.5 +2024-07-28 20:30:38,381 INFO [train.py:1114] (0/4) Epoch 15, batch 7450, loss[loss=0.1788, simple_loss=0.266, pruned_loss=0.04576, over 4629.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2706, pruned_loss=0.04515, over 937995.90 frames. ], batch size: 11, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:30:44,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=200762.66666666666, ans=0.0 +2024-07-28 20:30:45,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.05 vs. limit=15.0 +2024-07-28 20:30:55,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=200776.0, ans=0.125 +2024-07-28 20:30:55,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=200776.0, ans=0.04949747468305833 +2024-07-28 20:30:57,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.85 vs. limit=15.0 +2024-07-28 20:30:59,743 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.621e+01 5.506e+01 6.120e+01 7.059e+01 1.130e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 20:31:00,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200789.33333333334, ans=0.125 +2024-07-28 20:31:01,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=200789.33333333334, ans=0.0 +2024-07-28 20:31:03,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.91 vs. limit=6.0 +2024-07-28 20:31:04,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200802.66666666666, ans=0.1 +2024-07-28 20:31:05,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200802.66666666666, ans=0.0 +2024-07-28 20:31:11,160 INFO [train.py:1114] (0/4) Epoch 15, batch 7500, loss[loss=0.228, simple_loss=0.3079, pruned_loss=0.0741, over 3571.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2719, pruned_loss=0.04626, over 936490.68 frames. 
], batch size: 35, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:31:13,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=200816.0, ans=0.125 +2024-07-28 20:31:14,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=200816.0, ans=0.0 +2024-07-28 20:31:19,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=200829.33333333334, ans=0.125 +2024-07-28 20:31:34,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=200856.0, ans=0.0 +2024-07-28 20:31:45,666 INFO [train.py:1114] (0/4) Epoch 15, batch 7550, loss[loss=0.2196, simple_loss=0.3039, pruned_loss=0.06762, over 4591.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2729, pruned_loss=0.04715, over 936394.71 frames. ], batch size: 17, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:31:48,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=200882.66666666666, ans=22.5 +2024-07-28 20:31:51,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.12 vs. limit=15.0 +2024-07-28 20:32:03,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.84 vs. limit=22.5 +2024-07-28 20:32:04,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=200909.33333333334, ans=0.125 +2024-07-28 20:32:08,714 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.439e+01 5.885e+01 6.380e+01 8.239e+01, threshold=1.177e+02, percent-clipped=0.0 +2024-07-28 20:32:13,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=15.0 +2024-07-28 20:32:19,815 INFO [train.py:1114] (0/4) Epoch 15, batch 7600, loss[loss=0.2003, simple_loss=0.296, pruned_loss=0.05229, over 4811.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2722, pruned_loss=0.0464, over 937966.50 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:32:37,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=200962.66666666666, ans=0.0 +2024-07-28 20:32:37,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=15.0 +2024-07-28 20:32:54,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=200976.0, ans=0.0 +2024-07-28 20:32:55,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=200976.0, ans=0.0 +2024-07-28 20:32:59,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.20 vs. 
limit=15.0 +2024-07-28 20:33:04,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=200989.33333333334, ans=0.2 +2024-07-28 20:33:08,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=201002.66666666666, ans=0.0 +2024-07-28 20:33:08,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=201002.66666666666, ans=0.2 +2024-07-28 20:33:13,667 INFO [train.py:1114] (0/4) Epoch 15, batch 7650, loss[loss=0.1444, simple_loss=0.2327, pruned_loss=0.02805, over 4931.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2717, pruned_loss=0.04663, over 936554.66 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:33:17,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=201016.0, ans=15.0 +2024-07-28 20:33:39,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201042.66666666666, ans=0.1 +2024-07-28 20:33:52,761 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.492e+01 6.279e+01 7.005e+01 1.015e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 20:33:54,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201056.0, ans=0.0 +2024-07-28 20:34:01,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=201069.33333333334, ans=0.035 +2024-07-28 20:34:19,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=201069.33333333334, ans=0.2 +2024-07-28 20:34:21,681 INFO [train.py:1114] (0/4) Epoch 15, batch 7700, loss[loss=0.1883, simple_loss=0.2882, pruned_loss=0.0442, over 4698.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2713, pruned_loss=0.04682, over 934463.48 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:34:27,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=201082.66666666666, ans=0.95 +2024-07-28 20:34:33,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=201096.0, ans=0.0 +2024-07-28 20:34:58,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201136.0, ans=0.125 +2024-07-28 20:35:06,315 INFO [train.py:1114] (0/4) Epoch 15, batch 7750, loss[loss=0.2168, simple_loss=0.3067, pruned_loss=0.06347, over 4925.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2722, pruned_loss=0.04692, over 935917.52 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:35:08,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=201149.33333333334, ans=0.2 +2024-07-28 20:35:14,186 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.11 vs. 
limit=15.0 +2024-07-28 20:35:19,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201149.33333333334, ans=0.125 +2024-07-28 20:35:23,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=201162.66666666666, ans=0.2 +2024-07-28 20:35:35,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=201189.33333333334, ans=0.95 +2024-07-28 20:35:37,371 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.525e+01 5.917e+01 6.791e+01 1.166e+02, threshold=1.183e+02, percent-clipped=0.0 +2024-07-28 20:35:39,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=201189.33333333334, ans=0.0 +2024-07-28 20:35:39,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=201189.33333333334, ans=0.125 +2024-07-28 20:36:12,993 INFO [train.py:1114] (0/4) Epoch 15, batch 7800, loss[loss=0.1732, simple_loss=0.2741, pruned_loss=0.03618, over 4670.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2726, pruned_loss=0.04701, over 937870.81 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:36:29,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=201216.0, ans=0.125 +2024-07-28 20:36:45,037 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=12.0 +2024-07-28 20:36:45,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201242.66666666666, ans=0.0 +2024-07-28 20:36:48,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=201242.66666666666, ans=0.125 +2024-07-28 20:36:48,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=201256.0, ans=0.125 +2024-07-28 20:36:49,138 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.28 vs. limit=8.0 +2024-07-28 20:37:04,501 INFO [train.py:1114] (0/4) Epoch 15, batch 7850, loss[loss=0.1901, simple_loss=0.2572, pruned_loss=0.06143, over 4547.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2723, pruned_loss=0.04669, over 936577.74 frames. ], batch size: 10, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:37:11,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.30 vs. 
limit=12.0 +2024-07-28 20:37:15,489 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:37:17,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=201296.0, ans=0.125 +2024-07-28 20:37:36,073 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.654e+01 6.198e+01 6.976e+01 9.701e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 20:37:44,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=201336.0, ans=0.125 +2024-07-28 20:37:45,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=201336.0, ans=0.0 +2024-07-28 20:37:46,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.50 vs. limit=12.0 +2024-07-28 20:37:47,273 INFO [train.py:1114] (0/4) Epoch 15, batch 7900, loss[loss=0.2116, simple_loss=0.3143, pruned_loss=0.05441, over 4872.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2734, pruned_loss=0.04694, over 933513.67 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:38:01,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201376.0, ans=0.0 +2024-07-28 20:38:09,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201389.33333333334, ans=0.125 +2024-07-28 20:38:19,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=201402.66666666666, ans=0.07 +2024-07-28 20:38:21,305 INFO [train.py:1114] (0/4) Epoch 15, batch 7950, loss[loss=0.2012, simple_loss=0.2771, pruned_loss=0.0626, over 3593.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.273, pruned_loss=0.04683, over 935700.46 frames. 
], batch size: 35, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:38:23,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=201416.0, ans=0.0 +2024-07-28 20:38:34,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201442.66666666666, ans=0.1 +2024-07-28 20:38:39,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=201442.66666666666, ans=0.0 +2024-07-28 20:38:41,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=201456.0, ans=0.125 +2024-07-28 20:38:42,614 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.203e+01 5.519e+01 6.026e+01 6.724e+01 9.656e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 20:38:45,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=201456.0, ans=0.025 +2024-07-28 20:38:46,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=201469.33333333334, ans=0.125 +2024-07-28 20:38:51,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201469.33333333334, ans=0.125 +2024-07-28 20:38:53,250 INFO [train.py:1114] (0/4) Epoch 15, batch 8000, loss[loss=0.1543, simple_loss=0.2343, pruned_loss=0.03717, over 4606.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.271, pruned_loss=0.04612, over 935040.20 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:38:56,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=201482.66666666666, ans=0.0 +2024-07-28 20:38:58,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.21 vs. limit=22.5 +2024-07-28 20:39:16,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201522.66666666666, ans=0.1 +2024-07-28 20:39:25,594 INFO [train.py:1114] (0/4) Epoch 15, batch 8050, loss[loss=0.1659, simple_loss=0.2621, pruned_loss=0.03489, over 4813.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04593, over 934758.75 frames. ], batch size: 14, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:39:27,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.49 vs. 
limit=22.5 +2024-07-28 20:39:34,296 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:39:36,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=201562.66666666666, ans=0.5 +2024-07-28 20:39:36,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=201562.66666666666, ans=0.125 +2024-07-28 20:39:36,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201562.66666666666, ans=0.0 +2024-07-28 20:39:44,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=201589.33333333334, ans=0.125 +2024-07-28 20:39:46,855 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.002e+01 6.838e+01 8.210e+01 1.277e+02, threshold=1.368e+02, percent-clipped=1.0 +2024-07-28 20:39:50,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=201589.33333333334, ans=0.1 +2024-07-28 20:39:58,046 INFO [train.py:1114] (0/4) Epoch 15, batch 8100, loss[loss=0.2121, simple_loss=0.2998, pruned_loss=0.06226, over 4794.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04584, over 934024.99 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:40:04,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201629.33333333334, ans=0.125 +2024-07-28 20:40:07,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=12.0 +2024-07-28 20:40:08,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201629.33333333334, ans=0.125 +2024-07-28 20:40:10,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201642.66666666666, ans=0.0 +2024-07-28 20:40:23,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.54 vs. limit=10.0 +2024-07-28 20:40:30,019 INFO [train.py:1114] (0/4) Epoch 15, batch 8150, loss[loss=0.1851, simple_loss=0.2905, pruned_loss=0.03988, over 4812.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2699, pruned_loss=0.04502, over 937702.49 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:40:34,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201682.66666666666, ans=0.125 +2024-07-28 20:40:35,531 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-28 20:40:41,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=201696.0, ans=0.0 +2024-07-28 20:40:50,464 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.36 vs. 
limit=15.0 +2024-07-28 20:40:51,248 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.751e+01 6.330e+01 7.260e+01 1.173e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 20:40:54,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.46 vs. limit=10.0 +2024-07-28 20:40:55,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=201736.0, ans=0.0 +2024-07-28 20:40:58,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201736.0, ans=0.1 +2024-07-28 20:40:59,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=201736.0, ans=0.035 +2024-07-28 20:41:02,470 INFO [train.py:1114] (0/4) Epoch 15, batch 8200, loss[loss=0.2047, simple_loss=0.294, pruned_loss=0.05768, over 4806.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2704, pruned_loss=0.04481, over 938604.46 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:41:02,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=201749.33333333334, ans=0.125 +2024-07-28 20:41:12,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201762.66666666666, ans=0.1 +2024-07-28 20:41:28,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=201802.66666666666, ans=0.125 +2024-07-28 20:41:36,036 INFO [train.py:1114] (0/4) Epoch 15, batch 8250, loss[loss=0.1996, simple_loss=0.2931, pruned_loss=0.05305, over 4900.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2713, pruned_loss=0.04529, over 938997.93 frames. ], batch size: 13, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:41:44,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201829.33333333334, ans=0.1 +2024-07-28 20:41:57,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=201856.0, ans=0.2 +2024-07-28 20:41:57,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.622e+01 6.090e+01 6.800e+01 1.043e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 20:42:07,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=201869.33333333334, ans=0.125 +2024-07-28 20:42:08,664 INFO [train.py:1114] (0/4) Epoch 15, batch 8300, loss[loss=0.208, simple_loss=0.2959, pruned_loss=0.06008, over 4906.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2724, pruned_loss=0.04598, over 939054.77 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:42:27,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201922.66666666666, ans=0.125 +2024-07-28 20:42:39,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201936.0, ans=0.1 +2024-07-28 20:42:41,352 INFO [train.py:1114] (0/4) Epoch 15, batch 8350, loss[loss=0.2048, simple_loss=0.2907, pruned_loss=0.05944, over 4792.00 frames. 
], tot_loss[loss=0.1825, simple_loss=0.2728, pruned_loss=0.04608, over 941668.25 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:42:45,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.73 vs. limit=15.0 +2024-07-28 20:42:45,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201949.33333333334, ans=0.125 +2024-07-28 20:42:49,850 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0 +2024-07-28 20:42:57,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=201976.0, ans=0.125 +2024-07-28 20:43:01,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=201989.33333333334, ans=0.125 +2024-07-28 20:43:03,764 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.556e+01 6.243e+01 6.901e+01 1.019e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 20:43:04,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=201989.33333333334, ans=0.2 +2024-07-28 20:43:05,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=201989.33333333334, ans=0.5 +2024-07-28 20:43:11,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=15.0 +2024-07-28 20:43:12,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202002.66666666666, ans=0.1 +2024-07-28 20:43:13,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202002.66666666666, ans=0.125 +2024-07-28 20:43:15,641 INFO [train.py:1114] (0/4) Epoch 15, batch 8400, loss[loss=0.1571, simple_loss=0.25, pruned_loss=0.03207, over 4780.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2715, pruned_loss=0.04581, over 940288.14 frames. ], batch size: 12, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:43:15,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.51 vs. limit=22.5 +2024-07-28 20:43:20,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. limit=12.0 +2024-07-28 20:43:25,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-07-28 20:43:30,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202042.66666666666, ans=0.1 +2024-07-28 20:43:35,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=10.0 +2024-07-28 20:43:51,527 INFO [train.py:1114] (0/4) Epoch 15, batch 8450, loss[loss=0.1836, simple_loss=0.2728, pruned_loss=0.04717, over 4800.00 frames. 
], tot_loss[loss=0.1813, simple_loss=0.2714, pruned_loss=0.04564, over 938967.39 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:43:52,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=202082.66666666666, ans=0.0 +2024-07-28 20:44:00,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=202096.0, ans=0.2 +2024-07-28 20:44:03,283 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:44:06,203 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.50 vs. limit=10.0 +2024-07-28 20:44:21,224 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.800e+01 6.456e+01 7.440e+01 1.040e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 20:44:22,559 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:44:27,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=202136.0, ans=0.0 +2024-07-28 20:44:34,660 INFO [train.py:1114] (0/4) Epoch 15, batch 8500, loss[loss=0.1578, simple_loss=0.2414, pruned_loss=0.03709, over 4614.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2694, pruned_loss=0.04485, over 939067.81 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:44:42,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=202162.66666666666, ans=0.125 +2024-07-28 20:44:49,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.56 vs. limit=22.5 +2024-07-28 20:44:50,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=202176.0, ans=0.025 +2024-07-28 20:44:51,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=202176.0, ans=0.04949747468305833 +2024-07-28 20:44:52,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202176.0, ans=0.0 +2024-07-28 20:44:58,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=202189.33333333334, ans=0.0 +2024-07-28 20:45:07,510 INFO [train.py:1114] (0/4) Epoch 15, batch 8550, loss[loss=0.171, simple_loss=0.2571, pruned_loss=0.04245, over 4810.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2701, pruned_loss=0.04504, over 940155.13 frames. 
], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:45:21,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202242.66666666666, ans=0.1 +2024-07-28 20:45:30,961 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.682e+01 6.336e+01 7.358e+01 1.234e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 20:45:34,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=202269.33333333334, ans=0.07 +2024-07-28 20:45:38,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=202269.33333333334, ans=0.2 +2024-07-28 20:45:41,920 INFO [train.py:1114] (0/4) Epoch 15, batch 8600, loss[loss=0.1915, simple_loss=0.2923, pruned_loss=0.04531, over 4804.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2697, pruned_loss=0.04445, over 940309.97 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:45:44,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=202282.66666666666, ans=0.0 +2024-07-28 20:45:53,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=202296.0, ans=0.125 +2024-07-28 20:45:58,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-07-28 20:46:01,273 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.96 vs. limit=15.0 +2024-07-28 20:46:01,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=202322.66666666666, ans=0.125 +2024-07-28 20:46:11,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=202336.0, ans=0.125 +2024-07-28 20:46:11,481 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=202336.0, ans=0.0 +2024-07-28 20:46:15,030 INFO [train.py:1114] (0/4) Epoch 15, batch 8650, loss[loss=0.2114, simple_loss=0.2968, pruned_loss=0.06301, over 4901.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2698, pruned_loss=0.04467, over 941723.98 frames. 
], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:46:15,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=202349.33333333334, ans=0.125 +2024-07-28 20:46:28,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202376.0, ans=0.1 +2024-07-28 20:46:32,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=202376.0, ans=0.04949747468305833 +2024-07-28 20:46:36,414 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.651e+01 6.077e+01 6.775e+01 1.563e+02, threshold=1.215e+02, percent-clipped=1.0 +2024-07-28 20:46:39,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202389.33333333334, ans=0.1 +2024-07-28 20:46:39,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=202389.33333333334, ans=0.1 +2024-07-28 20:46:39,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=202389.33333333334, ans=0.125 +2024-07-28 20:46:44,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=202402.66666666666, ans=0.0 +2024-07-28 20:46:47,376 INFO [train.py:1114] (0/4) Epoch 15, batch 8700, loss[loss=0.1841, simple_loss=0.2765, pruned_loss=0.04583, over 4763.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2708, pruned_loss=0.04512, over 939129.93 frames. ], batch size: 13, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:46:51,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=202416.0, ans=0.035 +2024-07-28 20:47:18,156 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:47:19,454 INFO [train.py:1114] (0/4) Epoch 15, batch 8750, loss[loss=0.2004, simple_loss=0.2938, pruned_loss=0.05347, over 4677.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2704, pruned_loss=0.04508, over 937096.10 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:47:40,678 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.196e+01 6.974e+01 1.029e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 20:47:43,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=202522.66666666666, ans=0.09899494936611666 +2024-07-28 20:47:44,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=202522.66666666666, ans=0.025 +2024-07-28 20:47:51,465 INFO [train.py:1114] (0/4) Epoch 15, batch 8800, loss[loss=0.1796, simple_loss=0.2816, pruned_loss=0.03879, over 4932.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2712, pruned_loss=0.04535, over 937929.40 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:47:55,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=202549.33333333334, ans=0.0 +2024-07-28 20:48:02,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.63 vs. 
limit=10.0 +2024-07-28 20:48:08,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=202576.0, ans=0.125 +2024-07-28 20:48:09,062 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.47 vs. limit=15.0 +2024-07-28 20:48:12,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=202589.33333333334, ans=0.0 +2024-07-28 20:48:20,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=15.0 +2024-07-28 20:48:23,856 INFO [train.py:1114] (0/4) Epoch 15, batch 8850, loss[loss=0.19, simple_loss=0.2861, pruned_loss=0.04696, over 4513.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2708, pruned_loss=0.04547, over 932404.94 frames. ], batch size: 21, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:48:34,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202629.33333333334, ans=0.1 +2024-07-28 20:48:37,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=202629.33333333334, ans=0.125 +2024-07-28 20:48:43,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=202642.66666666666, ans=0.125 +2024-07-28 20:48:50,490 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.661e+01 6.393e+01 7.198e+01 1.179e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 20:48:53,406 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-152000.pt +2024-07-28 20:49:08,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=202669.33333333334, ans=15.0 +2024-07-28 20:49:10,228 INFO [train.py:1114] (0/4) Epoch 15, batch 8900, loss[loss=0.1808, simple_loss=0.2731, pruned_loss=0.04422, over 4936.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2706, pruned_loss=0.04548, over 930219.93 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:49:22,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202709.33333333334, ans=0.1 +2024-07-28 20:49:41,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=202749.33333333334, ans=0.0 +2024-07-28 20:49:42,093 INFO [train.py:1114] (0/4) Epoch 15, batch 8950, loss[loss=0.2015, simple_loss=0.2849, pruned_loss=0.05908, over 4533.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04591, over 930764.61 frames. 
], batch size: 21, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:49:48,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202762.66666666666, ans=0.1
+2024-07-28 20:49:56,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202776.0, ans=0.1
+2024-07-28 20:49:58,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=202776.0, ans=0.125
+2024-07-28 20:50:03,356 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.676e+01 6.111e+01 7.140e+01 9.937e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 20:50:12,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=202802.66666666666, ans=0.125
+2024-07-28 20:50:14,241 INFO [train.py:1114] (0/4) Epoch 15, batch 9000, loss[loss=0.1436, simple_loss=0.2452, pruned_loss=0.02098, over 4646.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2701, pruned_loss=0.04508, over 933869.77 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:50:14,241 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 20:50:24,546 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.3634, 3.0404, 2.6292, 2.4711], device='cuda:0')
+2024-07-28 20:50:24,943 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.5370, 3.1424, 3.4443, 3.8362], device='cuda:0')
+2024-07-28 20:50:26,714 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.6128, 3.8892, 3.9183, 3.9076], device='cuda:0')
+2024-07-28 20:50:29,447 INFO [train.py:1146] (0/4) Epoch 15, validation: loss=0.164, simple_loss=0.2673, pruned_loss=0.03039, over 944034.00 frames.
+2024-07-28 20:50:29,448 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 20:50:32,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.73 vs. limit=15.0
+2024-07-28 20:50:36,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202829.33333333334, ans=0.1
+2024-07-28 20:50:54,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202869.33333333334, ans=0.125
+2024-07-28 20:51:01,321 INFO [train.py:1114] (0/4) Epoch 15, batch 9050, loss[loss=0.1435, simple_loss=0.2196, pruned_loss=0.03367, over 4565.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2692, pruned_loss=0.04501, over 934537.30 frames. ], batch size: 10, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:51:17,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=202909.33333333334, ans=0.2
+2024-07-28 20:51:17,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=202909.33333333334, ans=0.125
+2024-07-28 20:51:21,888 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.604e+01 6.217e+01 7.321e+01 1.269e+02, threshold=1.243e+02, percent-clipped=1.0
+2024-07-28 20:51:26,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=202936.0, ans=0.125
+2024-07-28 20:51:29,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=202936.0, ans=0.125
+2024-07-28 20:51:32,958 INFO [train.py:1114] (0/4) Epoch 15, batch 9100, loss[loss=0.1547, simple_loss=0.2576, pruned_loss=0.02592, over 4939.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2696, pruned_loss=0.04534, over 937058.89 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:51:36,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=202949.33333333334, ans=0.015
+2024-07-28 20:51:40,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.54 vs. limit=5.0
+2024-07-28 20:52:04,335 INFO [train.py:1114] (0/4) Epoch 15, batch 9150, loss[loss=0.1865, simple_loss=0.2775, pruned_loss=0.0477, over 4802.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2701, pruned_loss=0.04579, over 935727.20 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:52:07,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203016.0, ans=0.125
+2024-07-28 20:52:17,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=203042.66666666666, ans=0.015
+2024-07-28 20:52:17,667 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:52:25,465 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.523e+01 6.043e+01 6.925e+01 1.017e+02, threshold=1.209e+02, percent-clipped=0.0
+2024-07-28 20:52:26,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=203056.0, ans=0.0
+2024-07-28 20:52:30,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=203069.33333333334, ans=0.0
+2024-07-28 20:52:31,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=203069.33333333334, ans=0.2
+2024-07-28 20:52:33,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203069.33333333334, ans=0.125
+2024-07-28 20:52:36,152 INFO [train.py:1114] (0/4) Epoch 15, batch 9200, loss[loss=0.1811, simple_loss=0.2824, pruned_loss=0.0399, over 4846.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2699, pruned_loss=0.04546, over 937451.44 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:52:44,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203096.0, ans=0.0
+2024-07-28 20:52:50,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203109.33333333334, ans=0.125
+2024-07-28 20:52:50,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=203109.33333333334, ans=0.125
+2024-07-28 20:52:56,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=203122.66666666666, ans=0.125
+2024-07-28 20:53:03,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0
+2024-07-28 20:53:07,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203149.33333333334, ans=0.125
+2024-07-28 20:53:07,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=203149.33333333334, ans=0.0
+2024-07-28 20:53:08,403 INFO [train.py:1114] (0/4) Epoch 15, batch 9250, loss[loss=0.1985, simple_loss=0.2963, pruned_loss=0.05038, over 4637.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2701, pruned_loss=0.04512, over 938221.95 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:53:09,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.60 vs. limit=15.0
+2024-07-28 20:53:09,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.42 vs. limit=15.0
+2024-07-28 20:53:12,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=203149.33333333334, ans=0.125
+2024-07-28 20:53:12,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203149.33333333334, ans=0.1
+2024-07-28 20:53:13,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=203149.33333333334, ans=0.125
+2024-07-28 20:53:20,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203176.0, ans=0.0
+2024-07-28 20:53:29,340 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.672e+01 6.344e+01 6.747e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 20:53:30,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=203189.33333333334, ans=0.0
+2024-07-28 20:53:32,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203189.33333333334, ans=0.125
+2024-07-28 20:53:32,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=203189.33333333334, ans=10.0
+2024-07-28 20:53:34,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=203202.66666666666, ans=0.125
+2024-07-28 20:53:35,636 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.61 vs. limit=12.0
+2024-07-28 20:53:37,914 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:53:39,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=203202.66666666666, ans=0.0
+2024-07-28 20:53:40,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=203216.0, ans=0.025
+2024-07-28 20:53:41,041 INFO [train.py:1114] (0/4) Epoch 15, batch 9300, loss[loss=0.1579, simple_loss=0.237, pruned_loss=0.03938, over 4773.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2703, pruned_loss=0.04561, over 938488.38 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:53:53,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=203229.33333333334, ans=0.0
+2024-07-28 20:53:56,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0
+2024-07-28 20:53:58,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203242.66666666666, ans=0.1
+2024-07-28 20:54:04,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=203256.0, ans=0.125
+2024-07-28 20:54:13,768 INFO [train.py:1114] (0/4) Epoch 15, batch 9350, loss[loss=0.1778, simple_loss=0.2613, pruned_loss=0.04719, over 4823.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2702, pruned_loss=0.04544, over 934955.70 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:54:15,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=203282.66666666666, ans=0.125
+2024-07-28 20:54:18,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=203282.66666666666, ans=0.125
+2024-07-28 20:54:19,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=203282.66666666666, ans=0.0
+2024-07-28 20:54:20,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=203296.0, ans=0.125
+2024-07-28 20:54:23,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=203296.0, ans=0.0
+2024-07-28 20:54:23,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203296.0, ans=0.125
+2024-07-28 20:54:34,865 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.452e+01 6.189e+01 7.531e+01 9.435e+01, threshold=1.238e+02, percent-clipped=0.0
+2024-07-28 20:54:35,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203322.66666666666, ans=0.1
+2024-07-28 20:54:43,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.68 vs. limit=10.0
+2024-07-28 20:54:45,592 INFO [train.py:1114] (0/4) Epoch 15, batch 9400, loss[loss=0.1904, simple_loss=0.2756, pruned_loss=0.05264, over 4686.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2699, pruned_loss=0.04547, over 932611.93 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:54:51,775 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=203362.66666666666, ans=0.125
+2024-07-28 20:54:57,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=203362.66666666666, ans=10.0
+2024-07-28 20:54:59,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203376.0, ans=0.125
+2024-07-28 20:55:09,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=203389.33333333334, ans=0.125
+2024-07-28 20:55:11,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=203402.66666666666, ans=0.125
+2024-07-28 20:55:17,112 INFO [train.py:1114] (0/4) Epoch 15, batch 9450, loss[loss=0.1368, simple_loss=0.2205, pruned_loss=0.02651, over 4806.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2696, pruned_loss=0.04528, over 932062.51 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:55:17,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=203416.0, ans=0.09899494936611666
+2024-07-28 20:55:23,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=203429.33333333334, ans=0.125
+2024-07-28 20:55:37,632 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+01 5.463e+01 5.974e+01 6.797e+01 9.307e+01, threshold=1.195e+02, percent-clipped=0.0
+2024-07-28 20:55:48,447 INFO [train.py:1114] (0/4) Epoch 15, batch 9500, loss[loss=0.1623, simple_loss=0.2579, pruned_loss=0.03333, over 4708.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2702, pruned_loss=0.04521, over 934590.87 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:55:51,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203482.66666666666, ans=0.125
+2024-07-28 20:55:54,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=203496.0, ans=0.125
+2024-07-28 20:56:00,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=203509.33333333334, ans=0.125
+2024-07-28 20:56:03,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203509.33333333334, ans=0.0
+2024-07-28 20:56:13,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=203536.0, ans=0.2
+2024-07-28 20:56:14,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203536.0, ans=0.1
+2024-07-28 20:56:14,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=203536.0, ans=0.125
+2024-07-28 20:56:16,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=203536.0, ans=0.125
+2024-07-28 20:56:19,723 INFO [train.py:1114] (0/4) Epoch 15, batch 9550, loss[loss=0.1482, simple_loss=0.2427, pruned_loss=0.02689, over 4782.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2695, pruned_loss=0.04482, over 931660.05 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:56:22,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=203549.33333333334, ans=0.0
+2024-07-28 20:56:34,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=203576.0, ans=0.0
+2024-07-28 20:56:40,327 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.499e+01 6.112e+01 6.972e+01 9.508e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 20:56:42,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=203589.33333333334, ans=0.0
+2024-07-28 20:56:50,868 INFO [train.py:1114] (0/4) Epoch 15, batch 9600, loss[loss=0.2239, simple_loss=0.3041, pruned_loss=0.07183, over 3287.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2701, pruned_loss=0.04495, over 930614.09 frames. ], batch size: 35, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:56:55,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203616.0, ans=0.1
+2024-07-28 20:56:59,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.16 vs. limit=15.0
+2024-07-28 20:56:59,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203629.33333333334, ans=0.125
+2024-07-28 20:57:00,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.48 vs. limit=15.0
+2024-07-28 20:57:11,299 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:57:15,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203656.0, ans=0.1
+2024-07-28 20:57:17,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203669.33333333334, ans=0.1
+2024-07-28 20:57:18,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=203669.33333333334, ans=0.025
+2024-07-28 20:57:21,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203669.33333333334, ans=0.125
+2024-07-28 20:57:22,712 INFO [train.py:1114] (0/4) Epoch 15, batch 9650, loss[loss=0.1442, simple_loss=0.2315, pruned_loss=0.02839, over 4846.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2704, pruned_loss=0.04518, over 926578.61 frames. ], batch size: 16, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:57:32,402 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.30 vs. limit=15.0
+2024-07-28 20:57:39,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=203709.33333333334, ans=0.0
+2024-07-28 20:57:43,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=203722.66666666666, ans=0.2
+2024-07-28 20:57:44,445 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.709e+01 6.228e+01 7.235e+01 8.715e+01, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 20:57:46,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=203722.66666666666, ans=0.2
+2024-07-28 20:57:53,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=6.0
+2024-07-28 20:57:55,066 INFO [train.py:1114] (0/4) Epoch 15, batch 9700, loss[loss=0.2181, simple_loss=0.3077, pruned_loss=0.06423, over 4220.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2709, pruned_loss=0.04592, over 924672.83 frames. ], batch size: 25, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:57:57,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=203749.33333333334, ans=0.0
+2024-07-28 20:58:00,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203749.33333333334, ans=0.125
+2024-07-28 20:58:09,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=203776.0, ans=0.0
+2024-07-28 20:58:23,643 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:58:27,185 INFO [train.py:1114] (0/4) Epoch 15, batch 9750, loss[loss=0.2192, simple_loss=0.3117, pruned_loss=0.06335, over 4691.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2709, pruned_loss=0.04623, over 925055.73 frames. ], batch size: 15, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:58:27,562 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.54 vs. limit=12.0
+2024-07-28 20:58:39,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=203842.66666666666, ans=0.0
+2024-07-28 20:58:41,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203842.66666666666, ans=0.1
+2024-07-28 20:58:43,907 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-07-28 20:58:45,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=203856.0, ans=0.125
+2024-07-28 20:58:48,319 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.731e+01 6.608e+01 7.819e+01 1.278e+02, threshold=1.322e+02, percent-clipped=1.0
+2024-07-28 20:58:51,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=203869.33333333334, ans=0.125
+2024-07-28 20:59:03,964 INFO [train.py:1114] (0/4) Epoch 15, batch 9800, loss[loss=0.1737, simple_loss=0.2714, pruned_loss=0.03805, over 4709.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2694, pruned_loss=0.04576, over 924953.43 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:59:10,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=203896.0, ans=0.0
+2024-07-28 20:59:12,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203896.0, ans=0.125
+2024-07-28 20:59:13,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=203896.0, ans=0.125
+2024-07-28 20:59:21,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=203909.33333333334, ans=0.0
+2024-07-28 20:59:23,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=203922.66666666666, ans=0.95
+2024-07-28 20:59:29,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203936.0, ans=0.125
+2024-07-28 20:59:35,231 INFO [train.py:1114] (0/4) Epoch 15, batch 9850, loss[loss=0.1949, simple_loss=0.2837, pruned_loss=0.05304, over 4905.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2703, pruned_loss=0.04607, over 927348.88 frames. ], batch size: 15, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 20:59:38,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203949.33333333334, ans=0.0
+2024-07-28 20:59:40,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=203962.66666666666, ans=0.025
+2024-07-28 20:59:53,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203989.33333333334, ans=0.125
+2024-07-28 20:59:53,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203989.33333333334, ans=0.1
+2024-07-28 20:59:54,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=203989.33333333334, ans=0.0
+2024-07-28 20:59:56,148 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.690e+01 6.538e+01 7.363e+01 1.082e+02, threshold=1.308e+02, percent-clipped=0.0
+2024-07-28 20:59:58,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=203989.33333333334, ans=0.125
+2024-07-28 21:00:01,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=204002.66666666666, ans=0.0
+2024-07-28 21:00:06,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=204016.0, ans=0.125
+2024-07-28 21:00:06,888 INFO [train.py:1114] (0/4) Epoch 15, batch 9900, loss[loss=0.1944, simple_loss=0.2839, pruned_loss=0.05249, over 4858.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2716, pruned_loss=0.04647, over 926751.25 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:00:10,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=204016.0, ans=0.025
+2024-07-28 21:00:15,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=204029.33333333334, ans=0.125
+2024-07-28 21:00:20,495 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.21 vs. limit=15.0
+2024-07-28 21:00:20,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204042.66666666666, ans=0.1
+2024-07-28 21:00:29,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204056.0, ans=0.1
+2024-07-28 21:00:29,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204056.0, ans=0.125
+2024-07-28 21:00:35,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204069.33333333334, ans=0.125
+2024-07-28 21:00:36,028 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:00:37,699 INFO [train.py:1114] (0/4) Epoch 15, batch 9950, loss[loss=0.1493, simple_loss=0.2332, pruned_loss=0.03268, over 4800.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2721, pruned_loss=0.04693, over 929524.07 frames. ], batch size: 11, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:00:41,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204082.66666666666, ans=0.125
+2024-07-28 21:00:46,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.43 vs. limit=15.0
+2024-07-28 21:00:57,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=204122.66666666666, ans=0.125
+2024-07-28 21:00:59,864 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 6.089e+01 6.834e+01 7.968e+01 1.113e+02, threshold=1.367e+02, percent-clipped=0.0
+2024-07-28 21:01:00,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.63 vs. limit=15.0
+2024-07-28 21:01:05,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=204136.0, ans=15.0
+2024-07-28 21:01:07,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.87 vs. limit=15.0
+2024-07-28 21:01:09,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=204149.33333333334, ans=0.5
+2024-07-28 21:01:09,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0
+2024-07-28 21:01:09,730 INFO [train.py:1114] (0/4) Epoch 15, batch 10000, loss[loss=0.1592, simple_loss=0.251, pruned_loss=0.03367, over 4639.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2742, pruned_loss=0.04743, over 927216.35 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:01:20,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204162.66666666666, ans=0.1
+2024-07-28 21:01:26,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=204176.0, ans=0.0
+2024-07-28 21:01:28,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=204189.33333333334, ans=0.05
+2024-07-28 21:01:28,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=204189.33333333334, ans=0.07
+2024-07-28 21:01:32,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=204189.33333333334, ans=15.0
+2024-07-28 21:01:38,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.66 vs. limit=15.0
+2024-07-28 21:01:39,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=20.55 vs. limit=15.0
+2024-07-28 21:01:41,139 INFO [train.py:1114] (0/4) Epoch 15, batch 10050, loss[loss=0.2031, simple_loss=0.2894, pruned_loss=0.05839, over 3416.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2779, pruned_loss=0.0496, over 914464.14 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:01:47,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204229.33333333334, ans=0.125
+2024-07-28 21:01:53,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204229.33333333334, ans=0.1
+2024-07-28 21:01:54,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.15 vs. limit=22.5
+2024-07-28 21:02:00,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=204242.66666666666, ans=0.125
+2024-07-28 21:02:04,623 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.990e+01 6.680e+01 7.345e+01 9.959e+01, threshold=1.336e+02, percent-clipped=0.0
+2024-07-28 21:02:04,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=204256.0, ans=0.125
+2024-07-28 21:02:13,020 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204269.33333333334, ans=0.125
+2024-07-28 21:02:14,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=204269.33333333334, ans=0.125
+2024-07-28 21:02:15,465 INFO [train.py:1114] (0/4) Epoch 15, batch 10100, loss[loss=0.2169, simple_loss=0.3036, pruned_loss=0.06509, over 3437.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2825, pruned_loss=0.05458, over 860774.19 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:02:15,919 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0
+2024-07-28 21:02:20,535 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.46 vs. limit=15.0
+2024-07-28 21:02:28,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=204309.33333333334, ans=0.0
+2024-07-28 21:02:31,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=204309.33333333334, ans=0.2
+2024-07-28 21:02:32,684 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=15.0
+2024-07-28 21:02:33,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=204309.33333333334, ans=0.125
+2024-07-28 21:02:41,992 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.09 vs. limit=15.0
+2024-07-28 21:02:43,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=204336.0, ans=0.2
+2024-07-28 21:02:46,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.57 vs. limit=15.0
+2024-07-28 21:02:47,166 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.64 vs. limit=15.0
+2024-07-28 21:02:48,788 INFO [train.py:1114] (0/4) Epoch 15, batch 10150, loss[loss=0.1884, simple_loss=0.277, pruned_loss=0.04994, over 3596.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2851, pruned_loss=0.05718, over 819097.51 frames. ], batch size: 38, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:02:49,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=204349.33333333334, ans=0.125
+2024-07-28 21:03:00,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204362.66666666666, ans=0.1
+2024-07-28 21:03:04,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=204376.0, ans=0.0
+2024-07-28 21:03:06,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=204376.0, ans=0.125
+2024-07-28 21:03:09,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=204389.33333333334, ans=0.125
+2024-07-28 21:03:09,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=204389.33333333334, ans=0.1
+2024-07-28 21:03:11,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204389.33333333334, ans=0.0
+2024-07-28 21:03:12,067 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.640e+01 7.110e+01 7.457e+01 9.149e+01, threshold=1.422e+02, percent-clipped=0.0
+2024-07-28 21:03:22,475 INFO [train.py:1114] (0/4) Epoch 15, batch 10200, loss[loss=0.2274, simple_loss=0.297, pruned_loss=0.07894, over 3555.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2887, pruned_loss=0.06041, over 789229.33 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 21:03:28,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=204429.33333333334, ans=0.0
+2024-07-28 21:03:36,157 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-15.pt
+2024-07-28 21:04:37,180 INFO [train.py:1114] (0/4) Epoch 16, batch 0, loss[loss=0.1305, simple_loss=0.2164, pruned_loss=0.02232, over 4867.00 frames. ], tot_loss[loss=0.1305, simple_loss=0.2164, pruned_loss=0.02232, over 4867.00 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:04:37,180 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-28 21:04:45,064 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9083, 3.8925, 3.9638, 3.7724, 4.2745, 4.1516, 4.4137, 3.7879],
+       device='cuda:0')
+2024-07-28 21:04:48,652 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1648, simple_loss=0.2693, pruned_loss=0.03017, over 944034.00 frames.
+2024-07-28 21:04:48,653 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-28 21:04:51,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0
+2024-07-28 21:04:52,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204445.33333333334, ans=0.1
+2024-07-28 21:05:13,411 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.44 vs. limit=22.5
+2024-07-28 21:05:19,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=204498.66666666666, ans=0.125
+2024-07-28 21:05:20,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204498.66666666666, ans=0.1
+2024-07-28 21:05:23,365 INFO [train.py:1114] (0/4) Epoch 16, batch 50, loss[loss=0.1774, simple_loss=0.2668, pruned_loss=0.044, over 4614.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2719, pruned_loss=0.04541, over 206183.30 frames. ], batch size: 11, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:05:29,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=204512.0, ans=0.2
+2024-07-28 21:05:36,792 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.659e+01 6.518e+01 7.271e+01 1.139e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 21:05:57,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.02 vs. limit=6.0
+2024-07-28 21:06:04,225 INFO [train.py:1114] (0/4) Epoch 16, batch 100, loss[loss=0.1728, simple_loss=0.2622, pruned_loss=0.04172, over 4632.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2714, pruned_loss=0.0445, over 365526.77 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:06:15,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0
+2024-07-28 21:06:22,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=204605.33333333334, ans=0.125
+2024-07-28 21:06:24,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=204605.33333333334, ans=0.0
+2024-07-28 21:06:39,328 INFO [train.py:1114] (0/4) Epoch 16, batch 150, loss[loss=0.1439, simple_loss=0.2326, pruned_loss=0.02756, over 4600.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2691, pruned_loss=0.04398, over 493990.82 frames. ], batch size: 11, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:06:48,322 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=204658.66666666666, ans=0.125
+2024-07-28 21:06:50,240 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.360e+01 5.968e+01 6.673e+01 1.001e+02, threshold=1.194e+02, percent-clipped=0.0
+2024-07-28 21:06:51,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=204658.66666666666, ans=0.0
+2024-07-28 21:07:13,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.22 vs. limit=15.0
+2024-07-28 21:07:18,885 INFO [train.py:1114] (0/4) Epoch 16, batch 200, loss[loss=0.2065, simple_loss=0.2846, pruned_loss=0.06421, over 4506.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2682, pruned_loss=0.04403, over 593645.54 frames. ], batch size: 21, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:07:23,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=204712.0, ans=0.0
+2024-07-28 21:07:33,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.69 vs. limit=12.0
+2024-07-28 21:07:34,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=204738.66666666666, ans=0.125
+2024-07-28 21:07:39,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=204752.0, ans=0.0
+2024-07-28 21:07:44,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=204752.0, ans=0.125
+2024-07-28 21:07:52,286 INFO [train.py:1114] (0/4) Epoch 16, batch 250, loss[loss=0.1953, simple_loss=0.2794, pruned_loss=0.05562, over 4627.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2679, pruned_loss=0.04444, over 670477.88 frames. ], batch size: 16, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:07:59,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=204778.66666666666, ans=0.0
+2024-07-28 21:08:05,818 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.773e+01 6.705e+01 7.902e+01 1.167e+02, threshold=1.341e+02, percent-clipped=0.0
+2024-07-28 21:08:06,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=204792.0, ans=0.125
+2024-07-28 21:08:07,757 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.05 vs. limit=22.5
+2024-07-28 21:08:10,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=204805.33333333334, ans=0.125
+2024-07-28 21:08:11,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.84 vs. limit=15.0
+2024-07-28 21:08:24,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204832.0, ans=0.1
+2024-07-28 21:08:25,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=204832.0, ans=0.035
+2024-07-28 21:08:36,638 INFO [train.py:1114] (0/4) Epoch 16, batch 300, loss[loss=0.2163, simple_loss=0.2956, pruned_loss=0.06857, over 4794.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2676, pruned_loss=0.04409, over 729921.75 frames. ], batch size: 15, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:09:01,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=204885.33333333334, ans=0.125
+2024-07-28 21:09:09,791 INFO [train.py:1114] (0/4) Epoch 16, batch 350, loss[loss=0.1822, simple_loss=0.2658, pruned_loss=0.04932, over 4938.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04393, over 776124.35 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:09:17,699 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.458e+01 6.054e+01 6.509e+01 1.036e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 21:09:31,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.59 vs. limit=10.0
+2024-07-28 21:09:43,632 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:09:44,826 INFO [train.py:1114] (0/4) Epoch 16, batch 400, loss[loss=0.1742, simple_loss=0.2638, pruned_loss=0.04233, over 4694.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04381, over 813417.31 frames. ], batch size: 13, lr: 4.74e-03, grad_scale: 32.0
+2024-07-28 21:09:44,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=204978.66666666666, ans=0.2
+2024-07-28 21:09:45,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.23 vs. limit=22.5
+2024-07-28 21:13:23,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205005.33333333334, ans=0.125
+2024-07-28 21:13:28,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205005.33333333334, ans=0.1
+2024-07-28 21:13:29,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205005.33333333334, ans=0.0
+2024-07-28 21:13:30,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=205005.33333333334, ans=10.0
+2024-07-28 21:13:32,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=205018.66666666666, ans=0.125
+2024-07-28 21:13:32,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205018.66666666666, ans=0.125
+2024-07-28 21:13:51,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205032.0, ans=0.1
+2024-07-28 21:13:59,401 INFO [train.py:1114] (0/4) Epoch 16, batch 450, loss[loss=0.1736, simple_loss=0.2768, pruned_loss=0.03517, over 4633.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2688, pruned_loss=0.04427, over 838603.69 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:14:13,707 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.588e+01 6.021e+01 6.553e+01 1.018e+02, threshold=1.204e+02, percent-clipped=0.0
+2024-07-28 21:14:19,964 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.39 vs. limit=15.0
+2024-07-28 21:14:24,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=205072.0, ans=0.0
+2024-07-28 21:14:37,007 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0
+2024-07-28 21:14:39,789 INFO [train.py:1114] (0/4) Epoch 16, batch 500, loss[loss=0.1848, simple_loss=0.2888, pruned_loss=0.04036, over 4678.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2678, pruned_loss=0.04426, over 861162.51 frames. ], batch size: 15, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:14:45,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205112.0, ans=0.0
+2024-07-28 21:14:51,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=15.0
+2024-07-28 21:14:53,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=205138.66666666666, ans=0.025
+2024-07-28 21:15:10,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=205165.33333333334, ans=0.0
+2024-07-28 21:15:27,933 INFO [train.py:1114] (0/4) Epoch 16, batch 550, loss[loss=0.1992, simple_loss=0.2904, pruned_loss=0.05403, over 4659.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2685, pruned_loss=0.04432, over 877788.62 frames. ], batch size: 17, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:15:29,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205178.66666666666, ans=0.125
+2024-07-28 21:15:34,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205178.66666666666, ans=0.125
+2024-07-28 21:15:36,531 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:15:37,692 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.496e+01 6.135e+01 6.977e+01 1.008e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 21:15:46,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205205.33333333334, ans=0.1
+2024-07-28 21:15:52,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=205218.66666666666, ans=0.0
+2024-07-28 21:15:53,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=205218.66666666666, ans=10.0
+2024-07-28 21:16:02,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=205232.0, ans=0.125
+2024-07-28 21:16:05,174 INFO [train.py:1114] (0/4) Epoch 16, batch 600, loss[loss=0.1708, simple_loss=0.2577, pruned_loss=0.04188, over 4635.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04378, over 892410.27 frames. ], batch size: 16, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:16:08,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=205245.33333333334, ans=0.0
+2024-07-28 21:16:18,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205272.0, ans=0.125
+2024-07-28 21:16:28,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=205285.33333333334, ans=0.0
+2024-07-28 21:16:32,717 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.62 vs. limit=15.0
+2024-07-28 21:16:35,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=205298.66666666666, ans=0.5
+2024-07-28 21:16:38,108 INFO [train.py:1114] (0/4) Epoch 16, batch 650, loss[loss=0.1575, simple_loss=0.2485, pruned_loss=0.03325, over 4767.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2673, pruned_loss=0.04321, over 904153.79 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:16:40,808 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.65 vs. limit=22.5
+2024-07-28 21:16:45,227 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:16:46,464 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.356e+01 6.014e+01 6.947e+01 8.768e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-28 21:16:48,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=205325.33333333334, ans=0.0
+2024-07-28 21:17:01,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205352.0, ans=0.125
+2024-07-28 21:17:12,598 INFO [train.py:1114] (0/4) Epoch 16, batch 700, loss[loss=0.1486, simple_loss=0.2424, pruned_loss=0.02744, over 4643.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2686, pruned_loss=0.04361, over 911924.55 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:17:17,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=205378.66666666666, ans=0.2
+2024-07-28 21:17:31,015 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0
+2024-07-28 21:17:39,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205432.0, ans=0.125
+2024-07-28 21:17:45,832 INFO [train.py:1114] (0/4) Epoch 16, batch 750, loss[loss=0.1772, simple_loss=0.2669, pruned_loss=0.04375, over 4688.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2677, pruned_loss=0.04319, over 918388.11 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:17:50,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=15.0
+2024-07-28 21:17:51,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=205458.66666666666, ans=0.125
+2024-07-28 21:17:52,194 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0
+2024-07-28 21:17:53,638 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+01 5.543e+01 6.025e+01 6.972e+01 9.778e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 21:18:03,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=205472.0, ans=0.125
+2024-07-28 21:18:22,217 INFO [train.py:1114] (0/4) Epoch 16, batch 800, loss[loss=0.1404, simple_loss=0.2377, pruned_loss=0.02157, over 4861.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2672, pruned_loss=0.04315, over 923255.77 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:18:26,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=205512.0, ans=0.2
+2024-07-28 21:18:35,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.99 vs. limit=15.0
+2024-07-28 21:18:43,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=205538.66666666666, ans=0.125
+2024-07-28 21:18:46,472 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:19:00,045 INFO [train.py:1114] (0/4) Epoch 16, batch 850, loss[loss=0.1841, simple_loss=0.284, pruned_loss=0.04213, over 4671.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04397, over 927433.80 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:19:00,353 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=15.0
+2024-07-28 21:19:11,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.554e+01 6.346e+01 7.200e+01 1.191e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 21:19:26,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=205605.33333333334, ans=0.0
+2024-07-28 21:19:30,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205605.33333333334, ans=0.125
+2024-07-28 21:19:51,894 INFO [train.py:1114] (0/4) Epoch 16, batch 900, loss[loss=0.1519, simple_loss=0.247, pruned_loss=0.02838, over 4843.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2685, pruned_loss=0.04483, over 928099.70 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:19:52,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.59 vs. limit=15.0
+2024-07-28 21:20:05,296 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.02 vs. limit=10.0
+2024-07-28 21:20:19,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205685.33333333334, ans=0.125
+2024-07-28 21:20:34,609 INFO [train.py:1114] (0/4) Epoch 16, batch 950, loss[loss=0.1658, simple_loss=0.2523, pruned_loss=0.03967, over 4770.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.0442, over 929795.29 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:20:36,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=205712.0, ans=0.0
+2024-07-28 21:20:36,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=205712.0, ans=0.0
+2024-07-28 21:20:36,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=205712.0, ans=0.025
+2024-07-28 21:20:36,970 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.44 vs. limit=22.5
+2024-07-28 21:20:41,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=205725.33333333334, ans=0.0
+2024-07-28 21:20:42,591 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.442e+01 5.900e+01 6.572e+01 1.088e+02, threshold=1.180e+02, percent-clipped=0.0
+2024-07-28 21:20:42,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=205725.33333333334, ans=0.0
+2024-07-28 21:20:46,360 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.60 vs. limit=10.0
+2024-07-28 21:20:48,809 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:20:50,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=205738.66666666666, ans=0.0
+2024-07-28 21:20:50,391 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.24 vs. limit=15.0
+2024-07-28 21:20:57,865 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-07-28 21:21:02,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=205765.33333333334, ans=0.125
+2024-07-28 21:21:07,930 INFO [train.py:1114] (0/4) Epoch 16, batch 1000, loss[loss=0.18, simple_loss=0.2727, pruned_loss=0.04367, over 4974.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.04457, over 928906.41 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:21:14,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.05 vs. limit=12.0
+2024-07-28 21:21:28,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=205805.33333333334, ans=0.125
+2024-07-28 21:21:46,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=205818.66666666666, ans=0.0
+2024-07-28 21:21:48,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.13 vs. limit=15.0
+2024-07-28 21:21:56,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=205832.0, ans=0.125
+2024-07-28 21:22:08,546 INFO [train.py:1114] (0/4) Epoch 16, batch 1050, loss[loss=0.2118, simple_loss=0.3079, pruned_loss=0.05779, over 4860.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2686, pruned_loss=0.04495, over 931630.54 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0
+2024-07-28 21:22:17,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=205845.33333333334, ans=0.0
+2024-07-28 21:22:20,840 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.557e+01 6.013e+01 7.001e+01 9.107e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-28 21:22:23,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=205858.66666666666, ans=0.5
+2024-07-28 21:22:23,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205858.66666666666, ans=0.125
+2024-07-28 21:22:23,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=205858.66666666666, ans=0.2
+2024-07-28 21:22:27,304 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205858.66666666666, ans=0.125
+2024-07-28 21:22:32,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=205872.0, ans=0.0
+2024-07-28 21:22:37,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205885.33333333334, ans=0.1
+2024-07-28 21:23:43,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205898.66666666666, ans=0.1
+2024-07-28 21:24:11,058 INFO [train.py:1114] (0/4) Epoch 16, batch 1100, loss[loss=0.2196, simple_loss=0.3064, pruned_loss=0.06637, over 4897.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2693, pruned_loss=0.04496, over 934121.73 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:24:18,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205925.33333333334, ans=0.125
+2024-07-28 21:24:36,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205938.66666666666, ans=0.125
+2024-07-28 21:24:36,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205938.66666666666, ans=0.125
+2024-07-28 21:24:56,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205952.0, ans=0.0
+2024-07-28 21:24:58,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=205965.33333333334, ans=0.125
+2024-07-28 21:25:48,840 INFO [train.py:1114] (0/4) Epoch 16, batch 1150, loss[loss=0.1607, simple_loss=0.2485, pruned_loss=0.03646, over 4895.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2688, pruned_loss=0.04446, over 934381.15 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:29:11,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=205978.66666666666, ans=0.025
+2024-07-28 21:30:29,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.518e+01 6.042e+01 7.033e+01 1.072e+02, threshold=1.208e+02, percent-clipped=0.0
+2024-07-28 21:30:45,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206005.33333333334, ans=0.125
+2024-07-28 21:34:31,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=206018.66666666666, ans=0.125
+2024-07-28 21:34:40,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=206018.66666666666, ans=0.05
+2024-07-28 21:35:03,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=206032.0, ans=0.0
+2024-07-28 21:35:04,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0
+2024-07-28 21:35:13,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206032.0, ans=0.1
+2024-07-28 21:35:15,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=206045.33333333334, ans=0.125
+2024-07-28 21:35:15,732 INFO [train.py:1114] (0/4) Epoch 16, batch 1200, loss[loss=0.2073, simple_loss=0.3023, pruned_loss=0.05612, over 4865.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2698, pruned_loss=0.04464, over 933501.16 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:35:18,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=206045.33333333334, ans=0.0
+2024-07-28 21:35:23,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206058.66666666666, ans=0.1
+2024-07-28 21:35:26,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.42 vs. limit=22.5
+2024-07-28 21:35:31,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=206058.66666666666, ans=0.125
+2024-07-28 21:35:32,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=206058.66666666666, ans=0.125
+2024-07-28 21:35:37,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=206058.66666666666, ans=0.125
+2024-07-28 21:36:05,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0
+2024-07-28 21:36:27,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206085.33333333334, ans=0.0
+2024-07-28 21:38:19,714 INFO [train.py:1114] (0/4) Epoch 16, batch 1250, loss[loss=0.2099, simple_loss=0.3006, pruned_loss=0.05963, over 4806.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2705, pruned_loss=0.04444, over 937324.39 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:38:23,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.73 vs. limit=10.0
+2024-07-28 21:38:32,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=206112.0, ans=0.025
+2024-07-28 21:38:33,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206112.0, ans=0.125
+2024-07-28 21:38:46,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.568e+01 5.937e+01 6.680e+01 9.097e+01, threshold=1.187e+02, percent-clipped=0.0
+2024-07-28 21:38:58,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=206125.33333333334, ans=0.125
+2024-07-28 21:39:23,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206165.33333333334, ans=0.1
+2024-07-28 21:39:33,230 INFO [train.py:1114] (0/4) Epoch 16, batch 1300, loss[loss=0.201, simple_loss=0.2899, pruned_loss=0.05605, over 4734.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2691, pruned_loss=0.04395, over 938755.85 frames. ], batch size: 19, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:39:34,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206178.66666666666, ans=0.125
+2024-07-28 21:39:51,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.82 vs.
limit=15.0 +2024-07-28 21:40:05,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206218.66666666666, ans=0.0 +2024-07-28 21:40:14,229 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:40:15,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-07-28 21:40:17,472 INFO [train.py:1114] (0/4) Epoch 16, batch 1350, loss[loss=0.1684, simple_loss=0.2627, pruned_loss=0.03708, over 4757.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2682, pruned_loss=0.04337, over 940587.24 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:40:26,348 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.922e+01 5.660e+01 6.386e+01 7.583e+01 1.369e+02, threshold=1.277e+02, percent-clipped=2.0 +2024-07-28 21:40:49,699 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:41:32,892 INFO [train.py:1114] (0/4) Epoch 16, batch 1400, loss[loss=0.1572, simple_loss=0.2545, pruned_loss=0.03, over 4719.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.269, pruned_loss=0.04396, over 942197.70 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:41:33,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=206312.0, ans=0.125 +2024-07-28 21:41:43,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=206325.33333333334, ans=0.125 +2024-07-28 21:41:45,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 21:42:13,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=206338.66666666666, ans=0.125 +2024-07-28 21:42:18,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206338.66666666666, ans=0.125 +2024-07-28 21:42:19,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=206338.66666666666, ans=0.02 +2024-07-28 21:42:38,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=206352.0, ans=0.09899494936611666 +2024-07-28 21:45:05,384 INFO [train.py:1114] (0/4) Epoch 16, batch 1450, loss[loss=0.1773, simple_loss=0.279, pruned_loss=0.03783, over 4669.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2695, pruned_loss=0.0445, over 942300.03 frames. 
], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:45:18,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=206378.66666666666, ans=0.025 +2024-07-28 21:45:20,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=206392.0, ans=0.2 +2024-07-28 21:45:23,232 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.557e+01 6.212e+01 6.784e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 21:46:29,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=206405.33333333334, ans=0.125 +2024-07-28 21:46:35,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=206418.66666666666, ans=0.125 +2024-07-28 21:46:37,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=206432.0, ans=0.0 +2024-07-28 21:46:39,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=206432.0, ans=0.04949747468305833 +2024-07-28 21:46:41,471 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.98 vs. limit=12.0 +2024-07-28 21:46:44,897 INFO [train.py:1114] (0/4) Epoch 16, batch 1500, loss[loss=0.2139, simple_loss=0.3098, pruned_loss=0.059, over 4811.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2706, pruned_loss=0.04453, over 941511.27 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:46:45,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=206445.33333333334, ans=0.0 +2024-07-28 21:47:09,722 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.42 vs. limit=22.5 +2024-07-28 21:47:11,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206472.0, ans=0.0 +2024-07-28 21:47:24,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=206485.33333333334, ans=0.125 +2024-07-28 21:47:40,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0 +2024-07-28 21:47:44,837 INFO [train.py:1114] (0/4) Epoch 16, batch 1550, loss[loss=0.1623, simple_loss=0.2501, pruned_loss=0.03724, over 4902.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2697, pruned_loss=0.04456, over 937870.76 frames. 
], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:48:07,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=206512.0, ans=0.1 +2024-07-28 21:48:20,504 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.574e+01 6.317e+01 7.056e+01 9.850e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 21:48:20,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206525.33333333334, ans=0.1 +2024-07-28 21:48:29,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=206538.66666666666, ans=0.0 +2024-07-28 21:48:58,251 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:49:10,284 INFO [train.py:1114] (0/4) Epoch 16, batch 1600, loss[loss=0.168, simple_loss=0.2725, pruned_loss=0.03174, over 4868.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2705, pruned_loss=0.04506, over 936963.82 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:49:15,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.19 vs. limit=15.0 +2024-07-28 21:49:45,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.99 vs. limit=10.0 +2024-07-28 21:49:52,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=206605.33333333334, ans=0.5 +2024-07-28 21:49:52,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=206605.33333333334, ans=0.0 +2024-07-28 21:50:24,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=206632.0, ans=0.2 +2024-07-28 21:50:27,712 INFO [train.py:1114] (0/4) Epoch 16, batch 1650, loss[loss=0.1667, simple_loss=0.27, pruned_loss=0.03174, over 4660.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2703, pruned_loss=0.04509, over 936958.14 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:50:36,686 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 5.640e+01 6.319e+01 7.228e+01 1.155e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 21:50:39,024 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:50:50,592 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.39 vs. limit=10.0 +2024-07-28 21:51:09,318 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:51:10,380 INFO [train.py:1114] (0/4) Epoch 16, batch 1700, loss[loss=0.1753, simple_loss=0.2647, pruned_loss=0.04296, over 4711.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2705, pruned_loss=0.04522, over 938833.01 frames. 
], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:51:10,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=206712.0, ans=0.0 +2024-07-28 21:51:10,705 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=12.0 +2024-07-28 21:51:12,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=206712.0, ans=0.0 +2024-07-28 21:51:18,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=206725.33333333334, ans=0.0 +2024-07-28 21:51:22,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=206725.33333333334, ans=0.125 +2024-07-28 21:51:23,038 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.78 vs. limit=15.0 +2024-07-28 21:51:30,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=206738.66666666666, ans=0.125 +2024-07-28 21:51:30,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.56 vs. limit=15.0 +2024-07-28 21:51:51,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206738.66666666666, ans=0.125 +2024-07-28 21:52:57,583 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-07-28 21:52:59,250 INFO [train.py:1114] (0/4) Epoch 16, batch 1750, loss[loss=0.1535, simple_loss=0.2362, pruned_loss=0.03537, over 4826.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2702, pruned_loss=0.0452, over 940024.42 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:53:09,573 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.795e+01 6.698e+01 8.081e+01 1.290e+02, threshold=1.340e+02, percent-clipped=1.0 +2024-07-28 21:53:13,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=206792.0, ans=0.09899494936611666 +2024-07-28 21:53:13,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=206805.33333333334, ans=0.0 +2024-07-28 21:53:22,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=206805.33333333334, ans=0.025 +2024-07-28 21:53:26,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=206818.66666666666, ans=0.125 +2024-07-28 21:53:31,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206818.66666666666, ans=0.125 +2024-07-28 21:53:38,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206832.0, ans=0.1 +2024-07-28 21:53:44,575 INFO [train.py:1114] (0/4) Epoch 16, batch 1800, loss[loss=0.2296, simple_loss=0.3198, pruned_loss=0.06968, over 4638.00 frames. 
], tot_loss[loss=0.18, simple_loss=0.27, pruned_loss=0.045, over 940788.89 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:53:46,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=206845.33333333334, ans=0.125 +2024-07-28 21:53:51,005 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:54:24,980 INFO [train.py:1114] (0/4) Epoch 16, batch 1850, loss[loss=0.1759, simple_loss=0.2594, pruned_loss=0.04616, over 4805.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.27, pruned_loss=0.04508, over 940670.45 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:54:31,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206912.0, ans=0.1 +2024-07-28 21:54:35,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=206925.33333333334, ans=0.025 +2024-07-28 21:54:35,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=206925.33333333334, ans=0.2 +2024-07-28 21:54:37,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.631e+01 6.106e+01 7.258e+01 1.128e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 21:54:41,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206925.33333333334, ans=0.1 +2024-07-28 21:54:51,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=206938.66666666666, ans=0.125 +2024-07-28 21:55:17,725 INFO [train.py:1114] (0/4) Epoch 16, batch 1900, loss[loss=0.1827, simple_loss=0.285, pruned_loss=0.04022, over 4661.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2704, pruned_loss=0.04536, over 941693.86 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:55:24,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206978.66666666666, ans=0.1 +2024-07-28 21:55:38,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=207005.33333333334, ans=0.02 +2024-07-28 21:55:39,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=207005.33333333334, ans=0.0 +2024-07-28 21:55:40,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207005.33333333334, ans=0.125 +2024-07-28 21:55:40,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=22.5 +2024-07-28 21:56:04,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.83 vs. limit=15.0 +2024-07-28 21:56:10,924 INFO [train.py:1114] (0/4) Epoch 16, batch 1950, loss[loss=0.188, simple_loss=0.2722, pruned_loss=0.05191, over 4894.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2707, pruned_loss=0.04501, over 943749.03 frames. 
], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:56:23,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=207045.33333333334, ans=0.0 +2024-07-28 21:56:30,865 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.561e+01 6.255e+01 6.715e+01 9.914e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 21:56:34,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=207072.0, ans=0.025 +2024-07-28 21:56:53,220 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=207085.33333333334, ans=0.2 +2024-07-28 21:57:03,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=207098.66666666666, ans=0.025 +2024-07-28 21:57:05,249 INFO [train.py:1114] (0/4) Epoch 16, batch 2000, loss[loss=0.1561, simple_loss=0.2365, pruned_loss=0.03788, over 4816.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2714, pruned_loss=0.0453, over 941459.07 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:57:11,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207112.0, ans=0.1 +2024-07-28 21:57:15,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207112.0, ans=0.125 +2024-07-28 21:57:15,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=207125.33333333334, ans=0.0 +2024-07-28 21:57:16,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207125.33333333334, ans=0.125 +2024-07-28 21:57:22,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207125.33333333334, ans=0.125 +2024-07-28 21:57:46,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=207152.0, ans=0.1 +2024-07-28 21:58:14,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=207165.33333333334, ans=0.125 +2024-07-28 21:58:14,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207165.33333333334, ans=0.125 +2024-07-28 21:58:20,297 INFO [train.py:1114] (0/4) Epoch 16, batch 2050, loss[loss=0.1796, simple_loss=0.2515, pruned_loss=0.05385, over 4613.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2701, pruned_loss=0.04517, over 939382.34 frames. 
], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:58:36,910 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.615e+01 6.198e+01 7.046e+01 1.043e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 21:58:59,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=207218.66666666666, ans=0.125 +2024-07-28 21:59:08,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=207218.66666666666, ans=0.125 +2024-07-28 21:59:08,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=207218.66666666666, ans=0.95 +2024-07-28 21:59:11,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=207232.0, ans=0.95 +2024-07-28 21:59:19,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207232.0, ans=0.1 +2024-07-28 21:59:20,905 INFO [train.py:1114] (0/4) Epoch 16, batch 2100, loss[loss=0.1613, simple_loss=0.2622, pruned_loss=0.03019, over 4759.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2688, pruned_loss=0.04445, over 940950.06 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:00:04,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=207285.33333333334, ans=0.0 +2024-07-28 22:00:13,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=207285.33333333334, ans=0.0 +2024-07-28 22:00:16,401 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=15.0 +2024-07-28 22:00:19,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=207298.66666666666, ans=0.2 +2024-07-28 22:01:06,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=207298.66666666666, ans=0.0 +2024-07-28 22:01:09,924 INFO [train.py:1114] (0/4) Epoch 16, batch 2150, loss[loss=0.1808, simple_loss=0.2766, pruned_loss=0.04248, over 4905.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.04461, over 943907.23 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:01:25,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.29 vs. limit=22.5 +2024-07-28 22:01:58,062 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.463e+01 6.183e+01 7.182e+01 9.894e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 22:02:17,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207325.33333333334, ans=0.125 +2024-07-28 22:02:22,890 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.07 vs. 
limit=22.5 +2024-07-28 22:02:23,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207338.66666666666, ans=0.1 +2024-07-28 22:02:31,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=22.5 +2024-07-28 22:02:35,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=207352.0, ans=0.125 +2024-07-28 22:02:36,131 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-07-28 22:02:48,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=207365.33333333334, ans=0.125 +2024-07-28 22:02:50,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.56 vs. limit=10.0 +2024-07-28 22:02:53,144 INFO [train.py:1114] (0/4) Epoch 16, batch 2200, loss[loss=0.1957, simple_loss=0.2868, pruned_loss=0.05227, over 4816.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2691, pruned_loss=0.04448, over 943105.56 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:03:28,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=207418.66666666666, ans=0.07 +2024-07-28 22:03:32,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207418.66666666666, ans=0.125 +2024-07-28 22:03:47,408 INFO [train.py:1114] (0/4) Epoch 16, batch 2250, loss[loss=0.2111, simple_loss=0.3129, pruned_loss=0.05466, over 4695.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2686, pruned_loss=0.04412, over 941798.16 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:03:47,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=207445.33333333334, ans=0.125 +2024-07-28 22:03:51,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.36 vs. limit=15.0 +2024-07-28 22:03:51,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=207445.33333333334, ans=0.125 +2024-07-28 22:03:52,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.78 vs. limit=15.0 +2024-07-28 22:03:53,180 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.06 vs. limit=15.0 +2024-07-28 22:03:58,320 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.527e+01 6.028e+01 7.010e+01 1.004e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 22:04:08,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.98 vs. 
limit=15.0 +2024-07-28 22:04:35,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207498.66666666666, ans=0.125 +2024-07-28 22:04:53,024 INFO [train.py:1114] (0/4) Epoch 16, batch 2300, loss[loss=0.1483, simple_loss=0.2391, pruned_loss=0.02869, over 4937.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2669, pruned_loss=0.04375, over 939141.28 frames. ], batch size: 12, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:04:54,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=207512.0, ans=0.0 +2024-07-28 22:05:05,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207512.0, ans=0.125 +2024-07-28 22:05:13,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=207525.33333333334, ans=0.125 +2024-07-28 22:05:49,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=207565.33333333334, ans=0.0 +2024-07-28 22:05:52,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=207565.33333333334, ans=6.0 +2024-07-28 22:05:54,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=207565.33333333334, ans=0.125 +2024-07-28 22:05:57,162 INFO [train.py:1114] (0/4) Epoch 16, batch 2350, loss[loss=0.2069, simple_loss=0.2977, pruned_loss=0.05807, over 4642.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2668, pruned_loss=0.04382, over 941096.34 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:06:07,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=207578.66666666666, ans=0.125 +2024-07-28 22:06:13,018 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.459e+01 6.024e+01 6.952e+01 8.823e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 22:06:20,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=207605.33333333334, ans=0.0 +2024-07-28 22:06:23,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=207605.33333333334, ans=0.125 +2024-07-28 22:06:36,420 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:06:45,791 INFO [train.py:1114] (0/4) Epoch 16, batch 2400, loss[loss=0.1953, simple_loss=0.2779, pruned_loss=0.05637, over 4647.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2671, pruned_loss=0.0435, over 941269.74 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:06:46,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207645.33333333334, ans=0.125 +2024-07-28 22:06:47,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=207645.33333333334, ans=0.2 +2024-07-28 22:06:56,796 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.00 vs. 
limit=6.0 +2024-07-28 22:07:06,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=207672.0, ans=0.0 +2024-07-28 22:07:11,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=207685.33333333334, ans=0.125 +2024-07-28 22:07:33,015 INFO [train.py:1114] (0/4) Epoch 16, batch 2450, loss[loss=0.1786, simple_loss=0.2666, pruned_loss=0.04527, over 4692.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2683, pruned_loss=0.04424, over 937378.15 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:07:33,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=207712.0, ans=0.2 +2024-07-28 22:07:41,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=207725.33333333334, ans=0.125 +2024-07-28 22:07:45,695 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.574e+01 6.192e+01 6.939e+01 1.187e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 22:07:50,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=207738.66666666666, ans=0.0 +2024-07-28 22:07:55,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=207738.66666666666, ans=0.125 +2024-07-28 22:08:03,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.54 vs. limit=15.0 +2024-07-28 22:08:05,303 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.53 vs. limit=22.5 +2024-07-28 22:08:07,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207752.0, ans=0.125 +2024-07-28 22:08:24,508 INFO [train.py:1114] (0/4) Epoch 16, batch 2500, loss[loss=0.2145, simple_loss=0.2989, pruned_loss=0.06503, over 4807.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2687, pruned_loss=0.04452, over 939537.29 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:08:51,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207792.0, ans=0.1 +2024-07-28 22:09:13,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=207818.66666666666, ans=0.0 +2024-07-28 22:09:19,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=207832.0, ans=0.2 +2024-07-28 22:09:19,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207832.0, ans=0.1 +2024-07-28 22:09:22,187 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=207832.0, ans=15.0 +2024-07-28 22:09:23,877 INFO [train.py:1114] (0/4) Epoch 16, batch 2550, loss[loss=0.1861, simple_loss=0.2709, pruned_loss=0.05062, over 4816.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2683, pruned_loss=0.04438, over 939048.42 frames. 
], batch size: 11, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:09:24,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.63 vs. limit=10.0 +2024-07-28 22:09:29,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=207845.33333333334, ans=0.025 +2024-07-28 22:09:30,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=207845.33333333334, ans=0.125 +2024-07-28 22:09:38,981 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.535e+01 6.272e+01 7.311e+01 1.144e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 22:10:00,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.13 vs. limit=22.5 +2024-07-28 22:10:20,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=207872.0, ans=0.125 +2024-07-28 22:12:07,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=207898.66666666666, ans=10.0 +2024-07-28 22:13:03,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=207898.66666666666, ans=0.125 +2024-07-28 22:13:06,691 INFO [train.py:1114] (0/4) Epoch 16, batch 2600, loss[loss=0.1578, simple_loss=0.2513, pruned_loss=0.03216, over 4900.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.268, pruned_loss=0.04375, over 938174.77 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:13:50,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=207925.33333333334, ans=0.025 +2024-07-28 22:13:51,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=207925.33333333334, ans=0.0 +2024-07-28 22:13:59,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=207938.66666666666, ans=0.2 +2024-07-28 22:14:12,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207952.0, ans=0.1 +2024-07-28 22:14:19,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.60 vs. limit=22.5 +2024-07-28 22:15:02,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=207978.66666666666, ans=0.125 +2024-07-28 22:15:02,481 INFO [train.py:1114] (0/4) Epoch 16, batch 2650, loss[loss=0.1851, simple_loss=0.2816, pruned_loss=0.04427, over 4630.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2692, pruned_loss=0.04424, over 940201.68 frames. 
], batch size: 16, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:15:05,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=207978.66666666666, ans=0.0 +2024-07-28 22:15:41,442 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.682e+01 6.199e+01 7.227e+01 9.483e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 22:15:45,561 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-156000.pt +2024-07-28 22:20:54,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=207992.0, ans=0.0 +2024-07-28 22:22:29,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=208005.33333333334, ans=0.125 +2024-07-28 22:27:15,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=208032.0, ans=0.125 +2024-07-28 22:27:15,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.55 vs. limit=10.0 +2024-07-28 22:27:18,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0 +2024-07-28 22:27:28,518 INFO [train.py:1114] (0/4) Epoch 16, batch 2700, loss[loss=0.1736, simple_loss=0.2637, pruned_loss=0.04175, over 4743.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2698, pruned_loss=0.04447, over 940081.69 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:27:41,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.54 vs. limit=10.0 +2024-07-28 22:27:53,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-28 22:30:45,952 INFO [train.py:1114] (0/4) Epoch 16, batch 2750, loss[loss=0.1736, simple_loss=0.2607, pruned_loss=0.0432, over 4705.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2692, pruned_loss=0.04461, over 939919.73 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:30:57,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.51 vs. limit=15.0 +2024-07-28 22:30:58,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=208125.33333333334, ans=0.0 +2024-07-28 22:31:02,415 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.384e+01 5.637e+01 6.771e+01 7.935e+01 1.190e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-28 22:31:02,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208125.33333333334, ans=0.125 +2024-07-28 22:31:11,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.19 vs. 
limit=15.0 +2024-07-28 22:31:15,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208138.66666666666, ans=0.1 +2024-07-28 22:31:41,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0 +2024-07-28 22:31:47,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=208152.0, ans=0.2 +2024-07-28 22:31:51,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=208165.33333333334, ans=0.0 +2024-07-28 22:31:57,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=208178.66666666666, ans=0.2 +2024-07-28 22:31:58,138 INFO [train.py:1114] (0/4) Epoch 16, batch 2800, loss[loss=0.228, simple_loss=0.3033, pruned_loss=0.07637, over 3315.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.04457, over 938073.82 frames. ], batch size: 35, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:32:05,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208178.66666666666, ans=0.125 +2024-07-28 22:32:23,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=208205.33333333334, ans=0.05 +2024-07-28 22:32:28,785 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:32:30,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.90 vs. limit=22.5 +2024-07-28 22:32:37,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=208232.0, ans=0.125 +2024-07-28 22:32:38,582 INFO [train.py:1114] (0/4) Epoch 16, batch 2850, loss[loss=0.1844, simple_loss=0.2661, pruned_loss=0.05132, over 4972.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2694, pruned_loss=0.04459, over 936350.71 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:32:47,409 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.805e+01 6.352e+01 7.417e+01 1.040e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 22:32:48,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=208258.66666666666, ans=0.2 +2024-07-28 22:32:51,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=208258.66666666666, ans=0.125 +2024-07-28 22:32:57,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=208272.0, ans=0.0 +2024-07-28 22:33:24,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208285.33333333334, ans=0.125 +2024-07-28 22:33:33,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=208298.66666666666, ans=0.0 +2024-07-28 22:33:36,455 INFO [train.py:1114] (0/4) Epoch 16, batch 2900, loss[loss=0.1484, simple_loss=0.235, pruned_loss=0.03086, over 4839.00 frames. 
], tot_loss[loss=0.1797, simple_loss=0.27, pruned_loss=0.04468, over 939972.86 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:33:46,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.67 vs. limit=10.0 +2024-07-28 22:34:02,535 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:34:11,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=208352.0, ans=0.125 +2024-07-28 22:34:11,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=208352.0, ans=0.2 +2024-07-28 22:34:36,451 INFO [train.py:1114] (0/4) Epoch 16, batch 2950, loss[loss=0.1575, simple_loss=0.2476, pruned_loss=0.03366, over 4703.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.269, pruned_loss=0.04467, over 939065.08 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:34:45,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=208392.0, ans=0.125 +2024-07-28 22:34:46,793 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.287e+01 5.436e+01 5.951e+01 6.814e+01 8.870e+01, threshold=1.190e+02, percent-clipped=0.0 +2024-07-28 22:35:06,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=208418.66666666666, ans=0.125 +2024-07-28 22:35:19,624 INFO [train.py:1114] (0/4) Epoch 16, batch 3000, loss[loss=0.1809, simple_loss=0.2738, pruned_loss=0.04407, over 4753.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2685, pruned_loss=0.04448, over 938180.23 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:35:19,625 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 22:36:18,594 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.2742, 2.8930, 2.8367, 2.4793, 2.9308, 3.0789, 2.9931, 2.6834], + device='cuda:0') +2024-07-28 22:37:08,210 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1628, simple_loss=0.2657, pruned_loss=0.02996, over 944034.00 frames. 
+2024-07-28 22:37:08,211 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 22:38:00,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=208458.66666666666, ans=0.125 +2024-07-28 22:38:02,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=208458.66666666666, ans=0.125 +2024-07-28 22:38:12,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=208472.0, ans=0.125 +2024-07-28 22:38:17,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=208485.33333333334, ans=0.125 +2024-07-28 22:38:33,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=208485.33333333334, ans=0.0 +2024-07-28 22:38:36,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208498.66666666666, ans=0.125 +2024-07-28 22:38:42,176 INFO [train.py:1114] (0/4) Epoch 16, batch 3050, loss[loss=0.1674, simple_loss=0.2494, pruned_loss=0.04268, over 4642.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2692, pruned_loss=0.04473, over 937202.89 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:38:51,729 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.977e+01 5.726e+01 6.358e+01 7.092e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 22:38:54,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208525.33333333334, ans=0.1 +2024-07-28 22:38:56,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=208538.66666666666, ans=0.125 +2024-07-28 22:39:11,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=208552.0, ans=0.125 +2024-07-28 22:39:40,205 INFO [train.py:1114] (0/4) Epoch 16, batch 3100, loss[loss=0.181, simple_loss=0.2817, pruned_loss=0.04015, over 4605.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2689, pruned_loss=0.04487, over 937624.94 frames. ], batch size: 16, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:40:05,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208592.0, ans=0.125 +2024-07-28 22:40:06,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=208592.0, ans=0.2 +2024-07-28 22:40:52,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=208605.33333333334, ans=0.0 +2024-07-28 22:40:52,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=208605.33333333334, ans=0.025 +2024-07-28 22:41:09,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=208632.0, ans=0.0 +2024-07-28 22:41:13,448 INFO [train.py:1114] (0/4) Epoch 16, batch 3150, loss[loss=0.1864, simple_loss=0.2793, pruned_loss=0.04674, over 4588.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2693, pruned_loss=0.04479, over 938308.16 frames. 
], batch size: 17, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:41:15,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=208645.33333333334, ans=0.125 +2024-07-28 22:41:20,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=208645.33333333334, ans=0.95 +2024-07-28 22:41:26,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=208645.33333333334, ans=0.2 +2024-07-28 22:41:29,374 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.743e+01 5.555e+01 6.673e+01 7.571e+01 1.321e+02, threshold=1.335e+02, percent-clipped=1.0 +2024-07-28 22:41:36,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=208672.0, ans=0.125 +2024-07-28 22:42:14,168 INFO [train.py:1114] (0/4) Epoch 16, batch 3200, loss[loss=0.1783, simple_loss=0.2824, pruned_loss=0.03707, over 4819.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2686, pruned_loss=0.04401, over 940314.94 frames. ], batch size: 13, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:42:20,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=208725.33333333334, ans=0.2 +2024-07-28 22:42:24,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=208725.33333333334, ans=0.025 +2024-07-28 22:42:43,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=208752.0, ans=0.125 +2024-07-28 22:42:45,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.03 vs. limit=15.0 +2024-07-28 22:42:53,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=208765.33333333334, ans=0.2 +2024-07-28 22:43:03,066 INFO [train.py:1114] (0/4) Epoch 16, batch 3250, loss[loss=0.1928, simple_loss=0.2824, pruned_loss=0.0516, over 4941.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2695, pruned_loss=0.04425, over 941117.07 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:43:13,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.572e+01 5.431e+01 6.056e+01 6.661e+01 1.204e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 22:43:15,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=208792.0, ans=0.0 +2024-07-28 22:43:28,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208805.33333333334, ans=0.1 +2024-07-28 22:44:04,202 INFO [train.py:1114] (0/4) Epoch 16, batch 3300, loss[loss=0.1589, simple_loss=0.26, pruned_loss=0.02886, over 4605.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2677, pruned_loss=0.04419, over 941365.90 frames. 
], batch size: 19, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:44:04,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=208845.33333333334, ans=0.1 +2024-07-28 22:44:15,910 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:44:23,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=208858.66666666666, ans=0.125 +2024-07-28 22:44:34,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.42 vs. limit=15.0 +2024-07-28 22:44:34,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=208872.0, ans=0.125 +2024-07-28 22:45:00,567 INFO [train.py:1114] (0/4) Epoch 16, batch 3350, loss[loss=0.214, simple_loss=0.2906, pruned_loss=0.06867, over 4598.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2676, pruned_loss=0.04395, over 939553.94 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:45:06,063 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 22:45:06,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=208912.0, ans=0.0 +2024-07-28 22:45:11,082 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.569e+01 6.115e+01 6.727e+01 9.175e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 22:45:11,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=208925.33333333334, ans=0.125 +2024-07-28 22:45:14,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-07-28 22:45:15,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208938.66666666666, ans=0.1 +2024-07-28 22:45:15,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=208938.66666666666, ans=0.2 +2024-07-28 22:45:17,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208938.66666666666, ans=0.125 +2024-07-28 22:45:19,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208938.66666666666, ans=0.1 +2024-07-28 22:45:41,712 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0 +2024-07-28 22:45:57,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=208965.33333333334, ans=0.2 +2024-07-28 22:46:00,448 INFO [train.py:1114] (0/4) Epoch 16, batch 3400, loss[loss=0.1592, simple_loss=0.2429, pruned_loss=0.03771, over 4815.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2684, pruned_loss=0.0444, over 937919.94 frames. 
], batch size: 11, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:46:28,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=208992.0, ans=0.1 +2024-07-28 22:46:37,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209005.33333333334, ans=0.125 +2024-07-28 22:47:04,414 INFO [train.py:1114] (0/4) Epoch 16, batch 3450, loss[loss=0.2064, simple_loss=0.2977, pruned_loss=0.05755, over 4714.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2696, pruned_loss=0.04491, over 938060.82 frames. ], batch size: 19, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:47:12,846 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.483e+01 6.084e+01 6.778e+01 9.605e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 22:47:14,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209058.66666666666, ans=0.0 +2024-07-28 22:48:54,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=209098.66666666666, ans=0.0 +2024-07-28 22:48:57,638 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=15.0 +2024-07-28 22:48:59,083 INFO [train.py:1114] (0/4) Epoch 16, batch 3500, loss[loss=0.1785, simple_loss=0.2574, pruned_loss=0.04985, over 4947.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2693, pruned_loss=0.04491, over 938781.08 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:49:02,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209112.0, ans=0.1 +2024-07-28 22:49:06,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209125.33333333334, ans=0.1 +2024-07-28 22:49:11,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=209138.66666666666, ans=0.125 +2024-07-28 22:49:14,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.16 vs. limit=22.5 +2024-07-28 22:49:29,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=209165.33333333334, ans=0.0 +2024-07-28 22:49:32,644 INFO [train.py:1114] (0/4) Epoch 16, batch 3550, loss[loss=0.1503, simple_loss=0.2428, pruned_loss=0.02889, over 4656.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2689, pruned_loss=0.04458, over 939166.93 frames. 
], batch size: 14, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:49:35,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=209178.66666666666, ans=0.125 +2024-07-28 22:49:35,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=209178.66666666666, ans=0.2 +2024-07-28 22:49:41,601 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.691e+01 6.213e+01 7.399e+01 9.936e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 22:50:06,329 INFO [train.py:1114] (0/4) Epoch 16, batch 3600, loss[loss=0.1621, simple_loss=0.2571, pruned_loss=0.03357, over 4963.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2686, pruned_loss=0.04431, over 940796.93 frames. ], batch size: 13, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:50:07,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=209245.33333333334, ans=0.025 +2024-07-28 22:50:15,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=209258.66666666666, ans=0.2 +2024-07-28 22:50:44,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209298.66666666666, ans=0.125 +2024-07-28 22:50:46,881 INFO [train.py:1114] (0/4) Epoch 16, batch 3650, loss[loss=0.1741, simple_loss=0.2694, pruned_loss=0.03944, over 4905.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.269, pruned_loss=0.04442, over 941109.33 frames. ], batch size: 15, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:50:47,482 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0 +2024-07-28 22:50:53,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209325.33333333334, ans=0.125 +2024-07-28 22:50:55,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=209325.33333333334, ans=0.0 +2024-07-28 22:50:57,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.574e+01 6.186e+01 7.126e+01 1.218e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 22:51:00,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=209325.33333333334, ans=0.1 +2024-07-28 22:51:09,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209352.0, ans=0.0 +2024-07-28 22:51:22,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=209365.33333333334, ans=0.125 +2024-07-28 22:51:27,290 INFO [train.py:1114] (0/4) Epoch 16, batch 3700, loss[loss=0.171, simple_loss=0.2577, pruned_loss=0.04219, over 4920.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2686, pruned_loss=0.04421, over 942211.68 frames. 
], batch size: 14, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:51:28,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=209378.66666666666, ans=0.0 +2024-07-28 22:51:29,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209378.66666666666, ans=0.0 +2024-07-28 22:51:32,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=209378.66666666666, ans=0.125 +2024-07-28 22:51:36,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=209392.0, ans=0.09899494936611666 +2024-07-28 22:51:39,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=209392.0, ans=0.2 +2024-07-28 22:51:47,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209418.66666666666, ans=0.0 +2024-07-28 22:51:56,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209432.0, ans=0.125 +2024-07-28 22:52:00,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=209432.0, ans=0.125 +2024-07-28 22:52:02,368 INFO [train.py:1114] (0/4) Epoch 16, batch 3750, loss[loss=0.1666, simple_loss=0.2486, pruned_loss=0.04232, over 4791.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2681, pruned_loss=0.044, over 943395.61 frames. ], batch size: 11, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:52:16,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209458.66666666666, ans=0.125 +2024-07-28 22:52:17,623 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.513e+01 6.031e+01 6.754e+01 8.866e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 22:52:23,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=209472.0, ans=0.0 +2024-07-28 22:52:23,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209472.0, ans=0.125 +2024-07-28 22:52:37,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=209498.66666666666, ans=0.125 +2024-07-28 22:52:39,796 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:52:39,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.34 vs. limit=22.5 +2024-07-28 22:52:41,568 INFO [train.py:1114] (0/4) Epoch 16, batch 3800, loss[loss=0.1667, simple_loss=0.2511, pruned_loss=0.04117, over 4805.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2684, pruned_loss=0.0442, over 941474.61 frames. 
], batch size: 14, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:52:43,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=209512.0, ans=0.0 +2024-07-28 22:53:10,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209552.0, ans=0.125 +2024-07-28 22:53:16,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.52 vs. limit=6.0 +2024-07-28 22:53:17,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=209565.33333333334, ans=0.0 +2024-07-28 22:53:24,770 INFO [train.py:1114] (0/4) Epoch 16, batch 3850, loss[loss=0.1637, simple_loss=0.2645, pruned_loss=0.03145, over 4638.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2682, pruned_loss=0.04363, over 942346.15 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:53:44,024 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.438e+01 6.014e+01 6.827e+01 9.667e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 22:53:45,372 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5 +2024-07-28 22:53:47,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209605.33333333334, ans=0.125 +2024-07-28 22:53:51,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=209605.33333333334, ans=0.02 +2024-07-28 22:53:53,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=209605.33333333334, ans=0.125 +2024-07-28 22:53:54,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=209618.66666666666, ans=0.125 +2024-07-28 22:53:57,682 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209618.66666666666, ans=0.125 +2024-07-28 22:53:57,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209618.66666666666, ans=0.125 +2024-07-28 22:54:02,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=209618.66666666666, ans=0.04949747468305833 +2024-07-28 22:54:02,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209618.66666666666, ans=0.125 +2024-07-28 22:54:04,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=209632.0, ans=0.1 +2024-07-28 22:54:11,542 INFO [train.py:1114] (0/4) Epoch 16, batch 3900, loss[loss=0.1916, simple_loss=0.2928, pruned_loss=0.04519, over 4813.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2682, pruned_loss=0.04365, over 942645.89 frames. 
], batch size: 14, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:54:12,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=209645.33333333334, ans=0.125 +2024-07-28 22:54:17,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209658.66666666666, ans=0.1 +2024-07-28 22:54:18,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.61 vs. limit=15.0 +2024-07-28 22:54:35,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=209672.0, ans=0.07 +2024-07-28 22:54:51,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=209685.33333333334, ans=0.5 +2024-07-28 22:54:55,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=209698.66666666666, ans=0.125 +2024-07-28 22:54:56,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.81 vs. limit=22.5 +2024-07-28 22:54:58,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=209698.66666666666, ans=0.0 +2024-07-28 22:54:59,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209712.0, ans=0.125 +2024-07-28 22:55:00,448 INFO [train.py:1114] (0/4) Epoch 16, batch 3950, loss[loss=0.1979, simple_loss=0.2912, pruned_loss=0.05226, over 4806.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2681, pruned_loss=0.04383, over 944671.22 frames. 
], batch size: 16, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:55:07,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=209712.0, ans=0.05 +2024-07-28 22:55:08,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209712.0, ans=0.1 +2024-07-28 22:55:14,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209725.33333333334, ans=0.125 +2024-07-28 22:55:14,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.589e+01 5.934e+01 6.636e+01 9.172e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 22:55:20,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=209738.66666666666, ans=0.5 +2024-07-28 22:55:20,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=209738.66666666666, ans=22.5 +2024-07-28 22:55:27,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209752.0, ans=0.0 +2024-07-28 22:55:31,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=209752.0, ans=0.0 +2024-07-28 22:55:45,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=209765.33333333334, ans=0.5 +2024-07-28 22:55:49,234 INFO [train.py:1114] (0/4) Epoch 16, batch 4000, loss[loss=0.1435, simple_loss=0.2298, pruned_loss=0.02854, over 4787.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2681, pruned_loss=0.04399, over 941111.32 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:55:54,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209778.66666666666, ans=0.0 +2024-07-28 22:55:59,623 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.75 vs. limit=22.5 +2024-07-28 22:56:13,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209818.66666666666, ans=0.125 +2024-07-28 22:56:17,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=209818.66666666666, ans=0.0 +2024-07-28 22:56:25,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209832.0, ans=0.1 +2024-07-28 22:56:39,774 INFO [train.py:1114] (0/4) Epoch 16, batch 4050, loss[loss=0.2337, simple_loss=0.3079, pruned_loss=0.07974, over 3172.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2673, pruned_loss=0.04346, over 939391.03 frames. ], batch size: 35, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:56:40,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209845.33333333334, ans=0.1 +2024-07-28 22:56:41,503 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.09 vs. 
limit=6.0 +2024-07-28 22:56:47,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=209845.33333333334, ans=0.2 +2024-07-28 22:56:51,886 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.462e+01 6.013e+01 7.148e+01 1.181e+02, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 22:57:08,552 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. limit=15.0 +2024-07-28 22:57:32,726 INFO [train.py:1114] (0/4) Epoch 16, batch 4100, loss[loss=0.1956, simple_loss=0.2873, pruned_loss=0.05191, over 4912.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04388, over 938263.21 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:58:03,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209938.66666666666, ans=0.0 +2024-07-28 22:58:06,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=209938.66666666666, ans=0.0 +2024-07-28 22:58:29,598 INFO [train.py:1114] (0/4) Epoch 16, batch 4150, loss[loss=0.1512, simple_loss=0.2507, pruned_loss=0.02585, over 4822.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2669, pruned_loss=0.04328, over 937812.21 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:58:30,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=209978.66666666666, ans=0.2 +2024-07-28 22:58:31,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=209978.66666666666, ans=0.0 +2024-07-28 22:58:34,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=209978.66666666666, ans=0.0 +2024-07-28 22:58:35,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=209978.66666666666, ans=0.0 +2024-07-28 22:58:37,079 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209992.0, ans=0.1 +2024-07-28 22:58:37,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=209992.0, ans=0.0 +2024-07-28 22:58:40,273 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.817e+01 6.318e+01 7.435e+01 1.178e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 22:58:42,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209992.0, ans=0.125 +2024-07-28 22:58:43,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=209992.0, ans=0.125 +2024-07-28 22:58:58,778 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.03 vs. limit=8.0 +2024-07-28 22:59:00,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=210032.0, ans=0.0 +2024-07-28 22:59:08,742 INFO [train.py:1114] (0/4) Epoch 16, batch 4200, loss[loss=0.1845, simple_loss=0.2745, pruned_loss=0.04722, over 4888.00 frames. 
], tot_loss[loss=0.1773, simple_loss=0.2673, pruned_loss=0.04361, over 939092.87 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:59:08,910 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:59:14,337 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=6.0 +2024-07-28 22:59:28,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210072.0, ans=0.1 +2024-07-28 22:59:34,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.59 vs. limit=15.0 +2024-07-28 22:59:35,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210085.33333333334, ans=0.1 +2024-07-28 22:59:37,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210085.33333333334, ans=0.1 +2024-07-28 22:59:41,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=210098.66666666666, ans=0.125 +2024-07-28 22:59:46,501 INFO [train.py:1114] (0/4) Epoch 16, batch 4250, loss[loss=0.1942, simple_loss=0.2723, pruned_loss=0.05804, over 4642.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2686, pruned_loss=0.04425, over 940523.86 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:59:46,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=210112.0, ans=0.2 +2024-07-28 22:59:55,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.557e+01 6.153e+01 6.698e+01 1.216e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 22:59:59,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-07-28 22:59:59,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=210125.33333333334, ans=0.0 +2024-07-28 23:00:01,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.21 vs. limit=22.5 +2024-07-28 23:00:21,357 INFO [train.py:1114] (0/4) Epoch 16, batch 4300, loss[loss=0.1868, simple_loss=0.2732, pruned_loss=0.05022, over 4760.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2688, pruned_loss=0.04443, over 939960.78 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 23:00:31,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210192.0, ans=0.1 +2024-07-28 23:00:33,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=210192.0, ans=0.2 +2024-07-28 23:00:55,602 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:00:58,194 INFO [train.py:1114] (0/4) Epoch 16, batch 4350, loss[loss=0.1902, simple_loss=0.2699, pruned_loss=0.0553, over 4762.00 frames. 
], tot_loss[loss=0.1776, simple_loss=0.2681, pruned_loss=0.04354, over 940880.12 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 23:01:17,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210258.66666666666, ans=0.0 +2024-07-28 23:01:18,167 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.387e+01 6.031e+01 6.844e+01 1.009e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 23:01:29,634 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.13 vs. limit=15.0 +2024-07-28 23:01:30,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=210285.33333333334, ans=0.2 +2024-07-28 23:01:30,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-07-28 23:01:32,484 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.50 vs. limit=15.0 +2024-07-28 23:01:42,105 INFO [train.py:1114] (0/4) Epoch 16, batch 4400, loss[loss=0.1725, simple_loss=0.2786, pruned_loss=0.03325, over 4804.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2684, pruned_loss=0.04349, over 940434.27 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:01:45,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=210312.0, ans=0.125 +2024-07-28 23:01:48,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=210312.0, ans=0.025 +2024-07-28 23:01:58,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=210325.33333333334, ans=0.015 +2024-07-28 23:02:04,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=210338.66666666666, ans=0.04949747468305833 +2024-07-28 23:02:17,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=210365.33333333334, ans=0.05 +2024-07-28 23:02:18,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=210365.33333333334, ans=0.0 +2024-07-28 23:02:20,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210378.66666666666, ans=0.1 +2024-07-28 23:02:21,306 INFO [train.py:1114] (0/4) Epoch 16, batch 4450, loss[loss=0.1804, simple_loss=0.2683, pruned_loss=0.04619, over 4939.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2692, pruned_loss=0.04433, over 938650.90 frames. 
], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:02:30,820 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.581e+01 5.996e+01 6.828e+01 9.558e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 23:02:40,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=210405.33333333334, ans=0.125 +2024-07-28 23:02:42,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=210418.66666666666, ans=0.125 +2024-07-28 23:02:48,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=210432.0, ans=0.09899494936611666 +2024-07-28 23:02:55,307 INFO [train.py:1114] (0/4) Epoch 16, batch 4500, loss[loss=0.1669, simple_loss=0.2637, pruned_loss=0.03506, over 4743.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2696, pruned_loss=0.04409, over 937972.87 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:03,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=210458.66666666666, ans=0.2 +2024-07-28 23:03:04,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=210458.66666666666, ans=0.0 +2024-07-28 23:03:04,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=210458.66666666666, ans=0.0 +2024-07-28 23:03:15,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0 +2024-07-28 23:03:21,628 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=12.0 +2024-07-28 23:03:28,441 INFO [train.py:1114] (0/4) Epoch 16, batch 4550, loss[loss=0.1718, simple_loss=0.2699, pruned_loss=0.03687, over 4888.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2698, pruned_loss=0.04443, over 940133.15 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:34,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=210512.0, ans=0.125 +2024-07-28 23:03:39,689 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.876e+01 5.739e+01 6.533e+01 7.196e+01 1.162e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 23:03:47,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.31 vs. limit=15.0 +2024-07-28 23:03:49,551 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=210538.66666666666, ans=0.125 +2024-07-28 23:03:50,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=210552.0, ans=0.025 +2024-07-28 23:04:00,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=210565.33333333334, ans=0.125 +2024-07-28 23:04:06,251 INFO [train.py:1114] (0/4) Epoch 16, batch 4600, loss[loss=0.1858, simple_loss=0.2783, pruned_loss=0.04661, over 4555.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2689, pruned_loss=0.04435, over 938171.86 frames. 
], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:32,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=210632.0, ans=0.2 +2024-07-28 23:04:39,388 INFO [train.py:1114] (0/4) Epoch 16, batch 4650, loss[loss=0.1814, simple_loss=0.2747, pruned_loss=0.04402, over 4823.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2697, pruned_loss=0.04459, over 940089.97 frames. ], batch size: 16, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:52,460 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.652e+01 6.180e+01 7.051e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 23:04:54,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=210658.66666666666, ans=0.125 +2024-07-28 23:04:55,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 23:05:09,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.64 vs. limit=22.5 +2024-07-28 23:05:17,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=210698.66666666666, ans=0.125 +2024-07-28 23:05:19,333 INFO [train.py:1114] (0/4) Epoch 16, batch 4700, loss[loss=0.1626, simple_loss=0.2481, pruned_loss=0.03856, over 4717.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.269, pruned_loss=0.04471, over 937541.68 frames. ], batch size: 11, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:05:20,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210712.0, ans=0.1 +2024-07-28 23:05:23,112 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.54 vs. limit=15.0 +2024-07-28 23:05:28,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=210725.33333333334, ans=0.2 +2024-07-28 23:05:33,908 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 23:05:34,537 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.74 vs. limit=15.0 +2024-07-28 23:05:51,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=210765.33333333334, ans=0.125 +2024-07-28 23:05:52,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=210765.33333333334, ans=0.07 +2024-07-28 23:05:56,316 INFO [train.py:1114] (0/4) Epoch 16, batch 4750, loss[loss=0.1693, simple_loss=0.269, pruned_loss=0.03481, over 4525.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2699, pruned_loss=0.04523, over 936039.27 frames. 
], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:06:02,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=210792.0, ans=0.0 +2024-07-28 23:06:06,438 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.562e+01 6.169e+01 6.958e+01 1.016e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 23:06:06,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=210792.0, ans=0.0 +2024-07-28 23:06:19,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=210818.66666666666, ans=0.2 +2024-07-28 23:06:34,623 INFO [train.py:1114] (0/4) Epoch 16, batch 4800, loss[loss=0.1823, simple_loss=0.2843, pruned_loss=0.04015, over 4693.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2682, pruned_loss=0.04501, over 933180.28 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:06:34,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=210845.33333333334, ans=0.09899494936611666 +2024-07-28 23:06:44,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=210858.66666666666, ans=0.0 +2024-07-28 23:06:49,691 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0 +2024-07-28 23:06:50,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.87 vs. limit=22.5 +2024-07-28 23:06:51,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210872.0, ans=0.1 +2024-07-28 23:07:01,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210898.66666666666, ans=0.1 +2024-07-28 23:07:07,829 INFO [train.py:1114] (0/4) Epoch 16, batch 4850, loss[loss=0.2244, simple_loss=0.3303, pruned_loss=0.0593, over 4751.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2691, pruned_loss=0.04499, over 932535.43 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:07:08,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.24 vs. limit=6.0 +2024-07-28 23:07:28,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.0 +2024-07-28 23:07:31,468 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.391e+01 6.068e+01 6.775e+01 1.177e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:07:34,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.74 vs. 
limit=15.0 +2024-07-28 23:07:42,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=210952.0, ans=0.125 +2024-07-28 23:07:43,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=210952.0, ans=0.2 +2024-07-28 23:07:54,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=210965.33333333334, ans=0.125 +2024-07-28 23:07:59,283 INFO [train.py:1114] (0/4) Epoch 16, batch 4900, loss[loss=0.1668, simple_loss=0.2591, pruned_loss=0.03726, over 4760.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2673, pruned_loss=0.04405, over 934013.38 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:02,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210978.66666666666, ans=0.125 +2024-07-28 23:08:13,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=211005.33333333334, ans=0.0 +2024-07-28 23:08:13,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=211005.33333333334, ans=0.125 +2024-07-28 23:08:20,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211018.66666666666, ans=0.1 +2024-07-28 23:08:25,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=211018.66666666666, ans=0.04949747468305833 +2024-07-28 23:08:26,126 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0 +2024-07-28 23:08:26,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0 +2024-07-28 23:08:33,362 INFO [train.py:1114] (0/4) Epoch 16, batch 4950, loss[loss=0.2399, simple_loss=0.311, pruned_loss=0.08434, over 3220.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2693, pruned_loss=0.04474, over 930791.76 frames. ], batch size: 35, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:42,920 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.320e+01 5.430e+01 5.977e+01 6.818e+01 1.036e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 23:08:54,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=211085.33333333334, ans=0.125 +2024-07-28 23:08:58,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=211085.33333333334, ans=0.2 +2024-07-28 23:09:00,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=211098.66666666666, ans=0.125 +2024-07-28 23:09:04,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=211098.66666666666, ans=0.125 +2024-07-28 23:09:08,878 INFO [train.py:1114] (0/4) Epoch 16, batch 5000, loss[loss=0.19, simple_loss=0.2919, pruned_loss=0.04405, over 4679.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2684, pruned_loss=0.04414, over 934539.30 frames. 
], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:15,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=211125.33333333334, ans=0.125 +2024-07-28 23:09:21,602 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:09:37,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.75 vs. limit=15.0 +2024-07-28 23:09:41,974 INFO [train.py:1114] (0/4) Epoch 16, batch 5050, loss[loss=0.1555, simple_loss=0.2557, pruned_loss=0.02768, over 4868.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2668, pruned_loss=0.04353, over 937312.08 frames. ], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:53,132 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.620e+01 6.064e+01 6.522e+01 1.168e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 23:09:59,217 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 23:10:18,015 INFO [train.py:1114] (0/4) Epoch 16, batch 5100, loss[loss=0.1699, simple_loss=0.2585, pruned_loss=0.04064, over 4770.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.268, pruned_loss=0.04422, over 934549.14 frames. ], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:19,119 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5 +2024-07-28 23:10:35,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=211272.0, ans=0.0 +2024-07-28 23:10:50,171 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=211298.66666666666, ans=0.125 +2024-07-28 23:10:51,317 INFO [train.py:1114] (0/4) Epoch 16, batch 5150, loss[loss=0.1749, simple_loss=0.2671, pruned_loss=0.0413, over 4837.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2696, pruned_loss=0.04496, over 935896.29 frames. ], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:52,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=211312.0, ans=0.125 +2024-07-28 23:11:00,629 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.854e+01 6.432e+01 7.346e+01 1.040e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 23:11:02,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=211325.33333333334, ans=0.025 +2024-07-28 23:11:05,497 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.16 vs. limit=15.0 +2024-07-28 23:11:08,847 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.67 vs. 
limit=15.0 +2024-07-28 23:11:17,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=211352.0, ans=0.025 +2024-07-28 23:11:21,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211365.33333333334, ans=0.125 +2024-07-28 23:11:26,570 INFO [train.py:1114] (0/4) Epoch 16, batch 5200, loss[loss=0.1697, simple_loss=0.2681, pruned_loss=0.03562, over 4663.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2686, pruned_loss=0.04426, over 935336.61 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:11:31,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=211378.66666666666, ans=0.125 +2024-07-28 23:11:39,031 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.99 vs. limit=15.0 +2024-07-28 23:11:41,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0 +2024-07-28 23:11:41,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=211405.33333333334, ans=0.2 +2024-07-28 23:11:44,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=211405.33333333334, ans=0.0 +2024-07-28 23:11:56,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=211432.0, ans=0.125 +2024-07-28 23:11:58,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=211432.0, ans=0.95 +2024-07-28 23:12:01,690 INFO [train.py:1114] (0/4) Epoch 16, batch 5250, loss[loss=0.1941, simple_loss=0.2776, pruned_loss=0.05532, over 4899.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2686, pruned_loss=0.04467, over 934956.71 frames. ], batch size: 13, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:12,939 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.256e+01 5.544e+01 6.376e+01 7.640e+01 1.111e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 23:12:16,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.55 vs. limit=10.0 +2024-07-28 23:12:37,219 INFO [train.py:1114] (0/4) Epoch 16, batch 5300, loss[loss=0.1911, simple_loss=0.2793, pruned_loss=0.05145, over 4643.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2686, pruned_loss=0.04466, over 933468.79 frames. ], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:41,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211512.0, ans=0.125 +2024-07-28 23:12:53,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=211538.66666666666, ans=0.2 +2024-07-28 23:12:56,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.45 vs. 
limit=15.0 +2024-07-28 23:13:04,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211565.33333333334, ans=0.125 +2024-07-28 23:13:09,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=211565.33333333334, ans=0.025 +2024-07-28 23:13:10,606 INFO [train.py:1114] (0/4) Epoch 16, batch 5350, loss[loss=0.1678, simple_loss=0.2478, pruned_loss=0.04385, over 4509.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2683, pruned_loss=0.04452, over 935553.91 frames. ], batch size: 10, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:13,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=211578.66666666666, ans=0.025 +2024-07-28 23:13:18,013 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:13:19,953 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.493e+01 6.071e+01 6.914e+01 1.248e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:13:24,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=211605.33333333334, ans=0.2 +2024-07-28 23:13:25,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=211605.33333333334, ans=0.0 +2024-07-28 23:13:35,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=211618.66666666666, ans=0.0 +2024-07-28 23:13:42,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211632.0, ans=0.125 +2024-07-28 23:13:42,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=211632.0, ans=0.2 +2024-07-28 23:13:44,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.06 vs. limit=22.5 +2024-07-28 23:13:44,746 INFO [train.py:1114] (0/4) Epoch 16, batch 5400, loss[loss=0.2272, simple_loss=0.3012, pruned_loss=0.0766, over 4188.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2697, pruned_loss=0.04523, over 930158.75 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:52,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211658.66666666666, ans=0.1 +2024-07-28 23:14:02,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211672.0, ans=0.125 +2024-07-28 23:14:04,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=211685.33333333334, ans=0.2 +2024-07-28 23:14:06,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=211685.33333333334, ans=0.125 +2024-07-28 23:14:07,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=211685.33333333334, ans=0.125 +2024-07-28 23:14:17,999 INFO [train.py:1114] (0/4) Epoch 16, batch 5450, loss[loss=0.1401, simple_loss=0.2283, pruned_loss=0.02596, over 4696.00 frames. 
], tot_loss[loss=0.1785, simple_loss=0.2681, pruned_loss=0.04441, over 933179.32 frames. ], batch size: 11, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:14:22,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=211712.0, ans=0.125 +2024-07-28 23:14:23,467 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:14:27,868 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.574e+01 6.234e+01 6.810e+01 1.084e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:14:28,195 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.97 vs. limit=22.5 +2024-07-28 23:14:49,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=211765.33333333334, ans=0.0 +2024-07-28 23:14:53,031 INFO [train.py:1114] (0/4) Epoch 16, batch 5500, loss[loss=0.1667, simple_loss=0.2577, pruned_loss=0.03782, over 4145.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2667, pruned_loss=0.04402, over 930489.14 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:14:57,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211778.66666666666, ans=0.1 +2024-07-28 23:15:00,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=211792.0, ans=0.125 +2024-07-28 23:15:06,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=211805.33333333334, ans=0.125 +2024-07-28 23:15:22,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=211832.0, ans=0.125 +2024-07-28 23:15:28,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=211845.33333333334, ans=0.07 +2024-07-28 23:15:28,503 INFO [train.py:1114] (0/4) Epoch 16, batch 5550, loss[loss=0.1521, simple_loss=0.241, pruned_loss=0.03164, over 4703.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2672, pruned_loss=0.04428, over 932738.00 frames. ], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:35,045 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.06 vs. limit=15.0 +2024-07-28 23:15:37,925 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.698e+01 6.304e+01 7.513e+01 1.256e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 23:15:44,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211872.0, ans=0.1 +2024-07-28 23:15:47,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=211872.0, ans=0.125 +2024-07-28 23:15:47,696 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.70 vs. limit=15.0 +2024-07-28 23:15:59,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. 
limit=10.0 +2024-07-28 23:15:59,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.71 vs. limit=15.0 +2024-07-28 23:16:02,698 INFO [train.py:1114] (0/4) Epoch 16, batch 5600, loss[loss=0.1983, simple_loss=0.2922, pruned_loss=0.05224, over 4745.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2678, pruned_loss=0.04425, over 934048.24 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:21,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.35 vs. limit=12.0 +2024-07-28 23:16:26,627 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0 +2024-07-28 23:16:26,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=211938.66666666666, ans=0.0 +2024-07-28 23:16:27,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.23 vs. limit=6.0 +2024-07-28 23:16:49,031 INFO [train.py:1114] (0/4) Epoch 16, batch 5650, loss[loss=0.1837, simple_loss=0.2827, pruned_loss=0.04235, over 4572.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2667, pruned_loss=0.04396, over 936393.02 frames. ], batch size: 21, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:49,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=211978.66666666666, ans=0.2 +2024-07-28 23:16:51,493 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.86 vs. limit=6.0 +2024-07-28 23:16:58,579 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.506e+01 6.230e+01 6.941e+01 1.207e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 23:17:06,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=212005.33333333334, ans=0.125 +2024-07-28 23:17:14,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212018.66666666666, ans=0.125 +2024-07-28 23:17:16,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212032.0, ans=0.1 +2024-07-28 23:17:19,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=212032.0, ans=0.125 +2024-07-28 23:17:22,862 INFO [train.py:1114] (0/4) Epoch 16, batch 5700, loss[loss=0.2009, simple_loss=0.289, pruned_loss=0.05643, over 4687.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2669, pruned_loss=0.04407, over 937718.03 frames. 
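The `Whitening` entries (e.g. "metric=9.71 vs. limit=15.0") compare a per-module whitening metric of some internal activation against a fixed limit; a penalty is presumably applied only when the metric exceeds its limit, which is why these lines merely report the comparison. One plausible definition of such a metric is sketched below: it equals 1.0 when the feature covariance is a multiple of the identity (fully "white") and grows with eigenvalue spread. Whether `scaling.py` defines the metric exactly this way is an assumption:

```python
# Hypothetical reconstruction of the "Whitening: ... metric=X vs. limit=Y"
# checks in this log; the metric definition is an assumption.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels). Returns ~1.0 for identity-like
    covariance, larger values for more anisotropic features."""
    t, c = x.shape
    x = x.reshape(t, num_groups, c // num_groups).transpose(0, 1)  # (g, t, c/g)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / t                                # (g, c/g, c/g)
    diag = cov.diagonal(dim1=1, dim2=2).mean(dim=1)                # mean eigenvalue (trace/d)
    diag_sq = (cov @ cov).diagonal(dim1=1, dim2=2).mean(dim=1)     # mean squared eigenvalue
    return (diag_sq / diag.clamp(min=1e-20) ** 2).mean().item()

feats = torch.randn(3000, 512)             # stand-in for a module's output
metric = whitening_metric(feats, num_groups=1)
limit = 22.5
if metric > limit:
    pass  # scaling.py presumably applies a corrective gradient penalty here
```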
], batch size: 13, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:17:26,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212045.33333333334, ans=0.125 +2024-07-28 23:17:33,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212058.66666666666, ans=0.125 +2024-07-28 23:17:38,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212072.0, ans=0.1 +2024-07-28 23:17:38,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212072.0, ans=0.1 +2024-07-28 23:17:44,750 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212072.0, ans=0.125 +2024-07-28 23:17:52,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.46 vs. limit=22.5 +2024-07-28 23:17:55,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=212098.66666666666, ans=0.125 +2024-07-28 23:17:58,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212098.66666666666, ans=0.1 +2024-07-28 23:17:59,819 INFO [train.py:1114] (0/4) Epoch 16, batch 5750, loss[loss=0.1776, simple_loss=0.2721, pruned_loss=0.04154, over 4700.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.268, pruned_loss=0.04455, over 937800.64 frames. ], batch size: 19, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:18:01,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=212112.0, ans=0.0 +2024-07-28 23:18:04,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212112.0, ans=0.1 +2024-07-28 23:18:10,179 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.670e+01 6.129e+01 6.618e+01 9.069e+01, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 23:18:15,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212138.66666666666, ans=0.1 +2024-07-28 23:18:23,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=212138.66666666666, ans=0.125 +2024-07-28 23:18:26,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212152.0, ans=0.1 +2024-07-28 23:18:26,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=212152.0, ans=0.0 +2024-07-28 23:18:31,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212165.33333333334, ans=0.0 +2024-07-28 23:18:31,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.95 vs. 
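Throughout this log the three reported loss figures are mutually consistent with loss = 0.5 × simple_loss + pruned_loss, i.e. the printed `loss` is a weighted sum of the simple (linear) and pruned transducer losses with a simple-loss scale of 0.5, as in k2/icefall pruned-transducer recipes. The 0.5 scale is inferred from the numbers, not stated anywhere in the log; a quick check against the batch 5600 and 5700 totals above:

```python
# The combination inferred from the logged numbers; the 0.5 scale is
# deduced from this log, not read from the run's train.py.
def total_loss(simple_loss: float, pruned_loss: float,
               simple_loss_scale: float = 0.5) -> float:
    return simple_loss_scale * simple_loss + pruned_loss

assert abs(total_loss(0.2678, 0.04425) - 0.1781) < 1e-3  # batch 5600 above
assert abs(total_loss(0.2669, 0.04407) - 0.1775) < 1e-3  # batch 5700 above
```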
limit=12.0 +2024-07-28 23:18:32,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=212165.33333333334, ans=0.2 +2024-07-28 23:18:33,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=212165.33333333334, ans=0.0 +2024-07-28 23:18:36,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.45 vs. limit=15.0 +2024-07-28 23:18:38,600 INFO [train.py:1114] (0/4) Epoch 16, batch 5800, loss[loss=0.2298, simple_loss=0.3089, pruned_loss=0.07539, over 4714.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.269, pruned_loss=0.04514, over 937088.50 frames. ], batch size: 19, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:18:41,104 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.93 vs. limit=22.5 +2024-07-28 23:18:45,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=212192.0, ans=0.0 +2024-07-28 23:18:45,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=212192.0, ans=0.125 +2024-07-28 23:18:59,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=212218.66666666666, ans=0.125 +2024-07-28 23:19:10,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=212232.0, ans=0.125 +2024-07-28 23:19:12,013 INFO [train.py:1114] (0/4) Epoch 16, batch 5850, loss[loss=0.1795, simple_loss=0.2678, pruned_loss=0.04567, over 4456.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2685, pruned_loss=0.04468, over 937760.05 frames. ], batch size: 21, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:19,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=212258.66666666666, ans=0.0 +2024-07-28 23:19:20,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.33 vs. limit=15.0 +2024-07-28 23:19:21,302 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.632e+01 6.313e+01 6.909e+01 9.080e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 23:19:26,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212272.0, ans=0.125 +2024-07-28 23:19:34,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0 +2024-07-28 23:19:46,151 INFO [train.py:1114] (0/4) Epoch 16, batch 5900, loss[loss=0.1796, simple_loss=0.2776, pruned_loss=0.04076, over 4689.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2681, pruned_loss=0.04458, over 937611.40 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:54,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.74 vs. 
limit=10.0 +2024-07-28 23:19:57,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=212325.33333333334, ans=0.125 +2024-07-28 23:19:59,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.40 vs. limit=12.0 +2024-07-28 23:20:06,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212352.0, ans=0.125 +2024-07-28 23:20:10,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212352.0, ans=0.1 +2024-07-28 23:20:13,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212365.33333333334, ans=0.125 +2024-07-28 23:20:16,694 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=212365.33333333334, ans=0.2 +2024-07-28 23:20:19,844 INFO [train.py:1114] (0/4) Epoch 16, batch 5950, loss[loss=0.2327, simple_loss=0.3336, pruned_loss=0.06585, over 4676.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2685, pruned_loss=0.04425, over 939637.39 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:22,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212378.66666666666, ans=0.125 +2024-07-28 23:20:23,735 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.99 vs. limit=15.0 +2024-07-28 23:20:28,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=212392.0, ans=0.1 +2024-07-28 23:20:29,212 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.558e+01 6.099e+01 6.527e+01 9.669e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 23:20:41,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=212418.66666666666, ans=0.95 +2024-07-28 23:20:48,342 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. limit=10.0 +2024-07-28 23:20:51,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=212432.0, ans=0.0 +2024-07-28 23:20:55,251 INFO [train.py:1114] (0/4) Epoch 16, batch 6000, loss[loss=0.2039, simple_loss=0.2993, pruned_loss=0.05426, over 4231.00 frames. ], tot_loss[loss=0.179, simple_loss=0.269, pruned_loss=0.04445, over 937386.37 frames. ], batch size: 25, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:55,252 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-28 23:21:07,048 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1625, simple_loss=0.2653, pruned_loss=0.02984, over 944034.00 frames. +2024-07-28 23:21:07,049 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-28 23:21:07,448 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.39 vs. 
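At batch 6000 above, training pauses: the log switches to "Computing validation loss", reports a validation loss over 944034 frames, and prints the peak CUDA memory (4178MB) before resuming. A sketch of such a periodic validation hook is below; the interval, helper names, and signature are assumptions, not icefall's actual `train.py`:

```python
# Hypothetical sketch of the periodic validation step seen at batch 6000.
import logging
import torch

def maybe_validate(model, valid_loader, compute_loss, batch_idx,
                   valid_interval=3000, device="cuda"):
    """Run validation every `valid_interval` batches and report peak memory.
    The interval value is assumed."""
    if batch_idx == 0 or batch_idx % valid_interval != 0:
        return
    logging.info("Computing validation loss")
    model.eval()
    total, frames = 0.0, 0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)
            total += loss.item() * num_frames
            frames += num_frames
    model.train()
    logging.info(f"validation: loss={total / frames:.4f}, over {frames} frames")
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    logging.info(f"Maximum memory allocated so far is {peak_mb}MB")
```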
limit=15.0 +2024-07-28 23:21:17,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=212458.66666666666, ans=0.125 +2024-07-28 23:21:20,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212472.0, ans=0.125 +2024-07-28 23:21:24,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=212472.0, ans=0.125 +2024-07-28 23:21:26,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=212485.33333333334, ans=0.025 +2024-07-28 23:21:40,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212512.0, ans=0.1 +2024-07-28 23:21:40,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.88 vs. limit=15.0 +2024-07-28 23:21:41,056 INFO [train.py:1114] (0/4) Epoch 16, batch 6050, loss[loss=0.1812, simple_loss=0.2737, pruned_loss=0.04435, over 4778.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04399, over 938770.16 frames. ], batch size: 12, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:21:57,158 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.490e+01 6.163e+01 6.956e+01 9.204e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 23:21:59,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=212525.33333333334, ans=0.125 +2024-07-28 23:22:04,511 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-28 23:22:10,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=212552.0, ans=0.125 +2024-07-28 23:22:20,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=212565.33333333334, ans=0.125 +2024-07-28 23:22:21,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=212565.33333333334, ans=0.09899494936611666 +2024-07-28 23:22:24,105 INFO [train.py:1114] (0/4) Epoch 16, batch 6100, loss[loss=0.1776, simple_loss=0.2762, pruned_loss=0.03946, over 4668.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2676, pruned_loss=0.0437, over 938018.28 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:22:28,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=212578.66666666666, ans=0.0 +2024-07-28 23:23:00,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=212605.33333333334, ans=0.125 +2024-07-28 23:23:02,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=212618.66666666666, ans=0.0 +2024-07-28 23:23:06,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.01 vs. 
limit=15.0 +2024-07-28 23:23:20,113 INFO [train.py:1114] (0/4) Epoch 16, batch 6150, loss[loss=0.2418, simple_loss=0.3223, pruned_loss=0.08064, over 3331.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2677, pruned_loss=0.04391, over 937029.74 frames. ], batch size: 35, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:27,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212658.66666666666, ans=0.1 +2024-07-28 23:23:29,821 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.196e+01 5.601e+01 6.236e+01 7.046e+01 1.205e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:23:30,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212658.66666666666, ans=0.0 +2024-07-28 23:23:31,563 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0 +2024-07-28 23:23:34,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.69 vs. limit=22.5 +2024-07-28 23:23:41,440 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0 +2024-07-28 23:23:53,697 INFO [train.py:1114] (0/4) Epoch 16, batch 6200, loss[loss=0.1742, simple_loss=0.2724, pruned_loss=0.03804, over 4741.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2674, pruned_loss=0.04372, over 936362.08 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:53,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212712.0, ans=0.125 +2024-07-28 23:24:06,931 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=6.0 +2024-07-28 23:24:08,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212738.66666666666, ans=0.125 +2024-07-28 23:24:19,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212752.0, ans=0.1 +2024-07-28 23:24:25,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=212765.33333333334, ans=0.025 +2024-07-28 23:24:26,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=212765.33333333334, ans=0.2 +2024-07-28 23:24:32,616 INFO [train.py:1114] (0/4) Epoch 16, batch 6250, loss[loss=0.1771, simple_loss=0.2753, pruned_loss=0.03942, over 4806.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2678, pruned_loss=0.04411, over 932789.91 frames. 
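In every `optim.py` WARNING in this log, the reported threshold equals `Clipping_scale` times the median grad-norm (e.g. 2.0 × 6.236e+01 = 1.247e+02 in the warning above), and the five quartile values read as min/25%/50%/75%/max of recent gradient norms; `percent-clipped` then counts how often the current norm exceeded that threshold. A reconstruction under those observations follows; the size of the history window over which quartiles are taken is an assumption:

```python
# Sketch of quartile-based gradient clipping matching the optim.py WARNINGs:
# threshold = clipping_scale * median of recent grad norms. Window size is
# an assumption.
import torch

def clip_grads_by_median(parameters, norm_history,
                         clipping_scale=2.0, window=128):
    grads = [p.grad for p in parameters if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.detach().norm() for g in grads]))
    norm_history.append(total_norm.item())
    recent = torch.tensor(norm_history[-window:])
    q = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()      # Clipping_scale x median
    if total_norm.item() > threshold:             # counts toward percent-clipped
        for g in grads:
            g.detach().mul_(threshold / total_norm.item())
    return q.tolist(), threshold
```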
], batch size: 14, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:24:36,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212778.66666666666, ans=0.125 +2024-07-28 23:24:53,133 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.825e+01 6.547e+01 7.445e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-28 23:25:06,812 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.24 vs. limit=10.0 +2024-07-28 23:25:11,139 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=212805.33333333334, ans=0.0 +2024-07-28 23:25:13,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=212818.66666666666, ans=0.015 +2024-07-28 23:25:14,147 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-07-28 23:25:34,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=212832.0, ans=0.0 +2024-07-28 23:25:38,075 INFO [train.py:1114] (0/4) Epoch 16, batch 6300, loss[loss=0.1478, simple_loss=0.2417, pruned_loss=0.02691, over 4532.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2683, pruned_loss=0.04445, over 929865.34 frames. ], batch size: 10, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:25:41,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=12.0 +2024-07-28 23:25:50,346 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.21 vs. limit=10.0 +2024-07-28 23:25:54,764 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=212858.66666666666, ans=0.125 +2024-07-28 23:26:03,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=212872.0, ans=0.09899494936611666 +2024-07-28 23:26:18,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=212898.66666666666, ans=0.2 +2024-07-28 23:26:20,532 INFO [train.py:1114] (0/4) Epoch 16, batch 6350, loss[loss=0.1954, simple_loss=0.3077, pruned_loss=0.04159, over 4455.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2678, pruned_loss=0.04422, over 933987.79 frames. ], batch size: 21, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:26:27,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.23 vs. 
limit=15.0 +2024-07-28 23:26:45,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.666e+01 6.076e+01 6.815e+01 1.142e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 23:26:49,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=212938.66666666666, ans=0.0 +2024-07-28 23:26:54,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212938.66666666666, ans=0.125 +2024-07-28 23:27:00,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=212952.0, ans=0.0 +2024-07-28 23:27:11,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212965.33333333334, ans=0.125 +2024-07-28 23:27:14,292 INFO [train.py:1114] (0/4) Epoch 16, batch 6400, loss[loss=0.2231, simple_loss=0.3149, pruned_loss=0.06562, over 4639.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2674, pruned_loss=0.04433, over 934834.80 frames. ], batch size: 13, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:27:15,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=212978.66666666666, ans=0.025 +2024-07-28 23:27:17,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=212978.66666666666, ans=0.07 +2024-07-28 23:27:21,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212992.0, ans=0.1 +2024-07-28 23:27:28,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=212992.0, ans=0.125 +2024-07-28 23:27:44,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213032.0, ans=0.1 +2024-07-28 23:27:51,957 INFO [train.py:1114] (0/4) Epoch 16, batch 6450, loss[loss=0.1882, simple_loss=0.2871, pruned_loss=0.04461, over 4512.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2689, pruned_loss=0.04493, over 938497.43 frames. ], batch size: 21, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:27:56,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213045.33333333334, ans=0.125 +2024-07-28 23:27:57,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213045.33333333334, ans=0.1 +2024-07-28 23:28:00,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=213045.33333333334, ans=0.2 +2024-07-28 23:28:08,736 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.830e+01 6.533e+01 7.899e+01 1.104e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 23:28:19,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.12 vs. limit=6.0 +2024-07-28 23:28:35,511 INFO [train.py:1114] (0/4) Epoch 16, batch 6500, loss[loss=0.2319, simple_loss=0.3035, pruned_loss=0.08012, over 3168.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2684, pruned_loss=0.0445, over 939579.25 frames. 
], batch size: 35, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:28:46,439 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:28:47,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=213125.33333333334, ans=0.0 +2024-07-28 23:29:03,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=213138.66666666666, ans=0.125 +2024-07-28 23:29:09,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213152.0, ans=0.1 +2024-07-28 23:29:19,566 INFO [train.py:1114] (0/4) Epoch 16, batch 6550, loss[loss=0.1593, simple_loss=0.2454, pruned_loss=0.03665, over 4811.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.04438, over 943002.68 frames. ], batch size: 11, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:29:19,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=213178.66666666666, ans=0.125 +2024-07-28 23:29:20,614 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.56 vs. limit=15.0 +2024-07-28 23:29:30,728 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.22 vs. limit=15.0 +2024-07-28 23:29:31,657 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.761e+01 6.311e+01 7.241e+01 1.321e+02, threshold=1.262e+02, percent-clipped=1.0 +2024-07-28 23:29:31,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=213192.0, ans=0.0 +2024-07-28 23:29:35,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213205.33333333334, ans=0.1 +2024-07-28 23:29:53,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.85 vs. limit=22.5 +2024-07-28 23:29:59,647 INFO [train.py:1114] (0/4) Epoch 16, batch 6600, loss[loss=0.1724, simple_loss=0.276, pruned_loss=0.03441, over 4938.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04382, over 944886.42 frames. ], batch size: 14, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:30:08,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0 +2024-07-28 23:30:09,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=213258.66666666666, ans=0.0 +2024-07-28 23:30:09,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213258.66666666666, ans=0.1 +2024-07-28 23:30:35,779 INFO [train.py:1114] (0/4) Epoch 16, batch 6650, loss[loss=0.1957, simple_loss=0.2978, pruned_loss=0.04682, over 4619.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2679, pruned_loss=0.04388, over 943539.01 frames. 
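The `WithLoss` entries (e.g. "WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00") suggest an identity operation that attaches an auxiliary loss to an internal activation such as attention weights, contributing only through the backward pass and logging the accumulated value; the constant 0.000e+00 sums indicate the penalty is currently inactive. A hedged sketch of that mechanism, not a copy of `scaling.py`:

```python
# Hypothetical sketch of the "WithLoss: name=..., loss-sum=..." entries:
# identity in forward, auxiliary-loss gradient injected in backward.
import logging
import torch

class WithLoss(torch.autograd.Function):
    @staticmethod
    def forward(ctx, x, loss_fn, name):
        ctx.loss_fn, ctx.name = loss_fn, name
        ctx.save_for_backward(x)
        return x  # identity in the forward direction

    @staticmethod
    def backward(ctx, grad_out):
        (x,) = ctx.saved_tensors
        with torch.enable_grad():
            xd = x.detach().requires_grad_(True)
            loss = ctx.loss_fn(xd)                      # auxiliary penalty
            (extra_grad,) = torch.autograd.grad(loss, xd)
        logging.info(f"WithLoss: name={ctx.name}, loss-sum={loss.item():.3e}")
        return grad_out + extra_grad, None, None

# attention weights that currently incur zero penalty, matching the
# constant loss-sum=0.000e+00 entries in this log
attn = torch.softmax(torch.randn(4, 10, 10, requires_grad=True), dim=-1)
out = WithLoss.apply(attn, lambda w: (w * 0.0).sum(), "self_attn_weights")
out.sum().backward()  # logs: WithLoss: name=self_attn_weights, loss-sum=0.000e+00
```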
], batch size: 17, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:30:38,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=213312.0, ans=0.125 +2024-07-28 23:30:40,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213312.0, ans=0.1 +2024-07-28 23:30:44,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213325.33333333334, ans=0.125 +2024-07-28 23:30:45,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=213325.33333333334, ans=0.125 +2024-07-28 23:30:46,063 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+01 5.740e+01 6.263e+01 6.841e+01 9.907e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 23:30:46,335 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-160000.pt +2024-07-28 23:30:56,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-07-28 23:30:59,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213352.0, ans=0.0 +2024-07-28 23:31:07,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=213365.33333333334, ans=0.0 +2024-07-28 23:31:11,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213378.66666666666, ans=0.0 +2024-07-28 23:31:11,676 INFO [train.py:1114] (0/4) Epoch 16, batch 6700, loss[loss=0.1527, simple_loss=0.2438, pruned_loss=0.03086, over 4704.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2693, pruned_loss=0.04432, over 942498.08 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:31:12,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=213378.66666666666, ans=0.125 +2024-07-28 23:31:23,679 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:31:31,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=15.0 +2024-07-28 23:31:40,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=213432.0, ans=0.2 +2024-07-28 23:31:40,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=213432.0, ans=0.0 +2024-07-28 23:31:44,990 INFO [train.py:1114] (0/4) Epoch 16, batch 6750, loss[loss=0.2066, simple_loss=0.2962, pruned_loss=0.05851, over 4226.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.0444, over 940361.64 frames. 
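The checkpoint line above ("Saving checkpoint to zipformer/libri/exp/checkpoint-160000.pt") names the file by global batch count rather than by epoch, i.e. a save triggered every fixed number of training batches, and presumably only on rank 0 of the 4 workers given the "(0/4)" prefix on every line. A sketch under those assumptions; the interval and the saved fields are guesses:

```python
# Sketch of batch-count-keyed checkpointing matching the filename pattern
# checkpoint-<global_batch>.pt. Interval and checkpoint contents assumed.
from pathlib import Path
import torch

def maybe_save_checkpoint(model, optimizer, global_batch, rank,
                          exp_dir="zipformer/libri/exp", save_every=20000):
    if rank != 0 or global_batch == 0 or global_batch % save_every != 0:
        return None
    path = Path(exp_dir) / f"checkpoint-{global_batch}.pt"
    torch.save(
        {"model": model.state_dict(),
         "optimizer": optimizer.state_dict(),
         "batch": global_batch},
        path,
    )
    return path
```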
], batch size: 25, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:31:47,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213445.33333333334, ans=0.1 +2024-07-28 23:31:49,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=213445.33333333334, ans=0.2 +2024-07-28 23:31:56,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.79 vs. limit=15.0 +2024-07-28 23:31:59,718 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.135e+01 5.547e+01 6.307e+01 7.303e+01 1.020e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 23:32:05,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=213472.0, ans=0.025 +2024-07-28 23:32:13,352 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0 +2024-07-28 23:33:04,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.26 vs. limit=15.0 +2024-07-28 23:33:06,731 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=12.0 +2024-07-28 23:33:47,755 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=12.0 +2024-07-28 23:33:52,025 INFO [train.py:1114] (0/4) Epoch 16, batch 6800, loss[loss=0.1705, simple_loss=0.2681, pruned_loss=0.03647, over 4631.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2686, pruned_loss=0.04415, over 938621.38 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:33:52,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=213512.0, ans=0.0 +2024-07-28 23:33:58,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213525.33333333334, ans=0.1 +2024-07-28 23:34:02,662 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.71 vs. limit=22.5 +2024-07-28 23:34:14,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=213538.66666666666, ans=0.2 +2024-07-28 23:34:15,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=213538.66666666666, ans=0.1 +2024-07-28 23:34:26,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213565.33333333334, ans=0.1 +2024-07-28 23:34:31,087 INFO [train.py:1114] (0/4) Epoch 16, batch 6850, loss[loss=0.1513, simple_loss=0.2409, pruned_loss=0.03084, over 4689.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2679, pruned_loss=0.04383, over 940227.59 frames. 
], batch size: 13, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:34:39,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=213592.0, ans=0.05 +2024-07-28 23:34:40,944 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.896e+01 6.305e+01 7.215e+01 1.193e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 23:34:48,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=213605.33333333334, ans=0.125 +2024-07-28 23:34:49,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=213605.33333333334, ans=0.125 +2024-07-28 23:35:04,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=15.0 +2024-07-28 23:35:04,748 INFO [train.py:1114] (0/4) Epoch 16, batch 6900, loss[loss=0.2031, simple_loss=0.2936, pruned_loss=0.05627, over 4965.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04378, over 942579.05 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:35:10,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=213645.33333333334, ans=0.04949747468305833 +2024-07-28 23:35:18,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213658.66666666666, ans=0.125 +2024-07-28 23:35:24,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=213672.0, ans=0.125 +2024-07-28 23:35:28,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=213685.33333333334, ans=0.95 +2024-07-28 23:35:32,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=213685.33333333334, ans=0.125 +2024-07-28 23:35:34,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213698.66666666666, ans=0.1 +2024-07-28 23:35:39,960 INFO [train.py:1114] (0/4) Epoch 16, batch 6950, loss[loss=0.1579, simple_loss=0.2471, pruned_loss=0.03433, over 4484.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2691, pruned_loss=0.04444, over 939873.62 frames. ], batch size: 10, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:35:50,017 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.740e+01 6.194e+01 7.107e+01 9.358e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 23:35:50,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=213725.33333333334, ans=0.125 +2024-07-28 23:35:51,212 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.41 vs. 
limit=15.0 +2024-07-28 23:35:53,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213738.66666666666, ans=0.125 +2024-07-28 23:35:55,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213738.66666666666, ans=0.1 +2024-07-28 23:36:08,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=213752.0, ans=0.125 +2024-07-28 23:36:14,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.63 vs. limit=22.5 +2024-07-28 23:36:14,939 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:36:17,390 INFO [train.py:1114] (0/4) Epoch 16, batch 7000, loss[loss=0.2004, simple_loss=0.288, pruned_loss=0.05643, over 4621.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2678, pruned_loss=0.0442, over 938553.81 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:36:19,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=213778.66666666666, ans=0.125 +2024-07-28 23:36:29,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=213805.33333333334, ans=0.0 +2024-07-28 23:36:38,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213818.66666666666, ans=0.0 +2024-07-28 23:36:41,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=213818.66666666666, ans=0.125 +2024-07-28 23:36:42,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213818.66666666666, ans=0.125 +2024-07-28 23:36:44,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=213832.0, ans=0.035 +2024-07-28 23:36:50,068 INFO [train.py:1114] (0/4) Epoch 16, batch 7050, loss[loss=0.2007, simple_loss=0.2867, pruned_loss=0.05729, over 4679.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2676, pruned_loss=0.04409, over 941770.51 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:36:52,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=213845.33333333334, ans=22.5 +2024-07-28 23:36:52,725 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. 
limit=6.0 +2024-07-28 23:36:53,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=213845.33333333334, ans=0.125 +2024-07-28 23:36:53,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=213845.33333333334, ans=0.125 +2024-07-28 23:36:59,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=213858.66666666666, ans=0.0 +2024-07-28 23:36:59,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=213858.66666666666, ans=0.0 +2024-07-28 23:37:00,889 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.658e+01 6.254e+01 7.324e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 23:37:01,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=213858.66666666666, ans=0.95 +2024-07-28 23:37:01,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=213858.66666666666, ans=0.125 +2024-07-28 23:37:07,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=213872.0, ans=0.0 +2024-07-28 23:37:19,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=213898.66666666666, ans=0.05 +2024-07-28 23:37:24,462 INFO [train.py:1114] (0/4) Epoch 16, batch 7100, loss[loss=0.1956, simple_loss=0.2931, pruned_loss=0.04902, over 4802.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2687, pruned_loss=0.04471, over 936586.64 frames. ], batch size: 15, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:37:37,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=213925.33333333334, ans=0.0 +2024-07-28 23:37:46,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=213952.0, ans=0.125 +2024-07-28 23:37:49,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=15.0 +2024-07-28 23:37:53,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.46 vs. limit=15.0 +2024-07-28 23:37:57,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=213965.33333333334, ans=0.125 +2024-07-28 23:37:59,200 INFO [train.py:1114] (0/4) Epoch 16, batch 7150, loss[loss=0.1852, simple_loss=0.2795, pruned_loss=0.04549, over 4594.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2669, pruned_loss=0.04383, over 937902.85 frames. 
], batch size: 21, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:38:00,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=213978.66666666666, ans=0.0 +2024-07-28 23:38:04,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=213978.66666666666, ans=0.125 +2024-07-28 23:38:06,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213992.0, ans=0.1 +2024-07-28 23:38:08,738 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.615e+01 6.266e+01 7.149e+01 9.915e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 23:38:10,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=213992.0, ans=0.125 +2024-07-28 23:38:12,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214005.33333333334, ans=0.1 +2024-07-28 23:38:15,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214005.33333333334, ans=0.125 +2024-07-28 23:38:22,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=6.0 +2024-07-28 23:38:23,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=214018.66666666666, ans=0.125 +2024-07-28 23:38:23,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.27 vs. limit=15.0 +2024-07-28 23:38:25,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=214032.0, ans=0.0 +2024-07-28 23:38:31,552 INFO [train.py:1114] (0/4) Epoch 16, batch 7200, loss[loss=0.2107, simple_loss=0.2847, pruned_loss=0.06838, over 4797.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2678, pruned_loss=0.04455, over 937803.21 frames. ], batch size: 15, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:38:35,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214045.33333333334, ans=0.125 +2024-07-28 23:38:48,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214072.0, ans=0.125 +2024-07-28 23:39:05,754 INFO [train.py:1114] (0/4) Epoch 16, batch 7250, loss[loss=0.1515, simple_loss=0.2223, pruned_loss=0.04036, over 4857.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2679, pruned_loss=0.04448, over 939622.81 frames. 
], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:39:07,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214112.0, ans=0.1 +2024-07-28 23:39:11,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=214125.33333333334, ans=0.125 +2024-07-28 23:39:15,401 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.553e+01 6.069e+01 6.578e+01 8.706e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:39:38,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=15.0 +2024-07-28 23:39:52,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214165.33333333334, ans=0.0 +2024-07-28 23:39:56,662 INFO [train.py:1114] (0/4) Epoch 16, batch 7300, loss[loss=0.1813, simple_loss=0.2682, pruned_loss=0.04718, over 4853.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2669, pruned_loss=0.04371, over 940179.94 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:40:04,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=214192.0, ans=0.125 +2024-07-28 23:40:07,567 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:40:15,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=214205.33333333334, ans=0.2 +2024-07-28 23:40:18,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.71 vs. limit=15.0 +2024-07-28 23:40:21,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=214218.66666666666, ans=0.0 +2024-07-28 23:40:30,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=214245.33333333334, ans=0.125 +2024-07-28 23:40:30,742 INFO [train.py:1114] (0/4) Epoch 16, batch 7350, loss[loss=0.179, simple_loss=0.262, pruned_loss=0.04796, over 4641.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2674, pruned_loss=0.04384, over 939577.03 frames. 
], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:40:37,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=214258.66666666666, ans=0.125 +2024-07-28 23:40:39,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214258.66666666666, ans=0.1 +2024-07-28 23:40:41,014 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.572e+01 6.176e+01 6.846e+01 9.880e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 23:40:42,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=214258.66666666666, ans=0.2 +2024-07-28 23:40:55,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214285.33333333334, ans=0.1 +2024-07-28 23:40:57,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214285.33333333334, ans=0.125 +2024-07-28 23:40:59,013 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:41:10,938 INFO [train.py:1114] (0/4) Epoch 16, batch 7400, loss[loss=0.1973, simple_loss=0.2786, pruned_loss=0.05801, over 4699.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2677, pruned_loss=0.04365, over 940736.72 frames. ], batch size: 13, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:41:15,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214312.0, ans=0.125 +2024-07-28 23:41:16,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=214312.0, ans=0.2 +2024-07-28 23:41:16,536 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:41:32,544 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.08 vs. limit=15.0 +2024-07-28 23:41:36,981 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.01 vs. limit=15.0 +2024-07-28 23:41:42,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=214352.0, ans=0.025 +2024-07-28 23:41:45,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=214365.33333333334, ans=0.2 +2024-07-28 23:41:50,691 INFO [train.py:1114] (0/4) Epoch 16, batch 7450, loss[loss=0.1529, simple_loss=0.2343, pruned_loss=0.03571, over 4612.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2669, pruned_loss=0.04321, over 938161.04 frames. ], batch size: 11, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:41:51,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=214378.66666666666, ans=0.0 +2024-07-28 23:41:53,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.66 vs. 
limit=12.0 +2024-07-28 23:42:02,407 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.840e+01 6.491e+01 7.591e+01 1.266e+02, threshold=1.298e+02, percent-clipped=1.0 +2024-07-28 23:42:06,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214405.33333333334, ans=0.125 +2024-07-28 23:42:07,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.77 vs. limit=10.0 +2024-07-28 23:42:12,139 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-28 23:42:25,719 INFO [train.py:1114] (0/4) Epoch 16, batch 7500, loss[loss=0.1861, simple_loss=0.2713, pruned_loss=0.05048, over 3458.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.267, pruned_loss=0.04341, over 936691.08 frames. ], batch size: 35, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:42:26,754 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-28 23:50:19,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=214445.33333333334, ans=0.025 +2024-07-28 23:50:47,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=214498.66666666666, ans=0.0 +2024-07-28 23:50:48,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=214498.66666666666, ans=0.125 +2024-07-28 23:50:51,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=214498.66666666666, ans=0.95 +2024-07-28 23:50:51,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=214498.66666666666, ans=10.0 +2024-07-28 23:50:53,143 INFO [train.py:1114] (0/4) Epoch 16, batch 7550, loss[loss=0.1886, simple_loss=0.2844, pruned_loss=0.0464, over 4609.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.04447, over 936482.91 frames. ], batch size: 17, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:51:02,812 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.493e+01 6.004e+01 6.763e+01 8.407e+01, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 23:51:08,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214538.66666666666, ans=0.125 +2024-07-28 23:51:14,244 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=214552.0, ans=0.025 +2024-07-28 23:51:14,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=12.0 +2024-07-28 23:51:23,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=214565.33333333334, ans=0.125 +2024-07-28 23:51:25,722 INFO [train.py:1114] (0/4) Epoch 16, batch 7600, loss[loss=0.19, simple_loss=0.2817, pruned_loss=0.04915, over 4822.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2694, pruned_loss=0.04444, over 938449.87 frames. 
], batch size: 14, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:51:27,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=214578.66666666666, ans=0.2 +2024-07-28 23:51:31,452 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=214592.0, ans=10.0 +2024-07-28 23:51:31,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=15.0 +2024-07-28 23:51:32,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.17 vs. limit=22.5 +2024-07-28 23:51:35,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.15 vs. limit=10.0 +2024-07-28 23:51:37,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214592.0, ans=0.125 +2024-07-28 23:51:44,475 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:51:47,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=214618.66666666666, ans=0.0 +2024-07-28 23:52:20,221 INFO [train.py:1114] (0/4) Epoch 16, batch 7650, loss[loss=0.159, simple_loss=0.2385, pruned_loss=0.03975, over 4935.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2702, pruned_loss=0.04494, over 937176.29 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:52:30,298 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.652e+01 6.093e+01 6.907e+01 1.144e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 23:52:41,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=214685.33333333334, ans=0.125 +2024-07-28 23:52:43,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=214685.33333333334, ans=0.125 +2024-07-28 23:52:43,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=214685.33333333334, ans=0.025 +2024-07-28 23:52:43,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=214685.33333333334, ans=0.05 +2024-07-28 23:52:49,157 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:52:53,664 INFO [train.py:1114] (0/4) Epoch 16, batch 7700, loss[loss=0.1852, simple_loss=0.2804, pruned_loss=0.04501, over 4700.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2704, pruned_loss=0.04489, over 934136.11 frames. 
], batch size: 13, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:53:00,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214725.33333333334, ans=0.125 +2024-07-28 23:53:05,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=214725.33333333334, ans=0.2 +2024-07-28 23:53:09,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=214738.66666666666, ans=0.125 +2024-07-28 23:53:20,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=214765.33333333334, ans=0.125 +2024-07-28 23:53:22,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214765.33333333334, ans=0.0 +2024-07-28 23:53:25,965 INFO [train.py:1114] (0/4) Epoch 16, batch 7750, loss[loss=0.1716, simple_loss=0.2511, pruned_loss=0.04606, over 4926.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2712, pruned_loss=0.04511, over 935093.56 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:53:26,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214778.66666666666, ans=0.125 +2024-07-28 23:53:46,034 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:53:46,521 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.451e+01 5.576e+01 5.953e+01 6.432e+01 8.446e+01, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 23:53:47,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214792.0, ans=0.1 +2024-07-28 23:53:49,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=214805.33333333334, ans=0.0 +2024-07-28 23:53:53,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214805.33333333334, ans=0.1 +2024-07-28 23:53:54,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-07-28 23:53:55,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=214805.33333333334, ans=0.2 +2024-07-28 23:53:59,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214818.66666666666, ans=0.1 +2024-07-28 23:54:06,290 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0 +2024-07-28 23:54:07,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=214832.0, ans=0.0 +2024-07-28 23:54:09,346 INFO [train.py:1114] (0/4) Epoch 16, batch 7800, loss[loss=0.1573, simple_loss=0.2562, pruned_loss=0.02919, over 4662.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2697, pruned_loss=0.04413, over 936984.26 frames. 
], batch size: 14, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:54:10,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=214845.33333333334, ans=0.025 +2024-07-28 23:54:16,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214858.66666666666, ans=0.125 +2024-07-28 23:54:24,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214872.0, ans=0.1 +2024-07-28 23:54:42,823 INFO [train.py:1114] (0/4) Epoch 16, batch 7850, loss[loss=0.1753, simple_loss=0.2525, pruned_loss=0.04902, over 4522.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2691, pruned_loss=0.04372, over 935880.60 frames. ], batch size: 10, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:54:42,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214912.0, ans=0.125 +2024-07-28 23:54:52,701 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.575e+01 6.196e+01 7.184e+01 1.116e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 23:55:04,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=214938.66666666666, ans=0.125 +2024-07-28 23:55:09,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=214938.66666666666, ans=0.2 +2024-07-28 23:55:24,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=214952.0, ans=0.0 +2024-07-28 23:55:28,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=214965.33333333334, ans=0.025 +2024-07-28 23:55:32,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=214965.33333333334, ans=0.2 +2024-07-28 23:55:40,815 INFO [train.py:1114] (0/4) Epoch 16, batch 7900, loss[loss=0.1584, simple_loss=0.2507, pruned_loss=0.03302, over 4871.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2713, pruned_loss=0.04455, over 933252.37 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:55:45,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214978.66666666666, ans=0.125 +2024-07-28 23:55:51,217 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0 +2024-07-28 23:56:01,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=215005.33333333334, ans=12.0 +2024-07-28 23:56:01,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=215005.33333333334, ans=0.125 +2024-07-28 23:56:10,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=215032.0, ans=0.2 +2024-07-28 23:56:13,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.72 vs. 
limit=10.0 +2024-07-28 23:56:19,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=215032.0, ans=0.0 +2024-07-28 23:56:21,108 INFO [train.py:1114] (0/4) Epoch 16, batch 7950, loss[loss=0.2058, simple_loss=0.2845, pruned_loss=0.06354, over 3319.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2701, pruned_loss=0.04426, over 935452.05 frames. ], batch size: 36, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:56:47,561 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:56:52,124 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.562e+01 6.109e+01 6.836e+01 1.076e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 23:57:05,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215085.33333333334, ans=0.1 +2024-07-28 23:57:39,492 INFO [train.py:1114] (0/4) Epoch 16, batch 8000, loss[loss=0.1546, simple_loss=0.2392, pruned_loss=0.03504, over 4616.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2686, pruned_loss=0.04421, over 934963.56 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:57:44,974 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=215112.0, ans=0.125 +2024-07-28 23:57:45,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215125.33333333334, ans=0.1 +2024-07-28 23:57:52,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=215138.66666666666, ans=0.125 +2024-07-28 23:57:53,372 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=215138.66666666666, ans=0.0 +2024-07-28 23:58:11,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=215165.33333333334, ans=0.125 +2024-07-28 23:58:11,706 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:58:13,491 INFO [train.py:1114] (0/4) Epoch 16, batch 8050, loss[loss=0.1659, simple_loss=0.2647, pruned_loss=0.03356, over 4800.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2693, pruned_loss=0.04444, over 934725.53 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:58:20,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=215192.0, ans=0.04949747468305833 +2024-07-28 23:58:24,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.579e+01 6.307e+01 7.164e+01 1.118e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 23:58:48,140 INFO [train.py:1114] (0/4) Epoch 16, batch 8100, loss[loss=0.2083, simple_loss=0.3046, pruned_loss=0.05606, over 4812.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2707, pruned_loss=0.04526, over 933974.47 frames. 
], batch size: 15, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:58:55,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=215258.66666666666, ans=0.05 +2024-07-28 23:59:00,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215272.0, ans=0.1 +2024-07-28 23:59:01,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=215272.0, ans=0.1 +2024-07-28 23:59:09,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=215285.33333333334, ans=0.125 +2024-07-28 23:59:22,334 INFO [train.py:1114] (0/4) Epoch 16, batch 8150, loss[loss=0.1943, simple_loss=0.286, pruned_loss=0.0513, over 4808.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2698, pruned_loss=0.04469, over 937469.06 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:59:22,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=215312.0, ans=0.025 +2024-07-28 23:59:31,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=215325.33333333334, ans=0.0 +2024-07-28 23:59:32,146 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.614e+01 6.330e+01 7.419e+01 1.009e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 23:59:34,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=215325.33333333334, ans=0.1 +2024-07-28 23:59:36,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=215338.66666666666, ans=0.125 +2024-07-28 23:59:50,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=215365.33333333334, ans=0.125 +2024-07-28 23:59:50,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=215365.33333333334, ans=0.2 +2024-07-28 23:59:54,895 INFO [train.py:1114] (0/4) Epoch 16, batch 8200, loss[loss=0.2102, simple_loss=0.3034, pruned_loss=0.05849, over 4808.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2699, pruned_loss=0.0444, over 938731.07 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:59:58,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=215378.66666666666, ans=0.0 +2024-07-29 00:00:00,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=215378.66666666666, ans=0.2 +2024-07-29 00:00:12,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0 +2024-07-29 00:00:29,078 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5 +2024-07-29 00:00:30,616 INFO [train.py:1114] (0/4) Epoch 16, batch 8250, loss[loss=0.16, simple_loss=0.2463, pruned_loss=0.03682, over 4887.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2691, pruned_loss=0.04401, over 938802.98 frames. 
], batch size: 13, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:00:33,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215445.33333333334, ans=0.125 +2024-07-29 00:00:48,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.664e+01 6.137e+01 6.796e+01 1.110e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 00:01:04,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=215485.33333333334, ans=0.125 +2024-07-29 00:01:06,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215498.66666666666, ans=0.125 +2024-07-29 00:01:13,399 INFO [train.py:1114] (0/4) Epoch 16, batch 8300, loss[loss=0.1881, simple_loss=0.2795, pruned_loss=0.04833, over 4903.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2697, pruned_loss=0.04426, over 938770.04 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:01:17,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=215512.0, ans=0.1 +2024-07-29 00:02:38,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215525.33333333334, ans=0.125 +2024-07-29 00:02:41,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215538.66666666666, ans=0.1 +2024-07-29 00:03:00,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=215552.0, ans=0.0 +2024-07-29 00:03:03,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=215552.0, ans=0.125 +2024-07-29 00:03:23,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=215565.33333333334, ans=0.0 +2024-07-29 00:03:23,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=215565.33333333334, ans=0.125 +2024-07-29 00:03:26,034 INFO [train.py:1114] (0/4) Epoch 16, batch 8350, loss[loss=0.2146, simple_loss=0.31, pruned_loss=0.05958, over 4787.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2696, pruned_loss=0.04401, over 941467.46 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:03:29,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215578.66666666666, ans=0.1 +2024-07-29 00:03:30,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. 
limit=15.0 +2024-07-29 00:03:39,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=215592.0, ans=0.0 +2024-07-29 00:03:39,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.677e+01 6.151e+01 6.738e+01 9.364e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 00:03:43,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215605.33333333334, ans=0.125 +2024-07-29 00:04:00,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=215632.0, ans=0.0 +2024-07-29 00:04:04,284 INFO [train.py:1114] (0/4) Epoch 16, batch 8400, loss[loss=0.1442, simple_loss=0.2243, pruned_loss=0.03205, over 4770.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2692, pruned_loss=0.04438, over 940287.29 frames. ], batch size: 12, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:04:27,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=215685.33333333334, ans=0.125 +2024-07-29 00:04:34,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-29 00:04:36,867 INFO [train.py:1114] (0/4) Epoch 16, batch 8450, loss[loss=0.2159, simple_loss=0.3023, pruned_loss=0.06475, over 4814.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2695, pruned_loss=0.04448, over 939467.72 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:04:46,414 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.964e+01 6.863e+01 7.657e+01 1.232e+02, threshold=1.373e+02, percent-clipped=1.0 +2024-07-29 00:04:54,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.94 vs. limit=10.0 +2024-07-29 00:05:09,578 INFO [train.py:1114] (0/4) Epoch 16, batch 8500, loss[loss=0.1729, simple_loss=0.2575, pruned_loss=0.04417, over 4614.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2689, pruned_loss=0.04399, over 938842.00 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:05:09,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215778.66666666666, ans=0.125 +2024-07-29 00:05:16,848 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:05:28,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=215818.66666666666, ans=0.125 +2024-07-29 00:05:41,906 INFO [train.py:1114] (0/4) Epoch 16, batch 8550, loss[loss=0.1637, simple_loss=0.2357, pruned_loss=0.04587, over 4798.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2678, pruned_loss=0.04365, over 939392.37 frames. 
], batch size: 11, lr: 4.61e-03, grad_scale: 64.0 +2024-07-29 00:05:47,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215845.33333333334, ans=0.1 +2024-07-29 00:05:52,593 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.843e+01 6.495e+01 7.573e+01 1.241e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-29 00:05:55,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=215872.0, ans=0.0 +2024-07-29 00:05:57,551 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.29 vs. limit=10.0 +2024-07-29 00:06:01,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=215885.33333333334, ans=0.0 +2024-07-29 00:06:10,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=215898.66666666666, ans=0.125 +2024-07-29 00:06:16,141 INFO [train.py:1114] (0/4) Epoch 16, batch 8600, loss[loss=0.1751, simple_loss=0.2776, pruned_loss=0.03628, over 4810.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2674, pruned_loss=0.04363, over 939166.65 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 64.0 +2024-07-29 00:06:28,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=215912.0, ans=0.1 +2024-07-29 00:06:33,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.99 vs. limit=22.5 +2024-07-29 00:06:40,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=215938.66666666666, ans=0.05 +2024-07-29 00:06:41,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=215938.66666666666, ans=0.5 +2024-07-29 00:06:57,244 INFO [train.py:1114] (0/4) Epoch 16, batch 8650, loss[loss=0.2435, simple_loss=0.3191, pruned_loss=0.08396, over 4898.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.268, pruned_loss=0.04353, over 940752.23 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:07:02,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=215978.66666666666, ans=0.125 +2024-07-29 00:07:04,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215992.0, ans=0.1 +2024-07-29 00:07:05,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215992.0, ans=0.1 +2024-07-29 00:07:08,161 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.650e+01 6.263e+01 7.133e+01 1.178e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 00:07:09,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=7.09 vs. 
limit=12.0 +2024-07-29 00:07:16,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216005.33333333334, ans=0.125 +2024-07-29 00:07:16,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=216005.33333333334, ans=0.125 +2024-07-29 00:07:18,980 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.13 vs. limit=15.0 +2024-07-29 00:07:19,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=29.75 vs. limit=15.0 +2024-07-29 00:07:22,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.59 vs. limit=15.0 +2024-07-29 00:07:29,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=216032.0, ans=0.0 +2024-07-29 00:07:30,205 INFO [train.py:1114] (0/4) Epoch 16, batch 8700, loss[loss=0.1792, simple_loss=0.2521, pruned_loss=0.05319, over 4759.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2688, pruned_loss=0.04356, over 938203.09 frames. ], batch size: 13, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:07:37,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=216058.66666666666, ans=0.5 +2024-07-29 00:07:37,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-07-29 00:07:48,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216072.0, ans=0.125 +2024-07-29 00:08:01,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=216098.66666666666, ans=0.125 +2024-07-29 00:08:06,549 INFO [train.py:1114] (0/4) Epoch 16, batch 8750, loss[loss=0.1969, simple_loss=0.2946, pruned_loss=0.04963, over 4692.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2692, pruned_loss=0.04366, over 936461.49 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:16,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=216125.33333333334, ans=0.0 +2024-07-29 00:08:16,797 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.631e+01 6.456e+01 7.086e+01 1.065e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:08:29,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=216152.0, ans=0.025 +2024-07-29 00:08:30,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=216152.0, ans=0.2 +2024-07-29 00:08:41,413 INFO [train.py:1114] (0/4) Epoch 16, batch 8800, loss[loss=0.181, simple_loss=0.2741, pruned_loss=0.04395, over 4929.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2703, pruned_loss=0.04444, over 937545.95 frames. 
], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:42,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216178.66666666666, ans=0.1 +2024-07-29 00:08:45,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=216178.66666666666, ans=0.0 +2024-07-29 00:08:55,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216205.33333333334, ans=0.1 +2024-07-29 00:08:57,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=216205.33333333334, ans=0.2 +2024-07-29 00:08:59,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=216205.33333333334, ans=0.0 +2024-07-29 00:09:14,340 INFO [train.py:1114] (0/4) Epoch 16, batch 8850, loss[loss=0.1938, simple_loss=0.2886, pruned_loss=0.04951, over 4461.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2701, pruned_loss=0.04449, over 932043.55 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:23,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=216258.66666666666, ans=0.125 +2024-07-29 00:09:25,915 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.534e+01 6.492e+01 7.361e+01 1.003e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 00:09:29,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=216272.0, ans=0.07 +2024-07-29 00:09:35,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216285.33333333334, ans=0.125 +2024-07-29 00:09:39,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216285.33333333334, ans=0.125 +2024-07-29 00:09:40,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=216298.66666666666, ans=0.125 +2024-07-29 00:09:47,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216312.0, ans=0.1 +2024-07-29 00:09:48,114 INFO [train.py:1114] (0/4) Epoch 16, batch 8900, loss[loss=0.147, simple_loss=0.2327, pruned_loss=0.0306, over 4937.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.27, pruned_loss=0.04448, over 930096.21 frames. 
], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:53,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216312.0, ans=0.125 +2024-07-29 00:09:54,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216325.33333333334, ans=0.1 +2024-07-29 00:09:57,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216325.33333333334, ans=0.1 +2024-07-29 00:10:13,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216352.0, ans=0.1 +2024-07-29 00:10:17,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.15 vs. limit=6.0 +2024-07-29 00:10:18,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=216365.33333333334, ans=0.2 +2024-07-29 00:10:21,258 INFO [train.py:1114] (0/4) Epoch 16, batch 8950, loss[loss=0.1735, simple_loss=0.2567, pruned_loss=0.04511, over 4515.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2686, pruned_loss=0.04412, over 931209.26 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:21,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=216378.66666666666, ans=0.2 +2024-07-29 00:10:31,364 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.323e+01 5.469e+01 6.023e+01 7.554e+01 1.113e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 00:10:35,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216405.33333333334, ans=0.1 +2024-07-29 00:10:43,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.17 vs. limit=15.0 +2024-07-29 00:10:53,233 INFO [train.py:1114] (0/4) Epoch 16, batch 9000, loss[loss=0.165, simple_loss=0.2537, pruned_loss=0.03818, over 4645.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2667, pruned_loss=0.0435, over 934088.35 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:53,234 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 00:11:06,433 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.3361, 4.8211, 5.0607, 4.9751], device='cuda:0') +2024-07-29 00:11:10,926 INFO [train.py:1146] (0/4) Epoch 16, validation: loss=0.1631, simple_loss=0.2656, pruned_loss=0.03028, over 944034.00 frames. +2024-07-29 00:11:10,927 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 00:11:12,496 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. 
limit=15.0 +2024-07-29 00:11:16,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=216458.66666666666, ans=0.0 +2024-07-29 00:11:34,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=216485.33333333334, ans=0.2 +2024-07-29 00:11:36,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=216485.33333333334, ans=0.025 +2024-07-29 00:11:43,614 INFO [train.py:1114] (0/4) Epoch 16, batch 9050, loss[loss=0.1538, simple_loss=0.2379, pruned_loss=0.0349, over 4516.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2676, pruned_loss=0.04392, over 934614.20 frames. ], batch size: 10, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:11:52,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=12.0 +2024-07-29 00:11:54,117 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.894e+01 6.647e+01 7.904e+01 1.086e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-29 00:11:55,507 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:12:17,318 INFO [train.py:1114] (0/4) Epoch 16, batch 9100, loss[loss=0.1588, simple_loss=0.2503, pruned_loss=0.03363, over 4940.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2672, pruned_loss=0.04371, over 937082.42 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:20,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216578.66666666666, ans=0.125 +2024-07-29 00:12:36,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=216605.33333333334, ans=0.0 +2024-07-29 00:12:51,514 INFO [train.py:1114] (0/4) Epoch 16, batch 9150, loss[loss=0.1604, simple_loss=0.2581, pruned_loss=0.03135, over 4813.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.268, pruned_loss=0.04375, over 935996.43 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:56,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216645.33333333334, ans=0.1 +2024-07-29 00:12:59,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=216658.66666666666, ans=0.0 +2024-07-29 00:13:01,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216658.66666666666, ans=0.1 +2024-07-29 00:13:01,748 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.764e+01 6.440e+01 7.377e+01 1.090e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 00:13:01,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=216658.66666666666, ans=0.0 +2024-07-29 00:13:20,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=216698.66666666666, ans=0.0 +2024-07-29 00:13:23,185 INFO [train.py:1114] (0/4) Epoch 16, batch 9200, loss[loss=0.1477, simple_loss=0.2365, pruned_loss=0.02942, over 4850.00 frames. ], tot_loss[loss=0.177, simple_loss=0.267, pruned_loss=0.04351, over 937851.08 frames. 
], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:13:26,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=216712.0, ans=0.125 +2024-07-29 00:13:31,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=216725.33333333334, ans=0.2 +2024-07-29 00:13:43,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=216738.66666666666, ans=0.125 +2024-07-29 00:13:46,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=216738.66666666666, ans=0.125 +2024-07-29 00:13:46,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=216738.66666666666, ans=0.0 +2024-07-29 00:13:52,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=216752.0, ans=0.0 +2024-07-29 00:13:59,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=216765.33333333334, ans=0.0 +2024-07-29 00:14:01,569 INFO [train.py:1114] (0/4) Epoch 16, batch 9250, loss[loss=0.1522, simple_loss=0.2479, pruned_loss=0.02828, over 4637.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2678, pruned_loss=0.0435, over 938414.97 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:04,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=216778.66666666666, ans=0.0 +2024-07-29 00:14:08,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=216792.0, ans=0.125 +2024-07-29 00:14:11,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=216792.0, ans=0.2 +2024-07-29 00:14:11,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=216792.0, ans=0.2 +2024-07-29 00:14:11,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.549e+01 6.033e+01 6.747e+01 9.644e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 00:14:17,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216805.33333333334, ans=0.125 +2024-07-29 00:14:18,418 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.69 vs. limit=22.5 +2024-07-29 00:14:32,954 INFO [train.py:1114] (0/4) Epoch 16, batch 9300, loss[loss=0.1745, simple_loss=0.2691, pruned_loss=0.03995, over 4772.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2663, pruned_loss=0.04303, over 938064.16 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:38,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=216858.66666666666, ans=0.0 +2024-07-29 00:14:47,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216872.0, ans=0.125 +2024-07-29 00:14:47,713 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.29 vs. 
limit=22.5 +2024-07-29 00:14:57,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.95 vs. limit=15.0 +2024-07-29 00:14:58,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=216898.66666666666, ans=0.125 +2024-07-29 00:15:03,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216912.0, ans=0.125 +2024-07-29 00:15:04,425 INFO [train.py:1114] (0/4) Epoch 16, batch 9350, loss[loss=0.178, simple_loss=0.2656, pruned_loss=0.04526, over 4789.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2672, pruned_loss=0.04382, over 934436.72 frames. ], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:09,217 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0 +2024-07-29 00:15:14,953 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.736e+01 6.318e+01 7.656e+01 1.489e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 00:15:23,082 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=216952.0, ans=0.2 +2024-07-29 00:15:29,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=216965.33333333334, ans=0.125 +2024-07-29 00:15:35,922 INFO [train.py:1114] (0/4) Epoch 16, batch 9400, loss[loss=0.1703, simple_loss=0.2662, pruned_loss=0.03722, over 4688.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2669, pruned_loss=0.04385, over 932978.32 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:47,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=217005.33333333334, ans=0.0 +2024-07-29 00:15:58,163 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0 +2024-07-29 00:16:02,639 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0 +2024-07-29 00:16:03,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217032.0, ans=0.125 +2024-07-29 00:16:05,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=217032.0, ans=0.125 +2024-07-29 00:16:06,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=217032.0, ans=0.125 +2024-07-29 00:16:08,031 INFO [train.py:1114] (0/4) Epoch 16, batch 9450, loss[loss=0.157, simple_loss=0.245, pruned_loss=0.03447, over 4798.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2676, pruned_loss=0.04383, over 932252.64 frames. 
], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:25,644 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.291e+01 5.921e+01 6.735e+01 1.029e+02, threshold=1.184e+02, percent-clipped=0.0 +2024-07-29 00:16:29,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217072.0, ans=0.125 +2024-07-29 00:16:29,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=217072.0, ans=0.0 +2024-07-29 00:16:32,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=217072.0, ans=0.125 +2024-07-29 00:16:34,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217085.33333333334, ans=0.125 +2024-07-29 00:16:42,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=15.0 +2024-07-29 00:16:46,844 INFO [train.py:1114] (0/4) Epoch 16, batch 9500, loss[loss=0.1525, simple_loss=0.2458, pruned_loss=0.02962, over 4705.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2672, pruned_loss=0.04337, over 934914.67 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:50,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217112.0, ans=0.1 +2024-07-29 00:16:52,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217125.33333333334, ans=0.1 +2024-07-29 00:16:56,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=217125.33333333334, ans=0.0 +2024-07-29 00:17:18,870 INFO [train.py:1114] (0/4) Epoch 16, batch 9550, loss[loss=0.1804, simple_loss=0.2745, pruned_loss=0.04309, over 4771.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2684, pruned_loss=0.04406, over 931844.69 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:22,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=217178.66666666666, ans=0.125 +2024-07-29 00:17:22,973 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.47 vs. 
limit=15.0 +2024-07-29 00:17:25,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=217192.0, ans=0.125 +2024-07-29 00:17:28,564 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.662e+01 6.269e+01 6.816e+01 8.303e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:17:33,099 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:17:45,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=217232.0, ans=0.125 +2024-07-29 00:17:47,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=217232.0, ans=0.025 +2024-07-29 00:17:50,242 INFO [train.py:1114] (0/4) Epoch 16, batch 9600, loss[loss=0.2141, simple_loss=0.296, pruned_loss=0.06606, over 3186.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2693, pruned_loss=0.04453, over 930696.05 frames. ], batch size: 35, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:50,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=217245.33333333334, ans=0.025 +2024-07-29 00:17:57,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=217258.66666666666, ans=6.0 +2024-07-29 00:18:09,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=217285.33333333334, ans=0.025 +2024-07-29 00:18:14,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.53 vs. limit=15.0 +2024-07-29 00:18:18,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.02 vs. limit=22.5 +2024-07-29 00:18:19,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=217298.66666666666, ans=0.125 +2024-07-29 00:18:22,113 INFO [train.py:1114] (0/4) Epoch 16, batch 9650, loss[loss=0.2031, simple_loss=0.2949, pruned_loss=0.05567, over 4843.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2697, pruned_loss=0.04484, over 927360.11 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:25,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=217312.0, ans=0.0 +2024-07-29 00:18:32,299 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.687e+01 6.553e+01 7.550e+01 1.146e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:18:46,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=217352.0, ans=0.05 +2024-07-29 00:18:53,907 INFO [train.py:1114] (0/4) Epoch 16, batch 9700, loss[loss=0.198, simple_loss=0.296, pruned_loss=0.04999, over 4164.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2694, pruned_loss=0.04477, over 925752.16 frames. 
], batch size: 25, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:59,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=217378.66666666666, ans=0.2 +2024-07-29 00:19:00,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=217392.0, ans=0.0 +2024-07-29 00:19:06,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.28 vs. limit=22.5 +2024-07-29 00:19:10,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=217405.33333333334, ans=0.0 +2024-07-29 00:19:11,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217405.33333333334, ans=0.125 +2024-07-29 00:19:13,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=217418.66666666666, ans=0.125 +2024-07-29 00:19:14,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217418.66666666666, ans=0.1 +2024-07-29 00:19:16,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=217418.66666666666, ans=0.025 +2024-07-29 00:19:25,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=12.0 +2024-07-29 00:19:25,694 INFO [train.py:1114] (0/4) Epoch 16, batch 9750, loss[loss=0.1853, simple_loss=0.2762, pruned_loss=0.04726, over 4693.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2695, pruned_loss=0.04458, over 926494.01 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:19:30,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=217445.33333333334, ans=0.0 +2024-07-29 00:19:36,078 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+01 5.634e+01 6.378e+01 7.099e+01 1.078e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 00:19:36,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=217458.66666666666, ans=0.125 +2024-07-29 00:19:37,714 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.86 vs. limit=22.5 +2024-07-29 00:19:38,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=217472.0, ans=0.2 +2024-07-29 00:19:48,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.78 vs. limit=15.0 +2024-07-29 00:19:57,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=217512.0, ans=0.07 +2024-07-29 00:19:57,516 INFO [train.py:1114] (0/4) Epoch 16, batch 9800, loss[loss=0.1643, simple_loss=0.2569, pruned_loss=0.0359, over 4697.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2693, pruned_loss=0.04441, over 925722.71 frames. 
], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:09,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=217512.0, ans=0.0 +2024-07-29 00:20:19,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217538.66666666666, ans=0.125 +2024-07-29 00:20:21,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217538.66666666666, ans=0.1 +2024-07-29 00:20:25,179 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.16 vs. limit=22.5 +2024-07-29 00:20:27,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=217552.0, ans=0.2 +2024-07-29 00:20:35,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217565.33333333334, ans=0.1 +2024-07-29 00:20:36,549 INFO [train.py:1114] (0/4) Epoch 16, batch 9850, loss[loss=0.2143, simple_loss=0.3066, pruned_loss=0.061, over 4899.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2704, pruned_loss=0.04489, over 928163.63 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:37,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=217578.66666666666, ans=0.125 +2024-07-29 00:20:47,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.87 vs. limit=15.0 +2024-07-29 00:20:48,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217592.0, ans=0.1 +2024-07-29 00:20:50,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.769e+01 6.344e+01 7.479e+01 1.066e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-29 00:21:03,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217618.66666666666, ans=0.1 +2024-07-29 00:21:11,731 INFO [train.py:1114] (0/4) Epoch 16, batch 9900, loss[loss=0.2004, simple_loss=0.2901, pruned_loss=0.05536, over 4852.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2708, pruned_loss=0.04494, over 927169.72 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:21:13,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=217645.33333333334, ans=0.09899494936611666 +2024-07-29 00:21:17,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=217645.33333333334, ans=0.0 +2024-07-29 00:21:17,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.08 vs. limit=12.0 +2024-07-29 00:21:21,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.10 vs. 
limit=10.0 +2024-07-29 00:21:22,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=217658.66666666666, ans=0.125 +2024-07-29 00:21:42,964 INFO [train.py:1114] (0/4) Epoch 16, batch 9950, loss[loss=0.153, simple_loss=0.2399, pruned_loss=0.03307, over 4790.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2697, pruned_loss=0.04494, over 929732.32 frames. ], batch size: 11, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:21:56,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.930e+01 6.462e+01 7.578e+01 1.307e+02, threshold=1.292e+02, percent-clipped=1.0 +2024-07-29 00:21:57,462 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.03 vs. limit=15.0 +2024-07-29 00:21:58,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=217738.66666666666, ans=0.0 +2024-07-29 00:22:00,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=217738.66666666666, ans=0.125 +2024-07-29 00:22:07,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-07-29 00:22:10,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=217752.0, ans=0.125 +2024-07-29 00:22:17,208 INFO [train.py:1114] (0/4) Epoch 16, batch 10000, loss[loss=0.2116, simple_loss=0.3086, pruned_loss=0.05725, over 4641.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2724, pruned_loss=0.04598, over 926956.44 frames. ], batch size: 16, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:19,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=217778.66666666666, ans=0.0 +2024-07-29 00:22:24,245 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.11 vs. limit=22.5 +2024-07-29 00:22:33,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.24 vs. limit=22.5 +2024-07-29 00:22:41,971 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=217832.0, ans=0.0 +2024-07-29 00:22:43,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=217832.0, ans=0.035 +2024-07-29 00:22:45,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=217832.0, ans=0.2 +2024-07-29 00:22:45,059 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=217832.0, ans=0.2 +2024-07-29 00:22:49,158 INFO [train.py:1114] (0/4) Epoch 16, batch 10050, loss[loss=0.1967, simple_loss=0.2772, pruned_loss=0.05805, over 3550.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2746, pruned_loss=0.04687, over 916433.19 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:52,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.64 vs. 
limit=6.0 +2024-07-29 00:23:01,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.872e+01 6.658e+01 7.418e+01 1.272e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-29 00:23:03,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217872.0, ans=0.125 +2024-07-29 00:23:06,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217872.0, ans=0.1 +2024-07-29 00:23:18,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=217898.66666666666, ans=0.0 +2024-07-29 00:23:23,187 INFO [train.py:1114] (0/4) Epoch 16, batch 10100, loss[loss=0.2041, simple_loss=0.2766, pruned_loss=0.06574, over 3232.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2789, pruned_loss=0.05157, over 863920.62 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:27,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217912.0, ans=0.125 +2024-07-29 00:23:30,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217925.33333333334, ans=0.125 +2024-07-29 00:23:31,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=217925.33333333334, ans=0.2 +2024-07-29 00:23:31,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=217925.33333333334, ans=0.025 +2024-07-29 00:23:40,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=217938.66666666666, ans=0.09899494936611666 +2024-07-29 00:23:41,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=217938.66666666666, ans=0.05 +2024-07-29 00:23:41,410 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=22.5 +2024-07-29 00:23:44,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=22.5 +2024-07-29 00:23:50,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217965.33333333334, ans=0.125 +2024-07-29 00:23:55,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=217978.66666666666, ans=0.0 +2024-07-29 00:23:56,126 INFO [train.py:1114] (0/4) Epoch 16, batch 10150, loss[loss=0.2051, simple_loss=0.2853, pruned_loss=0.06242, over 3234.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2831, pruned_loss=0.05532, over 823221.52 frames. 
], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:56,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217978.66666666666, ans=0.125 +2024-07-29 00:23:57,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217978.66666666666, ans=0.1 +2024-07-29 00:24:06,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=217992.0, ans=0.125 +2024-07-29 00:24:06,955 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.947e+01 6.782e+01 7.198e+01 7.904e+01 2.355e+02, threshold=1.440e+02, percent-clipped=1.0 +2024-07-29 00:24:07,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=217992.0, ans=0.2 +2024-07-29 00:24:09,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=218005.33333333334, ans=0.0 +2024-07-29 00:24:20,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=218032.0, ans=0.0 +2024-07-29 00:24:23,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=218032.0, ans=0.125 +2024-07-29 00:24:27,772 INFO [train.py:1114] (0/4) Epoch 16, batch 10200, loss[loss=0.2653, simple_loss=0.3403, pruned_loss=0.09508, over 3303.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2857, pruned_loss=0.05834, over 791224.57 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:24:32,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218045.33333333334, ans=0.1 +2024-07-29 00:24:33,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=218058.66666666666, ans=0.0 +2024-07-29 00:24:34,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=218058.66666666666, ans=0.025 +2024-07-29 00:24:41,160 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-16.pt +2024-07-29 00:25:24,937 INFO [train.py:1114] (0/4) Epoch 17, batch 0, loss[loss=0.1602, simple_loss=0.2532, pruned_loss=0.03357, over 4853.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2532, pruned_loss=0.03357, over 4853.00 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:25:24,938 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 00:25:30,332 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.3852, 3.1769, 2.6880, 2.5390], device='cuda:0') +2024-07-29 00:25:36,964 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.1632, simple_loss=0.2676, pruned_loss=0.0294, over 944034.00 frames. 
+2024-07-29 00:25:36,965 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 00:25:43,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218088.0, ans=0.125 +2024-07-29 00:25:45,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=218088.0, ans=0.0 +2024-07-29 00:25:50,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=218101.33333333334, ans=0.0 +2024-07-29 00:25:50,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=218101.33333333334, ans=0.0 +2024-07-29 00:26:15,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=218114.66666666666, ans=0.04949747468305833 +2024-07-29 00:26:23,069 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:26:23,653 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.974e+01 6.557e+01 7.210e+01 8.434e+01, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:26:27,183 INFO [train.py:1114] (0/4) Epoch 17, batch 50, loss[loss=0.1359, simple_loss=0.2262, pruned_loss=0.02279, over 4604.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2674, pruned_loss=0.04358, over 205793.76 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:26:40,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=218168.0, ans=0.0 +2024-07-29 00:26:43,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=218168.0, ans=0.05 +2024-07-29 00:26:44,412 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.52 vs. limit=22.5 +2024-07-29 00:26:49,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=218181.33333333334, ans=0.04949747468305833 +2024-07-29 00:26:54,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=218194.66666666666, ans=0.0 +2024-07-29 00:26:54,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218194.66666666666, ans=0.1 +2024-07-29 00:27:00,759 INFO [train.py:1114] (0/4) Epoch 17, batch 100, loss[loss=0.2011, simple_loss=0.2948, pruned_loss=0.05366, over 4637.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2692, pruned_loss=0.04389, over 364988.05 frames. 
], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:06,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218208.0, ans=0.125 +2024-07-29 00:27:06,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=218221.33333333334, ans=0.1 +2024-07-29 00:27:06,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=218221.33333333334, ans=0.025 +2024-07-29 00:27:11,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=15.0 +2024-07-29 00:27:20,990 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=12.0 +2024-07-29 00:27:25,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218248.0, ans=0.125 +2024-07-29 00:27:26,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.99 vs. limit=22.5 +2024-07-29 00:27:27,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.12 vs. limit=6.0 +2024-07-29 00:27:30,365 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.772e+01 6.593e+01 7.419e+01 9.701e+01, threshold=1.319e+02, percent-clipped=0.0 +2024-07-29 00:27:31,341 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0 +2024-07-29 00:27:33,592 INFO [train.py:1114] (0/4) Epoch 17, batch 150, loss[loss=0.178, simple_loss=0.2666, pruned_loss=0.04465, over 4604.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2682, pruned_loss=0.0433, over 493836.24 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:55,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=218314.66666666666, ans=0.025 +2024-07-29 00:27:55,972 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.95 vs. limit=15.0 +2024-07-29 00:27:59,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=218328.0, ans=0.125 +2024-07-29 00:28:02,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=218328.0, ans=0.025 +2024-07-29 00:28:06,811 INFO [train.py:1114] (0/4) Epoch 17, batch 200, loss[loss=0.1742, simple_loss=0.2733, pruned_loss=0.03755, over 4556.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2675, pruned_loss=0.04369, over 593327.58 frames. 
], batch size: 21, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:28:06,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=218341.33333333334, ans=0.125 +2024-07-29 00:28:10,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218341.33333333334, ans=0.1 +2024-07-29 00:28:12,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218354.66666666666, ans=0.1 +2024-07-29 00:28:13,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=218354.66666666666, ans=0.125 +2024-07-29 00:28:18,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=218354.66666666666, ans=0.1 +2024-07-29 00:28:26,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0 +2024-07-29 00:28:27,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=218381.33333333334, ans=0.035 +2024-07-29 00:28:36,805 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.659e+01 6.456e+01 7.215e+01 1.150e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:28:37,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=218394.66666666666, ans=0.2 +2024-07-29 00:28:39,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218408.0, ans=0.125 +2024-07-29 00:28:40,264 INFO [train.py:1114] (0/4) Epoch 17, batch 250, loss[loss=0.19, simple_loss=0.2842, pruned_loss=0.0479, over 4654.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2678, pruned_loss=0.04404, over 670159.19 frames. ], batch size: 16, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:28:53,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=218434.66666666666, ans=0.1 +2024-07-29 00:29:02,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218448.0, ans=0.1 +2024-07-29 00:29:03,164 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:29:14,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=218474.66666666666, ans=0.5 +2024-07-29 00:29:14,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0 +2024-07-29 00:29:15,287 INFO [train.py:1114] (0/4) Epoch 17, batch 300, loss[loss=0.1866, simple_loss=0.2838, pruned_loss=0.04474, over 4800.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2672, pruned_loss=0.04398, over 729804.82 frames. 
], batch size: 15, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:25,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218488.0, ans=0.1 +2024-07-29 00:29:25,920 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:29:33,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=218501.33333333334, ans=0.1 +2024-07-29 00:29:39,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=218514.66666666666, ans=0.0 +2024-07-29 00:29:40,793 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=12.0 +2024-07-29 00:29:47,010 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.418e+01 5.933e+01 6.484e+01 8.977e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-29 00:29:50,324 INFO [train.py:1114] (0/4) Epoch 17, batch 350, loss[loss=0.1735, simple_loss=0.2621, pruned_loss=0.04242, over 4928.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2676, pruned_loss=0.04352, over 775892.29 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:50,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=218541.33333333334, ans=0.0 +2024-07-29 00:29:51,391 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.96 vs. limit=15.0 +2024-07-29 00:29:55,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-29 00:29:56,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=218541.33333333334, ans=0.0 +2024-07-29 00:30:00,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=218554.66666666666, ans=0.0 +2024-07-29 00:30:10,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.68 vs. limit=12.0 +2024-07-29 00:30:19,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=218594.66666666666, ans=0.95 +2024-07-29 00:30:23,657 INFO [train.py:1114] (0/4) Epoch 17, batch 400, loss[loss=0.1607, simple_loss=0.2555, pruned_loss=0.03294, over 4688.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2671, pruned_loss=0.0432, over 813536.14 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:30:25,164 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. 
limit=15.0 +2024-07-29 00:30:27,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=218608.0, ans=0.125 +2024-07-29 00:30:41,931 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=218634.66666666666, ans=0.95 +2024-07-29 00:30:46,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=218648.0, ans=0.125 +2024-07-29 00:30:49,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=218648.0, ans=0.0 +2024-07-29 00:30:55,225 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-164000.pt +2024-07-29 00:30:58,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=15.0 +2024-07-29 00:30:58,245 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.560e+01 5.995e+01 6.560e+01 9.746e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 00:30:58,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=218661.33333333334, ans=0.0 +2024-07-29 00:31:01,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218674.66666666666, ans=0.0 +2024-07-29 00:31:01,603 INFO [train.py:1114] (0/4) Epoch 17, batch 450, loss[loss=0.1584, simple_loss=0.2604, pruned_loss=0.02823, over 4632.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04394, over 838468.13 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:31:21,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.16 vs. limit=22.5 +2024-07-29 00:31:26,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218714.66666666666, ans=0.1 +2024-07-29 00:31:34,906 INFO [train.py:1114] (0/4) Epoch 17, batch 500, loss[loss=0.1959, simple_loss=0.2933, pruned_loss=0.04927, over 4687.00 frames. ], tot_loss[loss=0.178, simple_loss=0.268, pruned_loss=0.04397, over 861331.97 frames. 
], batch size: 15, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:31:36,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=218741.33333333334, ans=0.125 +2024-07-29 00:31:38,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=218741.33333333334, ans=0.125 +2024-07-29 00:31:49,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=218768.0, ans=0.125 +2024-07-29 00:31:57,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=218768.0, ans=0.125 +2024-07-29 00:32:03,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=218781.33333333334, ans=0.125 +2024-07-29 00:32:09,025 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.408e+01 6.097e+01 6.893e+01 9.871e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 00:32:12,383 INFO [train.py:1114] (0/4) Epoch 17, batch 550, loss[loss=0.2003, simple_loss=0.2987, pruned_loss=0.05101, over 4649.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2677, pruned_loss=0.04332, over 877081.43 frames. ], batch size: 17, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:32:15,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=218808.0, ans=0.09899494936611666 +2024-07-29 00:32:17,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=218808.0, ans=0.2 +2024-07-29 00:32:31,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=218834.66666666666, ans=0.0 +2024-07-29 00:32:46,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218861.33333333334, ans=0.125 +2024-07-29 00:32:47,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=218861.33333333334, ans=0.0 +2024-07-29 00:32:50,161 INFO [train.py:1114] (0/4) Epoch 17, batch 600, loss[loss=0.1829, simple_loss=0.2758, pruned_loss=0.04495, over 4636.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2673, pruned_loss=0.04328, over 891467.63 frames. ], batch size: 16, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:33:02,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=218874.66666666666, ans=0.0 +2024-07-29 00:33:03,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=218888.0, ans=0.0 +2024-07-29 00:33:12,885 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:33:18,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218914.66666666666, ans=0.1 +2024-07-29 00:33:22,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=218914.66666666666, ans=0.125 +2024-07-29 00:33:25,672 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.05 vs. 
limit=15.0 +2024-07-29 00:33:26,493 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:33:27,709 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.574e+01 6.190e+01 7.231e+01 1.147e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 00:33:28,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=218928.0, ans=0.125 +2024-07-29 00:33:31,217 INFO [train.py:1114] (0/4) Epoch 17, batch 650, loss[loss=0.21, simple_loss=0.3031, pruned_loss=0.05842, over 4759.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2671, pruned_loss=0.04338, over 903298.78 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:33:40,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=218954.66666666666, ans=0.125 +2024-07-29 00:33:48,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.20 vs. limit=12.0 +2024-07-29 00:33:50,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=218981.33333333334, ans=0.125 +2024-07-29 00:33:53,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=218981.33333333334, ans=0.0 +2024-07-29 00:34:05,219 INFO [train.py:1114] (0/4) Epoch 17, batch 700, loss[loss=0.1737, simple_loss=0.2657, pruned_loss=0.04088, over 4645.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2674, pruned_loss=0.04309, over 911639.49 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:34:06,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219008.0, ans=0.125 +2024-07-29 00:34:18,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=219034.66666666666, ans=0.0 +2024-07-29 00:34:21,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=219034.66666666666, ans=0.0 +2024-07-29 00:34:30,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=219048.0, ans=0.0 +2024-07-29 00:34:34,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219048.0, ans=0.1 +2024-07-29 00:34:41,483 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.477e+01 6.099e+01 6.897e+01 1.014e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 00:34:41,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219061.33333333334, ans=0.1 +2024-07-29 00:34:44,988 INFO [train.py:1114] (0/4) Epoch 17, batch 750, loss[loss=0.1961, simple_loss=0.2986, pruned_loss=0.04674, over 4697.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2665, pruned_loss=0.04293, over 917934.98 frames. 
], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:34:45,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=219074.66666666666, ans=0.0 +2024-07-29 00:34:48,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219074.66666666666, ans=0.1 +2024-07-29 00:35:11,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=219114.66666666666, ans=0.5 +2024-07-29 00:35:13,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=219114.66666666666, ans=0.95 +2024-07-29 00:35:21,528 INFO [train.py:1114] (0/4) Epoch 17, batch 800, loss[loss=0.147, simple_loss=0.2424, pruned_loss=0.0258, over 4845.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.266, pruned_loss=0.0427, over 923020.44 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:35:28,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=219154.66666666666, ans=0.125 +2024-07-29 00:35:30,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=219154.66666666666, ans=0.125 +2024-07-29 00:35:52,219 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.327e+01 5.596e+01 6.013e+01 6.802e+01 9.397e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-29 00:35:52,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=219194.66666666666, ans=0.0 +2024-07-29 00:35:55,669 INFO [train.py:1114] (0/4) Epoch 17, batch 850, loss[loss=0.1759, simple_loss=0.2749, pruned_loss=0.03851, over 4661.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2652, pruned_loss=0.04228, over 927284.48 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:35:58,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=219208.0, ans=0.025 +2024-07-29 00:36:03,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219221.33333333334, ans=0.125 +2024-07-29 00:36:12,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=219234.66666666666, ans=0.025 +2024-07-29 00:36:14,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. 
limit=15.0 +2024-07-29 00:36:19,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=219248.0, ans=0.125 +2024-07-29 00:36:22,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=219261.33333333334, ans=0.2 +2024-07-29 00:36:25,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=219261.33333333334, ans=0.0 +2024-07-29 00:36:31,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=219274.66666666666, ans=0.0 +2024-07-29 00:36:31,445 INFO [train.py:1114] (0/4) Epoch 17, batch 900, loss[loss=0.1573, simple_loss=0.268, pruned_loss=0.02327, over 4855.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2662, pruned_loss=0.0426, over 927939.94 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:36:43,439 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:36:46,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=219301.33333333334, ans=0.0 +2024-07-29 00:36:52,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=219314.66666666666, ans=0.0 +2024-07-29 00:37:01,139 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.608e+01 6.144e+01 6.799e+01 1.059e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 00:37:04,602 INFO [train.py:1114] (0/4) Epoch 17, batch 950, loss[loss=0.1431, simple_loss=0.2335, pruned_loss=0.02638, over 4776.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2662, pruned_loss=0.04223, over 929478.09 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:37:16,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=219354.66666666666, ans=0.125 +2024-07-29 00:37:18,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.52 vs. 
limit=10.0 +2024-07-29 00:37:19,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219368.0, ans=0.1 +2024-07-29 00:37:21,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219368.0, ans=0.1 +2024-07-29 00:37:22,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=219368.0, ans=0.125 +2024-07-29 00:37:30,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219381.33333333334, ans=0.1 +2024-07-29 00:37:34,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219394.66666666666, ans=0.1 +2024-07-29 00:37:36,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=219394.66666666666, ans=0.125 +2024-07-29 00:37:36,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=219394.66666666666, ans=0.125 +2024-07-29 00:37:37,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=219394.66666666666, ans=0.0 +2024-07-29 00:37:41,958 INFO [train.py:1114] (0/4) Epoch 17, batch 1000, loss[loss=0.1473, simple_loss=0.2433, pruned_loss=0.0257, over 4965.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.0431, over 929172.38 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:37:50,834 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=12.0 +2024-07-29 00:37:52,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219421.33333333334, ans=0.1 +2024-07-29 00:37:54,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.59 vs. limit=10.0 +2024-07-29 00:37:55,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=219434.66666666666, ans=0.125 +2024-07-29 00:38:05,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0 +2024-07-29 00:38:13,341 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.640e+01 5.981e+01 6.813e+01 9.582e+01, threshold=1.196e+02, percent-clipped=0.0 +2024-07-29 00:38:13,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=219461.33333333334, ans=0.125 +2024-07-29 00:38:16,878 INFO [train.py:1114] (0/4) Epoch 17, batch 1050, loss[loss=0.1743, simple_loss=0.268, pruned_loss=0.04031, over 4869.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2671, pruned_loss=0.04329, over 931889.99 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:38:25,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.26 vs. 
limit=15.0 +2024-07-29 00:38:30,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=219488.0, ans=0.2 +2024-07-29 00:38:38,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=219501.33333333334, ans=0.125 +2024-07-29 00:38:51,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219528.0, ans=0.1 +2024-07-29 00:38:52,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=219528.0, ans=0.0 +2024-07-29 00:38:53,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=219528.0, ans=0.0 +2024-07-29 00:38:57,330 INFO [train.py:1114] (0/4) Epoch 17, batch 1100, loss[loss=0.1562, simple_loss=0.2408, pruned_loss=0.03583, over 4884.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2676, pruned_loss=0.04362, over 934607.73 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:39:01,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=219541.33333333334, ans=0.0 +2024-07-29 00:39:04,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=219554.66666666666, ans=0.125 +2024-07-29 00:39:13,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219568.0, ans=0.1 +2024-07-29 00:39:18,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=219581.33333333334, ans=0.125 +2024-07-29 00:39:27,540 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.574e+01 5.915e+01 6.747e+01 1.337e+02, threshold=1.183e+02, percent-clipped=1.0 +2024-07-29 00:39:30,903 INFO [train.py:1114] (0/4) Epoch 17, batch 1150, loss[loss=0.1496, simple_loss=0.2336, pruned_loss=0.0328, over 4901.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.267, pruned_loss=0.04362, over 933963.85 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:39:37,114 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=219621.33333333334, ans=0.125 +2024-07-29 00:39:47,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=219634.66666666666, ans=0.1 +2024-07-29 00:39:52,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=219648.0, ans=0.025 +2024-07-29 00:40:04,853 INFO [train.py:1114] (0/4) Epoch 17, batch 1200, loss[loss=0.153, simple_loss=0.2551, pruned_loss=0.02539, over 4868.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2684, pruned_loss=0.04413, over 932861.55 frames. 
], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:40:04,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=219674.66666666666, ans=0.2 +2024-07-29 00:40:37,457 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.543e+01 6.182e+01 6.957e+01 1.085e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 00:40:37,895 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.06 vs. limit=10.0 +2024-07-29 00:40:41,019 INFO [train.py:1114] (0/4) Epoch 17, batch 1250, loss[loss=0.1745, simple_loss=0.2637, pruned_loss=0.04266, over 4802.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2678, pruned_loss=0.04351, over 937144.27 frames. ], batch size: 15, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:40:46,889 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0 +2024-07-29 00:41:15,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219794.66666666666, ans=0.0 +2024-07-29 00:41:17,214 INFO [train.py:1114] (0/4) Epoch 17, batch 1300, loss[loss=0.1797, simple_loss=0.2711, pruned_loss=0.04418, over 4766.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2663, pruned_loss=0.04265, over 938898.47 frames. ], batch size: 19, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:41:19,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=219808.0, ans=0.0 +2024-07-29 00:41:23,487 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-29 00:41:33,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.16 vs. limit=15.0 +2024-07-29 00:41:39,820 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.61 vs. limit=22.5 +2024-07-29 00:41:40,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=219834.66666666666, ans=0.2 +2024-07-29 00:41:43,116 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.52 vs. limit=12.0 +2024-07-29 00:41:51,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=219861.33333333334, ans=0.0 +2024-07-29 00:41:52,954 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.796e+01 6.477e+01 7.611e+01 1.197e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 00:41:56,416 INFO [train.py:1114] (0/4) Epoch 17, batch 1350, loss[loss=0.1804, simple_loss=0.2685, pruned_loss=0.0461, over 4761.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04216, over 940938.05 frames. 
], batch size: 13, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:42:01,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=219874.66666666666, ans=0.2 +2024-07-29 00:42:26,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=219928.0, ans=0.125 +2024-07-29 00:42:30,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219928.0, ans=0.125 +2024-07-29 00:42:33,564 INFO [train.py:1114] (0/4) Epoch 17, batch 1400, loss[loss=0.1354, simple_loss=0.2208, pruned_loss=0.02497, over 4709.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.266, pruned_loss=0.04216, over 942957.04 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:42:51,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=219968.0, ans=0.2 +2024-07-29 00:42:56,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=219981.33333333334, ans=0.0 +2024-07-29 00:43:01,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.03 vs. limit=22.5 +2024-07-29 00:43:06,598 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.575e+01 5.917e+01 6.621e+01 1.311e+02, threshold=1.183e+02, percent-clipped=1.0 +2024-07-29 00:43:08,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=219994.66666666666, ans=0.0 +2024-07-29 00:43:08,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219994.66666666666, ans=0.1 +2024-07-29 00:43:10,162 INFO [train.py:1114] (0/4) Epoch 17, batch 1450, loss[loss=0.2016, simple_loss=0.3008, pruned_loss=0.05122, over 4664.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2667, pruned_loss=0.04243, over 942911.65 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:43:10,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=220008.0, ans=0.2 +2024-07-29 00:43:19,560 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220021.33333333334, ans=0.1 +2024-07-29 00:43:30,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220034.66666666666, ans=0.1 +2024-07-29 00:43:32,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=220034.66666666666, ans=0.0 +2024-07-29 00:43:36,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.57 vs. 
limit=15.0 +2024-07-29 00:43:43,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=220061.33333333334, ans=0.0 +2024-07-29 00:43:43,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=220061.33333333334, ans=22.5 +2024-07-29 00:43:48,449 INFO [train.py:1114] (0/4) Epoch 17, batch 1500, loss[loss=0.1904, simple_loss=0.2897, pruned_loss=0.04549, over 4805.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.267, pruned_loss=0.04241, over 942702.02 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:43:52,344 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0 +2024-07-29 00:44:00,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=220088.0, ans=0.125 +2024-07-29 00:44:11,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=220114.66666666666, ans=0.125 +2024-07-29 00:44:18,916 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.771e+01 6.251e+01 6.983e+01 1.071e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 00:44:21,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=220128.0, ans=0.0 +2024-07-29 00:44:21,941 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0 +2024-07-29 00:44:22,218 INFO [train.py:1114] (0/4) Epoch 17, batch 1550, loss[loss=0.2063, simple_loss=0.2788, pruned_loss=0.06689, over 4893.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2677, pruned_loss=0.04304, over 938753.60 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:44:28,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=220154.66666666666, ans=0.125 +2024-07-29 00:44:28,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220154.66666666666, ans=0.1 +2024-07-29 00:44:30,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.98 vs. limit=12.0 +2024-07-29 00:44:30,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0 +2024-07-29 00:44:35,941 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.66 vs. 
limit=15.0 +2024-07-29 00:44:37,810 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220168.0, ans=0.125 +2024-07-29 00:44:37,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220168.0, ans=0.1 +2024-07-29 00:44:41,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=220168.0, ans=0.2 +2024-07-29 00:44:51,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=220194.66666666666, ans=0.125 +2024-07-29 00:44:52,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.71 vs. limit=12.0 +2024-07-29 00:44:55,764 INFO [train.py:1114] (0/4) Epoch 17, batch 1600, loss[loss=0.1682, simple_loss=0.2683, pruned_loss=0.03409, over 4864.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2667, pruned_loss=0.04268, over 937532.50 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:44:58,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=220208.0, ans=0.125 +2024-07-29 00:45:02,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220221.33333333334, ans=0.125 +2024-07-29 00:45:04,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=220221.33333333334, ans=0.0 +2024-07-29 00:45:09,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.06 vs. limit=15.0 +2024-07-29 00:45:26,619 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.909e+01 5.495e+01 6.270e+01 6.960e+01 9.456e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:45:26,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=220261.33333333334, ans=0.2 +2024-07-29 00:45:27,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=220261.33333333334, ans=0.0 +2024-07-29 00:45:30,200 INFO [train.py:1114] (0/4) Epoch 17, batch 1650, loss[loss=0.188, simple_loss=0.2917, pruned_loss=0.04216, over 4661.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04304, over 937288.89 frames. 
], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:45:38,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=220288.0, ans=0.0 +2024-07-29 00:45:42,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220288.0, ans=0.125 +2024-07-29 00:45:50,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=220314.66666666666, ans=0.0 +2024-07-29 00:45:57,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220328.0, ans=0.1 +2024-07-29 00:46:04,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=220341.33333333334, ans=0.125 +2024-07-29 00:46:04,560 INFO [train.py:1114] (0/4) Epoch 17, batch 1700, loss[loss=0.1574, simple_loss=0.2329, pruned_loss=0.04092, over 4707.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2663, pruned_loss=0.04251, over 938968.84 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:46:19,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=220368.0, ans=0.025 +2024-07-29 00:46:26,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=220381.33333333334, ans=0.125 +2024-07-29 00:46:28,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220381.33333333334, ans=0.0 +2024-07-29 00:46:28,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220381.33333333334, ans=0.125 +2024-07-29 00:46:32,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=220381.33333333334, ans=0.2 +2024-07-29 00:46:36,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220394.66666666666, ans=0.125 +2024-07-29 00:46:36,628 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.850e+01 6.496e+01 7.744e+01 1.150e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-29 00:46:40,180 INFO [train.py:1114] (0/4) Epoch 17, batch 1750, loss[loss=0.1583, simple_loss=0.2408, pruned_loss=0.03787, over 4799.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2664, pruned_loss=0.04294, over 940300.41 frames. 
], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:46:43,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220408.0, ans=0.125 +2024-07-29 00:46:49,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=220421.33333333334, ans=0.0 +2024-07-29 00:46:52,348 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=220421.33333333334, ans=0.025 +2024-07-29 00:46:59,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=220448.0, ans=0.0 +2024-07-29 00:47:03,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=220448.0, ans=0.125 +2024-07-29 00:47:04,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.57 vs. limit=15.0 +2024-07-29 00:47:07,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.77 vs. limit=6.0 +2024-07-29 00:47:10,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220461.33333333334, ans=0.1 +2024-07-29 00:47:10,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=220461.33333333334, ans=0.0 +2024-07-29 00:47:13,448 INFO [train.py:1114] (0/4) Epoch 17, batch 1800, loss[loss=0.2116, simple_loss=0.305, pruned_loss=0.05914, over 4632.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2668, pruned_loss=0.04317, over 941008.68 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:47:22,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=22.5 +2024-07-29 00:47:25,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=12.0 +2024-07-29 00:47:28,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=220501.33333333334, ans=0.2 +2024-07-29 00:47:40,530 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:47:45,634 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.725e+01 6.271e+01 7.257e+01 1.188e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:47:48,114 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.12 vs. limit=15.0 +2024-07-29 00:47:49,060 INFO [train.py:1114] (0/4) Epoch 17, batch 1850, loss[loss=0.1837, simple_loss=0.2873, pruned_loss=0.04001, over 4809.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2672, pruned_loss=0.04347, over 940974.75 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:47:52,135 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. 
limit=15.0 +2024-07-29 00:47:55,529 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-29 00:47:59,065 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.18 vs. limit=22.5 +2024-07-29 00:48:00,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220554.66666666666, ans=0.125 +2024-07-29 00:48:06,165 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:48:10,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=220581.33333333334, ans=0.2 +2024-07-29 00:48:10,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=220581.33333333334, ans=0.0 +2024-07-29 00:48:11,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.27 vs. limit=22.5 +2024-07-29 00:48:13,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=220581.33333333334, ans=0.125 +2024-07-29 00:48:14,645 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-07-29 00:48:17,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=220594.66666666666, ans=0.125 +2024-07-29 00:48:17,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=220594.66666666666, ans=0.025 +2024-07-29 00:48:23,157 INFO [train.py:1114] (0/4) Epoch 17, batch 1900, loss[loss=0.1718, simple_loss=0.27, pruned_loss=0.03676, over 4664.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2667, pruned_loss=0.04295, over 942059.92 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:48:29,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=220621.33333333334, ans=0.0 +2024-07-29 00:48:35,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=220621.33333333334, ans=0.5 +2024-07-29 00:48:35,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=220634.66666666666, ans=0.125 +2024-07-29 00:48:37,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-29 00:48:37,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=220634.66666666666, ans=0.125 +2024-07-29 00:48:55,007 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.883e+01 6.427e+01 8.062e+01 1.126e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-29 00:49:13,497 INFO [train.py:1114] (0/4) Epoch 17, batch 1950, loss[loss=0.1513, simple_loss=0.2487, pruned_loss=0.02691, over 4906.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2678, pruned_loss=0.04296, over 943997.77 frames. 
], batch size: 13, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:49:16,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=220674.66666666666, ans=0.125 +2024-07-29 00:49:21,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=220688.0, ans=0.125 +2024-07-29 00:49:47,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=220688.0, ans=0.125 +2024-07-29 00:49:52,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=15.0 +2024-07-29 00:50:03,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=220714.66666666666, ans=0.025 +2024-07-29 00:50:06,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=220728.0, ans=0.2 +2024-07-29 00:50:06,433 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=15.0 +2024-07-29 00:50:27,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=220728.0, ans=0.0 +2024-07-29 00:50:28,631 INFO [train.py:1114] (0/4) Epoch 17, batch 2000, loss[loss=0.1501, simple_loss=0.2318, pruned_loss=0.03422, over 4797.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2684, pruned_loss=0.04316, over 941135.56 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:50:55,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=220741.33333333334, ans=0.1 +2024-07-29 00:51:07,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220754.66666666666, ans=0.1 +2024-07-29 00:51:13,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=220768.0, ans=0.125 +2024-07-29 00:51:15,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220768.0, ans=0.1 +2024-07-29 00:51:27,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=220781.33333333334, ans=0.0 +2024-07-29 00:51:28,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=220794.66666666666, ans=0.0 +2024-07-29 00:51:32,750 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.436e+01 5.997e+01 6.741e+01 1.066e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 00:51:34,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=220794.66666666666, ans=0.125 +2024-07-29 00:51:36,127 INFO [train.py:1114] (0/4) Epoch 17, batch 2050, loss[loss=0.134, simple_loss=0.22, pruned_loss=0.02399, over 4604.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2668, pruned_loss=0.04294, over 939260.01 frames. 
], batch size: 11, lr: 4.42e-03, grad_scale: 64.0 +2024-07-29 00:51:45,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220821.33333333334, ans=0.125 +2024-07-29 00:51:47,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=220821.33333333334, ans=0.125 +2024-07-29 00:51:48,987 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.06 vs. limit=15.0 +2024-07-29 00:51:57,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=220848.0, ans=0.0 +2024-07-29 00:51:58,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220848.0, ans=0.125 +2024-07-29 00:52:07,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=220861.33333333334, ans=0.0 +2024-07-29 00:52:08,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=220861.33333333334, ans=0.025 +2024-07-29 00:52:17,013 INFO [train.py:1114] (0/4) Epoch 17, batch 2100, loss[loss=0.1842, simple_loss=0.2679, pruned_loss=0.05028, over 4767.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2659, pruned_loss=0.04267, over 941007.52 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:52:37,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=220914.66666666666, ans=0.2 +2024-07-29 00:52:40,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=220914.66666666666, ans=0.125 +2024-07-29 00:52:44,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=220928.0, ans=0.125 +2024-07-29 00:52:48,038 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.567e+01 6.209e+01 7.288e+01 1.074e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 00:52:50,925 INFO [train.py:1114] (0/4) Epoch 17, batch 2150, loss[loss=0.1677, simple_loss=0.259, pruned_loss=0.03815, over 4889.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2656, pruned_loss=0.04284, over 944109.67 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:53:00,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220954.66666666666, ans=0.1 +2024-07-29 00:53:05,108 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0 +2024-07-29 00:53:18,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.47 vs. limit=15.0 +2024-07-29 00:53:21,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220994.66666666666, ans=0.1 +2024-07-29 00:53:26,710 INFO [train.py:1114] (0/4) Epoch 17, batch 2200, loss[loss=0.1867, simple_loss=0.2796, pruned_loss=0.0469, over 4822.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.266, pruned_loss=0.04329, over 943750.65 frames. 
], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:53:29,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=221008.0, ans=0.125 +2024-07-29 00:53:34,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=221021.33333333334, ans=0.07 +2024-07-29 00:53:37,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=221021.33333333334, ans=0.2 +2024-07-29 00:53:37,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-29 00:53:44,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=221034.66666666666, ans=0.125 +2024-07-29 00:53:56,881 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.74 vs. limit=15.0 +2024-07-29 00:53:57,776 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.667e+01 6.562e+01 7.774e+01 1.023e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 00:53:59,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=12.0 +2024-07-29 00:54:00,465 INFO [train.py:1114] (0/4) Epoch 17, batch 2250, loss[loss=0.1798, simple_loss=0.274, pruned_loss=0.04278, over 4703.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2657, pruned_loss=0.04305, over 942287.62 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:54:03,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221074.66666666666, ans=0.1 +2024-07-29 00:54:25,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=221114.66666666666, ans=0.2 +2024-07-29 00:54:26,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=221128.0, ans=0.025 +2024-07-29 00:54:26,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=221128.0, ans=0.1 +2024-07-29 00:54:27,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221128.0, ans=0.1 +2024-07-29 00:54:29,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.73 vs. limit=15.0 +2024-07-29 00:54:33,733 INFO [train.py:1114] (0/4) Epoch 17, batch 2300, loss[loss=0.1208, simple_loss=0.2037, pruned_loss=0.01898, over 4938.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2645, pruned_loss=0.043, over 939841.44 frames. 
], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:54:35,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=221141.33333333334, ans=0.125 +2024-07-29 00:54:42,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=221154.66666666666, ans=0.2 +2024-07-29 00:55:06,914 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.22 vs. limit=15.0 +2024-07-29 00:55:06,979 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.506e+01 6.021e+01 6.838e+01 1.144e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 00:55:09,656 INFO [train.py:1114] (0/4) Epoch 17, batch 2350, loss[loss=0.1671, simple_loss=0.2516, pruned_loss=0.04131, over 4641.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2649, pruned_loss=0.04264, over 941798.66 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:55:13,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=221208.0, ans=0.2 +2024-07-29 00:55:18,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=221221.33333333334, ans=0.07 +2024-07-29 00:55:18,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=221221.33333333334, ans=0.05 +2024-07-29 00:55:33,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.91 vs. limit=10.0 +2024-07-29 00:55:43,236 INFO [train.py:1114] (0/4) Epoch 17, batch 2400, loss[loss=0.2055, simple_loss=0.2825, pruned_loss=0.06419, over 4634.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2652, pruned_loss=0.04256, over 941372.73 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:56:02,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=221301.33333333334, ans=0.025 +2024-07-29 00:56:04,489 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.09 vs. limit=15.0 +2024-07-29 00:56:17,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 5.694e+01 6.302e+01 6.928e+01 9.959e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 00:56:18,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=221328.0, ans=0.1 +2024-07-29 00:56:20,727 INFO [train.py:1114] (0/4) Epoch 17, batch 2450, loss[loss=0.1815, simple_loss=0.2694, pruned_loss=0.04678, over 4684.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2665, pruned_loss=0.04325, over 937385.16 frames. 
], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:56:22,817 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=221341.33333333334, ans=0.125 +2024-07-29 00:56:22,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=221341.33333333334, ans=0.0 +2024-07-29 00:56:24,568 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-07-29 00:56:28,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221354.66666666666, ans=0.1 +2024-07-29 00:56:34,565 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. limit=15.0 +2024-07-29 00:56:35,734 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221368.0, ans=0.0 +2024-07-29 00:56:52,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=221394.66666666666, ans=0.1 +2024-07-29 00:56:54,123 INFO [train.py:1114] (0/4) Epoch 17, batch 2500, loss[loss=0.1894, simple_loss=0.2826, pruned_loss=0.04811, over 4811.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2663, pruned_loss=0.04333, over 939651.16 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:57:02,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=221421.33333333334, ans=0.125 +2024-07-29 00:57:07,894 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.95 vs. limit=15.0 +2024-07-29 00:57:08,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221434.66666666666, ans=0.125 +2024-07-29 00:57:09,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.18 vs. limit=15.0 +2024-07-29 00:57:15,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=221448.0, ans=0.0 +2024-07-29 00:57:18,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=221448.0, ans=0.1 +2024-07-29 00:57:23,089 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221461.33333333334, ans=0.125 +2024-07-29 00:57:24,996 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.455e+01 6.019e+01 6.790e+01 9.676e+01, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 00:57:27,774 INFO [train.py:1114] (0/4) Epoch 17, batch 2550, loss[loss=0.1442, simple_loss=0.2261, pruned_loss=0.03114, over 4810.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2661, pruned_loss=0.04303, over 938980.21 frames. 
], batch size: 11, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:57:30,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221474.66666666666, ans=0.125 +2024-07-29 00:57:32,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221474.66666666666, ans=0.1 +2024-07-29 00:57:33,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=221474.66666666666, ans=0.0 +2024-07-29 00:57:49,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221501.33333333334, ans=0.1 +2024-07-29 00:57:50,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=221501.33333333334, ans=0.125 +2024-07-29 00:58:09,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=221514.66666666666, ans=0.09899494936611666 +2024-07-29 00:58:24,485 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=221528.0, ans=0.2 +2024-07-29 00:58:25,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=221528.0, ans=0.2 +2024-07-29 00:58:30,274 INFO [train.py:1114] (0/4) Epoch 17, batch 2600, loss[loss=0.1705, simple_loss=0.2703, pruned_loss=0.03533, over 4892.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.267, pruned_loss=0.04313, over 937958.37 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:58:30,756 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.84 vs. 
limit=22.5 +2024-07-29 00:58:33,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=221541.33333333334, ans=0.2 +2024-07-29 00:58:47,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=221554.66666666666, ans=0.125 +2024-07-29 00:58:49,293 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:58:49,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=221554.66666666666, ans=0.0 +2024-07-29 00:59:01,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221581.33333333334, ans=0.0 +2024-07-29 00:59:04,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=221594.66666666666, ans=0.0 +2024-07-29 00:59:05,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221594.66666666666, ans=0.1 +2024-07-29 00:59:08,387 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.744e+01 6.230e+01 7.123e+01 1.037e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 00:59:09,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=221594.66666666666, ans=0.025 +2024-07-29 00:59:11,710 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=221594.66666666666, ans=0.025 +2024-07-29 00:59:21,106 INFO [train.py:1114] (0/4) Epoch 17, batch 2650, loss[loss=0.1795, simple_loss=0.2688, pruned_loss=0.04503, over 4621.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04307, over 939845.88 frames. ], batch size: 16, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:59:24,057 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.50 vs. limit=10.0 +2024-07-29 00:59:25,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=221608.0, ans=0.07 +2024-07-29 00:59:30,192 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.19 vs. limit=15.0 +2024-07-29 00:59:32,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.88 vs. limit=15.0 +2024-07-29 00:59:54,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=221674.66666666666, ans=0.125 +2024-07-29 00:59:54,781 INFO [train.py:1114] (0/4) Epoch 17, batch 2700, loss[loss=0.2126, simple_loss=0.3012, pruned_loss=0.06202, over 4739.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2676, pruned_loss=0.04364, over 939789.07 frames. 
], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 01:00:04,587 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:00:14,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=221714.66666666666, ans=0.125 +2024-07-29 01:00:30,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=221714.66666666666, ans=0.125 +2024-07-29 01:00:32,058 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=15.0 +2024-07-29 01:00:36,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.02 vs. limit=6.0 +2024-07-29 01:00:37,056 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.539e+01 6.361e+01 7.423e+01 1.026e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 01:00:37,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=221728.0, ans=0.025 +2024-07-29 01:00:41,155 INFO [train.py:1114] (0/4) Epoch 17, batch 2750, loss[loss=0.1661, simple_loss=0.2551, pruned_loss=0.03859, over 4712.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2662, pruned_loss=0.04323, over 939847.09 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 01:00:47,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=221754.66666666666, ans=0.07 +2024-07-29 01:00:47,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=221754.66666666666, ans=0.125 +2024-07-29 01:00:50,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=221754.66666666666, ans=0.125 +2024-07-29 01:01:01,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=221768.0, ans=0.125 +2024-07-29 01:01:14,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221794.66666666666, ans=0.125 +2024-07-29 01:01:21,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=221808.0, ans=0.0 +2024-07-29 01:01:22,009 INFO [train.py:1114] (0/4) Epoch 17, batch 2800, loss[loss=0.2722, simple_loss=0.3223, pruned_loss=0.111, over 3276.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2658, pruned_loss=0.04296, over 937723.29 frames. 
], batch size: 35, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:01:23,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221808.0, ans=0.125 +2024-07-29 01:01:35,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=221834.66666666666, ans=0.025 +2024-07-29 01:01:53,597 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.698e+01 6.511e+01 7.478e+01 1.084e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 01:01:55,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=221874.66666666666, ans=0.0 +2024-07-29 01:01:56,359 INFO [train.py:1114] (0/4) Epoch 17, batch 2850, loss[loss=0.1894, simple_loss=0.279, pruned_loss=0.04989, over 4965.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2666, pruned_loss=0.043, over 936178.68 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:01:56,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=221874.66666666666, ans=0.1 +2024-07-29 01:02:11,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=221888.0, ans=0.0 +2024-07-29 01:02:19,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=221901.33333333334, ans=0.0 +2024-07-29 01:02:20,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=221901.33333333334, ans=0.2 +2024-07-29 01:02:22,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=221914.66666666666, ans=0.125 +2024-07-29 01:02:26,979 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.25 vs. limit=15.0 +2024-07-29 01:02:33,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=221928.0, ans=0.0 +2024-07-29 01:02:35,088 INFO [train.py:1114] (0/4) Epoch 17, batch 2900, loss[loss=0.1545, simple_loss=0.2423, pruned_loss=0.0333, over 4835.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2678, pruned_loss=0.04302, over 939996.02 frames. 
], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:02:37,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=221941.33333333334, ans=0.0 +2024-07-29 01:02:37,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221941.33333333334, ans=0.0 +2024-07-29 01:02:51,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=221968.0, ans=0.0 +2024-07-29 01:03:04,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221994.66666666666, ans=0.0 +2024-07-29 01:03:07,960 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.542e+01 6.312e+01 7.539e+01 1.199e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-29 01:03:10,800 INFO [train.py:1114] (0/4) Epoch 17, batch 2950, loss[loss=0.1513, simple_loss=0.2519, pruned_loss=0.02532, over 4710.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2667, pruned_loss=0.04258, over 939128.00 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:03:12,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=222008.0, ans=0.0 +2024-07-29 01:03:22,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=222021.33333333334, ans=0.125 +2024-07-29 01:03:32,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=222048.0, ans=0.0 +2024-07-29 01:03:40,441 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=222061.33333333334, ans=15.0 +2024-07-29 01:03:44,764 INFO [train.py:1114] (0/4) Epoch 17, batch 3000, loss[loss=0.1625, simple_loss=0.2588, pruned_loss=0.03313, over 4766.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2664, pruned_loss=0.04241, over 938673.29 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:03:44,765 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 01:04:05,746 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.1635, simple_loss=0.2655, pruned_loss=0.03068, over 944034.00 frames. +2024-07-29 01:04:05,747 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 01:04:27,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=222114.66666666666, ans=0.05 +2024-07-29 01:04:30,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=222114.66666666666, ans=0.125 +2024-07-29 01:04:37,610 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.724e+01 6.244e+01 7.233e+01 1.089e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:04:40,426 INFO [train.py:1114] (0/4) Epoch 17, batch 3050, loss[loss=0.1703, simple_loss=0.2605, pruned_loss=0.04001, over 4636.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2668, pruned_loss=0.04292, over 937195.75 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:04:44,861 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.76 vs. 
limit=15.0 +2024-07-29 01:04:53,010 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-07-29 01:04:55,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=222168.0, ans=0.0 +2024-07-29 01:04:56,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222168.0, ans=0.125 +2024-07-29 01:05:15,897 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.97 vs. limit=6.0 +2024-07-29 01:05:16,152 INFO [train.py:1114] (0/4) Epoch 17, batch 3100, loss[loss=0.2008, simple_loss=0.2875, pruned_loss=0.05701, over 4643.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2662, pruned_loss=0.04287, over 937464.39 frames. ], batch size: 16, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:05:17,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=222208.0, ans=0.07 +2024-07-29 01:05:17,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. limit=6.0 +2024-07-29 01:05:19,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=222208.0, ans=0.125 +2024-07-29 01:05:20,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222208.0, ans=0.1 +2024-07-29 01:05:22,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.97 vs. limit=15.0 +2024-07-29 01:05:30,047 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.74 vs. limit=22.5 +2024-07-29 01:05:30,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=222221.33333333334, ans=0.04949747468305833 +2024-07-29 01:05:34,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=22.5 +2024-07-29 01:05:36,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=222234.66666666666, ans=0.125 +2024-07-29 01:05:40,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=222234.66666666666, ans=0.0 +2024-07-29 01:05:51,849 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 5.691e+01 6.608e+01 7.636e+01 1.029e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-29 01:05:54,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=222274.66666666666, ans=0.125 +2024-07-29 01:05:54,555 INFO [train.py:1114] (0/4) Epoch 17, batch 3150, loss[loss=0.2131, simple_loss=0.2906, pruned_loss=0.06775, over 4631.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.043, over 937852.28 frames. 
], batch size: 17, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:06:05,212 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-07-29 01:06:14,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=222301.33333333334, ans=0.125 +2024-07-29 01:06:29,485 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0 +2024-07-29 01:06:29,716 INFO [train.py:1114] (0/4) Epoch 17, batch 3200, loss[loss=0.1882, simple_loss=0.2837, pruned_loss=0.04632, over 4823.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2656, pruned_loss=0.04246, over 940217.52 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:06:56,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=222381.33333333334, ans=0.125 +2024-07-29 01:07:02,015 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.614e+01 6.191e+01 6.817e+01 1.066e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 01:07:05,965 INFO [train.py:1114] (0/4) Epoch 17, batch 3250, loss[loss=0.165, simple_loss=0.2685, pruned_loss=0.03074, over 4928.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2675, pruned_loss=0.04304, over 941202.95 frames. ], batch size: 14, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:07:06,317 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.81 vs. limit=22.5 +2024-07-29 01:07:16,130 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.65 vs. limit=15.0 +2024-07-29 01:07:17,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=222421.33333333334, ans=0.0 +2024-07-29 01:07:21,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222434.66666666666, ans=0.1 +2024-07-29 01:07:22,115 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.36 vs. limit=10.0 +2024-07-29 01:07:26,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=222448.0, ans=0.0 +2024-07-29 01:07:39,378 INFO [train.py:1114] (0/4) Epoch 17, batch 3300, loss[loss=0.2071, simple_loss=0.3119, pruned_loss=0.05114, over 4735.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2675, pruned_loss=0.04334, over 941458.29 frames. 
], batch size: 19, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:07:44,213 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:07:49,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=222488.0, ans=0.0 +2024-07-29 01:07:52,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=222488.0, ans=0.0 +2024-07-29 01:07:57,995 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=222501.33333333334, ans=0.05 +2024-07-29 01:08:07,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222528.0, ans=0.1 +2024-07-29 01:08:08,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=222528.0, ans=0.125 +2024-07-29 01:08:13,578 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.660e+01 6.307e+01 7.257e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 01:08:13,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222528.0, ans=0.1 +2024-07-29 01:08:16,305 INFO [train.py:1114] (0/4) Epoch 17, batch 3350, loss[loss=0.1579, simple_loss=0.2613, pruned_loss=0.02727, over 4627.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2675, pruned_loss=0.04314, over 939004.58 frames. ], batch size: 17, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:08:22,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.74 vs. limit=12.0 +2024-07-29 01:08:25,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=222554.66666666666, ans=0.125 +2024-07-29 01:08:36,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222581.33333333334, ans=0.1 +2024-07-29 01:08:38,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=222581.33333333334, ans=0.125 +2024-07-29 01:08:41,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222581.33333333334, ans=0.125 +2024-07-29 01:08:51,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=222608.0, ans=0.125 +2024-07-29 01:08:51,958 INFO [train.py:1114] (0/4) Epoch 17, batch 3400, loss[loss=0.1474, simple_loss=0.239, pruned_loss=0.02784, over 4808.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2678, pruned_loss=0.04348, over 937300.20 frames. 
], batch size: 11, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:09:01,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=222621.33333333334, ans=0.125 +2024-07-29 01:09:04,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=222621.33333333334, ans=0.0 +2024-07-29 01:09:09,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=222634.66666666666, ans=0.0 +2024-07-29 01:09:27,087 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.040e+01 6.022e+01 6.843e+01 8.395e+01 1.350e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-29 01:09:28,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=222661.33333333334, ans=0.07 +2024-07-29 01:09:28,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222661.33333333334, ans=0.1 +2024-07-29 01:09:29,779 INFO [train.py:1114] (0/4) Epoch 17, batch 3450, loss[loss=0.1498, simple_loss=0.2486, pruned_loss=0.02554, over 4736.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2671, pruned_loss=0.04324, over 937452.23 frames. ], batch size: 19, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:09:31,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=222674.66666666666, ans=0.125 +2024-07-29 01:09:34,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=222674.66666666666, ans=0.125 +2024-07-29 01:09:39,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=222688.0, ans=0.05 +2024-07-29 01:09:58,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.42 vs. limit=10.0 +2024-07-29 01:10:02,873 INFO [train.py:1114] (0/4) Epoch 17, batch 3500, loss[loss=0.1774, simple_loss=0.268, pruned_loss=0.04344, over 4946.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2672, pruned_loss=0.04313, over 937984.62 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:10:12,558 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222754.66666666666, ans=0.125 +2024-07-29 01:10:34,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=222794.66666666666, ans=0.125 +2024-07-29 01:10:35,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.391e+01 6.097e+01 6.632e+01 8.722e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 01:10:38,366 INFO [train.py:1114] (0/4) Epoch 17, batch 3550, loss[loss=0.1573, simple_loss=0.2559, pruned_loss=0.02937, over 4667.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2664, pruned_loss=0.04264, over 938447.97 frames. 
], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:10:48,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=222821.33333333334, ans=0.2 +2024-07-29 01:10:58,374 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=222834.66666666666, ans=0.0 +2024-07-29 01:11:02,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222848.0, ans=0.125 +2024-07-29 01:11:04,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222848.0, ans=0.1 +2024-07-29 01:11:09,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=222861.33333333334, ans=0.0 +2024-07-29 01:11:14,871 INFO [train.py:1114] (0/4) Epoch 17, batch 3600, loss[loss=0.1755, simple_loss=0.2726, pruned_loss=0.03924, over 4967.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2662, pruned_loss=0.04238, over 940159.51 frames. ], batch size: 13, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:11:18,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=222874.66666666666, ans=0.0 +2024-07-29 01:11:21,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=222888.0, ans=0.125 +2024-07-29 01:11:31,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222901.33333333334, ans=0.1 +2024-07-29 01:11:40,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=222914.66666666666, ans=0.0 +2024-07-29 01:11:47,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.818e+01 6.519e+01 7.348e+01 1.094e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-29 01:11:50,117 INFO [train.py:1114] (0/4) Epoch 17, batch 3650, loss[loss=0.1903, simple_loss=0.2688, pruned_loss=0.05587, over 4895.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2656, pruned_loss=0.04237, over 940566.99 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:11:52,661 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. 
limit=6.0 +2024-07-29 01:12:01,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=222954.66666666666, ans=0.125 +2024-07-29 01:12:02,516 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=222954.66666666666, ans=0.2 +2024-07-29 01:12:04,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=222968.0, ans=0.2 +2024-07-29 01:12:07,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=222968.0, ans=0.0 +2024-07-29 01:12:17,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=222981.33333333334, ans=0.0 +2024-07-29 01:12:19,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=222981.33333333334, ans=0.125 +2024-07-29 01:12:23,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=222994.66666666666, ans=0.2 +2024-07-29 01:12:26,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.70 vs. limit=22.5 +2024-07-29 01:12:34,074 INFO [train.py:1114] (0/4) Epoch 17, batch 3700, loss[loss=0.1826, simple_loss=0.2643, pruned_loss=0.05049, over 4933.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2662, pruned_loss=0.04279, over 941521.85 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:12:41,983 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.85 vs. limit=12.0 +2024-07-29 01:12:45,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=223021.33333333334, ans=0.07 +2024-07-29 01:12:49,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.51 vs. limit=15.0 +2024-07-29 01:12:49,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=223021.33333333334, ans=0.0 +2024-07-29 01:12:55,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=223034.66666666666, ans=0.0 +2024-07-29 01:12:58,701 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.59 vs. limit=10.0 +2024-07-29 01:13:02,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=223048.0, ans=0.0 +2024-07-29 01:13:09,045 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.690e+01 6.166e+01 6.901e+01 9.277e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 01:13:14,299 INFO [train.py:1114] (0/4) Epoch 17, batch 3750, loss[loss=0.1458, simple_loss=0.2257, pruned_loss=0.03298, over 4809.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.266, pruned_loss=0.04232, over 942914.57 frames. ], batch size: 11, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:13:44,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.22 vs. 
limit=15.0 +2024-07-29 01:13:44,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223114.66666666666, ans=0.1 +2024-07-29 01:13:54,165 INFO [train.py:1114] (0/4) Epoch 17, batch 3800, loss[loss=0.1736, simple_loss=0.271, pruned_loss=0.03809, over 4809.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2658, pruned_loss=0.04245, over 941896.31 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:13:55,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223141.33333333334, ans=0.125 +2024-07-29 01:14:08,287 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=223168.0, ans=0.125 +2024-07-29 01:14:10,849 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.28 vs. limit=12.0 +2024-07-29 01:14:13,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223168.0, ans=0.125 +2024-07-29 01:14:15,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=223168.0, ans=0.125 +2024-07-29 01:14:28,215 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.538e+01 6.338e+01 7.177e+01 1.035e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 01:14:29,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=223194.66666666666, ans=0.0 +2024-07-29 01:14:30,992 INFO [train.py:1114] (0/4) Epoch 17, batch 3850, loss[loss=0.1838, simple_loss=0.2867, pruned_loss=0.04046, over 4625.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2659, pruned_loss=0.04228, over 942393.99 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:14:37,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=223221.33333333334, ans=0.0 +2024-07-29 01:14:45,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=223234.66666666666, ans=0.125 +2024-07-29 01:14:55,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=223248.0, ans=0.125 +2024-07-29 01:14:59,965 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.83 vs. limit=15.0 +2024-07-29 01:15:04,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223261.33333333334, ans=0.1 +2024-07-29 01:15:05,369 INFO [train.py:1114] (0/4) Epoch 17, batch 3900, loss[loss=0.159, simple_loss=0.2602, pruned_loss=0.02891, over 4806.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2668, pruned_loss=0.04266, over 942839.96 frames. 
], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:15:09,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223274.66666666666, ans=0.1 +2024-07-29 01:15:17,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=223288.0, ans=0.125 +2024-07-29 01:15:18,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-29 01:15:21,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=223301.33333333334, ans=0.0 +2024-07-29 01:15:40,804 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=223328.0, ans=0.0 +2024-07-29 01:15:42,537 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.533e+01 5.996e+01 6.814e+01 1.002e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 01:15:44,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=223328.0, ans=0.0 +2024-07-29 01:15:45,484 INFO [train.py:1114] (0/4) Epoch 17, batch 3950, loss[loss=0.1744, simple_loss=0.2665, pruned_loss=0.04119, over 4849.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2672, pruned_loss=0.04271, over 944717.67 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:15:49,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223341.33333333334, ans=0.125 +2024-07-29 01:16:05,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223354.66666666666, ans=0.1 +2024-07-29 01:16:07,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0 +2024-07-29 01:16:24,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.87 vs. limit=6.0 +2024-07-29 01:17:03,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=223381.33333333334, ans=0.2 +2024-07-29 01:17:03,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.54 vs. limit=15.0 +2024-07-29 01:17:19,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=223394.66666666666, ans=15.0 +2024-07-29 01:17:23,531 INFO [train.py:1114] (0/4) Epoch 17, batch 4000, loss[loss=0.1687, simple_loss=0.2564, pruned_loss=0.04055, over 4782.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2675, pruned_loss=0.04324, over 940889.14 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:17:50,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.33 vs. 
limit=15.0 +2024-07-29 01:17:59,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223434.66666666666, ans=0.1 +2024-07-29 01:18:01,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=223434.66666666666, ans=0.125 +2024-07-29 01:18:15,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223448.0, ans=0.1 +2024-07-29 01:18:25,018 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.438e+01 5.691e+01 6.092e+01 6.901e+01 9.634e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 01:18:31,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=223461.33333333334, ans=0.025 +2024-07-29 01:18:33,080 INFO [train.py:1114] (0/4) Epoch 17, batch 4050, loss[loss=0.2823, simple_loss=0.3447, pruned_loss=0.1099, over 3192.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2674, pruned_loss=0.04354, over 939547.69 frames. ], batch size: 36, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:18:47,465 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:25:01,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=223528.0, ans=0.125 +2024-07-29 01:25:02,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=223528.0, ans=0.2 +2024-07-29 01:25:08,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=223528.0, ans=0.0 +2024-07-29 01:25:14,261 INFO [train.py:1114] (0/4) Epoch 17, batch 4100, loss[loss=0.1806, simple_loss=0.2718, pruned_loss=0.0447, over 4903.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04386, over 938172.79 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 64.0 +2024-07-29 01:25:28,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=223541.33333333334, ans=0.2 +2024-07-29 01:26:14,046 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.43 vs. limit=15.0 +2024-07-29 01:26:34,814 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.52 vs. limit=22.5 +2024-07-29 01:27:01,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223581.33333333334, ans=0.1 +2024-07-29 01:28:06,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.82 vs. limit=10.0 +2024-07-29 01:28:22,408 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.689e+01 6.101e+01 7.334e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 01:28:28,342 INFO [train.py:1114] (0/4) Epoch 17, batch 4150, loss[loss=0.2014, simple_loss=0.2985, pruned_loss=0.05215, over 4827.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2672, pruned_loss=0.04339, over 937621.43 frames. 
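The `optim.py:487` warnings summarize the recent distribution of gradient norms as five quartile points [min, 25%, 50%, 75%, max] together with the active clipping threshold and the fraction of recent steps that were clipped; in these logs the threshold consistently works out to `Clipping_scale` times the median (e.g. 2.0 × 6.092e+01 ≈ 1.218e+02 just above). A hedged sketch of median-based adaptive clipping in that spirit (not the actual ScaledAdam code; class and parameter names are illustrative):

```python
import collections
import torch

# Sketch only: clip to a multiple of the median gradient norm seen over
# a recent window, mirroring the "grad-norm quartiles ... threshold ...
# percent-clipped" log lines above.
class MedianGradClipper:
    def __init__(self, clipping_scale=2.0, window=128):
        self.scale = clipping_scale
        self.norms = collections.deque(maxlen=window)  # recent grad norms

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        # the five logged quartile points: min, 25%, 50%, 75%, max
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()  # clipping_scale x median
        if norm > threshold:
            for p in params:
                p.grad.mul_(threshold / norm)
        return threshold
```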
], batch size: 13, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:28:36,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223608.0, ans=0.1 +2024-07-29 01:28:52,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=223621.33333333334, ans=0.95 +2024-07-29 01:28:58,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=223621.33333333334, ans=0.125 +2024-07-29 01:29:18,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=223621.33333333334, ans=0.2 +2024-07-29 01:30:12,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223648.0, ans=0.1 +2024-07-29 01:30:27,842 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.62 vs. limit=15.0 +2024-07-29 01:30:40,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=223661.33333333334, ans=0.0 +2024-07-29 01:30:43,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=223661.33333333334, ans=0.125 +2024-07-29 01:30:48,967 INFO [train.py:1114] (0/4) Epoch 17, batch 4200, loss[loss=0.2022, simple_loss=0.286, pruned_loss=0.05925, over 4913.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2679, pruned_loss=0.04375, over 939094.23 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:30:56,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=223674.66666666666, ans=0.0 +2024-07-29 01:31:55,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.29 vs. limit=22.5 +2024-07-29 01:32:02,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223701.33333333334, ans=0.1 +2024-07-29 01:32:15,874 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.29 vs. limit=22.5 +2024-07-29 01:32:17,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=223701.33333333334, ans=0.125 +2024-07-29 01:32:18,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=223714.66666666666, ans=0.0 +2024-07-29 01:32:27,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. 
limit=6.0 +2024-07-29 01:33:21,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223728.0, ans=0.125 +2024-07-29 01:33:25,470 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.649e+01 6.155e+01 7.132e+01 1.062e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-29 01:33:25,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223728.0, ans=0.125 +2024-07-29 01:33:32,172 INFO [train.py:1114] (0/4) Epoch 17, batch 4250, loss[loss=0.1381, simple_loss=0.2348, pruned_loss=0.02075, over 4637.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2677, pruned_loss=0.04333, over 940155.39 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:33:44,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=223741.33333333334, ans=0.2 +2024-07-29 01:34:32,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=223768.0, ans=0.0 +2024-07-29 01:34:32,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=223768.0, ans=0.0 +2024-07-29 01:34:32,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223768.0, ans=0.0 +2024-07-29 01:34:35,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=15.0 +2024-07-29 01:34:37,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=223781.33333333334, ans=0.2 +2024-07-29 01:34:56,065 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=223781.33333333334, ans=0.035 +2024-07-29 01:35:07,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223794.66666666666, ans=0.1 +2024-07-29 01:35:12,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223794.66666666666, ans=0.125 +2024-07-29 01:35:13,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=223794.66666666666, ans=0.2 +2024-07-29 01:35:26,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten.whitening_limit, batch_count=223794.66666666666, ans=15.0 +2024-07-29 01:35:27,636 INFO [train.py:1114] (0/4) Epoch 17, batch 4300, loss[loss=0.1714, simple_loss=0.2667, pruned_loss=0.03801, over 4761.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2673, pruned_loss=0.04308, over 939690.92 frames. 
], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:36:07,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223821.33333333334, ans=0.1 +2024-07-29 01:36:28,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=223834.66666666666, ans=0.2 +2024-07-29 01:38:11,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223848.0, ans=0.125 +2024-07-29 01:38:13,872 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:38:22,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.90 vs. limit=15.0 +2024-07-29 01:38:25,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=223861.33333333334, ans=0.0 +2024-07-29 01:38:25,974 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.646e+01 6.421e+01 7.211e+01 1.436e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-29 01:38:30,615 INFO [train.py:1114] (0/4) Epoch 17, batch 4350, loss[loss=0.1649, simple_loss=0.263, pruned_loss=0.03343, over 4757.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2674, pruned_loss=0.0431, over 940587.71 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:39:02,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=223901.33333333334, ans=0.2 +2024-07-29 01:39:03,407 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=223901.33333333334, ans=0.125 +2024-07-29 01:39:09,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.30 vs. limit=15.0 +2024-07-29 01:39:10,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=223914.66666666666, ans=0.0 +2024-07-29 01:39:25,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223928.0, ans=0.0 +2024-07-29 01:39:35,698 INFO [train.py:1114] (0/4) Epoch 17, batch 4400, loss[loss=0.1486, simple_loss=0.2439, pruned_loss=0.02663, over 4808.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04279, over 940178.16 frames. 
], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:39:52,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=223941.33333333334, ans=0.0 +2024-07-29 01:40:21,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=223968.0, ans=0.025 +2024-07-29 01:40:39,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=223981.33333333334, ans=0.0 +2024-07-29 01:40:43,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=223981.33333333334, ans=0.125 +2024-07-29 01:40:50,261 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-168000.pt +2024-07-29 01:40:52,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=223994.66666666666, ans=0.125 +2024-07-29 01:40:54,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.541e+01 6.235e+01 6.902e+01 1.046e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-29 01:40:55,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=223994.66666666666, ans=0.025 +2024-07-29 01:40:59,855 INFO [train.py:1114] (0/4) Epoch 17, batch 4450, loss[loss=0.141, simple_loss=0.2296, pruned_loss=0.0262, over 4947.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2666, pruned_loss=0.04249, over 938500.88 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:41:26,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=224021.33333333334, ans=0.125 +2024-07-29 01:42:09,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=224034.66666666666, ans=0.125 +2024-07-29 01:42:19,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=224048.0, ans=0.2 +2024-07-29 01:42:26,363 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.94 vs. limit=12.0 +2024-07-29 01:42:57,266 INFO [train.py:1114] (0/4) Epoch 17, batch 4500, loss[loss=0.1686, simple_loss=0.2684, pruned_loss=0.03439, over 4736.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04278, over 937588.35 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:43:10,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=224088.0, ans=0.0 +2024-07-29 01:43:10,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=224088.0, ans=0.0 +2024-07-29 01:43:12,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. 
limit=15.0 +2024-07-29 01:43:15,111 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:43:23,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=224101.33333333334, ans=0.0 +2024-07-29 01:43:38,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.56 vs. limit=12.0 +2024-07-29 01:43:48,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=224114.66666666666, ans=0.125 +2024-07-29 01:43:48,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-07-29 01:43:54,426 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.696e+01 6.215e+01 7.468e+01 9.739e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 01:43:56,675 INFO [train.py:1114] (0/4) Epoch 17, batch 4550, loss[loss=0.1697, simple_loss=0.2623, pruned_loss=0.03853, over 4900.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2672, pruned_loss=0.04312, over 939827.09 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:44:08,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=224154.66666666666, ans=0.1 +2024-07-29 01:44:09,265 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0 +2024-07-29 01:44:17,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=224168.0, ans=0.07 +2024-07-29 01:44:22,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=224181.33333333334, ans=0.07 +2024-07-29 01:44:28,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=224194.66666666666, ans=0.0 +2024-07-29 01:44:33,948 INFO [train.py:1114] (0/4) Epoch 17, batch 4600, loss[loss=0.1897, simple_loss=0.2904, pruned_loss=0.04447, over 4474.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2665, pruned_loss=0.04281, over 938148.55 frames. ], batch size: 21, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:44:35,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=224208.0, ans=0.0 +2024-07-29 01:44:43,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.75 vs. 
limit=10.0 +2024-07-29 01:44:47,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224234.66666666666, ans=0.125 +2024-07-29 01:44:53,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=224234.66666666666, ans=0.07 +2024-07-29 01:44:55,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=224248.0, ans=0.04949747468305833 +2024-07-29 01:45:17,103 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.605e+01 6.267e+01 6.922e+01 9.428e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 01:45:19,079 INFO [train.py:1114] (0/4) Epoch 17, batch 4650, loss[loss=0.1995, simple_loss=0.2878, pruned_loss=0.05556, over 4830.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2671, pruned_loss=0.04313, over 939793.04 frames. ], batch size: 16, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:45:23,417 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.50 vs. limit=10.0 +2024-07-29 01:46:09,258 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=224328.0, ans=0.0 +2024-07-29 01:46:14,278 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.56 vs. limit=12.0 +2024-07-29 01:46:15,065 INFO [train.py:1114] (0/4) Epoch 17, batch 4700, loss[loss=0.1711, simple_loss=0.2573, pruned_loss=0.04247, over 4692.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2664, pruned_loss=0.04313, over 936791.45 frames. ], batch size: 11, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:46:15,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=224341.33333333334, ans=0.0 +2024-07-29 01:46:25,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=224354.66666666666, ans=0.0 +2024-07-29 01:46:28,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=224354.66666666666, ans=0.0 +2024-07-29 01:46:28,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=224354.66666666666, ans=0.09899494936611666 +2024-07-29 01:46:33,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=224368.0, ans=0.125 +2024-07-29 01:46:34,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=224368.0, ans=0.2 +2024-07-29 01:46:49,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=224394.66666666666, ans=0.125 +2024-07-29 01:46:53,328 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.618e+01 6.268e+01 7.126e+01 1.011e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 01:46:58,153 INFO [train.py:1114] (0/4) Epoch 17, batch 4750, loss[loss=0.2153, simple_loss=0.2976, pruned_loss=0.06646, over 4484.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2669, pruned_loss=0.04362, over 935105.13 frames. 
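The `scaling.py:1024` Whitening entries compare a per-module anisotropy metric against a limit that varies by module type (6.0 for attention keys, 10.0/12.0/15.0/22.5 elsewhere in these logs); the whitening penalty only engages when the metric exceeds its limit. One reasonable form of such a metric, shown here as an assumption rather than the exact icefall formula, is the ratio of the mean squared eigenvalue of the feature covariance to the squared mean eigenvalue, which is 1.0 for perfectly white features and grows as the spectrum becomes more anisotropic:

```python
import torch

# Hedged sketch of a whitening metric like the ones logged above.
# For covariance C: metric = mean(eig(C)^2) / mean(eig(C))^2, computed
# via traces so no eigendecomposition is needed:
#   mean(eig^2) = trace(C @ C) / n,  mean(eig) = trace(C) / n.
def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations for one whitening group
    x = x - x.mean(dim=0)
    cov = (x.t() @ x) / x.shape[0]
    mean_eig = torch.diagonal(cov).mean()            # trace(C) / n
    mean_eig_sq = torch.diagonal(cov @ cov).mean()   # trace(C @ C) / n
    return (mean_eig_sq / mean_eig.clamp(min=1e-20) ** 2).item()

x = torch.randn(20000, 512)
print(whitening_metric(x))  # ~1.0 for white features, up to sampling noise
```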
], batch size: 21, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:46:58,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=224408.0, ans=0.025 +2024-07-29 01:47:51,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.55 vs. limit=22.5 +2024-07-29 01:47:52,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224448.0, ans=0.125 +2024-07-29 01:48:01,679 INFO [train.py:1114] (0/4) Epoch 17, batch 4800, loss[loss=0.1538, simple_loss=0.2476, pruned_loss=0.02997, over 4694.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2663, pruned_loss=0.04353, over 932495.41 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:48:04,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=224474.66666666666, ans=0.2 +2024-07-29 01:48:10,267 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.80 vs. limit=15.0 +2024-07-29 01:48:49,937 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.702e+01 6.152e+01 7.356e+01 9.741e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 01:48:53,632 INFO [train.py:1114] (0/4) Epoch 17, batch 4850, loss[loss=0.1802, simple_loss=0.2772, pruned_loss=0.04162, over 4753.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2669, pruned_loss=0.04358, over 932417.37 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:48:53,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=224541.33333333334, ans=0.125 +2024-07-29 01:48:55,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=224541.33333333334, ans=0.0 +2024-07-29 01:48:56,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224541.33333333334, ans=0.1 +2024-07-29 01:49:03,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=224541.33333333334, ans=0.025 +2024-07-29 01:49:04,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.34 vs. 
limit=15.0 +2024-07-29 01:49:08,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=224554.66666666666, ans=0.0 +2024-07-29 01:49:16,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=224568.0, ans=0.2 +2024-07-29 01:49:17,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=224568.0, ans=0.0 +2024-07-29 01:49:22,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=224581.33333333334, ans=0.125 +2024-07-29 01:49:24,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224594.66666666666, ans=0.125 +2024-07-29 01:49:25,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=224594.66666666666, ans=0.0 +2024-07-29 01:49:42,565 INFO [train.py:1114] (0/4) Epoch 17, batch 4900, loss[loss=0.1701, simple_loss=0.2655, pruned_loss=0.0374, over 4747.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2665, pruned_loss=0.04341, over 934029.63 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:49:45,806 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0 +2024-07-29 01:49:49,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224621.33333333334, ans=0.1 +2024-07-29 01:49:51,948 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.61 vs. limit=10.0 +2024-07-29 01:50:00,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=224634.66666666666, ans=0.05 +2024-07-29 01:50:10,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=224648.0, ans=0.0 +2024-07-29 01:50:15,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224648.0, ans=0.125 +2024-07-29 01:50:36,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=224661.33333333334, ans=0.125 +2024-07-29 01:50:48,854 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.722e+01 6.197e+01 6.933e+01 1.189e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 01:51:00,412 INFO [train.py:1114] (0/4) Epoch 17, batch 4950, loss[loss=0.2292, simple_loss=0.2906, pruned_loss=0.08387, over 3380.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2683, pruned_loss=0.0445, over 931558.85 frames. ], batch size: 37, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:51:02,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=224674.66666666666, ans=0.0 +2024-07-29 01:51:03,263 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.21 vs. 
limit=15.0 +2024-07-29 01:51:10,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=224674.66666666666, ans=0.02 +2024-07-29 01:51:16,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=224688.0, ans=0.1 +2024-07-29 01:51:24,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=224701.33333333334, ans=0.0 +2024-07-29 01:51:31,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=224714.66666666666, ans=0.125 +2024-07-29 01:51:31,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=224714.66666666666, ans=0.0 +2024-07-29 01:51:34,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224714.66666666666, ans=0.125 +2024-07-29 01:51:42,481 INFO [train.py:1114] (0/4) Epoch 17, batch 5000, loss[loss=0.163, simple_loss=0.2634, pruned_loss=0.03133, over 4654.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.04442, over 935418.85 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:51:46,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=224741.33333333334, ans=0.125 +2024-07-29 01:51:46,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.25 vs. limit=15.0 +2024-07-29 01:52:07,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=224768.0, ans=0.125 +2024-07-29 01:52:10,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=224781.33333333334, ans=0.05 +2024-07-29 01:52:47,965 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.582e+01 6.302e+01 7.015e+01 1.020e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 01:52:50,184 INFO [train.py:1114] (0/4) Epoch 17, batch 5050, loss[loss=0.1661, simple_loss=0.2441, pruned_loss=0.04406, over 4859.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.267, pruned_loss=0.04341, over 937890.77 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:52:54,882 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.73 vs. limit=15.0 +2024-07-29 01:52:58,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-07-29 01:53:14,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.32 vs. limit=22.5 +2024-07-29 01:53:29,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. 
limit=6.0 +2024-07-29 01:53:34,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=224874.66666666666, ans=0.0 +2024-07-29 01:53:34,548 INFO [train.py:1114] (0/4) Epoch 17, batch 5100, loss[loss=0.1881, simple_loss=0.2765, pruned_loss=0.0498, over 4768.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2681, pruned_loss=0.04389, over 935014.16 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:53:45,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=224888.0, ans=0.125 +2024-07-29 01:53:48,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224888.0, ans=0.125 +2024-07-29 01:53:54,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224901.33333333334, ans=0.0 +2024-07-29 01:54:04,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=224914.66666666666, ans=0.0 +2024-07-29 01:54:09,718 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=15.0 +2024-07-29 01:54:11,314 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.725e+01 6.244e+01 7.275e+01 1.073e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:54:13,313 INFO [train.py:1114] (0/4) Epoch 17, batch 5150, loss[loss=0.1704, simple_loss=0.272, pruned_loss=0.03443, over 4818.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2682, pruned_loss=0.04344, over 936114.00 frames. ], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:16,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=224941.33333333334, ans=0.125 +2024-07-29 01:54:21,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=224954.66666666666, ans=0.95 +2024-07-29 01:54:34,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.46 vs. limit=15.0 +2024-07-29 01:54:36,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=224981.33333333334, ans=0.125 +2024-07-29 01:54:37,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224981.33333333334, ans=0.125 +2024-07-29 01:54:39,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-07-29 01:54:48,433 INFO [train.py:1114] (0/4) Epoch 17, batch 5200, loss[loss=0.167, simple_loss=0.2632, pruned_loss=0.03541, over 4666.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04286, over 936071.04 frames. 
], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:50,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=225008.0, ans=0.125 +2024-07-29 01:54:50,614 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:54:51,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=225008.0, ans=0.2 +2024-07-29 01:54:56,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=225021.33333333334, ans=0.2 +2024-07-29 01:55:00,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=225021.33333333334, ans=0.95 +2024-07-29 01:55:18,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=225048.0, ans=0.125 +2024-07-29 01:55:23,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=225048.0, ans=0.0 +2024-07-29 01:55:24,562 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:55:32,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=225061.33333333334, ans=0.0 +2024-07-29 01:55:37,217 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.840e+01 6.748e+01 7.869e+01 1.303e+02, threshold=1.350e+02, percent-clipped=1.0 +2024-07-29 01:55:39,363 INFO [train.py:1114] (0/4) Epoch 17, batch 5250, loss[loss=0.1556, simple_loss=0.2373, pruned_loss=0.03691, over 4899.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2654, pruned_loss=0.04231, over 935720.42 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:55:54,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=225101.33333333334, ans=0.025 +2024-07-29 01:56:10,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=225101.33333333334, ans=0.125 +2024-07-29 01:56:18,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=225128.0, ans=0.07 +2024-07-29 01:56:20,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225128.0, ans=0.1 +2024-07-29 01:56:20,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=225128.0, ans=0.125 +2024-07-29 01:56:20,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.39 vs. limit=10.0 +2024-07-29 01:56:25,596 INFO [train.py:1114] (0/4) Epoch 17, batch 5300, loss[loss=0.1837, simple_loss=0.2788, pruned_loss=0.04435, over 4614.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2654, pruned_loss=0.04247, over 934041.82 frames. 
], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:56:29,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225141.33333333334, ans=0.1 +2024-07-29 01:56:34,575 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0 +2024-07-29 01:56:42,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=225168.0, ans=0.1 +2024-07-29 01:56:54,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=225194.66666666666, ans=0.04949747468305833 +2024-07-29 01:56:57,576 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.754e+01 6.386e+01 7.426e+01 1.100e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 01:56:59,692 INFO [train.py:1114] (0/4) Epoch 17, batch 5350, loss[loss=0.1551, simple_loss=0.2385, pruned_loss=0.03584, over 4495.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2665, pruned_loss=0.04276, over 936200.63 frames. ], batch size: 10, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:01,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=225208.0, ans=0.125 +2024-07-29 01:57:10,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=225221.33333333334, ans=0.125 +2024-07-29 01:57:17,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=225234.66666666666, ans=0.2 +2024-07-29 01:57:34,867 INFO [train.py:1114] (0/4) Epoch 17, batch 5400, loss[loss=0.2203, simple_loss=0.3029, pruned_loss=0.06888, over 4148.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2677, pruned_loss=0.0435, over 930001.34 frames. ], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:35,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=225274.66666666666, ans=0.125 +2024-07-29 01:57:43,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225288.0, ans=0.125 +2024-07-29 01:57:44,396 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.96 vs. limit=10.0 +2024-07-29 01:57:48,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=225288.0, ans=0.2 +2024-07-29 01:57:55,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225301.33333333334, ans=0.1 +2024-07-29 01:57:55,131 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:58:09,520 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+01 5.738e+01 6.198e+01 6.838e+01 9.669e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 01:58:11,775 INFO [train.py:1114] (0/4) Epoch 17, batch 5450, loss[loss=0.1671, simple_loss=0.2494, pruned_loss=0.04245, over 4712.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2674, pruned_loss=0.04319, over 932726.93 frames. 
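Each `train.py:1114` line pairs the current batch's loss (with its frame count) with `tot_loss`, a frame-weighted running average over recent batches, which is why the `over N frames` count next to `tot_loss` hovers around 930k-940k rather than growing without bound. A sketch of one way to maintain such a decayed, frame-weighted average (the decay constant is a hypothetical choice, not taken from the source):

```python
# Sketch of the running averages behind the "loss[... over N frames]" /
# "tot_loss[... over M frames]" pairs: each batch contributes its loss
# weighted by frame count, and past contributions decay geometrically.
class RunningLoss:
    def __init__(self, decay=0.995):  # decay chosen for illustration
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, num_frames: float):
        self.loss_sum = self.decay * self.loss_sum + batch_loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```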
], batch size: 11, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:13,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=225341.33333333334, ans=0.125 +2024-07-29 01:58:13,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=225341.33333333334, ans=0.025 +2024-07-29 01:58:13,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225341.33333333334, ans=0.1 +2024-07-29 01:58:20,491 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.79 vs. limit=10.0 +2024-07-29 01:58:22,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=225354.66666666666, ans=0.125 +2024-07-29 01:58:27,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=225368.0, ans=0.0 +2024-07-29 01:58:30,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225368.0, ans=0.1 +2024-07-29 01:58:32,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=225381.33333333334, ans=0.0 +2024-07-29 01:58:40,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.72 vs. limit=22.5 +2024-07-29 01:58:41,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=225394.66666666666, ans=0.125 +2024-07-29 01:58:45,739 INFO [train.py:1114] (0/4) Epoch 17, batch 5500, loss[loss=0.1928, simple_loss=0.2987, pruned_loss=0.04348, over 4143.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2669, pruned_loss=0.04325, over 930665.53 frames. ], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:50,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=225408.0, ans=0.125 +2024-07-29 01:58:51,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.94 vs. limit=15.0 +2024-07-29 01:59:05,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=225448.0, ans=0.125 +2024-07-29 01:59:07,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=225448.0, ans=0.0 +2024-07-29 01:59:12,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=225461.33333333334, ans=0.0 +2024-07-29 01:59:16,896 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.683e+01 6.448e+01 7.775e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 01:59:18,943 INFO [train.py:1114] (0/4) Epoch 17, batch 5550, loss[loss=0.158, simple_loss=0.2524, pruned_loss=0.03185, over 4711.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2667, pruned_loss=0.04325, over 932942.40 frames. 
], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:19,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=225474.66666666666, ans=0.2 +2024-07-29 01:59:22,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=225474.66666666666, ans=0.0 +2024-07-29 01:59:23,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=225474.66666666666, ans=0.2 +2024-07-29 01:59:38,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=225501.33333333334, ans=0.2 +2024-07-29 01:59:49,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=225528.0, ans=0.125 +2024-07-29 01:59:54,610 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225541.33333333334, ans=0.125 +2024-07-29 01:59:55,060 INFO [train.py:1114] (0/4) Epoch 17, batch 5600, loss[loss=0.1587, simple_loss=0.2586, pruned_loss=0.02942, over 4751.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2676, pruned_loss=0.04341, over 934508.40 frames. ], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:57,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225541.33333333334, ans=0.1 +2024-07-29 02:00:09,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=225568.0, ans=0.0 +2024-07-29 02:00:27,273 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.591e+01 6.348e+01 7.500e+01 1.117e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 02:00:28,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=225608.0, ans=0.125 +2024-07-29 02:00:29,295 INFO [train.py:1114] (0/4) Epoch 17, batch 5650, loss[loss=0.2316, simple_loss=0.3185, pruned_loss=0.07236, over 4556.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2671, pruned_loss=0.04322, over 937017.33 frames. ], batch size: 21, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:00:31,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=225608.0, ans=0.125 +2024-07-29 02:00:37,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225621.33333333334, ans=0.125 +2024-07-29 02:01:11,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=225648.0, ans=0.0 +2024-07-29 02:01:14,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225648.0, ans=0.1 +2024-07-29 02:01:24,146 INFO [train.py:1114] (0/4) Epoch 17, batch 5700, loss[loss=0.1892, simple_loss=0.2776, pruned_loss=0.05039, over 4693.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04281, over 937989.38 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:01:26,076 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.35 vs. 
limit=22.5 +2024-07-29 02:01:33,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.24 vs. limit=15.0 +2024-07-29 02:01:42,129 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.48 vs. limit=10.0 +2024-07-29 02:01:52,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=225728.0, ans=0.025 +2024-07-29 02:01:55,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=225728.0, ans=0.125 +2024-07-29 02:01:56,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225728.0, ans=0.1 +2024-07-29 02:01:57,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.493e+01 6.225e+01 7.048e+01 1.096e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 02:02:02,570 INFO [train.py:1114] (0/4) Epoch 17, batch 5750, loss[loss=0.1658, simple_loss=0.2618, pruned_loss=0.03494, over 4726.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2678, pruned_loss=0.04316, over 938153.75 frames. ], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:11,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=225754.66666666666, ans=0.0 +2024-07-29 02:02:13,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=225754.66666666666, ans=0.0 +2024-07-29 02:02:17,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=225768.0, ans=10.0 +2024-07-29 02:02:34,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=225794.66666666666, ans=0.125 +2024-07-29 02:02:35,949 INFO [train.py:1114] (0/4) Epoch 17, batch 5800, loss[loss=0.2001, simple_loss=0.2929, pruned_loss=0.05366, over 4725.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2686, pruned_loss=0.04387, over 937597.43 frames. ], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:38,339 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:02:56,103 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=225821.33333333334, ans=0.2 +2024-07-29 02:03:04,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=225834.66666666666, ans=0.025 +2024-07-29 02:03:14,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=225861.33333333334, ans=0.025 +2024-07-29 02:03:18,482 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.618e+01 6.216e+01 6.871e+01 1.068e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 02:03:21,241 INFO [train.py:1114] (0/4) Epoch 17, batch 5850, loss[loss=0.1933, simple_loss=0.2841, pruned_loss=0.05127, over 4460.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2682, pruned_loss=0.04366, over 937923.47 frames. 
], batch size: 21, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:03:24,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=225874.66666666666, ans=0.125 +2024-07-29 02:03:30,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=225888.0, ans=0.0 +2024-07-29 02:03:39,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225901.33333333334, ans=0.1 +2024-07-29 02:03:41,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=225901.33333333334, ans=0.0 +2024-07-29 02:03:41,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=225901.33333333334, ans=0.0 +2024-07-29 02:03:56,504 INFO [train.py:1114] (0/4) Epoch 17, batch 5900, loss[loss=0.1963, simple_loss=0.2876, pruned_loss=0.05253, over 4682.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2663, pruned_loss=0.04303, over 938914.82 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:03:57,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=225941.33333333334, ans=0.0 +2024-07-29 02:04:00,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.70 vs. limit=15.0 +2024-07-29 02:04:12,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=225968.0, ans=0.125 +2024-07-29 02:04:15,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=225968.0, ans=0.125 +2024-07-29 02:04:18,565 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.40 vs. limit=15.0 +2024-07-29 02:04:19,537 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:04:28,168 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.754e+01 6.416e+01 7.190e+01 1.147e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:04:30,247 INFO [train.py:1114] (0/4) Epoch 17, batch 5950, loss[loss=0.1795, simple_loss=0.2662, pruned_loss=0.0464, over 4671.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2661, pruned_loss=0.04281, over 940809.34 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:04:31,849 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:04:32,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=226008.0, ans=0.2 +2024-07-29 02:04:43,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=226021.33333333334, ans=0.0 +2024-07-29 02:05:06,734 INFO [train.py:1114] (0/4) Epoch 17, batch 6000, loss[loss=0.17, simple_loss=0.2607, pruned_loss=0.03969, over 4228.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2659, pruned_loss=0.04291, over 937603.22 frames. 
], batch size: 26, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:05:06,735 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 02:05:39,479 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2332, 4.0776, 3.0319, 2.8268], device='cuda:0') +2024-07-29 02:05:43,821 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.1623, simple_loss=0.2646, pruned_loss=0.02995, over 944034.00 frames. +2024-07-29 02:05:43,821 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 02:05:48,218 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.68 vs. limit=15.0 +2024-07-29 02:05:51,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=226088.0, ans=0.0 +2024-07-29 02:06:04,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=226101.33333333334, ans=0.125 +2024-07-29 02:06:17,673 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.832e+01 6.475e+01 7.861e+01 1.037e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 02:06:19,740 INFO [train.py:1114] (0/4) Epoch 17, batch 6050, loss[loss=0.1713, simple_loss=0.2635, pruned_loss=0.0395, over 4789.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2662, pruned_loss=0.04339, over 938604.45 frames. ], batch size: 12, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:06:26,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226154.66666666666, ans=0.0 +2024-07-29 02:06:29,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=226154.66666666666, ans=0.025 +2024-07-29 02:06:31,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=226154.66666666666, ans=0.125 +2024-07-29 02:06:49,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226194.66666666666, ans=0.1 +2024-07-29 02:07:00,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=226194.66666666666, ans=0.0 +2024-07-29 02:07:01,462 INFO [train.py:1114] (0/4) Epoch 17, batch 6100, loss[loss=0.1606, simple_loss=0.256, pruned_loss=0.03262, over 4683.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.266, pruned_loss=0.04296, over 938147.07 frames. 
], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:07:10,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=226221.33333333334, ans=0.2 +2024-07-29 02:07:11,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=226221.33333333334, ans=0.125 +2024-07-29 02:07:11,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226221.33333333334, ans=0.1 +2024-07-29 02:07:44,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226248.0, ans=0.1 +2024-07-29 02:07:47,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=226261.33333333334, ans=0.125 +2024-07-29 02:07:53,318 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.857e+01 5.509e+01 6.001e+01 6.915e+01 1.050e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 02:07:53,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=226261.33333333334, ans=0.2 +2024-07-29 02:07:55,420 INFO [train.py:1114] (0/4) Epoch 17, batch 6150, loss[loss=0.1768, simple_loss=0.2655, pruned_loss=0.04409, over 3475.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2665, pruned_loss=0.0431, over 936927.26 frames. ], batch size: 35, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:14,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226274.66666666666, ans=0.1 +2024-07-29 02:08:16,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=226288.0, ans=0.0 +2024-07-29 02:08:20,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=226288.0, ans=0.125 +2024-07-29 02:08:29,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=226301.33333333334, ans=0.125 +2024-07-29 02:08:33,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=226314.66666666666, ans=0.125 +2024-07-29 02:08:40,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226328.0, ans=0.125 +2024-07-29 02:08:44,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226328.0, ans=0.1 +2024-07-29 02:08:44,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=226328.0, ans=0.025 +2024-07-29 02:08:56,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=226341.33333333334, ans=0.125 +2024-07-29 02:08:56,969 INFO [train.py:1114] (0/4) Epoch 17, batch 6200, loss[loss=0.1821, simple_loss=0.2773, pruned_loss=0.04342, over 4741.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2668, pruned_loss=0.04316, over 936603.94 frames. 
], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:59,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=226341.33333333334, ans=0.125 +2024-07-29 02:09:33,456 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.28 vs. limit=22.5 +2024-07-29 02:09:36,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.10 vs. limit=15.0 +2024-07-29 02:09:47,441 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:09:47,476 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:09:51,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=226394.66666666666, ans=0.05 +2024-07-29 02:09:51,544 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.563e+01 6.274e+01 7.227e+01 1.075e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 02:09:52,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226408.0, ans=0.125 +2024-07-29 02:09:53,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226408.0, ans=0.1 +2024-07-29 02:09:53,624 INFO [train.py:1114] (0/4) Epoch 17, batch 6250, loss[loss=0.2073, simple_loss=0.3053, pruned_loss=0.05463, over 4805.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2675, pruned_loss=0.04362, over 933167.37 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:10:03,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=226421.33333333334, ans=0.125 +2024-07-29 02:10:07,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226434.66666666666, ans=0.125 +2024-07-29 02:10:14,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226448.0, ans=0.1 +2024-07-29 02:10:23,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=226461.33333333334, ans=0.025 +2024-07-29 02:10:30,815 INFO [train.py:1114] (0/4) Epoch 17, batch 6300, loss[loss=0.1285, simple_loss=0.2144, pruned_loss=0.02125, over 4533.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04381, over 929606.69 frames. 
], batch size: 10, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:10:41,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=226474.66666666666, ans=0.125 +2024-07-29 02:10:44,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=226474.66666666666, ans=0.0 +2024-07-29 02:10:47,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226488.0, ans=0.0 +2024-07-29 02:11:01,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226501.33333333334, ans=0.125 +2024-07-29 02:11:11,291 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.44 vs. limit=15.0 +2024-07-29 02:11:13,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=226528.0, ans=0.0 +2024-07-29 02:11:15,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226528.0, ans=0.1 +2024-07-29 02:11:15,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226528.0, ans=0.125 +2024-07-29 02:11:15,470 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.120e+01 5.607e+01 6.569e+01 7.954e+01 1.446e+02, threshold=1.314e+02, percent-clipped=2.0 +2024-07-29 02:11:15,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=226528.0, ans=0.125 +2024-07-29 02:11:31,901 INFO [train.py:1114] (0/4) Epoch 17, batch 6350, loss[loss=0.1961, simple_loss=0.2799, pruned_loss=0.05618, over 4470.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2669, pruned_loss=0.04337, over 933615.41 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:11:38,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226541.33333333334, ans=0.0 +2024-07-29 02:11:39,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.70 vs. limit=15.0 +2024-07-29 02:11:45,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226554.66666666666, ans=0.125 +2024-07-29 02:12:13,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=226568.0, ans=0.05 +2024-07-29 02:12:22,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=226581.33333333334, ans=0.025 +2024-07-29 02:12:30,407 INFO [train.py:1114] (0/4) Epoch 17, batch 6400, loss[loss=0.1853, simple_loss=0.2904, pruned_loss=0.04008, over 4638.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2661, pruned_loss=0.04317, over 934931.91 frames. 
], batch size: 13, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:12:58,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226621.33333333334, ans=0.125 +2024-07-29 02:12:59,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=226621.33333333334, ans=0.0 +2024-07-29 02:13:02,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=226634.66666666666, ans=0.125 +2024-07-29 02:13:08,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=226634.66666666666, ans=0.0 +2024-07-29 02:13:10,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=226634.66666666666, ans=0.125 +2024-07-29 02:13:12,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226648.0, ans=0.1 +2024-07-29 02:13:13,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=226648.0, ans=0.125 +2024-07-29 02:13:19,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=226648.0, ans=0.2 +2024-07-29 02:13:28,832 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 5.819e+01 6.340e+01 7.116e+01 1.046e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 02:13:33,324 INFO [train.py:1114] (0/4) Epoch 17, batch 6450, loss[loss=0.2019, simple_loss=0.2779, pruned_loss=0.063, over 4551.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.267, pruned_loss=0.04392, over 938587.03 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:13:39,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-29 02:13:40,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.64 vs. limit=22.5 +2024-07-29 02:14:02,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.22 vs. limit=15.0 +2024-07-29 02:14:10,838 INFO [train.py:1114] (0/4) Epoch 17, batch 6500, loss[loss=0.2348, simple_loss=0.3133, pruned_loss=0.07811, over 3321.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2656, pruned_loss=0.04311, over 939938.02 frames. ], batch size: 35, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:14:24,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226768.0, ans=0.125 +2024-07-29 02:14:31,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226781.33333333334, ans=0.1 +2024-07-29 02:14:35,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.61 vs. 
limit=10.0 +2024-07-29 02:14:42,543 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.659e+01 6.416e+01 7.709e+01 1.114e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:14:42,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=226794.66666666666, ans=0.025 +2024-07-29 02:14:43,969 INFO [train.py:1114] (0/4) Epoch 17, batch 6550, loss[loss=0.1732, simple_loss=0.2539, pruned_loss=0.04632, over 4795.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.265, pruned_loss=0.04291, over 942876.09 frames. ], batch size: 11, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:14:46,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=226808.0, ans=0.125 +2024-07-29 02:14:49,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226821.33333333334, ans=0.1 +2024-07-29 02:14:50,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226821.33333333334, ans=0.0 +2024-07-29 02:14:53,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=226821.33333333334, ans=0.125 +2024-07-29 02:15:03,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=226834.66666666666, ans=0.125 +2024-07-29 02:15:03,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226834.66666666666, ans=0.1 +2024-07-29 02:15:10,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226848.0, ans=0.125 +2024-07-29 02:15:12,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=226861.33333333334, ans=0.2 +2024-07-29 02:15:18,659 INFO [train.py:1114] (0/4) Epoch 17, batch 6600, loss[loss=0.2057, simple_loss=0.2962, pruned_loss=0.05762, over 4932.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2653, pruned_loss=0.04271, over 945011.80 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:15:19,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226874.66666666666, ans=0.125 +2024-07-29 02:15:19,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=226874.66666666666, ans=0.2 +2024-07-29 02:15:20,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226874.66666666666, ans=0.125 +2024-07-29 02:15:55,700 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=12.0 +2024-07-29 02:15:55,957 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.664e+01 6.465e+01 7.332e+01 1.238e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 02:15:57,328 INFO [train.py:1114] (0/4) Epoch 17, batch 6650, loss[loss=0.1987, simple_loss=0.2833, pruned_loss=0.05703, over 4599.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2655, pruned_loss=0.04283, over 943777.65 frames. 
], batch size: 17, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:16:02,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=226941.33333333334, ans=0.125 +2024-07-29 02:16:07,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=226941.33333333334, ans=0.07 +2024-07-29 02:16:08,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=226954.66666666666, ans=0.025 +2024-07-29 02:16:11,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=226954.66666666666, ans=0.125 +2024-07-29 02:16:35,768 INFO [train.py:1114] (0/4) Epoch 17, batch 6700, loss[loss=0.203, simple_loss=0.2927, pruned_loss=0.05669, over 4699.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.266, pruned_loss=0.04287, over 942306.68 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:16:37,972 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=227008.0, ans=0.025 +2024-07-29 02:16:51,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227034.66666666666, ans=0.125 +2024-07-29 02:16:55,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=227048.0, ans=0.025 +2024-07-29 02:16:59,049 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.42 vs. limit=8.0 +2024-07-29 02:17:02,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227061.33333333334, ans=0.0 +2024-07-29 02:17:03,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=227061.33333333334, ans=0.125 +2024-07-29 02:17:08,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.902e+01 6.582e+01 7.550e+01 1.119e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-29 02:17:09,555 INFO [train.py:1114] (0/4) Epoch 17, batch 6750, loss[loss=0.1671, simple_loss=0.256, pruned_loss=0.03914, over 4003.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2661, pruned_loss=0.04289, over 939814.50 frames. ], batch size: 25, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:17:09,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=227074.66666666666, ans=0.2 +2024-07-29 02:17:11,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=227074.66666666666, ans=0.2 +2024-07-29 02:17:13,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=227074.66666666666, ans=0.125 +2024-07-29 02:17:19,803 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=227088.0, ans=0.05 +2024-07-29 02:17:28,431 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.36 vs. 
limit=15.0 +2024-07-29 02:17:32,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.64 vs. limit=15.0 +2024-07-29 02:17:33,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=227114.66666666666, ans=0.2 +2024-07-29 02:17:36,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. limit=10.0 +2024-07-29 02:17:37,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227114.66666666666, ans=0.125 +2024-07-29 02:17:40,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=227128.0, ans=0.0 +2024-07-29 02:17:46,140 INFO [train.py:1114] (0/4) Epoch 17, batch 6800, loss[loss=0.2027, simple_loss=0.2931, pruned_loss=0.05613, over 4635.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.04282, over 938389.43 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:17:55,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=227154.66666666666, ans=0.2 +2024-07-29 02:17:56,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=227154.66666666666, ans=0.07 +2024-07-29 02:18:00,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=227154.66666666666, ans=0.125 +2024-07-29 02:18:07,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=227168.0, ans=0.2 +2024-07-29 02:18:08,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=227168.0, ans=0.125 +2024-07-29 02:18:11,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227181.33333333334, ans=0.0 +2024-07-29 02:18:14,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=227181.33333333334, ans=0.09899494936611666 +2024-07-29 02:18:20,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227194.66666666666, ans=0.125 +2024-07-29 02:18:23,158 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.796e+01 6.354e+01 7.528e+01 1.110e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 02:18:24,506 INFO [train.py:1114] (0/4) Epoch 17, batch 6850, loss[loss=0.1503, simple_loss=0.2378, pruned_loss=0.03135, over 4693.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2672, pruned_loss=0.04296, over 940191.08 frames. 
], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:18:29,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=227208.0, ans=0.125 +2024-07-29 02:18:37,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227234.66666666666, ans=0.125 +2024-07-29 02:18:39,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=227234.66666666666, ans=0.0 +2024-07-29 02:18:43,619 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.92 vs. limit=15.0 +2024-07-29 02:18:51,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=227261.33333333334, ans=0.125 +2024-07-29 02:18:52,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227261.33333333334, ans=0.125 +2024-07-29 02:18:57,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=227261.33333333334, ans=0.5 +2024-07-29 02:18:58,446 INFO [train.py:1114] (0/4) Epoch 17, batch 6900, loss[loss=0.1787, simple_loss=0.2695, pruned_loss=0.04395, over 4963.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2671, pruned_loss=0.04263, over 942404.32 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:19:01,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=227274.66666666666, ans=0.125 +2024-07-29 02:19:04,596 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=227288.0, ans=0.125 +2024-07-29 02:19:08,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227288.0, ans=0.125 +2024-07-29 02:19:09,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=227288.0, ans=0.0 +2024-07-29 02:19:10,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227288.0, ans=0.125 +2024-07-29 02:19:26,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.88 vs. limit=6.0 +2024-07-29 02:19:30,580 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.673e+01 6.337e+01 7.070e+01 9.910e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 02:19:31,035 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=6.0 +2024-07-29 02:19:31,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=227341.33333333334, ans=0.2 +2024-07-29 02:19:31,920 INFO [train.py:1114] (0/4) Epoch 17, batch 6950, loss[loss=0.1513, simple_loss=0.2311, pruned_loss=0.03573, over 4512.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.04286, over 939998.38 frames. 
], batch size: 10, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:19:32,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227341.33333333334, ans=0.0 +2024-07-29 02:19:38,395 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227341.33333333334, ans=0.125 +2024-07-29 02:19:39,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227341.33333333334, ans=0.125 +2024-07-29 02:19:43,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0 +2024-07-29 02:19:49,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=227368.0, ans=0.025 +2024-07-29 02:19:55,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=227381.33333333334, ans=0.0 +2024-07-29 02:19:56,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227381.33333333334, ans=0.0 +2024-07-29 02:19:59,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.40 vs. limit=15.0 +2024-07-29 02:20:00,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=227394.66666666666, ans=0.125 +2024-07-29 02:20:01,674 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:20:03,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=227394.66666666666, ans=0.125 +2024-07-29 02:20:06,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.35 vs. limit=15.0 +2024-07-29 02:20:06,917 INFO [train.py:1114] (0/4) Epoch 17, batch 7000, loss[loss=0.1803, simple_loss=0.2822, pruned_loss=0.03922, over 4601.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04235, over 938072.90 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:20:12,505 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.77 vs. 
limit=15.0 +2024-07-29 02:20:21,098 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=227434.66666666666, ans=0.125 +2024-07-29 02:20:22,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=227434.66666666666, ans=0.125 +2024-07-29 02:20:28,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=227448.0, ans=0.0 +2024-07-29 02:20:29,454 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:20:38,601 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.627e+01 5.582e+01 6.064e+01 6.691e+01 1.096e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 02:20:38,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=227461.33333333334, ans=0.04949747468305833 +2024-07-29 02:20:39,942 INFO [train.py:1114] (0/4) Epoch 17, batch 7050, loss[loss=0.189, simple_loss=0.2892, pruned_loss=0.04438, over 4691.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2652, pruned_loss=0.04213, over 941406.74 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:20:48,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227488.0, ans=0.0 +2024-07-29 02:20:48,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=227488.0, ans=0.0 +2024-07-29 02:20:54,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227488.0, ans=0.0 +2024-07-29 02:21:10,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=227514.66666666666, ans=0.0 +2024-07-29 02:21:14,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=227528.0, ans=0.125 +2024-07-29 02:21:18,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=227528.0, ans=0.0 +2024-07-29 02:21:20,652 INFO [train.py:1114] (0/4) Epoch 17, batch 7100, loss[loss=0.1703, simple_loss=0.2649, pruned_loss=0.03781, over 4802.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2662, pruned_loss=0.04264, over 936088.41 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:21:32,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227554.66666666666, ans=0.0 +2024-07-29 02:21:34,095 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=227568.0, ans=0.025 +2024-07-29 02:21:52,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227594.66666666666, ans=0.125 +2024-07-29 02:21:52,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+01 5.574e+01 6.289e+01 7.294e+01 1.340e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-29 02:21:54,446 INFO [train.py:1114] (0/4) Epoch 17, batch 7150, loss[loss=0.1935, simple_loss=0.2909, pruned_loss=0.04803, over 4541.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2645, pruned_loss=0.04188, over 937428.75 frames. 
], batch size: 21, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:21:56,681 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=227608.0, ans=0.2 +2024-07-29 02:22:01,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227621.33333333334, ans=0.1 +2024-07-29 02:22:04,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=227621.33333333334, ans=0.07 +2024-07-29 02:22:08,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=227634.66666666666, ans=0.125 +2024-07-29 02:22:09,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227634.66666666666, ans=0.125 +2024-07-29 02:22:10,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=12.0 +2024-07-29 02:22:10,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=227634.66666666666, ans=0.125 +2024-07-29 02:22:16,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=227648.0, ans=0.1 +2024-07-29 02:22:22,274 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:22:22,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=227661.33333333334, ans=0.125 +2024-07-29 02:22:27,925 INFO [train.py:1114] (0/4) Epoch 17, batch 7200, loss[loss=0.2078, simple_loss=0.2915, pruned_loss=0.06208, over 4797.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2655, pruned_loss=0.04249, over 937853.68 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:22:28,757 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=227674.66666666666, ans=0.125 +2024-07-29 02:22:32,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=227674.66666666666, ans=0.125 +2024-07-29 02:22:36,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=227688.0, ans=0.125 +2024-07-29 02:22:38,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.51 vs. 
limit=15.0 +2024-07-29 02:22:43,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=227701.33333333334, ans=0.5 +2024-07-29 02:22:45,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227701.33333333334, ans=0.1 +2024-07-29 02:22:46,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227701.33333333334, ans=0.125 +2024-07-29 02:22:48,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227714.66666666666, ans=0.125 +2024-07-29 02:22:49,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227714.66666666666, ans=0.125 +2024-07-29 02:22:49,400 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.32 vs. limit=15.0 +2024-07-29 02:22:49,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=227714.66666666666, ans=0.125 +2024-07-29 02:22:59,861 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.624e+01 6.163e+01 6.917e+01 1.062e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 02:23:01,152 INFO [train.py:1114] (0/4) Epoch 17, batch 7250, loss[loss=0.1725, simple_loss=0.2559, pruned_loss=0.04457, over 4846.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2647, pruned_loss=0.04195, over 939856.57 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:23:06,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227741.33333333334, ans=0.125 +2024-07-29 02:23:14,307 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=227754.66666666666, ans=0.125 +2024-07-29 02:23:29,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.66 vs. limit=15.0 +2024-07-29 02:23:35,263 INFO [train.py:1114] (0/4) Epoch 17, batch 7300, loss[loss=0.1476, simple_loss=0.2294, pruned_loss=0.03284, over 4862.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2651, pruned_loss=0.04211, over 939766.68 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:23:38,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.78 vs. 
limit=12.0 +2024-07-29 02:23:39,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227808.0, ans=0.1 +2024-07-29 02:23:58,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=227848.0, ans=0.0 +2024-07-29 02:24:00,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227848.0, ans=0.125 +2024-07-29 02:24:00,761 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=227848.0, ans=0.125 +2024-07-29 02:24:06,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=227861.33333333334, ans=0.125 +2024-07-29 02:24:07,038 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.670e+01 6.102e+01 6.863e+01 9.457e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 02:24:08,391 INFO [train.py:1114] (0/4) Epoch 17, batch 7350, loss[loss=0.1666, simple_loss=0.2645, pruned_loss=0.0343, over 4638.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.266, pruned_loss=0.04228, over 939151.71 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:24:12,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=227874.66666666666, ans=0.2 +2024-07-29 02:24:13,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-07-29 02:24:26,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=227901.33333333334, ans=0.125 +2024-07-29 02:24:28,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227901.33333333334, ans=0.125 +2024-07-29 02:24:39,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=227914.66666666666, ans=0.2 +2024-07-29 02:24:46,993 INFO [train.py:1114] (0/4) Epoch 17, batch 7400, loss[loss=0.1844, simple_loss=0.2786, pruned_loss=0.04512, over 4697.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2659, pruned_loss=0.04218, over 940613.89 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:24:54,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=227954.66666666666, ans=0.125 +2024-07-29 02:25:08,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227981.33333333334, ans=0.0 +2024-07-29 02:25:08,709 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227981.33333333334, ans=0.125 +2024-07-29 02:25:08,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-07-29 02:25:10,256 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. 
limit=6.0 +2024-07-29 02:25:12,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=227981.33333333334, ans=0.025 +2024-07-29 02:25:12,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=227981.33333333334, ans=0.125 +2024-07-29 02:25:18,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=227994.66666666666, ans=10.0 +2024-07-29 02:25:20,901 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.692e+01 6.442e+01 7.535e+01 1.153e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 02:25:22,144 INFO [train.py:1114] (0/4) Epoch 17, batch 7450, loss[loss=0.1551, simple_loss=0.2407, pruned_loss=0.0347, over 4631.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2646, pruned_loss=0.04192, over 937905.96 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:25:22,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228008.0, ans=0.1 +2024-07-29 02:25:23,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228008.0, ans=0.1 +2024-07-29 02:25:23,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=228008.0, ans=0.125 +2024-07-29 02:25:26,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=228008.0, ans=0.0 +2024-07-29 02:25:27,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=228008.0, ans=0.0 +2024-07-29 02:25:28,074 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=228021.33333333334, ans=0.025 +2024-07-29 02:25:36,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=228034.66666666666, ans=0.125 +2024-07-29 02:25:42,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-29 02:25:43,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228048.0, ans=0.125 +2024-07-29 02:25:49,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.15 vs. limit=15.0 +2024-07-29 02:25:51,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=228061.33333333334, ans=0.025 +2024-07-29 02:25:55,079 INFO [train.py:1114] (0/4) Epoch 17, batch 7500, loss[loss=0.192, simple_loss=0.2693, pruned_loss=0.05735, over 3634.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.265, pruned_loss=0.04196, over 936377.76 frames. 
], batch size: 35, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:25:55,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=228074.66666666666, ans=0.125 +2024-07-29 02:26:01,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=228088.0, ans=0.0 +2024-07-29 02:26:04,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=24.15 vs. limit=15.0 +2024-07-29 02:26:04,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=228088.0, ans=0.125 +2024-07-29 02:26:04,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=228088.0, ans=0.125 +2024-07-29 02:26:06,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=228088.0, ans=0.125 +2024-07-29 02:26:16,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228114.66666666666, ans=0.125 +2024-07-29 02:26:26,169 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=228128.0, ans=0.0 +2024-07-29 02:26:26,552 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.692e+01 6.151e+01 7.079e+01 1.117e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 02:26:27,911 INFO [train.py:1114] (0/4) Epoch 17, batch 7550, loss[loss=0.2076, simple_loss=0.3014, pruned_loss=0.05683, over 4597.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2666, pruned_loss=0.04266, over 936407.20 frames. ], batch size: 17, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:26:37,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=228154.66666666666, ans=0.0 +2024-07-29 02:26:38,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=228154.66666666666, ans=0.0 +2024-07-29 02:26:59,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=228194.66666666666, ans=0.025 +2024-07-29 02:27:00,381 INFO [train.py:1114] (0/4) Epoch 17, batch 7600, loss[loss=0.2068, simple_loss=0.2952, pruned_loss=0.05918, over 4807.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2676, pruned_loss=0.04309, over 938190.26 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:27:08,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228221.33333333334, ans=0.1 +2024-07-29 02:27:08,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=228221.33333333334, ans=0.0 +2024-07-29 02:27:31,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.52 vs. 
limit=15.0 +2024-07-29 02:27:32,122 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.577e+01 6.101e+01 6.985e+01 1.081e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 02:27:33,477 INFO [train.py:1114] (0/4) Epoch 17, batch 7650, loss[loss=0.1687, simple_loss=0.2592, pruned_loss=0.03913, over 4929.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2673, pruned_loss=0.04257, over 937376.68 frames. ], batch size: 12, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:27:44,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=228288.0, ans=0.0 +2024-07-29 02:27:47,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.89 vs. limit=6.0 +2024-07-29 02:27:54,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=228314.66666666666, ans=0.0 +2024-07-29 02:27:57,053 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.25 vs. limit=15.0 +2024-07-29 02:28:02,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228328.0, ans=0.1 +2024-07-29 02:28:03,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=228328.0, ans=0.125 +2024-07-29 02:28:06,776 INFO [train.py:1114] (0/4) Epoch 17, batch 7700, loss[loss=0.1615, simple_loss=0.2594, pruned_loss=0.03181, over 4693.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2676, pruned_loss=0.04263, over 934623.21 frames. ], batch size: 13, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:08,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0 +2024-07-29 02:28:10,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228341.33333333334, ans=0.125 +2024-07-29 02:28:20,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=228368.0, ans=0.125 +2024-07-29 02:28:22,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=12.0 +2024-07-29 02:28:29,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=228381.33333333334, ans=0.07 +2024-07-29 02:28:33,855 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.34 vs. 
limit=15.0 +2024-07-29 02:28:34,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=228394.66666666666, ans=0.125 +2024-07-29 02:28:38,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.830e+01 5.778e+01 6.221e+01 6.817e+01 1.028e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 02:28:39,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=228408.0, ans=0.0 +2024-07-29 02:28:39,812 INFO [train.py:1114] (0/4) Epoch 17, batch 7750, loss[loss=0.1612, simple_loss=0.2617, pruned_loss=0.03032, over 4932.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2682, pruned_loss=0.04303, over 935625.36 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:41,607 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.89 vs. limit=15.0 +2024-07-29 02:28:46,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. limit=15.0 +2024-07-29 02:28:54,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=228434.66666666666, ans=0.025 +2024-07-29 02:29:00,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228448.0, ans=0.125 +2024-07-29 02:29:09,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.90 vs. limit=22.5 +2024-07-29 02:29:13,589 INFO [train.py:1114] (0/4) Epoch 17, batch 7800, loss[loss=0.1814, simple_loss=0.274, pruned_loss=0.04441, over 4672.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2679, pruned_loss=0.04302, over 937973.75 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:29:16,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=228474.66666666666, ans=0.0 +2024-07-29 02:29:19,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.50 vs. 
limit=15.0 +2024-07-29 02:29:22,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228488.0, ans=0.125 +2024-07-29 02:29:23,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=228488.0, ans=0.0 +2024-07-29 02:29:30,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=228501.33333333334, ans=0.125 +2024-07-29 02:29:37,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228514.66666666666, ans=0.1 +2024-07-29 02:29:42,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228528.0, ans=0.1 +2024-07-29 02:29:45,767 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.601e+01 6.061e+01 6.909e+01 9.922e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 02:29:47,150 INFO [train.py:1114] (0/4) Epoch 17, batch 7850, loss[loss=0.1523, simple_loss=0.2346, pruned_loss=0.03499, over 4498.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2675, pruned_loss=0.04284, over 936076.68 frames. ], batch size: 10, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:29:53,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=228554.66666666666, ans=0.05 +2024-07-29 02:30:02,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=228568.0, ans=0.04949747468305833 +2024-07-29 02:30:07,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.32 vs. limit=10.0 +2024-07-29 02:30:18,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=228594.66666666666, ans=0.2 +2024-07-29 02:30:20,619 INFO [train.py:1114] (0/4) Epoch 17, batch 7900, loss[loss=0.1709, simple_loss=0.2703, pruned_loss=0.03574, over 4871.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2688, pruned_loss=0.04337, over 933564.39 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:22,404 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.83 vs. 
limit=22.5 +2024-07-29 02:30:28,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=228621.33333333334, ans=0.0 +2024-07-29 02:30:33,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=228634.66666666666, ans=0.025 +2024-07-29 02:30:37,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=228634.66666666666, ans=0.2 +2024-07-29 02:30:42,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=228648.0, ans=0.125 +2024-07-29 02:30:45,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=228648.0, ans=0.0 +2024-07-29 02:30:51,949 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.771e+01 6.375e+01 7.176e+01 1.150e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-29 02:30:53,270 INFO [train.py:1114] (0/4) Epoch 17, batch 7950, loss[loss=0.2248, simple_loss=0.3043, pruned_loss=0.07266, over 3535.00 frames. ], tot_loss[loss=0.177, simple_loss=0.268, pruned_loss=0.04297, over 935938.13 frames. ], batch size: 35, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:54,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=228674.66666666666, ans=0.2 +2024-07-29 02:30:57,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=228674.66666666666, ans=0.2 +2024-07-29 02:31:03,026 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.75 vs. limit=22.5 +2024-07-29 02:31:20,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=228701.33333333334, ans=0.0 +2024-07-29 02:31:22,996 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:31:27,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=228714.66666666666, ans=0.2 +2024-07-29 02:31:33,986 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.68 vs. limit=15.0 +2024-07-29 02:31:43,761 INFO [train.py:1114] (0/4) Epoch 17, batch 8000, loss[loss=0.1567, simple_loss=0.254, pruned_loss=0.02972, over 4617.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2662, pruned_loss=0.04298, over 935095.79 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:31:45,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228741.33333333334, ans=0.1 +2024-07-29 02:31:55,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=228754.66666666666, ans=0.125 +2024-07-29 02:32:07,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.94 vs. 
limit=22.5 +2024-07-29 02:32:17,777 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.673e+01 6.449e+01 7.589e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 02:32:19,136 INFO [train.py:1114] (0/4) Epoch 17, batch 8050, loss[loss=0.1539, simple_loss=0.2539, pruned_loss=0.02695, over 4812.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2659, pruned_loss=0.04286, over 935072.83 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:32:19,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=12.0 +2024-07-29 02:32:33,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=228821.33333333334, ans=0.0 +2024-07-29 02:32:46,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=228848.0, ans=0.125 +2024-07-29 02:32:49,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228861.33333333334, ans=0.125 +2024-07-29 02:32:50,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=228861.33333333334, ans=0.0 +2024-07-29 02:32:52,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=228861.33333333334, ans=0.0 +2024-07-29 02:32:54,992 INFO [train.py:1114] (0/4) Epoch 17, batch 8100, loss[loss=0.2111, simple_loss=0.2948, pruned_loss=0.06372, over 4801.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2668, pruned_loss=0.0428, over 934725.67 frames. ], batch size: 15, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:33:00,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=228874.66666666666, ans=0.025 +2024-07-29 02:33:28,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.750e+01 6.401e+01 7.734e+01 1.146e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 02:33:29,447 INFO [train.py:1114] (0/4) Epoch 17, batch 8150, loss[loss=0.1891, simple_loss=0.2847, pruned_loss=0.04674, over 4794.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2662, pruned_loss=0.04267, over 938031.04 frames. 
], batch size: 15, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:33:31,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=228941.33333333334, ans=0.0 +2024-07-29 02:33:38,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=228954.66666666666, ans=0.125 +2024-07-29 02:33:45,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=228968.0, ans=0.125 +2024-07-29 02:33:46,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=228968.0, ans=0.2 +2024-07-29 02:33:49,055 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=228981.33333333334, ans=0.2 +2024-07-29 02:33:50,876 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=228981.33333333334, ans=0.125 +2024-07-29 02:33:57,420 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:34:02,001 INFO [train.py:1114] (0/4) Epoch 17, batch 8200, loss[loss=0.1663, simple_loss=0.2538, pruned_loss=0.03937, over 4789.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2659, pruned_loss=0.04223, over 939143.63 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:34:04,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=229008.0, ans=0.025 +2024-07-29 02:34:20,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=229034.66666666666, ans=15.0 +2024-07-29 02:34:25,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=229048.0, ans=0.0 +2024-07-29 02:34:34,887 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.838e+01 5.522e+01 6.074e+01 7.199e+01 1.525e+02, threshold=1.215e+02, percent-clipped=1.0 +2024-07-29 02:34:35,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=229061.33333333334, ans=0.5 +2024-07-29 02:34:36,186 INFO [train.py:1114] (0/4) Epoch 17, batch 8250, loss[loss=0.1528, simple_loss=0.2442, pruned_loss=0.03068, over 4892.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04209, over 939320.74 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:34:36,911 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:34:43,479 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. limit=6.0 +2024-07-29 02:35:11,257 INFO [train.py:1114] (0/4) Epoch 17, batch 8300, loss[loss=0.1826, simple_loss=0.2874, pruned_loss=0.03885, over 4888.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2666, pruned_loss=0.04245, over 939240.50 frames. 
], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:35:11,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229141.33333333334, ans=0.1 +2024-07-29 02:35:13,560 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.48 vs. limit=15.0 +2024-07-29 02:35:14,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229141.33333333334, ans=0.1 +2024-07-29 02:35:17,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=229154.66666666666, ans=0.2 +2024-07-29 02:35:17,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229154.66666666666, ans=0.125 +2024-07-29 02:35:34,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.04 vs. limit=22.5 +2024-07-29 02:35:36,111 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0 +2024-07-29 02:35:40,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=229194.66666666666, ans=0.0 +2024-07-29 02:35:44,146 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.675e+01 6.316e+01 6.956e+01 1.152e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 02:35:45,476 INFO [train.py:1114] (0/4) Epoch 17, batch 8350, loss[loss=0.1983, simple_loss=0.2912, pruned_loss=0.05269, over 4804.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2662, pruned_loss=0.04243, over 942129.82 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:35:55,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=229221.33333333334, ans=0.125 +2024-07-29 02:36:04,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=229234.66666666666, ans=0.0 +2024-07-29 02:36:11,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229248.0, ans=0.125 +2024-07-29 02:36:17,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=229261.33333333334, ans=0.0 +2024-07-29 02:36:19,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=229261.33333333334, ans=0.2 +2024-07-29 02:36:22,689 INFO [train.py:1114] (0/4) Epoch 17, batch 8400, loss[loss=0.1371, simple_loss=0.2259, pruned_loss=0.0241, over 4783.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04233, over 940728.21 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:36:25,326 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:36:25,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.20 vs. 
limit=22.5 +2024-07-29 02:36:37,892 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.79 vs. limit=12.0 +2024-07-29 02:36:44,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=229314.66666666666, ans=0.025 +2024-07-29 02:36:44,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=229314.66666666666, ans=0.0 +2024-07-29 02:36:52,721 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-172000.pt +2024-07-29 02:36:55,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0 +2024-07-29 02:36:57,647 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.781e+01 6.432e+01 7.454e+01 1.243e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-29 02:36:58,946 INFO [train.py:1114] (0/4) Epoch 17, batch 8450, loss[loss=0.1708, simple_loss=0.259, pruned_loss=0.04131, over 4814.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2675, pruned_loss=0.04286, over 939846.78 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:37:24,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=229394.66666666666, ans=0.0 +2024-07-29 02:37:28,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=229394.66666666666, ans=0.025 +2024-07-29 02:37:31,082 INFO [train.py:1114] (0/4) Epoch 17, batch 8500, loss[loss=0.1647, simple_loss=0.249, pruned_loss=0.04017, over 4605.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2666, pruned_loss=0.04245, over 939517.99 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:37:41,364 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-29 02:37:46,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=229434.66666666666, ans=0.0 +2024-07-29 02:37:46,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.35 vs. limit=22.5 +2024-07-29 02:37:47,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.65 vs. limit=15.0 +2024-07-29 02:37:50,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=229434.66666666666, ans=0.0 +2024-07-29 02:37:51,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229434.66666666666, ans=0.125 +2024-07-29 02:37:55,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=229448.0, ans=0.035 +2024-07-29 02:37:57,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.77 vs. 
limit=15.0 +2024-07-29 02:38:04,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=229461.33333333334, ans=0.025 +2024-07-29 02:38:04,990 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.595e+01 6.449e+01 7.243e+01 1.266e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 02:38:05,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=229461.33333333334, ans=0.125 +2024-07-29 02:38:05,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=12.0 +2024-07-29 02:38:06,377 INFO [train.py:1114] (0/4) Epoch 17, batch 8550, loss[loss=0.1738, simple_loss=0.2463, pruned_loss=0.05069, over 4819.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2663, pruned_loss=0.04241, over 940568.15 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:38:07,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=229474.66666666666, ans=0.025 +2024-07-29 02:38:07,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=229474.66666666666, ans=0.2 +2024-07-29 02:38:18,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229488.0, ans=0.1 +2024-07-29 02:38:21,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229501.33333333334, ans=0.0 +2024-07-29 02:38:24,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229501.33333333334, ans=0.0 +2024-07-29 02:38:29,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=229514.66666666666, ans=0.0 +2024-07-29 02:38:32,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=229528.0, ans=0.2 +2024-07-29 02:38:32,666 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=15.0 +2024-07-29 02:38:39,390 INFO [train.py:1114] (0/4) Epoch 17, batch 8600, loss[loss=0.1772, simple_loss=0.2707, pruned_loss=0.04183, over 4792.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2658, pruned_loss=0.04244, over 939958.93 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:38:42,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=229541.33333333334, ans=0.125 +2024-07-29 02:38:47,216 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.30 vs. 
limit=15.0 +2024-07-29 02:38:51,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=229554.66666666666, ans=0.125 +2024-07-29 02:38:54,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=229568.0, ans=0.125 +2024-07-29 02:38:56,265 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:38:56,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=12.0 +2024-07-29 02:38:57,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229568.0, ans=0.125 +2024-07-29 02:38:59,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229581.33333333334, ans=0.125 +2024-07-29 02:39:06,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229594.66666666666, ans=0.125 +2024-07-29 02:39:13,137 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.824e+01 6.675e+01 7.491e+01 1.199e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-29 02:39:14,456 INFO [train.py:1114] (0/4) Epoch 17, batch 8650, loss[loss=0.1955, simple_loss=0.2824, pruned_loss=0.05432, over 4897.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04253, over 940924.25 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:39:26,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=229621.33333333334, ans=0.025 +2024-07-29 02:39:43,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=229661.33333333334, ans=0.0 +2024-07-29 02:39:46,662 INFO [train.py:1114] (0/4) Epoch 17, batch 8700, loss[loss=0.2089, simple_loss=0.3002, pruned_loss=0.05886, over 4755.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2666, pruned_loss=0.04289, over 937784.80 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:39:51,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229674.66666666666, ans=0.125 +2024-07-29 02:39:57,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229688.0, ans=0.125 +2024-07-29 02:40:04,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229701.33333333334, ans=0.125 +2024-07-29 02:40:13,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=12.0 +2024-07-29 02:40:19,048 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.617e+01 6.057e+01 6.881e+01 1.135e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 02:40:19,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=229728.0, ans=0.125 +2024-07-29 02:40:20,339 INFO [train.py:1114] (0/4) Epoch 17, batch 8750, loss[loss=0.1879, simple_loss=0.2902, pruned_loss=0.04286, over 4676.00 frames. 
], tot_loss[loss=0.177, simple_loss=0.2673, pruned_loss=0.04338, over 935871.43 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:40:45,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=229781.33333333334, ans=0.125 +2024-07-29 02:40:51,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=229794.66666666666, ans=0.0 +2024-07-29 02:40:51,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=229794.66666666666, ans=0.025 +2024-07-29 02:40:54,030 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-07-29 02:40:54,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229794.66666666666, ans=0.1 +2024-07-29 02:40:54,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=229794.66666666666, ans=0.0 +2024-07-29 02:40:56,103 INFO [train.py:1114] (0/4) Epoch 17, batch 8800, loss[loss=0.1557, simple_loss=0.2493, pruned_loss=0.03106, over 4929.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2679, pruned_loss=0.04321, over 937164.61 frames. ], batch size: 14, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:41:12,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229834.66666666666, ans=0.125 +2024-07-29 02:41:16,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229848.0, ans=0.125 +2024-07-29 02:41:17,642 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-07-29 02:41:21,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=229861.33333333334, ans=0.025 +2024-07-29 02:41:28,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.109e+01 6.683e+01 1.097e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 02:41:29,302 INFO [train.py:1114] (0/4) Epoch 17, batch 8850, loss[loss=0.1727, simple_loss=0.2775, pruned_loss=0.03392, over 4585.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2679, pruned_loss=0.04325, over 931746.73 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:41:43,152 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.22 vs. limit=22.5 +2024-07-29 02:41:47,154 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. 
limit=15.0 +2024-07-29 02:41:50,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=229901.33333333334, ans=0.0 +2024-07-29 02:41:54,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=229914.66666666666, ans=0.125 +2024-07-29 02:41:55,649 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=229914.66666666666, ans=0.09899494936611666 +2024-07-29 02:41:56,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229914.66666666666, ans=0.1 +2024-07-29 02:42:02,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229928.0, ans=0.125 +2024-07-29 02:42:06,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229928.0, ans=0.125 +2024-07-29 02:42:08,045 INFO [train.py:1114] (0/4) Epoch 17, batch 8900, loss[loss=0.1628, simple_loss=0.2406, pruned_loss=0.04246, over 4952.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2684, pruned_loss=0.04365, over 929278.35 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:42:13,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229941.33333333334, ans=0.1 +2024-07-29 02:42:15,155 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.08 vs. limit=15.0 +2024-07-29 02:42:22,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=229968.0, ans=0.0 +2024-07-29 02:42:43,285 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.920e+01 5.712e+01 6.272e+01 7.147e+01 1.085e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 02:42:43,949 INFO [train.py:1114] (0/4) Epoch 17, batch 8950, loss[loss=0.1544, simple_loss=0.2469, pruned_loss=0.03093, over 4505.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2678, pruned_loss=0.04336, over 930513.01 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:42:47,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=230008.0, ans=0.05 +2024-07-29 02:42:48,829 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.80 vs. 
limit=15.0 +2024-07-29 02:43:00,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=230021.33333333334, ans=0.2 +2024-07-29 02:43:09,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=230034.66666666666, ans=0.0 +2024-07-29 02:43:14,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=230048.0, ans=0.025 +2024-07-29 02:43:22,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=230061.33333333334, ans=0.0 +2024-07-29 02:43:26,316 INFO [train.py:1114] (0/4) Epoch 17, batch 9000, loss[loss=0.168, simple_loss=0.2626, pruned_loss=0.03675, over 4648.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04379, over 933673.74 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:43:26,317 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 02:43:33,011 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.7795, 3.4645, 2.9760, 2.7183], device='cuda:0') +2024-07-29 02:43:34,916 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.2615, 3.6444, 3.5810, 3.5087], device='cuda:0') +2024-07-29 02:43:37,870 INFO [train.py:1146] (0/4) Epoch 17, validation: loss=0.1619, simple_loss=0.2644, pruned_loss=0.02967, over 944034.00 frames. +2024-07-29 02:43:37,871 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 02:43:43,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230074.66666666666, ans=0.1 +2024-07-29 02:43:47,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=230088.0, ans=0.0 +2024-07-29 02:43:59,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=15.0 +2024-07-29 02:44:05,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=230128.0, ans=0.0 +2024-07-29 02:44:07,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=230128.0, ans=0.125 +2024-07-29 02:44:12,196 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.566e+01 6.347e+01 7.363e+01 1.043e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-29 02:44:12,229 INFO [train.py:1114] (0/4) Epoch 17, batch 9050, loss[loss=0.1933, simple_loss=0.2653, pruned_loss=0.06065, over 4483.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2669, pruned_loss=0.04329, over 933910.92 frames. 
], batch size: 10, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:44:14,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=230141.33333333334, ans=0.0 +2024-07-29 02:44:15,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=230141.33333333334, ans=0.125 +2024-07-29 02:44:20,231 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=230154.66666666666, ans=0.2 +2024-07-29 02:44:24,106 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.31 vs. limit=15.0 +2024-07-29 02:44:29,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=230168.0, ans=10.0 +2024-07-29 02:44:40,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=230194.66666666666, ans=0.125 +2024-07-29 02:44:41,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=230194.66666666666, ans=0.2 +2024-07-29 02:44:42,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=230194.66666666666, ans=0.0 +2024-07-29 02:44:47,571 INFO [train.py:1114] (0/4) Epoch 17, batch 9100, loss[loss=0.1582, simple_loss=0.2532, pruned_loss=0.03158, over 4939.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2666, pruned_loss=0.04287, over 936627.34 frames. ], batch size: 14, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:44:49,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230208.0, ans=0.1 +2024-07-29 02:44:49,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-07-29 02:44:50,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=230208.0, ans=0.125 +2024-07-29 02:45:01,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=230234.66666666666, ans=0.1 +2024-07-29 02:45:02,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=230234.66666666666, ans=0.0 +2024-07-29 02:45:19,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.656e+01 6.287e+01 6.947e+01 9.623e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 02:45:19,863 INFO [train.py:1114] (0/4) Epoch 17, batch 9150, loss[loss=0.1753, simple_loss=0.269, pruned_loss=0.04077, over 4813.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2676, pruned_loss=0.04288, over 935481.15 frames. 
], batch size: 14, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:45:23,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=230274.66666666666, ans=0.125 +2024-07-29 02:45:26,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=230288.0, ans=0.025 +2024-07-29 02:45:28,251 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.33 vs. limit=15.0 +2024-07-29 02:45:34,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=230301.33333333334, ans=0.125 +2024-07-29 02:45:44,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=230328.0, ans=0.125 +2024-07-29 02:45:52,561 INFO [train.py:1114] (0/4) Epoch 17, batch 9200, loss[loss=0.1676, simple_loss=0.2537, pruned_loss=0.04078, over 4847.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.267, pruned_loss=0.0427, over 937584.61 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:45:54,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=230341.33333333334, ans=0.0 +2024-07-29 02:45:59,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230354.66666666666, ans=0.0 +2024-07-29 02:46:01,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=230354.66666666666, ans=0.0 +2024-07-29 02:46:01,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=230354.66666666666, ans=0.025 +2024-07-29 02:46:13,314 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:46:16,282 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:46:24,550 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.574e+01 6.025e+01 6.747e+01 8.782e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 02:46:24,583 INFO [train.py:1114] (0/4) Epoch 17, batch 9250, loss[loss=0.1691, simple_loss=0.2556, pruned_loss=0.0413, over 4635.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2668, pruned_loss=0.04251, over 938113.34 frames. ], batch size: 13, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:46:28,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=230408.0, ans=0.02 +2024-07-29 02:46:34,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=230421.33333333334, ans=0.125 +2024-07-29 02:46:48,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=230448.0, ans=0.2 +2024-07-29 02:46:56,565 INFO [train.py:1114] (0/4) Epoch 17, batch 9300, loss[loss=0.1937, simple_loss=0.274, pruned_loss=0.05667, over 4787.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2664, pruned_loss=0.04253, over 938306.67 frames. 
], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:47:02,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230474.66666666666, ans=0.1 +2024-07-29 02:47:09,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230501.33333333334, ans=0.1 +2024-07-29 02:47:11,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=230501.33333333334, ans=0.125 +2024-07-29 02:47:15,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=230514.66666666666, ans=0.04949747468305833 +2024-07-29 02:47:28,550 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.719e+01 6.284e+01 7.337e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 02:47:28,583 INFO [train.py:1114] (0/4) Epoch 17, batch 9350, loss[loss=0.174, simple_loss=0.259, pruned_loss=0.0445, over 4800.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.04285, over 934902.33 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:47:33,297 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5 +2024-07-29 02:47:33,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=230541.33333333334, ans=0.025 +2024-07-29 02:47:42,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=230568.0, ans=0.0 +2024-07-29 02:47:50,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=230581.33333333334, ans=0.0 +2024-07-29 02:48:00,644 INFO [train.py:1114] (0/4) Epoch 17, batch 9400, loss[loss=0.1761, simple_loss=0.2592, pruned_loss=0.04649, over 4690.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2669, pruned_loss=0.04289, over 933062.27 frames. ], batch size: 13, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:48:03,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=230608.0, ans=0.0 +2024-07-29 02:48:21,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=230648.0, ans=0.09899494936611666 +2024-07-29 02:48:24,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=230648.0, ans=0.0 +2024-07-29 02:48:28,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=230661.33333333334, ans=0.125 +2024-07-29 02:48:34,184 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=230674.66666666666, ans=0.125 +2024-07-29 02:48:34,668 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.567e+01 6.049e+01 6.960e+01 9.210e+01, threshold=1.210e+02, percent-clipped=0.0 +2024-07-29 02:48:34,701 INFO [train.py:1114] (0/4) Epoch 17, batch 9450, loss[loss=0.1426, simple_loss=0.2266, pruned_loss=0.02937, over 4806.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2668, pruned_loss=0.04275, over 932604.03 frames. 
], batch size: 11, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:48:37,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=230674.66666666666, ans=0.0 +2024-07-29 02:48:47,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=230701.33333333334, ans=0.0 +2024-07-29 02:48:49,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=230701.33333333334, ans=0.125 +2024-07-29 02:48:50,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=230701.33333333334, ans=0.07 +2024-07-29 02:48:50,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=230701.33333333334, ans=0.0 +2024-07-29 02:48:55,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230714.66666666666, ans=0.1 +2024-07-29 02:49:02,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=230728.0, ans=0.125 +2024-07-29 02:49:06,179 INFO [train.py:1114] (0/4) Epoch 17, batch 9500, loss[loss=0.1518, simple_loss=0.2411, pruned_loss=0.03126, over 4703.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2666, pruned_loss=0.04226, over 934839.17 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:49:15,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=230754.66666666666, ans=0.0 +2024-07-29 02:49:19,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230768.0, ans=0.1 +2024-07-29 02:49:32,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=230794.66666666666, ans=0.025 +2024-07-29 02:49:37,900 INFO [train.py:1114] (0/4) Epoch 17, batch 9550, loss[loss=0.159, simple_loss=0.2514, pruned_loss=0.03326, over 4773.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2669, pruned_loss=0.0424, over 932125.84 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:49:39,100 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.645e+01 6.246e+01 7.009e+01 1.042e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 02:49:42,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-29 02:49:55,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=230834.66666666666, ans=0.125 +2024-07-29 02:50:09,769 INFO [train.py:1114] (0/4) Epoch 17, batch 9600, loss[loss=0.2202, simple_loss=0.2968, pruned_loss=0.07176, over 3534.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2673, pruned_loss=0.04276, over 931484.82 frames. 
], batch size: 35, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:50:18,726 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=230888.0, ans=0.0 +2024-07-29 02:50:26,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=230901.33333333334, ans=0.125 +2024-07-29 02:50:44,359 INFO [train.py:1114] (0/4) Epoch 17, batch 9650, loss[loss=0.1955, simple_loss=0.2915, pruned_loss=0.04974, over 4831.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.268, pruned_loss=0.04339, over 927265.92 frames. ], batch size: 16, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:50:44,985 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.884e+01 6.433e+01 7.222e+01 1.107e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 02:50:50,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230954.66666666666, ans=0.125 +2024-07-29 02:50:50,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230954.66666666666, ans=0.125 +2024-07-29 02:50:58,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=230968.0, ans=0.125 +2024-07-29 02:51:07,291 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:51:11,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230994.66666666666, ans=0.1 +2024-07-29 02:51:16,186 INFO [train.py:1114] (0/4) Epoch 17, batch 9700, loss[loss=0.1731, simple_loss=0.2579, pruned_loss=0.04419, over 4243.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04392, over 925851.35 frames. ], batch size: 25, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:51:47,649 INFO [train.py:1114] (0/4) Epoch 17, batch 9750, loss[loss=0.1721, simple_loss=0.264, pruned_loss=0.04012, over 4695.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.268, pruned_loss=0.04356, over 925936.20 frames. ], batch size: 15, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:51:48,239 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.114e+01 5.556e+01 6.243e+01 6.911e+01 1.115e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 02:51:48,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=231074.66666666666, ans=0.015 +2024-07-29 02:51:50,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231074.66666666666, ans=0.1 +2024-07-29 02:51:53,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. 
limit=15.0 +2024-07-29 02:51:54,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=231088.0, ans=0.025 +2024-07-29 02:51:59,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=231101.33333333334, ans=0.035 +2024-07-29 02:52:15,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231128.0, ans=0.125 +2024-07-29 02:52:16,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=231128.0, ans=0.125 +2024-07-29 02:52:19,056 INFO [train.py:1114] (0/4) Epoch 17, batch 9800, loss[loss=0.1663, simple_loss=0.2599, pruned_loss=0.03641, over 4710.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.266, pruned_loss=0.0428, over 925410.48 frames. ], batch size: 12, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:52:19,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231141.33333333334, ans=0.1 +2024-07-29 02:52:25,370 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:52:27,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231154.66666666666, ans=0.1 +2024-07-29 02:52:36,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=231181.33333333334, ans=0.125 +2024-07-29 02:52:47,214 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.02 vs. limit=10.0 +2024-07-29 02:52:47,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.75 vs. limit=15.0 +2024-07-29 02:52:50,077 INFO [train.py:1114] (0/4) Epoch 17, batch 9850, loss[loss=0.1924, simple_loss=0.2891, pruned_loss=0.04791, over 4900.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2671, pruned_loss=0.04314, over 927339.03 frames. ], batch size: 15, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:52:50,654 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.758e+01 6.441e+01 7.212e+01 9.230e+01, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 02:52:58,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=231221.33333333334, ans=0.09899494936611666 +2024-07-29 02:53:04,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=231234.66666666666, ans=0.125 +2024-07-29 02:53:19,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=231261.33333333334, ans=0.125 +2024-07-29 02:53:22,318 INFO [train.py:1114] (0/4) Epoch 17, batch 9900, loss[loss=0.1831, simple_loss=0.2715, pruned_loss=0.04734, over 4821.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2678, pruned_loss=0.04377, over 926435.29 frames. ], batch size: 16, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:53:27,109 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.24 vs. 
limit=15.0 +2024-07-29 02:53:47,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=231328.0, ans=0.0 +2024-07-29 02:53:48,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231328.0, ans=0.125 +2024-07-29 02:53:53,565 INFO [train.py:1114] (0/4) Epoch 17, batch 9950, loss[loss=0.1537, simple_loss=0.2411, pruned_loss=0.03321, over 4806.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2667, pruned_loss=0.04354, over 928969.82 frames. ], batch size: 11, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:53:54,165 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.766e+01 6.356e+01 7.245e+01 1.147e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 02:53:58,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=231341.33333333334, ans=0.2 +2024-07-29 02:54:05,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=231354.66666666666, ans=0.0 +2024-07-29 02:54:22,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=231394.66666666666, ans=0.0 +2024-07-29 02:54:24,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231408.0, ans=0.125 +2024-07-29 02:54:24,978 INFO [train.py:1114] (0/4) Epoch 17, batch 10000, loss[loss=0.1732, simple_loss=0.2702, pruned_loss=0.03812, over 4618.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2691, pruned_loss=0.04408, over 926611.49 frames. ], batch size: 16, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:54:25,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=231408.0, ans=0.125 +2024-07-29 02:54:33,048 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=231421.33333333334, ans=0.125 +2024-07-29 02:54:37,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=231434.66666666666, ans=0.0 +2024-07-29 02:54:38,104 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231434.66666666666, ans=0.125 +2024-07-29 02:54:40,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=231434.66666666666, ans=0.125 +2024-07-29 02:54:43,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231448.0, ans=0.0 +2024-07-29 02:54:46,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=231448.0, ans=0.125 +2024-07-29 02:54:53,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=231461.33333333334, ans=0.125 +2024-07-29 02:54:55,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231461.33333333334, ans=0.1 +2024-07-29 02:54:56,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=231474.66666666666, ans=0.0 +2024-07-29 
02:55:00,063 INFO [train.py:1114] (0/4) Epoch 17, batch 10050, loss[loss=0.2462, simple_loss=0.3364, pruned_loss=0.07799, over 3218.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2717, pruned_loss=0.04551, over 915895.14 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:55:00,789 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 5.675e+01 6.187e+01 6.969e+01 9.766e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 02:55:03,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=231474.66666666666, ans=0.0 +2024-07-29 02:55:05,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231474.66666666666, ans=0.1 +2024-07-29 02:55:08,955 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=15.0 +2024-07-29 02:55:10,779 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:55:17,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.40 vs. limit=15.0 +2024-07-29 02:55:30,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=231528.0, ans=0.125 +2024-07-29 02:55:35,508 INFO [train.py:1114] (0/4) Epoch 17, batch 10100, loss[loss=0.2285, simple_loss=0.308, pruned_loss=0.07449, over 3525.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2761, pruned_loss=0.04978, over 862332.51 frames. ], batch size: 37, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:55:41,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=231541.33333333334, ans=0.0 +2024-07-29 02:55:55,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=231568.0, ans=0.0 +2024-07-29 02:56:03,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=231581.33333333334, ans=0.125 +2024-07-29 02:56:03,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=231581.33333333334, ans=0.125 +2024-07-29 02:56:04,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=231581.33333333334, ans=0.025 +2024-07-29 02:56:08,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=231594.66666666666, ans=0.125 +2024-07-29 02:56:12,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=231594.66666666666, ans=0.2 +2024-07-29 02:56:13,982 INFO [train.py:1114] (0/4) Epoch 17, batch 10150, loss[loss=0.2105, simple_loss=0.2856, pruned_loss=0.0677, over 3211.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.28, pruned_loss=0.05332, over 820112.42 frames. 
], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:56:14,207 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=231608.0, ans=0.2 +2024-07-29 02:56:14,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+01 6.975e+01 7.380e+01 8.032e+01 1.303e+02, threshold=1.476e+02, percent-clipped=1.0 +2024-07-29 02:56:21,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231621.33333333334, ans=0.125 +2024-07-29 02:56:24,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=231621.33333333334, ans=0.0 +2024-07-29 02:56:36,100 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.69 vs. limit=15.0 +2024-07-29 02:56:40,311 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. limit=15.0 +2024-07-29 02:56:43,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=231661.33333333334, ans=0.0 +2024-07-29 02:56:44,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231661.33333333334, ans=0.0 +2024-07-29 02:56:44,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.29 vs. limit=10.0 +2024-07-29 02:56:45,761 INFO [train.py:1114] (0/4) Epoch 17, batch 10200, loss[loss=0.1872, simple_loss=0.282, pruned_loss=0.0462, over 3404.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2835, pruned_loss=0.0566, over 788669.29 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:56:46,697 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.57 vs. limit=6.0 +2024-07-29 02:57:02,395 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-17.pt +2024-07-29 02:58:12,613 INFO [train.py:1114] (0/4) Epoch 18, batch 0, loss[loss=0.1366, simple_loss=0.2285, pruned_loss=0.0223, over 4857.00 frames. ], tot_loss[loss=0.1366, simple_loss=0.2285, pruned_loss=0.0223, over 4857.00 frames. ], batch size: 12, lr: 4.20e-03, grad_scale: 32.0 +2024-07-29 02:58:12,613 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 02:58:24,198 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1629, simple_loss=0.2668, pruned_loss=0.02955, over 944034.00 frames. 
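The `WARNING [optim.py:487]` entries above make the clipping rule visible in the numbers themselves: the reported threshold is exactly `Clipping_scale` times the middle quartile (2.0 × 6.356e+01 = 1.271e+02 in the first entry, 2.0 × 6.187e+01 ≈ 1.237e+02 in the second). Below is a minimal sketch of that bookkeeping, under the assumption that the five logged values are the min/25th/50th/75th/max percentiles of recently observed gradient norms; `QuartileGradClipper` and its methods are illustrative names, not the actual `optim.py` API, and `percent-clipped` is tracked cumulatively here rather than per reporting interval as the real log likely does.

```python
# Illustrative sketch only: reproduces the shape of the
# "Clipping_scale=..., grad-norm quartiles ..." log lines, not icefall's code.
from collections import deque
import torch

class QuartileGradClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 1000):
        self.clipping_scale = clipping_scale
        self.recent_norms = deque(maxlen=history)  # window of recent grad norms
        self.num_steps = 0
        self.num_clipped = 0

    def step(self, model: torch.nn.Module) -> None:
        grads = [p.grad for p in model.parameters() if p.grad is not None]
        # Total 2-norm of the gradient across all parameters.
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.recent_norms.append(norm)
        # Five percentiles of the recent history: min / 25% / median / 75% / max.
        q = torch.quantile(torch.tensor(list(self.recent_norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        # Matches the log: threshold = clipping_scale * median quartile.
        threshold = self.clipping_scale * q[2].item()
        self.num_steps += 1
        if norm > threshold:
            self.num_clipped += 1
            for g in grads:
                g.mul_(threshold / norm)  # rescale so the total norm == threshold
        quartiles = " ".join(f"{v:.3e}" for v in q.tolist())
        print(f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
              f"{quartiles}, threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.num_clipped / self.num_steps}")
```

In use, such a clipper would be called once per training step after `loss.backward()` and before `optimizer.step()`, e.g. `clipper.step(model)`; a `percent-clipped` of 0.0 in most entries above then just says the running norm rarely exceeded twice its own median.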
+2024-07-29 02:58:24,199 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 02:58:26,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231705.33333333334, ans=0.1 +2024-07-29 02:58:26,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231705.33333333334, ans=0.125 +2024-07-29 02:58:27,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=231705.33333333334, ans=0.2 +2024-07-29 02:58:31,817 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:58:38,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=231732.0, ans=0.2 +2024-07-29 02:58:44,123 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 6.224e+01 6.772e+01 7.416e+01 8.385e+01, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 02:58:54,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231758.66666666666, ans=0.125 +2024-07-29 02:58:54,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0 +2024-07-29 02:58:59,031 INFO [train.py:1114] (0/4) Epoch 18, batch 50, loss[loss=0.1739, simple_loss=0.256, pruned_loss=0.04589, over 4615.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2681, pruned_loss=0.04417, over 206575.65 frames. ], batch size: 11, lr: 4.20e-03, grad_scale: 32.0 +2024-07-29 02:59:07,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=231785.33333333334, ans=0.0 +2024-07-29 02:59:10,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231785.33333333334, ans=0.125 +2024-07-29 02:59:34,046 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231838.66666666666, ans=0.125 +2024-07-29 02:59:34,513 INFO [train.py:1114] (0/4) Epoch 18, batch 100, loss[loss=0.1581, simple_loss=0.2557, pruned_loss=0.03028, over 4645.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2704, pruned_loss=0.04415, over 365854.46 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 02:59:41,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.23 vs. limit=22.5 +2024-07-29 02:59:43,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=231852.0, ans=0.0 +2024-07-29 02:59:50,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231865.33333333334, ans=0.1 +2024-07-29 02:59:54,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. 
limit=15.0 +2024-07-29 02:59:54,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.467e+01 5.995e+01 6.645e+01 8.215e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 03:00:06,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231892.0, ans=0.1 +2024-07-29 03:00:07,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231892.0, ans=0.0 +2024-07-29 03:00:08,852 INFO [train.py:1114] (0/4) Epoch 18, batch 150, loss[loss=0.1407, simple_loss=0.233, pruned_loss=0.02419, over 4617.00 frames. ], tot_loss[loss=0.176, simple_loss=0.267, pruned_loss=0.04245, over 494504.32 frames. ], batch size: 11, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:00:16,394 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-29 03:00:22,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=231932.0, ans=0.0 +2024-07-29 03:00:24,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=231932.0, ans=0.125 +2024-07-29 03:00:25,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231932.0, ans=0.125 +2024-07-29 03:00:26,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.81 vs. limit=15.0 +2024-07-29 03:00:32,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=231945.33333333334, ans=0.125 +2024-07-29 03:00:42,601 INFO [train.py:1114] (0/4) Epoch 18, batch 200, loss[loss=0.17, simple_loss=0.2592, pruned_loss=0.04036, over 4445.00 frames. ], tot_loss[loss=0.176, simple_loss=0.267, pruned_loss=0.04253, over 593965.27 frames. ], batch size: 21, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:00:48,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=15.0 +2024-07-29 03:01:07,622 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.18 vs. limit=15.0 +2024-07-29 03:01:08,547 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.881e+01 6.844e+01 7.850e+01 1.252e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-29 03:01:12,351 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232012.0, ans=0.1 +2024-07-29 03:01:12,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=232012.0, ans=0.0 +2024-07-29 03:01:17,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=15.0 +2024-07-29 03:01:50,245 INFO [train.py:1114] (0/4) Epoch 18, batch 250, loss[loss=0.1953, simple_loss=0.2924, pruned_loss=0.04911, over 4663.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2661, pruned_loss=0.04218, over 670766.51 frames. 
], batch size: 16, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:01:55,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=232038.66666666666, ans=0.0 +2024-07-29 03:01:57,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=232052.0, ans=0.025 +2024-07-29 03:01:58,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232052.0, ans=0.125 +2024-07-29 03:01:58,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0 +2024-07-29 03:02:24,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.12 vs. limit=15.0 +2024-07-29 03:02:29,760 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.27 vs. limit=10.0 +2024-07-29 03:02:30,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=232065.33333333334, ans=6.0 +2024-07-29 03:02:36,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=232078.66666666666, ans=0.1 +2024-07-29 03:02:37,539 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=232078.66666666666, ans=0.125 +2024-07-29 03:02:58,737 INFO [train.py:1114] (0/4) Epoch 18, batch 300, loss[loss=0.1909, simple_loss=0.2839, pruned_loss=0.04896, over 4788.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2661, pruned_loss=0.04211, over 730178.91 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:03:09,943 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:03:12,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.12 vs. limit=22.5 +2024-07-29 03:03:17,685 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.434e+01 5.467e+01 6.061e+01 6.995e+01 1.248e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 03:03:26,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=232158.66666666666, ans=0.125 +2024-07-29 03:03:31,593 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-07-29 03:03:32,407 INFO [train.py:1114] (0/4) Epoch 18, batch 350, loss[loss=0.1588, simple_loss=0.2356, pruned_loss=0.04097, over 4935.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.265, pruned_loss=0.04133, over 776055.18 frames. 
], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:03:54,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=232212.0, ans=0.125 +2024-07-29 03:03:55,912 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:03:58,794 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.05 vs. limit=22.5 +2024-07-29 03:04:05,804 INFO [train.py:1114] (0/4) Epoch 18, batch 400, loss[loss=0.1581, simple_loss=0.2532, pruned_loss=0.03148, over 4689.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2642, pruned_loss=0.04092, over 813506.49 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:04:26,843 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.483e+01 6.110e+01 6.835e+01 9.648e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 03:04:31,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=232278.66666666666, ans=0.07 +2024-07-29 03:04:35,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=232292.0, ans=0.025 +2024-07-29 03:04:38,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232292.0, ans=0.125 +2024-07-29 03:04:38,637 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.46 vs. limit=15.0 +2024-07-29 03:04:39,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=6.0 +2024-07-29 03:04:39,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=232292.0, ans=0.035 +2024-07-29 03:04:41,628 INFO [train.py:1114] (0/4) Epoch 18, batch 450, loss[loss=0.1743, simple_loss=0.2718, pruned_loss=0.03839, over 4634.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.04085, over 838617.25 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:04:53,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.45 vs. limit=22.5 +2024-07-29 03:04:59,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=232332.0, ans=0.2 +2024-07-29 03:05:01,093 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232345.33333333334, ans=0.125 +2024-07-29 03:05:01,211 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232345.33333333334, ans=0.125 +2024-07-29 03:05:13,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=232358.66666666666, ans=0.125 +2024-07-29 03:05:13,423 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. 
limit=6.0 +2024-07-29 03:05:14,800 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. limit=10.0 +2024-07-29 03:05:15,091 INFO [train.py:1114] (0/4) Epoch 18, batch 500, loss[loss=0.206, simple_loss=0.3052, pruned_loss=0.05342, over 4690.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04075, over 861101.77 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:05:18,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=232372.0, ans=0.125 +2024-07-29 03:05:22,115 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=232385.33333333334, ans=0.0 +2024-07-29 03:05:24,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=232385.33333333334, ans=0.2 +2024-07-29 03:05:34,180 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.530e+01 5.559e+01 6.071e+01 6.831e+01 9.618e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-29 03:05:43,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=232425.33333333334, ans=0.0 +2024-07-29 03:05:45,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=232425.33333333334, ans=0.0 +2024-07-29 03:05:48,917 INFO [train.py:1114] (0/4) Epoch 18, batch 550, loss[loss=0.1672, simple_loss=0.2645, pruned_loss=0.03495, over 4638.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2634, pruned_loss=0.04073, over 877135.10 frames. ], batch size: 17, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:05:51,271 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.51 vs. limit=15.0 +2024-07-29 03:05:51,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=232438.66666666666, ans=0.2 +2024-07-29 03:05:59,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=232452.0, ans=0.0 +2024-07-29 03:06:01,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.20 vs. limit=15.0 +2024-07-29 03:06:10,281 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-29 03:06:17,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=232492.0, ans=0.5 +2024-07-29 03:06:26,515 INFO [train.py:1114] (0/4) Epoch 18, batch 600, loss[loss=0.1808, simple_loss=0.267, pruned_loss=0.04732, over 4647.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.264, pruned_loss=0.04115, over 892204.66 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:06:32,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.30 vs. 
limit=15.0 +2024-07-29 03:06:43,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=232532.0, ans=0.125 +2024-07-29 03:06:44,912 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.582e+01 6.053e+01 7.206e+01 1.079e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 03:07:01,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232558.66666666666, ans=0.1 +2024-07-29 03:07:03,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232572.0, ans=0.1 +2024-07-29 03:07:03,967 INFO [train.py:1114] (0/4) Epoch 18, batch 650, loss[loss=0.1748, simple_loss=0.267, pruned_loss=0.04129, over 4766.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2645, pruned_loss=0.04137, over 903948.89 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:07:16,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=232585.33333333334, ans=0.125 +2024-07-29 03:07:17,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=232585.33333333334, ans=0.0 +2024-07-29 03:07:34,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=232612.0, ans=0.125 +2024-07-29 03:07:43,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.57 vs. limit=15.0 +2024-07-29 03:07:47,035 INFO [train.py:1114] (0/4) Epoch 18, batch 700, loss[loss=0.1424, simple_loss=0.2498, pruned_loss=0.01748, over 4625.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2649, pruned_loss=0.04133, over 911630.82 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:07:49,292 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=232638.66666666666, ans=0.2 +2024-07-29 03:08:05,617 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.680e+01 6.121e+01 6.839e+01 1.044e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 03:08:17,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=232692.0, ans=0.125 +2024-07-29 03:08:20,417 INFO [train.py:1114] (0/4) Epoch 18, batch 750, loss[loss=0.2037, simple_loss=0.2959, pruned_loss=0.05578, over 4696.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04187, over 918007.49 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:08:21,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=232705.33333333334, ans=0.2 +2024-07-29 03:08:44,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.94 vs. 
limit=15.0 +2024-07-29 03:08:46,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=232732.0, ans=0.2 +2024-07-29 03:08:47,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=232732.0, ans=0.125 +2024-07-29 03:08:54,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232732.0, ans=0.125 +2024-07-29 03:08:55,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.72 vs. limit=6.0 +2024-07-29 03:08:56,813 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=232745.33333333334, ans=0.125 +2024-07-29 03:08:57,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=232745.33333333334, ans=0.125 +2024-07-29 03:09:10,284 INFO [train.py:1114] (0/4) Epoch 18, batch 800, loss[loss=0.1762, simple_loss=0.2521, pruned_loss=0.05018, over 4852.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2656, pruned_loss=0.04259, over 922841.02 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:09:18,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=232785.33333333334, ans=0.0 +2024-07-29 03:09:20,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=232785.33333333334, ans=0.125 +2024-07-29 03:09:28,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.23 vs. limit=22.5 +2024-07-29 03:09:28,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.634e+01 6.203e+01 6.793e+01 1.019e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 03:09:38,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=232825.33333333334, ans=0.2 +2024-07-29 03:09:39,693 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.48 vs. limit=22.5 +2024-07-29 03:09:43,906 INFO [train.py:1114] (0/4) Epoch 18, batch 850, loss[loss=0.167, simple_loss=0.2592, pruned_loss=0.03741, over 4663.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2647, pruned_loss=0.04216, over 927364.27 frames. ], batch size: 14, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:09:47,580 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=232838.66666666666, ans=0.0 +2024-07-29 03:09:53,913 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-07-29 03:10:01,410 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.27 vs. 
limit=6.0 +2024-07-29 03:10:01,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232865.33333333334, ans=0.125 +2024-07-29 03:10:04,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=232878.66666666666, ans=0.0 +2024-07-29 03:10:10,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=232892.0, ans=0.2 +2024-07-29 03:10:13,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232892.0, ans=0.125 +2024-07-29 03:10:18,952 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.43 vs. limit=6.0 +2024-07-29 03:10:19,904 INFO [train.py:1114] (0/4) Epoch 18, batch 900, loss[loss=0.1488, simple_loss=0.2373, pruned_loss=0.03021, over 4853.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2655, pruned_loss=0.04264, over 928436.64 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:10:23,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=232905.33333333334, ans=0.125 +2024-07-29 03:10:27,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=232905.33333333334, ans=0.2 +2024-07-29 03:10:39,653 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=232932.0, ans=0.0 +2024-07-29 03:10:40,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=232932.0, ans=0.125 +2024-07-29 03:10:44,075 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.656e+01 6.090e+01 7.210e+01 1.010e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 03:10:53,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=232945.33333333334, ans=0.2 +2024-07-29 03:11:02,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232958.66666666666, ans=0.125 +2024-07-29 03:11:04,780 INFO [train.py:1114] (0/4) Epoch 18, batch 950, loss[loss=0.1518, simple_loss=0.235, pruned_loss=0.0343, over 4784.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2654, pruned_loss=0.04212, over 930146.71 frames. ], batch size: 12, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:11:07,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=232972.0, ans=0.0 +2024-07-29 03:11:27,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=233012.0, ans=0.125 +2024-07-29 03:11:34,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=233025.33333333334, ans=0.025 +2024-07-29 03:11:37,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233025.33333333334, ans=0.1 +2024-07-29 03:11:39,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.80 vs. 
limit=5.0 +2024-07-29 03:11:39,995 INFO [train.py:1114] (0/4) Epoch 18, batch 1000, loss[loss=0.157, simple_loss=0.2526, pruned_loss=0.03069, over 4961.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2663, pruned_loss=0.04264, over 929605.17 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:11:43,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.29 vs. limit=15.0 +2024-07-29 03:11:45,554 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.58 vs. limit=22.5 +2024-07-29 03:11:58,676 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.660e+01 6.268e+01 7.166e+01 1.041e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 03:11:59,711 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.91 vs. limit=6.0 +2024-07-29 03:12:09,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=233092.0, ans=0.05 +2024-07-29 03:12:12,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=233092.0, ans=0.05 +2024-07-29 03:12:15,364 INFO [train.py:1114] (0/4) Epoch 18, batch 1050, loss[loss=0.1946, simple_loss=0.2917, pruned_loss=0.04872, over 4870.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2655, pruned_loss=0.04224, over 931770.94 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:13:11,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=233158.66666666666, ans=0.2 +2024-07-29 03:13:11,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=233158.66666666666, ans=0.2 +2024-07-29 03:13:24,202 INFO [train.py:1114] (0/4) Epoch 18, batch 1100, loss[loss=0.147, simple_loss=0.2377, pruned_loss=0.02813, over 4888.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2653, pruned_loss=0.04198, over 934703.85 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:13:24,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=233172.0, ans=0.125 +2024-07-29 03:14:23,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233198.66666666666, ans=0.1 +2024-07-29 03:14:26,845 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.370e+01 5.951e+01 6.699e+01 1.093e+02, threshold=1.190e+02, percent-clipped=0.0 +2024-07-29 03:14:33,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=233198.66666666666, ans=0.125 +2024-07-29 03:14:44,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.38 vs. limit=15.0 +2024-07-29 03:15:12,270 INFO [train.py:1114] (0/4) Epoch 18, batch 1150, loss[loss=0.1841, simple_loss=0.2805, pruned_loss=0.04386, over 4897.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04205, over 934631.34 frames. 
], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:15:21,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=233238.66666666666, ans=0.125 +2024-07-29 03:15:24,325 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233252.0, ans=0.1 +2024-07-29 03:15:25,359 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=12.0 +2024-07-29 03:15:27,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=233252.0, ans=0.125 +2024-07-29 03:15:29,198 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=233265.33333333334, ans=0.2 +2024-07-29 03:15:54,170 INFO [train.py:1114] (0/4) Epoch 18, batch 1200, loss[loss=0.1764, simple_loss=0.2811, pruned_loss=0.03588, over 4876.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2652, pruned_loss=0.04157, over 933855.28 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:16:01,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=233318.66666666666, ans=0.125 +2024-07-29 03:16:20,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=233332.0, ans=0.125 +2024-07-29 03:16:24,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=233332.0, ans=0.125 +2024-07-29 03:16:24,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.51 vs. limit=22.5 +2024-07-29 03:19:13,690 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.527e+01 5.938e+01 6.741e+01 1.045e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 03:19:15,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233345.33333333334, ans=0.1 +2024-07-29 03:19:17,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=233345.33333333334, ans=0.09899494936611666 +2024-07-29 03:19:17,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=233345.33333333334, ans=0.2 +2024-07-29 03:19:27,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=233358.66666666666, ans=0.0 +2024-07-29 03:19:30,344 INFO [train.py:1114] (0/4) Epoch 18, batch 1250, loss[loss=0.1901, simple_loss=0.2793, pruned_loss=0.05046, over 4810.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2657, pruned_loss=0.04127, over 937727.55 frames. 
], batch size: 15, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:19:33,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=233372.0, ans=0.0 +2024-07-29 03:19:38,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233385.33333333334, ans=0.1 +2024-07-29 03:19:41,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=233385.33333333334, ans=0.0 +2024-07-29 03:19:49,576 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233412.0, ans=0.125 +2024-07-29 03:19:55,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233412.0, ans=0.1 +2024-07-29 03:19:56,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=233425.33333333334, ans=0.125 +2024-07-29 03:20:03,278 INFO [train.py:1114] (0/4) Epoch 18, batch 1300, loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.04078, over 4707.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.265, pruned_loss=0.04123, over 938798.36 frames. ], batch size: 19, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:20:12,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=233452.0, ans=0.2 +2024-07-29 03:20:16,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=233465.33333333334, ans=0.125 +2024-07-29 03:20:21,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.61 vs. limit=15.0 +2024-07-29 03:20:21,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.468e+01 6.194e+01 6.881e+01 8.786e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 03:20:38,027 INFO [train.py:1114] (0/4) Epoch 18, batch 1350, loss[loss=0.1624, simple_loss=0.2649, pruned_loss=0.02993, over 4758.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2641, pruned_loss=0.0409, over 940711.44 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:21:34,904 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-07-29 03:21:38,970 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:21:39,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.92 vs. limit=15.0 +2024-07-29 03:21:47,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-07-29 03:22:26,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=233558.66666666666, ans=0.0 +2024-07-29 03:22:33,506 INFO [train.py:1114] (0/4) Epoch 18, batch 1400, loss[loss=0.1288, simple_loss=0.2196, pruned_loss=0.01901, over 4705.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2632, pruned_loss=0.04077, over 942541.51 frames. 
], batch size: 11, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:22:57,224 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.887e+01 6.413e+01 7.105e+01 1.184e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 03:22:59,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233612.0, ans=0.125 +2024-07-29 03:23:25,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=233625.33333333334, ans=0.125 +2024-07-29 03:23:33,205 INFO [train.py:1114] (0/4) Epoch 18, batch 1450, loss[loss=0.2065, simple_loss=0.3052, pruned_loss=0.05387, over 4687.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2638, pruned_loss=0.04102, over 942656.39 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:23:36,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=233638.66666666666, ans=0.125 +2024-07-29 03:23:37,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=233638.66666666666, ans=0.125 +2024-07-29 03:23:47,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.36 vs. limit=22.5 +2024-07-29 03:23:55,315 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=233678.66666666666, ans=0.125 +2024-07-29 03:23:57,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=233678.66666666666, ans=0.125 +2024-07-29 03:23:57,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=233678.66666666666, ans=0.025 +2024-07-29 03:23:58,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233678.66666666666, ans=0.125 +2024-07-29 03:23:58,819 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.25 vs. limit=22.5 +2024-07-29 03:24:09,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233705.33333333334, ans=0.1 +2024-07-29 03:24:09,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=233705.33333333334, ans=6.0 +2024-07-29 03:24:09,764 INFO [train.py:1114] (0/4) Epoch 18, batch 1500, loss[loss=0.1528, simple_loss=0.2509, pruned_loss=0.02731, over 4807.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2648, pruned_loss=0.04149, over 942012.24 frames. 
], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:24:10,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=233705.33333333334, ans=0.125 +2024-07-29 03:24:44,219 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.562e+01 6.096e+01 6.763e+01 1.145e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 03:24:45,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=233745.33333333334, ans=0.025 +2024-07-29 03:24:45,191 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=233745.33333333334, ans=0.0 +2024-07-29 03:24:45,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=233745.33333333334, ans=15.0 +2024-07-29 03:25:14,228 INFO [train.py:1114] (0/4) Epoch 18, batch 1550, loss[loss=0.1753, simple_loss=0.2786, pruned_loss=0.03597, over 4904.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2657, pruned_loss=0.04176, over 938053.30 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:25:14,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233772.0, ans=0.1 +2024-07-29 03:25:16,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233772.0, ans=0.125 +2024-07-29 03:25:17,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=233772.0, ans=0.125 +2024-07-29 03:25:17,867 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233772.0, ans=0.1 +2024-07-29 03:25:21,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=233772.0, ans=10.0 +2024-07-29 03:25:21,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=233772.0, ans=0.0 +2024-07-29 03:25:22,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=233772.0, ans=0.0 +2024-07-29 03:25:24,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.70 vs. limit=15.0 +2024-07-29 03:25:44,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=233812.0, ans=0.0 +2024-07-29 03:25:48,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233825.33333333334, ans=0.125 +2024-07-29 03:25:52,695 INFO [train.py:1114] (0/4) Epoch 18, batch 1600, loss[loss=0.1757, simple_loss=0.2756, pruned_loss=0.03791, over 4876.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2652, pruned_loss=0.04124, over 936535.73 frames. 
], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:26:01,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=233852.0, ans=0.0 +2024-07-29 03:26:03,068 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=233852.0, ans=0.0 +2024-07-29 03:26:04,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=233852.0, ans=0.025 +2024-07-29 03:26:07,025 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.50 vs. limit=15.0 +2024-07-29 03:26:09,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=233865.33333333334, ans=0.125 +2024-07-29 03:26:12,703 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.724e+01 6.283e+01 7.250e+01 9.354e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 03:26:12,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=233878.66666666666, ans=0.0 +2024-07-29 03:26:22,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233878.66666666666, ans=0.125 +2024-07-29 03:26:25,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=233878.66666666666, ans=0.125 +2024-07-29 03:26:28,742 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0 +2024-07-29 03:26:29,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=233878.66666666666, ans=0.0 +2024-07-29 03:26:35,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233892.0, ans=0.1 +2024-07-29 03:26:37,502 INFO [train.py:1114] (0/4) Epoch 18, batch 1650, loss[loss=0.1658, simple_loss=0.2617, pruned_loss=0.035, over 4673.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2647, pruned_loss=0.04095, over 937013.22 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:27:04,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=233945.33333333334, ans=0.025 +2024-07-29 03:27:25,606 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.26 vs. limit=22.5 +2024-07-29 03:27:42,555 INFO [train.py:1114] (0/4) Epoch 18, batch 1700, loss[loss=0.1599, simple_loss=0.2403, pruned_loss=0.03976, over 4720.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.264, pruned_loss=0.04073, over 938967.75 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:27:46,611 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.78 vs. limit=22.5 +2024-07-29 03:27:47,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.81 vs. 
limit=22.5 +2024-07-29 03:27:49,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=233985.33333333334, ans=0.125 +2024-07-29 03:27:53,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=233985.33333333334, ans=0.125 +2024-07-29 03:27:58,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=233998.66666666666, ans=0.09899494936611666 +2024-07-29 03:28:02,133 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:28:03,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0 +2024-07-29 03:28:03,945 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.990e+01 5.769e+01 6.208e+01 7.214e+01 1.058e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 03:28:06,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=234012.0, ans=0.2 +2024-07-29 03:28:09,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234012.0, ans=0.1 +2024-07-29 03:28:16,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234025.33333333334, ans=0.125 +2024-07-29 03:28:18,160 INFO [train.py:1114] (0/4) Epoch 18, batch 1750, loss[loss=0.1621, simple_loss=0.2349, pruned_loss=0.04466, over 4794.00 frames. ], tot_loss[loss=0.172, simple_loss=0.263, pruned_loss=0.04055, over 939788.51 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:28:21,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=234038.66666666666, ans=0.2 +2024-07-29 03:28:22,347 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=234038.66666666666, ans=0.2 +2024-07-29 03:28:26,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=234052.0, ans=0.1 +2024-07-29 03:28:36,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234065.33333333334, ans=0.1 +2024-07-29 03:28:45,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234092.0, ans=0.125 +2024-07-29 03:28:50,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=234105.33333333334, ans=0.125 +2024-07-29 03:28:51,358 INFO [train.py:1114] (0/4) Epoch 18, batch 1800, loss[loss=0.2047, simple_loss=0.2905, pruned_loss=0.05947, over 4636.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2638, pruned_loss=0.04128, over 940091.17 frames. 
], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:28:53,398 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:29:05,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234132.0, ans=0.1
+2024-07-29 03:29:06,852 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.06 vs. limit=15.0
+2024-07-29 03:29:10,856 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.659e+01 6.366e+01 7.110e+01 1.077e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-29 03:29:12,647 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0
+2024-07-29 03:29:27,023 INFO [train.py:1114] (0/4) Epoch 18, batch 1850, loss[loss=0.1979, simple_loss=0.2928, pruned_loss=0.05153, over 4797.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.263, pruned_loss=0.04091, over 940487.03 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:29:34,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=234172.0, ans=0.125
+2024-07-29 03:29:43,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234198.66666666666, ans=0.0
+2024-07-29 03:29:46,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=234198.66666666666, ans=0.125
+2024-07-29 03:29:49,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=234212.0, ans=0.125
+2024-07-29 03:29:56,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=234225.33333333334, ans=0.0
+2024-07-29 03:29:57,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=234225.33333333334, ans=0.07
+2024-07-29 03:29:59,704 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=234225.33333333334, ans=0.0
+2024-07-29 03:30:03,576 INFO [train.py:1114] (0/4) Epoch 18, batch 1900, loss[loss=0.1913, simple_loss=0.2835, pruned_loss=0.0496, over 4663.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2636, pruned_loss=0.04116, over 941529.83 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:30:09,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=234252.0, ans=0.125
+2024-07-29 03:30:10,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=234252.0, ans=0.125
+2024-07-29 03:30:12,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=234252.0, ans=0.05
+2024-07-29 03:30:19,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=234265.33333333334, ans=0.2
+2024-07-29 03:30:22,649 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.553e+01 6.272e+01 7.085e+01 9.977e+01, threshold=1.254e+02, percent-clipped=0.0
+2024-07-29 03:30:27,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234278.66666666666, ans=0.1
+2024-07-29 03:30:30,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=234292.0, ans=0.04949747468305833
+2024-07-29 03:30:36,399 INFO [train.py:1114] (0/4) Epoch 18, batch 1950, loss[loss=0.1525, simple_loss=0.2522, pruned_loss=0.02638, over 4894.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2655, pruned_loss=0.04147, over 943585.64 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:30:39,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=234305.33333333334, ans=0.09899494936611666
+2024-07-29 03:30:41,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234305.33333333334, ans=0.1
+2024-07-29 03:30:43,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=234318.66666666666, ans=0.125
+2024-07-29 03:30:47,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=234318.66666666666, ans=0.025
+2024-07-29 03:30:56,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=234345.33333333334, ans=0.2
+2024-07-29 03:31:10,190 INFO [train.py:1114] (0/4) Epoch 18, batch 2000, loss[loss=0.144, simple_loss=0.2336, pruned_loss=0.02721, over 4812.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2651, pruned_loss=0.04127, over 940739.24 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:31:17,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234372.0, ans=0.125
+2024-07-29 03:31:28,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234398.66666666666, ans=0.1
+2024-07-29 03:31:29,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=234398.66666666666, ans=0.0
+2024-07-29 03:31:33,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=234398.66666666666, ans=0.0
+2024-07-29 03:31:33,957 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.658e+01 6.506e+01 7.206e+01 1.041e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-29 03:31:36,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=234412.0, ans=0.125
+2024-07-29 03:31:48,062 INFO [train.py:1114] (0/4) Epoch 18, batch 2050, loss[loss=0.137, simple_loss=0.2234, pruned_loss=0.0253, over 4615.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.041, over 938803.24 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:32:12,318 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=22.5
+2024-07-29 03:32:39,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=234478.66666666666, ans=0.2
+2024-07-29 03:32:39,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234478.66666666666, ans=0.125
+2024-07-29 03:32:44,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=234492.0, ans=0.125
+2024-07-29 03:32:53,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=234492.0, ans=0.0
+2024-07-29 03:32:54,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234492.0, ans=0.0
+2024-07-29 03:32:59,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=234492.0, ans=0.1
+2024-07-29 03:32:59,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=234492.0, ans=0.1
+2024-07-29 03:33:00,548 INFO [train.py:1114] (0/4) Epoch 18, batch 2100, loss[loss=0.174, simple_loss=0.2591, pruned_loss=0.04442, over 4773.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2641, pruned_loss=0.04103, over 940869.39 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:33:01,670 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=15.0
+2024-07-29 03:33:03,286 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234505.33333333334, ans=0.125
+2024-07-29 03:33:19,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=234532.0, ans=0.125
+2024-07-29 03:33:21,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=234532.0, ans=0.125
+2024-07-29 03:33:22,963 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.524e+01 6.278e+01 7.367e+01 1.141e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-29 03:33:29,865 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234545.33333333334, ans=0.125
+2024-07-29 03:33:51,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=234545.33333333334, ans=0.05
+2024-07-29 03:33:55,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=234558.66666666666, ans=0.125
+2024-07-29 03:33:59,992 INFO [train.py:1114] (0/4) Epoch 18, batch 2150, loss[loss=0.2147, simple_loss=0.3015, pruned_loss=0.06392, over 4889.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2638, pruned_loss=0.04115, over 943980.11 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:34:02,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=234572.0, ans=0.125
+2024-07-29 03:34:03,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234572.0, ans=0.0
+2024-07-29 03:34:03,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234572.0, ans=0.125
+2024-07-29 03:34:10,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234585.33333333334, ans=0.125
+2024-07-29 03:34:18,583 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.27 vs. limit=15.0
+2024-07-29 03:34:19,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=234598.66666666666, ans=0.125
+2024-07-29 03:34:36,822 INFO [train.py:1114] (0/4) Epoch 18, batch 2200, loss[loss=0.1693, simple_loss=0.2612, pruned_loss=0.03874, over 4812.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2643, pruned_loss=0.0411, over 943251.37 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:34:42,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=234652.0, ans=0.125
+2024-07-29 03:34:50,161 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-176000.pt
+2024-07-29 03:34:57,726 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.703e+01 6.363e+01 7.397e+01 1.281e+02, threshold=1.273e+02, percent-clipped=1.0
+2024-07-29 03:35:11,870 INFO [train.py:1114] (0/4) Epoch 18, batch 2250, loss[loss=0.1682, simple_loss=0.2546, pruned_loss=0.04094, over 4694.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2642, pruned_loss=0.04077, over 941936.12 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:35:34,107 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.15 vs. limit=15.0
+2024-07-29 03:35:35,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0
+2024-07-29 03:35:37,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=234758.66666666666, ans=0.125
+2024-07-29 03:35:42,332 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.31 vs. limit=15.0
+2024-07-29 03:35:45,217 INFO [train.py:1114] (0/4) Epoch 18, batch 2300, loss[loss=0.1749, simple_loss=0.2492, pruned_loss=0.05035, over 4948.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.04078, over 940166.19 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:35:57,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=234785.33333333334, ans=0.025
+2024-07-29 03:36:06,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.677e+01 6.195e+01 6.878e+01 1.027e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-29 03:36:20,966 INFO [train.py:1114] (0/4) Epoch 18, batch 2350, loss[loss=0.1844, simple_loss=0.293, pruned_loss=0.03789, over 4638.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2644, pruned_loss=0.04102, over 942021.05 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:36:28,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=234852.0, ans=0.0
+2024-07-29 03:36:34,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234865.33333333334, ans=0.0
+2024-07-29 03:36:39,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.53 vs. limit=15.0
+2024-07-29 03:36:41,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.91 vs. limit=15.0
+2024-07-29 03:36:41,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=234878.66666666666, ans=0.0
+2024-07-29 03:36:45,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=234878.66666666666, ans=0.05
+2024-07-29 03:36:46,613 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.11 vs. limit=15.0
+2024-07-29 03:36:47,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=234878.66666666666, ans=0.125
+2024-07-29 03:36:55,209 INFO [train.py:1114] (0/4) Epoch 18, batch 2400, loss[loss=0.1402, simple_loss=0.2356, pruned_loss=0.02237, over 4643.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2648, pruned_loss=0.04107, over 941755.06 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:37:11,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234932.0, ans=0.0
+2024-07-29 03:37:15,794 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.568e+01 6.148e+01 7.129e+01 1.066e+02, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 03:37:44,192 INFO [train.py:1114] (0/4) Epoch 18, batch 2450, loss[loss=0.1851, simple_loss=0.2674, pruned_loss=0.05135, over 4698.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2661, pruned_loss=0.04198, over 937200.89 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:37:47,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234972.0, ans=0.125
+2024-07-29 03:37:55,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=234985.33333333334, ans=0.0
+2024-07-29 03:38:02,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=234985.33333333334, ans=0.07
+2024-07-29 03:38:40,556 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=12.0
+2024-07-29 03:38:53,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=235038.66666666666, ans=0.1
+2024-07-29 03:38:53,615 INFO [train.py:1114] (0/4) Epoch 18, batch 2500, loss[loss=0.206, simple_loss=0.3083, pruned_loss=0.05185, over 4820.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2661, pruned_loss=0.04236, over 939311.89 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:39:57,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235052.0, ans=0.1
+2024-07-29 03:40:02,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.96 vs. limit=15.0
+2024-07-29 03:40:05,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=235065.33333333334, ans=0.025
+2024-07-29 03:40:08,868 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.526e+01 6.445e+01 7.148e+01 1.003e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-29 03:40:10,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=235078.66666666666, ans=0.0
+2024-07-29 03:40:12,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=235078.66666666666, ans=0.2
+2024-07-29 03:40:25,114 INFO [train.py:1114] (0/4) Epoch 18, batch 2550, loss[loss=0.1775, simple_loss=0.2574, pruned_loss=0.0488, over 4812.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2654, pruned_loss=0.04203, over 939061.08 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:40:58,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235145.33333333334, ans=0.1
+2024-07-29 03:41:01,117 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235145.33333333334, ans=0.1
+2024-07-29 03:41:04,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=235158.66666666666, ans=0.125
+2024-07-29 03:41:07,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.18 vs. limit=22.5
+2024-07-29 03:41:07,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=235158.66666666666, ans=15.0
+2024-07-29 03:41:10,215 INFO [train.py:1114] (0/4) Epoch 18, batch 2600, loss[loss=0.1646, simple_loss=0.2567, pruned_loss=0.03621, over 4898.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2663, pruned_loss=0.04212, over 937953.45 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0
+2024-07-29 03:41:26,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=235185.33333333334, ans=0.125
+2024-07-29 03:41:28,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=235185.33333333334, ans=0.125
+2024-07-29 03:41:36,745 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.697e+01 6.154e+01 6.937e+01 9.396e+01, threshold=1.231e+02, percent-clipped=0.0
+2024-07-29 03:41:49,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=235212.0, ans=0.09899494936611666
+2024-07-29 03:41:58,071 INFO [train.py:1114] (0/4) Epoch 18, batch 2650, loss[loss=0.195, simple_loss=0.2816, pruned_loss=0.05419, over 4620.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.266, pruned_loss=0.04189, over 940104.30 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:41:58,599 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.18 vs. limit=22.5
+2024-07-29 03:42:07,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.68 vs. limit=22.5
+2024-07-29 03:42:36,013 INFO [train.py:1114] (0/4) Epoch 18, batch 2700, loss[loss=0.1596, simple_loss=0.2537, pruned_loss=0.03279, over 4742.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2652, pruned_loss=0.04155, over 940307.87 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:42:42,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235305.33333333334, ans=0.125
+2024-07-29 03:42:59,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=235332.0, ans=0.0
+2024-07-29 03:43:04,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235332.0, ans=0.125
+2024-07-29 03:43:06,507 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.534e+01 6.342e+01 7.179e+01 1.053e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-29 03:43:29,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.92 vs. limit=10.0
+2024-07-29 03:43:30,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.54 vs. limit=10.0
+2024-07-29 03:43:33,230 INFO [train.py:1114] (0/4) Epoch 18, batch 2750, loss[loss=0.1546, simple_loss=0.2443, pruned_loss=0.03247, over 4705.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2644, pruned_loss=0.0418, over 939868.29 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:43:34,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=235372.0, ans=0.025
+2024-07-29 03:43:41,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235385.33333333334, ans=0.1
+2024-07-29 03:43:48,444 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.04 vs. limit=6.0
+2024-07-29 03:43:57,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=235398.66666666666, ans=0.125
+2024-07-29 03:44:15,750 INFO [train.py:1114] (0/4) Epoch 18, batch 2800, loss[loss=0.1943, simple_loss=0.2698, pruned_loss=0.05942, over 3385.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2644, pruned_loss=0.04179, over 937702.10 frames. ], batch size: 36, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:44:24,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=235452.0, ans=0.2
+2024-07-29 03:44:36,877 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.675e+01 6.325e+01 7.095e+01 1.073e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 03:46:26,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=235478.66666666666, ans=0.0
+2024-07-29 03:46:37,206 INFO [train.py:1114] (0/4) Epoch 18, batch 2850, loss[loss=0.1504, simple_loss=0.2296, pruned_loss=0.03558, over 4958.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2641, pruned_loss=0.04167, over 935748.27 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:46:37,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=235505.33333333334, ans=0.125
+2024-07-29 03:46:42,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=235505.33333333334, ans=0.2
+2024-07-29 03:46:56,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. limit=15.0
+2024-07-29 03:47:07,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=235558.66666666666, ans=0.0
+2024-07-29 03:47:07,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=235558.66666666666, ans=0.125
+2024-07-29 03:47:08,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.67 vs. limit=12.0
+2024-07-29 03:47:10,107 INFO [train.py:1114] (0/4) Epoch 18, batch 2900, loss[loss=0.2089, simple_loss=0.3005, pruned_loss=0.05866, over 4818.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2652, pruned_loss=0.04166, over 939598.79 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:47:10,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235572.0, ans=0.125
+2024-07-29 03:47:12,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=235572.0, ans=0.0
+2024-07-29 03:47:22,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=235585.33333333334, ans=0.125
+2024-07-29 03:47:23,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=235598.66666666666, ans=0.125
+2024-07-29 03:47:28,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.19 vs. limit=12.0
+2024-07-29 03:47:29,764 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.690e+01 6.267e+01 7.332e+01 1.125e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 03:47:45,812 INFO [train.py:1114] (0/4) Epoch 18, batch 2950, loss[loss=0.1333, simple_loss=0.2189, pruned_loss=0.0239, over 4707.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2647, pruned_loss=0.0416, over 939028.07 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:47:47,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=235638.66666666666, ans=0.125
+2024-07-29 03:47:48,510 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:47:50,420 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.61 vs. limit=15.0
+2024-07-29 03:47:59,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=235652.0, ans=0.125
+2024-07-29 03:47:59,500 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.06 vs. limit=15.0
+2024-07-29 03:48:05,632 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:48:13,039 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=12.0
+2024-07-29 03:48:14,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235692.0, ans=0.125
+2024-07-29 03:48:16,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=235692.0, ans=0.0
+2024-07-29 03:48:18,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235692.0, ans=0.0
+2024-07-29 03:48:21,409 INFO [train.py:1114] (0/4) Epoch 18, batch 3000, loss[loss=0.1641, simple_loss=0.2593, pruned_loss=0.0345, over 4752.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2645, pruned_loss=0.04186, over 938648.20 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:48:21,410 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-29 03:48:39,664 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.5565, 3.2384, 3.5567, 3.8758], device='cuda:0')
+2024-07-29 03:48:39,750 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.0227, 3.7064, 3.2621, 2.8358], device='cuda:0')
+2024-07-29 03:48:44,158 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1624, simple_loss=0.2643, pruned_loss=0.03024, over 944034.00 frames.
+2024-07-29 03:48:44,159 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-29 03:48:52,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=235718.66666666666, ans=0.125
+2024-07-29 03:49:04,554 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.607e+01 6.067e+01 7.332e+01 1.132e+02, threshold=1.213e+02, percent-clipped=0.0
+2024-07-29 03:49:08,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=235745.33333333334, ans=0.0
+2024-07-29 03:49:18,684 INFO [train.py:1114] (0/4) Epoch 18, batch 3050, loss[loss=0.1664, simple_loss=0.2602, pruned_loss=0.03634, over 4644.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04191, over 937729.52 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:49:20,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=235772.0, ans=0.125
+2024-07-29 03:49:21,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.42 vs. limit=22.5
+2024-07-29 03:49:26,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=235785.33333333334, ans=0.125
+2024-07-29 03:49:26,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=235785.33333333334, ans=0.2
+2024-07-29 03:49:39,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=12.0
+2024-07-29 03:49:41,869 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.09 vs. limit=15.0
+2024-07-29 03:49:46,729 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=235825.33333333334, ans=0.125
+2024-07-29 03:49:47,703 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=15.0
+2024-07-29 03:49:53,604 INFO [train.py:1114] (0/4) Epoch 18, batch 3100, loss[loss=0.1639, simple_loss=0.2586, pruned_loss=0.03462, over 4625.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.042, over 938213.55 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:50:02,513 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=235852.0, ans=0.125
+2024-07-29 03:50:10,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=235865.33333333334, ans=0.125
+2024-07-29 03:50:14,448 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.311e+01 5.371e+01 5.941e+01 6.939e+01 1.181e+02, threshold=1.188e+02, percent-clipped=0.0
+2024-07-29 03:50:25,641 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0
+2024-07-29 03:50:27,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.27 vs. limit=15.0
+2024-07-29 03:50:28,793 INFO [train.py:1114] (0/4) Epoch 18, batch 3150, loss[loss=0.178, simple_loss=0.2705, pruned_loss=0.04277, over 4611.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2643, pruned_loss=0.04171, over 938025.83 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:50:35,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. limit=15.0
+2024-07-29 03:50:46,248 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=235932.0, ans=0.0
+2024-07-29 03:50:50,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=235945.33333333334, ans=0.0
+2024-07-29 03:50:51,178 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.04 vs. limit=22.5
+2024-07-29 03:51:01,999 INFO [train.py:1114] (0/4) Epoch 18, batch 3200, loss[loss=0.1561, simple_loss=0.2508, pruned_loss=0.03074, over 4821.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2635, pruned_loss=0.0411, over 939702.99 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:51:08,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=235985.33333333334, ans=0.125
+2024-07-29 03:51:08,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=235985.33333333334, ans=0.09899494936611666
+2024-07-29 03:51:11,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235985.33333333334, ans=0.0
+2024-07-29 03:51:13,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=235985.33333333334, ans=0.2
+2024-07-29 03:51:23,932 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.694e+01 6.317e+01 7.020e+01 1.050e+02, threshold=1.263e+02, percent-clipped=0.0
+2024-07-29 03:51:24,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=236012.0, ans=0.125
+2024-07-29 03:51:24,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236012.0, ans=0.125
+2024-07-29 03:51:24,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=236012.0, ans=0.125
+2024-07-29 03:51:26,843 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236012.0, ans=0.1
+2024-07-29 03:51:33,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=236025.33333333334, ans=0.125
+2024-07-29 03:51:35,370 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=236025.33333333334, ans=0.125
+2024-07-29 03:51:38,119 INFO [train.py:1114] (0/4) Epoch 18, batch 3250, loss[loss=0.1588, simple_loss=0.2508, pruned_loss=0.03338, over 4929.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2633, pruned_loss=0.04081, over 940414.75 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:51:56,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=236065.33333333334, ans=0.0
+2024-07-29 03:51:57,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=236065.33333333334, ans=0.0
+2024-07-29 03:52:05,857 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.69 vs. limit=15.0
+2024-07-29 03:52:11,779 INFO [train.py:1114] (0/4) Epoch 18, batch 3300, loss[loss=0.2014, simple_loss=0.2899, pruned_loss=0.05645, over 4700.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2632, pruned_loss=0.04122, over 940900.40 frames. ], batch size: 19, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:52:12,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=236105.33333333334, ans=0.125
+2024-07-29 03:52:27,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=236132.0, ans=0.2
+2024-07-29 03:52:28,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=236132.0, ans=0.07
+2024-07-29 03:52:31,045 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.049e+01 5.536e+01 6.135e+01 6.929e+01 1.182e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 03:52:41,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=236158.66666666666, ans=0.0
+2024-07-29 03:52:45,473 INFO [train.py:1114] (0/4) Epoch 18, batch 3350, loss[loss=0.1707, simple_loss=0.2728, pruned_loss=0.03432, over 4612.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.264, pruned_loss=0.04162, over 938620.08 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:52:46,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.28 vs. limit=12.0
+2024-07-29 03:52:49,437 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.21 vs. limit=15.0
+2024-07-29 03:52:57,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=236185.33333333334, ans=0.125
+2024-07-29 03:52:57,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=236185.33333333334, ans=0.2
+2024-07-29 03:53:00,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=236198.66666666666, ans=0.125
+2024-07-29 03:53:09,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=236212.0, ans=0.05
+2024-07-29 03:53:11,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=236212.0, ans=0.0
+2024-07-29 03:53:18,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=236225.33333333334, ans=0.0
+2024-07-29 03:53:23,135 INFO [train.py:1114] (0/4) Epoch 18, batch 3400, loss[loss=0.1506, simple_loss=0.2263, pruned_loss=0.03747, over 4809.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2638, pruned_loss=0.04167, over 937256.90 frames. ], batch size: 11, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:53:26,700 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236238.66666666666, ans=0.125
+2024-07-29 03:53:41,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236265.33333333334, ans=0.125
+2024-07-29 03:53:43,254 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.614e+01 5.661e+01 6.178e+01 6.933e+01 1.009e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-29 03:53:57,549 INFO [train.py:1114] (0/4) Epoch 18, batch 3450, loss[loss=0.176, simple_loss=0.2694, pruned_loss=0.04133, over 4689.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2645, pruned_loss=0.0418, over 937393.97 frames. ], batch size: 19, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:54:06,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=236318.66666666666, ans=0.0
+2024-07-29 03:54:08,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236318.66666666666, ans=0.1
+2024-07-29 03:54:08,609 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.35 vs. limit=15.0
+2024-07-29 03:54:14,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=236332.0, ans=0.0
+2024-07-29 03:54:14,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=236332.0, ans=0.0
+2024-07-29 03:54:16,380 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236332.0, ans=0.125
+2024-07-29 03:54:21,906 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-29 03:54:30,904 INFO [train.py:1114] (0/4) Epoch 18, batch 3500, loss[loss=0.1798, simple_loss=0.2667, pruned_loss=0.0465, over 4932.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2634, pruned_loss=0.04094, over 937791.17 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 03:54:38,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236385.33333333334, ans=0.125
+2024-07-29 03:54:40,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=236385.33333333334, ans=0.0
+2024-07-29 03:54:50,373 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.401e+01 5.925e+01 6.709e+01 9.541e+01, threshold=1.185e+02, percent-clipped=0.0
+2024-07-29 03:54:53,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=236412.0, ans=0.125
+2024-07-29 03:54:54,721 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.68 vs. limit=15.0
+2024-07-29 03:54:55,430 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.01 vs. limit=10.0
+2024-07-29 03:54:55,452 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.34 vs. limit=15.0
+2024-07-29 03:54:58,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=236425.33333333334, ans=0.2
+2024-07-29 03:55:04,405 INFO [train.py:1114] (0/4) Epoch 18, batch 3550, loss[loss=0.1653, simple_loss=0.2569, pruned_loss=0.03686, over 4670.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.264, pruned_loss=0.04147, over 938359.84 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 03:55:07,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=236438.66666666666, ans=0.09899494936611666
+2024-07-29 03:55:39,456 INFO [train.py:1114] (0/4) Epoch 18, batch 3600, loss[loss=0.1848, simple_loss=0.2676, pruned_loss=0.05097, over 4951.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2638, pruned_loss=0.04131, over 939632.31 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 03:55:58,953 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.27 vs. limit=22.5
+2024-07-29 03:55:59,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.793e+01 6.774e+01 8.193e+01 1.238e+02, threshold=1.355e+02, percent-clipped=1.0
+2024-07-29 03:56:00,238 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.16 vs. limit=22.5
+2024-07-29 03:56:01,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=236545.33333333334, ans=0.125
+2024-07-29 03:56:01,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=22.5
+2024-07-29 03:56:02,839 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.99 vs. limit=10.0
+2024-07-29 03:56:07,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.53 vs. limit=22.5
+2024-07-29 03:56:13,436 INFO [train.py:1114] (0/4) Epoch 18, batch 3650, loss[loss=0.1699, simple_loss=0.2683, pruned_loss=0.0358, over 4900.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2629, pruned_loss=0.04109, over 940722.49 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 64.0
+2024-07-29 03:56:20,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=236585.33333333334, ans=0.2
+2024-07-29 03:56:23,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. limit=6.0
+2024-07-29 03:56:31,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236598.66666666666, ans=0.125
+2024-07-29 03:56:46,995 INFO [train.py:1114] (0/4) Epoch 18, batch 3700, loss[loss=0.1849, simple_loss=0.2787, pruned_loss=0.04553, over 4936.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2627, pruned_loss=0.04086, over 941782.60 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0
+2024-07-29 03:57:04,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=236665.33333333334, ans=0.125
+2024-07-29 03:57:06,641 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:57:07,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=236665.33333333334, ans=0.125
+2024-07-29 03:57:07,767 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.372e+01 6.083e+01 6.875e+01 9.330e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-29 03:57:10,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236678.66666666666, ans=0.1
+2024-07-29 03:57:12,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236678.66666666666, ans=0.1
+2024-07-29 03:57:15,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=236692.0, ans=0.0
+2024-07-29 03:57:15,196 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:57:22,956 INFO [train.py:1114] (0/4) Epoch 18, batch 3750, loss[loss=0.1641, simple_loss=0.2495, pruned_loss=0.03941, over 4803.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2631, pruned_loss=0.04109, over 943196.25 frames. ], batch size: 11, lr: 4.15e-03, grad_scale: 64.0
+2024-07-29 03:57:29,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=236705.33333333334, ans=0.0
+2024-07-29 03:57:38,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=236718.66666666666, ans=0.125
+2024-07-29 03:57:45,321 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=236732.0, ans=0.2
+2024-07-29 03:57:48,149 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=15.0
+2024-07-29 03:57:57,268 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.46 vs. limit=22.5
+2024-07-29 03:58:00,334 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236745.33333333334, ans=0.125
+2024-07-29 03:58:03,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=236758.66666666666, ans=0.0
+2024-07-29 03:58:04,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=236758.66666666666, ans=0.0
+2024-07-29 03:58:09,066 INFO [train.py:1114] (0/4) Epoch 18, batch 3800, loss[loss=0.1918, simple_loss=0.2904, pruned_loss=0.04657, over 4815.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2624, pruned_loss=0.04086, over 941822.36 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0
+2024-07-29 03:58:09,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.36 vs. limit=15.0
+2024-07-29 03:58:23,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236798.66666666666, ans=0.125
+2024-07-29 03:58:28,166 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.460e+01 5.890e+01 6.474e+01 8.788e+01, threshold=1.178e+02, percent-clipped=0.0
+2024-07-29 03:58:36,325 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.18 vs. limit=15.0
+2024-07-29 03:58:39,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236825.33333333334, ans=0.1
+2024-07-29 03:58:44,396 INFO [train.py:1114] (0/4) Epoch 18, batch 3850, loss[loss=0.2096, simple_loss=0.2988, pruned_loss=0.06023, over 4658.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.264, pruned_loss=0.04142, over 942299.68 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 64.0
+2024-07-29 03:59:04,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236865.33333333334, ans=0.125
+2024-07-29 03:59:04,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236865.33333333334, ans=0.125
+2024-07-29 03:59:07,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=236878.66666666666, ans=15.0
+2024-07-29 03:59:09,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236878.66666666666, ans=0.125
+2024-07-29 03:59:12,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236892.0, ans=0.125
+2024-07-29 03:59:19,428 INFO [train.py:1114] (0/4) Epoch 18, batch 3900, loss[loss=0.1703, simple_loss=0.2697, pruned_loss=0.03546, over 4819.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2651, pruned_loss=0.04168, over 942501.28 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0
+2024-07-29 03:59:19,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236905.33333333334, ans=0.125
+2024-07-29 03:59:36,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=236932.0, ans=0.2
+2024-07-29 03:59:42,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.580e+01 6.101e+01 6.865e+01 9.868e+01, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 03:59:56,441 INFO [train.py:1114] (0/4) Epoch 18, batch 3950, loss[loss=0.1845, simple_loss=0.2731, pruned_loss=0.04795, over 4838.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2647, pruned_loss=0.04165, over 944756.83 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:00:06,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=236985.33333333334, ans=0.0
+2024-07-29 04:00:25,572 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.71 vs. limit=5.0
+2024-07-29 04:00:28,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237025.33333333334, ans=0.1
+2024-07-29 04:00:29,756 INFO [train.py:1114] (0/4) Epoch 18, batch 4000, loss[loss=0.1521, simple_loss=0.2431, pruned_loss=0.03054, over 4766.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2651, pruned_loss=0.04209, over 941332.42 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:00:31,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=237038.66666666666, ans=0.2
+2024-07-29 04:00:38,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237052.0, ans=0.125
+2024-07-29 04:00:43,862 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237065.33333333334, ans=0.125
+2024-07-29 04:00:44,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.74 vs. limit=22.5
+2024-07-29 04:00:46,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=237065.33333333334, ans=0.035
+2024-07-29 04:00:47,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=237065.33333333334, ans=0.025
+2024-07-29 04:00:47,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=237065.33333333334, ans=0.025
+2024-07-29 04:00:49,603 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.687e+01 6.252e+01 7.100e+01 1.258e+02, threshold=1.250e+02, percent-clipped=1.0
+2024-07-29 04:00:58,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.95 vs. limit=15.0
+2024-07-29 04:01:00,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237092.0, ans=0.125
+2024-07-29 04:01:02,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=237092.0, ans=0.0
+2024-07-29 04:01:03,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=10.77 vs. limit=15.0
+2024-07-29 04:01:03,439 INFO [train.py:1114] (0/4) Epoch 18, batch 4050, loss[loss=0.2282, simple_loss=0.3048, pruned_loss=0.07579, over 3508.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2657, pruned_loss=0.04242, over 939638.31 frames. ], batch size: 35, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:01:27,489 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=237145.33333333334, ans=0.125
+2024-07-29 04:01:41,043 INFO [train.py:1114] (0/4) Epoch 18, batch 4100, loss[loss=0.1697, simple_loss=0.2583, pruned_loss=0.04062, over 4914.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.266, pruned_loss=0.04276, over 938815.41 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:01:43,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=237172.0, ans=0.0
+2024-07-29 04:01:45,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=237172.0, ans=0.125
+2024-07-29 04:01:53,557 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=237185.33333333334, ans=0.0
+2024-07-29 04:01:56,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=237198.66666666666, ans=0.125
+2024-07-29 04:01:57,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=237198.66666666666, ans=0.125
+2024-07-29 04:02:00,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=237212.0, ans=0.0
+2024-07-29 04:02:01,261 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.605e+01 6.193e+01 7.147e+01 1.131e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-29 04:02:05,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=237212.0, ans=0.0
+2024-07-29 04:02:09,929 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.11 vs. limit=22.5
+2024-07-29 04:02:26,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=237225.33333333334, ans=0.05
+2024-07-29 04:02:51,908 INFO [train.py:1114] (0/4) Epoch 18, batch 4150, loss[loss=0.1559, simple_loss=0.2564, pruned_loss=0.0277, over 4820.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2652, pruned_loss=0.04214, over 938575.75 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:03:50,143 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=237252.0, ans=0.125
+2024-07-29 04:04:31,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237292.0, ans=0.1
+2024-07-29 04:04:32,877 INFO [train.py:1114] (0/4) Epoch 18, batch 4200, loss[loss=0.2149, simple_loss=0.3006, pruned_loss=0.06465, over 4898.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2655, pruned_loss=0.04237, over 939868.29 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:04:38,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237305.33333333334, ans=0.1
+2024-07-29 04:04:43,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.65 vs. limit=22.5
+2024-07-29 04:04:49,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237332.0, ans=0.125
+2024-07-29 04:06:02,034 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+01 5.423e+01 5.970e+01 6.521e+01 1.016e+02, threshold=1.194e+02, percent-clipped=0.0
+2024-07-29 04:06:15,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=237358.66666666666, ans=0.2
+2024-07-29 04:06:16,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=237358.66666666666, ans=0.125
+2024-07-29 04:06:18,672 INFO [train.py:1114] (0/4) Epoch 18, batch 4250, loss[loss=0.1813, simple_loss=0.2725, pruned_loss=0.04505, over 4640.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2654, pruned_loss=0.04215, over 940901.43 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:06:29,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=237385.33333333334, ans=0.125
+2024-07-29 04:06:36,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=237398.66666666666, ans=0.125
+2024-07-29 04:06:44,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=237412.0, ans=0.125
+2024-07-29 04:06:46,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=237425.33333333334, ans=0.0
+2024-07-29 04:06:48,800 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.82 vs. limit=15.0
+2024-07-29 04:06:50,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=237425.33333333334, ans=0.2
+2024-07-29 04:06:52,271 INFO [train.py:1114] (0/4) Epoch 18, batch 4300, loss[loss=0.1804, simple_loss=0.2801, pruned_loss=0.04035, over 4758.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2658, pruned_loss=0.04254, over 939623.07 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0
+2024-07-29 04:07:17,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237465.33333333334, ans=0.1
+2024-07-29 04:07:18,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=15.0
+2024-07-29 04:07:24,056 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.877e+01 6.356e+01 7.291e+01 9.513e+01, threshold=1.271e+02, percent-clipped=0.0
+2024-07-29 04:07:25,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.94 vs. limit=15.0
+2024-07-29 04:07:37,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=237505.33333333334, ans=0.2
+2024-07-29 04:07:37,542 INFO [train.py:1114] (0/4) Epoch 18, batch 4350, loss[loss=0.1792, simple_loss=0.2674, pruned_loss=0.04552, over 4763.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.266, pruned_loss=0.04215, over 940760.82 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0
+2024-07-29 04:07:41,518 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:07:42,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.45 vs. limit=10.0
+2024-07-29 04:07:43,529 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:07:43,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=237505.33333333334, ans=0.0
+2024-07-29 04:07:46,344 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=237518.66666666666, ans=0.0
+2024-07-29 04:07:53,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=237532.0, ans=0.125
+2024-07-29 04:08:07,570 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.50 vs. limit=22.5
+2024-07-29 04:08:12,604 INFO [train.py:1114] (0/4) Epoch 18, batch 4400, loss[loss=0.1832, simple_loss=0.2762, pruned_loss=0.04509, over 4807.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2662, pruned_loss=0.04198, over 941030.29 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0
+2024-07-29 04:08:16,362 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.23 vs. limit=6.0
+2024-07-29 04:08:16,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=237572.0, ans=0.125
+2024-07-29 04:08:23,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=237585.33333333334, ans=0.125
+2024-07-29 04:08:26,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=237598.66666666666, ans=0.2
+2024-07-29 04:08:29,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237598.66666666666, ans=0.125
+2024-07-29 04:08:33,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+01 5.655e+01 6.397e+01 7.492e+01 1.030e+02, threshold=1.279e+02, percent-clipped=0.0
+2024-07-29 04:08:40,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=237625.33333333334, ans=0.0
+2024-07-29 04:08:41,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=237625.33333333334, ans=0.0
+2024-07-29 04:08:44,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237625.33333333334, ans=0.1
+2024-07-29 04:08:44,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=237625.33333333334, ans=0.025
+2024-07-29 04:08:46,775 INFO [train.py:1114] (0/4) Epoch 18, batch 4450, loss[loss=0.1606, simple_loss=0.2413, pruned_loss=0.03995, over 4949.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2665, pruned_loss=0.04225, over 938720.06 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0
+2024-07-29 04:09:10,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237678.66666666666, ans=0.1
+2024-07-29 04:09:10,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237678.66666666666, ans=0.125
+2024-07-29 04:09:19,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=237692.0, ans=0.125
+2024-07-29 04:09:21,653 INFO [train.py:1114] (0/4) Epoch 18, batch 4500, loss[loss=0.1904, simple_loss=0.2822, pruned_loss=0.04926, over 4732.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2667, pruned_loss=0.04191, over 938074.10 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0
+2024-07-29 04:09:21,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=237705.33333333334, ans=0.125
+2024-07-29 04:09:23,215 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:09:33,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=237718.66666666666, ans=0.125
+2024-07-29 04:09:41,556 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=237732.0, ans=0.025
+2024-07-29 04:09:42,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=237732.0, ans=0.0
+2024-07-29 04:09:43,833 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0
+2024-07-29 04:09:50,468 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.651e+01 6.463e+01 7.658e+01 1.183e+02, threshold=1.293e+02, percent-clipped=0.0
+2024-07-29 04:09:50,957 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.05 vs. limit=10.0
+2024-07-29 04:10:13,177 INFO [train.py:1114] (0/4) Epoch 18, batch 4550, loss[loss=0.1493, simple_loss=0.2364, pruned_loss=0.03109, over 4890.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2661, pruned_loss=0.04147, over 940290.83 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0
+2024-07-29 04:10:14,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=237772.0, ans=0.125
+2024-07-29 04:10:14,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.04 vs. limit=15.0
+2024-07-29 04:10:18,088 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.15 vs.
limit=15.0 +2024-07-29 04:10:23,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=237785.33333333334, ans=0.125 +2024-07-29 04:10:25,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=237785.33333333334, ans=0.125 +2024-07-29 04:10:26,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=237798.66666666666, ans=0.0 +2024-07-29 04:10:33,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237812.0, ans=0.125 +2024-07-29 04:10:33,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=237812.0, ans=0.0 +2024-07-29 04:10:39,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=237812.0, ans=0.125 +2024-07-29 04:10:42,223 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=237825.33333333334, ans=0.2 +2024-07-29 04:10:47,600 INFO [train.py:1114] (0/4) Epoch 18, batch 4600, loss[loss=0.1892, simple_loss=0.2703, pruned_loss=0.0541, over 4508.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2658, pruned_loss=0.0417, over 938383.37 frames. ], batch size: 21, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:54,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=237852.0, ans=0.025 +2024-07-29 04:10:57,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=237852.0, ans=0.0 +2024-07-29 04:11:06,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=237865.33333333334, ans=0.015 +2024-07-29 04:11:08,572 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=237878.66666666666, ans=0.125 +2024-07-29 04:11:09,110 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.588e+01 6.056e+01 7.096e+01 1.037e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 04:11:22,405 INFO [train.py:1114] (0/4) Epoch 18, batch 4650, loss[loss=0.2254, simple_loss=0.3188, pruned_loss=0.06604, over 4839.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2667, pruned_loss=0.04194, over 939939.75 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:11:51,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.52 vs. limit=15.0 +2024-07-29 04:11:53,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=237945.33333333334, ans=0.025 +2024-07-29 04:12:04,333 INFO [train.py:1114] (0/4) Epoch 18, batch 4700, loss[loss=0.15, simple_loss=0.2303, pruned_loss=0.03482, over 4703.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2663, pruned_loss=0.04162, over 937400.53 frames. 
], batch size: 11, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:12:10,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237985.33333333334, ans=0.125 +2024-07-29 04:12:11,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.25 vs. limit=15.0 +2024-07-29 04:12:12,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=237985.33333333334, ans=0.05 +2024-07-29 04:12:16,657 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:12:16,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=237985.33333333334, ans=0.0 +2024-07-29 04:12:18,279 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=15.0 +2024-07-29 04:12:18,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237998.66666666666, ans=0.1 +2024-07-29 04:12:24,423 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.692e+01 6.166e+01 6.744e+01 9.680e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 04:12:31,586 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-29 04:12:36,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=238025.33333333334, ans=0.125 +2024-07-29 04:12:41,496 INFO [train.py:1114] (0/4) Epoch 18, batch 4750, loss[loss=0.2252, simple_loss=0.3062, pruned_loss=0.07212, over 4437.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2665, pruned_loss=0.04209, over 935126.32 frames. ], batch size: 21, lr: 4.14e-03, grad_scale: 16.0 +2024-07-29 04:12:42,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=238038.66666666666, ans=0.0 +2024-07-29 04:12:43,113 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=238038.66666666666, ans=0.125 +2024-07-29 04:12:45,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238038.66666666666, ans=0.125 +2024-07-29 04:12:48,052 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.93 vs. limit=6.0 +2024-07-29 04:12:51,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.48 vs. 
limit=15.0 +2024-07-29 04:13:00,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=238065.33333333334, ans=0.0 +2024-07-29 04:13:02,305 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238065.33333333334, ans=0.125 +2024-07-29 04:13:03,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=6.0 +2024-07-29 04:13:04,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=238078.66666666666, ans=0.125 +2024-07-29 04:13:04,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=238078.66666666666, ans=0.025 +2024-07-29 04:13:07,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238078.66666666666, ans=0.125 +2024-07-29 04:13:37,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=238092.0, ans=0.125 +2024-07-29 04:13:44,045 INFO [train.py:1114] (0/4) Epoch 18, batch 4800, loss[loss=0.1725, simple_loss=0.2551, pruned_loss=0.04492, over 4687.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.04195, over 933243.27 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:13:59,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238132.0, ans=0.125 +2024-07-29 04:14:00,916 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:14:01,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=238132.0, ans=0.07 +2024-07-29 04:14:07,344 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.662e+01 6.486e+01 7.810e+01 1.129e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-29 04:14:11,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=238145.33333333334, ans=0.2 +2024-07-29 04:14:14,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238158.66666666666, ans=0.125 +2024-07-29 04:14:26,496 INFO [train.py:1114] (0/4) Epoch 18, batch 4850, loss[loss=0.1815, simple_loss=0.2754, pruned_loss=0.04379, over 4733.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2651, pruned_loss=0.0417, over 932879.20 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:14:37,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238185.33333333334, ans=0.125 +2024-07-29 04:14:40,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=238185.33333333334, ans=0.07 +2024-07-29 04:15:20,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238212.0, ans=0.1 +2024-07-29 04:15:31,245 INFO [train.py:1114] (0/4) Epoch 18, batch 4900, loss[loss=0.1581, simple_loss=0.2517, pruned_loss=0.03232, over 4763.00 frames. 
], tot_loss[loss=0.1736, simple_loss=0.2644, pruned_loss=0.04144, over 934088.89 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:15:37,473 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.03 vs. limit=15.0 +2024-07-29 04:15:44,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=238252.0, ans=0.0 +2024-07-29 04:15:47,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=238265.33333333334, ans=0.0 +2024-07-29 04:15:48,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.84 vs. limit=6.0 +2024-07-29 04:15:51,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=238278.66666666666, ans=0.125 +2024-07-29 04:15:52,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=238278.66666666666, ans=0.1 +2024-07-29 04:15:52,802 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.541e+01 6.118e+01 7.300e+01 1.058e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 04:15:57,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.23 vs. limit=15.0 +2024-07-29 04:15:57,258 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.09 vs. limit=8.0 +2024-07-29 04:15:57,491 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:16:05,555 INFO [train.py:1114] (0/4) Epoch 18, batch 4950, loss[loss=0.2333, simple_loss=0.3175, pruned_loss=0.07455, over 3560.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2657, pruned_loss=0.04228, over 931653.18 frames. ], batch size: 35, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:11,008 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:16:13,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=238318.66666666666, ans=0.125 +2024-07-29 04:16:24,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.99 vs. 
limit=15.0 +2024-07-29 04:16:26,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=238345.33333333334, ans=0.0 +2024-07-29 04:16:27,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=238345.33333333334, ans=0.125 +2024-07-29 04:16:31,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=238345.33333333334, ans=0.2 +2024-07-29 04:16:38,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=238358.66666666666, ans=0.2 +2024-07-29 04:16:39,130 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=238358.66666666666, ans=0.2 +2024-07-29 04:16:40,881 INFO [train.py:1114] (0/4) Epoch 18, batch 5000, loss[loss=0.181, simple_loss=0.2886, pruned_loss=0.03666, over 4668.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2661, pruned_loss=0.04239, over 935606.85 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:52,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=238385.33333333334, ans=0.0 +2024-07-29 04:16:55,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=238398.66666666666, ans=0.02 +2024-07-29 04:16:57,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=238398.66666666666, ans=0.0 +2024-07-29 04:17:01,719 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.523e+01 5.940e+01 6.612e+01 9.274e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 04:17:03,439 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.89 vs. limit=15.0 +2024-07-29 04:17:05,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238412.0, ans=0.125 +2024-07-29 04:17:09,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=238425.33333333334, ans=0.0 +2024-07-29 04:17:14,227 INFO [train.py:1114] (0/4) Epoch 18, batch 5050, loss[loss=0.1454, simple_loss=0.2335, pruned_loss=0.02865, over 4860.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2643, pruned_loss=0.04159, over 938096.71 frames. 
], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:17:16,275 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=238438.66666666666, ans=0.2 +2024-07-29 04:17:16,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238438.66666666666, ans=0.1 +2024-07-29 04:17:21,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=238452.0, ans=0.0 +2024-07-29 04:18:30,986 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238465.33333333334, ans=0.0 +2024-07-29 04:18:53,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=238492.0, ans=0.0 +2024-07-29 04:18:58,366 INFO [train.py:1114] (0/4) Epoch 18, batch 5100, loss[loss=0.1507, simple_loss=0.2444, pruned_loss=0.02849, over 4765.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2661, pruned_loss=0.04238, over 935349.18 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:04,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238505.33333333334, ans=0.125 +2024-07-29 04:19:11,167 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238518.66666666666, ans=0.1 +2024-07-29 04:19:12,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238532.0, ans=0.125 +2024-07-29 04:19:15,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=238532.0, ans=0.0 +2024-07-29 04:19:16,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=238532.0, ans=0.125 +2024-07-29 04:19:21,953 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.577e+01 6.307e+01 7.309e+01 1.155e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:19:24,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=238545.33333333334, ans=0.1 +2024-07-29 04:19:24,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=238545.33333333334, ans=0.0 +2024-07-29 04:19:27,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=238558.66666666666, ans=0.125 +2024-07-29 04:19:29,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238558.66666666666, ans=0.125 +2024-07-29 04:19:34,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=238572.0, ans=0.0 +2024-07-29 04:19:34,575 INFO [train.py:1114] (0/4) Epoch 18, batch 5150, loss[loss=0.2027, simple_loss=0.2951, pruned_loss=0.0552, over 4840.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2675, pruned_loss=0.04322, over 936294.16 frames. 
], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:34,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238572.0, ans=0.125 +2024-07-29 04:19:49,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=238598.66666666666, ans=0.2 +2024-07-29 04:19:49,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=238598.66666666666, ans=0.0 +2024-07-29 04:19:50,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=238598.66666666666, ans=0.025 +2024-07-29 04:20:00,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=238612.0, ans=0.125 +2024-07-29 04:20:03,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238625.33333333334, ans=0.125 +2024-07-29 04:20:04,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238625.33333333334, ans=0.125 +2024-07-29 04:20:04,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=238625.33333333334, ans=0.125 +2024-07-29 04:20:08,821 INFO [train.py:1114] (0/4) Epoch 18, batch 5200, loss[loss=0.1712, simple_loss=0.2678, pruned_loss=0.03727, over 4654.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04278, over 936292.68 frames. ], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:11,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=238638.66666666666, ans=10.0 +2024-07-29 04:20:11,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238638.66666666666, ans=0.125 +2024-07-29 04:20:13,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=238638.66666666666, ans=0.0 +2024-07-29 04:20:26,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=238665.33333333334, ans=0.125 +2024-07-29 04:20:30,411 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.651e+01 6.355e+01 7.516e+01 2.460e+02, threshold=1.271e+02, percent-clipped=1.0 +2024-07-29 04:20:41,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-07-29 04:20:44,401 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238705.33333333334, ans=0.125 +2024-07-29 04:20:44,964 INFO [train.py:1114] (0/4) Epoch 18, batch 5250, loss[loss=0.1611, simple_loss=0.2575, pruned_loss=0.03235, over 4900.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2662, pruned_loss=0.04228, over 936160.12 frames. 
], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:45,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238705.33333333334, ans=0.1 +2024-07-29 04:20:55,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=238718.66666666666, ans=0.0 +2024-07-29 04:21:03,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=238732.0, ans=0.0 +2024-07-29 04:21:10,294 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.17 vs. limit=15.0 +2024-07-29 04:21:18,562 INFO [train.py:1114] (0/4) Epoch 18, batch 5300, loss[loss=0.1645, simple_loss=0.2566, pruned_loss=0.03621, over 4627.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04238, over 934539.07 frames. ], batch size: 16, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:21:32,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.45 vs. limit=10.0 +2024-07-29 04:21:37,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=238798.66666666666, ans=0.025 +2024-07-29 04:21:41,636 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.630e+01 6.188e+01 7.457e+01 1.076e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 04:21:43,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=238812.0, ans=0.07 +2024-07-29 04:22:09,570 INFO [train.py:1114] (0/4) Epoch 18, batch 5350, loss[loss=0.1459, simple_loss=0.2285, pruned_loss=0.03172, over 4560.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04221, over 936468.01 frames. ], batch size: 10, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:22:27,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=238852.0, ans=0.125 +2024-07-29 04:22:43,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=238878.66666666666, ans=0.04949747468305833 +2024-07-29 04:22:43,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=238878.66666666666, ans=0.025 +2024-07-29 04:22:46,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0 +2024-07-29 04:22:58,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=238892.0, ans=0.2 +2024-07-29 04:23:04,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=238905.33333333334, ans=0.125 +2024-07-29 04:23:04,851 INFO [train.py:1114] (0/4) Epoch 18, batch 5400, loss[loss=0.1905, simple_loss=0.2788, pruned_loss=0.05115, over 4248.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2654, pruned_loss=0.04223, over 930877.80 frames. 
], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:23:08,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238905.33333333334, ans=0.125 +2024-07-29 04:23:15,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=238918.66666666666, ans=0.2 +2024-07-29 04:23:15,992 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=238918.66666666666, ans=0.125 +2024-07-29 04:23:17,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=238932.0, ans=0.125 +2024-07-29 04:23:21,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=238932.0, ans=0.0 +2024-07-29 04:23:22,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=238932.0, ans=10.0 +2024-07-29 04:23:23,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=238932.0, ans=0.2 +2024-07-29 04:23:25,933 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.845e+01 6.439e+01 7.513e+01 9.975e+01, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 04:23:28,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238945.33333333334, ans=0.1 +2024-07-29 04:23:30,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238945.33333333334, ans=0.125 +2024-07-29 04:23:38,613 INFO [train.py:1114] (0/4) Epoch 18, batch 5450, loss[loss=0.1782, simple_loss=0.2646, pruned_loss=0.04592, over 4703.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2653, pruned_loss=0.04206, over 933593.31 frames. ], batch size: 11, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:24:01,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239012.0, ans=0.1 +2024-07-29 04:24:06,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=239012.0, ans=0.0 +2024-07-29 04:24:13,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=239038.66666666666, ans=0.2 +2024-07-29 04:24:14,267 INFO [train.py:1114] (0/4) Epoch 18, batch 5500, loss[loss=0.1761, simple_loss=0.2649, pruned_loss=0.04371, over 4185.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2642, pruned_loss=0.04199, over 931128.07 frames. 
], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:24:23,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=239052.0, ans=0.0 +2024-07-29 04:24:36,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=239065.33333333334, ans=0.2 +2024-07-29 04:24:37,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239065.33333333334, ans=0.125 +2024-07-29 04:24:39,145 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.809e+01 5.775e+01 6.562e+01 7.641e+01 1.081e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 04:24:39,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=239078.66666666666, ans=0.125 +2024-07-29 04:24:53,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=239092.0, ans=0.125 +2024-07-29 04:25:02,896 INFO [train.py:1114] (0/4) Epoch 18, batch 5550, loss[loss=0.1618, simple_loss=0.2564, pruned_loss=0.03356, over 4721.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2643, pruned_loss=0.04183, over 933404.62 frames. ], batch size: 12, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:25:12,004 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239118.66666666666, ans=0.0 +2024-07-29 04:25:17,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239132.0, ans=0.1 +2024-07-29 04:25:17,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=239132.0, ans=0.125 +2024-07-29 04:25:20,832 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.78 vs. limit=6.0 +2024-07-29 04:25:21,341 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=239132.0, ans=0.5 +2024-07-29 04:25:29,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=239145.33333333334, ans=0.0 +2024-07-29 04:25:32,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239158.66666666666, ans=0.1 +2024-07-29 04:25:38,218 INFO [train.py:1114] (0/4) Epoch 18, batch 5600, loss[loss=0.1671, simple_loss=0.2644, pruned_loss=0.03491, over 4738.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2645, pruned_loss=0.04184, over 934017.01 frames. 
], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:25:41,174 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239172.0, ans=0.125 +2024-07-29 04:25:43,858 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:25:43,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=239172.0, ans=0.04949747468305833 +2024-07-29 04:25:59,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=239212.0, ans=0.0 +2024-07-29 04:25:59,986 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.791e+01 6.471e+01 7.649e+01 1.137e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-29 04:26:10,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=239225.33333333334, ans=0.5 +2024-07-29 04:26:12,757 INFO [train.py:1114] (0/4) Epoch 18, batch 5650, loss[loss=0.1844, simple_loss=0.2723, pruned_loss=0.04821, over 4551.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2641, pruned_loss=0.04146, over 936668.38 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:26:21,211 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=12.0 +2024-07-29 04:26:32,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=239278.66666666666, ans=0.125 +2024-07-29 04:26:40,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.41 vs. limit=10.0 +2024-07-29 04:26:41,148 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=239292.0, ans=0.125 +2024-07-29 04:26:41,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=239292.0, ans=0.125 +2024-07-29 04:26:47,110 INFO [train.py:1114] (0/4) Epoch 18, batch 5700, loss[loss=0.182, simple_loss=0.2817, pruned_loss=0.04117, over 4697.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.265, pruned_loss=0.04187, over 937406.38 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:26:47,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=239305.33333333334, ans=0.0 +2024-07-29 04:26:47,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=239305.33333333334, ans=0.5 +2024-07-29 04:27:09,679 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.703e+01 6.369e+01 7.336e+01 1.206e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-29 04:27:10,723 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.88 vs. 
limit=22.5 +2024-07-29 04:27:12,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239345.33333333334, ans=0.125 +2024-07-29 04:27:22,296 INFO [train.py:1114] (0/4) Epoch 18, batch 5750, loss[loss=0.187, simple_loss=0.2742, pruned_loss=0.04991, over 4704.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2648, pruned_loss=0.04184, over 937972.91 frames. ], batch size: 19, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:27:26,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239372.0, ans=0.125 +2024-07-29 04:27:26,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=239372.0, ans=0.125 +2024-07-29 04:27:30,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.65 vs. limit=15.0 +2024-07-29 04:27:45,250 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:27:51,443 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=239425.33333333334, ans=0.0 +2024-07-29 04:27:57,389 INFO [train.py:1114] (0/4) Epoch 18, batch 5800, loss[loss=0.1945, simple_loss=0.2852, pruned_loss=0.05185, over 4694.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2657, pruned_loss=0.04201, over 937479.48 frames. ], batch size: 19, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:28:02,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239438.66666666666, ans=0.0 +2024-07-29 04:28:04,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-07-29 04:28:18,261 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.596e+01 6.128e+01 6.728e+01 1.003e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-29 04:28:21,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=239478.66666666666, ans=0.2 +2024-07-29 04:28:25,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=239492.0, ans=0.2 +2024-07-29 04:28:30,893 INFO [train.py:1114] (0/4) Epoch 18, batch 5850, loss[loss=0.1773, simple_loss=0.2676, pruned_loss=0.04345, over 4546.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2656, pruned_loss=0.04207, over 938047.76 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:28:36,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=239505.33333333334, ans=0.0 +2024-07-29 04:28:41,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=239518.66666666666, ans=0.2 +2024-07-29 04:28:47,827 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.87 vs. 
limit=15.0 +2024-07-29 04:29:02,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=239558.66666666666, ans=0.125 +2024-07-29 04:29:02,738 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=239558.66666666666, ans=0.125 +2024-07-29 04:29:04,792 INFO [train.py:1114] (0/4) Epoch 18, batch 5900, loss[loss=0.1901, simple_loss=0.2733, pruned_loss=0.05347, over 4686.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2654, pruned_loss=0.04194, over 937829.43 frames. ], batch size: 15, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:29:05,614 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:29:07,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=239572.0, ans=0.95 +2024-07-29 04:29:13,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0 +2024-07-29 04:29:27,149 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.760e+01 6.303e+01 7.095e+01 1.028e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:29:30,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=239612.0, ans=0.025 +2024-07-29 04:29:30,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239612.0, ans=0.125 +2024-07-29 04:29:32,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239625.33333333334, ans=0.125 +2024-07-29 04:29:33,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=239625.33333333334, ans=0.125 +2024-07-29 04:29:34,837 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.33 vs. limit=15.0 +2024-07-29 04:29:52,802 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.11 vs. limit=15.0 +2024-07-29 04:29:53,213 INFO [train.py:1114] (0/4) Epoch 18, batch 5950, loss[loss=0.2029, simple_loss=0.2943, pruned_loss=0.05579, over 4680.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2659, pruned_loss=0.04183, over 939850.37 frames. 
], batch size: 15, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:30:09,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=239638.66666666666, ans=0.0 +2024-07-29 04:30:14,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=239652.0, ans=0.0 +2024-07-29 04:30:43,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239665.33333333334, ans=0.1 +2024-07-29 04:30:46,607 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:30:55,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=239692.0, ans=0.125 +2024-07-29 04:30:59,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=239692.0, ans=0.125 +2024-07-29 04:31:01,606 INFO [train.py:1114] (0/4) Epoch 18, batch 6000, loss[loss=0.1808, simple_loss=0.2742, pruned_loss=0.04373, over 4174.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2658, pruned_loss=0.04178, over 937461.54 frames. ], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:31:01,606 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 04:31:13,507 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1615, simple_loss=0.2636, pruned_loss=0.0297, over 944034.00 frames. +2024-07-29 04:31:13,508 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 04:31:29,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=239732.0, ans=0.125 +2024-07-29 04:31:36,680 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 5.758e+01 6.298e+01 7.334e+01 1.056e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 04:31:40,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239745.33333333334, ans=0.1 +2024-07-29 04:31:49,544 INFO [train.py:1114] (0/4) Epoch 18, batch 6050, loss[loss=0.1695, simple_loss=0.2599, pruned_loss=0.0396, over 4782.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.042, over 938594.88 frames. 
], batch size: 12, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:31:50,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239772.0, ans=0.125 +2024-07-29 04:31:52,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=239772.0, ans=0.125 +2024-07-29 04:31:52,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239772.0, ans=0.0 +2024-07-29 04:31:52,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239772.0, ans=0.125 +2024-07-29 04:31:56,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=239785.33333333334, ans=0.0 +2024-07-29 04:31:57,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=239785.33333333334, ans=0.07 +2024-07-29 04:32:11,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.86 vs. limit=22.5 +2024-07-29 04:32:13,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=239812.0, ans=0.125 +2024-07-29 04:32:40,017 INFO [train.py:1114] (0/4) Epoch 18, batch 6100, loss[loss=0.1981, simple_loss=0.2827, pruned_loss=0.05671, over 4668.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2637, pruned_loss=0.0415, over 937811.87 frames. ], batch size: 15, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:32:52,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0 +2024-07-29 04:32:55,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=239838.66666666666, ans=0.025 +2024-07-29 04:33:01,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=239852.0, ans=0.0 +2024-07-29 04:33:04,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=239865.33333333334, ans=0.1 +2024-07-29 04:33:09,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.90 vs. limit=12.0 +2024-07-29 04:33:10,855 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.504e+01 6.224e+01 7.220e+01 1.027e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 04:33:17,751 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:33:25,617 INFO [train.py:1114] (0/4) Epoch 18, batch 6150, loss[loss=0.2097, simple_loss=0.2859, pruned_loss=0.06676, over 3243.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2646, pruned_loss=0.04205, over 936119.80 frames. 
], batch size: 35, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:33:32,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=239918.66666666666, ans=0.0 +2024-07-29 04:33:32,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=239918.66666666666, ans=0.5 +2024-07-29 04:33:42,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239932.0, ans=0.125 +2024-07-29 04:33:49,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239945.33333333334, ans=0.0 +2024-07-29 04:33:50,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=239945.33333333334, ans=0.0 +2024-07-29 04:33:59,193 INFO [train.py:1114] (0/4) Epoch 18, batch 6200, loss[loss=0.1905, simple_loss=0.2904, pruned_loss=0.0453, over 4733.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04254, over 935367.66 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:34:02,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.28 vs. limit=6.0 +2024-07-29 04:34:14,620 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=239998.66666666666, ans=15.0 +2024-07-29 04:34:15,121 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-180000.pt +2024-07-29 04:34:19,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=239998.66666666666, ans=0.0 +2024-07-29 04:34:44,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=240012.0, ans=0.125 +2024-07-29 04:34:45,000 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.597e+01 6.193e+01 7.328e+01 9.537e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 04:34:45,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=240012.0, ans=0.0 +2024-07-29 04:34:50,751 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:34:51,467 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=240025.33333333334, ans=0.125 +2024-07-29 04:34:51,538 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240025.33333333334, ans=0.1 +2024-07-29 04:34:58,299 INFO [train.py:1114] (0/4) Epoch 18, batch 6250, loss[loss=0.2135, simple_loss=0.2975, pruned_loss=0.06479, over 4804.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2658, pruned_loss=0.04229, over 932237.05 frames. 
], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:35:03,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=240038.66666666666, ans=0.0 +2024-07-29 04:35:09,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240052.0, ans=0.0 +2024-07-29 04:35:14,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240065.33333333334, ans=0.125 +2024-07-29 04:35:21,254 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.93 vs. limit=22.5 +2024-07-29 04:35:25,809 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0 +2024-07-29 04:35:30,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-07-29 04:35:33,227 INFO [train.py:1114] (0/4) Epoch 18, batch 6300, loss[loss=0.1518, simple_loss=0.2348, pruned_loss=0.03444, over 4560.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2658, pruned_loss=0.04219, over 929389.02 frames. ], batch size: 10, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:35:36,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=240105.33333333334, ans=0.125 +2024-07-29 04:35:44,718 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=240118.66666666666, ans=0.035 +2024-07-29 04:35:58,644 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.639e+01 6.264e+01 7.118e+01 1.029e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 04:36:04,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. limit=15.0 +2024-07-29 04:36:08,665 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=240158.66666666666, ans=0.0 +2024-07-29 04:36:10,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=240158.66666666666, ans=0.125 +2024-07-29 04:36:11,344 INFO [train.py:1114] (0/4) Epoch 18, batch 6350, loss[loss=0.1776, simple_loss=0.2684, pruned_loss=0.04346, over 4563.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2647, pruned_loss=0.04173, over 933508.85 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:36:18,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=240172.0, ans=0.0 +2024-07-29 04:36:23,016 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-07-29 04:36:40,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.95 vs. 
limit=10.0 +2024-07-29 04:36:41,824 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:36:50,168 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-07-29 04:37:05,087 INFO [train.py:1114] (0/4) Epoch 18, batch 6400, loss[loss=0.171, simple_loss=0.2739, pruned_loss=0.03399, over 4632.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.04193, over 934816.43 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:37:05,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=240238.66666666666, ans=0.125 +2024-07-29 04:37:08,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=240238.66666666666, ans=0.2 +2024-07-29 04:37:16,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240238.66666666666, ans=0.125 +2024-07-29 04:37:17,753 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=240238.66666666666, ans=0.0 +2024-07-29 04:37:55,456 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.669e+01 6.278e+01 7.394e+01 9.691e+01, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 04:37:59,631 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.71 vs. limit=6.0 +2024-07-29 04:38:01,608 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=12.0 +2024-07-29 04:38:04,088 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240292.0, ans=0.1 +2024-07-29 04:38:07,967 INFO [train.py:1114] (0/4) Epoch 18, batch 6450, loss[loss=0.217, simple_loss=0.3075, pruned_loss=0.06322, over 4546.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.04246, over 938776.71 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:38:08,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240305.33333333334, ans=0.125 +2024-07-29 04:38:08,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=240305.33333333334, ans=0.125 +2024-07-29 04:38:25,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.64 vs. limit=10.0 +2024-07-29 04:38:51,179 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:38:53,612 INFO [train.py:1114] (0/4) Epoch 18, batch 6500, loss[loss=0.2155, simple_loss=0.2965, pruned_loss=0.06722, over 3469.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2664, pruned_loss=0.04218, over 940275.44 frames. 
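The `train.py:1114` summaries report both the current batch's loss (`loss[..., over N frames.]`) and a smoothed `tot_loss` accumulated over roughly 930k frames. One plausible reading, suggested by the fractional frame counts (e.g. 933508.85), is a frame-weighted average with exponential forgetting; the sketch below assumes that and may not match the actual tracker in train.py:

```python
class RunningLoss:
    def __init__(self, decay: float = 0.999):  # decay constant is an assumption
        self.decay = decay
        self.loss_sum = 0.0  # decayed sum of per-frame loss * frames
        self.frames = 0.0    # decayed frame count ("over N frames" in the log)

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames  # the value printed as tot_loss
```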
], batch size: 35, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:38:54,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=240372.0, ans=0.025 +2024-07-29 04:39:12,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=240412.0, ans=0.0 +2024-07-29 04:39:13,959 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.507e+01 6.062e+01 6.906e+01 9.828e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 04:39:27,370 INFO [train.py:1114] (0/4) Epoch 18, batch 6550, loss[loss=0.152, simple_loss=0.2363, pruned_loss=0.0339, over 4789.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2651, pruned_loss=0.04128, over 943074.09 frames. ], batch size: 11, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:39:34,969 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.51 vs. limit=22.5 +2024-07-29 04:39:49,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=240465.33333333334, ans=0.0 +2024-07-29 04:40:06,033 INFO [train.py:1114] (0/4) Epoch 18, batch 6600, loss[loss=0.1868, simple_loss=0.2769, pruned_loss=0.04838, over 4932.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2651, pruned_loss=0.04155, over 944893.55 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:40:06,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=240505.33333333334, ans=0.125 +2024-07-29 04:40:10,821 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=240505.33333333334, ans=0.2 +2024-07-29 04:40:12,903 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=240518.66666666666, ans=0.2 +2024-07-29 04:40:25,834 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:40:26,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.49 vs. limit=15.0 +2024-07-29 04:40:26,781 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0 +2024-07-29 04:40:28,017 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240532.0, ans=0.125 +2024-07-29 04:40:34,505 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.621e+01 6.292e+01 7.270e+01 1.272e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-29 04:40:36,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.56 vs. limit=15.0 +2024-07-29 04:40:40,021 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=240558.66666666666, ans=0.025 +2024-07-29 04:40:47,141 INFO [train.py:1114] (0/4) Epoch 18, batch 6650, loss[loss=0.1968, simple_loss=0.2875, pruned_loss=0.05306, over 4593.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2641, pruned_loss=0.04087, over 943587.17 frames. 
], batch size: 17, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:40:48,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240572.0, ans=0.0 +2024-07-29 04:41:01,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240598.66666666666, ans=0.1 +2024-07-29 04:41:01,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=240598.66666666666, ans=0.025 +2024-07-29 04:41:02,201 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.12 vs. limit=15.0 +2024-07-29 04:41:08,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240612.0, ans=0.1 +2024-07-29 04:41:24,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=240625.33333333334, ans=22.5 +2024-07-29 04:41:26,559 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0 +2024-07-29 04:41:29,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=240638.66666666666, ans=0.0 +2024-07-29 04:41:30,166 INFO [train.py:1114] (0/4) Epoch 18, batch 6700, loss[loss=0.1933, simple_loss=0.2852, pruned_loss=0.05069, over 4699.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2643, pruned_loss=0.04092, over 942904.07 frames. ], batch size: 19, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:41:30,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=240638.66666666666, ans=0.025 +2024-07-29 04:41:40,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240652.0, ans=0.125 +2024-07-29 04:41:41,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240652.0, ans=0.1 +2024-07-29 04:41:44,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.65 vs. 
limit=15.0 +2024-07-29 04:41:48,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=240665.33333333334, ans=0.125 +2024-07-29 04:41:51,431 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.784e+01 6.386e+01 7.165e+01 1.123e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 04:41:58,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=240692.0, ans=0.125 +2024-07-29 04:41:58,984 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=240692.0, ans=0.2 +2024-07-29 04:41:59,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=240692.0, ans=0.125 +2024-07-29 04:42:01,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240692.0, ans=0.125 +2024-07-29 04:42:04,335 INFO [train.py:1114] (0/4) Epoch 18, batch 6750, loss[loss=0.1875, simple_loss=0.2872, pruned_loss=0.04391, over 4264.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2651, pruned_loss=0.04138, over 940353.32 frames. ], batch size: 25, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:42:09,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=240705.33333333334, ans=0.2 +2024-07-29 04:42:20,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=240732.0, ans=0.125 +2024-07-29 04:42:21,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=22.5 +2024-07-29 04:42:26,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=240745.33333333334, ans=0.125 +2024-07-29 04:42:33,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240758.66666666666, ans=0.1 +2024-07-29 04:42:38,951 INFO [train.py:1114] (0/4) Epoch 18, batch 6800, loss[loss=0.1928, simple_loss=0.2785, pruned_loss=0.05352, over 4632.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2658, pruned_loss=0.04182, over 938764.38 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:42:57,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=240798.66666666666, ans=0.125 +2024-07-29 04:42:59,656 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.569e+01 6.060e+01 6.382e+01 1.017e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 04:43:11,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=240825.33333333334, ans=0.125 +2024-07-29 04:43:13,830 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:43:14,294 INFO [train.py:1114] (0/4) Epoch 18, batch 6850, loss[loss=0.1605, simple_loss=0.2554, pruned_loss=0.03281, over 4703.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2657, pruned_loss=0.04171, over 940318.81 frames. 
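The recurring `optim.py:487` warnings print five statistics of recent gradient norms (apparently min/25%/50%/75%/max) plus a clipping threshold; throughout this log the threshold equals `Clipping_scale` (2.0) times the median, e.g. median 6.386e+01 giving threshold 1.277e+02 just above. A sketch of bookkeeping consistent with those numbers — not icefall's actual optimizer code:

```python
import torch

class GradNormClipper:
    """Track recent gradient norms, report their quartiles, and clip any
    batch whose norm exceeds clipping_scale times the running median."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
        self.scale = clipping_scale
        self.window = window
        self.history: list[float] = []
        self.clipped = 0
        self.seen = 0

    def __call__(self, params) -> None:
        grads = [p.grad.reshape(-1) for p in params if p.grad is not None]
        norm = float(torch.cat(grads).norm())
        self.history.append(norm)
        del self.history[:-self.window]  # keep only the recent window
        q = torch.tensor(self.history).quantile(
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * float(q[2])  # 2x the median norm
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        print(f"grad-norm quartiles {q.tolist()}, threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.clipped / self.seen:.1f}")
```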
], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:43:17,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=240838.66666666666, ans=0.125 +2024-07-29 04:43:19,003 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240838.66666666666, ans=0.125 +2024-07-29 04:43:19,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=240838.66666666666, ans=0.025 +2024-07-29 04:43:19,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=240838.66666666666, ans=0.1 +2024-07-29 04:43:23,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240852.0, ans=0.1 +2024-07-29 04:43:34,888 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.45 vs. limit=22.5 +2024-07-29 04:43:37,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240878.66666666666, ans=0.0 +2024-07-29 04:43:38,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=240878.66666666666, ans=22.5 +2024-07-29 04:43:44,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=240892.0, ans=0.5 +2024-07-29 04:43:46,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=240892.0, ans=0.025 +2024-07-29 04:43:48,573 INFO [train.py:1114] (0/4) Epoch 18, batch 6900, loss[loss=0.1746, simple_loss=0.2699, pruned_loss=0.03964, over 4962.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2657, pruned_loss=0.04171, over 942626.25 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:43:49,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=240905.33333333334, ans=0.125 +2024-07-29 04:43:56,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=240918.66666666666, ans=0.025 +2024-07-29 04:44:09,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0 +2024-07-29 04:44:09,863 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:44:11,092 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.810e+01 6.480e+01 7.498e+01 1.027e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-29 04:44:14,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=240945.33333333334, ans=0.125 +2024-07-29 04:44:14,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=240945.33333333334, ans=0.125 +2024-07-29 04:44:18,125 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.33 vs. 
limit=15.0 +2024-07-29 04:44:24,891 INFO [train.py:1114] (0/4) Epoch 18, batch 6950, loss[loss=0.1438, simple_loss=0.2232, pruned_loss=0.03222, over 4537.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2656, pruned_loss=0.04229, over 940289.40 frames. ], batch size: 10, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:44:31,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=240985.33333333334, ans=0.125 +2024-07-29 04:44:32,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.82 vs. limit=12.0 +2024-07-29 04:44:39,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240998.66666666666, ans=0.1 +2024-07-29 04:44:42,783 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.97 vs. limit=6.0 +2024-07-29 04:44:44,792 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.29 vs. limit=15.0 +2024-07-29 04:44:46,156 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.19 vs. limit=15.0 +2024-07-29 04:44:56,606 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=241025.33333333334, ans=0.125 +2024-07-29 04:44:59,692 INFO [train.py:1114] (0/4) Epoch 18, batch 7000, loss[loss=0.1577, simple_loss=0.2464, pruned_loss=0.03454, over 4598.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2638, pruned_loss=0.04151, over 938381.75 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:45:00,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241038.66666666666, ans=0.125 +2024-07-29 04:45:09,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=241052.0, ans=0.125 +2024-07-29 04:45:11,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=241052.0, ans=0.02 +2024-07-29 04:45:11,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=241052.0, ans=0.0 +2024-07-29 04:45:15,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=241065.33333333334, ans=0.125 +2024-07-29 04:45:20,297 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.764e+01 6.515e+01 7.633e+01 1.207e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-29 04:45:21,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241078.66666666666, ans=0.125 +2024-07-29 04:45:21,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=241078.66666666666, ans=0.125 +2024-07-29 04:45:26,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=241092.0, ans=0.125 +2024-07-29 04:45:33,139 INFO [train.py:1114] (0/4) Epoch 18, batch 7050, loss[loss=0.1609, simple_loss=0.2539, pruned_loss=0.03396, over 4741.00 frames. 
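Many of the scheduled values above are balancer parameters (`prob`, `min_positive`, `min_abs`, `max_abs`). A balancer steers per-channel activation statistics toward the configured bounds during backprop; what follows is only a conceptual sketch of such a penalty, with defaults taken from values seen in this log (`max_abs` ans=10.0) or assumed, not the autograd-based original:

```python
import torch

def balancer_penalty(x: torch.Tensor,
                     min_positive: float = 0.05,  # assumed default
                     min_abs: float = 0.2,        # assumed default
                     max_abs: float = 10.0) -> torch.Tensor:
    # x: (frames, channels). Penalize channels whose fraction of positive
    # values or mean magnitude drifts outside the configured band; scaled
    # small and added to the loss, this nudges statistics back in range.
    frac_pos = (x > 0).float().mean(dim=0)
    mean_abs = x.abs().mean(dim=0)
    return ((min_positive - frac_pos).clamp(min=0).sum()
            + (min_abs - mean_abs).clamp(min=0).sum()
            + (mean_abs - max_abs).clamp(min=0).sum())
```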
], tot_loss[loss=0.1728, simple_loss=0.2635, pruned_loss=0.04107, over 941820.47 frames. ], batch size: 19, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:45:38,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=241105.33333333334, ans=0.125 +2024-07-29 04:45:39,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=241118.66666666666, ans=0.125 +2024-07-29 04:45:48,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=241132.0, ans=0.0 +2024-07-29 04:45:51,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=241132.0, ans=0.035 +2024-07-29 04:45:54,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=241145.33333333334, ans=0.125 +2024-07-29 04:46:06,963 INFO [train.py:1114] (0/4) Epoch 18, batch 7100, loss[loss=0.2036, simple_loss=0.287, pruned_loss=0.06008, over 4811.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04105, over 936682.48 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:46:13,129 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=241172.0, ans=0.125 +2024-07-29 04:46:23,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=241198.66666666666, ans=0.0 +2024-07-29 04:46:28,128 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.18 vs. limit=15.0 +2024-07-29 04:46:28,380 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.554e+01 6.044e+01 6.901e+01 9.600e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-29 04:46:38,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241225.33333333334, ans=0.125 +2024-07-29 04:46:40,688 INFO [train.py:1114] (0/4) Epoch 18, batch 7150, loss[loss=0.1572, simple_loss=0.2435, pruned_loss=0.03542, over 4446.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2627, pruned_loss=0.04059, over 937429.37 frames. ], batch size: 21, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:47:24,579 INFO [train.py:1114] (0/4) Epoch 18, batch 7200, loss[loss=0.2156, simple_loss=0.3044, pruned_loss=0.06339, over 4795.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2647, pruned_loss=0.04129, over 938050.03 frames. 
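The `Whitening:` lines compare a per-module statistic against a scheduled limit (e.g. `metric=9.99 vs. limit=15.0`). A sketch of a whitening metric with the right fixed point, modeled on the idea in scaling.py but not guaranteed to match it exactly: it equals 1.0 when the per-group feature covariance is proportional to the identity (fully "white") and grows as the eigenvalue spectrum becomes uneven, which is what the `vs. limit` comparison tracks:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    x2 = x.reshape(-1, x.shape[-1])
    num_frames, num_channels = x2.shape
    per_ch = num_channels // num_groups
    xg = x2.reshape(num_frames, num_groups, per_ch).transpose(0, 1)
    c = xg.transpose(1, 2) @ xg / num_frames          # per-group covariance
    trace_c = c.diagonal(dim1=1, dim2=2).sum(-1)      # trace(C)
    trace_c2 = (c * c.transpose(1, 2)).sum(dim=(1, 2))  # trace(C^2)
    # per_ch * tr(C^2) / tr(C)^2 == 1.0 iff C is a multiple of the identity
    return float((per_ch * trace_c2 / trace_c.pow(2)).mean())
```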
], batch size: 15, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:47:29,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=241305.33333333334, ans=0.125 +2024-07-29 04:47:38,619 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=241332.0, ans=0.125 +2024-07-29 04:47:44,719 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241345.33333333334, ans=0.125 +2024-07-29 04:47:45,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 5.707e+01 6.356e+01 7.350e+01 1.020e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 04:47:48,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=241345.33333333334, ans=0.0 +2024-07-29 04:47:48,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.91 vs. limit=15.0 +2024-07-29 04:47:57,749 INFO [train.py:1114] (0/4) Epoch 18, batch 7250, loss[loss=0.1595, simple_loss=0.2452, pruned_loss=0.03689, over 4857.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2643, pruned_loss=0.04118, over 939359.63 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:48:17,203 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=241398.66666666666, ans=0.0 +2024-07-29 04:48:24,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=241412.0, ans=0.125 +2024-07-29 04:48:30,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=241425.33333333334, ans=0.125 +2024-07-29 04:48:32,928 INFO [train.py:1114] (0/4) Epoch 18, batch 7300, loss[loss=0.1514, simple_loss=0.2435, pruned_loss=0.02972, over 4852.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2637, pruned_loss=0.04069, over 939627.53 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:48:33,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=241438.66666666666, ans=0.07 +2024-07-29 04:48:40,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=241452.0, ans=0.0 +2024-07-29 04:48:43,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=241452.0, ans=0.025 +2024-07-29 04:48:46,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241465.33333333334, ans=0.125 +2024-07-29 04:48:47,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.06 vs. 
limit=6.0 +2024-07-29 04:48:48,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=241465.33333333334, ans=0.125 +2024-07-29 04:48:53,890 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.415e+01 6.086e+01 6.711e+01 9.900e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 04:48:54,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241478.66666666666, ans=0.125 +2024-07-29 04:49:11,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=241492.0, ans=0.125 +2024-07-29 04:49:14,861 INFO [train.py:1114] (0/4) Epoch 18, batch 7350, loss[loss=0.1773, simple_loss=0.2642, pruned_loss=0.04525, over 4641.00 frames. ], tot_loss[loss=0.173, simple_loss=0.264, pruned_loss=0.04098, over 938898.73 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:49:15,329 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.23 vs. limit=15.0 +2024-07-29 04:49:35,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=241532.0, ans=0.2 +2024-07-29 04:49:36,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241532.0, ans=0.125 +2024-07-29 04:49:37,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241532.0, ans=0.125 +2024-07-29 04:49:38,226 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241532.0, ans=0.0 +2024-07-29 04:49:46,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=241558.66666666666, ans=0.2 +2024-07-29 04:49:58,992 INFO [train.py:1114] (0/4) Epoch 18, batch 7400, loss[loss=0.1733, simple_loss=0.2729, pruned_loss=0.03684, over 4694.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2658, pruned_loss=0.04123, over 940213.42 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:50:02,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241572.0, ans=0.125 +2024-07-29 04:50:20,669 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.78 vs. limit=15.0 +2024-07-29 04:50:22,054 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.627e+01 6.295e+01 7.057e+01 1.550e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-29 04:50:32,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=241625.33333333334, ans=0.2 +2024-07-29 04:50:33,812 INFO [train.py:1114] (0/4) Epoch 18, batch 7450, loss[loss=0.1433, simple_loss=0.2275, pruned_loss=0.02956, over 4618.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2643, pruned_loss=0.04084, over 937472.47 frames. 
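Several scheduled values above are skip rates (`attention_skip_rate`, `conv_skip_rate`, `bypass.skip_rate`), most of which have annealed to 0.0 by this point in training. The idea is layerdrop-style stochastic bypassing of sub-modules; a generic sketch with illustrative names, not the repo's actual bypass logic:

```python
import torch
import torch.nn as nn

class SkippableResidual(nn.Module):
    """Wraps a sub-module so that, during training, the whole batch skips it
    with probability skip_rate; at skip_rate=0.0 it is a plain residual."""

    def __init__(self, module: nn.Module):
        super().__init__()
        self.module = module

    def forward(self, x: torch.Tensor, skip_rate: float) -> torch.Tensor:
        if self.training and float(torch.rand(())) < skip_rate:
            return x                  # bypass: identity for this batch
        return x + self.module(x)    # normal residual path
```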
], batch size: 11, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:50:35,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=241638.66666666666, ans=0.2 +2024-07-29 04:50:35,553 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=15.0 +2024-07-29 04:51:00,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=241692.0, ans=0.0 +2024-07-29 04:51:02,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=241692.0, ans=0.0 +2024-07-29 04:51:06,926 INFO [train.py:1114] (0/4) Epoch 18, batch 7500, loss[loss=0.2567, simple_loss=0.3267, pruned_loss=0.0934, over 3508.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2643, pruned_loss=0.04099, over 935842.91 frames. ], batch size: 35, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:51:12,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=241705.33333333334, ans=0.125 +2024-07-29 04:51:25,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=241732.0, ans=0.125 +2024-07-29 04:51:28,216 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.735e+01 6.333e+01 6.793e+01 1.076e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 04:51:38,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=241758.66666666666, ans=0.025 +2024-07-29 04:51:40,202 INFO [train.py:1114] (0/4) Epoch 18, batch 7550, loss[loss=0.1851, simple_loss=0.2738, pruned_loss=0.04825, over 4662.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2658, pruned_loss=0.0414, over 935628.77 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:51:53,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241772.0, ans=0.125 +2024-07-29 04:51:55,272 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=15.0 +2024-07-29 04:51:57,520 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=241785.33333333334, ans=0.125 +2024-07-29 04:52:00,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=241785.33333333334, ans=0.0 +2024-07-29 04:52:16,050 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241825.33333333334, ans=0.1 +2024-07-29 04:52:19,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=241825.33333333334, ans=0.2 +2024-07-29 04:52:20,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241825.33333333334, ans=0.125 +2024-07-29 04:52:21,661 INFO [train.py:1114] (0/4) Epoch 18, batch 7600, loss[loss=0.1882, simple_loss=0.2772, pruned_loss=0.04964, over 4818.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2653, pruned_loss=0.04115, over 937718.49 frames. 
], batch size: 14, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:52:26,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241838.66666666666, ans=0.1 +2024-07-29 04:52:28,498 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.11 vs. limit=6.0 +2024-07-29 04:52:30,447 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=15.0 +2024-07-29 04:52:33,047 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241852.0, ans=0.1 +2024-07-29 04:52:43,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.592e+01 6.086e+01 6.807e+01 8.936e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 04:52:43,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=241878.66666666666, ans=0.0 +2024-07-29 04:52:55,130 INFO [train.py:1114] (0/4) Epoch 18, batch 7650, loss[loss=0.1608, simple_loss=0.2392, pruned_loss=0.04125, over 4940.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2642, pruned_loss=0.04082, over 936358.58 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:52:55,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.55 vs. limit=22.5 +2024-07-29 04:52:56,169 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=15.0 +2024-07-29 04:53:01,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241918.66666666666, ans=0.125 +2024-07-29 04:53:12,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241932.0, ans=0.1 +2024-07-29 04:53:16,183 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=241945.33333333334, ans=0.0 +2024-07-29 04:53:21,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241945.33333333334, ans=0.125 +2024-07-29 04:53:23,866 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=12.0 +2024-07-29 04:53:28,757 INFO [train.py:1114] (0/4) Epoch 18, batch 7700, loss[loss=0.1609, simple_loss=0.26, pruned_loss=0.03093, over 4696.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2659, pruned_loss=0.04153, over 933619.15 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:53:34,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=241985.33333333334, ans=0.0 +2024-07-29 04:53:43,555 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:53:44,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.36 vs. 
limit=15.0 +2024-07-29 04:53:45,345 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. limit=6.0 +2024-07-29 04:53:48,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241998.66666666666, ans=0.125 +2024-07-29 04:53:52,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.527e+01 6.014e+01 6.715e+01 9.821e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-29 04:53:53,308 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.75 vs. limit=10.0 +2024-07-29 04:53:53,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242012.0, ans=0.125 +2024-07-29 04:54:03,563 INFO [train.py:1114] (0/4) Epoch 18, batch 7750, loss[loss=0.1928, simple_loss=0.2911, pruned_loss=0.04723, over 4930.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2664, pruned_loss=0.04164, over 935181.77 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:54:04,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=242038.66666666666, ans=0.2 +2024-07-29 04:54:14,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.41 vs. limit=22.5 +2024-07-29 04:54:29,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=242078.66666666666, ans=0.09899494936611666 +2024-07-29 04:54:30,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242078.66666666666, ans=0.1 +2024-07-29 04:54:35,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=242092.0, ans=0.125 +2024-07-29 04:54:36,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=242092.0, ans=0.2 +2024-07-29 04:54:39,127 INFO [train.py:1114] (0/4) Epoch 18, batch 7800, loss[loss=0.187, simple_loss=0.2795, pruned_loss=0.04727, over 4656.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2662, pruned_loss=0.04154, over 936935.58 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:54:44,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=242105.33333333334, ans=0.0 +2024-07-29 04:54:46,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=242118.66666666666, ans=0.125 +2024-07-29 04:55:00,042 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.583e+01 6.063e+01 6.593e+01 8.807e+01, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 04:55:28,053 INFO [train.py:1114] (0/4) Epoch 18, batch 7850, loss[loss=0.1436, simple_loss=0.2269, pruned_loss=0.03011, over 4527.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2654, pruned_loss=0.04106, over 935082.70 frames. 
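For anyone reducing this log to a training curve, the `train.py:1114` records are regular enough to parse with a single expression. A small self-contained helper (editorial, not part of the repo):

```python
import re

PATTERN = re.compile(r"Epoch (\d+), batch (\d+),.*?tot_loss\[loss=([\d.]+),")

def parse_tot_loss(log_text: str):
    """Return (epoch, batch, tot_loss) triples from train.py summary lines."""
    return [(int(e), int(b), float(l))
            for e, b, l in PATTERN.findall(log_text)]

# e.g. the last summary in this excerpt parses to (18, 8950, 0.1735)
```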
], batch size: 10, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:06,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=242212.0, ans=0.125 +2024-07-29 04:56:08,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242212.0, ans=0.1 +2024-07-29 04:56:18,464 INFO [train.py:1114] (0/4) Epoch 18, batch 7900, loss[loss=0.161, simple_loss=0.2576, pruned_loss=0.03217, over 4873.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2661, pruned_loss=0.04154, over 932642.30 frames. ], batch size: 14, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:21,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242238.66666666666, ans=0.0 +2024-07-29 04:56:26,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=242252.0, ans=0.2 +2024-07-29 04:56:28,425 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-29 04:56:29,504 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=242252.0, ans=0.125 +2024-07-29 04:56:38,960 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-29 04:56:39,132 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.700e+01 6.249e+01 7.197e+01 1.145e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 04:56:42,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=242278.66666666666, ans=0.0 +2024-07-29 04:56:42,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-07-29 04:56:46,075 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.71 vs. limit=15.0 +2024-07-29 04:56:50,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242305.33333333334, ans=0.125 +2024-07-29 04:56:51,097 INFO [train.py:1114] (0/4) Epoch 18, batch 7950, loss[loss=0.2108, simple_loss=0.2853, pruned_loss=0.06817, over 3653.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2657, pruned_loss=0.04163, over 935270.86 frames. ], batch size: 35, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:53,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=242305.33333333334, ans=0.125 +2024-07-29 04:56:55,285 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:56:56,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242305.33333333334, ans=0.125 +2024-07-29 04:57:23,860 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. 
limit=15.0 +2024-07-29 04:57:24,043 INFO [train.py:1114] (0/4) Epoch 18, batch 8000, loss[loss=0.1611, simple_loss=0.2507, pruned_loss=0.03573, over 4612.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2647, pruned_loss=0.04167, over 934576.89 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:57:24,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=242372.0, ans=0.125 +2024-07-29 04:57:38,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.14 vs. limit=15.0 +2024-07-29 04:57:45,065 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.556e+01 6.379e+01 7.313e+01 1.044e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 04:57:49,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=242425.33333333334, ans=0.0 +2024-07-29 04:57:52,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=242425.33333333334, ans=0.0 +2024-07-29 04:57:58,338 INFO [train.py:1114] (0/4) Epoch 18, batch 8050, loss[loss=0.1724, simple_loss=0.2754, pruned_loss=0.03477, over 4807.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2653, pruned_loss=0.04216, over 934240.39 frames. ], batch size: 14, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:58:01,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=242438.66666666666, ans=0.0 +2024-07-29 04:58:03,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=242438.66666666666, ans=0.2 +2024-07-29 04:58:14,910 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=242465.33333333334, ans=0.125 +2024-07-29 04:58:18,001 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=242478.66666666666, ans=0.2 +2024-07-29 04:58:29,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.49 vs. limit=15.0 +2024-07-29 04:58:31,318 INFO [train.py:1114] (0/4) Epoch 18, batch 8100, loss[loss=0.1926, simple_loss=0.2828, pruned_loss=0.05123, over 4817.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2663, pruned_loss=0.04214, over 933832.92 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:58:32,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=242505.33333333334, ans=0.025 +2024-07-29 04:58:47,688 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.96 vs. 
limit=12.0 +2024-07-29 04:58:52,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.503e+01 5.727e+01 6.609e+01 7.504e+01 1.146e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-29 04:58:53,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242545.33333333334, ans=0.1 +2024-07-29 04:59:00,639 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=242558.66666666666, ans=0.025 +2024-07-29 04:59:04,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-07-29 04:59:04,412 INFO [train.py:1114] (0/4) Epoch 18, batch 8150, loss[loss=0.1973, simple_loss=0.282, pruned_loss=0.05632, over 4793.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.266, pruned_loss=0.04227, over 936814.10 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:59:04,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242572.0, ans=0.125 +2024-07-29 04:59:19,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=242598.66666666666, ans=0.0 +2024-07-29 04:59:33,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=242625.33333333334, ans=0.025 +2024-07-29 04:59:34,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=242625.33333333334, ans=0.125 +2024-07-29 04:59:38,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=242625.33333333334, ans=0.0 +2024-07-29 04:59:41,326 INFO [train.py:1114] (0/4) Epoch 18, batch 8200, loss[loss=0.1769, simple_loss=0.2609, pruned_loss=0.04642, over 4812.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2656, pruned_loss=0.04154, over 938170.35 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:59:42,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242638.66666666666, ans=0.1 +2024-07-29 04:59:45,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=242638.66666666666, ans=0.2 +2024-07-29 05:00:11,691 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242665.33333333334, ans=0.0 +2024-07-29 05:00:20,501 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.660e+01 6.350e+01 7.311e+01 1.182e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 05:00:26,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=242678.66666666666, ans=10.0 +2024-07-29 05:00:30,518 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:00:32,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242705.33333333334, ans=0.0 +2024-07-29 05:00:33,421 INFO [train.py:1114] (0/4) Epoch 18, batch 8250, loss[loss=0.1662, simple_loss=0.263, pruned_loss=0.03475, over 4899.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2652, pruned_loss=0.04141, over 938492.40 frames. ], batch size: 13, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:00:34,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.97 vs. limit=15.0 +2024-07-29 05:00:43,070 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:00:44,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=242718.66666666666, ans=0.025 +2024-07-29 05:00:56,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=242745.33333333334, ans=0.125 +2024-07-29 05:01:04,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=242758.66666666666, ans=0.125 +2024-07-29 05:01:06,376 INFO [train.py:1114] (0/4) Epoch 18, batch 8300, loss[loss=0.1671, simple_loss=0.2609, pruned_loss=0.03668, over 4914.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2662, pruned_loss=0.04181, over 938057.70 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:01:15,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=242785.33333333334, ans=0.125 +2024-07-29 05:01:22,511 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=242798.66666666666, ans=0.025 +2024-07-29 05:01:23,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=242798.66666666666, ans=0.0 +2024-07-29 05:01:26,870 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.743e+01 6.319e+01 7.194e+01 1.218e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-29 05:01:34,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-07-29 05:01:38,530 INFO [train.py:1114] (0/4) Epoch 18, batch 8350, loss[loss=0.2206, simple_loss=0.3118, pruned_loss=0.06466, over 4802.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2655, pruned_loss=0.04157, over 941035.49 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:01:43,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=242838.66666666666, ans=0.125 +2024-07-29 05:01:52,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242852.0, ans=0.1 +2024-07-29 05:02:17,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.22 vs. limit=22.5 +2024-07-29 05:02:18,747 INFO [train.py:1114] (0/4) Epoch 18, batch 8400, loss[loss=0.1566, simple_loss=0.2549, pruned_loss=0.02912, over 4780.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2652, pruned_loss=0.04158, over 939735.02 frames. ], batch size: 12, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:19,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=242905.33333333334, ans=0.0 +2024-07-29 05:02:20,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=242905.33333333334, ans=0.125 +2024-07-29 05:02:35,474 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.68 vs. limit=15.0 +2024-07-29 05:02:38,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=242945.33333333334, ans=0.125 +2024-07-29 05:02:39,516 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.882e+01 6.556e+01 7.323e+01 1.088e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 05:02:42,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242945.33333333334, ans=0.125 +2024-07-29 05:02:47,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242958.66666666666, ans=0.125 +2024-07-29 05:02:51,082 INFO [train.py:1114] (0/4) Epoch 18, batch 8450, loss[loss=0.2077, simple_loss=0.2924, pruned_loss=0.06145, over 4799.00 frames. 
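Each batch summary also carries `grad_scale` (32.0 here, 64.0 for a stretch around batches 6750-7100 earlier in this log), the dynamic loss scale used for mixed-precision training; the move between 32 and 64 is the scaler's growth/backoff at work. A generic `torch.cuda.amp` pattern that surfaces the same number — how train.py actually manages its scale is not shown in this excerpt:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

def train_step(model, batch, optimizer):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = model(batch)
    scaler.scale(loss).backward()  # backprop through the scaled loss
    scaler.step(optimizer)         # unscales grads; skips the step on inf/nan
    scaler.update()                # grows or halves the scale dynamically
    return float(loss.detach()), scaler.get_scale()  # second value ~ grad_scale
```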
], tot_loss[loss=0.1751, simple_loss=0.2661, pruned_loss=0.04206, over 938353.67 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:54,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=242972.0, ans=0.125 +2024-07-29 05:03:01,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=242985.33333333334, ans=0.125 +2024-07-29 05:03:29,571 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242998.66666666666, ans=0.125 +2024-07-29 05:03:34,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=243012.0, ans=0.0 +2024-07-29 05:03:34,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=243012.0, ans=0.025 +2024-07-29 05:03:44,952 INFO [train.py:1114] (0/4) Epoch 18, batch 8500, loss[loss=0.1568, simple_loss=0.2396, pruned_loss=0.03701, over 4628.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2645, pruned_loss=0.04104, over 938760.96 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:03:45,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=243038.66666666666, ans=0.2 +2024-07-29 05:03:56,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=243052.0, ans=0.125 +2024-07-29 05:04:07,970 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.501e+01 6.348e+01 7.091e+01 9.836e+01, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 05:04:15,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243092.0, ans=0.1 +2024-07-29 05:04:19,721 INFO [train.py:1114] (0/4) Epoch 18, batch 8550, loss[loss=0.1241, simple_loss=0.2061, pruned_loss=0.02104, over 4785.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2653, pruned_loss=0.04157, over 939809.71 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:04:33,960 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=243132.0, ans=0.2 +2024-07-29 05:04:52,780 INFO [train.py:1114] (0/4) Epoch 18, batch 8600, loss[loss=0.2318, simple_loss=0.3272, pruned_loss=0.06821, over 4802.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2653, pruned_loss=0.04193, over 939052.92 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:05:13,978 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 5.639e+01 6.288e+01 7.210e+01 1.078e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 05:05:17,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243212.0, ans=0.125 +2024-07-29 05:05:25,405 INFO [train.py:1114] (0/4) Epoch 18, batch 8650, loss[loss=0.1879, simple_loss=0.2668, pruned_loss=0.0545, over 4897.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.265, pruned_loss=0.04181, over 940140.01 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:05:33,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243252.0, ans=0.125 +2024-07-29 05:05:41,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=243265.33333333334, ans=0.125 +2024-07-29 05:05:48,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243278.66666666666, ans=0.125 +2024-07-29 05:05:57,739 INFO [train.py:1114] (0/4) Epoch 18, batch 8700, loss[loss=0.1554, simple_loss=0.2608, pruned_loss=0.02501, over 4747.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.04193, over 937401.87 frames. ], batch size: 13, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:06:03,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-29 05:06:15,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=243332.0, ans=0.125 +2024-07-29 05:06:18,239 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.797e+01 6.169e+01 6.761e+01 9.579e+01, threshold=1.234e+02, percent-clipped=0.0 +2024-07-29 05:06:29,982 INFO [train.py:1114] (0/4) Epoch 18, batch 8750, loss[loss=0.1814, simple_loss=0.2798, pruned_loss=0.04153, over 4687.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.265, pruned_loss=0.04206, over 936318.45 frames. ], batch size: 15, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:06:33,764 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.21 vs. limit=15.0 +2024-07-29 05:06:42,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=243398.66666666666, ans=0.05 +2024-07-29 05:06:46,976 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=243398.66666666666, ans=0.2 +2024-07-29 05:06:48,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243398.66666666666, ans=0.1 +2024-07-29 05:06:52,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=243412.0, ans=0.2 +2024-07-29 05:06:54,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=243412.0, ans=0.1 +2024-07-29 05:06:59,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=243425.33333333334, ans=0.04949747468305833 +2024-07-29 05:07:02,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=243425.33333333334, ans=0.025 +2024-07-29 05:07:05,129 INFO [train.py:1114] (0/4) Epoch 18, batch 8800, loss[loss=0.1865, simple_loss=0.2935, pruned_loss=0.03977, over 4928.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2651, pruned_loss=0.04188, over 937173.78 frames. 
], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:07:07,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243438.66666666666, ans=0.1 +2024-07-29 05:07:11,597 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243452.0, ans=0.0 +2024-07-29 05:07:25,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=243478.66666666666, ans=0.04949747468305833 +2024-07-29 05:07:26,779 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.641e+01 6.387e+01 7.548e+01 9.629e+01, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 05:07:27,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.62 vs. limit=15.0 +2024-07-29 05:07:33,232 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.15 vs. limit=15.0 +2024-07-29 05:07:35,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.34 vs. limit=22.5 +2024-07-29 05:07:38,885 INFO [train.py:1114] (0/4) Epoch 18, batch 8850, loss[loss=0.1948, simple_loss=0.2811, pruned_loss=0.05425, over 4508.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.04194, over 931572.61 frames. ], batch size: 21, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:07:41,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=243505.33333333334, ans=0.125 +2024-07-29 05:07:56,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=243532.0, ans=0.0 +2024-07-29 05:08:12,261 INFO [train.py:1114] (0/4) Epoch 18, batch 8900, loss[loss=0.1463, simple_loss=0.2352, pruned_loss=0.02868, over 4924.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2645, pruned_loss=0.04156, over 929732.26 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:08:20,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243585.33333333334, ans=0.0 +2024-07-29 05:08:30,526 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0 +2024-07-29 05:08:32,777 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.619e+01 6.277e+01 7.423e+01 9.938e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:08:34,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=243612.0, ans=0.125 +2024-07-29 05:08:44,473 INFO [train.py:1114] (0/4) Epoch 18, batch 8950, loss[loss=0.1848, simple_loss=0.2805, pruned_loss=0.04454, over 4573.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2644, pruned_loss=0.04134, over 929990.13 frames. 
], batch size: 21, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:08:44,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=243638.66666666666, ans=0.2 +2024-07-29 05:08:52,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=243652.0, ans=15.0 +2024-07-29 05:08:58,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=243665.33333333334, ans=0.125 +2024-07-29 05:09:01,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243665.33333333334, ans=0.1 +2024-07-29 05:09:02,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=243665.33333333334, ans=0.125 +2024-07-29 05:09:14,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=243692.0, ans=0.0 +2024-07-29 05:09:16,707 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.14 vs. limit=15.0 +2024-07-29 05:09:18,141 INFO [train.py:1114] (0/4) Epoch 18, batch 9000, loss[loss=0.1692, simple_loss=0.2471, pruned_loss=0.04562, over 4638.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2634, pruned_loss=0.04123, over 933124.18 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:09:18,142 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 05:09:26,761 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([4.6836, 3.6659, 3.8976, 4.2296, 4.5342, 3.7106, 4.5735, 3.8305], + device='cuda:0') +2024-07-29 05:09:28,847 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7690, 4.0978, 4.3807, 4.3565], device='cuda:0') +2024-07-29 05:09:33,034 INFO [train.py:1146] (0/4) Epoch 18, validation: loss=0.1616, simple_loss=0.2637, pruned_loss=0.02971, over 944034.00 frames. +2024-07-29 05:09:33,034 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 05:09:35,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.87 vs. limit=5.0 +2024-07-29 05:09:42,586 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=243718.66666666666, ans=0.125 +2024-07-29 05:09:47,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=243732.0, ans=15.0 +2024-07-29 05:09:48,015 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=243732.0, ans=0.05 +2024-07-29 05:09:49,765 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.42 vs. limit=12.0 +2024-07-29 05:09:54,498 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.133e+01 5.845e+01 6.498e+01 7.420e+01 1.015e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 05:10:05,952 INFO [train.py:1114] (0/4) Epoch 18, batch 9050, loss[loss=0.1722, simple_loss=0.2544, pruned_loss=0.04505, over 4599.00 frames. 
], tot_loss[loss=0.1722, simple_loss=0.2625, pruned_loss=0.04097, over 934013.24 frames. ], batch size: 10, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:08,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=243772.0, ans=0.125 +2024-07-29 05:10:19,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.49 vs. limit=15.0 +2024-07-29 05:10:26,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=243812.0, ans=0.125 +2024-07-29 05:10:37,838 INFO [train.py:1114] (0/4) Epoch 18, batch 9100, loss[loss=0.1613, simple_loss=0.2568, pruned_loss=0.03289, over 4931.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2636, pruned_loss=0.04129, over 936730.35 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:47,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=243852.0, ans=0.125 +2024-07-29 05:10:47,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243852.0, ans=0.0 +2024-07-29 05:10:58,038 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.927e+01 6.725e+01 7.788e+01 1.053e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-29 05:10:58,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=243878.66666666666, ans=0.5 +2024-07-29 05:10:58,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.33 vs. limit=10.0 +2024-07-29 05:11:01,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=243878.66666666666, ans=0.125 +2024-07-29 05:11:02,484 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=243892.0, ans=0.0 +2024-07-29 05:11:04,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0 +2024-07-29 05:11:08,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.10 vs. limit=10.0 +2024-07-29 05:11:09,490 INFO [train.py:1114] (0/4) Epoch 18, batch 9150, loss[loss=0.1785, simple_loss=0.2761, pruned_loss=0.04039, over 4805.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2647, pruned_loss=0.04125, over 935446.60 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:09,878 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.15 vs. 
limit=15.0 +2024-07-29 05:11:10,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=243905.33333333334, ans=0.025 +2024-07-29 05:11:20,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=243918.66666666666, ans=0.125 +2024-07-29 05:11:22,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=243932.0, ans=0.025 +2024-07-29 05:11:22,797 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:11:23,177 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.93 vs. limit=10.0 +2024-07-29 05:11:29,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=243945.33333333334, ans=0.09899494936611666 +2024-07-29 05:11:31,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=243945.33333333334, ans=0.125 +2024-07-29 05:11:38,688 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=243958.66666666666, ans=0.2 +2024-07-29 05:11:42,338 INFO [train.py:1114] (0/4) Epoch 18, batch 9200, loss[loss=0.1627, simple_loss=0.2626, pruned_loss=0.03136, over 4858.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2642, pruned_loss=0.04085, over 937650.91 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:51,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=243985.33333333334, ans=0.2 +2024-07-29 05:12:03,096 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.677e+01 6.144e+01 6.790e+01 1.037e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 05:12:05,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=244012.0, ans=0.0 +2024-07-29 05:12:14,993 INFO [train.py:1114] (0/4) Epoch 18, batch 9250, loss[loss=0.1978, simple_loss=0.2944, pruned_loss=0.05057, over 4631.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2643, pruned_loss=0.04097, over 938196.50 frames. 
], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:15,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=244038.66666666666, ans=0.2 +2024-07-29 05:12:19,545 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:12:24,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=244052.0, ans=0.04949747468305833 +2024-07-29 05:12:34,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=244078.66666666666, ans=0.125 +2024-07-29 05:12:34,578 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244078.66666666666, ans=0.125 +2024-07-29 05:12:36,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244078.66666666666, ans=0.1 +2024-07-29 05:12:44,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=244092.0, ans=0.5 +2024-07-29 05:12:47,059 INFO [train.py:1114] (0/4) Epoch 18, batch 9300, loss[loss=0.1561, simple_loss=0.246, pruned_loss=0.03313, over 4772.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.263, pruned_loss=0.04058, over 938107.02 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:52,042 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244105.33333333334, ans=0.0 +2024-07-29 05:12:57,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.40 vs. limit=15.0 +2024-07-29 05:12:59,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=244132.0, ans=0.125 +2024-07-29 05:13:04,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=244132.0, ans=0.0 +2024-07-29 05:13:05,988 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=244145.33333333334, ans=0.125 +2024-07-29 05:13:07,076 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.598e+01 6.030e+01 6.861e+01 1.072e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-29 05:13:07,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=244145.33333333334, ans=0.07 +2024-07-29 05:13:08,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244145.33333333334, ans=0.1 +2024-07-29 05:13:15,390 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-29 05:13:19,382 INFO [train.py:1114] (0/4) Epoch 18, batch 9350, loss[loss=0.143, simple_loss=0.2279, pruned_loss=0.02907, over 4809.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.263, pruned_loss=0.0409, over 935320.47 frames. 
], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:25,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=244172.0, ans=0.125 +2024-07-29 05:13:28,822 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=244185.33333333334, ans=0.125 +2024-07-29 05:13:28,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=244185.33333333334, ans=0.0 +2024-07-29 05:13:32,753 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-07-29 05:13:41,378 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.11 vs. limit=12.0 +2024-07-29 05:13:49,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=244225.33333333334, ans=0.0 +2024-07-29 05:13:52,876 INFO [train.py:1114] (0/4) Epoch 18, batch 9400, loss[loss=0.189, simple_loss=0.2914, pruned_loss=0.04336, over 4692.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2636, pruned_loss=0.04103, over 933700.29 frames. ], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:14:14,129 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.480e+01 6.250e+01 7.248e+01 1.054e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 05:14:19,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244292.0, ans=0.125 +2024-07-29 05:14:24,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=244292.0, ans=0.05 +2024-07-29 05:14:25,262 INFO [train.py:1114] (0/4) Epoch 18, batch 9450, loss[loss=0.1431, simple_loss=0.2274, pruned_loss=0.02938, over 4806.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2639, pruned_loss=0.04115, over 932686.57 frames. ], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:14:25,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=244305.33333333334, ans=0.125 +2024-07-29 05:14:31,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=244318.66666666666, ans=0.125 +2024-07-29 05:14:39,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=244332.0, ans=0.125 +2024-07-29 05:14:39,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=244332.0, ans=0.0 +2024-07-29 05:14:39,788 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:14:41,338 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.51 vs. limit=15.0 +2024-07-29 05:14:49,043 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:14:52,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.38 vs. 
limit=15.0 +2024-07-29 05:14:56,513 INFO [train.py:1114] (0/4) Epoch 18, batch 9500, loss[loss=0.1892, simple_loss=0.2784, pruned_loss=0.04997, over 4694.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2639, pruned_loss=0.04093, over 934896.32 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:15:06,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=244385.33333333334, ans=0.0 +2024-07-29 05:15:16,199 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.01 vs. limit=10.0 +2024-07-29 05:15:17,057 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.635e+01 6.260e+01 7.098e+01 9.795e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 05:15:22,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-07-29 05:15:22,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=244425.33333333334, ans=0.0 +2024-07-29 05:15:23,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=244425.33333333334, ans=0.2 +2024-07-29 05:15:27,756 INFO [train.py:1114] (0/4) Epoch 18, batch 9550, loss[loss=0.1759, simple_loss=0.2656, pruned_loss=0.04311, over 4782.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2634, pruned_loss=0.04046, over 932571.96 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:15:29,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=244438.66666666666, ans=0.125 +2024-07-29 05:15:31,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=244438.66666666666, ans=0.025 +2024-07-29 05:15:31,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=244438.66666666666, ans=0.025 +2024-07-29 05:15:33,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=244452.0, ans=0.0 +2024-07-29 05:15:40,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=244465.33333333334, ans=0.125 +2024-07-29 05:15:42,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=244465.33333333334, ans=0.125 +2024-07-29 05:15:49,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=244478.66666666666, ans=15.0 +2024-07-29 05:15:49,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=244478.66666666666, ans=0.025 +2024-07-29 05:15:55,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=244492.0, ans=0.125 +2024-07-29 05:15:55,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244492.0, ans=0.125 +2024-07-29 05:16:00,332 INFO [train.py:1114] (0/4) Epoch 18, batch 9600, loss[loss=0.2172, simple_loss=0.3002, 
pruned_loss=0.06712, over 3516.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2639, pruned_loss=0.04077, over 931564.14 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:12,103 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.91 vs. limit=15.0 +2024-07-29 05:16:21,032 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.711e+01 6.305e+01 6.902e+01 1.149e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 05:16:25,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.24 vs. limit=12.0 +2024-07-29 05:16:26,330 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.22 vs. limit=22.5 +2024-07-29 05:16:26,713 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244558.66666666666, ans=0.1 +2024-07-29 05:16:31,686 INFO [train.py:1114] (0/4) Epoch 18, batch 9650, loss[loss=0.1921, simple_loss=0.2732, pruned_loss=0.05553, over 4847.00 frames. ], tot_loss[loss=0.174, simple_loss=0.265, pruned_loss=0.04155, over 927718.48 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:33,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=244572.0, ans=0.0 +2024-07-29 05:16:43,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=244598.66666666666, ans=0.125 +2024-07-29 05:16:47,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=244598.66666666666, ans=0.125 +2024-07-29 05:16:50,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=244612.0, ans=0.0 +2024-07-29 05:16:56,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=244625.33333333334, ans=0.125 +2024-07-29 05:16:59,774 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.06 vs. limit=15.0 +2024-07-29 05:17:01,022 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.84 vs. limit=22.5 +2024-07-29 05:17:02,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0 +2024-07-29 05:17:03,160 INFO [train.py:1114] (0/4) Epoch 18, batch 9700, loss[loss=0.1638, simple_loss=0.2618, pruned_loss=0.03294, over 4212.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04134, over 925489.44 frames. 
], batch size: 25, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:17:04,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=244638.66666666666, ans=0.04949747468305833 +2024-07-29 05:17:12,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=244652.0, ans=0.125 +2024-07-29 05:17:15,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244665.33333333334, ans=0.125 +2024-07-29 05:17:18,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=244665.33333333334, ans=0.2 +2024-07-29 05:17:23,682 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.681e+01 6.275e+01 7.162e+01 1.082e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:17:30,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244692.0, ans=0.1 +2024-07-29 05:17:34,273 INFO [train.py:1114] (0/4) Epoch 18, batch 9750, loss[loss=0.213, simple_loss=0.3075, pruned_loss=0.05924, over 4683.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2661, pruned_loss=0.0421, over 926216.24 frames. ], batch size: 15, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:17:41,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.18 vs. limit=15.0 +2024-07-29 05:17:56,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244745.33333333334, ans=0.125 +2024-07-29 05:18:00,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244758.66666666666, ans=0.1 +2024-07-29 05:18:04,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=244758.66666666666, ans=0.125 +2024-07-29 05:18:05,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=244772.0, ans=15.0 +2024-07-29 05:18:06,006 INFO [train.py:1114] (0/4) Epoch 18, batch 9800, loss[loss=0.139, simple_loss=0.2309, pruned_loss=0.02357, over 4715.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2651, pruned_loss=0.04144, over 925200.00 frames. 
], batch size: 12, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:18:16,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=244785.33333333334, ans=0.0 +2024-07-29 05:18:17,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=244798.66666666666, ans=0.2 +2024-07-29 05:18:20,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=244798.66666666666, ans=0.125 +2024-07-29 05:18:26,933 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.797e+01 6.276e+01 7.162e+01 9.479e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:18:33,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244825.33333333334, ans=0.1 +2024-07-29 05:18:38,926 INFO [train.py:1114] (0/4) Epoch 18, batch 9850, loss[loss=0.1687, simple_loss=0.2704, pruned_loss=0.03351, over 4891.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2652, pruned_loss=0.04139, over 927439.95 frames. ], batch size: 15, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:18:53,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244865.33333333334, ans=0.1 +2024-07-29 05:18:59,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=244878.66666666666, ans=0.0 +2024-07-29 05:19:05,027 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:19:05,233 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.76 vs. limit=10.0 +2024-07-29 05:19:09,962 INFO [train.py:1114] (0/4) Epoch 18, batch 9900, loss[loss=0.1872, simple_loss=0.2892, pruned_loss=0.04258, over 4847.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2658, pruned_loss=0.04207, over 926258.35 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:19:10,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=244905.33333333334, ans=0.2 +2024-07-29 05:19:10,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=244905.33333333334, ans=0.125 +2024-07-29 05:19:12,868 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.75 vs. limit=10.0 +2024-07-29 05:19:27,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.83 vs. limit=22.5 +2024-07-29 05:19:30,945 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.784e+01 6.438e+01 7.578e+01 1.058e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 05:19:31,159 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244945.33333333334, ans=0.125 +2024-07-29 05:19:33,891 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. 
limit=6.0 +2024-07-29 05:19:37,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=244958.66666666666, ans=0.125 +2024-07-29 05:19:41,553 INFO [train.py:1114] (0/4) Epoch 18, batch 9950, loss[loss=0.1457, simple_loss=0.2268, pruned_loss=0.03228, over 4787.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2658, pruned_loss=0.04263, over 929160.51 frames. ], batch size: 11, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:20:00,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=245012.0, ans=0.07 +2024-07-29 05:20:00,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245012.0, ans=0.125 +2024-07-29 05:20:04,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=245012.0, ans=0.09899494936611666 +2024-07-29 05:20:12,657 INFO [train.py:1114] (0/4) Epoch 18, batch 10000, loss[loss=0.1606, simple_loss=0.2513, pruned_loss=0.03491, over 4631.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2686, pruned_loss=0.04378, over 926528.55 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:20:13,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=245038.66666666666, ans=10.0 +2024-07-29 05:20:23,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245052.0, ans=0.1 +2024-07-29 05:20:28,736 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.52 vs. limit=15.0 +2024-07-29 05:20:33,060 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.790e+01 5.781e+01 6.382e+01 8.189e+01 1.255e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 05:20:34,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=245078.66666666666, ans=0.0 +2024-07-29 05:20:38,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=245092.0, ans=0.125 +2024-07-29 05:20:40,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245092.0, ans=0.1 +2024-07-29 05:20:40,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245092.0, ans=0.125 +2024-07-29 05:20:44,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.56 vs. limit=15.0 +2024-07-29 05:20:45,044 INFO [train.py:1114] (0/4) Epoch 18, batch 10050, loss[loss=0.2148, simple_loss=0.2987, pruned_loss=0.06549, over 3546.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2717, pruned_loss=0.04539, over 916023.20 frames. 
], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:20:52,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=245118.66666666666, ans=0.2 +2024-07-29 05:21:01,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=245132.0, ans=0.125 +2024-07-29 05:21:11,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=245145.33333333334, ans=0.1 +2024-07-29 05:21:15,522 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-07-29 05:21:17,399 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.51 vs. limit=22.5 +2024-07-29 05:21:19,143 INFO [train.py:1114] (0/4) Epoch 18, batch 10100, loss[loss=0.2223, simple_loss=0.3045, pruned_loss=0.07005, over 3373.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2765, pruned_loss=0.04981, over 863526.75 frames. ], batch size: 36, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:21:20,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.18 vs. limit=22.5 +2024-07-29 05:21:21,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=245172.0, ans=0.0 +2024-07-29 05:21:24,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=245172.0, ans=0.125 +2024-07-29 05:21:30,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245185.33333333334, ans=0.125 +2024-07-29 05:21:31,449 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.63 vs. limit=15.0 +2024-07-29 05:21:33,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245198.66666666666, ans=0.125 +2024-07-29 05:21:41,094 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 6.796e+01 7.277e+01 7.758e+01 1.071e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-29 05:21:52,421 INFO [train.py:1114] (0/4) Epoch 18, batch 10150, loss[loss=0.2183, simple_loss=0.2989, pruned_loss=0.06889, over 3450.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2799, pruned_loss=0.05341, over 820769.57 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:21:54,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=245238.66666666666, ans=0.1 +2024-07-29 05:21:59,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=245252.0, ans=0.125 +2024-07-29 05:22:02,118 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=245252.0, ans=0.0 +2024-07-29 05:22:15,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=245278.66666666666, ans=0.05 +2024-07-29 05:22:16,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.12 vs. 
limit=15.0 +2024-07-29 05:22:18,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245292.0, ans=0.125 +2024-07-29 05:22:18,831 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.40 vs. limit=22.5 +2024-07-29 05:22:21,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.87 vs. limit=10.0 +2024-07-29 05:22:21,977 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.03 vs. limit=15.0 +2024-07-29 05:22:22,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=245292.0, ans=0.2 +2024-07-29 05:22:24,099 INFO [train.py:1114] (0/4) Epoch 18, batch 10200, loss[loss=0.2037, simple_loss=0.2786, pruned_loss=0.0644, over 3246.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2813, pruned_loss=0.05528, over 788920.59 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:22:28,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=245305.33333333334, ans=0.125 +2024-07-29 05:22:37,041 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-184000.pt +2024-07-29 05:22:41,669 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-18.pt +2024-07-29 05:24:06,502 INFO [train.py:1114] (0/4) Epoch 19, batch 0, loss[loss=0.1445, simple_loss=0.2333, pruned_loss=0.02787, over 4858.00 frames. ], tot_loss[loss=0.1445, simple_loss=0.2333, pruned_loss=0.02787, over 4858.00 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:24:06,503 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 05:24:10,901 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.4080, 4.0780, 3.8003, 4.1344], device='cuda:0') +2024-07-29 05:24:18,361 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1627, simple_loss=0.2658, pruned_loss=0.02977, over 944034.00 frames. +2024-07-29 05:24:18,361 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 05:24:20,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245336.0, ans=0.125 +2024-07-29 05:24:25,131 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.550e+01 7.036e+01 7.426e+01 9.937e+01, threshold=1.407e+02, percent-clipped=0.0 +2024-07-29 05:24:25,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=245349.33333333334, ans=0.2 +2024-07-29 05:24:29,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=245349.33333333334, ans=0.125 +2024-07-29 05:24:30,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=245349.33333333334, ans=0.2 +2024-07-29 05:24:37,907 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=6.0 +2024-07-29 05:24:50,269 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.00 vs. limit=6.0 +2024-07-29 05:24:55,451 INFO [train.py:1114] (0/4) Epoch 19, batch 50, loss[loss=0.1647, simple_loss=0.2542, pruned_loss=0.03758, over 4602.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2687, pruned_loss=0.04312, over 206465.06 frames. ], batch size: 11, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:24:58,991 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245402.66666666666, ans=0.1 +2024-07-29 05:25:13,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=245429.33333333334, ans=0.0 +2024-07-29 05:25:15,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=245442.66666666666, ans=0.025 +2024-07-29 05:25:29,105 INFO [train.py:1114] (0/4) Epoch 19, batch 100, loss[loss=0.1548, simple_loss=0.2349, pruned_loss=0.03741, over 4644.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2689, pruned_loss=0.04223, over 365413.33 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:25:30,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=245469.33333333334, ans=0.125 +2024-07-29 05:25:34,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245469.33333333334, ans=0.125 +2024-07-29 05:25:35,850 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.533e+01 6.230e+01 7.043e+01 1.593e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-29 05:25:36,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.17 vs. limit=6.0 +2024-07-29 05:25:41,033 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.03 vs. limit=15.0 +2024-07-29 05:25:41,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=245482.66666666666, ans=0.0 +2024-07-29 05:25:49,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=245509.33333333334, ans=0.0 +2024-07-29 05:25:52,667 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245509.33333333334, ans=0.125 +2024-07-29 05:25:54,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245509.33333333334, ans=0.125 +2024-07-29 05:26:02,326 INFO [train.py:1114] (0/4) Epoch 19, batch 150, loss[loss=0.1242, simple_loss=0.1982, pruned_loss=0.02512, over 4615.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2654, pruned_loss=0.04127, over 494353.40 frames. ], batch size: 11, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:26:07,883 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.13 vs. 
limit=15.0 +2024-07-29 05:26:21,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=245576.0, ans=0.0 +2024-07-29 05:26:22,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=245576.0, ans=0.125 +2024-07-29 05:26:29,701 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=245589.33333333334, ans=0.07 +2024-07-29 05:26:35,604 INFO [train.py:1114] (0/4) Epoch 19, batch 200, loss[loss=0.1824, simple_loss=0.2735, pruned_loss=0.0456, over 4418.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.264, pruned_loss=0.04014, over 593532.44 frames. ], batch size: 21, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:26:39,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=245602.66666666666, ans=0.125 +2024-07-29 05:26:39,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245602.66666666666, ans=0.1 +2024-07-29 05:26:40,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.47 vs. limit=22.5 +2024-07-29 05:26:42,098 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.609e+01 6.216e+01 6.903e+01 1.039e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 05:26:50,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245629.33333333334, ans=0.125 +2024-07-29 05:26:50,829 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=245629.33333333334, ans=0.2 +2024-07-29 05:26:53,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=245629.33333333334, ans=0.0 +2024-07-29 05:26:55,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245642.66666666666, ans=0.1 +2024-07-29 05:26:58,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=245642.66666666666, ans=0.125 +2024-07-29 05:27:10,909 INFO [train.py:1114] (0/4) Epoch 19, batch 250, loss[loss=0.1726, simple_loss=0.2658, pruned_loss=0.03975, over 4633.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2632, pruned_loss=0.04, over 670199.22 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:27:11,117 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:27:16,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=245669.33333333334, ans=0.025 +2024-07-29 05:27:29,749 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.07 vs. 
limit=15.0 +2024-07-29 05:27:29,985 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=245696.0, ans=0.0 +2024-07-29 05:27:34,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245709.33333333334, ans=0.125 +2024-07-29 05:27:35,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245709.33333333334, ans=0.0 +2024-07-29 05:27:36,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=245709.33333333334, ans=0.0 +2024-07-29 05:27:39,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245722.66666666666, ans=0.125 +2024-07-29 05:27:44,366 INFO [train.py:1114] (0/4) Epoch 19, batch 300, loss[loss=0.1784, simple_loss=0.2657, pruned_loss=0.04557, over 4800.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2629, pruned_loss=0.04002, over 729761.70 frames. ], batch size: 15, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:27:44,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245736.0, ans=0.125 +2024-07-29 05:27:51,012 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.537e+01 6.057e+01 6.917e+01 1.022e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 05:28:01,399 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245762.66666666666, ans=0.1 +2024-07-29 05:28:08,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=245776.0, ans=0.0 +2024-07-29 05:28:08,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=245776.0, ans=0.2 +2024-07-29 05:28:09,365 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=245776.0, ans=0.0 +2024-07-29 05:28:17,814 INFO [train.py:1114] (0/4) Epoch 19, batch 350, loss[loss=0.1548, simple_loss=0.2395, pruned_loss=0.03511, over 4923.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2623, pruned_loss=0.03982, over 775642.18 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:28:17,962 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=245802.66666666666, ans=0.07 +2024-07-29 05:28:36,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=245829.33333333334, ans=0.125 +2024-07-29 05:28:41,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=245842.66666666666, ans=0.025 +2024-07-29 05:28:42,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=245842.66666666666, ans=0.125 +2024-07-29 05:28:46,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=245856.0, ans=0.125 +2024-07-29 05:28:51,560 INFO [train.py:1114] (0/4) Epoch 19, batch 400, loss[loss=0.2121, simple_loss=0.3022, pruned_loss=0.06102, over 4704.00 frames. 
], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03969, over 813263.94 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:28:52,944 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=245869.33333333334, ans=0.125 +2024-07-29 05:28:58,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.341e+01 5.802e+01 6.594e+01 8.688e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-29 05:29:10,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=245896.0, ans=0.125 +2024-07-29 05:29:10,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=245896.0, ans=0.125 +2024-07-29 05:29:14,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=245909.33333333334, ans=0.125 +2024-07-29 05:29:24,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=245922.66666666666, ans=0.125 +2024-07-29 05:29:26,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=245922.66666666666, ans=0.09899494936611666 +2024-07-29 05:29:27,352 INFO [train.py:1114] (0/4) Epoch 19, batch 450, loss[loss=0.1774, simple_loss=0.2693, pruned_loss=0.04275, over 4631.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04024, over 838460.44 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:29:34,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=245949.33333333334, ans=0.125 +2024-07-29 05:29:35,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=245949.33333333334, ans=0.0 +2024-07-29 05:29:35,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0 +2024-07-29 05:29:36,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=245949.33333333334, ans=0.025 +2024-07-29 05:29:40,465 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=245962.66666666666, ans=0.125 +2024-07-29 05:29:43,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=245962.66666666666, ans=0.0 +2024-07-29 05:29:48,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=245976.0, ans=0.0 +2024-07-29 05:29:55,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=245989.33333333334, ans=0.09899494936611666 +2024-07-29 05:30:02,705 INFO [train.py:1114] (0/4) Epoch 19, batch 500, loss[loss=0.1841, simple_loss=0.2835, pruned_loss=0.04234, over 4676.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2618, pruned_loss=0.03922, over 861031.06 frames. 
], batch size: 15, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:30:03,585 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=246002.66666666666, ans=0.125 +2024-07-29 05:30:11,397 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.402e+01 5.590e+01 6.119e+01 6.735e+01 9.052e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 05:30:14,930 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=246016.0, ans=0.125 +2024-07-29 05:30:17,571 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:30:21,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.55 vs. limit=22.5 +2024-07-29 05:30:25,300 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.02 vs. limit=15.0 +2024-07-29 05:30:31,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=246056.0, ans=0.2 +2024-07-29 05:30:32,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.51 vs. limit=15.0 +2024-07-29 05:30:35,319 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.13 vs. limit=22.5 +2024-07-29 05:30:36,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:36,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.57 vs. limit=10.0 +2024-07-29 05:30:37,524 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=246069.33333333334, ans=0.125 +2024-07-29 05:30:37,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246069.33333333334, ans=0.125 +2024-07-29 05:30:38,093 INFO [train.py:1114] (0/4) Epoch 19, batch 550, loss[loss=0.1875, simple_loss=0.2744, pruned_loss=0.05028, over 4601.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2635, pruned_loss=0.04014, over 877423.19 frames. ], batch size: 17, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:30:58,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=246109.33333333334, ans=0.05 +2024-07-29 05:31:00,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246109.33333333334, ans=0.1 +2024-07-29 05:31:06,998 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=246122.66666666666, ans=10.0 +2024-07-29 05:31:11,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=246136.0, ans=0.125 +2024-07-29 05:31:11,617 INFO [train.py:1114] (0/4) Epoch 19, batch 600, loss[loss=0.1999, simple_loss=0.291, pruned_loss=0.05434, over 4633.00 frames. 
], tot_loss[loss=0.1726, simple_loss=0.2641, pruned_loss=0.0405, over 891960.60 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:31:13,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=246136.0, ans=0.0 +2024-07-29 05:31:18,199 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+01 5.519e+01 6.137e+01 7.010e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 05:31:22,963 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=246149.33333333334, ans=0.02 +2024-07-29 05:31:26,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246162.66666666666, ans=0.125 +2024-07-29 05:31:36,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.00 vs. limit=15.0 +2024-07-29 05:31:41,495 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=246189.33333333334, ans=0.025 +2024-07-29 05:31:44,615 INFO [train.py:1114] (0/4) Epoch 19, batch 650, loss[loss=0.1515, simple_loss=0.2441, pruned_loss=0.02947, over 4767.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2637, pruned_loss=0.04017, over 903862.27 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:31:56,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246216.0, ans=0.125 +2024-07-29 05:32:02,460 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.67 vs. limit=8.0 +2024-07-29 05:32:11,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.49 vs. limit=10.0 +2024-07-29 05:32:15,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246256.0, ans=0.1 +2024-07-29 05:32:18,693 INFO [train.py:1114] (0/4) Epoch 19, batch 700, loss[loss=0.1472, simple_loss=0.2323, pruned_loss=0.03102, over 4643.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2647, pruned_loss=0.04037, over 912090.43 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:32:21,379 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.20 vs. 
limit=5.0 +2024-07-29 05:32:25,377 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.672e+01 6.319e+01 7.208e+01 1.301e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 05:32:25,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=246282.66666666666, ans=0.05 +2024-07-29 05:32:34,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246296.0, ans=0.125 +2024-07-29 05:32:43,555 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=246309.33333333334, ans=0.025 +2024-07-29 05:32:47,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=246322.66666666666, ans=12.0 +2024-07-29 05:32:50,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246322.66666666666, ans=0.1 +2024-07-29 05:32:54,397 INFO [train.py:1114] (0/4) Epoch 19, batch 750, loss[loss=0.1664, simple_loss=0.2601, pruned_loss=0.03631, over 4686.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2647, pruned_loss=0.04079, over 918557.96 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:32:59,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.36 vs. limit=15.0 +2024-07-29 05:33:01,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246349.33333333334, ans=0.1 +2024-07-29 05:33:05,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246349.33333333334, ans=0.0 +2024-07-29 05:33:20,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=246389.33333333334, ans=0.125 +2024-07-29 05:33:22,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0 +2024-07-29 05:33:27,239 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.10 vs. limit=15.0 +2024-07-29 05:33:28,100 INFO [train.py:1114] (0/4) Epoch 19, batch 800, loss[loss=0.1473, simple_loss=0.2408, pruned_loss=0.02688, over 4860.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2656, pruned_loss=0.04117, over 923315.16 frames. 
], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:33:34,594 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.665e+01 6.243e+01 7.363e+01 1.175e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 05:33:39,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=246416.0, ans=0.125 +2024-07-29 05:33:40,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=246416.0, ans=0.0 +2024-07-29 05:33:42,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=246429.33333333334, ans=0.05 +2024-07-29 05:33:53,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=246442.66666666666, ans=0.125 +2024-07-29 05:33:53,954 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.92 vs. limit=15.0 +2024-07-29 05:34:01,532 INFO [train.py:1114] (0/4) Epoch 19, batch 850, loss[loss=0.19, simple_loss=0.2857, pruned_loss=0.04708, over 4662.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2648, pruned_loss=0.04118, over 927749.31 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:34:13,137 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=15.0 +2024-07-29 05:34:20,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=246496.0, ans=0.025 +2024-07-29 05:34:23,846 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.16 vs. limit=22.5 +2024-07-29 05:34:28,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=246522.66666666666, ans=22.5 +2024-07-29 05:34:32,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=246522.66666666666, ans=0.0 +2024-07-29 05:34:34,788 INFO [train.py:1114] (0/4) Epoch 19, batch 900, loss[loss=0.1636, simple_loss=0.246, pruned_loss=0.04063, over 4841.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.265, pruned_loss=0.04179, over 928612.44 frames. 
], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:34:38,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=246536.0, ans=0.125 +2024-07-29 05:34:38,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=246536.0, ans=0.2 +2024-07-29 05:34:38,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=246536.0, ans=0.125 +2024-07-29 05:34:39,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=246536.0, ans=0.0 +2024-07-29 05:34:41,421 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.690e+01 6.264e+01 7.142e+01 9.700e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 05:34:58,644 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.69 vs. limit=15.0 +2024-07-29 05:35:01,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=246576.0, ans=0.125 +2024-07-29 05:35:04,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=246589.33333333334, ans=0.125 +2024-07-29 05:35:07,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=246589.33333333334, ans=0.09899494936611666 +2024-07-29 05:35:10,566 INFO [train.py:1114] (0/4) Epoch 19, batch 950, loss[loss=0.1493, simple_loss=0.2369, pruned_loss=0.03087, over 4776.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2639, pruned_loss=0.04072, over 930478.43 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:35:14,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=246602.66666666666, ans=0.025 +2024-07-29 05:35:28,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=246629.33333333334, ans=0.09899494936611666 +2024-07-29 05:35:34,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=246642.66666666666, ans=0.0 +2024-07-29 05:35:39,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0 +2024-07-29 05:35:44,959 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246656.0, ans=0.125 +2024-07-29 05:35:48,350 INFO [train.py:1114] (0/4) Epoch 19, batch 1000, loss[loss=0.1325, simple_loss=0.2234, pruned_loss=0.02078, over 4970.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2646, pruned_loss=0.04123, over 930350.04 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:35:50,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.63 vs. 
limit=15.0 +2024-07-29 05:35:51,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246669.33333333334, ans=0.1 +2024-07-29 05:35:54,963 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.789e+01 6.385e+01 7.432e+01 1.004e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 05:36:09,641 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=246709.33333333334, ans=0.125 +2024-07-29 05:36:09,675 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:36:20,800 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-29 05:36:21,550 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.38 vs. limit=15.0 +2024-07-29 05:36:21,821 INFO [train.py:1114] (0/4) Epoch 19, batch 1050, loss[loss=0.1712, simple_loss=0.2679, pruned_loss=0.03724, over 4872.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.04072, over 932824.97 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:36:37,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=246762.66666666666, ans=0.125 +2024-07-29 05:36:38,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=246762.66666666666, ans=0.09899494936611666 +2024-07-29 05:36:38,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=246762.66666666666, ans=0.95 +2024-07-29 05:36:38,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.37 vs. limit=12.0 +2024-07-29 05:36:54,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=246802.66666666666, ans=0.125 +2024-07-29 05:36:55,276 INFO [train.py:1114] (0/4) Epoch 19, batch 1100, loss[loss=0.1411, simple_loss=0.2374, pruned_loss=0.02239, over 4888.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2629, pruned_loss=0.04082, over 934814.38 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:36:58,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=246802.66666666666, ans=0.0 +2024-07-29 05:37:01,947 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.545e+01 5.987e+01 6.620e+01 9.087e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-29 05:37:11,434 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=246829.33333333334, ans=22.5 +2024-07-29 05:37:19,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.63 vs. 
limit=15.0 +2024-07-29 05:37:27,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246856.0, ans=0.1 +2024-07-29 05:37:28,457 INFO [train.py:1114] (0/4) Epoch 19, batch 1150, loss[loss=0.1788, simple_loss=0.2539, pruned_loss=0.05186, over 4888.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2629, pruned_loss=0.04077, over 934883.55 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:37:49,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=246909.33333333334, ans=0.125 +2024-07-29 05:37:54,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=246909.33333333334, ans=0.125 +2024-07-29 05:38:07,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=246936.0, ans=0.2 +2024-07-29 05:38:07,849 INFO [train.py:1114] (0/4) Epoch 19, batch 1200, loss[loss=0.2066, simple_loss=0.2936, pruned_loss=0.05982, over 4881.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.04089, over 933771.34 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:38:14,580 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.835e+01 6.415e+01 7.072e+01 9.087e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 05:38:18,136 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=246949.33333333334, ans=0.0 +2024-07-29 05:38:30,601 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.59 vs. limit=22.5 +2024-07-29 05:38:40,842 INFO [train.py:1114] (0/4) Epoch 19, batch 1250, loss[loss=0.2057, simple_loss=0.2979, pruned_loss=0.0568, over 4789.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2644, pruned_loss=0.04087, over 937646.86 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:38:42,269 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=247002.66666666666, ans=0.0 +2024-07-29 05:38:46,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=247002.66666666666, ans=0.07 +2024-07-29 05:38:48,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=247016.0, ans=0.0 +2024-07-29 05:39:10,490 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.94 vs. limit=15.0 +2024-07-29 05:39:14,108 INFO [train.py:1114] (0/4) Epoch 19, batch 1300, loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04949, over 4692.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.264, pruned_loss=0.0409, over 939091.42 frames. 
], batch size: 19, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:39:14,191 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:39:14,832 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=247069.33333333334, ans=0.125 +2024-07-29 05:39:20,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=247069.33333333334, ans=0.0 +2024-07-29 05:39:23,253 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.76 vs. limit=15.0 +2024-07-29 05:39:24,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247069.33333333334, ans=0.1 +2024-07-29 05:39:25,990 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.578e+01 5.975e+01 6.963e+01 1.137e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-29 05:39:40,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=247096.0, ans=0.0 +2024-07-29 05:39:44,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=247109.33333333334, ans=0.2 +2024-07-29 05:39:45,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=247109.33333333334, ans=0.125 +2024-07-29 05:39:50,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=247122.66666666666, ans=0.0 +2024-07-29 05:39:50,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=247122.66666666666, ans=0.125 +2024-07-29 05:39:51,934 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.63 vs. limit=22.5 +2024-07-29 05:39:56,793 INFO [train.py:1114] (0/4) Epoch 19, batch 1350, loss[loss=0.1694, simple_loss=0.2631, pruned_loss=0.03778, over 4749.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.0408, over 940774.72 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:39:57,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247136.0, ans=0.1 +2024-07-29 05:40:03,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=247149.33333333334, ans=0.2 +2024-07-29 05:40:10,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247162.66666666666, ans=0.1 +2024-07-29 05:40:19,430 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247176.0, ans=0.125 +2024-07-29 05:40:25,821 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.91 vs. limit=15.0 +2024-07-29 05:40:32,020 INFO [train.py:1114] (0/4) Epoch 19, batch 1400, loss[loss=0.1374, simple_loss=0.2277, pruned_loss=0.02355, over 4707.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2636, pruned_loss=0.04066, over 942616.09 frames. 
], batch size: 11, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:40:38,798 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.545e+01 5.620e+01 6.318e+01 7.023e+01 1.312e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 05:40:42,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=247216.0, ans=0.5 +2024-07-29 05:40:47,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=247229.33333333334, ans=0.0 +2024-07-29 05:40:54,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=247242.66666666666, ans=0.025 +2024-07-29 05:41:05,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=247256.0, ans=0.125 +2024-07-29 05:41:06,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=247256.0, ans=0.125 +2024-07-29 05:41:07,817 INFO [train.py:1114] (0/4) Epoch 19, batch 1450, loss[loss=0.1932, simple_loss=0.289, pruned_loss=0.04866, over 4674.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.04078, over 942572.75 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:41:08,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=247269.33333333334, ans=0.125 +2024-07-29 05:41:10,172 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.02 vs. limit=22.5 +2024-07-29 05:41:19,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247282.66666666666, ans=0.1 +2024-07-29 05:41:21,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=247282.66666666666, ans=0.0 +2024-07-29 05:41:22,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.81 vs. limit=22.5 +2024-07-29 05:41:23,225 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247296.0, ans=0.1 +2024-07-29 05:41:26,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.03 vs. limit=15.0 +2024-07-29 05:41:26,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=247296.0, ans=0.1 +2024-07-29 05:41:35,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=247322.66666666666, ans=0.125 +2024-07-29 05:41:42,895 INFO [train.py:1114] (0/4) Epoch 19, batch 1500, loss[loss=0.2061, simple_loss=0.2924, pruned_loss=0.05994, over 4812.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04076, over 942654.92 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:41:45,403 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.39 vs. 
limit=12.0 +2024-07-29 05:41:49,694 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 5.560e+01 6.078e+01 6.890e+01 1.039e+02, threshold=1.216e+02, percent-clipped=0.0 +2024-07-29 05:41:54,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=247349.33333333334, ans=0.125 +2024-07-29 05:42:07,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.82 vs. limit=15.0 +2024-07-29 05:42:16,781 INFO [train.py:1114] (0/4) Epoch 19, batch 1550, loss[loss=0.1855, simple_loss=0.2864, pruned_loss=0.04227, over 4896.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2636, pruned_loss=0.04095, over 938955.52 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:42:16,868 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247402.66666666666, ans=0.125 +2024-07-29 05:42:24,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=247416.0, ans=0.125 +2024-07-29 05:42:42,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=247442.66666666666, ans=0.125 +2024-07-29 05:42:43,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247456.0, ans=0.0 +2024-07-29 05:42:47,631 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=247456.0, ans=0.0 +2024-07-29 05:42:50,241 INFO [train.py:1114] (0/4) Epoch 19, batch 1600, loss[loss=0.184, simple_loss=0.2687, pruned_loss=0.04964, over 4876.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2635, pruned_loss=0.0413, over 937406.95 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:42:51,855 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=247469.33333333334, ans=0.0 +2024-07-29 05:42:56,506 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.01 vs. limit=15.0 +2024-07-29 05:42:57,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=247482.66666666666, ans=0.0 +2024-07-29 05:42:58,143 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.562e+01 6.323e+01 7.561e+01 1.065e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 05:43:00,332 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=247482.66666666666, ans=0.0 +2024-07-29 05:43:10,145 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.48 vs. 
limit=15.0 +2024-07-29 05:43:18,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247522.66666666666, ans=0.1 +2024-07-29 05:43:23,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=247522.66666666666, ans=0.0 +2024-07-29 05:43:23,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=247522.66666666666, ans=0.0 +2024-07-29 05:43:24,352 INFO [train.py:1114] (0/4) Epoch 19, batch 1650, loss[loss=0.1795, simple_loss=0.2744, pruned_loss=0.04226, over 4673.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2643, pruned_loss=0.04175, over 937857.43 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:43:34,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247549.33333333334, ans=0.1 +2024-07-29 05:43:50,711 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247576.0, ans=0.1 +2024-07-29 05:43:59,841 INFO [train.py:1114] (0/4) Epoch 19, batch 1700, loss[loss=0.1477, simple_loss=0.2408, pruned_loss=0.02734, over 4700.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.04066, over 939310.80 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:44:08,233 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.612e+01 6.497e+01 7.246e+01 1.413e+02, threshold=1.299e+02, percent-clipped=1.0 +2024-07-29 05:44:08,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247616.0, ans=0.125 +2024-07-29 05:44:13,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=247616.0, ans=0.025 +2024-07-29 05:44:33,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247656.0, ans=0.125 +2024-07-29 05:44:34,155 INFO [train.py:1114] (0/4) Epoch 19, batch 1750, loss[loss=0.1502, simple_loss=0.2316, pruned_loss=0.03434, over 4793.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.04046, over 940367.51 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:44:36,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247669.33333333334, ans=0.125 +2024-07-29 05:44:39,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.36 vs. 
limit=10.0 +2024-07-29 05:44:43,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247682.66666666666, ans=0.125 +2024-07-29 05:44:47,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247696.0, ans=0.125 +2024-07-29 05:44:48,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=247696.0, ans=0.0 +2024-07-29 05:44:49,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=247696.0, ans=0.125 +2024-07-29 05:44:58,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247709.33333333334, ans=0.1 +2024-07-29 05:44:59,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=247709.33333333334, ans=0.125 +2024-07-29 05:45:10,892 INFO [train.py:1114] (0/4) Epoch 19, batch 1800, loss[loss=0.1859, simple_loss=0.283, pruned_loss=0.04439, over 4632.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2635, pruned_loss=0.04063, over 940802.26 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:45:11,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=247736.0, ans=0.2 +2024-07-29 05:45:15,770 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=247736.0, ans=0.125 +2024-07-29 05:45:18,180 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.396e+01 5.835e+01 6.491e+01 8.060e+01 1.072e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 05:45:18,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.45 vs. limit=12.0 +2024-07-29 05:45:45,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=247776.0, ans=0.0 +2024-07-29 05:45:46,077 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247776.0, ans=0.125 +2024-07-29 05:45:56,313 INFO [train.py:1114] (0/4) Epoch 19, batch 1850, loss[loss=0.1808, simple_loss=0.2744, pruned_loss=0.04361, over 4809.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.263, pruned_loss=0.04045, over 940865.42 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:46:14,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247829.33333333334, ans=0.1 +2024-07-29 05:46:33,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=247856.0, ans=0.2 +2024-07-29 05:46:34,849 INFO [train.py:1114] (0/4) Epoch 19, batch 1900, loss[loss=0.1721, simple_loss=0.2689, pruned_loss=0.03763, over 4661.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2635, pruned_loss=0.04041, over 941829.66 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:46:36,709 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. 
limit=12.0 +2024-07-29 05:46:43,055 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.675e+01 6.450e+01 7.490e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 05:46:51,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247896.0, ans=0.1 +2024-07-29 05:47:07,598 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.80 vs. limit=15.0 +2024-07-29 05:47:11,512 INFO [train.py:1114] (0/4) Epoch 19, batch 1950, loss[loss=0.1803, simple_loss=0.2648, pruned_loss=0.04791, over 4913.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2648, pruned_loss=0.04053, over 943641.76 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:47:24,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=247962.66666666666, ans=0.025 +2024-07-29 05:47:43,744 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:47:45,642 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:47:46,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247976.0, ans=0.125 +2024-07-29 05:47:57,281 INFO [train.py:1114] (0/4) Epoch 19, batch 2000, loss[loss=0.1474, simple_loss=0.225, pruned_loss=0.03494, over 4818.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2649, pruned_loss=0.04065, over 940976.65 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:48:04,713 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.516e+01 5.566e+01 6.044e+01 6.728e+01 1.044e+02, threshold=1.209e+02, percent-clipped=0.0 +2024-07-29 05:48:04,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248016.0, ans=0.125 +2024-07-29 05:48:08,844 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:48:19,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=248042.66666666666, ans=0.125 +2024-07-29 05:48:29,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=248056.0, ans=0.2 +2024-07-29 05:48:29,927 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248056.0, ans=0.1 +2024-07-29 05:48:31,076 INFO [train.py:1114] (0/4) Epoch 19, batch 2050, loss[loss=0.1741, simple_loss=0.2583, pruned_loss=0.04498, over 4615.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2644, pruned_loss=0.04085, over 938820.37 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:48:34,085 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=15.0 +2024-07-29 05:48:36,976 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.97 vs. 
limit=15.0 +2024-07-29 05:48:41,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=248082.66666666666, ans=0.125 +2024-07-29 05:48:44,686 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=248096.0, ans=0.125 +2024-07-29 05:48:50,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248096.0, ans=0.125 +2024-07-29 05:48:50,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=248096.0, ans=0.125 +2024-07-29 05:48:53,831 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:48:57,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=248109.33333333334, ans=0.0 +2024-07-29 05:48:59,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=248122.66666666666, ans=0.05 +2024-07-29 05:49:01,407 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.70 vs. limit=15.0 +2024-07-29 05:49:07,041 INFO [train.py:1114] (0/4) Epoch 19, batch 2100, loss[loss=0.1666, simple_loss=0.2509, pruned_loss=0.04114, over 4758.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04051, over 940714.03 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:49:07,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248136.0, ans=0.1 +2024-07-29 05:49:14,322 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.765e+01 5.813e+01 6.323e+01 7.221e+01 1.090e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 05:49:14,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=248149.33333333334, ans=0.125 +2024-07-29 05:49:16,161 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-07-29 05:49:27,354 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.39 vs. limit=22.5 +2024-07-29 05:49:33,013 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=248189.33333333334, ans=0.0 +2024-07-29 05:49:35,789 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.91 vs. limit=10.0 +2024-07-29 05:49:40,152 INFO [train.py:1114] (0/4) Epoch 19, batch 2150, loss[loss=0.1611, simple_loss=0.2487, pruned_loss=0.03679, over 4904.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2634, pruned_loss=0.04018, over 943943.29 frames. 
], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:49:46,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248202.66666666666, ans=0.1 +2024-07-29 05:49:47,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0 +2024-07-29 05:49:48,912 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=248216.0, ans=0.125 +2024-07-29 05:49:51,486 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=248216.0, ans=0.125 +2024-07-29 05:49:54,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=248216.0, ans=0.125 +2024-07-29 05:49:55,915 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.94 vs. limit=10.0 +2024-07-29 05:50:06,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=248242.66666666666, ans=0.125 +2024-07-29 05:50:07,648 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=248242.66666666666, ans=0.0 +2024-07-29 05:50:14,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248256.0, ans=0.125 +2024-07-29 05:50:18,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=248256.0, ans=0.0 +2024-07-29 05:50:19,710 INFO [train.py:1114] (0/4) Epoch 19, batch 2200, loss[loss=0.1898, simple_loss=0.2908, pruned_loss=0.04439, over 4809.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2629, pruned_loss=0.04016, over 942571.95 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:50:27,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.574e+01 6.118e+01 6.873e+01 9.817e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 05:50:29,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=248282.66666666666, ans=0.125 +2024-07-29 05:51:15,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=248309.33333333334, ans=0.0 +2024-07-29 05:51:25,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248336.0, ans=0.1 +2024-07-29 05:51:26,222 INFO [train.py:1114] (0/4) Epoch 19, batch 2250, loss[loss=0.1508, simple_loss=0.2502, pruned_loss=0.02576, over 4698.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2631, pruned_loss=0.04049, over 941655.74 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:51:26,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248336.0, ans=0.125 +2024-07-29 05:51:39,710 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.96 vs. 
limit=12.0 +2024-07-29 05:51:40,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248362.66666666666, ans=0.125 +2024-07-29 05:51:45,394 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:51:49,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=248376.0, ans=0.2 +2024-07-29 05:51:54,935 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.72 vs. limit=15.0 +2024-07-29 05:52:00,498 INFO [train.py:1114] (0/4) Epoch 19, batch 2300, loss[loss=0.17, simple_loss=0.2487, pruned_loss=0.04564, over 4948.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04072, over 939829.42 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:52:01,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=248402.66666666666, ans=0.04949747468305833 +2024-07-29 05:52:04,023 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248402.66666666666, ans=0.1 +2024-07-29 05:52:04,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=248402.66666666666, ans=0.125 +2024-07-29 05:52:06,432 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.65 vs. limit=15.0 +2024-07-29 05:52:09,134 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.821e+01 6.321e+01 7.286e+01 1.025e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-29 05:52:21,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=248442.66666666666, ans=0.2 +2024-07-29 05:52:22,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=248442.66666666666, ans=0.2 +2024-07-29 05:52:22,659 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.31 vs. limit=15.0 +2024-07-29 05:52:27,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=248442.66666666666, ans=0.125 +2024-07-29 05:52:35,453 INFO [train.py:1114] (0/4) Epoch 19, batch 2350, loss[loss=0.1632, simple_loss=0.2568, pruned_loss=0.03484, over 4641.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04079, over 941544.36 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:52:59,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=248509.33333333334, ans=0.125 +2024-07-29 05:53:00,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248509.33333333334, ans=0.1 +2024-07-29 05:53:04,686 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.99 vs. 
limit=10.0 +2024-07-29 05:53:11,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248522.66666666666, ans=0.125 +2024-07-29 05:53:12,724 INFO [train.py:1114] (0/4) Epoch 19, batch 2400, loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04104, over 4638.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2632, pruned_loss=0.04103, over 941285.71 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:53:15,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=248536.0, ans=0.125 +2024-07-29 05:53:15,673 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248536.0, ans=0.125 +2024-07-29 05:53:18,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=248536.0, ans=0.125 +2024-07-29 05:53:20,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248549.33333333334, ans=0.1 +2024-07-29 05:53:21,909 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.538e+01 5.999e+01 6.676e+01 9.357e+01, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 05:53:28,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=248562.66666666666, ans=0.0 +2024-07-29 05:53:33,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=248562.66666666666, ans=0.025 +2024-07-29 05:53:48,253 INFO [train.py:1114] (0/4) Epoch 19, batch 2450, loss[loss=0.1652, simple_loss=0.2583, pruned_loss=0.03603, over 4701.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2642, pruned_loss=0.04146, over 936753.80 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:53:50,455 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=248602.66666666666, ans=0.0 +2024-07-29 05:53:51,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248602.66666666666, ans=0.125 +2024-07-29 05:54:13,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.52 vs. limit=15.0 +2024-07-29 05:54:16,164 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=248656.0, ans=0.025 +2024-07-29 05:54:21,385 INFO [train.py:1114] (0/4) Epoch 19, batch 2500, loss[loss=0.1653, simple_loss=0.2688, pruned_loss=0.0309, over 4808.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2641, pruned_loss=0.0415, over 938912.09 frames. 
], batch size: 14, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:54:28,633 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.769e+01 6.395e+01 7.394e+01 1.044e+02, threshold=1.279e+02, percent-clipped=0.0
+2024-07-29 05:54:30,765 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:54:37,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248696.0, ans=0.1
+2024-07-29 05:54:45,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=248709.33333333334, ans=0.125
+2024-07-29 05:54:46,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248709.33333333334, ans=0.125
+2024-07-29 05:54:50,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248722.66666666666, ans=0.1
+2024-07-29 05:54:54,759 INFO [train.py:1114] (0/4) Epoch 19, batch 2550, loss[loss=0.165, simple_loss=0.2541, pruned_loss=0.03802, over 4826.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2639, pruned_loss=0.04117, over 938418.27 frames. ], batch size: 11, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:54:55,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=248736.0, ans=0.07
+2024-07-29 05:55:20,748 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248776.0, ans=0.1
+2024-07-29 05:55:23,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=248789.33333333334, ans=0.025
+2024-07-29 05:55:25,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=248789.33333333334, ans=0.0
+2024-07-29 05:55:25,907 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=248789.33333333334, ans=0.125
+2024-07-29 05:55:28,125 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248802.66666666666, ans=0.125
+2024-07-29 05:55:28,619 INFO [train.py:1114] (0/4) Epoch 19, batch 2600, loss[loss=0.1775, simple_loss=0.2717, pruned_loss=0.04164, over 4898.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2638, pruned_loss=0.0407, over 937736.97 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:55:35,908 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.520e+01 6.096e+01 6.841e+01 9.069e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-29 05:55:48,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=248829.33333333334, ans=0.125
+2024-07-29 05:55:49,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=248842.66666666666, ans=0.125
+2024-07-29 05:56:00,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=248856.0, ans=0.025
+2024-07-29 05:56:01,984 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.30 vs. limit=22.5
+2024-07-29 05:56:03,689 INFO [train.py:1114] (0/4) Epoch 19, batch 2650, loss[loss=0.1923, simple_loss=0.2811, pruned_loss=0.05176, over 4663.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2648, pruned_loss=0.04091, over 939695.37 frames. ], batch size: 16, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:56:14,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248882.66666666666, ans=0.1
+2024-07-29 05:56:16,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0
+2024-07-29 05:56:39,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248922.66666666666, ans=0.1
+2024-07-29 05:56:40,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=248922.66666666666, ans=0.125
+2024-07-29 05:56:42,258 INFO [train.py:1114] (0/4) Epoch 19, batch 2700, loss[loss=0.1835, simple_loss=0.2658, pruned_loss=0.05059, over 4741.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04127, over 939940.40 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:57:05,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=248936.0, ans=0.0
+2024-07-29 05:57:09,486 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.838e+01 6.361e+01 7.244e+01 1.025e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-29 05:57:09,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=248949.33333333334, ans=0.125
+2024-07-29 05:57:13,940 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.24 vs. limit=15.0
+2024-07-29 05:57:14,408 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=248949.33333333334, ans=0.125
+2024-07-29 05:57:26,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=248976.0, ans=0.125
+2024-07-29 05:57:27,880 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=248976.0, ans=0.125
+2024-07-29 05:57:35,859 INFO [train.py:1114] (0/4) Epoch 19, batch 2750, loss[loss=0.1781, simple_loss=0.2544, pruned_loss=0.05083, over 4714.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2635, pruned_loss=0.04114, over 939525.81 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 05:57:58,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=249042.66666666666, ans=0.95
+2024-07-29 05:57:59,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-29 05:58:01,323 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:58:09,570 INFO [train.py:1114] (0/4) Epoch 19, batch 2800, loss[loss=0.2165, simple_loss=0.2917, pruned_loss=0.0706, over 3368.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2637, pruned_loss=0.04109, over 937278.92 frames. ], batch size: 36, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:58:15,075 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=249069.33333333334, ans=0.025
+2024-07-29 05:58:17,638 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.861e+01 6.601e+01 8.054e+01 1.135e+02, threshold=1.320e+02, percent-clipped=0.0
+2024-07-29 05:58:20,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=249082.66666666666, ans=0.125
+2024-07-29 05:58:22,071 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=15.0
+2024-07-29 05:58:22,885 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. limit=6.0
+2024-07-29 05:58:26,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=249096.0, ans=0.1
+2024-07-29 05:58:41,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249122.66666666666, ans=0.125
+2024-07-29 05:58:44,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249122.66666666666, ans=0.125
+2024-07-29 05:58:45,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249122.66666666666, ans=0.125
+2024-07-29 05:58:46,859 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:58:47,371 INFO [train.py:1114] (0/4) Epoch 19, batch 2850, loss[loss=0.1484, simple_loss=0.2363, pruned_loss=0.03031, over 4963.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2646, pruned_loss=0.04153, over 935611.63 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:58:50,597 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.78 vs. limit=10.0
+2024-07-29 05:58:51,905 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.51 vs. limit=22.5
+2024-07-29 05:58:55,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=249149.33333333334, ans=0.125
+2024-07-29 05:59:10,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=249176.0, ans=0.125
+2024-07-29 05:59:14,853 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.81 vs. limit=6.0
+2024-07-29 05:59:21,142 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:59:22,324 INFO [train.py:1114] (0/4) Epoch 19, batch 2900, loss[loss=0.1593, simple_loss=0.2519, pruned_loss=0.03335, over 4816.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2658, pruned_loss=0.04171, over 939620.95 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:59:28,902 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=12.0
+2024-07-29 05:59:30,349 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.762e+01 6.380e+01 7.309e+01 1.230e+02, threshold=1.276e+02, percent-clipped=0.0
+2024-07-29 05:59:30,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249216.0, ans=0.1
+2024-07-29 05:59:32,658 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=249216.0, ans=10.0
+2024-07-29 05:59:38,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=249229.33333333334, ans=0.0
+2024-07-29 05:59:49,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=249256.0, ans=0.125
+2024-07-29 05:59:53,576 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.38 vs. limit=22.5
+2024-07-29 05:59:53,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249256.0, ans=0.125
+2024-07-29 05:59:55,844 INFO [train.py:1114] (0/4) Epoch 19, batch 2950, loss[loss=0.1433, simple_loss=0.2371, pruned_loss=0.02477, over 4719.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2636, pruned_loss=0.04063, over 938592.58 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 06:00:16,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=249309.33333333334, ans=0.0
+2024-07-29 06:00:19,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=249309.33333333334, ans=0.0
+2024-07-29 06:00:29,676 INFO [train.py:1114] (0/4) Epoch 19, batch 3000, loss[loss=0.177, simple_loss=0.2655, pruned_loss=0.04426, over 4757.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2627, pruned_loss=0.04034, over 938225.97 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 06:00:29,676 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-29 06:00:34,218 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.4952, 4.7020, 4.7250, 4.6052], device='cuda:0')
+2024-07-29 06:00:36,349 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9386, 3.4545, 3.3586, 3.7094], device='cuda:0')
+2024-07-29 06:00:38,963 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.1048, 4.9004, 4.2802, 4.0768], device='cuda:0')
+2024-07-29 06:00:39,929 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.2635, 5.1125, 4.5413, 4.6883], device='cuda:0')
+2024-07-29 06:00:41,088 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.161, simple_loss=0.2631, pruned_loss=0.02943, over 944034.00 frames. 
+2024-07-29 06:00:41,088 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-29 06:00:42,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249336.0, ans=0.125
+2024-07-29 06:00:50,058 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.637e+01 6.118e+01 7.161e+01 1.064e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-29 06:00:52,671 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-29 06:01:06,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=249376.0, ans=0.125
+2024-07-29 06:01:08,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=249389.33333333334, ans=0.025
+2024-07-29 06:01:10,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249389.33333333334, ans=0.125
+2024-07-29 06:01:14,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249402.66666666666, ans=0.0
+2024-07-29 06:01:15,381 INFO [train.py:1114] (0/4) Epoch 19, batch 3050, loss[loss=0.147, simple_loss=0.2474, pruned_loss=0.02337, over 4639.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2639, pruned_loss=0.04079, over 937166.43 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 06:01:22,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=249402.66666666666, ans=0.09899494936611666
+2024-07-29 06:01:28,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=249416.0, ans=0.125
+2024-07-29 06:01:32,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249429.33333333334, ans=0.1
+2024-07-29 06:01:33,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=249429.33333333334, ans=0.2
+2024-07-29 06:01:38,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=249442.66666666666, ans=0.0
+2024-07-29 06:01:51,071 INFO [train.py:1114] (0/4) Epoch 19, batch 3100, loss[loss=0.1893, simple_loss=0.2966, pruned_loss=0.04106, over 4631.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2637, pruned_loss=0.04049, over 938125.54 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 16.0
+2024-07-29 06:01:51,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=249469.33333333334, ans=10.0
+2024-07-29 06:01:59,703 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.499e+01 6.213e+01 7.046e+01 1.053e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-29 06:02:00,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=249482.66666666666, ans=0.0
+2024-07-29 06:02:06,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=249496.0, ans=0.5
+2024-07-29 06:02:11,183 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0
+2024-07-29 06:02:16,374 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0
+2024-07-29 06:02:24,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=249536.0, ans=0.125
+2024-07-29 06:02:24,695 INFO [train.py:1114] (0/4) Epoch 19, batch 3150, loss[loss=0.1658, simple_loss=0.2626, pruned_loss=0.03448, over 4615.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04049, over 938600.78 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 16.0
+2024-07-29 06:02:30,658 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.21 vs. limit=15.0
+2024-07-29 06:02:38,581 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.43 vs. limit=15.0
+2024-07-29 06:02:49,530 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=249576.0, ans=0.125
+2024-07-29 06:02:53,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=249589.33333333334, ans=0.0
+2024-07-29 06:03:01,892 INFO [train.py:1114] (0/4) Epoch 19, batch 3200, loss[loss=0.1855, simple_loss=0.2758, pruned_loss=0.0476, over 4827.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03974, over 940168.14 frames. ], batch size: 13, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:03:09,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=249616.0, ans=0.2
+2024-07-29 06:03:10,259 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.924e+01 6.807e+01 8.203e+01 1.254e+02, threshold=1.361e+02, percent-clipped=1.0
+2024-07-29 06:03:35,977 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249642.66666666666, ans=0.125
+2024-07-29 06:03:39,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=249656.0, ans=0.2
+2024-07-29 06:03:41,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249656.0, ans=0.1
+2024-07-29 06:03:48,467 INFO [train.py:1114] (0/4) Epoch 19, batch 3250, loss[loss=0.1525, simple_loss=0.2553, pruned_loss=0.02483, over 4934.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2631, pruned_loss=0.04019, over 941439.63 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:06:07,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=249696.0, ans=0.0
+2024-07-29 06:06:13,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=249709.33333333334, ans=0.125
+2024-07-29 06:07:11,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=249709.33333333334, ans=0.0
+2024-07-29 06:07:20,212 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.82 vs. limit=22.5
+2024-07-29 06:07:20,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.43 vs. limit=15.0
+2024-07-29 06:07:20,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249736.0, ans=0.0
+2024-07-29 06:07:21,181 INFO [train.py:1114] (0/4) Epoch 19, batch 3300, loss[loss=0.1855, simple_loss=0.2937, pruned_loss=0.03861, over 4679.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2634, pruned_loss=0.04042, over 941465.75 frames. ], batch size: 19, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:07:28,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=249736.0, ans=0.0
+2024-07-29 06:07:38,742 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 5.786e+01 6.492e+01 7.177e+01 1.036e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-29 06:07:39,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=249749.33333333334, ans=0.125
+2024-07-29 06:07:49,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.78 vs. limit=22.5
+2024-07-29 06:07:50,017 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.01 vs. limit=22.5
+2024-07-29 06:07:57,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=249789.33333333334, ans=0.09899494936611666
+2024-07-29 06:08:07,290 INFO [train.py:1114] (0/4) Epoch 19, batch 3350, loss[loss=0.191, simple_loss=0.2856, pruned_loss=0.04822, over 4624.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2641, pruned_loss=0.04088, over 938535.57 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:08:08,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249802.66666666666, ans=0.125
+2024-07-29 06:08:25,367 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.10 vs. limit=15.0
+2024-07-29 06:08:32,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=249842.66666666666, ans=0.0
+2024-07-29 06:08:41,237 INFO [train.py:1114] (0/4) Epoch 19, batch 3400, loss[loss=0.1482, simple_loss=0.2333, pruned_loss=0.03153, over 4780.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.04065, over 936748.31 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:08:49,839 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.488e+01 5.998e+01 6.910e+01 1.087e+02, threshold=1.200e+02, percent-clipped=0.0
+2024-07-29 06:09:04,790 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=249909.33333333334, ans=0.125
+2024-07-29 06:09:04,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=249909.33333333334, ans=0.09899494936611666
+2024-07-29 06:09:15,442 INFO [train.py:1114] (0/4) Epoch 19, batch 3450, loss[loss=0.1635, simple_loss=0.2622, pruned_loss=0.03243, over 4699.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2636, pruned_loss=0.04096, over 937127.35 frames. ], batch size: 19, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:09:21,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=249949.33333333334, ans=0.2
+2024-07-29 06:09:30,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249962.66666666666, ans=0.0
+2024-07-29 06:09:30,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=249962.66666666666, ans=0.2
+2024-07-29 06:09:44,869 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=249989.33333333334, ans=0.0
+2024-07-29 06:09:48,787 INFO [train.py:1114] (0/4) Epoch 19, batch 3500, loss[loss=0.1408, simple_loss=0.2316, pruned_loss=0.02499, over 4940.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2624, pruned_loss=0.04037, over 937780.84 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:09:49,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250002.66666666666, ans=0.125
+2024-07-29 06:09:57,887 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.660e+01 6.096e+01 6.757e+01 8.865e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-29 06:10:24,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.38 vs. limit=15.0
+2024-07-29 06:10:24,714 INFO [train.py:1114] (0/4) Epoch 19, batch 3550, loss[loss=0.1767, simple_loss=0.2803, pruned_loss=0.03655, over 4660.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.04059, over 938478.89 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:10:25,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=250069.33333333334, ans=0.025
+2024-07-29 06:10:31,594 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=250082.66666666666, ans=10.0
+2024-07-29 06:10:41,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=250082.66666666666, ans=0.0
+2024-07-29 06:10:56,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250096.0, ans=0.125
+2024-07-29 06:11:02,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=250109.33333333334, ans=0.0
+2024-07-29 06:11:17,202 INFO [train.py:1114] (0/4) Epoch 19, batch 3600, loss[loss=0.1681, simple_loss=0.2607, pruned_loss=0.03777, over 4966.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2631, pruned_loss=0.04065, over 940145.26 frames. ], batch size: 13, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:11:28,761 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.577e+01 6.277e+01 7.321e+01 1.396e+02, threshold=1.255e+02, percent-clipped=3.0
+2024-07-29 06:11:43,601 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=250176.0, ans=0.125
+2024-07-29 06:11:43,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=250176.0, ans=0.0
+2024-07-29 06:11:51,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=250189.33333333334, ans=0.2
+2024-07-29 06:11:53,651 INFO [train.py:1114] (0/4) Epoch 19, batch 3650, loss[loss=0.1786, simple_loss=0.2762, pruned_loss=0.04055, over 4901.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2623, pruned_loss=0.04048, over 940143.14 frames. ], batch size: 15, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:12:15,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=250242.66666666666, ans=0.09899494936611666
+2024-07-29 06:12:15,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250242.66666666666, ans=0.1
+2024-07-29 06:12:19,533 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.75 vs. limit=15.0
+2024-07-29 06:12:24,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.87 vs. limit=15.0
+2024-07-29 06:12:27,269 INFO [train.py:1114] (0/4) Epoch 19, batch 3700, loss[loss=0.1985, simple_loss=0.2896, pruned_loss=0.05374, over 4926.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04009, over 941667.31 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:12:27,303 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=250269.33333333334, ans=0.125
+2024-07-29 06:12:35,698 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.449e+01 6.027e+01 6.709e+01 1.105e+02, threshold=1.205e+02, percent-clipped=0.0
+2024-07-29 06:12:58,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=250322.66666666666, ans=0.2
+2024-07-29 06:13:02,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=250322.66666666666, ans=0.2
+2024-07-29 06:13:05,002 INFO [train.py:1114] (0/4) Epoch 19, batch 3750, loss[loss=0.1478, simple_loss=0.23, pruned_loss=0.03282, over 4802.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2618, pruned_loss=0.04009, over 942925.10 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:13:07,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=250336.0, ans=0.09899494936611666
+2024-07-29 06:13:08,480 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250336.0, ans=0.125
+2024-07-29 06:13:09,324 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-29 06:13:25,999 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=250376.0, ans=0.07
+2024-07-29 06:13:39,998 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.99 vs. limit=22.5
+2024-07-29 06:13:41,720 INFO [train.py:1114] (0/4) Epoch 19, batch 3800, loss[loss=0.1727, simple_loss=0.2774, pruned_loss=0.03398, over 4803.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2617, pruned_loss=0.04013, over 941267.68 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:13:44,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=250402.66666666666, ans=0.125
+2024-07-29 06:13:46,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=250402.66666666666, ans=0.5
+2024-07-29 06:13:48,780 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=250416.0, ans=0.125
+2024-07-29 06:13:50,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.347e+01 5.643e+01 6.466e+01 7.181e+01 9.486e+01, threshold=1.293e+02, percent-clipped=0.0
+2024-07-29 06:14:08,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250456.0, ans=0.125
+2024-07-29 06:14:15,721 INFO [train.py:1114] (0/4) Epoch 19, batch 3850, loss[loss=0.1716, simple_loss=0.269, pruned_loss=0.0371, over 4862.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2612, pruned_loss=0.03969, over 942197.76 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:14:28,019 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0
+2024-07-29 06:14:30,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250482.66666666666, ans=0.125
+2024-07-29 06:14:34,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=250482.66666666666, ans=0.0
+2024-07-29 06:14:48,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=250509.33333333334, ans=15.0
+2024-07-29 06:14:55,840 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0
+2024-07-29 06:14:56,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=250522.66666666666, ans=0.125
+2024-07-29 06:14:58,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=250522.66666666666, ans=0.2
+2024-07-29 06:15:00,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=250522.66666666666, ans=0.125
+2024-07-29 06:15:01,945 INFO [train.py:1114] (0/4) Epoch 19, batch 3900, loss[loss=0.1737, simple_loss=0.264, pruned_loss=0.04174, over 4810.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2614, pruned_loss=0.03989, over 943025.87 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:15:04,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=250536.0, ans=0.2
+2024-07-29 06:15:08,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=250549.33333333334, ans=0.125
+2024-07-29 06:15:08,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=250549.33333333334, ans=0.125
+2024-07-29 06:15:10,489 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.443e+01 5.935e+01 6.800e+01 9.417e+01, threshold=1.187e+02, percent-clipped=0.0
+2024-07-29 06:15:12,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=250549.33333333334, ans=0.0
+2024-07-29 06:15:15,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=250562.66666666666, ans=0.5
+2024-07-29 06:15:18,950 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=8.0
+2024-07-29 06:15:19,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=250562.66666666666, ans=0.0
+2024-07-29 06:15:37,622 INFO [train.py:1114] (0/4) Epoch 19, batch 3950, loss[loss=0.1851, simple_loss=0.2789, pruned_loss=0.0457, over 4829.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2615, pruned_loss=0.03993, over 944822.89 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:15:38,050 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.99 vs. limit=10.0
+2024-07-29 06:15:43,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.83 vs. limit=22.5
+2024-07-29 06:15:44,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=250616.0, ans=0.0
+2024-07-29 06:15:45,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=250616.0, ans=0.125
+2024-07-29 06:15:46,240 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=250616.0, ans=0.0
+2024-07-29 06:15:47,027 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250616.0, ans=0.125
+2024-07-29 06:16:09,561 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-188000.pt
+2024-07-29 06:16:13,301 INFO [train.py:1114] (0/4) Epoch 19, batch 4000, loss[loss=0.1296, simple_loss=0.215, pruned_loss=0.02215, over 4776.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2616, pruned_loss=0.03996, over 941475.68 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:16:21,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250669.33333333334, ans=0.125
+2024-07-29 06:16:24,309 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.630e+01 6.259e+01 7.111e+01 1.064e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 06:16:40,503 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 06:16:49,234 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.52 vs. limit=15.0
+2024-07-29 06:16:49,519 INFO [train.py:1114] (0/4) Epoch 19, batch 4050, loss[loss=0.1812, simple_loss=0.274, pruned_loss=0.04422, over 3291.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2611, pruned_loss=0.04017, over 939792.39 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:16:58,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.43 vs. limit=22.5
+2024-07-29 06:17:00,292 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.52 vs. limit=15.0
+2024-07-29 06:17:05,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250762.66666666666, ans=0.125
+2024-07-29 06:17:07,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=250762.66666666666, ans=0.125
+2024-07-29 06:17:10,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250776.0, ans=0.1
+2024-07-29 06:17:23,793 INFO [train.py:1114] (0/4) Epoch 19, batch 4100, loss[loss=0.2002, simple_loss=0.3023, pruned_loss=0.049, over 4893.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2626, pruned_loss=0.04047, over 938888.26 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:17:24,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0
+2024-07-29 06:17:32,505 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.808e+01 6.562e+01 7.760e+01 1.349e+02, threshold=1.312e+02, percent-clipped=1.0
+2024-07-29 06:17:35,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=250816.0, ans=0.125
+2024-07-29 06:17:37,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250829.33333333334, ans=0.1
+2024-07-29 06:17:55,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=250842.66666666666, ans=0.04949747468305833
+2024-07-29 06:17:59,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=250856.0, ans=0.1
+2024-07-29 06:18:04,516 INFO [train.py:1114] (0/4) Epoch 19, batch 4150, loss[loss=0.2047, simple_loss=0.2993, pruned_loss=0.05506, over 4828.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2615, pruned_loss=0.04022, over 938298.25 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:18:22,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250882.66666666666, ans=0.125
+2024-07-29 06:19:22,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=250909.33333333334, ans=0.125
+2024-07-29 06:19:23,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=250909.33333333334, ans=0.2
+2024-07-29 06:19:23,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=250909.33333333334, ans=0.0
+2024-07-29 06:19:24,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250909.33333333334, ans=0.1
+2024-07-29 06:19:25,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=250909.33333333334, ans=15.0
+2024-07-29 06:19:58,732 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.37 vs. limit=22.5
+2024-07-29 06:19:59,621 INFO [train.py:1114] (0/4) Epoch 19, batch 4200, loss[loss=0.1544, simple_loss=0.2599, pruned_loss=0.02445, over 4905.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2624, pruned_loss=0.04025, over 939583.54 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:20:01,641 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 06:20:02,450 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250936.0, ans=0.1
+2024-07-29 06:20:17,255 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.500e+01 5.908e+01 6.556e+01 1.150e+02, threshold=1.182e+02, percent-clipped=0.0
+2024-07-29 06:20:18,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.13 vs. limit=15.0
+2024-07-29 06:20:18,652 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=250949.33333333334, ans=0.025
+2024-07-29 06:20:18,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250949.33333333334, ans=0.125
+2024-07-29 06:20:31,045 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=250976.0, ans=0.0
+2024-07-29 06:20:46,974 INFO [train.py:1114] (0/4) Epoch 19, batch 4250, loss[loss=0.1554, simple_loss=0.2433, pruned_loss=0.03374, over 4640.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2624, pruned_loss=0.04, over 940346.22 frames. ], batch size: 12, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:20:47,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251002.66666666666, ans=0.1
+2024-07-29 06:20:52,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=251002.66666666666, ans=0.0
+2024-07-29 06:20:54,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=251002.66666666666, ans=0.02
+2024-07-29 06:22:52,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0
+2024-07-29 06:22:57,467 INFO [train.py:1114] (0/4) Epoch 19, batch 4300, loss[loss=0.1661, simple_loss=0.2502, pruned_loss=0.04102, over 4761.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2629, pruned_loss=0.04009, over 938979.80 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:23:01,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251069.33333333334, ans=0.125
+2024-07-29 06:23:47,226 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.705e+01 6.376e+01 7.099e+01 1.039e+02, threshold=1.275e+02, percent-clipped=0.0
+2024-07-29 06:25:23,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251096.0, ans=0.0
+2024-07-29 06:25:40,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=251109.33333333334, ans=0.0
+2024-07-29 06:25:41,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251109.33333333334, ans=0.1
+2024-07-29 06:26:43,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251122.66666666666, ans=0.125
+2024-07-29 06:26:51,245 INFO [train.py:1114] (0/4) Epoch 19, batch 4350, loss[loss=0.1424, simple_loss=0.2382, pruned_loss=0.02328, over 4767.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03969, over 940122.38 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:27:42,615 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.18 vs. limit=15.0
+2024-07-29 06:28:23,138 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=251162.66666666666, ans=0.025
+2024-07-29 06:28:26,446 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=251162.66666666666, ans=0.0
+2024-07-29 06:28:27,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=251162.66666666666, ans=0.0
+2024-07-29 06:28:29,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=251176.0, ans=0.125
+2024-07-29 06:28:38,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251189.33333333334, ans=0.125
+2024-07-29 06:28:43,570 INFO [train.py:1114] (0/4) Epoch 19, batch 4400, loss[loss=0.1823, simple_loss=0.2792, pruned_loss=0.04276, over 4811.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03953, over 939882.08 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:28:47,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=251202.66666666666, ans=0.125
+2024-07-29 06:28:48,416 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=251202.66666666666, ans=0.1
+2024-07-29 06:28:52,352 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.682e+01 6.192e+01 7.414e+01 9.950e+01, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 06:28:57,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=251229.33333333334, ans=0.125
+2024-07-29 06:29:13,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251242.66666666666, ans=0.1
+2024-07-29 06:29:33,243 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=15.0
+2024-07-29 06:29:37,349 INFO [train.py:1114] (0/4) Epoch 19, batch 4450, loss[loss=0.1814, simple_loss=0.2613, pruned_loss=0.05074, over 4931.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2628, pruned_loss=0.04023, over 937848.55 frames. ], batch size: 12, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:29:53,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=251296.0, ans=0.5
+2024-07-29 06:30:09,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=251322.66666666666, ans=0.125
+2024-07-29 06:30:13,622 INFO [train.py:1114] (0/4) Epoch 19, batch 4500, loss[loss=0.1801, simple_loss=0.2656, pruned_loss=0.04727, over 4742.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04047, over 937313.89 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:30:14,012 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=15.0
+2024-07-29 06:31:20,190 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.536e+01 6.082e+01 6.951e+01 9.632e+01, threshold=1.216e+02, percent-clipped=0.0
+2024-07-29 06:31:22,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251349.33333333334, ans=0.1
+2024-07-29 06:31:23,510 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=251349.33333333334, ans=0.125
+2024-07-29 06:31:33,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=251362.66666666666, ans=0.0
+2024-07-29 06:32:18,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=251389.33333333334, ans=0.02
+2024-07-29 06:32:20,194 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=251402.66666666666, ans=0.0
+2024-07-29 06:32:20,711 INFO [train.py:1114] (0/4) Epoch 19, batch 4550, loss[loss=0.1602, simple_loss=0.244, pruned_loss=0.03823, over 4900.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2635, pruned_loss=0.04035, over 939347.73 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:32:26,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=251402.66666666666, ans=0.0
+2024-07-29 06:32:27,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251416.0, ans=0.1
+2024-07-29 06:32:28,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0
+2024-07-29 06:32:32,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251416.0, ans=0.1
+2024-07-29 06:32:47,637 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=251442.66666666666, ans=0.0
+2024-07-29 06:32:48,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=251442.66666666666, ans=0.125
+2024-07-29 06:32:48,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=251442.66666666666, ans=0.0
+2024-07-29 06:32:53,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251456.0, ans=0.1
+2024-07-29 06:32:58,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251456.0, ans=0.1
+2024-07-29 06:33:01,144 INFO [train.py:1114] (0/4) Epoch 19, batch 4600, loss[loss=0.1789, simple_loss=0.2786, pruned_loss=0.03964, over 4509.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04049, over 937748.53 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:33:11,383 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=251482.66666666666, ans=0.125
+2024-07-29 06:33:12,679 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.756e+01 6.471e+01 7.460e+01 1.091e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-29 06:33:42,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.08 vs. limit=15.0
+2024-07-29 06:33:42,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=251509.33333333334, ans=0.125
+2024-07-29 06:33:57,788 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.55 vs. limit=15.0
+2024-07-29 06:34:00,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=251522.66666666666, ans=0.0
+2024-07-29 06:34:16,610 INFO [train.py:1114] (0/4) Epoch 19, batch 4650, loss[loss=0.1911, simple_loss=0.2942, pruned_loss=0.04399, over 4818.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2649, pruned_loss=0.04074, over 939584.69 frames. ], batch size: 16, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:34:49,547 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.75 vs. limit=15.0
+2024-07-29 06:35:13,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=251562.66666666666, ans=0.125
+2024-07-29 06:35:32,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=251576.0, ans=0.125
+2024-07-29 06:35:57,612 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=251589.33333333334, ans=0.2
+2024-07-29 06:36:00,588 INFO [train.py:1114] (0/4) Epoch 19, batch 4700, loss[loss=0.1564, simple_loss=0.2508, pruned_loss=0.03102, over 4712.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2643, pruned_loss=0.04102, over 937409.95 frames. ], batch size: 11, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:36:02,363 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=251602.66666666666, ans=0.125
+2024-07-29 06:36:25,366 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=251616.0, ans=0.5
+2024-07-29 06:36:26,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=251616.0, ans=0.0
+2024-07-29 06:36:28,171 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.853e+01 6.382e+01 7.357e+01 1.166e+02, threshold=1.276e+02, percent-clipped=0.0
+2024-07-29 06:37:31,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251642.66666666666, ans=0.1
+2024-07-29 06:38:09,495 INFO [train.py:1114] (0/4) Epoch 19, batch 4750, loss[loss=0.1574, simple_loss=0.2567, pruned_loss=0.02908, over 4522.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2635, pruned_loss=0.04079, over 935953.60 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:38:23,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=251669.33333333334, ans=0.2
+2024-07-29 06:39:07,920 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=251682.66666666666, ans=0.025
+2024-07-29 06:39:45,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.85 vs. limit=22.5
+2024-07-29 06:40:01,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251722.66666666666, ans=0.125
+2024-07-29 06:40:02,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=251722.66666666666, ans=0.1
+2024-07-29 06:40:04,380 INFO [train.py:1114] (0/4) Epoch 19, batch 4800, loss[loss=0.1779, simple_loss=0.2715, pruned_loss=0.04211, over 4693.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2641, pruned_loss=0.04138, over 933009.65 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:40:25,961 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=251749.33333333334, ans=0.0
+2024-07-29 06:40:33,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.823e+01 6.588e+01 7.932e+01 1.236e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-29 06:40:34,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=251749.33333333334, ans=0.0
+2024-07-29 06:40:37,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=251749.33333333334, ans=0.2
+2024-07-29 06:40:49,140 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.87 vs. limit=15.0
+2024-07-29 06:41:15,295 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.96 vs. limit=6.0
+2024-07-29 06:41:19,277 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.74 vs. limit=6.0
+2024-07-29 06:41:52,686 INFO [train.py:1114] (0/4) Epoch 19, batch 4850, loss[loss=0.1484, simple_loss=0.2447, pruned_loss=0.02606, over 4746.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2647, pruned_loss=0.04137, over 932661.91 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:41:56,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=251802.66666666666, ans=0.04949747468305833
+2024-07-29 06:42:00,288 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.96 vs. limit=10.0
+2024-07-29 06:42:00,418 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.04 vs. limit=22.5
+2024-07-29 06:42:16,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=251816.0, ans=0.2
+2024-07-29 06:42:22,232 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=251829.33333333334, ans=0.0
+2024-07-29 06:42:27,109 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251842.66666666666, ans=0.1
+2024-07-29 06:42:55,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=251856.0, ans=10.0
+2024-07-29 06:43:11,736 INFO [train.py:1114] (0/4) Epoch 19, batch 4900, loss[loss=0.1682, simple_loss=0.2738, pruned_loss=0.03133, over 4759.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.04104, over 934211.91 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:43:53,540 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=251882.66666666666, ans=0.125
+2024-07-29 06:43:55,402 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.611e+01 6.100e+01 6.685e+01 9.009e+01, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 06:44:51,805 INFO [train.py:1114] (0/4) Epoch 19, batch 4950, loss[loss=0.2586, simple_loss=0.3164, pruned_loss=0.1004, over 3506.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2657, pruned_loss=0.04145, over 931844.59 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 64.0
+2024-07-29 06:45:05,160 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=251949.33333333334, ans=0.125
+2024-07-29 06:45:08,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=251949.33333333334, ans=0.2
+2024-07-29 06:45:10,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=251949.33333333334, ans=0.2
+2024-07-29 06:45:19,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=251962.66666666666, ans=0.125
+2024-07-29 06:45:43,435 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.08 vs. limit=22.5
+2024-07-29 06:45:47,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=251989.33333333334, ans=0.125
+2024-07-29 06:45:53,094 INFO [train.py:1114] (0/4) Epoch 19, batch 5000, loss[loss=0.1622, simple_loss=0.2697, pruned_loss=0.02732, over 4661.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2643, pruned_loss=0.04087, over 935464.23 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 64.0
+2024-07-29 06:45:54,407 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 06:46:17,624 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.743e+01 6.406e+01 6.805e+01 1.014e+02, threshold=1.281e+02, percent-clipped=0.0
+2024-07-29 06:46:32,084 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=252029.33333333334, ans=0.5
+2024-07-29 06:46:38,321 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=15.0
+2024-07-29 06:46:39,527 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=252042.66666666666, ans=0.0
+2024-07-29 06:46:51,739 INFO [train.py:1114] (0/4) Epoch 19, batch 5050, loss[loss=0.1484, simple_loss=0.2393, pruned_loss=0.02873, over 4855.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2641, pruned_loss=0.0409, over 938320.92 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 64.0
+2024-07-29 06:46:58,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.90 vs. limit=22.5
+2024-07-29 06:47:02,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.77 vs. limit=15.0
+2024-07-29 06:47:30,883 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252109.33333333334, ans=0.125
+2024-07-29 06:47:37,703 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=252122.66666666666, ans=0.0
+2024-07-29 06:47:37,793 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=252122.66666666666, ans=0.0
+2024-07-29 06:47:40,383 INFO [train.py:1114] (0/4) Epoch 19, batch 5100, loss[loss=0.1931, simple_loss=0.2836, pruned_loss=0.05134, over 4777.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2646, pruned_loss=0.04079, over 935683.27 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 32.0
+2024-07-29 06:47:41,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=252136.0, ans=0.2
+2024-07-29 06:47:51,507 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-07-29 06:47:55,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.744e+01 6.473e+01 7.169e+01 1.065e+02, threshold=1.295e+02, percent-clipped=0.0
+2024-07-29 06:47:55,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=252149.33333333334, ans=0.025
+2024-07-29 06:47:57,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=252149.33333333334, ans=0.0
+2024-07-29 06:48:48,102 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.62 vs. limit=22.5
+2024-07-29 06:49:16,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=252176.0, ans=0.2
+2024-07-29 06:49:24,781 INFO [train.py:1114] (0/4) Epoch 19, batch 5150, loss[loss=0.1577, simple_loss=0.2527, pruned_loss=0.03136, over 4854.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04039, over 936370.62 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0
+2024-07-29 06:49:24,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=252202.66666666666, ans=0.2
+2024-07-29 06:49:35,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252216.0, ans=0.125
+2024-07-29 06:49:52,359 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=252242.66666666666, ans=0.0
+2024-07-29 06:50:11,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252269.33333333334, ans=0.1
+2024-07-29 06:50:13,874 INFO [train.py:1114] (0/4) Epoch 19, batch 5200, loss[loss=0.1782, simple_loss=0.2819, pruned_loss=0.03726, over 4666.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2638, pruned_loss=0.04051, over 936140.53 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0
+2024-07-29 06:50:24,642 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.788e+01 6.281e+01 7.022e+01 9.096e+01, threshold=1.256e+02, percent-clipped=0.0
+2024-07-29 06:50:25,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252282.66666666666, ans=0.1
+2024-07-29 06:50:27,458 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=252282.66666666666, ans=0.09899494936611666
+2024-07-29 06:50:28,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=252296.0, ans=0.125
+2024-07-29 06:50:31,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=252296.0, ans=0.0
+2024-07-29 06:50:32,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=252296.0, ans=0.2
+2024-07-29 06:50:35,554 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252309.33333333334, ans=0.1
+2024-07-29 06:50:38,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=252309.33333333334, ans=0.125
+2024-07-29 06:50:46,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=252322.66666666666, ans=0.125
+2024-07-29 06:50:49,261 INFO [train.py:1114] (0/4) Epoch 19, batch 5250, loss[loss=0.1867, simple_loss=0.2728, pruned_loss=0.05028, over 4896.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2639, pruned_loss=0.0409, over 935543.82 frames. 
], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:58,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=252349.33333333334, ans=0.125 +2024-07-29 06:51:05,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=252362.66666666666, ans=0.125 +2024-07-29 06:51:07,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=252362.66666666666, ans=0.2 +2024-07-29 06:51:12,136 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=12.0 +2024-07-29 06:51:13,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252376.0, ans=0.1 +2024-07-29 06:51:16,839 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=252376.0, ans=10.0 +2024-07-29 06:51:17,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252389.33333333334, ans=0.125 +2024-07-29 06:51:21,018 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=15.0 +2024-07-29 06:51:24,376 INFO [train.py:1114] (0/4) Epoch 19, batch 5300, loss[loss=0.2032, simple_loss=0.2847, pruned_loss=0.06081, over 4639.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2633, pruned_loss=0.04092, over 934241.74 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:51:25,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252402.66666666666, ans=0.1 +2024-07-29 06:51:33,500 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.685e+01 6.229e+01 6.963e+01 9.686e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 06:51:48,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252442.66666666666, ans=0.125 +2024-07-29 06:51:48,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=252442.66666666666, ans=0.09899494936611666 +2024-07-29 06:51:49,899 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=252442.66666666666, ans=0.0 +2024-07-29 06:51:57,755 INFO [train.py:1114] (0/4) Epoch 19, batch 5350, loss[loss=0.1533, simple_loss=0.2304, pruned_loss=0.03807, over 4491.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2641, pruned_loss=0.04095, over 936383.66 frames. ], batch size: 10, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:51:58,737 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.05 vs. 
limit=22.5 +2024-07-29 06:52:00,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=252469.33333333334, ans=0.025 +2024-07-29 06:52:05,188 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=252482.66666666666, ans=0.0 +2024-07-29 06:52:11,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=12.0 +2024-07-29 06:52:12,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=252496.0, ans=0.0 +2024-07-29 06:52:14,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=252496.0, ans=0.0 +2024-07-29 06:52:26,974 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.78 vs. limit=10.0 +2024-07-29 06:52:29,092 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=15.0 +2024-07-29 06:52:32,374 INFO [train.py:1114] (0/4) Epoch 19, batch 5400, loss[loss=0.1842, simple_loss=0.2841, pruned_loss=0.04214, over 4210.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2645, pruned_loss=0.0411, over 930805.97 frames. ], batch size: 25, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:39,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252549.33333333334, ans=0.125 +2024-07-29 06:52:42,200 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.716e+01 6.217e+01 6.684e+01 8.948e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 06:52:42,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=252549.33333333334, ans=0.015 +2024-07-29 06:52:44,538 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.14 vs. limit=15.0 +2024-07-29 06:52:55,168 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=252576.0, ans=0.0 +2024-07-29 06:53:01,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=252576.0, ans=0.125 +2024-07-29 06:53:08,947 INFO [train.py:1114] (0/4) Epoch 19, batch 5450, loss[loss=0.1514, simple_loss=0.2372, pruned_loss=0.0328, over 4698.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2634, pruned_loss=0.0404, over 933550.94 frames. 
], batch size: 11, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:09,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252602.66666666666, ans=0.1 +2024-07-29 06:53:14,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=252602.66666666666, ans=0.125 +2024-07-29 06:53:16,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=252616.0, ans=0.0 +2024-07-29 06:53:32,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=252642.66666666666, ans=0.125 +2024-07-29 06:53:43,630 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.35 vs. limit=10.0 +2024-07-29 06:53:45,877 INFO [train.py:1114] (0/4) Epoch 19, batch 5500, loss[loss=0.1929, simple_loss=0.2844, pruned_loss=0.05066, over 4230.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2627, pruned_loss=0.04056, over 930776.13 frames. ], batch size: 25, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:46,200 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=252669.33333333334, ans=0.0 +2024-07-29 06:53:49,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.34 vs. limit=15.0 +2024-07-29 06:53:55,330 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.621e+01 6.177e+01 7.042e+01 9.819e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-29 06:54:09,022 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=252709.33333333334, ans=0.0 +2024-07-29 06:54:11,781 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=252709.33333333334, ans=0.0 +2024-07-29 06:54:15,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=252722.66666666666, ans=0.125 +2024-07-29 06:54:21,823 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.27 vs. limit=15.0 +2024-07-29 06:54:24,081 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0 +2024-07-29 06:54:24,312 INFO [train.py:1114] (0/4) Epoch 19, batch 5550, loss[loss=0.1588, simple_loss=0.2529, pruned_loss=0.03233, over 4701.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2622, pruned_loss=0.04006, over 932980.62 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:54:25,620 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.01 vs. 
limit=15.0 +2024-07-29 06:54:35,457 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=252749.33333333334, ans=0.0 +2024-07-29 06:54:36,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=252749.33333333334, ans=0.0 +2024-07-29 06:54:51,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=252789.33333333334, ans=0.025 +2024-07-29 06:55:00,395 INFO [train.py:1114] (0/4) Epoch 19, batch 5600, loss[loss=0.194, simple_loss=0.2793, pruned_loss=0.0544, over 4741.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2635, pruned_loss=0.04048, over 934308.50 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:00,464 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=252802.66666666666, ans=0.2 +2024-07-29 06:55:03,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=252802.66666666666, ans=10.0 +2024-07-29 06:55:09,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=252816.0, ans=0.125 +2024-07-29 06:55:10,085 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 6.000e+01 7.138e+01 7.919e+01 1.152e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-29 06:55:14,996 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-29 06:55:37,020 INFO [train.py:1114] (0/4) Epoch 19, batch 5650, loss[loss=0.2186, simple_loss=0.3134, pruned_loss=0.06192, over 4594.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2628, pruned_loss=0.04013, over 936847.03 frames. ], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:44,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=15.0 +2024-07-29 06:55:46,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=252882.66666666666, ans=0.1 +2024-07-29 06:55:56,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=252896.0, ans=0.0 +2024-07-29 06:56:06,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=252922.66666666666, ans=0.125 +2024-07-29 06:56:12,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252922.66666666666, ans=0.125 +2024-07-29 06:56:15,650 INFO [train.py:1114] (0/4) Epoch 19, batch 5700, loss[loss=0.1725, simple_loss=0.269, pruned_loss=0.03797, over 4693.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2643, pruned_loss=0.04106, over 937905.85 frames. 
], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:56:15,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=252936.0, ans=0.0 +2024-07-29 06:56:20,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252936.0, ans=0.125 +2024-07-29 06:56:22,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252949.33333333334, ans=0.125 +2024-07-29 06:56:25,030 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.631e+01 6.115e+01 6.862e+01 9.521e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-29 06:56:25,370 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.41 vs. limit=15.0 +2024-07-29 06:56:28,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=252962.66666666666, ans=0.125 +2024-07-29 06:56:42,171 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-07-29 06:56:55,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252989.33333333334, ans=0.125 +2024-07-29 06:56:57,577 INFO [train.py:1114] (0/4) Epoch 19, batch 5750, loss[loss=0.1819, simple_loss=0.28, pruned_loss=0.04186, over 4721.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2648, pruned_loss=0.0411, over 938056.41 frames. ], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:09,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.99 vs. limit=15.0 +2024-07-29 06:57:12,938 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=253029.33333333334, ans=0.125 +2024-07-29 06:57:14,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253029.33333333334, ans=0.1 +2024-07-29 06:57:25,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253042.66666666666, ans=0.1 +2024-07-29 06:57:30,072 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.82 vs. limit=6.0 +2024-07-29 06:57:33,965 INFO [train.py:1114] (0/4) Epoch 19, batch 5800, loss[loss=0.1549, simple_loss=0.2494, pruned_loss=0.03015, over 4713.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2653, pruned_loss=0.04137, over 937185.06 frames. 
], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:36,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=253069.33333333334, ans=0.2 +2024-07-29 06:57:41,621 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=253082.66666666666, ans=0.0 +2024-07-29 06:57:42,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=253082.66666666666, ans=0.0 +2024-07-29 06:57:43,294 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+01 5.591e+01 6.504e+01 7.272e+01 1.266e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-29 06:57:44,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=253082.66666666666, ans=0.125 +2024-07-29 06:57:46,762 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:57:48,941 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=253096.0, ans=0.125 +2024-07-29 06:57:54,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=253109.33333333334, ans=0.125 +2024-07-29 06:57:58,599 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=253109.33333333334, ans=0.0 +2024-07-29 06:58:03,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=253122.66666666666, ans=0.125 +2024-07-29 06:58:08,052 INFO [train.py:1114] (0/4) Epoch 19, batch 5850, loss[loss=0.1673, simple_loss=0.2601, pruned_loss=0.03721, over 4439.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2641, pruned_loss=0.04078, over 938001.98 frames. ], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:12,791 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=253136.0, ans=0.125 +2024-07-29 06:58:16,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=253149.33333333334, ans=0.0 +2024-07-29 06:58:24,596 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.53 vs. limit=22.5 +2024-07-29 06:58:29,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253176.0, ans=0.1 +2024-07-29 06:58:32,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253176.0, ans=0.125 +2024-07-29 06:58:36,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=253189.33333333334, ans=22.5 +2024-07-29 06:58:46,368 INFO [train.py:1114] (0/4) Epoch 19, batch 5900, loss[loss=0.1741, simple_loss=0.2689, pruned_loss=0.03965, over 4690.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2633, pruned_loss=0.0404, over 938464.46 frames. ], batch size: 15, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:52,811 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. 
limit=15.0 +2024-07-29 06:58:55,597 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.657e+01 6.141e+01 7.066e+01 1.029e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 06:58:58,448 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:59:00,562 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=253229.33333333334, ans=0.125 +2024-07-29 06:59:06,007 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=253242.66666666666, ans=0.125 +2024-07-29 06:59:19,488 INFO [train.py:1114] (0/4) Epoch 19, batch 5950, loss[loss=0.2047, simple_loss=0.2859, pruned_loss=0.06175, over 4688.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2627, pruned_loss=0.04013, over 940205.97 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:20,656 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=15.0 +2024-07-29 06:59:27,547 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=253282.66666666666, ans=0.125 +2024-07-29 06:59:44,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-07-29 06:59:47,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253309.33333333334, ans=0.125 +2024-07-29 06:59:54,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253322.66666666666, ans=0.1 +2024-07-29 06:59:54,343 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.82 vs. limit=15.0 +2024-07-29 06:59:56,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=253322.66666666666, ans=0.025 +2024-07-29 06:59:57,788 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=253322.66666666666, ans=0.0 +2024-07-29 06:59:58,954 INFO [train.py:1114] (0/4) Epoch 19, batch 6000, loss[loss=0.171, simple_loss=0.2662, pruned_loss=0.03793, over 4330.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.03994, over 937777.17 frames. 
], batch size: 26, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:58,955 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 07:00:07,048 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.5981, 2.8089, 3.3403, 3.2956], device='cuda:0') +2024-07-29 07:00:07,239 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.6151, 4.1388, 4.4419, 4.2931], device='cuda:0') +2024-07-29 07:00:11,172 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.2540, 2.1162, 2.6796, 2.9598, 2.8441, 2.6236, 2.8391, 2.1362], + device='cuda:0') +2024-07-29 07:00:12,795 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.0995, 2.5698, 3.6791, 3.3398, 3.7358, 3.7009, 3.0892, 2.6812], + device='cuda:0') +2024-07-29 07:00:15,062 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1606, simple_loss=0.2627, pruned_loss=0.02924, over 944034.00 frames. +2024-07-29 07:00:15,063 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 07:00:15,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253336.0, ans=0.125 +2024-07-29 07:00:22,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=253349.33333333334, ans=0.0 +2024-07-29 07:00:24,592 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.715e+01 6.299e+01 6.877e+01 1.010e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:00:24,698 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253349.33333333334, ans=0.0 +2024-07-29 07:00:26,092 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=253349.33333333334, ans=0.125 +2024-07-29 07:00:26,985 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0 +2024-07-29 07:00:39,925 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.88 vs. limit=15.0 +2024-07-29 07:00:40,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=253376.0, ans=0.125 +2024-07-29 07:00:57,748 INFO [train.py:1114] (0/4) Epoch 19, batch 6050, loss[loss=0.1489, simple_loss=0.2349, pruned_loss=0.03141, over 4775.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2623, pruned_loss=0.0403, over 938766.06 frames. ], batch size: 12, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:05,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253402.66666666666, ans=0.1 +2024-07-29 07:01:07,316 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=253416.0, ans=0.0 +2024-07-29 07:01:35,547 INFO [train.py:1114] (0/4) Epoch 19, batch 6100, loss[loss=0.219, simple_loss=0.3098, pruned_loss=0.06412, over 4675.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2627, pruned_loss=0.04092, over 938119.23 frames. 
], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:40,347 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0 +2024-07-29 07:01:42,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=253469.33333333334, ans=0.125 +2024-07-29 07:01:46,503 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.746e+01 6.337e+01 7.599e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 07:01:51,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=253496.0, ans=0.0 +2024-07-29 07:01:57,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=253509.33333333334, ans=10.0 +2024-07-29 07:02:00,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253509.33333333334, ans=0.125 +2024-07-29 07:02:04,193 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.78 vs. limit=15.0 +2024-07-29 07:02:06,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=253522.66666666666, ans=0.125 +2024-07-29 07:02:07,799 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:02:10,470 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:02:10,980 INFO [train.py:1114] (0/4) Epoch 19, batch 6150, loss[loss=0.1902, simple_loss=0.2799, pruned_loss=0.05029, over 3627.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2639, pruned_loss=0.041, over 937133.28 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:02:13,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=253536.0, ans=0.0 +2024-07-29 07:02:18,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253549.33333333334, ans=0.1 +2024-07-29 07:02:21,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=253549.33333333334, ans=0.0 +2024-07-29 07:02:24,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253549.33333333334, ans=0.0 +2024-07-29 07:02:26,553 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=253562.66666666666, ans=0.0 +2024-07-29 07:02:30,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=253562.66666666666, ans=0.0 +2024-07-29 07:02:39,261 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.38 vs. limit=15.0 +2024-07-29 07:02:39,946 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.51 vs. 
limit=15.0 +2024-07-29 07:02:40,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=253589.33333333334, ans=0.0 +2024-07-29 07:02:46,256 INFO [train.py:1114] (0/4) Epoch 19, batch 6200, loss[loss=0.1672, simple_loss=0.2748, pruned_loss=0.02979, over 4745.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2642, pruned_loss=0.04126, over 936725.18 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:02:47,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=253602.66666666666, ans=0.2 +2024-07-29 07:02:55,099 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.50 vs. limit=22.5 +2024-07-29 07:02:56,246 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=253602.66666666666, ans=0.09899494936611666 +2024-07-29 07:02:59,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=253616.0, ans=15.0 +2024-07-29 07:03:00,847 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.872e+01 6.274e+01 7.114e+01 1.110e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 07:03:01,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=253616.0, ans=0.2 +2024-07-29 07:03:10,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=253629.33333333334, ans=0.0 +2024-07-29 07:03:26,825 INFO [train.py:1114] (0/4) Epoch 19, batch 6250, loss[loss=0.1741, simple_loss=0.2674, pruned_loss=0.04036, over 4813.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2645, pruned_loss=0.04136, over 933268.24 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:03:28,320 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=253669.33333333334, ans=0.125 +2024-07-29 07:03:32,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253669.33333333334, ans=0.125 +2024-07-29 07:03:48,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=253709.33333333334, ans=0.0 +2024-07-29 07:03:48,727 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-29 07:03:53,238 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=253722.66666666666, ans=0.2 +2024-07-29 07:04:00,511 INFO [train.py:1114] (0/4) Epoch 19, batch 6300, loss[loss=0.1405, simple_loss=0.232, pruned_loss=0.02448, over 4511.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2645, pruned_loss=0.04148, over 929072.37 frames. 
], batch size: 10, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:03,946 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:04:04,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=253736.0, ans=0.125 +2024-07-29 07:04:09,679 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.98 vs. limit=15.0 +2024-07-29 07:04:09,765 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.656e+01 6.439e+01 7.394e+01 1.114e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 07:04:32,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=253789.33333333334, ans=0.2 +2024-07-29 07:04:47,721 INFO [train.py:1114] (0/4) Epoch 19, batch 6350, loss[loss=0.195, simple_loss=0.2929, pruned_loss=0.04856, over 4527.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.264, pruned_loss=0.0412, over 933189.28 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:05:06,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253829.33333333334, ans=0.1 +2024-07-29 07:05:08,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=253842.66666666666, ans=6.0 +2024-07-29 07:05:21,148 INFO [train.py:1114] (0/4) Epoch 19, batch 6400, loss[loss=0.1761, simple_loss=0.2759, pruned_loss=0.03815, over 4634.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.264, pruned_loss=0.04161, over 934486.74 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:05:26,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.70 vs. limit=15.0 +2024-07-29 07:05:30,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.936e+01 6.680e+01 7.365e+01 1.184e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-29 07:05:34,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=253896.0, ans=0.125 +2024-07-29 07:05:39,826 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253896.0, ans=0.1 +2024-07-29 07:05:43,520 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.20 vs. limit=22.5 +2024-07-29 07:05:45,527 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.93 vs. limit=15.0 +2024-07-29 07:06:01,008 INFO [train.py:1114] (0/4) Epoch 19, batch 6450, loss[loss=0.1909, simple_loss=0.2712, pruned_loss=0.05527, over 4503.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04203, over 938279.39 frames. 
], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:12,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253949.33333333334, ans=0.125 +2024-07-29 07:06:13,651 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=253962.66666666666, ans=0.0 +2024-07-29 07:06:32,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=253976.0, ans=0.125 +2024-07-29 07:06:32,940 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=253976.0, ans=0.2 +2024-07-29 07:06:34,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253976.0, ans=0.125 +2024-07-29 07:06:38,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=253989.33333333334, ans=0.04949747468305833 +2024-07-29 07:06:45,348 INFO [train.py:1114] (0/4) Epoch 19, batch 6500, loss[loss=0.2486, simple_loss=0.3131, pruned_loss=0.09212, over 3429.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2633, pruned_loss=0.0415, over 939779.56 frames. ], batch size: 36, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:47,493 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=254002.66666666666, ans=0.5 +2024-07-29 07:06:51,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254002.66666666666, ans=0.125 +2024-07-29 07:06:53,799 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=254016.0, ans=0.2 +2024-07-29 07:06:54,894 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.939e+01 5.824e+01 6.462e+01 7.830e+01 1.082e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-29 07:06:59,053 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=254029.33333333334, ans=0.125 +2024-07-29 07:07:07,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=254042.66666666666, ans=0.125 +2024-07-29 07:07:11,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=254042.66666666666, ans=0.0 +2024-07-29 07:07:16,428 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=254056.0, ans=0.125 +2024-07-29 07:07:17,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=254056.0, ans=0.2 +2024-07-29 07:07:17,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=254056.0, ans=0.0 +2024-07-29 07:07:19,429 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=254056.0, ans=22.5 +2024-07-29 07:07:20,276 INFO [train.py:1114] (0/4) Epoch 19, batch 6550, loss[loss=0.1621, simple_loss=0.2502, pruned_loss=0.03697, over 4813.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2627, pruned_loss=0.04145, over 942974.01 frames. 
], batch size: 11, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:07:21,036 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=254069.33333333334, ans=0.0 +2024-07-29 07:07:30,746 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=254082.66666666666, ans=0.125 +2024-07-29 07:07:43,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=254109.33333333334, ans=0.125 +2024-07-29 07:07:44,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=254109.33333333334, ans=0.0 +2024-07-29 07:07:50,233 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254122.66666666666, ans=0.1 +2024-07-29 07:07:56,791 INFO [train.py:1114] (0/4) Epoch 19, batch 6600, loss[loss=0.1642, simple_loss=0.26, pruned_loss=0.03417, over 4930.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2631, pruned_loss=0.04148, over 944989.03 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:07:57,776 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254136.0, ans=0.1 +2024-07-29 07:08:00,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=254136.0, ans=0.125 +2024-07-29 07:08:06,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.577e+01 6.191e+01 6.872e+01 1.333e+02, threshold=1.238e+02, percent-clipped=1.0 +2024-07-29 07:08:08,615 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=254149.33333333334, ans=0.0 +2024-07-29 07:08:14,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.29 vs. limit=15.0 +2024-07-29 07:08:20,512 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=254176.0, ans=0.0 +2024-07-29 07:08:27,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=254189.33333333334, ans=0.125 +2024-07-29 07:08:30,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=254202.66666666666, ans=0.025 +2024-07-29 07:08:30,626 INFO [train.py:1114] (0/4) Epoch 19, batch 6650, loss[loss=0.2122, simple_loss=0.3078, pruned_loss=0.05831, over 4586.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2626, pruned_loss=0.04094, over 943534.58 frames. ], batch size: 17, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:35,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=254202.66666666666, ans=0.2 +2024-07-29 07:08:54,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=254242.66666666666, ans=0.035 +2024-07-29 07:09:04,189 INFO [train.py:1114] (0/4) Epoch 19, batch 6700, loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04231, over 4733.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.263, pruned_loss=0.04083, over 942319.91 frames. 
], batch size: 19, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:13,674 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.681e+01 5.588e+01 6.301e+01 6.767e+01 8.851e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:09:14,501 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=254282.66666666666, ans=0.125 +2024-07-29 07:09:14,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=254282.66666666666, ans=0.0 +2024-07-29 07:09:14,591 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=254282.66666666666, ans=0.025 +2024-07-29 07:09:19,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=254296.0, ans=0.125 +2024-07-29 07:09:20,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254296.0, ans=0.125 +2024-07-29 07:09:20,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=254296.0, ans=0.07 +2024-07-29 07:09:22,226 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-29 07:09:38,292 INFO [train.py:1114] (0/4) Epoch 19, batch 6750, loss[loss=0.1773, simple_loss=0.2767, pruned_loss=0.03896, over 4212.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2635, pruned_loss=0.04085, over 939975.79 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:39,851 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254336.0, ans=0.1 +2024-07-29 07:09:42,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=254336.0, ans=0.125 +2024-07-29 07:09:57,542 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=254362.66666666666, ans=0.1 +2024-07-29 07:10:00,095 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:10:00,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=254376.0, ans=0.2 +2024-07-29 07:10:01,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=254376.0, ans=0.025 +2024-07-29 07:10:13,856 INFO [train.py:1114] (0/4) Epoch 19, batch 6800, loss[loss=0.2018, simple_loss=0.2967, pruned_loss=0.05345, over 4633.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2641, pruned_loss=0.04114, over 938531.81 frames. 
], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:14,051 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254402.66666666666, ans=0.1 +2024-07-29 07:10:17,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=254402.66666666666, ans=0.025 +2024-07-29 07:10:22,889 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.699e+01 6.328e+01 7.077e+01 1.070e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:10:28,180 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254429.33333333334, ans=0.125 +2024-07-29 07:10:28,416 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.63 vs. limit=22.5 +2024-07-29 07:10:28,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=254429.33333333334, ans=0.2 +2024-07-29 07:10:28,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254429.33333333334, ans=0.125 +2024-07-29 07:10:32,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=254429.33333333334, ans=0.125 +2024-07-29 07:10:34,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=254442.66666666666, ans=15.0 +2024-07-29 07:10:44,378 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=254456.0, ans=0.0 +2024-07-29 07:10:46,745 INFO [train.py:1114] (0/4) Epoch 19, batch 6850, loss[loss=0.1761, simple_loss=0.2718, pruned_loss=0.04015, over 4697.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.04061, over 940400.46 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:53,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=254482.66666666666, ans=0.2 +2024-07-29 07:10:56,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. 
limit=15.0
+2024-07-29 07:11:03,390 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=254496.0, ans=0.07
+2024-07-29 07:11:07,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=254509.33333333334, ans=0.025
+2024-07-29 07:11:08,823 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254509.33333333334, ans=0.1
+2024-07-29 07:11:09,409 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=254509.33333333334, ans=0.2
+2024-07-29 07:11:10,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=254509.33333333334, ans=0.0
+2024-07-29 07:11:13,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=254522.66666666666, ans=0.2
+2024-07-29 07:11:20,054 INFO [train.py:1114] (0/4) Epoch 19, batch 6900, loss[loss=0.161, simple_loss=0.2553, pruned_loss=0.03341, over 4969.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2619, pruned_loss=0.04001, over 943000.08 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0
+2024-07-29 07:11:23,893 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.94 vs. limit=10.0
+2024-07-29 07:11:27,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=254549.33333333334, ans=0.025
+2024-07-29 07:11:29,546 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.808e+01 6.453e+01 7.424e+01 1.237e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-29 07:11:43,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254576.0, ans=0.1
+2024-07-29 07:11:53,785 INFO [train.py:1114] (0/4) Epoch 19, batch 6950, loss[loss=0.1654, simple_loss=0.2471, pruned_loss=0.04185, over 4524.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2625, pruned_loss=0.04054, over 940640.09 frames. ], batch size: 10, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:11:55,512 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=12.0
+2024-07-29 07:12:09,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254629.33333333334, ans=0.125
+2024-07-29 07:12:10,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=254629.33333333334, ans=0.125
+2024-07-29 07:12:29,159 INFO [train.py:1114] (0/4) Epoch 19, batch 7000, loss[loss=0.1769, simple_loss=0.273, pruned_loss=0.04044, over 4650.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.262, pruned_loss=0.04062, over 938939.18 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:12:29,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=254669.33333333334, ans=0.125
+2024-07-29 07:12:31,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=254669.33333333334, ans=0.2
+2024-07-29 07:12:34,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=254669.33333333334, ans=0.2
+2024-07-29 07:12:35,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254682.66666666666, ans=0.0
+2024-07-29 07:12:38,428 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.806e+01 6.455e+01 7.186e+01 1.060e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-29 07:12:44,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=254696.0, ans=0.125
+2024-07-29 07:12:45,737 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=254696.0, ans=0.0
+2024-07-29 07:12:45,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254696.0, ans=0.1
+2024-07-29 07:12:50,340 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254709.33333333334, ans=0.1
+2024-07-29 07:13:02,118 INFO [train.py:1114] (0/4) Epoch 19, batch 7050, loss[loss=0.1811, simple_loss=0.2784, pruned_loss=0.04189, over 4685.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2636, pruned_loss=0.04104, over 942008.46 frames. ], batch size: 19, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:13:08,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=254749.33333333334, ans=0.125
+2024-07-29 07:13:21,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=254762.66666666666, ans=0.0
+2024-07-29 07:13:25,178 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=254776.0, ans=0.2
+2024-07-29 07:13:27,071 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=254776.0, ans=0.0
+2024-07-29 07:13:34,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=254789.33333333334, ans=0.0
+2024-07-29 07:13:36,796 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=254789.33333333334, ans=0.2
+2024-07-29 07:13:38,655 INFO [train.py:1114] (0/4) Epoch 19, batch 7100, loss[loss=0.1965, simple_loss=0.2851, pruned_loss=0.05392, over 4817.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2645, pruned_loss=0.04133, over 937131.23 frames. ], batch size: 15, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:13:49,300 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.809e+01 6.351e+01 7.232e+01 1.086e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-29 07:13:50,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.94 vs. limit=12.0
+2024-07-29 07:13:52,388 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0
+2024-07-29 07:14:04,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254842.66666666666, ans=0.125
+2024-07-29 07:14:13,150 INFO [train.py:1114] (0/4) Epoch 19, batch 7150, loss[loss=0.1803, simple_loss=0.2831, pruned_loss=0.03878, over 4431.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2636, pruned_loss=0.04095, over 937925.13 frames. ], batch size: 21, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:14:36,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=254909.33333333334, ans=0.125
+2024-07-29 07:14:36,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=254909.33333333334, ans=0.125
+2024-07-29 07:14:46,090 INFO [train.py:1114] (0/4) Epoch 19, batch 7200, loss[loss=0.1991, simple_loss=0.2901, pruned_loss=0.05409, over 4792.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04049, over 937797.63 frames. ], batch size: 15, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:14:52,959 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0
+2024-07-29 07:14:55,077 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.607e+01 6.088e+01 6.745e+01 8.858e+01, threshold=1.218e+02, percent-clipped=0.0
+2024-07-29 07:14:55,251 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=254949.33333333334, ans=0.025
+2024-07-29 07:14:56,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=254949.33333333334, ans=0.125
+2024-07-29 07:15:18,455 INFO [train.py:1114] (0/4) Epoch 19, batch 7250, loss[loss=0.1462, simple_loss=0.2319, pruned_loss=0.03024, over 4859.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2624, pruned_loss=0.04015, over 939600.65 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:15:28,997 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255016.0, ans=0.1
+2024-07-29 07:15:29,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=255016.0, ans=0.125
+2024-07-29 07:15:29,953 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.70 vs. limit=12.0
+2024-07-29 07:15:31,644 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255029.33333333334, ans=0.125
+2024-07-29 07:15:34,687 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 07:15:39,343 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255042.66666666666, ans=0.1
+2024-07-29 07:15:43,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.66 vs. limit=15.0
+2024-07-29 07:15:48,528 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=255056.0, ans=0.125
+2024-07-29 07:15:50,595 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.97 vs. limit=22.5
+2024-07-29 07:15:50,897 INFO [train.py:1114] (0/4) Epoch 19, batch 7300, loss[loss=0.1227, simple_loss=0.217, pruned_loss=0.01414, over 4836.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2621, pruned_loss=0.04015, over 940040.85 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:15:51,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=255069.33333333334, ans=0.125
+2024-07-29 07:15:55,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=255069.33333333334, ans=0.0
+2024-07-29 07:15:56,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=255069.33333333334, ans=0.125
+2024-07-29 07:15:58,354 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255082.66666666666, ans=0.125
+2024-07-29 07:16:00,104 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.606e+01 6.073e+01 6.714e+01 9.388e+01, threshold=1.215e+02, percent-clipped=0.0
+2024-07-29 07:16:00,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=255082.66666666666, ans=0.5
+2024-07-29 07:16:03,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255096.0, ans=0.125
+2024-07-29 07:16:12,327 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.83 vs. limit=15.0
+2024-07-29 07:16:12,381 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.10 vs. limit=15.0
+2024-07-29 07:16:14,229 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.55 vs. limit=15.0
+2024-07-29 07:16:16,705 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=255122.66666666666, ans=0.2
+2024-07-29 07:16:16,884 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.08 vs. limit=22.5
+2024-07-29 07:16:19,778 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=255122.66666666666, ans=0.125
+2024-07-29 07:16:23,607 INFO [train.py:1114] (0/4) Epoch 19, batch 7350, loss[loss=0.1578, simple_loss=0.249, pruned_loss=0.03334, over 4646.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2615, pruned_loss=0.04003, over 939174.12 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:16:27,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=255136.0, ans=0.125
+2024-07-29 07:16:38,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255162.66666666666, ans=0.1
+2024-07-29 07:17:01,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255176.0, ans=0.125
+2024-07-29 07:17:01,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255176.0, ans=0.125
+2024-07-29 07:17:10,028 INFO [train.py:1114] (0/4) Epoch 19, batch 7400, loss[loss=0.1943, simple_loss=0.2873, pruned_loss=0.05059, over 4703.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.262, pruned_loss=0.03995, over 940250.79 frames. ], batch size: 13, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:17:19,335 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 5.806e+01 6.617e+01 8.276e+01 1.312e+02, threshold=1.323e+02, percent-clipped=3.0
+2024-07-29 07:17:26,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=255229.33333333334, ans=0.2
+2024-07-29 07:17:28,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=255229.33333333334, ans=0.125
+2024-07-29 07:17:39,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=255256.0, ans=0.05
+2024-07-29 07:17:42,966 INFO [train.py:1114] (0/4) Epoch 19, batch 7450, loss[loss=0.1473, simple_loss=0.2317, pruned_loss=0.0314, over 4615.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2626, pruned_loss=0.04068, over 937630.57 frames. ], batch size: 11, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:17:51,838 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-07-29 07:17:55,198 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.71 vs. limit=15.0
+2024-07-29 07:18:15,923 INFO [train.py:1114] (0/4) Epoch 19, batch 7500, loss[loss=0.1888, simple_loss=0.2766, pruned_loss=0.05046, over 3403.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.04063, over 935176.28 frames. ], batch size: 35, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:18:25,192 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.501e+01 5.980e+01 6.814e+01 1.020e+02, threshold=1.196e+02, percent-clipped=0.0
+2024-07-29 07:18:27,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255349.33333333334, ans=0.1
+2024-07-29 07:18:38,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255376.0, ans=0.1
+2024-07-29 07:18:42,867 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.10 vs. limit=22.5
+2024-07-29 07:18:48,937 INFO [train.py:1114] (0/4) Epoch 19, batch 7550, loss[loss=0.1919, simple_loss=0.2868, pruned_loss=0.04847, over 4601.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2633, pruned_loss=0.041, over 935194.88 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:18:50,900 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=255402.66666666666, ans=0.0
+2024-07-29 07:18:55,611 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255402.66666666666, ans=0.125
+2024-07-29 07:18:57,151 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.63 vs. limit=15.0
+2024-07-29 07:19:16,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=255442.66666666666, ans=0.125
+2024-07-29 07:19:18,850 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=255456.0, ans=0.0
+2024-07-29 07:19:30,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=255456.0, ans=0.0
+2024-07-29 07:19:34,142 INFO [train.py:1114] (0/4) Epoch 19, batch 7600, loss[loss=0.1831, simple_loss=0.2752, pruned_loss=0.04551, over 4811.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04056, over 936819.65 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:21:07,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=255469.33333333334, ans=0.125
+2024-07-29 07:21:08,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=255469.33333333334, ans=0.125
+2024-07-29 07:21:14,852 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.357e+01 5.885e+01 6.503e+01 9.082e+01, threshold=1.177e+02, percent-clipped=0.0
+2024-07-29 07:21:18,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255496.0, ans=0.125
+2024-07-29 07:21:33,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255522.66666666666, ans=0.125
+2024-07-29 07:21:35,657 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.38 vs. limit=15.0
+2024-07-29 07:21:38,458 INFO [train.py:1114] (0/4) Epoch 19, batch 7650, loss[loss=0.1524, simple_loss=0.2357, pruned_loss=0.03461, over 4942.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2622, pruned_loss=0.04, over 936132.62 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:21:43,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=255536.0, ans=0.0
+2024-07-29 07:21:53,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=255562.66666666666, ans=0.125
+2024-07-29 07:21:56,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=12.0
+2024-07-29 07:22:00,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=255576.0, ans=0.125
+2024-07-29 07:22:02,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=255576.0, ans=0.125
+2024-07-29 07:22:03,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.89 vs. limit=22.5
+2024-07-29 07:22:09,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=255589.33333333334, ans=0.0
+2024-07-29 07:22:09,742 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=255589.33333333334, ans=0.125
+2024-07-29 07:22:11,506 INFO [train.py:1114] (0/4) Epoch 19, batch 7700, loss[loss=0.1818, simple_loss=0.2668, pruned_loss=0.04836, over 4693.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.04063, over 933566.79 frames. ], batch size: 13, lr: 3.89e-03, grad_scale: 64.0
+2024-07-29 07:22:18,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=255616.0, ans=0.0
+2024-07-29 07:22:19,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255616.0, ans=0.125
+2024-07-29 07:22:21,014 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.495e+01 5.903e+01 6.797e+01 9.764e+01, threshold=1.181e+02, percent-clipped=0.0
+2024-07-29 07:22:46,044 INFO [train.py:1114] (0/4) Epoch 19, batch 7750, loss[loss=0.1726, simple_loss=0.2606, pruned_loss=0.04231, over 4941.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2639, pruned_loss=0.04096, over 935017.20 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:22:46,256 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=255669.33333333334, ans=0.025
+2024-07-29 07:22:56,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=255682.66666666666, ans=0.125
+2024-07-29 07:23:00,240 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.86 vs. limit=15.0
+2024-07-29 07:23:00,618 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=255682.66666666666, ans=0.125
+2024-07-29 07:23:03,421 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.57 vs. limit=6.0
+2024-07-29 07:23:15,451 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=255722.66666666666, ans=0.125
+2024-07-29 07:23:41,558 INFO [train.py:1114] (0/4) Epoch 19, batch 7800, loss[loss=0.1713, simple_loss=0.2656, pruned_loss=0.03852, over 4656.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2646, pruned_loss=0.04093, over 936926.00 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:23:51,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255736.0, ans=0.1
+2024-07-29 07:23:52,421 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=255736.0, ans=0.05
+2024-07-29 07:23:54,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255749.33333333334, ans=0.1
+2024-07-29 07:23:55,040 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0
+2024-07-29 07:23:56,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=255749.33333333334, ans=0.125
+2024-07-29 07:23:57,873 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 5.806e+01 6.397e+01 7.223e+01 9.492e+01, threshold=1.279e+02, percent-clipped=0.0
+2024-07-29 07:24:08,828 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.58 vs. limit=6.0
+2024-07-29 07:24:21,835 INFO [train.py:1114] (0/4) Epoch 19, batch 7850, loss[loss=0.1592, simple_loss=0.2379, pruned_loss=0.04024, over 4529.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2641, pruned_loss=0.0411, over 936316.80 frames. ], batch size: 10, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:24:26,898 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.73 vs. limit=15.0
+2024-07-29 07:24:29,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=255802.66666666666, ans=0.125
+2024-07-29 07:24:35,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255816.0, ans=0.125
+2024-07-29 07:24:47,502 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.30 vs. limit=15.0
+2024-07-29 07:24:59,391 INFO [train.py:1114] (0/4) Epoch 19, batch 7900, loss[loss=0.191, simple_loss=0.2846, pruned_loss=0.04866, over 4869.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2652, pruned_loss=0.04144, over 933546.99 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0
+2024-07-29 07:25:19,924 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+01 5.757e+01 6.184e+01 6.980e+01 1.069e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-29 07:25:28,018 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=255896.0, ans=0.0
+2024-07-29 07:25:32,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255909.33333333334, ans=0.1
+2024-07-29 07:25:32,889 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.70 vs. limit=22.5
+2024-07-29 07:25:33,905 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=255909.33333333334, ans=0.2
+2024-07-29 07:25:36,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=255922.66666666666, ans=0.2
+2024-07-29 07:25:42,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255936.0, ans=0.1
+2024-07-29 07:26:55,441 INFO [train.py:1114] (0/4) Epoch 19, batch 7950, loss[loss=0.2318, simple_loss=0.3009, pruned_loss=0.08136, over 3306.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.04103, over 935819.70 frames. ], batch size: 35, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:27:00,348 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0
+2024-07-29 07:27:27,042 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.05 vs. limit=15.0
+2024-07-29 07:27:30,725 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=255962.66666666666, ans=0.07
+2024-07-29 07:27:31,424 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=255962.66666666666, ans=0.95
+2024-07-29 07:27:37,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=255976.0, ans=0.125
+2024-07-29 07:27:45,731 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-192000.pt
+2024-07-29 07:27:50,604 INFO [train.py:1114] (0/4) Epoch 19, batch 8000, loss[loss=0.1406, simple_loss=0.2228, pruned_loss=0.02917, over 4616.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2628, pruned_loss=0.04042, over 934840.36 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:27:53,510 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.98 vs. limit=15.0
+2024-07-29 07:28:01,421 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.673e+01 6.447e+01 7.571e+01 1.092e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-29 07:28:11,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=256042.66666666666, ans=0.0
+2024-07-29 07:28:15,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256042.66666666666, ans=0.0
+2024-07-29 07:28:24,186 INFO [train.py:1114] (0/4) Epoch 19, batch 8050, loss[loss=0.1765, simple_loss=0.274, pruned_loss=0.03944, over 4811.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2623, pruned_loss=0.0404, over 934006.03 frames. ], batch size: 14, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:28:28,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=256069.33333333334, ans=22.5
+2024-07-29 07:28:28,566 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.35 vs. limit=12.0
+2024-07-29 07:28:38,375 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.47 vs. limit=22.5
+2024-07-29 07:28:49,112 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=256109.33333333334, ans=0.125
+2024-07-29 07:28:54,702 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.32 vs. limit=12.0
+2024-07-29 07:28:56,865 INFO [train.py:1114] (0/4) Epoch 19, batch 8100, loss[loss=0.1796, simple_loss=0.261, pruned_loss=0.04916, over 4800.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2629, pruned_loss=0.04021, over 934008.16 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:28:57,660 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=256136.0, ans=0.2
+2024-07-29 07:29:06,405 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.781e+01 6.315e+01 7.245e+01 1.091e+02, threshold=1.263e+02, percent-clipped=0.0
+2024-07-29 07:29:12,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256162.66666666666, ans=0.125
+2024-07-29 07:29:13,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=256162.66666666666, ans=0.05
+2024-07-29 07:29:13,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=256162.66666666666, ans=0.125
+2024-07-29 07:29:17,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256176.0, ans=0.125
+2024-07-29 07:29:22,898 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=256189.33333333334, ans=0.125
+2024-07-29 07:29:27,702 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=256189.33333333334, ans=0.05
+2024-07-29 07:29:29,468 INFO [train.py:1114] (0/4) Epoch 19, batch 8150, loss[loss=0.163, simple_loss=0.2633, pruned_loss=0.03129, over 4797.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2618, pruned_loss=0.04033, over 937246.11 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:29:31,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=256202.66666666666, ans=0.07
+2024-07-29 07:29:36,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=256216.0, ans=0.2
+2024-07-29 07:29:36,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256216.0, ans=0.125
+2024-07-29 07:29:39,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=256216.0, ans=0.2
+2024-07-29 07:29:40,296 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=256216.0, ans=0.04949747468305833
+2024-07-29 07:29:44,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256229.33333333334, ans=0.1
+2024-07-29 07:29:45,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=15.0
+2024-07-29 07:29:56,497 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=256256.0, ans=0.0
+2024-07-29 07:29:56,546 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=256256.0, ans=0.2
+2024-07-29 07:29:59,696 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=256256.0, ans=0.2
+2024-07-29 07:30:02,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=256269.33333333334, ans=0.0
+2024-07-29 07:30:03,071 INFO [train.py:1114] (0/4) Epoch 19, batch 8200, loss[loss=0.1684, simple_loss=0.2568, pruned_loss=0.03996, over 4811.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04001, over 938146.42 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:30:05,186 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256269.33333333334, ans=0.125
+2024-07-29 07:30:12,535 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.702e+01 6.206e+01 7.193e+01 9.525e+01, threshold=1.241e+02, percent-clipped=0.0
+2024-07-29 07:30:21,798 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256309.33333333334, ans=0.0
+2024-07-29 07:30:27,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=256309.33333333334, ans=0.125
+2024-07-29 07:30:35,119 INFO [train.py:1114] (0/4) Epoch 19, batch 8250, loss[loss=0.1765, simple_loss=0.2629, pruned_loss=0.04502, over 4903.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.262, pruned_loss=0.0402, over 938388.53 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:30:56,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=256376.0, ans=0.025
+2024-07-29 07:30:57,878 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=256376.0, ans=0.0
+2024-07-29 07:31:01,349 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256389.33333333334, ans=0.125
+2024-07-29 07:31:07,549 INFO [train.py:1114] (0/4) Epoch 19, batch 8300, loss[loss=0.182, simple_loss=0.276, pruned_loss=0.044, over 4907.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2626, pruned_loss=0.04037, over 938419.74 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:31:09,568 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=256402.66666666666, ans=0.0
+2024-07-29 07:31:14,735 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=256416.0, ans=0.125
+2024-07-29 07:31:17,053 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.581e+01 6.136e+01 6.669e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 07:31:17,170 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256416.0, ans=0.1
+2024-07-29 07:31:19,903 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.88 vs. limit=15.0
+2024-07-29 07:31:33,197 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=256456.0, ans=0.1
+2024-07-29 07:31:41,615 INFO [train.py:1114] (0/4) Epoch 19, batch 8350, loss[loss=0.1992, simple_loss=0.2886, pruned_loss=0.05484, over 4799.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2627, pruned_loss=0.04034, over 941220.12 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:31:49,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256482.66666666666, ans=0.125
+2024-07-29 07:31:52,122 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=256482.66666666666, ans=0.09899494936611666
+2024-07-29 07:31:56,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=256496.0, ans=0.0
+2024-07-29 07:31:57,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=256496.0, ans=0.125
+2024-07-29 07:32:00,105 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=256496.0, ans=0.025
+2024-07-29 07:32:01,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=256509.33333333334, ans=0.0
+2024-07-29 07:32:14,722 INFO [train.py:1114] (0/4) Epoch 19, batch 8400, loss[loss=0.1686, simple_loss=0.2556, pruned_loss=0.04081, over 4764.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2629, pruned_loss=0.04048, over 939922.82 frames. ], batch size: 12, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:32:23,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256549.33333333334, ans=0.125
+2024-07-29 07:32:24,405 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.693e+01 5.636e+01 6.331e+01 6.924e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-29 07:32:37,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=256576.0, ans=0.2
+2024-07-29 07:32:38,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=256576.0, ans=0.125
+2024-07-29 07:32:38,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=256576.0, ans=0.0
+2024-07-29 07:32:39,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=256576.0, ans=0.0
+2024-07-29 07:32:44,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=256589.33333333334, ans=0.0
+2024-07-29 07:32:49,223 INFO [train.py:1114] (0/4) Epoch 19, batch 8450, loss[loss=0.1969, simple_loss=0.2756, pruned_loss=0.05912, over 4789.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2634, pruned_loss=0.04039, over 938952.16 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:32:50,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=256602.66666666666, ans=0.125
+2024-07-29 07:32:54,337 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256602.66666666666, ans=0.0
+2024-07-29 07:33:07,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=256616.0, ans=0.0
+2024-07-29 07:33:11,433 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=256629.33333333334, ans=15.0
+2024-07-29 07:33:28,033 INFO [train.py:1114] (0/4) Epoch 19, batch 8500, loss[loss=0.1622, simple_loss=0.2501, pruned_loss=0.03713, over 4618.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04021, over 938941.18 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:33:30,097 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=256669.33333333334, ans=0.0
+2024-07-29 07:33:31,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256669.33333333334, ans=0.1
+2024-07-29 07:33:37,643 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.699e+01 6.220e+01 6.936e+01 1.043e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-29 07:33:46,073 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.42 vs. limit=22.5
+2024-07-29 07:33:56,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=256722.66666666666, ans=0.0
+2024-07-29 07:34:03,197 INFO [train.py:1114] (0/4) Epoch 19, batch 8550, loss[loss=0.2013, simple_loss=0.2954, pruned_loss=0.05362, over 4805.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.264, pruned_loss=0.04056, over 939676.03 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:34:06,514 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256736.0, ans=0.125
+2024-07-29 07:34:16,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=256762.66666666666, ans=0.07
+2024-07-29 07:34:19,518 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. limit=12.0
+2024-07-29 07:34:47,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=256789.33333333334, ans=0.125
+2024-07-29 07:34:48,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=256789.33333333334, ans=0.2
+2024-07-29 07:34:52,560 INFO [train.py:1114] (0/4) Epoch 19, batch 8600, loss[loss=0.1764, simple_loss=0.2786, pruned_loss=0.03707, over 4810.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2632, pruned_loss=0.04042, over 939266.92 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:35:04,900 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.627e+01 6.563e+01 7.545e+01 1.202e+02, threshold=1.313e+02, percent-clipped=0.0
+2024-07-29 07:35:05,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256816.0, ans=0.125
+2024-07-29 07:35:19,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256842.66666666666, ans=0.1
+2024-07-29 07:35:23,308 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=256856.0, ans=0.07
+2024-07-29 07:35:29,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=256856.0, ans=0.2
+2024-07-29 07:36:04,066 INFO [train.py:1114] (0/4) Epoch 19, batch 8650, loss[loss=0.1861, simple_loss=0.2677, pruned_loss=0.05229, over 4901.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.263, pruned_loss=0.04026, over 940461.09 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:36:04,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=256869.33333333334, ans=0.95
+2024-07-29 07:36:25,830 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=256909.33333333334, ans=0.125
+2024-07-29 07:36:28,212 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.63 vs. limit=15.0
+2024-07-29 07:36:35,928 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=256909.33333333334, ans=10.0
+2024-07-29 07:36:44,196 INFO [train.py:1114] (0/4) Epoch 19, batch 8700, loss[loss=0.156, simple_loss=0.2409, pruned_loss=0.03553, over 4758.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04051, over 937887.56 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:36:46,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=256936.0, ans=0.0
+2024-07-29 07:36:50,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256949.33333333334, ans=0.1
+2024-07-29 07:36:53,805 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.735e+01 6.299e+01 7.253e+01 1.043e+02, threshold=1.260e+02, percent-clipped=0.0
+2024-07-29 07:37:00,488 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=15.0
+2024-07-29 07:37:02,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256962.66666666666, ans=0.1
+2024-07-29 07:37:27,869 INFO [train.py:1114] (0/4) Epoch 19, batch 8750, loss[loss=0.191, simple_loss=0.2784, pruned_loss=0.05181, over 4693.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2639, pruned_loss=0.04071, over 936482.52 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:37:47,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=257002.66666666666, ans=0.2
+2024-07-29 07:37:54,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257016.0, ans=0.0
+2024-07-29 07:37:57,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257029.33333333334, ans=0.1
+2024-07-29 07:38:15,054 INFO [train.py:1114] (0/4) Epoch 19, batch 8800, loss[loss=0.2213, simple_loss=0.3046, pruned_loss=0.06897, over 4932.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2642, pruned_loss=0.04051, over 937142.72 frames. ], batch size: 14, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:38:38,826 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.487e+01 5.702e+01 6.437e+01 7.118e+01 1.132e+02, threshold=1.287e+02, percent-clipped=0.0
+2024-07-29 07:38:44,704 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 07:38:51,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=257096.0, ans=0.0
+2024-07-29 07:38:57,350 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.74 vs. limit=5.0
+2024-07-29 07:39:09,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=257122.66666666666, ans=0.125
+2024-07-29 07:39:10,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=257122.66666666666, ans=0.125
+2024-07-29 07:39:16,479 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=257122.66666666666, ans=0.0
+2024-07-29 07:39:17,545 INFO [train.py:1114] (0/4) Epoch 19, batch 8850, loss[loss=0.1835, simple_loss=0.2739, pruned_loss=0.04661, over 4402.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2635, pruned_loss=0.0408, over 932347.93 frames. ], batch size: 21, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:39:20,298 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=257136.0, ans=0.0
+2024-07-29 07:39:36,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=257162.66666666666, ans=0.0
+2024-07-29 07:39:41,196 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.27 vs. limit=15.0
+2024-07-29 07:39:43,740 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=22.5
+2024-07-29 07:39:46,134 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=257176.0, ans=0.0
+2024-07-29 07:39:46,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=257176.0, ans=0.125
+2024-07-29 07:39:47,618 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.10 vs. limit=15.0
+2024-07-29 07:39:49,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=257189.33333333334, ans=0.1
+2024-07-29 07:39:50,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=257189.33333333334, ans=0.0
+2024-07-29 07:39:54,402 INFO [train.py:1114] (0/4) Epoch 19, batch 8900, loss[loss=0.1579, simple_loss=0.2432, pruned_loss=0.03629, over 4940.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.04003, over 929934.55 frames. ], batch size: 12, lr: 3.88e-03, grad_scale: 32.0
+2024-07-29 07:40:01,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257202.66666666666, ans=0.125
+2024-07-29 07:40:03,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257202.66666666666, ans=0.1
+2024-07-29 07:40:20,105 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.83 vs. limit=22.5
+2024-07-29 07:40:20,227 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.736e+01 6.296e+01 7.033e+01 9.064e+01, threshold=1.259e+02, percent-clipped=0.0
+2024-07-29 07:42:57,967 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257229.33333333334, ans=0.0
+2024-07-29 07:43:14,275 INFO [train.py:1114] (0/4) Epoch 19, batch 8950, loss[loss=0.1837, simple_loss=0.2776, pruned_loss=0.04489, over 4567.00 frames. ], tot_loss[loss=0.172, simple_loss=0.263, pruned_loss=0.0405, over 930645.06 frames. ], batch size: 21, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:43:21,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=257282.66666666666, ans=0.0
+2024-07-29 07:43:32,708 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.90 vs. limit=10.0
+2024-07-29 07:43:47,528 INFO [train.py:1114] (0/4) Epoch 19, batch 9000, loss[loss=0.1636, simple_loss=0.2531, pruned_loss=0.03709, over 4641.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04054, over 933564.24 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:43:47,529 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-29 07:43:59,128 INFO [train.py:1146] (0/4) Epoch 19, validation: loss=0.1612, simple_loss=0.2635, pruned_loss=0.02943, over 944034.00 frames.
+2024-07-29 07:43:59,128 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-29 07:44:00,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=257336.0, ans=0.125
+2024-07-29 07:44:05,044 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257349.33333333334, ans=0.125
+2024-07-29 07:44:05,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=257349.33333333334, ans=0.025
+2024-07-29 07:44:08,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.72 vs. limit=22.5
+2024-07-29 07:44:08,779 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.623e+01 6.391e+01 7.404e+01 1.117e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-29 07:44:14,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=257362.66666666666, ans=0.125
+2024-07-29 07:44:20,957 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257376.0, ans=0.125
+2024-07-29 07:44:24,685 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=257389.33333333334, ans=0.025
+2024-07-29 07:44:24,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=257389.33333333334, ans=0.125
+2024-07-29 07:44:27,276 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=257389.33333333334, ans=0.125
+2024-07-29 07:44:27,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.16 vs. limit=15.0
+2024-07-29 07:44:28,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.51 vs. limit=15.0
+2024-07-29 07:44:31,588 INFO [train.py:1114] (0/4) Epoch 19, batch 9050, loss[loss=0.1422, simple_loss=0.2252, pruned_loss=0.02966, over 4534.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2615, pruned_loss=0.03972, over 934117.56 frames. ], batch size: 10, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:44:33,206 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.99 vs. limit=15.0
+2024-07-29 07:45:02,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=257456.0, ans=0.0
+2024-07-29 07:45:04,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257456.0, ans=0.125
+2024-07-29 07:45:09,927 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.90 vs. limit=22.5
+2024-07-29 07:45:10,206 INFO [train.py:1114] (0/4) Epoch 19, batch 9100, loss[loss=0.175, simple_loss=0.2668, pruned_loss=0.04163, over 4945.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2611, pruned_loss=0.03944, over 936767.66 frames. ], batch size: 14, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:45:10,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257469.33333333334, ans=0.125
+2024-07-29 07:45:26,005 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=257482.66666666666, ans=0.1
+2024-07-29 07:45:26,413 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.674e+01 6.326e+01 7.504e+01 9.644e+01, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 07:45:39,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=257509.33333333334, ans=0.125
+2024-07-29 07:45:40,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=257509.33333333334, ans=0.0
+2024-07-29 07:45:41,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=257509.33333333334, ans=0.0
+2024-07-29 07:45:54,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=257522.66666666666, ans=0.125
+2024-07-29 07:45:57,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=257536.0, ans=0.125
+2024-07-29 07:45:57,995 INFO [train.py:1114] (0/4) Epoch 19, batch 9150, loss[loss=0.2074, simple_loss=0.2936, pruned_loss=0.06057, over 4803.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2621, pruned_loss=0.03976, over 935660.76 frames. ], batch size: 14, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:46:10,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257549.33333333334, ans=0.1
+2024-07-29 07:46:14,270 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=257562.66666666666, ans=0.125
+2024-07-29 07:46:20,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=257576.0, ans=0.1
+2024-07-29 07:46:21,975 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=257576.0, ans=0.04949747468305833
+2024-07-29 07:46:30,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=257589.33333333334, ans=0.125
+2024-07-29 07:46:34,040 INFO [train.py:1114] (0/4) Epoch 19, batch 9200, loss[loss=0.1581, simple_loss=0.2436, pruned_loss=0.03625, over 4860.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2612, pruned_loss=0.03953, over 937526.84 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:46:41,119 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257616.0, ans=0.0
+2024-07-29 07:46:43,417 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.777e+01 6.391e+01 7.233e+01 9.749e+01, threshold=1.278e+02, percent-clipped=0.0
+2024-07-29 07:46:52,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=257642.66666666666, ans=0.0
+2024-07-29 07:46:58,477 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=257642.66666666666, ans=15.0
+2024-07-29 07:47:05,860 INFO [train.py:1114] (0/4) Epoch 19, batch 9250, loss[loss=0.178, simple_loss=0.2768, pruned_loss=0.03957, over 4627.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2605, pruned_loss=0.03922, over 938277.69 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:47:23,863 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257696.0, ans=0.125
+2024-07-29 07:47:24,549 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=257696.0, ans=0.0
+2024-07-29 07:47:29,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=257709.33333333334, ans=0.125
+2024-07-29 07:47:38,218 INFO [train.py:1114] (0/4) Epoch 19, batch 9300, loss[loss=0.1327, simple_loss=0.2277, pruned_loss=0.01879, over 4782.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2613, pruned_loss=0.03963, over 938140.16 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:47:40,802 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257736.0, ans=0.125
+2024-07-29 07:47:43,674 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.03 vs. limit=15.0
+2024-07-29 07:47:47,626 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 6.148e+01 7.388e+01 1.007e+02, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 07:47:51,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=257749.33333333334, ans=0.125
+2024-07-29 07:47:57,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257762.66666666666, ans=0.0
+2024-07-29 07:47:59,642 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=257776.0, ans=0.125
+2024-07-29 07:48:00,436 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.51 vs. limit=15.0
+2024-07-29 07:48:11,355 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.43 vs. limit=15.0
+2024-07-29 07:48:11,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=257789.33333333334, ans=0.125
+2024-07-29 07:48:11,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=257789.33333333334, ans=0.125
+2024-07-29 07:48:12,930 INFO [train.py:1114] (0/4) Epoch 19, batch 9350, loss[loss=0.1416, simple_loss=0.2229, pruned_loss=0.03015, over 4795.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03994, over 934884.27 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:48:14,259 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=257802.66666666666, ans=0.05
+2024-07-29 07:48:16,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=257802.66666666666, ans=0.2
+2024-07-29 07:48:19,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=257816.0, ans=0.125
+2024-07-29 07:48:30,196 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=257829.33333333334, ans=0.07
+2024-07-29 07:48:38,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=257856.0, ans=0.1
+2024-07-29 07:48:44,699 INFO [train.py:1114] (0/4) Epoch 19, batch 9400, loss[loss=0.1411, simple_loss=0.2335, pruned_loss=0.02439, over 4693.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2624, pruned_loss=0.04047, over 932792.55 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:48:50,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=257882.66666666666, ans=0.0
+2024-07-29 07:48:54,093 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.728e+01 6.199e+01 7.519e+01 1.174e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-29 07:49:00,994 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257896.0, ans=0.125
+2024-07-29 07:49:16,047 INFO [train.py:1114] (0/4) Epoch 19, batch 9450, loss[loss=0.1574, simple_loss=0.2402, pruned_loss=0.03726, over 4795.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2626, pruned_loss=0.04064, over 931733.77 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:49:16,826 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 07:49:19,289 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257936.0, ans=0.125
+2024-07-29 07:49:53,151 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=257989.33333333334, ans=0.07
+2024-07-29 07:49:55,423 INFO [train.py:1114] (0/4) Epoch 19, batch 9500, loss[loss=0.1456, simple_loss=0.229, pruned_loss=0.03114, over 4699.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.0408, over 934170.15 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:50:04,911 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.577e+01 5.446e+01 5.959e+01 6.735e+01 9.596e+01, threshold=1.192e+02, percent-clipped=0.0
+2024-07-29 07:50:08,189 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=258029.33333333334, ans=0.04949747468305833
+2024-07-29 07:50:23,145 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=258056.0, ans=0.125
+2024-07-29 07:50:27,299 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=258056.0, ans=0.125
+2024-07-29 07:50:29,030 INFO [train.py:1114] (0/4) Epoch 19, batch 9550, loss[loss=0.1552, simple_loss=0.2444, pruned_loss=0.03301, over 4775.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2637, pruned_loss=0.04044, over 931566.34 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:50:30,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=258069.33333333334, ans=0.05
+2024-07-29 07:50:33,925 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=258069.33333333334, ans=0.125
+2024-07-29 07:50:38,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=258082.66666666666, ans=0.1
+2024-07-29 07:51:12,496 INFO [train.py:1114] (0/4) Epoch 19, batch 9600, loss[loss=0.2376, simple_loss=0.306, pruned_loss=0.08459, over 3571.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2643, pruned_loss=0.04102, over 930671.92 frames. ], batch size: 35, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:51:15,154 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=258136.0, ans=0.125
+2024-07-29 07:51:20,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=258149.33333333334, ans=0.2
+2024-07-29 07:51:20,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=258149.33333333334, ans=0.2
+2024-07-29 07:51:21,988 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.937e+01 6.386e+01 7.744e+01 1.025e+02, threshold=1.277e+02, percent-clipped=0.0
+2024-07-29 07:51:22,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=258149.33333333334, ans=0.125
+2024-07-29 07:51:46,117 INFO [train.py:1114] (0/4) Epoch 19, batch 9650, loss[loss=0.1833, simple_loss=0.2748, pruned_loss=0.04588, over 4840.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2644, pruned_loss=0.04118, over 926964.29 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:51:59,952 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 07:52:01,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258229.33333333334, ans=0.125
+2024-07-29 07:52:03,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258229.33333333334, ans=0.1
+2024-07-29 07:52:11,000 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 07:52:15,654 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.52 vs. limit=15.0
+2024-07-29 07:52:17,705 INFO [train.py:1114] (0/4) Epoch 19, batch 9700, loss[loss=0.1861, simple_loss=0.2741, pruned_loss=0.04899, over 4221.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2649, pruned_loss=0.04104, over 925297.58 frames. ], batch size: 25, lr: 3.87e-03, grad_scale: 32.0
+2024-07-29 07:52:26,431 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=258282.66666666666, ans=0.125
+2024-07-29 07:52:26,891 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.780e+01 6.621e+01 7.551e+01 1.114e+02, threshold=1.324e+02, percent-clipped=0.0
+2024-07-29 07:52:35,626 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258296.0, ans=0.1
+2024-07-29 07:52:42,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0
+2024-07-29 07:52:44,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=258322.66666666666, ans=0.025
+2024-07-29 07:52:52,006 INFO [train.py:1114] (0/4) Epoch 19, batch 9750, loss[loss=0.2029, simple_loss=0.2982, pruned_loss=0.05378, over 4670.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2646, pruned_loss=0.04059, over 925730.56 frames.
], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:52:53,472 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=258336.0, ans=0.05 +2024-07-29 07:53:06,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=258362.66666666666, ans=0.2 +2024-07-29 07:53:08,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=12.0 +2024-07-29 07:53:10,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=258362.66666666666, ans=0.125 +2024-07-29 07:53:10,132 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=258362.66666666666, ans=0.0 +2024-07-29 07:53:57,854 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=258389.33333333334, ans=0.0 +2024-07-29 07:53:59,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258389.33333333334, ans=0.0 +2024-07-29 07:54:17,463 INFO [train.py:1114] (0/4) Epoch 19, batch 9800, loss[loss=0.1798, simple_loss=0.2615, pruned_loss=0.0491, over 4698.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2642, pruned_loss=0.04072, over 924748.42 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:54:24,376 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=258416.0, ans=0.04949747468305833 +2024-07-29 07:54:27,272 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.598e+01 6.395e+01 7.278e+01 1.117e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:54:28,201 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=258416.0, ans=0.125 +2024-07-29 07:54:30,814 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=258429.33333333334, ans=0.2 +2024-07-29 07:54:40,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258442.66666666666, ans=0.1 +2024-07-29 07:54:49,137 INFO [train.py:1114] (0/4) Epoch 19, batch 9850, loss[loss=0.1925, simple_loss=0.2864, pruned_loss=0.04934, over 4910.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2641, pruned_loss=0.04107, over 927576.09 frames. ], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:54:54,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=258469.33333333334, ans=0.125 +2024-07-29 07:55:21,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=258496.0, ans=0.125 +2024-07-29 07:55:21,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.88 vs. 
limit=15.0 +2024-07-29 07:55:27,515 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=258509.33333333334, ans=0.125 +2024-07-29 07:55:40,081 INFO [train.py:1114] (0/4) Epoch 19, batch 9900, loss[loss=0.1897, simple_loss=0.2811, pruned_loss=0.04913, over 4822.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2661, pruned_loss=0.04226, over 926750.00 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:55:40,245 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=258536.0, ans=0.0 +2024-07-29 07:55:40,284 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258536.0, ans=0.1 +2024-07-29 07:55:45,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=258549.33333333334, ans=0.2 +2024-07-29 07:55:48,394 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=258549.33333333334, ans=0.125 +2024-07-29 07:55:49,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=258549.33333333334, ans=0.95 +2024-07-29 07:55:49,455 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.747e+01 6.549e+01 7.522e+01 9.931e+01, threshold=1.310e+02, percent-clipped=0.0 +2024-07-29 07:56:07,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=258589.33333333334, ans=0.0 +2024-07-29 07:56:10,891 INFO [train.py:1114] (0/4) Epoch 19, batch 9950, loss[loss=0.1313, simple_loss=0.2224, pruned_loss=0.02005, over 4795.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2644, pruned_loss=0.04161, over 929068.91 frames. ], batch size: 11, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:22,835 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=258629.33333333334, ans=0.125 +2024-07-29 07:56:36,222 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.73 vs. limit=22.5 +2024-07-29 07:56:37,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=258656.0, ans=0.1 +2024-07-29 07:56:42,864 INFO [train.py:1114] (0/4) Epoch 19, batch 10000, loss[loss=0.1806, simple_loss=0.2837, pruned_loss=0.03869, over 4648.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2671, pruned_loss=0.04243, over 926659.44 frames. ], batch size: 16, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:43,098 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.58 vs. limit=15.0 +2024-07-29 07:56:48,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258682.66666666666, ans=0.125 +2024-07-29 07:56:51,918 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.80 vs. 
limit=15.0 +2024-07-29 07:56:51,998 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.927e+01 5.763e+01 6.186e+01 6.988e+01 1.066e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:57:03,331 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-07-29 07:57:14,797 INFO [train.py:1114] (0/4) Epoch 19, batch 10050, loss[loss=0.2229, simple_loss=0.3112, pruned_loss=0.06725, over 3575.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2703, pruned_loss=0.04396, over 916407.96 frames. ], batch size: 36, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:18,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258736.0, ans=0.1 +2024-07-29 07:57:22,127 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:57:28,846 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=258762.66666666666, ans=10.0 +2024-07-29 07:57:33,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=258762.66666666666, ans=0.125 +2024-07-29 07:57:37,858 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.65 vs. limit=15.0 +2024-07-29 07:57:46,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=258789.33333333334, ans=0.125 +2024-07-29 07:57:48,579 INFO [train.py:1114] (0/4) Epoch 19, batch 10100, loss[loss=0.2295, simple_loss=0.3001, pruned_loss=0.07945, over 3548.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2743, pruned_loss=0.04791, over 862791.10 frames. ], batch size: 36, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:58,541 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.634e+01 7.311e+01 7.897e+01 1.171e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-29 07:58:12,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=258842.66666666666, ans=0.0 +2024-07-29 07:58:21,408 INFO [train.py:1114] (0/4) Epoch 19, batch 10150, loss[loss=0.2447, simple_loss=0.3268, pruned_loss=0.08127, over 3008.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.278, pruned_loss=0.05083, over 822517.41 frames. 
], batch size: 35, lr: 3.86e-03, grad_scale: 32.0 +2024-07-29 07:58:23,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=258869.33333333334, ans=0.125 +2024-07-29 07:58:25,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258869.33333333334, ans=0.125 +2024-07-29 07:58:26,009 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258869.33333333334, ans=0.1 +2024-07-29 07:58:54,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=258909.33333333334, ans=0.0 +2024-07-29 07:58:58,576 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:59:09,577 INFO [train.py:1114] (0/4) Epoch 19, batch 10200, loss[loss=0.2322, simple_loss=0.3142, pruned_loss=0.07514, over 3363.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2798, pruned_loss=0.05336, over 791019.50 frames. ], batch size: 36, lr: 3.86e-03, grad_scale: 32.0 +2024-07-29 07:59:19,711 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.024e+01 7.484e+01 8.101e+01 1.029e+02, threshold=1.497e+02, percent-clipped=0.0 +2024-07-29 07:59:23,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=258962.66666666666, ans=10.0 +2024-07-29 07:59:25,262 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-19.pt +2024-07-29 08:02:27,570 INFO [train.py:1114] (0/4) Epoch 20, batch 0, loss[loss=0.1424, simple_loss=0.2368, pruned_loss=0.024, over 4853.00 frames. ], tot_loss[loss=0.1424, simple_loss=0.2368, pruned_loss=0.024, over 4853.00 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:02:27,571 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 08:02:31,788 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0078, 3.9176, 3.5313, 3.6422], device='cuda:0') +2024-07-29 08:02:35,290 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.7838, 5.6975, 5.1057, 5.3234], device='cuda:0') +2024-07-29 08:02:40,770 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.161, simple_loss=0.2644, pruned_loss=0.02883, over 944034.00 frames. +2024-07-29 08:02:40,770 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 08:02:44,385 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258966.66666666666, ans=0.0 +2024-07-29 08:02:47,901 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=258980.0, ans=0.125 +2024-07-29 08:02:49,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=258980.0, ans=0.0 +2024-07-29 08:02:59,276 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.38 vs. 
limit=15.0 +2024-07-29 08:03:03,066 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=259006.66666666666, ans=0.2 +2024-07-29 08:03:14,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=259020.0, ans=0.0 +2024-07-29 08:03:17,490 INFO [train.py:1114] (0/4) Epoch 20, batch 50, loss[loss=0.1425, simple_loss=0.2364, pruned_loss=0.02427, over 4613.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.267, pruned_loss=0.04142, over 205392.67 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:03:21,744 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=259033.33333333334, ans=0.125 +2024-07-29 08:03:32,841 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.72 vs. limit=15.0 +2024-07-29 08:03:47,269 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.582e+01 6.158e+01 6.826e+01 9.280e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 08:03:51,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=259100.0, ans=0.2 +2024-07-29 08:03:51,980 INFO [train.py:1114] (0/4) Epoch 20, batch 100, loss[loss=0.1427, simple_loss=0.2306, pruned_loss=0.02739, over 4642.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2666, pruned_loss=0.04109, over 364761.15 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:04:15,404 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=259140.0, ans=0.2 +2024-07-29 08:04:18,177 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259140.0, ans=0.125 +2024-07-29 08:04:20,774 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=259153.33333333334, ans=0.0 +2024-07-29 08:04:27,188 INFO [train.py:1114] (0/4) Epoch 20, batch 150, loss[loss=0.1405, simple_loss=0.2347, pruned_loss=0.02316, over 4630.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2652, pruned_loss=0.04069, over 493589.47 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:04:27,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=259166.66666666666, ans=0.1 +2024-07-29 08:04:29,677 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.02 vs. limit=15.0 +2024-07-29 08:04:30,846 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.40 vs. limit=22.5 +2024-07-29 08:04:38,252 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=259180.0, ans=0.0 +2024-07-29 08:04:42,434 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.80 vs. 
limit=10.0 +2024-07-29 08:04:45,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=259193.33333333334, ans=0.0 +2024-07-29 08:04:56,712 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:04:57,099 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.593e+01 6.135e+01 6.886e+01 1.305e+02, threshold=1.227e+02, percent-clipped=1.0 +2024-07-29 08:05:01,728 INFO [train.py:1114] (0/4) Epoch 20, batch 200, loss[loss=0.1696, simple_loss=0.2577, pruned_loss=0.04073, over 4492.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03975, over 593278.95 frames. ], batch size: 21, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:16,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259260.0, ans=0.125 +2024-07-29 08:05:23,621 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-29 08:05:24,514 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-07-29 08:05:26,945 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=259260.0, ans=0.125 +2024-07-29 08:05:31,831 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259273.33333333334, ans=0.1 +2024-07-29 08:05:39,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=259273.33333333334, ans=0.2 +2024-07-29 08:05:43,747 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.68 vs. limit=12.0 +2024-07-29 08:05:50,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=259300.0, ans=0.2 +2024-07-29 08:05:50,869 INFO [train.py:1114] (0/4) Epoch 20, batch 250, loss[loss=0.1908, simple_loss=0.274, pruned_loss=0.05382, over 4618.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04024, over 670533.67 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:57,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=259313.33333333334, ans=0.125 +2024-07-29 08:06:03,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=259313.33333333334, ans=0.125 +2024-07-29 08:06:04,055 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. 
limit=10.0 +2024-07-29 08:06:51,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:06:52,087 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:06:55,227 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=259326.66666666666, ans=0.0 +2024-07-29 08:07:11,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=259340.0, ans=0.2 +2024-07-29 08:07:12,473 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:07:12,474 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=259340.0, ans=0.125 +2024-07-29 08:07:30,204 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.734e+01 6.099e+01 7.044e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 08:07:30,678 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.73 vs. limit=10.0 +2024-07-29 08:07:34,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259353.33333333334, ans=0.1 +2024-07-29 08:07:36,800 INFO [train.py:1114] (0/4) Epoch 20, batch 300, loss[loss=0.1636, simple_loss=0.2593, pruned_loss=0.03394, over 4798.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03979, over 730187.01 frames. ], batch size: 15, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:07:39,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.80 vs. limit=15.0 +2024-07-29 08:07:49,993 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.36 vs. limit=15.0 +2024-07-29 08:08:00,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=259406.66666666666, ans=0.0 +2024-07-29 08:08:14,344 INFO [train.py:1114] (0/4) Epoch 20, batch 350, loss[loss=0.1763, simple_loss=0.2641, pruned_loss=0.04422, over 4938.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2612, pruned_loss=0.03957, over 776116.97 frames. 
], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:08:20,635 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259433.33333333334, ans=0.1 +2024-07-29 08:08:32,163 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=259460.0, ans=0.125 +2024-07-29 08:08:39,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=259473.33333333334, ans=0.125 +2024-07-29 08:08:39,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=259473.33333333334, ans=0.05 +2024-07-29 08:08:47,021 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.322e+01 5.507e+01 5.880e+01 6.811e+01 8.968e+01, threshold=1.176e+02, percent-clipped=0.0 +2024-07-29 08:08:49,926 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.63 vs. limit=22.5 +2024-07-29 08:08:51,658 INFO [train.py:1114] (0/4) Epoch 20, batch 400, loss[loss=0.2117, simple_loss=0.3032, pruned_loss=0.06013, over 4679.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.262, pruned_loss=0.04006, over 813759.08 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:09:00,243 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=259513.33333333334, ans=0.0 +2024-07-29 08:09:02,892 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=259513.33333333334, ans=0.125 +2024-07-29 08:09:02,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259513.33333333334, ans=0.125 +2024-07-29 08:09:02,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=259513.33333333334, ans=10.0 +2024-07-29 08:09:09,281 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=259526.66666666666, ans=0.125 +2024-07-29 08:09:17,426 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.44 vs. limit=22.5 +2024-07-29 08:09:30,923 INFO [train.py:1114] (0/4) Epoch 20, batch 450, loss[loss=0.1702, simple_loss=0.2717, pruned_loss=0.03432, over 4631.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2619, pruned_loss=0.03965, over 839094.49 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:09:33,361 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.14 vs. 
limit=15.0 +2024-07-29 08:09:36,429 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:09:39,133 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=259580.0, ans=0.0 +2024-07-29 08:09:42,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=259580.0, ans=10.0 +2024-07-29 08:09:59,290 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=259606.66666666666, ans=0.125 +2024-07-29 08:10:03,827 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=259620.0, ans=0.0 +2024-07-29 08:10:04,237 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.44 vs. limit=22.5 +2024-07-29 08:10:05,756 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.641e+01 6.168e+01 6.736e+01 1.200e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-29 08:10:10,563 INFO [train.py:1114] (0/4) Epoch 20, batch 500, loss[loss=0.1889, simple_loss=0.2826, pruned_loss=0.04763, over 4685.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2611, pruned_loss=0.03939, over 861341.37 frames. ], batch size: 15, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:10:27,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=259660.0, ans=0.125 +2024-07-29 08:10:35,956 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259673.33333333334, ans=0.125 +2024-07-29 08:10:51,612 INFO [train.py:1114] (0/4) Epoch 20, batch 550, loss[loss=0.2244, simple_loss=0.3133, pruned_loss=0.0677, over 4613.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2608, pruned_loss=0.03917, over 877467.94 frames. ], batch size: 17, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:14:22,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=259700.0, ans=0.125 +2024-07-29 08:15:11,058 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=259740.0, ans=0.0 +2024-07-29 08:15:14,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259740.0, ans=0.125 +2024-07-29 08:15:15,026 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=259740.0, ans=0.125 +2024-07-29 08:15:21,353 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.418e+01 6.036e+01 6.579e+01 9.144e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 08:15:29,554 INFO [train.py:1114] (0/4) Epoch 20, batch 600, loss[loss=0.183, simple_loss=0.2738, pruned_loss=0.04606, over 4589.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2614, pruned_loss=0.03972, over 892029.32 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:15:31,270 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.89 vs. 
limit=22.5 +2024-07-29 08:15:35,014 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=259766.66666666666, ans=0.125 +2024-07-29 08:15:43,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=259793.33333333334, ans=0.0 +2024-07-29 08:15:46,494 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0 +2024-07-29 08:15:51,519 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.77 vs. limit=15.0 +2024-07-29 08:15:54,966 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=259806.66666666666, ans=0.0 +2024-07-29 08:15:55,552 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=259806.66666666666, ans=0.125 +2024-07-29 08:16:10,535 INFO [train.py:1114] (0/4) Epoch 20, batch 650, loss[loss=0.1797, simple_loss=0.2666, pruned_loss=0.04638, over 4758.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03999, over 903689.07 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:17:39,872 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. limit=15.0 +2024-07-29 08:18:03,335 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=259873.33333333334, ans=0.0 +2024-07-29 08:18:25,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259873.33333333334, ans=0.1 +2024-07-29 08:18:30,218 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.565e+01 6.152e+01 6.795e+01 9.682e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 08:18:35,045 INFO [train.py:1114] (0/4) Epoch 20, batch 700, loss[loss=0.1484, simple_loss=0.2363, pruned_loss=0.03022, over 4637.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2626, pruned_loss=0.04038, over 911636.56 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:18:57,358 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=259926.66666666666, ans=0.0 +2024-07-29 08:19:06,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=259940.0, ans=0.025 +2024-07-29 08:19:07,692 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0 +2024-07-29 08:19:10,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259940.0, ans=0.125 +2024-07-29 08:19:19,953 INFO [train.py:1114] (0/4) Epoch 20, batch 750, loss[loss=0.1518, simple_loss=0.247, pruned_loss=0.0283, over 4699.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.0401, over 918255.73 frames. 
], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:19:21,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=259966.66666666666, ans=0.125 +2024-07-29 08:19:23,361 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=259966.66666666666, ans=0.125 +2024-07-29 08:19:41,906 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260006.66666666666, ans=0.125 +2024-07-29 08:19:43,249 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=260006.66666666666, ans=0.125 +2024-07-29 08:19:46,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=260006.66666666666, ans=0.2 +2024-07-29 08:19:51,137 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.579e+01 6.090e+01 6.934e+01 1.125e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 08:19:52,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=260020.0, ans=22.5 +2024-07-29 08:19:55,804 INFO [train.py:1114] (0/4) Epoch 20, batch 800, loss[loss=0.1561, simple_loss=0.2249, pruned_loss=0.0436, over 4849.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2611, pruned_loss=0.03975, over 923014.01 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:19:56,505 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=260033.33333333334, ans=0.125 +2024-07-29 08:19:58,032 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-29 08:20:09,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=260060.0, ans=0.1 +2024-07-29 08:20:15,833 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260073.33333333334, ans=0.1 +2024-07-29 08:20:25,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=260073.33333333334, ans=0.0 +2024-07-29 08:20:34,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=260073.33333333334, ans=0.125 +2024-07-29 08:20:40,679 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=260086.66666666666, ans=0.0 +2024-07-29 08:20:41,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.77 vs. limit=15.0 +2024-07-29 08:20:43,575 INFO [train.py:1114] (0/4) Epoch 20, batch 850, loss[loss=0.1703, simple_loss=0.2603, pruned_loss=0.04021, over 4667.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2612, pruned_loss=0.03997, over 927145.22 frames. 
], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:20:44,368 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=260100.0, ans=0.125 +2024-07-29 08:20:49,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260113.33333333334, ans=0.1 +2024-07-29 08:20:54,441 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0 +2024-07-29 08:21:12,434 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.604e+01 6.314e+01 7.197e+01 9.359e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 08:21:17,262 INFO [train.py:1114] (0/4) Epoch 20, batch 900, loss[loss=0.2003, simple_loss=0.2778, pruned_loss=0.06144, over 4839.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2621, pruned_loss=0.04049, over 928021.48 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:21:23,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=260180.0, ans=0.035 +2024-07-29 08:21:42,181 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:21:52,559 INFO [train.py:1114] (0/4) Epoch 20, batch 950, loss[loss=0.153, simple_loss=0.24, pruned_loss=0.03299, over 4781.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2616, pruned_loss=0.03986, over 929305.28 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:23:11,850 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:23:11,857 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260246.66666666666, ans=0.125 +2024-07-29 08:23:11,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=260246.66666666666, ans=0.0 +2024-07-29 08:23:49,741 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=260260.0, ans=0.2 +2024-07-29 08:23:52,064 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.15 vs. 
limit=22.5 +2024-07-29 08:23:53,210 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=260273.33333333334, ans=0.2 +2024-07-29 08:23:53,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=260273.33333333334, ans=0.125 +2024-07-29 08:23:54,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260273.33333333334, ans=0.125 +2024-07-29 08:23:56,037 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=260273.33333333334, ans=0.0 +2024-07-29 08:24:00,572 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:24:02,477 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.740e+01 6.532e+01 7.410e+01 9.580e+01, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 08:24:03,729 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.78 vs. limit=15.0 +2024-07-29 08:24:05,476 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=260286.66666666666, ans=0.125 +2024-07-29 08:24:06,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260300.0, ans=0.125 +2024-07-29 08:24:07,418 INFO [train.py:1114] (0/4) Epoch 20, batch 1000, loss[loss=0.1593, simple_loss=0.2593, pruned_loss=0.02964, over 4960.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04024, over 929378.33 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:24:32,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.46 vs. limit=22.5 +2024-07-29 08:24:34,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260353.33333333334, ans=0.1 +2024-07-29 08:24:36,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=260353.33333333334, ans=0.0 +2024-07-29 08:24:41,511 INFO [train.py:1114] (0/4) Epoch 20, batch 1050, loss[loss=0.1677, simple_loss=0.2751, pruned_loss=0.03019, over 4876.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.04, over 931866.84 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:24:44,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=260366.66666666666, ans=0.0 +2024-07-29 08:24:52,942 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=260380.0, ans=0.125 +2024-07-29 08:24:57,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. 
limit=15.0 +2024-07-29 08:25:12,294 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.624e+01 6.219e+01 7.008e+01 1.029e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 08:25:15,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260420.0, ans=0.1 +2024-07-29 08:25:17,056 INFO [train.py:1114] (0/4) Epoch 20, batch 1100, loss[loss=0.1549, simple_loss=0.2426, pruned_loss=0.03363, over 4892.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2614, pruned_loss=0.03984, over 934253.63 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:25:30,826 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.85 vs. limit=15.0 +2024-07-29 08:25:33,477 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.93 vs. limit=15.0 +2024-07-29 08:25:45,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=260486.66666666666, ans=0.0 +2024-07-29 08:25:52,968 INFO [train.py:1114] (0/4) Epoch 20, batch 1150, loss[loss=0.1418, simple_loss=0.2369, pruned_loss=0.02336, over 4903.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2611, pruned_loss=0.03976, over 934283.85 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:25:59,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260513.33333333334, ans=0.1 +2024-07-29 08:26:06,602 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=260526.66666666666, ans=0.125 +2024-07-29 08:26:07,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=260526.66666666666, ans=0.0 +2024-07-29 08:26:17,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=260540.0, ans=0.125 +2024-07-29 08:26:22,176 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.531e+01 5.714e+01 6.232e+01 6.999e+01 1.113e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 08:26:23,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260553.33333333334, ans=0.125 +2024-07-29 08:26:26,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=260566.66666666666, ans=0.05 +2024-07-29 08:26:27,023 INFO [train.py:1114] (0/4) Epoch 20, batch 1200, loss[loss=0.1511, simple_loss=0.2533, pruned_loss=0.02449, over 4877.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2611, pruned_loss=0.03968, over 933222.57 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:26:27,376 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.73 vs. limit=10.0 +2024-07-29 08:28:44,579 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.26 vs. 
limit=22.5 +2024-07-29 08:28:48,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=260606.66666666666, ans=0.025 +2024-07-29 08:28:59,381 INFO [train.py:1114] (0/4) Epoch 20, batch 1250, loss[loss=0.1558, simple_loss=0.2461, pruned_loss=0.0327, over 4796.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2617, pruned_loss=0.03974, over 937309.19 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:29:03,468 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=260633.33333333334, ans=0.2 +2024-07-29 08:29:04,149 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:29:04,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260633.33333333334, ans=0.125 +2024-07-29 08:29:18,157 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=12.0 +2024-07-29 08:29:19,427 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. limit=15.0 +2024-07-29 08:29:21,869 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.65 vs. limit=22.5 +2024-07-29 08:29:25,204 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.29 vs. limit=22.5 +2024-07-29 08:29:29,551 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.565e+01 6.190e+01 6.882e+01 9.944e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 08:29:38,329 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=260686.66666666666, ans=0.125 +2024-07-29 08:29:42,141 INFO [train.py:1114] (0/4) Epoch 20, batch 1300, loss[loss=0.1851, simple_loss=0.2727, pruned_loss=0.04877, over 4738.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2605, pruned_loss=0.03923, over 938708.16 frames. ], batch size: 19, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:29:56,153 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=260726.66666666666, ans=0.025 +2024-07-29 08:30:04,155 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=260740.0, ans=0.025 +2024-07-29 08:30:12,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=260753.33333333334, ans=0.04949747468305833 +2024-07-29 08:30:15,518 INFO [train.py:1114] (0/4) Epoch 20, batch 1350, loss[loss=0.1572, simple_loss=0.2442, pruned_loss=0.03515, over 4771.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2599, pruned_loss=0.03914, over 940762.09 frames. 
], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:30:17,692 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=260766.66666666666, ans=0.125
+2024-07-29 08:30:17,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=260766.66666666666, ans=0.125
+2024-07-29 08:30:33,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=260793.33333333334, ans=0.0
+2024-07-29 08:30:34,947 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=260793.33333333334, ans=0.2
+2024-07-29 08:30:34,970 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=260793.33333333334, ans=0.125
+2024-07-29 08:30:44,740 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.728e+01 5.649e+01 6.305e+01 7.298e+01 1.047e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-29 08:30:44,861 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260820.0, ans=0.125
+2024-07-29 08:30:45,006 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=260820.0, ans=0.125
+2024-07-29 08:30:45,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=260820.0, ans=0.0
+2024-07-29 08:30:48,886 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=260833.33333333334, ans=0.015
+2024-07-29 08:30:49,524 INFO [train.py:1114] (0/4) Epoch 20, batch 1400, loss[loss=0.1419, simple_loss=0.2234, pruned_loss=0.03016, over 4699.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2598, pruned_loss=0.03914, over 943023.86 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:30:49,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=260833.33333333334, ans=0.025
+2024-07-29 08:30:51,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=260833.33333333334, ans=0.0
+2024-07-29 08:30:59,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=260846.66666666666, ans=0.125
+2024-07-29 08:31:11,765 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260873.33333333334, ans=0.125
+2024-07-29 08:31:25,044 INFO [train.py:1114] (0/4) Epoch 20, batch 1450, loss[loss=0.1721, simple_loss=0.2767, pruned_loss=0.03372, over 4686.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.26, pruned_loss=0.03959, over 942997.18 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:31:36,405 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260913.33333333334, ans=0.125
+2024-07-29 08:31:39,786 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=260926.66666666666, ans=0.125
+2024-07-29 08:31:47,763 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260940.0, ans=0.125
+2024-07-29 08:31:50,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260940.0, ans=0.0
+2024-07-29 08:31:53,425 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.592e+01 6.212e+01 7.267e+01 9.238e+01, threshold=1.242e+02, percent-clipped=0.0
+2024-07-29 08:31:58,324 INFO [train.py:1114] (0/4) Epoch 20, batch 1500, loss[loss=0.1665, simple_loss=0.2594, pruned_loss=0.03681, over 4803.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2612, pruned_loss=0.03956, over 942923.02 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:32:10,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=260980.0, ans=0.2
+2024-07-29 08:32:19,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261006.66666666666, ans=0.1
+2024-07-29 08:32:33,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261033.33333333334, ans=0.125
+2024-07-29 08:32:33,958 INFO [train.py:1114] (0/4) Epoch 20, batch 1550, loss[loss=0.1675, simple_loss=0.2597, pruned_loss=0.0376, over 4906.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2611, pruned_loss=0.03969, over 939428.99 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:32:36,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0
+2024-07-29 08:32:36,443 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.99 vs. limit=22.5
+2024-07-29 08:32:40,385 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=6.70 vs. limit=15.0
+2024-07-29 08:32:42,028 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=261046.66666666666, ans=0.125
+2024-07-29 08:33:04,817 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.534e+01 6.134e+01 7.096e+01 1.070e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 08:33:05,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261086.66666666666, ans=0.125
+2024-07-29 08:33:07,085 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=261086.66666666666, ans=0.0
+2024-07-29 08:33:07,728 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=261086.66666666666, ans=0.125
+2024-07-29 08:33:09,554 INFO [train.py:1114] (0/4) Epoch 20, batch 1600, loss[loss=0.166, simple_loss=0.2631, pruned_loss=0.03447, over 4881.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2614, pruned_loss=0.03959, over 937940.19 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:33:09,805 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=261100.0, ans=0.125
+2024-07-29 08:33:10,772 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.00 vs. limit=22.5
+2024-07-29 08:33:16,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=261113.33333333334, ans=0.0
+2024-07-29 08:33:18,082 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.91 vs. limit=15.0
+2024-07-29 08:33:29,061 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=261126.66666666666, ans=0.125
+2024-07-29 08:33:30,505 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:33:39,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261153.33333333334, ans=0.125
+2024-07-29 08:33:40,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261153.33333333334, ans=0.125
+2024-07-29 08:33:44,377 INFO [train.py:1114] (0/4) Epoch 20, batch 1650, loss[loss=0.1846, simple_loss=0.2854, pruned_loss=0.04187, over 4660.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2609, pruned_loss=0.03929, over 937907.43 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:33:45,199 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=261166.66666666666, ans=0.0
+2024-07-29 08:33:52,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=261180.0, ans=0.0
+2024-07-29 08:34:22,548 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.635e+01 6.098e+01 6.570e+01 1.046e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 08:34:25,529 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=261220.0, ans=0.125
+2024-07-29 08:34:27,325 INFO [train.py:1114] (0/4) Epoch 20, batch 1700, loss[loss=0.1489, simple_loss=0.2301, pruned_loss=0.03388, over 4716.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2615, pruned_loss=0.03948, over 939597.63 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:34:30,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=261233.33333333334, ans=0.125
+2024-07-29 08:34:32,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261233.33333333334, ans=0.125
+2024-07-29 08:34:40,720 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261260.0, ans=0.0
+2024-07-29 08:34:41,411 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=261260.0, ans=0.2
+2024-07-29 08:34:43,923 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=261260.0, ans=0.2
+2024-07-29 08:34:51,478 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.96 vs. limit=22.5
+2024-07-29 08:34:54,792 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=261273.33333333334, ans=10.0
+2024-07-29 08:35:00,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=261286.66666666666, ans=0.2
+2024-07-29 08:35:04,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=261286.66666666666, ans=0.1
+2024-07-29 08:35:05,855 INFO [train.py:1114] (0/4) Epoch 20, batch 1750, loss[loss=0.1877, simple_loss=0.2588, pruned_loss=0.05828, over 4809.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2611, pruned_loss=0.03937, over 940633.54 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:35:07,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=261300.0, ans=0.2
+2024-07-29 08:35:22,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=261313.33333333334, ans=0.125
+2024-07-29 08:35:37,852 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-196000.pt
+2024-07-29 08:36:11,239 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.694e+01 6.446e+01 7.395e+01 1.026e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-29 08:36:16,165 INFO [train.py:1114] (0/4) Epoch 20, batch 1800, loss[loss=0.1702, simple_loss=0.2707, pruned_loss=0.03486, over 4638.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2616, pruned_loss=0.03968, over 940668.04 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:36:20,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261366.66666666666, ans=0.125
+2024-07-29 08:36:22,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=261380.0, ans=0.0
+2024-07-29 08:36:28,771 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=261380.0, ans=0.0
+2024-07-29 08:36:34,110 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.83 vs. limit=10.0
+2024-07-29 08:36:34,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=261393.33333333334, ans=0.2
+2024-07-29 08:37:21,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=261406.66666666666, ans=0.1
+2024-07-29 08:37:22,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.53 vs. limit=22.5
+2024-07-29 08:37:36,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=261420.0, ans=0.0
+2024-07-29 08:37:44,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=261420.0, ans=0.0
+2024-07-29 08:37:44,650 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=261420.0, ans=0.07
+2024-07-29 08:37:47,130 INFO [train.py:1114] (0/4) Epoch 20, batch 1850, loss[loss=0.1788, simple_loss=0.2776, pruned_loss=0.04005, over 4812.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03996, over 940448.94 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:38:04,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=261433.33333333334, ans=0.0
+2024-07-29 08:38:10,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=261446.66666666666, ans=0.125
+2024-07-29 08:38:17,351 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.25 vs. limit=15.0
+2024-07-29 08:38:26,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=261473.33333333334, ans=0.0
+2024-07-29 08:38:30,217 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.603e+01 6.221e+01 6.965e+01 1.039e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-29 08:38:34,801 INFO [train.py:1114] (0/4) Epoch 20, batch 1900, loss[loss=0.1561, simple_loss=0.2538, pruned_loss=0.02919, over 4664.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.262, pruned_loss=0.03967, over 941484.54 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:38:35,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=261500.0, ans=0.2
+2024-07-29 08:38:45,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=261513.33333333334, ans=0.1
+2024-07-29 08:38:46,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=261513.33333333334, ans=0.2
+2024-07-29 08:38:47,763 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0
+2024-07-29 08:38:52,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=261526.66666666666, ans=0.0
+2024-07-29 08:39:11,508 INFO [train.py:1114] (0/4) Epoch 20, batch 1950, loss[loss=0.167, simple_loss=0.261, pruned_loss=0.0365, over 4900.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2639, pruned_loss=0.04017, over 943394.65 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 64.0
+2024-07-29 08:39:14,091 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.81 vs. limit=12.0
+2024-07-29 08:39:19,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=261580.0, ans=0.1
+2024-07-29 08:39:39,766 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=261620.0, ans=0.125
+2024-07-29 08:39:40,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=261620.0, ans=0.95
+2024-07-29 08:39:40,865 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.670e+01 6.297e+01 7.133e+01 1.211e+02, threshold=1.259e+02, percent-clipped=0.0
+2024-07-29 08:39:45,716 INFO [train.py:1114] (0/4) Epoch 20, batch 2000, loss[loss=0.1433, simple_loss=0.2272, pruned_loss=0.02969, over 4805.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2635, pruned_loss=0.04021, over 940858.33 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0
+2024-07-29 08:39:50,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=261633.33333333334, ans=0.125
+2024-07-29 08:40:11,834 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261660.0, ans=0.1
+2024-07-29 08:40:24,743 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=15.0
+2024-07-29 08:40:27,517 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261686.66666666666, ans=0.125
+2024-07-29 08:40:29,418 INFO [train.py:1114] (0/4) Epoch 20, batch 2050, loss[loss=0.1657, simple_loss=0.2551, pruned_loss=0.03812, over 4619.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2633, pruned_loss=0.04008, over 939124.69 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0
+2024-07-29 08:40:33,124 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.98 vs. limit=6.0
+2024-07-29 08:40:33,248 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.49 vs. limit=15.0
+2024-07-29 08:41:12,300 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=261753.33333333334, ans=0.0
+2024-07-29 08:41:14,699 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.787e+01 6.324e+01 7.549e+01 1.272e+02, threshold=1.265e+02, percent-clipped=1.0
+2024-07-29 08:41:14,853 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261753.33333333334, ans=0.125
+2024-07-29 08:41:18,633 INFO [train.py:1114] (0/4) Epoch 20, batch 2100, loss[loss=0.154, simple_loss=0.2459, pruned_loss=0.03102, over 4757.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.261, pruned_loss=0.03905, over 940996.88 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:41:43,966 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0
+2024-07-29 08:41:51,684 INFO [train.py:1114] (0/4) Epoch 20, batch 2150, loss[loss=0.1499, simple_loss=0.2466, pruned_loss=0.02659, over 4898.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.261, pruned_loss=0.03865, over 944209.39 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:41:55,535 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:41:56,472 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0
+2024-07-29 08:41:58,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=261846.66666666666, ans=0.125
+2024-07-29 08:42:01,590 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.48 vs. limit=22.5
+2024-07-29 08:42:03,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.81 vs. limit=15.0
+2024-07-29 08:42:31,564 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261873.33333333334, ans=0.1
+2024-07-29 08:42:31,848 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=6.0
+2024-07-29 08:42:44,003 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.512e+01 6.073e+01 7.043e+01 1.112e+02, threshold=1.215e+02, percent-clipped=0.0
+2024-07-29 08:42:53,742 INFO [train.py:1114] (0/4) Epoch 20, batch 2200, loss[loss=0.175, simple_loss=0.2756, pruned_loss=0.03726, over 4817.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2614, pruned_loss=0.03865, over 943358.75 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:42:57,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261900.0, ans=0.125
+2024-07-29 08:42:57,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=261900.0, ans=0.07
+2024-07-29 08:42:59,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=261900.0, ans=0.2
+2024-07-29 08:42:59,956 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=12.0
+2024-07-29 08:43:08,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261913.33333333334, ans=0.125
+2024-07-29 08:43:09,932 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=261913.33333333334, ans=0.125
+2024-07-29 08:43:19,181 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261926.66666666666, ans=0.125
+2024-07-29 08:43:23,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.16 vs. limit=15.0
+2024-07-29 08:43:30,521 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0
+2024-07-29 08:43:42,635 INFO [train.py:1114] (0/4) Epoch 20, batch 2250, loss[loss=0.1857, simple_loss=0.2719, pruned_loss=0.04972, over 4687.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2617, pruned_loss=0.03924, over 941926.97 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:44:16,524 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.04 vs. limit=6.0
+2024-07-29 08:44:29,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=262006.66666666666, ans=0.125
+2024-07-29 08:44:30,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=262006.66666666666, ans=0.2
+2024-07-29 08:44:51,830 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.514e+01 6.259e+01 6.946e+01 1.195e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 08:45:11,397 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0
+2024-07-29 08:45:13,601 INFO [train.py:1114] (0/4) Epoch 20, batch 2300, loss[loss=0.1694, simple_loss=0.2558, pruned_loss=0.04148, over 4935.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2615, pruned_loss=0.0395, over 939277.37 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:45:14,582 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=262033.33333333334, ans=0.2
+2024-07-29 08:45:20,689 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.73 vs. limit=12.0
+2024-07-29 08:45:37,162 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262033.33333333334, ans=0.125
+2024-07-29 08:45:55,142 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:46:05,205 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.90 vs. limit=15.0
+2024-07-29 08:47:04,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262060.0, ans=0.1
+2024-07-29 08:47:18,887 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=262073.33333333334, ans=0.0
+2024-07-29 08:47:20,209 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=262073.33333333334, ans=0.125
+2024-07-29 08:47:25,471 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262086.66666666666, ans=0.125
+2024-07-29 08:47:32,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=262086.66666666666, ans=0.125
+2024-07-29 08:47:34,616 INFO [train.py:1114] (0/4) Epoch 20, batch 2350, loss[loss=0.1723, simple_loss=0.2757, pruned_loss=0.03444, over 4637.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2609, pruned_loss=0.03914, over 941239.35 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:47:43,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=262100.0, ans=0.1
+2024-07-29 08:48:04,989 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262100.0, ans=0.125
+2024-07-29 08:48:45,199 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:48:50,827 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.808e+01 6.182e+01 6.944e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-29 08:48:54,793 INFO [train.py:1114] (0/4) Epoch 20, batch 2400, loss[loss=0.1457, simple_loss=0.2369, pruned_loss=0.02722, over 4637.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2617, pruned_loss=0.0394, over 941138.06 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:49:11,676 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=262193.3333333333, ans=0.125
+2024-07-29 08:49:14,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262193.3333333333, ans=0.125
+2024-07-29 08:49:15,318 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=262193.3333333333, ans=0.125
+2024-07-29 08:49:16,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262193.3333333333, ans=0.1
+2024-07-29 08:49:24,800 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=262220.0, ans=0.125
+2024-07-29 08:49:27,436 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262220.0, ans=0.0
+2024-07-29 08:49:28,964 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262220.0, ans=0.125
+2024-07-29 08:49:31,459 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=262233.3333333333, ans=0.025
+2024-07-29 08:49:32,082 INFO [train.py:1114] (0/4) Epoch 20, batch 2450, loss[loss=0.1299, simple_loss=0.2274, pruned_loss=0.01616, over 4704.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04022, over 936821.39 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:50:16,397 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=262273.3333333333, ans=0.125
+2024-07-29 08:50:33,804 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.751e+01 6.244e+01 7.182e+01 1.173e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 08:50:38,479 INFO [train.py:1114] (0/4) Epoch 20, batch 2500, loss[loss=0.1556, simple_loss=0.2589, pruned_loss=0.02616, over 4800.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03983, over 938990.45 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:51:09,714 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=262326.6666666667, ans=0.125
+2024-07-29 08:51:17,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262340.0, ans=0.0
+2024-07-29 08:51:18,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262340.0, ans=0.1
+2024-07-29 08:51:18,242 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=262340.0, ans=0.0
+2024-07-29 08:51:24,645 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=262340.0, ans=0.125
+2024-07-29 08:51:26,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=262353.3333333333, ans=0.2
+2024-07-29 08:51:43,402 INFO [train.py:1114] (0/4) Epoch 20, batch 2550, loss[loss=0.1519, simple_loss=0.2307, pruned_loss=0.03654, over 4812.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2616, pruned_loss=0.03972, over 938825.56 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:51:46,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=262366.6666666667, ans=0.125
+2024-07-29 08:51:47,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262366.6666666667, ans=0.125
+2024-07-29 08:51:58,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=262380.0, ans=0.025
+2024-07-29 08:52:01,791 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.28 vs. limit=15.0
+2024-07-29 08:52:23,025 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.541e+01 6.134e+01 6.874e+01 1.013e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 08:52:27,208 INFO [train.py:1114] (0/4) Epoch 20, batch 2600, loss[loss=0.1703, simple_loss=0.2632, pruned_loss=0.03872, over 4896.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03995, over 938382.92 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:52:55,368 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.88 vs. limit=22.5
+2024-07-29 08:53:02,432 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262473.3333333333, ans=0.125
+2024-07-29 08:53:09,847 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262473.3333333333, ans=0.125
+2024-07-29 08:53:25,754 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262486.6666666667, ans=0.1
+2024-07-29 08:53:26,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262486.6666666667, ans=0.125
+2024-07-29 08:53:27,585 INFO [train.py:1114] (0/4) Epoch 20, batch 2650, loss[loss=0.1802, simple_loss=0.2727, pruned_loss=0.04387, over 4634.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2618, pruned_loss=0.04001, over 940451.89 frames. ], batch size: 16, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:53:30,958 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.21 vs. limit=22.5
+2024-07-29 08:53:36,575 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262513.3333333333, ans=0.0
+2024-07-29 08:53:36,581 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=262513.3333333333, ans=0.0
+2024-07-29 08:53:46,622 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262526.6666666667, ans=0.125
+2024-07-29 08:54:09,631 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.564e+01 6.225e+01 7.006e+01 1.126e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-29 08:54:11,360 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=262553.3333333333, ans=0.0
+2024-07-29 08:54:14,043 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=262566.6666666667, ans=0.0
+2024-07-29 08:54:14,582 INFO [train.py:1114] (0/4) Epoch 20, batch 2700, loss[loss=0.1694, simple_loss=0.2561, pruned_loss=0.04134, over 4735.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2623, pruned_loss=0.04031, over 940181.94 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:54:20,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=262580.0, ans=0.125
+2024-07-29 08:54:23,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262580.0, ans=0.125
+2024-07-29 08:54:23,922 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=262580.0, ans=0.04949747468305833
+2024-07-29 08:54:38,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262593.3333333333, ans=0.1
+2024-07-29 08:54:39,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0
+2024-07-29 08:54:39,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=262606.6666666667, ans=10.0
+2024-07-29 08:54:39,604 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262606.6666666667, ans=0.125
+2024-07-29 08:54:42,448 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=262606.6666666667, ans=0.2
+2024-07-29 08:54:48,967 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.56 vs. limit=22.5
+2024-07-29 08:54:54,030 INFO [train.py:1114] (0/4) Epoch 20, batch 2750, loss[loss=0.1596, simple_loss=0.2522, pruned_loss=0.0335, over 4701.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2609, pruned_loss=0.03971, over 939947.19 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:54:55,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=262633.3333333333, ans=0.0
+2024-07-29 08:54:56,156 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=262633.3333333333, ans=0.2
+2024-07-29 08:55:19,094 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.18 vs. limit=15.0
+2024-07-29 08:55:24,930 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.94 vs. limit=22.5
+2024-07-29 08:55:40,167 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.878e+01 6.772e+01 7.962e+01 1.092e+02, threshold=1.354e+02, percent-clipped=0.0
+2024-07-29 08:55:47,012 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=262686.6666666667, ans=0.025
+2024-07-29 08:55:48,978 INFO [train.py:1114] (0/4) Epoch 20, batch 2800, loss[loss=0.242, simple_loss=0.3191, pruned_loss=0.08248, over 3280.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2613, pruned_loss=0.03976, over 937771.85 frames. ], batch size: 35, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:55:55,939 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=262713.3333333333, ans=0.125
+2024-07-29 08:55:56,496 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=262713.3333333333, ans=0.025
+2024-07-29 08:55:58,746 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.62 vs. limit=15.0
+2024-07-29 08:56:00,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=262713.3333333333, ans=0.125
+2024-07-29 08:56:02,785 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=15.0
+2024-07-29 08:56:05,649 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.78 vs. limit=10.0
+2024-07-29 08:56:12,410 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=262740.0, ans=0.2
+2024-07-29 08:56:17,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262740.0, ans=0.125
+2024-07-29 08:56:21,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=262753.3333333333, ans=0.2
+2024-07-29 08:56:26,497 INFO [train.py:1114] (0/4) Epoch 20, batch 2850, loss[loss=0.1616, simple_loss=0.2563, pruned_loss=0.0334, over 4962.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2609, pruned_loss=0.03948, over 935754.87 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:56:34,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262780.0, ans=0.0
+2024-07-29 08:56:47,732 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262793.3333333333, ans=0.1
+2024-07-29 08:56:48,357 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=262793.3333333333, ans=0.025
+2024-07-29 08:56:54,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=262806.6666666667, ans=0.2
+2024-07-29 08:56:56,990 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262806.6666666667, ans=0.125
+2024-07-29 08:56:59,743 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=262820.0, ans=0.125
+2024-07-29 08:56:59,762 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=262820.0, ans=0.125
+2024-07-29 08:57:02,165 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.799e+01 6.410e+01 7.214e+01 1.051e+02, threshold=1.282e+02, percent-clipped=0.0
+2024-07-29 08:57:05,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=262820.0, ans=0.125
+2024-07-29 08:57:06,627 INFO [train.py:1114] (0/4) Epoch 20, batch 2900, loss[loss=0.1451, simple_loss=0.2406, pruned_loss=0.02479, over 4838.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2629, pruned_loss=0.03994, over 939737.42 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:57:06,699 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262833.3333333333, ans=0.125
+2024-07-29 08:57:14,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=262846.6666666667, ans=0.0
+2024-07-29 08:57:22,771 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.25 vs. limit=12.0
+2024-07-29 08:57:25,942 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.04 vs. limit=15.0
+2024-07-29 08:57:26,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=262873.3333333333, ans=0.125
+2024-07-29 08:57:27,991 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.74 vs. limit=10.0
+2024-07-29 08:57:40,677 INFO [train.py:1114] (0/4) Epoch 20, batch 2950, loss[loss=0.1872, simple_loss=0.2846, pruned_loss=0.04486, over 4711.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03966, over 938809.42 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:57:42,549 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-29 08:58:06,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=262940.0, ans=0.05
+2024-07-29 08:58:10,541 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=262953.3333333333, ans=0.04949747468305833
+2024-07-29 08:58:12,847 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.161e+01 5.582e+01 5.984e+01 6.557e+01 9.213e+01, threshold=1.197e+02, percent-clipped=0.0
+2024-07-29 08:58:14,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262953.3333333333, ans=0.125
+2024-07-29 08:58:18,636 INFO [train.py:1114] (0/4) Epoch 20, batch 3000, loss[loss=0.1826, simple_loss=0.2864, pruned_loss=0.03934, over 4753.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03972, over 938459.17 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:58:18,636 INFO [train.py:1137] (0/4) Computing validation loss
+2024-07-29 08:58:32,879 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.0948, 3.1829, 1.8783, 3.4096, 2.9611, 3.1183, 3.6759, 3.5228],
+ device='cuda:0')
+2024-07-29 08:58:44,395 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1605, simple_loss=0.2625, pruned_loss=0.02922, over 944034.00 frames.
+2024-07-29 08:58:44,396 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB
+2024-07-29 08:58:45,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=262966.6666666667, ans=0.125
+2024-07-29 08:58:45,253 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=262966.6666666667, ans=0.125
+2024-07-29 08:58:59,777 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262980.0, ans=0.0
+2024-07-29 08:59:11,355 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=262993.3333333333, ans=0.0
+2024-07-29 08:59:16,066 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=12.0
+2024-07-29 08:59:23,492 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=263020.0, ans=0.125
+2024-07-29 08:59:25,425 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263020.0, ans=0.1
+2024-07-29 08:59:38,418 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=263020.0, ans=0.125
+2024-07-29 08:59:40,236 INFO [train.py:1114] (0/4) Epoch 20, batch 3050, loss[loss=0.1411, simple_loss=0.235, pruned_loss=0.02358, over 4632.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03951, over 937639.99 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:59:46,828 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=263033.3333333333, ans=0.125
+2024-07-29 08:59:48,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=263033.3333333333, ans=0.0
+2024-07-29 08:59:49,461 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263033.3333333333, ans=0.1
+2024-07-29 08:59:50,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=263033.3333333333, ans=0.0
+2024-07-29 08:59:50,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=263046.6666666667, ans=0.0
+2024-07-29 08:59:51,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=263046.6666666667, ans=0.125
+2024-07-29 09:00:03,588 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263060.0, ans=0.1
+2024-07-29 09:00:05,633 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=263073.3333333333, ans=0.95
+2024-07-29 09:00:06,176 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=263073.3333333333, ans=0.2
+2024-07-29 09:00:19,905 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.658e+01 6.248e+01 7.167e+01 1.022e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-29 09:00:30,391 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=263086.6666666667, ans=0.125
+2024-07-29 09:00:33,596 INFO [train.py:1114] (0/4) Epoch 20, batch 3100, loss[loss=0.1611, simple_loss=0.2496, pruned_loss=0.03627, over 4598.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2625, pruned_loss=0.03971, over 938220.20 frames. ], batch size: 16, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:00:40,805 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.43 vs. limit=15.0
+2024-07-29 09:00:41,825 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=263113.3333333333, ans=0.0
+2024-07-29 09:00:45,023 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.80 vs. limit=8.0
+2024-07-29 09:00:57,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=263126.6666666667, ans=0.125
+2024-07-29 09:00:57,987 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263126.6666666667, ans=0.0
+2024-07-29 09:01:01,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=263140.0, ans=0.125
+2024-07-29 09:01:13,438 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=15.0
+2024-07-29 09:01:13,585 INFO [train.py:1114] (0/4) Epoch 20, batch 3150, loss[loss=0.1845, simple_loss=0.2685, pruned_loss=0.05028, over 4630.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2627, pruned_loss=0.03993, over 937842.93 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:01:20,074 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0
+2024-07-29 09:01:27,106 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263180.0, ans=0.1
+2024-07-29 09:01:35,524 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:01:46,066 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.746e+01 6.588e+01 7.668e+01 1.344e+02, threshold=1.318e+02, percent-clipped=1.0
+2024-07-29 09:01:50,166 INFO [train.py:1114] (0/4) Epoch 20, batch 3200, loss[loss=0.139, simple_loss=0.2245, pruned_loss=0.02672, over 4826.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2616, pruned_loss=0.03953, over 939527.24 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:01:57,250 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=263246.6666666667, ans=0.125
+2024-07-29 09:02:08,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263260.0, ans=0.1
+2024-07-29 09:02:21,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=263286.6666666667, ans=0.125
+2024-07-29 09:02:27,327 INFO [train.py:1114] (0/4) Epoch 20, batch 3250, loss[loss=0.1749, simple_loss=0.2771, pruned_loss=0.03636, over 4931.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03952, over 940496.00 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:02:29,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=263300.0, ans=10.0
+2024-07-29 09:02:30,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=263300.0, ans=0.0
+2024-07-29 09:02:32,654 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=263300.0, ans=0.125
+2024-07-29 09:02:37,824 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0
+2024-07-29 09:02:41,062 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=263326.6666666667, ans=0.2
+2024-07-29 09:02:43,664 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=263326.6666666667, ans=0.125
+2024-07-29 09:02:58,825 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.63 vs. limit=6.0
+2024-07-29 09:02:59,774 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.543e+01 6.289e+01 7.306e+01 9.331e+01, threshold=1.258e+02, percent-clipped=0.0
+2024-07-29 09:03:01,877 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=263353.3333333333, ans=0.2
+2024-07-29 09:03:03,856 INFO [train.py:1114] (0/4) Epoch 20, batch 3300, loss[loss=0.1851, simple_loss=0.2689, pruned_loss=0.05065, over 4721.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2604, pruned_loss=0.03937, over 940798.63 frames. ], batch size: 19, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:03:08,849 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=263366.6666666667, ans=0.125
+2024-07-29 09:03:10,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=263380.0, ans=0.125
+2024-07-29 09:03:15,215 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.03 vs. limit=15.0
+2024-07-29 09:03:15,945 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.26 vs. limit=15.0
+2024-07-29 09:03:20,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=263393.3333333333, ans=0.125
+2024-07-29 09:03:31,491 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=263420.0, ans=0.0
+2024-07-29 09:03:36,124 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=263420.0, ans=0.0
+2024-07-29 09:03:37,233 INFO [train.py:1114] (0/4) Epoch 20, batch 3350, loss[loss=0.1715, simple_loss=0.2614, pruned_loss=0.04077, over 4641.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2615, pruned_loss=0.0394, over 938750.26 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:03:44,859 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=263446.6666666667, ans=0.0
+2024-07-29 09:03:46,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=263446.6666666667, ans=0.2
+2024-07-29 09:03:48,255 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=263446.6666666667, ans=0.2
+2024-07-29 09:03:55,916 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=263460.0, ans=0.125
+2024-07-29 09:03:57,336 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=263473.3333333333, ans=0.125
+2024-07-29 09:04:07,370 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.699e+01 6.337e+01 7.173e+01 1.148e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-29 09:04:09,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=263486.6666666667, ans=0.0
+2024-07-29 09:04:11,161 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=263500.0, ans=0.2
+2024-07-29 09:04:11,616 INFO [train.py:1114] (0/4) Epoch 20, batch 3400, loss[loss=0.1315, simple_loss=0.2162, pruned_loss=0.0234, over 4801.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2614, pruned_loss=0.03958, over 937375.04 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:04:16,595 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263500.0, ans=0.1
+2024-07-29 09:04:32,216 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:04:48,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263566.6666666667, ans=0.0
+2024-07-29 09:04:49,170 INFO [train.py:1114] (0/4) Epoch 20, batch 3450, loss[loss=0.1594, simple_loss=0.2542, pruned_loss=0.03231, over 4629.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2618, pruned_loss=0.03967, over 937576.62 frames. ], batch size: 19, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:04:57,214 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263580.0, ans=0.0
+2024-07-29 09:05:02,758 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=15.0
+2024-07-29 09:05:03,873 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=263593.3333333333, ans=0.125
+2024-07-29 09:05:05,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0
+2024-07-29 09:05:18,524 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.782e+01 6.590e+01 7.406e+01 1.017e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-29 09:05:19,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=263620.0, ans=0.0
+2024-07-29 09:05:19,392 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=263620.0, ans=0.125
+2024-07-29 09:05:19,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263620.0, ans=0.125
+2024-07-29 09:05:21,402 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263620.0, ans=0.1
+2024-07-29 09:05:22,649 INFO [train.py:1114] (0/4) Epoch 20, batch 3500, loss[loss=0.1588, simple_loss=0.2467, pruned_loss=0.03544, over 4936.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2608, pruned_loss=0.03904, over 938145.77 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:05:30,219 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=263646.6666666667, ans=0.0
+2024-07-29 09:05:35,365 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.12 vs. limit=10.0
+2024-07-29 09:05:43,094 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=263673.3333333333, ans=0.0
+2024-07-29 09:05:53,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263686.6666666667, ans=0.1
+2024-07-29 09:05:55,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263700.0, ans=0.1
+2024-07-29 09:05:56,242 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.41 vs. limit=15.0
+2024-07-29 09:05:56,357 INFO [train.py:1114] (0/4) Epoch 20, batch 3550, loss[loss=0.1863, simple_loss=0.2737, pruned_loss=0.04944, over 4673.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.261, pruned_loss=0.03903, over 938677.86 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:05:59,185 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=263700.0, ans=0.2
+2024-07-29 09:06:15,101 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.26 vs. limit=15.0
+2024-07-29 09:06:30,158 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=263740.0, ans=0.0
+2024-07-29 09:06:35,176 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.573e+01 6.229e+01 6.741e+01 1.100e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-29 09:06:40,737 INFO [train.py:1114] (0/4) Epoch 20, batch 3600, loss[loss=0.1873, simple_loss=0.2747, pruned_loss=0.05, over 4968.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2614, pruned_loss=0.03933, over 940435.54 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:06:41,163 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.77 vs. limit=10.0
+2024-07-29 09:07:05,317 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=263806.6666666667, ans=0.125
+2024-07-29 09:07:08,262 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=263820.0, ans=0.125
+2024-07-29 09:07:14,882 INFO [train.py:1114] (0/4) Epoch 20, batch 3650, loss[loss=0.2228, simple_loss=0.3052, pruned_loss=0.07018, over 4905.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2607, pruned_loss=0.03931, over 941157.00 frames. ], batch size: 15, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:07:26,388 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:07:27,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263846.6666666667, ans=0.0
+2024-07-29 09:07:27,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=263860.0, ans=0.125
+2024-07-29 09:07:31,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=263860.0, ans=0.125
+2024-07-29 09:07:37,150 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=263873.3333333333, ans=0.0
+2024-07-29 09:07:41,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=263886.6666666667, ans=0.0
+2024-07-29 09:07:43,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=263886.6666666667, ans=0.025
+2024-07-29 09:07:44,536 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.430e+01 6.137e+01 7.012e+01 1.010e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 09:07:48,573 INFO [train.py:1114] (0/4) Epoch 20, batch 3700, loss[loss=0.1785, simple_loss=0.2666, pruned_loss=0.04515, over 4929.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2598, pruned_loss=0.03858, over 942211.92 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:07:52,182 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.77 vs. limit=10.0
+2024-07-29 09:07:55,980 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=263913.3333333333, ans=0.125
+2024-07-29 09:08:05,896 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=263926.6666666667, ans=0.0
+2024-07-29 09:08:06,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=263926.6666666667, ans=0.2
+2024-07-29 09:08:12,261 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=263940.0, ans=0.125
+2024-07-29 09:08:18,533 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=263953.3333333333, ans=0.05
+2024-07-29 09:08:21,542 INFO [train.py:1114] (0/4) Epoch 20, batch 3750, loss[loss=0.163, simple_loss=0.2482, pruned_loss=0.03888, over 4803.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2595, pruned_loss=0.03848, over 943437.90 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:08:21,924 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.93 vs. limit=15.0
+2024-07-29 09:08:42,628 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=263993.3333333333, ans=0.0
+2024-07-29 09:08:57,866 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.705e+01 6.410e+01 7.000e+01 1.025e+02, threshold=1.282e+02, percent-clipped=0.0
+2024-07-29 09:09:02,215 INFO [train.py:1114] (0/4) Epoch 20, batch 3800, loss[loss=0.1698, simple_loss=0.2615, pruned_loss=0.03905, over 4816.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2599, pruned_loss=0.03853, over 942106.87 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:09:04,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=264033.3333333333, ans=0.125
+2024-07-29 09:09:06,265 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264033.3333333333, ans=0.125
+2024-07-29 09:09:07,739 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.55 vs. limit=6.0
+2024-07-29 09:09:54,168 INFO [train.py:1114] (0/4) Epoch 20, batch 3850, loss[loss=0.2007, simple_loss=0.289, pruned_loss=0.05618, over 4646.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2598, pruned_loss=0.0384, over 943036.12 frames.
], batch size: 16, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:09:54,396 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=264100.0, ans=0.0 +2024-07-29 09:10:14,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=264140.0, ans=0.125 +2024-07-29 09:10:19,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264140.0, ans=0.125 +2024-07-29 09:10:24,569 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.626e+01 6.107e+01 6.849e+01 9.588e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-29 09:10:31,769 INFO [train.py:1114] (0/4) Epoch 20, batch 3900, loss[loss=0.1665, simple_loss=0.2612, pruned_loss=0.03593, over 4805.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2613, pruned_loss=0.0387, over 943301.73 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 16.0 +2024-07-29 09:10:34,470 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=264166.6666666667, ans=0.125 +2024-07-29 09:10:39,111 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=264180.0, ans=0.0 +2024-07-29 09:10:57,475 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264220.0, ans=0.125 +2024-07-29 09:10:57,756 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=264220.0, ans=0.0 +2024-07-29 09:11:26,910 INFO [train.py:1114] (0/4) Epoch 20, batch 3950, loss[loss=0.1745, simple_loss=0.265, pruned_loss=0.042, over 4835.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2616, pruned_loss=0.03885, over 945164.50 frames. ], batch size: 16, lr: 3.72e-03, grad_scale: 16.0 +2024-07-29 09:11:27,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264233.3333333333, ans=0.1 +2024-07-29 09:11:39,488 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=264246.6666666667, ans=0.07 +2024-07-29 09:11:59,942 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.612e+01 6.214e+01 7.012e+01 1.031e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 09:12:03,451 INFO [train.py:1114] (0/4) Epoch 20, batch 4000, loss[loss=0.1503, simple_loss=0.2311, pruned_loss=0.03476, over 4776.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2615, pruned_loss=0.03941, over 941394.33 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:12:38,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264300.0, ans=0.1 +2024-07-29 09:12:49,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=264313.3333333333, ans=0.125 +2024-07-29 09:12:50,449 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264313.3333333333, ans=0.0 +2024-07-29 09:13:00,790 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.75 vs. 
limit=15.0 +2024-07-29 09:13:02,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=264340.0, ans=0.125 +2024-07-29 09:13:33,656 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=264353.3333333333, ans=0.025 +2024-07-29 09:13:42,202 INFO [train.py:1114] (0/4) Epoch 20, batch 4050, loss[loss=0.1993, simple_loss=0.2767, pruned_loss=0.06098, over 3436.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2611, pruned_loss=0.03944, over 939473.37 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:13:46,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=264366.6666666667, ans=0.0 +2024-07-29 09:13:49,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=264380.0, ans=0.015 +2024-07-29 09:13:52,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264380.0, ans=0.125 +2024-07-29 09:14:01,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264393.3333333333, ans=0.125 +2024-07-29 09:14:09,328 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=264406.6666666667, ans=0.0 +2024-07-29 09:14:18,737 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.925e+01 5.618e+01 6.150e+01 7.099e+01 1.073e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 09:14:19,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264420.0, ans=0.125 +2024-07-29 09:14:19,718 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:14:22,377 INFO [train.py:1114] (0/4) Epoch 20, batch 4100, loss[loss=0.1745, simple_loss=0.269, pruned_loss=0.03995, over 4903.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2615, pruned_loss=0.03959, over 938486.60 frames. ], batch size: 15, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:14:27,508 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=264433.3333333333, ans=0.0 +2024-07-29 09:14:29,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=264433.3333333333, ans=0.2
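Each `INFO [scaling.py:214] ScheduledFloat: name=..., batch_count=..., ans=...` line above reports a scheduled hyperparameter (a skip rate, dropout probability, balancer probability, and so on) evaluated at the current global batch count; `ans` is the schedule's current value. A self-contained sketch of such a piecewise-linear schedule, simplified and with made-up knot values, not the actual scaling.py class:

```python
import bisect

class ScheduledFloat:
    """Piecewise-linear schedule over the global batch count (simplified sketch,
    not the actual scaling.py class).  `ans` in the log lines above is the
    schedule evaluated at the current batch_count."""

    def __init__(self, *points: tuple[float, float]):
        self.xs = [x for x, _ in points]   # knot batch counts, ascending
        self.ys = [y for _, y in points]   # values at those knots

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# Hypothetical knots: a skip rate that decays from 0.5 to 0.0 over 20k batches.
skip_rate = ScheduledFloat((0.0, 0.5), (20000.0, 0.0))
print(skip_rate(264433.33))  # 0.0 past the last knot, like the ans=0.0 entries
```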
+2024-07-29 09:14:35,468 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-07-29 09:14:37,836 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264460.0, ans=0.1 +2024-07-29 09:14:41,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=264460.0, ans=0.125 +2024-07-29 09:14:49,473 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=264473.3333333333, ans=0.025 +2024-07-29 09:14:52,165 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=264486.6666666667, ans=0.2 +2024-07-29 09:14:59,485 INFO [train.py:1114] (0/4) Epoch 20, batch 4150, loss[loss=0.1764, simple_loss=0.2678, pruned_loss=0.04256, over 4827.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2615, pruned_loss=0.03978, over 937865.08 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:15:01,123 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=264500.0, ans=0.125 +2024-07-29 09:15:13,326 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=264526.6666666667, ans=0.0 +2024-07-29 09:15:22,544 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=264540.0, ans=0.125 +2024-07-29 09:15:35,201 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.706e+01 6.359e+01 7.433e+01 1.126e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 09:15:39,946 INFO [train.py:1114] (0/4) Epoch 20, batch 4200, loss[loss=0.1861, simple_loss=0.2873, pruned_loss=0.04251, over 4904.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2618, pruned_loss=0.03983, over 939226.17 frames. ], batch size: 15, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:16:11,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=264580.0, ans=0.025 +2024-07-29 09:16:15,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=264593.3333333333, ans=0.025 +2024-07-29 09:16:15,681 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.52 vs. limit=10.0 +2024-07-29 09:16:16,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=264593.3333333333, ans=0.125 +2024-07-29 09:16:16,789 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=264593.3333333333, ans=0.1 +2024-07-29 09:16:20,794 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=264606.6666666667, ans=0.0 +2024-07-29 09:16:39,851 INFO [train.py:1114] (0/4) Epoch 20, batch 4250, loss[loss=0.1637, simple_loss=0.2512, pruned_loss=0.03813, over 4636.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2624, pruned_loss=0.03964, over 940464.93 frames. 
], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:16:43,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=264633.3333333333, ans=0.125 +2024-07-29 09:17:09,904 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.620e+01 6.275e+01 6.899e+01 1.013e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 09:17:13,166 INFO [train.py:1114] (0/4) Epoch 20, batch 4300, loss[loss=0.2044, simple_loss=0.2958, pruned_loss=0.0565, over 4761.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2626, pruned_loss=0.03989, over 939888.66 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:16,799 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.77 vs. limit=15.0 +2024-07-29 09:17:17,946 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=264700.0, ans=0.02 +2024-07-29 09:17:26,640 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=264713.3333333333, ans=0.0 +2024-07-29 09:17:27,858 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=264713.3333333333, ans=0.2 +2024-07-29 09:17:33,419 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=264726.6666666667, ans=0.125 +2024-07-29 09:17:39,311 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=264740.0, ans=0.0 +2024-07-29 09:17:41,306 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=264740.0, ans=0.0 +2024-07-29 09:17:43,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=264753.3333333333, ans=0.0 +2024-07-29 09:17:51,808 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264753.3333333333, ans=0.125 +2024-07-29 09:17:52,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=264766.6666666667, ans=0.125 +2024-07-29 09:17:53,008 INFO [train.py:1114] (0/4) Epoch 20, batch 4350, loss[loss=0.148, simple_loss=0.2374, pruned_loss=0.02931, over 4759.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2619, pruned_loss=0.03923, over 940628.74 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:59,841 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=264780.0, ans=0.025 +2024-07-29 09:18:00,490 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=264780.0, ans=0.125 +2024-07-29 09:18:06,029 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.74 vs. limit=15.0 +2024-07-29 09:18:26,680 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.11 vs. 
limit=22.5 +2024-07-29 09:18:27,683 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.816e+01 5.702e+01 6.164e+01 6.960e+01 9.569e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 09:18:29,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=264820.0, ans=0.0 +2024-07-29 09:18:31,108 INFO [train.py:1114] (0/4) Epoch 20, batch 4400, loss[loss=0.1585, simple_loss=0.2499, pruned_loss=0.03348, over 4806.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.262, pruned_loss=0.03881, over 940703.79 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:18:47,624 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:18:54,282 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264860.0, ans=0.125 +2024-07-29 09:19:01,500 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=264873.3333333333, ans=0.125 +2024-07-29 09:19:04,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=264886.6666666667, ans=0.125 +2024-07-29 09:19:12,243 INFO [train.py:1114] (0/4) Epoch 20, batch 4450, loss[loss=0.1614, simple_loss=0.239, pruned_loss=0.04197, over 4934.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2624, pruned_loss=0.03934, over 938353.82 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:19:15,072 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=264900.0, ans=0.125 +2024-07-29 09:19:16,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264900.0, ans=0.1 +2024-07-29 09:19:19,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264900.0, ans=0.1 +2024-07-29 09:19:20,384 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264900.0, ans=0.1 +2024-07-29 09:19:22,218 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=264913.3333333333, ans=0.125 +2024-07-29 09:19:36,165 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.45 vs. limit=15.0 +2024-07-29 09:19:53,316 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.589e+01 6.388e+01 7.277e+01 9.167e+01, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 09:19:57,878 INFO [train.py:1114] (0/4) Epoch 20, batch 4500, loss[loss=0.1693, simple_loss=0.2644, pruned_loss=0.03713, over 4742.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.262, pruned_loss=0.03925, over 937722.25 frames. 
], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:20:00,677 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=264966.6666666667, ans=0.0 +2024-07-29 09:20:07,083 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=264980.0, ans=0.0 +2024-07-29 09:20:07,785 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=264980.0, ans=0.0 +2024-07-29 09:20:19,067 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=264993.3333333333, ans=0.2 +2024-07-29 09:20:19,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=264993.3333333333, ans=0.04949747468305833 +2024-07-29 09:20:35,706 INFO [train.py:1114] (0/4) Epoch 20, batch 4550, loss[loss=0.1592, simple_loss=0.2483, pruned_loss=0.03499, over 4893.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2612, pruned_loss=0.03884, over 939725.78 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:20:36,000 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.00 vs. limit=15.0 +2024-07-29 09:20:41,721 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265046.6666666667, ans=0.125 +2024-07-29 09:20:52,179 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=265060.0, ans=0.125 +2024-07-29 09:20:52,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=265060.0, ans=0.125 +2024-07-29 09:20:53,761 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.88 vs. limit=15.0 +2024-07-29 09:20:54,222 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=265060.0, ans=0.125 +2024-07-29 09:21:07,773 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=265073.3333333333, ans=0.2 +2024-07-29 09:21:09,844 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265073.3333333333, ans=0.125 +2024-07-29 09:21:16,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=265086.6666666667, ans=0.125 +2024-07-29 09:21:16,620 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.639e+01 6.516e+01 7.459e+01 1.043e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-29 09:21:19,972 INFO [train.py:1114] (0/4) Epoch 20, batch 4600, loss[loss=0.1792, simple_loss=0.2834, pruned_loss=0.03751, over 4546.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2622, pruned_loss=0.0394, over 938123.61 frames. 
], batch size: 21, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:21:23,362 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=265100.0, ans=0.125 +2024-07-29 09:21:25,417 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=265100.0, ans=0.0 +2024-07-29 09:21:32,573 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.81 vs. limit=15.0 +2024-07-29 09:21:37,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=265126.6666666667, ans=0.025 +2024-07-29 09:21:43,342 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265140.0, ans=0.1 +2024-07-29 09:21:53,412 INFO [train.py:1114] (0/4) Epoch 20, batch 4650, loss[loss=0.1883, simple_loss=0.2827, pruned_loss=0.04693, over 4833.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2626, pruned_loss=0.03929, over 939960.13 frames. ], batch size: 16, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:21:59,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265166.6666666667, ans=0.125 +2024-07-29 09:22:01,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=265180.0, ans=0.125 +2024-07-29 09:22:02,142 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.21 vs. limit=22.5 +2024-07-29 09:22:15,913 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=265206.6666666667, ans=0.0 +2024-07-29 09:22:17,293 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=265206.6666666667, ans=0.125 +2024-07-29 09:22:25,180 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.549e+01 5.991e+01 6.748e+01 1.053e+02, threshold=1.198e+02, percent-clipped=0.0 +2024-07-29 09:22:28,487 INFO [train.py:1114] (0/4) Epoch 20, batch 4700, loss[loss=0.1321, simple_loss=0.2238, pruned_loss=0.02023, over 4695.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2615, pruned_loss=0.03918, over 937323.85 frames. ], batch size: 11, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:22:28,630 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265233.3333333333, ans=0.1 +2024-07-29 09:22:46,323 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=265246.6666666667, ans=0.0 +2024-07-29 09:23:12,637 INFO [train.py:1114] (0/4) Epoch 20, batch 4750, loss[loss=0.1489, simple_loss=0.2471, pruned_loss=0.02532, over 4529.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2616, pruned_loss=0.03937, over 935537.86 frames. 
], batch size: 21, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:23:15,011 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=265300.0, ans=0.125 +2024-07-29 09:23:27,271 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=265313.3333333333, ans=0.125 +2024-07-29 09:23:32,312 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=265326.6666666667, ans=0.125 +2024-07-29 09:23:36,096 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.18 vs. limit=15.0 +2024-07-29 09:23:59,495 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.596e+01 6.192e+01 7.037e+01 1.008e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 09:24:06,757 INFO [train.py:1114] (0/4) Epoch 20, batch 4800, loss[loss=0.1866, simple_loss=0.2868, pruned_loss=0.04324, over 4689.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2611, pruned_loss=0.03928, over 933327.71 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:24:10,751 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=15.0 +2024-07-29 09:24:31,120 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=265393.3333333333, ans=0.125 +2024-07-29 09:24:32,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265393.3333333333, ans=0.1 +2024-07-29 09:24:38,689 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=265406.6666666667, ans=0.05 +2024-07-29 09:24:40,089 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=22.5 +2024-07-29 09:24:43,031 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=265406.6666666667, ans=0.125 +2024-07-29 09:24:46,590 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265420.0, ans=0.0 +2024-07-29 09:24:59,695 INFO [train.py:1114] (0/4) Epoch 20, batch 4850, loss[loss=0.1751, simple_loss=0.2766, pruned_loss=0.03678, over 4740.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2611, pruned_loss=0.03928, over 933118.26 frames. 
], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:25:11,466 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265433.3333333333, ans=0.0 +2024-07-29 09:25:24,948 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265460.0, ans=0.1 +2024-07-29 09:25:25,624 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=265460.0, ans=0.125 +2024-07-29 09:25:32,909 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=265473.3333333333, ans=0.125 +2024-07-29 09:25:36,440 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265486.6666666667, ans=0.1 +2024-07-29 09:25:36,453 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=265486.6666666667, ans=0.025 +2024-07-29 09:25:42,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=265486.6666666667, ans=0.125 +2024-07-29 09:25:43,426 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.554e+01 6.055e+01 6.631e+01 1.173e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 09:25:44,882 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=265486.6666666667, ans=0.025 +2024-07-29 09:25:46,886 INFO [train.py:1114] (0/4) Epoch 20, batch 4900, loss[loss=0.1623, simple_loss=0.262, pruned_loss=0.03126, over 4762.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2609, pruned_loss=0.03895, over 935138.71 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:25:47,937 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.59 vs. limit=15.0 +2024-07-29 09:25:50,877 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=22.5 +2024-07-29 09:25:52,647 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=265500.0, ans=0.2 +2024-07-29 09:25:58,481 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.30 vs. limit=15.0 +2024-07-29 09:26:02,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=265526.6666666667, ans=0.125 +2024-07-29 09:26:29,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=265553.3333333333, ans=0.0 +2024-07-29 09:26:31,068 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=12.0 +2024-07-29 09:26:34,951 INFO [train.py:1114] (0/4) Epoch 20, batch 4950, loss[loss=0.1726, simple_loss=0.266, pruned_loss=0.0396, over 3281.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2623, pruned_loss=0.03971, over 931522.96 frames. 
], batch size: 35, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:26:36,770 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.74 vs. limit=12.0 +2024-07-29 09:26:38,608 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:26:42,097 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.57 vs. limit=22.5 +2024-07-29 09:26:43,327 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=265580.0, ans=0.2 +2024-07-29 09:26:46,021 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.32 vs. limit=10.0 +2024-07-29 09:27:01,228 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.41 vs. limit=10.0 +2024-07-29 09:27:11,248 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.554e+01 6.246e+01 6.923e+01 9.859e+01, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 09:27:14,684 INFO [train.py:1114] (0/4) Epoch 20, batch 5000, loss[loss=0.1876, simple_loss=0.2896, pruned_loss=0.04276, over 4666.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.262, pruned_loss=0.03965, over 935352.10 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:27:17,519 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265633.3333333333, ans=0.125 +2024-07-29 09:27:32,406 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265660.0, ans=0.125 +2024-07-29 09:27:35,140 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=265660.0, ans=0.0 +2024-07-29 09:27:41,224 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=265673.3333333333, ans=0.125 +2024-07-29 09:27:50,271 INFO [train.py:1114] (0/4) Epoch 20, batch 5050, loss[loss=0.1417, simple_loss=0.2286, pruned_loss=0.0274, over 4854.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2606, pruned_loss=0.03899, over 937844.25 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:27:54,149 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=265700.0, ans=0.025 +2024-07-29 09:28:02,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=265700.0, ans=0.0 +2024-07-29 09:28:12,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265713.3333333333, ans=0.0 +2024-07-29 09:28:27,374 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.78 vs. 
limit=10.0 +2024-07-29 09:28:34,378 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.782e+01 6.489e+01 7.303e+01 1.011e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 09:28:38,660 INFO [train.py:1114] (0/4) Epoch 20, batch 5100, loss[loss=0.1622, simple_loss=0.2539, pruned_loss=0.03521, over 4773.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2607, pruned_loss=0.03958, over 935825.47 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:29:03,081 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265806.6666666667, ans=0.125 +2024-07-29 09:29:11,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265833.3333333333, ans=0.125 +2024-07-29 09:29:12,310 INFO [train.py:1114] (0/4) Epoch 20, batch 5150, loss[loss=0.2046, simple_loss=0.293, pruned_loss=0.05806, over 4845.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2625, pruned_loss=0.03979, over 937132.04 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:29:13,731 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265833.3333333333, ans=0.1 +2024-07-29 09:29:16,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=265833.3333333333, ans=0.125 +2024-07-29 09:29:19,373 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=265846.6666666667, ans=0.125 +2024-07-29 09:29:24,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=265846.6666666667, ans=0.125 +2024-07-29 09:29:26,690 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=265860.0, ans=0.125 +2024-07-29 09:29:28,983 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=265860.0, ans=0.0 +2024-07-29 09:29:43,438 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=265886.6666666667, ans=0.125 +2024-07-29 09:29:44,553 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 5.725e+01 6.279e+01 7.318e+01 1.119e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 09:30:06,162 INFO [train.py:1114] (0/4) Epoch 20, batch 5200, loss[loss=0.1987, simple_loss=0.2932, pruned_loss=0.05215, over 4665.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2617, pruned_loss=0.03971, over 936685.17 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:30:09,900 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.25 vs. limit=15.0 +2024-07-29 09:30:58,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=265953.3333333333, ans=0.0 +2024-07-29 09:31:02,556 INFO [train.py:1114] (0/4) Epoch 20, batch 5250, loss[loss=0.1664, simple_loss=0.2596, pruned_loss=0.03665, over 4894.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2603, pruned_loss=0.03912, over 936247.84 frames. 
], batch size: 13, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:31:04,646 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=265966.6666666667, ans=0.05 +2024-07-29 09:31:06,565 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=265966.6666666667, ans=0.125 +2024-07-29 09:31:12,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=265980.0, ans=0.0 +2024-07-29 09:31:16,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.32 vs. limit=15.0 +2024-07-29 09:31:17,427 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=265993.3333333333, ans=0.035 +2024-07-29 09:31:23,810 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0 +2024-07-29 09:31:26,795 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=266006.6666666667, ans=0.0 +2024-07-29 09:31:29,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=266020.0, ans=15.0 +2024-07-29 09:31:32,639 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.588e+01 6.109e+01 7.391e+01 1.107e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 09:31:33,609 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=266020.0, ans=0.04949747468305833 +2024-07-29 09:31:34,820 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=266020.0, ans=0.09899494936611666 +2024-07-29 09:31:37,706 INFO [train.py:1114] (0/4) Epoch 20, batch 5300, loss[loss=0.1747, simple_loss=0.2717, pruned_loss=0.03888, over 4659.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2598, pruned_loss=0.03906, over 935055.83 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:31:40,699 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.60 vs. limit=15.0 +2024-07-29 09:31:48,463 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=266046.6666666667, ans=0.2 +2024-07-29 09:31:54,202 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.99 vs. limit=15.0 +2024-07-29 09:31:55,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.54 vs. 
limit=22.5 +2024-07-29 09:32:05,400 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=266073.3333333333, ans=0.0 +2024-07-29 09:32:06,845 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266073.3333333333, ans=0.1 +2024-07-29 09:32:07,478 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=266073.3333333333, ans=0.0 +2024-07-29 09:32:15,628 INFO [train.py:1114] (0/4) Epoch 20, batch 5350, loss[loss=0.1656, simple_loss=0.2508, pruned_loss=0.04018, over 4543.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2619, pruned_loss=0.0399, over 936587.64 frames. ], batch size: 10, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:32:44,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266113.3333333333, ans=0.1 +2024-07-29 09:32:47,454 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266113.3333333333, ans=0.1 +2024-07-29 09:32:49,509 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266126.6666666667, ans=0.125 +2024-07-29 09:32:56,884 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:32:57,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=266140.0, ans=0.125 +2024-07-29 09:32:57,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=266140.0, ans=0.125 +2024-07-29 09:33:04,153 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.87 vs. limit=6.0 +2024-07-29 09:33:06,389 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.787e+01 6.416e+01 7.278e+01 1.158e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 09:33:09,797 INFO [train.py:1114] (0/4) Epoch 20, batch 5400, loss[loss=0.1756, simple_loss=0.2763, pruned_loss=0.03744, over 4120.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2625, pruned_loss=0.04013, over 930331.44 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:33:18,382 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.16 vs. limit=15.0 +2024-07-29 09:33:28,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=266193.3333333333, ans=0.2 +2024-07-29 09:33:29,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=266193.3333333333, ans=0.125 +2024-07-29 09:33:31,191 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0 +2024-07-29 09:33:40,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. 
limit=15.0 +2024-07-29 09:33:43,807 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=266220.0, ans=0.125 +2024-07-29 09:33:46,287 INFO [train.py:1114] (0/4) Epoch 20, batch 5450, loss[loss=0.1525, simple_loss=0.2318, pruned_loss=0.03662, over 4701.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2621, pruned_loss=0.0399, over 933293.70 frames. ], batch size: 11, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:34:31,221 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=266246.6666666667, ans=0.125 +2024-07-29 09:34:35,173 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=266260.0, ans=0.125 +2024-07-29 09:34:35,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=266260.0, ans=0.125 +2024-07-29 09:34:37,041 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266260.0, ans=0.1 +2024-07-29 09:34:38,487 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=266260.0, ans=0.0 +2024-07-29 09:34:39,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=266260.0, ans=0.0 +2024-07-29 09:34:50,654 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.730e+01 6.160e+01 6.781e+01 9.375e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 09:34:54,236 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-07-29 09:34:54,550 INFO [train.py:1114] (0/4) Epoch 20, batch 5500, loss[loss=0.2065, simple_loss=0.291, pruned_loss=0.06096, over 4228.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2622, pruned_loss=0.04008, over 930626.15 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:34:58,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266300.0, ans=0.125 +2024-07-29 09:35:19,268 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=266340.0, ans=0.0 +2024-07-29 09:35:22,716 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=266340.0, ans=0.125 +2024-07-29 09:35:34,348 INFO [train.py:1114] (0/4) Epoch 20, batch 5550, loss[loss=0.1542, simple_loss=0.2441, pruned_loss=0.03211, over 4709.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2615, pruned_loss=0.03964, over 932774.41 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:35:48,680 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=266393.3333333333, ans=0.125 +2024-07-29 09:35:57,057 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266406.6666666667, ans=0.1 +2024-07-29 09:35:59,333 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. 
limit=15.0 +2024-07-29 09:36:00,387 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=266406.6666666667, ans=0.0 +2024-07-29 09:36:06,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=266420.0, ans=0.125 +2024-07-29 09:36:07,404 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.404e+01 7.729e+01 1.135e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-29 09:36:10,759 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.71 vs. limit=22.5 +2024-07-29 09:36:10,948 INFO [train.py:1114] (0/4) Epoch 20, batch 5600, loss[loss=0.1593, simple_loss=0.2554, pruned_loss=0.03163, over 4746.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.03942, over 934035.19 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:36:20,132 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=22.5 +2024-07-29 09:36:28,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=266460.0, ans=0.2 +2024-07-29 09:36:33,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=266460.0, ans=0.05 +2024-07-29 09:36:33,213 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=266460.0, ans=0.05 +2024-07-29 09:36:33,455 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.11 vs. limit=15.0 +2024-07-29 09:36:38,739 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=266473.3333333333, ans=15.0 +2024-07-29 09:36:45,235 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266486.6666666667, ans=0.1 +2024-07-29 09:36:50,973 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=266486.6666666667, ans=0.125 +2024-07-29 09:36:51,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266486.6666666667, ans=0.125 +2024-07-29 09:36:52,280 INFO [train.py:1114] (0/4) Epoch 20, batch 5650, loss[loss=0.1732, simple_loss=0.2715, pruned_loss=0.0374, over 4545.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2603, pruned_loss=0.03911, over 936644.09 frames. ], batch size: 21, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:36:55,458 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0
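The `INFO [scaling.py:1024] Whitening: name=..., metric=... vs. limit=...` lines track how far each activation's covariance is from a scaled identity; the module only intervenes (via a gradient penalty) when the metric approaches its limit. A rough sketch of one way such a metric can be computed, assumed here to follow a mean(diag(C²))/mean(diag(C))² formulation that equals 1.0 for perfectly "white" activations and grows with the eigenvalue spread; treat the details as approximate rather than the exact scaling.py formula:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """Assumed sketch of the 'Whitening: ... metric=A vs. limit=B' measurement."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    # Split channels into groups and form each group's covariance matrix.
    x = x.reshape(num_frames, num_groups, num_channels // num_groups).transpose(0, 1)
    covar = torch.matmul(x.transpose(1, 2), x)   # (groups, c, c)
    mean_diag = covar.diagonal(dim1=1, dim2=2).mean()
    sq_mean_diag = torch.matmul(covar, covar).diagonal(dim1=1, dim2=2).mean()
    # 1.0 when the covariance is a multiple of the identity; larger otherwise.
    return (sq_mean_diag / mean_diag ** 2).item()

x = torch.randn(1000, 128)                # roughly "white" activations
print(whitening_metric(x, num_groups=4))  # close to 1.0; the log only flags
                                          # values approaching the limit
```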
+2024-07-29 09:37:03,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0 +2024-07-29 09:37:04,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266513.3333333333, ans=0.1 +2024-07-29 09:37:05,952 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266526.6666666667, ans=0.1 +2024-07-29 09:37:16,521 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266540.0, ans=0.125 +2024-07-29 09:37:20,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=266553.3333333333, ans=0.125 +2024-07-29 09:37:22,263 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.835e+01 6.614e+01 7.684e+01 1.140e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-29 09:37:25,571 INFO [train.py:1114] (0/4) Epoch 20, batch 5700, loss[loss=0.1649, simple_loss=0.2617, pruned_loss=0.03405, over 4692.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2605, pruned_loss=0.03934, over 937799.33 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:37:45,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=266580.0, ans=0.125 +2024-07-29 09:37:46,274 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=266580.0, ans=0.025 +2024-07-29 09:37:50,314 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=266593.3333333333, ans=0.2 +2024-07-29 09:38:23,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266606.6666666667, ans=0.1 +2024-07-29 09:38:24,060 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=266606.6666666667, ans=0.0 +2024-07-29 09:38:29,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=266620.0, ans=0.125 +2024-07-29 09:38:31,838 INFO [train.py:1114] (0/4) Epoch 20, batch 5750, loss[loss=0.1712, simple_loss=0.2762, pruned_loss=0.03306, over 4729.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.261, pruned_loss=0.03945, over 938142.95 frames. ], batch size: 19, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:38:47,507 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=266646.6666666667, ans=0.0 +2024-07-29 09:38:54,842 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=266646.6666666667, ans=0.0 +2024-07-29 09:38:56,446 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.80 vs. limit=22.5 +2024-07-29 09:38:58,408 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.37 vs. 
limit=15.0 +2024-07-29 09:38:59,618 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-200000.pt +2024-07-29 09:39:10,445 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266673.3333333333, ans=0.1 +2024-07-29 09:39:13,859 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.63 vs. limit=12.0 +2024-07-29 09:39:14,192 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=266686.6666666667, ans=10.0 +2024-07-29 09:39:17,490 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 5.816e+01 6.291e+01 7.219e+01 1.004e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 09:39:20,789 INFO [train.py:1114] (0/4) Epoch 20, batch 5800, loss[loss=0.1825, simple_loss=0.269, pruned_loss=0.04797, over 4711.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2631, pruned_loss=0.04036, over 937508.62 frames. ], batch size: 19, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:39:22,247 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=266700.0, ans=0.125 +2024-07-29 09:39:28,346 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=266713.3333333333, ans=0.0 +2024-07-29 09:39:35,580 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.67 vs. limit=15.0 +2024-07-29 09:39:53,919 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=266753.3333333333, ans=0.0 +2024-07-29 09:39:56,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=266753.3333333333, ans=0.125 +2024-07-29 09:39:56,745 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=266753.3333333333, ans=0.0 +2024-07-29 09:39:59,744 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-29 09:40:00,113 INFO [train.py:1114] (0/4) Epoch 20, batch 5850, loss[loss=0.1892, simple_loss=0.2973, pruned_loss=0.04051, over 4525.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2633, pruned_loss=0.04054, over 937999.19 frames. ], batch size: 21, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:40:12,852 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=266780.0, ans=0.025 +2024-07-29 09:40:31,386 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.785e+01 6.450e+01 7.129e+01 1.228e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 09:40:34,112 INFO [train.py:1114] (0/4) Epoch 20, batch 5900, loss[loss=0.1935, simple_loss=0.2842, pruned_loss=0.05139, over 4684.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04025, over 937889.61 frames. 
], batch size: 15, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:40:34,277 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=266833.3333333333, ans=0.07 +2024-07-29 09:40:44,955 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=266833.3333333333, ans=0.0 +2024-07-29 09:40:56,856 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=266846.6666666667, ans=0.0 +2024-07-29 09:40:59,636 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266860.0, ans=0.1 +2024-07-29 09:41:06,860 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266873.3333333333, ans=0.125 +2024-07-29 09:41:13,228 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=266873.3333333333, ans=0.0 +2024-07-29 09:41:13,264 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=266873.3333333333, ans=0.0 +2024-07-29 09:41:21,172 INFO [train.py:1114] (0/4) Epoch 20, batch 5950, loss[loss=0.1831, simple_loss=0.2797, pruned_loss=0.04324, over 4688.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2632, pruned_loss=0.04032, over 939740.46 frames. ], batch size: 15, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:41:31,543 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.10 vs. limit=10.0 +2024-07-29 09:41:40,377 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=266913.3333333333, ans=0.0 +2024-07-29 09:41:43,205 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=266926.6666666667, ans=0.125 +2024-07-29 09:42:00,478 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.654e+01 6.112e+01 6.775e+01 1.038e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 09:42:28,145 INFO [train.py:1114] (0/4) Epoch 20, batch 6000, loss[loss=0.1608, simple_loss=0.2555, pruned_loss=0.03301, over 4258.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04008, over 937003.45 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:42:28,146 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 09:42:40,709 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.7854, 3.0290, 3.7358, 2.6169], device='cuda:0') +2024-07-29 09:42:42,420 INFO [zipformer.py:1858] (0/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.5660, 2.5190, 2.9727, 3.2590, 3.2166, 2.9373, 3.2873, 2.3854], + device='cuda:0') +2024-07-29 09:42:44,432 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1606, simple_loss=0.2622, pruned_loss=0.02953, over 944034.00 frames. 
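[Editor's note on the `WARNING [optim.py:487]` records in this log: the five numbers after `grad-norm quartiles` are the min/25%/median/75%/max of recent gradient norms, and the clipping threshold is `Clipping_scale` times the median — e.g. 2.0 × 6.404e+01 ≈ 1.281e+02 in the first warning above. A minimal sketch of that scheme for a generic PyTorch training loop follows; it is an illustration of the reported behavior, not icefall's actual `optim.py`.]

```python
# Minimal sketch (assumption: plain PyTorch, not icefall's optim.py) of
# median-based adaptive gradient clipping as reported in the log:
# threshold = clipping_scale * median(recent grad norms).
from collections import deque

import torch


class MedianGradClipper:
    """Clip gradients to clipping_scale * median of the last `window` norms."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.num_batches = 0
        self.num_clipped = 0

    def __call__(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        # Global L2 norm over all parameter gradients.
        total_norm = torch.norm(
            torch.stack([p.grad.detach().norm(2) for p in params]), 2
        ).item()
        self.norms.append(total_norm)
        self.num_batches += 1

        sorted_norms = sorted(self.norms)
        median = sorted_norms[len(sorted_norms) // 2]
        threshold = self.clipping_scale * median

        if total_norm > threshold:
            self.num_clipped += 1
            scale = threshold / total_norm
            for p in params:
                p.grad.detach().mul_(scale)
        return total_norm
```

[Under this reading, the quartiles printed in each warning are `sorted_norms` sampled at 0/25/50/75/100%, and `percent-clipped` is 100 · num_clipped / num_batches over the reporting interval.]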
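[Editor's note on the `ScheduledFloat: name=..., batch_count=..., ans=...` records: these trace hyperparameters (dropout probabilities, skip rates, balancer bounds) that are functions of the global batch count rather than constants. A minimal sketch of such a batch-count-indexed schedule follows, assuming a piecewise-linear form for illustration; the authoritative shapes live in the model's `scaling.py`.]

```python
# Minimal sketch (piecewise-linear form is an assumption) of a hyperparameter
# scheduled on batch_count, as in the ScheduledFloat log records above.
class ScheduledValue:
    """Piecewise-linear schedule over batch_count, defined by sorted
    (batch_count, value) control points; constant outside their range."""

    def __init__(self, *points):
        self.points = list(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
        return pts[-1][1]


# Hypothetical example: a dropout rate decaying from 0.3 to 0.1 by batch 20000.
dropout_p = ScheduledValue((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(266513.33))  # -> 0.1, consistent with ans=0.1 late in training
```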
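[Editor's note on the `Whitening: ... metric=X vs. limit=Y` records: they compare how far a layer's activations are from being whitened against a (possibly scheduled) limit, with a corrective penalty applied only when the metric exceeds the limit. One plausible metric is the eigenvalue spread mean(λ²)/mean(λ)² of the feature covariance, which equals 1.0 for perfectly white features; the sketch below uses that form for illustration and may differ from the exact formula in `scaling.py`.]

```python
# Minimal sketch (the exact metric is an assumption) of a whitening metric:
# mean(lambda^2) / mean(lambda)^2 over the eigenvalues of the per-group
# feature covariance; equals 1.0 when features are perfectly whitened.
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """x: (num_frames, num_channels). Returns >= 1.0; larger = less white."""
    num_channels = x.shape[-1]
    assert num_channels % num_groups == 0
    cpg = num_channels // num_groups
    # (num_groups, num_frames, channels_per_group)
    x = x.reshape(-1, num_groups, cpg).transpose(0, 1)
    # Per-group covariance: (num_groups, cpg, cpg).
    cov = torch.matmul(x.transpose(1, 2), x) / x.shape[1]
    # mean(lambda) = mean of the covariance diagonal (trace / channels).
    mean_eig = torch.diagonal(cov, dim1=1, dim2=2).mean()
    # mean(lambda^2) = Frobenius norm^2 / channels (cov is symmetric).
    mean_eig_sq = (cov ** 2).sum(dim=(1, 2)).mean() / cpg
    return mean_eig_sq / (mean_eig ** 2)
```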
+2024-07-29 09:42:44,432 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 09:42:46,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266966.6666666667, ans=0.125 +2024-07-29 09:42:55,780 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.60 vs. limit=12.0 +2024-07-29 09:42:57,736 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=266993.3333333333, ans=0.0 +2024-07-29 09:43:10,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=267006.6666666667, ans=0.05 +2024-07-29 09:43:17,235 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-07-29 09:43:18,288 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=267033.3333333333, ans=0.0 +2024-07-29 09:43:18,761 INFO [train.py:1114] (0/4) Epoch 20, batch 6050, loss[loss=0.1596, simple_loss=0.244, pruned_loss=0.03764, over 4778.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2612, pruned_loss=0.03974, over 938375.75 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:43:22,393 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=267033.3333333333, ans=0.125 +2024-07-29 09:43:40,313 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=267046.6666666667, ans=0.125 +2024-07-29 09:44:05,100 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.588e+01 6.267e+01 7.088e+01 1.023e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 09:44:06,996 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267086.6666666667, ans=0.1 +2024-07-29 09:44:12,295 INFO [train.py:1114] (0/4) Epoch 20, batch 6100, loss[loss=0.2105, simple_loss=0.2932, pruned_loss=0.06396, over 4690.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2598, pruned_loss=0.03886, over 937754.33 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:44:21,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.05 vs. limit=8.0 +2024-07-29 09:44:26,951 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=267113.3333333333, ans=0.025 +2024-07-29 09:44:27,190 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.87 vs. 
limit=15.0 +2024-07-29 09:44:31,121 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=267126.6666666667, ans=0.125 +2024-07-29 09:44:35,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=267126.6666666667, ans=0.125 +2024-07-29 09:44:36,958 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=267140.0, ans=0.035 +2024-07-29 09:44:42,745 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.63 vs. limit=22.5 +2024-07-29 09:44:45,267 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=267153.3333333333, ans=0.025 +2024-07-29 09:44:50,613 INFO [train.py:1114] (0/4) Epoch 20, batch 6150, loss[loss=0.2426, simple_loss=0.3182, pruned_loss=0.08345, over 3279.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2606, pruned_loss=0.03926, over 936098.18 frames. ], batch size: 35, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:45:12,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0 +2024-07-29 09:45:16,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=267206.6666666667, ans=0.2 +2024-07-29 09:45:26,396 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.902e+01 6.533e+01 7.507e+01 1.268e+02, threshold=1.307e+02, percent-clipped=1.0 +2024-07-29 09:45:27,915 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=267220.0, ans=0.125 +2024-07-29 09:45:29,242 INFO [train.py:1114] (0/4) Epoch 20, batch 6200, loss[loss=0.1809, simple_loss=0.26, pruned_loss=0.05088, over 4747.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2611, pruned_loss=0.03972, over 935535.88 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:45:43,535 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=267260.0, ans=0.025 +2024-07-29 09:45:54,759 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=267273.3333333333, ans=0.05 +2024-07-29 09:46:01,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=267286.6666666667, ans=0.125 +2024-07-29 09:46:05,746 INFO [train.py:1114] (0/4) Epoch 20, batch 6250, loss[loss=0.1543, simple_loss=0.2399, pruned_loss=0.03439, over 4808.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2614, pruned_loss=0.03957, over 931893.33 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:46:16,916 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.67 vs. 
limit=10.0 +2024-07-29 09:46:38,442 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=267340.0, ans=0.2 +2024-07-29 09:46:39,032 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=267340.0, ans=0.0 +2024-07-29 09:46:39,142 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=267340.0, ans=0.125 +2024-07-29 09:46:39,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267340.0, ans=0.125 +2024-07-29 09:46:45,642 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.746e+01 6.370e+01 7.341e+01 9.825e+01, threshold=1.274e+02, percent-clipped=0.0 +2024-07-29 09:46:55,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=267366.6666666667, ans=0.125 +2024-07-29 09:46:56,400 INFO [train.py:1114] (0/4) Epoch 20, batch 6300, loss[loss=0.1457, simple_loss=0.2336, pruned_loss=0.02891, over 4492.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.261, pruned_loss=0.03925, over 929086.76 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:46:59,837 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=267366.6666666667, ans=0.125 +2024-07-29 09:47:05,049 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=267380.0, ans=0.2 +2024-07-29 09:47:15,462 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=267406.6666666667, ans=0.025 +2024-07-29 09:47:20,671 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=267406.6666666667, ans=0.0 +2024-07-29 09:47:32,265 INFO [train.py:1114] (0/4) Epoch 20, batch 6350, loss[loss=0.1786, simple_loss=0.276, pruned_loss=0.04054, over 4491.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2603, pruned_loss=0.03882, over 933253.62 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:47:44,035 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=267446.6666666667, ans=15.0 +2024-07-29 09:47:47,787 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=267460.0, ans=0.2 +2024-07-29 09:47:51,804 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.19 vs. 
limit=22.5 +2024-07-29 09:47:57,723 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=267473.3333333333, ans=0.125 +2024-07-29 09:48:03,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=267486.6666666667, ans=0.125 +2024-07-29 09:48:06,772 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.677e+01 6.317e+01 7.481e+01 1.107e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 09:48:07,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=267486.6666666667, ans=0.2 +2024-07-29 09:48:09,431 INFO [train.py:1114] (0/4) Epoch 20, batch 6400, loss[loss=0.1692, simple_loss=0.2656, pruned_loss=0.03636, over 4629.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2611, pruned_loss=0.03895, over 934380.98 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:48:16,330 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=267513.3333333333, ans=0.09899494936611666 +2024-07-29 09:48:19,944 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.83 vs. limit=15.0 +2024-07-29 09:48:38,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=267553.3333333333, ans=0.125 +2024-07-29 09:48:42,742 INFO [train.py:1114] (0/4) Epoch 20, batch 6450, loss[loss=0.1869, simple_loss=0.2816, pruned_loss=0.04612, over 4624.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2615, pruned_loss=0.03914, over 938023.78 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:48:45,252 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.14 vs. 
limit=15.0 +2024-07-29 09:48:46,338 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=267566.6666666667, ans=0.125 +2024-07-29 09:48:46,894 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=267566.6666666667, ans=0.125 +2024-07-29 09:49:10,625 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=267606.6666666667, ans=0.125 +2024-07-29 09:49:10,747 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=267606.6666666667, ans=0.5 +2024-07-29 09:49:11,389 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=267606.6666666667, ans=0.125 +2024-07-29 09:49:13,381 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=267606.6666666667, ans=0.0 +2024-07-29 09:49:13,383 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:49:23,607 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=267606.6666666667, ans=0.125 +2024-07-29 09:49:46,230 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+01 5.798e+01 6.360e+01 7.229e+01 1.035e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 09:49:48,982 INFO [train.py:1114] (0/4) Epoch 20, batch 6500, loss[loss=0.2234, simple_loss=0.3035, pruned_loss=0.07161, over 3325.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2612, pruned_loss=0.03899, over 939393.82 frames. ], batch size: 36, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:50:03,063 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=267646.6666666667, ans=0.0 +2024-07-29 09:50:07,661 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=267646.6666666667, ans=0.2 +2024-07-29 09:50:16,908 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=267673.3333333333, ans=0.125 +2024-07-29 09:50:18,891 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=267673.3333333333, ans=0.125 +2024-07-29 09:50:25,874 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=267686.6666666667, ans=0.125 +2024-07-29 09:50:31,650 INFO [train.py:1114] (0/4) Epoch 20, batch 6550, loss[loss=0.1599, simple_loss=0.2413, pruned_loss=0.03926, over 4801.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2609, pruned_loss=0.03848, over 942521.68 frames. 
], batch size: 11, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:50:32,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=267700.0, ans=0.125 +2024-07-29 09:50:35,090 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=267700.0, ans=0.125 +2024-07-29 09:50:42,266 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267713.3333333333, ans=0.1 +2024-07-29 09:50:44,100 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=267726.6666666667, ans=0.2 +2024-07-29 09:50:44,818 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=267726.6666666667, ans=0.125 +2024-07-29 09:51:17,039 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=267740.0, ans=0.125 +2024-07-29 09:51:21,643 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=267740.0, ans=0.0 +2024-07-29 09:51:26,544 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.663e+01 6.386e+01 7.207e+01 1.403e+02, threshold=1.277e+02, percent-clipped=3.0 +2024-07-29 09:51:29,175 INFO [train.py:1114] (0/4) Epoch 20, batch 6600, loss[loss=0.1841, simple_loss=0.2694, pruned_loss=0.04944, over 4938.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.261, pruned_loss=0.03856, over 944639.19 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:51:42,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-07-29 09:51:44,302 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=267793.3333333333, ans=0.0 +2024-07-29 09:51:44,469 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.19 vs. limit=15.0 +2024-07-29 09:51:50,414 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=267806.6666666667, ans=0.0 +2024-07-29 09:52:55,812 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267806.6666666667, ans=0.1 +2024-07-29 09:52:57,838 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=267820.0, ans=0.2 +2024-07-29 09:53:00,083 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.73 vs. limit=10.0 +2024-07-29 09:53:10,579 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=267820.0, ans=0.1 +2024-07-29 09:53:12,460 INFO [train.py:1114] (0/4) Epoch 20, batch 6650, loss[loss=0.213, simple_loss=0.3054, pruned_loss=0.0603, over 4619.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2615, pruned_loss=0.03903, over 943342.09 frames. 
], batch size: 17, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:53:16,264 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.92 vs. limit=22.5 +2024-07-29 09:53:16,525 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=267833.3333333333, ans=0.125 +2024-07-29 09:53:19,263 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=267846.6666666667, ans=0.125 +2024-07-29 09:53:29,319 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=267860.0, ans=10.0 +2024-07-29 09:53:34,469 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=267873.3333333333, ans=0.0 +2024-07-29 09:53:40,439 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=267886.6666666667, ans=0.125 +2024-07-29 09:53:40,558 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.76 vs. limit=6.0 +2024-07-29 09:53:44,858 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.709e+01 6.420e+01 7.242e+01 1.116e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-29 09:53:47,495 INFO [train.py:1114] (0/4) Epoch 20, batch 6700, loss[loss=0.1549, simple_loss=0.2546, pruned_loss=0.02763, over 4714.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2614, pruned_loss=0.03867, over 942218.43 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:53:55,673 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-07-29 09:54:02,371 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=267913.3333333333, ans=0.0 +2024-07-29 09:54:28,339 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=267926.6666666667, ans=0.035 +2024-07-29 09:54:34,498 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:54:42,219 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.43 vs. limit=15.0 +2024-07-29 09:55:01,107 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=267953.3333333333, ans=0.09899494936611666 +2024-07-29 09:55:03,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=267953.3333333333, ans=0.125 +2024-07-29 09:55:06,359 INFO [train.py:1114] (0/4) Epoch 20, batch 6750, loss[loss=0.1957, simple_loss=0.2814, pruned_loss=0.05498, over 4240.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2619, pruned_loss=0.03935, over 940528.94 frames. 
], batch size: 25, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:55:16,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=267980.0, ans=0.04949747468305833 +2024-07-29 09:55:39,752 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.67 vs. limit=12.0 +2024-07-29 09:55:48,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=268006.6666666667, ans=0.035 +2024-07-29 09:55:49,570 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=268020.0, ans=0.2 +2024-07-29 09:55:53,700 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.999e+01 6.595e+01 7.628e+01 1.756e+02, threshold=1.319e+02, percent-clipped=1.0 +2024-07-29 09:55:54,444 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=268020.0, ans=0.125 +2024-07-29 09:55:56,349 INFO [train.py:1114] (0/4) Epoch 20, batch 6800, loss[loss=0.1477, simple_loss=0.2438, pruned_loss=0.02584, over 4643.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2622, pruned_loss=0.03938, over 938789.18 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:56:05,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=268046.6666666667, ans=0.125 +2024-07-29 09:56:29,643 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.64 vs. limit=10.0 +2024-07-29 09:56:29,864 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=268073.3333333333, ans=0.125 +2024-07-29 09:56:40,851 INFO [train.py:1114] (0/4) Epoch 20, batch 6850, loss[loss=0.1405, simple_loss=0.2382, pruned_loss=0.02144, over 4698.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.262, pruned_loss=0.03886, over 940637.07 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:57:01,885 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=268113.3333333333, ans=0.1 +2024-07-29 09:57:04,349 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.48 vs. limit=22.5 +2024-07-29 09:57:11,914 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=268126.6666666667, ans=0.0 +2024-07-29 09:57:26,929 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=268140.0, ans=0.125 +2024-07-29 09:57:36,090 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.831e+01 6.589e+01 8.147e+01 1.219e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 09:57:38,783 INFO [train.py:1114] (0/4) Epoch 20, batch 6900, loss[loss=0.1752, simple_loss=0.2566, pruned_loss=0.0469, over 4967.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2617, pruned_loss=0.03913, over 942963.68 frames. 
], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:57:45,386 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=268166.6666666667, ans=0.0 +2024-07-29 09:57:48,748 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.71 vs. limit=15.0 +2024-07-29 09:57:53,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=268180.0, ans=0.125 +2024-07-29 09:58:02,422 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268193.3333333333, ans=0.1 +2024-07-29 09:58:04,364 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=268193.3333333333, ans=0.125 +2024-07-29 09:58:05,102 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=268193.3333333333, ans=0.0 +2024-07-29 09:58:21,488 INFO [train.py:1114] (0/4) Epoch 20, batch 6950, loss[loss=0.1421, simple_loss=0.2291, pruned_loss=0.02757, over 4506.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2609, pruned_loss=0.03915, over 940238.36 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:58:26,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=268233.3333333333, ans=0.0 +2024-07-29 09:59:10,543 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=268246.6666666667, ans=0.2 +2024-07-29 09:59:19,854 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.59 vs. limit=15.0 +2024-07-29 09:59:23,633 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:59:40,760 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=268286.6666666667, ans=0.2 +2024-07-29 09:59:42,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=268286.6666666667, ans=0.125 +2024-07-29 09:59:44,608 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.602e+01 6.145e+01 6.791e+01 9.985e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 10:00:05,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=268300.0, ans=0.125 +2024-07-29 10:00:05,720 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.63 vs. limit=10.0 +2024-07-29 10:00:05,974 INFO [train.py:1114] (0/4) Epoch 20, batch 7000, loss[loss=0.1813, simple_loss=0.2688, pruned_loss=0.04689, over 4664.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2614, pruned_loss=0.0393, over 938771.01 frames. ], batch size: 17, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:00:38,682 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. 
limit=6.0 +2024-07-29 10:00:44,717 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=268326.6666666667, ans=0.1 +2024-07-29 10:00:47,629 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.83 vs. limit=15.0 +2024-07-29 10:00:49,382 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=268340.0, ans=0.1 +2024-07-29 10:00:56,577 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=268353.3333333333, ans=0.0 +2024-07-29 10:00:57,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=268353.3333333333, ans=0.2 +2024-07-29 10:00:57,175 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=268353.3333333333, ans=0.125 +2024-07-29 10:01:12,978 INFO [train.py:1114] (0/4) Epoch 20, batch 7050, loss[loss=0.1728, simple_loss=0.2636, pruned_loss=0.041, over 4688.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.261, pruned_loss=0.03881, over 941987.14 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:01:37,019 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=268406.6666666667, ans=0.125 +2024-07-29 10:01:38,453 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.62 vs. limit=12.0 +2024-07-29 10:01:39,437 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=268406.6666666667, ans=0.125 +2024-07-29 10:01:40,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=268420.0, ans=0.1 +2024-07-29 10:01:45,957 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.713e+01 6.192e+01 7.192e+01 1.067e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 10:01:46,316 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.08 vs. limit=22.5 +2024-07-29 10:01:49,614 INFO [train.py:1114] (0/4) Epoch 20, batch 7100, loss[loss=0.2081, simple_loss=0.297, pruned_loss=0.05962, over 4800.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2624, pruned_loss=0.03935, over 936235.46 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:02:00,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=268446.6666666667, ans=0.0 +2024-07-29 10:02:02,767 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=268446.6666666667, ans=0.0 +2024-07-29 10:02:20,116 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268473.3333333333, ans=0.125 +2024-07-29 10:02:22,326 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.10 vs. 
limit=15.0 +2024-07-29 10:02:28,280 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=268486.6666666667, ans=10.0 +2024-07-29 10:02:31,663 INFO [train.py:1114] (0/4) Epoch 20, batch 7150, loss[loss=0.1902, simple_loss=0.2718, pruned_loss=0.05436, over 4496.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2611, pruned_loss=0.03893, over 937285.58 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:02:31,902 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=268500.0, ans=0.125 +2024-07-29 10:02:43,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=268500.0, ans=0.125 +2024-07-29 10:02:44,954 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=268500.0, ans=0.125 +2024-07-29 10:02:46,070 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=268500.0, ans=0.125 +2024-07-29 10:02:52,623 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=268513.3333333333, ans=0.125 +2024-07-29 10:03:06,934 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=268540.0, ans=0.0 +2024-07-29 10:03:08,953 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=268553.3333333333, ans=0.2 +2024-07-29 10:03:09,537 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=268553.3333333333, ans=0.2 +2024-07-29 10:03:10,202 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=268553.3333333333, ans=0.0 +2024-07-29 10:03:32,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0 +2024-07-29 10:03:33,075 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.350e+01 5.664e+01 6.258e+01 7.035e+01 1.192e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 10:03:59,852 INFO [train.py:1114] (0/4) Epoch 20, batch 7200, loss[loss=0.1921, simple_loss=0.2885, pruned_loss=0.04781, over 4797.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2622, pruned_loss=0.03916, over 937697.73 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:04:19,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=268566.6666666667, ans=0.125 +2024-07-29 10:06:41,409 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:08:23,893 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=268606.6666666667, ans=0.125 +2024-07-29 10:09:35,398 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=268620.0, ans=0.025 +2024-07-29 10:09:47,392 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.54 vs. 
limit=15.0 +2024-07-29 10:09:49,546 INFO [train.py:1114] (0/4) Epoch 20, batch 7250, loss[loss=0.1355, simple_loss=0.2217, pruned_loss=0.02464, over 4864.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2615, pruned_loss=0.03921, over 939671.11 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:10:18,339 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.50 vs. limit=15.0 +2024-07-29 10:10:26,337 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:10:39,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=268673.3333333333, ans=0.125 +2024-07-29 10:11:35,256 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+01 5.763e+01 6.435e+01 7.253e+01 9.940e+01, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 10:12:08,069 INFO [train.py:1114] (0/4) Epoch 20, batch 7300, loss[loss=0.1668, simple_loss=0.256, pruned_loss=0.03877, over 4850.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2606, pruned_loss=0.039, over 939983.90 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:14:10,678 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=268700.0, ans=0.0 +2024-07-29 10:17:17,375 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=268713.3333333333, ans=10.0 +2024-07-29 10:17:51,988 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.32 vs. limit=15.0 +2024-07-29 10:18:08,675 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=268726.6666666667, ans=0.04949747468305833 +2024-07-29 10:18:42,881 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=268740.0, ans=0.05 +2024-07-29 10:18:48,257 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=268740.0, ans=0.2 +2024-07-29 10:18:49,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=268753.3333333333, ans=0.125 +2024-07-29 10:18:52,684 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=268753.3333333333, ans=0.125 +2024-07-29 10:19:39,298 INFO [train.py:1114] (0/4) Epoch 20, batch 7350, loss[loss=0.1734, simple_loss=0.2689, pruned_loss=0.03893, over 4633.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2604, pruned_loss=0.03877, over 939042.62 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:19:42,002 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=268766.6666666667, ans=0.0 +2024-07-29 10:19:42,207 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.59 vs. 
limit=15.0 +2024-07-29 10:21:02,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=268806.6666666667, ans=0.2 +2024-07-29 10:21:06,787 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.33 vs. limit=15.0 +2024-07-29 10:22:45,782 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.22 vs. limit=15.0 +2024-07-29 10:22:45,961 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.764e+01 6.635e+01 7.838e+01 1.063e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-29 10:22:59,788 INFO [train.py:1114] (0/4) Epoch 20, batch 7400, loss[loss=0.148, simple_loss=0.2443, pruned_loss=0.0258, over 4685.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2605, pruned_loss=0.03828, over 940231.74 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:23:36,272 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=268833.3333333333, ans=0.125 +2024-07-29 10:23:36,604 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-29 10:23:37,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268833.3333333333, ans=0.1 +2024-07-29 10:23:39,706 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=268846.6666666667, ans=0.0 +2024-07-29 10:23:45,285 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.53 vs. limit=10.0 +2024-07-29 10:23:49,324 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=268860.0, ans=0.125 +2024-07-29 10:23:53,993 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=268873.3333333333, ans=10.0 +2024-07-29 10:24:02,797 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.57 vs. limit=15.0 +2024-07-29 10:24:06,253 INFO [train.py:1114] (0/4) Epoch 20, batch 7450, loss[loss=0.162, simple_loss=0.2381, pruned_loss=0.04295, over 4616.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2604, pruned_loss=0.03853, over 938089.96 frames. 
], batch size: 11, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:25:35,494 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268940.0, ans=0.125 +2024-07-29 10:25:36,241 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=268940.0, ans=0.0 +2024-07-29 10:25:38,229 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=268940.0, ans=0.125 +2024-07-29 10:25:46,124 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.622e+01 6.334e+01 7.188e+01 1.210e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 10:25:51,758 INFO [train.py:1114] (0/4) Epoch 20, batch 7500, loss[loss=0.2135, simple_loss=0.2838, pruned_loss=0.07156, over 3458.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2603, pruned_loss=0.03841, over 936577.10 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:25:54,655 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=268966.6666666667, ans=0.125 +2024-07-29 10:25:58,283 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.89 vs. limit=15.0 +2024-07-29 10:25:58,287 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.82 vs. limit=10.0 +2024-07-29 10:26:00,687 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=268980.0, ans=0.0 +2024-07-29 10:26:02,069 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=268980.0, ans=0.125 +2024-07-29 10:26:08,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=268993.3333333333, ans=0.125 +2024-07-29 10:26:10,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=268993.3333333333, ans=0.0 +2024-07-29 10:26:15,840 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=269006.6666666667, ans=0.95 +2024-07-29 10:26:18,534 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=269020.0, ans=0.2 +2024-07-29 10:26:22,297 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=269020.0, ans=0.125 +2024-07-29 10:26:23,078 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=269020.0, ans=0.0 +2024-07-29 10:26:24,836 INFO [train.py:1114] (0/4) Epoch 20, batch 7550, loss[loss=0.179, simple_loss=0.2723, pruned_loss=0.04284, over 4591.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2617, pruned_loss=0.03864, over 936279.25 frames. 
], batch size: 17, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:26:46,663 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=269060.0, ans=0.125 +2024-07-29 10:26:46,693 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=269060.0, ans=0.0 +2024-07-29 10:26:52,561 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=269073.3333333333, ans=0.0 +2024-07-29 10:26:53,921 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=269073.3333333333, ans=0.09899494936611666 +2024-07-29 10:26:55,230 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=269073.3333333333, ans=0.125 +2024-07-29 10:27:04,836 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.64 vs. limit=6.0 +2024-07-29 10:27:06,373 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.698e+01 6.238e+01 6.861e+01 9.805e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-29 10:27:14,603 INFO [train.py:1114] (0/4) Epoch 20, batch 7600, loss[loss=0.1714, simple_loss=0.2664, pruned_loss=0.03816, over 4818.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2618, pruned_loss=0.0388, over 938153.83 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:27:40,204 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=269140.0, ans=0.025 +2024-07-29 10:27:40,428 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.13 vs. limit=15.0 +2024-07-29 10:27:45,273 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=269153.3333333333, ans=0.0 +2024-07-29 10:27:56,947 INFO [train.py:1114] (0/4) Epoch 20, batch 7650, loss[loss=0.1476, simple_loss=0.235, pruned_loss=0.03007, over 4942.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2623, pruned_loss=0.03893, over 936711.37 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:27:57,871 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=269166.6666666667, ans=0.2 +2024-07-29 10:28:04,458 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:28:22,482 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=269193.3333333333, ans=0.2 +2024-07-29 10:29:09,510 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.550e+01 6.272e+01 7.437e+01 1.310e+02, threshold=1.254e+02, percent-clipped=1.0 +2024-07-29 10:29:13,719 INFO [train.py:1114] (0/4) Epoch 20, batch 7700, loss[loss=0.1844, simple_loss=0.2731, pruned_loss=0.04784, over 4702.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2629, pruned_loss=0.03915, over 934620.15 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:29:19,767 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. 
limit=6.0 +2024-07-29 10:29:26,073 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=269260.0, ans=0.0 +2024-07-29 10:29:31,866 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=269260.0, ans=0.125 +2024-07-29 10:29:38,584 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=269273.3333333333, ans=0.0 +2024-07-29 10:29:42,835 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.67 vs. limit=10.0 +2024-07-29 10:29:49,385 INFO [train.py:1114] (0/4) Epoch 20, batch 7750, loss[loss=0.1562, simple_loss=0.259, pruned_loss=0.02676, over 4926.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2628, pruned_loss=0.039, over 935821.76 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:29:52,935 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=269300.0, ans=0.1 +2024-07-29 10:30:10,982 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=269326.6666666667, ans=0.125 +2024-07-29 10:30:12,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=269326.6666666667, ans=0.125 +2024-07-29 10:30:19,447 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=269340.0, ans=0.2 +2024-07-29 10:30:23,450 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.14 vs. limit=15.0 +2024-07-29 10:30:28,813 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.687e+01 6.141e+01 6.601e+01 8.666e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 10:30:31,964 INFO [train.py:1114] (0/4) Epoch 20, batch 7800, loss[loss=0.1865, simple_loss=0.2822, pruned_loss=0.04538, over 4667.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2628, pruned_loss=0.039, over 937426.71 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:30:34,752 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=269366.6666666667, ans=0.125 +2024-07-29 10:30:35,683 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-29 10:30:44,146 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=269366.6666666667, ans=0.0 +2024-07-29 10:31:39,059 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.49 vs. limit=15.0 +2024-07-29 10:31:41,294 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=269420.0, ans=0.125 +2024-07-29 10:31:47,983 INFO [train.py:1114] (0/4) Epoch 20, batch 7850, loss[loss=0.1634, simple_loss=0.2376, pruned_loss=0.04455, over 4492.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2623, pruned_loss=0.03922, over 936232.84 frames. 
], batch size: 10, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:31:56,819 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=269446.6666666667, ans=0.125 +2024-07-29 10:32:05,413 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=15.0 +2024-07-29 10:32:08,193 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=269473.3333333333, ans=0.0 +2024-07-29 10:32:11,594 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.14 vs. limit=22.5 +2024-07-29 10:32:12,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=269473.3333333333, ans=0.025 +2024-07-29 10:32:16,076 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=269486.6666666667, ans=0.125 +2024-07-29 10:32:16,110 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=269486.6666666667, ans=0.125 +2024-07-29 10:32:18,444 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 5.765e+01 6.588e+01 7.311e+01 1.076e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 10:32:21,163 INFO [train.py:1114] (0/4) Epoch 20, batch 7900, loss[loss=0.1806, simple_loss=0.2753, pruned_loss=0.04292, over 4880.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2634, pruned_loss=0.03993, over 933580.26 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:32:24,843 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.97 vs. limit=15.0 +2024-07-29 10:32:41,456 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=269540.0, ans=0.1 +2024-07-29 10:32:41,559 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=269540.0, ans=0.0 +2024-07-29 10:32:42,811 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=269540.0, ans=0.0 +2024-07-29 10:32:48,467 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.88 vs. limit=15.0 +2024-07-29 10:33:02,897 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=269553.3333333333, ans=15.0 +2024-07-29 10:33:09,093 INFO [train.py:1114] (0/4) Epoch 20, batch 7950, loss[loss=0.1978, simple_loss=0.2849, pruned_loss=0.05539, over 3376.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2637, pruned_loss=0.04008, over 935749.80 frames. 
], batch size: 36, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:33:11,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=269566.6666666667, ans=0.125 +2024-07-29 10:33:12,613 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269566.6666666667, ans=0.1 +2024-07-29 10:33:32,217 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=269593.3333333333, ans=0.1 +2024-07-29 10:33:44,267 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.756e+01 6.342e+01 7.191e+01 1.019e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 10:33:45,715 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=269620.0, ans=0.125 +2024-07-29 10:33:46,948 INFO [train.py:1114] (0/4) Epoch 20, batch 8000, loss[loss=0.1551, simple_loss=0.2282, pruned_loss=0.04096, over 4597.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.262, pruned_loss=0.03966, over 935070.44 frames. ], batch size: 11, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:33:48,278 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=269633.3333333333, ans=0.125 +2024-07-29 10:34:10,536 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=269646.6666666667, ans=0.0 +2024-07-29 10:34:22,598 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=269673.3333333333, ans=0.125 +2024-07-29 10:34:22,724 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.51 vs. limit=15.0 +2024-07-29 10:34:43,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.76 vs. limit=10.0 +2024-07-29 10:34:48,974 INFO [train.py:1114] (0/4) Epoch 20, batch 8050, loss[loss=0.2138, simple_loss=0.3122, pruned_loss=0.05765, over 4812.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2624, pruned_loss=0.03969, over 935255.96 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:35:01,034 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=269713.3333333333, ans=0.0 +2024-07-29 10:35:14,933 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. 
limit=15.0 +2024-07-29 10:35:16,141 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=269740.0, ans=0.0 +2024-07-29 10:35:19,926 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=269753.3333333333, ans=0.0 +2024-07-29 10:35:21,086 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.839e+01 5.695e+01 6.260e+01 6.907e+01 1.067e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 10:35:21,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=269753.3333333333, ans=0.125 +2024-07-29 10:35:35,901 INFO [train.py:1114] (0/4) Epoch 20, batch 8100, loss[loss=0.1546, simple_loss=0.259, pruned_loss=0.0251, over 4787.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2634, pruned_loss=0.03982, over 934702.69 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:35:37,027 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.53 vs. limit=10.0 +2024-07-29 10:35:46,523 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=269780.0, ans=0.0 +2024-07-29 10:35:51,131 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=269793.3333333333, ans=0.2 +2024-07-29 10:35:51,824 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=269793.3333333333, ans=0.2 +2024-07-29 10:35:55,749 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=269806.6666666667, ans=0.125 +2024-07-29 10:35:57,669 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=269806.6666666667, ans=0.125 +2024-07-29 10:36:01,593 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=269820.0, ans=0.125 +2024-07-29 10:36:08,434 INFO [train.py:1114] (0/4) Epoch 20, batch 8150, loss[loss=0.1806, simple_loss=0.2739, pruned_loss=0.04362, over 4800.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.263, pruned_loss=0.04023, over 937970.18 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:36:08,548 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=269833.3333333333, ans=0.0 +2024-07-29 10:36:09,422 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.51 vs. limit=15.0 +2024-07-29 10:36:12,403 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=269833.3333333333, ans=0.0 +2024-07-29 10:36:52,369 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.965e+01 5.540e+01 6.167e+01 6.859e+01 1.030e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 10:36:55,183 INFO [train.py:1114] (0/4) Epoch 20, batch 8200, loss[loss=0.1676, simple_loss=0.2509, pruned_loss=0.04215, over 4798.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2633, pruned_loss=0.04022, over 939030.52 frames. 
], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:36:56,605 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269900.0, ans=0.125 +2024-07-29 10:36:58,526 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=269900.0, ans=0.5 +2024-07-29 10:36:59,233 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:37:00,583 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=269900.0, ans=0.2 +2024-07-29 10:37:14,978 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.95 vs. limit=15.0 +2024-07-29 10:37:51,237 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=269940.0, ans=0.07 +2024-07-29 10:38:06,889 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=269966.6666666667, ans=0.0 +2024-07-29 10:38:11,128 INFO [train.py:1114] (0/4) Epoch 20, batch 8250, loss[loss=0.1716, simple_loss=0.2565, pruned_loss=0.04338, over 4898.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2629, pruned_loss=0.04033, over 939115.34 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:38:23,208 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269966.6666666667, ans=0.125 +2024-07-29 10:38:33,722 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269993.3333333333, ans=0.1 +2024-07-29 10:38:36,415 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269993.3333333333, ans=0.1 +2024-07-29 10:38:37,674 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=269993.3333333333, ans=0.0 +2024-07-29 10:38:41,054 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=270006.6666666667, ans=0.2 +2024-07-29 10:38:49,950 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.571e+01 5.582e+01 6.021e+01 6.658e+01 1.061e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 10:38:52,322 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.35 vs. limit=15.0 +2024-07-29 10:38:52,555 INFO [train.py:1114] (0/4) Epoch 20, batch 8300, loss[loss=0.2066, simple_loss=0.303, pruned_loss=0.05509, over 4887.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04049, over 939108.93 frames. 
], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:38:53,884 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=270033.3333333333, ans=0.2 +2024-07-29 10:38:54,000 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=270033.3333333333, ans=0.125 +2024-07-29 10:39:12,784 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=270060.0, ans=0.0 +2024-07-29 10:39:24,888 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=270086.6666666667, ans=0.125 +2024-07-29 10:39:28,019 INFO [train.py:1114] (0/4) Epoch 20, batch 8350, loss[loss=0.2124, simple_loss=0.31, pruned_loss=0.05744, over 4805.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2628, pruned_loss=0.04034, over 941751.74 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:39:36,008 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=270113.3333333333, ans=0.125 +2024-07-29 10:39:53,307 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.24 vs. limit=22.5 +2024-07-29 10:39:54,216 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=270153.3333333333, ans=0.125 +2024-07-29 10:39:57,942 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.732e+01 6.400e+01 7.266e+01 9.706e+01, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 10:40:00,040 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=270166.6666666667, ans=0.2 +2024-07-29 10:40:00,643 INFO [train.py:1114] (0/4) Epoch 20, batch 8400, loss[loss=0.1549, simple_loss=0.2479, pruned_loss=0.03099, over 4779.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2626, pruned_loss=0.04014, over 940368.08 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:40:20,126 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=270180.0, ans=0.0 +2024-07-29 10:40:31,531 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270206.6666666667, ans=0.1 +2024-07-29 10:40:34,099 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=270220.0, ans=0.125 +2024-07-29 10:40:41,452 INFO [train.py:1114] (0/4) Epoch 20, batch 8450, loss[loss=0.1886, simple_loss=0.2742, pruned_loss=0.0515, over 4807.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2641, pruned_loss=0.04015, over 939054.91 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:40:55,936 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270260.0, ans=0.1 +2024-07-29 10:40:59,633 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.90 vs. 
limit=15.0 +2024-07-29 10:41:09,981 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=270286.6666666667, ans=0.125 +2024-07-29 10:41:10,567 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=270286.6666666667, ans=0.0 +2024-07-29 10:41:12,717 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.700e+01 6.559e+01 7.490e+01 1.068e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 10:41:22,398 INFO [train.py:1114] (0/4) Epoch 20, batch 8500, loss[loss=0.1452, simple_loss=0.2253, pruned_loss=0.03254, over 4609.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2628, pruned_loss=0.03964, over 938801.45 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:41:29,610 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.43 vs. limit=22.5 +2024-07-29 10:41:39,663 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.03 vs. limit=12.0 +2024-07-29 10:41:51,587 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270353.3333333333, ans=0.125 +2024-07-29 10:41:56,528 INFO [train.py:1114] (0/4) Epoch 20, batch 8550, loss[loss=0.1186, simple_loss=0.2093, pruned_loss=0.01393, over 4817.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2627, pruned_loss=0.03958, over 939681.63 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:42:02,230 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0 +2024-07-29 10:42:21,080 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=270406.6666666667, ans=0.0 +2024-07-29 10:42:25,305 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.15 vs. limit=10.0 +2024-07-29 10:42:28,845 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.753e+01 6.246e+01 7.230e+01 1.151e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 10:42:30,803 INFO [train.py:1114] (0/4) Epoch 20, batch 8600, loss[loss=0.165, simple_loss=0.2539, pruned_loss=0.03803, over 4796.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2614, pruned_loss=0.03914, over 939538.63 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:42:39,600 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=270433.3333333333, ans=0.125 +2024-07-29 10:42:40,809 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=270433.3333333333, ans=0.2 +2024-07-29 10:43:08,643 INFO [train.py:1114] (0/4) Epoch 20, batch 8650, loss[loss=0.2056, simple_loss=0.2949, pruned_loss=0.05811, over 4913.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2613, pruned_loss=0.03908, over 940483.82 frames. 
], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:43:28,697 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=270526.6666666667, ans=0.0 +2024-07-29 10:43:33,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=270540.0, ans=0.07 +2024-07-29 10:43:41,563 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=270553.3333333333, ans=0.2 +2024-07-29 10:43:41,992 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.889e+01 6.581e+01 7.578e+01 1.021e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-29 10:43:43,900 INFO [train.py:1114] (0/4) Epoch 20, batch 8700, loss[loss=0.1648, simple_loss=0.256, pruned_loss=0.03683, over 4768.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2618, pruned_loss=0.03925, over 938140.46 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:43:59,755 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=270593.3333333333, ans=0.125 +2024-07-29 10:44:05,616 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=270606.6666666667, ans=0.0 +2024-07-29 10:44:09,158 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.47 vs. limit=10.0 +2024-07-29 10:44:16,456 INFO [train.py:1114] (0/4) Epoch 20, batch 8750, loss[loss=0.2088, simple_loss=0.2913, pruned_loss=0.06319, over 4675.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2623, pruned_loss=0.03961, over 936763.12 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:44:25,010 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=270633.3333333333, ans=0.0 +2024-07-29 10:44:29,350 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270646.6666666667, ans=0.1 +2024-07-29 10:44:37,086 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=270660.0, ans=0.125 +2024-07-29 10:44:39,924 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270660.0, ans=0.1 +2024-07-29 10:44:41,848 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=270673.3333333333, ans=0.125 +2024-07-29 10:44:46,659 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=270673.3333333333, ans=0.025 +2024-07-29 10:44:55,919 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 5.802e+01 6.247e+01 7.031e+01 1.068e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 10:44:57,840 INFO [train.py:1114] (0/4) Epoch 20, batch 8800, loss[loss=0.1674, simple_loss=0.2622, pruned_loss=0.03627, over 4927.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.263, pruned_loss=0.03963, over 937632.53 frames. 
], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:45:13,162 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.36 vs. limit=6.0 +2024-07-29 10:45:14,816 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=270713.3333333333, ans=0.025 +2024-07-29 10:45:18,423 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=270726.6666666667, ans=0.1 +2024-07-29 10:45:23,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=270740.0, ans=0.125 +2024-07-29 10:45:25,662 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=270740.0, ans=0.125 +2024-07-29 10:45:28,028 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-07-29 10:45:29,806 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=270753.3333333333, ans=0.2 +2024-07-29 10:45:35,541 INFO [train.py:1114] (0/4) Epoch 20, batch 8850, loss[loss=0.171, simple_loss=0.2611, pruned_loss=0.04046, over 4608.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03948, over 932584.24 frames. ], batch size: 22, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:45:57,949 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=270780.0, ans=0.125 +2024-07-29 10:46:03,603 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=270793.3333333333, ans=0.125 +2024-07-29 10:46:20,672 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=270806.6666666667, ans=0.125 +2024-07-29 10:46:25,141 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.48 vs. limit=22.5 +2024-07-29 10:46:26,254 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=270820.0, ans=0.125 +2024-07-29 10:46:29,210 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-07-29 10:46:30,257 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.751e+01 6.544e+01 7.566e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-29 10:46:32,203 INFO [train.py:1114] (0/4) Epoch 20, batch 8900, loss[loss=0.1459, simple_loss=0.2303, pruned_loss=0.03078, over 4933.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2622, pruned_loss=0.03961, over 930731.40 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:46:35,041 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.46 vs. 
limit=22.5 +2024-07-29 10:46:35,420 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=270833.3333333333, ans=0.0 +2024-07-29 10:46:35,506 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=270833.3333333333, ans=0.125 +2024-07-29 10:46:40,758 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=270846.6666666667, ans=0.0 +2024-07-29 10:46:41,875 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=270846.6666666667, ans=0.0 +2024-07-29 10:46:46,196 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0 +2024-07-29 10:46:50,550 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=270860.0, ans=0.125 +2024-07-29 10:46:55,209 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.10 vs. limit=15.0 +2024-07-29 10:47:00,483 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=270886.6666666667, ans=0.125 +2024-07-29 10:47:06,557 INFO [train.py:1114] (0/4) Epoch 20, batch 8950, loss[loss=0.1753, simple_loss=0.2645, pruned_loss=0.04305, over 4426.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.262, pruned_loss=0.0397, over 931754.38 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:47:14,279 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=270900.0, ans=0.0 +2024-07-29 10:47:17,666 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270913.3333333333, ans=0.1 +2024-07-29 10:47:20,206 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=270913.3333333333, ans=0.0 +2024-07-29 10:47:23,388 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=270913.3333333333, ans=0.125 +2024-07-29 10:47:23,499 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=270913.3333333333, ans=0.5 +2024-07-29 10:47:49,979 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=270940.0, ans=0.2 +2024-07-29 10:49:33,585 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.554e+01 6.323e+01 6.834e+01 1.028e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 10:49:34,768 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=270953.3333333333, ans=0.0 +2024-07-29 10:49:37,830 INFO [train.py:1114] (0/4) Epoch 20, batch 9000, loss[loss=0.1774, simple_loss=0.261, pruned_loss=0.0469, over 4639.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2603, pruned_loss=0.03909, over 934514.40 frames. 
], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:49:37,831 INFO [train.py:1137] (0/4) Computing validation loss +2024-07-29 10:51:38,762 INFO [train.py:1146] (0/4) Epoch 20, validation: loss=0.1604, simple_loss=0.262, pruned_loss=0.02938, over 944034.00 frames. +2024-07-29 10:51:38,763 INFO [train.py:1147] (0/4) Maximum memory allocated so far is 4178MB +2024-07-29 10:51:48,801 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270966.6666666667, ans=0.1 +2024-07-29 10:52:44,331 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=271006.6666666667, ans=0.125 +2024-07-29 10:52:58,024 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=271006.6666666667, ans=0.0 +2024-07-29 10:53:27,063 INFO [train.py:1114] (0/4) Epoch 20, batch 9050, loss[loss=0.1271, simple_loss=0.2066, pruned_loss=0.02384, over 4510.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2602, pruned_loss=0.0393, over 935064.97 frames. ], batch size: 10, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:53:29,625 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.41 vs. limit=22.5 +2024-07-29 10:53:59,239 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=271060.0, ans=0.0 +2024-07-29 10:53:59,751 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=271060.0, ans=0.0 +2024-07-29 10:54:19,518 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=271086.6666666667, ans=0.025 +2024-07-29 10:54:22,478 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.730e+01 6.143e+01 7.007e+01 1.074e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 10:54:30,322 INFO [train.py:1114] (0/4) Epoch 20, batch 9100, loss[loss=0.1529, simple_loss=0.2563, pruned_loss=0.02478, over 4930.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2598, pruned_loss=0.03876, over 937386.77 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:54:47,275 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0 +2024-07-29 10:55:19,604 INFO [train.py:1114] (0/4) Epoch 20, batch 9150, loss[loss=0.1892, simple_loss=0.288, pruned_loss=0.0452, over 4812.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2609, pruned_loss=0.0393, over 935600.15 frames. 
], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:55:58,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=271193.3333333333, ans=0.125 +2024-07-29 10:56:46,172 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=271206.6666666667, ans=0.0 +2024-07-29 10:56:47,460 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=271220.0, ans=0.125 +2024-07-29 10:56:49,234 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=271220.0, ans=0.125 +2024-07-29 10:56:51,295 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=271220.0, ans=0.125 +2024-07-29 10:56:51,566 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=271220.0, ans=15.0 +2024-07-29 10:56:52,954 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.764e+01 6.206e+01 7.056e+01 9.843e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 10:56:55,336 INFO [train.py:1114] (0/4) Epoch 20, batch 9200, loss[loss=0.1694, simple_loss=0.2584, pruned_loss=0.04017, over 4848.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2606, pruned_loss=0.03899, over 937191.45 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:57:09,795 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.62 vs. limit=12.0 +2024-07-29 10:57:14,502 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271260.0, ans=0.1 +2024-07-29 10:57:14,522 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=271260.0, ans=0.125 +2024-07-29 10:57:23,144 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=271286.6666666667, ans=0.2 +2024-07-29 10:57:30,166 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271286.6666666667, ans=0.125 +2024-07-29 10:57:31,190 INFO [train.py:1114] (0/4) Epoch 20, batch 9250, loss[loss=0.1545, simple_loss=0.2519, pruned_loss=0.02858, over 4643.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.261, pruned_loss=0.03901, over 937851.26 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:57:49,657 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=271326.6666666667, ans=0.025 +2024-07-29 10:57:51,501 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.34 vs. 
limit=10.0 +2024-07-29 10:58:03,333 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=271353.3333333333, ans=0.125 +2024-07-29 10:58:11,150 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.724e+01 6.498e+01 7.478e+01 1.094e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 10:58:12,592 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271366.6666666667, ans=0.1 +2024-07-29 10:58:14,108 INFO [train.py:1114] (0/4) Epoch 20, batch 9300, loss[loss=0.156, simple_loss=0.242, pruned_loss=0.03496, over 4776.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2614, pruned_loss=0.0392, over 937552.34 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:58:23,872 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271366.6666666667, ans=0.1 +2024-07-29 10:58:24,588 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.73 vs. limit=6.0 +2024-07-29 10:58:25,064 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=271380.0, ans=0.0 +2024-07-29 10:58:25,782 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=271380.0, ans=0.07 +2024-07-29 10:58:40,135 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=271380.0, ans=0.0 +2024-07-29 10:58:55,127 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271393.3333333333, ans=0.1 +2024-07-29 10:58:57,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=271393.3333333333, ans=0.125 +2024-07-29 10:59:06,617 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271406.6666666667, ans=0.1 +2024-07-29 10:59:13,435 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271420.0, ans=0.1 +2024-07-29 10:59:18,707 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=271420.0, ans=0.0 +2024-07-29 11:01:37,053 INFO [train.py:1114] (0/4) Epoch 20, batch 9350, loss[loss=0.1565, simple_loss=0.2462, pruned_loss=0.03343, over 4804.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2615, pruned_loss=0.03922, over 934579.45 frames. 
], batch size: 11, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 11:02:08,137 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=271433.3333333333, ans=0.125 +2024-07-29 11:02:16,879 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271446.6666666667, ans=0.125 +2024-07-29 11:02:39,783 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=271446.6666666667, ans=0.025 +2024-07-29 11:02:54,236 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=271460.0, ans=0.04949747468305833 +2024-07-29 11:02:56,309 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.49 vs. limit=22.5 +2024-07-29 11:02:56,573 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=271473.3333333333, ans=0.0 +2024-07-29 11:03:07,486 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.70 vs. limit=15.0 +2024-07-29 11:03:20,671 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:03:22,478 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.751e+01 6.401e+01 7.888e+01 1.207e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 11:03:23,182 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=271486.6666666667, ans=0.015 +2024-07-29 11:03:24,395 INFO [train.py:1114] (0/4) Epoch 20, batch 9400, loss[loss=0.1535, simple_loss=0.235, pruned_loss=0.03603, over 4690.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.261, pruned_loss=0.03889, over 932903.94 frames. ], batch size: 13, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:03:28,863 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=12.0 +2024-07-29 11:03:34,546 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.38 vs. limit=15.0 +2024-07-29 11:03:41,190 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=271526.6666666667, ans=0.125 +2024-07-29 11:03:46,025 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=271540.0, ans=0.125 +2024-07-29 11:03:49,051 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.56 vs. limit=22.5 +2024-07-29 11:03:56,683 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271553.3333333333, ans=0.125 +2024-07-29 11:04:01,147 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=271553.3333333333, ans=0.125 +2024-07-29 11:04:02,881 INFO [train.py:1114] (0/4) Epoch 20, batch 9450, loss[loss=0.1564, simple_loss=0.2295, pruned_loss=0.0416, over 4810.00 frames. 
], tot_loss[loss=0.169, simple_loss=0.261, pruned_loss=0.03849, over 932663.14 frames. ], batch size: 11, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:04:08,128 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=271566.6666666667, ans=0.2 +2024-07-29 11:04:21,895 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=271593.3333333333, ans=0.0 +2024-07-29 11:04:24,379 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=271606.6666666667, ans=0.125 +2024-07-29 11:04:24,965 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=271606.6666666667, ans=0.125 +2024-07-29 11:04:26,215 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=271606.6666666667, ans=0.125 +2024-07-29 11:04:33,857 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.763e+01 6.160e+01 6.815e+01 1.077e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 11:04:35,846 INFO [train.py:1114] (0/4) Epoch 20, batch 9500, loss[loss=0.145, simple_loss=0.2313, pruned_loss=0.02937, over 4701.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2606, pruned_loss=0.03817, over 935042.10 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:04:39,638 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=271633.3333333333, ans=0.2 +2024-07-29 11:04:51,950 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=271660.0, ans=0.0 +2024-07-29 11:04:59,262 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.95 vs. limit=15.0 +2024-07-29 11:05:08,690 INFO [train.py:1114] (0/4) Epoch 20, batch 9550, loss[loss=0.1754, simple_loss=0.259, pruned_loss=0.0459, over 4780.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2605, pruned_loss=0.03818, over 932131.96 frames. 
], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:05:14,195 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=271713.3333333333, ans=0.125 +2024-07-29 11:05:23,291 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=271713.3333333333, ans=0.0 +2024-07-29 11:05:24,545 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=271713.3333333333, ans=0.0 +2024-07-29 11:05:25,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=271726.6666666667, ans=0.125 +2024-07-29 11:05:39,627 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=271726.6666666667, ans=0.025 +2024-07-29 11:05:39,712 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271726.6666666667, ans=0.1 +2024-07-29 11:05:39,730 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=271726.6666666667, ans=0.025 +2024-07-29 11:05:50,369 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=271740.0, ans=0.0 +2024-07-29 11:06:00,812 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.730e+01 6.530e+01 7.456e+01 1.001e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 11:06:00,968 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=271753.3333333333, ans=0.125 +2024-07-29 11:06:02,903 INFO [train.py:1114] (0/4) Epoch 20, batch 9600, loss[loss=0.2324, simple_loss=0.3053, pruned_loss=0.07976, over 3400.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2615, pruned_loss=0.03867, over 931060.40 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:03,574 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=271766.6666666667, ans=0.125 +2024-07-29 11:06:06,310 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=271766.6666666667, ans=0.125 +2024-07-29 11:06:24,152 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=271806.6666666667, ans=0.0 +2024-07-29 11:06:39,309 INFO [train.py:1114] (0/4) Epoch 20, batch 9650, loss[loss=0.1865, simple_loss=0.2881, pruned_loss=0.04247, over 4865.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2621, pruned_loss=0.03929, over 927156.30 frames. 
], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:48,769 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271846.6666666667, ans=0.1 +2024-07-29 11:07:04,283 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=271860.0, ans=0.125 +2024-07-29 11:07:11,634 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271873.3333333333, ans=0.125 +2024-07-29 11:07:12,675 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.17 vs. limit=22.5 +2024-07-29 11:07:17,557 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.56 vs. limit=15.0 +2024-07-29 11:07:19,779 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=271886.6666666667, ans=0.0 +2024-07-29 11:07:21,934 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.811e+01 6.589e+01 7.580e+01 1.190e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 11:07:23,852 INFO [train.py:1114] (0/4) Epoch 20, batch 9700, loss[loss=0.2158, simple_loss=0.3116, pruned_loss=0.05997, over 4363.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2626, pruned_loss=0.03966, over 925266.55 frames. ], batch size: 25, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:07:26,904 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=271900.0, ans=0.125 +2024-07-29 11:07:35,978 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=271913.3333333333, ans=0.0 +2024-07-29 11:07:36,937 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=271913.3333333333, ans=15.0 +2024-07-29 11:07:47,911 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271926.6666666667, ans=0.1 +2024-07-29 11:08:00,740 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=271953.3333333333, ans=0.0 +2024-07-29 11:08:12,206 INFO [train.py:1114] (0/4) Epoch 20, batch 9750, loss[loss=0.1597, simple_loss=0.2495, pruned_loss=0.03491, over 4696.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2623, pruned_loss=0.03953, over 925325.41 frames. 
], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:08:13,629 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=271966.6666666667, ans=0.125 +2024-07-29 11:08:41,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=271993.3333333333, ans=0.0 +2024-07-29 11:08:41,719 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/checkpoint-204000.pt +2024-07-29 11:08:48,969 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=272006.6666666667, ans=0.125 +2024-07-29 11:08:54,724 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=272006.6666666667, ans=0.125 +2024-07-29 11:08:56,108 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=272020.0, ans=0.07 +2024-07-29 11:09:06,964 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.621e+01 6.289e+01 7.582e+01 9.528e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 11:09:10,472 INFO [train.py:1114] (0/4) Epoch 20, batch 9800, loss[loss=0.1512, simple_loss=0.2377, pruned_loss=0.03235, over 4700.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2607, pruned_loss=0.03915, over 924902.51 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:09:11,352 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=272033.3333333333, ans=0.125 +2024-07-29 11:09:13,096 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=272033.3333333333, ans=0.125 +2024-07-29 11:09:19,356 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=272046.6666666667, ans=0.125 +2024-07-29 11:09:23,733 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=272060.0, ans=0.125 +2024-07-29 11:09:54,029 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=272073.3333333333, ans=0.0 +2024-07-29 11:09:54,503 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=272073.3333333333, ans=0.025 +2024-07-29 11:10:07,285 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=272086.6666666667, ans=0.025 +2024-07-29 11:10:07,426 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=272086.6666666667, ans=0.05 +2024-07-29 11:10:13,502 INFO [train.py:1114] (0/4) Epoch 20, batch 9850, loss[loss=0.2007, simple_loss=0.2833, pruned_loss=0.05909, over 4895.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2613, pruned_loss=0.03931, over 926712.02 frames. ], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:16,412 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=272100.0, ans=0.0 +2024-07-29 11:10:16,555 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.85 vs. 
limit=15.0 +2024-07-29 11:10:16,616 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0 +2024-07-29 11:10:27,301 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=272126.6666666667, ans=0.0 +2024-07-29 11:10:38,863 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:10:41,890 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=272153.3333333333, ans=0.0 +2024-07-29 11:10:41,917 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=272153.3333333333, ans=0.125 +2024-07-29 11:10:43,571 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.014e+01 6.927e+01 8.025e+01 1.186e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-29 11:10:45,518 INFO [train.py:1114] (0/4) Epoch 20, batch 9900, loss[loss=0.1837, simple_loss=0.2856, pruned_loss=0.04093, over 4842.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2628, pruned_loss=0.0403, over 926034.38 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:46,933 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=272166.6666666667, ans=0.0 +2024-07-29 11:10:53,016 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=272180.0, ans=0.0 +2024-07-29 11:11:08,727 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=272206.6666666667, ans=15.0 +2024-07-29 11:11:11,918 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=272220.0, ans=0.2 +2024-07-29 11:11:16,260 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=272233.3333333333, ans=0.125 +2024-07-29 11:11:16,911 INFO [train.py:1114] (0/4) Epoch 20, batch 9950, loss[loss=0.1303, simple_loss=0.2192, pruned_loss=0.02073, over 4812.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04075, over 929110.26 frames. 
], batch size: 11, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:11:18,345 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=272233.3333333333, ans=0.0 +2024-07-29 11:11:23,670 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=272246.6666666667, ans=0.0 +2024-07-29 11:11:31,309 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=272246.6666666667, ans=0.125 +2024-07-29 11:11:52,157 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=272273.3333333333, ans=0.125 +2024-07-29 11:11:55,052 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272273.3333333333, ans=0.1 +2024-07-29 11:11:58,668 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=272286.6666666667, ans=0.2 +2024-07-29 11:12:02,406 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.836e+01 6.511e+01 7.365e+01 1.166e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 11:12:04,279 INFO [train.py:1114] (0/4) Epoch 20, batch 10000, loss[loss=0.1665, simple_loss=0.2597, pruned_loss=0.03667, over 4649.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2649, pruned_loss=0.04128, over 926601.88 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:12:13,614 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=272300.0, ans=0.125 +2024-07-29 11:12:29,784 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.83 vs. limit=15.0 +2024-07-29 11:12:35,589 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=272340.0, ans=0.125 +2024-07-29 11:12:48,745 INFO [train.py:1114] (0/4) Epoch 20, batch 10050, loss[loss=0.2152, simple_loss=0.2913, pruned_loss=0.06954, over 3293.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2681, pruned_loss=0.04267, over 915114.26 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:12:50,532 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=272366.6666666667, ans=0.125 +2024-07-29 11:13:19,160 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=15.0 +2024-07-29 11:14:45,437 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.969e+01 6.772e+01 7.755e+01 1.002e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 11:14:47,489 INFO [train.py:1114] (0/4) Epoch 20, batch 10100, loss[loss=0.2308, simple_loss=0.3098, pruned_loss=0.0759, over 3116.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2724, pruned_loss=0.04682, over 863211.82 frames. 
], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:14:52,943 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272433.3333333333, ans=0.1 +2024-07-29 11:14:53,033 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=272433.3333333333, ans=0.125 +2024-07-29 11:15:05,815 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=272460.0, ans=0.025 +2024-07-29 11:15:06,569 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=272473.3333333333, ans=0.0 +2024-07-29 11:15:19,716 INFO [train.py:1114] (0/4) Epoch 20, batch 10150, loss[loss=0.2371, simple_loss=0.3122, pruned_loss=0.08095, over 3459.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2759, pruned_loss=0.04991, over 821610.39 frames. ], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:15:23,895 INFO [scaling.py:1120] (0/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:15:27,353 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=272513.3333333333, ans=0.125 +2024-07-29 11:15:48,695 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=272553.3333333333, ans=0.125 +2024-07-29 11:15:48,772 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=272553.3333333333, ans=0.125 +2024-07-29 11:15:53,607 WARNING [optim.py:487] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.562e+01 6.747e+01 7.203e+01 7.565e+01 9.241e+01, threshold=1.441e+02, percent-clipped=0.0 +2024-07-29 11:15:58,178 INFO [train.py:1114] (0/4) Epoch 20, batch 10200, loss[loss=0.2168, simple_loss=0.2998, pruned_loss=0.0669, over 3654.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2782, pruned_loss=0.05218, over 789741.98 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:15:58,212 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272566.6666666667, ans=0.1 +2024-07-29 11:18:56,038 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=272580.0, ans=0.125 +2024-07-29 11:18:56,608 INFO [scaling.py:214] (0/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=272580.0, ans=0.125 +2024-07-29 11:19:20,528 INFO [scaling.py:1024] (0/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0 +2024-07-29 11:19:22,617 INFO [checkpoint.py:75] (0/4) Saving checkpoint to zipformer/libri/exp/epoch-20.pt +2024-07-29 11:19:29,727 INFO [train.py:1387] (0/4) Done! 
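Both training logs in this diff follow the same icefall format: `[train.py:1114]` lines summarize each logging interval (`tot_loss[...]` is the running average over recent batches), `[optim.py:487]` warnings report grad-norm quartiles against the clipping threshold, and the `[scaling.py]` lines trace `ScheduledFloat` schedules and `Whitening` diagnostics. For a quick look at the training curve when only these text logs are checked in, the batch-summary lines are easy to scrape. The sketch below is illustrative and not part of icefall: the regex is written against the exact line format visible above, `parse_log` is a hypothetical helper name, and the path in the usage example is simply one of the files this diff adds.

```python
import re
from pathlib import Path

# Matches the "[train.py:1114] ... Epoch E, batch B, ... tot_loss[...]"
# summary lines seen in the logs above. Validation lines
# ("Epoch E, validation: ...") contain no "batch B," token, so they are skipped.
BATCH_RE = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), .*?"
    r"tot_loss\[loss=(?P<loss>[\d.]+), simple_loss=(?P<simple>[\d.]+), "
    r"pruned_loss=(?P<pruned>[\d.]+),"
)

def parse_log(path):
    """Yield (epoch, batch, tot_loss, simple_loss, pruned_loss) per summary line."""
    for line in Path(path).read_text().splitlines():
        m = BATCH_RE.search(line)
        if m:
            yield (int(m["epoch"]), int(m["batch"]),
                   float(m["loss"]), float(m["simple"]), float(m["pruned"]))

if __name__ == "__main__":
    # One of the log files added by this diff.
    log = ("zipformer/pretrained/non_ctc/non_causal/exp/log/"
           "log-train-2024-07-27-09-10-10-1")
    for epoch, batch, loss, simple, pruned in list(parse_log(log))[-3:]:
        print(f"epoch {epoch} batch {batch}: tot_loss={loss:.4f} "
              f"(simple={simple:.4f}, pruned={pruned:.4f})")
```

Feeding the parsed tuples into pandas or matplotlib recovers roughly the same curve the TensorBoard event files carry, which is convenient when, as here, only the plain-text logs are committed.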
diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-1 b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-1 new file mode 100644 index 0000000000000000000000000000000000000000..0778c09dce53a7d15a1b36a9b08cd9542b6e9864 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-1 @@ -0,0 +1,28629 @@ +2024-07-27 09:10:10,863 INFO [train.py:1182] (1/4) Training started +2024-07-27 09:10:10,864 INFO [train.py:1192] (1/4) Device: cuda:1 +2024-07-27 09:10:10,867 INFO [train.py:1210] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2658.int.cedar.computecanada.ca', 'IP address': '172.16.146.95'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('zipformer/libri/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 200.0, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-07-27 09:10:10,868 INFO 
[train.py:1212] (1/4) About to create model +2024-07-27 09:10:23,779 INFO [train.py:1216] (1/4) Number of model parameters: 65549011 +2024-07-27 09:10:24,710 INFO [train.py:1231] (1/4) Using DDP +2024-07-27 09:11:00,524 INFO [asr_datamodule.py:893] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-07-27 09:11:00,830 INFO [asr_datamodule.py:696] (1/4) Disable MUSAN +2024-07-27 09:11:00,830 INFO [asr_datamodule.py:714] (1/4) Enable SpecAugment +2024-07-27 09:11:00,830 INFO [asr_datamodule.py:715] (1/4) Time warp factor: 80 +2024-07-27 09:11:00,830 INFO [asr_datamodule.py:725] (1/4) Num frame mask: 10 +2024-07-27 09:11:00,830 INFO [asr_datamodule.py:738] (1/4) About to create train dataset +2024-07-27 09:11:00,830 INFO [asr_datamodule.py:765] (1/4) Using DynamicBucketingSampler. +2024-07-27 09:11:02,439 INFO [asr_datamodule.py:782] (1/4) About to create train dataloader +2024-07-27 09:11:02,446 INFO [asr_datamodule.py:910] (1/4) About to get dev-clean cuts +2024-07-27 09:11:02,592 INFO [asr_datamodule.py:917] (1/4) About to get dev-other cuts +2024-07-27 09:11:03,488 INFO [asr_datamodule.py:813] (1/4) About to create dev dataset +2024-07-27 09:11:03,816 INFO [asr_datamodule.py:830] (1/4) About to create dev dataloader +2024-07-27 09:11:03,816 INFO [train.py:1435] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-07-27 09:17:48,871 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=44.63 vs. limit=7.5 +2024-07-27 09:17:49,697 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 3355MB +2024-07-27 09:17:50,297 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 3355MB +2024-07-27 09:17:54,280 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 3355MB +2024-07-27 09:17:55,229 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 3355MB +2024-07-27 09:18:08,340 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=384, metric=97.64 vs. limit=5.0 +2024-07-27 09:18:08,506 INFO [scaling.py:1024] (1/4) Whitening: name=None, num_groups=1, num_channels=192, metric=42.29 vs. limit=7.5 +2024-07-27 09:18:08,566 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 3355MB +2024-07-27 09:18:09,384 INFO [train.py:1463] (1/4) Maximum memory allocated so far is 3355MB +2024-07-27 09:18:51,918 INFO [train.py:1114] (1/4) Epoch 1, batch 0, loss[loss=7.785, simple_loss=7.092, pruned_loss=6.916, over 4851.00 frames. ], tot_loss[loss=7.785, simple_loss=7.092, pruned_loss=6.916, over 4851.00 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 2.0 +2024-07-27 09:18:51,918 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 09:19:27,483 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=7.631, simple_loss=6.945, pruned_loss=6.846, over 944034.00 frames. +2024-07-27 09:19:27,484 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 3365MB +2024-07-27 09:19:43,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=30.03 vs. limit=7.5 +2024-07-27 09:19:52,382 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+02 9.392e+02 1.009e+03 1.270e+03 1.305e+03, threshold=4.037e+03, percent-clipped=0.0 +2024-07-27 09:19:53,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=157.65 vs. 
limit=4.002666666666666 +2024-07-27 09:19:56,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=13.333333333333334, ans=0.24925 +2024-07-27 09:20:09,140 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 2.100e+02 8.784e+02 1.111e+03 1.403e+03, threshold=3.513e+03, percent-clipped=0.0 +2024-07-27 09:20:10,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=147.19 vs. limit=7.51 +2024-07-27 09:20:19,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=37.63 vs. limit=7.51 +2024-07-27 09:20:34,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=214.83 vs. limit=7.515 +2024-07-27 09:20:41,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=348.47 vs. limit=7.54 +2024-07-27 09:20:41,321 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 1.821e+02 2.209e+02 8.784e+02 1.403e+03, threshold=8.837e+02, percent-clipped=0.0 +2024-07-27 09:20:54,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=331.82 vs. limit=5.026666666666666 +2024-07-27 09:21:31,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53.333333333333336, ans=0.4975 +2024-07-27 09:21:33,265 INFO [train.py:1114] (1/4) Epoch 1, batch 50, loss[loss=1.259, simple_loss=1.115, pruned_loss=1.288, over 4613.00 frames. ], tot_loss[loss=2.986, simple_loss=2.742, pruned_loss=2.372, over 206205.09 frames. ], batch size: 11, lr: 2.48e-02, grad_scale: 1.0 +2024-07-27 09:21:40,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=66.66666666666667, ans=7.55 +2024-07-27 09:21:41,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=24.39 vs. limit=7.525 +2024-07-27 09:21:41,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=280.38 vs. limit=7.525 +2024-07-27 09:21:48,781 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=248.17 vs. limit=7.56 +2024-07-27 09:21:58,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=362.96 vs. limit=7.53 +2024-07-27 09:21:58,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80.0, ans=0.29919999999999997 +2024-07-27 09:22:01,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=93.33333333333333, ans=0.495625 +2024-07-27 09:22:01,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=206.23 vs. 
limit=7.535 +2024-07-27 09:22:12,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=93.33333333333333, ans=5.058333333333334 +2024-07-27 09:22:17,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=365.66 vs. limit=7.535 +2024-07-27 09:22:27,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=250.83 vs. limit=7.54 +2024-07-27 09:22:35,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=106.66666666666667, ans=0.495 +2024-07-27 09:22:35,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106.66666666666667, ans=0.495 +2024-07-27 09:22:37,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=209.47 vs. limit=7.545 +2024-07-27 09:22:58,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=375.83 vs. limit=7.545 +2024-07-27 09:23:03,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=120.56 vs. limit=7.545 +2024-07-27 09:23:04,781 INFO [train.py:1114] (1/4) Epoch 1, batch 100, loss[loss=1.12, simple_loss=0.9733, pruned_loss=1.177, over 4631.00 frames. ], tot_loss[loss=2.045, simple_loss=1.85, pruned_loss=1.787, over 365546.27 frames. ], batch size: 12, lr: 2.70e-02, grad_scale: 2.0 +2024-07-27 09:23:06,810 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.579e+01 2.513e+01 6.174e+01 1.938e+02 1.403e+03, threshold=1.235e+02, percent-clipped=0.0 +2024-07-27 09:23:09,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=13.48 vs. limit=4.053333333333334 +2024-07-27 09:23:19,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=133.33333333333334, ans=0.2425 +2024-07-27 09:23:43,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=152.24 vs. limit=7.56 +2024-07-27 09:23:47,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=363.39 vs. limit=7.56 +2024-07-27 09:24:04,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=173.33333333333334, ans=0.7517333333333334 +2024-07-27 09:24:17,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=3.028 +2024-07-27 09:24:18,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=22.73 vs. limit=7.57 +2024-07-27 09:24:19,851 INFO [train.py:1114] (1/4) Epoch 1, batch 150, loss[loss=0.9561, simple_loss=0.8167, pruned_loss=1.014, over 4619.00 frames. 
], tot_loss[loss=1.655, simple_loss=1.476, pruned_loss=1.536, over 494287.40 frames. ], batch size: 11, lr: 2.93e-02, grad_scale: 2.0 +2024-07-27 09:24:20,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=54.58 vs. limit=7.575 +2024-07-27 09:24:25,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.96 vs. limit=7.65 +2024-07-27 09:24:26,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=136.98 vs. limit=7.575 +2024-07-27 09:24:28,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=20.15 vs. limit=5.05 +2024-07-27 09:24:36,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=134.26 vs. limit=7.66 +2024-07-27 09:24:40,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=74.45 vs. limit=7.58 +2024-07-27 09:24:44,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=27.52 vs. limit=4.085333333333334 +2024-07-27 09:24:47,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=226.66666666666666, ans=0.04929166666666667 +2024-07-27 09:24:47,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=52.90 vs. limit=7.585 +2024-07-27 09:24:51,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=33.63 vs. limit=7.67 +2024-07-27 09:24:58,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=240.0, ans=0.2365 +2024-07-27 09:25:02,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=14.30 vs. limit=4.096 +2024-07-27 09:25:02,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.whiten.whitening_limit, batch_count=240.0, ans=7.59 +2024-07-27 09:25:05,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240.0, ans=0.2976 +2024-07-27 09:25:05,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.40 vs. limit=5.12 +2024-07-27 09:25:10,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.14 vs. limit=7.595 +2024-07-27 09:25:12,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.76 vs. limit=7.69 +2024-07-27 09:25:22,975 INFO [train.py:1114] (1/4) Epoch 1, batch 200, loss[loss=1.068, simple_loss=0.91, pruned_loss=1.072, over 4516.00 frames. ], tot_loss[loss=1.437, simple_loss=1.267, pruned_loss=1.372, over 593944.73 frames. 
], batch size: 21, lr: 3.15e-02, grad_scale: 4.0 +2024-07-27 09:25:24,355 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.807e+01 2.398e+01 2.890e+01 3.614e+01 1.455e+02, threshold=5.780e+01, percent-clipped=1.0 +2024-07-27 09:25:28,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=266.6666666666667, ans=0.09833333333333334 +2024-07-27 09:25:30,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=145.02 vs. limit=7.6 +2024-07-27 09:25:32,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=3.042 +2024-07-27 09:25:50,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=280.0, ans=0.0937 +2024-07-27 09:25:56,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=15.79 vs. limit=4.112 +2024-07-27 09:26:05,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.13 vs. limit=7.72 +2024-07-27 09:26:05,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=113.18 vs. limit=5.1466666666666665 +2024-07-27 09:26:17,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=23.43 vs. limit=7.73 +2024-07-27 09:26:17,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=37.39 vs. limit=7.615 +2024-07-27 09:26:21,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=14.51 vs. limit=7.615 +2024-07-27 09:26:30,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=320.0, ans=0.2048 +2024-07-27 09:26:43,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=195.85 vs. limit=7.62 +2024-07-27 09:26:44,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=89.28 vs. limit=7.74 +2024-07-27 09:26:46,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=7.57 vs. limit=4.133333333333334 +2024-07-27 09:26:46,528 INFO [train.py:1114] (1/4) Epoch 1, batch 250, loss[loss=1.039, simple_loss=0.8774, pruned_loss=1.018, over 4650.00 frames. ], tot_loss[loss=1.301, simple_loss=1.136, pruned_loss=1.257, over 670603.67 frames. ], batch size: 16, lr: 3.38e-02, grad_scale: 4.0 +2024-07-27 09:26:52,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=333.3333333333333, ans=0.0925 +2024-07-27 09:26:54,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=99.36 vs. 
limit=7.625 +2024-07-27 09:26:55,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=223.86 vs. limit=7.625 +2024-07-27 09:26:59,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=206.49 vs. limit=7.63 +2024-07-27 09:27:04,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.13 vs. limit=5.086666666666667 +2024-07-27 09:27:05,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=7.63 +2024-07-27 09:27:06,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=346.6666666666667, ans=0.187 +2024-07-27 09:27:06,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=346.6666666666667, ans=0.04891666666666667 +2024-07-27 09:27:08,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=360.0, ans=0.483125 +2024-07-27 09:27:14,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.88 vs. limit=7.635 +2024-07-27 09:27:15,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=40.66 vs. limit=7.635 +2024-07-27 09:27:15,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=223.02 vs. limit=7.635 +2024-07-27 09:27:22,677 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.54 vs. limit=7.78 +2024-07-27 09:27:22,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=373.3333333333333, ans=7.64 +2024-07-27 09:27:22,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=19.86 vs. limit=7.64 +2024-07-27 09:27:26,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.79 vs. 
limit=4.149333333333333 +2024-07-27 09:27:26,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=373.3333333333333, ans=0.2962666666666667 +2024-07-27 09:27:30,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=386.6666666666667, ans=0.0913 +2024-07-27 09:27:31,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=386.6666666666667, ans=0.8864666666666667 +2024-07-27 09:27:32,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=386.6666666666667, ans=0.481875 +2024-07-27 09:27:34,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=72.80 vs. limit=7.79 +2024-07-27 09:27:38,725 INFO [train.py:1114] (1/4) Epoch 1, batch 300, loss[loss=0.9919, simple_loss=0.8284, pruned_loss=0.9593, over 4819.00 frames. ], tot_loss[loss=1.211, simple_loss=1.047, pruned_loss=1.174, over 730084.78 frames. ], batch size: 15, lr: 3.60e-02, grad_scale: 8.0 +2024-07-27 09:27:40,100 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.723e+01 3.145e+01 3.570e+01 4.574e+01 1.008e+02, threshold=7.140e+01, percent-clipped=16.0 +2024-07-27 09:27:42,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=400.0, ans=0.48125 +2024-07-27 09:27:43,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=26.22 vs. limit=7.65 +2024-07-27 09:27:53,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.49 vs. limit=5.206666666666667 +2024-07-27 09:27:53,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=42.38 vs. limit=7.655 +2024-07-27 09:27:57,293 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=3.707e-01 +2024-07-27 09:27:57,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.16 vs. limit=5.1033333333333335 +2024-07-27 09:27:58,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.85 vs. limit=7.81 +2024-07-27 09:27:58,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=413.3333333333333, ans=0.1845 +2024-07-27 09:28:03,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=56.77 vs. limit=5.213333333333333 +2024-07-27 09:28:07,805 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.10 vs. limit=5.1066666666666665 +2024-07-27 09:28:08,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=33.31 vs. 
limit=7.82 +2024-07-27 09:28:15,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.85 vs. limit=7.83 +2024-07-27 09:28:15,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=45.87 vs. limit=7.83 +2024-07-27 09:28:16,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=45.23 vs. limit=7.83 +2024-07-27 09:28:16,527 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=49.62 vs. limit=7.665 +2024-07-27 09:28:17,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.23 vs. limit=7.83 +2024-07-27 09:28:18,778 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=5.779e+00 +2024-07-27 09:28:20,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=8.86 vs. limit=7.84 +2024-07-27 09:28:22,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.15 vs. limit=7.84 +2024-07-27 09:28:22,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=134.12 vs. limit=5.226666666666667 +2024-07-27 09:28:25,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=453.3333333333333, ans=0.183 +2024-07-27 09:28:25,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.88 vs. limit=4.181333333333333 +2024-07-27 09:28:26,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=16.05 vs. limit=7.67 +2024-07-27 09:28:29,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=453.3333333333333, ans=0.47875 +2024-07-27 09:28:30,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.66 vs. limit=3.07 +2024-07-27 09:28:31,006 INFO [train.py:1114] (1/4) Epoch 1, batch 350, loss[loss=0.9025, simple_loss=0.7444, pruned_loss=0.8659, over 4941.00 frames. ], tot_loss[loss=1.151, simple_loss=0.9863, pruned_loss=1.114, over 775972.51 frames. ], batch size: 12, lr: 3.83e-02, grad_scale: 8.0 +2024-07-27 09:28:41,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=58.94 vs. limit=7.86 +2024-07-27 09:28:44,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=36.78 vs. limit=7.86 +2024-07-27 09:28:55,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=22.90 vs. 
limit=5.246666666666667 +2024-07-27 09:28:57,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=112.41 vs. limit=7.685 +2024-07-27 09:29:00,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=42.65 vs. limit=7.685 +2024-07-27 09:29:01,562 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=6.27 vs. limit=5.0 +2024-07-27 09:29:02,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=8.77 vs. limit=4.2026666666666666 +2024-07-27 09:29:05,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=506.6666666666667, ans=0.47625 +2024-07-27 09:29:23,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=520.0, ans=0.475625 +2024-07-27 09:29:24,220 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.73 vs. limit=5.13 +2024-07-27 09:29:27,544 INFO [train.py:1114] (1/4) Epoch 1, batch 400, loss[loss=0.9633, simple_loss=0.7874, pruned_loss=0.9087, over 4692.00 frames. ], tot_loss[loss=1.103, simple_loss=0.9368, pruned_loss=1.062, over 813232.59 frames. ], batch size: 13, lr: 4.05e-02, grad_scale: 16.0 +2024-07-27 09:29:29,105 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.881e+01 3.675e+01 4.330e+01 5.451e+01 8.565e+01, threshold=8.660e+01, percent-clipped=3.0 +2024-07-27 09:29:35,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=21.13 vs. limit=5.266666666666667 +2024-07-27 09:29:35,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=533.3333333333334, ans=0.29466666666666663 +2024-07-27 09:29:42,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=38.36 vs. limit=7.705 +2024-07-27 09:29:42,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.37 vs. limit=7.91 +2024-07-27 09:29:45,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.97 vs. limit=5.273333333333333 +2024-07-27 09:29:53,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=560.0, ans=0.47375 +2024-07-27 09:29:53,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.76 vs. limit=7.71 +2024-07-27 09:30:03,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=573.3333333333334, ans=0.08710000000000001 +2024-07-27 09:30:06,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=18.04 vs. 
limit=7.715 +2024-07-27 09:30:17,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=17.17 vs. limit=7.715 +2024-07-27 09:31:09,337 INFO [train.py:1114] (1/4) Epoch 1, batch 450, loss[loss=1.08, simple_loss=0.8784, pruned_loss=0.9911, over 4637.00 frames. ], tot_loss[loss=1.074, simple_loss=0.9043, pruned_loss=1.024, over 838489.05 frames. ], batch size: 13, lr: 4.28e-02, grad_scale: 16.0 +2024-07-27 09:31:16,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.73 vs. limit=5.15 +2024-07-27 09:31:18,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=613.3333333333334, ans=0.09616666666666668 +2024-07-27 09:31:18,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=613.3333333333334, ans=0.17700000000000002 +2024-07-27 09:31:28,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.63 vs. limit=5.153333333333333 +2024-07-27 09:31:32,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.32 vs. limit=7.96 +2024-07-27 09:31:45,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=626.6666666666666, ans=0.470625 +2024-07-27 09:31:45,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=2.40 vs. limit=3.094 +2024-07-27 09:36:54,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.88 vs. limit=7.97 +2024-07-27 09:37:03,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=26.83 vs. limit=7.74 +2024-07-27 09:37:04,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.73 vs. limit=7.74 +2024-07-27 09:37:08,316 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.59 vs. limit=7.98 +2024-07-27 09:37:10,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=653.3333333333334, ans=0.469375 +2024-07-27 09:37:18,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=653.3333333333334, ans=0.41833333333333333 +2024-07-27 09:37:22,226 INFO [train.py:1114] (1/4) Epoch 1, batch 500, loss[loss=1.015, simple_loss=0.8305, pruned_loss=0.8868, over 4678.00 frames. ], tot_loss[loss=1.048, simple_loss=0.8762, pruned_loss=0.9852, over 860974.01 frames. 
], batch size: 15, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:37:25,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+01 3.795e+01 4.382e+01 5.151e+01 8.333e+01, threshold=8.764e+01, percent-clipped=0.0 +2024-07-27 09:37:38,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.77 vs. limit=8.0 +2024-07-27 09:38:05,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=33.27 vs. limit=8.01 +2024-07-27 09:38:15,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=693.3333333333334, ans=0.7569333333333333 +2024-07-27 09:38:29,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.63 vs. limit=5.3533333333333335 +2024-07-27 09:38:30,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.72 vs. limit=8.03 +2024-07-27 09:38:39,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=720.0, ans=0.17300000000000001 +2024-07-27 09:38:44,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=14.36 vs. limit=7.77 +2024-07-27 09:38:50,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=720.0, ans=0.2928 +2024-07-27 09:38:55,246 INFO [train.py:1114] (1/4) Epoch 1, batch 550, loss[loss=0.9734, simple_loss=0.7951, pruned_loss=0.8271, over 4656.00 frames. ], tot_loss[loss=1.029, simple_loss=0.8558, pruned_loss=0.952, over 876875.88 frames. ], batch size: 17, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:38:55,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=733.3333333333334, ans=0.29266666666666663 +2024-07-27 09:38:55,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.61 vs. limit=7.775 +2024-07-27 09:38:59,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.31 vs. limit=7.775 +2024-07-27 09:39:07,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=746.6666666666666, ans=0.46499999999999997 +2024-07-27 09:39:13,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=16.58 vs. 
limit=7.78 +2024-07-27 09:39:15,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=760.0, ans=0.1715 +2024-07-27 09:39:21,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=760.0, ans=5.19 +2024-07-27 09:39:33,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=786.6666666666666, ans=0.04754166666666667 +2024-07-27 09:39:40,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=786.6666666666666, ans=0.463125 +2024-07-27 09:39:41,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=786.6666666666666, ans=0.2921333333333333 +2024-07-27 09:39:42,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=51.16 vs. limit=7.8 +2024-07-27 09:39:43,021 INFO [train.py:1114] (1/4) Epoch 1, batch 600, loss[loss=0.8742, simple_loss=0.7075, pruned_loss=0.7371, over 4626.00 frames. ], tot_loss[loss=1.018, simple_loss=0.8409, pruned_loss=0.9255, over 891817.78 frames. ], batch size: 16, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:39:43,869 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+01 6.137e+01 8.087e+01 1.069e+02 3.258e+02, threshold=1.617e+02, percent-clipped=41.0 +2024-07-27 09:39:44,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.84 vs. limit=4.32 +2024-07-27 09:39:45,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.51 vs. limit=7.8 +2024-07-27 09:39:47,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=37.88 vs. limit=8.1 +2024-07-27 09:39:55,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=8.17 vs. limit=4.325333333333333 +2024-07-27 09:40:00,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.41 vs. limit=7.805 +2024-07-27 09:40:00,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.96 vs. limit=7.805 +2024-07-27 09:40:05,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=826.6666666666666, ans=0.46125 +2024-07-27 09:40:06,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.74 vs. limit=4.330666666666667 +2024-07-27 09:40:08,220 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.02 vs. limit=5.413333333333333 +2024-07-27 09:40:08,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=34.69 vs. 
limit=8.12 +2024-07-27 09:40:09,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=26.49 vs. limit=7.81 +2024-07-27 09:40:10,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.35 vs. limit=5.413333333333333 +2024-07-27 09:40:10,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=44.50 vs. limit=7.81 +2024-07-27 09:40:13,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=36.45 vs. limit=8.13 +2024-07-27 09:40:15,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=35.27 vs. limit=7.815 +2024-07-27 09:40:21,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=853.3333333333334, ans=7.82 +2024-07-27 09:40:23,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=853.3333333333334, ans=0.46 +2024-07-27 09:40:24,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.90 vs. limit=5.0 +2024-07-27 09:40:26,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=16.97 vs. limit=7.82 +2024-07-27 09:40:28,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=23.57 vs. limit=7.82 +2024-07-27 09:40:30,100 INFO [train.py:1114] (1/4) Epoch 1, batch 650, loss[loss=1.007, simple_loss=0.8056, pruned_loss=0.8464, over 4751.00 frames. ], tot_loss[loss=1.002, simple_loss=0.8229, pruned_loss=0.8962, over 903647.87 frames. ], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:40:32,406 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=30.08 vs. limit=7.825 +2024-07-27 09:40:33,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=7.825 +2024-07-27 09:40:34,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=866.6666666666666, ans=0.29133333333333333 +2024-07-27 09:40:39,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.75 vs. limit=7.83 +2024-07-27 09:40:41,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.60 vs. limit=7.83 +2024-07-27 09:40:44,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.30 vs. 
limit=7.83 +2024-07-27 09:40:56,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=893.3333333333334, ans=0.8687333333333334 +2024-07-27 09:41:02,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=8.10 vs. limit=4.362666666666667 +2024-07-27 09:41:02,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=906.6666666666666, ans=7.84 +2024-07-27 09:41:03,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=906.6666666666666, ans=0.4575 +2024-07-27 09:41:32,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=36.63 vs. limit=7.845 +2024-07-27 09:41:33,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=920.0, ans=0.1655 +2024-07-27 09:41:37,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=920.0, ans=5.575 +2024-07-27 09:41:40,639 INFO [train.py:1114] (1/4) Epoch 1, batch 700, loss[loss=0.9396, simple_loss=0.7537, pruned_loss=0.7653, over 4643.00 frames. ], tot_loss[loss=0.9962, simple_loss=0.8138, pruned_loss=0.8755, over 911548.71 frames. ], batch size: 12, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:41:41,493 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.672e+01 9.322e+01 1.196e+02 1.686e+02 3.909e+02, threshold=2.392e+02, percent-clipped=30.0 +2024-07-27 09:41:41,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=933.3333333333334, ans=0.165 +2024-07-27 09:41:47,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=11.11 vs. limit=7.85 +2024-07-27 09:41:52,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=8.23 vs. limit=5.233333333333333 +2024-07-27 09:41:58,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.78 vs. limit=5.473333333333334 +2024-07-27 09:41:58,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.52 vs. limit=8.21 +2024-07-27 09:42:16,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.10 vs. limit=7.855 +2024-07-27 09:42:17,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.26 vs. limit=5.48 +2024-07-27 09:42:19,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=960.0, ans=0.2904 +2024-07-27 09:42:19,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.89 vs. 
limit=5.24 +2024-07-27 09:42:21,728 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=2.544e-03 +2024-07-27 09:42:23,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=31.08 vs. limit=8.22 +2024-07-27 09:42:26,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=973.3333333333334, ans=0.8659333333333333 +2024-07-27 09:42:28,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=973.3333333333334, ans=0.1635 +2024-07-27 09:42:28,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=973.3333333333334, ans=0.2902666666666667 +2024-07-27 09:42:40,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.72 vs. limit=8.24 +2024-07-27 09:42:41,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=9.34 vs. limit=7.87 +2024-07-27 09:42:44,012 INFO [train.py:1114] (1/4) Epoch 1, batch 750, loss[loss=0.985, simple_loss=0.7909, pruned_loss=0.7822, over 4697.00 frames. ], tot_loss[loss=0.9841, simple_loss=0.8018, pruned_loss=0.847, over 918403.77 frames. ], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:42:44,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.63 vs. limit=7.875 +2024-07-27 09:42:48,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.58 vs. limit=8.25 +2024-07-27 09:42:54,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=9.35 vs. limit=7.88 +2024-07-27 09:42:55,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=1013.3333333333334, ans=0.0772 +2024-07-27 09:42:55,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=1013.3333333333334, ans=0.8645333333333334 +2024-07-27 09:42:57,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1013.3333333333334, ans=0.4525 +2024-07-27 09:42:57,997 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=1.582e-01 +2024-07-27 09:43:02,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-07-27 09:43:04,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.95 vs. 
limit=8.27 +2024-07-27 09:43:04,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-07-27 09:43:08,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1026.6666666666667, ans=0.28973333333333334 +2024-07-27 09:43:10,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.41 vs. limit=7.89 +2024-07-27 09:43:10,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=37.75 vs. limit=7.89 +2024-07-27 09:43:10,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=7.89 +2024-07-27 09:43:27,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.92 vs. limit=4.421333333333333 +2024-07-27 09:43:27,689 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:43:28,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=25.35 vs. limit=7.895 +2024-07-27 09:43:28,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=1053.3333333333333, ans=0.04670833333333334 +2024-07-27 09:43:33,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.29 vs. limit=7.895 +2024-07-27 09:43:35,565 INFO [train.py:1114] (1/4) Epoch 1, batch 800, loss[loss=0.8789, simple_loss=0.7185, pruned_loss=0.6607, over 4849.00 frames. ], tot_loss[loss=0.9721, simple_loss=0.791, pruned_loss=0.8182, over 923217.70 frames. ], batch size: 12, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:43:36,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 7.305e+01 9.106e+01 1.068e+02 1.961e+02, threshold=1.821e+02, percent-clipped=0.0 +2024-07-27 09:43:37,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1066.6666666666667, ans=0.45 +2024-07-27 09:43:42,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=1066.6666666666667, ans=0.16 +2024-07-27 09:43:42,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=1066.6666666666667, ans=0.09333333333333334 +2024-07-27 09:43:43,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=1080.0, ans=0.1595 +2024-07-27 09:43:47,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=23.22 vs. 
limit=7.905 +2024-07-27 09:44:40,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.whiten.whitening_limit, batch_count=1093.3333333333333, ans=4.437333333333333 +2024-07-27 09:44:42,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.76 vs. limit=8.33 +2024-07-27 09:44:44,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=1106.6666666666667, ans=0.04654166666666667 +2024-07-27 09:44:47,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=7.96 vs. limit=4.442666666666667 +2024-07-27 09:44:54,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.35 vs. limit=7.92 +2024-07-27 09:45:05,386 INFO [train.py:1114] (1/4) Epoch 1, batch 850, loss[loss=0.9093, simple_loss=0.7492, pruned_loss=0.6614, over 4665.00 frames. ], tot_loss[loss=0.9539, simple_loss=0.7772, pruned_loss=0.783, over 927494.17 frames. ], batch size: 14, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:45:58,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=7.04 vs. limit=7.925 +2024-07-27 09:46:04,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=1146.6666666666667, ans=0.44625 +2024-07-27 09:46:05,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=1146.6666666666667, ans=0.8598666666666667 +2024-07-27 09:46:13,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.51 vs. limit=7.93 +2024-07-27 09:46:20,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.97 vs. limit=7.935 +2024-07-27 09:46:49,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.85 vs. limit=7.94 +2024-07-27 09:46:49,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=1173.3333333333333, ans=0.156 +2024-07-27 09:46:52,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.87 vs. limit=7.945 +2024-07-27 09:47:00,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=1186.6666666666667, ans=0.444375 +2024-07-27 09:47:00,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=7.73 vs. limit=7.945 +2024-07-27 09:47:01,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.81 vs. limit=5.6 +2024-07-27 09:47:01,754 INFO [train.py:1114] (1/4) Epoch 1, batch 900, loss[loss=0.8166, simple_loss=0.6746, pruned_loss=0.5814, over 4859.00 frames. ], tot_loss[loss=0.932, simple_loss=0.7611, pruned_loss=0.7461, over 928129.09 frames. 
], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:05,077 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.472e+01 6.615e+01 8.339e+01 1.626e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-27 09:47:08,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=7.61 vs. limit=4.48 +2024-07-27 09:47:09,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.19 vs. limit=7.95 +2024-07-27 09:47:10,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=1200.0, ans=0.35 +2024-07-27 09:47:10,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.86 vs. limit=7.95 +2024-07-27 09:47:12,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1213.3333333333333, ans=0.443125 +2024-07-27 09:47:17,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=1213.3333333333333, ans=0.1545 +2024-07-27 09:47:24,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.55 vs. limit=8.42 +2024-07-27 09:47:26,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=1226.6666666666667, ans=0.4425 +2024-07-27 09:47:32,247 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:47:33,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.60 vs. limit=7.965 +2024-07-27 09:47:34,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=1240.0, ans=0.21860000000000002 +2024-07-27 09:47:39,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=1253.3333333333333, ans=0.8561333333333334 +2024-07-27 09:47:42,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.17 vs. limit=8.44 +2024-07-27 09:47:42,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.74 vs. limit=5.3133333333333335 +2024-07-27 09:47:46,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.30 vs. limit=8.44 +2024-07-27 09:47:47,817 INFO [train.py:1114] (1/4) Epoch 1, batch 950, loss[loss=0.756, simple_loss=0.634, pruned_loss=0.5165, over 4777.00 frames. ], tot_loss[loss=0.9064, simple_loss=0.7431, pruned_loss=0.7067, over 929884.35 frames. ], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:49,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.61 vs. 
limit=7.975 +2024-07-27 09:47:50,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.22 vs. limit=3.19 +2024-07-27 09:47:54,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.44 vs. limit=8.45 +2024-07-27 09:47:55,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=7.975 +2024-07-27 09:48:01,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1280.0, ans=0.44 +2024-07-27 09:48:09,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=1293.3333333333333, ans=0.439375 +2024-07-27 09:48:09,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=1293.3333333333333, ans=0.0709 +2024-07-27 09:48:12,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=1293.3333333333333, ans=0.1515 +2024-07-27 09:48:17,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.30 vs. limit=3.196 +2024-07-27 09:48:17,932 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.90 vs. limit=5.326666666666667 +2024-07-27 09:48:18,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.76 vs. limit=5.653333333333333 +2024-07-27 09:48:20,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=1306.6666666666667, ans=0.43875 +2024-07-27 09:48:22,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1306.6666666666667, ans=0.2869333333333333 +2024-07-27 09:48:26,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.38 vs. limit=5.326666666666667 +2024-07-27 09:48:26,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1306.6666666666667, ans=0.2869333333333333 +2024-07-27 09:48:31,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=1320.0, ans=0.438125 +2024-07-27 09:48:33,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.16 vs. limit=5.66 +2024-07-27 09:48:35,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.71 vs. limit=8.49 +2024-07-27 09:48:38,206 INFO [train.py:1114] (1/4) Epoch 1, batch 1000, loss[loss=0.6918, simple_loss=0.5933, pruned_loss=0.4487, over 4965.00 frames. ], tot_loss[loss=0.8809, simple_loss=0.7258, pruned_loss=0.6686, over 929630.34 frames. 
], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:48:39,234 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.604e+01 7.221e+01 8.711e+01 1.557e+02, threshold=1.444e+02, percent-clipped=4.0 +2024-07-27 09:48:44,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1333.3333333333333, ans=0.4375 +2024-07-27 09:48:45,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=1333.3333333333333, ans=0.22 +2024-07-27 09:48:47,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.97 vs. limit=8.51 +2024-07-27 09:48:48,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.80 vs. limit=8.51 +2024-07-27 09:48:54,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=5.43 vs. limit=5.673333333333334 +2024-07-27 09:48:58,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.32 vs. limit=8.52 +2024-07-27 09:49:07,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=4.549333333333333 +2024-07-27 09:49:21,065 INFO [train.py:1114] (1/4) Epoch 1, batch 1050, loss[loss=0.7987, simple_loss=0.6816, pruned_loss=0.5168, over 4869.00 frames. ], tot_loss[loss=0.8532, simple_loss=0.707, pruned_loss=0.6305, over 931821.56 frames. ], batch size: 14, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:49:27,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=1400.0, ans=0.434375 +2024-07-27 09:49:33,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.60 vs. limit=8.56 +2024-07-27 09:49:53,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.31 vs. limit=8.035 +2024-07-27 09:49:54,497 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:49:58,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=1440.0, ans=0.4325 +2024-07-27 09:50:10,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.93 vs. limit=8.04 +2024-07-27 09:50:20,930 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.88 vs. limit=5.726666666666667 +2024-07-27 09:50:21,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=1453.3333333333333, ans=0.431875 +2024-07-27 09:50:30,339 INFO [train.py:1114] (1/4) Epoch 1, batch 1100, loss[loss=0.6543, simple_loss=0.5629, pruned_loss=0.4136, over 4899.00 frames. ], tot_loss[loss=0.825, simple_loss=0.688, pruned_loss=0.5938, over 934480.39 frames. 
], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:50:31,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+01 6.586e+01 7.875e+01 9.417e+01 1.858e+02, threshold=1.575e+02, percent-clipped=4.0 +2024-07-27 09:50:32,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=1466.6666666666667, ans=0.2853333333333333 +2024-07-27 09:50:50,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.31 vs. limit=8.620000000000001 +2024-07-27 09:50:53,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.40 vs. limit=8.620000000000001 +2024-07-27 09:50:54,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=1493.3333333333333, ans=0.43 +2024-07-27 09:50:55,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. limit=8.06 +2024-07-27 09:50:57,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.28 vs. limit=8.629999999999999 +2024-07-27 09:50:57,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=1506.6666666666667, ans=0.2849333333333333 +2024-07-27 09:51:06,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.31 vs. limit=8.64 +2024-07-27 09:51:07,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.46 vs. limit=5.76 +2024-07-27 09:51:13,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=1520.0, ans=0.14300000000000002 +2024-07-27 09:51:14,857 INFO [train.py:1114] (1/4) Epoch 1, batch 1150, loss[loss=0.6944, simple_loss=0.5959, pruned_loss=0.437, over 4886.00 frames. ], tot_loss[loss=0.7989, simple_loss=0.6705, pruned_loss=0.5607, over 934064.03 frames. ], batch size: 13, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:51:38,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.72 vs. limit=8.66 +2024-07-27 09:51:43,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.34 vs. limit=8.67 +2024-07-27 09:51:44,048 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.76 vs. limit=8.085 +2024-07-27 09:51:49,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=1560.0, ans=0.426875 +2024-07-27 09:51:52,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1573.3333333333333, ans=0.28426666666666667 +2024-07-27 09:51:52,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.09 vs. 
limit=5.3933333333333335 +2024-07-27 09:51:52,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=1573.3333333333333, ans=0.09016666666666667 +2024-07-27 09:51:55,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.36 vs. limit=8.09 +2024-07-27 09:51:56,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=1573.3333333333333, ans=0.09016666666666667 +2024-07-27 09:51:57,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=1573.3333333333333, ans=5.786666666666667 +2024-07-27 09:52:06,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.08 vs. limit=5.793333333333333 +2024-07-27 09:52:09,179 INFO [train.py:1114] (1/4) Epoch 1, batch 1200, loss[loss=0.6568, simple_loss=0.5782, pruned_loss=0.3939, over 4871.00 frames. ], tot_loss[loss=0.7748, simple_loss=0.6547, pruned_loss=0.5305, over 933115.84 frames. ], batch size: 14, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:10,010 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.977e+01 8.267e+01 1.004e+02 1.485e+02, threshold=1.653e+02, percent-clipped=0.0 +2024-07-27 09:52:21,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=1613.3333333333333, ans=0.1395 +2024-07-27 09:52:22,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=1613.3333333333333, ans=6.008333333333333 +2024-07-27 09:52:35,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.98 vs. limit=8.73 +2024-07-27 09:52:38,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=1640.0, ans=0.1385 +2024-07-27 09:52:41,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=1640.0, ans=0.8426 +2024-07-27 09:52:44,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=1640.0, ans=0.1385 +2024-07-27 09:52:49,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.97 vs. limit=8.74 +2024-07-27 09:52:53,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.68 vs. limit=8.74 +2024-07-27 09:52:53,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.40 vs. 
limit=8.74 +2024-07-27 09:52:54,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1653.3333333333333, ans=0.4225 +2024-07-27 09:52:55,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=1666.6666666666667, ans=0.421875 +2024-07-27 09:52:55,867 INFO [train.py:1114] (1/4) Epoch 1, batch 1250, loss[loss=0.77, simple_loss=0.6559, pruned_loss=0.4815, over 4801.00 frames. ], tot_loss[loss=0.7516, simple_loss=0.6397, pruned_loss=0.5022, over 937164.83 frames. ], batch size: 15, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:58,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=8.125 +2024-07-27 09:53:03,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.93 vs. limit=5.833333333333333 +2024-07-27 09:53:10,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.21 vs. limit=5.84 +2024-07-27 09:53:11,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.76 vs. limit=8.13 +2024-07-27 09:53:17,477 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:53:18,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.55 vs. limit=8.135 +2024-07-27 09:53:22,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=1706.6666666666667, ans=0.23293333333333333 +2024-07-27 09:53:24,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1706.6666666666667, ans=0.42 +2024-07-27 09:53:27,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.16 vs. limit=8.78 +2024-07-27 09:53:36,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=1720.0, ans=0.15325 +2024-07-27 09:53:43,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1720.0, ans=0.419375 +2024-07-27 09:53:57,256 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:53:59,477 INFO [train.py:1114] (1/4) Epoch 1, batch 1300, loss[loss=0.6476, simple_loss=0.569, pruned_loss=0.3846, over 4727.00 frames. ], tot_loss[loss=0.7251, simple_loss=0.6215, pruned_loss=0.4739, over 938657.11 frames. 
], batch size: 19, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:53:59,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=1733.3333333333333, ans=0.2826666666666667 +2024-07-27 09:54:00,177 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.513e+01 6.459e+01 7.334e+01 8.641e+01 1.550e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 09:54:01,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.66 vs. limit=8.8 +2024-07-27 09:54:07,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.57 vs. limit=5.433333333333334 +2024-07-27 09:54:12,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=1733.3333333333333, ans=0.41875 +2024-07-27 09:54:15,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=1746.6666666666667, ans=0.41812499999999997 +2024-07-27 09:54:37,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.13 vs. limit=5.88 +2024-07-27 09:54:39,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.37 vs. limit=8.82 +2024-07-27 09:54:42,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.65 vs. limit=5.443333333333333 +2024-07-27 09:55:15,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=1786.6666666666667, ans=0.7678666666666667 +2024-07-27 09:55:29,125 INFO [train.py:1114] (1/4) Epoch 1, batch 1350, loss[loss=0.5466, simple_loss=0.502, pruned_loss=0.3025, over 4758.00 frames. ], tot_loss[loss=0.7022, simple_loss=0.6064, pruned_loss=0.4489, over 940606.07 frames. ], batch size: 13, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:55:49,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.20 vs. limit=8.18 +2024-07-27 09:55:51,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.57 vs. limit=8.18 +2024-07-27 09:56:19,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.77 vs. limit=4.736 +2024-07-27 09:56:25,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.28 vs. limit=3.278 +2024-07-27 09:56:25,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1853.3333333333333, ans=0.28146666666666664 +2024-07-27 09:56:32,586 INFO [train.py:1114] (1/4) Epoch 1, batch 1400, loss[loss=0.4995, simple_loss=0.4687, pruned_loss=0.2669, over 4710.00 frames. ], tot_loss[loss=0.6818, simple_loss=0.593, pruned_loss=0.427, over 942409.06 frames. 
], batch size: 11, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:56:33,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.54 vs. limit=5.466666666666667 +2024-07-27 09:56:33,349 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 7.358e+01 8.189e+01 9.683e+01 1.850e+02, threshold=1.638e+02, percent-clipped=1.0 +2024-07-27 09:56:33,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=1866.6666666666667, ans=0.13 +2024-07-27 09:56:35,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=1866.6666666666667, ans=6.166666666666667 +2024-07-27 09:56:46,604 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.87 vs. limit=8.9 +2024-07-27 09:56:53,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.39 vs. limit=8.91 +2024-07-27 09:57:00,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=1893.3333333333333, ans=4.757333333333333 +2024-07-27 09:57:11,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.82 vs. limit=8.215 +2024-07-27 09:57:13,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.38 vs. limit=4.381333333333333 +2024-07-27 09:57:13,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1906.6666666666667, ans=0.410625 +2024-07-27 09:57:15,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.56 vs. limit=8.93 +2024-07-27 09:57:24,386 INFO [train.py:1114] (1/4) Epoch 1, batch 1450, loss[loss=0.5979, simple_loss=0.5466, pruned_loss=0.3315, over 4678.00 frames. ], tot_loss[loss=0.6631, simple_loss=0.5813, pruned_loss=0.407, over 942686.18 frames. ], batch size: 15, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:57:33,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.81 vs. limit=8.96 +2024-07-27 09:57:51,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=3.294 +2024-07-27 09:57:58,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.59 vs. limit=8.98 +2024-07-27 09:58:13,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=1986.6666666666667, ans=0.1255 +2024-07-27 09:58:16,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.11 vs. limit=5.993333333333333 +2024-07-27 09:58:21,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.36 vs. 
limit=3.298 +2024-07-27 09:58:22,749 INFO [train.py:1114] (1/4) Epoch 1, batch 1500, loss[loss=0.6237, simple_loss=0.557, pruned_loss=0.3556, over 4806.00 frames. ], tot_loss[loss=0.6486, simple_loss=0.5723, pruned_loss=0.3911, over 942455.55 frames. ], batch size: 14, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:58:22,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=2000.0, ans=0.012 +2024-07-27 09:58:23,588 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.537e+01 6.985e+01 7.625e+01 8.885e+01 1.224e+02, threshold=1.525e+02, percent-clipped=0.0 +2024-07-27 09:58:24,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.03 vs. limit=9.0 +2024-07-27 09:58:45,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=8.255 +2024-07-27 09:58:47,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.88 vs. limit=9.02 +2024-07-27 09:58:58,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2040.0, ans=0.2796 +2024-07-27 09:59:06,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2053.3333333333335, ans=0.40375 +2024-07-27 09:59:10,610 INFO [train.py:1114] (1/4) Epoch 1, batch 1550, loss[loss=0.543, simple_loss=0.5191, pruned_loss=0.2822, over 4886.00 frames. ], tot_loss[loss=0.6324, simple_loss=0.562, pruned_loss=0.375, over 939003.54 frames. ], batch size: 15, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 09:59:38,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=2080.0, ans=0.8272 +2024-07-27 09:59:55,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=2120.0, ans=0.0523 +2024-07-27 09:59:57,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2120.0, ans=0.2788 +2024-07-27 10:00:04,782 INFO [train.py:1114] (1/4) Epoch 1, batch 1600, loss[loss=0.6058, simple_loss=0.5436, pruned_loss=0.3409, over 4871.00 frames. ], tot_loss[loss=0.6203, simple_loss=0.5546, pruned_loss=0.3625, over 938127.97 frames. ], batch size: 14, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 10:00:05,612 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.018e+01 7.259e+01 8.235e+01 9.551e+01 1.793e+02, threshold=1.647e+02, percent-clipped=2.0 +2024-07-27 10:00:05,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.17 vs. limit=9.1 +2024-07-27 10:00:09,385 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.54 vs. 
limit=5.533333333333333 +2024-07-27 10:00:35,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=2146.6666666666665, ans=0.399375 +2024-07-27 10:00:42,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.28 vs. limit=6.08 +2024-07-27 10:00:42,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.51 vs. limit=4.432 +2024-07-27 10:00:49,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=2173.3333333333335, ans=0.8239333333333334 +2024-07-27 10:00:54,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.21 vs. limit=3.326 +2024-07-27 10:00:57,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.31 vs. limit=9.14 +2024-07-27 10:01:01,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=3.328 +2024-07-27 10:01:04,974 INFO [train.py:1114] (1/4) Epoch 1, batch 1650, loss[loss=0.5653, simple_loss=0.5307, pruned_loss=0.3009, over 4669.00 frames. ], tot_loss[loss=0.6092, simple_loss=0.5478, pruned_loss=0.3513, over 937737.98 frames. ], batch size: 14, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 10:01:05,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=8.325 +2024-07-27 10:01:09,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=2200.0, ans=0.050499999999999996 +2024-07-27 10:01:12,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2213.3333333333335, ans=0.39625 +2024-07-27 10:01:16,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.73 vs. limit=5.553333333333334 +2024-07-27 10:01:18,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. limit=5.553333333333334 +2024-07-27 10:01:19,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=2213.3333333333335, ans=0.050199999999999995 +2024-07-27 10:01:25,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=2226.6666666666665, ans=0.04990000000000001 +2024-07-27 10:01:31,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=2226.6666666666665, ans=0.1165 +2024-07-27 10:01:32,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=9.17 +2024-07-27 10:01:41,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.37 vs. 
limit=3.336 +2024-07-27 10:01:42,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=2253.3333333333335, ans=0.39437500000000003 +2024-07-27 10:01:48,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.81 vs. limit=8.345 +2024-07-27 10:01:50,397 INFO [train.py:1114] (1/4) Epoch 1, batch 1700, loss[loss=0.5374, simple_loss=0.4953, pruned_loss=0.292, over 4705.00 frames. ], tot_loss[loss=0.5949, simple_loss=0.5389, pruned_loss=0.3382, over 939491.59 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:01:50,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.75 vs. limit=5.566666666666666 +2024-07-27 10:01:51,141 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.631e+01 6.759e+01 7.966e+01 9.777e+01 1.760e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 10:01:52,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.71 vs. limit=9.2 +2024-07-27 10:01:56,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2266.6666666666665, ans=0.39375 +2024-07-27 10:02:03,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2280.0, ans=0.2772 +2024-07-27 10:02:03,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.24 vs. limit=6.14 +2024-07-27 10:02:05,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.10 vs. limit=8.355 +2024-07-27 10:02:20,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.59 vs. limit=9.23 +2024-07-27 10:02:29,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.36 vs. limit=9.24 +2024-07-27 10:02:30,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.84 vs. limit=8.37 +2024-07-27 10:02:33,852 INFO [train.py:1114] (1/4) Epoch 1, batch 1750, loss[loss=0.4471, simple_loss=0.4374, pruned_loss=0.2265, over 4800.00 frames. ], tot_loss[loss=0.5843, simple_loss=0.5327, pruned_loss=0.3281, over 940642.49 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:02:49,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=2346.6666666666665, ans=0.39 +2024-07-27 10:02:50,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.20 vs. limit=9.26 +2024-07-27 10:02:56,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.81 vs. 
limit=9.26 +2024-07-27 10:02:57,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.14 vs. limit=9.27 +2024-07-27 10:02:57,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=2360.0, ans=6.475 +2024-07-27 10:02:59,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2360.0, ans=0.389375 +2024-07-27 10:03:02,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.65 vs. limit=5.59 +2024-07-27 10:03:03,377 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:03:05,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.40 vs. limit=8.39 +2024-07-27 10:03:10,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=2373.3333333333335, ans=0.8169333333333334 +2024-07-27 10:03:18,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.04 vs. limit=5.596666666666667 +2024-07-27 10:03:25,542 INFO [train.py:1114] (1/4) Epoch 1, batch 1800, loss[loss=0.489, simple_loss=0.4728, pruned_loss=0.2516, over 4642.00 frames. ], tot_loss[loss=0.5755, simple_loss=0.5274, pruned_loss=0.3199, over 941372.32 frames. ], batch size: 13, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:03:26,386 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.000e+01 7.252e+01 8.218e+01 9.576e+01 1.850e+02, threshold=1.644e+02, percent-clipped=1.0 +2024-07-27 10:03:44,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=2426.6666666666665, ans=0.38625 +2024-07-27 10:03:45,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=2426.6666666666665, ans=0.2364 +2024-07-27 10:03:46,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2426.6666666666665, ans=0.38625 +2024-07-27 10:03:46,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=4.970666666666666 +2024-07-27 10:03:52,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=2440.0, ans=0.8146 +2024-07-27 10:04:03,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2453.3333333333335, ans=0.27546666666666664 +2024-07-27 10:04:04,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=2453.3333333333335, ans=0.5 +2024-07-27 10:04:07,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=2453.3333333333335, ans=0.385 +2024-07-27 10:04:08,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.68 vs. 
limit=8.42 +2024-07-27 10:04:13,823 INFO [train.py:1114] (1/4) Epoch 1, batch 1850, loss[loss=0.5425, simple_loss=0.5171, pruned_loss=0.2838, over 4817.00 frames. ], tot_loss[loss=0.5646, simple_loss=0.5212, pruned_loss=0.3103, over 940960.49 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:04:14,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.98 vs. limit=8.425 +2024-07-27 10:04:21,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=2480.0, ans=0.0442 +2024-07-27 10:04:25,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.43 vs. limit=9.36 +2024-07-27 10:04:29,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=2480.0, ans=9.36 +2024-07-27 10:04:34,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=2493.3333333333335, ans=0.043899999999999995 +2024-07-27 10:04:37,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2506.6666666666665, ans=0.1866666666666667 +2024-07-27 10:04:44,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=2506.6666666666665, ans=0.106 +2024-07-27 10:04:46,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2520.0, ans=0.2748 +2024-07-27 10:04:49,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2520.0, ans=0.185 +2024-07-27 10:04:50,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.45 vs. limit=8.445 +2024-07-27 10:04:54,826 INFO [train.py:1114] (1/4) Epoch 1, batch 1900, loss[loss=0.5901, simple_loss=0.5595, pruned_loss=0.3104, over 4653.00 frames. ], tot_loss[loss=0.5563, simple_loss=0.517, pruned_loss=0.3026, over 941982.98 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:04:55,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.340e+01 7.620e+01 8.335e+01 9.482e+01 1.510e+02, threshold=1.667e+02, percent-clipped=0.0 +2024-07-27 10:04:58,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=31.14 vs. limit=9.4 +2024-07-27 10:04:58,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=31.47 vs. limit=6.266666666666667 +2024-07-27 10:05:19,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=2546.6666666666665, ans=0.380625 +2024-07-27 10:05:22,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.04 vs. 
limit=9.42 +2024-07-27 10:05:22,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=2560.0, ans=0.104 +2024-07-27 10:05:23,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=2560.0, ans=0.5 +2024-07-27 10:05:36,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.87 vs. limit=9.43 +2024-07-27 10:05:40,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=2586.6666666666665, ans=0.37875000000000003 +2024-07-27 10:05:43,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=2586.6666666666665, ans=0.08383333333333334 +2024-07-27 10:05:46,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.62 vs. limit=9.45 +2024-07-27 10:05:46,626 INFO [train.py:1114] (1/4) Epoch 1, batch 1950, loss[loss=0.5502, simple_loss=0.5082, pruned_loss=0.2964, over 4889.00 frames. ], tot_loss[loss=0.5531, simple_loss=0.5166, pruned_loss=0.2986, over 943905.28 frames. ], batch size: 13, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:05:48,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=8.475 +2024-07-27 10:05:51,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2600.0, ans=0.378125 +2024-07-27 10:06:09,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=2613.3333333333335, ans=0.102 +2024-07-27 10:06:21,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=2640.0, ans=0.5 +2024-07-27 10:06:27,968 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.22 vs. limit=6.326666666666667 +2024-07-27 10:06:29,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=2653.3333333333335, ans=0.27346666666666664 +2024-07-27 10:06:29,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=8.495 +2024-07-27 10:06:34,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-07-27 10:06:42,515 INFO [train.py:1114] (1/4) Epoch 1, batch 2000, loss[loss=0.4234, simple_loss=0.4176, pruned_loss=0.2146, over 4809.00 frames. ], tot_loss[loss=0.5454, simple_loss=0.5121, pruned_loss=0.2923, over 940977.51 frames. 
], batch size: 11, lr: 4.42e-02, grad_scale: 32.0 +2024-07-27 10:06:43,352 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.554e+01 8.059e+01 9.021e+01 3.573e+02, threshold=1.612e+02, percent-clipped=2.0 +2024-07-27 10:06:43,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-07-27 10:06:44,736 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.86 vs. limit=5.666666666666667 +2024-07-27 10:06:53,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.59 vs. limit=9.5 +2024-07-27 10:06:56,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=2680.0, ans=0.374375 +2024-07-27 10:07:02,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.72 vs. limit=8.51 +2024-07-27 10:07:08,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.79 vs. limit=9.52 +2024-07-27 10:07:12,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2706.6666666666665, ans=0.7770666666666667 +2024-07-27 10:07:12,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=2706.6666666666665, ans=0.8052666666666667 +2024-07-27 10:07:14,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=2706.6666666666665, ans=6.691666666666666 +2024-07-27 10:07:14,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=2706.6666666666665, ans=0.37312500000000004 +2024-07-27 10:07:20,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=5.088 +2024-07-27 10:07:21,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2720.0, ans=0.2728 +2024-07-27 10:07:22,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2720.0, ans=0.3725 +2024-07-27 10:07:24,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.50 vs. limit=6.36 +2024-07-27 10:07:26,885 INFO [train.py:1114] (1/4) Epoch 1, batch 2050, loss[loss=0.4416, simple_loss=0.4367, pruned_loss=0.2232, over 4607.00 frames. ], tot_loss[loss=0.5362, simple_loss=0.5059, pruned_loss=0.2855, over 939395.46 frames. 
], batch size: 11, lr: 4.42e-02, grad_scale: 64.0 +2024-07-27 10:07:31,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=2733.3333333333335, ans=0.371875 +2024-07-27 10:07:31,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=2733.3333333333335, ans=0.371875 +2024-07-27 10:07:35,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2746.6666666666665, ans=0.37124999999999997 +2024-07-27 10:07:47,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=2760.0, ans=0.8034 +2024-07-27 10:07:58,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=2773.3333333333335, ans=0.04133333333333333 +2024-07-27 10:08:06,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.98 vs. limit=5.696666666666666 +2024-07-27 10:08:07,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=8.545 +2024-07-27 10:08:08,450 INFO [train.py:1114] (1/4) Epoch 1, batch 2100, loss[loss=0.4786, simple_loss=0.4726, pruned_loss=0.2423, over 4766.00 frames. ], tot_loss[loss=0.5265, simple_loss=0.5002, pruned_loss=0.2782, over 940925.14 frames. ], batch size: 13, lr: 4.42e-02, grad_scale: 64.0 +2024-07-27 10:08:08,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=5.12 +2024-07-27 10:08:09,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.83 vs. limit=5.7 +2024-07-27 10:08:09,834 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.063e+01 7.785e+01 9.607e+01 1.091e+02 1.489e+02, threshold=1.921e+02, percent-clipped=0.0 +2024-07-27 10:08:27,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.15 vs. limit=5.706666666666667 +2024-07-27 10:08:35,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.87 vs. limit=9.629999999999999 +2024-07-27 10:08:35,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.95 vs. limit=9.629999999999999 +2024-07-27 10:08:39,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=2840.0, ans=0.2216 +2024-07-27 10:08:45,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=2840.0, ans=0.366875 +2024-07-27 10:08:54,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.09 vs. 
limit=5.713333333333333 +2024-07-27 10:08:56,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=2853.3333333333335, ans=0.09299999999999999 +2024-07-27 10:08:59,110 INFO [train.py:1114] (1/4) Epoch 1, batch 2150, loss[loss=0.4657, simple_loss=0.4615, pruned_loss=0.2349, over 4901.00 frames. ], tot_loss[loss=0.5188, simple_loss=0.4965, pruned_loss=0.2719, over 944330.56 frames. ], batch size: 13, lr: 4.41e-02, grad_scale: 64.0 +2024-07-27 10:09:05,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.66 vs. limit=5.716666666666667 +2024-07-27 10:09:06,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.69 vs. limit=6.4399999999999995 +2024-07-27 10:09:18,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=2880.0, ans=0.082 +2024-07-27 10:09:47,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.29 vs. limit=8.59 +2024-07-27 10:10:05,498 INFO [train.py:1114] (1/4) Epoch 1, batch 2200, loss[loss=0.5121, simple_loss=0.5127, pruned_loss=0.2557, over 4819.00 frames. ], tot_loss[loss=0.5139, simple_loss=0.4938, pruned_loss=0.2681, over 943452.06 frames. ], batch size: 14, lr: 4.41e-02, grad_scale: 64.0 +2024-07-27 10:10:06,253 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.332e+01 7.672e+01 8.381e+01 9.351e+01 1.723e+02, threshold=1.676e+02, percent-clipped=0.0 +2024-07-27 10:10:22,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=2946.6666666666665, ans=0.361875 +2024-07-27 10:10:27,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2960.0, ans=0.13 +2024-07-27 10:10:32,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=2973.3333333333335, ans=0.2446 +2024-07-27 10:10:34,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=9.73 +2024-07-27 10:10:39,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=5.1946666666666665 +2024-07-27 10:10:42,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.90 vs. limit=5.746666666666666 +2024-07-27 10:10:47,985 INFO [train.py:1114] (1/4) Epoch 1, batch 2250, loss[loss=0.4406, simple_loss=0.4446, pruned_loss=0.2184, over 4695.00 frames. ], tot_loss[loss=0.5095, simple_loss=0.4923, pruned_loss=0.2642, over 941926.99 frames. 
], batch size: 13, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:10:48,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=3000.0, ans=0.359375 +2024-07-27 10:10:48,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=3000.0, ans=0.359375 +2024-07-27 10:10:49,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.80 vs. limit=5.75 +2024-07-27 10:10:53,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.66 vs. limit=6.5 +2024-07-27 10:10:55,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=3013.3333333333335, ans=6.883333333333334 +2024-07-27 10:11:08,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.20 vs. limit=9.76 +2024-07-27 10:11:09,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=3013.3333333333335, ans=0.03219999999999999 +2024-07-27 10:11:23,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=3026.6666666666665, ans=0.358125 +2024-07-27 10:11:28,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3026.6666666666665, ans=0.1216666666666667 +2024-07-27 10:11:28,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.69 vs. limit=3.454 +2024-07-27 10:11:29,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=3040.0, ans=0.7936000000000001 +2024-07-27 10:11:34,462 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.81 vs. limit=8.64 +2024-07-27 10:11:34,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=9.78 +2024-07-27 10:11:41,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.68 vs. limit=9.79 +2024-07-27 10:11:44,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=3053.3333333333335, ans=0.356875 +2024-07-27 10:11:45,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.42 vs. limit=9.79 +2024-07-27 10:11:48,647 INFO [train.py:1114] (1/4) Epoch 1, batch 2300, loss[loss=0.3897, simple_loss=0.4002, pruned_loss=0.1896, over 4944.00 frames. ], tot_loss[loss=0.5, simple_loss=0.4862, pruned_loss=0.2575, over 939488.83 frames. 
], batch size: 12, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:11:49,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.418e+01 7.845e+01 8.717e+01 9.817e+01 1.762e+02, threshold=1.743e+02, percent-clipped=1.0 +2024-07-27 10:11:51,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=3066.6666666666665, ans=0.35625 +2024-07-27 10:11:55,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=3080.0, ans=0.35562499999999997 +2024-07-27 10:11:56,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=3080.0, ans=0.35562499999999997 +2024-07-27 10:12:10,339 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=8.66 +2024-07-27 10:12:15,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3106.6666666666665, ans=0.2689333333333333 +2024-07-27 10:12:17,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=3106.6666666666665, ans=0.08349999999999999 +2024-07-27 10:12:18,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.26 vs. limit=6.553333333333333 +2024-07-27 10:12:19,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.04 vs. limit=9.83 +2024-07-27 10:12:25,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=3120.0, ans=0.35375 +2024-07-27 10:12:31,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=3133.3333333333335, ans=0.353125 +2024-07-27 10:12:32,059 INFO [train.py:1114] (1/4) Epoch 1, batch 2350, loss[loss=0.4868, simple_loss=0.4798, pruned_loss=0.2469, over 4634.00 frames. ], tot_loss[loss=0.4949, simple_loss=0.4835, pruned_loss=0.2536, over 941414.58 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:12:37,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.03 vs. limit=6.566666666666666 +2024-07-27 10:12:50,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=3160.0, ans=8.685 +2024-07-27 10:12:53,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.26 vs. 
limit=6.58 +2024-07-27 10:12:55,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3160.0, ans=0.26839999999999997 +2024-07-27 10:12:57,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=3160.0, ans=0.351875 +2024-07-27 10:13:03,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=3173.3333333333335, ans=0.10333333333333333 +2024-07-27 10:13:09,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.14 vs. limit=8.695 +2024-07-27 10:13:16,255 INFO [train.py:1114] (1/4) Epoch 1, batch 2400, loss[loss=0.4051, simple_loss=0.4185, pruned_loss=0.1959, over 4641.00 frames. ], tot_loss[loss=0.4942, simple_loss=0.4834, pruned_loss=0.2529, over 941119.41 frames. ], batch size: 12, lr: 4.39e-02, grad_scale: 64.0 +2024-07-27 10:13:16,978 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.405e+01 7.978e+01 8.770e+01 1.032e+02 1.902e+02, threshold=1.754e+02, percent-clipped=2.0 +2024-07-27 10:13:33,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=3226.6666666666665, ans=0.035 +2024-07-27 10:13:40,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=3240.0, ans=0.348125 +2024-07-27 10:13:42,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=3240.0, ans=0.348125 +2024-07-27 10:13:46,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=3240.0, ans=0.348125 +2024-07-27 10:13:49,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=3253.3333333333335, ans=0.02679999999999999 +2024-07-27 10:13:50,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=3253.3333333333335, ans=0.34750000000000003 +2024-07-27 10:13:51,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=3253.3333333333335, ans=0.07966666666666666 +2024-07-27 10:13:56,383 INFO [train.py:1114] (1/4) Epoch 1, batch 2450, loss[loss=0.4234, simple_loss=0.448, pruned_loss=0.1994, over 4693.00 frames. ], tot_loss[loss=0.4929, simple_loss=0.4832, pruned_loss=0.2516, over 936649.38 frames. ], batch size: 13, lr: 4.39e-02, grad_scale: 64.0 +2024-07-27 10:14:05,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.16 vs. limit=6.64 +2024-07-27 10:14:05,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.53 vs. 
limit=3.492 +2024-07-27 10:14:06,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3280.0, ans=0.34625 +2024-07-27 10:14:30,926 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:14:32,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3293.3333333333335, ans=0.2670666666666667 +2024-07-27 10:14:33,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3293.3333333333335, ans=0.2670666666666667 +2024-07-27 10:14:39,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.14 vs. limit=5.0 +2024-07-27 10:14:53,953 INFO [train.py:1114] (1/4) Epoch 1, batch 2500, loss[loss=0.4916, simple_loss=0.5017, pruned_loss=0.2408, over 4813.00 frames. ], tot_loss[loss=0.4865, simple_loss=0.4799, pruned_loss=0.2468, over 938384.49 frames. ], batch size: 14, lr: 4.38e-02, grad_scale: 64.0 +2024-07-27 10:14:54,649 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.439e+01 7.441e+01 8.140e+01 9.225e+01 1.396e+02, threshold=1.628e+02, percent-clipped=0.0 +2024-07-27 10:14:58,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=3333.3333333333335, ans=7.083333333333334 +2024-07-27 10:14:58,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=10.0 +2024-07-27 10:15:11,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3346.6666666666665, ans=0.26653333333333334 +2024-07-27 10:15:30,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=3346.6666666666665, ans=0.06175 +2024-07-27 10:15:33,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3360.0, ans=0.3425 +2024-07-27 10:15:34,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=3360.0, ans=0.0395 +2024-07-27 10:15:44,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=3373.3333333333335, ans=0.07349999999999998 +2024-07-27 10:15:45,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3373.3333333333335, ans=0.26626666666666665 +2024-07-27 10:16:03,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.95 vs. limit=6.7 +2024-07-27 10:16:03,502 INFO [train.py:1114] (1/4) Epoch 1, batch 2550, loss[loss=0.3972, simple_loss=0.3974, pruned_loss=0.1985, over 4793.00 frames. ], tot_loss[loss=0.4838, simple_loss=0.4789, pruned_loss=0.2445, over 938249.11 frames. 
], batch size: 11, lr: 4.38e-02, grad_scale: 64.0 +2024-07-27 10:16:23,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=3426.6666666666665, ans=0.339375 +2024-07-27 10:16:32,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.81 vs. limit=8.79 +2024-07-27 10:16:37,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.41 vs. limit=6.72 +2024-07-27 10:16:43,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.46 vs. limit=8.795 +2024-07-27 10:16:49,869 INFO [train.py:1114] (1/4) Epoch 1, batch 2600, loss[loss=0.467, simple_loss=0.4702, pruned_loss=0.2319, over 4894.00 frames. ], tot_loss[loss=0.4825, simple_loss=0.4787, pruned_loss=0.2433, over 937204.24 frames. ], batch size: 13, lr: 4.37e-02, grad_scale: 64.0 +2024-07-27 10:16:50,622 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.396e+01 7.798e+01 8.275e+01 9.472e+01 1.752e+02, threshold=1.655e+02, percent-clipped=1.0 +2024-07-27 10:16:53,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=3466.6666666666665, ans=0.05499999999999999 +2024-07-27 10:17:31,722 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.25 vs. limit=6.76 +2024-07-27 10:17:34,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=3520.0, ans=0.33499999999999996 +2024-07-27 10:17:38,256 INFO [train.py:1114] (1/4) Epoch 1, batch 2650, loss[loss=0.5959, simple_loss=0.5641, pruned_loss=0.3138, over 4655.00 frames. ], tot_loss[loss=0.4807, simple_loss=0.4782, pruned_loss=0.2417, over 939304.84 frames. ], batch size: 16, lr: 4.37e-02, grad_scale: 64.0 +2024-07-27 10:17:48,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=3546.6666666666665, ans=0.7758666666666667 +2024-07-27 10:17:50,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=3546.6666666666665, ans=0.33375 +2024-07-27 10:17:52,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.39 vs. limit=10.16 +2024-07-27 10:17:52,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=5.418666666666667 +2024-07-27 10:17:52,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.67 vs. 
limit=10.16 +2024-07-27 10:17:52,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3546.6666666666665, ans=0.0566666666666667 +2024-07-27 10:17:54,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=3546.6666666666665, ans=0.020199999999999996 +2024-07-27 10:17:54,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.20 vs. limit=6.773333333333333 +2024-07-27 10:18:00,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=3560.0, ans=0.03887500000000001 +2024-07-27 10:18:02,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=3560.0, ans=0.7856 +2024-07-27 10:18:24,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3586.6666666666665, ans=0.26413333333333333 +2024-07-27 10:18:36,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=3586.6666666666665, ans=0.04824999999999999 +2024-07-27 10:18:36,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=3586.6666666666665, ans=0.06549999999999997 +2024-07-27 10:18:37,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.07 vs. limit=8.845 +2024-07-27 10:18:38,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=8.845 +2024-07-27 10:18:40,839 INFO [train.py:1114] (1/4) Epoch 1, batch 2700, loss[loss=0.4875, simple_loss=0.4895, pruned_loss=0.2428, over 4733.00 frames. ], tot_loss[loss=0.4769, simple_loss=0.4764, pruned_loss=0.2388, over 939052.93 frames. ], batch size: 14, lr: 4.36e-02, grad_scale: 64.0 +2024-07-27 10:18:41,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.609e+01 7.664e+01 8.465e+01 9.239e+01 1.807e+02, threshold=1.693e+02, percent-clipped=1.0 +2024-07-27 10:18:50,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.83 vs. limit=10.2 +2024-07-27 10:18:52,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=3613.3333333333335, ans=0.07 +2024-07-27 10:19:09,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=3640.0, ans=0.7726 +2024-07-27 10:19:40,869 INFO [train.py:1114] (1/4) Epoch 1, batch 2750, loss[loss=0.4583, simple_loss=0.4628, pruned_loss=0.2269, over 4715.00 frames. ], tot_loss[loss=0.4713, simple_loss=0.4726, pruned_loss=0.235, over 939051.40 frames. 
], batch size: 12, lr: 4.36e-02, grad_scale: 32.0 +2024-07-27 10:19:43,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=3666.6666666666665, ans=0.04375000000000001 +2024-07-27 10:19:47,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.09 vs. limit=5.916666666666667 +2024-07-27 10:19:48,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.91 vs. limit=10.26 +2024-07-27 10:20:02,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.02 vs. limit=10.27 +2024-07-27 10:20:03,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=3693.3333333333335, ans=0.7707333333333334 +2024-07-27 10:20:24,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=3720.0, ans=0.7698 +2024-07-27 10:20:27,059 INFO [train.py:1114] (1/4) Epoch 1, batch 2800, loss[loss=0.5944, simple_loss=0.544, pruned_loss=0.3224, over 3626.00 frames. ], tot_loss[loss=0.4664, simple_loss=0.4699, pruned_loss=0.2315, over 937646.41 frames. ], batch size: 37, lr: 4.36e-02, grad_scale: 32.0 +2024-07-27 10:20:28,615 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.137e+01 7.490e+01 8.370e+01 9.871e+01 2.286e+02, threshold=1.674e+02, percent-clipped=1.0 +2024-07-27 10:20:35,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.11 vs. limit=6.873333333333333 +2024-07-27 10:20:35,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=3746.6666666666665, ans=0.015699999999999992 +2024-07-27 10:20:44,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=10.32 +2024-07-27 10:20:48,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=8.91 +2024-07-27 10:20:54,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.05 vs. limit=6.886666666666667 +2024-07-27 10:21:03,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3773.3333333333335, ans=0.26226666666666665 +2024-07-27 10:21:11,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.29 vs. limit=6.8933333333333335 +2024-07-27 10:21:11,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.66 vs. limit=10.35 +2024-07-27 10:21:12,416 INFO [train.py:1114] (1/4) Epoch 1, batch 2850, loss[loss=0.4094, simple_loss=0.4105, pruned_loss=0.2041, over 4963.00 frames. ], tot_loss[loss=0.4652, simple_loss=0.4691, pruned_loss=0.2308, over 935670.70 frames. 
], batch size: 13, lr: 4.35e-02, grad_scale: 32.0 +2024-07-27 10:21:17,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=3800.0, ans=0.321875 +2024-07-27 10:21:21,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=3813.3333333333335, ans=0.32125000000000004 +2024-07-27 10:21:24,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=3813.3333333333335, ans=0.035 +2024-07-27 10:21:24,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=3813.3333333333335, ans=0.32125000000000004 +2024-07-27 10:21:28,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.06 vs. limit=10.370000000000001 +2024-07-27 10:21:29,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.00 vs. limit=8.935 +2024-07-27 10:21:36,930 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.02 vs. limit=5.96 +2024-07-27 10:21:39,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=3840.0, ans=0.7884 +2024-07-27 10:21:46,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=3853.3333333333335, ans=0.07 +2024-07-27 10:21:58,255 INFO [train.py:1114] (1/4) Epoch 1, batch 2900, loss[loss=0.4433, simple_loss=0.4592, pruned_loss=0.2137, over 4823.00 frames. ], tot_loss[loss=0.4603, simple_loss=0.4667, pruned_loss=0.2269, over 939599.40 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0 +2024-07-27 10:22:07,176 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.485e+01 7.711e+01 8.512e+01 9.288e+01 5.214e+02, threshold=1.702e+02, percent-clipped=1.0 +2024-07-27 10:22:09,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3866.6666666666665, ans=0.2613333333333333 +2024-07-27 10:22:10,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=3866.6666666666665, ans=0.31875 +2024-07-27 10:22:16,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.03 vs. limit=10.41 +2024-07-27 10:22:16,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=8.955 +2024-07-27 10:22:19,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=3880.0, ans=0.7642 +2024-07-27 10:22:21,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=3893.3333333333335, ans=0.07 +2024-07-27 10:22:26,469 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.91 vs. 
limit=8.96 +2024-07-27 10:22:31,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=3906.6666666666665, ans=0.05349999999999999 +2024-07-27 10:22:42,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3920.0, ans=0.2608 +2024-07-27 10:22:47,615 INFO [train.py:1114] (1/4) Epoch 1, batch 2950, loss[loss=0.4614, simple_loss=0.4752, pruned_loss=0.2238, over 4707.00 frames. ], tot_loss[loss=0.4572, simple_loss=0.4639, pruned_loss=0.2253, over 938762.38 frames. ], batch size: 12, lr: 4.34e-02, grad_scale: 32.0 +2024-07-27 10:23:01,316 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.79 vs. limit=6.966666666666667 +2024-07-27 10:23:02,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=3946.6666666666665, ans=0.7618666666666667 +2024-07-27 10:23:02,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=3946.6666666666665, ans=0.011199999999999988 +2024-07-27 10:23:02,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=3946.6666666666665, ans=0.011199999999999988 +2024-07-27 10:23:03,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3946.6666666666665, ans=0.00666666666666671 +2024-07-27 10:23:07,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.76 vs. limit=10.46 +2024-07-27 10:23:13,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.74 vs. limit=6.98 +2024-07-27 10:23:14,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=3960.0, ans=0.31437499999999996 +2024-07-27 10:23:19,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=3973.3333333333335, ans=0.0033333333333332993 +2024-07-27 10:23:37,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=4000.0, ans=0.3125 +2024-07-27 10:23:38,731 INFO [train.py:1114] (1/4) Epoch 1, batch 3000, loss[loss=0.4541, simple_loss=0.4607, pruned_loss=0.2237, over 4760.00 frames. ], tot_loss[loss=0.4544, simple_loss=0.4624, pruned_loss=0.2232, over 938247.31 frames. ], batch size: 13, lr: 4.34e-02, grad_scale: 32.0 +2024-07-27 10:23:38,732 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 10:23:49,046 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.2669, 3.7480, 4.1959, 4.0074], device='cuda:1') +2024-07-27 10:23:52,404 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=0.3584, simple_loss=0.4212, pruned_loss=0.1478, over 944034.00 frames. 
+2024-07-27 10:23:52,405 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 10:23:53,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=4000.0, ans=0.3125 +2024-07-27 10:23:54,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.536e+01 7.537e+01 8.350e+01 9.496e+01 1.510e+02, threshold=1.670e+02, percent-clipped=0.0 +2024-07-27 10:23:58,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.25 vs. limit=9.0 +2024-07-27 10:24:14,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4013.3333333333335, ans=0.311875 +2024-07-27 10:24:21,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.53 vs. limit=9.004999999999999 +2024-07-27 10:24:22,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=4013.3333333333335, ans=0.311875 +2024-07-27 10:24:22,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=9.004999999999999 +2024-07-27 10:24:23,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.64 vs. limit=10.51 +2024-07-27 10:24:33,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=4040.0, ans=0.31062500000000004 +2024-07-27 10:24:37,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=4040.0, ans=0.2596 +2024-07-27 10:24:40,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4040.0, ans=0.31062500000000004 +2024-07-27 10:24:45,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.25 vs. limit=9.02 +2024-07-27 10:24:49,751 INFO [train.py:1114] (1/4) Epoch 1, batch 3050, loss[loss=0.4333, simple_loss=0.4296, pruned_loss=0.2185, over 4643.00 frames. ], tot_loss[loss=0.4552, simple_loss=0.4636, pruned_loss=0.2235, over 937315.03 frames. ], batch size: 12, lr: 4.33e-02, grad_scale: 32.0 +2024-07-27 10:24:49,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=4066.6666666666665, ans=0.7576666666666667 +2024-07-27 10:24:50,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.36 vs. limit=6.016666666666667 +2024-07-27 10:24:54,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=4066.6666666666665, ans=0.261 +2024-07-27 10:24:56,795 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.43 vs. limit=9.025 +2024-07-27 10:24:58,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.07 vs. 
limit=9.03 +2024-07-27 10:25:01,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.65 vs. limit=10.56 +2024-07-27 10:25:05,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.61 vs. limit=6.023333333333333 +2024-07-27 10:25:22,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.39 vs. limit=10.58 +2024-07-27 10:25:25,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=4106.666666666667, ans=0.3075 +2024-07-27 10:25:26,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.39 vs. limit=10.58 +2024-07-27 10:25:31,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=4120.0, ans=0.0495 +2024-07-27 10:25:33,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=4120.0, ans=0.7558 +2024-07-27 10:25:36,734 INFO [train.py:1114] (1/4) Epoch 1, batch 3100, loss[loss=0.4465, simple_loss=0.4595, pruned_loss=0.2168, over 4606.00 frames. ], tot_loss[loss=0.4501, simple_loss=0.4599, pruned_loss=0.2202, over 937887.08 frames. ], batch size: 16, lr: 4.33e-02, grad_scale: 32.0 +2024-07-27 10:25:38,253 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.617e+01 7.727e+01 8.300e+01 9.366e+01 1.573e+02, threshold=1.660e+02, percent-clipped=0.0 +2024-07-27 10:25:40,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4133.333333333333, ans=0.30625 +2024-07-27 10:25:53,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=4146.666666666667, ans=0.30562500000000004 +2024-07-27 10:25:55,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=4146.666666666667, ans=0.7548666666666667 +2024-07-27 10:26:25,179 INFO [train.py:1114] (1/4) Epoch 1, batch 3150, loss[loss=0.3904, simple_loss=0.416, pruned_loss=0.1825, over 4622.00 frames. ], tot_loss[loss=0.4471, simple_loss=0.458, pruned_loss=0.2181, over 937914.24 frames. 
], batch size: 17, lr: 4.32e-02, grad_scale: 32.0 +2024-07-27 10:26:25,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=4200.0, ans=0.303125 +2024-07-27 10:26:37,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=4213.333333333333, ans=7.633333333333333 +2024-07-27 10:26:39,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4213.333333333333, ans=0.3025 +2024-07-27 10:26:47,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=4226.666666666667, ans=0.2577333333333333 +2024-07-27 10:26:57,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4253.333333333333, ans=0.30062500000000003 +2024-07-27 10:26:58,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4253.333333333333, ans=0.30062500000000003 +2024-07-27 10:27:05,654 INFO [train.py:1114] (1/4) Epoch 1, batch 3200, loss[loss=0.3912, simple_loss=0.4288, pruned_loss=0.1768, over 4811.00 frames. ], tot_loss[loss=0.4435, simple_loss=0.4556, pruned_loss=0.2158, over 939232.31 frames. ], batch size: 13, lr: 4.32e-02, grad_scale: 32.0 +2024-07-27 10:27:12,167 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.236e+01 7.498e+01 8.243e+01 8.897e+01 1.348e+02, threshold=1.649e+02, percent-clipped=0.0 +2024-07-27 10:27:13,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.24 vs. limit=10.7 +2024-07-27 10:27:13,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=4.8533333333333335 +2024-07-27 10:27:18,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.20 vs. limit=6.07 +2024-07-27 10:27:38,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=4280.0, ans=0.009939130434782608 +2024-07-27 10:27:40,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=4280.0, ans=0.025 +2024-07-27 10:27:42,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=4280.0, ans=0.7502 +2024-07-27 10:27:44,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.22 vs. limit=6.07 +2024-07-27 10:27:55,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=5.717333333333333 +2024-07-27 10:27:56,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=4293.333333333333, ans=0.29874999999999996 +2024-07-27 10:27:57,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.67 vs. 
limit=10.719999999999999 +2024-07-27 10:28:07,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.35 vs. limit=7.153333333333334 +2024-07-27 10:28:15,775 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.80 vs. limit=10.74 +2024-07-27 10:28:16,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.33 vs. limit=7.16 +2024-07-27 10:28:29,681 INFO [train.py:1114] (1/4) Epoch 1, batch 3250, loss[loss=0.502, simple_loss=0.5106, pruned_loss=0.2467, over 4933.00 frames. ], tot_loss[loss=0.4446, simple_loss=0.4568, pruned_loss=0.2162, over 940436.01 frames. ], batch size: 14, lr: 4.31e-02, grad_scale: 32.0 +2024-07-27 10:28:34,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=4333.333333333333, ans=0.296875 +2024-07-27 10:28:51,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=10.77 +2024-07-27 10:28:53,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=4360.0, ans=0.7936 +2024-07-27 10:29:05,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=4386.666666666667, ans=0.009915942028985507 +2024-07-27 10:29:06,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=10.79 +2024-07-27 10:29:08,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.35 vs. limit=10.79 +2024-07-27 10:29:11,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=4386.666666666667, ans=0.04949747468305833 +2024-07-27 10:29:12,737 INFO [train.py:1114] (1/4) Epoch 1, batch 3300, loss[loss=0.5185, simple_loss=0.5166, pruned_loss=0.2602, over 4715.00 frames. ], tot_loss[loss=0.4391, simple_loss=0.4528, pruned_loss=0.2127, over 940665.00 frames. ], batch size: 19, lr: 4.31e-02, grad_scale: 32.0 +2024-07-27 10:29:14,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.348e+01 7.414e+01 8.133e+01 9.480e+01 1.579e+02, threshold=1.627e+02, percent-clipped=0.0 +2024-07-27 10:29:38,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=4413.333333333333, ans=0.04827777777777778 +2024-07-27 10:30:04,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=4413.333333333333, ans=0.29312499999999997 +2024-07-27 10:30:35,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=4453.333333333333, ans=0.00990144927536232 +2024-07-27 10:30:36,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.50 vs. limit=10.84 +2024-07-27 10:30:53,656 INFO [train.py:1114] (1/4) Epoch 1, batch 3350, loss[loss=0.4525, simple_loss=0.4644, pruned_loss=0.2203, over 4636.00 frames. 
], tot_loss[loss=0.4404, simple_loss=0.454, pruned_loss=0.2134, over 938461.45 frames. ], batch size: 17, lr: 4.30e-02, grad_scale: 32.0 +2024-07-27 10:30:57,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=4466.666666666667, ans=0.00989855072463768 +2024-07-27 10:31:16,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=4480.0, ans=0.7432000000000001 +2024-07-27 10:31:26,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.33 vs. limit=6.123333333333333 +2024-07-27 10:31:31,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.28 vs. limit=9.185 +2024-07-27 10:31:38,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4506.666666666667, ans=0.28875 +2024-07-27 10:31:44,676 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:31:55,293 INFO [train.py:1114] (1/4) Epoch 1, batch 3400, loss[loss=0.3474, simple_loss=0.3716, pruned_loss=0.1615, over 4816.00 frames. ], tot_loss[loss=0.4375, simple_loss=0.4522, pruned_loss=0.2114, over 937162.08 frames. ], batch size: 11, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:31:56,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=4533.333333333333, ans=0.07166666666666667 +2024-07-27 10:31:56,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=4533.333333333333, ans=0.009884057971014493 +2024-07-27 10:31:56,822 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.521e+01 8.329e+01 9.335e+01 1.968e+02, threshold=1.666e+02, percent-clipped=1.0 +2024-07-27 10:31:58,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.90 vs. limit=10.9 +2024-07-27 10:32:14,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.48 vs. limit=6.136666666666667 +2024-07-27 10:32:54,895 INFO [train.py:1114] (1/4) Epoch 1, batch 3450, loss[loss=0.4773, simple_loss=0.4657, pruned_loss=0.2444, over 4727.00 frames. ], tot_loss[loss=0.4392, simple_loss=0.4536, pruned_loss=0.2124, over 937459.71 frames. ], batch size: 19, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:32:59,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=4600.0, ans=10.95 +2024-07-27 10:33:14,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=4613.333333333333, ans=0.025 +2024-07-27 10:33:14,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=4613.333333333333, ans=0.28375 +2024-07-27 10:33:15,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.82 vs. 
limit=9.23 +2024-07-27 10:33:19,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=4626.666666666667, ans=0.025 +2024-07-27 10:33:22,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=4626.666666666667, ans=0.04738888888888889 +2024-07-27 10:33:39,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.64 vs. limit=9.24 +2024-07-27 10:33:48,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=4653.333333333333, ans=0.281875 +2024-07-27 10:33:48,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=9.245000000000001 +2024-07-27 10:33:55,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=4653.333333333333, ans=0.281875 +2024-07-27 10:33:55,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.43 vs. limit=10.99 +2024-07-27 10:33:58,728 INFO [train.py:1114] (1/4) Epoch 1, batch 3500, loss[loss=0.3338, simple_loss=0.3553, pruned_loss=0.1561, over 4941.00 frames. ], tot_loss[loss=0.4335, simple_loss=0.4499, pruned_loss=0.2086, over 938107.88 frames. ], batch size: 12, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:34:00,614 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.699e+01 7.535e+01 8.121e+01 9.134e+01 1.279e+02, threshold=1.624e+02, percent-clipped=0.0 +2024-07-27 10:34:03,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=4666.666666666667, ans=0.035 +2024-07-27 10:35:05,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.81 vs. limit=7.359999999999999 +2024-07-27 10:35:14,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=4733.333333333333, ans=0.7343333333333334 +2024-07-27 10:35:18,199 INFO [train.py:1114] (1/4) Epoch 1, batch 3550, loss[loss=0.4541, simple_loss=0.4638, pruned_loss=0.2222, over 4664.00 frames. ], tot_loss[loss=0.4299, simple_loss=0.4477, pruned_loss=0.206, over 938782.69 frames. ], batch size: 14, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:35:23,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.65 vs. limit=11.05 +2024-07-27 10:35:36,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.05 vs. 
limit=11.06 +2024-07-27 10:35:37,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=4746.666666666667, ans=0.2712 +2024-07-27 10:35:41,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=4760.0, ans=0.04683333333333334 +2024-07-27 10:35:46,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=4760.0, ans=0.7976 +2024-07-27 10:35:50,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=4773.333333333333, ans=0.07016666666666667 +2024-07-27 10:35:51,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4773.333333333333, ans=0.27625 +2024-07-27 10:35:53,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=4773.333333333333, ans=0.7329333333333334 +2024-07-27 10:35:55,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.28 vs. limit=5.909333333333333 +2024-07-27 10:36:04,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=4786.666666666667, ans=0.275625 +2024-07-27 10:36:12,105 INFO [train.py:1114] (1/4) Epoch 1, batch 3600, loss[loss=0.3906, simple_loss=0.4285, pruned_loss=0.1764, over 4961.00 frames. ], tot_loss[loss=0.4285, simple_loss=0.4467, pruned_loss=0.2051, over 940240.74 frames. ], batch size: 13, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:36:13,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.357e+01 7.358e+01 8.127e+01 9.443e+01 1.425e+02, threshold=1.625e+02, percent-clipped=0.0 +2024-07-27 10:36:14,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=4800.0, ans=0.035 +2024-07-27 10:36:26,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4813.333333333333, ans=0.2518666666666667 +2024-07-27 10:36:27,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4813.333333333333, ans=0.2518666666666667 +2024-07-27 10:36:30,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=4826.666666666667, ans=0.04655555555555556 +2024-07-27 10:36:42,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=4840.0, ans=0.035 +2024-07-27 10:36:44,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.37 vs. 
limit=9.315
+2024-07-27 10:37:07,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=4853.333333333333, ans=0.0
+2024-07-27 10:37:08,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=4853.333333333333, ans=0.27249999999999996
+2024-07-27 10:37:21,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.45 vs. limit=11.15
+2024-07-27 10:37:22,214 INFO [train.py:1114] (1/4) Epoch 1, batch 3650, loss[loss=0.5017, simple_loss=0.5005, pruned_loss=0.2514, over 4913.00 frames. ], tot_loss[loss=0.4296, simple_loss=0.4473, pruned_loss=0.2059, over 940886.58 frames. ], batch size: 15, lr: 4.27e-02, grad_scale: 32.0
+2024-07-27 10:37:44,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=4893.333333333333, ans=0.03470833333333334
+2024-07-27 10:37:45,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.52 vs. limit=11.17
+2024-07-27 10:37:56,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=4906.666666666667, ans=0.034666666666666665
+2024-07-27 10:38:00,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=4920.0, ans=0.26937500000000003
+2024-07-27 10:38:02,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.86 vs. limit=9.345
+2024-07-27 10:38:04,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=9.345
+2024-07-27 10:38:07,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=4920.0, ans=0.025
+2024-07-27 10:38:07,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4933.333333333333, ans=0.26875
+2024-07-27 10:38:08,640 INFO [train.py:1114] (1/4) Epoch 1, batch 3700, loss[loss=0.4706, simple_loss=0.4872, pruned_loss=0.227, over 4935.00 frames. ], tot_loss[loss=0.4257, simple_loss=0.4455, pruned_loss=0.203, over 941781.71 frames. ], batch size: 14, lr: 4.26e-02, grad_scale: 32.0
+2024-07-27 10:38:09,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=4933.333333333333, ans=0.04611111111111112
+2024-07-27 10:38:09,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.23 vs. limit=6.233333333333333
+2024-07-27 10:38:10,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.394e+01 7.604e+01 8.315e+01 9.088e+01 1.291e+02, threshold=1.663e+02, percent-clipped=0.0
+2024-07-27 10:38:10,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=4933.333333333333, ans=0.26875
+2024-07-27 10:38:19,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4946.666666666667, ans=0.25053333333333333
+2024-07-27 10:38:23,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=4960.0, ans=0.046
+2024-07-27 10:38:30,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.62 vs. limit=9.36
+2024-07-27 10:38:35,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=4973.333333333333, ans=0.266875
+2024-07-27 10:38:36,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=4973.333333333333, ans=0.266875
+2024-07-27 10:38:42,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.07 vs. limit=9.370000000000001
+2024-07-27 10:38:43,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4986.666666666667, ans=0.2501333333333333
+2024-07-27 10:38:45,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.00 vs. limit=11.24
+2024-07-27 10:38:46,995 INFO [train.py:1114] (1/4) Epoch 1, batch 3750, loss[loss=0.333, simple_loss=0.3656, pruned_loss=0.1502, over 4811.00 frames. ], tot_loss[loss=0.4265, simple_loss=0.4459, pruned_loss=0.2035, over 943348.27 frames. ], batch size: 11, lr: 4.26e-02, grad_scale: 32.0
+2024-07-27 10:38:48,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=5000.0, ans=0.025
+2024-07-27 10:38:48,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.06 vs. limit=9.375
+2024-07-27 10:38:52,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5000.0, ans=0.265625
+2024-07-27 10:39:00,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=5013.333333333333, ans=0.7245333333333334
+2024-07-27 10:39:07,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=5026.666666666667, ans=0.04572222222222223
+2024-07-27 10:39:11,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.86 vs. limit=11.27
+2024-07-27 10:39:13,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=11.28
+2024-07-27 10:39:16,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.64 vs. limit=11.28
+2024-07-27 10:39:18,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=5040.0, ans=0.7236
+2024-07-27 10:39:23,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.52 vs. limit=11.29
+2024-07-27 10:39:29,993 INFO [train.py:1114] (1/4) Epoch 1, batch 3800, loss[loss=0.5009, simple_loss=0.5131, pruned_loss=0.2444, over 4814.00 frames. ], tot_loss[loss=0.4272, simple_loss=0.446, pruned_loss=0.2042, over 942216.49 frames. ], batch size: 14, lr: 4.25e-02, grad_scale: 32.0
+2024-07-27 10:39:30,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.14 vs. limit=6.266666666666667
+2024-07-27 10:39:31,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.46 vs. limit=11.3
+2024-07-27 10:39:31,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.397e+01 7.848e+01 8.926e+01 1.062e+02 1.659e+02, threshold=1.785e+02, percent-clipped=0.0
+2024-07-27 10:39:33,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5066.666666666667, ans=0.2625
+2024-07-27 10:39:37,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.41 vs. limit=11.31
+2024-07-27 10:39:39,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5080.0, ans=0.0
+2024-07-27 10:39:55,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.70 vs. limit=11.33
+2024-07-27 10:40:02,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.21 vs. limit=11.34
+2024-07-27 10:40:02,462 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.60 vs. limit=9.42
+2024-07-27 10:40:05,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=5120.0, ans=0.7208
+2024-07-27 10:40:07,914 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=11.34
+2024-07-27 10:40:09,707 INFO [train.py:1114] (1/4) Epoch 1, batch 3850, loss[loss=0.4302, simple_loss=0.4418, pruned_loss=0.2093, over 4650.00 frames. ], tot_loss[loss=0.4226, simple_loss=0.4439, pruned_loss=0.2007, over 942683.18 frames. ], batch size: 16, lr: 4.24e-02, grad_scale: 32.0
+2024-07-27 10:40:21,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5146.666666666667, ans=0.24853333333333333
+2024-07-27 10:40:33,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=5146.666666666667, ans=0.00975072463768116
+2024-07-27 10:40:35,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.81 vs. limit=11.36
+2024-07-27 10:40:45,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=5160.0, ans=0.258125
+2024-07-27 10:40:45,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5160.0, ans=0.258125
+2024-07-27 10:40:48,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.05 vs. limit=11.379999999999999
+2024-07-27 10:40:55,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.05 vs. limit=9.445
+2024-07-27 10:41:02,114 INFO [train.py:1114] (1/4) Epoch 1, batch 3900, loss[loss=0.335, simple_loss=0.3851, pruned_loss=0.1424, over 4808.00 frames. ], tot_loss[loss=0.4201, simple_loss=0.4429, pruned_loss=0.1987, over 942982.01 frames. ], batch size: 14, lr: 4.24e-02, grad_scale: 32.0
+2024-07-27 10:41:05,346 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 7.258e+01 7.897e+01 8.876e+01 1.354e+02, threshold=1.579e+02, percent-clipped=0.0
+2024-07-27 10:41:13,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5213.333333333333, ans=0.24786666666666668
+2024-07-27 10:41:18,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.94 vs. limit=11.41
+2024-07-27 10:41:24,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=5226.666666666667, ans=0.24773333333333333
+2024-07-27 10:41:37,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=5253.333333333333, ans=0.07
+2024-07-27 10:41:43,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5253.333333333333, ans=0.25375000000000003
+2024-07-27 10:41:46,753 INFO [train.py:1114] (1/4) Epoch 1, batch 3950, loss[loss=0.4331, simple_loss=0.4451, pruned_loss=0.2105, over 4835.00 frames. ], tot_loss[loss=0.4192, simple_loss=0.4427, pruned_loss=0.1979, over 944916.04 frames. ], batch size: 16, lr: 4.23e-02, grad_scale: 32.0
+2024-07-27 10:41:49,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5266.666666666667, ans=0.253125
+2024-07-27 10:42:05,747 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=9.48
+2024-07-27 10:42:32,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=3.7960000000000003
+2024-07-27 10:42:32,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=9.49
+2024-07-27 10:42:32,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5306.666666666667, ans=0.25125
+2024-07-27 10:42:43,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5320.0, ans=0.250625
+2024-07-27 10:42:43,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=5320.0, ans=0.09899494936611666
+2024-07-27 10:42:48,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5333.333333333333, ans=0.25
+2024-07-27 10:43:00,277 INFO [train.py:1114] (1/4) Epoch 1, batch 4000, loss[loss=0.3806, simple_loss=0.4122, pruned_loss=0.1745, over 4777.00 frames. ], tot_loss[loss=0.4213, simple_loss=0.4438, pruned_loss=0.1994, over 941373.97 frames. ], batch size: 12, lr: 4.23e-02, grad_scale: 32.0
+2024-07-27 10:43:01,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.271e+01 7.652e+01 8.472e+01 9.315e+01 2.163e+02, threshold=1.694e+02, percent-clipped=2.0
+2024-07-27 10:43:06,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5333.333333333333, ans=0.0
+2024-07-27 10:43:15,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=5346.666666666667, ans=0.06658333333333333
+2024-07-27 10:43:19,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.34 vs. limit=11.51
+2024-07-27 10:43:25,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5360.0, ans=0.24875000000000003
+2024-07-27 10:43:28,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.65 vs. limit=11.52
+2024-07-27 10:43:30,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=5373.333333333333, ans=0.044277777777777784
+2024-07-27 10:43:31,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.94 vs. limit=11.530000000000001
+2024-07-27 10:43:33,120 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 10:43:45,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.84 vs. limit=7.6866666666666665
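Editor's note: the `WARNING [optim.py:487]` lines above report quartiles of recent gradient norms, a clipping threshold, and the fraction of recent batches whose gradients were clipped. Below is a minimal, hypothetical sketch of how such a line could be produced; the threshold rule (`clipping_scale` times the median) and all names are assumptions for illustration, not the actual optim.py implementation.

```python
import torch

def report_grad_norm_stats(recent_norms: list[float], clipping_scale: float = 2.0) -> float:
    """Summarize recent gradient norms in the style of the
    `grad-norm quartiles ... threshold=... percent-clipped=...` log lines.
    Assumption: threshold = clipping_scale * median of the recent norms."""
    norms = torch.tensor(recent_norms, dtype=torch.float32)
    # min, lower quartile, median, upper quartile, max
    quartiles = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()
    percent_clipped = 100.0 * (norms > threshold).float().mean().item()
    print(
        "Clipping_scale=%.1f, grad-norm quartiles %s, threshold=%.3e, percent-clipped=%.1f"
        % (clipping_scale,
           " ".join("%.3e" % q for q in quartiles.tolist()),
           threshold,
           percent_clipped)
    )
    return threshold

report_grad_norm_stats([63.9, 76.0, 83.2, 90.9, 129.1] * 20)
```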
+2024-07-27 10:43:49,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=5386.666666666667, ans=0.1
+2024-07-27 10:43:55,177 INFO [train.py:1114] (1/4) Epoch 1, batch 4050, loss[loss=0.5425, simple_loss=0.5099, pruned_loss=0.2876, over 3363.00 frames. ], tot_loss[loss=0.4201, simple_loss=0.4427, pruned_loss=0.1988, over 939918.84 frames. ], batch size: 35, lr: 4.22e-02, grad_scale: 32.0
+2024-07-27 10:43:57,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.98 vs. limit=11.55
+2024-07-27 10:44:14,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=11.57
+2024-07-27 10:44:20,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=5426.666666666667, ans=9.535
+2024-07-27 10:44:20,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=3.814
+2024-07-27 10:44:21,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=5440.0, ans=0.7096
+2024-07-27 10:44:28,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=5440.0, ans=0.066
+2024-07-27 10:44:34,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=5453.333333333333, ans=0.009684057971014494
+2024-07-27 10:44:39,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5453.333333333333, ans=0.244375
+2024-07-27 10:44:42,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=5453.333333333333, ans=9.545
+2024-07-27 10:44:43,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.16 vs. limit=6.363333333333333
+2024-07-27 10:44:44,996 INFO [train.py:1114] (1/4) Epoch 1, batch 4100, loss[loss=0.5627, simple_loss=0.5507, pruned_loss=0.2874, over 4891.00 frames. ], tot_loss[loss=0.422, simple_loss=0.4441, pruned_loss=0.1999, over 938514.16 frames. ], batch size: 15, lr: 4.22e-02, grad_scale: 32.0
+2024-07-27 10:44:45,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5466.666666666667, ans=0.24533333333333332
+2024-07-27 10:44:46,471 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.345e+01 7.438e+01 7.964e+01 9.010e+01 1.753e+02, threshold=1.593e+02, percent-clipped=1.0
+2024-07-27 10:45:01,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5493.333333333333, ans=0.2425
+2024-07-27 10:45:15,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.79 vs. limit=9.56
+2024-07-27 10:45:41,327 INFO [train.py:1114] (1/4) Epoch 1, batch 4150, loss[loss=0.3655, simple_loss=0.3965, pruned_loss=0.1672, over 4833.00 frames. ], tot_loss[loss=0.4181, simple_loss=0.4416, pruned_loss=0.1973, over 938120.32 frames. ], batch size: 13, lr: 4.21e-02, grad_scale: 32.0
+2024-07-27 10:45:43,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=5533.333333333333, ans=0.24062499999999998
+2024-07-27 10:45:43,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.18 vs. limit=9.575
+2024-07-27 10:45:47,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=5533.333333333333, ans=0.009666666666666667
+2024-07-27 10:45:50,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5546.666666666667, ans=0.24453333333333332
+2024-07-27 10:46:21,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=5573.333333333333, ans=0.23875000000000002
+2024-07-27 10:46:28,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5573.333333333333, ans=0.23875000000000002
+2024-07-27 10:46:31,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=9.595
+2024-07-27 10:46:32,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=5586.666666666667, ans=0.23812499999999998
+2024-07-27 10:46:38,731 INFO [train.py:1114] (1/4) Epoch 1, batch 4200, loss[loss=0.4784, simple_loss=0.4904, pruned_loss=0.2332, over 4897.00 frames. ], tot_loss[loss=0.4178, simple_loss=0.4415, pruned_loss=0.197, over 939279.03 frames. ], batch size: 15, lr: 4.20e-02, grad_scale: 32.0
+2024-07-27 10:46:40,169 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.767e+01 7.218e+01 8.164e+01 9.157e+01 1.293e+02, threshold=1.633e+02, percent-clipped=0.0
+2024-07-27 10:47:00,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=5613.333333333333, ans=0.7035333333333333
+2024-07-27 10:47:00,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=5613.333333333333, ans=0.00964927536231884
+2024-07-27 10:47:00,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.35 vs. limit=11.71
+2024-07-27 10:47:01,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=5613.333333333333, ans=0.236875
+2024-07-27 10:47:08,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=5626.666666666667, ans=0.043222222222222224
+2024-07-27 10:47:21,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5653.333333333333, ans=0.235
+2024-07-27 10:47:32,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=5653.333333333333, ans=0.235
+2024-07-27 10:47:32,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=48.08 vs. limit=9.620000000000001
+2024-07-27 10:47:33,658 INFO [train.py:1114] (1/4) Epoch 1, batch 4250, loss[loss=0.3651, simple_loss=0.3944, pruned_loss=0.1679, over 4647.00 frames. ], tot_loss[loss=0.4162, simple_loss=0.4406, pruned_loss=0.1959, over 940712.39 frames. ], batch size: 12, lr: 4.20e-02, grad_scale: 32.0
+2024-07-27 10:47:52,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=5680.0, ans=11.76
+2024-07-27 10:47:52,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.81 vs. limit=9.629999999999999
+2024-07-27 10:47:54,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.58 vs. limit=9.629999999999999
+2024-07-27 10:47:55,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.57 vs. limit=6.42
+2024-07-27 10:47:58,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=5680.0, ans=0.23375
+2024-07-27 10:48:00,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.42 vs. limit=9.635
+2024-07-27 10:48:05,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=5693.333333333333, ans=0.04294444444444445
+2024-07-27 10:48:05,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=5693.333333333333, ans=0.009631884057971015
+2024-07-27 10:48:15,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=5706.666666666667, ans=0.23249999999999998
+2024-07-27 10:48:22,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.97 vs. limit=11.79
+2024-07-27 10:48:25,301 INFO [train.py:1114] (1/4) Epoch 1, batch 4300, loss[loss=0.3483, simple_loss=0.386, pruned_loss=0.1553, over 4761.00 frames. ], tot_loss[loss=0.4138, simple_loss=0.439, pruned_loss=0.1943, over 940260.96 frames. ], batch size: 13, lr: 4.19e-02, grad_scale: 32.0
+2024-07-27 10:48:25,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.80 vs. limit=11.8
+2024-07-27 10:48:26,777 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.009e+01 7.278e+01 8.201e+01 9.440e+01 2.695e+02, threshold=1.640e+02, percent-clipped=2.0
+2024-07-27 10:48:31,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=5733.333333333333, ans=9.65
+2024-07-27 10:48:36,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.28 vs. limit=7.873333333333333
+2024-07-27 10:48:37,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=5746.666666666667, ans=0.6988666666666667
+2024-07-27 10:48:58,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=5773.333333333333, ans=0.6979333333333334
+2024-07-27 10:49:04,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.10 vs. limit=11.84
+2024-07-27 10:49:15,555 INFO [train.py:1114] (1/4) Epoch 1, batch 4350, loss[loss=0.3978, simple_loss=0.432, pruned_loss=0.1818, over 4764.00 frames. ], tot_loss[loss=0.4129, simple_loss=0.4389, pruned_loss=0.1935, over 940781.21 frames. ], batch size: 13, lr: 4.19e-02, grad_scale: 32.0
+2024-07-27 10:49:18,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=5800.0, ans=0.07
+2024-07-27 10:49:50,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=5840.0, ans=0.22625
+2024-07-27 10:50:12,259 INFO [train.py:1114] (1/4) Epoch 1, batch 4400, loss[loss=0.3449, simple_loss=0.4018, pruned_loss=0.144, over 4815.00 frames. ], tot_loss[loss=0.4127, simple_loss=0.439, pruned_loss=0.1932, over 940307.52 frames. ], batch size: 14, lr: 4.18e-02, grad_scale: 32.0
+2024-07-27 10:50:13,753 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.875e+01 7.282e+01 8.065e+01 8.793e+01 1.417e+02, threshold=1.613e+02, percent-clipped=0.0
+2024-07-27 10:50:43,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.42 vs. limit=7.96
+2024-07-27 10:50:56,598 INFO [train.py:1114] (1/4) Epoch 1, batch 4450, loss[loss=0.3759, simple_loss=0.3999, pruned_loss=0.176, over 4937.00 frames. ], tot_loss[loss=0.4117, simple_loss=0.4384, pruned_loss=0.1925, over 938461.92 frames. ], batch size: 12, lr: 4.17e-02, grad_scale: 32.0
+2024-07-27 10:51:05,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5933.333333333333, ans=0.221875
+2024-07-27 10:51:16,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=5960.0, ans=0.6914
+2024-07-27 10:51:17,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=11.969999999999999
+2024-07-27 10:52:01,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.89 vs. limit=9.745000000000001
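Editor's note: the `ScheduledFloat` lines track hyperparameters (dropout rates, balancer probabilities, skip rates, whitening limits) that are annealed as a function of `batch_count`; the logged `ans=` value is the schedule evaluated at the current batch. A piecewise-linear sketch under assumed breakpoints, not the actual scaling.py class:

```python
class ScheduledFloat:
    """Minimal sketch: a float that anneals with training progress via
    piecewise-linear interpolation between (batch_count, value) pairs,
    clamped at both ends. Breakpoints below are illustrative assumptions."""

    def __init__(self, *points: tuple[float, float]):
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                w = (batch_count - x0) / (x1 - x0)
                return y0 + w * (y1 - y0)

# e.g. a dropout rate decaying from 0.3 to 0.1 over the first 20k batches;
# the result plays the role of the `ans=` value in the log lines above.
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(4853.33))  # ~0.2515
```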
+2024-07-27 10:52:02,903 INFO [train.py:1114] (1/4) Epoch 1, batch 4500, loss[loss=0.3927, simple_loss=0.4287, pruned_loss=0.1784, over 4742.00 frames. ], tot_loss[loss=0.4133, simple_loss=0.4402, pruned_loss=0.1932, over 938187.48 frames. ], batch size: 14, lr: 4.17e-02, grad_scale: 32.0
+2024-07-27 10:52:04,382 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.515e+01 7.518e+01 8.133e+01 8.921e+01 1.342e+02, threshold=1.627e+02, percent-clipped=0.0
+2024-07-27 10:52:16,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=6013.333333333333, ans=0.218125
+2024-07-27 10:52:47,950 INFO [train.py:1114] (1/4) Epoch 1, batch 4550, loss[loss=0.3919, simple_loss=0.4192, pruned_loss=0.1823, over 4890.00 frames. ], tot_loss[loss=0.4129, simple_loss=0.4397, pruned_loss=0.1931, over 940365.58 frames. ], batch size: 13, lr: 4.16e-02, grad_scale: 32.0
+2024-07-27 10:52:53,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6066.666666666667, ans=0.215625
+2024-07-27 10:53:49,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.22 vs. limit=9.78
+2024-07-27 10:53:49,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=6080.0, ans=0.1892
+2024-07-27 10:55:29,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=12.09
+2024-07-27 10:55:37,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=9.795
+2024-07-27 10:55:38,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=12.09
+2024-07-27 10:55:39,242 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.78 vs. limit=9.795
+2024-07-27 10:55:39,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=6133.333333333333, ans=0.292
+2024-07-27 10:55:40,600 INFO [train.py:1114] (1/4) Epoch 1, batch 4600, loss[loss=0.4621, simple_loss=0.4746, pruned_loss=0.2248, over 4523.00 frames. ], tot_loss[loss=0.4109, simple_loss=0.4377, pruned_loss=0.192, over 938315.63 frames. ], batch size: 21, lr: 4.15e-02, grad_scale: 32.0
+2024-07-27 10:55:47,210 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.044e+01 7.331e+01 8.005e+01 8.983e+01 1.431e+02, threshold=1.601e+02, percent-clipped=0.0
+2024-07-27 10:55:49,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=6133.333333333333, ans=0.21250000000000002
+2024-07-27 10:55:54,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=6146.666666666667, ans=0.025
+2024-07-27 10:56:00,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=6146.666666666667, ans=0.6848666666666667
+2024-07-27 10:56:03,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.97 vs. limit=9.805
+2024-07-27 10:56:04,376 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 10:56:12,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=3.924
+2024-07-27 10:56:20,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.33 vs. limit=12.129999999999999
+2024-07-27 10:56:28,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=6200.0, ans=0.04083333333333333
+2024-07-27 10:56:29,196 INFO [train.py:1114] (1/4) Epoch 1, batch 4650, loss[loss=0.4549, simple_loss=0.4751, pruned_loss=0.2173, over 4840.00 frames. ], tot_loss[loss=0.4131, simple_loss=0.4395, pruned_loss=0.1933, over 939728.02 frames. ], batch size: 16, lr: 4.15e-02, grad_scale: 32.0
+2024-07-27 10:56:35,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6200.0, ans=0.238
+2024-07-27 10:56:38,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.58 vs. limit=6.553333333333333
+2024-07-27 10:56:53,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. limit=9.84
+2024-07-27 10:57:05,894 INFO [train.py:1114] (1/4) Epoch 1, batch 4700, loss[loss=0.3516, simple_loss=0.3962, pruned_loss=0.1535, over 4715.00 frames. ], tot_loss[loss=0.4127, simple_loss=0.4393, pruned_loss=0.1931, over 937088.56 frames. ], batch size: 11, lr: 4.14e-02, grad_scale: 32.0
+2024-07-27 10:57:06,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.27 vs. limit=8.133333333333333
+2024-07-27 10:57:07,352 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.557e+01 7.394e+01 8.015e+01 9.109e+01 1.664e+02, threshold=1.603e+02, percent-clipped=1.0
+2024-07-27 10:57:11,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. limit=6.566666666666666
+2024-07-27 10:57:16,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6280.0, ans=0.205625
+2024-07-27 10:57:19,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.35 vs. limit=9.86
+2024-07-27 10:57:25,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=6293.333333333333, ans=0.20500000000000002
+2024-07-27 10:57:29,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=6306.666666666667, ans=0.04038888888888889
+2024-07-27 10:57:33,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.73 vs. limit=9.865
+2024-07-27 10:57:35,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=6320.0, ans=0.20375
+2024-07-27 10:57:41,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=6320.0, ans=0.20375
+2024-07-27 10:57:42,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=6320.0, ans=0.20375
+2024-07-27 10:57:43,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=6333.333333333333, ans=0.04027777777777778
+2024-07-27 10:57:44,066 INFO [train.py:1114] (1/4) Epoch 1, batch 4750, loss[loss=0.4967, simple_loss=0.4908, pruned_loss=0.2514, over 4464.00 frames. ], tot_loss[loss=0.415, simple_loss=0.4405, pruned_loss=0.1948, over 935521.21 frames. ], batch size: 21, lr: 4.14e-02, grad_scale: 64.0
+2024-07-27 10:57:52,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=6333.333333333333, ans=0.04027777777777778
+2024-07-27 10:57:59,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=6346.666666666667, ans=0.8134666666666667
+2024-07-27 10:58:05,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=6360.0, ans=0.20187500000000003
+2024-07-27 10:58:06,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=9.885
+2024-07-27 10:58:21,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=6386.666666666667, ans=0.2958
+2024-07-27 10:58:22,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=6386.666666666667, ans=0.8138666666666666
+2024-07-27 10:58:26,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.61 vs. limit=6.6
+2024-07-27 10:58:26,654 INFO [train.py:1114] (1/4) Epoch 1, batch 4800, loss[loss=0.3758, simple_loss=0.4205, pruned_loss=0.1656, over 4692.00 frames. ], tot_loss[loss=0.4117, simple_loss=0.4379, pruned_loss=0.1927, over 932592.08 frames. ], batch size: 13, lr: 4.13e-02, grad_scale: 64.0
+2024-07-27 10:58:28,212 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.395e+01 7.298e+01 7.833e+01 8.734e+01 1.995e+02, threshold=1.567e+02, percent-clipped=2.0
+2024-07-27 10:58:29,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6400.0, ans=0.2
+2024-07-27 10:58:37,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=6413.333333333333, ans=0.035
+2024-07-27 10:58:38,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=6413.333333333333, ans=0.19937500000000002
+2024-07-27 10:58:45,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=6426.666666666667, ans=0.19874999999999998
+2024-07-27 10:58:53,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=9.915
+2024-07-27 10:58:53,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=6440.0, ans=0.03983333333333334
+2024-07-27 10:59:05,498 INFO [train.py:1114] (1/4) Epoch 1, batch 4850, loss[loss=0.3867, simple_loss=0.4284, pruned_loss=0.1725, over 4736.00 frames. ], tot_loss[loss=0.4105, simple_loss=0.4372, pruned_loss=0.1919, over 932088.79 frames. ], batch size: 14, lr: 4.12e-02, grad_scale: 64.0
+2024-07-27 10:59:05,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.61 vs. limit=9.925
+2024-07-27 10:59:10,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.34 vs. limit=9.925
+2024-07-27 10:59:23,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.79 vs. limit=6.597333333333333
+2024-07-27 10:59:26,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=6493.333333333333, ans=0.195625
+2024-07-27 10:59:27,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.02 vs. limit=9.94
+2024-07-27 10:59:44,801 INFO [train.py:1114] (1/4) Epoch 1, batch 4900, loss[loss=0.4342, simple_loss=0.4435, pruned_loss=0.2125, over 4760.00 frames. ], tot_loss[loss=0.4091, simple_loss=0.436, pruned_loss=0.1911, over 933786.36 frames. ], batch size: 13, lr: 4.12e-02, grad_scale: 64.0
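Editor's note: each `Whitening` line compares a covariance-flatness `metric` for a module's activations against a scheduled `limit` (itself a ScheduledFloat, per the `whitening_limit` entries above); a correction is only applied when the metric exceeds the limit. The sketch below uses one plausible metric, the eigenvalue-imbalance ratio trace(C^2)·D/trace(C)^2, which equals 1 for perfectly white features; this is an assumption for illustration, and the actual scaling.py formula may differ.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Illustrative proxy for the `metric=... vs. limit=...` log lines:
    ~1 when the per-group feature covariance is isotropic (white), and
    growing as its eigenvalues become unbalanced. Assumed formula."""
    num_frames, num_channels = x.shape
    xg = x.reshape(num_frames, num_groups, num_channels // num_groups)
    metrics = []
    for g in range(num_groups):
        f = xg[:, g, :]
        f = f - f.mean(dim=0)                 # center the features
        cov = (f.t() @ f) / num_frames        # per-group covariance
        d = cov.shape[0]
        # trace(C^2) * D / trace(C)^2 == 1 iff all eigenvalues are equal
        metrics.append((cov @ cov).trace() * d / cov.trace() ** 2)
    return float(torch.stack(metrics).mean())

torch.manual_seed(0)
# ~1.2 for white noise: 1 plus a D/N sampling bias (D=384, N=2000)
print(whitening_metric(torch.randn(2000, 384)))
```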
+2024-07-27 10:59:44,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=6533.333333333333, ans=0.05916666666666667
+2024-07-27 10:59:46,285 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.298e+01 7.338e+01 8.038e+01 8.614e+01 1.106e+02, threshold=1.608e+02, percent-clipped=0.0
+2024-07-27 10:59:50,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=6533.333333333333, ans=0.23466666666666666
+2024-07-27 10:59:54,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=6546.666666666667, ans=12.41
+2024-07-27 10:59:58,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6546.666666666667, ans=0.23453333333333332
+2024-07-27 11:00:10,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6573.333333333333, ans=0.23426666666666668
+2024-07-27 11:00:15,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=12.43
+2024-07-27 11:00:16,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=6586.666666666667, ans=0.03922222222222223
+2024-07-27 11:00:18,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=6586.666666666667, ans=0.025
+2024-07-27 11:00:20,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.71 vs. limit=12.440000000000001
+2024-07-27 11:00:23,789 INFO [train.py:1114] (1/4) Epoch 1, batch 4950, loss[loss=0.632, simple_loss=0.5685, pruned_loss=0.3477, over 3356.00 frames. ], tot_loss[loss=0.4124, simple_loss=0.4384, pruned_loss=0.1932, over 931292.01 frames. ], batch size: 36, lr: 4.11e-02, grad_scale: 64.0
+2024-07-27 11:00:30,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.15 vs. limit=9.975
+2024-07-27 11:00:35,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=6613.333333333333, ans=0.029333333333333336
+2024-07-27 11:00:41,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=6.650666666666667
+2024-07-27 11:00:46,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6640.0, ans=0.18874999999999997
+2024-07-27 11:00:48,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=6640.0, ans=0.18874999999999997
+2024-07-27 11:00:56,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=6653.333333333333, ans=0.03894444444444445
+2024-07-27 11:00:58,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6653.333333333333, ans=0.23346666666666666
+2024-07-27 11:01:00,650 INFO [train.py:1114] (1/4) Epoch 1, batch 5000, loss[loss=0.4952, simple_loss=0.513, pruned_loss=0.2387, over 4676.00 frames. ], tot_loss[loss=0.4097, simple_loss=0.4367, pruned_loss=0.1913, over 934938.38 frames. ], batch size: 14, lr: 4.10e-02, grad_scale: 64.0
+2024-07-27 11:01:02,001 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.106e+01 7.393e+01 8.012e+01 9.177e+01 1.350e+02, threshold=1.602e+02, percent-clipped=0.0
+2024-07-27 11:01:06,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.85 vs. limit=6.666666666666667
+2024-07-27 11:01:12,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=6680.0, ans=0.186875
+2024-07-27 11:01:20,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=6693.333333333333, ans=0.18625000000000003
+2024-07-27 11:01:27,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.21 vs. limit=12.530000000000001
+2024-07-27 11:01:30,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=6720.0, ans=0.6648000000000001
+2024-07-27 11:01:34,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6720.0, ans=0.185
+2024-07-27 11:01:36,541 INFO [train.py:1114] (1/4) Epoch 1, batch 5050, loss[loss=0.2754, simple_loss=0.3425, pruned_loss=0.1041, over 4867.00 frames. ], tot_loss[loss=0.4056, simple_loss=0.4342, pruned_loss=0.1885, over 937457.80 frames. ], batch size: 12, lr: 4.10e-02, grad_scale: 64.0
+2024-07-27 11:01:41,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=6733.333333333333, ans=0.04949747468305833
+2024-07-27 11:01:42,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6733.333333333333, ans=0.23266666666666666
+2024-07-27 11:01:48,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.40 vs. limit=12.559999999999999
+2024-07-27 11:01:51,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=6746.666666666667, ans=0.03855555555555556
+2024-07-27 11:01:53,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=6760.0, ans=0.0094
+2024-07-27 11:01:56,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=6760.0, ans=0.18312499999999998
+2024-07-27 11:02:01,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.99 vs. limit=4.016
+2024-07-27 11:02:03,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=6773.333333333333, ans=0.23226666666666668
+2024-07-27 11:02:14,725 INFO [train.py:1114] (1/4) Epoch 1, batch 5100, loss[loss=0.3749, simple_loss=0.4066, pruned_loss=0.1716, over 4780.00 frames. ], tot_loss[loss=0.4053, simple_loss=0.4334, pruned_loss=0.1886, over 934976.74 frames. ], batch size: 12, lr: 4.09e-02, grad_scale: 64.0
+2024-07-27 11:02:15,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=6800.0, ans=0.23199999999999998
+2024-07-27 11:02:16,193 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.973e+01 7.191e+01 7.778e+01 8.421e+01 1.083e+02, threshold=1.556e+02, percent-clipped=0.0
+2024-07-27 11:02:30,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=6826.666666666667, ans=0.18
+2024-07-27 11:02:30,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=6826.666666666667, ans=0.03822222222222223
+2024-07-27 11:02:39,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.83 vs. limit=10.065
+2024-07-27 11:02:50,865 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=10.075
+2024-07-27 11:02:51,277 INFO [train.py:1114] (1/4) Epoch 1, batch 5150, loss[loss=0.4463, simple_loss=0.466, pruned_loss=0.2133, over 4848.00 frames. ], tot_loss[loss=0.4089, simple_loss=0.4363, pruned_loss=0.1908, over 935972.98 frames. ], batch size: 16, lr: 4.09e-02, grad_scale: 64.0
+2024-07-27 11:02:53,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=6866.666666666667, ans=0.6596666666666666
+2024-07-27 11:02:55,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=6866.666666666667, ans=0.03805555555555556
+2024-07-27 11:03:03,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=12.66
+2024-07-27 11:03:09,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=6893.333333333333, ans=0.6587333333333334
+2024-07-27 11:03:15,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=6906.666666666667, ans=0.6582666666666667
+2024-07-27 11:03:19,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.01 vs. limit=12.68
+2024-07-27 11:03:20,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.42 vs. limit=12.68
+2024-07-27 11:03:33,708 INFO [train.py:1114] (1/4) Epoch 1, batch 5200, loss[loss=0.3886, simple_loss=0.4364, pruned_loss=0.1704, over 4661.00 frames. ], tot_loss[loss=0.4059, simple_loss=0.4345, pruned_loss=0.1886, over 936290.50 frames. ], batch size: 14, lr: 4.08e-02, grad_scale: 64.0
+2024-07-27 11:03:35,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.869e+01 7.238e+01 8.043e+01 8.705e+01 1.237e+02, threshold=1.609e+02, percent-clipped=0.0
+2024-07-27 11:03:35,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=6.773333333333333
+2024-07-27 11:03:52,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=6960.0, ans=0.04949747468305833
+2024-07-27 11:03:58,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=6973.333333333333, ans=0.6559333333333334
+2024-07-27 11:04:08,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.16 vs. limit=4.048
+2024-07-27 11:04:10,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=6986.666666666667, ans=0.1725
+2024-07-27 11:04:11,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=6986.666666666667, ans=0.1725
+2024-07-27 11:04:13,843 INFO [train.py:1114] (1/4) Epoch 1, batch 5250, loss[loss=0.3583, simple_loss=0.3951, pruned_loss=0.1607, over 4887.00 frames. ], tot_loss[loss=0.4027, simple_loss=0.4319, pruned_loss=0.1868, over 935937.64 frames. ], batch size: 13, lr: 4.07e-02, grad_scale: 64.0
+2024-07-27 11:04:16,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.86 vs. limit=10.125
+2024-07-27 11:04:36,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=7026.666666666667, ans=0.17062500000000003
+2024-07-27 11:04:53,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=7053.333333333333, ans=0.8205333333333333
+2024-07-27 11:04:58,089 INFO [train.py:1114] (1/4) Epoch 1, batch 5300, loss[loss=0.4468, simple_loss=0.4657, pruned_loss=0.2139, over 4649.00 frames. ], tot_loss[loss=0.4035, simple_loss=0.4318, pruned_loss=0.1876, over 934200.63 frames. ], batch size: 16, lr: 4.07e-02, grad_scale: 64.0
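Editor's note: in the `train.py:1114` lines, `loss[... over N frames]` is the current batch while `tot_loss[... over ~940k frames]` is a frame-weighted running average, which is why it moves slowly from batch to batch. A sketch of that bookkeeping with an assumed forgetting factor (0.9995 gives a steady-state window of roughly 940k frames at about 470 frames per batch, matching the scale logged above); the actual train.py averaging may differ:

```python
class RunningLoss:
    """Sketch of `tot_loss[... over N frames]`: a frame-weighted average
    with exponential forgetting. The decay constant is an assumption."""

    def __init__(self, decay: float = 0.9995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames  # the reported tot_loss

tracker = RunningLoss()
for _ in range(20000):
    tracker.update(batch_loss=0.42, batch_frames=470.0)
# frames approaches 470 / (1 - 0.9995) = 940000, the scale seen in the logs
print(f"tot_loss={tracker.loss_sum / tracker.frames:.4f} "
      f"over {tracker.frames:.2f} frames")
```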
+2024-07-27 11:05:04,093 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+01 7.230e+01 7.839e+01 8.733e+01 1.218e+02, threshold=1.568e+02, percent-clipped=0.0
+2024-07-27 11:05:09,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.95 vs. limit=6.77
+2024-07-27 11:05:22,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=7093.333333333333, ans=0.6517333333333334
+2024-07-27 11:05:24,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=10.165
+2024-07-27 11:05:25,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=7106.666666666667, ans=0.03705555555555556
+2024-07-27 11:05:26,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=7106.666666666667, ans=0.03705555555555556
+2024-07-27 11:05:28,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.01 vs. limit=4.066
+2024-07-27 11:05:34,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7120.0, ans=0.2288
+2024-07-27 11:05:41,456 INFO [train.py:1114] (1/4) Epoch 1, batch 5350, loss[loss=0.3524, simple_loss=0.3847, pruned_loss=0.1601, over 4504.00 frames. ], tot_loss[loss=0.401, simple_loss=0.4302, pruned_loss=0.1859, over 936541.22 frames. ], batch size: 10, lr: 4.06e-02, grad_scale: 64.0
+2024-07-27 11:05:52,237 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.39 vs. limit=5.429333333333333
+2024-07-27 11:06:00,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=7160.0, ans=0.036833333333333336
+2024-07-27 11:06:06,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=4.0760000000000005
+2024-07-27 11:06:16,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.10 vs. limit=6.796666666666667
+2024-07-27 11:06:16,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=7186.666666666667, ans=4.078
+2024-07-27 11:06:17,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=7186.666666666667, ans=0.6484666666666667
+2024-07-27 11:06:20,221 INFO [train.py:1114] (1/4) Epoch 1, batch 5400, loss[loss=0.4024, simple_loss=0.4265, pruned_loss=0.1892, over 4172.00 frames. ], tot_loss[loss=0.4039, simple_loss=0.4324, pruned_loss=0.1877, over 931479.74 frames. ], batch size: 25, lr: 4.05e-02, grad_scale: 64.0
+2024-07-27 11:06:21,687 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.213e+01 7.171e+01 7.909e+01 8.696e+01 2.349e+02, threshold=1.582e+02, percent-clipped=3.0
+2024-07-27 11:06:37,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.79 vs. limit=12.92
+2024-07-27 11:06:40,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.44 vs. limit=10.21
+2024-07-27 11:06:49,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=7253.333333333333, ans=0.15999999999999998
+2024-07-27 11:06:51,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=7253.333333333333, ans=0.036444444444444446
+2024-07-27 11:06:52,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7253.333333333333, ans=0.22746666666666665
+2024-07-27 11:06:54,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=7253.333333333333, ans=0.036444444444444446
+2024-07-27 11:06:55,831 INFO [train.py:1114] (1/4) Epoch 1, batch 5450, loss[loss=0.3501, simple_loss=0.3825, pruned_loss=0.1589, over 4695.00 frames. ], tot_loss[loss=0.4035, simple_loss=0.432, pruned_loss=0.1875, over 933874.56 frames. ], batch size: 11, lr: 4.05e-02, grad_scale: 64.0
+2024-07-27 11:06:59,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7266.666666666667, ans=0.22733333333333333
+2024-07-27 11:07:04,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.83 vs. limit=4.0920000000000005
+2024-07-27 11:07:13,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=7280.0, ans=0.15875
+2024-07-27 11:07:14,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.24 vs. limit=4.094
+2024-07-27 11:07:16,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=7293.333333333333, ans=0.6447333333333334
+2024-07-27 11:07:24,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7306.666666666667, ans=0.22693333333333332
+2024-07-27 11:07:27,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=7306.666666666667, ans=0.15749999999999997
+2024-07-27 11:07:32,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=7320.0, ans=0.2
+2024-07-27 11:07:37,161 INFO [train.py:1114] (1/4) Epoch 1, batch 5500, loss[loss=0.3778, simple_loss=0.4067, pruned_loss=0.1744, over 4255.00 frames. ], tot_loss[loss=0.4021, simple_loss=0.4303, pruned_loss=0.187, over 931090.96 frames. ], batch size: 25, lr: 4.04e-02, grad_scale: 64.0
+2024-07-27 11:07:37,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=7333.333333333333, ans=0.025
+2024-07-27 11:07:38,664 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.895e+01 7.344e+01 7.791e+01 8.854e+01 1.594e+02, threshold=1.558e+02, percent-clipped=1.0
+2024-07-27 11:07:41,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=10.25
+2024-07-27 11:07:50,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.75 vs. limit=13.01
+2024-07-27 11:07:57,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=7360.0, ans=0.2264
+2024-07-27 11:08:04,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=7373.333333333333, ans=0.6419333333333334
+2024-07-27 11:08:12,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.73 vs. limit=13.04
+2024-07-27 11:08:14,262 INFO [train.py:1114] (1/4) Epoch 1, batch 5550, loss[loss=0.3946, simple_loss=0.4254, pruned_loss=0.182, over 4707.00 frames. ], tot_loss[loss=0.4, simple_loss=0.4288, pruned_loss=0.1855, over 933501.01 frames. ], batch size: 12, lr: 4.03e-02, grad_scale: 64.0
+2024-07-27 11:08:14,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=13.05
+2024-07-27 11:08:18,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7400.0, ans=0.153125
+2024-07-27 11:08:37,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=7440.0, ans=9.65
+2024-07-27 11:08:41,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=7440.0, ans=0.035666666666666666
+2024-07-27 11:08:50,829 INFO [train.py:1114] (1/4) Epoch 1, batch 5600, loss[loss=0.3982, simple_loss=0.435, pruned_loss=0.1807, over 4739.00 frames. ], tot_loss[loss=0.4002, simple_loss=0.4295, pruned_loss=0.1855, over 934660.89 frames. ], batch size: 14, lr: 4.03e-02, grad_scale: 64.0
+2024-07-27 11:08:52,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.970e+01 7.181e+01 7.813e+01 8.583e+01 1.892e+02, threshold=1.563e+02, percent-clipped=1.0
+2024-07-27 11:08:55,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.49 vs. limit=6.866666666666667
+2024-07-27 11:09:06,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.05 vs. limit=13.120000000000001
+2024-07-27 11:09:11,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.20 vs. limit=13.120000000000001
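Editor's note: the logged learning rate decays smoothly across these batches, from 4.27e-02 around batch 3650 to about 4.03e-02 by batch 5600, consistent with an Eden-style inverse-quartic-root schedule. The sketch below reproduces the logged values under assumed constants (base_lr=0.045, lr_batches=7500, epoch term omitted); the recipe's actual scheduler settings are not shown in this log excerpt.

```python
def eden_lr(batch: int, base_lr: float = 0.045, lr_batches: float = 7500.0) -> float:
    """Eden-style decay sketch (batch term only, epoch term omitted).
    With these assumed constants it matches the logged values, e.g.
    batch 3650 -> ~4.27e-02, batch 5000 -> ~4.10e-02, batch 5600 -> ~4.03e-02."""
    return base_lr * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25

for b in (3650, 5000, 5600):
    print(b, f"{eden_lr(b):.2e}")
```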
limit=13.120000000000001 +2024-07-27 11:09:18,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.02 vs. limit=6.876666666666667 +2024-07-27 11:09:19,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7520.0, ans=0.035333333333333335 +2024-07-27 11:09:27,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=7520.0, ans=0.14750000000000002 +2024-07-27 11:09:28,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=13.15 +2024-07-27 11:09:29,070 INFO [train.py:1114] (1/4) Epoch 1, batch 5650, loss[loss=0.4151, simple_loss=0.4414, pruned_loss=0.1944, over 4521.00 frames. ], tot_loss[loss=0.3968, simple_loss=0.427, pruned_loss=0.1833, over 936881.92 frames. ], batch size: 21, lr: 4.02e-02, grad_scale: 64.0 +2024-07-27 11:09:34,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=7533.333333333333, ans=0.14687499999999998 +2024-07-27 11:09:41,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.24 vs. limit=8.773333333333333 +2024-07-27 11:09:49,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7560.0, ans=0.145625 +2024-07-27 11:10:05,074 INFO [train.py:1114] (1/4) Epoch 1, batch 5700, loss[loss=0.3494, simple_loss=0.3887, pruned_loss=0.1551, over 4691.00 frames. ], tot_loss[loss=0.395, simple_loss=0.4259, pruned_loss=0.182, over 937897.37 frames. ], batch size: 13, lr: 4.02e-02, grad_scale: 64.0 +2024-07-27 11:10:06,392 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.074e+01 7.227e+01 8.129e+01 9.173e+01 1.333e+02, threshold=1.626e+02, percent-clipped=0.0 +2024-07-27 11:10:07,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=7600.0, ans=0.14375 +2024-07-27 11:10:14,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=4.1419999999999995 +2024-07-27 11:10:21,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=7626.666666666667, ans=0.14250000000000002 +2024-07-27 11:10:23,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=13.219999999999999 +2024-07-27 11:10:30,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.18 vs. limit=10.365 +2024-07-27 11:10:36,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=7653.333333333333, ans=0.14125 +2024-07-27 11:10:37,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=7653.333333333333, ans=0.03477777777777778 +2024-07-27 11:10:41,772 INFO [train.py:1114] (1/4) Epoch 1, batch 5750, loss[loss=0.389, simple_loss=0.4261, pruned_loss=0.1759, over 4711.00 frames. 
], tot_loss[loss=0.395, simple_loss=0.4264, pruned_loss=0.1818, over 938069.89 frames. ], batch size: 19, lr: 4.01e-02, grad_scale: 64.0 +2024-07-27 11:11:00,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=7693.333333333333, ans=0.09899494936611666 +2024-07-27 11:11:11,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=22.05 vs. limit=10.395 +2024-07-27 11:11:22,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.45 vs. limit=8.86 +2024-07-27 11:11:25,452 INFO [train.py:1114] (1/4) Epoch 1, batch 5800, loss[loss=0.4623, simple_loss=0.4565, pruned_loss=0.234, over 4782.00 frames. ], tot_loss[loss=0.3969, simple_loss=0.4276, pruned_loss=0.1831, over 937711.78 frames. ], batch size: 19, lr: 4.00e-02, grad_scale: 64.0 +2024-07-27 11:11:26,802 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.680e+01 7.353e+01 8.081e+01 9.227e+01 1.347e+02, threshold=1.616e+02, percent-clipped=0.0 +2024-07-27 11:11:27,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.53 vs. limit=13.3 +2024-07-27 11:11:28,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.39 vs. limit=7.093333333333334 +2024-07-27 11:11:28,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=7733.333333333333, ans=0.034444444444444444 +2024-07-27 11:11:31,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.24 vs. limit=13.3 +2024-07-27 11:11:36,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.86 vs. limit=6.9366666666666665 +2024-07-27 11:11:50,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=7.109333333333334 +2024-07-27 11:12:02,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=7786.666666666667, ans=0.135 +2024-07-27 11:12:04,118 INFO [train.py:1114] (1/4) Epoch 1, batch 5850, loss[loss=0.434, simple_loss=0.4532, pruned_loss=0.2074, over 4485.00 frames. ], tot_loss[loss=0.3966, simple_loss=0.4276, pruned_loss=0.1828, over 937876.18 frames. ], batch size: 21, lr: 4.00e-02, grad_scale: 64.0 +2024-07-27 11:12:13,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=10.43 +2024-07-27 11:12:15,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.05 vs. 
limit=10.43 +2024-07-27 11:12:20,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=7826.666666666667, ans=0.133125 +2024-07-27 11:12:31,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7840.0, ans=0.1325 +2024-07-27 11:12:36,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=7853.333333333333, ans=0.05091666666666667 +2024-07-27 11:12:38,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7853.333333333333, ans=0.033944444444444444 +2024-07-27 11:12:39,507 INFO [train.py:1114] (1/4) Epoch 1, batch 5900, loss[loss=0.4805, simple_loss=0.4922, pruned_loss=0.2344, over 4702.00 frames. ], tot_loss[loss=0.3993, simple_loss=0.4294, pruned_loss=0.1846, over 937912.36 frames. ], batch size: 15, lr: 3.99e-02, grad_scale: 64.0 +2024-07-27 11:12:40,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.265e+01 7.754e+01 8.488e+01 1.052e+02, threshold=1.551e+02, percent-clipped=0.0 +2024-07-27 11:13:08,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=7920.0, ans=0.6228 +2024-07-27 11:13:10,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.63 vs. limit=13.440000000000001 +2024-07-27 11:13:14,896 INFO [train.py:1114] (1/4) Epoch 1, batch 5950, loss[loss=0.4365, simple_loss=0.4592, pruned_loss=0.2069, over 4686.00 frames. ], tot_loss[loss=0.3984, simple_loss=0.4293, pruned_loss=0.1838, over 939738.30 frames. ], batch size: 15, lr: 3.98e-02, grad_scale: 64.0 +2024-07-27 11:13:15,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=7933.333333333333, ans=0.319 +2024-07-27 11:13:21,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=10.475 +2024-07-27 11:13:45,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=7986.666666666667, ans=0.125625 +2024-07-27 11:13:47,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=10.495000000000001 +2024-07-27 11:13:49,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=10.495000000000001 +2024-07-27 11:13:50,889 INFO [train.py:1114] (1/4) Epoch 1, batch 6000, loss[loss=0.4701, simple_loss=0.4849, pruned_loss=0.2277, over 4196.00 frames. ], tot_loss[loss=0.3962, simple_loss=0.4275, pruned_loss=0.1824, over 936776.89 frames. ], batch size: 25, lr: 3.98e-02, grad_scale: 64.0 +2024-07-27 11:13:50,889 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 11:14:10,843 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.3721, 4.3271, 4.0179, 4.2024], device='cuda:1') +2024-07-27 11:14:16,112 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=0.3082, simple_loss=0.3886, pruned_loss=0.1139, over 944034.00 frames. 
+2024-07-27 11:14:16,113 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 11:14:16,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=8000.0, ans=0.125 +2024-07-27 11:14:17,454 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.690e+01 7.303e+01 7.945e+01 8.512e+01 1.515e+02, threshold=1.589e+02, percent-clipped=0.0 +2024-07-27 11:14:18,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=8000.0, ans=0.125 +2024-07-27 11:14:24,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=8013.333333333333, ans=0.125 +2024-07-27 11:14:32,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=8026.666666666667, ans=0.125 +2024-07-27 11:14:52,562 INFO [train.py:1114] (1/4) Epoch 1, batch 6050, loss[loss=0.3309, simple_loss=0.3833, pruned_loss=0.1393, over 4784.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.4252, pruned_loss=0.1803, over 938002.67 frames. ], batch size: 12, lr: 3.97e-02, grad_scale: 64.0 +2024-07-27 11:15:05,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.10 vs. limit=10.53 +2024-07-27 11:15:08,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=8093.333333333333, ans=0.125 +2024-07-27 11:15:29,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=8133.333333333333, ans=0.6153333333333333 +2024-07-27 11:15:30,042 INFO [train.py:1114] (1/4) Epoch 1, batch 6100, loss[loss=0.343, simple_loss=0.4138, pruned_loss=0.1361, over 4689.00 frames. ], tot_loss[loss=0.391, simple_loss=0.4243, pruned_loss=0.1788, over 937458.67 frames. ], batch size: 15, lr: 3.96e-02, grad_scale: 64.0 +2024-07-27 11:15:31,489 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.445e+01 6.771e+01 7.517e+01 8.445e+01 1.300e+02, threshold=1.503e+02, percent-clipped=0.0 +2024-07-27 11:15:31,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=8133.333333333333, ans=0.125 +2024-07-27 11:15:31,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=8133.333333333333, ans=0.125 +2024-07-27 11:15:32,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.20 vs. limit=10.55 +2024-07-27 11:15:32,952 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:15:33,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=8133.333333333333, ans=0.03277777777777778 +2024-07-27 11:15:38,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.71 vs. limit=13.61 +2024-07-27 11:16:05,332 INFO [train.py:1114] (1/4) Epoch 1, batch 6150, loss[loss=0.5182, simple_loss=0.51, pruned_loss=0.2632, over 3523.00 frames. ], tot_loss[loss=0.3901, simple_loss=0.4235, pruned_loss=0.1784, over 936530.77 frames. 
], batch size: 35, lr: 3.96e-02, grad_scale: 64.0 +2024-07-27 11:16:09,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.10 vs. limit=7.279999999999999 +2024-07-27 11:16:20,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.77 vs. limit=10.58 +2024-07-27 11:16:24,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.68 vs. limit=7.056666666666667 +2024-07-27 11:16:34,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=10.59 +2024-07-27 11:16:40,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=8253.333333333334, ans=0.03227777777777778 +2024-07-27 11:16:42,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.88 vs. limit=7.0633333333333335 +2024-07-27 11:16:43,761 INFO [train.py:1114] (1/4) Epoch 1, batch 6200, loss[loss=0.3892, simple_loss=0.4194, pruned_loss=0.1795, over 4742.00 frames. ], tot_loss[loss=0.3915, simple_loss=0.4243, pruned_loss=0.1793, over 936035.11 frames. ], batch size: 14, lr: 3.95e-02, grad_scale: 64.0 +2024-07-27 11:16:45,337 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+01 7.091e+01 7.789e+01 8.708e+01 1.298e+02, threshold=1.558e+02, percent-clipped=0.0 +2024-07-27 11:17:00,839 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.279e+00 +2024-07-27 11:17:07,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=8280.0, ans=0.125 +2024-07-27 11:17:09,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.20 vs. limit=13.71 +2024-07-27 11:17:17,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=8293.333333333334, ans=0.03211111111111111 +2024-07-27 11:17:18,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.08 vs. limit=10.61 +2024-07-27 11:17:26,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=8320.0, ans=10.0 +2024-07-27 11:17:26,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=8320.0, ans=10.0 +2024-07-27 11:17:27,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=8320.0, ans=0.009060869565217391 +2024-07-27 11:17:28,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=8320.0, ans=0.6088 +2024-07-27 11:17:32,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.95 vs. 
limit=13.74 +2024-07-27 11:17:33,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.19 vs. limit=10.620000000000001 +2024-07-27 11:17:33,941 INFO [train.py:1114] (1/4) Epoch 1, batch 6250, loss[loss=0.4007, simple_loss=0.4543, pruned_loss=0.1735, over 4807.00 frames. ], tot_loss[loss=0.3936, simple_loss=0.4258, pruned_loss=0.1807, over 932593.55 frames. ], batch size: 14, lr: 3.94e-02, grad_scale: 64.0 +2024-07-27 11:17:36,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.60 vs. limit=10.625 +2024-07-27 11:17:38,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.80 vs. limit=13.75 +2024-07-27 11:17:41,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=8346.666666666666, ans=0.0318888888888889 +2024-07-27 11:17:46,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.91 vs. limit=10.629999999999999 +2024-07-27 11:17:54,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=8360.0, ans=0.6073999999999999 +2024-07-27 11:18:02,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.83 vs. limit=10.645 +2024-07-27 11:18:03,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=8386.666666666666, ans=0.125 +2024-07-27 11:18:09,416 INFO [train.py:1114] (1/4) Epoch 1, batch 6300, loss[loss=0.3451, simple_loss=0.3836, pruned_loss=0.1533, over 4548.00 frames. ], tot_loss[loss=0.3936, simple_loss=0.4256, pruned_loss=0.1807, over 929063.00 frames. ], batch size: 10, lr: 3.94e-02, grad_scale: 64.0 +2024-07-27 11:18:09,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=8400.0, ans=0.07 +2024-07-27 11:18:10,748 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.284e+01 7.148e+01 7.847e+01 8.773e+01 1.332e+02, threshold=1.569e+02, percent-clipped=0.0 +2024-07-27 11:18:16,112 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.24 vs. limit=4.2620000000000005 +2024-07-27 11:18:34,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.46 vs. limit=13.83 +2024-07-27 11:18:36,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=8453.333333333334, ans=0.025 +2024-07-27 11:18:43,768 INFO [train.py:1114] (1/4) Epoch 1, batch 6350, loss[loss=0.3647, simple_loss=0.4023, pruned_loss=0.1635, over 4529.00 frames. ], tot_loss[loss=0.3903, simple_loss=0.4237, pruned_loss=0.1785, over 933284.68 frames. 
], batch size: 21, lr: 3.93e-02, grad_scale: 64.0 +2024-07-27 11:18:46,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=8466.666666666666, ans=0.125 +2024-07-27 11:18:59,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=8493.333333333334, ans=0.125 +2024-07-27 11:19:02,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.09 vs. limit=13.870000000000001 +2024-07-27 11:19:05,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.37 vs. limit=4.276 +2024-07-27 11:19:06,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.90 vs. limit=10.69 +2024-07-27 11:19:10,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8506.666666666666, ans=0.21493333333333334 +2024-07-27 11:19:10,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=8506.666666666666, ans=0.125 +2024-07-27 11:19:12,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.39 vs. limit=7.4079999999999995 +2024-07-27 11:19:14,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.96 vs. limit=9.26 +2024-07-27 11:19:15,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.89 vs. limit=4.2780000000000005 +2024-07-27 11:19:19,052 INFO [train.py:1114] (1/4) Epoch 1, batch 6400, loss[loss=0.3947, simple_loss=0.4474, pruned_loss=0.171, over 4636.00 frames. ], tot_loss[loss=0.3892, simple_loss=0.4228, pruned_loss=0.1777, over 935176.21 frames. ], batch size: 13, lr: 3.92e-02, grad_scale: 64.0 +2024-07-27 11:19:20,477 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.135e+01 7.649e+01 8.994e+01 1.161e+02, threshold=1.530e+02, percent-clipped=0.0 +2024-07-27 11:19:20,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.20 vs. limit=7.133333333333334 +2024-07-27 11:19:22,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=13.9 +2024-07-27 11:19:22,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.73 vs. 
limit=7.133333333333334 +2024-07-27 11:19:25,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=8546.666666666666, ans=0.125 +2024-07-27 11:19:36,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=8546.666666666666, ans=0.6008666666666667 +2024-07-27 11:19:44,914 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.12 vs. limit=13.92 +2024-07-27 11:19:46,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=8560.0, ans=0.009008695652173913 +2024-07-27 11:20:02,091 INFO [train.py:1114] (1/4) Epoch 1, batch 6450, loss[loss=0.4557, simple_loss=0.466, pruned_loss=0.2227, over 4456.00 frames. ], tot_loss[loss=0.3904, simple_loss=0.4244, pruned_loss=0.1782, over 938567.21 frames. ], batch size: 21, lr: 3.92e-02, grad_scale: 64.0 +2024-07-27 11:20:11,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.35 vs. limit=7.153333333333334 +2024-07-27 11:20:23,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.26 vs. limit=9.313333333333333 +2024-07-27 11:20:23,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8640.0, ans=0.2136 +2024-07-27 11:20:26,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=8640.0, ans=0.008991304347826088 +2024-07-27 11:20:26,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=8640.0, ans=0.125 +2024-07-27 11:20:38,003 INFO [train.py:1114] (1/4) Epoch 1, batch 6500, loss[loss=0.5424, simple_loss=0.5176, pruned_loss=0.2836, over 3437.00 frames. ], tot_loss[loss=0.3901, simple_loss=0.4247, pruned_loss=0.1777, over 940015.77 frames. ], batch size: 35, lr: 3.91e-02, grad_scale: 64.0 +2024-07-27 11:20:39,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=8666.666666666666, ans=0.125 +2024-07-27 11:20:39,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.881e+01 7.078e+01 7.610e+01 8.619e+01 1.357e+02, threshold=1.522e+02, percent-clipped=0.0 +2024-07-27 11:20:43,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.99 vs. limit=7.166666666666666 +2024-07-27 11:20:52,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8680.0, ans=0.2132 +2024-07-27 11:21:14,507 INFO [train.py:1114] (1/4) Epoch 1, batch 6550, loss[loss=0.3357, simple_loss=0.3747, pruned_loss=0.1483, over 4817.00 frames. ], tot_loss[loss=0.3875, simple_loss=0.4231, pruned_loss=0.1759, over 942973.81 frames. 
], batch size: 11, lr: 3.91e-02, grad_scale: 64.0 +2024-07-27 11:21:23,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8746.666666666666, ans=0.125 +2024-07-27 11:21:32,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8760.0, ans=0.2124 +2024-07-27 11:21:39,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8773.333333333334, ans=0.21226666666666666 +2024-07-27 11:21:48,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=8800.0, ans=0.125 +2024-07-27 11:21:49,211 INFO [train.py:1114] (1/4) Epoch 1, batch 6600, loss[loss=0.4202, simple_loss=0.4458, pruned_loss=0.1973, over 4938.00 frames. ], tot_loss[loss=0.3866, simple_loss=0.4221, pruned_loss=0.1756, over 944999.13 frames. ], batch size: 14, lr: 3.90e-02, grad_scale: 64.0 +2024-07-27 11:21:50,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=8800.0, ans=0.125 +2024-07-27 11:21:50,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.093e+01 7.005e+01 7.535e+01 8.213e+01 1.214e+02, threshold=1.507e+02, percent-clipped=0.0 +2024-07-27 11:21:55,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=8813.333333333334, ans=0.008953623188405797 +2024-07-27 11:21:55,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8813.333333333334, ans=0.21186666666666665 +2024-07-27 11:22:00,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=8813.333333333334, ans=0.125 +2024-07-27 11:22:06,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.99 vs. limit=9.413333333333334 +2024-07-27 11:22:19,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=8853.333333333334, ans=0.05 +2024-07-27 11:22:25,932 INFO [train.py:1114] (1/4) Epoch 1, batch 6650, loss[loss=0.4506, simple_loss=0.4747, pruned_loss=0.2132, over 4656.00 frames. ], tot_loss[loss=0.3863, simple_loss=0.4218, pruned_loss=0.1754, over 943729.69 frames. 
], batch size: 17, lr: 3.89e-02, grad_scale: 64.0 +2024-07-27 11:22:36,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=8880.0, ans=0.125 +2024-07-27 11:22:40,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=8893.333333333334, ans=0.0 +2024-07-27 11:22:41,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=8893.333333333334, ans=0.125 +2024-07-27 11:22:53,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=8906.666666666666, ans=14.18 +2024-07-27 11:23:01,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8920.0, ans=0.2108 +2024-07-27 11:23:05,441 INFO [train.py:1114] (1/4) Epoch 1, batch 6700, loss[loss=0.3979, simple_loss=0.4454, pruned_loss=0.1752, over 4697.00 frames. ], tot_loss[loss=0.3879, simple_loss=0.4234, pruned_loss=0.1761, over 942548.09 frames. ], batch size: 19, lr: 3.89e-02, grad_scale: 64.0 +2024-07-27 11:23:06,706 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.382e+01 7.413e+01 7.948e+01 9.118e+01 1.138e+02, threshold=1.590e+02, percent-clipped=0.0 +2024-07-27 11:23:17,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=8946.666666666666, ans=0.125 +2024-07-27 11:23:17,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8946.666666666666, ans=0.21053333333333335 +2024-07-27 11:23:18,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.41 vs. limit=9.473333333333333 +2024-07-27 11:23:22,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8960.0, ans=0.2104 +2024-07-27 11:23:27,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.63 vs. limit=14.23 +2024-07-27 11:23:32,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=8973.333333333334, ans=0.029277777777777778 +2024-07-27 11:23:37,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=8986.666666666666, ans=10.0 +2024-07-27 11:23:38,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=8986.666666666666, ans=0.029222222222222226 +2024-07-27 11:23:43,204 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:23:43,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9000.0, ans=0.21000000000000002 +2024-07-27 11:23:44,426 INFO [train.py:1114] (1/4) Epoch 1, batch 6750, loss[loss=0.4824, simple_loss=0.4971, pruned_loss=0.2339, over 4215.00 frames. ], tot_loss[loss=0.3894, simple_loss=0.424, pruned_loss=0.1774, over 940396.00 frames. 
], batch size: 25, lr: 3.88e-02, grad_scale: 128.0 +2024-07-27 11:23:52,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=9013.333333333334, ans=0.02911111111111111 +2024-07-27 11:23:55,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=9013.333333333334, ans=0.125 +2024-07-27 11:23:56,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=9013.333333333334, ans=0.125 +2024-07-27 11:24:02,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=9026.666666666666, ans=0.02905555555555556 +2024-07-27 11:24:19,784 INFO [train.py:1114] (1/4) Epoch 1, batch 6800, loss[loss=0.3837, simple_loss=0.4161, pruned_loss=0.1756, over 4629.00 frames. ], tot_loss[loss=0.3875, simple_loss=0.423, pruned_loss=0.176, over 938369.52 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 128.0 +2024-07-27 11:24:21,069 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.897e+01 7.261e+01 7.946e+01 8.901e+01 1.743e+02, threshold=1.589e+02, percent-clipped=1.0 +2024-07-27 11:24:26,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=9080.0, ans=0.04949747468305833 +2024-07-27 11:24:27,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.52 vs. limit=14.309999999999999 +2024-07-27 11:24:32,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=9080.0, ans=0.025 +2024-07-27 11:24:37,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9093.333333333334, ans=0.0 +2024-07-27 11:24:37,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.39 vs. limit=14.32 +2024-07-27 11:24:45,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.63 vs. limit=14.33 +2024-07-27 11:24:49,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9120.0, ans=0.20879999999999999 +2024-07-27 11:24:49,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.22 vs. limit=10.92 +2024-07-27 11:24:53,711 INFO [train.py:1114] (1/4) Epoch 1, batch 6850, loss[loss=0.3186, simple_loss=0.4011, pruned_loss=0.118, over 4698.00 frames. ], tot_loss[loss=0.3858, simple_loss=0.4217, pruned_loss=0.1749, over 940244.18 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 64.0 +2024-07-27 11:24:57,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=9133.333333333334, ans=0.008884057971014492 +2024-07-27 11:25:18,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.64 vs. 
limit=9.586666666666666 +2024-07-27 11:25:24,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9186.666666666666, ans=0.0 +2024-07-27 11:25:28,389 INFO [train.py:1114] (1/4) Epoch 1, batch 6900, loss[loss=0.3237, simple_loss=0.3687, pruned_loss=0.1393, over 4964.00 frames. ], tot_loss[loss=0.3862, simple_loss=0.4224, pruned_loss=0.175, over 942405.08 frames. ], batch size: 13, lr: 3.86e-02, grad_scale: 64.0 +2024-07-27 11:25:29,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9200.0, ans=0.20800000000000002 +2024-07-27 11:25:30,371 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.982e+01 7.530e+01 8.620e+01 1.386e+02, threshold=1.506e+02, percent-clipped=0.0 +2024-07-27 11:25:40,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=9213.333333333334, ans=0.025 +2024-07-27 11:25:44,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=9226.666666666666, ans=0.00886376811594203 +2024-07-27 11:25:46,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=9226.666666666666, ans=0.028222222222222225 +2024-07-27 11:25:48,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.09 vs. limit=10.965 +2024-07-27 11:26:00,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=9253.333333333334, ans=0.008857971014492753 +2024-07-27 11:26:00,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9253.333333333334, ans=0.125 +2024-07-27 11:26:02,696 INFO [train.py:1114] (1/4) Epoch 1, batch 6950, loss[loss=0.3416, simple_loss=0.3776, pruned_loss=0.1528, over 4519.00 frames. ], tot_loss[loss=0.3851, simple_loss=0.4215, pruned_loss=0.1743, over 939762.25 frames. ], batch size: 10, lr: 3.85e-02, grad_scale: 64.0 +2024-07-27 11:26:02,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9266.666666666666, ans=0.20733333333333334 +2024-07-27 11:26:15,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=10.98 +2024-07-27 11:26:19,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.15 vs. limit=10.985 +2024-07-27 11:26:26,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.98 vs. limit=14.48 +2024-07-27 11:26:37,593 INFO [train.py:1114] (1/4) Epoch 1, batch 7000, loss[loss=0.4643, simple_loss=0.4951, pruned_loss=0.2168, over 4606.00 frames. ], tot_loss[loss=0.3831, simple_loss=0.4198, pruned_loss=0.1732, over 938476.44 frames. 
], batch size: 17, lr: 3.85e-02, grad_scale: 64.0 +2024-07-27 11:26:37,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=9333.333333333334, ans=0.125 +2024-07-27 11:26:37,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=9333.333333333334, ans=0.125 +2024-07-27 11:26:38,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=9333.333333333334, ans=0.025 +2024-07-27 11:26:39,625 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.819e+01 7.301e+01 8.158e+01 9.084e+01 2.160e+02, threshold=1.632e+02, percent-clipped=1.0 +2024-07-27 11:26:40,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=9333.333333333334, ans=0.008840579710144927 +2024-07-27 11:26:43,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=14.5 +2024-07-27 11:26:44,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.25 vs. limit=14.51 +2024-07-27 11:27:02,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=11.01 +2024-07-27 11:27:28,083 INFO [train.py:1114] (1/4) Epoch 1, batch 7050, loss[loss=0.3337, simple_loss=0.3818, pruned_loss=0.1428, over 4679.00 frames. ], tot_loss[loss=0.3817, simple_loss=0.419, pruned_loss=0.1722, over 941778.39 frames. ], batch size: 19, lr: 3.84e-02, grad_scale: 64.0 +2024-07-27 11:27:29,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=9400.0, ans=0.34099999999999997 +2024-07-27 11:28:02,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=9440.0, ans=0.125 +2024-07-27 11:28:04,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=9440.0, ans=0.027333333333333334 +2024-07-27 11:28:08,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=9453.333333333334, ans=0.125 +2024-07-27 11:28:14,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=9466.666666666666, ans=0.125 +2024-07-27 11:28:15,350 INFO [train.py:1114] (1/4) Epoch 1, batch 7100, loss[loss=0.3531, simple_loss=0.4058, pruned_loss=0.1502, over 4792.00 frames. ], tot_loss[loss=0.3849, simple_loss=0.4208, pruned_loss=0.1745, over 936552.73 frames. ], batch size: 15, lr: 3.83e-02, grad_scale: 64.0 +2024-07-27 11:28:17,407 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.989e+01 7.688e+01 8.481e+01 1.289e+02, threshold=1.538e+02, percent-clipped=0.0 +2024-07-27 11:28:24,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=9480.0, ans=0.125 +2024-07-27 11:28:33,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.16 vs. 
limit=7.373333333333333 +2024-07-27 11:28:39,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.21 vs. limit=7.376666666666667 +2024-07-27 11:28:42,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9520.0, ans=0.20479999999999998 +2024-07-27 11:28:44,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=9520.0, ans=0.025 +2024-07-27 11:28:45,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=9520.0, ans=0.125 +2024-07-27 11:28:47,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=11.07 +2024-07-27 11:28:50,200 INFO [train.py:1114] (1/4) Epoch 1, batch 7150, loss[loss=0.4202, simple_loss=0.4434, pruned_loss=0.1985, over 4554.00 frames. ], tot_loss[loss=0.3812, simple_loss=0.4176, pruned_loss=0.1724, over 937739.11 frames. ], batch size: 21, lr: 3.83e-02, grad_scale: 64.0 +2024-07-27 11:29:08,632 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.16 vs. limit=11.085 +2024-07-27 11:29:10,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=9560.0, ans=10.0 +2024-07-27 11:29:25,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.69 vs. limit=14.7 +2024-07-27 11:29:26,389 INFO [train.py:1114] (1/4) Epoch 1, batch 7200, loss[loss=0.4623, simple_loss=0.4862, pruned_loss=0.2192, over 4798.00 frames. ], tot_loss[loss=0.3826, simple_loss=0.419, pruned_loss=0.1732, over 938071.11 frames. ], batch size: 15, lr: 3.82e-02, grad_scale: 64.0 +2024-07-27 11:29:28,268 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.182e+01 6.919e+01 7.589e+01 8.160e+01 1.329e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 11:29:28,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.65 vs. limit=11.1 +2024-07-27 11:29:35,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=9613.333333333334, ans=0.125 +2024-07-27 11:29:36,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. 
limit=11.105 +2024-07-27 11:29:40,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=9626.666666666666, ans=0.0087768115942029 +2024-07-27 11:29:41,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9626.666666666666, ans=0.125 +2024-07-27 11:29:44,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=9626.666666666666, ans=0.125 +2024-07-27 11:29:52,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=9640.0, ans=0.125 +2024-07-27 11:29:54,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=9640.0, ans=0.026500000000000003 +2024-07-27 11:30:56,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=11.120000000000001 +2024-07-27 11:30:57,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9653.333333333334, ans=0.20346666666666666 +2024-07-27 11:31:01,722 INFO [train.py:1114] (1/4) Epoch 1, batch 7250, loss[loss=0.2978, simple_loss=0.3478, pruned_loss=0.1239, over 4848.00 frames. ], tot_loss[loss=0.3808, simple_loss=0.4174, pruned_loss=0.1721, over 939703.34 frames. ], batch size: 12, lr: 3.82e-02, grad_scale: 64.0 +2024-07-27 11:31:04,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=9666.666666666666, ans=10.0 +2024-07-27 11:31:05,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=9666.666666666666, ans=0.125 +2024-07-27 11:31:11,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=9680.0, ans=0.025 +2024-07-27 11:31:14,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.49 vs. limit=11.135 +2024-07-27 11:31:21,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=9706.666666666666, ans=0.125 +2024-07-27 11:31:23,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=9706.666666666666, ans=0.125 +2024-07-27 11:31:25,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=23.22 vs. limit=11.14 +2024-07-27 11:31:28,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=9706.666666666666, ans=0.125 +2024-07-27 11:31:35,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=9720.0, ans=0.2 +2024-07-27 11:31:37,467 INFO [train.py:1114] (1/4) Epoch 1, batch 7300, loss[loss=0.3448, simple_loss=0.3862, pruned_loss=0.1517, over 4853.00 frames. ], tot_loss[loss=0.3794, simple_loss=0.4167, pruned_loss=0.1711, over 939920.29 frames. 
], batch size: 12, lr: 3.81e-02, grad_scale: 64.0 +2024-07-27 11:31:39,741 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.899e+01 6.987e+01 7.392e+01 8.309e+01 1.190e+02, threshold=1.478e+02, percent-clipped=0.0 +2024-07-27 11:31:50,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=9746.666666666666, ans=0.125 +2024-07-27 11:31:59,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=9773.333333333334, ans=0.125 +2024-07-27 11:32:12,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. limit=11.17 +2024-07-27 11:32:14,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.37 vs. limit=11.17 +2024-07-27 11:32:17,894 INFO [train.py:1114] (1/4) Epoch 1, batch 7350, loss[loss=0.2898, simple_loss=0.3552, pruned_loss=0.1122, over 4648.00 frames. ], tot_loss[loss=0.3807, simple_loss=0.4178, pruned_loss=0.1718, over 939062.14 frames. ], batch size: 12, lr: 3.80e-02, grad_scale: 64.0 +2024-07-27 11:32:20,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=9800.0, ans=0.5569999999999999 +2024-07-27 11:32:25,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=9813.333333333334, ans=0.008736231884057971 +2024-07-27 11:32:26,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.18 vs. limit=14.86 +2024-07-27 11:32:30,045 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.55 vs. limit=4.4719999999999995 +2024-07-27 11:32:32,455 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:32:36,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.57 vs. limit=11.185 +2024-07-27 11:32:52,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=9853.333333333334, ans=0.025611111111111112 +2024-07-27 11:32:54,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.48 vs. limit=11.2 +2024-07-27 11:32:55,342 INFO [train.py:1114] (1/4) Epoch 1, batch 7400, loss[loss=0.3324, simple_loss=0.3815, pruned_loss=0.1416, over 4704.00 frames. ], tot_loss[loss=0.3807, simple_loss=0.4183, pruned_loss=0.1715, over 940265.38 frames. ], batch size: 13, lr: 3.80e-02, grad_scale: 64.0 +2024-07-27 11:32:57,410 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.927e+01 7.410e+01 8.183e+01 1.194e+02, threshold=1.482e+02, percent-clipped=0.0 +2024-07-27 11:32:58,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=9866.666666666666, ans=0.125 +2024-07-27 11:33:01,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.42 vs. 
limit=11.205 +2024-07-27 11:33:12,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=9893.333333333334, ans=0.125 +2024-07-27 11:33:19,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9906.666666666666, ans=0.20093333333333335 +2024-07-27 11:33:37,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=9933.333333333334, ans=0.5523333333333333 +2024-07-27 11:33:37,719 INFO [train.py:1114] (1/4) Epoch 1, batch 7450, loss[loss=0.2946, simple_loss=0.3475, pruned_loss=0.1208, over 4609.00 frames. ], tot_loss[loss=0.3795, simple_loss=0.4167, pruned_loss=0.1711, over 937811.17 frames. ], batch size: 11, lr: 3.79e-02, grad_scale: 64.0 +2024-07-27 11:33:38,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.03 vs. limit=14.95 +2024-07-27 11:33:44,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=9946.666666666666, ans=0.5518666666666667 +2024-07-27 11:33:53,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.94 vs. limit=11.235 +2024-07-27 11:34:04,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=9986.666666666666, ans=0.125 +2024-07-27 11:34:38,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=11.245000000000001 +2024-07-27 11:34:39,721 INFO [train.py:1114] (1/4) Epoch 1, batch 7500, loss[loss=0.4955, simple_loss=0.4806, pruned_loss=0.2552, over 3293.00 frames. ], tot_loss[loss=0.3796, simple_loss=0.4171, pruned_loss=0.171, over 935927.52 frames. ], batch size: 35, lr: 3.78e-02, grad_scale: 64.0 +2024-07-27 11:34:41,634 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.970e+01 7.592e+01 8.473e+01 1.449e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 11:34:45,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=10013.333333333334, ans=0.5495333333333334 +2024-07-27 11:34:51,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=10013.333333333334, ans=0.024944444444444446 +2024-07-27 11:34:51,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=10013.333333333334, ans=0.125 +2024-07-27 11:34:52,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.63 vs. limit=15.01 +2024-07-27 11:35:12,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=10026.666666666666, ans=0.125 +2024-07-27 11:35:18,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10040.0, ans=0.1996 +2024-07-27 11:35:25,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.86 vs. 
limit=4.508 +2024-07-27 11:35:28,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=4.51 +2024-07-27 11:35:29,207 INFO [train.py:1114] (1/4) Epoch 1, batch 7550, loss[loss=0.4644, simple_loss=0.4814, pruned_loss=0.2238, over 4582.00 frames. ], tot_loss[loss=0.3809, simple_loss=0.4188, pruned_loss=0.1715, over 935799.35 frames. ], batch size: 17, lr: 3.78e-02, grad_scale: 64.0 +2024-07-27 11:35:29,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.30 vs. limit=15.05 +2024-07-27 11:35:32,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=10066.666666666666, ans=0.024722222222222225 +2024-07-27 11:35:48,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=10093.333333333334, ans=0.125 +2024-07-27 11:36:02,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.02 vs. limit=11.29 +2024-07-27 11:36:09,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=10120.0, ans=0.0 +2024-07-27 11:36:10,594 INFO [train.py:1114] (1/4) Epoch 1, batch 7600, loss[loss=0.3982, simple_loss=0.4308, pruned_loss=0.1828, over 4811.00 frames. ], tot_loss[loss=0.3795, simple_loss=0.418, pruned_loss=0.1705, over 937745.39 frames. ], batch size: 14, lr: 3.77e-02, grad_scale: 64.0 +2024-07-27 11:36:12,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=10133.333333333334, ans=0.125 +2024-07-27 11:36:12,581 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.409e+01 6.929e+01 7.591e+01 8.810e+01 1.172e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 11:36:14,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=10133.333333333334, ans=0.008666666666666666 +2024-07-27 11:36:15,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.72 vs. limit=15.1 +2024-07-27 11:36:40,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=10146.666666666666, ans=0.0 +2024-07-27 11:36:45,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=10160.0, ans=0.125 +2024-07-27 11:36:48,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=10173.333333333334, ans=0.5439333333333334 +2024-07-27 11:36:50,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=10173.333333333334, ans=0.125 +2024-07-27 11:36:54,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=10173.333333333334, ans=0.035 +2024-07-27 11:37:02,937 INFO [train.py:1114] (1/4) Epoch 1, batch 7650, loss[loss=0.3404, simple_loss=0.3696, pruned_loss=0.1556, over 4958.00 frames. ], tot_loss[loss=0.3799, simple_loss=0.4177, pruned_loss=0.1711, over 937350.18 frames. 
], batch size: 12, lr: 3.77e-02, grad_scale: 64.0 +2024-07-27 11:37:06,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.37 vs. limit=15.15 +2024-07-27 11:37:08,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=10200.0, ans=0.025 +2024-07-27 11:37:18,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=10226.666666666666, ans=0.125 +2024-07-27 11:37:21,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.56 vs. limit=10.113333333333333 +2024-07-27 11:37:35,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=10253.333333333334, ans=0.07 +2024-07-27 11:37:36,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=11.345 +2024-07-27 11:37:37,473 INFO [train.py:1114] (1/4) Epoch 1, batch 7700, loss[loss=0.3824, simple_loss=0.429, pruned_loss=0.1679, over 4691.00 frames. ], tot_loss[loss=0.3809, simple_loss=0.4185, pruned_loss=0.1717, over 934791.77 frames. ], batch size: 13, lr: 3.76e-02, grad_scale: 64.0 +2024-07-27 11:37:38,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10266.666666666666, ans=0.19733333333333333 +2024-07-27 11:37:39,433 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.134e+01 7.002e+01 7.732e+01 8.804e+01 1.160e+02, threshold=1.546e+02, percent-clipped=0.0 +2024-07-27 11:37:41,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.65 vs. limit=15.2 +2024-07-27 11:37:45,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=10280.0, ans=0.125 +2024-07-27 11:37:47,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=11.355 +2024-07-27 11:37:56,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.12 vs. limit=11.36 +2024-07-27 11:38:07,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=10320.0, ans=0.07 +2024-07-27 11:38:10,885 INFO [train.py:1114] (1/4) Epoch 1, batch 7750, loss[loss=0.3374, simple_loss=0.3955, pruned_loss=0.1397, over 4935.00 frames. ], tot_loss[loss=0.3817, simple_loss=0.42, pruned_loss=0.1717, over 935864.25 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0 +2024-07-27 11:38:26,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.27 vs. limit=15.27 +2024-07-27 11:38:39,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=10386.666666666666, ans=0.023388888888888893 +2024-07-27 11:38:45,450 INFO [train.py:1114] (1/4) Epoch 1, batch 7800, loss[loss=0.476, simple_loss=0.4996, pruned_loss=0.2262, over 4681.00 frames. 
], tot_loss[loss=0.3809, simple_loss=0.4202, pruned_loss=0.1708, over 937311.10 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0 +2024-07-27 11:38:46,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10400.0, ans=0.125 +2024-07-27 11:38:47,309 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.890e+01 7.293e+01 8.300e+01 1.085e+02, threshold=1.459e+02, percent-clipped=0.0 +2024-07-27 11:38:47,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=10400.0, ans=0.125 +2024-07-27 11:38:55,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=10413.333333333334, ans=0.025 +2024-07-27 11:39:00,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=10426.666666666666, ans=0.023222222222222227 +2024-07-27 11:39:00,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.68 vs. limit=4.564 +2024-07-27 11:39:02,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10426.666666666666, ans=0.125 +2024-07-27 11:39:03,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=10426.666666666666, ans=0.5350666666666668 +2024-07-27 11:39:10,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.59 vs. limit=10.219999999999999 +2024-07-27 11:39:20,618 INFO [train.py:1114] (1/4) Epoch 1, batch 7850, loss[loss=0.355, simple_loss=0.3898, pruned_loss=0.1601, over 4527.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.4193, pruned_loss=0.1706, over 935858.08 frames. ], batch size: 10, lr: 3.74e-02, grad_scale: 64.0 +2024-07-27 11:39:21,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=10466.666666666666, ans=0.125 +2024-07-27 11:39:36,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=10493.333333333334, ans=0.125 +2024-07-27 11:39:37,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=10493.333333333334, ans=0.35740000000000005 +2024-07-27 11:39:41,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=10506.666666666666, ans=0.025 +2024-07-27 11:39:47,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=10520.0, ans=0.125 +2024-07-27 11:39:52,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.16 vs. limit=10.26 +2024-07-27 11:39:54,394 INFO [train.py:1114] (1/4) Epoch 1, batch 7900, loss[loss=0.3903, simple_loss=0.4364, pruned_loss=0.1721, over 4876.00 frames. ], tot_loss[loss=0.3813, simple_loss=0.4202, pruned_loss=0.1712, over 933432.68 frames. 
], batch size: 14, lr: 3.73e-02, grad_scale: 64.0 +2024-07-27 11:39:56,373 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.883e+01 7.101e+01 7.733e+01 8.610e+01 1.628e+02, threshold=1.547e+02, percent-clipped=1.0 +2024-07-27 11:40:01,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10546.666666666666, ans=0.19453333333333334 +2024-07-27 11:40:06,715 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:40:22,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10586.666666666666, ans=0.19413333333333332 +2024-07-27 11:40:26,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=4.588 +2024-07-27 11:40:26,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=10586.666666666666, ans=0.125 +2024-07-27 11:40:28,109 INFO [train.py:1114] (1/4) Epoch 1, batch 7950, loss[loss=0.4677, simple_loss=0.4661, pruned_loss=0.2347, over 3343.00 frames. ], tot_loss[loss=0.3788, simple_loss=0.4184, pruned_loss=0.1696, over 935609.57 frames. ], batch size: 35, lr: 3.73e-02, grad_scale: 64.0 +2024-07-27 11:40:45,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=10626.666666666666, ans=0.022388888888888892 +2024-07-27 11:40:46,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=10626.666666666666, ans=0.022388888888888892 +2024-07-27 11:40:49,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.03 vs. limit=15.48 +2024-07-27 11:41:01,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10653.333333333334, ans=0.19346666666666668 +2024-07-27 11:41:44,678 INFO [train.py:1114] (1/4) Epoch 1, batch 8000, loss[loss=0.339, simple_loss=0.384, pruned_loss=0.147, over 4617.00 frames. ], tot_loss[loss=0.3772, simple_loss=0.4166, pruned_loss=0.1688, over 934728.98 frames. ], batch size: 11, lr: 3.72e-02, grad_scale: 64.0 +2024-07-27 11:41:46,747 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.949e+01 6.868e+01 7.730e+01 8.687e+01 2.055e+02, threshold=1.546e+02, percent-clipped=1.0 +2024-07-27 11:41:52,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.46 vs. limit=10.34 +2024-07-27 11:41:58,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.02 vs. limit=11.51 +2024-07-27 11:42:06,433 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.53 vs. 
limit=15.53 +2024-07-27 11:42:06,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=10706.666666666666, ans=0.07 +2024-07-27 11:42:11,400 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.193e+00 +2024-07-27 11:42:14,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=11.52 +2024-07-27 11:42:14,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=11.52 +2024-07-27 11:42:18,027 INFO [train.py:1114] (1/4) Epoch 1, batch 8050, loss[loss=0.3752, simple_loss=0.4386, pruned_loss=0.1559, over 4819.00 frames. ], tot_loss[loss=0.3764, simple_loss=0.4164, pruned_loss=0.1682, over 934150.08 frames. ], batch size: 14, lr: 3.72e-02, grad_scale: 64.0 +2024-07-27 11:42:18,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=10733.333333333334, ans=0.125 +2024-07-27 11:42:33,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=10760.0, ans=0.09899494936611666 +2024-07-27 11:42:35,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10760.0, ans=0.1924 +2024-07-27 11:42:37,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=10760.0, ans=0.021833333333333337 +2024-07-27 11:42:44,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=10773.333333333334, ans=0.125 +2024-07-27 11:42:52,708 INFO [train.py:1114] (1/4) Epoch 1, batch 8100, loss[loss=0.4214, simple_loss=0.4566, pruned_loss=0.1931, over 4803.00 frames. ], tot_loss[loss=0.3739, simple_loss=0.4147, pruned_loss=0.1665, over 933842.77 frames. ], batch size: 15, lr: 3.71e-02, grad_scale: 64.0 +2024-07-27 11:42:54,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.005e+01 7.921e+01 8.722e+01 1.648e+02, threshold=1.584e+02, percent-clipped=1.0 +2024-07-27 11:42:59,947 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.30 vs. limit=11.555 +2024-07-27 11:43:22,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=10853.333333333334, ans=0.02144444444444444 +2024-07-27 11:43:26,155 INFO [train.py:1114] (1/4) Epoch 1, batch 8150, loss[loss=0.3129, simple_loss=0.3785, pruned_loss=0.1237, over 4820.00 frames. ], tot_loss[loss=0.3713, simple_loss=0.4123, pruned_loss=0.1652, over 937330.01 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0 +2024-07-27 11:43:27,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.75 vs. limit=15.65 +2024-07-27 11:43:56,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=10920.0, ans=0.02116666666666667 +2024-07-27 11:44:06,715 INFO [train.py:1114] (1/4) Epoch 1, batch 8200, loss[loss=0.4015, simple_loss=0.4268, pruned_loss=0.1881, over 4811.00 frames. 
], tot_loss[loss=0.3725, simple_loss=0.4135, pruned_loss=0.1658, over 938374.03 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0 +2024-07-27 11:44:08,822 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 6.954e+01 7.394e+01 8.427e+01 2.023e+02, threshold=1.479e+02, percent-clipped=1.0 +2024-07-27 11:44:21,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.25 vs. limit=15.72 +2024-07-27 11:44:21,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10960.0, ans=0.19039999999999999 +2024-07-27 11:44:23,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.74 vs. limit=10.48 +2024-07-27 11:44:26,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=10973.333333333334, ans=0.125 +2024-07-27 11:44:28,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=10973.333333333334, ans=0.125 +2024-07-27 11:44:29,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.50 vs. limit=11.615 +2024-07-27 11:44:34,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.24 vs. limit=10.493333333333332 +2024-07-27 11:44:37,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=8.394666666666666 +2024-07-27 11:44:40,414 INFO [train.py:1114] (1/4) Epoch 1, batch 8250, loss[loss=0.3759, simple_loss=0.4006, pruned_loss=0.1755, over 4889.00 frames. ], tot_loss[loss=0.3709, simple_loss=0.4122, pruned_loss=0.1648, over 938788.40 frames. ], batch size: 13, lr: 3.69e-02, grad_scale: 64.0 +2024-07-27 11:44:43,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=11000.0, ans=0.04949747468305833 +2024-07-27 11:44:52,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.52 vs. limit=10.506666666666668 +2024-07-27 11:44:52,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=11013.333333333334, ans=0.125 +2024-07-27 11:44:54,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=11026.666666666666, ans=0.5140666666666667 +2024-07-27 11:44:54,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=11026.666666666666, ans=0.008472463768115942 +2024-07-27 11:44:55,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.20 vs. 
limit=7.756666666666666 +2024-07-27 11:44:55,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=11026.666666666666, ans=0.125 +2024-07-27 11:45:00,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.32 vs. limit=10.52 +2024-07-27 11:45:10,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=11053.333333333334, ans=0.5131333333333334 +2024-07-27 11:45:13,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=11066.666666666666, ans=0.125 +2024-07-27 11:46:40,213 INFO [train.py:1114] (1/4) Epoch 1, batch 8300, loss[loss=0.4479, simple_loss=0.4732, pruned_loss=0.2113, over 4887.00 frames. ], tot_loss[loss=0.3716, simple_loss=0.4131, pruned_loss=0.1651, over 938325.33 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0 +2024-07-27 11:46:42,161 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 6.941e+01 7.717e+01 8.510e+01 1.243e+02, threshold=1.543e+02, percent-clipped=0.0 +2024-07-27 11:47:02,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11093.333333333334, ans=0.18906666666666666 +2024-07-27 11:47:02,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=11093.333333333334, ans=0.020444444444444442 +2024-07-27 11:47:03,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=11093.333333333334, ans=0.125 +2024-07-27 11:47:15,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11106.666666666666, ans=0.18893333333333334 +2024-07-27 11:47:28,610 INFO [train.py:1114] (1/4) Epoch 1, batch 8350, loss[loss=0.4295, simple_loss=0.4664, pruned_loss=0.1963, over 4797.00 frames. ], tot_loss[loss=0.3691, simple_loss=0.4115, pruned_loss=0.1633, over 941282.04 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0 +2024-07-27 11:47:37,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11146.666666666666, ans=0.18853333333333333 +2024-07-27 11:47:48,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=11146.666666666666, ans=0.008446376811594204 +2024-07-27 11:48:04,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.16 vs. limit=11.695 +2024-07-27 11:48:06,741 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:48:11,907 INFO [train.py:1114] (1/4) Epoch 1, batch 8400, loss[loss=0.362, simple_loss=0.4092, pruned_loss=0.1574, over 4778.00 frames. ], tot_loss[loss=0.37, simple_loss=0.4119, pruned_loss=0.1641, over 939645.50 frames. 
], batch size: 12, lr: 3.67e-02, grad_scale: 64.0 +2024-07-27 11:48:12,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=11200.0, ans=0.020000000000000004 +2024-07-27 11:48:13,816 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.876e+01 7.080e+01 7.641e+01 8.587e+01 1.412e+02, threshold=1.528e+02, percent-clipped=0.0 +2024-07-27 11:48:15,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=18.70 vs. limit=15.9 +2024-07-27 11:48:19,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=11213.333333333334, ans=0.008431884057971014 +2024-07-27 11:48:21,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=11213.333333333334, ans=0.36819999999999997 +2024-07-27 11:48:23,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11213.333333333334, ans=0.125 +2024-07-27 11:48:25,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.77 vs. limit=10.613333333333333 +2024-07-27 11:48:45,017 INFO [train.py:1114] (1/4) Epoch 1, batch 8450, loss[loss=0.3544, simple_loss=0.407, pruned_loss=0.1509, over 4800.00 frames. ], tot_loss[loss=0.3692, simple_loss=0.4116, pruned_loss=0.1635, over 938540.14 frames. ], batch size: 15, lr: 3.67e-02, grad_scale: 64.0 +2024-07-27 11:48:54,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=11280.0, ans=0.125 +2024-07-27 11:49:09,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=11.74 +2024-07-27 11:49:14,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11320.0, ans=0.1868 +2024-07-27 11:49:16,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11320.0, ans=0.1868 +2024-07-27 11:49:20,001 INFO [train.py:1114] (1/4) Epoch 1, batch 8500, loss[loss=0.3301, simple_loss=0.3803, pruned_loss=0.14, over 4603.00 frames. ], tot_loss[loss=0.3697, simple_loss=0.4113, pruned_loss=0.164, over 938475.80 frames. ], batch size: 11, lr: 3.66e-02, grad_scale: 64.0 +2024-07-27 11:49:21,905 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.800e+01 6.867e+01 7.338e+01 8.262e+01 1.317e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 11:49:22,100 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.005e+00 +2024-07-27 11:49:31,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.66 vs. 
limit=16.009999999999998 +2024-07-27 11:49:34,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11360.0, ans=0.18639999999999998 +2024-07-27 11:49:37,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=11360.0, ans=0.019333333333333338 +2024-07-27 11:49:39,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=11360.0, ans=0.125 +2024-07-27 11:49:42,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=8.549333333333333 +2024-07-27 11:49:44,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.95 vs. limit=11.765 +2024-07-27 11:49:47,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=11386.666666666666, ans=0.019222222222222227 +2024-07-27 11:49:54,524 INFO [train.py:1114] (1/4) Epoch 1, batch 8550, loss[loss=0.3389, simple_loss=0.3748, pruned_loss=0.1515, over 4803.00 frames. ], tot_loss[loss=0.3672, simple_loss=0.4095, pruned_loss=0.1625, over 939462.18 frames. ], batch size: 11, lr: 3.65e-02, grad_scale: 64.0 +2024-07-27 11:49:57,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.99 vs. limit=16.05 +2024-07-27 11:49:58,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=11400.0, ans=0.125 +2024-07-27 11:50:14,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=11440.0, ans=0.125 +2024-07-27 11:50:14,659 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:50:16,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=11440.0, ans=0.008382608695652174 +2024-07-27 11:50:24,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11453.333333333334, ans=0.18546666666666667 +2024-07-27 11:50:28,073 INFO [train.py:1114] (1/4) Epoch 1, batch 8600, loss[loss=0.3349, simple_loss=0.389, pruned_loss=0.1404, over 4811.00 frames. ], tot_loss[loss=0.3679, simple_loss=0.4093, pruned_loss=0.1632, over 938798.74 frames. ], batch size: 15, lr: 3.65e-02, grad_scale: 64.0 +2024-07-27 11:50:29,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. 
limit=11.8 +2024-07-27 11:50:31,112 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.520e+01 6.717e+01 7.221e+01 8.025e+01 1.285e+02, threshold=1.444e+02, percent-clipped=0.0 +2024-07-27 11:50:42,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=11493.333333333334, ans=0.125 +2024-07-27 11:50:45,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=11493.333333333334, ans=0.008371014492753624 +2024-07-27 11:50:47,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.30 vs. limit=11.809999999999999 +2024-07-27 11:51:01,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=11520.0, ans=0.125 +2024-07-27 11:51:01,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11520.0, ans=0.125 +2024-07-27 11:51:02,921 INFO [train.py:1114] (1/4) Epoch 1, batch 8650, loss[loss=0.3962, simple_loss=0.4464, pruned_loss=0.173, over 4894.00 frames. ], tot_loss[loss=0.3693, simple_loss=0.4104, pruned_loss=0.1641, over 940527.04 frames. ], batch size: 15, lr: 3.64e-02, grad_scale: 64.0 +2024-07-27 11:51:15,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=11546.666666666666, ans=0.125 +2024-07-27 11:51:18,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=11546.666666666666, ans=0.125 +2024-07-27 11:51:21,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=11560.0, ans=0.025 +2024-07-27 11:51:21,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=11560.0, ans=0.008356521739130434 +2024-07-27 11:51:25,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=11560.0, ans=0.125 +2024-07-27 11:51:36,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=11573.333333333334, ans=0.125 +2024-07-27 11:51:39,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=4.7379999999999995 +2024-07-27 11:51:42,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11586.666666666666, ans=0.125 +2024-07-27 11:51:44,971 INFO [train.py:1114] (1/4) Epoch 1, batch 8700, loss[loss=0.3014, simple_loss=0.3558, pruned_loss=0.1235, over 4760.00 frames. ], tot_loss[loss=0.3695, simple_loss=0.4111, pruned_loss=0.1639, over 937306.94 frames. 
], batch size: 13, lr: 3.64e-02, grad_scale: 64.0 +2024-07-27 11:51:46,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=11600.0, ans=10.0 +2024-07-27 11:51:46,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=11600.0, ans=0.008347826086956521 +2024-07-27 11:51:46,839 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.502e+01 6.768e+01 7.395e+01 8.572e+01 1.594e+02, threshold=1.479e+02, percent-clipped=2.0 +2024-07-27 11:51:52,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.53 vs. limit=10.8 +2024-07-27 11:51:55,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=11613.333333333334, ans=0.125 +2024-07-27 11:51:57,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn1.whiten.whitening_limit, batch_count=11613.333333333334, ans=16.21 +2024-07-27 11:52:08,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=11.865 +2024-07-27 11:52:17,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.81 vs. limit=7.913333333333334 +2024-07-27 11:52:19,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=11653.333333333334, ans=0.018111111111111106 +2024-07-27 11:52:21,396 INFO [train.py:1114] (1/4) Epoch 1, batch 8750, loss[loss=0.4652, simple_loss=0.4739, pruned_loss=0.2282, over 4678.00 frames. ], tot_loss[loss=0.3691, simple_loss=0.4107, pruned_loss=0.1638, over 935939.09 frames. ], batch size: 15, lr: 3.63e-02, grad_scale: 64.0 +2024-07-27 11:52:24,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=11666.666666666666, ans=0.4916666666666667 +2024-07-27 11:52:30,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=11680.0, ans=0.125 +2024-07-27 11:52:31,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=11680.0, ans=0.018000000000000002 +2024-07-27 11:52:34,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=11680.0, ans=0.125 +2024-07-27 11:52:47,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=11720.0, ans=0.125 +2024-07-27 11:52:55,474 INFO [train.py:1114] (1/4) Epoch 1, batch 8800, loss[loss=0.3638, simple_loss=0.4097, pruned_loss=0.1589, over 4933.00 frames. ], tot_loss[loss=0.3711, simple_loss=0.4124, pruned_loss=0.1649, over 936884.51 frames. ], batch size: 14, lr: 3.62e-02, grad_scale: 64.0 +2024-07-27 11:52:57,648 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 7.237e+01 7.954e+01 8.853e+01 1.433e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 11:52:58,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. 
limit=11.9 +2024-07-27 11:52:59,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11733.333333333334, ans=0.18266666666666664 +2024-07-27 11:53:03,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.16 vs. limit=11.905000000000001 +2024-07-27 11:53:15,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=11773.333333333334, ans=0.125 +2024-07-27 11:53:15,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=11773.333333333334, ans=0.48793333333333333 +2024-07-27 11:53:22,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=11786.666666666666, ans=0.01755555555555556 +2024-07-27 11:53:27,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=11786.666666666666, ans=0.01755555555555556 +2024-07-27 11:53:29,048 INFO [train.py:1114] (1/4) Epoch 1, batch 8850, loss[loss=0.3992, simple_loss=0.4289, pruned_loss=0.1847, over 4621.00 frames. ], tot_loss[loss=0.3713, simple_loss=0.4118, pruned_loss=0.1655, over 931755.81 frames. ], batch size: 21, lr: 3.62e-02, grad_scale: 128.0 +2024-07-27 11:53:35,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.53 vs. limit=16.36 +2024-07-27 11:53:44,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=11826.666666666666, ans=0.4860666666666667 +2024-07-27 11:53:45,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=11.934999999999999 +2024-07-27 11:53:52,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.42 vs. limit=4.776 +2024-07-27 11:53:55,173 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.800e+00 +2024-07-27 11:53:55,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=11853.333333333334, ans=0.4851333333333333 +2024-07-27 11:53:58,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=11853.333333333334, ans=0.125 +2024-07-27 11:54:02,398 INFO [train.py:1114] (1/4) Epoch 1, batch 8900, loss[loss=0.2887, simple_loss=0.3287, pruned_loss=0.1244, over 4926.00 frames. ], tot_loss[loss=0.3719, simple_loss=0.4124, pruned_loss=0.1657, over 930091.82 frames. 
], batch size: 12, lr: 3.61e-02, grad_scale: 128.0 +2024-07-27 11:54:04,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.705e+01 6.769e+01 7.408e+01 8.026e+01 1.011e+02, threshold=1.482e+02, percent-clipped=0.0 +2024-07-27 11:54:05,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=11866.666666666666, ans=0.13133333333333333 +2024-07-27 11:54:11,416 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.55 vs. limit=11.955 +2024-07-27 11:54:28,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=11893.333333333334, ans=0.017111111111111105 +2024-07-27 11:54:35,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=11906.666666666666, ans=0.48326666666666673 +2024-07-27 11:54:39,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.22 vs. limit=16.43 +2024-07-27 11:54:42,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=11920.0, ans=0.8692 +2024-07-27 11:54:48,596 INFO [train.py:1114] (1/4) Epoch 1, batch 8950, loss[loss=0.4024, simple_loss=0.4339, pruned_loss=0.1855, over 4564.00 frames. ], tot_loss[loss=0.3702, simple_loss=0.4111, pruned_loss=0.1647, over 930884.15 frames. ], batch size: 21, lr: 3.61e-02, grad_scale: 128.0 +2024-07-27 11:54:54,115 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.06 vs. limit=16.45 +2024-07-27 11:55:13,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=11960.0, ans=0.008269565217391304 +2024-07-27 11:55:13,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=11960.0, ans=0.025 +2024-07-27 11:55:18,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=11973.333333333334, ans=0.125 +2024-07-27 11:55:21,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.10 vs. limit=11.99 +2024-07-27 11:55:22,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=11973.333333333334, ans=0.016777777777777773 +2024-07-27 11:55:31,946 INFO [train.py:1114] (1/4) Epoch 1, batch 9000, loss[loss=0.2811, simple_loss=0.3352, pruned_loss=0.1134, over 4648.00 frames. ], tot_loss[loss=0.3673, simple_loss=0.4087, pruned_loss=0.1629, over 933696.31 frames. ], batch size: 12, lr: 3.60e-02, grad_scale: 64.0 +2024-07-27 11:55:31,946 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 11:55:42,206 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9582, 3.5055, 3.3469, 3.4569], device='cuda:1') +2024-07-27 11:55:45,436 INFO [train.py:1146] (1/4) Epoch 1, validation: loss=0.2917, simple_loss=0.3779, pruned_loss=0.1028, over 944034.00 frames. 
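(Aside on the `ScheduledFloat` entries that dominate this log: they record hyperparameters such as dropout probabilities, skip rates, and bypass scale limits that are evidently annealed as a piecewise-linear function of `batch_count`; each entry prints the interpolated value as `ans=...` for the current batch count. Below is a minimal sketch of that idea, not the training code's own implementation; the breakpoints `(0, 0.3)` and `(20000, 0.1)` are inferred from the logged `feed_forward1.out_proj.dropout_p` values and are an assumption for illustration, not the run's verbatim configuration.)

```python
def scheduled_float(batch_count: float, points: list[tuple[float, float]]) -> float:
    """Piecewise-linear interpolation of a scheduled hyperparameter.

    `points` is a sorted list of (batch_count, value) breakpoints; outside
    their range the value is clamped to the first/last breakpoint.
    """
    if batch_count <= points[0][0]:
        return points[0][1]
    for (x0, y0), (x1, y1) in zip(points, points[1:]):
        if batch_count <= x1:
            # Linear interpolation between the two surrounding breakpoints.
            return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
    return points[-1][1]

# Reproduces the dropout_p values seen in this log, e.g. ans=0.20093333...
# at batch_count=9906.666 (assumed schedule: 0.3 at batch 0 -> 0.1 at 20000).
print(scheduled_float(9906.666666666666, [(0.0, 0.3), (20000.0, 0.1)]))
```

(The same sketch also matches, e.g., `ans=0.1996` at `batch_count=10040.0`, which is consistent with a single linear segment over this stretch of training.)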
+2024-07-27 11:55:45,436 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 11:55:45,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.53 vs. limit=16.5 +2024-07-27 11:55:48,955 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.414e+01 6.571e+01 7.230e+01 7.907e+01 1.156e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 11:55:52,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=12.004999999999999 +2024-07-27 11:56:11,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.65 vs. limit=8.01 +2024-07-27 11:56:21,460 INFO [train.py:1114] (1/4) Epoch 1, batch 9050, loss[loss=0.3602, simple_loss=0.3956, pruned_loss=0.1624, over 4545.00 frames. ], tot_loss[loss=0.3654, simple_loss=0.407, pruned_loss=0.1619, over 934392.64 frames. ], batch size: 10, lr: 3.59e-02, grad_scale: 64.0 +2024-07-27 11:56:21,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=12066.666666666666, ans=0.025 +2024-07-27 11:56:22,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=12066.666666666666, ans=0.125 +2024-07-27 11:56:31,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.43 vs. limit=12.030000000000001 +2024-07-27 11:56:32,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12080.0, ans=0.17919999999999997 +2024-07-27 11:56:33,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=12080.0, ans=0.125 +2024-07-27 11:56:39,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=12093.333333333334, ans=0.125 +2024-07-27 11:56:41,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=12106.666666666666, ans=0.125 +2024-07-27 11:56:47,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=12120.0, ans=0.01616666666666667 +2024-07-27 11:56:50,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=12120.0, ans=0.125 +2024-07-27 11:56:51,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=12120.0, ans=0.008234782608695652 +2024-07-27 11:56:51,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12120.0, ans=0.1788 +2024-07-27 11:56:51,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=12120.0, ans=0.008234782608695652 +2024-07-27 11:56:54,149 INFO [train.py:1114] (1/4) Epoch 1, batch 9100, loss[loss=0.3772, simple_loss=0.4184, pruned_loss=0.168, over 4932.00 frames. ], tot_loss[loss=0.3633, simple_loss=0.4059, pruned_loss=0.1604, over 936952.01 frames. 
], batch size: 14, lr: 3.59e-02, grad_scale: 64.0 +2024-07-27 11:57:01,196 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.077e+01 6.999e+01 7.542e+01 8.527e+01 1.258e+02, threshold=1.508e+02, percent-clipped=0.0 +2024-07-27 11:57:01,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=12133.333333333334, ans=0.125 +2024-07-27 11:57:03,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=12133.333333333334, ans=0.01611111111111111 +2024-07-27 11:57:09,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=12146.666666666666, ans=0.125 +2024-07-27 11:57:10,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12146.666666666666, ans=0.125 +2024-07-27 11:57:14,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=12160.0, ans=0.008226086956521739 +2024-07-27 11:57:16,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=12160.0, ans=0.47440000000000004 +2024-07-27 11:57:17,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.22 vs. limit=12.059999999999999 +2024-07-27 11:57:25,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=12186.666666666666, ans=0.01588888888888889 +2024-07-27 11:57:30,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=8.874666666666666 +2024-07-27 11:57:31,649 INFO [train.py:1114] (1/4) Epoch 1, batch 9150, loss[loss=0.3884, simple_loss=0.4419, pruned_loss=0.1675, over 4811.00 frames. ], tot_loss[loss=0.3654, simple_loss=0.4076, pruned_loss=0.1616, over 935603.49 frames. ], batch size: 14, lr: 3.58e-02, grad_scale: 64.0 +2024-07-27 11:57:32,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.13 vs. 
limit=8.05 +2024-07-27 11:57:39,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=12213.333333333334, ans=0.015777777777777773 +2024-07-27 11:57:40,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12213.333333333334, ans=0.125 +2024-07-27 11:57:44,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12226.666666666666, ans=0.125 +2024-07-27 11:58:00,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12253.333333333334, ans=0.125 +2024-07-27 11:58:00,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=12253.333333333334, ans=0.125 +2024-07-27 11:58:00,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=12253.333333333334, ans=0.47113333333333335 +2024-07-27 11:58:02,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=12.094999999999999 +2024-07-27 11:58:04,617 INFO [train.py:1114] (1/4) Epoch 1, batch 9200, loss[loss=0.2857, simple_loss=0.3438, pruned_loss=0.1138, over 4867.00 frames. ], tot_loss[loss=0.364, simple_loss=0.407, pruned_loss=0.1605, over 937344.16 frames. ], batch size: 12, lr: 3.58e-02, grad_scale: 64.0 +2024-07-27 11:58:07,213 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.834e+01 6.731e+01 7.265e+01 8.123e+01 1.608e+02, threshold=1.453e+02, percent-clipped=1.0 +2024-07-27 11:58:17,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.89 vs. limit=16.72 +2024-07-27 11:58:28,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=12306.666666666666, ans=0.125 +2024-07-27 11:58:36,646 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:58:36,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=12320.0, ans=0.125 +2024-07-27 11:58:40,674 INFO [train.py:1114] (1/4) Epoch 1, batch 9250, loss[loss=0.3191, simple_loss=0.3788, pruned_loss=0.1297, over 4638.00 frames. ], tot_loss[loss=0.3635, simple_loss=0.4069, pruned_loss=0.16, over 938193.79 frames. ], batch size: 13, lr: 3.57e-02, grad_scale: 64.0 +2024-07-27 11:58:49,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.20 vs. limit=16.759999999999998 +2024-07-27 11:58:53,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=12360.0, ans=0.02 +2024-07-27 11:58:57,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.35 vs. 
limit=16.77 +2024-07-27 11:58:58,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=12360.0, ans=0.008182608695652174 +2024-07-27 11:59:14,658 INFO [train.py:1114] (1/4) Epoch 1, batch 9300, loss[loss=0.3692, simple_loss=0.4039, pruned_loss=0.1672, over 4783.00 frames. ], tot_loss[loss=0.3627, simple_loss=0.4059, pruned_loss=0.1597, over 938281.21 frames. ], batch size: 12, lr: 3.57e-02, grad_scale: 64.0 +2024-07-27 11:59:17,481 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.284e+01 6.919e+01 7.820e+01 8.678e+01 1.247e+02, threshold=1.564e+02, percent-clipped=0.0 +2024-07-27 11:59:24,913 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:59:29,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=12426.666666666666, ans=0.125 +2024-07-27 11:59:34,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12440.0, ans=0.125 +2024-07-27 11:59:42,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=12453.333333333334, ans=0.46413333333333334 +2024-07-27 11:59:47,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=12466.666666666666, ans=0.008159420289855073 +2024-07-27 11:59:48,173 INFO [train.py:1114] (1/4) Epoch 1, batch 9350, loss[loss=0.3403, simple_loss=0.3696, pruned_loss=0.1555, over 4814.00 frames. ], tot_loss[loss=0.3604, simple_loss=0.404, pruned_loss=0.1584, over 935090.09 frames. ], batch size: 11, lr: 3.56e-02, grad_scale: 64.0 +2024-07-27 11:59:54,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=12480.0, ans=0.125 +2024-07-27 11:59:55,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=12480.0, ans=0.125 +2024-07-27 11:59:58,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=12480.0, ans=0.125 +2024-07-27 12:00:01,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=12493.333333333334, ans=0.008153623188405797 +2024-07-27 12:00:18,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=12520.0, ans=0.125 +2024-07-27 12:00:18,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=12520.0, ans=10.0 +2024-07-27 12:00:19,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=12520.0, ans=0.125 +2024-07-27 12:00:22,082 INFO [train.py:1114] (1/4) Epoch 1, batch 9400, loss[loss=0.379, simple_loss=0.4327, pruned_loss=0.1626, over 4687.00 frames. ], tot_loss[loss=0.3621, simple_loss=0.4054, pruned_loss=0.1594, over 932999.99 frames. 
], batch size: 13, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:00:24,566 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.848e+01 6.575e+01 7.346e+01 8.658e+01 2.018e+02, threshold=1.469e+02, percent-clipped=2.0 +2024-07-27 12:00:37,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=12560.0, ans=0.014333333333333337 +2024-07-27 12:00:45,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=12573.333333333334, ans=0.125 +2024-07-27 12:00:46,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12573.333333333334, ans=0.125 +2024-07-27 12:00:50,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=12586.666666666666, ans=0.125 +2024-07-27 12:00:51,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.09 vs. limit=9.034666666666666 +2024-07-27 12:00:57,887 INFO [train.py:1114] (1/4) Epoch 1, batch 9450, loss[loss=0.3233, simple_loss=0.364, pruned_loss=0.1413, over 4822.00 frames. ], tot_loss[loss=0.3615, simple_loss=0.4052, pruned_loss=0.1589, over 932285.61 frames. ], batch size: 11, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:01:03,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.38 vs. limit=12.225 +2024-07-27 12:01:11,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.38 vs. limit=12.23 +2024-07-27 12:01:12,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=12613.333333333334, ans=0.125 +2024-07-27 12:01:14,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=12626.666666666666, ans=0.0 +2024-07-27 12:01:21,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12640.0, ans=0.125 +2024-07-27 12:01:27,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=12.245000000000001 +2024-07-27 12:01:33,364 INFO [train.py:1114] (1/4) Epoch 1, batch 9500, loss[loss=0.3181, simple_loss=0.3749, pruned_loss=0.1307, over 4698.00 frames. ], tot_loss[loss=0.3613, simple_loss=0.4051, pruned_loss=0.1588, over 934702.88 frames. 
], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:01:35,890 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.605e+01 6.746e+01 7.341e+01 8.122e+01 1.206e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 12:01:44,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=12680.0, ans=0.45620000000000005 +2024-07-27 12:02:25,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=12706.666666666666, ans=0.4552666666666667 +2024-07-27 12:02:41,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=12720.0, ans=0.125 +2024-07-27 12:02:42,665 INFO [train.py:1114] (1/4) Epoch 1, batch 9550, loss[loss=0.3217, simple_loss=0.3769, pruned_loss=0.1332, over 4783.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.4067, pruned_loss=0.1597, over 931987.86 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:03:00,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.23 vs. limit=8.186666666666667 +2024-07-27 12:03:05,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.75 vs. limit=17.07 +2024-07-27 12:03:07,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=12760.0, ans=0.07 +2024-07-27 12:03:15,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=12773.333333333334, ans=0.008092753623188407 +2024-07-27 12:03:24,184 INFO [train.py:1114] (1/4) Epoch 1, batch 9600, loss[loss=0.5207, simple_loss=0.5055, pruned_loss=0.2679, over 3496.00 frames. ], tot_loss[loss=0.3622, simple_loss=0.4064, pruned_loss=0.159, over 931091.08 frames. ], batch size: 35, lr: 3.53e-02, grad_scale: 64.0 +2024-07-27 12:03:29,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=12800.0, ans=0.125 +2024-07-27 12:03:30,716 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.771e+01 7.099e+01 8.382e+01 1.458e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 12:03:39,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.66 vs. limit=11.406666666666666 +2024-07-27 12:03:41,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=12813.333333333334, ans=0.013277777777777777 +2024-07-27 12:03:43,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=12826.666666666666, ans=0.013222222222222225 +2024-07-27 12:03:50,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=12840.0, ans=0.125 +2024-07-27 12:03:52,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.74 vs. 
+2024-07-27 12:04:02,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=12853.333333333334, ans=0.00807536231884058
+2024-07-27 12:04:07,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=12853.333333333334, ans=0.125
+2024-07-27 12:04:11,706 INFO [train.py:1114] (1/4) Epoch 1, batch 9650, loss[loss=0.3852, simple_loss=0.4468, pruned_loss=0.1618, over 4840.00 frames. ], tot_loss[loss=0.3641, simple_loss=0.4077, pruned_loss=0.1603, over 927285.08 frames. ], batch size: 16, lr: 3.53e-02, grad_scale: 64.0
+2024-07-27 12:04:31,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=12893.333333333334, ans=0.125
+2024-07-27 12:04:36,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=12906.666666666666, ans=0.125
+2024-07-27 12:04:47,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=12920.0, ans=0.125
+2024-07-27 12:04:49,096 INFO [train.py:1114] (1/4) Epoch 1, batch 9700, loss[loss=0.4004, simple_loss=0.4292, pruned_loss=0.1858, over 4335.00 frames. ], tot_loss[loss=0.3645, simple_loss=0.4081, pruned_loss=0.1604, over 925544.79 frames. ], batch size: 25, lr: 3.52e-02, grad_scale: 64.0
+2024-07-27 12:04:52,734 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.794e+01 6.661e+01 7.332e+01 8.273e+01 1.352e+02, threshold=1.466e+02, percent-clipped=0.0
+2024-07-27 12:04:56,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12946.666666666666, ans=0.17053333333333334
+2024-07-27 12:05:14,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=4.944
+2024-07-27 12:05:34,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=12973.333333333334, ans=0.125
+2024-07-27 12:05:35,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=12973.333333333334, ans=0.44593333333333335
+2024-07-27 12:05:52,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12986.666666666666, ans=0.125
+2024-07-27 12:05:55,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=12986.666666666666, ans=0.012555555555555563
+2024-07-27 12:05:57,816 INFO [train.py:1114] (1/4) Epoch 1, batch 9750, loss[loss=0.3869, simple_loss=0.4337, pruned_loss=0.17, over 4682.00 frames. ], tot_loss[loss=0.3635, simple_loss=0.4074, pruned_loss=0.1598, over 925556.72 frames. ], batch size: 15, lr: 3.51e-02, grad_scale: 64.0
+2024-07-27 12:05:57,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=13000.0, ans=0.125
+2024-07-27 12:06:38,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=13040.0, ans=0.125
+2024-07-27 12:06:48,517 INFO [train.py:1114] (1/4) Epoch 1, batch 9800, loss[loss=0.3637, simple_loss=0.4047, pruned_loss=0.1613, over 4704.00 frames. ], tot_loss[loss=0.3609, simple_loss=0.405, pruned_loss=0.1584, over 925235.55 frames. ], batch size: 12, lr: 3.51e-02, grad_scale: 64.0
+2024-07-27 12:06:51,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.855e+01 7.493e+01 8.291e+01 1.245e+02, threshold=1.499e+02, percent-clipped=0.0
+2024-07-27 12:07:02,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=13080.0, ans=0.0
+2024-07-27 12:07:31,388 INFO [train.py:1114] (1/4) Epoch 1, batch 9850, loss[loss=0.3776, simple_loss=0.4261, pruned_loss=0.1645, over 4893.00 frames. ], tot_loss[loss=0.3607, simple_loss=0.4048, pruned_loss=0.1583, over 927782.61 frames. ], batch size: 15, lr: 3.50e-02, grad_scale: 64.0
+2024-07-27 12:07:32,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13133.333333333334, ans=0.16866666666666666
+2024-07-27 12:07:36,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=17.35
+2024-07-27 12:07:39,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=13146.666666666666, ans=0.125
+2024-07-27 12:07:45,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=13160.0, ans=0.125
+2024-07-27 12:07:52,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=13173.333333333334, ans=0.3976
+2024-07-27 12:08:15,396 INFO [train.py:1114] (1/4) Epoch 1, batch 9900, loss[loss=0.38, simple_loss=0.4269, pruned_loss=0.1666, over 4830.00 frames. ], tot_loss[loss=0.3624, simple_loss=0.4064, pruned_loss=0.1592, over 926641.25 frames. ], batch size: 16, lr: 3.50e-02, grad_scale: 64.0
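The ScheduledFloat messages print hyperparameters that are deterministic functions of batch_count: skip rates and dropout probabilities decay as training progresses, while values such as bypass scale_min shrink on their own schedules. Below is a minimal piecewise-linear sketch of such a schedule. The breakpoints are illustrative assumptions, though they happen to reproduce the attention_skip_rate value logged at batch_count=12560 above.

```python
# Minimal sketch of a batch-count-driven schedule like the ScheduledFloat
# values printed in the log. Breakpoints here are assumptions, not the
# training run's actual configuration.
class ScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count
        self.points = list(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)  # linear interpolation

skip_rate = ScheduledFloat((0.0, 0.2), (4000.0, 0.05), (16000.0, 0.0))
print(skip_rate.value(12560.0))  # ~0.014333, cf. ans=0.0143... at batch_count=12560
```

Driving these knobs from batch_count rather than wall-clock or epoch keeps the schedule reproducible across restarts, which is presumably why every message carries the batch_count alongside the current value.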
+2024-07-27 12:08:16,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=13200.0, ans=0.438
+2024-07-27 12:08:17,903 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.801e+01 7.469e+01 8.450e+01 1.233e+02, threshold=1.494e+02, percent-clipped=0.0
+2024-07-27 12:08:31,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=13213.333333333334, ans=0.125
+2024-07-27 12:08:39,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=13226.666666666666, ans=17.42
+2024-07-27 12:08:47,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13253.333333333334, ans=0.16746666666666665
+2024-07-27 12:08:54,179 INFO [train.py:1114] (1/4) Epoch 1, batch 9950, loss[loss=0.2521, simple_loss=0.3042, pruned_loss=0.09997, over 4808.00 frames. ], tot_loss[loss=0.3605, simple_loss=0.405, pruned_loss=0.158, over 929181.78 frames. ], batch size: 11, lr: 3.49e-02, grad_scale: 64.0
+2024-07-27 12:09:06,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13280.0, ans=0.125
+2024-07-27 12:09:10,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=13293.333333333334, ans=0.035
+2024-07-27 12:09:20,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.83 vs. limit=17.490000000000002
+2024-07-27 12:09:24,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=13320.0, ans=0.011166666666666672
+2024-07-27 12:09:27,212 INFO [train.py:1114] (1/4) Epoch 1, batch 10000, loss[loss=0.3598, simple_loss=0.4183, pruned_loss=0.1507, over 4607.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.4079, pruned_loss=0.1596, over 926652.20 frames. ], batch size: 16, lr: 3.49e-02, grad_scale: 64.0
+2024-07-27 12:09:29,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=13333.333333333334, ans=0.125
+2024-07-27 12:09:29,737 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.713e+01 6.862e+01 7.247e+01 8.214e+01 1.240e+02, threshold=1.449e+02, percent-clipped=0.0
+2024-07-27 12:09:30,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=13333.333333333334, ans=0.43333333333333335
+2024-07-27 12:09:36,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=13346.666666666666, ans=0.125
+2024-07-27 12:09:49,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=13373.333333333334, ans=0.125
+2024-07-27 12:09:50,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=13373.333333333334, ans=0.00796231884057971
+2024-07-27 12:10:01,314 INFO [train.py:1114] (1/4) Epoch 1, batch 10050, loss[loss=0.3995, simple_loss=0.4155, pruned_loss=0.1918, over 3684.00 frames. ], tot_loss[loss=0.3683, simple_loss=0.4121, pruned_loss=0.1623, over 915300.35 frames. ], batch size: 37, lr: 3.48e-02, grad_scale: 64.0
+2024-07-27 12:10:17,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=13400.0, ans=0.05
+2024-07-27 12:10:22,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=13413.333333333334, ans=0.125
+2024-07-27 12:10:23,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.08 vs. limit=5.0120000000000005
+2024-07-27 12:10:30,504 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 12:10:35,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13440.0, ans=0.1656
+2024-07-27 12:10:42,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=13453.333333333334, ans=0.125
+2024-07-27 12:10:43,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=12.545
+2024-07-27 12:10:46,727 INFO [train.py:1114] (1/4) Epoch 1, batch 10100, loss[loss=0.4454, simple_loss=0.4492, pruned_loss=0.2208, over 3578.00 frames. ], tot_loss[loss=0.385, simple_loss=0.4218, pruned_loss=0.1741, over 863880.39 frames. ], batch size: 38, lr: 3.47e-02, grad_scale: 64.0
+2024-07-27 12:10:46,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13466.666666666666, ans=0.125
+2024-07-27 12:10:47,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13466.666666666666, ans=0.16533333333333333
+2024-07-27 12:10:48,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=13466.666666666666, ans=0.007942028985507247
+2024-07-27 12:10:49,394 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.041e+01 6.990e+01 7.547e+01 8.268e+01 1.617e+02, threshold=1.509e+02, percent-clipped=1.0
+2024-07-27 12:10:53,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=13480.0, ans=0.4282
+2024-07-27 12:11:03,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=13493.333333333334, ans=0.125
+2024-07-27 12:11:03,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=13493.333333333334, ans=0.010444444444444444
+2024-07-27 12:11:23,845 INFO [train.py:1114] (1/4) Epoch 1, batch 10150, loss[loss=0.4151, simple_loss=0.4442, pruned_loss=0.193, over 3345.00 frames. ], tot_loss[loss=0.3956, simple_loss=0.4278, pruned_loss=0.1817, over 823363.58 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 64.0
+2024-07-27 12:11:42,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=13560.0, ans=0.0
+2024-07-27 12:11:42,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=13560.0, ans=0.4254
+2024-07-27 12:11:43,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.68 vs. limit=17.67
+2024-07-27 12:12:02,428 INFO [train.py:1114] (1/4) Epoch 1, batch 10200, loss[loss=0.4302, simple_loss=0.4339, pruned_loss=0.2132, over 3553.00 frames. ], tot_loss[loss=0.4016, simple_loss=0.4308, pruned_loss=0.1862, over 790820.00 frames. ], batch size: 35, lr: 3.46e-02, grad_scale: 64.0
+2024-07-27 12:12:03,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=13600.0, ans=0.00791304347826087
+2024-07-27 12:12:04,946 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.695e+01 6.612e+01 7.159e+01 7.876e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0
+2024-07-27 12:12:08,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=12.6
+2024-07-27 12:12:12,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.99 vs. limit=5.042
+2024-07-27 12:13:02,326 INFO [train.py:1114] (1/4) Epoch 2, batch 0, loss[loss=0.3221, simple_loss=0.3779, pruned_loss=0.1331, over 4850.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3779, pruned_loss=0.1331, over 4850.00 frames. ], batch size: 12, lr: 3.39e-02, grad_scale: 64.0
+2024-07-27 12:13:02,327 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 12:13:06,733 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.9015, 1.3566, 1.7133, 1.6665, 1.1642, 1.1209, 1.2316, 1.0165],
+ device='cuda:1')
+2024-07-27 12:13:11,682 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.7046, 5.1853, 5.2575, 5.4665], device='cuda:1')
+2024-07-27 12:13:13,917 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.3005, simple_loss=0.3865, pruned_loss=0.1073, over 944034.00 frames.
+2024-07-27 12:13:13,917 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 12:13:16,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.11 vs. limit=12.611
+2024-07-27 12:13:29,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13656.0, ans=0.125
+2024-07-27 12:13:29,726 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 12:13:38,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=13669.333333333334, ans=0.42157333333333336
+2024-07-27 12:13:49,240 INFO [train.py:1114] (1/4) Epoch 2, batch 50, loss[loss=0.3151, simple_loss=0.3705, pruned_loss=0.1298, over 4602.00 frames. ], tot_loss[loss=0.3737, simple_loss=0.4144, pruned_loss=0.1665, over 206449.66 frames. ], batch size: 11, lr: 3.39e-02, grad_scale: 64.0
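During the validation pass above, zipformer.py prints the entropy of each attention head's weight distribution (attn_weights_entropy), a quick diagnostic for heads that have collapsed to near-deterministic attention (entropy near 0) versus heads attending broadly. The sketch below uses the textbook definition; the exact tensor shapes and reduction in the real code may differ.

```python
# Hedged sketch of the per-head attention-entropy diagnostic behind the
# "attn_weights_entropy = tensor([...])" validation lines. Shapes are assumed.
import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """attn_weights: (num_heads, query_len, key_len); each row sums to 1."""
    eps = 1.0e-20  # guard against log(0)
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)  # average over query positions -> one value per head

weights = torch.softmax(torch.randn(8, 16, 16), dim=-1)
print(attn_weights_entropy(weights))  # 8 per-head entropies, as in the log
```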
+2024-07-27 12:13:52,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=13696.0, ans=0.009600000000000004
+2024-07-27 12:13:53,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.26 vs. limit=17.772
+2024-07-27 12:13:54,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=13696.0, ans=0.42064000000000007
+2024-07-27 12:14:05,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=13709.333333333334, ans=0.125
+2024-07-27 12:14:06,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=13709.333333333334, ans=0.4201733333333334
+2024-07-27 12:14:12,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=13722.666666666666, ans=0.007886376811594204
+2024-07-27 12:14:12,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.24 vs. limit=17.792
+2024-07-27 12:14:14,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=13722.666666666666, ans=0.007886376811594204
+2024-07-27 12:14:16,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.870e+01 6.791e+01 7.517e+01 8.543e+01 1.783e+02, threshold=1.503e+02, percent-clipped=1.0
+2024-07-27 12:14:24,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.01 vs. limit=11.874666666666666
+2024-07-27 12:14:29,090 INFO [train.py:1114] (1/4) Epoch 2, batch 100, loss[loss=0.3049, simple_loss=0.3706, pruned_loss=0.1195, over 4646.00 frames. ], tot_loss[loss=0.3659, simple_loss=0.4102, pruned_loss=0.1608, over 365499.12 frames. ], batch size: 12, lr: 3.38e-02, grad_scale: 64.0
+2024-07-27 12:14:31,769 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.56 vs. limit=17.822
+2024-07-27 12:14:32,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=13762.666666666666, ans=0.125
+2024-07-27 12:14:35,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=13762.666666666666, ans=0.025
+2024-07-27 12:14:36,237 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.06 vs. limit=12.666
+2024-07-27 12:14:49,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=13789.333333333334, ans=0.0
+2024-07-27 12:14:51,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13802.666666666666, ans=0.125
+2024-07-27 12:14:53,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=13802.666666666666, ans=0.025
+2024-07-27 12:15:04,095 INFO [train.py:1114] (1/4) Epoch 2, batch 150, loss[loss=0.3212, simple_loss=0.3783, pruned_loss=0.1321, over 4619.00 frames. ], tot_loss[loss=0.3604, simple_loss=0.4049, pruned_loss=0.1579, over 493829.60 frames. ], batch size: 11, lr: 3.38e-02, grad_scale: 64.0
+2024-07-27 12:15:06,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=13829.333333333334, ans=0.125
+2024-07-27 12:15:21,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=13856.0, ans=0.125
+2024-07-27 12:15:24,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.70 vs. limit=11.928
+2024-07-27 12:15:24,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=13869.333333333334, ans=0.008877777777777776
+2024-07-27 12:15:26,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.728e+01 6.633e+01 7.371e+01 8.455e+01 1.546e+02, threshold=1.474e+02, percent-clipped=1.0
+2024-07-27 12:15:27,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=13869.333333333334, ans=0.025
+2024-07-27 12:15:38,908 INFO [train.py:1114] (1/4) Epoch 2, batch 200, loss[loss=0.4175, simple_loss=0.4598, pruned_loss=0.1876, over 4532.00 frames. ], tot_loss[loss=0.3582, simple_loss=0.4026, pruned_loss=0.1569, over 593390.93 frames. ], batch size: 21, lr: 3.37e-02, grad_scale: 64.0
+2024-07-27 12:15:43,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.95 vs. limit=8.474
+2024-07-27 12:15:50,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.26 vs. limit=17.932000000000002
+2024-07-27 12:16:03,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.73 vs. limit=12.725999999999999
+2024-07-27 12:16:07,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=13949.333333333334, ans=0.125
+2024-07-27 12:16:11,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13949.333333333334, ans=0.125
+2024-07-27 12:16:15,234 INFO [train.py:1114] (1/4) Epoch 2, batch 250, loss[loss=0.3487, simple_loss=0.4165, pruned_loss=0.1405, over 4602.00 frames. ], tot_loss[loss=0.3551, simple_loss=0.4011, pruned_loss=0.1545, over 670300.39 frames. ], batch size: 16, lr: 3.37e-02, grad_scale: 64.0
+2024-07-27 12:16:31,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=13976.0, ans=0.125
+2024-07-27 12:16:32,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=13976.0, ans=0.007831304347826088
+2024-07-27 12:16:34,260 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 12:16:36,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=13989.333333333334, ans=0.41037333333333337
+2024-07-27 12:16:41,801 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.968e+01 6.499e+01 7.152e+01 7.948e+01 1.053e+02, threshold=1.430e+02, percent-clipped=0.0
+2024-07-27 12:16:47,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=14002.666666666666, ans=10.0
+2024-07-27 12:16:49,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=14016.0, ans=0.09899494936611666
+2024-07-27 12:16:53,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=14016.0, ans=0.008266666666666665
+2024-07-27 12:16:56,552 INFO [train.py:1114] (1/4) Epoch 2, batch 300, loss[loss=0.3411, simple_loss=0.3928, pruned_loss=0.1447, over 4798.00 frames. ], tot_loss[loss=0.3528, simple_loss=0.3994, pruned_loss=0.1531, over 730185.62 frames. ], batch size: 15, lr: 3.36e-02, grad_scale: 64.0
+2024-07-27 12:17:00,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=14029.333333333334, ans=0.125
+2024-07-27 12:17:07,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=14042.666666666666, ans=0.008155555555555562
+2024-07-27 12:17:15,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=14056.0, ans=0.007813913043478261
+2024-07-27 12:17:16,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=12.771
+2024-07-27 12:17:21,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=14069.333333333334, ans=0.125
+2024-07-27 12:17:22,682 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=1.041e-02
+2024-07-27 12:17:27,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=14082.666666666666, ans=0.10917333333333334
+2024-07-27 12:17:31,948 INFO [train.py:1114] (1/4) Epoch 2, batch 350, loss[loss=0.3198, simple_loss=0.3624, pruned_loss=0.1386, over 4931.00 frames. ], tot_loss[loss=0.3526, simple_loss=0.3995, pruned_loss=0.1528, over 776132.21 frames. ], batch size: 12, lr: 3.36e-02, grad_scale: 64.0
+2024-07-27 12:17:32,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14096.0, ans=0.125
+2024-07-27 12:17:32,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=14096.0, ans=0.40664000000000006
+2024-07-27 12:17:37,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=14096.0, ans=0.007805217391304348
+2024-07-27 12:17:50,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.67 vs. limit=9.649066666666666
+2024-07-27 12:17:57,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.955e+01 6.715e+01 7.349e+01 8.005e+01 1.409e+02, threshold=1.470e+02, percent-clipped=0.0
+2024-07-27 12:18:10,145 INFO [train.py:1114] (1/4) Epoch 2, batch 400, loss[loss=0.2921, simple_loss=0.363, pruned_loss=0.1106, over 4693.00 frames. ], tot_loss[loss=0.3496, simple_loss=0.398, pruned_loss=0.1506, over 813292.01 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0
+2024-07-27 12:18:11,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=14162.666666666666, ans=0.125
+2024-07-27 12:18:20,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=14176.0, ans=0.025
+2024-07-27 12:18:21,662 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 12:18:28,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=14189.333333333334, ans=0.007784927536231884
+2024-07-27 12:18:39,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. limit=12.831
+2024-07-27 12:18:39,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=14216.0, ans=0.125
+2024-07-27 12:18:44,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=12.831
+2024-07-27 12:18:45,069 INFO [train.py:1114] (1/4) Epoch 2, batch 450, loss[loss=0.4074, simple_loss=0.4512, pruned_loss=0.1818, over 4633.00 frames. ], tot_loss[loss=0.3502, simple_loss=0.3986, pruned_loss=0.1509, over 838746.36 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0
+2024-07-27 12:18:46,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=14229.333333333334, ans=0.4019733333333334
+2024-07-27 12:18:46,769 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.08 vs. limit=5.134399999999999
+2024-07-27 12:18:47,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=14229.333333333334, ans=0.125
+2024-07-27 12:18:47,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=14229.333333333334, ans=0.125
+2024-07-27 12:18:52,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=14242.666666666666, ans=0.0
+2024-07-27 12:18:58,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=14256.0, ans=0.125
+2024-07-27 12:19:00,871 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.86 vs. limit=12.128
+2024-07-27 12:19:04,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=14256.0, ans=0.15744
+2024-07-27 12:19:06,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=14269.333333333334, ans=0.007211111111111113
+2024-07-27 12:19:07,285 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+01 6.535e+01 7.099e+01 8.060e+01 1.224e+02, threshold=1.420e+02, percent-clipped=0.0
+2024-07-27 12:19:11,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14269.333333333334, ans=0.125
+2024-07-27 12:19:19,766 INFO [train.py:1114] (1/4) Epoch 2, batch 500, loss[loss=0.3402, simple_loss=0.4124, pruned_loss=0.1341, over 4676.00 frames. ], tot_loss[loss=0.3499, simple_loss=0.3982, pruned_loss=0.1508, over 861168.12 frames. ], batch size: 15, lr: 3.34e-02, grad_scale: 64.0
+2024-07-27 12:19:24,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=14296.0, ans=0.41444000000000003
+2024-07-27 12:19:36,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=14309.333333333334, ans=0.007758840579710145
+2024-07-27 12:19:36,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.88 vs. limit=12.154666666666667
+2024-07-27 12:19:46,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14336.0, ans=0.125
+2024-07-27 12:19:49,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=14336.0, ans=0.006933333333333333
+2024-07-27 12:19:55,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=14349.333333333334, ans=0.125
+2024-07-27 12:20:00,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=14362.666666666666, ans=0.0
+2024-07-27 12:20:00,826 INFO [train.py:1114] (1/4) Epoch 2, batch 550, loss[loss=0.3389, simple_loss=0.4009, pruned_loss=0.1385, over 4628.00 frames. ], tot_loss[loss=0.3497, simple_loss=0.398, pruned_loss=0.1507, over 877267.55 frames. ], batch size: 17, lr: 3.34e-02, grad_scale: 64.0
+2024-07-27 12:20:04,583 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.88 vs. limit=12.886
+2024-07-27 12:20:07,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=14376.0, ans=0.025
+2024-07-27 12:20:27,924 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.749e+01 6.667e+01 7.400e+01 8.349e+01 1.588e+02, threshold=1.480e+02, percent-clipped=3.0
+2024-07-27 12:20:28,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=14402.666666666666, ans=0.125
+2024-07-27 12:20:30,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=14402.666666666666, ans=12.901
+2024-07-27 12:20:37,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=14416.0, ans=0.006599999999999995
+2024-07-27 12:20:40,335 INFO [train.py:1114] (1/4) Epoch 2, batch 600, loss[loss=0.4125, simple_loss=0.4556, pruned_loss=0.1847, over 4641.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3974, pruned_loss=0.1504, over 891857.05 frames. ], batch size: 16, lr: 3.33e-02, grad_scale: 64.0
+2024-07-27 12:20:50,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=14442.666666666666, ans=0.025
+2024-07-27 12:20:53,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.93 vs. limit=12.921
+2024-07-27 12:20:58,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=14456.0, ans=0.41684
+2024-07-27 12:21:06,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14469.333333333334, ans=0.15530666666666668
+2024-07-27 12:21:13,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=14482.666666666666, ans=0.006322222222222229
+2024-07-27 12:21:14,594 INFO [train.py:1114] (1/4) Epoch 2, batch 650, loss[loss=0.3581, simple_loss=0.4152, pruned_loss=0.1506, over 4760.00 frames. ], tot_loss[loss=0.3491, simple_loss=0.3974, pruned_loss=0.1504, over 903648.16 frames. ], batch size: 13, lr: 3.33e-02, grad_scale: 64.0
+2024-07-27 12:21:16,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=14496.0, ans=0.0062666666666666634
+2024-07-27 12:21:18,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.09 vs. limit=12.936
+2024-07-27 12:21:23,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=12.940999999999999
+2024-07-27 12:21:25,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14509.333333333334, ans=0.15490666666666666
+2024-07-27 12:21:27,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.33 vs. limit=12.254666666666667
+2024-07-27 12:21:36,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=14536.0, ans=0.006099999999999994
+2024-07-27 12:21:36,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.647e+01 7.177e+01 7.899e+01 1.481e+02, threshold=1.435e+02, percent-clipped=1.0
+2024-07-27 12:21:37,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.22 vs. limit=12.951
+2024-07-27 12:21:40,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=14536.0, ans=0.125
+2024-07-27 12:21:40,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=14536.0, ans=0.007709565217391304
+2024-07-27 12:21:43,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=14549.333333333334, ans=0.09899494936611666
+2024-07-27 12:21:44,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=12.956
+2024-07-27 12:21:45,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14549.333333333334, ans=0.125
+2024-07-27 12:21:47,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=14549.333333333334, ans=0.125
+2024-07-27 12:21:49,038 INFO [train.py:1114] (1/4) Epoch 2, batch 700, loss[loss=0.3121, simple_loss=0.3619, pruned_loss=0.1312, over 4634.00 frames. ], tot_loss[loss=0.349, simple_loss=0.3971, pruned_loss=0.1505, over 911591.84 frames. ], batch size: 12, lr: 3.32e-02, grad_scale: 64.0
+2024-07-27 12:22:07,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.05 vs. limit=12.971
+2024-07-27 12:22:08,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=14589.333333333334, ans=0.007697971014492754
+2024-07-27 12:22:14,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=14602.666666666666, ans=0.007695072463768117
+2024-07-27 12:22:25,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=14616.0, ans=0.15384
+2024-07-27 12:22:27,333 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 12:22:29,300 INFO [train.py:1114] (1/4) Epoch 2, batch 750, loss[loss=0.3573, simple_loss=0.3963, pruned_loss=0.1592, over 4693.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.397, pruned_loss=0.1507, over 918070.26 frames. ], batch size: 13, lr: 3.31e-02, grad_scale: 64.0
+2024-07-27 12:22:47,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14656.0, ans=0.15344
+2024-07-27 12:22:50,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=14656.0, ans=0.005599999999999994
+2024-07-27 12:22:51,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.60 vs. limit=8.667333333333334
+2024-07-27 12:22:53,334 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.645e+01 6.839e+01 7.355e+01 8.149e+01 1.440e+02, threshold=1.471e+02, percent-clipped=1.0
+2024-07-27 12:22:58,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=14682.666666666666, ans=0.0
+2024-07-27 12:23:05,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=14696.0, ans=0.05
+2024-07-27 12:23:05,825 INFO [train.py:1114] (1/4) Epoch 2, batch 800, loss[loss=0.3417, simple_loss=0.3814, pruned_loss=0.151, over 4845.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3968, pruned_loss=0.1507, over 923088.75 frames. ], batch size: 12, lr: 3.31e-02, grad_scale: 128.0
+2024-07-27 12:23:35,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.27 vs. limit=18.561999999999998
+2024-07-27 12:23:37,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14749.333333333334, ans=0.15250666666666668
+2024-07-27 12:23:40,054 INFO [train.py:1114] (1/4) Epoch 2, batch 850, loss[loss=0.3131, simple_loss=0.372, pruned_loss=0.1271, over 4666.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3947, pruned_loss=0.1491, over 927462.78 frames. ], batch size: 14, lr: 3.30e-02, grad_scale: 64.0
+2024-07-27 12:23:42,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=14762.666666666666, ans=0.025
+2024-07-27 12:23:43,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=14762.666666666666, ans=0.0
+2024-07-27 12:23:51,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=14776.0, ans=0.09899494936611666
+2024-07-27 12:23:55,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.98 vs. limit=18.592
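The Whitening messages throughout these logs compare a per-module "metric" against a scheduled "limit"; when the metric exceeds the limit, the module nudges activations back toward a whiter (more isotropic) channel covariance. The reconstruction below is an assumption about what such a metric could measure, a ratio that equals 1.0 for an isotropic covariance and grows with eigenvalue spread; it is not the literal scaling.py implementation.

```python
# Hedged reconstruction of the "Whitening: ... metric=X vs. limit=Y" diagnostic.
# Assumption: metric = num_channels * tr(C^2) / tr(C)^2 per group, which is 1.0
# when the covariance C is a multiple of the identity (perfectly "white") and
# grows as the spectrum becomes uneven. The real code may differ in detail.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels); channels split evenly into num_groups."""
    n, c = x.shape
    cpg = c // num_groups                               # channels per group
    x = x.reshape(n, num_groups, cpg).transpose(0, 1)   # (groups, frames, cpg)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n        # per-group covariance
    trace = cov.diagonal(dim1=1, dim2=2).sum(dim=-1)    # tr(C)
    frob2 = (cov * cov).sum(dim=(1, 2))                 # tr(C^2) for symmetric C
    return (cpg * frob2 / trace.pow(2)).mean().item()

x = torch.randn(1000, 512)
print(whitening_metric(x))  # ~1.0 for white noise; larger for correlated channels
```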
+2024-07-27 12:23:56,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=14789.333333333334, ans=0.125
+2024-07-27 12:24:01,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=14802.666666666666, ans=0.00498888888888889
+2024-07-27 12:24:02,971 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.532e+01 6.555e+01 7.139e+01 7.731e+01 1.156e+02, threshold=1.428e+02, percent-clipped=0.0
+2024-07-27 12:24:11,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.13 vs. limit=13.056000000000001
+2024-07-27 12:24:11,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14816.0, ans=0.125
+2024-07-27 12:24:15,202 INFO [train.py:1114] (1/4) Epoch 2, batch 900, loss[loss=0.3203, simple_loss=0.3641, pruned_loss=0.1382, over 4848.00 frames. ], tot_loss[loss=0.3461, simple_loss=0.3945, pruned_loss=0.1488, over 928216.01 frames. ], batch size: 12, lr: 3.30e-02, grad_scale: 64.0
+2024-07-27 12:24:16,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.24 vs. limit=12.414666666666667
+2024-07-27 12:24:17,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.78 vs. limit=18.622
+2024-07-27 12:24:22,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.35 vs. limit=5.2264
+2024-07-27 12:24:22,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14842.666666666666, ans=0.125
+2024-07-27 12:24:34,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=14856.0, ans=0.125
+2024-07-27 12:24:35,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=14869.333333333334, ans=0.125
+2024-07-27 12:24:41,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=14869.333333333334, ans=0.37957333333333343
+2024-07-27 12:24:45,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=5.2324
+2024-07-27 12:24:50,047 INFO [train.py:1114] (1/4) Epoch 2, batch 950, loss[loss=0.3183, simple_loss=0.3799, pruned_loss=0.1284, over 4789.00 frames. ], tot_loss[loss=0.3451, simple_loss=0.3942, pruned_loss=0.148, over 930327.07 frames. ], batch size: 12, lr: 3.29e-02, grad_scale: 64.0
+2024-07-27 12:24:55,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=14896.0, ans=0.05
+2024-07-27 12:25:00,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=14909.333333333334, ans=0.125
+2024-07-27 12:25:00,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.90 vs. limit=13.091000000000001
+2024-07-27 12:25:07,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=14922.666666666666, ans=0.0044888888888888895
+2024-07-27 12:25:13,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+01 6.513e+01 7.102e+01 8.226e+01 2.101e+02, threshold=1.420e+02, percent-clipped=1.0
+2024-07-27 12:25:15,429 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.56 vs. limit=18.701999999999998
+2024-07-27 12:25:20,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=14949.333333333334, ans=0.125
+2024-07-27 12:25:25,579 INFO [train.py:1114] (1/4) Epoch 2, batch 1000, loss[loss=0.3532, simple_loss=0.4032, pruned_loss=0.1516, over 4968.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3961, pruned_loss=0.1494, over 930003.16 frames. ], batch size: 13, lr: 3.29e-02, grad_scale: 64.0
+2024-07-27 12:25:29,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=14962.666666666666, ans=0.15037333333333336
+2024-07-27 12:25:37,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=14976.0, ans=0.125
+2024-07-27 12:25:40,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=14989.333333333334, ans=0.37537333333333334
+2024-07-27 12:25:43,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14989.333333333334, ans=0.125
+2024-07-27 12:25:45,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=14989.333333333334, ans=0.025
+2024-07-27 12:25:48,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=15002.666666666666, ans=0.125
+2024-07-27 12:25:49,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.37 vs. limit=18.752000000000002
+2024-07-27 12:25:52,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=15002.666666666666, ans=0.04949747468305833
+2024-07-27 12:26:00,217 INFO [train.py:1114] (1/4) Epoch 2, batch 1050, loss[loss=0.3355, simple_loss=0.3955, pruned_loss=0.1377, over 4875.00 frames. ], tot_loss[loss=0.346, simple_loss=0.3945, pruned_loss=0.1488, over 932618.77 frames. ], batch size: 14, lr: 3.28e-02, grad_scale: 64.0
+2024-07-27 12:26:03,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=13.136
+2024-07-27 12:26:03,817 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 12:26:09,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=15042.666666666666, ans=0.125
+2024-07-27 12:26:14,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=15056.0, ans=0.125
+2024-07-27 12:26:18,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=15056.0, ans=0.00393333333333333
+2024-07-27 12:26:20,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=15069.333333333334, ans=0.3725733333333334
+2024-07-27 12:26:22,932 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.480e+01 6.937e+01 7.724e+01 1.151e+02, threshold=1.387e+02, percent-clipped=0.0
+2024-07-27 12:26:24,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.28 vs. limit=5.260400000000001
+2024-07-27 12:26:25,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.08 vs. limit=5.260400000000001
+2024-07-27 12:26:31,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=15082.666666666666, ans=0.125
+2024-07-27 12:26:33,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=15082.666666666666, ans=0.125
+2024-07-27 12:26:34,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.14 vs. limit=5.2623999999999995
+2024-07-27 12:26:35,186 INFO [train.py:1114] (1/4) Epoch 2, batch 1100, loss[loss=0.2746, simple_loss=0.3258, pruned_loss=0.1117, over 4900.00 frames. ], tot_loss[loss=0.344, simple_loss=0.3932, pruned_loss=0.1474, over 935119.82 frames. ], batch size: 13, lr: 3.28e-02, grad_scale: 64.0
+2024-07-27 12:26:49,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=15122.666666666666, ans=0.035
+2024-07-27 12:27:09,690 INFO [train.py:1114] (1/4) Epoch 2, batch 1150, loss[loss=0.3039, simple_loss=0.3603, pruned_loss=0.1237, over 4891.00 frames. ], tot_loss[loss=0.3438, simple_loss=0.3928, pruned_loss=0.1474, over 934950.64 frames. ], batch size: 13, lr: 3.27e-02, grad_scale: 64.0
+2024-07-27 12:27:17,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15176.0, ans=0.14824
+2024-07-27 12:27:24,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.49 vs. limit=12.588000000000001
+2024-07-27 12:27:30,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=15189.333333333334, ans=0.003377777777777778
+2024-07-27 12:27:35,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.945e+01 6.616e+01 7.321e+01 8.237e+01 1.316e+02, threshold=1.464e+02, percent-clipped=0.0
+2024-07-27 12:27:48,586 INFO [train.py:1114] (1/4) Epoch 2, batch 1200, loss[loss=0.3681, simple_loss=0.4115, pruned_loss=0.1624, over 4881.00 frames. ], tot_loss[loss=0.3472, simple_loss=0.3957, pruned_loss=0.1493, over 933936.52 frames. ], batch size: 14, lr: 3.27e-02, grad_scale: 64.0
+2024-07-27 12:27:51,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=15229.333333333334, ans=0.125
+2024-07-27 12:27:52,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15229.333333333334, ans=0.14770666666666668
+2024-07-27 12:28:11,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.87 vs. limit=13.221
+2024-07-27 12:28:13,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15269.333333333334, ans=0.14730666666666667
+2024-07-27 12:28:13,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=15269.333333333334, ans=0.125
+2024-07-27 12:28:14,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=15269.333333333334, ans=0.0
+2024-07-27 12:28:19,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=15282.666666666666, ans=0.002988888888888895
+2024-07-27 12:28:22,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=15282.666666666666, ans=0.002988888888888895
+2024-07-27 12:28:26,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=15296.0, ans=0.0029333333333333295
+2024-07-27 12:28:26,768 INFO [train.py:1114] (1/4) Epoch 2, batch 1250, loss[loss=0.3939, simple_loss=0.4377, pruned_loss=0.1751, over 4808.00 frames. ], tot_loss[loss=0.3448, simple_loss=0.3946, pruned_loss=0.1475, over 937954.83 frames. ], batch size: 15, lr: 3.26e-02, grad_scale: 64.0
+2024-07-27 12:28:29,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=15296.0, ans=0.025
+2024-07-27 12:28:35,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=15309.333333333334, ans=0.125
+2024-07-27 12:28:36,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15309.333333333334, ans=0.14690666666666669
+2024-07-27 12:28:49,356 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.573e+01 7.173e+01 8.198e+01 1.375e+02, threshold=1.435e+02, percent-clipped=0.0
+2024-07-27 12:28:56,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=15349.333333333334, ans=0.025
+2024-07-27 12:29:01,001 INFO [train.py:1114] (1/4) Epoch 2, batch 1300, loss[loss=0.3901, simple_loss=0.4274, pruned_loss=0.1764, over 4735.00 frames. ], tot_loss[loss=0.3431, simple_loss=0.3928, pruned_loss=0.1467, over 939231.57 frames. ], batch size: 19, lr: 3.26e-02, grad_scale: 64.0
+2024-07-27 12:29:03,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.38 vs. limit=5.304399999999999
+2024-07-27 12:29:07,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.99 vs. limit=19.032
+2024-07-27 12:29:07,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=15376.0, ans=0.125
+2024-07-27 12:29:09,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15376.0, ans=0.125
+2024-07-27 12:29:35,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=15429.333333333334, ans=0.00751536231884058
+2024-07-27 12:29:35,583 INFO [train.py:1114] (1/4) Epoch 2, batch 1350, loss[loss=0.326, simple_loss=0.3866, pruned_loss=0.1327, over 4751.00 frames. ], tot_loss[loss=0.3428, simple_loss=0.3928, pruned_loss=0.1464, over 940984.37 frames. ], batch size: 13, lr: 3.25e-02, grad_scale: 64.0
+2024-07-27 12:29:35,960 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.40 vs. limit=13.286
+2024-07-27 12:29:41,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=15429.333333333334, ans=0.125
+2024-07-27 12:29:44,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=15442.666666666666, ans=0.0023222222222222255
+2024-07-27 12:29:45,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=15442.666666666666, ans=0.125
+2024-07-27 12:29:57,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=15469.333333333334, ans=0.3585733333333334
+2024-07-27 12:29:58,812 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.395e+01 7.183e+01 7.821e+01 1.561e+02, threshold=1.437e+02, percent-clipped=1.0
+2024-07-27 12:30:10,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15496.0, ans=0.14504
+2024-07-27 12:30:10,898 INFO [train.py:1114] (1/4) Epoch 2, batch 1400, loss[loss=0.2632, simple_loss=0.3246, pruned_loss=0.1009, over 4719.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3925, pruned_loss=0.1463, over 942831.43 frames. ], batch size: 11, lr: 3.25e-02, grad_scale: 64.0
+2024-07-27 12:30:11,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=15496.0, ans=0.35764000000000007
+2024-07-27 12:30:29,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=15522.666666666666, ans=0.125
+2024-07-27 12:30:37,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.92 vs. limit=13.326
+2024-07-27 12:30:42,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=15549.333333333334, ans=0.3557733333333334
+2024-07-27 12:30:43,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=15549.333333333334, ans=10.0
+2024-07-27 12:30:43,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15549.333333333334, ans=0.14450666666666667
+2024-07-27 12:30:46,192 INFO [train.py:1114] (1/4) Epoch 2, batch 1450, loss[loss=0.324, simple_loss=0.3804, pruned_loss=0.1338, over 4676.00 frames. ], tot_loss[loss=0.3444, simple_loss=0.3939, pruned_loss=0.1474, over 942357.90 frames. ], batch size: 15, lr: 3.24e-02, grad_scale: 64.0
+2024-07-27 12:30:50,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=15562.666666666666, ans=0.125
+2024-07-27 12:31:00,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=15589.333333333334, ans=0.125
+2024-07-27 12:31:00,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15589.333333333334, ans=0.125
+2024-07-27 12:31:07,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.67 vs. limit=12.801333333333332
limit=12.801333333333332 +2024-07-27 12:31:08,926 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.669e+01 6.624e+01 7.283e+01 7.925e+01 1.878e+02, threshold=1.457e+02, percent-clipped=2.0 +2024-07-27 12:31:10,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.20 vs. limit=5.3404 +2024-07-27 12:31:11,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=13.350999999999999 +2024-07-27 12:31:16,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:18,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:23,974 INFO [train.py:1114] (1/4) Epoch 2, batch 1500, loss[loss=0.3427, simple_loss=0.4013, pruned_loss=0.142, over 4813.00 frames. ], tot_loss[loss=0.3442, simple_loss=0.3937, pruned_loss=0.1473, over 942342.39 frames. ], batch size: 14, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:31:27,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15629.333333333334, ans=0.125 +2024-07-27 12:31:28,489 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:31:33,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=15642.666666666666, ans=0.0014888888888888938 +2024-07-27 12:31:40,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.11 vs. limit=5.3484 +2024-07-27 12:31:48,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.47 vs. limit=5.3504000000000005 +2024-07-27 12:31:49,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15669.333333333334, ans=0.14330666666666667 +2024-07-27 12:31:52,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15682.666666666666, ans=0.14317333333333335 +2024-07-27 12:31:55,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=15682.666666666666, ans=19.262 +2024-07-27 12:31:58,846 INFO [train.py:1114] (1/4) Epoch 2, batch 1550, loss[loss=0.3395, simple_loss=0.3945, pruned_loss=0.1423, over 4902.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3932, pruned_loss=0.1473, over 938815.59 frames. 
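
The `ScheduledFloat` entries above track hyperparameters (dropout probabilities, skip rates, attention rates) that are warped piecewise-linearly against `batch_count`, with `ans` being the current value. A minimal sketch of such a schedule follows; the breakpoints are illustrative assumptions, though a 0.3-to-0.1 decay over the first 20k batches does reproduce the `feed_forward1.out_proj.dropout_p` values logged here (e.g. `ans=0.14504` at `batch_count=15496.0`).

```python
# Minimal sketch of a piecewise-linear schedule over batch_count,
# in the spirit of the ScheduledFloat values logged above.
# The breakpoints below are illustrative, not the run's real config.
from bisect import bisect_right

def scheduled_float(batch_count: float, schedule: list[tuple[float, float]]) -> float:
    """Linearly interpolate a value from sorted (batch_count, value) breakpoints."""
    xs = [x for x, _ in schedule]
    ys = [y for _, y in schedule]
    if batch_count <= xs[0]:
        return ys[0]
    if batch_count >= xs[-1]:
        return ys[-1]
    i = bisect_right(xs, batch_count) - 1
    t = (batch_count - xs[i]) / (xs[i + 1] - xs[i])
    return ys[i] + t * (ys[i + 1] - ys[i])

# A dropout_p decaying 0.3 -> 0.1 over the first 20k batches matches the log:
print(scheduled_float(15496.0, [(0.0, 0.3), (20000.0, 0.1)]))  # ~0.14504
```
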
], batch size: 15, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:06,086 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:32:08,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=15709.333333333334, ans=0.007454492753623188 +2024-07-27 12:32:15,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15722.666666666666, ans=0.125 +2024-07-27 12:32:22,801 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+01 6.571e+01 7.346e+01 8.400e+01 2.303e+02, threshold=1.469e+02, percent-clipped=1.0 +2024-07-27 12:32:25,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=15736.0, ans=0.125 +2024-07-27 12:32:34,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=15762.666666666666, ans=0.007442898550724638 +2024-07-27 12:32:34,732 INFO [train.py:1114] (1/4) Epoch 2, batch 1600, loss[loss=0.3238, simple_loss=0.3826, pruned_loss=0.1325, over 4869.00 frames. ], tot_loss[loss=0.3443, simple_loss=0.3936, pruned_loss=0.1475, over 937175.58 frames. ], batch size: 14, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:36,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=15762.666666666666, ans=0.025 +2024-07-27 12:32:43,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=15776.0, ans=0.125 +2024-07-27 12:32:55,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15789.333333333334, ans=0.125 +2024-07-27 12:32:55,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=15789.333333333334, ans=0.09899494936611666 +2024-07-27 12:32:56,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.78 vs. limit=19.342 +2024-07-27 12:33:12,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15816.0, ans=0.14184 +2024-07-27 12:33:12,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=15816.0, ans=0.09899494936611666 +2024-07-27 12:33:13,394 INFO [train.py:1114] (1/4) Epoch 2, batch 1650, loss[loss=0.3707, simple_loss=0.4039, pruned_loss=0.1688, over 4664.00 frames. ], tot_loss[loss=0.3445, simple_loss=0.3937, pruned_loss=0.1477, over 936707.08 frames. 
], batch size: 14, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:33:21,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15842.666666666666, ans=0.14157333333333336 +2024-07-27 12:33:32,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=15856.0, ans=0.0005999999999999964 +2024-07-27 12:33:40,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=15869.333333333334, ans=0.025 +2024-07-27 12:33:42,228 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.520e+01 7.164e+01 7.874e+01 1.221e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 12:33:47,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15882.666666666666, ans=0.14117333333333335 +2024-07-27 12:33:59,011 INFO [train.py:1114] (1/4) Epoch 2, batch 1700, loss[loss=0.3021, simple_loss=0.3655, pruned_loss=0.1194, over 4714.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3925, pruned_loss=0.1462, over 938731.23 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:01,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=15896.0, ans=0.125 +2024-07-27 12:34:21,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=13.475999999999999 +2024-07-27 12:34:25,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=15936.0, ans=0.025 +2024-07-27 12:34:27,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=15949.333333333334, ans=0.0002111111111111133 +2024-07-27 12:34:31,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=15949.333333333334, ans=0.125 +2024-07-27 12:34:33,910 INFO [train.py:1114] (1/4) Epoch 2, batch 1750, loss[loss=0.3135, simple_loss=0.3594, pruned_loss=0.1339, over 4818.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3929, pruned_loss=0.1461, over 939871.09 frames. 
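
The recurring `optim.py` warnings summarize the distribution of recent gradient norms as five quantiles (min, 25%, median, 75%, max) next to the active clipping threshold. With `Clipping_scale=2.0`, the logged threshold tracks roughly twice the logged median (e.g. 2 x 7.164e+01 ≈ 1.433e+02 just above), so one plausible reading is median-based clipping over a sliding window of recent batches. The sketch below assumes exactly that reading and an arbitrary window of 128 steps; the actual optimizer keeps this bookkeeping per parameter group.

```python
# Hedged sketch of quartile-based gradient clipping: the threshold is
# clipping_scale times the median grad norm over a recent window, which
# matches the ratios seen in the optim.py warnings above.
import torch

def clip_by_scaled_median(params, norm_history, clipping_scale=2.0, window=128):
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.norm() for g in grads]))
    norm_history.append(float(total_norm))
    recent = torch.tensor(norm_history[-window:])
    quartiles = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * float(quartiles[2])  # 2x the median
    if float(total_norm) > threshold:
        for g in grads:
            g.mul_(threshold / float(total_norm))  # scale grads down in place
    return quartiles, threshold
```
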
], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:36,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=15962.666666666666, ans=0.125 +2024-07-27 12:34:42,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=15976.0, ans=0.125 +2024-07-27 12:34:45,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15976.0, ans=0.125 +2024-07-27 12:34:47,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=15989.333333333334, ans=0.125 +2024-07-27 12:34:49,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=15989.333333333334, ans=0.007393623188405797 +2024-07-27 12:34:59,088 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.489e+01 6.792e+01 7.364e+01 8.042e+01 2.018e+02, threshold=1.473e+02, percent-clipped=1.0 +2024-07-27 12:34:59,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=16002.666666666666, ans=0.3399066666666668 +2024-07-27 12:35:10,709 INFO [train.py:1114] (1/4) Epoch 2, batch 1800, loss[loss=0.3736, simple_loss=0.4142, pruned_loss=0.1665, over 4628.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.392, pruned_loss=0.1459, over 940467.75 frames. ], batch size: 13, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:14,960 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:35:28,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16056.0, ans=0.13944 +2024-07-27 12:35:36,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.93 vs. limit=13.034666666666666 +2024-07-27 12:35:39,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16082.666666666666, ans=0.13917333333333334 +2024-07-27 12:35:39,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=5.4124 +2024-07-27 12:35:45,373 INFO [train.py:1114] (1/4) Epoch 2, batch 1850, loss[loss=0.3068, simple_loss=0.374, pruned_loss=0.1198, over 4812.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.3902, pruned_loss=0.1438, over 940303.24 frames. ], batch size: 14, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:45,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.31 vs. limit=19.572 +2024-07-27 12:35:47,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=16096.0, ans=0.13904 +2024-07-27 12:36:17,282 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.413e+01 7.038e+01 7.663e+01 1.052e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 12:36:17,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.22 vs. 
limit=19.602 +2024-07-27 12:36:20,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16136.0, ans=0.125 +2024-07-27 12:36:28,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=16162.666666666666, ans=0.0 +2024-07-27 12:36:28,687 INFO [train.py:1114] (1/4) Epoch 2, batch 1900, loss[loss=0.3427, simple_loss=0.3881, pruned_loss=0.1487, over 4664.00 frames. ], tot_loss[loss=0.3385, simple_loss=0.3902, pruned_loss=0.1435, over 941607.73 frames. ], batch size: 14, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:37:05,417 INFO [train.py:1114] (1/4) Epoch 2, batch 1950, loss[loss=0.2592, simple_loss=0.3329, pruned_loss=0.09272, over 4889.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3912, pruned_loss=0.1435, over 943481.17 frames. ], batch size: 13, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:37:08,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=16229.333333333334, ans=0.0 +2024-07-27 12:37:09,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=19.672 +2024-07-27 12:37:10,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=16229.333333333334, ans=0.125 +2024-07-27 12:37:24,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=16256.0, ans=0.125 +2024-07-27 12:37:28,261 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.625e+01 6.630e+01 7.143e+01 8.194e+01 1.176e+02, threshold=1.429e+02, percent-clipped=0.0 +2024-07-27 12:37:38,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=16269.333333333334, ans=0.007332753623188406 +2024-07-27 12:38:00,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=16296.0, ans=0.025 +2024-07-27 12:38:00,618 INFO [train.py:1114] (1/4) Epoch 2, batch 2000, loss[loss=0.3105, simple_loss=0.3488, pruned_loss=0.1361, over 4803.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3923, pruned_loss=0.1443, over 941032.80 frames. ], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:00,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.19 vs. limit=5.4444 +2024-07-27 12:38:03,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=16296.0, ans=0.09899494936611666 +2024-07-27 12:38:04,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16296.0, ans=0.13704 +2024-07-27 12:38:06,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=16296.0, ans=0.125 +2024-07-27 12:38:17,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.40 vs. 
limit=13.621 +2024-07-27 12:38:35,083 INFO [train.py:1114] (1/4) Epoch 2, batch 2050, loss[loss=0.3298, simple_loss=0.3802, pruned_loss=0.1397, over 4617.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3925, pruned_loss=0.1449, over 939142.44 frames. ], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:36,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.04 vs. limit=10.545066666666667 +2024-07-27 12:38:41,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.74 vs. limit=19.772 +2024-07-27 12:38:53,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.72 vs. limit=13.645999999999999 +2024-07-27 12:38:53,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.99 vs. limit=7.277866666666666 +2024-07-27 12:38:53,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=13.645999999999999 +2024-07-27 12:38:55,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.71 vs. limit=13.194666666666667 +2024-07-27 12:38:57,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.67 vs. limit=13.651 +2024-07-27 12:38:58,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=16402.666666666668, ans=0.0 +2024-07-27 12:38:58,885 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.444e+01 7.138e+01 8.017e+01 1.723e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 12:39:15,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=16416.0, ans=0.125 +2024-07-27 12:39:19,462 INFO [train.py:1114] (1/4) Epoch 2, batch 2100, loss[loss=0.3426, simple_loss=0.3916, pruned_loss=0.1468, over 4751.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3903, pruned_loss=0.1429, over 940961.54 frames. ], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:39:27,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16429.333333333332, ans=0.0 +2024-07-27 12:39:27,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=16429.333333333332, ans=0.07 +2024-07-27 12:39:49,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16469.333333333332, ans=0.13530666666666666 +2024-07-27 12:40:05,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=13.681000000000001 +2024-07-27 12:40:06,948 INFO [train.py:1114] (1/4) Epoch 2, batch 2150, loss[loss=0.3476, simple_loss=0.3921, pruned_loss=0.1515, over 4888.00 frames. ], tot_loss[loss=0.3376, simple_loss=0.3903, pruned_loss=0.1425, over 944078.24 frames. 
], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:40:12,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=16496.0, ans=0.025 +2024-07-27 12:40:18,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=16509.333333333332, ans=0.0 +2024-07-27 12:40:22,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=16509.333333333332, ans=0.125 +2024-07-27 12:40:29,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16522.666666666668, ans=0.125 +2024-07-27 12:40:33,588 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.279e+01 6.440e+01 7.313e+01 8.077e+01 1.347e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 12:40:33,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=16536.0, ans=0.025 +2024-07-27 12:40:34,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16536.0, ans=0.0 +2024-07-27 12:40:38,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.48 vs. limit=9.137333333333332 +2024-07-27 12:40:40,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=16549.333333333332, ans=10.0 +2024-07-27 12:40:43,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=16549.333333333332, ans=13.706 +2024-07-27 12:40:44,969 INFO [train.py:1114] (1/4) Epoch 2, batch 2200, loss[loss=0.3112, simple_loss=0.3873, pruned_loss=0.1176, over 4814.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3888, pruned_loss=0.1414, over 943017.92 frames. ], batch size: 14, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:40:46,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.23 vs. limit=19.922 +2024-07-27 12:40:49,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=16562.666666666668, ans=0.007268985507246377 +2024-07-27 12:40:50,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16562.666666666668, ans=0.125 +2024-07-27 12:40:53,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=16576.0, ans=0.025 +2024-07-27 12:41:05,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.70 vs. limit=13.294666666666666 +2024-07-27 12:41:09,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=16589.333333333332, ans=0.0 +2024-07-27 12:41:12,223 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.91 vs. 
limit=13.301333333333334 +2024-07-27 12:41:21,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.64 vs. limit=13.308 +2024-07-27 12:41:26,214 INFO [train.py:1114] (1/4) Epoch 2, batch 2250, loss[loss=0.3228, simple_loss=0.3872, pruned_loss=0.1292, over 4700.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.3882, pruned_loss=0.1412, over 941553.86 frames. ], batch size: 13, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:41:31,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16629.333333333332, ans=0.125 +2024-07-27 12:41:33,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=16642.666666666668, ans=0.0 +2024-07-27 12:41:35,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=16642.666666666668, ans=0.125 +2024-07-27 12:41:42,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=13.746 +2024-07-27 12:41:48,632 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.411e+01 7.130e+01 8.285e+01 1.332e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 12:41:48,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16669.333333333332, ans=0.13330666666666668 +2024-07-27 12:42:00,433 INFO [train.py:1114] (1/4) Epoch 2, batch 2300, loss[loss=0.2722, simple_loss=0.3321, pruned_loss=0.1061, over 4928.00 frames. ], tot_loss[loss=0.3337, simple_loss=0.3864, pruned_loss=0.1405, over 939636.62 frames. ], batch size: 12, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:12,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.48 vs. limit=13.761 +2024-07-27 12:42:15,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=16709.333333333332, ans=0.125 +2024-07-27 12:42:35,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=16749.333333333332, ans=0.125 +2024-07-27 12:42:47,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.65 vs. limit=13.786000000000001 +2024-07-27 12:42:47,925 INFO [train.py:1114] (1/4) Epoch 2, batch 2350, loss[loss=0.3406, simple_loss=0.392, pruned_loss=0.1445, over 4636.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.3874, pruned_loss=0.1415, over 941939.90 frames. 
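
The `Whitening` lines fire when a module's activations drift away from an isotropic ("white") covariance: the measured `metric` is compared against a `limit` that is itself scheduled (see the `whitening_limit` ScheduledFloat entries elsewhere in this log). A hedged sketch of one such metric is below; it equals 1.0 for a perfectly isotropic per-group covariance and grows with eigenvalue spread. The exact normalization in `scaling.py` may differ.

```python
# Sketch of a whitening metric: d * tr(C^2) / tr(C)^2 per channel group,
# averaged over groups. Equals 1.0 iff the covariance C is isotropic.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels), channels split into num_groups groups
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)  # (G, n, d)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / n                 # (G, d, d)
    d = cov.shape[-1]
    tr = cov.diagonal(dim1=-2, dim2=-1).sum(-1)     # sum of eigenvalues
    tr_sq = (cov * cov).sum(dim=(-2, -1))           # sum of squared eigenvalues
    return (d * tr_sq / tr.pow(2)).mean()
```
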
], batch size: 13, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:50,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=16762.666666666668, ans=0.31330666666666673 +2024-07-27 12:42:53,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16762.666666666668, ans=0.0 +2024-07-27 12:42:56,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=16776.0, ans=0.0 +2024-07-27 12:43:03,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16789.333333333332, ans=0.125 +2024-07-27 12:43:12,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16802.666666666668, ans=0.125 +2024-07-27 12:43:13,076 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.842e+01 6.484e+01 7.035e+01 7.953e+01 1.463e+02, threshold=1.407e+02, percent-clipped=1.0 +2024-07-27 12:43:24,673 INFO [train.py:1114] (1/4) Epoch 2, batch 2400, loss[loss=0.3081, simple_loss=0.372, pruned_loss=0.122, over 4634.00 frames. ], tot_loss[loss=0.3362, simple_loss=0.3883, pruned_loss=0.142, over 941489.18 frames. ], batch size: 12, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:43:40,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=13.816 +2024-07-27 12:43:42,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=13.821 +2024-07-27 12:44:14,564 INFO [train.py:1114] (1/4) Epoch 2, batch 2450, loss[loss=0.2935, simple_loss=0.3472, pruned_loss=0.1198, over 4693.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3903, pruned_loss=0.1431, over 937435.05 frames. ], batch size: 13, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:44:14,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=16896.0, ans=0.125 +2024-07-27 12:44:21,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.42 vs. limit=10.7584 +2024-07-27 12:44:21,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=16896.0, ans=0.0 +2024-07-27 12:44:28,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=16909.333333333332, ans=0.45364 +2024-07-27 12:44:43,826 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.416e+01 7.061e+01 7.801e+01 1.253e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 12:45:00,019 INFO [train.py:1114] (1/4) Epoch 2, batch 2500, loss[loss=0.4067, simple_loss=0.4486, pruned_loss=0.1824, over 4809.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3886, pruned_loss=0.1417, over 939350.55 frames. 
], batch size: 14, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:01,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=16962.666666666668, ans=0.125 +2024-07-27 12:45:14,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=16989.333333333332, ans=0.025 +2024-07-27 12:45:25,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=17002.666666666668, ans=0.125 +2024-07-27 12:45:27,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.00 vs. limit=13.876000000000001 +2024-07-27 12:45:31,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=17016.0, ans=10.0 +2024-07-27 12:45:38,911 INFO [train.py:1114] (1/4) Epoch 2, batch 2550, loss[loss=0.3345, simple_loss=0.3718, pruned_loss=0.1486, over 4798.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.3859, pruned_loss=0.1399, over 939009.91 frames. ], batch size: 11, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:39,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=17029.333333333332, ans=0.125 +2024-07-27 12:45:44,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=17029.333333333332, ans=0.125 +2024-07-27 12:46:01,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=17042.666666666668, ans=0.09899494936611666 +2024-07-27 12:46:01,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=17042.666666666668, ans=13.891000000000002 +2024-07-27 12:46:03,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=10.8224 +2024-07-27 12:46:11,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=17069.333333333332, ans=10.0 +2024-07-27 12:46:13,110 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.481e+01 6.949e+01 7.902e+01 1.029e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 12:46:16,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17069.333333333332, ans=0.12930666666666668 +2024-07-27 12:46:20,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=17082.666666666668, ans=0.125 +2024-07-27 12:46:24,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=17082.666666666668, ans=0.09899494936611666 +2024-07-27 12:46:24,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.03 vs. 
limit=13.906 +2024-07-27 12:46:25,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=17096.0, ans=0.30164000000000013 +2024-07-27 12:46:26,457 INFO [train.py:1114] (1/4) Epoch 2, batch 2600, loss[loss=0.3402, simple_loss=0.4009, pruned_loss=0.1397, over 4900.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3884, pruned_loss=0.1417, over 938022.83 frames. ], batch size: 13, lr: 3.14e-02, grad_scale: 32.0 +2024-07-27 12:46:32,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=17096.0, ans=0.125 +2024-07-27 12:46:48,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.73 vs. limit=9.280666666666667 +2024-07-27 12:47:06,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.03 vs. limit=5.570399999999999 +2024-07-27 12:47:20,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17149.333333333332, ans=0.12850666666666669 +2024-07-27 12:47:23,231 INFO [train.py:1114] (1/4) Epoch 2, batch 2650, loss[loss=0.3486, simple_loss=0.4044, pruned_loss=0.1464, over 4642.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.389, pruned_loss=0.1416, over 939816.12 frames. ], batch size: 16, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:47:41,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=17189.333333333332, ans=0.0 +2024-07-27 12:47:51,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.937e+01 6.612e+01 7.199e+01 8.016e+01 1.169e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 12:47:55,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=17216.0, ans=0.125 +2024-07-27 12:48:02,475 INFO [train.py:1114] (1/4) Epoch 2, batch 2700, loss[loss=0.3139, simple_loss=0.3737, pruned_loss=0.1271, over 4744.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.3882, pruned_loss=0.1412, over 939542.94 frames. ], batch size: 14, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:48:02,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17229.333333333332, ans=0.125 +2024-07-27 12:48:02,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=17229.333333333332, ans=0.007124057971014493 +2024-07-27 12:48:17,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. 
limit=13.971 +2024-07-27 12:48:29,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=17282.666666666668, ans=0.125 +2024-07-27 12:48:29,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=17282.666666666668, ans=0.007112463768115942 +2024-07-27 12:48:31,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=17282.666666666668, ans=0.125 +2024-07-27 12:48:33,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=17282.666666666668, ans=0.125 +2024-07-27 12:48:34,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=17282.666666666668, ans=0.125 +2024-07-27 12:48:36,989 INFO [train.py:1114] (1/4) Epoch 2, batch 2750, loss[loss=0.3385, simple_loss=0.3764, pruned_loss=0.1503, over 4708.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3861, pruned_loss=0.1409, over 939883.67 frames. ], batch size: 12, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:48:41,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=17296.0, ans=0.035 +2024-07-27 12:48:42,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=17296.0, ans=0.125 +2024-07-27 12:48:48,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17309.333333333332, ans=0.12690666666666667 +2024-07-27 12:48:52,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17322.666666666668, ans=0.125 +2024-07-27 12:48:55,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.81 vs. limit=20.492 +2024-07-27 12:49:02,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.446e+01 6.464e+01 7.074e+01 8.489e+01 1.052e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-27 12:49:02,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.84 vs. limit=14.001000000000001 +2024-07-27 12:49:08,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=17349.333333333332, ans=13.674666666666665 +2024-07-27 12:49:13,956 INFO [train.py:1114] (1/4) Epoch 2, batch 2800, loss[loss=0.4014, simple_loss=0.4083, pruned_loss=0.1972, over 3446.00 frames. ], tot_loss[loss=0.3326, simple_loss=0.3851, pruned_loss=0.1401, over 937904.90 frames. ], batch size: 35, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:49:27,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17389.333333333332, ans=0.12610666666666667 +2024-07-27 12:49:29,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=17389.333333333332, ans=0.0 +2024-07-27 12:49:48,560 INFO [train.py:1114] (1/4) Epoch 2, batch 2850, loss[loss=0.3372, simple_loss=0.3869, pruned_loss=0.1437, over 4961.00 frames. 
], tot_loss[loss=0.333, simple_loss=0.3853, pruned_loss=0.1403, over 936083.76 frames. ], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:49:59,057 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:49:59,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=17442.666666666668, ans=0.125 +2024-07-27 12:50:00,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=17442.666666666668, ans=0.125 +2024-07-27 12:50:11,763 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.590e+01 7.080e+01 8.267e+01 4.948e+02, threshold=1.416e+02, percent-clipped=1.0 +2024-07-27 12:50:31,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=17496.0, ans=0.0 +2024-07-27 12:50:32,060 INFO [train.py:1114] (1/4) Epoch 2, batch 2900, loss[loss=0.2887, simple_loss=0.3621, pruned_loss=0.1076, over 4830.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3868, pruned_loss=0.1406, over 939964.78 frames. ], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:50:45,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=11.003733333333333 +2024-07-27 12:50:46,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=17509.333333333332, ans=0.007063188405797102 +2024-07-27 12:50:54,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.37 vs. limit=5.6284 +2024-07-27 12:51:03,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=17549.333333333332, ans=0.28577333333333343 +2024-07-27 12:51:10,546 INFO [train.py:1114] (1/4) Epoch 2, batch 2950, loss[loss=0.2847, simple_loss=0.3534, pruned_loss=0.1079, over 4698.00 frames. ], tot_loss[loss=0.3334, simple_loss=0.3857, pruned_loss=0.1406, over 939027.14 frames. 
], batch size: 12, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:12,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17562.666666666668, ans=0.12437333333333334 +2024-07-27 12:51:17,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=17576.0, ans=0.125 +2024-07-27 12:51:34,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=17589.333333333332, ans=0.125 +2024-07-27 12:51:37,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=17602.666666666668, ans=0.125 +2024-07-27 12:51:38,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.523e+01 7.161e+01 8.021e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 12:51:46,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=17616.0, ans=0.025 +2024-07-27 12:51:49,462 INFO [train.py:1114] (1/4) Epoch 2, batch 3000, loss[loss=0.3905, simple_loss=0.4282, pruned_loss=0.1764, over 4755.00 frames. ], tot_loss[loss=0.3333, simple_loss=0.386, pruned_loss=0.1403, over 938387.64 frames. ], batch size: 13, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:49,463 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 12:52:02,749 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.2667, simple_loss=0.3583, pruned_loss=0.0876, over 944034.00 frames. +2024-07-27 12:52:02,750 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 12:52:22,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=17642.666666666668, ans=0.0 +2024-07-27 12:52:28,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=17656.0, ans=0.9265599999999999 +2024-07-27 12:52:30,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=14.121 +2024-07-27 12:52:37,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=17669.333333333332, ans=0.46503999999999995 +2024-07-27 12:52:38,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=17669.333333333332, ans=0.125 +2024-07-27 12:52:52,629 INFO [train.py:1114] (1/4) Epoch 2, batch 3050, loss[loss=0.341, simple_loss=0.3913, pruned_loss=0.1454, over 4639.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3873, pruned_loss=0.1412, over 937320.00 frames. ], batch size: 12, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:52:53,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=17696.0, ans=0.025 +2024-07-27 12:53:03,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.86 vs. 
limit=11.083733333333333 +2024-07-27 12:53:10,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=17722.666666666668, ans=0.04949747468305833 +2024-07-27 12:53:10,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=17722.666666666668, ans=0.125 +2024-07-27 12:53:18,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.442e+01 7.179e+01 7.661e+01 1.033e+02, threshold=1.436e+02, percent-clipped=0.0 +2024-07-27 12:53:21,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=5.6604 +2024-07-27 12:53:29,164 INFO [train.py:1114] (1/4) Epoch 2, batch 3100, loss[loss=0.3292, simple_loss=0.3793, pruned_loss=0.1396, over 4643.00 frames. ], tot_loss[loss=0.3336, simple_loss=0.3862, pruned_loss=0.1405, over 938858.71 frames. ], batch size: 16, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:53:39,610 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:53:46,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=17789.333333333332, ans=0.0 +2024-07-27 12:53:47,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17789.333333333332, ans=0.12210666666666667 +2024-07-27 12:53:54,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=17802.666666666668, ans=0.9280266666666667 +2024-07-27 12:53:59,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=17816.0, ans=0.006996521739130435 +2024-07-27 12:54:01,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17816.0, ans=0.12184 +2024-07-27 12:54:03,850 INFO [train.py:1114] (1/4) Epoch 2, batch 3150, loss[loss=0.372, simple_loss=0.4245, pruned_loss=0.1597, over 4639.00 frames. ], tot_loss[loss=0.331, simple_loss=0.3844, pruned_loss=0.1388, over 938677.72 frames. ], batch size: 17, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:54:16,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=17842.666666666668, ans=0.125 +2024-07-27 12:54:24,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=17869.333333333332, ans=0.125 +2024-07-27 12:54:27,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.385e+01 6.845e+01 7.954e+01 1.765e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-27 12:54:28,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.74 vs. 
limit=14.201 +2024-07-27 12:54:33,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17882.666666666668, ans=0.12117333333333333 +2024-07-27 12:54:34,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=17882.666666666668, ans=0.0069820289855072465 +2024-07-27 12:54:35,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.17 vs. limit=14.206 +2024-07-27 12:54:36,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17882.666666666668, ans=0.12117333333333333 +2024-07-27 12:54:38,096 INFO [train.py:1114] (1/4) Epoch 2, batch 3200, loss[loss=0.3152, simple_loss=0.3939, pruned_loss=0.1182, over 4830.00 frames. ], tot_loss[loss=0.3303, simple_loss=0.3841, pruned_loss=0.1382, over 940007.53 frames. ], batch size: 13, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:54:48,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=17909.333333333332, ans=0.125 +2024-07-27 12:54:59,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=17922.666666666668, ans=0.125 +2024-07-27 12:55:02,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.98 vs. limit=9.484 +2024-07-27 12:55:04,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.31 vs. limit=5.6904 +2024-07-27 12:55:16,929 INFO [train.py:1114] (1/4) Epoch 2, batch 3250, loss[loss=0.294, simple_loss=0.3633, pruned_loss=0.1124, over 4939.00 frames. ], tot_loss[loss=0.3309, simple_loss=0.3848, pruned_loss=0.1385, over 940924.07 frames. ], batch size: 14, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:55:37,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17962.666666666668, ans=0.12037333333333333 +2024-07-27 12:55:46,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17976.0, ans=0.12023999999999999 +2024-07-27 12:55:47,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=17976.0, ans=0.0 +2024-07-27 12:55:50,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17989.333333333332, ans=0.12010666666666667 +2024-07-27 12:56:00,162 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.105e+01 6.706e+01 7.327e+01 8.227e+01 1.129e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 12:56:03,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=18016.0, ans=0.2694400000000001 +2024-07-27 12:56:10,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=18029.333333333332, ans=0.125 +2024-07-27 12:56:11,170 INFO [train.py:1114] (1/4) Epoch 2, batch 3300, loss[loss=0.3612, simple_loss=0.4136, pruned_loss=0.1544, over 4710.00 frames. 
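
At batch 3000 above, training pauses for a validation pass (`Computing validation loss`, then `validation: loss=0.2667 ... over 944034.00 frames`) and reports the peak GPU allocation (`Maximum memory allocated so far is 4129MB`). A sketch of that step, with the model/batch interface as an assumption:

```python
# Sketch of the periodic validation pass seen in the log. The
# model(batch) -> (loss, num_frames) interface is assumed, not actual.
import torch

def compute_validation_loss(model, valid_loader, device):
    model.eval()
    tot, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)  # assumed interface
            tot += float(loss) * num_frames
            frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return tot / frames, peak_mb
```
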
], tot_loss[loss=0.3302, simple_loss=0.3836, pruned_loss=0.1384, over 941094.15 frames. ], batch size: 19, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:56:12,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18029.333333333332, ans=0.11970666666666668 +2024-07-27 12:56:19,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=18042.666666666668, ans=0.26850666666666667 +2024-07-27 12:56:27,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=18056.0, ans=0.26804000000000006 +2024-07-27 12:56:29,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18056.0, ans=0.125 +2024-07-27 12:56:33,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=18069.333333333332, ans=0.125 +2024-07-27 12:56:47,387 INFO [train.py:1114] (1/4) Epoch 2, batch 3350, loss[loss=0.4258, simple_loss=0.4588, pruned_loss=0.1963, over 4567.00 frames. ], tot_loss[loss=0.3341, simple_loss=0.3868, pruned_loss=0.1407, over 938616.35 frames. ], batch size: 17, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:57:10,885 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.417e+01 6.714e+01 7.318e+01 8.136e+01 2.148e+02, threshold=1.464e+02, percent-clipped=2.0 +2024-07-27 12:57:22,051 INFO [train.py:1114] (1/4) Epoch 2, batch 3400, loss[loss=0.313, simple_loss=0.3624, pruned_loss=0.1318, over 4813.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.386, pruned_loss=0.1397, over 937270.83 frames. ], batch size: 11, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:57:24,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18162.666666666668, ans=0.125 +2024-07-27 12:57:31,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=11.2704 +2024-07-27 12:57:42,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.46 vs. limit=21.152 +2024-07-27 12:57:42,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=18202.666666666668, ans=0.006912463768115942 +2024-07-27 12:57:58,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=18202.666666666668, ans=0.006912463768115942 +2024-07-27 12:57:59,238 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:58:05,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=18216.0, ans=0.125 +2024-07-27 12:58:05,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18216.0, ans=0.0 +2024-07-27 12:58:07,981 INFO [train.py:1114] (1/4) Epoch 2, batch 3450, loss[loss=0.3255, simple_loss=0.3883, pruned_loss=0.1314, over 4694.00 frames. ], tot_loss[loss=0.3336, simple_loss=0.3869, pruned_loss=0.1402, over 937536.49 frames. 
], batch size: 19, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:58:25,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18256.0, ans=0.125 +2024-07-27 12:58:28,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=18256.0, ans=0.0 +2024-07-27 12:58:29,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=18256.0, ans=0.0 +2024-07-27 12:58:30,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=18269.333333333332, ans=0.035 +2024-07-27 12:58:35,293 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.586e+01 6.989e+01 7.796e+01 1.302e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 12:58:45,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=18282.666666666668, ans=0.2601066666666667 +2024-07-27 12:58:50,958 INFO [train.py:1114] (1/4) Epoch 2, batch 3500, loss[loss=0.2933, simple_loss=0.3449, pruned_loss=0.1209, over 4945.00 frames. ], tot_loss[loss=0.3333, simple_loss=0.3864, pruned_loss=0.14, over 938438.80 frames. ], batch size: 12, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:59:01,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=18309.333333333332, ans=0.2591733333333335 +2024-07-27 12:59:24,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18349.333333333332, ans=0.125 +2024-07-27 12:59:29,548 INFO [train.py:1114] (1/4) Epoch 2, batch 3550, loss[loss=0.3097, simple_loss=0.3771, pruned_loss=0.1212, over 4657.00 frames. ], tot_loss[loss=0.3331, simple_loss=0.3862, pruned_loss=0.14, over 939246.60 frames. ], batch size: 14, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 12:59:36,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18376.0, ans=0.125 +2024-07-27 12:59:52,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.00 vs. limit=14.401 +2024-07-27 12:59:53,858 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+01 6.416e+01 6.884e+01 7.445e+01 1.050e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 13:00:02,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=18416.0, ans=0.125 +2024-07-27 13:00:03,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.28 vs. limit=9.604 +2024-07-27 13:00:04,805 INFO [train.py:1114] (1/4) Epoch 2, batch 3600, loss[loss=0.3058, simple_loss=0.3581, pruned_loss=0.1268, over 4967.00 frames. ], tot_loss[loss=0.3335, simple_loss=0.3863, pruned_loss=0.1404, over 940982.35 frames. 
], batch size: 13, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 13:00:08,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=18429.333333333332, ans=0.0 +2024-07-27 13:00:11,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=18442.666666666668, ans=0.025 +2024-07-27 13:00:41,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=18482.666666666668, ans=0.125 +2024-07-27 13:00:42,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=18482.666666666668, ans=0.07 +2024-07-27 13:00:46,939 INFO [train.py:1114] (1/4) Epoch 2, batch 3650, loss[loss=0.3443, simple_loss=0.3924, pruned_loss=0.1481, over 4886.00 frames. ], tot_loss[loss=0.3305, simple_loss=0.3838, pruned_loss=0.1386, over 941592.72 frames. ], batch size: 15, lr: 3.04e-02, grad_scale: 32.0 +2024-07-27 13:00:56,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=18509.333333333332, ans=0.025 +2024-07-27 13:00:57,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=18509.333333333332, ans=0.125 +2024-07-27 13:01:14,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.28 vs. limit=21.392 +2024-07-27 13:01:14,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=18522.666666666668, ans=0.0 +2024-07-27 13:01:18,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=18536.0, ans=0.125 +2024-07-27 13:01:18,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.59 vs. limit=14.451 +2024-07-27 13:01:20,523 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.247e+01 6.612e+01 7.129e+01 7.786e+01 1.024e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 13:01:21,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=14.451 +2024-07-27 13:01:22,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.19 vs. limit=14.451 +2024-07-27 13:01:30,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18549.333333333332, ans=0.125 +2024-07-27 13:01:32,249 INFO [train.py:1114] (1/4) Epoch 2, batch 3700, loss[loss=0.3496, simple_loss=0.4013, pruned_loss=0.149, over 4926.00 frames. ], tot_loss[loss=0.3301, simple_loss=0.3839, pruned_loss=0.1382, over 942507.35 frames. 
], batch size: 14, lr: 3.04e-02, grad_scale: 32.0 +2024-07-27 13:01:32,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=18562.666666666668, ans=0.0068342028985507244 +2024-07-27 13:01:38,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=18576.0, ans=0.125 +2024-07-27 13:01:42,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.86 vs. limit=14.466000000000001 +2024-07-27 13:02:19,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=18616.0, ans=0.125 +2024-07-27 13:02:23,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=18616.0, ans=0.006822608695652174 +2024-07-27 13:02:25,790 INFO [train.py:1114] (1/4) Epoch 2, batch 3750, loss[loss=0.2853, simple_loss=0.3518, pruned_loss=0.1094, over 4797.00 frames. ], tot_loss[loss=0.329, simple_loss=0.3829, pruned_loss=0.1376, over 943915.93 frames. ], batch size: 11, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:03:00,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18656.0, ans=0.125 +2024-07-27 13:03:38,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.486e+01 7.051e+01 7.963e+01 1.237e+02, threshold=1.410e+02, percent-clipped=0.0 +2024-07-27 13:03:43,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18682.666666666668, ans=0.0 +2024-07-27 13:03:51,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18682.666666666668, ans=0.125 +2024-07-27 13:04:17,940 INFO [train.py:1114] (1/4) Epoch 2, batch 3800, loss[loss=0.3359, simple_loss=0.3964, pruned_loss=0.1377, over 4807.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3817, pruned_loss=0.1366, over 941981.81 frames. ], batch size: 14, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:04:23,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=5.804399999999999 +2024-07-27 13:04:24,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=14.516 +2024-07-27 13:05:16,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=18722.666666666668, ans=0.125 +2024-07-27 13:07:03,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18749.333333333332, ans=0.125 +2024-07-27 13:07:10,270 INFO [train.py:1114] (1/4) Epoch 2, batch 3850, loss[loss=0.3636, simple_loss=0.4031, pruned_loss=0.162, over 4631.00 frames. ], tot_loss[loss=0.3241, simple_loss=0.3795, pruned_loss=0.1344, over 942594.69 frames. 
], batch size: 16, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:07:32,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=18762.666666666668, ans=0.125 +2024-07-27 13:08:15,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.86 vs. limit=14.546 +2024-07-27 13:08:15,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=18789.333333333332, ans=0.125 +2024-07-27 13:08:18,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=21.592 +2024-07-27 13:08:18,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18789.333333333332, ans=0.11210666666666666 +2024-07-27 13:08:20,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=18802.666666666668, ans=11.521066666666666 +2024-07-27 13:08:23,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.367e+01 6.538e+01 7.102e+01 7.754e+01 1.153e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 13:08:41,624 INFO [train.py:1114] (1/4) Epoch 2, batch 3900, loss[loss=0.3006, simple_loss=0.3767, pruned_loss=0.1123, over 4810.00 frames. ], tot_loss[loss=0.3246, simple_loss=0.3804, pruned_loss=0.1343, over 942631.50 frames. ], batch size: 14, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:08:43,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18829.333333333332, ans=0.125 +2024-07-27 13:08:44,494 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:09:09,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=11.5424 +2024-07-27 13:09:10,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=18856.0, ans=0.24004000000000003 +2024-07-27 13:09:26,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=18856.0, ans=0.125 +2024-07-27 13:09:42,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=18869.333333333332, ans=0.125 +2024-07-27 13:09:52,406 INFO [train.py:1114] (1/4) Epoch 2, batch 3950, loss[loss=0.342, simple_loss=0.3971, pruned_loss=0.1434, over 4809.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3812, pruned_loss=0.135, over 944380.06 frames. 
], batch size: 16, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:09:58,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=18909.333333333332, ans=0.0 +2024-07-27 13:10:09,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=18909.333333333332, ans=0.125 +2024-07-27 13:10:24,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=18922.666666666668, ans=0.23770666666666673 +2024-07-27 13:10:25,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=14.600999999999999 +2024-07-27 13:10:27,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18936.0, ans=0.125 +2024-07-27 13:10:28,273 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.314e+01 6.596e+01 7.241e+01 7.988e+01 1.615e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 13:10:29,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.89 vs. limit=21.701999999999998 +2024-07-27 13:10:35,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18949.333333333332, ans=0.125 +2024-07-27 13:10:39,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=18949.333333333332, ans=0.125 +2024-07-27 13:10:58,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18949.333333333332, ans=0.0 +2024-07-27 13:11:05,245 INFO [train.py:1114] (1/4) Epoch 2, batch 4000, loss[loss=0.2847, simple_loss=0.3463, pruned_loss=0.1115, over 4776.00 frames. ], tot_loss[loss=0.3258, simple_loss=0.381, pruned_loss=0.1353, over 941076.47 frames. ], batch size: 12, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:11:22,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=18989.333333333332, ans=0.125 +2024-07-27 13:11:23,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=18989.333333333332, ans=0.035 +2024-07-27 13:11:27,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18989.333333333332, ans=0.125 +2024-07-27 13:11:28,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=19002.666666666668, ans=0.006738550724637681 +2024-07-27 13:11:37,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.21 vs. limit=9.754 +2024-07-27 13:11:38,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.39 vs. 
limit=21.762 +2024-07-27 13:12:02,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=19016.0, ans=0.006735652173913044 +2024-07-27 13:12:03,970 INFO [train.py:1114] (1/4) Epoch 2, batch 4050, loss[loss=0.3522, simple_loss=0.3947, pruned_loss=0.1549, over 3708.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3786, pruned_loss=0.1341, over 939459.71 frames. ], batch size: 35, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:05,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=19029.333333333332, ans=0.125 +2024-07-27 13:12:08,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=19029.333333333332, ans=0.07 +2024-07-27 13:12:27,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.071e+01 6.599e+01 7.309e+01 8.116e+01 1.221e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 13:12:37,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=19096.0, ans=0.025 +2024-07-27 13:12:39,183 INFO [train.py:1114] (1/4) Epoch 2, batch 4100, loss[loss=0.4219, simple_loss=0.4581, pruned_loss=0.1928, over 4892.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.3803, pruned_loss=0.1355, over 938585.79 frames. ], batch size: 15, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:49,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19109.333333333332, ans=0.0 +2024-07-27 13:12:50,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19109.333333333332, ans=0.10890666666666668 +2024-07-27 13:12:52,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=19109.333333333332, ans=0.0 +2024-07-27 13:13:05,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=19136.0, ans=0.10863999999999999 +2024-07-27 13:13:14,990 INFO [train.py:1114] (1/4) Epoch 2, batch 4150, loss[loss=0.2708, simple_loss=0.35, pruned_loss=0.09574, over 4833.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3789, pruned_loss=0.1349, over 938406.17 frames. ], batch size: 13, lr: 3.00e-02, grad_scale: 32.0 +2024-07-27 13:13:15,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. 
limit=14.686 +2024-07-27 13:13:17,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=19162.666666666668, ans=0.006703768115942029 +2024-07-27 13:13:19,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=19162.666666666668, ans=0.10837333333333332 +2024-07-27 13:13:36,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=19202.666666666668, ans=0.2279066666666667 +2024-07-27 13:13:44,555 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.337e+01 6.945e+01 7.844e+01 2.237e+02, threshold=1.389e+02, percent-clipped=1.0 +2024-07-27 13:13:56,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=19216.0, ans=0.0066921739130434785 +2024-07-27 13:13:57,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.36 vs. limit=5.884399999999999 +2024-07-27 13:13:58,214 INFO [train.py:1114] (1/4) Epoch 2, batch 4200, loss[loss=0.3138, simple_loss=0.3792, pruned_loss=0.1242, over 4913.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3802, pruned_loss=0.1355, over 939933.67 frames. ], batch size: 15, lr: 3.00e-02, grad_scale: 32.0 +2024-07-27 13:13:59,794 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:14:03,909 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=14.711 +2024-07-27 13:14:05,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=19242.666666666668, ans=0.125 +2024-07-27 13:14:13,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=19256.0, ans=0.025 +2024-07-27 13:14:17,003 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.28 vs. limit=14.628 +2024-07-27 13:14:19,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19269.333333333332, ans=0.10730666666666669 +2024-07-27 13:14:22,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19269.333333333332, ans=0.125 +2024-07-27 13:14:32,751 INFO [train.py:1114] (1/4) Epoch 2, batch 4250, loss[loss=0.2931, simple_loss=0.3609, pruned_loss=0.1126, over 4644.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3805, pruned_loss=0.1349, over 940973.75 frames. 
], batch size: 12, lr: 2.99e-02, grad_scale: 32.0 +2024-07-27 13:14:40,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19309.333333333332, ans=0.125 +2024-07-27 13:14:55,984 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.180e+01 6.301e+01 6.853e+01 7.797e+01 1.151e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-27 13:15:00,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=19349.333333333332, ans=0.006663188405797102 +2024-07-27 13:15:02,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=19349.333333333332, ans=10.0 +2024-07-27 13:15:05,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=19349.333333333332, ans=0.2227733333333335 +2024-07-27 13:15:05,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=19349.333333333332, ans=0.1 +2024-07-27 13:15:06,761 INFO [train.py:1114] (1/4) Epoch 2, batch 4300, loss[loss=0.31, simple_loss=0.3724, pruned_loss=0.1238, over 4758.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.3808, pruned_loss=0.1348, over 940673.44 frames. ], batch size: 13, lr: 2.99e-02, grad_scale: 32.0 +2024-07-27 13:15:23,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=19376.0, ans=0.22184000000000004 +2024-07-27 13:15:24,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=19389.333333333332, ans=0.0 +2024-07-27 13:15:27,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=19389.333333333332, ans=0.09899494936611666 +2024-07-27 13:15:27,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=19389.333333333332, ans=0.125 +2024-07-27 13:15:41,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19416.0, ans=0.125 +2024-07-27 13:15:45,765 INFO [train.py:1114] (1/4) Epoch 2, batch 4350, loss[loss=0.3587, simple_loss=0.4091, pruned_loss=0.1542, over 4759.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3809, pruned_loss=0.1345, over 941249.89 frames. ], batch size: 13, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:16:01,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=19456.0, ans=0.125 +2024-07-27 13:16:03,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=19456.0, ans=0.125 +2024-07-27 13:16:09,150 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.368e+01 6.866e+01 7.654e+01 1.225e+02, threshold=1.373e+02, percent-clipped=0.0 +2024-07-27 13:16:09,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19469.333333333332, ans=0.10530666666666669 +2024-07-27 13:16:09,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.83 vs. 
limit=14.801 +2024-07-27 13:16:21,960 INFO [train.py:1114] (1/4) Epoch 2, batch 4400, loss[loss=0.4102, simple_loss=0.4578, pruned_loss=0.1813, over 4803.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3808, pruned_loss=0.1345, over 941102.10 frames. ], batch size: 14, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:16:22,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=19496.0, ans=10.0 +2024-07-27 13:16:26,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=19496.0, ans=0.125 +2024-07-27 13:16:33,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=19509.333333333332, ans=0.125 +2024-07-27 13:16:37,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=19522.666666666668, ans=0.125 +2024-07-27 13:16:42,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=19536.0, ans=0.2162400000000001 +2024-07-27 13:16:45,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=19536.0, ans=0.10464000000000001 +2024-07-27 13:16:49,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19536.0, ans=0.10464000000000001 +2024-07-27 13:16:54,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=19549.333333333332, ans=0.025 +2024-07-27 13:16:58,311 INFO [train.py:1114] (1/4) Epoch 2, batch 4450, loss[loss=0.2795, simple_loss=0.3431, pruned_loss=0.1079, over 4940.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3791, pruned_loss=0.1336, over 939290.03 frames. ], batch size: 12, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:16:58,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.82 vs. limit=14.836 +2024-07-27 13:17:00,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.80 vs. 
limit=11.825066666666668 +2024-07-27 13:17:01,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=19562.666666666668, ans=0.0 +2024-07-27 13:17:31,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=19602.666666666668, ans=0.125 +2024-07-27 13:17:31,924 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.227e+01 6.384e+01 6.851e+01 7.779e+01 1.148e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-27 13:17:32,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=19602.666666666668, ans=0.125 +2024-07-27 13:17:39,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=19616.0, ans=0.006605217391304348 +2024-07-27 13:17:42,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=19629.333333333332, ans=0.125 +2024-07-27 13:17:42,965 INFO [train.py:1114] (1/4) Epoch 2, batch 4500, loss[loss=0.3258, simple_loss=0.3881, pruned_loss=0.1317, over 4737.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3782, pruned_loss=0.1327, over 938547.31 frames. ], batch size: 14, lr: 2.97e-02, grad_scale: 32.0 +2024-07-27 13:17:45,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=5.9444 +2024-07-27 13:18:13,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19682.666666666668, ans=0.125 +2024-07-27 13:18:16,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.43 vs. limit=9.924 +2024-07-27 13:18:17,179 INFO [train.py:1114] (1/4) Epoch 2, batch 4550, loss[loss=0.3245, simple_loss=0.3867, pruned_loss=0.1312, over 4902.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.379, pruned_loss=0.1333, over 940496.88 frames. ], batch size: 13, lr: 2.97e-02, grad_scale: 32.0 +2024-07-27 13:18:37,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=19722.666666666668, ans=0.006582028985507246 +2024-07-27 13:18:42,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=19736.0, ans=0.125 +2024-07-27 13:18:43,563 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 6.563e+01 7.303e+01 8.334e+01 1.051e+02, threshold=1.461e+02, percent-clipped=0.0 +2024-07-27 13:18:56,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=19749.333333333332, ans=0.10250666666666669 +2024-07-27 13:18:56,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=19749.333333333332, ans=0.20877333333333348 +2024-07-27 13:18:57,897 INFO [train.py:1114] (1/4) Epoch 2, batch 4600, loss[loss=0.3513, simple_loss=0.4122, pruned_loss=0.1452, over 4482.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3768, pruned_loss=0.1315, over 938814.84 frames. 
], batch size: 21, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:19:09,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=19776.0, ans=0.0 +2024-07-27 13:19:12,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=19789.333333333332, ans=0.125 +2024-07-27 13:19:13,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=19789.333333333332, ans=0.05210666666666666 +2024-07-27 13:19:15,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=19789.333333333332, ans=0.0065675362318840585 +2024-07-27 13:19:26,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.94 vs. limit=14.931000000000001 +2024-07-27 13:19:27,205 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:19:30,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.03 vs. limit=14.931000000000001 +2024-07-27 13:19:31,756 INFO [train.py:1114] (1/4) Epoch 2, batch 4650, loss[loss=0.3272, simple_loss=0.3643, pruned_loss=0.145, over 4826.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3791, pruned_loss=0.133, over 940396.69 frames. ], batch size: 16, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:19:32,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=19829.333333333332, ans=0.006558840579710146 +2024-07-27 13:19:45,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=19856.0, ans=0.125 +2024-07-27 13:19:52,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=19869.333333333332, ans=0.125 +2024-07-27 13:19:54,896 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+01 6.585e+01 7.200e+01 8.002e+01 1.335e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 13:19:57,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=19869.333333333332, ans=0.006550144927536233 +2024-07-27 13:19:57,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=19869.333333333332, ans=0.125 +2024-07-27 13:19:59,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=19882.666666666668, ans=0.125 +2024-07-27 13:20:02,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=19882.666666666668, ans=0.125 +2024-07-27 13:20:02,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=19882.666666666668, ans=0.0 +2024-07-27 13:20:06,198 INFO [train.py:1114] (1/4) Epoch 2, batch 4700, loss[loss=0.297, simple_loss=0.3476, pruned_loss=0.1233, over 4700.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3794, pruned_loss=0.1337, over 937467.80 frames. 
], batch size: 11, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:20:54,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19962.666666666668, ans=0.10037333333333331 +2024-07-27 13:20:55,539 INFO [train.py:1114] (1/4) Epoch 2, batch 4750, loss[loss=0.3637, simple_loss=0.3972, pruned_loss=0.1651, over 4437.00 frames. ], tot_loss[loss=0.3247, simple_loss=0.3804, pruned_loss=0.1345, over 935260.71 frames. ], batch size: 21, lr: 2.95e-02, grad_scale: 64.0 +2024-07-27 13:20:57,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=19962.666666666668, ans=0.125 +2024-07-27 13:20:59,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.74 vs. limit=14.986 +2024-07-27 13:21:02,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=19976.0, ans=0.125 +2024-07-27 13:21:19,837 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.354e+01 6.910e+01 7.839e+01 1.849e+02, threshold=1.382e+02, percent-clipped=1.0 +2024-07-27 13:21:24,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=20016.0, ans=0.0 +2024-07-27 13:21:25,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=20016.0, ans=0.006518260869565218 +2024-07-27 13:21:27,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=12.0 +2024-07-27 13:21:31,069 INFO [train.py:1114] (1/4) Epoch 2, batch 4800, loss[loss=0.343, simple_loss=0.3977, pruned_loss=0.1441, over 4697.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3795, pruned_loss=0.1345, over 932326.76 frames. ], batch size: 13, lr: 2.95e-02, grad_scale: 64.0 +2024-07-27 13:21:39,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=20042.666666666668, ans=0.0 +2024-07-27 13:21:39,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.30 vs. limit=22.5 +2024-07-27 13:21:47,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.58 vs. limit=15.0 +2024-07-27 13:21:51,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=15.0 +2024-07-27 13:22:05,287 INFO [train.py:1114] (1/4) Epoch 2, batch 4850, loss[loss=0.3544, simple_loss=0.3998, pruned_loss=0.1546, over 4742.00 frames. ], tot_loss[loss=0.3254, simple_loss=0.3807, pruned_loss=0.1351, over 932122.06 frames. 
], batch size: 14, lr: 2.95e-02, grad_scale: 64.0 +2024-07-27 13:22:11,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=20096.0, ans=0.025 +2024-07-27 13:22:12,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20096.0, ans=0.1 +2024-07-27 13:22:12,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=20096.0, ans=0.125 +2024-07-27 13:22:22,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20122.666666666668, ans=0.1 +2024-07-27 13:22:33,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-07-27 13:22:33,551 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.340e+01 6.424e+01 6.890e+01 7.552e+01 1.246e+02, threshold=1.378e+02, percent-clipped=0.0 +2024-07-27 13:22:39,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=20149.333333333332, ans=0.125 +2024-07-27 13:22:46,561 INFO [train.py:1114] (1/4) Epoch 2, batch 4900, loss[loss=0.3573, simple_loss=0.42, pruned_loss=0.1473, over 4765.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3793, pruned_loss=0.1338, over 934222.14 frames. ], batch size: 13, lr: 2.94e-02, grad_scale: 64.0 +2024-07-27 13:22:46,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20162.666666666668, ans=0.125 +2024-07-27 13:22:53,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=20176.0, ans=0.0 +2024-07-27 13:22:56,237 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.61 vs. limit=22.5 +2024-07-27 13:23:02,099 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.63 vs. limit=5.0 +2024-07-27 13:23:27,083 INFO [train.py:1114] (1/4) Epoch 2, batch 4950, loss[loss=0.4175, simple_loss=0.4317, pruned_loss=0.2017, over 3510.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3813, pruned_loss=0.1352, over 931272.90 frames. ], batch size: 35, lr: 2.94e-02, grad_scale: 64.0 +2024-07-27 13:23:27,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20229.333333333332, ans=0.1 +2024-07-27 13:23:28,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.01 vs. limit=22.5 +2024-07-27 13:23:37,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=20242.666666666668, ans=0.125 +2024-07-27 13:23:40,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.21 vs. 
limit=15.0 +2024-07-27 13:23:45,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20256.0, ans=0.1 +2024-07-27 13:23:48,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.80 vs. limit=15.0 +2024-07-27 13:23:51,053 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.541e+01 7.146e+01 7.949e+01 1.013e+02, threshold=1.429e+02, percent-clipped=0.0 +2024-07-27 13:23:59,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20282.666666666668, ans=0.1 +2024-07-27 13:24:01,984 INFO [train.py:1114] (1/4) Epoch 2, batch 5000, loss[loss=0.3426, simple_loss=0.4033, pruned_loss=0.141, over 4664.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3804, pruned_loss=0.1339, over 935213.75 frames. ], batch size: 14, lr: 2.93e-02, grad_scale: 64.0 +2024-07-27 13:24:05,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.09 vs. limit=15.0 +2024-07-27 13:24:26,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-27 13:24:29,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=20349.333333333332, ans=0.2 +2024-07-27 13:24:31,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20349.333333333332, ans=0.1 +2024-07-27 13:24:32,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=20349.333333333332, ans=0.006445797101449276 +2024-07-27 13:24:33,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.13 vs. limit=15.0 +2024-07-27 13:24:36,475 INFO [train.py:1114] (1/4) Epoch 2, batch 5050, loss[loss=0.2699, simple_loss=0.3378, pruned_loss=0.101, over 4843.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3778, pruned_loss=0.1318, over 937581.81 frames. ], batch size: 12, lr: 2.93e-02, grad_scale: 64.0 +2024-07-27 13:24:38,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.79 vs. 
limit=15.0 +2024-07-27 13:24:42,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=20376.0, ans=0.0 +2024-07-27 13:24:44,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20376.0, ans=0.1 +2024-07-27 13:24:59,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=20402.666666666668, ans=0.125 +2024-07-27 13:25:00,637 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.416e+01 7.117e+01 7.818e+01 1.344e+02, threshold=1.423e+02, percent-clipped=0.0 +2024-07-27 13:25:03,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=20402.666666666668, ans=0.125 +2024-07-27 13:25:04,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=20416.0, ans=0.0 +2024-07-27 13:25:11,749 INFO [train.py:1114] (1/4) Epoch 2, batch 5100, loss[loss=0.2811, simple_loss=0.3424, pruned_loss=0.1099, over 4780.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3793, pruned_loss=0.1327, over 934948.24 frames. ], batch size: 12, lr: 2.93e-02, grad_scale: 64.0 +2024-07-27 13:25:13,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20429.333333333332, ans=0.125 +2024-07-27 13:25:14,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=20429.333333333332, ans=0.125 +2024-07-27 13:25:17,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.67 vs. limit=22.5 +2024-07-27 13:25:19,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=20442.666666666668, ans=0.125 +2024-07-27 13:25:22,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.98 vs. limit=15.0 +2024-07-27 13:25:24,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=20442.666666666668, ans=0.125 +2024-07-27 13:25:30,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.40 vs. limit=5.0 +2024-07-27 13:25:43,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=20482.666666666668, ans=0.125 +2024-07-27 13:25:43,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20482.666666666668, ans=0.1 +2024-07-27 13:25:46,238 INFO [train.py:1114] (1/4) Epoch 2, batch 5150, loss[loss=0.3932, simple_loss=0.432, pruned_loss=0.1771, over 4835.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3801, pruned_loss=0.1333, over 936025.19 frames. 
], batch size: 16, lr: 2.92e-02, grad_scale: 64.0 +2024-07-27 13:25:49,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20496.0, ans=0.125 +2024-07-27 13:26:01,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 13:26:03,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.66 vs. limit=15.0 +2024-07-27 13:26:08,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.55 vs. limit=15.0 +2024-07-27 13:26:09,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.469e+01 6.536e+01 7.424e+01 8.253e+01 1.032e+02, threshold=1.485e+02, percent-clipped=0.0 +2024-07-27 13:26:20,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=20562.666666666668, ans=0.125 +2024-07-27 13:26:21,109 INFO [train.py:1114] (1/4) Epoch 2, batch 5200, loss[loss=0.3508, simple_loss=0.4096, pruned_loss=0.1461, over 4654.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.379, pruned_loss=0.132, over 936261.43 frames. ], batch size: 14, lr: 2.92e-02, grad_scale: 64.0 +2024-07-27 13:26:22,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=20562.666666666668, ans=0.025 +2024-07-27 13:26:22,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=20562.666666666668, ans=0.125 +2024-07-27 13:26:58,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20616.0, ans=0.125 +2024-07-27 13:26:59,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20616.0, ans=0.1 +2024-07-27 13:27:00,328 INFO [train.py:1114] (1/4) Epoch 2, batch 5250, loss[loss=0.3458, simple_loss=0.4102, pruned_loss=0.1407, over 4898.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3788, pruned_loss=0.1325, over 935818.29 frames. ], batch size: 13, lr: 2.91e-02, grad_scale: 64.0 +2024-07-27 13:27:00,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=20629.333333333332, ans=0.125 +2024-07-27 13:27:03,137 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.584e+00 +2024-07-27 13:27:07,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.08 vs. limit=22.5 +2024-07-27 13:27:24,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20669.333333333332, ans=0.0 +2024-07-27 13:27:27,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 6.506e+01 7.005e+01 7.765e+01 1.418e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 13:27:40,044 INFO [train.py:1114] (1/4) Epoch 2, batch 5300, loss[loss=0.3123, simple_loss=0.3794, pruned_loss=0.1226, over 4638.00 frames. 
], tot_loss[loss=0.3214, simple_loss=0.3786, pruned_loss=0.132, over 934169.02 frames. ], batch size: 16, lr: 2.91e-02, grad_scale: 64.0 +2024-07-27 13:27:44,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=20696.0, ans=10.0 +2024-07-27 13:27:48,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=20709.333333333332, ans=0.2 +2024-07-27 13:27:57,663 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.10 vs. limit=15.0 +2024-07-27 13:28:09,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=15.0 +2024-07-27 13:28:15,710 INFO [train.py:1114] (1/4) Epoch 2, batch 5350, loss[loss=0.27, simple_loss=0.3304, pruned_loss=0.1047, over 4496.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3786, pruned_loss=0.1318, over 936138.00 frames. ], batch size: 10, lr: 2.91e-02, grad_scale: 64.0 +2024-07-27 13:28:20,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=20762.666666666668, ans=0.125 +2024-07-27 13:28:23,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=20776.0, ans=0.0 +2024-07-27 13:28:34,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=12.0 +2024-07-27 13:28:41,452 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.375e+01 6.982e+01 7.841e+01 1.512e+02, threshold=1.396e+02, percent-clipped=1.0 +2024-07-27 13:28:54,558 INFO [train.py:1114] (1/4) Epoch 2, batch 5400, loss[loss=0.3384, simple_loss=0.3642, pruned_loss=0.1563, over 4269.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3805, pruned_loss=0.1337, over 930889.19 frames. ], batch size: 25, lr: 2.90e-02, grad_scale: 64.0 +2024-07-27 13:28:57,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.41 vs. limit=22.5 +2024-07-27 13:28:58,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20829.333333333332, ans=0.1 +2024-07-27 13:28:59,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=20829.333333333332, ans=0.015 +2024-07-27 13:29:02,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.69 vs. limit=15.0 +2024-07-27 13:29:08,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=20842.666666666668, ans=0.125 +2024-07-27 13:29:18,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.95 vs. limit=15.0 +2024-07-27 13:29:27,857 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:29:30,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.38 vs. 
limit=6.0 +2024-07-27 13:29:31,762 INFO [train.py:1114] (1/4) Epoch 2, batch 5450, loss[loss=0.2908, simple_loss=0.3312, pruned_loss=0.1252, over 4712.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3783, pruned_loss=0.1321, over 933524.80 frames. ], batch size: 11, lr: 2.90e-02, grad_scale: 64.0 +2024-07-27 13:29:43,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=20909.333333333332, ans=0.025 +2024-07-27 13:29:44,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20909.333333333332, ans=0.1 +2024-07-27 13:29:46,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=20922.666666666668, ans=0.125 +2024-07-27 13:29:48,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=20922.666666666668, ans=0.125 +2024-07-27 13:29:48,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=20922.666666666668, ans=0.006321159420289855 +2024-07-27 13:29:52,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=20936.0, ans=0.006318260869565218 +2024-07-27 13:29:55,934 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.377e+01 6.925e+01 7.766e+01 1.521e+02, threshold=1.385e+02, percent-clipped=1.0 +2024-07-27 13:30:05,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=20949.333333333332, ans=0.5 +2024-07-27 13:30:06,859 INFO [train.py:1114] (1/4) Epoch 2, batch 5500, loss[loss=0.3439, simple_loss=0.4038, pruned_loss=0.142, over 4265.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3774, pruned_loss=0.1321, over 930877.51 frames. ], batch size: 25, lr: 2.90e-02, grad_scale: 64.0 +2024-07-27 13:30:07,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20962.666666666668, ans=0.125 +2024-07-27 13:30:15,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-27 13:30:15,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20976.0, ans=0.125 +2024-07-27 13:30:17,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=20976.0, ans=0.125 +2024-07-27 13:30:19,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0 +2024-07-27 13:30:41,367 INFO [train.py:1114] (1/4) Epoch 2, batch 5550, loss[loss=0.2732, simple_loss=0.3434, pruned_loss=0.1015, over 4700.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3771, pruned_loss=0.1319, over 933509.13 frames. ], batch size: 12, lr: 2.89e-02, grad_scale: 64.0 +2024-07-27 13:30:49,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=21042.666666666668, ans=0.2 +2024-07-27 13:30:51,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. 
limit=15.0 +2024-07-27 13:30:57,383 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:30:58,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=21056.0, ans=0.2 +2024-07-27 13:31:02,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=21069.333333333332, ans=0.2 +2024-07-27 13:31:02,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=21069.333333333332, ans=10.0 +2024-07-27 13:31:02,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=21069.333333333332, ans=0.125 +2024-07-27 13:31:03,065 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=15.0 +2024-07-27 13:31:04,809 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.613e+01 7.499e+01 8.477e+01 2.130e+02, threshold=1.500e+02, percent-clipped=3.0 +2024-07-27 13:31:06,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=21069.333333333332, ans=0.0 +2024-07-27 13:31:06,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21069.333333333332, ans=0.125 +2024-07-27 13:31:07,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=21069.333333333332, ans=0.2 +2024-07-27 13:31:15,948 INFO [train.py:1114] (1/4) Epoch 2, batch 5600, loss[loss=0.3368, simple_loss=0.4049, pruned_loss=0.1344, over 4735.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3785, pruned_loss=0.1324, over 934660.85 frames. ], batch size: 14, lr: 2.89e-02, grad_scale: 64.0 +2024-07-27 13:31:17,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21096.0, ans=0.1 +2024-07-27 13:31:17,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21096.0, ans=0.125 +2024-07-27 13:31:20,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21096.0, ans=0.1 +2024-07-27 13:31:27,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21109.333333333332, ans=0.125 +2024-07-27 13:31:32,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21122.666666666668, ans=0.1 +2024-07-27 13:31:38,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=21136.0, ans=0.125 +2024-07-27 13:31:39,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21136.0, ans=0.1 +2024-07-27 13:31:50,637 INFO [train.py:1114] (1/4) Epoch 2, batch 5650, loss[loss=0.3962, simple_loss=0.4346, pruned_loss=0.1789, over 4467.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3786, pruned_loss=0.1323, over 937123.14 frames. 
], batch size: 21, lr: 2.88e-02, grad_scale: 64.0 +2024-07-27 13:31:54,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=21162.666666666668, ans=0.125 +2024-07-27 13:31:58,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=21176.0, ans=0.006266086956521739 +2024-07-27 13:32:03,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21176.0, ans=0.1 +2024-07-27 13:32:07,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.68 vs. limit=15.0 +2024-07-27 13:32:07,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=21189.333333333332, ans=0.006263188405797102 +2024-07-27 13:32:14,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.325e+01 6.816e+01 7.626e+01 1.168e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 13:32:15,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=21202.666666666668, ans=0.025 +2024-07-27 13:32:21,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=21216.0, ans=0.0 +2024-07-27 13:32:25,395 INFO [train.py:1114] (1/4) Epoch 2, batch 5700, loss[loss=0.3203, simple_loss=0.3771, pruned_loss=0.1318, over 4695.00 frames. ], tot_loss[loss=0.3222, simple_loss=0.3784, pruned_loss=0.1331, over 938440.21 frames. ], batch size: 13, lr: 2.88e-02, grad_scale: 64.0 +2024-07-27 13:32:41,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=21242.666666666668, ans=0.5 +2024-07-27 13:33:01,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=21282.666666666668, ans=0.2 +2024-07-27 13:33:04,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=21282.666666666668, ans=0.2 +2024-07-27 13:33:05,786 INFO [train.py:1114] (1/4) Epoch 2, batch 5750, loss[loss=0.335, simple_loss=0.3819, pruned_loss=0.144, over 4697.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3796, pruned_loss=0.1338, over 938667.40 frames. ], batch size: 19, lr: 2.88e-02, grad_scale: 64.0 +2024-07-27 13:33:31,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21336.0, ans=0.1 +2024-07-27 13:33:33,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.01 vs. limit=22.5 +2024-07-27 13:33:34,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.471e+01 6.485e+01 7.135e+01 7.978e+01 1.224e+02, threshold=1.427e+02, percent-clipped=0.0 +2024-07-27 13:33:38,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21349.333333333332, ans=0.1 +2024-07-27 13:33:45,476 INFO [train.py:1114] (1/4) Epoch 2, batch 5800, loss[loss=0.3439, simple_loss=0.3887, pruned_loss=0.1495, over 4705.00 frames. 
], tot_loss[loss=0.3239, simple_loss=0.3795, pruned_loss=0.1341, over 937954.47 frames. ], batch size: 19, lr: 2.87e-02, grad_scale: 64.0 +2024-07-27 13:33:47,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.13 vs. limit=10.0 +2024-07-27 13:33:56,412 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.259e-02 +2024-07-27 13:34:03,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=21389.333333333332, ans=0.125 +2024-07-27 13:34:12,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.54 vs. limit=15.0 +2024-07-27 13:34:24,439 INFO [train.py:1114] (1/4) Epoch 2, batch 5850, loss[loss=0.355, simple_loss=0.4043, pruned_loss=0.1528, over 4611.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3782, pruned_loss=0.1334, over 938636.52 frames. ], batch size: 21, lr: 2.87e-02, grad_scale: 64.0 +2024-07-27 13:34:33,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0 +2024-07-27 13:34:38,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=21456.0, ans=0.2 +2024-07-27 13:34:41,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=21456.0, ans=0.0 +2024-07-27 13:34:49,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 6.249e+01 6.870e+01 7.832e+01 1.003e+02, threshold=1.374e+02, percent-clipped=0.0 +2024-07-27 13:34:54,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=21482.666666666668, ans=0.2 +2024-07-27 13:34:59,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=21482.666666666668, ans=0.125 +2024-07-27 13:35:01,012 INFO [train.py:1114] (1/4) Epoch 2, batch 5900, loss[loss=0.3081, simple_loss=0.3828, pruned_loss=0.1167, over 4682.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3784, pruned_loss=0.1332, over 938443.69 frames. ], batch size: 15, lr: 2.87e-02, grad_scale: 64.0 +2024-07-27 13:35:06,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=21496.0, ans=0.125 +2024-07-27 13:35:07,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.71 vs. limit=22.5 +2024-07-27 13:35:21,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=21536.0, ans=0.2 +2024-07-27 13:35:33,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.01 vs. limit=10.0 +2024-07-27 13:35:35,449 INFO [train.py:1114] (1/4) Epoch 2, batch 5950, loss[loss=0.4067, simple_loss=0.4451, pruned_loss=0.1842, over 4672.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3783, pruned_loss=0.1333, over 940542.88 frames. 
], batch size: 15, lr: 2.86e-02, grad_scale: 64.0 +2024-07-27 13:35:43,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=21576.0, ans=0.0 +2024-07-27 13:35:45,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21576.0, ans=0.125 +2024-07-27 13:35:52,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21589.333333333332, ans=0.1 +2024-07-27 13:35:53,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=21589.333333333332, ans=0.0 +2024-07-27 13:35:54,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21589.333333333332, ans=0.125 +2024-07-27 13:35:59,408 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.345e+01 7.134e+01 8.050e+01 1.843e+02, threshold=1.427e+02, percent-clipped=1.0 +2024-07-27 13:36:20,605 INFO [train.py:1114] (1/4) Epoch 2, batch 6000, loss[loss=0.3481, simple_loss=0.404, pruned_loss=0.146, over 4268.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3782, pruned_loss=0.1331, over 937788.04 frames. ], batch size: 25, lr: 2.86e-02, grad_scale: 64.0 +2024-07-27 13:36:20,606 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 13:36:32,979 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.6401, 3.4146, 3.6092, 3.9726], device='cuda:1') +2024-07-27 13:36:36,191 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.2564, simple_loss=0.3503, pruned_loss=0.08121, over 944034.00 frames. +2024-07-27 13:36:36,192 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 13:36:43,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.31 vs. limit=15.0 +2024-07-27 13:36:57,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.58 vs. limit=15.0 +2024-07-27 13:37:05,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21682.666666666668, ans=0.1 +2024-07-27 13:37:11,082 INFO [train.py:1114] (1/4) Epoch 2, batch 6050, loss[loss=0.2522, simple_loss=0.3098, pruned_loss=0.09731, over 4778.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3759, pruned_loss=0.1313, over 938966.67 frames. 
], batch size: 12, lr: 2.85e-02, grad_scale: 64.0 +2024-07-27 13:37:18,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=21709.333333333332, ans=0.2 +2024-07-27 13:37:18,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=21709.333333333332, ans=0.125 +2024-07-27 13:37:22,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=21709.333333333332, ans=0.125 +2024-07-27 13:37:34,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.397e+01 6.133e+01 6.810e+01 7.852e+01 1.499e+02, threshold=1.362e+02, percent-clipped=2.0 +2024-07-27 13:37:35,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=21736.0, ans=0.125 +2024-07-27 13:37:45,482 INFO [train.py:1114] (1/4) Epoch 2, batch 6100, loss[loss=0.403, simple_loss=0.4441, pruned_loss=0.181, over 4685.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.3751, pruned_loss=0.1309, over 938243.45 frames. ], batch size: 15, lr: 2.85e-02, grad_scale: 64.0 +2024-07-27 13:37:49,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=21762.666666666668, ans=0.05 +2024-07-27 13:37:50,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=21762.666666666668, ans=10.0 +2024-07-27 13:37:58,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=23.85 vs. limit=15.0 +2024-07-27 13:38:00,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21789.333333333332, ans=0.1 +2024-07-27 13:38:18,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=21816.0, ans=0.0 +2024-07-27 13:38:20,140 INFO [train.py:1114] (1/4) Epoch 2, batch 6150, loss[loss=0.4026, simple_loss=0.4273, pruned_loss=0.189, over 3308.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.375, pruned_loss=0.1307, over 936779.49 frames. ], batch size: 35, lr: 2.85e-02, grad_scale: 64.0 +2024-07-27 13:38:31,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=21842.666666666668, ans=0.125 +2024-07-27 13:38:40,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.63 vs. limit=22.5 +2024-07-27 13:38:44,316 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.693e+01 6.434e+01 7.098e+01 7.748e+01 1.262e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 13:38:52,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21882.666666666668, ans=0.1 +2024-07-27 13:38:56,897 INFO [train.py:1114] (1/4) Epoch 2, batch 6200, loss[loss=0.3102, simple_loss=0.3737, pruned_loss=0.1234, over 4733.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.376, pruned_loss=0.1308, over 936224.44 frames. 
], batch size: 14, lr: 2.84e-02, grad_scale: 64.0 +2024-07-27 13:39:06,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0 +2024-07-27 13:39:08,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21909.333333333332, ans=0.1 +2024-07-27 13:39:12,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.65 vs. limit=10.0 +2024-07-27 13:39:13,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=21922.666666666668, ans=0.125 +2024-07-27 13:39:21,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21936.0, ans=0.125 +2024-07-27 13:39:31,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=21949.333333333332, ans=0.006097971014492755 +2024-07-27 13:39:38,440 INFO [train.py:1114] (1/4) Epoch 2, batch 6250, loss[loss=0.3251, simple_loss=0.3942, pruned_loss=0.128, over 4814.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.377, pruned_loss=0.132, over 933064.62 frames. ], batch size: 14, lr: 2.84e-02, grad_scale: 64.0 +2024-07-27 13:39:46,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21976.0, ans=0.125 +2024-07-27 13:39:58,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=21989.333333333332, ans=0.125 +2024-07-27 13:39:59,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=21989.333333333332, ans=0.2 +2024-07-27 13:40:04,078 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.093e+01 6.216e+01 6.990e+01 7.888e+01 1.132e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 13:40:05,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.90 vs. limit=12.0 +2024-07-27 13:40:07,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22016.0, ans=0.1 +2024-07-27 13:40:10,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=22016.0, ans=0.125 +2024-07-27 13:40:15,172 INFO [train.py:1114] (1/4) Epoch 2, batch 6300, loss[loss=0.2935, simple_loss=0.3413, pruned_loss=0.1228, over 4504.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3764, pruned_loss=0.1314, over 929977.59 frames. 
], batch size: 10, lr: 2.84e-02, grad_scale: 64.0 +2024-07-27 13:40:30,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22056.0, ans=0.1 +2024-07-27 13:40:37,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=22069.333333333332, ans=22.5 +2024-07-27 13:40:44,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22082.666666666668, ans=0.1 +2024-07-27 13:41:02,155 INFO [train.py:1114] (1/4) Epoch 2, batch 6350, loss[loss=0.3415, simple_loss=0.3976, pruned_loss=0.1427, over 4515.00 frames. ], tot_loss[loss=0.3194, simple_loss=0.3764, pruned_loss=0.1312, over 933904.33 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0 +2024-07-27 13:41:16,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=22122.666666666668, ans=0.006060289855072463 +2024-07-27 13:41:25,746 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.585e+01 6.300e+01 6.631e+01 7.435e+01 1.313e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 13:41:36,416 INFO [train.py:1114] (1/4) Epoch 2, batch 6400, loss[loss=0.3123, simple_loss=0.3693, pruned_loss=0.1276, over 4632.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.375, pruned_loss=0.1297, over 934875.26 frames. ], batch size: 13, lr: 2.83e-02, grad_scale: 64.0 +2024-07-27 13:41:40,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=22162.666666666668, ans=0.125 +2024-07-27 13:41:41,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=22162.666666666668, ans=0.025 +2024-07-27 13:41:51,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=15.0 +2024-07-27 13:42:06,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.13 vs. limit=22.5 +2024-07-27 13:42:10,973 INFO [train.py:1114] (1/4) Epoch 2, batch 6450, loss[loss=0.3385, simple_loss=0.3981, pruned_loss=0.1395, over 4583.00 frames. ], tot_loss[loss=0.3179, simple_loss=0.3762, pruned_loss=0.1298, over 938654.02 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0 +2024-07-27 13:42:15,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.41 vs. 
limit=15.0 +2024-07-27 13:42:18,479 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:42:20,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=22242.666666666668, ans=0.2 +2024-07-27 13:42:22,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=22242.666666666668, ans=0.006034202898550725 +2024-07-27 13:42:34,024 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.221e+01 6.785e+01 7.657e+01 1.359e+02, threshold=1.357e+02, percent-clipped=1.0 +2024-07-27 13:42:41,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22282.666666666668, ans=0.125 +2024-07-27 13:42:45,069 INFO [train.py:1114] (1/4) Epoch 2, batch 6500, loss[loss=0.445, simple_loss=0.4533, pruned_loss=0.2183, over 3320.00 frames. ], tot_loss[loss=0.3176, simple_loss=0.3757, pruned_loss=0.1297, over 939658.31 frames. ], batch size: 35, lr: 2.82e-02, grad_scale: 64.0 +2024-07-27 13:42:45,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.46 vs. limit=15.0 +2024-07-27 13:42:46,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.03 vs. limit=6.0 +2024-07-27 13:42:46,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.27 vs. limit=15.0 +2024-07-27 13:42:47,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=22296.0, ans=0.125 +2024-07-27 13:42:59,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22322.666666666668, ans=0.1 +2024-07-27 13:43:03,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22322.666666666668, ans=0.125 +2024-07-27 13:43:03,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=22322.666666666668, ans=0.125 +2024-07-27 13:43:06,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.80 vs. limit=15.0 +2024-07-27 13:43:19,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=22362.666666666668, ans=0.025 +2024-07-27 13:43:19,933 INFO [train.py:1114] (1/4) Epoch 2, batch 6550, loss[loss=0.2651, simple_loss=0.3396, pruned_loss=0.09532, over 4799.00 frames. ], tot_loss[loss=0.3159, simple_loss=0.3743, pruned_loss=0.1287, over 942729.52 frames. 
], batch size: 11, lr: 2.82e-02, grad_scale: 64.0 +2024-07-27 13:43:34,345 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:43:43,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.181e+01 6.196e+01 6.780e+01 7.401e+01 1.122e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 13:43:43,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=22402.666666666668, ans=0.0 +2024-07-27 13:43:43,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.40 vs. limit=22.5 +2024-07-27 13:43:46,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.07 vs. limit=22.5 +2024-07-27 13:43:56,089 INFO [train.py:1114] (1/4) Epoch 2, batch 6600, loss[loss=0.2956, simple_loss=0.3622, pruned_loss=0.1145, over 4931.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3737, pruned_loss=0.1286, over 944761.58 frames. ], batch size: 14, lr: 2.82e-02, grad_scale: 128.0 +2024-07-27 13:44:01,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.15 vs. limit=22.5 +2024-07-27 13:44:03,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=22442.666666666668, ans=0.07 +2024-07-27 13:44:06,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=22442.666666666668, ans=0.1 +2024-07-27 13:44:08,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22442.666666666668, ans=0.125 +2024-07-27 13:44:22,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=22469.333333333332, ans=0.0059849275362318845 +2024-07-27 13:44:24,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=22482.666666666668, ans=0.0 +2024-07-27 13:44:30,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=22496.0, ans=0.09899494936611666 +2024-07-27 13:44:31,234 INFO [train.py:1114] (1/4) Epoch 2, batch 6650, loss[loss=0.3395, simple_loss=0.3835, pruned_loss=0.1478, over 4604.00 frames. ], tot_loss[loss=0.3158, simple_loss=0.374, pruned_loss=0.1288, over 943756.48 frames. ], batch size: 17, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:44:33,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=22496.0, ans=0.025 +2024-07-27 13:44:33,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=22496.0, ans=0.125 +2024-07-27 13:44:48,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. 
limit=15.0 +2024-07-27 13:44:50,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=22522.666666666668, ans=0.09899494936611666 +2024-07-27 13:44:57,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=22536.0, ans=0.0 +2024-07-27 13:44:58,372 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.864e+01 6.602e+01 7.128e+01 7.971e+01 1.702e+02, threshold=1.426e+02, percent-clipped=1.0 +2024-07-27 13:45:05,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=22536.0, ans=0.025 +2024-07-27 13:45:17,344 INFO [train.py:1114] (1/4) Epoch 2, batch 6700, loss[loss=0.3836, simple_loss=0.432, pruned_loss=0.1675, over 4697.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3752, pruned_loss=0.1294, over 942870.12 frames. ], batch size: 19, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:45:17,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=22562.666666666668, ans=0.2 +2024-07-27 13:45:19,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=22562.666666666668, ans=0.5 +2024-07-27 13:45:22,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=22562.666666666668, ans=0.0 +2024-07-27 13:45:24,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22576.0, ans=0.125 +2024-07-27 13:45:29,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.34 vs. limit=6.0 +2024-07-27 13:45:42,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=22602.666666666668, ans=0.125 +2024-07-27 13:45:44,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=22602.666666666668, ans=0.2 +2024-07-27 13:45:55,660 INFO [train.py:1114] (1/4) Epoch 2, batch 6750, loss[loss=0.362, simple_loss=0.4034, pruned_loss=0.1603, over 4264.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.376, pruned_loss=0.1305, over 940784.04 frames. ], batch size: 25, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:45:56,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-27 13:46:18,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=22669.333333333332, ans=0.125 +2024-07-27 13:46:18,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.60 vs. 
limit=22.5 +2024-07-27 13:46:19,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 6.419e+01 6.907e+01 8.025e+01 1.154e+02, threshold=1.381e+02, percent-clipped=0.0 +2024-07-27 13:46:19,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=22669.333333333332, ans=0.125 +2024-07-27 13:46:31,950 INFO [train.py:1114] (1/4) Epoch 2, batch 6800, loss[loss=0.2944, simple_loss=0.3584, pruned_loss=0.1151, over 4639.00 frames. ], tot_loss[loss=0.3191, simple_loss=0.377, pruned_loss=0.1307, over 938978.76 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0 +2024-07-27 13:46:35,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=22696.0, ans=0.0 +2024-07-27 13:46:36,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=22696.0, ans=0.125 +2024-07-27 13:46:38,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=22709.333333333332, ans=0.0 +2024-07-27 13:46:43,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=22709.333333333332, ans=0.0 +2024-07-27 13:46:46,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=22722.666666666668, ans=0.125 +2024-07-27 13:46:50,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=22722.666666666668, ans=0.07 +2024-07-27 13:46:50,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=22722.666666666668, ans=0.125 +2024-07-27 13:46:52,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=22736.0, ans=0.125 +2024-07-27 13:46:59,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0 +2024-07-27 13:47:06,014 INFO [train.py:1114] (1/4) Epoch 2, batch 6850, loss[loss=0.3646, simple_loss=0.4243, pruned_loss=0.1525, over 4686.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3773, pruned_loss=0.131, over 940764.06 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0 +2024-07-27 13:47:07,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.03 vs. limit=22.5 +2024-07-27 13:47:09,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.53 vs. limit=15.0 +2024-07-27 13:47:09,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=22762.666666666668, ans=0.2 +2024-07-27 13:47:18,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22776.0, ans=0.1 +2024-07-27 13:47:23,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.97 vs. 
limit=15.0 +2024-07-27 13:47:24,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=22789.333333333332, ans=0.2 +2024-07-27 13:47:31,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.48 vs. limit=15.0 +2024-07-27 13:47:31,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=22802.666666666668, ans=0.125 +2024-07-27 13:47:33,801 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 6.353e+01 6.914e+01 7.942e+01 1.137e+02, threshold=1.383e+02, percent-clipped=0.0 +2024-07-27 13:47:43,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.22 vs. limit=6.0 +2024-07-27 13:47:44,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=22816.0, ans=0.1 +2024-07-27 13:47:46,052 INFO [train.py:1114] (1/4) Epoch 2, batch 6900, loss[loss=0.2994, simple_loss=0.371, pruned_loss=0.1139, over 4963.00 frames. ], tot_loss[loss=0.318, simple_loss=0.376, pruned_loss=0.13, over 942880.56 frames. ], batch size: 13, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:47:58,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=22842.666666666668, ans=0.0 +2024-07-27 13:48:00,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=22856.0, ans=0.125 +2024-07-27 13:48:06,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=22869.333333333332, ans=0.125 +2024-07-27 13:48:17,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=22882.666666666668, ans=0.125 +2024-07-27 13:48:17,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22882.666666666668, ans=0.1 +2024-07-27 13:48:17,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=22882.666666666668, ans=0.125 +2024-07-27 13:48:18,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22882.666666666668, ans=0.1 +2024-07-27 13:48:21,663 INFO [train.py:1114] (1/4) Epoch 2, batch 6950, loss[loss=0.3285, simple_loss=0.3843, pruned_loss=0.1363, over 4547.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3748, pruned_loss=0.1293, over 940420.79 frames. ], batch size: 10, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:48:42,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.88 vs. 
limit=15.0 +2024-07-27 13:48:44,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=22922.666666666668, ans=0.5 +2024-07-27 13:48:52,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=22936.0, ans=0.09899494936611666 +2024-07-27 13:48:54,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-07-27 13:48:54,771 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.446e+01 7.112e+01 7.644e+01 1.059e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 13:48:57,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=22936.0, ans=0.125 +2024-07-27 13:49:05,665 INFO [train.py:1114] (1/4) Epoch 2, batch 7000, loss[loss=0.3526, simple_loss=0.3961, pruned_loss=0.1545, over 4603.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3731, pruned_loss=0.1285, over 938890.47 frames. ], batch size: 17, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:49:21,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=22976.0, ans=0.125 +2024-07-27 13:49:22,663 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=1.345e-02 +2024-07-27 13:49:24,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=22989.333333333332, ans=0.025 +2024-07-27 13:49:37,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=23016.0, ans=0.125 +2024-07-27 13:49:41,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=23016.0, ans=0.00586608695652174 +2024-07-27 13:49:41,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-07-27 13:49:42,711 INFO [train.py:1114] (1/4) Epoch 2, batch 7050, loss[loss=0.3271, simple_loss=0.3777, pruned_loss=0.1383, over 4717.00 frames. ], tot_loss[loss=0.3142, simple_loss=0.3727, pruned_loss=0.1279, over 942011.81 frames. 
], batch size: 19, lr: 2.78e-02, grad_scale: 128.0 +2024-07-27 13:49:50,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23029.333333333332, ans=0.125 +2024-07-27 13:49:58,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23029.333333333332, ans=0.1 +2024-07-27 13:50:04,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=23042.666666666668, ans=0.02 +2024-07-27 13:50:17,256 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+01 6.903e+01 7.811e+01 8.989e+01 1.248e+02, threshold=1.562e+02, percent-clipped=0.0 +2024-07-27 13:50:26,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=23096.0, ans=0.125 +2024-07-27 13:50:27,351 INFO [train.py:1114] (1/4) Epoch 2, batch 7100, loss[loss=0.3418, simple_loss=0.3865, pruned_loss=0.1485, over 4798.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3742, pruned_loss=0.1297, over 936244.50 frames. ], batch size: 15, lr: 2.78e-02, grad_scale: 64.0 +2024-07-27 13:50:28,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.93 vs. limit=22.5 +2024-07-27 13:50:30,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=23096.0, ans=0.1 +2024-07-27 13:50:32,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=23096.0, ans=0.09899494936611666 +2024-07-27 13:50:38,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23109.333333333332, ans=0.125 +2024-07-27 13:50:47,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=23122.666666666668, ans=0.005842898550724638 +2024-07-27 13:50:53,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=23122.666666666668, ans=0.2 +2024-07-27 13:50:53,721 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:51:00,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=23149.333333333332, ans=0.07 +2024-07-27 13:51:07,814 INFO [train.py:1114] (1/4) Epoch 2, batch 7150, loss[loss=0.3547, simple_loss=0.4015, pruned_loss=0.1539, over 4477.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.372, pruned_loss=0.1283, over 937440.90 frames. ], batch size: 21, lr: 2.78e-02, grad_scale: 64.0 +2024-07-27 13:51:18,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=23162.666666666668, ans=0.125 +2024-07-27 13:51:24,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=23176.0, ans=0.025 +2024-07-27 13:51:24,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.00 vs. 
limit=12.0 +2024-07-27 13:51:27,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23189.333333333332, ans=0.125 +2024-07-27 13:51:31,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=23189.333333333332, ans=0.0 +2024-07-27 13:51:39,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-27 13:51:41,522 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.182e+01 6.431e+01 7.159e+01 7.939e+01 1.328e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 13:51:44,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=23202.666666666668, ans=0.0 +2024-07-27 13:51:50,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.85 vs. limit=12.0 +2024-07-27 13:51:53,961 INFO [train.py:1114] (1/4) Epoch 2, batch 7200, loss[loss=0.3471, simple_loss=0.4057, pruned_loss=0.1442, over 4803.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3723, pruned_loss=0.1275, over 938062.17 frames. ], batch size: 15, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:52:02,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=23242.666666666668, ans=0.125 +2024-07-27 13:52:03,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=23242.666666666668, ans=0.09899494936611666 +2024-07-27 13:52:03,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=23242.666666666668, ans=0.125 +2024-07-27 13:52:05,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23242.666666666668, ans=0.125 +2024-07-27 13:52:28,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23296.0, ans=0.1 +2024-07-27 13:52:28,623 INFO [train.py:1114] (1/4) Epoch 2, batch 7250, loss[loss=0.2951, simple_loss=0.3659, pruned_loss=0.1122, over 4848.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3709, pruned_loss=0.1265, over 939749.38 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:52:28,947 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.99 vs. limit=15.0 +2024-07-27 13:52:33,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.95 vs. 
limit=10.0 +2024-07-27 13:52:43,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=23322.666666666668, ans=0.125 +2024-07-27 13:52:50,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23336.0, ans=0.125 +2024-07-27 13:52:54,555 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.237e+01 6.919e+01 7.525e+01 1.117e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 13:53:07,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-07-27 13:53:14,180 INFO [train.py:1114] (1/4) Epoch 2, batch 7300, loss[loss=0.258, simple_loss=0.3371, pruned_loss=0.08942, over 4852.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3706, pruned_loss=0.1257, over 939664.45 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:53:14,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=23362.666666666668, ans=0.025 +2024-07-27 13:53:20,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=23376.0, ans=0.005787826086956522 +2024-07-27 13:53:20,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=23376.0, ans=0.125 +2024-07-27 13:53:29,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.09 vs. limit=15.0 +2024-07-27 13:53:39,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=23402.666666666668, ans=0.1 +2024-07-27 13:53:46,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23416.0, ans=0.125 +2024-07-27 13:53:48,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23429.333333333332, ans=0.1 +2024-07-27 13:53:48,999 INFO [train.py:1114] (1/4) Epoch 2, batch 7350, loss[loss=0.2464, simple_loss=0.3175, pruned_loss=0.08761, over 4644.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3698, pruned_loss=0.1249, over 939201.43 frames. ], batch size: 12, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:54:02,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=23442.666666666668, ans=0.2 +2024-07-27 13:54:07,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=23456.0, ans=0.125 +2024-07-27 13:54:14,582 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.547e+01 7.387e+01 8.600e+01 1.543e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 13:54:45,261 INFO [train.py:1114] (1/4) Epoch 2, batch 7400, loss[loss=0.3157, simple_loss=0.3731, pruned_loss=0.1292, over 4695.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3709, pruned_loss=0.1259, over 940246.19 frames. 
], batch size: 13, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:55:05,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=23536.0, ans=0.2 +2024-07-27 13:55:10,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=23536.0, ans=0.125 +2024-07-27 13:55:19,702 INFO [train.py:1114] (1/4) Epoch 2, batch 7450, loss[loss=0.3287, simple_loss=0.3581, pruned_loss=0.1496, over 4617.00 frames. ], tot_loss[loss=0.3111, simple_loss=0.3697, pruned_loss=0.1262, over 937754.08 frames. ], batch size: 11, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:55:28,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=23576.0, ans=0.0 +2024-07-27 13:55:35,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23589.333333333332, ans=0.125 +2024-07-27 13:55:45,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.04 vs. limit=22.5 +2024-07-27 13:55:46,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23602.666666666668, ans=0.1 +2024-07-27 13:55:47,787 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.335e+01 6.373e+01 7.113e+01 7.806e+01 1.283e+02, threshold=1.423e+02, percent-clipped=0.0 +2024-07-27 13:55:50,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23616.0, ans=0.1 +2024-07-27 13:55:55,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.59 vs. limit=15.0 +2024-07-27 13:55:58,076 INFO [train.py:1114] (1/4) Epoch 2, batch 7500, loss[loss=0.37, simple_loss=0.3914, pruned_loss=0.1743, over 3408.00 frames. ], tot_loss[loss=0.3129, simple_loss=0.3712, pruned_loss=0.1273, over 935896.80 frames. 
], batch size: 35, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:56:14,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=23642.666666666668, ans=0.0 +2024-07-27 13:56:24,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=23642.666666666668, ans=0.005729855072463768 +2024-07-27 13:56:24,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23642.666666666668, ans=0.125 +2024-07-27 13:56:25,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=23642.666666666668, ans=0.0 +2024-07-27 13:56:26,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=23656.0, ans=0.0 +2024-07-27 13:56:29,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=23656.0, ans=0.00572695652173913 +2024-07-27 13:56:29,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=23656.0, ans=0.125 +2024-07-27 13:56:33,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=15.0 +2024-07-27 13:56:44,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=23682.666666666668, ans=0.125 +2024-07-27 13:56:47,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.16 vs. limit=15.0 +2024-07-27 13:56:47,501 INFO [train.py:1114] (1/4) Epoch 2, batch 7550, loss[loss=0.3903, simple_loss=0.4289, pruned_loss=0.1758, over 4653.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.3725, pruned_loss=0.1284, over 936052.59 frames. ], batch size: 17, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:56:51,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23696.0, ans=0.125 +2024-07-27 13:57:03,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=23709.333333333332, ans=0.125 +2024-07-27 13:57:13,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=23736.0, ans=0.125 +2024-07-27 13:57:15,325 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.494e+01 6.851e+01 7.705e+01 1.471e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-27 13:57:16,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.14 vs. limit=10.0 +2024-07-27 13:57:20,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23749.333333333332, ans=0.1 +2024-07-27 13:57:24,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=23762.666666666668, ans=0.0 +2024-07-27 13:57:25,064 INFO [train.py:1114] (1/4) Epoch 2, batch 7600, loss[loss=0.2614, simple_loss=0.3446, pruned_loss=0.08906, over 4806.00 frames. ], tot_loss[loss=0.3135, simple_loss=0.3721, pruned_loss=0.1274, over 938530.34 frames. 
], batch size: 14, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:57:32,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=23776.0, ans=0.125 +2024-07-27 13:57:36,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=23776.0, ans=0.125 +2024-07-27 13:57:43,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.30 vs. limit=22.5 +2024-07-27 13:57:45,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.96 vs. limit=15.0 +2024-07-27 13:57:45,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.64 vs. limit=15.0 +2024-07-27 13:57:46,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=23802.666666666668, ans=0.07 +2024-07-27 13:57:58,707 INFO [train.py:1114] (1/4) Epoch 2, batch 7650, loss[loss=0.2766, simple_loss=0.3355, pruned_loss=0.1088, over 4938.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3712, pruned_loss=0.127, over 937590.22 frames. ], batch size: 12, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 13:59:27,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.15 vs. limit=22.5 +2024-07-27 13:59:38,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=23842.666666666668, ans=0.005686376811594203 +2024-07-27 13:59:39,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=23856.0, ans=0.2 +2024-07-27 13:59:50,981 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.430e+01 6.487e+01 6.980e+01 8.234e+01 1.140e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 13:59:56,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=23882.666666666668, ans=0.5 +2024-07-27 13:59:59,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=23882.666666666668, ans=0.05 +2024-07-27 14:00:01,134 INFO [train.py:1114] (1/4) Epoch 2, batch 7700, loss[loss=0.374, simple_loss=0.4346, pruned_loss=0.1567, over 4686.00 frames. ], tot_loss[loss=0.3135, simple_loss=0.3719, pruned_loss=0.1276, over 934986.05 frames. ], batch size: 13, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:16,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=23922.666666666668, ans=0.0 +2024-07-27 14:00:24,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=23936.0, ans=0.125 +2024-07-27 14:00:25,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.14 vs. 
limit=10.0
[... repetitive per-tensor scaling.py ScheduledFloat/Whitening/WithLoss diagnostic lines elided; train.py batch summaries, validation results, and optim.py gradient-clipping warnings follow ...]
+2024-07-27 14:00:34,012 INFO [train.py:1114] (1/4) Epoch 2, batch 7750, loss[loss=0.3312, simple_loss=0.3821, pruned_loss=0.1401, over 4923.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3748, pruned_loss=0.1294, over 935993.38 frames. ], batch size: 14, lr: 2.74e-02, grad_scale: 64.0
+2024-07-27 14:01:01,470 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.212e+01 6.407e+01 7.069e+01 7.682e+01 1.137e+02, threshold=1.414e+02, percent-clipped=0.0
+2024-07-27 14:01:11,456 INFO [train.py:1114] (1/4) Epoch 2, batch 7800, loss[loss=0.3102, simple_loss=0.3704, pruned_loss=0.1251, over 4673.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.375, pruned_loss=0.1288, over 937425.19 frames. ], batch size: 14, lr: 2.74e-02, grad_scale: 64.0
+2024-07-27 14:01:44,896 INFO [train.py:1114] (1/4) Epoch 2, batch 7850, loss[loss=0.2881, simple_loss=0.344, pruned_loss=0.1161, over 4519.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3755, pruned_loss=0.1292, over 936277.54 frames. ], batch size: 10, lr: 2.73e-02, grad_scale: 64.0
+2024-07-27 14:02:10,321 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+01 6.475e+01 7.021e+01 7.812e+01 1.156e+02, threshold=1.404e+02, percent-clipped=0.0
+2024-07-27 14:02:20,251 INFO [train.py:1114] (1/4) Epoch 2, batch 7900, loss[loss=0.342, simple_loss=0.4061, pruned_loss=0.1389, over 4874.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.3762, pruned_loss=0.1293, over 933553.20 frames. ], batch size: 14, lr: 2.73e-02, grad_scale: 64.0
+2024-07-27 14:02:53,686 INFO [train.py:1114] (1/4) Epoch 2, batch 7950, loss[loss=0.3735, simple_loss=0.3981, pruned_loss=0.1745, over 3233.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3743, pruned_loss=0.1275, over 935820.09 frames. ], batch size: 35, lr: 2.73e-02, grad_scale: 64.0
+2024-07-27 14:03:17,184 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.361e+01 6.433e+01 7.086e+01 8.045e+01 1.490e+02, threshold=1.417e+02, percent-clipped=1.0
+2024-07-27 14:03:26,868 INFO [train.py:1114] (1/4) Epoch 2, batch 8000, loss[loss=0.2348, simple_loss=0.3096, pruned_loss=0.07997, over 4612.00 frames. ], tot_loss[loss=0.3119, simple_loss=0.3712, pruned_loss=0.1263, over 934566.57 frames. ], batch size: 11, lr: 2.72e-02, grad_scale: 64.0
+2024-07-27 14:04:00,344 INFO [train.py:1114] (1/4) Epoch 2, batch 8050, loss[loss=0.2938, simple_loss=0.3712, pruned_loss=0.1082, over 4803.00 frames. ], tot_loss[loss=0.3128, simple_loss=0.3722, pruned_loss=0.1267, over 934095.49 frames. ], batch size: 14, lr: 2.72e-02, grad_scale: 64.0
+2024-07-27 14:04:23,841 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.126e+01 6.809e+01 7.483e+01 1.319e+02, threshold=1.362e+02, percent-clipped=0.0
+2024-07-27 14:04:33,617 INFO [train.py:1114] (1/4) Epoch 2, batch 8100, loss[loss=0.3381, simple_loss=0.4011, pruned_loss=0.1376, over 4793.00 frames. ], tot_loss[loss=0.3137, simple_loss=0.3731, pruned_loss=0.1272, over 934018.95 frames. ], batch size: 15, lr: 2.72e-02, grad_scale: 64.0
+2024-07-27 14:06:02,876 INFO [train.py:1114] (1/4) Epoch 2, batch 8150, loss[loss=0.3807, simple_loss=0.4362, pruned_loss=0.1626, over 4803.00 frames. ], tot_loss[loss=0.3128, simple_loss=0.3724, pruned_loss=0.1266, over 937216.28 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0
+2024-07-27 14:06:50,920 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.617e+01 6.763e+01 7.386e+01 8.088e+01 1.261e+02, threshold=1.477e+02, percent-clipped=0.0
+2024-07-27 14:07:01,480 INFO [train.py:1114] (1/4) Epoch 2, batch 8200, loss[loss=0.3507, simple_loss=0.4084, pruned_loss=0.1465, over 4806.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3724, pruned_loss=0.1263, over 938461.90 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0
+2024-07-27 14:07:39,092 INFO [train.py:1114] (1/4) Epoch 2, batch 8250, loss[loss=0.2925, simple_loss=0.3455, pruned_loss=0.1197, over 4892.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3718, pruned_loss=0.1262, over 938636.09 frames. ], batch size: 13, lr: 2.71e-02, grad_scale: 64.0
+2024-07-27 14:08:07,823 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.272e+01 7.057e+01 7.926e+01 1.070e+02, threshold=1.411e+02, percent-clipped=0.0
+2024-07-27 14:08:32,477 INFO [train.py:1114] (1/4) Epoch 2, batch 8300, loss[loss=0.3516, simple_loss=0.402, pruned_loss=0.1506, over 4898.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3723, pruned_loss=0.1262, over 938734.84 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0
+2024-07-27 14:09:06,238 INFO [train.py:1114] (1/4) Epoch 2, batch 8350, loss[loss=0.3524, simple_loss=0.4085, pruned_loss=0.1482, over 4804.00 frames. ], tot_loss[loss=0.3112, simple_loss=0.3713, pruned_loss=0.1256, over 941369.03 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0
+2024-07-27 14:09:34,715 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.372e+01 7.103e+01 7.786e+01 1.162e+02, threshold=1.421e+02, percent-clipped=0.0
+2024-07-27 14:09:44,916 INFO [train.py:1114] (1/4) Epoch 2, batch 8400, loss[loss=0.2692, simple_loss=0.3399, pruned_loss=0.09927, over 4772.00 frames. ], tot_loss[loss=0.31, simple_loss=0.3703, pruned_loss=0.1249, over 940002.56 frames. ], batch size: 12, lr: 2.70e-02, grad_scale: 64.0
+2024-07-27 14:10:17,803 INFO [train.py:1114] (1/4) Epoch 2, batch 8450, loss[loss=0.334, simple_loss=0.3886, pruned_loss=0.1397, over 4802.00 frames. ], tot_loss[loss=0.3109, simple_loss=0.3714, pruned_loss=0.1252, over 938784.92 frames. ], batch size: 15, lr: 2.69e-02, grad_scale: 64.0
+2024-07-27 14:10:51,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.225e+01 6.363e+01 6.808e+01 7.563e+01 1.440e+02, threshold=1.362e+02, percent-clipped=1.0
+2024-07-27 14:11:01,355 INFO [train.py:1114] (1/4) Epoch 2, batch 8500, loss[loss=0.2545, simple_loss=0.3138, pruned_loss=0.09761, over 4597.00 frames. ], tot_loss[loss=0.3097, simple_loss=0.3701, pruned_loss=0.1246, over 938897.52 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0
+2024-07-27 14:11:34,218 INFO [train.py:1114] (1/4) Epoch 2, batch 8550, loss[loss=0.2662, simple_loss=0.3209, pruned_loss=0.1057, over 4798.00 frames. ], tot_loss[loss=0.3093, simple_loss=0.3694, pruned_loss=0.1246, over 939526.44 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0
+2024-07-27 14:12:01,368 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.174e+01 6.782e+01 7.598e+01 1.715e+02, threshold=1.356e+02, percent-clipped=1.0
+2024-07-27 14:12:12,545 INFO [train.py:1114] (1/4) Epoch 2, batch 8600, loss[loss=0.2951, simple_loss=0.3667, pruned_loss=0.1118, over 4801.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3697, pruned_loss=0.1247, over 939100.11 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0
+2024-07-27 14:12:50,761 INFO [train.py:1114] (1/4) Epoch 2, batch 8650, loss[loss=0.2943, simple_loss=0.3625, pruned_loss=0.113, over 4899.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3696, pruned_loss=0.1249, over 940344.07 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0
+2024-07-27 14:13:13,966 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.621e+01 7.393e+01 8.155e+01 1.216e+02, threshold=1.479e+02, percent-clipped=0.0
+2024-07-27 14:13:23,796 INFO [train.py:1114] (1/4) Epoch 2, batch 8700, loss[loss=0.2962, simple_loss=0.3736, pruned_loss=0.1094, over 4760.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3705, pruned_loss=0.1251, over 937825.12 frames. ], batch size: 13, lr: 2.68e-02, grad_scale: 64.0
+2024-07-27 14:14:00,735 INFO [train.py:1114] (1/4) Epoch 2, batch 8750, loss[loss=0.2992, simple_loss=0.3598, pruned_loss=0.1193, over 4686.00 frames. ], tot_loss[loss=0.3086, simple_loss=0.3689, pruned_loss=0.1241, over 936587.55 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0
+2024-07-27 14:14:43,959 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.590e+01 6.384e+01 6.883e+01 7.910e+01 1.074e+02, threshold=1.377e+02, percent-clipped=0.0
+2024-07-27 14:14:53,575 INFO [train.py:1114] (1/4) Epoch 2, batch 8800, loss[loss=0.3024, simple_loss=0.3714, pruned_loss=0.1167, over 4928.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.37, pruned_loss=0.1249, over 937901.31 frames. ], batch size: 14, lr: 2.67e-02, grad_scale: 64.0
+2024-07-27 14:15:27,654 INFO [train.py:1114] (1/4) Epoch 2, batch 8850, loss[loss=0.3525, simple_loss=0.4045, pruned_loss=0.1503, over 4523.00 frames. ], tot_loss[loss=0.3089, simple_loss=0.3691, pruned_loss=0.1244, over 931594.82 frames. ], batch size: 21, lr: 2.67e-02, grad_scale: 64.0
+2024-07-27 14:16:01,198 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.518e+01 6.996e+01 7.988e+01 1.039e+02, threshold=1.399e+02, percent-clipped=0.0
+2024-07-27 14:16:11,054 INFO [train.py:1114] (1/4) Epoch 2, batch 8900, loss[loss=0.2901, simple_loss=0.3555, pruned_loss=0.1123, over 4937.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.3689, pruned_loss=0.124, over 929710.35 frames. ], batch size: 12, lr: 2.67e-02, grad_scale: 64.0
+2024-07-27 14:16:44,023 INFO [train.py:1114] (1/4) Epoch 2, batch 8950, loss[loss=0.3494, simple_loss=0.3964, pruned_loss=0.1513, over 4535.00 frames. ], tot_loss[loss=0.3089, simple_loss=0.3688, pruned_loss=0.1245, over 930313.19 frames. ], batch size: 21, lr: 2.66e-02, grad_scale: 64.0
+2024-07-27 14:17:08,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.427e+01 6.847e+01 7.354e+01 1.255e+02, threshold=1.369e+02, percent-clipped=0.0
+2024-07-27 14:17:18,092 INFO [train.py:1114] (1/4) Epoch 2, batch 9000, loss[loss=0.268, simple_loss=0.3454, pruned_loss=0.09531, over 4635.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3668, pruned_loss=0.1234, over 933489.00 frames. ], batch size: 12, lr: 2.66e-02, grad_scale: 64.0
+2024-07-27 14:17:18,092 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 14:17:31,449 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.8191, 3.8615, 2.3060, 2.6297], device='cuda:1')
+2024-07-27 14:17:33,543 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.8760, 3.7185, 4.0079, 4.6951], device='cuda:1')
+2024-07-27 14:17:37,004 INFO [train.py:1146] (1/4) Epoch 2, validation: loss=0.2471, simple_loss=0.3424, pruned_loss=0.07587, over 944034.00 frames.
+2024-07-27 14:17:37,005 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 14:18:18,624 INFO [train.py:1114] (1/4) Epoch 2, batch 9050, loss[loss=0.2995, simple_loss=0.3463, pruned_loss=0.1264, over 4544.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3667, pruned_loss=0.1237, over 934032.42 frames. ], batch size: 10, lr: 2.66e-02, grad_scale: 64.0
+2024-07-27 14:18:48,201 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.394e+01 6.425e+01 6.926e+01 7.624e+01 1.076e+02, threshold=1.385e+02, percent-clipped=0.0
+2024-07-27 14:18:58,023 INFO [train.py:1114] (1/4) Epoch 2, batch 9100, loss[loss=0.3055, simple_loss=0.3657, pruned_loss=0.1226, over 4925.00 frames. ], tot_loss[loss=0.3067, simple_loss=0.3669, pruned_loss=0.1232, over 936537.84 frames. ], batch size: 14, lr: 2.65e-02, grad_scale: 128.0
+2024-07-27 14:19:32,198 INFO [train.py:1114] (1/4) Epoch 2, batch 9150, loss[loss=0.3301, simple_loss=0.4074, pruned_loss=0.1264, over 4809.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3678, pruned_loss=0.1238, over 935549.65 frames. ], batch size: 14, lr: 2.65e-02, grad_scale: 64.0
+2024-07-27 14:20:02,448 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.426e+01 6.682e+01 7.261e+01 8.100e+01 1.344e+02, threshold=1.452e+02, percent-clipped=0.0
+2024-07-27 14:20:11,595 INFO [train.py:1114] (1/4) Epoch 2, batch 9200, loss[loss=0.2969, simple_loss=0.3562, pruned_loss=0.1188, over 4851.00 frames. ], tot_loss[loss=0.3073, simple_loss=0.3675, pruned_loss=0.1236, over 937383.42 frames. ], batch size: 12, lr: 2.65e-02, grad_scale: 64.0
+2024-07-27 14:20:44,395 INFO [train.py:1114] (1/4) Epoch 2, batch 9250, loss[loss=0.3043, simple_loss=0.3569, pruned_loss=0.1258, over 4643.00 frames. ], tot_loss[loss=0.3084, simple_loss=0.3684, pruned_loss=0.1242, over 938317.61 frames. ], batch size: 13, lr: 2.65e-02, grad_scale: 64.0
+2024-07-27 14:21:07,928 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.326e+01 6.900e+01 7.743e+01 1.339e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-27 14:21:17,298 INFO [train.py:1114] (1/4) Epoch 2, batch 9300, loss[loss=0.3361, simple_loss=0.3814, pruned_loss=0.1454, over 4772.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.3671, pruned_loss=0.1237, over 938089.50 frames. ], batch size: 12, lr: 2.64e-02, grad_scale: 64.0
+2024-07-27 14:21:49,566 INFO [train.py:1114] (1/4) Epoch 2, batch 9350, loss[loss=0.2714, simple_loss=0.3367, pruned_loss=0.1031, over 4811.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3668, pruned_loss=0.1235, over 935125.74 frames. ], batch size: 11, lr: 2.64e-02, grad_scale: 64.0
+2024-07-27 14:22:16,011 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.512e+01 7.104e+01 8.363e+01 3.070e+02, threshold=1.421e+02, percent-clipped=1.0
+2024-07-27 14:22:26,799 INFO [train.py:1114] (1/4) Epoch 2, batch 9400, loss[loss=0.2987, simple_loss=0.3757, pruned_loss=0.1108, over 4698.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3667, pruned_loss=0.1232, over 932872.56 frames. ], batch size: 13, lr: 2.64e-02, grad_scale: 64.0
+2024-07-27 14:22:58,565 INFO [train.py:1114] (1/4) Epoch 2, batch 9450, loss[loss=0.2638, simple_loss=0.3333, pruned_loss=0.09717, over 4812.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3677, pruned_loss=0.1236, over 932399.94 frames. ], batch size: 11, lr: 2.63e-02, grad_scale: 64.0
+2024-07-27 14:23:23,990 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.999e+01 6.119e+01 6.627e+01 7.680e+01 1.096e+02, threshold=1.325e+02, percent-clipped=0.0
+2024-07-27 14:23:32,698 INFO [train.py:1114] (1/4) Epoch 2, batch 9500, loss[loss=0.3356, simple_loss=0.3789, pruned_loss=0.1461, over 4714.00 frames. ], tot_loss[loss=0.3067, simple_loss=0.3679, pruned_loss=0.1227, over 934722.81 frames. ], batch size: 12, lr: 2.63e-02, grad_scale: 64.0
+2024-07-27 14:24:05,362 INFO [train.py:1114] (1/4) Epoch 2, batch 9550, loss[loss=0.3019, simple_loss=0.3666, pruned_loss=0.1186, over 4778.00 frames. ], tot_loss[loss=0.3073, simple_loss=0.3685, pruned_loss=0.1231, over 931956.02 frames. ], batch size: 12, lr: 2.63e-02, grad_scale: 64.0
+2024-07-27 14:24:37,641 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.234e+01 6.396e+01 7.111e+01 8.222e+01 1.095e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-27 14:24:50,599 INFO [train.py:1114] (1/4) Epoch 2, batch 9600, loss[loss=0.4445, simple_loss=0.4579, pruned_loss=0.2156, over 3373.00 frames. ], tot_loss[loss=0.3061, simple_loss=0.3675, pruned_loss=0.1223, over 930830.90 frames. ], batch size: 35, lr: 2.62e-02, grad_scale: 64.0
+2024-07-27 14:25:48,568 INFO [train.py:1114] (1/4) Epoch 2, batch 9650, loss[loss=0.3684, simple_loss=0.42, pruned_loss=0.1584, over 4833.00 frames. ], tot_loss[loss=0.3089, simple_loss=0.37, pruned_loss=0.1239, over 926932.06 frames. ], batch size: 16, lr: 2.62e-02, grad_scale: 64.0
+2024-07-27 14:26:14,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.044e+01 6.379e+01 6.976e+01 8.027e+01 1.621e+02, threshold=1.395e+02, percent-clipped=2.0
+2024-07-27 14:26:25,379 INFO [train.py:1114] (1/4) Epoch 2, batch 9700, loss[loss=0.3517, simple_loss=0.4088, pruned_loss=0.1473, over 4229.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3705, pruned_loss=0.1238, over 925268.64 frames. ], batch size: 25, lr: 2.62e-02, grad_scale: 64.0
+2024-07-27 14:27:00,089 INFO [train.py:1114] (1/4) Epoch 2, batch 9750, loss[loss=0.3085, simple_loss=0.373, pruned_loss=0.1219, over 4683.00 frames. ], tot_loss[loss=0.3091, simple_loss=0.3702, pruned_loss=0.124, over 925766.56 frames. ], batch size: 15, lr: 2.62e-02, grad_scale: 64.0
+2024-07-27 14:27:37,701 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.232e+01 6.802e+01 7.534e+01 1.606e+02, threshold=1.360e+02, percent-clipped=1.0
+2024-07-27 14:27:46,366 INFO [train.py:1114] (1/4) Epoch 2, batch 9800, loss[loss=0.2554, simple_loss=0.3393, pruned_loss=0.08568, over 4708.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3681, pruned_loss=0.1228, over 925436.37 frames. ], batch size: 12, lr: 2.61e-02, grad_scale: 64.0
+2024-07-27 14:28:18,173 INFO [train.py:1114] (1/4) Epoch 2, batch 9850, loss[loss=0.2732, simple_loss=0.3406, pruned_loss=0.1029, over 4913.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.3693, pruned_loss=0.1232, over 927616.83 frames. ], batch size: 15, lr: 2.61e-02, grad_scale: 64.0
+2024-07-27 14:28:48,005 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.332e+01 6.564e+01 7.229e+01 8.186e+01 1.183e+02, threshold=1.446e+02, percent-clipped=0.0
+2024-07-27 14:28:57,014 INFO [train.py:1114] (1/4) Epoch 2, batch 9900, loss[loss=0.3413, simple_loss=0.3967, pruned_loss=0.1429, over 4819.00 frames. ], tot_loss[loss=0.3083, simple_loss=0.3694, pruned_loss=0.1236, over 927408.87 frames. ], batch size: 16, lr: 2.61e-02, grad_scale: 64.0
+2024-07-27 14:29:28,777 INFO [train.py:1114] (1/4) Epoch 2, batch 9950, loss[loss=0.2818, simple_loss=0.3476, pruned_loss=0.108, over 4814.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.3691, pruned_loss=0.1239, over 930113.72 frames. ], batch size: 11, lr: 2.60e-02, grad_scale: 64.0
+2024-07-27 14:30:27,306 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.388e+01 7.080e+01 7.845e+01 1.130e+02, threshold=1.416e+02, percent-clipped=0.0
+2024-07-27 14:30:41,809 INFO [train.py:1114] (1/4) Epoch 2, batch 10000, loss[loss=0.2797, simple_loss=0.3529, pruned_loss=0.1032, over 4645.00 frames. ], tot_loss[loss=0.3119, simple_loss=0.3724, pruned_loss=0.1257, over 927388.04 frames. ], batch size: 16, lr: 2.60e-02, grad_scale: 64.0
+2024-07-27 14:31:24,565 INFO [train.py:1114] (1/4) Epoch 2, batch 10050, loss[loss=0.4213, simple_loss=0.4441, pruned_loss=0.1992, over 3386.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.3778, pruned_loss=0.1296, over 916242.05 frames. ], batch size: 35, lr: 2.60e-02, grad_scale: 64.0
+2024-07-27 14:31:50,391 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.657e+01 6.782e+01 7.547e+01 8.673e+01 1.246e+02, threshold=1.509e+02, percent-clipped=0.0
+2024-07-27 14:31:59,881 INFO [train.py:1114] (1/4) Epoch 2, batch 10100, loss[loss=0.3728, simple_loss=0.4114, pruned_loss=0.1671, over 3563.00 frames. ], tot_loss[loss=0.3315, simple_loss=0.3855, pruned_loss=0.1388, over 861828.51 frames. ], batch size: 35, lr: 2.60e-02, grad_scale: 64.0
+2024-07-27 14:32:36,623 INFO [train.py:1114] (1/4) Epoch 2, batch 10150, loss[loss=0.3659, simple_loss=0.4021, pruned_loss=0.1649, over 3397.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3906, pruned_loss=0.1452, over 820677.16 frames. ], batch size: 35, lr: 2.59e-02, grad_scale: 64.0
+2024-07-27 14:33:07,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.821e+01 6.551e+01 7.013e+01 7.617e+01 1.384e+02, threshold=1.403e+02, percent-clipped=0.0
+2024-07-27 14:33:16,424 INFO [train.py:1114] (1/4) Epoch 2, batch 10200, loss[loss=0.3964, simple_loss=0.4289, pruned_loss=0.182, over 3043.00 frames. ], tot_loss[loss=0.3475, simple_loss=0.3945, pruned_loss=0.1503, over 789173.54 frames. ], batch size: 35, lr: 2.59e-02, grad_scale: 64.0
+2024-07-27 14:34:23,831 INFO [train.py:1114] (1/4) Epoch 3, batch 0, loss[loss=0.2618, simple_loss=0.3314, pruned_loss=0.0961, over 4843.00 frames.
], tot_loss[loss=0.2618, simple_loss=0.3314, pruned_loss=0.0961, over 4843.00 frames. ], batch size: 12, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:34:23,831 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 14:34:35,397 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2558, simple_loss=0.3526, pruned_loss=0.07947, over 944034.00 frames. +2024-07-27 14:34:35,398 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 14:34:36,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=27260.0, ans=0.0 +2024-07-27 14:34:55,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27273.333333333332, ans=0.125 +2024-07-27 14:34:55,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=27273.333333333332, ans=0.07 +2024-07-27 14:35:01,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=27286.666666666668, ans=0.125 +2024-07-27 14:35:06,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=27286.666666666668, ans=0.0 +2024-07-27 14:35:10,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=27300.0, ans=0.0 +2024-07-27 14:35:10,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.34 vs. limit=15.0 +2024-07-27 14:35:21,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.01 vs. limit=15.0 +2024-07-27 14:35:23,879 INFO [train.py:1114] (1/4) Epoch 3, batch 50, loss[loss=0.2452, simple_loss=0.3139, pruned_loss=0.08823, over 4622.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3771, pruned_loss=0.1307, over 206226.27 frames. ], batch size: 11, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:35:25,301 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:35:27,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27326.666666666668, ans=0.125 +2024-07-27 14:35:32,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.94 vs. 
limit=15.0 +2024-07-27 14:35:33,789 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.475e+01 6.508e+01 7.033e+01 7.791e+01 1.183e+02, threshold=1.407e+02, percent-clipped=0.0 +2024-07-27 14:35:34,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27340.0, ans=0.1 +2024-07-27 14:35:34,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=27340.0, ans=10.0 +2024-07-27 14:35:46,816 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:35:53,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=27366.666666666668, ans=0.2 +2024-07-27 14:36:02,366 INFO [train.py:1114] (1/4) Epoch 3, batch 100, loss[loss=0.2464, simple_loss=0.3117, pruned_loss=0.09051, over 4635.00 frames. ], tot_loss[loss=0.314, simple_loss=0.3744, pruned_loss=0.1268, over 365186.14 frames. ], batch size: 12, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:36:03,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.72 vs. limit=22.5 +2024-07-27 14:36:07,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=27393.333333333332, ans=0.035 +2024-07-27 14:36:08,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=27406.666666666668, ans=0.0 +2024-07-27 14:36:37,590 INFO [train.py:1114] (1/4) Epoch 3, batch 150, loss[loss=0.2783, simple_loss=0.3409, pruned_loss=0.1079, over 4632.00 frames. ], tot_loss[loss=0.307, simple_loss=0.3694, pruned_loss=0.1223, over 494235.58 frames. ], batch size: 11, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:36:42,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27460.0, ans=0.1 +2024-07-27 14:36:45,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.50 vs. limit=22.5 +2024-07-27 14:36:45,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.91 vs. limit=15.0 +2024-07-27 14:36:47,458 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.350e+01 6.333e+01 7.071e+01 8.102e+01 1.073e+02, threshold=1.414e+02, percent-clipped=0.0 +2024-07-27 14:36:54,908 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:36:56,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=27486.666666666668, ans=0.0048942028985507246 +2024-07-27 14:37:10,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=27513.333333333332, ans=0.125 +2024-07-27 14:37:14,975 INFO [train.py:1114] (1/4) Epoch 3, batch 200, loss[loss=0.316, simple_loss=0.3645, pruned_loss=0.1337, over 4469.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3678, pruned_loss=0.1208, over 593659.44 frames. 
], batch size: 21, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:37:22,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27540.0, ans=0.1 +2024-07-27 14:37:24,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0 +2024-07-27 14:37:33,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27553.333333333332, ans=0.125 +2024-07-27 14:37:38,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=27566.666666666668, ans=0.025 +2024-07-27 14:37:48,655 INFO [train.py:1114] (1/4) Epoch 3, batch 250, loss[loss=0.3364, simple_loss=0.3889, pruned_loss=0.1419, over 4620.00 frames. ], tot_loss[loss=0.303, simple_loss=0.3662, pruned_loss=0.1199, over 670295.67 frames. ], batch size: 16, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:37:57,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.405e+01 7.025e+01 7.906e+01 1.155e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-27 14:38:01,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=27606.666666666668, ans=0.125 +2024-07-27 14:38:06,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27620.0, ans=0.125 +2024-07-27 14:38:08,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.94 vs. limit=15.0 +2024-07-27 14:38:16,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=27646.666666666668, ans=0.035 +2024-07-27 14:38:18,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=27646.666666666668, ans=0.004859420289855072 +2024-07-27 14:38:24,171 INFO [train.py:1114] (1/4) Epoch 3, batch 300, loss[loss=0.2748, simple_loss=0.3512, pruned_loss=0.09922, over 4807.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3633, pruned_loss=0.1183, over 729921.52 frames. 
], batch size: 15, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:38:28,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=27660.0, ans=0.025 +2024-07-27 14:38:28,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=27660.0, ans=0.1 +2024-07-27 14:38:28,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27660.0, ans=0.1 +2024-07-27 14:38:34,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=27673.333333333332, ans=0.2 +2024-07-27 14:38:41,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27686.666666666668, ans=0.1 +2024-07-27 14:38:42,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=27686.666666666668, ans=0.125 +2024-07-27 14:38:47,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=27700.0, ans=0.125 +2024-07-27 14:38:54,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=27700.0, ans=0.125 +2024-07-27 14:39:11,992 INFO [train.py:1114] (1/4) Epoch 3, batch 350, loss[loss=0.2655, simple_loss=0.3255, pruned_loss=0.1027, over 4937.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3638, pruned_loss=0.1187, over 775963.95 frames. ], batch size: 12, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:39:16,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=27726.666666666668, ans=0.125 +2024-07-27 14:39:18,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=27740.0, ans=0.125 +2024-07-27 14:39:21,243 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.509e+01 6.194e+01 6.978e+01 7.817e+01 1.142e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 14:39:34,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-07-27 14:39:53,818 INFO [train.py:1114] (1/4) Epoch 3, batch 400, loss[loss=0.3135, simple_loss=0.374, pruned_loss=0.1265, over 4686.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3639, pruned_loss=0.1189, over 813339.49 frames. ], batch size: 13, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:39:59,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.89 vs. limit=22.5 +2024-07-27 14:40:05,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.83 vs. 
limit=15.0 +2024-07-27 14:40:06,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=27806.666666666668, ans=0.125 +2024-07-27 14:40:13,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=27820.0, ans=0.125 +2024-07-27 14:40:25,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=27846.666666666668, ans=0.025 +2024-07-27 14:40:26,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=27846.666666666668, ans=0.025 +2024-07-27 14:40:29,609 INFO [train.py:1114] (1/4) Epoch 3, batch 450, loss[loss=0.3043, simple_loss=0.3697, pruned_loss=0.1194, over 4630.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3625, pruned_loss=0.1182, over 838952.38 frames. ], batch size: 13, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:40:38,907 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.163e+01 +2024-07-27 14:40:40,713 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.175e+01 6.156e+01 6.961e+01 7.854e+01 1.209e+02, threshold=1.392e+02, percent-clipped=0.0 +2024-07-27 14:40:56,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27900.0, ans=0.125 +2024-07-27 14:41:09,145 INFO [train.py:1114] (1/4) Epoch 3, batch 500, loss[loss=0.3485, simple_loss=0.3931, pruned_loss=0.1519, over 4679.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.3612, pruned_loss=0.1176, over 861191.38 frames. ], batch size: 15, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:41:09,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.29 vs. limit=6.0 +2024-07-27 14:41:24,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0 +2024-07-27 14:41:42,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.35 vs. limit=22.5 +2024-07-27 14:41:48,915 INFO [train.py:1114] (1/4) Epoch 3, batch 550, loss[loss=0.3637, simple_loss=0.4294, pruned_loss=0.149, over 4589.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3614, pruned_loss=0.1173, over 877618.19 frames. ], batch size: 17, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:41:55,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0 +2024-07-27 14:41:57,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=28006.666666666668, ans=0.2 +2024-07-27 14:41:57,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.06 vs. 
limit=10.0 +2024-07-27 14:41:59,866 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.181e+01 6.683e+01 7.809e+01 1.184e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-27 14:42:36,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28046.666666666668, ans=0.1 +2024-07-27 14:42:45,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28046.666666666668, ans=0.1 +2024-07-27 14:42:45,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=28046.666666666668, ans=0.2 +2024-07-27 14:42:46,173 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.68 vs. limit=10.0 +2024-07-27 14:42:46,418 INFO [train.py:1114] (1/4) Epoch 3, batch 600, loss[loss=0.3297, simple_loss=0.3889, pruned_loss=0.1352, over 4680.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3614, pruned_loss=0.1169, over 892315.84 frames. ], batch size: 16, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:42:58,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=28060.0, ans=0.125 +2024-07-27 14:43:21,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=28100.0, ans=0.004760869565217391 +2024-07-27 14:43:28,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=28113.333333333332, ans=0.09899494936611666 +2024-07-27 14:43:30,813 INFO [train.py:1114] (1/4) Epoch 3, batch 650, loss[loss=0.2925, simple_loss=0.3581, pruned_loss=0.1134, over 4756.00 frames. ], tot_loss[loss=0.2973, simple_loss=0.3609, pruned_loss=0.1169, over 903877.94 frames. 
], batch size: 13, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:43:30,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=28126.666666666668, ans=0.125 +2024-07-27 14:43:30,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28126.666666666668, ans=0.1 +2024-07-27 14:43:31,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=28126.666666666668, ans=0.125 +2024-07-27 14:43:37,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=28140.0, ans=0.125 +2024-07-27 14:43:38,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=28140.0, ans=0.015 +2024-07-27 14:43:38,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=28140.0, ans=0.125 +2024-07-27 14:43:39,812 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.211e+01 6.879e+01 7.737e+01 1.031e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 14:43:40,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=28140.0, ans=0.2 +2024-07-27 14:43:44,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28153.333333333332, ans=0.1 +2024-07-27 14:44:01,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=28166.666666666668, ans=0.0047463768115942025 +2024-07-27 14:44:23,644 INFO [train.py:1114] (1/4) Epoch 3, batch 700, loss[loss=0.3064, simple_loss=0.3622, pruned_loss=0.1253, over 4636.00 frames. ], tot_loss[loss=0.2972, simple_loss=0.3608, pruned_loss=0.1168, over 911543.89 frames. ], batch size: 12, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:44:55,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=28206.666666666668, ans=0.2 +2024-07-27 14:45:25,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=28246.666666666668, ans=0.2 +2024-07-27 14:45:25,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=28246.666666666668, ans=0.2 +2024-07-27 14:45:39,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=28246.666666666668, ans=0.025 +2024-07-27 14:45:39,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28246.666666666668, ans=0.1 +2024-07-27 14:45:42,573 INFO [train.py:1114] (1/4) Epoch 3, batch 750, loss[loss=0.2901, simple_loss=0.3707, pruned_loss=0.1048, over 4700.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3605, pruned_loss=0.1165, over 918097.72 frames. 
], batch size: 13, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:45:59,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=28260.0, ans=0.004726086956521739 +2024-07-27 14:45:59,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.57 vs. limit=12.0 +2024-07-27 14:46:01,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=28260.0, ans=0.0 +2024-07-27 14:46:06,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28273.333333333332, ans=0.1 +2024-07-27 14:46:08,316 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.231e+01 6.433e+01 7.255e+01 8.187e+01 1.605e+02, threshold=1.451e+02, percent-clipped=1.0 +2024-07-27 14:46:15,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=28286.666666666668, ans=0.1 +2024-07-27 14:46:17,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.35 vs. limit=15.0 +2024-07-27 14:46:20,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=28286.666666666668, ans=0.0 +2024-07-27 14:46:35,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.56 vs. limit=15.0 +2024-07-27 14:46:41,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=28313.333333333332, ans=0.125 +2024-07-27 14:46:49,667 INFO [train.py:1114] (1/4) Epoch 3, batch 800, loss[loss=0.2297, simple_loss=0.2977, pruned_loss=0.08082, over 4848.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3605, pruned_loss=0.1164, over 923248.05 frames. ], batch size: 12, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:47:02,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=28326.666666666668, ans=0.125 +2024-07-27 14:47:10,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=28340.0, ans=0.1 +2024-07-27 14:47:38,971 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:47:52,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=28366.666666666668, ans=0.125 +2024-07-27 14:48:09,322 INFO [train.py:1114] (1/4) Epoch 3, batch 850, loss[loss=0.3444, simple_loss=0.3927, pruned_loss=0.148, over 4664.00 frames. ], tot_loss[loss=0.2954, simple_loss=0.3597, pruned_loss=0.1155, over 927536.33 frames. ], batch size: 14, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:48:12,180 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:48:19,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.98 vs. 
limit=6.0 +2024-07-27 14:48:21,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.419e+01 6.895e+01 7.641e+01 1.957e+02, threshold=1.379e+02, percent-clipped=1.0 +2024-07-27 14:48:28,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.07 vs. limit=22.5 +2024-07-27 14:48:28,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=28406.666666666668, ans=0.2 +2024-07-27 14:48:51,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=28460.0, ans=0.125 +2024-07-27 14:48:52,060 INFO [train.py:1114] (1/4) Epoch 3, batch 900, loss[loss=0.2259, simple_loss=0.2984, pruned_loss=0.07674, over 4853.00 frames. ], tot_loss[loss=0.2962, simple_loss=0.3596, pruned_loss=0.1164, over 928170.53 frames. ], batch size: 12, lr: 2.41e-02, grad_scale: 64.0 +2024-07-27 14:48:54,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.63 vs. limit=12.0 +2024-07-27 14:49:39,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.28 vs. limit=15.0 +2024-07-27 14:49:59,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=28500.0, ans=0.2 +2024-07-27 14:50:05,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28513.333333333332, ans=0.1 +2024-07-27 14:50:08,970 INFO [train.py:1114] (1/4) Epoch 3, batch 950, loss[loss=0.2886, simple_loss=0.3448, pruned_loss=0.1162, over 4779.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.3587, pruned_loss=0.1151, over 929547.07 frames. ], batch size: 12, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:50:23,821 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.236e+01 6.152e+01 6.859e+01 7.763e+01 1.125e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 14:50:34,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=28553.333333333332, ans=0.125 +2024-07-27 14:50:41,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=28566.666666666668, ans=0.125 +2024-07-27 14:51:03,509 INFO [train.py:1114] (1/4) Epoch 3, batch 1000, loss[loss=0.3325, simple_loss=0.3776, pruned_loss=0.1437, over 4976.00 frames. ], tot_loss[loss=0.2969, simple_loss=0.3607, pruned_loss=0.1165, over 929738.89 frames. ], batch size: 13, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:51:14,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28593.333333333332, ans=0.1 +2024-07-27 14:51:18,527 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=15.0 +2024-07-27 14:51:30,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=28620.0, ans=0.0 +2024-07-27 14:51:54,876 INFO [train.py:1114] (1/4) Epoch 3, batch 1050, loss[loss=0.3365, simple_loss=0.401, pruned_loss=0.136, over 4873.00 frames. 
], tot_loss[loss=0.2965, simple_loss=0.3604, pruned_loss=0.1163, over 932363.48 frames. ], batch size: 14, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:52:02,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=28673.333333333332, ans=0.025 +2024-07-27 14:52:06,686 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.090e+01 6.458e+01 7.095e+01 7.722e+01 9.914e+01, threshold=1.419e+02, percent-clipped=0.0 +2024-07-27 14:52:24,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=28686.666666666668, ans=0.2 +2024-07-27 14:52:25,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=28686.666666666668, ans=0.125 +2024-07-27 14:52:38,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=28700.0, ans=0.125 +2024-07-27 14:52:40,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28713.333333333332, ans=0.1 +2024-07-27 14:52:54,289 INFO [train.py:1114] (1/4) Epoch 3, batch 1100, loss[loss=0.3108, simple_loss=0.3716, pruned_loss=0.125, over 4898.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3593, pruned_loss=0.1159, over 934779.70 frames. ], batch size: 13, lr: 2.40e-02, grad_scale: 128.0 +2024-07-27 14:52:54,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=28726.666666666668, ans=0.125 +2024-07-27 14:52:58,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.14 vs. limit=22.5 +2024-07-27 14:53:25,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.46 vs. limit=22.5 +2024-07-27 14:53:57,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=28780.0, ans=0.025 +2024-07-27 14:54:01,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=28780.0, ans=0.125 +2024-07-27 14:54:03,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=28793.333333333332, ans=0.125 +2024-07-27 14:54:04,139 INFO [train.py:1114] (1/4) Epoch 3, batch 1150, loss[loss=0.2917, simple_loss=0.3535, pruned_loss=0.115, over 4890.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3595, pruned_loss=0.1162, over 934699.93 frames. ], batch size: 13, lr: 2.40e-02, grad_scale: 128.0 +2024-07-27 14:54:14,006 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.388e+01 6.303e+01 6.956e+01 7.734e+01 1.852e+02, threshold=1.391e+02, percent-clipped=1.0 +2024-07-27 14:54:15,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28806.666666666668, ans=0.125 +2024-07-27 14:54:39,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=28846.666666666668, ans=0.07 +2024-07-27 14:54:40,676 INFO [train.py:1114] (1/4) Epoch 3, batch 1200, loss[loss=0.3415, simple_loss=0.4, pruned_loss=0.1414, over 4873.00 frames. 
], tot_loss[loss=0.2983, simple_loss=0.3615, pruned_loss=0.1176, over 933287.23 frames. ], batch size: 14, lr: 2.40e-02, grad_scale: 64.0 +2024-07-27 14:54:48,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=28873.333333333332, ans=0.05 +2024-07-27 14:55:11,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=28913.333333333332, ans=0.125 +2024-07-27 14:55:32,477 INFO [train.py:1114] (1/4) Epoch 3, batch 1250, loss[loss=0.3395, simple_loss=0.3992, pruned_loss=0.1399, over 4796.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3606, pruned_loss=0.1164, over 937395.16 frames. ], batch size: 15, lr: 2.40e-02, grad_scale: 64.0 +2024-07-27 14:55:32,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=28926.666666666668, ans=0.0 +2024-07-27 14:55:41,891 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 6.356e+01 6.895e+01 7.489e+01 1.286e+02, threshold=1.379e+02, percent-clipped=0.0 +2024-07-27 14:56:03,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=28966.666666666668, ans=0.07 +2024-07-27 14:56:05,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=28966.666666666668, ans=0.125 +2024-07-27 14:56:13,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=28980.0, ans=0.125 +2024-07-27 14:56:15,696 INFO [train.py:1114] (1/4) Epoch 3, batch 1300, loss[loss=0.3799, simple_loss=0.4338, pruned_loss=0.163, over 4700.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3598, pruned_loss=0.1158, over 938854.36 frames. ], batch size: 19, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:56:18,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=28993.333333333332, ans=0.2 +2024-07-27 14:56:33,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=29020.0, ans=0.125 +2024-07-27 14:56:57,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=29033.333333333332, ans=0.0 +2024-07-27 14:57:16,157 INFO [train.py:1114] (1/4) Epoch 3, batch 1350, loss[loss=0.2862, simple_loss=0.357, pruned_loss=0.1077, over 4762.00 frames. ], tot_loss[loss=0.2945, simple_loss=0.359, pruned_loss=0.115, over 940924.36 frames. ], batch size: 13, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:57:16,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.71 vs. 
limit=15.0 +2024-07-27 14:57:18,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29060.0, ans=0.125 +2024-07-27 14:57:21,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29060.0, ans=0.1 +2024-07-27 14:57:32,073 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.171e+01 6.881e+01 8.115e+01 1.166e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 14:57:39,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=29086.666666666668, ans=0.0 +2024-07-27 14:57:53,676 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.54 vs. limit=15.0 +2024-07-27 14:57:59,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=29113.333333333332, ans=0.125 +2024-07-27 14:58:06,122 INFO [train.py:1114] (1/4) Epoch 3, batch 1400, loss[loss=0.204, simple_loss=0.2871, pruned_loss=0.06048, over 4708.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3583, pruned_loss=0.1144, over 942833.14 frames. ], batch size: 11, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:58:30,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29153.333333333332, ans=0.125 +2024-07-27 14:58:36,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.15 vs. limit=15.0 +2024-07-27 14:58:56,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=29180.0, ans=0.125 +2024-07-27 14:59:02,311 INFO [train.py:1114] (1/4) Epoch 3, batch 1450, loss[loss=0.294, simple_loss=0.3559, pruned_loss=0.116, over 4693.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3599, pruned_loss=0.1151, over 943107.99 frames. ], batch size: 15, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:59:02,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=29193.333333333332, ans=0.125 +2024-07-27 14:59:02,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0 +2024-07-27 14:59:04,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=29193.333333333332, ans=0.025 +2024-07-27 14:59:18,812 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.399e+01 7.002e+01 7.900e+01 1.035e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 14:59:27,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=29220.0, ans=0.125 +2024-07-27 14:59:30,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.27 vs. limit=22.5 +2024-07-27 14:59:47,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.73 vs. 
limit=15.0 +2024-07-27 14:59:53,879 INFO [train.py:1114] (1/4) Epoch 3, batch 1500, loss[loss=0.2421, simple_loss=0.3185, pruned_loss=0.0829, over 4804.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3606, pruned_loss=0.1153, over 942620.41 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 64.0 +2024-07-27 15:00:05,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=29273.333333333332, ans=0.0 +2024-07-27 15:00:05,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29273.333333333332, ans=0.1 +2024-07-27 15:00:06,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=29273.333333333332, ans=0.0 +2024-07-27 15:00:18,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=29300.0, ans=0.1 +2024-07-27 15:00:20,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=29300.0, ans=0.2 +2024-07-27 15:00:21,922 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.385e+01 +2024-07-27 15:00:23,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=29313.333333333332, ans=0.2 +2024-07-27 15:00:28,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.70 vs. limit=15.0 +2024-07-27 15:00:29,946 INFO [train.py:1114] (1/4) Epoch 3, batch 1550, loss[loss=0.2742, simple_loss=0.3562, pruned_loss=0.09606, over 4887.00 frames. ], tot_loss[loss=0.2946, simple_loss=0.3597, pruned_loss=0.1147, over 939120.43 frames. ], batch size: 15, lr: 2.38e-02, grad_scale: 64.0 +2024-07-27 15:00:33,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=29326.666666666668, ans=0.125 +2024-07-27 15:00:41,580 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.380e+01 6.188e+01 6.996e+01 8.008e+01 1.128e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 15:01:23,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=29366.666666666668, ans=0.125 +2024-07-27 15:01:27,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29380.0, ans=0.1 +2024-07-27 15:01:53,197 INFO [train.py:1114] (1/4) Epoch 3, batch 1600, loss[loss=0.3035, simple_loss=0.3833, pruned_loss=0.1119, over 4875.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.3589, pruned_loss=0.1144, over 937810.94 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 32.0 +2024-07-27 15:01:57,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.96 vs. 
limit=22.5 +2024-07-27 15:02:05,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29406.666666666668, ans=0.1 +2024-07-27 15:02:23,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=29446.666666666668, ans=0.2 +2024-07-27 15:02:30,084 INFO [train.py:1114] (1/4) Epoch 3, batch 1650, loss[loss=0.2743, simple_loss=0.3469, pruned_loss=0.1009, over 4658.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.3579, pruned_loss=0.1138, over 937540.94 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 32.0 +2024-07-27 15:02:30,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29460.0, ans=0.125 +2024-07-27 15:02:31,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=19.85 vs. limit=15.0 +2024-07-27 15:02:38,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=29473.333333333332, ans=0.025 +2024-07-27 15:02:40,271 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.917e+01 6.286e+01 6.723e+01 7.368e+01 1.143e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-27 15:02:54,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=29473.333333333332, ans=0.125 +2024-07-27 15:03:09,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=29500.0, ans=0.125 +2024-07-27 15:03:14,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=29500.0, ans=0.004456521739130435 +2024-07-27 15:03:16,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29513.333333333332, ans=0.1 +2024-07-27 15:03:27,846 INFO [train.py:1114] (1/4) Epoch 3, batch 1700, loss[loss=0.2801, simple_loss=0.3412, pruned_loss=0.1095, over 4706.00 frames. ], tot_loss[loss=0.294, simple_loss=0.359, pruned_loss=0.1145, over 939169.98 frames. ], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:03:28,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=29526.666666666668, ans=0.125 +2024-07-27 15:03:56,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.23 vs. limit=15.0 +2024-07-27 15:03:59,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=29566.666666666668, ans=0.1 +2024-07-27 15:04:05,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=29580.0, ans=0.125 +2024-07-27 15:04:11,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.68 vs. limit=15.0 +2024-07-27 15:04:14,458 INFO [train.py:1114] (1/4) Epoch 3, batch 1750, loss[loss=0.2531, simple_loss=0.3243, pruned_loss=0.09095, over 4796.00 frames. ], tot_loss[loss=0.2937, simple_loss=0.3591, pruned_loss=0.1142, over 940071.14 frames. 
], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:04:30,955 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.973e+01 6.164e+01 6.649e+01 7.575e+01 1.168e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 15:04:33,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=29606.666666666668, ans=0.0 +2024-07-27 15:04:46,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=29620.0, ans=0.0 +2024-07-27 15:05:04,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=29633.333333333332, ans=0.0 +2024-07-27 15:05:05,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=29633.333333333332, ans=0.5 +2024-07-27 15:05:05,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=29633.333333333332, ans=0.004427536231884059 +2024-07-27 15:05:15,312 INFO [train.py:1114] (1/4) Epoch 3, batch 1800, loss[loss=0.3113, simple_loss=0.388, pruned_loss=0.1173, over 4638.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3596, pruned_loss=0.1144, over 941061.65 frames. ], batch size: 13, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:05:17,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=29660.0, ans=0.2 +2024-07-27 15:05:24,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=29673.333333333332, ans=0.2 +2024-07-27 15:05:25,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.14 vs. limit=15.0 +2024-07-27 15:05:27,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=29673.333333333332, ans=0.0 +2024-07-27 15:05:33,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29686.666666666668, ans=0.1 +2024-07-27 15:05:33,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=29686.666666666668, ans=0.125 +2024-07-27 15:05:49,940 INFO [train.py:1114] (1/4) Epoch 3, batch 1850, loss[loss=0.2875, simple_loss=0.3575, pruned_loss=0.1088, over 4812.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.3584, pruned_loss=0.1137, over 940832.10 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:06:06,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=29726.666666666668, ans=0.125 +2024-07-27 15:06:10,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29740.0, ans=0.1 +2024-07-27 15:06:11,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.02 vs. 
limit=15.0
+2024-07-27 15:06:13,606 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 6.411e+01 6.989e+01 8.311e+01 1.252e+02, threshold=1.398e+02, percent-clipped=0.0
+2024-07-27 15:06:39,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=29766.666666666668, ans=0.035
+2024-07-27 15:07:02,453 INFO [train.py:1114] (1/4) Epoch 3, batch 1900, loss[loss=0.3228, simple_loss=0.389, pruned_loss=0.1283, over 4654.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.3593, pruned_loss=0.1142, over 941855.50 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0
+2024-07-27 15:07:10,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=29793.333333333332, ans=0.2
+2024-07-27 15:07:33,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=29833.333333333332, ans=0.05
+2024-07-27 15:07:40,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=29846.666666666668, ans=0.004381159420289855
+2024-07-27 15:07:43,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.68 vs. limit=15.0
+2024-07-27 15:08:06,601 INFO [train.py:1114] (1/4) Epoch 3, batch 1950, loss[loss=0.2701, simple_loss=0.3357, pruned_loss=0.1023, over 4902.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.3606, pruned_loss=0.115, over 943781.73 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0
+2024-07-27 15:08:06,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=29860.0, ans=0.125
+2024-07-27 15:08:07,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.17 vs. limit=10.0
+2024-07-27 15:08:18,972 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.436e+01 6.844e+01 7.392e+01 3.834e+02, threshold=1.369e+02, percent-clipped=1.0
+2024-07-27 15:08:29,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=29886.666666666668, ans=0.2
+2024-07-27 15:08:56,177 INFO [train.py:1114] (1/4) Epoch 3, batch 2000, loss[loss=0.3124, simple_loss=0.3553, pruned_loss=0.1347, over 4804.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3607, pruned_loss=0.1153, over 941432.68 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0
+2024-07-27 15:09:14,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29953.333333333332, ans=0.125
+2024-07-27 15:09:21,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=29966.666666666668, ans=10.0
+2024-07-27 15:09:25,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=29980.0, ans=0.125
+2024-07-27 15:09:42,731 INFO [train.py:1114] (1/4) Epoch 3, batch 2050, loss[loss=0.2921, simple_loss=0.3649, pruned_loss=0.1096, over 4618.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3601, pruned_loss=0.1151, over 939501.75 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0
+2024-07-27 15:09:43,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.48 vs. limit=10.0
+2024-07-27 15:09:48,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29993.333333333332, ans=0.1
+2024-07-27 15:09:52,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=30006.666666666668, ans=0.125
+2024-07-27 15:09:52,890 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.283e+01 7.092e+01 8.463e+01 1.553e+02, threshold=1.418e+02, percent-clipped=1.0
+2024-07-27 15:09:55,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30006.666666666668, ans=0.125
+2024-07-27 15:10:09,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=30046.666666666668, ans=0.015
+2024-07-27 15:10:11,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=30046.666666666668, ans=0.09899494936611666
+2024-07-27 15:10:11,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.37 vs. limit=15.0
+2024-07-27 15:10:12,677 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-27 15:10:16,478 INFO [train.py:1114] (1/4) Epoch 3, batch 2100, loss[loss=0.2956, simple_loss=0.3623, pruned_loss=0.1145, over 4773.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.3588, pruned_loss=0.1144, over 941066.06 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0
+2024-07-27 15:10:22,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.55 vs. limit=22.5
+2024-07-27 15:10:27,316 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:10:29,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30086.666666666668, ans=0.1
+2024-07-27 15:10:29,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=30086.666666666668, ans=0.2
+2024-07-27 15:10:50,457 INFO [train.py:1114] (1/4) Epoch 3, batch 2150, loss[loss=0.2916, simple_loss=0.353, pruned_loss=0.1151, over 4890.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3571, pruned_loss=0.1138, over 944192.44 frames. ], batch size: 13, lr: 2.35e-02, grad_scale: 32.0
+2024-07-27 15:10:50,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=30126.666666666668, ans=0.125
+2024-07-27 15:10:57,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=30140.0, ans=0.004317391304347827
+2024-07-27 15:11:01,325 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+01 6.161e+01 6.566e+01 7.305e+01 9.854e+01, threshold=1.313e+02, percent-clipped=0.0
+2024-07-27 15:11:03,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30140.0, ans=0.125
+2024-07-27 15:11:05,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=12.0
+2024-07-27 15:11:22,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=30180.0, ans=0.004308695652173913
+2024-07-27 15:11:23,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.53 vs. limit=15.0
+2024-07-27 15:11:26,652 INFO [train.py:1114] (1/4) Epoch 3, batch 2200, loss[loss=0.2991, simple_loss=0.361, pruned_loss=0.1186, over 4815.00 frames. ], tot_loss[loss=0.2943, simple_loss=0.3585, pruned_loss=0.115, over 943019.97 frames. ], batch size: 14, lr: 2.35e-02, grad_scale: 32.0
+2024-07-27 15:11:30,172 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:11:43,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=15.0
+2024-07-27 15:11:48,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=30233.333333333332, ans=0.004297101449275363
+2024-07-27 15:11:48,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30233.333333333332, ans=0.125
+2024-07-27 15:11:48,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=30233.333333333332, ans=0.0
+2024-07-27 15:12:05,474 INFO [train.py:1114] (1/4) Epoch 3, batch 2250, loss[loss=0.3226, simple_loss=0.3883, pruned_loss=0.1285, over 4708.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.358, pruned_loss=0.1141, over 941330.25 frames. ], batch size: 13, lr: 2.35e-02, grad_scale: 32.0
+2024-07-27 15:12:09,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.83 vs. limit=15.0
+2024-07-27 15:12:15,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=30273.333333333332, ans=0.125
+2024-07-27 15:12:16,107 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.175e+01 6.906e+01 7.852e+01 1.345e+02, threshold=1.381e+02, percent-clipped=1.0
+2024-07-27 15:12:16,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30273.333333333332, ans=0.1
+2024-07-27 15:12:18,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=30286.666666666668, ans=0.125
+2024-07-27 15:12:19,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=30286.666666666668, ans=0.125
+2024-07-27 15:12:19,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.61 vs. limit=22.5
+2024-07-27 15:12:20,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.54 vs. limit=22.5
+2024-07-27 15:12:21,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=12.0
+2024-07-27 15:12:44,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0
+2024-07-27 15:12:47,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30313.333333333332, ans=0.125
+2024-07-27 15:12:50,202 INFO [train.py:1114] (1/4) Epoch 3, batch 2300, loss[loss=0.2403, simple_loss=0.3014, pruned_loss=0.0896, over 4931.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3566, pruned_loss=0.1132, over 938998.86 frames. ], batch size: 12, lr: 2.35e-02, grad_scale: 32.0
+2024-07-27 15:12:57,330 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:12:58,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=30340.0, ans=0.125
+2024-07-27 15:13:20,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30366.666666666668, ans=0.1
+2024-07-27 15:13:22,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=30380.0, ans=0.125
+2024-07-27 15:13:33,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30380.0, ans=0.1
+2024-07-27 15:13:35,755 INFO [train.py:1114] (1/4) Epoch 3, batch 2350, loss[loss=0.2819, simple_loss=0.3524, pruned_loss=0.1058, over 4633.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3553, pruned_loss=0.112, over 941132.29 frames. ], batch size: 13, lr: 2.34e-02, grad_scale: 32.0
+2024-07-27 15:13:51,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=30406.666666666668, ans=0.125
+2024-07-27 15:13:54,298 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+01 6.422e+01 7.140e+01 8.022e+01 1.675e+02, threshold=1.428e+02, percent-clipped=1.0
+2024-07-27 15:14:11,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=30446.666666666668, ans=0.05
+2024-07-27 15:14:18,263 INFO [train.py:1114] (1/4) Epoch 3, batch 2400, loss[loss=0.2447, simple_loss=0.3166, pruned_loss=0.08646, over 4641.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3561, pruned_loss=0.1126, over 940802.05 frames. ], batch size: 12, lr: 2.34e-02, grad_scale: 32.0
+2024-07-27 15:14:51,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=30500.0, ans=0.0
+2024-07-27 15:14:51,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=30500.0, ans=0.125
+2024-07-27 15:14:52,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.03 vs. limit=15.0
+2024-07-27 15:15:03,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=30526.666666666668, ans=0.025
+2024-07-27 15:15:04,219 INFO [train.py:1114] (1/4) Epoch 3, batch 2450, loss[loss=0.2573, simple_loss=0.3187, pruned_loss=0.09791, over 4694.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3573, pruned_loss=0.1137, over 936574.75 frames. ], batch size: 13, lr: 2.34e-02, grad_scale: 32.0
+2024-07-27 15:15:22,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.262e+01 6.488e+01 7.535e+01 9.077e+01 1.631e+02, threshold=1.507e+02, percent-clipped=1.0
+2024-07-27 15:15:26,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=30553.333333333332, ans=0.125
+2024-07-27 15:15:46,319 INFO [train.py:1114] (1/4) Epoch 3, batch 2500, loss[loss=0.3086, simple_loss=0.3819, pruned_loss=0.1177, over 4809.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3564, pruned_loss=0.1129, over 938599.12 frames. ], batch size: 14, lr: 2.34e-02, grad_scale: 32.0
+2024-07-27 15:15:46,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=30593.333333333332, ans=0.004218840579710145
+2024-07-27 15:15:52,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=30593.333333333332, ans=0.0
+2024-07-27 15:15:57,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=30606.666666666668, ans=0.004215942028985508
+2024-07-27 15:16:03,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=30620.0, ans=0.2
+2024-07-27 15:16:05,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=30620.0, ans=0.2
+2024-07-27 15:16:07,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30620.0, ans=0.125
+2024-07-27 15:16:23,928 INFO [train.py:1114] (1/4) Epoch 3, batch 2550, loss[loss=0.2736, simple_loss=0.335, pruned_loss=0.1061, over 4807.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3558, pruned_loss=0.1122, over 937988.03 frames. ], batch size: 11, lr: 2.33e-02, grad_scale: 32.0
+2024-07-27 15:16:56,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=30673.333333333332, ans=0.125
+2024-07-27 15:16:59,985 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.098e+01 6.217e+01 6.996e+01 7.708e+01 1.283e+02, threshold=1.399e+02, percent-clipped=0.0
+2024-07-27 15:17:09,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=30673.333333333332, ans=0.2
+2024-07-27 15:17:13,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=30686.666666666668, ans=0.2
+2024-07-27 15:17:26,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=30713.333333333332, ans=0.125
+2024-07-27 15:17:31,977 INFO [train.py:1114] (1/4) Epoch 3, batch 2600, loss[loss=0.3006, simple_loss=0.3618, pruned_loss=0.1197, over 4896.00 frames. ], tot_loss[loss=0.2916, simple_loss=0.3569, pruned_loss=0.1132, over 936769.14 frames. ], batch size: 13, lr: 2.33e-02, grad_scale: 32.0
+2024-07-27 15:17:45,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=30740.0, ans=0.0
+2024-07-27 15:17:54,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.51 vs. limit=15.0
+2024-07-27 15:18:08,636 INFO [train.py:1114] (1/4) Epoch 3, batch 2650, loss[loss=0.2945, simple_loss=0.3666, pruned_loss=0.1112, over 4630.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3574, pruned_loss=0.1134, over 939320.12 frames. ], batch size: 16, lr: 2.33e-02, grad_scale: 32.0
+2024-07-27 15:18:25,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0
+2024-07-27 15:18:25,841 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.50 vs. limit=15.0
+2024-07-27 15:18:25,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.17 vs. limit=10.0
+2024-07-27 15:18:28,832 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.212e+01 6.736e+01 7.183e+01 9.052e+01, threshold=1.347e+02, percent-clipped=0.0
+2024-07-27 15:18:42,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=30833.333333333332, ans=0.0
+2024-07-27 15:18:53,032 INFO [train.py:1114] (1/4) Epoch 3, batch 2700, loss[loss=0.2909, simple_loss=0.3606, pruned_loss=0.1106, over 4742.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.3582, pruned_loss=0.114, over 939381.03 frames. ], batch size: 14, lr: 2.33e-02, grad_scale: 32.0
+2024-07-27 15:19:31,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=30913.333333333332, ans=0.004149275362318841
+2024-07-27 15:19:34,126 INFO [train.py:1114] (1/4) Epoch 3, batch 2750, loss[loss=0.3009, simple_loss=0.357, pruned_loss=0.1224, over 4711.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3564, pruned_loss=0.1132, over 939328.51 frames. ], batch size: 12, lr: 2.33e-02, grad_scale: 32.0
+2024-07-27 15:19:36,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30926.666666666668, ans=0.1
+2024-07-27 15:19:44,301 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 6.371e+01 6.868e+01 7.779e+01 1.190e+02, threshold=1.374e+02, percent-clipped=0.0
+2024-07-27 15:19:48,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=30953.333333333332, ans=0.025
+2024-07-27 15:19:52,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.55 vs. limit=15.0
+2024-07-27 15:19:57,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=30966.666666666668, ans=0.125
+2024-07-27 15:20:08,673 INFO [train.py:1114] (1/4) Epoch 3, batch 2800, loss[loss=0.39, simple_loss=0.4091, pruned_loss=0.1855, over 3485.00 frames. ], tot_loss[loss=0.2918, simple_loss=0.3566, pruned_loss=0.1135, over 936964.32 frames. ], batch size: 35, lr: 2.32e-02, grad_scale: 32.0
+2024-07-27 15:20:31,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31033.333333333332, ans=0.1
+2024-07-27 15:20:35,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=31033.333333333332, ans=0.07
+2024-07-27 15:20:36,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=31046.666666666668, ans=0.125
+2024-07-27 15:20:49,900 INFO [train.py:1114] (1/4) Epoch 3, batch 2850, loss[loss=0.2716, simple_loss=0.3382, pruned_loss=0.1025, over 4964.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.3573, pruned_loss=0.1143, over 935632.12 frames. ], batch size: 13, lr: 2.32e-02, grad_scale: 32.0
+2024-07-27 15:20:54,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=31060.0, ans=0.0
+2024-07-27 15:20:59,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=31073.333333333332, ans=0.125
+2024-07-27 15:21:01,743 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.366e+01 6.414e+01 6.981e+01 8.121e+01 1.632e+02, threshold=1.396e+02, percent-clipped=1.0
+2024-07-27 15:21:10,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.57 vs. limit=15.0
+2024-07-27 15:21:12,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=12.0
+2024-07-27 15:21:15,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31086.666666666668, ans=0.1
+2024-07-27 15:21:28,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=31113.333333333332, ans=0.004105797101449276
+2024-07-27 15:21:43,052 INFO [train.py:1114] (1/4) Epoch 3, batch 2900, loss[loss=0.2472, simple_loss=0.331, pruned_loss=0.08172, over 4826.00 frames. ], tot_loss[loss=0.2925, simple_loss=0.3578, pruned_loss=0.1135, over 939485.24 frames. ], batch size: 13, lr: 2.32e-02, grad_scale: 32.0
+2024-07-27 15:21:54,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=31126.666666666668, ans=0.0
+2024-07-27 15:22:02,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=31140.0, ans=0.125
+2024-07-27 15:22:08,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=31153.333333333332, ans=0.2
+2024-07-27 15:22:12,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=31166.666666666668, ans=0.0
+2024-07-27 15:22:16,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.73 vs. limit=15.0
+2024-07-27 15:22:16,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.96 vs. limit=15.0
+2024-07-27 15:22:35,884 INFO [train.py:1114] (1/4) Epoch 3, batch 2950, loss[loss=0.3359, simple_loss=0.3745, pruned_loss=0.1487, over 4707.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3575, pruned_loss=0.1145, over 938598.76 frames. ], batch size: 12, lr: 2.32e-02, grad_scale: 32.0
+2024-07-27 15:22:47,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=31193.333333333332, ans=0.025
+2024-07-27 15:22:52,192 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.239e+01 6.722e+01 7.619e+01 1.818e+02, threshold=1.344e+02, percent-clipped=1.0
+2024-07-27 15:22:55,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31220.0, ans=0.1
+2024-07-27 15:22:59,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=31220.0, ans=0.2
+2024-07-27 15:23:00,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=31220.0, ans=0.2
+2024-07-27 15:23:02,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=31220.0, ans=0.125
+2024-07-27 15:23:03,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=31220.0, ans=22.5
+2024-07-27 15:23:12,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=31246.666666666668, ans=0.07
+2024-07-27 15:23:13,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=31246.666666666668, ans=0.2
+2024-07-27 15:23:18,255 INFO [train.py:1114] (1/4) Epoch 3, batch 3000, loss[loss=0.2592, simple_loss=0.3381, pruned_loss=0.09017, over 4756.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3566, pruned_loss=0.1132, over 938220.14 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0
+2024-07-27 15:23:18,256 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 15:23:33,136 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2358, simple_loss=0.3336, pruned_loss=0.06904, over 944034.00 frames.
+2024-07-27 15:23:33,137 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 15:23:42,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=31273.333333333332, ans=0.125
+2024-07-27 15:23:48,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31286.666666666668, ans=0.1
+2024-07-27 15:24:09,322 INFO [train.py:1114] (1/4) Epoch 3, batch 3050, loss[loss=0.2613, simple_loss=0.3251, pruned_loss=0.0988, over 4638.00 frames. ], tot_loss[loss=0.2925, simple_loss=0.3573, pruned_loss=0.1139, over 937233.93 frames. ], batch size: 12, lr: 2.31e-02, grad_scale: 32.0
+2024-07-27 15:24:16,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.10 vs. limit=15.0
+2024-07-27 15:24:19,669 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 6.122e+01 6.753e+01 7.490e+01 1.166e+02, threshold=1.351e+02, percent-clipped=0.0
+2024-07-27 15:24:26,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0
+2024-07-27 15:24:38,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31380.0, ans=0.1
+2024-07-27 15:24:42,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.13 vs. limit=15.0
+2024-07-27 15:24:45,611 INFO [train.py:1114] (1/4) Epoch 3, batch 3100, loss[loss=0.3171, simple_loss=0.3847, pruned_loss=0.1247, over 4623.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3554, pruned_loss=0.1121, over 937852.19 frames. ], batch size: 16, lr: 2.31e-02, grad_scale: 32.0
+2024-07-27 15:24:51,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=31406.666666666668, ans=0.125
+2024-07-27 15:24:59,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=31420.0, ans=0.004039130434782609
+2024-07-27 15:25:01,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31420.0, ans=0.1
+2024-07-27 15:25:01,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.87 vs. limit=15.0
+2024-07-27 15:25:02,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=31420.0, ans=0.2
+2024-07-27 15:25:11,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0
+2024-07-27 15:25:21,431 INFO [train.py:1114] (1/4) Epoch 3, batch 3150, loss[loss=0.2961, simple_loss=0.3732, pruned_loss=0.1095, over 4604.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3562, pruned_loss=0.1119, over 937835.03 frames. ], batch size: 17, lr: 2.31e-02, grad_scale: 32.0
+2024-07-27 15:25:21,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=31460.0, ans=0.125
+2024-07-27 15:25:21,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.69 vs. limit=15.0
+2024-07-27 15:25:22,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=31460.0, ans=0.125
+2024-07-27 15:25:23,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=31460.0, ans=0.0
+2024-07-27 15:25:29,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.24 vs. limit=15.0
+2024-07-27 15:25:29,954 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:25:31,690 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.198e+01 6.919e+01 7.574e+01 1.132e+02, threshold=1.384e+02, percent-clipped=0.0
+2024-07-27 15:25:31,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=31473.333333333332, ans=0.125
+2024-07-27 15:25:32,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0
+2024-07-27 15:25:34,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=31486.666666666668, ans=0.0
+2024-07-27 15:25:36,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=31486.666666666668, ans=0.004024637681159421
+2024-07-27 15:25:40,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=31486.666666666668, ans=0.125
+2024-07-27 15:25:40,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=31486.666666666668, ans=0.2
+2024-07-27 15:25:42,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=31500.0, ans=0.2
+2024-07-27 15:25:43,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=31500.0, ans=0.125
+2024-07-27 15:25:56,851 INFO [train.py:1114] (1/4) Epoch 3, batch 3200, loss[loss=0.2654, simple_loss=0.3315, pruned_loss=0.09971, over 4829.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.356, pruned_loss=0.1117, over 939197.75 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0
+2024-07-27 15:25:58,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.33 vs. limit=15.0
+2024-07-27 15:25:59,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=31526.666666666668, ans=0.125
+2024-07-27 15:26:01,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=31526.666666666668, ans=0.1
+2024-07-27 15:26:23,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.33 vs. limit=22.5
+2024-07-27 15:26:49,775 INFO [train.py:1114] (1/4) Epoch 3, batch 3250, loss[loss=0.2985, simple_loss=0.3849, pruned_loss=0.1061, over 4928.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3569, pruned_loss=0.1121, over 940346.64 frames. ], batch size: 14, lr: 2.30e-02, grad_scale: 32.0
+2024-07-27 15:26:58,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31606.666666666668, ans=0.1
+2024-07-27 15:27:01,541 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.545e+01 6.278e+01 6.797e+01 7.554e+01 1.103e+02, threshold=1.359e+02, percent-clipped=0.0
+2024-07-27 15:27:10,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=31620.0, ans=0.5
+2024-07-27 15:27:15,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.07 vs. limit=15.0
+2024-07-27 15:27:19,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=31633.333333333332, ans=0.2
+2024-07-27 15:27:30,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=31660.0, ans=0.125
+2024-07-27 15:27:30,557 INFO [train.py:1114] (1/4) Epoch 3, batch 3300, loss[loss=0.329, simple_loss=0.3811, pruned_loss=0.1385, over 4765.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3559, pruned_loss=0.1119, over 940222.37 frames. ], batch size: 19, lr: 2.30e-02, grad_scale: 32.0
+2024-07-27 15:27:45,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=31673.333333333332, ans=0.025
+2024-07-27 15:27:47,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31673.333333333332, ans=0.1
+2024-07-27 15:27:48,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.57 vs. limit=15.0
+2024-07-27 15:27:48,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0
+2024-07-27 15:27:50,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=31673.333333333332, ans=0.0
+2024-07-27 15:28:10,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=31713.333333333332, ans=0.125
+2024-07-27 15:28:13,926 INFO [train.py:1114] (1/4) Epoch 3, batch 3350, loss[loss=0.318, simple_loss=0.3718, pruned_loss=0.1321, over 4630.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3565, pruned_loss=0.1124, over 938433.79 frames. ], batch size: 17, lr: 2.30e-02, grad_scale: 32.0
+2024-07-27 15:28:24,376 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.313e+01 6.716e+01 7.505e+01 1.231e+02, threshold=1.343e+02, percent-clipped=0.0
+2024-07-27 15:28:44,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=31780.0, ans=0.125
+2024-07-27 15:28:48,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=31780.0, ans=0.125
+2024-07-27 15:28:49,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=31793.333333333332, ans=0.125
+2024-07-27 15:28:50,132 INFO [train.py:1114] (1/4) Epoch 3, batch 3400, loss[loss=0.2357, simple_loss=0.2963, pruned_loss=0.08759, over 4806.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3559, pruned_loss=0.1126, over 937554.72 frames. ], batch size: 11, lr: 2.30e-02, grad_scale: 32.0
+2024-07-27 15:29:01,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.38 vs. limit=15.0
+2024-07-27 15:29:06,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=31806.666666666668, ans=0.2
+2024-07-27 15:29:12,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=31806.666666666668, ans=0.0
+2024-07-27 15:29:16,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=31820.0, ans=0.125
+2024-07-27 15:29:22,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=31833.333333333332, ans=0.125
+2024-07-27 15:29:27,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=31833.333333333332, ans=0.003949275362318841
+2024-07-27 15:29:28,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31846.666666666668, ans=0.1
+2024-07-27 15:29:35,520 INFO [train.py:1114] (1/4) Epoch 3, batch 3450, loss[loss=0.3212, simple_loss=0.3786, pruned_loss=0.1319, over 4672.00 frames. ], tot_loss[loss=0.2899, simple_loss=0.3556, pruned_loss=0.1121, over 937800.55 frames. ], batch size: 19, lr: 2.29e-02, grad_scale: 32.0
+2024-07-27 15:29:49,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=31860.0, ans=0.0
+2024-07-27 15:29:54,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=31873.333333333332, ans=0.125
+2024-07-27 15:29:55,348 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 6.313e+01 6.956e+01 7.933e+01 1.220e+02, threshold=1.391e+02, percent-clipped=0.0
+2024-07-27 15:30:05,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.01 vs. limit=15.0
+2024-07-27 15:30:20,199 INFO [train.py:1114] (1/4) Epoch 3, batch 3500, loss[loss=0.2438, simple_loss=0.3235, pruned_loss=0.08206, over 4934.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3539, pruned_loss=0.1116, over 938391.14 frames. ], batch size: 12, lr: 2.29e-02, grad_scale: 32.0
+2024-07-27 15:30:21,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31926.666666666668, ans=0.1
+2024-07-27 15:30:24,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=31926.666666666668, ans=0.0
+2024-07-27 15:30:30,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=31940.0, ans=0.0
+2024-07-27 15:30:44,993 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:30:51,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=31980.0, ans=0.125
+2024-07-27 15:30:51,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.01 vs. limit=22.5
+2024-07-27 15:30:55,137 INFO [train.py:1114] (1/4) Epoch 3, batch 3550, loss[loss=0.2578, simple_loss=0.3483, pruned_loss=0.08372, over 4660.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3543, pruned_loss=0.1114, over 938906.47 frames. ], batch size: 14, lr: 2.29e-02, grad_scale: 32.0
+2024-07-27 15:31:10,781 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 6.203e+01 6.849e+01 7.664e+01 1.472e+02, threshold=1.370e+02, percent-clipped=1.0
+2024-07-27 15:31:13,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32020.0, ans=0.0
+2024-07-27 15:31:14,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=32020.0, ans=0.003908695652173913
+2024-07-27 15:31:16,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=32020.0, ans=0.125
+2024-07-27 15:31:24,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=32033.333333333332, ans=0.003905797101449276
+2024-07-27 15:31:34,511 INFO [train.py:1114] (1/4) Epoch 3, batch 3600, loss[loss=0.2584, simple_loss=0.3281, pruned_loss=0.09433, over 4957.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3544, pruned_loss=0.1113, over 940287.69 frames. ], batch size: 13, lr: 2.29e-02, grad_scale: 64.0
+2024-07-27 15:31:40,668 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:31:41,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32060.0, ans=0.125
+2024-07-27 15:31:46,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=32073.333333333332, ans=0.2
+2024-07-27 15:31:53,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=32086.666666666668, ans=0.05
+2024-07-27 15:31:55,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32086.666666666668, ans=0.125
+2024-07-27 15:31:57,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=32100.0, ans=0.125
+2024-07-27 15:32:00,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32100.0, ans=0.1
+2024-07-27 15:32:04,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=32113.333333333332, ans=0.0038884057971014492
+2024-07-27 15:32:07,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32113.333333333332, ans=0.125
+2024-07-27 15:32:07,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=32113.333333333332, ans=0.5
+2024-07-27 15:32:09,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.56 vs. limit=22.5
+2024-07-27 15:32:10,744 INFO [train.py:1114] (1/4) Epoch 3, batch 3650, loss[loss=0.3333, simple_loss=0.3976, pruned_loss=0.1345, over 4894.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3532, pruned_loss=0.1108, over 940956.87 frames. ], batch size: 15, lr: 2.29e-02, grad_scale: 64.0
+2024-07-27 15:32:21,262 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.293e+01 6.817e+01 8.019e+01 9.949e+01 1.573e+02, threshold=1.604e+02, percent-clipped=3.0
+2024-07-27 15:32:31,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=32166.666666666668, ans=0.1
+2024-07-27 15:32:45,070 INFO [train.py:1114] (1/4) Epoch 3, batch 3700, loss[loss=0.2777, simple_loss=0.3563, pruned_loss=0.09956, over 4926.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3531, pruned_loss=0.1097, over 941805.44 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0
+2024-07-27 15:33:02,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=23.47 vs. limit=15.0
+2024-07-27 15:33:06,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32233.333333333332, ans=0.1
+2024-07-27 15:33:06,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=32233.333333333332, ans=0.125
+2024-07-27 15:33:12,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.84 vs. limit=15.0
+2024-07-27 15:33:20,703 INFO [train.py:1114] (1/4) Epoch 3, batch 3750, loss[loss=0.2607, simple_loss=0.3241, pruned_loss=0.09867, over 4799.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3522, pruned_loss=0.1093, over 943446.98 frames. ], batch size: 11, lr: 2.28e-02, grad_scale: 64.0
+2024-07-27 15:33:28,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=32273.333333333332, ans=0.125
+2024-07-27 15:33:30,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.77 vs. limit=15.0
+2024-07-27 15:33:31,094 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.187e+01 6.972e+01 7.768e+01 2.543e+02, threshold=1.394e+02, percent-clipped=1.0
+2024-07-27 15:33:49,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.60 vs. limit=22.5
+2024-07-27 15:33:49,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=32300.0, ans=0.003847826086956522
+2024-07-27 15:33:51,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=32300.0, ans=0.95
+2024-07-27 15:33:53,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=32313.333333333332, ans=0.003844927536231884
+2024-07-27 15:34:01,595 INFO [train.py:1114] (1/4) Epoch 3, batch 3800, loss[loss=0.3061, simple_loss=0.3768, pruned_loss=0.1177, over 4812.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3524, pruned_loss=0.1099, over 941932.28 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0
+2024-07-27 15:34:12,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=32340.0, ans=0.125
+2024-07-27 15:34:28,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=32380.0, ans=0.0038304347826086955
+2024-07-27 15:34:36,454 INFO [train.py:1114] (1/4) Epoch 3, batch 3850, loss[loss=0.2664, simple_loss=0.3461, pruned_loss=0.09332, over 4628.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3518, pruned_loss=0.1094, over 942440.13 frames. ], batch size: 16, lr: 2.28e-02, grad_scale: 64.0
+2024-07-27 15:34:43,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0
+2024-07-27 15:34:44,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32406.666666666668, ans=0.1
+2024-07-27 15:34:46,751 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.304e+01 7.059e+01 8.148e+01 1.168e+02, threshold=1.412e+02, percent-clipped=0.0
+2024-07-27 15:34:48,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=32406.666666666668, ans=0.2
+2024-07-27 15:34:50,244 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:34:51,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.74 vs. limit=15.0
+2024-07-27 15:34:59,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=32433.333333333332, ans=0.04949747468305833
+2024-07-27 15:35:06,951 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:35:09,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=32446.666666666668, ans=10.0
+2024-07-27 15:35:12,178 INFO [train.py:1114] (1/4) Epoch 3, batch 3900, loss[loss=0.2834, simple_loss=0.3504, pruned_loss=0.1082, over 4805.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3533, pruned_loss=0.11, over 942775.36 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0
+2024-07-27 15:35:13,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=6.0
+2024-07-27 15:35:18,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=32473.333333333332, ans=0.125
+2024-07-27 15:35:27,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=32486.666666666668, ans=0.0038072463768115943
+2024-07-27 15:35:29,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=32486.666666666668, ans=0.2
+2024-07-27 15:35:29,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32486.666666666668, ans=0.1
+2024-07-27 15:35:35,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=32500.0, ans=0.125
+2024-07-27 15:35:39,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=32513.333333333332, ans=0.125
+2024-07-27 15:35:41,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.74 vs. limit=22.5
+2024-07-27 15:35:42,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.22 vs. limit=22.5
+2024-07-27 15:35:46,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=32526.666666666668, ans=0.125
+2024-07-27 15:35:46,893 INFO [train.py:1114] (1/4) Epoch 3, batch 3950, loss[loss=0.337, simple_loss=0.3998, pruned_loss=0.1371, over 4832.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3538, pruned_loss=0.1103, over 944648.64 frames. ], batch size: 16, lr: 2.27e-02, grad_scale: 64.0
+2024-07-27 15:35:58,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 6.366e+01 6.864e+01 8.017e+01 1.947e+02, threshold=1.373e+02, percent-clipped=1.0
+2024-07-27 15:36:03,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=32553.333333333332, ans=0.125
+2024-07-27 15:36:04,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=32553.333333333332, ans=10.0
+2024-07-27 15:36:07,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=32553.333333333332, ans=0.2
+2024-07-27 15:36:09,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32566.666666666668, ans=0.125
+2024-07-27 15:36:11,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=32566.666666666668, ans=0.05
+2024-07-27 15:36:21,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=32580.0, ans=0.125
+2024-07-27 15:36:22,685 INFO [train.py:1114] (1/4) Epoch 3, batch 4000, loss[loss=0.2601, simple_loss=0.3381, pruned_loss=0.09106, over 4779.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3537, pruned_loss=0.1106, over 940986.69 frames. ], batch size: 12, lr: 2.27e-02, grad_scale: 64.0
+2024-07-27 15:36:22,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.70 vs. limit=6.0
+2024-07-27 15:36:27,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=32593.333333333332, ans=0.125
+2024-07-27 15:36:50,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=32633.333333333332, ans=0.0
+2024-07-27 15:36:59,104 INFO [train.py:1114] (1/4) Epoch 3, batch 4050, loss[loss=0.4076, simple_loss=0.4166, pruned_loss=0.1993, over 3581.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3526, pruned_loss=0.1103, over 939470.44 frames. ], batch size: 35, lr: 2.27e-02, grad_scale: 64.0
+2024-07-27 15:37:04,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32660.0, ans=0.125
+2024-07-27 15:37:05,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32660.0, ans=0.1
+2024-07-27 15:37:09,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=32673.333333333332, ans=0.0037666666666666664
+2024-07-27 15:37:11,294 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.463e+01 6.459e+01 6.983e+01 7.697e+01 1.084e+02, threshold=1.397e+02, percent-clipped=0.0
+2024-07-27 15:37:17,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=32686.666666666668, ans=0.0
+2024-07-27 15:37:19,286 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.34 vs. limit=15.0
+2024-07-27 15:37:25,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=32700.0, ans=0.125
+2024-07-27 15:37:35,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=32713.333333333332, ans=0.09899494936611666
+2024-07-27 15:37:37,935 INFO [train.py:1114] (1/4) Epoch 3, batch 4100, loss[loss=0.2937, simple_loss=0.3653, pruned_loss=0.111, over 4892.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.353, pruned_loss=0.111, over 938226.76 frames. ], batch size: 15, lr: 2.27e-02, grad_scale: 64.0
+2024-07-27 15:37:47,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=32740.0, ans=0.0
+2024-07-27 15:37:47,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=32740.0, ans=0.125
+2024-07-27 15:37:58,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.58 vs. limit=22.5
+2024-07-27 15:38:09,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=32780.0, ans=0.125
+2024-07-27 15:38:15,380 INFO [train.py:1114] (1/4) Epoch 3, batch 4150, loss[loss=0.3035, simple_loss=0.3565, pruned_loss=0.1253, over 4828.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3514, pruned_loss=0.1098, over 937849.94 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0
+2024-07-27 15:38:24,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.97 vs. limit=22.5
+2024-07-27 15:38:25,905 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+01 6.227e+01 6.781e+01 8.028e+01 1.229e+02, threshold=1.356e+02, percent-clipped=0.0
+2024-07-27 15:38:32,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=32820.0, ans=0.025
+2024-07-27 15:38:35,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32833.333333333336, ans=0.125
+2024-07-27 15:38:41,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.27 vs. limit=15.0
+2024-07-27 15:38:46,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=32846.666666666664, ans=0.0037289855072463775
+2024-07-27 15:38:47,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=32846.666666666664, ans=0.125
+2024-07-27 15:38:54,826 INFO [train.py:1114] (1/4) Epoch 3, batch 4200, loss[loss=0.3254, simple_loss=0.3842, pruned_loss=0.1333, over 4915.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3515, pruned_loss=0.1092, over 939180.40 frames. ], batch size: 15, lr: 2.26e-02, grad_scale: 64.0
+2024-07-27 15:38:57,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=32860.0, ans=0.025
+2024-07-27 15:39:00,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=32860.0, ans=0.125
+2024-07-27 15:39:01,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=32873.333333333336, ans=0.0
+2024-07-27 15:39:02,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32873.333333333336, ans=0.1
+2024-07-27 15:39:17,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=32900.0, ans=0.09899494936611666
+2024-07-27 15:39:31,211 INFO [train.py:1114] (1/4) Epoch 3, batch 4250, loss[loss=0.2571, simple_loss=0.3239, pruned_loss=0.0951, over 4634.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3522, pruned_loss=0.1097, over 940266.26 frames. ], batch size: 12, lr: 2.26e-02, grad_scale: 64.0
+2024-07-27 15:39:37,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32940.0, ans=0.125
+2024-07-27 15:39:39,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=32940.0, ans=0.025
+2024-07-27 15:39:41,108 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.439e+01 6.186e+01 6.763e+01 7.704e+01 1.140e+02, threshold=1.353e+02, percent-clipped=0.0
+2024-07-27 15:39:43,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.15 vs. limit=6.0
+2024-07-27 15:40:06,767 INFO [train.py:1114] (1/4) Epoch 3, batch 4300, loss[loss=0.2774, simple_loss=0.3532, pruned_loss=0.1008, over 4763.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.353, pruned_loss=0.1102, over 939762.24 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0
+2024-07-27 15:40:13,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=33006.666666666664, ans=0.125
+2024-07-27 15:40:13,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=33006.666666666664, ans=0.125
+2024-07-27 15:40:17,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=33006.666666666664, ans=0.2
+2024-07-27 15:40:18,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.74 vs. limit=15.0
+2024-07-27 15:40:18,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0
+2024-07-27 15:40:42,354 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 15:40:42,786 INFO [train.py:1114] (1/4) Epoch 3, batch 4350, loss[loss=0.3114, simple_loss=0.3739, pruned_loss=0.1245, over 4751.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3532, pruned_loss=0.1099, over 940955.47 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0
+2024-07-27 15:40:52,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=33073.333333333336, ans=0.003679710144927536
+2024-07-27 15:40:54,686 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.277e+01 6.236e+01 6.804e+01 7.780e+01 1.356e+02, threshold=1.361e+02, percent-clipped=1.0
+2024-07-27 15:40:57,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=33086.666666666664, ans=0.125
+2024-07-27 15:41:01,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=33086.666666666664, ans=0.003676811594202899
+2024-07-27 15:41:18,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.67 vs. limit=22.5
+2024-07-27 15:41:19,010 INFO [train.py:1114] (1/4) Epoch 3, batch 4400, loss[loss=0.2363, simple_loss=0.3125, pruned_loss=0.08004, over 4798.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3543, pruned_loss=0.1103, over 940735.85 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0
+2024-07-27 15:41:19,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=33126.666666666664, ans=0.0036681159420289856
+2024-07-27 15:41:32,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=33153.333333333336, ans=0.0
+2024-07-27 15:41:54,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=33180.0, ans=15.0
+2024-07-27 15:41:58,384 INFO [train.py:1114] (1/4) Epoch 3, batch 4450, loss[loss=0.2343, simple_loss=0.3121, pruned_loss=0.07826, over 4949.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3555, pruned_loss=0.1116, over 938895.16 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 64.0
+2024-07-27 15:41:58,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.15 vs. limit=10.0
+2024-07-27 15:42:05,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=33206.666666666664, ans=0.0036507246376811598
+2024-07-27 15:42:08,444 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 6.763e+01 7.448e+01 8.954e+01 1.362e+02, threshold=1.490e+02, percent-clipped=1.0
+2024-07-27 15:42:15,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.55 vs. limit=22.5
+2024-07-27 15:42:22,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=33233.333333333336, ans=0.1
+2024-07-27 15:42:28,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=33246.666666666664, ans=0.025
+2024-07-27 15:42:44,698 INFO [train.py:1114] (1/4) Epoch 3, batch 4500, loss[loss=0.2818, simple_loss=0.3538, pruned_loss=0.1049, over 4748.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3548, pruned_loss=0.1109, over 937924.11 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0
+2024-07-27 15:42:52,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=15.0
+2024-07-27 15:42:53,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.59 vs. limit=15.0
+2024-07-27 15:43:04,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33300.0, ans=0.0
+2024-07-27 15:43:18,848 INFO [train.py:1114] (1/4) Epoch 3, batch 4550, loss[loss=0.3147, simple_loss=0.3707, pruned_loss=0.1294, over 4889.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3556, pruned_loss=0.1111, over 939876.87 frames. ], batch size: 13, lr: 2.25e-02, grad_scale: 64.0
+2024-07-27 15:43:23,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=33326.666666666664, ans=0.2
+2024-07-27 15:43:24,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=33326.666666666664, ans=0.125
+2024-07-27 15:43:29,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33340.0, ans=0.1
+2024-07-27 15:43:30,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0
+2024-07-27 15:43:30,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.619e+01 7.429e+01 8.895e+01 1.429e+02, threshold=1.486e+02, percent-clipped=0.0
+2024-07-27 15:43:34,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=33353.333333333336, ans=0.125
+2024-07-27 15:43:40,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33353.333333333336, ans=0.1
+2024-07-27 15:43:52,604 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.34 vs. limit=22.5
+2024-07-27 15:43:57,989 INFO [train.py:1114] (1/4) Epoch 3, batch 4600, loss[loss=0.3361, simple_loss=0.3985, pruned_loss=0.1368, over 4536.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3549, pruned_loss=0.1106, over 938051.04 frames. ], batch size: 21, lr: 2.25e-02, grad_scale: 64.0
+2024-07-27 15:44:08,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=33406.666666666664, ans=0.0
+2024-07-27 15:44:11,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=33420.0, ans=0.035
+2024-07-27 15:44:11,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=33420.0, ans=0.04949747468305833
+2024-07-27 15:44:29,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33446.666666666664, ans=0.1
+2024-07-27 15:44:30,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=33446.666666666664, ans=0.05
+2024-07-27 15:44:32,059 INFO [train.py:1114] (1/4) Epoch 3, batch 4650, loss[loss=0.3167, simple_loss=0.3792, pruned_loss=0.1271, over 4838.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3556, pruned_loss=0.1104, over 939923.71 frames. ], batch size: 16, lr: 2.24e-02, grad_scale: 64.0
+2024-07-27 15:44:39,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.85 vs. limit=15.0
+2024-07-27 15:44:42,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33473.333333333336, ans=0.125
+2024-07-27 15:44:42,685 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.996e+01 6.580e+01 7.328e+01 8.938e+01 2.315e+02, threshold=1.466e+02, percent-clipped=1.0
+2024-07-27 15:44:42,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33473.333333333336, ans=0.125
+2024-07-27 15:44:46,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=33486.666666666664, ans=0.5
+2024-07-27 15:45:07,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.26 vs. limit=15.0
+2024-07-27 15:45:08,279 INFO [train.py:1114] (1/4) Epoch 3, batch 4700, loss[loss=0.29, simple_loss=0.3376, pruned_loss=0.1213, over 4706.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.356, pruned_loss=0.1115, over 937830.13 frames. 
], batch size: 11, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:11,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33526.666666666664, ans=0.125 +2024-07-27 15:45:19,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.98 vs. limit=10.0 +2024-07-27 15:45:46,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=33593.333333333336, ans=0.125 +2024-07-27 15:45:46,782 INFO [train.py:1114] (1/4) Epoch 3, batch 4750, loss[loss=0.3677, simple_loss=0.4151, pruned_loss=0.1602, over 4419.00 frames. ], tot_loss[loss=0.2918, simple_loss=0.3574, pruned_loss=0.1131, over 935625.66 frames. ], batch size: 21, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:51,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.97 vs. limit=15.0 +2024-07-27 15:45:53,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.45 vs. limit=22.5 +2024-07-27 15:45:57,481 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.473e+01 7.371e+01 8.571e+01 1.233e+02, threshold=1.474e+02, percent-clipped=0.0 +2024-07-27 15:45:57,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33606.666666666664, ans=0.125 +2024-07-27 15:46:00,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.74 vs. limit=22.5 +2024-07-27 15:46:01,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=33620.0, ans=0.025 +2024-07-27 15:46:09,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=33633.333333333336, ans=0.09899494936611666 +2024-07-27 15:46:11,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=33633.333333333336, ans=0.2 +2024-07-27 15:46:11,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=33633.333333333336, ans=0.0 +2024-07-27 15:46:21,154 INFO [train.py:1114] (1/4) Epoch 3, batch 4800, loss[loss=0.3227, simple_loss=0.3896, pruned_loss=0.1279, over 4700.00 frames. ], tot_loss[loss=0.292, simple_loss=0.357, pruned_loss=0.1135, over 933060.92 frames. ], batch size: 13, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:46:24,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=33660.0, ans=0.125 +2024-07-27 15:46:27,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=33673.333333333336, ans=0.125 +2024-07-27 15:46:30,444 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:46:36,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. 
limit=8.0 +2024-07-27 15:46:45,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.27 vs. limit=10.0 +2024-07-27 15:46:56,842 INFO [train.py:1114] (1/4) Epoch 3, batch 4850, loss[loss=0.2803, simple_loss=0.3533, pruned_loss=0.1037, over 4746.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3565, pruned_loss=0.1128, over 932682.24 frames. ], batch size: 14, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:47:00,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=33726.666666666664, ans=0.125 +2024-07-27 15:47:06,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.83 vs. limit=15.0 +2024-07-27 15:47:07,315 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.462e+01 7.308e+01 8.577e+01 1.443e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 15:47:12,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=33753.333333333336, ans=0.0 +2024-07-27 15:47:20,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=33766.666666666664, ans=0.125 +2024-07-27 15:47:27,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=33780.0, ans=0.125 +2024-07-27 15:47:31,285 INFO [train.py:1114] (1/4) Epoch 3, batch 4900, loss[loss=0.3081, simple_loss=0.3799, pruned_loss=0.1181, over 4761.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3563, pruned_loss=0.1118, over 934284.68 frames. ], batch size: 13, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:47:53,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.58 vs. limit=15.0 +2024-07-27 15:47:55,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33833.333333333336, ans=0.125 +2024-07-27 15:47:58,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.51 vs. limit=22.5 +2024-07-27 15:48:01,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=33846.666666666664, ans=0.0 +2024-07-27 15:48:01,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33846.666666666664, ans=0.0 +2024-07-27 15:48:06,512 INFO [train.py:1114] (1/4) Epoch 3, batch 4950, loss[loss=0.4246, simple_loss=0.4375, pruned_loss=0.2058, over 3192.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3574, pruned_loss=0.1127, over 931303.97 frames. 
], batch size: 35, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:07,215 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:48:11,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=33860.0, ans=0.0 +2024-07-27 15:48:12,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=33873.333333333336, ans=0.125 +2024-07-27 15:48:15,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.89 vs. limit=6.0 +2024-07-27 15:48:16,790 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.224e+01 6.458e+01 7.350e+01 8.583e+01 1.982e+02, threshold=1.470e+02, percent-clipped=1.0 +2024-07-27 15:48:28,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=33900.0, ans=0.2 +2024-07-27 15:48:38,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=33913.333333333336, ans=10.0 +2024-07-27 15:48:41,076 INFO [train.py:1114] (1/4) Epoch 3, batch 5000, loss[loss=0.2646, simple_loss=0.3406, pruned_loss=0.09429, over 4658.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.3557, pruned_loss=0.1114, over 935234.26 frames. ], batch size: 14, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:41,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=33926.666666666664, ans=0.125 +2024-07-27 15:48:53,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=33940.0, ans=0.125 +2024-07-27 15:49:05,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.23 vs. limit=15.0 +2024-07-27 15:49:19,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33980.0, ans=0.1 +2024-07-27 15:49:21,817 INFO [train.py:1114] (1/4) Epoch 3, batch 5050, loss[loss=0.2585, simple_loss=0.3282, pruned_loss=0.09439, over 4853.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.355, pruned_loss=0.1107, over 938026.05 frames. ], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:49:40,703 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.504e+01 6.490e+01 6.878e+01 7.828e+01 1.247e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 15:49:40,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=34006.666666666664, ans=0.2 +2024-07-27 15:49:41,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34006.666666666664, ans=0.1 +2024-07-27 15:49:42,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=34006.666666666664, ans=0.025 +2024-07-27 15:49:43,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.01 vs. 
limit=15.0 +2024-07-27 15:49:43,700 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:49:49,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=34020.0, ans=0.2 +2024-07-27 15:50:05,324 INFO [train.py:1114] (1/4) Epoch 3, batch 5100, loss[loss=0.2494, simple_loss=0.3131, pruned_loss=0.09281, over 4775.00 frames. ], tot_loss[loss=0.2891, simple_loss=0.3553, pruned_loss=0.1114, over 935635.25 frames. ], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:50:07,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.70 vs. limit=15.0 +2024-07-27 15:50:07,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34060.0, ans=0.125 +2024-07-27 15:50:09,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=34060.0, ans=0.2 +2024-07-27 15:50:20,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=34073.333333333336, ans=0.04949747468305833 +2024-07-27 15:50:20,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=34073.333333333336, ans=0.07 +2024-07-27 15:50:20,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=34073.333333333336, ans=0.00346231884057971 +2024-07-27 15:50:24,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=34086.666666666664, ans=10.0 +2024-07-27 15:50:36,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.07 vs. limit=15.0 +2024-07-27 15:50:45,817 INFO [train.py:1114] (1/4) Epoch 3, batch 5150, loss[loss=0.3165, simple_loss=0.3937, pruned_loss=0.1197, over 4824.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.3553, pruned_loss=0.1107, over 936408.97 frames. ], batch size: 16, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:50:50,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.24 vs. limit=6.0 +2024-07-27 15:50:51,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=34126.666666666664, ans=0.0 +2024-07-27 15:50:56,019 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.638e+01 7.768e+01 8.989e+01 1.373e+02, threshold=1.554e+02, percent-clipped=0.0 +2024-07-27 15:51:01,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.91 vs. 
limit=15.0 +2024-07-27 15:51:11,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=34166.666666666664, ans=0.0034420289855072476 +2024-07-27 15:51:18,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=34180.0, ans=0.0 +2024-07-27 15:51:21,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=34193.333333333336, ans=0.125 +2024-07-27 15:51:22,074 INFO [train.py:1114] (1/4) Epoch 3, batch 5200, loss[loss=0.3381, simple_loss=0.416, pruned_loss=0.1301, over 4668.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3539, pruned_loss=0.1094, over 936068.96 frames. ], batch size: 14, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:51:25,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=34193.333333333336, ans=0.0 +2024-07-27 15:51:27,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=34193.333333333336, ans=0.125 +2024-07-27 15:51:30,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34206.666666666664, ans=0.125 +2024-07-27 15:51:43,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.59 vs. limit=22.5 +2024-07-27 15:51:46,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=34220.0, ans=0.0 +2024-07-27 15:52:05,530 INFO [train.py:1114] (1/4) Epoch 3, batch 5250, loss[loss=0.2326, simple_loss=0.3184, pruned_loss=0.07337, over 4890.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3527, pruned_loss=0.1083, over 936214.41 frames. ], batch size: 13, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:52:13,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34273.333333333336, ans=0.125 +2024-07-27 15:52:18,630 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.312e+01 6.590e+01 7.442e+01 8.415e+01 1.347e+02, threshold=1.488e+02, percent-clipped=0.0 +2024-07-27 15:52:21,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=34273.333333333336, ans=0.2 +2024-07-27 15:52:37,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.96 vs. limit=22.5 +2024-07-27 15:52:38,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=34313.333333333336, ans=0.0034101449275362314 +2024-07-27 15:52:42,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34313.333333333336, ans=0.125 +2024-07-27 15:52:43,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=34313.333333333336, ans=0.0034101449275362314 +2024-07-27 15:52:45,537 INFO [train.py:1114] (1/4) Epoch 3, batch 5300, loss[loss=0.3712, simple_loss=0.4464, pruned_loss=0.148, over 4630.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3521, pruned_loss=0.1085, over 934557.66 frames. 
], batch size: 16, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:52:45,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=34326.666666666664, ans=0.2 +2024-07-27 15:52:46,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.32 vs. limit=22.5 +2024-07-27 15:52:54,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.14 vs. limit=10.0 +2024-07-27 15:53:21,080 INFO [train.py:1114] (1/4) Epoch 3, batch 5350, loss[loss=0.2576, simple_loss=0.3028, pruned_loss=0.1062, over 4495.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3526, pruned_loss=0.1086, over 936282.39 frames. ], batch size: 10, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:53:27,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=34393.333333333336, ans=0.125 +2024-07-27 15:53:32,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.392e+01 6.457e+01 7.092e+01 8.534e+01 1.457e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 15:53:36,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=34420.0, ans=0.125 +2024-07-27 15:53:49,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=15.0 +2024-07-27 15:53:56,293 INFO [train.py:1114] (1/4) Epoch 3, batch 5400, loss[loss=0.3246, simple_loss=0.3885, pruned_loss=0.1304, over 4350.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.354, pruned_loss=0.1096, over 930425.68 frames. ], batch size: 26, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:06,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=34473.333333333336, ans=0.025 +2024-07-27 15:54:09,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34486.666666666664, ans=0.125 +2024-07-27 15:54:16,643 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:54:22,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34513.333333333336, ans=0.1 +2024-07-27 15:54:27,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=34513.333333333336, ans=0.125 +2024-07-27 15:54:29,515 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:54:30,037 INFO [train.py:1114] (1/4) Epoch 3, batch 5450, loss[loss=0.258, simple_loss=0.3102, pruned_loss=0.1029, over 4709.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3535, pruned_loss=0.1096, over 933325.74 frames. 
], batch size: 11, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:34,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=34526.666666666664, ans=0.025 +2024-07-27 15:54:34,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34526.666666666664, ans=0.1 +2024-07-27 15:54:37,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=34540.0, ans=0.125 +2024-07-27 15:54:37,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=34540.0, ans=0.125 +2024-07-27 15:54:40,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.485e+01 6.790e+01 7.649e+01 9.479e+01 1.674e+02, threshold=1.530e+02, percent-clipped=4.0 +2024-07-27 15:54:42,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=34540.0, ans=0.5 +2024-07-27 15:54:50,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=34553.333333333336, ans=0.1 +2024-07-27 15:54:56,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-27 15:55:07,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=34580.0, ans=0.2 +2024-07-27 15:55:09,458 INFO [train.py:1114] (1/4) Epoch 3, batch 5500, loss[loss=0.3335, simple_loss=0.3909, pruned_loss=0.138, over 4342.00 frames. ], tot_loss[loss=0.2863, simple_loss=0.3532, pruned_loss=0.1097, over 931246.31 frames. ], batch size: 26, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:55:10,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=34593.333333333336, ans=0.025 +2024-07-27 15:55:13,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=34593.333333333336, ans=0.2 +2024-07-27 15:55:26,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=34620.0, ans=0.125 +2024-07-27 15:55:28,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=34620.0, ans=0.125 +2024-07-27 15:55:29,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.50 vs. limit=15.0 +2024-07-27 15:55:29,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.66 vs. limit=15.0 +2024-07-27 15:55:35,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.09 vs. limit=12.0 +2024-07-27 15:55:45,404 INFO [train.py:1114] (1/4) Epoch 3, batch 5550, loss[loss=0.2818, simple_loss=0.3491, pruned_loss=0.1072, over 4703.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3526, pruned_loss=0.1093, over 933275.07 frames. 
], batch size: 12, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:56:03,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=34673.333333333336, ans=0.0 +2024-07-27 15:56:05,368 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.078e+01 6.918e+01 7.816e+01 8.981e+01 2.239e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 15:56:16,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=34700.0, ans=0.025 +2024-07-27 15:56:17,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.62 vs. limit=22.5 +2024-07-27 15:56:28,880 INFO [train.py:1114] (1/4) Epoch 3, batch 5600, loss[loss=0.3243, simple_loss=0.3822, pruned_loss=0.1332, over 4744.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3541, pruned_loss=0.11, over 934759.99 frames. ], batch size: 14, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:56:40,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=34740.0, ans=0.2 +2024-07-27 15:56:46,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=34753.333333333336, ans=15.0 +2024-07-27 15:56:50,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=34766.666666666664, ans=0.05 +2024-07-27 15:56:54,785 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:57:00,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=34780.0, ans=0.09899494936611666 +2024-07-27 15:57:02,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34780.0, ans=0.125 +2024-07-27 15:57:03,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34780.0, ans=0.1 +2024-07-27 15:57:04,494 INFO [train.py:1114] (1/4) Epoch 3, batch 5650, loss[loss=0.3115, simple_loss=0.3743, pruned_loss=0.1244, over 4557.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3515, pruned_loss=0.1084, over 937441.79 frames. ], batch size: 21, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:57:06,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=34793.333333333336, ans=0.125 +2024-07-27 15:57:12,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34793.333333333336, ans=0.125 +2024-07-27 15:57:12,433 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.36 vs. 
limit=15.0 +2024-07-27 15:57:13,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=34806.666666666664, ans=0.125 +2024-07-27 15:57:19,459 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+01 6.421e+01 6.946e+01 8.141e+01 1.354e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-27 15:57:28,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=34820.0, ans=0.125 +2024-07-27 15:57:31,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=15.0 +2024-07-27 15:57:32,527 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.29 vs. limit=22.5 +2024-07-27 15:57:43,147 INFO [train.py:1114] (1/4) Epoch 3, batch 5700, loss[loss=0.2885, simple_loss=0.3603, pruned_loss=0.1083, over 4700.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3525, pruned_loss=0.1091, over 938611.28 frames. ], batch size: 13, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:57:59,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34886.666666666664, ans=0.1 +2024-07-27 15:58:01,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.83 vs. limit=22.5 +2024-07-27 15:58:14,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34913.333333333336, ans=0.125 +2024-07-27 15:58:18,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.89 vs. limit=6.0 +2024-07-27 15:58:19,542 INFO [train.py:1114] (1/4) Epoch 3, batch 5750, loss[loss=0.3163, simple_loss=0.3934, pruned_loss=0.1196, over 4691.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3535, pruned_loss=0.1093, over 938560.35 frames. ], batch size: 19, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:58:20,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=34926.666666666664, ans=0.0 +2024-07-27 15:58:30,751 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.375e+01 6.773e+01 7.385e+01 8.434e+01 1.352e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 15:58:35,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=34953.333333333336, ans=0.125 +2024-07-27 15:58:45,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=34966.666666666664, ans=0.2 +2024-07-27 15:58:49,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=15.0 +2024-07-27 15:58:51,724 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:58:56,480 INFO [train.py:1114] (1/4) Epoch 3, batch 5800, loss[loss=0.3225, simple_loss=0.3837, pruned_loss=0.1307, over 4701.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.355, pruned_loss=0.1103, over 937169.56 frames. 
], batch size: 19, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:58:58,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.64 vs. limit=15.0 +2024-07-27 15:59:00,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.01 vs. limit=6.0 +2024-07-27 15:59:02,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=35006.666666666664, ans=0.125 +2024-07-27 15:59:12,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=35020.0, ans=0.0 +2024-07-27 15:59:13,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.83 vs. limit=15.0 +2024-07-27 15:59:20,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=35033.333333333336, ans=0.125 +2024-07-27 15:59:23,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35046.666666666664, ans=0.125 +2024-07-27 15:59:24,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.72 vs. limit=15.0 +2024-07-27 15:59:27,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.34 vs. limit=10.0 +2024-07-27 15:59:28,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=35046.666666666664, ans=0.2 +2024-07-27 15:59:30,545 INFO [train.py:1114] (1/4) Epoch 3, batch 5850, loss[loss=0.334, simple_loss=0.3911, pruned_loss=0.1385, over 4616.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3543, pruned_loss=0.1102, over 937879.15 frames. ], batch size: 21, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:59:34,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35060.0, ans=0.125 +2024-07-27 15:59:45,118 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.773e+01 7.644e+01 9.466e+01 1.883e+02, threshold=1.529e+02, percent-clipped=1.0 +2024-07-27 16:00:03,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.66 vs. limit=22.5 +2024-07-27 16:00:12,755 INFO [train.py:1114] (1/4) Epoch 3, batch 5900, loss[loss=0.2981, simple_loss=0.3741, pruned_loss=0.111, over 4669.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3547, pruned_loss=0.1104, over 937905.06 frames. 
], batch size: 15, lr: 2.19e-02, grad_scale: 16.0 +2024-07-27 16:00:12,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=35126.666666666664, ans=0.2 +2024-07-27 16:00:16,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=35126.666666666664, ans=0.025 +2024-07-27 16:00:20,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35140.0, ans=0.125 +2024-07-27 16:00:25,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=35153.333333333336, ans=0.025 +2024-07-27 16:00:29,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.96 vs. limit=10.0 +2024-07-27 16:00:52,881 INFO [train.py:1114] (1/4) Epoch 3, batch 5950, loss[loss=0.2972, simple_loss=0.3749, pruned_loss=0.1097, over 4702.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3541, pruned_loss=0.1094, over 939764.61 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 16.0 +2024-07-27 16:00:52,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35193.333333333336, ans=0.1 +2024-07-27 16:00:57,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=35193.333333333336, ans=0.125 +2024-07-27 16:01:05,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.79 vs. limit=15.0 +2024-07-27 16:01:06,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.498e+01 6.928e+01 7.896e+01 9.145e+01 1.429e+02, threshold=1.579e+02, percent-clipped=0.0 +2024-07-27 16:01:12,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.28 vs. limit=15.0 +2024-07-27 16:01:13,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.41 vs. limit=22.5 +2024-07-27 16:01:29,015 INFO [train.py:1114] (1/4) Epoch 3, batch 6000, loss[loss=0.3134, simple_loss=0.3682, pruned_loss=0.1293, over 4114.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.353, pruned_loss=0.109, over 936860.20 frames. ], batch size: 25, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:01:29,015 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 16:01:37,439 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.7664, 3.6369, 3.7334, 4.3658], device='cuda:1') +2024-07-27 16:01:40,727 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2286, simple_loss=0.328, pruned_loss=0.06459, over 944034.00 frames. 
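The `grad_scale` values in the train records above are the automatic-mixed-precision loss-scaling factor: 64.0 through most of these batches, backing off to 32.0 (batch 5300) and 16.0 (batch 5900), then recovering to 32.0 (batch 6000). A minimal sketch of that mechanism, assuming PyTorch's stock `torch.cuda.amp` API on a CUDA device rather than icefall's exact `train.py` internals (the linear `model`, `train_step`, and the MSE loss are illustrative stand-ins):

```python
import torch

model = torch.nn.Linear(80, 512).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=2.2e-2)

# init_scale mirrors the grad_scale: 64.0 logged earlier in the epoch. When a
# step produces inf/nan gradients, update() multiplies the scale by
# backoff_factor (0.5 by default), matching the logged 64.0 -> 32.0 -> 16.0.
scaler = torch.cuda.amp.GradScaler(init_scale=64.0)

def train_step(feats: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():    # forward pass in reduced precision
        loss = torch.nn.functional.mse_loss(model(feats), target)
    scaler.scale(loss).backward()      # backward on the scaled loss
    scaler.step(optimizer)             # unscales grads; skips the step on overflow
    scaler.update()                    # backs off or regrows grad_scale
    return loss.detach()
```

With GradScaler's default `growth_interval` of 2000 overflow-free steps the scale is doubled again, which would account for the recovery from 16.0 back to 32.0 by batch 6000.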
+2024-07-27 16:01:40,728 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 16:01:47,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=35273.333333333336, ans=0.025 +2024-07-27 16:01:49,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=35273.333333333336, ans=0.125 +2024-07-27 16:02:08,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.81 vs. limit=15.0 +2024-07-27 16:02:13,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=35313.333333333336, ans=0.125 +2024-07-27 16:02:14,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=35313.333333333336, ans=0.125 +2024-07-27 16:02:14,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=35313.333333333336, ans=0.0 +2024-07-27 16:02:16,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.37 vs. limit=15.0 +2024-07-27 16:02:17,518 INFO [train.py:1114] (1/4) Epoch 3, batch 6050, loss[loss=0.2679, simple_loss=0.3364, pruned_loss=0.09973, over 4774.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3532, pruned_loss=0.1091, over 938181.38 frames. ], batch size: 12, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:02:17,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=35326.666666666664, ans=0.035 +2024-07-27 16:02:19,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=35326.666666666664, ans=0.04949747468305833 +2024-07-27 16:02:20,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.89 vs. limit=15.0 +2024-07-27 16:02:29,017 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.303e+01 6.741e+01 7.557e+01 8.762e+01 1.550e+02, threshold=1.511e+02, percent-clipped=0.0 +2024-07-27 16:02:29,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=35340.0, ans=0.125 +2024-07-27 16:02:45,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=35366.666666666664, ans=0.125 +2024-07-27 16:02:52,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=35380.0, ans=0.125 +2024-07-27 16:02:58,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35380.0, ans=0.1 +2024-07-27 16:02:59,368 INFO [train.py:1114] (1/4) Epoch 3, batch 6100, loss[loss=0.2959, simple_loss=0.3694, pruned_loss=0.1112, over 4675.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3522, pruned_loss=0.1087, over 937820.26 frames. 
], batch size: 15, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:03:05,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35406.666666666664, ans=0.1 +2024-07-27 16:03:06,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=35406.666666666664, ans=0.025 +2024-07-27 16:03:17,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.80 vs. limit=15.0 +2024-07-27 16:03:18,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.01 vs. limit=22.5 +2024-07-27 16:03:18,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.24 vs. limit=15.0 +2024-07-27 16:03:21,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=35433.333333333336, ans=0.125 +2024-07-27 16:03:31,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=35446.666666666664, ans=0.025 +2024-07-27 16:03:32,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35460.0, ans=0.1 +2024-07-27 16:03:33,285 INFO [train.py:1114] (1/4) Epoch 3, batch 6150, loss[loss=0.302, simple_loss=0.3626, pruned_loss=0.1207, over 3371.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3529, pruned_loss=0.109, over 936990.13 frames. ], batch size: 35, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:03:42,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=35473.333333333336, ans=8.0 +2024-07-27 16:03:46,858 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.689e+01 7.561e+01 9.895e+01 1.847e+02, threshold=1.512e+02, percent-clipped=5.0 +2024-07-27 16:03:49,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35486.666666666664, ans=0.125 +2024-07-27 16:03:52,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=35486.666666666664, ans=0.003155072463768116 +2024-07-27 16:04:02,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35513.333333333336, ans=0.125 +2024-07-27 16:04:09,276 INFO [train.py:1114] (1/4) Epoch 3, batch 6200, loss[loss=0.2754, simple_loss=0.3488, pruned_loss=0.101, over 4748.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3533, pruned_loss=0.1094, over 936720.97 frames. 
], batch size: 14, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:04:16,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=35540.0, ans=0.125 +2024-07-27 16:04:26,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35553.333333333336, ans=0.0 +2024-07-27 16:04:30,802 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:04:31,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35566.666666666664, ans=0.1 +2024-07-27 16:04:31,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=35566.666666666664, ans=0.125 +2024-07-27 16:04:43,465 INFO [train.py:1114] (1/4) Epoch 3, batch 6250, loss[loss=0.2825, simple_loss=0.3628, pruned_loss=0.1011, over 4817.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3544, pruned_loss=0.1103, over 933355.11 frames. ], batch size: 14, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:04:45,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35593.333333333336, ans=0.0 +2024-07-27 16:04:49,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=35606.666666666664, ans=0.0 +2024-07-27 16:04:54,894 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.517e+01 6.327e+01 7.433e+01 8.878e+01 1.317e+02, threshold=1.487e+02, percent-clipped=0.0 +2024-07-27 16:05:03,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35620.0, ans=0.0 +2024-07-27 16:05:03,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=35620.0, ans=0.125 +2024-07-27 16:05:04,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0 +2024-07-27 16:05:10,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=35633.333333333336, ans=0.2 +2024-07-27 16:05:19,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=35646.666666666664, ans=0.125 +2024-07-27 16:05:20,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35646.666666666664, ans=0.125 +2024-07-27 16:05:26,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35660.0, ans=0.125 +2024-07-27 16:05:27,310 INFO [train.py:1114] (1/4) Epoch 3, batch 6300, loss[loss=0.3136, simple_loss=0.3573, pruned_loss=0.135, over 4512.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3551, pruned_loss=0.1113, over 929930.39 frames. 
], batch size: 10, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:05:35,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35673.333333333336, ans=0.125
+2024-07-27 16:05:35,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35673.333333333336, ans=0.1
+2024-07-27 16:05:36,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=35673.333333333336, ans=0.0
+2024-07-27 16:05:42,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35686.666666666664, ans=0.1
+2024-07-27 16:05:43,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=35686.666666666664, ans=0.2
+2024-07-27 16:05:54,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35713.333333333336, ans=0.1
+2024-07-27 16:05:58,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=35713.333333333336, ans=0.0
+2024-07-27 16:05:59,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.14 vs. limit=22.5
+2024-07-27 16:06:00,823 INFO [train.py:1114] (1/4) Epoch 3, batch 6350, loss[loss=0.261, simple_loss=0.339, pruned_loss=0.09151, over 4557.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3546, pruned_loss=0.1112, over 933814.28 frames. ], batch size: 21, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:06:12,644 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.099e+01 7.124e+01 7.949e+01 9.215e+01 1.375e+02, threshold=1.590e+02, percent-clipped=0.0
+2024-07-27 16:06:24,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.69 vs. limit=15.0
+2024-07-27 16:06:36,558 INFO [train.py:1114] (1/4) Epoch 3, batch 6400, loss[loss=0.3028, simple_loss=0.3665, pruned_loss=0.1196, over 4635.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3528, pruned_loss=0.1103, over 935105.55 frames. ], batch size: 13, lr: 2.18e-02, grad_scale: 32.0
+2024-07-27 16:06:41,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=35793.333333333336, ans=0.0
+2024-07-27 16:06:50,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=35820.0, ans=0.125
+2024-07-27 16:06:55,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.55 vs. limit=15.0
+2024-07-27 16:07:10,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=35846.666666666664, ans=0.003076811594202899
+2024-07-27 16:07:14,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=35846.666666666664, ans=0.125
+2024-07-27 16:07:15,633 INFO [train.py:1114] (1/4) Epoch 3, batch 6450, loss[loss=0.3216, simple_loss=0.3767, pruned_loss=0.1332, over 4380.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3524, pruned_loss=0.1092, over 938559.90 frames. ], batch size: 21, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:07:24,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=35873.333333333336, ans=0.0
+2024-07-27 16:07:26,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=35873.333333333336, ans=0.2
+2024-07-27 16:07:32,074 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 7.043e+01 8.051e+01 9.807e+01 1.613e+02, threshold=1.610e+02, percent-clipped=2.0
+2024-07-27 16:07:32,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=35873.333333333336, ans=0.125
+2024-07-27 16:07:33,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=35873.333333333336, ans=15.0
+2024-07-27 16:07:40,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=35900.0, ans=0.0
+2024-07-27 16:07:41,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.31 vs. limit=10.0
+2024-07-27 16:07:43,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.73 vs. limit=6.0
+2024-07-27 16:07:45,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=35900.0, ans=10.0
+2024-07-27 16:07:53,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=35913.333333333336, ans=0.125
+2024-07-27 16:07:55,528 INFO [train.py:1114] (1/4) Epoch 3, batch 6500, loss[loss=0.3687, simple_loss=0.4008, pruned_loss=0.1683, over 3378.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3505, pruned_loss=0.1077, over 939623.14 frames. ], batch size: 35, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:08:02,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=35940.0, ans=0.125
+2024-07-27 16:08:03,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=35940.0, ans=0.125
+2024-07-27 16:08:11,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=35953.333333333336, ans=0.125
+2024-07-27 16:08:15,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=35966.666666666664, ans=0.5
+2024-07-27 16:08:18,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=35966.666666666664, ans=0.2
+2024-07-27 16:08:23,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35980.0, ans=0.125
+2024-07-27 16:08:25,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0
+2024-07-27 16:08:28,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=35993.333333333336, ans=0.2
+2024-07-27 16:08:29,004 INFO [train.py:1114] (1/4) Epoch 3, batch 6550, loss[loss=0.2708, simple_loss=0.3395, pruned_loss=0.101, over 4803.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3513, pruned_loss=0.1079, over 942590.49 frames. ], batch size: 11, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:08:33,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=35993.333333333336, ans=0.07
+2024-07-27 16:08:38,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=35993.333333333336, ans=0.0
+2024-07-27 16:08:43,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.11 vs. limit=22.5
+2024-07-27 16:08:44,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.305e+01 6.734e+01 7.453e+01 8.745e+01 1.645e+02, threshold=1.491e+02, percent-clipped=1.0
+2024-07-27 16:08:58,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36033.333333333336, ans=0.1
+2024-07-27 16:08:58,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36033.333333333336, ans=0.1
+2024-07-27 16:08:59,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=15.0
+2024-07-27 16:09:00,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36046.666666666664, ans=0.0
+2024-07-27 16:09:07,346 INFO [train.py:1114] (1/4) Epoch 3, batch 6600, loss[loss=0.2913, simple_loss=0.3439, pruned_loss=0.1194, over 4932.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3507, pruned_loss=0.1073, over 944464.72 frames. ], batch size: 14, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:09:24,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.17 vs. limit=22.5
+2024-07-27 16:09:33,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=36100.0, ans=0.125
+2024-07-27 16:09:38,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=36113.333333333336, ans=0.125
+2024-07-27 16:09:42,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36126.666666666664, ans=0.125
+2024-07-27 16:09:43,502 INFO [train.py:1114] (1/4) Epoch 3, batch 6650, loss[loss=0.3578, simple_loss=0.4049, pruned_loss=0.1554, over 4632.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3516, pruned_loss=0.1084, over 942988.95 frames. ], batch size: 17, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:09:45,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36126.666666666664, ans=0.1
+2024-07-27 16:09:51,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=36140.0, ans=0.2
+2024-07-27 16:09:51,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36140.0, ans=0.125
+2024-07-27 16:09:54,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.831e+01 8.168e+01 1.025e+02 1.593e+02, threshold=1.634e+02, percent-clipped=2.0
+2024-07-27 16:10:09,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=36153.333333333336, ans=0.2
+2024-07-27 16:10:22,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=36180.0, ans=0.2
+2024-07-27 16:10:23,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=36193.333333333336, ans=0.0030014492753623187
+2024-07-27 16:10:24,376 INFO [train.py:1114] (1/4) Epoch 3, batch 6700, loss[loss=0.3022, simple_loss=0.3615, pruned_loss=0.1215, over 4733.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3512, pruned_loss=0.108, over 941636.25 frames. ], batch size: 19, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:10:26,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=36193.333333333336, ans=0.125
+2024-07-27 16:10:32,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=36206.666666666664, ans=0.125
+2024-07-27 16:10:33,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0
+2024-07-27 16:10:38,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=36220.0, ans=0.0
+2024-07-27 16:10:50,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=36233.333333333336, ans=0.025
+2024-07-27 16:10:55,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36246.666666666664, ans=0.125
+2024-07-27 16:10:57,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=36246.666666666664, ans=0.125
+2024-07-27 16:10:59,151 INFO [train.py:1114] (1/4) Epoch 3, batch 6750, loss[loss=0.269, simple_loss=0.3415, pruned_loss=0.09829, over 4259.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3506, pruned_loss=0.107, over 939824.34 frames. ], batch size: 25, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:11:00,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=36260.0, ans=0.2
+2024-07-27 16:11:07,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=36273.333333333336, ans=0.125
+2024-07-27 16:11:10,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.207e+01 6.852e+01 7.798e+01 8.780e+01 1.253e+02, threshold=1.560e+02, percent-clipped=0.0
+2024-07-27 16:11:14,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=36286.666666666664, ans=0.0
+2024-07-27 16:11:18,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36286.666666666664, ans=0.1
+2024-07-27 16:11:25,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36313.333333333336, ans=0.125
+2024-07-27 16:11:26,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.93 vs. limit=22.5
+2024-07-27 16:11:33,070 INFO [train.py:1114] (1/4) Epoch 3, batch 6800, loss[loss=0.2737, simple_loss=0.3504, pruned_loss=0.09844, over 4634.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3511, pruned_loss=0.1072, over 938072.30 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:11:37,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36326.666666666664, ans=0.125
+2024-07-27 16:11:40,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=36326.666666666664, ans=0.125
+2024-07-27 16:11:54,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=36366.666666666664, ans=0.0029637681159420297
+2024-07-27 16:12:08,429 INFO [train.py:1114] (1/4) Epoch 3, batch 6850, loss[loss=0.3044, simple_loss=0.3617, pruned_loss=0.1235, over 4683.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3493, pruned_loss=0.1062, over 939897.41 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:12:10,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=36393.333333333336, ans=0.02
+2024-07-27 16:12:12,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36393.333333333336, ans=0.125
+2024-07-27 16:12:19,975 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.863e+01 7.550e+01 8.711e+01 1.509e+02, threshold=1.510e+02, percent-clipped=0.0
+2024-07-27 16:12:28,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36420.0, ans=0.125
+2024-07-27 16:12:44,068 INFO [train.py:1114] (1/4) Epoch 3, batch 6900, loss[loss=0.2701, simple_loss=0.3159, pruned_loss=0.1121, over 4969.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3495, pruned_loss=0.1064, over 942205.62 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:12:49,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36460.0, ans=0.125
+2024-07-27 16:12:50,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=36473.333333333336, ans=0.2
+2024-07-27 16:12:53,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.85 vs. limit=10.0
+2024-07-27 16:12:54,951 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:12:57,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.55 vs. limit=15.0
+2024-07-27 16:13:02,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.63 vs. limit=10.0
+2024-07-27 16:13:07,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36500.0, ans=0.1
+2024-07-27 16:13:16,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36513.333333333336, ans=0.1
+2024-07-27 16:13:17,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36513.333333333336, ans=0.1
+2024-07-27 16:13:18,508 INFO [train.py:1114] (1/4) Epoch 3, batch 6950, loss[loss=0.2424, simple_loss=0.3006, pruned_loss=0.09214, over 4485.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3499, pruned_loss=0.1069, over 939427.26 frames. ], batch size: 10, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:13:23,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=22.5
+2024-07-27 16:13:32,729 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.730e+01 8.056e+01 9.531e+01 1.380e+02, threshold=1.611e+02, percent-clipped=0.0
+2024-07-27 16:13:54,235 INFO [train.py:1114] (1/4) Epoch 3, batch 7000, loss[loss=0.326, simple_loss=0.3867, pruned_loss=0.1326, over 4615.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3494, pruned_loss=0.1069, over 937726.46 frames. ], batch size: 17, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:14:08,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36620.0, ans=0.0
+2024-07-27 16:14:28,479 INFO [train.py:1114] (1/4) Epoch 3, batch 7050, loss[loss=0.2844, simple_loss=0.3601, pruned_loss=0.1044, over 4686.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3498, pruned_loss=0.1067, over 941221.57 frames. ], batch size: 19, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:14:48,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=36673.333333333336, ans=0.07
+2024-07-27 16:14:50,688 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.466e+01 6.926e+01 7.603e+01 8.954e+01 1.226e+02, threshold=1.521e+02, percent-clipped=0.0
+2024-07-27 16:15:02,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. limit=6.0
+2024-07-27 16:15:13,273 INFO [train.py:1114] (1/4) Epoch 3, batch 7100, loss[loss=0.3377, simple_loss=0.4062, pruned_loss=0.1346, over 4808.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3508, pruned_loss=0.1077, over 935656.31 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:15:16,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=36726.666666666664, ans=0.05
+2024-07-27 16:15:17,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=36726.666666666664, ans=0.002885507246376812
+2024-07-27 16:15:22,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36740.0, ans=0.125
+2024-07-27 16:15:24,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.46 vs. limit=22.5
+2024-07-27 16:15:37,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=36766.666666666664, ans=10.0
+2024-07-27 16:15:51,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=36793.333333333336, ans=0.125
+2024-07-27 16:15:51,805 INFO [train.py:1114] (1/4) Epoch 3, batch 7150, loss[loss=0.3445, simple_loss=0.3871, pruned_loss=0.1509, over 4488.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.348, pruned_loss=0.1062, over 936815.37 frames. ], batch size: 21, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:16:02,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=36806.666666666664, ans=0.125
+2024-07-27 16:16:07,871 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.10 vs. limit=15.0
+2024-07-27 16:16:07,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.541e+01 6.708e+01 7.597e+01 9.458e+01 1.380e+02, threshold=1.519e+02, percent-clipped=0.0
+2024-07-27 16:16:09,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.18 vs. limit=15.0
+2024-07-27 16:16:13,494 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:16:16,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=36833.333333333336, ans=0.05
+2024-07-27 16:16:19,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=36833.333333333336, ans=0.002862318840579709
+2024-07-27 16:16:21,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=15.0
+2024-07-27 16:16:28,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36846.666666666664, ans=0.125
+2024-07-27 16:16:28,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=36846.666666666664, ans=0.125
+2024-07-27 16:16:29,421 INFO [train.py:1114] (1/4) Epoch 3, batch 7200, loss[loss=0.2841, simple_loss=0.3567, pruned_loss=0.1057, over 4806.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3498, pruned_loss=0.1068, over 937142.68 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 32.0
+2024-07-27 16:16:29,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=36860.0, ans=0.05
+2024-07-27 16:16:30,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=36860.0, ans=0.2
+2024-07-27 16:16:44,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=36873.333333333336, ans=0.2
+2024-07-27 16:16:47,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=36886.666666666664, ans=0.2
+2024-07-27 16:16:53,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=36900.0, ans=0.125
+2024-07-27 16:16:56,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=36900.0, ans=0.025
+2024-07-27 16:16:59,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=36913.333333333336, ans=0.2
+2024-07-27 16:17:06,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=36913.333333333336, ans=0.2
+2024-07-27 16:17:11,409 INFO [train.py:1114] (1/4) Epoch 3, batch 7250, loss[loss=0.2411, simple_loss=0.3135, pruned_loss=0.08433, over 4852.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3492, pruned_loss=0.1064, over 939027.73 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:13,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=36926.666666666664, ans=0.07
+2024-07-27 16:17:16,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=36926.666666666664, ans=0.025
+2024-07-27 16:17:22,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=36940.0, ans=0.125
+2024-07-27 16:17:23,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.548e+01 7.607e+01 9.272e+01 1.593e+02, threshold=1.521e+02, percent-clipped=2.0
+2024-07-27 16:17:31,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.46 vs. limit=15.0
+2024-07-27 16:17:38,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=36980.0, ans=0.025
+2024-07-27 16:17:44,570 INFO [train.py:1114] (1/4) Epoch 3, batch 7300, loss[loss=0.2563, simple_loss=0.3271, pruned_loss=0.09276, over 4845.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3487, pruned_loss=0.1063, over 939100.85 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:49,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.04 vs. limit=10.0
+2024-07-27 16:18:04,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37033.333333333336, ans=0.1
+2024-07-27 16:18:04,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37033.333333333336, ans=0.125
+2024-07-27 16:18:06,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.28 vs. limit=15.0
+2024-07-27 16:18:06,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.80 vs. limit=15.0
+2024-07-27 16:18:08,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=37033.333333333336, ans=0.125
+2024-07-27 16:18:17,358 INFO [train.py:1114] (1/4) Epoch 3, batch 7350, loss[loss=0.2928, simple_loss=0.3596, pruned_loss=0.113, over 4640.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3498, pruned_loss=0.1067, over 938289.68 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:18:18,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=37060.0, ans=0.125
+2024-07-27 16:18:24,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=39.30 vs. limit=22.5
+2024-07-27 16:18:24,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=37073.333333333336, ans=0.125
+2024-07-27 16:18:29,217 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.542e+01 6.884e+01 7.906e+01 1.038e+02 1.585e+02, threshold=1.581e+02, percent-clipped=4.0
+2024-07-27 16:18:29,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=37073.333333333336, ans=0.125
+2024-07-27 16:18:42,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=37100.0, ans=0.125
+2024-07-27 16:18:44,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37113.333333333336, ans=0.125
+2024-07-27 16:18:48,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.18 vs. limit=15.0
+2024-07-27 16:18:49,932 INFO [train.py:1114] (1/4) Epoch 3, batch 7400, loss[loss=0.2467, simple_loss=0.329, pruned_loss=0.08219, over 4692.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3497, pruned_loss=0.1062, over 939368.14 frames. ], batch size: 13, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:18:53,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=37126.666666666664, ans=0.0
+2024-07-27 16:19:06,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=37153.333333333336, ans=0.025
+2024-07-27 16:19:07,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=37153.333333333336, ans=0.1
+2024-07-27 16:19:07,630 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:19:16,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.81 vs. limit=22.5
+2024-07-27 16:19:22,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=12.0
+2024-07-27 16:19:22,924 INFO [train.py:1114] (1/4) Epoch 3, batch 7450, loss[loss=0.2459, simple_loss=0.3219, pruned_loss=0.08498, over 4612.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3482, pruned_loss=0.1059, over 937201.85 frames. ], batch size: 11, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:19:23,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.64 vs. limit=15.0
+2024-07-27 16:19:23,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37193.333333333336, ans=0.125
+2024-07-27 16:19:24,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=37193.333333333336, ans=0.125
+2024-07-27 16:19:25,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=37193.333333333336, ans=0.0
+2024-07-27 16:19:25,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=37193.333333333336, ans=0.125
+2024-07-27 16:19:26,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.23 vs. limit=15.0
+2024-07-27 16:19:27,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37193.333333333336, ans=0.125
+2024-07-27 16:19:34,462 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.772e+01 7.758e+01 9.808e+01 2.086e+02, threshold=1.552e+02, percent-clipped=2.0
+2024-07-27 16:19:34,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=37206.666666666664, ans=0.0
+2024-07-27 16:19:35,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=37220.0, ans=0.025
+2024-07-27 16:19:51,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.95 vs. limit=22.5
+2024-07-27 16:19:55,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.69 vs. limit=15.0
+2024-07-27 16:19:55,859 INFO [train.py:1114] (1/4) Epoch 3, batch 7500, loss[loss=0.4055, simple_loss=0.4183, pruned_loss=0.1963, over 3452.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3493, pruned_loss=0.1066, over 935383.50 frames. ], batch size: 35, lr: 2.13e-02, grad_scale: 16.0
+2024-07-27 16:20:09,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=37286.666666666664, ans=0.0
+2024-07-27 16:20:17,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.43 vs. limit=22.5
+2024-07-27 16:20:23,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=37313.333333333336, ans=0.125
+2024-07-27 16:20:26,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=37313.333333333336, ans=0.025
+2024-07-27 16:20:27,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=37313.333333333336, ans=0.04949747468305833
+2024-07-27 16:20:27,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0
+2024-07-27 16:20:29,137 INFO [train.py:1114] (1/4) Epoch 3, batch 7550, loss[loss=0.2642, simple_loss=0.3402, pruned_loss=0.09405, over 4646.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3503, pruned_loss=0.1072, over 935716.23 frames. ], batch size: 17, lr: 2.13e-02, grad_scale: 16.0
+2024-07-27 16:20:31,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=37326.666666666664, ans=0.125
+2024-07-27 16:21:31,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=37326.666666666664, ans=0.0
+2024-07-27 16:21:39,446 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.630e+01 6.808e+01 7.761e+01 9.046e+01 1.679e+02, threshold=1.552e+02, percent-clipped=1.0
+2024-07-27 16:21:41,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37353.333333333336, ans=0.1
+2024-07-27 16:21:51,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37366.666666666664, ans=0.1
+2024-07-27 16:21:51,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37366.666666666664, ans=0.0
+2024-07-27 16:21:57,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=37380.0, ans=0.0
+2024-07-27 16:21:59,522 INFO [train.py:1114] (1/4) Epoch 3, batch 7600, loss[loss=0.2661, simple_loss=0.3353, pruned_loss=0.09848, over 4825.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3497, pruned_loss=0.1065, over 937961.35 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:22:08,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=37406.666666666664, ans=0.0
+2024-07-27 16:22:13,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=37420.0, ans=0.2
+2024-07-27 16:22:23,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0
+2024-07-27 16:22:25,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37433.333333333336, ans=0.125
+2024-07-27 16:22:36,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.26 vs. limit=15.0
+2024-07-27 16:22:40,209 INFO [train.py:1114] (1/4) Epoch 3, batch 7650, loss[loss=0.2866, simple_loss=0.3593, pruned_loss=0.1069, over 4930.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3497, pruned_loss=0.1069, over 936924.68 frames. ], batch size: 12, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:22:48,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=37460.0, ans=0.07
+2024-07-27 16:23:02,039 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.597e+01 7.183e+01 8.812e+01 1.036e+02 1.540e+02, threshold=1.762e+02, percent-clipped=0.0
+2024-07-27 16:23:29,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=37500.0, ans=0.035
+2024-07-27 16:23:30,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.94 vs. limit=15.0
+2024-07-27 16:23:31,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=37500.0, ans=0.125
+2024-07-27 16:23:32,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0
+2024-07-27 16:23:36,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=37513.333333333336, ans=0.125
+2024-07-27 16:23:39,139 INFO [train.py:1114] (1/4) Epoch 3, batch 7700, loss[loss=0.2764, simple_loss=0.3602, pruned_loss=0.09627, over 4689.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3502, pruned_loss=0.1073, over 934589.33 frames. ], batch size: 13, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:23:39,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=37526.666666666664, ans=0.025
+2024-07-27 16:23:49,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37526.666666666664, ans=0.125
+2024-07-27 16:23:58,112 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:23:58,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=37540.0, ans=0.125
+2024-07-27 16:23:58,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=37540.0, ans=0.025
+2024-07-27 16:24:00,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37553.333333333336, ans=0.125
+2024-07-27 16:24:01,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37553.333333333336, ans=0.125
+2024-07-27 16:24:04,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0
+2024-07-27 16:24:06,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37553.333333333336, ans=0.1
+2024-07-27 16:24:10,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=15.0
+2024-07-27 16:24:15,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=37566.666666666664, ans=0.125
+2024-07-27 16:24:22,435 INFO [train.py:1114] (1/4) Epoch 3, batch 7750, loss[loss=0.2618, simple_loss=0.3437, pruned_loss=0.08993, over 4929.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3515, pruned_loss=0.1073, over 935873.53 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:24:41,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=37593.333333333336, ans=0.002697101449275362
+2024-07-27 16:24:44,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37593.333333333336, ans=0.1
+2024-07-27 16:24:45,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=37593.333333333336, ans=0.0
+2024-07-27 16:24:52,945 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.584e+01 7.302e+01 8.614e+01 1.487e+02, threshold=1.460e+02, percent-clipped=0.0
+2024-07-27 16:24:59,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=37620.0, ans=0.2
+2024-07-27 16:25:24,448 INFO [train.py:1114] (1/4) Epoch 3, batch 7800, loss[loss=0.2711, simple_loss=0.3425, pruned_loss=0.09991, over 4666.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3503, pruned_loss=0.1065, over 937379.15 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:25:36,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37673.333333333336, ans=0.125
+2024-07-27 16:25:37,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=37686.666666666664, ans=0.125
+2024-07-27 16:25:52,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.66 vs. limit=15.0
+2024-07-27 16:25:57,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37713.333333333336, ans=0.0
+2024-07-27 16:26:06,467 INFO [train.py:1114] (1/4) Epoch 3, batch 7850, loss[loss=0.3043, simple_loss=0.363, pruned_loss=0.1228, over 4502.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3494, pruned_loss=0.1062, over 936367.80 frames. ], batch size: 10, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:26:07,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=15.0
+2024-07-27 16:26:08,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.19 vs. limit=22.5
+2024-07-27 16:26:11,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.11 vs. limit=6.0
+2024-07-27 16:26:11,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=37726.666666666664, ans=0.125
+2024-07-27 16:26:19,922 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+01 6.634e+01 7.796e+01 9.040e+01 1.354e+02, threshold=1.559e+02, percent-clipped=0.0
+2024-07-27 16:26:32,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.21 vs. limit=15.0
+2024-07-27 16:26:39,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=37766.666666666664, ans=0.002659420289855073
+2024-07-27 16:26:39,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37766.666666666664, ans=0.1
+2024-07-27 16:26:51,025 INFO [train.py:1114] (1/4) Epoch 3, batch 7900, loss[loss=0.3463, simple_loss=0.4004, pruned_loss=0.1461, over 4873.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3518, pruned_loss=0.1075, over 933581.13 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:27:26,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37820.0, ans=0.125
+2024-07-27 16:27:46,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=37846.666666666664, ans=10.0
+2024-07-27 16:27:58,710 INFO [train.py:1114] (1/4) Epoch 3, batch 7950, loss[loss=0.316, simple_loss=0.3744, pruned_loss=0.1288, over 3478.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3514, pruned_loss=0.1073, over 935731.60 frames. ], batch size: 35, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:27:59,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=37860.0, ans=0.0026391304347826083
+2024-07-27 16:28:07,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.49 vs. limit=15.0
+2024-07-27 16:28:14,334 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.890e+01 6.860e+01 7.518e+01 9.206e+01 1.306e+02, threshold=1.504e+02, percent-clipped=0.0
+2024-07-27 16:28:14,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37886.666666666664, ans=0.1
+2024-07-27 16:28:25,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37900.0, ans=0.1
+2024-07-27 16:28:40,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=37913.333333333336, ans=0.125
+2024-07-27 16:28:45,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37913.333333333336, ans=0.1
+2024-07-27 16:28:50,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=37913.333333333336, ans=0.2
+2024-07-27 16:28:51,959 INFO [train.py:1114] (1/4) Epoch 3, batch 8000, loss[loss=0.2177, simple_loss=0.3027, pruned_loss=0.06632, over 4618.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3494, pruned_loss=0.1063, over 934813.57 frames. ], batch size: 11, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:28:52,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=37926.666666666664, ans=0.125
+2024-07-27 16:29:23,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=37940.0, ans=0.05
+2024-07-27 16:29:31,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=37940.0, ans=0.2
+2024-07-27 16:29:46,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=37953.333333333336, ans=0.025
+2024-07-27 16:29:52,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=37966.666666666664, ans=0.125
+2024-07-27 16:30:23,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.02 vs. limit=15.0
+2024-07-27 16:30:39,451 INFO [train.py:1114] (1/4) Epoch 3, batch 8050, loss[loss=0.3257, simple_loss=0.3847, pruned_loss=0.1333, over 4811.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.351, pruned_loss=0.1072, over 934602.24 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:30:39,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=37993.333333333336, ans=0.125
+2024-07-27 16:30:40,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.80 vs. limit=22.5
+2024-07-27 16:30:46,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=37993.333333333336, ans=0.002610144927536232
+2024-07-27 16:30:49,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=37993.333333333336, ans=0.125
+2024-07-27 16:30:51,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=38006.666666666664, ans=0.0
+2024-07-27 16:31:03,282 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.744e+01 7.005e+01 8.059e+01 9.966e+01 1.848e+02, threshold=1.612e+02, percent-clipped=3.0
+2024-07-27 16:32:00,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0
+2024-07-27 16:32:01,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.59 vs. limit=15.0
+2024-07-27 16:32:12,020 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:32:14,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38046.666666666664, ans=0.125
+2024-07-27 16:32:14,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0
+2024-07-27 16:32:37,829 INFO [train.py:1114] (1/4) Epoch 3, batch 8100, loss[loss=0.2637, simple_loss=0.3389, pruned_loss=0.09423, over 4798.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3529, pruned_loss=0.1083, over 934605.77 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:32:40,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=38060.0, ans=0.0
+2024-07-27 16:32:41,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=38060.0, ans=0.002595652173913043
+2024-07-27 16:33:45,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=38113.333333333336, ans=0.0
+2024-07-27 16:33:47,670 INFO [train.py:1114] (1/4) Epoch 3, batch 8150, loss[loss=0.3114, simple_loss=0.3718, pruned_loss=0.1254, over 4786.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3509, pruned_loss=0.1071, over 937570.21 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:33:49,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=38126.666666666664, ans=0.125
+2024-07-27 16:34:19,631 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.808e+01 6.778e+01 7.869e+01 9.669e+01 1.901e+02, threshold=1.574e+02, percent-clipped=1.0
+2024-07-27 16:34:22,604 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.54 vs. limit=15.0
+2024-07-27 16:34:36,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=38166.666666666664, ans=0.0
+2024-07-27 16:34:56,443 INFO [train.py:1114] (1/4) Epoch 3, batch 8200, loss[loss=0.2938, simple_loss=0.3557, pruned_loss=0.1159, over 4824.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3511, pruned_loss=0.1068, over 938915.72 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:35:25,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38220.0, ans=0.125
+2024-07-27 16:35:45,645 INFO [train.py:1114] (1/4) Epoch 3, batch 8250, loss[loss=0.2966, simple_loss=0.3591, pruned_loss=0.1171, over 4894.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3507, pruned_loss=0.107, over 939242.20 frames. ], batch size: 13, lr: 2.11e-02, grad_scale: 16.0
+2024-07-27 16:35:59,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=38273.333333333336, ans=0.025
+2024-07-27 16:36:00,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.39 vs. limit=15.0
+2024-07-27 16:36:03,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0
+2024-07-27 16:36:04,215 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.777e+01 7.463e+01 9.374e+01 1.482e+02, threshold=1.493e+02, percent-clipped=0.0
+2024-07-27 16:36:09,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.45 vs. limit=15.0
+2024-07-27 16:36:20,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38313.333333333336, ans=0.125
+2024-07-27 16:36:24,686 INFO [train.py:1114] (1/4) Epoch 3, batch 8300, loss[loss=0.299, simple_loss=0.3713, pruned_loss=0.1133, over 4910.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3518, pruned_loss=0.1078, over 939173.96 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 16.0
+2024-07-27 16:36:26,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38326.666666666664, ans=0.125
+2024-07-27 16:36:26,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=38326.666666666664, ans=0.07
+2024-07-27 16:36:46,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=38366.666666666664, ans=0.125
+2024-07-27 16:36:54,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=38380.0, ans=0.125
+2024-07-27 16:37:01,100 INFO [train.py:1114] (1/4) Epoch 3, batch 8350, loss[loss=0.2886, simple_loss=0.3652, pruned_loss=0.1059, over 4797.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3503, pruned_loss=0.1066, over 942331.52 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 16.0
+2024-07-27 16:37:01,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38393.333333333336, ans=0.1
+2024-07-27 16:37:12,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=38393.333333333336, ans=0.125
+2024-07-27 16:37:13,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=38393.333333333336, ans=10.0
+2024-07-27 16:38:23,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38420.0, ans=0.1
+2024-07-27 16:38:23,647 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.458e+01 6.838e+01 7.813e+01 8.986e+01 1.214e+02, threshold=1.563e+02, percent-clipped=0.0
+2024-07-27 16:38:46,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=38433.333333333336, ans=0.125
+2024-07-27 16:38:50,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=38433.333333333336, ans=0.2
+2024-07-27 16:38:51,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=38433.333333333336, ans=0.0025144927536231874
+2024-07-27 16:39:00,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=38460.0, ans=0.125
+2024-07-27 16:39:01,313 INFO [train.py:1114] (1/4) Epoch 3, batch 8400, loss[loss=0.2699, simple_loss=0.3217, pruned_loss=0.109, over 4780.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3499, pruned_loss=0.1065, over 940716.83 frames. ], batch size: 12, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:39:02,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.68 vs. limit=12.0
+2024-07-27 16:39:09,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=38460.0, ans=0.125
+2024-07-27 16:39:14,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=38473.333333333336, ans=0.025
+2024-07-27 16:39:17,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=38473.333333333336, ans=0.002505797101449275
+2024-07-27 16:39:32,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=38513.333333333336, ans=0.125
+2024-07-27 16:39:34,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. limit=6.0
+2024-07-27 16:39:34,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=38513.333333333336, ans=0.125
+2024-07-27 16:39:34,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=38513.333333333336, ans=0.125
+2024-07-27 16:39:35,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.69 vs. limit=22.5
+2024-07-27 16:39:46,494 INFO [train.py:1114] (1/4) Epoch 3, batch 8450, loss[loss=0.3341, simple_loss=0.39, pruned_loss=0.1391, over 4810.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3505, pruned_loss=0.1064, over 939766.52 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:40:03,725 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.960e+01 7.996e+01 9.204e+01 1.346e+02, threshold=1.599e+02, percent-clipped=0.0
+2024-07-27 16:40:09,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38566.666666666664, ans=0.125
+2024-07-27 16:40:10,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.09 vs. limit=22.5
+2024-07-27 16:40:11,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0
+2024-07-27 16:40:20,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=38580.0, ans=0.0024826086956521737
+2024-07-27 16:40:20,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=38580.0, ans=0.95
+2024-07-27 16:40:34,575 INFO [train.py:1114] (1/4) Epoch 3, batch 8500, loss[loss=0.2476, simple_loss=0.3194, pruned_loss=0.08788, over 4615.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3492, pruned_loss=0.1057, over 939451.36 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:40:36,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=38593.333333333336, ans=0.0
+2024-07-27 16:40:48,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=38620.0, ans=0.0
+2024-07-27 16:40:50,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=38620.0, ans=0.0024739130434782612
+2024-07-27 16:40:54,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38620.0, ans=0.1
+2024-07-27 16:40:54,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.54 vs. limit=15.0
+2024-07-27 16:41:11,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=38633.333333333336, ans=0.05
+2024-07-27 16:41:17,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=38646.666666666664, ans=0.125
+2024-07-27 16:41:17,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=38646.666666666664, ans=0.125
+2024-07-27 16:41:21,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=38646.666666666664, ans=0.125
+2024-07-27 16:41:22,943 INFO [train.py:1114] (1/4) Epoch 3, batch 8550, loss[loss=0.2368, simple_loss=0.2991, pruned_loss=0.08729, over 4790.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3478, pruned_loss=0.1045, over 940362.69 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:41:59,820 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.027e+01 6.868e+01 7.768e+01 9.567e+01 1.448e+02, threshold=1.554e+02, percent-clipped=0.0
+2024-07-27 16:42:02,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=38686.666666666664, ans=0.0
+2024-07-27 16:42:11,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38700.0, ans=0.0
+2024-07-27 16:42:11,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=38700.0, ans=0.09899494936611666
+2024-07-27 16:42:17,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.00 vs. limit=15.0
+2024-07-27 16:42:19,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0
+2024-07-27 16:42:24,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.19 vs. limit=22.5
+2024-07-27 16:42:40,831 INFO [train.py:1114] (1/4) Epoch 3, batch 8600, loss[loss=0.2717, simple_loss=0.3536, pruned_loss=0.0949, over 4792.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3468, pruned_loss=0.1044, over 939973.96 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:42:44,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38726.666666666664, ans=0.1
+2024-07-27 16:42:45,457 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:42:49,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.67 vs. limit=15.0
+2024-07-27 16:42:50,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=38740.0, ans=0.125
+2024-07-27 16:42:55,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38753.333333333336, ans=0.1
+2024-07-27 16:43:13,334 INFO [train.py:1114] (1/4) Epoch 3, batch 8650, loss[loss=0.2775, simple_loss=0.3448, pruned_loss=0.105, over 4895.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3468, pruned_loss=0.1046, over 940876.95 frames. ], batch size: 15, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:43:59,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=38820.0, ans=6.0
+2024-07-27 16:44:00,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.909e+01 7.732e+01 9.254e+01 1.585e+02, threshold=1.546e+02, percent-clipped=1.0
+2024-07-27 16:44:01,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0
+2024-07-27 16:44:02,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.58 vs. limit=22.5
+2024-07-27 16:44:09,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=38833.333333333336, ans=0.0
+2024-07-27 16:44:15,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38846.666666666664, ans=0.125
+2024-07-27 16:44:27,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38860.0, ans=0.1
+2024-07-27 16:44:27,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.57 vs. limit=15.0
+2024-07-27 16:44:27,631 INFO [train.py:1114] (1/4) Epoch 3, batch 8700, loss[loss=0.2488, simple_loss=0.3277, pruned_loss=0.08494, over 4757.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3488, pruned_loss=0.1059, over 938143.98 frames. ], batch size: 13, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:44:33,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=38860.0, ans=0.2
+2024-07-27 16:44:37,634 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:45:02,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.88 vs. limit=15.0
+2024-07-27 16:45:15,413 INFO [train.py:1114] (1/4) Epoch 3, batch 8750, loss[loss=0.3416, simple_loss=0.4092, pruned_loss=0.137, over 4672.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3491, pruned_loss=0.1061, over 936567.12 frames. ], batch size: 15, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:45:35,868 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.721e+01 7.628e+01 9.057e+01 1.548e+02, threshold=1.526e+02, percent-clipped=1.0
+2024-07-27 16:45:39,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=38953.333333333336, ans=0.2
+2024-07-27 16:45:54,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=38980.0, ans=0.5
+2024-07-27 16:45:57,589 INFO [train.py:1114] (1/4) Epoch 3, batch 8800, loss[loss=0.2193, simple_loss=0.2995, pruned_loss=0.06959, over 4933.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3492, pruned_loss=0.1061, over 937366.02 frames. ], batch size: 14, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:46:24,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=38993.333333333336, ans=0.125
+2024-07-27 16:46:40,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39006.666666666664, ans=0.125
+2024-07-27 16:46:44,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=39020.0, ans=0.2
+2024-07-27 16:46:47,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.27 vs. limit=10.0
+2024-07-27 16:47:16,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39046.666666666664, ans=0.1
+2024-07-27 16:47:16,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=39046.666666666664, ans=0.0023811594202898557
+2024-07-27 16:47:17,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.70 vs. limit=22.5
+2024-07-27 16:47:18,770 INFO [train.py:1114] (1/4) Epoch 3, batch 8850, loss[loss=0.3476, simple_loss=0.3967, pruned_loss=0.1493, over 4520.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3508, pruned_loss=0.1076, over 932484.73 frames. ], batch size: 21, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:47:18,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=39060.0, ans=0.05
+2024-07-27 16:47:24,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0
+2024-07-27 16:47:27,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.38 vs.
limit=15.0 +2024-07-27 16:47:28,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=39073.333333333336, ans=0.125 +2024-07-27 16:47:29,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=39073.333333333336, ans=0.2 +2024-07-27 16:47:32,059 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.718e+01 6.978e+01 8.333e+01 9.846e+01 2.201e+02, threshold=1.667e+02, percent-clipped=2.0 +2024-07-27 16:48:08,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=39100.0, ans=15.0 +2024-07-27 16:48:10,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=39113.333333333336, ans=0.2 +2024-07-27 16:48:12,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39113.333333333336, ans=0.1 +2024-07-27 16:48:12,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=39113.333333333336, ans=0.0023666666666666662 +2024-07-27 16:48:17,152 INFO [train.py:1114] (1/4) Epoch 3, batch 8900, loss[loss=0.2566, simple_loss=0.3409, pruned_loss=0.08618, over 4935.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3518, pruned_loss=0.108, over 930278.36 frames. ], batch size: 12, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:48:23,691 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:48:24,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39140.0, ans=0.0 +2024-07-27 16:48:34,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=39153.333333333336, ans=0.2 +2024-07-27 16:48:37,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-07-27 16:48:40,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39166.666666666664, ans=0.125 +2024-07-27 16:48:56,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=39180.0, ans=0.05 +2024-07-27 16:48:59,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.65 vs. limit=12.0 +2024-07-27 16:49:00,626 INFO [train.py:1114] (1/4) Epoch 3, batch 8950, loss[loss=0.3103, simple_loss=0.362, pruned_loss=0.1293, over 4554.00 frames. ], tot_loss[loss=0.2841, simple_loss=0.352, pruned_loss=0.1081, over 931213.86 frames. 
], batch size: 21, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:49:06,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39193.333333333336, ans=0.125 +2024-07-27 16:49:15,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=39206.666666666664, ans=0.125 +2024-07-27 16:49:23,808 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.972e+01 6.809e+01 7.609e+01 8.972e+01 1.358e+02, threshold=1.522e+02, percent-clipped=0.0 +2024-07-27 16:49:51,639 INFO [train.py:1114] (1/4) Epoch 3, batch 9000, loss[loss=0.313, simple_loss=0.3535, pruned_loss=0.1363, over 4629.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3495, pruned_loss=0.1071, over 934013.14 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:49:51,640 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 16:50:05,981 INFO [train.py:1146] (1/4) Epoch 3, validation: loss=0.2254, simple_loss=0.3252, pruned_loss=0.06281, over 944034.00 frames. +2024-07-27 16:50:05,982 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 16:50:08,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=39260.0, ans=0.0023347826086956517 +2024-07-27 16:50:14,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=39260.0, ans=0.125 +2024-07-27 16:50:31,640 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:50:36,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.13 vs. limit=15.0 +2024-07-27 16:50:39,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=39300.0, ans=0.125 +2024-07-27 16:50:52,321 INFO [train.py:1114] (1/4) Epoch 3, batch 9050, loss[loss=0.2148, simple_loss=0.282, pruned_loss=0.07379, over 4554.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3472, pruned_loss=0.1056, over 934335.04 frames. ], batch size: 10, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:51:04,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. 
limit=6.0 +2024-07-27 16:51:04,888 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.807e+01 7.856e+01 8.861e+01 3.440e+02, threshold=1.571e+02, percent-clipped=1.0 +2024-07-27 16:51:10,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39366.666666666664, ans=0.125 +2024-07-27 16:51:11,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=39366.666666666664, ans=15.0 +2024-07-27 16:51:16,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39366.666666666664, ans=0.1 +2024-07-27 16:51:18,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=39380.0, ans=0.025 +2024-07-27 16:51:45,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39380.0, ans=0.1 +2024-07-27 16:52:04,332 INFO [train.py:1114] (1/4) Epoch 3, batch 9100, loss[loss=0.2467, simple_loss=0.3289, pruned_loss=0.08221, over 4938.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3469, pruned_loss=0.1051, over 936980.99 frames. ], batch size: 14, lr: 2.08e-02, grad_scale: 16.0 +2024-07-27 16:52:14,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=39393.333333333336, ans=0.125 +2024-07-27 16:52:21,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=39406.666666666664, ans=0.025 +2024-07-27 16:52:33,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=39420.0, ans=0.0 +2024-07-27 16:52:40,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=39433.333333333336, ans=0.015 +2024-07-27 16:52:59,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=39446.666666666664, ans=0.05 +2024-07-27 16:52:59,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=39446.666666666664, ans=0.125 +2024-07-27 16:53:06,166 INFO [train.py:1114] (1/4) Epoch 3, batch 9150, loss[loss=0.2808, simple_loss=0.3575, pruned_loss=0.102, over 4813.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3478, pruned_loss=0.1057, over 935989.37 frames. 
], batch size: 14, lr: 2.08e-02, grad_scale: 16.0 +2024-07-27 16:53:30,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=39473.333333333336, ans=0.125 +2024-07-27 16:53:33,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=39473.333333333336, ans=0.125 +2024-07-27 16:53:38,852 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.526e+01 6.919e+01 8.427e+01 9.572e+01 1.552e+02, threshold=1.685e+02, percent-clipped=0.0 +2024-07-27 16:53:50,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=39500.0, ans=0.002282608695652174 +2024-07-27 16:53:57,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=39513.333333333336, ans=0.0 +2024-07-27 16:54:03,874 INFO [train.py:1114] (1/4) Epoch 3, batch 9200, loss[loss=0.2089, simple_loss=0.2797, pruned_loss=0.06903, over 4858.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3468, pruned_loss=0.1052, over 937874.07 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:54:07,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=39526.666666666664, ans=0.2 +2024-07-27 16:54:19,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=39553.333333333336, ans=0.125 +2024-07-27 16:54:36,153 INFO [train.py:1114] (1/4) Epoch 3, batch 9250, loss[loss=0.3057, simple_loss=0.3814, pruned_loss=0.115, over 4637.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3466, pruned_loss=0.1045, over 938622.15 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:54:38,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=39593.333333333336, ans=0.00226231884057971 +2024-07-27 16:54:40,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.19 vs. limit=15.0 +2024-07-27 16:54:41,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39593.333333333336, ans=0.1 +2024-07-27 16:54:45,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=39606.666666666664, ans=0.2 +2024-07-27 16:54:49,929 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.796e+01 6.391e+01 6.941e+01 8.054e+01 1.289e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 16:54:51,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.66 vs. limit=15.0 +2024-07-27 16:54:55,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39633.333333333336, ans=0.1 +2024-07-27 16:54:55,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39633.333333333336, ans=0.125 +2024-07-27 16:55:09,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.43 vs. 
limit=15.0 +2024-07-27 16:55:10,138 INFO [train.py:1114] (1/4) Epoch 3, batch 9300, loss[loss=0.2556, simple_loss=0.3221, pruned_loss=0.09456, over 4775.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.346, pruned_loss=0.1042, over 938515.27 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:55:10,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=39660.0, ans=0.025 +2024-07-27 16:55:11,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39660.0, ans=0.0 +2024-07-27 16:55:13,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39660.0, ans=0.1 +2024-07-27 16:55:16,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.07 vs. limit=22.5 +2024-07-27 16:55:18,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39673.333333333336, ans=0.125 +2024-07-27 16:55:33,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=39686.666666666664, ans=0.125 +2024-07-27 16:55:52,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=39713.333333333336, ans=0.125 +2024-07-27 16:56:05,255 INFO [train.py:1114] (1/4) Epoch 3, batch 9350, loss[loss=0.2044, simple_loss=0.2764, pruned_loss=0.06621, over 4805.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3461, pruned_loss=0.1042, over 934832.93 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:09,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=39726.666666666664, ans=0.025 +2024-07-27 16:56:11,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=39726.666666666664, ans=0.07 +2024-07-27 16:56:20,302 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.575e+01 6.744e+01 7.337e+01 8.957e+01 1.228e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 16:56:26,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.35 vs. limit=15.0 +2024-07-27 16:56:37,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=39780.0, ans=0.2 +2024-07-27 16:56:39,562 INFO [train.py:1114] (1/4) Epoch 3, batch 9400, loss[loss=0.31, simple_loss=0.3667, pruned_loss=0.1266, over 4702.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3463, pruned_loss=0.1048, over 932550.38 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:39,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39793.333333333336, ans=0.125 +2024-07-27 16:56:42,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=39793.333333333336, ans=0.05 +2024-07-27 16:57:00,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.21 vs. 
limit=15.0 +2024-07-27 16:57:11,996 INFO [train.py:1114] (1/4) Epoch 3, batch 9450, loss[loss=0.3037, simple_loss=0.3624, pruned_loss=0.1225, over 4813.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3461, pruned_loss=0.1044, over 931785.08 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:57:12,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=39860.0, ans=0.95 +2024-07-27 16:57:16,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.66 vs. limit=15.0 +2024-07-27 16:57:21,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-07-27 16:57:23,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=39873.333333333336, ans=0.04949747468305833 +2024-07-27 16:57:25,848 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.734e+01 7.503e+01 8.983e+01 1.272e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 16:57:32,604 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.03 vs. limit=12.0 +2024-07-27 16:57:36,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39900.0, ans=0.125 +2024-07-27 16:58:03,382 INFO [train.py:1114] (1/4) Epoch 3, batch 9500, loss[loss=0.2479, simple_loss=0.3117, pruned_loss=0.09207, over 4704.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3461, pruned_loss=0.1041, over 934277.18 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:58:57,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-07-27 16:59:10,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=39980.0, ans=0.002178260869565218 +2024-07-27 16:59:12,963 INFO [train.py:1114] (1/4) Epoch 3, batch 9550, loss[loss=0.2779, simple_loss=0.3347, pruned_loss=0.1106, over 4784.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3459, pruned_loss=0.1045, over 931680.49 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:59:16,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=39993.333333333336, ans=0.125 +2024-07-27 16:59:25,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=40020.0, ans=0.125 +2024-07-27 16:59:25,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.61 vs. 
limit=22.5 +2024-07-27 16:59:26,039 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.187e+01 6.649e+01 7.565e+01 8.321e+01 1.560e+02, threshold=1.513e+02, percent-clipped=2.0 +2024-07-27 16:59:29,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=40020.0, ans=0.2 +2024-07-27 16:59:35,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0 +2024-07-27 16:59:38,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=40046.666666666664, ans=10.0 +2024-07-27 16:59:44,502 INFO [train.py:1114] (1/4) Epoch 3, batch 9600, loss[loss=0.4102, simple_loss=0.4187, pruned_loss=0.2009, over 3315.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3467, pruned_loss=0.1047, over 930638.79 frames. ], batch size: 36, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 16:59:47,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.19 vs. limit=22.5 +2024-07-27 16:59:52,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.72 vs. limit=15.0 +2024-07-27 17:00:00,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.37 vs. limit=6.0 +2024-07-27 17:00:27,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=40113.333333333336, ans=0.125 +2024-07-27 17:00:28,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=40113.333333333336, ans=0.125 +2024-07-27 17:00:30,247 INFO [train.py:1114] (1/4) Epoch 3, batch 9650, loss[loss=0.3293, simple_loss=0.3994, pruned_loss=0.1296, over 4857.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3478, pruned_loss=0.1052, over 926220.66 frames. ], batch size: 16, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:00:31,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=40126.666666666664, ans=10.0 +2024-07-27 17:00:36,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40140.0, ans=0.125 +2024-07-27 17:00:37,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=40140.0, ans=0.0 +2024-07-27 17:00:41,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.05 vs. limit=15.0 +2024-07-27 17:00:44,532 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.272e+01 6.641e+01 7.549e+01 8.923e+01 1.361e+02, threshold=1.510e+02, percent-clipped=0.0 +2024-07-27 17:01:01,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=12.0 +2024-07-27 17:01:04,419 INFO [train.py:1114] (1/4) Epoch 3, batch 9700, loss[loss=0.2937, simple_loss=0.357, pruned_loss=0.1152, over 4100.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3482, pruned_loss=0.1054, over 923961.55 frames. 
], batch size: 25, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:05,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=40193.333333333336, ans=0.0 +2024-07-27 17:01:33,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=21.73 vs. limit=15.0 +2024-07-27 17:01:34,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=40246.666666666664, ans=0.125 +2024-07-27 17:01:39,651 INFO [train.py:1114] (1/4) Epoch 3, batch 9750, loss[loss=0.2802, simple_loss=0.3521, pruned_loss=0.1042, over 4673.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.347, pruned_loss=0.1049, over 924889.48 frames. ], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:41,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=40260.0, ans=0.09899494936611666 +2024-07-27 17:01:42,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40260.0, ans=0.1 +2024-07-27 17:01:46,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=40273.333333333336, ans=0.2 +2024-07-27 17:01:53,659 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.325e+01 6.567e+01 7.224e+01 8.540e+01 1.142e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 17:02:02,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.69 vs. limit=10.0 +2024-07-27 17:02:03,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40300.0, ans=0.125 +2024-07-27 17:02:04,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=15.0 +2024-07-27 17:02:04,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.05 vs. limit=15.0 +2024-07-27 17:02:12,029 INFO [train.py:1114] (1/4) Epoch 3, batch 9800, loss[loss=0.2551, simple_loss=0.323, pruned_loss=0.0936, over 4708.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3456, pruned_loss=0.104, over 925122.10 frames. ], batch size: 12, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:16,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40326.666666666664, ans=0.125 +2024-07-27 17:02:46,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=40380.0, ans=0.07 +2024-07-27 17:02:47,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=40380.0, ans=0.125 +2024-07-27 17:02:48,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=40380.0, ans=0.125 +2024-07-27 17:02:51,224 INFO [train.py:1114] (1/4) Epoch 3, batch 9850, loss[loss=0.2635, simple_loss=0.3521, pruned_loss=0.08743, over 4906.00 frames. ], tot_loss[loss=0.278, simple_loss=0.3467, pruned_loss=0.1047, over 927524.91 frames. 
], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:55,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-07-27 17:03:14,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=40420.0, ans=0.2 +2024-07-27 17:03:15,222 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.644e+01 7.357e+01 1.003e+02 1.564e+02, threshold=1.471e+02, percent-clipped=2.0 +2024-07-27 17:03:15,956 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:03:33,888 INFO [train.py:1114] (1/4) Epoch 3, batch 9900, loss[loss=0.3199, simple_loss=0.391, pruned_loss=0.1244, over 4830.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3478, pruned_loss=0.1059, over 926374.58 frames. ], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:04:00,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=40473.333333333336, ans=0.025 +2024-07-27 17:04:12,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=40500.0, ans=0.2 +2024-07-27 17:04:14,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0 +2024-07-27 17:04:21,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=40513.333333333336, ans=0.125 +2024-07-27 17:04:22,283 INFO [train.py:1114] (1/4) Epoch 3, batch 9950, loss[loss=0.2026, simple_loss=0.2846, pruned_loss=0.06028, over 4794.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3488, pruned_loss=0.1071, over 928936.44 frames. ], batch size: 11, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:04:28,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=40526.666666666664, ans=0.0 +2024-07-27 17:04:42,171 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.432e+01 7.065e+01 7.952e+01 9.840e+01 1.527e+02, threshold=1.590e+02, percent-clipped=1.0 +2024-07-27 17:04:47,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40566.666666666664, ans=0.1 +2024-07-27 17:04:47,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.38 vs. limit=12.0 +2024-07-27 17:04:55,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40580.0, ans=0.1 +2024-07-27 17:04:57,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40580.0, ans=0.125 +2024-07-27 17:05:00,003 INFO [train.py:1114] (1/4) Epoch 3, batch 10000, loss[loss=0.3671, simple_loss=0.4233, pruned_loss=0.1555, over 4642.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3538, pruned_loss=0.1095, over 927406.53 frames. 
], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:21,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=40633.333333333336, ans=0.07 +2024-07-27 17:05:23,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.34 vs. limit=15.0 +2024-07-27 17:05:32,229 INFO [train.py:1114] (1/4) Epoch 3, batch 10050, loss[loss=0.3988, simple_loss=0.4211, pruned_loss=0.1882, over 3657.00 frames. ], tot_loss[loss=0.2922, simple_loss=0.3588, pruned_loss=0.1129, over 917063.62 frames. ], batch size: 35, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:36,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=40660.0, ans=0.2 +2024-07-27 17:05:45,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40673.333333333336, ans=0.125 +2024-07-27 17:05:47,409 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.930e+01 7.073e+01 7.900e+01 8.546e+01 1.194e+02, threshold=1.580e+02, percent-clipped=0.0 +2024-07-27 17:05:48,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=40686.666666666664, ans=10.0 +2024-07-27 17:05:55,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.28 vs. limit=15.0 +2024-07-27 17:06:05,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=40713.333333333336, ans=0.2 +2024-07-27 17:06:06,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.52 vs. limit=10.0 +2024-07-27 17:06:07,145 INFO [train.py:1114] (1/4) Epoch 3, batch 10100, loss[loss=0.3629, simple_loss=0.4065, pruned_loss=0.1597, over 3068.00 frames. ], tot_loss[loss=0.3037, simple_loss=0.3657, pruned_loss=0.1208, over 862008.29 frames. ], batch size: 35, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:16,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.69 vs. limit=15.0 +2024-07-27 17:06:21,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=40753.333333333336, ans=0.2 +2024-07-27 17:06:22,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.99 vs. limit=10.0 +2024-07-27 17:06:29,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=40766.666666666664, ans=0.025 +2024-07-27 17:06:33,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=40766.666666666664, ans=0.125 +2024-07-27 17:06:41,345 INFO [train.py:1114] (1/4) Epoch 3, batch 10150, loss[loss=0.3994, simple_loss=0.4229, pruned_loss=0.188, over 3096.00 frames. ], tot_loss[loss=0.3115, simple_loss=0.3703, pruned_loss=0.1263, over 821361.55 frames. 
], batch size: 35, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:47,223 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.54 vs. limit=6.0 +2024-07-27 17:07:01,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=40806.666666666664, ans=0.0 +2024-07-27 17:07:09,197 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.948e+01 6.999e+01 7.537e+01 8.281e+01 1.738e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 17:07:12,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=40820.0, ans=0.125 +2024-07-27 17:07:13,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=40820.0, ans=0.0 +2024-07-27 17:07:17,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=40833.333333333336, ans=0.125 +2024-07-27 17:07:24,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.08 vs. limit=10.0 +2024-07-27 17:07:32,449 INFO [train.py:1114] (1/4) Epoch 3, batch 10200, loss[loss=0.3385, simple_loss=0.3869, pruned_loss=0.145, over 3466.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.3734, pruned_loss=0.1304, over 787485.92 frames. ], batch size: 35, lr: 2.04e-02, grad_scale: 16.0 +2024-07-27 17:08:04,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=40873.333333333336, ans=0.1 +2024-07-27 17:08:05,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=40886.666666666664, ans=22.5 +2024-07-27 17:08:50,761 INFO [train.py:1114] (1/4) Epoch 4, batch 0, loss[loss=0.2529, simple_loss=0.3288, pruned_loss=0.08848, over 4852.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.3288, pruned_loss=0.08848, over 4852.00 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:08:50,762 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 17:09:02,630 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2303, simple_loss=0.3319, pruned_loss=0.06433, over 944034.00 frames. +2024-07-27 17:09:03,259 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 17:09:29,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=40917.333333333336, ans=0.015 +2024-07-27 17:09:38,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40930.666666666664, ans=0.0 +2024-07-27 17:09:48,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=40944.0, ans=0.0019686956521739133 +2024-07-27 17:09:56,709 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.703e+01 7.240e+01 7.919e+01 1.564e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 17:09:57,431 INFO [train.py:1114] (1/4) Epoch 4, batch 50, loss[loss=0.2053, simple_loss=0.2695, pruned_loss=0.07057, over 4610.00 frames. ], tot_loss[loss=0.2856, simple_loss=0.3524, pruned_loss=0.1094, over 205920.93 frames. 
], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:23,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=12.0 +2024-07-27 17:10:24,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=41010.666666666664, ans=0.2 +2024-07-27 17:10:30,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41024.0, ans=0.1 +2024-07-27 17:10:31,380 INFO [train.py:1114] (1/4) Epoch 4, batch 100, loss[loss=0.2751, simple_loss=0.3371, pruned_loss=0.1066, over 4644.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.353, pruned_loss=0.1079, over 366030.05 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:35,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=41024.0, ans=0.125 +2024-07-27 17:10:37,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=41024.0, ans=15.0 +2024-07-27 17:10:43,976 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:10:46,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=41050.666666666664, ans=0.1 +2024-07-27 17:11:04,705 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.318e+01 6.667e+01 8.145e+01 9.581e+01 1.407e+02, threshold=1.629e+02, percent-clipped=0.0 +2024-07-27 17:15:47,221 INFO [train.py:1114] (1/4) Epoch 4, batch 150, loss[loss=0.2196, simple_loss=0.2956, pruned_loss=0.07178, over 4620.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3456, pruned_loss=0.1024, over 494536.72 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:13,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.58 vs. limit=15.0 +2024-07-27 17:16:14,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41117.333333333336, ans=0.1 +2024-07-27 17:16:25,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.30 vs. limit=15.0 +2024-07-27 17:16:30,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41144.0, ans=0.125 +2024-07-27 17:16:36,406 INFO [train.py:1114] (1/4) Epoch 4, batch 200, loss[loss=0.2891, simple_loss=0.3672, pruned_loss=0.1055, over 4511.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3464, pruned_loss=0.1034, over 594087.85 frames. ], batch size: 21, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:38,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41157.333333333336, ans=0.1 +2024-07-27 17:16:45,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.87 vs. 
limit=10.0 +2024-07-27 17:16:48,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=41184.0, ans=0.125 +2024-07-27 17:16:51,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=41184.0, ans=0.07 +2024-07-27 17:16:59,204 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-07-27 17:16:59,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0 +2024-07-27 17:17:06,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=12.0 +2024-07-27 17:17:09,101 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+01 6.403e+01 7.504e+01 8.893e+01 1.315e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 17:17:09,904 INFO [train.py:1114] (1/4) Epoch 4, batch 250, loss[loss=0.2865, simple_loss=0.3595, pruned_loss=0.1067, over 4640.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3459, pruned_loss=0.1026, over 670987.41 frames. ], batch size: 16, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:17:18,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=41237.333333333336, ans=0.125 +2024-07-27 17:17:18,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41237.333333333336, ans=0.0 +2024-07-27 17:17:22,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=41237.333333333336, ans=0.125 +2024-07-27 17:17:28,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.85 vs. limit=22.5 +2024-07-27 17:17:39,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.10 vs. limit=15.0 +2024-07-27 17:17:43,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.94 vs. limit=22.5 +2024-07-27 17:17:43,509 INFO [train.py:1114] (1/4) Epoch 4, batch 300, loss[loss=0.2859, simple_loss=0.3716, pruned_loss=0.1001, over 4807.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3449, pruned_loss=0.1015, over 730862.99 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:17:57,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41317.333333333336, ans=0.125 +2024-07-27 17:18:15,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=41344.0, ans=0.125 +2024-07-27 17:18:18,360 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.327e+01 6.726e+01 7.955e+01 9.020e+01 1.256e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 17:18:19,051 INFO [train.py:1114] (1/4) Epoch 4, batch 350, loss[loss=0.3159, simple_loss=0.3707, pruned_loss=0.1305, over 4937.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3464, pruned_loss=0.1016, over 777072.52 frames. 
], batch size: 12, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:20,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=41357.333333333336, ans=10.0 +2024-07-27 17:18:28,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.36 vs. limit=22.5 +2024-07-27 17:18:35,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.48 vs. limit=22.5 +2024-07-27 17:18:37,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.61 vs. limit=22.5 +2024-07-27 17:18:48,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.64 vs. limit=22.5 +2024-07-27 17:18:49,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.72 vs. limit=15.0 +2024-07-27 17:18:52,194 INFO [train.py:1114] (1/4) Epoch 4, batch 400, loss[loss=0.2669, simple_loss=0.3457, pruned_loss=0.094, over 4696.00 frames. ], tot_loss[loss=0.2734, simple_loss=0.3449, pruned_loss=0.1009, over 814474.29 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:19:01,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=41437.333333333336, ans=0.2 +2024-07-27 17:19:05,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=41450.666666666664, ans=0.0 +2024-07-27 17:19:16,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=41464.0, ans=0.125 +2024-07-27 17:19:24,665 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.564e+01 7.397e+01 8.870e+01 1.499e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 17:19:25,328 INFO [train.py:1114] (1/4) Epoch 4, batch 450, loss[loss=0.3019, simple_loss=0.3767, pruned_loss=0.1136, over 4630.00 frames. ], tot_loss[loss=0.2755, simple_loss=0.3466, pruned_loss=0.1022, over 839604.32 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:19:28,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.66 vs. 
limit=10.0 +2024-07-27 17:19:32,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=41490.666666666664, ans=0.025 +2024-07-27 17:19:34,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=41504.0, ans=0.125 +2024-07-27 17:19:34,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=41504.0, ans=0.2 +2024-07-27 17:19:36,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=41504.0, ans=0.125 +2024-07-27 17:19:44,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=41517.333333333336, ans=0.0 +2024-07-27 17:19:48,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=41530.666666666664, ans=0.025 +2024-07-27 17:20:05,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41544.0, ans=0.1 +2024-07-27 17:20:08,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.68 vs. limit=15.0 +2024-07-27 17:20:09,639 INFO [train.py:1114] (1/4) Epoch 4, batch 500, loss[loss=0.3063, simple_loss=0.3829, pruned_loss=0.1148, over 4686.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3454, pruned_loss=0.102, over 861981.13 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:11,401 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.33 vs. limit=22.5 +2024-07-27 17:20:18,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41570.666666666664, ans=0.125 +2024-07-27 17:20:25,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41584.0, ans=0.1 +2024-07-27 17:20:34,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=41597.333333333336, ans=0.125 +2024-07-27 17:20:45,337 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.267e+01 7.385e+01 9.027e+01 1.460e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 17:20:46,080 INFO [train.py:1114] (1/4) Epoch 4, batch 550, loss[loss=0.2782, simple_loss=0.3509, pruned_loss=0.1028, over 4647.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3446, pruned_loss=0.1012, over 878064.97 frames. 
], batch size: 17, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:53,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=41637.333333333336, ans=0.125 +2024-07-27 17:20:58,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41650.666666666664, ans=0.125 +2024-07-27 17:21:01,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41650.666666666664, ans=0.0 +2024-07-27 17:21:08,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=41664.0, ans=0.2 +2024-07-27 17:21:09,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=41664.0, ans=0.0018121739130434796 +2024-07-27 17:21:18,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=41677.333333333336, ans=0.0018092753623188407 +2024-07-27 17:21:21,546 INFO [train.py:1114] (1/4) Epoch 4, batch 600, loss[loss=0.2921, simple_loss=0.3662, pruned_loss=0.109, over 4596.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3439, pruned_loss=0.1002, over 892499.58 frames. ], batch size: 16, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:21:21,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.46 vs. limit=15.0 +2024-07-27 17:21:24,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41690.666666666664, ans=0.125 +2024-07-27 17:21:27,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=41704.0, ans=0.025 +2024-07-27 17:21:45,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41730.666666666664, ans=0.1 +2024-07-27 17:21:51,659 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.19 vs. limit=15.0 +2024-07-27 17:21:53,702 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.519e+01 6.252e+01 7.090e+01 7.980e+01 1.452e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 17:21:54,399 INFO [train.py:1114] (1/4) Epoch 4, batch 650, loss[loss=0.2791, simple_loss=0.3451, pruned_loss=0.1065, over 4753.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3418, pruned_loss=0.09908, over 904173.55 frames. 
], batch size: 13, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:22:12,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=41784.0, ans=0.125 +2024-07-27 17:22:19,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=41797.333333333336, ans=0.125 +2024-07-27 17:22:19,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41797.333333333336, ans=0.1 +2024-07-27 17:22:25,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=41810.666666666664, ans=0.2 +2024-07-27 17:22:26,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=41810.666666666664, ans=0.0 +2024-07-27 17:22:27,765 INFO [train.py:1114] (1/4) Epoch 4, batch 700, loss[loss=0.2599, simple_loss=0.3236, pruned_loss=0.09803, over 4642.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3416, pruned_loss=0.09921, over 911945.17 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:22:30,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0 +2024-07-27 17:22:48,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=41864.0, ans=0.0 +2024-07-27 17:22:56,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=41877.333333333336, ans=0.125 +2024-07-27 17:22:57,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.69 vs. limit=22.5 +2024-07-27 17:22:59,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=41877.333333333336, ans=0.2 +2024-07-27 17:22:59,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=41877.333333333336, ans=0.0017657971014492756 +2024-07-27 17:23:00,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=41877.333333333336, ans=0.2 +2024-07-27 17:23:01,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=41877.333333333336, ans=0.0 +2024-07-27 17:23:02,153 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.371e+01 6.772e+01 7.672e+01 9.334e+01 1.432e+02, threshold=1.534e+02, percent-clipped=1.0 +2024-07-27 17:23:02,186 INFO [train.py:1114] (1/4) Epoch 4, batch 750, loss[loss=0.2984, simple_loss=0.3685, pruned_loss=0.1141, over 4690.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3406, pruned_loss=0.099, over 918504.83 frames. 
], batch size: 13, lr: 1.89e-02, grad_scale: 16.0 +2024-07-27 17:23:10,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=41904.0, ans=0.2 +2024-07-27 17:23:16,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=41917.333333333336, ans=0.125 +2024-07-27 17:23:24,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=41930.666666666664, ans=0.0 +2024-07-27 17:23:32,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=41944.0, ans=0.2 +2024-07-27 17:23:37,968 INFO [train.py:1114] (1/4) Epoch 4, batch 800, loss[loss=0.2029, simple_loss=0.2792, pruned_loss=0.06331, over 4863.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3406, pruned_loss=0.09903, over 923713.93 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:23:54,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41970.666666666664, ans=0.1 +2024-07-27 17:23:58,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=41984.0, ans=0.125 +2024-07-27 17:23:58,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=41984.0, ans=0.0 +2024-07-27 17:24:00,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41984.0, ans=0.1 +2024-07-27 17:24:14,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42010.666666666664, ans=0.0 +2024-07-27 17:24:18,031 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.573e+01 6.422e+01 7.236e+01 8.133e+01 1.458e+02, threshold=1.447e+02, percent-clipped=0.0 +2024-07-27 17:24:18,064 INFO [train.py:1114] (1/4) Epoch 4, batch 850, loss[loss=0.2581, simple_loss=0.3455, pruned_loss=0.08534, over 4662.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3416, pruned_loss=0.09952, over 927842.39 frames. ], batch size: 14, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:24:27,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-07-27 17:24:30,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.27 vs. limit=10.0 +2024-07-27 17:24:34,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.34 vs. 
limit=15.0 +2024-07-27 17:24:37,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=42037.333333333336, ans=0.0 +2024-07-27 17:24:44,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=42050.666666666664, ans=0.125 +2024-07-27 17:24:46,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=42064.0, ans=0.125 +2024-07-27 17:24:50,429 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.21 vs. limit=22.5 +2024-07-27 17:24:52,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=42077.333333333336, ans=0.09899494936611666 +2024-07-27 17:24:52,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.44 vs. limit=22.5 +2024-07-27 17:24:58,778 INFO [train.py:1114] (1/4) Epoch 4, batch 900, loss[loss=0.2657, simple_loss=0.3473, pruned_loss=0.09204, over 4856.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3424, pruned_loss=0.1001, over 928491.14 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:25:02,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-27 17:25:02,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.23 vs. limit=15.0 +2024-07-27 17:25:08,269 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:25:08,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=42104.0, ans=0.025 +2024-07-27 17:25:08,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=42104.0, ans=0.125 +2024-07-27 17:25:15,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42117.333333333336, ans=0.1 +2024-07-27 17:25:34,686 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.294e+01 6.285e+01 6.831e+01 7.468e+01 1.764e+02, threshold=1.366e+02, percent-clipped=2.0 +2024-07-27 17:25:34,720 INFO [train.py:1114] (1/4) Epoch 4, batch 950, loss[loss=0.2609, simple_loss=0.3327, pruned_loss=0.0945, over 4772.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3414, pruned_loss=0.09997, over 929471.60 frames. ], batch size: 12, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:25:57,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.00 vs. limit=12.0 +2024-07-27 17:26:01,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=42197.333333333336, ans=0.125 +2024-07-27 17:26:02,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.24 vs. 
limit=15.0 +2024-07-27 17:26:04,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42197.333333333336, ans=0.1 +2024-07-27 17:26:10,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.39 vs. limit=15.0 +2024-07-27 17:26:12,277 INFO [train.py:1114] (1/4) Epoch 4, batch 1000, loss[loss=0.2285, simple_loss=0.3096, pruned_loss=0.07369, over 4966.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.342, pruned_loss=0.1005, over 929219.80 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:26:27,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.52 vs. limit=15.0 +2024-07-27 17:26:28,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. limit=6.0 +2024-07-27 17:26:52,348 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.274e+01 6.992e+01 7.907e+01 1.150e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 17:26:52,381 INFO [train.py:1114] (1/4) Epoch 4, batch 1050, loss[loss=0.2603, simple_loss=0.3443, pruned_loss=0.08818, over 4879.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3412, pruned_loss=0.09989, over 931728.70 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:04,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.89 vs. limit=10.0 +2024-07-27 17:27:08,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.63 vs. limit=22.5 +2024-07-27 17:27:09,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=42304.0, ans=0.0016730434782608692 +2024-07-27 17:27:19,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42317.333333333336, ans=0.125 +2024-07-27 17:27:19,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-07-27 17:27:21,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=42317.333333333336, ans=0.125 +2024-07-27 17:27:22,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=42317.333333333336, ans=0.0 +2024-07-27 17:27:31,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=42330.666666666664, ans=0.05 +2024-07-27 17:27:34,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. limit=10.0 +2024-07-27 17:27:40,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=42344.0, ans=0.09899494936611666 +2024-07-27 17:27:44,196 INFO [train.py:1114] (1/4) Epoch 4, batch 1100, loss[loss=0.2694, simple_loss=0.3442, pruned_loss=0.09725, over 4898.00 frames. 
], tot_loss[loss=0.27, simple_loss=0.3409, pruned_loss=0.09961, over 934284.46 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:46,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0 +2024-07-27 17:27:56,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=42370.666666666664, ans=0.2 +2024-07-27 17:28:01,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=42384.0, ans=0.0016556521739130425 +2024-07-27 17:28:03,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=42384.0, ans=0.125 +2024-07-27 17:28:22,184 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.208e+01 6.982e+01 7.743e+01 1.395e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 17:28:22,217 INFO [train.py:1114] (1/4) Epoch 4, batch 1150, loss[loss=0.2705, simple_loss=0.3565, pruned_loss=0.09222, over 4901.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3404, pruned_loss=0.09857, over 934418.95 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:28:42,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=42424.0, ans=0.0 +2024-07-27 17:28:46,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42437.333333333336, ans=0.125 +2024-07-27 17:28:50,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=42437.333333333336, ans=0.125 +2024-07-27 17:28:59,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42450.666666666664, ans=0.125 +2024-07-27 17:29:24,967 INFO [train.py:1114] (1/4) Epoch 4, batch 1200, loss[loss=0.2607, simple_loss=0.3335, pruned_loss=0.09397, over 4875.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3408, pruned_loss=0.09891, over 933176.00 frames. 
], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:29:31,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42504.0, ans=0.125 +2024-07-27 17:29:42,688 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:29:46,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=42517.333333333336, ans=0.125 +2024-07-27 17:29:53,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=42530.666666666664, ans=0.125 +2024-07-27 17:29:55,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=42530.666666666664, ans=0.125 +2024-07-27 17:30:04,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=42544.0, ans=0.125 +2024-07-27 17:30:12,122 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.401e+01 6.877e+01 7.526e+01 8.642e+01 1.436e+02, threshold=1.505e+02, percent-clipped=1.0 +2024-07-27 17:30:12,155 INFO [train.py:1114] (1/4) Epoch 4, batch 1250, loss[loss=0.2874, simple_loss=0.3676, pruned_loss=0.1036, over 4797.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3416, pruned_loss=0.09842, over 937144.63 frames. ], batch size: 15, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:30:23,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42570.666666666664, ans=0.125 +2024-07-27 17:30:26,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=42584.0, ans=0.04949747468305833 +2024-07-27 17:30:26,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.25 vs. limit=22.5 +2024-07-27 17:30:37,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=42597.333333333336, ans=0.0016092753623188401 +2024-07-27 17:30:37,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=13.31 vs. limit=15.0 +2024-07-27 17:30:40,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=42597.333333333336, ans=0.125 +2024-07-27 17:30:40,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=42597.333333333336, ans=0.2 +2024-07-27 17:30:41,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42597.333333333336, ans=0.0 +2024-07-27 17:30:57,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42624.0, ans=0.125 +2024-07-27 17:30:57,767 INFO [train.py:1114] (1/4) Epoch 4, batch 1300, loss[loss=0.3065, simple_loss=0.3736, pruned_loss=0.1197, over 4732.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3409, pruned_loss=0.098, over 938827.74 frames. 
], batch size: 19, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:30:58,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42624.0, ans=0.125 +2024-07-27 17:31:33,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=42664.0, ans=0.125 +2024-07-27 17:31:40,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-27 17:31:53,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.28 vs. limit=15.0 +2024-07-27 17:31:59,663 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.286e+01 6.475e+01 6.974e+01 8.075e+01 1.412e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 17:31:59,696 INFO [train.py:1114] (1/4) Epoch 4, batch 1350, loss[loss=0.318, simple_loss=0.3899, pruned_loss=0.1231, over 4758.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3405, pruned_loss=0.09784, over 940844.25 frames. ], batch size: 13, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:31:59,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=42690.666666666664, ans=0.001588985507246378 +2024-07-27 17:32:01,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.39 vs. limit=22.5 +2024-07-27 17:32:08,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=42690.666666666664, ans=0.2 +2024-07-27 17:32:10,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=42704.0, ans=0.2 +2024-07-27 17:32:37,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=42730.666666666664, ans=0.2 +2024-07-27 17:32:49,784 INFO [train.py:1114] (1/4) Epoch 4, batch 1400, loss[loss=0.2681, simple_loss=0.3342, pruned_loss=0.101, over 4699.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3409, pruned_loss=0.09816, over 942736.83 frames. ], batch size: 11, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:32:50,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.02 vs. limit=10.0 +2024-07-27 17:33:34,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=42797.333333333336, ans=0.125 +2024-07-27 17:33:48,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42810.666666666664, ans=0.125 +2024-07-27 17:33:59,438 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.546e+01 6.502e+01 7.039e+01 8.275e+01 1.312e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 17:33:59,472 INFO [train.py:1114] (1/4) Epoch 4, batch 1450, loss[loss=0.2963, simple_loss=0.3664, pruned_loss=0.1132, over 4679.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3423, pruned_loss=0.09895, over 942771.77 frames. 
], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:34:11,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42824.0, ans=0.1 +2024-07-27 17:34:18,766 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:34:18,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=42837.333333333336, ans=0.0015571014492753617 +2024-07-27 17:34:34,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=42877.333333333336, ans=0.2 +2024-07-27 17:34:54,667 INFO [train.py:1114] (1/4) Epoch 4, batch 1500, loss[loss=0.2727, simple_loss=0.3617, pruned_loss=0.09188, over 4801.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3424, pruned_loss=0.09912, over 942543.33 frames. ], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:35:05,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42904.0, ans=0.125 +2024-07-27 17:35:24,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=42917.333333333336, ans=0.125 +2024-07-27 17:35:27,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.01 vs. limit=15.0 +2024-07-27 17:35:35,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=42930.666666666664, ans=0.125 +2024-07-27 17:35:36,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=42930.666666666664, ans=0.0 +2024-07-27 17:35:46,840 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.518e+01 6.513e+01 7.459e+01 8.473e+01 1.359e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 17:35:46,873 INFO [train.py:1114] (1/4) Epoch 4, batch 1550, loss[loss=0.2969, simple_loss=0.3636, pruned_loss=0.1151, over 4899.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3422, pruned_loss=0.09951, over 938953.38 frames. ], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:35:51,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=42957.333333333336, ans=0.125 +2024-07-27 17:36:20,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0 +2024-07-27 17:36:23,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.71 vs. limit=15.0 +2024-07-27 17:36:23,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=43010.666666666664, ans=0.125 +2024-07-27 17:36:25,531 INFO [train.py:1114] (1/4) Epoch 4, batch 1600, loss[loss=0.2357, simple_loss=0.3186, pruned_loss=0.07636, over 4873.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3421, pruned_loss=0.09952, over 937542.21 frames. 
], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:36:28,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=43024.0, ans=0.0 +2024-07-27 17:36:37,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.66 vs. limit=15.0 +2024-07-27 17:36:40,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=43037.333333333336, ans=0.125 +2024-07-27 17:36:44,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=43050.666666666664, ans=0.125 +2024-07-27 17:36:50,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.79 vs. limit=15.0 +2024-07-27 17:36:53,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=43064.0, ans=0.2 +2024-07-27 17:36:57,446 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:37:02,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=43077.333333333336, ans=0.035 +2024-07-27 17:37:02,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.38 vs. limit=15.0 +2024-07-27 17:37:04,626 INFO [train.py:1114] (1/4) Epoch 4, batch 1650, loss[loss=0.3037, simple_loss=0.3777, pruned_loss=0.1149, over 4651.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3419, pruned_loss=0.09994, over 937358.03 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:05,272 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.309e+01 6.450e+01 7.502e+01 9.535e+01 1.419e+02, threshold=1.500e+02, percent-clipped=0.0 +2024-07-27 17:37:13,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.84 vs. limit=22.5 +2024-07-27 17:37:37,803 INFO [train.py:1114] (1/4) Epoch 4, batch 1700, loss[loss=0.2127, simple_loss=0.2816, pruned_loss=0.07185, over 4708.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3408, pruned_loss=0.09893, over 939095.70 frames. ], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:46,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=43170.666666666664, ans=0.125 +2024-07-27 17:37:49,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.98 vs. 
limit=15.0 +2024-07-27 17:37:54,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=43184.0, ans=0.0 +2024-07-27 17:38:08,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43210.666666666664, ans=0.1 +2024-07-27 17:38:10,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=43210.666666666664, ans=0.125 +2024-07-27 17:38:12,047 INFO [train.py:1114] (1/4) Epoch 4, batch 1750, loss[loss=0.2447, simple_loss=0.3047, pruned_loss=0.09233, over 4804.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3393, pruned_loss=0.09815, over 939925.72 frames. ], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:12,713 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.524e+01 6.769e+01 7.815e+01 9.643e+01 1.575e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 17:38:30,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43250.666666666664, ans=0.1 +2024-07-27 17:38:31,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.55 vs. limit=15.0 +2024-07-27 17:38:38,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.78 vs. limit=15.0 +2024-07-27 17:38:49,157 INFO [train.py:1114] (1/4) Epoch 4, batch 1800, loss[loss=0.2325, simple_loss=0.3061, pruned_loss=0.07942, over 4635.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3381, pruned_loss=0.09718, over 940574.21 frames. ], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:50,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=43290.666666666664, ans=0.125 +2024-07-27 17:38:52,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=43290.666666666664, ans=0.09899494936611666 +2024-07-27 17:38:56,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=43304.0, ans=0.0014556521739130437 +2024-07-27 17:38:57,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=43304.0, ans=0.2 +2024-07-27 17:39:23,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.14 vs. limit=15.0 +2024-07-27 17:39:23,346 INFO [train.py:1114] (1/4) Epoch 4, batch 1850, loss[loss=0.2586, simple_loss=0.3288, pruned_loss=0.09423, over 4805.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3369, pruned_loss=0.09621, over 940513.01 frames. 
], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:39:23,919 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.740e+01 7.721e+01 9.480e+01 1.911e+02, threshold=1.544e+02, percent-clipped=3.0 +2024-07-27 17:39:34,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=43370.666666666664, ans=0.125 +2024-07-27 17:39:37,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.04 vs. limit=22.5 +2024-07-27 17:39:37,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=15.0 +2024-07-27 17:39:50,652 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:39:58,473 INFO [train.py:1114] (1/4) Epoch 4, batch 1900, loss[loss=0.2776, simple_loss=0.3468, pruned_loss=0.1043, over 4671.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3375, pruned_loss=0.0963, over 941736.89 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:39:58,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=43424.0, ans=0.125 +2024-07-27 17:39:59,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=43424.0, ans=0.125 +2024-07-27 17:39:59,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43424.0, ans=0.125 +2024-07-27 17:39:59,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=43424.0, ans=0.125 +2024-07-27 17:40:00,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=43424.0, ans=0.0 +2024-07-27 17:40:07,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0 +2024-07-27 17:40:15,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.73 vs. limit=22.5 +2024-07-27 17:40:18,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43450.666666666664, ans=0.1 +2024-07-27 17:40:21,120 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:40:22,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43464.0, ans=0.0 +2024-07-27 17:40:33,682 INFO [train.py:1114] (1/4) Epoch 4, batch 1950, loss[loss=0.243, simple_loss=0.3058, pruned_loss=0.09012, over 4903.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.34, pruned_loss=0.09715, over 943591.47 frames. 
], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:34,313 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 6.498e+01 7.387e+01 8.650e+01 1.667e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 17:40:56,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=43517.333333333336, ans=0.2 +2024-07-27 17:40:57,210 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.88 vs. limit=15.0 +2024-07-27 17:41:00,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=43530.666666666664, ans=0.025 +2024-07-27 17:41:11,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0 +2024-07-27 17:41:17,124 INFO [train.py:1114] (1/4) Epoch 4, batch 2000, loss[loss=0.2363, simple_loss=0.3103, pruned_loss=0.08116, over 4822.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3411, pruned_loss=0.0979, over 940732.18 frames. ], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:18,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=43557.333333333336, ans=0.125 +2024-07-27 17:41:23,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-27 17:41:50,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=43610.666666666664, ans=0.0 +2024-07-27 17:41:51,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=43610.666666666664, ans=0.025 +2024-07-27 17:41:52,685 INFO [train.py:1114] (1/4) Epoch 4, batch 2050, loss[loss=0.232, simple_loss=0.3072, pruned_loss=0.07839, over 4613.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3397, pruned_loss=0.09706, over 938831.71 frames. ], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:53,318 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.522e+01 6.397e+01 6.971e+01 8.145e+01 1.317e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-27 17:41:56,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=43624.0, ans=0.025 +2024-07-27 17:41:59,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.38 vs. 
limit=15.0 +2024-07-27 17:42:06,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=43650.666666666664, ans=0.125 +2024-07-27 17:42:16,152 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=6.970e+00 +2024-07-27 17:42:21,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=43677.333333333336, ans=0.2 +2024-07-27 17:42:21,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=43677.333333333336, ans=0.0013744927536231879 +2024-07-27 17:42:25,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=43690.666666666664, ans=0.0 +2024-07-27 17:42:25,800 INFO [train.py:1114] (1/4) Epoch 4, batch 2100, loss[loss=0.2697, simple_loss=0.3353, pruned_loss=0.1021, over 4762.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3381, pruned_loss=0.09636, over 941058.47 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:42:27,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=43690.666666666664, ans=0.04949747468305833 +2024-07-27 17:42:33,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=43704.0, ans=0.125 +2024-07-27 17:42:37,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=43704.0, ans=0.0 +2024-07-27 17:42:38,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0 +2024-07-27 17:42:40,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43717.333333333336, ans=0.125 +2024-07-27 17:42:43,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=43717.333333333336, ans=0.125 +2024-07-27 17:42:47,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=43730.666666666664, ans=0.035 +2024-07-27 17:42:51,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=43744.0, ans=0.5 +2024-07-27 17:42:57,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43744.0, ans=0.1 +2024-07-27 17:42:58,955 INFO [train.py:1114] (1/4) Epoch 4, batch 2150, loss[loss=0.2245, simple_loss=0.2991, pruned_loss=0.07493, over 4895.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3374, pruned_loss=0.09598, over 944005.53 frames. 
], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:42:59,569 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.474e+01 6.533e+01 7.336e+01 8.956e+01 1.647e+02, threshold=1.467e+02, percent-clipped=5.0 +2024-07-27 17:43:02,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=43757.333333333336, ans=0.025 +2024-07-27 17:43:07,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43770.666666666664, ans=0.1 +2024-07-27 17:43:11,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=43784.0, ans=0.125 +2024-07-27 17:43:24,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=43797.333333333336, ans=0.025 +2024-07-27 17:43:26,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43810.666666666664, ans=0.1 +2024-07-27 17:43:30,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. limit=15.0 +2024-07-27 17:43:32,597 INFO [train.py:1114] (1/4) Epoch 4, batch 2200, loss[loss=0.2517, simple_loss=0.3334, pruned_loss=0.08497, over 4814.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3378, pruned_loss=0.09624, over 942961.45 frames. ], batch size: 14, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:43:43,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.84 vs. limit=15.0 +2024-07-27 17:43:46,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.23 vs. limit=15.0 +2024-07-27 17:43:50,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=43850.666666666664, ans=0.2 +2024-07-27 17:44:08,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43864.0, ans=0.0 +2024-07-27 17:44:16,760 INFO [train.py:1114] (1/4) Epoch 4, batch 2250, loss[loss=0.2788, simple_loss=0.3566, pruned_loss=0.1005, over 4695.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3379, pruned_loss=0.0961, over 941637.54 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:44:17,406 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.403e+01 7.459e+01 9.142e+01 2.382e+02, threshold=1.492e+02, percent-clipped=1.0 +2024-07-27 17:44:21,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.80 vs. limit=22.5 +2024-07-27 17:44:39,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=43917.333333333336, ans=0.0013223188405797094 +2024-07-27 17:44:40,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=43917.333333333336, ans=0.125 +2024-07-27 17:44:46,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.97 vs. 
limit=15.0 +2024-07-27 17:44:58,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43944.0, ans=0.0 +2024-07-27 17:44:59,212 INFO [train.py:1114] (1/4) Epoch 4, batch 2300, loss[loss=0.2615, simple_loss=0.3329, pruned_loss=0.09508, over 4953.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3355, pruned_loss=0.09464, over 939536.81 frames. ], batch size: 12, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:45:10,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=43957.333333333336, ans=0.125 +2024-07-27 17:45:11,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43957.333333333336, ans=0.125 +2024-07-27 17:45:19,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.27 vs. limit=22.5 +2024-07-27 17:45:20,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=43984.0, ans=0.0 +2024-07-27 17:45:20,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=43984.0, ans=0.0 +2024-07-27 17:45:20,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=43984.0, ans=0.2 +2024-07-27 17:45:22,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=43984.0, ans=0.025 +2024-07-27 17:45:33,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.93 vs. limit=15.0 +2024-07-27 17:45:34,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=44010.666666666664, ans=0.0 +2024-07-27 17:45:41,389 INFO [train.py:1114] (1/4) Epoch 4, batch 2350, loss[loss=0.2401, simple_loss=0.3153, pruned_loss=0.08249, over 4638.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3365, pruned_loss=0.09514, over 941485.99 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:45:41,989 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 6.786e+01 8.508e+01 1.044e+02 1.776e+02, threshold=1.702e+02, percent-clipped=2.0 +2024-07-27 17:45:44,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44024.0, ans=0.1 +2024-07-27 17:45:48,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44024.0, ans=0.1 +2024-07-27 17:45:49,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.05 vs. limit=6.0 +2024-07-27 17:45:52,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=44037.333333333336, ans=0.0012962318840579693 +2024-07-27 17:45:54,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.70 vs. 
limit=15.0 +2024-07-27 17:46:01,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=44050.666666666664, ans=0.0 +2024-07-27 17:46:02,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=44050.666666666664, ans=15.0 +2024-07-27 17:46:05,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=44064.0, ans=0.2 +2024-07-27 17:46:13,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=44077.333333333336, ans=0.125 +2024-07-27 17:46:26,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=44077.333333333336, ans=0.125 +2024-07-27 17:46:28,013 INFO [train.py:1114] (1/4) Epoch 4, batch 2400, loss[loss=0.2744, simple_loss=0.3485, pruned_loss=0.1001, over 4628.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3366, pruned_loss=0.09555, over 941162.31 frames. ], batch size: 12, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:46:31,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=44090.666666666664, ans=0.07 +2024-07-27 17:46:32,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=44090.666666666664, ans=0.0 +2024-07-27 17:46:38,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=44104.0, ans=0.125 +2024-07-27 17:46:40,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-27 17:47:01,649 INFO [train.py:1114] (1/4) Epoch 4, batch 2450, loss[loss=0.2671, simple_loss=0.3504, pruned_loss=0.0919, over 4687.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3384, pruned_loss=0.09654, over 937178.84 frames. ], batch size: 13, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:47:02,263 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.348e+01 7.314e+01 8.641e+01 1.426e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 17:47:14,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=44170.666666666664, ans=0.0 +2024-07-27 17:47:26,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=44197.333333333336, ans=0.07 +2024-07-27 17:47:28,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=44197.333333333336, ans=0.0 +2024-07-27 17:47:35,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=44210.666666666664, ans=0.001258550724637682 +2024-07-27 17:47:39,894 INFO [train.py:1114] (1/4) Epoch 4, batch 2500, loss[loss=0.3077, simple_loss=0.3874, pruned_loss=0.114, over 4813.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3383, pruned_loss=0.09641, over 939170.77 frames. 
], batch size: 14, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:47:52,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=44250.666666666664, ans=0.125 +2024-07-27 17:48:07,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.50 vs. limit=15.0 +2024-07-27 17:48:14,557 INFO [train.py:1114] (1/4) Epoch 4, batch 2550, loss[loss=0.2498, simple_loss=0.3091, pruned_loss=0.09527, over 4791.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3384, pruned_loss=0.09636, over 938671.07 frames. ], batch size: 11, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:48:15,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.441e+01 6.325e+01 6.836e+01 7.764e+01 1.443e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 17:48:16,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0 +2024-07-27 17:48:30,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=44317.333333333336, ans=0.125 +2024-07-27 17:48:31,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=44317.333333333336, ans=0.0012353623188405792 +2024-07-27 17:48:32,003 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.42 vs. limit=22.5 +2024-07-27 17:48:42,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.52 vs. limit=6.0 +2024-07-27 17:48:49,561 INFO [train.py:1114] (1/4) Epoch 4, batch 2600, loss[loss=0.2801, simple_loss=0.3503, pruned_loss=0.105, over 4905.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3396, pruned_loss=0.09695, over 937726.92 frames. ], batch size: 13, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:48:50,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=44357.333333333336, ans=0.125 +2024-07-27 17:48:55,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.54 vs. limit=12.0 +2024-07-27 17:49:01,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.19 vs. limit=15.0 +2024-07-27 17:49:08,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=44384.0, ans=0.125 +2024-07-27 17:49:21,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=44410.666666666664, ans=0.0 +2024-07-27 17:49:24,927 INFO [train.py:1114] (1/4) Epoch 4, batch 2650, loss[loss=0.306, simple_loss=0.37, pruned_loss=0.121, over 4603.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3401, pruned_loss=0.09707, over 939986.23 frames. 
], batch size: 16, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:49:25,612 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.678e+01 7.695e+01 9.100e+01 1.480e+02, threshold=1.539e+02, percent-clipped=3.0 +2024-07-27 17:49:47,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.39 vs. limit=22.5 +2024-07-27 17:49:48,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=44464.0, ans=0.0012034782608695664 +2024-07-27 17:49:53,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=44464.0, ans=0.05 +2024-07-27 17:49:55,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=44477.333333333336, ans=0.125 +2024-07-27 17:50:06,085 INFO [train.py:1114] (1/4) Epoch 4, batch 2700, loss[loss=0.2571, simple_loss=0.3369, pruned_loss=0.08866, over 4735.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3417, pruned_loss=0.09829, over 940195.46 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:50:21,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=44517.333333333336, ans=0.125 +2024-07-27 17:50:40,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=44544.0, ans=0.0 +2024-07-27 17:50:41,736 INFO [train.py:1114] (1/4) Epoch 4, batch 2750, loss[loss=0.2562, simple_loss=0.3433, pruned_loss=0.08453, over 4707.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.339, pruned_loss=0.09656, over 940187.60 frames. ], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:50:42,303 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.612e+01 7.573e+01 9.586e+01 1.480e+02, threshold=1.515e+02, percent-clipped=0.0 +2024-07-27 17:50:44,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.67 vs. limit=22.5 +2024-07-27 17:50:53,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.18 vs. limit=15.0 +2024-07-27 17:50:57,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.38 vs. limit=22.5 +2024-07-27 17:50:58,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=44584.0, ans=0.125 +2024-07-27 17:51:00,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=44584.0, ans=12.0 +2024-07-27 17:51:19,254 INFO [train.py:1114] (1/4) Epoch 4, batch 2800, loss[loss=0.3857, simple_loss=0.4065, pruned_loss=0.1825, over 3251.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3396, pruned_loss=0.0977, over 938024.32 frames. ], batch size: 35, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:51:21,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44624.0, ans=0.1 +2024-07-27 17:51:23,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.18 vs. 
limit=15.0 +2024-07-27 17:51:26,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=44637.333333333336, ans=0.0 +2024-07-27 17:51:27,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-27 17:51:37,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=44650.666666666664, ans=0.09899494936611666 +2024-07-27 17:51:51,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0 +2024-07-27 17:51:57,062 INFO [train.py:1114] (1/4) Epoch 4, batch 2850, loss[loss=0.2782, simple_loss=0.3487, pruned_loss=0.1038, over 4954.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3399, pruned_loss=0.09819, over 936787.24 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:51:57,803 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.785e+01 7.509e+01 8.652e+01 1.296e+02, threshold=1.502e+02, percent-clipped=0.0 +2024-07-27 17:52:06,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=44704.0, ans=0.0 +2024-07-27 17:52:07,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=44704.0, ans=0.125 +2024-07-27 17:52:11,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=44717.333333333336, ans=0.0 +2024-07-27 17:52:12,466 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.459e+00 +2024-07-27 17:52:12,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=44717.333333333336, ans=0.025 +2024-07-27 17:52:21,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=44730.666666666664, ans=0.07 +2024-07-27 17:52:28,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.17 vs. limit=12.0 +2024-07-27 17:52:33,420 INFO [train.py:1114] (1/4) Epoch 4, batch 2900, loss[loss=0.2287, simple_loss=0.2982, pruned_loss=0.0796, over 4828.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3415, pruned_loss=0.09886, over 940609.74 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:00,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=44810.666666666664, ans=0.015 +2024-07-27 17:53:01,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=44810.666666666664, ans=0.2 +2024-07-27 17:53:03,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=44810.666666666664, ans=0.0011281159420289868 +2024-07-27 17:53:07,378 INFO [train.py:1114] (1/4) Epoch 4, batch 2950, loss[loss=0.2472, simple_loss=0.3119, pruned_loss=0.09123, over 4712.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3402, pruned_loss=0.09856, over 939207.16 frames. 
], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:07,995 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 6.448e+01 7.326e+01 8.943e+01 1.391e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 17:53:15,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=44837.333333333336, ans=0.0011223188405797089 +2024-07-27 17:53:30,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=44864.0, ans=22.5 +2024-07-27 17:53:34,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44877.333333333336, ans=0.1 +2024-07-27 17:53:37,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=44877.333333333336, ans=0.025 +2024-07-27 17:53:41,134 INFO [train.py:1114] (1/4) Epoch 4, batch 3000, loss[loss=0.251, simple_loss=0.3307, pruned_loss=0.08567, over 4765.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3402, pruned_loss=0.09826, over 938834.66 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:41,135 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 17:53:51,318 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.0718, 5.8960, 5.5246, 5.6297], device='cuda:1') +2024-07-27 17:53:52,965 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2168, simple_loss=0.3177, pruned_loss=0.05793, over 944034.00 frames. +2024-07-27 17:53:52,965 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 17:54:03,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=44904.0, ans=0.2 +2024-07-27 17:54:16,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=44917.333333333336, ans=0.125 +2024-07-27 17:54:22,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.26 vs. limit=15.0 +2024-07-27 17:54:26,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.15 vs. limit=22.5 +2024-07-27 17:54:26,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=44930.666666666664, ans=0.025 +2024-07-27 17:54:34,895 INFO [train.py:1114] (1/4) Epoch 4, batch 3050, loss[loss=0.2488, simple_loss=0.3315, pruned_loss=0.08306, over 4633.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3399, pruned_loss=0.09808, over 937463.59 frames. 
], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:54:42,781 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.188e+01 6.571e+01 7.374e+01 8.801e+01 1.359e+02, threshold=1.475e+02, percent-clipped=0.0 +2024-07-27 17:54:44,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=44957.333333333336, ans=0.125 +2024-07-27 17:54:46,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=44957.333333333336, ans=0.0010962318840579705 +2024-07-27 17:54:53,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=44970.666666666664, ans=0.0010933333333333333 +2024-07-27 17:57:15,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.05 vs. limit=12.0 +2024-07-27 17:57:27,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=45010.666666666664, ans=0.125 +2024-07-27 17:57:27,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=45010.666666666664, ans=0.125 +2024-07-27 17:57:39,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45010.666666666664, ans=0.125 +2024-07-27 17:57:42,494 INFO [train.py:1114] (1/4) Epoch 4, batch 3100, loss[loss=0.308, simple_loss=0.3743, pruned_loss=0.1209, over 4679.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3386, pruned_loss=0.09779, over 938005.54 frames. ], batch size: 16, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:57:45,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=45024.0, ans=0.025 +2024-07-27 17:57:50,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45037.333333333336, ans=0.1 +2024-07-27 17:58:01,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=45050.666666666664, ans=0.0 +2024-07-27 17:58:07,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=45050.666666666664, ans=0.0 +2024-07-27 17:58:21,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=45077.333333333336, ans=0.0 +2024-07-27 17:58:47,038 INFO [train.py:1114] (1/4) Epoch 4, batch 3150, loss[loss=0.2965, simple_loss=0.3777, pruned_loss=0.1077, over 4622.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3388, pruned_loss=0.09825, over 938412.01 frames. 
], batch size: 17, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 17:58:47,641 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+01 6.605e+01 7.303e+01 8.284e+01 1.349e+02, threshold=1.461e+02, percent-clipped=0.0 +2024-07-27 17:58:47,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=45090.666666666664, ans=0.2 +2024-07-27 17:59:05,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=45117.333333333336, ans=0.0 +2024-07-27 17:59:17,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0 +2024-07-27 17:59:23,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=45144.0, ans=0.025 +2024-07-27 17:59:25,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.10 vs. limit=15.0 +2024-07-27 17:59:31,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=45157.333333333336, ans=0.0 +2024-07-27 17:59:31,926 INFO [train.py:1114] (1/4) Epoch 4, batch 3200, loss[loss=0.2631, simple_loss=0.3411, pruned_loss=0.09254, over 4831.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3375, pruned_loss=0.09705, over 940134.32 frames. ], batch size: 13, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 17:59:40,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=45170.666666666664, ans=0.0 +2024-07-27 17:59:49,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=45184.0, ans=0.04949747468305833 +2024-07-27 18:00:04,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=45184.0, ans=0.95 +2024-07-27 18:00:57,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.46 vs. limit=10.0 +2024-07-27 18:01:12,030 INFO [train.py:1114] (1/4) Epoch 4, batch 3250, loss[loss=0.28, simple_loss=0.3462, pruned_loss=0.1068, over 4936.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3379, pruned_loss=0.0968, over 941105.26 frames. 
], batch size: 14, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:01:12,742 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.665e+01 7.646e+01 9.547e+01 1.516e+02, threshold=1.529e+02, percent-clipped=1.0 +2024-07-27 18:01:27,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45237.333333333336, ans=0.125 +2024-07-27 18:01:35,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45250.666666666664, ans=0.1 +2024-07-27 18:01:41,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=45264.0, ans=0.0 +2024-07-27 18:01:45,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45277.333333333336, ans=0.125 +2024-07-27 18:01:50,279 INFO [train.py:1114] (1/4) Epoch 4, batch 3300, loss[loss=0.2657, simple_loss=0.3398, pruned_loss=0.09583, over 4674.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3364, pruned_loss=0.09621, over 941197.00 frames. ], batch size: 19, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:01:52,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.70 vs. limit=6.0 +2024-07-27 18:01:54,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=45290.666666666664, ans=0.125 +2024-07-27 18:01:57,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=45304.0, ans=0.1 +2024-07-27 18:02:06,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=22.5 +2024-07-27 18:02:09,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=45317.333333333336, ans=0.125 +2024-07-27 18:02:24,054 INFO [train.py:1114] (1/4) Epoch 4, batch 3350, loss[loss=0.326, simple_loss=0.3968, pruned_loss=0.1276, over 4640.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3374, pruned_loss=0.09661, over 938641.67 frames. ], batch size: 17, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:02:24,731 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.140e+01 6.495e+01 7.490e+01 8.565e+01 1.368e+02, threshold=1.498e+02, percent-clipped=0.0 +2024-07-27 18:02:46,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=45397.333333333336, ans=0.125 +2024-07-27 18:02:53,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=45410.666666666664, ans=0.125 +2024-07-27 18:02:57,870 INFO [train.py:1114] (1/4) Epoch 4, batch 3400, loss[loss=0.2293, simple_loss=0.2965, pruned_loss=0.0811, over 4803.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3378, pruned_loss=0.09735, over 937374.42 frames. 
], batch size: 11, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:02:59,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45424.0, ans=0.1 +2024-07-27 18:03:00,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.96 vs. limit=10.0 +2024-07-27 18:03:02,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=45424.0, ans=0.125 +2024-07-27 18:03:13,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45437.333333333336, ans=0.125 +2024-07-27 18:03:13,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=45437.333333333336, ans=0.125 +2024-07-27 18:03:14,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45437.333333333336, ans=0.125 +2024-07-27 18:03:21,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=45450.666666666664, ans=0.125 +2024-07-27 18:03:33,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=45477.333333333336, ans=0.2 +2024-07-27 18:03:33,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=45477.333333333336, ans=0.125 +2024-07-27 18:03:34,125 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:03:39,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=45477.333333333336, ans=0.0009831884057971002 +2024-07-27 18:03:42,182 INFO [train.py:1114] (1/4) Epoch 4, batch 3450, loss[loss=0.3069, simple_loss=0.379, pruned_loss=0.1174, over 4731.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3379, pruned_loss=0.09706, over 937710.63 frames. ], batch size: 19, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:03:42,788 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.545e+01 7.401e+01 8.660e+01 1.564e+02, threshold=1.480e+02, percent-clipped=3.0 +2024-07-27 18:03:45,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=45490.666666666664, ans=0.0 +2024-07-27 18:03:58,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0 +2024-07-27 18:04:02,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=45517.333333333336, ans=0.125 +2024-07-27 18:04:13,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=45530.666666666664, ans=0.125 +2024-07-27 18:04:21,761 INFO [train.py:1114] (1/4) Epoch 4, batch 3500, loss[loss=0.2612, simple_loss=0.3395, pruned_loss=0.09142, over 4941.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3374, pruned_loss=0.09683, over 938307.10 frames. 
], batch size: 12, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:04:23,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=45557.333333333336, ans=0.5 +2024-07-27 18:04:27,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=45570.666666666664, ans=0.125 +2024-07-27 18:04:37,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.11 vs. limit=15.0 +2024-07-27 18:04:42,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=45597.333333333336, ans=0.125 +2024-07-27 18:04:46,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=45597.333333333336, ans=0.0 +2024-07-27 18:04:53,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=45610.666666666664, ans=0.125 +2024-07-27 18:04:55,587 INFO [train.py:1114] (1/4) Epoch 4, batch 3550, loss[loss=0.3073, simple_loss=0.3698, pruned_loss=0.1224, over 4661.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3389, pruned_loss=0.0975, over 938752.61 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:04:56,223 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.373e+01 7.017e+01 7.924e+01 1.305e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-27 18:04:57,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=45624.0, ans=0.0 +2024-07-27 18:05:07,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=45637.333333333336, ans=0.2 +2024-07-27 18:05:13,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=45650.666666666664, ans=0.125 +2024-07-27 18:05:27,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=12.0 +2024-07-27 18:05:33,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.45 vs. limit=22.5 +2024-07-27 18:05:35,440 INFO [train.py:1114] (1/4) Epoch 4, batch 3600, loss[loss=0.2572, simple_loss=0.3219, pruned_loss=0.09629, over 4958.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3372, pruned_loss=0.09602, over 940878.38 frames. 
], batch size: 13, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:05:42,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=45704.0, ans=0.0 +2024-07-27 18:05:44,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45704.0, ans=0.125 +2024-07-27 18:05:53,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45717.333333333336, ans=0.125 +2024-07-27 18:06:06,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=45744.0, ans=0.125 +2024-07-27 18:06:11,488 INFO [train.py:1114] (1/4) Epoch 4, batch 3650, loss[loss=0.3025, simple_loss=0.3614, pruned_loss=0.1218, over 4909.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3367, pruned_loss=0.09569, over 941268.37 frames. ], batch size: 15, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:06:12,144 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.653e+01 7.624e+01 9.000e+01 1.438e+02, threshold=1.525e+02, percent-clipped=1.0 +2024-07-27 18:06:19,067 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:06:27,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=45784.0, ans=0.025 +2024-07-27 18:06:27,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0 +2024-07-27 18:06:30,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=45784.0, ans=0.125 +2024-07-27 18:06:44,753 INFO [train.py:1114] (1/4) Epoch 4, batch 3700, loss[loss=0.2947, simple_loss=0.356, pruned_loss=0.1167, over 4934.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3362, pruned_loss=0.09527, over 942114.03 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:06:44,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45824.0, ans=0.1 +2024-07-27 18:06:48,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=45824.0, ans=0.025 +2024-07-27 18:06:58,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45850.666666666664, ans=0.125 +2024-07-27 18:07:08,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45864.0, ans=0.125 +2024-07-27 18:07:14,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0 +2024-07-27 18:07:17,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=45877.333333333336, ans=0.00089623188405797 +2024-07-27 18:07:20,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.91 vs. limit=22.5 +2024-07-27 18:07:21,638 INFO [train.py:1114] (1/4) Epoch 4, batch 3750, loss[loss=0.2557, simple_loss=0.316, pruned_loss=0.09771, over 4803.00 frames. 
], tot_loss[loss=0.2653, simple_loss=0.3375, pruned_loss=0.09654, over 943666.63 frames. ], batch size: 11, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:07:22,322 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.296e+01 6.507e+01 7.242e+01 8.300e+01 1.182e+02, threshold=1.448e+02, percent-clipped=0.0 +2024-07-27 18:07:31,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=45904.0, ans=0.2 +2024-07-27 18:07:36,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=45917.333333333336, ans=0.0 +2024-07-27 18:07:41,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=45930.666666666664, ans=0.0 +2024-07-27 18:07:42,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=45930.666666666664, ans=0.125 +2024-07-27 18:07:45,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.98 vs. limit=15.0 +2024-07-27 18:07:54,668 INFO [train.py:1114] (1/4) Epoch 4, batch 3800, loss[loss=0.2301, simple_loss=0.3087, pruned_loss=0.07574, over 4817.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3372, pruned_loss=0.09687, over 941816.42 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:07:55,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=45957.333333333336, ans=0.125 +2024-07-27 18:08:08,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=45984.0, ans=0.09899494936611666 +2024-07-27 18:08:28,683 INFO [train.py:1114] (1/4) Epoch 4, batch 3850, loss[loss=0.287, simple_loss=0.3619, pruned_loss=0.1061, over 4905.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3369, pruned_loss=0.09588, over 942686.91 frames. ], batch size: 17, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:08:30,035 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.355e+01 6.600e+01 7.617e+01 8.935e+01 1.540e+02, threshold=1.523e+02, percent-clipped=1.0 +2024-07-27 18:08:44,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=46050.666666666664, ans=0.05 +2024-07-27 18:08:47,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46050.666666666664, ans=0.1 +2024-07-27 18:08:50,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46064.0, ans=0.125 +2024-07-27 18:08:56,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=46077.333333333336, ans=0.0 +2024-07-27 18:08:59,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=46077.333333333336, ans=0.025 +2024-07-27 18:09:00,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46077.333333333336, ans=0.125 +2024-07-27 18:09:03,316 INFO [train.py:1114] (1/4) Epoch 4, batch 3900, loss[loss=0.2564, simple_loss=0.3297, pruned_loss=0.09152, over 4812.00 frames. 
], tot_loss[loss=0.2638, simple_loss=0.3367, pruned_loss=0.09544, over 943004.15 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:09:08,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=46090.666666666664, ans=0.05 +2024-07-27 18:09:13,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.35 vs. limit=6.0 +2024-07-27 18:09:21,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=46117.333333333336, ans=0.0 +2024-07-27 18:09:33,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=46144.0, ans=0.125 +2024-07-27 18:09:42,260 INFO [train.py:1114] (1/4) Epoch 4, batch 3950, loss[loss=0.2861, simple_loss=0.3636, pruned_loss=0.1043, over 4847.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3376, pruned_loss=0.09598, over 944871.66 frames. ], batch size: 16, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:09:44,107 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.796e+01 7.722e+01 1.006e+02 1.504e+02, threshold=1.544e+02, percent-clipped=0.0 +2024-07-27 18:09:44,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=46157.333333333336, ans=0.125 +2024-07-27 18:09:46,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=46157.333333333336, ans=0.2 +2024-07-27 18:09:46,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46157.333333333336, ans=0.1 +2024-07-27 18:09:46,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=46157.333333333336, ans=0.025 +2024-07-27 18:09:48,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=46157.333333333336, ans=0.125 +2024-07-27 18:09:49,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.03 vs. limit=15.0 +2024-07-27 18:10:00,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=46184.0, ans=0.0008295652173913037 +2024-07-27 18:10:01,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=46184.0, ans=0.0 +2024-07-27 18:10:09,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=46197.333333333336, ans=0.04949747468305833 +2024-07-27 18:10:20,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=46197.333333333336, ans=0.0 +2024-07-27 18:10:30,480 INFO [train.py:1114] (1/4) Epoch 4, batch 4000, loss[loss=0.2336, simple_loss=0.3162, pruned_loss=0.07552, over 4773.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3384, pruned_loss=0.09715, over 941055.88 frames. 
], batch size: 12, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:10:34,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=46224.0, ans=0.09899494936611666 +2024-07-27 18:10:39,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=46237.333333333336, ans=0.5 +2024-07-27 18:10:40,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=46237.333333333336, ans=0.125 +2024-07-27 18:10:45,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=46250.666666666664, ans=0.2 +2024-07-27 18:10:53,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.05 vs. limit=10.0 +2024-07-27 18:10:58,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=46277.333333333336, ans=0.0 +2024-07-27 18:11:02,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=46277.333333333336, ans=0.2 +2024-07-27 18:11:06,047 INFO [train.py:1114] (1/4) Epoch 4, batch 4050, loss[loss=0.3285, simple_loss=0.3716, pruned_loss=0.1427, over 3304.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3386, pruned_loss=0.09777, over 939432.25 frames. ], batch size: 35, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:11:07,317 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.344e+01 6.516e+01 7.339e+01 8.508e+01 1.190e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 18:11:07,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.29 vs. limit=6.0 +2024-07-27 18:11:14,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=46290.666666666664, ans=0.5 +2024-07-27 18:11:42,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46344.0, ans=0.125 +2024-07-27 18:11:44,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=46344.0, ans=0.95 +2024-07-27 18:11:47,335 INFO [train.py:1114] (1/4) Epoch 4, batch 4100, loss[loss=0.2743, simple_loss=0.3534, pruned_loss=0.09766, over 4901.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3386, pruned_loss=0.09753, over 938219.17 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:11:52,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46357.333333333336, ans=0.1 +2024-07-27 18:11:58,218 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:12:08,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=46397.333333333336, ans=0.125 +2024-07-27 18:12:13,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.57 vs. 
limit=10.0 +2024-07-27 18:12:19,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46410.666666666664, ans=0.125 +2024-07-27 18:12:21,675 INFO [train.py:1114] (1/4) Epoch 4, batch 4150, loss[loss=0.27, simple_loss=0.3448, pruned_loss=0.09754, over 4819.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3372, pruned_loss=0.09661, over 937633.85 frames. ], batch size: 13, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:12:22,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=46424.0, ans=0.0 +2024-07-27 18:12:22,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.950e+01 8.086e+01 1.014e+02 1.411e+02, threshold=1.617e+02, percent-clipped=0.0 +2024-07-27 18:12:28,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46437.333333333336, ans=0.1 +2024-07-27 18:12:29,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46437.333333333336, ans=0.1 +2024-07-27 18:12:31,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=46437.333333333336, ans=0.125 +2024-07-27 18:12:32,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=46437.333333333336, ans=0.125 +2024-07-27 18:12:34,529 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:12:52,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=46464.0, ans=0.2 +2024-07-27 18:12:55,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46477.333333333336, ans=0.125 +2024-07-27 18:12:58,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.00 vs. limit=22.5 +2024-07-27 18:12:58,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=46477.333333333336, ans=0.125 +2024-07-27 18:13:01,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=46477.333333333336, ans=0.125 +2024-07-27 18:13:02,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46477.333333333336, ans=0.1 +2024-07-27 18:13:02,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=46477.333333333336, ans=0.0007657971014492747 +2024-07-27 18:13:03,901 INFO [train.py:1114] (1/4) Epoch 4, batch 4200, loss[loss=0.2741, simple_loss=0.3425, pruned_loss=0.1028, over 4889.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3366, pruned_loss=0.09628, over 939293.47 frames. 
], batch size: 15, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:13:03,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=46490.666666666664, ans=0.125 +2024-07-27 18:13:05,663 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=15.0 +2024-07-27 18:13:28,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46530.666666666664, ans=0.125 +2024-07-27 18:13:34,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=46544.0, ans=0.125 +2024-07-27 18:13:39,600 INFO [train.py:1114] (1/4) Epoch 4, batch 4250, loss[loss=0.2906, simple_loss=0.3581, pruned_loss=0.1116, over 4634.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3381, pruned_loss=0.09718, over 940600.31 frames. ], batch size: 12, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:13:40,928 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.597e+01 7.169e+01 7.931e+01 1.247e+02, threshold=1.434e+02, percent-clipped=0.0 +2024-07-27 18:14:20,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=46610.666666666664, ans=0.025 +2024-07-27 18:14:21,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.92 vs. limit=15.0 +2024-07-27 18:14:24,133 INFO [train.py:1114] (1/4) Epoch 4, batch 4300, loss[loss=0.258, simple_loss=0.3235, pruned_loss=0.09628, over 4753.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3372, pruned_loss=0.09621, over 939945.45 frames. ], batch size: 13, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:14:32,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=46637.333333333336, ans=0.125 +2024-07-27 18:14:36,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46637.333333333336, ans=0.125 +2024-07-27 18:14:39,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=46650.666666666664, ans=0.125 +2024-07-27 18:14:50,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=46677.333333333336, ans=0.125 +2024-07-27 18:14:52,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=46677.333333333336, ans=0.07 +2024-07-27 18:14:55,562 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:14:57,358 INFO [train.py:1114] (1/4) Epoch 4, batch 4350, loss[loss=0.2323, simple_loss=0.3197, pruned_loss=0.07241, over 4770.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3365, pruned_loss=0.09528, over 940598.32 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:14:58,631 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.647e+01 7.749e+01 8.957e+01 1.514e+02, threshold=1.550e+02, percent-clipped=2.0 +2024-07-27 18:15:13,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.27 vs. 
limit=15.0 +2024-07-27 18:15:21,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=46730.666666666664, ans=0.125 +2024-07-27 18:15:28,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=46744.0, ans=0.125 +2024-07-27 18:15:30,906 INFO [train.py:1114] (1/4) Epoch 4, batch 4400, loss[loss=0.2753, simple_loss=0.3699, pruned_loss=0.09036, over 4817.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3362, pruned_loss=0.09462, over 940642.66 frames. ], batch size: 14, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:15:36,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.63 vs. limit=15.0 +2024-07-27 18:15:44,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.85 vs. limit=22.5 +2024-07-27 18:15:53,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=46797.333333333336, ans=0.0006962318840579712 +2024-07-27 18:15:56,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.29 vs. limit=6.0 +2024-07-27 18:16:02,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.75 vs. limit=22.5 +2024-07-27 18:16:04,520 INFO [train.py:1114] (1/4) Epoch 4, batch 4450, loss[loss=0.2237, simple_loss=0.2965, pruned_loss=0.07552, over 4948.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3368, pruned_loss=0.09487, over 938865.71 frames. ], batch size: 12, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:16:05,840 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.574e+01 7.932e+01 1.004e+02 1.651e+02, threshold=1.586e+02, percent-clipped=3.0 +2024-07-27 18:16:09,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=46824.0, ans=0.0 +2024-07-27 18:16:13,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=46837.333333333336, ans=0.0 +2024-07-27 18:16:14,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=46837.333333333336, ans=0.025 +2024-07-27 18:16:19,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=46850.666666666664, ans=0.09899494936611666 +2024-07-27 18:16:25,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.88 vs. 
limit=15.0 +2024-07-27 18:16:25,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46864.0, ans=0.1 +2024-07-27 18:16:33,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=46877.333333333336, ans=0.04949747468305833 +2024-07-27 18:16:33,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=46877.333333333336, ans=0.125 +2024-07-27 18:16:38,377 INFO [train.py:1114] (1/4) Epoch 4, batch 4500, loss[loss=0.2301, simple_loss=0.3264, pruned_loss=0.06685, over 4745.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.336, pruned_loss=0.09413, over 937979.66 frames. ], batch size: 14, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:16:40,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=46890.666666666664, ans=0.125 +2024-07-27 18:16:42,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.27 vs. limit=22.5 +2024-07-27 18:16:43,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=46890.666666666664, ans=0.5 +2024-07-27 18:16:50,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=46904.0, ans=0.2 +2024-07-27 18:17:08,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46944.0, ans=0.125 +2024-07-27 18:17:14,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=46944.0, ans=0.0006643478260869567 +2024-07-27 18:17:15,574 INFO [train.py:1114] (1/4) Epoch 4, batch 4550, loss[loss=0.2399, simple_loss=0.3123, pruned_loss=0.08378, over 4904.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3354, pruned_loss=0.0942, over 939957.70 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 16.0 +2024-07-27 18:17:17,509 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.369e+01 6.640e+01 7.268e+01 8.274e+01 1.292e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-27 18:17:23,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=46970.666666666664, ans=0.0 +2024-07-27 18:17:27,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=46970.666666666664, ans=0.025 +2024-07-27 18:17:29,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46984.0, ans=0.125 +2024-07-27 18:17:35,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=46997.333333333336, ans=0.0 +2024-07-27 18:17:36,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.41 vs. limit=15.0 +2024-07-27 18:17:44,165 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:17:49,271 INFO [train.py:1114] (1/4) Epoch 4, batch 4600, loss[loss=0.3051, simple_loss=0.3811, pruned_loss=0.1146, over 4417.00 frames. 
], tot_loss[loss=0.2627, simple_loss=0.3356, pruned_loss=0.09485, over 938368.09 frames. ], batch size: 21, lr: 1.79e-02, grad_scale: 16.0 +2024-07-27 18:18:06,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=47050.666666666664, ans=0.09899494936611666 +2024-07-27 18:18:10,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0 +2024-07-27 18:18:22,634 INFO [train.py:1114] (1/4) Epoch 4, batch 4650, loss[loss=0.2992, simple_loss=0.3699, pruned_loss=0.1142, over 4840.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3371, pruned_loss=0.09486, over 940429.06 frames. ], batch size: 16, lr: 1.79e-02, grad_scale: 8.0 +2024-07-27 18:18:25,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.570e+01 7.431e+01 9.301e+01 1.835e+02, threshold=1.486e+02, percent-clipped=1.0 +2024-07-27 18:18:43,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=47104.0, ans=0.025 +2024-07-27 18:18:47,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47117.333333333336, ans=0.125 +2024-07-27 18:19:06,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47144.0, ans=0.125 +2024-07-27 18:19:09,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47144.0, ans=0.125 +2024-07-27 18:19:10,948 INFO [train.py:1114] (1/4) Epoch 4, batch 4700, loss[loss=0.2295, simple_loss=0.2984, pruned_loss=0.08034, over 4713.00 frames. ], tot_loss[loss=0.263, simple_loss=0.336, pruned_loss=0.09497, over 937382.14 frames. 
], batch size: 11, lr: 1.79e-02, grad_scale: 8.0 +2024-07-27 18:19:11,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47157.333333333336, ans=0.0 +2024-07-27 18:19:16,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=47157.333333333336, ans=0.2 +2024-07-27 18:19:18,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47170.666666666664, ans=0.125 +2024-07-27 18:19:21,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47170.666666666664, ans=0.125 +2024-07-27 18:19:23,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=47184.0, ans=0.0006121739130434782 +2024-07-27 18:19:28,536 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:19:29,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=47184.0, ans=15.0 +2024-07-27 18:19:36,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=47197.333333333336, ans=0.0 +2024-07-27 18:19:44,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=47210.666666666664, ans=15.0 +2024-07-27 18:19:45,065 INFO [train.py:1114] (1/4) Epoch 4, batch 4750, loss[loss=0.2427, simple_loss=0.3102, pruned_loss=0.08761, over 4541.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3361, pruned_loss=0.09523, over 935654.17 frames. ], batch size: 21, lr: 1.78e-02, grad_scale: 8.0 +2024-07-27 18:19:47,743 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.439e+01 7.166e+01 9.768e+01 1.474e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 18:19:52,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=47237.333333333336, ans=0.09899494936611666 +2024-07-27 18:19:52,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47237.333333333336, ans=0.125 +2024-07-27 18:19:53,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=47237.333333333336, ans=0.025 +2024-07-27 18:20:08,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=47264.0, ans=0.1 +2024-07-27 18:20:12,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=47277.333333333336, ans=0.025 +2024-07-27 18:20:13,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.62 vs. limit=15.0 +2024-07-27 18:20:19,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47290.666666666664, ans=0.1 +2024-07-27 18:20:19,552 INFO [train.py:1114] (1/4) Epoch 4, batch 4800, loss[loss=0.2514, simple_loss=0.3302, pruned_loss=0.08632, over 4695.00 frames. 
], tot_loss[loss=0.2636, simple_loss=0.3361, pruned_loss=0.09553, over 933044.43 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:20:19,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.17 vs. limit=12.0 +2024-07-27 18:20:21,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.29 vs. limit=22.5 +2024-07-27 18:20:23,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47290.666666666664, ans=0.125 +2024-07-27 18:20:35,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=47304.0, ans=0.05 +2024-07-27 18:20:36,410 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:20:43,277 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:20:45,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47330.666666666664, ans=0.125 +2024-07-27 18:20:46,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=47330.666666666664, ans=0.125 +2024-07-27 18:20:55,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47344.0, ans=0.125 +2024-07-27 18:21:03,391 INFO [train.py:1114] (1/4) Epoch 4, batch 4850, loss[loss=0.2886, simple_loss=0.3598, pruned_loss=0.1087, over 4739.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3359, pruned_loss=0.09525, over 932325.04 frames. ], batch size: 14, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:21:03,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=47357.333333333336, ans=0.125 +2024-07-27 18:21:05,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=47357.333333333336, ans=0.0 +2024-07-27 18:21:06,065 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.442e+01 7.162e+01 7.877e+01 1.649e+02, threshold=1.432e+02, percent-clipped=2.0 +2024-07-27 18:21:15,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.14 vs. limit=22.5 +2024-07-27 18:21:17,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.27 vs. limit=6.0 +2024-07-27 18:21:26,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=47397.333333333336, ans=0.125 +2024-07-27 18:21:27,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.77 vs. limit=15.0 +2024-07-27 18:21:30,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.59 vs. 
limit=22.5 +2024-07-27 18:21:37,086 INFO [train.py:1114] (1/4) Epoch 4, batch 4900, loss[loss=0.2772, simple_loss=0.3589, pruned_loss=0.09774, over 4758.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3355, pruned_loss=0.09493, over 933924.24 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:21:44,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=47437.333333333336, ans=0.125 +2024-07-27 18:21:58,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=47464.0, ans=0.0005513043478260881 +2024-07-27 18:22:00,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=47464.0, ans=0.2 +2024-07-27 18:22:04,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47477.333333333336, ans=0.125 +2024-07-27 18:22:05,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=47477.333333333336, ans=0.0005484057971014492 +2024-07-27 18:22:14,196 INFO [train.py:1114] (1/4) Epoch 4, batch 4950, loss[loss=0.3001, simple_loss=0.3531, pruned_loss=0.1235, over 3513.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3368, pruned_loss=0.09579, over 931399.88 frames. ], batch size: 35, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:22:16,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 6.647e+01 7.619e+01 9.936e+01 1.671e+02, threshold=1.524e+02, percent-clipped=3.0 +2024-07-27 18:22:25,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=47504.0, ans=0.0 +2024-07-27 18:22:26,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=47504.0, ans=0.2 +2024-07-27 18:22:26,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.78 vs. limit=22.5 +2024-07-27 18:22:29,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.15 vs. limit=15.0 +2024-07-27 18:22:29,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47517.333333333336, ans=0.125 +2024-07-27 18:22:29,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.19 vs. limit=15.0 +2024-07-27 18:22:44,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.73 vs. limit=22.5 +2024-07-27 18:22:52,602 INFO [train.py:1114] (1/4) Epoch 4, batch 5000, loss[loss=0.2308, simple_loss=0.3186, pruned_loss=0.07147, over 4669.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.336, pruned_loss=0.09495, over 935358.91 frames. 
], batch size: 14, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:23:11,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=47584.0, ans=0.125 +2024-07-27 18:23:13,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=47597.333333333336, ans=0.000522318840579709 +2024-07-27 18:23:14,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=47597.333333333336, ans=10.0 +2024-07-27 18:23:23,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=47610.666666666664, ans=0.2 +2024-07-27 18:23:26,401 INFO [train.py:1114] (1/4) Epoch 4, batch 5050, loss[loss=0.1972, simple_loss=0.287, pruned_loss=0.05366, over 4861.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3342, pruned_loss=0.09437, over 938102.24 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:23:29,105 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.671e+01 7.390e+01 9.030e+01 1.584e+02, threshold=1.478e+02, percent-clipped=1.0 +2024-07-27 18:23:32,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=47624.0, ans=0.0 +2024-07-27 18:23:32,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=47637.333333333336, ans=0.0 +2024-07-27 18:23:39,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=47650.666666666664, ans=0.025 +2024-07-27 18:24:01,841 INFO [train.py:1114] (1/4) Epoch 4, batch 5100, loss[loss=0.2394, simple_loss=0.3309, pruned_loss=0.0739, over 4783.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3354, pruned_loss=0.09522, over 935602.82 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:24:29,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.74 vs. limit=10.0 +2024-07-27 18:24:29,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.05 vs. limit=15.0 +2024-07-27 18:24:32,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.02 vs. limit=15.0 +2024-07-27 18:24:35,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47744.0, ans=0.1 +2024-07-27 18:24:37,614 INFO [train.py:1114] (1/4) Epoch 4, batch 5150, loss[loss=0.3009, simple_loss=0.3632, pruned_loss=0.1193, over 4827.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3372, pruned_loss=0.09645, over 936730.52 frames. 
], batch size: 16, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:24:40,253 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.251e+01 6.747e+01 7.591e+01 8.914e+01 1.388e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 18:24:53,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47784.0, ans=0.1 +2024-07-27 18:25:08,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=47810.666666666664, ans=0.125 +2024-07-27 18:25:13,266 INFO [train.py:1114] (1/4) Epoch 4, batch 5200, loss[loss=0.269, simple_loss=0.3513, pruned_loss=0.09339, over 4665.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3362, pruned_loss=0.09572, over 936661.04 frames. ], batch size: 14, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:25:40,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=47864.0, ans=0.125 +2024-07-27 18:25:47,789 INFO [train.py:1114] (1/4) Epoch 4, batch 5250, loss[loss=0.2249, simple_loss=0.3037, pruned_loss=0.07309, over 4893.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3349, pruned_loss=0.09504, over 935630.78 frames. ], batch size: 13, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:25:50,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.549e+01 7.419e+01 9.087e+01 1.892e+02, threshold=1.484e+02, percent-clipped=1.0 +2024-07-27 18:25:51,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=47890.666666666664, ans=0.1 +2024-07-27 18:25:55,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=47904.0, ans=0.125 +2024-07-27 18:26:00,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=47904.0, ans=0.0 +2024-07-27 18:26:19,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=47944.0, ans=0.04949747468305833 +2024-07-27 18:26:21,663 INFO [train.py:1114] (1/4) Epoch 4, batch 5300, loss[loss=0.2898, simple_loss=0.3516, pruned_loss=0.114, over 4632.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3339, pruned_loss=0.09441, over 933804.10 frames. ], batch size: 16, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:26:25,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-27 18:26:26,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=47957.333333333336, ans=0.09899494936611666 +2024-07-27 18:26:56,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=48010.666666666664, ans=0.125 +2024-07-27 18:26:57,391 INFO [train.py:1114] (1/4) Epoch 4, batch 5350, loss[loss=0.2294, simple_loss=0.3053, pruned_loss=0.07675, over 4512.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3357, pruned_loss=0.09517, over 936088.50 frames. ], batch size: 10, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:26:58,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.09 vs. 
limit=12.0 +2024-07-27 18:27:00,009 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.313e+01 6.419e+01 7.171e+01 7.752e+01 1.208e+02, threshold=1.434e+02, percent-clipped=0.0 +2024-07-27 18:27:01,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=48024.0, ans=0.0 +2024-07-27 18:27:02,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=48024.0, ans=0.02 +2024-07-27 18:27:04,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48037.333333333336, ans=0.1 +2024-07-27 18:27:05,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=48037.333333333336, ans=0.05 +2024-07-27 18:27:20,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-27 18:27:22,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.92 vs. limit=15.0 +2024-07-27 18:27:27,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.64 vs. limit=15.0 +2024-07-27 18:27:31,380 INFO [train.py:1114] (1/4) Epoch 4, batch 5400, loss[loss=0.2777, simple_loss=0.3531, pruned_loss=0.1012, over 4444.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3369, pruned_loss=0.09605, over 930792.12 frames. ], batch size: 26, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:27:35,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.30 vs. limit=15.0 +2024-07-27 18:27:43,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=48104.0, ans=0.125 +2024-07-27 18:27:47,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=48117.333333333336, ans=0.00040927536231884047 +2024-07-27 18:27:53,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=48130.666666666664, ans=0.00040637681159420326 +2024-07-27 18:27:53,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=48130.666666666664, ans=0.125 +2024-07-27 18:27:54,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=48130.666666666664, ans=0.125 +2024-07-27 18:27:58,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=48144.0, ans=0.00040347826086956605 +2024-07-27 18:28:02,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.53 vs. limit=22.5 +2024-07-27 18:28:03,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=48144.0, ans=0.0 +2024-07-27 18:28:05,836 INFO [train.py:1114] (1/4) Epoch 4, batch 5450, loss[loss=0.2503, simple_loss=0.3203, pruned_loss=0.09012, over 4712.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3349, pruned_loss=0.09405, over 933479.48 frames. 
], batch size: 11, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:28:14,666 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.320e+01 7.105e+01 8.639e+01 1.249e+02, threshold=1.421e+02, percent-clipped=0.0 +2024-07-27 18:28:32,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=48184.0, ans=0.2 +2024-07-27 18:28:37,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=48197.333333333336, ans=0.1 +2024-07-27 18:28:47,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48210.666666666664, ans=0.125 +2024-07-27 18:28:50,524 INFO [train.py:1114] (1/4) Epoch 4, batch 5500, loss[loss=0.3365, simple_loss=0.3981, pruned_loss=0.1374, over 4147.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.335, pruned_loss=0.09492, over 931222.58 frames. ], batch size: 25, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:28:56,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48237.333333333336, ans=0.125 +2024-07-27 18:29:09,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=48250.666666666664, ans=0.0003802898550724649 +2024-07-27 18:29:16,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=48264.0, ans=0.125 +2024-07-27 18:29:24,390 INFO [train.py:1114] (1/4) Epoch 4, batch 5550, loss[loss=0.249, simple_loss=0.3238, pruned_loss=0.08715, over 4706.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3344, pruned_loss=0.09445, over 933622.57 frames. 
], batch size: 12, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:29:27,164 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.976e+01 8.822e+01 1.148e+02 2.032e+02, threshold=1.764e+02, percent-clipped=8.0 +2024-07-27 18:29:29,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48290.666666666664, ans=0.1 +2024-07-27 18:29:39,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=48317.333333333336, ans=0.1 +2024-07-27 18:29:44,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=48317.333333333336, ans=0.0 +2024-07-27 18:29:48,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=48330.666666666664, ans=0.125 +2024-07-27 18:29:50,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48330.666666666664, ans=0.1 +2024-07-27 18:29:52,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=48330.666666666664, ans=0.125 +2024-07-27 18:29:58,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=48344.0, ans=0.125 +2024-07-27 18:30:00,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=48344.0, ans=0.0 +2024-07-27 18:30:02,097 INFO [train.py:1114] (1/4) Epoch 4, batch 5600, loss[loss=0.258, simple_loss=0.3417, pruned_loss=0.0872, over 4735.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3356, pruned_loss=0.09515, over 934718.83 frames. ], batch size: 14, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:30:07,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=48357.333333333336, ans=0.125 +2024-07-27 18:30:09,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=15.0 +2024-07-27 18:30:15,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=48384.0, ans=0.1 +2024-07-27 18:30:25,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=48397.333333333336, ans=0.125 +2024-07-27 18:30:32,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48410.666666666664, ans=0.1 +2024-07-27 18:30:36,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=48410.666666666664, ans=0.0 +2024-07-27 18:30:37,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=48424.0, ans=0.0 +2024-07-27 18:30:38,208 INFO [train.py:1114] (1/4) Epoch 4, batch 5650, loss[loss=0.2952, simple_loss=0.3596, pruned_loss=0.1154, over 4416.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3344, pruned_loss=0.0952, over 937279.16 frames. 
], batch size: 21, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:30:41,017 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.980e+01 6.257e+01 6.942e+01 8.186e+01 1.408e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 18:30:41,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=48424.0, ans=0.0003426086956521742 +2024-07-27 18:30:51,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=48450.666666666664, ans=0.2 +2024-07-27 18:31:09,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=48477.333333333336, ans=0.125 +2024-07-27 18:31:11,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=48490.666666666664, ans=0.0 +2024-07-27 18:31:11,757 INFO [train.py:1114] (1/4) Epoch 4, batch 5700, loss[loss=0.256, simple_loss=0.3371, pruned_loss=0.08743, over 4690.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3352, pruned_loss=0.09556, over 938294.76 frames. ], batch size: 13, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:31:15,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.42 vs. limit=22.5 +2024-07-27 18:31:38,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-27 18:31:41,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48544.0, ans=0.125 +2024-07-27 18:31:43,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=48544.0, ans=0.125 +2024-07-27 18:31:44,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.45 vs. limit=10.0 +2024-07-27 18:31:45,699 INFO [train.py:1114] (1/4) Epoch 4, batch 5750, loss[loss=0.2953, simple_loss=0.3629, pruned_loss=0.1138, over 4706.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3367, pruned_loss=0.09572, over 938149.00 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:31:51,402 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.612e+01 7.726e+01 1.001e+02 1.887e+02, threshold=1.545e+02, percent-clipped=6.0 +2024-07-27 18:31:55,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=48570.666666666664, ans=0.09899494936611666 +2024-07-27 18:31:55,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.54 vs. limit=15.0 +2024-07-27 18:32:17,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=48610.666666666664, ans=0.0 +2024-07-27 18:32:19,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=48610.666666666664, ans=0.125 +2024-07-27 18:32:20,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.45 vs. 
limit=12.0 +2024-07-27 18:32:20,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=15.0 +2024-07-27 18:32:22,607 INFO [train.py:1114] (1/4) Epoch 4, batch 5800, loss[loss=0.2713, simple_loss=0.334, pruned_loss=0.1043, over 4712.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3377, pruned_loss=0.09629, over 937437.82 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:32:24,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.33 vs. limit=15.0 +2024-07-27 18:32:33,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=48637.333333333336, ans=0.025 +2024-07-27 18:32:42,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=48650.666666666664, ans=8.0 +2024-07-27 18:32:54,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=48677.333333333336, ans=0.125 +2024-07-27 18:32:56,500 INFO [train.py:1114] (1/4) Epoch 4, batch 5850, loss[loss=0.3435, simple_loss=0.4101, pruned_loss=0.1384, over 4464.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3366, pruned_loss=0.09506, over 938076.12 frames. ], batch size: 21, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:32:59,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 6.444e+01 7.225e+01 8.494e+01 1.330e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 18:33:05,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.06 vs. limit=22.5 +2024-07-27 18:33:17,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=48717.333333333336, ans=0.0 +2024-07-27 18:33:33,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=48744.0, ans=0.125 +2024-07-27 18:33:34,383 INFO [train.py:1114] (1/4) Epoch 4, batch 5900, loss[loss=0.2831, simple_loss=0.3575, pruned_loss=0.1044, over 4680.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3356, pruned_loss=0.09459, over 938268.62 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:33:35,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.08 vs. limit=22.5 +2024-07-27 18:33:47,295 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:33:55,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=48784.0, ans=0.2 +2024-07-27 18:33:59,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=48797.333333333336, ans=0.125 +2024-07-27 18:34:04,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=48797.333333333336, ans=0.05 +2024-07-27 18:34:13,798 INFO [train.py:1114] (1/4) Epoch 4, batch 5950, loss[loss=0.2936, simple_loss=0.3589, pruned_loss=0.1141, over 4693.00 frames. 
], tot_loss[loss=0.2623, simple_loss=0.3352, pruned_loss=0.09466, over 940320.66 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:34:16,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48824.0, ans=0.1 +2024-07-27 18:34:17,269 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.376e+01 6.577e+01 7.476e+01 8.958e+01 1.675e+02, threshold=1.495e+02, percent-clipped=2.0 +2024-07-27 18:34:26,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48850.666666666664, ans=0.1 +2024-07-27 18:34:31,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. limit=6.0 +2024-07-27 18:34:47,625 INFO [train.py:1114] (1/4) Epoch 4, batch 6000, loss[loss=0.2891, simple_loss=0.3638, pruned_loss=0.1072, over 4266.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.335, pruned_loss=0.09483, over 937467.60 frames. ], batch size: 25, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:34:47,626 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 18:34:56,785 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.6700, 3.5957, 2.3724, 2.1857], device='cuda:1') +2024-07-27 18:34:57,673 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.2285, 1.4250, 0.9939, 1.7386, 1.0017, 1.6606, 1.8403, 1.5212], + device='cuda:1') +2024-07-27 18:35:03,512 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2107, simple_loss=0.3128, pruned_loss=0.05435, over 944034.00 frames. +2024-07-27 18:35:03,513 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 18:35:05,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=48890.666666666664, ans=0.0002411594202898562 +2024-07-27 18:35:05,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=48890.666666666664, ans=0.125 +2024-07-27 18:35:13,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=48904.0, ans=0.0 +2024-07-27 18:35:14,519 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.50 vs. limit=22.5 +2024-07-27 18:35:28,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=48930.666666666664, ans=0.09899494936611666 +2024-07-27 18:35:28,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=48930.666666666664, ans=0.025 +2024-07-27 18:35:37,386 INFO [train.py:1114] (1/4) Epoch 4, batch 6050, loss[loss=0.2387, simple_loss=0.3143, pruned_loss=0.08159, over 4776.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3347, pruned_loss=0.09462, over 938483.64 frames. ], batch size: 12, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:35:37,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. 
limit=15.0 +2024-07-27 18:35:42,519 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.393e+01 7.329e+01 8.400e+01 1.158e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 18:35:46,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=48970.666666666664, ans=0.125 +2024-07-27 18:35:50,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=48970.666666666664, ans=0.07 +2024-07-27 18:35:54,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48984.0, ans=0.125 +2024-07-27 18:36:03,763 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:36:08,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49010.666666666664, ans=0.125 +2024-07-27 18:36:12,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.23 vs. limit=22.5 +2024-07-27 18:36:12,767 INFO [train.py:1114] (1/4) Epoch 4, batch 6100, loss[loss=0.2797, simple_loss=0.3581, pruned_loss=0.1006, over 4706.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3344, pruned_loss=0.09405, over 937620.87 frames. ], batch size: 15, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:36:13,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=49024.0, ans=0.05 +2024-07-27 18:36:23,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.46 vs. limit=22.5 +2024-07-27 18:36:24,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=49037.333333333336, ans=0.125 +2024-07-27 18:36:25,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=49050.666666666664, ans=0.2 +2024-07-27 18:36:25,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=49050.666666666664, ans=0.125 +2024-07-27 18:36:25,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.92 vs. limit=12.0 +2024-07-27 18:36:38,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=49064.0, ans=0.125 +2024-07-27 18:36:42,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.46 vs. limit=15.0 +2024-07-27 18:36:46,621 INFO [train.py:1114] (1/4) Epoch 4, batch 6150, loss[loss=0.3759, simple_loss=0.4057, pruned_loss=0.173, over 3601.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3345, pruned_loss=0.09419, over 936859.31 frames. 
], batch size: 35, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:36:50,105 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.101e+01 6.312e+01 7.204e+01 8.554e+01 1.450e+02, threshold=1.441e+02, percent-clipped=0.0 +2024-07-27 18:36:50,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=49090.666666666664, ans=0.0 +2024-07-27 18:36:51,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49090.666666666664, ans=0.1 +2024-07-27 18:36:55,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=49104.0, ans=0.2 +2024-07-27 18:37:00,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.04 vs. limit=15.0 +2024-07-27 18:37:09,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.15 vs. limit=15.0 +2024-07-27 18:37:15,698 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=15.0 +2024-07-27 18:37:16,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=49144.0, ans=0.1 +2024-07-27 18:37:20,685 INFO [train.py:1114] (1/4) Epoch 4, batch 6200, loss[loss=0.3113, simple_loss=0.3737, pruned_loss=0.1244, over 4729.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3357, pruned_loss=0.09522, over 936410.10 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:37:22,251 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:37:23,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=49157.333333333336, ans=0.00018318840579709984 +2024-07-27 18:37:44,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=49184.0, ans=0.125 +2024-07-27 18:38:05,389 INFO [train.py:1114] (1/4) Epoch 4, batch 6250, loss[loss=0.257, simple_loss=0.3465, pruned_loss=0.08372, over 4800.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3357, pruned_loss=0.09535, over 932624.87 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:38:08,828 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+01 6.578e+01 7.418e+01 8.909e+01 1.704e+02, threshold=1.484e+02, percent-clipped=3.0 +2024-07-27 18:38:28,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=49250.666666666664, ans=0.025 +2024-07-27 18:38:32,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.61 vs. limit=22.5 +2024-07-27 18:39:03,626 INFO [train.py:1114] (1/4) Epoch 4, batch 6300, loss[loss=0.2946, simple_loss=0.3499, pruned_loss=0.1196, over 4525.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3356, pruned_loss=0.09537, over 929576.10 frames. 
], batch size: 10, lr: 1.75e-02, grad_scale: 16.0 +2024-07-27 18:39:06,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=49290.666666666664, ans=0.125 +2024-07-27 18:39:08,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=49290.666666666664, ans=0.125 +2024-07-27 18:39:08,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=49290.666666666664, ans=0.025 +2024-07-27 18:39:15,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=49304.0, ans=0.07 +2024-07-27 18:39:44,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=49344.0, ans=0.0 +2024-07-27 18:39:46,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=49344.0, ans=0.125 +2024-07-27 18:39:49,715 INFO [train.py:1114] (1/4) Epoch 4, batch 6350, loss[loss=0.3036, simple_loss=0.3759, pruned_loss=0.1157, over 4495.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3343, pruned_loss=0.0944, over 933739.16 frames. ], batch size: 21, lr: 1.75e-02, grad_scale: 16.0 +2024-07-27 18:39:49,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=49357.333333333336, ans=0.2 +2024-07-27 18:39:50,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0 +2024-07-27 18:40:02,481 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.143e+01 6.766e+01 7.753e+01 2.111e+02, threshold=1.353e+02, percent-clipped=1.0 +2024-07-27 18:40:04,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=49370.666666666664, ans=0.125 +2024-07-27 18:40:24,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=49384.0, ans=0.125 +2024-07-27 18:40:24,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=49384.0, ans=0.125 +2024-07-27 18:40:26,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=49384.0, ans=0.0 +2024-07-27 18:41:05,490 INFO [train.py:1114] (1/4) Epoch 4, batch 6400, loss[loss=0.294, simple_loss=0.3739, pruned_loss=0.1071, over 4640.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.335, pruned_loss=0.095, over 935137.51 frames. ], batch size: 13, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:41:12,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.55 vs. 
limit=15.0 +2024-07-27 18:41:21,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=49450.666666666664, ans=0.125 +2024-07-27 18:41:23,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49450.666666666664, ans=0.125 +2024-07-27 18:41:28,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49464.0, ans=0.1 +2024-07-27 18:41:29,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=49464.0, ans=0.07 +2024-07-27 18:41:29,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49464.0, ans=0.125 +2024-07-27 18:41:35,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49477.333333333336, ans=0.1 +2024-07-27 18:41:39,067 INFO [train.py:1114] (1/4) Epoch 4, batch 6450, loss[loss=0.3104, simple_loss=0.3755, pruned_loss=0.1227, over 4521.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3356, pruned_loss=0.09505, over 938768.73 frames. ], batch size: 21, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:41:42,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49490.666666666664, ans=0.125 +2024-07-27 18:41:42,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.59 vs. limit=15.0 +2024-07-27 18:41:42,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.447e+01 6.416e+01 7.153e+01 7.876e+01 1.277e+02, threshold=1.431e+02, percent-clipped=0.0 +2024-07-27 18:41:53,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=49517.333333333336, ans=0.125 +2024-07-27 18:42:08,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49530.666666666664, ans=0.1 +2024-07-27 18:42:19,506 INFO [train.py:1114] (1/4) Epoch 4, batch 6500, loss[loss=0.4028, simple_loss=0.4331, pruned_loss=0.1863, over 3216.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3355, pruned_loss=0.09525, over 939692.89 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:42:24,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=49557.333333333336, ans=0.0 +2024-07-27 18:42:30,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49570.666666666664, ans=0.125 +2024-07-27 18:42:42,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.81 vs. 
limit=15.0 +2024-07-27 18:42:46,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=49584.0, ans=0.07 +2024-07-27 18:42:52,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=49597.333333333336, ans=0.125 +2024-07-27 18:42:53,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=49597.333333333336, ans=0.125 +2024-07-27 18:43:14,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49610.666666666664, ans=0.125 +2024-07-27 18:43:19,876 INFO [train.py:1114] (1/4) Epoch 4, batch 6550, loss[loss=0.2437, simple_loss=0.3039, pruned_loss=0.09172, over 4795.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3345, pruned_loss=0.09413, over 942629.02 frames. ], batch size: 11, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:43:23,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=49624.0, ans=15.0 +2024-07-27 18:43:23,931 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.133e+01 6.247e+01 6.814e+01 7.966e+01 1.482e+02, threshold=1.363e+02, percent-clipped=1.0 +2024-07-27 18:43:29,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=49637.333333333336, ans=0.2 +2024-07-27 18:43:31,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=49637.333333333336, ans=7.884057971014463e-05 +2024-07-27 18:43:37,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49650.666666666664, ans=0.1 +2024-07-27 18:43:37,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=49650.666666666664, ans=0.025 +2024-07-27 18:43:57,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=49677.333333333336, ans=0.0 +2024-07-27 18:44:02,854 INFO [train.py:1114] (1/4) Epoch 4, batch 6600, loss[loss=0.2776, simple_loss=0.3651, pruned_loss=0.09506, over 4930.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3337, pruned_loss=0.09369, over 944658.35 frames. 
], batch size: 14, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:44:07,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=49690.666666666664, ans=0.0 +2024-07-27 18:44:07,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49690.666666666664, ans=0.1 +2024-07-27 18:44:13,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49704.0, ans=0.1 +2024-07-27 18:44:16,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=49704.0, ans=10.0 +2024-07-27 18:44:16,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=49704.0, ans=0.125 +2024-07-27 18:44:28,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49717.333333333336, ans=0.1 +2024-07-27 18:44:33,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=49730.666666666664, ans=0.125 +2024-07-27 18:44:51,843 INFO [train.py:1114] (1/4) Epoch 4, batch 6650, loss[loss=0.2765, simple_loss=0.3453, pruned_loss=0.1039, over 4639.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3346, pruned_loss=0.09423, over 943714.13 frames. ], batch size: 17, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:44:58,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=49757.333333333336, ans=0.09899494936611666 +2024-07-27 18:45:00,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=49757.333333333336, ans=5.275362318840453e-05 +2024-07-27 18:45:01,621 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.604e+01 6.574e+01 7.387e+01 9.385e+01 1.471e+02, threshold=1.477e+02, percent-clipped=2.0 +2024-07-27 18:45:03,084 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:45:04,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49770.666666666664, ans=0.125 +2024-07-27 18:45:09,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49770.666666666664, ans=0.125 +2024-07-27 18:45:09,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=49770.666666666664, ans=0.2 +2024-07-27 18:45:25,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=49797.333333333336, ans=0.2 +2024-07-27 18:45:29,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.67 vs. limit=10.0 +2024-07-27 18:45:31,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=49810.666666666664, ans=0.0 +2024-07-27 18:45:37,465 INFO [train.py:1114] (1/4) Epoch 4, batch 6700, loss[loss=0.2514, simple_loss=0.3245, pruned_loss=0.0892, over 4702.00 frames. 
], tot_loss[loss=0.2617, simple_loss=0.335, pruned_loss=0.09422, over 942643.80 frames. ], batch size: 19, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:45:50,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=49837.333333333336, ans=0.125 +2024-07-27 18:45:53,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=49850.666666666664, ans=0.07 +2024-07-27 18:46:25,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49890.666666666664, ans=0.1 +2024-07-27 18:46:26,240 INFO [train.py:1114] (1/4) Epoch 4, batch 6750, loss[loss=0.2474, simple_loss=0.3267, pruned_loss=0.08405, over 4198.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3341, pruned_loss=0.09343, over 940365.85 frames. ], batch size: 26, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:46:29,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=49890.666666666664, ans=0.0 +2024-07-27 18:46:30,208 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.238e+01 6.545e+01 7.445e+01 9.250e+01 1.508e+02, threshold=1.489e+02, percent-clipped=1.0 +2024-07-27 18:46:42,524 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:46:42,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49917.333333333336, ans=0.1 +2024-07-27 18:46:58,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=49944.0, ans=0.2 +2024-07-27 18:47:00,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=49944.0, ans=0.125 +2024-07-27 18:47:01,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.31 vs. limit=15.0 +2024-07-27 18:47:01,349 INFO [train.py:1114] (1/4) Epoch 4, batch 6800, loss[loss=0.2707, simple_loss=0.3589, pruned_loss=0.09123, over 4639.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3351, pruned_loss=0.09399, over 938957.60 frames. ], batch size: 13, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:47:31,328 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:47:35,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.39 vs. limit=22.5 +2024-07-27 18:47:37,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=12.0 +2024-07-27 18:48:03,129 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:48:37,372 INFO [train.py:1114] (1/4) Epoch 4, batch 6850, loss[loss=0.2614, simple_loss=0.3586, pruned_loss=0.08207, over 4687.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3359, pruned_loss=0.09439, over 940720.95 frames. 
], batch size: 13, lr: 1.74e-02, grad_scale: 16.0 +2024-07-27 18:48:40,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=50024.0, ans=0.125 +2024-07-27 18:48:42,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.490e+01 7.044e+01 8.185e+01 1.640e+02, threshold=1.409e+02, percent-clipped=3.0 +2024-07-27 18:48:56,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=50037.333333333336, ans=0.125 +2024-07-27 18:49:17,825 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:49:18,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.57 vs. limit=15.0 +2024-07-27 18:49:29,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=50077.333333333336, ans=0.125 +2024-07-27 18:49:46,656 INFO [train.py:1114] (1/4) Epoch 4, batch 6900, loss[loss=0.2307, simple_loss=0.3179, pruned_loss=0.07181, over 4964.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3347, pruned_loss=0.09366, over 943225.85 frames. ], batch size: 13, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:49:55,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=50104.0, ans=0.125 +2024-07-27 18:49:59,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=50117.333333333336, ans=0.2 +2024-07-27 18:50:41,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=50130.666666666664, ans=0.125 +2024-07-27 18:50:43,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=50130.666666666664, ans=0.125 +2024-07-27 18:50:43,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.11 vs. limit=10.0 +2024-07-27 18:50:46,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=50130.666666666664, ans=0.05 +2024-07-27 18:50:47,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=50130.666666666664, ans=0.0 +2024-07-27 18:50:54,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=50144.0, ans=0.125 +2024-07-27 18:50:55,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=50144.0, ans=0.125 +2024-07-27 18:50:56,893 INFO [train.py:1114] (1/4) Epoch 4, batch 6950, loss[loss=0.2617, simple_loss=0.3239, pruned_loss=0.09972, over 4597.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3345, pruned_loss=0.09344, over 940784.20 frames. 
], batch size: 10, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:51:01,452 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.625e+01 7.241e+01 8.326e+01 1.274e+02, threshold=1.448e+02, percent-clipped=0.0 +2024-07-27 18:51:16,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.92 vs. limit=10.0 +2024-07-27 18:51:20,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=50197.333333333336, ans=0.05 +2024-07-27 18:51:31,303 INFO [train.py:1114] (1/4) Epoch 4, batch 7000, loss[loss=0.2493, simple_loss=0.3172, pruned_loss=0.09076, over 4605.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3329, pruned_loss=0.09252, over 939422.21 frames. ], batch size: 17, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:51:33,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=50224.0, ans=0.125 +2024-07-27 18:51:47,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=50250.666666666664, ans=0.1 +2024-07-27 18:51:47,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.96 vs. limit=15.0 +2024-07-27 18:51:52,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.98 vs. limit=12.0 +2024-07-27 18:51:53,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=50264.0, ans=0.0 +2024-07-27 18:51:56,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=50264.0, ans=0.025 +2024-07-27 18:51:56,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=15.0 +2024-07-27 18:52:05,899 INFO [train.py:1114] (1/4) Epoch 4, batch 7050, loss[loss=0.2894, simple_loss=0.3604, pruned_loss=0.1092, over 4696.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3333, pruned_loss=0.09227, over 942440.43 frames. ], batch size: 19, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:52:08,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=50290.666666666664, ans=10.0 +2024-07-27 18:52:10,848 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.665e+01 7.548e+01 9.503e+01 1.584e+02, threshold=1.510e+02, percent-clipped=1.0 +2024-07-27 18:52:13,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=50304.0, ans=0.0 +2024-07-27 18:52:34,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=50344.0, ans=0.125 +2024-07-27 18:52:41,455 INFO [train.py:1114] (1/4) Epoch 4, batch 7100, loss[loss=0.343, simple_loss=0.4037, pruned_loss=0.1412, over 4809.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3351, pruned_loss=0.09356, over 937069.58 frames. 
], batch size: 15, lr: 1.73e-02, grad_scale: 16.0
+2024-07-27 18:52:43,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=50357.333333333336, ans=0.125
+2024-07-27 18:52:49,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=50370.666666666664, ans=0.025
+2024-07-27 18:52:56,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=50384.0, ans=0.125
+2024-07-27 18:53:10,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=50410.666666666664, ans=0.09899494936611666
+2024-07-27 18:53:14,537 INFO [train.py:1114] (1/4) Epoch 4, batch 7150, loss[loss=0.2891, simple_loss=0.3459, pruned_loss=0.1162, over 4433.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3333, pruned_loss=0.09268, over 938249.43 frames. ], batch size: 21, lr: 1.73e-02, grad_scale: 16.0
+2024-07-27 18:53:18,906 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 6.686e+01 7.675e+01 9.181e+01 1.338e+02, threshold=1.535e+02, percent-clipped=0.0
+2024-07-27 18:53:21,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=50437.333333333336, ans=0.125
+2024-07-27 18:53:21,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.67 vs. limit=22.5
+2024-07-27 18:53:36,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=50464.0, ans=0.0
+2024-07-27 18:53:36,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=50464.0, ans=0.2
+2024-07-27 18:53:46,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0
+2024-07-27 18:53:46,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=50490.666666666664, ans=0.0
+2024-07-27 18:53:47,308 INFO [train.py:1114] (1/4) Epoch 4, batch 7200, loss[loss=0.3049, simple_loss=0.3746, pruned_loss=0.1176, over 4800.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3344, pruned_loss=0.09358, over 938329.21 frames. ], batch size: 15, lr: 1.73e-02, grad_scale: 32.0
+2024-07-27 18:53:56,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=50504.0, ans=0.125
+2024-07-27 18:54:04,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=50517.333333333336, ans=0.0
+2024-07-27 18:54:07,805 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.59 vs. limit=6.0
+2024-07-27 18:54:14,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.17 vs. limit=10.0
+2024-07-27 18:54:20,059 INFO [train.py:1114] (1/4) Epoch 4, batch 7250, loss[loss=0.2938, simple_loss=0.3515, pruned_loss=0.118, over 4858.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3337, pruned_loss=0.09386, over 939841.61 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0
+2024-07-27 18:54:23,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.04 vs. limit=15.0
+2024-07-27 18:54:24,521 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.173e+01 6.374e+01 7.128e+01 8.077e+01 1.230e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 18:54:27,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=50570.666666666664, ans=0.125
+2024-07-27 18:54:33,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=50584.0, ans=0.2
+2024-07-27 18:54:38,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=50597.333333333336, ans=0.0
+2024-07-27 18:54:38,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=50597.333333333336, ans=0.125
+2024-07-27 18:54:43,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=50597.333333333336, ans=0.2
+2024-07-27 18:54:43,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50597.333333333336, ans=0.1
+2024-07-27 18:54:52,628 INFO [train.py:1114] (1/4) Epoch 4, batch 7300, loss[loss=0.242, simple_loss=0.308, pruned_loss=0.088, over 4855.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.333, pruned_loss=0.09342, over 940251.37 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0
+2024-07-27 18:54:57,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.59 vs. limit=22.5
+2024-07-27 18:54:59,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=50637.333333333336, ans=0.0
+2024-07-27 18:55:06,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=50650.666666666664, ans=0.2
+2024-07-27 18:55:17,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.33 vs. limit=22.5
+2024-07-27 18:55:19,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50677.333333333336, ans=0.1
+2024-07-27 18:55:22,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=50677.333333333336, ans=0.125
+2024-07-27 18:55:25,718 INFO [train.py:1114] (1/4) Epoch 4, batch 7350, loss[loss=0.2548, simple_loss=0.33, pruned_loss=0.08985, over 4643.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3314, pruned_loss=0.09226, over 939561.71 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0
+2024-07-27 18:55:30,228 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.562e+01 7.152e+01 9.266e+01 1.352e+02, threshold=1.430e+02, percent-clipped=0.0
+2024-07-27 18:55:37,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=50704.0, ans=0.125
+2024-07-27 18:55:47,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=50730.666666666664, ans=0.0
+2024-07-27 18:55:54,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=50744.0, ans=0.04949747468305833
+2024-07-27 18:55:58,344 INFO [train.py:1114] (1/4) Epoch 4, batch 7400, loss[loss=0.2437, simple_loss=0.3113, pruned_loss=0.08804, over 4680.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3315, pruned_loss=0.0918, over 940671.05 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:56:06,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=50770.666666666664, ans=0.0
+2024-07-27 18:56:18,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=50784.0, ans=0.0
+2024-07-27 18:56:20,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=50797.333333333336, ans=0.07
+2024-07-27 18:56:25,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=50810.666666666664, ans=0.125
+2024-07-27 18:56:32,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=50824.0, ans=0.025
+2024-07-27 18:56:32,545 INFO [train.py:1114] (1/4) Epoch 4, batch 7450, loss[loss=0.2281, simple_loss=0.3069, pruned_loss=0.07466, over 4627.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3307, pruned_loss=0.09199, over 937885.09 frames. ], batch size: 11, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:56:32,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=50824.0, ans=0.0
+2024-07-27 18:56:34,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=22.5
+2024-07-27 18:56:37,129 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.488e+01 6.489e+01 7.278e+01 8.154e+01 1.203e+02, threshold=1.456e+02, percent-clipped=0.0
+2024-07-27 18:56:38,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=50837.333333333336, ans=0.125
+2024-07-27 18:56:42,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=50837.333333333336, ans=0.2
+2024-07-27 18:56:44,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=50837.333333333336, ans=0.2
+2024-07-27 18:56:57,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=50864.0, ans=0.0
+2024-07-27 18:57:01,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.88 vs. limit=15.0
+2024-07-27 18:57:06,486 INFO [train.py:1114] (1/4) Epoch 4, batch 7500, loss[loss=0.3879, simple_loss=0.4191, pruned_loss=0.1784, over 3189.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3311, pruned_loss=0.09201, over 936532.77 frames. ], batch size: 37, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:57:18,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=50904.0, ans=0.125
+2024-07-27 18:57:19,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50917.333333333336, ans=0.1
+2024-07-27 18:57:21,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=50917.333333333336, ans=0.125
+2024-07-27 18:57:31,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50930.666666666664, ans=0.125
+2024-07-27 18:57:38,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.20 vs. limit=10.0
+2024-07-27 18:57:38,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=50957.333333333336, ans=0.125
+2024-07-27 18:57:39,286 INFO [train.py:1114] (1/4) Epoch 4, batch 7550, loss[loss=0.2723, simple_loss=0.3489, pruned_loss=0.09786, over 4641.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3329, pruned_loss=0.09252, over 936180.15 frames. ], batch size: 17, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:57:42,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=22.5
+2024-07-27 18:57:45,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=50957.333333333336, ans=0.025
+2024-07-27 18:57:46,431 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.472e+01 6.522e+01 7.335e+01 8.635e+01 1.380e+02, threshold=1.467e+02, percent-clipped=0.0
+2024-07-27 18:57:48,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.03 vs. limit=22.5
+2024-07-27 18:57:51,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=50970.666666666664, ans=0.0
+2024-07-27 18:57:51,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=50970.666666666664, ans=0.125
+2024-07-27 18:58:04,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=50984.0, ans=0.09899494936611666
+2024-07-27 18:58:05,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=50984.0, ans=0.125
+2024-07-27 18:58:09,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=50984.0, ans=0.0
+2024-07-27 18:58:12,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=50997.333333333336, ans=0.2
+2024-07-27 18:58:12,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=50997.333333333336, ans=0.125
+2024-07-27 18:58:13,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50997.333333333336, ans=0.1
+2024-07-27 18:58:14,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=50997.333333333336, ans=0.125
+2024-07-27 18:58:16,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=50997.333333333336, ans=0.025
+2024-07-27 18:58:24,740 INFO [train.py:1114] (1/4) Epoch 4, batch 7600, loss[loss=0.2431, simple_loss=0.3355, pruned_loss=0.07534, over 4814.00 frames. ], tot_loss[loss=0.259, simple_loss=0.333, pruned_loss=0.09254, over 937875.42 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:58:24,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=51024.0, ans=0.125
+2024-07-27 18:58:48,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=51050.666666666664, ans=0.2
+2024-07-27 18:58:50,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.51 vs. limit=10.0
+2024-07-27 18:58:53,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.82 vs. limit=12.0
+2024-07-27 18:58:54,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=51064.0, ans=0.0
+2024-07-27 18:59:06,955 INFO [train.py:1114] (1/4) Epoch 4, batch 7650, loss[loss=0.228, simple_loss=0.302, pruned_loss=0.07702, over 4931.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3334, pruned_loss=0.09298, over 937474.95 frames. ], batch size: 12, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:59:13,315 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.235e+01 6.494e+01 7.893e+01 8.811e+01 1.540e+02, threshold=1.579e+02, percent-clipped=3.0
+2024-07-27 18:59:22,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=51104.0, ans=0.0
+2024-07-27 18:59:37,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=51144.0, ans=0.125
+2024-07-27 18:59:38,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=51144.0, ans=0.0
+2024-07-27 18:59:44,098 INFO [train.py:1114] (1/4) Epoch 4, batch 7700, loss[loss=0.2381, simple_loss=0.3134, pruned_loss=0.08141, over 4701.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3341, pruned_loss=0.09321, over 934632.64 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 18:59:45,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=51157.333333333336, ans=0.09899494936611666
+2024-07-27 18:59:53,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51170.666666666664, ans=0.125
+2024-07-27 19:00:13,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=51210.666666666664, ans=0.0
+2024-07-27 19:00:19,722 INFO [train.py:1114] (1/4) Epoch 4, batch 7750, loss[loss=0.2014, simple_loss=0.2885, pruned_loss=0.05715, over 4931.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3347, pruned_loss=0.09372, over 936159.04 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 19:00:24,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.378e+01 6.531e+01 7.452e+01 8.452e+01 1.344e+02, threshold=1.490e+02, percent-clipped=0.0
+2024-07-27 19:00:27,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=51237.333333333336, ans=0.0
+2024-07-27 19:00:28,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=51237.333333333336, ans=0.1
+2024-07-27 19:00:51,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.19 vs. limit=15.0
+2024-07-27 19:01:12,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=51277.333333333336, ans=0.2
+2024-07-27 19:01:19,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=51277.333333333336, ans=0.09899494936611666
+2024-07-27 19:01:20,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=51277.333333333336, ans=0.125
+2024-07-27 19:01:21,757 INFO [train.py:1114] (1/4) Epoch 4, batch 7800, loss[loss=0.3089, simple_loss=0.3727, pruned_loss=0.1225, over 4669.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3352, pruned_loss=0.09379, over 937685.05 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0
+2024-07-27 19:01:24,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=51290.666666666664, ans=0.2
+2024-07-27 19:01:27,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=51304.0, ans=0.2
+2024-07-27 19:01:28,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=51304.0, ans=0.125
+2024-07-27 19:01:32,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0
+2024-07-27 19:01:53,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=51344.0, ans=0.125
+2024-07-27 19:01:53,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51344.0, ans=0.1
+2024-07-27 19:01:54,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.45 vs. limit=22.5
+2024-07-27 19:01:59,104 INFO [train.py:1114] (1/4) Epoch 4, batch 7850, loss[loss=0.2511, simple_loss=0.3089, pruned_loss=0.09669, over 4544.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3351, pruned_loss=0.09358, over 936348.26 frames. ], batch size: 10, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:02:01,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=51357.333333333336, ans=0.2
+2024-07-27 19:02:04,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.164e+01 6.243e+01 7.019e+01 7.976e+01 1.332e+02, threshold=1.404e+02, percent-clipped=0.0
+2024-07-27 19:02:21,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=51370.666666666664, ans=0.125
+2024-07-27 19:02:24,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51384.0, ans=0.1
+2024-07-27 19:02:26,382 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.86 vs. limit=12.0
+2024-07-27 19:02:32,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51397.333333333336, ans=0.125
+2024-07-27 19:02:43,538 INFO [train.py:1114] (1/4) Epoch 4, batch 7900, loss[loss=0.252, simple_loss=0.3277, pruned_loss=0.08811, over 4876.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.336, pruned_loss=0.09451, over 933003.29 frames. ], batch size: 14, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:02:46,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=51424.0, ans=0.0
+2024-07-27 19:02:46,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51424.0, ans=0.1
+2024-07-27 19:02:54,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=51437.333333333336, ans=0.025
+2024-07-27 19:02:58,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0
+2024-07-27 19:03:00,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=51450.666666666664, ans=0.125
+2024-07-27 19:03:00,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.78 vs. limit=15.0
+2024-07-27 19:03:21,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51477.333333333336, ans=0.125
+2024-07-27 19:03:25,612 INFO [train.py:1114] (1/4) Epoch 4, batch 7950, loss[loss=0.3407, simple_loss=0.3905, pruned_loss=0.1455, over 3201.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3347, pruned_loss=0.09329, over 935119.32 frames. ], batch size: 35, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:03:25,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51490.666666666664, ans=0.125
+2024-07-27 19:03:28,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51490.666666666664, ans=0.125
+2024-07-27 19:03:29,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=51490.666666666664, ans=0.025
+2024-07-27 19:03:30,100 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.275e+01 6.617e+01 8.169e+01 1.040e+02 2.019e+02, threshold=1.634e+02, percent-clipped=10.0
+2024-07-27 19:03:31,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=51504.0, ans=0.0
+2024-07-27 19:03:42,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.69 vs. limit=15.0
+2024-07-27 19:03:43,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.37 vs. limit=15.0
+2024-07-27 19:03:56,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51544.0, ans=0.1
+2024-07-27 19:04:01,615 INFO [train.py:1114] (1/4) Epoch 4, batch 8000, loss[loss=0.2323, simple_loss=0.3099, pruned_loss=0.07741, over 4613.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3337, pruned_loss=0.09304, over 934445.85 frames. ], batch size: 11, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:04:04,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=51557.333333333336, ans=0.125
+2024-07-27 19:04:06,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=51557.333333333336, ans=0.125
+2024-07-27 19:04:06,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.01 vs. limit=15.0
+2024-07-27 19:04:41,909 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.01 vs. limit=15.0
+2024-07-27 19:04:42,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=51610.666666666664, ans=0.0
+2024-07-27 19:04:43,561 INFO [train.py:1114] (1/4) Epoch 4, batch 8050, loss[loss=0.255, simple_loss=0.3389, pruned_loss=0.0855, over 4802.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.334, pruned_loss=0.09278, over 934064.21 frames. ], batch size: 14, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:04:46,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.60 vs. limit=15.0
+2024-07-27 19:04:48,126 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.358e+01 7.394e+01 8.578e+01 1.528e+02, threshold=1.479e+02, percent-clipped=0.0
+2024-07-27 19:04:51,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=51637.333333333336, ans=0.2
+2024-07-27 19:04:54,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51637.333333333336, ans=0.125
+2024-07-27 19:04:59,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=51650.666666666664, ans=0.125
+2024-07-27 19:05:08,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=51664.0, ans=0.04949747468305833
+2024-07-27 19:05:20,003 INFO [train.py:1114] (1/4) Epoch 4, batch 8100, loss[loss=0.2534, simple_loss=0.3534, pruned_loss=0.07672, over 4796.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3363, pruned_loss=0.094, over 934224.82 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:05:23,228 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:05:31,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=51704.0, ans=0.125
+2024-07-27 19:05:33,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=51717.333333333336, ans=0.0
+2024-07-27 19:05:33,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51717.333333333336, ans=0.125
+2024-07-27 19:05:38,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=51717.333333333336, ans=0.0
+2024-07-27 19:05:56,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=51757.333333333336, ans=0.125
+2024-07-27 19:05:57,494 INFO [train.py:1114] (1/4) Epoch 4, batch 8150, loss[loss=0.2724, simple_loss=0.3544, pruned_loss=0.09524, over 4792.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.334, pruned_loss=0.09269, over 937415.94 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:05:58,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51757.333333333336, ans=0.1
+2024-07-27 19:05:59,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51757.333333333336, ans=0.125
+2024-07-27 19:06:02,177 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.306e+01 6.341e+01 7.110e+01 7.968e+01 1.215e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-27 19:06:04,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=51770.666666666664, ans=0.125
+2024-07-27 19:06:12,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0
+2024-07-27 19:06:13,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.06 vs. limit=22.5
+2024-07-27 19:06:25,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=51810.666666666664, ans=0.125
+2024-07-27 19:06:30,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0
+2024-07-27 19:06:31,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=51824.0, ans=0.07
+2024-07-27 19:06:32,397 INFO [train.py:1114] (1/4) Epoch 4, batch 8200, loss[loss=0.2719, simple_loss=0.3667, pruned_loss=0.08859, over 4805.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.334, pruned_loss=0.09222, over 938478.05 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:06:35,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=51824.0, ans=0.0
+2024-07-27 19:06:47,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=51850.666666666664, ans=0.2
+2024-07-27 19:06:47,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=51850.666666666664, ans=0.025
+2024-07-27 19:06:49,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=51850.666666666664, ans=0.125
+2024-07-27 19:06:51,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=51864.0, ans=0.025
+2024-07-27 19:06:52,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=51864.0, ans=0.0
+2024-07-27 19:07:04,511 INFO [train.py:1114] (1/4) Epoch 4, batch 8250, loss[loss=0.2381, simple_loss=0.3197, pruned_loss=0.07819, over 4905.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3331, pruned_loss=0.09144, over 938470.98 frames. ], batch size: 13, lr: 1.71e-02, grad_scale: 32.0
+2024-07-27 19:07:07,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=51890.666666666664, ans=0.0
+2024-07-27 19:07:09,008 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.190e+01 7.037e+01 8.392e+01 1.258e+02, threshold=1.407e+02, percent-clipped=0.0
+2024-07-27 19:07:15,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=51904.0, ans=0.125
+2024-07-27 19:07:16,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=51904.0, ans=0.125
+2024-07-27 19:07:20,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=51917.333333333336, ans=0.07
+2024-07-27 19:07:32,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.37 vs. limit=15.0
+2024-07-27 19:07:37,258 INFO [train.py:1114] (1/4) Epoch 4, batch 8300, loss[loss=0.2497, simple_loss=0.3387, pruned_loss=0.08036, over 4895.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3335, pruned_loss=0.09132, over 938339.84 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:07:44,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51970.666666666664, ans=0.125
+2024-07-27 19:07:46,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=51970.666666666664, ans=0.125
+2024-07-27 19:07:50,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=51984.0, ans=0.125
+2024-07-27 19:08:02,110 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:08:03,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=51997.333333333336, ans=0.0
+2024-07-27 19:08:06,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=52010.666666666664, ans=0.0
+2024-07-27 19:08:09,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=52010.666666666664, ans=0.015
+2024-07-27 19:08:11,097 INFO [train.py:1114] (1/4) Epoch 4, batch 8350, loss[loss=0.2736, simple_loss=0.3538, pruned_loss=0.0967, over 4793.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3339, pruned_loss=0.09169, over 941131.33 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:08:11,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52024.0, ans=0.1
+2024-07-27 19:08:15,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 6.472e+01 7.036e+01 8.315e+01 1.538e+02, threshold=1.407e+02, percent-clipped=2.0
+2024-07-27 19:08:20,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.95 vs. limit=15.0
+2024-07-27 19:08:37,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.70 vs. limit=12.0
+2024-07-27 19:08:40,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=52077.333333333336, ans=10.0
+2024-07-27 19:08:44,798 INFO [train.py:1114] (1/4) Epoch 4, batch 8400, loss[loss=0.3064, simple_loss=0.365, pruned_loss=0.1239, over 4779.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3339, pruned_loss=0.09176, over 939923.65 frames. ], batch size: 12, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:08:46,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52090.666666666664, ans=0.1
+2024-07-27 19:08:48,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.76 vs. limit=22.5
+2024-07-27 19:08:54,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52104.0, ans=0.1
+2024-07-27 19:09:01,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=52117.333333333336, ans=0.025
+2024-07-27 19:09:02,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.47 vs. limit=15.0
+2024-07-27 19:09:08,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52130.666666666664, ans=0.125
+2024-07-27 19:09:09,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0
+2024-07-27 19:09:19,575 INFO [train.py:1114] (1/4) Epoch 4, batch 8450, loss[loss=0.3131, simple_loss=0.3836, pruned_loss=0.1213, over 4805.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3355, pruned_loss=0.09234, over 938746.28 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:09:24,026 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.588e+01 7.394e+01 8.228e+01 1.463e+02, threshold=1.479e+02, percent-clipped=1.0
+2024-07-27 19:09:29,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52170.666666666664, ans=0.125
+2024-07-27 19:09:34,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=52184.0, ans=0.125
+2024-07-27 19:09:37,139 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:09:39,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52197.333333333336, ans=0.1
+2024-07-27 19:09:51,712 INFO [train.py:1114] (1/4) Epoch 4, batch 8500, loss[loss=0.2206, simple_loss=0.2969, pruned_loss=0.07216, over 4616.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3345, pruned_loss=0.09224, over 938502.46 frames. ], batch size: 11, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:09:58,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=52237.333333333336, ans=0.125
+2024-07-27 19:10:02,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52237.333333333336, ans=0.0
+2024-07-27 19:10:05,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=52250.666666666664, ans=0.125
+2024-07-27 19:10:17,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=52250.666666666664, ans=0.0
+2024-07-27 19:10:19,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=52264.0, ans=0.0
+2024-07-27 19:10:35,613 INFO [train.py:1114] (1/4) Epoch 4, batch 8550, loss[loss=0.1867, simple_loss=0.264, pruned_loss=0.05475, over 4814.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3343, pruned_loss=0.09219, over 939493.20 frames. ], batch size: 11, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:10:40,184 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.270e+01 6.908e+01 7.613e+01 1.129e+02, threshold=1.382e+02, percent-clipped=0.0
+2024-07-27 19:10:41,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=52304.0, ans=0.05
+2024-07-27 19:10:44,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=52304.0, ans=0.0
+2024-07-27 19:10:44,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52304.0, ans=0.125
+2024-07-27 19:10:54,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52317.333333333336, ans=0.0
+2024-07-27 19:10:54,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=52317.333333333336, ans=0.07
+2024-07-27 19:11:02,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.53 vs. limit=22.5
+2024-07-27 19:11:06,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.27 vs. limit=6.0
+2024-07-27 19:11:09,051 INFO [train.py:1114] (1/4) Epoch 4, batch 8600, loss[loss=0.2404, simple_loss=0.3285, pruned_loss=0.07618, over 4792.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3344, pruned_loss=0.09279, over 939358.75 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:11:28,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=52384.0, ans=0.0
+2024-07-27 19:11:30,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=52397.333333333336, ans=0.2
+2024-07-27 19:11:31,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=52397.333333333336, ans=0.125
+2024-07-27 19:11:39,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=52410.666666666664, ans=10.0
+2024-07-27 19:11:39,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=52410.666666666664, ans=0.125
+2024-07-27 19:11:43,014 INFO [train.py:1114] (1/4) Epoch 4, batch 8650, loss[loss=0.2684, simple_loss=0.343, pruned_loss=0.09691, over 4901.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3335, pruned_loss=0.09236, over 940440.11 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:11:52,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=52424.0, ans=0.2
+2024-07-27 19:11:54,068 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.219e+01 6.477e+01 7.280e+01 8.362e+01 1.223e+02, threshold=1.456e+02, percent-clipped=0.0
+2024-07-27 19:11:55,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=52424.0, ans=0.125
+2024-07-27 19:11:55,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=52424.0, ans=0.125
+2024-07-27 19:12:07,043 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:12:12,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.88 vs. limit=22.5
+2024-07-27 19:12:16,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=52477.333333333336, ans=0.125
+2024-07-27 19:12:16,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=52477.333333333336, ans=0.125
+2024-07-27 19:12:18,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.03 vs. limit=15.0
+2024-07-27 19:12:20,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=52477.333333333336, ans=0.0
+2024-07-27 19:12:24,873 INFO [train.py:1114] (1/4) Epoch 4, batch 8700, loss[loss=0.2524, simple_loss=0.3175, pruned_loss=0.09361, over 4755.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3339, pruned_loss=0.09268, over 938256.73 frames. ], batch size: 13, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:12:29,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=52490.666666666664, ans=0.0
+2024-07-27 19:12:37,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.98 vs. limit=15.0
+2024-07-27 19:12:55,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=52544.0, ans=0.0
+2024-07-27 19:13:03,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=52557.333333333336, ans=0.0
+2024-07-27 19:13:03,648 INFO [train.py:1114] (1/4) Epoch 4, batch 8750, loss[loss=0.3043, simple_loss=0.3682, pruned_loss=0.1202, over 4668.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3339, pruned_loss=0.09287, over 936906.23 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0
+2024-07-27 19:16:50,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=52557.333333333336, ans=0.125
+2024-07-27 19:16:51,368 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.508e+01 7.367e+01 8.337e+01 1.242e+02, threshold=1.473e+02, percent-clipped=0.0
+2024-07-27 19:17:00,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52570.666666666664, ans=0.1
+2024-07-27 19:17:28,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.70 vs. limit=15.0
+2024-07-27 19:17:48,822 INFO [train.py:1114] (1/4) Epoch 4, batch 8800, loss[loss=0.2398, simple_loss=0.3202, pruned_loss=0.07974, over 4936.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3336, pruned_loss=0.09245, over 937973.95 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 32.0
+2024-07-27 19:18:34,294 INFO [train.py:1114] (1/4) Epoch 4, batch 8850, loss[loss=0.272, simple_loss=0.3427, pruned_loss=0.1007, over 4492.00 frames. ], tot_loss[loss=0.259, simple_loss=0.333, pruned_loss=0.09252, over 932466.49 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:18:38,868 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.151e+01 6.204e+01 6.999e+01 8.264e+01 1.249e+02, threshold=1.400e+02, percent-clipped=0.0
+2024-07-27 19:18:51,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=52704.0, ans=0.0
+2024-07-27 19:18:53,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52704.0, ans=0.1
+2024-07-27 19:19:09,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=52744.0, ans=0.125
+2024-07-27 19:19:11,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=52744.0, ans=0.125
+2024-07-27 19:19:15,446 INFO [train.py:1114] (1/4) Epoch 4, batch 8900, loss[loss=0.2954, simple_loss=0.3609, pruned_loss=0.115, over 4939.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3343, pruned_loss=0.09356, over 931192.92 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:19:16,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52757.333333333336, ans=0.125
+2024-07-27 19:19:17,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=52757.333333333336, ans=0.0
+2024-07-27 19:19:18,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52757.333333333336, ans=0.125
+2024-07-27 19:19:29,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=52784.0, ans=0.125
+2024-07-27 19:19:33,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. limit=5.0
+2024-07-27 19:19:33,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=52784.0, ans=0.0
+2024-07-27 19:19:34,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.93 vs. limit=15.0
+2024-07-27 19:19:37,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52797.333333333336, ans=0.1
+2024-07-27 19:19:39,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=52797.333333333336, ans=0.0
+2024-07-27 19:19:41,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52797.333333333336, ans=0.125
+2024-07-27 19:19:48,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52810.666666666664, ans=0.125
+2024-07-27 19:19:49,278 INFO [train.py:1114] (1/4) Epoch 4, batch 8950, loss[loss=0.2733, simple_loss=0.3443, pruned_loss=0.1012, over 4408.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3339, pruned_loss=0.09333, over 932150.94 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:19:50,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52824.0, ans=0.125
+2024-07-27 19:19:51,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=52824.0, ans=0.125
+2024-07-27 19:19:53,244 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:19:59,031 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.556e+01 6.546e+01 7.266e+01 8.543e+01 1.301e+02, threshold=1.453e+02, percent-clipped=0.0
+2024-07-27 19:20:00,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=52837.333333333336, ans=0.04949747468305833
+2024-07-27 19:20:09,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=52850.666666666664, ans=0.04949747468305833
+2024-07-27 19:20:10,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=52850.666666666664, ans=0.0
+2024-07-27 19:20:16,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=52864.0, ans=0.0
+2024-07-27 19:20:17,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52864.0, ans=0.125
+2024-07-27 19:20:21,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52864.0, ans=0.125
+2024-07-27 19:20:23,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=52877.333333333336, ans=0.125
+2024-07-27 19:20:26,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=52877.333333333336, ans=0.125
+2024-07-27 19:20:29,460 INFO [train.py:1114] (1/4) Epoch 4, batch 9000, loss[loss=0.2493, simple_loss=0.321, pruned_loss=0.0888, over 4637.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3332, pruned_loss=0.09336, over 934803.04 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:20:29,460 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 19:20:34,190 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.3150, 3.8928, 3.3392, 3.8750], device='cuda:1')
+2024-07-27 19:20:48,916 INFO [train.py:1146] (1/4) Epoch 4, validation: loss=0.2088, simple_loss=0.3114, pruned_loss=0.05305, over 944034.00 frames.
+2024-07-27 19:20:48,916 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 19:20:53,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=52890.666666666664, ans=0.125
+2024-07-27 19:20:56,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=52904.0, ans=0.0
+2024-07-27 19:20:58,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=52904.0, ans=0.025
+2024-07-27 19:21:15,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.03 vs. limit=10.0
+2024-07-27 19:21:25,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52944.0, ans=0.0
+2024-07-27 19:21:41,736 INFO [train.py:1114] (1/4) Epoch 4, batch 9050, loss[loss=0.2318, simple_loss=0.2982, pruned_loss=0.08266, over 4495.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3326, pruned_loss=0.09272, over 934700.13 frames. ], batch size: 10, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:21:46,105 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 6.460e+01 7.493e+01 8.562e+01 1.240e+02, threshold=1.499e+02, percent-clipped=0.0
+2024-07-27 19:21:51,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52970.666666666664, ans=0.125
+2024-07-27 19:21:59,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0
+2024-07-27 19:21:59,998 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:22:16,420 INFO [train.py:1114] (1/4) Epoch 4, batch 9100, loss[loss=0.2352, simple_loss=0.3225, pruned_loss=0.074, over 4938.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3323, pruned_loss=0.09228, over 936940.90 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:22:16,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=53024.0, ans=0.125
+2024-07-27 19:22:16,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=53024.0, ans=0.125
+2024-07-27 19:22:18,080 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0
+2024-07-27 19:22:19,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=53024.0, ans=0.125
+2024-07-27 19:22:25,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.29 vs. limit=15.0
+2024-07-27 19:22:47,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.92 vs. limit=15.0
+2024-07-27 19:22:51,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53064.0, ans=0.1
+2024-07-27 19:22:56,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=53064.0, ans=0.0
+2024-07-27 19:22:56,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=53064.0, ans=0.125
+2024-07-27 19:23:02,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=53077.333333333336, ans=0.125
+2024-07-27 19:23:04,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=53090.666666666664, ans=0.05
+2024-07-27 19:23:04,832 INFO [train.py:1114] (1/4) Epoch 4, batch 9150, loss[loss=0.2976, simple_loss=0.3777, pruned_loss=0.1088, over 4811.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.334, pruned_loss=0.09328, over 935738.41 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:23:04,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=53090.666666666664, ans=0.125
+2024-07-27 19:23:09,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=53090.666666666664, ans=0.0
+2024-07-27 19:23:11,021 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.280e+01 7.131e+01 8.307e+01 1.469e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 19:23:18,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=53104.0, ans=0.125
+2024-07-27 19:23:32,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=53144.0, ans=0.0
+2024-07-27 19:23:37,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=53144.0, ans=0.0
+2024-07-27 19:23:38,851 INFO [train.py:1114] (1/4) Epoch 4, batch 9200, loss[loss=0.2133, simple_loss=0.2978, pruned_loss=0.06441, over 4861.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3327, pruned_loss=0.09239, over 937383.27 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0
+2024-07-27 19:23:39,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53157.333333333336, ans=0.1
+2024-07-27 19:23:40,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=53157.333333333336, ans=0.0
+2024-07-27 19:23:52,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0
+2024-07-27 19:24:05,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=53210.666666666664, ans=0.125
+2024-07-27 19:24:10,827 INFO [train.py:1114] (1/4) Epoch 4, batch 9250, loss[loss=0.3548, simple_loss=0.4267, pruned_loss=0.1414, over 4642.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3327, pruned_loss=0.09215, over 937997.11 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:24:14,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=53224.0, ans=0.0
+2024-07-27 19:24:14,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53224.0, ans=0.125
+2024-07-27 19:24:15,277 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.609e+01 7.603e+01 9.259e+01 1.699e+02, threshold=1.521e+02, percent-clipped=1.0
+2024-07-27 19:24:31,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=53237.333333333336, ans=0.125
+2024-07-27 19:24:34,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53250.666666666664, ans=0.125
+2024-07-27 19:24:46,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=53277.333333333336, ans=0.0
+2024-07-27 19:24:47,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=53277.333333333336, ans=0.0
+2024-07-27 19:24:49,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=53277.333333333336, ans=0.125
+2024-07-27 19:24:53,697 INFO [train.py:1114] (1/4) Epoch 4, batch 9300, loss[loss=0.2629, simple_loss=0.3313, pruned_loss=0.09725, over 4779.00 frames. ], tot_loss[loss=0.258, simple_loss=0.332, pruned_loss=0.09195, over 938018.26 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:24:57,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=53290.666666666664, ans=0.0
+2024-07-27 19:25:03,023 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:25:05,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53304.0, ans=0.0
+2024-07-27 19:25:05,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=53304.0, ans=0.2
+2024-07-27 19:25:07,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=53317.333333333336, ans=0.125
+2024-07-27 19:25:09,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53317.333333333336, ans=0.1
+2024-07-27 19:25:18,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=53330.666666666664, ans=0.95
+2024-07-27 19:25:38,982 INFO [train.py:1114] (1/4) Epoch 4, batch 9350, loss[loss=0.2573, simple_loss=0.326, pruned_loss=0.09435, over 4817.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3328, pruned_loss=0.09242, over 935504.83 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:25:43,192 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.072e+01 6.213e+01 6.915e+01 8.745e+01 1.555e+02, threshold=1.383e+02, percent-clipped=1.0
+2024-07-27 19:25:43,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=53357.333333333336, ans=0.125
+2024-07-27 19:26:00,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=53384.0, ans=0.0
+2024-07-27 19:26:14,176 INFO [train.py:1114] (1/4) Epoch 4, batch 9400, loss[loss=0.2528, simple_loss=0.3357, pruned_loss=0.08496, over 4694.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3316, pruned_loss=0.09214, over 933155.40 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:26:16,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.43 vs. limit=15.0
+2024-07-27 19:26:20,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=53437.333333333336, ans=0.125
+2024-07-27 19:26:28,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=53450.666666666664, ans=0.2
+2024-07-27 19:26:35,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.78 vs. limit=15.0
+2024-07-27 19:26:38,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53464.0, ans=0.125
+2024-07-27 19:26:47,551 INFO [train.py:1114] (1/4) Epoch 4, batch 9450, loss[loss=0.1902, simple_loss=0.2734, pruned_loss=0.05344, over 4804.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3326, pruned_loss=0.09289, over 932374.78 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:26:49,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=53490.666666666664, ans=0.2
+2024-07-27 19:26:53,646 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.978e+01 6.050e+01 6.667e+01 7.624e+01 1.196e+02, threshold=1.333e+02, percent-clipped=0.0
+2024-07-27 19:26:55,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=15.0
+2024-07-27 19:27:00,500 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 19:27:07,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=53517.333333333336, ans=0.125
+2024-07-27 19:27:10,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=12.0
+2024-07-27 19:27:10,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-07-27 19:27:11,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=53530.666666666664, ans=0.0
+2024-07-27 19:27:15,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53544.0, ans=0.1
+2024-07-27 19:27:18,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=53544.0, ans=0.125
+2024-07-27 19:27:21,640 INFO [train.py:1114] (1/4) Epoch 4, batch 9500, loss[loss=0.2694, simple_loss=0.3156, pruned_loss=0.1117, over 4704.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3327, pruned_loss=0.09229, over 934969.86 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:27:25,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53557.333333333336, ans=0.1
+2024-07-27 19:27:25,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0
+2024-07-27 19:27:27,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.62 vs. limit=15.0
+2024-07-27 19:27:31,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.85 vs. limit=22.5
+2024-07-27 19:27:35,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=53584.0, ans=0.0
+2024-07-27 19:27:40,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=53584.0, ans=0.125
+2024-07-27 19:27:44,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=53597.333333333336, ans=0.1
+2024-07-27 19:27:53,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=53610.666666666664, ans=0.125
+2024-07-27 19:27:54,668 INFO [train.py:1114] (1/4) Epoch 4, batch 9550, loss[loss=0.2307, simple_loss=0.3018, pruned_loss=0.0798, over 4774.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3321, pruned_loss=0.09196, over 932155.78 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:27:58,966 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 6.641e+01 7.346e+01 8.353e+01 1.240e+02, threshold=1.469e+02, percent-clipped=0.0
+2024-07-27 19:27:59,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=53624.0, ans=0.0
+2024-07-27 19:28:19,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53650.666666666664, ans=0.125
+2024-07-27 19:28:36,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=53677.333333333336, ans=0.125
+2024-07-27 19:28:39,653 INFO [train.py:1114] (1/4) Epoch 4, batch 9600, loss[loss=0.291, simple_loss=0.3519, pruned_loss=0.115, over 3525.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3312, pruned_loss=0.09124, over 931231.88 frames. ], batch size: 35, lr: 1.68e-02, grad_scale: 64.0
+2024-07-27 19:28:46,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=53704.0, ans=0.125
+2024-07-27 19:28:48,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=53704.0, ans=0.0
+2024-07-27 19:28:51,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=53717.333333333336, ans=0.125
+2024-07-27 19:28:55,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53717.333333333336, ans=0.1
+2024-07-27 19:28:55,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.18 vs. limit=12.0
+2024-07-27 19:29:08,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=53744.0, ans=0.2
+2024-07-27 19:29:11,821 INFO [train.py:1114] (1/4) Epoch 4, batch 9650, loss[loss=0.2626, simple_loss=0.3497, pruned_loss=0.08774, over 4837.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3318, pruned_loss=0.0921, over 927559.16 frames. 
], batch size: 16, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:29:19,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.349e+01 7.028e+01 7.935e+01 1.425e+02, threshold=1.406e+02, percent-clipped=0.0 +2024-07-27 19:29:19,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=53757.333333333336, ans=0.025 +2024-07-27 19:29:19,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=53757.333333333336, ans=0.0 +2024-07-27 19:29:19,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=53757.333333333336, ans=15.0 +2024-07-27 19:29:39,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53784.0, ans=0.1 +2024-07-27 19:29:47,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=53797.333333333336, ans=0.1 +2024-07-27 19:29:50,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=53797.333333333336, ans=0.0 +2024-07-27 19:29:52,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53810.666666666664, ans=0.1 +2024-07-27 19:29:57,617 INFO [train.py:1114] (1/4) Epoch 4, batch 9700, loss[loss=0.3147, simple_loss=0.3928, pruned_loss=0.1182, over 4254.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3327, pruned_loss=0.09258, over 925450.88 frames. ], batch size: 25, lr: 1.68e-02, grad_scale: 32.0 +2024-07-27 19:30:00,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=53824.0, ans=0.2 +2024-07-27 19:30:08,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=53837.333333333336, ans=0.125 +2024-07-27 19:30:09,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=53837.333333333336, ans=0.0 +2024-07-27 19:30:17,785 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:30:27,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=53877.333333333336, ans=0.0 +2024-07-27 19:30:30,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=53877.333333333336, ans=10.0 +2024-07-27 19:30:33,404 INFO [train.py:1114] (1/4) Epoch 4, batch 9750, loss[loss=0.2439, simple_loss=0.3241, pruned_loss=0.08185, over 4667.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3315, pruned_loss=0.09221, over 925885.04 frames. 
], batch size: 15, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:30:33,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=53890.666666666664, ans=0.125 +2024-07-27 19:30:42,342 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.435e+01 7.103e+01 8.018e+01 1.499e+02, threshold=1.421e+02, percent-clipped=1.0 +2024-07-27 19:30:53,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53917.333333333336, ans=0.1 +2024-07-27 19:31:05,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.12 vs. limit=12.0 +2024-07-27 19:31:06,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=53944.0, ans=0.125 +2024-07-27 19:31:09,152 INFO [train.py:1114] (1/4) Epoch 4, batch 9800, loss[loss=0.2677, simple_loss=0.3277, pruned_loss=0.1039, over 4708.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3314, pruned_loss=0.09231, over 925130.59 frames. ], batch size: 12, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:31:29,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=53997.333333333336, ans=0.2 +2024-07-27 19:31:35,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=54010.666666666664, ans=0.125 +2024-07-27 19:31:40,669 INFO [train.py:1114] (1/4) Epoch 4, batch 9850, loss[loss=0.2856, simple_loss=0.3613, pruned_loss=0.1049, over 4911.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3309, pruned_loss=0.09143, over 927699.99 frames. ], batch size: 15, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:31:46,037 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.505e+01 6.777e+01 7.966e+01 9.401e+01 1.769e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 19:31:53,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=54037.333333333336, ans=0.125 +2024-07-27 19:31:56,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54050.666666666664, ans=0.125 +2024-07-27 19:32:04,306 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.13 vs. limit=15.0 +2024-07-27 19:32:05,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=54064.0, ans=0.125 +2024-07-27 19:32:05,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=54064.0, ans=0.125 +2024-07-27 19:32:06,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=54064.0, ans=0.025 +2024-07-27 19:32:08,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.32 vs. 
limit=22.5 +2024-07-27 19:32:08,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=54064.0, ans=0.0 +2024-07-27 19:32:09,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=54064.0, ans=0.0 +2024-07-27 19:32:12,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=54077.333333333336, ans=0.125 +2024-07-27 19:32:13,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=54077.333333333336, ans=0.125 +2024-07-27 19:32:17,529 INFO [train.py:1114] (1/4) Epoch 4, batch 9900, loss[loss=0.2672, simple_loss=0.3489, pruned_loss=0.09276, over 4867.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3317, pruned_loss=0.09178, over 926664.90 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 16.0 +2024-07-27 19:32:17,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=54090.666666666664, ans=0.125 +2024-07-27 19:32:22,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.34 vs. limit=15.0 +2024-07-27 19:32:35,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.71 vs. limit=15.0 +2024-07-27 19:32:43,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=54117.333333333336, ans=0.125 +2024-07-27 19:32:47,203 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:33:15,475 INFO [train.py:1114] (1/4) Epoch 4, batch 9950, loss[loss=0.2019, simple_loss=0.2684, pruned_loss=0.06771, over 4795.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3321, pruned_loss=0.09273, over 929142.74 frames. ], batch size: 11, lr: 1.67e-02, grad_scale: 16.0 +2024-07-27 19:33:20,987 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.075e+01 6.568e+01 7.447e+01 8.780e+01 1.338e+02, threshold=1.489e+02, percent-clipped=0.0 +2024-07-27 19:33:30,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=54170.666666666664, ans=0.125 +2024-07-27 19:33:31,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.74 vs. limit=15.0 +2024-07-27 19:33:31,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54170.666666666664, ans=0.0 +2024-07-27 19:33:38,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=54197.333333333336, ans=0.2 +2024-07-27 19:33:38,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54197.333333333336, ans=0.125 +2024-07-27 19:33:38,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.34 vs. 
limit=10.0 +2024-07-27 19:33:39,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.38 vs. limit=22.5 +2024-07-27 19:33:40,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=54197.333333333336, ans=0.125 +2024-07-27 19:33:41,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.29 vs. limit=22.5 +2024-07-27 19:33:49,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=54210.666666666664, ans=0.09899494936611666 +2024-07-27 19:33:51,643 INFO [train.py:1114] (1/4) Epoch 4, batch 10000, loss[loss=0.2493, simple_loss=0.3438, pruned_loss=0.07741, over 4608.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3374, pruned_loss=0.09571, over 926631.36 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:34:39,360 INFO [train.py:1114] (1/4) Epoch 4, batch 10050, loss[loss=0.3456, simple_loss=0.3891, pruned_loss=0.151, over 3317.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3426, pruned_loss=0.09934, over 914783.28 frames. ], batch size: 36, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:34:41,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=54290.666666666664, ans=0.0 +2024-07-27 19:34:45,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 6.968e+01 7.682e+01 9.310e+01 1.537e+02, threshold=1.536e+02, percent-clipped=1.0 +2024-07-27 19:34:47,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=54304.0, ans=0.0 +2024-07-27 19:34:51,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54304.0, ans=0.125 +2024-07-27 19:34:52,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=54317.333333333336, ans=0.0 +2024-07-27 19:34:56,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=54317.333333333336, ans=0.125 +2024-07-27 19:35:00,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=15.15 vs. limit=15.0 +2024-07-27 19:35:00,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54330.666666666664, ans=0.1 +2024-07-27 19:35:01,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.18 vs. limit=10.0 +2024-07-27 19:35:03,555 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0 +2024-07-27 19:35:12,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=54344.0, ans=0.5 +2024-07-27 19:35:14,123 INFO [train.py:1114] (1/4) Epoch 4, batch 10100, loss[loss=0.2989, simple_loss=0.3564, pruned_loss=0.1206, over 3340.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3488, pruned_loss=0.1071, over 861942.36 frames. 
], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:35:17,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=54357.333333333336, ans=0.125 +2024-07-27 19:35:21,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54357.333333333336, ans=0.1 +2024-07-27 19:35:29,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=54384.0, ans=0.125 +2024-07-27 19:35:45,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=54410.666666666664, ans=0.0 +2024-07-27 19:35:49,231 INFO [train.py:1114] (1/4) Epoch 4, batch 10150, loss[loss=0.3406, simple_loss=0.3815, pruned_loss=0.1499, over 3358.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3536, pruned_loss=0.1129, over 818423.53 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:36:02,454 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.057e+01 7.252e+01 7.644e+01 8.757e+01 1.198e+02, threshold=1.529e+02, percent-clipped=0.0 +2024-07-27 19:36:05,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.29 vs. limit=22.5 +2024-07-27 19:36:09,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=54450.666666666664, ans=0.125 +2024-07-27 19:36:28,235 INFO [train.py:1114] (1/4) Epoch 4, batch 10200, loss[loss=0.3928, simple_loss=0.4137, pruned_loss=0.1859, over 3102.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.357, pruned_loss=0.1171, over 785639.34 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:36:32,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.40 vs. limit=10.0 +2024-07-27 19:36:34,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.45 vs. limit=15.0 +2024-07-27 19:36:36,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=54504.0, ans=0.0 +2024-07-27 19:37:32,056 INFO [train.py:1114] (1/4) Epoch 5, batch 0, loss[loss=0.199, simple_loss=0.2809, pruned_loss=0.05859, over 4842.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2809, pruned_loss=0.05859, over 4842.00 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:37:32,056 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 19:37:43,747 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.2167, simple_loss=0.3194, pruned_loss=0.05704, over 944034.00 frames. 
+2024-07-27 19:37:43,748 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 19:37:49,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54521.333333333336, ans=0.125 +2024-07-27 19:37:53,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=54534.666666666664, ans=0.07 +2024-07-27 19:38:08,547 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.667e+01 7.198e+01 8.159e+01 1.101e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 19:38:15,641 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-27 19:43:25,058 INFO [train.py:1114] (1/4) Epoch 5, batch 50, loss[loss=0.2792, simple_loss=0.3479, pruned_loss=0.1053, over 4612.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3346, pruned_loss=0.0944, over 206324.69 frames. ], batch size: 11, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:43:55,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=54601.333333333336, ans=0.0 +2024-07-27 19:43:59,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.76 vs. limit=10.0 +2024-07-27 19:44:16,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=54628.0, ans=0.0 +2024-07-27 19:44:19,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=54628.0, ans=0.2 +2024-07-27 19:44:31,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-07-27 19:44:43,616 INFO [train.py:1114] (1/4) Epoch 5, batch 100, loss[loss=0.2319, simple_loss=0.31, pruned_loss=0.07696, over 4645.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3328, pruned_loss=0.09149, over 364933.45 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:45:06,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=54654.666666666664, ans=0.0 +2024-07-27 19:45:28,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=54668.0, ans=0.0 +2024-07-27 19:45:30,314 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:45:51,247 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:45:51,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=15.0 +2024-07-27 19:45:52,461 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 6.028e+01 6.816e+01 7.937e+01 1.219e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 19:46:20,043 INFO [train.py:1114] (1/4) Epoch 5, batch 150, loss[loss=0.1895, simple_loss=0.2673, pruned_loss=0.05587, over 4604.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3292, pruned_loss=0.08885, over 493727.71 frames. 
], batch size: 11, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:46:50,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54734.666666666664, ans=0.125 +2024-07-27 19:46:56,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.09 vs. limit=15.0 +2024-07-27 19:47:04,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54748.0, ans=0.1 +2024-07-27 19:47:18,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.45 vs. limit=15.0 +2024-07-27 19:47:33,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0 +2024-07-27 19:47:34,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54774.666666666664, ans=0.125 +2024-07-27 19:47:49,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.63 vs. limit=15.0 +2024-07-27 19:47:50,698 INFO [train.py:1114] (1/4) Epoch 5, batch 200, loss[loss=0.2555, simple_loss=0.3272, pruned_loss=0.09186, over 4538.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3283, pruned_loss=0.08889, over 593556.68 frames. ], batch size: 21, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:47:51,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.78 vs. limit=12.0 +2024-07-27 19:47:52,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54788.0, ans=0.1 +2024-07-27 19:47:53,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=54788.0, ans=0.2 +2024-07-27 19:48:13,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54801.333333333336, ans=0.125 +2024-07-27 19:48:15,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.50 vs. limit=15.0 +2024-07-27 19:48:37,657 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:48:41,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54828.0, ans=0.125 +2024-07-27 19:48:41,787 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.041e+01 6.282e+01 6.864e+01 7.827e+01 1.211e+02, threshold=1.373e+02, percent-clipped=0.0 +2024-07-27 19:49:02,705 INFO [train.py:1114] (1/4) Epoch 5, batch 250, loss[loss=0.2767, simple_loss=0.3655, pruned_loss=0.09398, over 4632.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.329, pruned_loss=0.08868, over 670092.15 frames. ], batch size: 16, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:49:10,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.08 vs. 
limit=12.0 +2024-07-27 19:49:18,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=54881.333333333336, ans=0.0 +2024-07-27 19:49:19,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-27 19:49:24,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54894.666666666664, ans=0.1 +2024-07-27 19:49:50,293 INFO [train.py:1114] (1/4) Epoch 5, batch 300, loss[loss=0.247, simple_loss=0.3376, pruned_loss=0.07817, over 4797.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3279, pruned_loss=0.08785, over 729423.05 frames. ], batch size: 15, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:49:57,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.00 vs. limit=22.5 +2024-07-27 19:49:57,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.65 vs. limit=12.0 +2024-07-27 19:50:16,106 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.053e+01 6.343e+01 7.108e+01 8.248e+01 1.263e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 19:50:27,359 INFO [train.py:1114] (1/4) Epoch 5, batch 350, loss[loss=0.2294, simple_loss=0.3065, pruned_loss=0.07616, over 4958.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.327, pruned_loss=0.0869, over 775942.83 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:50:28,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=54988.0, ans=0.0 +2024-07-27 19:50:32,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=54988.0, ans=0.025 +2024-07-27 19:50:33,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=54988.0, ans=0.2 +2024-07-27 19:50:43,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=55014.666666666664, ans=0.0 +2024-07-27 19:51:12,113 INFO [train.py:1114] (1/4) Epoch 5, batch 400, loss[loss=0.2684, simple_loss=0.3411, pruned_loss=0.09786, over 4698.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3256, pruned_loss=0.08602, over 813166.19 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:51:17,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.15 vs. 
limit=22.5 +2024-07-27 19:51:19,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=55068.0, ans=0.125 +2024-07-27 19:51:39,107 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:51:43,473 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.118e+01 6.055e+01 6.518e+01 7.484e+01 1.056e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-27 19:51:44,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=55094.666666666664, ans=0.125 +2024-07-27 19:51:48,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.86 vs. limit=15.0 +2024-07-27 19:52:01,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=55121.333333333336, ans=0.0 +2024-07-27 19:52:02,004 INFO [train.py:1114] (1/4) Epoch 5, batch 450, loss[loss=0.2773, simple_loss=0.357, pruned_loss=0.09887, over 4633.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3269, pruned_loss=0.08675, over 839056.72 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:52:10,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55134.666666666664, ans=0.1 +2024-07-27 19:52:12,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.05 vs. limit=15.0 +2024-07-27 19:52:13,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=55134.666666666664, ans=0.2 +2024-07-27 19:52:14,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=55134.666666666664, ans=0.025 +2024-07-27 19:52:16,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55134.666666666664, ans=0.0 +2024-07-27 19:52:26,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=55161.333333333336, ans=0.125 +2024-07-27 19:52:31,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=55174.666666666664, ans=0.125 +2024-07-27 19:52:39,053 INFO [train.py:1114] (1/4) Epoch 5, batch 500, loss[loss=0.2829, simple_loss=0.3498, pruned_loss=0.108, over 4677.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3258, pruned_loss=0.08667, over 861431.39 frames. ], batch size: 15, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:52:52,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.91 vs. 
limit=15.0 +2024-07-27 19:52:56,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=55214.666666666664, ans=0.2 +2024-07-27 19:53:00,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55228.0, ans=0.125 +2024-07-27 19:53:01,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=55228.0, ans=0.125 +2024-07-27 19:53:04,119 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.118e+01 6.781e+01 7.848e+01 1.133e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 19:53:04,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=55228.0, ans=0.0 +2024-07-27 19:53:19,187 INFO [train.py:1114] (1/4) Epoch 5, batch 550, loss[loss=0.2867, simple_loss=0.3639, pruned_loss=0.1047, over 4607.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3258, pruned_loss=0.08687, over 877496.83 frames. ], batch size: 17, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:53:30,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=55268.0, ans=0.0 +2024-07-27 19:53:49,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5 +2024-07-27 19:53:55,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=55308.0, ans=0.125 +2024-07-27 19:53:59,350 INFO [train.py:1114] (1/4) Epoch 5, batch 600, loss[loss=0.2884, simple_loss=0.3623, pruned_loss=0.1072, over 4602.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3268, pruned_loss=0.08737, over 892020.91 frames. ], batch size: 16, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:54:06,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=55334.666666666664, ans=0.0 +2024-07-27 19:54:16,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=55348.0, ans=0.125 +2024-07-27 19:54:16,099 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:54:23,110 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.126e+01 6.489e+01 7.020e+01 8.216e+01 1.209e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 19:54:25,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55374.666666666664, ans=0.1 +2024-07-27 19:54:28,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=55374.666666666664, ans=0.0 +2024-07-27 19:54:32,212 INFO [train.py:1114] (1/4) Epoch 5, batch 650, loss[loss=0.2911, simple_loss=0.3557, pruned_loss=0.1133, over 4754.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3265, pruned_loss=0.08772, over 903798.78 frames. 
], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:54:34,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=55388.0, ans=0.07 +2024-07-27 19:54:41,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55401.333333333336, ans=0.1 +2024-07-27 19:55:04,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.22 vs. limit=15.0 +2024-07-27 19:55:10,105 INFO [train.py:1114] (1/4) Epoch 5, batch 700, loss[loss=0.1985, simple_loss=0.2895, pruned_loss=0.05376, over 4642.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3261, pruned_loss=0.08718, over 912104.91 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:55:20,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=55468.0, ans=0.125 +2024-07-27 19:55:23,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=55468.0, ans=0.05 +2024-07-27 19:55:33,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=55494.666666666664, ans=0.125 +2024-07-27 19:55:37,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.381e+01 6.482e+01 7.754e+01 9.297e+01 1.843e+02, threshold=1.551e+02, percent-clipped=6.0 +2024-07-27 19:55:42,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=55508.0, ans=0.125 +2024-07-27 19:55:47,635 INFO [train.py:1114] (1/4) Epoch 5, batch 750, loss[loss=0.2181, simple_loss=0.3068, pruned_loss=0.06473, over 4701.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3254, pruned_loss=0.087, over 918743.80 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:55:51,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55521.333333333336, ans=0.0 +2024-07-27 19:55:53,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.79 vs. limit=15.0 +2024-07-27 19:55:58,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=55534.666666666664, ans=0.04949747468305833 +2024-07-27 19:56:06,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=55548.0, ans=0.0 +2024-07-27 19:56:24,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55561.333333333336, ans=0.125 +2024-07-27 19:56:26,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.79 vs. limit=15.0 +2024-07-27 19:56:30,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=55574.666666666664, ans=0.125 +2024-07-27 19:56:34,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.12 vs. 
limit=15.0 +2024-07-27 19:56:37,353 INFO [train.py:1114] (1/4) Epoch 5, batch 800, loss[loss=0.2336, simple_loss=0.309, pruned_loss=0.07908, over 4857.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3256, pruned_loss=0.08721, over 923377.74 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:56:40,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. limit=6.0 +2024-07-27 19:57:05,531 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.583e+01 6.253e+01 7.054e+01 8.487e+01 1.181e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 19:57:12,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.88 vs. limit=15.0 +2024-07-27 19:57:17,155 INFO [train.py:1114] (1/4) Epoch 5, batch 850, loss[loss=0.2645, simple_loss=0.3506, pruned_loss=0.0892, over 4655.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3251, pruned_loss=0.08685, over 927646.89 frames. ], batch size: 14, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:57:24,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.84 vs. limit=10.0 +2024-07-27 19:57:27,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.78 vs. limit=6.0 +2024-07-27 19:57:54,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55708.0, ans=0.0 +2024-07-27 19:57:56,343 INFO [train.py:1114] (1/4) Epoch 5, batch 900, loss[loss=0.2351, simple_loss=0.3031, pruned_loss=0.0835, over 4858.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.327, pruned_loss=0.08812, over 929204.77 frames. ], batch size: 12, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:58:02,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=55734.666666666664, ans=0.0 +2024-07-27 19:58:27,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.49 vs. limit=15.0 +2024-07-27 19:58:32,495 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.369e+01 7.320e+01 8.500e+01 1.312e+02, threshold=1.464e+02, percent-clipped=0.0 +2024-07-27 19:58:40,430 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:58:47,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=55774.666666666664, ans=10.0 +2024-07-27 19:58:53,867 INFO [train.py:1114] (1/4) Epoch 5, batch 950, loss[loss=0.2456, simple_loss=0.3068, pruned_loss=0.09214, over 4789.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3273, pruned_loss=0.08827, over 931359.75 frames. 
], batch size: 12, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:58:54,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55788.0, ans=0.0 +2024-07-27 19:58:57,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=55788.0, ans=15.0 +2024-07-27 19:58:58,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=55788.0, ans=0.125 +2024-07-27 19:58:59,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=55788.0, ans=0.125 +2024-07-27 19:59:14,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=55814.666666666664, ans=0.125 +2024-07-27 19:59:15,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=55814.666666666664, ans=0.025 +2024-07-27 19:59:23,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55828.0, ans=0.0 +2024-07-27 19:59:30,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=55841.333333333336, ans=0.0 +2024-07-27 19:59:34,253 INFO [train.py:1114] (1/4) Epoch 5, batch 1000, loss[loss=0.2533, simple_loss=0.3257, pruned_loss=0.09049, over 4963.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3275, pruned_loss=0.08804, over 931061.52 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:59:36,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55854.666666666664, ans=0.125 +2024-07-27 19:59:44,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=55854.666666666664, ans=0.0 +2024-07-27 19:59:52,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55881.333333333336, ans=0.0 +2024-07-27 20:00:03,951 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.099e+01 6.760e+01 7.878e+01 1.806e+02, threshold=1.352e+02, percent-clipped=1.0 +2024-07-27 20:00:13,322 INFO [train.py:1114] (1/4) Epoch 5, batch 1050, loss[loss=0.2743, simple_loss=0.3581, pruned_loss=0.09525, over 4865.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3263, pruned_loss=0.08746, over 933042.35 frames. 
], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:14,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=55921.333333333336, ans=0.0 +2024-07-27 20:00:17,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=55921.333333333336, ans=0.125 +2024-07-27 20:00:19,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55921.333333333336, ans=0.1 +2024-07-27 20:00:37,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=55961.333333333336, ans=0.125 +2024-07-27 20:00:44,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=55974.666666666664, ans=0.2 +2024-07-27 20:00:46,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=55974.666666666664, ans=0.025 +2024-07-27 20:00:47,864 INFO [train.py:1114] (1/4) Epoch 5, batch 1100, loss[loss=0.2754, simple_loss=0.3359, pruned_loss=0.1075, over 4893.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3256, pruned_loss=0.08696, over 935113.60 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:58,179 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:01:05,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=56014.666666666664, ans=0.125 +2024-07-27 20:01:11,708 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.284e+01 6.917e+01 8.137e+01 1.279e+02, threshold=1.383e+02, percent-clipped=0.0 +2024-07-27 20:01:11,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=56028.0, ans=0.025 +2024-07-27 20:01:12,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56028.0, ans=0.1 +2024-07-27 20:01:16,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=56041.333333333336, ans=0.025 +2024-07-27 20:01:22,328 INFO [train.py:1114] (1/4) Epoch 5, batch 1150, loss[loss=0.2547, simple_loss=0.3288, pruned_loss=0.09028, over 4910.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3255, pruned_loss=0.08712, over 935017.20 frames. 
], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:01:24,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=56054.666666666664, ans=0.2 +2024-07-27 20:01:24,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=56054.666666666664, ans=0.125 +2024-07-27 20:01:34,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56068.0, ans=0.125 +2024-07-27 20:01:38,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=56081.333333333336, ans=0.0 +2024-07-27 20:01:47,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=56094.666666666664, ans=0.2 +2024-07-27 20:01:57,030 INFO [train.py:1114] (1/4) Epoch 5, batch 1200, loss[loss=0.2968, simple_loss=0.3688, pruned_loss=0.1124, over 4875.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3276, pruned_loss=0.08835, over 933909.61 frames. ], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:02:05,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0 +2024-07-27 20:02:17,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=56121.333333333336, ans=0.0 +2024-07-27 20:02:35,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=56161.333333333336, ans=0.125 +2024-07-27 20:02:40,436 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.476e+01 6.643e+01 8.181e+01 1.020e+02 1.586e+02, threshold=1.636e+02, percent-clipped=2.0 +2024-07-27 20:02:53,265 INFO [train.py:1114] (1/4) Epoch 5, batch 1250, loss[loss=0.2864, simple_loss=0.3555, pruned_loss=0.1086, over 4808.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3273, pruned_loss=0.08723, over 937636.47 frames. ], batch size: 15, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:02:58,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=56188.0, ans=0.0 +2024-07-27 20:03:08,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=56214.666666666664, ans=0.0 +2024-07-27 20:03:12,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=56214.666666666664, ans=0.2 +2024-07-27 20:03:19,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.34 vs. limit=6.0 +2024-07-27 20:03:26,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=56241.333333333336, ans=0.0 +2024-07-27 20:03:30,282 INFO [train.py:1114] (1/4) Epoch 5, batch 1300, loss[loss=0.2444, simple_loss=0.3273, pruned_loss=0.08078, over 4754.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3265, pruned_loss=0.08704, over 939098.62 frames. 
], batch size: 19, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:03:37,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=56254.666666666664, ans=0.125
+2024-07-27 20:03:40,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=56268.0, ans=0.0
+2024-07-27 20:03:46,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=56268.0, ans=0.0
+2024-07-27 20:03:51,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0
+2024-07-27 20:03:53,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.53 vs. limit=22.5
+2024-07-27 20:03:56,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=56294.666666666664, ans=0.0
+2024-07-27 20:04:00,102 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.264e+01 7.458e+01 8.643e+01 1.456e+02, threshold=1.492e+02, percent-clipped=0.0
+2024-07-27 20:04:10,188 INFO [train.py:1114] (1/4) Epoch 5, batch 1350, loss[loss=0.2384, simple_loss=0.3152, pruned_loss=0.08084, over 4761.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3258, pruned_loss=0.08665, over 940998.68 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:04:11,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.27 vs. limit=15.0
+2024-07-27 20:04:16,266 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:04:17,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56334.666666666664, ans=0.125
+2024-07-27 20:04:41,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56374.666666666664, ans=0.1
+2024-07-27 20:04:41,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=56374.666666666664, ans=0.125
+2024-07-27 20:04:47,666 INFO [train.py:1114] (1/4) Epoch 5, batch 1400, loss[loss=0.1891, simple_loss=0.2695, pruned_loss=0.05435, over 4703.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3245, pruned_loss=0.08565, over 942709.81 frames. ], batch size: 11, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:04:56,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.74 vs. limit=15.0
+2024-07-27 20:05:02,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=56414.666666666664, ans=0.0
+2024-07-27 20:05:07,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=56414.666666666664, ans=0.125
+2024-07-27 20:05:09,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=15.0
+2024-07-27 20:05:48,349 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.394e+01 7.108e+01 8.417e+01 1.153e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-27 20:05:50,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0
+2024-07-27 20:05:55,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56441.333333333336, ans=0.1
+2024-07-27 20:05:57,868 INFO [train.py:1114] (1/4) Epoch 5, batch 1450, loss[loss=0.2967, simple_loss=0.3635, pruned_loss=0.1149, over 4670.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.325, pruned_loss=0.08591, over 942787.60 frames. ], batch size: 15, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:06:10,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=56481.333333333336, ans=0.05
+2024-07-27 20:06:13,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=56481.333333333336, ans=0.125
+2024-07-27 20:06:25,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56508.0, ans=0.125
+2024-07-27 20:06:28,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=56508.0, ans=0.025
+2024-07-27 20:06:30,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=56508.0, ans=0.0
+2024-07-27 20:06:31,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=56508.0, ans=0.0
+2024-07-27 20:06:32,436 INFO [train.py:1114] (1/4) Epoch 5, batch 1500, loss[loss=0.2493, simple_loss=0.3281, pruned_loss=0.08523, over 4810.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3258, pruned_loss=0.08643, over 942365.37 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0
+2024-07-27 20:06:42,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=56534.666666666664, ans=0.1
+2024-07-27 20:06:44,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.29 vs. limit=15.0
+2024-07-27 20:06:47,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=56548.0, ans=0.0
+2024-07-27 20:06:49,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=56548.0, ans=0.0
+2024-07-27 20:06:59,210 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.351e+01 6.555e+01 7.313e+01 8.345e+01 1.115e+02, threshold=1.463e+02, percent-clipped=0.0
+2024-07-27 20:07:08,540 INFO [train.py:1114] (1/4) Epoch 5, batch 1550, loss[loss=0.2712, simple_loss=0.348, pruned_loss=0.09722, over 4899.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.327, pruned_loss=0.0873, over 938519.35 frames. ], batch size: 15, lr: 1.52e-02, grad_scale: 32.0
+2024-07-27 20:07:08,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=56588.0, ans=0.025
+2024-07-27 20:07:14,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=56601.333333333336, ans=0.0
+2024-07-27 20:07:18,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=56601.333333333336, ans=0.125
+2024-07-27 20:07:23,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56614.666666666664, ans=0.125
+2024-07-27 20:07:24,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=56614.666666666664, ans=0.0
+2024-07-27 20:07:31,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56628.0, ans=0.125
+2024-07-27 20:07:34,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=56628.0, ans=0.125
+2024-07-27 20:07:41,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.89 vs. limit=15.0
+2024-07-27 20:07:42,160 INFO [train.py:1114] (1/4) Epoch 5, batch 1600, loss[loss=0.2208, simple_loss=0.3095, pruned_loss=0.066, over 4871.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3273, pruned_loss=0.08739, over 937156.61 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0
+2024-07-27 20:07:45,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.55 vs. limit=22.5
+2024-07-27 20:07:49,007 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:07:55,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-07-27 20:07:59,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.58 vs. limit=12.0
+2024-07-27 20:08:05,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=56694.666666666664, ans=0.1
+2024-07-27 20:08:05,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.18 vs. limit=6.0
+2024-07-27 20:08:06,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.974e+01 6.291e+01 7.006e+01 7.974e+01 1.110e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 20:08:08,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=56708.0, ans=0.125
+2024-07-27 20:08:09,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.10 vs. limit=22.5
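The recurring `WARNING [optim.py:487]` lines above report the quartiles of recent gradient norms together with a clipping threshold and the share of clipped batches. A minimal sketch of that bookkeeping follows; `GradNormClipper` is a hypothetical name, not the actual icefall optimizer, and the rule `threshold = clipping_scale * median` is an assumption inferred from the logged fields:

```python
import collections
import torch

class GradNormClipper:
    """Track recent gradient norms, clip against a scaled median,
    and report quartiles in the style of the warnings above."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=history)
        self.clipped = 0
        self.steps = 0

    def step(self, params) -> None:
        grads = [p.grad.reshape(-1) for p in params if p.grad is not None]
        norm = torch.cat(grads).norm().item()
        self.norms.append(norm)
        # min, q1, median, q3, max -- the five numbers in the log lines
        qs = torch.quantile(
            torch.tensor(list(self.norms)),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * qs[2].item()  # assumed: 2.0 x median
        self.steps += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        print(
            f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
            + " ".join(f"{q:.3e}" for q in qs.tolist())
            + f", threshold={threshold:.3e}"
            + f", percent-clipped={100.0 * self.clipped / self.steps:.1f}"
        )
```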
+2024-07-27 20:08:12,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56708.0, ans=0.1
+2024-07-27 20:08:15,474 INFO [train.py:1114] (1/4) Epoch 5, batch 1650, loss[loss=0.2568, simple_loss=0.3301, pruned_loss=0.09174, over 4660.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3259, pruned_loss=0.08705, over 937042.12 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:08:17,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56721.333333333336, ans=0.125
+2024-07-27 20:08:18,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=56721.333333333336, ans=0.1
+2024-07-27 20:08:21,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=56721.333333333336, ans=0.2
+2024-07-27 20:08:27,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=56734.666666666664, ans=0.07
+2024-07-27 20:08:40,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=56761.333333333336, ans=0.125
+2024-07-27 20:08:41,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=56774.666666666664, ans=0.2
+2024-07-27 20:08:50,057 INFO [train.py:1114] (1/4) Epoch 5, batch 1700, loss[loss=0.2328, simple_loss=0.3009, pruned_loss=0.08238, over 4702.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3253, pruned_loss=0.08648, over 938608.11 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:09:11,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=56814.666666666664, ans=0.2
+2024-07-27 20:09:11,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=56828.0, ans=0.2
+2024-07-27 20:09:16,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.475e+01 7.223e+01 8.445e+01 1.275e+02, threshold=1.445e+02, percent-clipped=0.0
+2024-07-27 20:09:17,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56828.0, ans=0.1
+2024-07-27 20:09:23,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=56841.333333333336, ans=0.07
+2024-07-27 20:09:25,626 INFO [train.py:1114] (1/4) Epoch 5, batch 1750, loss[loss=0.2058, simple_loss=0.2793, pruned_loss=0.06614, over 4796.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3244, pruned_loss=0.08555, over 939347.35 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:09:38,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.91 vs. limit=15.0
+2024-07-27 20:10:02,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=56921.333333333336, ans=0.125
+2024-07-27 20:10:02,664 INFO [train.py:1114] (1/4) Epoch 5, batch 1800, loss[loss=0.2709, simple_loss=0.3454, pruned_loss=0.09823, over 4633.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3252, pruned_loss=0.08589, over 940377.92 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:10:02,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=56921.333333333336, ans=0.0
+2024-07-27 20:10:04,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=15.0
+2024-07-27 20:10:06,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=56921.333333333336, ans=0.0
+2024-07-27 20:10:10,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.86 vs. limit=15.0
+2024-07-27 20:10:16,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.09 vs. limit=15.0
+2024-07-27 20:10:19,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=56948.0, ans=0.2
+2024-07-27 20:10:26,592 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.038e+01 6.233e+01 6.949e+01 8.152e+01 1.410e+02, threshold=1.390e+02, percent-clipped=0.0
+2024-07-27 20:10:34,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=56974.666666666664, ans=0.125
+2024-07-27 20:10:37,641 INFO [train.py:1114] (1/4) Epoch 5, batch 1850, loss[loss=0.2627, simple_loss=0.3357, pruned_loss=0.09488, over 4813.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3249, pruned_loss=0.08554, over 940508.73 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:10:43,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=56988.0, ans=0.125
+2024-07-27 20:10:51,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=57014.666666666664, ans=0.2
+2024-07-27 20:10:52,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57014.666666666664, ans=0.125
+2024-07-27 20:10:57,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57028.0, ans=0.125
+2024-07-27 20:11:12,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.91 vs. limit=15.0
+2024-07-27 20:11:12,475 INFO [train.py:1114] (1/4) Epoch 5, batch 1900, loss[loss=0.2669, simple_loss=0.3413, pruned_loss=0.09627, over 4661.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3263, pruned_loss=0.08667, over 941512.00 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:11:12,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=57054.666666666664, ans=0.04949747468305833
+2024-07-27 20:11:12,797 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0
+2024-07-27 20:11:20,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=57068.0, ans=0.025
+2024-07-27 20:11:20,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=57068.0, ans=0.0
+2024-07-27 20:11:30,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.32 vs. limit=22.5
+2024-07-27 20:11:33,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57094.666666666664, ans=0.125
+2024-07-27 20:11:35,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.67 vs. limit=10.0
+2024-07-27 20:11:36,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.851e+01 6.078e+01 6.608e+01 7.914e+01 1.166e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-27 20:11:41,286 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:11:43,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=57108.0, ans=0.2
+2024-07-27 20:11:46,530 INFO [train.py:1114] (1/4) Epoch 5, batch 1950, loss[loss=0.2332, simple_loss=0.3158, pruned_loss=0.07529, over 4890.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3262, pruned_loss=0.08634, over 943323.99 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:12:03,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57148.0, ans=0.125
+2024-07-27 20:12:03,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=57148.0, ans=0.125
+2024-07-27 20:12:05,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=57148.0, ans=0.125
+2024-07-27 20:12:11,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57161.333333333336, ans=0.125
+2024-07-27 20:12:18,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.59 vs. limit=22.5
+2024-07-27 20:12:22,149 INFO [train.py:1114] (1/4) Epoch 5, batch 2000, loss[loss=0.2162, simple_loss=0.2824, pruned_loss=0.07494, over 4807.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.327, pruned_loss=0.08688, over 940692.67 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0
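The `ScheduledFloat` entries that dominate this log record a hyperparameter ("ans") whose value depends on training progress ("batch_count"): dropout rates, skip probabilities, and balancer limits are annealed rather than fixed. A minimal sketch of such a schedule, assuming piecewise-linear interpolation between (batch_count, value) breakpoints; the real class lives in icefall's scaling.py and differs in detail:

```python
import bisect

class ScheduledFloat:
    """A float-valued hyperparameter interpolated against batch_count."""

    def __init__(self, *points):
        pts = sorted(points)
        self.xs = [x for x, _ in pts]
        self.ys = [y for _, y in pts]

    def value(self, batch_count: float) -> float:
        i = bisect.bisect_right(self.xs, batch_count)
        if i == 0:
            return self.ys[0]       # before the first breakpoint
        if i == len(self.xs):
            return self.ys[-1]      # after the last breakpoint
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        t = (batch_count - x0) / (x1 - x0)
        return y0 + t * (y1 - y0)

# e.g. a dropout probability annealed from 0.3 to 0.1 over 20k batches:
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(57068.0))  # 0.1 once the schedule has flattened out
```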
+2024-07-27 20:12:23,003 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:12:27,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=57188.0, ans=0.0
+2024-07-27 20:12:29,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=57201.333333333336, ans=0.2
+2024-07-27 20:12:30,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=57201.333333333336, ans=0.2
+2024-07-27 20:12:39,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57214.666666666664, ans=0.125
+2024-07-27 20:12:43,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=57228.0, ans=0.2
+2024-07-27 20:12:46,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.356e+01 7.460e+01 8.642e+01 1.315e+02, threshold=1.492e+02, percent-clipped=0.0
+2024-07-27 20:12:49,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57241.333333333336, ans=0.125
+2024-07-27 20:12:56,122 INFO [train.py:1114] (1/4) Epoch 5, batch 2050, loss[loss=0.2037, simple_loss=0.2775, pruned_loss=0.06494, over 4609.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3253, pruned_loss=0.08602, over 938485.34 frames. ], batch size: 11, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:13:14,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0
+2024-07-27 20:13:14,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57281.333333333336, ans=0.125
+2024-07-27 20:13:15,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57281.333333333336, ans=0.1
+2024-07-27 20:13:17,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57294.666666666664, ans=0.125
+2024-07-27 20:13:26,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.54 vs. limit=6.0
+2024-07-27 20:13:29,962 INFO [train.py:1114] (1/4) Epoch 5, batch 2100, loss[loss=0.2366, simple_loss=0.3257, pruned_loss=0.07378, over 4752.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3243, pruned_loss=0.08521, over 940445.56 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:13:30,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=57321.333333333336, ans=10.0
+2024-07-27 20:13:35,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57321.333333333336, ans=0.1
+2024-07-27 20:13:36,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=57334.666666666664, ans=0.0
+2024-07-27 20:13:37,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57334.666666666664, ans=0.1
+2024-07-27 20:13:37,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0
+2024-07-27 20:13:42,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=57334.666666666664, ans=0.125
+2024-07-27 20:13:43,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57348.0, ans=0.1
+2024-07-27 20:13:49,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=57361.333333333336, ans=0.2
+2024-07-27 20:13:52,086 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:13:52,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57361.333333333336, ans=0.125
+2024-07-27 20:13:53,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.957e+01 6.234e+01 6.918e+01 8.302e+01 1.274e+02, threshold=1.384e+02, percent-clipped=0.0
+2024-07-27 20:13:56,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=57374.666666666664, ans=0.2
+2024-07-27 20:14:02,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=57388.0, ans=0.04949747468305833
+2024-07-27 20:14:03,260 INFO [train.py:1114] (1/4) Epoch 5, batch 2150, loss[loss=0.2483, simple_loss=0.3263, pruned_loss=0.08512, over 4900.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3231, pruned_loss=0.0848, over 944040.40 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:14:13,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57401.333333333336, ans=0.1
+2024-07-27 20:14:15,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=57401.333333333336, ans=0.125
+2024-07-27 20:14:22,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=57414.666666666664, ans=0.1
+2024-07-27 20:14:31,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=57441.333333333336, ans=0.0
+2024-07-27 20:14:38,764 INFO [train.py:1114] (1/4) Epoch 5, batch 2200, loss[loss=0.2691, simple_loss=0.3422, pruned_loss=0.09798, over 4806.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3231, pruned_loss=0.08468, over 943357.37 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:14:45,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=57468.0, ans=0.125
+2024-07-27 20:14:51,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=57468.0, ans=0.125
+2024-07-27 20:14:54,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.14 vs. limit=15.0
+2024-07-27 20:15:00,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57494.666666666664, ans=0.125
+2024-07-27 20:15:03,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.386e+01 7.473e+01 9.024e+01 1.169e+02, threshold=1.495e+02, percent-clipped=0.0
+2024-07-27 20:15:14,972 INFO [train.py:1114] (1/4) Epoch 5, batch 2250, loss[loss=0.2404, simple_loss=0.3228, pruned_loss=0.07896, over 4700.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3231, pruned_loss=0.08444, over 941837.12 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:15:16,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=57521.333333333336, ans=0.2
+2024-07-27 20:15:19,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57521.333333333336, ans=0.125
+2024-07-27 20:15:21,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=57534.666666666664, ans=0.0
+2024-07-27 20:15:33,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=57548.0, ans=0.2
+2024-07-27 20:15:35,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=57548.0, ans=0.2
+2024-07-27 20:15:50,312 INFO [train.py:1114] (1/4) Epoch 5, batch 2300, loss[loss=0.2308, simple_loss=0.2977, pruned_loss=0.08197, over 4941.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3229, pruned_loss=0.08457, over 939636.94 frames. ], batch size: 12, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:16:14,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=57628.0, ans=0.07
+2024-07-27 20:16:16,283 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.130e+01 6.014e+01 6.647e+01 7.772e+01 1.123e+02, threshold=1.329e+02, percent-clipped=0.0
+2024-07-27 20:16:17,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57628.0, ans=0.125
+2024-07-27 20:16:22,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=57641.333333333336, ans=0.2
+2024-07-27 20:16:29,313 INFO [train.py:1114] (1/4) Epoch 5, batch 2350, loss[loss=0.2332, simple_loss=0.3248, pruned_loss=0.07079, over 4636.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3225, pruned_loss=0.08411, over 941511.02 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:16:37,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57668.0, ans=0.125
+2024-07-27 20:16:46,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57681.333333333336, ans=0.125
+2024-07-27 20:16:56,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=57708.0, ans=0.0
+2024-07-27 20:17:01,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=57708.0, ans=0.125
+2024-07-27 20:17:03,185 INFO [train.py:1114] (1/4) Epoch 5, batch 2400, loss[loss=0.2357, simple_loss=0.3245, pruned_loss=0.07342, over 4641.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3235, pruned_loss=0.08469, over 941190.29 frames. ], batch size: 12, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:17:06,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.52 vs. limit=15.0
+2024-07-27 20:17:09,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.44 vs. limit=15.0
+2024-07-27 20:17:21,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=57748.0, ans=0.125
+2024-07-27 20:17:27,272 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.252e+01 6.682e+01 7.735e+01 1.071e+02, threshold=1.336e+02, percent-clipped=0.0
+2024-07-27 20:17:36,682 INFO [train.py:1114] (1/4) Epoch 5, batch 2450, loss[loss=0.2238, simple_loss=0.3044, pruned_loss=0.07159, over 4696.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3245, pruned_loss=0.0856, over 936607.48 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:17:47,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=57801.333333333336, ans=0.2
+2024-07-27 20:17:51,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57801.333333333336, ans=0.125
+2024-07-27 20:17:55,873 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.83 vs. limit=15.0
+2024-07-27 20:18:11,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=57828.0, ans=0.2
+2024-07-27 20:18:14,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=15.0
+2024-07-27 20:18:16,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=57841.333333333336, ans=0.025
+2024-07-27 20:18:19,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57841.333333333336, ans=0.1
+2024-07-27 20:18:20,673 INFO [train.py:1114] (1/4) Epoch 5, batch 2500, loss[loss=0.2744, simple_loss=0.36, pruned_loss=0.09442, over 4813.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3251, pruned_loss=0.08574, over 938307.37 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0
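The `Whitening` lines compare a per-module metric against a limit; the underlying regularizer in scaling.py nudges feature covariance toward a multiple of the identity, and modules are logged as the metric approaches or exceeds the limit. The exact formula in scaling.py may differ, so treat the ratio below (E[eig^2] / E[eig]^2 of the per-group covariance, which is 1.0 for perfectly white features) as an illustration only:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels). Returns 1.0 for white features,
    larger when a few covariance directions dominate."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    c = num_channels // num_groups
    x = x.reshape(num_frames, num_groups, c)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames  # (groups, c, c)
    mean_eig = torch.diagonal(cov, dim1=1, dim2=2).mean()        # E[eig]
    mean_eig_sq = (cov ** 2).sum(dim=(1, 2)).mean() / c          # E[eig^2]
    return (mean_eig_sq / mean_eig ** 2).item()

x = torch.randn(1000, 128)                # near-white activations
print(whitening_metric(x, num_groups=4))  # close to 1.0
```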
+2024-07-27 20:18:22,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0
+2024-07-27 20:18:23,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.92 vs. limit=15.0
+2024-07-27 20:18:35,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.78 vs. limit=15.0
+2024-07-27 20:18:39,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=57881.333333333336, ans=0.2
+2024-07-27 20:18:49,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57894.666666666664, ans=0.1
+2024-07-27 20:18:50,638 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:18:51,012 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 6.442e+01 7.418e+01 9.024e+01 1.336e+02, threshold=1.484e+02, percent-clipped=0.0
+2024-07-27 20:18:54,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57908.0, ans=0.125
+2024-07-27 20:19:00,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.91 vs. limit=22.5
+2024-07-27 20:19:01,024 INFO [train.py:1114] (1/4) Epoch 5, batch 2550, loss[loss=0.2133, simple_loss=0.2823, pruned_loss=0.0722, over 4801.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3241, pruned_loss=0.08504, over 938176.11 frames. ], batch size: 11, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:19:10,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=57934.666666666664, ans=0.0
+2024-07-27 20:19:10,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0
+2024-07-27 20:19:12,125 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.062e+00
+2024-07-27 20:19:30,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=57974.666666666664, ans=0.125
+2024-07-27 20:19:30,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.76 vs. limit=15.0
+2024-07-27 20:19:34,662 INFO [train.py:1114] (1/4) Epoch 5, batch 2600, loss[loss=0.2251, simple_loss=0.3025, pruned_loss=0.07382, over 4896.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3244, pruned_loss=0.08537, over 937696.94 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:19:34,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=57988.0, ans=0.0
+2024-07-27 20:19:44,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58001.333333333336, ans=0.125
+2024-07-27 20:19:48,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=58014.666666666664, ans=0.125
+2024-07-27 20:19:50,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=58014.666666666664, ans=0.07
+2024-07-27 20:19:52,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58014.666666666664, ans=0.1
+2024-07-27 20:19:58,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.70 vs. limit=22.5
+2024-07-27 20:19:58,984 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 6.417e+01 7.272e+01 8.306e+01 1.432e+02, threshold=1.454e+02, percent-clipped=0.0
+2024-07-27 20:20:11,711 INFO [train.py:1114] (1/4) Epoch 5, batch 2650, loss[loss=0.2731, simple_loss=0.3635, pruned_loss=0.09131, over 4611.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3245, pruned_loss=0.0856, over 939668.75 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:20:29,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58068.0, ans=0.1
+2024-07-27 20:20:31,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.42 vs. limit=22.5
+2024-07-27 20:20:39,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=58094.666666666664, ans=0.05
+2024-07-27 20:20:41,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=58094.666666666664, ans=0.2
+2024-07-27 20:20:59,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.59 vs. limit=10.0
+2024-07-27 20:20:59,477 INFO [train.py:1114] (1/4) Epoch 5, batch 2700, loss[loss=0.3289, simple_loss=0.3956, pruned_loss=0.1311, over 4736.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3247, pruned_loss=0.08569, over 939569.98 frames. ], batch size: 14, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:21:00,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=58121.333333333336, ans=0.2
+2024-07-27 20:21:11,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58134.666666666664, ans=0.1
+2024-07-27 20:21:13,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=58134.666666666664, ans=0.125
+2024-07-27 20:21:18,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58148.0, ans=0.125
+2024-07-27 20:21:27,044 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.186e+01 6.835e+01 7.719e+01 1.191e+02, threshold=1.367e+02, percent-clipped=0.0
+2024-07-27 20:21:27,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=58161.333333333336, ans=0.2
+2024-07-27 20:21:30,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.63 vs. limit=15.0
+2024-07-27 20:21:38,085 INFO [train.py:1114] (1/4) Epoch 5, batch 2750, loss[loss=0.271, simple_loss=0.3284, pruned_loss=0.1068, over 4706.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3235, pruned_loss=0.08557, over 939634.91 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:21:45,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.22 vs. limit=15.0
+2024-07-27 20:21:46,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. limit=6.0
+2024-07-27 20:21:48,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=58201.333333333336, ans=0.125
+2024-07-27 20:21:56,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58214.666666666664, ans=0.125
+2024-07-27 20:21:57,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.69 vs. limit=15.0
+2024-07-27 20:21:58,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.38 vs. limit=6.0
+2024-07-27 20:21:59,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.01 vs. limit=6.0
+2024-07-27 20:22:16,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=8.0
+2024-07-27 20:22:19,864 INFO [train.py:1114] (1/4) Epoch 5, batch 2800, loss[loss=0.3848, simple_loss=0.4287, pruned_loss=0.1704, over 3348.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3236, pruned_loss=0.08573, over 937777.03 frames. ], batch size: 35, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:22:20,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=58254.666666666664, ans=0.125
+2024-07-27 20:22:22,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58254.666666666664, ans=0.125
+2024-07-27 20:22:23,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=58254.666666666664, ans=0.125
+2024-07-27 20:22:25,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=58254.666666666664, ans=0.125
+2024-07-27 20:22:40,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=58294.666666666664, ans=0.025
+2024-07-27 20:22:44,006 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 6.174e+01 6.624e+01 7.261e+01 1.719e+02, threshold=1.325e+02, percent-clipped=1.0
+2024-07-27 20:22:46,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=58308.0, ans=0.125
+2024-07-27 20:22:46,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=58308.0, ans=0.025
+2024-07-27 20:22:48,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=58308.0, ans=0.0
+2024-07-27 20:22:53,233 INFO [train.py:1114] (1/4) Epoch 5, batch 2850, loss[loss=0.2503, simple_loss=0.3176, pruned_loss=0.09145, over 4958.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3247, pruned_loss=0.08651, over 936178.84 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:22:56,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=58321.333333333336, ans=0.125
+2024-07-27 20:23:01,139 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:23:12,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=58361.333333333336, ans=0.125
+2024-07-27 20:23:12,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.34 vs. limit=22.5
+2024-07-27 20:23:15,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=58361.333333333336, ans=0.5
+2024-07-27 20:23:23,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=58374.666666666664, ans=0.125
+2024-07-27 20:23:26,074 INFO [train.py:1114] (1/4) Epoch 5, batch 2900, loss[loss=0.2302, simple_loss=0.3003, pruned_loss=0.08001, over 4829.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.326, pruned_loss=0.08616, over 940060.21 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:23:29,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=58388.0, ans=0.0
+2024-07-27 20:23:44,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=58414.666666666664, ans=0.2
+2024-07-27 20:23:51,046 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.205e+01 6.873e+01 7.885e+01 1.448e+02, threshold=1.375e+02, percent-clipped=1.0
+2024-07-27 20:23:54,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=58441.333333333336, ans=0.125
+2024-07-27 20:23:57,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=58441.333333333336, ans=0.125
+2024-07-27 20:24:01,530 INFO [train.py:1114] (1/4) Epoch 5, batch 2950, loss[loss=0.1881, simple_loss=0.2711, pruned_loss=0.05252, over 4705.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3239, pruned_loss=0.08539, over 938838.22 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:24:02,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=58454.666666666664, ans=0.1
+2024-07-27 20:24:08,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=58468.0, ans=0.2
+2024-07-27 20:24:18,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=58481.333333333336, ans=0.125
+2024-07-27 20:24:18,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=58481.333333333336, ans=0.125
+2024-07-27 20:24:39,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=58521.333333333336, ans=0.2
+2024-07-27 20:24:39,797 INFO [train.py:1114] (1/4) Epoch 5, batch 3000, loss[loss=0.2487, simple_loss=0.3348, pruned_loss=0.0813, over 4766.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3226, pruned_loss=0.08433, over 938174.62 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:24:39,797 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 20:25:00,231 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.2529, 1.2347, 0.7750, 1.8005, 0.8487, 0.8338, 1.5882, 1.3275],
+       device='cuda:1')
+2024-07-27 20:25:05,805 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.8024, 4.8951, 3.4699, 3.1632], device='cuda:1')
+2024-07-27 20:25:07,123 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.2018, simple_loss=0.3051, pruned_loss=0.04931, over 944034.00 frames.
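During the validation pass above, zipformer.py:1858 dumps `attn_weights_entropy` for selected self-attention modules: the mean entropy of each head's attention distribution, a cheap diagnostic for heads that have collapsed to near-deterministic attention (entropy close to 0) or stayed near-uniform. A minimal sketch of that diagnostic, assuming `attn_weights` of shape (num_heads, query_len, key_len) with rows summing to 1 (the actual hook in zipformer.py may aggregate differently):

```python
import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """Per-head mean entropy of the attention distribution."""
    eps = 1.0e-20
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)  # average over query positions: one value per head

w = torch.softmax(torch.randn(8, 16, 16), dim=-1)
print(attn_weights_entropy(w))  # tensor with 8 entries, like the dumps above
```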
+2024-07-27 20:25:07,151 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 20:25:12,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=58521.333333333336, ans=0.125
+2024-07-27 20:25:19,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58534.666666666664, ans=0.1
+2024-07-27 20:25:34,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=58561.333333333336, ans=0.2
+2024-07-27 20:25:51,053 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 6.112e+01 6.899e+01 7.724e+01 1.072e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-27 20:25:55,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=58574.666666666664, ans=0.2
+2024-07-27 20:25:57,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58574.666666666664, ans=0.1
+2024-07-27 20:26:01,528 INFO [train.py:1114] (1/4) Epoch 5, batch 3050, loss[loss=0.2264, simple_loss=0.3035, pruned_loss=0.0746, over 4635.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3242, pruned_loss=0.08547, over 937123.47 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:26:11,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=58601.333333333336, ans=0.2
+2024-07-27 20:26:40,771 INFO [train.py:1114] (1/4) Epoch 5, batch 3100, loss[loss=0.2864, simple_loss=0.3597, pruned_loss=0.1066, over 4644.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3243, pruned_loss=0.08573, over 937733.82 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 16.0
+2024-07-27 20:26:46,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=58654.666666666664, ans=0.125
+2024-07-27 20:27:10,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=58668.0, ans=0.1
+2024-07-27 20:27:13,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58668.0, ans=0.125
+2024-07-27 20:27:14,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=58668.0, ans=0.0
+2024-07-27 20:27:16,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0
+2024-07-27 20:29:03,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 6.229e+01 6.955e+01 7.996e+01 1.498e+02, threshold=1.391e+02, percent-clipped=1.0
+2024-07-27 20:29:27,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.13 vs. limit=10.0
+2024-07-27 20:29:36,746 INFO [train.py:1114] (1/4) Epoch 5, batch 3150, loss[loss=0.2661, simple_loss=0.3446, pruned_loss=0.09385, over 4609.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3241, pruned_loss=0.08529, over 938145.98 frames. ], batch size: 17, lr: 1.50e-02, grad_scale: 16.0
+2024-07-27 20:29:42,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.57 vs. limit=12.0
+2024-07-27 20:29:44,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58734.666666666664, ans=0.1
+2024-07-27 20:30:02,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58761.333333333336, ans=0.125
+2024-07-27 20:30:20,978 INFO [train.py:1114] (1/4) Epoch 5, batch 3200, loss[loss=0.2096, simple_loss=0.2889, pruned_loss=0.06517, over 4824.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.324, pruned_loss=0.08515, over 939632.27 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:30:27,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=58801.333333333336, ans=0.2
+2024-07-27 20:30:40,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=58801.333333333336, ans=0.0
+2024-07-27 20:30:46,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.26 vs. limit=15.0
+2024-07-27 20:31:05,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58828.0, ans=0.125
+2024-07-27 20:31:05,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58828.0, ans=0.1
+2024-07-27 20:31:09,400 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 6.414e+01 7.232e+01 8.731e+01 1.300e+02, threshold=1.446e+02, percent-clipped=0.0
+2024-07-27 20:31:12,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=58841.333333333336, ans=0.1
+2024-07-27 20:31:14,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=58841.333333333336, ans=0.125
+2024-07-27 20:31:14,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.04 vs. limit=10.0
+2024-07-27 20:31:17,448 INFO [train.py:1114] (1/4) Epoch 5, batch 3250, loss[loss=0.2621, simple_loss=0.3407, pruned_loss=0.09171, over 4926.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3242, pruned_loss=0.08496, over 940795.48 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:31:20,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=58854.666666666664, ans=0.125
+2024-07-27 20:31:51,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=58894.666666666664, ans=0.1
+2024-07-27 20:31:54,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=58894.666666666664, ans=0.0
+2024-07-27 20:31:56,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=58908.0, ans=0.0
+2024-07-27 20:31:57,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=58908.0, ans=0.2
+2024-07-27 20:32:06,914 INFO [train.py:1114] (1/4) Epoch 5, batch 3300, loss[loss=0.269, simple_loss=0.3413, pruned_loss=0.0983, over 4753.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.322, pruned_loss=0.08407, over 941333.25 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:32:23,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=58934.666666666664, ans=0.125
+2024-07-27 20:32:26,829 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.48 vs. limit=15.0
+2024-07-27 20:32:54,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.77 vs. limit=22.5
+2024-07-27 20:32:54,495 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 6.806e+01 7.832e+01 9.289e+01 1.732e+02, threshold=1.566e+02, percent-clipped=1.0
+2024-07-27 20:33:05,628 INFO [train.py:1114] (1/4) Epoch 5, batch 3350, loss[loss=0.2771, simple_loss=0.3517, pruned_loss=0.1013, over 4602.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3225, pruned_loss=0.08445, over 938675.18 frames. ], batch size: 17, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:33:13,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59001.333333333336, ans=0.1
+2024-07-27 20:33:14,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=59001.333333333336, ans=0.2
+2024-07-27 20:33:16,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.68 vs. limit=15.0
+2024-07-27 20:33:24,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=59014.666666666664, ans=0.95
+2024-07-27 20:33:27,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=59028.0, ans=0.125
+2024-07-27 20:33:39,079 INFO [train.py:1114] (1/4) Epoch 5, batch 3400, loss[loss=0.1951, simple_loss=0.2739, pruned_loss=0.05815, over 4809.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3237, pruned_loss=0.08546, over 937143.39 frames. ], batch size: 11, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:33:45,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59068.0, ans=0.1
+2024-07-27 20:33:45,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=59068.0, ans=0.125
+2024-07-27 20:33:45,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59068.0, ans=0.1
+2024-07-27 20:33:58,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59094.666666666664, ans=0.2
+2024-07-27 20:33:59,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=12.0
+2024-07-27 20:34:04,612 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.358e+01 7.066e+01 8.502e+01 1.252e+02, threshold=1.413e+02, percent-clipped=0.0
+2024-07-27 20:34:07,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0
+2024-07-27 20:34:11,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=59108.0, ans=0.0
+2024-07-27 20:34:12,601 INFO [train.py:1114] (1/4) Epoch 5, batch 3450, loss[loss=0.2753, simple_loss=0.345, pruned_loss=0.1028, over 4716.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3234, pruned_loss=0.08487, over 937608.13 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:34:12,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=59121.333333333336, ans=0.0
+2024-07-27 20:34:18,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=59134.666666666664, ans=0.035
+2024-07-27 20:34:28,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.44 vs. limit=15.0
+2024-07-27 20:34:31,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=59161.333333333336, ans=0.125
+2024-07-27 20:34:34,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=59161.333333333336, ans=0.2
+2024-07-27 20:34:36,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=59161.333333333336, ans=0.2
+2024-07-27 20:34:38,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=59161.333333333336, ans=0.125
+2024-07-27 20:34:41,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.92 vs. limit=22.5
+2024-07-27 20:34:42,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=59174.666666666664, ans=0.0
+2024-07-27 20:34:46,090 INFO [train.py:1114] (1/4) Epoch 5, batch 3500, loss[loss=0.2251, simple_loss=0.2982, pruned_loss=0.07597, over 4949.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3223, pruned_loss=0.08432, over 938252.43 frames. ], batch size: 12, lr: 1.49e-02, grad_scale: 32.0
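The per-batch `train.py:1114` lines report three numbers whose relationship is visible in the log itself: every entry satisfies loss ≈ 0.5 * simple_loss + pruned_loss (e.g. batch 3400 above: 0.5 * 0.2739 + 0.05815 ≈ 0.1951), the usual weighting of the simple and pruned losses in pruned-transducer training, while tot_loss is a frame-weighted running average over recent batches. A minimal sketch of that bookkeeping; the 0.5/1.0 scales are inferred from the logged numbers, not quoted from train.py:

```python
class LossTracker:
    """Combine simple/pruned losses and keep a frame-weighted average."""

    def __init__(self, simple_scale: float = 0.5, pruned_scale: float = 1.0):
        self.simple_scale = simple_scale
        self.pruned_scale = pruned_scale
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, simple_loss: float, pruned_loss: float, num_frames: float):
        loss = self.simple_scale * simple_loss + self.pruned_scale * pruned_loss
        self.loss_sum += loss * num_frames
        self.frames += num_frames
        return loss, self.loss_sum / self.frames

tracker = LossTracker()
batch_loss, running = tracker.update(0.2739, 0.05815, 4809.0)
print(f"loss={batch_loss:.4f}")  # 0.1951, matching batch 3400 above
```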
+2024-07-27 20:35:13,403 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.758e+01 6.287e+01 6.647e+01 7.437e+01 1.544e+02, threshold=1.329e+02, percent-clipped=1.0
+2024-07-27 20:35:19,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=59241.333333333336, ans=0.125
+2024-07-27 20:35:21,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=59254.666666666664, ans=0.125
+2024-07-27 20:35:21,565 INFO [train.py:1114] (1/4) Epoch 5, batch 3550, loss[loss=0.2467, simple_loss=0.3332, pruned_loss=0.0801, over 4659.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3224, pruned_loss=0.08416, over 938870.54 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:35:34,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=59268.0, ans=0.0
+2024-07-27 20:35:40,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=59281.333333333336, ans=0.0
+2024-07-27 20:35:40,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=59281.333333333336, ans=6.0
+2024-07-27 20:35:48,306 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0
+2024-07-27 20:35:55,984 INFO [train.py:1114] (1/4) Epoch 5, batch 3600, loss[loss=0.2213, simple_loss=0.2946, pruned_loss=0.07402, over 4961.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3214, pruned_loss=0.08349, over 940646.96 frames. ], batch size: 13, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:36:00,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59321.333333333336, ans=0.125
+2024-07-27 20:36:11,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.59 vs. limit=22.5
+2024-07-27 20:36:20,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=59361.333333333336, ans=0.0
+2024-07-27 20:36:22,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.60 vs. limit=15.0
+2024-07-27 20:36:24,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=59361.333333333336, ans=10.0
+2024-07-27 20:36:26,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=59361.333333333336, ans=0.2
+2024-07-27 20:36:26,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.226e+01 6.920e+01 7.848e+01 1.341e+02, threshold=1.384e+02, percent-clipped=1.0
+2024-07-27 20:36:26,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=59361.333333333336, ans=10.0
+2024-07-27 20:36:28,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=59374.666666666664, ans=0.2
+2024-07-27 20:36:35,258 INFO [train.py:1114] (1/4) Epoch 5, batch 3650, loss[loss=0.3064, simple_loss=0.3761, pruned_loss=0.1183, over 4911.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3217, pruned_loss=0.08374, over 941047.74 frames. ], batch size: 15, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:36:41,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=59388.0, ans=0.05
+2024-07-27 20:36:45,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=59401.333333333336, ans=0.0
+2024-07-27 20:36:50,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=59414.666666666664, ans=0.025
+2024-07-27 20:36:54,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=59414.666666666664, ans=0.2
+2024-07-27 20:37:08,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=59441.333333333336, ans=0.2
+2024-07-27 20:37:11,924 INFO [train.py:1114] (1/4) Epoch 5, batch 3700, loss[loss=0.2497, simple_loss=0.3256, pruned_loss=0.08694, over 4938.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3228, pruned_loss=0.08438, over 942008.81 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:37:31,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=59494.666666666664, ans=0.2
+2024-07-27 20:37:37,467 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.383e+01 7.266e+01 8.369e+01 1.200e+02, threshold=1.453e+02, percent-clipped=0.0
+2024-07-27 20:37:45,344 INFO [train.py:1114] (1/4) Epoch 5, batch 3750, loss[loss=0.206, simple_loss=0.2666, pruned_loss=0.07272, over 4803.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3218, pruned_loss=0.08397, over 943317.44 frames. ], batch size: 11, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:37:45,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=59521.333333333336, ans=0.2
+2024-07-27 20:37:46,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.81 vs. limit=22.5
limit=22.5 +2024-07-27 20:37:56,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=59534.666666666664, ans=10.0 +2024-07-27 20:37:58,780 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:38:09,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=59561.333333333336, ans=0.0 +2024-07-27 20:38:16,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.65 vs. limit=15.0 +2024-07-27 20:38:18,246 INFO [train.py:1114] (1/4) Epoch 5, batch 3800, loss[loss=0.2702, simple_loss=0.3418, pruned_loss=0.09925, over 4800.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.322, pruned_loss=0.08465, over 941186.63 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:38:19,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59588.0, ans=0.1 +2024-07-27 20:38:28,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.19 vs. limit=10.0 +2024-07-27 20:38:37,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=59614.666666666664, ans=0.0 +2024-07-27 20:38:39,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=59628.0, ans=0.125 +2024-07-27 20:38:44,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 6.382e+01 7.291e+01 8.683e+01 1.605e+02, threshold=1.458e+02, percent-clipped=1.0 +2024-07-27 20:38:45,734 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:38:46,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=59641.333333333336, ans=0.125 +2024-07-27 20:38:47,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=59641.333333333336, ans=0.125 +2024-07-27 20:38:52,566 INFO [train.py:1114] (1/4) Epoch 5, batch 3850, loss[loss=0.2526, simple_loss=0.3285, pruned_loss=0.08834, over 4656.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3218, pruned_loss=0.08434, over 941769.15 frames. ], batch size: 16, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:38:57,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=59654.666666666664, ans=0.0 +2024-07-27 20:39:09,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=59681.333333333336, ans=0.125 +2024-07-27 20:39:13,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=59694.666666666664, ans=0.125 +2024-07-27 20:39:17,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.36 vs. 
limit=12.0 +2024-07-27 20:39:18,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=59694.666666666664, ans=0.2 +2024-07-27 20:39:26,343 INFO [train.py:1114] (1/4) Epoch 5, batch 3900, loss[loss=0.2309, simple_loss=0.3268, pruned_loss=0.06753, over 4808.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3229, pruned_loss=0.08489, over 941978.83 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:39:30,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59721.333333333336, ans=0.1 +2024-07-27 20:39:32,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=59734.666666666664, ans=0.125 +2024-07-27 20:39:33,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=59734.666666666664, ans=0.125 +2024-07-27 20:39:34,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=59734.666666666664, ans=10.0 +2024-07-27 20:39:35,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=59734.666666666664, ans=0.2 +2024-07-27 20:39:40,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59748.0, ans=0.1 +2024-07-27 20:39:49,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0 +2024-07-27 20:39:51,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.368e+01 7.161e+01 8.539e+01 1.176e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 20:39:59,278 INFO [train.py:1114] (1/4) Epoch 5, batch 3950, loss[loss=0.2909, simple_loss=0.3527, pruned_loss=0.1146, over 4841.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3223, pruned_loss=0.08429, over 944186.32 frames. ], batch size: 16, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:40:08,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.92 vs. limit=15.0 +2024-07-27 20:40:09,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.24 vs. limit=22.5 +2024-07-27 20:40:10,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=59801.333333333336, ans=0.125 +2024-07-27 20:40:17,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=59814.666666666664, ans=0.125 +2024-07-27 20:40:19,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.52 vs. 
limit=15.0 +2024-07-27 20:40:31,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=59841.333333333336, ans=0.125 +2024-07-27 20:40:31,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59841.333333333336, ans=0.1 +2024-07-27 20:40:33,150 INFO [train.py:1114] (1/4) Epoch 5, batch 4000, loss[loss=0.2406, simple_loss=0.3129, pruned_loss=0.08422, over 4777.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3234, pruned_loss=0.08496, over 941074.63 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:40:35,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=59854.666666666664, ans=0.025 +2024-07-27 20:40:37,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59854.666666666664, ans=0.1 +2024-07-27 20:40:38,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=59854.666666666664, ans=0.125 +2024-07-27 20:40:40,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=59868.0, ans=0.125 +2024-07-27 20:40:41,173 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:40:54,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-07-27 20:41:01,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-07-27 20:41:01,635 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.270e+01 7.255e+01 8.485e+01 1.075e+02, threshold=1.451e+02, percent-clipped=0.0 +2024-07-27 20:41:03,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=59908.0, ans=0.125 +2024-07-27 20:41:03,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-07-27 20:41:09,818 INFO [train.py:1114] (1/4) Epoch 5, batch 4050, loss[loss=0.3459, simple_loss=0.3814, pruned_loss=0.1552, over 3564.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3228, pruned_loss=0.08504, over 939651.59 frames. ], batch size: 35, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:41:17,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=59934.666666666664, ans=0.04949747468305833 +2024-07-27 20:41:45,356 INFO [train.py:1114] (1/4) Epoch 5, batch 4100, loss[loss=0.2754, simple_loss=0.3466, pruned_loss=0.1021, over 4912.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3241, pruned_loss=0.08588, over 938528.11 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:41:52,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=60001.333333333336, ans=0.2 +2024-07-27 20:41:56,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.83 vs. 
limit=15.0 +2024-07-27 20:41:57,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=60001.333333333336, ans=0.0 +2024-07-27 20:42:01,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=60014.666666666664, ans=0.0 +2024-07-27 20:42:07,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=60028.0, ans=0.125 +2024-07-27 20:42:07,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.69 vs. limit=15.0 +2024-07-27 20:42:10,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=60028.0, ans=0.125 +2024-07-27 20:42:12,617 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.593e+01 8.156e+01 1.046e+02 1.897e+02, threshold=1.631e+02, percent-clipped=3.0 +2024-07-27 20:42:14,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60041.333333333336, ans=0.1 +2024-07-27 20:42:17,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=60041.333333333336, ans=0.04949747468305833 +2024-07-27 20:42:18,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=60041.333333333336, ans=0.0 +2024-07-27 20:42:20,628 INFO [train.py:1114] (1/4) Epoch 5, batch 4150, loss[loss=0.2322, simple_loss=0.3086, pruned_loss=0.07791, over 4822.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3242, pruned_loss=0.08601, over 938493.28 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:42:31,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=60068.0, ans=0.125 +2024-07-27 20:42:33,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0 +2024-07-27 20:42:38,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=60081.333333333336, ans=0.125 +2024-07-27 20:42:42,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.08 vs. 
limit=15.0 +2024-07-27 20:42:43,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60094.666666666664, ans=0.125 +2024-07-27 20:42:49,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60108.0, ans=0.1 +2024-07-27 20:42:49,817 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:42:49,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=60108.0, ans=0.025 +2024-07-27 20:42:51,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=60108.0, ans=0.0 +2024-07-27 20:42:51,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60108.0, ans=0.125 +2024-07-27 20:42:53,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=60108.0, ans=0.125 +2024-07-27 20:42:55,751 INFO [train.py:1114] (1/4) Epoch 5, batch 4200, loss[loss=0.3037, simple_loss=0.3768, pruned_loss=0.1153, over 4902.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3244, pruned_loss=0.08602, over 939865.00 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:43:00,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60121.333333333336, ans=0.1 +2024-07-27 20:43:07,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=60134.666666666664, ans=0.125 +2024-07-27 20:43:13,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.99 vs. limit=15.0 +2024-07-27 20:43:20,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.148e+01 7.735e+01 9.943e+01 1.461e+02, threshold=1.547e+02, percent-clipped=0.0 +2024-07-27 20:43:24,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=60174.666666666664, ans=0.125 +2024-07-27 20:43:25,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.68 vs. limit=22.5 +2024-07-27 20:43:28,468 INFO [train.py:1114] (1/4) Epoch 5, batch 4250, loss[loss=0.2796, simple_loss=0.3413, pruned_loss=0.109, over 4636.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.323, pruned_loss=0.08519, over 940892.69 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:43:32,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.10 vs. 
limit=15.0 +2024-07-27 20:43:41,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=60214.666666666664, ans=0.0 +2024-07-27 20:43:43,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=60214.666666666664, ans=0.0 +2024-07-27 20:43:52,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=60228.0, ans=0.0 +2024-07-27 20:43:56,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=15.0 +2024-07-27 20:44:01,221 INFO [train.py:1114] (1/4) Epoch 5, batch 4300, loss[loss=0.2005, simple_loss=0.286, pruned_loss=0.05751, over 4756.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3223, pruned_loss=0.08484, over 940155.55 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:44:02,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.85 vs. limit=15.0 +2024-07-27 20:44:04,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=60254.666666666664, ans=0.025 +2024-07-27 20:44:12,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=15.0 +2024-07-27 20:44:23,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=60294.666666666664, ans=0.125 +2024-07-27 20:44:24,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=60294.666666666664, ans=0.05 +2024-07-27 20:44:26,303 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.170e+01 6.762e+01 7.364e+01 1.372e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-27 20:44:32,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60308.0, ans=0.1 +2024-07-27 20:44:34,438 INFO [train.py:1114] (1/4) Epoch 5, batch 4350, loss[loss=0.2517, simple_loss=0.3223, pruned_loss=0.09055, over 4767.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3244, pruned_loss=0.08562, over 940723.31 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:44:35,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.26 vs. 
limit=22.5 +2024-07-27 20:44:38,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60321.333333333336, ans=0.1 +2024-07-27 20:44:47,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=60334.666666666664, ans=15.0 +2024-07-27 20:44:49,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60348.0, ans=0.125 +2024-07-27 20:44:50,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=60348.0, ans=0.2 +2024-07-27 20:44:58,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=60361.333333333336, ans=0.125 +2024-07-27 20:44:58,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.15 vs. limit=22.5 +2024-07-27 20:45:08,017 INFO [train.py:1114] (1/4) Epoch 5, batch 4400, loss[loss=0.232, simple_loss=0.3332, pruned_loss=0.06537, over 4813.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3239, pruned_loss=0.08488, over 940652.23 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:45:22,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=60414.666666666664, ans=0.125 +2024-07-27 20:45:25,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=60414.666666666664, ans=0.125 +2024-07-27 20:45:25,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=15.0 +2024-07-27 20:45:29,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-07-27 20:45:33,392 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.353e+01 6.947e+01 8.100e+01 1.220e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-27 20:45:37,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=60441.333333333336, ans=0.125 +2024-07-27 20:45:41,666 INFO [train.py:1114] (1/4) Epoch 5, batch 4450, loss[loss=0.2471, simple_loss=0.3172, pruned_loss=0.08855, over 4928.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3235, pruned_loss=0.08512, over 938713.30 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:45:41,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=60454.666666666664, ans=0.125 +2024-07-27 20:45:42,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.59 vs. 
limit=12.0 +2024-07-27 20:45:49,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60468.0, ans=0.125 +2024-07-27 20:45:53,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=60468.0, ans=0.0 +2024-07-27 20:45:56,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=60481.333333333336, ans=0.0 +2024-07-27 20:45:59,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=60481.333333333336, ans=0.0 +2024-07-27 20:46:05,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=60494.666666666664, ans=0.0 +2024-07-27 20:46:16,935 INFO [train.py:1114] (1/4) Epoch 5, batch 4500, loss[loss=0.2305, simple_loss=0.3245, pruned_loss=0.06829, over 4743.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3242, pruned_loss=0.08478, over 937584.51 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:46:20,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60521.333333333336, ans=0.1 +2024-07-27 20:46:20,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=60521.333333333336, ans=0.125 +2024-07-27 20:46:24,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=60534.666666666664, ans=0.0 +2024-07-27 20:46:30,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=12.0 +2024-07-27 20:46:34,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0 +2024-07-27 20:46:34,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=60548.0, ans=0.0 +2024-07-27 20:46:38,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=60561.333333333336, ans=0.0 +2024-07-27 20:46:41,931 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.144e+01 7.215e+01 8.358e+01 1.180e+02, threshold=1.443e+02, percent-clipped=0.0 +2024-07-27 20:46:42,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=60561.333333333336, ans=0.2 +2024-07-27 20:46:49,958 INFO [train.py:1114] (1/4) Epoch 5, batch 4550, loss[loss=0.2399, simple_loss=0.3281, pruned_loss=0.07581, over 4912.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3249, pruned_loss=0.08542, over 939513.96 frames. 
], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:46:59,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=60601.333333333336, ans=0.125 +2024-07-27 20:47:07,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60614.666666666664, ans=0.1 +2024-07-27 20:47:15,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60628.0, ans=0.125 +2024-07-27 20:47:25,721 INFO [train.py:1114] (1/4) Epoch 5, batch 4600, loss[loss=0.2233, simple_loss=0.3108, pruned_loss=0.0679, over 4536.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3241, pruned_loss=0.08544, over 937415.54 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:47:25,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=60654.666666666664, ans=0.0 +2024-07-27 20:47:31,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=60654.666666666664, ans=0.125 +2024-07-27 20:47:49,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=60694.666666666664, ans=0.125 +2024-07-27 20:47:51,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=60694.666666666664, ans=0.2 +2024-07-27 20:47:53,335 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 6.426e+01 7.546e+01 8.603e+01 1.273e+02, threshold=1.509e+02, percent-clipped=0.0 +2024-07-27 20:48:03,051 INFO [train.py:1114] (1/4) Epoch 5, batch 4650, loss[loss=0.2889, simple_loss=0.3654, pruned_loss=0.1062, over 4864.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3247, pruned_loss=0.08551, over 939420.05 frames. ], batch size: 16, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:48:22,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=60748.0, ans=0.0 +2024-07-27 20:48:30,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60774.666666666664, ans=0.0 +2024-07-27 20:48:33,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.99 vs. limit=12.0 +2024-07-27 20:48:36,509 INFO [train.py:1114] (1/4) Epoch 5, batch 4700, loss[loss=0.239, simple_loss=0.3181, pruned_loss=0.07993, over 4715.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3235, pruned_loss=0.08489, over 936909.15 frames. ], batch size: 11, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:48:41,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=60788.0, ans=0.025 +2024-07-27 20:48:43,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.88 vs. limit=5.0 +2024-07-27 20:48:47,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-07-27 20:48:48,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.85 vs. 
limit=15.0 +2024-07-27 20:48:51,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.35 vs. limit=22.5 +2024-07-27 20:48:53,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=60814.666666666664, ans=0.0 +2024-07-27 20:48:55,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=60828.0, ans=0.0 +2024-07-27 20:49:00,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=60828.0, ans=0.125 +2024-07-27 20:49:02,021 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+01 6.344e+01 7.380e+01 9.406e+01 1.591e+02, threshold=1.476e+02, percent-clipped=1.0 +2024-07-27 20:49:03,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=60841.333333333336, ans=0.125 +2024-07-27 20:49:07,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.13 vs. limit=22.5 +2024-07-27 20:49:10,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0 +2024-07-27 20:49:10,641 INFO [train.py:1114] (1/4) Epoch 5, batch 4750, loss[loss=0.2407, simple_loss=0.3221, pruned_loss=0.07964, over 4546.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3236, pruned_loss=0.0856, over 935567.64 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:49:16,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=60854.666666666664, ans=10.0 +2024-07-27 20:49:16,187 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:49:21,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.14 vs. limit=15.0 +2024-07-27 20:49:26,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60881.333333333336, ans=0.1 +2024-07-27 20:49:37,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=60908.0, ans=0.025 +2024-07-27 20:49:42,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=60908.0, ans=0.0 +2024-07-27 20:49:44,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=60908.0, ans=0.0 +2024-07-27 20:49:45,209 INFO [train.py:1114] (1/4) Epoch 5, batch 4800, loss[loss=0.2624, simple_loss=0.3321, pruned_loss=0.09635, over 4692.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3238, pruned_loss=0.08609, over 932740.16 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:49:46,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. 
limit=6.0 +2024-07-27 20:49:47,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=60921.333333333336, ans=0.2 +2024-07-27 20:50:00,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60948.0, ans=0.1 +2024-07-27 20:50:01,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=60948.0, ans=0.2 +2024-07-27 20:50:10,635 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.152e+01 6.705e+01 7.633e+01 9.767e+01, threshold=1.341e+02, percent-clipped=0.0 +2024-07-27 20:50:18,673 INFO [train.py:1114] (1/4) Epoch 5, batch 4850, loss[loss=0.291, simple_loss=0.3664, pruned_loss=0.1078, over 4740.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3248, pruned_loss=0.0862, over 932080.55 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:50:20,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=60988.0, ans=0.125 +2024-07-27 20:50:40,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.55 vs. limit=15.0 +2024-07-27 20:50:51,570 INFO [train.py:1114] (1/4) Epoch 5, batch 4900, loss[loss=0.2529, simple_loss=0.3359, pruned_loss=0.08495, over 4753.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3249, pruned_loss=0.08632, over 933701.95 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:50:54,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.69 vs. limit=15.0 +2024-07-27 20:50:59,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=61068.0, ans=0.0 +2024-07-27 20:51:01,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=61068.0, ans=0.2 +2024-07-27 20:51:17,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61094.666666666664, ans=0.125 +2024-07-27 20:51:20,164 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.112e+01 6.910e+01 8.321e+01 1.535e+02, threshold=1.382e+02, percent-clipped=5.0 +2024-07-27 20:51:35,801 INFO [train.py:1114] (1/4) Epoch 5, batch 4950, loss[loss=0.2802, simple_loss=0.3445, pruned_loss=0.1079, over 3307.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3259, pruned_loss=0.08708, over 930815.42 frames. ], batch size: 35, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:51:50,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61134.666666666664, ans=0.1 +2024-07-27 20:52:00,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61148.0, ans=0.125 +2024-07-27 20:52:19,436 INFO [train.py:1114] (1/4) Epoch 5, batch 5000, loss[loss=0.2195, simple_loss=0.3037, pruned_loss=0.06764, over 4656.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3239, pruned_loss=0.08566, over 934950.93 frames. 
], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:52:20,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=61188.0, ans=0.125 +2024-07-27 20:52:20,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61188.0, ans=0.125 +2024-07-27 20:52:21,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=61188.0, ans=0.2 +2024-07-27 20:52:23,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=61188.0, ans=0.125 +2024-07-27 20:52:24,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61188.0, ans=0.1 +2024-07-27 20:52:35,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=61214.666666666664, ans=0.0 +2024-07-27 20:52:47,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=61228.0, ans=0.07 +2024-07-27 20:52:48,111 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.055e+01 6.404e+01 7.517e+01 8.761e+01 1.608e+02, threshold=1.503e+02, percent-clipped=2.0 +2024-07-27 20:53:06,826 INFO [train.py:1114] (1/4) Epoch 5, batch 5050, loss[loss=0.2312, simple_loss=0.2942, pruned_loss=0.08405, over 4848.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3236, pruned_loss=0.08542, over 937348.12 frames. ], batch size: 12, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:53:11,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.98 vs. limit=10.0 +2024-07-27 20:53:17,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61268.0, ans=0.1 +2024-07-27 20:53:22,722 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0 +2024-07-27 20:53:30,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=61294.666666666664, ans=0.125 +2024-07-27 20:53:37,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.69 vs. limit=15.0 +2024-07-27 20:53:41,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=61294.666666666664, ans=0.125 +2024-07-27 20:53:53,181 INFO [train.py:1114] (1/4) Epoch 5, batch 5100, loss[loss=0.2422, simple_loss=0.3198, pruned_loss=0.0823, over 4779.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3236, pruned_loss=0.08561, over 934784.65 frames. 
], batch size: 12, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:53:59,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=61334.666666666664, ans=0.125 +2024-07-27 20:54:04,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61334.666666666664, ans=0.125 +2024-07-27 20:54:15,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61361.333333333336, ans=0.1 +2024-07-27 20:54:15,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.37 vs. limit=15.0 +2024-07-27 20:54:16,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=61361.333333333336, ans=0.0 +2024-07-27 20:54:18,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.23 vs. limit=10.0 +2024-07-27 20:54:20,585 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+01 6.137e+01 6.842e+01 8.040e+01 3.164e+02, threshold=1.368e+02, percent-clipped=1.0 +2024-07-27 20:54:21,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=61361.333333333336, ans=0.025 +2024-07-27 20:54:22,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=61374.666666666664, ans=0.0 +2024-07-27 20:54:29,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61388.0, ans=0.0 +2024-07-27 20:54:29,750 INFO [train.py:1114] (1/4) Epoch 5, batch 5150, loss[loss=0.2127, simple_loss=0.3048, pruned_loss=0.06034, over 4840.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3244, pruned_loss=0.08552, over 936205.49 frames. ], batch size: 16, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:54:31,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=61388.0, ans=0.125 +2024-07-27 20:54:41,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61388.0, ans=0.125 +2024-07-27 20:54:51,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=61414.666666666664, ans=0.0 +2024-07-27 20:54:59,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=61428.0, ans=10.0 +2024-07-27 20:55:04,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.71 vs. limit=10.0 +2024-07-27 20:55:07,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=61441.333333333336, ans=0.0 +2024-07-27 20:55:08,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=61441.333333333336, ans=0.2 +2024-07-27 20:55:10,110 INFO [train.py:1114] (1/4) Epoch 5, batch 5200, loss[loss=0.2548, simple_loss=0.3403, pruned_loss=0.08466, over 4661.00 frames. 
], tot_loss[loss=0.2482, simple_loss=0.3247, pruned_loss=0.08585, over 936590.42 frames. ], batch size: 14, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:55:18,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61454.666666666664, ans=0.125 +2024-07-27 20:55:22,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.06 vs. limit=15.0 +2024-07-27 20:55:31,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=61481.333333333336, ans=0.0 +2024-07-27 20:55:34,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=61481.333333333336, ans=0.0 +2024-07-27 20:55:36,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=61494.666666666664, ans=0.125 +2024-07-27 20:55:38,339 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-27 20:55:42,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.018e+01 6.430e+01 7.385e+01 8.844e+01 1.293e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 20:55:50,191 INFO [train.py:1114] (1/4) Epoch 5, batch 5250, loss[loss=0.2478, simple_loss=0.3308, pruned_loss=0.08244, over 4899.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3249, pruned_loss=0.08623, over 935977.16 frames. ], batch size: 13, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:55:57,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=61534.666666666664, ans=0.125 +2024-07-27 20:56:01,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61534.666666666664, ans=0.0 +2024-07-27 20:56:03,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=61548.0, ans=0.125 +2024-07-27 20:56:10,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=61561.333333333336, ans=0.125 +2024-07-27 20:56:24,400 INFO [train.py:1114] (1/4) Epoch 5, batch 5300, loss[loss=0.2708, simple_loss=0.3478, pruned_loss=0.09695, over 4626.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.323, pruned_loss=0.08577, over 934346.63 frames. ], batch size: 16, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:56:26,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.18 vs. 
limit=15.0 +2024-07-27 20:56:26,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=61588.0, ans=10.0 +2024-07-27 20:56:39,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=61601.333333333336, ans=0.025 +2024-07-27 20:56:43,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=61614.666666666664, ans=0.5 +2024-07-27 20:56:54,052 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.200e+01 6.732e+01 7.536e+01 1.097e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-27 20:57:02,172 INFO [train.py:1114] (1/4) Epoch 5, batch 5350, loss[loss=0.223, simple_loss=0.2885, pruned_loss=0.07874, over 4501.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3236, pruned_loss=0.08546, over 936336.84 frames. ], batch size: 10, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:57:11,271 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:57:14,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.43 vs. limit=15.0 +2024-07-27 20:57:18,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=61681.333333333336, ans=0.125 +2024-07-27 20:57:19,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.14 vs. limit=15.0 +2024-07-27 20:57:26,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=61694.666666666664, ans=0.2 +2024-07-27 20:57:27,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.87 vs. limit=15.0 +2024-07-27 20:57:27,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=61694.666666666664, ans=0.125 +2024-07-27 20:57:32,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=61694.666666666664, ans=0.04949747468305833 +2024-07-27 20:57:39,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=61708.0, ans=0.125 +2024-07-27 20:57:41,134 INFO [train.py:1114] (1/4) Epoch 5, batch 5400, loss[loss=0.2606, simple_loss=0.3322, pruned_loss=0.0945, over 4234.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.324, pruned_loss=0.08545, over 930201.41 frames. 
], batch size: 25, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:57:44,366 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:57:44,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=61721.333333333336, ans=0.025 +2024-07-27 20:58:08,952 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.299e+01 6.991e+01 7.974e+01 1.272e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 20:58:11,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61774.666666666664, ans=0.1 +2024-07-27 20:58:11,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61774.666666666664, ans=0.1 +2024-07-27 20:58:22,862 INFO [train.py:1114] (1/4) Epoch 5, batch 5450, loss[loss=0.2385, simple_loss=0.3047, pruned_loss=0.08609, over 4704.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3227, pruned_loss=0.0845, over 932865.67 frames. ], batch size: 11, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:58:27,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=61788.0, ans=0.125 +2024-07-27 20:58:35,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=61814.666666666664, ans=0.125 +2024-07-27 20:58:41,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=61814.666666666664, ans=0.0 +2024-07-27 20:58:41,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=61814.666666666664, ans=0.0 +2024-07-27 20:58:52,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=61841.333333333336, ans=0.0 +2024-07-27 20:58:58,515 INFO [train.py:1114] (1/4) Epoch 5, batch 5500, loss[loss=0.3074, simple_loss=0.3743, pruned_loss=0.1202, over 4254.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3226, pruned_loss=0.08477, over 930632.33 frames. ], batch size: 25, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:58:58,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=61854.666666666664, ans=0.125 +2024-07-27 20:59:00,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=61854.666666666664, ans=0.125 +2024-07-27 20:59:03,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.16 vs. 
limit=12.0
+2024-07-27 20:59:15,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=61881.333333333336, ans=0.0
+2024-07-27 20:59:17,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=61881.333333333336, ans=0.125
+2024-07-27 20:59:24,622 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.189e+01 6.179e+01 6.952e+01 7.770e+01 1.227e+02, threshold=1.390e+02, percent-clipped=0.0
+2024-07-27 20:59:28,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=61908.0, ans=0.05
+2024-07-27 20:59:28,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=61908.0, ans=0.0
+2024-07-27 20:59:32,510 INFO [train.py:1114] (1/4) Epoch 5, batch 5550, loss[loss=0.234, simple_loss=0.304, pruned_loss=0.08203, over 4697.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3217, pruned_loss=0.08437, over 932960.40 frames. ], batch size: 12, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 20:59:39,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=61921.333333333336, ans=0.0
+2024-07-27 20:59:39,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=61921.333333333336, ans=0.2
+2024-07-27 20:59:46,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=61934.666666666664, ans=0.125
+2024-07-27 20:59:46,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=61934.666666666664, ans=0.0
+2024-07-27 20:59:48,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=61948.0, ans=0.2
+2024-07-27 20:59:51,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.35 vs. limit=12.0
+2024-07-27 20:59:51,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.64 vs. limit=15.0
+2024-07-27 20:59:57,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=61961.333333333336, ans=0.125
+2024-07-27 21:00:06,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61974.666666666664, ans=0.1
+2024-07-27 21:00:08,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=61974.666666666664, ans=0.125
+2024-07-27 21:00:09,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=61988.0, ans=0.125
+2024-07-27 21:00:09,914 INFO [train.py:1114] (1/4) Epoch 5, batch 5600, loss[loss=0.2639, simple_loss=0.3301, pruned_loss=0.09884, over 4740.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3229, pruned_loss=0.08518, over 933806.17 frames. ], batch size: 14, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 21:00:19,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=61988.0, ans=0.0
+2024-07-27 21:00:33,857 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:00:51,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=62028.0, ans=0.125
+2024-07-27 21:00:53,094 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.040e+01 6.647e+01 7.605e+01 1.041e+02, threshold=1.329e+02, percent-clipped=0.0
+2024-07-27 21:01:01,028 INFO [train.py:1114] (1/4) Epoch 5, batch 5650, loss[loss=0.2786, simple_loss=0.359, pruned_loss=0.09905, over 4500.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3217, pruned_loss=0.08472, over 936484.86 frames. ], batch size: 21, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 21:01:04,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=62054.666666666664, ans=0.025
+2024-07-27 21:01:05,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=62054.666666666664, ans=0.125
+2024-07-27 21:01:06,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=62054.666666666664, ans=0.125
+2024-07-27 21:01:07,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0
+2024-07-27 21:01:08,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.87 vs. limit=15.0
+2024-07-27 21:01:31,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62094.666666666664, ans=0.1
+2024-07-27 21:01:42,139 INFO [train.py:1114] (1/4) Epoch 5, batch 5700, loss[loss=0.2749, simple_loss=0.3665, pruned_loss=0.09167, over 4698.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.323, pruned_loss=0.08524, over 937853.59 frames. ], batch size: 13, lr: 1.46e-02, grad_scale: 64.0
+2024-07-27 21:01:42,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=62121.333333333336, ans=0.5
+2024-07-27 21:01:51,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62134.666666666664, ans=0.125
+2024-07-27 21:01:53,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=62134.666666666664, ans=0.2
+2024-07-27 21:02:01,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.47 vs. limit=15.0
+2024-07-27 21:02:06,416 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:02:07,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.365e+01 6.985e+01 7.849e+01 1.267e+02, threshold=1.397e+02, percent-clipped=0.0
+2024-07-27 21:02:15,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.19 vs. limit=6.0
+2024-07-27 21:02:20,504 INFO [train.py:1114] (1/4) Epoch 5, batch 5750, loss[loss=0.2461, simple_loss=0.3409, pruned_loss=0.07568, over 4732.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3231, pruned_loss=0.08483, over 938192.16 frames. ], batch size: 19, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:02:45,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=62214.666666666664, ans=0.125
+2024-07-27 21:02:54,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=62214.666666666664, ans=0.125
+2024-07-27 21:02:59,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=62228.0, ans=0.125
+2024-07-27 21:03:09,030 INFO [train.py:1114] (1/4) Epoch 5, batch 5800, loss[loss=0.3019, simple_loss=0.3629, pruned_loss=0.1204, over 4715.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3236, pruned_loss=0.08496, over 937450.93 frames. ], batch size: 19, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:03:14,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=62254.666666666664, ans=0.0
+2024-07-27 21:03:15,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=62254.666666666664, ans=10.0
+2024-07-27 21:03:20,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=62268.0, ans=0.2
+2024-07-27 21:03:21,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=62268.0, ans=0.125
+2024-07-27 21:03:23,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0
+2024-07-27 21:03:40,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62294.666666666664, ans=0.125
+2024-07-27 21:03:41,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=62294.666666666664, ans=0.0
+2024-07-27 21:03:44,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=62294.666666666664, ans=0.0
+2024-07-27 21:03:46,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.301e+01 6.477e+01 7.083e+01 8.928e+01 1.486e+02, threshold=1.417e+02, percent-clipped=3.0
+2024-07-27 21:03:47,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=62308.0, ans=0.07
+2024-07-27 21:03:48,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=62308.0, ans=0.125
+2024-07-27 21:03:57,629 INFO [train.py:1114] (1/4) Epoch 5, batch 5850, loss[loss=0.2708, simple_loss=0.3399, pruned_loss=0.1008, over 4455.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3227, pruned_loss=0.08471, over 937903.70 frames. ], batch size: 21, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:04:08,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=62321.333333333336, ans=0.0
+2024-07-27 21:04:32,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=62348.0, ans=0.125
+2024-07-27 21:04:58,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=62374.666666666664, ans=0.0
+2024-07-27 21:05:03,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=62388.0, ans=0.125
+2024-07-27 21:05:03,538 INFO [train.py:1114] (1/4) Epoch 5, batch 5900, loss[loss=0.2385, simple_loss=0.3263, pruned_loss=0.07532, over 4669.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3218, pruned_loss=0.08408, over 938258.23 frames. ], batch size: 15, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:05:13,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.03 vs. limit=15.0
+2024-07-27 21:05:18,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62414.666666666664, ans=0.125
+2024-07-27 21:05:20,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=62414.666666666664, ans=0.2
+2024-07-27 21:05:23,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=62428.0, ans=0.125
+2024-07-27 21:05:24,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=62428.0, ans=0.5
+2024-07-27 21:05:26,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=62428.0, ans=0.125
+2024-07-27 21:05:29,069 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.135e+01 6.365e+01 7.384e+01 8.628e+01 1.400e+02, threshold=1.477e+02, percent-clipped=0.0
+2024-07-27 21:05:32,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62441.333333333336, ans=0.125
+2024-07-27 21:05:37,067 INFO [train.py:1114] (1/4) Epoch 5, batch 5950, loss[loss=0.2369, simple_loss=0.3084, pruned_loss=0.08276, over 4680.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3217, pruned_loss=0.08393, over 940323.64 frames. ], batch size: 15, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:05:39,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=62454.666666666664, ans=0.0
+2024-07-27 21:05:43,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.12 vs. limit=15.0
+2024-07-27 21:05:44,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=62468.0, ans=0.125
+2024-07-27 21:05:49,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.76 vs. limit=6.0
+2024-07-27 21:05:56,115 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:05:57,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=62481.333333333336, ans=0.0
+2024-07-27 21:06:05,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=62508.0, ans=0.125
+2024-07-27 21:06:13,055 INFO [train.py:1114] (1/4) Epoch 5, batch 6000, loss[loss=0.2604, simple_loss=0.3354, pruned_loss=0.09273, over 4336.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3224, pruned_loss=0.08484, over 937493.57 frames. ], batch size: 26, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:06:13,056 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 21:07:14,200 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.2286, 4.6006, 4.4023, 5.0564], device='cuda:1')
+2024-07-27 21:07:25,585 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.1984, simple_loss=0.3025, pruned_loss=0.04714, over 944034.00 frames. 
+2024-07-27 21:07:25,586 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 21:07:32,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=62534.666666666664, ans=0.125
+2024-07-27 21:07:42,736 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:07:47,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=62548.0, ans=0.05
+2024-07-27 21:07:55,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 6.382e+01 7.254e+01 8.565e+01 1.652e+02, threshold=1.451e+02, percent-clipped=1.0
+2024-07-27 21:07:59,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=62574.666666666664, ans=0.015
+2024-07-27 21:08:03,173 INFO [train.py:1114] (1/4) Epoch 5, batch 6050, loss[loss=0.2174, simple_loss=0.2984, pruned_loss=0.06823, over 4769.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3218, pruned_loss=0.08456, over 938697.06 frames. ], batch size: 12, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:08:06,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=62588.0, ans=0.125
+2024-07-27 21:08:16,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=62601.333333333336, ans=0.125
+2024-07-27 21:08:17,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=62601.333333333336, ans=0.0
+2024-07-27 21:08:29,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.55 vs. limit=15.0
+2024-07-27 21:08:30,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=62628.0, ans=0.125
+2024-07-27 21:08:37,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=62641.333333333336, ans=0.125
+2024-07-27 21:08:37,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62641.333333333336, ans=0.125
+2024-07-27 21:08:38,502 INFO [train.py:1114] (1/4) Epoch 5, batch 6100, loss[loss=0.264, simple_loss=0.3468, pruned_loss=0.09056, over 4692.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3217, pruned_loss=0.08402, over 938206.21 frames. ], batch size: 15, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:08:52,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=62681.333333333336, ans=0.0
+2024-07-27 21:08:52,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62681.333333333336, ans=0.1
+2024-07-27 21:09:03,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62694.666666666664, ans=0.1
+2024-07-27 21:09:04,969 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 6.274e+01 6.891e+01 8.796e+01 1.456e+02, threshold=1.378e+02, percent-clipped=1.0
+2024-07-27 21:09:05,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=62708.0, ans=0.125
+2024-07-27 21:09:07,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=62708.0, ans=0.125
+2024-07-27 21:09:09,109 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:09:13,061 INFO [train.py:1114] (1/4) Epoch 5, batch 6150, loss[loss=0.3027, simple_loss=0.3656, pruned_loss=0.1199, over 3492.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3226, pruned_loss=0.08483, over 937034.68 frames. ], batch size: 35, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:09:18,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=62721.333333333336, ans=0.125
+2024-07-27 21:09:35,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62734.666666666664, ans=0.125
+2024-07-27 21:09:40,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=62748.0, ans=0.2
+2024-07-27 21:09:54,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=62774.666666666664, ans=0.025
+2024-07-27 21:09:58,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=62788.0, ans=0.2
+2024-07-27 21:09:58,596 INFO [train.py:1114] (1/4) Epoch 5, batch 6200, loss[loss=0.2612, simple_loss=0.3278, pruned_loss=0.0973, over 4741.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3223, pruned_loss=0.08446, over 936576.70 frames. ], batch size: 14, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:10:05,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62801.333333333336, ans=0.1
+2024-07-27 21:10:11,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=62801.333333333336, ans=0.0
+2024-07-27 21:10:25,267 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.230e+01 6.498e+01 7.693e+01 9.750e+01 1.653e+02, threshold=1.539e+02, percent-clipped=3.0
+2024-07-27 21:10:33,739 INFO [train.py:1114] (1/4) Epoch 5, batch 6250, loss[loss=0.2116, simple_loss=0.2918, pruned_loss=0.06564, over 4804.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3227, pruned_loss=0.08557, over 933050.81 frames. ], batch size: 14, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:10:33,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=62854.666666666664, ans=0.2
+2024-07-27 21:10:42,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=62868.0, ans=0.0
+2024-07-27 21:10:45,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=62868.0, ans=0.125
+2024-07-27 21:10:48,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=62868.0, ans=0.125
+2024-07-27 21:10:56,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0
+2024-07-27 21:10:59,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=62894.666666666664, ans=0.125
+2024-07-27 21:11:05,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=62908.0, ans=0.0
+2024-07-27 21:11:11,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=62921.333333333336, ans=0.125
+2024-07-27 21:11:12,026 INFO [train.py:1114] (1/4) Epoch 5, batch 6300, loss[loss=0.2214, simple_loss=0.3002, pruned_loss=0.07135, over 4565.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3231, pruned_loss=0.08604, over 929126.92 frames. ], batch size: 10, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:11:14,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=62921.333333333336, ans=0.125
+2024-07-27 21:11:15,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.98 vs. limit=22.5
+2024-07-27 21:11:21,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=62934.666666666664, ans=0.0
+2024-07-27 21:11:31,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.37 vs. limit=15.0
+2024-07-27 21:11:37,090 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.292e+01 6.261e+01 7.099e+01 7.903e+01 1.165e+02, threshold=1.420e+02, percent-clipped=0.0
+2024-07-27 21:11:37,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62961.333333333336, ans=0.1
+2024-07-27 21:11:40,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.00 vs. limit=22.5
+2024-07-27 21:11:41,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.76 vs. limit=22.5
+2024-07-27 21:11:44,969 INFO [train.py:1114] (1/4) Epoch 5, batch 6350, loss[loss=0.2496, simple_loss=0.3294, pruned_loss=0.08493, over 4441.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3223, pruned_loss=0.0851, over 933173.77 frames. ], batch size: 21, lr: 1.45e-02, grad_scale: 64.0
+2024-07-27 21:11:57,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=63014.666666666664, ans=0.125
+2024-07-27 21:12:03,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63014.666666666664, ans=0.1
+2024-07-27 21:12:06,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=63028.0, ans=0.125
+2024-07-27 21:12:20,304 INFO [train.py:1114] (1/4) Epoch 5, batch 6400, loss[loss=0.2506, simple_loss=0.3267, pruned_loss=0.08719, over 4638.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3224, pruned_loss=0.08533, over 934984.63 frames. ], batch size: 13, lr: 1.45e-02, grad_scale: 32.0
+2024-07-27 21:12:29,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=63068.0, ans=0.2
+2024-07-27 21:12:44,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63094.666666666664, ans=0.125
+2024-07-27 21:12:45,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=63094.666666666664, ans=0.125
+2024-07-27 21:12:47,232 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.719e+01 7.795e+01 8.869e+01 1.661e+02, threshold=1.559e+02, percent-clipped=1.0
+2024-07-27 21:12:53,958 INFO [train.py:1114] (1/4) Epoch 5, batch 6450, loss[loss=0.2773, simple_loss=0.3588, pruned_loss=0.09793, over 4581.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.323, pruned_loss=0.08481, over 938731.63 frames. ], batch size: 21, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:12:56,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63121.333333333336, ans=0.1
+2024-07-27 21:13:04,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.82 vs. limit=22.5
+2024-07-27 21:13:27,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=63174.666666666664, ans=0.125
+2024-07-27 21:13:30,344 INFO [train.py:1114] (1/4) Epoch 5, batch 6500, loss[loss=0.319, simple_loss=0.3616, pruned_loss=0.1382, over 3381.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3216, pruned_loss=0.0838, over 939984.37 frames. ], batch size: 35, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:13:31,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=63188.0, ans=0.07
+2024-07-27 21:13:45,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=63214.666666666664, ans=0.0
+2024-07-27 21:13:45,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63214.666666666664, ans=0.1
+2024-07-27 21:13:47,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=63214.666666666664, ans=0.125
+2024-07-27 21:13:56,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.095e+01 6.739e+01 7.533e+01 1.080e+02, threshold=1.348e+02, percent-clipped=0.0
+2024-07-27 21:14:03,804 INFO [train.py:1114] (1/4) Epoch 5, batch 6550, loss[loss=0.2379, simple_loss=0.3093, pruned_loss=0.08328, over 4829.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3199, pruned_loss=0.08252, over 942958.55 frames. ], batch size: 11, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:14:06,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=63254.666666666664, ans=0.125
+2024-07-27 21:14:12,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0
+2024-07-27 21:14:15,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=63268.0, ans=0.0
+2024-07-27 21:14:16,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=63281.333333333336, ans=0.125
+2024-07-27 21:14:17,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.04 vs. limit=15.0
+2024-07-27 21:14:20,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=63281.333333333336, ans=0.0
+2024-07-27 21:14:21,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=63281.333333333336, ans=0.2
+2024-07-27 21:14:29,062 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=15.0
+2024-07-27 21:14:35,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.77 vs. limit=22.5
+2024-07-27 21:14:36,523 INFO [train.py:1114] (1/4) Epoch 5, batch 6600, loss[loss=0.269, simple_loss=0.3236, pruned_loss=0.1072, over 4929.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3202, pruned_loss=0.08294, over 944891.16 frames. ], batch size: 14, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:14:37,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=63321.333333333336, ans=0.125
+2024-07-27 21:14:38,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0
+2024-07-27 21:14:45,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.25 vs. limit=22.5
+2024-07-27 21:14:54,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63348.0, ans=0.125
+2024-07-27 21:14:57,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=63361.333333333336, ans=0.0
+2024-07-27 21:14:59,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.90 vs. limit=15.0
+2024-07-27 21:15:03,301 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.369e+01 7.181e+01 8.583e+01 1.412e+02, threshold=1.436e+02, percent-clipped=2.0
+2024-07-27 21:15:06,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=63374.666666666664, ans=0.125
+2024-07-27 21:15:09,889 INFO [train.py:1114] (1/4) Epoch 5, batch 6650, loss[loss=0.2754, simple_loss=0.3505, pruned_loss=0.1002, over 4643.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3199, pruned_loss=0.0826, over 943457.58 frames. ], batch size: 17, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:15:10,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=63388.0, ans=0.0
+2024-07-27 21:15:18,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=15.0
+2024-07-27 21:15:24,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=63414.666666666664, ans=0.025
+2024-07-27 21:15:35,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=63428.0, ans=0.125
+2024-07-27 21:15:42,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63441.333333333336, ans=0.1
+2024-07-27 21:15:43,856 INFO [train.py:1114] (1/4) Epoch 5, batch 6700, loss[loss=0.2583, simple_loss=0.334, pruned_loss=0.0913, over 4668.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3208, pruned_loss=0.08375, over 941975.10 frames. ], batch size: 19, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:15:44,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=63454.666666666664, ans=0.025
+2024-07-27 21:16:02,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.95 vs. limit=15.0
+2024-07-27 21:16:11,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.250e+01 6.326e+01 7.074e+01 8.168e+01 1.305e+02, threshold=1.415e+02, percent-clipped=0.0
+2024-07-27 21:16:19,124 INFO [train.py:1114] (1/4) Epoch 5, batch 6750, loss[loss=0.3234, simple_loss=0.3902, pruned_loss=0.1284, over 4271.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3211, pruned_loss=0.08365, over 940145.82 frames. ], batch size: 25, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:16:21,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=63521.333333333336, ans=0.1
+2024-07-27 21:16:44,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=63561.333333333336, ans=0.125
+2024-07-27 21:16:46,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=63561.333333333336, ans=0.125
+2024-07-27 21:16:48,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.87 vs. limit=15.0
+2024-07-27 21:16:54,729 INFO [train.py:1114] (1/4) Epoch 5, batch 6800, loss[loss=0.2112, simple_loss=0.3032, pruned_loss=0.05955, over 4636.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3213, pruned_loss=0.08399, over 938306.23 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:16:58,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=63588.0, ans=0.125
+2024-07-27 21:17:02,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=63601.333333333336, ans=0.2
+2024-07-27 21:17:02,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=63601.333333333336, ans=0.0
+2024-07-27 21:17:03,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-07-27 21:17:04,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.96 vs. limit=22.5
+2024-07-27 21:17:09,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.22 vs. limit=22.5
+2024-07-27 21:17:09,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=63614.666666666664, ans=0.125
+2024-07-27 21:17:21,272 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.069e+01 6.010e+01 6.782e+01 8.396e+01 1.269e+02, threshold=1.356e+02, percent-clipped=0.0
+2024-07-27 21:17:23,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=63641.333333333336, ans=0.2
+2024-07-27 21:17:28,002 INFO [train.py:1114] (1/4) Epoch 5, batch 6850, loss[loss=0.2711, simple_loss=0.3487, pruned_loss=0.09675, over 4692.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3207, pruned_loss=0.0836, over 939925.30 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:17:31,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.22 vs. limit=10.0
+2024-07-27 21:17:44,055 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:17:55,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=63708.0, ans=0.0
+2024-07-27 21:18:02,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0
+2024-07-27 21:18:02,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.38 vs. limit=10.0
+2024-07-27 21:18:02,828 INFO [train.py:1114] (1/4) Epoch 5, batch 6900, loss[loss=0.2394, simple_loss=0.3088, pruned_loss=0.08503, over 4967.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3222, pruned_loss=0.08403, over 942229.18 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0
+2024-07-27 21:18:16,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=63748.0, ans=0.125
+2024-07-27 21:18:24,573 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.956e-01
+2024-07-27 21:18:24,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.65 vs. limit=15.0
+2024-07-27 21:18:30,458 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.148e+01 6.563e+01 7.062e+01 8.255e+01 1.155e+02, threshold=1.412e+02, percent-clipped=0.0
+2024-07-27 21:18:36,457 INFO [train.py:1114] (1/4) Epoch 5, batch 6950, loss[loss=0.2391, simple_loss=0.3048, pruned_loss=0.08672, over 4529.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3227, pruned_loss=0.08466, over 939779.55 frames. ], batch size: 10, lr: 1.44e-02, grad_scale: 16.0
+2024-07-27 21:18:40,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=63788.0, ans=0.1
+2024-07-27 21:18:42,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=63788.0, ans=0.125
+2024-07-27 21:18:54,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=15.0
+2024-07-27 21:19:13,710 INFO [train.py:1114] (1/4) Epoch 5, batch 7000, loss[loss=0.2766, simple_loss=0.3567, pruned_loss=0.09819, over 4617.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3217, pruned_loss=0.08396, over 938426.65 frames. ], batch size: 17, lr: 1.44e-02, grad_scale: 16.0
+2024-07-27 21:19:32,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.43 vs. limit=15.0
+2024-07-27 21:19:40,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.75 vs. limit=15.0
+2024-07-27 21:19:40,754 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.209e+01 6.125e+01 6.883e+01 8.000e+01 1.166e+02, threshold=1.377e+02, percent-clipped=0.0
+2024-07-27 21:19:41,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.82 vs. limit=22.5
+2024-07-27 21:19:42,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.65 vs. limit=10.0
+2024-07-27 21:19:44,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=63908.0, ans=0.0
+2024-07-27 21:19:46,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=63921.333333333336, ans=0.0
+2024-07-27 21:19:46,897 INFO [train.py:1114] (1/4) Epoch 5, batch 7050, loss[loss=0.2314, simple_loss=0.3171, pruned_loss=0.07283, over 4739.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3216, pruned_loss=0.08346, over 941687.89 frames. ], batch size: 19, lr: 1.44e-02, grad_scale: 16.0
+2024-07-27 21:20:04,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=63948.0, ans=0.2
+2024-07-27 21:20:13,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=63974.666666666664, ans=0.2
+2024-07-27 21:20:20,293 INFO [train.py:1114] (1/4) Epoch 5, batch 7100, loss[loss=0.2465, simple_loss=0.321, pruned_loss=0.08602, over 4813.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3227, pruned_loss=0.08415, over 936720.77 frames. ], batch size: 15, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:20:22,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63988.0, ans=0.125
+2024-07-27 21:20:43,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=64014.666666666664, ans=0.125
+2024-07-27 21:20:53,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=64041.333333333336, ans=0.2
+2024-07-27 21:20:53,653 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.110e+01 6.595e+01 7.846e+01 1.344e+02, threshold=1.319e+02, percent-clipped=0.0
+2024-07-27 21:20:59,481 INFO [train.py:1114] (1/4) Epoch 5, batch 7150, loss[loss=0.2458, simple_loss=0.3239, pruned_loss=0.08387, over 4564.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3215, pruned_loss=0.08426, over 937556.34 frames. ], batch size: 21, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:20:59,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=64054.666666666664, ans=0.0
+2024-07-27 21:20:59,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=64054.666666666664, ans=0.2
+2024-07-27 21:21:04,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=64054.666666666664, ans=0.0
+2024-07-27 21:21:04,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64054.666666666664, ans=0.125
+2024-07-27 21:21:04,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=64054.666666666664, ans=0.025
+2024-07-27 21:21:04,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=64054.666666666664, ans=0.125
+2024-07-27 21:21:08,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=64068.0, ans=0.0
+2024-07-27 21:21:15,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64081.333333333336, ans=0.0
+2024-07-27 21:21:16,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=64081.333333333336, ans=0.025
+2024-07-27 21:21:17,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=64081.333333333336, ans=0.2
+2024-07-27 21:21:26,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.79 vs. limit=5.0
+2024-07-27 21:21:31,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64108.0, ans=0.1
+2024-07-27 21:21:32,466 INFO [train.py:1114] (1/4) Epoch 5, batch 7200, loss[loss=0.2992, simple_loss=0.3601, pruned_loss=0.1191, over 4806.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3217, pruned_loss=0.08449, over 938006.18 frames. ], batch size: 15, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:21:35,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.35 vs. limit=22.5
+2024-07-27 21:21:39,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.15 vs. limit=15.0
+2024-07-27 21:21:39,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=64134.666666666664, ans=0.95
+2024-07-27 21:21:42,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=64134.666666666664, ans=0.0
+2024-07-27 21:21:44,754 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:21:46,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=64148.0, ans=0.125
+2024-07-27 21:21:59,177 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.508e+01 6.414e+01 7.135e+01 8.390e+01 1.273e+02, threshold=1.427e+02, percent-clipped=0.0
+2024-07-27 21:22:02,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.77 vs. limit=12.0
+2024-07-27 21:22:04,941 INFO [train.py:1114] (1/4) Epoch 5, batch 7250, loss[loss=0.2052, simple_loss=0.2901, pruned_loss=0.06017, over 4860.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3205, pruned_loss=0.08367, over 940003.23 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:22:16,014 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0
+2024-07-27 21:22:33,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=64228.0, ans=0.1
+2024-07-27 21:22:41,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=64241.333333333336, ans=0.125
+2024-07-27 21:22:44,620 INFO [train.py:1114] (1/4) Epoch 5, batch 7300, loss[loss=0.2115, simple_loss=0.2966, pruned_loss=0.06318, over 4852.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.32, pruned_loss=0.08282, over 940551.21 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:22:48,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=64254.666666666664, ans=0.125
+2024-07-27 21:22:51,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=64268.0, ans=0.0
+2024-07-27 21:22:51,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=64268.0, ans=0.125
+2024-07-27 21:22:53,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=64268.0, ans=0.125
+2024-07-27 21:22:55,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=64268.0, ans=0.0
+2024-07-27 21:22:58,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64281.333333333336, ans=0.1
+2024-07-27 21:23:08,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-27 21:23:14,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+01 6.188e+01 7.170e+01 8.494e+01 1.437e+02, threshold=1.434e+02, percent-clipped=1.0
+2024-07-27 21:23:19,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=64308.0, ans=0.2
+2024-07-27 21:23:20,883 INFO [train.py:1114] (1/4) Epoch 5, batch 7350, loss[loss=0.2132, simple_loss=0.2941, pruned_loss=0.0662, over 4638.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3197, pruned_loss=0.0827, over 939487.70 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:23:29,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=64334.666666666664, ans=0.02
+2024-07-27 21:23:33,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=64348.0, ans=0.125
+2024-07-27 21:23:38,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=64348.0, ans=0.125
+2024-07-27 21:23:39,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=64361.333333333336, ans=0.125
+2024-07-27 21:23:40,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=64361.333333333336, ans=0.2
+2024-07-27 21:23:47,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=64374.666666666664, ans=0.125
+2024-07-27 21:23:48,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=6.0
+2024-07-27 21:23:51,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=64374.666666666664, ans=0.125
+2024-07-27 21:23:56,166 INFO [train.py:1114] (1/4) Epoch 5, batch 7400, loss[loss=0.2695, simple_loss=0.3366, pruned_loss=0.1012, over 4692.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.3197, pruned_loss=0.08204, over 940579.78 frames. ], batch size: 13, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:23:57,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=64388.0, ans=0.0
+2024-07-27 21:24:07,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=64401.333333333336, ans=0.125
+2024-07-27 21:24:19,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64428.0, ans=0.125
+2024-07-27 21:24:19,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.29 vs. limit=15.0
+2024-07-27 21:24:23,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 6.697e+01 7.956e+01 9.233e+01 1.549e+02, threshold=1.591e+02, percent-clipped=1.0
+2024-07-27 21:24:24,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=64441.333333333336, ans=0.0
+2024-07-27 21:24:29,312 INFO [train.py:1114] (1/4) Epoch 5, batch 7450, loss[loss=0.2354, simple_loss=0.3049, pruned_loss=0.08292, over 4613.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3191, pruned_loss=0.0826, over 937791.13 frames. ], batch size: 11, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:24:45,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=64481.333333333336, ans=0.125
+2024-07-27 21:24:45,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=64481.333333333336, ans=0.125
+2024-07-27 21:24:54,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=64494.666666666664, ans=0.025
+2024-07-27 21:25:03,917 INFO [train.py:1114] (1/4) Epoch 5, batch 7500, loss[loss=0.4008, simple_loss=0.428, pruned_loss=0.1868, over 3423.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3203, pruned_loss=0.08354, over 936102.91 frames. ], batch size: 36, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:25:08,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=64521.333333333336, ans=0.025
+2024-07-27 21:25:15,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=64534.666666666664, ans=0.125
+2024-07-27 21:25:33,069 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 6.039e+01 6.623e+01 7.552e+01 1.223e+02, threshold=1.325e+02, percent-clipped=0.0
+2024-07-27 21:25:42,033 INFO [train.py:1114] (1/4) Epoch 5, batch 7550, loss[loss=0.2675, simple_loss=0.3421, pruned_loss=0.09643, over 4614.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3229, pruned_loss=0.08457, over 935804.21 frames. ], batch size: 17, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:25:54,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.51 vs. limit=15.0
+2024-07-27 21:26:05,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=64614.666666666664, ans=0.0
+2024-07-27 21:26:27,399 INFO [train.py:1114] (1/4) Epoch 5, batch 7600, loss[loss=0.2913, simple_loss=0.3664, pruned_loss=0.1081, over 4823.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3223, pruned_loss=0.08393, over 937680.39 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 32.0
+2024-07-27 21:26:37,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64668.0, ans=0.1
+2024-07-27 21:26:43,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0
+2024-07-27 21:26:52,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=64681.333333333336, ans=0.125
+2024-07-27 21:26:58,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=64694.666666666664, ans=0.5
+2024-07-27 21:27:01,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=64708.0, ans=0.2
+2024-07-27 21:27:02,470 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.190e+01 6.111e+01 6.673e+01 8.200e+01 1.239e+02, threshold=1.335e+02, percent-clipped=0.0
+2024-07-27 21:27:07,740 INFO [train.py:1114] (1/4) Epoch 5, batch 7650, loss[loss=0.2216, simple_loss=0.3077, pruned_loss=0.06782, over 4927.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3222, pruned_loss=0.08392, over 936711.04 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:27:13,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.86 vs. limit=12.0
+2024-07-27 21:27:18,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64734.666666666664, ans=0.125
+2024-07-27 21:27:22,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64734.666666666664, ans=0.1
+2024-07-27 21:27:24,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=64748.0, ans=0.125
+2024-07-27 21:27:32,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0
+2024-07-27 21:27:41,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=64774.666666666664, ans=0.125
+2024-07-27 21:27:42,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.54 vs. limit=10.0
+2024-07-27 21:27:43,708 INFO [train.py:1114] (1/4) Epoch 5, batch 7700, loss[loss=0.318, simple_loss=0.3982, pruned_loss=0.1189, over 4691.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3216, pruned_loss=0.08315, over 934476.17 frames. ], batch size: 13, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:28:01,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.79 vs. limit=10.0
+2024-07-27 21:28:02,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=64801.333333333336, ans=0.125
+2024-07-27 21:28:02,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=64801.333333333336, ans=0.2
+2024-07-27 21:28:02,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.00 vs. limit=15.0
+2024-07-27 21:28:14,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=15.0
+2024-07-27 21:28:20,006 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.292e+01 7.097e+01 8.458e+01 1.099e+02, threshold=1.419e+02, percent-clipped=0.0
+2024-07-27 21:28:25,206 INFO [train.py:1114] (1/4) Epoch 5, batch 7750, loss[loss=0.2291, simple_loss=0.3127, pruned_loss=0.07279, over 4929.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3218, pruned_loss=0.08301, over 936142.02 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 16.0
+2024-07-27 21:28:25,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=64854.666666666664, ans=0.125
+2024-07-27 21:28:56,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0
+2024-07-27 21:29:00,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=64908.0, ans=0.125
+2024-07-27 21:29:02,993 INFO [train.py:1114] (1/4) Epoch 5, batch 7800, loss[loss=0.3035, simple_loss=0.3815, pruned_loss=0.1127, over 4650.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3228, pruned_loss=0.08306, over 938015.86 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:29:06,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=64921.333333333336, ans=0.125
+2024-07-27 21:29:06,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=64921.333333333336, ans=0.0
+2024-07-27 21:29:07,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=64921.333333333336, ans=0.125
+2024-07-27 21:29:14,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=64934.666666666664, ans=0.5
+2024-07-27 21:29:20,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=64948.0, ans=0.125
+2024-07-27 21:29:25,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.01 vs. limit=22.5
+2024-07-27 21:29:26,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.54 vs. limit=5.0
+2024-07-27 21:29:29,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=64974.666666666664, ans=0.125
+2024-07-27 21:29:30,977 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.312e+01 7.129e+01 8.364e+01 1.154e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 21:29:33,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=64974.666666666664, ans=0.2
+2024-07-27 21:29:36,490 INFO [train.py:1114] (1/4) Epoch 5, batch 7850, loss[loss=0.2508, simple_loss=0.3015, pruned_loss=0.1, over 4536.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3218, pruned_loss=0.08249, over 937670.61 frames. ], batch size: 10, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:29:45,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64988.0, ans=0.1
+2024-07-27 21:29:49,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=65001.333333333336, ans=0.125
+2024-07-27 21:29:51,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=65001.333333333336, ans=0.125
+2024-07-27 21:30:17,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=65041.333333333336, ans=0.1
+2024-07-27 21:30:22,622 INFO [train.py:1114] (1/4) Epoch 5, batch 7900, loss[loss=0.2261, simple_loss=0.3179, pruned_loss=0.06717, over 4872.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3226, pruned_loss=0.08292, over 934115.83 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:30:26,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=15.0
+2024-07-27 21:30:31,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65068.0, ans=0.125
+2024-07-27 21:30:34,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=65068.0, ans=0.0
+2024-07-27 21:30:43,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=65081.333333333336, ans=0.5
+2024-07-27 21:30:46,303 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:30:50,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=65094.666666666664, ans=0.025
+2024-07-27 21:30:52,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=65094.666666666664, ans=0.125
+2024-07-27 21:30:52,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=65094.666666666664, ans=0.025
+2024-07-27 21:30:55,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.363e+01 7.228e+01 8.012e+01 1.089e+02, threshold=1.446e+02, percent-clipped=0.0
+2024-07-27 21:30:55,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=65108.0, ans=0.0
+2024-07-27 21:31:03,072 INFO [train.py:1114] (1/4) Epoch 5, batch 7950, loss[loss=0.3142, simple_loss=0.3728, pruned_loss=0.1277, over 3163.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3223, pruned_loss=0.0829, over 935575.44 frames. ], batch size: 35, lr: 1.42e-02, grad_scale: 16.0
+2024-07-27 21:31:05,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.22 vs. limit=10.0
+2024-07-27 21:31:13,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.31 vs. limit=6.0
+2024-07-27 21:31:14,783 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:31:16,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=65148.0, ans=0.125
+2024-07-27 21:31:22,629 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 21:31:32,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=65161.333333333336, ans=0.2
+2024-07-27 21:31:33,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=65161.333333333336, ans=0.125
+2024-07-27 21:31:35,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=65161.333333333336, ans=0.2
+2024-07-27 21:31:38,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=65174.666666666664, ans=0.025
+2024-07-27 21:31:43,111 INFO [train.py:1114] (1/4) Epoch 5, batch 8000, loss[loss=0.2611, simple_loss=0.333, pruned_loss=0.09461, over 4609.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3208, pruned_loss=0.08264, over 934856.35 frames. ], batch size: 11, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:32:00,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65201.333333333336, ans=0.125
+2024-07-27 21:32:02,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65214.666666666664, ans=0.125
+2024-07-27 21:32:08,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=65228.0, ans=0.2
+2024-07-27 21:32:08,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65228.0, ans=0.125
+2024-07-27 21:32:15,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=65241.333333333336, ans=0.125
+2024-07-27 21:32:17,001 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 6.447e+01 7.209e+01 8.816e+01 1.330e+02, threshold=1.442e+02, percent-clipped=0.0
+2024-07-27 21:32:22,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=65241.333333333336, ans=0.125
+2024-07-27 21:32:22,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=65254.666666666664, ans=0.0
+2024-07-27 21:32:23,332 INFO [train.py:1114] (1/4) Epoch 5, batch 8050, loss[loss=0.2247, simple_loss=0.3142, pruned_loss=0.0676, over 4822.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3218, pruned_loss=0.08262, over 934542.72 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:32:24,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=65254.666666666664, ans=0.125
+2024-07-27 21:32:32,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=65268.0, ans=0.125
+2024-07-27 21:32:33,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.98 vs. limit=15.0
+2024-07-27 21:32:36,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=65281.333333333336, ans=0.0
+2024-07-27 21:32:42,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=65281.333333333336, ans=0.0
+2024-07-27 21:32:54,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=65308.0, ans=0.2
+2024-07-27 21:32:55,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=65308.0, ans=0.2
+2024-07-27 21:33:02,303 INFO [train.py:1114] (1/4) Epoch 5, batch 8100, loss[loss=0.2953, simple_loss=0.3731, pruned_loss=0.1088, over 4823.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3216, pruned_loss=0.08258, over 934140.03 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:33:06,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=65321.333333333336, ans=0.1
+2024-07-27 21:33:07,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.94 vs. limit=15.0
+2024-07-27 21:33:07,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.67 vs. limit=15.0
+2024-07-27 21:33:16,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65348.0, ans=0.125
+2024-07-27 21:33:30,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 6.276e+01 6.776e+01 7.896e+01 1.142e+02, threshold=1.355e+02, percent-clipped=0.0
+2024-07-27 21:33:35,662 INFO [train.py:1114] (1/4) Epoch 5, batch 8150, loss[loss=0.2366, simple_loss=0.3202, pruned_loss=0.07651, over 4797.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3201, pruned_loss=0.08212, over 937353.55 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0
+2024-07-27 21:34:02,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=65428.0, ans=0.0
+2024-07-27 21:34:04,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=65441.333333333336, ans=0.125
+2024-07-27 21:34:08,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.82 vs.
limit=12.0 +2024-07-27 21:34:08,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=65441.333333333336, ans=0.04949747468305833 +2024-07-27 21:34:09,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=65441.333333333336, ans=0.0 +2024-07-27 21:34:11,728 INFO [train.py:1114] (1/4) Epoch 5, batch 8200, loss[loss=0.2749, simple_loss=0.352, pruned_loss=0.09887, over 4802.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3205, pruned_loss=0.08199, over 938653.53 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:34:14,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=65454.666666666664, ans=0.0 +2024-07-27 21:34:39,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65468.0, ans=0.1 +2024-07-27 21:34:43,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65481.333333333336, ans=0.0 +2024-07-27 21:34:48,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=65481.333333333336, ans=0.125 +2024-07-27 21:34:53,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=65494.666666666664, ans=0.0 +2024-07-27 21:35:01,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.058e+01 6.820e+01 7.758e+01 1.671e+02, threshold=1.364e+02, percent-clipped=1.0 +2024-07-27 21:35:02,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.90 vs. limit=15.0 +2024-07-27 21:35:06,471 INFO [train.py:1114] (1/4) Epoch 5, batch 8250, loss[loss=0.2514, simple_loss=0.3189, pruned_loss=0.09199, over 4902.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.32, pruned_loss=0.08189, over 938809.60 frames. ], batch size: 13, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:35:11,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65521.333333333336, ans=0.125 +2024-07-27 21:35:12,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=65534.666666666664, ans=0.0 +2024-07-27 21:35:14,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.32 vs. limit=15.0 +2024-07-27 21:35:26,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=65548.0, ans=0.125 +2024-07-27 21:35:28,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=65548.0, ans=0.025 +2024-07-27 21:35:44,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.44 vs. 
limit=10.0 +2024-07-27 21:35:58,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=65574.66666666667, ans=0.025 +2024-07-27 21:36:00,904 INFO [train.py:1114] (1/4) Epoch 5, batch 8300, loss[loss=0.2674, simple_loss=0.364, pruned_loss=0.08546, over 4905.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3213, pruned_loss=0.08243, over 938605.68 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:36:07,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65588.0, ans=0.1 +2024-07-27 21:36:20,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=65614.66666666667, ans=0.0 +2024-07-27 21:36:21,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=27.77 vs. limit=22.5 +2024-07-27 21:36:21,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=65614.66666666667, ans=0.025 +2024-07-27 21:36:32,859 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.223e+01 6.833e+01 7.614e+01 1.184e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 21:36:32,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=65641.33333333333, ans=0.125 +2024-07-27 21:36:44,505 INFO [train.py:1114] (1/4) Epoch 5, batch 8350, loss[loss=0.2187, simple_loss=0.3065, pruned_loss=0.06543, over 4800.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3198, pruned_loss=0.08145, over 941373.93 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:36:51,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=65654.66666666667, ans=0.125 +2024-07-27 21:36:54,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=65668.0, ans=0.125 +2024-07-27 21:36:56,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=65668.0, ans=0.125 +2024-07-27 21:37:00,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=65681.33333333333, ans=0.09899494936611666 +2024-07-27 21:37:04,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=65681.33333333333, ans=0.125 +2024-07-27 21:37:11,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=65694.66666666667, ans=0.2 +2024-07-27 21:37:24,209 INFO [train.py:1114] (1/4) Epoch 5, batch 8400, loss[loss=0.2085, simple_loss=0.2726, pruned_loss=0.07216, over 4769.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3193, pruned_loss=0.08126, over 940272.16 frames. 
], batch size: 12, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:37:30,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=65734.66666666667, ans=0.0 +2024-07-27 21:37:37,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=65748.0, ans=0.125 +2024-07-27 21:37:37,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=65748.0, ans=0.025 +2024-07-27 21:38:02,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.300e+01 6.390e+01 7.817e+01 9.094e+01 1.508e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 21:38:04,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.75 vs. limit=22.5 +2024-07-27 21:38:07,396 INFO [train.py:1114] (1/4) Epoch 5, batch 8450, loss[loss=0.2474, simple_loss=0.3373, pruned_loss=0.07872, over 4795.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3197, pruned_loss=0.08131, over 939247.57 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:38:08,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=65788.0, ans=0.0 +2024-07-27 21:38:08,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=65788.0, ans=0.025 +2024-07-27 21:38:16,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.05 vs. limit=10.0 +2024-07-27 21:38:16,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.49 vs. limit=10.0 +2024-07-27 21:38:19,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=65801.33333333333, ans=0.125 +2024-07-27 21:38:39,233 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:38:43,063 INFO [train.py:1114] (1/4) Epoch 5, batch 8500, loss[loss=0.2116, simple_loss=0.2946, pruned_loss=0.06427, over 4624.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3189, pruned_loss=0.08098, over 938973.70 frames. 
], batch size: 11, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:38:55,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=65881.33333333333, ans=0.07 +2024-07-27 21:38:55,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=65881.33333333333, ans=0.1 +2024-07-27 21:39:02,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=65881.33333333333, ans=0.2 +2024-07-27 21:39:12,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65908.0, ans=0.1 +2024-07-27 21:39:14,109 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 6.095e+01 6.601e+01 7.527e+01 1.077e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-27 21:39:19,306 INFO [train.py:1114] (1/4) Epoch 5, batch 8550, loss[loss=0.2989, simple_loss=0.3561, pruned_loss=0.1208, over 4814.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3183, pruned_loss=0.08093, over 939763.70 frames. ], batch size: 11, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:39:29,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.88 vs. limit=15.0 +2024-07-27 21:39:36,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65948.0, ans=0.125 +2024-07-27 21:39:39,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=65961.33333333333, ans=0.0 +2024-07-27 21:39:41,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=65961.33333333333, ans=0.125 +2024-07-27 21:39:41,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.10 vs. limit=15.0 +2024-07-27 21:39:53,160 INFO [train.py:1114] (1/4) Epoch 5, batch 8600, loss[loss=0.2258, simple_loss=0.3054, pruned_loss=0.07317, over 4807.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3184, pruned_loss=0.08158, over 939410.51 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:40:02,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=66001.33333333333, ans=0.2 +2024-07-27 21:40:10,197 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.70 vs. limit=15.0 +2024-07-27 21:40:13,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=66028.0, ans=0.0 +2024-07-27 21:40:13,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=66028.0, ans=0.125 +2024-07-27 21:40:14,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.07 vs. 
limit=15.0 +2024-07-27 21:40:19,962 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.202e+01 6.597e+01 7.756e+01 9.469e+01 1.243e+02, threshold=1.551e+02, percent-clipped=0.0 +2024-07-27 21:40:22,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=66041.33333333333, ans=0.0 +2024-07-27 21:40:25,121 INFO [train.py:1114] (1/4) Epoch 5, batch 8650, loss[loss=0.2794, simple_loss=0.3485, pruned_loss=0.1052, over 4912.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3181, pruned_loss=0.08131, over 940386.50 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:40:34,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=12.0 +2024-07-27 21:40:35,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=66068.0, ans=0.025 +2024-07-27 21:40:39,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=66081.33333333333, ans=0.0 +2024-07-27 21:40:46,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0 +2024-07-27 21:40:51,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66108.0, ans=0.125 +2024-07-27 21:40:58,368 INFO [train.py:1114] (1/4) Epoch 5, batch 8700, loss[loss=0.204, simple_loss=0.2976, pruned_loss=0.05522, over 4757.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3189, pruned_loss=0.08193, over 938044.52 frames. ], batch size: 13, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:41:04,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=66121.33333333333, ans=0.0 +2024-07-27 21:41:05,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66134.66666666667, ans=0.1 +2024-07-27 21:41:06,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=66134.66666666667, ans=0.125 +2024-07-27 21:41:07,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=66134.66666666667, ans=0.0 +2024-07-27 21:41:27,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=66174.66666666667, ans=0.0 +2024-07-27 21:41:27,644 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 6.098e+01 6.655e+01 7.583e+01 1.149e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 21:41:29,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=66174.66666666667, ans=0.125 +2024-07-27 21:41:30,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-07-27 21:41:32,114 INFO [train.py:1114] (1/4) Epoch 5, batch 8750, loss[loss=0.3054, simple_loss=0.3747, pruned_loss=0.1181, over 4689.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3197, pruned_loss=0.0822, over 936148.71 frames. 
], batch size: 15, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:41:49,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66201.33333333333, ans=0.125 +2024-07-27 21:41:49,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66201.33333333333, ans=0.1 +2024-07-27 21:41:54,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66214.66666666667, ans=0.1 +2024-07-27 21:41:58,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.24 vs. limit=8.0 +2024-07-27 21:42:14,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=66241.33333333333, ans=0.125 +2024-07-27 21:42:21,980 INFO [train.py:1114] (1/4) Epoch 5, batch 8800, loss[loss=0.2342, simple_loss=0.3232, pruned_loss=0.07263, over 4937.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3209, pruned_loss=0.08266, over 937018.79 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:42:22,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=66254.66666666667, ans=0.125 +2024-07-27 21:42:34,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=66268.0, ans=0.125 +2024-07-27 21:42:48,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=66294.66666666667, ans=0.0 +2024-07-27 21:42:49,911 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.16 vs. limit=10.0 +2024-07-27 21:42:54,233 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.254e+01 7.129e+01 8.198e+01 1.307e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 21:42:58,856 INFO [train.py:1114] (1/4) Epoch 5, batch 8850, loss[loss=0.3049, simple_loss=0.3733, pruned_loss=0.1183, over 4524.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.32, pruned_loss=0.08241, over 931718.63 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:42:59,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=66321.33333333333, ans=0.0 +2024-07-27 21:42:59,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=66321.33333333333, ans=0.125 +2024-07-27 21:43:13,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66348.0, ans=0.1 +2024-07-27 21:43:20,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=66361.33333333333, ans=0.0 +2024-07-27 21:43:25,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.13 vs. limit=15.0 +2024-07-27 21:43:33,890 INFO [train.py:1114] (1/4) Epoch 5, batch 8900, loss[loss=0.2285, simple_loss=0.3067, pruned_loss=0.07519, over 4937.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.3204, pruned_loss=0.08253, over 930058.53 frames. 
], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:43:39,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0 +2024-07-27 21:43:53,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=66414.66666666667, ans=0.025 +2024-07-27 21:43:53,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=66414.66666666667, ans=0.2 +2024-07-27 21:43:59,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=66428.0, ans=0.025 +2024-07-27 21:44:03,543 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 6.558e+01 7.585e+01 9.378e+01 1.606e+02, threshold=1.517e+02, percent-clipped=2.0 +2024-07-27 21:44:06,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=66441.33333333333, ans=0.125 +2024-07-27 21:44:08,114 INFO [train.py:1114] (1/4) Epoch 5, batch 8950, loss[loss=0.2624, simple_loss=0.3493, pruned_loss=0.08769, over 4490.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.3214, pruned_loss=0.08262, over 930658.27 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:44:10,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66454.66666666667, ans=0.1 +2024-07-27 21:44:22,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.27 vs. limit=22.5 +2024-07-27 21:44:26,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=66481.33333333333, ans=0.0 +2024-07-27 21:44:43,391 INFO [train.py:1114] (1/4) Epoch 5, batch 9000, loss[loss=0.2797, simple_loss=0.349, pruned_loss=0.1052, over 4639.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3198, pruned_loss=0.08166, over 933709.74 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:44:43,391 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 21:44:53,404 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.9255, 3.5383, 3.6342, 3.1069, 3.1374, 3.3576, 3.6598, 3.3474], + device='cuda:1') +2024-07-27 21:44:55,830 INFO [train.py:1146] (1/4) Epoch 5, validation: loss=0.197, simple_loss=0.3006, pruned_loss=0.04666, over 944034.00 frames. 
+2024-07-27 21:44:55,830 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 21:45:07,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=66534.66666666667, ans=0.0 +2024-07-27 21:45:11,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66548.0, ans=0.0 +2024-07-27 21:45:34,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=66574.66666666667, ans=0.125 +2024-07-27 21:45:36,642 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.268e+01 6.311e+01 7.147e+01 8.276e+01 1.860e+02, threshold=1.429e+02, percent-clipped=1.0 +2024-07-27 21:45:40,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66574.66666666667, ans=0.125 +2024-07-27 21:45:41,881 INFO [train.py:1114] (1/4) Epoch 5, batch 9050, loss[loss=0.2003, simple_loss=0.2785, pruned_loss=0.06109, over 4503.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3186, pruned_loss=0.08122, over 933996.20 frames. ], batch size: 10, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:46:15,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66654.66666666667, ans=0.1 +2024-07-27 21:46:16,350 INFO [train.py:1114] (1/4) Epoch 5, batch 9100, loss[loss=0.2224, simple_loss=0.3206, pruned_loss=0.06204, over 4930.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3192, pruned_loss=0.0817, over 936748.69 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:46:42,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66708.0, ans=0.125 +2024-07-27 21:46:44,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.191e+01 6.256e+01 6.952e+01 8.323e+01 1.113e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 21:46:48,047 INFO [train.py:1114] (1/4) Epoch 5, batch 9150, loss[loss=0.2535, simple_loss=0.3264, pruned_loss=0.09029, over 4809.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3212, pruned_loss=0.08284, over 935336.38 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:46:53,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66721.33333333333, ans=0.1 +2024-07-27 21:46:54,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=66721.33333333333, ans=0.0 +2024-07-27 21:46:55,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66721.33333333333, ans=0.125 +2024-07-27 21:47:01,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=66734.66666666667, ans=0.0 +2024-07-27 21:47:20,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=66774.66666666667, ans=0.125 +2024-07-27 21:47:22,557 INFO [train.py:1114] (1/4) Epoch 5, batch 9200, loss[loss=0.2292, simple_loss=0.3041, pruned_loss=0.07713, over 4850.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3196, pruned_loss=0.08174, over 937391.05 frames. 
], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:47:31,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=66801.33333333333, ans=0.0 +2024-07-27 21:47:38,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66814.66666666667, ans=0.1 +2024-07-27 21:47:40,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=66814.66666666667, ans=0.125 +2024-07-27 21:47:47,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=66828.0, ans=0.2 +2024-07-27 21:47:50,766 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 6.022e+01 6.976e+01 8.483e+01 1.676e+02, threshold=1.395e+02, percent-clipped=4.0 +2024-07-27 21:47:52,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=66841.33333333333, ans=0.2 +2024-07-27 21:47:54,597 INFO [train.py:1114] (1/4) Epoch 5, batch 9250, loss[loss=0.2417, simple_loss=0.3394, pruned_loss=0.072, over 4628.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3197, pruned_loss=0.08219, over 937913.95 frames. ], batch size: 13, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:48:06,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=66881.33333333333, ans=0.125 +2024-07-27 21:48:11,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=66881.33333333333, ans=0.0 +2024-07-27 21:48:13,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.67 vs. limit=15.0 +2024-07-27 21:48:13,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.37 vs. limit=22.5 +2024-07-27 21:48:19,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66908.0, ans=0.1 +2024-07-27 21:48:26,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=66921.33333333333, ans=0.04949747468305833 +2024-07-27 21:48:26,549 INFO [train.py:1114] (1/4) Epoch 5, batch 9300, loss[loss=0.1921, simple_loss=0.2805, pruned_loss=0.05186, over 4784.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3195, pruned_loss=0.08177, over 937559.15 frames. 
], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:48:28,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=66921.33333333333, ans=0.07 +2024-07-27 21:48:31,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=66921.33333333333, ans=0.5 +2024-07-27 21:48:33,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=66934.66666666667, ans=0.125 +2024-07-27 21:48:36,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=66934.66666666667, ans=0.025 +2024-07-27 21:48:38,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.15 vs. limit=22.5 +2024-07-27 21:48:38,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=66934.66666666667, ans=0.2 +2024-07-27 21:48:42,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-07-27 21:48:44,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=66948.0, ans=0.125 +2024-07-27 21:48:52,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=66961.33333333333, ans=0.125 +2024-07-27 21:48:59,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.113e+01 6.261e+01 7.073e+01 8.470e+01 1.590e+02, threshold=1.415e+02, percent-clipped=1.0 +2024-07-27 21:48:59,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0 +2024-07-27 21:49:02,796 INFO [train.py:1114] (1/4) Epoch 5, batch 9350, loss[loss=0.1868, simple_loss=0.2618, pruned_loss=0.05596, over 4809.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3194, pruned_loss=0.08158, over 935260.49 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:49:08,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=67001.33333333333, ans=0.0 +2024-07-27 21:49:11,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.98 vs. limit=15.0 +2024-07-27 21:49:19,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-07-27 21:49:19,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-27 21:49:31,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=67041.33333333333, ans=0.2 +2024-07-27 21:49:35,453 INFO [train.py:1114] (1/4) Epoch 5, batch 9400, loss[loss=0.2788, simple_loss=0.3616, pruned_loss=0.09801, over 4689.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3207, pruned_loss=0.08266, over 932793.18 frames. 
], batch size: 13, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:49:38,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=67054.66666666667, ans=0.0 +2024-07-27 21:49:54,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67094.66666666667, ans=0.1 +2024-07-27 21:49:55,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=67094.66666666667, ans=0.125 +2024-07-27 21:49:57,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=67094.66666666667, ans=0.0 +2024-07-27 21:50:03,178 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.980e+01 6.533e+01 7.095e+01 1.005e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-27 21:50:07,150 INFO [train.py:1114] (1/4) Epoch 5, batch 9450, loss[loss=0.231, simple_loss=0.3046, pruned_loss=0.07876, over 4813.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3202, pruned_loss=0.08285, over 932194.06 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:50:12,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=67121.33333333333, ans=0.125 +2024-07-27 21:50:13,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=67134.66666666667, ans=0.0 +2024-07-27 21:50:17,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.87 vs. limit=15.0 +2024-07-27 21:50:33,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=67174.66666666667, ans=0.125 +2024-07-27 21:50:35,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=67174.66666666667, ans=0.5 +2024-07-27 21:50:39,303 INFO [train.py:1114] (1/4) Epoch 5, batch 9500, loss[loss=0.247, simple_loss=0.3233, pruned_loss=0.08539, over 4696.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.32, pruned_loss=0.08204, over 935081.28 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:50:44,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=67188.0, ans=0.025 +2024-07-27 21:50:50,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67201.33333333333, ans=0.125 +2024-07-27 21:50:52,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.93 vs. 
limit=12.0 +2024-07-27 21:51:08,213 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.271e+01 6.540e+01 7.330e+01 8.472e+01 1.165e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 21:51:10,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67241.33333333333, ans=0.1 +2024-07-27 21:51:11,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=67254.66666666667, ans=0.125 +2024-07-27 21:51:11,964 INFO [train.py:1114] (1/4) Epoch 5, batch 9550, loss[loss=0.2652, simple_loss=0.3419, pruned_loss=0.09427, over 4775.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3193, pruned_loss=0.08182, over 932159.61 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:51:12,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.39 vs. limit=15.0 +2024-07-27 21:51:24,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=67254.66666666667, ans=0.0 +2024-07-27 21:51:33,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=67268.0, ans=0.2 +2024-07-27 21:51:35,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=67281.33333333333, ans=0.125 +2024-07-27 21:51:38,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=67281.33333333333, ans=0.09899494936611666 +2024-07-27 21:51:39,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.69 vs. limit=15.0 +2024-07-27 21:51:46,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-07-27 21:51:58,327 INFO [train.py:1114] (1/4) Epoch 5, batch 9600, loss[loss=0.2894, simple_loss=0.3403, pruned_loss=0.1193, over 3358.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.3192, pruned_loss=0.08163, over 931234.76 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:05,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.29 vs. 
limit=22.5 +2024-07-27 21:52:10,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=67334.66666666667, ans=0.09899494936611666 +2024-07-27 21:52:18,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=67348.0, ans=0.0 +2024-07-27 21:52:20,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=67348.0, ans=0.2 +2024-07-27 21:52:21,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=67348.0, ans=0.125 +2024-07-27 21:52:32,044 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.609e+01 6.845e+01 7.921e+01 9.332e+01 1.441e+02, threshold=1.584e+02, percent-clipped=0.0 +2024-07-27 21:52:32,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=67374.66666666667, ans=0.0 +2024-07-27 21:52:35,910 INFO [train.py:1114] (1/4) Epoch 5, batch 9650, loss[loss=0.2498, simple_loss=0.3252, pruned_loss=0.08721, over 4866.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3201, pruned_loss=0.0825, over 927071.20 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:41,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=15.0 +2024-07-27 21:52:46,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=67401.33333333333, ans=0.125 +2024-07-27 21:53:02,924 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.20 vs. limit=15.0 +2024-07-27 21:53:08,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67441.33333333333, ans=0.1 +2024-07-27 21:53:09,956 INFO [train.py:1114] (1/4) Epoch 5, batch 9700, loss[loss=0.294, simple_loss=0.3606, pruned_loss=0.1138, over 4178.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3209, pruned_loss=0.08321, over 925205.95 frames. 
], batch size: 25, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:22,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=67481.33333333333, ans=0.125 +2024-07-27 21:53:24,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=67481.33333333333, ans=0.0 +2024-07-27 21:53:31,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=67494.66666666667, ans=0.125 +2024-07-27 21:53:31,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=67494.66666666667, ans=0.0 +2024-07-27 21:53:33,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=67494.66666666667, ans=0.2 +2024-07-27 21:53:37,139 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.756e+01 6.215e+01 7.059e+01 7.921e+01 1.151e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 21:53:39,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=67508.0, ans=0.0 +2024-07-27 21:53:41,358 INFO [train.py:1114] (1/4) Epoch 5, batch 9750, loss[loss=0.2649, simple_loss=0.3425, pruned_loss=0.09365, over 4680.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3207, pruned_loss=0.08306, over 925777.05 frames. ], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:45,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=67521.33333333333, ans=0.05 +2024-07-27 21:53:47,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=67534.66666666667, ans=0.125 +2024-07-27 21:53:51,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=67534.66666666667, ans=0.0 +2024-07-27 21:53:55,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=67548.0, ans=15.0 +2024-07-27 21:54:00,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=67561.33333333333, ans=0.1 +2024-07-27 21:54:11,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67574.66666666667, ans=0.125 +2024-07-27 21:54:13,366 INFO [train.py:1114] (1/4) Epoch 5, batch 9800, loss[loss=0.2385, simple_loss=0.3279, pruned_loss=0.0745, over 4707.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3187, pruned_loss=0.08217, over 925572.16 frames. 
], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:21,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=67601.33333333333, ans=0.125 +2024-07-27 21:54:37,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=67641.33333333333, ans=0.0 +2024-07-27 21:54:40,551 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 6.437e+01 7.516e+01 8.874e+01 1.109e+02, threshold=1.503e+02, percent-clipped=0.0 +2024-07-27 21:54:43,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67641.33333333333, ans=0.0 +2024-07-27 21:54:44,117 INFO [train.py:1114] (1/4) Epoch 5, batch 9850, loss[loss=0.2579, simple_loss=0.3448, pruned_loss=0.08553, over 4898.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3198, pruned_loss=0.08249, over 927660.92 frames. ], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:44,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.09 vs. limit=10.0 +2024-07-27 21:54:53,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=67668.0, ans=0.125 +2024-07-27 21:55:09,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=67708.0, ans=0.125 +2024-07-27 21:55:10,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.12 vs. limit=22.5 +2024-07-27 21:55:12,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67708.0, ans=0.1 +2024-07-27 21:55:15,332 INFO [train.py:1114] (1/4) Epoch 5, batch 9900, loss[loss=0.2941, simple_loss=0.3604, pruned_loss=0.1139, over 4835.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3208, pruned_loss=0.08321, over 926941.46 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:55:16,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=67721.33333333333, ans=0.0 +2024-07-27 21:55:19,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=67721.33333333333, ans=0.125 +2024-07-27 21:55:19,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=67721.33333333333, ans=0.0 +2024-07-27 21:55:28,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=67734.66666666667, ans=0.0 +2024-07-27 21:55:44,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.621e+01 7.499e+01 8.431e+01 1.516e+02, threshold=1.500e+02, percent-clipped=1.0 +2024-07-27 21:55:47,766 INFO [train.py:1114] (1/4) Epoch 5, batch 9950, loss[loss=0.2247, simple_loss=0.2931, pruned_loss=0.07814, over 4788.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3203, pruned_loss=0.08323, over 929470.06 frames. ], batch size: 11, lr: 1.39e-02, grad_scale: 16.0 +2024-07-27 21:56:01,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.76 vs. 
limit=10.0 +2024-07-27 21:56:15,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=67828.0, ans=0.0 +2024-07-27 21:56:20,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=67841.33333333333, ans=0.2 +2024-07-27 21:56:22,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.26 vs. limit=10.0 +2024-07-27 21:56:22,649 INFO [train.py:1114] (1/4) Epoch 5, batch 10000, loss[loss=0.2273, simple_loss=0.3091, pruned_loss=0.07275, over 4605.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3219, pruned_loss=0.08382, over 927118.92 frames. ], batch size: 16, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:56:37,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=67881.33333333333, ans=0.125 +2024-07-27 21:56:44,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67894.66666666667, ans=0.1 +2024-07-27 21:56:46,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=6.0 +2024-07-27 21:56:53,464 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.323e+01 6.858e+01 7.699e+01 1.357e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 21:56:56,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67921.33333333333, ans=0.1 +2024-07-27 21:56:57,388 INFO [train.py:1114] (1/4) Epoch 5, batch 10050, loss[loss=0.292, simple_loss=0.3615, pruned_loss=0.1113, over 3189.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3261, pruned_loss=0.086, over 915165.87 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:56:58,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=67921.33333333333, ans=0.0 +2024-07-27 21:57:02,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67921.33333333333, ans=0.1 +2024-07-27 21:57:06,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.46 vs. limit=15.0 +2024-07-27 21:57:08,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67934.66666666667, ans=0.0 +2024-07-27 21:57:10,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67948.0, ans=0.125 +2024-07-27 21:57:11,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=67948.0, ans=0.125 +2024-07-27 21:57:11,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.61 vs. limit=10.0 +2024-07-27 21:57:31,358 INFO [train.py:1114] (1/4) Epoch 5, batch 10100, loss[loss=0.3084, simple_loss=0.3598, pruned_loss=0.1285, over 3176.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3327, pruned_loss=0.09318, over 862256.16 frames. 
], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:57:41,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=68001.33333333333, ans=0.025 +2024-07-27 21:58:01,135 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.071e+01 6.809e+01 7.405e+01 8.060e+01 1.302e+02, threshold=1.481e+02, percent-clipped=0.0 +2024-07-27 21:58:04,299 INFO [train.py:1114] (1/4) Epoch 5, batch 10150, loss[loss=0.3098, simple_loss=0.3718, pruned_loss=0.1239, over 3333.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.3376, pruned_loss=0.09853, over 821113.53 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:10,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=68068.0, ans=0.2 +2024-07-27 21:58:11,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68068.0, ans=0.1 +2024-07-27 21:58:14,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68068.0, ans=0.0 +2024-07-27 21:58:28,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=68094.66666666667, ans=0.125 +2024-07-27 21:58:35,557 INFO [train.py:1114] (1/4) Epoch 5, batch 10200, loss[loss=0.2816, simple_loss=0.3441, pruned_loss=0.1096, over 3089.00 frames. ], tot_loss[loss=0.2741, simple_loss=0.3417, pruned_loss=0.1033, over 787644.16 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:36,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0 +2024-07-27 21:58:44,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68134.66666666667, ans=0.1 +2024-07-27 21:58:44,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68134.66666666667, ans=0.1 +2024-07-27 21:58:44,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=68134.66666666667, ans=0.0 +2024-07-27 21:58:45,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=68134.66666666667, ans=0.125 +2024-07-27 21:59:31,812 INFO [train.py:1114] (1/4) Epoch 6, batch 0, loss[loss=0.2464, simple_loss=0.3315, pruned_loss=0.08068, over 4854.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3315, pruned_loss=0.08068, over 4854.00 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 21:59:31,813 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 21:59:41,882 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9694, 5.3063, 5.2470, 5.7994], device='cuda:1') +2024-07-27 21:59:43,347 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.203, simple_loss=0.3084, pruned_loss=0.04884, over 944034.00 frames. 
+2024-07-27 21:59:43,348 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 21:59:45,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68150.66666666667, ans=0.1
+2024-07-27 21:59:53,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=68164.0, ans=0.2
+2024-07-27 21:59:58,961 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.074e+01 6.594e+01 7.055e+01 7.805e+01 1.292e+02, threshold=1.411e+02, percent-clipped=0.0
+2024-07-27 21:59:59,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68177.33333333333, ans=0.125
+2024-07-27 21:59:59,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68177.33333333333, ans=0.125
+2024-07-27 22:00:12,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=68204.0, ans=0.125
+2024-07-27 22:00:13,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=68204.0, ans=0.125
+2024-07-27 22:00:13,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.60 vs. limit=10.0
+2024-07-27 22:00:18,875 INFO [train.py:1114] (1/4) Epoch 6, batch 50, loss[loss=0.2377, simple_loss=0.3105, pruned_loss=0.08242, over 4605.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.325, pruned_loss=0.08397, over 206724.13 frames. ], batch size: 11, lr: 1.30e-02, grad_scale: 32.0
+2024-07-27 22:00:30,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=68230.66666666667, ans=0.07
+2024-07-27 22:00:32,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.09 vs. limit=15.0
+2024-07-27 22:00:33,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=68244.0, ans=0.2
+2024-07-27 22:00:38,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68257.33333333333, ans=0.1
+2024-07-27 22:00:39,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68257.33333333333, ans=0.1
+2024-07-27 22:00:43,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=68257.33333333333, ans=0.0
+2024-07-27 22:00:46,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=68270.66666666667, ans=0.2
+2024-07-27 22:00:52,634 INFO [train.py:1114] (1/4) Epoch 6, batch 100, loss[loss=0.2035, simple_loss=0.2913, pruned_loss=0.05783, over 4634.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3264, pruned_loss=0.08467, over 365616.73 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0
+2024-07-27 22:01:09,851 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.212e+01 6.939e+01 8.250e+01 1.265e+02, threshold=1.388e+02, percent-clipped=0.0
+2024-07-27 22:01:16,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68324.0, ans=0.125
+2024-07-27 22:01:17,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=68324.0, ans=0.125
+2024-07-27 22:01:27,683 INFO [train.py:1114] (1/4) Epoch 6, batch 150, loss[loss=0.1831, simple_loss=0.2728, pruned_loss=0.0467, over 4618.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3195, pruned_loss=0.08098, over 494433.97 frames. ], batch size: 11, lr: 1.30e-02, grad_scale: 32.0
+2024-07-27 22:01:28,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=68350.66666666667, ans=0.0
+2024-07-27 22:01:35,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0
+2024-07-27 22:01:58,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68404.0, ans=0.125
+2024-07-27 22:02:05,151 INFO [train.py:1114] (1/4) Epoch 6, batch 200, loss[loss=0.2406, simple_loss=0.3268, pruned_loss=0.07721, over 4518.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3175, pruned_loss=0.07984, over 593953.89 frames. ], batch size: 21, lr: 1.30e-02, grad_scale: 32.0
+2024-07-27 22:02:06,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68417.33333333333, ans=0.1
+2024-07-27 22:02:11,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=68430.66666666667, ans=0.2
+2024-07-27 22:02:15,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0
+2024-07-27 22:02:17,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=68430.66666666667, ans=0.125
+2024-07-27 22:02:17,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=68444.0, ans=0.025
+2024-07-27 22:02:20,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.270e+01 7.736e+01 9.618e+01 1.930e+02, threshold=1.547e+02, percent-clipped=5.0
+2024-07-27 22:02:20,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.63 vs. limit=22.5
+2024-07-27 22:02:21,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=68444.0, ans=0.2
+2024-07-27 22:02:22,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0
+2024-07-27 22:02:30,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68457.33333333333, ans=0.125
+2024-07-27 22:02:32,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68470.66666666667, ans=0.125
+2024-07-27 22:02:38,662 INFO [train.py:1114] (1/4) Epoch 6, batch 250, loss[loss=0.2601, simple_loss=0.3367, pruned_loss=0.09176, over 4616.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3162, pruned_loss=0.07899, over 670531.28 frames. ], batch size: 16, lr: 1.30e-02, grad_scale: 32.0
+2024-07-27 22:02:40,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68484.0, ans=0.125
+2024-07-27 22:02:47,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=68497.33333333333, ans=0.2
+2024-07-27 22:02:56,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=68510.66666666667, ans=0.2
+2024-07-27 22:02:58,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=68510.66666666667, ans=0.5
+2024-07-27 22:03:00,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.82 vs. limit=15.0
+2024-07-27 22:03:00,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=68524.0, ans=0.02
+2024-07-27 22:03:00,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68524.0, ans=0.125
+2024-07-27 22:03:06,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68537.33333333333, ans=0.1
+2024-07-27 22:03:08,938 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:03:10,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=68537.33333333333, ans=0.0
+2024-07-27 22:03:14,322 INFO [train.py:1114] (1/4) Epoch 6, batch 300, loss[loss=0.2018, simple_loss=0.2926, pruned_loss=0.05546, over 4808.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3154, pruned_loss=0.07909, over 730084.68 frames. ], batch size: 15, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:03:29,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=68577.33333333333, ans=0.125
+2024-07-27 22:03:29,802 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.141e+01 6.927e+01 8.037e+01 1.226e+02, threshold=1.385e+02, percent-clipped=0.0
+2024-07-27 22:03:33,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=68577.33333333333, ans=10.0
+2024-07-27 22:03:33,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=15.0
+2024-07-27 22:03:33,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.78 vs. limit=15.0
+2024-07-27 22:03:37,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=68590.66666666667, ans=0.125
+2024-07-27 22:03:43,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68604.0, ans=0.1
+2024-07-27 22:03:49,906 INFO [train.py:1114] (1/4) Epoch 6, batch 350, loss[loss=0.2228, simple_loss=0.2884, pruned_loss=0.07858, over 4936.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3172, pruned_loss=0.07987, over 776269.95 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:03:50,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=68617.33333333333, ans=0.125
+2024-07-27 22:03:54,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=68617.33333333333, ans=0.0
+2024-07-27 22:04:02,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=68630.66666666667, ans=0.0
+2024-07-27 22:04:11,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=68657.33333333333, ans=0.125
+2024-07-27 22:04:15,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=68657.33333333333, ans=0.125
+2024-07-27 22:04:16,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=68670.66666666667, ans=0.125
+2024-07-27 22:04:23,112 INFO [train.py:1114] (1/4) Epoch 6, batch 400, loss[loss=0.2267, simple_loss=0.305, pruned_loss=0.07419, over 4692.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3164, pruned_loss=0.07884, over 813595.83 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:04:35,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68697.33333333333, ans=0.125
+2024-07-27 22:04:42,395 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 6.252e+01 7.226e+01 8.425e+01 1.439e+02, threshold=1.445e+02, percent-clipped=1.0
+2024-07-27 22:04:46,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0
+2024-07-27 22:05:00,649 INFO [train.py:1114] (1/4) Epoch 6, batch 450, loss[loss=0.2366, simple_loss=0.3136, pruned_loss=0.07976, over 4636.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3168, pruned_loss=0.07978, over 839129.84 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:05:06,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=68764.0, ans=0.2
+2024-07-27 22:05:10,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=68764.0, ans=0.2
+2024-07-27 22:05:19,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=68777.33333333333, ans=0.125
+2024-07-27 22:05:20,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=68790.66666666667, ans=0.125
+2024-07-27 22:05:33,917 INFO [train.py:1114] (1/4) Epoch 6, batch 500, loss[loss=0.2985, simple_loss=0.3678, pruned_loss=0.1146, over 4682.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3156, pruned_loss=0.07918, over 861284.12 frames. ], batch size: 15, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:05:33,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=68817.33333333333, ans=0.2
+2024-07-27 22:05:36,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68817.33333333333, ans=0.1
+2024-07-27 22:05:49,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68844.0, ans=0.1
+2024-07-27 22:05:51,015 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 6.191e+01 6.809e+01 7.735e+01 1.328e+02, threshold=1.362e+02, percent-clipped=0.0
+2024-07-27 22:05:53,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68844.0, ans=0.125
+2024-07-27 22:05:55,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=68857.33333333333, ans=0.2
+2024-07-27 22:06:09,499 INFO [train.py:1114] (1/4) Epoch 6, batch 550, loss[loss=0.2256, simple_loss=0.3152, pruned_loss=0.068, over 4592.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3164, pruned_loss=0.07923, over 877375.45 frames. ], batch size: 17, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:06:20,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68897.33333333333, ans=0.1
+2024-07-27 22:06:28,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=68910.66666666667, ans=0.025
+2024-07-27 22:06:32,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=68924.0, ans=0.125
+2024-07-27 22:06:34,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=68924.0, ans=0.125
+2024-07-27 22:06:37,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.88 vs. limit=6.0
+2024-07-27 22:06:39,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=68937.33333333333, ans=0.0
+2024-07-27 22:06:43,638 INFO [train.py:1114] (1/4) Epoch 6, batch 600, loss[loss=0.2384, simple_loss=0.3117, pruned_loss=0.08253, over 4633.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3166, pruned_loss=0.07928, over 892070.36 frames. ], batch size: 16, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:06:49,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0
+2024-07-27 22:06:53,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68964.0, ans=0.0
+2024-07-27 22:06:56,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=68964.0, ans=0.125
+2024-07-27 22:06:58,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=68977.33333333333, ans=0.125
+2024-07-27 22:07:00,810 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.226e+01 6.771e+01 7.767e+01 1.130e+02, threshold=1.354e+02, percent-clipped=0.0
+2024-07-27 22:07:02,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=68977.33333333333, ans=0.0
+2024-07-27 22:07:10,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=68990.66666666667, ans=0.125
+2024-07-27 22:07:11,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69004.0, ans=0.1
+2024-07-27 22:07:18,951 INFO [train.py:1114] (1/4) Epoch 6, batch 650, loss[loss=0.2056, simple_loss=0.2886, pruned_loss=0.06129, over 4757.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3142, pruned_loss=0.07798, over 903745.42 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:07:26,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69030.66666666667, ans=0.125
+2024-07-27 22:07:34,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69044.0, ans=0.1
+2024-07-27 22:07:52,605 INFO [train.py:1114] (1/4) Epoch 6, batch 700, loss[loss=0.2426, simple_loss=0.3342, pruned_loss=0.07553, over 4645.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3145, pruned_loss=0.07782, over 911822.19 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:07:56,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69084.0, ans=0.1
+2024-07-27 22:08:07,874 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.900e+01 6.634e+01 8.042e+01 1.194e+02, threshold=1.327e+02, percent-clipped=0.0
+2024-07-27 22:08:11,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=69110.66666666667, ans=0.0
+2024-07-27 22:08:12,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=69124.0, ans=0.1
+2024-07-27 22:08:15,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0
+2024-07-27 22:08:19,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.91 vs. limit=10.0
+2024-07-27 22:08:27,947 INFO [train.py:1114] (1/4) Epoch 6, batch 750, loss[loss=0.231, simple_loss=0.3064, pruned_loss=0.07783, over 4696.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3133, pruned_loss=0.07719, over 917710.72 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:08:31,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=69150.66666666667, ans=0.125
+2024-07-27 22:08:33,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=69150.66666666667, ans=0.0
+2024-07-27 22:08:51,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69190.66666666667, ans=0.125
+2024-07-27 22:08:52,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69190.66666666667, ans=0.1
+2024-07-27 22:09:00,970 INFO [train.py:1114] (1/4) Epoch 6, batch 800, loss[loss=0.2041, simple_loss=0.2812, pruned_loss=0.06347, over 4847.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3133, pruned_loss=0.07781, over 922598.27 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:09:18,018 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.094e+01 6.022e+01 6.607e+01 7.761e+01 1.209e+02, threshold=1.321e+02, percent-clipped=0.0
+2024-07-27 22:09:22,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=69257.33333333333, ans=0.0
+2024-07-27 22:09:29,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=69257.33333333333, ans=0.0
+2024-07-27 22:09:31,096 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.70 vs. limit=12.0
+2024-07-27 22:09:37,854 INFO [train.py:1114] (1/4) Epoch 6, batch 850, loss[loss=0.2266, simple_loss=0.3108, pruned_loss=0.07119, over 4655.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3124, pruned_loss=0.07755, over 926893.64 frames. ], batch size: 14, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:09:49,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69297.33333333333, ans=0.1
+2024-07-27 22:09:49,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=69297.33333333333, ans=0.125
+2024-07-27 22:09:51,162 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:09:52,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=69310.66666666667, ans=0.125
+2024-07-27 22:09:59,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.63 vs. limit=22.5
+2024-07-27 22:10:15,124 INFO [train.py:1114] (1/4) Epoch 6, batch 900, loss[loss=0.2479, simple_loss=0.3098, pruned_loss=0.09297, over 4849.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3143, pruned_loss=0.07896, over 928061.29 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:10:15,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=69350.66666666667, ans=0.025
+2024-07-27 22:10:19,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=69350.66666666667, ans=0.2
+2024-07-27 22:10:29,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.17 vs. limit=6.0
+2024-07-27 22:10:30,381 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.354e+01 7.008e+01 8.406e+01 1.301e+02, threshold=1.402e+02, percent-clipped=0.0
+2024-07-27 22:10:31,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69377.33333333333, ans=0.125
+2024-07-27 22:10:32,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=69377.33333333333, ans=0.125
+2024-07-27 22:10:46,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=69404.0, ans=0.125
+2024-07-27 22:10:48,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=69417.33333333333, ans=0.0
+2024-07-27 22:10:48,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.39 vs. limit=10.0
+2024-07-27 22:10:48,668 INFO [train.py:1114] (1/4) Epoch 6, batch 950, loss[loss=0.218, simple_loss=0.2968, pruned_loss=0.0696, over 4775.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3142, pruned_loss=0.07827, over 929608.47 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:10:49,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=69417.33333333333, ans=0.0
+2024-07-27 22:10:56,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=69430.66666666667, ans=0.125
+2024-07-27 22:10:58,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=69430.66666666667, ans=0.0
+2024-07-27 22:11:23,636 INFO [train.py:1114] (1/4) Epoch 6, batch 1000, loss[loss=0.2327, simple_loss=0.3073, pruned_loss=0.07906, over 4970.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3145, pruned_loss=0.0785, over 930075.42 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:11:26,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=69484.0, ans=0.0
+2024-07-27 22:11:31,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69497.33333333333, ans=0.1
+2024-07-27 22:11:36,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.19 vs. limit=22.5
+2024-07-27 22:11:36,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=69510.66666666667, ans=0.0
+2024-07-27 22:11:37,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=69510.66666666667, ans=0.0
+2024-07-27 22:11:39,439 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+01 6.209e+01 6.779e+01 8.145e+01 1.211e+02, threshold=1.356e+02, percent-clipped=0.0
+2024-07-27 22:11:39,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=69510.66666666667, ans=0.2
+2024-07-27 22:11:40,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=69510.66666666667, ans=0.125
+2024-07-27 22:11:42,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69510.66666666667, ans=0.1
+2024-07-27 22:11:43,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=69524.0, ans=0.125
+2024-07-27 22:11:43,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=69524.0, ans=10.0
+2024-07-27 22:11:48,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=69524.0, ans=0.0
+2024-07-27 22:11:57,383 INFO [train.py:1114] (1/4) Epoch 6, batch 1050, loss[loss=0.2364, simple_loss=0.3264, pruned_loss=0.07326, over 4872.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3137, pruned_loss=0.07814, over 932147.94 frames. ], batch size: 14, lr: 1.29e-02, grad_scale: 32.0
+2024-07-27 22:12:04,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=69564.0, ans=0.125
+2024-07-27 22:12:06,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=69564.0, ans=0.125
+2024-07-27 22:12:18,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=69577.33333333333, ans=0.0
+2024-07-27 22:12:32,787 INFO [train.py:1114] (1/4) Epoch 6, batch 1100, loss[loss=0.2857, simple_loss=0.3518, pruned_loss=0.1098, over 4896.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3133, pruned_loss=0.0781, over 934526.41 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:12:35,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=69617.33333333333, ans=0.125
+2024-07-27 22:12:38,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=69630.66666666667, ans=0.2
+2024-07-27 22:12:38,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=69630.66666666667, ans=0.125
+2024-07-27 22:12:38,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=69630.66666666667, ans=0.025
+2024-07-27 22:12:40,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=69630.66666666667, ans=0.0
+2024-07-27 22:12:47,498 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:12:48,096 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.350e+01 6.961e+01 9.139e+01, threshold=1.270e+02, percent-clipped=0.0
+2024-07-27 22:12:48,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=69644.0, ans=0.025
+2024-07-27 22:13:00,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=69670.66666666667, ans=0.125
+2024-07-27 22:13:05,921 INFO [train.py:1114] (1/4) Epoch 6, batch 1150, loss[loss=0.233, simple_loss=0.3205, pruned_loss=0.07269, over 4893.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3127, pruned_loss=0.07774, over 933893.10 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:13:16,283 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:13:19,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=69710.66666666667, ans=0.125
+2024-07-27 22:13:21,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.74 vs. limit=15.0
+2024-07-27 22:13:24,971 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:13:50,299 INFO [train.py:1114] (1/4) Epoch 6, batch 1200, loss[loss=0.2567, simple_loss=0.3542, pruned_loss=0.07958, over 4875.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.315, pruned_loss=0.07843, over 933038.97 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:13:51,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=69750.66666666667, ans=0.125
+2024-07-27 22:14:07,403 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.960e+01 6.565e+01 7.380e+01 1.067e+02, threshold=1.313e+02, percent-clipped=0.0
+2024-07-27 22:14:23,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0
+2024-07-27 22:14:25,334 INFO [train.py:1114] (1/4) Epoch 6, batch 1250, loss[loss=0.2205, simple_loss=0.3053, pruned_loss=0.06788, over 4797.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.315, pruned_loss=0.07797, over 937296.03 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:14:28,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=69817.33333333333, ans=0.125
+2024-07-27 22:14:32,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=69830.66666666667, ans=10.0
+2024-07-27 22:14:33,277 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=15.0
+2024-07-27 22:14:40,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69844.0, ans=0.1
+2024-07-27 22:14:51,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=69870.66666666667, ans=0.0
+2024-07-27 22:14:58,321 INFO [train.py:1114] (1/4) Epoch 6, batch 1300, loss[loss=0.2715, simple_loss=0.3471, pruned_loss=0.09791, over 4733.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3143, pruned_loss=0.07785, over 938663.53 frames. ], batch size: 19, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:15:04,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=69884.0, ans=0.95
+2024-07-27 22:15:07,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.99 vs. limit=22.5
+2024-07-27 22:15:15,337 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.914e+01 6.589e+01 7.357e+01 1.015e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-27 22:15:16,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=69910.66666666667, ans=0.125
+2024-07-27 22:15:18,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=69910.66666666667, ans=0.0
+2024-07-27 22:15:20,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.62 vs. limit=15.0
+2024-07-27 22:15:25,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=69924.0, ans=0.0
+2024-07-27 22:15:27,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.45 vs. limit=10.0
+2024-07-27 22:15:33,792 INFO [train.py:1114] (1/4) Epoch 6, batch 1350, loss[loss=0.2486, simple_loss=0.3262, pruned_loss=0.08545, over 4757.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3128, pruned_loss=0.07691, over 940954.61 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:15:34,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=69950.66666666667, ans=0.0
+2024-07-27 22:15:37,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=69950.66666666667, ans=0.0
+2024-07-27 22:15:42,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0
+2024-07-27 22:15:44,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=69964.0, ans=0.125
+2024-07-27 22:15:55,631 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:15:56,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=69990.66666666667, ans=0.125
+2024-07-27 22:16:01,712 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.64 vs. limit=15.0
+2024-07-27 22:16:07,350 INFO [train.py:1114] (1/4) Epoch 6, batch 1400, loss[loss=0.1756, simple_loss=0.2505, pruned_loss=0.05031, over 4701.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3126, pruned_loss=0.07729, over 942874.85 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:16:09,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70017.33333333333, ans=0.1
+2024-07-27 22:16:13,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70030.66666666667, ans=0.125
+2024-07-27 22:16:14,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.88 vs. limit=22.5
+2024-07-27 22:16:16,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=70030.66666666667, ans=0.1
+2024-07-27 22:16:22,913 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 6.116e+01 6.900e+01 7.787e+01 1.307e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-27 22:17:12,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0
+2024-07-27 22:17:18,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70057.33333333333, ans=0.1
+2024-07-27 22:17:26,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70070.66666666667, ans=0.1
+2024-07-27 22:17:26,689 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0
+2024-07-27 22:17:28,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=70084.0, ans=0.2
+2024-07-27 22:17:29,168 INFO [train.py:1114] (1/4) Epoch 6, batch 1450, loss[loss=0.2555, simple_loss=0.3398, pruned_loss=0.08563, over 4705.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3132, pruned_loss=0.07746, over 942911.64 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:17:36,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=70097.33333333333, ans=0.2
+2024-07-27 22:17:38,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0
+2024-07-27 22:17:50,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=70124.0, ans=0.125
+2024-07-27 22:18:03,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=70150.66666666667, ans=0.2
+2024-07-27 22:18:04,305 INFO [train.py:1114] (1/4) Epoch 6, batch 1500, loss[loss=0.2392, simple_loss=0.3251, pruned_loss=0.07668, over 4807.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3146, pruned_loss=0.07799, over 942351.07 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:18:07,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.36 vs. limit=10.0
+2024-07-27 22:18:20,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.883e+01 6.851e+01 7.584e+01 1.194e+02, threshold=1.370e+02, percent-clipped=0.0
+2024-07-27 22:18:22,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70177.33333333333, ans=0.125
+2024-07-27 22:18:29,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=70190.66666666667, ans=0.0
+2024-07-27 22:18:40,400 INFO [train.py:1114] (1/4) Epoch 6, batch 1550, loss[loss=0.2393, simple_loss=0.3208, pruned_loss=0.07884, over 4903.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3147, pruned_loss=0.07794, over 938642.41 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:18:45,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=70217.33333333333, ans=0.125
+2024-07-27 22:18:45,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=70217.33333333333, ans=0.0
+2024-07-27 22:18:47,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70230.66666666667, ans=0.1
+2024-07-27 22:18:48,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=70230.66666666667, ans=0.0
+2024-07-27 22:18:51,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=70230.66666666667, ans=0.035
+2024-07-27 22:18:57,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.91 vs. limit=15.0
+2024-07-27 22:19:11,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=70270.66666666667, ans=0.2
+2024-07-27 22:19:12,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=70284.0, ans=0.0
+2024-07-27 22:19:13,500 INFO [train.py:1114] (1/4) Epoch 6, batch 1600, loss[loss=0.1996, simple_loss=0.2892, pruned_loss=0.05498, over 4880.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3144, pruned_loss=0.07809, over 937070.75 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:19:29,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=70310.66666666667, ans=0.125
+2024-07-27 22:19:31,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.952e+01 6.615e+01 7.870e+01 9.186e+01 1.944e+02, threshold=1.574e+02, percent-clipped=2.0
+2024-07-27 22:19:46,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=70337.33333333333, ans=0.125
+2024-07-27 22:19:48,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=70337.33333333333, ans=0.125
+2024-07-27 22:19:49,371 INFO [train.py:1114] (1/4) Epoch 6, batch 1650, loss[loss=0.2541, simple_loss=0.3359, pruned_loss=0.08614, over 4655.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3134, pruned_loss=0.07737, over 937093.78 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:19:54,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.09 vs. limit=22.5
+2024-07-27 22:19:58,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=70364.0, ans=0.125
+2024-07-27 22:20:20,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70404.0, ans=0.1
+2024-07-27 22:20:23,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=70404.0, ans=0.09899494936611666
+2024-07-27 22:20:24,472 INFO [train.py:1114] (1/4) Epoch 6, batch 1700, loss[loss=0.1839, simple_loss=0.2639, pruned_loss=0.05196, over 4711.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3127, pruned_loss=0.07722, over 938901.04 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:20:25,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=70417.33333333333, ans=0.2
+2024-07-27 22:20:28,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=70417.33333333333, ans=0.0
+2024-07-27 22:20:36,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=70430.66666666667, ans=0.125
+2024-07-27 22:20:39,545 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 6.237e+01 7.615e+01 9.161e+01 1.409e+02, threshold=1.523e+02, percent-clipped=0.0
+2024-07-27 22:20:48,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=70457.33333333333, ans=0.07
+2024-07-27 22:20:49,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.55 vs. limit=22.5
+2024-07-27 22:20:58,125 INFO [train.py:1114] (1/4) Epoch 6, batch 1750, loss[loss=0.2051, simple_loss=0.2719, pruned_loss=0.06912, over 4814.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3127, pruned_loss=0.07754, over 939693.39 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 64.0
+2024-07-27 22:21:07,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70497.33333333333, ans=0.0
+2024-07-27 22:21:28,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=15.0
+2024-07-27 22:21:31,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=70537.33333333333, ans=0.0
+2024-07-27 22:21:37,248 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:21:38,394 INFO [train.py:1114] (1/4) Epoch 6, batch 1800, loss[loss=0.2472, simple_loss=0.3434, pruned_loss=0.07548, over 4639.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3135, pruned_loss=0.07741, over 940375.64 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:21:47,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=70564.0, ans=0.09899494936611666
+2024-07-27 22:21:48,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.66 vs. limit=10.0
+2024-07-27 22:21:54,694 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 6.220e+01 7.110e+01 8.756e+01 1.676e+02, threshold=1.422e+02, percent-clipped=1.0
+2024-07-27 22:22:00,344 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.12 vs. limit=12.0
+2024-07-27 22:22:01,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=12.0
+2024-07-27 22:22:07,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=70604.0, ans=0.125
+2024-07-27 22:22:11,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.05 vs. limit=15.0
+2024-07-27 22:22:12,104 INFO [train.py:1114] (1/4) Epoch 6, batch 1850, loss[loss=0.2272, simple_loss=0.3086, pruned_loss=0.07292, over 4812.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3128, pruned_loss=0.07654, over 940066.82 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:22:14,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=70617.33333333333, ans=0.0
+2024-07-27 22:22:20,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.18 vs. limit=15.0
+2024-07-27 22:22:21,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=70630.66666666667, ans=0.125
+2024-07-27 22:22:21,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=70630.66666666667, ans=0.2
+2024-07-27 22:22:23,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=70630.66666666667, ans=0.125
+2024-07-27 22:22:45,973 INFO [train.py:1114] (1/4) Epoch 6, batch 1900, loss[loss=0.2495, simple_loss=0.3431, pruned_loss=0.078, over 4668.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3147, pruned_loss=0.07713, over 941575.93 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:22:54,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=70697.33333333333, ans=0.0
+2024-07-27 22:22:58,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=70697.33333333333, ans=0.2
+2024-07-27 22:22:58,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=70710.66666666667, ans=0.05
+2024-07-27 22:22:59,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70710.66666666667, ans=0.125
+2024-07-27 22:23:01,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=70710.66666666667, ans=0.0
+2024-07-27 22:23:01,911 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.167e+01 7.357e+01 8.960e+01 1.368e+02, threshold=1.471e+02, percent-clipped=0.0
+2024-07-27 22:23:11,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=70724.0, ans=6.0
+2024-07-27 22:23:13,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0
+2024-07-27 22:23:23,064 INFO [train.py:1114] (1/4) Epoch 6, batch 1950, loss[loss=0.2298, simple_loss=0.3012, pruned_loss=0.07918, over 4901.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3162, pruned_loss=0.0778, over 943809.91 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:23:23,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=70750.66666666667, ans=0.09899494936611666
+2024-07-27 22:23:29,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=70764.0, ans=0.0
+2024-07-27 22:23:33,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=14.91 vs. limit=15.0
+2024-07-27 22:23:41,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70777.33333333333, ans=0.125
+2024-07-27 22:23:48,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70790.66666666667, ans=0.1
+2024-07-27 22:23:56,789 INFO [train.py:1114] (1/4) Epoch 6, batch 2000, loss[loss=0.1936, simple_loss=0.2666, pruned_loss=0.06025, over 4800.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3172, pruned_loss=0.07868, over 941336.54 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:23:59,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=70817.33333333333, ans=15.0
+2024-07-27 22:24:13,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70844.0, ans=0.125
+2024-07-27 22:24:15,302 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.102e+01 6.803e+01 8.517e+01 1.833e+02, threshold=1.361e+02, percent-clipped=3.0
+2024-07-27 22:24:24,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=70857.33333333333, ans=0.0
+2024-07-27 22:24:32,964 INFO [train.py:1114] (1/4) Epoch 6, batch 2050, loss[loss=0.2375, simple_loss=0.3184, pruned_loss=0.07833, over 4619.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3157, pruned_loss=0.07833, over 939349.23 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:24:36,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=70884.0, ans=0.125
+2024-07-27 22:24:37,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=70884.0, ans=0.125
+2024-07-27 22:24:43,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=70897.33333333333, ans=0.125
+2024-07-27 22:24:47,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70910.66666666667, ans=0.1
+2024-07-27 22:24:47,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=70910.66666666667, ans=0.2
+2024-07-27 22:24:49,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=70910.66666666667, ans=0.09899494936611666
+2024-07-27 22:24:56,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=70924.0, ans=0.0
+2024-07-27 22:24:59,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=70937.33333333333, ans=0.0
+2024-07-27 22:25:04,546 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:25:07,023 INFO [train.py:1114] (1/4) Epoch 6, batch 2100, loss[loss=0.2082, simple_loss=0.3024, pruned_loss=0.05705, over 4768.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3152, pruned_loss=0.07798, over 941385.20 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:25:09,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=70950.66666666667, ans=0.0
+2024-07-27 22:25:22,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70977.33333333333, ans=0.1
+2024-07-27 22:25:23,036 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.37 vs. limit=22.5
+2024-07-27 22:25:23,160 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 6.024e+01 6.945e+01 8.681e+01 1.626e+02, threshold=1.389e+02, percent-clipped=3.0
+2024-07-27 22:25:26,584 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:25:32,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=70990.66666666667, ans=0.125
+2024-07-27 22:25:38,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.69 vs. limit=10.0
+2024-07-27 22:25:40,254 INFO [train.py:1114] (1/4) Epoch 6, batch 2150, loss[loss=0.2373, simple_loss=0.3167, pruned_loss=0.07897, over 4895.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3146, pruned_loss=0.07777, over 944402.16 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:25:42,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.63 vs. limit=15.0
+2024-07-27 22:25:45,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.51 vs. limit=15.0
+2024-07-27 22:25:49,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=71030.66666666667, ans=0.0
+2024-07-27 22:25:55,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=71044.0, ans=0.125
+2024-07-27 22:26:10,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=71070.66666666667, ans=0.125
+2024-07-27 22:26:13,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=71070.66666666667, ans=0.125
+2024-07-27 22:26:14,877 INFO [train.py:1114] (1/4) Epoch 6, batch 2200, loss[loss=0.2199, simple_loss=0.3042, pruned_loss=0.0678, over 4813.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3144, pruned_loss=0.07725, over 943266.21 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:26:18,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=71084.0, ans=0.125
+2024-07-27 22:26:25,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=71097.33333333333, ans=0.0
+2024-07-27 22:26:30,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.032e+01 5.977e+01 6.533e+01 7.474e+01 1.096e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-27 22:26:33,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=71110.66666666667, ans=0.125
+2024-07-27 22:26:47,957 INFO [train.py:1114] (1/4) Epoch 6, batch 2250, loss[loss=0.1875, simple_loss=0.2685, pruned_loss=0.05325, over 4691.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3138, pruned_loss=0.07698, over 941797.17 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:26:54,807 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:26:59,638 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0
+2024-07-27 22:27:04,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0
+2024-07-27 22:27:08,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.58 vs. limit=15.0
+2024-07-27 22:27:10,270 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.34 vs. limit=12.0
+2024-07-27 22:27:19,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71204.0, ans=0.125
+2024-07-27 22:27:21,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=71204.0, ans=0.2
+2024-07-27 22:27:23,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=71217.33333333333, ans=0.125
+2024-07-27 22:27:24,049 INFO [train.py:1114] (1/4) Epoch 6, batch 2300, loss[loss=0.2045, simple_loss=0.2733, pruned_loss=0.06782, over 4941.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3113, pruned_loss=0.07606, over 940011.06 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:27:30,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=71230.66666666667, ans=0.1
+2024-07-27 22:27:39,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.045e+01 6.668e+01 7.674e+01 1.080e+02, threshold=1.334e+02, percent-clipped=0.0
+2024-07-27 22:27:47,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71257.33333333333, ans=0.125
+2024-07-27 22:27:48,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.69 vs. limit=15.0
+2024-07-27 22:27:50,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71270.66666666667, ans=0.1
+2024-07-27 22:27:55,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.81 vs. limit=15.0
+2024-07-27 22:27:57,311 INFO [train.py:1114] (1/4) Epoch 6, batch 2350, loss[loss=0.22, simple_loss=0.3019, pruned_loss=0.06903, over 4635.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3111, pruned_loss=0.07621, over 941968.71 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:28:01,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=71284.0, ans=0.0
+2024-07-27 22:28:10,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=71310.66666666667, ans=0.2
+2024-07-27 22:28:12,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71310.66666666667, ans=0.1
+2024-07-27 22:28:30,516 INFO [train.py:1114] (1/4) Epoch 6, batch 2400, loss[loss=0.2083, simple_loss=0.2878, pruned_loss=0.06436, over 4639.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3119, pruned_loss=0.07624, over 941514.75 frames. 
], batch size: 12, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:28:30,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71350.66666666667, ans=0.1 +2024-07-27 22:28:31,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=71350.66666666667, ans=0.0 +2024-07-27 22:28:32,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=71350.66666666667, ans=0.0 +2024-07-27 22:28:42,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0 +2024-07-27 22:28:43,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=71364.0, ans=0.125 +2024-07-27 22:28:48,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.375e+01 7.289e+01 8.298e+01 1.037e+02, threshold=1.458e+02, percent-clipped=0.0 +2024-07-27 22:28:50,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.50 vs. limit=22.5 +2024-07-27 22:28:53,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=71390.66666666667, ans=0.2 +2024-07-27 22:28:54,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=71390.66666666667, ans=0.125 +2024-07-27 22:29:02,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=71404.0, ans=0.125 +2024-07-27 22:29:04,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=71404.0, ans=0.05 +2024-07-27 22:29:05,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=71417.33333333333, ans=0.04949747468305833 +2024-07-27 22:29:05,457 INFO [train.py:1114] (1/4) Epoch 6, batch 2450, loss[loss=0.2219, simple_loss=0.311, pruned_loss=0.06639, over 4691.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3139, pruned_loss=0.07702, over 937378.10 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:29:07,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=71417.33333333333, ans=0.0 +2024-07-27 22:29:17,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.79 vs. limit=10.0 +2024-07-27 22:29:22,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-27 22:29:27,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.33 vs. 
limit=22.5 +2024-07-27 22:29:28,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=71457.33333333333, ans=0.0 +2024-07-27 22:29:40,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=71484.0, ans=0.02 +2024-07-27 22:29:40,987 INFO [train.py:1114] (1/4) Epoch 6, batch 2500, loss[loss=0.2328, simple_loss=0.3261, pruned_loss=0.06969, over 4813.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.314, pruned_loss=0.07684, over 939266.74 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:29:50,961 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:29:53,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=71510.66666666667, ans=0.0 +2024-07-27 22:29:56,875 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.200e+01 6.677e+01 7.747e+01 1.498e+02, threshold=1.335e+02, percent-clipped=1.0 +2024-07-27 22:30:01,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71524.0, ans=0.125 +2024-07-27 22:30:10,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71537.33333333333, ans=0.1 +2024-07-27 22:30:13,416 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.59 vs. limit=12.0 +2024-07-27 22:30:14,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71550.66666666667, ans=0.125 +2024-07-27 22:30:14,581 INFO [train.py:1114] (1/4) Epoch 6, batch 2550, loss[loss=0.224, simple_loss=0.2994, pruned_loss=0.07427, over 4802.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3128, pruned_loss=0.07608, over 938603.04 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:30:15,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=71550.66666666667, ans=0.0 +2024-07-27 22:30:22,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71564.0, ans=0.0 +2024-07-27 22:30:29,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71577.33333333333, ans=0.125 +2024-07-27 22:30:31,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=71577.33333333333, ans=0.125 +2024-07-27 22:30:37,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71590.66666666667, ans=0.0 +2024-07-27 22:30:45,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.65 vs. limit=15.0 +2024-07-27 22:30:45,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.51 vs. limit=15.0 +2024-07-27 22:30:47,948 INFO [train.py:1114] (1/4) Epoch 6, batch 2600, loss[loss=0.2394, simple_loss=0.3122, pruned_loss=0.08332, over 4895.00 frames. 
], tot_loss[loss=0.2332, simple_loss=0.3136, pruned_loss=0.07641, over 937704.09 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:30:49,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=71617.33333333333, ans=0.0 +2024-07-27 22:30:53,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=71617.33333333333, ans=10.0 +2024-07-27 22:30:55,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=71630.66666666667, ans=0.2 +2024-07-27 22:31:05,412 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 6.119e+01 7.086e+01 8.200e+01 1.372e+02, threshold=1.417e+02, percent-clipped=1.0 +2024-07-27 22:31:07,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.97 vs. limit=15.0 +2024-07-27 22:31:07,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=71644.0, ans=0.0 +2024-07-27 22:31:15,615 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:31:22,648 INFO [train.py:1114] (1/4) Epoch 6, batch 2650, loss[loss=0.2359, simple_loss=0.323, pruned_loss=0.07436, over 4625.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3145, pruned_loss=0.07749, over 939772.79 frames. ], batch size: 16, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:31:35,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=71710.66666666667, ans=10.0 +2024-07-27 22:31:38,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.23 vs. limit=22.5 +2024-07-27 22:31:54,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=71737.33333333333, ans=15.0 +2024-07-27 22:31:56,598 INFO [train.py:1114] (1/4) Epoch 6, batch 2700, loss[loss=0.2126, simple_loss=0.3083, pruned_loss=0.05844, over 4741.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3152, pruned_loss=0.07804, over 939780.94 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0 +2024-07-27 22:32:05,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.25 vs. limit=15.0 +2024-07-27 22:32:11,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=71777.33333333333, ans=0.025 +2024-07-27 22:32:12,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.944e+01 6.253e+01 7.142e+01 8.501e+01 1.377e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 22:32:14,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=71777.33333333333, ans=0.125 +2024-07-27 22:32:31,805 INFO [train.py:1114] (1/4) Epoch 6, batch 2750, loss[loss=0.2227, simple_loss=0.2877, pruned_loss=0.07884, over 4711.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3126, pruned_loss=0.07676, over 939646.39 frames. 
], batch size: 12, lr: 1.27e-02, grad_scale: 16.0 +2024-07-27 22:32:40,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71830.66666666667, ans=0.1 +2024-07-27 22:32:49,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=71844.0, ans=0.0 +2024-07-27 22:32:58,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=71870.66666666667, ans=0.015 +2024-07-27 22:32:58,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71870.66666666667, ans=0.125 +2024-07-27 22:33:05,086 INFO [train.py:1114] (1/4) Epoch 6, batch 2800, loss[loss=0.3364, simple_loss=0.3848, pruned_loss=0.144, over 3272.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3127, pruned_loss=0.07732, over 937680.63 frames. ], batch size: 36, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:33:11,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=71884.0, ans=15.0 +2024-07-27 22:33:12,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=15.0 +2024-07-27 22:33:12,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=71897.33333333333, ans=0.125 +2024-07-27 22:33:22,310 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.346e+01 7.274e+01 8.194e+01 1.245e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-27 22:33:26,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71924.0, ans=0.1 +2024-07-27 22:33:29,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=71924.0, ans=0.125 +2024-07-27 22:33:36,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.64 vs. limit=22.5 +2024-07-27 22:33:37,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=71937.33333333333, ans=0.125 +2024-07-27 22:33:38,922 INFO [train.py:1114] (1/4) Epoch 6, batch 2850, loss[loss=0.2386, simple_loss=0.3142, pruned_loss=0.08154, over 4969.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3144, pruned_loss=0.07822, over 936296.94 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:33:39,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=71950.66666666667, ans=0.125 +2024-07-27 22:33:42,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.70 vs. limit=12.0 +2024-07-27 22:33:49,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=71964.0, ans=0.125 +2024-07-27 22:33:53,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.15 vs. 
limit=15.0 +2024-07-27 22:33:58,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=71990.66666666667, ans=0.0 +2024-07-27 22:34:14,078 INFO [train.py:1114] (1/4) Epoch 6, batch 2900, loss[loss=0.24, simple_loss=0.3167, pruned_loss=0.08165, over 4825.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3149, pruned_loss=0.07773, over 939911.08 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:34:14,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=72017.33333333333, ans=0.2 +2024-07-27 22:34:22,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.20 vs. limit=12.0 +2024-07-27 22:34:31,186 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.900e+01 6.392e+01 7.089e+01 1.311e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-27 22:34:34,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=72057.33333333333, ans=0.125 +2024-07-27 22:34:34,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72057.33333333333, ans=0.125 +2024-07-27 22:34:38,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0 +2024-07-27 22:34:39,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.07 vs. limit=22.5 +2024-07-27 22:34:44,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=72070.66666666667, ans=0.125 +2024-07-27 22:34:44,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72070.66666666667, ans=0.0 +2024-07-27 22:34:47,736 INFO [train.py:1114] (1/4) Epoch 6, batch 2950, loss[loss=0.2195, simple_loss=0.3009, pruned_loss=0.0691, over 4711.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3134, pruned_loss=0.07758, over 938714.01 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:34:55,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.11 vs. limit=15.0 +2024-07-27 22:34:57,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=72097.33333333333, ans=0.2 +2024-07-27 22:35:10,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 22:35:19,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.72 vs. limit=22.5 +2024-07-27 22:35:21,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=72137.33333333333, ans=0.2 +2024-07-27 22:35:23,030 INFO [train.py:1114] (1/4) Epoch 6, batch 3000, loss[loss=0.2429, simple_loss=0.3169, pruned_loss=0.08447, over 4745.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3137, pruned_loss=0.07705, over 938464.61 frames. 
], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:35:23,030 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 22:35:35,848 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.194, simple_loss=0.2973, pruned_loss=0.04533, over 944034.00 frames. +2024-07-27 22:35:35,849 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 22:35:43,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-07-27 22:35:44,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=72164.0, ans=0.0 +2024-07-27 22:35:53,037 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.896e+01 6.493e+01 7.360e+01 1.026e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-27 22:35:55,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72177.33333333333, ans=0.125 +2024-07-27 22:35:57,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=72190.66666666667, ans=0.025 +2024-07-27 22:36:06,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72204.0, ans=0.125 +2024-07-27 22:36:09,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=12.0 +2024-07-27 22:36:13,118 INFO [train.py:1114] (1/4) Epoch 6, batch 3050, loss[loss=0.2626, simple_loss=0.34, pruned_loss=0.09257, over 4638.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3152, pruned_loss=0.07794, over 937328.77 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:36:14,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=72217.33333333333, ans=0.025 +2024-07-27 22:36:29,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=72244.0, ans=0.125 +2024-07-27 22:36:33,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72257.33333333333, ans=0.1 +2024-07-27 22:36:36,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=72257.33333333333, ans=0.125 +2024-07-27 22:36:42,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72270.66666666667, ans=0.1 +2024-07-27 22:36:46,893 INFO [train.py:1114] (1/4) Epoch 6, batch 3100, loss[loss=0.2567, simple_loss=0.3397, pruned_loss=0.08685, over 4636.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3143, pruned_loss=0.07745, over 937731.58 frames. ], batch size: 16, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:36:47,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.39 vs. 
limit=15.0 +2024-07-27 22:36:51,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=72284.0, ans=0.2 +2024-07-27 22:37:00,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=72310.66666666667, ans=0.125 +2024-07-27 22:37:03,365 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+01 6.089e+01 6.786e+01 8.344e+01 1.227e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-27 22:37:04,833 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:37:13,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=72337.33333333333, ans=0.07 +2024-07-27 22:37:18,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72337.33333333333, ans=0.1 +2024-07-27 22:37:18,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.75 vs. limit=15.0 +2024-07-27 22:37:19,943 INFO [train.py:1114] (1/4) Epoch 6, batch 3150, loss[loss=0.2531, simple_loss=0.3326, pruned_loss=0.08675, over 4576.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3147, pruned_loss=0.07785, over 937662.85 frames. ], batch size: 17, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:37:30,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=72364.0, ans=0.0 +2024-07-27 22:37:43,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=72390.66666666667, ans=0.125 +2024-07-27 22:37:49,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=72404.0, ans=0.125 +2024-07-27 22:37:55,005 INFO [train.py:1114] (1/4) Epoch 6, batch 3200, loss[loss=0.2226, simple_loss=0.296, pruned_loss=0.07455, over 4834.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3138, pruned_loss=0.07787, over 939431.93 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:37:57,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=72417.33333333333, ans=0.125 +2024-07-27 22:38:02,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=72430.66666666667, ans=0.0 +2024-07-27 22:38:11,652 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.899e+01 6.448e+01 7.393e+01 1.095e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-27 22:38:21,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72470.66666666667, ans=0.125 +2024-07-27 22:38:28,283 INFO [train.py:1114] (1/4) Epoch 6, batch 3250, loss[loss=0.2466, simple_loss=0.329, pruned_loss=0.08205, over 4937.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3139, pruned_loss=0.07789, over 940642.56 frames. 
], batch size: 14, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:38:29,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=72484.0, ans=0.125 +2024-07-27 22:38:39,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=72497.33333333333, ans=0.95 +2024-07-27 22:38:39,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.98 vs. limit=15.0 +2024-07-27 22:38:47,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72524.0, ans=0.1 +2024-07-27 22:38:53,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=72524.0, ans=0.025 +2024-07-27 22:38:58,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.20 vs. limit=10.0 +2024-07-27 22:39:01,868 INFO [train.py:1114] (1/4) Epoch 6, batch 3300, loss[loss=0.2417, simple_loss=0.325, pruned_loss=0.07917, over 4717.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3118, pruned_loss=0.07734, over 940642.67 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:39:02,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72550.66666666667, ans=0.125 +2024-07-27 22:39:05,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=72550.66666666667, ans=0.125 +2024-07-27 22:39:11,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=72564.0, ans=0.0 +2024-07-27 22:39:16,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=72577.33333333333, ans=0.2 +2024-07-27 22:39:19,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=72577.33333333333, ans=0.0 +2024-07-27 22:39:20,235 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.037e+01 6.381e+01 7.466e+01 1.307e+02, threshold=1.276e+02, percent-clipped=1.0 +2024-07-27 22:39:29,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=72604.0, ans=0.025 +2024-07-27 22:39:36,878 INFO [train.py:1114] (1/4) Epoch 6, batch 3350, loss[loss=0.2874, simple_loss=0.3538, pruned_loss=0.1105, over 4649.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3126, pruned_loss=0.07799, over 938564.01 frames. 
], batch size: 17, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:39:46,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=72630.66666666667, ans=0.125 +2024-07-27 22:39:47,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=72630.66666666667, ans=0.125 +2024-07-27 22:39:52,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=72644.0, ans=0.025 +2024-07-27 22:39:57,709 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:39:59,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=72657.33333333333, ans=0.125 +2024-07-27 22:40:04,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72670.66666666667, ans=0.0 +2024-07-27 22:40:10,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=72670.66666666667, ans=0.07 +2024-07-27 22:40:10,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=72670.66666666667, ans=10.0 +2024-07-27 22:40:11,973 INFO [train.py:1114] (1/4) Epoch 6, batch 3400, loss[loss=0.1702, simple_loss=0.2549, pruned_loss=0.04276, over 4819.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3121, pruned_loss=0.07797, over 937210.65 frames. ], batch size: 11, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:40:12,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=72684.0, ans=0.0 +2024-07-27 22:40:18,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72697.33333333333, ans=0.125 +2024-07-27 22:40:26,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.35 vs. limit=22.5 +2024-07-27 22:40:28,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.919e+01 6.608e+01 7.688e+01 1.157e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-27 22:40:31,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=72724.0, ans=0.1 +2024-07-27 22:40:36,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.63 vs. limit=10.0 +2024-07-27 22:40:45,206 INFO [train.py:1114] (1/4) Epoch 6, batch 3450, loss[loss=0.2681, simple_loss=0.3356, pruned_loss=0.1003, over 4645.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3128, pruned_loss=0.07797, over 937410.47 frames. 
], batch size: 19, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:40:48,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72750.66666666667, ans=0.125 +2024-07-27 22:40:54,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=72764.0, ans=0.0 +2024-07-27 22:41:06,009 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:41:18,861 INFO [train.py:1114] (1/4) Epoch 6, batch 3500, loss[loss=0.2197, simple_loss=0.2889, pruned_loss=0.0753, over 4937.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.312, pruned_loss=0.0773, over 938038.66 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:41:26,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72830.66666666667, ans=0.125 +2024-07-27 22:41:36,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=72844.0, ans=0.0 +2024-07-27 22:41:37,344 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.111e+01 6.112e+01 6.695e+01 8.214e+01 1.239e+02, threshold=1.339e+02, percent-clipped=0.0 +2024-07-27 22:41:41,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.60 vs. limit=15.0 +2024-07-27 22:41:46,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=72857.33333333333, ans=0.2 +2024-07-27 22:41:46,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72870.66666666667, ans=0.1 +2024-07-27 22:41:52,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=72870.66666666667, ans=0.0 +2024-07-27 22:41:58,734 INFO [train.py:1114] (1/4) Epoch 6, batch 3550, loss[loss=0.2166, simple_loss=0.3038, pruned_loss=0.06472, over 4657.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3125, pruned_loss=0.07732, over 938754.85 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:42:01,870 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.19 vs. limit=15.0 +2024-07-27 22:42:02,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=72884.0, ans=0.125 +2024-07-27 22:42:08,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.78 vs. limit=15.0 +2024-07-27 22:42:12,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-07-27 22:42:15,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72910.66666666667, ans=0.125 +2024-07-27 22:42:31,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.77 vs. 
limit=15.0 +2024-07-27 22:42:31,735 INFO [train.py:1114] (1/4) Epoch 6, batch 3600, loss[loss=0.2438, simple_loss=0.3095, pruned_loss=0.08909, over 4973.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.312, pruned_loss=0.07709, over 940203.94 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:42:48,879 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.822e+01 6.148e+01 6.891e+01 7.768e+01 1.144e+02, threshold=1.378e+02, percent-clipped=0.0 +2024-07-27 22:42:56,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=72990.66666666667, ans=0.125 +2024-07-27 22:42:59,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=73004.0, ans=0.125 +2024-07-27 22:43:00,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=73004.0, ans=0.125 +2024-07-27 22:43:06,987 INFO [train.py:1114] (1/4) Epoch 6, batch 3650, loss[loss=0.2737, simple_loss=0.3474, pruned_loss=0.09997, over 4907.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3125, pruned_loss=0.07715, over 940259.87 frames. ], batch size: 15, lr: 1.26e-02, grad_scale: 32.0 +2024-07-27 22:43:14,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=73030.66666666667, ans=0.125 +2024-07-27 22:43:16,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.71 vs. limit=22.5 +2024-07-27 22:43:18,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=73030.66666666667, ans=0.05 +2024-07-27 22:43:24,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.95 vs. limit=10.0 +2024-07-27 22:43:27,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=22.5 +2024-07-27 22:43:39,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73070.66666666667, ans=0.1 +2024-07-27 22:43:39,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0 +2024-07-27 22:43:40,164 INFO [train.py:1114] (1/4) Epoch 6, batch 3700, loss[loss=0.2303, simple_loss=0.314, pruned_loss=0.07331, over 4929.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3117, pruned_loss=0.07634, over 941457.84 frames. 
], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:43:48,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=73097.33333333333, ans=0.125 +2024-07-27 22:43:56,980 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.084e+01 6.656e+01 7.917e+01 1.226e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 22:44:04,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=73124.0, ans=0.125 +2024-07-27 22:44:10,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=73137.33333333333, ans=0.0 +2024-07-27 22:44:12,684 INFO [train.py:1114] (1/4) Epoch 6, batch 3750, loss[loss=0.2542, simple_loss=0.3196, pruned_loss=0.09447, over 4807.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3131, pruned_loss=0.07775, over 942807.30 frames. ], batch size: 11, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:44:12,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=73150.66666666667, ans=0.125 +2024-07-27 22:44:21,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=73164.0, ans=0.0 +2024-07-27 22:44:30,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73177.33333333333, ans=0.1 +2024-07-27 22:44:31,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=73177.33333333333, ans=0.125 +2024-07-27 22:44:31,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=73190.66666666667, ans=0.2 +2024-07-27 22:44:32,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=73190.66666666667, ans=0.0 +2024-07-27 22:44:38,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.91 vs. limit=22.5 +2024-07-27 22:44:45,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=73204.0, ans=0.025 +2024-07-27 22:44:47,690 INFO [train.py:1114] (1/4) Epoch 6, batch 3800, loss[loss=0.2439, simple_loss=0.325, pruned_loss=0.08139, over 4803.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3117, pruned_loss=0.07703, over 941114.62 frames. 
], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:44:49,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=73217.33333333333, ans=0.0 +2024-07-27 22:44:50,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=73217.33333333333, ans=0.125 +2024-07-27 22:45:00,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=73244.0, ans=0.125 +2024-07-27 22:45:04,710 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 6.031e+01 6.654e+01 7.619e+01 1.236e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 22:45:09,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-27 22:45:10,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.72 vs. limit=12.0 +2024-07-27 22:45:13,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=73270.66666666667, ans=0.125 +2024-07-27 22:45:20,973 INFO [train.py:1114] (1/4) Epoch 6, batch 3850, loss[loss=0.2504, simple_loss=0.3259, pruned_loss=0.08746, over 4636.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3112, pruned_loss=0.07672, over 941916.26 frames. ], batch size: 16, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:45:22,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=15.0 +2024-07-27 22:45:35,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73310.66666666667, ans=0.125 +2024-07-27 22:45:36,587 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:45:43,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=73324.0, ans=0.125 +2024-07-27 22:45:44,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=73324.0, ans=0.125 +2024-07-27 22:45:50,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=73337.33333333333, ans=0.0 +2024-07-27 22:45:54,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=73337.33333333333, ans=0.05 +2024-07-27 22:45:56,553 INFO [train.py:1114] (1/4) Epoch 6, batch 3900, loss[loss=0.2287, simple_loss=0.3324, pruned_loss=0.06251, over 4814.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3117, pruned_loss=0.07634, over 942277.54 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:46:04,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.04 vs. 
limit=15.0 +2024-07-27 22:46:12,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=73377.33333333333, ans=0.125 +2024-07-27 22:46:13,362 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.979e+01 6.121e+01 6.587e+01 7.635e+01 1.146e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-27 22:46:15,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=73390.66666666667, ans=0.125 +2024-07-27 22:46:22,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0 +2024-07-27 22:46:28,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=73404.0, ans=0.125 +2024-07-27 22:46:29,895 INFO [train.py:1114] (1/4) Epoch 6, batch 3950, loss[loss=0.2546, simple_loss=0.3414, pruned_loss=0.08391, over 4837.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3116, pruned_loss=0.0761, over 944253.26 frames. ], batch size: 16, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:46:29,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=73417.33333333333, ans=0.125 +2024-07-27 22:46:30,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=73417.33333333333, ans=0.125 +2024-07-27 22:46:33,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=73417.33333333333, ans=0.125 +2024-07-27 22:46:36,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=12.0 +2024-07-27 22:46:46,661 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:46:48,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.51 vs. limit=15.0 +2024-07-27 22:47:05,385 INFO [train.py:1114] (1/4) Epoch 6, batch 4000, loss[loss=0.2344, simple_loss=0.3209, pruned_loss=0.07397, over 4777.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3127, pruned_loss=0.07745, over 940451.83 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:47:22,993 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 6.236e+01 6.803e+01 7.982e+01 1.360e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-27 22:47:39,833 INFO [train.py:1114] (1/4) Epoch 6, batch 4050, loss[loss=0.2831, simple_loss=0.3437, pruned_loss=0.1112, over 3779.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3116, pruned_loss=0.07704, over 939765.32 frames. ], batch size: 35, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:47:43,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73550.66666666667, ans=0.125 +2024-07-27 22:47:47,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.35 vs. limit=22.5 +2024-07-27 22:47:59,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.13 vs. 
limit=15.0
+2024-07-27 22:48:01,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=73590.66666666667, ans=0.0
+2024-07-27 22:48:11,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73604.0, ans=0.125
+2024-07-27 22:48:13,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.09 vs. limit=15.0
+2024-07-27 22:48:14,294 INFO [train.py:1114] (1/4) Epoch 6, batch 4100, loss[loss=0.2133, simple_loss=0.307, pruned_loss=0.05981, over 4904.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3113, pruned_loss=0.0769, over 938844.93 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:48:14,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=73617.33333333333, ans=0.125
+2024-07-27 22:48:31,603 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 6.159e+01 6.782e+01 8.525e+01 1.477e+02, threshold=1.356e+02, percent-clipped=2.0
+2024-07-27 22:48:32,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=73644.0, ans=0.125
+2024-07-27 22:48:35,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=73657.33333333333, ans=0.125
+2024-07-27 22:48:49,529 INFO [train.py:1114] (1/4) Epoch 6, batch 4150, loss[loss=0.2251, simple_loss=0.3229, pruned_loss=0.06365, over 4823.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3113, pruned_loss=0.07658, over 938427.85 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:48:53,772 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.20 vs. limit=22.5
+2024-07-27 22:48:58,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.03 vs. limit=15.0
+2024-07-27 22:48:58,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73697.33333333333, ans=0.125
+2024-07-27 22:49:02,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=73710.66666666667, ans=0.09899494936611666
+2024-07-27 22:49:04,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=73710.66666666667, ans=0.0
+2024-07-27 22:49:10,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=73724.0, ans=0.0
+2024-07-27 22:49:12,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73724.0, ans=0.125
+2024-07-27 22:49:23,809 INFO [train.py:1114] (1/4) Epoch 6, batch 4200, loss[loss=0.1897, simple_loss=0.2804, pruned_loss=0.04949, over 4899.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3121, pruned_loss=0.07668, over 939645.16 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:49:31,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=12.0
+2024-07-27 22:49:34,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=73764.0, ans=0.125
+2024-07-27 22:49:40,725 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.760e+01 6.554e+01 7.096e+01 1.149e+02, threshold=1.311e+02, percent-clipped=0.0
+2024-07-27 22:49:49,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.63 vs. limit=10.0
+2024-07-27 22:49:50,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=73804.0, ans=0.125
+2024-07-27 22:49:52,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=73804.0, ans=0.0
+2024-07-27 22:49:57,280 INFO [train.py:1114] (1/4) Epoch 6, batch 4250, loss[loss=0.2385, simple_loss=0.3173, pruned_loss=0.07989, over 4629.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3139, pruned_loss=0.078, over 940566.81 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:49:58,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.51 vs. limit=15.0
+2024-07-27 22:50:01,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=73817.33333333333, ans=0.0
+2024-07-27 22:50:02,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.13 vs. limit=15.0
+2024-07-27 22:50:26,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73870.66666666667, ans=0.1
+2024-07-27 22:50:30,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=73870.66666666667, ans=0.125
+2024-07-27 22:50:32,502 INFO [train.py:1114] (1/4) Epoch 6, batch 4300, loss[loss=0.232, simple_loss=0.3225, pruned_loss=0.07076, over 4758.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3138, pruned_loss=0.07727, over 940215.86 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:50:34,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=73884.0, ans=0.05
+2024-07-27 22:50:36,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73884.0, ans=0.1
+2024-07-27 22:50:44,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.32 vs. limit=10.0
+2024-07-27 22:50:49,607 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.051e+01 7.094e+01 8.613e+01 1.493e+02, threshold=1.419e+02, percent-clipped=5.0
+2024-07-27 22:51:08,577 INFO [train.py:1114] (1/4) Epoch 6, batch 4350, loss[loss=0.2555, simple_loss=0.3358, pruned_loss=0.08763, over 4762.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3138, pruned_loss=0.07711, over 941101.94 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:51:21,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=73964.0, ans=0.2
+2024-07-27 22:51:39,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=74004.0, ans=0.1
+2024-07-27 22:51:43,387 INFO [train.py:1114] (1/4) Epoch 6, batch 4400, loss[loss=0.2262, simple_loss=0.3303, pruned_loss=0.06108, over 4809.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3128, pruned_loss=0.07636, over 941103.10 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:51:43,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.53 vs. limit=15.0
+2024-07-27 22:51:46,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=15.0
+2024-07-27 22:51:48,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.06 vs. limit=15.0
+2024-07-27 22:51:57,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=74044.0, ans=0.125
+2024-07-27 22:52:00,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 5.949e+01 6.629e+01 7.705e+01 1.284e+02, threshold=1.326e+02, percent-clipped=0.0
+2024-07-27 22:52:01,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=74044.0, ans=0.0
+2024-07-27 22:52:01,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=74044.0, ans=0.125
+2024-07-27 22:52:05,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.16 vs. limit=22.5
+2024-07-27 22:52:06,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=74057.33333333333, ans=0.0
+2024-07-27 22:52:09,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=74070.66666666667, ans=0.0
+2024-07-27 22:52:11,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=74070.66666666667, ans=0.125
+2024-07-27 22:52:17,025 INFO [train.py:1114] (1/4) Epoch 6, batch 4450, loss[loss=0.218, simple_loss=0.301, pruned_loss=0.06747, over 4939.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3142, pruned_loss=0.07763, over 939091.41 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:52:23,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=74097.33333333333, ans=0.0
+2024-07-27 22:52:24,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.32 vs. limit=10.0
+2024-07-27 22:52:31,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=74110.66666666667, ans=0.2
+2024-07-27 22:52:41,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=74124.0, ans=0.2
+2024-07-27 22:52:42,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74124.0, ans=0.125
+2024-07-27 22:52:51,881 INFO [train.py:1114] (1/4) Epoch 6, batch 4500, loss[loss=0.2337, simple_loss=0.322, pruned_loss=0.07271, over 4745.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3144, pruned_loss=0.07768, over 938086.39 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:53:05,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=74177.33333333333, ans=0.0
+2024-07-27 22:53:08,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.811e+01 6.353e+01 6.989e+01 9.336e+01, threshold=1.271e+02, percent-clipped=0.0
+2024-07-27 22:53:13,667 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:53:19,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=74204.0, ans=0.0
+2024-07-27 22:53:24,768 INFO [train.py:1114] (1/4) Epoch 6, batch 4550, loss[loss=0.2231, simple_loss=0.3001, pruned_loss=0.07303, over 4894.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3136, pruned_loss=0.07731, over 939942.37 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:53:30,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=74230.66666666667, ans=0.125
+2024-07-27 22:53:34,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74230.66666666667, ans=0.125
+2024-07-27 22:53:38,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=74244.0, ans=0.125
+2024-07-27 22:53:38,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=74244.0, ans=0.0
+2024-07-27 22:53:45,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=74257.33333333333, ans=0.125
+2024-07-27 22:53:53,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=74270.66666666667, ans=0.2
+2024-07-27 22:53:58,220 INFO [train.py:1114] (1/4) Epoch 6, batch 4600, loss[loss=0.272, simple_loss=0.3448, pruned_loss=0.09953, over 4539.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3133, pruned_loss=0.07705, over 938555.22 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:54:01,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=74284.0, ans=0.0
+2024-07-27 22:54:04,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=74284.0, ans=0.025
+2024-07-27 22:54:05,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=74284.0, ans=0.125
+2024-07-27 22:54:13,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=74297.33333333333, ans=0.0
+2024-07-27 22:54:14,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=74310.66666666667, ans=0.0
+2024-07-27 22:54:18,334 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 6.152e+01 6.770e+01 8.392e+01 1.380e+02, threshold=1.354e+02, percent-clipped=1.0
+2024-07-27 22:54:18,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=74310.66666666667, ans=0.125
+2024-07-27 22:54:21,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=74324.0, ans=0.2
+2024-07-27 22:54:22,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=74324.0, ans=0.2
+2024-07-27 22:54:22,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.82 vs. limit=22.5
+2024-07-27 22:54:23,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=74324.0, ans=0.0
+2024-07-27 22:54:26,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74324.0, ans=0.125
+2024-07-27 22:54:26,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74324.0, ans=0.125
+2024-07-27 22:54:26,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=74337.33333333333, ans=0.125
+2024-07-27 22:54:28,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=74337.33333333333, ans=0.0
+2024-07-27 22:54:31,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=74337.33333333333, ans=0.0
+2024-07-27 22:54:34,076 INFO [train.py:1114] (1/4) Epoch 6, batch 4650, loss[loss=0.2381, simple_loss=0.3168, pruned_loss=0.07973, over 4849.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3134, pruned_loss=0.07715, over 940363.39 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:54:37,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74350.66666666667, ans=0.1
+2024-07-27 22:54:43,099 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.78 vs. limit=15.0
+2024-07-27 22:55:03,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.80 vs. limit=15.0
+2024-07-27 22:55:04,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=74377.33333333333, ans=0.0
+2024-07-27 22:55:06,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74377.33333333333, ans=0.1
+2024-07-27 22:55:09,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74390.66666666667, ans=0.1
+2024-07-27 22:55:12,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=74390.66666666667, ans=0.125
+2024-07-27 22:55:38,798 INFO [train.py:1114] (1/4) Epoch 6, batch 4700, loss[loss=0.2236, simple_loss=0.2934, pruned_loss=0.07689, over 4717.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3129, pruned_loss=0.07728, over 937576.40 frames. ], batch size: 11, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:55:41,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74417.33333333333, ans=0.1
+2024-07-27 22:55:44,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0
+2024-07-27 22:55:55,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=74444.0, ans=0.2
+2024-07-27 22:55:57,187 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.070e+01 6.903e+01 7.937e+01 1.102e+02, threshold=1.381e+02, percent-clipped=0.0
+2024-07-27 22:56:14,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=74457.33333333333, ans=0.125
+2024-07-27 22:56:18,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74470.66666666667, ans=0.1
+2024-07-27 22:57:01,145 INFO [train.py:1114] (1/4) Epoch 6, batch 4750, loss[loss=0.2535, simple_loss=0.3401, pruned_loss=0.08346, over 4518.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3138, pruned_loss=0.07764, over 935429.21 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:57:03,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.94 vs. limit=15.0
+2024-07-27 22:57:05,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.38 vs. limit=15.0
+2024-07-27 22:57:16,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=74510.66666666667, ans=0.0
+2024-07-27 22:57:57,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=74537.33333333333, ans=0.125
+2024-07-27 22:57:57,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=74537.33333333333, ans=0.07
+2024-07-27 22:58:00,089 INFO [train.py:1114] (1/4) Epoch 6, batch 4800, loss[loss=0.2317, simple_loss=0.3277, pruned_loss=0.06783, over 4700.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3133, pruned_loss=0.07777, over 932573.99 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:58:09,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=74564.0, ans=0.125
+2024-07-27 22:58:10,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=74564.0, ans=0.125
+2024-07-27 22:58:27,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.139e+01 5.953e+01 6.774e+01 8.357e+01 1.268e+02, threshold=1.355e+02, percent-clipped=0.0
+2024-07-27 22:58:29,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=74590.66666666667, ans=0.05
+2024-07-27 22:58:31,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=74590.66666666667, ans=0.125
+2024-07-27 22:58:41,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=74604.0, ans=0.07
+2024-07-27 22:58:43,662 INFO [train.py:1114] (1/4) Epoch 6, batch 4850, loss[loss=0.2424, simple_loss=0.3263, pruned_loss=0.07922, over 4744.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3127, pruned_loss=0.07702, over 932001.46 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:58:51,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=74630.66666666667, ans=0.2
+2024-07-27 22:59:24,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0
+2024-07-27 22:59:29,829 INFO [train.py:1114] (1/4) Epoch 6, batch 4900, loss[loss=0.1914, simple_loss=0.2835, pruned_loss=0.04961, over 4760.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3121, pruned_loss=0.07649, over 933985.77 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 22:59:30,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=74684.0, ans=0.025
+2024-07-27 22:59:46,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=74710.66666666667, ans=0.0
+2024-07-27 22:59:48,777 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.911e+01 6.095e+01 6.974e+01 8.315e+01 1.441e+02, threshold=1.395e+02, percent-clipped=3.0
+2024-07-27 22:59:58,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0
+2024-07-27 22:59:58,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=74724.0, ans=0.0
+2024-07-27 23:00:07,416 INFO [train.py:1114] (1/4) Epoch 6, batch 4950, loss[loss=0.288, simple_loss=0.3606, pruned_loss=0.1076, over 3297.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3139, pruned_loss=0.0775, over 931405.83 frames. ], batch size: 35, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:00:11,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0
+2024-07-27 23:00:12,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=74750.66666666667, ans=0.0
+2024-07-27 23:00:37,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=74790.66666666667, ans=0.125
+2024-07-27 23:00:38,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=74790.66666666667, ans=0.0
+2024-07-27 23:00:39,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=74790.66666666667, ans=0.0
+2024-07-27 23:00:44,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=74804.0, ans=0.0
+2024-07-27 23:00:47,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.49 vs. limit=22.5
+2024-07-27 23:00:47,378 INFO [train.py:1114] (1/4) Epoch 6, batch 5000, loss[loss=0.2467, simple_loss=0.3327, pruned_loss=0.08039, over 4669.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3141, pruned_loss=0.07778, over 935252.05 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:00:55,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=74830.66666666667, ans=0.04949747468305833
+2024-07-27 23:01:05,413 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.345e+01 7.582e+01 9.212e+01 1.315e+02, threshold=1.516e+02, percent-clipped=0.0
+2024-07-27 23:01:14,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.97 vs. limit=15.0
+2024-07-27 23:01:17,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=74870.66666666667, ans=0.125
+2024-07-27 23:01:24,714 INFO [train.py:1114] (1/4) Epoch 6, batch 5050, loss[loss=0.1954, simple_loss=0.2799, pruned_loss=0.05547, over 4847.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3131, pruned_loss=0.07644, over 937616.81 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:01:39,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=74910.66666666667, ans=0.0
+2024-07-27 23:01:43,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=74910.66666666667, ans=0.2
+2024-07-27 23:01:44,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=74910.66666666667, ans=0.125
+2024-07-27 23:01:44,429 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0
+2024-07-27 23:01:48,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5
+2024-07-27 23:02:00,473 INFO [train.py:1114] (1/4) Epoch 6, batch 5100, loss[loss=0.2385, simple_loss=0.3162, pruned_loss=0.08044, over 4769.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3138, pruned_loss=0.07711, over 935386.45 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:02:15,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=12.0
+2024-07-27 23:02:19,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=74977.33333333333, ans=0.0
+2024-07-27 23:02:23,234 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.910e+01 5.981e+01 6.894e+01 7.665e+01 1.178e+02, threshold=1.379e+02, percent-clipped=0.0
+2024-07-27 23:02:36,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=75004.0, ans=0.5
+2024-07-27 23:02:36,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=75004.0, ans=0.07
+2024-07-27 23:02:39,310 INFO [train.py:1114] (1/4) Epoch 6, batch 5150, loss[loss=0.2477, simple_loss=0.3346, pruned_loss=0.08039, over 4843.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3134, pruned_loss=0.07688, over 936418.42 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:02:44,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75017.33333333333, ans=0.125
+2024-07-27 23:02:49,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=75030.66666666667, ans=0.0
+2024-07-27 23:02:52,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=75044.0, ans=0.125
+2024-07-27 23:03:00,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=75057.33333333333, ans=0.0
+2024-07-27 23:03:10,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=75070.66666666667, ans=22.5
+2024-07-27 23:03:11,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.17 vs. limit=6.0
+2024-07-27 23:03:12,931 INFO [train.py:1114] (1/4) Epoch 6, batch 5200, loss[loss=0.2157, simple_loss=0.3002, pruned_loss=0.06559, over 4671.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3117, pruned_loss=0.07559, over 936698.30 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:03:16,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. limit=15.0
+2024-07-27 23:03:17,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=75084.0, ans=0.125
+2024-07-27 23:03:24,569 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:03:25,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=75097.33333333333, ans=0.2
+2024-07-27 23:03:26,852 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.27 vs. limit=15.0
+2024-07-27 23:03:31,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=75110.66666666667, ans=0.125
+2024-07-27 23:03:32,616 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.053e+01 6.671e+01 7.847e+01 1.456e+02, threshold=1.334e+02, percent-clipped=1.0
+2024-07-27 23:03:43,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=75137.33333333333, ans=0.2
+2024-07-27 23:03:44,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=75137.33333333333, ans=0.1
+2024-07-27 23:03:48,751 INFO [train.py:1114] (1/4) Epoch 6, batch 5250, loss[loss=0.2371, simple_loss=0.3014, pruned_loss=0.08639, over 4897.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3123, pruned_loss=0.07624, over 936289.26 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:03:51,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=15.0
+2024-07-27 23:03:51,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=75150.66666666667, ans=0.125
+2024-07-27 23:03:54,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75164.0, ans=0.1
+2024-07-27 23:03:56,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=75164.0, ans=0.2
+2024-07-27 23:04:10,533 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:04:19,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=75204.0, ans=0.0
+2024-07-27 23:04:20,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=75204.0, ans=0.025
+2024-07-27 23:04:21,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.19 vs. limit=6.0
+2024-07-27 23:04:24,656 INFO [train.py:1114] (1/4) Epoch 6, batch 5300, loss[loss=0.2263, simple_loss=0.3166, pruned_loss=0.06807, over 4662.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3116, pruned_loss=0.07594, over 935329.05 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:04:24,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=75217.33333333333, ans=0.0
+2024-07-27 23:04:25,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.29 vs. limit=15.0
+2024-07-27 23:04:40,072 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:04:41,938 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.955e+01 6.651e+01 7.573e+01 1.282e+02, threshold=1.330e+02, percent-clipped=0.0
+2024-07-27 23:04:53,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75270.66666666667, ans=0.1
+2024-07-27 23:04:54,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.72 vs. limit=15.0
+2024-07-27 23:04:57,836 INFO [train.py:1114] (1/4) Epoch 6, batch 5350, loss[loss=0.1648, simple_loss=0.2446, pruned_loss=0.04251, over 4485.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3119, pruned_loss=0.07636, over 936936.26 frames. ], batch size: 10, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:04:59,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=75284.0, ans=0.0
+2024-07-27 23:05:00,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.60 vs. limit=15.0
+2024-07-27 23:05:30,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=75337.33333333333, ans=0.0
+2024-07-27 23:05:33,160 INFO [train.py:1114] (1/4) Epoch 6, batch 5400, loss[loss=0.2905, simple_loss=0.3594, pruned_loss=0.1108, over 4300.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3128, pruned_loss=0.07743, over 930423.68 frames. ], batch size: 26, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:05:33,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=75350.66666666667, ans=0.2
+2024-07-27 23:05:34,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0
+2024-07-27 23:05:35,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75350.66666666667, ans=0.0
+2024-07-27 23:05:36,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75350.66666666667, ans=0.1
+2024-07-27 23:05:39,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=75364.0, ans=0.125
+2024-07-27 23:05:41,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=75364.0, ans=0.0
+2024-07-27 23:05:42,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75364.0, ans=0.1
+2024-07-27 23:05:42,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75364.0, ans=0.1
+2024-07-27 23:05:46,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=75377.33333333333, ans=0.2
+2024-07-27 23:05:50,264 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.962e+01 6.573e+01 7.607e+01 1.590e+02, threshold=1.315e+02, percent-clipped=1.0
+2024-07-27 23:05:59,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=75404.0, ans=0.0
+2024-07-27 23:06:00,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75404.0, ans=0.0
+2024-07-27 23:06:08,280 INFO [train.py:1114] (1/4) Epoch 6, batch 5450, loss[loss=0.2052, simple_loss=0.2707, pruned_loss=0.06981, over 4716.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3116, pruned_loss=0.07658, over 933113.45 frames. ], batch size: 11, lr: 1.24e-02, grad_scale: 32.0
+2024-07-27 23:06:24,542 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.91 vs. limit=10.0
+2024-07-27 23:06:27,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=75444.0, ans=0.0
+2024-07-27 23:06:32,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.24 vs. limit=12.0
+2024-07-27 23:06:33,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=75457.33333333333, ans=0.0
+2024-07-27 23:06:41,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=75484.0, ans=0.125
+2024-07-27 23:06:42,302 INFO [train.py:1114] (1/4) Epoch 6, batch 5500, loss[loss=0.28, simple_loss=0.3581, pruned_loss=0.1009, over 4420.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3122, pruned_loss=0.07731, over 930553.84 frames. ], batch size: 26, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:06:59,681 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 6.092e+01 6.682e+01 7.913e+01 1.212e+02, threshold=1.336e+02, percent-clipped=0.0
+2024-07-27 23:07:01,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75510.66666666667, ans=0.0
+2024-07-27 23:07:02,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=75524.0, ans=0.0
+2024-07-27 23:07:04,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.16 vs. limit=10.0
+2024-07-27 23:07:12,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.57 vs. limit=15.0
+2024-07-27 23:07:21,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=75550.66666666667, ans=0.0
+2024-07-27 23:07:21,734 INFO [train.py:1114] (1/4) Epoch 6, batch 5550, loss[loss=0.2423, simple_loss=0.3153, pruned_loss=0.0846, over 4715.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3111, pruned_loss=0.07608, over 933011.84 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:07:27,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=75550.66666666667, ans=15.0
+2024-07-27 23:07:30,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=75564.0, ans=0.125
+2024-07-27 23:07:30,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75564.0, ans=0.125
+2024-07-27 23:07:32,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0
+2024-07-27 23:07:33,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=75564.0, ans=0.125
+2024-07-27 23:07:36,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=75577.33333333333, ans=0.2
+2024-07-27 23:07:43,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=75590.66666666667, ans=0.0
+2024-07-27 23:07:45,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=75590.66666666667, ans=0.2
+2024-07-27 23:07:49,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75604.0, ans=0.1
+2024-07-27 23:07:54,736 INFO [train.py:1114] (1/4) Epoch 6, batch 5600, loss[loss=0.2431, simple_loss=0.3287, pruned_loss=0.07873, over 4745.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.312, pruned_loss=0.0762, over 934417.92 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 64.0
+2024-07-27 23:08:05,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0
+2024-07-27 23:08:16,465 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.881e+01 6.141e+01 6.729e+01 7.455e+01 1.025e+02, threshold=1.346e+02, percent-clipped=0.0
+2024-07-27 23:08:19,369 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:08:22,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.72 vs. limit=15.0
+2024-07-27 23:08:32,403 INFO [train.py:1114] (1/4) Epoch 6, batch 5650, loss[loss=0.2506, simple_loss=0.3373, pruned_loss=0.08195, over 4514.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.311, pruned_loss=0.07571, over 937005.40 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 64.0
+2024-07-27 23:08:39,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=75697.33333333333, ans=0.2
+2024-07-27 23:08:44,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.41 vs. limit=15.0
+2024-07-27 23:08:45,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=75710.66666666667, ans=0.125
+2024-07-27 23:08:47,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=75710.66666666667, ans=0.07
+2024-07-27 23:09:03,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75737.33333333333, ans=0.1
+2024-07-27 23:09:08,124 INFO [train.py:1114] (1/4) Epoch 6, batch 5700, loss[loss=0.2306, simple_loss=0.3129, pruned_loss=0.07419, over 4681.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3121, pruned_loss=0.07605, over 937931.11 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:09:14,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=75750.66666666667, ans=10.0
+2024-07-27 23:09:17,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75764.0, ans=0.125
+2024-07-27 23:09:25,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=75777.33333333333, ans=0.2
+2024-07-27 23:09:26,481 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.584e+01 7.686e+01 8.929e+01 1.310e+02, threshold=1.537e+02, percent-clipped=0.0
+2024-07-27 23:09:30,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=75790.66666666667, ans=0.0
+2024-07-27 23:09:36,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.06 vs. limit=15.0
+2024-07-27 23:09:41,525 INFO [train.py:1114] (1/4) Epoch 6, batch 5750, loss[loss=0.2313, simple_loss=0.3088, pruned_loss=0.07695, over 4745.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3124, pruned_loss=0.07609, over 938243.67 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:10:01,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=75844.0, ans=0.2
+2024-07-27 23:10:16,846 INFO [train.py:1114] (1/4) Epoch 6, batch 5800, loss[loss=0.252, simple_loss=0.3333, pruned_loss=0.08534, over 4651.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3127, pruned_loss=0.07672, over 937049.90 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:10:23,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75897.33333333333, ans=0.1
+2024-07-27 23:10:24,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75897.33333333333, ans=0.1
+2024-07-27 23:10:34,693 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.114e+01 6.081e+01 6.996e+01 7.790e+01 1.543e+02, threshold=1.399e+02, percent-clipped=1.0
+2024-07-27 23:10:42,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75924.0, ans=0.1
+2024-07-27 23:10:42,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=75924.0, ans=0.125
+2024-07-27 23:10:48,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=75937.33333333333, ans=0.125
+2024-07-27 23:10:50,809 INFO [train.py:1114] (1/4) Epoch 6, batch 5850, loss[loss=0.2709, simple_loss=0.3513, pruned_loss=0.09523, over 4453.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3123, pruned_loss=0.07657, over 937859.87 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:10:51,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75950.66666666667, ans=0.1
+2024-07-27 23:10:58,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=75950.66666666667, ans=0.0
+2024-07-27 23:11:09,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=75977.33333333333, ans=0.0
+2024-07-27 23:11:21,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.68 vs. limit=22.5
+2024-07-27 23:11:28,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=76004.0, ans=0.0
+2024-07-27 23:11:30,103 INFO [train.py:1114] (1/4) Epoch 6, batch 5900, loss[loss=0.2609, simple_loss=0.3451, pruned_loss=0.08829, over 4676.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3118, pruned_loss=0.07604, over 937650.05 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:11:48,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 6.028e+01 6.783e+01 7.450e+01 1.132e+02, threshold=1.357e+02, percent-clipped=0.0
+2024-07-27 23:11:52,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=76057.33333333333, ans=0.125
+2024-07-27 23:11:55,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=76057.33333333333, ans=0.125
+2024-07-27 23:11:56,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=76070.66666666667, ans=0.5
+2024-07-27 23:12:03,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.15 vs. limit=15.0
+2024-07-27 23:12:03,424 INFO [train.py:1114] (1/4) Epoch 6, batch 5950, loss[loss=0.2432, simple_loss=0.3332, pruned_loss=0.07665, over 4690.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3106, pruned_loss=0.07519, over 939557.27 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:12:07,456 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.05 vs. limit=15.0
+2024-07-27 23:12:36,359 INFO [train.py:1114] (1/4) Epoch 6, batch 6000, loss[loss=0.2816, simple_loss=0.3411, pruned_loss=0.1111, over 4344.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.312, pruned_loss=0.07669, over 936670.17 frames. ], batch size: 26, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:12:36,360 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-27 23:12:44,448 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.9266, 4.6052, 4.2102, 3.9080], device='cuda:1')
+2024-07-27 23:12:50,096 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.1905, simple_loss=0.2947, pruned_loss=0.04318, over 944034.00 frames.
+2024-07-27 23:12:50,097 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-27 23:12:55,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=76150.66666666667, ans=0.2
+2024-07-27 23:13:00,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=76164.0, ans=0.95
+2024-07-27 23:13:07,972 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 6.230e+01 7.142e+01 8.647e+01 1.308e+02, threshold=1.428e+02, percent-clipped=0.0
+2024-07-27 23:13:08,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76177.33333333333, ans=0.1
+2024-07-27 23:13:08,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76177.33333333333, ans=0.1
+2024-07-27 23:13:17,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=76204.0, ans=0.0
+2024-07-27 23:13:24,111 INFO [train.py:1114] (1/4) Epoch 6, batch 6050, loss[loss=0.2109, simple_loss=0.289, pruned_loss=0.06634, over 4777.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3103, pruned_loss=0.07534, over 938303.92 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:13:47,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=76257.33333333333, ans=0.125
+2024-07-27 23:13:49,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0
+2024-07-27 23:13:57,300 INFO [train.py:1114] (1/4) Epoch 6, batch 6100, loss[loss=0.2171, simple_loss=0.3125, pruned_loss=0.06083, over 4704.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3096, pruned_loss=0.0746, over 937753.52 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:14:17,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 6.126e+01 6.655e+01 7.850e+01 1.418e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 23:14:19,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=76324.0, ans=0.125
+2024-07-27 23:14:21,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=76324.0, ans=0.125
+2024-07-27 23:14:32,317 INFO [train.py:1114] (1/4) Epoch 6, batch 6150, loss[loss=0.3197, simple_loss=0.3758, pruned_loss=0.1318, over 3752.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3102, pruned_loss=0.07509, over 936791.60 frames. ], batch size: 35, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:14:44,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=76364.0, ans=0.125
+2024-07-27 23:14:45,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=76377.33333333333, ans=0.125
+2024-07-27 23:14:46,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=76377.33333333333, ans=0.09899494936611666
+2024-07-27 23:14:57,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=76390.66666666667, ans=0.125
+2024-07-27 23:14:59,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.04 vs. limit=15.0
+2024-07-27 23:15:06,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.76 vs. limit=15.0
+2024-07-27 23:15:07,851 INFO [train.py:1114] (1/4) Epoch 6, batch 6200, loss[loss=0.2238, simple_loss=0.3231, pruned_loss=0.06223, over 4733.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3103, pruned_loss=0.07483, over 936261.10 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:15:11,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=76417.33333333333, ans=0.125
+2024-07-27 23:15:12,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=76417.33333333333, ans=0.125
+2024-07-27 23:15:16,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=76417.33333333333, ans=0.09899494936611666
+2024-07-27 23:15:30,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.920e+01 6.889e+01 8.181e+01 1.186e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 23:15:35,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=76457.33333333333, ans=0.125
+2024-07-27 23:15:45,607 INFO [train.py:1114] (1/4) Epoch 6, batch 6250, loss[loss=0.2586, simple_loss=0.3326, pruned_loss=0.09225, over 4813.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3105, pruned_loss=0.07535, over 933094.88 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:15:46,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=76484.0, ans=0.125
+2024-07-27 23:15:50,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=76484.0, ans=0.0
+2024-07-27 23:15:51,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0
+2024-07-27 23:16:08,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=76524.0, ans=0.125
+2024-07-27 23:16:10,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=76524.0, ans=0.0
+2024-07-27 23:16:10,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.52 vs. limit=22.5
+2024-07-27 23:16:15,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=76537.33333333333, ans=0.0
+2024-07-27 23:16:21,061 INFO [train.py:1114] (1/4) Epoch 6, batch 6300, loss[loss=0.2008, simple_loss=0.2753, pruned_loss=0.06312, over 4490.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3117, pruned_loss=0.07599, over 929302.93 frames. ], batch size: 10, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:16:24,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76550.66666666667, ans=0.125
+2024-07-27 23:16:28,722 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.66 vs. limit=10.0
+2024-07-27 23:16:38,837 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.917e+01 6.519e+01 7.440e+01 1.686e+02, threshold=1.304e+02, percent-clipped=1.0
+2024-07-27 23:16:44,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=76590.66666666667, ans=0.125
+2024-07-27 23:16:48,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=76604.0, ans=0.125
+2024-07-27 23:16:53,924 INFO [train.py:1114] (1/4) Epoch 6, batch 6350, loss[loss=0.2493, simple_loss=0.3321, pruned_loss=0.08326, over 4592.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3117, pruned_loss=0.0754, over 933557.67 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:17:05,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=76630.66666666667, ans=0.125
+2024-07-27 23:17:11,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=76644.0, ans=0.125
+2024-07-27 23:17:12,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76644.0, ans=0.2
+2024-07-27 23:17:12,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=76644.0, ans=0.0
+2024-07-27 23:17:12,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=76644.0, ans=0.0
+2024-07-27 23:17:26,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=76684.0, ans=0.0
+2024-07-27 23:17:27,174 INFO [train.py:1114] (1/4) Epoch 6, batch 6400, loss[loss=0.2512, simple_loss=0.3353, pruned_loss=0.08357, over 4633.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3112, pruned_loss=0.07565, over 935327.96 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:17:30,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.76 vs. limit=22.5
+2024-07-27 23:17:35,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=15.0
+2024-07-27 23:17:36,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=76697.33333333333, ans=0.2
+2024-07-27 23:17:37,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.44 vs. limit=15.0
+2024-07-27 23:17:42,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=76710.66666666667, ans=0.125
+2024-07-27 23:17:44,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 6.138e+01 6.927e+01 7.775e+01 1.168e+02, threshold=1.385e+02, percent-clipped=0.0
+2024-07-27 23:17:45,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76710.66666666667, ans=0.1
+2024-07-27 23:18:00,340 INFO [train.py:1114] (1/4) Epoch 6, batch 6450, loss[loss=0.2764, simple_loss=0.3541, pruned_loss=0.09941, over 4427.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3115, pruned_loss=0.07523, over 938976.93 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:18:04,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=76750.66666666667, ans=0.125
+2024-07-27 23:18:31,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=76790.66666666667, ans=0.2
+2024-07-27 23:18:34,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76804.0, ans=0.1
+2024-07-27 23:18:40,299 INFO [train.py:1114] (1/4) Epoch 6, batch 6500, loss[loss=0.3612, simple_loss=0.4038, pruned_loss=0.1593, over 3368.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3102, pruned_loss=0.07432, over 940178.98 frames. ], batch size: 35, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:18:48,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.87 vs. limit=22.5
+2024-07-27 23:18:49,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=76830.66666666667, ans=0.125
+2024-07-27 23:18:58,073 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.177e+01 7.054e+01 8.466e+01 1.519e+02, threshold=1.411e+02, percent-clipped=2.0
+2024-07-27 23:19:00,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=12.0
+2024-07-27 23:19:13,479 INFO [train.py:1114] (1/4) Epoch 6, batch 6550, loss[loss=0.1829, simple_loss=0.2706, pruned_loss=0.04763, over 4794.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3103, pruned_loss=0.07444, over 943038.17 frames. ], batch size: 11, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:19:13,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=76884.0, ans=0.125
+2024-07-27 23:19:14,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=76884.0, ans=0.0
+2024-07-27 23:19:24,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=76897.33333333333, ans=0.125
+2024-07-27 23:19:38,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=76924.0, ans=0.2
+2024-07-27 23:19:47,589 INFO [train.py:1114] (1/4) Epoch 6, batch 6600, loss[loss=0.2118, simple_loss=0.3078, pruned_loss=0.05787, over 4938.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3108, pruned_loss=0.07453, over 945084.94 frames. ], batch size: 14, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:19:50,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.33 vs. limit=15.0
+2024-07-27 23:19:58,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76964.0, ans=0.0
+2024-07-27 23:19:59,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=15.0
+2024-07-27 23:20:01,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=76977.33333333333, ans=0.125
+2024-07-27 23:20:05,789 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 6.034e+01 7.063e+01 8.869e+01 1.315e+02, threshold=1.413e+02, percent-clipped=0.0
+2024-07-27 23:20:21,203 INFO [train.py:1114] (1/4) Epoch 6, batch 6650, loss[loss=0.2442, simple_loss=0.3272, pruned_loss=0.08063, over 4604.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.311, pruned_loss=0.07515, over 943560.39 frames. ], batch size: 17, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:20:23,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.37 vs. limit=22.5
+2024-07-27 23:20:27,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77030.66666666667, ans=0.1
+2024-07-27 23:20:36,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77044.0, ans=0.1
+2024-07-27 23:20:57,068 INFO [train.py:1114] (1/4) Epoch 6, batch 6700, loss[loss=0.2624, simple_loss=0.3447, pruned_loss=0.09006, over 4674.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3124, pruned_loss=0.07615, over 942162.85 frames. ], batch size: 19, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:21:07,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=77097.33333333333, ans=0.05
+2024-07-27 23:21:12,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77110.66666666667, ans=0.1
+2024-07-27 23:21:15,127 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.982e+01 6.173e+01 6.934e+01 8.423e+01 1.268e+02, threshold=1.387e+02, percent-clipped=0.0
+2024-07-27 23:21:24,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0
+2024-07-27 23:21:34,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=77137.33333333333, ans=0.125
+2024-07-27 23:21:35,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=77137.33333333333, ans=0.125
+2024-07-27 23:21:35,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=77137.33333333333, ans=10.0
+2024-07-27 23:21:39,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.07 vs. limit=15.0
+2024-07-27 23:21:41,297 INFO [train.py:1114] (1/4) Epoch 6, batch 6750, loss[loss=0.2771, simple_loss=0.3434, pruned_loss=0.1055, over 4204.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3126, pruned_loss=0.07628, over 940258.88 frames. ], batch size: 25, lr: 1.22e-02, grad_scale: 16.0
+2024-07-27 23:21:44,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77150.66666666667, ans=0.1
+2024-07-27 23:21:50,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.13 vs. limit=12.0
+2024-07-27 23:21:52,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=77164.0, ans=0.125
+2024-07-27 23:21:56,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=77177.33333333333, ans=22.5
+2024-07-27 23:21:57,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=77177.33333333333, ans=0.2
+2024-07-27 23:22:16,816 INFO [train.py:1114] (1/4) Epoch 6, batch 6800, loss[loss=0.2022, simple_loss=0.3069, pruned_loss=0.04875, over 4631.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3115, pruned_loss=0.07529, over 938677.97 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:23:06,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=77230.66666666667, ans=0.2
+2024-07-27 23:23:14,540 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 5.858e+01 6.351e+01 7.283e+01 1.199e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-27 23:23:25,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=77270.66666666667, ans=0.0
+2024-07-27 23:23:25,675 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:23:27,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=15.0
+2024-07-27 23:23:29,481 INFO [train.py:1114] (1/4) Epoch 6, batch 6850, loss[loss=0.2504, simple_loss=0.3266, pruned_loss=0.08713, over 4681.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3102, pruned_loss=0.07498, over 940551.37 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:23:31,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=77284.0, ans=0.0
+2024-07-27 23:24:01,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=77337.33333333333, ans=10.0
+2024-07-27 23:24:03,350 INFO [train.py:1114] (1/4) Epoch 6, batch 6900, loss[loss=0.2219, simple_loss=0.3059, pruned_loss=0.069, over 4960.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3098, pruned_loss=0.07481, over 942551.24 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:24:03,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=77350.66666666667, ans=0.0
+2024-07-27 23:24:12,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.34 vs. 
limit=15.0 +2024-07-27 23:24:21,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.966e+01 6.630e+01 7.138e+01 1.259e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 23:24:38,560 INFO [train.py:1114] (1/4) Epoch 6, batch 6950, loss[loss=0.1819, simple_loss=0.258, pruned_loss=0.05291, over 4518.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3098, pruned_loss=0.0752, over 940376.63 frames. ], batch size: 10, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:24:50,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=77430.66666666667, ans=0.0 +2024-07-27 23:24:51,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=77444.0, ans=0.025 +2024-07-27 23:24:54,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-07-27 23:25:02,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=77457.33333333333, ans=0.125 +2024-07-27 23:25:12,290 INFO [train.py:1114] (1/4) Epoch 6, batch 7000, loss[loss=0.2333, simple_loss=0.3062, pruned_loss=0.08021, over 4627.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3086, pruned_loss=0.07444, over 938748.15 frames. ], batch size: 17, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:25:14,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77484.0, ans=0.1 +2024-07-27 23:25:20,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=77497.33333333333, ans=0.125 +2024-07-27 23:25:22,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=77497.33333333333, ans=0.125 +2024-07-27 23:25:24,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.22 vs. limit=15.0 +2024-07-27 23:25:26,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=77510.66666666667, ans=0.0 +2024-07-27 23:25:26,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=15.0 +2024-07-27 23:25:30,327 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.154e+01 6.184e+01 7.015e+01 8.119e+01 1.355e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-27 23:25:39,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=77537.33333333333, ans=10.0 +2024-07-27 23:25:44,777 INFO [train.py:1114] (1/4) Epoch 6, batch 7050, loss[loss=0.2293, simple_loss=0.3186, pruned_loss=0.06998, over 4688.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3094, pruned_loss=0.07503, over 941859.50 frames. 
], batch size: 19, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:25:54,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=77564.0, ans=0.125 +2024-07-27 23:26:00,371 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:26:08,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=77590.66666666667, ans=0.2 +2024-07-27 23:26:17,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=77617.33333333333, ans=0.125 +2024-07-27 23:26:18,231 INFO [train.py:1114] (1/4) Epoch 6, batch 7100, loss[loss=0.2832, simple_loss=0.3508, pruned_loss=0.1078, over 4796.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3121, pruned_loss=0.07678, over 936868.98 frames. ], batch size: 15, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:26:23,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=77617.33333333333, ans=0.025 +2024-07-27 23:26:34,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=77630.66666666667, ans=0.0 +2024-07-27 23:26:35,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77644.0, ans=0.1 +2024-07-27 23:26:40,769 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.696e+01 6.046e+01 6.711e+01 7.848e+01 1.418e+02, threshold=1.342e+02, percent-clipped=1.0 +2024-07-27 23:26:49,689 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.40 vs. limit=15.0 +2024-07-27 23:26:52,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=77670.66666666667, ans=0.0 +2024-07-27 23:26:55,143 INFO [train.py:1114] (1/4) Epoch 6, batch 7150, loss[loss=0.2608, simple_loss=0.3401, pruned_loss=0.09081, over 4565.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3112, pruned_loss=0.07646, over 937853.31 frames. 
], batch size: 21, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:26:57,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=77684.0, ans=0.0 +2024-07-27 23:27:03,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=77697.33333333333, ans=0.125 +2024-07-27 23:27:10,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77710.66666666667, ans=0.125 +2024-07-27 23:27:16,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=77724.0, ans=0.125 +2024-07-27 23:27:19,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=77724.0, ans=0.2 +2024-07-27 23:27:23,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=77737.33333333333, ans=0.0 +2024-07-27 23:27:23,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=77737.33333333333, ans=0.025 +2024-07-27 23:27:27,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=77737.33333333333, ans=0.125 +2024-07-27 23:27:28,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=77737.33333333333, ans=0.2 +2024-07-27 23:27:29,626 INFO [train.py:1114] (1/4) Epoch 6, batch 7200, loss[loss=0.2422, simple_loss=0.3275, pruned_loss=0.0785, over 4803.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3117, pruned_loss=0.07627, over 938067.52 frames. ], batch size: 15, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:27:46,339 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0 +2024-07-27 23:27:47,986 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 6.035e+01 6.773e+01 8.115e+01 1.390e+02, threshold=1.355e+02, percent-clipped=1.0 +2024-07-27 23:27:58,048 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=15.0 +2024-07-27 23:27:59,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=77804.0, ans=0.0 +2024-07-27 23:28:02,492 INFO [train.py:1114] (1/4) Epoch 6, batch 7250, loss[loss=0.2304, simple_loss=0.2998, pruned_loss=0.08044, over 4851.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3106, pruned_loss=0.07596, over 939930.41 frames. 
], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:28:11,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=77830.66666666667, ans=0.1 +2024-07-27 23:28:12,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=77830.66666666667, ans=0.0 +2024-07-27 23:28:21,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77844.0, ans=0.1 +2024-07-27 23:28:21,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.68 vs. limit=15.0 +2024-07-27 23:28:25,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=77857.33333333333, ans=0.0 +2024-07-27 23:28:29,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-27 23:28:36,955 INFO [train.py:1114] (1/4) Epoch 6, batch 7300, loss[loss=0.2174, simple_loss=0.3003, pruned_loss=0.06725, over 4843.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3109, pruned_loss=0.07604, over 940401.04 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:28:38,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=77884.0, ans=0.2 +2024-07-27 23:28:41,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.21 vs. limit=15.0 +2024-07-27 23:28:52,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=77910.66666666667, ans=0.125 +2024-07-27 23:28:55,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=77910.66666666667, ans=0.025 +2024-07-27 23:28:55,454 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.159e+01 6.187e+01 6.781e+01 8.208e+01 1.800e+02, threshold=1.356e+02, percent-clipped=4.0 +2024-07-27 23:28:55,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=12.0 +2024-07-27 23:29:02,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=77924.0, ans=0.0 +2024-07-27 23:29:09,750 INFO [train.py:1114] (1/4) Epoch 6, batch 7350, loss[loss=0.2434, simple_loss=0.3168, pruned_loss=0.08499, over 4641.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3106, pruned_loss=0.07566, over 939426.41 frames. 
], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:29:17,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=77964.0, ans=0.2 +2024-07-27 23:29:20,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=77964.0, ans=0.125 +2024-07-27 23:29:22,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=77977.33333333333, ans=0.0 +2024-07-27 23:29:33,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=77990.66666666667, ans=0.0 +2024-07-27 23:29:36,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.12 vs. limit=15.0 +2024-07-27 23:29:42,396 INFO [train.py:1114] (1/4) Epoch 6, batch 7400, loss[loss=0.2307, simple_loss=0.32, pruned_loss=0.07065, over 4692.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3109, pruned_loss=0.0753, over 940551.25 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:29:43,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=78017.33333333333, ans=0.125 +2024-07-27 23:29:44,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78017.33333333333, ans=0.0 +2024-07-27 23:29:46,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78017.33333333333, ans=0.1 +2024-07-27 23:29:50,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=78030.66666666667, ans=0.125 +2024-07-27 23:29:55,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=78044.0, ans=0.125 +2024-07-27 23:30:00,721 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 6.318e+01 7.281e+01 8.792e+01 1.336e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 23:30:12,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=78057.33333333333, ans=10.0 +2024-07-27 23:30:13,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78057.33333333333, ans=0.0 +2024-07-27 23:30:14,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78057.33333333333, ans=0.1 +2024-07-27 23:30:17,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=78057.33333333333, ans=0.125 +2024-07-27 23:30:31,753 INFO [train.py:1114] (1/4) Epoch 6, batch 7450, loss[loss=0.2188, simple_loss=0.2864, pruned_loss=0.07561, over 4625.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3091, pruned_loss=0.07467, over 938328.09 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:30:35,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. 
limit=12.0 +2024-07-27 23:30:56,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=78124.0, ans=0.09899494936611666 +2024-07-27 23:31:04,795 INFO [train.py:1114] (1/4) Epoch 6, batch 7500, loss[loss=0.2661, simple_loss=0.331, pruned_loss=0.1006, over 3367.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.31, pruned_loss=0.07541, over 936493.28 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:31:08,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=78150.66666666667, ans=0.125 +2024-07-27 23:31:08,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=78150.66666666667, ans=0.0 +2024-07-27 23:31:10,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=78150.66666666667, ans=0.0 +2024-07-27 23:31:24,015 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.209e+01 6.853e+01 7.670e+01 1.087e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-27 23:31:28,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=78190.66666666667, ans=0.0 +2024-07-27 23:31:37,754 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:31:38,274 INFO [train.py:1114] (1/4) Epoch 6, batch 7550, loss[loss=0.2245, simple_loss=0.3131, pruned_loss=0.06794, over 4663.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3116, pruned_loss=0.07592, over 936409.61 frames. ], batch size: 17, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:31:45,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.55 vs. limit=22.5 +2024-07-27 23:31:57,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=78244.0, ans=0.0 +2024-07-27 23:32:07,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.80 vs. limit=22.5 +2024-07-27 23:32:11,964 INFO [train.py:1114] (1/4) Epoch 6, batch 7600, loss[loss=0.2642, simple_loss=0.3382, pruned_loss=0.09511, over 4810.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3106, pruned_loss=0.07551, over 937952.04 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:32:25,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.33 vs. 
limit=15.0 +2024-07-27 23:32:26,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=78297.33333333333, ans=0.0 +2024-07-27 23:32:33,859 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.092e+01 6.628e+01 7.251e+01 1.124e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 23:32:34,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=78310.66666666667, ans=0.125 +2024-07-27 23:32:35,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=78324.0, ans=0.125 +2024-07-27 23:32:50,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78337.33333333333, ans=0.1 +2024-07-27 23:32:52,071 INFO [train.py:1114] (1/4) Epoch 6, batch 7650, loss[loss=0.1922, simple_loss=0.2706, pruned_loss=0.0569, over 4948.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3101, pruned_loss=0.07514, over 937072.90 frames. ], batch size: 12, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:33:20,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.17 vs. limit=10.0 +2024-07-27 23:33:37,389 INFO [train.py:1114] (1/4) Epoch 6, batch 7700, loss[loss=0.2138, simple_loss=0.2979, pruned_loss=0.06484, over 4698.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.31, pruned_loss=0.0749, over 934531.21 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:34:05,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78444.0, ans=0.0 +2024-07-27 23:34:11,043 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 6.189e+01 6.836e+01 7.774e+01 1.390e+02, threshold=1.367e+02, percent-clipped=1.0 +2024-07-27 23:34:20,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=78457.33333333333, ans=0.2 +2024-07-27 23:34:21,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.97 vs. limit=15.0 +2024-07-27 23:34:23,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=78470.66666666667, ans=0.0 +2024-07-27 23:34:25,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=78470.66666666667, ans=0.125 +2024-07-27 23:34:28,152 INFO [train.py:1114] (1/4) Epoch 6, batch 7750, loss[loss=0.2322, simple_loss=0.3177, pruned_loss=0.07329, over 4924.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3114, pruned_loss=0.07515, over 936041.75 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:34:28,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=78484.0, ans=0.2 +2024-07-27 23:34:32,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=78484.0, ans=0.125 +2024-07-27 23:34:33,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.45 vs. 
limit=22.5 +2024-07-27 23:34:35,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78497.33333333333, ans=0.125 +2024-07-27 23:34:39,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78497.33333333333, ans=0.0 +2024-07-27 23:34:42,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=78510.66666666667, ans=0.0 +2024-07-27 23:34:45,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=78510.66666666667, ans=0.0 +2024-07-27 23:34:46,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=78510.66666666667, ans=0.0 +2024-07-27 23:34:59,788 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.27 vs. limit=22.5 +2024-07-27 23:35:01,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78537.33333333333, ans=0.125 +2024-07-27 23:35:04,334 INFO [train.py:1114] (1/4) Epoch 6, batch 7800, loss[loss=0.2226, simple_loss=0.3127, pruned_loss=0.06628, over 4673.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3121, pruned_loss=0.07505, over 937457.87 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:35:04,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=78550.66666666667, ans=0.0 +2024-07-27 23:35:12,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=78564.0, ans=0.0 +2024-07-27 23:35:12,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78564.0, ans=0.1 +2024-07-27 23:35:13,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=78564.0, ans=0.025 +2024-07-27 23:35:17,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=78577.33333333333, ans=0.125 +2024-07-27 23:35:22,303 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.050e+01 6.523e+01 7.521e+01 9.871e+01, threshold=1.305e+02, percent-clipped=0.0 +2024-07-27 23:35:30,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=78604.0, ans=0.125 +2024-07-27 23:35:33,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=78604.0, ans=0.2 +2024-07-27 23:35:36,949 INFO [train.py:1114] (1/4) Epoch 6, batch 7850, loss[loss=0.2068, simple_loss=0.2838, pruned_loss=0.06489, over 4520.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3128, pruned_loss=0.07545, over 936275.92 frames. ], batch size: 10, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:35:56,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=78657.33333333333, ans=0.2 +2024-07-27 23:35:59,212 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. 
limit=6.0 +2024-07-27 23:36:00,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=78657.33333333333, ans=0.125 +2024-07-27 23:36:11,566 INFO [train.py:1114] (1/4) Epoch 6, batch 7900, loss[loss=0.2678, simple_loss=0.3405, pruned_loss=0.09758, over 4870.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3138, pruned_loss=0.07579, over 933522.10 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:36:11,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=78684.0, ans=0.025 +2024-07-27 23:36:13,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=78684.0, ans=0.2 +2024-07-27 23:36:18,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.50 vs. limit=22.5 +2024-07-27 23:36:20,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=78697.33333333333, ans=0.0 +2024-07-27 23:36:29,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78710.66666666667, ans=0.125 +2024-07-27 23:36:29,746 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.141e+01 6.160e+01 7.004e+01 8.333e+01 1.233e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 23:36:29,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=78710.66666666667, ans=0.125 +2024-07-27 23:36:31,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78724.0, ans=0.125 +2024-07-27 23:36:31,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78724.0, ans=0.1 +2024-07-27 23:36:39,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78737.33333333333, ans=0.0 +2024-07-27 23:36:44,057 INFO [train.py:1114] (1/4) Epoch 6, batch 7950, loss[loss=0.2866, simple_loss=0.3504, pruned_loss=0.1115, over 3595.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.313, pruned_loss=0.07564, over 935747.74 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:36:48,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=78750.66666666667, ans=0.0 +2024-07-27 23:36:48,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=78750.66666666667, ans=0.025 +2024-07-27 23:36:53,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=78764.0, ans=0.125 +2024-07-27 23:36:59,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.94 vs. 
limit=12.0 +2024-07-27 23:36:59,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=78777.33333333333, ans=0.125 +2024-07-27 23:37:00,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=78777.33333333333, ans=0.125 +2024-07-27 23:37:02,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=78790.66666666667, ans=0.125 +2024-07-27 23:37:16,326 INFO [train.py:1114] (1/4) Epoch 6, batch 8000, loss[loss=0.2055, simple_loss=0.2787, pruned_loss=0.0662, over 4620.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3106, pruned_loss=0.07473, over 935222.03 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:37:27,328 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:37:34,332 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.441e+01 5.938e+01 6.564e+01 7.603e+01 1.476e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-27 23:37:45,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=78870.66666666667, ans=0.125 +2024-07-27 23:37:48,715 INFO [train.py:1114] (1/4) Epoch 6, batch 8050, loss[loss=0.238, simple_loss=0.3247, pruned_loss=0.0756, over 4801.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3099, pruned_loss=0.07427, over 934673.50 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:37:59,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=78897.33333333333, ans=0.125 +2024-07-27 23:38:03,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=78910.66666666667, ans=0.125 +2024-07-27 23:38:05,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=78910.66666666667, ans=0.09899494936611666 +2024-07-27 23:38:05,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=78910.66666666667, ans=0.125 +2024-07-27 23:38:05,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=78910.66666666667, ans=0.1 +2024-07-27 23:38:10,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.98 vs. limit=22.5 +2024-07-27 23:38:19,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=78937.33333333333, ans=0.0 +2024-07-27 23:38:23,500 INFO [train.py:1114] (1/4) Epoch 6, batch 8100, loss[loss=0.2098, simple_loss=0.2973, pruned_loss=0.0612, over 4797.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3117, pruned_loss=0.075, over 934473.75 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:38:26,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.97 vs. limit=12.0 +2024-07-27 23:38:35,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.21 vs. 
limit=15.0 +2024-07-27 23:38:39,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.62 vs. limit=15.0 +2024-07-27 23:38:39,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78977.33333333333, ans=0.0 +2024-07-27 23:38:41,456 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.051e+01 5.969e+01 6.429e+01 6.997e+01 9.390e+01, threshold=1.286e+02, percent-clipped=0.0 +2024-07-27 23:38:43,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=78990.66666666667, ans=0.125 +2024-07-27 23:38:55,582 INFO [train.py:1114] (1/4) Epoch 6, batch 8150, loss[loss=0.2519, simple_loss=0.3357, pruned_loss=0.08404, over 4803.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3107, pruned_loss=0.07516, over 937606.06 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:39:00,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=79017.33333333333, ans=0.2 +2024-07-27 23:39:09,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=79044.0, ans=0.07 +2024-07-27 23:39:20,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.39 vs. limit=22.5 +2024-07-27 23:39:23,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=79070.66666666667, ans=0.125 +2024-07-27 23:39:28,469 INFO [train.py:1114] (1/4) Epoch 6, batch 8200, loss[loss=0.2301, simple_loss=0.3238, pruned_loss=0.0682, over 4800.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3107, pruned_loss=0.07449, over 938841.75 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:39:31,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=79084.0, ans=0.0 +2024-07-27 23:39:32,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=79084.0, ans=0.0 +2024-07-27 23:39:36,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=79097.33333333333, ans=0.025 +2024-07-27 23:39:47,311 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 5.934e+01 6.554e+01 7.415e+01 1.580e+02, threshold=1.311e+02, percent-clipped=1.0 +2024-07-27 23:39:54,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-07-27 23:39:59,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=79137.33333333333, ans=0.125 +2024-07-27 23:40:01,170 INFO [train.py:1114] (1/4) Epoch 6, batch 8250, loss[loss=0.2259, simple_loss=0.3162, pruned_loss=0.06776, over 4894.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3106, pruned_loss=0.07484, over 939073.05 frames. 
], batch size: 13, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:40:06,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79150.66666666667, ans=0.125 +2024-07-27 23:40:23,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=15.0 +2024-07-27 23:40:30,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79204.0, ans=0.1 +2024-07-27 23:40:33,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=22.5 +2024-07-27 23:40:33,939 INFO [train.py:1114] (1/4) Epoch 6, batch 8300, loss[loss=0.2397, simple_loss=0.3203, pruned_loss=0.07954, over 4905.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3115, pruned_loss=0.07529, over 938813.83 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:40:36,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=79217.33333333333, ans=0.125 +2024-07-27 23:40:45,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0 +2024-07-27 23:40:54,342 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.146e+01 5.976e+01 6.704e+01 7.897e+01 1.175e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-27 23:41:04,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79270.66666666667, ans=0.125 +2024-07-27 23:41:07,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.10 vs. limit=15.0 +2024-07-27 23:41:08,391 INFO [train.py:1114] (1/4) Epoch 6, batch 8350, loss[loss=0.2411, simple_loss=0.3293, pruned_loss=0.07643, over 4790.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.312, pruned_loss=0.07529, over 941607.84 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:41:09,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=79284.0, ans=0.1 +2024-07-27 23:41:09,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=79284.0, ans=0.09899494936611666 +2024-07-27 23:41:26,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=79310.66666666667, ans=0.125 +2024-07-27 23:41:31,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=79324.0, ans=0.125 +2024-07-27 23:41:40,603 INFO [train.py:1114] (1/4) Epoch 6, batch 8400, loss[loss=0.1904, simple_loss=0.2808, pruned_loss=0.04997, over 4778.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3132, pruned_loss=0.07624, over 939941.53 frames. 
], batch size: 12, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:41:44,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=79350.66666666667, ans=0.09899494936611666 +2024-07-27 23:41:53,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79377.33333333333, ans=0.1 +2024-07-27 23:41:53,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=79377.33333333333, ans=0.0 +2024-07-27 23:41:58,571 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 6.271e+01 7.007e+01 8.306e+01 1.253e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 23:41:58,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-27 23:42:03,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=79390.66666666667, ans=0.125 +2024-07-27 23:42:06,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.30 vs. limit=10.0 +2024-07-27 23:42:12,526 INFO [train.py:1114] (1/4) Epoch 6, batch 8450, loss[loss=0.223, simple_loss=0.2973, pruned_loss=0.07438, over 4797.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.3149, pruned_loss=0.07727, over 938962.19 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:42:21,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79430.66666666667, ans=0.125 +2024-07-27 23:42:23,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79430.66666666667, ans=0.1 +2024-07-27 23:42:33,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=79457.33333333333, ans=0.125 +2024-07-27 23:42:42,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.03 vs. limit=15.0 +2024-07-27 23:42:43,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79470.66666666667, ans=0.125 +2024-07-27 23:42:45,635 INFO [train.py:1114] (1/4) Epoch 6, batch 8500, loss[loss=0.1966, simple_loss=0.269, pruned_loss=0.06206, over 4624.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3141, pruned_loss=0.07683, over 938644.04 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:42:50,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.07 vs. limit=15.0 +2024-07-27 23:43:04,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.862e+01 6.704e+01 7.850e+01 1.312e+02, threshold=1.341e+02, percent-clipped=0.0
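The recurring WARNING [optim.py:487] entries are the optimizer's adaptive gradient-clipping diagnostics: the five values after "grad-norm quartiles" read as the min/25%/median/75%/max of recently observed per-batch gradient norms, the reported threshold is Clipping_scale times the median (in the entry just above, 2.0 * 6.704e+01 gives 1.341e+02), and percent-clipped tracks how often recent batches exceeded that threshold. A minimal sketch of that bookkeeping in plain PyTorch; the function name and arguments are illustrative, not the actual optim.py API:

```python
import torch

def clip_by_recent_median(model, norm_history, clipping_scale=2.0, window=128):
    # Total gradient norm of this batch.
    grads = [p.grad.norm() for p in model.parameters() if p.grad is not None]
    total_norm = torch.norm(torch.stack(grads)).item()

    # Sliding window of recent per-batch norms.
    norm_history.append(total_norm)
    del norm_history[:-window]

    # The five numbers the WARNING line prints: min/25%/median/75%/max.
    quartiles = torch.quantile(torch.tensor(norm_history),
                               torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()

    clipped = total_norm > threshold  # feeds the percent-clipped statistic
    torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=threshold)
    return quartiles.tolist(), threshold, clipped
```

With percent-clipped at 0.0 in most entries here, the threshold is rarely binding; the occasional nonzero values mark reporting intervals in which a few batches were clipped.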
+2024-07-27 23:43:05,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.97 vs. limit=15.0 +2024-07-27 23:43:08,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79524.0, ans=0.125 +2024-07-27 23:43:12,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=79537.33333333333, ans=0.0 +2024-07-27 23:43:19,073 INFO [train.py:1114] (1/4) Epoch 6, batch 8550, loss[loss=0.2236, simple_loss=0.2844, pruned_loss=0.0814, over 4802.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3127, pruned_loss=0.07598, over 939354.72 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:43:51,125 INFO [train.py:1114] (1/4) Epoch 6, batch 8600, loss[loss=0.2152, simple_loss=0.2936, pruned_loss=0.06843, over 4816.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3109, pruned_loss=0.07557, over 939034.76 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:44:04,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.34 vs. limit=12.0 +2024-07-27 23:44:09,903 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.199e+01 6.001e+01 6.460e+01 7.651e+01 1.281e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-27 23:44:16,228 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.79 vs. limit=22.5 +2024-07-27 23:44:20,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=79670.66666666667, ans=0.0 +2024-07-27 23:44:24,676 INFO [train.py:1114] (1/4) Epoch 6, batch 8650, loss[loss=0.238, simple_loss=0.318, pruned_loss=0.07902, over 4905.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3115, pruned_loss=0.07577, over 940379.01 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:44:29,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=79684.0, ans=0.0 +2024-07-27 23:44:41,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79710.66666666667, ans=0.125 +2024-07-27 23:44:43,994 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:44:45,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=79724.0, ans=0.125 +2024-07-27 23:44:47,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=79724.0, ans=0.125 +2024-07-27 23:44:52,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.84 vs. limit=15.0 +2024-07-27 23:44:56,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79750.66666666667, ans=0.1
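The scaling.py:214 ScheduledFloat entries show module hyperparameters (dropout_p, skip rates, balancer probabilities, scale_min and the like) being resolved from the current batch_count, with the logged ans being the value in effect at that point; that is why the same name logs different values as training progresses. Read as a piecewise-linear schedule over batch count, the mechanism looks roughly like the sketch below, with illustrative breakpoints rather than the real scaling.py internals:

```python
class ScheduledFloat:
    """A float that is a piecewise-linear function of the batch count."""

    def __init__(self, *points):
        # points: (batch_count, value) pairs, assumed sorted by batch_count.
        self.points = list(points)

    def value(self, batch_count):
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                # Linear interpolation inside this segment.
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)
        return pts[-1][1]

# E.g. a dropout that decays from 0.3 to 0.1 over the first 20000 batches,
# then stays flat, matching entries like "dropout_p, batch_count=79750.67, ans=0.1".
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(79750.66666666667))  # -> 0.1
```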
+2024-07-27 23:44:56,636 INFO [train.py:1114] (1/4) Epoch 6, batch 8700, loss[loss=0.2049, simple_loss=0.2837, pruned_loss=0.0631, over 4763.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3129, pruned_loss=0.07667, over 938039.16 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:45:03,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=79764.0, ans=0.125 +2024-07-27 23:45:14,453 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.110e+01 6.862e+01 8.564e+01 1.344e+02, threshold=1.372e+02, percent-clipped=1.0 +2024-07-27 23:45:17,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=79790.66666666667, ans=0.025 +2024-07-27 23:45:24,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.27 vs. limit=10.0 +2024-07-27 23:45:24,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=79804.0, ans=0.2 +2024-07-27 23:45:25,702 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:45:28,840 INFO [train.py:1114] (1/4) Epoch 6, batch 8750, loss[loss=0.2383, simple_loss=0.3173, pruned_loss=0.07972, over 4705.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3124, pruned_loss=0.07626, over 936879.79 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:45:34,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=79817.33333333333, ans=0.2 +2024-07-27 23:45:40,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=79830.66666666667, ans=0.2 +2024-07-27 23:45:49,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=79857.33333333333, ans=10.0 +2024-07-27 23:46:00,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79870.66666666667, ans=0.125 +2024-07-27 23:46:01,302 INFO [train.py:1114] (1/4) Epoch 6, batch 8800, loss[loss=0.2409, simple_loss=0.3264, pruned_loss=0.07767, over 4926.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3125, pruned_loss=0.07615, over 937612.68 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:46:16,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=79910.66666666667, ans=0.2 +2024-07-27 23:46:19,209 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.815e+01 6.538e+01 7.322e+01 9.632e+01, threshold=1.308e+02, percent-clipped=0.0 +2024-07-27 23:46:21,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=79924.0, ans=0.125 +2024-07-27 23:46:23,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79924.0, ans=0.1 +2024-07-27 23:46:27,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=79937.33333333333, ans=0.125
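The scaling.py:1024 Whitening entries compare a per-module whitening metric against a limit, usually well under it but occasionally over, as in the metric=10.27 vs. limit=10.0 entry just above. The metric gauges how far the channel covariance of a module's activations is from a multiple of the identity, with 1.0 meaning perfectly white. One way such a metric can be computed, shown as an illustration of the idea and not the exact scaling.py formula:

```python
import torch

def whitening_metric(x):
    """x: (num_frames, num_channels) activations.

    Returns a value >= 1.0 that equals 1.0 when the channel covariance is
    a multiple of the identity and grows as the variance concentrates in
    a few directions.
    """
    x = x - x.mean(dim=0)              # center each channel
    cov = (x.t() @ x) / x.shape[0]     # (C, C) covariance estimate
    eigs = torch.linalg.eigvalsh(cov)  # real eigenvalues, ascending
    c = x.shape[1]
    return (c * (eigs ** 2).sum() / eigs.sum() ** 2).item()

x = torch.randn(1000, 256)             # near-white input
print(whitening_metric(x))             # close to 1.0
```

On this reading, a value like 17.08 against a limit of 22.5 means the module's activations are noticeably non-white but still within tolerance.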
+2024-07-27 23:46:28,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.08 vs. limit=22.5 +2024-07-27 23:46:33,566 INFO [train.py:1114] (1/4) Epoch 6, batch 8850, loss[loss=0.2506, simple_loss=0.326, pruned_loss=0.08755, over 4522.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3119, pruned_loss=0.07606, over 932947.89 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:46:41,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=79964.0, ans=0.125 +2024-07-27 23:47:35,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=80017.33333333333, ans=15.0 +2024-07-27 23:47:35,383 INFO [train.py:1114] (1/4) Epoch 6, batch 8900, loss[loss=0.192, simple_loss=0.2679, pruned_loss=0.05806, over 4938.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3112, pruned_loss=0.07581, over 930884.08 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:47:42,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.79 vs. limit=12.0 +2024-07-27 23:47:51,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=80044.0, ans=0.0 +2024-07-27 23:47:53,362 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 6.167e+01 6.816e+01 7.855e+01 1.273e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 23:47:56,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=80057.33333333333, ans=0.125 +2024-07-27 23:47:57,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.90 vs. limit=10.0 +2024-07-27 23:48:00,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=80070.66666666667, ans=0.2 +2024-07-27 23:48:07,381 INFO [train.py:1114] (1/4) Epoch 6, batch 8950, loss[loss=0.3098, simple_loss=0.3826, pruned_loss=0.1185, over 4458.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3122, pruned_loss=0.07621, over 931728.16 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:48:08,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80084.0, ans=0.1 +2024-07-27 23:48:32,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=80124.0, ans=0.0 +2024-07-27 23:48:33,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80137.33333333333, ans=0.1 +2024-07-27 23:48:39,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=80150.66666666667, ans=0.125 +2024-07-27 23:48:40,045 INFO [train.py:1114] (1/4) Epoch 6, batch 9000, loss[loss=0.1966, simple_loss=0.276, pruned_loss=0.05855, over 4635.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.311, pruned_loss=0.07599, over 934679.34 frames. 
], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:48:40,045 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-27 23:48:47,410 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.7659, 2.1887, 3.6575, 3.4044, 3.8511, 3.4912, 3.1661, 2.3234], + device='cuda:1') +2024-07-27 23:48:52,409 INFO [train.py:1146] (1/4) Epoch 6, validation: loss=0.1898, simple_loss=0.2938, pruned_loss=0.0429, over 944034.00 frames. +2024-07-27 23:48:52,409 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-27 23:49:00,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=80164.0, ans=0.125 +2024-07-27 23:49:00,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=80164.0, ans=0.0 +2024-07-27 23:49:04,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=80177.33333333333, ans=0.2 +2024-07-27 23:49:10,599 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.150e+01 6.230e+01 7.342e+01 8.976e+01 1.203e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 23:49:11,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80190.66666666667, ans=0.1 +2024-07-27 23:49:11,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.18 vs. limit=15.0 +2024-07-27 23:49:14,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=80190.66666666667, ans=0.0 +2024-07-27 23:49:25,568 INFO [train.py:1114] (1/4) Epoch 6, batch 9050, loss[loss=0.1801, simple_loss=0.266, pruned_loss=0.0471, over 4524.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3098, pruned_loss=0.07557, over 934928.03 frames. ], batch size: 10, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:49:28,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=80217.33333333333, ans=0.125 +2024-07-27 23:49:30,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80217.33333333333, ans=0.1 +2024-07-27 23:49:32,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=80230.66666666667, ans=0.025 +2024-07-27 23:49:33,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.85 vs. limit=15.0 +2024-07-27 23:49:48,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=80244.0, ans=0.2 +2024-07-27 23:50:00,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=80270.66666666667, ans=0.125 +2024-07-27 23:50:03,560 INFO [train.py:1114] (1/4) Epoch 6, batch 9100, loss[loss=0.2237, simple_loss=0.3041, pruned_loss=0.07167, over 4927.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3086, pruned_loss=0.07442, over 937255.69 frames. 
], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:50:07,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80284.0, ans=0.0 +2024-07-27 23:50:15,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.72 vs. limit=15.0 +2024-07-27 23:50:18,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=80310.66666666667, ans=0.125 +2024-07-27 23:50:21,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.154e+01 7.130e+01 8.632e+01 1.081e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 23:50:27,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=80324.0, ans=0.125 +2024-07-27 23:50:35,539 INFO [train.py:1114] (1/4) Epoch 6, batch 9150, loss[loss=0.2383, simple_loss=0.3237, pruned_loss=0.0764, over 4812.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.309, pruned_loss=0.07399, over 936014.75 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:50:52,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80377.33333333333, ans=0.125 +2024-07-27 23:51:15,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80404.0, ans=0.1 +2024-07-27 23:51:18,716 INFO [train.py:1114] (1/4) Epoch 6, batch 9200, loss[loss=0.1872, simple_loss=0.2589, pruned_loss=0.05773, over 4846.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.309, pruned_loss=0.07417, over 937591.69 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:51:30,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=80430.66666666667, ans=0.0 +2024-07-27 23:51:37,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.30 vs. limit=10.0 +2024-07-27 23:51:40,367 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.086e+01 6.690e+01 8.259e+01 1.289e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-27 23:51:43,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=80457.33333333333, ans=0.125 +2024-07-27 23:51:46,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=80457.33333333333, ans=0.02 +2024-07-27 23:51:54,184 INFO [train.py:1114] (1/4) Epoch 6, batch 9250, loss[loss=0.1933, simple_loss=0.2824, pruned_loss=0.05213, over 4636.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.3075, pruned_loss=0.07317, over 938673.95 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:51:56,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.19 vs. 
limit=15.0 +2024-07-27 23:51:56,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=80484.0, ans=0.125 +2024-07-27 23:52:03,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=80497.33333333333, ans=0.125 +2024-07-27 23:52:17,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.07 vs. limit=22.5 +2024-07-27 23:52:20,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-07-27 23:52:22,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=80537.33333333333, ans=0.125 +2024-07-27 23:52:26,147 INFO [train.py:1114] (1/4) Epoch 6, batch 9300, loss[loss=0.1796, simple_loss=0.2528, pruned_loss=0.05324, over 4772.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3071, pruned_loss=0.07313, over 937831.38 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:52:35,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=80564.0, ans=0.125 +2024-07-27 23:52:38,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80577.33333333333, ans=0.125 +2024-07-27 23:52:43,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.04 vs. limit=15.0 +2024-07-27 23:52:43,891 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.063e+01 5.901e+01 6.419e+01 7.368e+01 1.271e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-27 23:52:53,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=80604.0, ans=0.0 +2024-07-27 23:52:53,823 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.275e-02 +2024-07-27 23:52:58,699 INFO [train.py:1114] (1/4) Epoch 6, batch 9350, loss[loss=0.1769, simple_loss=0.2535, pruned_loss=0.05012, over 4808.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3078, pruned_loss=0.07383, over 934698.10 frames. 
], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:53:04,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=80630.66666666667, ans=0.2 +2024-07-27 23:53:05,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=80630.66666666667, ans=0.125 +2024-07-27 23:53:22,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80657.33333333333, ans=0.1 +2024-07-27 23:53:23,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=80657.33333333333, ans=0.0 +2024-07-27 23:53:27,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=80670.66666666667, ans=15.0 +2024-07-27 23:53:31,201 INFO [train.py:1114] (1/4) Epoch 6, batch 9400, loss[loss=0.2617, simple_loss=0.3439, pruned_loss=0.08976, over 4689.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.31, pruned_loss=0.07533, over 932472.27 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:53:35,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=80684.0, ans=0.0 +2024-07-27 23:53:37,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80697.33333333333, ans=0.125 +2024-07-27 23:53:44,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=80710.66666666667, ans=0.2 +2024-07-27 23:53:46,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=80710.66666666667, ans=0.125 +2024-07-27 23:53:49,493 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.055e+01 7.065e+01 8.211e+01 1.397e+02, threshold=1.413e+02, percent-clipped=1.0 +2024-07-27 23:53:57,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80737.33333333333, ans=0.0 +2024-07-27 23:53:59,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.18 vs. limit=22.5 +2024-07-27 23:54:01,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80737.33333333333, ans=0.1 +2024-07-27 23:54:02,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.64 vs. limit=15.0 +2024-07-27 23:54:02,795 INFO [train.py:1114] (1/4) Epoch 6, batch 9450, loss[loss=0.2319, simple_loss=0.3115, pruned_loss=0.07613, over 4810.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3104, pruned_loss=0.07496, over 932026.09 frames. 
], batch size: 11, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:54:07,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=80750.66666666667, ans=10.0 +2024-07-27 23:54:09,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=80764.0, ans=0.09899494936611666 +2024-07-27 23:54:17,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=80777.33333333333, ans=0.0 +2024-07-27 23:54:20,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=80777.33333333333, ans=0.125 +2024-07-27 23:54:21,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80790.66666666667, ans=0.125 +2024-07-27 23:54:22,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80790.66666666667, ans=0.125 +2024-07-27 23:54:28,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-07-27 23:54:34,481 INFO [train.py:1114] (1/4) Epoch 6, batch 9500, loss[loss=0.2524, simple_loss=0.3386, pruned_loss=0.08306, over 4704.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.31, pruned_loss=0.07448, over 934491.12 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:54:35,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=80817.33333333333, ans=0.0 +2024-07-27 23:54:41,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=80830.66666666667, ans=0.0 +2024-07-27 23:54:46,206 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:54:47,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80844.0, ans=0.125 +2024-07-27 23:54:52,209 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 6.030e+01 6.974e+01 8.015e+01 1.181e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 23:54:59,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=80870.66666666667, ans=0.125 +2024-07-27 23:55:05,206 INFO [train.py:1114] (1/4) Epoch 6, batch 9550, loss[loss=0.2449, simple_loss=0.3226, pruned_loss=0.08363, over 4771.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3115, pruned_loss=0.07543, over 931898.70 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:55:06,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=80884.0, ans=0.0 +2024-07-27 23:55:14,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. 
limit=15.0 +2024-07-27 23:55:19,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80910.66666666667, ans=0.1 +2024-07-27 23:55:19,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=80910.66666666667, ans=0.1 +2024-07-27 23:55:27,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=80924.0, ans=0.025 +2024-07-27 23:55:28,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80924.0, ans=0.1 +2024-07-27 23:55:38,078 INFO [train.py:1114] (1/4) Epoch 6, batch 9600, loss[loss=0.3059, simple_loss=0.3659, pruned_loss=0.123, over 3398.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3113, pruned_loss=0.07529, over 931423.47 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:55:42,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-07-27 23:55:45,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=80964.0, ans=0.125 +2024-07-27 23:55:56,584 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.004e+01 6.228e+01 7.001e+01 7.870e+01 1.117e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 23:58:43,520 INFO [train.py:1114] (1/4) Epoch 6, batch 9650, loss[loss=0.2266, simple_loss=0.3141, pruned_loss=0.06952, over 4844.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3099, pruned_loss=0.07491, over 927345.88 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:58:45,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-27 23:58:46,058 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:58:46,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.51 vs. limit=15.0 +2024-07-27 23:59:03,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=81057.33333333333, ans=0.09899494936611666 +2024-07-27 23:59:15,242 INFO [train.py:1114] (1/4) Epoch 6, batch 9700, loss[loss=0.2435, simple_loss=0.3205, pruned_loss=0.08327, over 4193.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3113, pruned_loss=0.07561, over 925340.88 frames. 
], batch size: 25, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:59:19,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=81084.0, ans=0.125 +2024-07-27 23:59:21,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81097.33333333333, ans=0.0 +2024-07-27 23:59:26,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=81097.33333333333, ans=0.2 +2024-07-27 23:59:30,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81110.66666666667, ans=0.125 +2024-07-27 23:59:33,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 6.355e+01 7.161e+01 8.228e+01 1.300e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 23:59:46,568 INFO [train.py:1114] (1/4) Epoch 6, batch 9750, loss[loss=0.2366, simple_loss=0.3264, pruned_loss=0.07342, over 4677.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3101, pruned_loss=0.07472, over 925956.40 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:00:08,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=81164.0, ans=0.2 +2024-07-28 00:00:12,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=81164.0, ans=0.5 +2024-07-28 00:00:13,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=81164.0, ans=0.125 +2024-07-28 00:00:16,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=81177.33333333333, ans=0.125 +2024-07-28 00:00:17,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=81177.33333333333, ans=0.125 +2024-07-28 00:00:28,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.85 vs. limit=6.0 +2024-07-28 00:00:29,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=81204.0, ans=12.0 +2024-07-28 00:00:30,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=81204.0, ans=0.125 +2024-07-28 00:00:35,015 INFO [train.py:1114] (1/4) Epoch 6, batch 9800, loss[loss=0.1918, simple_loss=0.2926, pruned_loss=0.04549, over 4707.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3089, pruned_loss=0.07431, over 925477.58 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:00:38,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=81217.33333333333, ans=0.0 +2024-07-28 00:00:39,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=81217.33333333333, ans=0.2 +2024-07-28 00:00:44,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.02 vs. 
limit=22.5 +2024-07-28 00:00:46,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0 +2024-07-28 00:00:46,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=81244.0, ans=0.2 +2024-07-28 00:00:52,768 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.096e+01 6.416e+01 7.275e+01 8.758e+01 1.346e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-28 00:01:02,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81270.66666666667, ans=0.0 +2024-07-28 00:01:04,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=81284.0, ans=0.0 +2024-07-28 00:01:05,406 INFO [train.py:1114] (1/4) Epoch 6, batch 9850, loss[loss=0.2486, simple_loss=0.3268, pruned_loss=0.08514, over 4913.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3096, pruned_loss=0.07458, over 927482.82 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:01:09,795 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.71 vs. limit=10.0 +2024-07-28 00:01:10,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=81297.33333333333, ans=0.125 +2024-07-28 00:01:27,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=81324.0, ans=0.125 +2024-07-28 00:01:29,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=81324.0, ans=0.125 +2024-07-28 00:01:32,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=81337.33333333333, ans=0.125 +2024-07-28 00:01:33,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=81337.33333333333, ans=0.025 +2024-07-28 00:01:34,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81337.33333333333, ans=0.1 +2024-07-28 00:01:37,305 INFO [train.py:1114] (1/4) Epoch 6, batch 9900, loss[loss=0.2666, simple_loss=0.344, pruned_loss=0.09457, over 4829.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3108, pruned_loss=0.07543, over 926338.11 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:01:45,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.74 vs. limit=15.0 +2024-07-28 00:01:46,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. 
limit=15.0 +2024-07-28 00:01:54,972 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.847e+01 6.249e+01 6.784e+01 7.688e+01 1.136e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-28 00:01:58,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=81390.66666666667, ans=0.02 +2024-07-28 00:02:00,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81390.66666666667, ans=0.0 +2024-07-28 00:02:07,895 INFO [train.py:1114] (1/4) Epoch 6, batch 9950, loss[loss=0.2086, simple_loss=0.2868, pruned_loss=0.06522, over 4800.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3119, pruned_loss=0.07617, over 929430.21 frames. ], batch size: 11, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:02:12,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=81417.33333333333, ans=0.2 +2024-07-28 00:02:20,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=81444.0, ans=0.125 +2024-07-28 00:02:33,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81470.66666666667, ans=0.0 +2024-07-28 00:02:33,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=81470.66666666667, ans=0.2 +2024-07-28 00:02:37,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 00:02:39,504 INFO [train.py:1114] (1/4) Epoch 6, batch 10000, loss[loss=0.2536, simple_loss=0.326, pruned_loss=0.09062, over 4627.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3144, pruned_loss=0.07774, over 926962.22 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:02:39,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=81484.0, ans=0.125 +2024-07-28 00:02:44,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.32 vs. limit=15.0 +2024-07-28 00:02:47,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=81497.33333333333, ans=0.125 +2024-07-28 00:02:50,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=81497.33333333333, ans=0.1 +2024-07-28 00:02:57,853 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.998e+01 6.471e+01 7.600e+01 1.218e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 00:03:07,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=81537.33333333333, ans=0.0 +2024-07-28 00:03:11,403 INFO [train.py:1114] (1/4) Epoch 6, batch 10050, loss[loss=0.3127, simple_loss=0.3637, pruned_loss=0.1308, over 3205.00 frames. ], tot_loss[loss=0.2401, simple_loss=0.3191, pruned_loss=0.08061, over 914488.24 frames. 
], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:03:13,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=81550.66666666667, ans=0.1 +2024-07-28 00:03:29,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=81577.33333333333, ans=0.125 +2024-07-28 00:03:35,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=81590.66666666667, ans=0.0 +2024-07-28 00:03:45,249 INFO [train.py:1114] (1/4) Epoch 6, batch 10100, loss[loss=0.2628, simple_loss=0.3232, pruned_loss=0.1012, over 3234.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3255, pruned_loss=0.08795, over 859565.08 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:03:53,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=81630.66666666667, ans=0.95 +2024-07-28 00:03:54,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.86 vs. limit=15.0 +2024-07-28 00:03:57,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.10 vs. limit=22.5 +2024-07-28 00:03:58,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=81644.0, ans=0.0 +2024-07-28 00:04:00,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=81644.0, ans=0.125 +2024-07-28 00:04:01,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=81644.0, ans=0.0 +2024-07-28 00:04:04,051 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.601e+01 6.841e+01 7.276e+01 7.854e+01 1.337e+02, threshold=1.455e+02, percent-clipped=1.0 +2024-07-28 00:04:17,438 INFO [train.py:1114] (1/4) Epoch 6, batch 10150, loss[loss=0.2852, simple_loss=0.3573, pruned_loss=0.1065, over 3691.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3301, pruned_loss=0.09212, over 820575.74 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:04:24,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=10.12 vs. 
limit=12.0 +2024-07-28 00:04:30,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=81710.66666666667, ans=0.0 +2024-07-28 00:04:34,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=81710.66666666667, ans=0.0 +2024-07-28 00:04:36,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=81724.0, ans=0.125 +2024-07-28 00:04:38,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=81724.0, ans=0.125 +2024-07-28 00:04:38,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81724.0, ans=0.1 +2024-07-28 00:04:48,172 INFO [train.py:1114] (1/4) Epoch 6, batch 10200, loss[loss=0.3196, simple_loss=0.3716, pruned_loss=0.1338, over 3429.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.333, pruned_loss=0.09582, over 788945.46 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:05:45,913 INFO [train.py:1114] (1/4) Epoch 7, batch 0, loss[loss=0.1844, simple_loss=0.2653, pruned_loss=0.05181, over 4850.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2653, pruned_loss=0.05181, over 4850.00 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:05:45,914 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 00:05:57,486 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.1928, simple_loss=0.2981, pruned_loss=0.04372, over 944034.00 frames. +2024-07-28 00:05:57,487 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 00:05:59,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.18 vs. limit=10.0 +2024-07-28 00:06:04,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.706e+01 6.568e+01 7.074e+01 7.483e+01 1.038e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 00:06:22,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=81820.0, ans=0.125 +2024-07-28 00:06:31,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=81833.33333333333, ans=0.125 +2024-07-28 00:06:33,933 INFO [train.py:1114] (1/4) Epoch 7, batch 50, loss[loss=0.1716, simple_loss=0.2545, pruned_loss=0.04433, over 4625.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3116, pruned_loss=0.0758, over 206250.21 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:06:37,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81846.66666666667, ans=0.125 +2024-07-28 00:06:56,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81886.66666666667, ans=0.125 +2024-07-28 00:07:07,525 INFO [train.py:1114] (1/4) Epoch 7, batch 100, loss[loss=0.2147, simple_loss=0.2927, pruned_loss=0.06837, over 4643.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3141, pruned_loss=0.07587, over 364473.30 frames. 
], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:07:09,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-07-28 00:07:12,079 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.914e+01 6.777e+01 7.920e+01 1.192e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-28 00:07:16,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=81926.66666666667, ans=0.125 +2024-07-28 00:07:27,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81953.33333333333, ans=0.125 +2024-07-28 00:07:28,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=6.0 +2024-07-28 00:07:31,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=81953.33333333333, ans=0.025 +2024-07-28 00:07:31,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.71 vs. limit=22.5 +2024-07-28 00:07:35,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=81966.66666666667, ans=0.0 +2024-07-28 00:07:37,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=81966.66666666667, ans=0.125 +2024-07-28 00:07:40,135 INFO [train.py:1114] (1/4) Epoch 7, batch 150, loss[loss=0.2069, simple_loss=0.2819, pruned_loss=0.066, over 4612.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3113, pruned_loss=0.07449, over 493400.51 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:07:42,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=81980.0, ans=0.125 +2024-07-28 00:07:50,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=81993.33333333333, ans=0.125 +2024-07-28 00:08:07,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=82033.33333333333, ans=0.2 +2024-07-28 00:08:12,769 INFO [train.py:1114] (1/4) Epoch 7, batch 200, loss[loss=0.2698, simple_loss=0.3532, pruned_loss=0.0932, over 4445.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3107, pruned_loss=0.07502, over 593073.64 frames. 
], batch size: 21, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:08:13,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=82046.66666666667, ans=0.025 +2024-07-28 00:08:16,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82046.66666666667, ans=0.1 +2024-07-28 00:08:16,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=82046.66666666667, ans=0.025 +2024-07-28 00:08:17,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.956e+01 6.544e+01 7.409e+01 1.468e+02, threshold=1.309e+02, percent-clipped=1.0 +2024-07-28 00:08:20,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=82060.0, ans=0.0 +2024-07-28 00:08:41,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=82100.0, ans=0.125 +2024-07-28 00:08:46,286 INFO [train.py:1114] (1/4) Epoch 7, batch 250, loss[loss=0.2649, simple_loss=0.3446, pruned_loss=0.09266, over 4631.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3095, pruned_loss=0.07408, over 669930.99 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:08:53,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=82126.66666666667, ans=0.2 +2024-07-28 00:08:59,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82140.0, ans=0.125 +2024-07-28 00:09:03,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.32 vs. limit=22.5 +2024-07-28 00:09:07,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=82153.33333333333, ans=0.0 +2024-07-28 00:09:08,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=82153.33333333333, ans=0.125 +2024-07-28 00:09:19,497 INFO [train.py:1114] (1/4) Epoch 7, batch 300, loss[loss=0.2386, simple_loss=0.3194, pruned_loss=0.07889, over 4804.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3111, pruned_loss=0.07471, over 729651.82 frames. ], batch size: 15, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:09:23,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=82180.0, ans=0.0 +2024-07-28 00:09:24,031 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.988e+01 6.705e+01 7.891e+01 1.591e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 00:09:24,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=82180.0, ans=0.0 +2024-07-28 00:09:26,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=82193.33333333333, ans=0.0 +2024-07-28 00:09:26,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.49 vs. 
limit=22.5 +2024-07-28 00:09:34,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=82206.66666666667, ans=0.125 +2024-07-28 00:09:49,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.54 vs. limit=15.0 +2024-07-28 00:09:54,199 INFO [train.py:1114] (1/4) Epoch 7, batch 350, loss[loss=0.2449, simple_loss=0.3172, pruned_loss=0.08631, over 4924.00 frames. ], tot_loss[loss=0.229, simple_loss=0.3101, pruned_loss=0.07393, over 776396.15 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:09:57,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82246.66666666667, ans=0.125 +2024-07-28 00:09:57,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.79 vs. limit=22.5 +2024-07-28 00:10:03,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.78 vs. limit=15.0 +2024-07-28 00:10:27,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=82300.0, ans=0.125 +2024-07-28 00:10:29,114 INFO [train.py:1114] (1/4) Epoch 7, batch 400, loss[loss=0.2098, simple_loss=0.299, pruned_loss=0.06032, over 4695.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3111, pruned_loss=0.07441, over 813674.19 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:10:33,742 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.839e+01 6.182e+01 6.903e+01 9.738e+01, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 00:10:35,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82326.66666666667, ans=0.125 +2024-07-28 00:10:43,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=82326.66666666667, ans=0.025 +2024-07-28 00:10:45,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=82340.0, ans=0.125 +2024-07-28 00:10:45,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=82340.0, ans=0.025 +2024-07-28 00:10:51,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=82353.33333333333, ans=0.035 +2024-07-28 00:10:51,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=82353.33333333333, ans=0.125 +2024-07-28 00:11:04,409 INFO [train.py:1114] (1/4) Epoch 7, batch 450, loss[loss=0.2722, simple_loss=0.3544, pruned_loss=0.09503, over 4642.00 frames. ], tot_loss[loss=0.228, simple_loss=0.3094, pruned_loss=0.07324, over 838634.20 frames. 
], batch size: 13, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:11:08,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=82380.0, ans=0.025 +2024-07-28 00:11:13,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82393.33333333333, ans=0.125 +2024-07-28 00:11:18,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.29 vs. limit=10.0 +2024-07-28 00:11:18,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=82406.66666666667, ans=0.0 +2024-07-28 00:11:28,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=82420.0, ans=0.95 +2024-07-28 00:11:34,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=82433.33333333333, ans=0.125 +2024-07-28 00:11:39,245 INFO [train.py:1114] (1/4) Epoch 7, batch 500, loss[loss=0.2511, simple_loss=0.3425, pruned_loss=0.07987, over 4670.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.308, pruned_loss=0.07288, over 861083.57 frames. ], batch size: 15, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:11:39,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82446.66666666667, ans=0.1 +2024-07-28 00:11:42,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82446.66666666667, ans=0.1 +2024-07-28 00:11:44,367 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.805e+01 6.520e+01 7.491e+01 1.046e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:11:46,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=82460.0, ans=15.0 +2024-07-28 00:11:48,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=82460.0, ans=0.0 +2024-07-28 00:11:50,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.28 vs. limit=15.0 +2024-07-28 00:12:05,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.52 vs. limit=10.0 +2024-07-28 00:12:05,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=6.0 +2024-07-28 00:12:05,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=82500.0, ans=0.125 +2024-07-28 00:12:08,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=82500.0, ans=0.04949747468305833 +2024-07-28 00:12:12,267 INFO [train.py:1114] (1/4) Epoch 7, batch 550, loss[loss=0.2339, simple_loss=0.3154, pruned_loss=0.0762, over 4601.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3088, pruned_loss=0.07309, over 876723.39 frames. 
], batch size: 17, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:12:22,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=82526.66666666667, ans=0.125 +2024-07-28 00:12:32,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=82540.0, ans=0.0 +2024-07-28 00:12:43,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=82566.66666666667, ans=0.0 +2024-07-28 00:12:44,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=82566.66666666667, ans=0.125 +2024-07-28 00:12:51,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82580.0, ans=0.125 +2024-07-28 00:12:52,463 INFO [train.py:1114] (1/4) Epoch 7, batch 600, loss[loss=0.2233, simple_loss=0.291, pruned_loss=0.07783, over 4625.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3077, pruned_loss=0.07218, over 891312.36 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:12:58,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=82580.0, ans=0.125 +2024-07-28 00:13:01,196 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.825e+01 6.471e+01 7.822e+01 1.372e+02, threshold=1.294e+02, percent-clipped=1.0 +2024-07-28 00:13:13,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=82606.66666666667, ans=0.125 +2024-07-28 00:13:18,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82620.0, ans=0.125 +2024-07-28 00:13:19,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=82620.0, ans=0.0 +2024-07-28 00:13:20,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=82620.0, ans=0.0 +2024-07-28 00:13:28,956 INFO [train.py:1114] (1/4) Epoch 7, batch 650, loss[loss=0.2426, simple_loss=0.3294, pruned_loss=0.07786, over 4768.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.307, pruned_loss=0.07201, over 903309.51 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:13:33,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=82646.66666666667, ans=0.025 +2024-07-28 00:13:46,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=82673.33333333333, ans=0.125 +2024-07-28 00:14:02,612 INFO [train.py:1114] (1/4) Epoch 7, batch 700, loss[loss=0.2025, simple_loss=0.2892, pruned_loss=0.05794, over 4637.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3073, pruned_loss=0.07196, over 911440.28 frames. 
], batch size: 12, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:07,881 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.955e+01 6.627e+01 7.908e+01 1.237e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 00:14:13,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82726.66666666667, ans=0.125 +2024-07-28 00:14:28,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=82766.66666666667, ans=0.0 +2024-07-28 00:14:36,833 INFO [train.py:1114] (1/4) Epoch 7, batch 750, loss[loss=0.2373, simple_loss=0.3178, pruned_loss=0.07842, over 4695.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3069, pruned_loss=0.07206, over 917993.73 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:42,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=82793.33333333333, ans=0.025 +2024-07-28 00:14:43,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=82793.33333333333, ans=0.2 +2024-07-28 00:15:02,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=82820.0, ans=0.025 +2024-07-28 00:15:08,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=82833.33333333333, ans=0.125 +2024-07-28 00:15:10,149 INFO [train.py:1114] (1/4) Epoch 7, batch 800, loss[loss=0.1858, simple_loss=0.2667, pruned_loss=0.0524, over 4854.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3069, pruned_loss=0.07191, over 922804.50 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:14,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=82846.66666666667, ans=0.0 +2024-07-28 00:15:17,228 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.902e+01 6.465e+01 7.413e+01 1.020e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 00:15:17,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=82846.66666666667, ans=0.2 +2024-07-28 00:15:25,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=82873.33333333333, ans=0.125 +2024-07-28 00:15:25,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.04 vs. 
limit=12.0 +2024-07-28 00:15:26,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82873.33333333333, ans=0.1 +2024-07-28 00:15:31,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=82886.66666666667, ans=0.125 +2024-07-28 00:15:35,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=82886.66666666667, ans=0.025 +2024-07-28 00:15:37,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=82900.0, ans=0.0 +2024-07-28 00:15:38,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=82900.0, ans=0.125 +2024-07-28 00:15:39,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=82900.0, ans=0.025 +2024-07-28 00:15:40,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82900.0, ans=0.1 +2024-07-28 00:15:46,837 INFO [train.py:1114] (1/4) Epoch 7, batch 850, loss[loss=0.207, simple_loss=0.293, pruned_loss=0.06046, over 4662.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3065, pruned_loss=0.0725, over 927398.01 frames. ], batch size: 14, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:48,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=12.0 +2024-07-28 00:15:56,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82926.66666666667, ans=0.1 +2024-07-28 00:16:01,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=82940.0, ans=0.2 +2024-07-28 00:16:03,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=82940.0, ans=0.025 +2024-07-28 00:16:07,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82953.33333333333, ans=0.125 +2024-07-28 00:16:13,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82966.66666666667, ans=0.125 +2024-07-28 00:16:14,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=82966.66666666667, ans=0.2 +2024-07-28 00:16:15,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=82966.66666666667, ans=0.0 +2024-07-28 00:16:22,225 INFO [train.py:1114] (1/4) Epoch 7, batch 900, loss[loss=0.1932, simple_loss=0.275, pruned_loss=0.05573, over 4849.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3067, pruned_loss=0.0727, over 928525.67 frames. 
], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:16:23,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=82980.0, ans=0.125 +2024-07-28 00:16:24,415 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:16:27,458 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.904e+01 6.297e+01 6.765e+01 1.145e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 00:16:30,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82993.33333333333, ans=0.1 +2024-07-28 00:16:33,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=82993.33333333333, ans=0.0 +2024-07-28 00:16:50,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.78 vs. limit=5.0 +2024-07-28 00:16:51,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=83033.33333333333, ans=0.2 +2024-07-28 00:16:57,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=83046.66666666667, ans=0.1 +2024-07-28 00:16:57,780 INFO [train.py:1114] (1/4) Epoch 7, batch 950, loss[loss=0.1849, simple_loss=0.2733, pruned_loss=0.04831, over 4772.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3067, pruned_loss=0.07243, over 930511.61 frames. ], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:16:59,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83046.66666666667, ans=0.1 +2024-07-28 00:17:09,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.24 vs. limit=22.5 +2024-07-28 00:17:10,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=15.0 +2024-07-28 00:17:11,001 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.58 vs. limit=22.5 +2024-07-28 00:17:12,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=83073.33333333333, ans=0.0 +2024-07-28 00:17:19,572 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:17:24,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=83100.0, ans=0.2 +2024-07-28 00:17:31,224 INFO [train.py:1114] (1/4) Epoch 7, batch 1000, loss[loss=0.2044, simple_loss=0.2858, pruned_loss=0.0615, over 4957.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.3076, pruned_loss=0.07288, over 929933.96 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:35,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=83113.33333333333, ans=0.125 +2024-07-28 00:17:36,723 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+01 6.185e+01 7.251e+01 8.642e+01 1.358e+02, threshold=1.450e+02, percent-clipped=3.0 +2024-07-28 00:17:42,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=83126.66666666667, ans=0.125 +2024-07-28 00:17:50,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=83140.0, ans=0.0 +2024-07-28 00:18:02,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=83166.66666666667, ans=0.125 +2024-07-28 00:18:05,096 INFO [train.py:1114] (1/4) Epoch 7, batch 1050, loss[loss=0.2535, simple_loss=0.3333, pruned_loss=0.08687, over 4869.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3068, pruned_loss=0.07218, over 932212.93 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:06,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=83180.0, ans=0.125 +2024-07-28 00:18:21,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=83193.33333333333, ans=0.2 +2024-07-28 00:18:24,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=83193.33333333333, ans=0.125 +2024-07-28 00:18:29,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=83193.33333333333, ans=0.0 +2024-07-28 00:18:29,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=83193.33333333333, ans=0.025 +2024-07-28 00:18:36,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0 +2024-07-28 00:18:38,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=83220.0, ans=0.035 +2024-07-28 00:18:41,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=83220.0, ans=0.0 +2024-07-28 00:18:42,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=83220.0, ans=0.0 +2024-07-28 00:18:47,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=83233.33333333333, ans=0.0 +2024-07-28 00:18:50,760 INFO [train.py:1114] (1/4) Epoch 7, batch 1100, loss[loss=0.2125, simple_loss=0.2921, pruned_loss=0.06647, over 4900.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3062, pruned_loss=0.07146, over 934849.82 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:53,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=83246.66666666667, ans=6.0 +2024-07-28 00:18:56,162 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.849e+01 5.958e+01 6.479e+01 7.755e+01 1.091e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:18:57,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83260.0, ans=0.1 +2024-07-28 00:18:59,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=83260.0, ans=0.025 +2024-07-28 00:19:02,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=83260.0, ans=0.0 +2024-07-28 00:19:03,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=83260.0, ans=0.2 +2024-07-28 00:19:05,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83273.33333333333, ans=0.1 +2024-07-28 00:19:06,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=83273.33333333333, ans=0.0 +2024-07-28 00:19:20,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=83286.66666666667, ans=0.125 +2024-07-28 00:19:22,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.04 vs. limit=15.0 +2024-07-28 00:19:28,119 INFO [train.py:1114] (1/4) Epoch 7, batch 1150, loss[loss=0.1991, simple_loss=0.298, pruned_loss=0.05004, over 4900.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3057, pruned_loss=0.07093, over 934706.91 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:19:38,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=83326.66666666667, ans=0.04949747468305833 +2024-07-28 00:19:38,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=83326.66666666667, ans=0.125 +2024-07-28 00:19:56,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=83353.33333333333, ans=0.0 +2024-07-28 00:20:02,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=83366.66666666667, ans=0.125 +2024-07-28 00:20:04,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=83380.0, ans=0.0 +2024-07-28 00:20:05,239 INFO [train.py:1114] (1/4) Epoch 7, batch 1200, loss[loss=0.2295, simple_loss=0.317, pruned_loss=0.071, over 4867.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3064, pruned_loss=0.07144, over 934080.06 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:20:10,449 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.660e+01 6.364e+01 7.390e+01 1.227e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 00:20:16,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=83393.33333333333, ans=0.125 +2024-07-28 00:20:25,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=83420.0, ans=0.0 +2024-07-28 00:20:34,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=83433.33333333333, ans=0.2 +2024-07-28 00:20:35,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.66 vs. limit=22.5 +2024-07-28 00:20:37,997 INFO [train.py:1114] (1/4) Epoch 7, batch 1250, loss[loss=0.2296, simple_loss=0.3084, pruned_loss=0.07543, over 4818.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3054, pruned_loss=0.07015, over 937866.39 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:21,941 INFO [train.py:1114] (1/4) Epoch 7, batch 1300, loss[loss=0.2463, simple_loss=0.3215, pruned_loss=0.08554, over 4701.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3055, pruned_loss=0.07049, over 939051.52 frames. ], batch size: 19, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:23,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83513.33333333333, ans=0.125 +2024-07-28 00:21:26,945 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.788e+01 6.480e+01 7.663e+01 1.256e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:21:32,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=83526.66666666667, ans=0.125 +2024-07-28 00:21:37,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.66 vs. limit=15.0 +2024-07-28 00:21:56,798 INFO [train.py:1114] (1/4) Epoch 7, batch 1350, loss[loss=0.1956, simple_loss=0.2882, pruned_loss=0.0515, over 4760.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3049, pruned_loss=0.07044, over 941088.29 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:22:02,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=83593.33333333333, ans=0.0 +2024-07-28 00:22:06,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.85 vs. 
limit=15.0 +2024-07-28 00:22:14,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=83606.66666666667, ans=0.0 +2024-07-28 00:22:23,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83620.0, ans=0.125 +2024-07-28 00:22:27,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=83633.33333333333, ans=0.0 +2024-07-28 00:22:30,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.20 vs. limit=15.0 +2024-07-28 00:22:31,661 INFO [train.py:1114] (1/4) Epoch 7, batch 1400, loss[loss=0.2126, simple_loss=0.2901, pruned_loss=0.06753, over 4689.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3038, pruned_loss=0.07014, over 942817.33 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:22:31,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=83646.66666666667, ans=0.125 +2024-07-28 00:22:36,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.900e+01 5.949e+01 6.637e+01 7.853e+01 1.145e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 00:22:37,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=83660.0, ans=0.125 +2024-07-28 00:22:54,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=83686.66666666667, ans=0.125 +2024-07-28 00:23:03,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83700.0, ans=0.125 +2024-07-28 00:23:06,297 INFO [train.py:1114] (1/4) Epoch 7, batch 1450, loss[loss=0.2401, simple_loss=0.3269, pruned_loss=0.07664, over 4674.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.305, pruned_loss=0.07089, over 942976.94 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:22,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=83740.0, ans=0.0 +2024-07-28 00:23:39,632 INFO [train.py:1114] (1/4) Epoch 7, batch 1500, loss[loss=0.2529, simple_loss=0.3366, pruned_loss=0.08457, over 4814.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3065, pruned_loss=0.07157, over 942404.27 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:44,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=83780.0, ans=0.125 +2024-07-28 00:23:45,189 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.882e+01 6.521e+01 7.412e+01 1.092e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:23:46,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83793.33333333333, ans=0.1 +2024-07-28 00:23:50,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=83793.33333333333, ans=0.04949747468305833 +2024-07-28 00:24:15,242 INFO [train.py:1114] (1/4) Epoch 7, batch 1550, loss[loss=0.2541, simple_loss=0.3386, pruned_loss=0.0848, over 4901.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3068, pruned_loss=0.0722, over 938468.45 frames. 
], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:24:21,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83860.0, ans=0.1 +2024-07-28 00:24:22,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.09 vs. limit=10.0 +2024-07-28 00:24:49,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=83873.33333333333, ans=0.07 +2024-07-28 00:24:52,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=83886.66666666667, ans=0.0 +2024-07-28 00:24:53,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=83886.66666666667, ans=0.0 +2024-07-28 00:25:05,035 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.05 vs. limit=5.0 +2024-07-28 00:25:09,011 INFO [train.py:1114] (1/4) Epoch 7, batch 1600, loss[loss=0.1861, simple_loss=0.2734, pruned_loss=0.04941, over 4879.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3071, pruned_loss=0.07226, over 937272.58 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:09,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-07-28 00:25:09,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83913.33333333333, ans=0.125 +2024-07-28 00:25:12,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.91 vs. limit=12.0 +2024-07-28 00:25:17,691 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.865e+01 6.513e+01 7.777e+01 1.353e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 00:25:21,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.20 vs. limit=15.0 +2024-07-28 00:25:25,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.75 vs. limit=22.5 +2024-07-28 00:25:29,417 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:25:30,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83940.0, ans=0.125 +2024-07-28 00:25:30,817 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.072e-03 +2024-07-28 00:25:34,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=83953.33333333333, ans=0.5 +2024-07-28 00:25:45,977 INFO [train.py:1114] (1/4) Epoch 7, batch 1650, loss[loss=0.2112, simple_loss=0.3006, pruned_loss=0.06091, over 4665.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3062, pruned_loss=0.07196, over 937056.74 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:03,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84006.66666666667, ans=0.125 +2024-07-28 00:26:03,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84006.66666666667, ans=0.1 +2024-07-28 00:26:05,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-28 00:26:13,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=84020.0, ans=10.0 +2024-07-28 00:26:17,321 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.78 vs. limit=6.0 +2024-07-28 00:26:21,407 INFO [train.py:1114] (1/4) Epoch 7, batch 1700, loss[loss=0.2173, simple_loss=0.284, pruned_loss=0.07529, over 4712.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3068, pruned_loss=0.07218, over 938661.71 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:23,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.55 vs. limit=15.0 +2024-07-28 00:26:24,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.88 vs. limit=15.0 +2024-07-28 00:26:26,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.19 vs. limit=15.0 +2024-07-28 00:26:26,711 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.250e+01 6.932e+01 8.047e+01 1.262e+02, threshold=1.386e+02, percent-clipped=0.0 +2024-07-28 00:26:31,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=84060.0, ans=0.125 +2024-07-28 00:26:33,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=84060.0, ans=0.2 +2024-07-28 00:26:34,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.13 vs. limit=15.0 +2024-07-28 00:26:36,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=84073.33333333333, ans=0.07 +2024-07-28 00:26:44,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=84086.66666666667, ans=0.125 +2024-07-28 00:26:54,563 INFO [train.py:1114] (1/4) Epoch 7, batch 1750, loss[loss=0.1747, simple_loss=0.258, pruned_loss=0.04574, over 4804.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3058, pruned_loss=0.07109, over 939866.55 frames. 
], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:55,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84113.33333333333, ans=0.125 +2024-07-28 00:27:05,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=84126.66666666667, ans=0.0 +2024-07-28 00:27:07,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=84126.66666666667, ans=0.0 +2024-07-28 00:27:21,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84153.33333333333, ans=0.1 +2024-07-28 00:27:35,782 INFO [train.py:1114] (1/4) Epoch 7, batch 1800, loss[loss=0.2166, simple_loss=0.2928, pruned_loss=0.07019, over 4637.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3056, pruned_loss=0.07137, over 940578.19 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:27:39,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=84180.0, ans=0.0 +2024-07-28 00:27:41,154 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 5.927e+01 6.951e+01 8.175e+01 1.232e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-28 00:27:45,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=84193.33333333333, ans=0.125 +2024-07-28 00:27:46,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=84193.33333333333, ans=0.0 +2024-07-28 00:28:02,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=84220.0, ans=15.0 +2024-07-28 00:28:11,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=84233.33333333333, ans=0.035 +2024-07-28 00:28:14,938 INFO [train.py:1114] (1/4) Epoch 7, batch 1850, loss[loss=0.2731, simple_loss=0.3497, pruned_loss=0.09823, over 4805.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3055, pruned_loss=0.07106, over 940491.76 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:21,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=84260.0, ans=0.025 +2024-07-28 00:28:22,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84260.0, ans=0.1 +2024-07-28 00:28:31,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=84273.33333333333, ans=0.07 +2024-07-28 00:28:43,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=84300.0, ans=0.05 +2024-07-28 00:28:43,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=84300.0, ans=0.0 +2024-07-28 00:28:45,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84300.0, ans=0.1 +2024-07-28 00:28:50,150 INFO [train.py:1114] (1/4) Epoch 7, batch 1900, loss[loss=0.2331, simple_loss=0.3138, pruned_loss=0.0762, over 4665.00 frames. 
], tot_loss[loss=0.2234, simple_loss=0.305, pruned_loss=0.07085, over 941672.58 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:55,322 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.987e+01 6.515e+01 7.725e+01 1.148e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 00:29:03,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=84340.0, ans=0.2 +2024-07-28 00:29:08,659 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:29:19,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.57 vs. limit=15.0 +2024-07-28 00:29:22,774 INFO [train.py:1114] (1/4) Epoch 7, batch 1950, loss[loss=0.217, simple_loss=0.2978, pruned_loss=0.06816, over 4895.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3067, pruned_loss=0.07169, over 943679.87 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:29:39,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.91 vs. limit=22.5 +2024-07-28 00:29:41,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=84406.66666666667, ans=0.125 +2024-07-28 00:29:43,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=84420.0, ans=0.0 +2024-07-28 00:29:43,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.48 vs. limit=22.5 +2024-07-28 00:29:49,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84433.33333333333, ans=0.1 +2024-07-28 00:29:56,352 INFO [train.py:1114] (1/4) Epoch 7, batch 2000, loss[loss=0.1848, simple_loss=0.2483, pruned_loss=0.06064, over 4811.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3068, pruned_loss=0.07163, over 940999.53 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:30:01,591 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 6.163e+01 6.683e+01 7.706e+01 1.195e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 00:30:03,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=84460.0, ans=0.0 +2024-07-28 00:30:09,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=84473.33333333333, ans=0.0 +2024-07-28 00:30:10,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=84473.33333333333, ans=8.0 +2024-07-28 00:30:12,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.88 vs. 
limit=15.0 +2024-07-28 00:30:25,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=84486.66666666667, ans=0.2 +2024-07-28 00:30:34,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84500.0, ans=0.125 +2024-07-28 00:30:36,703 INFO [train.py:1114] (1/4) Epoch 7, batch 2050, loss[loss=0.2215, simple_loss=0.294, pruned_loss=0.0745, over 4614.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3062, pruned_loss=0.07152, over 938706.00 frames. ], batch size: 11, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:30:36,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=84513.33333333333, ans=0.125 +2024-07-28 00:30:37,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84513.33333333333, ans=0.125 +2024-07-28 00:30:57,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84553.33333333333, ans=0.1 +2024-07-28 00:30:59,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84553.33333333333, ans=0.1 +2024-07-28 00:31:02,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=84553.33333333333, ans=0.2 +2024-07-28 00:31:07,159 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:31:08,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84566.66666666667, ans=0.1 +2024-07-28 00:31:11,090 INFO [train.py:1114] (1/4) Epoch 7, batch 2100, loss[loss=0.2259, simple_loss=0.3097, pruned_loss=0.07103, over 4757.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.305, pruned_loss=0.07097, over 940672.78 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:11,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=84580.0, ans=0.125 +2024-07-28 00:31:13,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.36 vs. 
limit=15.0 +2024-07-28 00:31:16,339 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.891e+01 6.506e+01 7.465e+01 1.283e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 00:31:19,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=84593.33333333333, ans=0.0 +2024-07-28 00:31:29,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=84606.66666666667, ans=0.07 +2024-07-28 00:31:35,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=84620.0, ans=0.125 +2024-07-28 00:31:35,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=84620.0, ans=0.0 +2024-07-28 00:31:41,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=84633.33333333333, ans=0.1 +2024-07-28 00:31:41,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=84633.33333333333, ans=0.2 +2024-07-28 00:31:44,326 INFO [train.py:1114] (1/4) Epoch 7, batch 2150, loss[loss=0.2291, simple_loss=0.302, pruned_loss=0.07804, over 4894.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3042, pruned_loss=0.07037, over 943982.76 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:45,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=84646.66666666667, ans=0.2 +2024-07-28 00:31:53,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=84660.0, ans=0.125 +2024-07-28 00:31:55,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=84660.0, ans=0.2 +2024-07-28 00:31:57,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84673.33333333333, ans=0.1 +2024-07-28 00:31:58,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84673.33333333333, ans=0.125 +2024-07-28 00:31:59,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=84673.33333333333, ans=0.125 +2024-07-28 00:32:06,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.85 vs. limit=15.0 +2024-07-28 00:32:08,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=84686.66666666667, ans=0.025 +2024-07-28 00:32:17,326 INFO [train.py:1114] (1/4) Epoch 7, batch 2200, loss[loss=0.2509, simple_loss=0.3309, pruned_loss=0.08549, over 4818.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3046, pruned_loss=0.07042, over 943326.67 frames. 
], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:32:18,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84713.33333333333, ans=0.0 +2024-07-28 00:32:22,573 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.863e+01 5.835e+01 6.281e+01 7.163e+01 1.109e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 00:32:41,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84753.33333333333, ans=0.125 +2024-07-28 00:32:42,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=84753.33333333333, ans=0.125 +2024-07-28 00:32:42,196 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:32:42,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=84753.33333333333, ans=0.125 +2024-07-28 00:32:44,521 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.76 vs. limit=22.5 +2024-07-28 00:32:47,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=84766.66666666667, ans=0.125 +2024-07-28 00:32:52,119 INFO [train.py:1114] (1/4) Epoch 7, batch 2250, loss[loss=0.2037, simple_loss=0.2909, pruned_loss=0.05821, over 4691.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3033, pruned_loss=0.06929, over 941902.57 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:32:54,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84780.0, ans=0.1 +2024-07-28 00:33:04,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.57 vs. limit=10.0 +2024-07-28 00:33:09,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=84806.66666666667, ans=0.2 +2024-07-28 00:33:14,416 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=15.0 +2024-07-28 00:33:14,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=84820.0, ans=0.125 +2024-07-28 00:33:27,432 INFO [train.py:1114] (1/4) Epoch 7, batch 2300, loss[loss=0.1803, simple_loss=0.2646, pruned_loss=0.04798, over 4941.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3036, pruned_loss=0.06991, over 939301.01 frames. 
], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:33:32,858 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 5.907e+01 7.082e+01 8.177e+01 1.156e+02, threshold=1.416e+02, percent-clipped=0.0 +2024-07-28 00:33:45,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84873.33333333333, ans=0.0 +2024-07-28 00:33:48,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84873.33333333333, ans=0.1 +2024-07-28 00:33:49,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=84886.66666666667, ans=0.0 +2024-07-28 00:33:52,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=84886.66666666667, ans=0.125 +2024-07-28 00:33:55,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=84886.66666666667, ans=0.125 +2024-07-28 00:33:55,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=84900.0, ans=0.0 +2024-07-28 00:34:00,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.90 vs. limit=22.5 +2024-07-28 00:34:03,230 INFO [train.py:1114] (1/4) Epoch 7, batch 2350, loss[loss=0.242, simple_loss=0.3411, pruned_loss=0.07144, over 4636.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.304, pruned_loss=0.0699, over 941135.94 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:15,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=84926.66666666667, ans=0.125 +2024-07-28 00:34:26,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=22.5 +2024-07-28 00:34:34,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=84966.66666666667, ans=0.07 +2024-07-28 00:34:37,999 INFO [train.py:1114] (1/4) Epoch 7, batch 2400, loss[loss=0.2219, simple_loss=0.3074, pruned_loss=0.06822, over 4634.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.304, pruned_loss=0.07034, over 941033.30 frames. 
], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:43,147 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 6.102e+01 6.788e+01 7.615e+01 1.111e+02, threshold=1.358e+02, percent-clipped=0.0 +2024-07-28 00:34:48,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=84993.33333333333, ans=0.0 +2024-07-28 00:34:49,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=84993.33333333333, ans=0.0 +2024-07-28 00:34:59,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85020.0, ans=0.0 +2024-07-28 00:35:08,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=85033.33333333333, ans=0.125 +2024-07-28 00:35:11,392 INFO [train.py:1114] (1/4) Epoch 7, batch 2450, loss[loss=0.2209, simple_loss=0.3056, pruned_loss=0.06814, over 4695.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3052, pruned_loss=0.07075, over 936706.96 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:16,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.08 vs. limit=22.5 +2024-07-28 00:35:29,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85073.33333333333, ans=0.1 +2024-07-28 00:35:30,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.71 vs. limit=15.0 +2024-07-28 00:35:36,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=85086.66666666667, ans=0.125 +2024-07-28 00:35:44,968 INFO [train.py:1114] (1/4) Epoch 7, batch 2500, loss[loss=0.2711, simple_loss=0.3496, pruned_loss=0.09633, over 4807.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3049, pruned_loss=0.07102, over 938602.43 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:46,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85113.33333333333, ans=0.1 +2024-07-28 00:35:50,129 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.036e+01 6.265e+01 6.846e+01 8.137e+01 1.168e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 00:35:50,456 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.56 vs. limit=22.5 +2024-07-28 00:35:51,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=85126.66666666667, ans=10.0 +2024-07-28 00:35:52,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.55 vs. limit=15.0 +2024-07-28 00:35:54,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.05 vs. 
limit=15.0 +2024-07-28 00:35:57,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85140.0, ans=0.125 +2024-07-28 00:36:01,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=85140.0, ans=0.2 +2024-07-28 00:36:17,871 INFO [train.py:1114] (1/4) Epoch 7, batch 2550, loss[loss=0.1833, simple_loss=0.2689, pruned_loss=0.04886, over 4805.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3055, pruned_loss=0.07118, over 938137.18 frames. ], batch size: 11, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:25,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=85193.33333333333, ans=0.0 +2024-07-28 00:36:26,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=85193.33333333333, ans=0.2 +2024-07-28 00:36:37,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=85220.0, ans=0.0 +2024-07-28 00:36:40,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=85220.0, ans=0.07 +2024-07-28 00:36:48,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.22 vs. limit=22.5 +2024-07-28 00:36:49,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=85233.33333333333, ans=0.2 +2024-07-28 00:36:49,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85233.33333333333, ans=0.125 +2024-07-28 00:36:51,084 INFO [train.py:1114] (1/4) Epoch 7, batch 2600, loss[loss=0.2122, simple_loss=0.3084, pruned_loss=0.05797, over 4900.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.306, pruned_loss=0.07167, over 937161.55 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:56,532 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+01 5.684e+01 6.063e+01 6.727e+01 1.050e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 00:37:04,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85273.33333333333, ans=0.0 +2024-07-28 00:37:05,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=85273.33333333333, ans=0.125 +2024-07-28 00:37:10,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=85273.33333333333, ans=0.07 +2024-07-28 00:37:19,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=85300.0, ans=0.0 +2024-07-28 00:37:24,484 INFO [train.py:1114] (1/4) Epoch 7, batch 2650, loss[loss=0.2163, simple_loss=0.2917, pruned_loss=0.07047, over 4597.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3065, pruned_loss=0.07179, over 939217.36 frames. 
], batch size: 16, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:37:24,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=85313.33333333333, ans=0.125 +2024-07-28 00:37:26,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=12.0 +2024-07-28 00:37:29,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=85313.33333333333, ans=0.0 +2024-07-28 00:37:29,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=85313.33333333333, ans=0.0 +2024-07-28 00:37:49,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.14 vs. limit=15.0 +2024-07-28 00:37:51,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85353.33333333333, ans=0.0 +2024-07-28 00:37:59,894 INFO [train.py:1114] (1/4) Epoch 7, batch 2700, loss[loss=0.2608, simple_loss=0.3437, pruned_loss=0.08894, over 4749.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3061, pruned_loss=0.07161, over 939304.74 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:05,156 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 5.828e+01 6.522e+01 7.194e+01 9.710e+01, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:38:19,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=85406.66666666667, ans=0.2 +2024-07-28 00:38:20,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=85406.66666666667, ans=0.125 +2024-07-28 00:38:32,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0 +2024-07-28 00:38:35,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=85433.33333333333, ans=0.125 +2024-07-28 00:38:38,343 INFO [train.py:1114] (1/4) Epoch 7, batch 2750, loss[loss=0.2272, simple_loss=0.308, pruned_loss=0.07318, over 4714.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3049, pruned_loss=0.07104, over 939153.07 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:48,733 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.65 vs. limit=6.0 +2024-07-28 00:38:49,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=85460.0, ans=0.0 +2024-07-28 00:38:58,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=85486.66666666667, ans=0.125 +2024-07-28 00:38:58,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=12.0 +2024-07-28 00:39:02,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.22 vs. 
limit=22.5 +2024-07-28 00:39:09,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.12 vs. limit=5.0 +2024-07-28 00:39:13,496 INFO [train.py:1114] (1/4) Epoch 7, batch 2800, loss[loss=0.2978, simple_loss=0.3579, pruned_loss=0.1188, over 3192.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3055, pruned_loss=0.07177, over 936756.90 frames. ], batch size: 35, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:14,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=85513.33333333333, ans=0.125 +2024-07-28 00:39:18,793 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.969e+01 6.581e+01 7.409e+01 1.159e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 00:39:22,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-07-28 00:39:26,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=85540.0, ans=0.125 +2024-07-28 00:39:30,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85540.0, ans=0.1 +2024-07-28 00:39:49,475 INFO [train.py:1114] (1/4) Epoch 7, batch 2850, loss[loss=0.2266, simple_loss=0.3047, pruned_loss=0.0742, over 4963.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.306, pruned_loss=0.07225, over 935449.65 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:50,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=85580.0, ans=0.025 +2024-07-28 00:39:59,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85593.33333333333, ans=0.125 +2024-07-28 00:40:14,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.60 vs. limit=15.0 +2024-07-28 00:40:23,172 INFO [train.py:1114] (1/4) Epoch 7, batch 2900, loss[loss=0.1752, simple_loss=0.2623, pruned_loss=0.04402, over 4827.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3071, pruned_loss=0.07225, over 939461.39 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:40:25,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.87 vs. limit=15.0 +2024-07-28 00:40:28,616 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.206e+01 7.013e+01 8.326e+01 1.461e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-28 00:40:35,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=85660.0, ans=0.125 +2024-07-28 00:40:45,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.62 vs. 
limit=15.0 +2024-07-28 00:40:51,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=85686.66666666667, ans=0.025 +2024-07-28 00:40:51,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=85686.66666666667, ans=0.0 +2024-07-28 00:40:54,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85700.0, ans=0.1 +2024-07-28 00:40:56,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=85700.0, ans=0.125 +2024-07-28 00:41:00,423 INFO [train.py:1114] (1/4) Epoch 7, batch 2950, loss[loss=0.2207, simple_loss=0.3067, pruned_loss=0.06734, over 4709.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3055, pruned_loss=0.07187, over 938259.19 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:41:09,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85726.66666666667, ans=0.1 +2024-07-28 00:41:12,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=85726.66666666667, ans=0.025 +2024-07-28 00:41:15,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.26 vs. limit=22.5 +2024-07-28 00:41:19,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=85753.33333333333, ans=0.125 +2024-07-28 00:41:24,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85753.33333333333, ans=0.1 +2024-07-28 00:41:24,077 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:41:26,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=85753.33333333333, ans=0.0 +2024-07-28 00:41:29,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85766.66666666667, ans=0.0 +2024-07-28 00:41:30,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=85766.66666666667, ans=0.0 +2024-07-28 00:41:33,777 INFO [train.py:1114] (1/4) Epoch 7, batch 3000, loss[loss=0.201, simple_loss=0.2776, pruned_loss=0.0622, over 4752.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3051, pruned_loss=0.07101, over 938104.84 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:41:33,778 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 00:41:39,171 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.3043, 2.8681, 3.1211, 3.4904], device='cuda:1') +2024-07-28 00:41:46,474 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2896, pruned_loss=0.04088, over 944034.00 frames. 
+2024-07-28 00:41:46,475 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 00:41:48,041 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:41:51,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 6.009e+01 6.936e+01 8.242e+01 1.252e+02, threshold=1.387e+02, percent-clipped=0.0
+2024-07-28 00:42:03,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=85806.66666666667, ans=0.125
+2024-07-28 00:42:20,755 INFO [train.py:1114] (1/4) Epoch 7, batch 3050, loss[loss=0.2065, simple_loss=0.3012, pruned_loss=0.05592, over 4630.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3062, pruned_loss=0.07125, over 937117.51 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0
+2024-07-28 00:42:20,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=85846.66666666667, ans=0.125
+2024-07-28 00:42:33,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=85860.0, ans=0.2
+2024-07-28 00:42:41,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.51 vs. limit=6.0
+2024-07-28 00:42:51,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=85900.0, ans=0.125
+2024-07-28 00:42:54,036 INFO [train.py:1114] (1/4) Epoch 7, batch 3100, loss[loss=0.2408, simple_loss=0.3257, pruned_loss=0.07793, over 4639.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3055, pruned_loss=0.07121, over 937688.81 frames. ], batch size: 16, lr: 1.09e-02, grad_scale: 64.0
+2024-07-28 00:42:59,247 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.759e+01 6.343e+01 7.086e+01 1.226e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 00:43:11,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=85940.0, ans=0.2
+2024-07-28 00:43:15,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0
+2024-07-28 00:43:25,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.84 vs. limit=12.0
+2024-07-28 00:43:31,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=85966.66666666667, ans=0.125
+2024-07-28 00:43:32,972 INFO [train.py:1114] (1/4) Epoch 7, batch 3150, loss[loss=0.2643, simple_loss=0.3478, pruned_loss=0.09038, over 4606.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.305, pruned_loss=0.07107, over 938212.09 frames. ], batch size: 17, lr: 1.09e-02, grad_scale: 64.0
+2024-07-28 00:43:38,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=85980.0, ans=0.0
+2024-07-28 00:43:44,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.74 vs. limit=15.0
+2024-07-28 00:43:46,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=86006.66666666667, ans=0.0
+2024-07-28 00:43:51,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=86006.66666666667, ans=0.0
+2024-07-28 00:43:57,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86020.0, ans=0.1
+2024-07-28 00:44:00,334 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:44:07,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=86046.66666666667, ans=0.0
+2024-07-28 00:44:08,145 INFO [train.py:1114] (1/4) Epoch 7, batch 3200, loss[loss=0.2531, simple_loss=0.3261, pruned_loss=0.09006, over 4824.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3053, pruned_loss=0.07125, over 939931.34 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0
+2024-07-28 00:44:13,316 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 6.085e+01 7.068e+01 8.225e+01 1.298e+02, threshold=1.414e+02, percent-clipped=1.0
+2024-07-28 00:44:27,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=86073.33333333333, ans=0.0
+2024-07-28 00:44:50,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.03 vs. limit=10.0
+2024-07-28 00:44:50,710 INFO [train.py:1114] (1/4) Epoch 7, batch 3250, loss[loss=0.2454, simple_loss=0.3233, pruned_loss=0.0838, over 4928.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3058, pruned_loss=0.07119, over 940933.43 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:44:54,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86113.33333333333, ans=0.1
+2024-07-28 00:44:59,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.83 vs. limit=12.0
+2024-07-28 00:45:00,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=86126.66666666667, ans=0.025
+2024-07-28 00:45:15,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=15.0
+2024-07-28 00:45:15,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten.whitening_limit, batch_count=86153.33333333333, ans=15.0
+2024-07-28 00:45:21,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=86166.66666666667, ans=0.125
+2024-07-28 00:45:23,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=86166.66666666667, ans=0.0
+2024-07-28 00:45:24,550 INFO [train.py:1114] (1/4) Epoch 7, batch 3300, loss[loss=0.2507, simple_loss=0.3066, pruned_loss=0.09734, over 4662.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3042, pruned_loss=0.07083, over 941043.36 frames. ], batch size: 19, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:45:27,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0
+2024-07-28 00:45:29,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.84 vs. limit=15.0
+2024-07-28 00:45:30,687 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.736e+01 6.420e+01 6.992e+01 1.033e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 00:45:38,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86206.66666666667, ans=0.125
+2024-07-28 00:45:57,982 INFO [train.py:1114] (1/4) Epoch 7, batch 3350, loss[loss=0.2426, simple_loss=0.3225, pruned_loss=0.08136, over 4587.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3061, pruned_loss=0.07205, over 938690.12 frames. ], batch size: 17, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:46:01,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.37 vs. limit=22.5
+2024-07-28 00:46:11,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=86273.33333333333, ans=0.125
+2024-07-28 00:46:24,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.45 vs. limit=15.0
+2024-07-28 00:46:28,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=86300.0, ans=0.2
+2024-07-28 00:46:31,258 INFO [train.py:1114] (1/4) Epoch 7, batch 3400, loss[loss=0.1715, simple_loss=0.2549, pruned_loss=0.0441, over 4812.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3059, pruned_loss=0.07217, over 937298.41 frames. ], batch size: 11, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:46:33,497 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.27 vs. limit=15.0
+2024-07-28 00:46:33,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=86313.33333333333, ans=0.125
+2024-07-28 00:46:37,178 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 5.874e+01 6.654e+01 7.588e+01 1.124e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-28 00:46:38,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=86326.66666666667, ans=0.125
+2024-07-28 00:46:50,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.37 vs. limit=12.0
+2024-07-28 00:47:04,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=86380.0, ans=0.125
+2024-07-28 00:47:04,935 INFO [train.py:1114] (1/4) Epoch 7, batch 3450, loss[loss=0.2652, simple_loss=0.3523, pruned_loss=0.089, over 4708.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3069, pruned_loss=0.0722, over 937230.58 frames. ], batch size: 19, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:47:09,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=86380.0, ans=0.0
+2024-07-28 00:47:22,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86406.66666666667, ans=0.1
+2024-07-28 00:47:26,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0
+2024-07-28 00:47:28,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0
+2024-07-28 00:47:31,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=86433.33333333333, ans=0.125
+2024-07-28 00:47:35,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=86433.33333333333, ans=0.125
+2024-07-28 00:47:38,423 INFO [train.py:1114] (1/4) Epoch 7, batch 3500, loss[loss=0.2129, simple_loss=0.2907, pruned_loss=0.06751, over 4937.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3068, pruned_loss=0.07181, over 937614.70 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:47:40,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=86446.66666666667, ans=0.125
+2024-07-28 00:47:42,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=86446.66666666667, ans=0.125
+2024-07-28 00:47:44,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.841e+01 6.535e+01 7.195e+01 1.031e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-28 00:47:47,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=86460.0, ans=0.0
+2024-07-28 00:47:47,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=86460.0, ans=0.07
+2024-07-28 00:47:54,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=86473.33333333333, ans=0.0
+2024-07-28 00:47:54,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=86473.33333333333, ans=0.125
+2024-07-28 00:48:06,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=86486.66666666667, ans=0.125
+2024-07-28 00:48:07,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.67 vs. limit=15.0
+2024-07-28 00:48:16,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.17 vs. limit=22.5
+2024-07-28 00:48:16,910 INFO [train.py:1114] (1/4) Epoch 7, batch 3550, loss[loss=0.2064, simple_loss=0.3067, pruned_loss=0.05306, over 4672.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3061, pruned_loss=0.07164, over 938356.29 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:48:30,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=86540.0, ans=0.125
+2024-07-28 00:48:32,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=86540.0, ans=0.125
+2024-07-28 00:48:34,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=86540.0, ans=0.0
+2024-07-28 00:48:36,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=86553.33333333333, ans=0.125
+2024-07-28 00:48:49,696 INFO [train.py:1114] (1/4) Epoch 7, batch 3600, loss[loss=0.1901, simple_loss=0.2738, pruned_loss=0.05324, over 4965.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3053, pruned_loss=0.07094, over 940255.12 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:48:52,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=86580.0, ans=0.125
+2024-07-28 00:48:54,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0
+2024-07-28 00:48:55,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.014e+01 6.005e+01 6.689e+01 7.700e+01 1.084e+02, threshold=1.338e+02, percent-clipped=0.0
+2024-07-28 00:49:05,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=86606.66666666667, ans=0.025
+2024-07-28 00:49:09,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=86606.66666666667, ans=0.2
+2024-07-28 00:49:15,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=86620.0, ans=0.125
+2024-07-28 00:49:26,861 INFO [train.py:1114] (1/4) Epoch 7, batch 3650, loss[loss=0.2319, simple_loss=0.3149, pruned_loss=0.07449, over 4907.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3039, pruned_loss=0.07009, over 940670.64 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:49:34,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86660.0, ans=0.1
+2024-07-28 00:49:57,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=86700.0, ans=0.0
+2024-07-28 00:49:58,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=86700.0, ans=0.0
+2024-07-28 00:50:01,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=86700.0, ans=0.0
+2024-07-28 00:50:02,200 INFO [train.py:1114] (1/4) Epoch 7, batch 3700, loss[loss=0.226, simple_loss=0.3008, pruned_loss=0.07557, over 4926.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3041, pruned_loss=0.07029, over 941545.22 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:50:05,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=86713.33333333333, ans=0.125
+2024-07-28 00:50:07,940 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.091e+01 5.984e+01 6.849e+01 8.141e+01 1.285e+02, threshold=1.370e+02, percent-clipped=0.0
+2024-07-28 00:50:19,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.90 vs. limit=12.0
+2024-07-28 00:50:36,833 INFO [train.py:1114] (1/4) Epoch 7, batch 3750, loss[loss=0.1994, simple_loss=0.2728, pruned_loss=0.06299, over 4810.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3034, pruned_loss=0.06988, over 943308.50 frames. ], batch size: 11, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:50:44,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=86793.33333333333, ans=0.125
+2024-07-28 00:50:48,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.97 vs. limit=22.5
+2024-07-28 00:51:09,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=86846.66666666667, ans=0.125
+2024-07-28 00:51:10,294 INFO [train.py:1114] (1/4) Epoch 7, batch 3800, loss[loss=0.2426, simple_loss=0.3159, pruned_loss=0.0846, over 4812.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3026, pruned_loss=0.06993, over 941595.49 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:51:16,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.938e+01 6.490e+01 7.260e+01 1.083e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-28 00:51:31,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=86886.66666666667, ans=0.125
+2024-07-28 00:51:43,432 INFO [train.py:1114] (1/4) Epoch 7, batch 3850, loss[loss=0.2142, simple_loss=0.3006, pruned_loss=0.06392, over 4613.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3016, pruned_loss=0.06903, over 942506.08 frames. ], batch size: 16, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:51:43,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=86913.33333333333, ans=0.2
+2024-07-28 00:51:55,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=86926.66666666667, ans=0.025
+2024-07-28 00:51:56,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.59 vs. limit=22.5
+2024-07-28 00:52:07,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=86953.33333333333, ans=0.0
+2024-07-28 00:52:07,797 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.102e-03
+2024-07-28 00:52:13,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.88 vs. limit=22.5
+2024-07-28 00:52:17,054 INFO [train.py:1114] (1/4) Epoch 7, batch 3900, loss[loss=0.2266, simple_loss=0.317, pruned_loss=0.06808, over 4802.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3022, pruned_loss=0.06892, over 943051.14 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:52:21,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=86980.0, ans=0.2
+2024-07-28 00:52:22,767 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.069e+01 5.781e+01 6.376e+01 7.079e+01 1.169e+02, threshold=1.275e+02, percent-clipped=0.0
+2024-07-28 00:52:31,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=87006.66666666667, ans=0.0
+2024-07-28 00:52:32,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87006.66666666667, ans=0.1
+2024-07-28 00:52:35,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=87006.66666666667, ans=0.09899494936611666
+2024-07-28 00:52:38,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=87020.0, ans=0.0
+2024-07-28 00:52:40,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=87020.0, ans=0.1
+2024-07-28 00:52:49,964 INFO [train.py:1114] (1/4) Epoch 7, batch 3950, loss[loss=0.263, simple_loss=0.3425, pruned_loss=0.09181, over 4826.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3038, pruned_loss=0.06985, over 944788.30 frames. ], batch size: 16, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:52:53,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0
+2024-07-28 00:52:55,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=87046.66666666667, ans=0.0
+2024-07-28 00:53:00,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=87060.0, ans=15.0
+2024-07-28 00:53:10,654 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:53:23,286 INFO [train.py:1114] (1/4) Epoch 7, batch 4000, loss[loss=0.1845, simple_loss=0.2724, pruned_loss=0.04835, over 4769.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3045, pruned_loss=0.07065, over 941055.00 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:53:25,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=87113.33333333333, ans=0.0
+2024-07-28 00:53:27,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=87113.33333333333, ans=0.0
+2024-07-28 00:53:28,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=87113.33333333333, ans=0.0
+2024-07-28 00:53:29,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=15.0
+2024-07-28 00:53:29,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 5.981e+01 6.594e+01 7.315e+01 1.099e+02, threshold=1.319e+02, percent-clipped=0.0
+2024-07-28 00:53:34,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=87126.66666666667, ans=0.125
+2024-07-28 00:53:36,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.73 vs. limit=15.0
+2024-07-28 00:53:41,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=87140.0, ans=0.025
+2024-07-28 00:53:42,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0
+2024-07-28 00:53:46,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87153.33333333333, ans=0.1
+2024-07-28 00:53:51,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=87166.66666666667, ans=0.125
+2024-07-28 00:53:52,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87166.66666666667, ans=0.1
+2024-07-28 00:53:55,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=87166.66666666667, ans=0.04949747468305833
+2024-07-28 00:53:57,315 INFO [train.py:1114] (1/4) Epoch 7, batch 4050, loss[loss=0.3746, simple_loss=0.3995, pruned_loss=0.1748, over 3362.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3032, pruned_loss=0.07028, over 939647.41 frames. ], batch size: 35, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:54:02,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=87180.0, ans=0.025
+2024-07-28 00:54:12,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=87206.66666666667, ans=0.125
+2024-07-28 00:54:21,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=87220.0, ans=0.2
+2024-07-28 00:54:23,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87220.0, ans=0.125
+2024-07-28 00:54:24,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.26 vs. limit=15.0
+2024-07-28 00:54:30,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.68 vs. limit=12.0
+2024-07-28 00:54:32,973 INFO [train.py:1114] (1/4) Epoch 7, batch 4100, loss[loss=0.2101, simple_loss=0.3095, pruned_loss=0.05534, over 4911.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3033, pruned_loss=0.07008, over 938693.30 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:54:36,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=87246.66666666667, ans=0.05
+2024-07-28 00:54:39,027 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.802e+01 6.541e+01 7.841e+01 1.191e+02, threshold=1.308e+02, percent-clipped=0.0
+2024-07-28 00:54:50,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=87273.33333333333, ans=0.2
+2024-07-28 00:54:52,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=87273.33333333333, ans=0.125
+2024-07-28 00:55:08,134 INFO [train.py:1114] (1/4) Epoch 7, batch 4150, loss[loss=0.2089, simple_loss=0.2901, pruned_loss=0.06382, over 4824.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3028, pruned_loss=0.07013, over 938189.78 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:55:16,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=87326.66666666667, ans=0.025
+2024-07-28 00:55:18,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87326.66666666667, ans=0.1
+2024-07-28 00:55:28,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87340.0, ans=0.1
+2024-07-28 00:55:32,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=87353.33333333333, ans=0.125
+2024-07-28 00:55:34,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=87353.33333333333, ans=0.2
+2024-07-28 00:55:35,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=87353.33333333333, ans=0.09899494936611666
+2024-07-28 00:55:36,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=87366.66666666667, ans=0.2
+2024-07-28 00:55:41,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87366.66666666667, ans=0.1
+2024-07-28 00:55:44,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=87380.0, ans=0.0
+2024-07-28 00:55:44,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.57 vs. limit=22.5
+2024-07-28 00:55:44,763 INFO [train.py:1114] (1/4) Epoch 7, batch 4200, loss[loss=0.238, simple_loss=0.321, pruned_loss=0.07747, over 4915.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.303, pruned_loss=0.06972, over 939432.44 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:55:45,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=87380.0, ans=0.125
+2024-07-28 00:55:50,404 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.692e+01 6.166e+01 6.641e+01 1.038e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 00:56:09,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=87420.0, ans=0.125
+2024-07-28 00:56:11,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=87433.33333333333, ans=0.025
+2024-07-28 00:56:15,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=87433.33333333333, ans=0.0
+2024-07-28 00:56:17,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.77 vs. limit=15.0
+2024-07-28 00:56:17,895 INFO [train.py:1114] (1/4) Epoch 7, batch 4250, loss[loss=0.1956, simple_loss=0.2889, pruned_loss=0.05113, over 4638.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3033, pruned_loss=0.06993, over 940045.14 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:56:23,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87460.0, ans=0.1
+2024-07-28 00:56:23,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=87460.0, ans=0.2
+2024-07-28 00:56:24,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=87460.0, ans=0.2
+2024-07-28 00:56:39,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=87486.66666666667, ans=0.0
+2024-07-28 00:56:41,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=87486.66666666667, ans=0.0
+2024-07-28 00:56:45,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=87500.0, ans=0.2
+2024-07-28 00:56:46,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87500.0, ans=0.1
+2024-07-28 00:56:51,124 INFO [train.py:1114] (1/4) Epoch 7, batch 4300, loss[loss=0.19, simple_loss=0.2927, pruned_loss=0.0437, over 4759.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3041, pruned_loss=0.0701, over 939814.56 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:56:52,286 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.92 vs. limit=10.0
+2024-07-28 00:56:53,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.03 vs. limit=15.0
+2024-07-28 00:56:57,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 6.079e+01 6.780e+01 8.042e+01 1.237e+02, threshold=1.356e+02, percent-clipped=1.0
+2024-07-28 00:56:58,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=87526.66666666667, ans=0.0
+2024-07-28 00:57:00,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.82 vs. limit=22.5
+2024-07-28 00:57:08,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=87540.0, ans=10.0
+2024-07-28 00:57:24,600 INFO [train.py:1114] (1/4) Epoch 7, batch 4350, loss[loss=0.1854, simple_loss=0.2669, pruned_loss=0.05198, over 4758.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3039, pruned_loss=0.06971, over 940695.14 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:57:26,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=87580.0, ans=0.125
+2024-07-28 00:57:48,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=87620.0, ans=0.09899494936611666
+2024-07-28 00:57:54,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=87633.33333333333, ans=0.09899494936611666
+2024-07-28 00:57:58,009 INFO [train.py:1114] (1/4) Epoch 7, batch 4400, loss[loss=0.1978, simple_loss=0.2906, pruned_loss=0.05253, over 4802.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3036, pruned_loss=0.06943, over 940893.75 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:58:04,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.991e+01 6.337e+01 7.130e+01 1.070e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-28 00:58:09,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87660.0, ans=0.125
+2024-07-28 00:58:10,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87673.33333333333, ans=0.0
+2024-07-28 00:58:28,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.58 vs. limit=15.0
+2024-07-28 00:58:31,605 INFO [train.py:1114] (1/4) Epoch 7, batch 4450, loss[loss=0.1715, simple_loss=0.2643, pruned_loss=0.03935, over 4938.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3036, pruned_loss=0.06989, over 938766.11 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0
+2024-07-28 00:58:33,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=87713.33333333333, ans=0.1
+2024-07-28 00:58:33,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=87713.33333333333, ans=0.125
+2024-07-28 00:58:36,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=87713.33333333333, ans=0.125
+2024-07-28 00:58:43,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=87726.66666666667, ans=0.125
+2024-07-28 00:58:43,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=87726.66666666667, ans=0.125
+2024-07-28 00:58:48,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.82 vs. limit=10.0
+2024-07-28 00:58:51,521 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.85 vs. limit=15.0
+2024-07-28 00:58:53,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=87753.33333333333, ans=0.125
+2024-07-28 00:58:54,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.11 vs. limit=22.5
+2024-07-28 00:58:56,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=87753.33333333333, ans=0.0
+2024-07-28 00:59:05,050 INFO [train.py:1114] (1/4) Epoch 7, batch 4500, loss[loss=0.225, simple_loss=0.3054, pruned_loss=0.07233, over 4737.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3039, pruned_loss=0.06967, over 937925.67 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 00:59:10,923 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.694e+01 6.393e+01 7.700e+01 1.282e+02, threshold=1.279e+02, percent-clipped=1.0
+2024-07-28 00:59:16,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=87793.33333333333, ans=0.0
+2024-07-28 00:59:22,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=87806.66666666667, ans=0.125
+2024-07-28 00:59:25,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=87820.0, ans=0.125
+2024-07-28 00:59:26,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=87820.0, ans=0.125
+2024-07-28 00:59:34,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.07 vs. limit=22.5
+2024-07-28 00:59:35,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=87833.33333333333, ans=0.95
+2024-07-28 00:59:36,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=87833.33333333333, ans=0.0
+2024-07-28 00:59:38,263 INFO [train.py:1114] (1/4) Epoch 7, batch 4550, loss[loss=0.2399, simple_loss=0.3135, pruned_loss=0.08319, over 4897.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3037, pruned_loss=0.06954, over 939903.32 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 00:59:42,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=87846.66666666667, ans=0.0
+2024-07-28 01:00:06,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=87900.0, ans=0.125
+2024-07-28 01:00:15,711 INFO [train.py:1114] (1/4) Epoch 7, batch 4600, loss[loss=0.2435, simple_loss=0.3259, pruned_loss=0.08054, over 4452.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3026, pruned_loss=0.06915, over 938319.01 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:00:17,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=87913.33333333333, ans=0.125
+2024-07-28 01:00:19,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=87913.33333333333, ans=0.0
+2024-07-28 01:00:21,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.990e+01 6.916e+01 8.662e+01 1.306e+02, threshold=1.383e+02, percent-clipped=1.0
+2024-07-28 01:00:25,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=87926.66666666667, ans=0.2
+2024-07-28 01:00:33,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=87940.0, ans=0.125
+2024-07-28 01:00:39,915 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:00:47,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=87966.66666666667, ans=0.125
+2024-07-28 01:00:48,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87966.66666666667, ans=0.0
+2024-07-28 01:00:50,862 INFO [train.py:1114] (1/4) Epoch 7, batch 4650, loss[loss=0.223, simple_loss=0.3078, pruned_loss=0.06906, over 4841.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3046, pruned_loss=0.06962, over 940233.48 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:00:55,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0
+2024-07-28 01:01:03,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=87993.33333333333, ans=0.125
+2024-07-28 01:01:07,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=88006.66666666667, ans=0.2
+2024-07-28 01:01:08,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0
+2024-07-28 01:01:21,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0
+2024-07-28 01:01:21,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=88033.33333333333, ans=0.0
+2024-07-28 01:01:22,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=88033.33333333333, ans=0.0
+2024-07-28 01:01:23,943 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:01:26,359 INFO [train.py:1114] (1/4) Epoch 7, batch 4700, loss[loss=0.1763, simple_loss=0.2567, pruned_loss=0.04795, over 4696.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3035, pruned_loss=0.06935, over 937227.77 frames. ], batch size: 11, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:01:32,304 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.909e+01 6.693e+01 7.629e+01 1.851e+02, threshold=1.339e+02, percent-clipped=2.0
+2024-07-28 01:01:34,805 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.57 vs. limit=22.5
+2024-07-28 01:01:35,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=88060.0, ans=0.125
+2024-07-28 01:01:35,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.85 vs. limit=15.0
+2024-07-28 01:01:37,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88060.0, ans=0.1
+2024-07-28 01:01:59,572 INFO [train.py:1114] (1/4) Epoch 7, batch 4750, loss[loss=0.2513, simple_loss=0.3274, pruned_loss=0.08759, over 4469.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3048, pruned_loss=0.07041, over 935024.42 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:02:15,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.81 vs. limit=15.0
+2024-07-28 01:02:21,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=88153.33333333333, ans=0.035
+2024-07-28 01:02:28,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88166.66666666667, ans=0.1
+2024-07-28 01:02:30,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.69 vs. limit=15.0
+2024-07-28 01:02:31,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.61 vs. limit=12.0
+2024-07-28 01:02:33,426 INFO [train.py:1114] (1/4) Epoch 7, batch 4800, loss[loss=0.2695, simple_loss=0.3443, pruned_loss=0.09738, over 4694.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3046, pruned_loss=0.0709, over 931910.76 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:02:39,288 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.971e+01 6.574e+01 7.583e+01 1.047e+02, threshold=1.315e+02, percent-clipped=0.0
+2024-07-28 01:02:41,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=88193.33333333333, ans=0.0
+2024-07-28 01:02:49,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=88206.66666666667, ans=0.125
+2024-07-28 01:02:53,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=88220.0, ans=0.07
+2024-07-28 01:03:02,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=88233.33333333333, ans=0.025
+2024-07-28 01:03:04,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=88233.33333333333, ans=0.07
+2024-07-28 01:03:06,429 INFO [train.py:1114] (1/4) Epoch 7, batch 4850, loss[loss=0.2294, simple_loss=0.324, pruned_loss=0.06743, over 4738.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3047, pruned_loss=0.07109, over 931399.12 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:03:09,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=88246.66666666667, ans=0.0
+2024-07-28 01:03:22,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=88273.33333333333, ans=0.07
+2024-07-28 01:03:39,969 INFO [train.py:1114] (1/4) Epoch 7, batch 4900, loss[loss=0.243, simple_loss=0.3312, pruned_loss=0.07745, over 4763.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.304, pruned_loss=0.07047, over 933381.33 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:03:41,661 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:03:43,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88313.33333333333, ans=0.0
+2024-07-28 01:03:46,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.185e+01 5.896e+01 6.545e+01 7.673e+01 1.105e+02, threshold=1.309e+02, percent-clipped=0.0
+2024-07-28 01:03:47,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=88326.66666666667, ans=0.07
+2024-07-28 01:03:50,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88326.66666666667, ans=0.1
+2024-07-28 01:03:55,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=88340.0, ans=0.025
+2024-07-28 01:04:13,955 INFO [train.py:1114] (1/4) Epoch 7, batch 4950, loss[loss=0.2797, simple_loss=0.346, pruned_loss=0.1067, over 3532.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3059, pruned_loss=0.07161, over 930647.58 frames. ], batch size: 35, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:04:20,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=88393.33333333333, ans=0.125
+2024-07-28 01:04:42,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=22.5
+2024-07-28 01:04:44,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=88433.33333333333, ans=0.125
+2024-07-28 01:04:45,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=88433.33333333333, ans=0.0
+2024-07-28 01:04:48,683 INFO [train.py:1114] (1/4) Epoch 7, batch 5000, loss[loss=0.2419, simple_loss=0.3208, pruned_loss=0.08149, over 4653.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3054, pruned_loss=0.07091, over 934644.35 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:04:49,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=88446.66666666667, ans=0.2
+2024-07-28 01:04:52,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=88446.66666666667, ans=0.125
+2024-07-28 01:04:55,130 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.044e+01 6.033e+01 7.025e+01 8.348e+01 1.303e+02, threshold=1.405e+02, percent-clipped=0.0
+2024-07-28 01:05:12,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=88486.66666666667, ans=0.0
+2024-07-28 01:05:15,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=88500.0, ans=0.015
+2024-07-28 01:05:17,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=88500.0, ans=0.05
+2024-07-28 01:05:21,739 INFO [train.py:1114] (1/4) Epoch 7, batch 5050, loss[loss=0.164, simple_loss=0.255, pruned_loss=0.03653, over 4867.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3041, pruned_loss=0.07018, over 937442.60 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:05:31,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=88526.66666666667, ans=0.125
+2024-07-28 01:05:49,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0
+2024-07-28 01:05:54,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.95 vs. limit=22.5
+2024-07-28 01:05:58,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=88566.66666666667, ans=0.125
+2024-07-28 01:05:59,502 INFO [train.py:1114] (1/4) Epoch 7, batch 5100, loss[loss=0.2044, simple_loss=0.2908, pruned_loss=0.05898, over 4775.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3053, pruned_loss=0.0714, over 935205.58 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:06:00,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=88580.0, ans=0.125
+2024-07-28 01:06:03,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=88580.0, ans=0.2
+2024-07-28 01:06:03,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=12.0
+2024-07-28 01:06:06,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.884e+01 6.519e+01 7.454e+01 1.176e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 01:06:30,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=88633.33333333333, ans=0.0
+2024-07-28 01:06:37,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=88646.66666666667, ans=0.0
+2024-07-28 01:06:37,388 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:06:37,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=88646.66666666667, ans=0.2
+2024-07-28 01:06:37,929 INFO [train.py:1114] (1/4) Epoch 7, batch 5150, loss[loss=0.2406, simple_loss=0.3078, pruned_loss=0.08672, over 4841.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3064, pruned_loss=0.07184, over 936204.51 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 16.0
+2024-07-28 01:06:39,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=88646.66666666667, ans=0.0
+2024-07-28 01:06:56,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88673.33333333333, ans=0.0
+2024-07-28 01:07:02,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88686.66666666667, ans=0.1
+2024-07-28 01:07:02,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=88686.66666666667, ans=0.2
+2024-07-28 01:07:06,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.92 vs. limit=22.5
+2024-07-28 01:07:07,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88700.0, ans=0.1
+2024-07-28 01:07:11,377 INFO [train.py:1114] (1/4) Epoch 7, batch 5200, loss[loss=0.226, simple_loss=0.3227, pruned_loss=0.06468, over 4671.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3062, pruned_loss=0.07113, over 936271.72 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:07:13,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=88713.33333333333, ans=0.2
+2024-07-28 01:07:18,340 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.071e+01 6.603e+01 7.061e+01 1.007e+02, threshold=1.321e+02, percent-clipped=0.0
+2024-07-28 01:07:18,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88726.66666666667, ans=0.0
+2024-07-28 01:07:18,865 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.67 vs. limit=22.5
+2024-07-28 01:07:22,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=88726.66666666667, ans=0.0
+2024-07-28 01:07:29,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. limit=6.0
+2024-07-28 01:07:44,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.62 vs. limit=15.0
+2024-07-28 01:07:44,900 INFO [train.py:1114] (1/4) Epoch 7, batch 5250, loss[loss=0.196, simple_loss=0.2725, pruned_loss=0.05971, over 4889.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3062, pruned_loss=0.07096, over 935756.06 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:07:46,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=88780.0, ans=0.0
+2024-07-28 01:07:46,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88780.0, ans=0.125
+2024-07-28 01:08:06,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=88820.0, ans=0.2
+2024-07-28 01:08:08,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=88820.0, ans=0.0
+2024-07-28 01:08:18,676 INFO [train.py:1114] (1/4) Epoch 7, batch 5300, loss[loss=0.243, simple_loss=0.3331, pruned_loss=0.07642, over 4591.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3054, pruned_loss=0.07085, over 933994.25 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:08:18,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=88846.66666666667, ans=0.0
+2024-07-28 01:08:24,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=88860.0, ans=0.0
+2024-07-28 01:08:25,197 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.926e+01 6.505e+01 7.271e+01 1.034e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 01:08:40,512 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:08:43,974 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:08:45,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=88900.0, ans=0.2
+2024-07-28 01:08:47,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.73 vs. limit=12.0
+2024-07-28 01:08:51,783 INFO [train.py:1114] (1/4) Epoch 7, batch 5350, loss[loss=0.2047, simple_loss=0.275, pruned_loss=0.06719, over 4532.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3057, pruned_loss=0.07098, over 936087.29 frames. ], batch size: 10, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:08:53,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=88913.33333333333, ans=0.2
+2024-07-28 01:08:57,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88913.33333333333, ans=0.1
+2024-07-28 01:09:00,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=88926.66666666667, ans=0.125
+2024-07-28 01:09:01,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=88926.66666666667, ans=0.125
+2024-07-28 01:09:10,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=88940.0, ans=0.04949747468305833
+2024-07-28 01:09:26,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=88966.66666666667, ans=0.125
+2024-07-28 01:09:30,872 INFO [train.py:1114] (1/4) Epoch 7, batch 5400, loss[loss=0.2504, simple_loss=0.3342, pruned_loss=0.08335, over 4234.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3063, pruned_loss=0.07179, over 930590.70 frames. ], batch size: 25, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:09:35,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=88980.0, ans=0.125
+2024-07-28 01:09:37,772 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.891e+01 6.586e+01 7.274e+01 1.067e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-28 01:09:44,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=89006.66666666667, ans=0.125
+2024-07-28 01:09:52,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=12.0
+2024-07-28 01:09:54,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.19 vs. limit=10.0
+2024-07-28 01:10:04,291 INFO [train.py:1114] (1/4) Epoch 7, batch 5450, loss[loss=0.177, simple_loss=0.2603, pruned_loss=0.04689, over 4720.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3043, pruned_loss=0.07057, over 933455.93 frames. ], batch size: 11, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:10:04,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=89046.66666666667, ans=10.0
+2024-07-28 01:10:09,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=89046.66666666667, ans=0.125
+2024-07-28 01:10:12,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89060.0, ans=0.125
+2024-07-28 01:10:26,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=89086.66666666667, ans=0.125
+2024-07-28 01:10:29,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=89086.66666666667, ans=0.125
+2024-07-28 01:10:30,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=89086.66666666667, ans=0.0
+2024-07-28 01:10:31,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89100.0, ans=0.0
+2024-07-28 01:10:31,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.68 vs. limit=10.0
+2024-07-28 01:10:36,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=89100.0, ans=0.0
+2024-07-28 01:10:45,382 INFO [train.py:1114] (1/4) Epoch 7, batch 5500, loss[loss=0.2812, simple_loss=0.3404, pruned_loss=0.111, over 4232.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3046, pruned_loss=0.07108, over 931165.95 frames. ], batch size: 25, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:11:07,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=89113.33333333333, ans=0.0
+2024-07-28 01:11:09,584 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 6.102e+01 6.764e+01 7.655e+01 1.015e+02, threshold=1.353e+02, percent-clipped=0.0
+2024-07-28 01:11:14,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=89126.66666666667, ans=0.2
+2024-07-28 01:12:22,824 INFO [train.py:1114] (1/4) Epoch 7, batch 5550, loss[loss=0.1988, simple_loss=0.2913, pruned_loss=0.05318, over 4716.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3043, pruned_loss=0.07053, over 933276.52 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:12:37,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=89206.66666666667, ans=0.125
+2024-07-28 01:12:38,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.12 vs. limit=22.5
+2024-07-28 01:12:51,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=89220.0, ans=0.125
+2024-07-28 01:12:58,623 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:13:07,180 INFO [train.py:1114] (1/4) Epoch 7, batch 5600, loss[loss=0.2324, simple_loss=0.3212, pruned_loss=0.07176, over 4740.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3048, pruned_loss=0.07073, over 934319.17 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:13:14,833 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.099e+01 6.958e+01 8.233e+01 1.047e+02, threshold=1.392e+02, percent-clipped=0.0
+2024-07-28 01:13:21,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=89260.0, ans=0.125
+2024-07-28 01:13:25,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=89273.33333333333, ans=0.0
+2024-07-28 01:13:28,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=89273.33333333333, ans=0.0
+2024-07-28 01:13:29,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=89286.66666666667, ans=0.125
+2024-07-28 01:13:37,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=89300.0, ans=0.125
+2024-07-28 01:13:41,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=89300.0, ans=0.125
+2024-07-28 01:13:42,967 INFO [train.py:1114] (1/4) Epoch 7, batch 5650, loss[loss=0.2428, simple_loss=0.3234, pruned_loss=0.08115, over 4532.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3034, pruned_loss=0.06963, over 936707.33 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:13:58,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=89340.0, ans=0.125
+2024-07-28 01:14:16,566 INFO [train.py:1114] (1/4) Epoch 7, batch 5700, loss[loss=0.2022, simple_loss=0.294, pruned_loss=0.05518, over 4690.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3032, pruned_loss=0.06977, over 937988.82 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0
+2024-07-28 01:14:23,478 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.798e+01 6.210e+01 7.158e+01 1.197e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 01:14:28,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=89393.33333333333, ans=0.2
+2024-07-28 01:14:50,060 INFO [train.py:1114] (1/4) Epoch 7, batch 5750, loss[loss=0.2165, simple_loss=0.3035, pruned_loss=0.06478, over 4734.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3038, pruned_loss=0.07017, over 938386.92 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 32.0
+2024-07-28 01:15:02,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=89473.33333333333, ans=0.07
+2024-07-28 01:15:12,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=89486.66666666667, ans=0.125
+2024-07-28 01:15:14,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=89486.66666666667, ans=0.0
+2024-07-28 01:15:23,591 INFO [train.py:1114] (1/4) Epoch 7, batch 5800, loss[loss=0.217, simple_loss=0.3072, pruned_loss=0.06343, over 4676.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3035, pruned_loss=0.07012, over 937554.64 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:15:25,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89513.33333333333, ans=0.0
+2024-07-28 01:15:31,063 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.066e+01 6.586e+01 7.704e+01 1.621e+02, threshold=1.317e+02, percent-clipped=1.0
+2024-07-28 01:15:45,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=89553.33333333333, ans=0.0
+2024-07-28 01:15:52,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=89566.66666666667, ans=0.125
+2024-07-28 01:15:53,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=89566.66666666667, ans=0.0
+2024-07-28 01:15:57,443 INFO [train.py:1114] (1/4) Epoch 7, batch 5850, loss[loss=0.236, simple_loss=0.3096, pruned_loss=0.08114, over 4525.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.305, pruned_loss=0.07138, over 938316.41 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:16:01,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. limit=6.0
+2024-07-28 01:16:10,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=89593.33333333333, ans=0.125
+2024-07-28 01:16:25,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=89633.33333333333, ans=0.125
+2024-07-28 01:16:32,045 INFO [train.py:1114] (1/4) Epoch 7, batch 5900, loss[loss=0.2156, simple_loss=0.3061, pruned_loss=0.0625, over 4692.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3053, pruned_loss=0.07103, over 938719.71 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0
+2024-07-28 01:16:39,539 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 6.380e+01 7.370e+01 9.045e+01 1.525e+02, threshold=1.474e+02, percent-clipped=5.0
+2024-07-28 01:16:45,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=89673.33333333333, ans=0.125
+2024-07-28 01:16:47,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs.
limit=15.0 +2024-07-28 01:16:52,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=89686.66666666667, ans=0.0 +2024-07-28 01:16:56,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.09 vs. limit=15.0 +2024-07-28 01:17:07,212 INFO [train.py:1114] (1/4) Epoch 7, batch 5950, loss[loss=0.2748, simple_loss=0.3413, pruned_loss=0.1042, over 4680.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.305, pruned_loss=0.07057, over 940583.55 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:17:09,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.11 vs. limit=22.5 +2024-07-28 01:17:19,318 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:17:25,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.24 vs. limit=10.0 +2024-07-28 01:17:27,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89753.33333333333, ans=0.1 +2024-07-28 01:17:29,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=89753.33333333333, ans=0.2 +2024-07-28 01:17:41,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=6.0 +2024-07-28 01:17:42,383 INFO [train.py:1114] (1/4) Epoch 7, batch 6000, loss[loss=0.2459, simple_loss=0.3332, pruned_loss=0.07926, over 4210.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3035, pruned_loss=0.07044, over 937370.20 frames. ], batch size: 25, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:17:42,383 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 01:17:48,871 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8324, 5.3304, 4.8472, 5.6164], device='cuda:1') +2024-07-28 01:17:54,532 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2893, pruned_loss=0.04109, over 944034.00 frames. +2024-07-28 01:17:54,532 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 01:17:56,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.83 vs. 
limit=22.5 +2024-07-28 01:17:56,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=89780.0, ans=0.125 +2024-07-28 01:17:57,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=89780.0, ans=0.2 +2024-07-28 01:18:03,852 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.859e+01 6.415e+01 7.407e+01 1.156e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 01:18:09,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=89793.33333333333, ans=0.2 +2024-07-28 01:18:11,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=89806.66666666667, ans=0.07 +2024-07-28 01:18:13,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=89806.66666666667, ans=0.025 +2024-07-28 01:18:17,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=89806.66666666667, ans=0.0 +2024-07-28 01:18:17,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.47 vs. limit=15.0 +2024-07-28 01:18:19,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=89820.0, ans=0.125 +2024-07-28 01:18:23,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.27 vs. limit=22.5 +2024-07-28 01:18:29,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89833.33333333333, ans=0.125 +2024-07-28 01:18:31,891 INFO [train.py:1114] (1/4) Epoch 7, batch 6050, loss[loss=0.1656, simple_loss=0.2405, pruned_loss=0.04536, over 4782.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3027, pruned_loss=0.07017, over 938761.47 frames. ], batch size: 12, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:18:32,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=89846.66666666667, ans=0.09899494936611666 +2024-07-28 01:18:51,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.25 vs. limit=15.0 +2024-07-28 01:18:52,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=89886.66666666667, ans=0.0 +2024-07-28 01:19:05,215 INFO [train.py:1114] (1/4) Epoch 7, batch 6100, loss[loss=0.2619, simple_loss=0.349, pruned_loss=0.08743, over 4685.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3021, pruned_loss=0.06994, over 937944.60 frames. 
], batch size: 15, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:19:12,437 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.958e+01 6.611e+01 7.776e+01 1.081e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 01:19:12,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=89926.66666666667, ans=0.0 +2024-07-28 01:19:35,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.10 vs. limit=15.0 +2024-07-28 01:19:38,545 INFO [train.py:1114] (1/4) Epoch 7, batch 6150, loss[loss=0.2283, simple_loss=0.3052, pruned_loss=0.07572, over 3546.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3027, pruned_loss=0.06991, over 937106.92 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:19:39,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=89980.0, ans=0.0 +2024-07-28 01:19:41,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89980.0, ans=0.125 +2024-07-28 01:19:46,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=89993.33333333333, ans=0.025 +2024-07-28 01:19:56,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=90006.66666666667, ans=0.0 +2024-07-28 01:19:58,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90006.66666666667, ans=0.125 +2024-07-28 01:20:15,019 INFO [train.py:1114] (1/4) Epoch 7, batch 6200, loss[loss=0.2053, simple_loss=0.302, pruned_loss=0.0543, over 4752.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3043, pruned_loss=0.07074, over 936793.93 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:20:22,606 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.692e+01 6.027e+01 6.497e+01 7.393e+01 1.206e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 01:20:24,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.83 vs. limit=10.0 +2024-07-28 01:20:26,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=90060.0, ans=0.125 +2024-07-28 01:20:34,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90073.33333333333, ans=0.125 +2024-07-28 01:20:44,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=90100.0, ans=0.07 +2024-07-28 01:20:48,960 INFO [train.py:1114] (1/4) Epoch 7, batch 6250, loss[loss=0.2372, simple_loss=0.3244, pruned_loss=0.07498, over 4800.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3035, pruned_loss=0.07062, over 933241.76 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:21:05,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90140.0, ans=0.1 +2024-07-28 01:21:22,797 INFO [train.py:1114] (1/4) Epoch 7, batch 6300, loss[loss=0.1914, simple_loss=0.2781, pruned_loss=0.05235, over 4534.00 frames. 
], tot_loss[loss=0.2228, simple_loss=0.3038, pruned_loss=0.07086, over 930248.33 frames. ], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:21:29,895 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.979e+01 7.188e+01 8.735e+01 1.314e+02, threshold=1.438e+02, percent-clipped=1.0 +2024-07-28 01:21:35,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=90206.66666666667, ans=0.0 +2024-07-28 01:21:37,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90206.66666666667, ans=0.0 +2024-07-28 01:21:41,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=90206.66666666667, ans=0.125 +2024-07-28 01:21:42,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=90220.0, ans=0.0 +2024-07-28 01:21:43,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90220.0, ans=0.1 +2024-07-28 01:21:47,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90220.0, ans=0.125 +2024-07-28 01:21:55,880 INFO [train.py:1114] (1/4) Epoch 7, batch 6350, loss[loss=0.2075, simple_loss=0.2895, pruned_loss=0.06273, over 4421.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3036, pruned_loss=0.07033, over 934151.24 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:22:08,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=90273.33333333333, ans=0.125 +2024-07-28 01:22:17,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=90286.66666666667, ans=0.125 +2024-07-28 01:22:26,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=90300.0, ans=0.0 +2024-07-28 01:22:29,294 INFO [train.py:1114] (1/4) Epoch 7, batch 6400, loss[loss=0.2009, simple_loss=0.2837, pruned_loss=0.05908, over 4634.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3029, pruned_loss=0.06945, over 935359.27 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:22:32,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90313.33333333333, ans=0.1 +2024-07-28 01:22:36,604 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.990e+01 6.724e+01 8.012e+01 1.042e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-28 01:22:46,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=90340.0, ans=0.0 +2024-07-28 01:22:54,200 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0 +2024-07-28 01:23:03,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=90366.66666666667, ans=0.07 +2024-07-28 01:23:06,107 INFO [train.py:1114] (1/4) Epoch 7, batch 6450, loss[loss=0.2538, simple_loss=0.3365, pruned_loss=0.08553, over 4525.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3031, pruned_loss=0.06909, over 938796.25 frames. 
], batch size: 21, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:23:13,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90393.33333333333, ans=0.1 +2024-07-28 01:23:19,362 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.42 vs. limit=15.0 +2024-07-28 01:23:19,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=90406.66666666667, ans=0.0 +2024-07-28 01:23:41,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0 +2024-07-28 01:23:46,190 INFO [train.py:1114] (1/4) Epoch 7, batch 6500, loss[loss=0.2509, simple_loss=0.3294, pruned_loss=0.08621, over 3578.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3023, pruned_loss=0.06881, over 940286.11 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:23:46,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=90446.66666666667, ans=0.125 +2024-07-28 01:23:52,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90460.0, ans=0.125 +2024-07-28 01:23:57,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.820e+01 6.453e+01 7.206e+01 1.081e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 01:24:03,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90460.0, ans=0.0 +2024-07-28 01:24:10,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=90473.33333333333, ans=0.025 +2024-07-28 01:24:12,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=90473.33333333333, ans=0.2 +2024-07-28 01:24:17,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90486.66666666667, ans=0.125 +2024-07-28 01:24:28,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90486.66666666667, ans=0.125 +2024-07-28 01:24:28,185 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.37 vs. limit=22.5 +2024-07-28 01:24:29,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90486.66666666667, ans=0.1 +2024-07-28 01:24:38,480 INFO [train.py:1114] (1/4) Epoch 7, batch 6550, loss[loss=0.207, simple_loss=0.288, pruned_loss=0.06297, over 4802.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3024, pruned_loss=0.06886, over 943000.38 frames. ], batch size: 11, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:24:38,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.89 vs. 
limit=15.0 +2024-07-28 01:24:43,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=90513.33333333333, ans=0.125 +2024-07-28 01:24:47,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.73 vs. limit=22.5 +2024-07-28 01:24:48,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=15.0 +2024-07-28 01:24:52,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90540.0, ans=0.125 +2024-07-28 01:24:54,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=90540.0, ans=0.125 +2024-07-28 01:24:54,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0 +2024-07-28 01:24:57,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=90540.0, ans=0.2 +2024-07-28 01:25:12,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=90553.33333333333, ans=0.07 +2024-07-28 01:25:21,615 INFO [train.py:1114] (1/4) Epoch 7, batch 6600, loss[loss=0.194, simple_loss=0.2931, pruned_loss=0.04744, over 4928.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.3012, pruned_loss=0.06823, over 944818.90 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:25:29,262 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 6.007e+01 7.132e+01 8.613e+01 1.294e+02, threshold=1.426e+02, percent-clipped=1.0 +2024-07-28 01:25:36,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90606.66666666667, ans=0.125 +2024-07-28 01:25:47,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90620.0, ans=0.1 +2024-07-28 01:25:47,704 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-07-28 01:25:53,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=90633.33333333333, ans=0.125 +2024-07-28 01:25:57,362 INFO [train.py:1114] (1/4) Epoch 7, batch 6650, loss[loss=0.2545, simple_loss=0.3394, pruned_loss=0.08484, over 4603.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.3016, pruned_loss=0.06837, over 943370.00 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:26:14,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90673.33333333333, ans=0.0 +2024-07-28 01:26:29,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90700.0, ans=0.125 +2024-07-28 01:26:32,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.79 vs. 
limit=15.0 +2024-07-28 01:26:32,490 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.04 vs. limit=22.5 +2024-07-28 01:26:33,325 INFO [train.py:1114] (1/4) Epoch 7, batch 6700, loss[loss=0.2346, simple_loss=0.3121, pruned_loss=0.07853, over 4717.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3032, pruned_loss=0.06941, over 942291.81 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:26:33,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90713.33333333333, ans=0.1 +2024-07-28 01:26:37,758 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=27.00 vs. limit=15.0 +2024-07-28 01:26:40,646 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 5.978e+01 6.873e+01 8.305e+01 1.151e+02, threshold=1.375e+02, percent-clipped=0.0 +2024-07-28 01:26:46,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=90740.0, ans=0.125 +2024-07-28 01:26:53,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90753.33333333333, ans=0.125 +2024-07-28 01:26:57,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.98 vs. limit=6.0 +2024-07-28 01:27:02,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=90766.66666666667, ans=0.025 +2024-07-28 01:27:04,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=90766.66666666667, ans=0.0 +2024-07-28 01:27:07,205 INFO [train.py:1114] (1/4) Epoch 7, batch 6750, loss[loss=0.2424, simple_loss=0.3252, pruned_loss=0.07982, over 4218.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3027, pruned_loss=0.06921, over 940214.93 frames. ], batch size: 25, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:27:22,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=90806.66666666667, ans=0.125 +2024-07-28 01:27:32,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90820.0, ans=0.1 +2024-07-28 01:27:34,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90833.33333333333, ans=0.125 +2024-07-28 01:27:34,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.07 vs. limit=12.0 +2024-07-28 01:27:37,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=90833.33333333333, ans=0.125 +2024-07-28 01:27:40,990 INFO [train.py:1114] (1/4) Epoch 7, batch 6800, loss[loss=0.2042, simple_loss=0.3004, pruned_loss=0.05395, over 4637.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3034, pruned_loss=0.06932, over 938647.57 frames. 
], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:27:43,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=90846.66666666667, ans=0.125 +2024-07-28 01:27:48,115 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.741e+01 6.354e+01 7.079e+01 9.743e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 01:27:52,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90860.0, ans=0.1 +2024-07-28 01:27:57,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=90873.33333333333, ans=0.0 +2024-07-28 01:28:08,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=90900.0, ans=0.0 +2024-07-28 01:28:08,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=90900.0, ans=0.0 +2024-07-28 01:28:09,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=90900.0, ans=0.125 +2024-07-28 01:28:14,114 INFO [train.py:1114] (1/4) Epoch 7, batch 6850, loss[loss=0.2146, simple_loss=0.3047, pruned_loss=0.06223, over 4693.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3036, pruned_loss=0.06927, over 940371.72 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:28:23,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90926.66666666667, ans=0.1 +2024-07-28 01:28:25,239 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:28:25,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90926.66666666667, ans=0.1 +2024-07-28 01:28:28,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=12.0 +2024-07-28 01:28:41,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=90966.66666666667, ans=0.0 +2024-07-28 01:28:46,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90966.66666666667, ans=0.125 +2024-07-28 01:28:48,215 INFO [train.py:1114] (1/4) Epoch 7, batch 6900, loss[loss=0.2216, simple_loss=0.3132, pruned_loss=0.06499, over 4964.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3034, pruned_loss=0.0694, over 942647.75 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:28:57,320 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.878e+01 5.899e+01 6.510e+01 7.129e+01 1.062e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 01:28:58,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=90993.33333333333, ans=0.2 +2024-07-28 01:29:14,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=91020.0, ans=0.09899494936611666 +2024-07-28 01:29:25,538 INFO [train.py:1114] (1/4) Epoch 7, batch 6950, loss[loss=0.1985, simple_loss=0.2786, pruned_loss=0.05925, over 4522.00 frames. 
], tot_loss[loss=0.2208, simple_loss=0.3027, pruned_loss=0.06944, over 939950.34 frames. ], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:29:30,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=91046.66666666667, ans=15.0 +2024-07-28 01:29:39,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91060.0, ans=0.125 +2024-07-28 01:29:46,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=91073.33333333333, ans=0.125 +2024-07-28 01:30:02,400 INFO [train.py:1114] (1/4) Epoch 7, batch 7000, loss[loss=0.1904, simple_loss=0.2923, pruned_loss=0.04425, over 4609.00 frames. ], tot_loss[loss=0.22, simple_loss=0.302, pruned_loss=0.06898, over 938438.71 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:30:03,286 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.08 vs. limit=15.0 +2024-07-28 01:30:09,586 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.067e+01 5.877e+01 6.787e+01 8.210e+01 1.500e+02, threshold=1.357e+02, percent-clipped=1.0 +2024-07-28 01:30:14,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=91126.66666666667, ans=0.125 +2024-07-28 01:30:21,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0 +2024-07-28 01:30:29,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=91166.66666666667, ans=0.025 +2024-07-28 01:30:30,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=91166.66666666667, ans=0.125 +2024-07-28 01:30:35,444 INFO [train.py:1114] (1/4) Epoch 7, batch 7050, loss[loss=0.24, simple_loss=0.3314, pruned_loss=0.07429, over 4697.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.3022, pruned_loss=0.06848, over 941764.63 frames. ], batch size: 19, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:30:43,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.39 vs. limit=12.0 +2024-07-28 01:30:58,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=91220.0, ans=0.0 +2024-07-28 01:31:01,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91220.0, ans=0.125 +2024-07-28 01:31:09,039 INFO [train.py:1114] (1/4) Epoch 7, batch 7100, loss[loss=0.2775, simple_loss=0.3407, pruned_loss=0.1072, over 4805.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3045, pruned_loss=0.07021, over 936775.77 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:31:09,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=91246.66666666667, ans=0.025 +2024-07-28 01:31:11,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.90 vs. 
limit=12.0 +2024-07-28 01:31:16,782 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.674e+01 6.634e+01 7.600e+01 1.129e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 01:31:18,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.73 vs. limit=12.0 +2024-07-28 01:31:26,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91273.33333333333, ans=0.1 +2024-07-28 01:31:28,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=91286.66666666667, ans=0.125 +2024-07-28 01:31:33,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=91286.66666666667, ans=0.125 +2024-07-28 01:31:41,773 INFO [train.py:1114] (1/4) Epoch 7, batch 7150, loss[loss=0.2218, simple_loss=0.2989, pruned_loss=0.07239, over 4528.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.303, pruned_loss=0.06978, over 937901.22 frames. ], batch size: 21, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:31:45,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91313.33333333333, ans=0.1 +2024-07-28 01:31:47,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=91313.33333333333, ans=0.125 +2024-07-28 01:31:48,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91326.66666666667, ans=0.1 +2024-07-28 01:31:52,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=91326.66666666667, ans=6.0 +2024-07-28 01:32:08,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=91366.66666666667, ans=0.125 +2024-07-28 01:32:10,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=91366.66666666667, ans=0.0 +2024-07-28 01:32:10,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=91366.66666666667, ans=0.0 +2024-07-28 01:32:14,585 INFO [train.py:1114] (1/4) Epoch 7, batch 7200, loss[loss=0.2493, simple_loss=0.3319, pruned_loss=0.0833, over 4789.00 frames. ], tot_loss[loss=0.222, simple_loss=0.304, pruned_loss=0.07002, over 938222.19 frames. 
], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:32:18,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91380.0, ans=0.125 +2024-07-28 01:32:19,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=91380.0, ans=0.0 +2024-07-28 01:32:22,358 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 5.919e+01 6.755e+01 7.806e+01 1.038e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 01:32:26,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=91393.33333333333, ans=10.0 +2024-07-28 01:32:35,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91406.66666666667, ans=0.125 +2024-07-28 01:32:43,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=91433.33333333333, ans=0.125 +2024-07-28 01:32:48,424 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:32:49,498 INFO [train.py:1114] (1/4) Epoch 7, batch 7250, loss[loss=0.2035, simple_loss=0.2752, pruned_loss=0.06595, over 4853.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3025, pruned_loss=0.06933, over 939627.32 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:32:50,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=91446.66666666667, ans=0.125 +2024-07-28 01:32:57,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.30 vs. limit=22.5 +2024-07-28 01:33:01,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=91460.0, ans=0.0 +2024-07-28 01:33:07,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=91473.33333333333, ans=0.125 +2024-07-28 01:33:13,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.01 vs. limit=22.5 +2024-07-28 01:33:15,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=91500.0, ans=0.2 +2024-07-28 01:33:17,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91500.0, ans=0.125 +2024-07-28 01:33:18,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.25 vs. limit=10.0 +2024-07-28 01:33:22,425 INFO [train.py:1114] (1/4) Epoch 7, batch 7300, loss[loss=0.1858, simple_loss=0.2788, pruned_loss=0.04636, over 4848.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3033, pruned_loss=0.0699, over 940099.76 frames. 
], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:33:30,223 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.315e+01 6.274e+01 7.077e+01 8.324e+01 1.199e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 01:33:36,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=91540.0, ans=0.125 +2024-07-28 01:33:45,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91553.33333333333, ans=0.125 +2024-07-28 01:33:45,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=91553.33333333333, ans=0.0 +2024-07-28 01:33:54,591 INFO [train.py:1114] (1/4) Epoch 7, batch 7350, loss[loss=0.1866, simple_loss=0.272, pruned_loss=0.05056, over 4644.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3035, pruned_loss=0.06958, over 939591.24 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:34:10,305 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.84 vs. limit=6.0 +2024-07-28 01:34:11,394 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.055e-01 +2024-07-28 01:34:12,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=91606.66666666667, ans=0.0 +2024-07-28 01:34:16,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.13 vs. limit=12.0 +2024-07-28 01:34:16,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91620.0, ans=0.1 +2024-07-28 01:34:20,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=91633.33333333333, ans=0.125 +2024-07-28 01:34:27,593 INFO [train.py:1114] (1/4) Epoch 7, batch 7400, loss[loss=0.2042, simple_loss=0.2916, pruned_loss=0.05839, over 4686.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3037, pruned_loss=0.06911, over 940696.96 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:34:31,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.29 vs. limit=22.5 +2024-07-28 01:34:35,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=15.0 +2024-07-28 01:34:35,811 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.824e+01 5.881e+01 6.822e+01 8.435e+01 1.377e+02, threshold=1.364e+02, percent-clipped=0.0 +2024-07-28 01:34:40,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.97 vs. 
limit=15.0 +2024-07-28 01:34:46,372 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:34:49,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=91686.66666666667, ans=0.125 +2024-07-28 01:34:49,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=91686.66666666667, ans=0.2 +2024-07-28 01:34:53,244 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:35:01,067 INFO [train.py:1114] (1/4) Epoch 7, batch 7450, loss[loss=0.2063, simple_loss=0.2789, pruned_loss=0.06686, over 4616.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3022, pruned_loss=0.06874, over 938072.33 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:35:03,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=91713.33333333333, ans=0.0 +2024-07-28 01:35:21,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=91753.33333333333, ans=0.04949747468305833 +2024-07-28 01:35:34,019 INFO [train.py:1114] (1/4) Epoch 7, batch 7500, loss[loss=0.2749, simple_loss=0.3473, pruned_loss=0.1013, over 3599.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3032, pruned_loss=0.06955, over 936609.05 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:35:41,659 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.899e+01 6.430e+01 7.635e+01 1.398e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 01:35:41,862 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:35:45,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.68 vs. limit=22.5 +2024-07-28 01:35:46,914 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0 +2024-07-28 01:35:59,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=91833.33333333333, ans=0.025 +2024-07-28 01:36:00,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 01:36:01,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91833.33333333333, ans=0.1 +2024-07-28 01:36:03,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91833.33333333333, ans=0.1 +2024-07-28 01:36:04,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91833.33333333333, ans=0.125 +2024-07-28 01:36:05,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=91833.33333333333, ans=0.2 +2024-07-28 01:36:07,907 INFO [train.py:1114] (1/4) Epoch 7, batch 7550, loss[loss=0.2303, simple_loss=0.3166, pruned_loss=0.07198, over 4609.00 frames. 
], tot_loss[loss=0.2226, simple_loss=0.3048, pruned_loss=0.07017, over 936286.45 frames. ], batch size: 17, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:36:21,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91873.33333333333, ans=0.1 +2024-07-28 01:36:22,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=91873.33333333333, ans=0.125 +2024-07-28 01:36:29,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=91886.66666666667, ans=0.0 +2024-07-28 01:36:31,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=91886.66666666667, ans=0.0 +2024-07-28 01:36:32,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=91886.66666666667, ans=0.025 +2024-07-28 01:36:39,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91900.0, ans=0.1 +2024-07-28 01:36:40,626 INFO [train.py:1114] (1/4) Epoch 7, batch 7600, loss[loss=0.2408, simple_loss=0.3215, pruned_loss=0.08006, over 4817.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3039, pruned_loss=0.06981, over 937753.88 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:36:42,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=91913.33333333333, ans=0.0 +2024-07-28 01:36:50,218 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.825e+01 5.658e+01 6.042e+01 7.178e+01 9.793e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 01:37:00,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-28 01:37:08,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=91966.66666666667, ans=0.125 +2024-07-28 01:37:15,246 INFO [train.py:1114] (1/4) Epoch 7, batch 7650, loss[loss=0.1959, simple_loss=0.2794, pruned_loss=0.0562, over 4950.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3033, pruned_loss=0.06948, over 936324.83 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:37:15,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91980.0, ans=0.1 +2024-07-28 01:37:16,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91980.0, ans=0.1 +2024-07-28 01:37:17,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.45 vs. limit=15.0 +2024-07-28 01:37:20,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=91980.0, ans=0.0 +2024-07-28 01:37:27,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.82 vs. 
limit=15.0 +2024-07-28 01:37:32,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=92006.66666666667, ans=0.0 +2024-07-28 01:37:34,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=92006.66666666667, ans=15.0 +2024-07-28 01:37:35,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=92020.0, ans=0.0 +2024-07-28 01:37:35,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=92020.0, ans=0.0 +2024-07-28 01:37:49,599 INFO [train.py:1114] (1/4) Epoch 7, batch 7700, loss[loss=0.2058, simple_loss=0.2999, pruned_loss=0.05584, over 4697.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3027, pruned_loss=0.0694, over 933686.28 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:37:57,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.877e+01 6.503e+01 7.905e+01 1.085e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 01:38:21,608 INFO [train.py:1114] (1/4) Epoch 7, batch 7750, loss[loss=0.2396, simple_loss=0.3407, pruned_loss=0.0692, over 4933.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3039, pruned_loss=0.06979, over 934983.01 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:38:39,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.90 vs. limit=10.0 +2024-07-28 01:38:41,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=92153.33333333333, ans=0.0 +2024-07-28 01:38:46,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92153.33333333333, ans=0.1 +2024-07-28 01:38:55,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=15.0 +2024-07-28 01:38:58,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=92166.66666666667, ans=0.04949747468305833 +2024-07-28 01:38:59,296 INFO [train.py:1114] (1/4) Epoch 7, batch 7800, loss[loss=0.2422, simple_loss=0.3225, pruned_loss=0.08091, over 4659.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3032, pruned_loss=0.06904, over 936908.66 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:38:59,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=92180.0, ans=0.125 +2024-07-28 01:39:01,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=92180.0, ans=0.0 +2024-07-28 01:39:02,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92180.0, ans=0.1 +2024-07-28 01:39:05,827 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:39:06,959 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.790e+01 6.287e+01 7.177e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 01:39:14,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=92193.33333333333, ans=0.0 +2024-07-28 01:39:18,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=92206.66666666667, ans=0.0 +2024-07-28 01:39:53,512 INFO [train.py:1114] (1/4) Epoch 7, batch 7850, loss[loss=0.1918, simple_loss=0.2744, pruned_loss=0.0546, over 4564.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3036, pruned_loss=0.0693, over 935565.42 frames. ], batch size: 10, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:39:56,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=92246.66666666667, ans=0.5 +2024-07-28 01:40:06,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=92273.33333333333, ans=0.025 +2024-07-28 01:40:09,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.86 vs. limit=10.0 +2024-07-28 01:40:16,174 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:40:22,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-07-28 01:40:31,968 INFO [train.py:1114] (1/4) Epoch 7, batch 7900, loss[loss=0.2483, simple_loss=0.3396, pruned_loss=0.07853, over 4876.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3051, pruned_loss=0.06987, over 933052.12 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:40:35,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=92313.33333333333, ans=0.0 +2024-07-28 01:40:37,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=92313.33333333333, ans=0.0 +2024-07-28 01:40:37,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=92313.33333333333, ans=0.1 +2024-07-28 01:40:39,609 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 6.059e+01 6.486e+01 7.471e+01 1.043e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-28 01:40:39,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=92326.66666666667, ans=0.0 +2024-07-28 01:40:52,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92340.0, ans=0.1 +2024-07-28 01:40:56,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.75 vs. limit=15.0 +2024-07-28 01:41:08,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92366.66666666667, ans=0.1 +2024-07-28 01:41:09,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=92366.66666666667, ans=0.125 +2024-07-28 01:41:14,312 INFO [train.py:1114] (1/4) Epoch 7, batch 7950, loss[loss=0.3007, simple_loss=0.3534, pruned_loss=0.124, over 3420.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3047, pruned_loss=0.06974, over 935154.55 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:41:19,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=92380.0, ans=0.125 +2024-07-28 01:41:23,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=92393.33333333333, ans=0.125 +2024-07-28 01:41:30,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.08 vs. limit=15.0 +2024-07-28 01:41:32,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=92406.66666666667, ans=0.125 +2024-07-28 01:41:33,769 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-07-28 01:41:47,113 INFO [train.py:1114] (1/4) Epoch 7, batch 8000, loss[loss=0.2357, simple_loss=0.3125, pruned_loss=0.0794, over 4605.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3035, pruned_loss=0.06952, over 934542.26 frames. 
], batch size: 11, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:41:49,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92446.66666666667, ans=0.125 +2024-07-28 01:41:54,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=92460.0, ans=0.0 +2024-07-28 01:41:55,553 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.079e+01 6.641e+01 7.975e+01 1.086e+02, threshold=1.328e+02, percent-clipped=0.0 +2024-07-28 01:41:59,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=92460.0, ans=0.09899494936611666 +2024-07-28 01:42:06,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0 +2024-07-28 01:42:10,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=92486.66666666667, ans=0.125 +2024-07-28 01:42:13,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92500.0, ans=0.1 +2024-07-28 01:42:16,413 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:42:19,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=92500.0, ans=15.0 +2024-07-28 01:42:20,850 INFO [train.py:1114] (1/4) Epoch 7, batch 8050, loss[loss=0.1996, simple_loss=0.2819, pruned_loss=0.05868, over 4817.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3034, pruned_loss=0.06952, over 934560.69 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:42:29,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=92526.66666666667, ans=0.125 +2024-07-28 01:42:33,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=92526.66666666667, ans=0.5 +2024-07-28 01:42:33,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=92540.0, ans=0.125 +2024-07-28 01:42:38,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=92540.0, ans=0.125 +2024-07-28 01:42:43,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=92553.33333333333, ans=0.09899494936611666 +2024-07-28 01:42:46,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.39 vs. limit=10.0 +2024-07-28 01:42:47,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=92566.66666666667, ans=0.2 +2024-07-28 01:42:51,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=92566.66666666667, ans=0.025 +2024-07-28 01:42:53,527 INFO [train.py:1114] (1/4) Epoch 7, batch 8100, loss[loss=0.2644, simple_loss=0.329, pruned_loss=0.09987, over 4798.00 frames. 
], tot_loss[loss=0.2213, simple_loss=0.3038, pruned_loss=0.06942, over 934063.33 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:43:01,844 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 5.903e+01 6.479e+01 7.411e+01 1.026e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 01:43:02,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.02 vs. limit=22.5 +2024-07-28 01:43:21,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=92633.33333333333, ans=0.125 +2024-07-28 01:43:27,001 INFO [train.py:1114] (1/4) Epoch 7, batch 8150, loss[loss=0.2214, simple_loss=0.3096, pruned_loss=0.06655, over 4797.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.303, pruned_loss=0.06903, over 937308.72 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:43:29,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92646.66666666667, ans=0.1 +2024-07-28 01:43:43,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.82 vs. limit=15.0 +2024-07-28 01:43:54,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=92700.0, ans=0.125 +2024-07-28 01:44:00,491 INFO [train.py:1114] (1/4) Epoch 7, batch 8200, loss[loss=0.2445, simple_loss=0.3269, pruned_loss=0.08107, over 4811.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3035, pruned_loss=0.06895, over 938890.31 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:44:01,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=92713.33333333333, ans=0.125 +2024-07-28 01:44:03,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.10 vs. limit=15.0 +2024-07-28 01:44:05,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92713.33333333333, ans=0.125 +2024-07-28 01:44:05,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92713.33333333333, ans=0.1 +2024-07-28 01:44:08,973 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 6.053e+01 7.008e+01 8.416e+01 1.296e+02, threshold=1.402e+02, percent-clipped=1.0 +2024-07-28 01:44:09,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92726.66666666667, ans=0.1 +2024-07-28 01:44:28,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92766.66666666667, ans=0.125 +2024-07-28 01:44:33,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=92766.66666666667, ans=0.125 +2024-07-28 01:44:34,199 INFO [train.py:1114] (1/4) Epoch 7, batch 8250, loss[loss=0.2137, simple_loss=0.3067, pruned_loss=0.06036, over 4898.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3044, pruned_loss=0.06957, over 939060.59 frames. 
], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:44:37,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.50 vs. limit=15.0 +2024-07-28 01:44:40,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=92793.33333333333, ans=0.0 +2024-07-28 01:44:41,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.42 vs. limit=5.0 +2024-07-28 01:44:45,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=92793.33333333333, ans=0.2 +2024-07-28 01:44:48,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=92806.66666666667, ans=0.0 +2024-07-28 01:44:48,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92806.66666666667, ans=0.0 +2024-07-28 01:44:49,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.94 vs. limit=15.0 +2024-07-28 01:44:59,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=6.0 +2024-07-28 01:45:06,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.94 vs. limit=15.0 +2024-07-28 01:45:06,704 INFO [train.py:1114] (1/4) Epoch 7, batch 8300, loss[loss=0.2129, simple_loss=0.2917, pruned_loss=0.0671, over 4907.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3061, pruned_loss=0.07031, over 938721.44 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:45:15,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 6.122e+01 6.815e+01 8.383e+01 1.214e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-28 01:45:27,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=92886.66666666667, ans=0.2 +2024-07-28 01:45:28,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=92886.66666666667, ans=0.125 +2024-07-28 01:45:31,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=92886.66666666667, ans=0.0 +2024-07-28 01:45:31,320 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.50 vs. limit=10.0 +2024-07-28 01:45:36,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=92900.0, ans=0.125 +2024-07-28 01:45:37,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=92900.0, ans=0.0 +2024-07-28 01:45:38,733 INFO [train.py:1114] (1/4) Epoch 7, batch 8350, loss[loss=0.2337, simple_loss=0.3172, pruned_loss=0.07511, over 4807.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3049, pruned_loss=0.06965, over 941400.87 frames. 
], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:45:42,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=92913.33333333333, ans=0.035 +2024-07-28 01:45:53,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.99 vs. limit=15.0 +2024-07-28 01:46:02,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=92953.33333333333, ans=0.125 +2024-07-28 01:46:11,398 INFO [train.py:1114] (1/4) Epoch 7, batch 8400, loss[loss=0.2088, simple_loss=0.2862, pruned_loss=0.06567, over 4784.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3038, pruned_loss=0.06902, over 940319.01 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:46:18,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=92993.33333333333, ans=0.2 +2024-07-28 01:46:20,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 5.850e+01 6.401e+01 7.146e+01 1.045e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 01:46:20,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92993.33333333333, ans=0.0 +2024-07-28 01:46:27,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93006.66666666667, ans=0.1 +2024-07-28 01:46:29,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=93006.66666666667, ans=0.125 +2024-07-28 01:46:38,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=93033.33333333333, ans=0.2 +2024-07-28 01:46:42,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0 +2024-07-28 01:46:43,906 INFO [train.py:1114] (1/4) Epoch 7, batch 8450, loss[loss=0.2043, simple_loss=0.2845, pruned_loss=0.06202, over 4800.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3044, pruned_loss=0.06927, over 938864.82 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:46:54,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=93060.0, ans=0.125 +2024-07-28 01:47:03,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93073.33333333333, ans=0.1 +2024-07-28 01:47:09,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93086.66666666667, ans=0.1 +2024-07-28 01:47:17,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=93100.0, ans=0.0 +2024-07-28 01:47:19,787 INFO [train.py:1114] (1/4) Epoch 7, batch 8500, loss[loss=0.1924, simple_loss=0.2742, pruned_loss=0.0553, over 4626.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3042, pruned_loss=0.06947, over 938546.86 frames. 
], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:47:29,191 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 5.964e+01 6.473e+01 7.597e+01 1.017e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-28 01:47:31,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=93126.66666666667, ans=0.125 +2024-07-28 01:47:53,744 INFO [train.py:1114] (1/4) Epoch 7, batch 8550, loss[loss=0.1964, simple_loss=0.2812, pruned_loss=0.05579, over 4807.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3029, pruned_loss=0.06878, over 939609.82 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:48:09,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.54 vs. limit=12.0 +2024-07-28 01:48:12,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=93220.0, ans=0.0 +2024-07-28 01:48:20,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=93233.33333333333, ans=0.0 +2024-07-28 01:48:20,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.74 vs. limit=22.5 +2024-07-28 01:48:25,460 INFO [train.py:1114] (1/4) Epoch 7, batch 8600, loss[loss=0.2276, simple_loss=0.3153, pruned_loss=0.06993, over 4805.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3024, pruned_loss=0.06845, over 939172.75 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:48:30,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=93246.66666666667, ans=0.0 +2024-07-28 01:48:31,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=93246.66666666667, ans=0.125 +2024-07-28 01:48:35,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.089e+01 7.126e+01 9.182e+01 1.339e+02, threshold=1.425e+02, percent-clipped=2.0 +2024-07-28 01:48:57,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.00 vs. limit=10.0 +2024-07-28 01:49:02,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=93300.0, ans=0.125 +2024-07-28 01:49:04,762 INFO [train.py:1114] (1/4) Epoch 7, batch 8650, loss[loss=0.2252, simple_loss=0.3064, pruned_loss=0.07202, over 4896.00 frames. ], tot_loss[loss=0.219, simple_loss=0.3014, pruned_loss=0.06825, over 940535.86 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:49:10,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.65 vs. limit=10.0 +2024-07-28 01:49:17,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. 
limit=6.0 +2024-07-28 01:49:19,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=93326.66666666667, ans=0.0 +2024-07-28 01:49:21,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93326.66666666667, ans=0.125 +2024-07-28 01:49:25,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=93340.0, ans=0.125 +2024-07-28 01:49:32,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93353.33333333333, ans=0.125 +2024-07-28 01:49:32,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=93353.33333333333, ans=0.0 +2024-07-28 01:49:41,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.76 vs. limit=6.0 +2024-07-28 01:49:46,862 INFO [train.py:1114] (1/4) Epoch 7, batch 8700, loss[loss=0.2085, simple_loss=0.3112, pruned_loss=0.05288, over 4753.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3027, pruned_loss=0.06909, over 937679.97 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:49:58,261 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.695e+01 6.363e+01 6.862e+01 1.009e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 01:50:00,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=93393.33333333333, ans=0.125 +2024-07-28 01:50:15,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=15.0 +2024-07-28 01:50:20,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=93446.66666666667, ans=0.125 +2024-07-28 01:50:21,412 INFO [train.py:1114] (1/4) Epoch 7, batch 8750, loss[loss=0.2371, simple_loss=0.3221, pruned_loss=0.07601, over 4686.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.302, pruned_loss=0.06868, over 936424.92 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:50:22,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=93446.66666666667, ans=0.0 +2024-07-28 01:50:24,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93446.66666666667, ans=0.1 +2024-07-28 01:50:29,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=93460.0, ans=0.125 +2024-07-28 01:50:50,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=93500.0, ans=0.04949747468305833 +2024-07-28 01:50:54,987 INFO [train.py:1114] (1/4) Epoch 7, batch 8800, loss[loss=0.1997, simple_loss=0.288, pruned_loss=0.05571, over 4924.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3019, pruned_loss=0.0686, over 937116.06 frames. 
], batch size: 14, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:50:57,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.04 vs. limit=22.5 +2024-07-28 01:51:05,111 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.962e+01 6.661e+01 7.820e+01 1.016e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 01:51:12,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=22.5 +2024-07-28 01:51:13,924 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.16 vs. limit=15.0 +2024-07-28 01:51:14,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.35 vs. limit=15.0 +2024-07-28 01:51:17,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93553.33333333333, ans=0.1 +2024-07-28 01:51:21,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=93553.33333333333, ans=0.125 +2024-07-28 01:51:28,561 INFO [train.py:1114] (1/4) Epoch 7, batch 8850, loss[loss=0.2318, simple_loss=0.3172, pruned_loss=0.07321, over 4492.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3023, pruned_loss=0.06919, over 932296.62 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:51:35,137 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:51:45,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=93606.66666666667, ans=0.0 +2024-07-28 01:51:49,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.82 vs. limit=22.5 +2024-07-28 01:51:57,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.05 vs. limit=15.0 +2024-07-28 01:52:00,216 INFO [train.py:1114] (1/4) Epoch 7, batch 8900, loss[loss=0.2153, simple_loss=0.3008, pruned_loss=0.06497, over 4935.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3028, pruned_loss=0.06955, over 930108.98 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:52:02,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93646.66666666667, ans=0.125 +2024-07-28 01:52:02,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.25 vs. 
limit=22.5 +2024-07-28 01:52:09,251 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 6.236e+01 6.887e+01 8.483e+01 1.202e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 01:52:11,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=93660.0, ans=0.125 +2024-07-28 01:52:12,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93673.33333333333, ans=0.1 +2024-07-28 01:52:21,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.99 vs. limit=10.0 +2024-07-28 01:52:22,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93686.66666666667, ans=0.1 +2024-07-28 01:52:24,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=93686.66666666667, ans=0.125 +2024-07-28 01:52:30,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93700.0, ans=0.1 +2024-07-28 01:52:32,125 INFO [train.py:1114] (1/4) Epoch 7, batch 8950, loss[loss=0.2511, simple_loss=0.3286, pruned_loss=0.08686, over 4537.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3021, pruned_loss=0.06938, over 931573.86 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:52:33,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=93713.33333333333, ans=0.025 +2024-07-28 01:52:33,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=93713.33333333333, ans=0.025 +2024-07-28 01:52:36,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=15.0 +2024-07-28 01:52:40,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93726.66666666667, ans=0.0 +2024-07-28 01:52:42,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.40 vs. limit=15.0 +2024-07-28 01:52:48,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=93740.0, ans=0.125 +2024-07-28 01:52:53,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.97 vs. limit=22.5 +2024-07-28 01:52:58,344 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.79 vs. limit=10.0 +2024-07-28 01:52:59,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=93766.66666666667, ans=10.0 +2024-07-28 01:53:02,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=93766.66666666667, ans=0.2 +2024-07-28 01:53:03,461 INFO [train.py:1114] (1/4) Epoch 7, batch 9000, loss[loss=0.2232, simple_loss=0.2987, pruned_loss=0.07381, over 4650.00 frames. 
], tot_loss[loss=0.2197, simple_loss=0.3017, pruned_loss=0.0689, over 934362.90 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:53:03,461 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 01:53:10,776 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.7010, 2.8097, 4.3649, 2.0973], device='cuda:1') +2024-07-28 01:53:15,550 INFO [train.py:1146] (1/4) Epoch 7, validation: loss=0.1831, simple_loss=0.2876, pruned_loss=0.03931, over 944034.00 frames. +2024-07-28 01:53:15,550 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 01:53:24,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=93793.33333333333, ans=0.0 +2024-07-28 01:53:25,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.776e+01 6.458e+01 7.441e+01 1.035e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 01:53:28,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.89 vs. limit=15.0 +2024-07-28 01:53:38,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.89 vs. limit=15.0 +2024-07-28 01:53:41,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=93806.66666666667, ans=0.125 +2024-07-28 01:53:43,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=93820.0, ans=0.125 +2024-07-28 01:53:43,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.87 vs. limit=22.5 +2024-07-28 01:53:51,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=93833.33333333333, ans=0.125 +2024-07-28 01:53:55,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93833.33333333333, ans=0.125 +2024-07-28 01:53:56,181 INFO [train.py:1114] (1/4) Epoch 7, batch 9050, loss[loss=0.1662, simple_loss=0.2582, pruned_loss=0.03707, over 4503.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.3013, pruned_loss=0.06893, over 934423.43 frames. ], batch size: 10, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:53:59,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.38 vs. limit=15.0 +2024-07-28 01:54:06,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93860.0, ans=0.1 +2024-07-28 01:54:12,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93873.33333333333, ans=0.1 +2024-07-28 01:54:26,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=93900.0, ans=0.0 +2024-07-28 01:54:28,086 INFO [train.py:1114] (1/4) Epoch 7, batch 9100, loss[loss=0.2292, simple_loss=0.3217, pruned_loss=0.06836, over 4934.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.3009, pruned_loss=0.06848, over 936940.28 frames. 
], batch size: 14, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:37,212 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+01 5.684e+01 6.462e+01 7.112e+01 1.033e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 01:54:41,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=93940.0, ans=0.05 +2024-07-28 01:54:42,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=93940.0, ans=0.2 +2024-07-28 01:54:49,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=93953.33333333333, ans=15.0 +2024-07-28 01:54:57,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=93966.66666666667, ans=0.1 +2024-07-28 01:54:58,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=93966.66666666667, ans=0.0 +2024-07-28 01:54:59,360 INFO [train.py:1114] (1/4) Epoch 7, batch 9150, loss[loss=0.2673, simple_loss=0.3493, pruned_loss=0.09261, over 4808.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.303, pruned_loss=0.06979, over 935658.14 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:59,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=93980.0, ans=0.025 +2024-07-28 01:55:02,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93980.0, ans=0.1 +2024-07-28 01:55:12,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=94006.66666666667, ans=0.2 +2024-07-28 01:55:17,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=94006.66666666667, ans=0.125 +2024-07-28 01:55:21,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=94020.0, ans=0.0 +2024-07-28 01:55:24,277 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.06 vs. limit=10.0 +2024-07-28 01:55:33,115 INFO [train.py:1114] (1/4) Epoch 7, batch 9200, loss[loss=0.2069, simple_loss=0.2909, pruned_loss=0.06148, over 4846.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3023, pruned_loss=0.0695, over 937575.11 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:55:36,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=94046.66666666667, ans=0.2 +2024-07-28 01:55:36,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.70 vs. 
limit=22.5 +2024-07-28 01:55:42,551 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.866e+01 6.542e+01 7.562e+01 1.078e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 01:55:45,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=94073.33333333333, ans=0.125 +2024-07-28 01:55:48,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=94073.33333333333, ans=0.0 +2024-07-28 01:55:49,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.48 vs. limit=12.0 +2024-07-28 01:55:53,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.79 vs. limit=15.0 +2024-07-28 01:56:01,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=94100.0, ans=0.04949747468305833 +2024-07-28 01:56:02,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.44 vs. limit=6.0 +2024-07-28 01:56:04,047 INFO [train.py:1114] (1/4) Epoch 7, batch 9250, loss[loss=0.2501, simple_loss=0.338, pruned_loss=0.08107, over 4645.00 frames. ], tot_loss[loss=0.22, simple_loss=0.302, pruned_loss=0.06898, over 937983.10 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:56:14,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.87 vs. limit=22.5 +2024-07-28 01:56:17,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=94140.0, ans=0.125 +2024-07-28 01:56:19,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=94140.0, ans=0.2 +2024-07-28 01:56:28,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94166.66666666667, ans=0.125 +2024-07-28 01:56:32,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94166.66666666667, ans=0.125 +2024-07-28 01:56:33,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94166.66666666667, ans=0.0 +2024-07-28 01:56:35,474 INFO [train.py:1114] (1/4) Epoch 7, batch 9300, loss[loss=0.232, simple_loss=0.3087, pruned_loss=0.07771, over 4770.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.3015, pruned_loss=0.06837, over 938227.61 frames. 
], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:56:44,613 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.679e+01 6.402e+01 7.728e+01 1.178e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 01:56:47,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=94206.66666666667, ans=0.125 +2024-07-28 01:56:49,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=94206.66666666667, ans=0.0 +2024-07-28 01:56:50,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94206.66666666667, ans=0.125 +2024-07-28 01:56:54,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=94220.0, ans=0.125 +2024-07-28 01:56:57,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=94220.0, ans=0.125 +2024-07-28 01:57:01,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.20 vs. limit=22.5 +2024-07-28 01:57:07,390 INFO [train.py:1114] (1/4) Epoch 7, batch 9350, loss[loss=0.1606, simple_loss=0.2388, pruned_loss=0.04121, over 4818.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3032, pruned_loss=0.06944, over 935147.64 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:57:14,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=94260.0, ans=0.125 +2024-07-28 01:57:19,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.86 vs. limit=22.5 +2024-07-28 01:57:32,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=94300.0, ans=0.125 +2024-07-28 01:57:37,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94300.0, ans=0.0 +2024-07-28 01:57:38,747 INFO [train.py:1114] (1/4) Epoch 7, batch 9400, loss[loss=0.2513, simple_loss=0.3423, pruned_loss=0.08013, over 4696.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.303, pruned_loss=0.06966, over 933062.32 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:57:45,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=94326.66666666667, ans=0.0 +2024-07-28 01:57:48,110 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 5.906e+01 6.522e+01 7.564e+01 1.110e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 01:57:58,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=94353.33333333333, ans=0.125 +2024-07-28 01:58:09,837 INFO [train.py:1114] (1/4) Epoch 7, batch 9450, loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.0447, over 4820.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3027, pruned_loss=0.06939, over 932169.36 frames. 
], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:58:12,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=94380.0, ans=0.125 +2024-07-28 01:58:28,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=94420.0, ans=0.2 +2024-07-28 01:58:47,079 INFO [train.py:1114] (1/4) Epoch 7, batch 9500, loss[loss=0.2122, simple_loss=0.3, pruned_loss=0.06221, over 4711.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3021, pruned_loss=0.06889, over 934670.97 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:58:51,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94446.66666666667, ans=0.1 +2024-07-28 01:58:56,880 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 5.944e+01 6.483e+01 7.199e+01 9.045e+01, threshold=1.297e+02, percent-clipped=0.0 +2024-07-28 01:59:00,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=94473.33333333333, ans=0.0 +2024-07-28 01:59:07,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=94486.66666666667, ans=0.05 +2024-07-28 01:59:07,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.70 vs. limit=22.5 +2024-07-28 01:59:18,778 INFO [train.py:1114] (1/4) Epoch 7, batch 9550, loss[loss=0.2497, simple_loss=0.3192, pruned_loss=0.09012, over 4773.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3024, pruned_loss=0.06872, over 932118.19 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:59:32,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94540.0, ans=0.125 +2024-07-28 01:59:45,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=94566.66666666667, ans=0.0 +2024-07-28 01:59:49,820 INFO [train.py:1114] (1/4) Epoch 7, batch 9600, loss[loss=0.3058, simple_loss=0.3621, pruned_loss=0.1247, over 3246.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3035, pruned_loss=0.06941, over 930989.91 frames. ], batch size: 35, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:59:50,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=8.03 vs. limit=12.0 +2024-07-28 01:59:52,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.90 vs. limit=15.0 +2024-07-28 01:59:58,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. 
limit=6.0 +2024-07-28 01:59:59,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.136e+01 6.787e+01 7.890e+01 1.161e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-28 02:00:06,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=94606.66666666667, ans=0.05 +2024-07-28 02:00:21,635 INFO [train.py:1114] (1/4) Epoch 7, batch 9650, loss[loss=0.2853, simple_loss=0.3574, pruned_loss=0.1066, over 4855.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3034, pruned_loss=0.0696, over 927708.00 frames. ], batch size: 16, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 02:00:24,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94646.66666666667, ans=0.1 +2024-07-28 02:00:52,825 INFO [train.py:1114] (1/4) Epoch 7, batch 9700, loss[loss=0.2743, simple_loss=0.3397, pruned_loss=0.1044, over 4247.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3035, pruned_loss=0.06978, over 925903.93 frames. ], batch size: 25, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 02:01:02,008 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+01 6.194e+01 6.881e+01 8.155e+01 1.257e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 02:01:02,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-28 02:01:02,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94726.66666666667, ans=0.125 +2024-07-28 02:01:13,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=94753.33333333333, ans=0.0 +2024-07-28 02:01:13,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.83 vs. limit=15.0 +2024-07-28 02:01:13,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=94753.33333333333, ans=0.0 +2024-07-28 02:01:16,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=94753.33333333333, ans=0.0 +2024-07-28 02:01:17,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94766.66666666667, ans=0.125 +2024-07-28 02:01:19,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94766.66666666667, ans=0.1 +2024-07-28 02:01:20,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=15.0 +2024-07-28 02:01:23,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=94780.0, ans=0.125 +2024-07-28 02:01:24,027 INFO [train.py:1114] (1/4) Epoch 7, batch 9750, loss[loss=0.2316, simple_loss=0.32, pruned_loss=0.07162, over 4688.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3023, pruned_loss=0.06878, over 926524.11 frames. 
], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:01:29,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=94780.0, ans=0.2 +2024-07-28 02:01:36,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=94793.33333333333, ans=0.0 +2024-07-28 02:01:38,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=94806.66666666667, ans=0.1 +2024-07-28 02:01:50,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94833.33333333333, ans=0.1 +2024-07-28 02:01:56,350 INFO [train.py:1114] (1/4) Epoch 7, batch 9800, loss[loss=0.1975, simple_loss=0.282, pruned_loss=0.0565, over 4717.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.3015, pruned_loss=0.06883, over 925473.61 frames. ], batch size: 12, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:05,937 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.759e+01 6.678e+01 8.256e+01 1.240e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:02:13,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=94873.33333333333, ans=0.07 +2024-07-28 02:02:18,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=94886.66666666667, ans=0.125 +2024-07-28 02:02:25,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94900.0, ans=0.125 +2024-07-28 02:02:27,276 INFO [train.py:1114] (1/4) Epoch 7, batch 9850, loss[loss=0.2308, simple_loss=0.3022, pruned_loss=0.07969, over 4896.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3017, pruned_loss=0.06883, over 927887.53 frames. ], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:32,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.05 vs. limit=15.0 +2024-07-28 02:02:35,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=94926.66666666667, ans=0.025 +2024-07-28 02:02:42,985 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.28 vs. 
limit=6.0 +2024-07-28 02:02:43,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=94940.0, ans=0.125 +2024-07-28 02:02:48,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94953.33333333333, ans=0.125 +2024-07-28 02:02:52,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=94966.66666666667, ans=0.125 +2024-07-28 02:02:56,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=94966.66666666667, ans=0.125 +2024-07-28 02:02:57,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=94966.66666666667, ans=0.0 +2024-07-28 02:02:58,781 INFO [train.py:1114] (1/4) Epoch 7, batch 9900, loss[loss=0.292, simple_loss=0.3648, pruned_loss=0.1096, over 4847.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3031, pruned_loss=0.06958, over 926939.27 frames. ], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:08,108 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.950e+01 6.593e+01 7.492e+01 1.029e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:03:10,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=95006.66666666667, ans=0.025 +2024-07-28 02:03:14,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=95006.66666666667, ans=0.025 +2024-07-28 02:03:17,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.62 vs. limit=15.0 +2024-07-28 02:03:21,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=95020.0, ans=0.0 +2024-07-28 02:03:21,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.09 vs. limit=22.5 +2024-07-28 02:03:22,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.18 vs. limit=22.5 +2024-07-28 02:03:24,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.44 vs. limit=15.0 +2024-07-28 02:03:28,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95046.66666666667, ans=0.125 +2024-07-28 02:03:29,234 INFO [train.py:1114] (1/4) Epoch 7, batch 9950, loss[loss=0.2231, simple_loss=0.3102, pruned_loss=0.068, over 4805.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3044, pruned_loss=0.07102, over 929982.52 frames. ], batch size: 11, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:29,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95046.66666666667, ans=0.125 +2024-07-28 02:03:35,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.20 vs. 
limit=22.5 +2024-07-28 02:03:42,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95073.33333333333, ans=0.125 +2024-07-28 02:03:45,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.92 vs. limit=15.0 +2024-07-28 02:03:48,628 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:03:53,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=95100.0, ans=0.025 +2024-07-28 02:03:56,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=95100.0, ans=0.125 +2024-07-28 02:03:57,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95100.0, ans=0.0 +2024-07-28 02:03:58,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=95100.0, ans=0.0 +2024-07-28 02:03:58,491 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:03:58,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=95100.0, ans=0.2 +2024-07-28 02:04:00,296 INFO [train.py:1114] (1/4) Epoch 7, batch 10000, loss[loss=0.2194, simple_loss=0.3258, pruned_loss=0.05656, over 4630.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3067, pruned_loss=0.07173, over 927368.21 frames. ], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:09,350 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 5.825e+01 6.191e+01 6.916e+01 9.527e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:04:15,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95140.0, ans=0.125 +2024-07-28 02:04:16,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=95140.0, ans=0.025 +2024-07-28 02:04:28,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=6.0 +2024-07-28 02:04:32,325 INFO [train.py:1114] (1/4) Epoch 7, batch 10050, loss[loss=0.2896, simple_loss=0.355, pruned_loss=0.1121, over 3541.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3105, pruned_loss=0.07392, over 916752.56 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:39,634 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.01 vs. 
limit=22.5 +2024-07-28 02:04:47,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=95206.66666666667, ans=0.0 +2024-07-28 02:04:48,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95206.66666666667, ans=0.125 +2024-07-28 02:04:53,880 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.502e-03 +2024-07-28 02:04:55,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=95220.0, ans=0.2 +2024-07-28 02:04:56,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.11 vs. limit=15.0 +2024-07-28 02:05:00,362 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:05:04,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.26 vs. limit=22.5 +2024-07-28 02:05:06,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.27 vs. limit=10.0 +2024-07-28 02:05:06,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=95233.33333333333, ans=0.125 +2024-07-28 02:05:07,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=95233.33333333333, ans=0.05 +2024-07-28 02:05:08,117 INFO [train.py:1114] (1/4) Epoch 7, batch 10100, loss[loss=0.2698, simple_loss=0.3276, pruned_loss=0.106, over 3539.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3168, pruned_loss=0.08065, over 861899.79 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:14,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=95260.0, ans=0.0 +2024-07-28 02:05:17,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.232e+01 6.813e+01 7.421e+01 7.882e+01 1.006e+02, threshold=1.484e+02, percent-clipped=0.0 +2024-07-28 02:05:18,000 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:05:26,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=95273.33333333333, ans=0.0 +2024-07-28 02:05:40,555 INFO [train.py:1114] (1/4) Epoch 7, batch 10150, loss[loss=0.2225, simple_loss=0.3028, pruned_loss=0.07113, over 3421.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3208, pruned_loss=0.0853, over 821205.46 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:52,041 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.26 vs. 
limit=12.0 +2024-07-28 02:05:55,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=95340.0, ans=15.0 +2024-07-28 02:05:59,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=95340.0, ans=0.0 +2024-07-28 02:06:05,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95353.33333333333, ans=0.125 +2024-07-28 02:06:08,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=95366.66666666667, ans=0.2 +2024-07-28 02:06:14,353 INFO [train.py:1114] (1/4) Epoch 7, batch 10200, loss[loss=0.2549, simple_loss=0.3258, pruned_loss=0.09203, over 3557.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3236, pruned_loss=0.08864, over 789658.41 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:06:14,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=95380.0, ans=0.05 +2024-07-28 02:06:16,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95380.0, ans=0.125 +2024-07-28 02:06:20,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95393.33333333333, ans=0.125 +2024-07-28 02:06:24,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95393.33333333333, ans=0.1 +2024-07-28 02:06:24,489 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.931e+01 6.733e+01 7.121e+01 8.042e+01 1.219e+02, threshold=1.424e+02, percent-clipped=0.0 +2024-07-28 02:07:11,985 INFO [train.py:1114] (1/4) Epoch 8, batch 0, loss[loss=0.1778, simple_loss=0.2509, pruned_loss=0.05233, over 4848.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2509, pruned_loss=0.05233, over 4848.00 frames. ], batch size: 12, lr: 9.72e-03, grad_scale: 32.0 +2024-07-28 02:07:11,985 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 02:07:22,075 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.8965, 3.5878, 2.4759, 2.3965], device='cuda:1') +2024-07-28 02:07:23,602 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.1876, simple_loss=0.2932, pruned_loss=0.04099, over 944034.00 frames. 
+2024-07-28 02:07:23,603 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 02:07:35,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=95424.0, ans=0.0 +2024-07-28 02:08:07,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95464.0, ans=0.125 +2024-07-28 02:08:09,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95464.0, ans=0.125 +2024-07-28 02:08:13,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=95464.0, ans=0.05 +2024-07-28 02:08:13,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=95464.0, ans=0.0 +2024-07-28 02:08:23,642 INFO [train.py:1114] (1/4) Epoch 8, batch 50, loss[loss=0.1869, simple_loss=0.2656, pruned_loss=0.0541, over 4614.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3093, pruned_loss=0.07137, over 206340.85 frames. ], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:08:26,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=95477.33333333333, ans=0.125 +2024-07-28 02:08:54,397 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.904e+01 6.447e+01 7.403e+01 1.012e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 02:08:59,011 INFO [train.py:1114] (1/4) Epoch 8, batch 100, loss[loss=0.193, simple_loss=0.2642, pruned_loss=0.06087, over 4642.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3056, pruned_loss=0.07013, over 365620.38 frames. ], batch size: 12, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:00,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.54 vs. limit=22.5 +2024-07-28 02:09:02,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=95544.0, ans=0.2 +2024-07-28 02:09:11,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95570.66666666667, ans=0.125 +2024-07-28 02:09:22,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95584.0, ans=0.1 +2024-07-28 02:09:23,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=95584.0, ans=0.125 +2024-07-28 02:09:27,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95597.33333333333, ans=0.1 +2024-07-28 02:09:28,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=95597.33333333333, ans=0.125 +2024-07-28 02:09:31,872 INFO [train.py:1114] (1/4) Epoch 8, batch 150, loss[loss=0.2026, simple_loss=0.2929, pruned_loss=0.05609, over 4623.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.3023, pruned_loss=0.06742, over 494481.95 frames. 
], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:43,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95624.0, ans=0.125 +2024-07-28 02:09:43,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=95624.0, ans=0.0 +2024-07-28 02:09:45,842 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:09:58,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95664.0, ans=0.125 +2024-07-28 02:10:00,295 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.617e+01 5.653e+01 6.192e+01 6.799e+01 9.993e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:10:05,087 INFO [train.py:1114] (1/4) Epoch 8, batch 200, loss[loss=0.1967, simple_loss=0.2897, pruned_loss=0.05189, over 4536.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.3006, pruned_loss=0.06639, over 594195.50 frames. ], batch size: 21, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:13,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=95690.66666666667, ans=0.0 +2024-07-28 02:10:17,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95690.66666666667, ans=0.125 +2024-07-28 02:10:19,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=95704.0, ans=0.0 +2024-07-28 02:10:22,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95704.0, ans=0.125 +2024-07-28 02:10:26,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.71 vs. limit=15.0 +2024-07-28 02:10:35,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.09 vs. limit=15.0 +2024-07-28 02:10:38,231 INFO [train.py:1114] (1/4) Epoch 8, batch 250, loss[loss=0.2347, simple_loss=0.3119, pruned_loss=0.0788, over 4621.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.3005, pruned_loss=0.06687, over 671058.32 frames. ], batch size: 16, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:42,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.28 vs. limit=15.0 +2024-07-28 02:10:57,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.12 vs. limit=15.0 +2024-07-28 02:11:02,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=95784.0, ans=0.05 +2024-07-28 02:11:06,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=95797.33333333333, ans=0.125 +2024-07-28 02:11:08,832 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.464e+01 5.845e+01 7.036e+01 8.606e+01 1.725e+02, threshold=1.407e+02, percent-clipped=4.0 +2024-07-28 02:11:13,555 INFO [train.py:1114] (1/4) Epoch 8, batch 300, loss[loss=0.2727, simple_loss=0.3496, pruned_loss=0.09791, over 4797.00 frames. 
], tot_loss[loss=0.218, simple_loss=0.3014, pruned_loss=0.06726, over 730109.15 frames. ], batch size: 15, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:11:23,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95824.0, ans=0.125 +2024-07-28 02:11:24,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.70 vs. limit=15.0 +2024-07-28 02:11:27,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=95837.33333333333, ans=0.0 +2024-07-28 02:11:29,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=95837.33333333333, ans=0.025 +2024-07-28 02:11:38,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95850.66666666667, ans=0.125 +2024-07-28 02:11:46,904 INFO [train.py:1114] (1/4) Epoch 8, batch 350, loss[loss=0.2135, simple_loss=0.2886, pruned_loss=0.0692, over 4944.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.3024, pruned_loss=0.0675, over 776541.46 frames. ], batch size: 12, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:11:57,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=95890.66666666667, ans=0.5 +2024-07-28 02:11:57,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=95890.66666666667, ans=0.5 +2024-07-28 02:12:01,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=95904.0, ans=0.09899494936611666 +2024-07-28 02:12:15,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=95930.66666666667, ans=0.0 +2024-07-28 02:12:18,890 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.562e+01 6.059e+01 7.082e+01 1.101e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 02:12:23,502 INFO [train.py:1114] (1/4) Epoch 8, batch 400, loss[loss=0.2094, simple_loss=0.2971, pruned_loss=0.06088, over 4687.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.3003, pruned_loss=0.06616, over 813770.74 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:12:27,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=95944.0, ans=0.125 +2024-07-28 02:12:35,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.57 vs. 
limit=15.0 +2024-07-28 02:12:37,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95970.66666666667, ans=0.125 +2024-07-28 02:12:41,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=95970.66666666667, ans=0.0 +2024-07-28 02:13:04,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95997.33333333333, ans=0.1 +2024-07-28 02:13:06,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=96010.66666666667, ans=0.125 +2024-07-28 02:13:06,692 INFO [train.py:1114] (1/4) Epoch 8, batch 450, loss[loss=0.2269, simple_loss=0.309, pruned_loss=0.07241, over 4634.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.3002, pruned_loss=0.06609, over 839370.47 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:13:08,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=96010.66666666667, ans=0.125 +2024-07-28 02:13:10,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96010.66666666667, ans=0.1 +2024-07-28 02:13:13,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=96024.0, ans=0.2 +2024-07-28 02:13:25,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=96037.33333333333, ans=0.2 +2024-07-28 02:13:27,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=96050.66666666667, ans=0.025 +2024-07-28 02:13:33,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96064.0, ans=0.1 +2024-07-28 02:13:38,587 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.894e+01 6.679e+01 8.075e+01 1.208e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:13:41,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0 +2024-07-28 02:13:43,461 INFO [train.py:1114] (1/4) Epoch 8, batch 500, loss[loss=0.2147, simple_loss=0.3039, pruned_loss=0.06277, over 4694.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2992, pruned_loss=0.06613, over 861946.59 frames. ], batch size: 15, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:13:48,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96077.33333333333, ans=0.125 +2024-07-28 02:13:49,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=96090.66666666667, ans=0.0 +2024-07-28 02:14:01,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.00 vs. limit=15.0 +2024-07-28 02:14:16,577 INFO [train.py:1114] (1/4) Epoch 8, batch 550, loss[loss=0.2172, simple_loss=0.3076, pruned_loss=0.06339, over 4664.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2988, pruned_loss=0.06576, over 878497.80 frames. 
], batch size: 17, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:17,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96144.0, ans=0.1 +2024-07-28 02:14:17,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=96144.0, ans=0.0 +2024-07-28 02:14:21,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.11 vs. limit=15.0 +2024-07-28 02:14:33,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=96170.66666666667, ans=0.125 +2024-07-28 02:14:47,413 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.729e+01 6.322e+01 7.437e+01 1.078e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 02:14:49,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=96197.33333333333, ans=0.125 +2024-07-28 02:14:52,223 INFO [train.py:1114] (1/4) Epoch 8, batch 600, loss[loss=0.2086, simple_loss=0.2858, pruned_loss=0.06575, over 4633.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2982, pruned_loss=0.06499, over 892738.76 frames. ], batch size: 16, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:15:02,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=96224.0, ans=0.125 +2024-07-28 02:15:18,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=96264.0, ans=0.0 +2024-07-28 02:15:25,413 INFO [train.py:1114] (1/4) Epoch 8, batch 650, loss[loss=0.2036, simple_loss=0.2921, pruned_loss=0.0576, over 4764.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2976, pruned_loss=0.06491, over 904412.98 frames. ], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:15:26,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=96277.33333333333, ans=0.125 +2024-07-28 02:15:30,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96277.33333333333, ans=0.125 +2024-07-28 02:15:35,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=96290.66666666667, ans=0.0 +2024-07-28 02:15:38,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=96304.0, ans=0.125 +2024-07-28 02:15:41,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=96304.0, ans=0.2 +2024-07-28 02:15:52,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=96330.66666666667, ans=0.125 +2024-07-28 02:15:53,644 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.095e+01 6.758e+01 8.122e+01 1.148e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 02:15:56,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=96330.66666666667, ans=0.125 +2024-07-28 02:15:58,368 INFO [train.py:1114] (1/4) Epoch 8, batch 700, loss[loss=0.1916, simple_loss=0.2783, pruned_loss=0.05238, over 4643.00 frames. 
], tot_loss[loss=0.2133, simple_loss=0.2974, pruned_loss=0.06461, over 912324.56 frames. ], batch size: 12, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:02,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.76 vs. limit=15.0 +2024-07-28 02:16:05,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0 +2024-07-28 02:16:33,895 INFO [train.py:1114] (1/4) Epoch 8, batch 750, loss[loss=0.2339, simple_loss=0.3182, pruned_loss=0.07479, over 4689.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2966, pruned_loss=0.06458, over 918688.59 frames. ], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:39,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=96410.66666666667, ans=0.125 +2024-07-28 02:16:41,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=96424.0, ans=0.0 +2024-07-28 02:17:02,888 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.806e+01 6.357e+01 7.174e+01 1.221e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 02:17:05,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=96464.0, ans=0.0 +2024-07-28 02:17:06,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-28 02:17:07,418 INFO [train.py:1114] (1/4) Epoch 8, batch 800, loss[loss=0.2327, simple_loss=0.2991, pruned_loss=0.08319, over 4859.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2966, pruned_loss=0.06511, over 923775.83 frames. ], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:17:12,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96477.33333333333, ans=0.125 +2024-07-28 02:17:19,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=96504.0, ans=0.125 +2024-07-28 02:17:23,438 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:17:49,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.84 vs. limit=15.0 +2024-07-28 02:17:57,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=96530.66666666667, ans=0.0 +2024-07-28 02:17:58,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=96530.66666666667, ans=0.0 +2024-07-28 02:18:02,119 INFO [train.py:1114] (1/4) Epoch 8, batch 850, loss[loss=0.2352, simple_loss=0.3262, pruned_loss=0.07212, over 4666.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2972, pruned_loss=0.06521, over 927636.09 frames. 
], batch size: 14, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:14,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=96557.33333333333, ans=0.0 +2024-07-28 02:18:14,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=96570.66666666667, ans=0.125 +2024-07-28 02:18:15,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=96570.66666666667, ans=0.0 +2024-07-28 02:18:17,048 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:18:26,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=96584.0, ans=15.0 +2024-07-28 02:18:28,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96584.0, ans=0.125 +2024-07-28 02:18:32,786 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.816e+01 6.612e+01 7.766e+01 1.010e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:18:33,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96597.33333333333, ans=0.125 +2024-07-28 02:18:35,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=96597.33333333333, ans=0.125 +2024-07-28 02:18:35,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96597.33333333333, ans=0.1 +2024-07-28 02:18:37,392 INFO [train.py:1114] (1/4) Epoch 8, batch 900, loss[loss=0.1676, simple_loss=0.2524, pruned_loss=0.04138, over 4862.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2979, pruned_loss=0.0656, over 928717.22 frames. ], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:19:01,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=96650.66666666667, ans=0.125 +2024-07-28 02:19:03,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0 +2024-07-28 02:19:10,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=96677.33333333333, ans=0.125 +2024-07-28 02:19:10,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.92 vs. limit=22.5 +2024-07-28 02:19:11,047 INFO [train.py:1114] (1/4) Epoch 8, batch 950, loss[loss=0.2092, simple_loss=0.2862, pruned_loss=0.06605, over 4782.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2989, pruned_loss=0.06603, over 930359.47 frames. 
], batch size: 12, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:40,112 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.894e+01 6.010e+01 6.768e+01 8.162e+01 1.047e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-28 02:19:42,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=96730.66666666667, ans=0.05 +2024-07-28 02:19:42,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=96730.66666666667, ans=0.125 +2024-07-28 02:19:44,741 INFO [train.py:1114] (1/4) Epoch 8, batch 1000, loss[loss=0.1917, simple_loss=0.2737, pruned_loss=0.05486, over 4970.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2991, pruned_loss=0.06625, over 930169.86 frames. ], batch size: 13, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:59,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.02 vs. limit=15.0 +2024-07-28 02:20:05,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96784.0, ans=0.125 +2024-07-28 02:20:09,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=96784.0, ans=0.125 +2024-07-28 02:20:11,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.91 vs. limit=15.0 +2024-07-28 02:20:18,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=12.0 +2024-07-28 02:20:19,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96810.66666666667, ans=0.1 +2024-07-28 02:20:19,452 INFO [train.py:1114] (1/4) Epoch 8, batch 1050, loss[loss=0.2495, simple_loss=0.3303, pruned_loss=0.08439, over 4870.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2974, pruned_loss=0.06547, over 932254.32 frames. ], batch size: 14, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:20:39,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=96824.0, ans=0.0 +2024-07-28 02:20:43,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96837.33333333333, ans=0.1 +2024-07-28 02:20:48,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=96850.66666666667, ans=0.0 +2024-07-28 02:20:56,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=96850.66666666667, ans=0.125 +2024-07-28 02:21:00,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.947e+01 5.815e+01 6.423e+01 7.080e+01 9.595e+01, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:21:28,311 INFO [train.py:1114] (1/4) Epoch 8, batch 1100, loss[loss=0.2156, simple_loss=0.2984, pruned_loss=0.06641, over 4894.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2972, pruned_loss=0.06545, over 934566.88 frames. 
], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:21:31,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=96877.33333333333, ans=0.05 +2024-07-28 02:25:33,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=96890.66666666667, ans=0.0 +2024-07-28 02:25:40,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=96904.0, ans=0.125 +2024-07-28 02:25:40,297 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.44 vs. limit=15.0 +2024-07-28 02:25:56,432 INFO [train.py:1114] (1/4) Epoch 8, batch 1150, loss[loss=0.2301, simple_loss=0.3052, pruned_loss=0.0775, over 4900.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.298, pruned_loss=0.06627, over 934606.07 frames. ], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:22,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.57 vs. limit=15.0 +2024-07-28 02:26:23,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=96984.0, ans=0.125 +2024-07-28 02:26:30,181 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.022e+01 6.608e+01 7.492e+01 1.273e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:26:41,672 INFO [train.py:1114] (1/4) Epoch 8, batch 1200, loss[loss=0.2246, simple_loss=0.3126, pruned_loss=0.06828, over 4863.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2989, pruned_loss=0.067, over 933601.86 frames. ], batch size: 14, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:44,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. limit=10.0 +2024-07-28 02:26:47,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=97010.66666666667, ans=0.125 +2024-07-28 02:26:54,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97024.0, ans=0.1 +2024-07-28 02:26:58,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=97037.33333333333, ans=0.125 +2024-07-28 02:27:00,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=97037.33333333333, ans=0.025 +2024-07-28 02:27:04,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=97050.66666666667, ans=0.125 +2024-07-28 02:27:09,788 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:27:09,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97064.0, ans=0.0 +2024-07-28 02:27:13,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.53 vs. 
limit=15.0 +2024-07-28 02:27:16,979 INFO [train.py:1114] (1/4) Epoch 8, batch 1250, loss[loss=0.2201, simple_loss=0.3091, pruned_loss=0.06559, over 4797.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2993, pruned_loss=0.06687, over 937721.44 frames. ], batch size: 15, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:27:31,344 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.87 vs. limit=15.0 +2024-07-28 02:27:34,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=97104.0, ans=0.0 +2024-07-28 02:27:47,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.611e+01 6.251e+01 6.902e+01 9.769e+01, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 02:27:52,329 INFO [train.py:1114] (1/4) Epoch 8, batch 1300, loss[loss=0.2306, simple_loss=0.3212, pruned_loss=0.07001, over 4734.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.299, pruned_loss=0.06655, over 939378.11 frames. ], batch size: 19, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:27:56,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=97144.0, ans=0.0 +2024-07-28 02:28:19,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97184.0, ans=0.125 +2024-07-28 02:28:21,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=97184.0, ans=0.125 +2024-07-28 02:28:22,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-07-28 02:28:28,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.72 vs. limit=15.0 +2024-07-28 02:28:29,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=97197.33333333333, ans=0.125 +2024-07-28 02:28:33,049 INFO [train.py:1114] (1/4) Epoch 8, batch 1350, loss[loss=0.207, simple_loss=0.2881, pruned_loss=0.06296, over 4750.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2985, pruned_loss=0.06582, over 941409.79 frames. 
], batch size: 13, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:28:39,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=97210.66666666667, ans=0.09899494936611666 +2024-07-28 02:28:46,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=97224.0, ans=0.0 +2024-07-28 02:28:47,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=97224.0, ans=0.5 +2024-07-28 02:28:49,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97237.33333333333, ans=0.125 +2024-07-28 02:29:02,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=97264.0, ans=0.5 +2024-07-28 02:29:04,450 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.671e+01 8.189e+01 1.142e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:29:05,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97264.0, ans=0.1 +2024-07-28 02:29:05,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=97264.0, ans=0.2 +2024-07-28 02:29:09,255 INFO [train.py:1114] (1/4) Epoch 8, batch 1400, loss[loss=0.1919, simple_loss=0.2849, pruned_loss=0.04945, over 4710.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2983, pruned_loss=0.06601, over 943179.08 frames. ], batch size: 11, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:12,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.72 vs. limit=15.0 +2024-07-28 02:29:14,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=97277.33333333333, ans=0.0 +2024-07-28 02:29:18,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97290.66666666667, ans=0.125 +2024-07-28 02:29:18,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97290.66666666667, ans=0.1 +2024-07-28 02:29:22,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=97304.0, ans=0.0 +2024-07-28 02:29:28,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97304.0, ans=0.1 +2024-07-28 02:29:43,465 INFO [train.py:1114] (1/4) Epoch 8, batch 1450, loss[loss=0.2076, simple_loss=0.2968, pruned_loss=0.05922, over 4684.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2986, pruned_loss=0.06591, over 943074.05 frames. ], batch size: 15, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:46,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=97344.0, ans=0.09899494936611666 +2024-07-28 02:29:46,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.91 vs. 
limit=22.5 +2024-07-28 02:29:54,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97357.33333333333, ans=0.125 +2024-07-28 02:29:59,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=97370.66666666667, ans=0.0 +2024-07-28 02:30:06,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97384.0, ans=0.1 +2024-07-28 02:30:07,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.14 vs. limit=15.0 +2024-07-28 02:30:12,513 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.678e+01 6.336e+01 6.902e+01 9.292e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 02:30:16,528 INFO [train.py:1114] (1/4) Epoch 8, batch 1500, loss[loss=0.198, simple_loss=0.28, pruned_loss=0.05804, over 4809.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.299, pruned_loss=0.0659, over 942755.28 frames. ], batch size: 14, lr: 9.62e-03, grad_scale: 16.0 +2024-07-28 02:30:16,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=97410.66666666667, ans=0.2 +2024-07-28 02:30:25,474 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:30:34,228 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.42 vs. limit=22.5 +2024-07-28 02:30:37,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97437.33333333333, ans=0.1 +2024-07-28 02:30:42,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=97450.66666666667, ans=10.0 +2024-07-28 02:30:50,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.31 vs. limit=15.0 +2024-07-28 02:30:51,713 INFO [train.py:1114] (1/4) Epoch 8, batch 1550, loss[loss=0.2015, simple_loss=0.288, pruned_loss=0.05756, over 4902.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2993, pruned_loss=0.06662, over 938634.67 frames. ], batch size: 15, lr: 9.61e-03, grad_scale: 16.0 +2024-07-28 02:30:57,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=97490.66666666667, ans=0.2 +2024-07-28 02:31:12,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=97517.33333333333, ans=0.125 +2024-07-28 02:31:15,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97517.33333333333, ans=0.1 +2024-07-28 02:31:21,194 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.890e+01 6.503e+01 7.700e+01 2.674e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-28 02:31:22,737 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=6.457e-02 +2024-07-28 02:31:25,113 INFO [train.py:1114] (1/4) Epoch 8, batch 1600, loss[loss=0.2345, simple_loss=0.327, pruned_loss=0.07097, over 4881.00 frames. 
], tot_loss[loss=0.2156, simple_loss=0.2985, pruned_loss=0.06632, over 937885.89 frames. ], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:31:45,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=97584.0, ans=0.0 +2024-07-28 02:31:50,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=97584.0, ans=0.125 +2024-07-28 02:31:51,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.91 vs. limit=15.0 +2024-07-28 02:31:59,460 INFO [train.py:1114] (1/4) Epoch 8, batch 1650, loss[loss=0.2421, simple_loss=0.332, pruned_loss=0.07604, over 4658.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2989, pruned_loss=0.06652, over 937962.72 frames. ], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:32:11,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=97624.0, ans=0.0 +2024-07-28 02:32:17,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0 +2024-07-28 02:32:23,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.69 vs. limit=6.0 +2024-07-28 02:32:23,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=97650.66666666667, ans=0.125 +2024-07-28 02:32:29,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.57 vs. limit=22.5 +2024-07-28 02:32:30,781 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.779e+01 6.597e+01 7.631e+01 1.276e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:32:34,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-07-28 02:32:34,704 INFO [train.py:1114] (1/4) Epoch 8, batch 1700, loss[loss=0.2021, simple_loss=0.2897, pruned_loss=0.05727, over 4717.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2992, pruned_loss=0.06637, over 939630.29 frames. ], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:09,490 INFO [train.py:1114] (1/4) Epoch 8, batch 1750, loss[loss=0.1977, simple_loss=0.2811, pruned_loss=0.05711, over 4797.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2987, pruned_loss=0.06587, over 940437.97 frames. 
], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:10,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=97744.0, ans=6.0 +2024-07-28 02:33:30,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=97770.66666666667, ans=0.0 +2024-07-28 02:33:33,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=97784.0, ans=0.5 +2024-07-28 02:33:46,444 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.043e+01 5.863e+01 6.439e+01 7.161e+01 1.257e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 02:33:48,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97797.33333333333, ans=0.125 +2024-07-28 02:33:52,172 INFO [train.py:1114] (1/4) Epoch 8, batch 1800, loss[loss=0.2303, simple_loss=0.3129, pruned_loss=0.07388, over 4629.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2999, pruned_loss=0.06653, over 940971.46 frames. ], batch size: 13, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:55,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=97810.66666666667, ans=0.2 +2024-07-28 02:34:09,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=97837.33333333333, ans=0.0 +2024-07-28 02:34:28,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=97864.0, ans=0.0 +2024-07-28 02:34:30,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=97864.0, ans=0.0 +2024-07-28 02:34:34,877 INFO [train.py:1114] (1/4) Epoch 8, batch 1850, loss[loss=0.2602, simple_loss=0.3351, pruned_loss=0.09266, over 4811.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2982, pruned_loss=0.06578, over 941241.12 frames. ], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:34:47,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=97904.0, ans=10.0 +2024-07-28 02:34:58,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=97917.33333333333, ans=0.125 +2024-07-28 02:35:02,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=97930.66666666667, ans=0.025 +2024-07-28 02:35:04,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.869e+01 6.668e+01 7.730e+01 1.207e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:35:06,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=97930.66666666667, ans=0.0 +2024-07-28 02:35:08,078 INFO [train.py:1114] (1/4) Epoch 8, batch 1900, loss[loss=0.2477, simple_loss=0.3195, pruned_loss=0.08792, over 4649.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2981, pruned_loss=0.06516, over 942213.67 frames. ], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:13,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.64 vs. 
limit=22.5 +2024-07-28 02:35:27,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=97984.0, ans=0.0 +2024-07-28 02:35:33,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97997.33333333333, ans=0.125 +2024-07-28 02:35:36,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=97997.33333333333, ans=0.125 +2024-07-28 02:35:40,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98010.66666666667, ans=0.125 +2024-07-28 02:35:41,416 INFO [train.py:1114] (1/4) Epoch 8, batch 1950, loss[loss=0.1831, simple_loss=0.2706, pruned_loss=0.04779, over 4887.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.3011, pruned_loss=0.06679, over 944045.76 frames. ], batch size: 13, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:55,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0 +2024-07-28 02:36:03,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=98050.66666666667, ans=0.125 +2024-07-28 02:36:04,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=98050.66666666667, ans=0.5 +2024-07-28 02:36:12,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+01 5.706e+01 6.313e+01 6.898e+01 1.010e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 02:36:16,765 INFO [train.py:1114] (1/4) Epoch 8, batch 2000, loss[loss=0.2101, simple_loss=0.2905, pruned_loss=0.06488, over 4803.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.3015, pruned_loss=0.06692, over 941246.52 frames. ], batch size: 11, lr: 9.58e-03, grad_scale: 32.0 +2024-07-28 02:36:22,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=98077.33333333333, ans=0.0 +2024-07-28 02:36:25,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98090.66666666667, ans=0.1 +2024-07-28 02:36:27,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.47 vs. limit=15.0 +2024-07-28 02:36:27,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=12.0 +2024-07-28 02:36:36,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.59 vs. limit=15.0 +2024-07-28 02:36:42,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=98130.66666666667, ans=10.0 +2024-07-28 02:36:48,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=98130.66666666667, ans=0.125 +2024-07-28 02:36:50,176 INFO [train.py:1114] (1/4) Epoch 8, batch 2050, loss[loss=0.1983, simple_loss=0.271, pruned_loss=0.06285, over 4617.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.301, pruned_loss=0.06677, over 939312.14 frames. 
], batch size: 11, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:36:50,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=98144.0, ans=0.125 +2024-07-28 02:36:53,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=98144.0, ans=0.5 +2024-07-28 02:36:55,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=98144.0, ans=0.125 +2024-07-28 02:36:56,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=98157.33333333333, ans=0.125 +2024-07-28 02:36:59,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=98157.33333333333, ans=0.125 +2024-07-28 02:37:03,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=98170.66666666667, ans=0.125 +2024-07-28 02:37:06,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=98170.66666666667, ans=0.125 +2024-07-28 02:37:09,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.18 vs. limit=15.0 +2024-07-28 02:37:11,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=98184.0, ans=0.0 +2024-07-28 02:37:20,052 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 6.128e+01 6.881e+01 8.380e+01 1.718e+02, threshold=1.376e+02, percent-clipped=3.0 +2024-07-28 02:37:23,355 INFO [train.py:1114] (1/4) Epoch 8, batch 2100, loss[loss=0.1882, simple_loss=0.2769, pruned_loss=0.04976, over 4761.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.301, pruned_loss=0.0671, over 941110.47 frames. ], batch size: 13, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:37:30,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98224.0, ans=0.125 +2024-07-28 02:37:38,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=98237.33333333333, ans=0.025 +2024-07-28 02:37:46,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98250.66666666667, ans=0.125 +2024-07-28 02:37:49,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=98264.0, ans=0.0 +2024-07-28 02:37:56,375 INFO [train.py:1114] (1/4) Epoch 8, batch 2150, loss[loss=0.2286, simple_loss=0.3066, pruned_loss=0.07532, over 4900.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2995, pruned_loss=0.06638, over 944236.26 frames. 
], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:37:57,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=98277.33333333333, ans=0.125 +2024-07-28 02:37:59,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=98277.33333333333, ans=0.125 +2024-07-28 02:38:16,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0 +2024-07-28 02:38:23,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=98317.33333333333, ans=0.125 +2024-07-28 02:38:24,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-07-28 02:38:28,125 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.621e+01 6.298e+01 7.456e+01 1.063e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 02:38:28,231 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:38:30,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=98330.66666666667, ans=0.025 +2024-07-28 02:38:31,419 INFO [train.py:1114] (1/4) Epoch 8, batch 2200, loss[loss=0.2116, simple_loss=0.3028, pruned_loss=0.06022, over 4811.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.3003, pruned_loss=0.0668, over 943581.90 frames. ], batch size: 14, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:38:37,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98357.33333333333, ans=0.125 +2024-07-28 02:38:49,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=98370.66666666667, ans=0.125 +2024-07-28 02:38:55,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0 +2024-07-28 02:39:06,688 INFO [train.py:1114] (1/4) Epoch 8, batch 2250, loss[loss=0.2268, simple_loss=0.313, pruned_loss=0.07032, over 4695.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2995, pruned_loss=0.06672, over 941835.42 frames. 
], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:08,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=98410.66666666667, ans=0.09899494936611666 +2024-07-28 02:39:19,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=98437.33333333333, ans=0.125 +2024-07-28 02:39:23,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=98437.33333333333, ans=0.2 +2024-07-28 02:39:26,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98450.66666666667, ans=0.1 +2024-07-28 02:39:30,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=98450.66666666667, ans=0.125 +2024-07-28 02:39:32,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=98464.0, ans=0.0 +2024-07-28 02:39:35,839 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.847e+01 6.592e+01 7.483e+01 1.040e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 02:39:37,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=98464.0, ans=0.125 +2024-07-28 02:39:39,152 INFO [train.py:1114] (1/4) Epoch 8, batch 2300, loss[loss=0.1925, simple_loss=0.2695, pruned_loss=0.05773, over 4940.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2978, pruned_loss=0.06611, over 939780.66 frames. ], batch size: 12, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:39,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=98477.33333333333, ans=0.125 +2024-07-28 02:41:10,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=98517.33333333333, ans=0.2 +2024-07-28 02:41:22,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.22 vs. limit=10.0 +2024-07-28 02:41:22,928 INFO [train.py:1114] (1/4) Epoch 8, batch 2350, loss[loss=0.2294, simple_loss=0.3144, pruned_loss=0.07221, over 4643.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2981, pruned_loss=0.0657, over 941934.44 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 16.0 +2024-07-28 02:41:33,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=98557.33333333333, ans=0.125 +2024-07-28 02:41:35,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=98557.33333333333, ans=0.125 +2024-07-28 02:41:36,865 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.37 vs. 
limit=15.0 +2024-07-28 02:41:49,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=98597.33333333333, ans=0.025 +2024-07-28 02:41:53,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.834e+01 6.332e+01 7.540e+01 1.064e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 02:41:56,605 INFO [train.py:1114] (1/4) Epoch 8, batch 2400, loss[loss=0.1957, simple_loss=0.2761, pruned_loss=0.05769, over 4638.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2985, pruned_loss=0.06571, over 941331.30 frames. ], batch size: 12, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:18,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=98637.33333333333, ans=0.2 +2024-07-28 02:42:28,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.01 vs. limit=15.0 +2024-07-28 02:42:31,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=98664.0, ans=0.125 +2024-07-28 02:42:37,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=98677.33333333333, ans=0.025 +2024-07-28 02:42:37,992 INFO [train.py:1114] (1/4) Epoch 8, batch 2450, loss[loss=0.2154, simple_loss=0.3049, pruned_loss=0.06298, over 4695.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2989, pruned_loss=0.06612, over 937414.91 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:38,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=98677.33333333333, ans=0.125 +2024-07-28 02:43:03,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=98704.0, ans=0.125 +2024-07-28 02:43:03,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=98704.0, ans=0.0 +2024-07-28 02:43:05,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.00 vs. limit=10.0 +2024-07-28 02:43:25,560 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.058e+01 6.704e+01 7.921e+01 1.237e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 02:43:28,891 INFO [train.py:1114] (1/4) Epoch 8, batch 2500, loss[loss=0.2659, simple_loss=0.3405, pruned_loss=0.09564, over 4811.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2985, pruned_loss=0.06554, over 939526.20 frames. ], batch size: 14, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:43:33,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98744.0, ans=0.125 +2024-07-28 02:43:33,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.91 vs. limit=10.0 +2024-07-28 02:43:35,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=98757.33333333333, ans=0.025 +2024-07-28 02:43:42,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.96 vs. 
limit=22.5 +2024-07-28 02:44:05,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.10 vs. limit=15.0 +2024-07-28 02:44:05,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0 +2024-07-28 02:44:16,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=98797.33333333333, ans=0.125 +2024-07-28 02:44:21,339 INFO [train.py:1114] (1/4) Epoch 8, batch 2550, loss[loss=0.2059, simple_loss=0.2799, pruned_loss=0.06592, over 4813.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.298, pruned_loss=0.06548, over 939066.35 frames. ], batch size: 11, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:44:28,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=98824.0, ans=0.025 +2024-07-28 02:44:29,270 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:44:44,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.55 vs. limit=6.0 +2024-07-28 02:44:58,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.841e+01 6.423e+01 7.700e+01 1.142e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:45:00,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98864.0, ans=0.125 +2024-07-28 02:45:02,063 INFO [train.py:1114] (1/4) Epoch 8, batch 2600, loss[loss=0.2264, simple_loss=0.3104, pruned_loss=0.07123, over 4905.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2989, pruned_loss=0.06626, over 938492.56 frames. ], batch size: 13, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:45:02,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.96 vs. limit=15.0 +2024-07-28 02:45:02,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=98877.33333333333, ans=0.125 +2024-07-28 02:45:02,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98877.33333333333, ans=0.1 +2024-07-28 02:45:04,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.01 vs. 
limit=8.0 +2024-07-28 02:45:06,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98877.33333333333, ans=0.1 +2024-07-28 02:45:23,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=98917.33333333333, ans=0.125 +2024-07-28 02:45:25,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=98917.33333333333, ans=0.0 +2024-07-28 02:45:29,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=98930.66666666667, ans=0.09899494936611666 +2024-07-28 02:45:37,240 INFO [train.py:1114] (1/4) Epoch 8, batch 2650, loss[loss=0.2221, simple_loss=0.307, pruned_loss=0.06862, over 4608.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.299, pruned_loss=0.06616, over 940289.98 frames. ], batch size: 16, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:45:51,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=98970.66666666667, ans=0.2 +2024-07-28 02:45:51,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=98970.66666666667, ans=0.2 +2024-07-28 02:46:05,691 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0 +2024-07-28 02:46:09,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=98997.33333333333, ans=0.125 +2024-07-28 02:46:09,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=98997.33333333333, ans=0.125 +2024-07-28 02:46:12,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.757e+01 6.469e+01 7.162e+01 1.151e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 02:46:13,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.69 vs. limit=15.0 +2024-07-28 02:46:18,785 INFO [train.py:1114] (1/4) Epoch 8, batch 2700, loss[loss=0.2363, simple_loss=0.331, pruned_loss=0.07075, over 4743.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2988, pruned_loss=0.06604, over 940105.14 frames. 
], batch size: 14, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:23,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=99010.66666666667, ans=15.0 +2024-07-28 02:46:23,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=99010.66666666667, ans=0.125 +2024-07-28 02:46:27,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=99024.0, ans=0.5 +2024-07-28 02:46:30,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99024.0, ans=0.125 +2024-07-28 02:46:35,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=99037.33333333333, ans=0.0 +2024-07-28 02:46:44,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=99050.66666666667, ans=0.125 +2024-07-28 02:46:47,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=99064.0, ans=0.125 +2024-07-28 02:46:54,446 INFO [train.py:1114] (1/4) Epoch 8, batch 2750, loss[loss=0.2108, simple_loss=0.2882, pruned_loss=0.0667, over 4703.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2979, pruned_loss=0.06582, over 940271.86 frames. ], batch size: 12, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:59,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=99077.33333333333, ans=0.0 +2024-07-28 02:47:02,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=99077.33333333333, ans=0.1 +2024-07-28 02:47:03,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=99077.33333333333, ans=0.125 +2024-07-28 02:47:06,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=99090.66666666667, ans=0.0 +2024-07-28 02:47:19,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99117.33333333333, ans=0.1 +2024-07-28 02:47:22,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=99117.33333333333, ans=0.2 +2024-07-28 02:47:23,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=99117.33333333333, ans=0.2 +2024-07-28 02:47:30,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=99130.66666666667, ans=0.0 +2024-07-28 02:47:31,844 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 6.000e+01 6.844e+01 8.152e+01 1.229e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 02:47:40,075 INFO [train.py:1114] (1/4) Epoch 8, batch 2800, loss[loss=0.3136, simple_loss=0.3673, pruned_loss=0.1299, over 3432.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2974, pruned_loss=0.0657, over 938273.81 frames. 
], batch size: 38, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:47:40,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=99144.0, ans=0.125 +2024-07-28 02:47:48,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=99157.33333333333, ans=0.0 +2024-07-28 02:47:50,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=99157.33333333333, ans=0.2 +2024-07-28 02:47:52,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99157.33333333333, ans=0.125 +2024-07-28 02:48:07,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=99184.0, ans=0.125 +2024-07-28 02:48:13,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=99197.33333333333, ans=0.125 +2024-07-28 02:48:24,680 INFO [train.py:1114] (1/4) Epoch 8, batch 2850, loss[loss=0.1843, simple_loss=0.2786, pruned_loss=0.04502, over 4967.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2995, pruned_loss=0.06678, over 936183.94 frames. ], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:26,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99210.66666666667, ans=0.125 +2024-07-28 02:48:28,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99210.66666666667, ans=0.1 +2024-07-28 02:48:32,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=99224.0, ans=0.125 +2024-07-28 02:48:35,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.85 vs. limit=12.0 +2024-07-28 02:48:37,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.39 vs. limit=22.5 +2024-07-28 02:48:40,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=99237.33333333333, ans=0.09899494936611666 +2024-07-28 02:48:44,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=99250.66666666667, ans=0.0 +2024-07-28 02:48:54,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.619e+01 6.304e+01 7.225e+01 1.077e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 02:48:57,343 INFO [train.py:1114] (1/4) Epoch 8, batch 2900, loss[loss=0.2159, simple_loss=0.2861, pruned_loss=0.07289, over 4822.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2999, pruned_loss=0.06621, over 939985.08 frames. ], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:49:16,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.31 vs. limit=22.5 +2024-07-28 02:49:25,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.83 vs. 
limit=22.5 +2024-07-28 02:49:27,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=99330.66666666667, ans=0.0 +2024-07-28 02:49:33,154 INFO [train.py:1114] (1/4) Epoch 8, batch 2950, loss[loss=0.205, simple_loss=0.291, pruned_loss=0.05957, over 4699.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2983, pruned_loss=0.06577, over 938753.80 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:49:37,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=99344.0, ans=0.125 +2024-07-28 02:49:48,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.18 vs. limit=15.0 +2024-07-28 02:49:51,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=99370.66666666667, ans=0.0 +2024-07-28 02:49:55,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=99384.0, ans=0.2 +2024-07-28 02:49:59,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99384.0, ans=0.1 +2024-07-28 02:50:04,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.988e+01 6.681e+01 8.290e+01 1.259e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:50:07,618 INFO [train.py:1114] (1/4) Epoch 8, batch 3000, loss[loss=0.1978, simple_loss=0.2874, pruned_loss=0.05409, over 4760.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2986, pruned_loss=0.0658, over 938287.90 frames. ], batch size: 13, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:50:07,619 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 02:50:20,737 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0354, 3.4098, 3.4844, 3.3110], device='cuda:1') +2024-07-28 02:50:54,530 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.1802, simple_loss=0.2848, pruned_loss=0.03781, over 944034.00 frames. +2024-07-28 02:50:54,532 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 02:50:59,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=99410.66666666667, ans=0.0 +2024-07-28 02:51:05,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=99424.0, ans=0.0 +2024-07-28 02:51:14,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=99450.66666666667, ans=0.0 +2024-07-28 02:51:14,911 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.38 vs. limit=15.0 +2024-07-28 02:51:17,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=99450.66666666667, ans=0.2 +2024-07-28 02:51:29,232 INFO [train.py:1114] (1/4) Epoch 8, batch 3050, loss[loss=0.1889, simple_loss=0.2858, pruned_loss=0.04602, over 4638.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2991, pruned_loss=0.06608, over 937638.65 frames. 
], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:51:30,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=99477.33333333333, ans=0.0 +2024-07-28 02:51:35,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99490.66666666667, ans=0.1 +2024-07-28 02:51:59,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=99490.66666666667, ans=0.0 +2024-07-28 02:52:21,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=99530.66666666667, ans=0.125 +2024-07-28 02:52:22,248 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.732e+01 6.156e+01 7.183e+01 1.083e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 02:52:25,460 INFO [train.py:1114] (1/4) Epoch 8, batch 3100, loss[loss=0.2656, simple_loss=0.3504, pruned_loss=0.09045, over 4626.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2988, pruned_loss=0.06619, over 938171.44 frames. ], batch size: 16, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:52:31,220 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.90 vs. limit=5.0 +2024-07-28 02:52:33,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=99557.33333333333, ans=0.025 +2024-07-28 02:52:33,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99557.33333333333, ans=0.1 +2024-07-28 02:53:00,724 INFO [train.py:1114] (1/4) Epoch 8, batch 3150, loss[loss=0.239, simple_loss=0.3187, pruned_loss=0.07961, over 4594.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2994, pruned_loss=0.06643, over 938688.00 frames. ], batch size: 17, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:04,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-07-28 02:53:28,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=99650.66666666667, ans=0.125 +2024-07-28 02:53:33,337 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+01 5.840e+01 6.506e+01 7.424e+01 1.196e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 02:53:36,727 INFO [train.py:1114] (1/4) Epoch 8, batch 3200, loss[loss=0.1876, simple_loss=0.2766, pruned_loss=0.04928, over 4830.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2981, pruned_loss=0.06583, over 940751.57 frames. ], batch size: 13, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:50,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=99704.0, ans=0.0 +2024-07-28 02:54:00,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=99717.33333333333, ans=0.025 +2024-07-28 02:54:05,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.35 vs. limit=10.0 +2024-07-28 02:54:09,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.65 vs. 
limit=12.0 +2024-07-28 02:54:11,572 INFO [train.py:1114] (1/4) Epoch 8, batch 3250, loss[loss=0.2401, simple_loss=0.3317, pruned_loss=0.07426, over 4938.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2992, pruned_loss=0.06627, over 941413.07 frames. ], batch size: 14, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:17,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=99757.33333333333, ans=0.0 +2024-07-28 02:54:26,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=99770.66666666667, ans=0.125 +2024-07-28 02:54:26,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=99770.66666666667, ans=22.5 +2024-07-28 02:54:30,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=99770.66666666667, ans=12.0 +2024-07-28 02:54:31,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=99784.0, ans=0.0 +2024-07-28 02:54:35,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=99784.0, ans=0.025 +2024-07-28 02:54:41,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=99797.33333333333, ans=0.125 +2024-07-28 02:54:41,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.887e+01 6.598e+01 7.799e+01 2.167e+02, threshold=1.320e+02, percent-clipped=1.0 +2024-07-28 02:54:45,026 INFO [train.py:1114] (1/4) Epoch 8, batch 3300, loss[loss=0.1985, simple_loss=0.2882, pruned_loss=0.05436, over 4782.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2973, pruned_loss=0.06577, over 941513.65 frames. ], batch size: 19, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:45,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99810.66666666667, ans=0.1 +2024-07-28 02:54:46,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0 +2024-07-28 02:54:58,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.06 vs. limit=15.0 +2024-07-28 02:55:02,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.67 vs. limit=15.0 +2024-07-28 02:55:03,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=99837.33333333333, ans=0.125 +2024-07-28 02:55:12,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=99864.0, ans=0.0 +2024-07-28 02:55:18,685 INFO [train.py:1114] (1/4) Epoch 8, batch 3350, loss[loss=0.2381, simple_loss=0.3259, pruned_loss=0.07517, over 4627.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2968, pruned_loss=0.06542, over 939500.70 frames. ], batch size: 17, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:21,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.99 vs. 
limit=10.0 +2024-07-28 02:55:36,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=99904.0, ans=0.0 +2024-07-28 02:55:38,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=99904.0, ans=0.125 +2024-07-28 02:55:50,388 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.817e+01 6.427e+01 7.197e+01 1.127e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:55:53,713 INFO [train.py:1114] (1/4) Epoch 8, batch 3400, loss[loss=0.1843, simple_loss=0.2644, pruned_loss=0.05215, over 4804.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2975, pruned_loss=0.06568, over 937492.61 frames. ], batch size: 11, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:58,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.66 vs. limit=15.0 +2024-07-28 02:56:01,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=99957.33333333333, ans=0.125 +2024-07-28 02:56:02,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=99957.33333333333, ans=0.5 +2024-07-28 02:56:03,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=99957.33333333333, ans=0.125 +2024-07-28 02:56:06,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99970.66666666667, ans=0.125 +2024-07-28 02:56:09,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99970.66666666667, ans=0.1 +2024-07-28 02:56:15,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=99984.0, ans=0.0 +2024-07-28 02:56:17,227 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:56:18,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.21 vs. limit=15.0 +2024-07-28 02:56:18,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.47 vs. limit=22.5 +2024-07-28 02:56:27,976 INFO [train.py:1114] (1/4) Epoch 8, batch 3450, loss[loss=0.2707, simple_loss=0.3421, pruned_loss=0.09964, over 4706.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2986, pruned_loss=0.06575, over 937485.70 frames. ], batch size: 19, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:56:31,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.98 vs. 
limit=10.0 +2024-07-28 02:56:32,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=100010.66666666667, ans=0.5 +2024-07-28 02:56:44,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=100037.33333333333, ans=0.125 +2024-07-28 02:56:47,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=100050.66666666667, ans=0.0 +2024-07-28 02:56:58,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 02:56:58,309 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.830e+01 6.643e+01 7.875e+01 1.454e+02, threshold=1.329e+02, percent-clipped=3.0 +2024-07-28 02:56:59,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100064.0, ans=0.0 +2024-07-28 02:57:01,712 INFO [train.py:1114] (1/4) Epoch 8, batch 3500, loss[loss=0.2039, simple_loss=0.28, pruned_loss=0.06391, over 4937.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2977, pruned_loss=0.06524, over 938363.26 frames. ], batch size: 12, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:01,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100077.33333333333, ans=0.1 +2024-07-28 02:57:17,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=100104.0, ans=0.125 +2024-07-28 02:57:36,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=100130.66666666667, ans=0.0 +2024-07-28 02:57:37,824 INFO [train.py:1114] (1/4) Epoch 8, batch 3550, loss[loss=0.2042, simple_loss=0.2976, pruned_loss=0.05543, over 4661.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2979, pruned_loss=0.06522, over 938915.27 frames. ], batch size: 14, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:46,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100157.33333333333, ans=0.125 +2024-07-28 02:57:46,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=100157.33333333333, ans=0.2 +2024-07-28 02:57:48,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=100157.33333333333, ans=0.125 +2024-07-28 02:57:51,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.47 vs. 
limit=12.0 +2024-07-28 02:57:52,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100170.66666666667, ans=0.125 +2024-07-28 02:58:00,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=100184.0, ans=0.2 +2024-07-28 02:58:07,550 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.623e+01 5.767e+01 6.398e+01 7.244e+01 1.008e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 02:58:24,512 INFO [train.py:1114] (1/4) Epoch 8, batch 3600, loss[loss=0.1801, simple_loss=0.2861, pruned_loss=0.03709, over 4964.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2979, pruned_loss=0.06505, over 940352.11 frames. ], batch size: 13, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:58:49,082 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=12.0 +2024-07-28 02:58:57,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=100224.0, ans=15.0 +2024-07-28 02:59:02,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100237.33333333333, ans=0.125 +2024-07-28 02:59:03,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=100237.33333333333, ans=0.125 +2024-07-28 02:59:04,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=100237.33333333333, ans=0.0 +2024-07-28 02:59:05,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100237.33333333333, ans=0.1 +2024-07-28 02:59:11,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=100250.66666666667, ans=0.0 +2024-07-28 02:59:13,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=100250.66666666667, ans=0.0 +2024-07-28 02:59:25,325 INFO [train.py:1114] (1/4) Epoch 8, batch 3650, loss[loss=0.2403, simple_loss=0.3256, pruned_loss=0.07748, over 4894.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2963, pruned_loss=0.06439, over 940637.13 frames. ], batch size: 15, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:59:25,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=100277.33333333333, ans=0.025 +2024-07-28 02:59:28,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100277.33333333333, ans=0.125 +2024-07-28 02:59:33,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. 
limit=15.0 +2024-07-28 02:59:50,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=100317.33333333333, ans=0.125 +2024-07-28 02:59:54,823 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:59:55,976 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.900e+01 6.500e+01 7.963e+01 1.457e+02, threshold=1.300e+02, percent-clipped=1.0 +2024-07-28 02:59:56,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=100330.66666666667, ans=0.05 +2024-07-28 02:59:57,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100330.66666666667, ans=0.125 +2024-07-28 02:59:57,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=100330.66666666667, ans=0.2 +2024-07-28 02:59:57,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=100330.66666666667, ans=0.0 +2024-07-28 02:59:59,598 INFO [train.py:1114] (1/4) Epoch 8, batch 3700, loss[loss=0.2272, simple_loss=0.3312, pruned_loss=0.06153, over 4935.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2965, pruned_loss=0.06475, over 941573.95 frames. ], batch size: 14, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 03:00:06,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=100344.0, ans=0.125 +2024-07-28 03:00:21,052 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.60 vs. limit=15.0 +2024-07-28 03:00:30,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.39 vs. limit=15.0 +2024-07-28 03:00:34,748 INFO [train.py:1114] (1/4) Epoch 8, batch 3750, loss[loss=0.1552, simple_loss=0.237, pruned_loss=0.0367, over 4807.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2958, pruned_loss=0.06468, over 943320.00 frames. ], batch size: 11, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:00:39,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100410.66666666667, ans=0.125 +2024-07-28 03:00:41,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100424.0, ans=0.125 +2024-07-28 03:00:42,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=100424.0, ans=0.125 +2024-07-28 03:00:54,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.78 vs. limit=10.0 +2024-07-28 03:00:59,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.93 vs. 
limit=15.0 +2024-07-28 03:01:05,677 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.636e+01 6.396e+01 7.360e+01 1.035e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 03:01:08,363 INFO [train.py:1114] (1/4) Epoch 8, batch 3800, loss[loss=0.2113, simple_loss=0.3073, pruned_loss=0.05769, over 4809.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.295, pruned_loss=0.06415, over 941677.92 frames. ], batch size: 14, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:01:14,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100490.66666666667, ans=0.125 +2024-07-28 03:01:55,206 INFO [train.py:1114] (1/4) Epoch 8, batch 3850, loss[loss=0.2473, simple_loss=0.3318, pruned_loss=0.08142, over 4626.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2954, pruned_loss=0.06407, over 942323.04 frames. ], batch size: 16, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:02:09,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=100570.66666666667, ans=0.0 +2024-07-28 03:02:21,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100597.33333333333, ans=0.125 +2024-07-28 03:02:21,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100597.33333333333, ans=0.125 +2024-07-28 03:02:26,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.65 vs. limit=22.5 +2024-07-28 03:02:29,297 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.806e+01 6.374e+01 7.296e+01 1.382e+02, threshold=1.275e+02, percent-clipped=1.0 +2024-07-28 03:02:36,097 INFO [train.py:1114] (1/4) Epoch 8, batch 3900, loss[loss=0.1876, simple_loss=0.2734, pruned_loss=0.05086, over 4818.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2955, pruned_loss=0.06423, over 942704.03 frames. ], batch size: 14, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:02:57,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=100637.33333333333, ans=0.125 +2024-07-28 03:03:04,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100650.66666666667, ans=0.1 +2024-07-28 03:03:06,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=100650.66666666667, ans=0.125 +2024-07-28 03:03:10,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100664.0, ans=0.125 +2024-07-28 03:03:14,045 INFO [train.py:1114] (1/4) Epoch 8, batch 3950, loss[loss=0.211, simple_loss=0.2984, pruned_loss=0.06178, over 4831.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.296, pruned_loss=0.06457, over 944520.59 frames. ], batch size: 16, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:03:17,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.15 vs. 
limit=15.0 +2024-07-28 03:03:18,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=100677.33333333333, ans=0.2 +2024-07-28 03:03:19,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=100677.33333333333, ans=0.2 +2024-07-28 03:03:32,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100704.0, ans=0.1 +2024-07-28 03:03:42,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100730.66666666667, ans=0.125 +2024-07-28 03:03:44,899 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.725e+01 6.427e+01 7.427e+01 2.052e+02, threshold=1.285e+02, percent-clipped=1.0 +2024-07-28 03:04:02,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=100744.0, ans=0.0 +2024-07-28 03:04:03,406 INFO [train.py:1114] (1/4) Epoch 8, batch 4000, loss[loss=0.1818, simple_loss=0.2574, pruned_loss=0.05316, over 4780.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2963, pruned_loss=0.06505, over 940906.85 frames. ], batch size: 12, lr: 9.46e-03, grad_scale: 32.0 +2024-07-28 03:04:13,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.81 vs. limit=22.5 +2024-07-28 03:04:25,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100770.66666666667, ans=0.125 +2024-07-28 03:04:26,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=100770.66666666667, ans=0.0 +2024-07-28 03:04:27,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=100770.66666666667, ans=0.125 +2024-07-28 03:04:29,279 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. limit=15.0 +2024-07-28 03:04:44,248 INFO [train.py:1114] (1/4) Epoch 8, batch 4050, loss[loss=0.2783, simple_loss=0.346, pruned_loss=0.1053, over 3265.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.296, pruned_loss=0.0652, over 940247.46 frames. ], batch size: 35, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:04:50,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=15.0 +2024-07-28 03:04:56,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=100824.0, ans=0.0 +2024-07-28 03:05:17,166 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.984e+01 6.561e+01 7.849e+01 1.305e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-28 03:05:17,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=100864.0, ans=0.125 +2024-07-28 03:05:19,918 INFO [train.py:1114] (1/4) Epoch 8, batch 4100, loss[loss=0.1985, simple_loss=0.2927, pruned_loss=0.05215, over 4912.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2969, pruned_loss=0.06529, over 938983.80 frames. 
], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:05:21,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.82 vs. limit=12.0 +2024-07-28 03:05:24,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100877.33333333333, ans=0.1 +2024-07-28 03:05:49,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=100917.33333333333, ans=0.025 +2024-07-28 03:06:31,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=100930.66666666667, ans=0.125 +2024-07-28 03:06:32,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.84 vs. limit=15.0 +2024-07-28 03:06:46,139 INFO [train.py:1114] (1/4) Epoch 8, batch 4150, loss[loss=0.201, simple_loss=0.294, pruned_loss=0.05401, over 4818.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2965, pruned_loss=0.06496, over 938622.60 frames. ], batch size: 13, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:06:48,129 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:06:48,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=100944.0, ans=0.0 +2024-07-28 03:06:50,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=100944.0, ans=0.0 +2024-07-28 03:06:58,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten.whitening_limit, batch_count=100957.33333333333, ans=22.5 +2024-07-28 03:07:13,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100970.66666666667, ans=0.125 +2024-07-28 03:07:28,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=100997.33333333333, ans=0.0 +2024-07-28 03:07:28,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=100997.33333333333, ans=0.125 +2024-07-28 03:07:28,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=100997.33333333333, ans=0.2 +2024-07-28 03:07:29,975 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 6.007e+01 6.703e+01 7.835e+01 1.474e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 03:07:31,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0 +2024-07-28 03:07:52,645 INFO [train.py:1114] (1/4) Epoch 8, batch 4200, loss[loss=0.257, simple_loss=0.3405, pruned_loss=0.08674, over 4908.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2969, pruned_loss=0.06484, over 940030.83 frames. 
], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:07:56,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101010.66666666667, ans=0.1 +2024-07-28 03:08:01,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=101024.0, ans=0.0 +2024-07-28 03:08:37,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=101037.33333333333, ans=0.05 +2024-07-28 03:08:41,067 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.64 vs. limit=22.5 +2024-07-28 03:08:56,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=101050.66666666667, ans=0.0 +2024-07-28 03:08:57,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=101050.66666666667, ans=0.0 +2024-07-28 03:09:16,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=101077.33333333333, ans=0.0 +2024-07-28 03:09:17,407 INFO [train.py:1114] (1/4) Epoch 8, batch 4250, loss[loss=0.1939, simple_loss=0.2806, pruned_loss=0.05362, over 4640.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2972, pruned_loss=0.06438, over 940808.17 frames. ], batch size: 12, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:21,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.36 vs. limit=22.5 +2024-07-28 03:09:26,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=101090.66666666667, ans=0.025 +2024-07-28 03:09:37,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.42 vs. limit=15.0 +2024-07-28 03:09:39,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=101117.33333333333, ans=0.0 +2024-07-28 03:09:41,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=101117.33333333333, ans=0.125 +2024-07-28 03:09:49,640 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.910e+01 6.569e+01 7.778e+01 1.465e+02, threshold=1.314e+02, percent-clipped=1.0 +2024-07-28 03:09:52,207 INFO [train.py:1114] (1/4) Epoch 8, batch 4300, loss[loss=0.2172, simple_loss=0.2974, pruned_loss=0.06853, over 4766.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2968, pruned_loss=0.06471, over 939719.21 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:54,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.30 vs. 
limit=15.0 +2024-07-28 03:09:59,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=101144.0, ans=0.0 +2024-07-28 03:09:59,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101144.0, ans=0.1 +2024-07-28 03:10:17,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.02 vs. limit=22.5 +2024-07-28 03:10:23,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101197.33333333333, ans=0.1 +2024-07-28 03:10:27,455 INFO [train.py:1114] (1/4) Epoch 8, batch 4350, loss[loss=0.1917, simple_loss=0.2846, pruned_loss=0.0494, over 4754.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2968, pruned_loss=0.06429, over 940747.63 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:10:30,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101210.66666666667, ans=0.1 +2024-07-28 03:10:34,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=101224.0, ans=0.0 +2024-07-28 03:10:55,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=101264.0, ans=0.125 +2024-07-28 03:10:56,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.91 vs. limit=6.0 +2024-07-28 03:10:58,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+01 5.773e+01 6.336e+01 7.369e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 03:11:01,088 INFO [train.py:1114] (1/4) Epoch 8, batch 4400, loss[loss=0.2499, simple_loss=0.3249, pruned_loss=0.0875, over 4816.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.296, pruned_loss=0.06351, over 940816.50 frames. ], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:17,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=101304.0, ans=0.035 +2024-07-28 03:11:19,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=101304.0, ans=0.0 +2024-07-28 03:11:22,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=101317.33333333333, ans=0.0 +2024-07-28 03:11:26,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=101317.33333333333, ans=0.0 +2024-07-28 03:11:36,963 INFO [train.py:1114] (1/4) Epoch 8, batch 4450, loss[loss=0.164, simple_loss=0.2546, pruned_loss=0.03673, over 4920.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.296, pruned_loss=0.06423, over 938671.19 frames. 
], batch size: 12, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:37,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=101344.0, ans=0.125 +2024-07-28 03:11:37,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=101344.0, ans=0.0 +2024-07-28 03:11:42,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-07-28 03:11:51,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=101370.66666666667, ans=0.025 +2024-07-28 03:11:57,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=101384.0, ans=0.125 +2024-07-28 03:12:08,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-07-28 03:12:09,299 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.991e+01 6.173e+01 7.006e+01 8.907e+01 1.361e+02, threshold=1.401e+02, percent-clipped=3.0 +2024-07-28 03:12:12,341 INFO [train.py:1114] (1/4) Epoch 8, batch 4500, loss[loss=0.2001, simple_loss=0.2919, pruned_loss=0.05409, over 4736.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2959, pruned_loss=0.06391, over 938077.95 frames. ], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:12:15,461 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.77 vs. limit=22.5 +2024-07-28 03:12:33,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101450.66666666667, ans=0.125 +2024-07-28 03:12:41,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101464.0, ans=0.0 +2024-07-28 03:12:46,593 INFO [train.py:1114] (1/4) Epoch 8, batch 4550, loss[loss=0.1997, simple_loss=0.2817, pruned_loss=0.05882, over 4887.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2956, pruned_loss=0.06359, over 939983.51 frames. ], batch size: 13, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:12:55,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=101490.66666666667, ans=0.125 +2024-07-28 03:12:58,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.19 vs. limit=15.0 +2024-07-28 03:12:58,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101490.66666666667, ans=0.125 +2024-07-28 03:13:05,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.76 vs. limit=8.0 +2024-07-28 03:13:13,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.88 vs. 
limit=6.0 +2024-07-28 03:13:18,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=101530.66666666667, ans=0.2 +2024-07-28 03:13:19,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.796e+01 6.389e+01 7.358e+01 1.083e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 03:13:22,135 INFO [train.py:1114] (1/4) Epoch 8, batch 4600, loss[loss=0.2053, simple_loss=0.2915, pruned_loss=0.05956, over 4498.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2946, pruned_loss=0.06305, over 938420.05 frames. ], batch size: 21, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:23,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=101544.0, ans=0.125 +2024-07-28 03:13:28,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=101557.33333333333, ans=0.0 +2024-07-28 03:13:29,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=101557.33333333333, ans=0.125 +2024-07-28 03:13:40,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0 +2024-07-28 03:13:41,304 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:13:43,904 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:13:51,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=101597.33333333333, ans=0.035 +2024-07-28 03:13:51,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=101597.33333333333, ans=0.125 +2024-07-28 03:13:55,114 INFO [train.py:1114] (1/4) Epoch 8, batch 4650, loss[loss=0.2601, simple_loss=0.338, pruned_loss=0.09109, over 4865.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2963, pruned_loss=0.06398, over 940089.46 frames. ], batch size: 16, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:14:06,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=101624.0, ans=0.125 +2024-07-28 03:14:15,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=101637.33333333333, ans=0.2 +2024-07-28 03:14:23,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101664.0, ans=0.1 +2024-07-28 03:14:27,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.774e+01 6.444e+01 7.624e+01 1.056e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:14:28,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=101664.0, ans=0.2 +2024-07-28 03:14:30,289 INFO [train.py:1114] (1/4) Epoch 8, batch 4700, loss[loss=0.2147, simple_loss=0.2779, pruned_loss=0.07576, over 4698.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2962, pruned_loss=0.064, over 937427.69 frames. 
], batch size: 11, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:14:43,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101704.0, ans=0.1 +2024-07-28 03:14:52,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=101717.33333333333, ans=0.0 +2024-07-28 03:15:04,163 INFO [train.py:1114] (1/4) Epoch 8, batch 4750, loss[loss=0.2086, simple_loss=0.308, pruned_loss=0.05464, over 4617.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2968, pruned_loss=0.06421, over 935950.74 frames. ], batch size: 21, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:04,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=101744.0, ans=0.125 +2024-07-28 03:15:11,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=101757.33333333333, ans=0.125 +2024-07-28 03:15:12,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101757.33333333333, ans=0.1 +2024-07-28 03:15:20,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=101770.66666666667, ans=0.125 +2024-07-28 03:15:37,281 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.718e+01 6.515e+01 7.341e+01 9.928e+01, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 03:15:38,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-07-28 03:15:39,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.20 vs. limit=22.5 +2024-07-28 03:15:40,385 INFO [train.py:1114] (1/4) Epoch 8, batch 4800, loss[loss=0.2342, simple_loss=0.3149, pruned_loss=0.07675, over 4695.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2967, pruned_loss=0.06452, over 933018.75 frames. ], batch size: 13, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:41,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=101810.66666666667, ans=0.0 +2024-07-28 03:15:50,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=101824.0, ans=0.0 +2024-07-28 03:15:52,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.52 vs. limit=22.5 +2024-07-28 03:15:56,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=101837.33333333333, ans=0.02 +2024-07-28 03:15:56,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. 
limit=15.0 +2024-07-28 03:15:57,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=101837.33333333333, ans=0.07 +2024-07-28 03:15:58,559 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:16:08,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=101850.66666666667, ans=0.125 +2024-07-28 03:16:09,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101850.66666666667, ans=0.1 +2024-07-28 03:16:09,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101850.66666666667, ans=0.0 +2024-07-28 03:16:17,646 INFO [train.py:1114] (1/4) Epoch 8, batch 4850, loss[loss=0.2079, simple_loss=0.301, pruned_loss=0.05737, over 4746.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2979, pruned_loss=0.0651, over 932232.34 frames. ], batch size: 14, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:16:19,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=101877.33333333333, ans=0.2 +2024-07-28 03:16:20,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0 +2024-07-28 03:16:26,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.83 vs. limit=22.5 +2024-07-28 03:16:46,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=101930.66666666667, ans=0.125 +2024-07-28 03:16:47,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=101930.66666666667, ans=0.5 +2024-07-28 03:16:51,215 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.836e+01 5.718e+01 6.267e+01 6.950e+01 1.595e+02, threshold=1.253e+02, percent-clipped=1.0 +2024-07-28 03:17:01,612 INFO [train.py:1114] (1/4) Epoch 8, batch 4900, loss[loss=0.2333, simple_loss=0.3142, pruned_loss=0.0762, over 4759.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2974, pruned_loss=0.06488, over 934242.83 frames. ], batch size: 13, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:02,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.11 vs. limit=15.0 +2024-07-28 03:17:14,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=101957.33333333333, ans=0.0 +2024-07-28 03:17:24,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101984.0, ans=0.0 +2024-07-28 03:17:28,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=101997.33333333333, ans=0.2 +2024-07-28 03:17:35,650 INFO [train.py:1114] (1/4) Epoch 8, batch 4950, loss[loss=0.2484, simple_loss=0.3099, pruned_loss=0.09347, over 3455.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2995, pruned_loss=0.06635, over 930782.46 frames. 
], batch size: 35, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:49,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=102024.0, ans=0.125 +2024-07-28 03:17:58,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102050.66666666667, ans=0.1 +2024-07-28 03:18:08,030 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 5.855e+01 6.357e+01 7.218e+01 9.647e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 03:18:10,643 INFO [train.py:1114] (1/4) Epoch 8, batch 5000, loss[loss=0.2184, simple_loss=0.2974, pruned_loss=0.06967, over 4664.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2986, pruned_loss=0.06555, over 934732.84 frames. ], batch size: 14, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:18:22,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=102090.66666666667, ans=0.125 +2024-07-28 03:18:27,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=102104.0, ans=15.0 +2024-07-28 03:18:28,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102104.0, ans=0.125 +2024-07-28 03:18:34,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=102117.33333333333, ans=0.125 +2024-07-28 03:18:38,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102130.66666666667, ans=0.1 +2024-07-28 03:18:41,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-07-28 03:18:43,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=102130.66666666667, ans=0.125 +2024-07-28 03:18:45,827 INFO [train.py:1114] (1/4) Epoch 8, batch 5050, loss[loss=0.1978, simple_loss=0.2692, pruned_loss=0.06319, over 4861.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2981, pruned_loss=0.06594, over 937427.28 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:18:49,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=102144.0, ans=0.2 +2024-07-28 03:18:56,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102157.33333333333, ans=0.1 +2024-07-28 03:18:57,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=102157.33333333333, ans=0.125 +2024-07-28 03:19:03,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102170.66666666667, ans=0.1 +2024-07-28 03:19:03,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.09 vs. 
limit=15.0 +2024-07-28 03:19:17,811 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.666e+01 5.803e+01 6.269e+01 7.279e+01 1.149e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 03:19:19,900 INFO [train.py:1114] (1/4) Epoch 8, batch 5100, loss[loss=0.1878, simple_loss=0.2689, pruned_loss=0.05339, over 4778.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2985, pruned_loss=0.06614, over 935335.95 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:33,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102237.33333333333, ans=0.1 +2024-07-28 03:19:48,747 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0 +2024-07-28 03:19:54,791 INFO [train.py:1114] (1/4) Epoch 8, batch 5150, loss[loss=0.2204, simple_loss=0.311, pruned_loss=0.06496, over 4832.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2988, pruned_loss=0.06601, over 936289.31 frames. ], batch size: 16, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:55,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102277.33333333333, ans=0.0 +2024-07-28 03:20:00,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=15.0 +2024-07-28 03:20:11,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=102304.0, ans=0.0 +2024-07-28 03:20:11,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=102304.0, ans=0.05 +2024-07-28 03:20:16,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.64 vs. limit=15.0 +2024-07-28 03:20:26,078 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.815e+01 6.319e+01 7.025e+01 9.950e+01, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 03:20:30,184 INFO [train.py:1114] (1/4) Epoch 8, batch 5200, loss[loss=0.202, simple_loss=0.2959, pruned_loss=0.05404, over 4660.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2977, pruned_loss=0.06535, over 935720.36 frames. ], batch size: 14, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:20:46,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102370.66666666667, ans=0.125 +2024-07-28 03:21:05,405 INFO [train.py:1114] (1/4) Epoch 8, batch 5250, loss[loss=0.2516, simple_loss=0.326, pruned_loss=0.08856, over 4884.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2963, pruned_loss=0.06453, over 935378.37 frames. ], batch size: 13, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:06,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.08 vs. 
limit=15.0 +2024-07-28 03:21:19,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=102437.33333333333, ans=0.125 +2024-07-28 03:21:26,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=102450.66666666667, ans=0.125 +2024-07-28 03:21:29,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=102450.66666666667, ans=0.125 +2024-07-28 03:21:32,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102464.0, ans=0.1 +2024-07-28 03:21:36,698 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.809e+01 6.446e+01 7.224e+01 1.154e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:21:37,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0 +2024-07-28 03:21:37,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.93 vs. limit=15.0 +2024-07-28 03:21:38,683 INFO [train.py:1114] (1/4) Epoch 8, batch 5300, loss[loss=0.2292, simple_loss=0.3182, pruned_loss=0.07011, over 4642.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2958, pruned_loss=0.06457, over 933700.04 frames. ], batch size: 16, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:44,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102490.66666666667, ans=0.125 +2024-07-28 03:21:47,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=102490.66666666667, ans=0.0 +2024-07-28 03:21:47,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102490.66666666667, ans=0.1 +2024-07-28 03:21:48,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=102490.66666666667, ans=0.07 +2024-07-28 03:21:54,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=102504.0, ans=0.0 +2024-07-28 03:21:58,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=102517.33333333333, ans=0.125 +2024-07-28 03:22:00,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=102517.33333333333, ans=0.0 +2024-07-28 03:22:02,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102517.33333333333, ans=0.0 +2024-07-28 03:22:08,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=102530.66666666667, ans=0.025 +2024-07-28 03:22:11,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=102544.0, ans=0.0 +2024-07-28 03:22:11,878 INFO [train.py:1114] (1/4) Epoch 8, batch 5350, loss[loss=0.1662, simple_loss=0.2434, pruned_loss=0.04447, over 4517.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2973, pruned_loss=0.06488, over 936063.47 frames. 
], batch size: 10, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:22:12,688 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:22:25,906 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:22:27,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=102570.66666666667, ans=0.2 +2024-07-28 03:22:43,563 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 6.073e+01 6.739e+01 7.548e+01 1.442e+02, threshold=1.348e+02, percent-clipped=1.0 +2024-07-28 03:22:44,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=102597.33333333333, ans=0.125 +2024-07-28 03:22:45,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=102610.66666666667, ans=0.125 +2024-07-28 03:22:45,665 INFO [train.py:1114] (1/4) Epoch 8, batch 5400, loss[loss=0.2005, simple_loss=0.2887, pruned_loss=0.05616, over 4312.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2987, pruned_loss=0.06611, over 929986.59 frames. ], batch size: 26, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:22:58,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102637.33333333333, ans=0.1 +2024-07-28 03:22:59,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=102637.33333333333, ans=0.2 +2024-07-28 03:23:11,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102650.66666666667, ans=0.1 +2024-07-28 03:23:20,678 INFO [train.py:1114] (1/4) Epoch 8, batch 5450, loss[loss=0.1615, simple_loss=0.242, pruned_loss=0.04044, over 4712.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2983, pruned_loss=0.0658, over 933048.60 frames. ], batch size: 11, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:23:32,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102690.66666666667, ans=0.125 +2024-07-28 03:23:44,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=102717.33333333333, ans=0.09899494936611666 +2024-07-28 03:23:48,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=102717.33333333333, ans=0.125 +2024-07-28 03:23:54,556 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.203e+01 6.756e+01 7.672e+01 1.108e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 03:23:56,653 INFO [train.py:1114] (1/4) Epoch 8, batch 5500, loss[loss=0.2779, simple_loss=0.3509, pruned_loss=0.1024, over 4129.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2977, pruned_loss=0.06557, over 931305.69 frames. 
], batch size: 25, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:23:59,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=102744.0, ans=0.5 +2024-07-28 03:24:27,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=102797.33333333333, ans=0.2 +2024-07-28 03:24:29,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=102810.66666666667, ans=0.07 +2024-07-28 03:24:29,630 INFO [train.py:1114] (1/4) Epoch 8, batch 5550, loss[loss=0.1559, simple_loss=0.2528, pruned_loss=0.02954, over 4715.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2976, pruned_loss=0.06574, over 933207.24 frames. ], batch size: 12, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:24:34,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=102810.66666666667, ans=0.125 +2024-07-28 03:24:40,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102824.0, ans=0.1 +2024-07-28 03:24:57,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=102864.0, ans=0.0 +2024-07-28 03:25:01,400 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.969e+01 5.947e+01 6.604e+01 7.771e+01 1.160e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 03:25:02,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102877.33333333333, ans=0.1 +2024-07-28 03:25:03,444 INFO [train.py:1114] (1/4) Epoch 8, batch 5600, loss[loss=0.2126, simple_loss=0.2958, pruned_loss=0.06473, over 4744.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.299, pruned_loss=0.06606, over 934473.11 frames. ], batch size: 14, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:25:03,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.49 vs. limit=10.0 +2024-07-28 03:25:05,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=102877.33333333333, ans=0.0 +2024-07-28 03:25:14,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102890.66666666667, ans=0.125 +2024-07-28 03:25:25,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=102917.33333333333, ans=0.125 +2024-07-28 03:25:33,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=102930.66666666667, ans=0.0 +2024-07-28 03:25:37,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=102930.66666666667, ans=0.125 +2024-07-28 03:25:38,383 INFO [train.py:1114] (1/4) Epoch 8, batch 5650, loss[loss=0.2182, simple_loss=0.2959, pruned_loss=0.07032, over 4566.00 frames. ], tot_loss[loss=0.216, simple_loss=0.299, pruned_loss=0.06645, over 936789.45 frames. 
], batch size: 21, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:25:58,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102984.0, ans=0.125 +2024-07-28 03:26:06,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=102997.33333333333, ans=10.0 +2024-07-28 03:26:09,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.666e+01 6.096e+01 6.693e+01 9.432e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 03:26:11,508 INFO [train.py:1114] (1/4) Epoch 8, batch 5700, loss[loss=0.2511, simple_loss=0.3331, pruned_loss=0.08457, over 4694.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2991, pruned_loss=0.06632, over 937936.23 frames. ], batch size: 13, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:26:24,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=103024.0, ans=0.0 +2024-07-28 03:26:26,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=103024.0, ans=0.2 +2024-07-28 03:26:44,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=103064.0, ans=0.2 +2024-07-28 03:26:46,891 INFO [train.py:1114] (1/4) Epoch 8, batch 5750, loss[loss=0.2039, simple_loss=0.2897, pruned_loss=0.05904, over 4707.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2981, pruned_loss=0.06528, over 938045.31 frames. ], batch size: 19, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:26:55,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=103090.66666666667, ans=0.025 +2024-07-28 03:27:13,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=103130.66666666667, ans=0.2 +2024-07-28 03:27:18,486 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.742e+01 5.884e+01 6.600e+01 7.288e+01 1.127e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-28 03:27:18,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=103130.66666666667, ans=0.0 +2024-07-28 03:27:20,735 INFO [train.py:1114] (1/4) Epoch 8, batch 5800, loss[loss=0.2362, simple_loss=0.3175, pruned_loss=0.07742, over 4667.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.299, pruned_loss=0.06582, over 937275.19 frames. 
], batch size: 19, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:27:30,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=103144.0, ans=0.0 +2024-07-28 03:27:30,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=103144.0, ans=0.0 +2024-07-28 03:27:32,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103157.33333333333, ans=0.125 +2024-07-28 03:27:32,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103157.33333333333, ans=0.125 +2024-07-28 03:27:41,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=103170.66666666667, ans=0.125 +2024-07-28 03:27:45,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=103184.0, ans=0.125 +2024-07-28 03:27:59,742 INFO [train.py:1114] (1/4) Epoch 8, batch 5850, loss[loss=0.2119, simple_loss=0.2953, pruned_loss=0.06428, over 4398.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2991, pruned_loss=0.06648, over 937712.23 frames. ], batch size: 21, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:28:08,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=103224.0, ans=0.125 +2024-07-28 03:28:08,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.86 vs. limit=22.5 +2024-07-28 03:28:08,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=103224.0, ans=0.125 +2024-07-28 03:28:30,668 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.776e+01 6.352e+01 7.126e+01 1.312e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 03:28:38,755 INFO [train.py:1114] (1/4) Epoch 8, batch 5900, loss[loss=0.2247, simple_loss=0.3055, pruned_loss=0.07194, over 4688.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2982, pruned_loss=0.0659, over 937914.20 frames. ], batch size: 15, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:28:43,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=103277.33333333333, ans=0.0 +2024-07-28 03:28:45,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103290.66666666667, ans=0.0 +2024-07-28 03:28:52,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=103304.0, ans=0.0 +2024-07-28 03:28:57,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.39 vs. limit=10.0 +2024-07-28 03:29:01,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.81 vs. limit=22.5 +2024-07-28 03:29:14,044 INFO [train.py:1114] (1/4) Epoch 8, batch 5950, loss[loss=0.2377, simple_loss=0.3106, pruned_loss=0.08246, over 4683.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2975, pruned_loss=0.06573, over 939895.07 frames. 
], batch size: 15, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:29:21,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103357.33333333333, ans=0.1 +2024-07-28 03:29:23,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=103357.33333333333, ans=0.0 +2024-07-28 03:29:24,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103357.33333333333, ans=0.125 +2024-07-28 03:29:25,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=103357.33333333333, ans=0.025 +2024-07-28 03:29:33,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103384.0, ans=0.125 +2024-07-28 03:29:45,386 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+01 5.778e+01 6.625e+01 7.689e+01 1.053e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 03:29:47,504 INFO [train.py:1114] (1/4) Epoch 8, batch 6000, loss[loss=0.2823, simple_loss=0.353, pruned_loss=0.1057, over 4214.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2982, pruned_loss=0.06627, over 936910.41 frames. ], batch size: 25, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:29:47,505 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 03:30:01,689 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.4567, 5.1843, 4.6017, 4.4888], device='cuda:1') +2024-07-28 03:30:06,868 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.1796, simple_loss=0.2837, pruned_loss=0.03775, over 944034.00 frames. +2024-07-28 03:30:06,879 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 03:30:23,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.39 vs. limit=15.0 +2024-07-28 03:30:36,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=103464.0, ans=0.0 +2024-07-28 03:30:42,208 INFO [train.py:1114] (1/4) Epoch 8, batch 6050, loss[loss=0.1843, simple_loss=0.2666, pruned_loss=0.05105, over 4774.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2972, pruned_loss=0.06562, over 938038.06 frames. ], batch size: 12, lr: 9.33e-03, grad_scale: 32.0 +2024-07-28 03:31:03,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.50 vs. limit=15.0 +2024-07-28 03:31:05,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=103517.33333333333, ans=0.125 +2024-07-28 03:31:07,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=103517.33333333333, ans=0.125 +2024-07-28 03:31:14,073 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.635e+01 6.111e+01 6.957e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 03:31:16,060 INFO [train.py:1114] (1/4) Epoch 8, batch 6100, loss[loss=0.2327, simple_loss=0.3176, pruned_loss=0.07393, over 4680.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2967, pruned_loss=0.06539, over 937948.69 frames. 
], batch size: 15, lr: 9.33e-03, grad_scale: 32.0 +2024-07-28 03:31:17,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.51 vs. limit=6.0 +2024-07-28 03:31:18,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=103544.0, ans=0.125 +2024-07-28 03:31:19,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=103544.0, ans=0.125 +2024-07-28 03:31:23,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.68 vs. limit=12.0 +2024-07-28 03:31:25,935 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:31:27,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=103557.33333333333, ans=0.0 +2024-07-28 03:31:45,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=103597.33333333333, ans=0.2 +2024-07-28 03:31:52,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.79 vs. limit=6.0 +2024-07-28 03:31:54,404 INFO [train.py:1114] (1/4) Epoch 8, batch 6150, loss[loss=0.3167, simple_loss=0.3582, pruned_loss=0.1375, over 3011.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2962, pruned_loss=0.06452, over 936338.32 frames. ], batch size: 35, lr: 9.33e-03, grad_scale: 32.0 +2024-07-28 03:32:23,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=103650.66666666667, ans=0.07 +2024-07-28 03:32:28,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103664.0, ans=0.1 +2024-07-28 03:32:28,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103664.0, ans=0.125 +2024-07-28 03:32:30,316 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.158e+01 5.994e+01 6.634e+01 7.988e+01 1.219e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 03:32:32,385 INFO [train.py:1114] (1/4) Epoch 8, batch 6200, loss[loss=0.2454, simple_loss=0.3239, pruned_loss=0.08341, over 4744.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2968, pruned_loss=0.06493, over 936253.48 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:32:42,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103690.66666666667, ans=0.125 +2024-07-28 03:32:42,836 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.22 vs. 
limit=22.5 +2024-07-28 03:32:48,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103704.0, ans=0.125 +2024-07-28 03:32:55,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103717.33333333333, ans=0.125 +2024-07-28 03:33:06,922 INFO [train.py:1114] (1/4) Epoch 8, batch 6250, loss[loss=0.211, simple_loss=0.3109, pruned_loss=0.05555, over 4825.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.297, pruned_loss=0.06488, over 933067.13 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:33:07,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=103744.0, ans=0.0 +2024-07-28 03:33:11,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=103744.0, ans=0.125 +2024-07-28 03:33:19,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103757.33333333333, ans=0.125 +2024-07-28 03:33:26,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=103770.66666666667, ans=0.0 +2024-07-28 03:33:56,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=12.0 +2024-07-28 03:33:59,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0 +2024-07-28 03:34:05,276 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.622e+01 6.181e+01 7.164e+01 1.267e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 03:34:07,329 INFO [train.py:1114] (1/4) Epoch 8, batch 6300, loss[loss=0.1896, simple_loss=0.2688, pruned_loss=0.05522, over 4512.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.296, pruned_loss=0.06434, over 930056.88 frames. ], batch size: 10, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:34:11,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=103810.66666666667, ans=0.0 +2024-07-28 03:34:21,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=103837.33333333333, ans=0.125 +2024-07-28 03:34:33,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=103850.66666666667, ans=0.125 +2024-07-28 03:34:36,384 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=15.0 +2024-07-28 03:34:49,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=103864.0, ans=0.2 +2024-07-28 03:34:50,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=103864.0, ans=0.95 +2024-07-28 03:34:53,140 INFO [train.py:1114] (1/4) Epoch 8, batch 6350, loss[loss=0.2058, simple_loss=0.2929, pruned_loss=0.05935, over 4523.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2948, pruned_loss=0.06385, over 933938.32 frames. 
], batch size: 21, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:34:56,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.97 vs. limit=15.0 +2024-07-28 03:34:58,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0 +2024-07-28 03:34:58,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103877.33333333333, ans=0.125 +2024-07-28 03:35:00,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=103890.66666666667, ans=0.125 +2024-07-28 03:35:03,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103890.66666666667, ans=0.1 +2024-07-28 03:35:11,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=103904.0, ans=0.125 +2024-07-28 03:35:13,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103904.0, ans=0.0 +2024-07-28 03:35:21,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.78 vs. limit=22.5 +2024-07-28 03:35:22,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=103930.66666666667, ans=0.125 +2024-07-28 03:35:22,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=103930.66666666667, ans=0.125 +2024-07-28 03:35:23,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=103930.66666666667, ans=0.0 +2024-07-28 03:35:37,028 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.697e+01 6.431e+01 7.734e+01 1.122e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 03:35:38,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=103944.0, ans=0.125 +2024-07-28 03:35:38,994 INFO [train.py:1114] (1/4) Epoch 8, batch 6400, loss[loss=0.2075, simple_loss=0.2988, pruned_loss=0.05811, over 4633.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2954, pruned_loss=0.0644, over 935468.30 frames. 
], batch size: 13, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:35:42,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=103944.0, ans=0.125
+2024-07-28 03:35:53,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103970.66666666667, ans=0.1
+2024-07-28 03:36:07,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=103984.0, ans=0.025
+2024-07-28 03:36:19,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=103997.33333333333, ans=0.125
+2024-07-28 03:36:19,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104010.66666666667, ans=0.1
+2024-07-28 03:36:20,335 INFO [train.py:1114] (1/4) Epoch 8, batch 6450, loss[loss=0.2229, simple_loss=0.3148, pruned_loss=0.06543, over 4516.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2958, pruned_loss=0.06432, over 938959.70 frames. ], batch size: 21, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:36:27,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=104024.0, ans=0.125
+2024-07-28 03:36:29,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=104024.0, ans=0.125
+2024-07-28 03:36:44,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-07-28 03:36:46,762 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.54 vs. limit=22.5
+2024-07-28 03:36:52,542 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.901e+01 6.090e+01 6.968e+01 8.127e+01 1.259e+02, threshold=1.394e+02, percent-clipped=0.0
+2024-07-28 03:36:54,642 INFO [train.py:1114] (1/4) Epoch 8, batch 6500, loss[loss=0.2462, simple_loss=0.3266, pruned_loss=0.08291, over 3355.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2958, pruned_loss=0.06386, over 940111.85 frames. ], batch size: 35, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:36:55,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=104077.33333333333, ans=15.0
+2024-07-28 03:37:09,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104104.0, ans=0.1
+2024-07-28 03:37:11,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=104104.0, ans=0.0
+2024-07-28 03:37:13,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=104104.0, ans=0.125
+2024-07-28 03:37:20,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=104117.33333333333, ans=0.025
+2024-07-28 03:37:21,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104117.33333333333, ans=0.125
+2024-07-28 03:37:30,145 INFO [train.py:1114] (1/4) Epoch 8, batch 6550, loss[loss=0.1891, simple_loss=0.2645, pruned_loss=0.05688, over 4801.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2946, pruned_loss=0.06333, over 943099.75 frames. ], batch size: 11, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:37:33,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=104144.0, ans=0.07
+2024-07-28 03:37:38,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=104157.33333333333, ans=0.125
+2024-07-28 03:37:45,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=104170.66666666667, ans=0.0
+2024-07-28 03:37:47,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104170.66666666667, ans=0.125
+2024-07-28 03:37:51,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104184.0, ans=0.125
+2024-07-28 03:37:53,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=104184.0, ans=0.07
+2024-07-28 03:37:56,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104184.0, ans=0.1
+2024-07-28 03:37:59,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=104197.33333333333, ans=0.125
+2024-07-28 03:37:59,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=104197.33333333333, ans=0.0
+2024-07-28 03:38:00,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104197.33333333333, ans=0.1
+2024-07-28 03:38:02,246 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.695e+01 6.284e+01 7.396e+01 1.281e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 03:38:02,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=104197.33333333333, ans=0.0
+2024-07-28 03:38:04,218 INFO [train.py:1114] (1/4) Epoch 8, batch 6600, loss[loss=0.2301, simple_loss=0.3163, pruned_loss=0.07194, over 4933.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2951, pruned_loss=0.06347, over 944959.02 frames. ], batch size: 14, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:38:10,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0
+2024-07-28 03:38:11,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.65 vs. limit=15.0
+2024-07-28 03:38:13,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.36 vs. limit=12.0
+2024-07-28 03:38:24,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=104237.33333333333, ans=0.0
+2024-07-28 03:38:27,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=104237.33333333333, ans=0.025
+2024-07-28 03:38:33,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=104237.33333333333, ans=15.0
+2024-07-28 03:38:39,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=104250.66666666667, ans=0.09899494936611666
+2024-07-28 03:38:51,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=104264.0, ans=0.125
+2024-07-28 03:38:55,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104264.0, ans=0.1
+2024-07-28 03:38:56,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=104264.0, ans=0.025
+2024-07-28 03:38:58,593 INFO [train.py:1114] (1/4) Epoch 8, batch 6650, loss[loss=0.2165, simple_loss=0.2949, pruned_loss=0.06907, over 4607.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2951, pruned_loss=0.06359, over 943532.14 frames. ], batch size: 17, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:39:06,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104290.66666666667, ans=0.125
+2024-07-28 03:39:19,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.70 vs. limit=15.0
+2024-07-28 03:39:21,042 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:39:22,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104304.0, ans=0.1
+2024-07-28 03:39:23,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104317.33333333333, ans=0.1
+2024-07-28 03:39:30,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=104330.66666666667, ans=0.2
+2024-07-28 03:39:34,785 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+01 5.792e+01 6.278e+01 6.949e+01 1.059e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 03:39:44,718 INFO [train.py:1114] (1/4) Epoch 8, batch 6700, loss[loss=0.2376, simple_loss=0.3394, pruned_loss=0.06787, over 4694.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2969, pruned_loss=0.06447, over 942255.92 frames. ], batch size: 19, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:39:47,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=104344.0, ans=0.125
+2024-07-28 03:39:53,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=104344.0, ans=0.025
+2024-07-28 03:40:17,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=104397.33333333333, ans=0.95
+2024-07-28 03:40:19,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=104397.33333333333, ans=0.2
+2024-07-28 03:40:24,513 INFO [train.py:1114] (1/4) Epoch 8, batch 6750, loss[loss=0.2366, simple_loss=0.3184, pruned_loss=0.07735, over 4206.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2973, pruned_loss=0.06498, over 940507.40 frames. ], batch size: 25, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:40:42,478 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:40:43,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=104424.0, ans=0.0
+2024-07-28 03:40:55,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104450.66666666667, ans=0.1
+2024-07-28 03:41:01,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104464.0, ans=0.1
+2024-07-28 03:41:04,953 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.597e+01 6.085e+01 6.894e+01 1.207e+02, threshold=1.217e+02, percent-clipped=0.0
+2024-07-28 03:41:14,446 INFO [train.py:1114] (1/4) Epoch 8, batch 6800, loss[loss=0.2663, simple_loss=0.3599, pruned_loss=0.08632, over 4625.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2981, pruned_loss=0.06536, over 938642.77 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:41:14,531 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:41:14,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=104477.33333333333, ans=0.05
+2024-07-28 03:41:19,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104477.33333333333, ans=0.1
+2024-07-28 03:41:25,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=104490.66666666667, ans=0.125
+2024-07-28 03:41:34,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=104517.33333333333, ans=0.025
+2024-07-28 03:41:41,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=104530.66666666667, ans=0.125
+2024-07-28 03:41:41,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104530.66666666667, ans=0.1
+2024-07-28 03:41:50,247 INFO [train.py:1114] (1/4) Epoch 8, batch 6850, loss[loss=0.239, simple_loss=0.3232, pruned_loss=0.07739, over 4688.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2985, pruned_loss=0.06549, over 940190.95 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:41:50,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=104544.0, ans=0.125
+2024-07-28 03:42:04,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=104570.66666666667, ans=0.125
+2024-07-28 03:42:06,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104570.66666666667, ans=0.125
+2024-07-28 03:42:14,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=104584.0, ans=0.125
+2024-07-28 03:42:17,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=104597.33333333333, ans=0.125
+2024-07-28 03:42:21,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.042e+01 6.902e+01 8.247e+01 1.133e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-28 03:42:21,988 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. limit=10.0
+2024-07-28 03:42:22,904 INFO [train.py:1114] (1/4) Epoch 8, batch 6900, loss[loss=0.1623, simple_loss=0.246, pruned_loss=0.03932, over 4958.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.297, pruned_loss=0.06431, over 942559.06 frames. ], batch size: 13, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:42:24,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104610.66666666667, ans=0.125
+2024-07-28 03:42:30,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=104624.0, ans=0.2
+2024-07-28 03:42:32,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=104624.0, ans=0.0
+2024-07-28 03:42:33,558 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:42:39,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104637.33333333333, ans=0.125
+2024-07-28 03:42:41,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=104637.33333333333, ans=0.125
+2024-07-28 03:42:47,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104650.66666666667, ans=0.125
+2024-07-28 03:42:55,938 INFO [train.py:1114] (1/4) Epoch 8, batch 6950, loss[loss=0.1967, simple_loss=0.26, pruned_loss=0.06664, over 4505.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2963, pruned_loss=0.06416, over 940407.92 frames. ], batch size: 10, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:43:08,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=104690.66666666667, ans=0.025
+2024-07-28 03:43:15,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.05 vs. limit=10.0
+2024-07-28 03:43:16,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.82 vs. limit=22.5
+2024-07-28 03:43:22,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=104730.66666666667, ans=0.125
+2024-07-28 03:43:28,343 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.937e+01 5.764e+01 6.206e+01 6.980e+01 1.236e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 03:43:29,715 INFO [train.py:1114] (1/4) Epoch 8, batch 7000, loss[loss=0.1889, simple_loss=0.2737, pruned_loss=0.05204, over 4628.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2941, pruned_loss=0.06334, over 938480.38 frames. ], batch size: 17, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:43:43,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=104770.66666666667, ans=0.5
+2024-07-28 03:43:52,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=104784.0, ans=0.5
+2024-07-28 03:44:09,222 INFO [train.py:1114] (1/4) Epoch 8, batch 7050, loss[loss=0.2313, simple_loss=0.3195, pruned_loss=0.07154, over 4751.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2949, pruned_loss=0.06348, over 941762.98 frames. ], batch size: 19, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:44:26,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0
+2024-07-28 03:44:34,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=104850.66666666667, ans=0.0
+2024-07-28 03:44:34,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=104850.66666666667, ans=0.2
+2024-07-28 03:44:34,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.27 vs. limit=22.5
+2024-07-28 03:44:35,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104850.66666666667, ans=0.125
+2024-07-28 03:44:41,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.673e+01 6.225e+01 7.440e+01 1.294e+02, threshold=1.245e+02, percent-clipped=1.0
+2024-07-28 03:44:41,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.82 vs. limit=15.0
+2024-07-28 03:44:42,708 INFO [train.py:1114] (1/4) Epoch 8, batch 7100, loss[loss=0.2173, simple_loss=0.308, pruned_loss=0.06328, over 4810.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2967, pruned_loss=0.065, over 937014.29 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:44:44,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=104877.33333333333, ans=0.5
+2024-07-28 03:44:46,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=104877.33333333333, ans=0.0
+2024-07-28 03:44:52,560 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:45:03,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0
+2024-07-28 03:45:05,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104917.33333333333, ans=0.125
+2024-07-28 03:45:05,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=104917.33333333333, ans=0.0
+2024-07-28 03:45:07,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=104917.33333333333, ans=0.025
+2024-07-28 03:45:12,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.85 vs. limit=15.0
+2024-07-28 03:45:15,083 INFO [train.py:1114] (1/4) Epoch 8, batch 7150, loss[loss=0.2806, simple_loss=0.3631, pruned_loss=0.09901, over 4548.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2958, pruned_loss=0.06437, over 937894.87 frames. ], batch size: 21, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:45:16,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=104944.0, ans=0.2
+2024-07-28 03:45:19,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104944.0, ans=0.1
+2024-07-28 03:45:29,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=104970.66666666667, ans=0.0
+2024-07-28 03:45:34,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=104970.66666666667, ans=0.125
+2024-07-28 03:45:37,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=104984.0, ans=0.125
+2024-07-28 03:45:41,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.06 vs. limit=22.5
+2024-07-28 03:45:42,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=104997.33333333333, ans=0.2
+2024-07-28 03:45:46,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104997.33333333333, ans=0.1
+2024-07-28 03:45:48,071 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.987e+01 7.110e+01 8.384e+01 1.191e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-28 03:45:49,419 INFO [train.py:1114] (1/4) Epoch 8, batch 7200, loss[loss=0.2299, simple_loss=0.3023, pruned_loss=0.07881, over 4800.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2962, pruned_loss=0.06436, over 937972.54 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 32.0
+2024-07-28 03:45:54,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=105010.66666666667, ans=0.125
+2024-07-28 03:45:57,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=105024.0, ans=0.2
+2024-07-28 03:45:58,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.79 vs. limit=15.0
+2024-07-28 03:46:07,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.11 vs. limit=15.0
+2024-07-28 03:46:08,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.99 vs. limit=15.0
+2024-07-28 03:46:14,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105064.0, ans=0.125
+2024-07-28 03:46:20,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105064.0, ans=0.125
+2024-07-28 03:46:20,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=105064.0, ans=0.125
+2024-07-28 03:46:21,906 INFO [train.py:1114] (1/4) Epoch 8, batch 7250, loss[loss=0.1809, simple_loss=0.267, pruned_loss=0.04741, over 4851.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2945, pruned_loss=0.06381, over 939676.90 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:46:22,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105077.33333333333, ans=0.1
+2024-07-28 03:46:23,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=105077.33333333333, ans=0.0
+2024-07-28 03:46:32,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.92 vs. limit=22.5
+2024-07-28 03:46:35,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=105104.0, ans=0.125
+2024-07-28 03:46:36,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.63 vs. limit=6.0
+2024-07-28 03:46:40,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=105104.0, ans=0.2
+2024-07-28 03:46:44,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=15.0
+2024-07-28 03:46:48,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=105130.66666666667, ans=22.5
+2024-07-28 03:46:53,133 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.010e+01 6.491e+01 7.289e+01 9.989e+01, threshold=1.298e+02, percent-clipped=0.0
+2024-07-28 03:46:54,405 INFO [train.py:1114] (1/4) Epoch 8, batch 7300, loss[loss=0.1637, simple_loss=0.255, pruned_loss=0.03616, over 4850.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2951, pruned_loss=0.06424, over 939892.44 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:46:54,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105144.0, ans=0.125
+2024-07-28 03:47:03,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=105157.33333333333, ans=0.2
+2024-07-28 03:47:15,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=105184.0, ans=0.125
+2024-07-28 03:47:21,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=105197.33333333333, ans=0.125
+2024-07-28 03:47:25,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=105197.33333333333, ans=0.025
+2024-07-28 03:47:27,035 INFO [train.py:1114] (1/4) Epoch 8, batch 7350, loss[loss=0.2032, simple_loss=0.2845, pruned_loss=0.06097, over 4643.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2957, pruned_loss=0.06377, over 938992.70 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:47:29,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=105210.66666666667, ans=0.125
+2024-07-28 03:47:34,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=105224.0, ans=0.125
+2024-07-28 03:47:50,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=105250.66666666667, ans=0.07
+2024-07-28 03:47:58,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+01 5.796e+01 6.371e+01 7.883e+01 1.311e+02, threshold=1.274e+02, percent-clipped=1.0
+2024-07-28 03:47:59,457 INFO [train.py:1114] (1/4) Epoch 8, batch 7400, loss[loss=0.2213, simple_loss=0.3146, pruned_loss=0.06397, over 4700.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2959, pruned_loss=0.06382, over 940472.29 frames. ], batch size: 13, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:48:01,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.53 vs. limit=15.0
+2024-07-28 03:48:08,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=105290.66666666667, ans=0.0
+2024-07-28 03:48:22,682 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:48:30,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=15.0
+2024-07-28 03:48:31,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105344.0, ans=0.125
+2024-07-28 03:48:32,432 INFO [train.py:1114] (1/4) Epoch 8, batch 7450, loss[loss=0.1997, simple_loss=0.2886, pruned_loss=0.05537, over 4619.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.295, pruned_loss=0.06364, over 938007.21 frames. ], batch size: 11, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:48:34,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105344.0, ans=0.125
+2024-07-28 03:48:49,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105370.66666666667, ans=0.125
+2024-07-28 03:48:50,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=105370.66666666667, ans=0.05
+2024-07-28 03:48:52,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.30 vs. limit=22.5
+2024-07-28 03:48:55,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=105384.0, ans=0.125
+2024-07-28 03:49:03,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.959e+01 6.584e+01 7.550e+01 1.203e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-28 03:49:05,209 INFO [train.py:1114] (1/4) Epoch 8, batch 7500, loss[loss=0.271, simple_loss=0.3276, pruned_loss=0.1073, over 3429.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2948, pruned_loss=0.06394, over 936257.55 frames. ], batch size: 35, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:49:08,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.39 vs. limit=15.0
+2024-07-28 03:49:20,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=105437.33333333333, ans=0.125
+2024-07-28 03:49:21,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=105437.33333333333, ans=0.5
+2024-07-28 03:49:22,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=105437.33333333333, ans=0.2
+2024-07-28 03:49:26,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=15.0
+2024-07-28 03:49:32,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105464.0, ans=0.1
+2024-07-28 03:49:39,661 INFO [train.py:1114] (1/4) Epoch 8, batch 7550, loss[loss=0.2214, simple_loss=0.3115, pruned_loss=0.06569, over 4611.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2971, pruned_loss=0.06477, over 936039.78 frames. ], batch size: 17, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:49:49,343 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:50:10,694 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 5.717e+01 6.338e+01 7.144e+01 8.798e+01, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 03:50:12,726 INFO [train.py:1114] (1/4) Epoch 8, batch 7600, loss[loss=0.2296, simple_loss=0.3302, pruned_loss=0.0645, over 4811.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2969, pruned_loss=0.06425, over 938341.23 frames. ], batch size: 14, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:50:15,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=105544.0, ans=0.125
+2024-07-28 03:50:21,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105557.33333333333, ans=0.1
+2024-07-28 03:50:39,871 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:50:41,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.94 vs. limit=6.0
+2024-07-28 03:50:42,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105597.33333333333, ans=0.1
+2024-07-28 03:50:44,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=105597.33333333333, ans=0.025
+2024-07-28 03:50:45,462 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:50:45,951 INFO [train.py:1114] (1/4) Epoch 8, batch 7650, loss[loss=0.1764, simple_loss=0.2571, pruned_loss=0.04786, over 4940.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2961, pruned_loss=0.06422, over 937689.77 frames. ], batch size: 12, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:50:49,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-28 03:50:51,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.42 vs. limit=15.0
+2024-07-28 03:50:54,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=105624.0, ans=0.0
+2024-07-28 03:50:58,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=105624.0, ans=0.025
+2024-07-28 03:51:02,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=105637.33333333333, ans=0.2
+2024-07-28 03:51:08,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.79 vs. limit=15.0
+2024-07-28 03:51:14,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.58 vs. limit=15.0
+2024-07-28 03:51:17,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.951e+01 6.499e+01 7.761e+01 1.442e+02, threshold=1.300e+02, percent-clipped=1.0
+2024-07-28 03:51:19,095 INFO [train.py:1114] (1/4) Epoch 8, batch 7700, loss[loss=0.2123, simple_loss=0.3094, pruned_loss=0.05758, over 4691.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2962, pruned_loss=0.06406, over 934730.41 frames. ], batch size: 13, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:51:23,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=105677.33333333333, ans=0.0
+2024-07-28 03:51:25,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=105690.66666666667, ans=10.0
+2024-07-28 03:51:27,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105690.66666666667, ans=0.125
+2024-07-28 03:51:40,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105717.33333333333, ans=0.125
+2024-07-28 03:51:42,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.43 vs. limit=15.0
+2024-07-28 03:51:44,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105717.33333333333, ans=0.1
+2024-07-28 03:51:47,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.08 vs. limit=12.0
+2024-07-28 03:51:51,754 INFO [train.py:1114] (1/4) Epoch 8, batch 7750, loss[loss=0.2315, simple_loss=0.3206, pruned_loss=0.0712, over 4932.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2976, pruned_loss=0.0646, over 935901.19 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:51:55,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=105744.0, ans=0.0
+2024-07-28 03:51:57,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.15 vs. limit=22.5
+2024-07-28 03:51:58,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=105757.33333333333, ans=0.0
+2024-07-28 03:52:04,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=105770.66666666667, ans=0.025
+2024-07-28 03:52:08,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105770.66666666667, ans=0.125
+2024-07-28 03:52:13,409 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:52:13,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.36 vs. limit=22.5
+2024-07-28 03:52:23,022 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.861e+01 6.500e+01 7.436e+01 9.708e+01, threshold=1.300e+02, percent-clipped=0.0
+2024-07-28 03:52:24,793 INFO [train.py:1114] (1/4) Epoch 8, batch 7800, loss[loss=0.2242, simple_loss=0.3152, pruned_loss=0.06664, over 4656.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2982, pruned_loss=0.0648, over 937482.21 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:52:25,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.80 vs. limit=15.0
+2024-07-28 03:52:45,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105850.66666666667, ans=0.125
+2024-07-28 03:52:46,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105850.66666666667, ans=0.125
+2024-07-28 03:52:56,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=105864.0, ans=15.0
+2024-07-28 03:52:59,043 INFO [train.py:1114] (1/4) Epoch 8, batch 7850, loss[loss=0.1781, simple_loss=0.2583, pruned_loss=0.04893, over 4939.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2989, pruned_loss=0.06539, over 936734.43 frames. ], batch size: 11, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:52:59,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=105877.33333333333, ans=0.125
+2024-07-28 03:53:00,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=105877.33333333333, ans=0.0
+2024-07-28 03:53:02,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105877.33333333333, ans=0.125
+2024-07-28 03:53:04,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.81 vs. limit=15.0
+2024-07-28 03:53:06,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=105890.66666666667, ans=0.1
+2024-07-28 03:53:10,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105890.66666666667, ans=0.1
+2024-07-28 03:53:27,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=105930.66666666667, ans=0.125
+2024-07-28 03:53:30,109 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.073e+01 5.939e+01 6.689e+01 8.282e+01 1.225e+02, threshold=1.338e+02, percent-clipped=0.0
+2024-07-28 03:53:31,392 INFO [train.py:1114] (1/4) Epoch 8, batch 7900, loss[loss=0.2276, simple_loss=0.3137, pruned_loss=0.07074, over 4864.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.3004, pruned_loss=0.06584, over 934663.11 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:53:39,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.65 vs. limit=10.0
+2024-07-28 03:53:45,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.91 vs. limit=15.0
+2024-07-28 03:53:57,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=105984.0, ans=0.125
+2024-07-28 03:53:59,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=105997.33333333333, ans=0.95
+2024-07-28 03:54:00,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=105997.33333333333, ans=0.125
+2024-07-28 03:54:04,734 INFO [train.py:1114] (1/4) Epoch 8, batch 7950, loss[loss=0.2952, simple_loss=0.3576, pruned_loss=0.1165, over 3151.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2997, pruned_loss=0.06536, over 936420.91 frames. ], batch size: 36, lr: 9.22e-03, grad_scale: 16.0
+2024-07-28 03:54:06,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=106010.66666666667, ans=0.125
+2024-07-28 03:54:37,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=106064.0, ans=22.5
+2024-07-28 03:54:39,423 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.772e+01 6.445e+01 7.166e+01 9.685e+01, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 03:54:40,292 INFO [train.py:1114] (1/4) Epoch 8, batch 8000, loss[loss=0.1731, simple_loss=0.2554, pruned_loss=0.04539, over 4623.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.298, pruned_loss=0.06518, over 934904.66 frames. ], batch size: 11, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:54:40,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=106077.33333333333, ans=0.125
+2024-07-28 03:54:44,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106077.33333333333, ans=0.125
+2024-07-28 03:54:51,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=106090.66666666667, ans=0.125
+2024-07-28 03:54:53,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=106104.0, ans=0.0
+2024-07-28 03:54:57,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=106104.0, ans=0.2
+2024-07-28 03:54:59,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=106117.33333333333, ans=0.2
+2024-07-28 03:55:10,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=106130.66666666667, ans=0.2
+2024-07-28 03:55:14,592 INFO [train.py:1114] (1/4) Epoch 8, batch 8050, loss[loss=0.2123, simple_loss=0.3142, pruned_loss=0.05524, over 4813.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2984, pruned_loss=0.06496, over 934602.26 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:55:17,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=106144.0, ans=0.125
+2024-07-28 03:55:47,755 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.934e+01 5.620e+01 6.153e+01 6.973e+01 1.002e+02, threshold=1.231e+02, percent-clipped=0.0
+2024-07-28 03:55:48,430 INFO [train.py:1114] (1/4) Epoch 8, batch 8100, loss[loss=0.2541, simple_loss=0.3165, pruned_loss=0.09584, over 4805.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2991, pruned_loss=0.066, over 934010.02 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:55:49,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=106210.66666666667, ans=0.2
+2024-07-28 03:55:58,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=106224.0, ans=0.125
+2024-07-28 03:56:02,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=106237.33333333333, ans=0.2
+2024-07-28 03:56:17,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=106264.0, ans=0.0
+2024-07-28 03:56:20,977 INFO [train.py:1114] (1/4) Epoch 8, batch 8150, loss[loss=0.2199, simple_loss=0.298, pruned_loss=0.07089, over 4802.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2984, pruned_loss=0.06558, over 937456.01 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:56:24,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=106277.33333333333, ans=0.125
+2024-07-28 03:56:25,001 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:56:28,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106290.66666666667, ans=0.0
+2024-07-28 03:56:32,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0
+2024-07-28 03:56:38,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=106304.0, ans=0.025
+2024-07-28 03:56:45,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=12.0
+2024-07-28 03:56:52,815 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.056e+01 5.810e+01 6.420e+01 7.411e+01 1.127e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 03:56:53,443 INFO [train.py:1114] (1/4) Epoch 8, batch 8200, loss[loss=0.223, simple_loss=0.3067, pruned_loss=0.06969, over 4800.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2987, pruned_loss=0.06574, over 938123.29 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:57:26,358 INFO [train.py:1114] (1/4) Epoch 8, batch 8250, loss[loss=0.1829, simple_loss=0.2694, pruned_loss=0.04815, over 4900.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2979, pruned_loss=0.06489, over 938225.21 frames. ], batch size: 13, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:57:39,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106437.33333333333, ans=0.1
+2024-07-28 03:57:45,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106450.66666666667, ans=0.1
+2024-07-28 03:57:56,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106464.0, ans=0.125
+2024-07-28 03:57:57,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106464.0, ans=0.1
+2024-07-28 03:57:57,943 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.787e+01 6.260e+01 6.993e+01 1.105e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 03:57:58,641 INFO [train.py:1114] (1/4) Epoch 8, batch 8300, loss[loss=0.2431, simple_loss=0.3252, pruned_loss=0.08044, over 4904.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2983, pruned_loss=0.06533, over 938141.48 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:58:01,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=106477.33333333333, ans=0.125
+2024-07-28 03:58:04,036 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0
+2024-07-28 03:58:25,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=106504.0, ans=0.2
+2024-07-28 03:58:26,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0
+2024-07-28 03:58:30,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106504.0, ans=0.125
+2024-07-28 03:58:39,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=106530.66666666667, ans=0.125
+2024-07-28 03:58:39,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106530.66666666667, ans=0.125
+2024-07-28 03:58:41,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.63 vs. limit=15.0
+2024-07-28 03:58:45,426 INFO [train.py:1114] (1/4) Epoch 8, batch 8350, loss[loss=0.2494, simple_loss=0.3362, pruned_loss=0.08134, over 4784.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2981, pruned_loss=0.06518, over 940826.73 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:58:48,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=106544.0, ans=0.025
+2024-07-28 03:58:53,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.35 vs. limit=15.0
+2024-07-28 03:58:56,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=106557.33333333333, ans=0.125
+2024-07-28 03:58:59,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=106570.66666666667, ans=0.2
+2024-07-28 03:58:59,310 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:59:02,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=106570.66666666667, ans=0.0
+2024-07-28 03:59:11,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=106584.0, ans=0.0
+2024-07-28 03:59:11,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=106584.0, ans=0.05
+2024-07-28 03:59:11,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.71 vs. limit=6.0
+2024-07-28 03:59:14,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106597.33333333333, ans=0.125
+2024-07-28 03:59:15,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=106597.33333333333, ans=0.2
+2024-07-28 03:59:17,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=106597.33333333333, ans=0.125
+2024-07-28 03:59:17,896 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.040e+01 6.810e+01 8.092e+01 1.142e+02, threshold=1.362e+02, percent-clipped=0.0
+2024-07-28 03:59:18,626 INFO [train.py:1114] (1/4) Epoch 8, batch 8400, loss[loss=0.2046, simple_loss=0.2841, pruned_loss=0.06258, over 4782.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2974, pruned_loss=0.06463, over 939582.37 frames. ], batch size: 12, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:59:22,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.23 vs. limit=22.5
+2024-07-28 03:59:27,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=106624.0, ans=0.0
+2024-07-28 03:59:32,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=106637.33333333333, ans=0.125
+2024-07-28 03:59:32,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=106637.33333333333, ans=0.2
+2024-07-28 03:59:37,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=106650.66666666667, ans=0.0
+2024-07-28 03:59:38,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106650.66666666667, ans=0.0
+2024-07-28 03:59:39,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106650.66666666667, ans=0.125
+2024-07-28 03:59:52,389 INFO [train.py:1114] (1/4) Epoch 8, batch 8450, loss[loss=0.2269, simple_loss=0.3148, pruned_loss=0.06952, over 4801.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2994, pruned_loss=0.0656, over 938753.35 frames. ], batch size: 15, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 03:59:58,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106690.66666666667, ans=0.0
+2024-07-28 04:00:18,966 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=12.0
+2024-07-28 04:00:25,072 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.985e+01 6.391e+01 7.364e+01 1.076e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-28 04:00:25,733 INFO [train.py:1114] (1/4) Epoch 8, batch 8500, loss[loss=0.1694, simple_loss=0.2522, pruned_loss=0.04335, over 4623.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2981, pruned_loss=0.06504, over 938505.46 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 04:00:34,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106757.33333333333, ans=0.125
+2024-07-28 04:00:39,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.41 vs. limit=10.0
+2024-07-28 04:00:43,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=106770.66666666667, ans=0.0
+2024-07-28 04:00:58,735 INFO [train.py:1114] (1/4) Epoch 8, batch 8550, loss[loss=0.1748, simple_loss=0.2584, pruned_loss=0.04565, over 4806.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2982, pruned_loss=0.06505, over 939268.77 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 04:01:17,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106837.33333333333, ans=0.1
+2024-07-28 04:01:19,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=106850.66666666667, ans=0.0
+2024-07-28 04:01:31,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.840e+01 6.821e+01 7.770e+01 1.284e+02, threshold=1.364e+02, percent-clipped=1.0
+2024-07-28 04:01:32,069 INFO [train.py:1114] (1/4) Epoch 8, batch 8600, loss[loss=0.1926, simple_loss=0.2829, pruned_loss=0.0511, over 4786.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.297, pruned_loss=0.06463, over 938679.37 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:01:33,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106877.33333333333, ans=0.1
+2024-07-28 04:01:54,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=106917.33333333333, ans=0.09899494936611666
+2024-07-28 04:01:56,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=106917.33333333333, ans=0.0
+2024-07-28 04:02:01,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=106930.66666666667, ans=0.125
+2024-07-28 04:02:01,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=106930.66666666667, ans=0.025
+2024-07-28 04:02:04,637 INFO [train.py:1114] (1/4) Epoch 8, batch 8650, loss[loss=0.1811, simple_loss=0.2783, pruned_loss=0.0419, over 4919.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2959, pruned_loss=0.06388, over 940261.92 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:02:20,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=106970.66666666667, ans=0.025
+2024-07-28 04:02:24,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=106970.66666666667, ans=0.125
+2024-07-28 04:02:35,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=106997.33333333333, ans=0.2
+2024-07-28 04:02:37,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=106997.33333333333, ans=0.025
+2024-07-28 04:02:37,621 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.079e+01 7.020e+01 8.285e+01 1.215e+02, threshold=1.404e+02, percent-clipped=0.0
+2024-07-28 04:02:38,286 INFO [train.py:1114] (1/4) Epoch 8, batch 8700, loss[loss=0.2077, simple_loss=0.2981, pruned_loss=0.05864, over 4754.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2974, pruned_loss=0.06477, over 937906.34 frames. ], batch size: 13, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:02:48,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107024.0, ans=0.1
+2024-07-28 04:03:01,902 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:03:02,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.07 vs. limit=15.0
+2024-07-28 04:03:02,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.44 vs. limit=15.0
+2024-07-28 04:03:08,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=107064.0, ans=0.0
+2024-07-28 04:03:09,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=107064.0, ans=0.125
+2024-07-28 04:03:13,345 INFO [train.py:1114] (1/4) Epoch 8, batch 8750, loss[loss=0.1976, simple_loss=0.2826, pruned_loss=0.05627, over 4686.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.297, pruned_loss=0.06468, over 936232.52 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:03:21,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=107090.66666666667, ans=0.2
+2024-07-28 04:03:24,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107090.66666666667, ans=0.125
+2024-07-28 04:03:25,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107090.66666666667, ans=0.0
+2024-07-28 04:03:42,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=107130.66666666667, ans=0.09899494936611666
+2024-07-28 04:03:42,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107130.66666666667, ans=0.125
+2024-07-28 04:03:46,703 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.010e+01 6.887e+01 8.098e+01 1.294e+02, threshold=1.377e+02, percent-clipped=0.0
+2024-07-28 04:03:47,334 INFO [train.py:1114] (1/4) Epoch 8, batch 8800, loss[loss=0.2123, simple_loss=0.2918, pruned_loss=0.06645, over 4936.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2977, pruned_loss=0.06484, over 937439.66 frames. ], batch size: 14, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:04:05,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0
+2024-07-28 04:04:10,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107184.0, ans=0.1
+2024-07-28 04:04:19,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=107197.33333333333, ans=15.0
+2024-07-28 04:04:20,448 INFO [train.py:1114] (1/4) Epoch 8, batch 8850, loss[loss=0.2076, simple_loss=0.2992, pruned_loss=0.05797, over 4526.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.298, pruned_loss=0.06509, over 931776.53 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:04:21,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. limit=15.0
+2024-07-28 04:04:23,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=107210.66666666667, ans=0.125
+2024-07-28 04:04:23,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=107210.66666666667, ans=0.2
+2024-07-28 04:04:27,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107224.0, ans=0.1
+2024-07-28 04:04:42,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=107250.66666666667, ans=0.025
+2024-07-28 04:04:53,231 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.819e+01 6.564e+01 7.832e+01 1.170e+02, threshold=1.313e+02, percent-clipped=0.0
+2024-07-28 04:04:53,898 INFO [train.py:1114] (1/4) Epoch 8, batch 8900, loss[loss=0.1546, simple_loss=0.2316, pruned_loss=0.0388, over 4924.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2976, pruned_loss=0.06493, over 929930.83 frames. ], batch size: 12, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:04:58,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107277.33333333333, ans=0.125
+2024-07-28 04:05:01,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=107290.66666666667, ans=0.0
+2024-07-28 04:05:08,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=107304.0, ans=0.0
+2024-07-28 04:05:12,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=107317.33333333333, ans=0.1
+2024-07-28 04:05:14,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=107317.33333333333, ans=0.0
+2024-07-28 04:05:23,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107330.66666666667, ans=0.125
+2024-07-28 04:05:26,156 INFO [train.py:1114] (1/4) Epoch 8, batch 8950, loss[loss=0.2026, simple_loss=0.2923, pruned_loss=0.05644, over 4440.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2967, pruned_loss=0.06444, over 930559.54 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:05:28,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=107344.0, ans=0.025
+2024-07-28 04:05:39,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.95 vs. limit=22.5
+2024-07-28 04:05:55,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=107397.33333333333, ans=0.0
+2024-07-28 04:05:56,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107397.33333333333, ans=0.1
+2024-07-28 04:05:59,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.334e+01 5.823e+01 6.551e+01 7.649e+01 1.257e+02, threshold=1.310e+02, percent-clipped=0.0
+2024-07-28 04:06:00,699 INFO [train.py:1114] (1/4) Epoch 8, batch 9000, loss[loss=0.1996, simple_loss=0.2782, pruned_loss=0.06047, over 4643.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2946, pruned_loss=0.06324, over 933274.61 frames. ], batch size: 12, lr: 9.16e-03, grad_scale: 32.0
+2024-07-28 04:06:00,700 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 04:06:12,614 INFO [train.py:1146] (1/4) Epoch 8, validation: loss=0.1781, simple_loss=0.2826, pruned_loss=0.03685, over 944034.00 frames.
+2024-07-28 04:06:12,615 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 04:06:14,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=107410.66666666667, ans=0.2
+2024-07-28 04:06:21,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.77 vs. limit=15.0
+2024-07-28 04:06:25,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107437.33333333333, ans=0.1
+2024-07-28 04:06:30,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=107437.33333333333, ans=0.0
+2024-07-28 04:06:30,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=107437.33333333333, ans=0.125
+2024-07-28 04:06:32,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107450.66666666667, ans=0.125
+2024-07-28 04:06:35,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=107450.66666666667, ans=10.0
+2024-07-28 04:06:45,366 INFO [train.py:1114] (1/4) Epoch 8, batch 9050, loss[loss=0.193, simple_loss=0.2687, pruned_loss=0.05865, over 4503.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2931, pruned_loss=0.06295, over 934050.67 frames. ], batch size: 10, lr: 9.16e-03, grad_scale: 32.0
+2024-07-28 04:06:49,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107477.33333333333, ans=0.125
+2024-07-28 04:06:52,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0
+2024-07-28 04:06:59,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=107504.0, ans=0.125
+2024-07-28 04:07:16,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.793e+01 6.353e+01 7.194e+01 9.869e+01, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 04:07:17,491 INFO [train.py:1114] (1/4) Epoch 8, batch 9100, loss[loss=0.2162, simple_loss=0.317, pruned_loss=0.05772, over 4933.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2927, pruned_loss=0.06232, over 936689.17 frames. ], batch size: 14, lr: 9.16e-03, grad_scale: 32.0
+2024-07-28 04:07:30,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.82 vs. limit=15.0
+2024-07-28 04:07:30,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=107570.66666666667, ans=0.95
+2024-07-28 04:07:31,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=107570.66666666667, ans=0.09899494936611666
+2024-07-28 04:07:34,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=107570.66666666667, ans=0.025
+2024-07-28 04:07:37,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107584.0, ans=0.125
+2024-07-28 04:07:39,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107584.0, ans=0.125
+2024-07-28 04:07:48,970 INFO [train.py:1114] (1/4) Epoch 8, batch 9150, loss[loss=0.2018, simple_loss=0.2854, pruned_loss=0.05912, over 4804.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2936, pruned_loss=0.063, over 935554.25 frames. 
], batch size: 14, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:07:52,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107610.66666666667, ans=0.1 +2024-07-28 04:07:52,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=107610.66666666667, ans=0.2 +2024-07-28 04:07:55,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107624.0, ans=0.125 +2024-07-28 04:08:04,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0 +2024-07-28 04:08:07,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=107650.66666666667, ans=0.0 +2024-07-28 04:08:19,828 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.934e+01 6.513e+01 7.499e+01 1.086e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 04:08:20,440 INFO [train.py:1114] (1/4) Epoch 8, batch 9200, loss[loss=0.1775, simple_loss=0.2616, pruned_loss=0.04669, over 4857.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2923, pruned_loss=0.06255, over 937477.63 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:08:20,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=107677.33333333333, ans=0.0 +2024-07-28 04:08:25,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=6.0 +2024-07-28 04:08:41,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=107717.33333333333, ans=0.04949747468305833 +2024-07-28 04:08:50,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=107730.66666666667, ans=0.2 +2024-07-28 04:08:51,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-07-28 04:08:51,961 INFO [train.py:1114] (1/4) Epoch 8, batch 9250, loss[loss=0.2249, simple_loss=0.3091, pruned_loss=0.07034, over 4641.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2925, pruned_loss=0.0624, over 938193.34 frames. 
], batch size: 13, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:08:52,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107744.0, ans=0.125 +2024-07-28 04:08:55,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107744.0, ans=0.125 +2024-07-28 04:09:07,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=107770.66666666667, ans=0.1 +2024-07-28 04:09:19,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=107797.33333333333, ans=0.025 +2024-07-28 04:09:22,900 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.836e+01 6.420e+01 7.069e+01 1.211e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 04:09:23,526 INFO [train.py:1114] (1/4) Epoch 8, batch 9300, loss[loss=0.202, simple_loss=0.2813, pruned_loss=0.06137, over 4775.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2928, pruned_loss=0.06246, over 937937.66 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:09:23,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=107810.66666666667, ans=0.0 +2024-07-28 04:09:24,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.00 vs. limit=12.0 +2024-07-28 04:09:26,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=107810.66666666667, ans=0.125 +2024-07-28 04:09:27,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107810.66666666667, ans=0.1 +2024-07-28 04:09:28,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107810.66666666667, ans=0.1 +2024-07-28 04:09:32,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=107824.0, ans=0.2 +2024-07-28 04:09:36,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107837.33333333333, ans=0.1 +2024-07-28 04:09:54,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107864.0, ans=0.0 +2024-07-28 04:09:55,750 INFO [train.py:1114] (1/4) Epoch 8, batch 9350, loss[loss=0.1722, simple_loss=0.2452, pruned_loss=0.04958, over 4815.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2935, pruned_loss=0.06307, over 934544.03 frames. ], batch size: 11, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:10:08,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=107904.0, ans=0.0 +2024-07-28 04:10:24,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=107930.66666666667, ans=0.125 +2024-07-28 04:10:27,326 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.454e+01 5.997e+01 6.849e+01 9.161e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 04:10:27,985 INFO [train.py:1114] (1/4) Epoch 8, batch 9400, loss[loss=0.2139, simple_loss=0.298, pruned_loss=0.0649, over 4684.00 frames. 
], tot_loss[loss=0.2111, simple_loss=0.2944, pruned_loss=0.06389, over 932307.65 frames. ], batch size: 13, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:10:29,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=107944.0, ans=0.0 +2024-07-28 04:10:29,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107944.0, ans=0.125 +2024-07-28 04:10:31,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107944.0, ans=0.125 +2024-07-28 04:10:34,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.05 vs. limit=15.0 +2024-07-28 04:10:36,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=107957.33333333333, ans=0.04949747468305833 +2024-07-28 04:10:40,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107970.66666666667, ans=0.125 +2024-07-28 04:11:01,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107997.33333333333, ans=0.125 +2024-07-28 04:11:02,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=107997.33333333333, ans=0.025 +2024-07-28 04:11:03,417 INFO [train.py:1114] (1/4) Epoch 8, batch 9450, loss[loss=0.2043, simple_loss=0.281, pruned_loss=0.06374, over 4802.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2962, pruned_loss=0.06468, over 931992.26 frames. ], batch size: 11, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:11:17,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=108037.33333333333, ans=0.125 +2024-07-28 04:11:17,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=108037.33333333333, ans=0.0 +2024-07-28 04:11:24,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108050.66666666667, ans=0.1 +2024-07-28 04:11:24,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108050.66666666667, ans=0.1 +2024-07-28 04:11:30,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108064.0, ans=0.1 +2024-07-28 04:11:34,692 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 5.745e+01 6.311e+01 7.517e+01 1.007e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 04:11:35,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.01 vs. limit=15.0 +2024-07-28 04:11:35,348 INFO [train.py:1114] (1/4) Epoch 8, batch 9500, loss[loss=0.2338, simple_loss=0.3087, pruned_loss=0.07945, over 4716.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2967, pruned_loss=0.06466, over 934624.95 frames. 
], batch size: 12, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:11:36,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108077.33333333333, ans=0.1 +2024-07-28 04:11:47,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=16.85 vs. limit=15.0 +2024-07-28 04:11:50,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=108104.0, ans=0.5 +2024-07-28 04:11:59,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=108130.66666666667, ans=0.025 +2024-07-28 04:12:06,610 INFO [train.py:1114] (1/4) Epoch 8, batch 9550, loss[loss=0.1829, simple_loss=0.2722, pruned_loss=0.04682, over 4775.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2965, pruned_loss=0.06466, over 931793.36 frames. ], batch size: 12, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:12:08,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108144.0, ans=0.1 +2024-07-28 04:12:12,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108157.33333333333, ans=0.1 +2024-07-28 04:12:13,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.39 vs. limit=15.0 +2024-07-28 04:12:14,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=108157.33333333333, ans=0.125 +2024-07-28 04:12:15,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=108157.33333333333, ans=0.0 +2024-07-28 04:12:16,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.85 vs. limit=22.5 +2024-07-28 04:12:21,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=108170.66666666667, ans=0.0 +2024-07-28 04:12:22,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108170.66666666667, ans=0.1 +2024-07-28 04:12:37,462 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 5.899e+01 6.832e+01 8.942e+01 1.153e+02, threshold=1.366e+02, percent-clipped=0.0 +2024-07-28 04:12:38,141 INFO [train.py:1114] (1/4) Epoch 8, batch 9600, loss[loss=0.2667, simple_loss=0.3273, pruned_loss=0.103, over 3611.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2962, pruned_loss=0.06389, over 931066.50 frames. ], batch size: 35, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:13:05,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=108264.0, ans=0.125 +2024-07-28 04:13:10,088 INFO [train.py:1114] (1/4) Epoch 8, batch 9650, loss[loss=0.2254, simple_loss=0.318, pruned_loss=0.06645, over 4852.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2966, pruned_loss=0.06408, over 926806.42 frames. 
], batch size: 16, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:13:15,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=108290.66666666667, ans=0.05 +2024-07-28 04:13:16,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.07 vs. limit=15.0 +2024-07-28 04:13:40,237 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.895e+01 5.890e+01 6.394e+01 7.383e+01 1.171e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 04:13:40,925 INFO [train.py:1114] (1/4) Epoch 8, batch 9700, loss[loss=0.1831, simple_loss=0.275, pruned_loss=0.0456, over 4177.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2963, pruned_loss=0.06353, over 924912.17 frames. ], batch size: 25, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:06,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=15.0 +2024-07-28 04:14:09,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=108397.33333333333, ans=0.2 +2024-07-28 04:14:09,783 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.29 vs. limit=12.0 +2024-07-28 04:14:11,904 INFO [train.py:1114] (1/4) Epoch 8, batch 9750, loss[loss=0.2216, simple_loss=0.2962, pruned_loss=0.07348, over 4682.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2963, pruned_loss=0.06386, over 925498.78 frames. ], batch size: 15, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:18,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.78 vs. limit=5.0 +2024-07-28 04:14:31,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108450.66666666667, ans=0.0 +2024-07-28 04:14:33,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=108450.66666666667, ans=0.125 +2024-07-28 04:14:40,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=108464.0, ans=0.125 +2024-07-28 04:14:40,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=108464.0, ans=0.2 +2024-07-28 04:14:42,359 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 6.068e+01 7.067e+01 8.452e+01 1.289e+02, threshold=1.413e+02, percent-clipped=1.0 +2024-07-28 04:14:42,979 INFO [train.py:1114] (1/4) Epoch 8, batch 9800, loss[loss=0.1837, simple_loss=0.2632, pruned_loss=0.05208, over 4708.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.295, pruned_loss=0.06388, over 924472.62 frames. 
], batch size: 12, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:49,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=108490.66666666667, ans=0.0 +2024-07-28 04:14:49,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=108490.66666666667, ans=0.2 +2024-07-28 04:14:52,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108490.66666666667, ans=0.0 +2024-07-28 04:14:58,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=108504.0, ans=0.05 +2024-07-28 04:14:59,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=108504.0, ans=10.0 +2024-07-28 04:15:02,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.12 vs. limit=10.0 +2024-07-28 04:15:03,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108517.33333333333, ans=0.125 +2024-07-28 04:15:06,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=108530.66666666667, ans=0.95 +2024-07-28 04:15:13,596 INFO [train.py:1114] (1/4) Epoch 8, batch 9850, loss[loss=0.2523, simple_loss=0.3442, pruned_loss=0.08017, over 4907.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2958, pruned_loss=0.06411, over 926717.54 frames. ], batch size: 15, lr: 9.11e-03, grad_scale: 32.0 +2024-07-28 04:15:21,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=108557.33333333333, ans=0.125 +2024-07-28 04:15:23,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=108557.33333333333, ans=0.07 +2024-07-28 04:15:23,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0 +2024-07-28 04:15:26,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=108570.66666666667, ans=0.025 +2024-07-28 04:15:29,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108570.66666666667, ans=0.125 +2024-07-28 04:15:33,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108584.0, ans=0.1 +2024-07-28 04:15:33,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108584.0, ans=0.1 +2024-07-28 04:15:39,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108597.33333333333, ans=0.1 +2024-07-28 04:15:44,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.942e+01 6.515e+01 7.439e+01 1.419e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 04:15:45,043 INFO [train.py:1114] (1/4) Epoch 8, batch 9900, loss[loss=0.2259, simple_loss=0.3205, pruned_loss=0.06569, over 4838.00 frames. 
], tot_loss[loss=0.2133, simple_loss=0.2969, pruned_loss=0.06481, over 926146.40 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 32.0 +2024-07-28 04:15:47,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.91 vs. limit=15.0 +2024-07-28 04:15:51,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.07 vs. limit=12.0 +2024-07-28 04:15:55,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.08 vs. limit=12.0 +2024-07-28 04:15:55,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108624.0, ans=0.1 +2024-07-28 04:15:57,395 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:15:59,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108637.33333333333, ans=0.0 +2024-07-28 04:16:01,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108637.33333333333, ans=0.125 +2024-07-28 04:16:02,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108637.33333333333, ans=0.1 +2024-07-28 04:16:02,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108637.33333333333, ans=0.125 +2024-07-28 04:16:14,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=108664.0, ans=0.0 +2024-07-28 04:16:15,788 INFO [train.py:1114] (1/4) Epoch 8, batch 9950, loss[loss=0.1676, simple_loss=0.2552, pruned_loss=0.04002, over 4811.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2975, pruned_loss=0.0649, over 928657.10 frames. ], batch size: 11, lr: 9.11e-03, grad_scale: 64.0 +2024-07-28 04:16:16,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.58 vs. limit=22.5 +2024-07-28 04:16:18,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=108677.33333333333, ans=0.09899494936611666 +2024-07-28 04:16:41,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.53 vs. limit=12.0 +2024-07-28 04:16:41,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=108730.66666666667, ans=0.125 +2024-07-28 04:16:44,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=108730.66666666667, ans=0.025 +2024-07-28 04:16:46,387 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.881e+01 6.237e+01 7.241e+01 1.097e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 04:16:46,978 INFO [train.py:1114] (1/4) Epoch 8, batch 10000, loss[loss=0.2188, simple_loss=0.3124, pruned_loss=0.06257, over 4616.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.3008, pruned_loss=0.06623, over 926317.03 frames. 
], batch size: 16, lr: 9.11e-03, grad_scale: 64.0 +2024-07-28 04:16:54,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=108757.33333333333, ans=0.05 +2024-07-28 04:16:58,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.29 vs. limit=22.5 +2024-07-28 04:17:00,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108770.66666666667, ans=0.125 +2024-07-28 04:17:04,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108784.0, ans=0.125 +2024-07-28 04:17:09,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108784.0, ans=0.1 +2024-07-28 04:17:14,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=108797.33333333333, ans=0.2 +2024-07-28 04:17:14,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.97 vs. limit=15.0 +2024-07-28 04:17:21,342 INFO [train.py:1114] (1/4) Epoch 8, batch 10050, loss[loss=0.2735, simple_loss=0.3353, pruned_loss=0.1059, over 3509.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3041, pruned_loss=0.0684, over 913998.38 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 64.0 +2024-07-28 04:17:30,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=108824.0, ans=0.0 +2024-07-28 04:17:31,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=108824.0, ans=0.0 +2024-07-28 04:17:45,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.34 vs. limit=10.0 +2024-07-28 04:17:46,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=108850.66666666667, ans=0.0 +2024-07-28 04:17:47,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=108864.0, ans=0.125 +2024-07-28 04:17:52,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108864.0, ans=0.125 +2024-07-28 04:17:54,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=108864.0, ans=0.0 +2024-07-28 04:17:54,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108877.33333333333, ans=0.1 +2024-07-28 04:17:55,197 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.508e+01 7.175e+01 7.748e+01 1.103e+02, threshold=1.435e+02, percent-clipped=0.0 +2024-07-28 04:17:55,230 INFO [train.py:1114] (1/4) Epoch 8, batch 10100, loss[loss=0.2834, simple_loss=0.3427, pruned_loss=0.1121, over 3050.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3095, pruned_loss=0.07447, over 859795.06 frames. 
], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:09,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=108904.0, ans=0.0 +2024-07-28 04:18:11,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=108904.0, ans=0.025 +2024-07-28 04:18:14,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.95 vs. limit=22.5 +2024-07-28 04:18:15,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 04:18:18,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=108917.33333333333, ans=0.125 +2024-07-28 04:18:27,872 INFO [train.py:1114] (1/4) Epoch 8, batch 10150, loss[loss=0.236, simple_loss=0.315, pruned_loss=0.07854, over 3611.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3139, pruned_loss=0.07914, over 817845.23 frames. ], batch size: 36, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:28,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108944.0, ans=0.125 +2024-07-28 04:18:37,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=108957.33333333333, ans=0.125 +2024-07-28 04:18:43,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.77 vs. limit=22.5 +2024-07-28 04:18:43,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=108970.66666666667, ans=0.125 +2024-07-28 04:18:59,458 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.833e+01 6.827e+01 7.269e+01 7.768e+01 1.197e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-28 04:18:59,491 INFO [train.py:1114] (1/4) Epoch 8, batch 10200, loss[loss=0.2423, simple_loss=0.3125, pruned_loss=0.08609, over 3259.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3165, pruned_loss=0.08224, over 787759.79 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:19:07,089 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:19:08,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109024.0, ans=0.1 +2024-07-28 04:19:10,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109024.0, ans=0.125 +2024-07-28 04:19:49,272 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:19:59,115 INFO [train.py:1114] (1/4) Epoch 9, batch 0, loss[loss=0.2096, simple_loss=0.2968, pruned_loss=0.06124, over 4857.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2968, pruned_loss=0.06124, over 4857.00 frames. 
], batch size: 12, lr: 8.61e-03, grad_scale: 32.0 +2024-07-28 04:19:59,115 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 04:20:10,796 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.1818, simple_loss=0.2877, pruned_loss=0.03795, over 944034.00 frames. +2024-07-28 04:20:10,796 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 04:20:17,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.91 vs. limit=10.0 +2024-07-28 04:20:19,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.73 vs. limit=15.0 +2024-07-28 04:20:30,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109066.66666666667, ans=0.125 +2024-07-28 04:20:37,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=109080.0, ans=0.09899494936611666 +2024-07-28 04:20:45,145 INFO [train.py:1114] (1/4) Epoch 9, batch 50, loss[loss=0.2271, simple_loss=0.3078, pruned_loss=0.07314, over 4608.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3036, pruned_loss=0.06789, over 206208.45 frames. ], batch size: 11, lr: 8.61e-03, grad_scale: 32.0 +2024-07-28 04:20:49,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=109106.66666666667, ans=0.025 +2024-07-28 04:21:02,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=109133.33333333333, ans=0.0 +2024-07-28 04:21:05,385 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 5.804e+01 6.519e+01 7.318e+01 1.022e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 04:21:07,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=109146.66666666667, ans=0.125 +2024-07-28 04:21:07,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-28 04:21:12,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=109146.66666666667, ans=0.2 +2024-07-28 04:21:12,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=109160.0, ans=0.125 +2024-07-28 04:21:16,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=109160.0, ans=0.0 +2024-07-28 04:21:17,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=109160.0, ans=0.125 +2024-07-28 04:21:22,072 INFO [train.py:1114] (1/4) Epoch 9, batch 100, loss[loss=0.2039, simple_loss=0.2828, pruned_loss=0.0625, over 4641.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3057, pruned_loss=0.06752, over 365081.81 frames. 
], batch size: 12, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:21:22,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=109173.33333333333, ans=0.0 +2024-07-28 04:21:25,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=109173.33333333333, ans=0.025 +2024-07-28 04:21:32,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109186.66666666667, ans=0.125 +2024-07-28 04:21:40,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=109200.0, ans=0.0 +2024-07-28 04:21:40,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109200.0, ans=0.125 +2024-07-28 04:21:43,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=109213.33333333333, ans=0.2 +2024-07-28 04:21:49,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109213.33333333333, ans=0.125 +2024-07-28 04:21:53,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.70 vs. limit=15.0 +2024-07-28 04:21:55,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=109226.66666666667, ans=0.125 +2024-07-28 04:21:59,532 INFO [train.py:1114] (1/4) Epoch 9, batch 150, loss[loss=0.168, simple_loss=0.2526, pruned_loss=0.04172, over 4599.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.3012, pruned_loss=0.06423, over 493730.87 frames. ], batch size: 11, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:22:05,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=109240.0, ans=0.1 +2024-07-28 04:22:11,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=109253.33333333333, ans=0.5 +2024-07-28 04:22:17,892 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.760e+01 6.227e+01 6.826e+01 1.008e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 04:22:26,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=109293.33333333333, ans=0.0 +2024-07-28 04:22:32,674 INFO [train.py:1114] (1/4) Epoch 9, batch 200, loss[loss=0.2861, simple_loss=0.3673, pruned_loss=0.1024, over 4435.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2971, pruned_loss=0.06315, over 593274.17 frames. 
], batch size: 21, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:22:34,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109306.66666666667, ans=0.125 +2024-07-28 04:22:38,876 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:22:45,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=109333.33333333333, ans=0.0 +2024-07-28 04:22:45,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=109333.33333333333, ans=0.125 +2024-07-28 04:22:46,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=109333.33333333333, ans=15.0 +2024-07-28 04:22:51,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109346.66666666667, ans=0.125 +2024-07-28 04:22:57,676 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.73 vs. limit=6.0 +2024-07-28 04:22:58,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.80 vs. limit=15.0 +2024-07-28 04:23:05,830 INFO [train.py:1114] (1/4) Epoch 9, batch 250, loss[loss=0.2002, simple_loss=0.298, pruned_loss=0.05124, over 4636.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2969, pruned_loss=0.06305, over 670188.55 frames. ], batch size: 16, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:23:05,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=109373.33333333333, ans=0.0 +2024-07-28 04:23:19,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109386.66666666667, ans=0.125 +2024-07-28 04:23:26,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.084e+01 6.743e+01 8.358e+01 1.381e+02, threshold=1.349e+02, percent-clipped=2.0 +2024-07-28 04:23:28,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.97 vs. limit=22.5 +2024-07-28 04:23:29,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=109413.33333333333, ans=0.0 +2024-07-28 04:23:36,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109426.66666666667, ans=0.125 +2024-07-28 04:23:40,734 INFO [train.py:1114] (1/4) Epoch 9, batch 300, loss[loss=0.2074, simple_loss=0.2934, pruned_loss=0.06071, over 4804.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2951, pruned_loss=0.06212, over 729927.23 frames. 
], batch size: 15, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:23:42,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=109440.0, ans=0.125 +2024-07-28 04:23:54,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=109466.66666666667, ans=0.0 +2024-07-28 04:24:00,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.36 vs. limit=22.5 +2024-07-28 04:24:05,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=109480.0, ans=0.125 +2024-07-28 04:24:06,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109480.0, ans=0.125 +2024-07-28 04:24:06,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=109480.0, ans=0.0 +2024-07-28 04:24:13,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.67 vs. limit=22.5 +2024-07-28 04:24:14,375 INFO [train.py:1114] (1/4) Epoch 9, batch 350, loss[loss=0.19, simple_loss=0.2781, pruned_loss=0.05102, over 4930.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2953, pruned_loss=0.0618, over 776701.62 frames. ], batch size: 12, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:24:28,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=109533.33333333333, ans=0.07 +2024-07-28 04:24:32,534 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.878e+01 6.356e+01 6.901e+01 1.235e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 04:24:33,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109546.66666666667, ans=0.1 +2024-07-28 04:24:37,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0 +2024-07-28 04:24:41,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=109560.0, ans=0.07 +2024-07-28 04:24:46,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=109573.33333333333, ans=0.0 +2024-07-28 04:24:47,188 INFO [train.py:1114] (1/4) Epoch 9, batch 400, loss[loss=0.2364, simple_loss=0.3104, pruned_loss=0.08115, over 4683.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2937, pruned_loss=0.0616, over 814285.29 frames. 
], batch size: 13, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:24:47,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109573.33333333333, ans=0.125 +2024-07-28 04:24:53,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=109586.66666666667, ans=0.125 +2024-07-28 04:24:54,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=109586.66666666667, ans=10.0 +2024-07-28 04:25:00,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=109600.0, ans=0.2 +2024-07-28 04:25:21,007 INFO [train.py:1114] (1/4) Epoch 9, batch 450, loss[loss=0.2344, simple_loss=0.3254, pruned_loss=0.0717, over 4637.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2925, pruned_loss=0.06139, over 839555.76 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0 +2024-07-28 04:25:21,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=20.69 vs. limit=15.0 +2024-07-28 04:25:25,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.59 vs. limit=15.0 +2024-07-28 04:25:26,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109653.33333333333, ans=0.125 +2024-07-28 04:25:26,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109653.33333333333, ans=0.1 +2024-07-28 04:25:27,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109653.33333333333, ans=0.125 +2024-07-28 04:25:38,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=109666.66666666667, ans=0.125 +2024-07-28 04:25:39,078 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.801e+01 6.257e+01 7.055e+01 9.311e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 04:25:39,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=6.0 +2024-07-28 04:25:47,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=109693.33333333333, ans=0.0 +2024-07-28 04:25:52,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.05 vs. limit=22.5 +2024-07-28 04:25:53,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-07-28 04:25:53,415 INFO [train.py:1114] (1/4) Epoch 9, batch 500, loss[loss=0.2542, simple_loss=0.3256, pruned_loss=0.09138, over 4689.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2924, pruned_loss=0.06126, over 861822.99 frames. 
], batch size: 15, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:26:13,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=109706.66666666667, ans=0.125 +2024-07-28 04:26:18,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109720.0, ans=0.0 +2024-07-28 04:26:26,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=109733.33333333333, ans=0.2 +2024-07-28 04:26:34,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.62 vs. limit=22.5 +2024-07-28 04:26:52,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=109760.0, ans=0.125 +2024-07-28 04:26:53,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109760.0, ans=0.125 +2024-07-28 04:26:55,067 INFO [train.py:1114] (1/4) Epoch 9, batch 550, loss[loss=0.1968, simple_loss=0.2884, pruned_loss=0.05263, over 4639.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2928, pruned_loss=0.06113, over 877701.70 frames. ], batch size: 17, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:27:00,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109773.33333333333, ans=0.0 +2024-07-28 04:27:15,812 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.880e+01 6.464e+01 7.237e+01 1.061e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 04:27:46,994 INFO [train.py:1114] (1/4) Epoch 9, batch 600, loss[loss=0.225, simple_loss=0.3022, pruned_loss=0.0739, over 4602.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2922, pruned_loss=0.06086, over 892085.04 frames. ], batch size: 16, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:27:47,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.31 vs. limit=15.0 +2024-07-28 04:27:49,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=109840.0, ans=0.125 +2024-07-28 04:27:54,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=109853.33333333333, ans=0.0 +2024-07-28 04:28:10,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.78 vs. limit=15.0 +2024-07-28 04:28:14,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109893.33333333333, ans=0.1 +2024-07-28 04:28:15,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=109893.33333333333, ans=0.125 +2024-07-28 04:28:20,223 INFO [train.py:1114] (1/4) Epoch 9, batch 650, loss[loss=0.2115, simple_loss=0.2933, pruned_loss=0.06479, over 4754.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2919, pruned_loss=0.06112, over 903750.65 frames. 
], batch size: 13, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:28:20,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109906.66666666667, ans=0.125 +2024-07-28 04:28:20,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-07-28 04:28:28,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=109920.0, ans=0.025 +2024-07-28 04:28:38,935 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.739e+01 6.277e+01 6.982e+01 1.071e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 04:28:40,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=12.0 +2024-07-28 04:28:52,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=109960.0, ans=0.125 +2024-07-28 04:28:53,379 INFO [train.py:1114] (1/4) Epoch 9, batch 700, loss[loss=0.2076, simple_loss=0.2984, pruned_loss=0.05834, over 4646.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2938, pruned_loss=0.06172, over 911638.48 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:29:10,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=110000.0, ans=0.0 +2024-07-28 04:29:15,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.13 vs. limit=6.0 +2024-07-28 04:29:27,308 INFO [train.py:1114] (1/4) Epoch 9, batch 750, loss[loss=0.2439, simple_loss=0.311, pruned_loss=0.08843, over 4691.00 frames. ], tot_loss[loss=0.208, simple_loss=0.293, pruned_loss=0.0615, over 917992.62 frames. ], batch size: 13, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:29:30,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=110040.0, ans=0.0 +2024-07-28 04:29:30,771 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.525e-03 +2024-07-28 04:29:33,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.77 vs. 
limit=15.0 +2024-07-28 04:29:36,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=110053.33333333333, ans=0.125 +2024-07-28 04:29:39,536 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:29:40,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=110053.33333333333, ans=0.025 +2024-07-28 04:29:42,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110066.66666666667, ans=0.125 +2024-07-28 04:29:43,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=110066.66666666667, ans=0.125 +2024-07-28 04:29:47,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=110066.66666666667, ans=0.125 +2024-07-28 04:29:48,610 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.674e+01 6.132e+01 7.146e+01 1.139e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 04:30:01,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110093.33333333333, ans=0.125 +2024-07-28 04:30:03,385 INFO [train.py:1114] (1/4) Epoch 9, batch 800, loss[loss=0.1638, simple_loss=0.2476, pruned_loss=0.03996, over 4858.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2921, pruned_loss=0.061, over 923111.01 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:30:20,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110133.33333333333, ans=0.1 +2024-07-28 04:30:21,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-28 04:30:23,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=25.87 vs. limit=22.5 +2024-07-28 04:30:24,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=110146.66666666667, ans=0.0 +2024-07-28 04:30:26,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.23 vs. limit=22.5 +2024-07-28 04:30:29,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0 +2024-07-28 04:30:37,052 INFO [train.py:1114] (1/4) Epoch 9, batch 850, loss[loss=0.1973, simple_loss=0.2992, pruned_loss=0.04771, over 4660.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2915, pruned_loss=0.06079, over 927338.27 frames. ], batch size: 14, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:30:41,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.97 vs. 
limit=22.5 +2024-07-28 04:30:43,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=110186.66666666667, ans=0.125 +2024-07-28 04:30:55,614 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.250e+01 5.670e+01 6.591e+01 7.214e+01 1.079e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 04:30:57,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110213.33333333333, ans=0.125 +2024-07-28 04:31:00,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=110213.33333333333, ans=0.125 +2024-07-28 04:31:06,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110226.66666666667, ans=0.125 +2024-07-28 04:31:10,554 INFO [train.py:1114] (1/4) Epoch 9, batch 900, loss[loss=0.1834, simple_loss=0.2711, pruned_loss=0.0478, over 4854.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2918, pruned_loss=0.06103, over 927968.00 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:31:31,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110280.0, ans=0.1 +2024-07-28 04:31:44,081 INFO [train.py:1114] (1/4) Epoch 9, batch 950, loss[loss=0.2131, simple_loss=0.3018, pruned_loss=0.06218, over 4773.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.292, pruned_loss=0.06166, over 929893.47 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:31:59,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=110333.33333333333, ans=0.125 +2024-07-28 04:32:02,678 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.804e+01 6.637e+01 7.593e+01 9.914e+01, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 04:32:08,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=110346.66666666667, ans=0.0 +2024-07-28 04:32:09,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=110346.66666666667, ans=0.0 +2024-07-28 04:32:16,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=110373.33333333333, ans=0.125 +2024-07-28 04:32:17,283 INFO [train.py:1114] (1/4) Epoch 9, batch 1000, loss[loss=0.2096, simple_loss=0.2812, pruned_loss=0.06898, over 4966.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2927, pruned_loss=0.06204, over 929813.13 frames. ], batch size: 13, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:32:18,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=110373.33333333333, ans=0.025 +2024-07-28 04:32:18,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.13 vs. 
limit=15.0 +2024-07-28 04:32:21,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110373.33333333333, ans=0.125 +2024-07-28 04:32:35,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=110400.0, ans=0.125 +2024-07-28 04:32:46,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=110426.66666666667, ans=0.125 +2024-07-28 04:32:52,775 INFO [train.py:1114] (1/4) Epoch 9, batch 1050, loss[loss=0.2113, simple_loss=0.2939, pruned_loss=0.06431, over 4864.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2925, pruned_loss=0.06209, over 932088.22 frames. ], batch size: 14, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:33:10,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=110453.33333333333, ans=0.125 +2024-07-28 04:33:37,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 5.646e+01 6.301e+01 7.018e+01 9.967e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 04:33:48,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=110480.0, ans=0.2 +2024-07-28 04:33:58,447 INFO [train.py:1114] (1/4) Epoch 9, batch 1100, loss[loss=0.1909, simple_loss=0.2647, pruned_loss=0.0585, over 4907.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2919, pruned_loss=0.06173, over 934585.60 frames. ], batch size: 13, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:34:03,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=110506.66666666667, ans=0.2 +2024-07-28 04:34:10,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=110520.0, ans=0.2 +2024-07-28 04:34:10,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=110520.0, ans=0.125 +2024-07-28 04:34:23,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110546.66666666667, ans=0.1 +2024-07-28 04:34:23,867 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:34:26,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=22.5 +2024-07-28 04:34:31,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=110560.0, ans=0.2 +2024-07-28 04:34:32,503 INFO [train.py:1114] (1/4) Epoch 9, batch 1150, loss[loss=0.207, simple_loss=0.2909, pruned_loss=0.06151, over 4902.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2919, pruned_loss=0.0613, over 933859.52 frames. ], batch size: 13, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:34:36,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110573.33333333333, ans=0.125 +2024-07-28 04:34:41,873 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.03 vs. 
limit=15.0 +2024-07-28 04:34:50,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=110600.0, ans=0.0 +2024-07-28 04:34:51,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110600.0, ans=0.125 +2024-07-28 04:34:51,451 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.662e+01 6.289e+01 6.921e+01 1.035e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 04:34:57,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=110613.33333333333, ans=0.0 +2024-07-28 04:35:01,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110626.66666666667, ans=0.1 +2024-07-28 04:35:04,762 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:35:07,033 INFO [train.py:1114] (1/4) Epoch 9, batch 1200, loss[loss=0.2629, simple_loss=0.3533, pruned_loss=0.08631, over 4869.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2935, pruned_loss=0.06227, over 932778.42 frames. ], batch size: 14, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:35:08,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=110640.0, ans=15.0 +2024-07-28 04:35:09,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=110640.0, ans=0.0 +2024-07-28 04:35:29,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=110680.0, ans=0.125 +2024-07-28 04:35:32,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=110680.0, ans=0.1 +2024-07-28 04:35:43,337 INFO [train.py:1114] (1/4) Epoch 9, batch 1250, loss[loss=0.2246, simple_loss=0.3185, pruned_loss=0.06531, over 4809.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2942, pruned_loss=0.06214, over 936776.15 frames. ], batch size: 15, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:35:43,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=110706.66666666667, ans=0.0 +2024-07-28 04:35:44,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110706.66666666667, ans=0.125 +2024-07-28 04:36:06,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=110733.33333333333, ans=0.125 +2024-07-28 04:36:07,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.896e+01 5.807e+01 6.256e+01 7.154e+01 1.109e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 04:36:12,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.75 vs. limit=10.0 +2024-07-28 04:36:21,787 INFO [train.py:1114] (1/4) Epoch 9, batch 1300, loss[loss=0.2485, simple_loss=0.3234, pruned_loss=0.08679, over 4763.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2937, pruned_loss=0.06207, over 938456.93 frames. 
], batch size: 19, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:36:23,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=110773.33333333333, ans=0.04949747468305833 +2024-07-28 04:36:26,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=110773.33333333333, ans=0.125 +2024-07-28 04:36:27,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110773.33333333333, ans=0.0 +2024-07-28 04:36:30,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=110786.66666666667, ans=0.2 +2024-07-28 04:36:55,440 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:36:57,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=110813.33333333333, ans=0.2 +2024-07-28 04:36:57,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.94 vs. limit=15.0 +2024-07-28 04:37:25,166 INFO [train.py:1114] (1/4) Epoch 9, batch 1350, loss[loss=0.1966, simple_loss=0.29, pruned_loss=0.05158, over 4762.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.294, pruned_loss=0.06168, over 940686.95 frames. ], batch size: 13, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:37:29,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=110840.0, ans=0.07 +2024-07-28 04:37:31,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110853.33333333333, ans=0.1 +2024-07-28 04:37:43,870 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.724e+01 6.443e+01 7.516e+01 1.167e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 04:37:53,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=110880.0, ans=0.025 +2024-07-28 04:37:53,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=110880.0, ans=0.125 +2024-07-28 04:37:59,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=110893.33333333333, ans=0.2 +2024-07-28 04:38:01,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=110893.33333333333, ans=0.125 +2024-07-28 04:38:02,852 INFO [train.py:1114] (1/4) Epoch 9, batch 1400, loss[loss=0.2218, simple_loss=0.2955, pruned_loss=0.07401, over 4701.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2937, pruned_loss=0.06186, over 942502.82 frames. 
], batch size: 11, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:38:19,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110920.0, ans=0.1 +2024-07-28 04:38:27,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=110933.33333333333, ans=0.0 +2024-07-28 04:38:33,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=110946.66666666667, ans=0.125 +2024-07-28 04:38:34,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0 +2024-07-28 04:38:38,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=110960.0, ans=0.0 +2024-07-28 04:38:44,497 INFO [train.py:1114] (1/4) Epoch 9, batch 1450, loss[loss=0.2125, simple_loss=0.3001, pruned_loss=0.06248, over 4697.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2937, pruned_loss=0.0617, over 942709.05 frames. ], batch size: 15, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:38:44,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.23 vs. limit=10.0 +2024-07-28 04:39:03,121 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 5.882e+01 6.432e+01 7.495e+01 9.959e+01, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 04:39:07,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111013.33333333333, ans=0.125 +2024-07-28 04:39:12,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=111026.66666666667, ans=0.0 +2024-07-28 04:39:12,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111026.66666666667, ans=0.125 +2024-07-28 04:39:19,212 INFO [train.py:1114] (1/4) Epoch 9, batch 1500, loss[loss=0.2068, simple_loss=0.3069, pruned_loss=0.05334, over 4806.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2938, pruned_loss=0.06132, over 942598.83 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:39:27,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=15.0 +2024-07-28 04:39:30,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111053.33333333333, ans=0.1 +2024-07-28 04:39:36,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=111066.66666666667, ans=0.125 +2024-07-28 04:39:39,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=111080.0, ans=0.125 +2024-07-28 04:39:42,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=111080.0, ans=10.0 +2024-07-28 04:39:48,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111093.33333333333, ans=0.125 +2024-07-28 04:39:56,248 INFO [train.py:1114] (1/4) Epoch 9, batch 1550, loss[loss=0.1954, simple_loss=0.2919, pruned_loss=0.04951, over 4907.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2929, pruned_loss=0.06089, over 938767.66 frames. ], batch size: 15, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:39:59,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=111106.66666666667, ans=0.07 +2024-07-28 04:40:14,624 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.781e+01 6.614e+01 7.335e+01 1.076e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-28 04:40:29,182 INFO [train.py:1114] (1/4) Epoch 9, batch 1600, loss[loss=0.1787, simple_loss=0.2792, pruned_loss=0.03916, over 4872.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2926, pruned_loss=0.06087, over 937686.33 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:40:29,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111173.33333333333, ans=0.125 +2024-07-28 04:40:30,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111173.33333333333, ans=0.0 +2024-07-28 04:40:34,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111173.33333333333, ans=0.125 +2024-07-28 04:40:41,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.14 vs. limit=15.0 +2024-07-28 04:40:52,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=111213.33333333333, ans=0.0 +2024-07-28 04:40:54,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=111213.33333333333, ans=0.025 +2024-07-28 04:41:03,419 INFO [train.py:1114] (1/4) Epoch 9, batch 1650, loss[loss=0.2399, simple_loss=0.3292, pruned_loss=0.07525, over 4664.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2933, pruned_loss=0.06152, over 937922.06 frames. ], batch size: 14, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:41:09,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111253.33333333333, ans=0.125 +2024-07-28 04:41:09,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.36 vs. limit=15.0 +2024-07-28 04:41:14,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=111253.33333333333, ans=0.125 +2024-07-28 04:41:21,886 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.810e+01 6.591e+01 7.411e+01 1.241e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 04:41:22,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.36 vs. limit=22.5 +2024-07-28 04:41:26,803 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:41:30,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. 
limit=15.0 +2024-07-28 04:41:38,618 INFO [train.py:1114] (1/4) Epoch 9, batch 1700, loss[loss=0.1769, simple_loss=0.2614, pruned_loss=0.04622, over 4697.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2928, pruned_loss=0.06124, over 939508.94 frames. ], batch size: 11, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:41:39,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.19 vs. limit=15.0 +2024-07-28 04:41:40,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.15 vs. limit=10.0 +2024-07-28 04:41:52,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=111320.0, ans=0.025 +2024-07-28 04:41:53,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.49 vs. limit=15.0 +2024-07-28 04:41:58,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=111333.33333333333, ans=0.125 +2024-07-28 04:42:08,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=111360.0, ans=0.125 +2024-07-28 04:42:13,649 INFO [train.py:1114] (1/4) Epoch 9, batch 1750, loss[loss=0.1648, simple_loss=0.2451, pruned_loss=0.04227, over 4786.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.292, pruned_loss=0.06079, over 940205.49 frames. ], batch size: 11, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:42:13,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=111373.33333333333, ans=0.125 +2024-07-28 04:42:18,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111373.33333333333, ans=0.125 +2024-07-28 04:42:18,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=111373.33333333333, ans=0.0 +2024-07-28 04:42:22,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=111386.66666666667, ans=0.2 +2024-07-28 04:42:32,287 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.540e+01 6.107e+01 6.918e+01 9.511e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 04:42:41,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=111426.66666666667, ans=0.035 +2024-07-28 04:42:47,084 INFO [train.py:1114] (1/4) Epoch 9, batch 1800, loss[loss=0.2184, simple_loss=0.3165, pruned_loss=0.06015, over 4623.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2921, pruned_loss=0.06088, over 940838.55 frames. 
], batch size: 13, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:42:54,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=111453.33333333333, ans=0.2 +2024-07-28 04:42:58,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=111453.33333333333, ans=0.95 +2024-07-28 04:43:08,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111480.0, ans=0.125 +2024-07-28 04:43:12,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=111480.0, ans=0.125 +2024-07-28 04:43:22,190 INFO [train.py:1114] (1/4) Epoch 9, batch 1850, loss[loss=0.2459, simple_loss=0.3258, pruned_loss=0.08299, over 4814.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.293, pruned_loss=0.06168, over 940612.12 frames. ], batch size: 14, lr: 8.51e-03, grad_scale: 32.0 +2024-07-28 04:43:24,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=111506.66666666667, ans=0.2 +2024-07-28 04:43:24,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=111506.66666666667, ans=0.2 +2024-07-28 04:43:31,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=111520.0, ans=0.0 +2024-07-28 04:43:34,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=111520.0, ans=0.125 +2024-07-28 04:43:40,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.81 vs. limit=12.0 +2024-07-28 04:43:41,628 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.789e+01 6.622e+01 8.000e+01 1.293e+02, threshold=1.324e+02, percent-clipped=1.0 +2024-07-28 04:43:42,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=111546.66666666667, ans=0.125 +2024-07-28 04:43:42,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.06 vs. limit=15.0 +2024-07-28 04:43:45,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111546.66666666667, ans=0.125 +2024-07-28 04:43:56,515 INFO [train.py:1114] (1/4) Epoch 9, batch 1900, loss[loss=0.199, simple_loss=0.2868, pruned_loss=0.05565, over 4661.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2936, pruned_loss=0.06175, over 942084.37 frames. ], batch size: 14, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:43:58,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=111573.33333333333, ans=0.0 +2024-07-28 04:44:17,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.29 vs. limit=10.0 +2024-07-28 04:44:29,955 INFO [train.py:1114] (1/4) Epoch 9, batch 1950, loss[loss=0.1458, simple_loss=0.2293, pruned_loss=0.03119, over 4896.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2945, pruned_loss=0.06172, over 944013.93 frames. 
], batch size: 13, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:44:36,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.86 vs. limit=15.0 +2024-07-28 04:44:45,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111666.66666666667, ans=0.1 +2024-07-28 04:44:50,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 5.906e+01 6.292e+01 6.984e+01 1.022e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 04:44:52,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=111680.0, ans=0.125 +2024-07-28 04:45:01,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111693.33333333333, ans=0.125 +2024-07-28 04:45:05,295 INFO [train.py:1114] (1/4) Epoch 9, batch 2000, loss[loss=0.1547, simple_loss=0.2358, pruned_loss=0.03684, over 4802.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2955, pruned_loss=0.06235, over 941594.20 frames. ], batch size: 11, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:45:05,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0 +2024-07-28 04:45:12,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=111720.0, ans=0.2 +2024-07-28 04:45:23,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=111733.33333333333, ans=0.2 +2024-07-28 04:45:24,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.98 vs. limit=15.0 +2024-07-28 04:45:25,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=111733.33333333333, ans=0.0 +2024-07-28 04:45:31,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=111746.66666666667, ans=0.0 +2024-07-28 04:45:41,489 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:45:42,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=111773.33333333333, ans=0.125 +2024-07-28 04:45:42,824 INFO [train.py:1114] (1/4) Epoch 9, batch 2050, loss[loss=0.1995, simple_loss=0.2757, pruned_loss=0.06162, over 4609.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2944, pruned_loss=0.06223, over 939580.50 frames. ], batch size: 11, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:45:45,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-07-28 04:45:47,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.49 vs. 
limit=15.0 +2024-07-28 04:45:50,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=111786.66666666667, ans=0.125 +2024-07-28 04:45:51,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=111786.66666666667, ans=0.035 +2024-07-28 04:45:53,338 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. limit=10.0 +2024-07-28 04:45:55,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111800.0, ans=0.1 +2024-07-28 04:46:01,608 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.743e+01 6.420e+01 7.803e+01 1.541e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-28 04:46:16,241 INFO [train.py:1114] (1/4) Epoch 9, batch 2100, loss[loss=0.1768, simple_loss=0.2718, pruned_loss=0.0409, over 4745.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2928, pruned_loss=0.06116, over 941317.68 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:46:18,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111840.0, ans=0.125 +2024-07-28 04:46:18,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=111840.0, ans=0.025 +2024-07-28 04:46:24,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.14 vs. limit=15.0 +2024-07-28 04:46:29,268 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.27 vs. limit=15.0 +2024-07-28 04:46:31,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.38 vs. limit=22.5 +2024-07-28 04:46:51,240 INFO [train.py:1114] (1/4) Epoch 9, batch 2150, loss[loss=0.1724, simple_loss=0.2612, pruned_loss=0.04175, over 4893.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2917, pruned_loss=0.06014, over 944469.99 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:46:55,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 04:47:03,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=111920.0, ans=0.0 +2024-07-28 04:47:12,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.20 vs. 
limit=15.0 +2024-07-28 04:47:12,509 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.843e+01 7.038e+01 8.009e+01 1.104e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-28 04:47:19,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111960.0, ans=0.125 +2024-07-28 04:47:20,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=111960.0, ans=0.125 +2024-07-28 04:47:20,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=111960.0, ans=0.2 +2024-07-28 04:47:21,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111960.0, ans=0.125 +2024-07-28 04:47:22,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111960.0, ans=0.1 +2024-07-28 04:47:26,782 INFO [train.py:1114] (1/4) Epoch 9, batch 2200, loss[loss=0.1948, simple_loss=0.2864, pruned_loss=0.05166, over 4809.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2915, pruned_loss=0.06025, over 943919.82 frames. ], batch size: 14, lr: 8.50e-03, grad_scale: 32.0 +2024-07-28 04:47:28,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=111973.33333333333, ans=0.0 +2024-07-28 04:47:29,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0 +2024-07-28 04:47:35,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=111986.66666666667, ans=0.0 +2024-07-28 04:47:36,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=111986.66666666667, ans=0.125 +2024-07-28 04:47:42,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=112000.0, ans=0.2 +2024-07-28 04:47:51,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112013.33333333333, ans=0.0 +2024-07-28 04:47:52,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=112013.33333333333, ans=0.125 +2024-07-28 04:47:53,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112013.33333333333, ans=0.1 +2024-07-28 04:47:58,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=112026.66666666667, ans=0.025 +2024-07-28 04:47:59,474 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:48:00,172 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:48:01,873 INFO [train.py:1114] (1/4) Epoch 9, batch 2250, loss[loss=0.2019, simple_loss=0.2892, pruned_loss=0.05731, over 4690.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2913, pruned_loss=0.06017, over 942397.11 frames. 
], batch size: 13, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:48:03,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=112040.0, ans=0.2 +2024-07-28 04:48:20,916 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 6.028e+01 6.824e+01 8.191e+01 1.096e+02, threshold=1.365e+02, percent-clipped=0.0 +2024-07-28 04:48:21,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=112080.0, ans=0.1 +2024-07-28 04:48:26,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112080.0, ans=0.1 +2024-07-28 04:48:31,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=112093.33333333333, ans=0.125 +2024-07-28 04:48:34,842 INFO [train.py:1114] (1/4) Epoch 9, batch 2300, loss[loss=0.1992, simple_loss=0.2698, pruned_loss=0.06426, over 4952.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2905, pruned_loss=0.06062, over 940130.29 frames. ], batch size: 12, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:48:40,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=112120.0, ans=0.125 +2024-07-28 04:48:44,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=112120.0, ans=0.025 +2024-07-28 04:49:37,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=112146.66666666667, ans=0.125 +2024-07-28 04:49:37,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=12.0 +2024-07-28 04:49:52,131 INFO [train.py:1114] (1/4) Epoch 9, batch 2350, loss[loss=0.1862, simple_loss=0.2783, pruned_loss=0.04709, over 4637.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2902, pruned_loss=0.06053, over 941851.10 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:49:52,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=112173.33333333333, ans=10.0 +2024-07-28 04:50:06,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=112200.0, ans=0.0 +2024-07-28 04:50:07,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=112200.0, ans=0.125 +2024-07-28 04:50:10,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=112200.0, ans=0.125 +2024-07-28 04:50:11,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 5.463e+01 6.108e+01 6.939e+01 1.035e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 04:50:13,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112213.33333333333, ans=0.1 +2024-07-28 04:50:22,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=112226.66666666667, ans=0.025 +2024-07-28 04:50:22,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. 
limit=6.0 +2024-07-28 04:50:25,455 INFO [train.py:1114] (1/4) Epoch 9, batch 2400, loss[loss=0.2101, simple_loss=0.2949, pruned_loss=0.06266, over 4637.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2918, pruned_loss=0.06102, over 941646.67 frames. ], batch size: 12, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:50:33,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112240.0, ans=0.1 +2024-07-28 04:50:33,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112240.0, ans=0.125 +2024-07-28 04:50:41,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112253.33333333333, ans=0.125 +2024-07-28 04:50:47,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112266.66666666667, ans=0.1 +2024-07-28 04:50:47,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0 +2024-07-28 04:50:52,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=112280.0, ans=0.125 +2024-07-28 04:50:59,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112293.33333333333, ans=0.0 +2024-07-28 04:51:05,886 INFO [train.py:1114] (1/4) Epoch 9, batch 2450, loss[loss=0.212, simple_loss=0.2974, pruned_loss=0.06329, over 4692.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2927, pruned_loss=0.06132, over 937252.62 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:51:07,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=112306.66666666667, ans=0.125 +2024-07-28 04:51:13,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112320.0, ans=0.125 +2024-07-28 04:51:14,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112320.0, ans=0.1 +2024-07-28 04:51:15,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-07-28 04:51:15,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=112320.0, ans=0.125 +2024-07-28 04:51:25,462 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.551e+01 5.862e+01 6.434e+01 7.688e+01 1.164e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 04:51:40,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112360.0, ans=0.125 +2024-07-28 04:51:40,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.85 vs. 
limit=15.0 +2024-07-28 04:51:42,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=112373.33333333333, ans=0.0 +2024-07-28 04:51:43,135 INFO [train.py:1114] (1/4) Epoch 9, batch 2500, loss[loss=0.2352, simple_loss=0.3108, pruned_loss=0.07973, over 4814.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2928, pruned_loss=0.06137, over 939255.06 frames. ], batch size: 14, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:51:47,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=112373.33333333333, ans=0.125 +2024-07-28 04:51:55,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=112386.66666666667, ans=0.2 +2024-07-28 04:52:12,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112426.66666666667, ans=0.1 +2024-07-28 04:52:16,731 INFO [train.py:1114] (1/4) Epoch 9, batch 2550, loss[loss=0.1606, simple_loss=0.2473, pruned_loss=0.03696, over 4804.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2919, pruned_loss=0.06097, over 938973.40 frames. ], batch size: 11, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:52:29,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112453.33333333333, ans=0.1 +2024-07-28 04:52:33,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=112466.66666666667, ans=0.125 +2024-07-28 04:52:33,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=112466.66666666667, ans=0.025 +2024-07-28 04:52:36,353 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.845e+01 6.430e+01 7.273e+01 1.102e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 04:52:50,638 INFO [train.py:1114] (1/4) Epoch 9, batch 2600, loss[loss=0.1712, simple_loss=0.2632, pruned_loss=0.03966, over 4885.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.292, pruned_loss=0.06117, over 938548.43 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:52:59,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=112520.0, ans=0.5 +2024-07-28 04:53:04,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.30 vs. limit=12.0 +2024-07-28 04:53:05,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112533.33333333333, ans=0.125 +2024-07-28 04:53:05,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.86 vs. limit=15.0 +2024-07-28 04:53:17,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.31 vs. limit=15.0 +2024-07-28 04:53:19,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112560.0, ans=0.0 +2024-07-28 04:53:25,567 INFO [train.py:1114] (1/4) Epoch 9, batch 2650, loss[loss=0.2244, simple_loss=0.3124, pruned_loss=0.06821, over 4606.00 frames. 
], tot_loss[loss=0.2083, simple_loss=0.2934, pruned_loss=0.06164, over 940336.17 frames. ], batch size: 16, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:53:36,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112586.66666666667, ans=0.1 +2024-07-28 04:53:37,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.84 vs. limit=22.5 +2024-07-28 04:53:40,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=112600.0, ans=0.05 +2024-07-28 04:53:43,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112600.0, ans=0.1 +2024-07-28 04:53:44,510 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.626e+01 6.228e+01 7.272e+01 1.238e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 04:53:54,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=112626.66666666667, ans=0.125 +2024-07-28 04:53:54,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=12.0 +2024-07-28 04:53:55,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112626.66666666667, ans=0.125 +2024-07-28 04:53:58,567 INFO [train.py:1114] (1/4) Epoch 9, batch 2700, loss[loss=0.2053, simple_loss=0.3002, pruned_loss=0.05521, over 4747.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2942, pruned_loss=0.06228, over 939919.43 frames. ], batch size: 14, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:53:58,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.35 vs. limit=22.5 +2024-07-28 04:54:11,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.19 vs. limit=15.0 +2024-07-28 04:54:18,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=112680.0, ans=0.0 +2024-07-28 04:54:18,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112680.0, ans=0.125 +2024-07-28 04:54:26,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=112693.33333333333, ans=0.125 +2024-07-28 04:54:28,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112693.33333333333, ans=0.125 +2024-07-28 04:54:30,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=15.0 +2024-07-28 04:54:32,614 INFO [train.py:1114] (1/4) Epoch 9, batch 2750, loss[loss=0.2012, simple_loss=0.2842, pruned_loss=0.05911, over 4703.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2933, pruned_loss=0.06181, over 939505.24 frames. 
], batch size: 12, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:54:33,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112706.66666666667, ans=0.125 +2024-07-28 04:54:50,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.92 vs. limit=22.5 +2024-07-28 04:54:51,674 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 6.173e+01 6.885e+01 7.984e+01 1.102e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 04:55:03,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112760.0, ans=0.1 +2024-07-28 04:55:05,838 INFO [train.py:1114] (1/4) Epoch 9, batch 2800, loss[loss=0.3008, simple_loss=0.3552, pruned_loss=0.1233, over 3186.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2935, pruned_loss=0.06179, over 937449.61 frames. ], batch size: 35, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:55:07,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.46 vs. limit=15.0 +2024-07-28 04:55:07,235 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:55:14,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112786.66666666667, ans=0.125 +2024-07-28 04:55:15,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112786.66666666667, ans=0.125 +2024-07-28 04:55:16,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.17 vs. limit=22.5 +2024-07-28 04:55:23,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112800.0, ans=0.1 +2024-07-28 04:55:32,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=112826.66666666667, ans=0.0 +2024-07-28 04:55:32,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=112826.66666666667, ans=0.0 +2024-07-28 04:55:36,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=112826.66666666667, ans=0.0 +2024-07-28 04:55:39,182 INFO [train.py:1114] (1/4) Epoch 9, batch 2850, loss[loss=0.193, simple_loss=0.2738, pruned_loss=0.05609, over 4962.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2941, pruned_loss=0.06237, over 935605.99 frames. 
], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:55:57,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=112866.66666666667, ans=0.125 +2024-07-28 04:55:58,249 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.781e+01 6.339e+01 7.378e+01 1.144e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 04:56:11,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112906.66666666667, ans=0.125 +2024-07-28 04:56:12,160 INFO [train.py:1114] (1/4) Epoch 9, batch 2900, loss[loss=0.2438, simple_loss=0.3187, pruned_loss=0.08446, over 4838.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.295, pruned_loss=0.06221, over 939507.13 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:14,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=112906.66666666667, ans=0.025 +2024-07-28 04:56:33,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=112946.66666666667, ans=0.025 +2024-07-28 04:56:37,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.46 vs. limit=15.0 +2024-07-28 04:56:37,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-07-28 04:56:40,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0 +2024-07-28 04:56:47,479 INFO [train.py:1114] (1/4) Epoch 9, batch 2950, loss[loss=0.1853, simple_loss=0.2764, pruned_loss=0.04707, over 4699.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2932, pruned_loss=0.06167, over 939223.73 frames. ], batch size: 12, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:59,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=112986.66666666667, ans=0.0 +2024-07-28 04:57:06,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=113000.0, ans=0.0 +2024-07-28 04:57:09,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=113000.0, ans=0.2 +2024-07-28 04:57:10,754 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.914e+01 6.880e+01 7.946e+01 1.236e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 04:57:11,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=113013.33333333333, ans=0.1 +2024-07-28 04:57:24,699 INFO [train.py:1114] (1/4) Epoch 9, batch 3000, loss[loss=0.1868, simple_loss=0.2694, pruned_loss=0.05214, over 4766.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2931, pruned_loss=0.06152, over 938515.38 frames. 
], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:57:24,699 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 04:57:33,522 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.5207, 3.2077, 5.0829, 2.7488], device='cuda:1') +2024-07-28 04:57:37,296 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.1766, simple_loss=0.2807, pruned_loss=0.03626, over 944034.00 frames. +2024-07-28 04:57:37,296 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 04:57:43,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113053.33333333333, ans=0.2 +2024-07-28 04:57:46,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=113053.33333333333, ans=0.125 +2024-07-28 04:57:50,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=15.0 +2024-07-28 04:57:51,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=15.0 +2024-07-28 04:57:51,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=113066.66666666667, ans=0.0 +2024-07-28 04:57:53,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113066.66666666667, ans=0.0 +2024-07-28 04:57:54,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=113066.66666666667, ans=0.125 +2024-07-28 04:57:54,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.11 vs. limit=15.0 +2024-07-28 04:58:05,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113093.33333333333, ans=0.125 +2024-07-28 04:58:10,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0 +2024-07-28 04:58:11,103 INFO [train.py:1114] (1/4) Epoch 9, batch 3050, loss[loss=0.1793, simple_loss=0.2712, pruned_loss=0.04371, over 4637.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2948, pruned_loss=0.06297, over 937488.19 frames. 
], batch size: 12, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:15,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113106.66666666667, ans=0.125 +2024-07-28 04:58:20,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=113120.0, ans=0.125 +2024-07-28 04:58:32,583 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.528e+01 6.161e+01 6.934e+01 1.105e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 04:58:41,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=113160.0, ans=0.0 +2024-07-28 04:58:44,331 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:58:47,568 INFO [train.py:1114] (1/4) Epoch 9, batch 3100, loss[loss=0.2285, simple_loss=0.3134, pruned_loss=0.0718, over 4632.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2937, pruned_loss=0.06265, over 938161.00 frames. ], batch size: 16, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:49,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113173.33333333333, ans=0.125 +2024-07-28 04:58:56,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113186.66666666667, ans=0.1 +2024-07-28 04:59:02,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=113200.0, ans=0.125 +2024-07-28 04:59:17,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=113226.66666666667, ans=0.125 +2024-07-28 04:59:17,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=113226.66666666667, ans=0.125 +2024-07-28 04:59:22,194 INFO [train.py:1114] (1/4) Epoch 9, batch 3150, loss[loss=0.2252, simple_loss=0.3105, pruned_loss=0.06993, over 4847.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.293, pruned_loss=0.06185, over 938676.66 frames. 
], batch size: 18, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:24,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=113240.0, ans=0.0 +2024-07-28 04:59:24,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=113240.0, ans=0.125 +2024-07-28 04:59:37,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=113266.66666666667, ans=0.0 +2024-07-28 04:59:38,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=113266.66666666667, ans=0.0 +2024-07-28 04:59:41,417 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.794e+01 6.244e+01 6.965e+01 1.084e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 04:59:42,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=113280.0, ans=0.0 +2024-07-28 04:59:55,508 INFO [train.py:1114] (1/4) Epoch 9, batch 3200, loss[loss=0.1751, simple_loss=0.256, pruned_loss=0.04707, over 4829.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2922, pruned_loss=0.06141, over 940083.95 frames. ], batch size: 13, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:58,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.69 vs. limit=15.0 +2024-07-28 05:00:00,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=113306.66666666667, ans=0.0 +2024-07-28 05:00:01,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113320.0, ans=0.1 +2024-07-28 05:00:21,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=113346.66666666667, ans=10.0 +2024-07-28 05:00:23,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=113346.66666666667, ans=0.2 +2024-07-28 05:00:32,258 INFO [train.py:1114] (1/4) Epoch 9, batch 3250, loss[loss=0.2466, simple_loss=0.3388, pruned_loss=0.07726, over 4929.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2939, pruned_loss=0.06204, over 940877.04 frames. 
], batch size: 14, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:00:34,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=113373.33333333333, ans=0.0 +2024-07-28 05:00:37,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=113373.33333333333, ans=0.2 +2024-07-28 05:00:37,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113373.33333333333, ans=0.125 +2024-07-28 05:00:43,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=113386.66666666667, ans=0.0 +2024-07-28 05:00:48,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=113400.0, ans=0.0 +2024-07-28 05:00:51,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.809e+01 6.527e+01 7.156e+01 1.090e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 05:00:56,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.56 vs. limit=10.0 +2024-07-28 05:01:05,601 INFO [train.py:1114] (1/4) Epoch 9, batch 3300, loss[loss=0.2396, simple_loss=0.3156, pruned_loss=0.08183, over 4735.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2925, pruned_loss=0.06176, over 940788.98 frames. ], batch size: 19, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:20,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=113466.66666666667, ans=0.2 +2024-07-28 05:01:20,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.28 vs. limit=15.0 +2024-07-28 05:01:27,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=113480.0, ans=0.04949747468305833 +2024-07-28 05:01:39,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=113506.66666666667, ans=0.0 +2024-07-28 05:01:40,562 INFO [train.py:1114] (1/4) Epoch 9, batch 3350, loss[loss=0.2078, simple_loss=0.2883, pruned_loss=0.06367, over 4624.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2935, pruned_loss=0.06212, over 938171.47 frames. 
], batch size: 17, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:46,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=113520.0, ans=0.125 +2024-07-28 05:01:47,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=113520.0, ans=0.0 +2024-07-28 05:01:50,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=113520.0, ans=0.0 +2024-07-28 05:01:54,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=113533.33333333333, ans=0.5 +2024-07-28 05:01:55,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113533.33333333333, ans=0.0 +2024-07-28 05:01:58,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.77 vs. limit=22.5 +2024-07-28 05:02:00,181 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.762e+01 6.208e+01 6.963e+01 1.151e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 05:02:22,811 INFO [train.py:1114] (1/4) Epoch 9, batch 3400, loss[loss=0.1576, simple_loss=0.2393, pruned_loss=0.03794, over 4818.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2927, pruned_loss=0.06171, over 937000.34 frames. ], batch size: 11, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:02:24,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113573.33333333333, ans=0.0 +2024-07-28 05:02:33,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=113586.66666666667, ans=0.0 +2024-07-28 05:02:37,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:38,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:41,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:43,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.94 vs. limit=15.0 +2024-07-28 05:02:44,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=113613.33333333333, ans=0.2 +2024-07-28 05:02:53,647 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:02:54,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=113626.66666666667, ans=0.125 +2024-07-28 05:02:56,780 INFO [train.py:1114] (1/4) Epoch 9, batch 3450, loss[loss=0.2316, simple_loss=0.3208, pruned_loss=0.07124, over 4680.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2931, pruned_loss=0.06182, over 937337.25 frames. 
], batch size: 19, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:01,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=113640.0, ans=0.125 +2024-07-28 05:03:02,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=113640.0, ans=0.125 +2024-07-28 05:03:12,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113666.66666666667, ans=0.125 +2024-07-28 05:03:16,034 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 6.050e+01 6.762e+01 7.636e+01 1.132e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 05:03:21,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=113680.0, ans=0.0 +2024-07-28 05:03:22,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=113680.0, ans=0.125 +2024-07-28 05:03:22,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=113680.0, ans=0.025 +2024-07-28 05:03:29,917 INFO [train.py:1114] (1/4) Epoch 9, batch 3500, loss[loss=0.1875, simple_loss=0.2607, pruned_loss=0.05719, over 4959.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2924, pruned_loss=0.06144, over 938089.66 frames. ], batch size: 12, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:52,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.23 vs. limit=22.5 +2024-07-28 05:04:02,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=113760.0, ans=0.0 +2024-07-28 05:04:06,621 INFO [train.py:1114] (1/4) Epoch 9, batch 3550, loss[loss=0.2303, simple_loss=0.3218, pruned_loss=0.06936, over 4661.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2927, pruned_loss=0.06146, over 938499.48 frames. ], batch size: 14, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:08,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.36 vs. 
limit=22.5 +2024-07-28 05:04:18,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113786.66666666667, ans=0.125 +2024-07-28 05:04:18,963 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:04:20,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=113800.0, ans=0.125 +2024-07-28 05:04:24,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113800.0, ans=0.125 +2024-07-28 05:04:25,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=113800.0, ans=0.125 +2024-07-28 05:04:26,149 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.609e+01 6.345e+01 7.145e+01 1.049e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 05:04:26,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=113813.33333333333, ans=15.0 +2024-07-28 05:04:30,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=113813.33333333333, ans=0.125 +2024-07-28 05:04:30,901 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:04:40,131 INFO [train.py:1114] (1/4) Epoch 9, batch 3600, loss[loss=0.1705, simple_loss=0.2557, pruned_loss=0.0427, over 4969.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2917, pruned_loss=0.06107, over 940253.97 frames. ], batch size: 13, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:44,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113840.0, ans=0.125 +2024-07-28 05:04:47,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.68 vs. limit=10.0 +2024-07-28 05:04:49,285 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.44 vs. limit=22.5 +2024-07-28 05:04:49,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=113853.33333333333, ans=0.2 +2024-07-28 05:04:56,924 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.42 vs. limit=22.5 +2024-07-28 05:05:01,198 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:05:13,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=113906.66666666667, ans=0.0 +2024-07-28 05:05:13,672 INFO [train.py:1114] (1/4) Epoch 9, batch 3650, loss[loss=0.2212, simple_loss=0.3078, pruned_loss=0.0673, over 4897.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.06065, over 941168.63 frames. ], batch size: 15, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:05:13,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. 
limit=15.0 +2024-07-28 05:05:21,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.98 vs. limit=22.5 +2024-07-28 05:05:28,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=113933.33333333333, ans=0.025 +2024-07-28 05:05:32,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.91 vs. limit=10.0 +2024-07-28 05:05:32,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=113933.33333333333, ans=0.125 +2024-07-28 05:05:32,931 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.862e+01 6.678e+01 8.090e+01 1.321e+02, threshold=1.336e+02, percent-clipped=1.0 +2024-07-28 05:05:47,150 INFO [train.py:1114] (1/4) Epoch 9, batch 3700, loss[loss=0.2039, simple_loss=0.299, pruned_loss=0.05433, over 4929.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2908, pruned_loss=0.06006, over 942267.29 frames. ], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:05:52,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=113986.66666666667, ans=0.2 +2024-07-28 05:06:02,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5 +2024-07-28 05:06:13,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=114013.33333333333, ans=0.035 +2024-07-28 05:06:21,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=114026.66666666667, ans=0.125 +2024-07-28 05:06:24,416 INFO [train.py:1114] (1/4) Epoch 9, batch 3750, loss[loss=0.2412, simple_loss=0.3119, pruned_loss=0.08519, over 4812.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.29, pruned_loss=0.05989, over 943726.46 frames. ], batch size: 11, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:06:41,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=114066.66666666667, ans=0.0 +2024-07-28 05:06:42,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.99 vs. limit=15.0 +2024-07-28 05:06:44,317 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.778e+01 6.645e+01 7.408e+01 1.039e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:06:58,228 INFO [train.py:1114] (1/4) Epoch 9, batch 3800, loss[loss=0.2156, simple_loss=0.3, pruned_loss=0.06556, over 4809.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2909, pruned_loss=0.06057, over 941397.77 frames. 
], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:08,539 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:07:11,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=114120.0, ans=0.0 +2024-07-28 05:07:26,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=114160.0, ans=0.0 +2024-07-28 05:07:33,841 INFO [train.py:1114] (1/4) Epoch 9, batch 3850, loss[loss=0.2073, simple_loss=0.3001, pruned_loss=0.05723, over 4626.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2903, pruned_loss=0.06041, over 941980.31 frames. ], batch size: 16, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:37,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=114173.33333333333, ans=0.125 +2024-07-28 05:07:46,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114186.66666666667, ans=0.125 +2024-07-28 05:07:53,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=114200.0, ans=0.2 +2024-07-28 05:07:57,332 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.654e+01 6.534e+01 7.463e+01 1.189e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 05:07:58,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=114213.33333333333, ans=0.2 +2024-07-28 05:08:02,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=114213.33333333333, ans=0.025 +2024-07-28 05:08:11,616 INFO [train.py:1114] (1/4) Epoch 9, batch 3900, loss[loss=0.1804, simple_loss=0.275, pruned_loss=0.04288, over 4806.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.29, pruned_loss=0.05977, over 942258.46 frames. ], batch size: 14, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:14,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=114240.0, ans=0.125 +2024-07-28 05:08:23,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114253.33333333333, ans=0.125 +2024-07-28 05:08:34,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=114280.0, ans=0.0 +2024-07-28 05:08:45,013 INFO [train.py:1114] (1/4) Epoch 9, batch 3950, loss[loss=0.2149, simple_loss=0.2982, pruned_loss=0.06586, over 4821.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2896, pruned_loss=0.0597, over 944322.59 frames. ], batch size: 16, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:50,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=114306.66666666667, ans=0.125 +2024-07-28 05:09:04,058 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.786e+01 6.190e+01 6.950e+01 9.125e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 05:09:06,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. 
limit=15.0 +2024-07-28 05:09:12,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.25 vs. limit=15.0 +2024-07-28 05:09:14,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114360.0, ans=0.125 +2024-07-28 05:09:15,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.96 vs. limit=10.0 +2024-07-28 05:09:16,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=114360.0, ans=0.0 +2024-07-28 05:09:17,987 INFO [train.py:1114] (1/4) Epoch 9, batch 4000, loss[loss=0.1743, simple_loss=0.2604, pruned_loss=0.04407, over 4776.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2898, pruned_loss=0.0602, over 941473.60 frames. ], batch size: 12, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:20,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.94 vs. limit=10.0 +2024-07-28 05:09:29,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=114386.66666666667, ans=0.025 +2024-07-28 05:09:45,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.44 vs. limit=15.0 +2024-07-28 05:09:46,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=114426.66666666667, ans=0.0 +2024-07-28 05:09:53,448 INFO [train.py:1114] (1/4) Epoch 9, batch 4050, loss[loss=0.2588, simple_loss=0.3365, pruned_loss=0.09059, over 3193.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2896, pruned_loss=0.06026, over 940062.74 frames. ], batch size: 35, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:54,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=114440.0, ans=0.125 +2024-07-28 05:09:55,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.41 vs. 
limit=15.0 +2024-07-28 05:10:02,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=114453.33333333333, ans=0.125 +2024-07-28 05:10:12,635 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.800e+01 6.025e+01 6.921e+01 7.969e+01 1.217e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 05:10:12,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=114480.0, ans=0.0 +2024-07-28 05:10:13,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=114480.0, ans=0.2 +2024-07-28 05:10:18,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=114480.0, ans=0.025 +2024-07-28 05:10:23,523 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:10:23,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=114493.33333333333, ans=0.125 +2024-07-28 05:10:25,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=114493.33333333333, ans=0.025 +2024-07-28 05:10:26,732 INFO [train.py:1114] (1/4) Epoch 9, batch 4100, loss[loss=0.1934, simple_loss=0.2785, pruned_loss=0.0541, over 4895.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2906, pruned_loss=0.06106, over 938755.65 frames. ], batch size: 15, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:10:27,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.79 vs. limit=15.0 +2024-07-28 05:10:30,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=114506.66666666667, ans=0.125 +2024-07-28 05:10:37,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114520.0, ans=0.1 +2024-07-28 05:10:38,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=114520.0, ans=0.125 +2024-07-28 05:10:52,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=114546.66666666667, ans=0.125 +2024-07-28 05:10:55,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=114546.66666666667, ans=0.125 +2024-07-28 05:11:02,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=114560.0, ans=0.125 +2024-07-28 05:11:11,951 INFO [train.py:1114] (1/4) Epoch 9, batch 4150, loss[loss=0.2149, simple_loss=0.3072, pruned_loss=0.06127, over 4826.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2893, pruned_loss=0.06034, over 938261.22 frames. ], batch size: 13, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:11:12,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.60 vs. 
limit=12.0 +2024-07-28 05:11:16,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-28 05:11:20,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.90 vs. limit=12.0 +2024-07-28 05:11:25,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114600.0, ans=0.1 +2024-07-28 05:11:31,352 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.561e+01 6.118e+01 6.990e+01 1.145e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 05:11:36,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114613.33333333333, ans=0.125 +2024-07-28 05:11:38,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.37 vs. limit=10.0 +2024-07-28 05:11:45,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=114626.66666666667, ans=0.125 +2024-07-28 05:11:52,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=114626.66666666667, ans=0.125 +2024-07-28 05:11:52,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114626.66666666667, ans=0.125 +2024-07-28 05:12:05,253 INFO [train.py:1114] (1/4) Epoch 9, batch 4200, loss[loss=0.24, simple_loss=0.3204, pruned_loss=0.07983, over 4902.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2902, pruned_loss=0.06062, over 939394.59 frames. ], batch size: 15, lr: 8.40e-03, grad_scale: 64.0 +2024-07-28 05:12:30,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.60 vs. limit=15.0 +2024-07-28 05:12:36,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=114653.33333333333, ans=0.125 +2024-07-28 05:12:46,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114680.0, ans=0.125 +2024-07-28 05:12:46,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.19 vs. limit=22.5 +2024-07-28 05:12:46,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 05:12:55,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=114693.33333333333, ans=0.125 +2024-07-28 05:12:58,963 INFO [train.py:1114] (1/4) Epoch 9, batch 4250, loss[loss=0.1931, simple_loss=0.2739, pruned_loss=0.05613, over 4633.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2903, pruned_loss=0.06008, over 940568.89 frames. 
], batch size: 12, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:13:30,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=114733.33333333333, ans=0.1 +2024-07-28 05:13:37,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=114733.33333333333, ans=0.125 +2024-07-28 05:13:49,189 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.532e+01 6.240e+01 7.121e+01 1.493e+02, threshold=1.248e+02, percent-clipped=1.0 +2024-07-28 05:13:57,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114760.0, ans=0.1 +2024-07-28 05:14:02,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.66 vs. limit=22.5 +2024-07-28 05:14:04,261 INFO [train.py:1114] (1/4) Epoch 9, batch 4300, loss[loss=0.1896, simple_loss=0.2728, pruned_loss=0.05319, over 4756.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.06077, over 940175.80 frames. ], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:11,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=114786.66666666667, ans=0.025 +2024-07-28 05:14:17,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-28 05:14:19,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=114800.0, ans=0.125 +2024-07-28 05:14:30,347 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=7.784e-03 +2024-07-28 05:14:36,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=114826.66666666667, ans=0.125 +2024-07-28 05:14:39,640 INFO [train.py:1114] (1/4) Epoch 9, batch 4350, loss[loss=0.217, simple_loss=0.2973, pruned_loss=0.06835, over 4758.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.292, pruned_loss=0.06089, over 941156.41 frames. ], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:51,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=114853.33333333333, ans=0.125 +2024-07-28 05:15:01,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.655e+01 6.124e+01 6.925e+01 1.522e+02, threshold=1.225e+02, percent-clipped=1.0 +2024-07-28 05:15:12,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=114893.33333333333, ans=0.125 +2024-07-28 05:15:14,980 INFO [train.py:1114] (1/4) Epoch 9, batch 4400, loss[loss=0.2021, simple_loss=0.2958, pruned_loss=0.05425, over 4811.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2915, pruned_loss=0.06068, over 940900.22 frames. ], batch size: 14, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:29,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. 
limit=6.0 +2024-07-28 05:15:37,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=114946.66666666667, ans=0.025 +2024-07-28 05:15:37,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114946.66666666667, ans=0.125 +2024-07-28 05:15:39,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=114946.66666666667, ans=0.125 +2024-07-28 05:15:42,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=114960.0, ans=0.125 +2024-07-28 05:15:49,341 INFO [train.py:1114] (1/4) Epoch 9, batch 4450, loss[loss=0.1871, simple_loss=0.2635, pruned_loss=0.05535, over 4938.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2931, pruned_loss=0.06174, over 938770.51 frames. ], batch size: 12, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:54,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=114973.33333333333, ans=0.125 +2024-07-28 05:15:59,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=114986.66666666667, ans=0.04949747468305833 +2024-07-28 05:16:09,215 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.739e+01 6.410e+01 7.552e+01 1.027e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:16:24,260 INFO [train.py:1114] (1/4) Epoch 9, batch 4500, loss[loss=0.2695, simple_loss=0.3608, pruned_loss=0.08909, over 4737.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2941, pruned_loss=0.06198, over 938034.94 frames. ], batch size: 14, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:26,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.96 vs. limit=10.0 +2024-07-28 05:16:38,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115066.66666666667, ans=0.125 +2024-07-28 05:16:48,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=115080.0, ans=0.2 +2024-07-28 05:16:58,145 INFO [train.py:1114] (1/4) Epoch 9, batch 4550, loss[loss=0.222, simple_loss=0.2987, pruned_loss=0.07265, over 4891.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2934, pruned_loss=0.06158, over 940016.73 frames. 
], batch size: 13, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:58,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=115106.66666666667, ans=0.125 +2024-07-28 05:17:06,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=115120.0, ans=0.125 +2024-07-28 05:17:08,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=115120.0, ans=0.025 +2024-07-28 05:17:10,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=115120.0, ans=0.0 +2024-07-28 05:17:30,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=115133.33333333333, ans=0.0 +2024-07-28 05:17:38,911 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.875e+01 5.713e+01 6.359e+01 7.183e+01 1.180e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 05:17:46,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115160.0, ans=0.125 +2024-07-28 05:17:48,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.70 vs. limit=15.0 +2024-07-28 05:17:52,565 INFO [train.py:1114] (1/4) Epoch 9, batch 4600, loss[loss=0.2163, simple_loss=0.3171, pruned_loss=0.05776, over 4508.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2934, pruned_loss=0.06181, over 938386.95 frames. ], batch size: 21, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:18:06,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=115186.66666666667, ans=0.2 +2024-07-28 05:19:06,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=115213.33333333333, ans=0.125 +2024-07-28 05:19:26,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=115226.66666666667, ans=0.125 +2024-07-28 05:19:46,528 INFO [train.py:1114] (1/4) Epoch 9, batch 4650, loss[loss=0.2059, simple_loss=0.2864, pruned_loss=0.06268, over 4854.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2935, pruned_loss=0.06174, over 939941.02 frames. ], batch size: 16, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:19:47,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=115240.0, ans=0.125 +2024-07-28 05:19:49,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=115240.0, ans=0.125 +2024-07-28 05:19:49,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=115240.0, ans=0.125 +2024-07-28 05:19:49,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.64 vs. 
limit=15.0 +2024-07-28 05:19:50,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=115240.0, ans=10.0 +2024-07-28 05:19:59,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115253.33333333333, ans=0.125 +2024-07-28 05:20:03,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=115266.66666666667, ans=0.09899494936611666 +2024-07-28 05:20:20,851 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.631e+01 6.409e+01 7.272e+01 9.674e+01, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:20:29,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=15.0 +2024-07-28 05:20:30,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115293.33333333333, ans=0.125 +2024-07-28 05:20:32,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=115293.33333333333, ans=0.0 +2024-07-28 05:20:33,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=115293.33333333333, ans=0.0 +2024-07-28 05:20:39,135 INFO [train.py:1114] (1/4) Epoch 9, batch 4700, loss[loss=0.1813, simple_loss=0.2602, pruned_loss=0.05115, over 4698.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2927, pruned_loss=0.06164, over 936897.31 frames. ], batch size: 11, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:20:42,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.71 vs. limit=15.0 +2024-07-28 05:20:44,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=115306.66666666667, ans=0.0 +2024-07-28 05:20:51,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.94 vs. limit=22.5 +2024-07-28 05:20:52,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=115333.33333333333, ans=0.0 +2024-07-28 05:20:53,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=115333.33333333333, ans=0.125 +2024-07-28 05:20:57,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=115333.33333333333, ans=0.09899494936611666 +2024-07-28 05:21:04,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=115346.66666666667, ans=0.125 +2024-07-28 05:21:08,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=115360.0, ans=0.0 +2024-07-28 05:21:09,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.92 vs. limit=15.0 +2024-07-28 05:21:12,801 INFO [train.py:1114] (1/4) Epoch 9, batch 4750, loss[loss=0.2038, simple_loss=0.2932, pruned_loss=0.05719, over 4509.00 frames. 
], tot_loss[loss=0.2079, simple_loss=0.2927, pruned_loss=0.06157, over 935361.32 frames. ], batch size: 21, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:12,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=115373.33333333333, ans=0.125 +2024-07-28 05:21:24,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115386.66666666667, ans=0.1 +2024-07-28 05:21:28,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115400.0, ans=0.1 +2024-07-28 05:21:31,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.23 vs. limit=15.0 +2024-07-28 05:21:34,803 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.742e+01 6.606e+01 7.346e+01 1.206e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 05:21:37,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115413.33333333333, ans=0.125 +2024-07-28 05:21:50,044 INFO [train.py:1114] (1/4) Epoch 9, batch 4800, loss[loss=0.1879, simple_loss=0.2764, pruned_loss=0.04976, over 4698.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2921, pruned_loss=0.06203, over 932847.61 frames. ], batch size: 13, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:51,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=115440.0, ans=0.0 +2024-07-28 05:22:08,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=115453.33333333333, ans=0.2 +2024-07-28 05:22:18,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=115466.66666666667, ans=0.125 +2024-07-28 05:22:18,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=115466.66666666667, ans=0.2 +2024-07-28 05:22:39,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.86 vs. limit=22.5 +2024-07-28 05:22:41,485 INFO [train.py:1114] (1/4) Epoch 9, batch 4850, loss[loss=0.1988, simple_loss=0.2939, pruned_loss=0.05188, over 4735.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.292, pruned_loss=0.06176, over 931812.42 frames. ], batch size: 14, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:22:55,692 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.55 vs. 
limit=15.0 +2024-07-28 05:22:56,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=115520.0, ans=0.2 +2024-07-28 05:22:58,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=115520.0, ans=0.0 +2024-07-28 05:23:04,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=115533.33333333333, ans=0.0 +2024-07-28 05:23:07,385 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.778e+01 6.431e+01 7.298e+01 1.043e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 05:23:09,491 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:23:20,761 INFO [train.py:1114] (1/4) Epoch 9, batch 4900, loss[loss=0.2369, simple_loss=0.3204, pruned_loss=0.07673, over 4758.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2916, pruned_loss=0.06119, over 933662.77 frames. ], batch size: 13, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:23:30,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=115573.33333333333, ans=0.1 +2024-07-28 05:23:42,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115600.0, ans=0.1 +2024-07-28 05:23:48,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=115613.33333333333, ans=0.125 +2024-07-28 05:23:48,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=115613.33333333333, ans=0.025 +2024-07-28 05:24:10,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.32 vs. limit=15.0 +2024-07-28 05:24:11,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115626.66666666667, ans=0.1 +2024-07-28 05:24:12,916 INFO [train.py:1114] (1/4) Epoch 9, batch 4950, loss[loss=0.2894, simple_loss=0.353, pruned_loss=0.1128, over 3573.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2925, pruned_loss=0.06137, over 931308.66 frames. ], batch size: 35, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:24:13,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-07-28 05:24:16,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.07 vs. 
limit=15.0
+2024-07-28 05:24:20,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=115653.33333333333, ans=0.125
+2024-07-28 05:24:26,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=115653.33333333333, ans=0.0
+2024-07-28 05:24:35,818 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+01 5.657e+01 6.231e+01 6.947e+01 1.249e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 05:24:45,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=115680.0, ans=0.125
+2024-07-28 05:25:02,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=115693.33333333333, ans=0.125
+2024-07-28 05:25:12,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=115693.33333333333, ans=0.0
+2024-07-28 05:25:13,964 INFO [train.py:1114] (1/4) Epoch 9, batch 5000, loss[loss=0.2395, simple_loss=0.3334, pruned_loss=0.07284, over 4670.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2917, pruned_loss=0.06102, over 935082.76 frames. ], batch size: 14, lr: 8.36e-03, grad_scale: 32.0
+2024-07-28 05:25:33,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115706.66666666667, ans=0.125
+2024-07-28 05:25:48,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.43 vs. limit=15.0
+2024-07-28 05:25:49,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=115720.0, ans=10.0
+2024-07-28 05:25:49,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115720.0, ans=0.125
+2024-07-28 05:25:51,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115720.0, ans=0.0
+2024-07-28 05:25:53,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115720.0, ans=0.0
+2024-07-28 05:25:53,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115720.0, ans=0.125
+2024-07-28 05:25:53,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=115720.0, ans=0.125
+2024-07-28 05:25:59,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115733.33333333333, ans=0.1
+2024-07-28 05:26:14,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=115746.66666666667, ans=0.125
+2024-07-28 05:26:27,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.00 vs. limit=15.0
+2024-07-28 05:26:44,472 INFO [train.py:1114] (1/4) Epoch 9, batch 5050, loss[loss=0.1868, simple_loss=0.2671, pruned_loss=0.05326, over 4861.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2917, pruned_loss=0.06115, over 937973.19 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0
+2024-07-28 05:26:45,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=115773.33333333333, ans=0.0
+2024-07-28 05:27:03,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=115786.66666666667, ans=0.125
+2024-07-28 05:27:09,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.63 vs. limit=12.0
+2024-07-28 05:27:12,539 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0
+2024-07-28 05:27:28,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=15.0
+2024-07-28 05:27:29,887 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 5.915e+01 6.647e+01 7.788e+01 1.077e+02, threshold=1.329e+02, percent-clipped=0.0
+2024-07-28 05:27:34,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=115813.33333333333, ans=0.125
+2024-07-28 05:27:44,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115826.66666666667, ans=0.1
+2024-07-28 05:27:48,483 INFO [train.py:1114] (1/4) Epoch 9, batch 5100, loss[loss=0.2128, simple_loss=0.2937, pruned_loss=0.0659, over 4775.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2926, pruned_loss=0.06167, over 935753.60 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0
+2024-07-28 05:28:09,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=115840.0, ans=0.125
+2024-07-28 05:28:12,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=115840.0, ans=0.0
+2024-07-28 05:28:14,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.01 vs. limit=15.0
+2024-07-28 05:28:34,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=115866.66666666667, ans=0.0
+2024-07-28 05:28:40,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115880.0, ans=0.1
+2024-07-28 05:28:40,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=115880.0, ans=0.0
+2024-07-28 05:28:47,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=115893.33333333333, ans=0.0
+2024-07-28 05:28:48,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=115893.33333333333, ans=0.07
+2024-07-28 05:28:59,106 INFO [train.py:1114] (1/4) Epoch 9, batch 5150, loss[loss=0.2174, simple_loss=0.2951, pruned_loss=0.06984, over 4854.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2933, pruned_loss=0.06193, over 936803.12 frames. ], batch size: 16, lr: 8.35e-03, grad_scale: 32.0
+2024-07-28 05:29:18,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=115920.0, ans=0.0
+2024-07-28 05:29:19,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115920.0, ans=0.1
+2024-07-28 05:29:37,795 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 5.668e+01 6.329e+01 7.486e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 05:29:39,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0
+2024-07-28 05:29:40,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=115946.66666666667, ans=0.0
+2024-07-28 05:29:48,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=115946.66666666667, ans=0.125
+2024-07-28 05:30:01,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=115973.33333333333, ans=0.025
+2024-07-28 05:30:01,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115973.33333333333, ans=0.125
+2024-07-28 05:30:01,860 INFO [train.py:1114] (1/4) Epoch 9, batch 5200, loss[loss=0.1777, simple_loss=0.2718, pruned_loss=0.04185, over 4675.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2927, pruned_loss=0.06133, over 936664.72 frames. ], batch size: 14, lr: 8.35e-03, grad_scale: 32.0
+2024-07-28 05:30:02,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=115973.33333333333, ans=0.2
+2024-07-28 05:30:04,217 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:30:16,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=115986.66666666667, ans=0.2
+2024-07-28 05:30:26,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.54 vs. limit=6.0
+2024-07-28 05:30:46,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.19 vs. limit=15.0
+2024-07-28 05:30:47,193 INFO [train.py:1114] (1/4) Epoch 9, batch 5250, loss[loss=0.1801, simple_loss=0.2626, pruned_loss=0.04879, over 4898.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2922, pruned_loss=0.06119, over 936258.14 frames. ], batch size: 13, lr: 8.35e-03, grad_scale: 32.0
+2024-07-28 05:30:50,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116040.0, ans=0.125
+2024-07-28 05:31:09,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=116053.33333333333, ans=0.0
+2024-07-28 05:31:17,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.12 vs. limit=10.0
+2024-07-28 05:31:25,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116066.66666666667, ans=0.1
+2024-07-28 05:31:31,296 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+01 5.667e+01 6.856e+01 8.237e+01 1.145e+02, threshold=1.371e+02, percent-clipped=0.0
+2024-07-28 05:31:34,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=116080.0, ans=0.025
+2024-07-28 05:32:01,285 INFO [train.py:1114] (1/4) Epoch 9, batch 5300, loss[loss=0.2348, simple_loss=0.3223, pruned_loss=0.07363, over 4631.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2913, pruned_loss=0.06072, over 934519.89 frames. ], batch size: 16, lr: 8.35e-03, grad_scale: 32.0
+2024-07-28 05:32:17,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116120.0, ans=0.125
+2024-07-28 05:32:20,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=116133.33333333333, ans=0.07
+2024-07-28 05:32:24,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.33 vs. limit=10.0
+2024-07-28 05:32:25,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=116133.33333333333, ans=0.09899494936611666
+2024-07-28 05:32:25,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=116133.33333333333, ans=0.0
+2024-07-28 05:32:26,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=116133.33333333333, ans=0.2
+2024-07-28 05:32:46,256 INFO [train.py:1114] (1/4) Epoch 9, batch 5350, loss[loss=0.1751, simple_loss=0.2526, pruned_loss=0.04874, over 4531.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2927, pruned_loss=0.06153, over 936519.58 frames. ], batch size: 10, lr: 8.34e-03, grad_scale: 32.0
+2024-07-28 05:32:49,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=116173.33333333333, ans=0.2
+2024-07-28 05:32:58,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=116186.66666666667, ans=0.0
+2024-07-28 05:33:00,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=116186.66666666667, ans=0.0
+2024-07-28 05:33:02,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0
+2024-07-28 05:33:05,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=116200.0, ans=0.05
+2024-07-28 05:33:14,380 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.909e+01 6.357e+01 7.144e+01 1.044e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 05:33:21,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=116213.33333333333, ans=0.125
+2024-07-28 05:33:22,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=116226.66666666667, ans=0.0
+2024-07-28 05:33:36,866 INFO [train.py:1114] (1/4) Epoch 9, batch 5400, loss[loss=0.2121, simple_loss=0.3008, pruned_loss=0.06174, over 4301.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2936, pruned_loss=0.06224, over 930334.05 frames. ], batch size: 25, lr: 8.34e-03, grad_scale: 32.0
+2024-07-28 05:33:40,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116240.0, ans=0.125
+2024-07-28 05:33:55,770 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0
+2024-07-28 05:34:00,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116266.66666666667, ans=0.125
+2024-07-28 05:34:03,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=116266.66666666667, ans=0.125
+2024-07-28 05:34:18,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=116293.33333333333, ans=0.2
+2024-07-28 05:34:22,028 INFO [train.py:1114] (1/4) Epoch 9, batch 5450, loss[loss=0.1646, simple_loss=0.2422, pruned_loss=0.04351, over 4705.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2914, pruned_loss=0.06097, over 933410.27 frames. ], batch size: 11, lr: 8.34e-03, grad_scale: 32.0
+2024-07-28 05:34:35,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116320.0, ans=0.0
+2024-07-28 05:34:36,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116320.0, ans=0.125
+2024-07-28 05:34:45,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=116320.0, ans=0.0
+2024-07-28 05:34:58,371 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.949e+01 6.805e+01 7.625e+01 9.971e+01, threshold=1.361e+02, percent-clipped=0.0
+2024-07-28 05:34:58,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=116346.66666666667, ans=0.0
+2024-07-28 05:35:12,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=116346.66666666667, ans=0.2
+2024-07-28 05:35:17,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.44 vs. limit=15.0
+2024-07-28 05:35:19,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.18 vs. limit=22.5
+2024-07-28 05:35:22,097 INFO [train.py:1114] (1/4) Epoch 9, batch 5500, loss[loss=0.2177, simple_loss=0.315, pruned_loss=0.06025, over 4234.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2906, pruned_loss=0.06058, over 930872.20 frames. ], batch size: 25, lr: 8.34e-03, grad_scale: 32.0
+2024-07-28 05:35:23,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.15 vs. limit=15.0
+2024-07-28 05:35:24,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0
+2024-07-28 05:35:27,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=116373.33333333333, ans=0.0
+2024-07-28 05:35:40,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.08 vs. limit=10.0
+2024-07-28 05:35:40,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=116400.0, ans=0.125
+2024-07-28 05:35:43,559 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:35:45,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=15.0
+2024-07-28 05:35:47,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.25 vs. limit=15.0
+2024-07-28 05:35:56,269 INFO [train.py:1114] (1/4) Epoch 9, batch 5550, loss[loss=0.215, simple_loss=0.2976, pruned_loss=0.06618, over 4710.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2911, pruned_loss=0.06097, over 933268.48 frames. ], batch size: 12, lr: 8.33e-03, grad_scale: 32.0
+2024-07-28 05:36:02,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116440.0, ans=0.125
+2024-07-28 05:36:16,999 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.683e+01 6.413e+01 7.380e+01 1.098e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-28 05:36:19,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116480.0, ans=0.1
+2024-07-28 05:36:32,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116493.33333333333, ans=0.1
+2024-07-28 05:36:35,080 INFO [train.py:1114] (1/4) Epoch 9, batch 5600, loss[loss=0.2234, simple_loss=0.3023, pruned_loss=0.07229, over 4734.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2912, pruned_loss=0.06102, over 934481.62 frames. ], batch size: 14, lr: 8.33e-03, grad_scale: 32.0
+2024-07-28 05:36:38,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=116506.66666666667, ans=0.2
+2024-07-28 05:36:41,865 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0
+2024-07-28 05:37:01,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116560.0, ans=0.1
+2024-07-28 05:37:08,962 INFO [train.py:1114] (1/4) Epoch 9, batch 5650, loss[loss=0.2456, simple_loss=0.3311, pruned_loss=0.08004, over 4607.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.0604, over 937090.36 frames. ], batch size: 21, lr: 8.33e-03, grad_scale: 32.0
+2024-07-28 05:37:28,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+01 5.748e+01 6.232e+01 7.231e+01 1.019e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 05:37:43,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=116640.0, ans=0.125
+2024-07-28 05:37:43,668 INFO [train.py:1114] (1/4) Epoch 9, batch 5700, loss[loss=0.2167, simple_loss=0.3131, pruned_loss=0.06013, over 4698.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2906, pruned_loss=0.06001, over 938254.13 frames. ], batch size: 13, lr: 8.33e-03, grad_scale: 32.0
+2024-07-28 05:37:45,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116640.0, ans=0.1
+2024-07-28 05:38:01,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116666.66666666667, ans=0.125
+2024-07-28 05:38:18,379 INFO [train.py:1114] (1/4) Epoch 9, batch 5750, loss[loss=0.2419, simple_loss=0.3365, pruned_loss=0.07363, over 4698.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2913, pruned_loss=0.06008, over 938409.85 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0
+2024-07-28 05:38:19,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=116706.66666666667, ans=0.5
+2024-07-28 05:38:25,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=116706.66666666667, ans=0.2
+2024-07-28 05:38:34,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=116720.0, ans=0.2
+2024-07-28 05:38:35,339 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.41 vs. limit=10.0
+2024-07-28 05:38:47,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.019e+01 5.728e+01 6.287e+01 7.264e+01 1.232e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 05:39:14,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=116760.0, ans=0.125
+2024-07-28 05:39:17,865 INFO [train.py:1114] (1/4) Epoch 9, batch 5800, loss[loss=0.2392, simple_loss=0.3244, pruned_loss=0.07704, over 4797.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.292, pruned_loss=0.06026, over 937699.46 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0
+2024-07-28 05:39:26,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.93 vs. limit=10.0
+2024-07-28 05:39:28,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.70 vs. limit=10.0
+2024-07-28 05:40:00,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=116826.66666666667, ans=0.0
+2024-07-28 05:40:00,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=116826.66666666667, ans=0.125
+2024-07-28 05:40:02,721 INFO [train.py:1114] (1/4) Epoch 9, batch 5850, loss[loss=0.2242, simple_loss=0.3154, pruned_loss=0.06645, over 4479.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2919, pruned_loss=0.06083, over 937990.05 frames. ], batch size: 21, lr: 8.32e-03, grad_scale: 32.0
+2024-07-28 05:40:04,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=116840.0, ans=0.125
+2024-07-28 05:40:25,339 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.305e+01 6.200e+01 6.975e+01 8.009e+01 1.394e+02, threshold=1.395e+02, percent-clipped=2.0
+2024-07-28 05:40:25,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=116880.0, ans=0.0
+2024-07-28 05:40:38,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116906.66666666667, ans=0.1
+2024-07-28 05:40:38,638 INFO [train.py:1114] (1/4) Epoch 9, batch 5900, loss[loss=0.2323, simple_loss=0.3107, pruned_loss=0.07697, over 4687.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2922, pruned_loss=0.06115, over 938262.79 frames. ], batch size: 15, lr: 8.32e-03, grad_scale: 32.0
+2024-07-28 05:40:47,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=12.0
+2024-07-28 05:40:50,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.93 vs. limit=10.0
+2024-07-28 05:41:09,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=116960.0, ans=0.125
+2024-07-28 05:41:09,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116960.0, ans=0.125
+2024-07-28 05:41:17,905 INFO [train.py:1114] (1/4) Epoch 9, batch 5950, loss[loss=0.2424, simple_loss=0.32, pruned_loss=0.08238, over 4678.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2914, pruned_loss=0.06059, over 940037.44 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0
+2024-07-28 05:41:25,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=116986.66666666667, ans=0.1
+2024-07-28 05:41:25,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.16 vs. limit=22.5
+2024-07-28 05:41:37,800 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.843e+01 6.530e+01 7.569e+01 1.342e+02, threshold=1.306e+02, percent-clipped=0.0
+2024-07-28 05:41:39,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117013.33333333333, ans=0.125
+2024-07-28 05:41:40,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.20 vs. limit=15.0
+2024-07-28 05:41:43,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.06 vs. limit=22.5
+2024-07-28 05:41:51,467 INFO [train.py:1114] (1/4) Epoch 9, batch 6000, loss[loss=0.1915, simple_loss=0.2884, pruned_loss=0.04735, over 4278.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2911, pruned_loss=0.06046, over 937142.75 frames. ], batch size: 25, lr: 8.31e-03, grad_scale: 32.0
+2024-07-28 05:41:51,467 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 05:41:59,196 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5554, 2.4826, 5.0315, 2.9504], device='cuda:1')
+2024-07-28 05:42:04,282 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([4.3350, 3.0505, 3.5627, 4.3284, 4.3493, 3.2679, 4.1502, 3.2965],
+ device='cuda:1')
+2024-07-28 05:42:05,184 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.175, simple_loss=0.2796, pruned_loss=0.03521, over 944034.00 frames.
+2024-07-28 05:42:05,184 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 05:42:26,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=117080.0, ans=0.02
+2024-07-28 05:42:39,950 INFO [train.py:1114] (1/4) Epoch 9, batch 6050, loss[loss=0.1743, simple_loss=0.268, pruned_loss=0.04032, over 4769.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.29, pruned_loss=0.05983, over 937889.83 frames. ], batch size: 12, lr: 8.31e-03, grad_scale: 32.0
+2024-07-28 05:43:05,319 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.753e+01 6.307e+01 7.312e+01 1.282e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 05:43:06,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=117146.66666666667, ans=0.025
+2024-07-28 05:43:18,564 INFO [train.py:1114] (1/4) Epoch 9, batch 6100, loss[loss=0.2172, simple_loss=0.3145, pruned_loss=0.05996, over 4693.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2896, pruned_loss=0.05978, over 937430.70 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0
+2024-07-28 05:43:23,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=117173.33333333333, ans=0.125
+2024-07-28 05:43:25,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=117186.66666666667, ans=0.0
+2024-07-28 05:43:28,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=117186.66666666667, ans=0.125
+2024-07-28 05:43:30,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117186.66666666667, ans=0.125
+2024-07-28 05:43:30,141 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:43:38,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=117213.33333333333, ans=0.0
+2024-07-28 05:43:42,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0
+2024-07-28 05:43:49,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=15.0
+2024-07-28 05:43:52,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=117226.66666666667, ans=0.5
+2024-07-28 05:43:53,988 INFO [train.py:1114] (1/4) Epoch 9, batch 6150, loss[loss=0.2862, simple_loss=0.3526, pruned_loss=0.1099, over 3632.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2896, pruned_loss=0.0598, over 936449.41 frames. ], batch size: 36, lr: 8.31e-03, grad_scale: 32.0
+2024-07-28 05:43:54,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=117240.0, ans=0.125
+2024-07-28 05:43:54,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.92 vs. limit=15.0
+2024-07-28 05:43:57,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.96 vs. limit=12.0
+2024-07-28 05:44:02,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117253.33333333333, ans=0.125
+2024-07-28 05:44:11,547 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.53 vs. limit=15.0
+2024-07-28 05:44:15,649 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.523e+01 6.022e+01 6.962e+01 1.002e+02, threshold=1.204e+02, percent-clipped=1.0
+2024-07-28 05:44:19,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=117280.0, ans=0.09899494936611666
+2024-07-28 05:44:22,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=117293.33333333333, ans=0.09899494936611666
+2024-07-28 05:44:32,084 INFO [train.py:1114] (1/4) Epoch 9, batch 6200, loss[loss=0.1876, simple_loss=0.2841, pruned_loss=0.04556, over 4738.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2907, pruned_loss=0.0603, over 936286.79 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 32.0
+2024-07-28 05:44:40,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=117320.0, ans=0.0
+2024-07-28 05:44:51,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=117320.0, ans=0.125
+2024-07-28 05:44:51,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=117333.33333333333, ans=0.125
+2024-07-28 05:45:06,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.02 vs. limit=22.5
+2024-07-28 05:45:22,589 INFO [train.py:1114] (1/4) Epoch 9, batch 6250, loss[loss=0.2289, simple_loss=0.3184, pruned_loss=0.06969, over 4805.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2911, pruned_loss=0.06097, over 933120.63 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 64.0
+2024-07-28 05:45:57,915 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.368e+01 5.807e+01 6.495e+01 7.426e+01 1.051e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-28 05:46:00,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=117413.33333333333, ans=0.2
+2024-07-28 05:46:11,018 INFO [train.py:1114] (1/4) Epoch 9, batch 6300, loss[loss=0.1725, simple_loss=0.2577, pruned_loss=0.04364, over 4515.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2908, pruned_loss=0.0611, over 929258.19 frames. ], batch size: 10, lr: 8.30e-03, grad_scale: 64.0
+2024-07-28 05:46:11,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=117440.0, ans=0.0
+2024-07-28 05:46:14,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=117440.0, ans=0.05
+2024-07-28 05:46:15,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117440.0, ans=0.1
+2024-07-28 05:46:26,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=117466.66666666667, ans=0.035
+2024-07-28 05:46:27,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=117466.66666666667, ans=0.0
+2024-07-28 05:46:30,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.23 vs. limit=15.0
+2024-07-28 05:46:41,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=117493.33333333333, ans=0.125
+2024-07-28 05:46:45,005 INFO [train.py:1114] (1/4) Epoch 9, batch 6350, loss[loss=0.2398, simple_loss=0.3343, pruned_loss=0.07265, over 4548.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.06077, over 933409.93 frames. ], batch size: 21, lr: 8.30e-03, grad_scale: 64.0
+2024-07-28 05:46:45,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=117506.66666666667, ans=0.0
+2024-07-28 05:46:45,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117506.66666666667, ans=0.1
+2024-07-28 05:46:56,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117520.0, ans=0.125
+2024-07-28 05:47:02,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.78 vs. limit=10.0
+2024-07-28 05:47:05,464 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.641e+01 6.337e+01 7.331e+01 1.035e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-28 05:47:18,812 INFO [train.py:1114] (1/4) Epoch 9, batch 6400, loss[loss=0.1948, simple_loss=0.2847, pruned_loss=0.05242, over 4637.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.291, pruned_loss=0.06081, over 934356.30 frames. ], batch size: 13, lr: 8.29e-03, grad_scale: 64.0
+2024-07-28 05:47:28,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=117586.66666666667, ans=0.5
+2024-07-28 05:47:36,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117600.0, ans=0.1
+2024-07-28 05:47:45,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117626.66666666667, ans=0.125
+2024-07-28 05:47:48,305 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=12.0
+2024-07-28 05:47:50,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=12.0
+2024-07-28 05:47:51,885 INFO [train.py:1114] (1/4) Epoch 9, batch 6450, loss[loss=0.1957, simple_loss=0.2892, pruned_loss=0.05112, over 4530.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.291, pruned_loss=0.06027, over 938249.65 frames. ], batch size: 21, lr: 8.29e-03, grad_scale: 32.0
+2024-07-28 05:47:56,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=12.0
+2024-07-28 05:48:02,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117653.33333333333, ans=0.125
+2024-07-28 05:48:04,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0
+2024-07-28 05:48:12,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.777e+01 6.265e+01 7.458e+01 1.073e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 05:48:12,347 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:48:16,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=117680.0, ans=0.125
+2024-07-28 05:48:17,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=117693.33333333333, ans=0.95
+2024-07-28 05:48:24,565 INFO [train.py:1114] (1/4) Epoch 9, batch 6500, loss[loss=0.2799, simple_loss=0.3399, pruned_loss=0.1099, over 3350.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2896, pruned_loss=0.05922, over 939781.52 frames. ], batch size: 35, lr: 8.29e-03, grad_scale: 32.0
+2024-07-28 05:48:29,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=117706.66666666667, ans=0.125
+2024-07-28 05:48:40,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=117733.33333333333, ans=0.125
+2024-07-28 05:48:48,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=117746.66666666667, ans=0.125
+2024-07-28 05:49:02,990 INFO [train.py:1114] (1/4) Epoch 9, batch 6550, loss[loss=0.19, simple_loss=0.2711, pruned_loss=0.05443, over 4803.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2889, pruned_loss=0.05908, over 942715.32 frames. ], batch size: 11, lr: 8.29e-03, grad_scale: 32.0
+2024-07-28 05:49:05,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117773.33333333333, ans=0.125
+2024-07-28 05:49:16,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.13 vs. limit=10.0
+2024-07-28 05:49:18,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=117800.0, ans=0.0
+2024-07-28 05:49:23,500 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.652e+01 6.284e+01 7.270e+01 1.094e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 05:49:29,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117826.66666666667, ans=0.1
+2024-07-28 05:49:32,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117826.66666666667, ans=0.125
+2024-07-28 05:49:35,906 INFO [train.py:1114] (1/4) Epoch 9, batch 6600, loss[loss=0.2118, simple_loss=0.294, pruned_loss=0.06478, over 4938.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2895, pruned_loss=0.05953, over 944618.26 frames. ], batch size: 14, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:49:38,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=117840.0, ans=0.0
+2024-07-28 05:49:41,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=117840.0, ans=0.025
+2024-07-28 05:49:49,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.19 vs. limit=15.0
+2024-07-28 05:49:53,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=117866.66666666667, ans=0.125
+2024-07-28 05:50:06,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=117893.33333333333, ans=0.0
+2024-07-28 05:50:12,818 INFO [train.py:1114] (1/4) Epoch 9, batch 6650, loss[loss=0.2315, simple_loss=0.3156, pruned_loss=0.07367, over 4639.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2902, pruned_loss=0.05969, over 943240.12 frames. ], batch size: 17, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:50:20,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117920.0, ans=0.1
+2024-07-28 05:50:35,490 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.735e+01 6.176e+01 7.286e+01 9.615e+01, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 05:50:37,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=117946.66666666667, ans=0.04949747468305833
+2024-07-28 05:50:48,321 INFO [train.py:1114] (1/4) Epoch 9, batch 6700, loss[loss=0.2236, simple_loss=0.3072, pruned_loss=0.07, over 4694.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2914, pruned_loss=0.06067, over 942389.12 frames. ], batch size: 19, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:50:54,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.68 vs. limit=15.0
+2024-07-28 05:50:57,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=117986.66666666667, ans=0.0
+2024-07-28 05:51:03,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=118000.0, ans=0.95
+2024-07-28 05:51:12,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=118013.33333333333, ans=0.0
+2024-07-28 05:51:15,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=12.0
+2024-07-28 05:51:16,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118026.66666666667, ans=0.125
+2024-07-28 05:51:21,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.08 vs. limit=15.0
+2024-07-28 05:51:22,049 INFO [train.py:1114] (1/4) Epoch 9, batch 6750, loss[loss=0.2226, simple_loss=0.2936, pruned_loss=0.0758, over 4168.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.291, pruned_loss=0.06059, over 941096.69 frames. ], batch size: 25, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:51:24,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=118040.0, ans=0.04949747468305833
+2024-07-28 05:51:26,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118040.0, ans=0.125
+2024-07-28 05:51:26,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=118040.0, ans=0.1
+2024-07-28 05:51:34,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118053.33333333333, ans=0.1
+2024-07-28 05:51:42,735 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.833e+01 6.338e+01 7.124e+01 1.183e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 05:51:44,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=118080.0, ans=0.125
+2024-07-28 05:51:55,781 INFO [train.py:1114] (1/4) Epoch 9, batch 6800, loss[loss=0.1879, simple_loss=0.2797, pruned_loss=0.04806, over 4629.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2905, pruned_loss=0.06016, over 939159.98 frames. ], batch size: 13, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:51:55,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=118106.66666666667, ans=0.2
+2024-07-28 05:51:56,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=118106.66666666667, ans=0.125
+2024-07-28 05:52:01,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.19 vs. limit=15.0
+2024-07-28 05:52:05,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=118120.0, ans=0.2
+2024-07-28 05:52:29,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=118160.0, ans=0.015
+2024-07-28 05:52:31,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118173.33333333333, ans=0.1
+2024-07-28 05:52:32,162 INFO [train.py:1114] (1/4) Epoch 9, batch 6850, loss[loss=0.1845, simple_loss=0.2738, pruned_loss=0.04758, over 4693.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2893, pruned_loss=0.05979, over 940528.29 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:52:45,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.31 vs. limit=22.5
+2024-07-28 05:52:46,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=118200.0, ans=0.05
+2024-07-28 05:52:48,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=118200.0, ans=0.0
+2024-07-28 05:52:50,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=118200.0, ans=0.0
+2024-07-28 05:52:50,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.13 vs. limit=15.0
+2024-07-28 05:52:53,782 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.745e+01 6.443e+01 7.368e+01 1.069e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 05:53:06,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118226.66666666667, ans=0.125
+2024-07-28 05:53:09,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=118226.66666666667, ans=10.0
+2024-07-28 05:53:10,621 INFO [train.py:1114] (1/4) Epoch 9, batch 6900, loss[loss=0.2, simple_loss=0.2759, pruned_loss=0.06201, over 4971.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.06065, over 942967.83 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:53:14,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=118240.0, ans=0.0
+2024-07-28 05:53:14,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118240.0, ans=0.125
+2024-07-28 05:53:14,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.05 vs. limit=22.5
+2024-07-28 05:53:17,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.80 vs. limit=15.0
+2024-07-28 05:53:23,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=118253.33333333333, ans=0.2
+2024-07-28 05:53:39,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118293.33333333333, ans=0.125
+2024-07-28 05:53:43,330 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:53:46,414 INFO [train.py:1114] (1/4) Epoch 9, batch 6950, loss[loss=0.2024, simple_loss=0.2819, pruned_loss=0.0614, over 4513.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2909, pruned_loss=0.06047, over 939994.78 frames. ], batch size: 10, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:53:46,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118306.66666666667, ans=0.125
+2024-07-28 05:53:54,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=118320.0, ans=0.025
+2024-07-28 05:53:55,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118320.0, ans=0.125
+2024-07-28 05:53:58,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118320.0, ans=0.125
+2024-07-28 05:54:02,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0
+2024-07-28 05:54:06,830 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.744e+01 6.460e+01 7.316e+01 1.273e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 05:54:11,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=118346.66666666667, ans=0.125
+2024-07-28 05:54:19,633 INFO [train.py:1114] (1/4) Epoch 9, batch 7000, loss[loss=0.2603, simple_loss=0.3463, pruned_loss=0.0871, over 4597.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.29, pruned_loss=0.06013, over 938812.90 frames. ], batch size: 17, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:54:20,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=118373.33333333333, ans=0.0
+2024-07-28 05:54:24,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=118373.33333333333, ans=0.125
+2024-07-28 05:54:24,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=118373.33333333333, ans=0.2
+2024-07-28 05:54:28,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0
+2024-07-28 05:54:32,588 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=6.0
+2024-07-28 05:54:41,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0
+2024-07-28 05:54:46,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=118426.66666666667, ans=6.0
+2024-07-28 05:54:46,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118426.66666666667, ans=0.125
+2024-07-28 05:54:48,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=118426.66666666667, ans=0.07
+2024-07-28 05:54:49,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=118426.66666666667, ans=0.0
+2024-07-28 05:54:51,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=118426.66666666667, ans=0.1
+2024-07-28 05:54:52,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=118426.66666666667, ans=0.025
+2024-07-28 05:54:53,262 INFO [train.py:1114] (1/4) Epoch 9, batch 7050, loss[loss=0.2289, simple_loss=0.3103, pruned_loss=0.07382, over 4698.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2898, pruned_loss=0.05958, over 942114.55 frames. ], batch size: 19, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:55:05,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=118453.33333333333, ans=0.05
+2024-07-28 05:55:14,457 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.222e+01 6.949e+01 1.042e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-28 05:55:14,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118480.0, ans=0.1
+2024-07-28 05:55:14,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=118480.0, ans=0.025
+2024-07-28 05:55:16,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.92 vs. limit=15.0
+2024-07-28 05:55:26,697 INFO [train.py:1114] (1/4) Epoch 9, batch 7100, loss[loss=0.2238, simple_loss=0.2971, pruned_loss=0.07521, over 4798.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2915, pruned_loss=0.06087, over 936629.44 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:55:45,281 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:55:59,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=118573.33333333333, ans=0.2
+2024-07-28 05:55:59,506 INFO [train.py:1114] (1/4) Epoch 9, batch 7150, loss[loss=0.2819, simple_loss=0.3592, pruned_loss=0.1023, over 4433.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2902, pruned_loss=0.06032, over 937661.61 frames. ], batch size: 21, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:56:00,860 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:56:16,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0
+2024-07-28 05:56:19,956 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.871e+01 5.611e+01 6.289e+01 7.655e+01 1.013e+02, threshold=1.258e+02, percent-clipped=0.0
+2024-07-28 05:56:23,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=15.0
+2024-07-28 05:56:23,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.88 vs. limit=15.0
+2024-07-28 05:56:26,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118626.66666666667, ans=0.1
+2024-07-28 05:56:32,650 INFO [train.py:1114] (1/4) Epoch 9, batch 7200, loss[loss=0.2247, simple_loss=0.3039, pruned_loss=0.07271, over 4788.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.0607, over 937529.34 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:56:47,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118666.66666666667, ans=0.1
+2024-07-28 05:56:49,024 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:56:51,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=118680.0, ans=0.125
+2024-07-28 05:56:51,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=15.0
+2024-07-28 05:56:52,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=118680.0, ans=0.025
+2024-07-28 05:57:02,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118693.33333333333, ans=0.1
+2024-07-28 05:57:04,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0
+2024-07-28 05:57:04,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.31 vs. limit=10.0
+2024-07-28 05:57:06,751 INFO [train.py:1114] (1/4) Epoch 9, batch 7250, loss[loss=0.1594, simple_loss=0.2426, pruned_loss=0.03807, over 4845.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.06042, over 939342.10 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:57:15,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118720.0, ans=0.125
+2024-07-28 05:57:18,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118720.0, ans=0.1
+2024-07-28 05:57:23,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=118733.33333333333, ans=0.0
+2024-07-28 05:57:26,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118746.66666666667, ans=0.1
+2024-07-28 05:57:26,911 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.398e+01 5.726e+01 6.433e+01 7.236e+01 9.812e+01, threshold=1.287e+02, percent-clipped=0.0
+2024-07-28 05:57:35,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=118760.0, ans=0.125
+2024-07-28 05:57:39,731 INFO [train.py:1114] (1/4) Epoch 9, batch 7300, loss[loss=0.191, simple_loss=0.2765, pruned_loss=0.05271, over 4858.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.29, pruned_loss=0.06024, over 939427.69 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:58:03,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118813.33333333333, ans=0.1
+2024-07-28 05:58:16,988 INFO [train.py:1114] (1/4) Epoch 9, batch 7350, loss[loss=0.1902, simple_loss=0.2704, pruned_loss=0.05498, over 4644.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2897, pruned_loss=0.06045, over 938920.00 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:58:17,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=118840.0, ans=0.0
+2024-07-28 05:58:37,350 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:58:37,726 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+01 5.606e+01 6.103e+01 6.789e+01 9.069e+01, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 05:58:38,865 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.75 vs. limit=15.0
+2024-07-28 05:58:46,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118893.33333333333, ans=0.1
+2024-07-28 05:58:50,084 INFO [train.py:1114] (1/4) Epoch 9, batch 7400, loss[loss=0.1828, simple_loss=0.2759, pruned_loss=0.04482, over 4697.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2896, pruned_loss=0.06002, over 940074.05 frames. ], batch size: 13, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:58:51,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=118906.66666666667, ans=0.09899494936611666
+2024-07-28 05:58:54,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=118906.66666666667, ans=0.2
+2024-07-28 05:58:54,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=118906.66666666667, ans=0.0
+2024-07-28 05:59:08,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=118933.33333333333, ans=0.125
+2024-07-28 05:59:10,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=118946.66666666667, ans=0.0
+2024-07-28 05:59:15,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=118960.0, ans=0.2
+2024-07-28 05:59:19,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=118960.0, ans=0.07
+2024-07-28 05:59:19,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118960.0, ans=0.1
+2024-07-28 05:59:20,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118960.0, ans=0.1
+2024-07-28 05:59:20,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=118960.0, ans=0.125
+2024-07-28 05:59:21,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118960.0, ans=0.125
+2024-07-28 05:59:22,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=118973.33333333333, ans=0.2
+2024-07-28 05:59:22,842 INFO [train.py:1114] (1/4) Epoch 9, batch 7450, loss[loss=0.1834, simple_loss=0.2611, pruned_loss=0.05283, over 4610.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.289, pruned_loss=0.05992, over 937386.91 frames. ], batch size: 11, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:59:25,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.40 vs. limit=15.0
+2024-07-28 05:59:25,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=118973.33333333333, ans=0.2
+2024-07-28 05:59:42,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=119000.0, ans=0.125
+2024-07-28 05:59:45,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=119013.33333333333, ans=0.125
+2024-07-28 05:59:46,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.870e+01 5.815e+01 6.542e+01 7.746e+01 1.541e+02, threshold=1.308e+02, percent-clipped=5.0
+2024-07-28 05:59:49,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=119013.33333333333, ans=0.125
+2024-07-28 05:59:57,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=119026.66666666667, ans=0.2
+2024-07-28 05:59:59,607 INFO [train.py:1114] (1/4) Epoch 9, batch 7500, loss[loss=0.2666, simple_loss=0.332, pruned_loss=0.1006, over 3430.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2899, pruned_loss=0.06026, over 935892.70 frames. ], batch size: 36, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:00:02,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=119040.0, ans=0.0
+2024-07-28 06:00:06,598 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:00:22,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119080.0, ans=0.1
+2024-07-28 06:00:35,132 INFO [train.py:1114] (1/4) Epoch 9, batch 7550, loss[loss=0.2602, simple_loss=0.339, pruned_loss=0.09071, over 4649.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2927, pruned_loss=0.06165, over 936260.61 frames. ], batch size: 17, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:00:42,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=119120.0, ans=0.125
+2024-07-28 06:00:50,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=119133.33333333333, ans=0.125
+2024-07-28 06:00:51,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=119133.33333333333, ans=0.0
+2024-07-28 06:00:55,461 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.364e+01 5.884e+01 6.441e+01 7.385e+01 1.107e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-28 06:00:55,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.36 vs. limit=10.0
+2024-07-28 06:01:07,527 INFO [train.py:1114] (1/4) Epoch 9, batch 7600, loss[loss=0.2484, simple_loss=0.3301, pruned_loss=0.08333, over 4812.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2935, pruned_loss=0.06178, over 937859.33 frames. ], batch size: 14, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:01:12,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=119173.33333333333, ans=0.04949747468305833
+2024-07-28 06:01:30,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=119213.33333333333, ans=0.125
+2024-07-28 06:01:31,056 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:01:41,965 INFO [train.py:1114] (1/4) Epoch 9, batch 7650, loss[loss=0.1851, simple_loss=0.2623, pruned_loss=0.05395, over 4950.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2934, pruned_loss=0.06184, over 937159.55 frames. ], batch size: 12, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:01:43,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=119240.0, ans=0.025
+2024-07-28 06:01:48,382 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:02:04,510 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.140e+01 5.757e+01 6.386e+01 7.107e+01 1.097e+02, threshold=1.277e+02, percent-clipped=0.0
+2024-07-28 06:02:10,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=119280.0, ans=0.125
+2024-07-28 06:02:10,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. limit=15.0
+2024-07-28 06:02:18,631 INFO [train.py:1114] (1/4) Epoch 9, batch 7700, loss[loss=0.1972, simple_loss=0.2669, pruned_loss=0.0638, over 4697.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2936, pruned_loss=0.06169, over 934444.58 frames. ], batch size: 13, lr: 8.23e-03, grad_scale: 16.0
+2024-07-28 06:02:18,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119306.66666666667, ans=0.125
+2024-07-28 06:02:31,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=119320.0, ans=0.0
+2024-07-28 06:02:44,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.32 vs. limit=15.0
+2024-07-28 06:03:18,522 INFO [train.py:1114] (1/4) Epoch 9, batch 7750, loss[loss=0.1981, simple_loss=0.2891, pruned_loss=0.05353, over 4941.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2936, pruned_loss=0.06151, over 935769.14 frames. 
], batch size: 14, lr: 8.23e-03, grad_scale: 16.0 +2024-07-28 06:03:39,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=119400.0, ans=0.125 +2024-07-28 06:03:40,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=119400.0, ans=0.125 +2024-07-28 06:03:40,334 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:03:49,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=119413.33333333333, ans=0.125 +2024-07-28 06:03:51,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.505e+01 6.110e+01 6.941e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 06:03:54,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.98 vs. limit=22.5 +2024-07-28 06:03:57,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=119426.66666666667, ans=0.125 +2024-07-28 06:03:59,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.46 vs. limit=15.0 +2024-07-28 06:04:03,703 INFO [train.py:1114] (1/4) Epoch 9, batch 7800, loss[loss=0.175, simple_loss=0.2824, pruned_loss=0.03386, over 4665.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2926, pruned_loss=0.06019, over 937306.14 frames. ], batch size: 14, lr: 8.23e-03, grad_scale: 8.0 +2024-07-28 06:04:19,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119440.0, ans=0.1 +2024-07-28 06:04:55,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=119466.66666666667, ans=0.2 +2024-07-28 06:05:02,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.96 vs. limit=15.0 +2024-07-28 06:05:26,843 INFO [train.py:1114] (1/4) Epoch 9, batch 7850, loss[loss=0.213, simple_loss=0.2822, pruned_loss=0.07186, over 4495.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2928, pruned_loss=0.06077, over 936098.75 frames. ], batch size: 10, lr: 8.23e-03, grad_scale: 8.0 +2024-07-28 06:05:28,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=119506.66666666667, ans=0.2 +2024-07-28 06:05:29,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. 
limit=12.0 +2024-07-28 06:05:45,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=119520.0, ans=0.125 +2024-07-28 06:06:16,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=119533.33333333333, ans=0.2 +2024-07-28 06:06:33,867 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 5.758e+01 6.163e+01 6.826e+01 1.029e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 06:06:34,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=119546.66666666667, ans=0.0 +2024-07-28 06:06:47,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119560.0, ans=0.1 +2024-07-28 06:06:51,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=119560.0, ans=0.07 +2024-07-28 06:07:13,097 INFO [train.py:1114] (1/4) Epoch 9, batch 7900, loss[loss=0.2525, simple_loss=0.3359, pruned_loss=0.08454, over 4868.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2941, pruned_loss=0.06183, over 933523.27 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 8.0 +2024-07-28 06:07:13,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119573.33333333333, ans=0.1 +2024-07-28 06:07:19,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=119586.66666666667, ans=0.0 +2024-07-28 06:07:42,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.81 vs. limit=12.0 +2024-07-28 06:08:03,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.25 vs. limit=15.0 +2024-07-28 06:08:15,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=119626.66666666667, ans=0.0 +2024-07-28 06:08:16,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=119640.0, ans=0.0 +2024-07-28 06:08:16,884 INFO [train.py:1114] (1/4) Epoch 9, batch 7950, loss[loss=0.2885, simple_loss=0.3504, pruned_loss=0.1133, over 3359.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2934, pruned_loss=0.06135, over 935333.27 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 8.0 +2024-07-28 06:08:17,157 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=12.0 +2024-07-28 06:08:29,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=119640.0, ans=0.125 +2024-07-28 06:08:30,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 06:08:31,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=119653.33333333333, ans=0.125 +2024-07-28 06:08:36,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.47 vs. 
limit=15.0 +2024-07-28 06:08:54,861 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 5.779e+01 6.459e+01 7.298e+01 1.141e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 06:09:17,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=119693.33333333333, ans=0.2 +2024-07-28 06:09:24,212 INFO [train.py:1114] (1/4) Epoch 9, batch 8000, loss[loss=0.188, simple_loss=0.2646, pruned_loss=0.05573, over 4612.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.292, pruned_loss=0.06131, over 934498.95 frames. ], batch size: 11, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:09:26,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=119706.66666666667, ans=0.125 +2024-07-28 06:09:41,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=119720.0, ans=0.2 +2024-07-28 06:09:44,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0 +2024-07-28 06:09:53,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=119733.33333333333, ans=0.0 +2024-07-28 06:09:55,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=119733.33333333333, ans=0.0 +2024-07-28 06:10:07,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=119746.66666666667, ans=0.0 +2024-07-28 06:10:08,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=119746.66666666667, ans=0.0 +2024-07-28 06:10:28,779 INFO [train.py:1114] (1/4) Epoch 9, batch 8050, loss[loss=0.183, simple_loss=0.2766, pruned_loss=0.04469, over 4807.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2922, pruned_loss=0.06087, over 934330.45 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:10:32,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=119773.33333333333, ans=0.025 +2024-07-28 06:10:38,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=119786.66666666667, ans=0.125 +2024-07-28 06:10:48,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=119813.33333333333, ans=0.0 +2024-07-28 06:10:49,917 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.138e+01 7.014e+01 8.220e+01 1.277e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-28 06:10:51,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=119813.33333333333, ans=0.2 +2024-07-28 06:10:54,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=119826.66666666667, ans=0.2 +2024-07-28 06:11:03,032 INFO [train.py:1114] (1/4) Epoch 9, batch 8100, loss[loss=0.215, simple_loss=0.2986, pruned_loss=0.06566, over 4815.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2934, pruned_loss=0.06144, over 933808.31 frames. 
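An aside on the `ScheduledFloat` entries that dominate this log: each one records a hyperparameter (a dropout probability, skip rate, balancer probability, etc.) whose value `ans=...` is looked up against the global `batch_count`. The real implementation lives in icefall's `zipformer/scaling.py`; the sketch below only illustrates the general idea of a piecewise-linear schedule, with invented class and breakpoint values (by `batch_count` ≈ 119k these schedules have long since flattened to their final values, e.g. `dropout_p ... ans=0.1`).

```python
# Minimal sketch (NOT icefall's actual ScheduledFloat): a scalar
# hyperparameter interpolated piecewise-linearly against the global
# batch count, like the `ans=...` values logged above. The breakpoint
# numbers here are illustrative, not taken from the recipe.
class PiecewiseSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, e.g. (0, 0.3) -> (20000, 0.1)
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                frac = (batch_count - x0) / (x1 - x0)
                return y0 + frac * (y1 - y0)
        raise AssertionError("unreachable for sorted points")

sched = PiecewiseSchedule((0.0, 0.3), (20000.0, 0.1))
print(sched.value(118666.67))  # -> 0.1, past the last breakpoint
```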
], batch size: 15, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:11:12,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=119853.33333333333, ans=0.125 +2024-07-28 06:11:19,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=119866.66666666667, ans=0.125 +2024-07-28 06:11:20,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=119866.66666666667, ans=0.125 +2024-07-28 06:11:25,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=119880.0, ans=0.0 +2024-07-28 06:11:27,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=119880.0, ans=0.0 +2024-07-28 06:11:28,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=119893.33333333333, ans=0.025 +2024-07-28 06:11:30,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119893.33333333333, ans=0.1 +2024-07-28 06:11:36,511 INFO [train.py:1114] (1/4) Epoch 9, batch 8150, loss[loss=0.2117, simple_loss=0.3031, pruned_loss=0.06016, over 4803.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2927, pruned_loss=0.06124, over 937241.97 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:11:57,270 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.730e+01 6.295e+01 7.311e+01 1.625e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 06:11:58,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=119946.66666666667, ans=0.1 +2024-07-28 06:12:01,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=119960.0, ans=0.125 +2024-07-28 06:12:05,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=119960.0, ans=0.125 +2024-07-28 06:12:05,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=119960.0, ans=0.125 +2024-07-28 06:12:08,486 INFO [train.py:1114] (1/4) Epoch 9, batch 8200, loss[loss=0.2569, simple_loss=0.3562, pruned_loss=0.07884, over 4799.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2925, pruned_loss=0.06059, over 938080.92 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:12:15,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=119986.66666666667, ans=0.0 +2024-07-28 06:12:47,769 INFO [train.py:1114] (1/4) Epoch 9, batch 8250, loss[loss=0.1813, simple_loss=0.2756, pruned_loss=0.04354, over 4891.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2926, pruned_loss=0.0603, over 938483.63 frames. 
], batch size: 13, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:12:51,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=120040.0, ans=0.125 +2024-07-28 06:12:59,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=120053.33333333333, ans=0.125 +2024-07-28 06:13:02,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=120053.33333333333, ans=0.125 +2024-07-28 06:13:06,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120066.66666666667, ans=0.125 +2024-07-28 06:13:12,021 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.840e+01 6.472e+01 7.401e+01 1.114e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 06:13:16,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=120093.33333333333, ans=0.125 +2024-07-28 06:13:17,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=120093.33333333333, ans=0.125 +2024-07-28 06:13:23,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120093.33333333333, ans=0.0 +2024-07-28 06:13:27,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120093.33333333333, ans=0.1 +2024-07-28 06:13:31,393 INFO [train.py:1114] (1/4) Epoch 9, batch 8300, loss[loss=0.2222, simple_loss=0.3073, pruned_loss=0.06853, over 4894.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2923, pruned_loss=0.0601, over 938520.98 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:13:38,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=120120.0, ans=0.025 +2024-07-28 06:13:41,768 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:13:46,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=120133.33333333333, ans=0.0 +2024-07-28 06:13:54,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=120146.66666666667, ans=0.04949747468305833 +2024-07-28 06:14:06,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.09 vs. limit=15.0 +2024-07-28 06:14:06,567 INFO [train.py:1114] (1/4) Epoch 9, batch 8350, loss[loss=0.2268, simple_loss=0.3159, pruned_loss=0.06885, over 4805.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2911, pruned_loss=0.05956, over 941616.33 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 16.0 +2024-07-28 06:14:24,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0 +2024-07-28 06:14:26,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.25 vs. 
limit=15.0 +2024-07-28 06:14:27,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120200.0, ans=0.125 +2024-07-28 06:14:32,031 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.725e+01 6.523e+01 7.692e+01 9.570e+01, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 06:14:42,954 INFO [train.py:1114] (1/4) Epoch 9, batch 8400, loss[loss=0.2223, simple_loss=0.3012, pruned_loss=0.07169, over 4772.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2914, pruned_loss=0.06007, over 939997.08 frames. ], batch size: 12, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:15:05,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=120280.0, ans=0.0 +2024-07-28 06:15:07,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120280.0, ans=0.1 +2024-07-28 06:15:07,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=120280.0, ans=0.0 +2024-07-28 06:15:08,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. limit=15.0 +2024-07-28 06:15:11,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120293.33333333333, ans=0.1 +2024-07-28 06:15:13,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.86 vs. limit=22.5 +2024-07-28 06:15:13,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120293.33333333333, ans=0.125 +2024-07-28 06:15:15,454 INFO [train.py:1114] (1/4) Epoch 9, batch 8450, loss[loss=0.2138, simple_loss=0.3068, pruned_loss=0.06046, over 4795.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2917, pruned_loss=0.06011, over 938827.96 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:15:25,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120320.0, ans=0.1 +2024-07-28 06:15:36,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=120346.66666666667, ans=0.125 +2024-07-28 06:15:38,206 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.798e+01 6.423e+01 7.347e+01 1.111e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 06:15:39,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.68 vs. limit=15.0 +2024-07-28 06:15:41,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=120346.66666666667, ans=0.125 +2024-07-28 06:15:45,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=120360.0, ans=0.125 +2024-07-28 06:15:47,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120360.0, ans=0.1 +2024-07-28 06:15:49,307 INFO [train.py:1114] (1/4) Epoch 9, batch 8500, loss[loss=0.1919, simple_loss=0.2596, pruned_loss=0.06214, over 4602.00 frames. 
], tot_loss[loss=0.2049, simple_loss=0.2902, pruned_loss=0.05978, over 938514.48 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:15:50,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.87 vs. limit=15.0 +2024-07-28 06:16:01,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=120400.0, ans=0.2 +2024-07-28 06:16:08,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120413.33333333333, ans=0.125 +2024-07-28 06:16:17,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=120426.66666666667, ans=0.125 +2024-07-28 06:16:21,538 INFO [train.py:1114] (1/4) Epoch 9, batch 8550, loss[loss=0.1778, simple_loss=0.2526, pruned_loss=0.05152, over 4806.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2899, pruned_loss=0.05976, over 939514.64 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 16.0 +2024-07-28 06:16:22,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=120440.0, ans=0.125 +2024-07-28 06:16:23,521 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.535e-03 +2024-07-28 06:16:24,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.74 vs. limit=15.0 +2024-07-28 06:16:29,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=120453.33333333333, ans=0.0 +2024-07-28 06:16:31,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=120453.33333333333, ans=0.125 +2024-07-28 06:16:38,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-07-28 06:16:43,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.723e+01 6.764e+01 8.281e+01 1.171e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-28 06:16:46,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.58 vs. limit=15.0 +2024-07-28 06:16:50,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=120493.33333333333, ans=10.0 +2024-07-28 06:16:54,057 INFO [train.py:1114] (1/4) Epoch 9, batch 8600, loss[loss=0.2511, simple_loss=0.3244, pruned_loss=0.08894, over 4818.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2897, pruned_loss=0.05964, over 938843.64 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:16:58,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120506.66666666667, ans=0.1 +2024-07-28 06:17:24,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=120560.0, ans=0.125 +2024-07-28 06:17:26,332 INFO [train.py:1114] (1/4) Epoch 9, batch 8650, loss[loss=0.2028, simple_loss=0.2809, pruned_loss=0.06237, over 4906.00 frames. 
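On the recurring `WARNING [optim.py:487]` lines: they report the min/25%/50%/75%/max quartiles of recent gradient norms plus the clipping threshold in force, and in every instance above the threshold equals `Clipping_scale` (2.0) times the logged median (e.g. quartile median 6.764e+01 with threshold 1.353e+02). The sketch below shows that general technique, median-relative gradient clipping over a sliding window; the names and window size are invented, and icefall's actual `optim.py` logic differs in detail.

```python
# Hedged sketch of median-based gradient clipping (not the exact
# optim.py code): track a window of recent grad norms, clip to
# clipping_scale * median, and expose the quartiles that the
# "grad-norm quartiles ... threshold=..." warnings report.
from collections import deque
import torch

class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 100):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        if not params:
            return 0.0
        total_norm = torch.norm(
            torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(total_norm)
        t = torch.tensor(list(self.norms))
        quartiles = torch.quantile(
            t, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * quartiles[2].item()  # 2x median
        if total_norm > threshold:
            for p in params:
                p.grad.mul_(threshold / total_norm)
        return threshold

# usage sketch: call clipper.clip_(model.parameters()) after backward()
```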
], tot_loss[loss=0.204, simple_loss=0.2893, pruned_loss=0.0594, over 940154.72 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:17:38,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120600.0, ans=0.125 +2024-07-28 06:17:46,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=120613.33333333333, ans=0.025 +2024-07-28 06:17:48,025 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.743e+01 6.194e+01 7.423e+01 1.120e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 06:17:49,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120613.33333333333, ans=0.1 +2024-07-28 06:17:59,181 INFO [train.py:1114] (1/4) Epoch 9, batch 8700, loss[loss=0.219, simple_loss=0.2999, pruned_loss=0.06902, over 4762.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2907, pruned_loss=0.06022, over 938181.94 frames. ], batch size: 13, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:17:59,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-07-28 06:18:02,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=120640.0, ans=0.2 +2024-07-28 06:18:02,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=120640.0, ans=0.125 +2024-07-28 06:18:09,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=120653.33333333333, ans=0.125 +2024-07-28 06:18:11,451 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=12.0 +2024-07-28 06:18:13,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=120666.66666666667, ans=0.125 +2024-07-28 06:18:30,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.74 vs. limit=15.0 +2024-07-28 06:18:33,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.05 vs. limit=15.0 +2024-07-28 06:18:34,226 INFO [train.py:1114] (1/4) Epoch 9, batch 8750, loss[loss=0.2225, simple_loss=0.3047, pruned_loss=0.07018, over 4696.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2905, pruned_loss=0.05996, over 937074.11 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:18:41,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.64 vs. 
limit=22.5 +2024-07-28 06:18:42,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120720.0, ans=0.125 +2024-07-28 06:18:46,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=120733.33333333333, ans=0.0 +2024-07-28 06:18:48,056 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:18:51,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120733.33333333333, ans=0.0 +2024-07-28 06:18:54,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.92 vs. limit=12.0 +2024-07-28 06:18:56,339 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.820e+01 6.301e+01 7.114e+01 1.037e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 06:19:06,415 INFO [train.py:1114] (1/4) Epoch 9, batch 8800, loss[loss=0.1984, simple_loss=0.2894, pruned_loss=0.05375, over 4929.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2911, pruned_loss=0.06001, over 938053.18 frames. ], batch size: 14, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:19:15,366 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:19:15,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=120786.66666666667, ans=0.5 +2024-07-28 06:19:24,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=120800.0, ans=0.125 +2024-07-28 06:19:26,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=120813.33333333333, ans=0.125 +2024-07-28 06:19:27,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=120813.33333333333, ans=0.05 +2024-07-28 06:19:32,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0 +2024-07-28 06:19:34,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=120826.66666666667, ans=0.125 +2024-07-28 06:19:39,896 INFO [train.py:1114] (1/4) Epoch 9, batch 8850, loss[loss=0.2118, simple_loss=0.3028, pruned_loss=0.06036, over 4542.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2896, pruned_loss=0.05942, over 933142.84 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:19:59,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=120880.0, ans=0.2 +2024-07-28 06:20:02,408 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.678e+01 6.367e+01 7.332e+01 1.676e+02, threshold=1.273e+02, percent-clipped=2.0 +2024-07-28 06:20:13,208 INFO [train.py:1114] (1/4) Epoch 9, batch 8900, loss[loss=0.1812, simple_loss=0.2621, pruned_loss=0.05015, over 4937.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2902, pruned_loss=0.05964, over 931114.70 frames. 
], batch size: 12, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:20:20,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=120920.0, ans=0.2 +2024-07-28 06:20:29,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=120933.33333333333, ans=12.0 +2024-07-28 06:20:49,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=120960.0, ans=0.125 +2024-07-28 06:20:58,023 INFO [train.py:1114] (1/4) Epoch 9, batch 8950, loss[loss=0.2494, simple_loss=0.3333, pruned_loss=0.08278, over 4539.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2903, pruned_loss=0.05975, over 931692.39 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:20:58,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=120973.33333333333, ans=0.0 +2024-07-28 06:21:03,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.87 vs. limit=15.0 +2024-07-28 06:21:08,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0 +2024-07-28 06:21:19,652 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.816e+01 6.215e+01 7.468e+01 1.036e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 06:21:22,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121013.33333333333, ans=0.125 +2024-07-28 06:21:23,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0 +2024-07-28 06:21:29,708 INFO [train.py:1114] (1/4) Epoch 9, batch 9000, loss[loss=0.2371, simple_loss=0.3143, pruned_loss=0.07997, over 4641.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2897, pruned_loss=0.05946, over 934333.12 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:21:29,709 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 06:21:45,146 INFO [train.py:1146] (1/4) Epoch 9, validation: loss=0.1749, simple_loss=0.2792, pruned_loss=0.03531, over 944034.00 frames. +2024-07-28 06:21:45,147 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 06:21:50,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=121040.0, ans=0.2 +2024-07-28 06:21:52,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.32 vs. limit=15.0 +2024-07-28 06:22:02,344 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=8.0 +2024-07-28 06:22:13,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=121066.66666666667, ans=0.2 +2024-07-28 06:22:15,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=121066.66666666667, ans=0.2 +2024-07-28 06:22:35,216 INFO [train.py:1114] (1/4) Epoch 9, batch 9050, loss[loss=0.2102, simple_loss=0.2813, pruned_loss=0.06949, over 4556.00 frames. 
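At batch 9000 the log briefly switches from training to "Computing validation loss" and prints a single figure, `validation: loss=0.1749 ... over 944034.00 frames`, i.e. losses accumulated across the whole validation set and normalized by the total frame count. A generic sketch of that pattern follows; the batch keys and model call are assumptions for illustration, not `train.py`'s actual interface.

```python
# Illustrative sketch of the validation pass summarized in the log:
# sum per-batch losses weighted by frame counts, then normalize,
# mirroring "validation: loss=... over N frames". Names are invented.
import torch

@torch.no_grad()
def compute_validation_loss(model, dataloader, device="cpu"):
    model.eval()
    total_loss, total_frames = 0.0, 0.0
    for batch in dataloader:
        feats = batch["features"].to(device)  # assumed batch layout
        frames = float(batch["num_frames"])   # frames in this batch
        loss = model(feats)                   # assumed scalar loss
        total_loss += loss.item() * frames
        total_frames += frames
    model.train()
    return total_loss / max(total_frames, 1.0)
```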
], tot_loss[loss=0.2035, simple_loss=0.2889, pruned_loss=0.05909, over 934362.30 frames. ], batch size: 10, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:22:35,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=121106.66666666667, ans=0.0 +2024-07-28 06:22:41,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.65 vs. limit=15.0 +2024-07-28 06:22:54,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121120.0, ans=0.1 +2024-07-28 06:22:55,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=121133.33333333333, ans=0.125 +2024-07-28 06:23:12,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121146.66666666667, ans=0.1 +2024-07-28 06:23:13,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.63 vs. limit=6.0 +2024-07-28 06:23:15,945 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.675e+01 6.570e+01 7.797e+01 1.121e+02, threshold=1.314e+02, percent-clipped=0.0 +2024-07-28 06:23:32,979 INFO [train.py:1114] (1/4) Epoch 9, batch 9100, loss[loss=0.1982, simple_loss=0.2944, pruned_loss=0.05101, over 4928.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2895, pruned_loss=0.05905, over 936866.84 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:23:41,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=121173.33333333333, ans=0.025 +2024-07-28 06:23:43,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=121186.66666666667, ans=0.2 +2024-07-28 06:23:44,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=121186.66666666667, ans=0.07 +2024-07-28 06:23:46,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=121186.66666666667, ans=0.0 +2024-07-28 06:24:11,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=22.5 +2024-07-28 06:24:12,178 INFO [train.py:1114] (1/4) Epoch 9, batch 9150, loss[loss=0.2017, simple_loss=0.2972, pruned_loss=0.05307, over 4805.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2907, pruned_loss=0.05934, over 935729.06 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:24:12,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=121240.0, ans=0.2 +2024-07-28 06:24:37,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.13 vs. limit=15.0 +2024-07-28 06:24:58,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.21 vs. 
limit=10.0 +2024-07-28 06:24:59,225 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.215e+01 7.054e+01 1.564e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 06:25:05,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=121293.33333333333, ans=0.0 +2024-07-28 06:25:06,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.39 vs. limit=22.5 +2024-07-28 06:25:10,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=121306.66666666667, ans=0.125 +2024-07-28 06:25:10,703 INFO [train.py:1114] (1/4) Epoch 9, batch 9200, loss[loss=0.1541, simple_loss=0.2463, pruned_loss=0.03096, over 4849.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2904, pruned_loss=0.05928, over 938017.68 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:25:19,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0 +2024-07-28 06:25:25,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=121306.66666666667, ans=0.0 +2024-07-28 06:25:26,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=121306.66666666667, ans=0.125 +2024-07-28 06:25:33,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121320.0, ans=0.1 +2024-07-28 06:25:54,969 INFO [train.py:1114] (1/4) Epoch 9, batch 9250, loss[loss=0.2201, simple_loss=0.3005, pruned_loss=0.0699, over 4639.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2911, pruned_loss=0.05965, over 938471.07 frames. ], batch size: 13, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:25:55,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121373.33333333333, ans=0.125 +2024-07-28 06:25:56,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=121373.33333333333, ans=0.2 +2024-07-28 06:25:58,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=121373.33333333333, ans=0.0 +2024-07-28 06:26:14,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=12.0 +2024-07-28 06:26:16,172 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.765e+01 6.275e+01 7.273e+01 1.016e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 06:26:25,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121426.66666666667, ans=0.125 +2024-07-28 06:26:25,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=121426.66666666667, ans=0.125 +2024-07-28 06:26:26,993 INFO [train.py:1114] (1/4) Epoch 9, batch 9300, loss[loss=0.194, simple_loss=0.2696, pruned_loss=0.05918, over 4776.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2905, pruned_loss=0.05949, over 938305.22 frames. 
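The `Whitening` lines (`metric=... vs. limit=...`) compare a measure of how far a module's activations are from "white" (decorrelated, equal-variance) against a per-module limit; a penalty is applied only when the metric exceeds the limit. The exact metric is defined in `scaling.py`; the sketch below is one plausible flavor of such a check, based on the eigenvalue spread of the feature covariance, and is an assumption rather than the actual formula.

```python
# Hedged sketch of a "whiteness" metric in the spirit of the
# Whitening log lines: 1.0 means perfectly white features, larger
# values mean more uneven covariance eigenvalues. The real metric
# in scaling.py may be defined differently.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)  # real eigenvalues, ascending
    return float((eigs ** 2).mean() / eigs.mean() ** 2)

torch.manual_seed(0)
mixed = torch.randn(1000, 384) @ torch.randn(384, 384)  # correlated
print(whitening_metric(mixed))              # >> 1, far from white
print(whitening_metric(torch.randn(1000, 384)))  # close to 1
```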
], batch size: 12, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:26:27,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-07-28 06:26:29,021 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:26:45,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=121453.33333333333, ans=0.07 +2024-07-28 06:26:54,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=121466.66666666667, ans=0.05 +2024-07-28 06:27:01,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=121480.0, ans=0.0 +2024-07-28 06:27:27,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121493.33333333333, ans=0.1 +2024-07-28 06:27:30,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=121493.33333333333, ans=0.05 +2024-07-28 06:27:34,635 INFO [train.py:1114] (1/4) Epoch 9, batch 9350, loss[loss=0.1551, simple_loss=0.2392, pruned_loss=0.03545, over 4809.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2902, pruned_loss=0.05945, over 935030.34 frames. ], batch size: 11, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:27:43,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121506.66666666667, ans=0.125 +2024-07-28 06:27:50,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121520.0, ans=0.1 +2024-07-28 06:27:54,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=121533.33333333333, ans=0.04949747468305833 +2024-07-28 06:27:55,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.14 vs. limit=22.5 +2024-07-28 06:28:01,020 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.677e+01 6.203e+01 7.268e+01 1.059e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 06:28:15,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=121560.0, ans=0.2 +2024-07-28 06:28:20,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=121573.33333333333, ans=0.125 +2024-07-28 06:28:21,358 INFO [train.py:1114] (1/4) Epoch 9, batch 9400, loss[loss=0.2264, simple_loss=0.3033, pruned_loss=0.07475, over 4691.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2904, pruned_loss=0.05972, over 933269.68 frames. ], batch size: 13, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:28:24,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=121573.33333333333, ans=0.0 +2024-07-28 06:28:30,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.19 vs. 
limit=22.5 +2024-07-28 06:28:31,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121586.66666666667, ans=0.125 +2024-07-28 06:28:38,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=121600.0, ans=0.05 +2024-07-28 06:28:39,906 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:28:45,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=121613.33333333333, ans=0.125 +2024-07-28 06:28:52,886 INFO [train.py:1114] (1/4) Epoch 9, batch 9450, loss[loss=0.1705, simple_loss=0.2492, pruned_loss=0.04592, over 4833.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2904, pruned_loss=0.05968, over 932658.07 frames. ], batch size: 11, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:28:59,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121653.33333333333, ans=0.0 +2024-07-28 06:29:00,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=121653.33333333333, ans=0.2 +2024-07-28 06:29:01,624 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:29:10,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=121666.66666666667, ans=0.04949747468305833 +2024-07-28 06:29:13,874 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.626e+01 6.016e+01 7.236e+01 1.280e+02, threshold=1.203e+02, percent-clipped=1.0 +2024-07-28 06:29:18,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=12.69 vs. limit=15.0 +2024-07-28 06:29:24,030 INFO [train.py:1114] (1/4) Epoch 9, batch 9500, loss[loss=0.1587, simple_loss=0.2483, pruned_loss=0.03451, over 4704.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2902, pruned_loss=0.05871, over 934973.90 frames. ], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:29:29,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121720.0, ans=0.0 +2024-07-28 06:29:51,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=121746.66666666667, ans=0.125 +2024-07-28 06:29:59,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=121760.0, ans=0.2 +2024-07-28 06:30:02,242 INFO [train.py:1114] (1/4) Epoch 9, batch 9550, loss[loss=0.2057, simple_loss=0.295, pruned_loss=0.05819, over 4776.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2905, pruned_loss=0.05906, over 932145.97 frames. 
], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:13,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=121786.66666666667, ans=0.125 +2024-07-28 06:30:14,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=121800.0, ans=0.0 +2024-07-28 06:30:21,299 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:30:23,657 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.774e+01 6.473e+01 7.553e+01 1.235e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 06:30:24,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=121813.33333333333, ans=0.0 +2024-07-28 06:30:24,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121813.33333333333, ans=0.1 +2024-07-28 06:30:35,251 INFO [train.py:1114] (1/4) Epoch 9, batch 9600, loss[loss=0.2802, simple_loss=0.3262, pruned_loss=0.1171, over 3262.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2911, pruned_loss=0.05943, over 931010.51 frames. ], batch size: 35, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:31:03,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=121866.66666666667, ans=0.0 +2024-07-28 06:31:10,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=121880.0, ans=0.125 +2024-07-28 06:31:12,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=121893.33333333333, ans=0.125 +2024-07-28 06:31:17,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=121893.33333333333, ans=0.2 +2024-07-28 06:31:18,862 INFO [train.py:1114] (1/4) Epoch 9, batch 9650, loss[loss=0.2366, simple_loss=0.3087, pruned_loss=0.0823, over 4833.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2918, pruned_loss=0.06045, over 926318.04 frames. ], batch size: 16, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:31:19,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.56 vs. 
limit=15.0 +2024-07-28 06:31:31,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121906.66666666667, ans=0.1 +2024-07-28 06:31:31,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=121906.66666666667, ans=0.2 +2024-07-28 06:31:41,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=121933.33333333333, ans=0.125 +2024-07-28 06:31:53,654 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.799e+01 6.411e+01 7.286e+01 1.019e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 06:31:54,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121946.66666666667, ans=0.125 +2024-07-28 06:31:58,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=121960.0, ans=0.05 +2024-07-28 06:31:58,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.22 vs. limit=15.0 +2024-07-28 06:32:11,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121960.0, ans=0.1 +2024-07-28 06:32:14,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=121960.0, ans=0.2 +2024-07-28 06:32:15,509 INFO [train.py:1114] (1/4) Epoch 9, batch 9700, loss[loss=0.2238, simple_loss=0.3056, pruned_loss=0.071, over 4316.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2913, pruned_loss=0.06028, over 924659.88 frames. ], batch size: 25, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:28,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=122000.0, ans=0.0 +2024-07-28 06:32:30,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=122000.0, ans=0.0 +2024-07-28 06:32:39,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122013.33333333333, ans=0.125 +2024-07-28 06:32:43,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.62 vs. limit=15.0 +2024-07-28 06:32:44,052 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:32:51,286 INFO [train.py:1114] (1/4) Epoch 9, batch 9750, loss[loss=0.2033, simple_loss=0.2952, pruned_loss=0.05573, over 4691.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2903, pruned_loss=0.05971, over 924965.18 frames. 
], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:53,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122040.0, ans=0.125 +2024-07-28 06:33:04,501 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:33:05,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122053.33333333333, ans=0.1 +2024-07-28 06:33:18,531 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.645e+01 5.595e+01 6.071e+01 7.420e+01 1.003e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 06:33:19,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=122080.0, ans=0.0 +2024-07-28 06:33:23,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122093.33333333333, ans=0.125 +2024-07-28 06:33:25,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.34 vs. limit=6.0 +2024-07-28 06:33:28,406 INFO [train.py:1114] (1/4) Epoch 9, batch 9800, loss[loss=0.1962, simple_loss=0.2849, pruned_loss=0.05378, over 4703.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2899, pruned_loss=0.0597, over 924731.09 frames. ], batch size: 12, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:33:35,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=122106.66666666667, ans=0.125 +2024-07-28 06:33:43,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=122120.0, ans=0.5 +2024-07-28 06:33:46,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.75 vs. limit=10.0 +2024-07-28 06:33:48,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=122133.33333333333, ans=0.0 +2024-07-28 06:33:48,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=122133.33333333333, ans=0.125 +2024-07-28 06:33:50,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122133.33333333333, ans=0.1 +2024-07-28 06:33:53,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122146.66666666667, ans=0.1 +2024-07-28 06:33:54,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=122146.66666666667, ans=0.04949747468305833 +2024-07-28 06:34:02,602 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.15 vs. limit=22.5 +2024-07-28 06:34:10,938 INFO [train.py:1114] (1/4) Epoch 9, batch 9850, loss[loss=0.2249, simple_loss=0.3058, pruned_loss=0.07201, over 4913.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2899, pruned_loss=0.06001, over 927041.13 frames. 
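Throughout these lines the per-batch objective satisfies `loss ≈ 0.5 * simple_loss + pruned_loss` (e.g. 0.5 × 0.2849 + 0.05378 ≈ 0.1962 in the batch 9800 entry above), and `tot_loss` is reported as an average "over N frames". A sketch of a frames-weighted tracker consistent with those numbers; the 0.5 weight is read off the logged values, the class itself is illustrative:

```python
class FramesWeightedTracker:
    """Accumulates frame-weighted averages like the tot_loss[...] fields."""

    def __init__(self):
        self.frames = 0.0
        self.sums = {"loss": 0.0, "simple_loss": 0.0, "pruned_loss": 0.0}

    def update(self, num_frames: float, simple_loss: float, pruned_loss: float):
        loss = 0.5 * simple_loss + pruned_loss  # weighting observed in these logs
        self.sums["loss"] += loss * num_frames
        self.sums["simple_loss"] += simple_loss * num_frames
        self.sums["pruned_loss"] += pruned_loss * num_frames
        self.frames += num_frames

    def averages(self) -> dict:
        return {k: s / self.frames for k, s in self.sums.items()}

t = FramesWeightedTracker()
t.update(4703.0, simple_loss=0.2849, pruned_loss=0.05378)
print(t.averages()["loss"])  # -> ~0.1962, matching the logged loss
```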
], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:34:18,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=122186.66666666667, ans=0.125 +2024-07-28 06:34:25,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122200.0, ans=0.1 +2024-07-28 06:34:30,944 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.18 vs. limit=22.5 +2024-07-28 06:34:32,418 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.834e+01 6.555e+01 7.421e+01 1.036e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-28 06:34:40,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=122226.66666666667, ans=0.125 +2024-07-28 06:34:42,487 INFO [train.py:1114] (1/4) Epoch 9, batch 9900, loss[loss=0.2312, simple_loss=0.3126, pruned_loss=0.0749, over 4858.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2914, pruned_loss=0.06047, over 926254.52 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:34:42,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122240.0, ans=0.1 +2024-07-28 06:34:45,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=122240.0, ans=0.125 +2024-07-28 06:34:49,779 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.36 vs. limit=6.0 +2024-07-28 06:34:51,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.48 vs. limit=15.0 +2024-07-28 06:34:54,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-28 06:34:54,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=8.0 +2024-07-28 06:34:56,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122266.66666666667, ans=0.1 +2024-07-28 06:34:56,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 06:35:01,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122280.0, ans=0.0 +2024-07-28 06:35:04,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122280.0, ans=0.125 +2024-07-28 06:35:26,858 INFO [train.py:1114] (1/4) Epoch 9, batch 9950, loss[loss=0.1876, simple_loss=0.2595, pruned_loss=0.05788, over 4819.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2921, pruned_loss=0.06118, over 929819.45 frames. 
], batch size: 11, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:35:31,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=122306.66666666667, ans=0.0 +2024-07-28 06:35:43,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.09 vs. limit=12.0 +2024-07-28 06:35:46,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-07-28 06:35:49,830 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.887e+01 6.567e+01 7.886e+01 1.035e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 06:35:54,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=122360.0, ans=0.125 +2024-07-28 06:35:55,105 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.89 vs. limit=12.0 +2024-07-28 06:35:56,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=122360.0, ans=0.125 +2024-07-28 06:36:00,348 INFO [train.py:1114] (1/4) Epoch 9, batch 10000, loss[loss=0.2417, simple_loss=0.3244, pruned_loss=0.07952, over 4659.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2955, pruned_loss=0.06197, over 927740.81 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:23,178 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:36:24,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.71 vs. limit=12.0 +2024-07-28 06:36:32,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122413.33333333333, ans=0.125 +2024-07-28 06:36:36,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=122426.66666666667, ans=0.2 +2024-07-28 06:36:42,689 INFO [train.py:1114] (1/4) Epoch 9, batch 10050, loss[loss=0.2423, simple_loss=0.3156, pruned_loss=0.08446, over 3515.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2999, pruned_loss=0.06441, over 915251.37 frames. 
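In the `optim.py:487` warnings above, the five quartile values are (min, 25%, median, 75%, max) of recent gradient norms, and the logged `threshold` is exactly `Clipping_scale` times the median (e.g. 2.0 × 6.567e+01 = 1.313e+02). A hedged sketch of clipping against such a history-derived threshold; the buffer size and bookkeeping are assumptions, not the recipe's code:

```python
from collections import deque

import torch

class GradNormClipper:
    """Clip gradients against clipping_scale * median of recent grad norms."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 1024):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)
        self.num_clipped = 0
        self.num_steps = 0

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])
        ).item()
        self.norms.append(norm)
        self.num_steps += 1
        hist = torch.tensor(list(self.norms))
        threshold = self.clipping_scale * hist.median().item()
        if norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        return norm

    def quartiles(self) -> torch.Tensor:
        # (min, 25%, median, 75%, max), as printed in the warnings above
        hist = torch.tensor(list(self.norms))
        return torch.quantile(hist, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
```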
], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:45,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=122440.0, ans=0.05 +2024-07-28 06:36:47,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=122440.0, ans=0.0 +2024-07-28 06:36:53,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122453.33333333333, ans=0.0 +2024-07-28 06:36:53,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=122453.33333333333, ans=0.125 +2024-07-28 06:36:58,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=122466.66666666667, ans=0.0 +2024-07-28 06:37:06,684 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.481e+01 7.122e+01 8.299e+01 1.409e+02, threshold=1.424e+02, percent-clipped=1.0 +2024-07-28 06:37:14,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122493.33333333333, ans=0.1 +2024-07-28 06:37:16,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122493.33333333333, ans=0.125 +2024-07-28 06:37:18,576 INFO [train.py:1114] (1/4) Epoch 9, batch 10100, loss[loss=0.3081, simple_loss=0.3612, pruned_loss=0.1275, over 3065.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3058, pruned_loss=0.07114, over 861809.40 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:37:22,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=122506.66666666667, ans=0.125 +2024-07-28 06:37:22,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=122506.66666666667, ans=0.125 +2024-07-28 06:37:28,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=122520.0, ans=10.0 +2024-07-28 06:37:35,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122533.33333333333, ans=0.1 +2024-07-28 06:37:36,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=122533.33333333333, ans=0.0 +2024-07-28 06:37:39,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.62 vs. limit=15.0 +2024-07-28 06:37:40,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=122546.66666666667, ans=0.125 +2024-07-28 06:37:52,174 INFO [train.py:1114] (1/4) Epoch 9, batch 10150, loss[loss=0.2419, simple_loss=0.329, pruned_loss=0.07737, over 3490.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3091, pruned_loss=0.07487, over 819628.53 frames. 
], batch size: 35, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:38:05,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122586.66666666667, ans=0.125 +2024-07-28 06:38:07,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=122586.66666666667, ans=0.0 +2024-07-28 06:38:14,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.48 vs. limit=15.0 +2024-07-28 06:38:19,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=122613.33333333333, ans=0.125 +2024-07-28 06:38:19,557 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.853e+01 6.506e+01 6.978e+01 7.406e+01 9.051e+01, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 06:38:22,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=122626.66666666667, ans=0.035 +2024-07-28 06:38:22,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=122626.66666666667, ans=0.125 +2024-07-28 06:38:25,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122626.66666666667, ans=0.0 +2024-07-28 06:39:22,384 INFO [train.py:1114] (1/4) Epoch 9, batch 10200, loss[loss=0.2456, simple_loss=0.3214, pruned_loss=0.08483, over 3364.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3114, pruned_loss=0.07778, over 788436.02 frames. ], batch size: 35, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:39:28,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122653.33333333333, ans=0.1 +2024-07-28 06:39:29,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=122653.33333333333, ans=0.0 +2024-07-28 06:41:02,955 INFO [train.py:1114] (1/4) Epoch 10, batch 0, loss[loss=0.1797, simple_loss=0.2725, pruned_loss=0.04347, over 4851.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2725, pruned_loss=0.04347, over 4851.00 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:41:02,956 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 06:41:14,704 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.1773, simple_loss=0.2829, pruned_loss=0.03584, over 944034.00 frames. +2024-07-28 06:41:14,705 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 06:41:19,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.48 vs. 
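The `Computing validation loss ... validation: loss=0.1773 ... over 944034.00 frames` and `Maximum memory allocated so far is 4129MB` lines above correspond to a per-epoch validation pass plus a peak-CUDA-memory readout. A sketch under assumed interfaces: the model/loader API here is a placeholder, while `torch.cuda.max_memory_allocated` is the real PyTorch call:

```python
import torch

def compute_validation_loss(model, valid_loader, device) -> float:
    """Frames-weighted validation loss, as in the 'validation: loss=...' lines."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)  # hypothetical interface
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    if torch.cuda.is_available():
        mib = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        print(f"Maximum memory allocated so far is {mib}MB")
    model.train()
    return tot_loss / tot_frames  # the "over N frames" average
```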
limit=12.0 +2024-07-28 06:41:25,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=122682.66666666667, ans=0.125 +2024-07-28 06:41:25,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=122682.66666666667, ans=0.125 +2024-07-28 06:41:31,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122696.0, ans=0.1 +2024-07-28 06:41:34,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122696.0, ans=0.1 +2024-07-28 06:41:51,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=122722.66666666667, ans=0.0 +2024-07-28 06:41:58,941 INFO [train.py:1114] (1/4) Epoch 10, batch 50, loss[loss=0.1912, simple_loss=0.2733, pruned_loss=0.05454, over 4609.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2903, pruned_loss=0.0602, over 206452.61 frames. ], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:04,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=122749.33333333333, ans=0.025 +2024-07-28 06:42:06,807 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.950e+01 6.646e+01 7.258e+01 1.106e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 06:42:10,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=122749.33333333333, ans=0.025 +2024-07-28 06:42:21,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=122749.33333333333, ans=0.125 +2024-07-28 06:42:26,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122762.66666666667, ans=0.1 +2024-07-28 06:42:35,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=122776.0, ans=0.09899494936611666 +2024-07-28 06:42:46,654 INFO [train.py:1114] (1/4) Epoch 10, batch 100, loss[loss=0.1845, simple_loss=0.2712, pruned_loss=0.04887, over 4642.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.295, pruned_loss=0.06089, over 365358.27 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:50,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122802.66666666667, ans=0.1 +2024-07-28 06:43:00,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=122816.0, ans=0.0 +2024-07-28 06:43:11,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.90 vs. limit=6.0 +2024-07-28 06:43:27,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=122842.66666666667, ans=0.0 +2024-07-28 06:43:35,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.66 vs. 
limit=10.0 +2024-07-28 06:43:39,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.48 vs. limit=22.5 +2024-07-28 06:43:40,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122856.0, ans=0.125 +2024-07-28 06:43:42,689 INFO [train.py:1114] (1/4) Epoch 10, batch 150, loss[loss=0.1582, simple_loss=0.241, pruned_loss=0.03769, over 4621.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2896, pruned_loss=0.05822, over 493658.26 frames. ], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:43:46,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122869.33333333333, ans=0.1 +2024-07-28 06:43:51,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.790e+01 6.360e+01 7.461e+01 1.069e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:44:04,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=122896.0, ans=0.025 +2024-07-28 06:44:32,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122922.66666666667, ans=0.125 +2024-07-28 06:44:33,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.87 vs. limit=15.0 +2024-07-28 06:44:37,851 INFO [train.py:1114] (1/4) Epoch 10, batch 200, loss[loss=0.2423, simple_loss=0.3347, pruned_loss=0.07495, over 4556.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2891, pruned_loss=0.05797, over 593157.76 frames. ], batch size: 21, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:44:51,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=122936.0, ans=0.0 +2024-07-28 06:44:52,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-07-28 06:44:55,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-07-28 06:45:00,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=122949.33333333333, ans=0.125 +2024-07-28 06:45:11,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=122962.66666666667, ans=0.025 +2024-07-28 06:45:19,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=122989.33333333333, ans=0.0 +2024-07-28 06:45:28,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=123002.66666666667, ans=10.0 +2024-07-28 06:45:28,934 INFO [train.py:1114] (1/4) Epoch 10, batch 250, loss[loss=0.1985, simple_loss=0.2825, pruned_loss=0.05725, over 4627.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2904, pruned_loss=0.05901, over 669832.33 frames. 
], batch size: 16, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:45:29,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=123002.66666666667, ans=0.035 +2024-07-28 06:45:31,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=123002.66666666667, ans=0.125 +2024-07-28 06:45:36,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.08 vs. limit=15.0 +2024-07-28 06:45:36,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123016.0, ans=0.1 +2024-07-28 06:45:38,191 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.690e+01 6.559e+01 7.773e+01 1.314e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 06:45:43,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=123016.0, ans=0.125 +2024-07-28 06:45:45,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123029.33333333333, ans=0.125 +2024-07-28 06:45:47,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=15.0 +2024-07-28 06:46:02,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123056.0, ans=0.125 +2024-07-28 06:46:08,655 INFO [train.py:1114] (1/4) Epoch 10, batch 300, loss[loss=0.2052, simple_loss=0.3029, pruned_loss=0.0537, over 4808.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2899, pruned_loss=0.0588, over 729543.53 frames. ], batch size: 15, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:46:15,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=123082.66666666667, ans=0.0 +2024-07-28 06:46:19,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=123082.66666666667, ans=0.2 +2024-07-28 06:46:27,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=123096.0, ans=0.125 +2024-07-28 06:46:28,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=123096.0, ans=0.04949747468305833 +2024-07-28 06:46:48,124 INFO [train.py:1114] (1/4) Epoch 10, batch 350, loss[loss=0.189, simple_loss=0.2733, pruned_loss=0.05236, over 4958.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2893, pruned_loss=0.05847, over 775638.93 frames. ], batch size: 12, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:46:48,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=123136.0, ans=0.125 +2024-07-28 06:46:49,099 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.40 vs. 
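The `Whitening: name=... metric=X vs. limit=Y` lines above compare a statistic of the per-group feature covariance against a limit. A metric of the form below equals 1.0 when the covariance is a multiple of the identity (fully "white") and grows with anisotropy, which is consistent with the logged values hovering near their limits; the exact formula is an assumption, not lifted from `scaling.py`:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """1.0 iff the per-group covariance of x is isotropic; larger otherwise."""
    n, c = x.shape
    cg = c // num_groups
    xg = x.reshape(n, num_groups, cg).permute(1, 2, 0)  # (groups, cg, frames)
    cov = torch.matmul(xg, xg.transpose(1, 2)) / n      # (groups, cg, cg)
    mean_diag = cov.diagonal(dim1=1, dim2=2).mean()
    # sum of squared entries equals num_groups * cg * mean_diag**2
    # exactly when every group covariance is mean_diag * identity
    return (cov ** 2).sum() / (num_groups * cg * mean_diag ** 2)

x = torch.randn(1000, 384)  # near-white data
print(whitening_metric(x, num_groups=1))  # -> close to 1.0
```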
limit=15.0 +2024-07-28 06:47:00,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.536e+01 6.033e+01 6.929e+01 1.043e+02, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 06:47:48,880 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=12.0 +2024-07-28 06:50:15,406 INFO [train.py:1114] (1/4) Epoch 10, batch 400, loss[loss=0.1717, simple_loss=0.2719, pruned_loss=0.03578, over 4696.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2891, pruned_loss=0.05826, over 813022.92 frames. ], batch size: 13, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:50:15,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.16 vs. limit=15.0 +2024-07-28 06:50:19,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=123202.66666666667, ans=0.0 +2024-07-28 06:50:46,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=123216.0, ans=0.125 +2024-07-28 06:52:21,000 INFO [train.py:1114] (1/4) Epoch 10, batch 450, loss[loss=0.1997, simple_loss=0.289, pruned_loss=0.05518, over 4643.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2891, pruned_loss=0.05813, over 838508.41 frames. ], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:52:33,583 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:52:46,131 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.561e+01 6.292e+01 7.345e+01 1.157e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 06:53:23,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=123296.0, ans=0.0 +2024-07-28 06:53:46,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=123309.33333333333, ans=0.0 +2024-07-28 06:54:48,129 INFO [train.py:1114] (1/4) Epoch 10, batch 500, loss[loss=0.2395, simple_loss=0.3234, pruned_loss=0.07778, over 4690.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.288, pruned_loss=0.05787, over 860985.58 frames. ], batch size: 15, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:54:57,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=123336.0, ans=0.2 +2024-07-28 06:55:15,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=123349.33333333333, ans=0.125 +2024-07-28 06:55:17,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123349.33333333333, ans=0.125 +2024-07-28 06:55:48,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=123376.0, ans=0.125 +2024-07-28 06:55:49,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=123376.0, ans=0.0 +2024-07-28 06:55:56,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.48 vs. 
limit=6.0 +2024-07-28 06:56:04,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=123389.33333333333, ans=0.125 +2024-07-28 06:56:13,167 INFO [train.py:1114] (1/4) Epoch 10, batch 550, loss[loss=0.1943, simple_loss=0.2933, pruned_loss=0.04769, over 4621.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2885, pruned_loss=0.05817, over 877246.88 frames. ], batch size: 17, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:56:15,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=123402.66666666667, ans=0.125 +2024-07-28 06:56:16,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=123402.66666666667, ans=0.2 +2024-07-28 06:56:21,996 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.657e+01 6.359e+01 7.249e+01 1.002e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:56:26,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=123416.0, ans=0.2 +2024-07-28 06:56:50,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.09 vs. limit=15.0 +2024-07-28 06:57:03,607 INFO [train.py:1114] (1/4) Epoch 10, batch 600, loss[loss=0.2411, simple_loss=0.3253, pruned_loss=0.07846, over 4618.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2886, pruned_loss=0.05815, over 891791.18 frames. ], batch size: 16, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:14,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0 +2024-07-28 06:57:14,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=123482.66666666667, ans=15.0 +2024-07-28 06:57:19,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.24 vs. limit=15.0 +2024-07-28 06:57:25,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=123509.33333333333, ans=15.0 +2024-07-28 06:57:31,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123509.33333333333, ans=0.1 +2024-07-28 06:57:38,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. limit=15.0 +2024-07-28 06:57:40,698 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-07-28 06:57:41,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=123536.0, ans=0.125 +2024-07-28 06:57:41,756 INFO [train.py:1114] (1/4) Epoch 10, batch 650, loss[loss=0.207, simple_loss=0.3049, pruned_loss=0.05457, over 4746.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2884, pruned_loss=0.05785, over 903497.26 frames. 
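The `grad_scale` field in the `train.py` entries is the dynamic loss-scaling factor of mixed-precision training; in this log it doubles from 32.0 to 64.0 (epoch 10, batch 350) after a stretch of stable steps and later returns to 32.0, matching the standard behavior of a dynamic scaler that grows periodically and halves on inf/nan gradients. A generic AMP step using PyTorch's real `GradScaler` API; the model/optimizer names and the initial scale are placeholders:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)  # init value assumed

def train_step(model, optimizer, batch):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(batch)  # hypothetical interface returning a scalar loss
    scaler.scale(loss).backward()  # backward on the scaled loss
    scaler.step(optimizer)         # skipped if inf/nan gradients were found
    scaler.update()                # grows the scale after stable steps
    return loss.detach(), scaler.get_scale()  # the logged grad_scale
```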
], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:47,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=123549.33333333333, ans=0.2 +2024-07-28 06:57:49,741 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.335e+01 5.819e+01 6.416e+01 7.118e+01 9.444e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 06:58:26,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=123576.0, ans=0.125 +2024-07-28 06:58:36,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123602.66666666667, ans=0.125 +2024-07-28 06:58:37,439 INFO [train.py:1114] (1/4) Epoch 10, batch 700, loss[loss=0.1645, simple_loss=0.2458, pruned_loss=0.04155, over 4644.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2877, pruned_loss=0.05773, over 911338.65 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 06:58:47,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=123616.0, ans=0.0 +2024-07-28 06:59:06,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=12.0 +2024-07-28 06:59:20,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.23 vs. limit=22.5 +2024-07-28 06:59:21,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123629.33333333333, ans=0.1 +2024-07-28 06:59:37,342 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.52 vs. limit=5.0 +2024-07-28 06:59:46,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.15 vs. limit=22.5 +2024-07-28 06:59:58,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=123656.0, ans=0.09899494936611666 +2024-07-28 06:59:59,345 INFO [train.py:1114] (1/4) Epoch 10, batch 750, loss[loss=0.2098, simple_loss=0.2998, pruned_loss=0.05991, over 4701.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2872, pruned_loss=0.05766, over 917929.11 frames. ], batch size: 13, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:00:01,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=123669.33333333333, ans=0.125 +2024-07-28 07:00:02,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=123669.33333333333, ans=0.125 +2024-07-28 07:00:07,404 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.598e+01 6.088e+01 6.743e+01 1.006e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:00:22,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123696.0, ans=0.1 +2024-07-28 07:01:01,970 INFO [train.py:1114] (1/4) Epoch 10, batch 800, loss[loss=0.1809, simple_loss=0.2648, pruned_loss=0.04852, over 4846.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2871, pruned_loss=0.05777, over 923013.34 frames. 
], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:03,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=123736.0, ans=0.125 +2024-07-28 07:01:09,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123749.33333333333, ans=0.125 +2024-07-28 07:01:12,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123749.33333333333, ans=0.1 +2024-07-28 07:01:18,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=123762.66666666667, ans=0.05 +2024-07-28 07:01:18,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123762.66666666667, ans=0.0 +2024-07-28 07:01:21,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123762.66666666667, ans=0.125 +2024-07-28 07:01:29,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=123776.0, ans=0.125 +2024-07-28 07:01:32,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=123789.33333333333, ans=0.125 +2024-07-28 07:01:38,555 INFO [train.py:1114] (1/4) Epoch 10, batch 850, loss[loss=0.2245, simple_loss=0.3115, pruned_loss=0.06872, over 4665.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2871, pruned_loss=0.05795, over 927319.95 frames. ], batch size: 14, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:46,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=123802.66666666667, ans=0.0 +2024-07-28 07:01:48,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.695e+01 6.333e+01 6.870e+01 1.740e+02, threshold=1.267e+02, percent-clipped=1.0 +2024-07-28 07:01:49,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=123816.0, ans=0.125 +2024-07-28 07:01:54,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123829.33333333333, ans=0.125 +2024-07-28 07:01:58,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=123829.33333333333, ans=0.025 +2024-07-28 07:02:21,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=123856.0, ans=0.0 +2024-07-28 07:02:22,327 INFO [train.py:1114] (1/4) Epoch 10, batch 900, loss[loss=0.2093, simple_loss=0.2939, pruned_loss=0.06236, over 4851.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2864, pruned_loss=0.05764, over 928348.57 frames. 
], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:02:34,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=123882.66666666667, ans=0.125 +2024-07-28 07:02:42,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123909.33333333333, ans=0.1 +2024-07-28 07:02:43,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.48 vs. limit=10.0 +2024-07-28 07:02:43,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=123909.33333333333, ans=0.04949747468305833 +2024-07-28 07:02:50,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.54 vs. limit=15.0 +2024-07-28 07:02:56,035 INFO [train.py:1114] (1/4) Epoch 10, batch 950, loss[loss=0.2002, simple_loss=0.2784, pruned_loss=0.06099, over 4762.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2868, pruned_loss=0.05781, over 930080.45 frames. ], batch size: 12, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:03:04,117 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.603e+01 6.108e+01 6.683e+01 9.503e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 07:03:14,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=123962.66666666667, ans=0.025 +2024-07-28 07:03:24,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=123989.33333333333, ans=0.025 +2024-07-28 07:03:26,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=12.0 +2024-07-28 07:03:29,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124002.66666666667, ans=0.1 +2024-07-28 07:03:29,721 INFO [train.py:1114] (1/4) Epoch 10, batch 1000, loss[loss=0.1711, simple_loss=0.2545, pruned_loss=0.04381, over 4956.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2884, pruned_loss=0.05883, over 929813.07 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:03:51,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=124002.66666666667, ans=0.125 +2024-07-28 07:03:58,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=124029.33333333333, ans=0.2 +2024-07-28 07:04:02,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=22.5 +2024-07-28 07:04:04,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=124029.33333333333, ans=0.0 +2024-07-28 07:04:23,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=124056.0, ans=0.07 +2024-07-28 07:04:25,365 INFO [train.py:1114] (1/4) Epoch 10, batch 1050, loss[loss=0.2114, simple_loss=0.3016, pruned_loss=0.06058, over 4877.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2871, pruned_loss=0.05778, over 931759.63 frames. 
], batch size: 14, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:04:25,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. limit=6.0 +2024-07-28 07:04:31,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=124069.33333333333, ans=0.0 +2024-07-28 07:04:55,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.602e+01 6.129e+01 7.252e+01 1.285e+02, threshold=1.226e+02, percent-clipped=1.0 +2024-07-28 07:04:55,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.84 vs. limit=22.5 +2024-07-28 07:05:08,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=124109.33333333333, ans=10.0 +2024-07-28 07:05:11,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0 +2024-07-28 07:05:12,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=124109.33333333333, ans=0.125 +2024-07-28 07:05:27,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=124122.66666666667, ans=0.2 +2024-07-28 07:05:33,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124122.66666666667, ans=0.0 +2024-07-28 07:05:39,258 INFO [train.py:1114] (1/4) Epoch 10, batch 1100, loss[loss=0.1997, simple_loss=0.2877, pruned_loss=0.05583, over 4887.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2857, pruned_loss=0.05712, over 934379.55 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 32.0 +2024-07-28 07:05:46,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=124149.33333333333, ans=0.025 +2024-07-28 07:05:50,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=124149.33333333333, ans=0.125 +2024-07-28 07:06:09,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124176.0, ans=0.0 +2024-07-28 07:06:13,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-07-28 07:06:23,649 INFO [train.py:1114] (1/4) Epoch 10, batch 1150, loss[loss=0.1762, simple_loss=0.2662, pruned_loss=0.04304, over 4901.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2854, pruned_loss=0.05703, over 934035.88 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:07:00,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.42 vs. 
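The learning rate in these lines decays slowly within an epoch (8.15e-03 → 8.12e-03 across epoch 9) and steps down at the epoch boundary (7.72e-03 at the start of epoch 10), consistent with a schedule discounted by both batch count and epoch count. A sketch of one such rule with illustrative constants; it is not claimed to reproduce the logged values exactly:

```python
def batch_epoch_lr(base_lr: float, batch: int, epoch: int,
                   lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    """LR discounted smoothly by batch count and stepped down per epoch."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

for epoch, batch in [(9, 121800), (9, 122600), (10, 122700)]:
    print(epoch, batch, batch_epoch_lr(0.045, batch, epoch))
```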
limit=22.5 +2024-07-28 07:07:00,533 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.669e+01 6.088e+01 6.784e+01 1.007e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:07:29,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=124256.0, ans=0.0 +2024-07-28 07:07:48,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.14 vs. limit=15.0 +2024-07-28 07:07:48,873 INFO [train.py:1114] (1/4) Epoch 10, batch 1200, loss[loss=0.234, simple_loss=0.3119, pruned_loss=0.07808, over 4877.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2871, pruned_loss=0.05754, over 933199.60 frames. ], batch size: 14, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:08:06,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=124282.66666666667, ans=0.2 +2024-07-28 07:08:11,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.38 vs. limit=22.5 +2024-07-28 07:08:14,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=124296.0, ans=0.0 +2024-07-28 07:08:31,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124322.66666666667, ans=0.0 +2024-07-28 07:08:32,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124322.66666666667, ans=0.0 +2024-07-28 07:08:41,844 INFO [train.py:1114] (1/4) Epoch 10, batch 1250, loss[loss=0.218, simple_loss=0.3054, pruned_loss=0.0653, over 4803.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2874, pruned_loss=0.0575, over 937165.55 frames. ], batch size: 15, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:08:45,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=124336.0, ans=0.0 +2024-07-28 07:08:48,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0 +2024-07-28 07:08:50,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124349.33333333333, ans=0.125 +2024-07-28 07:08:50,579 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.912e+01 6.433e+01 7.478e+01 1.098e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 07:08:54,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=124362.66666666667, ans=0.1 +2024-07-28 07:08:56,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=124362.66666666667, ans=0.0 +2024-07-28 07:09:05,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=124376.0, ans=0.025 +2024-07-28 07:09:17,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.75 vs. 
limit=10.0 +2024-07-28 07:09:18,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=124389.33333333333, ans=0.125 +2024-07-28 07:09:20,433 INFO [train.py:1114] (1/4) Epoch 10, batch 1300, loss[loss=0.1893, simple_loss=0.2691, pruned_loss=0.0548, over 4689.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2864, pruned_loss=0.05744, over 938642.27 frames. ], batch size: 19, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:09:28,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=124416.0, ans=0.2 +2024-07-28 07:09:37,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124416.0, ans=0.125 +2024-07-28 07:09:38,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=124416.0, ans=0.025 +2024-07-28 07:09:38,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=124416.0, ans=0.125 +2024-07-28 07:10:02,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.55 vs. limit=15.0 +2024-07-28 07:10:02,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.27 vs. limit=12.0 +2024-07-28 07:10:15,027 INFO [train.py:1114] (1/4) Epoch 10, batch 1350, loss[loss=0.1912, simple_loss=0.2869, pruned_loss=0.04778, over 4764.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2861, pruned_loss=0.05725, over 940686.34 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:10:23,757 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.516e+01 6.216e+01 7.014e+01 1.025e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 07:10:29,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=124496.0, ans=0.125 +2024-07-28 07:10:29,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124496.0, ans=0.1 +2024-07-28 07:10:35,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=124509.33333333333, ans=0.125 +2024-07-28 07:10:39,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124509.33333333333, ans=0.125 +2024-07-28 07:10:43,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124522.66666666667, ans=0.1 +2024-07-28 07:10:48,572 INFO [train.py:1114] (1/4) Epoch 10, batch 1400, loss[loss=0.1661, simple_loss=0.2415, pruned_loss=0.04533, over 4701.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2851, pruned_loss=0.05685, over 942362.28 frames. ], batch size: 11, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:10:49,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124536.0, ans=0.0 +2024-07-28 07:10:52,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.00 vs. 
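The many scheduled `*_skip_rate` values throughout these lines (`attention_skip_rate`, `conv_skip_rate`, `ff2_skip_rate`, `bypass.skip_rate`, `pos_emb_skip_rate`), mostly annealed to 0.0 by this point in training, suggest stochastic-depth-style training in which a submodule is bypassed with some probability early on. A generic sketch of that pattern; the residual wiring is an assumption:

```python
import torch

def maybe_skip(module, x: torch.Tensor, skip_rate: float,
               training: bool) -> torch.Tensor:
    """With probability skip_rate during training, bypass the submodule."""
    if training and float(torch.rand(())) < skip_rate:
        return x              # skip: keep only the residual/identity path
    return x + module(x)      # normal residual connection
```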
limit=15.0 +2024-07-28 07:10:54,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.81 vs. limit=22.5 +2024-07-28 07:10:54,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=124549.33333333333, ans=0.125 +2024-07-28 07:11:07,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=15.0 +2024-07-28 07:11:09,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=124576.0, ans=0.125 +2024-07-28 07:11:16,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=124589.33333333333, ans=0.125 +2024-07-28 07:11:21,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=124602.66666666667, ans=0.125 +2024-07-28 07:11:21,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=124602.66666666667, ans=0.0 +2024-07-28 07:11:21,988 INFO [train.py:1114] (1/4) Epoch 10, batch 1450, loss[loss=0.2032, simple_loss=0.3025, pruned_loss=0.052, over 4667.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2859, pruned_loss=0.05683, over 942593.88 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:11:30,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.685e+01 6.213e+01 7.325e+01 1.109e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 07:11:35,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124629.33333333333, ans=0.125 +2024-07-28 07:11:38,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=124629.33333333333, ans=0.2 +2024-07-28 07:11:50,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=124656.0, ans=0.05 +2024-07-28 07:11:57,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=124656.0, ans=0.07 +2024-07-28 07:11:59,749 INFO [train.py:1114] (1/4) Epoch 10, batch 1500, loss[loss=0.2322, simple_loss=0.3252, pruned_loss=0.06962, over 4814.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2865, pruned_loss=0.05709, over 942270.24 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:01,521 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.48 vs. limit=15.0 +2024-07-28 07:12:06,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.73 vs. 
limit=15.0 +2024-07-28 07:12:06,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=124682.66666666667, ans=0.125 +2024-07-28 07:12:08,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=124682.66666666667, ans=0.125 +2024-07-28 07:12:14,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=124696.0, ans=0.125 +2024-07-28 07:12:15,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=124696.0, ans=0.0 +2024-07-28 07:12:19,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=124709.33333333333, ans=0.0 +2024-07-28 07:12:20,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=124709.33333333333, ans=0.0 +2024-07-28 07:12:33,108 INFO [train.py:1114] (1/4) Epoch 10, batch 1550, loss[loss=0.224, simple_loss=0.3114, pruned_loss=0.06829, over 4902.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.288, pruned_loss=0.05827, over 938631.56 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:33,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=124736.0, ans=0.2 +2024-07-28 07:12:41,760 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.520e+01 6.164e+01 6.899e+01 9.824e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:12:52,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=124762.66666666667, ans=0.0 +2024-07-28 07:12:54,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.28 vs. limit=15.0 +2024-07-28 07:12:58,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=124776.0, ans=0.025 +2024-07-28 07:12:58,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=124776.0, ans=0.2 +2024-07-28 07:13:06,576 INFO [train.py:1114] (1/4) Epoch 10, batch 1600, loss[loss=0.1774, simple_loss=0.269, pruned_loss=0.04295, over 4867.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2883, pruned_loss=0.05872, over 937652.02 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:13:23,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=124829.33333333333, ans=0.025 +2024-07-28 07:13:29,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124842.66666666667, ans=0.125 +2024-07-28 07:13:40,191 INFO [train.py:1114] (1/4) Epoch 10, batch 1650, loss[loss=0.207, simple_loss=0.2976, pruned_loss=0.05818, over 4660.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2878, pruned_loss=0.0584, over 937343.82 frames. 
], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:13:41,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=124869.33333333333, ans=0.125 +2024-07-28 07:13:48,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.790e+01 6.415e+01 7.555e+01 1.180e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 07:13:53,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124896.0, ans=0.1 +2024-07-28 07:13:55,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=124896.0, ans=0.5 +2024-07-28 07:13:59,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=124896.0, ans=0.0 +2024-07-28 07:14:11,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=124922.66666666667, ans=0.125 +2024-07-28 07:14:16,907 INFO [train.py:1114] (1/4) Epoch 10, batch 1700, loss[loss=0.2079, simple_loss=0.2853, pruned_loss=0.0653, over 4701.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.288, pruned_loss=0.05811, over 939095.13 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:14:21,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124936.0, ans=0.125 +2024-07-28 07:14:33,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=124962.66666666667, ans=0.025 +2024-07-28 07:14:39,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=124976.0, ans=0.125 +2024-07-28 07:14:44,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-07-28 07:14:45,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124976.0, ans=0.1 +2024-07-28 07:14:47,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=124989.33333333333, ans=0.125 +2024-07-28 07:14:53,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=125002.66666666667, ans=0.025 +2024-07-28 07:14:53,743 INFO [train.py:1114] (1/4) Epoch 10, batch 1750, loss[loss=0.1886, simple_loss=0.2511, pruned_loss=0.06302, over 4811.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.288, pruned_loss=0.05792, over 940145.49 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:14:59,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125002.66666666667, ans=0.1 +2024-07-28 07:15:04,099 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.71 vs. 
limit=15.0 +2024-07-28 07:15:04,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.615e+01 6.197e+01 6.752e+01 9.322e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 07:15:05,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=125016.0, ans=0.2 +2024-07-28 07:15:31,046 INFO [train.py:1114] (1/4) Epoch 10, batch 1800, loss[loss=0.1906, simple_loss=0.2778, pruned_loss=0.05174, over 4631.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2883, pruned_loss=0.05828, over 940830.05 frames. ], batch size: 13, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:15:39,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=125082.66666666667, ans=0.025 +2024-07-28 07:15:45,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=125096.0, ans=0.0 +2024-07-28 07:15:57,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=125122.66666666667, ans=0.2 +2024-07-28 07:15:59,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.96 vs. limit=15.0 +2024-07-28 07:16:05,131 INFO [train.py:1114] (1/4) Epoch 10, batch 1850, loss[loss=0.2028, simple_loss=0.2913, pruned_loss=0.0572, over 4812.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2887, pruned_loss=0.05832, over 940454.48 frames. ], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:16:07,909 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.33 vs. limit=15.0 +2024-07-28 07:16:12,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=125149.33333333333, ans=0.125 +2024-07-28 07:16:13,065 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 07:16:14,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+01 5.824e+01 6.671e+01 8.109e+01 1.121e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 07:16:18,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=125149.33333333333, ans=0.125 +2024-07-28 07:16:37,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125176.0, ans=0.1 +2024-07-28 07:16:38,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=125189.33333333333, ans=0.125 +2024-07-28 07:16:39,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125189.33333333333, ans=0.125 +2024-07-28 07:16:45,547 INFO [train.py:1114] (1/4) Epoch 10, batch 1900, loss[loss=0.1934, simple_loss=0.2942, pruned_loss=0.04628, over 4666.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2882, pruned_loss=0.05758, over 941925.72 frames. 
], batch size: 14, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:16:47,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=125202.66666666667, ans=0.0 +2024-07-28 07:16:50,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.50 vs. limit=15.0 +2024-07-28 07:17:07,735 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:17:08,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=125229.33333333333, ans=0.0 +2024-07-28 07:17:13,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=125229.33333333333, ans=0.125 +2024-07-28 07:17:15,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=125242.66666666667, ans=0.0 +2024-07-28 07:17:19,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.27 vs. limit=10.0 +2024-07-28 07:17:25,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=125256.0, ans=0.0 +2024-07-28 07:17:28,515 INFO [train.py:1114] (1/4) Epoch 10, batch 1950, loss[loss=0.1893, simple_loss=0.2668, pruned_loss=0.05589, over 4901.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2891, pruned_loss=0.05752, over 943944.75 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:17:30,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125269.33333333333, ans=0.125 +2024-07-28 07:17:31,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.11 vs. limit=10.0 +2024-07-28 07:17:36,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=125282.66666666667, ans=0.2 +2024-07-28 07:17:37,346 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.662e+01 6.185e+01 7.189e+01 1.102e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 07:17:42,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=125296.0, ans=0.125 +2024-07-28 07:17:42,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=125296.0, ans=0.2 +2024-07-28 07:17:53,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=125309.33333333333, ans=0.125 +2024-07-28 07:18:00,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125322.66666666667, ans=0.125 +2024-07-28 07:18:03,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125322.66666666667, ans=0.1 +2024-07-28 07:18:03,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. 
limit=6.0 +2024-07-28 07:18:04,792 INFO [train.py:1114] (1/4) Epoch 10, batch 2000, loss[loss=0.1731, simple_loss=0.2585, pruned_loss=0.04387, over 4808.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2883, pruned_loss=0.05732, over 941250.23 frames. ], batch size: 11, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:18:13,314 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0 +2024-07-28 07:18:14,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.27 vs. limit=12.0 +2024-07-28 07:18:27,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.80 vs. limit=15.0 +2024-07-28 07:18:35,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=125389.33333333333, ans=0.125 +2024-07-28 07:18:37,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=125389.33333333333, ans=0.125 +2024-07-28 07:18:38,318 INFO [train.py:1114] (1/4) Epoch 10, batch 2050, loss[loss=0.1771, simple_loss=0.2668, pruned_loss=0.04369, over 4600.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.288, pruned_loss=0.05786, over 939350.02 frames. ], batch size: 11, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:18:46,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=125416.0, ans=0.2 +2024-07-28 07:18:47,041 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+01 5.685e+01 6.326e+01 7.286e+01 1.205e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 07:19:08,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 07:19:13,195 INFO [train.py:1114] (1/4) Epoch 10, batch 2100, loss[loss=0.1815, simple_loss=0.2838, pruned_loss=0.03958, over 4767.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2865, pruned_loss=0.0571, over 941285.80 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:19:14,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=125469.33333333333, ans=0.2 +2024-07-28 07:19:21,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=125482.66666666667, ans=0.125 +2024-07-28 07:19:21,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.17 vs. 
limit=15.0 +2024-07-28 07:19:31,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=125496.0, ans=15.0 +2024-07-28 07:19:33,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125509.33333333333, ans=0.125 +2024-07-28 07:19:35,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125509.33333333333, ans=0.125 +2024-07-28 07:19:41,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=125522.66666666667, ans=0.2 +2024-07-28 07:19:46,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=15.0 +2024-07-28 07:19:47,872 INFO [train.py:1114] (1/4) Epoch 10, batch 2150, loss[loss=0.1976, simple_loss=0.3014, pruned_loss=0.04692, over 4893.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2865, pruned_loss=0.05743, over 944304.39 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:19:50,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-07-28 07:19:51,360 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:19:53,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=125536.0, ans=0.125 +2024-07-28 07:19:56,663 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.831e+01 5.697e+01 6.227e+01 7.381e+01 1.023e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:20:01,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125549.33333333333, ans=0.1 +2024-07-28 07:20:05,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=125562.66666666667, ans=0.0 +2024-07-28 07:20:20,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=125576.0, ans=0.2 +2024-07-28 07:20:21,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125576.0, ans=0.125 +2024-07-28 07:20:28,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125589.33333333333, ans=0.125 +2024-07-28 07:20:30,843 INFO [train.py:1114] (1/4) Epoch 10, batch 2200, loss[loss=0.2398, simple_loss=0.3292, pruned_loss=0.07519, over 4806.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2878, pruned_loss=0.05804, over 943365.97 frames. 
], batch size: 14, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:20:34,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=125602.66666666667, ans=0.0 +2024-07-28 07:20:51,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125616.0, ans=0.125 +2024-07-28 07:20:51,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.40 vs. limit=22.5 +2024-07-28 07:20:54,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=125629.33333333333, ans=0.0 +2024-07-28 07:21:16,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=125642.66666666667, ans=0.2 +2024-07-28 07:21:17,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=125656.0, ans=0.0 +2024-07-28 07:21:26,428 INFO [train.py:1114] (1/4) Epoch 10, batch 2250, loss[loss=0.1964, simple_loss=0.2893, pruned_loss=0.05172, over 4696.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2869, pruned_loss=0.05744, over 942129.57 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:21:35,181 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.513e+01 5.590e+01 6.237e+01 6.942e+01 1.306e+02, threshold=1.247e+02, percent-clipped=1.0 +2024-07-28 07:22:03,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=125722.66666666667, ans=0.07 +2024-07-28 07:22:09,993 INFO [train.py:1114] (1/4) Epoch 10, batch 2300, loss[loss=0.1823, simple_loss=0.2645, pruned_loss=0.05008, over 4931.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2848, pruned_loss=0.05702, over 939796.51 frames. ], batch size: 12, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:10,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=125736.0, ans=0.125 +2024-07-28 07:22:21,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125749.33333333333, ans=0.125 +2024-07-28 07:22:26,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. 
limit=15.0 +2024-07-28 07:22:29,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125776.0, ans=0.125 +2024-07-28 07:22:36,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=125776.0, ans=0.2 +2024-07-28 07:22:36,685 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.571e-03 +2024-07-28 07:22:42,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=125789.33333333333, ans=0.2 +2024-07-28 07:22:43,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=125789.33333333333, ans=0.0 +2024-07-28 07:22:46,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=125789.33333333333, ans=0.0 +2024-07-28 07:22:47,877 INFO [train.py:1114] (1/4) Epoch 10, batch 2350, loss[loss=0.2308, simple_loss=0.3268, pruned_loss=0.06742, over 4638.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2848, pruned_loss=0.05716, over 941947.92 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:51,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=125802.66666666667, ans=0.0 +2024-07-28 07:22:55,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=125816.0, ans=0.0 +2024-07-28 07:22:56,458 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.493e+01 6.004e+01 6.754e+01 1.065e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 07:22:57,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=125816.0, ans=0.09899494936611666 +2024-07-28 07:23:15,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=125856.0, ans=0.0 +2024-07-28 07:23:15,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125856.0, ans=0.1 +2024-07-28 07:23:15,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=125856.0, ans=0.2 +2024-07-28 07:23:20,986 INFO [train.py:1114] (1/4) Epoch 10, batch 2400, loss[loss=0.1587, simple_loss=0.2411, pruned_loss=0.03817, over 4641.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.286, pruned_loss=0.05752, over 941480.83 frames. ], batch size: 12, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:23:21,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125869.33333333333, ans=0.1 +2024-07-28 07:23:22,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. 
limit=6.0 +2024-07-28 07:23:39,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=125896.0, ans=0.0 +2024-07-28 07:23:39,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125896.0, ans=0.1 +2024-07-28 07:23:53,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125922.66666666667, ans=0.1 +2024-07-28 07:23:53,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125936.0, ans=0.1 +2024-07-28 07:23:54,377 INFO [train.py:1114] (1/4) Epoch 10, batch 2450, loss[loss=0.1892, simple_loss=0.2833, pruned_loss=0.04756, over 4701.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2876, pruned_loss=0.05803, over 937693.58 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:23:59,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=125936.0, ans=0.04949747468305833 +2024-07-28 07:24:01,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=125949.33333333333, ans=0.125 +2024-07-28 07:24:02,997 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.824e+01 6.375e+01 7.344e+01 1.011e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 07:24:03,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=125949.33333333333, ans=0.0 +2024-07-28 07:24:15,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=125976.0, ans=0.125 +2024-07-28 07:24:16,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.52 vs. limit=15.0 +2024-07-28 07:24:17,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=125976.0, ans=0.0 +2024-07-28 07:24:21,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125989.33333333333, ans=0.125 +2024-07-28 07:24:27,188 INFO [train.py:1114] (1/4) Epoch 10, batch 2500, loss[loss=0.2031, simple_loss=0.2937, pruned_loss=0.05618, over 4810.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.286, pruned_loss=0.05723, over 939588.08 frames. ], batch size: 14, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:24:41,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=126016.0, ans=0.0 +2024-07-28 07:24:43,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.88 vs. 
limit=12.0 +2024-07-28 07:24:44,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=126016.0, ans=0.0 +2024-07-28 07:24:59,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=126056.0, ans=0.025 +2024-07-28 07:25:01,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=126056.0, ans=0.5 +2024-07-28 07:25:05,718 INFO [train.py:1114] (1/4) Epoch 10, batch 2550, loss[loss=0.1851, simple_loss=0.264, pruned_loss=0.05309, over 4794.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2855, pruned_loss=0.05713, over 939141.54 frames. ], batch size: 11, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:07,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=126069.33333333333, ans=0.125 +2024-07-28 07:25:10,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=126069.33333333333, ans=0.125 +2024-07-28 07:25:12,061 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=15.0 +2024-07-28 07:25:13,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126082.66666666667, ans=0.0 +2024-07-28 07:25:14,257 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.571e+01 6.137e+01 7.112e+01 1.171e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:25:22,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=126096.0, ans=0.2 +2024-07-28 07:25:23,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.85 vs. limit=15.0 +2024-07-28 07:25:25,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=126109.33333333333, ans=0.2 +2024-07-28 07:25:26,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=126109.33333333333, ans=0.2 +2024-07-28 07:25:30,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=126109.33333333333, ans=15.0 +2024-07-28 07:25:33,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126122.66666666667, ans=0.125 +2024-07-28 07:25:38,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=126136.0, ans=0.0 +2024-07-28 07:25:38,943 INFO [train.py:1114] (1/4) Epoch 10, batch 2600, loss[loss=0.172, simple_loss=0.2578, pruned_loss=0.04308, over 4885.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2863, pruned_loss=0.05728, over 937894.07 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:48,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.29 vs. 
limit=15.0 +2024-07-28 07:25:51,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0 +2024-07-28 07:26:02,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=126176.0, ans=0.125 +2024-07-28 07:26:15,509 INFO [train.py:1114] (1/4) Epoch 10, batch 2650, loss[loss=0.3222, simple_loss=0.3951, pruned_loss=0.1247, over 4631.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2865, pruned_loss=0.05705, over 940080.26 frames. ], batch size: 16, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:16,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126202.66666666667, ans=0.125 +2024-07-28 07:26:18,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=126202.66666666667, ans=0.125 +2024-07-28 07:26:21,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126202.66666666667, ans=0.125 +2024-07-28 07:26:23,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126216.0, ans=0.1 +2024-07-28 07:26:24,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=126216.0, ans=0.07 +2024-07-28 07:26:25,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.752e+01 6.121e+01 6.935e+01 9.272e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 07:26:31,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126229.33333333333, ans=0.125 +2024-07-28 07:26:45,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126256.0, ans=0.125 +2024-07-28 07:26:50,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=126269.33333333333, ans=0.125 +2024-07-28 07:26:51,009 INFO [train.py:1114] (1/4) Epoch 10, batch 2700, loss[loss=0.1862, simple_loss=0.2812, pruned_loss=0.04562, over 4746.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2878, pruned_loss=0.05759, over 940029.94 frames. ], batch size: 14, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:54,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=126269.33333333333, ans=0.125 +2024-07-28 07:26:57,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=126282.66666666667, ans=0.0 +2024-07-28 07:27:10,829 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-28 07:27:14,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=126309.33333333333, ans=0.2 +2024-07-28 07:27:28,534 INFO [train.py:1114] (1/4) Epoch 10, batch 2750, loss[loss=0.2114, simple_loss=0.2944, pruned_loss=0.06417, over 4702.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2861, pruned_loss=0.05697, over 939903.41 frames. 
], batch size: 12, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:27:34,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.52 vs. limit=10.0 +2024-07-28 07:27:37,076 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.293e+01 5.804e+01 6.361e+01 7.427e+01 1.283e+02, threshold=1.272e+02, percent-clipped=1.0 +2024-07-28 07:27:38,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.69 vs. limit=22.5 +2024-07-28 07:27:46,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=126362.66666666667, ans=0.1 +2024-07-28 07:27:46,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.71 vs. limit=15.0 +2024-07-28 07:27:49,283 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:27:55,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126389.33333333333, ans=0.125 +2024-07-28 07:27:56,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126389.33333333333, ans=0.125 +2024-07-28 07:28:00,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=126389.33333333333, ans=0.125 +2024-07-28 07:28:02,089 INFO [train.py:1114] (1/4) Epoch 10, batch 2800, loss[loss=0.2647, simple_loss=0.3302, pruned_loss=0.0996, over 3426.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2865, pruned_loss=0.05716, over 937628.74 frames. ], batch size: 36, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:05,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.71 vs. limit=15.0 +2024-07-28 07:28:29,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=126456.0, ans=0.5 +2024-07-28 07:28:32,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126456.0, ans=0.1 +2024-07-28 07:28:34,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=126456.0, ans=0.0 +2024-07-28 07:28:35,567 INFO [train.py:1114] (1/4) Epoch 10, batch 2850, loss[loss=0.2185, simple_loss=0.309, pruned_loss=0.06394, over 4963.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05778, over 935185.40 frames. 
], batch size: 13, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:39,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126469.33333333333, ans=0.125 +2024-07-28 07:28:44,289 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.758e+01 6.530e+01 7.801e+01 1.215e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 07:28:49,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126496.0, ans=0.1 +2024-07-28 07:28:57,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=126509.33333333333, ans=0.035 +2024-07-28 07:28:59,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=126509.33333333333, ans=0.0 +2024-07-28 07:29:08,558 INFO [train.py:1114] (1/4) Epoch 10, batch 2900, loss[loss=0.1974, simple_loss=0.2866, pruned_loss=0.05406, over 4829.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.288, pruned_loss=0.05764, over 939083.21 frames. ], batch size: 13, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:27,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=126562.66666666667, ans=0.125 +2024-07-28 07:29:29,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=126576.0, ans=0.09899494936611666 +2024-07-28 07:29:42,667 INFO [train.py:1114] (1/4) Epoch 10, batch 2950, loss[loss=0.1584, simple_loss=0.2401, pruned_loss=0.03832, over 4703.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2867, pruned_loss=0.05743, over 937941.95 frames. ], batch size: 12, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:43,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=126602.66666666667, ans=0.125 +2024-07-28 07:29:47,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=126602.66666666667, ans=0.09899494936611666 +2024-07-28 07:29:50,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=126616.0, ans=0.125 +2024-07-28 07:29:51,644 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.710e+01 6.450e+01 7.485e+01 1.036e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 07:30:03,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126642.66666666667, ans=0.0 +2024-07-28 07:30:03,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.32 vs. limit=22.5 +2024-07-28 07:30:04,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.66 vs. limit=22.5 +2024-07-28 07:30:11,841 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.45 vs. 
limit=15.0 +2024-07-28 07:30:17,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=126656.0, ans=0.05 +2024-07-28 07:30:24,806 INFO [train.py:1114] (1/4) Epoch 10, batch 3000, loss[loss=0.2341, simple_loss=0.3177, pruned_loss=0.07525, over 4764.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2865, pruned_loss=0.05738, over 937836.18 frames. ], batch size: 13, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:30:24,806 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 07:30:37,790 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.6055, 5.0967, 4.5507, 5.3520], device='cuda:1') +2024-07-28 07:30:42,396 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.173, simple_loss=0.277, pruned_loss=0.03444, over 944034.00 frames. +2024-07-28 07:30:42,397 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 07:30:50,676 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.89 vs. limit=15.0 +2024-07-28 07:30:51,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=126682.66666666667, ans=0.95 +2024-07-28 07:30:58,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=126696.0, ans=0.5 +2024-07-28 07:31:08,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=126709.33333333333, ans=0.0 +2024-07-28 07:31:14,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.76 vs. limit=8.0 +2024-07-28 07:31:14,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=126722.66666666667, ans=0.125 +2024-07-28 07:31:14,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126722.66666666667, ans=0.125 +2024-07-28 07:31:17,816 INFO [train.py:1114] (1/4) Epoch 10, batch 3050, loss[loss=0.1948, simple_loss=0.2875, pruned_loss=0.05099, over 4641.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2873, pruned_loss=0.05791, over 936466.60 frames. ], batch size: 12, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:31:23,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=126736.0, ans=0.0 +2024-07-28 07:31:38,384 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.667e+01 6.279e+01 7.137e+01 1.004e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 07:31:43,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=126749.33333333333, ans=0.2 +2024-07-28 07:31:47,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=126749.33333333333, ans=0.0 +2024-07-28 07:32:17,190 INFO [train.py:1114] (1/4) Epoch 10, batch 3100, loss[loss=0.1938, simple_loss=0.2849, pruned_loss=0.05129, over 4674.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2868, pruned_loss=0.05795, over 937375.42 frames. 
], batch size: 16, lr: 7.60e-03, grad_scale: 64.0 +2024-07-28 07:32:17,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=126802.66666666667, ans=0.95 +2024-07-28 07:32:20,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=126802.66666666667, ans=0.04949747468305833 +2024-07-28 07:32:23,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=126816.0, ans=0.1 +2024-07-28 07:32:26,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=126816.0, ans=0.125 +2024-07-28 07:32:27,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=126816.0, ans=0.0 +2024-07-28 07:32:30,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=126829.33333333333, ans=0.125 +2024-07-28 07:32:33,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=126829.33333333333, ans=0.125 +2024-07-28 07:32:46,132 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.83 vs. limit=10.0 +2024-07-28 07:32:48,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-07-28 07:32:52,364 INFO [train.py:1114] (1/4) Epoch 10, batch 3150, loss[loss=0.221, simple_loss=0.3157, pruned_loss=0.06314, over 4598.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2868, pruned_loss=0.05785, over 937933.96 frames. ], batch size: 17, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:32:52,854 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.81 vs. limit=22.5 +2024-07-28 07:32:57,170 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:32:58,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=126882.66666666667, ans=0.04949747468305833 +2024-07-28 07:33:00,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=126882.66666666667, ans=0.1 +2024-07-28 07:33:01,115 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.563e+01 5.962e+01 7.006e+01 9.323e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 07:33:27,179 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-07-28 07:33:29,755 INFO [train.py:1114] (1/4) Epoch 10, batch 3200, loss[loss=0.2369, simple_loss=0.3192, pruned_loss=0.07733, over 4820.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2862, pruned_loss=0.05718, over 939637.92 frames. 
], batch size: 13, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:33:33,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126936.0, ans=0.125 +2024-07-28 07:33:35,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=126936.0, ans=0.0 +2024-07-28 07:33:50,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=12.0 +2024-07-28 07:34:02,607 INFO [train.py:1114] (1/4) Epoch 10, batch 3250, loss[loss=0.171, simple_loss=0.2583, pruned_loss=0.04183, over 4937.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2862, pruned_loss=0.05674, over 940624.98 frames. ], batch size: 14, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:05,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.95 vs. limit=10.0 +2024-07-28 07:34:05,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=127002.66666666667, ans=0.0 +2024-07-28 07:34:11,317 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 5.496e+01 6.167e+01 6.993e+01 1.063e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:34:12,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=127016.0, ans=0.2 +2024-07-28 07:34:12,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.36 vs. limit=12.0 +2024-07-28 07:34:22,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=127042.66666666667, ans=0.0 +2024-07-28 07:34:26,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127042.66666666667, ans=0.125 +2024-07-28 07:34:32,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.11 vs. limit=15.0 +2024-07-28 07:34:34,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=127056.0, ans=0.125 +2024-07-28 07:34:36,024 INFO [train.py:1114] (1/4) Epoch 10, batch 3300, loss[loss=0.1893, simple_loss=0.2737, pruned_loss=0.05245, over 4692.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2858, pruned_loss=0.05641, over 940565.11 frames. ], batch size: 19, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:42,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127082.66666666667, ans=0.125 +2024-07-28 07:34:45,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.28 vs. 
limit=15.0 +2024-07-28 07:34:54,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127096.0, ans=0.1 +2024-07-28 07:35:02,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127122.66666666667, ans=0.125 +2024-07-28 07:35:05,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127122.66666666667, ans=0.1 +2024-07-28 07:35:07,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=127122.66666666667, ans=0.0 +2024-07-28 07:35:09,150 INFO [train.py:1114] (1/4) Epoch 10, batch 3350, loss[loss=0.2173, simple_loss=0.303, pruned_loss=0.06584, over 4602.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2871, pruned_loss=0.05721, over 938870.41 frames. ], batch size: 17, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:35:09,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=127136.0, ans=0.0 +2024-07-28 07:35:16,528 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:35:17,675 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+01 5.618e+01 6.272e+01 7.252e+01 1.069e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 07:35:25,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=127162.66666666667, ans=0.0 +2024-07-28 07:35:40,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127189.33333333333, ans=0.1 +2024-07-28 07:35:42,746 INFO [train.py:1114] (1/4) Epoch 10, batch 3400, loss[loss=0.1993, simple_loss=0.2667, pruned_loss=0.06594, over 4790.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2868, pruned_loss=0.05763, over 937400.71 frames. ], batch size: 11, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:35:44,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=127202.66666666667, ans=0.04949747468305833 +2024-07-28 07:35:46,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127202.66666666667, ans=0.125 +2024-07-28 07:35:51,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=127216.0, ans=0.2 +2024-07-28 07:35:54,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=127216.0, ans=0.07 +2024-07-28 07:36:00,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.34 vs. limit=12.0 +2024-07-28 07:36:16,677 INFO [train.py:1114] (1/4) Epoch 10, batch 3450, loss[loss=0.1872, simple_loss=0.2689, pruned_loss=0.0528, over 4720.00 frames. ], tot_loss[loss=0.201, simple_loss=0.287, pruned_loss=0.05753, over 937949.60 frames. 
], batch size: 19, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:36:18,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127269.33333333333, ans=0.125 +2024-07-28 07:36:20,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=127269.33333333333, ans=0.5 +2024-07-28 07:36:25,812 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.619e+01 6.055e+01 6.552e+01 2.053e+02, threshold=1.211e+02, percent-clipped=1.0 +2024-07-28 07:36:32,215 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.78 vs. limit=22.5 +2024-07-28 07:36:37,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=127309.33333333333, ans=0.0 +2024-07-28 07:36:39,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=127309.33333333333, ans=0.125 +2024-07-28 07:36:56,168 INFO [train.py:1114] (1/4) Epoch 10, batch 3500, loss[loss=0.2038, simple_loss=0.2847, pruned_loss=0.06142, over 4945.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2864, pruned_loss=0.05698, over 938954.59 frames. ], batch size: 12, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:36:57,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=127336.0, ans=6.0 +2024-07-28 07:36:58,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=127336.0, ans=0.2 +2024-07-28 07:37:06,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=127336.0, ans=0.125 +2024-07-28 07:37:09,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=127349.33333333333, ans=0.0 +2024-07-28 07:37:26,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127362.66666666667, ans=0.1 +2024-07-28 07:37:28,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=127362.66666666667, ans=0.025 +2024-07-28 07:37:28,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=127362.66666666667, ans=0.5 +2024-07-28 07:37:33,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=127376.0, ans=0.025 +2024-07-28 07:37:35,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=127376.0, ans=0.0 +2024-07-28 07:37:40,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127389.33333333333, ans=0.125 +2024-07-28 07:37:47,036 INFO [train.py:1114] (1/4) Epoch 10, batch 3550, loss[loss=0.1971, simple_loss=0.2981, pruned_loss=0.04805, over 4659.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2868, pruned_loss=0.05714, over 939349.15 frames. 
], batch size: 14, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:37:47,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=127402.66666666667, ans=0.05 +2024-07-28 07:37:48,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=127402.66666666667, ans=0.0 +2024-07-28 07:38:07,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=127402.66666666667, ans=0.0 +2024-07-28 07:38:16,055 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.635e+01 6.291e+01 7.462e+01 1.218e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-28 07:38:18,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=127416.0, ans=0.05 +2024-07-28 07:38:20,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=127429.33333333333, ans=0.125 +2024-07-28 07:38:21,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127429.33333333333, ans=0.1 +2024-07-28 07:38:27,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=127442.66666666667, ans=0.125 +2024-07-28 07:38:34,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=127456.0, ans=0.2 +2024-07-28 07:38:34,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=127456.0, ans=0.2 +2024-07-28 07:38:40,232 INFO [train.py:1114] (1/4) Epoch 10, batch 3600, loss[loss=0.1912, simple_loss=0.2685, pruned_loss=0.05697, over 4962.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2864, pruned_loss=0.05687, over 940903.23 frames. ], batch size: 13, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:38:42,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.26 vs. limit=15.0 +2024-07-28 07:38:43,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=127469.33333333333, ans=0.0 +2024-07-28 07:38:52,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=127482.66666666667, ans=0.0 +2024-07-28 07:39:10,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.02 vs. limit=15.0 +2024-07-28 07:39:12,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=127509.33333333333, ans=0.125 +2024-07-28 07:39:18,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.34 vs. limit=15.0 +2024-07-28 07:39:25,739 INFO [train.py:1114] (1/4) Epoch 10, batch 3650, loss[loss=0.2564, simple_loss=0.336, pruned_loss=0.08841, over 4908.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2852, pruned_loss=0.05632, over 941023.26 frames. 
], batch size: 15, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:39:30,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=127536.0, ans=0.125 +2024-07-28 07:39:49,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=127549.33333333333, ans=0.025 +2024-07-28 07:39:51,676 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.725e+01 6.100e+01 7.132e+01 1.043e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 07:39:51,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=127549.33333333333, ans=0.1 +2024-07-28 07:40:28,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127562.66666666667, ans=0.1 +2024-07-28 07:40:28,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=127562.66666666667, ans=0.2 +2024-07-28 07:40:32,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=127576.0, ans=0.125 +2024-07-28 07:40:40,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=127589.33333333333, ans=0.0 +2024-07-28 07:40:50,426 INFO [train.py:1114] (1/4) Epoch 10, batch 3700, loss[loss=0.1835, simple_loss=0.2747, pruned_loss=0.04613, over 4932.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2853, pruned_loss=0.05615, over 942099.23 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:40:58,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0 +2024-07-28 07:40:59,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=127602.66666666667, ans=0.0 +2024-07-28 07:41:05,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127616.0, ans=0.125 +2024-07-28 07:41:08,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=127629.33333333333, ans=0.07 +2024-07-28 07:41:09,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=127629.33333333333, ans=0.125 +2024-07-28 07:41:14,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=127642.66666666667, ans=0.025 +2024-07-28 07:41:17,659 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.72 vs. limit=10.0 +2024-07-28 07:41:24,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=127656.0, ans=0.2 +2024-07-28 07:41:26,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.53 vs. limit=15.0 +2024-07-28 07:41:28,281 INFO [train.py:1114] (1/4) Epoch 10, batch 3750, loss[loss=0.175, simple_loss=0.26, pruned_loss=0.04501, over 4811.00 frames. 
], tot_loss[loss=0.1977, simple_loss=0.2843, pruned_loss=0.05557, over 943684.76 frames. ], batch size: 11, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:41:37,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127669.33333333333, ans=0.125 +2024-07-28 07:41:49,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=127682.66666666667, ans=0.0 +2024-07-28 07:41:51,638 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+01 5.968e+01 6.692e+01 7.910e+01 1.742e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-28 07:41:54,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127682.66666666667, ans=0.1 +2024-07-28 07:41:58,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=127696.0, ans=0.125 +2024-07-28 07:42:07,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=127709.33333333333, ans=0.125 +2024-07-28 07:42:22,846 INFO [train.py:1114] (1/4) Epoch 10, batch 3800, loss[loss=0.2104, simple_loss=0.3066, pruned_loss=0.05712, over 4809.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2851, pruned_loss=0.05659, over 941927.71 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:42:23,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127736.0, ans=0.125 +2024-07-28 07:42:38,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=127749.33333333333, ans=0.0 +2024-07-28 07:42:47,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=127762.66666666667, ans=0.0 +2024-07-28 07:42:48,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=127762.66666666667, ans=0.2 +2024-07-28 07:42:53,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.10 vs. limit=15.0 +2024-07-28 07:43:11,570 INFO [train.py:1114] (1/4) Epoch 10, batch 3850, loss[loss=0.2327, simple_loss=0.3171, pruned_loss=0.07414, over 4638.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2845, pruned_loss=0.05617, over 942340.82 frames. ], batch size: 16, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:43:11,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=127802.66666666667, ans=0.2 +2024-07-28 07:43:13,386 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. 
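limit=15.0

The `scaling.py:1024` "Whitening" entries compare a per-module anisotropy statistic (`metric`) against a limit: values near 1 mean the module's feature covariance is close to isotropic ("white"), while large values mean the variance has concentrated in a few directions. One plausible way to compute such a statistic from traces of the covariance, ignoring the `num_groups` grouping seen in the log (a hedged sketch, not the verbatim icefall code):

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """One plausible 'metric' in the spirit of the Whitening log lines
    (not the verbatim icefall scaling.py code). x: (frames, channels)."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]                    # feature covariance (C, C)
    mean_eig = torch.diagonal(cov).mean()           # trace(cov)/C   = mean eigenvalue
    mean_eig_sq = torch.diagonal(cov @ cov).mean()  # trace(cov^2)/C = mean squared eigenvalue
    # >= 1.0, equal to 1.0 only when cov is a multiple of the identity
    # ("white"); grows as variance concentrates in fewer directions.
    return (mean_eig_sq / mean_eig.clamp(min=1e-20) ** 2).item()

x = torch.randn(2000, 256)
print(whitening_metric(x))                    # near 1 (isotropic features)
print(whitening_metric(x * torch.rand(256)))  # larger: uneven channel variance
```

Read this way, an entry such as `metric=16.26 vs. limit=15.0` flags a module whose features have drifted furthest from white at that point in training.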
+2024-07-28 07:43:18,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127816.0, ans=0.1 +2024-07-28 07:43:21,988 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.662e+01 6.521e+01 7.617e+01 1.192e+02, threshold=1.304e+02, percent-clipped=1.0 +2024-07-28 07:43:23,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=127816.0, ans=0.125 +2024-07-28 07:43:35,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127829.33333333333, ans=0.125 +2024-07-28 07:43:38,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=127842.66666666667, ans=0.125 +2024-07-28 07:43:43,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127842.66666666667, ans=0.125 +2024-07-28 07:43:45,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=127856.0, ans=0.025 +2024-07-28 07:43:45,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=127856.0, ans=0.125 +2024-07-28 07:43:52,478 INFO [train.py:1114] (1/4) Epoch 10, batch 3900, loss[loss=0.2167, simple_loss=0.2971, pruned_loss=0.06813, over 4805.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2861, pruned_loss=0.05707, over 942149.78 frames. ], batch size: 14, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:43:54,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127869.33333333333, ans=0.1 +2024-07-28 07:43:55,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=127869.33333333333, ans=0.125 +2024-07-28 07:43:55,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=127869.33333333333, ans=0.2 +2024-07-28 07:44:04,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. limit=6.0 +2024-07-28 07:44:22,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=127909.33333333333, ans=0.2 +2024-07-28 07:44:23,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=127909.33333333333, ans=0.125 +2024-07-28 07:44:27,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=127922.66666666667, ans=0.125 +2024-07-28 07:44:33,687 INFO [train.py:1114] (1/4) Epoch 10, batch 3950, loss[loss=0.2252, simple_loss=0.3022, pruned_loss=0.07414, over 4846.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.286, pruned_loss=0.05686, over 944105.21 frames.
], batch size: 16, lr: 7.56e-03, grad_scale: 16.0 +2024-07-28 07:44:37,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127936.0, ans=0.1 +2024-07-28 07:44:54,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.07 vs. limit=6.0 +2024-07-28 07:44:57,176 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.715e+01 6.133e+01 6.852e+01 1.045e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:45:05,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-07-28 07:45:15,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=127989.33333333333, ans=0.05 +2024-07-28 07:45:16,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=127989.33333333333, ans=0.1 +2024-07-28 07:45:23,676 INFO [train.py:1114] (1/4) Epoch 10, batch 4000, loss[loss=0.1731, simple_loss=0.2512, pruned_loss=0.04748, over 4768.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2862, pruned_loss=0.05764, over 940603.85 frames. ], batch size: 12, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:45:29,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=128002.66666666667, ans=0.0 +2024-07-28 07:45:32,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=128016.0, ans=0.125 +2024-07-28 07:46:03,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=15.0 +2024-07-28 07:46:05,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=128056.0, ans=0.05 +2024-07-28 07:46:07,088 INFO [train.py:1114] (1/4) Epoch 10, batch 4050, loss[loss=0.2799, simple_loss=0.3397, pruned_loss=0.1101, over 3370.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2862, pruned_loss=0.05753, over 938994.01 frames. ], batch size: 35, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:46:17,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.919e+01 6.572e+01 7.473e+01 1.130e+02, threshold=1.314e+02, percent-clipped=0.0 +2024-07-28 07:46:17,891 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.25 vs. 
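limit=15.0

The `grad_scale` field in the per-batch lines is dynamic loss scaling from mixed-precision training: it drops from 32.0 to 16.0 around batch 3950 above and is back at 32.0 by batch 4000, the usual halve-on-overflow, grow-after-a-quiet-stretch behaviour (it later reaches 64.0 near batch 5950). A sketch of the standard PyTorch pattern; the actual train.py wiring may differ, and the `init_scale`/`growth_interval` values here are illustrative:

```python
import torch

# Standard PyTorch dynamic loss scaling; values chosen for illustration.
scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

def train_step(model, optimizer, criterion, inputs, targets):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():           # fp16 forward pass
        loss = criterion(model(inputs), targets)
    scaler.scale(loss).backward()             # backward on the scaled loss
    scaler.step(optimizer)                    # skipped if grads contain inf/nan
    scaler.update()                           # halves the scale on overflow,
                                              # doubles it after a clean stretch
    return loss.detach(), scaler.get_scale()  # the grad_scale shown in the log
```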
+2024-07-28 07:46:20,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=128096.0, ans=0.025 +2024-07-28 07:46:20,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128096.0, ans=0.125 +2024-07-28 07:46:24,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128096.0, ans=0.1 +2024-07-28 07:46:25,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128096.0, ans=0.125 +2024-07-28 07:46:26,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=128096.0, ans=0.2 +2024-07-28 07:46:37,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128122.66666666667, ans=0.1 +2024-07-28 07:46:42,597 INFO [train.py:1114] (1/4) Epoch 10, batch 4100, loss[loss=0.2107, simple_loss=0.2996, pruned_loss=0.06094, over 4899.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2871, pruned_loss=0.05793, over 938152.65 frames. ], batch size: 15, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:46:46,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.56 vs. limit=10.0 +2024-07-28 07:46:47,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=15.0 +2024-07-28 07:46:55,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128162.66666666667, ans=0.125 +2024-07-28 07:46:55,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-07-28 07:47:18,147 INFO [train.py:1114] (1/4) Epoch 10, batch 4150, loss[loss=0.1862, simple_loss=0.2688, pruned_loss=0.05181, over 4833.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2859, pruned_loss=0.05744, over 937904.77 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:47:28,110 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.846e+01 6.728e+01 7.607e+01 1.158e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-28 07:47:29,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-07-28 07:47:37,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.41 vs. limit=15.0 +2024-07-28 07:47:40,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=128242.66666666667, ans=0.2 +2024-07-28 07:47:41,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=128242.66666666667, ans=0.125 +2024-07-28 07:47:51,039 INFO [train.py:1114] (1/4) Epoch 10, batch 4200, loss[loss=0.2214, simple_loss=0.3058, pruned_loss=0.06854, over 4912.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2864, pruned_loss=0.05721, over 939343.93 frames.
], batch size: 15, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:47:53,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=128269.33333333333, ans=0.125 +2024-07-28 07:47:58,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=128282.66666666667, ans=0.025 +2024-07-28 07:47:58,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=128282.66666666667, ans=0.2 +2024-07-28 07:47:59,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0 +2024-07-28 07:48:04,179 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.04 vs. limit=15.0 +2024-07-28 07:48:11,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.47 vs. limit=15.0 +2024-07-28 07:48:13,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=128309.33333333333, ans=0.0 +2024-07-28 07:48:15,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=128309.33333333333, ans=0.0 +2024-07-28 07:48:23,682 INFO [train.py:1114] (1/4) Epoch 10, batch 4250, loss[loss=0.149, simple_loss=0.2323, pruned_loss=0.03288, over 4644.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.287, pruned_loss=0.05774, over 940584.65 frames. ], batch size: 12, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:48:28,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128336.0, ans=0.125 +2024-07-28 07:48:33,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.567e+01 6.071e+01 6.705e+01 1.236e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 07:48:35,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=128349.33333333333, ans=0.0 +2024-07-28 07:48:36,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0 +2024-07-28 07:48:47,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128376.0, ans=0.1 +2024-07-28 07:48:52,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=128389.33333333333, ans=0.025 +2024-07-28 07:48:57,118 INFO [train.py:1114] (1/4) Epoch 10, batch 4300, loss[loss=0.1938, simple_loss=0.2897, pruned_loss=0.04897, over 4757.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2865, pruned_loss=0.05733, over 940100.52 frames. 
], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:49:00,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=128402.66666666667, ans=0.125 +2024-07-28 07:49:00,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=128402.66666666667, ans=0.125 +2024-07-28 07:49:10,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.99 vs. limit=22.5 +2024-07-28 07:49:15,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128429.33333333333, ans=0.125 +2024-07-28 07:49:18,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=128442.66666666667, ans=0.125 +2024-07-28 07:49:30,481 INFO [train.py:1114] (1/4) Epoch 10, batch 4350, loss[loss=0.1998, simple_loss=0.2749, pruned_loss=0.06235, over 4759.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2867, pruned_loss=0.05724, over 940771.58 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:49:40,758 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.507e+01 6.201e+01 7.013e+01 1.119e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 07:49:41,220 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.57 vs. limit=22.5 +2024-07-28 07:49:51,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=128509.33333333333, ans=0.0 +2024-07-28 07:50:04,256 INFO [train.py:1114] (1/4) Epoch 10, batch 4400, loss[loss=0.2277, simple_loss=0.3115, pruned_loss=0.07191, over 4819.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2861, pruned_loss=0.05675, over 940771.96 frames. ], batch size: 14, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:50:06,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=128536.0, ans=0.125 +2024-07-28 07:50:34,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=128589.33333333333, ans=0.0 +2024-07-28 07:50:36,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=128589.33333333333, ans=0.0 +2024-07-28 07:50:37,961 INFO [train.py:1114] (1/4) Epoch 10, batch 4450, loss[loss=0.2063, simple_loss=0.2822, pruned_loss=0.06513, over 4935.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2865, pruned_loss=0.05744, over 938384.75 frames. 
], batch size: 12, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:50:39,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=128602.66666666667, ans=0.2 +2024-07-28 07:50:47,722 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.603e+01 6.224e+01 7.010e+01 9.776e+01, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:50:48,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=128616.0, ans=0.025 +2024-07-28 07:50:49,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=128616.0, ans=0.0 +2024-07-28 07:50:51,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.33 vs. limit=6.0 +2024-07-28 07:51:03,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=128656.0, ans=0.125 +2024-07-28 07:51:07,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0 +2024-07-28 07:51:08,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.33 vs. limit=22.5 +2024-07-28 07:51:12,772 INFO [train.py:1114] (1/4) Epoch 10, batch 4500, loss[loss=0.2, simple_loss=0.2853, pruned_loss=0.05731, over 4740.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.287, pruned_loss=0.05738, over 938039.78 frames. ], batch size: 14, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:51:13,855 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.12 vs. limit=15.0 +2024-07-28 07:51:22,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=128682.66666666667, ans=0.2 +2024-07-28 07:51:25,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=128682.66666666667, ans=0.125 +2024-07-28 07:51:34,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.63 vs. limit=22.5 +2024-07-28 07:51:35,987 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:51:36,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.65 vs. limit=22.5 +2024-07-28 07:51:39,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=128709.33333333333, ans=0.2 +2024-07-28 07:51:46,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=128722.66666666667, ans=0.125 +2024-07-28 07:51:47,648 INFO [train.py:1114] (1/4) Epoch 10, batch 4550, loss[loss=0.208, simple_loss=0.2889, pruned_loss=0.06359, over 4894.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2871, pruned_loss=0.05754, over 939718.49 frames. 
], batch size: 13, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:51:47,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=128736.0, ans=0.125 +2024-07-28 07:51:50,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=128736.0, ans=0.09899494936611666 +2024-07-28 07:51:55,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=128749.33333333333, ans=0.2 +2024-07-28 07:51:57,765 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.839e+01 6.410e+01 7.232e+01 1.296e+02, threshold=1.282e+02, percent-clipped=2.0 +2024-07-28 07:52:03,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=128762.66666666667, ans=0.0 +2024-07-28 07:52:17,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=128789.33333333333, ans=0.0 +2024-07-28 07:52:18,781 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0 +2024-07-28 07:52:20,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=128789.33333333333, ans=0.0 +2024-07-28 07:52:24,214 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:52:24,745 INFO [train.py:1114] (1/4) Epoch 10, batch 4600, loss[loss=0.2161, simple_loss=0.2818, pruned_loss=0.0752, over 4495.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2863, pruned_loss=0.05678, over 938271.60 frames. ], batch size: 21, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:52:37,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-07-28 07:52:43,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=128829.33333333333, ans=0.1 +2024-07-28 07:52:43,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=128842.66666666667, ans=0.2 +2024-07-28 07:52:48,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-07-28 07:52:49,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=128842.66666666667, ans=0.025 +2024-07-28 07:52:54,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-07-28 07:52:57,427 INFO [train.py:1114] (1/4) Epoch 10, batch 4650, loss[loss=0.2054, simple_loss=0.2937, pruned_loss=0.05859, over 4836.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2874, pruned_loss=0.05701, over 939788.12 frames. 
], batch size: 16, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:52:57,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=128869.33333333333, ans=0.125 +2024-07-28 07:53:00,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=128869.33333333333, ans=0.0 +2024-07-28 07:53:07,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.595e+01 6.179e+01 7.275e+01 1.134e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 07:53:07,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=128882.66666666667, ans=0.025 +2024-07-28 07:53:08,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=128882.66666666667, ans=0.0 +2024-07-28 07:53:20,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=128909.33333333333, ans=0.0 +2024-07-28 07:53:31,106 INFO [train.py:1114] (1/4) Epoch 10, batch 4700, loss[loss=0.1577, simple_loss=0.234, pruned_loss=0.04071, over 4716.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2865, pruned_loss=0.05638, over 937056.75 frames. ], batch size: 11, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:53:31,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=128936.0, ans=0.0 +2024-07-28 07:53:37,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=128936.0, ans=0.125 +2024-07-28 07:53:51,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=128976.0, ans=0.2 +2024-07-28 07:53:52,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=128976.0, ans=0.0 +2024-07-28 07:53:58,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.26 vs. limit=6.0 +2024-07-28 07:54:00,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=128989.33333333333, ans=10.0 +2024-07-28 07:54:02,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128989.33333333333, ans=0.1 +2024-07-28 07:54:05,545 INFO [train.py:1114] (1/4) Epoch 10, batch 4750, loss[loss=0.2323, simple_loss=0.3158, pruned_loss=0.07444, over 4474.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2874, pruned_loss=0.05712, over 935686.91 frames. ], batch size: 21, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:54:14,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. 
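limit=15.0

Each `optim.py:487` WARNING prints five order statistics of recent gradient norms (min, 25%, median, 75%, max), a clipping threshold, and the percentage of recent batches that were clipped. Throughout this stretch the threshold is twice the reported median (e.g. 6.177e+01 against threshold=1.235e+02 just below), matching `Clipping_scale=2.0`, which suggests the threshold tracks a running statistic of the norm history rather than a fixed constant. A hypothetical helper with the same reporting shape, not icefall's actual `optim.py`:

```python
import collections
import torch

class QuartileClipper:
    """Hypothetical sketch: clip to `scale` times the recent median grad
    norm and report quartile statistics like the WARNING lines above."""
    def __init__(self, window: int = 128, scale: float = 2.0):
        self.history = collections.deque(maxlen=window)
        self.scale = scale
        self.clipped = 0
        self.total = 0

    def clip_(self, parameters):
        grads = [p.grad for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.history.append(norm)
        hist = torch.tensor(list(self.history))
        threshold = self.scale * hist.median().item()
        self.total += 1
        if norm > threshold:
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)   # rescale grads onto the threshold
        q = torch.quantile(hist, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        print(f"Clipping_scale={self.scale}, grad-norm quartiles "
              f"{' '.join(f'{v:.3e}' for v in q.tolist())}, "
              f"threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.clipped / self.total:.1f}")
```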
+2024-07-28 07:54:15,560 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.636e+01 6.177e+01 7.080e+01 9.506e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 07:54:19,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=129029.33333333333, ans=0.025 +2024-07-28 07:54:23,505 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:54:27,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.68 vs. limit=12.0 +2024-07-28 07:54:28,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=129042.66666666667, ans=0.0 +2024-07-28 07:54:31,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129042.66666666667, ans=0.0 +2024-07-28 07:54:37,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=129056.0, ans=0.0 +2024-07-28 07:54:39,982 INFO [train.py:1114] (1/4) Epoch 10, batch 4800, loss[loss=0.1802, simple_loss=0.2713, pruned_loss=0.04454, over 4696.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2865, pruned_loss=0.05736, over 933455.76 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:54:40,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=129069.33333333333, ans=0.2 +2024-07-28 07:54:44,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=129069.33333333333, ans=0.025 +2024-07-28 07:54:55,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=129096.0, ans=0.125 +2024-07-28 07:55:03,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=129109.33333333333, ans=0.0 +2024-07-28 07:55:05,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=129122.66666666667, ans=0.0 +2024-07-28 07:55:09,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0 +2024-07-28 07:55:10,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=129122.66666666667, ans=0.125 +2024-07-28 07:55:11,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=129122.66666666667, ans=0.2 +2024-07-28 07:55:13,157 INFO [train.py:1114] (1/4) Epoch 10, batch 4850, loss[loss=0.1986, simple_loss=0.3006, pruned_loss=0.04834, over 4741.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2866, pruned_loss=0.0576, over 933260.97 frames.
], batch size: 14, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:55:13,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=129136.0, ans=0.0 +2024-07-28 07:55:15,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129136.0, ans=0.1 +2024-07-28 07:55:16,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129136.0, ans=0.125 +2024-07-28 07:55:23,229 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.570e+01 6.105e+01 6.787e+01 9.790e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 07:55:28,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129162.66666666667, ans=0.125 +2024-07-28 07:55:36,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=129176.0, ans=0.0 +2024-07-28 07:55:38,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129176.0, ans=0.125 +2024-07-28 07:55:46,404 INFO [train.py:1114] (1/4) Epoch 10, batch 4900, loss[loss=0.2022, simple_loss=0.2868, pruned_loss=0.05882, over 4765.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2859, pruned_loss=0.05734, over 934409.01 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:56:01,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=129229.33333333333, ans=0.125 +2024-07-28 07:56:07,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=129242.66666666667, ans=0.125 +2024-07-28 07:56:17,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129256.0, ans=0.0 +2024-07-28 07:56:18,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=129256.0, ans=0.0 +2024-07-28 07:56:20,955 INFO [train.py:1114] (1/4) Epoch 10, batch 4950, loss[loss=0.2503, simple_loss=0.3236, pruned_loss=0.08852, over 3117.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2862, pruned_loss=0.05735, over 930985.32 frames. 
], batch size: 35, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:56:29,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=129282.66666666667, ans=0.125 +2024-07-28 07:56:30,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129282.66666666667, ans=0.125 +2024-07-28 07:56:33,106 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.675e+01 6.169e+01 7.226e+01 1.073e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 07:56:33,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=129282.66666666667, ans=0.2 +2024-07-28 07:56:38,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=129296.0, ans=0.125 +2024-07-28 07:56:58,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129322.66666666667, ans=0.125 +2024-07-28 07:57:00,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.07 vs. limit=15.0 +2024-07-28 07:57:01,458 INFO [train.py:1114] (1/4) Epoch 10, batch 5000, loss[loss=0.2321, simple_loss=0.3179, pruned_loss=0.07319, over 4670.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2847, pruned_loss=0.05711, over 934973.60 frames. ], batch size: 14, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:57:02,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=129336.0, ans=0.2 +2024-07-28 07:57:05,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129336.0, ans=0.125 +2024-07-28 07:57:08,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=129349.33333333333, ans=0.2 +2024-07-28 07:57:12,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=129349.33333333333, ans=0.025 +2024-07-28 07:57:18,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129362.66666666667, ans=0.0 +2024-07-28 07:57:21,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=129362.66666666667, ans=0.0 +2024-07-28 07:57:25,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=129376.0, ans=0.0 +2024-07-28 07:57:28,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=129376.0, ans=0.125 +2024-07-28 07:57:28,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=129376.0, ans=0.09899494936611666 +2024-07-28 07:57:36,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=129389.33333333333, ans=0.04949747468305833 +2024-07-28 07:57:37,478 INFO [train.py:1114] (1/4) Epoch 10, batch 5050, loss[loss=0.1774, simple_loss=0.2696, pruned_loss=0.04258, over 4848.00 frames. 
], tot_loss[loss=0.1996, simple_loss=0.2856, pruned_loss=0.05683, over 937375.57 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:57:37,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=129402.66666666667, ans=0.1 +2024-07-28 07:57:47,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=129416.0, ans=0.1 +2024-07-28 07:57:47,629 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+01 5.711e+01 6.360e+01 7.128e+01 1.073e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 07:57:52,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129429.33333333333, ans=0.125 +2024-07-28 07:57:55,235 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:58:02,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=129442.66666666667, ans=0.5 +2024-07-28 07:58:02,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129442.66666666667, ans=0.1 +2024-07-28 07:58:06,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=129456.0, ans=0.0 +2024-07-28 07:58:06,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=129456.0, ans=0.025 +2024-07-28 07:58:13,497 INFO [train.py:1114] (1/4) Epoch 10, batch 5100, loss[loss=0.1792, simple_loss=0.2647, pruned_loss=0.04679, over 4784.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2869, pruned_loss=0.05752, over 934944.20 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:58:13,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=129469.33333333333, ans=0.125 +2024-07-28 07:58:20,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=129482.66666666667, ans=0.0 +2024-07-28 07:58:46,344 INFO [train.py:1114] (1/4) Epoch 10, batch 5150, loss[loss=0.21, simple_loss=0.2985, pruned_loss=0.06071, over 4849.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2878, pruned_loss=0.05802, over 935877.33 frames. 
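], batch size: 16, lr: 7.52e-03, grad_scale: 32.0

The `train.py:1114` lines report two losses: `loss[...]` for the current batch and `tot_loss[...]` aggregated "over N frames", i.e. a frame-weighted average in which each utterance counts in proportion to how much audio it contributes. A minimal sketch under the assumption of a plain accumulate-and-normalize tracker; the fractional, roughly constant frame totals around 940k in the log suggest the real tracker also decays old batches, which is omitted here:

```python
class FrameWeightedLoss:
    """Sketch of a frame-weighted running loss like the tot_loss entries."""
    def __init__(self):
        self.weighted_sum = 0.0
        self.num_frames = 0.0

    def update(self, loss: float, frames: float) -> None:
        self.weighted_sum += loss * frames   # long utterances count more
        self.num_frames += frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.num_frames, 1.0)

tracker = FrameWeightedLoss()
tracker.update(0.2100, 4849.0)   # batch 5150 values from the log above
tracker.update(0.2293, 4667.0)   # batch 5200 values
print(f"loss={tracker.value:.4f}, over {tracker.num_frames:.2f} frames")
```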
+2024-07-28 07:58:49,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129536.0, ans=0.1 +2024-07-28 07:58:54,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=129549.33333333333, ans=0.125 +2024-07-28 07:58:56,279 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.874e+01 5.650e+01 6.455e+01 7.114e+01 1.167e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 07:59:00,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=129562.66666666667, ans=6.0 +2024-07-28 07:59:04,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=129562.66666666667, ans=0.125 +2024-07-28 07:59:05,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.45 vs. limit=10.0 +2024-07-28 07:59:05,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=129576.0, ans=0.125 +2024-07-28 07:59:05,942 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.42 vs. limit=15.0 +2024-07-28 07:59:08,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129576.0, ans=0.1 +2024-07-28 07:59:20,214 INFO [train.py:1114] (1/4) Epoch 10, batch 5200, loss[loss=0.2293, simple_loss=0.3292, pruned_loss=0.06466, over 4667.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2875, pruned_loss=0.05758, over 936067.16 frames. ], batch size: 14, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 07:59:41,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=129642.66666666667, ans=0.2 +2024-07-28 07:59:43,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=129642.66666666667, ans=0.2 +2024-07-28 07:59:45,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=129642.66666666667, ans=0.2 +2024-07-28 07:59:53,601 INFO [train.py:1114] (1/4) Epoch 10, batch 5250, loss[loss=0.219, simple_loss=0.3131, pruned_loss=0.06244, over 4888.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2875, pruned_loss=0.05739, over 936035.48 frames.
], batch size: 13, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:00:03,736 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 5.858e+01 6.971e+01 8.204e+01 1.196e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-28 08:00:05,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=129682.66666666667, ans=0.025 +2024-07-28 08:00:06,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129696.0, ans=0.125 +2024-07-28 08:00:08,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=129696.0, ans=0.125 +2024-07-28 08:00:12,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=129696.0, ans=0.125 +2024-07-28 08:00:16,066 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:00:18,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=22.5 +2024-07-28 08:00:27,508 INFO [train.py:1114] (1/4) Epoch 10, batch 5300, loss[loss=0.2583, simple_loss=0.342, pruned_loss=0.08731, over 4665.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2872, pruned_loss=0.0573, over 934708.14 frames. ], batch size: 16, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:00:40,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129762.66666666667, ans=0.0 +2024-07-28 08:00:41,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.14 vs. limit=22.5 +2024-07-28 08:00:45,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=129762.66666666667, ans=0.0 +2024-07-28 08:00:46,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-28 08:00:46,871 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.19 vs. limit=15.0 +2024-07-28 08:01:00,900 INFO [train.py:1114] (1/4) Epoch 10, batch 5350, loss[loss=0.1567, simple_loss=0.2417, pruned_loss=0.03579, over 4536.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2874, pruned_loss=0.05753, over 936606.72 frames. 
], batch size: 10, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:01:04,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129802.66666666667, ans=0.125 +2024-07-28 08:01:05,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=129802.66666666667, ans=0.1 +2024-07-28 08:01:09,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=129816.0, ans=0.125 +2024-07-28 08:01:11,098 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.483e+01 5.986e+01 6.738e+01 1.016e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 08:01:11,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=129816.0, ans=0.025 +2024-07-28 08:01:11,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=129816.0, ans=0.125 +2024-07-28 08:01:14,102 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:01:15,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=129829.33333333333, ans=0.2 +2024-07-28 08:01:17,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=129829.33333333333, ans=0.0 +2024-07-28 08:01:24,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=129842.66666666667, ans=0.0 +2024-07-28 08:01:31,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129856.0, ans=0.0 +2024-07-28 08:01:34,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=129856.0, ans=0.0 +2024-07-28 08:01:36,328 INFO [train.py:1114] (1/4) Epoch 10, batch 5400, loss[loss=0.1939, simple_loss=0.2839, pruned_loss=0.05194, over 4345.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2893, pruned_loss=0.0589, over 931866.62 frames. ], batch size: 25, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:01:42,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=129869.33333333333, ans=0.025 +2024-07-28 08:01:54,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129896.0, ans=0.1 +2024-07-28 08:02:05,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=129922.66666666667, ans=0.125 +2024-07-28 08:02:07,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129922.66666666667, ans=0.125 +2024-07-28 08:02:12,883 INFO [train.py:1114] (1/4) Epoch 10, batch 5450, loss[loss=0.2036, simple_loss=0.2737, pruned_loss=0.06678, over 4702.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2885, pruned_loss=0.0582, over 934411.87 frames. 
], batch size: 11, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:02:13,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=129936.0, ans=0.09899494936611666 +2024-07-28 08:02:20,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=129936.0, ans=0.125 +2024-07-28 08:02:22,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=129949.33333333333, ans=0.2 +2024-07-28 08:02:24,705 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.695e+01 6.364e+01 7.750e+01 1.165e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 08:02:25,625 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:02:31,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=129962.66666666667, ans=0.2 +2024-07-28 08:02:33,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=129962.66666666667, ans=0.125 +2024-07-28 08:02:43,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=129989.33333333333, ans=0.125 +2024-07-28 08:02:45,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.47 vs. limit=15.0 +2024-07-28 08:02:46,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=129989.33333333333, ans=0.035 +2024-07-28 08:02:48,473 INFO [train.py:1114] (1/4) Epoch 10, batch 5500, loss[loss=0.2179, simple_loss=0.3027, pruned_loss=0.06653, over 4111.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2877, pruned_loss=0.05844, over 931889.65 frames. ], batch size: 25, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:03:11,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.38 vs. limit=15.0 +2024-07-28 08:03:15,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.53 vs. limit=15.0 +2024-07-28 08:03:22,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=130056.0, ans=0.0 +2024-07-28 08:03:26,089 INFO [train.py:1114] (1/4) Epoch 10, batch 5550, loss[loss=0.1741, simple_loss=0.2586, pruned_loss=0.04476, over 4710.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2876, pruned_loss=0.0583, over 934216.52 frames. ], batch size: 12, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:03:26,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.63 vs. 
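limit=12.0

Many of the scheduled names above end in `balancer.prob`, `min_positive`, `min_abs`, or `max_abs`. These belong to balancer modules that keep activation statistics inside chosen bounds, leaving the forward output unchanged and instead adding a small corrective term to the gradient during backprop. A toy autograd sketch of that gradient-nudging idea, reduced to a single per-channel mean-absolute-value constraint (an assumption-laden simplification: the real module also constrains the fraction of positive values via `min_positive` and fires only with probability `prob`):

```python
import torch

class ToyBalancer(torch.autograd.Function):
    """Toy illustration only: identity forward; in backward, add a tiny
    extra gradient on channels whose mean |activation| drifts outside
    [min_abs, max_abs]. Not the real icefall Balancer."""
    @staticmethod
    def forward(ctx, x, min_abs=0.2, max_abs=10.0, strength=1e-4):
        ctx.save_for_backward(x)
        ctx.cfg = (min_abs, max_abs, strength)
        return x

    @staticmethod
    def backward(ctx, grad_out):
        (x,) = ctx.saved_tensors
        min_abs, max_abs, strength = ctx.cfg
        mean_abs = x.abs().mean(dim=0, keepdim=True)  # per-channel statistic
        too_small = (mean_abs < min_abs).float()      # grow these magnitudes
        too_large = (mean_abs > max_abs).float()      # shrink these
        # Gradient descent on the nudge pushes |x| up where too_small and
        # down where too_large; grad_out itself passes through unchanged.
        nudge = strength * x.sign() * (too_large - too_small)
        return grad_out + nudge, None, None, None

x = (torch.randn(100, 8) * 0.01).requires_grad_()  # tiny activations
y = ToyBalancer.apply(x, 0.2, 10.0, 1e-4)
y.sum().backward()           # x.grad now includes the corrective nudge
```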
+2024-07-28 08:03:35,926 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.857e+01 6.242e+01 7.417e+01 1.070e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 08:03:37,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=130082.66666666667, ans=0.0 +2024-07-28 08:03:41,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=130096.0, ans=0.2 +2024-07-28 08:03:44,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=130096.0, ans=0.125 +2024-07-28 08:03:55,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130122.66666666667, ans=0.1 +2024-07-28 08:03:59,469 INFO [train.py:1114] (1/4) Epoch 10, batch 5600, loss[loss=0.1758, simple_loss=0.2744, pruned_loss=0.03863, over 4741.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2881, pruned_loss=0.05844, over 934920.02 frames. ], batch size: 14, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:05,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=130149.33333333333, ans=0.125 +2024-07-28 08:04:06,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=130149.33333333333, ans=0.125 +2024-07-28 08:04:12,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130162.66666666667, ans=0.1 +2024-07-28 08:04:24,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=130162.66666666667, ans=0.0 +2024-07-28 08:04:25,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=130162.66666666667, ans=0.125 +2024-07-28 08:04:28,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130176.0, ans=0.1 +2024-07-28 08:04:29,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.50 vs. limit=15.0 +2024-07-28 08:04:30,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.30 vs. limit=10.0 +2024-07-28 08:04:40,122 INFO [train.py:1114] (1/4) Epoch 10, batch 5650, loss[loss=0.2023, simple_loss=0.2919, pruned_loss=0.05635, over 4511.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2863, pruned_loss=0.05761, over 937204.12 frames. ], batch size: 21, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:45,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=130202.66666666667, ans=0.0 +2024-07-28 08:04:46,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.81 vs.
limit=15.0 +2024-07-28 08:04:50,450 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.620e+01 6.091e+01 7.074e+01 1.306e+02, threshold=1.218e+02, percent-clipped=1.0 +2024-07-28 08:04:56,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.32 vs. limit=10.0 +2024-07-28 08:05:05,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=130242.66666666667, ans=0.0 +2024-07-28 08:05:13,680 INFO [train.py:1114] (1/4) Epoch 10, batch 5700, loss[loss=0.2049, simple_loss=0.2952, pruned_loss=0.05736, over 4692.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2868, pruned_loss=0.0579, over 938037.39 frames. ], batch size: 13, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:05:14,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130269.33333333333, ans=0.125 +2024-07-28 08:05:18,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130269.33333333333, ans=0.1 +2024-07-28 08:05:20,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.65 vs. limit=15.0 +2024-07-28 08:05:35,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=130296.0, ans=0.125 +2024-07-28 08:05:40,311 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=22.5 +2024-07-28 08:05:45,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=130322.66666666667, ans=0.125 +2024-07-28 08:05:46,582 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.99 vs. limit=22.5 +2024-07-28 08:05:50,245 INFO [train.py:1114] (1/4) Epoch 10, batch 5750, loss[loss=0.2394, simple_loss=0.3187, pruned_loss=0.08006, over 4758.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2872, pruned_loss=0.05782, over 937822.00 frames. ], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:06:00,018 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.735e+01 6.185e+01 6.687e+01 9.991e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 08:06:08,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.79 vs. limit=12.0 +2024-07-28 08:06:13,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=130376.0, ans=0.125 +2024-07-28 08:06:20,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.90 vs. limit=10.0 +2024-07-28 08:06:24,161 INFO [train.py:1114] (1/4) Epoch 10, batch 5800, loss[loss=0.2238, simple_loss=0.3034, pruned_loss=0.07208, over 4696.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.288, pruned_loss=0.05816, over 937267.94 frames. 
], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:06:26,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130402.66666666667, ans=0.125 +2024-07-28 08:06:40,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=130429.33333333333, ans=0.125 +2024-07-28 08:06:57,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=130456.0, ans=0.125 +2024-07-28 08:06:58,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130469.33333333333, ans=0.1 +2024-07-28 08:06:59,058 INFO [train.py:1114] (1/4) Epoch 10, batch 5850, loss[loss=0.2698, simple_loss=0.3414, pruned_loss=0.09906, over 4411.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.288, pruned_loss=0.05791, over 937424.34 frames. ], batch size: 21, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:09,040 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.762e+01 6.655e+01 7.927e+01 1.283e+02, threshold=1.331e+02, percent-clipped=2.0 +2024-07-28 08:07:19,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0 +2024-07-28 08:07:26,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=130509.33333333333, ans=0.125 +2024-07-28 08:07:30,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130522.66666666667, ans=0.125 +2024-07-28 08:07:34,278 INFO [train.py:1114] (1/4) Epoch 10, batch 5900, loss[loss=0.2423, simple_loss=0.3286, pruned_loss=0.07806, over 4690.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.287, pruned_loss=0.05762, over 937613.23 frames. ], batch size: 15, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:45,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=130549.33333333333, ans=0.0 +2024-07-28 08:08:06,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=130589.33333333333, ans=0.2 +2024-07-28 08:08:10,392 INFO [train.py:1114] (1/4) Epoch 10, batch 5950, loss[loss=0.2225, simple_loss=0.3113, pruned_loss=0.06683, over 4691.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2867, pruned_loss=0.05687, over 939686.57 frames. ], batch size: 15, lr: 7.49e-03, grad_scale: 64.0 +2024-07-28 08:08:11,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=130602.66666666667, ans=0.025 +2024-07-28 08:08:18,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=130616.0, ans=0.0 +2024-07-28 08:08:20,239 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.913e+01 5.602e+01 6.081e+01 6.794e+01 9.729e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 08:08:39,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=130656.0, ans=0.0 +2024-07-28 08:08:45,947 INFO [train.py:1114] (1/4) Epoch 10, batch 6000, loss[loss=0.2517, simple_loss=0.3347, pruned_loss=0.08432, over 4266.00 frames. 
], tot_loss[loss=0.201, simple_loss=0.2868, pruned_loss=0.05763, over 936687.76 frames. ], batch size: 25, lr: 7.48e-03, grad_scale: 64.0 +2024-07-28 08:08:45,948 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 08:08:51,925 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.8100, 1.9914, 3.5943, 2.2189], device='cuda:1') +2024-07-28 08:08:52,166 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.2040, 4.5449, 4.6220, 4.7486], device='cuda:1') +2024-07-28 08:08:58,309 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.1713, simple_loss=0.2758, pruned_loss=0.03335, over 944034.00 frames. +2024-07-28 08:08:58,310 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 08:09:00,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=130669.33333333333, ans=0.125 +2024-07-28 08:09:11,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130696.0, ans=0.0 +2024-07-28 08:09:12,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=130696.0, ans=0.0 +2024-07-28 08:09:29,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.73 vs. limit=22.5 +2024-07-28 08:09:30,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130722.66666666667, ans=0.125 +2024-07-28 08:09:32,065 INFO [train.py:1114] (1/4) Epoch 10, batch 6050, loss[loss=0.2192, simple_loss=0.2959, pruned_loss=0.07125, over 4778.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2854, pruned_loss=0.05736, over 938229.70 frames. ], batch size: 12, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:09:34,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=130736.0, ans=0.125 +2024-07-28 08:09:41,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=130749.33333333333, ans=10.0 +2024-07-28 08:09:42,573 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.819e+01 6.565e+01 7.638e+01 1.917e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-28 08:09:42,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=130749.33333333333, ans=0.015 +2024-07-28 08:09:47,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=130762.66666666667, ans=0.125 +2024-07-28 08:09:59,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.46 vs. limit=22.5 +2024-07-28 08:10:00,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=130789.33333333333, ans=0.125 +2024-07-28 08:10:03,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.80 vs. 
limit=15.0 +2024-07-28 08:10:04,891 INFO [train.py:1114] (1/4) Epoch 10, batch 6100, loss[loss=0.2473, simple_loss=0.3411, pruned_loss=0.07679, over 4698.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2846, pruned_loss=0.05683, over 937962.24 frames. ], batch size: 15, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:21,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=130829.33333333333, ans=0.2 +2024-07-28 08:10:21,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=130829.33333333333, ans=0.025 +2024-07-28 08:10:21,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=130829.33333333333, ans=0.0 +2024-07-28 08:10:23,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=130829.33333333333, ans=0.0 +2024-07-28 08:10:36,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=130856.0, ans=0.125 +2024-07-28 08:10:38,547 INFO [train.py:1114] (1/4) Epoch 10, batch 6150, loss[loss=0.2631, simple_loss=0.3356, pruned_loss=0.09528, over 3441.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2856, pruned_loss=0.05732, over 936911.32 frames. ], batch size: 35, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:40,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=130869.33333333333, ans=0.0 +2024-07-28 08:10:49,586 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.804e+01 6.352e+01 7.086e+01 1.134e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 08:11:00,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=130909.33333333333, ans=0.09899494936611666 +2024-07-28 08:11:09,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=130922.66666666667, ans=0.125 +2024-07-28 08:11:12,357 INFO [train.py:1114] (1/4) Epoch 10, batch 6200, loss[loss=0.1977, simple_loss=0.3004, pruned_loss=0.04752, over 4735.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2857, pruned_loss=0.05694, over 936300.67 frames. ], batch size: 14, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:11:15,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=130936.0, ans=0.125 +2024-07-28 08:11:20,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.12 vs. limit=15.0 +2024-07-28 08:11:21,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.81 vs. limit=5.0 +2024-07-28 08:11:35,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. limit=15.0 +2024-07-28 08:11:43,978 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:11:46,549 INFO [train.py:1114] (1/4) Epoch 10, batch 6250, loss[loss=0.1677, simple_loss=0.2646, pruned_loss=0.03543, over 4816.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2859, pruned_loss=0.05689, over 932940.65 frames. 
], batch size: 14, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:11:47,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.01 vs. limit=22.5 +2024-07-28 08:11:55,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=15.0 +2024-07-28 08:11:57,348 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.378e+01 5.979e+01 6.836e+01 8.576e+01 1.211e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 08:12:01,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.41 vs. limit=22.5 +2024-07-28 08:12:12,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131042.66666666667, ans=0.125 +2024-07-28 08:12:19,829 INFO [train.py:1114] (1/4) Epoch 10, batch 6300, loss[loss=0.2428, simple_loss=0.3115, pruned_loss=0.08708, over 4529.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2861, pruned_loss=0.05717, over 929423.91 frames. ], batch size: 10, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:21,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.05 vs. limit=22.5 +2024-07-28 08:12:22,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131069.33333333333, ans=0.125 +2024-07-28 08:12:23,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=131069.33333333333, ans=0.0 +2024-07-28 08:12:27,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.77 vs. limit=22.5 +2024-07-28 08:12:30,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131082.66666666666, ans=0.1 +2024-07-28 08:12:30,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131082.66666666666, ans=0.1 +2024-07-28 08:12:38,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131096.0, ans=0.1 +2024-07-28 08:12:40,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=15.0 +2024-07-28 08:12:45,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=131109.33333333334, ans=0.5 +2024-07-28 08:12:54,183 INFO [train.py:1114] (1/4) Epoch 10, batch 6350, loss[loss=0.1777, simple_loss=0.27, pruned_loss=0.04275, over 4417.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2854, pruned_loss=0.0565, over 933829.35 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:56,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.56 vs. 
limit=12.0 +2024-07-28 08:13:07,204 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.570e+01 6.150e+01 7.348e+01 9.033e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 08:13:10,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=131162.66666666666, ans=0.125 +2024-07-28 08:13:29,576 INFO [train.py:1114] (1/4) Epoch 10, batch 6400, loss[loss=0.1973, simple_loss=0.2848, pruned_loss=0.05489, over 4641.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2858, pruned_loss=0.05684, over 935317.44 frames. ], batch size: 13, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:13:38,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=131216.0, ans=0.125 +2024-07-28 08:14:06,250 INFO [train.py:1114] (1/4) Epoch 10, batch 6450, loss[loss=0.2085, simple_loss=0.3029, pruned_loss=0.05705, over 4479.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2859, pruned_loss=0.05659, over 938862.02 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:14:16,714 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.892e+01 6.683e+01 7.805e+01 1.062e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 08:14:18,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131282.66666666666, ans=0.1 +2024-07-28 08:14:20,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.70 vs. limit=15.0 +2024-07-28 08:14:21,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=131296.0, ans=0.07 +2024-07-28 08:14:24,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131296.0, ans=0.1 +2024-07-28 08:14:27,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=131309.33333333334, ans=0.125 +2024-07-28 08:14:30,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-28 08:14:31,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=131309.33333333334, ans=0.125 +2024-07-28 08:14:38,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=131336.0, ans=0.125 +2024-07-28 08:14:38,895 INFO [train.py:1114] (1/4) Epoch 10, batch 6500, loss[loss=0.2647, simple_loss=0.3336, pruned_loss=0.09784, over 3247.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2856, pruned_loss=0.05625, over 939825.64 frames. ], batch size: 35, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:14:46,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=131349.33333333334, ans=0.125 +2024-07-28 08:14:48,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.77 vs. 
limit=15.0 +2024-07-28 08:14:59,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.93 vs. limit=10.0 +2024-07-28 08:14:59,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=131376.0, ans=0.125 +2024-07-28 08:15:01,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=131376.0, ans=0.0 +2024-07-28 08:15:11,953 INFO [train.py:1114] (1/4) Epoch 10, batch 6550, loss[loss=0.1934, simple_loss=0.2653, pruned_loss=0.06077, over 4793.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2858, pruned_loss=0.05619, over 942716.63 frames. ], batch size: 11, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:15:31,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=131416.0, ans=0.09899494936611666 +2024-07-28 08:15:32,283 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.455e+01 5.898e+01 6.813e+01 1.235e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 08:15:38,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=131416.0, ans=0.0 +2024-07-28 08:15:43,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=131429.33333333334, ans=0.125 +2024-07-28 08:15:56,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131456.0, ans=0.125 +2024-07-28 08:16:03,313 INFO [train.py:1114] (1/4) Epoch 10, batch 6600, loss[loss=0.1803, simple_loss=0.2707, pruned_loss=0.04497, over 4924.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2857, pruned_loss=0.05615, over 944593.26 frames. ], batch size: 14, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:04,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=131469.33333333334, ans=0.2 +2024-07-28 08:16:14,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.40 vs. limit=15.0 +2024-07-28 08:16:34,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=131522.66666666666, ans=0.95 +2024-07-28 08:16:36,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=131522.66666666666, ans=0.2 +2024-07-28 08:16:37,432 INFO [train.py:1114] (1/4) Epoch 10, batch 6650, loss[loss=0.2099, simple_loss=0.2931, pruned_loss=0.06335, over 4634.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2864, pruned_loss=0.05692, over 943240.55 frames. 
], batch size: 17, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:47,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=131549.33333333334, ans=10.0 +2024-07-28 08:16:48,317 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.716e+01 6.391e+01 7.041e+01 1.048e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 08:16:56,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=131562.66666666666, ans=0.125 +2024-07-28 08:16:56,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=131562.66666666666, ans=0.2 +2024-07-28 08:17:01,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=131576.0, ans=0.0 +2024-07-28 08:17:11,433 INFO [train.py:1114] (1/4) Epoch 10, batch 6700, loss[loss=0.2026, simple_loss=0.2995, pruned_loss=0.0528, over 4681.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2852, pruned_loss=0.05653, over 942406.19 frames. ], batch size: 19, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:17:19,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131616.0, ans=0.1 +2024-07-28 08:17:22,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=131616.0, ans=0.125 +2024-07-28 08:17:34,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=131629.33333333334, ans=0.025 +2024-07-28 08:17:35,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=131629.33333333334, ans=0.2 +2024-07-28 08:17:38,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131629.33333333334, ans=0.1 +2024-07-28 08:17:47,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=131656.0, ans=0.035 +2024-07-28 08:17:54,050 INFO [train.py:1114] (1/4) Epoch 10, batch 6750, loss[loss=0.2357, simple_loss=0.3075, pruned_loss=0.08199, over 4294.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2852, pruned_loss=0.05684, over 940581.07 frames. ], batch size: 25, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:17:54,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=131669.33333333334, ans=0.2 +2024-07-28 08:17:59,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.25 vs. 
limit=15.0 +2024-07-28 08:18:00,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131682.66666666666, ans=0.125 +2024-07-28 08:18:04,620 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.726e+01 6.534e+01 7.091e+01 1.095e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 08:18:04,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=131682.66666666666, ans=0.5 +2024-07-28 08:18:11,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131696.0, ans=0.1 +2024-07-28 08:18:14,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.48 vs. limit=22.5 +2024-07-28 08:18:17,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=131709.33333333334, ans=0.09899494936611666 +2024-07-28 08:18:28,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=131736.0, ans=0.5 +2024-07-28 08:18:29,031 INFO [train.py:1114] (1/4) Epoch 10, batch 6800, loss[loss=0.2208, simple_loss=0.3143, pruned_loss=0.06368, over 4646.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.285, pruned_loss=0.05654, over 938582.61 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:18:50,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=131776.0, ans=0.125 +2024-07-28 08:18:51,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=131776.0, ans=0.2 +2024-07-28 08:18:54,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=131776.0, ans=0.125 +2024-07-28 08:19:03,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=131802.66666666666, ans=15.0 +2024-07-28 08:19:03,571 INFO [train.py:1114] (1/4) Epoch 10, batch 6850, loss[loss=0.2299, simple_loss=0.3158, pruned_loss=0.07202, over 4693.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2853, pruned_loss=0.05683, over 940578.01 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:03,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=131802.66666666666, ans=0.125 +2024-07-28 08:19:04,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.51 vs. 
limit=15.0 +2024-07-28 08:19:11,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=131816.0, ans=0.125 +2024-07-28 08:19:13,970 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.736e+01 6.428e+01 7.691e+01 1.005e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 08:19:17,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=131829.33333333334, ans=0.125 +2024-07-28 08:19:35,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131856.0, ans=0.1 +2024-07-28 08:19:38,304 INFO [train.py:1114] (1/4) Epoch 10, batch 6900, loss[loss=0.2115, simple_loss=0.3033, pruned_loss=0.05983, over 4972.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2857, pruned_loss=0.05655, over 943076.23 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:46,564 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.37 vs. limit=10.0 +2024-07-28 08:19:51,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131882.66666666666, ans=0.125 +2024-07-28 08:19:54,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=131896.0, ans=0.125 +2024-07-28 08:19:57,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=12.0 +2024-07-28 08:20:07,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.64 vs. limit=15.0 +2024-07-28 08:20:13,659 INFO [train.py:1114] (1/4) Epoch 10, batch 6950, loss[loss=0.1815, simple_loss=0.2675, pruned_loss=0.04772, over 4504.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.286, pruned_loss=0.05709, over 940476.07 frames. ], batch size: 10, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:24,310 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+01 5.671e+01 6.271e+01 7.214e+01 1.195e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 08:20:24,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=131949.33333333334, ans=0.0 +2024-07-28 08:20:24,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.49 vs. limit=10.0 +2024-07-28 08:20:25,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=131949.33333333334, ans=0.125 +2024-07-28 08:20:46,957 INFO [train.py:1114] (1/4) Epoch 10, batch 7000, loss[loss=0.1783, simple_loss=0.2754, pruned_loss=0.04059, over 4594.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2856, pruned_loss=0.05679, over 938423.35 frames. ], batch size: 17, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:53,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.55 vs. 
limit=10.0 +2024-07-28 08:21:07,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.71 vs. limit=22.5 +2024-07-28 08:21:19,392 INFO [train.py:1114] (1/4) Epoch 10, batch 7050, loss[loss=0.1907, simple_loss=0.2954, pruned_loss=0.04296, over 4720.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.286, pruned_loss=0.05659, over 941694.25 frames. ], batch size: 19, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:20,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=132069.33333333334, ans=0.05 +2024-07-28 08:21:24,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=132069.33333333334, ans=0.125 +2024-07-28 08:21:26,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=132082.66666666666, ans=0.125 +2024-07-28 08:21:30,142 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.787e+01 6.450e+01 7.707e+01 1.222e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 08:21:33,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132096.0, ans=0.1 +2024-07-28 08:21:35,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=132096.0, ans=0.2 +2024-07-28 08:21:37,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=132096.0, ans=0.125 +2024-07-28 08:21:37,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=132096.0, ans=0.0 +2024-07-28 08:21:38,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=132096.0, ans=0.125 +2024-07-28 08:21:50,199 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:21:52,623 INFO [train.py:1114] (1/4) Epoch 10, batch 7100, loss[loss=0.2075, simple_loss=0.3055, pruned_loss=0.05477, over 4782.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2867, pruned_loss=0.05682, over 936711.09 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:55,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.39 vs. 
limit=22.5 +2024-07-28 08:21:59,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=132149.33333333334, ans=0.125 +2024-07-28 08:22:02,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=132149.33333333334, ans=0.2 +2024-07-28 08:22:03,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=132149.33333333334, ans=0.0 +2024-07-28 08:22:04,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=132149.33333333334, ans=0.0 +2024-07-28 08:22:13,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=132176.0, ans=0.125 +2024-07-28 08:22:19,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132189.33333333334, ans=0.0 +2024-07-28 08:22:25,453 INFO [train.py:1114] (1/4) Epoch 10, batch 7150, loss[loss=0.1927, simple_loss=0.2758, pruned_loss=0.05477, over 4454.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2848, pruned_loss=0.05628, over 937702.44 frames. ], batch size: 21, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:22:35,933 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.833e+01 6.423e+01 7.127e+01 1.033e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 08:22:39,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132229.33333333334, ans=0.0 +2024-07-28 08:22:43,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=132229.33333333334, ans=0.2 +2024-07-28 08:22:45,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132242.66666666666, ans=0.1 +2024-07-28 08:22:45,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=132242.66666666666, ans=0.0 +2024-07-28 08:22:50,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=132242.66666666666, ans=0.125 +2024-07-28 08:22:58,298 INFO [train.py:1114] (1/4) Epoch 10, batch 7200, loss[loss=0.2477, simple_loss=0.3139, pruned_loss=0.0907, over 4788.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2861, pruned_loss=0.05667, over 937559.69 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:23:17,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=132296.0, ans=0.125 +2024-07-28 08:23:28,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=132309.33333333334, ans=0.0 +2024-07-28 08:23:28,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=132309.33333333334, ans=0.0 +2024-07-28 08:23:37,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.26 vs. 
limit=22.5 +2024-07-28 08:23:39,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=132322.66666666666, ans=0.125 +2024-07-28 08:23:42,261 INFO [train.py:1114] (1/4) Epoch 10, batch 7250, loss[loss=0.1605, simple_loss=0.2383, pruned_loss=0.04136, over 4850.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2847, pruned_loss=0.05606, over 939120.20 frames. ], batch size: 12, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:23:49,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=132349.33333333334, ans=0.05 +2024-07-28 08:23:52,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=132349.33333333334, ans=0.125 +2024-07-28 08:23:52,709 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.603e+01 6.257e+01 7.383e+01 1.105e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 08:23:57,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=132362.66666666666, ans=0.125 +2024-07-28 08:23:57,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.50 vs. limit=15.0 +2024-07-28 08:24:03,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=132376.0, ans=0.125 +2024-07-28 08:24:15,231 INFO [train.py:1114] (1/4) Epoch 10, batch 7300, loss[loss=0.1802, simple_loss=0.2678, pruned_loss=0.04631, over 4850.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2843, pruned_loss=0.05572, over 939428.88 frames. ], batch size: 12, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:24:30,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=132429.33333333334, ans=0.0 +2024-07-28 08:24:37,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=132442.66666666666, ans=0.0 +2024-07-28 08:24:48,300 INFO [train.py:1114] (1/4) Epoch 10, batch 7350, loss[loss=0.1518, simple_loss=0.248, pruned_loss=0.02782, over 4641.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2837, pruned_loss=0.05528, over 938830.32 frames. ], batch size: 12, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:24:49,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=132469.33333333334, ans=0.025 +2024-07-28 08:24:54,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=132482.66666666666, ans=0.1 +2024-07-28 08:24:58,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.561e+01 5.989e+01 6.823e+01 9.799e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 08:25:09,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=132509.33333333334, ans=0.125 +2024-07-28 08:25:13,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132522.66666666666, ans=0.1 +2024-07-28 08:25:20,770 INFO [train.py:1114] (1/4) Epoch 10, batch 7400, loss[loss=0.1976, simple_loss=0.2975, pruned_loss=0.04887, over 4696.00 frames. 
], tot_loss[loss=0.1963, simple_loss=0.2831, pruned_loss=0.0547, over 940194.07 frames. ], batch size: 13, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:25:23,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=132536.0, ans=0.04949747468305833 +2024-07-28 08:25:30,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=132549.33333333334, ans=0.0 +2024-07-28 08:25:32,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=132549.33333333334, ans=0.125 +2024-07-28 08:25:37,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=132562.66666666666, ans=0.05 +2024-07-28 08:25:40,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=132576.0, ans=0.0 +2024-07-28 08:25:53,283 INFO [train.py:1114] (1/4) Epoch 10, batch 7450, loss[loss=0.1722, simple_loss=0.2506, pruned_loss=0.04694, over 4619.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2835, pruned_loss=0.05522, over 937488.69 frames. ], batch size: 11, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:25:56,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=132602.66666666666, ans=0.0 +2024-07-28 08:26:01,392 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 08:26:04,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=132616.0, ans=0.0 +2024-07-28 08:26:05,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.617e+01 6.160e+01 7.093e+01 9.986e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 08:26:16,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=132642.66666666666, ans=0.0 +2024-07-28 08:26:27,360 INFO [train.py:1114] (1/4) Epoch 10, batch 7500, loss[loss=0.3088, simple_loss=0.3793, pruned_loss=0.1191, over 3302.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2848, pruned_loss=0.05595, over 935172.18 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:26:44,886 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:26:46,462 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.31 vs. limit=10.0 +2024-07-28 08:26:48,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132709.33333333334, ans=0.0 +2024-07-28 08:27:00,936 INFO [train.py:1114] (1/4) Epoch 10, batch 7550, loss[loss=0.1658, simple_loss=0.2602, pruned_loss=0.0357, over 4640.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2859, pruned_loss=0.05671, over 935520.18 frames. 
], batch size: 17, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:27:11,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.843e+01 6.500e+01 7.580e+01 1.303e+02, threshold=1.300e+02, percent-clipped=2.0 +2024-07-28 08:27:14,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.64 vs. limit=22.5 +2024-07-28 08:27:33,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.86 vs. limit=15.0 +2024-07-28 08:27:45,625 INFO [train.py:1114] (1/4) Epoch 10, batch 7600, loss[loss=0.1907, simple_loss=0.2917, pruned_loss=0.04487, over 4811.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2871, pruned_loss=0.05761, over 937243.79 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:27:45,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132802.66666666666, ans=0.1 +2024-07-28 08:27:48,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132802.66666666666, ans=0.1 +2024-07-28 08:28:10,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.73 vs. limit=15.0 +2024-07-28 08:28:19,598 INFO [train.py:1114] (1/4) Epoch 10, batch 7650, loss[loss=0.1647, simple_loss=0.2529, pruned_loss=0.03828, over 4938.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2872, pruned_loss=0.05776, over 936233.70 frames. ], batch size: 12, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:28:22,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=132869.33333333334, ans=0.025 +2024-07-28 08:28:23,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=132869.33333333334, ans=0.125 +2024-07-28 08:28:30,218 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.574e+01 6.113e+01 7.353e+01 1.031e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 08:28:58,469 INFO [train.py:1114] (1/4) Epoch 10, batch 7700, loss[loss=0.1827, simple_loss=0.283, pruned_loss=0.04126, over 4695.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2868, pruned_loss=0.05722, over 933595.16 frames. ], batch size: 13, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:28:59,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=132936.0, ans=0.2 +2024-07-28 08:28:59,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=132936.0, ans=0.2 +2024-07-28 08:29:02,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=132936.0, ans=0.09899494936611666 +2024-07-28 08:29:21,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=132962.66666666666, ans=0.0 +2024-07-28 08:29:30,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=132989.33333333334, ans=0.2 +2024-07-28 08:29:34,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.03 vs. 
limit=12.0 +2024-07-28 08:29:34,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.00 vs. limit=10.0 +2024-07-28 08:29:35,842 INFO [train.py:1114] (1/4) Epoch 10, batch 7750, loss[loss=0.175, simple_loss=0.2689, pruned_loss=0.04055, over 4934.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2872, pruned_loss=0.05709, over 935233.65 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:29:38,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=133002.66666666666, ans=0.125 +2024-07-28 08:29:45,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.03 vs. limit=15.0 +2024-07-28 08:29:46,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.644e+01 6.328e+01 7.366e+01 9.654e+01, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 08:29:46,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=133016.0, ans=0.07 +2024-07-28 08:29:50,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=133029.33333333334, ans=0.125 +2024-07-28 08:29:52,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=133029.33333333334, ans=0.125 +2024-07-28 08:29:56,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.45 vs. limit=10.0 +2024-07-28 08:30:08,493 INFO [train.py:1114] (1/4) Epoch 10, batch 7800, loss[loss=0.2102, simple_loss=0.2923, pruned_loss=0.06405, over 4665.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2867, pruned_loss=0.05647, over 937105.11 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0 +2024-07-28 08:30:12,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=133069.33333333334, ans=0.125 +2024-07-28 08:30:14,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=133082.66666666666, ans=0.0 +2024-07-28 08:30:16,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=133082.66666666666, ans=0.2 +2024-07-28 08:30:24,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133096.0, ans=0.125 +2024-07-28 08:30:29,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=133109.33333333334, ans=0.125 +2024-07-28 08:30:32,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=15.0 +2024-07-28 08:30:49,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=133122.66666666666, ans=0.125 +2024-07-28 08:30:51,380 INFO [train.py:1114] (1/4) Epoch 10, batch 7850, loss[loss=0.1943, simple_loss=0.2753, pruned_loss=0.05664, over 4554.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2855, pruned_loss=0.05629, over 936653.86 frames. 
], batch size: 10, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:31:08,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=133136.0, ans=0.125 +2024-07-28 08:31:13,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=133149.33333333334, ans=0.0 +2024-07-28 08:31:16,880 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.726e+01 6.171e+01 6.913e+01 1.107e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 08:31:19,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=133162.66666666666, ans=0.0 +2024-07-28 08:31:21,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133162.66666666666, ans=0.1 +2024-07-28 08:31:24,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=133162.66666666666, ans=0.125 +2024-07-28 08:31:35,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=133189.33333333334, ans=0.125 +2024-07-28 08:31:36,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133189.33333333334, ans=0.125 +2024-07-28 08:31:38,773 INFO [train.py:1114] (1/4) Epoch 10, batch 7900, loss[loss=0.1895, simple_loss=0.2807, pruned_loss=0.04912, over 4878.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2882, pruned_loss=0.0579, over 933490.02 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:31:55,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=133216.0, ans=0.025 +2024-07-28 08:32:12,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133229.33333333334, ans=0.1 +2024-07-28 08:32:22,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=133256.0, ans=0.125 +2024-07-28 08:32:23,429 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:32:26,511 INFO [train.py:1114] (1/4) Epoch 10, batch 7950, loss[loss=0.2668, simple_loss=0.3281, pruned_loss=0.1028, over 3505.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.288, pruned_loss=0.05786, over 935494.62 frames. ], batch size: 35, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:32:27,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=133269.33333333334, ans=0.0 +2024-07-28 08:32:27,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=133269.33333333334, ans=0.0 +2024-07-28 08:32:28,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.82 vs. 
limit=15.0 +2024-07-28 08:32:37,003 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.725e+01 6.303e+01 6.935e+01 1.035e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 08:33:00,923 INFO [train.py:1114] (1/4) Epoch 10, batch 8000, loss[loss=0.1586, simple_loss=0.2512, pruned_loss=0.03295, over 4622.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2857, pruned_loss=0.05727, over 935321.77 frames. ], batch size: 11, lr: 7.41e-03, grad_scale: 32.0 +2024-07-28 08:33:09,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=133349.33333333334, ans=0.0 +2024-07-28 08:33:13,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133362.66666666666, ans=0.1 +2024-07-28 08:33:18,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=133362.66666666666, ans=0.125 +2024-07-28 08:33:19,566 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0 +2024-07-28 08:33:20,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=133376.0, ans=0.0 +2024-07-28 08:33:27,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=133389.33333333334, ans=0.2 +2024-07-28 08:33:32,893 INFO [train.py:1114] (1/4) Epoch 10, batch 8050, loss[loss=0.1848, simple_loss=0.2788, pruned_loss=0.04539, over 4812.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.286, pruned_loss=0.0569, over 934877.96 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 64.0 +2024-07-28 08:33:43,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.682e+01 6.216e+01 7.101e+01 1.040e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 08:33:59,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=133456.0, ans=0.125 +2024-07-28 08:34:06,151 INFO [train.py:1114] (1/4) Epoch 10, batch 8100, loss[loss=0.2, simple_loss=0.3003, pruned_loss=0.04989, over 4798.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2869, pruned_loss=0.05696, over 934224.79 frames. ], batch size: 15, lr: 7.41e-03, grad_scale: 64.0 +2024-07-28 08:34:09,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=133469.33333333334, ans=0.125 +2024-07-28 08:34:10,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=133469.33333333334, ans=0.0 +2024-07-28 08:34:14,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=133482.66666666666, ans=0.0 +2024-07-28 08:34:23,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=133496.0, ans=0.0 +2024-07-28 08:34:30,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=133509.33333333334, ans=0.125 +2024-07-28 08:34:38,560 INFO [train.py:1114] (1/4) Epoch 10, batch 8150, loss[loss=0.1688, simple_loss=0.2619, pruned_loss=0.03784, over 4793.00 frames. 
+2024-07-28 08:34:48,785 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.775e+01 6.372e+01 7.050e+01 1.046e+02, threshold=1.274e+02, percent-clipped=0.0
+2024-07-28 08:34:57,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=133576.0, ans=0.125
+2024-07-28 08:34:59,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=133576.0, ans=0.0
+2024-07-28 08:35:00,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.68 vs. limit=22.5
+2024-07-28 08:35:04,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=133589.33333333334, ans=0.125
+2024-07-28 08:35:06,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.14 vs. limit=22.5
+2024-07-28 08:35:11,083 INFO [train.py:1114] (1/4) Epoch 10, batch 8200, loss[loss=0.198, simple_loss=0.2771, pruned_loss=0.05945, over 4803.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2851, pruned_loss=0.0557, over 938770.55 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:35:16,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=133616.0, ans=0.09899494936611666
+2024-07-28 08:35:20,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133616.0, ans=0.1
+2024-07-28 08:35:25,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=12.0
+2024-07-28 08:35:30,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133642.66666666666, ans=0.1
+2024-07-28 08:35:36,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=133642.66666666666, ans=0.125
+2024-07-28 08:35:42,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=133656.0, ans=0.0
+2024-07-28 08:35:53,140 INFO [train.py:1114] (1/4) Epoch 10, batch 8250, loss[loss=0.1888, simple_loss=0.2733, pruned_loss=0.05215, over 4892.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2852, pruned_loss=0.05566, over 939119.82 frames. ], batch size: 13, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:36:10,113 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.694e+01 6.323e+01 7.329e+01 1.024e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-28 08:36:11,841 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.06 vs. limit=15.0
+2024-07-28 08:36:26,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=133722.66666666666, ans=0.0
+2024-07-28 08:36:30,629 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:36:32,392 INFO [train.py:1114] (1/4) Epoch 10, batch 8300, loss[loss=0.2364, simple_loss=0.321, pruned_loss=0.0759, over 4897.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2873, pruned_loss=0.05673, over 938932.51 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:36:33,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=133736.0, ans=0.0
+2024-07-28 08:36:38,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=133749.33333333334, ans=0.125
+2024-07-28 08:36:39,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133749.33333333334, ans=0.1
+2024-07-28 08:36:42,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=133749.33333333334, ans=0.95
+2024-07-28 08:36:55,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=133776.0, ans=0.0
+2024-07-28 08:37:05,223 INFO [train.py:1114] (1/4) Epoch 10, batch 8350, loss[loss=0.2488, simple_loss=0.3212, pruned_loss=0.08823, over 4795.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2871, pruned_loss=0.05651, over 941497.69 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:37:05,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=133802.66666666666, ans=0.125
+2024-07-28 08:37:11,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=133802.66666666666, ans=0.125
+2024-07-28 08:37:17,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.721e+01 6.167e+01 6.839e+01 1.069e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 08:37:18,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=133816.0, ans=0.0
+2024-07-28 08:37:19,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133829.33333333334, ans=0.1
+2024-07-28 08:37:24,746 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.84 vs. limit=15.0
+2024-07-28 08:37:33,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=133856.0, ans=0.125
+2024-07-28 08:37:44,279 INFO [train.py:1114] (1/4) Epoch 10, batch 8400, loss[loss=0.1999, simple_loss=0.2684, pruned_loss=0.06565, over 4775.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2866, pruned_loss=0.05639, over 939876.44 frames. ], batch size: 12, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:37:52,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=133882.66666666666, ans=0.0
+2024-07-28 08:37:52,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=133882.66666666666, ans=0.125
+2024-07-28 08:37:57,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.10 vs. limit=12.0
+2024-07-28 08:38:15,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=133922.66666666666, ans=0.125
+2024-07-28 08:38:18,210 INFO [train.py:1114] (1/4) Epoch 10, batch 8450, loss[loss=0.2033, simple_loss=0.2949, pruned_loss=0.05581, over 4795.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2868, pruned_loss=0.05607, over 938394.68 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:38:28,300 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.805e+01 6.479e+01 7.666e+01 1.044e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 08:38:31,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0
+2024-07-28 08:39:02,046 INFO [train.py:1114] (1/4) Epoch 10, batch 8500, loss[loss=0.2038, simple_loss=0.282, pruned_loss=0.06279, over 4602.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2865, pruned_loss=0.05636, over 938297.33 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:40:18,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=134056.0, ans=0.0
+2024-07-28 08:40:19,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=134056.0, ans=0.0
+2024-07-28 08:40:19,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.85 vs. limit=22.5
+2024-07-28 08:40:24,355 INFO [train.py:1114] (1/4) Epoch 10, batch 8550, loss[loss=0.1793, simple_loss=0.2573, pruned_loss=0.05064, over 4821.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.286, pruned_loss=0.05638, over 939227.91 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:40:25,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134069.33333333334, ans=0.0
+2024-07-28 08:40:44,213 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.946e+01 6.615e+01 7.789e+01 1.197e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-28 08:40:52,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=134096.0, ans=0.2
+2024-07-28 08:41:10,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=134122.66666666666, ans=0.125
+2024-07-28 08:41:12,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.31 vs. limit=22.5
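The recurring optim.py WARNING lines report quartiles of recent gradient norms together with the clipping threshold in effect. A hedged sketch of how such statistics might be gathered and a threshold derived from them (the window size and multiplier are assumptions, not the values used by this run's optimizer):

```python
# Sketch: adaptive gradient clipping against a median-derived threshold,
# in the spirit of the "grad-norm quartiles ... threshold=..." warnings.
from collections import deque
import torch

norm_history: deque = deque(maxlen=128)  # recent per-step gradient norms

def clip_with_adaptive_threshold(parameters, multiplier: float = 2.0) -> float:
    params = [p for p in parameters if p.grad is not None]
    if not params:
        return 0.0
    total_norm = torch.norm(
        torch.stack([p.grad.detach().norm(2) for p in params]), 2
    ).item()
    norm_history.append(total_norm)
    norms = torch.tensor(sorted(norm_history))
    # min / 25% / median / 75% / max, matching the five logged quartile values
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = multiplier * q[2].item()  # e.g. Clipping_scale=2.0 times median
    if total_norm > threshold:
        for p in params:
            p.grad.mul_(threshold / total_norm)
    return threshold
```

With a scheme like this, "percent-clipped=0.0" simply means no recent step exceeded the derived threshold.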
+2024-07-28 08:41:14,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=134122.66666666666, ans=0.07
+2024-07-28 08:41:18,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=134136.0, ans=0.2
+2024-07-28 08:41:19,450 INFO [train.py:1114] (1/4) Epoch 10, batch 8600, loss[loss=0.1979, simple_loss=0.2901, pruned_loss=0.05282, over 4799.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2865, pruned_loss=0.05693, over 938992.59 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:41:19,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=134136.0, ans=0.2
+2024-07-28 08:41:34,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=134162.66666666666, ans=0.0
+2024-07-28 08:41:42,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5
+2024-07-28 08:41:56,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=134189.33333333334, ans=0.125
+2024-07-28 08:41:59,312 INFO [train.py:1114] (1/4) Epoch 10, batch 8650, loss[loss=0.1971, simple_loss=0.2926, pruned_loss=0.05085, over 4900.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2862, pruned_loss=0.05649, over 940377.87 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:42:01,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134202.66666666666, ans=0.1
+2024-07-28 08:42:05,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=134216.0, ans=0.95
+2024-07-28 08:42:09,608 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.967e+01 5.912e+01 6.591e+01 7.425e+01 1.041e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-28 08:42:14,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=134229.33333333334, ans=0.125
+2024-07-28 08:42:39,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=134256.0, ans=0.2
+2024-07-28 08:42:42,710 INFO [train.py:1114] (1/4) Epoch 10, batch 8700, loss[loss=0.176, simple_loss=0.2819, pruned_loss=0.03501, over 4764.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2872, pruned_loss=0.05691, over 938046.03 frames. ], batch size: 13, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:42:45,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=134269.33333333334, ans=0.0
+2024-07-28 08:42:53,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134282.66666666666, ans=0.125
+2024-07-28 08:43:06,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.49 vs. limit=10.0
+2024-07-28 08:43:09,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=134322.66666666666, ans=0.125
+2024-07-28 08:43:14,601 INFO [train.py:1114] (1/4) Epoch 10, batch 8750, loss[loss=0.2351, simple_loss=0.3206, pruned_loss=0.0748, over 4698.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2868, pruned_loss=0.05691, over 936063.01 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:23,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=134349.33333333334, ans=0.125
+2024-07-28 08:43:24,870 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.734e+01 6.452e+01 7.346e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-28 08:43:26,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-07-28 08:43:28,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=134362.66666666666, ans=0.125
+2024-07-28 08:43:46,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134389.33333333334, ans=0.1
+2024-07-28 08:43:48,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134402.66666666666, ans=0.125
+2024-07-28 08:43:48,696 INFO [train.py:1114] (1/4) Epoch 10, batch 8800, loss[loss=0.1932, simple_loss=0.2823, pruned_loss=0.0521, over 4926.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2881, pruned_loss=0.05747, over 936887.16 frames. ], batch size: 14, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:51,910 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:43:52,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0
+2024-07-28 08:44:09,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=134442.66666666666, ans=0.125
+2024-07-28 08:44:17,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.11 vs. limit=15.0
+2024-07-28 08:44:21,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=134469.33333333334, ans=0.2
+2024-07-28 08:44:22,024 INFO [train.py:1114] (1/4) Epoch 10, batch 8850, loss[loss=0.2306, simple_loss=0.3141, pruned_loss=0.07358, over 4513.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2876, pruned_loss=0.05728, over 932046.37 frames. ], batch size: 21, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:44:27,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=134469.33333333334, ans=0.125
+2024-07-28 08:44:34,668 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.576e+01 6.226e+01 7.150e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 08:44:47,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.69 vs. limit=15.0
+2024-07-28 08:44:47,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.78 vs. limit=22.5
+2024-07-28 08:44:56,772 INFO [train.py:1114] (1/4) Epoch 10, batch 8900, loss[loss=0.1874, simple_loss=0.2741, pruned_loss=0.0504, over 4937.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2875, pruned_loss=0.05682, over 930007.83 frames. ], batch size: 12, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:44:58,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.92 vs. limit=10.0
+2024-07-28 08:45:10,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.40 vs. limit=6.0
+2024-07-28 08:45:19,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=134562.66666666666, ans=0.02
+2024-07-28 08:45:40,078 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:45:43,819 INFO [train.py:1114] (1/4) Epoch 10, batch 8950, loss[loss=0.2186, simple_loss=0.3021, pruned_loss=0.06755, over 4542.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2868, pruned_loss=0.05664, over 930541.49 frames. ], batch size: 21, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:45:48,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=134602.66666666666, ans=0.0
+2024-07-28 08:45:53,854 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.674e+01 5.627e+01 6.283e+01 7.444e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 08:46:01,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=134629.33333333334, ans=0.2
+2024-07-28 08:46:03,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=134642.66666666666, ans=0.125
+2024-07-28 08:46:15,697 INFO [train.py:1114] (1/4) Epoch 10, batch 9000, loss[loss=0.1943, simple_loss=0.2852, pruned_loss=0.05175, over 4636.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2853, pruned_loss=0.05573, over 933161.42 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:46:15,698 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 08:46:25,443 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.1685, 3.3860, 4.1148, 3.1323], device='cuda:1')
+2024-07-28 08:46:28,323 INFO [train.py:1146] (1/4) Epoch 10, validation: loss=0.1719, simple_loss=0.2766, pruned_loss=0.0336, over 944034.00 frames.
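The train.py entries pair a per-batch loss ("loss[... over N frames]") with a running "tot_loss" averaged over a frame count that drifts around ~930k. A minimal sketch of a frame-weighted running average consistent with those numbers; the exponential forgetting factor is an assumption introduced to explain the fractional, slowly varying frame totals, not something confirmed by the log:

```python
# Sketch: frame-weighted running loss like the "tot_loss[... over N frames]"
# entries. Class and decay value are illustrative, not train.py internals.
class FrameWeightedLoss:
    def __init__(self, decay: float = 0.999):
        self.loss_sum = 0.0
        self.frames = 0.0
        self.decay = decay  # assumed exponential forgetting of old batches

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tot = FrameWeightedLoss()
tot.update(0.1943, 4636.0)  # e.g. "loss=0.1943 ... over 4636.00 frames"
print(f"tot_loss={tot.value:.4f} over {tot.frames:.2f} frames")
```

Weighting by frames rather than by batches keeps long utterances from being underrepresented in the reported average.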
+2024-07-28 08:46:28,323 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 08:46:43,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=134696.0, ans=0.125
+2024-07-28 08:46:50,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=134709.33333333334, ans=0.025
+2024-07-28 08:46:55,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=134722.66666666666, ans=0.2
+2024-07-28 08:46:57,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=134722.66666666666, ans=0.125
+2024-07-28 08:47:00,432 INFO [train.py:1114] (1/4) Epoch 10, batch 9050, loss[loss=0.1821, simple_loss=0.261, pruned_loss=0.05158, over 4510.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2847, pruned_loss=0.05573, over 933836.94 frames. ], batch size: 10, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:47:04,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.16 vs. limit=10.0
+2024-07-28 08:47:06,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=134749.33333333334, ans=0.0
+2024-07-28 08:47:10,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 6.275e+01 7.546e+01 8.998e+01 1.332e+02, threshold=1.509e+02, percent-clipped=1.0
+2024-07-28 08:47:20,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-07-28 08:47:22,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=134776.0, ans=0.125
+2024-07-28 08:47:24,026 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:47:28,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=134789.33333333334, ans=0.05
+2024-07-28 08:47:32,972 INFO [train.py:1114] (1/4) Epoch 10, batch 9100, loss[loss=0.1907, simple_loss=0.2886, pruned_loss=0.04633, over 4936.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.285, pruned_loss=0.05609, over 936494.69 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:47:51,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=134829.33333333334, ans=0.125
+2024-07-28 08:48:01,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134856.0, ans=0.125
+2024-07-28 08:48:04,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=134856.0, ans=0.2
+2024-07-28 08:48:06,625 INFO [train.py:1114] (1/4) Epoch 10, batch 9150, loss[loss=0.2077, simple_loss=0.2864, pruned_loss=0.06449, over 4812.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.287, pruned_loss=0.05712, over 935788.27 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:48:06,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=134869.33333333334, ans=0.125
+2024-07-28 08:48:14,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134882.66666666666, ans=0.1
+2024-07-28 08:48:15,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=134882.66666666666, ans=0.125
+2024-07-28 08:48:16,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.926e+01 6.660e+01 7.545e+01 1.146e+02, threshold=1.332e+02, percent-clipped=0.0
+2024-07-28 08:48:17,451 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=12.0
+2024-07-28 08:48:19,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.20 vs. limit=15.0
+2024-07-28 08:48:31,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=134909.33333333334, ans=0.125
+2024-07-28 08:48:38,470 INFO [train.py:1114] (1/4) Epoch 10, batch 9200, loss[loss=0.1884, simple_loss=0.2713, pruned_loss=0.0528, over 4858.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2862, pruned_loss=0.0567, over 937505.92 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:48:38,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=134936.0, ans=0.2
+2024-07-28 08:48:39,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=134936.0, ans=0.2
+2024-07-28 08:48:49,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=134949.33333333334, ans=0.125
+2024-07-28 08:48:56,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=134962.66666666666, ans=0.04949747468305833
+2024-07-28 08:48:57,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=134976.0, ans=0.125
+2024-07-28 08:48:58,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134976.0, ans=0.125
+2024-07-28 08:49:07,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.31 vs. limit=22.5
+2024-07-28 08:49:11,330 INFO [train.py:1114] (1/4) Epoch 10, batch 9250, loss[loss=0.2288, simple_loss=0.2994, pruned_loss=0.07909, over 4631.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2871, pruned_loss=0.05745, over 938167.52 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:49:11,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=135002.66666666666, ans=0.0
+2024-07-28 08:49:19,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.61 vs. limit=6.0
+2024-07-28 08:49:20,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0
+2024-07-28 08:49:21,508 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.719e+01 6.236e+01 6.936e+01 9.849e+01, threshold=1.247e+02, percent-clipped=0.0
+2024-07-28 08:49:29,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=135029.33333333334, ans=0.05
+2024-07-28 08:49:46,099 INFO [train.py:1114] (1/4) Epoch 10, batch 9300, loss[loss=0.1643, simple_loss=0.2576, pruned_loss=0.03545, over 4786.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2872, pruned_loss=0.05713, over 937900.64 frames. ], batch size: 12, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:49:53,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=135082.66666666666, ans=0.125
+2024-07-28 08:49:59,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135096.0, ans=0.125
+2024-07-28 08:50:00,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=12.0
+2024-07-28 08:50:13,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=12.0
+2024-07-28 08:50:14,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135122.66666666666, ans=0.0
+2024-07-28 08:50:14,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=135122.66666666666, ans=0.04949747468305833
+2024-07-28 08:50:17,994 INFO [train.py:1114] (1/4) Epoch 10, batch 9350, loss[loss=0.187, simple_loss=0.2748, pruned_loss=0.0496, over 4798.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2867, pruned_loss=0.05696, over 935630.46 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:50:18,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=135136.0, ans=0.125
+2024-07-28 08:50:29,375 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:50:34,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=135149.33333333334, ans=0.5
+2024-07-28 08:50:36,248 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.628e+01 6.269e+01 7.143e+01 1.097e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-28 08:50:46,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135176.0, ans=0.0
+2024-07-28 08:50:47,776 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0
+2024-07-28 08:50:48,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=135176.0, ans=0.2
+2024-07-28 08:50:50,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135176.0, ans=0.1
+2024-07-28 08:50:54,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=135189.33333333334, ans=0.0
+2024-07-28 08:50:58,072 INFO [train.py:1114] (1/4) Epoch 10, batch 9400, loss[loss=0.256, simple_loss=0.3365, pruned_loss=0.08774, over 4694.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2879, pruned_loss=0.05761, over 932954.50 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:51:26,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135256.0, ans=0.0
+2024-07-28 08:51:29,448 INFO [train.py:1114] (1/4) Epoch 10, batch 9450, loss[loss=0.2218, simple_loss=0.2902, pruned_loss=0.07671, over 4817.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2876, pruned_loss=0.0572, over 932294.07 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 32.0
+2024-07-28 08:51:38,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=135282.66666666666, ans=0.025
+2024-07-28 08:51:39,983 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.423e+01 5.630e+01 6.223e+01 7.000e+01 1.011e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 08:51:47,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=135296.0, ans=0.0
+2024-07-28 08:51:47,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.98 vs. limit=6.0
+2024-07-28 08:51:55,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=135322.66666666666, ans=0.125
+2024-07-28 08:51:55,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135322.66666666666, ans=0.125
+2024-07-28 08:52:01,557 INFO [train.py:1114] (1/4) Epoch 10, batch 9500, loss[loss=0.2043, simple_loss=0.2897, pruned_loss=0.05944, over 4696.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.287, pruned_loss=0.05699, over 934599.07 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 32.0
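The recurring "Whitening: ... metric=X vs. limit=Y" lines compare a per-module diagnostic against a limit, penalizing activations whose channel covariance is far from isotropic. One plausible metric, shown below as a hedged sketch, is the ratio of the mean squared eigenvalue to the squared mean eigenvalue of the channel covariance, which equals 1.0 for a perfectly white covariance and grows as variance concentrates in a few directions; the exact formula in scaling.py may differ:

```python
# Sketch of a whitening-style diagnostic; formula is an assumption.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (frames, channels); split channels into groups, average the metric
    metrics = []
    for g in x.chunk(num_groups, dim=1):
        g = g - g.mean(dim=0, keepdim=True)
        cov = (g.T @ g) / g.shape[0]
        eigs = torch.linalg.eigvalsh(cov)
        # mean(eig^2) / mean(eig)^2: 1.0 when all eigenvalues are equal
        metrics.append((eigs.pow(2).mean() / eigs.mean().pow(2)).item())
    return sum(metrics) / len(metrics)

x = torch.randn(1000, 256)
print(whitening_metric(x, num_groups=1))  # near its floor for white data
```

Under this reading, a line like "metric=12.24 vs. limit=15.0" means the module's activations are still within the tolerated anisotropy.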
+2024-07-28 08:52:07,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135349.33333333334, ans=0.1
+2024-07-28 08:52:07,907 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:52:08,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=135349.33333333334, ans=0.0
+2024-07-28 08:52:12,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=135349.33333333334, ans=0.0
+2024-07-28 08:52:16,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135362.66666666666, ans=0.1
+2024-07-28 08:52:16,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=135362.66666666666, ans=0.0
+2024-07-28 08:52:16,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=135362.66666666666, ans=0.09899494936611666
+2024-07-28 08:52:19,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0
+2024-07-28 08:52:25,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.65 vs. limit=12.0
+2024-07-28 08:52:31,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=135389.33333333334, ans=0.2
+2024-07-28 08:52:31,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=135389.33333333334, ans=0.125
+2024-07-28 08:52:34,229 INFO [train.py:1114] (1/4) Epoch 10, batch 9550, loss[loss=0.187, simple_loss=0.2701, pruned_loss=0.05199, over 4772.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2872, pruned_loss=0.05769, over 931702.16 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 16.0
+2024-07-28 08:52:42,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=135416.0, ans=0.5
+2024-07-28 08:52:48,486 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.659e+01 6.121e+01 6.852e+01 1.035e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 08:52:50,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.63 vs. limit=15.0
+2024-07-28 08:52:51,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135429.33333333334, ans=0.1
+2024-07-28 08:52:51,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=135429.33333333334, ans=0.2
+2024-07-28 08:52:53,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135429.33333333334, ans=0.125
+2024-07-28 08:52:53,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.63 vs. limit=15.0
+2024-07-28 08:52:54,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=135429.33333333334, ans=0.2
+2024-07-28 08:52:56,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=135442.66666666666, ans=10.0
+2024-07-28 08:52:59,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.72 vs. limit=15.0
+2024-07-28 08:53:06,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=135456.0, ans=10.0
+2024-07-28 08:53:07,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135456.0, ans=0.0
+2024-07-28 08:53:09,497 INFO [train.py:1114] (1/4) Epoch 10, batch 9600, loss[loss=0.2718, simple_loss=0.3406, pruned_loss=0.1015, over 3596.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2866, pruned_loss=0.05714, over 930852.49 frames. ], batch size: 36, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:53:10,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=135469.33333333334, ans=0.125
+2024-07-28 08:53:15,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=135469.33333333334, ans=0.125
+2024-07-28 08:53:16,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.06 vs. limit=12.0
+2024-07-28 08:53:19,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135482.66666666666, ans=0.125
+2024-07-28 08:53:23,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135482.66666666666, ans=0.125
+2024-07-28 08:53:26,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=135496.0, ans=0.0
+2024-07-28 08:53:28,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=135496.0, ans=0.025
+2024-07-28 08:53:30,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135509.33333333334, ans=0.125
+2024-07-28 08:53:39,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=135522.66666666666, ans=0.125
+2024-07-28 08:53:40,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=135522.66666666666, ans=0.0
+2024-07-28 08:53:43,548 INFO [train.py:1114] (1/4) Epoch 10, batch 9650, loss[loss=0.2286, simple_loss=0.3113, pruned_loss=0.07293, over 4818.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2871, pruned_loss=0.05752, over 926771.35 frames. ], batch size: 16, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:53:49,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135549.33333333334, ans=0.125
+2024-07-28 08:53:54,684 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.653e+01 6.117e+01 7.383e+01 9.422e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 08:54:00,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=135562.66666666666, ans=0.0
+2024-07-28 08:54:03,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=135576.0, ans=0.05
+2024-07-28 08:54:07,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=135576.0, ans=0.2
+2024-07-28 08:54:14,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=15.0
+2024-07-28 08:54:15,217 INFO [train.py:1114] (1/4) Epoch 10, batch 9700, loss[loss=0.1912, simple_loss=0.2893, pruned_loss=0.04652, over 4224.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2868, pruned_loss=0.05696, over 925275.85 frames. ], batch size: 25, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:54:37,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=135642.66666666666, ans=0.125
+2024-07-28 08:54:51,974 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:54:54,873 INFO [train.py:1114] (1/4) Epoch 10, batch 9750, loss[loss=0.2296, simple_loss=0.3224, pruned_loss=0.06838, over 4706.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2861, pruned_loss=0.0569, over 925597.89 frames. ], batch size: 15, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:55:00,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=135682.66666666666, ans=0.125
+2024-07-28 08:55:00,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=135682.66666666666, ans=0.025
+2024-07-28 08:55:01,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=135682.66666666666, ans=0.0
+2024-07-28 08:55:01,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=135682.66666666666, ans=0.0
+2024-07-28 08:55:09,164 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.808e+01 6.506e+01 7.716e+01 1.140e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 08:55:15,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=135696.0, ans=0.2
+2024-07-28 08:55:21,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=135709.33333333334, ans=0.1
+2024-07-28 08:55:28,967 INFO [train.py:1114] (1/4) Epoch 10, batch 9800, loss[loss=0.1943, simple_loss=0.2874, pruned_loss=0.05063, over 4709.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2857, pruned_loss=0.05673, over 925014.48 frames. ], batch size: 12, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:55:39,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135749.33333333334, ans=0.1
+2024-07-28 08:55:43,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135762.66666666666, ans=0.0
+2024-07-28 08:55:49,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=135776.0, ans=0.0
+2024-07-28 08:56:01,762 INFO [train.py:1114] (1/4) Epoch 10, batch 9850, loss[loss=0.1872, simple_loss=0.2758, pruned_loss=0.04931, over 4905.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2863, pruned_loss=0.05671, over 927275.97 frames. ], batch size: 15, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:56:10,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=135816.0, ans=0.125
+2024-07-28 08:56:14,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=135816.0, ans=0.0
+2024-07-28 08:56:14,542 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.916e+01 6.813e+01 8.007e+01 1.183e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-28 08:56:18,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=135829.33333333334, ans=0.2
+2024-07-28 08:56:34,474 INFO [train.py:1114] (1/4) Epoch 10, batch 9900, loss[loss=0.216, simple_loss=0.3012, pruned_loss=0.06543, over 4837.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2874, pruned_loss=0.05719, over 926768.76 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:56:51,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=135896.0, ans=0.125
+2024-07-28 08:57:05,750 INFO [train.py:1114] (1/4) Epoch 10, batch 9950, loss[loss=0.1798, simple_loss=0.2587, pruned_loss=0.05048, over 4816.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2878, pruned_loss=0.05772, over 928918.84 frames. ], batch size: 11, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:57:17,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 6.153e+01 7.007e+01 8.060e+01 1.036e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-28 08:57:28,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=135976.0, ans=0.125
+2024-07-28 08:57:28,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135976.0, ans=0.125
+2024-07-28 08:57:29,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.24 vs. limit=6.0
+2024-07-28 08:57:31,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=135989.33333333334, ans=0.125
+2024-07-28 08:57:35,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=135989.33333333334, ans=0.5
+2024-07-28 08:57:37,846 INFO [train.py:1114] (1/4) Epoch 10, batch 10000, loss[loss=0.2073, simple_loss=0.2973, pruned_loss=0.05867, over 4610.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2913, pruned_loss=0.05926, over 926543.63 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0
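The "grad_scale" field fluctuates (32 → 64 → 32 → 16) because it is a dynamic loss-scaling factor for mixed-precision training: it grows while steps stay finite and is cut back when gradients overflow. A sketch using the standard torch.cuda.amp API; the interval and factors shown are PyTorch defaults, not values confirmed from this run:

```python
# Sketch: dynamic loss scaling behind a fluctuating grad_scale value.
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=32.0,       # comparable to the grad_scale seen in the log
    growth_factor=2.0,     # double after enough consecutive good steps
    backoff_factor=0.5,    # halve on inf/NaN gradients
    growth_interval=2000,  # good steps required before growing
)

def train_step(model, batch, optimizer, loss_fn):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = loss_fn(model(batch["inputs"]), batch["targets"])
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # skips the update if gradients overflowed
    scaler.update()         # adjusts the scale up or down
    return loss.item(), scaler.get_scale()
```

A drop like 32 → 16 in the log would then correspond to an overflow-triggered backoff, and 32 → 64 to a stretch of stable steps.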
+2024-07-28 08:58:02,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=136056.0, ans=0.125
+2024-07-28 08:58:07,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.73 vs. limit=15.0
+2024-07-28 08:58:09,776 INFO [train.py:1114] (1/4) Epoch 10, batch 10050, loss[loss=0.2126, simple_loss=0.2813, pruned_loss=0.07195, over 3232.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2942, pruned_loss=0.0611, over 914295.14 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0
+2024-07-28 08:58:22,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.772e+01 6.455e+01 7.428e+01 1.276e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-28 08:58:24,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=136096.0, ans=0.025
+2024-07-28 08:58:37,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=136122.66666666666, ans=0.125
+2024-07-28 08:58:40,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136122.66666666666, ans=0.1
+2024-07-28 08:58:43,270 INFO [train.py:1114] (1/4) Epoch 10, batch 10100, loss[loss=0.283, simple_loss=0.3368, pruned_loss=0.1146, over 3205.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2994, pruned_loss=0.06674, over 862104.16 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0
+2024-07-28 08:58:46,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=136136.0, ans=0.0
+2024-07-28 08:58:46,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136136.0, ans=0.1
+2024-07-28 08:58:46,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.31 vs. limit=15.0
+2024-07-28 08:58:51,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=136149.33333333334, ans=0.125
+2024-07-28 08:58:52,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.93 vs. limit=15.0
+2024-07-28 08:59:06,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=136176.0, ans=0.125
+2024-07-28 08:59:13,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=136189.33333333334, ans=0.0
+2024-07-28 08:59:15,617 INFO [train.py:1114] (1/4) Epoch 10, batch 10150, loss[loss=0.2372, simple_loss=0.3141, pruned_loss=0.08016, over 3436.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3044, pruned_loss=0.0723, over 820480.12 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0
+2024-07-28 08:59:22,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.00 vs. limit=10.0
+2024-07-28 08:59:27,022 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.570e+01 6.618e+01 7.074e+01 7.488e+01 9.490e+01, threshold=1.415e+02, percent-clipped=0.0
+2024-07-28 08:59:31,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=136229.33333333334, ans=0.125
+2024-07-28 08:59:41,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=136256.0, ans=0.04949747468305833
+2024-07-28 08:59:42,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=136256.0, ans=0.0
+2024-07-28 08:59:43,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=7.64 vs. limit=12.0
+2024-07-28 08:59:46,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=136256.0, ans=0.125
+2024-07-28 08:59:47,608 INFO [train.py:1114] (1/4) Epoch 10, batch 10200, loss[loss=0.3111, simple_loss=0.3687, pruned_loss=0.1268, over 3264.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3089, pruned_loss=0.07664, over 789006.76 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0
+2024-07-28 09:00:46,198 INFO [train.py:1114] (1/4) Epoch 11, batch 0, loss[loss=0.1608, simple_loss=0.2499, pruned_loss=0.03586, over 4857.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2499, pruned_loss=0.03586, over 4857.00 frames. ], batch size: 12, lr: 7.00e-03, grad_scale: 32.0
+2024-07-28 09:00:46,198 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 09:00:54,438 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([3.5125, 3.8424, 4.1863, 4.6751], device='cuda:1')
+2024-07-28 09:00:57,970 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1737, simple_loss=0.279, pruned_loss=0.03421, over 944034.00 frames.
+2024-07-28 09:00:57,971 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 09:01:03,866 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.55 vs. limit=15.0
+2024-07-28 09:01:11,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=136325.33333333334, ans=0.025
+2024-07-28 09:01:12,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=136325.33333333334, ans=0.125
+2024-07-28 09:01:15,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136325.33333333334, ans=0.1
+2024-07-28 09:01:20,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=136338.66666666666, ans=0.1
+2024-07-28 09:01:21,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=136338.66666666666, ans=0.125
+2024-07-28 09:01:24,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=136338.66666666666, ans=0.0
+2024-07-28 09:01:25,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.20 vs. limit=22.5
+2024-07-28 09:01:29,220 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 6.307e+01 6.804e+01 7.464e+01 1.172e+02, threshold=1.361e+02, percent-clipped=0.0
+2024-07-28 09:01:32,018 INFO [train.py:1114] (1/4) Epoch 11, batch 50, loss[loss=0.1829, simple_loss=0.2675, pruned_loss=0.04909, over 4605.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2906, pruned_loss=0.05805, over 205679.03 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0
+2024-07-28 09:01:32,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=136365.33333333334, ans=22.5
+2024-07-28 09:01:34,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=136365.33333333334, ans=0.0
+2024-07-28 09:01:37,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=136365.33333333334, ans=0.05
+2024-07-28 09:01:41,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136378.66666666666, ans=0.1
+2024-07-28 09:01:48,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=136392.0, ans=0.09899494936611666
+2024-07-28 09:01:50,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0
+2024-07-28 09:01:56,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=136405.33333333334, ans=0.05
+2024-07-28 09:01:56,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=136405.33333333334, ans=0.0
+2024-07-28 09:02:05,895 INFO [train.py:1114] (1/4) Epoch 11, batch 100, loss[loss=0.1835, simple_loss=0.2689, pruned_loss=0.04905, over 4635.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2884, pruned_loss=0.05652, over 365225.89 frames. ], batch size: 12, lr: 6.99e-03, grad_scale: 32.0
+2024-07-28 09:02:11,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.43 vs. limit=22.5
+2024-07-28 09:02:13,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=136445.33333333334, ans=0.125
+2024-07-28 09:02:14,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=136445.33333333334, ans=0.07
+2024-07-28 09:02:25,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136458.66666666666, ans=0.125
+2024-07-28 09:02:26,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=136472.0, ans=0.0
+2024-07-28 09:02:36,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.385e+01 5.958e+01 6.972e+01 1.024e+02, threshold=1.192e+02, percent-clipped=0.0
+2024-07-28 09:02:39,365 INFO [train.py:1114] (1/4) Epoch 11, batch 150, loss[loss=0.1806, simple_loss=0.2699, pruned_loss=0.0457, over 4619.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2856, pruned_loss=0.05505, over 494091.33 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0
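During each validation pass, the zipformer.py lines dump a per-head "attn_weights_entropy" tensor as a diagnostic: heads whose attention collapses onto a single position show low entropy, while diffuse heads show high entropy. A hedged sketch of computing that quantity; the shapes and averaging are assumptions, not the zipformer.py code:

```python
# Sketch: per-head Shannon entropy of attention weights, like the
# "attn_weights_entropy = tensor([...])" validation diagnostic.
import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    # attn: (num_heads, query_positions, key_positions), rows sum to 1
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # (heads, queries)
    return ent.mean(dim=-1)                         # one value per head

weights = torch.softmax(torch.randn(4, 10, 10), dim=-1)
print(attn_weights_entropy(weights))  # four per-head entropy values
```

Tracking these values across epochs gives a cheap signal that no attention head has degenerated.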
+2024-07-28 09:02:40,207 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 09:02:40,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.76 vs. limit=15.0
+2024-07-28 09:02:45,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0
+2024-07-28 09:02:46,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.51 vs. limit=10.0
+2024-07-28 09:02:52,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=136525.33333333334, ans=0.0
+2024-07-28 09:02:55,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=136525.33333333334, ans=0.025
+2024-07-28 09:03:03,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=136538.66666666666, ans=0.025
+2024-07-28 09:03:03,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.90 vs. limit=12.0
+2024-07-28 09:03:07,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=136552.0, ans=0.0
+2024-07-28 09:03:14,116 INFO [train.py:1114] (1/4) Epoch 11, batch 200, loss[loss=0.2315, simple_loss=0.3256, pruned_loss=0.06868, over 4438.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2875, pruned_loss=0.05619, over 593536.07 frames. ], batch size: 21, lr: 6.99e-03, grad_scale: 32.0
+2024-07-28 09:03:15,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.40 vs. limit=15.0
+2024-07-28 09:03:19,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.19 vs. limit=22.5
+2024-07-28 09:03:27,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=136592.0, ans=0.125
+2024-07-28 09:03:28,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136592.0, ans=0.1
+2024-07-28 09:03:36,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.72 vs. limit=22.5
+2024-07-28 09:03:43,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.10 vs. limit=15.0
+2024-07-28 09:03:45,021 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.746e+01 6.330e+01 7.204e+01 1.314e+02, threshold=1.266e+02, percent-clipped=1.0
+2024-07-28 09:03:47,789 INFO [train.py:1114] (1/4) Epoch 11, batch 250, loss[loss=0.2463, simple_loss=0.3413, pruned_loss=0.0757, over 4651.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2867, pruned_loss=0.05594, over 670623.64 frames. ], batch size: 16, lr: 6.99e-03, grad_scale: 32.0
+2024-07-28 09:03:52,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136632.0, ans=0.1
+2024-07-28 09:03:53,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=136632.0, ans=0.09899494936611666
+2024-07-28 09:03:54,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=136645.33333333334, ans=0.125
+2024-07-28 09:03:57,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=136645.33333333334, ans=0.125
+2024-07-28 09:04:03,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=136658.66666666666, ans=0.0
+2024-07-28 09:04:15,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.40 vs. limit=15.0
+2024-07-28 09:04:17,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=136685.33333333334, ans=0.04949747468305833
+2024-07-28 09:04:25,276 INFO [train.py:1114] (1/4) Epoch 11, batch 300, loss[loss=0.1905, simple_loss=0.2806, pruned_loss=0.05027, over 4795.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2858, pruned_loss=0.05607, over 730218.40 frames. ], batch size: 15, lr: 6.99e-03, grad_scale: 32.0
+2024-07-28 09:04:28,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.57 vs. limit=22.5
+2024-07-28 09:04:32,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=136712.0, ans=0.07
+2024-07-28 09:04:41,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.72 vs. limit=15.0
+2024-07-28 09:04:56,685 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.546e+01 5.956e+01 6.746e+01 1.009e+02, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 09:04:57,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.04 vs. limit=15.0
+2024-07-28 09:04:59,413 INFO [train.py:1114] (1/4) Epoch 11, batch 350, loss[loss=0.1439, simple_loss=0.2353, pruned_loss=0.02619, over 4936.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2852, pruned_loss=0.05532, over 776767.48 frames. ], batch size: 12, lr: 6.98e-03, grad_scale: 32.0
+2024-07-28 09:05:00,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.64 vs. limit=22.5
+2024-07-28 09:05:04,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=136765.33333333334, ans=0.025
+2024-07-28 09:05:08,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=136778.66666666666, ans=0.125
+2024-07-28 09:05:11,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=136778.66666666666, ans=0.2
+2024-07-28 09:05:13,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=15.0
+2024-07-28 09:05:20,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136805.33333333334, ans=0.125
+2024-07-28 09:05:22,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=136805.33333333334, ans=0.0
+2024-07-28 09:05:24,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.81 vs. limit=22.5
+2024-07-28 09:05:26,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=136818.66666666666, ans=0.125
+2024-07-28 09:05:29,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=136818.66666666666, ans=0.125
+2024-07-28 09:05:33,210 INFO [train.py:1114] (1/4) Epoch 11, batch 400, loss[loss=0.1976, simple_loss=0.2878, pruned_loss=0.05373, over 4690.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2857, pruned_loss=0.05576, over 813834.04 frames. ], batch size: 13, lr: 6.98e-03, grad_scale: 32.0
+2024-07-28 09:05:39,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.25 vs. limit=15.0
+2024-07-28 09:05:43,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.72 vs. limit=22.5
+2024-07-28 09:05:54,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=136858.66666666666, ans=0.125
+2024-07-28 09:05:55,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=136858.66666666666, ans=0.125
+2024-07-28 09:05:55,535 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.27 vs. limit=15.0
+2024-07-28 09:06:00,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.02 vs. limit=22.5
+2024-07-28 09:06:10,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=136885.33333333334, ans=0.05
+2024-07-28 09:06:12,500 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.682e+01 6.253e+01 7.367e+01 1.050e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 09:06:15,186 INFO [train.py:1114] (1/4) Epoch 11, batch 450, loss[loss=0.1837, simple_loss=0.2749, pruned_loss=0.04623, over 4637.00 frames.
], tot_loss[loss=0.198, simple_loss=0.2847, pruned_loss=0.05563, over 839142.43 frames. ], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:06:17,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=136898.66666666666, ans=0.2 +2024-07-28 09:06:29,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136912.0, ans=0.125 +2024-07-28 09:06:45,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.70 vs. limit=15.0 +2024-07-28 09:06:55,243 INFO [train.py:1114] (1/4) Epoch 11, batch 500, loss[loss=0.2239, simple_loss=0.3103, pruned_loss=0.06874, over 4677.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2839, pruned_loss=0.05532, over 861488.74 frames. ], batch size: 15, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:06:58,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.91 vs. limit=15.0 +2024-07-28 09:07:10,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 09:07:15,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=137005.33333333334, ans=0.0 +2024-07-28 09:07:17,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=137005.33333333334, ans=0.2 +2024-07-28 09:07:23,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=137018.66666666666, ans=0.125 +2024-07-28 09:07:24,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137018.66666666666, ans=0.1 +2024-07-28 09:07:25,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=137018.66666666666, ans=0.2 +2024-07-28 09:07:25,819 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.492e+01 6.007e+01 6.943e+01 8.543e+01, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 09:07:26,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.14 vs. limit=22.5 +2024-07-28 09:07:28,418 INFO [train.py:1114] (1/4) Epoch 11, batch 550, loss[loss=0.2122, simple_loss=0.3097, pruned_loss=0.05738, over 4828.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2835, pruned_loss=0.05493, over 878126.66 frames. ], batch size: 18, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:07:29,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=137032.0, ans=0.0 +2024-07-28 09:07:31,885 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:07:32,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.44 vs. 
limit=6.0 +2024-07-28 09:07:34,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=137045.33333333334, ans=0.125 +2024-07-28 09:07:36,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=137045.33333333334, ans=0.025 +2024-07-28 09:07:40,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137045.33333333334, ans=0.1 +2024-07-28 09:07:56,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.22 vs. limit=15.0 +2024-07-28 09:07:58,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=137085.33333333334, ans=0.0 +2024-07-28 09:08:02,905 INFO [train.py:1114] (1/4) Epoch 11, batch 600, loss[loss=0.2185, simple_loss=0.3047, pruned_loss=0.06612, over 4624.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2826, pruned_loss=0.05452, over 892419.32 frames. ], batch size: 16, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:08:06,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=137098.66666666666, ans=0.125 +2024-07-28 09:08:32,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=137152.0, ans=10.0 +2024-07-28 09:08:32,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137152.0, ans=0.1 +2024-07-28 09:08:33,003 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.574e+01 6.202e+01 6.752e+01 1.007e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 09:08:35,647 INFO [train.py:1114] (1/4) Epoch 11, batch 650, loss[loss=0.1714, simple_loss=0.2669, pruned_loss=0.03796, over 4757.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2817, pruned_loss=0.05411, over 904111.04 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:08:44,410 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0 +2024-07-28 09:08:47,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137178.66666666666, ans=0.0 +2024-07-28 09:08:52,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=137192.0, ans=0.025 +2024-07-28 09:09:05,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=137218.66666666666, ans=0.95 +2024-07-28 09:09:07,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=137218.66666666666, ans=0.1 +2024-07-28 09:09:09,626 INFO [train.py:1114] (1/4) Epoch 11, batch 700, loss[loss=0.1819, simple_loss=0.2696, pruned_loss=0.04707, over 4638.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2831, pruned_loss=0.0546, over 912393.09 frames. 
], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:11,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=137232.0, ans=0.0 +2024-07-28 09:09:13,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137232.0, ans=0.0 +2024-07-28 09:09:14,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=137232.0, ans=0.125 +2024-07-28 09:09:19,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=137245.33333333334, ans=0.035 +2024-07-28 09:09:24,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=137258.66666666666, ans=0.125 +2024-07-28 09:09:40,611 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+01 5.602e+01 6.234e+01 6.972e+01 9.125e+01, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 09:09:45,969 INFO [train.py:1114] (1/4) Epoch 11, batch 750, loss[loss=0.2162, simple_loss=0.3171, pruned_loss=0.05766, over 4699.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2831, pruned_loss=0.054, over 918707.67 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:48,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=137298.66666666666, ans=0.0 +2024-07-28 09:09:55,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=137312.0, ans=0.025 +2024-07-28 09:09:57,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.62 vs. limit=12.0 +2024-07-28 09:10:20,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=137352.0, ans=0.125 +2024-07-28 09:10:21,530 INFO [train.py:1114] (1/4) Epoch 11, batch 800, loss[loss=0.1657, simple_loss=0.2471, pruned_loss=0.04215, over 4858.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2829, pruned_loss=0.0539, over 923733.66 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:10:30,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.01 vs. limit=22.5 +2024-07-28 09:10:36,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.99 vs. limit=15.0 +2024-07-28 09:10:37,396 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:10:42,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=137405.33333333334, ans=0.0 +2024-07-28 09:11:00,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=137418.66666666666, ans=0.0 +2024-07-28 09:11:00,610 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.597e+01 6.070e+01 6.795e+01 9.040e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 09:11:03,294 INFO [train.py:1114] (1/4) Epoch 11, batch 850, loss[loss=0.1728, simple_loss=0.259, pruned_loss=0.04329, over 4661.00 frames. 
], tot_loss[loss=0.1957, simple_loss=0.2829, pruned_loss=0.05424, over 927368.17 frames. ], batch size: 14, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:03,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=137432.0, ans=0.0 +2024-07-28 09:11:05,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=137432.0, ans=0.125 +2024-07-28 09:11:06,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=137432.0, ans=0.0 +2024-07-28 09:11:26,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=137472.0, ans=0.07 +2024-07-28 09:11:36,216 INFO [train.py:1114] (1/4) Epoch 11, batch 900, loss[loss=0.1769, simple_loss=0.2525, pruned_loss=0.05066, over 4850.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2845, pruned_loss=0.05544, over 928649.49 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:44,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=137512.0, ans=0.025 +2024-07-28 09:11:45,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=137512.0, ans=0.07 +2024-07-28 09:11:48,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137512.0, ans=0.125 +2024-07-28 09:11:49,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137525.33333333334, ans=0.0 +2024-07-28 09:12:08,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=137552.0, ans=0.2 +2024-07-28 09:12:09,245 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.732e+01 6.294e+01 7.433e+01 1.155e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 09:12:12,061 INFO [train.py:1114] (1/4) Epoch 11, batch 950, loss[loss=0.1679, simple_loss=0.2481, pruned_loss=0.04391, over 4780.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2842, pruned_loss=0.05526, over 929961.61 frames. ], batch size: 12, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:22,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=137578.66666666666, ans=0.125 +2024-07-28 09:12:32,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=137605.33333333334, ans=0.125 +2024-07-28 09:12:46,508 INFO [train.py:1114] (1/4) Epoch 11, batch 1000, loss[loss=0.1864, simple_loss=0.2789, pruned_loss=0.04699, over 4960.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2846, pruned_loss=0.05551, over 929678.13 frames. 
], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:49,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137632.0, ans=0.1 +2024-07-28 09:12:58,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137645.33333333334, ans=0.125 +2024-07-28 09:13:09,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=137672.0, ans=0.125 +2024-07-28 09:13:10,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=137672.0, ans=15.0 +2024-07-28 09:13:18,718 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.919e+01 5.562e+01 6.150e+01 7.152e+01 9.857e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:13:18,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=137685.33333333334, ans=0.125 +2024-07-28 09:13:18,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=137685.33333333334, ans=0.2 +2024-07-28 09:13:21,482 INFO [train.py:1114] (1/4) Epoch 11, batch 1050, loss[loss=0.2255, simple_loss=0.3162, pruned_loss=0.06738, over 4879.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2839, pruned_loss=0.05484, over 932507.89 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:13:29,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137698.66666666666, ans=0.1 +2024-07-28 09:13:42,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=137725.33333333334, ans=0.125 +2024-07-28 09:13:48,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=137738.66666666666, ans=0.125 +2024-07-28 09:13:48,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137738.66666666666, ans=0.125 +2024-07-28 09:13:49,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=137738.66666666666, ans=0.125 +2024-07-28 09:13:57,433 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:13:58,577 INFO [train.py:1114] (1/4) Epoch 11, batch 1100, loss[loss=0.1739, simple_loss=0.2604, pruned_loss=0.04369, over 4893.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2845, pruned_loss=0.05503, over 934586.71 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:13:59,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. 
limit=6.0 +2024-07-28 09:14:04,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=137778.66666666666, ans=0.125 +2024-07-28 09:14:06,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=137778.66666666666, ans=0.2 +2024-07-28 09:14:08,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=137778.66666666666, ans=0.125 +2024-07-28 09:14:08,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=137778.66666666666, ans=0.0 +2024-07-28 09:14:17,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.33 vs. limit=15.0 +2024-07-28 09:14:40,318 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.240e+01 5.580e+01 6.119e+01 6.842e+01 1.423e+02, threshold=1.224e+02, percent-clipped=1.0 +2024-07-28 09:14:42,887 INFO [train.py:1114] (1/4) Epoch 11, batch 1150, loss[loss=0.1917, simple_loss=0.2768, pruned_loss=0.05334, over 4891.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2841, pruned_loss=0.055, over 934038.42 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:15:09,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=137885.33333333334, ans=0.0 +2024-07-28 09:15:12,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=137885.33333333334, ans=0.125 +2024-07-28 09:15:16,426 INFO [train.py:1114] (1/4) Epoch 11, batch 1200, loss[loss=0.1823, simple_loss=0.2802, pruned_loss=0.04223, over 4873.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2851, pruned_loss=0.05507, over 933104.69 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:15:16,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=137898.66666666666, ans=0.0 +2024-07-28 09:15:18,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.34 vs. limit=22.5 +2024-07-28 09:15:21,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=137898.66666666666, ans=0.0 +2024-07-28 09:15:23,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=137912.0, ans=0.125 +2024-07-28 09:15:31,158 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:15:39,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=137938.66666666666, ans=0.025 +2024-07-28 09:15:39,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.49 vs. 
limit=6.0 +2024-07-28 09:15:49,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=137952.0, ans=0.125 +2024-07-28 09:15:50,319 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.564e+01 6.259e+01 7.036e+01 9.371e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 09:15:53,032 INFO [train.py:1114] (1/4) Epoch 11, batch 1250, loss[loss=0.2036, simple_loss=0.2932, pruned_loss=0.05704, over 4807.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.285, pruned_loss=0.05437, over 936957.29 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:16:04,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=137978.66666666666, ans=0.125 +2024-07-28 09:16:07,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=137992.0, ans=10.0 +2024-07-28 09:16:26,247 INFO [train.py:1114] (1/4) Epoch 11, batch 1300, loss[loss=0.1908, simple_loss=0.2826, pruned_loss=0.04957, over 4683.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2835, pruned_loss=0.05353, over 938545.39 frames. ], batch size: 19, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:16:26,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.21 vs. limit=15.0 +2024-07-28 09:16:28,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=138032.0, ans=0.125 +2024-07-28 09:16:32,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138045.33333333334, ans=0.125 +2024-07-28 09:16:32,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138045.33333333334, ans=0.1 +2024-07-28 09:16:32,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.26 vs. limit=10.0 +2024-07-28 09:16:36,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.84 vs. limit=22.5 +2024-07-28 09:16:44,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138058.66666666666, ans=0.125 +2024-07-28 09:16:50,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=138072.0, ans=0.2 +2024-07-28 09:16:57,014 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.412e+01 5.624e+01 6.382e+01 7.662e+01 1.173e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-28 09:16:59,899 INFO [train.py:1114] (1/4) Epoch 11, batch 1350, loss[loss=0.2103, simple_loss=0.2879, pruned_loss=0.06638, over 4751.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2833, pruned_loss=0.05365, over 940817.41 frames. 
], batch size: 13, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:00,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138098.66666666666, ans=0.1 +2024-07-28 09:17:15,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138125.33333333334, ans=0.125 +2024-07-28 09:17:16,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=138125.33333333334, ans=0.125 +2024-07-28 09:17:17,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=138125.33333333334, ans=0.125 +2024-07-28 09:17:24,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=138138.66666666666, ans=0.025 +2024-07-28 09:17:33,276 INFO [train.py:1114] (1/4) Epoch 11, batch 1400, loss[loss=0.1645, simple_loss=0.262, pruned_loss=0.03346, over 4709.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2829, pruned_loss=0.05315, over 942538.63 frames. ], batch size: 11, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:39,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.78 vs. limit=5.0 +2024-07-28 09:17:39,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.60 vs. limit=10.0 +2024-07-28 09:18:06,173 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.725e+01 6.807e+01 7.781e+01 1.138e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:18:06,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138218.66666666666, ans=0.125 +2024-07-28 09:18:08,970 INFO [train.py:1114] (1/4) Epoch 11, batch 1450, loss[loss=0.2344, simple_loss=0.3149, pruned_loss=0.0769, over 4677.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2847, pruned_loss=0.05411, over 942670.13 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:22,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.84 vs. limit=12.0 +2024-07-28 09:18:28,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.07 vs. limit=12.0 +2024-07-28 09:18:29,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=138258.66666666666, ans=0.05 +2024-07-28 09:18:30,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-28 09:18:33,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=138272.0, ans=0.125 +2024-07-28 09:18:52,730 INFO [train.py:1114] (1/4) Epoch 11, batch 1500, loss[loss=0.1951, simple_loss=0.2787, pruned_loss=0.05573, over 4811.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2852, pruned_loss=0.05441, over 942284.88 frames. 
], batch size: 14, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:53,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=138298.66666666666, ans=0.125 +2024-07-28 09:18:54,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=138298.66666666666, ans=0.0 +2024-07-28 09:18:55,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=138298.66666666666, ans=0.125 +2024-07-28 09:18:56,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138298.66666666666, ans=0.125 +2024-07-28 09:19:06,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=138325.33333333334, ans=0.125 +2024-07-28 09:19:13,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=138325.33333333334, ans=0.0 +2024-07-28 09:19:14,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138325.33333333334, ans=0.125 +2024-07-28 09:19:18,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=138338.66666666666, ans=0.0 +2024-07-28 09:19:28,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=138352.0, ans=10.0 +2024-07-28 09:19:31,301 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.052e+01 5.776e+01 6.231e+01 7.086e+01 9.841e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:19:33,309 INFO [train.py:1114] (1/4) Epoch 11, batch 1550, loss[loss=0.2063, simple_loss=0.3029, pruned_loss=0.0549, over 4912.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2844, pruned_loss=0.05461, over 938263.93 frames. ], batch size: 15, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:19:34,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=138365.33333333334, ans=0.2 +2024-07-28 09:19:48,466 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.96 vs. limit=15.0 +2024-07-28 09:19:48,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138392.0, ans=0.125 +2024-07-28 09:19:50,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=15.0 +2024-07-28 09:19:53,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=11.92 vs. limit=15.0 +2024-07-28 09:19:53,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=138405.33333333334, ans=10.0 +2024-07-28 09:19:55,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=138405.33333333334, ans=0.0 +2024-07-28 09:20:09,041 INFO [train.py:1114] (1/4) Epoch 11, batch 1600, loss[loss=0.1879, simple_loss=0.2838, pruned_loss=0.04593, over 4871.00 frames. 
], tot_loss[loss=0.1967, simple_loss=0.2836, pruned_loss=0.05491, over 936735.15 frames. ], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:18,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.90 vs. limit=10.0 +2024-07-28 09:20:30,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138472.0, ans=0.125 +2024-07-28 09:20:30,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=138472.0, ans=0.025 +2024-07-28 09:20:46,013 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.538e+01 5.961e+01 6.813e+01 9.879e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:20:47,970 INFO [train.py:1114] (1/4) Epoch 11, batch 1650, loss[loss=0.2399, simple_loss=0.3348, pruned_loss=0.07247, over 4668.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2838, pruned_loss=0.05532, over 936463.85 frames. ], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:52,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=138498.66666666666, ans=0.0 +2024-07-28 09:21:08,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=138538.66666666666, ans=0.0 +2024-07-28 09:21:21,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=138552.0, ans=0.2 +2024-07-28 09:21:25,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138552.0, ans=0.1 +2024-07-28 09:21:29,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=138565.33333333334, ans=0.125 +2024-07-28 09:21:29,791 INFO [train.py:1114] (1/4) Epoch 11, batch 1700, loss[loss=0.1762, simple_loss=0.2524, pruned_loss=0.04998, over 4697.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2835, pruned_loss=0.055, over 938469.04 frames. ], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:21:36,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=138578.66666666666, ans=0.125 +2024-07-28 09:21:39,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138578.66666666666, ans=0.125 +2024-07-28 09:21:56,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=138605.33333333334, ans=0.125 +2024-07-28 09:22:06,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.454e+01 5.772e+01 6.333e+01 7.541e+01 1.576e+02, threshold=1.267e+02, percent-clipped=2.0 +2024-07-28 09:22:07,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138618.66666666666, ans=0.1 +2024-07-28 09:22:08,865 INFO [train.py:1114] (1/4) Epoch 11, batch 1750, loss[loss=0.1555, simple_loss=0.2354, pruned_loss=0.03778, over 4818.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2817, pruned_loss=0.054, over 939765.49 frames. 
], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:15,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=138645.33333333334, ans=0.0 +2024-07-28 09:22:18,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138645.33333333334, ans=0.125 +2024-07-28 09:22:25,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0 +2024-07-28 09:22:38,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=138672.0, ans=0.0 +2024-07-28 09:22:42,404 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=15.0 +2024-07-28 09:22:49,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138685.33333333334, ans=0.1 +2024-07-28 09:22:50,656 INFO [train.py:1114] (1/4) Epoch 11, batch 1800, loss[loss=0.1884, simple_loss=0.2949, pruned_loss=0.04098, over 4636.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2822, pruned_loss=0.05436, over 940446.19 frames. ], batch size: 13, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:56,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138698.66666666666, ans=0.125 +2024-07-28 09:23:10,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138725.33333333334, ans=0.1 +2024-07-28 09:23:12,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=138738.66666666666, ans=0.125 +2024-07-28 09:23:24,315 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+01 5.944e+01 6.989e+01 8.458e+01 1.208e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-28 09:23:28,451 INFO [train.py:1114] (1/4) Epoch 11, batch 1850, loss[loss=0.1932, simple_loss=0.2888, pruned_loss=0.04878, over 4804.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2821, pruned_loss=0.05444, over 940398.77 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:23:36,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=138765.33333333334, ans=0.05 +2024-07-28 09:23:37,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138778.66666666666, ans=0.1 +2024-07-28 09:24:07,712 INFO [train.py:1114] (1/4) Epoch 11, batch 1900, loss[loss=0.1986, simple_loss=0.2918, pruned_loss=0.05276, over 4666.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.283, pruned_loss=0.05517, over 942213.61 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:11,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. 
limit=15.0 +2024-07-28 09:24:12,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138832.0, ans=0.125 +2024-07-28 09:24:21,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=138858.66666666666, ans=0.125 +2024-07-28 09:24:35,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=138885.33333333334, ans=0.0 +2024-07-28 09:24:36,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138885.33333333334, ans=0.1 +2024-07-28 09:24:39,499 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.648e+01 6.210e+01 7.045e+01 1.018e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 09:24:41,645 INFO [train.py:1114] (1/4) Epoch 11, batch 1950, loss[loss=0.1879, simple_loss=0.2687, pruned_loss=0.05355, over 4895.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2849, pruned_loss=0.05588, over 944179.41 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:44,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=138898.66666666666, ans=0.125 +2024-07-28 09:24:57,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138925.33333333334, ans=0.1 +2024-07-28 09:24:57,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0 +2024-07-28 09:25:03,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=138925.33333333334, ans=0.125 +2024-07-28 09:25:03,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138938.66666666666, ans=0.125 +2024-07-28 09:25:19,148 INFO [train.py:1114] (1/4) Epoch 11, batch 2000, loss[loss=0.1635, simple_loss=0.2438, pruned_loss=0.04161, over 4812.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2846, pruned_loss=0.05548, over 941546.63 frames. ], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:25:20,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.05 vs. limit=22.5 +2024-07-28 09:32:25,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=138978.66666666666, ans=0.0 +2024-07-28 09:32:26,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=138978.66666666666, ans=0.2 +2024-07-28 09:32:30,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=138978.66666666666, ans=0.0 +2024-07-28 09:32:31,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.72 vs. 
limit=10.0 +2024-07-28 09:32:32,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=138992.0, ans=0.125 +2024-07-28 09:32:35,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=12.0 +2024-07-28 09:32:36,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138992.0, ans=0.125 +2024-07-28 09:32:37,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.93 vs. limit=15.0 +2024-07-28 09:32:44,229 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.67 vs. limit=10.0 +2024-07-28 09:32:46,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.25 vs. limit=22.5 +2024-07-28 09:32:48,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139018.66666666666, ans=0.1 +2024-07-28 09:32:48,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=139018.66666666666, ans=0.2 +2024-07-28 09:32:49,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=139018.66666666666, ans=0.125 +2024-07-28 09:32:50,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.28 vs. limit=15.0 +2024-07-28 09:32:51,184 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.827e+01 6.350e+01 7.381e+01 1.146e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:32:53,237 INFO [train.py:1114] (1/4) Epoch 11, batch 2050, loss[loss=0.1969, simple_loss=0.276, pruned_loss=0.05887, over 4611.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2841, pruned_loss=0.05547, over 939266.85 frames. ], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:33:02,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=139045.33333333334, ans=0.0 +2024-07-28 09:33:07,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=139058.66666666666, ans=0.0 +2024-07-28 09:33:18,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=139072.0, ans=22.5 +2024-07-28 09:33:20,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=139072.0, ans=0.125 +2024-07-28 09:33:22,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=139085.33333333334, ans=0.125 +2024-07-28 09:33:24,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139085.33333333334, ans=0.1 +2024-07-28 09:33:28,460 INFO [train.py:1114] (1/4) Epoch 11, batch 2100, loss[loss=0.1972, simple_loss=0.2935, pruned_loss=0.0504, over 4758.00 frames. 
], tot_loss[loss=0.1964, simple_loss=0.2832, pruned_loss=0.05475, over 941012.16 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:33:28,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=139098.66666666666, ans=0.0 +2024-07-28 09:33:33,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.73 vs. limit=10.0 +2024-07-28 09:33:34,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139112.0, ans=0.125 +2024-07-28 09:33:34,718 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.10 vs. limit=15.0 +2024-07-28 09:33:38,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139112.0, ans=0.1 +2024-07-28 09:33:40,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=139112.0, ans=0.125 +2024-07-28 09:33:47,592 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:34:05,602 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.652e+01 6.255e+01 7.375e+01 9.920e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:34:06,904 INFO [train.py:1114] (1/4) Epoch 11, batch 2150, loss[loss=0.1797, simple_loss=0.2737, pruned_loss=0.04285, over 4886.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2819, pruned_loss=0.05441, over 944140.10 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:34:39,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-28 09:34:42,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=139205.33333333334, ans=0.125 +2024-07-28 09:34:44,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=139205.33333333334, ans=0.125 +2024-07-28 09:34:46,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=139218.66666666666, ans=0.0 +2024-07-28 09:34:48,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=139218.66666666666, ans=0.125 +2024-07-28 09:34:55,030 INFO [train.py:1114] (1/4) Epoch 11, batch 2200, loss[loss=0.2021, simple_loss=0.2942, pruned_loss=0.05497, over 4811.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2821, pruned_loss=0.05428, over 943150.61 frames. 
], batch size: 14, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:34:59,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=139232.0, ans=0.07 +2024-07-28 09:35:00,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=139232.0, ans=0.025 +2024-07-28 09:35:14,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=139272.0, ans=0.2 +2024-07-28 09:35:18,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=139272.0, ans=0.0 +2024-07-28 09:35:27,039 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.559e+01 6.152e+01 7.200e+01 1.019e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:35:28,418 INFO [train.py:1114] (1/4) Epoch 11, batch 2250, loss[loss=0.1815, simple_loss=0.2724, pruned_loss=0.04524, over 4688.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2818, pruned_loss=0.05383, over 941895.54 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:35,528 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:35:44,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=139325.33333333334, ans=0.125 +2024-07-28 09:35:48,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.68 vs. limit=22.5 +2024-07-28 09:36:27,453 INFO [train.py:1114] (1/4) Epoch 11, batch 2300, loss[loss=0.1665, simple_loss=0.2431, pruned_loss=0.04497, over 4938.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2801, pruned_loss=0.0535, over 939743.74 frames. ], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:36:47,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=139405.33333333334, ans=0.09899494936611666 +2024-07-28 09:36:56,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=139418.66666666666, ans=0.025 +2024-07-28 09:36:56,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=139418.66666666666, ans=0.025 +2024-07-28 09:36:57,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=139418.66666666666, ans=0.125 +2024-07-28 09:36:58,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=139418.66666666666, ans=0.125 +2024-07-28 09:36:59,892 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.544e+01 6.088e+01 7.000e+01 1.026e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 09:37:01,159 INFO [train.py:1114] (1/4) Epoch 11, batch 2350, loss[loss=0.2114, simple_loss=0.3198, pruned_loss=0.05144, over 4635.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2809, pruned_loss=0.05358, over 941470.96 frames. 
], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:04,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=139432.0, ans=0.125 +2024-07-28 09:37:13,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=139458.66666666666, ans=0.0 +2024-07-28 09:37:14,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139458.66666666666, ans=0.1 +2024-07-28 09:37:21,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.48 vs. limit=15.0 +2024-07-28 09:37:23,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=139472.0, ans=0.0 +2024-07-28 09:37:28,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139472.0, ans=0.1 +2024-07-28 09:37:32,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139485.33333333334, ans=0.1 +2024-07-28 09:37:35,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=139485.33333333334, ans=0.125 +2024-07-28 09:37:36,621 INFO [train.py:1114] (1/4) Epoch 11, batch 2400, loss[loss=0.1848, simple_loss=0.2714, pruned_loss=0.04911, over 4638.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2821, pruned_loss=0.05427, over 941197.63 frames. ], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:41,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.51 vs. limit=22.5 +2024-07-28 09:37:49,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=139512.0, ans=0.125 +2024-07-28 09:37:52,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=139512.0, ans=0.04949747468305833 +2024-07-28 09:37:54,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=139512.0, ans=0.0 +2024-07-28 09:37:55,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139525.33333333334, ans=0.1 +2024-07-28 09:38:12,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=139552.0, ans=8.0 +2024-07-28 09:38:16,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=139552.0, ans=0.2 +2024-07-28 09:38:18,418 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.707e+01 6.350e+01 6.927e+01 1.167e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:38:18,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139565.33333333334, ans=0.125 +2024-07-28 09:38:19,091 INFO [train.py:1114] (1/4) Epoch 11, batch 2450, loss[loss=0.1718, simple_loss=0.2684, pruned_loss=0.03765, over 4698.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2823, pruned_loss=0.05466, over 936500.25 frames. 
], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:38:20,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=139565.33333333334, ans=0.0 +2024-07-28 09:38:38,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.28 vs. limit=15.0 +2024-07-28 09:38:57,023 INFO [train.py:1114] (1/4) Epoch 11, batch 2500, loss[loss=0.2139, simple_loss=0.2982, pruned_loss=0.06475, over 4814.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2832, pruned_loss=0.0551, over 938619.93 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:01,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=139632.0, ans=0.125 +2024-07-28 09:39:06,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139645.33333333334, ans=0.125 +2024-07-28 09:39:29,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=139685.33333333334, ans=0.125 +2024-07-28 09:39:32,200 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+01 5.568e+01 6.165e+01 6.885e+01 1.396e+02, threshold=1.233e+02, percent-clipped=2.0 +2024-07-28 09:39:32,992 INFO [train.py:1114] (1/4) Epoch 11, batch 2550, loss[loss=0.1601, simple_loss=0.2456, pruned_loss=0.03729, over 4812.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2821, pruned_loss=0.0544, over 938437.27 frames. ], batch size: 11, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:33,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=139698.66666666666, ans=0.125 +2024-07-28 09:39:46,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=139725.33333333334, ans=0.0 +2024-07-28 09:39:46,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=139725.33333333334, ans=0.125 +2024-07-28 09:40:08,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.99 vs. limit=6.0 +2024-07-28 09:40:08,245 INFO [train.py:1114] (1/4) Epoch 11, batch 2600, loss[loss=0.1772, simple_loss=0.2648, pruned_loss=0.04477, over 4903.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2826, pruned_loss=0.05449, over 937441.33 frames. 
], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:10,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=139765.33333333334, ans=0.0 +2024-07-28 09:40:11,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=139765.33333333334, ans=0.0 +2024-07-28 09:40:21,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139792.0, ans=0.1 +2024-07-28 09:40:25,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=139792.0, ans=22.5 +2024-07-28 09:40:26,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.71 vs. limit=10.0 +2024-07-28 09:40:34,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=139805.33333333334, ans=0.0 +2024-07-28 09:40:36,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139818.66666666666, ans=0.125 +2024-07-28 09:40:44,007 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.632e+01 6.432e+01 7.757e+01 1.315e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 09:40:44,695 INFO [train.py:1114] (1/4) Epoch 11, batch 2650, loss[loss=0.1995, simple_loss=0.2929, pruned_loss=0.05301, over 4660.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2835, pruned_loss=0.05499, over 939693.82 frames. ], batch size: 16, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:45,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-07-28 09:40:56,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=139845.33333333334, ans=0.125 +2024-07-28 09:40:58,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=139845.33333333334, ans=0.025 +2024-07-28 09:41:08,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=139858.66666666666, ans=0.015 +2024-07-28 09:41:25,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=139885.33333333334, ans=0.125 +2024-07-28 09:41:28,862 INFO [train.py:1114] (1/4) Epoch 11, batch 2700, loss[loss=0.1837, simple_loss=0.2848, pruned_loss=0.04128, over 4736.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2843, pruned_loss=0.05516, over 940204.71 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:41:38,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.50 vs. 
limit=12.0 +2024-07-28 09:41:45,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=139912.0, ans=0.125 +2024-07-28 09:41:57,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=139925.33333333334, ans=0.0 +2024-07-28 09:42:08,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=139952.0, ans=0.2 +2024-07-28 09:42:09,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139952.0, ans=0.1 +2024-07-28 09:42:10,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.187e+01 5.691e+01 6.358e+01 7.173e+01 9.845e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 09:42:11,615 INFO [train.py:1114] (1/4) Epoch 11, batch 2750, loss[loss=0.1959, simple_loss=0.2836, pruned_loss=0.05417, over 4710.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2832, pruned_loss=0.05496, over 940096.31 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 16.0 +2024-07-28 09:42:13,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139965.33333333334, ans=0.1 +2024-07-28 09:42:13,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=139965.33333333334, ans=0.09899494936611666 +2024-07-28 09:45:14,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=139992.0, ans=0.0 +2024-07-28 09:45:14,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=139992.0, ans=0.125 +2024-07-28 09:45:14,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=139992.0, ans=0.125 +2024-07-28 09:45:18,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=139992.0, ans=0.2 +2024-07-28 09:45:23,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=140005.33333333334, ans=0.2 +2024-07-28 09:45:28,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=140018.66666666666, ans=0.125 +2024-07-28 09:45:33,260 INFO [train.py:1114] (1/4) Epoch 11, batch 2800, loss[loss=0.2366, simple_loss=0.3182, pruned_loss=0.07746, over 3073.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2832, pruned_loss=0.05508, over 937471.91 frames. 
], batch size: 36, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:45:35,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=140032.0, ans=0.0 +2024-07-28 09:45:36,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=140032.0, ans=0.0 +2024-07-28 09:45:38,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=140032.0, ans=0.125 +2024-07-28 09:45:45,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=140058.66666666666, ans=0.0 +2024-07-28 09:45:48,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=140058.66666666666, ans=0.0 +2024-07-28 09:45:50,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=140058.66666666666, ans=0.125 +2024-07-28 09:46:07,730 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.521e+01 6.232e+01 7.025e+01 9.705e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:46:08,483 INFO [train.py:1114] (1/4) Epoch 11, batch 2850, loss[loss=0.194, simple_loss=0.2831, pruned_loss=0.05245, over 4966.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.284, pruned_loss=0.05525, over 935622.67 frames. ], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:09,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140098.66666666666, ans=0.0 +2024-07-28 09:46:19,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=15.0 +2024-07-28 09:46:24,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=140125.33333333334, ans=0.2 +2024-07-28 09:46:31,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=140138.66666666666, ans=0.125 +2024-07-28 09:46:32,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140138.66666666666, ans=0.1 +2024-07-28 09:46:34,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140138.66666666666, ans=0.125 +2024-07-28 09:46:42,411 INFO [train.py:1114] (1/4) Epoch 11, batch 2900, loss[loss=0.1905, simple_loss=0.2772, pruned_loss=0.05188, over 4825.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2847, pruned_loss=0.05506, over 939552.87 frames. 
], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:47,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140165.33333333334, ans=0.0 +2024-07-28 09:46:53,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=140178.66666666666, ans=10.0 +2024-07-28 09:46:54,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=140178.66666666666, ans=0.2 +2024-07-28 09:46:59,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-28 09:47:05,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=140205.33333333334, ans=0.0 +2024-07-28 09:47:07,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140205.33333333334, ans=0.125 +2024-07-28 09:47:20,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=140218.66666666666, ans=0.2 +2024-07-28 09:47:21,183 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.615e+01 6.138e+01 7.226e+01 1.097e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 09:47:22,502 INFO [train.py:1114] (1/4) Epoch 11, batch 2950, loss[loss=0.1671, simple_loss=0.2588, pruned_loss=0.03766, over 4707.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2838, pruned_loss=0.05489, over 938654.70 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:47:22,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140232.0, ans=0.1 +2024-07-28 09:47:22,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=140232.0, ans=0.2 +2024-07-28 09:47:26,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140232.0, ans=0.125 +2024-07-28 09:47:26,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.46 vs. limit=10.0 +2024-07-28 09:47:50,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140272.0, ans=0.125 +2024-07-28 09:47:51,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=140272.0, ans=0.2 +2024-07-28 09:48:05,767 INFO [train.py:1114] (1/4) Epoch 11, batch 3000, loss[loss=0.178, simple_loss=0.2734, pruned_loss=0.04123, over 4761.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.284, pruned_loss=0.05515, over 937991.76 frames. ], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:48:05,768 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 09:48:15,165 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.7702, 2.6375, 4.4064, 2.0701], device='cuda:1') +2024-07-28 09:48:19,194 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1714, simple_loss=0.2749, pruned_loss=0.03396, over 944034.00 frames. 
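Note on reading the records above: the train.py:1114 lines report a per-batch pruned-transducer loss together with its components, and the figures are consistent with loss = 0.5 * simple_loss + pruned_loss (for example 0.5 * 0.2818 + 0.05383 = 0.1947), so 0.5 appears to be the simple-loss scale in effect for this run. The optim.py:487 warnings summarize the total gradient norms of recent batches as five quantiles (apparently min/25%/50%/75%/max); the logged threshold equals Clipping_scale times the median (e.g. 2.0 * 6.152e+01 = 1.230e+02), and percent-clipped is the share of those batches whose norm exceeded it. The sketch below reproduces that bookkeeping over a simple window of recent norms; it is an illustration of the logged quantities, not icefall's actual ScaledAdam internals, and grad_clip_stats is a hypothetical helper name.

import torch

def grad_clip_stats(recent_grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    # recent_grad_norms: 1-D float tensor of total gradient norms from recent batches.
    # Quantiles in the order the log prints them: min / 25% / 50% / 75% / max.
    q = torch.quantile(recent_grad_norms,
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]  # Clipping_scale x median, matching the logged values
    percent_clipped = 100.0 * (recent_grad_norms > threshold).float().mean()
    return q, threshold, percent_clipped

The scaling.py:214 ScheduledFloat lines likewise report module hyperparameters (skip rates, balancer probabilities, dropout) whose current values depend on batch_count; a piecewise-linear interpolation clamped at the endpoints (an assumption about the schedule shape, with scheduled_value a hypothetical helper) reproduces the pattern:

def scheduled_value(batch_count: float, schedule) -> float:
    # schedule: [(batch_count, value), ...] in increasing batch_count order.
    xs, ys = zip(*schedule)
    if batch_count <= xs[0]:
        return ys[0]
    if batch_count >= xs[-1]:
        return ys[-1]
    # Linear interpolation on the segment containing batch_count.
    for (x0, y0), (x1, y1) in zip(schedule, schedule[1:]):
        if x0 <= batch_count <= x1:
            return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)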
+2024-07-28 09:48:19,195 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 09:48:27,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140312.0, ans=0.1 +2024-07-28 09:48:28,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=140312.0, ans=0.125 +2024-07-28 09:48:30,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140312.0, ans=0.125 +2024-07-28 09:48:46,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=140338.66666666666, ans=0.125 +2024-07-28 09:48:48,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140352.0, ans=0.125 +2024-07-28 09:48:50,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=140352.0, ans=0.025 +2024-07-28 09:48:50,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=140352.0, ans=0.5 +2024-07-28 09:48:53,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.535e+01 6.032e+01 6.917e+01 1.051e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 09:48:54,497 INFO [train.py:1114] (1/4) Epoch 11, batch 3050, loss[loss=0.1776, simple_loss=0.2593, pruned_loss=0.04793, over 4642.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2841, pruned_loss=0.05551, over 936727.80 frames. ], batch size: 12, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:49:01,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=140378.66666666666, ans=0.0 +2024-07-28 09:49:17,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140405.33333333334, ans=0.1 +2024-07-28 09:49:18,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=140405.33333333334, ans=0.0 +2024-07-28 09:49:25,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.24 vs. limit=22.5 +2024-07-28 09:49:32,122 INFO [train.py:1114] (1/4) Epoch 11, batch 3100, loss[loss=0.2115, simple_loss=0.2947, pruned_loss=0.06411, over 4621.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2829, pruned_loss=0.05488, over 937566.17 frames. ], batch size: 16, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:49:34,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.14 vs. limit=15.0 +2024-07-28 09:49:35,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. 
limit=15.0 +2024-07-28 09:49:35,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140432.0, ans=0.1 +2024-07-28 09:49:35,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140432.0, ans=0.125 +2024-07-28 09:49:36,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=140432.0, ans=0.0 +2024-07-28 09:49:36,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=140432.0, ans=0.125 +2024-07-28 09:49:46,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=140458.66666666666, ans=0.0 +2024-07-28 09:49:47,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=140458.66666666666, ans=0.025 +2024-07-28 09:49:51,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.44 vs. limit=12.0 +2024-07-28 09:49:56,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140472.0, ans=0.125 +2024-07-28 09:50:07,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.405e+01 6.178e+01 7.390e+01 1.037e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 09:50:07,876 INFO [train.py:1114] (1/4) Epoch 11, batch 3150, loss[loss=0.1948, simple_loss=0.2906, pruned_loss=0.04953, over 4635.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2825, pruned_loss=0.05444, over 937531.35 frames. ], batch size: 17, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:10,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=140498.66666666666, ans=0.125 +2024-07-28 09:50:10,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=140498.66666666666, ans=0.125 +2024-07-28 09:50:14,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=140512.0, ans=0.2 +2024-07-28 09:50:17,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0 +2024-07-28 09:50:17,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.06 vs. 
limit=15.0 +2024-07-28 09:50:19,517 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:50:22,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=140525.33333333334, ans=0.0 +2024-07-28 09:50:25,522 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:50:30,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140538.66666666666, ans=0.125 +2024-07-28 09:50:31,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140538.66666666666, ans=0.1 +2024-07-28 09:50:37,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=140552.0, ans=0.2 +2024-07-28 09:50:41,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=140552.0, ans=0.1 +2024-07-28 09:50:42,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140552.0, ans=0.125 +2024-07-28 09:50:43,669 INFO [train.py:1114] (1/4) Epoch 11, batch 3200, loss[loss=0.1671, simple_loss=0.2529, pruned_loss=0.04064, over 4819.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2826, pruned_loss=0.05443, over 939275.81 frames. ], batch size: 13, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:48,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-07-28 09:50:53,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140578.66666666666, ans=0.125 +2024-07-28 09:51:19,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=140605.33333333334, ans=0.04949747468305833 +2024-07-28 09:51:31,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=140605.33333333334, ans=0.0 +2024-07-28 09:52:00,433 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.714e+01 6.190e+01 6.678e+01 8.069e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 09:52:01,127 INFO [train.py:1114] (1/4) Epoch 11, batch 3250, loss[loss=0.1776, simple_loss=0.2755, pruned_loss=0.03988, over 4932.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2831, pruned_loss=0.05432, over 940728.67 frames. ], batch size: 14, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:52:43,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140645.33333333334, ans=0.125 +2024-07-28 09:52:52,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140658.66666666666, ans=0.1 +2024-07-28 09:53:46,944 INFO [train.py:1114] (1/4) Epoch 11, batch 3300, loss[loss=0.196, simple_loss=0.2893, pruned_loss=0.05134, over 4699.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2825, pruned_loss=0.05403, over 940973.17 frames. 
], batch size: 19, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:54:04,560 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.48 vs. limit=22.5 +2024-07-28 09:54:07,591 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:54:50,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=22.5 +2024-07-28 09:54:52,380 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.701e+01 6.395e+01 7.330e+01 1.076e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 09:54:53,119 INFO [train.py:1114] (1/4) Epoch 11, batch 3350, loss[loss=0.2099, simple_loss=0.291, pruned_loss=0.06441, over 4612.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2822, pruned_loss=0.05421, over 939027.30 frames. ], batch size: 17, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:55:02,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=140778.66666666666, ans=0.125 +2024-07-28 09:55:13,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140805.33333333334, ans=0.125 +2024-07-28 09:55:14,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=12.0 +2024-07-28 09:55:18,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=140805.33333333334, ans=0.025 +2024-07-28 09:55:25,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.41 vs. limit=22.5 +2024-07-28 09:55:29,089 INFO [train.py:1114] (1/4) Epoch 11, batch 3400, loss[loss=0.1557, simple_loss=0.2424, pruned_loss=0.03447, over 4810.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2814, pruned_loss=0.0542, over 937670.83 frames. ], batch size: 11, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:55:29,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=140832.0, ans=0.0 +2024-07-28 09:55:44,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140858.66666666666, ans=0.125 +2024-07-28 09:55:45,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=140858.66666666666, ans=0.125 +2024-07-28 09:55:46,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=140858.66666666666, ans=0.0 +2024-07-28 09:55:47,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=12.0 +2024-07-28 09:55:50,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=140872.0, ans=0.2 +2024-07-28 09:55:51,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.53 vs. 
limit=15.0 +2024-07-28 09:55:51,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.18 vs. limit=15.0 +2024-07-28 09:55:56,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140872.0, ans=0.125 +2024-07-28 09:55:57,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=140885.33333333334, ans=0.0 +2024-07-28 09:56:02,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=140885.33333333334, ans=0.0 +2024-07-28 09:56:04,025 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.505e+01 5.604e+01 6.128e+01 6.821e+01 1.006e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 09:56:04,704 INFO [train.py:1114] (1/4) Epoch 11, batch 3450, loss[loss=0.181, simple_loss=0.2666, pruned_loss=0.04772, over 4702.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2815, pruned_loss=0.05416, over 938103.99 frames. ], batch size: 19, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:04,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=140898.66666666666, ans=0.125 +2024-07-28 09:56:25,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=140938.66666666666, ans=0.0 +2024-07-28 09:56:27,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-07-28 09:56:30,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=140952.0, ans=0.125 +2024-07-28 09:56:36,410 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:56:37,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140952.0, ans=0.0 +2024-07-28 09:56:38,862 INFO [train.py:1114] (1/4) Epoch 11, batch 3500, loss[loss=0.1859, simple_loss=0.2774, pruned_loss=0.04723, over 4948.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2808, pruned_loss=0.05383, over 938967.20 frames. ], batch size: 12, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:38,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140965.33333333334, ans=0.1 +2024-07-28 09:56:43,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=140965.33333333334, ans=0.0 +2024-07-28 09:56:56,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=140992.0, ans=0.125 +2024-07-28 09:57:07,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.71 vs. 
limit=22.5 +2024-07-28 09:57:12,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=141018.66666666666, ans=0.125 +2024-07-28 09:57:15,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=141018.66666666666, ans=0.5 +2024-07-28 09:57:15,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=141018.66666666666, ans=0.125 +2024-07-28 09:57:16,580 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+01 5.451e+01 6.238e+01 7.293e+01 9.971e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 09:57:16,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141032.0, ans=0.0 +2024-07-28 09:57:17,306 INFO [train.py:1114] (1/4) Epoch 11, batch 3550, loss[loss=0.2284, simple_loss=0.3333, pruned_loss=0.06175, over 4655.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2814, pruned_loss=0.05366, over 939137.53 frames. ], batch size: 14, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:57:22,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.92 vs. limit=15.0 +2024-07-28 09:57:27,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=141045.33333333334, ans=0.125 +2024-07-28 09:57:28,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=141045.33333333334, ans=0.2 +2024-07-28 09:57:36,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=141058.66666666666, ans=0.025 +2024-07-28 09:57:47,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=141085.33333333334, ans=0.125 +2024-07-28 09:57:47,365 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0 +2024-07-28 09:57:59,193 INFO [train.py:1114] (1/4) Epoch 11, batch 3600, loss[loss=0.1538, simple_loss=0.2562, pruned_loss=0.02573, over 4958.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2811, pruned_loss=0.05341, over 940895.87 frames. 
], batch size: 13, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:10,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=141112.0, ans=0.125 +2024-07-28 09:58:14,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141125.33333333334, ans=0.1 +2024-07-28 09:58:26,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=141138.66666666666, ans=0.125 +2024-07-28 09:58:28,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141152.0, ans=0.0 +2024-07-28 09:58:35,668 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.527e+01 6.114e+01 7.370e+01 1.148e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 09:58:37,552 INFO [train.py:1114] (1/4) Epoch 11, batch 3650, loss[loss=0.2105, simple_loss=0.3127, pruned_loss=0.05409, over 4903.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2807, pruned_loss=0.05317, over 941370.27 frames. ], batch size: 15, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:56,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=15.0 +2024-07-28 09:58:58,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=141205.33333333334, ans=0.0 +2024-07-28 09:59:00,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=141205.33333333334, ans=0.0 +2024-07-28 09:59:05,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=141218.66666666666, ans=0.0 +2024-07-28 09:59:06,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141218.66666666666, ans=0.1 +2024-07-28 09:59:11,456 INFO [train.py:1114] (1/4) Epoch 11, batch 3700, loss[loss=0.194, simple_loss=0.2984, pruned_loss=0.04478, over 4931.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2815, pruned_loss=0.05339, over 941964.88 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:13,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141232.0, ans=0.1 +2024-07-28 09:59:18,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=141245.33333333334, ans=0.125 +2024-07-28 09:59:21,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.33 vs. limit=22.5 +2024-07-28 09:59:50,247 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.396e+01 5.987e+01 6.537e+01 9.206e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 09:59:50,929 INFO [train.py:1114] (1/4) Epoch 11, batch 3750, loss[loss=0.1897, simple_loss=0.2792, pruned_loss=0.05007, over 4806.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2813, pruned_loss=0.05331, over 943516.07 frames. 
], batch size: 11, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:59,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.26 vs. limit=15.0 +2024-07-28 10:00:15,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.29 vs. limit=6.0 +2024-07-28 10:00:20,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141338.66666666666, ans=0.1 +2024-07-28 10:00:20,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=141338.66666666666, ans=0.0 +2024-07-28 10:00:29,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=141352.0, ans=0.125 +2024-07-28 10:00:32,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=41.53 vs. limit=15.0 +2024-07-28 10:00:33,296 INFO [train.py:1114] (1/4) Epoch 11, batch 3800, loss[loss=0.1968, simple_loss=0.2912, pruned_loss=0.05122, over 4815.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2818, pruned_loss=0.05397, over 941841.17 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:00:33,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141365.33333333334, ans=0.1 +2024-07-28 10:00:36,274 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.93 vs. limit=15.0 +2024-07-28 10:00:47,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=141378.66666666666, ans=0.125 +2024-07-28 10:00:54,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=141392.0, ans=0.0 +2024-07-28 10:00:55,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=141405.33333333334, ans=0.0 +2024-07-28 10:00:58,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=141405.33333333334, ans=0.025 +2024-07-28 10:01:07,865 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.570e+01 6.150e+01 7.131e+01 1.072e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:01:08,502 INFO [train.py:1114] (1/4) Epoch 11, batch 3850, loss[loss=0.2168, simple_loss=0.31, pruned_loss=0.06177, over 4621.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2817, pruned_loss=0.05335, over 942511.94 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:11,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=141432.0, ans=0.0 +2024-07-28 10:01:29,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141472.0, ans=0.125 +2024-07-28 10:01:42,033 INFO [train.py:1114] (1/4) Epoch 11, batch 3900, loss[loss=0.2105, simple_loss=0.3053, pruned_loss=0.05784, over 4805.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2825, pruned_loss=0.05352, over 943144.59 frames. 
], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:44,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=141498.66666666666, ans=0.0 +2024-07-28 10:01:45,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=15.0 +2024-07-28 10:01:55,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=141512.0, ans=0.0 +2024-07-28 10:01:58,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=141525.33333333334, ans=0.125 +2024-07-28 10:02:07,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=141538.66666666666, ans=0.125 +2024-07-28 10:02:09,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141538.66666666666, ans=0.125 +2024-07-28 10:02:16,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.644e+01 6.231e+01 6.992e+01 1.031e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 10:02:17,244 INFO [train.py:1114] (1/4) Epoch 11, batch 3950, loss[loss=0.2129, simple_loss=0.3138, pruned_loss=0.05597, over 4853.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2831, pruned_loss=0.05388, over 944962.63 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:02:17,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.44 vs. limit=15.0 +2024-07-28 10:02:20,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 10:02:24,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=141578.66666666666, ans=0.0 +2024-07-28 10:02:25,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=141578.66666666666, ans=0.0 +2024-07-28 10:02:37,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=141605.33333333334, ans=0.125 +2024-07-28 10:02:44,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=15.0 +2024-07-28 10:02:45,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-07-28 10:02:47,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=141618.66666666666, ans=0.125 +2024-07-28 10:02:54,309 INFO [train.py:1114] (1/4) Epoch 11, batch 4000, loss[loss=0.184, simple_loss=0.2707, pruned_loss=0.04864, over 4775.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2839, pruned_loss=0.05481, over 941462.20 frames. 
], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:03:14,935 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:03:18,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=141658.66666666666, ans=0.125 +2024-07-28 10:03:25,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=141672.0, ans=0.125 +2024-07-28 10:03:44,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.29 vs. limit=22.5 +2024-07-28 10:03:46,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=141685.33333333334, ans=0.0 +2024-07-28 10:03:48,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=141685.33333333334, ans=0.125 +2024-07-28 10:03:50,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=141685.33333333334, ans=0.125 +2024-07-28 10:03:53,266 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.441e+01 6.028e+01 6.961e+01 9.604e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 10:03:53,943 INFO [train.py:1114] (1/4) Epoch 11, batch 4050, loss[loss=0.22, simple_loss=0.3155, pruned_loss=0.0623, over 3477.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2832, pruned_loss=0.05437, over 940170.61 frames. ], batch size: 35, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:13,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141712.0, ans=0.0 +2024-07-28 10:04:24,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141738.66666666666, ans=0.1 +2024-07-28 10:04:31,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=141752.0, ans=0.2 +2024-07-28 10:04:34,716 INFO [train.py:1114] (1/4) Epoch 11, batch 4100, loss[loss=0.2095, simple_loss=0.296, pruned_loss=0.06148, over 4896.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2831, pruned_loss=0.05467, over 939176.70 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:35,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=141765.33333333334, ans=0.125 +2024-07-28 10:04:41,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0 +2024-07-28 10:05:03,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=141792.0, ans=0.07 +2024-07-28 10:05:36,364 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.886e+01 6.549e+01 7.693e+01 1.193e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 10:05:37,302 INFO [train.py:1114] (1/4) Epoch 11, batch 4150, loss[loss=0.1657, simple_loss=0.2741, pruned_loss=0.02866, over 4825.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2826, pruned_loss=0.05434, over 938784.83 frames. 
], batch size: 13, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:05:37,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=141832.0, ans=0.0 +2024-07-28 10:05:55,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.80 vs. limit=15.0 +2024-07-28 10:06:03,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=141832.0, ans=0.0 +2024-07-28 10:06:41,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=141858.66666666666, ans=0.0 +2024-07-28 10:07:11,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=141885.33333333334, ans=0.2 +2024-07-28 10:07:15,449 INFO [train.py:1114] (1/4) Epoch 11, batch 4200, loss[loss=0.2426, simple_loss=0.3245, pruned_loss=0.08036, over 4890.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2834, pruned_loss=0.05454, over 940098.68 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:07:39,938 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.99 vs. limit=15.0 +2024-07-28 10:08:41,003 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.645e+01 6.237e+01 6.874e+01 1.098e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 10:08:41,703 INFO [train.py:1114] (1/4) Epoch 11, batch 4250, loss[loss=0.1701, simple_loss=0.2549, pruned_loss=0.04265, over 4643.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2833, pruned_loss=0.05451, over 941162.37 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:08:42,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=141965.33333333334, ans=0.2 +2024-07-28 10:08:43,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=141965.33333333334, ans=0.125 +2024-07-28 10:08:44,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=141965.33333333334, ans=0.125 +2024-07-28 10:09:10,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=142005.33333333334, ans=0.05 +2024-07-28 10:09:25,205 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.87 vs. limit=22.5 +2024-07-28 10:09:27,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=142018.66666666666, ans=0.125 +2024-07-28 10:09:31,299 INFO [train.py:1114] (1/4) Epoch 11, batch 4300, loss[loss=0.2128, simple_loss=0.2867, pruned_loss=0.06951, over 4752.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2827, pruned_loss=0.05432, over 940877.32 frames. 
], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:09:36,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=142032.0, ans=0.125 +2024-07-28 10:09:39,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=142045.33333333334, ans=0.125 +2024-07-28 10:09:40,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=142045.33333333334, ans=0.125 +2024-07-28 10:09:53,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=142045.33333333334, ans=0.0 +2024-07-28 10:10:06,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=142058.66666666666, ans=0.125 +2024-07-28 10:10:18,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=142072.0, ans=0.2 +2024-07-28 10:10:27,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.309e+01 5.443e+01 5.948e+01 6.522e+01 9.090e+01, threshold=1.190e+02, percent-clipped=0.0 +2024-07-28 10:10:28,341 INFO [train.py:1114] (1/4) Epoch 11, batch 4350, loss[loss=0.1878, simple_loss=0.2775, pruned_loss=0.04908, over 4757.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2827, pruned_loss=0.05417, over 940915.41 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:10:35,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=142098.66666666666, ans=0.0 +2024-07-28 10:10:42,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=142112.0, ans=0.0 +2024-07-28 10:11:05,768 INFO [train.py:1114] (1/4) Epoch 11, batch 4400, loss[loss=0.2454, simple_loss=0.3432, pruned_loss=0.07381, over 4820.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2837, pruned_loss=0.05422, over 940823.76 frames. ], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:11:05,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=142165.33333333334, ans=0.025 +2024-07-28 10:11:38,551 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.755e+01 6.372e+01 7.291e+01 1.018e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 10:11:39,287 INFO [train.py:1114] (1/4) Epoch 11, batch 4450, loss[loss=0.1806, simple_loss=0.2678, pruned_loss=0.04668, over 4940.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2836, pruned_loss=0.05457, over 938570.23 frames. ], batch size: 12, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:11:57,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.68 vs. limit=10.0 +2024-07-28 10:12:22,658 INFO [train.py:1114] (1/4) Epoch 11, batch 4500, loss[loss=0.1912, simple_loss=0.2804, pruned_loss=0.05097, over 4737.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2839, pruned_loss=0.05414, over 937896.70 frames. 
], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:32,000 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:12:36,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=142325.33333333334, ans=0.125 +2024-07-28 10:12:45,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=142338.66666666666, ans=0.07 +2024-07-28 10:12:54,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=142352.0, ans=0.0 +2024-07-28 10:12:57,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.457e+01 5.934e+01 6.532e+01 9.481e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 10:12:58,140 INFO [train.py:1114] (1/4) Epoch 11, batch 4550, loss[loss=0.1822, simple_loss=0.2725, pruned_loss=0.04595, over 4906.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2839, pruned_loss=0.05432, over 939588.05 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:13:00,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=142365.33333333334, ans=0.2 +2024-07-28 10:13:02,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=142365.33333333334, ans=0.0 +2024-07-28 10:13:02,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142365.33333333334, ans=0.1 +2024-07-28 10:13:33,034 INFO [train.py:1114] (1/4) Epoch 11, batch 4600, loss[loss=0.1512, simple_loss=0.2491, pruned_loss=0.02667, over 4463.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2831, pruned_loss=0.05414, over 937917.85 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:13:36,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=142432.0, ans=0.2 +2024-07-28 10:13:41,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=142432.0, ans=0.0 +2024-07-28 10:13:45,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=142445.33333333334, ans=0.0 +2024-07-28 10:13:49,067 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:14:16,898 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.751e+01 6.441e+01 7.092e+01 1.186e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 10:14:22,122 INFO [train.py:1114] (1/4) Epoch 11, batch 4650, loss[loss=0.2215, simple_loss=0.2979, pruned_loss=0.07259, over 4851.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2842, pruned_loss=0.05478, over 939319.70 frames. 
], batch size: 16, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:14:30,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=142498.66666666666, ans=0.125 +2024-07-28 10:14:32,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=142512.0, ans=0.09899494936611666 +2024-07-28 10:14:40,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=142525.33333333334, ans=0.0 +2024-07-28 10:15:01,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=142538.66666666666, ans=0.2 +2024-07-28 10:15:03,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142552.0, ans=0.125 +2024-07-28 10:15:04,593 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:15:07,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=142552.0, ans=0.025 +2024-07-28 10:15:09,641 INFO [train.py:1114] (1/4) Epoch 11, batch 4700, loss[loss=0.1879, simple_loss=0.2671, pruned_loss=0.0543, over 4695.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.284, pruned_loss=0.0547, over 937005.70 frames. ], batch size: 11, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:15:10,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0 +2024-07-28 10:15:13,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=142565.33333333334, ans=0.0 +2024-07-28 10:15:25,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=142578.66666666666, ans=0.025 +2024-07-28 10:15:38,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=142605.33333333334, ans=0.05 +2024-07-28 10:15:42,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=142618.66666666666, ans=0.02 +2024-07-28 10:15:47,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=142618.66666666666, ans=0.0 +2024-07-28 10:15:49,432 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.868e+01 6.350e+01 7.061e+01 1.022e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 10:15:50,091 INFO [train.py:1114] (1/4) Epoch 11, batch 4750, loss[loss=0.2277, simple_loss=0.302, pruned_loss=0.07672, over 4533.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2838, pruned_loss=0.05515, over 935220.90 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:16:28,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.35 vs. limit=22.5 +2024-07-28 10:16:28,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.00 vs. 
limit=15.0 +2024-07-28 10:16:42,914 INFO [train.py:1114] (1/4) Epoch 11, batch 4800, loss[loss=0.1905, simple_loss=0.2868, pruned_loss=0.04706, over 4697.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2839, pruned_loss=0.05536, over 932788.35 frames. ], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:16:59,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=142712.0, ans=0.0 +2024-07-28 10:16:59,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.09 vs. limit=15.0 +2024-07-28 10:17:06,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=142725.33333333334, ans=0.035 +2024-07-28 10:17:07,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=142725.33333333334, ans=0.0 +2024-07-28 10:17:20,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.27 vs. limit=22.5 +2024-07-28 10:17:27,021 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.658e+01 6.076e+01 6.872e+01 9.188e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 10:17:35,870 INFO [train.py:1114] (1/4) Epoch 11, batch 4850, loss[loss=0.1693, simple_loss=0.2682, pruned_loss=0.03521, over 4734.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2842, pruned_loss=0.05542, over 932270.87 frames. ], batch size: 14, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:17:38,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=142765.33333333334, ans=0.125 +2024-07-28 10:18:06,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=142805.33333333334, ans=0.125 +2024-07-28 10:18:09,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142818.66666666666, ans=0.125 +2024-07-28 10:18:13,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=142818.66666666666, ans=0.125 +2024-07-28 10:18:15,433 INFO [train.py:1114] (1/4) Epoch 11, batch 4900, loss[loss=0.2258, simple_loss=0.3114, pruned_loss=0.0701, over 4763.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.283, pruned_loss=0.05497, over 933742.87 frames. 
], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:18:21,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=142832.0, ans=0.0 +2024-07-28 10:18:26,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142845.33333333334, ans=0.1 +2024-07-28 10:18:28,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142845.33333333334, ans=0.0 +2024-07-28 10:18:39,881 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:18:40,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=142872.0, ans=0.0 +2024-07-28 10:18:44,246 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.22 vs. limit=15.0 +2024-07-28 10:18:47,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142885.33333333334, ans=0.0 +2024-07-28 10:18:50,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=142885.33333333334, ans=0.025 +2024-07-28 10:18:52,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=142885.33333333334, ans=0.125 +2024-07-28 10:18:54,120 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.557e+01 6.177e+01 6.945e+01 1.051e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 10:18:54,882 INFO [train.py:1114] (1/4) Epoch 11, batch 4950, loss[loss=0.2635, simple_loss=0.3286, pruned_loss=0.09926, over 3675.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2845, pruned_loss=0.05545, over 931688.45 frames. ], batch size: 35, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:18:58,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=142898.66666666666, ans=0.09899494936611666 +2024-07-28 10:19:05,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=142912.0, ans=0.125 +2024-07-28 10:19:11,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=142925.33333333334, ans=0.125 +2024-07-28 10:19:11,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=142925.33333333334, ans=0.125 +2024-07-28 10:19:16,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142938.66666666666, ans=0.1 +2024-07-28 10:19:32,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=142965.33333333334, ans=0.125 +2024-07-28 10:19:32,823 INFO [train.py:1114] (1/4) Epoch 11, batch 5000, loss[loss=0.2149, simple_loss=0.296, pruned_loss=0.06687, over 4656.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2828, pruned_loss=0.05443, over 935438.85 frames. 
], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:19:33,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.58 vs. limit=15.0 +2024-07-28 10:19:44,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=142965.33333333334, ans=10.0 +2024-07-28 10:19:48,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=142978.66666666666, ans=0.0 +2024-07-28 10:19:50,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=142978.66666666666, ans=0.0 +2024-07-28 10:20:17,707 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.560e+01 5.974e+01 6.425e+01 8.960e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 10:20:18,546 INFO [train.py:1114] (1/4) Epoch 11, batch 5050, loss[loss=0.1635, simple_loss=0.2484, pruned_loss=0.03928, over 4841.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2818, pruned_loss=0.05351, over 937942.13 frames. ], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:20:24,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=143032.0, ans=0.2 +2024-07-28 10:20:25,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=143045.33333333334, ans=0.125 +2024-07-28 10:20:27,178 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.51 vs. limit=15.0 +2024-07-28 10:20:28,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=143045.33333333334, ans=0.0 +2024-07-28 10:20:33,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=143058.66666666666, ans=0.125 +2024-07-28 10:20:53,949 INFO [train.py:1114] (1/4) Epoch 11, batch 5100, loss[loss=0.1905, simple_loss=0.2815, pruned_loss=0.0498, over 4772.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2825, pruned_loss=0.05381, over 935033.21 frames. ], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:20:54,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=143098.66666666666, ans=0.125 +2024-07-28 10:21:33,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=143138.66666666666, ans=0.05 +2024-07-28 10:21:46,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.691e+01 6.335e+01 6.758e+01 9.887e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 10:21:47,043 INFO [train.py:1114] (1/4) Epoch 11, batch 5150, loss[loss=0.1911, simple_loss=0.2763, pruned_loss=0.05295, over 4832.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2829, pruned_loss=0.05391, over 935786.64 frames. ], batch size: 16, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:21:51,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.76 vs. 
limit=15.0 +2024-07-28 10:21:53,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143178.66666666666, ans=0.1 +2024-07-28 10:22:08,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=143205.33333333334, ans=0.125 +2024-07-28 10:22:13,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.02 vs. limit=15.0 +2024-07-28 10:22:14,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=143218.66666666666, ans=0.035 +2024-07-28 10:22:14,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=143218.66666666666, ans=0.025 +2024-07-28 10:22:15,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.02 vs. limit=15.0 +2024-07-28 10:22:22,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0 +2024-07-28 10:22:22,785 INFO [train.py:1114] (1/4) Epoch 11, batch 5200, loss[loss=0.1839, simple_loss=0.2862, pruned_loss=0.04078, over 4662.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2828, pruned_loss=0.05378, over 936117.78 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:22:37,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=143258.66666666666, ans=0.0 +2024-07-28 10:22:39,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.55 vs. limit=22.5 +2024-07-28 10:22:46,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=143272.0, ans=0.09899494936611666 +2024-07-28 10:22:48,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143272.0, ans=0.125 +2024-07-28 10:22:52,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=143285.33333333334, ans=0.0 +2024-07-28 10:22:56,373 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.779e+01 6.416e+01 7.170e+01 1.127e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 10:22:57,060 INFO [train.py:1114] (1/4) Epoch 11, batch 5250, loss[loss=0.1882, simple_loss=0.2773, pruned_loss=0.04955, over 4904.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2815, pruned_loss=0.05319, over 935582.74 frames. ], batch size: 13, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:04,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.43 vs. 
limit=15.0 +2024-07-28 10:23:18,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143338.66666666666, ans=0.125 +2024-07-28 10:23:25,468 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:23:26,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143352.0, ans=0.1 +2024-07-28 10:23:30,538 INFO [train.py:1114] (1/4) Epoch 11, batch 5300, loss[loss=0.1925, simple_loss=0.2778, pruned_loss=0.05358, over 4660.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2812, pruned_loss=0.05346, over 934070.55 frames. ], batch size: 16, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:56,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=143392.0, ans=0.025 +2024-07-28 10:24:04,424 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.65 vs. limit=22.5 +2024-07-28 10:24:13,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.558e+01 6.072e+01 7.139e+01 1.045e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 10:24:14,196 INFO [train.py:1114] (1/4) Epoch 11, batch 5350, loss[loss=0.1556, simple_loss=0.2334, pruned_loss=0.03895, over 4483.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2827, pruned_loss=0.05379, over 936053.84 frames. ], batch size: 10, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:14,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143432.0, ans=0.125 +2024-07-28 10:24:19,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=143432.0, ans=10.0 +2024-07-28 10:24:25,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143445.33333333334, ans=0.1 +2024-07-28 10:24:28,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=143458.66666666666, ans=0.0 +2024-07-28 10:24:34,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.27 vs. limit=12.0 +2024-07-28 10:24:38,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.00 vs. limit=15.0 +2024-07-28 10:24:40,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=143472.0, ans=0.125 +2024-07-28 10:24:41,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=143485.33333333334, ans=0.025 +2024-07-28 10:24:45,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=143485.33333333334, ans=0.2 +2024-07-28 10:24:48,735 INFO [train.py:1114] (1/4) Epoch 11, batch 5400, loss[loss=0.1997, simple_loss=0.2813, pruned_loss=0.05905, over 4344.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2827, pruned_loss=0.0541, over 930101.52 frames. 
], batch size: 25, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:54,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143498.66666666666, ans=0.1 +2024-07-28 10:24:58,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=143512.0, ans=0.0 +2024-07-28 10:25:00,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0 +2024-07-28 10:25:18,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=143525.33333333334, ans=0.025 +2024-07-28 10:25:20,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.01 vs. limit=10.0 +2024-07-28 10:25:24,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143538.66666666666, ans=0.125 +2024-07-28 10:25:25,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=143538.66666666666, ans=0.0 +2024-07-28 10:25:36,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=143552.0, ans=0.125 +2024-07-28 10:25:40,459 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.583e+01 6.179e+01 6.977e+01 1.082e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 10:25:40,492 INFO [train.py:1114] (1/4) Epoch 11, batch 5450, loss[loss=0.1735, simple_loss=0.2521, pruned_loss=0.0475, over 4696.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.282, pruned_loss=0.05368, over 932859.04 frames. ], batch size: 11, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:25:41,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=143565.33333333334, ans=0.2 +2024-07-28 10:25:46,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=143565.33333333334, ans=0.0 +2024-07-28 10:25:51,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=143578.66666666666, ans=0.0 +2024-07-28 10:25:53,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143578.66666666666, ans=0.125 +2024-07-28 10:25:55,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=143592.0, ans=0.2 +2024-07-28 10:26:19,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=143618.66666666666, ans=0.0 +2024-07-28 10:26:23,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=143632.0, ans=0.2 +2024-07-28 10:26:23,927 INFO [train.py:1114] (1/4) Epoch 11, batch 5500, loss[loss=0.2385, simple_loss=0.3223, pruned_loss=0.07739, over 4303.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2814, pruned_loss=0.05358, over 930857.59 frames. 
], batch size: 25, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:26:28,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143632.0, ans=0.1 +2024-07-28 10:26:29,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143645.33333333334, ans=0.1 +2024-07-28 10:32:05,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=143672.0, ans=0.125 +2024-07-28 10:32:15,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=143685.33333333334, ans=0.2 +2024-07-28 10:32:22,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.760e+01 6.498e+01 7.825e+01 1.226e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 10:32:22,338 INFO [train.py:1114] (1/4) Epoch 11, batch 5550, loss[loss=0.1817, simple_loss=0.2672, pruned_loss=0.04812, over 4712.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2815, pruned_loss=0.05384, over 933116.19 frames. ], batch size: 12, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:32:37,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143725.33333333334, ans=0.125 +2024-07-28 10:32:42,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=12.0 +2024-07-28 10:32:43,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=143725.33333333334, ans=0.125 +2024-07-28 10:32:48,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=143738.66666666666, ans=10.0 +2024-07-28 10:32:49,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143738.66666666666, ans=0.125 +2024-07-28 10:32:56,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.68 vs. limit=15.0 +2024-07-28 10:32:58,923 INFO [train.py:1114] (1/4) Epoch 11, batch 5600, loss[loss=0.2265, simple_loss=0.305, pruned_loss=0.07403, over 4739.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2815, pruned_loss=0.05403, over 934296.45 frames. ], batch size: 14, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:33:05,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.02 vs. limit=22.5 +2024-07-28 10:33:09,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=143778.66666666666, ans=0.025 +2024-07-28 10:33:17,877 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.11 vs. limit=15.0 +2024-07-28 10:33:18,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143792.0, ans=0.1 +2024-07-28 10:33:19,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.85 vs. 
limit=15.0 +2024-07-28 10:33:29,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=143805.33333333334, ans=0.2 +2024-07-28 10:33:30,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143805.33333333334, ans=0.125 +2024-07-28 10:33:31,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.10 vs. limit=15.0 +2024-07-28 10:33:33,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=143805.33333333334, ans=0.125 +2024-07-28 10:33:35,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143805.33333333334, ans=0.125 +2024-07-28 10:33:42,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=143818.66666666666, ans=0.125 +2024-07-28 10:33:43,358 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.077e+01 6.890e+01 8.236e+01 1.387e+02, threshold=1.378e+02, percent-clipped=1.0 +2024-07-28 10:33:44,775 INFO [train.py:1114] (1/4) Epoch 11, batch 5650, loss[loss=0.2402, simple_loss=0.3149, pruned_loss=0.0828, over 4486.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2812, pruned_loss=0.05365, over 936917.56 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:33:45,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.27 vs. limit=15.0 +2024-07-28 10:33:46,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.91 vs. limit=10.0 +2024-07-28 10:33:48,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143832.0, ans=0.125 +2024-07-28 10:33:51,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=143845.33333333334, ans=0.125 +2024-07-28 10:33:57,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=143845.33333333334, ans=0.125 +2024-07-28 10:34:04,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143858.66666666666, ans=0.125 +2024-07-28 10:34:06,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.18 vs. limit=15.0 +2024-07-28 10:34:08,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.01 vs. 
limit=15.0 +2024-07-28 10:34:12,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=143872.0, ans=0.125 +2024-07-28 10:34:18,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=143885.33333333334, ans=0.0 +2024-07-28 10:34:20,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=143885.33333333334, ans=0.07 +2024-07-28 10:34:21,287 INFO [train.py:1114] (1/4) Epoch 11, batch 5700, loss[loss=0.1966, simple_loss=0.2812, pruned_loss=0.05596, over 4689.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2817, pruned_loss=0.05356, over 937650.51 frames. ], batch size: 13, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:34:34,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=143912.0, ans=0.025 +2024-07-28 10:34:56,152 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.558e+01 6.017e+01 6.629e+01 9.464e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 10:34:56,185 INFO [train.py:1114] (1/4) Epoch 11, batch 5750, loss[loss=0.2207, simple_loss=0.3053, pruned_loss=0.06809, over 4696.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2833, pruned_loss=0.05418, over 937546.54 frames. ], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:34:56,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=143965.33333333334, ans=0.2 +2024-07-28 10:35:04,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=143978.66666666666, ans=0.0 +2024-07-28 10:35:07,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=143978.66666666666, ans=0.025 +2024-07-28 10:35:20,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=143992.0, ans=0.125 +2024-07-28 10:35:24,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=144005.33333333334, ans=0.1 +2024-07-28 10:35:28,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=144005.33333333334, ans=0.2 +2024-07-28 10:35:31,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144018.66666666666, ans=0.0 +2024-07-28 10:35:36,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=144018.66666666666, ans=0.0 +2024-07-28 10:35:37,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=144032.0, ans=0.025 +2024-07-28 10:35:37,647 INFO [train.py:1114] (1/4) Epoch 11, batch 5800, loss[loss=0.1937, simple_loss=0.2752, pruned_loss=0.05609, over 4717.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2838, pruned_loss=0.05437, over 936662.99 frames. 
], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:35:37,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144032.0, ans=0.125 +2024-07-28 10:35:50,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144058.66666666666, ans=0.125 +2024-07-28 10:35:55,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=144058.66666666666, ans=0.2 +2024-07-28 10:36:04,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=144085.33333333334, ans=0.125 +2024-07-28 10:36:05,308 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:36:10,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=144098.66666666666, ans=0.035 +2024-07-28 10:36:11,529 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.791e+01 6.490e+01 7.663e+01 1.100e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 10:36:11,562 INFO [train.py:1114] (1/4) Epoch 11, batch 5850, loss[loss=0.1882, simple_loss=0.2802, pruned_loss=0.04808, over 4497.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2827, pruned_loss=0.05413, over 937471.51 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:36:13,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144098.66666666666, ans=0.125 +2024-07-28 10:36:23,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=144112.0, ans=0.2 +2024-07-28 10:36:37,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-28 10:36:45,263 INFO [train.py:1114] (1/4) Epoch 11, batch 5900, loss[loss=0.2153, simple_loss=0.3089, pruned_loss=0.06081, over 4688.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2823, pruned_loss=0.0543, over 938023.87 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:36:47,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=144165.33333333334, ans=0.035 +2024-07-28 10:37:00,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144178.66666666666, ans=0.0 +2024-07-28 10:37:28,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=144205.33333333334, ans=0.035 +2024-07-28 10:37:40,536 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.784e+01 6.286e+01 7.070e+01 1.230e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 10:37:40,569 INFO [train.py:1114] (1/4) Epoch 11, batch 5950, loss[loss=0.2122, simple_loss=0.2978, pruned_loss=0.06327, over 4702.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2821, pruned_loss=0.05388, over 939643.52 frames. 
], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:37:48,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144232.0, ans=0.125 +2024-07-28 10:37:52,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144245.33333333334, ans=0.1 +2024-07-28 10:38:07,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.34 vs. limit=10.0 +2024-07-28 10:38:13,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=144272.0, ans=0.025 +2024-07-28 10:38:16,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=144272.0, ans=0.2 +2024-07-28 10:38:25,029 INFO [train.py:1114] (1/4) Epoch 11, batch 6000, loss[loss=0.2383, simple_loss=0.3316, pruned_loss=0.07248, over 4226.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2823, pruned_loss=0.05403, over 937034.66 frames. ], batch size: 25, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:38:25,029 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 10:39:04,064 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.7437, 4.4777, 3.8440, 4.4786], device='cuda:1') +2024-07-28 10:39:08,753 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1692, simple_loss=0.2732, pruned_loss=0.03262, over 944034.00 frames. +2024-07-28 10:39:08,754 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 10:39:10,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=144298.66666666666, ans=0.125 +2024-07-28 10:39:13,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=144298.66666666666, ans=0.0 +2024-07-28 10:39:27,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144325.33333333334, ans=0.1 +2024-07-28 10:39:27,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.28 vs. limit=22.5 +2024-07-28 10:39:28,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=144325.33333333334, ans=0.125 +2024-07-28 10:39:29,048 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.58 vs. limit=12.0 +2024-07-28 10:39:35,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144352.0, ans=0.1 +2024-07-28 10:39:45,806 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.724e+01 6.626e+01 8.238e+01 1.220e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 10:39:45,839 INFO [train.py:1114] (1/4) Epoch 11, batch 6050, loss[loss=0.1736, simple_loss=0.2634, pruned_loss=0.04188, over 4784.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2805, pruned_loss=0.05318, over 938379.36 frames. 
], batch size: 12, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:39:47,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=144365.33333333334, ans=0.95 +2024-07-28 10:40:04,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=144378.66666666666, ans=0.0 +2024-07-28 10:40:15,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=144405.33333333334, ans=0.0 +2024-07-28 10:40:23,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=144418.66666666666, ans=0.125 +2024-07-28 10:40:28,017 INFO [train.py:1114] (1/4) Epoch 11, batch 6100, loss[loss=0.205, simple_loss=0.2903, pruned_loss=0.05982, over 4691.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.28, pruned_loss=0.05271, over 937812.36 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:40:28,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=144432.0, ans=0.2 +2024-07-28 10:40:32,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144432.0, ans=0.125 +2024-07-28 10:40:36,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=144445.33333333334, ans=0.0 +2024-07-28 10:40:39,124 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.70 vs. limit=22.5 +2024-07-28 10:40:49,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.70 vs. limit=22.5 +2024-07-28 10:40:55,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144485.33333333334, ans=0.125 +2024-07-28 10:41:01,435 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.304e+01 5.350e+01 6.027e+01 7.047e+01 1.301e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 10:41:01,468 INFO [train.py:1114] (1/4) Epoch 11, batch 6150, loss[loss=0.2204, simple_loss=0.3055, pruned_loss=0.06772, over 3741.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2807, pruned_loss=0.05244, over 937207.32 frames. ], batch size: 35, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:41:02,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144498.66666666666, ans=0.125 +2024-07-28 10:41:03,817 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.87 vs. limit=22.5 +2024-07-28 10:41:04,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.48 vs. 
limit=22.5 +2024-07-28 10:41:15,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144512.0, ans=0.1 +2024-07-28 10:41:21,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=144525.33333333334, ans=0.2 +2024-07-28 10:41:22,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=144525.33333333334, ans=0.2 +2024-07-28 10:41:35,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=144552.0, ans=0.025 +2024-07-28 10:41:37,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144552.0, ans=0.125 +2024-07-28 10:41:39,924 INFO [train.py:1114] (1/4) Epoch 11, batch 6200, loss[loss=0.1985, simple_loss=0.2863, pruned_loss=0.05532, over 4745.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2812, pruned_loss=0.05298, over 936564.18 frames. ], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:12,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=144618.66666666666, ans=0.0 +2024-07-28 10:42:15,875 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.697e+01 6.150e+01 7.002e+01 1.067e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:42:15,908 INFO [train.py:1114] (1/4) Epoch 11, batch 6250, loss[loss=0.1837, simple_loss=0.2733, pruned_loss=0.04703, over 4809.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2813, pruned_loss=0.05336, over 933669.31 frames. ], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:17,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=144632.0, ans=0.0 +2024-07-28 10:42:23,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.35 vs. limit=6.0 +2024-07-28 10:42:25,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=144645.33333333334, ans=0.0 +2024-07-28 10:42:26,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144645.33333333334, ans=0.1 +2024-07-28 10:42:47,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=144685.33333333334, ans=0.125 +2024-07-28 10:42:54,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. limit=6.0 +2024-07-28 10:42:58,533 INFO [train.py:1114] (1/4) Epoch 11, batch 6300, loss[loss=0.1618, simple_loss=0.2402, pruned_loss=0.04166, over 4549.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2816, pruned_loss=0.05375, over 930494.01 frames. ], batch size: 10, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:58,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.82 vs. 
limit=12.0 +2024-07-28 10:43:02,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144698.66666666666, ans=0.125 +2024-07-28 10:43:16,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=144725.33333333334, ans=0.0 +2024-07-28 10:43:17,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144725.33333333334, ans=0.1 +2024-07-28 10:43:23,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=144738.66666666666, ans=0.125 +2024-07-28 10:43:26,687 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:43:35,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0 +2024-07-28 10:43:36,748 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.336e+01 5.612e+01 6.120e+01 6.711e+01 9.743e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 10:43:36,781 INFO [train.py:1114] (1/4) Epoch 11, batch 6350, loss[loss=0.2242, simple_loss=0.3119, pruned_loss=0.06822, over 4489.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2816, pruned_loss=0.05375, over 934470.26 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:43:43,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=144778.66666666666, ans=0.0 +2024-07-28 10:43:50,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144778.66666666666, ans=0.125 +2024-07-28 10:43:52,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=144792.0, ans=0.0 +2024-07-28 10:44:01,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144805.33333333334, ans=0.1 +2024-07-28 10:44:02,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=144805.33333333334, ans=0.2 +2024-07-28 10:44:06,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144818.66666666666, ans=0.1 +2024-07-28 10:44:11,945 INFO [train.py:1114] (1/4) Epoch 11, batch 6400, loss[loss=0.1932, simple_loss=0.2974, pruned_loss=0.0445, over 4634.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2814, pruned_loss=0.05331, over 935373.79 frames. ], batch size: 13, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:20,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=144845.33333333334, ans=0.125 +2024-07-28 10:44:20,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144845.33333333334, ans=0.0 +2024-07-28 10:44:24,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=144858.66666666666, ans=0.2 +2024-07-28 10:44:25,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.30 vs. 
limit=15.0 +2024-07-28 10:44:31,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=144872.0, ans=0.2 +2024-07-28 10:44:45,102 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.883e+01 6.533e+01 7.974e+01 1.055e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 10:44:45,135 INFO [train.py:1114] (1/4) Epoch 11, batch 6450, loss[loss=0.2088, simple_loss=0.2957, pruned_loss=0.06096, over 4642.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2823, pruned_loss=0.05335, over 939019.16 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:48,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=22.5 +2024-07-28 10:44:49,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-07-28 10:44:56,873 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=15.0 +2024-07-28 10:45:01,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144925.33333333334, ans=0.0 +2024-07-28 10:45:01,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.77 vs. limit=12.0 +2024-07-28 10:45:06,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144938.66666666666, ans=0.125 +2024-07-28 10:45:11,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144952.0, ans=0.0 +2024-07-28 10:45:12,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.46 vs. limit=15.0 +2024-07-28 10:45:17,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144965.33333333334, ans=0.1 +2024-07-28 10:45:18,201 INFO [train.py:1114] (1/4) Epoch 11, batch 6500, loss[loss=0.2668, simple_loss=0.3179, pruned_loss=0.1079, over 3507.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2819, pruned_loss=0.05329, over 940069.43 frames. 
], batch size: 35, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:45:18,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144965.33333333334, ans=0.1 +2024-07-28 10:45:23,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=144965.33333333334, ans=0.0 +2024-07-28 10:45:43,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=145005.33333333334, ans=0.125 +2024-07-28 10:45:49,805 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:45:51,639 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.465e+01 5.571e+01 6.263e+01 7.370e+01 1.165e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 10:45:51,672 INFO [train.py:1114] (1/4) Epoch 11, batch 6550, loss[loss=0.19, simple_loss=0.2688, pruned_loss=0.05557, over 4817.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2807, pruned_loss=0.05262, over 943114.20 frames. ], batch size: 11, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:45:55,342 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.45 vs. limit=6.0 +2024-07-28 10:45:59,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145045.33333333334, ans=0.1 +2024-07-28 10:46:01,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=145045.33333333334, ans=0.04949747468305833 +2024-07-28 10:46:06,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=145058.66666666666, ans=0.2 +2024-07-28 10:46:15,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=145072.0, ans=0.125 +2024-07-28 10:46:19,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145085.33333333334, ans=0.1 +2024-07-28 10:46:20,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=145085.33333333334, ans=0.2 +2024-07-28 10:46:25,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=145098.66666666666, ans=0.025 +2024-07-28 10:46:25,931 INFO [train.py:1114] (1/4) Epoch 11, batch 6600, loss[loss=0.1912, simple_loss=0.2779, pruned_loss=0.05222, over 4931.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2812, pruned_loss=0.05276, over 944996.33 frames. 
], batch size: 14, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:46:36,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=145112.0, ans=0.125 +2024-07-28 10:46:39,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145125.33333333334, ans=0.125 +2024-07-28 10:46:41,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=145125.33333333334, ans=0.0 +2024-07-28 10:46:42,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145125.33333333334, ans=0.1 +2024-07-28 10:46:50,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=145138.66666666666, ans=0.125 +2024-07-28 10:46:56,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145152.0, ans=0.1 +2024-07-28 10:46:59,235 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.651e+01 6.150e+01 6.851e+01 8.170e+01 1.263e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-28 10:46:59,269 INFO [train.py:1114] (1/4) Epoch 11, batch 6650, loss[loss=0.2442, simple_loss=0.3361, pruned_loss=0.0762, over 4627.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2816, pruned_loss=0.0535, over 943416.63 frames. ], batch size: 17, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:47:28,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=12.0 +2024-07-28 10:47:35,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.51 vs. limit=15.0 +2024-07-28 10:47:36,241 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:47:43,297 INFO [train.py:1114] (1/4) Epoch 11, batch 6700, loss[loss=0.2483, simple_loss=0.3179, pruned_loss=0.0893, over 4767.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2817, pruned_loss=0.05354, over 942311.47 frames. ], batch size: 19, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:48:00,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.48 vs. limit=6.0 +2024-07-28 10:48:01,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=145258.66666666666, ans=0.025 +2024-07-28 10:48:02,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145272.0, ans=0.125 +2024-07-28 10:48:03,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145272.0, ans=0.125 +2024-07-28 10:48:03,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=145272.0, ans=0.2 +2024-07-28 10:48:04,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.71 vs. 
limit=15.0 +2024-07-28 10:48:21,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=145298.66666666666, ans=0.0 +2024-07-28 10:48:21,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+01 5.801e+01 6.276e+01 7.380e+01 1.183e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 10:48:21,969 INFO [train.py:1114] (1/4) Epoch 11, batch 6750, loss[loss=0.1937, simple_loss=0.2814, pruned_loss=0.05304, over 4209.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2819, pruned_loss=0.05402, over 940524.28 frames. ], batch size: 25, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:48:27,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=145298.66666666666, ans=0.125 +2024-07-28 10:48:30,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145312.0, ans=0.125 +2024-07-28 10:48:32,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=145312.0, ans=15.0 +2024-07-28 10:48:38,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=145325.33333333334, ans=0.0 +2024-07-28 10:48:42,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-28 10:49:06,042 INFO [train.py:1114] (1/4) Epoch 11, batch 6800, loss[loss=0.2075, simple_loss=0.2903, pruned_loss=0.06229, over 4632.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2822, pruned_loss=0.05434, over 938601.56 frames. ], batch size: 13, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:49:06,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=145365.33333333334, ans=0.125 +2024-07-28 10:49:10,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=145365.33333333334, ans=0.0 +2024-07-28 10:49:28,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145405.33333333334, ans=0.1 +2024-07-28 10:49:34,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=145405.33333333334, ans=0.0 +2024-07-28 10:49:43,880 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.530e+01 6.115e+01 7.020e+01 1.132e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 10:49:43,913 INFO [train.py:1114] (1/4) Epoch 11, batch 6850, loss[loss=0.2111, simple_loss=0.3017, pruned_loss=0.06026, over 4695.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2817, pruned_loss=0.05398, over 940390.21 frames. 
+2024-07-28 10:49:56,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=145445.33333333334, ans=0.07
+2024-07-28 10:50:12,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=145472.0, ans=0.125
+2024-07-28 10:50:12,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=145472.0, ans=0.0
+2024-07-28 10:50:19,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.02 vs. limit=15.0
+2024-07-28 10:50:19,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=145485.33333333334, ans=0.2
+2024-07-28 10:50:22,660 INFO [train.py:1114] (1/4) Epoch 11, batch 6900, loss[loss=0.1721, simple_loss=0.2621, pruned_loss=0.04107, over 4967.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2823, pruned_loss=0.05433, over 942912.29 frames. ], batch size: 13, lr: 6.77e-03, grad_scale: 32.0
+2024-07-28 10:50:29,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=145512.0, ans=0.125
+2024-07-28 10:50:49,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=145552.0, ans=0.125
+2024-07-28 10:50:54,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.44 vs. limit=22.5
+2024-07-28 10:50:55,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=145552.0, ans=0.125
+2024-07-28 10:50:57,125 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+01 5.560e+01 6.281e+01 7.160e+01 1.002e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 10:50:57,158 INFO [train.py:1114] (1/4) Epoch 11, batch 6950, loss[loss=0.1923, simple_loss=0.2761, pruned_loss=0.05429, over 4535.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2823, pruned_loss=0.05429, over 940415.53 frames. ], batch size: 10, lr: 6.77e-03, grad_scale: 32.0
+2024-07-28 10:50:58,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=145565.33333333334, ans=0.125
+2024-07-28 10:51:01,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145565.33333333334, ans=0.1
+2024-07-28 10:51:05,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=145578.66666666666, ans=0.0
+2024-07-28 10:51:06,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=145578.66666666666, ans=0.2
+2024-07-28 10:51:09,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=145578.66666666666, ans=0.0
+2024-07-28 10:51:13,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=145592.0, ans=0.125
+2024-07-28 10:51:17,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=145605.33333333334, ans=0.125
+2024-07-28 10:51:21,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145605.33333333334, ans=0.125
+2024-07-28 10:51:31,224 INFO [train.py:1114] (1/4) Epoch 11, batch 7000, loss[loss=0.2062, simple_loss=0.291, pruned_loss=0.06067, over 4589.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2819, pruned_loss=0.05397, over 938621.26 frames. ], batch size: 17, lr: 6.77e-03, grad_scale: 32.0
+2024-07-28 10:51:31,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=145632.0, ans=0.125
+2024-07-28 10:51:33,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=145632.0, ans=0.0
+2024-07-28 10:51:37,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145645.33333333334, ans=0.1
+2024-07-28 10:51:44,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.11 vs. limit=22.5
+2024-07-28 10:51:58,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=145685.33333333334, ans=0.125
+2024-07-28 10:52:03,909 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.703e+01 6.345e+01 7.288e+01 1.132e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 10:52:03,942 INFO [train.py:1114] (1/4) Epoch 11, batch 7050, loss[loss=0.2249, simple_loss=0.3165, pruned_loss=0.06661, over 4699.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2817, pruned_loss=0.05387, over 942127.63 frames. ], batch size: 19, lr: 6.77e-03, grad_scale: 32.0
+2024-07-28 10:52:05,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=145698.66666666666, ans=0.125
+2024-07-28 10:52:06,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.53 vs. limit=15.0
+2024-07-28 10:52:20,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-28 10:52:33,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145752.0, ans=0.125
+2024-07-28 10:52:38,906 INFO [train.py:1114] (1/4) Epoch 11, batch 7100, loss[loss=0.2017, simple_loss=0.2951, pruned_loss=0.05411, over 4817.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2827, pruned_loss=0.05431, over 936811.98 frames. ], batch size: 15, lr: 6.77e-03, grad_scale: 32.0
+2024-07-28 10:52:45,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=145778.66666666666, ans=0.125
+2024-07-28 10:52:50,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.35 vs. limit=22.5
+2024-07-28 10:53:06,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=145818.66666666666, ans=0.2
+2024-07-28 10:53:08,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=145818.66666666666, ans=0.125
+2024-07-28 10:53:11,357 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.413e+01 6.227e+01 7.503e+01 1.030e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 10:53:11,390 INFO [train.py:1114] (1/4) Epoch 11, batch 7150, loss[loss=0.228, simple_loss=0.311, pruned_loss=0.0725, over 4574.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2814, pruned_loss=0.05396, over 937668.27 frames. ], batch size: 21, lr: 6.76e-03, grad_scale: 32.0
+2024-07-28 10:53:11,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.62 vs. limit=15.0
+2024-07-28 10:53:20,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=145845.33333333334, ans=0.125
+2024-07-28 10:53:33,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=145872.0, ans=0.0
+2024-07-28 10:53:39,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.67 vs. limit=22.5
+2024-07-28 10:53:44,201 INFO [train.py:1114] (1/4) Epoch 11, batch 7200, loss[loss=0.1851, simple_loss=0.2747, pruned_loss=0.04777, over 4809.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2823, pruned_loss=0.05401, over 938506.19 frames. ], batch size: 15, lr: 6.76e-03, grad_scale: 32.0
+2024-07-28 10:53:49,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.14 vs. limit=22.5
+2024-07-28 10:53:50,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=145912.0, ans=0.025
+2024-07-28 10:53:54,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=145912.0, ans=0.125
+2024-07-28 10:53:57,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=145925.33333333334, ans=0.125
+2024-07-28 10:54:01,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145925.33333333334, ans=0.125
+2024-07-28 10:54:03,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145925.33333333334, ans=0.125
+2024-07-28 10:54:04,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145938.66666666666, ans=0.1
+2024-07-28 10:54:08,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=145938.66666666666, ans=0.125
+2024-07-28 10:54:10,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=145952.0, ans=0.0
+2024-07-28 10:54:23,289 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.502e+01 5.961e+01 6.542e+01 9.167e+01, threshold=1.192e+02, percent-clipped=0.0
+2024-07-28 10:54:23,323 INFO [train.py:1114] (1/4) Epoch 11, batch 7250, loss[loss=0.1771, simple_loss=0.2517, pruned_loss=0.05126, over 4857.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2811, pruned_loss=0.05343, over 940261.49 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0
+2024-07-28 10:54:30,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=145965.33333333334, ans=0.125
+2024-07-28 10:54:35,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=145978.66666666666, ans=0.2
+2024-07-28 10:55:05,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=146005.33333333334, ans=0.0
+2024-07-28 10:55:06,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=146005.33333333334, ans=0.0
+2024-07-28 10:55:09,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=146018.66666666666, ans=0.125
+2024-07-28 10:55:12,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=146018.66666666666, ans=0.07
+2024-07-28 10:55:14,508 INFO [train.py:1114] (1/4) Epoch 11, batch 7300, loss[loss=0.1649, simple_loss=0.251, pruned_loss=0.03934, over 4852.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2805, pruned_loss=0.05291, over 940128.15 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0
+2024-07-28 10:55:16,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=146032.0, ans=0.125
+2024-07-28 10:55:27,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=146058.66666666666, ans=0.025
+2024-07-28 10:55:37,519 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.72 vs. limit=15.0
+2024-07-28 10:55:45,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=146085.33333333334, ans=0.0
+2024-07-28 10:55:46,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146085.33333333334, ans=0.1
+2024-07-28 10:55:48,663 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 10:55:49,207 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.651e+01 6.063e+01 6.776e+01 1.053e+02, threshold=1.213e+02, percent-clipped=0.0
+2024-07-28 10:55:49,240 INFO [train.py:1114] (1/4) Epoch 11, batch 7350, loss[loss=0.2074, simple_loss=0.3025, pruned_loss=0.05614, over 4641.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2807, pruned_loss=0.05266, over 939460.65 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0
+2024-07-28 10:56:02,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=146098.66666666666, ans=0.0
+2024-07-28 10:56:02,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=146098.66666666666, ans=0.2
+2024-07-28 10:56:09,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=146112.0, ans=0.125
+2024-07-28 10:56:20,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=146125.33333333334, ans=0.0
+2024-07-28 10:56:24,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.71 vs. limit=6.0
+2024-07-28 10:56:29,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=12.0
+2024-07-28 10:56:32,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=146152.0, ans=0.0
+2024-07-28 10:56:35,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=146152.0, ans=0.0
+2024-07-28 10:56:36,372 INFO [train.py:1114] (1/4) Epoch 11, batch 7400, loss[loss=0.2122, simple_loss=0.2942, pruned_loss=0.06505, over 4694.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2814, pruned_loss=0.05274, over 940927.37 frames. ], batch size: 13, lr: 6.76e-03, grad_scale: 32.0
+2024-07-28 10:57:01,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=146205.33333333334, ans=0.0
+2024-07-28 10:57:11,528 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.675e+01 6.306e+01 7.270e+01 1.053e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 10:57:11,561 INFO [train.py:1114] (1/4) Epoch 11, batch 7450, loss[loss=0.1771, simple_loss=0.2697, pruned_loss=0.04226, over 4617.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2814, pruned_loss=0.0535, over 938712.25 frames. ], batch size: 11, lr: 6.76e-03, grad_scale: 64.0
+2024-07-28 10:57:14,875 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 10:57:23,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=146245.33333333334, ans=0.125
+2024-07-28 10:57:25,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=146258.66666666666, ans=10.0
+2024-07-28 10:57:27,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.07 vs. limit=15.0
+2024-07-28 10:57:27,355 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 10:57:28,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=146258.66666666666, ans=0.0
+2024-07-28 10:57:30,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=146258.66666666666, ans=0.0
+2024-07-28 10:57:45,282 INFO [train.py:1114] (1/4) Epoch 11, batch 7500, loss[loss=0.2411, simple_loss=0.3232, pruned_loss=0.07944, over 3603.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2815, pruned_loss=0.05344, over 936945.98 frames. ], batch size: 37, lr: 6.75e-03, grad_scale: 32.0
+2024-07-28 10:57:45,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=146298.66666666666, ans=0.5
+2024-07-28 10:57:52,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=146312.0, ans=0.1
+2024-07-28 10:57:53,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=146312.0, ans=0.0
+2024-07-28 10:58:06,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=146325.33333333334, ans=0.0
+2024-07-28 10:58:11,907 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.29 vs. limit=22.5
+2024-07-28 10:58:13,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=146338.66666666666, ans=0.125
+2024-07-28 10:58:16,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=146352.0, ans=0.125
+2024-07-28 10:58:24,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=146352.0, ans=0.035
+2024-07-28 10:58:36,008 INFO [train.py:1114] (1/4) Epoch 11, batch 7550, loss[loss=0.2274, simple_loss=0.3145, pruned_loss=0.07015, over 4638.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2828, pruned_loss=0.05426, over 936973.35 frames. ], batch size: 17, lr: 6.75e-03, grad_scale: 32.0
+2024-07-28 10:58:37,332 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.706e+01 6.227e+01 6.985e+01 1.230e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 10:59:00,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.81 vs. limit=15.0
+2024-07-28 10:59:10,232 INFO [train.py:1114] (1/4) Epoch 11, batch 7600, loss[loss=0.1648, simple_loss=0.2626, pruned_loss=0.03353, over 4806.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2821, pruned_loss=0.05399, over 938406.81 frames. ], batch size: 14, lr: 6.75e-03, grad_scale: 32.0
+2024-07-28 10:59:13,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.76 vs. limit=22.5
+2024-07-28 10:59:14,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.27 vs. limit=15.0
+2024-07-28 10:59:25,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=146445.33333333334, ans=0.0
+2024-07-28 10:59:55,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146485.33333333334, ans=0.125
+2024-07-28 10:59:56,748 INFO [train.py:1114] (1/4) Epoch 11, batch 7650, loss[loss=0.1619, simple_loss=0.2585, pruned_loss=0.0327, over 4928.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2815, pruned_loss=0.05402, over 937918.86 frames. ], batch size: 12, lr: 6.75e-03, grad_scale: 32.0
+2024-07-28 10:59:57,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.694e+01 6.162e+01 7.312e+01 1.050e+02, threshold=1.232e+02, percent-clipped=0.0
+2024-07-28 11:00:02,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=146498.66666666666, ans=0.2
+2024-07-28 11:00:19,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.32 vs. limit=22.5
+2024-07-28 11:00:27,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=146538.66666666666, ans=0.0
+2024-07-28 11:00:42,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=12.0
+2024-07-28 11:00:43,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=146552.0, ans=0.0
+2024-07-28 11:00:51,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=146552.0, ans=0.2
+2024-07-28 11:00:51,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=146565.33333333334, ans=0.025
+2024-07-28 11:00:52,403 INFO [train.py:1114] (1/4) Epoch 11, batch 7700, loss[loss=0.1932, simple_loss=0.2842, pruned_loss=0.05114, over 4690.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2822, pruned_loss=0.05423, over 935135.77 frames. ], batch size: 13, lr: 6.75e-03, grad_scale: 32.0
+2024-07-28 11:00:52,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=146565.33333333334, ans=0.2
+2024-07-28 11:00:56,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=146565.33333333334, ans=0.025
+2024-07-28 11:00:58,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=146578.66666666666, ans=0.0
+2024-07-28 11:01:00,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=146578.66666666666, ans=0.125
+2024-07-28 11:01:10,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146592.0, ans=0.125
+2024-07-28 11:01:16,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.67 vs. limit=15.0
+2024-07-28 11:01:22,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0
+2024-07-28 11:01:24,494 INFO [train.py:1114] (1/4) Epoch 11, batch 7750, loss[loss=0.1915, simple_loss=0.275, pruned_loss=0.05398, over 4932.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2834, pruned_loss=0.05451, over 936132.17 frames. ], batch size: 14, lr: 6.75e-03, grad_scale: 32.0
+2024-07-28 11:01:25,061 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.502e+01 5.839e+01 6.536e+01 9.660e+01, threshold=1.168e+02, percent-clipped=0.0
+2024-07-28 11:01:32,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=146645.33333333334, ans=0.0
+2024-07-28 11:01:36,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=146658.66666666666, ans=0.125
+2024-07-28 11:01:38,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=146658.66666666666, ans=0.0
+2024-07-28 11:01:43,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=146672.0, ans=0.125
+2024-07-28 11:01:44,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=146672.0, ans=0.09899494936611666
+2024-07-28 11:01:47,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=146672.0, ans=0.0
+2024-07-28 11:01:49,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=146672.0, ans=0.2
+2024-07-28 11:01:52,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.24 vs. limit=22.5
+2024-07-28 11:01:53,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=146685.33333333334, ans=0.04949747468305833
+2024-07-28 11:01:57,059 INFO [train.py:1114] (1/4) Epoch 11, batch 7800, loss[loss=0.1729, simple_loss=0.266, pruned_loss=0.03994, over 4668.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2827, pruned_loss=0.05446, over 937661.67 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:02:00,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=146698.66666666666, ans=0.125
+2024-07-28 11:02:07,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=146712.0, ans=0.025
+2024-07-28 11:02:25,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=146752.0, ans=0.125
+2024-07-28 11:02:29,897 INFO [train.py:1114] (1/4) Epoch 11, batch 7850, loss[loss=0.1723, simple_loss=0.251, pruned_loss=0.04674, over 4531.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2837, pruned_loss=0.05484, over 936433.23 frames. ], batch size: 10, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:02:30,520 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.817e+01 6.561e+01 7.399e+01 1.277e+02, threshold=1.312e+02, percent-clipped=1.0
+2024-07-28 11:02:31,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0
+2024-07-28 11:02:33,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=146765.33333333334, ans=0.125
+2024-07-28 11:02:37,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=146778.66666666666, ans=0.2
+2024-07-28 11:02:39,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=146778.66666666666, ans=0.95
+2024-07-28 11:02:42,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=146792.0, ans=0.125
+2024-07-28 11:02:48,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=146792.0, ans=0.2
+2024-07-28 11:02:51,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=146805.33333333334, ans=0.0
+2024-07-28 11:02:52,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=146805.33333333334, ans=0.125
+2024-07-28 11:02:55,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146805.33333333334, ans=0.125
+2024-07-28 11:02:57,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.85 vs. limit=10.0
+2024-07-28 11:03:00,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=146818.66666666666, ans=0.2
+2024-07-28 11:03:02,642 INFO [train.py:1114] (1/4) Epoch 11, batch 7900, loss[loss=0.1544, simple_loss=0.2522, pruned_loss=0.02835, over 4876.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2854, pruned_loss=0.05517, over 933471.19 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:03:04,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=146832.0, ans=0.2
+2024-07-28 11:03:05,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=146832.0, ans=0.125
+2024-07-28 11:03:09,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146845.33333333334, ans=0.125
+2024-07-28 11:03:35,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=146872.0, ans=0.125
+2024-07-28 11:03:43,573 INFO [train.py:1114] (1/4) Epoch 11, batch 7950, loss[loss=0.2315, simple_loss=0.3153, pruned_loss=0.07387, over 3367.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2856, pruned_loss=0.05517, over 935381.52 frames. ], batch size: 36, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:03:44,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.704e+01 6.229e+01 6.685e+01 9.610e+01, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 11:03:50,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146912.0, ans=0.1
+2024-07-28 11:03:51,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=146912.0, ans=0.125
+2024-07-28 11:03:52,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146912.0, ans=0.125
+2024-07-28 11:03:52,237 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0
+2024-07-28 11:04:15,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.85 vs. limit=15.0
+2024-07-28 11:04:17,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=146952.0, ans=0.2
+2024-07-28 11:04:24,564 INFO [train.py:1114] (1/4) Epoch 11, batch 8000, loss[loss=0.1804, simple_loss=0.2583, pruned_loss=0.05119, over 4620.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2834, pruned_loss=0.05412, over 934910.30 frames. ], batch size: 11, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:04:37,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.08 vs. limit=22.5
+2024-07-28 11:04:42,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. limit=15.0
+2024-07-28 11:04:45,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=147005.33333333334, ans=0.125
+2024-07-28 11:04:45,319 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:04:48,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.98 vs. limit=22.5
+2024-07-28 11:04:49,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=147005.33333333334, ans=0.125
+2024-07-28 11:04:49,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.78 vs. limit=12.0
+2024-07-28 11:04:57,133 INFO [train.py:1114] (1/4) Epoch 11, batch 8050, loss[loss=0.2017, simple_loss=0.2992, pruned_loss=0.05215, over 4822.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2825, pruned_loss=0.05341, over 934644.15 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:04:57,750 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.054e+01 5.507e+01 6.263e+01 7.215e+01 1.111e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 11:05:01,805 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:05:15,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=147072.0, ans=0.0
+2024-07-28 11:05:27,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=147085.33333333334, ans=0.125
+2024-07-28 11:05:28,977 INFO [train.py:1114] (1/4) Epoch 11, batch 8100, loss[loss=0.2221, simple_loss=0.3191, pruned_loss=0.06252, over 4809.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2828, pruned_loss=0.05288, over 933733.45 frames. ], batch size: 15, lr: 6.74e-03, grad_scale: 32.0
+2024-07-28 11:05:33,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=147098.66666666666, ans=0.125
+2024-07-28 11:05:36,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=147098.66666666666, ans=0.2
+2024-07-28 11:05:42,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=147112.0, ans=0.125
+2024-07-28 11:06:03,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=147125.33333333334, ans=0.025
+2024-07-28 11:06:12,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=147138.66666666666, ans=0.0
+2024-07-28 11:06:15,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=147152.0, ans=0.125
+2024-07-28 11:06:18,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147152.0, ans=0.1
+2024-07-28 11:06:19,671 INFO [train.py:1114] (1/4) Epoch 11, batch 8150, loss[loss=0.1811, simple_loss=0.2628, pruned_loss=0.04971, over 4786.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2809, pruned_loss=0.05211, over 937038.96 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:06:20,269 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.553e+01 6.182e+01 6.972e+01 1.059e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 11:06:26,782 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:06:28,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=147178.66666666666, ans=0.0
+2024-07-28 11:06:42,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=147205.33333333334, ans=0.2
+2024-07-28 11:06:47,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=147218.66666666666, ans=0.125
+2024-07-28 11:06:54,224 INFO [train.py:1114] (1/4) Epoch 11, batch 8200, loss[loss=0.2154, simple_loss=0.3103, pruned_loss=0.06021, over 4796.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2814, pruned_loss=0.05245, over 938002.20 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:06:57,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147232.0, ans=0.1
+2024-07-28 11:06:58,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=147232.0, ans=15.0
+2024-07-28 11:07:06,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=147258.66666666666, ans=0.5
+2024-07-28 11:07:35,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0
+2024-07-28 11:07:38,663 INFO [train.py:1114] (1/4) Epoch 11, batch 8250, loss[loss=0.174, simple_loss=0.2594, pruned_loss=0.04431, over 4896.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2819, pruned_loss=0.05287, over 938323.94 frames. ], batch size: 13, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:07:38,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147298.66666666666, ans=0.0
+2024-07-28 11:07:39,314 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.575e+01 5.968e+01 7.239e+01 1.462e+02, threshold=1.194e+02, percent-clipped=1.0
+2024-07-28 11:07:52,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=147312.0, ans=0.0
+2024-07-28 11:07:54,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=147312.0, ans=0.025
+2024-07-28 11:07:56,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147325.33333333334, ans=0.1
+2024-07-28 11:07:56,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=147325.33333333334, ans=0.2
+2024-07-28 11:07:58,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147325.33333333334, ans=0.1
+2024-07-28 11:08:09,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=147338.66666666666, ans=0.025
+2024-07-28 11:08:17,919 INFO [train.py:1114] (1/4) Epoch 11, batch 8300, loss[loss=0.2035, simple_loss=0.2913, pruned_loss=0.05784, over 4910.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2824, pruned_loss=0.05342, over 938607.90 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:08:18,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=12.0
+2024-07-28 11:08:29,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=147378.66666666666, ans=0.125
+2024-07-28 11:08:47,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=147418.66666666666, ans=0.1
+2024-07-28 11:08:49,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=147418.66666666666, ans=0.125
+2024-07-28 11:08:51,016 INFO [train.py:1114] (1/4) Epoch 11, batch 8350, loss[loss=0.2084, simple_loss=0.2992, pruned_loss=0.05881, over 4794.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2817, pruned_loss=0.05309, over 941354.63 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:08:51,646 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.686e+01 6.163e+01 6.949e+01 9.683e+01, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 11:09:11,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=147472.0, ans=0.125
+2024-07-28 11:09:25,411 INFO [train.py:1114] (1/4) Epoch 11, batch 8400, loss[loss=0.1738, simple_loss=0.2592, pruned_loss=0.04422, over 4778.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2822, pruned_loss=0.053, over 940229.71 frames. ], batch size: 12, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:09:34,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0
+2024-07-28 11:09:40,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=147525.33333333334, ans=0.125
+2024-07-28 11:09:41,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=147525.33333333334, ans=0.0
+2024-07-28 11:09:43,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147525.33333333334, ans=0.1
+2024-07-28 11:09:44,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147538.66666666666, ans=0.1
+2024-07-28 11:09:51,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=147552.0, ans=0.0
+2024-07-28 11:09:53,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147552.0, ans=0.1
+2024-07-28 11:09:53,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=147552.0, ans=0.07
+2024-07-28 11:09:55,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=147552.0, ans=0.2
+2024-07-28 11:09:57,652 INFO [train.py:1114] (1/4) Epoch 11, batch 8450, loss[loss=0.2047, simple_loss=0.2937, pruned_loss=0.0579, over 4825.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2832, pruned_loss=0.05357, over 938989.27 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0
+2024-07-28 11:09:58,224 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.711e+01 6.250e+01 7.138e+01 1.059e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-28 11:09:58,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0
+2024-07-28 11:10:01,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=147565.33333333334, ans=0.0
+2024-07-28 11:10:10,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147592.0, ans=0.125
+2024-07-28 11:10:10,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=147592.0, ans=0.0
+2024-07-28 11:10:11,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=147592.0, ans=10.0
+2024-07-28 11:10:29,742 INFO [train.py:1114] (1/4) Epoch 11, batch 8500, loss[loss=0.1477, simple_loss=0.2351, pruned_loss=0.03016, over 4631.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2819, pruned_loss=0.05322, over 938896.10 frames. ], batch size: 11, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:10:30,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.87 vs. limit=22.5
+2024-07-28 11:10:44,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=147658.66666666666, ans=0.125
+2024-07-28 11:10:55,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=147685.33333333334, ans=0.125
+2024-07-28 11:10:57,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147685.33333333334, ans=0.1
+2024-07-28 11:10:58,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=147685.33333333334, ans=0.0
+2024-07-28 11:10:59,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=147685.33333333334, ans=0.0
+2024-07-28 11:11:01,924 INFO [train.py:1114] (1/4) Epoch 11, batch 8550, loss[loss=0.1481, simple_loss=0.2242, pruned_loss=0.03601, over 4803.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2818, pruned_loss=0.0536, over 940348.06 frames. ], batch size: 11, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:11:03,200 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.482e+01 7.355e+01 1.079e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 11:11:03,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=147698.66666666666, ans=0.125
+2024-07-28 11:11:06,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0
+2024-07-28 11:11:09,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=147712.0, ans=0.125
+2024-07-28 11:11:15,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=147725.33333333334, ans=0.0
+2024-07-28 11:11:18,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=147725.33333333334, ans=0.125
+2024-07-28 11:11:24,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147738.66666666666, ans=0.1
+2024-07-28 11:11:28,019 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:11:33,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=15.0
+2024-07-28 11:11:34,226 INFO [train.py:1114] (1/4) Epoch 11, batch 8600, loss[loss=0.212, simple_loss=0.3091, pruned_loss=0.05744, over 4804.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2835, pruned_loss=0.05444, over 939972.57 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:11:42,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=147778.66666666666, ans=10.0
+2024-07-28 11:11:45,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147778.66666666666, ans=0.0
+2024-07-28 11:11:46,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.09 vs. limit=15.0
+2024-07-28 11:11:52,322 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:11:57,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=147805.33333333334, ans=0.125
+2024-07-28 11:11:58,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=147805.33333333334, ans=0.2
+2024-07-28 11:12:00,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=147818.66666666666, ans=0.125
+2024-07-28 11:12:05,722 INFO [train.py:1114] (1/4) Epoch 11, batch 8650, loss[loss=0.2379, simple_loss=0.3277, pruned_loss=0.07403, over 4906.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2828, pruned_loss=0.05413, over 940881.01 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:12:06,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.868e+01 5.716e+01 6.623e+01 8.030e+01 1.303e+02, threshold=1.325e+02, percent-clipped=1.0
+2024-07-28 11:12:15,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=147845.33333333334, ans=0.0
+2024-07-28 11:12:19,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=147858.66666666666, ans=0.0
+2024-07-28 11:12:25,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=147872.0, ans=0.5
+2024-07-28 11:12:28,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=147872.0, ans=0.125
+2024-07-28 11:12:34,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=147885.33333333334, ans=0.2
+2024-07-28 11:12:39,080 INFO [train.py:1114] (1/4) Epoch 11, batch 8700, loss[loss=0.2158, simple_loss=0.2912, pruned_loss=0.0702, over 4762.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2835, pruned_loss=0.05455, over 938401.71 frames. ], batch size: 13, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:13:01,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=147938.66666666666, ans=0.125
+2024-07-28 11:13:05,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.15 vs. limit=15.0
+2024-07-28 11:13:08,842 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:13:09,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147952.0, ans=0.125
+2024-07-28 11:13:11,400 INFO [train.py:1114] (1/4) Epoch 11, batch 8750, loss[loss=0.2147, simple_loss=0.3021, pruned_loss=0.06361, over 4683.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2841, pruned_loss=0.05487, over 936578.31 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0
+2024-07-28 11:13:12,641 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.668e+01 6.375e+01 7.547e+01 1.367e+02, threshold=1.275e+02, percent-clipped=1.0
+2024-07-28 11:13:42,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=148005.33333333334, ans=0.0
+2024-07-28 11:13:49,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=148018.66666666666, ans=0.2
+2024-07-28 11:13:51,465 INFO [train.py:1114] (1/4) Epoch 11, batch 8800, loss[loss=0.1983, simple_loss=0.2808, pruned_loss=0.05792, over 4929.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2839, pruned_loss=0.05467, over 937616.83 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:13:54,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=148032.0, ans=0.125
+2024-07-28 11:14:11,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148045.33333333334, ans=0.1
+2024-07-28 11:14:32,884 INFO [train.py:1114] (1/4) Epoch 11, batch 8850, loss[loss=0.2056, simple_loss=0.2971, pruned_loss=0.05709, over 4475.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2835, pruned_loss=0.05446, over 931838.72 frames. ], batch size: 21, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:14:32,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=148098.66666666666, ans=0.0
+2024-07-28 11:14:34,134 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.683e+01 6.364e+01 7.220e+01 1.136e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-28 11:14:36,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=148098.66666666666, ans=0.1
+2024-07-28 11:14:37,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=148098.66666666666, ans=0.0
+2024-07-28 11:14:44,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=148112.0, ans=0.125
+2024-07-28 11:15:03,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=148152.0, ans=0.125
+2024-07-28 11:15:05,035 INFO [train.py:1114] (1/4) Epoch 11, batch 8900, loss[loss=0.1754, simple_loss=0.263, pruned_loss=0.0439, over 4935.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2826, pruned_loss=0.05391, over 929824.77 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:15:05,351 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.59 vs. limit=15.0
+2024-07-28 11:15:07,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=148165.33333333334, ans=0.2
+2024-07-28 11:15:19,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=148192.0, ans=0.125
+2024-07-28 11:15:20,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=148192.0, ans=0.125
+2024-07-28 11:15:24,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=148205.33333333334, ans=0.2
+2024-07-28 11:15:27,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148205.33333333334, ans=0.125
+2024-07-28 11:15:30,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148218.66666666666, ans=0.125
+2024-07-28 11:15:33,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148218.66666666666, ans=0.1
+2024-07-28 11:15:37,385 INFO [train.py:1114] (1/4) Epoch 11, batch 8950, loss[loss=0.209, simple_loss=0.2949, pruned_loss=0.06158, over 4714.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.283, pruned_loss=0.05426, over 931079.62 frames. ], batch size: 22, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:15:37,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=148232.0, ans=22.5
+2024-07-28 11:15:38,187 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:15:38,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+01 5.642e+01 6.194e+01 7.205e+01 1.181e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 11:15:38,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=148232.0, ans=0.05
+2024-07-28 11:15:45,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.68 vs. limit=5.0
+2024-07-28 11:15:52,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.32 vs. limit=15.0
+2024-07-28 11:15:57,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=22.5
+2024-07-28 11:16:06,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148285.33333333334, ans=0.125
+2024-07-28 11:16:09,510 INFO [train.py:1114] (1/4) Epoch 11, batch 9000, loss[loss=0.1972, simple_loss=0.2908, pruned_loss=0.05177, over 4634.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2822, pruned_loss=0.05415, over 934060.92 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:16:09,511 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 11:16:15,846 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.2014, 2.3013, 3.9196, 2.0336], device='cuda:1')
+2024-07-28 11:16:17,136 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.2145, 4.3678, 4.3647, 4.9929], device='cuda:1')
+2024-07-28 11:16:19,890 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.0742, 3.1325, 1.7303, 3.5234, 2.7545, 3.1776, 3.4789, 3.5647],
+ device='cuda:1')
+2024-07-28 11:16:21,142 INFO [train.py:1146] (1/4) Epoch 11, validation: loss=0.1703, simple_loss=0.274, pruned_loss=0.03325, over 944034.00 frames.
+2024-07-28 11:16:21,143 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 11:16:52,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=148352.0, ans=0.125
+2024-07-28 11:16:53,498 INFO [train.py:1114] (1/4) Epoch 11, batch 9050, loss[loss=0.1744, simple_loss=0.2512, pruned_loss=0.04877, over 4481.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.281, pruned_loss=0.05376, over 934350.02 frames. ], batch size: 10, lr: 6.71e-03, grad_scale: 32.0
+2024-07-28 11:16:54,782 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.677e+01 6.450e+01 7.430e+01 1.132e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-28 11:17:00,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148378.66666666666, ans=0.125
+2024-07-28 11:17:03,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=148378.66666666666, ans=0.125
+2024-07-28 11:17:03,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=148378.66666666666, ans=0.0
+2024-07-28 11:17:05,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148392.0, ans=0.125
+2024-07-28 11:17:06,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=148392.0, ans=0.0
+2024-07-28 11:17:07,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=148392.0, ans=0.0
+2024-07-28 11:17:09,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=148392.0, ans=0.125
+2024-07-28 11:17:10,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=148392.0, ans=0.0
+2024-07-28 11:17:12,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=148405.33333333334, ans=0.0
+2024-07-28 11:17:20,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=148418.66666666666, ans=0.2
+2024-07-28 11:17:22,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=148418.66666666666, ans=0.125
+2024-07-28 11:17:25,678 INFO [train.py:1114] (1/4) Epoch 11, batch 9100, 
loss[loss=0.2112, simple_loss=0.2949, pruned_loss=0.0638, over 4941.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2801, pruned_loss=0.05337, over 937061.36 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:17:26,993 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:17:27,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=148432.0, ans=0.07 +2024-07-28 11:17:28,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=148432.0, ans=0.125 +2024-07-28 11:17:28,648 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.59 vs. limit=6.0 +2024-07-28 11:17:32,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=148445.33333333334, ans=0.0 +2024-07-28 11:17:32,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0 +2024-07-28 11:17:32,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.51 vs. limit=10.0 +2024-07-28 11:17:37,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=148445.33333333334, ans=0.1 +2024-07-28 11:17:45,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=148472.0, ans=0.125 +2024-07-28 11:17:55,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=148485.33333333334, ans=15.0 +2024-07-28 11:17:56,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.12 vs. limit=22.5 +2024-07-28 11:17:57,375 INFO [train.py:1114] (1/4) Epoch 11, batch 9150, loss[loss=0.1819, simple_loss=0.2758, pruned_loss=0.04396, over 4792.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2824, pruned_loss=0.05401, over 935867.88 frames. ], batch size: 14, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:17:58,678 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.452e+01 6.035e+01 6.657e+01 8.728e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 11:18:08,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148512.0, ans=0.125 +2024-07-28 11:18:10,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. 
limit=15.0 +2024-07-28 11:18:13,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=148525.33333333334, ans=0.025 +2024-07-28 11:18:14,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=148525.33333333334, ans=0.125 +2024-07-28 11:18:16,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=148525.33333333334, ans=0.2 +2024-07-28 11:18:17,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=148525.33333333334, ans=0.025 +2024-07-28 11:18:32,055 INFO [train.py:1114] (1/4) Epoch 11, batch 9200, loss[loss=0.1946, simple_loss=0.2809, pruned_loss=0.05419, over 4848.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.282, pruned_loss=0.05343, over 937525.08 frames. ], batch size: 12, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:18:55,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=148592.0, ans=0.125 +2024-07-28 11:18:58,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=148605.33333333334, ans=0.125 +2024-07-28 11:19:04,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=148618.66666666666, ans=0.2 +2024-07-28 11:19:09,950 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.62 vs. limit=6.0 +2024-07-28 11:19:10,155 INFO [train.py:1114] (1/4) Epoch 11, batch 9250, loss[loss=0.1895, simple_loss=0.2732, pruned_loss=0.05292, over 4642.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2808, pruned_loss=0.05313, over 938295.74 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:19:11,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.571e+01 5.944e+01 7.071e+01 9.935e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 11:19:23,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=148658.66666666666, ans=0.0 +2024-07-28 11:19:32,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148672.0, ans=0.1 +2024-07-28 11:19:39,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=148685.33333333334, ans=0.0 +2024-07-28 11:19:42,416 INFO [train.py:1114] (1/4) Epoch 11, batch 9300, loss[loss=0.1694, simple_loss=0.2639, pruned_loss=0.0374, over 4786.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2797, pruned_loss=0.05276, over 938212.28 frames. ], batch size: 12, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:19:44,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=148698.66666666666, ans=0.0 +2024-07-28 11:19:45,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.75 vs. 
limit=22.5 +2024-07-28 11:19:46,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=148698.66666666666, ans=0.0 +2024-07-28 11:19:53,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=148712.0, ans=0.125 +2024-07-28 11:19:57,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=148725.33333333334, ans=0.2 +2024-07-28 11:19:57,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=148725.33333333334, ans=0.0 +2024-07-28 11:20:08,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=148752.0, ans=0.125 +2024-07-28 11:20:14,402 INFO [train.py:1114] (1/4) Epoch 11, batch 9350, loss[loss=0.194, simple_loss=0.2599, pruned_loss=0.06399, over 4795.00 frames. ], tot_loss[loss=0.193, simple_loss=0.28, pruned_loss=0.05306, over 935326.34 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:20:15,614 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.532e+01 6.030e+01 6.752e+01 9.117e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 11:20:26,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=148792.0, ans=0.0 +2024-07-28 11:20:42,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=148818.66666666666, ans=15.0 +2024-07-28 11:20:45,874 INFO [train.py:1114] (1/4) Epoch 11, batch 9400, loss[loss=0.1639, simple_loss=0.2627, pruned_loss=0.03252, over 4684.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.281, pruned_loss=0.05335, over 933067.82 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:20:56,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=148845.33333333334, ans=0.125 +2024-07-28 11:21:00,409 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.30 vs. limit=15.0 +2024-07-28 11:21:00,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=148858.66666666666, ans=0.09899494936611666 +2024-07-28 11:21:12,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=148885.33333333334, ans=0.0 +2024-07-28 11:21:18,898 INFO [train.py:1114] (1/4) Epoch 11, batch 9450, loss[loss=0.1636, simple_loss=0.2364, pruned_loss=0.04539, over 4812.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2809, pruned_loss=0.05359, over 932248.76 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:21:20,119 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.492e+01 5.833e+01 6.605e+01 9.079e+01, threshold=1.167e+02, percent-clipped=0.0 +2024-07-28 11:21:32,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.02 vs. 
limit=15.0 +2024-07-28 11:21:35,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=148925.33333333334, ans=0.125 +2024-07-28 11:21:36,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-07-28 11:21:45,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=148952.0, ans=0.125 +2024-07-28 11:21:48,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=148952.0, ans=0.125 +2024-07-28 11:21:49,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=148965.33333333334, ans=0.04949747468305833 +2024-07-28 11:21:50,263 INFO [train.py:1114] (1/4) Epoch 11, batch 9500, loss[loss=0.1793, simple_loss=0.2608, pruned_loss=0.04888, over 4710.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2812, pruned_loss=0.05361, over 934792.35 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:21:52,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=148965.33333333334, ans=0.2 +2024-07-28 11:21:55,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=148965.33333333334, ans=0.025 +2024-07-28 11:22:08,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=148992.0, ans=0.2 +2024-07-28 11:22:11,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149005.33333333334, ans=0.125 +2024-07-28 11:22:15,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=149018.66666666666, ans=0.07 +2024-07-28 11:22:22,537 INFO [train.py:1114] (1/4) Epoch 11, batch 9550, loss[loss=0.1902, simple_loss=0.2646, pruned_loss=0.0579, over 4776.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2815, pruned_loss=0.0537, over 931811.86 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:22:23,735 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.584e+01 6.073e+01 6.801e+01 9.660e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 11:22:34,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=149058.66666666666, ans=0.125 +2024-07-28 11:22:41,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149072.0, ans=0.1 +2024-07-28 11:22:41,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149072.0, ans=0.125 +2024-07-28 11:22:53,735 INFO [train.py:1114] (1/4) Epoch 11, batch 9600, loss[loss=0.2212, simple_loss=0.299, pruned_loss=0.07165, over 3348.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2825, pruned_loss=0.05406, over 930760.34 frames. ], batch size: 37, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:22:57,338 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.09 vs. 
limit=15.0 +2024-07-28 11:23:00,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=149112.0, ans=0.0 +2024-07-28 11:23:06,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-28 11:23:10,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=149125.33333333334, ans=0.125 +2024-07-28 11:23:13,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149138.66666666666, ans=0.1 +2024-07-28 11:23:14,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=149138.66666666666, ans=0.0 +2024-07-28 11:23:25,443 INFO [train.py:1114] (1/4) Epoch 11, batch 9650, loss[loss=0.1962, simple_loss=0.2975, pruned_loss=0.04745, over 4816.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2822, pruned_loss=0.05387, over 926915.93 frames. ], batch size: 16, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:23:25,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=149165.33333333334, ans=0.0 +2024-07-28 11:23:26,661 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.812e+01 6.472e+01 7.420e+01 1.092e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 11:23:29,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=149165.33333333334, ans=0.125 +2024-07-28 11:23:30,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149165.33333333334, ans=0.125 +2024-07-28 11:23:32,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=149178.66666666666, ans=0.125 +2024-07-28 11:23:40,070 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:24:00,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149205.33333333334, ans=0.1 +2024-07-28 11:24:08,278 INFO [train.py:1114] (1/4) Epoch 11, batch 9700, loss[loss=0.2142, simple_loss=0.2977, pruned_loss=0.06536, over 4116.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2832, pruned_loss=0.05437, over 924875.46 frames. 
], batch size: 25, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:24:14,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=149245.33333333334, ans=10.0 +2024-07-28 11:24:14,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=149245.33333333334, ans=0.125 +2024-07-28 11:24:15,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149245.33333333334, ans=0.1 +2024-07-28 11:24:24,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=149258.66666666666, ans=0.0 +2024-07-28 11:24:30,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=149272.0, ans=0.125 +2024-07-28 11:24:30,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.75 vs. limit=15.0 +2024-07-28 11:24:32,091 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.11 vs. limit=15.0 +2024-07-28 11:24:34,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=149285.33333333334, ans=0.125 +2024-07-28 11:24:38,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.55 vs. limit=22.5 +2024-07-28 11:24:39,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149298.66666666666, ans=0.125 +2024-07-28 11:24:39,667 INFO [train.py:1114] (1/4) Epoch 11, batch 9750, loss[loss=0.1978, simple_loss=0.2782, pruned_loss=0.05876, over 4684.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2837, pruned_loss=0.05462, over 925511.20 frames. ], batch size: 15, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:24:40,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=149298.66666666666, ans=0.125 +2024-07-28 11:24:40,894 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.600e+01 6.430e+01 7.398e+01 1.191e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 11:24:47,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.55 vs. limit=15.0 +2024-07-28 11:24:49,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149312.0, ans=0.125 +2024-07-28 11:25:07,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149352.0, ans=0.1 +2024-07-28 11:25:13,041 INFO [train.py:1114] (1/4) Epoch 11, batch 9800, loss[loss=0.1644, simple_loss=0.2518, pruned_loss=0.03852, over 4706.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2823, pruned_loss=0.05427, over 925065.86 frames. 
], batch size: 12, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:25:18,649 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:25:35,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=149405.33333333334, ans=0.2 +2024-07-28 11:25:38,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=149418.66666666666, ans=0.125 +2024-07-28 11:25:43,719 INFO [train.py:1114] (1/4) Epoch 11, batch 9850, loss[loss=0.2028, simple_loss=0.2939, pruned_loss=0.05587, over 4899.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2831, pruned_loss=0.05505, over 927428.06 frames. ], batch size: 15, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:25:44,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.760e+01 6.754e+01 7.559e+01 1.117e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 11:25:51,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.84 vs. limit=22.5 +2024-07-28 11:25:53,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=149445.33333333334, ans=0.0 +2024-07-28 11:25:55,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=149445.33333333334, ans=0.025 +2024-07-28 11:25:59,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-07-28 11:25:59,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=6.0 +2024-07-28 11:26:00,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-07-28 11:26:02,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0 +2024-07-28 11:26:09,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=149485.33333333334, ans=0.125 +2024-07-28 11:26:14,538 INFO [train.py:1114] (1/4) Epoch 11, batch 9900, loss[loss=0.1857, simple_loss=0.2775, pruned_loss=0.04697, over 4829.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2846, pruned_loss=0.05578, over 927060.08 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:26:17,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=149498.66666666666, ans=0.07 +2024-07-28 11:26:25,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.18 vs. limit=12.0 +2024-07-28 11:26:26,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149525.33333333334, ans=0.125 +2024-07-28 11:26:27,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.69 vs. 
limit=12.0 +2024-07-28 11:26:28,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=149525.33333333334, ans=0.125 +2024-07-28 11:26:45,833 INFO [train.py:1114] (1/4) Epoch 11, batch 9950, loss[loss=0.1824, simple_loss=0.2572, pruned_loss=0.05378, over 4797.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2844, pruned_loss=0.0559, over 929553.06 frames. ], batch size: 11, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:26:47,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.848e+01 6.460e+01 7.731e+01 1.083e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 11:26:52,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=149578.66666666666, ans=0.0 +2024-07-28 11:26:55,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=149578.66666666666, ans=0.05 +2024-07-28 11:27:11,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=149618.66666666666, ans=0.0 +2024-07-28 11:27:18,156 INFO [train.py:1114] (1/4) Epoch 11, batch 10000, loss[loss=0.1885, simple_loss=0.2837, pruned_loss=0.04671, over 4624.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2863, pruned_loss=0.0566, over 926807.18 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:27:18,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=149632.0, ans=0.125 +2024-07-28 11:27:19,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=149632.0, ans=0.07 +2024-07-28 11:27:20,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=149632.0, ans=0.95 +2024-07-28 11:27:21,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=149632.0, ans=0.0 +2024-07-28 11:27:34,112 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.84 vs. limit=15.0 +2024-07-28 11:27:36,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149658.66666666666, ans=0.1 +2024-07-28 11:27:45,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=149685.33333333334, ans=0.1 +2024-07-28 11:27:50,526 INFO [train.py:1114] (1/4) Epoch 11, batch 10050, loss[loss=0.2853, simple_loss=0.3473, pruned_loss=0.1117, over 3210.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2909, pruned_loss=0.05907, over 914708.24 frames. 
], batch size: 35, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:27:51,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=149698.66666666666, ans=0.0 +2024-07-28 11:27:51,874 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.828e+01 6.328e+01 6.971e+01 1.016e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 11:27:52,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=149698.66666666666, ans=0.125 +2024-07-28 11:28:03,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.73 vs. limit=15.0 +2024-07-28 11:28:05,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=149725.33333333334, ans=0.025 +2024-07-28 11:28:13,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=149738.66666666666, ans=0.0 +2024-07-28 11:28:20,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=149752.0, ans=0.0 +2024-07-28 11:28:24,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.16 vs. limit=6.0 +2024-07-28 11:28:24,933 INFO [train.py:1114] (1/4) Epoch 11, batch 10100, loss[loss=0.226, simple_loss=0.3065, pruned_loss=0.07269, over 3381.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2956, pruned_loss=0.06378, over 862018.72 frames. ], batch size: 35, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:28:46,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.18 vs. limit=10.0 +2024-07-28 11:28:52,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.95 vs. limit=22.5 +2024-07-28 11:28:57,731 INFO [train.py:1114] (1/4) Epoch 11, batch 10150, loss[loss=0.2249, simple_loss=0.3046, pruned_loss=0.07253, over 3444.00 frames. ], tot_loss[loss=0.219, simple_loss=0.3006, pruned_loss=0.06867, over 819581.75 frames. ], batch size: 35, lr: 6.67e-03, grad_scale: 32.0 +2024-07-28 11:28:58,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.89 vs. limit=22.5 +2024-07-28 11:28:58,996 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.659e+01 6.801e+01 7.178e+01 7.670e+01 2.138e+02, threshold=1.436e+02, percent-clipped=2.0 +2024-07-28 11:29:01,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149832.0, ans=0.125 +2024-07-28 11:29:07,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.77 vs. limit=15.0 +2024-07-28 11:29:11,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=149858.66666666666, ans=0.0 +2024-07-28 11:29:17,324 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.72 vs. 
limit=22.5 +2024-07-28 11:29:18,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=149872.0, ans=0.125 +2024-07-28 11:29:21,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.21 vs. limit=15.0 +2024-07-28 11:29:21,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=149872.0, ans=10.0 +2024-07-28 11:29:22,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=149885.33333333334, ans=0.125 +2024-07-28 11:29:29,435 INFO [train.py:1114] (1/4) Epoch 11, batch 10200, loss[loss=0.2515, simple_loss=0.3276, pruned_loss=0.08769, over 3292.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3028, pruned_loss=0.07146, over 788207.83 frames. ], batch size: 37, lr: 6.67e-03, grad_scale: 32.0 +2024-07-28 11:30:30,086 INFO [train.py:1114] (1/4) Epoch 12, batch 0, loss[loss=0.2253, simple_loss=0.329, pruned_loss=0.06081, over 4845.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.329, pruned_loss=0.06081, over 4845.00 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:30:30,086 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 11:30:34,990 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.0065, 2.0629, 4.0191, 2.3626], device='cuda:1') +2024-07-28 11:30:38,156 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.5563, 5.1240, 5.3303, 5.2479], device='cuda:1') +2024-07-28 11:30:49,828 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.171, simple_loss=0.2765, pruned_loss=0.03276, over 944034.00 frames. +2024-07-28 11:30:49,829 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 11:31:23,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=149954.66666666666, ans=0.125 +2024-07-28 11:31:27,526 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 6.296e+01 6.981e+01 7.560e+01 1.062e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 11:31:31,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=149968.0, ans=0.2 +2024-07-28 11:31:40,985 INFO [train.py:1114] (1/4) Epoch 12, batch 50, loss[loss=0.1663, simple_loss=0.2449, pruned_loss=0.04387, over 4606.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2855, pruned_loss=0.05404, over 206153.79 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:31:42,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.03 vs. 
limit=15.0 +2024-07-28 11:31:47,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=149994.66666666666, ans=0.95 +2024-07-28 11:31:49,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=150008.0, ans=0.125 +2024-07-28 11:31:53,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.31 vs. limit=15.0 +2024-07-28 11:31:53,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150008.0, ans=0.125 +2024-07-28 11:31:57,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-07-28 11:32:12,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150048.0, ans=0.1 +2024-07-28 11:32:15,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=150048.0, ans=0.2 +2024-07-28 11:32:17,176 INFO [train.py:1114] (1/4) Epoch 12, batch 100, loss[loss=0.2038, simple_loss=0.2849, pruned_loss=0.06134, over 4639.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2848, pruned_loss=0.05376, over 364859.14 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:32:18,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=150061.33333333334, ans=0.0 +2024-07-28 11:32:21,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=150061.33333333334, ans=0.125 +2024-07-28 11:32:25,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150074.66666666666, ans=0.1 +2024-07-28 11:32:29,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0 +2024-07-28 11:32:39,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+01 5.482e+01 5.996e+01 6.450e+01 1.001e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 11:32:44,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=150101.33333333334, ans=0.0 +2024-07-28 11:32:44,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=150101.33333333334, ans=0.125 +2024-07-28 11:32:51,992 INFO [train.py:1114] (1/4) Epoch 12, batch 150, loss[loss=0.158, simple_loss=0.2479, pruned_loss=0.03404, over 4613.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2828, pruned_loss=0.0527, over 493809.85 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:33:07,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.35 vs. 
limit=15.0 +2024-07-28 11:33:11,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=150154.66666666666, ans=0.125 +2024-07-28 11:33:16,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=150168.0, ans=0.125 +2024-07-28 11:33:23,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.23 vs. limit=15.0 +2024-07-28 11:33:29,115 INFO [train.py:1114] (1/4) Epoch 12, batch 200, loss[loss=0.1981, simple_loss=0.2889, pruned_loss=0.05369, over 4497.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2819, pruned_loss=0.05322, over 593674.83 frames. ], batch size: 21, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:33:48,810 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.805e+01 6.723e+01 7.880e+01 1.326e+02, threshold=1.345e+02, percent-clipped=1.0 +2024-07-28 11:33:51,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=150234.66666666666, ans=0.125 +2024-07-28 11:33:54,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=15.0 +2024-07-28 11:34:00,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=150248.0, ans=0.125 +2024-07-28 11:34:02,288 INFO [train.py:1114] (1/4) Epoch 12, batch 250, loss[loss=0.2042, simple_loss=0.3032, pruned_loss=0.05258, over 4620.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2827, pruned_loss=0.05325, over 670344.36 frames. ], batch size: 16, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:34:23,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=150301.33333333334, ans=0.125 +2024-07-28 11:34:23,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-07-28 11:34:23,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=150301.33333333334, ans=10.0 +2024-07-28 11:34:30,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=150314.66666666666, ans=0.0 +2024-07-28 11:34:31,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=150314.66666666666, ans=0.035 +2024-07-28 11:34:35,839 INFO [train.py:1114] (1/4) Epoch 12, batch 300, loss[loss=0.174, simple_loss=0.2776, pruned_loss=0.0352, over 4822.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2817, pruned_loss=0.05312, over 730263.79 frames. 
], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:34:41,611 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:34:55,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=150354.66666666666, ans=0.125 +2024-07-28 11:34:56,212 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.570e+01 6.129e+01 6.973e+01 1.064e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 11:34:59,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=150368.0, ans=0.05 +2024-07-28 11:35:01,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.70 vs. limit=15.0 +2024-07-28 11:35:07,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=150381.33333333334, ans=0.0 +2024-07-28 11:35:09,569 INFO [train.py:1114] (1/4) Epoch 12, batch 350, loss[loss=0.1727, simple_loss=0.2497, pruned_loss=0.04785, over 4954.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2803, pruned_loss=0.05218, over 776299.54 frames. ], batch size: 12, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:35:09,927 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-28 11:35:14,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=150394.66666666666, ans=0.125 +2024-07-28 11:35:32,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150434.66666666666, ans=0.0 +2024-07-28 11:35:35,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150448.0, ans=0.1 +2024-07-28 11:35:36,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150448.0, ans=0.1 +2024-07-28 11:35:42,763 INFO [train.py:1114] (1/4) Epoch 12, batch 400, loss[loss=0.1942, simple_loss=0.2918, pruned_loss=0.04835, over 4687.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2795, pruned_loss=0.05201, over 813841.24 frames. ], batch size: 13, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:04,833 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.661e+01 6.256e+01 7.189e+01 1.032e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 11:36:06,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150501.33333333334, ans=0.125 +2024-07-28 11:36:11,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150514.66666666666, ans=0.125 +2024-07-28 11:36:16,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=150514.66666666666, ans=0.2 +2024-07-28 11:36:18,205 INFO [train.py:1114] (1/4) Epoch 12, batch 450, loss[loss=0.2464, simple_loss=0.3252, pruned_loss=0.08379, over 4633.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2812, pruned_loss=0.05318, over 838861.18 frames. 
], batch size: 13, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:24,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.63 vs. limit=15.0 +2024-07-28 11:36:26,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=150541.33333333334, ans=0.125 +2024-07-28 11:36:35,951 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:36:45,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=150581.33333333334, ans=0.125 +2024-07-28 11:36:48,054 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:36:51,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150594.66666666666, ans=0.0 +2024-07-28 11:36:51,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=150594.66666666666, ans=15.0 +2024-07-28 11:36:51,730 INFO [train.py:1114] (1/4) Epoch 12, batch 500, loss[loss=0.2035, simple_loss=0.292, pruned_loss=0.05751, over 4678.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2803, pruned_loss=0.05237, over 861431.56 frames. ], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:59,941 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:37:14,896 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.527e+01 6.124e+01 7.195e+01 1.120e+02, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 11:37:17,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150634.66666666666, ans=0.1 +2024-07-28 11:37:30,001 INFO [train.py:1114] (1/4) Epoch 12, batch 550, loss[loss=0.2324, simple_loss=0.3241, pruned_loss=0.07032, over 4623.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.281, pruned_loss=0.05313, over 877699.89 frames. ], batch size: 17, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:37:46,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=150688.0, ans=0.125 +2024-07-28 11:38:01,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150714.66666666666, ans=0.1 +2024-07-28 11:38:08,497 INFO [train.py:1114] (1/4) Epoch 12, batch 600, loss[loss=0.1722, simple_loss=0.2534, pruned_loss=0.04554, over 4630.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2814, pruned_loss=0.05301, over 892098.01 frames. 
], batch size: 16, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:38:11,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150728.0, ans=0.125 +2024-07-28 11:38:21,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=150741.33333333334, ans=0.025 +2024-07-28 11:38:23,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=150754.66666666666, ans=0.125 +2024-07-28 11:38:30,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=6.0 +2024-07-28 11:38:30,305 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.714e+01 6.286e+01 7.173e+01 1.255e+02, threshold=1.257e+02, percent-clipped=1.0 +2024-07-28 11:38:44,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=15.0 +2024-07-28 11:38:49,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.62 vs. limit=22.5 +2024-07-28 11:38:53,096 INFO [train.py:1114] (1/4) Epoch 12, batch 650, loss[loss=0.1612, simple_loss=0.2575, pruned_loss=0.03247, over 4762.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2798, pruned_loss=0.05251, over 903946.00 frames. ], batch size: 13, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:38:53,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=150794.66666666666, ans=0.125 +2024-07-28 11:39:02,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150794.66666666666, ans=0.125 +2024-07-28 11:39:04,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0 +2024-07-28 11:39:05,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=150808.0, ans=0.0 +2024-07-28 11:39:18,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=150821.33333333334, ans=0.0 +2024-07-28 11:39:22,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=150834.66666666666, ans=0.125 +2024-07-28 11:39:24,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=150834.66666666666, ans=0.05 +2024-07-28 11:39:31,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=150848.0, ans=0.125 +2024-07-28 11:39:38,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150848.0, ans=0.0 +2024-07-28 11:39:40,042 INFO [train.py:1114] (1/4) Epoch 12, batch 700, loss[loss=0.1775, simple_loss=0.2476, pruned_loss=0.05368, over 4635.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2802, pruned_loss=0.0525, over 911470.45 frames. 
], batch size: 12, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:39:40,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=150861.33333333334, ans=0.125 +2024-07-28 11:39:48,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=150874.66666666666, ans=0.0 +2024-07-28 11:39:55,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.40 vs. limit=15.0 +2024-07-28 11:40:00,047 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+01 5.630e+01 6.208e+01 7.148e+01 1.083e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 11:40:00,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150901.33333333334, ans=0.125 +2024-07-28 11:40:13,768 INFO [train.py:1114] (1/4) Epoch 12, batch 750, loss[loss=0.1767, simple_loss=0.2735, pruned_loss=0.03999, over 4693.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.28, pruned_loss=0.05263, over 918166.62 frames. ], batch size: 13, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:40:15,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=150928.0, ans=0.2 +2024-07-28 11:40:19,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=150928.0, ans=0.07 +2024-07-28 11:40:36,578 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:40:37,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.40 vs. limit=22.5 +2024-07-28 11:40:47,001 INFO [train.py:1114] (1/4) Epoch 12, batch 800, loss[loss=0.2171, simple_loss=0.2947, pruned_loss=0.06982, over 4861.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2804, pruned_loss=0.05293, over 922997.53 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:40:54,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.43 vs. limit=15.0 +2024-07-28 11:41:02,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=151021.33333333334, ans=0.0 +2024-07-28 11:41:06,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.726e+01 6.208e+01 6.822e+01 1.017e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 11:41:20,182 INFO [train.py:1114] (1/4) Epoch 12, batch 850, loss[loss=0.2506, simple_loss=0.3316, pruned_loss=0.08474, over 4656.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2793, pruned_loss=0.05248, over 927135.08 frames. 
], batch size: 14, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:41:37,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=151088.0, ans=0.125 +2024-07-28 11:41:40,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=151101.33333333334, ans=22.5 +2024-07-28 11:41:41,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151101.33333333334, ans=0.0 +2024-07-28 11:41:43,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=151101.33333333334, ans=0.09899494936611666 +2024-07-28 11:41:55,112 INFO [train.py:1114] (1/4) Epoch 12, batch 900, loss[loss=0.1771, simple_loss=0.2517, pruned_loss=0.05124, over 4836.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2799, pruned_loss=0.05276, over 928148.99 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0 +2024-07-28 11:41:57,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151128.0, ans=0.1 +2024-07-28 11:42:05,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151141.33333333334, ans=0.1 +2024-07-28 11:42:14,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=151168.0, ans=0.0 +2024-07-28 11:42:15,735 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.655e+01 6.355e+01 7.195e+01 9.950e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 11:42:16,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-07-28 11:42:24,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.54 vs. limit=15.0 +2024-07-28 11:42:28,520 INFO [train.py:1114] (1/4) Epoch 12, batch 950, loss[loss=0.1866, simple_loss=0.272, pruned_loss=0.05065, over 4769.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2801, pruned_loss=0.05257, over 929548.14 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0 +2024-07-28 11:42:29,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=151194.66666666666, ans=0.0 +2024-07-28 11:42:33,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=151194.66666666666, ans=0.125 +2024-07-28 11:42:36,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=151208.0, ans=0.0 +2024-07-28 11:42:37,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.45 vs. limit=22.5 +2024-07-28 11:42:55,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151248.0, ans=0.1 +2024-07-28 11:43:02,360 INFO [train.py:1114] (1/4) Epoch 12, batch 1000, loss[loss=0.1907, simple_loss=0.2728, pruned_loss=0.05434, over 4957.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2808, pruned_loss=0.05291, over 928871.31 frames. 
], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:43:11,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=151274.66666666666, ans=0.0 +2024-07-28 11:43:15,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=151288.0, ans=0.0 +2024-07-28 11:43:18,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151288.0, ans=0.1 +2024-07-28 11:43:19,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=151288.0, ans=0.125 +2024-07-28 11:43:20,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-28 11:43:21,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=151288.0, ans=0.015 +2024-07-28 11:43:25,150 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.535e+01 6.224e+01 7.277e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 11:43:37,737 INFO [train.py:1114] (1/4) Epoch 12, batch 1050, loss[loss=0.185, simple_loss=0.2666, pruned_loss=0.05174, over 4873.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2801, pruned_loss=0.05283, over 931308.84 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:43:45,537 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 11:43:46,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151341.33333333334, ans=0.125 +2024-07-28 11:44:00,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=151354.66666666666, ans=0.125 +2024-07-28 11:44:10,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151381.33333333334, ans=0.125 +2024-07-28 11:44:13,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=151381.33333333334, ans=0.125 +2024-07-28 11:44:17,156 INFO [train.py:1114] (1/4) Epoch 12, batch 1100, loss[loss=0.2241, simple_loss=0.303, pruned_loss=0.07259, over 4897.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2798, pruned_loss=0.05279, over 933853.49 frames. 
], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:44:18,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=151394.66666666666, ans=0.125 +2024-07-28 11:44:19,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=151394.66666666666, ans=0.0 +2024-07-28 11:44:26,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=151408.0, ans=0.125 +2024-07-28 11:44:30,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=151421.33333333334, ans=0.2 +2024-07-28 11:44:38,701 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.539e+01 6.009e+01 6.753e+01 8.123e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 11:44:40,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=151434.66666666666, ans=0.2 +2024-07-28 11:44:48,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=151448.0, ans=0.125 +2024-07-28 11:44:48,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.81 vs. limit=10.0 +2024-07-28 11:44:50,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151448.0, ans=0.1 +2024-07-28 11:44:51,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=151448.0, ans=0.09899494936611666 +2024-07-28 11:44:53,939 INFO [train.py:1114] (1/4) Epoch 12, batch 1150, loss[loss=0.1854, simple_loss=0.2724, pruned_loss=0.04924, over 4890.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2804, pruned_loss=0.05267, over 933418.45 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:44:56,456 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:44:57,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=151461.33333333334, ans=0.07 +2024-07-28 11:45:03,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=151474.66666666666, ans=0.0 +2024-07-28 11:45:05,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.97 vs. limit=12.0 +2024-07-28 11:45:09,480 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. 
limit=15.0 +2024-07-28 11:45:10,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151488.0, ans=0.1 +2024-07-28 11:45:12,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=151488.0, ans=0.0 +2024-07-28 11:45:13,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=151488.0, ans=0.2 +2024-07-28 11:45:14,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=151501.33333333334, ans=0.2 +2024-07-28 11:45:15,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=151501.33333333334, ans=0.125 +2024-07-28 11:45:36,564 INFO [train.py:1114] (1/4) Epoch 12, batch 1200, loss[loss=0.1983, simple_loss=0.2953, pruned_loss=0.05062, over 4869.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2815, pruned_loss=0.05324, over 932850.16 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:45:38,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=151528.0, ans=0.0 +2024-07-28 11:45:39,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=151528.0, ans=0.125 +2024-07-28 11:45:44,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=151541.33333333334, ans=0.2 +2024-07-28 11:45:46,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151541.33333333334, ans=0.1 +2024-07-28 11:45:57,707 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.539e+01 6.207e+01 7.047e+01 1.080e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 11:45:58,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151568.0, ans=0.1 +2024-07-28 11:46:01,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=151568.0, ans=0.125 +2024-07-28 11:46:04,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151581.33333333334, ans=0.1 +2024-07-28 11:46:10,284 INFO [train.py:1114] (1/4) Epoch 12, batch 1250, loss[loss=0.2203, simple_loss=0.2968, pruned_loss=0.07195, over 4794.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2805, pruned_loss=0.05267, over 937019.67 frames. ], batch size: 15, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:46:19,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151608.0, ans=0.1 +2024-07-28 11:46:22,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-07-28 11:46:23,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.67 vs. 
limit=15.0 +2024-07-28 11:46:38,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151648.0, ans=0.125 +2024-07-28 11:46:43,308 INFO [train.py:1114] (1/4) Epoch 12, batch 1300, loss[loss=0.2189, simple_loss=0.3065, pruned_loss=0.06562, over 4663.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.281, pruned_loss=0.05315, over 938515.71 frames. ], batch size: 19, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:46:55,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151674.66666666666, ans=0.0 +2024-07-28 11:46:58,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=151688.0, ans=0.125 +2024-07-28 11:47:03,819 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.669e+01 6.218e+01 7.134e+01 9.799e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 11:47:13,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151714.66666666666, ans=0.125 +2024-07-28 11:47:13,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-07-28 11:47:16,367 INFO [train.py:1114] (1/4) Epoch 12, batch 1350, loss[loss=0.1971, simple_loss=0.2867, pruned_loss=0.05374, over 4756.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2808, pruned_loss=0.05276, over 940577.95 frames. ], batch size: 13, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:47:39,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=151754.66666666666, ans=0.125 +2024-07-28 11:47:41,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151768.0, ans=0.0 +2024-07-28 11:47:41,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=151768.0, ans=0.0 +2024-07-28 11:47:43,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151768.0, ans=0.125 +2024-07-28 11:47:43,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=151768.0, ans=0.025 +2024-07-28 11:47:45,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151768.0, ans=0.1 +2024-07-28 11:47:47,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=151781.33333333334, ans=0.0 +2024-07-28 11:47:54,334 INFO [train.py:1114] (1/4) Epoch 12, batch 1400, loss[loss=0.1839, simple_loss=0.2602, pruned_loss=0.05377, over 4703.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2805, pruned_loss=0.05296, over 942665.63 frames. 
], batch size: 11, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:47:57,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=151794.66666666666, ans=0.125 +2024-07-28 11:48:10,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=151821.33333333334, ans=0.125 +2024-07-28 11:48:16,693 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.713e+01 6.249e+01 7.424e+01 1.107e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 11:48:18,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151834.66666666666, ans=0.125 +2024-07-28 11:48:26,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=151848.0, ans=0.0 +2024-07-28 11:48:29,487 INFO [train.py:1114] (1/4) Epoch 12, batch 1450, loss[loss=0.2217, simple_loss=0.3119, pruned_loss=0.06572, over 4688.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2817, pruned_loss=0.05314, over 942778.89 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:48:59,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=151914.66666666666, ans=0.2 +2024-07-28 11:49:00,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151914.66666666666, ans=0.1 +2024-07-28 11:49:04,470 INFO [train.py:1114] (1/4) Epoch 12, batch 1500, loss[loss=0.216, simple_loss=0.3019, pruned_loss=0.06501, over 4802.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2826, pruned_loss=0.05351, over 942499.60 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:49:17,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151941.33333333334, ans=0.1 +2024-07-28 11:49:20,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=151941.33333333334, ans=0.015 +2024-07-28 11:49:26,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=151954.66666666666, ans=0.125 +2024-07-28 11:49:28,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151968.0, ans=0.1 +2024-07-28 11:49:29,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.582e+01 5.945e+01 6.654e+01 9.521e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 11:49:33,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=151968.0, ans=0.125 +2024-07-28 11:49:59,628 INFO [train.py:1114] (1/4) Epoch 12, batch 1550, loss[loss=0.1611, simple_loss=0.2653, pruned_loss=0.02842, over 4899.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2817, pruned_loss=0.05283, over 938478.33 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 16.0 +2024-07-28 11:50:17,646 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.53 vs. 
limit=10.0 +2024-07-28 11:50:25,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=152034.66666666666, ans=0.125 +2024-07-28 11:50:34,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=152061.33333333334, ans=0.0 +2024-07-28 11:50:35,401 INFO [train.py:1114] (1/4) Epoch 12, batch 1600, loss[loss=0.1811, simple_loss=0.2797, pruned_loss=0.04125, over 4880.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2805, pruned_loss=0.05237, over 936952.71 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:50:45,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152074.66666666666, ans=0.1 +2024-07-28 11:50:50,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=152088.0, ans=0.0 +2024-07-28 11:50:59,239 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.721e+01 6.309e+01 7.092e+01 1.066e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 11:51:00,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-07-28 11:51:15,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.22 vs. limit=22.5 +2024-07-28 11:51:16,030 INFO [train.py:1114] (1/4) Epoch 12, batch 1650, loss[loss=0.1899, simple_loss=0.2867, pruned_loss=0.04661, over 4659.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2806, pruned_loss=0.05222, over 937318.17 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:51:19,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.78 vs. limit=15.0 +2024-07-28 11:51:21,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=152128.0, ans=0.0 +2024-07-28 11:51:25,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152141.33333333334, ans=0.125 +2024-07-28 11:51:27,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.01 vs. limit=15.0 +2024-07-28 11:51:46,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-07-28 11:51:49,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.74 vs. 
limit=22.5 +2024-07-28 11:51:53,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152168.0, ans=0.0 +2024-07-28 11:51:58,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152181.33333333334, ans=0.125 +2024-07-28 11:52:03,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152181.33333333334, ans=0.1 +2024-07-28 11:52:05,264 INFO [train.py:1114] (1/4) Epoch 12, batch 1700, loss[loss=0.1454, simple_loss=0.2303, pruned_loss=0.03024, over 4704.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2798, pruned_loss=0.0517, over 939095.35 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:52:09,468 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:52:10,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152194.66666666666, ans=0.1 +2024-07-28 11:52:10,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=152194.66666666666, ans=0.125 +2024-07-28 11:52:13,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.41 vs. limit=22.5 +2024-07-28 11:52:20,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=152221.33333333334, ans=0.125 +2024-07-28 11:52:26,630 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+01 5.663e+01 6.309e+01 7.408e+01 1.033e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 11:52:32,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=152248.0, ans=0.125 +2024-07-28 11:52:36,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152248.0, ans=0.0 +2024-07-28 11:52:38,672 INFO [train.py:1114] (1/4) Epoch 12, batch 1750, loss[loss=0.1383, simple_loss=0.2157, pruned_loss=0.0304, over 4799.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2793, pruned_loss=0.05201, over 940176.52 frames. 
], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:52:46,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152274.66666666666, ans=0.1 +2024-07-28 11:52:46,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=152274.66666666666, ans=15.0 +2024-07-28 11:52:50,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=152274.66666666666, ans=10.0 +2024-07-28 11:52:55,923 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:52:56,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152288.0, ans=0.125 +2024-07-28 11:53:07,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=152314.66666666666, ans=0.2 +2024-07-28 11:53:12,209 INFO [train.py:1114] (1/4) Epoch 12, batch 1800, loss[loss=0.1923, simple_loss=0.2934, pruned_loss=0.04561, over 4636.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.279, pruned_loss=0.05173, over 940758.09 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:53:12,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=152328.0, ans=0.0 +2024-07-28 11:53:15,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=152328.0, ans=0.125 +2024-07-28 11:53:18,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152341.33333333334, ans=0.1 +2024-07-28 11:53:19,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.39 vs. limit=22.5 +2024-07-28 11:53:23,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=152341.33333333334, ans=0.125 +2024-07-28 11:53:30,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=152354.66666666666, ans=0.125 +2024-07-28 11:53:32,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.99 vs. limit=22.5 +2024-07-28 11:53:35,386 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+01 5.693e+01 6.293e+01 7.294e+01 9.358e+01, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 11:53:36,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=152368.0, ans=0.125 +2024-07-28 11:53:41,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152381.33333333334, ans=0.1 +2024-07-28 11:53:42,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.77 vs. 
limit=10.0 +2024-07-28 11:53:43,672 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:53:49,303 INFO [train.py:1114] (1/4) Epoch 12, batch 1850, loss[loss=0.1997, simple_loss=0.3024, pruned_loss=0.04851, over 4819.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2791, pruned_loss=0.05194, over 940854.60 frames. ], batch size: 14, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:53:52,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=152394.66666666666, ans=0.125 +2024-07-28 11:53:55,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.48 vs. limit=15.0 +2024-07-28 11:53:59,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.29 vs. limit=22.5 +2024-07-28 11:54:08,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152421.33333333334, ans=0.1 +2024-07-28 11:54:09,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152434.66666666666, ans=0.1 +2024-07-28 11:54:18,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152448.0, ans=0.125 +2024-07-28 11:54:23,278 INFO [train.py:1114] (1/4) Epoch 12, batch 1900, loss[loss=0.2134, simple_loss=0.2998, pruned_loss=0.0635, over 4668.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2796, pruned_loss=0.0519, over 941709.13 frames. ], batch size: 14, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:54:28,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=152461.33333333334, ans=0.125 +2024-07-28 11:54:44,566 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.599e+01 6.321e+01 7.441e+01 1.076e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 11:54:52,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=152514.66666666666, ans=0.0 +2024-07-28 11:54:56,300 INFO [train.py:1114] (1/4) Epoch 12, batch 1950, loss[loss=0.1523, simple_loss=0.2528, pruned_loss=0.02595, over 4889.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2799, pruned_loss=0.05155, over 943650.51 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:54:58,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.06 vs. 
limit=12.0 +2024-07-28 11:55:05,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152541.33333333334, ans=0.125 +2024-07-28 11:55:06,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=152541.33333333334, ans=0.0 +2024-07-28 11:55:08,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=152541.33333333334, ans=0.2 +2024-07-28 11:55:12,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=152554.66666666666, ans=0.5 +2024-07-28 11:55:31,682 INFO [train.py:1114] (1/4) Epoch 12, batch 2000, loss[loss=0.1805, simple_loss=0.2696, pruned_loss=0.04569, over 4828.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2808, pruned_loss=0.05233, over 940878.90 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:55:41,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=152608.0, ans=0.125 +2024-07-28 11:55:52,942 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.820e+01 6.521e+01 7.809e+01 1.085e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 11:56:07,049 INFO [train.py:1114] (1/4) Epoch 12, batch 2050, loss[loss=0.1647, simple_loss=0.25, pruned_loss=0.03968, over 4604.00 frames. ], tot_loss[loss=0.192, simple_loss=0.28, pruned_loss=0.05205, over 938461.71 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:56:12,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=152661.33333333334, ans=15.0 +2024-07-28 11:56:13,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152674.66666666666, ans=0.1 +2024-07-28 11:56:18,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=152674.66666666666, ans=0.015 +2024-07-28 11:56:21,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=152688.0, ans=0.125 +2024-07-28 11:56:30,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=152701.33333333334, ans=0.0 +2024-07-28 11:56:40,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152714.66666666666, ans=0.125 +2024-07-28 11:56:42,395 INFO [train.py:1114] (1/4) Epoch 12, batch 2100, loss[loss=0.1678, simple_loss=0.253, pruned_loss=0.0413, over 4765.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05152, over 940380.91 frames. 
], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:56:47,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=152728.0, ans=0.125 +2024-07-28 11:56:49,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=152741.33333333334, ans=0.125 +2024-07-28 11:56:56,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152754.66666666666, ans=0.125 +2024-07-28 11:57:03,674 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.175e+01 5.578e+01 6.172e+01 6.931e+01 1.014e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 11:57:04,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152768.0, ans=0.0 +2024-07-28 11:57:18,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.18 vs. limit=15.0 +2024-07-28 11:57:19,550 INFO [train.py:1114] (1/4) Epoch 12, batch 2150, loss[loss=0.1558, simple_loss=0.2486, pruned_loss=0.03144, over 4894.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2777, pruned_loss=0.05116, over 944165.90 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:57:19,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=152794.66666666666, ans=0.02 +2024-07-28 11:57:27,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0 +2024-07-28 11:57:27,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152808.0, ans=0.1 +2024-07-28 11:57:27,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=152808.0, ans=0.0 +2024-07-28 11:57:35,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=152821.33333333334, ans=0.125 +2024-07-28 11:57:35,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=152821.33333333334, ans=0.125 +2024-07-28 11:57:40,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=152834.66666666666, ans=0.04949747468305833 +2024-07-28 11:57:52,751 INFO [train.py:1114] (1/4) Epoch 12, batch 2200, loss[loss=0.1937, simple_loss=0.2869, pruned_loss=0.05029, over 4811.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.277, pruned_loss=0.05077, over 943560.49 frames. 
], batch size: 14, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:58:07,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=152888.0, ans=0.125 +2024-07-28 11:58:09,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152888.0, ans=0.125 +2024-07-28 11:58:11,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=152888.0, ans=0.0 +2024-07-28 11:58:14,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.791e+01 6.232e+01 7.216e+01 1.117e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 11:58:17,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=152901.33333333334, ans=0.0 +2024-07-28 11:58:22,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=152914.66666666666, ans=0.0 +2024-07-28 11:58:27,779 INFO [train.py:1114] (1/4) Epoch 12, batch 2250, loss[loss=0.1762, simple_loss=0.2671, pruned_loss=0.04267, over 4693.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2767, pruned_loss=0.0508, over 942185.05 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:58:31,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=152928.0, ans=0.125 +2024-07-28 11:58:34,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=152941.33333333334, ans=6.0 +2024-07-28 11:58:38,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152941.33333333334, ans=0.1 +2024-07-28 11:58:49,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152968.0, ans=0.1 +2024-07-28 11:58:55,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152981.33333333334, ans=0.125 +2024-07-28 11:59:03,209 INFO [train.py:1114] (1/4) Epoch 12, batch 2300, loss[loss=0.1517, simple_loss=0.2383, pruned_loss=0.03253, over 4929.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.276, pruned_loss=0.0506, over 939906.27 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:59:07,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=152994.66666666666, ans=0.125 +2024-07-28 11:59:07,723 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.63 vs. limit=10.0 +2024-07-28 11:59:17,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=153021.33333333334, ans=0.0 +2024-07-28 11:59:18,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.65 vs. 
limit=15.0 +2024-07-28 11:59:24,486 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.617e+01 6.257e+01 7.219e+01 1.104e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 11:59:28,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=153034.66666666666, ans=0.125 +2024-07-28 11:59:30,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=153048.0, ans=0.2 +2024-07-28 11:59:35,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0 +2024-07-28 11:59:36,825 INFO [train.py:1114] (1/4) Epoch 12, batch 2350, loss[loss=0.1659, simple_loss=0.2487, pruned_loss=0.04161, over 4636.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2762, pruned_loss=0.05068, over 942010.37 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:59:57,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=153101.33333333334, ans=10.0 +2024-07-28 12:00:02,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=153101.33333333334, ans=0.0 +2024-07-28 12:00:04,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=153114.66666666666, ans=0.125 +2024-07-28 12:00:10,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=153128.0, ans=0.0 +2024-07-28 12:00:10,574 INFO [train.py:1114] (1/4) Epoch 12, batch 2400, loss[loss=0.1839, simple_loss=0.2669, pruned_loss=0.05044, over 4639.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.277, pruned_loss=0.05068, over 941909.73 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 12:00:18,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=153141.33333333334, ans=0.125 +2024-07-28 12:00:19,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.30 vs. limit=22.5 +2024-07-28 12:00:22,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.76 vs. limit=15.0 +2024-07-28 12:00:31,899 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.703e+01 6.200e+01 6.966e+01 9.820e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 12:00:33,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.38 vs. limit=15.0 +2024-07-28 12:00:36,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=153181.33333333334, ans=0.125 +2024-07-28 12:00:41,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=153181.33333333334, ans=10.0 +2024-07-28 12:00:45,549 INFO [train.py:1114] (1/4) Epoch 12, batch 2450, loss[loss=0.2072, simple_loss=0.3033, pruned_loss=0.05553, over 4701.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2789, pruned_loss=0.05184, over 937484.51 frames. 
], batch size: 13, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:00:50,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=153194.66666666666, ans=0.125 +2024-07-28 12:00:55,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153208.0, ans=0.125 +2024-07-28 12:00:59,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=153221.33333333334, ans=0.07 +2024-07-28 12:01:05,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=153234.66666666666, ans=0.0 +2024-07-28 12:01:08,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153234.66666666666, ans=0.1 +2024-07-28 12:01:18,860 INFO [train.py:1114] (1/4) Epoch 12, batch 2500, loss[loss=0.2159, simple_loss=0.2863, pruned_loss=0.07278, over 4807.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2795, pruned_loss=0.05255, over 939371.95 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:01:19,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=153261.33333333334, ans=0.125 +2024-07-28 12:01:24,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153274.66666666666, ans=0.1 +2024-07-28 12:01:26,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=153274.66666666666, ans=0.2 +2024-07-28 12:01:41,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=153301.33333333334, ans=0.1 +2024-07-28 12:01:41,948 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.651e+01 6.166e+01 7.013e+01 1.450e+02, threshold=1.233e+02, percent-clipped=1.0 +2024-07-28 12:01:54,189 INFO [train.py:1114] (1/4) Epoch 12, batch 2550, loss[loss=0.1655, simple_loss=0.2458, pruned_loss=0.04261, over 4816.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2798, pruned_loss=0.05234, over 938947.07 frames. ], batch size: 11, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:01:54,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=153328.0, ans=0.125 +2024-07-28 12:02:28,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153381.33333333334, ans=0.125 +2024-07-28 12:02:30,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=153381.33333333334, ans=0.0 +2024-07-28 12:02:32,198 INFO [train.py:1114] (1/4) Epoch 12, batch 2600, loss[loss=0.1985, simple_loss=0.2882, pruned_loss=0.05437, over 4894.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2803, pruned_loss=0.05252, over 937549.59 frames. 
], batch size: 13, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:02:49,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153394.66666666666, ans=0.125 +2024-07-28 12:02:54,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=153408.0, ans=0.0 +2024-07-28 12:02:57,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=153408.0, ans=0.0 +2024-07-28 12:02:57,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=153408.0, ans=0.0 +2024-07-28 12:02:59,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=153421.33333333334, ans=0.0 +2024-07-28 12:03:00,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=153421.33333333334, ans=0.07 +2024-07-28 12:03:04,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153421.33333333334, ans=0.1 +2024-07-28 12:03:07,391 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.682e+01 6.373e+01 7.145e+01 1.030e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:03:20,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=153448.0, ans=0.125 +2024-07-28 12:03:23,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=153461.33333333334, ans=15.0 +2024-07-28 12:03:23,893 INFO [train.py:1114] (1/4) Epoch 12, batch 2650, loss[loss=0.2127, simple_loss=0.3051, pruned_loss=0.06015, over 4636.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2806, pruned_loss=0.05236, over 940179.77 frames. ], batch size: 16, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:03:32,253 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.90 vs. limit=15.0 +2024-07-28 12:03:32,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-28 12:03:38,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=153488.0, ans=0.0 +2024-07-28 12:03:48,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=153501.33333333334, ans=0.025 +2024-07-28 12:03:57,518 INFO [train.py:1114] (1/4) Epoch 12, batch 2700, loss[loss=0.1979, simple_loss=0.2945, pruned_loss=0.05065, over 4739.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2811, pruned_loss=0.05232, over 940348.09 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:04:03,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=153528.0, ans=0.125 +2024-07-28 12:04:04,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.48 vs. 
limit=15.0 +2024-07-28 12:04:16,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.29 vs. limit=22.5 +2024-07-28 12:04:22,342 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 5.642e+01 6.087e+01 6.756e+01 9.576e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 12:04:27,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=153581.33333333334, ans=0.0 +2024-07-28 12:04:28,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.81 vs. limit=10.0 +2024-07-28 12:04:29,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=153581.33333333334, ans=0.125 +2024-07-28 12:04:31,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=153581.33333333334, ans=0.125 +2024-07-28 12:04:34,300 INFO [train.py:1114] (1/4) Epoch 12, batch 2750, loss[loss=0.1575, simple_loss=0.2398, pruned_loss=0.03755, over 4711.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2798, pruned_loss=0.0519, over 940636.36 frames. ], batch size: 12, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:04:35,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153594.66666666666, ans=0.1 +2024-07-28 12:04:43,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153608.0, ans=0.125 +2024-07-28 12:05:03,242 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.68 vs. limit=12.0 +2024-07-28 12:05:07,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=153648.0, ans=0.125 +2024-07-28 12:05:10,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=153661.33333333334, ans=0.125 +2024-07-28 12:05:10,491 INFO [train.py:1114] (1/4) Epoch 12, batch 2800, loss[loss=0.2485, simple_loss=0.3252, pruned_loss=0.08591, over 3324.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2794, pruned_loss=0.052, over 938181.62 frames. ], batch size: 35, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:05:15,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=153661.33333333334, ans=0.2 +2024-07-28 12:05:31,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.464e+01 6.125e+01 7.070e+01 1.105e+02, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:05:36,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=12.0 +2024-07-28 12:05:43,859 INFO [train.py:1114] (1/4) Epoch 12, batch 2850, loss[loss=0.1657, simple_loss=0.2566, pruned_loss=0.0374, over 4960.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.28, pruned_loss=0.05209, over 936535.28 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:05:44,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.38 vs. 
limit=15.0 +2024-07-28 12:05:57,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153754.66666666666, ans=0.125 +2024-07-28 12:06:02,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=153754.66666666666, ans=0.0 +2024-07-28 12:06:13,917 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.89 vs. limit=22.5 +2024-07-28 12:06:16,604 INFO [train.py:1114] (1/4) Epoch 12, batch 2900, loss[loss=0.1752, simple_loss=0.2589, pruned_loss=0.0458, over 4829.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.281, pruned_loss=0.05196, over 940157.46 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:06:30,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=153808.0, ans=0.125 +2024-07-28 12:06:39,898 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.688e+01 6.255e+01 7.399e+01 1.060e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 12:06:41,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=153834.66666666666, ans=0.025 +2024-07-28 12:06:49,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0 +2024-07-28 12:06:51,618 INFO [train.py:1114] (1/4) Epoch 12, batch 2950, loss[loss=0.1779, simple_loss=0.266, pruned_loss=0.04491, over 4705.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.28, pruned_loss=0.05178, over 939089.99 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:06:52,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=153861.33333333334, ans=0.2 +2024-07-28 12:06:53,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=153861.33333333334, ans=0.125 +2024-07-28 12:07:04,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=153888.0, ans=0.125 +2024-07-28 12:07:19,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0 +2024-07-28 12:07:25,338 INFO [train.py:1114] (1/4) Epoch 12, batch 3000, loss[loss=0.1876, simple_loss=0.2758, pruned_loss=0.04972, over 4757.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2795, pruned_loss=0.05152, over 938379.51 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:07:25,338 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 12:07:44,403 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.8752, 4.7434, 3.7541, 3.6297], device='cuda:1') +2024-07-28 12:07:46,423 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.1682, simple_loss=0.272, pruned_loss=0.03224, over 944034.00 frames. 
+2024-07-28 12:07:46,424 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 12:08:03,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=153954.66666666666, ans=0.125 +2024-07-28 12:08:08,182 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.599e+01 6.354e+01 7.168e+01 1.019e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 12:08:11,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153968.0, ans=0.125 +2024-07-28 12:08:15,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-07-28 12:08:17,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=153981.33333333334, ans=0.2 +2024-07-28 12:08:20,400 INFO [train.py:1114] (1/4) Epoch 12, batch 3050, loss[loss=0.1902, simple_loss=0.2728, pruned_loss=0.05379, over 4646.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2801, pruned_loss=0.05181, over 937208.60 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:36,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=154021.33333333334, ans=0.05 +2024-07-28 12:08:36,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=154021.33333333334, ans=0.125 +2024-07-28 12:08:36,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-28 12:08:42,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=154034.66666666666, ans=0.0 +2024-07-28 12:08:53,626 INFO [train.py:1114] (1/4) Epoch 12, batch 3100, loss[loss=0.1963, simple_loss=0.2872, pruned_loss=0.05273, over 4634.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2794, pruned_loss=0.05153, over 938053.46 frames. ], batch size: 16, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:54,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154061.33333333334, ans=0.125 +2024-07-28 12:09:07,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=154088.0, ans=0.015 +2024-07-28 12:09:14,830 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.719e+01 6.483e+01 7.749e+01 1.294e+02, threshold=1.297e+02, percent-clipped=1.0 +2024-07-28 12:09:19,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=154114.66666666666, ans=0.0 +2024-07-28 12:09:26,874 INFO [train.py:1114] (1/4) Epoch 12, batch 3150, loss[loss=0.1938, simple_loss=0.2763, pruned_loss=0.05565, over 4615.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.279, pruned_loss=0.05166, over 938316.53 frames. 
], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:09:34,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=154141.33333333334, ans=0.025 +2024-07-28 12:09:40,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0 +2024-07-28 12:09:44,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154154.66666666666, ans=0.1 +2024-07-28 12:09:54,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=154168.0, ans=0.125 +2024-07-28 12:09:59,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154181.33333333334, ans=0.0 +2024-07-28 12:10:02,169 INFO [train.py:1114] (1/4) Epoch 12, batch 3200, loss[loss=0.2064, simple_loss=0.3021, pruned_loss=0.05537, over 4814.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2778, pruned_loss=0.05082, over 939835.07 frames. ], batch size: 13, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:04,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=154194.66666666666, ans=0.0 +2024-07-28 12:10:05,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.37 vs. limit=15.0 +2024-07-28 12:10:05,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=154194.66666666666, ans=0.0 +2024-07-28 12:10:07,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=154194.66666666666, ans=0.1 +2024-07-28 12:10:08,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=154208.0, ans=0.125 +2024-07-28 12:10:10,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=154208.0, ans=0.025 +2024-07-28 12:10:14,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=154208.0, ans=0.125 +2024-07-28 12:10:23,356 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.738e+01 6.146e+01 6.845e+01 1.156e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 12:10:28,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=154248.0, ans=0.125 +2024-07-28 12:10:35,320 INFO [train.py:1114] (1/4) Epoch 12, batch 3250, loss[loss=0.2002, simple_loss=0.2916, pruned_loss=0.05438, over 4935.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2781, pruned_loss=0.05094, over 941026.26 frames. 
], batch size: 14, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:36,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=154261.33333333334, ans=0.125 +2024-07-28 12:10:37,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=154261.33333333334, ans=0.125 +2024-07-28 12:10:41,343 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:10:50,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=154288.0, ans=0.125 +2024-07-28 12:10:50,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=154288.0, ans=0.0 +2024-07-28 12:10:52,251 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=12.0 +2024-07-28 12:10:53,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=154288.0, ans=0.0 +2024-07-28 12:11:09,145 INFO [train.py:1114] (1/4) Epoch 12, batch 3300, loss[loss=0.1878, simple_loss=0.279, pruned_loss=0.04831, over 4704.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2782, pruned_loss=0.05129, over 941336.34 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:10,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154328.0, ans=0.125 +2024-07-28 12:11:12,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=154328.0, ans=0.2 +2024-07-28 12:11:14,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=154328.0, ans=0.125 +2024-07-28 12:11:20,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=154341.33333333334, ans=0.09899494936611666 +2024-07-28 12:11:23,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=154354.66666666666, ans=0.2 +2024-07-28 12:11:25,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154354.66666666666, ans=0.125 +2024-07-28 12:11:25,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.37 vs. limit=15.0 +2024-07-28 12:11:31,596 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.622e+01 6.140e+01 6.825e+01 9.627e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 12:11:32,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.92 vs. limit=10.0 +2024-07-28 12:11:38,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154381.33333333334, ans=0.1 +2024-07-28 12:11:45,528 INFO [train.py:1114] (1/4) Epoch 12, batch 3350, loss[loss=0.2222, simple_loss=0.31, pruned_loss=0.06718, over 4601.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2799, pruned_loss=0.05173, over 938988.52 frames. 
], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:47,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-28 12:12:10,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=154421.33333333334, ans=0.0 +2024-07-28 12:12:10,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154421.33333333334, ans=0.125 +2024-07-28 12:12:25,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=154448.0, ans=0.125 +2024-07-28 12:12:28,956 INFO [train.py:1114] (1/4) Epoch 12, batch 3400, loss[loss=0.1521, simple_loss=0.2385, pruned_loss=0.03284, over 4789.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2797, pruned_loss=0.05209, over 937334.17 frames. ], batch size: 11, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:12:38,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.17 vs. limit=22.5 +2024-07-28 12:12:50,514 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.633e+01 6.152e+01 6.788e+01 1.015e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:12:54,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.70 vs. limit=22.5 +2024-07-28 12:12:58,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154514.66666666666, ans=0.125 +2024-07-28 12:12:59,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=154514.66666666666, ans=10.0 +2024-07-28 12:12:59,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=154514.66666666666, ans=0.125 +2024-07-28 12:13:04,217 INFO [train.py:1114] (1/4) Epoch 12, batch 3450, loss[loss=0.1776, simple_loss=0.2786, pruned_loss=0.03825, over 4722.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2793, pruned_loss=0.05186, over 937545.91 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:10,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=154541.33333333334, ans=0.0 +2024-07-28 12:13:17,357 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:13:27,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=154568.0, ans=0.125 +2024-07-28 12:13:36,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154581.33333333334, ans=0.125 +2024-07-28 12:13:39,606 INFO [train.py:1114] (1/4) Epoch 12, batch 3500, loss[loss=0.1662, simple_loss=0.2465, pruned_loss=0.04293, over 4945.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2777, pruned_loss=0.0513, over 938506.85 frames. 
], batch size: 12, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:39,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154594.66666666666, ans=0.125 +2024-07-28 12:13:41,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.57 vs. limit=6.0 +2024-07-28 12:13:49,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=154608.0, ans=0.035 +2024-07-28 12:13:54,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=154621.33333333334, ans=0.125 +2024-07-28 12:14:01,205 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+01 5.546e+01 6.148e+01 6.737e+01 9.893e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:14:06,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=154648.0, ans=0.0 +2024-07-28 12:14:07,554 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=15.0 +2024-07-28 12:14:12,941 INFO [train.py:1114] (1/4) Epoch 12, batch 3550, loss[loss=0.1769, simple_loss=0.2799, pruned_loss=0.03691, over 4674.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2771, pruned_loss=0.05086, over 939007.57 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 64.0 +2024-07-28 12:14:13,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=154661.33333333334, ans=0.125 +2024-07-28 12:14:14,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.03 vs. limit=15.0 +2024-07-28 12:14:30,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=154674.66666666666, ans=0.2 +2024-07-28 12:14:33,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 12:14:35,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=154688.0, ans=0.09899494936611666 +2024-07-28 12:14:39,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=154688.0, ans=0.2 +2024-07-28 12:14:46,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-28 12:14:53,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154728.0, ans=0.0 +2024-07-28 12:14:54,227 INFO [train.py:1114] (1/4) Epoch 12, batch 3600, loss[loss=0.1643, simple_loss=0.2531, pruned_loss=0.03779, over 4967.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2775, pruned_loss=0.05122, over 940566.13 frames. 
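], batch size: 13, lr: 6.29e-03, grad_scale: 32.0

The grad_scale field in these records is the dynamic loss-scaling factor used for mixed-precision training: it doubles periodically while gradients stay finite (32.0 becomes 64.0 at batch 3550 above) and is halved when an overflow is detected (back to 32.0 by batch 3600). A generic torch.cuda.amp sketch of that mechanism, not the training script's exact code:

```python
# Dynamic loss scaling consistent with the grad_scale column above.
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)


def train_step(model, optimizer, criterion, inputs, targets):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = criterion(model(inputs), targets)
    scaler.scale(loss).backward()  # backprop with gradients at the current scale
    scaler.step(optimizer)         # skipped if inf/nan gradients were found
    scaler.update()                # grows the scale, or halves it on overflow
    return loss.detach(), scaler.get_scale()  # get_scale() is the logged value
```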
+2024-07-28 12:15:16,128 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.873e+01 6.627e+01 7.814e+01 1.281e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-28 12:15:20,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=154781.33333333334, ans=0.1 +2024-07-28 12:15:23,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=154781.33333333334, ans=0.125 +2024-07-28 12:15:27,720 INFO [train.py:1114] (1/4) Epoch 12, batch 3650, loss[loss=0.1911, simple_loss=0.281, pruned_loss=0.05058, over 4889.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2772, pruned_loss=0.05129, over 941041.10 frames. ], batch size: 15, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:15:28,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154794.66666666666, ans=0.125 +2024-07-28 12:15:38,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=154808.0, ans=0.05 +2024-07-28 12:15:50,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=154834.66666666666, ans=0.125 +2024-07-28 12:15:55,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.54 vs. limit=22.5 +2024-07-28 12:15:59,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=154848.0, ans=0.125 +2024-07-28 12:16:02,902 INFO [train.py:1114] (1/4) Epoch 12, batch 3700, loss[loss=0.193, simple_loss=0.2943, pruned_loss=0.04588, over 4934.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2767, pruned_loss=0.05051, over 941987.81 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:03,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=12.0 +2024-07-28 12:16:05,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154861.33333333334, ans=0.125 +2024-07-28 12:16:08,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154861.33333333334, ans=0.125 +2024-07-28 12:16:16,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=154888.0, ans=0.0 +2024-07-28 12:16:22,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=154901.33333333334, ans=0.2 +2024-07-28 12:16:24,632 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.998e+01 5.499e+01 5.998e+01 6.974e+01 1.210e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-28 12:16:32,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.08 vs. 
limit=12.0 +2024-07-28 12:16:34,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154914.66666666666, ans=0.125 +2024-07-28 12:16:34,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=154914.66666666666, ans=0.2 +2024-07-28 12:16:35,613 INFO [train.py:1114] (1/4) Epoch 12, batch 3750, loss[loss=0.1658, simple_loss=0.2436, pruned_loss=0.04402, over 4802.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2775, pruned_loss=0.05043, over 943586.25 frames. ], batch size: 11, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:45,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154941.33333333334, ans=0.125 +2024-07-28 12:16:49,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=154954.66666666666, ans=0.125 +2024-07-28 12:17:09,272 INFO [train.py:1114] (1/4) Epoch 12, batch 3800, loss[loss=0.1795, simple_loss=0.2683, pruned_loss=0.04533, over 4806.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2769, pruned_loss=0.05023, over 941736.78 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:13,252 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:17:17,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155008.0, ans=0.1 +2024-07-28 12:17:21,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155008.0, ans=0.1 +2024-07-28 12:17:32,840 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.749e+01 6.230e+01 7.169e+01 2.120e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-28 12:17:47,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.21 vs. limit=10.0 +2024-07-28 12:17:48,210 INFO [train.py:1114] (1/4) Epoch 12, batch 3850, loss[loss=0.166, simple_loss=0.2511, pruned_loss=0.04049, over 4632.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2762, pruned_loss=0.04972, over 942263.66 frames. ], batch size: 16, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:18:22,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=155114.66666666666, ans=0.2 +2024-07-28 12:18:29,371 INFO [train.py:1114] (1/4) Epoch 12, batch 3900, loss[loss=0.1662, simple_loss=0.269, pruned_loss=0.03165, over 4809.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2763, pruned_loss=0.04956, over 942277.75 frames. ], batch size: 14, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:18:30,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155128.0, ans=0.125 +2024-07-28 12:18:37,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.03 vs. 
limit=15.0 +2024-07-28 12:18:38,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=155141.33333333334, ans=0.1 +2024-07-28 12:18:49,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155154.66666666666, ans=0.125 +2024-07-28 12:18:49,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155154.66666666666, ans=0.125 +2024-07-28 12:18:50,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=155154.66666666666, ans=0.2 +2024-07-28 12:18:53,187 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+01 5.496e+01 6.124e+01 6.680e+01 9.090e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:18:54,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=155168.0, ans=0.125 +2024-07-28 12:18:58,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155181.33333333334, ans=0.125 +2024-07-28 12:19:07,238 INFO [train.py:1114] (1/4) Epoch 12, batch 3950, loss[loss=0.2071, simple_loss=0.2984, pruned_loss=0.05788, over 4845.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2768, pruned_loss=0.04992, over 944384.89 frames. ], batch size: 16, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:12,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=155194.66666666666, ans=0.1 +2024-07-28 12:19:25,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=155221.33333333334, ans=0.0 +2024-07-28 12:19:31,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=155234.66666666666, ans=0.125 +2024-07-28 12:19:39,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155248.0, ans=0.0 +2024-07-28 12:19:41,093 INFO [train.py:1114] (1/4) Epoch 12, batch 4000, loss[loss=0.164, simple_loss=0.2471, pruned_loss=0.04046, over 4764.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2779, pruned_loss=0.05092, over 940471.61 frames. 
], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:41,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=155261.33333333334, ans=0.2 +2024-07-28 12:19:41,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=155261.33333333334, ans=0.09899494936611666 +2024-07-28 12:19:48,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=155274.66666666666, ans=0.0 +2024-07-28 12:19:49,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=155274.66666666666, ans=0.0 +2024-07-28 12:19:51,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=155274.66666666666, ans=0.2 +2024-07-28 12:20:02,780 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.540e+01 6.444e+01 7.146e+01 1.519e+02, threshold=1.289e+02, percent-clipped=1.0 +2024-07-28 12:20:07,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=155314.66666666666, ans=0.2 +2024-07-28 12:20:12,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=155314.66666666666, ans=0.0 +2024-07-28 12:20:14,415 INFO [train.py:1114] (1/4) Epoch 12, batch 4050, loss[loss=0.2611, simple_loss=0.3266, pruned_loss=0.0978, over 3036.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.278, pruned_loss=0.05134, over 938468.96 frames. ], batch size: 35, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:14,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=155328.0, ans=0.2 +2024-07-28 12:20:15,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155328.0, ans=0.125 +2024-07-28 12:20:33,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=155354.66666666666, ans=0.2 +2024-07-28 12:20:48,474 INFO [train.py:1114] (1/4) Epoch 12, batch 4100, loss[loss=0.1897, simple_loss=0.2884, pruned_loss=0.04551, over 4908.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2785, pruned_loss=0.05186, over 937337.16 frames. ], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:49,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=155394.66666666666, ans=0.0 +2024-07-28 12:20:50,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155394.66666666666, ans=0.1 +2024-07-28 12:21:06,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=155421.33333333334, ans=0.0 +2024-07-28 12:21:12,173 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.731e+01 6.585e+01 8.286e+01 1.195e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 12:21:12,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. 
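limit=15.0

Each optim.py WARNING above summarizes the recent distribution of gradient norms as five quantiles (min, 25%, median, 75%, max) and derives the clipping threshold from the median: threshold is consistently Clipping_scale times the logged median (here 2.0 * 6.585e+01 ≈ 1.317e+02), with percent-clipped reporting how often that threshold was exceeded. A rough sketch of that bookkeeping; the class and names are illustrative, not icefall's actual optimizer code:

```python
# Grad-norm diagnostics matching the optim.py WARNING lines above.
# Assumed from the logged numbers: the five values are min/25%/50%/75%/max
# of recently seen gradient norms, and threshold = clipping_scale * median.
from collections import deque

import numpy as np


class GradNormTracker:
    def __init__(self, clipping_scale: float = 2.0, history: int = 1024):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)
        self.clipped = 0  # batches clipped since the last report

    def update(self, grad_norm: float) -> float:
        self.norms.append(grad_norm)
        q = np.percentile(self.norms, [0, 25, 50, 75, 100])
        threshold = self.clipping_scale * q[2]  # 2.0 * median
        if grad_norm > threshold:
            self.clipped += 1
        return threshold  # gradients above this get scaled down
```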
+2024-07-28 12:21:15,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 12:21:19,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=155448.0, ans=0.125 +2024-07-28 12:21:23,652 INFO [train.py:1114] (1/4) Epoch 12, batch 4150, loss[loss=0.1681, simple_loss=0.2554, pruned_loss=0.04039, over 4819.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2785, pruned_loss=0.05197, over 937316.90 frames. ], batch size: 13, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:27,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=155461.33333333334, ans=0.2 +2024-07-28 12:21:40,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=155488.0, ans=0.2 +2024-07-28 12:21:46,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155501.33333333334, ans=0.125 +2024-07-28 12:21:49,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=155514.66666666666, ans=0.0 +2024-07-28 12:21:57,094 INFO [train.py:1114] (1/4) Epoch 12, batch 4200, loss[loss=0.2357, simple_loss=0.3128, pruned_loss=0.07929, over 4904.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2791, pruned_loss=0.05218, over 939100.84 frames. ], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:22:08,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155541.33333333334, ans=0.125 +2024-07-28 12:22:08,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.94 vs. limit=15.0 +2024-07-28 12:22:09,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=15.0 +2024-07-28 12:22:18,891 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.397e+01 5.594e+01 6.177e+01 7.434e+01 1.256e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 12:22:29,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=155594.66666666666, ans=0.0 +2024-07-28 12:22:30,563 INFO [train.py:1114] (1/4) Epoch 12, batch 4250, loss[loss=0.166, simple_loss=0.263, pruned_loss=0.03451, over 4648.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.28, pruned_loss=0.05266, over 939958.24 frames. 
], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:22:30,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155594.66666666666, ans=0.1 +2024-07-28 12:22:47,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155621.33333333334, ans=0.125 +2024-07-28 12:22:57,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155648.0, ans=0.125 +2024-07-28 12:22:59,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=155648.0, ans=0.125 +2024-07-28 12:23:03,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=155648.0, ans=0.0 +2024-07-28 12:23:06,442 INFO [train.py:1114] (1/4) Epoch 12, batch 4300, loss[loss=0.1764, simple_loss=0.2643, pruned_loss=0.04425, over 4759.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2803, pruned_loss=0.0528, over 939132.52 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:19,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155688.0, ans=0.1 +2024-07-28 12:23:24,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=155688.0, ans=0.2 +2024-07-28 12:23:27,974 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.634e+01 6.193e+01 6.969e+01 9.578e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 12:23:37,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=155714.66666666666, ans=0.05 +2024-07-28 12:23:39,247 INFO [train.py:1114] (1/4) Epoch 12, batch 4350, loss[loss=0.1741, simple_loss=0.2636, pruned_loss=0.04233, over 4757.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2795, pruned_loss=0.05205, over 940288.43 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:45,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0 +2024-07-28 12:23:46,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155741.33333333334, ans=0.1 +2024-07-28 12:23:53,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=155754.66666666666, ans=0.2 +2024-07-28 12:23:56,364 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:24:28,593 INFO [train.py:1114] (1/4) Epoch 12, batch 4400, loss[loss=0.2063, simple_loss=0.2986, pruned_loss=0.057, over 4808.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2793, pruned_loss=0.0518, over 940291.13 frames. 
], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:24:30,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=155794.66666666666, ans=0.125 +2024-07-28 12:24:35,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=155808.0, ans=0.125 +2024-07-28 12:24:52,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=155834.66666666666, ans=0.0 +2024-07-28 12:24:52,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=155834.66666666666, ans=0.0 +2024-07-28 12:24:52,798 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.483e+01 6.215e+01 6.857e+01 1.527e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 12:25:07,446 INFO [train.py:1114] (1/4) Epoch 12, batch 4450, loss[loss=0.1518, simple_loss=0.2389, pruned_loss=0.03232, over 4946.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2792, pruned_loss=0.05201, over 938516.91 frames. ], batch size: 12, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:07,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=155861.33333333334, ans=0.0 +2024-07-28 12:25:29,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0 +2024-07-28 12:25:34,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=155901.33333333334, ans=0.2 +2024-07-28 12:25:35,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155901.33333333334, ans=0.1 +2024-07-28 12:25:43,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=155914.66666666666, ans=0.125 +2024-07-28 12:25:46,638 INFO [train.py:1114] (1/4) Epoch 12, batch 4500, loss[loss=0.208, simple_loss=0.3023, pruned_loss=0.05683, over 4748.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2795, pruned_loss=0.05173, over 938071.37 frames. ], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:48,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-07-28 12:25:48,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.76 vs. limit=15.0 +2024-07-28 12:25:57,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=155941.33333333334, ans=0.07 +2024-07-28 12:26:03,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=155954.66666666666, ans=0.2 +2024-07-28 12:26:08,387 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.459e+01 6.375e+01 7.469e+01 1.021e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:26:19,641 INFO [train.py:1114] (1/4) Epoch 12, batch 4550, loss[loss=0.1805, simple_loss=0.2658, pruned_loss=0.04761, over 4893.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2796, pruned_loss=0.05191, over 939826.67 frames. 
], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:29,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.92 vs. limit=15.0 +2024-07-28 12:26:33,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=156021.33333333334, ans=0.5 +2024-07-28 12:26:35,295 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=15.0 +2024-07-28 12:26:35,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=156021.33333333334, ans=0.05 +2024-07-28 12:26:40,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=156034.66666666666, ans=0.125 +2024-07-28 12:26:48,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=156048.0, ans=0.125 +2024-07-28 12:26:48,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=156048.0, ans=0.125 +2024-07-28 12:26:54,973 INFO [train.py:1114] (1/4) Epoch 12, batch 4600, loss[loss=0.1647, simple_loss=0.261, pruned_loss=0.03419, over 4508.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2792, pruned_loss=0.05154, over 938057.44 frames. ], batch size: 21, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:58,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.97 vs. limit=10.0 +2024-07-28 12:27:16,922 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.723e+01 6.384e+01 7.730e+01 1.121e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 12:27:26,077 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0 +2024-07-28 12:27:28,346 INFO [train.py:1114] (1/4) Epoch 12, batch 4650, loss[loss=0.2027, simple_loss=0.2943, pruned_loss=0.05552, over 4850.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2803, pruned_loss=0.05178, over 940115.80 frames. ], batch size: 16, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:27:41,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=156154.66666666666, ans=0.0 +2024-07-28 12:27:47,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=156168.0, ans=0.125 +2024-07-28 12:27:57,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156181.33333333334, ans=0.1 +2024-07-28 12:28:00,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156181.33333333334, ans=0.125 +2024-07-28 12:28:00,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.24 vs. limit=22.5 +2024-07-28 12:28:01,576 INFO [train.py:1114] (1/4) Epoch 12, batch 4700, loss[loss=0.1771, simple_loss=0.2576, pruned_loss=0.04829, over 4705.00 frames. 
], tot_loss[loss=0.1923, simple_loss=0.2802, pruned_loss=0.05222, over 937411.32 frames. ], batch size: 11, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:14,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=156221.33333333334, ans=0.125 +2024-07-28 12:28:15,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156221.33333333334, ans=0.125 +2024-07-28 12:28:20,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.66 vs. limit=15.0 +2024-07-28 12:28:23,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.553e+01 6.034e+01 6.597e+01 9.759e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 12:28:35,482 INFO [train.py:1114] (1/4) Epoch 12, batch 4750, loss[loss=0.1866, simple_loss=0.2738, pruned_loss=0.0497, over 4579.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2803, pruned_loss=0.05218, over 935333.04 frames. ], batch size: 21, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:38,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=156261.33333333334, ans=0.125 +2024-07-28 12:28:51,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=156288.0, ans=0.0 +2024-07-28 12:28:57,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=156301.33333333334, ans=0.0 +2024-07-28 12:29:01,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156301.33333333334, ans=0.125 +2024-07-28 12:29:11,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=156328.0, ans=0.125 +2024-07-28 12:29:11,593 INFO [train.py:1114] (1/4) Epoch 12, batch 4800, loss[loss=0.1927, simple_loss=0.2892, pruned_loss=0.04808, over 4695.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2799, pruned_loss=0.05194, over 933212.79 frames. ], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:16,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156328.0, ans=0.1 +2024-07-28 12:29:17,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=156341.33333333334, ans=0.125 +2024-07-28 12:29:20,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=156341.33333333334, ans=0.1 +2024-07-28 12:29:35,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.583e+01 6.047e+01 7.018e+01 9.420e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 12:29:46,701 INFO [train.py:1114] (1/4) Epoch 12, batch 4850, loss[loss=0.2095, simple_loss=0.3059, pruned_loss=0.05651, over 4742.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2803, pruned_loss=0.05206, over 932486.99 frames. 
], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:59,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=156408.0, ans=0.125 +2024-07-28 12:30:01,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156421.33333333334, ans=0.1 +2024-07-28 12:30:04,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0 +2024-07-28 12:30:25,568 INFO [train.py:1114] (1/4) Epoch 12, batch 4900, loss[loss=0.1784, simple_loss=0.2653, pruned_loss=0.04572, over 4756.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2802, pruned_loss=0.05226, over 934472.35 frames. ], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:30:26,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156461.33333333334, ans=0.1 +2024-07-28 12:30:26,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=156461.33333333334, ans=0.0 +2024-07-28 12:30:38,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156488.0, ans=0.1 +2024-07-28 12:30:46,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156501.33333333334, ans=0.1 +2024-07-28 12:30:48,383 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.179e+01 5.700e+01 6.377e+01 7.192e+01 1.081e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:30:53,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=156514.66666666666, ans=0.0 +2024-07-28 12:30:57,500 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.00 vs. limit=15.0 +2024-07-28 12:30:59,635 INFO [train.py:1114] (1/4) Epoch 12, batch 4950, loss[loss=0.2312, simple_loss=0.3072, pruned_loss=0.07763, over 3124.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2814, pruned_loss=0.05291, over 931015.71 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:04,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.56 vs. limit=15.0 +2024-07-28 12:31:07,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.66 vs. limit=6.0 +2024-07-28 12:31:14,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156554.66666666666, ans=0.125 +2024-07-28 12:31:27,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=156581.33333333334, ans=0.125 +2024-07-28 12:31:30,930 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.75 vs. limit=15.0 +2024-07-28 12:31:33,221 INFO [train.py:1114] (1/4) Epoch 12, batch 5000, loss[loss=0.213, simple_loss=0.3079, pruned_loss=0.05908, over 4668.00 frames. 
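], tot_loss[loss=0.1926, simple_loss=0.2805, pruned_loss=0.05238, over 935057.35 frames. ], batch size: 14, lr: 6.26e-03, grad_scale: 32.0

Most of the scaling.py INFO traffic tracks ScheduledFloat parameters: module constants such as balancer probabilities, skip rates and bypass scale_min whose current value (the logged ans) is a piecewise-linear function of batch_count. A minimal standalone sketch of that idea; the schedule points below are invented for illustration and are not the ones used in this run:

```python
class ScheduledFloat:
    """Piecewise-linear schedule over batch_count, in the spirit of the
    scaling.py entries above (a sketch, not icefall's implementation)."""

    def __init__(self, *points):
        self.points = sorted(points)  # (batch_count, value) pairs

    def value(self, batch_count: float) -> float:
        x0, y0 = self.points[0]
        if batch_count <= x0:
            return y0
        for x1, y1 in self.points[1:]:
            if batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)  # interpolate between the two points
            x0, y0 = x1, y1
        return y0  # past the last point: hold the final value


# Hypothetical schedule: 0.3 at batch 0, decaying to 0.125 by batch 20000,
# then held, so a late-training query returns the final constant value.
prob = ScheduledFloat((0.0, 0.3), (20000.0, 0.125))
print(prob.value(156594.67))  # -> 0.125
```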
+2024-07-28 12:31:33,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156594.66666666666, ans=0.1 +2024-07-28 12:31:36,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=156594.66666666666, ans=0.125 +2024-07-28 12:31:36,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=156594.66666666666, ans=0.0 +2024-07-28 12:31:41,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=156608.0, ans=0.125 +2024-07-28 12:32:00,477 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.699e+01 6.190e+01 6.580e+01 9.599e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 12:32:05,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=156648.0, ans=0.025 +2024-07-28 12:32:11,981 INFO [train.py:1114] (1/4) Epoch 12, batch 5050, loss[loss=0.2003, simple_loss=0.2893, pruned_loss=0.05565, over 4843.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2801, pruned_loss=0.05217, over 937579.48 frames. ], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:21,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=156674.66666666666, ans=0.0 +2024-07-28 12:32:26,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=156674.66666666666, ans=0.0 +2024-07-28 12:32:27,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=156688.0, ans=0.04949747468305833 +2024-07-28 12:32:28,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=156688.0, ans=0.5 +2024-07-28 12:32:37,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=156701.33333333334, ans=15.0 +2024-07-28 12:32:37,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.58 vs. limit=15.0 +2024-07-28 12:32:45,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=156714.66666666666, ans=0.0 +2024-07-28 12:32:45,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.22 vs. limit=15.0 +2024-07-28 12:32:48,321 INFO [train.py:1114] (1/4) Epoch 12, batch 5100, loss[loss=0.2014, simple_loss=0.2765, pruned_loss=0.06314, over 4776.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2798, pruned_loss=0.05246, over 935098.72 frames. 
], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:33:17,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=156768.0, ans=0.2 +2024-07-28 12:33:19,267 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.805e+01 5.671e+01 6.012e+01 6.981e+01 1.009e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 12:33:36,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=156794.66666666666, ans=0.2 +2024-07-28 12:33:36,870 INFO [train.py:1114] (1/4) Epoch 12, batch 5150, loss[loss=0.2197, simple_loss=0.3, pruned_loss=0.06968, over 4831.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2808, pruned_loss=0.0531, over 935964.48 frames. ], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:33:43,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=156808.0, ans=0.125 +2024-07-28 12:33:44,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=156808.0, ans=0.0 +2024-07-28 12:34:01,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.86 vs. limit=15.0 +2024-07-28 12:34:02,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=156834.66666666666, ans=0.0 +2024-07-28 12:34:04,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=156834.66666666666, ans=0.125 +2024-07-28 12:34:04,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.10 vs. limit=10.0 +2024-07-28 12:34:12,706 INFO [train.py:1114] (1/4) Epoch 12, batch 5200, loss[loss=0.2127, simple_loss=0.2938, pruned_loss=0.06576, over 4664.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2806, pruned_loss=0.05276, over 936256.80 frames. ], batch size: 14, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:17,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=156861.33333333334, ans=0.2 +2024-07-28 12:34:21,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=156874.66666666666, ans=0.2 +2024-07-28 12:34:22,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=156874.66666666666, ans=0.09899494936611666 +2024-07-28 12:34:23,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.35 vs. 
limit=15.0 +2024-07-28 12:34:34,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=156888.0, ans=0.125 +2024-07-28 12:34:39,943 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+01 5.675e+01 6.398e+01 7.446e+01 1.094e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 12:34:42,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=156901.33333333334, ans=0.0 +2024-07-28 12:34:46,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=156914.66666666666, ans=0.025 +2024-07-28 12:34:51,270 INFO [train.py:1114] (1/4) Epoch 12, batch 5250, loss[loss=0.209, simple_loss=0.3041, pruned_loss=0.05697, over 4897.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2802, pruned_loss=0.05247, over 936239.93 frames. ], batch size: 13, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:54,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.96 vs. limit=15.0 +2024-07-28 12:34:58,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156941.33333333334, ans=0.0 +2024-07-28 12:34:58,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156941.33333333334, ans=0.1 +2024-07-28 12:34:58,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156941.33333333334, ans=0.0 +2024-07-28 12:35:02,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156941.33333333334, ans=0.1 +2024-07-28 12:35:07,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=156954.66666666666, ans=0.07 +2024-07-28 12:35:08,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=156954.66666666666, ans=0.125 +2024-07-28 12:35:15,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.62 vs. limit=15.0 +2024-07-28 12:35:21,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=156981.33333333334, ans=0.0 +2024-07-28 12:35:21,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=156981.33333333334, ans=0.0 +2024-07-28 12:35:21,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=156981.33333333334, ans=0.07 +2024-07-28 12:35:24,840 INFO [train.py:1114] (1/4) Epoch 12, batch 5300, loss[loss=0.2226, simple_loss=0.3132, pruned_loss=0.06601, over 4643.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2791, pruned_loss=0.05191, over 934866.67 frames. 
], batch size: 16, lr: 6.25e-03, grad_scale: 32.0
+2024-07-28 12:35:29,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=156994.66666666666, ans=0.125
+2024-07-28 12:35:30,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0
+2024-07-28 12:35:34,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=157008.0, ans=0.0
+2024-07-28 12:35:39,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=157021.33333333334, ans=0.125
+2024-07-28 12:35:46,889 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.552e+01 6.428e+01 7.649e+01 1.141e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 12:35:48,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=157034.66666666666, ans=0.0
+2024-07-28 12:35:51,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157048.0, ans=0.1
+2024-07-28 12:35:58,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=157048.0, ans=0.2
+2024-07-28 12:36:00,588 INFO [train.py:1114] (1/4) Epoch 12, batch 5350, loss[loss=0.1747, simple_loss=0.2519, pruned_loss=0.04876, over 4523.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2795, pruned_loss=0.05182, over 936807.11 frames. ], batch size: 10, lr: 6.25e-03, grad_scale: 32.0
+2024-07-28 12:36:07,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=157061.33333333334, ans=0.025
+2024-07-28 12:36:10,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157074.66666666666, ans=0.1
+2024-07-28 12:36:10,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=157074.66666666666, ans=0.2
+2024-07-28 12:36:15,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=157088.0, ans=0.125
+2024-07-28 12:36:16,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=157088.0, ans=0.0
+2024-07-28 12:36:19,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157088.0, ans=0.1
+2024-07-28 12:36:26,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=157101.33333333334, ans=0.0
+2024-07-28 12:36:35,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=157114.66666666666, ans=0.2
+2024-07-28 12:36:36,742 INFO [train.py:1114] (1/4) Epoch 12, batch 5400, loss[loss=0.2307, simple_loss=0.3233, pruned_loss=0.06909, over 4120.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.28, pruned_loss=0.05166, over 931598.64 frames. ], batch size: 25, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:36:38,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=157128.0, ans=15.0
+2024-07-28 12:36:40,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=157128.0, ans=0.0
+2024-07-28 12:36:42,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=157128.0, ans=0.2
+2024-07-28 12:36:48,833 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:36:58,603 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 6.108e+01 6.944e+01 7.812e+01 1.147e+02, threshold=1.389e+02, percent-clipped=0.0
+2024-07-28 12:37:08,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=157181.33333333334, ans=0.0
+2024-07-28 12:37:09,775 INFO [train.py:1114] (1/4) Epoch 12, batch 5450, loss[loss=0.1554, simple_loss=0.2338, pruned_loss=0.03849, over 4710.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.279, pruned_loss=0.05108, over 934219.29 frames. ], batch size: 11, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:37:17,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=15.0
+2024-07-28 12:37:31,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0
+2024-07-28 12:37:43,607 INFO [train.py:1114] (1/4) Epoch 12, batch 5500, loss[loss=0.201, simple_loss=0.2854, pruned_loss=0.05826, over 4252.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05155, over 931844.27 frames. ], batch size: 26, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:37:51,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=157274.66666666666, ans=0.125
+2024-07-28 12:37:54,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=157274.66666666666, ans=0.125
+2024-07-28 12:37:59,430 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.69 vs. limit=15.0
+2024-07-28 12:38:07,909 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.744e+01 6.392e+01 7.523e+01 1.431e+02, threshold=1.278e+02, percent-clipped=1.0
+2024-07-28 12:38:16,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=157314.66666666666, ans=0.125
+2024-07-28 12:38:16,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=157314.66666666666, ans=0.125
+2024-07-28 12:38:19,219 INFO [train.py:1114] (1/4) Epoch 12, batch 5550, loss[loss=0.2252, simple_loss=0.3014, pruned_loss=0.07456, over 4703.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2777, pruned_loss=0.05113, over 933645.37 frames. ], batch size: 12, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:38:22,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=157328.0, ans=0.0
+2024-07-28 12:38:29,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=157341.33333333334, ans=0.1
+2024-07-28 12:38:35,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=157354.66666666666, ans=0.2
+2024-07-28 12:38:41,806 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:38:45,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=12.0
+2024-07-28 12:38:46,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=157381.33333333334, ans=0.125
+2024-07-28 12:38:52,989 INFO [train.py:1114] (1/4) Epoch 12, batch 5600, loss[loss=0.1859, simple_loss=0.285, pruned_loss=0.04335, over 4746.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2787, pruned_loss=0.05127, over 934854.25 frames. ], batch size: 14, lr: 6.24e-03, grad_scale: 64.0
+2024-07-28 12:38:53,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=157394.66666666666, ans=0.125
+2024-07-28 12:39:03,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=157408.0, ans=0.05
+2024-07-28 12:39:08,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=157421.33333333334, ans=0.125
+2024-07-28 12:39:11,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=157421.33333333334, ans=0.0
+2024-07-28 12:39:15,770 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.521e+01 6.342e+01 7.244e+01 1.033e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 12:39:17,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157434.66666666666, ans=0.1
+2024-07-28 12:39:26,384 INFO [train.py:1114] (1/4) Epoch 12, batch 5650, loss[loss=0.1991, simple_loss=0.2816, pruned_loss=0.05834, over 4532.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2769, pruned_loss=0.05084, over 937171.88 frames. ], batch size: 21, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:39:34,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.02 vs. limit=22.5
+2024-07-28 12:39:42,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=157488.0, ans=0.07
+2024-07-28 12:39:53,361 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:39:58,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=157514.66666666666, ans=0.125
+2024-07-28 12:40:01,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0
+2024-07-28 12:40:01,552 INFO [train.py:1114] (1/4) Epoch 12, batch 5700, loss[loss=0.1846, simple_loss=0.2757, pruned_loss=0.04674, over 4697.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2773, pruned_loss=0.05088, over 938481.48 frames. ], batch size: 13, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:40:04,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=157528.0, ans=0.125
+2024-07-28 12:40:08,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=157528.0, ans=0.0
+2024-07-28 12:40:10,401 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=15.0
+2024-07-28 12:40:11,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157541.33333333334, ans=0.1
+2024-07-28 12:40:22,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=157554.66666666666, ans=0.125
+2024-07-28 12:40:25,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=157568.0, ans=15.0
+2024-07-28 12:40:26,154 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.907e+01 5.849e+01 6.761e+01 7.551e+01 1.061e+02, threshold=1.352e+02, percent-clipped=0.0
+2024-07-28 12:40:36,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=157594.66666666666, ans=0.125
+2024-07-28 12:40:36,677 INFO [train.py:1114] (1/4) Epoch 12, batch 5750, loss[loss=0.1765, simple_loss=0.2805, pruned_loss=0.03623, over 4683.00 frames. ], tot_loss[loss=0.19, simple_loss=0.278, pruned_loss=0.05102, over 938289.10 frames. ], batch size: 19, lr: 6.24e-03, grad_scale: 32.0
+2024-07-28 12:40:56,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157621.33333333334, ans=0.1
+2024-07-28 12:41:00,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157621.33333333334, ans=0.125
+2024-07-28 12:41:01,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=157621.33333333334, ans=0.025
+2024-07-28 12:41:13,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157648.0, ans=0.125
+2024-07-28 12:41:16,559 INFO [train.py:1114] (1/4) Epoch 12, batch 5800, loss[loss=0.2081, simple_loss=0.2961, pruned_loss=0.06004, over 4685.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.279, pruned_loss=0.05139, over 937110.64 frames. ], batch size: 19, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:41:29,305 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0
+2024-07-28 12:41:33,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=157688.0, ans=0.2
+2024-07-28 12:41:36,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=157688.0, ans=0.125
+2024-07-28 12:41:37,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=157688.0, ans=0.125
+2024-07-28 12:41:41,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=157701.33333333334, ans=0.125
+2024-07-28 12:41:43,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.542e+01 6.072e+01 7.218e+01 1.008e+02, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 12:41:47,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=157714.66666666666, ans=0.0
+2024-07-28 12:41:54,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=157714.66666666666, ans=0.04949747468305833
+2024-07-28 12:41:59,704 INFO [train.py:1114] (1/4) Epoch 12, batch 5850, loss[loss=0.1944, simple_loss=0.2857, pruned_loss=0.05156, over 4569.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.279, pruned_loss=0.05182, over 937958.32 frames. ], batch size: 21, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:42:01,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=157728.0, ans=0.0
+2024-07-28 12:42:08,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157741.33333333334, ans=0.1
+2024-07-28 12:42:18,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.23 vs. limit=6.0
+2024-07-28 12:42:22,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0
+2024-07-28 12:42:25,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=157768.0, ans=0.05
+2024-07-28 12:42:28,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=157781.33333333334, ans=0.0
+2024-07-28 12:42:33,079 INFO [train.py:1114] (1/4) Epoch 12, batch 5900, loss[loss=0.2156, simple_loss=0.3052, pruned_loss=0.06301, over 4693.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.28, pruned_loss=0.05231, over 937961.34 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:42:42,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=157808.0, ans=0.0
+2024-07-28 12:42:44,914 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:42:47,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=157821.33333333334, ans=0.2
+2024-07-28 12:42:48,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157821.33333333334, ans=0.125
+2024-07-28 12:42:51,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=157821.33333333334, ans=0.2
+2024-07-28 12:42:56,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.671e+01 6.327e+01 7.319e+01 1.125e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-28 12:42:58,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.90 vs. limit=22.5
+2024-07-28 12:43:07,205 INFO [train.py:1114] (1/4) Epoch 12, batch 5950, loss[loss=0.2068, simple_loss=0.3033, pruned_loss=0.05519, over 4666.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2804, pruned_loss=0.05216, over 939554.15 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:43:08,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=157861.33333333334, ans=0.125
+2024-07-28 12:43:17,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=157874.66666666666, ans=0.125
+2024-07-28 12:43:18,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.43 vs. limit=22.5
+2024-07-28 12:43:18,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=157874.66666666666, ans=0.0
+2024-07-28 12:43:18,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=157874.66666666666, ans=0.2
+2024-07-28 12:43:25,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=157888.0, ans=0.125
+2024-07-28 12:43:30,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=157901.33333333334, ans=0.025
+2024-07-28 12:43:31,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.58 vs. limit=15.0
+2024-07-28 12:43:31,661 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=15.0
+2024-07-28 12:43:40,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=157914.66666666666, ans=0.125
+2024-07-28 12:43:42,909 INFO [train.py:1114] (1/4) Epoch 12, batch 6000, loss[loss=0.2343, simple_loss=0.3209, pruned_loss=0.07384, over 4176.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2801, pruned_loss=0.05237, over 937166.30 frames. ], batch size: 25, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:43:42,910 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 12:43:50,657 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.5632, 3.4173, 5.2010, 2.6581], device='cuda:1')
+2024-07-28 12:43:54,398 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.1672, simple_loss=0.2713, pruned_loss=0.03161, over 944034.00 frames.
+2024-07-28 12:43:54,398 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 12:44:17,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.694e+01 6.318e+01 7.255e+01 1.160e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 12:44:28,518 INFO [train.py:1114] (1/4) Epoch 12, batch 6050, loss[loss=0.18, simple_loss=0.2542, pruned_loss=0.05287, over 4783.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2789, pruned_loss=0.05174, over 938580.71 frames. ], batch size: 12, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:44:30,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157994.66666666666, ans=0.1
+2024-07-28 12:44:35,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.94 vs. limit=15.0
+2024-07-28 12:44:56,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=158048.0, ans=0.0
+2024-07-28 12:44:59,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0
+2024-07-28 12:45:01,895 INFO [train.py:1114] (1/4) Epoch 12, batch 6100, loss[loss=0.2155, simple_loss=0.3115, pruned_loss=0.05973, over 4672.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2786, pruned_loss=0.05174, over 938192.53 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:45:05,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158061.33333333334, ans=0.1
+2024-07-28 12:45:07,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.76 vs. limit=15.0
+2024-07-28 12:45:14,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=158088.0, ans=0.125
+2024-07-28 12:45:26,344 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.720e+01 6.414e+01 7.144e+01 1.177e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-28 12:45:36,974 INFO [train.py:1114] (1/4) Epoch 12, batch 6150, loss[loss=0.2384, simple_loss=0.3145, pruned_loss=0.08116, over 3617.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2789, pruned_loss=0.05174, over 937175.20 frames. ], batch size: 35, lr: 6.23e-03, grad_scale: 32.0
+2024-07-28 12:45:41,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=158128.0, ans=0.0
+2024-07-28 12:45:55,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=158154.66666666666, ans=0.0
+2024-07-28 12:46:01,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158168.0, ans=0.1
+2024-07-28 12:46:08,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=158181.33333333334, ans=0.0
+2024-07-28 12:46:11,261 INFO [train.py:1114] (1/4) Epoch 12, batch 6200, loss[loss=0.1468, simple_loss=0.2385, pruned_loss=0.02752, over 4736.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2779, pruned_loss=0.05134, over 936724.01 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:46:11,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=158194.66666666666, ans=0.0
+2024-07-28 12:46:13,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.74 vs. limit=22.5
+2024-07-28 12:46:20,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=158208.0, ans=0.0
+2024-07-28 12:46:24,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=158221.33333333334, ans=0.09899494936611666
+2024-07-28 12:46:24,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=158221.33333333334, ans=0.0
+2024-07-28 12:46:25,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=158221.33333333334, ans=0.125
+2024-07-28 12:46:26,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.13 vs. limit=15.0
+2024-07-28 12:46:33,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. limit=15.0
+2024-07-28 12:46:34,428 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.111e+01 6.861e+01 1.032e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 12:46:35,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=158234.66666666666, ans=0.0
+2024-07-28 12:46:39,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=158248.0, ans=0.125
+2024-07-28 12:46:45,883 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:46:47,153 INFO [train.py:1114] (1/4) Epoch 12, batch 6250, loss[loss=0.1691, simple_loss=0.2593, pruned_loss=0.03946, over 4803.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2785, pruned_loss=0.05164, over 933300.62 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:47:00,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=158274.66666666666, ans=0.125
+2024-07-28 12:47:03,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=158288.0, ans=0.0
+2024-07-28 12:47:04,739 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:47:05,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.41 vs. limit=22.5
+2024-07-28 12:47:06,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=158288.0, ans=0.0
+2024-07-28 12:47:09,631 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.13 vs. limit=6.0
+2024-07-28 12:47:12,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=158301.33333333334, ans=0.125
+2024-07-28 12:47:22,585 INFO [train.py:1114] (1/4) Epoch 12, batch 6300, loss[loss=0.1554, simple_loss=0.2342, pruned_loss=0.03832, over 4542.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2789, pruned_loss=0.05192, over 930380.39 frames. ], batch size: 10, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:47:26,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.83 vs. limit=22.5
+2024-07-28 12:47:32,098 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0
+2024-07-28 12:47:35,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0
+2024-07-28 12:47:36,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=158354.66666666666, ans=0.125
+2024-07-28 12:47:39,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=158354.66666666666, ans=0.125
+2024-07-28 12:47:44,770 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.769e+01 6.275e+01 7.297e+01 9.885e+01, threshold=1.255e+02, percent-clipped=0.0
+2024-07-28 12:47:55,498 INFO [train.py:1114] (1/4) Epoch 12, batch 6350, loss[loss=0.2063, simple_loss=0.2973, pruned_loss=0.05761, over 4507.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2785, pruned_loss=0.0513, over 934256.94 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:48:01,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=158394.66666666666, ans=0.125
+2024-07-28 12:48:06,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.99 vs. limit=22.5
+2024-07-28 12:48:12,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158421.33333333334, ans=0.1
+2024-07-28 12:48:13,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158421.33333333334, ans=0.125
+2024-07-28 12:48:14,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158421.33333333334, ans=0.125
+2024-07-28 12:48:15,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=158434.66666666666, ans=0.125
+2024-07-28 12:48:15,652 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:48:26,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=158448.0, ans=0.125
+2024-07-28 12:48:29,204 INFO [train.py:1114] (1/4) Epoch 12, batch 6400, loss[loss=0.2036, simple_loss=0.297, pruned_loss=0.05515, over 4640.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2787, pruned_loss=0.0514, over 935506.22 frames. ], batch size: 13, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:48:31,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=158461.33333333334, ans=0.07
+2024-07-28 12:48:31,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=158461.33333333334, ans=22.5
+2024-07-28 12:48:32,124 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-28 12:48:53,532 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.709e+01 6.303e+01 7.389e+01 1.106e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 12:48:57,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=158514.66666666666, ans=0.0
+2024-07-28 12:48:59,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=158514.66666666666, ans=0.125
+2024-07-28 12:49:00,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=158514.66666666666, ans=0.125
+2024-07-28 12:49:03,792 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:49:04,231 INFO [train.py:1114] (1/4) Epoch 12, batch 6450, loss[loss=0.1771, simple_loss=0.2787, pruned_loss=0.0378, over 4516.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2796, pruned_loss=0.05144, over 938980.41 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:49:05,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158528.0, ans=0.125
+2024-07-28 12:49:07,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=158528.0, ans=0.125
+2024-07-28 12:49:12,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=158541.33333333334, ans=0.0
+2024-07-28 12:49:17,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=158554.66666666666, ans=0.025
+2024-07-28 12:49:19,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158554.66666666666, ans=0.125
+2024-07-28 12:49:25,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0
+2024-07-28 12:49:26,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=158568.0, ans=0.125
+2024-07-28 12:49:32,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=158581.33333333334, ans=0.5
+2024-07-28 12:49:37,250 INFO [train.py:1114] (1/4) Epoch 12, batch 6500, loss[loss=0.2449, simple_loss=0.3124, pruned_loss=0.08869, over 3179.00 frames. ], tot_loss[loss=0.191, simple_loss=0.279, pruned_loss=0.05147, over 939937.27 frames. ], batch size: 35, lr: 6.22e-03, grad_scale: 32.0
+2024-07-28 12:49:38,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=158594.66666666666, ans=0.125
+2024-07-28 12:49:42,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=158594.66666666666, ans=0.05
+2024-07-28 12:49:46,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158608.0, ans=0.1
+2024-07-28 12:49:55,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.76 vs. limit=15.0
+2024-07-28 12:49:58,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=158634.66666666666, ans=0.125
+2024-07-28 12:49:59,307 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.679e+01 6.205e+01 7.346e+01 1.316e+02, threshold=1.241e+02, percent-clipped=1.0
+2024-07-28 12:50:01,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. limit=6.0
+2024-07-28 12:50:08,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=158648.0, ans=0.2
+2024-07-28 12:50:10,237 INFO [train.py:1114] (1/4) Epoch 12, batch 6550, loss[loss=0.1711, simple_loss=0.2487, pruned_loss=0.04677, over 4796.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2764, pruned_loss=0.04992, over 942844.22 frames. ], batch size: 11, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:50:19,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=158674.66666666666, ans=0.125
+2024-07-28 12:50:33,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=158701.33333333334, ans=0.0
+2024-07-28 12:50:43,172 INFO [train.py:1114] (1/4) Epoch 12, batch 6600, loss[loss=0.2438, simple_loss=0.3168, pruned_loss=0.08541, over 4932.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2774, pruned_loss=0.05079, over 944698.60 frames. ], batch size: 14, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:50:43,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158728.0, ans=0.1
+2024-07-28 12:50:57,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.80 vs. limit=15.0
+2024-07-28 12:51:07,914 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.716e+01 6.452e+01 7.100e+01 1.307e+02, threshold=1.290e+02, percent-clipped=2.0
+2024-07-28 12:51:22,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=158781.33333333334, ans=0.125
+2024-07-28 12:51:23,533 INFO [train.py:1114] (1/4) Epoch 12, batch 6650, loss[loss=0.2006, simple_loss=0.305, pruned_loss=0.04808, over 4617.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2773, pruned_loss=0.0507, over 943484.03 frames. ], batch size: 17, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:51:33,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=158794.66666666666, ans=0.125
+2024-07-28 12:51:36,685 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=22.5
+2024-07-28 12:51:40,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.67 vs. limit=12.0
+2024-07-28 12:51:49,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=158821.33333333334, ans=0.125
+2024-07-28 12:51:52,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=158834.66666666666, ans=0.125
+2024-07-28 12:51:57,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=158834.66666666666, ans=0.025
+2024-07-28 12:52:04,751 INFO [train.py:1114] (1/4) Epoch 12, batch 6700, loss[loss=0.2377, simple_loss=0.3104, pruned_loss=0.08251, over 4698.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2772, pruned_loss=0.05092, over 942248.77 frames. ], batch size: 19, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:52:06,469 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.30 vs. limit=15.0
+2024-07-28 12:52:31,271 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 5.714e+01 6.445e+01 7.279e+01 1.274e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 12:52:37,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.16 vs. limit=22.5
+2024-07-28 12:52:42,415 INFO [train.py:1114] (1/4) Epoch 12, batch 6750, loss[loss=0.1992, simple_loss=0.2933, pruned_loss=0.05254, over 4303.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2769, pruned_loss=0.05061, over 940611.84 frames. ], batch size: 26, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:52:53,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158941.33333333334, ans=0.1
+2024-07-28 12:52:55,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=158954.66666666666, ans=0.2
+2024-07-28 12:53:07,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.46 vs. limit=22.5
+2024-07-28 12:53:13,101 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:53:16,209 INFO [train.py:1114] (1/4) Epoch 12, batch 6800, loss[loss=0.2113, simple_loss=0.3117, pruned_loss=0.05543, over 4632.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2786, pruned_loss=0.05154, over 938797.53 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:53:16,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=158994.66666666666, ans=0.0
+2024-07-28 12:53:20,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=158994.66666666666, ans=0.125
+2024-07-28 12:53:35,350 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:53:38,553 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.632e+01 6.105e+01 6.995e+01 1.094e+02, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 12:53:41,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=159034.66666666666, ans=0.0
+2024-07-28 12:53:42,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=159048.0, ans=0.125
+2024-07-28 12:53:42,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=159048.0, ans=0.95
+2024-07-28 12:53:42,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159048.0, ans=0.1
+2024-07-28 12:53:44,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=159048.0, ans=0.0
+2024-07-28 12:53:49,460 INFO [train.py:1114] (1/4) Epoch 12, batch 6850, loss[loss=0.1734, simple_loss=0.2794, pruned_loss=0.03371, over 4690.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2788, pruned_loss=0.05154, over 940480.72 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:53:52,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.66 vs. limit=22.5
+2024-07-28 12:53:56,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=159074.66666666666, ans=0.05
+2024-07-28 12:53:58,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=159074.66666666666, ans=0.2
+2024-07-28 12:53:59,067 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.21 vs. limit=15.0
+2024-07-28 12:54:13,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.04 vs. limit=22.5
+2024-07-28 12:54:24,190 INFO [train.py:1114] (1/4) Epoch 12, batch 6900, loss[loss=0.174, simple_loss=0.2599, pruned_loss=0.04408, over 4951.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2796, pruned_loss=0.05166, over 942896.32 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0
+2024-07-28 12:54:30,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=159141.33333333334, ans=0.125
+2024-07-28 12:54:45,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159168.0, ans=0.125
+2024-07-28 12:54:46,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.739e+01 5.528e+01 6.156e+01 7.028e+01 9.720e+01, threshold=1.231e+02, percent-clipped=0.0
+2024-07-28 12:54:57,579 INFO [train.py:1114] (1/4) Epoch 12, batch 6950, loss[loss=0.1484, simple_loss=0.235, pruned_loss=0.03094, over 4533.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2803, pruned_loss=0.05244, over 940290.29 frames. ], batch size: 10, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:55:10,228 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.81 vs. limit=22.5
+2024-07-28 12:55:13,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=159221.33333333334, ans=0.0
+2024-07-28 12:55:15,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159221.33333333334, ans=0.125
+2024-07-28 12:55:22,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.01 vs. limit=6.0
+2024-07-28 12:55:24,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=159248.0, ans=0.0
+2024-07-28 12:55:30,914 INFO [train.py:1114] (1/4) Epoch 12, batch 7000, loss[loss=0.2122, simple_loss=0.2944, pruned_loss=0.06498, over 4668.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2797, pruned_loss=0.05212, over 938607.80 frames. ], batch size: 17, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:55:40,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=12.0
+2024-07-28 12:55:45,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=159288.0, ans=0.125
+2024-07-28 12:55:48,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=159288.0, ans=0.125
+2024-07-28 12:55:53,272 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.935e+01 5.641e+01 6.482e+01 7.445e+01 1.063e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 12:55:57,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=159301.33333333334, ans=0.0
+2024-07-28 12:56:02,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159314.66666666666, ans=0.125
+2024-07-28 12:56:07,636 INFO [train.py:1114] (1/4) Epoch 12, batch 7050, loss[loss=0.2308, simple_loss=0.3252, pruned_loss=0.06818, over 4707.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2792, pruned_loss=0.05116, over 941912.07 frames. ], batch size: 19, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:56:09,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=159328.0, ans=0.125
+2024-07-28 12:56:13,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=159328.0, ans=0.0
+2024-07-28 12:56:19,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.35 vs. limit=22.5
+2024-07-28 12:56:32,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=159368.0, ans=0.025
+2024-07-28 12:56:33,994 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:56:37,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=159381.33333333334, ans=0.125
+2024-07-28 12:56:42,912 INFO [train.py:1114] (1/4) Epoch 12, batch 7100, loss[loss=0.2018, simple_loss=0.2972, pruned_loss=0.0532, over 4798.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2806, pruned_loss=0.05237, over 936264.20 frames. ], batch size: 15, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:56:58,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=159421.33333333334, ans=0.07
+2024-07-28 12:57:00,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=159421.33333333334, ans=0.125
+2024-07-28 12:57:04,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159434.66666666666, ans=0.125
+2024-07-28 12:57:06,526 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.638e+01 6.257e+01 7.591e+01 1.588e+02, threshold=1.251e+02, percent-clipped=2.0
+2024-07-28 12:57:08,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=159434.66666666666, ans=0.2
+2024-07-28 12:57:13,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159448.0, ans=0.125
+2024-07-28 12:57:16,936 INFO [train.py:1114] (1/4) Epoch 12, batch 7150, loss[loss=0.2042, simple_loss=0.2967, pruned_loss=0.05587, over 4571.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.279, pruned_loss=0.0521, over 937421.67 frames. ], batch size: 21, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:57:20,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=159461.33333333334, ans=0.2
+2024-07-28 12:57:31,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=159488.0, ans=0.0
+2024-07-28 12:57:32,690 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.32 vs. limit=15.0
+2024-07-28 12:57:33,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=159488.0, ans=0.0
+2024-07-28 12:57:41,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159501.33333333334, ans=0.1
+2024-07-28 12:57:49,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159514.66666666666, ans=0.125
+2024-07-28 12:57:49,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=159514.66666666666, ans=0.0
+2024-07-28 12:57:51,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.45 vs. limit=15.0
+2024-07-28 12:57:51,490 INFO [train.py:1114] (1/4) Epoch 12, batch 7200, loss[loss=0.199, simple_loss=0.2931, pruned_loss=0.05243, over 4808.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2805, pruned_loss=0.05262, over 937887.54 frames. ], batch size: 15, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:58:18,798 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.813e+01 6.361e+01 7.395e+01 9.715e+01, threshold=1.272e+02, percent-clipped=0.0
+2024-07-28 12:58:21,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=159568.0, ans=0.0
+2024-07-28 12:58:22,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=159581.33333333334, ans=0.125
+2024-07-28 12:58:24,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=159581.33333333334, ans=0.125
+2024-07-28 12:58:28,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=159581.33333333334, ans=0.0
+2024-07-28 12:58:29,556 INFO [train.py:1114] (1/4) Epoch 12, batch 7250, loss[loss=0.152, simple_loss=0.2406, pruned_loss=0.03173, over 4857.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2795, pruned_loss=0.05229, over 939699.57 frames. ], batch size: 12, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:58:35,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.39 vs. limit=10.0
+2024-07-28 12:58:36,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=159608.0, ans=0.125
+2024-07-28 12:58:38,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159608.0, ans=0.1
+2024-07-28 12:58:43,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=159621.33333333334, ans=0.2
+2024-07-28 12:58:54,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.87 vs. limit=15.0
+2024-07-28 12:59:02,435 INFO [train.py:1114] (1/4) Epoch 12, batch 7300, loss[loss=0.1448, simple_loss=0.2362, pruned_loss=0.02669, over 4849.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05155, over 940719.56 frames. ], batch size: 12, lr: 6.20e-03, grad_scale: 32.0
+2024-07-28 12:59:05,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=159661.33333333334, ans=0.0
+2024-07-28 12:59:08,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159674.66666666666, ans=0.125
+2024-07-28 12:59:12,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=159674.66666666666, ans=0.2
+2024-07-28 12:59:17,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=159688.0, ans=0.0
+2024-07-28 12:59:19,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=159688.0, ans=0.125
+2024-07-28 12:59:27,309 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.393e+01 5.789e+01 6.409e+01 1.096e+02, threshold=1.158e+02, percent-clipped=0.0
+2024-07-28 12:59:28,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=159701.33333333334, ans=0.2
+2024-07-28 12:59:35,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=159714.66666666666, ans=0.2
+2024-07-28 12:59:37,710 INFO [train.py:1114] (1/4) Epoch 12, batch 7350, loss[loss=0.1857, simple_loss=0.2702, pruned_loss=0.05056, over 4647.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2792, pruned_loss=0.05156, over 939754.37 frames. ], batch size: 12, lr: 6.19e-03, grad_scale: 32.0
+2024-07-28 12:59:37,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159728.0, ans=0.1
+2024-07-28 12:59:53,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159741.33333333334, ans=0.125
+2024-07-28 12:59:59,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159754.66666666666, ans=0.1
+2024-07-28 13:00:06,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159768.0, ans=0.125
+2024-07-28 13:00:17,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0
+2024-07-28 13:00:17,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=159781.33333333334, ans=0.0
+2024-07-28 13:00:18,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=159781.33333333334, ans=0.05
+2024-07-28 13:00:20,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.91 vs. limit=22.5
+2024-07-28 13:00:23,178 INFO [train.py:1114] (1/4) Epoch 12, batch 7400, loss[loss=0.2226, simple_loss=0.3206, pruned_loss=0.06231, over 4694.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2807, pruned_loss=0.05244, over 940545.37 frames. ], batch size: 13, lr: 6.19e-03, grad_scale: 32.0
+2024-07-28 13:00:50,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159821.33333333334, ans=0.125
+2024-07-28 13:00:52,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.21 vs. limit=6.0
+2024-07-28 13:00:52,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=159834.66666666666, ans=0.025
+2024-07-28 13:00:55,281 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.910e+01 6.477e+01 7.704e+01 1.281e+02, threshold=1.295e+02, percent-clipped=1.0
+2024-07-28 13:01:05,627 INFO [train.py:1114] (1/4) Epoch 12, batch 7450, loss[loss=0.197, simple_loss=0.2643, pruned_loss=0.06481, over 4628.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2796, pruned_loss=0.05241, over 937747.86 frames. ], batch size: 11, lr: 6.19e-03, grad_scale: 32.0
+2024-07-28 13:01:10,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=159861.33333333334, ans=0.0
+2024-07-28 13:01:22,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.67 vs. limit=12.0
+2024-07-28 13:01:26,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.98 vs. limit=15.0
+2024-07-28 13:01:27,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=159901.33333333334, ans=0.2
+2024-07-28 13:01:32,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159914.66666666666, ans=0.125
+2024-07-28 13:01:38,266 INFO [train.py:1114] (1/4) Epoch 12, batch 7500, loss[loss=0.1999, simple_loss=0.2857, pruned_loss=0.05703, over 3440.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2794, pruned_loss=0.0525, over 936192.24 frames. ], batch size: 35, lr: 6.19e-03, grad_scale: 32.0
+2024-07-28 13:02:08,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=159941.33333333334, ans=0.0
+2024-07-28 13:02:11,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=159941.33333333334, ans=0.125
+2024-07-28 13:05:16,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.752e+01 6.223e+01 6.904e+01 1.181e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 13:05:23,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159981.33333333334, ans=0.125
+2024-07-28 13:05:25,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159981.33333333334, ans=0.125
+2024-07-28 13:05:26,948 INFO [train.py:1114] (1/4) Epoch 12, batch 7550, loss[loss=0.1974, simple_loss=0.2916, pruned_loss=0.0516, over 4625.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2807, pruned_loss=0.05265, over 936089.39 frames. ], batch size: 17, lr: 6.19e-03, grad_scale: 32.0
+2024-07-28 13:05:27,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=159994.66666666666, ans=0.1
+2024-07-28 13:05:34,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=159994.66666666666, ans=0.2
+2024-07-28 13:05:36,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159994.66666666666, ans=0.1
+2024-07-28 13:05:42,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=160008.0, ans=0.0
+2024-07-28 13:05:45,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.85 vs. limit=22.5
+2024-07-28 13:05:50,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=160034.66666666666, ans=0.125
+2024-07-28 13:05:52,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160034.66666666666, ans=0.0
+2024-07-28 13:05:59,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160048.0, ans=0.125
+2024-07-28 13:06:04,265 INFO [train.py:1114] (1/4) Epoch 12, batch 7600, loss[loss=0.1807, simple_loss=0.2749, pruned_loss=0.04324, over 4808.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2794, pruned_loss=0.05192, over 938166.65 frames. ], batch size: 14, lr: 6.19e-03, grad_scale: 32.0
+2024-07-28 13:06:05,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160061.33333333334, ans=0.1
+2024-07-28 13:06:06,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=160061.33333333334, ans=0.0
+2024-07-28 13:06:07,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=160061.33333333334, ans=0.0
+2024-07-28 13:06:13,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160074.66666666666, ans=0.1
+2024-07-28 13:06:18,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.67 vs. limit=15.0
+2024-07-28 13:06:19,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=160088.0, ans=0.0
+2024-07-28 13:06:26,387 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.580e+01 6.028e+01 7.060e+01 1.012e+02, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 13:06:28,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=160101.33333333334, ans=0.125
+2024-07-28 13:06:29,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=160101.33333333334, ans=0.0
+2024-07-28 13:06:34,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160114.66666666666, ans=0.1
+2024-07-28 13:06:35,190 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.92 vs. limit=22.5
+2024-07-28 13:06:36,730 INFO [train.py:1114] (1/4) Epoch 12, batch 7650, loss[loss=0.1739, simple_loss=0.2613, pruned_loss=0.04322, over 4940.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2793, pruned_loss=0.05199, over 936938.40 frames. ], batch size: 12, lr: 6.19e-03, grad_scale: 64.0
+2024-07-28 13:06:40,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.08 vs. limit=15.0
+2024-07-28 13:07:03,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160181.33333333334, ans=0.125
+2024-07-28 13:07:04,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=160181.33333333334, ans=0.125
+2024-07-28 13:07:06,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=160181.33333333334, ans=10.0
+2024-07-28 13:07:10,992 INFO [train.py:1114] (1/4) Epoch 12, batch 7700, loss[loss=0.1939, simple_loss=0.2862, pruned_loss=0.05082, over 4696.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.28, pruned_loss=0.05161, over 934181.97 frames. ], batch size: 13, lr: 6.18e-03, grad_scale: 64.0
+2024-07-28 13:07:13,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=160194.66666666666, ans=0.025
+2024-07-28 13:07:14,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=160194.66666666666, ans=0.125
+2024-07-28 13:07:23,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=160221.33333333334, ans=0.0
+2024-07-28 13:07:24,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=160221.33333333334, ans=0.125
+2024-07-28 13:07:25,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160221.33333333334, ans=0.1
+2024-07-28 13:07:32,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.585e+01 6.116e+01 6.946e+01 9.555e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 13:07:37,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160248.0, ans=0.0
+2024-07-28 13:07:40,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=160248.0, ans=0.125
+2024-07-28 13:07:43,366 INFO [train.py:1114] (1/4) Epoch 12, batch 7750, loss[loss=0.1879, simple_loss=0.2891, pruned_loss=0.04336, over 4936.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2804, pruned_loss=0.05167, over 935153.67 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0
+2024-07-28 13:08:04,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=160301.33333333334, ans=0.0
+2024-07-28 13:08:10,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=160314.66666666666, ans=0.0
+2024-07-28 13:08:13,161 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.89 vs. limit=15.0
+2024-07-28 13:08:16,040 INFO [train.py:1114] (1/4) Epoch 12, batch 7800, loss[loss=0.2134, simple_loss=0.3116, pruned_loss=0.05755, over 4655.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2806, pruned_loss=0.05135, over 936898.97 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0
+2024-07-28 13:08:20,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=160328.0, ans=0.0
+2024-07-28 13:08:23,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=160341.33333333334, ans=0.0
+2024-07-28 13:08:24,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0
+2024-07-28 13:08:38,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.545e+01 6.012e+01 6.981e+01 9.442e+01, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 13:08:48,344 INFO [train.py:1114] (1/4) Epoch 12, batch 7850, loss[loss=0.1663, simple_loss=0.2501, pruned_loss=0.04125, over 4526.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2808, pruned_loss=0.05195, over 936235.98 frames. ], batch size: 10, lr: 6.18e-03, grad_scale: 32.0
+2024-07-28 13:08:55,905 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.37 vs. limit=15.0
+2024-07-28 13:09:10,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=160434.66666666666, ans=0.0
+2024-07-28 13:09:14,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=160448.0, ans=0.125
+2024-07-28 13:09:16,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160448.0, ans=0.1
+2024-07-28 13:09:16,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=160448.0, ans=0.04949747468305833
+2024-07-28 13:09:21,154 INFO [train.py:1114] (1/4) Epoch 12, batch 7900, loss[loss=0.1945, simple_loss=0.2916, pruned_loss=0.04873, over 4873.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2811, pruned_loss=0.05172, over 933242.51 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0
+2024-07-28 13:09:33,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=160488.0, ans=0.05
+2024-07-28 13:09:34,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.27 vs. limit=22.5
+2024-07-28 13:09:34,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.80 vs. limit=22.5
+2024-07-28 13:09:40,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=160501.33333333334, ans=0.2
+2024-07-28 13:09:43,188 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.515e+01 6.026e+01 6.730e+01 9.606e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 13:09:50,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=160514.66666666666, ans=0.5
+2024-07-28 13:09:51,068 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0
+2024-07-28 13:09:53,231 INFO [train.py:1114] (1/4) Epoch 12, batch 7950, loss[loss=0.2474, simple_loss=0.3091, pruned_loss=0.09284, over 3459.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2806, pruned_loss=0.05168, over 935739.99 frames. ], batch size: 35, lr: 6.18e-03, grad_scale: 32.0
+2024-07-28 13:10:04,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=160541.33333333334, ans=0.125
+2024-07-28 13:10:06,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=160554.66666666666, ans=0.125
+2024-07-28 13:10:06,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.24 vs. 
limit=15.0 +2024-07-28 13:10:21,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=160581.33333333334, ans=0.05 +2024-07-28 13:10:25,951 INFO [train.py:1114] (1/4) Epoch 12, batch 8000, loss[loss=0.1588, simple_loss=0.2382, pruned_loss=0.03972, over 4622.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2797, pruned_loss=0.05157, over 934475.56 frames. ], batch size: 11, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:10:26,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160594.66666666666, ans=0.0 +2024-07-28 13:10:29,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160594.66666666666, ans=0.1 +2024-07-28 13:10:34,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.07 vs. limit=15.0 +2024-07-28 13:10:41,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-07-28 13:10:48,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.55 vs. limit=15.0 +2024-07-28 13:10:50,017 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.959e+01 6.918e+01 8.297e+01 1.204e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 13:10:52,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=160648.0, ans=0.1 +2024-07-28 13:10:53,187 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.01 vs. limit=10.0 +2024-07-28 13:10:54,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=160648.0, ans=0.125 +2024-07-28 13:11:00,326 INFO [train.py:1114] (1/4) Epoch 12, batch 8050, loss[loss=0.1758, simple_loss=0.2739, pruned_loss=0.03881, over 4812.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2796, pruned_loss=0.05145, over 934296.05 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:11:11,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160674.66666666666, ans=0.125 +2024-07-28 13:11:13,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160688.0, ans=0.125 +2024-07-28 13:11:18,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=160688.0, ans=0.0 +2024-07-28 13:11:20,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.14 vs. 
limit=15.0 +2024-07-28 13:11:22,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160701.33333333334, ans=0.0 +2024-07-28 13:11:29,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=160714.66666666666, ans=0.0 +2024-07-28 13:11:32,801 INFO [train.py:1114] (1/4) Epoch 12, batch 8100, loss[loss=0.1855, simple_loss=0.275, pruned_loss=0.04796, over 4812.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2799, pruned_loss=0.05162, over 934036.45 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:11:33,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=160728.0, ans=0.125 +2024-07-28 13:11:55,383 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.710e+01 6.581e+01 7.221e+01 1.063e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 13:11:58,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160781.33333333334, ans=0.1 +2024-07-28 13:12:05,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=160794.66666666666, ans=0.025 +2024-07-28 13:12:06,448 INFO [train.py:1114] (1/4) Epoch 12, batch 8150, loss[loss=0.205, simple_loss=0.2923, pruned_loss=0.05887, over 4797.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2786, pruned_loss=0.05118, over 937549.97 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:18,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=160821.33333333334, ans=0.2 +2024-07-28 13:12:26,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.66 vs. limit=22.5 +2024-07-28 13:12:27,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.65 vs. limit=15.0 +2024-07-28 13:12:30,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=160834.66666666666, ans=0.125 +2024-07-28 13:12:39,543 INFO [train.py:1114] (1/4) Epoch 12, batch 8200, loss[loss=0.1771, simple_loss=0.2837, pruned_loss=0.03526, over 4794.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2786, pruned_loss=0.05112, over 938281.48 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:40,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=160861.33333333334, ans=0.025 +2024-07-28 13:12:47,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.03 vs. 
limit=10.0 +2024-07-28 13:12:56,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=160888.0, ans=10.0 +2024-07-28 13:12:57,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160888.0, ans=0.125 +2024-07-28 13:13:03,299 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.545e+01 6.398e+01 7.080e+01 1.151e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:13:05,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160901.33333333334, ans=0.125 +2024-07-28 13:13:12,840 INFO [train.py:1114] (1/4) Epoch 12, batch 8250, loss[loss=0.1519, simple_loss=0.2608, pruned_loss=0.02151, over 4891.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2787, pruned_loss=0.05115, over 938691.28 frames. ], batch size: 13, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:23,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.35 vs. limit=22.5 +2024-07-28 13:13:24,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=160941.33333333334, ans=0.025 +2024-07-28 13:13:29,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160954.66666666666, ans=0.1 +2024-07-28 13:13:34,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160968.0, ans=0.1 +2024-07-28 13:13:40,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=160981.33333333334, ans=0.2 +2024-07-28 13:13:44,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160981.33333333334, ans=0.1 +2024-07-28 13:13:45,317 INFO [train.py:1114] (1/4) Epoch 12, batch 8300, loss[loss=0.1976, simple_loss=0.2867, pruned_loss=0.05421, over 4896.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.28, pruned_loss=0.05193, over 938453.90 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:46,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=160994.66666666666, ans=0.0 +2024-07-28 13:13:47,045 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.01 vs. limit=15.0 +2024-07-28 13:13:49,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=160994.66666666666, ans=0.0 +2024-07-28 13:13:53,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161008.0, ans=0.1 +2024-07-28 13:13:53,371 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.40 vs. 
limit=15.0 +2024-07-28 13:14:04,766 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:14:07,774 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.767e+01 6.201e+01 7.053e+01 1.187e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 13:14:17,543 INFO [train.py:1114] (1/4) Epoch 12, batch 8350, loss[loss=0.1981, simple_loss=0.2882, pruned_loss=0.05403, over 4812.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2799, pruned_loss=0.05178, over 941244.43 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:31,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=161088.0, ans=0.2 +2024-07-28 13:14:35,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=161088.0, ans=0.0 +2024-07-28 13:14:35,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=161088.0, ans=0.125 +2024-07-28 13:14:41,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=161101.33333333334, ans=0.2 +2024-07-28 13:14:41,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161101.33333333334, ans=0.1 +2024-07-28 13:14:46,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161114.66666666666, ans=0.1 +2024-07-28 13:14:50,099 INFO [train.py:1114] (1/4) Epoch 12, batch 8400, loss[loss=0.1905, simple_loss=0.2834, pruned_loss=0.04878, over 4784.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.28, pruned_loss=0.05193, over 939470.49 frames. ], batch size: 12, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:50,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.01 vs. limit=15.0 +2024-07-28 13:14:52,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.88 vs. limit=15.0 +2024-07-28 13:15:06,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=161154.66666666666, ans=0.0 +2024-07-28 13:15:07,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161154.66666666666, ans=0.1 +2024-07-28 13:15:13,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.899e+01 6.443e+01 7.292e+01 9.298e+01, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 13:15:20,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=161181.33333333334, ans=0.125 +2024-07-28 13:15:23,162 INFO [train.py:1114] (1/4) Epoch 12, batch 8450, loss[loss=0.2337, simple_loss=0.3267, pruned_loss=0.0703, over 4803.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2807, pruned_loss=0.05204, over 938684.78 frames. 
], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:15:23,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=161194.66666666666, ans=0.125 +2024-07-28 13:15:28,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=161208.0, ans=0.0 +2024-07-28 13:15:29,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=161208.0, ans=0.07 +2024-07-28 13:15:33,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.41 vs. limit=6.0 +2024-07-28 13:15:33,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=161208.0, ans=0.125 +2024-07-28 13:15:47,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=161234.66666666666, ans=12.0 +2024-07-28 13:15:49,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=161248.0, ans=0.125 +2024-07-28 13:15:53,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=161248.0, ans=0.125 +2024-07-28 13:15:55,155 INFO [train.py:1114] (1/4) Epoch 12, batch 8500, loss[loss=0.1769, simple_loss=0.2652, pruned_loss=0.04431, over 4619.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2801, pruned_loss=0.05154, over 938537.93 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:01,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.70 vs. limit=22.5 +2024-07-28 13:16:17,529 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.769e+01 6.331e+01 7.345e+01 1.019e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 13:16:18,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=161301.33333333334, ans=0.0 +2024-07-28 13:16:27,213 INFO [train.py:1114] (1/4) Epoch 12, batch 8550, loss[loss=0.1657, simple_loss=0.2475, pruned_loss=0.04195, over 4805.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2801, pruned_loss=0.05156, over 939702.18 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:36,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=161341.33333333334, ans=0.0 +2024-07-28 13:17:00,035 INFO [train.py:1114] (1/4) Epoch 12, batch 8600, loss[loss=0.2067, simple_loss=0.2907, pruned_loss=0.06134, over 4800.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2797, pruned_loss=0.05144, over 939584.81 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:09,072 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.81 vs. limit=12.0 +2024-07-28 13:17:14,198 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. 
limit=12.0 +2024-07-28 13:17:23,226 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.734e+01 6.405e+01 7.244e+01 9.929e+01, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 13:17:31,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0 +2024-07-28 13:17:32,664 INFO [train.py:1114] (1/4) Epoch 12, batch 8650, loss[loss=0.2384, simple_loss=0.3202, pruned_loss=0.07832, over 4908.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2793, pruned_loss=0.05113, over 940652.30 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:39,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=161461.33333333334, ans=0.2 +2024-07-28 13:17:43,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.12 vs. limit=22.5 +2024-07-28 13:17:59,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=161501.33333333334, ans=0.0 +2024-07-28 13:18:03,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=161501.33333333334, ans=0.0 +2024-07-28 13:18:11,211 INFO [train.py:1114] (1/4) Epoch 12, batch 8700, loss[loss=0.216, simple_loss=0.2909, pruned_loss=0.07056, over 4760.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2808, pruned_loss=0.052, over 938142.50 frames. ], batch size: 13, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:18:14,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=161528.0, ans=0.125 +2024-07-28 13:18:16,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=161528.0, ans=0.125 +2024-07-28 13:18:17,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=161541.33333333334, ans=0.125 +2024-07-28 13:18:26,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=161554.66666666666, ans=0.95 +2024-07-28 13:18:34,019 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.645e+01 6.105e+01 7.078e+01 1.033e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 13:18:34,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161568.0, ans=0.1 +2024-07-28 13:18:36,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161568.0, ans=0.125 +2024-07-28 13:18:41,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=161581.33333333334, ans=0.0 +2024-07-28 13:18:43,670 INFO [train.py:1114] (1/4) Epoch 12, batch 8750, loss[loss=0.2016, simple_loss=0.2822, pruned_loss=0.06048, over 4693.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2808, pruned_loss=0.05213, over 936852.62 frames. 
], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:18:43,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=161594.66666666666, ans=0.025 +2024-07-28 13:18:47,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=161594.66666666666, ans=0.0 +2024-07-28 13:18:52,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161608.0, ans=0.125 +2024-07-28 13:18:53,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=161608.0, ans=0.1 +2024-07-28 13:18:59,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=161621.33333333334, ans=0.0 +2024-07-28 13:19:13,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=161648.0, ans=0.125 +2024-07-28 13:19:16,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.29 vs. limit=15.0 +2024-07-28 13:19:16,559 INFO [train.py:1114] (1/4) Epoch 12, batch 8800, loss[loss=0.1978, simple_loss=0.2951, pruned_loss=0.05024, over 4926.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2818, pruned_loss=0.05241, over 937487.31 frames. ], batch size: 14, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:40,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.675e+01 6.216e+01 7.145e+01 9.386e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:19:48,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.30 vs. limit=15.0 +2024-07-28 13:19:50,055 INFO [train.py:1114] (1/4) Epoch 12, batch 8850, loss[loss=0.2029, simple_loss=0.2894, pruned_loss=0.05815, over 4348.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2815, pruned_loss=0.05245, over 932670.62 frames. ], batch size: 21, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:56,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=161741.33333333334, ans=0.0 +2024-07-28 13:19:58,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161741.33333333334, ans=0.0 +2024-07-28 13:20:03,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=161754.66666666666, ans=0.025 +2024-07-28 13:20:04,899 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.25 vs. limit=22.5 +2024-07-28 13:20:05,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=161754.66666666666, ans=0.125 +2024-07-28 13:20:17,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-07-28 13:20:23,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. 
limit=6.0 +2024-07-28 13:20:23,563 INFO [train.py:1114] (1/4) Epoch 12, batch 8900, loss[loss=0.1995, simple_loss=0.2861, pruned_loss=0.05647, over 4940.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2818, pruned_loss=0.05249, over 930622.09 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:20:23,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.27 vs. limit=15.0 +2024-07-28 13:20:28,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161794.66666666666, ans=0.125 +2024-07-28 13:20:30,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=161808.0, ans=0.0 +2024-07-28 13:20:30,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=161808.0, ans=0.125 +2024-07-28 13:20:38,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=161821.33333333334, ans=0.07 +2024-07-28 13:20:45,938 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 5.749e+01 6.497e+01 7.319e+01 1.057e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 13:20:47,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=161834.66666666666, ans=0.035 +2024-07-28 13:20:55,341 INFO [train.py:1114] (1/4) Epoch 12, batch 8950, loss[loss=0.2171, simple_loss=0.3084, pruned_loss=0.06291, over 4537.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2804, pruned_loss=0.05187, over 931539.45 frames. ], batch size: 21, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:21:26,981 INFO [train.py:1114] (1/4) Epoch 12, batch 9000, loss[loss=0.181, simple_loss=0.2646, pruned_loss=0.04871, over 4642.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2776, pruned_loss=0.05105, over 933833.84 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:21:26,981 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 13:21:39,258 INFO [train.py:1146] (1/4) Epoch 12, validation: loss=0.1673, simple_loss=0.2713, pruned_loss=0.03166, over 944034.00 frames. +2024-07-28 13:21:39,259 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 13:21:50,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161941.33333333334, ans=0.125 +2024-07-28 13:21:56,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=161954.66666666666, ans=0.125 +2024-07-28 13:21:56,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=161954.66666666666, ans=0.125 +2024-07-28 13:21:57,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.37 vs. 
limit=10.0 +2024-07-28 13:21:57,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=161954.66666666666, ans=0.125 +2024-07-28 13:22:02,098 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.644e+01 6.027e+01 6.782e+01 9.850e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 13:22:10,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=161981.33333333334, ans=0.025 +2024-07-28 13:22:11,778 INFO [train.py:1114] (1/4) Epoch 12, batch 9050, loss[loss=0.1603, simple_loss=0.2523, pruned_loss=0.03421, over 4569.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2777, pruned_loss=0.05129, over 934478.57 frames. ], batch size: 10, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:22:12,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.27 vs. limit=22.5 +2024-07-28 13:22:15,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=161994.66666666666, ans=0.5 +2024-07-28 13:22:18,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=162008.0, ans=0.125 +2024-07-28 13:22:18,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162008.0, ans=0.125 +2024-07-28 13:22:21,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=162008.0, ans=0.125 +2024-07-28 13:22:37,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=162048.0, ans=0.125 +2024-07-28 13:22:43,278 INFO [train.py:1114] (1/4) Epoch 12, batch 9100, loss[loss=0.1732, simple_loss=0.2691, pruned_loss=0.0386, over 4933.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2765, pruned_loss=0.0505, over 937047.99 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:22:53,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.26 vs. limit=15.0 +2024-07-28 13:23:06,062 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.681e+01 6.344e+01 7.391e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 13:23:06,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=162101.33333333334, ans=0.125 +2024-07-28 13:23:10,790 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.61 vs. limit=15.0 +2024-07-28 13:23:15,553 INFO [train.py:1114] (1/4) Epoch 12, batch 9150, loss[loss=0.1821, simple_loss=0.2808, pruned_loss=0.04171, over 4811.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2779, pruned_loss=0.05096, over 936099.74 frames. 
], batch size: 14, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:23:19,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162128.0, ans=0.1 +2024-07-28 13:23:24,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=162141.33333333334, ans=0.125 +2024-07-28 13:23:31,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=162154.66666666666, ans=0.025 +2024-07-28 13:23:31,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=162154.66666666666, ans=0.0 +2024-07-28 13:23:41,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=162181.33333333334, ans=0.125 +2024-07-28 13:23:44,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=162181.33333333334, ans=0.125 +2024-07-28 13:23:47,215 INFO [train.py:1114] (1/4) Epoch 12, batch 9200, loss[loss=0.1551, simple_loss=0.2411, pruned_loss=0.03461, over 4850.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.277, pruned_loss=0.05093, over 937488.94 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:23:48,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=162194.66666666666, ans=0.0 +2024-07-28 13:23:54,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=162208.0, ans=0.0 +2024-07-28 13:24:02,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162221.33333333334, ans=0.125 +2024-07-28 13:24:07,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=162234.66666666666, ans=0.125 +2024-07-28 13:24:09,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.530e+01 6.301e+01 7.507e+01 1.119e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 13:24:19,317 INFO [train.py:1114] (1/4) Epoch 12, batch 9250, loss[loss=0.2031, simple_loss=0.29, pruned_loss=0.05811, over 4628.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2773, pruned_loss=0.05048, over 938334.31 frames. ], batch size: 13, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:24:28,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=162274.66666666666, ans=0.0 +2024-07-28 13:24:35,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.98 vs. limit=12.0 +2024-07-28 13:24:51,029 INFO [train.py:1114] (1/4) Epoch 12, batch 9300, loss[loss=0.1781, simple_loss=0.2628, pruned_loss=0.04669, over 4782.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2788, pruned_loss=0.05143, over 938129.47 frames. 
], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:24:53,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=162328.0, ans=0.0 +2024-07-28 13:24:54,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=162328.0, ans=0.2 +2024-07-28 13:25:05,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.34 vs. limit=12.0 +2024-07-28 13:25:10,485 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:25:11,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=162368.0, ans=0.09899494936611666 +2024-07-28 13:25:13,004 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+01 5.656e+01 6.395e+01 7.099e+01 1.199e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 13:25:19,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=162381.33333333334, ans=0.125 +2024-07-28 13:25:20,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=162381.33333333334, ans=0.125 +2024-07-28 13:25:22,435 INFO [train.py:1114] (1/4) Epoch 12, batch 9350, loss[loss=0.153, simple_loss=0.2384, pruned_loss=0.03377, over 4821.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2784, pruned_loss=0.05139, over 935261.34 frames. ], batch size: 11, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:25:23,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162394.66666666666, ans=0.125 +2024-07-28 13:25:31,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162408.0, ans=0.125 +2024-07-28 13:25:31,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=162408.0, ans=0.125 +2024-07-28 13:25:31,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=162408.0, ans=0.0 +2024-07-28 13:25:38,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162421.33333333334, ans=0.1 +2024-07-28 13:25:41,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.22 vs. limit=22.5 +2024-07-28 13:25:45,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162434.66666666666, ans=0.125 +2024-07-28 13:25:53,647 INFO [train.py:1114] (1/4) Epoch 12, batch 9400, loss[loss=0.1849, simple_loss=0.2834, pruned_loss=0.04323, over 4688.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.278, pruned_loss=0.05093, over 933310.51 frames. 
], batch size: 13, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:25:57,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162461.33333333334, ans=0.1 +2024-07-28 13:25:57,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=162461.33333333334, ans=0.125 +2024-07-28 13:25:57,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.36 vs. limit=22.5 +2024-07-28 13:26:15,655 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.567e+01 6.093e+01 7.292e+01 1.222e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 13:26:17,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=162501.33333333334, ans=0.125 +2024-07-28 13:26:18,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=162514.66666666666, ans=0.0 +2024-07-28 13:26:25,586 INFO [train.py:1114] (1/4) Epoch 12, batch 9450, loss[loss=0.1745, simple_loss=0.2525, pruned_loss=0.04823, over 4783.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2777, pruned_loss=0.05061, over 932445.02 frames. ], batch size: 11, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:26:29,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-07-28 13:26:40,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.58 vs. limit=10.0 +2024-07-28 13:26:43,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=162568.0, ans=0.125 +2024-07-28 13:26:56,291 INFO [train.py:1114] (1/4) Epoch 12, batch 9500, loss[loss=0.1578, simple_loss=0.2521, pruned_loss=0.03178, over 4711.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2784, pruned_loss=0.05091, over 934871.75 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:26:58,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=162594.66666666666, ans=0.07 +2024-07-28 13:27:09,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162621.33333333334, ans=0.1 +2024-07-28 13:27:17,796 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.541e+01 6.151e+01 7.043e+01 9.368e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 13:27:22,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=162648.0, ans=0.025 +2024-07-28 13:27:25,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=22.5 +2024-07-28 13:27:27,265 INFO [train.py:1114] (1/4) Epoch 12, batch 9550, loss[loss=0.2135, simple_loss=0.2911, pruned_loss=0.06794, over 4763.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2788, pruned_loss=0.05136, over 931893.08 frames. 
], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:27:47,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 13:27:56,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=162714.66666666666, ans=0.125 +2024-07-28 13:27:57,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0 +2024-07-28 13:27:59,581 INFO [train.py:1114] (1/4) Epoch 12, batch 9600, loss[loss=0.2435, simple_loss=0.3286, pruned_loss=0.07919, over 3342.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.279, pruned_loss=0.05157, over 931036.19 frames. ], batch size: 36, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:28:16,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162754.66666666666, ans=0.1 +2024-07-28 13:28:16,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=162754.66666666666, ans=0.125 +2024-07-28 13:28:19,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=162768.0, ans=0.2 +2024-07-28 13:28:22,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.832e+01 6.811e+01 8.204e+01 1.211e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-28 13:28:25,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=162781.33333333334, ans=0.125 +2024-07-28 13:28:31,978 INFO [train.py:1114] (1/4) Epoch 12, batch 9650, loss[loss=0.2019, simple_loss=0.2942, pruned_loss=0.05478, over 4840.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2795, pruned_loss=0.05205, over 927085.49 frames. ], batch size: 16, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:28:46,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=15.0 +2024-07-28 13:28:50,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=162834.66666666666, ans=0.04949747468305833 +2024-07-28 13:28:56,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162848.0, ans=0.1 +2024-07-28 13:29:02,790 INFO [train.py:1114] (1/4) Epoch 12, batch 9700, loss[loss=0.2086, simple_loss=0.2913, pruned_loss=0.06301, over 4148.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2792, pruned_loss=0.05199, over 925502.08 frames. ], batch size: 25, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:29:02,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=162861.33333333334, ans=0.025 +2024-07-28 13:29:05,087 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.96 vs. 
limit=15.0 +2024-07-28 13:29:05,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=162861.33333333334, ans=0.95 +2024-07-28 13:29:06,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=162861.33333333334, ans=0.0 +2024-07-28 13:29:07,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=162861.33333333334, ans=0.125 +2024-07-28 13:29:09,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.65 vs. limit=15.0 +2024-07-28 13:29:10,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.54 vs. limit=15.0 +2024-07-28 13:29:11,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.01 vs. limit=15.0 +2024-07-28 13:29:13,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=162874.66666666666, ans=0.125 +2024-07-28 13:29:25,016 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.692e+01 6.242e+01 7.537e+01 1.052e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 13:29:34,180 INFO [train.py:1114] (1/4) Epoch 12, batch 9750, loss[loss=0.2164, simple_loss=0.3055, pruned_loss=0.06365, over 4677.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2795, pruned_loss=0.05216, over 925930.15 frames. ], batch size: 15, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:29:34,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0 +2024-07-28 13:29:49,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162954.66666666666, ans=0.1 +2024-07-28 13:29:49,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162954.66666666666, ans=0.1 +2024-07-28 13:29:50,196 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:29:53,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162968.0, ans=0.0 +2024-07-28 13:29:53,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=162968.0, ans=0.125 +2024-07-28 13:30:05,284 INFO [train.py:1114] (1/4) Epoch 12, batch 9800, loss[loss=0.1541, simple_loss=0.2403, pruned_loss=0.03401, over 4699.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2784, pruned_loss=0.05182, over 925161.72 frames. ], batch size: 12, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:30:12,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=163008.0, ans=0.125 +2024-07-28 13:30:13,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.91 vs. 
limit=22.5 +2024-07-28 13:30:23,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 13:30:24,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=163034.66666666666, ans=0.0 +2024-07-28 13:30:26,611 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.828e+01 6.429e+01 7.275e+01 1.013e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 13:30:27,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=163034.66666666666, ans=0.0 +2024-07-28 13:30:31,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=163048.0, ans=0.125 +2024-07-28 13:30:36,208 INFO [train.py:1114] (1/4) Epoch 12, batch 9850, loss[loss=0.2269, simple_loss=0.3116, pruned_loss=0.07114, over 4905.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2786, pruned_loss=0.05186, over 927620.16 frames. ], batch size: 15, lr: 6.13e-03, grad_scale: 64.0 +2024-07-28 13:30:44,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163074.66666666666, ans=0.125 +2024-07-28 13:30:46,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=163074.66666666666, ans=0.0 +2024-07-28 13:30:48,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=163088.0, ans=0.2 +2024-07-28 13:31:05,941 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-07-28 13:31:06,847 INFO [train.py:1114] (1/4) Epoch 12, batch 9900, loss[loss=0.2603, simple_loss=0.3396, pruned_loss=0.09052, over 4852.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2801, pruned_loss=0.0526, over 926846.71 frames. 
], batch size: 16, lr: 6.13e-03, grad_scale: 64.0 +2024-07-28 13:31:08,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=163128.0, ans=0.025 +2024-07-28 13:31:12,828 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:31:12,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=163141.33333333334, ans=0.125 +2024-07-28 13:31:16,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163141.33333333334, ans=0.125 +2024-07-28 13:31:17,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=163141.33333333334, ans=0.0 +2024-07-28 13:31:19,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=163154.66666666666, ans=0.125 +2024-07-28 13:31:29,746 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.809e+01 6.400e+01 7.583e+01 1.176e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:31:31,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163181.33333333334, ans=0.1 +2024-07-28 13:31:37,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=163181.33333333334, ans=0.0 +2024-07-28 13:31:38,446 INFO [train.py:1114] (1/4) Epoch 12, batch 9950, loss[loss=0.1518, simple_loss=0.2384, pruned_loss=0.03263, over 4790.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.281, pruned_loss=0.05336, over 929455.58 frames. ], batch size: 11, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:31:43,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163194.66666666666, ans=0.125 +2024-07-28 13:31:51,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=163221.33333333334, ans=0.025 +2024-07-28 13:31:56,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163234.66666666666, ans=0.125 +2024-07-28 13:31:58,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163234.66666666666, ans=0.125 +2024-07-28 13:32:05,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163248.0, ans=0.1 +2024-07-28 13:32:07,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=163248.0, ans=0.0 +2024-07-28 13:32:07,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=163248.0, ans=0.0 +2024-07-28 13:32:07,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=163248.0, ans=0.125 +2024-07-28 13:32:09,637 INFO [train.py:1114] (1/4) Epoch 12, batch 10000, loss[loss=0.197, simple_loss=0.2925, pruned_loss=0.05079, over 4619.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2832, pruned_loss=0.05417, over 927082.88 frames. 
], batch size: 16, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:32:10,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=163261.33333333334, ans=0.125 +2024-07-28 13:32:20,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=163274.66666666666, ans=0.07 +2024-07-28 13:32:26,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=163288.0, ans=0.05 +2024-07-28 13:32:26,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=163288.0, ans=0.125 +2024-07-28 13:32:30,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.53 vs. limit=10.0 +2024-07-28 13:32:31,310 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.999e+01 5.883e+01 6.345e+01 7.076e+01 8.600e+01, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 13:32:34,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163314.66666666666, ans=0.125 +2024-07-28 13:32:35,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163314.66666666666, ans=0.1 +2024-07-28 13:32:41,183 INFO [train.py:1114] (1/4) Epoch 12, batch 10050, loss[loss=0.2412, simple_loss=0.3127, pruned_loss=0.08486, over 3548.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.287, pruned_loss=0.05599, over 914909.20 frames. ], batch size: 36, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:32:46,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=163328.0, ans=0.0 +2024-07-28 13:32:52,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=163341.33333333334, ans=15.0 +2024-07-28 13:32:58,692 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:33:02,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=163368.0, ans=0.125 +2024-07-28 13:33:14,549 INFO [train.py:1114] (1/4) Epoch 12, batch 10100, loss[loss=0.271, simple_loss=0.3305, pruned_loss=0.1058, over 3460.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2923, pruned_loss=0.06119, over 862231.15 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:33:20,305 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.93 vs. limit=15.0 +2024-07-28 13:33:27,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=163421.33333333334, ans=0.0 +2024-07-28 13:33:30,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163421.33333333334, ans=0.1 +2024-07-28 13:33:30,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.45 vs. 
limit=22.5 +2024-07-28 13:33:35,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=163434.66666666666, ans=0.2 +2024-07-28 13:33:37,793 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.859e+01 6.715e+01 7.355e+01 7.791e+01 1.006e+02, threshold=1.471e+02, percent-clipped=0.0 +2024-07-28 13:33:38,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=163434.66666666666, ans=0.125 +2024-07-28 13:33:43,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.28 vs. limit=22.5 +2024-07-28 13:33:46,743 INFO [train.py:1114] (1/4) Epoch 12, batch 10150, loss[loss=0.2341, simple_loss=0.3196, pruned_loss=0.07426, over 3520.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2964, pruned_loss=0.06533, over 820450.49 frames. ], batch size: 37, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:33:50,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=163461.33333333334, ans=0.125 +2024-07-28 13:33:59,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163488.0, ans=0.1 +2024-07-28 13:34:01,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163488.0, ans=0.125 +2024-07-28 13:34:05,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163501.33333333334, ans=0.125 +2024-07-28 13:34:07,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.67 vs. limit=22.5 +2024-07-28 13:34:18,683 INFO [train.py:1114] (1/4) Epoch 12, batch 10200, loss[loss=0.2255, simple_loss=0.3104, pruned_loss=0.07031, over 3378.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2987, pruned_loss=0.06813, over 789854.85 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:34:22,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=163528.0, ans=0.125 +2024-07-28 13:34:25,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=163541.33333333334, ans=0.0 +2024-07-28 13:34:30,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163541.33333333334, ans=0.125 +2024-07-28 13:35:14,738 INFO [train.py:1114] (1/4) Epoch 13, batch 0, loss[loss=0.1744, simple_loss=0.2714, pruned_loss=0.0387, over 4844.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2714, pruned_loss=0.0387, over 4844.00 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:35:14,739 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 13:35:20,120 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8495, 5.1625, 5.0245, 5.5997], device='cuda:1') +2024-07-28 13:35:26,207 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1689, simple_loss=0.2745, pruned_loss=0.03167, over 944034.00 frames. 
+2024-07-28 13:35:26,208 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 13:35:26,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:26,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:27,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:27,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.91 vs. limit=10.0 +2024-07-28 13:35:28,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163557.33333333334, ans=0.1 +2024-07-28 13:35:35,872 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.887e+01 6.365e+01 6.777e+01 7.332e+01 9.562e+01, threshold=1.355e+02, percent-clipped=0.0 +2024-07-28 13:35:36,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.84 vs. limit=15.0 +2024-07-28 13:35:38,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=163570.66666666666, ans=0.0 +2024-07-28 13:35:50,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=163597.33333333334, ans=0.0 +2024-07-28 13:36:00,566 INFO [train.py:1114] (1/4) Epoch 13, batch 50, loss[loss=0.1587, simple_loss=0.249, pruned_loss=0.03422, over 4619.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2814, pruned_loss=0.05163, over 206035.77 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:36:00,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163624.0, ans=0.1 +2024-07-28 13:36:07,946 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:36:10,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163637.33333333334, ans=0.1 +2024-07-28 13:36:15,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=163650.66666666666, ans=0.0 +2024-07-28 13:36:16,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.36 vs. limit=6.0 +2024-07-28 13:36:29,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. 
limit=15.0 +2024-07-28 13:36:35,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=163677.33333333334, ans=0.125 +2024-07-28 13:36:37,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163677.33333333334, ans=0.125 +2024-07-28 13:36:42,396 INFO [train.py:1114] (1/4) Epoch 13, batch 100, loss[loss=0.1732, simple_loss=0.2644, pruned_loss=0.04103, over 4649.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2832, pruned_loss=0.05223, over 365216.44 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:36:42,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=163690.66666666666, ans=0.125 +2024-07-28 13:36:46,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=163690.66666666666, ans=0.0 +2024-07-28 13:36:48,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163690.66666666666, ans=0.1 +2024-07-28 13:36:51,941 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.407e+01 6.133e+01 6.720e+01 8.973e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 13:36:53,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=163704.0, ans=0.0 +2024-07-28 13:37:06,334 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:37:07,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=163717.33333333334, ans=0.125 +2024-07-28 13:37:17,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=163730.66666666666, ans=0.0 +2024-07-28 13:37:30,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.96 vs. limit=10.0 +2024-07-28 13:37:32,109 INFO [train.py:1114] (1/4) Epoch 13, batch 150, loss[loss=0.1456, simple_loss=0.2495, pruned_loss=0.02089, over 4601.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2815, pruned_loss=0.0514, over 493899.18 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:37:32,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=163757.33333333334, ans=0.2 +2024-07-28 13:37:34,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163757.33333333334, ans=0.125 +2024-07-28 13:37:36,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163757.33333333334, ans=0.1 +2024-07-28 13:37:39,108 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.06 vs. 
limit=15.0 +2024-07-28 13:37:48,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=163784.0, ans=0.125 +2024-07-28 13:37:48,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=163784.0, ans=0.125 +2024-07-28 13:37:51,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0 +2024-07-28 13:38:08,722 INFO [train.py:1114] (1/4) Epoch 13, batch 200, loss[loss=0.2125, simple_loss=0.3025, pruned_loss=0.06128, over 4641.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2799, pruned_loss=0.051, over 593506.68 frames. ], batch size: 22, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:38:12,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=163824.0, ans=0.2 +2024-07-28 13:38:13,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=163824.0, ans=0.2 +2024-07-28 13:38:18,038 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.615e+01 6.251e+01 7.683e+01 1.063e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 13:38:42,109 INFO [train.py:1114] (1/4) Epoch 13, batch 250, loss[loss=0.1955, simple_loss=0.2788, pruned_loss=0.05609, over 4621.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2792, pruned_loss=0.05085, over 670281.03 frames. ], batch size: 16, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:38:43,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=163890.66666666666, ans=0.04949747468305833 +2024-07-28 13:38:46,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163890.66666666666, ans=0.1 +2024-07-28 13:38:47,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163890.66666666666, ans=0.1 +2024-07-28 13:39:00,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=163917.33333333334, ans=0.0 +2024-07-28 13:39:06,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=163930.66666666666, ans=0.125 +2024-07-28 13:39:16,246 INFO [train.py:1114] (1/4) Epoch 13, batch 300, loss[loss=0.2346, simple_loss=0.3106, pruned_loss=0.07932, over 4803.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2797, pruned_loss=0.05098, over 729960.71 frames. ], batch size: 15, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:39:18,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=163957.33333333334, ans=0.0 +2024-07-28 13:39:25,703 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.595e+01 6.354e+01 7.540e+01 1.026e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 13:39:43,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.41 vs. limit=15.0 +2024-07-28 13:39:49,687 INFO [train.py:1114] (1/4) Epoch 13, batch 350, loss[loss=0.1907, simple_loss=0.262, pruned_loss=0.05968, over 4929.00 frames. 
], tot_loss[loss=0.1914, simple_loss=0.2802, pruned_loss=0.05124, over 775912.19 frames. ], batch size: 12, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:39:58,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164037.33333333334, ans=0.1 +2024-07-28 13:40:02,369 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:40:10,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164064.0, ans=0.1 +2024-07-28 13:40:13,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=164064.0, ans=0.2 +2024-07-28 13:40:18,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=164077.33333333334, ans=0.2 +2024-07-28 13:40:22,075 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.14 vs. limit=10.0 +2024-07-28 13:40:24,301 INFO [train.py:1114] (1/4) Epoch 13, batch 400, loss[loss=0.187, simple_loss=0.2821, pruned_loss=0.04594, over 4693.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2782, pruned_loss=0.05014, over 813273.01 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:40:25,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=164090.66666666666, ans=0.125 +2024-07-28 13:40:28,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.82 vs. limit=15.0 +2024-07-28 13:40:35,598 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.430e+01 5.754e+01 6.889e+01 9.909e+01, threshold=1.151e+02, percent-clipped=0.0 +2024-07-28 13:40:46,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164130.66666666666, ans=0.1 +2024-07-28 13:40:46,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=164130.66666666666, ans=0.125 +2024-07-28 13:40:50,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=164130.66666666666, ans=0.2 +2024-07-28 13:40:55,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=164144.0, ans=0.0 +2024-07-28 13:40:59,515 INFO [train.py:1114] (1/4) Epoch 13, batch 450, loss[loss=0.1797, simple_loss=0.2745, pruned_loss=0.04241, over 4638.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2793, pruned_loss=0.0509, over 838844.50 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:41:08,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164170.66666666666, ans=0.1 +2024-07-28 13:41:08,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=164170.66666666666, ans=0.04949747468305833 +2024-07-28 13:41:09,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. 
limit=15.0 +2024-07-28 13:41:14,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=164184.0, ans=0.0 +2024-07-28 13:41:18,831 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:41:24,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=164197.33333333334, ans=0.125 +2024-07-28 13:41:24,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=12.0 +2024-07-28 13:41:30,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.99 vs. limit=15.0 +2024-07-28 13:41:32,427 INFO [train.py:1114] (1/4) Epoch 13, batch 500, loss[loss=0.2373, simple_loss=0.3218, pruned_loss=0.07643, over 4668.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2776, pruned_loss=0.04992, over 861310.70 frames. ], batch size: 15, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:41:39,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=164237.33333333334, ans=0.0 +2024-07-28 13:41:41,729 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.521e+01 6.089e+01 6.841e+01 9.670e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 13:41:41,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=164237.33333333334, ans=0.125 +2024-07-28 13:41:47,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164250.66666666666, ans=0.0 +2024-07-28 13:41:59,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=164277.33333333334, ans=0.0 +2024-07-28 13:42:05,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=164290.66666666666, ans=0.04949747468305833 +2024-07-28 13:42:06,174 INFO [train.py:1114] (1/4) Epoch 13, batch 550, loss[loss=0.201, simple_loss=0.2933, pruned_loss=0.05438, over 4647.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2769, pruned_loss=0.04987, over 877455.26 frames. ], batch size: 17, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:42:17,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=164304.0, ans=0.0 +2024-07-28 13:42:18,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=164317.33333333334, ans=0.0 +2024-07-28 13:42:27,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=164330.66666666666, ans=0.025 +2024-07-28 13:42:27,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=164330.66666666666, ans=0.0 +2024-07-28 13:42:38,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=164357.33333333334, ans=0.025 +2024-07-28 13:42:39,323 INFO [train.py:1114] (1/4) Epoch 13, batch 600, loss[loss=0.2113, simple_loss=0.3067, pruned_loss=0.05792, over 4611.00 frames. 
], tot_loss[loss=0.1885, simple_loss=0.2774, pruned_loss=0.04983, over 892045.02 frames. ], batch size: 16, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:42:46,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=164370.66666666666, ans=0.125 +2024-07-28 13:42:48,628 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.528e+01 6.337e+01 7.273e+01 1.055e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 13:42:55,627 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.39 vs. limit=15.0 +2024-07-28 13:42:56,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=164384.0, ans=0.0 +2024-07-28 13:42:58,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=164384.0, ans=0.0 +2024-07-28 13:42:58,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164397.33333333334, ans=0.125 +2024-07-28 13:43:09,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164397.33333333334, ans=0.125 +2024-07-28 13:43:18,099 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:43:19,287 INFO [train.py:1114] (1/4) Epoch 13, batch 650, loss[loss=0.1557, simple_loss=0.2365, pruned_loss=0.03752, over 4759.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2768, pruned_loss=0.04996, over 903743.65 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:43:20,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=164424.0, ans=0.1 +2024-07-28 13:43:36,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=164450.66666666666, ans=0.07 +2024-07-28 13:43:39,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=164450.66666666666, ans=0.2 +2024-07-28 13:43:40,331 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0 +2024-07-28 13:43:48,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164477.33333333334, ans=0.0 +2024-07-28 13:43:48,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=164477.33333333334, ans=0.0 +2024-07-28 13:43:50,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=164477.33333333334, ans=0.125 +2024-07-28 13:43:52,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=164477.33333333334, ans=0.09899494936611666 +2024-07-28 13:43:55,021 INFO [train.py:1114] (1/4) Epoch 13, batch 700, loss[loss=0.1649, simple_loss=0.2385, pruned_loss=0.04563, over 4639.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2775, pruned_loss=0.05008, over 911675.64 frames. 
], batch size: 12, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:43:57,168 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:44:04,379 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.621e+01 6.058e+01 7.095e+01 1.199e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:44:12,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=164517.33333333334, ans=0.125 +2024-07-28 13:44:13,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164517.33333333334, ans=0.1 +2024-07-28 13:44:14,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=15.0 +2024-07-28 13:44:15,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=164517.33333333334, ans=0.125 +2024-07-28 13:44:27,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=164530.66666666666, ans=0.0 +2024-07-28 13:44:27,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=164530.66666666666, ans=0.2 +2024-07-28 13:44:37,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-28 13:44:38,231 INFO [train.py:1114] (1/4) Epoch 13, batch 750, loss[loss=0.2212, simple_loss=0.2987, pruned_loss=0.07182, over 4693.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2773, pruned_loss=0.05015, over 918223.44 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:44:44,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=164570.66666666666, ans=0.125 +2024-07-28 13:45:01,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=164597.33333333334, ans=0.09899494936611666 +2024-07-28 13:45:03,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164597.33333333334, ans=0.125 +2024-07-28 13:45:05,684 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:45:13,493 INFO [train.py:1114] (1/4) Epoch 13, batch 800, loss[loss=0.1562, simple_loss=0.2406, pruned_loss=0.03588, over 4861.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2773, pruned_loss=0.05001, over 923393.33 frames. 
], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:45:14,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=164624.0, ans=0.0 +2024-07-28 13:45:22,567 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+01 5.509e+01 5.892e+01 6.560e+01 1.053e+02, threshold=1.178e+02, percent-clipped=0.0 +2024-07-28 13:45:42,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164650.66666666666, ans=0.0 +2024-07-28 13:45:43,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=164650.66666666666, ans=0.125 +2024-07-28 13:45:47,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=164664.0, ans=0.0 +2024-07-28 13:45:48,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=164664.0, ans=0.0 +2024-07-28 13:45:53,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=164664.0, ans=0.0 +2024-07-28 13:46:00,990 INFO [train.py:1114] (1/4) Epoch 13, batch 850, loss[loss=0.1768, simple_loss=0.2631, pruned_loss=0.0452, over 4674.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2766, pruned_loss=0.04993, over 927546.40 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:46:01,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=164690.66666666666, ans=0.0 +2024-07-28 13:46:06,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=12.0 +2024-07-28 13:46:09,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164704.0, ans=0.125 +2024-07-28 13:46:26,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=164717.33333333334, ans=10.0 +2024-07-28 13:46:28,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. limit=10.0 +2024-07-28 13:46:58,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=164744.0, ans=0.0 +2024-07-28 13:47:04,273 INFO [train.py:1114] (1/4) Epoch 13, batch 900, loss[loss=0.1945, simple_loss=0.2761, pruned_loss=0.05648, over 4851.00 frames. ], tot_loss[loss=0.189, simple_loss=0.277, pruned_loss=0.05052, over 928265.57 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:47:06,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=164757.33333333334, ans=0.2 +2024-07-28 13:47:07,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.67 vs. 
limit=15.0 +2024-07-28 13:47:13,467 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.792e+01 6.438e+01 7.268e+01 1.084e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 13:47:29,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=164797.33333333334, ans=0.125 +2024-07-28 13:47:37,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=164824.0, ans=0.2 +2024-07-28 13:47:37,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.87 vs. limit=5.0 +2024-07-28 13:47:37,999 INFO [train.py:1114] (1/4) Epoch 13, batch 950, loss[loss=0.1547, simple_loss=0.2401, pruned_loss=0.03469, over 4767.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2759, pruned_loss=0.04964, over 930140.09 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:47:46,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=164837.33333333334, ans=0.125 +2024-07-28 13:47:52,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=164850.66666666666, ans=0.125 +2024-07-28 13:47:55,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.73 vs. limit=15.0 +2024-07-28 13:48:01,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=164864.0, ans=0.2 +2024-07-28 13:48:05,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164877.33333333334, ans=0.1 +2024-07-28 13:48:06,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164877.33333333334, ans=0.125 +2024-07-28 13:48:11,274 INFO [train.py:1114] (1/4) Epoch 13, batch 1000, loss[loss=0.1611, simple_loss=0.254, pruned_loss=0.0341, over 4963.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2763, pruned_loss=0.04962, over 929532.61 frames. 
], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:48:11,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=164890.66666666666, ans=0.125 +2024-07-28 13:48:12,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164890.66666666666, ans=0.125 +2024-07-28 13:48:15,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=164890.66666666666, ans=0.0 +2024-07-28 13:48:20,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.622e+01 6.136e+01 7.218e+01 8.877e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 13:48:20,671 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:48:21,582 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:48:33,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=164930.66666666666, ans=0.0 +2024-07-28 13:48:37,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164944.0, ans=0.1 +2024-07-28 13:48:39,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164944.0, ans=0.1 +2024-07-28 13:48:44,470 INFO [train.py:1114] (1/4) Epoch 13, batch 1050, loss[loss=0.1584, simple_loss=0.2619, pruned_loss=0.02744, over 4875.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2755, pruned_loss=0.04944, over 932201.50 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:48:57,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=164970.66666666666, ans=0.125 +2024-07-28 13:48:57,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=164970.66666666666, ans=0.2 +2024-07-28 13:49:03,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=164984.0, ans=0.2 +2024-07-28 13:49:21,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.82 vs. limit=22.5 +2024-07-28 13:49:23,108 INFO [train.py:1114] (1/4) Epoch 13, batch 1100, loss[loss=0.2088, simple_loss=0.2905, pruned_loss=0.06358, over 4904.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2754, pruned_loss=0.04913, over 934299.86 frames. 
], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:49:46,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165037.33333333334, ans=0.1 +2024-07-28 13:49:50,755 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+01 5.557e+01 6.150e+01 6.948e+01 9.915e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 13:50:12,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=165037.33333333334, ans=0.125 +2024-07-28 13:50:19,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=165064.0, ans=0.0 +2024-07-28 13:50:22,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=165064.0, ans=0.5 +2024-07-28 13:50:26,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=165064.0, ans=0.04949747468305833 +2024-07-28 13:50:27,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=12.0 +2024-07-28 13:50:36,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=165077.33333333334, ans=0.0 +2024-07-28 13:50:45,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=165090.66666666666, ans=0.125 +2024-07-28 13:50:46,017 INFO [train.py:1114] (1/4) Epoch 13, batch 1150, loss[loss=0.1944, simple_loss=0.2835, pruned_loss=0.05268, over 4899.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2757, pruned_loss=0.04928, over 934306.02 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:50:48,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.12 vs. limit=15.0 +2024-07-28 13:50:49,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=165090.66666666666, ans=0.2 +2024-07-28 13:50:54,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=165104.0, ans=0.025 +2024-07-28 13:50:59,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165117.33333333334, ans=0.125 +2024-07-28 13:51:00,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=165117.33333333334, ans=0.125 +2024-07-28 13:51:06,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=165130.66666666666, ans=0.125 +2024-07-28 13:51:13,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=12.0 +2024-07-28 13:51:15,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165144.0, ans=0.125 +2024-07-28 13:51:20,620 INFO [train.py:1114] (1/4) Epoch 13, batch 1200, loss[loss=0.1871, simple_loss=0.283, pruned_loss=0.04564, over 4875.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2762, pruned_loss=0.04929, over 933328.52 frames. 
], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:51:26,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-07-28 13:51:29,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=165170.66666666666, ans=0.125 +2024-07-28 13:51:30,264 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.602e+01 6.215e+01 7.036e+01 9.353e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:51:30,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-28 13:51:37,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.25 vs. limit=15.0 +2024-07-28 13:51:40,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0 +2024-07-28 13:51:42,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-07-28 13:51:42,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=165197.33333333334, ans=0.95 +2024-07-28 13:51:47,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=165197.33333333334, ans=0.125 +2024-07-28 13:51:55,903 INFO [train.py:1114] (1/4) Epoch 13, batch 1250, loss[loss=0.184, simple_loss=0.2791, pruned_loss=0.04449, over 4800.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.277, pruned_loss=0.04913, over 937269.25 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:52:16,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=165250.66666666666, ans=0.07 +2024-07-28 13:52:36,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.40 vs. limit=22.5 +2024-07-28 13:52:37,828 INFO [train.py:1114] (1/4) Epoch 13, batch 1300, loss[loss=0.2373, simple_loss=0.3307, pruned_loss=0.07199, over 4749.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2761, pruned_loss=0.04898, over 938938.31 frames. ], batch size: 19, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:52:38,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.21 vs. limit=15.0 +2024-07-28 13:52:48,771 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.215e+01 5.537e+01 6.038e+01 6.682e+01 9.542e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 13:53:11,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165344.0, ans=0.125 +2024-07-28 13:53:19,166 INFO [train.py:1114] (1/4) Epoch 13, batch 1350, loss[loss=0.2013, simple_loss=0.2904, pruned_loss=0.05609, over 4757.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2767, pruned_loss=0.04917, over 940780.03 frames. 
], batch size: 13, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:53:21,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165357.33333333334, ans=0.1 +2024-07-28 13:53:25,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165370.66666666666, ans=0.125 +2024-07-28 13:53:48,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=165410.66666666666, ans=0.5 +2024-07-28 13:53:54,749 INFO [train.py:1114] (1/4) Epoch 13, batch 1400, loss[loss=0.1291, simple_loss=0.2072, pruned_loss=0.02553, over 4702.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2756, pruned_loss=0.04876, over 942561.36 frames. ], batch size: 11, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:54:00,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165424.0, ans=0.125 +2024-07-28 13:54:04,061 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.870e+01 6.563e+01 8.092e+01 1.108e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 13:54:10,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=165450.66666666666, ans=0.2 +2024-07-28 13:54:22,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=165477.33333333334, ans=0.2 +2024-07-28 13:54:26,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=165477.33333333334, ans=0.0 +2024-07-28 13:54:28,177 INFO [train.py:1114] (1/4) Epoch 13, batch 1450, loss[loss=0.2143, simple_loss=0.2965, pruned_loss=0.06608, over 4691.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2763, pruned_loss=0.04905, over 942506.32 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:54:34,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=165504.0, ans=0.125 +2024-07-28 13:54:42,185 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:54:45,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=165517.33333333334, ans=0.125 +2024-07-28 13:54:50,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=165530.66666666666, ans=0.125 +2024-07-28 13:54:56,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=165544.0, ans=0.025 +2024-07-28 13:55:01,061 INFO [train.py:1114] (1/4) Epoch 13, batch 1500, loss[loss=0.1637, simple_loss=0.2656, pruned_loss=0.03091, over 4809.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2765, pruned_loss=0.04863, over 942174.09 frames. ], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:01,568 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.93 vs. 
limit=15.0 +2024-07-28 13:55:10,414 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.997e+01 5.672e+01 6.060e+01 6.827e+01 9.493e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:55:17,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=165584.0, ans=0.2 +2024-07-28 13:55:18,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=165584.0, ans=0.2 +2024-07-28 13:55:24,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=165597.33333333334, ans=0.125 +2024-07-28 13:55:34,841 INFO [train.py:1114] (1/4) Epoch 13, batch 1550, loss[loss=0.2145, simple_loss=0.3033, pruned_loss=0.0628, over 4905.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2775, pruned_loss=0.04944, over 938452.80 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:35,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=165624.0, ans=0.0 +2024-07-28 13:55:38,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.56 vs. limit=15.0 +2024-07-28 13:55:43,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=165637.33333333334, ans=0.5 +2024-07-28 13:55:50,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=165650.66666666666, ans=0.2 +2024-07-28 13:55:56,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=165664.0, ans=0.0 +2024-07-28 13:56:08,015 INFO [train.py:1114] (1/4) Epoch 13, batch 1600, loss[loss=0.2189, simple_loss=0.2973, pruned_loss=0.07024, over 4870.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2771, pruned_loss=0.05017, over 936888.36 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:56:09,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=165690.66666666666, ans=0.2 +2024-07-28 13:56:17,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0 +2024-07-28 13:56:19,117 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.268e+01 7.174e+01 9.497e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:56:27,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=165717.33333333334, ans=0.125 +2024-07-28 13:56:42,658 INFO [train.py:1114] (1/4) Epoch 13, batch 1650, loss[loss=0.1635, simple_loss=0.2631, pruned_loss=0.03192, over 4661.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2768, pruned_loss=0.05015, over 936873.50 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:57:01,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=165784.0, ans=0.125 +2024-07-28 13:57:01,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.93 vs. 
limit=15.0 +2024-07-28 13:57:13,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=165810.66666666666, ans=0.025 +2024-07-28 13:57:15,592 INFO [train.py:1114] (1/4) Epoch 13, batch 1700, loss[loss=0.183, simple_loss=0.2601, pruned_loss=0.05299, over 4717.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2763, pruned_loss=0.04993, over 938613.27 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:25,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.43 vs. limit=15.0 +2024-07-28 13:57:26,860 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.803e+01 6.268e+01 7.328e+01 1.138e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:57:31,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=165850.66666666666, ans=0.125 +2024-07-28 13:57:33,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=165850.66666666666, ans=0.125 +2024-07-28 13:57:38,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=165864.0, ans=0.0 +2024-07-28 13:57:51,030 INFO [train.py:1114] (1/4) Epoch 13, batch 1750, loss[loss=0.1716, simple_loss=0.255, pruned_loss=0.04408, over 4811.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2755, pruned_loss=0.04934, over 939650.69 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:54,441 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:57:59,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.04 vs. limit=15.0 +2024-07-28 13:57:59,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=165904.0, ans=0.125 +2024-07-28 13:58:01,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=165904.0, ans=0.125 +2024-07-28 13:58:03,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165904.0, ans=0.1 +2024-07-28 13:58:26,047 INFO [train.py:1114] (1/4) Epoch 13, batch 1800, loss[loss=0.2176, simple_loss=0.3112, pruned_loss=0.062, over 4639.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2768, pruned_loss=0.05016, over 940482.98 frames. 
], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:58:26,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165957.33333333334, ans=0.1 +2024-07-28 13:58:35,515 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.623e+01 6.283e+01 7.470e+01 1.047e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 13:58:53,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=165997.33333333334, ans=0.125 +2024-07-28 13:58:54,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=166010.66666666666, ans=0.0 +2024-07-28 13:58:59,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. limit=6.0 +2024-07-28 13:59:00,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=166010.66666666666, ans=0.125 +2024-07-28 13:59:01,620 INFO [train.py:1114] (1/4) Epoch 13, batch 1850, loss[loss=0.1942, simple_loss=0.285, pruned_loss=0.05176, over 4807.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2774, pruned_loss=0.04996, over 940233.48 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:05,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-07-28 13:59:09,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166037.33333333334, ans=0.1 +2024-07-28 13:59:16,355 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:59:20,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=166050.66666666666, ans=0.125 +2024-07-28 13:59:25,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=166064.0, ans=0.125 +2024-07-28 13:59:35,544 INFO [train.py:1114] (1/4) Epoch 13, batch 1900, loss[loss=0.2049, simple_loss=0.288, pruned_loss=0.06092, over 4676.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2772, pruned_loss=0.04972, over 941682.78 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:44,624 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.588e+01 6.157e+01 7.144e+01 1.104e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 13:59:54,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=166117.33333333334, ans=0.0 +2024-07-28 13:59:57,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166130.66666666666, ans=0.0 +2024-07-28 14:00:05,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166144.0, ans=0.125 +2024-07-28 14:00:08,384 INFO [train.py:1114] (1/4) Epoch 13, batch 1950, loss[loss=0.1809, simple_loss=0.2595, pruned_loss=0.05115, over 4905.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2778, pruned_loss=0.04995, over 943797.79 frames. 
], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:09,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166157.33333333334, ans=0.1 +2024-07-28 14:00:10,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=166157.33333333334, ans=0.125 +2024-07-28 14:00:17,642 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.03 vs. limit=10.0 +2024-07-28 14:00:18,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.11 vs. limit=22.5 +2024-07-28 14:00:25,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166184.0, ans=0.1 +2024-07-28 14:00:30,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=166197.33333333334, ans=0.125 +2024-07-28 14:00:31,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166197.33333333334, ans=0.1 +2024-07-28 14:00:36,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=166210.66666666666, ans=0.0 +2024-07-28 14:00:42,310 INFO [train.py:1114] (1/4) Epoch 13, batch 2000, loss[loss=0.1841, simple_loss=0.2697, pruned_loss=0.04922, over 4831.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2782, pruned_loss=0.05036, over 941320.00 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:42,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=166224.0, ans=0.125 +2024-07-28 14:00:44,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=166224.0, ans=0.125 +2024-07-28 14:00:51,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.856e+01 6.495e+01 7.461e+01 1.148e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 14:01:09,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=22.5 +2024-07-28 14:01:11,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=166277.33333333334, ans=0.2 +2024-07-28 14:01:16,502 INFO [train.py:1114] (1/4) Epoch 13, batch 2050, loss[loss=0.1554, simple_loss=0.2349, pruned_loss=0.03801, over 4610.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2771, pruned_loss=0.04994, over 939239.50 frames. ], batch size: 11, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:01:29,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=166317.33333333334, ans=0.05 +2024-07-28 14:01:29,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166317.33333333334, ans=0.1 +2024-07-28 14:01:43,163 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.71 vs. 
limit=10.0 +2024-07-28 14:01:48,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0 +2024-07-28 14:01:50,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=166357.33333333334, ans=0.0 +2024-07-28 14:01:51,261 INFO [train.py:1114] (1/4) Epoch 13, batch 2100, loss[loss=0.1818, simple_loss=0.2739, pruned_loss=0.04478, over 4756.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2763, pruned_loss=0.04922, over 940974.34 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:01:54,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.72 vs. limit=22.5 +2024-07-28 14:01:58,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.13 vs. limit=22.5 +2024-07-28 14:02:00,482 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.528e+01 6.162e+01 7.061e+01 9.278e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 14:02:03,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=166370.66666666666, ans=0.0 +2024-07-28 14:02:05,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=166384.0, ans=10.0 +2024-07-28 14:02:05,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.15 vs. limit=15.0 +2024-07-28 14:02:11,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=166397.33333333334, ans=0.2 +2024-07-28 14:02:24,075 INFO [train.py:1114] (1/4) Epoch 13, batch 2150, loss[loss=0.1816, simple_loss=0.2749, pruned_loss=0.04411, over 4897.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2756, pruned_loss=0.04897, over 944174.60 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:26,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=166424.0, ans=0.025 +2024-07-28 14:02:26,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=166424.0, ans=0.125 +2024-07-28 14:02:27,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=166424.0, ans=0.125 +2024-07-28 14:02:44,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=166450.66666666666, ans=0.125 +2024-07-28 14:02:51,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=166464.0, ans=0.125 +2024-07-28 14:02:56,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=166477.33333333334, ans=0.2 +2024-07-28 14:02:59,555 INFO [train.py:1114] (1/4) Epoch 13, batch 2200, loss[loss=0.1729, simple_loss=0.273, pruned_loss=0.03637, over 4806.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2747, pruned_loss=0.04834, over 944061.24 frames. 
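The scaling.py INFO lines record hyperparameters whose value is resolved from batch_count at log time (conv_skip_rate to 0.0, balancer probabilities to 0.125, dropout_p to 0.1, and so on), i.e. floats that follow a schedule keyed by training progress. The sketch below shows one plausible mechanism, piecewise-linear interpolation over (batch_count, value) breakpoints; the breakpoints themselves are invented for illustration and are not the recipe's actual schedules:

```python
import bisect


class ScheduledFloatSketch:
    """Piecewise-linear float keyed by batch count.

    The breakpoints below are illustrative; the real schedules that
    resolve to ans=0.125 / ans=0.0 in the logs live in scaling.py.
    """

    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count
        self.xs = [p[0] for p in points]
        self.ys = [p[1] for p in points]

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        t = (batch_count - x0) / (x1 - x0)
        return y0 + t * (y1 - y0)


# e.g. a skip rate that decays from 0.3 to 0.0 over the first 20k batches
conv_skip_rate = ScheduledFloatSketch((0.0, 0.3), (20000.0, 0.0))
print(conv_skip_rate(166010.67))  # -> 0.0, constant once past the last breakpoint
```

This also explains why the same name logs the same ans value at every nearby batch_count: by batch 166k most schedules have long since reached their final breakpoint.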
], batch size: 14, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:07,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=166504.0, ans=0.125 +2024-07-28 14:03:08,837 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.698e+01 6.654e+01 7.833e+01 2.383e+02, threshold=1.331e+02, percent-clipped=1.0 +2024-07-28 14:03:09,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166504.0, ans=0.125 +2024-07-28 14:03:25,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0 +2024-07-28 14:03:32,644 INFO [train.py:1114] (1/4) Epoch 13, batch 2250, loss[loss=0.1829, simple_loss=0.2759, pruned_loss=0.04489, over 4681.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2754, pruned_loss=0.04829, over 942524.65 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:36,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=166557.33333333334, ans=0.125 +2024-07-28 14:03:37,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166557.33333333334, ans=0.125 +2024-07-28 14:03:46,748 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=12.0 +2024-07-28 14:03:51,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.20 vs. limit=10.0 +2024-07-28 14:03:57,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=166597.33333333334, ans=0.125 +2024-07-28 14:03:57,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=166597.33333333334, ans=0.125 +2024-07-28 14:04:03,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=166610.66666666666, ans=0.125 +2024-07-28 14:04:07,910 INFO [train.py:1114] (1/4) Epoch 13, batch 2300, loss[loss=0.1734, simple_loss=0.2501, pruned_loss=0.04839, over 4936.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2732, pruned_loss=0.04783, over 940432.46 frames. ], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:16,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=166637.33333333334, ans=0.07 +2024-07-28 14:04:19,878 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.399e+01 5.798e+01 6.898e+01 9.306e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-28 14:04:23,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.97 vs. 
limit=15.0 +2024-07-28 14:04:29,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=166664.0, ans=0.0 +2024-07-28 14:04:35,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=166664.0, ans=0.125 +2024-07-28 14:04:44,034 INFO [train.py:1114] (1/4) Epoch 13, batch 2350, loss[loss=0.1835, simple_loss=0.2687, pruned_loss=0.04918, over 4642.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2739, pruned_loss=0.04788, over 942346.44 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:05:03,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-07-28 14:05:07,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=166730.66666666666, ans=0.025 +2024-07-28 14:05:16,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=166744.0, ans=0.0 +2024-07-28 14:05:17,414 INFO [train.py:1114] (1/4) Epoch 13, batch 2400, loss[loss=0.1724, simple_loss=0.2569, pruned_loss=0.04394, over 4639.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2757, pruned_loss=0.04893, over 941924.43 frames. ], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:05:18,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=166757.33333333334, ans=0.0 +2024-07-28 14:05:19,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=166757.33333333334, ans=0.125 +2024-07-28 14:05:26,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=166770.66666666666, ans=0.0 +2024-07-28 14:05:27,488 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.555e+01 6.337e+01 7.554e+01 1.093e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 14:05:48,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=166810.66666666666, ans=0.0 +2024-07-28 14:05:50,657 INFO [train.py:1114] (1/4) Epoch 13, batch 2450, loss[loss=0.1753, simple_loss=0.2751, pruned_loss=0.03769, over 4689.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2768, pruned_loss=0.04962, over 937474.81 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:05:54,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.25 vs. limit=15.0 +2024-07-28 14:05:57,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=166837.33333333334, ans=0.125 +2024-07-28 14:06:01,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166837.33333333334, ans=0.1 +2024-07-28 14:06:22,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.61 vs. limit=15.0 +2024-07-28 14:06:23,998 INFO [train.py:1114] (1/4) Epoch 13, batch 2500, loss[loss=0.1828, simple_loss=0.2675, pruned_loss=0.04903, over 4804.00 frames. 
], tot_loss[loss=0.1872, simple_loss=0.2762, pruned_loss=0.04913, over 939632.30 frames. ], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:06:25,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=166890.66666666666, ans=0.2 +2024-07-28 14:06:26,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=166890.66666666666, ans=0.025 +2024-07-28 14:06:27,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.03 vs. limit=15.0 +2024-07-28 14:06:29,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166890.66666666666, ans=0.1 +2024-07-28 14:06:32,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166904.0, ans=0.1 +2024-07-28 14:06:33,894 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.445e+01 5.909e+01 6.665e+01 1.016e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-28 14:06:39,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=166917.33333333334, ans=22.5 +2024-07-28 14:06:42,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=166917.33333333334, ans=0.04949747468305833 +2024-07-28 14:06:44,052 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:06:44,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.44 vs. limit=15.0 +2024-07-28 14:06:47,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=166930.66666666666, ans=0.125 +2024-07-28 14:06:55,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166944.0, ans=0.125 +2024-07-28 14:06:56,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.65 vs. limit=22.5 +2024-07-28 14:06:57,756 INFO [train.py:1114] (1/4) Epoch 13, batch 2550, loss[loss=0.1633, simple_loss=0.243, pruned_loss=0.04184, over 4815.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2757, pruned_loss=0.04888, over 939134.19 frames. ], batch size: 11, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:14,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=166984.0, ans=0.0 +2024-07-28 14:07:14,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=166984.0, ans=0.2 +2024-07-28 14:07:29,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=167010.66666666666, ans=0.125 +2024-07-28 14:07:32,593 INFO [train.py:1114] (1/4) Epoch 13, batch 2600, loss[loss=0.2043, simple_loss=0.2987, pruned_loss=0.05493, over 4897.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2775, pruned_loss=0.04958, over 938349.17 frames. 
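The Whitening lines compare a per-module statistic against a limit (for example metric=8.03 vs. limit=15.0 above), which suggests a scale-invariant measure of how far the channel covariance of an activation has drifted from isotropic. One such measure is d·trace(C²)/trace(C)², which equals 1.0 for a perfectly white covariance and grows with the eigenvalue spread; whether scaling.py uses exactly this definition is an assumption, so treat the sketch as illustrative:

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Anisotropy of the channel covariance of x, shape (frames, channels).

    Returns c * trace(C @ C) / trace(C)**2 per group, averaged over groups:
    1.0 for an isotropic covariance, larger when a few directions dominate.
    A hedged guess at scaling.py's metric, not its verified definition.
    """
    n, d = x.shape
    assert d % num_groups == 0
    c = d // num_groups
    xg = x.reshape(n, num_groups, c).transpose(0, 1)      # (groups, n, c)
    cov = xg.transpose(1, 2) @ xg / n                      # (groups, c, c)
    trace_c = cov.diagonal(dim1=1, dim2=2).sum(-1)         # trace(C)
    trace_c2 = (cov * cov.transpose(1, 2)).sum((1, 2))     # trace(C @ C), C symmetric
    return (c * trace_c2 / trace_c.clamp(min=1e-20) ** 2).mean()


x = torch.randn(1000, 384)
print(whitening_metric(x))  # ~1.0 for white noise, far under limit=15.0
```

Under this reading, a logged metric well below its limit (8.03 vs. 15.0) means the module's activations are comfortably close to white, and the log only flags modules whose metric approaches the limit.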
], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:41,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=167037.33333333334, ans=0.125 +2024-07-28 14:07:42,338 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.608e+01 6.313e+01 7.050e+01 1.090e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 14:07:44,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=167037.33333333334, ans=0.0 +2024-07-28 14:07:48,073 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.69 vs. limit=5.0 +2024-07-28 14:07:49,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.34 vs. limit=22.5 +2024-07-28 14:07:49,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=167050.66666666666, ans=0.2 +2024-07-28 14:07:52,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=167064.0, ans=0.025 +2024-07-28 14:07:53,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167064.0, ans=0.125 +2024-07-28 14:08:05,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=167077.33333333334, ans=0.95 +2024-07-28 14:08:07,477 INFO [train.py:1114] (1/4) Epoch 13, batch 2650, loss[loss=0.2233, simple_loss=0.3107, pruned_loss=0.06797, over 4634.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2777, pruned_loss=0.04947, over 940415.55 frames. ], batch size: 16, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:07,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0 +2024-07-28 14:08:11,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167090.66666666666, ans=0.1 +2024-07-28 14:08:14,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-28 14:08:18,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=167104.0, ans=0.2 +2024-07-28 14:08:24,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=167117.33333333334, ans=0.125 +2024-07-28 14:08:30,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=167130.66666666666, ans=0.0 +2024-07-28 14:08:32,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=167130.66666666666, ans=0.125 +2024-07-28 14:08:33,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=167144.0, ans=0.0 +2024-07-28 14:08:37,306 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. 
limit=15.0 +2024-07-28 14:08:41,003 INFO [train.py:1114] (1/4) Epoch 13, batch 2700, loss[loss=0.1895, simple_loss=0.2921, pruned_loss=0.04346, over 4746.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2784, pruned_loss=0.0497, over 940039.16 frames. ], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:47,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-28 14:08:51,006 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.482e+01 5.925e+01 6.824e+01 1.004e+02, threshold=1.185e+02, percent-clipped=0.0 +2024-07-28 14:08:55,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=167184.0, ans=0.0 +2024-07-28 14:09:03,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.73 vs. limit=22.5 +2024-07-28 14:09:03,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0 +2024-07-28 14:09:17,199 INFO [train.py:1114] (1/4) Epoch 13, batch 2750, loss[loss=0.1881, simple_loss=0.2765, pruned_loss=0.04985, over 4699.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2765, pruned_loss=0.04926, over 940482.35 frames. ], batch size: 12, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:18,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=167224.0, ans=0.0 +2024-07-28 14:09:23,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=167237.33333333334, ans=0.125 +2024-07-28 14:09:24,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167237.33333333334, ans=0.125 +2024-07-28 14:09:28,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.16 vs. limit=15.0 +2024-07-28 14:09:32,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167250.66666666666, ans=0.1 +2024-07-28 14:09:35,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=167250.66666666666, ans=0.035 +2024-07-28 14:09:47,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=167277.33333333334, ans=0.2 +2024-07-28 14:09:49,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=167277.33333333334, ans=0.0 +2024-07-28 14:09:52,620 INFO [train.py:1114] (1/4) Epoch 13, batch 2800, loss[loss=0.2351, simple_loss=0.308, pruned_loss=0.08114, over 3261.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2775, pruned_loss=0.04988, over 938112.23 frames. ], batch size: 35, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:55,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. 
limit=10.0 +2024-07-28 14:10:02,559 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.817e+01 5.664e+01 6.211e+01 7.205e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 14:10:11,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=167317.33333333334, ans=0.0 +2024-07-28 14:10:16,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.85 vs. limit=10.0 +2024-07-28 14:10:18,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=167344.0, ans=0.0 +2024-07-28 14:10:20,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=167344.0, ans=0.0 +2024-07-28 14:10:24,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=167344.0, ans=0.125 +2024-07-28 14:10:26,045 INFO [train.py:1114] (1/4) Epoch 13, batch 2850, loss[loss=0.1832, simple_loss=0.2706, pruned_loss=0.04791, over 4963.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2772, pruned_loss=0.04968, over 936460.68 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:10:26,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=167357.33333333334, ans=0.0 +2024-07-28 14:10:30,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=167357.33333333334, ans=0.125 +2024-07-28 14:10:33,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=167370.66666666666, ans=0.025 +2024-07-28 14:10:50,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.88 vs. limit=15.0 +2024-07-28 14:10:52,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=167410.66666666666, ans=0.025 +2024-07-28 14:10:59,331 INFO [train.py:1114] (1/4) Epoch 13, batch 2900, loss[loss=0.1601, simple_loss=0.2552, pruned_loss=0.03248, over 4824.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2783, pruned_loss=0.04994, over 939987.66 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:10:59,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=167424.0, ans=0.2 +2024-07-28 14:11:01,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167424.0, ans=0.125 +2024-07-28 14:11:02,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=167424.0, ans=0.04949747468305833 +2024-07-28 14:11:09,570 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.851e+01 6.550e+01 7.504e+01 1.142e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 14:11:18,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. 
limit=6.0 +2024-07-28 14:11:23,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167464.0, ans=0.0 +2024-07-28 14:11:28,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=167477.33333333334, ans=0.025 +2024-07-28 14:11:33,209 INFO [train.py:1114] (1/4) Epoch 13, batch 2950, loss[loss=0.189, simple_loss=0.2719, pruned_loss=0.05306, over 4704.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2765, pruned_loss=0.04942, over 938840.22 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:35,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=167490.66666666666, ans=0.025 +2024-07-28 14:11:38,262 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-07-28 14:11:48,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=167517.33333333334, ans=0.125 +2024-07-28 14:11:50,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167517.33333333334, ans=0.1 +2024-07-28 14:11:51,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167517.33333333334, ans=0.0 +2024-07-28 14:11:58,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167530.66666666666, ans=0.1 +2024-07-28 14:12:06,767 INFO [train.py:1114] (1/4) Epoch 13, batch 3000, loss[loss=0.1853, simple_loss=0.2903, pruned_loss=0.04013, over 4759.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2763, pruned_loss=0.04904, over 938406.21 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:06,767 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 14:12:10,918 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.7860, 3.4993, 2.1366, 3.7319, 3.2205, 3.4625, 4.0591, 3.7665], + device='cuda:1') +2024-07-28 14:12:14,778 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.3551, 4.9540, 4.3464, 3.9753], device='cuda:1') +2024-07-28 14:12:18,643 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1663, simple_loss=0.2701, pruned_loss=0.0312, over 944034.00 frames. +2024-07-28 14:12:18,644 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 14:12:21,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=167557.33333333334, ans=0.95 +2024-07-28 14:12:29,075 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.635e+01 6.154e+01 7.337e+01 1.248e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 14:12:31,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=167584.0, ans=0.0 +2024-07-28 14:12:33,456 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.40 vs. 
limit=22.5 +2024-07-28 14:12:36,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=167584.0, ans=0.0 +2024-07-28 14:12:37,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=167584.0, ans=0.0 +2024-07-28 14:12:38,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=167584.0, ans=0.125 +2024-07-28 14:12:38,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.30 vs. limit=15.0 +2024-07-28 14:12:39,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=167597.33333333334, ans=0.5 +2024-07-28 14:12:41,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=167597.33333333334, ans=0.125 +2024-07-28 14:12:52,947 INFO [train.py:1114] (1/4) Epoch 13, batch 3050, loss[loss=0.1579, simple_loss=0.2501, pruned_loss=0.03281, over 4644.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2772, pruned_loss=0.04958, over 936989.92 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:13:10,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167650.66666666666, ans=0.1 +2024-07-28 14:13:18,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=167664.0, ans=0.025 +2024-07-28 14:13:25,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=167677.33333333334, ans=0.0 +2024-07-28 14:13:26,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=167677.33333333334, ans=0.125 +2024-07-28 14:13:27,843 INFO [train.py:1114] (1/4) Epoch 13, batch 3100, loss[loss=0.2089, simple_loss=0.3037, pruned_loss=0.05707, over 4649.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2768, pruned_loss=0.04957, over 937554.51 frames. ], batch size: 16, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:13:34,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.31 vs. limit=15.0 +2024-07-28 14:13:37,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.158e+01 5.544e+01 6.108e+01 7.072e+01 9.683e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 14:13:45,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=167717.33333333334, ans=0.5 +2024-07-28 14:14:01,403 INFO [train.py:1114] (1/4) Epoch 13, batch 3150, loss[loss=0.1981, simple_loss=0.2907, pruned_loss=0.05271, over 4603.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2761, pruned_loss=0.04896, over 937917.96 frames. ], batch size: 17, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:02,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.38 vs. 
limit=12.0 +2024-07-28 14:14:04,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=167757.33333333334, ans=0.125 +2024-07-28 14:14:08,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.52 vs. limit=10.0 +2024-07-28 14:14:22,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=167784.0, ans=0.025 +2024-07-28 14:14:37,496 INFO [train.py:1114] (1/4) Epoch 13, batch 3200, loss[loss=0.194, simple_loss=0.2844, pruned_loss=0.05181, over 4825.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.275, pruned_loss=0.04815, over 939510.74 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:41,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=167824.0, ans=0.125 +2024-07-28 14:14:47,194 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+01 5.665e+01 6.377e+01 7.022e+01 9.065e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:14:57,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=167837.33333333334, ans=0.125 +2024-07-28 14:15:01,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=167850.66666666666, ans=0.125 +2024-07-28 14:15:18,617 INFO [train.py:1114] (1/4) Epoch 13, batch 3250, loss[loss=0.1842, simple_loss=0.2855, pruned_loss=0.04141, over 4932.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2758, pruned_loss=0.04839, over 940461.77 frames. ], batch size: 14, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:23,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167890.66666666666, ans=0.1 +2024-07-28 14:15:24,858 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:15:28,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167904.0, ans=0.0 +2024-07-28 14:15:46,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=167944.0, ans=0.5 +2024-07-28 14:15:52,607 INFO [train.py:1114] (1/4) Epoch 13, batch 3300, loss[loss=0.2019, simple_loss=0.2865, pruned_loss=0.05864, over 4718.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2749, pruned_loss=0.0487, over 941055.96 frames. 
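During the validation pass logged at 14:12 above, zipformer.py prints attn_weights_entropy tensors (for example [0.7860, 3.4993, 2.1366, ...]); near-zero entropy flags attention heads that have collapsed onto a single key, so this is a cheap health check on the self-attention modules. A minimal sketch of the statistic, assuming weights of shape (heads, queries, keys) that sum to 1 over keys; the aggregation granularity in the real logs clearly varies (8 values for one module, 4 for another), so the shapes here are illustrative:

```python
import torch


def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Mean entropy (nats) of each head's attention distribution.

    attn: (num_heads, num_queries, num_keys), rows summing to 1.
    Sketch of the statistic behind zipformer.py's log lines; the exact
    shapes and averaging used there are assumptions.
    """
    eps = 1.0e-20
    entropy = -(attn * (attn + eps).log()).sum(dim=-1)  # (heads, queries)
    return entropy.mean(dim=-1)                          # per-head average


# uniform weights over 50 keys -> entropy = ln(50) ≈ 3.91 per head
attn = torch.full((8, 50, 50), 0.02)
print(attn_weights_entropy(attn))
```

The logged values between roughly 0.8 and 4.1 are consistent with this scale: healthy heads sit somewhere below the uniform ceiling, while a value pinned near zero would indicate a degenerate head.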
], batch size: 19, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:16:02,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167970.66666666666, ans=0.1 +2024-07-28 14:16:02,796 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.327e+01 5.938e+01 6.571e+01 1.063e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 14:16:02,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=167970.66666666666, ans=0.07 +2024-07-28 14:16:11,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=167997.33333333334, ans=0.0 +2024-07-28 14:16:26,156 INFO [train.py:1114] (1/4) Epoch 13, batch 3350, loss[loss=0.1827, simple_loss=0.2672, pruned_loss=0.04913, over 4632.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2747, pruned_loss=0.04857, over 938918.70 frames. ], batch size: 17, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:16:27,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=168024.0, ans=0.125 +2024-07-28 14:16:29,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168024.0, ans=0.1 +2024-07-28 14:16:30,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168024.0, ans=0.0 +2024-07-28 14:16:44,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=168050.66666666666, ans=0.125 +2024-07-28 14:17:00,155 INFO [train.py:1114] (1/4) Epoch 13, batch 3400, loss[loss=0.1642, simple_loss=0.2469, pruned_loss=0.04079, over 4800.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.276, pruned_loss=0.0497, over 937664.14 frames. ], batch size: 11, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:10,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.911e+01 6.407e+01 7.548e+01 1.179e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 14:17:13,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=168117.33333333334, ans=0.125 +2024-07-28 14:17:19,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=12.0 +2024-07-28 14:17:33,660 INFO [train.py:1114] (1/4) Epoch 13, batch 3450, loss[loss=0.1892, simple_loss=0.2849, pruned_loss=0.04678, over 4723.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2757, pruned_loss=0.04922, over 937778.23 frames. 
], batch size: 19, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:37,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=168157.33333333334, ans=0.125 +2024-07-28 14:17:48,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=168184.0, ans=0.125 +2024-07-28 14:17:54,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168184.0, ans=0.125 +2024-07-28 14:18:01,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168210.66666666666, ans=0.125 +2024-07-28 14:18:03,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168210.66666666666, ans=0.125 +2024-07-28 14:18:08,626 INFO [train.py:1114] (1/4) Epoch 13, batch 3500, loss[loss=0.1764, simple_loss=0.257, pruned_loss=0.04793, over 4948.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.276, pruned_loss=0.04961, over 938319.50 frames. ], batch size: 12, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:18:10,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=168224.0, ans=0.2 +2024-07-28 14:18:16,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=168237.33333333334, ans=0.0 +2024-07-28 14:18:18,524 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+01 5.616e+01 6.376e+01 7.329e+01 9.586e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:18:24,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=168250.66666666666, ans=0.125 +2024-07-28 14:18:43,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.23 vs. limit=15.0 +2024-07-28 14:18:44,200 INFO [train.py:1114] (1/4) Epoch 13, batch 3550, loss[loss=0.2047, simple_loss=0.2937, pruned_loss=0.05781, over 4657.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2757, pruned_loss=0.04915, over 938907.38 frames. ], batch size: 14, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:19:00,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=168317.33333333334, ans=0.04949747468305833 +2024-07-28 14:19:01,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=168317.33333333334, ans=0.0 +2024-07-28 14:19:11,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.95 vs. limit=22.5 +2024-07-28 14:19:17,382 INFO [train.py:1114] (1/4) Epoch 13, batch 3600, loss[loss=0.1686, simple_loss=0.264, pruned_loss=0.0366, over 4954.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2765, pruned_loss=0.04936, over 940456.20 frames. ], batch size: 13, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:19:27,241 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.903e+01 6.553e+01 7.584e+01 1.363e+02, threshold=1.311e+02, percent-clipped=1.0 +2024-07-28 14:19:35,006 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.92 vs. 
limit=12.0 +2024-07-28 14:19:38,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168397.33333333334, ans=0.125 +2024-07-28 14:19:42,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168397.33333333334, ans=0.1 +2024-07-28 14:19:44,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=168410.66666666666, ans=0.07 +2024-07-28 14:19:47,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=168410.66666666666, ans=0.0 +2024-07-28 14:19:50,591 INFO [train.py:1114] (1/4) Epoch 13, batch 3650, loss[loss=0.2001, simple_loss=0.2897, pruned_loss=0.05525, over 4906.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2758, pruned_loss=0.04901, over 940896.46 frames. ], batch size: 15, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:20:00,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=168437.33333333334, ans=0.125 +2024-07-28 14:20:05,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168450.66666666666, ans=0.1 +2024-07-28 14:20:12,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.07 vs. limit=15.0 +2024-07-28 14:20:14,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168464.0, ans=0.1 +2024-07-28 14:20:14,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.25 vs. limit=15.0 +2024-07-28 14:20:16,129 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-28 14:20:26,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168477.33333333334, ans=0.125 +2024-07-28 14:20:27,678 INFO [train.py:1114] (1/4) Epoch 13, batch 3700, loss[loss=0.1872, simple_loss=0.2819, pruned_loss=0.04631, over 4932.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2748, pruned_loss=0.04858, over 942027.96 frames. ], batch size: 14, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:20:28,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=168490.66666666666, ans=0.07 +2024-07-28 14:20:30,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.52 vs. limit=22.5 +2024-07-28 14:20:32,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.62 vs. 
limit=15.0 +2024-07-28 14:20:33,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168490.66666666666, ans=0.1 +2024-07-28 14:20:35,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168504.0, ans=0.125 +2024-07-28 14:20:36,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=168504.0, ans=0.125 +2024-07-28 14:20:37,608 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.274e+01 5.551e+01 6.034e+01 6.765e+01 1.404e+02, threshold=1.207e+02, percent-clipped=1.0 +2024-07-28 14:20:45,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168517.33333333334, ans=0.125 +2024-07-28 14:20:49,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=168530.66666666666, ans=0.0 +2024-07-28 14:20:51,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=168530.66666666666, ans=0.125 +2024-07-28 14:20:58,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.43 vs. limit=10.0 +2024-07-28 14:20:59,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168544.0, ans=0.125 +2024-07-28 14:21:01,039 INFO [train.py:1114] (1/4) Epoch 13, batch 3750, loss[loss=0.1689, simple_loss=0.2538, pruned_loss=0.04202, over 4801.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2751, pruned_loss=0.0488, over 943420.75 frames. ], batch size: 11, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:21:09,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=168570.66666666666, ans=0.025 +2024-07-28 14:21:15,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=168584.0, ans=0.0 +2024-07-28 14:21:17,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=168584.0, ans=0.035 +2024-07-28 14:21:22,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.11 vs. limit=15.0 +2024-07-28 14:21:28,775 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=12.0 +2024-07-28 14:21:34,567 INFO [train.py:1114] (1/4) Epoch 13, batch 3800, loss[loss=0.1843, simple_loss=0.2768, pruned_loss=0.04591, over 4802.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.275, pruned_loss=0.04894, over 941949.05 frames. 
], batch size: 14, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:21:35,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168624.0, ans=0.1 +2024-07-28 14:21:37,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=168624.0, ans=0.125 +2024-07-28 14:21:37,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=168624.0, ans=0.05 +2024-07-28 14:21:37,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=168624.0, ans=0.125 +2024-07-28 14:21:38,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.19 vs. limit=15.0 +2024-07-28 14:21:40,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=168637.33333333334, ans=0.0 +2024-07-28 14:21:41,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=168637.33333333334, ans=0.025 +2024-07-28 14:21:41,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=168637.33333333334, ans=0.0 +2024-07-28 14:21:42,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=168637.33333333334, ans=0.2 +2024-07-28 14:21:42,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168637.33333333334, ans=0.1 +2024-07-28 14:21:44,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.768e+01 5.664e+01 6.425e+01 7.356e+01 1.029e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 14:21:48,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=168650.66666666666, ans=0.125 +2024-07-28 14:21:50,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168650.66666666666, ans=0.1 +2024-07-28 14:21:54,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=168664.0, ans=0.2 +2024-07-28 14:22:08,474 INFO [train.py:1114] (1/4) Epoch 13, batch 3850, loss[loss=0.1785, simple_loss=0.2713, pruned_loss=0.04282, over 4589.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2756, pruned_loss=0.04925, over 942437.67 frames. 
], batch size: 16, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:22:14,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=168704.0, ans=0.0
+2024-07-28 14:22:17,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=168704.0, ans=0.025
+2024-07-28 14:22:26,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=168717.33333333334, ans=0.125
+2024-07-28 14:22:29,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168730.66666666666, ans=0.1
+2024-07-28 14:22:35,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168744.0, ans=0.1
+2024-07-28 14:22:37,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168744.0, ans=0.125
+2024-07-28 14:22:41,755 INFO [train.py:1114] (1/4) Epoch 13, batch 3900, loss[loss=0.1981, simple_loss=0.3001, pruned_loss=0.04804, over 4811.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04935, over 942632.17 frames. ], batch size: 14, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:22:43,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168757.33333333334, ans=0.1
+2024-07-28 14:22:44,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.99 vs. limit=6.0
+2024-07-28 14:22:51,514 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.611e+01 6.115e+01 6.716e+01 9.720e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 14:22:52,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=168770.66666666666, ans=0.125
+2024-07-28 14:22:58,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=168784.0, ans=0.125
+2024-07-28 14:22:59,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=168784.0, ans=0.0
+2024-07-28 14:23:04,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=168797.33333333334, ans=0.0
+2024-07-28 14:23:12,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=168810.66666666666, ans=0.125
+2024-07-28 14:23:13,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0
+2024-07-28 14:23:17,165 INFO [train.py:1114] (1/4) Epoch 13, batch 3950, loss[loss=0.1991, simple_loss=0.2982, pruned_loss=0.05, over 4845.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2758, pruned_loss=0.04914, over 944589.29 frames. ], batch size: 16, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:23:25,552 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.21 vs. limit=22.5
+2024-07-28 14:23:28,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=168837.33333333334, ans=0.5
+2024-07-28 14:23:29,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=168850.66666666666, ans=0.125
+2024-07-28 14:23:32,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=168850.66666666666, ans=0.125
+2024-07-28 14:23:37,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168864.0, ans=0.1
+2024-07-28 14:23:38,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=168864.0, ans=0.125
+2024-07-28 14:23:44,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168877.33333333334, ans=0.1
+2024-07-28 14:23:48,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=168877.33333333334, ans=0.025
+2024-07-28 14:23:50,572 INFO [train.py:1114] (1/4) Epoch 13, batch 4000, loss[loss=0.161, simple_loss=0.2576, pruned_loss=0.03221, over 4771.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2764, pruned_loss=0.04996, over 941047.45 frames. ], batch size: 12, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:23:53,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=168890.66666666666, ans=0.0
+2024-07-28 14:23:55,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=168890.66666666666, ans=0.125
+2024-07-28 14:23:55,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168890.66666666666, ans=0.125
+2024-07-28 14:24:00,468 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.269e+01 5.777e+01 6.304e+01 7.103e+01 1.026e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 14:24:15,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.82 vs. limit=10.0
+2024-07-28 14:24:16,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.52 vs. limit=15.0
+2024-07-28 14:24:21,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168944.0, ans=0.1
+2024-07-28 14:24:25,720 INFO [train.py:1114] (1/4) Epoch 13, batch 4050, loss[loss=0.2614, simple_loss=0.3257, pruned_loss=0.09857, over 3584.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.276, pruned_loss=0.0496, over 939784.66 frames. ], batch size: 35, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:24:52,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.55 vs. limit=15.0
+2024-07-28 14:24:55,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=169010.66666666666, ans=0.015
+2024-07-28 14:24:58,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169010.66666666666, ans=0.1
+2024-07-28 14:24:59,972 INFO [train.py:1114] (1/4) Epoch 13, batch 4100, loss[loss=0.2445, simple_loss=0.3239, pruned_loss=0.08256, over 4895.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2769, pruned_loss=0.05039, over 938745.54 frames. ], batch size: 15, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:25:03,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=169024.0, ans=0.0
+2024-07-28 14:25:06,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169037.33333333334, ans=0.1
+2024-07-28 14:25:09,985 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.994e+01 6.398e+01 7.649e+01 1.244e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-28 14:25:35,749 INFO [train.py:1114] (1/4) Epoch 13, batch 4150, loss[loss=0.1718, simple_loss=0.2655, pruned_loss=0.0391, over 4823.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2763, pruned_loss=0.04964, over 938649.75 frames. ], batch size: 13, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:25:35,819 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:25:48,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=169104.0, ans=0.0
+2024-07-28 14:25:50,613 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.98 vs. limit=15.0
+2024-07-28 14:25:52,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=169117.33333333334, ans=0.125
+2024-07-28 14:26:06,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=169144.0, ans=0.025
+2024-07-28 14:26:07,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=169144.0, ans=0.025
+2024-07-28 14:26:08,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=15.0
+2024-07-28 14:26:10,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=169157.33333333334, ans=0.0
+2024-07-28 14:26:11,037 INFO [train.py:1114] (1/4) Epoch 13, batch 4200, loss[loss=0.2015, simple_loss=0.2953, pruned_loss=0.05382, over 4910.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.0493, over 940103.39 frames. ], batch size: 15, lr: 5.78e-03, grad_scale: 32.0
+2024-07-28 14:26:20,875 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.568e+01 6.158e+01 7.068e+01 9.655e+01, threshold=1.232e+02, percent-clipped=0.0
+2024-07-28 14:26:24,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=169184.0, ans=0.125
+2024-07-28 14:26:32,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=169197.33333333334, ans=0.125
+2024-07-28 14:26:43,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=169224.0, ans=0.2
+2024-07-28 14:26:44,494 INFO [train.py:1114] (1/4) Epoch 13, batch 4250, loss[loss=0.1729, simple_loss=0.2633, pruned_loss=0.04126, over 4634.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2755, pruned_loss=0.04961, over 941049.53 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 32.0
+2024-07-28 14:26:53,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=169237.33333333334, ans=0.125
+2024-07-28 14:26:56,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=169237.33333333334, ans=0.125
+2024-07-28 14:26:57,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.50 vs. limit=22.5
+2024-07-28 14:27:01,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0
+2024-07-28 14:27:06,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=169264.0, ans=0.025
+2024-07-28 14:27:11,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0
+2024-07-28 14:27:17,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=169290.66666666666, ans=0.125
+2024-07-28 14:27:17,704 INFO [train.py:1114] (1/4) Epoch 13, batch 4300, loss[loss=0.1663, simple_loss=0.2561, pruned_loss=0.03826, over 4757.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2748, pruned_loss=0.04947, over 940223.91 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:27:27,629 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.556e+01 6.095e+01 6.767e+01 1.249e+02, threshold=1.219e+02, percent-clipped=1.0
+2024-07-28 14:27:27,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=169304.0, ans=0.125
+2024-07-28 14:27:28,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.43 vs. limit=15.0
+2024-07-28 14:27:35,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169317.33333333334, ans=0.1
+2024-07-28 14:27:50,985 INFO [train.py:1114] (1/4) Epoch 13, batch 4350, loss[loss=0.1719, simple_loss=0.2575, pruned_loss=0.04316, over 4749.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2762, pruned_loss=0.04981, over 940953.16 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:27:51,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=169357.33333333334, ans=0.125
+2024-07-28 14:27:52,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=169357.33333333334, ans=0.125
+2024-07-28 14:27:57,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=169370.66666666666, ans=0.0
+2024-07-28 14:27:58,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=169370.66666666666, ans=0.0
+2024-07-28 14:28:04,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0
+2024-07-28 14:28:06,582 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.06 vs. limit=22.5
+2024-07-28 14:28:24,264 INFO [train.py:1114] (1/4) Epoch 13, batch 4400, loss[loss=0.1983, simple_loss=0.296, pruned_loss=0.05033, over 4819.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2755, pruned_loss=0.04911, over 940920.18 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:28:36,398 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 5.545e+01 6.054e+01 6.710e+01 1.195e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 14:28:40,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=169450.66666666666, ans=0.0
+2024-07-28 14:28:44,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=169450.66666666666, ans=0.125
+2024-07-28 14:28:44,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=169450.66666666666, ans=0.2
+2024-07-28 14:28:45,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169450.66666666666, ans=0.1
+2024-07-28 14:28:46,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=169464.0, ans=0.95
+2024-07-28 14:29:00,265 INFO [train.py:1114] (1/4) Epoch 13, batch 4450, loss[loss=0.1533, simple_loss=0.2501, pruned_loss=0.02823, over 4943.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04942, over 939235.28 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:29:05,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.05 vs. limit=10.0
+2024-07-28 14:29:07,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169504.0, ans=0.1
+2024-07-28 14:29:10,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=169504.0, ans=0.2
+2024-07-28 14:29:19,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=169517.33333333334, ans=0.125
+2024-07-28 14:29:33,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.96 vs. limit=15.0
+2024-07-28 14:29:38,960 INFO [train.py:1114] (1/4) Epoch 13, batch 4500, loss[loss=0.2067, simple_loss=0.2937, pruned_loss=0.05986, over 4751.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2765, pruned_loss=0.04969, over 938703.42 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:29:46,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=169570.66666666666, ans=0.0
+2024-07-28 14:29:48,688 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.617e+01 6.099e+01 7.289e+01 9.992e+01, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 14:29:51,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=169584.0, ans=0.0
+2024-07-28 14:29:53,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=169584.0, ans=0.125
+2024-07-28 14:29:54,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=169584.0, ans=0.2
+2024-07-28 14:31:42,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.34 vs. limit=22.5
+2024-07-28 14:31:47,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=169597.33333333334, ans=0.0
+2024-07-28 14:31:53,667 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.04 vs. limit=22.5
+2024-07-28 14:31:58,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=169610.66666666666, ans=0.0
+2024-07-28 14:31:59,574 INFO [train.py:1114] (1/4) Epoch 13, batch 4550, loss[loss=0.2037, simple_loss=0.2837, pruned_loss=0.06188, over 4893.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.276, pruned_loss=0.04959, over 940680.74 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:32:13,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.04 vs. limit=15.0
+2024-07-28 14:32:13,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169650.66666666666, ans=0.1
+2024-07-28 14:32:17,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=169650.66666666666, ans=0.0
+2024-07-28 14:32:33,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169690.66666666666, ans=0.125
+2024-07-28 14:32:34,423 INFO [train.py:1114] (1/4) Epoch 13, batch 4600, loss[loss=0.1909, simple_loss=0.2924, pruned_loss=0.04467, over 4537.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2753, pruned_loss=0.04911, over 938784.94 frames. ], batch size: 21, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:32:37,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.27 vs. limit=15.0
+2024-07-28 14:32:42,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=169704.0, ans=0.125
+2024-07-28 14:32:48,089 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.248e+01 5.789e+01 6.719e+01 7.977e+01 1.194e+02, threshold=1.344e+02, percent-clipped=0.0
+2024-07-28 14:32:51,550 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:33:04,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=169730.66666666666, ans=0.025
+2024-07-28 14:33:12,260 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:33:12,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=169757.33333333334, ans=0.5
+2024-07-28 14:33:12,837 INFO [train.py:1114] (1/4) Epoch 13, batch 4650, loss[loss=0.1965, simple_loss=0.2861, pruned_loss=0.05345, over 4827.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2766, pruned_loss=0.04937, over 940369.19 frames. ], batch size: 16, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:33:13,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=12.0
+2024-07-28 14:33:14,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=169757.33333333334, ans=0.125
+2024-07-28 14:33:14,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=169757.33333333334, ans=0.125
+2024-07-28 14:33:16,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.96 vs. limit=15.0
+2024-07-28 14:33:18,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169757.33333333334, ans=0.1
+2024-07-28 14:33:21,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=169770.66666666666, ans=0.0
+2024-07-28 14:33:25,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=169784.0, ans=0.2
+2024-07-28 14:33:29,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=15.0
+2024-07-28 14:33:31,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=169784.0, ans=10.0
+2024-07-28 14:33:34,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=169797.33333333334, ans=0.05
+2024-07-28 14:33:45,123 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.71 vs. limit=15.0
+2024-07-28 14:33:46,616 INFO [train.py:1114] (1/4) Epoch 13, batch 4700, loss[loss=0.1364, simple_loss=0.2119, pruned_loss=0.03042, over 4712.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2763, pruned_loss=0.04939, over 937933.09 frames. ], batch size: 11, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:33:51,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-28 14:33:56,552 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.422e+01 6.008e+01 7.035e+01 1.017e+02, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 14:34:02,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169850.66666666666, ans=0.1
+2024-07-28 14:34:13,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0
+2024-07-28 14:34:17,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=169877.33333333334, ans=0.125
+2024-07-28 14:34:20,163 INFO [train.py:1114] (1/4) Epoch 13, batch 4750, loss[loss=0.1954, simple_loss=0.2823, pruned_loss=0.05428, over 4455.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2774, pruned_loss=0.05014, over 936009.16 frames. ], batch size: 21, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:34:20,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-07-28 14:34:22,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=15.0
+2024-07-28 14:34:25,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=169890.66666666666, ans=0.0
+2024-07-28 14:34:53,980 INFO [train.py:1114] (1/4) Epoch 13, batch 4800, loss[loss=0.1705, simple_loss=0.258, pruned_loss=0.04148, over 4690.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2766, pruned_loss=0.04995, over 933225.31 frames. ], batch size: 13, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:35:00,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=169970.66666666666, ans=0.125
+2024-07-28 14:35:03,983 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.668e+01 6.259e+01 7.420e+01 1.160e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 14:35:04,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=169970.66666666666, ans=0.025
+2024-07-28 14:35:09,668 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:35:13,323 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:35:31,453 INFO [train.py:1114] (1/4) Epoch 13, batch 4850, loss[loss=0.2163, simple_loss=0.3053, pruned_loss=0.06363, over 4732.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.277, pruned_loss=0.0502, over 932931.77 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:35:33,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=170024.0, ans=0.125
+2024-07-28 14:35:40,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170024.0, ans=0.0
+2024-07-28 14:35:41,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=170037.33333333334, ans=0.125
+2024-07-28 14:35:42,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.09 vs. limit=15.0
+2024-07-28 14:35:54,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=170050.66666666666, ans=0.125
+2024-07-28 14:36:01,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=170064.0, ans=0.125
+2024-07-28 14:36:05,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170077.33333333334, ans=0.1
+2024-07-28 14:36:09,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=170077.33333333334, ans=0.0
+2024-07-28 14:36:14,100 INFO [train.py:1114] (1/4) Epoch 13, batch 4900, loss[loss=0.1754, simple_loss=0.2557, pruned_loss=0.04753, over 4768.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2763, pruned_loss=0.04987, over 934503.08 frames. ], batch size: 13, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:36:34,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170104.0, ans=0.125
+2024-07-28 14:36:37,689 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.628e+01 6.419e+01 7.139e+01 1.048e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 14:36:37,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=170104.0, ans=0.04949747468305833
+2024-07-28 14:36:45,666 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:36:53,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170130.66666666666, ans=0.125
+2024-07-28 14:36:58,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=170144.0, ans=0.125
+2024-07-28 14:37:04,268 INFO [train.py:1114] (1/4) Epoch 13, batch 4950, loss[loss=0.2829, simple_loss=0.3402, pruned_loss=0.1128, over 3604.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.277, pruned_loss=0.05067, over 931525.26 frames. ], batch size: 36, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:37:13,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.92 vs. limit=5.0
+2024-07-28 14:37:15,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=170170.66666666666, ans=0.0
+2024-07-28 14:37:18,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=170184.0, ans=0.2
+2024-07-28 14:37:40,472 INFO [train.py:1114] (1/4) Epoch 13, batch 5000, loss[loss=0.2228, simple_loss=0.3146, pruned_loss=0.06546, over 4673.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2772, pruned_loss=0.05079, over 935452.52 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:37:45,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0
+2024-07-28 14:37:46,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=170237.33333333334, ans=0.0
+2024-07-28 14:37:52,211 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.707e+01 6.178e+01 6.994e+01 1.058e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 14:38:14,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0
+2024-07-28 14:38:15,700 INFO [train.py:1114] (1/4) Epoch 13, batch 5050, loss[loss=0.1626, simple_loss=0.2453, pruned_loss=0.03993, over 4866.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2761, pruned_loss=0.0503, over 937952.08 frames. ], batch size: 12, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:38:18,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=170290.66666666666, ans=0.2
+2024-07-28 14:38:27,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=170304.0, ans=0.0
+2024-07-28 14:38:36,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=170330.66666666666, ans=0.0
+2024-07-28 14:38:41,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.69 vs. limit=15.0
+2024-07-28 14:38:41,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=170330.66666666666, ans=0.0
+2024-07-28 14:38:41,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=170330.66666666666, ans=0.2
+2024-07-28 14:38:42,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=170330.66666666666, ans=0.125
+2024-07-28 14:38:43,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=170330.66666666666, ans=0.125
+2024-07-28 14:38:51,500 INFO [train.py:1114] (1/4) Epoch 13, batch 5100, loss[loss=0.15, simple_loss=0.2434, pruned_loss=0.02826, over 4780.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2773, pruned_loss=0.05059, over 935561.13 frames. ], batch size: 12, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:38:57,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.57 vs. limit=15.0
+2024-07-28 14:39:04,402 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.671e+01 6.468e+01 7.600e+01 1.076e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-28 14:39:05,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.26 vs. limit=15.0
+2024-07-28 14:39:05,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=170370.66666666666, ans=0.07
+2024-07-28 14:39:16,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=170397.33333333334, ans=0.125
+2024-07-28 14:39:17,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=170397.33333333334, ans=0.0
+2024-07-28 14:39:27,930 INFO [train.py:1114] (1/4) Epoch 13, batch 5150, loss[loss=0.1838, simple_loss=0.2688, pruned_loss=0.04944, over 4836.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2775, pruned_loss=0.0505, over 936177.02 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:39:28,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=170424.0, ans=0.125
+2024-07-28 14:39:33,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170424.0, ans=0.125
+2024-07-28 14:39:39,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=170437.33333333334, ans=0.2
+2024-07-28 14:39:40,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=170450.66666666666, ans=0.125
+2024-07-28 14:40:01,713 INFO [train.py:1114] (1/4) Epoch 13, batch 5200, loss[loss=0.1843, simple_loss=0.2691, pruned_loss=0.04972, over 4660.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2768, pruned_loss=0.05004, over 936003.55 frames. ], batch size: 14, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:40:06,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=170490.66666666666, ans=0.125
+2024-07-28 14:40:07,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=170504.0, ans=0.2
+2024-07-28 14:40:11,855 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.593e+01 6.249e+01 7.313e+01 1.397e+02, threshold=1.250e+02, percent-clipped=1.0
+2024-07-28 14:40:21,705 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.23 vs. limit=15.0
+2024-07-28 14:40:21,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.50 vs. limit=15.0
+2024-07-28 14:40:35,396 INFO [train.py:1114] (1/4) Epoch 13, batch 5250, loss[loss=0.1794, simple_loss=0.2671, pruned_loss=0.04585, over 4895.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2751, pruned_loss=0.04921, over 935571.84 frames. ], batch size: 13, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:40:41,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170557.33333333334, ans=0.1
+2024-07-28 14:40:41,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=170570.66666666666, ans=0.125
+2024-07-28 14:40:42,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=170570.66666666666, ans=0.0
+2024-07-28 14:40:50,048 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:41:01,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=170597.33333333334, ans=0.1
+2024-07-28 14:41:02,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.28 vs. limit=15.0
+2024-07-28 14:41:06,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=170610.66666666666, ans=0.125
+2024-07-28 14:41:08,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=170610.66666666666, ans=0.025
+2024-07-28 14:41:09,237 INFO [train.py:1114] (1/4) Epoch 13, batch 5300, loss[loss=0.1982, simple_loss=0.2749, pruned_loss=0.06072, over 4637.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2758, pruned_loss=0.04968, over 934283.35 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:41:10,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=170624.0, ans=0.0
+2024-07-28 14:41:19,623 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.756e+01 6.384e+01 7.054e+01 9.587e+01, threshold=1.277e+02, percent-clipped=0.0
+2024-07-28 14:41:47,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=170690.66666666666, ans=0.125
+2024-07-28 14:41:47,976 INFO [train.py:1114] (1/4) Epoch 13, batch 5350, loss[loss=0.1712, simple_loss=0.249, pruned_loss=0.04674, over 4546.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2772, pruned_loss=0.05006, over 936171.46 frames. ], batch size: 10, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:41:48,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=170690.66666666666, ans=0.025
+2024-07-28 14:41:51,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=170690.66666666666, ans=0.125
+2024-07-28 14:42:05,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=8.30 vs. limit=15.0
+2024-07-28 14:42:08,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170717.33333333334, ans=0.1
+2024-07-28 14:42:09,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.04 vs. limit=22.5
+2024-07-28 14:42:10,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=170730.66666666666, ans=0.2
+2024-07-28 14:42:15,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=170730.66666666666, ans=0.1
+2024-07-28 14:42:20,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170744.0, ans=0.1
+2024-07-28 14:42:34,763 INFO [train.py:1114] (1/4) Epoch 13, batch 5400, loss[loss=0.2172, simple_loss=0.3065, pruned_loss=0.06399, over 4117.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2781, pruned_loss=0.0508, over 930896.90 frames. ], batch size: 25, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:42:38,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=170757.33333333334, ans=0.125
+2024-07-28 14:42:39,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=170757.33333333334, ans=0.2
+2024-07-28 14:42:46,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.45 vs. limit=15.0
+2024-07-28 14:42:47,125 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.692e+01 6.413e+01 7.093e+01 1.081e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-28 14:42:49,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170784.0, ans=0.1
+2024-07-28 14:42:54,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=170784.0, ans=0.125
+2024-07-28 14:43:07,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.87 vs. limit=15.0
+2024-07-28 14:43:09,613 INFO [train.py:1114] (1/4) Epoch 13, batch 5450, loss[loss=0.1581, simple_loss=0.2372, pruned_loss=0.03951, over 4683.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2759, pruned_loss=0.04987, over 933605.55 frames. ], batch size: 11, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:43:19,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=170837.33333333334, ans=0.0
+2024-07-28 14:43:22,998 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:43:44,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=170877.33333333334, ans=0.125
+2024-07-28 14:43:46,361 INFO [train.py:1114] (1/4) Epoch 13, batch 5500, loss[loss=0.1734, simple_loss=0.2632, pruned_loss=0.04181, over 4182.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2757, pruned_loss=0.05026, over 931173.91 frames. ], batch size: 25, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:43:52,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=170890.66666666666, ans=0.0
+2024-07-28 14:43:53,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170904.0, ans=0.1
+2024-07-28 14:43:55,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=170904.0, ans=0.2
+2024-07-28 14:43:57,654 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.576e+01 6.394e+01 7.172e+01 9.673e+01, threshold=1.279e+02, percent-clipped=0.0
+2024-07-28 14:44:00,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170917.33333333334, ans=0.0
+2024-07-28 14:44:32,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.59 vs. limit=15.0
+2024-07-28 14:44:42,497 INFO [train.py:1114] (1/4) Epoch 13, batch 5550, loss[loss=0.1819, simple_loss=0.277, pruned_loss=0.04337, over 4707.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2747, pruned_loss=0.04956, over 933663.62 frames. ], batch size: 12, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:44:55,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=170957.33333333334, ans=0.05
+2024-07-28 14:45:05,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170984.0, ans=0.1
+2024-07-28 14:45:11,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.50 vs. limit=12.0
+2024-07-28 14:45:25,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=170997.33333333334, ans=0.125
+2024-07-28 14:45:32,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171010.66666666666, ans=0.1
+2024-07-28 14:45:34,088 INFO [train.py:1114] (1/4) Epoch 13, batch 5600, loss[loss=0.1707, simple_loss=0.2638, pruned_loss=0.03879, over 4743.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2755, pruned_loss=0.04978, over 934770.32 frames. ], batch size: 14, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:45:34,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=171024.0, ans=0.5
+2024-07-28 14:45:35,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.14 vs. limit=15.0
+2024-07-28 14:45:44,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.519e+01 5.953e+01 6.683e+01 8.989e+01, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 14:45:46,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. limit=15.0
+2024-07-28 14:46:00,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=171077.33333333334, ans=0.125
+2024-07-28 14:46:01,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=171077.33333333334, ans=0.0
+2024-07-28 14:46:07,380 INFO [train.py:1114] (1/4) Epoch 13, batch 5650, loss[loss=0.1886, simple_loss=0.2771, pruned_loss=0.05003, over 4655.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2748, pruned_loss=0.04917, over 937130.91 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:46:09,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=171090.66666666666, ans=0.125
+2024-07-28 14:46:18,775 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.81 vs. limit=15.0
+2024-07-28 14:46:37,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171144.0, ans=0.1
+2024-07-28 14:46:40,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=171144.0, ans=0.125
+2024-07-28 14:46:42,391 INFO [train.py:1114] (1/4) Epoch 13, batch 5700, loss[loss=0.1593, simple_loss=0.2586, pruned_loss=0.03005, over 4697.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2753, pruned_loss=0.0496, over 938098.31 frames. ], batch size: 13, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:46:53,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=171170.66666666666, ans=0.0
+2024-07-28 14:46:58,589 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 5.340e+01 5.994e+01 6.863e+01 1.115e+02, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 14:47:05,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=171184.0, ans=0.125
+2024-07-28 14:47:08,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=171197.33333333334, ans=0.125
+2024-07-28 14:47:13,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=171197.33333333334, ans=0.125
+2024-07-28 14:47:23,375 INFO [train.py:1114] (1/4) Epoch 13, batch 5750, loss[loss=0.2102, simple_loss=0.3148, pruned_loss=0.05276, over 4631.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2759, pruned_loss=0.04961, over 938054.64 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:47:31,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.63 vs. limit=22.5
+2024-07-28 14:47:43,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.67 vs. limit=22.5
+2024-07-28 14:47:45,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171264.0, ans=0.1
+2024-07-28 14:47:45,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=171264.0, ans=0.125
+2024-07-28 14:47:49,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=171264.0, ans=0.2
+2024-07-28 14:47:56,851 INFO [train.py:1114] (1/4) Epoch 13, batch 5800, loss[loss=0.1874, simple_loss=0.2838, pruned_loss=0.04549, over 4682.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04929, over 937179.84 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:48:01,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=171290.66666666666, ans=0.0
+2024-07-28 14:48:06,270 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.67 vs. limit=22.5
+2024-07-28 14:48:07,660 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.852e+01 6.546e+01 7.322e+01 1.389e+02, threshold=1.309e+02, percent-clipped=1.0
+2024-07-28 14:48:12,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171304.0, ans=0.1
+2024-07-28 14:48:14,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171317.33333333334, ans=0.1
+2024-07-28 14:48:16,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=171317.33333333334, ans=0.0
+2024-07-28 14:48:18,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=171317.33333333334, ans=0.025
+2024-07-28 14:48:22,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=171330.66666666666, ans=0.125
+2024-07-28 14:48:23,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=171330.66666666666, ans=0.1
+2024-07-28 14:48:35,956 INFO [train.py:1114] (1/4) Epoch 13, batch 5850, loss[loss=0.2183, simple_loss=0.3069, pruned_loss=0.06491, over 4510.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2754, pruned_loss=0.04941, over 937700.99 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:48:49,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171370.66666666666, ans=0.125
+2024-07-28 14:48:52,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=171384.0, ans=0.04949747468305833
+2024-07-28 14:48:54,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=171384.0, ans=0.2
+2024-07-28 14:48:55,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=171384.0, ans=0.0
+2024-07-28 14:48:56,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=171384.0, ans=0.0
+2024-07-28 14:49:00,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171397.33333333334, ans=0.0
+2024-07-28 14:49:02,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=171397.33333333334, ans=0.125
+2024-07-28 14:49:08,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=171410.66666666666, ans=0.125
+2024-07-28 14:49:13,156 INFO [train.py:1114] (1/4) Epoch 13, batch 5900, loss[loss=0.2231, simple_loss=0.3297, pruned_loss=0.05827, over 4701.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2753, pruned_loss=0.04912, over 938160.89 frames. ], batch size: 15, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:49:14,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171424.0, ans=0.1
+2024-07-28 14:49:16,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=171424.0, ans=0.125
+2024-07-28 14:49:21,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=171437.33333333334, ans=0.125
+2024-07-28 14:49:46,003 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.643e+01 6.441e+01 7.134e+01 1.016e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-28 14:49:54,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=171450.66666666666, ans=0.0
+2024-07-28 14:50:06,670 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=15.0
+2024-07-28 14:50:09,014 INFO [train.py:1114] (1/4) Epoch 13, batch 5950, loss[loss=0.2248, simple_loss=0.2966, pruned_loss=0.07652, over 4679.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2758, pruned_loss=0.04959, over 939895.23 frames. ], batch size: 15, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:50:14,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=171490.66666666666, ans=0.2
+2024-07-28 14:50:19,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=171504.0, ans=0.125
+2024-07-28 14:50:33,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=171530.66666666666, ans=0.125
+2024-07-28 14:50:37,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=171530.66666666666, ans=0.0
+2024-07-28 14:50:38,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171530.66666666666, ans=0.1
+2024-07-28 14:50:46,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=171557.33333333334, ans=0.125
+2024-07-28 14:50:47,139 INFO [train.py:1114] (1/4) Epoch 13, batch 6000, loss[loss=0.1885, simple_loss=0.2867, pruned_loss=0.0452, over 4269.00 frames. ], tot_loss[loss=0.187, simple_loss=0.275, pruned_loss=0.04945, over 936693.07 frames. ], batch size: 26, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:50:47,139 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 14:51:05,731 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.7108, 2.0947, 2.1058, 1.9988, 2.3867, 2.3980, 2.3167, 2.1556],
+       device='cuda:1')
+2024-07-28 14:51:10,555 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.0372, 5.6249, 5.2263, 5.9844], device='cuda:1')
+2024-07-28 14:51:12,226 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1644, simple_loss=0.2689, pruned_loss=0.02993, over 944034.00 frames.
+2024-07-28 14:51:12,227 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 14:51:15,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=171557.33333333334, ans=0.125
+2024-07-28 14:51:23,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.03 vs. limit=15.0
+2024-07-28 14:51:25,689 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.656e+01 6.363e+01 7.172e+01 1.139e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-28 14:51:41,698 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:51:57,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171610.66666666666, ans=0.1
+2024-07-28 14:52:00,226 INFO [train.py:1114] (1/4) Epoch 13, batch 6050, loss[loss=0.1653, simple_loss=0.2498, pruned_loss=0.04043, over 4773.00 frames. ], tot_loss[loss=0.187, simple_loss=0.275, pruned_loss=0.04952, over 937757.70 frames. ], batch size: 12, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:52:03,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=171624.0, ans=0.125
+2024-07-28 14:52:19,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=171650.66666666666, ans=0.0
+2024-07-28 14:52:36,817 INFO [train.py:1114] (1/4) Epoch 13, batch 6100, loss[loss=0.2324, simple_loss=0.3129, pruned_loss=0.07594, over 4661.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2745, pruned_loss=0.04894, over 937419.01 frames. ], batch size: 15, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:52:40,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.34 vs. limit=10.0
+2024-07-28 14:52:41,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=171690.66666666666, ans=0.125
+2024-07-28 14:52:51,824 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.506e+01 6.070e+01 6.932e+01 1.254e+02, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 14:53:20,068 INFO [train.py:1114] (1/4) Epoch 13, batch 6150, loss[loss=0.2653, simple_loss=0.3323, pruned_loss=0.09913, over 3506.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2747, pruned_loss=0.04901, over 936485.09 frames. ], batch size: 35, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:53:20,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.63 vs. limit=22.5
+2024-07-28 14:53:34,754 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.62 vs. limit=22.5
+2024-07-28 14:53:35,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=171784.0, ans=0.0
+2024-07-28 14:53:51,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=171810.66666666666, ans=0.125
+2024-07-28 14:53:53,562 INFO [train.py:1114] (1/4) Epoch 13, batch 6200, loss[loss=0.18, simple_loss=0.2761, pruned_loss=0.04198, over 4747.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2743, pruned_loss=0.04893, over 936388.90 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:53:54,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.47 vs. limit=15.0
+2024-07-28 14:54:00,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=171824.0, ans=0.125
+2024-07-28 14:54:07,617 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.672e+01 6.206e+01 7.275e+01 9.803e+01, threshold=1.241e+02, percent-clipped=1.0
+2024-07-28 14:54:17,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=171850.66666666666, ans=0.0
+2024-07-28 14:54:21,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=171864.0, ans=0.0
+2024-07-28 14:54:27,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=171877.33333333334, ans=0.125
+2024-07-28 14:54:35,959 INFO [train.py:1114] (1/4) Epoch 13, batch 6250, loss[loss=0.2033, simple_loss=0.2913, pruned_loss=0.05763, over 4808.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2746, pruned_loss=0.04988, over 933000.93 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:54:37,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=171890.66666666666, ans=0.1
+2024-07-28 14:54:42,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171904.0, ans=0.1
+2024-07-28 14:54:42,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171904.0, ans=0.125
+2024-07-28 14:54:57,305 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=171930.66666666666, ans=0.0
+2024-07-28 14:54:57,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=171930.66666666666, ans=0.05
+2024-07-28 14:55:09,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=171957.33333333334, ans=0.2
+2024-07-28 14:55:10,113 INFO [train.py:1114] (1/4) Epoch 13, batch 6300, loss[loss=0.168, simple_loss=0.2472, pruned_loss=0.04439, over 4559.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2751, pruned_loss=0.05001, over 929740.97 frames. ], batch size: 10, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:55:25,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=171970.66666666666, ans=0.0
+2024-07-28 14:55:26,613 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.859e+01 6.673e+01 7.738e+01 1.141e+02, threshold=1.335e+02, percent-clipped=0.0
+2024-07-28 14:55:31,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171984.0, ans=0.125
+2024-07-28 14:55:49,026 INFO [train.py:1114] (1/4) Epoch 13, batch 6350, loss[loss=0.2183, simple_loss=0.3058, pruned_loss=0.06546, over 4485.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2748, pruned_loss=0.04963, over 933809.61 frames. ], batch size: 21, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:55:57,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=172037.33333333334, ans=0.125
+2024-07-28 14:56:10,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=172064.0, ans=0.125
+2024-07-28 14:56:13,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172064.0, ans=0.125
+2024-07-28 14:56:14,780 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.27 vs. limit=15.0
+2024-07-28 14:56:15,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=172064.0, ans=0.2
+2024-07-28 14:56:21,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=172077.33333333334, ans=0.125
+2024-07-28 14:56:23,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.86 vs. limit=22.5
+2024-07-28 14:56:26,785 INFO [train.py:1114] (1/4) Epoch 13, batch 6400, loss[loss=0.1725, simple_loss=0.2752, pruned_loss=0.03487, over 4641.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2738, pruned_loss=0.04923, over 935727.72 frames. ], batch size: 13, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:56:31,415 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:56:31,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172090.66666666666, ans=0.125
+2024-07-28 14:56:34,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
+2024-07-28 14:56:37,084 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+01 5.588e+01 6.261e+01 7.317e+01 1.038e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 14:56:37,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=172104.0, ans=0.0
+2024-07-28 14:56:43,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=172117.33333333334, ans=0.125
+2024-07-28 14:56:57,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172144.0, ans=0.1
+2024-07-28 14:57:00,241 INFO [train.py:1114] (1/4) Epoch 13, batch 6450, loss[loss=0.1983, simple_loss=0.2839, pruned_loss=0.0564, over 4445.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2751, pruned_loss=0.04935, over 938995.87 frames. ], batch size: 21, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:57:08,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=172170.66666666666, ans=0.125
+2024-07-28 14:57:08,571 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:57:09,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=172170.66666666666, ans=0.09899494936611666
+2024-07-28 14:57:17,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=172184.0, ans=0.125
+2024-07-28 14:57:26,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172197.33333333334, ans=0.125
+2024-07-28 14:57:34,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172210.66666666666, ans=0.1
+2024-07-28 14:57:39,250 INFO [train.py:1114] (1/4) Epoch 13, batch 6500, loss[loss=0.2066, simple_loss=0.2971, pruned_loss=0.05802, over 3560.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2743, pruned_loss=0.04849, over 940294.37 frames. ], batch size: 35, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:57:49,792 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.677e+01 6.560e+01 8.086e+01 1.120e+02, threshold=1.312e+02, percent-clipped=0.0
+2024-07-28 14:57:51,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172250.66666666666, ans=0.1
+2024-07-28 14:57:58,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=172264.0, ans=0.125
+2024-07-28 14:58:04,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=172264.0, ans=0.125
+2024-07-28 14:58:07,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172277.33333333334, ans=0.125
+2024-07-28 14:58:14,166 INFO [train.py:1114] (1/4) Epoch 13, batch 6550, loss[loss=0.1513, simple_loss=0.2295, pruned_loss=0.0366, over 4814.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2743, pruned_loss=0.04842, over 943242.12 frames.
], batch size: 11, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:58:17,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=172290.66666666666, ans=0.05 +2024-07-28 14:58:18,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172290.66666666666, ans=0.1 +2024-07-28 14:58:20,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.75 vs. limit=22.5 +2024-07-28 14:58:22,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172304.0, ans=0.125 +2024-07-28 14:58:23,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=172304.0, ans=0.125 +2024-07-28 14:58:27,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.30 vs. limit=15.0 +2024-07-28 14:58:34,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=172330.66666666666, ans=0.025 +2024-07-28 14:58:47,966 INFO [train.py:1114] (1/4) Epoch 13, batch 6600, loss[loss=0.1975, simple_loss=0.2976, pruned_loss=0.04871, over 4932.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2742, pruned_loss=0.04801, over 945100.37 frames. ], batch size: 14, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:58:54,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=172370.66666666666, ans=0.025 +2024-07-28 14:58:54,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-07-28 14:58:58,751 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.699e+01 6.105e+01 6.926e+01 1.138e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 14:59:02,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.07 vs. limit=22.5 +2024-07-28 14:59:16,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.81 vs. limit=10.0 +2024-07-28 14:59:20,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=172410.66666666666, ans=0.125 +2024-07-28 14:59:22,866 INFO [train.py:1114] (1/4) Epoch 13, batch 6650, loss[loss=0.2446, simple_loss=0.34, pruned_loss=0.07459, over 4591.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2748, pruned_loss=0.04844, over 943416.12 frames. ], batch size: 17, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:59:27,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-07-28 14:59:28,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=172424.0, ans=0.025 +2024-07-28 14:59:31,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.04 vs. 
limit=22.5 +2024-07-28 14:59:35,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=172450.66666666666, ans=0.125 +2024-07-28 14:59:36,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=172450.66666666666, ans=0.0 +2024-07-28 14:59:40,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=172450.66666666666, ans=0.2 +2024-07-28 14:59:40,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-07-28 14:59:44,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172464.0, ans=0.1 +2024-07-28 14:59:44,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.89 vs. limit=22.5 +2024-07-28 14:59:49,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.17 vs. limit=12.0 +2024-07-28 14:59:51,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172477.33333333334, ans=0.0 +2024-07-28 14:59:56,802 INFO [train.py:1114] (1/4) Epoch 13, batch 6700, loss[loss=0.2047, simple_loss=0.2852, pruned_loss=0.06212, over 4685.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2758, pruned_loss=0.04931, over 941921.37 frames. ], batch size: 19, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:59:58,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172490.66666666666, ans=0.1 +2024-07-28 15:00:00,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=172490.66666666666, ans=0.125 +2024-07-28 15:00:07,462 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.630e+01 6.292e+01 7.000e+01 1.303e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-28 15:00:18,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=172530.66666666666, ans=0.125 +2024-07-28 15:00:22,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=172530.66666666666, ans=0.125 +2024-07-28 15:00:30,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=172544.0, ans=0.0 +2024-07-28 15:00:32,524 INFO [train.py:1114] (1/4) Epoch 13, batch 6750, loss[loss=0.1862, simple_loss=0.2749, pruned_loss=0.04872, over 4228.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2758, pruned_loss=0.04955, over 940012.05 frames. 
], batch size: 25, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:00:51,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172584.0, ans=0.1 +2024-07-28 15:00:54,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=172597.33333333334, ans=0.125 +2024-07-28 15:01:06,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=172610.66666666666, ans=0.0 +2024-07-28 15:01:08,804 INFO [train.py:1114] (1/4) Epoch 13, batch 6800, loss[loss=0.2269, simple_loss=0.3237, pruned_loss=0.06503, over 4644.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2765, pruned_loss=0.04997, over 938729.07 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:01:09,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=172624.0, ans=0.2 +2024-07-28 15:01:09,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=172624.0, ans=0.125 +2024-07-28 15:01:12,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=172624.0, ans=0.0 +2024-07-28 15:01:16,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=172637.33333333334, ans=0.0 +2024-07-28 15:01:19,460 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.597e+01 6.324e+01 7.266e+01 1.591e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-28 15:01:20,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0 +2024-07-28 15:01:22,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.52 vs. limit=22.5 +2024-07-28 15:01:36,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.84 vs. limit=10.0 +2024-07-28 15:01:37,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=172677.33333333334, ans=0.0 +2024-07-28 15:01:37,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=172677.33333333334, ans=0.2 +2024-07-28 15:01:38,732 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.11 vs. limit=15.0 +2024-07-28 15:01:41,582 INFO [train.py:1114] (1/4) Epoch 13, batch 6850, loss[loss=0.1543, simple_loss=0.2472, pruned_loss=0.0307, over 4697.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2754, pruned_loss=0.04944, over 940221.97 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:01:49,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=172704.0, ans=0.125 +2024-07-28 15:02:14,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.76 vs. 
limit=15.0 +2024-07-28 15:02:14,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=172730.66666666666, ans=0.0 +2024-07-28 15:02:23,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.52 vs. limit=15.0 +2024-07-28 15:02:26,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=172757.33333333334, ans=0.0 +2024-07-28 15:02:27,237 INFO [train.py:1114] (1/4) Epoch 13, batch 6900, loss[loss=0.179, simple_loss=0.2821, pruned_loss=0.03795, over 4968.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04947, over 942489.27 frames. ], batch size: 13, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:02:31,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=172757.33333333334, ans=0.1 +2024-07-28 15:02:37,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=172770.66666666666, ans=0.125 +2024-07-28 15:02:38,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.650e+01 5.997e+01 6.576e+01 8.900e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 15:02:40,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=172784.0, ans=0.2 +2024-07-28 15:02:41,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172784.0, ans=0.1 +2024-07-28 15:02:43,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.60 vs. limit=22.5 +2024-07-28 15:02:56,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-28 15:02:57,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=172810.66666666666, ans=0.125 +2024-07-28 15:03:01,334 INFO [train.py:1114] (1/4) Epoch 13, batch 6950, loss[loss=0.1926, simple_loss=0.2713, pruned_loss=0.05696, over 4495.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.276, pruned_loss=0.04928, over 940032.46 frames. ], batch size: 10, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:03:10,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172837.33333333334, ans=0.1 +2024-07-28 15:03:15,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=172837.33333333334, ans=0.0 +2024-07-28 15:03:22,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=172850.66666666666, ans=0.0 +2024-07-28 15:03:36,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=172877.33333333334, ans=0.2 +2024-07-28 15:03:38,283 INFO [train.py:1114] (1/4) Epoch 13, batch 7000, loss[loss=0.1973, simple_loss=0.3009, pruned_loss=0.04691, over 4625.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2757, pruned_loss=0.04918, over 938559.97 frames. 
], batch size: 17, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:03:42,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172890.66666666666, ans=0.1 +2024-07-28 15:03:44,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=172904.0, ans=0.5 +2024-07-28 15:03:48,574 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.631e+01 6.423e+01 7.992e+01 1.097e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 15:03:49,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172904.0, ans=0.125 +2024-07-28 15:03:55,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172917.33333333334, ans=0.125 +2024-07-28 15:04:06,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=172944.0, ans=0.2 +2024-07-28 15:04:08,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.60 vs. limit=15.0 +2024-07-28 15:04:10,934 INFO [train.py:1114] (1/4) Epoch 13, batch 7050, loss[loss=0.1647, simple_loss=0.2642, pruned_loss=0.03264, over 4729.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2759, pruned_loss=0.04912, over 941836.53 frames. ], batch size: 19, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:04:16,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=172970.66666666666, ans=0.015 +2024-07-28 15:04:17,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=172970.66666666666, ans=0.0 +2024-07-28 15:04:19,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=172970.66666666666, ans=15.0 +2024-07-28 15:04:23,438 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.36 vs. limit=22.5 +2024-07-28 15:04:23,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=172984.0, ans=0.2 +2024-07-28 15:04:29,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.46 vs. limit=15.0 +2024-07-28 15:04:40,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.87 vs. limit=10.0 +2024-07-28 15:04:44,106 INFO [train.py:1114] (1/4) Epoch 13, batch 7100, loss[loss=0.206, simple_loss=0.3129, pruned_loss=0.04958, over 4798.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04938, over 936757.63 frames. 
], batch size: 15, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:04:46,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=173024.0, ans=0.2 +2024-07-28 15:04:49,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=173024.0, ans=0.125 +2024-07-28 15:04:52,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.91 vs. limit=15.0 +2024-07-28 15:04:52,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.53 vs. limit=15.0 +2024-07-28 15:04:54,282 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.692e+01 6.139e+01 7.289e+01 1.294e+02, threshold=1.228e+02, percent-clipped=1.0 +2024-07-28 15:05:05,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=173064.0, ans=0.0 +2024-07-28 15:05:14,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173077.33333333334, ans=0.125 +2024-07-28 15:05:16,964 INFO [train.py:1114] (1/4) Epoch 13, batch 7150, loss[loss=0.2314, simple_loss=0.3224, pruned_loss=0.07025, over 4583.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2746, pruned_loss=0.04876, over 937788.17 frames. ], batch size: 21, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:05:17,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.89 vs. limit=22.5 +2024-07-28 15:05:20,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173090.66666666666, ans=0.125 +2024-07-28 15:05:24,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=173104.0, ans=0.025 +2024-07-28 15:05:35,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173117.33333333334, ans=0.1 +2024-07-28 15:05:42,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173144.0, ans=0.125 +2024-07-28 15:05:45,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.55 vs. limit=22.5 +2024-07-28 15:05:50,029 INFO [train.py:1114] (1/4) Epoch 13, batch 7200, loss[loss=0.1938, simple_loss=0.2801, pruned_loss=0.05373, over 4796.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2762, pruned_loss=0.04915, over 938117.45 frames. 
], batch size: 15, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:05:50,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173157.33333333334, ans=0.1 +2024-07-28 15:05:51,985 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:06:00,400 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.641e+01 6.340e+01 7.110e+01 1.006e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 15:06:05,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=173184.0, ans=0.125 +2024-07-28 15:06:08,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173184.0, ans=0.125 +2024-07-28 15:06:22,406 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-07-28 15:06:22,752 INFO [train.py:1114] (1/4) Epoch 13, batch 7250, loss[loss=0.1394, simple_loss=0.2391, pruned_loss=0.01986, over 4857.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2747, pruned_loss=0.04856, over 939535.34 frames. ], batch size: 12, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:06:24,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173224.0, ans=0.1 +2024-07-28 15:06:28,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=173237.33333333334, ans=0.5 +2024-07-28 15:06:30,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.40 vs. limit=15.0 +2024-07-28 15:06:34,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=173237.33333333334, ans=0.125 +2024-07-28 15:06:35,321 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:06:41,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173250.66666666666, ans=0.125 +2024-07-28 15:06:55,532 INFO [train.py:1114] (1/4) Epoch 13, batch 7300, loss[loss=0.1682, simple_loss=0.2542, pruned_loss=0.04106, over 4850.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2741, pruned_loss=0.0482, over 939821.43 frames. 
], batch size: 12, lr: 5.72e-03, grad_scale: 64.0 +2024-07-28 15:06:55,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=173290.66666666666, ans=0.0 +2024-07-28 15:07:02,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=173304.0, ans=0.2 +2024-07-28 15:07:05,992 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.468e+01 5.985e+01 6.770e+01 9.344e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 15:07:10,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173317.33333333334, ans=0.125 +2024-07-28 15:07:10,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=173317.33333333334, ans=0.0 +2024-07-28 15:07:18,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=173330.66666666666, ans=0.0 +2024-07-28 15:07:28,289 INFO [train.py:1114] (1/4) Epoch 13, batch 7350, loss[loss=0.1731, simple_loss=0.2691, pruned_loss=0.03855, over 4646.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2736, pruned_loss=0.04758, over 939282.18 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:07:49,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=173397.33333333334, ans=0.125 +2024-07-28 15:07:57,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=173410.66666666666, ans=0.0 +2024-07-28 15:08:01,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173424.0, ans=0.1 +2024-07-28 15:08:02,219 INFO [train.py:1114] (1/4) Epoch 13, batch 7400, loss[loss=0.1603, simple_loss=0.2543, pruned_loss=0.03313, over 4693.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2736, pruned_loss=0.04755, over 940537.78 frames. ], batch size: 13, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:08:03,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.79 vs. 
limit=15.0 +2024-07-28 15:08:05,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173424.0, ans=0.0 +2024-07-28 15:08:07,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173424.0, ans=0.125 +2024-07-28 15:08:09,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=173437.33333333334, ans=0.09899494936611666 +2024-07-28 15:08:12,819 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+01 5.640e+01 6.317e+01 7.601e+01 1.154e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 15:08:12,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173437.33333333334, ans=0.1 +2024-07-28 15:08:17,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173450.66666666666, ans=0.125 +2024-07-28 15:08:23,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0 +2024-07-28 15:08:26,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=173464.0, ans=0.2 +2024-07-28 15:08:32,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173477.33333333334, ans=0.0 +2024-07-28 15:08:34,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.22 vs. limit=15.0 +2024-07-28 15:08:35,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=173477.33333333334, ans=0.125 +2024-07-28 15:08:36,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=173490.66666666666, ans=0.125 +2024-07-28 15:08:36,730 INFO [train.py:1114] (1/4) Epoch 13, batch 7450, loss[loss=0.1537, simple_loss=0.2328, pruned_loss=0.03725, over 4603.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2735, pruned_loss=0.04769, over 937995.76 frames. ], batch size: 11, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:08:41,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=173490.66666666666, ans=0.2 +2024-07-28 15:08:41,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173490.66666666666, ans=0.125 +2024-07-28 15:08:45,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.78 vs. limit=12.0 +2024-07-28 15:08:46,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=173504.0, ans=0.125 +2024-07-28 15:08:51,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.97 vs. 
limit=15.0 +2024-07-28 15:08:53,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173517.33333333334, ans=0.1 +2024-07-28 15:08:56,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.47 vs. limit=22.5 +2024-07-28 15:08:59,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=173530.66666666666, ans=0.04949747468305833 +2024-07-28 15:09:08,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173544.0, ans=0.125 +2024-07-28 15:09:09,545 INFO [train.py:1114] (1/4) Epoch 13, batch 7500, loss[loss=0.2649, simple_loss=0.3306, pruned_loss=0.09956, over 3235.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2757, pruned_loss=0.04889, over 936354.36 frames. ], batch size: 35, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:09:10,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=173557.33333333334, ans=0.125 +2024-07-28 15:09:11,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=173557.33333333334, ans=0.125 +2024-07-28 15:09:17,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.87 vs. limit=10.0 +2024-07-28 15:09:19,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=173570.66666666666, ans=0.0 +2024-07-28 15:09:20,245 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.712e+01 6.192e+01 7.126e+01 1.284e+02, threshold=1.238e+02, percent-clipped=1.0 +2024-07-28 15:09:33,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=173584.0, ans=0.025 +2024-07-28 15:09:37,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=173597.33333333334, ans=0.0 +2024-07-28 15:09:46,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.37 vs. limit=15.0 +2024-07-28 15:09:47,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.79 vs. limit=15.0 +2024-07-28 15:09:53,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=173610.66666666666, ans=0.0 +2024-07-28 15:09:56,860 INFO [train.py:1114] (1/4) Epoch 13, batch 7550, loss[loss=0.2177, simple_loss=0.2857, pruned_loss=0.0748, over 4626.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2765, pruned_loss=0.0491, over 935710.85 frames. ], batch size: 17, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:10:12,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.34 vs. 
limit=22.5 +2024-07-28 15:10:15,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173624.0, ans=0.125 +2024-07-28 15:10:18,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=173624.0, ans=0.1 +2024-07-28 15:10:22,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.28 vs. limit=15.0 +2024-07-28 15:10:23,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173637.33333333334, ans=0.1 +2024-07-28 15:10:25,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=173637.33333333334, ans=0.125 +2024-07-28 15:10:29,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=173650.66666666666, ans=0.0 +2024-07-28 15:10:38,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=173664.0, ans=0.125 +2024-07-28 15:10:40,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=173677.33333333334, ans=0.125 +2024-07-28 15:10:44,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=173677.33333333334, ans=0.05 +2024-07-28 15:10:57,561 INFO [train.py:1114] (1/4) Epoch 13, batch 7600, loss[loss=0.1851, simple_loss=0.283, pruned_loss=0.04361, over 4810.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2758, pruned_loss=0.04862, over 937452.12 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:10:58,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=173690.66666666666, ans=0.125 +2024-07-28 15:11:06,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=173704.0, ans=0.125 +2024-07-28 15:11:08,016 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.498e+01 5.988e+01 6.691e+01 9.239e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 15:11:18,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=173717.33333333334, ans=0.125 +2024-07-28 15:11:23,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=173717.33333333334, ans=0.0 +2024-07-28 15:11:24,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=173717.33333333334, ans=0.125 +2024-07-28 15:11:34,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=173717.33333333334, ans=10.0 +2024-07-28 15:11:58,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173730.66666666666, ans=0.125 +2024-07-28 15:12:04,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. 
limit=15.0 +2024-07-28 15:12:06,083 INFO [train.py:1114] (1/4) Epoch 13, batch 7650, loss[loss=0.147, simple_loss=0.2397, pruned_loss=0.02715, over 4943.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2756, pruned_loss=0.049, over 936281.51 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:12:08,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=173757.33333333334, ans=0.125 +2024-07-28 15:12:18,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=173770.66666666666, ans=0.0 +2024-07-28 15:12:25,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.59 vs. limit=22.5 +2024-07-28 15:12:32,858 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.12 vs. limit=22.5 +2024-07-28 15:12:40,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=173810.66666666666, ans=0.125 +2024-07-28 15:12:41,861 INFO [train.py:1114] (1/4) Epoch 13, batch 7700, loss[loss=0.194, simple_loss=0.2871, pruned_loss=0.05044, over 4710.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2763, pruned_loss=0.04946, over 934079.40 frames. ], batch size: 13, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:12:52,767 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.534e+01 6.118e+01 6.663e+01 8.734e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 15:12:59,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173850.66666666666, ans=0.125 +2024-07-28 15:13:01,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=173864.0, ans=0.125 +2024-07-28 15:13:14,198 INFO [train.py:1114] (1/4) Epoch 13, batch 7750, loss[loss=0.1717, simple_loss=0.2705, pruned_loss=0.03648, over 4932.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.277, pruned_loss=0.04933, over 935311.21 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 32.0 +2024-07-28 15:13:19,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=173890.66666666666, ans=0.2 +2024-07-28 15:13:38,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=173930.66666666666, ans=0.125 +2024-07-28 15:13:39,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=173930.66666666666, ans=0.1 +2024-07-28 15:13:49,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.58 vs. limit=22.5 +2024-07-28 15:13:49,762 INFO [train.py:1114] (1/4) Epoch 13, batch 7800, loss[loss=0.1762, simple_loss=0.2707, pruned_loss=0.04086, over 4660.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2775, pruned_loss=0.04958, over 936963.88 frames. 
], batch size: 14, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:13:52,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=173957.33333333334, ans=0.0 +2024-07-28 15:14:01,086 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.796e+01 5.555e+01 6.069e+01 6.471e+01 9.594e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 15:14:09,477 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.43 vs. limit=15.0 +2024-07-28 15:14:35,068 INFO [train.py:1114] (1/4) Epoch 13, batch 7850, loss[loss=0.1682, simple_loss=0.2588, pruned_loss=0.03879, over 4539.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2778, pruned_loss=0.04996, over 935946.36 frames. ], batch size: 10, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:14:38,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.26 vs. limit=15.0 +2024-07-28 15:14:46,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=174037.33333333334, ans=0.025 +2024-07-28 15:15:00,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=174064.0, ans=0.125 +2024-07-28 15:15:04,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=174077.33333333334, ans=0.025 +2024-07-28 15:15:09,970 INFO [train.py:1114] (1/4) Epoch 13, batch 7900, loss[loss=0.22, simple_loss=0.3071, pruned_loss=0.06646, over 4875.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2786, pruned_loss=0.05044, over 932997.13 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:15:20,615 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.632e+01 6.110e+01 7.084e+01 9.814e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 15:15:30,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174130.66666666666, ans=0.125 +2024-07-28 15:15:30,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=174130.66666666666, ans=0.0 +2024-07-28 15:15:34,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.93 vs. limit=22.5 +2024-07-28 15:15:41,920 INFO [train.py:1114] (1/4) Epoch 13, batch 7950, loss[loss=0.2284, simple_loss=0.3037, pruned_loss=0.07653, over 3198.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2773, pruned_loss=0.04985, over 935052.26 frames. 
], batch size: 35, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:15:43,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174157.33333333334, ans=0.125 +2024-07-28 15:15:46,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=174157.33333333334, ans=0.125 +2024-07-28 15:15:47,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174157.33333333334, ans=0.1 +2024-07-28 15:15:47,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=174157.33333333334, ans=0.0 +2024-07-28 15:15:49,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174170.66666666666, ans=0.125 +2024-07-28 15:15:49,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=174170.66666666666, ans=0.125 +2024-07-28 15:15:57,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=174184.0, ans=0.025 +2024-07-28 15:16:00,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=174197.33333333334, ans=0.125 +2024-07-28 15:16:05,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=174197.33333333334, ans=0.125 +2024-07-28 15:16:06,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=174197.33333333334, ans=0.125 +2024-07-28 15:16:14,589 INFO [train.py:1114] (1/4) Epoch 13, batch 8000, loss[loss=0.2029, simple_loss=0.2802, pruned_loss=0.06277, over 4619.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04924, over 934431.20 frames. ], batch size: 11, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:16:19,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=174224.0, ans=0.05 +2024-07-28 15:16:23,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=174237.33333333334, ans=0.0 +2024-07-28 15:16:25,590 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.751e+01 6.184e+01 6.866e+01 1.059e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 15:16:30,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=174250.66666666666, ans=0.125 +2024-07-28 15:16:35,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=174264.0, ans=0.125 +2024-07-28 15:16:41,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.91 vs. limit=15.0 +2024-07-28 15:16:42,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=174277.33333333334, ans=0.125 +2024-07-28 15:16:47,952 INFO [train.py:1114] (1/4) Epoch 13, batch 8050, loss[loss=0.2133, simple_loss=0.3005, pruned_loss=0.06303, over 4817.00 frames. 
], tot_loss[loss=0.1875, simple_loss=0.2757, pruned_loss=0.04963, over 934284.05 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:17:16,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=174330.66666666666, ans=0.09899494936611666 +2024-07-28 15:17:16,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.74 vs. limit=15.0 +2024-07-28 15:17:23,899 INFO [train.py:1114] (1/4) Epoch 13, batch 8100, loss[loss=0.2272, simple_loss=0.3172, pruned_loss=0.06866, over 4804.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04931, over 933864.23 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:17:25,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=174357.33333333334, ans=0.0 +2024-07-28 15:17:26,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=174357.33333333334, ans=0.0 +2024-07-28 15:17:27,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174357.33333333334, ans=0.1 +2024-07-28 15:17:34,671 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.712e+01 6.251e+01 7.311e+01 9.756e+01, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 15:17:35,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174370.66666666666, ans=0.125 +2024-07-28 15:17:35,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=174370.66666666666, ans=0.2 +2024-07-28 15:17:42,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.85 vs. limit=15.0 +2024-07-28 15:18:25,030 INFO [train.py:1114] (1/4) Epoch 13, batch 8150, loss[loss=0.2028, simple_loss=0.2966, pruned_loss=0.05447, over 4817.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.275, pruned_loss=0.04931, over 937435.74 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:18:41,663 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.79 vs. limit=10.0 +2024-07-28 15:18:53,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=174450.66666666666, ans=0.0 +2024-07-28 15:21:18,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=174450.66666666666, ans=0.05 +2024-07-28 15:21:28,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=174477.33333333334, ans=0.125 +2024-07-28 15:21:33,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.03 vs. limit=22.5 +2024-07-28 15:21:34,238 INFO [train.py:1114] (1/4) Epoch 13, batch 8200, loss[loss=0.212, simple_loss=0.2969, pruned_loss=0.06352, over 4795.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2745, pruned_loss=0.04892, over 938535.41 frames. 
], batch size: 15, lr: 5.70e-03, grad_scale: 32.0 +2024-07-28 15:21:51,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.18 vs. limit=15.0 +2024-07-28 15:21:53,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=174504.0, ans=0.07 +2024-07-28 15:21:55,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=174504.0, ans=0.125 +2024-07-28 15:21:57,442 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 5.624e+01 6.115e+01 7.227e+01 1.322e+02, threshold=1.223e+02, percent-clipped=1.0 +2024-07-28 15:21:57,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=174504.0, ans=0.0 +2024-07-28 15:22:06,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.44 vs. limit=15.0 +2024-07-28 15:22:07,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-28 15:22:19,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.73 vs. limit=22.5 +2024-07-28 15:22:20,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=174530.66666666666, ans=0.09899494936611666 +2024-07-28 15:22:24,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.48 vs. limit=15.0 +2024-07-28 15:23:17,765 INFO [train.py:1114] (1/4) Epoch 13, batch 8250, loss[loss=0.1934, simple_loss=0.2851, pruned_loss=0.0508, over 4890.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2739, pruned_loss=0.0486, over 938721.58 frames. ], batch size: 13, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:23:33,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 15:23:35,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=174570.66666666666, ans=0.1 +2024-07-28 15:23:35,859 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:23:38,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=174570.66666666666, ans=0.125 +2024-07-28 15:23:45,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=174584.0, ans=0.0 +2024-07-28 15:23:59,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=174610.66666666666, ans=0.0 +2024-07-28 15:23:59,298 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.30 vs. 
+2024-07-28 15:24:00,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=174624.0, ans=0.2
+2024-07-28 15:24:00,769 INFO [train.py:1114] (1/4) Epoch 13, batch 8300, loss[loss=0.2069, simple_loss=0.2944, pruned_loss=0.05967, over 4895.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2756, pruned_loss=0.04914, over 939761.04 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:24:01,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.08 vs. limit=22.5
+2024-07-28 15:24:04,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=174624.0, ans=0.0
+2024-07-28 15:24:11,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.640e+01 5.984e+01 6.893e+01 9.803e+01, threshold=1.197e+02, percent-clipped=0.0
+2024-07-28 15:24:17,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174650.66666666666, ans=0.1
+2024-07-28 15:24:27,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=174664.0, ans=0.125
+2024-07-28 15:24:38,261 INFO [train.py:1114] (1/4) Epoch 13, batch 8350, loss[loss=0.1938, simple_loss=0.2801, pruned_loss=0.05373, over 4797.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2747, pruned_loss=0.04864, over 942448.16 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:24:40,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=174690.66666666666, ans=0.125
+2024-07-28 15:24:41,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.0
+2024-07-28 15:24:44,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=174704.0, ans=0.125
+2024-07-28 15:24:50,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.68 vs. limit=22.5
+2024-07-28 15:24:57,733 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.56 vs. limit=22.5
+2024-07-28 15:25:02,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=174730.66666666666, ans=0.025
+2024-07-28 15:25:08,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=174744.0, ans=0.2
+2024-07-28 15:25:12,892 INFO [train.py:1114] (1/4) Epoch 13, batch 8400, loss[loss=0.1588, simple_loss=0.2487, pruned_loss=0.03443, over 4784.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2741, pruned_loss=0.04857, over 940738.35 frames. ], batch size: 12, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:25:18,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=174757.33333333334, ans=0.0
+2024-07-28 15:25:22,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=174770.66666666666, ans=0.125
+2024-07-28 15:25:23,771 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.833e+01 6.092e+01 7.413e+01 1.221e+02, threshold=1.218e+02, percent-clipped=1.0
+2024-07-28 15:25:24,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=174770.66666666666, ans=0.125
+2024-07-28 15:25:40,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174797.33333333334, ans=0.0
+2024-07-28 15:25:41,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=174797.33333333334, ans=0.2
+2024-07-28 15:25:47,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=174810.66666666666, ans=0.125
+2024-07-28 15:25:49,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=174810.66666666666, ans=0.2
+2024-07-28 15:25:51,139 INFO [train.py:1114] (1/4) Epoch 13, batch 8450, loss[loss=0.1901, simple_loss=0.2898, pruned_loss=0.04525, over 4808.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2755, pruned_loss=0.04891, over 939617.12 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:25:51,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=174824.0, ans=0.2
+2024-07-28 15:25:53,181 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:25:57,933 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.03 vs. limit=15.0
+2024-07-28 15:25:59,175 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.89 vs. limit=15.0
+2024-07-28 15:26:09,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0
+2024-07-28 15:26:29,351 INFO [train.py:1114] (1/4) Epoch 13, batch 8500, loss[loss=0.1506, simple_loss=0.243, pruned_loss=0.02904, over 4614.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2748, pruned_loss=0.04866, over 938964.57 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:29:35,084 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.221e+01 5.689e+01 6.230e+01 7.373e+01 1.057e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 15:29:46,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=174917.33333333334, ans=0.0
+2024-07-28 15:29:47,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=174917.33333333334, ans=0.125
+2024-07-28 15:30:03,277 INFO [train.py:1114] (1/4) Epoch 13, batch 8550, loss[loss=0.1586, simple_loss=0.238, pruned_loss=0.03965, over 4802.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2742, pruned_loss=0.04813, over 939707.27 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:30:09,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=174970.66666666666, ans=0.07
+2024-07-28 15:30:12,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=174970.66666666666, ans=0.125
+2024-07-28 15:30:13,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174970.66666666666, ans=0.125
+2024-07-28 15:30:15,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0
+2024-07-28 15:30:18,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=174984.0, ans=0.125
+2024-07-28 15:30:21,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=174984.0, ans=0.0
+2024-07-28 15:30:24,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=174997.33333333334, ans=0.0
+2024-07-28 15:30:31,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.04 vs. limit=22.5
+2024-07-28 15:30:32,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=175010.66666666666, ans=0.0
+2024-07-28 15:30:41,168 INFO [train.py:1114] (1/4) Epoch 13, batch 8600, loss[loss=0.2227, simple_loss=0.322, pruned_loss=0.06173, over 4801.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2737, pruned_loss=0.04826, over 939551.92 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:30:41,314 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:30:52,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=175037.33333333334, ans=0.1
+2024-07-28 15:30:53,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=175037.33333333334, ans=0.125
+2024-07-28 15:30:54,282 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.590e+01 5.714e+01 6.617e+01 7.604e+01 1.022e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-28 15:30:58,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=175050.66666666666, ans=0.125
+2024-07-28 15:31:00,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.40 vs. limit=6.0
+2024-07-28 15:31:16,115 INFO [train.py:1114] (1/4) Epoch 13, batch 8650, loss[loss=0.2269, simple_loss=0.3179, pruned_loss=0.06795, over 4895.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2745, pruned_loss=0.04849, over 940670.62 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:36:23,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=175117.33333333334, ans=0.2
+2024-07-28 15:36:24,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=175117.33333333334, ans=0.0
+2024-07-28 15:36:31,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=175130.66666666666, ans=0.1
+2024-07-28 15:36:43,569 INFO [train.py:1114] (1/4) Epoch 13, batch 8700, loss[loss=0.1635, simple_loss=0.2522, pruned_loss=0.03742, over 4758.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2756, pruned_loss=0.04906, over 937808.57 frames. ], batch size: 13, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:36:45,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=175157.33333333334, ans=0.125
+2024-07-28 15:36:58,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=175170.66666666666, ans=0.0
+2024-07-28 15:36:58,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=175170.66666666666, ans=0.2
+2024-07-28 15:36:59,762 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.561e+01 6.137e+01 6.917e+01 9.151e+01, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 15:37:03,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=175184.0, ans=0.125
+2024-07-28 15:37:08,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=175184.0, ans=0.125
+2024-07-28 15:37:22,442 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:37:34,701 INFO [train.py:1114] (1/4) Epoch 13, batch 8750, loss[loss=0.1927, simple_loss=0.2881, pruned_loss=0.04864, over 4676.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.276, pruned_loss=0.04936, over 936232.78 frames. ], batch size: 15, lr: 5.68e-03, grad_scale: 32.0
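In the `train.py:1114` lines, `loss[...]` describes the current batch and `tot_loss[...]` a running average weighted by the number of acoustic frames, which is why the frame counts are fractional (e.g. `over 936232.78 frames`). A sketch of one way to maintain such a tracker, assuming exponential forgetting (the decay constant is an assumption, not the value icefall uses):

```python
class RunningLoss:
    """Frame-weighted running average with exponential forgetting.

    Sketch of how a 'tot_loss[... over N frames]' line could be maintained;
    the decay constant here is an assumption, not icefall's actual value.
    """

    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.loss_sum = 0.0   # decayed sum of loss * frames
        self.frames = 0.0     # decayed effective frame count (hence fractional)

    def update(self, loss: float, num_frames: int) -> None:
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tracker = RunningLoss()
tracker.update(0.2272, 4804)   # one batch's loss, weighted by its 4804 frames
tracker.update(0.2028, 4817)
print(f"tot_loss[loss={tracker.value:.4f}, over {tracker.frames:.2f} frames]")
```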
+2024-07-28 15:37:36,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=175224.0, ans=0.125
+2024-07-28 15:37:44,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0
+2024-07-28 15:38:05,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=175277.33333333334, ans=0.05
+2024-07-28 15:38:06,391 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:38:09,909 INFO [train.py:1114] (1/4) Epoch 13, batch 8800, loss[loss=0.1913, simple_loss=0.2738, pruned_loss=0.05439, over 4928.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2767, pruned_loss=0.04958, over 937120.72 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:38:14,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=175290.66666666666, ans=0.125
+2024-07-28 15:38:14,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=175290.66666666666, ans=0.125
+2024-07-28 15:38:21,214 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.841e+01 6.340e+01 7.291e+01 9.820e+01, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 15:38:23,960 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:38:33,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175330.66666666666, ans=0.125
+2024-07-28 15:38:43,105 INFO [train.py:1114] (1/4) Epoch 13, batch 8850, loss[loss=0.1773, simple_loss=0.2755, pruned_loss=0.0396, over 4530.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2764, pruned_loss=0.04987, over 932390.86 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:39:02,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.38 vs. limit=6.0
+2024-07-28 15:39:11,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=175410.66666666666, ans=0.2
+2024-07-28 15:39:13,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=175410.66666666666, ans=0.0
+2024-07-28 15:39:15,585 INFO [train.py:1114] (1/4) Epoch 13, batch 8900, loss[loss=0.1971, simple_loss=0.2829, pruned_loss=0.05566, over 4937.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2774, pruned_loss=0.05027, over 930068.49 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:39:17,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=175424.0, ans=0.125
+2024-07-28 15:39:20,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.88 vs. limit=12.0
+2024-07-28 15:39:31,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=175437.33333333334, ans=0.0
+2024-07-28 15:39:32,292 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.752e+01 6.427e+01 7.462e+01 1.101e+02, threshold=1.285e+02, percent-clipped=0.0
+2024-07-28 15:39:38,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=175450.66666666666, ans=0.07
+2024-07-28 15:39:49,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=175477.33333333334, ans=0.0
+2024-07-28 15:39:50,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=175477.33333333334, ans=0.125
+2024-07-28 15:39:53,969 INFO [train.py:1114] (1/4) Epoch 13, batch 8950, loss[loss=0.1943, simple_loss=0.2754, pruned_loss=0.05657, over 4521.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2771, pruned_loss=0.04985, over 931095.09 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:40:13,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.88 vs. limit=15.0
+2024-07-28 15:40:23,064 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.73 vs. limit=12.0
+2024-07-28 15:40:34,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.31 vs. limit=22.5
+2024-07-28 15:40:38,250 INFO [train.py:1114] (1/4) Epoch 13, batch 9000, loss[loss=0.1995, simple_loss=0.2995, pruned_loss=0.04975, over 4642.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.04934, over 934154.21 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:40:38,251 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 15:40:49,960 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.7731, 5.9929, 5.8491, 6.5001], device='cuda:1')
+2024-07-28 15:42:45,090 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.1489, 2.0855, 4.3205, 2.5085], device='cuda:1')
+2024-07-28 15:42:48,985 INFO [train.py:1146] (1/4) Epoch 13, validation: loss=0.1657, simple_loss=0.2696, pruned_loss=0.03096, over 944034.00 frames.
+2024-07-28 15:44:57,274 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 15:45:02,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=12.0
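At validation time (`train.py:1137`/`train.py:1146`), the recipe also dumps `attn_weights_entropy` per self-attention module, one value per head: low entropy means sharply peaked attention, while entropy near log(key_len) means nearly uniform attention. A sketch of such a diagnostic (the exact reduction axes in zipformer.py may differ):

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Mean entropy (in nats) of attention distributions, one value per head.

    attn: (num_heads, batch, query_len, key_len), rows summing to 1.
    Illustrative sketch of the zipformer.py:1858 diagnostic.
    """
    entropy = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # (heads, batch, q)
    return entropy.mean(dim=(1, 2))                        # reduce to per-head

attn = torch.softmax(torch.randn(4, 2, 10, 50), dim=-1)
print(attn_weights_entropy(attn))  # near-uniform rows -> entropy near log(50)
```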
+2024-07-28 15:45:12,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=175570.66666666666, ans=0.0
+2024-07-28 15:45:15,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=175570.66666666666, ans=0.1
+2024-07-28 15:45:15,799 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.562e+01 6.322e+01 7.112e+01 1.143e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 15:45:23,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=175584.0, ans=0.025
+2024-07-28 15:45:35,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0
+2024-07-28 15:45:38,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=175624.0, ans=0.0
+2024-07-28 15:45:39,135 INFO [train.py:1114] (1/4) Epoch 13, batch 9050, loss[loss=0.1609, simple_loss=0.2418, pruned_loss=0.04004, over 4555.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2748, pruned_loss=0.04935, over 934607.65 frames. ], batch size: 10, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:45:41,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175624.0, ans=0.1
+2024-07-28 15:45:43,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=175624.0, ans=0.0
+2024-07-28 15:45:49,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.97 vs. limit=15.0
+2024-07-28 15:45:54,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175650.66666666666, ans=0.1
+2024-07-28 15:45:55,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=175650.66666666666, ans=0.125
+2024-07-28 15:45:55,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=175650.66666666666, ans=0.025
+2024-07-28 15:46:03,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=175664.0, ans=0.07
+2024-07-28 15:46:09,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=175677.33333333334, ans=0.125
+2024-07-28 15:46:11,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0
+2024-07-28 15:46:11,972 INFO [train.py:1114] (1/4) Epoch 13, batch 9100, loss[loss=0.1824, simple_loss=0.2827, pruned_loss=0.04103, over 4931.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2747, pruned_loss=0.04917, over 937036.56 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:46:13,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=175690.66666666666, ans=0.025
+2024-07-28 15:46:15,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=175690.66666666666, ans=0.1
+2024-07-28 15:46:21,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=22.5
+2024-07-28 15:46:22,479 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.613e+01 6.012e+01 6.953e+01 8.806e+01, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 15:46:26,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=175717.33333333334, ans=0.0
+2024-07-28 15:46:27,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=175717.33333333334, ans=0.125
+2024-07-28 15:46:31,219 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.60 vs. limit=15.0
+2024-07-28 15:46:56,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=175744.0, ans=0.125
+2024-07-28 15:47:19,812 INFO [train.py:1114] (1/4) Epoch 13, batch 9150, loss[loss=0.1794, simple_loss=0.2836, pruned_loss=0.0376, over 4813.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2751, pruned_loss=0.04915, over 935706.96 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:47:19,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=175757.33333333334, ans=0.125
+2024-07-28 15:47:20,565 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:47:21,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.68 vs. limit=10.0
+2024-07-28 15:47:41,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=175757.33333333334, ans=0.2
+2024-07-28 15:48:22,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=175784.0, ans=0.125
+2024-07-28 15:48:22,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175784.0, ans=0.0
+2024-07-28 15:48:24,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175784.0, ans=0.125
+2024-07-28 15:48:25,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=175784.0, ans=0.125
+2024-07-28 15:48:29,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175797.33333333334, ans=0.0
+2024-07-28 15:48:35,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=175810.66666666666, ans=0.2
+2024-07-28 15:48:41,159 INFO [train.py:1114] (1/4) Epoch 13, batch 9200, loss[loss=0.1637, simple_loss=0.2433, pruned_loss=0.04208, over 4866.00 frames. ], tot_loss[loss=0.186, simple_loss=0.274, pruned_loss=0.04898, over 937240.07 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:48:41,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=175824.0, ans=0.125
+2024-07-28 15:48:50,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.67 vs. limit=15.0
+2024-07-28 15:48:51,963 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.167e+01 6.927e+01 1.004e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 15:49:09,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=175877.33333333334, ans=0.125
+2024-07-28 15:49:12,535 INFO [train.py:1114] (1/4) Epoch 13, batch 9250, loss[loss=0.1819, simple_loss=0.2736, pruned_loss=0.04512, over 4632.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2735, pruned_loss=0.04839, over 937794.49 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:49:13,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=175890.66666666666, ans=0.125
+2024-07-28 15:49:16,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175890.66666666666, ans=0.125
+2024-07-28 15:52:43,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.64 vs. limit=15.0
+2024-07-28 15:52:44,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.20 vs. limit=12.0
+2024-07-28 15:52:48,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.06 vs. limit=10.0
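The `Whitening: ... metric=M vs. limit=L` lines compare a whiteness statistic of a module's activations against a (scheduled) limit; exceeding the limit triggers a corrective gradient. A sketch of one plausible metric, assuming it is the eigenvalue ratio E[lambda^2]/E[lambda]^2 of the per-group feature covariance, which is 1.0 for perfectly white features (this formula is an assumption about scaling.py's internals, not a quote of them):

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Sketch of the 'Whitening' diagnostic: how non-white are the features?

    For each channel group, take the covariance C of the features and report
    mean(eig(C)^2) / mean(eig(C))^2: near 1.0 for whitened features, growing
    toward the group size as the spectrum becomes skewed.
    """
    n, c = x.shape  # x: (num_frames, num_channels)
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)  # (g, n, d)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n                   # (g, d, d)
    eigs = torch.linalg.eigvalsh(cov)                              # (g, d)
    metric = (eigs ** 2).mean(dim=1) / eigs.mean(dim=1).clamp(min=1e-20) ** 2
    return metric.mean()

# whitening_metric(torch.randn(1000, 256)) is near 1; strongly correlated
# channels push the metric toward the number of channels per group.
```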
+2024-07-28 15:52:56,046 INFO [train.py:1114] (1/4) Epoch 13, batch 9300, loss[loss=0.1737, simple_loss=0.2623, pruned_loss=0.0425, over 4782.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2733, pruned_loss=0.04835, over 937623.17 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:53:00,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=175957.33333333334, ans=0.2
+2024-07-28 15:53:01,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=175957.33333333334, ans=0.125
+2024-07-28 15:53:06,731 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.420e+01 5.839e+01 6.596e+01 1.003e+02, threshold=1.168e+02, percent-clipped=0.0
+2024-07-28 15:53:12,755 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.83 vs. limit=15.0
+2024-07-28 15:53:15,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=175997.33333333334, ans=0.125
+2024-07-28 15:53:36,025 INFO [train.py:1114] (1/4) Epoch 13, batch 9350, loss[loss=0.1767, simple_loss=0.2488, pruned_loss=0.05232, over 4803.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2731, pruned_loss=0.04846, over 934509.15 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:53:36,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=176024.0, ans=0.125
+2024-07-28 15:54:08,556 INFO [train.py:1114] (1/4) Epoch 13, batch 9400, loss[loss=0.1745, simple_loss=0.2642, pruned_loss=0.04236, over 4690.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2741, pruned_loss=0.04891, over 932596.80 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:54:14,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=176090.66666666666, ans=0.0
+2024-07-28 15:54:19,729 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.585e+01 5.549e+01 6.208e+01 6.780e+01 1.030e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 15:54:40,624 INFO [train.py:1114] (1/4) Epoch 13, batch 9450, loss[loss=0.1638, simple_loss=0.2335, pruned_loss=0.04705, over 4801.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2746, pruned_loss=0.04921, over 932180.61 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:54:41,246 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:54:43,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=176157.33333333334, ans=0.0
+2024-07-28 15:55:05,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.71 vs. limit=22.5
+2024-07-28 15:55:06,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=176210.66666666666, ans=0.0
+2024-07-28 15:55:11,689 INFO [train.py:1114] (1/4) Epoch 13, batch 9500, loss[loss=0.1863, simple_loss=0.2731, pruned_loss=0.04977, over 4701.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2747, pruned_loss=0.04878, over 934791.93 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:55:22,346 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+01 5.491e+01 5.977e+01 6.811e+01 8.816e+01, threshold=1.195e+02, percent-clipped=0.0
+2024-07-28 15:55:24,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=176250.66666666666, ans=0.125
+2024-07-28 15:55:33,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.whiten.whitening_limit, batch_count=176264.0, ans=12.0
+2024-07-28 15:55:40,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=176277.33333333334, ans=0.05
+2024-07-28 15:55:43,368 INFO [train.py:1114] (1/4) Epoch 13, batch 9550, loss[loss=0.1552, simple_loss=0.2462, pruned_loss=0.03211, over 4784.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.0496, over 932007.11 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:55:52,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176304.0, ans=0.1
+2024-07-28 15:55:53,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.11 vs. limit=10.0
+2024-07-28 15:56:00,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=176317.33333333334, ans=0.0
+2024-07-28 15:56:02,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=176330.66666666666, ans=0.125
+2024-07-28 15:56:09,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=176344.0, ans=0.125
+2024-07-28 15:56:11,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=176344.0, ans=0.125
+2024-07-28 15:56:15,428 INFO [train.py:1114] (1/4) Epoch 13, batch 9600, loss[loss=0.2241, simple_loss=0.303, pruned_loss=0.0726, over 3390.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2772, pruned_loss=0.04948, over 930849.59 frames. ], batch size: 35, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:56:15,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176357.33333333334, ans=0.1
+2024-07-28 15:56:18,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=176357.33333333334, ans=15.0
+2024-07-28 15:56:19,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=176357.33333333334, ans=0.0
+2024-07-28 15:56:25,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=176370.66666666666, ans=0.025
+2024-07-28 15:56:26,135 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+01 5.951e+01 6.565e+01 7.484e+01 1.008e+02, threshold=1.313e+02, percent-clipped=0.0
+2024-07-28 15:56:26,856 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:56:32,382 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:56:34,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=176397.33333333334, ans=0.125
+2024-07-28 15:56:38,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=176397.33333333334, ans=0.125
+2024-07-28 15:56:47,956 INFO [train.py:1114] (1/4) Epoch 13, batch 9650, loss[loss=0.2199, simple_loss=0.3032, pruned_loss=0.06828, over 4827.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.278, pruned_loss=0.05017, over 926697.66 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 15:56:56,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=176437.33333333334, ans=0.125
+2024-07-28 15:57:14,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=176464.0, ans=0.125
+2024-07-28 15:57:17,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176477.33333333334, ans=0.1
+2024-07-28 15:57:24,273 INFO [train.py:1114] (1/4) Epoch 13, batch 9700, loss[loss=0.1799, simple_loss=0.2657, pruned_loss=0.04709, over 4217.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2775, pruned_loss=0.04972, over 924906.21 frames. ], batch size: 25, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 15:57:27,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.98 vs. limit=10.0
+2024-07-28 15:57:28,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=176490.66666666666, ans=0.125
+2024-07-28 15:57:28,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=176490.66666666666, ans=0.0
+2024-07-28 15:57:34,736 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.912e+01 5.596e+01 6.037e+01 6.865e+01 8.980e+01, threshold=1.207e+02, percent-clipped=0.0
+2024-07-28 15:57:35,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=176504.0, ans=0.125
+2024-07-28 15:57:45,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=176530.66666666666, ans=0.0
+2024-07-28 15:57:47,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.67 vs. limit=15.0
+2024-07-28 15:57:52,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=176544.0, ans=0.0
+2024-07-28 15:57:52,768 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.37 vs. limit=22.5
+2024-07-28 15:57:59,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176557.33333333334, ans=0.1
+2024-07-28 15:57:59,697 INFO [train.py:1114] (1/4) Epoch 13, batch 9750, loss[loss=0.2129, simple_loss=0.296, pruned_loss=0.06489, over 4689.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.277, pruned_loss=0.04969, over 925406.43 frames. ], batch size: 15, lr: 5.66e-03, grad_scale: 64.0
+2024-07-28 15:58:04,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=176557.33333333334, ans=0.0
+2024-07-28 16:00:27,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=176584.0, ans=0.125
+2024-07-28 16:01:32,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=176610.66666666666, ans=0.0
+2024-07-28 16:01:33,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=176610.66666666666, ans=0.07
+2024-07-28 16:01:35,056 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.72 vs. limit=22.5
+2024-07-28 16:01:45,800 INFO [train.py:1114] (1/4) Epoch 13, batch 9800, loss[loss=0.1653, simple_loss=0.256, pruned_loss=0.03731, over 4706.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2758, pruned_loss=0.04943, over 925000.50 frames. ], batch size: 12, lr: 5.66e-03, grad_scale: 64.0
+2024-07-28 16:02:08,118 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.529e+01 5.638e+01 6.459e+01 7.664e+01 1.106e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 16:02:31,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=176650.66666666666, ans=0.125
+2024-07-28 16:03:21,594 INFO [train.py:1114] (1/4) Epoch 13, batch 9850, loss[loss=0.1829, simple_loss=0.2796, pruned_loss=0.0431, over 4887.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2764, pruned_loss=0.04967, over 927617.55 frames. ], batch size: 15, lr: 5.66e-03, grad_scale: 64.0
+2024-07-28 16:03:22,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=176690.66666666666, ans=0.125
+2024-07-28 16:03:24,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5
+2024-07-28 16:03:25,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=176690.66666666666, ans=0.1
+2024-07-28 16:04:41,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176717.33333333334, ans=0.1
+2024-07-28 16:04:43,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=176717.33333333334, ans=0.125
+2024-07-28 16:04:44,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=176717.33333333334, ans=0.07
+2024-07-28 16:05:22,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.40 vs. limit=15.0
+2024-07-28 16:05:22,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0
+2024-07-28 16:05:24,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.41 vs. limit=12.0
+2024-07-28 16:05:30,411 INFO [train.py:1114] (1/4) Epoch 13, batch 9900, loss[loss=0.1641, simple_loss=0.2483, pruned_loss=0.03993, over 4836.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2767, pruned_loss=0.05007, over 926756.40 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:06:07,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.66 vs. limit=10.0
+2024-07-28 16:06:49,267 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+01 5.715e+01 6.519e+01 7.339e+01 1.147e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 16:06:51,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=176784.0, ans=0.2
+2024-07-28 16:07:11,625 INFO [train.py:1114] (1/4) Epoch 13, batch 9950, loss[loss=0.1599, simple_loss=0.2502, pruned_loss=0.03476, over 4811.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2769, pruned_loss=0.0502, over 928968.33 frames. ], batch size: 11, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:07:20,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=176824.0, ans=0.0
+2024-07-28 16:07:23,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=176824.0, ans=0.025
+2024-07-28 16:07:24,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=176837.33333333334, ans=0.2
+2024-07-28 16:07:27,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176837.33333333334, ans=0.1
+2024-07-28 16:07:28,507 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:07:38,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=176864.0, ans=0.0
+2024-07-28 16:07:50,042 INFO [train.py:1114] (1/4) Epoch 13, batch 10000, loss[loss=0.1588, simple_loss=0.2588, pruned_loss=0.02942, over 4643.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2795, pruned_loss=0.05096, over 926911.61 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:07:53,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=176890.66666666666, ans=0.125
+2024-07-28 16:07:54,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=176890.66666666666, ans=0.025
+2024-07-28 16:07:54,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=176890.66666666666, ans=0.125
+2024-07-28 16:07:56,095 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.34 vs. limit=15.0
+2024-07-28 16:08:01,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.743e+01 6.303e+01 7.198e+01 1.105e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 16:08:01,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=176904.0, ans=0.0
+2024-07-28 16:08:05,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.40 vs. limit=22.5
+2024-07-28 16:08:08,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176930.66666666666, ans=0.1
+2024-07-28 16:08:22,352 INFO [train.py:1114] (1/4) Epoch 13, batch 10050, loss[loss=0.2243, simple_loss=0.3016, pruned_loss=0.07346, over 3274.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2831, pruned_loss=0.05308, over 914476.39 frames. ], batch size: 35, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:08:25,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=176957.33333333334, ans=0.025
+2024-07-28 16:08:36,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=176984.0, ans=0.2
+2024-07-28 16:08:49,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=177010.66666666666, ans=0.0
+2024-07-28 16:08:49,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=177010.66666666666, ans=0.125
+2024-07-28 16:08:55,886 INFO [train.py:1114] (1/4) Epoch 13, batch 10100, loss[loss=0.2511, simple_loss=0.315, pruned_loss=0.09362, over 3628.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2884, pruned_loss=0.05861, over 862171.81 frames. ], batch size: 35, lr: 5.65e-03, grad_scale: 32.0
+2024-07-28 16:08:57,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0
+2024-07-28 16:09:07,526 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+01 6.562e+01 7.156e+01 7.782e+01 1.093e+02, threshold=1.431e+02, percent-clipped=0.0
+2024-07-28 16:09:07,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177037.33333333334, ans=0.125
+2024-07-28 16:09:09,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=177050.66666666666, ans=0.125
+2024-07-28 16:09:11,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=177050.66666666666, ans=0.07
+2024-07-28 16:09:12,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.40 vs. limit=15.0
+2024-07-28 16:09:22,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.61 vs. limit=22.5
+2024-07-28 16:09:22,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.02 vs. limit=15.0
+2024-07-28 16:09:28,321 INFO [train.py:1114] (1/4) Epoch 13, batch 10150, loss[loss=0.2509, simple_loss=0.3319, pruned_loss=0.08498, over 3218.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2922, pruned_loss=0.06274, over 821405.42 frames. ], batch size: 36, lr: 5.65e-03, grad_scale: 32.0
+2024-07-28 16:09:33,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=177090.66666666666, ans=0.035
+2024-07-28 16:09:35,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=177104.0, ans=0.2
+2024-07-28 16:09:47,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=177130.66666666666, ans=0.025
+2024-07-28 16:09:48,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177130.66666666666, ans=0.1
+2024-07-28 16:10:00,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177144.0, ans=0.125
+2024-07-28 16:10:08,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.42 vs. limit=15.0
+2024-07-28 16:10:15,115 INFO [train.py:1114] (1/4) Epoch 13, batch 10200, loss[loss=0.2377, simple_loss=0.3144, pruned_loss=0.08049, over 3393.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2945, pruned_loss=0.06499, over 787688.50 frames. ], batch size: 35, lr: 5.65e-03, grad_scale: 32.0
+2024-07-28 16:10:19,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177157.33333333334, ans=0.1
+2024-07-28 16:10:26,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.992e+01 6.629e+01 7.003e+01 7.390e+01 9.064e+01, threshold=1.401e+02, percent-clipped=0.0
+2024-07-28 16:11:16,418 INFO [train.py:1114] (1/4) Epoch 14, batch 0, loss[loss=0.1452, simple_loss=0.2396, pruned_loss=0.02542, over 4859.00 frames. ], tot_loss[loss=0.1452, simple_loss=0.2396, pruned_loss=0.02542, over 4859.00 frames. ], batch size: 12, lr: 5.45e-03, grad_scale: 32.0
+2024-07-28 16:11:16,419 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 16:14:51,473 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.2951, 4.6991, 4.6284, 5.0723], device='cuda:1')
+2024-07-28 16:14:55,767 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1673, simple_loss=0.2724, pruned_loss=0.03104, over 944034.00 frames.
+2024-07-28 16:14:55,768 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 16:15:07,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177186.66666666666, ans=0.125
+2024-07-28 16:15:07,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=177186.66666666666, ans=0.0
+2024-07-28 16:15:11,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=177200.0, ans=0.2
+2024-07-28 16:15:20,717 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.18 vs. limit=22.5
+2024-07-28 16:15:24,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=177226.66666666666, ans=0.0
+2024-07-28 16:15:35,829 INFO [train.py:1114] (1/4) Epoch 14, batch 50, loss[loss=0.2075, simple_loss=0.279, pruned_loss=0.06802, over 4600.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2759, pruned_loss=0.04784, over 205844.46 frames. ], batch size: 11, lr: 5.45e-03, grad_scale: 32.0
+2024-07-28 16:15:40,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=177253.33333333334, ans=0.025
+2024-07-28 16:15:51,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0
+2024-07-28 16:15:54,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177280.0, ans=0.125
+2024-07-28 16:16:14,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.55 vs. limit=15.0
+2024-07-28 16:16:17,034 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.504e+01 5.430e+01 5.954e+01 6.690e+01 1.022e+02, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 16:16:19,818 INFO [train.py:1114] (1/4) Epoch 14, batch 100, loss[loss=0.1655, simple_loss=0.2582, pruned_loss=0.03642, over 4643.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2821, pruned_loss=0.05009, over 365243.19 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0
+2024-07-28 16:16:20,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177320.0, ans=0.125
+2024-07-28 16:16:23,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=177320.0, ans=0.0
+2024-07-28 16:16:27,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=177333.33333333334, ans=0.125
+2024-07-28 16:16:55,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=177346.66666666666, ans=0.2
+2024-07-28 16:17:00,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.34 vs. limit=15.0
+2024-07-28 16:17:06,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=177373.33333333334, ans=0.125
+2024-07-28 16:17:08,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=177373.33333333334, ans=0.125
+2024-07-28 16:17:13,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=177373.33333333334, ans=0.0
+2024-07-28 16:17:13,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=177373.33333333334, ans=0.2
+2024-07-28 16:17:14,950 INFO [train.py:1114] (1/4) Epoch 14, batch 150, loss[loss=0.1601, simple_loss=0.239, pruned_loss=0.04063, over 4624.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2777, pruned_loss=0.04894, over 493893.35 frames. ], batch size: 11, lr: 5.44e-03, grad_scale: 32.0
+2024-07-28 16:17:22,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=177400.0, ans=0.125
+2024-07-28 16:17:24,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=177400.0, ans=0.2
+2024-07-28 16:17:34,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0
+2024-07-28 16:17:40,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=177426.66666666666, ans=0.2
+2024-07-28 16:17:44,700 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 16:17:45,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=177426.66666666666, ans=0.125
+2024-07-28 16:17:48,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0
+2024-07-28 16:17:52,154 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.424e+01 5.956e+01 7.040e+01 1.129e+02, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 16:17:57,480 INFO [train.py:1114] (1/4) Epoch 14, batch 200, loss[loss=0.1894, simple_loss=0.2671, pruned_loss=0.05582, over 4537.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2765, pruned_loss=0.04884, over 593693.79 frames. ], batch size: 21, lr: 5.44e-03, grad_scale: 32.0
+2024-07-28 16:17:58,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177453.33333333334, ans=0.125
+2024-07-28 16:18:00,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=177453.33333333334, ans=0.0
+2024-07-28 16:18:02,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=177453.33333333334, ans=0.125
+2024-07-28 16:18:02,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=177453.33333333334, ans=0.125
+2024-07-28 16:18:24,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=177493.33333333334, ans=0.2
+2024-07-28 16:18:37,119 INFO [train.py:1114] (1/4) Epoch 14, batch 250, loss[loss=0.1957, simple_loss=0.2895, pruned_loss=0.05094, over 4647.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2754, pruned_loss=0.04828, over 670356.06 frames. ], batch size: 16, lr: 5.44e-03, grad_scale: 32.0
], batch size: 16, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:18:37,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=177520.0, ans=0.125 +2024-07-28 16:18:54,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177546.66666666666, ans=0.1 +2024-07-28 16:18:57,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=177560.0, ans=0.025 +2024-07-28 16:19:00,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=177560.0, ans=0.2 +2024-07-28 16:19:04,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177573.33333333334, ans=0.1 +2024-07-28 16:19:08,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177573.33333333334, ans=0.1 +2024-07-28 16:19:08,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.662e+01 6.232e+01 7.449e+01 1.133e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 16:19:08,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=177573.33333333334, ans=0.5 +2024-07-28 16:19:11,377 INFO [train.py:1114] (1/4) Epoch 14, batch 300, loss[loss=0.1522, simple_loss=0.2495, pruned_loss=0.02749, over 4814.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2744, pruned_loss=0.04766, over 729996.14 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:22,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=177600.0, ans=0.0 +2024-07-28 16:19:33,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=177613.33333333334, ans=0.125 +2024-07-28 16:19:40,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177626.66666666666, ans=0.1 +2024-07-28 16:19:48,699 INFO [train.py:1114] (1/4) Epoch 14, batch 350, loss[loss=0.1793, simple_loss=0.2657, pruned_loss=0.04642, over 4928.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2748, pruned_loss=0.04753, over 776163.44 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:48,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=177653.33333333334, ans=0.0 +2024-07-28 16:19:49,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.27 vs. 
limit=15.0 +2024-07-28 16:20:09,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177666.66666666666, ans=0.125 +2024-07-28 16:20:32,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=177680.0, ans=0.125 +2024-07-28 16:20:36,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=177693.33333333334, ans=0.02 +2024-07-28 16:20:44,595 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.533e+01 5.994e+01 6.845e+01 1.570e+02, threshold=1.199e+02, percent-clipped=1.0 +2024-07-28 16:20:47,239 INFO [train.py:1114] (1/4) Epoch 14, batch 400, loss[loss=0.1853, simple_loss=0.2654, pruned_loss=0.05261, over 4695.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2746, pruned_loss=0.04773, over 813543.45 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:20:54,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=177733.33333333334, ans=0.0 +2024-07-28 16:21:11,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=177760.0, ans=0.125 +2024-07-28 16:21:22,171 INFO [train.py:1114] (1/4) Epoch 14, batch 450, loss[loss=0.2014, simple_loss=0.281, pruned_loss=0.0609, over 4626.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2744, pruned_loss=0.04803, over 838916.96 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:22,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=177786.66666666666, ans=0.0 +2024-07-28 16:21:25,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.42 vs. limit=15.0 +2024-07-28 16:21:48,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177840.0, ans=0.1 +2024-07-28 16:21:50,851 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:21:52,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=177840.0, ans=0.0 +2024-07-28 16:21:52,670 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.509e+01 6.147e+01 6.796e+01 9.434e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 16:21:53,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=177840.0, ans=0.0 +2024-07-28 16:21:55,365 INFO [train.py:1114] (1/4) Epoch 14, batch 500, loss[loss=0.2124, simple_loss=0.2908, pruned_loss=0.06696, over 4688.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.273, pruned_loss=0.04724, over 861314.93 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:59,258 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.00 vs. limit=22.5 +2024-07-28 16:22:01,838 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.18 vs. 
limit=12.0 +2024-07-28 16:22:12,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177880.0, ans=0.125 +2024-07-28 16:22:21,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=177893.33333333334, ans=0.125 +2024-07-28 16:22:21,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=177906.66666666666, ans=0.0 +2024-07-28 16:22:25,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.45 vs. limit=10.0 +2024-07-28 16:22:26,683 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.50 vs. limit=12.0 +2024-07-28 16:22:28,997 INFO [train.py:1114] (1/4) Epoch 14, batch 550, loss[loss=0.22, simple_loss=0.3045, pruned_loss=0.06774, over 4622.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2732, pruned_loss=0.04766, over 877606.99 frames. ], batch size: 17, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:22:30,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.05 vs. limit=10.0 +2024-07-28 16:22:32,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.41 vs. limit=22.5 +2024-07-28 16:22:34,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177920.0, ans=0.125 +2024-07-28 16:22:55,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=177973.33333333334, ans=10.0 +2024-07-28 16:22:55,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177973.33333333334, ans=0.125 +2024-07-28 16:22:59,816 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.791e+01 6.101e+01 6.506e+01 8.521e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 16:23:00,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=177973.33333333334, ans=0.09899494936611666 +2024-07-28 16:23:01,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=177973.33333333334, ans=0.125 +2024-07-28 16:23:02,686 INFO [train.py:1114] (1/4) Epoch 14, batch 600, loss[loss=0.2291, simple_loss=0.3176, pruned_loss=0.07033, over 4623.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2733, pruned_loss=0.04781, over 892052.34 frames. 
], batch size: 16, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:23:06,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177986.66666666666, ans=0.1 +2024-07-28 16:23:09,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=178000.0, ans=0.0 +2024-07-28 16:23:09,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=178000.0, ans=0.2 +2024-07-28 16:23:11,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.98 vs. limit=15.0 +2024-07-28 16:23:15,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=178000.0, ans=0.0 +2024-07-28 16:23:53,564 INFO [train.py:1114] (1/4) Epoch 14, batch 650, loss[loss=0.1741, simple_loss=0.2621, pruned_loss=0.04303, over 4757.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2734, pruned_loss=0.04751, over 903611.86 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:18,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=178093.33333333334, ans=0.125 +2024-07-28 16:24:20,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178093.33333333334, ans=0.125 +2024-07-28 16:24:23,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=178106.66666666666, ans=0.0 +2024-07-28 16:24:26,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=178106.66666666666, ans=0.0 +2024-07-28 16:24:26,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=178106.66666666666, ans=0.2 +2024-07-28 16:24:27,861 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.659e+01 6.159e+01 7.127e+01 1.309e+02, threshold=1.232e+02, percent-clipped=1.0 +2024-07-28 16:24:29,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=178106.66666666666, ans=0.125 +2024-07-28 16:24:30,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=178120.0, ans=0.2 +2024-07-28 16:24:30,519 INFO [train.py:1114] (1/4) Epoch 14, batch 700, loss[loss=0.153, simple_loss=0.2517, pruned_loss=0.02715, over 4638.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2737, pruned_loss=0.04742, over 911836.81 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:30,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=12.0 +2024-07-28 16:24:32,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.83 vs. limit=15.0 +2024-07-28 16:24:34,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.89 vs. 
limit=15.0 +2024-07-28 16:24:43,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=178133.33333333334, ans=0.2 +2024-07-28 16:24:46,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=178146.66666666666, ans=0.125 +2024-07-28 16:24:50,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=178146.66666666666, ans=0.0 +2024-07-28 16:24:53,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178160.0, ans=0.125 +2024-07-28 16:26:48,782 INFO [train.py:1114] (1/4) Epoch 14, batch 750, loss[loss=0.1898, simple_loss=0.2719, pruned_loss=0.05383, over 4691.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2736, pruned_loss=0.04711, over 918348.44 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:26:54,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=178200.0, ans=0.0 +2024-07-28 16:26:56,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178200.0, ans=0.125 +2024-07-28 16:27:03,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178213.33333333334, ans=0.125 +2024-07-28 16:27:16,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178240.0, ans=0.125 +2024-07-28 16:27:20,237 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.515e+01 6.007e+01 6.700e+01 1.144e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:27:22,821 INFO [train.py:1114] (1/4) Epoch 14, batch 800, loss[loss=0.1713, simple_loss=0.253, pruned_loss=0.04484, over 4852.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2739, pruned_loss=0.04742, over 923721.47 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:31,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.57 vs. limit=15.0 +2024-07-28 16:27:52,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.31 vs. limit=15.0 +2024-07-28 16:27:56,262 INFO [train.py:1114] (1/4) Epoch 14, batch 850, loss[loss=0.1861, simple_loss=0.2764, pruned_loss=0.04792, over 4659.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2726, pruned_loss=0.04717, over 927932.56 frames. ], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:57,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=178320.0, ans=0.025 +2024-07-28 16:27:57,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.68 vs. 
limit=15.0 +2024-07-28 16:28:11,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=178346.66666666666, ans=0.025 +2024-07-28 16:28:17,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178360.0, ans=0.125 +2024-07-28 16:28:18,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178360.0, ans=0.1 +2024-07-28 16:28:27,737 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.522e+01 6.221e+01 7.119e+01 8.769e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 16:28:30,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=178386.66666666666, ans=0.04949747468305833 +2024-07-28 16:28:30,527 INFO [train.py:1114] (1/4) Epoch 14, batch 900, loss[loss=0.1654, simple_loss=0.257, pruned_loss=0.03692, over 4844.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2738, pruned_loss=0.04768, over 928455.17 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:28:51,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=178413.33333333334, ans=0.125 +2024-07-28 16:28:57,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-07-28 16:29:00,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178440.0, ans=0.125 +2024-07-28 16:29:00,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178440.0, ans=0.125 +2024-07-28 16:29:01,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=178440.0, ans=0.2 +2024-07-28 16:29:06,310 INFO [train.py:1114] (1/4) Epoch 14, batch 950, loss[loss=0.1785, simple_loss=0.2665, pruned_loss=0.04527, over 4783.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2739, pruned_loss=0.04766, over 930314.39 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:12,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.04 vs. 
limit=10.0 +2024-07-28 16:29:21,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=178480.0, ans=0.95 +2024-07-28 16:29:21,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=178480.0, ans=0.2 +2024-07-28 16:29:26,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=178493.33333333334, ans=0.125 +2024-07-28 16:29:37,156 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.573e+01 6.236e+01 6.880e+01 1.050e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 16:29:39,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=178520.0, ans=0.0 +2024-07-28 16:29:39,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178520.0, ans=0.1 +2024-07-28 16:29:39,833 INFO [train.py:1114] (1/4) Epoch 14, batch 1000, loss[loss=0.1582, simple_loss=0.2527, pruned_loss=0.03189, over 4959.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2737, pruned_loss=0.04779, over 929860.26 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:48,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=178533.33333333334, ans=0.0 +2024-07-28 16:29:49,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=178533.33333333334, ans=0.125 +2024-07-28 16:30:05,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0 +2024-07-28 16:30:07,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.29 vs. limit=15.0 +2024-07-28 16:30:16,064 INFO [train.py:1114] (1/4) Epoch 14, batch 1050, loss[loss=0.2129, simple_loss=0.3071, pruned_loss=0.05936, over 4869.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2743, pruned_loss=0.04844, over 931948.93 frames. 
], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:30:25,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=178600.0, ans=0.125 +2024-07-28 16:30:30,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=178613.33333333334, ans=0.2 +2024-07-28 16:30:36,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178613.33333333334, ans=0.0 +2024-07-28 16:30:41,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178626.66666666666, ans=0.125 +2024-07-28 16:30:45,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178640.0, ans=0.125 +2024-07-28 16:30:48,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178640.0, ans=0.125 +2024-07-28 16:30:48,754 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.778e+01 6.259e+01 7.627e+01 1.146e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 16:30:53,944 INFO [train.py:1114] (1/4) Epoch 14, batch 1100, loss[loss=0.1701, simple_loss=0.2569, pruned_loss=0.04164, over 4889.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2741, pruned_loss=0.04868, over 934440.96 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:30:57,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178653.33333333334, ans=0.0 +2024-07-28 16:31:09,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=178680.0, ans=0.125 +2024-07-28 16:31:26,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=178706.66666666666, ans=0.0 +2024-07-28 16:31:31,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178720.0, ans=0.1 +2024-07-28 16:31:31,674 INFO [train.py:1114] (1/4) Epoch 14, batch 1150, loss[loss=0.1825, simple_loss=0.2695, pruned_loss=0.04775, over 4886.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2734, pruned_loss=0.0484, over 934416.78 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:31:40,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178733.33333333334, ans=0.1 +2024-07-28 16:31:46,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178746.66666666666, ans=0.0 +2024-07-28 16:31:47,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=178746.66666666666, ans=0.125 +2024-07-28 16:31:51,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=178760.0, ans=0.0 +2024-07-28 16:31:53,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.73 vs. 
limit=15.0 +2024-07-28 16:32:02,869 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.551e+01 6.026e+01 6.659e+01 1.121e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 16:32:05,593 INFO [train.py:1114] (1/4) Epoch 14, batch 1200, loss[loss=0.22, simple_loss=0.2997, pruned_loss=0.07018, over 4870.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.274, pruned_loss=0.04862, over 933335.57 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:17,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=178800.0, ans=0.125 +2024-07-28 16:32:17,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=178800.0, ans=0.125 +2024-07-28 16:32:18,939 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:32:26,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=178826.66666666666, ans=0.125 +2024-07-28 16:32:28,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=178826.66666666666, ans=0.125 +2024-07-28 16:32:28,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=178826.66666666666, ans=0.04949747468305833 +2024-07-28 16:32:29,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.38 vs. limit=15.0 +2024-07-28 16:32:38,591 INFO [train.py:1114] (1/4) Epoch 14, batch 1250, loss[loss=0.203, simple_loss=0.2849, pruned_loss=0.06052, over 4800.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2741, pruned_loss=0.04832, over 937451.98 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:42,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178853.33333333334, ans=0.125 +2024-07-28 16:32:50,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=178866.66666666666, ans=0.2 +2024-07-28 16:33:07,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=178906.66666666666, ans=0.125 +2024-07-28 16:33:09,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=178906.66666666666, ans=0.0 +2024-07-28 16:33:09,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=22.5 +2024-07-28 16:33:09,547 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.596e+01 6.109e+01 6.927e+01 8.665e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:33:10,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=178906.66666666666, ans=0.0 +2024-07-28 16:33:12,230 INFO [train.py:1114] (1/4) Epoch 14, batch 1300, loss[loss=0.1683, simple_loss=0.2691, pruned_loss=0.03375, over 4746.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2739, pruned_loss=0.04863, over 939384.23 frames. 
], batch size: 19, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:12,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=178920.0, ans=0.125 +2024-07-28 16:33:15,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=178920.0, ans=0.0 +2024-07-28 16:33:20,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=178933.33333333334, ans=0.025 +2024-07-28 16:33:20,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178933.33333333334, ans=0.0 +2024-07-28 16:33:26,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178946.66666666666, ans=0.125 +2024-07-28 16:33:31,990 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:33:34,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=178960.0, ans=0.025 +2024-07-28 16:33:40,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=178973.33333333334, ans=0.125 +2024-07-28 16:33:41,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.70 vs. limit=12.0 +2024-07-28 16:33:43,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178973.33333333334, ans=0.1 +2024-07-28 16:33:44,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=178986.66666666666, ans=0.0 +2024-07-28 16:33:45,452 INFO [train.py:1114] (1/4) Epoch 14, batch 1350, loss[loss=0.1944, simple_loss=0.2894, pruned_loss=0.04967, over 4760.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2736, pruned_loss=0.04795, over 941530.31 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:50,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=178986.66666666666, ans=0.2 +2024-07-28 16:33:50,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178986.66666666666, ans=0.0 +2024-07-28 16:33:50,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=178986.66666666666, ans=0.125 +2024-07-28 16:34:00,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=179013.33333333334, ans=0.0 +2024-07-28 16:34:18,362 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.737e+01 5.767e+01 6.518e+01 7.803e+01 1.206e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 16:34:21,086 INFO [train.py:1114] (1/4) Epoch 14, batch 1400, loss[loss=0.1674, simple_loss=0.2425, pruned_loss=0.04612, over 4712.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2737, pruned_loss=0.04821, over 943150.20 frames. 
], batch size: 11, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:23,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=179053.33333333334, ans=0.2 +2024-07-28 16:34:25,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=179053.33333333334, ans=0.0 +2024-07-28 16:34:27,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.86 vs. limit=15.0 +2024-07-28 16:34:33,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.10 vs. limit=15.0 +2024-07-28 16:34:45,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=179093.33333333334, ans=0.125 +2024-07-28 16:34:49,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=179106.66666666666, ans=0.125 +2024-07-28 16:34:50,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.56 vs. limit=6.0 +2024-07-28 16:34:51,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=179106.66666666666, ans=0.0 +2024-07-28 16:34:52,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179106.66666666666, ans=0.125 +2024-07-28 16:34:53,309 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.12 vs. limit=10.0 +2024-07-28 16:34:54,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=179120.0, ans=0.0 +2024-07-28 16:34:54,874 INFO [train.py:1114] (1/4) Epoch 14, batch 1450, loss[loss=0.2046, simple_loss=0.3022, pruned_loss=0.05349, over 4681.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2741, pruned_loss=0.04792, over 943000.08 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:59,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179120.0, ans=0.04949747468305833 +2024-07-28 16:35:10,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179146.66666666666, ans=0.125 +2024-07-28 16:35:19,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.52 vs. limit=22.5 +2024-07-28 16:35:25,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+01 5.717e+01 6.158e+01 6.700e+01 8.649e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 16:35:28,156 INFO [train.py:1114] (1/4) Epoch 14, batch 1500, loss[loss=0.1664, simple_loss=0.2648, pruned_loss=0.03404, over 4821.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2744, pruned_loss=0.04778, over 942736.61 frames. 
], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:35:28,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=179186.66666666666, ans=0.0 +2024-07-28 16:35:30,968 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:35:33,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=179186.66666666666, ans=0.0 +2024-07-28 16:35:40,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179200.0, ans=0.0 +2024-07-28 16:35:45,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=179213.33333333334, ans=15.0 +2024-07-28 16:35:46,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0 +2024-07-28 16:35:51,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179226.66666666666, ans=0.125 +2024-07-28 16:35:52,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179226.66666666666, ans=0.1 +2024-07-28 16:35:57,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179240.0, ans=0.1 +2024-07-28 16:36:02,054 INFO [train.py:1114] (1/4) Epoch 14, batch 1550, loss[loss=0.1728, simple_loss=0.2797, pruned_loss=0.03293, over 4902.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2738, pruned_loss=0.04775, over 939424.50 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:36:02,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=179253.33333333334, ans=0.0 +2024-07-28 16:36:07,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179253.33333333334, ans=0.125 +2024-07-28 16:36:11,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=179266.66666666666, ans=0.0 +2024-07-28 16:36:11,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=179266.66666666666, ans=0.125 +2024-07-28 16:36:22,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179280.0, ans=0.125 +2024-07-28 16:36:30,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=179306.66666666666, ans=0.125 +2024-07-28 16:36:32,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=179306.66666666666, ans=0.0 +2024-07-28 16:36:34,738 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.675e+01 6.307e+01 6.776e+01 1.138e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:36:39,294 INFO [train.py:1114] (1/4) Epoch 14, batch 1600, loss[loss=0.1654, simple_loss=0.2641, pruned_loss=0.03337, over 4878.00 frames. 
], tot_loss[loss=0.185, simple_loss=0.2739, pruned_loss=0.04811, over 937824.18 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 32.0 +2024-07-28 16:36:48,456 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0 +2024-07-28 16:36:55,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179346.66666666666, ans=0.125 +2024-07-28 16:36:58,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.46 vs. limit=22.5 +2024-07-28 16:36:59,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-07-28 16:37:14,611 INFO [train.py:1114] (1/4) Epoch 14, batch 1650, loss[loss=0.2026, simple_loss=0.2936, pruned_loss=0.05575, over 4655.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2744, pruned_loss=0.04887, over 937258.56 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:37:21,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=179400.0, ans=0.0 +2024-07-28 16:37:33,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=179426.66666666666, ans=0.2 +2024-07-28 16:37:35,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=179426.66666666666, ans=0.09899494936611666 +2024-07-28 16:37:41,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-07-28 16:37:45,304 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.723e+01 6.070e+01 6.636e+01 1.142e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 16:37:46,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=179440.0, ans=0.0 +2024-07-28 16:37:47,998 INFO [train.py:1114] (1/4) Epoch 14, batch 1700, loss[loss=0.1535, simple_loss=0.2288, pruned_loss=0.03913, over 4708.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2742, pruned_loss=0.04834, over 938969.81 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:37:58,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=179466.66666666666, ans=0.125 +2024-07-28 16:38:03,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=179480.0, ans=0.025 +2024-07-28 16:38:05,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=179480.0, ans=0.0 +2024-07-28 16:38:11,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=27.70 vs. limit=22.5 +2024-07-28 16:38:18,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=179506.66666666666, ans=0.2 +2024-07-28 16:38:20,926 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.67 vs. 
limit=15.0 +2024-07-28 16:38:21,301 INFO [train.py:1114] (1/4) Epoch 14, batch 1750, loss[loss=0.1536, simple_loss=0.2412, pruned_loss=0.03303, over 4792.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2734, pruned_loss=0.04802, over 939870.61 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:38:33,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.71 vs. limit=15.0 +2024-07-28 16:38:35,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179546.66666666666, ans=0.1 +2024-07-28 16:38:40,533 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0 +2024-07-28 16:38:42,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=179560.0, ans=0.0 +2024-07-28 16:38:50,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=179573.33333333334, ans=0.125 +2024-07-28 16:38:52,728 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.501e+01 6.182e+01 7.069e+01 1.179e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 16:38:53,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.36 vs. limit=15.0 +2024-07-28 16:38:55,449 INFO [train.py:1114] (1/4) Epoch 14, batch 1800, loss[loss=0.1827, simple_loss=0.2662, pruned_loss=0.04957, over 4634.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2732, pruned_loss=0.04837, over 940739.33 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:39:04,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=179600.0, ans=0.0 +2024-07-28 16:39:07,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=179600.0, ans=0.0 +2024-07-28 16:39:13,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=179613.33333333334, ans=0.09899494936611666 +2024-07-28 16:39:29,077 INFO [train.py:1114] (1/4) Epoch 14, batch 1850, loss[loss=0.2026, simple_loss=0.2951, pruned_loss=0.05511, over 4805.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.273, pruned_loss=0.04808, over 940560.13 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:39:33,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179653.33333333334, ans=0.125 +2024-07-28 16:39:38,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=179666.66666666666, ans=0.025 +2024-07-28 16:39:55,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179693.33333333334, ans=0.125 +2024-07-28 16:40:01,780 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.632e+01 6.282e+01 7.683e+01 1.282e+02, threshold=1.256e+02, percent-clipped=1.0 +2024-07-28 16:42:23,048 INFO [train.py:1114] (1/4) Epoch 14, batch 1900, loss[loss=0.1802, simple_loss=0.2703, pruned_loss=0.04504, over 4648.00 frames. 
], tot_loss[loss=0.1859, simple_loss=0.2747, pruned_loss=0.04859, over 941939.64 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:42:59,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=179733.33333333334, ans=0.125 +2024-07-28 16:43:12,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=179760.0, ans=0.125 +2024-07-28 16:43:14,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=179760.0, ans=0.2 +2024-07-28 16:43:17,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179773.33333333334, ans=0.1 +2024-07-28 16:43:22,170 INFO [train.py:1114] (1/4) Epoch 14, batch 1950, loss[loss=0.1623, simple_loss=0.2648, pruned_loss=0.02992, over 4897.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2752, pruned_loss=0.04845, over 943945.06 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:43:37,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=179813.33333333334, ans=0.0 +2024-07-28 16:43:40,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179813.33333333334, ans=0.125 +2024-07-28 16:43:45,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.27 vs. limit=22.5 +2024-07-28 16:43:45,796 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:43:47,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=179826.66666666666, ans=0.0 +2024-07-28 16:43:48,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=179840.0, ans=0.125 +2024-07-28 16:43:52,883 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.659e+01 6.185e+01 6.876e+01 9.171e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 16:43:55,613 INFO [train.py:1114] (1/4) Epoch 14, batch 2000, loss[loss=0.1678, simple_loss=0.2553, pruned_loss=0.04016, over 4805.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2752, pruned_loss=0.04841, over 941126.98 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:44:00,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.68 vs. limit=15.0 +2024-07-28 16:44:13,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=179880.0, ans=0.025 +2024-07-28 16:44:13,930 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.46 vs. 
limit=15.0 +2024-07-28 16:44:18,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=179893.33333333334, ans=0.125 +2024-07-28 16:44:39,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=179920.0, ans=0.0 +2024-07-28 16:44:39,838 INFO [train.py:1114] (1/4) Epoch 14, batch 2050, loss[loss=0.1712, simple_loss=0.247, pruned_loss=0.0477, over 4623.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2744, pruned_loss=0.04813, over 939025.41 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:44:50,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179933.33333333334, ans=0.125 +2024-07-28 16:45:01,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.79 vs. limit=15.0 +2024-07-28 16:45:01,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=179960.0, ans=0.125 +2024-07-28 16:45:12,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.68 vs. limit=22.5 +2024-07-28 16:45:14,162 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.698e+01 6.586e+01 7.906e+01 1.162e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 16:45:16,944 INFO [train.py:1114] (1/4) Epoch 14, batch 2100, loss[loss=0.1775, simple_loss=0.2482, pruned_loss=0.05336, over 4767.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2732, pruned_loss=0.04767, over 940947.26 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:45:32,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=180013.33333333334, ans=0.125 +2024-07-28 16:45:36,897 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.03 vs. limit=12.0 +2024-07-28 16:45:46,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=12.0 +2024-07-28 16:45:50,453 INFO [train.py:1114] (1/4) Epoch 14, batch 2150, loss[loss=0.1829, simple_loss=0.2794, pruned_loss=0.04321, over 4897.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2714, pruned_loss=0.0466, over 944065.00 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:45:58,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180066.66666666666, ans=0.125 +2024-07-28 16:46:08,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=180080.0, ans=0.125 +2024-07-28 16:46:22,333 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.458e+01 6.111e+01 6.832e+01 1.017e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:46:28,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=180106.66666666666, ans=0.125 +2024-07-28 16:46:36,558 INFO [train.py:1114] (1/4) Epoch 14, batch 2200, loss[loss=0.1978, simple_loss=0.2887, pruned_loss=0.05346, over 4812.00 frames. 
], tot_loss[loss=0.1818, simple_loss=0.2712, pruned_loss=0.0462, over 943662.30 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:46:37,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.99 vs. limit=15.0 +2024-07-28 16:46:48,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.51 vs. limit=10.0 +2024-07-28 16:46:50,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=180133.33333333334, ans=0.0 +2024-07-28 16:46:52,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=180133.33333333334, ans=0.2 +2024-07-28 16:46:52,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=180133.33333333334, ans=0.125 +2024-07-28 16:46:55,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=180133.33333333334, ans=0.0 +2024-07-28 16:46:59,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.45 vs. limit=15.0 +2024-07-28 16:47:09,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=180160.0, ans=0.2 +2024-07-28 16:47:17,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-28 16:47:17,909 INFO [train.py:1114] (1/4) Epoch 14, batch 2250, loss[loss=0.1797, simple_loss=0.2812, pruned_loss=0.03908, over 4692.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2724, pruned_loss=0.04678, over 942628.41 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:47:23,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.53 vs. limit=22.5 +2024-07-28 16:47:27,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=180200.0, ans=0.0 +2024-07-28 16:47:37,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.13 vs. limit=22.5 +2024-07-28 16:47:57,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=180240.0, ans=0.125 +2024-07-28 16:48:09,490 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.321e+01 5.481e+01 6.034e+01 6.798e+01 1.360e+02, threshold=1.207e+02, percent-clipped=1.0 +2024-07-28 16:48:18,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.70 vs. limit=10.0 +2024-07-28 16:48:20,571 INFO [train.py:1114] (1/4) Epoch 14, batch 2300, loss[loss=0.131, simple_loss=0.2181, pruned_loss=0.02188, over 4947.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.272, pruned_loss=0.04683, over 940460.85 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:49:20,679 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. 
limit=15.0 +2024-07-28 16:49:31,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=180266.66666666666, ans=0.025 +2024-07-28 16:49:32,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180280.0, ans=0.1 +2024-07-28 16:49:39,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=180293.33333333334, ans=0.125 +2024-07-28 16:49:54,917 INFO [train.py:1114] (1/4) Epoch 14, batch 2350, loss[loss=0.2074, simple_loss=0.3102, pruned_loss=0.05233, over 4627.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2724, pruned_loss=0.04673, over 942320.07 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:49:57,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=180320.0, ans=0.125 +2024-07-28 16:50:04,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=180333.33333333334, ans=0.0 +2024-07-28 16:50:05,167 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:50:10,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180346.66666666666, ans=0.125 +2024-07-28 16:50:16,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.55 vs. limit=22.5 +2024-07-28 16:50:26,411 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.752e+01 6.311e+01 7.505e+01 9.885e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 16:50:29,052 INFO [train.py:1114] (1/4) Epoch 14, batch 2400, loss[loss=0.1712, simple_loss=0.2551, pruned_loss=0.04363, over 4648.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.273, pruned_loss=0.04691, over 941931.15 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:50:36,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180400.0, ans=0.125 +2024-07-28 16:50:40,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180400.0, ans=0.125 +2024-07-28 16:51:02,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0 +2024-07-28 16:51:03,418 INFO [train.py:1114] (1/4) Epoch 14, batch 2450, loss[loss=0.1763, simple_loss=0.2696, pruned_loss=0.04153, over 4700.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2738, pruned_loss=0.04763, over 937641.56 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:14,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=180466.66666666666, ans=0.0 +2024-07-28 16:51:23,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=180493.33333333334, ans=0.125 +2024-07-28 16:51:25,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.70 vs. 
limit=15.0 +2024-07-28 16:51:30,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-07-28 16:51:31,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=15.0 +2024-07-28 16:51:34,058 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.381e+01 5.494e+01 6.004e+01 6.734e+01 1.227e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:51:36,693 INFO [train.py:1114] (1/4) Epoch 14, batch 2500, loss[loss=0.2009, simple_loss=0.2944, pruned_loss=0.0537, over 4805.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2735, pruned_loss=0.04743, over 939616.88 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:58,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=180560.0, ans=0.125 +2024-07-28 16:52:16,695 INFO [train.py:1114] (1/4) Epoch 14, batch 2550, loss[loss=0.1804, simple_loss=0.2707, pruned_loss=0.04505, over 4798.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2739, pruned_loss=0.04757, over 939278.18 frames. ], batch size: 11, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:52:18,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=180586.66666666666, ans=0.125 +2024-07-28 16:52:22,387 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.82 vs. limit=15.0 +2024-07-28 16:52:23,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=180600.0, ans=0.125 +2024-07-28 16:52:30,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=180600.0, ans=10.0 +2024-07-28 16:52:31,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=180613.33333333334, ans=0.2 +2024-07-28 16:52:34,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=180613.33333333334, ans=0.0 +2024-07-28 16:52:48,645 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.545e+01 6.227e+01 6.776e+01 1.046e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 16:52:49,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.78 vs. limit=15.0 +2024-07-28 16:52:51,355 INFO [train.py:1114] (1/4) Epoch 14, batch 2600, loss[loss=0.1769, simple_loss=0.2695, pruned_loss=0.04212, over 4890.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2741, pruned_loss=0.04824, over 938360.86 frames. 
], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:52:51,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=180653.33333333334, ans=0.125 +2024-07-28 16:53:01,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180666.66666666666, ans=0.125 +2024-07-28 16:53:03,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180666.66666666666, ans=0.1 +2024-07-28 16:53:08,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=180680.0, ans=0.125 +2024-07-28 16:53:13,760 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:53:20,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.87 vs. limit=10.0 +2024-07-28 16:53:21,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180706.66666666666, ans=0.1 +2024-07-28 16:53:22,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.52 vs. limit=15.0 +2024-07-28 16:53:25,230 INFO [train.py:1114] (1/4) Epoch 14, batch 2650, loss[loss=0.1996, simple_loss=0.289, pruned_loss=0.05506, over 4625.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.274, pruned_loss=0.04823, over 940070.94 frames. ], batch size: 16, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:53:26,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180720.0, ans=0.125 +2024-07-28 16:53:29,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. limit=6.0 +2024-07-28 16:53:36,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-07-28 16:53:41,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=180746.66666666666, ans=0.1 +2024-07-28 16:53:42,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180746.66666666666, ans=0.125 +2024-07-28 16:53:47,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180760.0, ans=0.1 +2024-07-28 16:53:48,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.91 vs. limit=15.0 +2024-07-28 16:53:56,284 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.582e+01 5.983e+01 6.716e+01 1.150e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 16:53:57,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=180773.33333333334, ans=0.125 +2024-07-28 16:53:58,982 INFO [train.py:1114] (1/4) Epoch 14, batch 2700, loss[loss=0.1553, simple_loss=0.2527, pruned_loss=0.02892, over 4741.00 frames. 
], tot_loss[loss=0.1851, simple_loss=0.2742, pruned_loss=0.04797, over 939951.52 frames. ], batch size: 14, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:54:21,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=180826.66666666666, ans=0.0 +2024-07-28 16:54:31,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.71 vs. limit=6.0 +2024-07-28 16:54:32,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.66 vs. limit=15.0 +2024-07-28 16:54:32,656 INFO [train.py:1114] (1/4) Epoch 14, batch 2750, loss[loss=0.1846, simple_loss=0.2717, pruned_loss=0.0487, over 4703.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2733, pruned_loss=0.0476, over 939802.63 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:54:40,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.00 vs. limit=22.5 +2024-07-28 16:54:48,620 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:54:48,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180880.0, ans=0.1 +2024-07-28 16:54:51,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=180893.33333333334, ans=0.07 +2024-07-28 16:55:01,990 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:55:03,428 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.739e+01 6.573e+01 7.646e+01 1.098e+02, threshold=1.315e+02, percent-clipped=0.0 +2024-07-28 16:55:06,152 INFO [train.py:1114] (1/4) Epoch 14, batch 2800, loss[loss=0.2325, simple_loss=0.3071, pruned_loss=0.07895, over 3291.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2745, pruned_loss=0.04835, over 937412.50 frames. ], batch size: 35, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:55:09,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=180920.0, ans=0.95 +2024-07-28 16:55:10,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180920.0, ans=0.125 +2024-07-28 16:55:15,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.35 vs. 
limit=15.0 +2024-07-28 16:55:30,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=180960.0, ans=0.0 +2024-07-28 16:55:31,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=180960.0, ans=0.125 +2024-07-28 16:55:32,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=180960.0, ans=0.0 +2024-07-28 16:55:38,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=180973.33333333334, ans=0.125 +2024-07-28 16:55:42,082 INFO [train.py:1114] (1/4) Epoch 14, batch 2850, loss[loss=0.1976, simple_loss=0.2923, pruned_loss=0.05145, over 4963.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2748, pruned_loss=0.04794, over 935634.24 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:55:56,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=181013.33333333334, ans=0.0 +2024-07-28 16:56:12,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=181040.0, ans=0.125 +2024-07-28 16:56:15,625 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.822e+01 6.351e+01 7.357e+01 1.031e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 16:56:17,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=15.0 +2024-07-28 16:56:18,127 INFO [train.py:1114] (1/4) Epoch 14, batch 2900, loss[loss=0.1812, simple_loss=0.2718, pruned_loss=0.04529, over 4828.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2756, pruned_loss=0.04784, over 939467.65 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:56:18,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=181053.33333333334, ans=0.125 +2024-07-28 16:56:28,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=181053.33333333334, ans=0.0 +2024-07-28 16:56:47,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=181093.33333333334, ans=0.025 +2024-07-28 16:57:04,975 INFO [train.py:1114] (1/4) Epoch 14, batch 2950, loss[loss=0.1827, simple_loss=0.2684, pruned_loss=0.04852, over 4711.00 frames. ], tot_loss[loss=0.185, simple_loss=0.274, pruned_loss=0.04799, over 938735.39 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:57:15,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=181120.0, ans=0.125 +2024-07-28 16:57:19,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181133.33333333334, ans=0.125 +2024-07-28 16:57:55,634 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.574e+01 6.305e+01 7.129e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:57:58,335 INFO [train.py:1114] (1/4) Epoch 14, batch 3000, loss[loss=0.1586, simple_loss=0.2486, pruned_loss=0.03434, over 4764.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2725, pruned_loss=0.04714, over 938253.72 frames. 
], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:57:58,336 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 16:58:13,903 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.6234, 4.7671, 4.6855, 5.3515], device='cuda:1') +2024-07-28 16:58:17,163 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1652, simple_loss=0.2685, pruned_loss=0.03098, over 944034.00 frames. +2024-07-28 16:58:17,164 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 16:58:18,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=181186.66666666666, ans=0.04949747468305833 +2024-07-28 16:58:21,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=181186.66666666666, ans=10.0 +2024-07-28 16:58:31,872 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=181200.0, ans=0.2 +2024-07-28 16:58:34,712 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.58 vs. limit=10.0 +2024-07-28 16:58:35,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=181213.33333333334, ans=0.0 +2024-07-28 16:58:38,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.10 vs. limit=22.5 +2024-07-28 16:58:44,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=181226.66666666666, ans=0.05 +2024-07-28 16:59:00,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=181240.0, ans=0.05 +2024-07-28 16:59:00,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=181240.0, ans=0.125 +2024-07-28 16:59:02,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=181240.0, ans=0.125 +2024-07-28 16:59:04,085 INFO [train.py:1114] (1/4) Epoch 14, batch 3050, loss[loss=0.1674, simple_loss=0.2599, pruned_loss=0.03748, over 4628.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2745, pruned_loss=0.04791, over 937334.06 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:59:13,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=181266.66666666666, ans=0.0 +2024-07-28 16:59:14,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.35 vs. 
limit=6.0 +2024-07-28 16:59:24,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=181280.0, ans=0.2 +2024-07-28 16:59:27,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=181280.0, ans=0.125 +2024-07-28 16:59:29,000 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:59:41,675 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.765e+01 6.488e+01 7.325e+01 1.172e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 16:59:47,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.97 vs. limit=15.0 +2024-07-28 16:59:48,315 INFO [train.py:1114] (1/4) Epoch 14, batch 3100, loss[loss=0.2065, simple_loss=0.2967, pruned_loss=0.05816, over 4611.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2739, pruned_loss=0.04778, over 938497.20 frames. ], batch size: 16, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 16:59:52,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.17 vs. limit=22.5 +2024-07-28 17:00:49,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181346.66666666666, ans=0.1 +2024-07-28 17:00:52,707 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.37 vs. limit=22.5 +2024-07-28 17:00:55,431 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.77 vs. limit=22.5 +2024-07-28 17:01:02,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=181360.0, ans=0.125 +2024-07-28 17:01:10,553 INFO [train.py:1114] (1/4) Epoch 14, batch 3150, loss[loss=0.1769, simple_loss=0.2642, pruned_loss=0.04482, over 4605.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2728, pruned_loss=0.04685, over 938753.93 frames. ], batch size: 17, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:01:20,607 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:01:20,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181400.0, ans=0.0 +2024-07-28 17:01:22,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.62 vs. limit=15.0 +2024-07-28 17:01:34,334 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:01:37,813 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.55 vs. 
limit=15.0 +2024-07-28 17:01:42,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=181426.66666666666, ans=0.04949747468305833 +2024-07-28 17:01:48,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=181440.0, ans=0.0 +2024-07-28 17:01:50,901 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.747e+01 6.201e+01 6.953e+01 1.061e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 17:01:52,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=181440.0, ans=0.125 +2024-07-28 17:01:53,630 INFO [train.py:1114] (1/4) Epoch 14, batch 3200, loss[loss=0.1929, simple_loss=0.2751, pruned_loss=0.05537, over 4812.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2728, pruned_loss=0.04724, over 940013.66 frames. ], batch size: 13, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:02:26,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=181506.66666666666, ans=0.0 +2024-07-28 17:02:28,096 INFO [train.py:1114] (1/4) Epoch 14, batch 3250, loss[loss=0.2041, simple_loss=0.2946, pruned_loss=0.05679, over 4929.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.274, pruned_loss=0.04768, over 940833.54 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:02:35,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.59 vs. limit=22.5 +2024-07-28 17:02:49,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.02 vs. limit=10.0 +2024-07-28 17:02:51,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=181546.66666666666, ans=0.025 +2024-07-28 17:03:17,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.561e+01 6.069e+01 6.754e+01 1.054e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 17:04:11,095 INFO [train.py:1114] (1/4) Epoch 14, batch 3300, loss[loss=0.2304, simple_loss=0.3061, pruned_loss=0.07737, over 4711.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2733, pruned_loss=0.0478, over 940675.34 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:04:15,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.52 vs. limit=15.0 +2024-07-28 17:04:31,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181600.0, ans=0.0 +2024-07-28 17:04:40,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=181626.66666666666, ans=0.2 +2024-07-28 17:04:53,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=181653.33333333334, ans=0.125 +2024-07-28 17:04:54,331 INFO [train.py:1114] (1/4) Epoch 14, batch 3350, loss[loss=0.2092, simple_loss=0.3116, pruned_loss=0.05343, over 4605.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2738, pruned_loss=0.04762, over 938806.59 frames. 
], batch size: 17, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:05:06,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181666.66666666666, ans=0.125 +2024-07-28 17:05:08,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=181680.0, ans=0.125 +2024-07-28 17:05:09,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=181680.0, ans=0.0 +2024-07-28 17:05:11,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=181680.0, ans=0.125 +2024-07-28 17:05:13,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=181680.0, ans=0.0 +2024-07-28 17:05:19,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.72 vs. limit=15.0 +2024-07-28 17:05:20,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.48 vs. limit=22.5 +2024-07-28 17:05:23,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181706.66666666666, ans=0.1 +2024-07-28 17:05:25,877 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.702e+01 6.286e+01 7.207e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 17:05:29,881 INFO [train.py:1114] (1/4) Epoch 14, batch 3400, loss[loss=0.1427, simple_loss=0.2258, pruned_loss=0.02983, over 4820.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.048, over 937947.63 frames. ], batch size: 11, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:05:47,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.54 vs. limit=15.0 +2024-07-28 17:05:52,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.92 vs. limit=15.0 +2024-07-28 17:05:52,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.59 vs. limit=15.0 +2024-07-28 17:05:57,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=181760.0, ans=0.015 +2024-07-28 17:06:07,188 INFO [train.py:1114] (1/4) Epoch 14, batch 3450, loss[loss=0.2005, simple_loss=0.288, pruned_loss=0.05651, over 4719.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2746, pruned_loss=0.04826, over 938218.23 frames. 
], batch size: 19, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:06:22,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181813.33333333334, ans=0.1 +2024-07-28 17:06:33,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=181826.66666666666, ans=0.2 +2024-07-28 17:06:35,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=181826.66666666666, ans=0.0 +2024-07-28 17:06:41,514 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.614e+01 6.099e+01 6.810e+01 1.220e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:06:43,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=181840.0, ans=0.2 +2024-07-28 17:06:44,268 INFO [train.py:1114] (1/4) Epoch 14, batch 3500, loss[loss=0.1532, simple_loss=0.2376, pruned_loss=0.0344, over 4935.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04773, over 938937.52 frames. ], batch size: 12, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:06:44,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.30 vs. limit=15.0 +2024-07-28 17:07:07,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=181893.33333333334, ans=0.125 +2024-07-28 17:07:09,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=181893.33333333334, ans=0.2 +2024-07-28 17:07:10,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=181906.66666666666, ans=0.125 +2024-07-28 17:07:11,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181906.66666666666, ans=0.1 +2024-07-28 17:07:12,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181906.66666666666, ans=0.125 +2024-07-28 17:07:15,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=181906.66666666666, ans=0.125 +2024-07-28 17:07:16,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=181906.66666666666, ans=0.07 +2024-07-28 17:07:17,702 INFO [train.py:1114] (1/4) Epoch 14, batch 3550, loss[loss=0.1773, simple_loss=0.2789, pruned_loss=0.03779, over 4667.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2722, pruned_loss=0.04718, over 939347.74 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:07:31,137 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.38 vs. 
limit=12.0 +2024-07-28 17:07:44,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=181973.33333333334, ans=0.04949747468305833 +2024-07-28 17:07:47,923 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.433e+01 6.095e+01 6.753e+01 1.044e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 17:07:50,514 INFO [train.py:1114] (1/4) Epoch 14, batch 3600, loss[loss=0.1719, simple_loss=0.2636, pruned_loss=0.04012, over 4965.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2713, pruned_loss=0.04651, over 940629.20 frames. ], batch size: 13, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:07:52,580 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:07:53,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181986.66666666666, ans=0.1 +2024-07-28 17:08:03,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.90 vs. limit=10.0 +2024-07-28 17:08:03,992 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-07-28 17:08:12,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=182026.66666666666, ans=0.0 +2024-07-28 17:08:15,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=182026.66666666666, ans=0.0 +2024-07-28 17:08:15,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=182026.66666666666, ans=0.125 +2024-07-28 17:08:26,139 INFO [train.py:1114] (1/4) Epoch 14, batch 3650, loss[loss=0.2237, simple_loss=0.3136, pruned_loss=0.06691, over 4895.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2716, pruned_loss=0.04691, over 941076.24 frames. ], batch size: 15, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:08:30,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182053.33333333334, ans=0.125 +2024-07-28 17:08:37,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182066.66666666666, ans=0.125 +2024-07-28 17:08:39,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.55 vs. limit=22.5 +2024-07-28 17:08:39,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=182080.0, ans=0.0 +2024-07-28 17:08:48,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=182093.33333333334, ans=0.0 +2024-07-28 17:08:51,947 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.87 vs. 
limit=15.0 +2024-07-28 17:08:57,483 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.626e+01 5.717e+01 6.299e+01 7.471e+01 1.089e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 17:08:59,530 INFO [train.py:1114] (1/4) Epoch 14, batch 3700, loss[loss=0.1839, simple_loss=0.274, pruned_loss=0.04688, over 4927.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2711, pruned_loss=0.04669, over 941663.11 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:09:29,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=182146.66666666666, ans=0.2 +2024-07-28 17:09:31,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=182146.66666666666, ans=0.125 +2024-07-28 17:09:48,833 INFO [train.py:1114] (1/4) Epoch 14, batch 3750, loss[loss=0.1757, simple_loss=0.2464, pruned_loss=0.0525, over 4817.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2708, pruned_loss=0.04662, over 943392.74 frames. ], batch size: 11, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:10:00,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=182200.0, ans=0.025 +2024-07-28 17:10:08,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.40 vs. limit=15.0 +2024-07-28 17:10:13,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=182226.66666666666, ans=0.0 +2024-07-28 17:10:20,836 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.520e+01 5.513e+01 6.095e+01 6.820e+01 9.830e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 17:10:22,243 INFO [train.py:1114] (1/4) Epoch 14, batch 3800, loss[loss=0.201, simple_loss=0.2946, pruned_loss=0.05374, over 4810.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2701, pruned_loss=0.0465, over 941904.96 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:10:35,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=182280.0, ans=0.025 +2024-07-28 17:10:37,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182280.0, ans=0.0 +2024-07-28 17:10:41,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182293.33333333334, ans=0.125 +2024-07-28 17:10:43,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.80 vs. limit=15.0 +2024-07-28 17:10:47,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=182306.66666666666, ans=0.0 +2024-07-28 17:10:54,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=182320.0, ans=0.125 +2024-07-28 17:10:55,057 INFO [train.py:1114] (1/4) Epoch 14, batch 3850, loss[loss=0.2141, simple_loss=0.3053, pruned_loss=0.06148, over 4655.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2701, pruned_loss=0.04622, over 942349.46 frames. 
], batch size: 16, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:11:05,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=182333.33333333334, ans=0.0 +2024-07-28 17:11:13,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=182346.66666666666, ans=0.0 +2024-07-28 17:11:15,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=182360.0, ans=0.125 +2024-07-28 17:11:17,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=182360.0, ans=0.125 +2024-07-28 17:11:23,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0 +2024-07-28 17:11:29,000 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.612e+01 6.160e+01 6.955e+01 1.058e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 17:11:30,353 INFO [train.py:1114] (1/4) Epoch 14, batch 3900, loss[loss=0.1792, simple_loss=0.2668, pruned_loss=0.04575, over 4821.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2711, pruned_loss=0.04631, over 942635.36 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:11:30,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=182386.66666666666, ans=0.0 +2024-07-28 17:11:30,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=182386.66666666666, ans=0.025 +2024-07-28 17:11:33,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=182386.66666666666, ans=0.125 +2024-07-28 17:11:43,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=182413.33333333334, ans=0.2 +2024-07-28 17:11:45,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=182413.33333333334, ans=0.2 +2024-07-28 17:11:54,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=182426.66666666666, ans=0.125 +2024-07-28 17:11:56,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=182426.66666666666, ans=0.125 +2024-07-28 17:12:06,180 INFO [train.py:1114] (1/4) Epoch 14, batch 3950, loss[loss=0.1884, simple_loss=0.2755, pruned_loss=0.05069, over 4832.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2708, pruned_loss=0.04622, over 944475.06 frames. 
], batch size: 16, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:12:10,709 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:12:29,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182493.33333333334, ans=0.0 +2024-07-28 17:12:36,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182493.33333333334, ans=0.125 +2024-07-28 17:12:44,489 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.506e+01 6.184e+01 7.058e+01 1.004e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 17:12:54,272 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.51 vs. limit=10.0 +2024-07-28 17:12:54,530 INFO [train.py:1114] (1/4) Epoch 14, batch 4000, loss[loss=0.1799, simple_loss=0.2634, pruned_loss=0.04821, over 4772.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2715, pruned_loss=0.04708, over 940731.02 frames. ], batch size: 12, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:14:39,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=182573.33333333334, ans=0.0 +2024-07-28 17:14:57,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=182573.33333333334, ans=0.2 +2024-07-28 17:14:58,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=182573.33333333334, ans=0.02 +2024-07-28 17:14:59,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.44 vs. limit=22.5 +2024-07-28 17:15:01,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=182586.66666666666, ans=0.025 +2024-07-28 17:15:02,339 INFO [train.py:1114] (1/4) Epoch 14, batch 4050, loss[loss=0.234, simple_loss=0.3109, pruned_loss=0.07849, over 3482.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.272, pruned_loss=0.04741, over 939594.24 frames. ], batch size: 36, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:15:46,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=182613.33333333334, ans=0.125 +2024-07-28 17:15:47,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182613.33333333334, ans=0.125 +2024-07-28 17:15:52,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=182613.33333333334, ans=0.2 +2024-07-28 17:15:53,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=182626.66666666666, ans=0.125 +2024-07-28 17:15:58,766 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. 
limit=6.0 +2024-07-28 17:16:04,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182640.0, ans=0.1 +2024-07-28 17:16:07,185 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.211e+01 7.334e+01 1.251e+02, threshold=1.242e+02, percent-clipped=2.0 +2024-07-28 17:16:08,551 INFO [train.py:1114] (1/4) Epoch 14, batch 4100, loss[loss=0.2038, simple_loss=0.3005, pruned_loss=0.0535, over 4906.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2726, pruned_loss=0.04735, over 938235.17 frames. ], batch size: 15, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:16:10,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=182653.33333333334, ans=0.125 +2024-07-28 17:16:16,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=182666.66666666666, ans=0.0 +2024-07-28 17:16:19,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182666.66666666666, ans=0.1 +2024-07-28 17:16:37,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=182693.33333333334, ans=0.0 +2024-07-28 17:16:43,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182706.66666666666, ans=0.125 +2024-07-28 17:16:46,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182706.66666666666, ans=0.125 +2024-07-28 17:16:49,307 INFO [train.py:1114] (1/4) Epoch 14, batch 4150, loss[loss=0.1593, simple_loss=0.2469, pruned_loss=0.03589, over 4823.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2721, pruned_loss=0.04717, over 937892.19 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:16:57,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182733.33333333334, ans=0.1 +2024-07-28 17:17:46,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182746.66666666666, ans=0.1 +2024-07-28 17:17:52,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=182746.66666666666, ans=0.125 +2024-07-28 17:18:03,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182746.66666666666, ans=0.125 +2024-07-28 17:18:33,356 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:18:39,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182773.33333333334, ans=0.1 +2024-07-28 17:18:48,504 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.631e+01 6.207e+01 7.543e+01 1.114e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 17:18:49,844 INFO [train.py:1114] (1/4) Epoch 14, batch 4200, loss[loss=0.1891, simple_loss=0.2886, pruned_loss=0.0448, over 4888.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2723, pruned_loss=0.04725, over 939215.47 frames. 
], batch size: 15, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:18:59,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=182786.66666666666, ans=0.125 +2024-07-28 17:18:59,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=182786.66666666666, ans=0.125 +2024-07-28 17:19:29,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=182813.33333333334, ans=0.125 +2024-07-28 17:19:32,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=182813.33333333334, ans=0.125 +2024-07-28 17:19:51,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=182840.0, ans=0.125 +2024-07-28 17:19:52,369 INFO [train.py:1114] (1/4) Epoch 14, batch 4250, loss[loss=0.1545, simple_loss=0.2416, pruned_loss=0.03369, over 4632.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2724, pruned_loss=0.04694, over 940472.34 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:19:59,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.74 vs. limit=22.5 +2024-07-28 17:20:01,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.17 vs. limit=15.0 +2024-07-28 17:20:12,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=182880.0, ans=0.125 +2024-07-28 17:20:18,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=182893.33333333334, ans=0.125 +2024-07-28 17:20:19,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=182893.33333333334, ans=0.2 +2024-07-28 17:20:33,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.602e+01 6.327e+01 7.435e+01 1.299e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-28 17:20:34,464 INFO [train.py:1114] (1/4) Epoch 14, batch 4300, loss[loss=0.1613, simple_loss=0.2583, pruned_loss=0.03216, over 4758.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2715, pruned_loss=0.04671, over 939577.56 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:20:35,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=182920.0, ans=0.125 +2024-07-28 17:20:37,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=182920.0, ans=0.125 +2024-07-28 17:21:00,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=182960.0, ans=0.125 +2024-07-28 17:21:02,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=182973.33333333334, ans=0.125 +2024-07-28 17:21:09,266 INFO [train.py:1114] (1/4) Epoch 14, batch 4350, loss[loss=0.2178, simple_loss=0.3094, pruned_loss=0.06312, over 4760.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2724, pruned_loss=0.04716, over 940511.71 frames. 
], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:30,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183026.66666666666, ans=0.125 +2024-07-28 17:21:39,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=183040.0, ans=0.0 +2024-07-28 17:21:44,666 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.657e+01 6.269e+01 7.008e+01 1.088e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:21:46,159 INFO [train.py:1114] (1/4) Epoch 14, batch 4400, loss[loss=0.1747, simple_loss=0.2714, pruned_loss=0.03899, over 4811.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2725, pruned_loss=0.04711, over 940362.14 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:48,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183053.33333333334, ans=0.1 +2024-07-28 17:21:50,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=183053.33333333334, ans=0.125 +2024-07-28 17:22:11,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=183093.33333333334, ans=0.125 +2024-07-28 17:22:22,139 INFO [train.py:1114] (1/4) Epoch 14, batch 4450, loss[loss=0.1663, simple_loss=0.2486, pruned_loss=0.04202, over 4943.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2728, pruned_loss=0.04732, over 938691.54 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:22:23,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183120.0, ans=0.125 +2024-07-28 17:22:25,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=183120.0, ans=0.0 +2024-07-28 17:22:27,478 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.43 vs. limit=6.0 +2024-07-28 17:22:27,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=183120.0, ans=0.0 +2024-07-28 17:22:35,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=183146.66666666666, ans=0.0 +2024-07-28 17:26:48,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.470e+01 5.943e+01 6.622e+01 1.092e+02, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 17:26:49,860 INFO [train.py:1114] (1/4) Epoch 14, batch 4500, loss[loss=0.1817, simple_loss=0.2734, pruned_loss=0.04496, over 4745.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2739, pruned_loss=0.04735, over 937771.41 frames. 
], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:26:56,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=183186.66666666666, ans=0.125 +2024-07-28 17:26:59,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183186.66666666666, ans=0.1 +2024-07-28 17:26:59,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=183186.66666666666, ans=0.0 +2024-07-28 17:27:00,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-07-28 17:27:27,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=183240.0, ans=0.0 +2024-07-28 17:27:28,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=183253.33333333334, ans=0.0 +2024-07-28 17:27:28,602 INFO [train.py:1114] (1/4) Epoch 14, batch 4550, loss[loss=0.1675, simple_loss=0.244, pruned_loss=0.04545, over 4900.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2737, pruned_loss=0.04737, over 939593.69 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:27:37,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=183266.66666666666, ans=0.125 +2024-07-28 17:28:01,172 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.634e+01 6.361e+01 7.770e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 17:28:02,545 INFO [train.py:1114] (1/4) Epoch 14, batch 4600, loss[loss=0.2, simple_loss=0.2901, pruned_loss=0.05497, over 4459.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2733, pruned_loss=0.04733, over 938046.29 frames. ], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:04,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=183320.0, ans=0.0 +2024-07-28 17:28:06,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=183320.0, ans=0.125 +2024-07-28 17:28:07,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183320.0, ans=0.1 +2024-07-28 17:28:09,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=183333.33333333334, ans=0.0 +2024-07-28 17:28:16,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=183346.66666666666, ans=0.0 +2024-07-28 17:28:21,925 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=15.0 +2024-07-28 17:28:24,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=183360.0, ans=0.125 +2024-07-28 17:28:33,257 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.65 vs. 
limit=22.5 +2024-07-28 17:28:35,613 INFO [train.py:1114] (1/4) Epoch 14, batch 4650, loss[loss=0.1977, simple_loss=0.2883, pruned_loss=0.05353, over 4847.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2737, pruned_loss=0.04748, over 939913.24 frames. ], batch size: 16, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:41,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183400.0, ans=0.125 +2024-07-28 17:28:46,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=183400.0, ans=0.125 +2024-07-28 17:28:50,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=183413.33333333334, ans=0.2 +2024-07-28 17:28:51,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=183413.33333333334, ans=0.2 +2024-07-28 17:28:53,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-07-28 17:29:02,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=183426.66666666666, ans=0.05 +2024-07-28 17:29:05,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183440.0, ans=0.1 +2024-07-28 17:29:07,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=183440.0, ans=0.125 +2024-07-28 17:29:09,679 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.803e+01 6.288e+01 7.232e+01 1.102e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 17:29:09,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183440.0, ans=0.125 +2024-07-28 17:29:10,996 INFO [train.py:1114] (1/4) Epoch 14, batch 4700, loss[loss=0.1851, simple_loss=0.2586, pruned_loss=0.05583, over 4714.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2732, pruned_loss=0.04726, over 937214.09 frames. ], batch size: 11, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:11,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=183453.33333333334, ans=0.125 +2024-07-28 17:29:18,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=183466.66666666666, ans=0.025 +2024-07-28 17:29:19,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. 
limit=15.0 +2024-07-28 17:29:20,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183466.66666666666, ans=0.1 +2024-07-28 17:29:29,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=183480.0, ans=22.5 +2024-07-28 17:29:29,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=183480.0, ans=0.025 +2024-07-28 17:29:38,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=183506.66666666666, ans=0.0 +2024-07-28 17:29:45,321 INFO [train.py:1114] (1/4) Epoch 14, batch 4750, loss[loss=0.2251, simple_loss=0.2869, pruned_loss=0.08169, over 4464.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2738, pruned_loss=0.04781, over 935705.83 frames. ], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:46,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.37 vs. limit=22.5 +2024-07-28 17:29:54,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=15.0 +2024-07-28 17:30:01,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183546.66666666666, ans=0.1 +2024-07-28 17:30:05,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=183560.0, ans=0.125 +2024-07-28 17:30:17,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.592e+01 6.256e+01 7.365e+01 1.010e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 17:30:19,278 INFO [train.py:1114] (1/4) Epoch 14, batch 4800, loss[loss=0.2119, simple_loss=0.312, pruned_loss=0.05591, over 4689.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2728, pruned_loss=0.04767, over 933063.57 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:30:32,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=183613.33333333334, ans=0.025 +2024-07-28 17:30:54,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183653.33333333334, ans=0.125 +2024-07-28 17:30:54,676 INFO [train.py:1114] (1/4) Epoch 14, batch 4850, loss[loss=0.1839, simple_loss=0.2805, pruned_loss=0.0437, over 4727.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2725, pruned_loss=0.04751, over 932330.11 frames. ], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:30:55,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. 
limit=10.0 +2024-07-28 17:30:56,068 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:30:58,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=183653.33333333334, ans=0.0 +2024-07-28 17:31:06,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=183666.66666666666, ans=0.025 +2024-07-28 17:31:14,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=183680.0, ans=0.125 +2024-07-28 17:31:16,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183693.33333333334, ans=0.125 +2024-07-28 17:31:18,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=12.0 +2024-07-28 17:31:21,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183693.33333333334, ans=0.125 +2024-07-28 17:31:28,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=183706.66666666666, ans=0.125 +2024-07-28 17:31:28,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=183706.66666666666, ans=0.2 +2024-07-28 17:31:30,320 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.421e+01 5.850e+01 6.499e+01 1.354e+02, threshold=1.170e+02, percent-clipped=1.0 +2024-07-28 17:31:31,703 INFO [train.py:1114] (1/4) Epoch 14, batch 4900, loss[loss=0.1654, simple_loss=0.2561, pruned_loss=0.03739, over 4752.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2728, pruned_loss=0.04747, over 934025.46 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:32,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0 +2024-07-28 17:31:39,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.13 vs. 
limit=10.0 +2024-07-28 17:31:41,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183733.33333333334, ans=0.1 +2024-07-28 17:31:41,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=183733.33333333334, ans=0.0 +2024-07-28 17:31:42,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=183733.33333333334, ans=0.125 +2024-07-28 17:31:46,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=183746.66666666666, ans=0.125 +2024-07-28 17:31:48,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=183746.66666666666, ans=0.2 +2024-07-28 17:31:49,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183746.66666666666, ans=0.125 +2024-07-28 17:31:49,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.06 vs. limit=15.0 +2024-07-28 17:31:52,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183760.0, ans=0.1 +2024-07-28 17:31:55,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=183760.0, ans=0.125 +2024-07-28 17:32:00,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=183773.33333333334, ans=0.1 +2024-07-28 17:32:06,232 INFO [train.py:1114] (1/4) Epoch 14, batch 4950, loss[loss=0.2295, simple_loss=0.3087, pruned_loss=0.07513, over 3347.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2739, pruned_loss=0.04807, over 931459.83 frames. ], batch size: 36, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:10,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183786.66666666666, ans=0.1 +2024-07-28 17:32:22,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=183813.33333333334, ans=0.0 +2024-07-28 17:32:26,098 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:32:38,286 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+01 5.530e+01 6.017e+01 6.862e+01 9.810e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 17:32:39,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=183853.33333333334, ans=0.0 +2024-07-28 17:32:39,706 INFO [train.py:1114] (1/4) Epoch 14, batch 5000, loss[loss=0.1736, simple_loss=0.2855, pruned_loss=0.03083, over 4645.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2738, pruned_loss=0.04773, over 935329.25 frames. 
], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:41,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=183853.33333333334, ans=0.0 +2024-07-28 17:32:45,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=183866.66666666666, ans=0.015 +2024-07-28 17:32:48,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 17:32:51,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=183866.66666666666, ans=0.2 +2024-07-28 17:32:56,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.69 vs. limit=22.5 +2024-07-28 17:33:12,763 INFO [train.py:1114] (1/4) Epoch 14, batch 5050, loss[loss=0.1495, simple_loss=0.2387, pruned_loss=0.03012, over 4864.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.273, pruned_loss=0.04721, over 937818.07 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:17,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=183920.0, ans=0.5 +2024-07-28 17:33:19,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183933.33333333334, ans=0.1 +2024-07-28 17:33:22,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183933.33333333334, ans=0.1 +2024-07-28 17:33:22,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=183933.33333333334, ans=0.125 +2024-07-28 17:33:24,514 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:33:26,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.85 vs. limit=15.0 +2024-07-28 17:33:28,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183946.66666666666, ans=0.1 +2024-07-28 17:33:29,582 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.24 vs. limit=15.0 +2024-07-28 17:33:42,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183973.33333333334, ans=0.1 +2024-07-28 17:33:45,596 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.607e+01 6.225e+01 6.953e+01 1.020e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 17:33:47,327 INFO [train.py:1114] (1/4) Epoch 14, batch 5100, loss[loss=0.1627, simple_loss=0.2487, pruned_loss=0.03833, over 4789.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.274, pruned_loss=0.0482, over 935338.66 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:48,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. 
limit=15.0 +2024-07-28 17:33:51,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=183986.66666666666, ans=15.0 +2024-07-28 17:33:55,252 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:34:06,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=184013.33333333334, ans=0.125 +2024-07-28 17:34:17,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184040.0, ans=0.1 +2024-07-28 17:34:19,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184040.0, ans=0.0 +2024-07-28 17:34:19,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184040.0, ans=0.125 +2024-07-28 17:34:21,091 INFO [train.py:1114] (1/4) Epoch 14, batch 5150, loss[loss=0.2537, simple_loss=0.3387, pruned_loss=0.0844, over 4851.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2745, pruned_loss=0.04794, over 936210.85 frames. ], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:34:21,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=184053.33333333334, ans=0.125 +2024-07-28 17:34:28,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184053.33333333334, ans=0.125 +2024-07-28 17:34:29,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184066.66666666666, ans=0.125 +2024-07-28 17:34:31,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.01 vs. limit=6.0 +2024-07-28 17:34:33,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=184066.66666666666, ans=0.07 +2024-07-28 17:34:50,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.41 vs. limit=15.0 +2024-07-28 17:34:54,937 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.666e+01 6.187e+01 7.169e+01 1.415e+02, threshold=1.237e+02, percent-clipped=1.0 +2024-07-28 17:34:56,316 INFO [train.py:1114] (1/4) Epoch 14, batch 5200, loss[loss=0.214, simple_loss=0.3154, pruned_loss=0.05626, over 4660.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2742, pruned_loss=0.04765, over 935936.16 frames. 
], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:35:05,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=184133.33333333334, ans=0.125 +2024-07-28 17:35:06,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184133.33333333334, ans=0.0 +2024-07-28 17:35:16,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184146.66666666666, ans=0.1 +2024-07-28 17:35:25,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=184173.33333333334, ans=0.125 +2024-07-28 17:35:44,101 INFO [train.py:1114] (1/4) Epoch 14, batch 5250, loss[loss=0.1558, simple_loss=0.2485, pruned_loss=0.03158, over 4899.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2738, pruned_loss=0.04775, over 935464.30 frames. ], batch size: 13, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:36:04,453 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:36:06,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=184186.66666666666, ans=0.2 +2024-07-28 17:36:10,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184200.0, ans=0.125 +2024-07-28 17:36:19,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184213.33333333334, ans=0.125 +2024-07-28 17:36:35,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=184213.33333333334, ans=0.0 +2024-07-28 17:37:36,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184226.66666666666, ans=0.0 +2024-07-28 17:37:41,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=184226.66666666666, ans=0.0 +2024-07-28 17:37:42,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=184240.0, ans=0.125 +2024-07-28 17:37:44,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=184240.0, ans=0.125 +2024-07-28 17:37:47,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=184240.0, ans=0.0 +2024-07-28 17:37:48,424 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.478e+01 6.210e+01 7.367e+01 1.027e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 17:37:49,750 INFO [train.py:1114] (1/4) Epoch 14, batch 5300, loss[loss=0.1957, simple_loss=0.2906, pruned_loss=0.05039, over 4630.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.273, pruned_loss=0.0477, over 933598.16 frames. ], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:37:54,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.96 vs. 
limit=12.0 +2024-07-28 17:37:55,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184266.66666666666, ans=0.1 +2024-07-28 17:38:03,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=184266.66666666666, ans=0.0 +2024-07-28 17:38:14,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=184293.33333333334, ans=0.125 +2024-07-28 17:38:14,267 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.09 vs. limit=6.0 +2024-07-28 17:38:18,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184293.33333333334, ans=0.125 +2024-07-28 17:38:26,461 INFO [train.py:1114] (1/4) Epoch 14, batch 5350, loss[loss=0.1857, simple_loss=0.2626, pruned_loss=0.05438, over 4523.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2738, pruned_loss=0.04796, over 936006.83 frames. ], batch size: 10, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:38:31,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=184320.0, ans=0.125 +2024-07-28 17:38:46,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184346.66666666666, ans=0.0 +2024-07-28 17:38:46,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184346.66666666666, ans=0.125 +2024-07-28 17:38:48,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=184360.0, ans=0.0 +2024-07-28 17:38:48,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=184360.0, ans=0.125 +2024-07-28 17:38:53,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184360.0, ans=0.125 +2024-07-28 17:39:01,373 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.641e+01 6.338e+01 7.374e+01 1.167e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 17:39:02,098 INFO [train.py:1114] (1/4) Epoch 14, batch 5400, loss[loss=0.1772, simple_loss=0.2794, pruned_loss=0.03747, over 4286.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2749, pruned_loss=0.04881, over 929959.54 frames. 
], batch size: 25, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:02,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184386.66666666666, ans=0.125 +2024-07-28 17:39:07,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=184386.66666666666, ans=0.0 +2024-07-28 17:39:26,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184426.66666666666, ans=0.125 +2024-07-28 17:39:27,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=184426.66666666666, ans=0.125 +2024-07-28 17:39:27,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=184426.66666666666, ans=0.125 +2024-07-28 17:39:32,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184440.0, ans=0.1 +2024-07-28 17:39:33,595 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:39:35,482 INFO [train.py:1114] (1/4) Epoch 14, batch 5450, loss[loss=0.1787, simple_loss=0.2548, pruned_loss=0.0513, over 4710.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2742, pruned_loss=0.04835, over 932712.61 frames. ], batch size: 11, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:47,739 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:39:51,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184480.0, ans=0.125 +2024-07-28 17:39:54,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184480.0, ans=0.1 +2024-07-28 17:39:56,882 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.86 vs. limit=10.0 +2024-07-28 17:40:01,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=184506.66666666666, ans=0.125 +2024-07-28 17:40:08,545 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.352e+01 5.587e+01 6.313e+01 7.261e+01 1.072e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 17:40:09,232 INFO [train.py:1114] (1/4) Epoch 14, batch 5500, loss[loss=0.185, simple_loss=0.2788, pruned_loss=0.04561, over 4287.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2731, pruned_loss=0.04791, over 930167.44 frames. 
], batch size: 26, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:14,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=184520.0, ans=0.0 +2024-07-28 17:40:22,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184546.66666666666, ans=0.1 +2024-07-28 17:40:39,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184573.33333333334, ans=0.125 +2024-07-28 17:40:43,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=184573.33333333334, ans=0.125 +2024-07-28 17:40:45,304 INFO [train.py:1114] (1/4) Epoch 14, batch 5550, loss[loss=0.1637, simple_loss=0.241, pruned_loss=0.0432, over 4712.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.0476, over 932435.99 frames. ], batch size: 12, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:41:01,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184613.33333333334, ans=0.125 +2024-07-28 17:41:12,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.39 vs. limit=10.0 +2024-07-28 17:41:19,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 6.017e+01 6.994e+01 8.294e+01 1.224e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-28 17:41:19,710 INFO [train.py:1114] (1/4) Epoch 14, batch 5600, loss[loss=0.1762, simple_loss=0.2743, pruned_loss=0.03905, over 4750.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2741, pruned_loss=0.0479, over 933993.55 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:41:27,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=184666.66666666666, ans=0.125 +2024-07-28 17:41:33,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=184680.0, ans=0.125 +2024-07-28 17:41:35,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=184680.0, ans=0.125 +2024-07-28 17:41:36,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=184680.0, ans=0.125 +2024-07-28 17:41:37,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=184680.0, ans=0.125 +2024-07-28 17:41:38,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.04 vs. limit=15.0 +2024-07-28 17:41:42,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=184693.33333333334, ans=0.2 +2024-07-28 17:41:54,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184706.66666666666, ans=0.125 +2024-07-28 17:42:53,630 INFO [train.py:1114] (1/4) Epoch 14, batch 5650, loss[loss=0.1759, simple_loss=0.2667, pruned_loss=0.04251, over 4467.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2731, pruned_loss=0.04749, over 936513.36 frames. 
], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:42:57,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=184720.0, ans=0.09899494936611666 +2024-07-28 17:43:07,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=184720.0, ans=0.025 +2024-07-28 17:43:23,266 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:43:34,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184773.33333333334, ans=0.125 +2024-07-28 17:43:35,138 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.575e+01 6.312e+01 7.151e+01 9.820e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 17:43:35,893 INFO [train.py:1114] (1/4) Epoch 14, batch 5700, loss[loss=0.1609, simple_loss=0.263, pruned_loss=0.02946, over 4694.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.04798, over 937799.06 frames. ], batch size: 13, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:43:47,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184800.0, ans=0.1 +2024-07-28 17:43:51,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=184800.0, ans=0.125 +2024-07-28 17:43:58,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184826.66666666666, ans=0.0 +2024-07-28 17:44:12,487 INFO [train.py:1114] (1/4) Epoch 14, batch 5750, loss[loss=0.2326, simple_loss=0.3198, pruned_loss=0.07277, over 4679.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2744, pruned_loss=0.04825, over 937733.29 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:44:14,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0 +2024-07-28 17:44:14,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-07-28 17:44:16,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=184853.33333333334, ans=0.0 +2024-07-28 17:44:17,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.15 vs. 
limit=15.0 +2024-07-28 17:44:47,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184893.33333333334, ans=0.125 +2024-07-28 17:44:47,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=184893.33333333334, ans=0.05 +2024-07-28 17:44:48,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184906.66666666666, ans=0.125 +2024-07-28 17:44:56,654 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.552e+01 6.040e+01 6.826e+01 9.653e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 17:44:57,317 INFO [train.py:1114] (1/4) Epoch 14, batch 5800, loss[loss=0.1911, simple_loss=0.2857, pruned_loss=0.04828, over 4721.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2744, pruned_loss=0.04802, over 936868.34 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:31,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184986.66666666666, ans=0.1 +2024-07-28 17:45:32,351 INFO [train.py:1114] (1/4) Epoch 14, batch 5850, loss[loss=0.217, simple_loss=0.3086, pruned_loss=0.06272, over 4485.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2738, pruned_loss=0.04762, over 937399.19 frames. ], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:46,155 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:45:49,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=185013.33333333334, ans=0.5 +2024-07-28 17:45:53,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185026.66666666666, ans=0.1 +2024-07-28 17:45:56,953 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=12.0 +2024-07-28 17:46:02,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=185040.0, ans=0.0 +2024-07-28 17:46:02,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=185040.0, ans=0.125 +2024-07-28 17:46:05,084 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.675e+01 6.318e+01 7.157e+01 1.040e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 17:46:05,801 INFO [train.py:1114] (1/4) Epoch 14, batch 5900, loss[loss=0.1781, simple_loss=0.2672, pruned_loss=0.04451, over 4681.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2741, pruned_loss=0.04789, over 938154.32 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:06,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.50 vs. limit=15.0 +2024-07-28 17:46:08,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. 
limit=15.0 +2024-07-28 17:46:11,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=15.0 +2024-07-28 17:46:15,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0 +2024-07-28 17:46:15,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=185066.66666666666, ans=0.0 +2024-07-28 17:46:18,900 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:46:24,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=185080.0, ans=0.2 +2024-07-28 17:46:30,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=185093.33333333334, ans=0.125 +2024-07-28 17:46:30,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185093.33333333334, ans=0.1 +2024-07-28 17:46:36,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185106.66666666666, ans=0.125 +2024-07-28 17:46:38,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=185106.66666666666, ans=0.125 +2024-07-28 17:46:39,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=185120.0, ans=0.0 +2024-07-28 17:46:40,163 INFO [train.py:1114] (1/4) Epoch 14, batch 5950, loss[loss=0.2004, simple_loss=0.2812, pruned_loss=0.05978, over 4697.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.273, pruned_loss=0.04736, over 939999.35 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:41,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=185120.0, ans=0.125 +2024-07-28 17:46:45,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=185120.0, ans=0.025 +2024-07-28 17:46:49,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=185133.33333333334, ans=0.0 +2024-07-28 17:46:53,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.49 vs. limit=10.0 +2024-07-28 17:46:55,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185146.66666666666, ans=0.0 +2024-07-28 17:46:56,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.48 vs. limit=15.0 +2024-07-28 17:47:00,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.15 vs. limit=10.0 +2024-07-28 17:47:03,594 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. 
limit=6.0 +2024-07-28 17:47:12,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.11 vs. limit=6.0 +2024-07-28 17:47:15,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=15.0 +2024-07-28 17:47:15,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185173.33333333334, ans=0.1 +2024-07-28 17:47:18,298 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.153e+01 5.664e+01 6.270e+01 7.000e+01 1.010e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:47:19,013 INFO [train.py:1114] (1/4) Epoch 14, batch 6000, loss[loss=0.205, simple_loss=0.2948, pruned_loss=0.05759, over 4381.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2722, pruned_loss=0.04727, over 937342.70 frames. ], batch size: 26, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:47:19,013 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 17:49:03,738 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.4917, 3.2334, 5.0490, 2.4177], device='cuda:1') +2024-07-28 17:49:17,897 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1656, simple_loss=0.2686, pruned_loss=0.03133, over 944034.00 frames. +2024-07-28 17:49:17,897 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 17:49:18,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.24 vs. limit=15.0 +2024-07-28 17:49:23,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=185186.66666666666, ans=0.0 +2024-07-28 17:49:24,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=185200.0, ans=0.0 +2024-07-28 17:49:31,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185213.33333333334, ans=0.1 +2024-07-28 17:49:40,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=185226.66666666666, ans=0.0 +2024-07-28 17:49:51,984 INFO [train.py:1114] (1/4) Epoch 14, batch 6050, loss[loss=0.1574, simple_loss=0.2341, pruned_loss=0.04035, over 4779.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2712, pruned_loss=0.04728, over 938716.36 frames. 
], batch size: 12, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:49:52,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=185253.33333333334, ans=0.0 +2024-07-28 17:49:58,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=185266.66666666666, ans=0.125 +2024-07-28 17:50:03,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=185266.66666666666, ans=0.2 +2024-07-28 17:50:03,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185266.66666666666, ans=0.1 +2024-07-28 17:50:16,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0 +2024-07-28 17:50:24,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=185306.66666666666, ans=0.125 +2024-07-28 17:50:26,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.501e+01 6.141e+01 7.204e+01 9.755e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 17:50:26,758 INFO [train.py:1114] (1/4) Epoch 14, batch 6100, loss[loss=0.2006, simple_loss=0.2906, pruned_loss=0.05533, over 4699.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.271, pruned_loss=0.04704, over 938270.97 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:50:31,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=15.0 +2024-07-28 17:50:50,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=185360.0, ans=0.2 +2024-07-28 17:50:54,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.02 vs. limit=10.0 +2024-07-28 17:51:00,625 INFO [train.py:1114] (1/4) Epoch 14, batch 6150, loss[loss=0.2269, simple_loss=0.2998, pruned_loss=0.07697, over 3319.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2715, pruned_loss=0.04669, over 936487.47 frames. 
], batch size: 36, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:51:04,281 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:51:11,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185400.0, ans=0.1 +2024-07-28 17:51:30,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185440.0, ans=0.125 +2024-07-28 17:51:30,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=185440.0, ans=0.125 +2024-07-28 17:51:31,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185440.0, ans=0.125 +2024-07-28 17:51:32,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185440.0, ans=0.125 +2024-07-28 17:51:33,269 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.520e+01 6.468e+01 7.669e+01 1.156e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 17:51:33,902 INFO [train.py:1114] (1/4) Epoch 14, batch 6200, loss[loss=0.1883, simple_loss=0.2918, pruned_loss=0.04243, over 4734.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2718, pruned_loss=0.04669, over 936068.94 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:04,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=185506.66666666666, ans=0.0 +2024-07-28 17:52:11,471 INFO [train.py:1114] (1/4) Epoch 14, batch 6250, loss[loss=0.1987, simple_loss=0.2982, pruned_loss=0.04957, over 4808.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2719, pruned_loss=0.04669, over 932564.86 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:13,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-07-28 17:52:17,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185533.33333333334, ans=0.0 +2024-07-28 17:52:29,967 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.84 vs. limit=22.5 +2024-07-28 17:52:45,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185573.33333333334, ans=0.125 +2024-07-28 17:52:45,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185573.33333333334, ans=0.1 +2024-07-28 17:52:46,202 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.580e+01 6.337e+01 7.212e+01 1.101e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 17:52:46,903 INFO [train.py:1114] (1/4) Epoch 14, batch 6300, loss[loss=0.1555, simple_loss=0.2447, pruned_loss=0.03312, over 4515.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2724, pruned_loss=0.04695, over 929498.38 frames. 
], batch size: 10, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:54,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=185600.0, ans=0.125 +2024-07-28 17:52:59,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0 +2024-07-28 17:53:02,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=185613.33333333334, ans=0.0 +2024-07-28 17:53:07,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=185626.66666666666, ans=0.05 +2024-07-28 17:53:10,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=185626.66666666666, ans=0.0 +2024-07-28 17:53:11,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=185626.66666666666, ans=0.2 +2024-07-28 17:53:16,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=185640.0, ans=0.0 +2024-07-28 17:53:17,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=185640.0, ans=0.025 +2024-07-28 17:53:19,477 INFO [train.py:1114] (1/4) Epoch 14, batch 6350, loss[loss=0.1918, simple_loss=0.278, pruned_loss=0.05275, over 4512.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2717, pruned_loss=0.04674, over 933586.08 frames. ], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:53:21,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=185653.33333333334, ans=0.0 +2024-07-28 17:53:25,391 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=15.0 +2024-07-28 17:53:39,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. limit=15.0 +2024-07-28 17:53:46,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=185706.66666666666, ans=0.125 +2024-07-28 17:53:50,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=185706.66666666666, ans=0.07 +2024-07-28 17:54:02,939 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.778e+01 6.430e+01 7.550e+01 1.026e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 17:54:03,632 INFO [train.py:1114] (1/4) Epoch 14, batch 6400, loss[loss=0.2103, simple_loss=0.3097, pruned_loss=0.05544, over 4633.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2727, pruned_loss=0.04709, over 935073.06 frames. ], batch size: 13, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:54:05,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=185720.0, ans=0.125 +2024-07-28 17:54:38,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=185773.33333333334, ans=0.125 +2024-07-28 17:54:39,356 INFO [train.py:1114] (1/4) Epoch 14, batch 6450, loss[loss=0.1978, simple_loss=0.2966, pruned_loss=0.0495, over 4528.00 frames. 
], tot_loss[loss=0.1839, simple_loss=0.2736, pruned_loss=0.04711, over 938463.16 frames. ], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:54:46,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=185800.0, ans=0.125 +2024-07-28 17:54:51,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=185813.33333333334, ans=0.1 +2024-07-28 17:54:58,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.21 vs. limit=15.0 +2024-07-28 17:55:01,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=185826.66666666666, ans=0.025 +2024-07-28 17:55:04,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=185826.66666666666, ans=0.125 +2024-07-28 17:55:10,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0 +2024-07-28 17:55:11,351 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.736e+01 6.499e+01 7.740e+01 1.076e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 17:55:12,079 INFO [train.py:1114] (1/4) Epoch 14, batch 6500, loss[loss=0.2243, simple_loss=0.2887, pruned_loss=0.07999, over 3623.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2728, pruned_loss=0.04661, over 939891.19 frames. ], batch size: 35, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:21,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.80 vs. limit=15.0 +2024-07-28 17:55:28,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.06 vs. limit=12.0 +2024-07-28 17:55:47,170 INFO [train.py:1114] (1/4) Epoch 14, batch 6550, loss[loss=0.1992, simple_loss=0.2785, pruned_loss=0.06001, over 4790.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2721, pruned_loss=0.04606, over 942738.73 frames. ], batch size: 11, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:51,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=185920.0, ans=15.0 +2024-07-28 17:55:55,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-07-28 17:55:59,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=185946.66666666666, ans=0.125 +2024-07-28 17:56:01,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185946.66666666666, ans=0.125 +2024-07-28 17:56:19,459 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.641e+01 6.098e+01 6.852e+01 1.074e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:56:20,139 INFO [train.py:1114] (1/4) Epoch 14, batch 6600, loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.04045, over 4925.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2728, pruned_loss=0.04662, over 944513.43 frames. 
], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:20,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185986.66666666666, ans=0.125 +2024-07-28 17:56:37,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=186013.33333333334, ans=0.1 +2024-07-28 17:56:49,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=186040.0, ans=0.0 +2024-07-28 17:56:53,739 INFO [train.py:1114] (1/4) Epoch 14, batch 6650, loss[loss=0.1721, simple_loss=0.2614, pruned_loss=0.04134, over 4637.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2726, pruned_loss=0.04676, over 943072.04 frames. ], batch size: 17, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:57:02,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.23 vs. limit=15.0 +2024-07-28 17:57:04,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186066.66666666666, ans=0.125 +2024-07-28 17:57:09,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=186080.0, ans=0.1 +2024-07-28 17:57:13,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186093.33333333334, ans=0.125 +2024-07-28 17:57:15,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=186093.33333333334, ans=0.125 +2024-07-28 17:57:26,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=186106.66666666666, ans=0.09899494936611666 +2024-07-28 17:57:30,076 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 5.766e+01 6.384e+01 7.192e+01 1.160e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 17:57:30,770 INFO [train.py:1114] (1/4) Epoch 14, batch 6700, loss[loss=0.1792, simple_loss=0.2655, pruned_loss=0.04648, over 4681.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2724, pruned_loss=0.04691, over 941868.24 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:06,967 INFO [train.py:1114] (1/4) Epoch 14, batch 6750, loss[loss=0.1871, simple_loss=0.2846, pruned_loss=0.04479, over 4217.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2726, pruned_loss=0.04685, over 939715.78 frames. ], batch size: 25, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:17,698 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:58:20,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.77 vs. 
limit=22.5 +2024-07-28 17:58:25,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=186213.33333333334, ans=0.09899494936611666 +2024-07-28 17:58:39,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=186240.0, ans=0.125 +2024-07-28 17:58:40,451 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.569e+01 6.146e+01 7.020e+01 9.338e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 17:58:41,117 INFO [train.py:1114] (1/4) Epoch 14, batch 6800, loss[loss=0.1741, simple_loss=0.2771, pruned_loss=0.03557, over 4633.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2728, pruned_loss=0.04684, over 938139.46 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:43,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186253.33333333334, ans=0.125 +2024-07-28 17:58:45,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=186253.33333333334, ans=0.125 +2024-07-28 17:58:46,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=186253.33333333334, ans=0.125 +2024-07-28 17:58:49,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0 +2024-07-28 17:58:49,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.80 vs. limit=15.0 +2024-07-28 17:58:52,668 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.31 vs. limit=6.0 +2024-07-28 17:58:59,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=186280.0, ans=0.125 +2024-07-28 17:59:00,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186293.33333333334, ans=0.1 +2024-07-28 17:59:01,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=186293.33333333334, ans=0.1 +2024-07-28 17:59:07,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=186306.66666666666, ans=0.2 +2024-07-28 17:59:10,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.77 vs. limit=15.0 +2024-07-28 17:59:14,769 INFO [train.py:1114] (1/4) Epoch 14, batch 6850, loss[loss=0.212, simple_loss=0.3114, pruned_loss=0.05631, over 4689.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2729, pruned_loss=0.04683, over 939852.70 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:59:15,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.89 vs. limit=15.0 +2024-07-28 17:59:17,222 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.32 vs. 
+2024-07-28 17:59:27,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=186346.66666666666, ans=0.09899494936611666
+2024-07-28 17:59:36,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=186360.0, ans=0.0
+2024-07-28 17:59:41,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=186373.33333333334, ans=0.125
+2024-07-28 17:59:47,792 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.718e+01 6.199e+01 6.949e+01 1.067e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 17:59:48,471 INFO [train.py:1114] (1/4) Epoch 14, batch 6900, loss[loss=0.183, simple_loss=0.2761, pruned_loss=0.04496, over 4963.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.273, pruned_loss=0.04695, over 942089.58 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 17:59:58,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=186400.0, ans=0.2
+2024-07-28 18:00:04,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=186413.33333333334, ans=0.125
+2024-07-28 18:00:07,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186413.33333333334, ans=0.125
+2024-07-28 18:00:17,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186440.0, ans=0.0
+2024-07-28 18:00:19,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186440.0, ans=0.0
+2024-07-28 18:00:22,486 INFO [train.py:1114] (1/4) Epoch 14, batch 6950, loss[loss=0.1609, simple_loss=0.2488, pruned_loss=0.03645, over 4542.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.273, pruned_loss=0.04695, over 939309.33 frames. ], batch size: 10, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 18:00:22,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=186453.33333333334, ans=0.0
+2024-07-28 18:00:39,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0
+2024-07-28 18:00:41,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=186480.0, ans=0.025
+2024-07-28 18:00:49,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186506.66666666666, ans=0.1
+2024-07-28 18:00:52,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=186506.66666666666, ans=0.0
+2024-07-28 18:00:53,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=186506.66666666666, ans=0.025
+2024-07-28 18:00:54,796 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.637e+01 6.195e+01 7.111e+01 9.946e+01, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 18:00:55,476 INFO [train.py:1114] (1/4) Epoch 14, batch 7000, loss[loss=0.1879, simple_loss=0.2778, pruned_loss=0.04903, over 4840.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2735, pruned_loss=0.04737, over 938629.25 frames. ], batch size: 18, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 18:00:57,675 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.71 vs. limit=22.5
+2024-07-28 18:00:59,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.24 vs. limit=15.0
+2024-07-28 18:01:00,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=186520.0, ans=0.0
+2024-07-28 18:01:04,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.14 vs. limit=15.0
+2024-07-28 18:01:05,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186533.33333333334, ans=0.125
+2024-07-28 18:01:10,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0
+2024-07-28 18:01:14,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.15 vs. limit=6.0
+2024-07-28 18:01:20,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186560.0, ans=0.125
+2024-07-28 18:01:28,536 INFO [train.py:1114] (1/4) Epoch 14, batch 7050, loss[loss=0.167, simple_loss=0.2598, pruned_loss=0.03713, over 4713.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.274, pruned_loss=0.04761, over 941927.72 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 18:01:32,714 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.91 vs. limit=15.0
+2024-07-28 18:01:40,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186600.0, ans=0.1
+2024-07-28 18:01:44,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=186613.33333333334, ans=0.125
+2024-07-28 18:01:47,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186613.33333333334, ans=0.1
+2024-07-28 18:01:49,556 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.84 vs. limit=22.5
+2024-07-28 18:01:52,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186626.66666666666, ans=0.1
+2024-07-28 18:01:57,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=186640.0, ans=0.025
+2024-07-28 18:02:03,021 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.702e+01 6.224e+01 7.168e+01 1.076e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 18:02:03,696 INFO [train.py:1114] (1/4) Epoch 14, batch 7100, loss[loss=0.1929, simple_loss=0.2855, pruned_loss=0.0501, over 4789.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2735, pruned_loss=0.04747, over 936820.57 frames. ], batch size: 15, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 18:02:25,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=186666.66666666666, ans=0.125
+2024-07-28 18:02:25,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.15 vs. limit=15.0
+2024-07-28 18:02:25,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=15.0
+2024-07-28 18:02:35,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0
+2024-07-28 18:02:40,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=186706.66666666666, ans=0.2
+2024-07-28 18:02:45,505 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:02:46,760 INFO [train.py:1114] (1/4) Epoch 14, batch 7150, loss[loss=0.2029, simple_loss=0.3036, pruned_loss=0.05107, over 4572.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2726, pruned_loss=0.04739, over 937772.83 frames. ], batch size: 21, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 18:02:49,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-28 18:02:51,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=186720.0, ans=0.125
+2024-07-28 18:02:52,931 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.56 vs. limit=10.0
+2024-07-28 18:03:27,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=186733.33333333334, ans=0.0
+2024-07-28 18:04:13,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=186773.33333333334, ans=0.125
+2024-07-28 18:04:13,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=186773.33333333334, ans=0.125
+2024-07-28 18:04:20,571 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.638e+01 6.134e+01 6.924e+01 9.250e+01, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 18:04:21,223 INFO [train.py:1114] (1/4) Epoch 14, batch 7200, loss[loss=0.1897, simple_loss=0.2834, pruned_loss=0.04797, over 4799.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2733, pruned_loss=0.04712, over 938558.84 frames. ], batch size: 15, lr: 5.31e-03, grad_scale: 32.0
+2024-07-28 18:04:21,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=186786.66666666666, ans=0.0
+2024-07-28 18:04:21,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=186786.66666666666, ans=0.0
+2024-07-28 18:04:21,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.43 vs. limit=15.0
+2024-07-28 18:04:44,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=186800.0, ans=0.0
+2024-07-28 18:04:59,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=186813.33333333334, ans=0.1
+2024-07-28 18:05:18,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=186840.0, ans=0.125
+2024-07-28 18:05:18,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186840.0, ans=0.125
+2024-07-28 18:05:25,088 INFO [train.py:1114] (1/4) Epoch 14, batch 7250, loss[loss=0.178, simple_loss=0.2706, pruned_loss=0.04274, over 4852.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2725, pruned_loss=0.04673, over 940262.19 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 32.0
+2024-07-28 18:05:35,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186853.33333333334, ans=0.125
+2024-07-28 18:05:46,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=186880.0, ans=0.2
+2024-07-28 18:05:55,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=186880.0, ans=0.125
+2024-07-28 18:05:59,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.83 vs. limit=22.5
+2024-07-28 18:06:05,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0
+2024-07-28 18:06:05,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.04 vs. limit=22.5
+2024-07-28 18:06:10,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=186906.66666666666, ans=0.125
+2024-07-28 18:06:11,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0
+2024-07-28 18:06:12,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=186906.66666666666, ans=0.125
+2024-07-28 18:06:13,403 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.624e+01 6.211e+01 6.890e+01 1.048e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 18:06:14,138 INFO [train.py:1114] (1/4) Epoch 14, batch 7300, loss[loss=0.1503, simple_loss=0.2309, pruned_loss=0.03489, over 4861.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2722, pruned_loss=0.04672, over 940528.01 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 32.0
+2024-07-28 18:06:15,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=186920.0, ans=0.0
+2024-07-28 18:06:29,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=186946.66666666666, ans=0.0
+2024-07-28 18:06:29,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=12.0
+2024-07-28 18:06:31,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=186946.66666666666, ans=0.2
+2024-07-28 18:06:33,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=186960.0, ans=0.05
+2024-07-28 18:06:43,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=186973.33333333334, ans=0.125
+2024-07-28 18:06:46,845 INFO [train.py:1114] (1/4) Epoch 14, batch 7350, loss[loss=0.1592, simple_loss=0.2514, pruned_loss=0.03348, over 4642.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2721, pruned_loss=0.04693, over 939671.28 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:06:52,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=186986.66666666666, ans=0.0
+2024-07-28 18:06:58,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=187000.0, ans=0.0
+2024-07-28 18:07:02,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=187013.33333333334, ans=0.125
+2024-07-28 18:07:04,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=187013.33333333334, ans=0.1
+2024-07-28 18:07:04,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=187013.33333333334, ans=0.0
+2024-07-28 18:07:06,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187013.33333333334, ans=0.125
+2024-07-28 18:07:16,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=187040.0, ans=0.125
+2024-07-28 18:07:21,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187040.0, ans=0.125
+2024-07-28 18:07:21,804 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.480e+01 5.943e+01 6.743e+01 9.456e+01, threshold=1.189e+02, percent-clipped=0.0
+2024-07-28 18:07:22,464 INFO [train.py:1114] (1/4) Epoch 14, batch 7400, loss[loss=0.1881, simple_loss=0.2796, pruned_loss=0.04829, over 4697.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2723, pruned_loss=0.04687, over 940525.67 frames. ], batch size: 13, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:07:25,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=187053.33333333334, ans=0.0
+2024-07-28 18:07:33,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=187066.66666666666, ans=0.125
+2024-07-28 18:07:41,736 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.15 vs. limit=12.0
+2024-07-28 18:07:54,557 INFO [train.py:1114] (1/4) Epoch 14, batch 7450, loss[loss=0.1472, simple_loss=0.2272, pruned_loss=0.03359, over 4625.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2718, pruned_loss=0.04713, over 938362.74 frames. ], batch size: 11, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:08:07,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=187133.33333333334, ans=0.2
+2024-07-28 18:08:13,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=187146.66666666666, ans=0.025
+2024-07-28 18:08:18,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187160.0, ans=0.1
+2024-07-28 18:08:23,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=187173.33333333334, ans=0.025
+2024-07-28 18:08:28,502 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.589e+01 6.200e+01 6.910e+01 1.220e+02, threshold=1.240e+02, percent-clipped=1.0
+2024-07-28 18:08:29,221 INFO [train.py:1114] (1/4) Epoch 14, batch 7500, loss[loss=0.24, simple_loss=0.3149, pruned_loss=0.08259, over 3470.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2715, pruned_loss=0.04719, over 936574.29 frames. ], batch size: 35, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:08:35,397 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0
+2024-07-28 18:08:41,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=187200.0, ans=0.5
+2024-07-28 18:09:21,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187240.0, ans=0.125
+2024-07-28 18:09:23,919 INFO [train.py:1114] (1/4) Epoch 14, batch 7550, loss[loss=0.1896, simple_loss=0.2826, pruned_loss=0.04824, over 4621.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2729, pruned_loss=0.04744, over 936338.96 frames. ], batch size: 17, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:09:26,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=187253.33333333334, ans=0.125
+2024-07-28 18:09:36,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=187280.0, ans=0.0
+2024-07-28 18:09:56,065 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.167e+01 5.585e+01 6.021e+01 6.660e+01 1.005e+02, threshold=1.204e+02, percent-clipped=0.0
+2024-07-28 18:09:56,708 INFO [train.py:1114] (1/4) Epoch 14, batch 7600, loss[loss=0.1891, simple_loss=0.2888, pruned_loss=0.04468, over 4807.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2723, pruned_loss=0.04669, over 938148.17 frames. ], batch size: 14, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:10:00,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=187320.0, ans=0.125
+2024-07-28 18:10:04,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=187333.33333333334, ans=0.2
+2024-07-28 18:10:11,193 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.54 vs. limit=22.5
+2024-07-28 18:10:12,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=187346.66666666666, ans=0.0
+2024-07-28 18:10:13,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187346.66666666666, ans=0.0
+2024-07-28 18:10:16,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=187360.0, ans=0.2
+2024-07-28 18:10:24,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=187373.33333333334, ans=0.125
+2024-07-28 18:10:29,864 INFO [train.py:1114] (1/4) Epoch 14, batch 7650, loss[loss=0.1807, simple_loss=0.2642, pruned_loss=0.04864, over 4943.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2717, pruned_loss=0.04664, over 937131.87 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:10:40,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=187400.0, ans=0.0
+2024-07-28 18:10:43,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.06 vs. limit=10.0
+2024-07-28 18:10:43,502 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.39 vs. limit=15.0
+2024-07-28 18:10:47,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.81 vs. limit=22.5
+2024-07-28 18:10:50,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187426.66666666666, ans=0.1
+2024-07-28 18:11:02,119 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.638e+01 6.341e+01 7.114e+01 1.063e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 18:11:02,777 INFO [train.py:1114] (1/4) Epoch 14, batch 7700, loss[loss=0.1943, simple_loss=0.279, pruned_loss=0.05485, over 4684.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.273, pruned_loss=0.04725, over 933975.29 frames. ], batch size: 13, lr: 5.30e-03, grad_scale: 64.0
+2024-07-28 18:11:09,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=187453.33333333334, ans=0.125
+2024-07-28 18:11:23,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0
+2024-07-28 18:11:31,009 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.01 vs. limit=22.5
+2024-07-28 18:11:48,689 INFO [train.py:1114] (1/4) Epoch 14, batch 7750, loss[loss=0.1937, simple_loss=0.2749, pruned_loss=0.05621, over 4929.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2739, pruned_loss=0.04729, over 935275.26 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0
+2024-07-28 18:11:49,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-28 18:12:22,669 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 5.694e+01 6.124e+01 6.801e+01 8.564e+01, threshold=1.225e+02, percent-clipped=0.0
+2024-07-28 18:12:24,027 INFO [train.py:1114] (1/4) Epoch 14, batch 7800, loss[loss=0.1761, simple_loss=0.2815, pruned_loss=0.03538, over 4663.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2742, pruned_loss=0.04682, over 937239.62 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0
+2024-07-28 18:12:24,676 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:12:26,640 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:12:30,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=187600.0, ans=0.2
+2024-07-28 18:12:41,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.77 vs. limit=22.5
+2024-07-28 18:13:18,099 INFO [train.py:1114] (1/4) Epoch 14, batch 7850, loss[loss=0.1366, simple_loss=0.2341, pruned_loss=0.01955, over 4522.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2747, pruned_loss=0.04732, over 936240.97 frames. ], batch size: 10, lr: 5.29e-03, grad_scale: 64.0
+2024-07-28 18:13:24,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=187666.66666666666, ans=0.1
+2024-07-28 18:13:24,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=187666.66666666666, ans=0.5
+2024-07-28 18:13:51,288 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.677e+01 6.181e+01 6.848e+01 9.012e+01, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 18:13:51,944 INFO [train.py:1114] (1/4) Epoch 14, batch 7900, loss[loss=0.199, simple_loss=0.2824, pruned_loss=0.05783, over 4874.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2754, pruned_loss=0.0479, over 933258.15 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0
+2024-07-28 18:13:55,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=187720.0, ans=0.0
+2024-07-28 18:14:10,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=187746.66666666666, ans=0.09899494936611666
+2024-07-28 18:14:12,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187760.0, ans=0.125
+2024-07-28 18:14:15,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187760.0, ans=0.125
+2024-07-28 18:14:25,544 INFO [train.py:1114] (1/4) Epoch 14, batch 7950, loss[loss=0.2122, simple_loss=0.2891, pruned_loss=0.06766, over 3511.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.275, pruned_loss=0.04777, over 935543.61 frames. ], batch size: 35, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:14:27,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=187786.66666666666, ans=0.05
+2024-07-28 18:14:41,618 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0
+2024-07-28 18:14:45,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=187826.66666666666, ans=0.0
+2024-07-28 18:14:53,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=187840.0, ans=0.125
+2024-07-28 18:14:57,670 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:15:01,008 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.795e+01 6.761e+01 7.899e+01 1.107e+02, threshold=1.352e+02, percent-clipped=0.0
+2024-07-28 18:15:01,041 INFO [train.py:1114] (1/4) Epoch 14, batch 8000, loss[loss=0.1801, simple_loss=0.2703, pruned_loss=0.04492, over 4619.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2732, pruned_loss=0.04692, over 934799.70 frames. ], batch size: 11, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:15:09,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=187866.66666666666, ans=0.025
+2024-07-28 18:15:20,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=187880.0, ans=0.125
+2024-07-28 18:16:01,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=187893.33333333334, ans=0.0
+2024-07-28 18:16:09,698 INFO [train.py:1114] (1/4) Epoch 14, batch 8050, loss[loss=0.1929, simple_loss=0.2897, pruned_loss=0.04806, over 4803.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2729, pruned_loss=0.0466, over 934615.63 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:16:15,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=187920.0, ans=0.0
+2024-07-28 18:16:19,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=187933.33333333334, ans=0.05
+2024-07-28 18:16:20,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=187933.33333333334, ans=0.125
+2024-07-28 18:16:21,996 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.47 vs. limit=12.0
+2024-07-28 18:16:29,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=187960.0, ans=0.0
+2024-07-28 18:16:32,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=187960.0, ans=0.125
+2024-07-28 18:16:41,831 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.834e+01 6.714e+01 7.875e+01 1.229e+02, threshold=1.343e+02, percent-clipped=0.0
+2024-07-28 18:16:41,864 INFO [train.py:1114] (1/4) Epoch 14, batch 8100, loss[loss=0.2154, simple_loss=0.305, pruned_loss=0.06287, over 4797.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2736, pruned_loss=0.04714, over 934003.54 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:16:45,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187986.66666666666, ans=0.125
+2024-07-28 18:16:52,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=188000.0, ans=0.0
+2024-07-28 18:16:55,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.26 vs. limit=15.0
+2024-07-28 18:16:56,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.48 vs. limit=15.0
+2024-07-28 18:17:00,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=188026.66666666666, ans=0.0
+2024-07-28 18:17:04,818 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:17:13,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188040.0, ans=0.0
+2024-07-28 18:17:14,588 INFO [train.py:1114] (1/4) Epoch 14, batch 8150, loss[loss=0.2348, simple_loss=0.318, pruned_loss=0.07581, over 4796.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2729, pruned_loss=0.0467, over 937749.50 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:17:49,786 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.661e+01 6.103e+01 6.886e+01 9.464e+01, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 18:17:49,820 INFO [train.py:1114] (1/4) Epoch 14, batch 8200, loss[loss=0.2031, simple_loss=0.2952, pruned_loss=0.05553, over 4805.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2734, pruned_loss=0.04668, over 938733.30 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:18:06,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188146.66666666666, ans=0.1
+2024-07-28 18:18:09,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188160.0, ans=0.1
+2024-07-28 18:18:22,384 INFO [train.py:1114] (1/4) Epoch 14, batch 8250, loss[loss=0.1856, simple_loss=0.2708, pruned_loss=0.05022, over 4900.00 frames. ], tot_loss[loss=0.183, simple_loss=0.273, pruned_loss=0.0465, over 939144.94 frames. ], batch size: 13, lr: 5.29e-03, grad_scale: 32.0
+2024-07-28 18:18:26,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=188186.66666666666, ans=0.04949747468305833
+2024-07-28 18:18:37,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188213.33333333334, ans=0.125
+2024-07-28 18:18:40,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=188213.33333333334, ans=0.025
+2024-07-28 18:18:46,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0
+2024-07-28 18:18:54,601 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.616e+01 6.253e+01 7.427e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 18:18:54,634 INFO [train.py:1114] (1/4) Epoch 14, batch 8300, loss[loss=0.1929, simple_loss=0.2934, pruned_loss=0.04617, over 4892.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.274, pruned_loss=0.04743, over 939397.53 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:18:54,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=12.0
+2024-07-28 18:19:01,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=188266.66666666666, ans=0.2
+2024-07-28 18:19:16,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=188293.33333333334, ans=0.1
+2024-07-28 18:19:21,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188306.66666666666, ans=0.125
+2024-07-28 18:19:25,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=188306.66666666666, ans=0.0
+2024-07-28 18:19:28,347 INFO [train.py:1114] (1/4) Epoch 14, batch 8350, loss[loss=0.1744, simple_loss=0.2701, pruned_loss=0.0393, over 4810.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2737, pruned_loss=0.04728, over 941790.96 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:19:39,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=188333.33333333334, ans=0.125
+2024-07-28 18:19:42,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=188346.66666666666, ans=0.09899494936611666
+2024-07-28 18:19:44,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188346.66666666666, ans=0.125
+2024-07-28 18:19:49,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=188360.0, ans=0.0
+2024-07-28 18:19:50,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0
+2024-07-28 18:19:53,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=188373.33333333334, ans=0.125
+2024-07-28 18:20:00,908 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 5.977e+01 6.680e+01 9.102e+01, threshold=1.195e+02, percent-clipped=0.0
+2024-07-28 18:20:00,941 INFO [train.py:1114] (1/4) Epoch 14, batch 8400, loss[loss=0.171, simple_loss=0.2604, pruned_loss=0.04077, over 4781.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2729, pruned_loss=0.04728, over 940115.54 frames. ], batch size: 12, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:20:02,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=188386.66666666666, ans=0.2
+2024-07-28 18:20:18,170 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.82 vs. limit=15.0
+2024-07-28 18:20:23,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188426.66666666666, ans=0.125
+2024-07-28 18:20:33,549 INFO [train.py:1114] (1/4) Epoch 14, batch 8450, loss[loss=0.2133, simple_loss=0.315, pruned_loss=0.05582, over 4804.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2741, pruned_loss=0.04741, over 939265.26 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:20:47,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=188480.0, ans=0.2
+2024-07-28 18:20:49,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188480.0, ans=0.0
+2024-07-28 18:20:57,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.01 vs. limit=15.0
+2024-07-28 18:20:58,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=188506.66666666666, ans=0.125
+2024-07-28 18:20:59,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188506.66666666666, ans=0.125
+2024-07-28 18:21:02,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=188506.66666666666, ans=0.025
+2024-07-28 18:21:05,570 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 6.107e+01 6.939e+01 8.222e+01 1.191e+02, threshold=1.388e+02, percent-clipped=0.0
+2024-07-28 18:21:05,603 INFO [train.py:1114] (1/4) Epoch 14, batch 8500, loss[loss=0.1594, simple_loss=0.2434, pruned_loss=0.0377, over 4621.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2737, pruned_loss=0.04733, over 938623.25 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:21:13,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=188533.33333333334, ans=0.2
+2024-07-28 18:21:13,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=188533.33333333334, ans=0.04949747468305833
+2024-07-28 18:21:14,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=188533.33333333334, ans=0.0
+2024-07-28 18:21:21,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.24 vs. limit=15.0
+2024-07-28 18:21:27,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=188560.0, ans=0.0
+2024-07-28 18:21:27,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=188560.0, ans=0.125
+2024-07-28 18:21:28,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=188560.0, ans=0.0
+2024-07-28 18:21:31,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=188573.33333333334, ans=0.025
+2024-07-28 18:21:32,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=188573.33333333334, ans=0.125
+2024-07-28 18:21:38,567 INFO [train.py:1114] (1/4) Epoch 14, batch 8550, loss[loss=0.1331, simple_loss=0.2199, pruned_loss=0.02311, over 4799.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2732, pruned_loss=0.04758, over 939749.71 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:22:13,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=188626.66666666666, ans=0.125
+2024-07-28 18:22:15,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188626.66666666666, ans=0.1
+2024-07-28 18:22:26,140 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.708e+01 6.188e+01 7.242e+01 1.269e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-28 18:22:26,173 INFO [train.py:1114] (1/4) Epoch 14, batch 8600, loss[loss=0.2126, simple_loss=0.3059, pruned_loss=0.05964, over 4790.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2724, pruned_loss=0.04747, over 939438.97 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:23:06,724 INFO [train.py:1114] (1/4) Epoch 14, batch 8650, loss[loss=0.167, simple_loss=0.2616, pruned_loss=0.03616, over 4887.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2722, pruned_loss=0.04734, over 940673.91 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:23:08,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=188720.0, ans=0.0
+2024-07-28 18:23:11,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188720.0, ans=0.125
+2024-07-28 18:23:12,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=188733.33333333334, ans=0.125
+2024-07-28 18:23:14,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.04 vs. limit=15.0
+2024-07-28 18:23:21,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=188746.66666666666, ans=0.07
+2024-07-28 18:23:22,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0
+2024-07-28 18:23:31,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188760.0, ans=0.0
+2024-07-28 18:23:38,960 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.747e+01 6.354e+01 7.150e+01 1.051e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 18:23:38,993 INFO [train.py:1114] (1/4) Epoch 14, batch 8700, loss[loss=0.1626, simple_loss=0.2641, pruned_loss=0.03051, over 4762.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04775, over 938720.56 frames. ], batch size: 13, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:23:49,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188800.0, ans=0.1
+2024-07-28 18:23:49,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188800.0, ans=0.125
+2024-07-28 18:23:52,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188813.33333333334, ans=0.1
+2024-07-28 18:23:53,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=188813.33333333334, ans=0.0
+2024-07-28 18:23:54,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=188813.33333333334, ans=0.5
+2024-07-28 18:24:11,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=188840.0, ans=0.125
+2024-07-28 18:24:11,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=188840.0, ans=0.125
+2024-07-28 18:24:13,571 INFO [train.py:1114] (1/4) Epoch 14, batch 8750, loss[loss=0.2259, simple_loss=0.3073, pruned_loss=0.07224, over 4690.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.04797, over 936803.28 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:24:20,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=188866.66666666666, ans=0.125
+2024-07-28 18:24:26,231 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.04 vs. limit=15.0
+2024-07-28 18:24:32,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188893.33333333334, ans=0.0
+2024-07-28 18:24:36,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=188893.33333333334, ans=0.0
+2024-07-28 18:24:45,707 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.401e+01 5.577e+01 5.996e+01 6.718e+01 9.459e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 18:24:45,740 INFO [train.py:1114] (1/4) Epoch 14, batch 8800, loss[loss=0.1812, simple_loss=0.2872, pruned_loss=0.03758, over 4933.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2749, pruned_loss=0.04821, over 937433.22 frames. ], batch size: 14, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:24:46,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.60 vs. limit=15.0
+2024-07-28 18:24:50,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=188920.0, ans=0.0
+2024-07-28 18:24:52,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=188933.33333333334, ans=0.2
+2024-07-28 18:24:54,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188933.33333333334, ans=0.1
+2024-07-28 18:25:02,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=188946.66666666666, ans=0.04949747468305833
+2024-07-28 18:25:18,764 INFO [train.py:1114] (1/4) Epoch 14, batch 8850, loss[loss=0.174, simple_loss=0.2631, pruned_loss=0.04242, over 4515.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2742, pruned_loss=0.04836, over 932457.04 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:25:27,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=189000.0, ans=0.125
+2024-07-28 18:25:27,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=189000.0, ans=0.2
+2024-07-28 18:25:30,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=189000.0, ans=0.2
+2024-07-28 18:25:35,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=189013.33333333334, ans=0.0
+2024-07-28 18:25:36,092 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.47 vs. limit=15.0
+2024-07-28 18:25:39,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189026.66666666666, ans=0.0
+2024-07-28 18:25:41,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=189026.66666666666, ans=0.025
+2024-07-28 18:25:42,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189026.66666666666, ans=0.125
+2024-07-28 18:25:43,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=189026.66666666666, ans=0.025
+2024-07-28 18:25:51,402 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.385e+01 5.694e+01 6.232e+01 7.298e+01 9.650e+01, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 18:25:51,436 INFO [train.py:1114] (1/4) Epoch 14, batch 8900, loss[loss=0.1918, simple_loss=0.2781, pruned_loss=0.05273, over 4947.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2748, pruned_loss=0.04846, over 930319.83 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:25:52,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.47 vs. limit=10.0
+2024-07-28 18:25:57,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=189066.66666666666, ans=0.035
+2024-07-28 18:26:03,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189066.66666666666, ans=0.125
+2024-07-28 18:26:03,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=189080.0, ans=0.09899494936611666
+2024-07-28 18:26:21,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=189106.66666666666, ans=0.2
+2024-07-28 18:26:24,556 INFO [train.py:1114] (1/4) Epoch 14, batch 8950, loss[loss=0.2154, simple_loss=0.3029, pruned_loss=0.06396, over 4450.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2745, pruned_loss=0.04846, over 931149.28 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:26:26,581 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:26:28,066 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.80 vs. limit=22.5
+2024-07-28 18:26:41,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=189146.66666666666, ans=0.0
+2024-07-28 18:26:48,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=189160.0, ans=0.125
+2024-07-28 18:26:49,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=189160.0, ans=0.2
+2024-07-28 18:26:55,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=189173.33333333334, ans=0.125
+2024-07-28 18:26:57,058 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.595e+01 6.149e+01 7.157e+01 9.804e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-28 18:26:57,092 INFO [train.py:1114] (1/4) Epoch 14, batch 9000, loss[loss=0.133, simple_loss=0.2279, pruned_loss=0.01911, over 4637.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.274, pruned_loss=0.04833, over 933812.20 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:26:57,092 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 18:27:09,002 INFO [train.py:1146] (1/4) Epoch 14, validation: loss=0.1644, simple_loss=0.2676, pruned_loss=0.03058, over 944034.00 frames.
+2024-07-28 18:27:09,006 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 18:27:24,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=189213.33333333334, ans=0.2
+2024-07-28 18:27:26,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=189213.33333333334, ans=0.2
+2024-07-28 18:27:27,512 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:27:36,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0
+2024-07-28 18:27:41,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=189253.33333333334, ans=22.5
+2024-07-28 18:27:41,986 INFO [train.py:1114] (1/4) Epoch 14, batch 9050, loss[loss=0.1417, simple_loss=0.2216, pruned_loss=0.0309, over 4509.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2738, pruned_loss=0.048, over 934011.76 frames. ], batch size: 10, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:27:46,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189253.33333333334, ans=0.125
+2024-07-28 18:28:03,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=189293.33333333334, ans=0.07
+2024-07-28 18:28:04,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=189293.33333333334, ans=0.025
+2024-07-28 18:28:14,826 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.696e+01 6.239e+01 6.974e+01 1.014e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 18:28:14,859 INFO [train.py:1114] (1/4) Epoch 14, batch 9100, loss[loss=0.1785, simple_loss=0.2763, pruned_loss=0.04038, over 4940.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2734, pruned_loss=0.04783, over 936546.57 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:28:23,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=189333.33333333334, ans=0.04949747468305833
+2024-07-28 18:28:32,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=189333.33333333334, ans=0.125
+2024-07-28 18:28:41,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=189360.0, ans=0.05
+2024-07-28 18:28:45,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189360.0, ans=0.1
+2024-07-28 18:28:45,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=189360.0, ans=0.2
+2024-07-28 18:28:48,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=189373.33333333334, ans=0.2
+2024-07-28 18:28:54,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189386.66666666666, ans=0.125
+2024-07-28 18:28:54,496 INFO [train.py:1114] (1/4) Epoch 14, batch 9150, loss[loss=0.1757, simple_loss=0.2681, pruned_loss=0.0416, over 4802.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2749, pruned_loss=0.04849, over 935137.66 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0
], batch size: 14, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:29:17,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=189426.66666666666, ans=0.0 +2024-07-28 18:29:27,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189453.33333333334, ans=0.0 +2024-07-28 18:29:27,776 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.765e+01 6.391e+01 7.042e+01 1.009e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 18:29:27,809 INFO [train.py:1114] (1/4) Epoch 14, batch 9200, loss[loss=0.1395, simple_loss=0.225, pruned_loss=0.02696, over 4854.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2734, pruned_loss=0.04804, over 937198.89 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:29:27,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=189453.33333333334, ans=0.2 +2024-07-28 18:29:42,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=22.5 +2024-07-28 18:29:52,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=189493.33333333334, ans=0.0 +2024-07-28 18:29:52,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189506.66666666666, ans=0.1 +2024-07-28 18:29:55,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189506.66666666666, ans=0.0 +2024-07-28 18:30:00,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189506.66666666666, ans=0.0 +2024-07-28 18:30:02,096 INFO [train.py:1114] (1/4) Epoch 14, batch 9250, loss[loss=0.1868, simple_loss=0.2841, pruned_loss=0.04477, over 4646.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2733, pruned_loss=0.04778, over 938105.58 frames. ], batch size: 13, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:30:03,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=189520.0, ans=0.0 +2024-07-28 18:30:08,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.33 vs. limit=12.0 +2024-07-28 18:30:11,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.21 vs. limit=15.0 +2024-07-28 18:30:19,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=189546.66666666666, ans=0.025 +2024-07-28 18:30:38,951 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.722e+01 6.128e+01 6.836e+01 1.013e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 18:30:38,984 INFO [train.py:1114] (1/4) Epoch 14, batch 9300, loss[loss=0.1893, simple_loss=0.2803, pruned_loss=0.04912, over 4776.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2738, pruned_loss=0.04851, over 938229.23 frames. 
], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:30:47,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189600.0, ans=0.125 +2024-07-28 18:30:51,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=189613.33333333334, ans=0.2 +2024-07-28 18:35:05,509 INFO [train.py:1114] (1/4) Epoch 14, batch 9350, loss[loss=0.1699, simple_loss=0.2552, pruned_loss=0.0423, over 4828.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2736, pruned_loss=0.04818, over 935019.98 frames. ], batch size: 11, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:35:13,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.75 vs. limit=22.5 +2024-07-28 18:35:14,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=189666.66666666666, ans=0.125 +2024-07-28 18:35:19,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.81 vs. limit=15.0 +2024-07-28 18:35:23,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=189680.0, ans=0.125 +2024-07-28 18:35:28,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189693.33333333334, ans=0.125 +2024-07-28 18:35:36,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189706.66666666666, ans=0.0 +2024-07-28 18:35:38,438 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.700e+01 6.300e+01 7.033e+01 1.050e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 18:35:39,332 INFO [train.py:1114] (1/4) Epoch 14, batch 9400, loss[loss=0.1839, simple_loss=0.2686, pruned_loss=0.0496, over 4696.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2742, pruned_loss=0.04858, over 932533.56 frames. ], batch size: 13, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:35:45,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=189733.33333333334, ans=0.0 +2024-07-28 18:35:45,734 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.20 vs. limit=15.0 +2024-07-28 18:35:49,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=189733.33333333334, ans=0.0 +2024-07-28 18:35:53,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=189746.66666666666, ans=0.125 +2024-07-28 18:36:10,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=189773.33333333334, ans=0.0 +2024-07-28 18:36:12,486 INFO [train.py:1114] (1/4) Epoch 14, batch 9450, loss[loss=0.1565, simple_loss=0.2402, pruned_loss=0.03637, over 4805.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2743, pruned_loss=0.0481, over 931951.51 frames. 
], batch size: 11, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:36:13,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=189786.66666666666, ans=0.125 +2024-07-28 18:36:17,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189786.66666666666, ans=0.0 +2024-07-28 18:36:18,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=189800.0, ans=0.2 +2024-07-28 18:36:22,118 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=189800.0, ans=0.025 +2024-07-28 18:36:36,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=189826.66666666666, ans=0.125 +2024-07-28 18:36:43,733 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.560e+01 6.240e+01 6.918e+01 1.034e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 18:36:43,766 INFO [train.py:1114] (1/4) Epoch 14, batch 9500, loss[loss=0.2089, simple_loss=0.2883, pruned_loss=0.0647, over 4704.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2746, pruned_loss=0.04832, over 934428.98 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:36:45,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189853.33333333334, ans=0.125 +2024-07-28 18:36:48,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.35 vs. limit=22.5 +2024-07-28 18:36:49,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=189866.66666666666, ans=0.025 +2024-07-28 18:37:04,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189893.33333333334, ans=0.1 +2024-07-28 18:37:11,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=189906.66666666666, ans=0.125 +2024-07-28 18:37:15,310 INFO [train.py:1114] (1/4) Epoch 14, batch 9550, loss[loss=0.1923, simple_loss=0.2863, pruned_loss=0.04908, over 4766.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2743, pruned_loss=0.04827, over 932553.45 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:37:18,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189920.0, ans=0.1 +2024-07-28 18:37:20,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=189920.0, ans=0.0 +2024-07-28 18:37:20,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.51 vs. 
limit=22.5 +2024-07-28 18:37:25,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=189933.33333333334, ans=0.0 +2024-07-28 18:37:35,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189960.0, ans=0.125 +2024-07-28 18:37:36,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=189960.0, ans=0.0 +2024-07-28 18:37:46,546 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+01 5.773e+01 6.422e+01 7.521e+01 1.253e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-28 18:37:46,579 INFO [train.py:1114] (1/4) Epoch 14, batch 9600, loss[loss=0.2159, simple_loss=0.2966, pruned_loss=0.06756, over 3198.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2743, pruned_loss=0.04809, over 930950.77 frames. ], batch size: 35, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:37:47,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=189986.66666666666, ans=0.0 +2024-07-28 18:37:52,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=190000.0, ans=0.2 +2024-07-28 18:37:54,984 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:37:57,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=190000.0, ans=0.2 +2024-07-28 18:38:06,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190026.66666666666, ans=0.125 +2024-07-28 18:38:14,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0 +2024-07-28 18:38:17,648 INFO [train.py:1114] (1/4) Epoch 14, batch 9650, loss[loss=0.22, simple_loss=0.307, pruned_loss=0.06649, over 4837.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2745, pruned_loss=0.04845, over 927015.83 frames. ], batch size: 16, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:38:17,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=190053.33333333334, ans=0.125 +2024-07-28 18:38:22,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=190053.33333333334, ans=0.125 +2024-07-28 18:38:26,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-28 18:38:31,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=190080.0, ans=0.04949747468305833 +2024-07-28 18:38:32,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=190080.0, ans=0.0 +2024-07-28 18:38:49,747 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.692e+01 6.329e+01 7.195e+01 1.065e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 18:38:49,780 INFO [train.py:1114] (1/4) Epoch 14, batch 9700, loss[loss=0.1975, simple_loss=0.2839, pruned_loss=0.05554, over 4278.00 frames. 
], tot_loss[loss=0.1852, simple_loss=0.2742, pruned_loss=0.04808, over 925296.19 frames. ], batch size: 26, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:38:58,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190133.33333333334, ans=0.1 +2024-07-28 18:39:01,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=190133.33333333334, ans=0.09899494936611666 +2024-07-28 18:39:06,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=190146.66666666666, ans=0.125 +2024-07-28 18:39:07,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=190160.0, ans=0.125 +2024-07-28 18:39:08,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.12 vs. limit=6.0 +2024-07-28 18:39:09,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=190160.0, ans=0.2 +2024-07-28 18:39:10,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=190160.0, ans=0.1 +2024-07-28 18:39:10,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=190160.0, ans=0.1 +2024-07-28 18:39:11,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190160.0, ans=0.125 +2024-07-28 18:39:12,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=190160.0, ans=0.0 +2024-07-28 18:39:20,728 INFO [train.py:1114] (1/4) Epoch 14, batch 9750, loss[loss=0.1657, simple_loss=0.2575, pruned_loss=0.03697, over 4682.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2734, pruned_loss=0.0481, over 925589.34 frames. ], batch size: 15, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:39:27,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=190200.0, ans=0.0 +2024-07-28 18:39:35,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=190213.33333333334, ans=0.0 +2024-07-28 18:39:43,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190226.66666666666, ans=0.125 +2024-07-28 18:39:51,824 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.577e+01 6.285e+01 7.276e+01 9.873e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 18:39:51,857 INFO [train.py:1114] (1/4) Epoch 14, batch 9800, loss[loss=0.1519, simple_loss=0.238, pruned_loss=0.03291, over 4718.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2731, pruned_loss=0.04805, over 925605.54 frames. 
], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:39:55,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=190253.33333333334, ans=0.0 +2024-07-28 18:39:55,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=190253.33333333334, ans=0.125 +2024-07-28 18:39:55,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=190253.33333333334, ans=0.1 +2024-07-28 18:40:04,772 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:40:07,079 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-07-28 18:40:11,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=190293.33333333334, ans=0.0 +2024-07-28 18:40:12,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190293.33333333334, ans=0.125 +2024-07-28 18:40:17,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=190306.66666666666, ans=0.0 +2024-07-28 18:40:18,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190306.66666666666, ans=0.1 +2024-07-28 18:40:23,776 INFO [train.py:1114] (1/4) Epoch 14, batch 9850, loss[loss=0.1794, simple_loss=0.2711, pruned_loss=0.04386, over 4910.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2733, pruned_loss=0.04805, over 928471.22 frames. ], batch size: 15, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:40:36,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=190346.66666666666, ans=0.0 +2024-07-28 18:40:38,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=190346.66666666666, ans=0.0 +2024-07-28 18:40:54,571 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.802e+01 6.503e+01 7.443e+01 1.103e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 18:40:54,604 INFO [train.py:1114] (1/4) Epoch 14, batch 9900, loss[loss=0.1962, simple_loss=0.3041, pruned_loss=0.04415, over 4828.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2737, pruned_loss=0.04855, over 927460.48 frames. ], batch size: 16, lr: 5.25e-03, grad_scale: 32.0 +2024-07-28 18:40:55,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=190386.66666666666, ans=0.2 +2024-07-28 18:40:57,377 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.19 vs. 
limit=15.0 +2024-07-28 18:41:04,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190400.0, ans=0.1 +2024-07-28 18:41:08,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=190413.33333333334, ans=0.035 +2024-07-28 18:41:11,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=190413.33333333334, ans=0.125 +2024-07-28 18:41:25,150 INFO [train.py:1114] (1/4) Epoch 14, batch 9950, loss[loss=0.1807, simple_loss=0.2658, pruned_loss=0.04783, over 4792.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2747, pruned_loss=0.04965, over 930678.87 frames. ], batch size: 11, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:41:42,395 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-07-28 18:41:42,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=190480.0, ans=0.0 +2024-07-28 18:41:43,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=190493.33333333334, ans=0.125 +2024-07-28 18:41:53,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=190506.66666666666, ans=0.125 +2024-07-28 18:41:53,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=190506.66666666666, ans=0.125 +2024-07-28 18:41:55,999 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 6.024e+01 6.660e+01 7.558e+01 1.184e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 18:41:56,033 INFO [train.py:1114] (1/4) Epoch 14, batch 10000, loss[loss=0.1577, simple_loss=0.2618, pruned_loss=0.02675, over 4622.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2758, pruned_loss=0.04947, over 927778.16 frames. 
], batch size: 16, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:41:58,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=190520.0, ans=0.1 +2024-07-28 18:42:03,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190533.33333333334, ans=0.0 +2024-07-28 18:42:06,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=190533.33333333334, ans=0.05 +2024-07-28 18:42:10,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=190546.66666666666, ans=0.0 +2024-07-28 18:42:11,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=190546.66666666666, ans=0.0 +2024-07-28 18:42:14,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=190560.0, ans=0.125 +2024-07-28 18:42:17,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=190560.0, ans=0.125 +2024-07-28 18:42:17,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190560.0, ans=0.0 +2024-07-28 18:42:19,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=190560.0, ans=0.025 +2024-07-28 18:42:28,094 INFO [train.py:1114] (1/4) Epoch 14, batch 10050, loss[loss=0.2689, simple_loss=0.3336, pruned_loss=0.1022, over 3082.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2791, pruned_loss=0.05125, over 916115.80 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:42:28,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=190586.66666666666, ans=0.5 +2024-07-28 18:42:29,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190586.66666666666, ans=0.125 +2024-07-28 18:42:44,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=190613.33333333334, ans=0.025 +2024-07-28 18:42:46,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=190613.33333333334, ans=0.125 +2024-07-28 18:42:46,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=190613.33333333334, ans=0.125 +2024-07-28 18:42:54,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=190626.66666666666, ans=0.125 +2024-07-28 18:42:56,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.80 vs. 
limit=6.0 +2024-07-28 18:42:56,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=190640.0, ans=0.125 +2024-07-28 18:43:02,249 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.109e+01 6.099e+01 7.025e+01 7.577e+01 1.043e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-28 18:43:02,282 INFO [train.py:1114] (1/4) Epoch 14, batch 10100, loss[loss=0.2173, simple_loss=0.2944, pruned_loss=0.0701, over 3582.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2843, pruned_loss=0.05606, over 864569.59 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:43:03,337 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.91 vs. limit=6.0 +2024-07-28 18:43:06,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=190653.33333333334, ans=0.0 +2024-07-28 18:43:24,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=190693.33333333334, ans=0.125 +2024-07-28 18:43:28,818 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.05 vs. limit=15.0 +2024-07-28 18:43:31,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=190706.66666666666, ans=0.125 +2024-07-28 18:43:34,827 INFO [train.py:1114] (1/4) Epoch 14, batch 10150, loss[loss=0.2183, simple_loss=0.3101, pruned_loss=0.0632, over 3411.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2876, pruned_loss=0.0596, over 821309.73 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:43:35,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190720.0, ans=0.125 +2024-07-28 18:43:40,208 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.23 vs. limit=10.0 +2024-07-28 18:43:41,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=190733.33333333334, ans=0.125 +2024-07-28 18:43:42,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=190733.33333333334, ans=0.95 +2024-07-28 18:43:57,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=190760.0, ans=0.025 +2024-07-28 18:43:57,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=190760.0, ans=0.125 +2024-07-28 18:44:06,126 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.761e+01 6.519e+01 6.916e+01 7.399e+01 9.914e+01, threshold=1.383e+02, percent-clipped=0.0 +2024-07-28 18:44:06,163 INFO [train.py:1114] (1/4) Epoch 14, batch 10200, loss[loss=0.2336, simple_loss=0.3118, pruned_loss=0.07769, over 3492.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2894, pruned_loss=0.06225, over 788038.89 frames. 
], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:44:06,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=190786.66666666666, ans=0.09899494936611666 +2024-07-28 18:44:08,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=12.0 +2024-07-28 18:44:09,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=190786.66666666666, ans=0.125 +2024-07-28 18:45:00,512 INFO [train.py:1114] (1/4) Epoch 15, batch 0, loss[loss=0.1491, simple_loss=0.2398, pruned_loss=0.02918, over 4851.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2398, pruned_loss=0.02918, over 4851.00 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:45:00,512 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 18:45:12,053 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1655, simple_loss=0.2703, pruned_loss=0.03031, over 944034.00 frames. +2024-07-28 18:45:12,054 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 18:45:12,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=190816.0, ans=0.2 +2024-07-28 18:45:13,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=190816.0, ans=0.2 +2024-07-28 18:45:28,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=190842.66666666666, ans=0.2 +2024-07-28 18:45:41,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190869.33333333334, ans=0.0 +2024-07-28 18:45:43,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=190869.33333333334, ans=0.025 +2024-07-28 18:45:49,206 INFO [train.py:1114] (1/4) Epoch 15, batch 50, loss[loss=0.1491, simple_loss=0.2277, pruned_loss=0.03522, over 4611.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2773, pruned_loss=0.04952, over 206265.47 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:45:50,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=190882.66666666666, ans=0.0 +2024-07-28 18:45:53,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=190882.66666666666, ans=0.2 +2024-07-28 18:46:01,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=190909.33333333334, ans=0.04949747468305833 +2024-07-28 18:46:07,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.683e+01 6.465e+01 7.180e+01 1.067e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 18:46:21,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=190922.66666666666, ans=0.125 +2024-07-28 18:46:31,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=190936.0, ans=0.0 +2024-07-28 18:46:33,569 INFO [train.py:1114] (1/4) Epoch 15, batch 100, loss[loss=0.1812, simple_loss=0.2757, pruned_loss=0.04333, over 4645.00 frames. 
], tot_loss[loss=0.1877, simple_loss=0.2777, pruned_loss=0.04879, over 365633.48 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:46:36,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=190949.33333333334, ans=0.2 +2024-07-28 18:46:41,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=190962.66666666666, ans=0.0 +2024-07-28 18:47:07,596 INFO [train.py:1114] (1/4) Epoch 15, batch 150, loss[loss=0.1704, simple_loss=0.2487, pruned_loss=0.04604, over 4621.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2753, pruned_loss=0.04766, over 494264.91 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:47:09,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=191016.0, ans=0.125 +2024-07-28 18:47:13,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=191029.33333333334, ans=0.1 +2024-07-28 18:47:16,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.25 vs. limit=12.0 +2024-07-28 18:47:25,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=191042.66666666666, ans=0.125 +2024-07-28 18:47:28,155 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.436e+01 5.988e+01 6.579e+01 9.241e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 18:47:40,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=191069.33333333334, ans=0.2 +2024-07-28 18:47:42,918 INFO [train.py:1114] (1/4) Epoch 15, batch 200, loss[loss=0.2111, simple_loss=0.3025, pruned_loss=0.05981, over 4479.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2725, pruned_loss=0.04669, over 593701.21 frames. ], batch size: 21, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:47:45,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191082.66666666666, ans=0.1 +2024-07-28 18:47:56,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=12.0 +2024-07-28 18:48:04,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191122.66666666666, ans=0.1 +2024-07-28 18:48:17,637 INFO [train.py:1114] (1/4) Epoch 15, batch 250, loss[loss=0.1908, simple_loss=0.2784, pruned_loss=0.05157, over 4662.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2724, pruned_loss=0.0468, over 670283.66 frames. ], batch size: 16, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:48:26,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=191162.66666666666, ans=0.0 +2024-07-28 18:48:32,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.70 vs. 
limit=15.0 +2024-07-28 18:48:35,399 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:48:36,565 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.893e+01 6.659e+01 7.264e+01 1.310e+02, threshold=1.332e+02, percent-clipped=1.0 +2024-07-28 18:48:36,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.80 vs. limit=15.0 +2024-07-28 18:48:41,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=191189.33333333334, ans=0.125 +2024-07-28 18:48:49,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=191202.66666666666, ans=0.1 +2024-07-28 18:48:51,301 INFO [train.py:1114] (1/4) Epoch 15, batch 300, loss[loss=0.1971, simple_loss=0.2981, pruned_loss=0.04803, over 4795.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2706, pruned_loss=0.04638, over 729987.04 frames. ], batch size: 15, lr: 5.06e-03, grad_scale: 64.0 +2024-07-28 18:48:54,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191216.0, ans=0.1 +2024-07-28 18:49:09,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191242.66666666666, ans=0.125 +2024-07-28 18:49:12,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.42 vs. limit=12.0 +2024-07-28 18:49:14,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.90 vs. limit=10.0 +2024-07-28 18:49:15,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=191256.0, ans=0.2 +2024-07-28 18:49:26,578 INFO [train.py:1114] (1/4) Epoch 15, batch 350, loss[loss=0.1594, simple_loss=0.2319, pruned_loss=0.04347, over 4936.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2713, pruned_loss=0.04637, over 776071.55 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 64.0 +2024-07-28 18:49:31,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.83 vs. limit=22.5 +2024-07-28 18:49:40,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.18 vs. limit=22.5 +2024-07-28 18:49:45,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=191309.33333333334, ans=0.95 +2024-07-28 18:49:54,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.48 vs. 
limit=15.0 +2024-07-28 18:49:55,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191309.33333333334, ans=0.125 +2024-07-28 18:49:56,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.550e+01 6.008e+01 7.215e+01 1.087e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 18:49:57,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191322.66666666666, ans=0.125 +2024-07-28 18:50:07,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=191336.0, ans=0.025 +2024-07-28 18:50:10,471 INFO [train.py:1114] (1/4) Epoch 15, batch 400, loss[loss=0.202, simple_loss=0.299, pruned_loss=0.05246, over 4699.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2703, pruned_loss=0.04592, over 813400.23 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:50:11,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=191349.33333333334, ans=0.0 +2024-07-28 18:50:13,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=191349.33333333334, ans=0.05 +2024-07-28 18:50:17,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=191362.66666666666, ans=0.07 +2024-07-28 18:50:29,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=191376.0, ans=0.07 +2024-07-28 18:50:43,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191402.66666666666, ans=0.125 +2024-07-28 18:50:44,703 INFO [train.py:1114] (1/4) Epoch 15, batch 450, loss[loss=0.1688, simple_loss=0.2522, pruned_loss=0.04272, over 4640.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2707, pruned_loss=0.04605, over 838444.42 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:50:48,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191416.0, ans=0.0 +2024-07-28 18:50:51,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=191429.33333333334, ans=0.0 +2024-07-28 18:51:01,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.05 vs. limit=15.0 +2024-07-28 18:51:02,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=191442.66666666666, ans=0.0 +2024-07-28 18:51:03,915 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.595e+01 6.045e+01 6.958e+01 9.344e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 18:51:18,075 INFO [train.py:1114] (1/4) Epoch 15, batch 500, loss[loss=0.1863, simple_loss=0.2636, pruned_loss=0.05446, over 4686.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2693, pruned_loss=0.0456, over 860847.62 frames. 
], batch size: 15, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:51:24,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=191482.66666666666, ans=0.125 +2024-07-28 18:51:32,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=191496.0, ans=0.025 +2024-07-28 18:51:34,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=191509.33333333334, ans=0.0 +2024-07-28 18:51:34,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=191509.33333333334, ans=0.0 +2024-07-28 18:51:34,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=191509.33333333334, ans=0.125 +2024-07-28 18:51:45,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=191522.66666666666, ans=0.2 +2024-07-28 18:51:47,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191536.0, ans=0.0 +2024-07-28 18:51:47,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=191536.0, ans=0.0 +2024-07-28 18:51:48,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=191536.0, ans=0.0 +2024-07-28 18:51:49,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191536.0, ans=0.125 +2024-07-28 18:51:54,343 INFO [train.py:1114] (1/4) Epoch 15, batch 550, loss[loss=0.1851, simple_loss=0.2776, pruned_loss=0.04635, over 4637.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2702, pruned_loss=0.04561, over 876995.20 frames. ], batch size: 17, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:51:57,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=191549.33333333334, ans=0.125 +2024-07-28 18:51:57,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191549.33333333334, ans=0.1 +2024-07-28 18:52:06,063 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.57 vs. limit=22.5 +2024-07-28 18:52:08,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=191576.0, ans=0.0 +2024-07-28 18:52:10,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191576.0, ans=0.1 +2024-07-28 18:52:13,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.04 vs. 
limit=15.0 +2024-07-28 18:52:13,586 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.498e+01 5.942e+01 6.485e+01 9.965e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 18:52:14,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=191589.33333333334, ans=0.125 +2024-07-28 18:52:27,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=191616.0, ans=0.125 +2024-07-28 18:52:27,549 INFO [train.py:1114] (1/4) Epoch 15, batch 600, loss[loss=0.1892, simple_loss=0.2853, pruned_loss=0.04653, over 4598.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2695, pruned_loss=0.04563, over 891633.85 frames. ], batch size: 16, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:52:30,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=191616.0, ans=0.125 +2024-07-28 18:52:37,969 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.56 vs. limit=6.0 +2024-07-28 18:52:39,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191629.33333333334, ans=0.125 +2024-07-28 18:52:40,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191642.66666666666, ans=0.125 +2024-07-28 18:52:41,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=191642.66666666666, ans=0.05 +2024-07-28 18:52:42,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=191642.66666666666, ans=0.125 +2024-07-28 18:53:03,044 INFO [train.py:1114] (1/4) Epoch 15, batch 650, loss[loss=0.1527, simple_loss=0.2576, pruned_loss=0.02385, over 4756.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2704, pruned_loss=0.04624, over 903294.10 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:53:09,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=191696.0, ans=0.0 +2024-07-28 18:53:10,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=191696.0, ans=0.2 +2024-07-28 18:53:12,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=191696.0, ans=0.0 +2024-07-28 18:53:13,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=191696.0, ans=0.025 +2024-07-28 18:53:21,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=191709.33333333334, ans=0.125 +2024-07-28 18:53:22,451 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.431e+01 6.039e+01 6.829e+01 9.137e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 18:53:27,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=191722.66666666666, ans=0.2 +2024-07-28 18:53:34,437 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. 
limit=15.0 +2024-07-28 18:53:36,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=191736.0, ans=0.0 +2024-07-28 18:53:38,717 INFO [train.py:1114] (1/4) Epoch 15, batch 700, loss[loss=0.1799, simple_loss=0.275, pruned_loss=0.04237, over 4642.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2714, pruned_loss=0.04609, over 911182.36 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:53:44,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=191762.66666666666, ans=0.0 +2024-07-28 18:53:46,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=191762.66666666666, ans=0.025 +2024-07-28 18:53:49,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191762.66666666666, ans=0.1 +2024-07-28 18:53:57,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=191776.0, ans=0.0 +2024-07-28 18:53:59,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=191789.33333333334, ans=0.025 +2024-07-28 18:54:03,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=191789.33333333334, ans=0.125 +2024-07-28 18:54:11,819 INFO [train.py:1114] (1/4) Epoch 15, batch 750, loss[loss=0.1766, simple_loss=0.2669, pruned_loss=0.04318, over 4694.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04598, over 917723.35 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:54:12,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=191816.0, ans=0.0 +2024-07-28 18:54:12,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=191816.0, ans=0.0 +2024-07-28 18:54:13,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=191816.0, ans=0.125 +2024-07-28 18:54:21,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=191829.33333333334, ans=0.95 +2024-07-28 18:54:23,403 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.88 vs. limit=15.0 +2024-07-28 18:54:31,056 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.222e+01 5.661e+01 6.305e+01 7.556e+01 1.211e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 18:54:35,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.22 vs. limit=6.0 +2024-07-28 18:54:47,076 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:54:48,289 INFO [train.py:1114] (1/4) Epoch 15, batch 800, loss[loss=0.1711, simple_loss=0.2657, pruned_loss=0.03824, over 4860.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2708, pruned_loss=0.04579, over 923167.74 frames. 
], batch size: 12, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:55:11,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=191922.66666666666, ans=10.0 +2024-07-28 18:55:23,935 INFO [train.py:1114] (1/4) Epoch 15, batch 850, loss[loss=0.194, simple_loss=0.293, pruned_loss=0.04753, over 4664.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2703, pruned_loss=0.0456, over 927374.29 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 18:55:34,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=191962.66666666666, ans=0.07 +2024-07-28 18:55:34,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=191962.66666666666, ans=0.09899494936611666 +2024-07-28 18:55:40,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191976.0, ans=0.125 +2024-07-28 18:55:41,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=15.0 +2024-07-28 18:55:43,398 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.257e+01 5.494e+01 6.079e+01 6.525e+01 1.058e+02, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 19:01:32,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=192002.66666666666, ans=0.0 +2024-07-28 19:01:39,933 INFO [train.py:1114] (1/4) Epoch 15, batch 900, loss[loss=0.1717, simple_loss=0.2525, pruned_loss=0.04547, over 4858.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2712, pruned_loss=0.0458, over 928307.42 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:01:43,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192016.0, ans=0.0 +2024-07-28 19:01:50,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0 +2024-07-28 19:01:54,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=192042.66666666666, ans=0.125 +2024-07-28 19:02:13,183 INFO [train.py:1114] (1/4) Epoch 15, batch 950, loss[loss=0.1842, simple_loss=0.2743, pruned_loss=0.0471, over 4778.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2708, pruned_loss=0.04564, over 930060.88 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:13,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192082.66666666666, ans=0.0 +2024-07-28 19:02:18,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.89 vs. 
limit=15.0 +2024-07-28 19:02:27,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=192109.33333333334, ans=0.025 +2024-07-28 19:02:30,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=192109.33333333334, ans=0.125 +2024-07-28 19:02:30,684 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=15.0 +2024-07-28 19:02:33,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=192109.33333333334, ans=0.95 +2024-07-28 19:02:34,366 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.740e+01 6.341e+01 7.414e+01 2.683e+02, threshold=1.268e+02, percent-clipped=1.0 +2024-07-28 19:02:41,321 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:02:46,121 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.38 vs. limit=22.5 +2024-07-28 19:02:48,487 INFO [train.py:1114] (1/4) Epoch 15, batch 1000, loss[loss=0.1388, simple_loss=0.2305, pruned_loss=0.02354, over 4957.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2714, pruned_loss=0.04597, over 929884.85 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:50,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=192149.33333333334, ans=0.0 +2024-07-28 19:02:53,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=192149.33333333334, ans=0.025 +2024-07-28 19:02:54,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0 +2024-07-28 19:03:08,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=192189.33333333334, ans=0.2 +2024-07-28 19:03:10,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=192189.33333333334, ans=0.025 +2024-07-28 19:03:11,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=192189.33333333334, ans=0.125 +2024-07-28 19:03:22,057 INFO [train.py:1114] (1/4) Epoch 15, batch 1050, loss[loss=0.1868, simple_loss=0.2847, pruned_loss=0.04449, over 4872.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2708, pruned_loss=0.04551, over 932126.69 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:29,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. 
limit=15.0 +2024-07-28 19:03:38,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=192242.66666666666, ans=0.125 +2024-07-28 19:03:40,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=192242.66666666666, ans=0.0 +2024-07-28 19:03:41,201 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.436e+01 6.028e+01 6.736e+01 8.653e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:03:42,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=15.0 +2024-07-28 19:03:54,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=192269.33333333334, ans=0.0 +2024-07-28 19:03:55,266 INFO [train.py:1114] (1/4) Epoch 15, batch 1100, loss[loss=0.1978, simple_loss=0.2861, pruned_loss=0.05472, over 4886.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2706, pruned_loss=0.04564, over 934556.03 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:56,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=192282.66666666666, ans=0.125 +2024-07-28 19:04:00,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=192282.66666666666, ans=0.5 +2024-07-28 19:04:03,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=192296.0, ans=0.125 +2024-07-28 19:04:07,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=192309.33333333334, ans=0.125 +2024-07-28 19:04:16,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=192322.66666666666, ans=0.0 +2024-07-28 19:04:30,211 INFO [train.py:1114] (1/4) Epoch 15, batch 1150, loss[loss=0.1749, simple_loss=0.2575, pruned_loss=0.04614, over 4896.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2714, pruned_loss=0.04628, over 934332.56 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:04:30,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=192349.33333333334, ans=0.0 +2024-07-28 19:04:30,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=192349.33333333334, ans=0.07 +2024-07-28 19:04:38,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192362.66666666666, ans=0.1 +2024-07-28 19:04:51,446 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.625e+01 6.208e+01 7.192e+01 1.002e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 19:04:56,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=192389.33333333334, ans=0.125 +2024-07-28 19:05:05,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.49 vs. 
limit=22.5 +2024-07-28 19:05:05,556 INFO [train.py:1114] (1/4) Epoch 15, batch 1200, loss[loss=0.1871, simple_loss=0.2762, pruned_loss=0.04903, over 4872.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2721, pruned_loss=0.04653, over 933500.97 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:06,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-07-28 19:05:19,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=192442.66666666666, ans=0.0 +2024-07-28 19:05:27,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=192456.0, ans=0.2 +2024-07-28 19:05:32,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=192469.33333333334, ans=0.125 +2024-07-28 19:05:38,619 INFO [train.py:1114] (1/4) Epoch 15, batch 1250, loss[loss=0.1638, simple_loss=0.2554, pruned_loss=0.03615, over 4806.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2717, pruned_loss=0.04616, over 937582.93 frames. ], batch size: 15, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:42,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192482.66666666666, ans=0.1 +2024-07-28 19:05:42,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=192482.66666666666, ans=0.125 +2024-07-28 19:05:56,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=192509.33333333334, ans=0.125 +2024-07-28 19:05:57,774 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.598e+01 6.184e+01 7.240e+01 1.147e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 19:06:13,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.71 vs. limit=15.0 +2024-07-28 19:06:14,163 INFO [train.py:1114] (1/4) Epoch 15, batch 1300, loss[loss=0.1956, simple_loss=0.2774, pruned_loss=0.05687, over 4634.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2713, pruned_loss=0.04631, over 938976.33 frames. 
], batch size: 19, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:16,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=192549.33333333334, ans=0.2 +2024-07-28 19:06:16,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=192549.33333333334, ans=0.05 +2024-07-28 19:06:18,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=192549.33333333334, ans=0.0 +2024-07-28 19:06:20,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=192562.66666666666, ans=0.0 +2024-07-28 19:06:22,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=192562.66666666666, ans=0.0 +2024-07-28 19:06:24,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192562.66666666666, ans=0.1 +2024-07-28 19:06:31,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192576.0, ans=0.125 +2024-07-28 19:06:46,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=192602.66666666666, ans=0.125 +2024-07-28 19:06:47,772 INFO [train.py:1114] (1/4) Epoch 15, batch 1350, loss[loss=0.1538, simple_loss=0.2564, pruned_loss=0.02559, over 4754.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2715, pruned_loss=0.04575, over 941340.57 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:58,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=192629.33333333334, ans=0.0 +2024-07-28 19:07:00,323 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.01 vs. limit=15.0 +2024-07-28 19:07:04,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192642.66666666666, ans=0.1 +2024-07-28 19:07:07,203 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.559e+01 6.171e+01 7.538e+01 1.379e+02, threshold=1.234e+02, percent-clipped=1.0 +2024-07-28 19:07:12,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=192656.0, ans=22.5 +2024-07-28 19:07:14,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=192669.33333333334, ans=0.0 +2024-07-28 19:07:17,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=192669.33333333334, ans=0.2 +2024-07-28 19:07:21,208 INFO [train.py:1114] (1/4) Epoch 15, batch 1400, loss[loss=0.164, simple_loss=0.2518, pruned_loss=0.03812, over 4710.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2707, pruned_loss=0.04545, over 942776.15 frames. 
], batch size: 11, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:07:23,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=192682.66666666666, ans=10.0 +2024-07-28 19:07:34,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.68 vs. limit=22.5 +2024-07-28 19:07:35,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=192709.33333333334, ans=0.0 +2024-07-28 19:07:42,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=192722.66666666666, ans=0.0 +2024-07-28 19:07:44,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=192722.66666666666, ans=0.0 +2024-07-28 19:07:51,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=192736.0, ans=0.125 +2024-07-28 19:07:52,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=192736.0, ans=12.0 +2024-07-28 19:07:56,456 INFO [train.py:1114] (1/4) Epoch 15, batch 1450, loss[loss=0.2041, simple_loss=0.2901, pruned_loss=0.05908, over 4655.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2704, pruned_loss=0.04521, over 942763.47 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:08:01,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=192749.33333333334, ans=0.125 +2024-07-28 19:08:13,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. limit=6.0 +2024-07-28 19:08:15,576 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+01 5.633e+01 5.995e+01 6.598e+01 8.860e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 19:08:29,690 INFO [train.py:1114] (1/4) Epoch 15, batch 1500, loss[loss=0.2055, simple_loss=0.3057, pruned_loss=0.05265, over 4801.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2712, pruned_loss=0.04524, over 942328.92 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:08:36,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=192829.33333333334, ans=0.2 +2024-07-28 19:08:36,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.53 vs. limit=10.0 +2024-07-28 19:08:47,701 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.74 vs. limit=10.0 +2024-07-28 19:08:48,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=192842.66666666666, ans=0.2 +2024-07-28 19:08:49,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=192856.0, ans=10.0 +2024-07-28 19:09:03,668 INFO [train.py:1114] (1/4) Epoch 15, batch 1550, loss[loss=0.1822, simple_loss=0.2766, pruned_loss=0.04385, over 4895.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2712, pruned_loss=0.04555, over 938964.82 frames. 
], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:09,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=192882.66666666666, ans=0.125 +2024-07-28 19:09:20,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=192909.33333333334, ans=0.95 +2024-07-28 19:09:20,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192909.33333333334, ans=0.1 +2024-07-28 19:09:22,639 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:09:23,056 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.711e+01 6.284e+01 7.073e+01 1.043e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 19:09:32,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=192936.0, ans=0.125 +2024-07-28 19:09:39,403 INFO [train.py:1114] (1/4) Epoch 15, batch 1600, loss[loss=0.1635, simple_loss=0.2622, pruned_loss=0.03236, over 4879.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2709, pruned_loss=0.04555, over 937103.19 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:59,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=192989.33333333334, ans=0.1 +2024-07-28 19:10:01,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.15 vs. limit=15.0 +2024-07-28 19:10:12,563 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.27 vs. limit=15.0 +2024-07-28 19:10:14,696 INFO [train.py:1114] (1/4) Epoch 15, batch 1650, loss[loss=0.1638, simple_loss=0.2537, pruned_loss=0.03694, over 4661.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04599, over 936449.95 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:10:19,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=193016.0, ans=0.0 +2024-07-28 19:10:33,849 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 5.642e+01 6.032e+01 7.016e+01 1.079e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:10:37,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=193056.0, ans=15.0 +2024-07-28 19:10:41,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=193069.33333333334, ans=0.2 +2024-07-28 19:10:47,876 INFO [train.py:1114] (1/4) Epoch 15, batch 1700, loss[loss=0.1777, simple_loss=0.2555, pruned_loss=0.04991, over 4694.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2711, pruned_loss=0.04543, over 938342.65 frames. 
], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:06,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=193109.33333333334, ans=0.2 +2024-07-28 19:11:07,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193122.66666666666, ans=0.0 +2024-07-28 19:11:08,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=193122.66666666666, ans=0.125 +2024-07-28 19:11:12,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193122.66666666666, ans=0.1 +2024-07-28 19:11:17,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=193136.0, ans=0.05 +2024-07-28 19:11:19,640 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.03 vs. limit=22.5 +2024-07-28 19:11:20,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193136.0, ans=0.125 +2024-07-28 19:11:21,432 INFO [train.py:1114] (1/4) Epoch 15, batch 1750, loss[loss=0.1394, simple_loss=0.2197, pruned_loss=0.02959, over 4797.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2709, pruned_loss=0.04578, over 939865.29 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:22,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=12.0 +2024-07-28 19:11:24,540 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.62 vs. limit=15.0 +2024-07-28 19:11:34,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=193162.66666666666, ans=0.0 +2024-07-28 19:11:35,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.46 vs. limit=15.0 +2024-07-28 19:11:42,913 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.964e+01 5.710e+01 6.387e+01 7.487e+01 1.072e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 19:11:44,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.67 vs. limit=22.5 +2024-07-28 19:11:55,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=193202.66666666666, ans=15.0 +2024-07-28 19:11:56,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=193216.0, ans=0.125 +2024-07-28 19:11:56,802 INFO [train.py:1114] (1/4) Epoch 15, batch 1800, loss[loss=0.1573, simple_loss=0.2478, pruned_loss=0.03334, over 4644.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.271, pruned_loss=0.04629, over 940694.64 frames. 
], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:12:01,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=193216.0, ans=0.0 +2024-07-28 19:12:02,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=193216.0, ans=0.125 +2024-07-28 19:12:02,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193229.33333333334, ans=0.1 +2024-07-28 19:12:16,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193256.0, ans=0.125 +2024-07-28 19:12:30,406 INFO [train.py:1114] (1/4) Epoch 15, batch 1850, loss[loss=0.2166, simple_loss=0.2907, pruned_loss=0.07121, over 4812.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2706, pruned_loss=0.0463, over 941219.36 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:12:32,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=193282.66666666666, ans=0.09899494936611666 +2024-07-28 19:12:40,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=193296.0, ans=0.125 +2024-07-28 19:12:47,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.27 vs. limit=15.0 +2024-07-28 19:12:50,209 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.579e+01 6.374e+01 7.062e+01 1.422e+02, threshold=1.275e+02, percent-clipped=2.0 +2024-07-28 19:12:55,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=193322.66666666666, ans=0.2 +2024-07-28 19:13:26,482 INFO [train.py:1114] (1/4) Epoch 15, batch 1900, loss[loss=0.1825, simple_loss=0.2744, pruned_loss=0.04531, over 4662.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2707, pruned_loss=0.04607, over 942439.46 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:13:42,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.32 vs. limit=10.0 +2024-07-28 19:13:52,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=193389.33333333334, ans=0.5 +2024-07-28 19:14:01,207 INFO [train.py:1114] (1/4) Epoch 15, batch 1950, loss[loss=0.1775, simple_loss=0.2635, pruned_loss=0.04582, over 4894.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2726, pruned_loss=0.04634, over 944296.36 frames. ], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:14:15,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=193442.66666666666, ans=0.0 +2024-07-28 19:14:20,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.675e+01 6.115e+01 6.828e+01 9.814e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 19:14:34,640 INFO [train.py:1114] (1/4) Epoch 15, batch 2000, loss[loss=0.1476, simple_loss=0.2328, pruned_loss=0.0312, over 4811.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2729, pruned_loss=0.04645, over 941833.69 frames. 
], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:14:38,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193482.66666666666, ans=0.1 +2024-07-28 19:14:49,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193509.33333333334, ans=0.0 +2024-07-28 19:14:51,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=193509.33333333334, ans=0.125 +2024-07-28 19:14:52,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=193509.33333333334, ans=0.2 +2024-07-28 19:15:04,433 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.23 vs. limit=15.0 +2024-07-28 19:15:07,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.61 vs. limit=22.5 +2024-07-28 19:15:08,815 INFO [train.py:1114] (1/4) Epoch 15, batch 2050, loss[loss=0.1562, simple_loss=0.2355, pruned_loss=0.03847, over 4619.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2722, pruned_loss=0.0463, over 939903.92 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:14,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.54 vs. limit=10.0 +2024-07-28 19:15:18,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=193562.66666666666, ans=0.125 +2024-07-28 19:15:22,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=15.0 +2024-07-28 19:15:27,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193576.0, ans=0.125 +2024-07-28 19:15:31,315 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.698e+01 6.282e+01 7.137e+01 1.040e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 19:15:45,947 INFO [train.py:1114] (1/4) Epoch 15, batch 2100, loss[loss=0.1777, simple_loss=0.2736, pruned_loss=0.04088, over 4761.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2716, pruned_loss=0.04573, over 941609.78 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:48,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193616.0, ans=0.125 +2024-07-28 19:15:51,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193616.0, ans=0.1 +2024-07-28 19:16:06,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.99 vs. 
limit=15.0 +2024-07-28 19:16:11,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=193656.0, ans=0.2 +2024-07-28 19:16:13,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=193656.0, ans=0.125 +2024-07-28 19:16:20,809 INFO [train.py:1114] (1/4) Epoch 15, batch 2150, loss[loss=0.1968, simple_loss=0.2801, pruned_loss=0.05677, over 4897.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2706, pruned_loss=0.04578, over 944403.82 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:24,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193682.66666666666, ans=0.1 +2024-07-28 19:16:31,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.59 vs. limit=15.0 +2024-07-28 19:16:32,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193696.0, ans=0.125 +2024-07-28 19:16:32,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193696.0, ans=0.1 +2024-07-28 19:16:38,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=193709.33333333334, ans=0.125 +2024-07-28 19:16:38,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-07-28 19:16:40,102 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.589e+01 6.207e+01 7.234e+01 9.865e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 19:16:41,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=193722.66666666666, ans=0.025 +2024-07-28 19:16:43,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=193722.66666666666, ans=0.125 +2024-07-28 19:16:43,842 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=15.0 +2024-07-28 19:16:45,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=193722.66666666666, ans=0.0 +2024-07-28 19:16:47,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=193736.0, ans=0.2 +2024-07-28 19:16:51,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=193736.0, ans=0.0 +2024-07-28 19:16:54,161 INFO [train.py:1114] (1/4) Epoch 15, batch 2200, loss[loss=0.1906, simple_loss=0.2835, pruned_loss=0.04888, over 4809.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04561, over 943332.14 frames. 
], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:59,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=193749.33333333334, ans=0.2 +2024-07-28 19:17:00,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.41 vs. limit=15.0 +2024-07-28 19:17:00,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=193762.66666666666, ans=0.05 +2024-07-28 19:17:42,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=193816.0, ans=0.025 +2024-07-28 19:17:43,135 INFO [train.py:1114] (1/4) Epoch 15, batch 2250, loss[loss=0.1837, simple_loss=0.2727, pruned_loss=0.04734, over 4686.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2711, pruned_loss=0.04612, over 941485.72 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:18:02,476 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.434e+01 5.887e+01 6.714e+01 1.189e+02, threshold=1.177e+02, percent-clipped=0.0 +2024-07-28 19:18:05,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=193856.0, ans=0.025 +2024-07-28 19:18:16,463 INFO [train.py:1114] (1/4) Epoch 15, batch 2300, loss[loss=0.1603, simple_loss=0.2439, pruned_loss=0.03839, over 4934.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2703, pruned_loss=0.04615, over 939394.58 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:18:17,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=193882.66666666666, ans=0.0 +2024-07-28 19:18:23,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=193896.0, ans=0.07 +2024-07-28 19:18:29,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=193909.33333333334, ans=0.0 +2024-07-28 19:18:39,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193922.66666666666, ans=0.125 +2024-07-28 19:18:39,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.05 vs. limit=15.0 +2024-07-28 19:18:40,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=193922.66666666666, ans=0.125 +2024-07-28 19:18:49,534 INFO [train.py:1114] (1/4) Epoch 15, batch 2350, loss[loss=0.1937, simple_loss=0.2879, pruned_loss=0.04978, over 4635.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2697, pruned_loss=0.04589, over 941670.30 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:01,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.39 vs. 
limit=6.0 +2024-07-28 19:19:06,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193976.0, ans=0.1 +2024-07-28 19:19:13,027 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.729e+01 6.304e+01 7.186e+01 9.939e+01, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 19:19:15,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=193989.33333333334, ans=0.2 +2024-07-28 19:19:15,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=193989.33333333334, ans=0.125 +2024-07-28 19:19:20,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=193989.33333333334, ans=0.2 +2024-07-28 19:19:22,451 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=15.0 +2024-07-28 19:19:27,911 INFO [train.py:1114] (1/4) Epoch 15, batch 2400, loss[loss=0.1447, simple_loss=0.2354, pruned_loss=0.02695, over 4647.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2703, pruned_loss=0.04623, over 941382.47 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:39,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=194029.33333333334, ans=0.125 +2024-07-28 19:19:43,847 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0 +2024-07-28 19:19:44,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=194042.66666666666, ans=0.2 +2024-07-28 19:19:52,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=194056.0, ans=0.0 +2024-07-28 19:19:55,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=194069.33333333334, ans=0.025 +2024-07-28 19:19:57,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=194069.33333333334, ans=0.125 +2024-07-28 19:20:00,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.06 vs. limit=10.0 +2024-07-28 19:20:01,299 INFO [train.py:1114] (1/4) Epoch 15, batch 2450, loss[loss=0.1683, simple_loss=0.2563, pruned_loss=0.04016, over 4702.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2709, pruned_loss=0.04625, over 937382.30 frames. 
], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:01,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=194082.66666666666, ans=0.0 +2024-07-28 19:20:21,699 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.395e+01 5.722e+01 6.164e+01 6.844e+01 9.609e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 19:20:33,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194136.0, ans=0.125 +2024-07-28 19:20:35,243 INFO [train.py:1114] (1/4) Epoch 15, batch 2500, loss[loss=0.1832, simple_loss=0.2769, pruned_loss=0.04475, over 4811.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04605, over 939222.94 frames. ], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:41,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=194162.66666666666, ans=0.2 +2024-07-28 19:20:47,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=194176.0, ans=0.125 +2024-07-28 19:20:48,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-07-28 19:20:52,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=194176.0, ans=0.025 +2024-07-28 19:21:02,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=194202.66666666666, ans=0.0 +2024-07-28 19:21:02,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=194202.66666666666, ans=0.025 +2024-07-28 19:21:03,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=194202.66666666666, ans=0.125 +2024-07-28 19:21:04,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=194202.66666666666, ans=0.125 +2024-07-28 19:21:08,113 INFO [train.py:1114] (1/4) Epoch 15, batch 2550, loss[loss=0.1582, simple_loss=0.2376, pruned_loss=0.03939, over 4789.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2706, pruned_loss=0.04587, over 938687.42 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:21:11,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 19:21:17,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=194229.33333333334, ans=15.0 +2024-07-28 19:21:20,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=194229.33333333334, ans=0.0 +2024-07-28 19:21:21,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.76 vs. 
limit=22.5 +2024-07-28 19:21:26,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194242.66666666666, ans=0.1 +2024-07-28 19:21:30,729 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.497e+01 6.084e+01 7.068e+01 9.259e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 19:21:37,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=194256.0, ans=0.1 +2024-07-28 19:21:39,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=194269.33333333334, ans=0.0 +2024-07-28 19:21:40,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. limit=15.0 +2024-07-28 19:21:45,670 INFO [train.py:1114] (1/4) Epoch 15, batch 2600, loss[loss=0.1721, simple_loss=0.2691, pruned_loss=0.03754, over 4900.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2716, pruned_loss=0.04645, over 937933.36 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:21:54,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=194296.0, ans=0.0 +2024-07-28 19:22:10,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194322.66666666666, ans=0.125 +2024-07-28 19:22:13,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=194336.0, ans=0.0 +2024-07-28 19:22:17,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. limit=6.0 +2024-07-28 19:22:19,324 INFO [train.py:1114] (1/4) Epoch 15, batch 2650, loss[loss=0.1704, simple_loss=0.2632, pruned_loss=0.03883, over 4638.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2722, pruned_loss=0.04654, over 939825.41 frames. ], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:22:28,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=194362.66666666666, ans=0.125 +2024-07-28 19:22:41,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=194376.0, ans=0.125 +2024-07-28 19:22:43,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.437e+01 6.145e+01 6.904e+01 9.658e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 19:22:43,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194389.33333333334, ans=0.1 +2024-07-28 19:22:48,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.70 vs. limit=15.0 +2024-07-28 19:22:58,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=194402.66666666666, ans=0.0 +2024-07-28 19:23:00,531 INFO [train.py:1114] (1/4) Epoch 15, batch 2700, loss[loss=0.1951, simple_loss=0.2954, pruned_loss=0.04741, over 4732.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2717, pruned_loss=0.04624, over 939352.90 frames. 
], batch size: 14, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:03,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194416.0, ans=0.1 +2024-07-28 19:23:14,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.21 vs. limit=15.0 +2024-07-28 19:23:29,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=194469.33333333334, ans=0.2 +2024-07-28 19:23:29,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=194469.33333333334, ans=0.125 +2024-07-28 19:23:34,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=194469.33333333334, ans=0.0 +2024-07-28 19:23:35,795 INFO [train.py:1114] (1/4) Epoch 15, batch 2750, loss[loss=0.1931, simple_loss=0.2872, pruned_loss=0.04952, over 4711.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2712, pruned_loss=0.04628, over 939879.23 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:36,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.95 vs. limit=22.5 +2024-07-28 19:23:43,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=194496.0, ans=0.0 +2024-07-28 19:23:47,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=194496.0, ans=0.125 +2024-07-28 19:23:47,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194496.0, ans=0.1 +2024-07-28 19:23:54,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194509.33333333334, ans=0.0 +2024-07-28 19:23:56,140 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.701e+01 6.363e+01 7.329e+01 1.129e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 19:23:57,247 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.68 vs. limit=15.0 +2024-07-28 19:24:10,271 INFO [train.py:1114] (1/4) Epoch 15, batch 2800, loss[loss=0.2001, simple_loss=0.2841, pruned_loss=0.05803, over 3433.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2713, pruned_loss=0.04653, over 937684.07 frames. ], batch size: 35, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:29,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=15.0 +2024-07-28 19:24:39,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=194589.33333333334, ans=0.1 +2024-07-28 19:24:48,361 INFO [train.py:1114] (1/4) Epoch 15, batch 2850, loss[loss=0.1619, simple_loss=0.2456, pruned_loss=0.03907, over 4961.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2715, pruned_loss=0.04684, over 935684.61 frames. 
], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:52,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=194616.0, ans=0.0 +2024-07-28 19:25:02,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=194629.33333333334, ans=0.2 +2024-07-28 19:25:09,891 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.952e+01 6.499e+01 7.318e+01 1.007e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 19:25:10,749 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:25:11,442 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:25:23,263 INFO [train.py:1114] (1/4) Epoch 15, batch 2900, loss[loss=0.1644, simple_loss=0.2542, pruned_loss=0.03731, over 4828.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2719, pruned_loss=0.04649, over 939594.69 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:25:32,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=194696.0, ans=0.0 +2024-07-28 19:25:45,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=194722.66666666666, ans=0.125 +2024-07-28 19:25:47,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.87 vs. limit=5.0 +2024-07-28 19:25:51,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194736.0, ans=0.1 +2024-07-28 19:25:57,086 INFO [train.py:1114] (1/4) Epoch 15, batch 2950, loss[loss=0.1546, simple_loss=0.2489, pruned_loss=0.03009, over 4692.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04604, over 939040.98 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:26:05,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194762.66666666666, ans=0.1 +2024-07-28 19:26:07,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=194762.66666666666, ans=0.025 +2024-07-28 19:26:17,151 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+01 5.428e+01 5.966e+01 6.720e+01 8.904e+01, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:27:13,683 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:27:15,592 INFO [train.py:1114] (1/4) Epoch 15, batch 3000, loss[loss=0.1756, simple_loss=0.2725, pruned_loss=0.03936, over 4764.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2703, pruned_loss=0.04578, over 938706.50 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:27:15,593 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 19:27:24,153 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.1900, 4.3486, 4.2199, 4.8804], device='cuda:1') +2024-07-28 19:27:27,985 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1635, simple_loss=0.2667, pruned_loss=0.03013, over 944034.00 frames. 
+2024-07-28 19:27:27,986 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 19:27:32,606 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-28 19:27:42,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=194829.33333333334, ans=0.0 +2024-07-28 19:27:43,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=194842.66666666666, ans=0.125 +2024-07-28 19:27:47,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=194842.66666666666, ans=0.125 +2024-07-28 19:27:58,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194869.33333333334, ans=0.1 +2024-07-28 19:27:58,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=194869.33333333334, ans=0.2 +2024-07-28 19:27:59,428 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.60 vs. limit=15.0 +2024-07-28 19:28:04,530 INFO [train.py:1114] (1/4) Epoch 15, batch 3050, loss[loss=0.1631, simple_loss=0.2548, pruned_loss=0.03574, over 4645.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2717, pruned_loss=0.04621, over 937433.25 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:09,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.14 vs. limit=15.0 +2024-07-28 19:28:10,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=194882.66666666666, ans=0.0 +2024-07-28 19:28:15,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.75 vs. limit=10.0 +2024-07-28 19:28:19,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.77 vs. limit=10.0 +2024-07-28 19:28:24,841 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.642e+01 6.422e+01 7.764e+01 9.574e+01, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 19:28:27,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=194922.66666666666, ans=0.0 +2024-07-28 19:28:38,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.03 vs. limit=10.0 +2024-07-28 19:28:41,477 INFO [train.py:1114] (1/4) Epoch 15, batch 3100, loss[loss=0.2266, simple_loss=0.322, pruned_loss=0.06559, over 4624.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2717, pruned_loss=0.04626, over 938301.06 frames. 
], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:50,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=194962.66666666666, ans=0.125 +2024-07-28 19:28:57,236 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:29:03,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.21 vs. limit=22.5 +2024-07-28 19:29:06,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=194989.33333333334, ans=0.2 +2024-07-28 19:29:08,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195002.66666666666, ans=0.125 +2024-07-28 19:29:14,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=195002.66666666666, ans=0.125 +2024-07-28 19:29:17,393 INFO [train.py:1114] (1/4) Epoch 15, batch 3150, loss[loss=0.1684, simple_loss=0.2612, pruned_loss=0.03778, over 4589.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2712, pruned_loss=0.04561, over 938374.25 frames. ], batch size: 17, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:29:43,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195042.66666666666, ans=0.125 +2024-07-28 19:29:45,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=12.0 +2024-07-28 19:29:47,865 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.711e+01 6.349e+01 7.434e+01 1.242e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 19:29:54,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=195069.33333333334, ans=0.125 +2024-07-28 19:29:54,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=195069.33333333334, ans=0.0 +2024-07-28 19:29:57,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=195069.33333333334, ans=0.2 +2024-07-28 19:29:58,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=195069.33333333334, ans=0.2 +2024-07-28 19:30:01,492 INFO [train.py:1114] (1/4) Epoch 15, batch 3200, loss[loss=0.1746, simple_loss=0.2546, pruned_loss=0.04734, over 4829.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2707, pruned_loss=0.04544, over 939471.76 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:26,244 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=195122.66666666666, ans=0.2 +2024-07-28 19:30:34,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.73 vs. limit=15.0 +2024-07-28 19:30:35,968 INFO [train.py:1114] (1/4) Epoch 15, batch 3250, loss[loss=0.1512, simple_loss=0.2507, pruned_loss=0.02587, over 4928.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.271, pruned_loss=0.04537, over 940490.85 frames. 
], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:38,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-07-28 19:30:39,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=195149.33333333334, ans=0.125 +2024-07-28 19:30:51,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=195176.0, ans=0.2 +2024-07-28 19:30:56,189 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.478e+01 5.941e+01 6.673e+01 9.852e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 19:30:57,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.89 vs. limit=10.0 +2024-07-28 19:31:11,844 INFO [train.py:1114] (1/4) Epoch 15, batch 3300, loss[loss=0.2201, simple_loss=0.293, pruned_loss=0.07354, over 4763.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2696, pruned_loss=0.04514, over 940713.89 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:13,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=195216.0, ans=0.09899494936611666 +2024-07-28 19:31:27,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=195242.66666666666, ans=0.2 +2024-07-28 19:31:30,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.10 vs. limit=22.5 +2024-07-28 19:31:32,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=195256.0, ans=0.125 +2024-07-28 19:31:38,644 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:31:44,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=195282.66666666666, ans=10.0 +2024-07-28 19:31:44,970 INFO [train.py:1114] (1/4) Epoch 15, batch 3350, loss[loss=0.1777, simple_loss=0.2756, pruned_loss=0.03988, over 4646.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2697, pruned_loss=0.04494, over 938315.93 frames. ], batch size: 17, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:56,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195296.0, ans=0.125 +2024-07-28 19:32:19,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=195322.66666666666, ans=0.125 +2024-07-28 19:32:19,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.781e+01 6.264e+01 6.966e+01 9.522e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 19:32:34,326 INFO [train.py:1114] (1/4) Epoch 15, batch 3400, loss[loss=0.1934, simple_loss=0.2735, pruned_loss=0.05666, over 4811.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2712, pruned_loss=0.0457, over 937131.91 frames. 
], batch size: 11, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:32:37,196 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.84 vs. limit=15.0 +2024-07-28 19:32:40,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=195362.66666666666, ans=0.0 +2024-07-28 19:32:44,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=195362.66666666666, ans=0.0 +2024-07-28 19:32:55,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=195389.33333333334, ans=0.125 +2024-07-28 19:32:56,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=195389.33333333334, ans=0.125 +2024-07-28 19:33:05,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195402.66666666666, ans=0.1 +2024-07-28 19:33:11,734 INFO [train.py:1114] (1/4) Epoch 15, batch 3450, loss[loss=0.1846, simple_loss=0.2772, pruned_loss=0.04599, over 4711.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2719, pruned_loss=0.04592, over 937435.33 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:11,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=195416.0, ans=0.125 +2024-07-28 19:33:32,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.24 vs. limit=15.0 +2024-07-28 19:33:33,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.667e+01 6.148e+01 6.825e+01 9.914e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 19:33:39,564 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:33:39,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=195456.0, ans=0.0 +2024-07-28 19:33:44,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195469.33333333334, ans=0.125 +2024-07-28 19:33:48,992 INFO [train.py:1114] (1/4) Epoch 15, batch 3500, loss[loss=0.1511, simple_loss=0.2388, pruned_loss=0.03168, over 4941.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2708, pruned_loss=0.04536, over 938319.87 frames. ], batch size: 12, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:54,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195482.66666666666, ans=0.125 +2024-07-28 19:34:13,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=195522.66666666666, ans=0.1 +2024-07-28 19:34:19,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195536.0, ans=0.0 +2024-07-28 19:34:23,069 INFO [train.py:1114] (1/4) Epoch 15, batch 3550, loss[loss=0.1527, simple_loss=0.2538, pruned_loss=0.02578, over 4663.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2706, pruned_loss=0.04543, over 939100.75 frames. 
], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:34:24,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=195549.33333333334, ans=10.0 +2024-07-28 19:34:42,993 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.42 vs. limit=10.0 +2024-07-28 19:34:43,327 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.567e+01 6.296e+01 7.208e+01 1.241e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 19:34:46,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-07-28 19:34:48,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195589.33333333334, ans=0.125 +2024-07-28 19:34:48,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=195589.33333333334, ans=0.025 +2024-07-28 19:34:56,850 INFO [train.py:1114] (1/4) Epoch 15, batch 3600, loss[loss=0.17, simple_loss=0.2539, pruned_loss=0.04303, over 4963.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2704, pruned_loss=0.0453, over 940946.95 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:00,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=195616.0, ans=0.125 +2024-07-28 19:35:11,226 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-28 19:35:14,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=195642.66666666666, ans=0.04949747468305833 +2024-07-28 19:35:24,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=195656.0, ans=0.0 +2024-07-28 19:35:33,390 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=22.5 +2024-07-28 19:35:34,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=195669.33333333334, ans=0.125 +2024-07-28 19:35:35,846 INFO [train.py:1114] (1/4) Epoch 15, batch 3650, loss[loss=0.2118, simple_loss=0.3078, pruned_loss=0.05787, over 4894.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2705, pruned_loss=0.04548, over 941366.43 frames. ], batch size: 15, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:53,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=195696.0, ans=0.0 +2024-07-28 19:35:54,943 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.84 vs. 
limit=15.0 +2024-07-28 19:35:57,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=195696.0, ans=0.125 +2024-07-28 19:35:57,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=195696.0, ans=0.0 +2024-07-28 19:36:05,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=195709.33333333334, ans=0.0 +2024-07-28 19:36:07,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.599e+01 5.794e+01 6.353e+01 7.232e+01 1.193e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 19:36:24,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.74 vs. limit=10.0 +2024-07-28 19:36:26,501 INFO [train.py:1114] (1/4) Epoch 15, batch 3700, loss[loss=0.2134, simple_loss=0.2995, pruned_loss=0.06364, over 4935.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2706, pruned_loss=0.04553, over 942340.33 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:36:30,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=195749.33333333334, ans=0.0 +2024-07-28 19:36:40,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195776.0, ans=0.1 +2024-07-28 19:37:08,288 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:37:12,711 INFO [train.py:1114] (1/4) Epoch 15, batch 3750, loss[loss=0.1797, simple_loss=0.2551, pruned_loss=0.05215, over 4798.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2694, pruned_loss=0.04506, over 943166.65 frames. ], batch size: 11, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:37:27,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=195816.0, ans=0.025 +2024-07-28 19:37:31,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=195816.0, ans=0.125 +2024-07-28 19:37:32,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=195816.0, ans=0.0 +2024-07-28 19:37:38,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195829.33333333334, ans=0.0 +2024-07-28 19:37:46,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=195856.0, ans=0.125 +2024-07-28 19:37:47,139 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.420e+01 5.433e+01 6.069e+01 6.768e+01 1.859e+02, threshold=1.214e+02, percent-clipped=1.0 +2024-07-28 19:38:00,623 INFO [train.py:1114] (1/4) Epoch 15, batch 3800, loss[loss=0.1619, simple_loss=0.2593, pruned_loss=0.03224, over 4816.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2691, pruned_loss=0.04528, over 941099.55 frames. 
], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:04,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=195882.66666666666, ans=0.04949747468305833 +2024-07-28 19:38:06,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=195882.66666666666, ans=0.125 +2024-07-28 19:38:06,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 19:38:13,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=195909.33333333334, ans=0.125 +2024-07-28 19:38:19,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195909.33333333334, ans=0.1 +2024-07-28 19:38:31,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=195936.0, ans=0.0 +2024-07-28 19:38:35,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=195949.33333333334, ans=0.0 +2024-07-28 19:38:35,869 INFO [train.py:1114] (1/4) Epoch 15, batch 3850, loss[loss=0.2014, simple_loss=0.2932, pruned_loss=0.05479, over 4623.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.269, pruned_loss=0.04488, over 941696.32 frames. ], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:49,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=195949.33333333334, ans=0.0 +2024-07-28 19:38:50,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=195962.66666666666, ans=10.0 +2024-07-28 19:38:53,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.00 vs. 
limit=15.0 +2024-07-28 19:38:55,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=195962.66666666666, ans=0.0 +2024-07-28 19:39:00,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=195976.0, ans=0.0 +2024-07-28 19:39:02,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=195976.0, ans=0.5 +2024-07-28 19:39:04,043 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.600e+01 6.154e+01 6.941e+01 1.032e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 19:39:10,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=195989.33333333334, ans=0.025 +2024-07-28 19:39:10,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=195989.33333333334, ans=0.2 +2024-07-28 19:39:17,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=196002.66666666666, ans=0.09899494936611666 +2024-07-28 19:39:19,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196016.0, ans=0.125 +2024-07-28 19:39:19,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196016.0, ans=0.125 +2024-07-28 19:39:55,318 INFO [train.py:1114] (1/4) Epoch 15, batch 3900, loss[loss=0.1767, simple_loss=0.2738, pruned_loss=0.03984, over 4817.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2692, pruned_loss=0.04501, over 941969.22 frames. ], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:39:57,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.08 vs. limit=12.0 +2024-07-28 19:40:02,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=196029.33333333334, ans=0.125 +2024-07-28 19:40:03,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=196029.33333333334, ans=0.125 +2024-07-28 19:40:04,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196029.33333333334, ans=0.1 +2024-07-28 19:41:56,037 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.10 vs. limit=22.5 +2024-07-28 19:41:58,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-07-28 19:42:00,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=196042.66666666666, ans=0.1 +2024-07-28 19:42:10,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.55 vs. limit=12.0 +2024-07-28 19:42:14,054 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.55 vs. 
limit=15.0 +2024-07-28 19:42:16,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196056.0, ans=0.1 +2024-07-28 19:42:21,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.54 vs. limit=10.0 +2024-07-28 19:42:23,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=196069.33333333334, ans=0.2 +2024-07-28 19:42:26,988 INFO [train.py:1114] (1/4) Epoch 15, batch 3950, loss[loss=0.1889, simple_loss=0.2789, pruned_loss=0.04946, over 4835.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2688, pruned_loss=0.04513, over 944047.12 frames. ], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:42:30,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.47 vs. limit=6.0 +2024-07-28 19:42:48,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196096.0, ans=0.125 +2024-07-28 19:42:53,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=196109.33333333334, ans=0.125 +2024-07-28 19:42:56,265 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.595e+01 6.291e+01 6.996e+01 9.236e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 19:44:13,280 INFO [train.py:1114] (1/4) Epoch 15, batch 4000, loss[loss=0.1671, simple_loss=0.2573, pruned_loss=0.03844, over 4776.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2702, pruned_loss=0.04582, over 941221.84 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:44:44,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=196202.66666666666, ans=0.125 +2024-07-28 19:44:46,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=196202.66666666666, ans=0.125 +2024-07-28 19:44:49,522 INFO [train.py:1114] (1/4) Epoch 15, batch 4050, loss[loss=0.2137, simple_loss=0.2965, pruned_loss=0.0654, over 3557.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04568, over 939742.17 frames. ], batch size: 36, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:44:59,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=196216.0, ans=0.125 +2024-07-28 19:45:01,184 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:47:41,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.74 vs. 
limit=10.0 +2024-07-28 19:47:44,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196242.66666666666, ans=0.0 +2024-07-28 19:48:20,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=196256.0, ans=0.02 +2024-07-28 19:48:20,682 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.678e+01 6.345e+01 7.266e+01 1.118e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 19:48:21,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0 +2024-07-28 19:48:23,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=196256.0, ans=0.2 +2024-07-28 19:48:26,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=196256.0, ans=0.125 +2024-07-28 19:48:35,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196269.33333333334, ans=0.125 +2024-07-28 19:48:39,020 INFO [train.py:1114] (1/4) Epoch 15, batch 4100, loss[loss=0.212, simple_loss=0.3048, pruned_loss=0.05963, over 4901.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.271, pruned_loss=0.04582, over 938996.61 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:48:40,881 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-07-28 19:48:41,937 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=196282.66666666666, ans=0.125 +2024-07-28 19:48:48,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=196296.0, ans=0.09899494936611666 +2024-07-28 19:49:00,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196309.33333333334, ans=0.0 +2024-07-28 19:49:04,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=196322.66666666666, ans=0.125 +2024-07-28 19:49:18,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=196336.0, ans=0.04949747468305833 +2024-07-28 19:49:22,845 INFO [train.py:1114] (1/4) Epoch 15, batch 4150, loss[loss=0.1593, simple_loss=0.2514, pruned_loss=0.03356, over 4831.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2692, pruned_loss=0.04493, over 938437.98 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:49:26,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196349.33333333334, ans=0.125 +2024-07-28 19:49:29,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. 
limit=6.0 +2024-07-28 19:49:52,755 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+01 5.670e+01 6.330e+01 7.256e+01 1.542e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 19:50:10,364 INFO [train.py:1114] (1/4) Epoch 15, batch 4200, loss[loss=0.1843, simple_loss=0.2675, pruned_loss=0.0505, over 4897.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2702, pruned_loss=0.04535, over 939671.54 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:50:17,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=196416.0, ans=0.0 +2024-07-28 19:50:37,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196429.33333333334, ans=0.125 +2024-07-28 19:50:44,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=196442.66666666666, ans=0.05 +2024-07-28 19:51:02,352 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.41 vs. limit=15.0 +2024-07-28 19:51:02,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.84 vs. limit=6.0 +2024-07-28 19:51:03,978 INFO [train.py:1114] (1/4) Epoch 15, batch 4250, loss[loss=0.1504, simple_loss=0.2409, pruned_loss=0.02992, over 4640.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.27, pruned_loss=0.0451, over 940629.74 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:23,973 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.654e+01 5.709e+01 6.318e+01 7.581e+01 1.158e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 19:51:25,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=196522.66666666666, ans=0.025 +2024-07-28 19:51:34,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196536.0, ans=0.125 +2024-07-28 19:51:37,889 INFO [train.py:1114] (1/4) Epoch 15, batch 4300, loss[loss=0.1917, simple_loss=0.2832, pruned_loss=0.0501, over 4760.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2704, pruned_loss=0.04577, over 939991.13 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:52:13,577 INFO [train.py:1114] (1/4) Epoch 15, batch 4350, loss[loss=0.1609, simple_loss=0.2383, pruned_loss=0.04176, over 4759.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.27, pruned_loss=0.04515, over 940646.18 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:52:15,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=196616.0, ans=0.025 +2024-07-28 19:52:34,516 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.717e+01 6.359e+01 7.024e+01 1.032e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 19:52:39,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.16 vs. limit=15.0 +2024-07-28 19:52:48,049 INFO [train.py:1114] (1/4) Epoch 15, batch 4400, loss[loss=0.1793, simple_loss=0.2859, pruned_loss=0.03637, over 4809.00 frames. 
], tot_loss[loss=0.1807, simple_loss=0.2709, pruned_loss=0.04523, over 940652.81 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:52:55,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196696.0, ans=0.125 +2024-07-28 19:52:58,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196696.0, ans=0.125 +2024-07-28 19:53:12,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196722.66666666666, ans=0.0 +2024-07-28 19:53:16,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=196736.0, ans=0.125 +2024-07-28 19:53:25,045 INFO [train.py:1114] (1/4) Epoch 15, batch 4450, loss[loss=0.1541, simple_loss=0.2405, pruned_loss=0.03388, over 4949.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2713, pruned_loss=0.04571, over 939087.74 frames. ], batch size: 12, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:55:37,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=196762.66666666666, ans=0.0 +2024-07-28 19:55:45,663 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.676e+01 6.225e+01 6.763e+01 9.651e+01, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 19:55:52,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196802.66666666666, ans=0.1 +2024-07-28 19:55:56,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=196802.66666666666, ans=0.0 +2024-07-28 19:55:57,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-07-28 19:55:57,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.03 vs. limit=15.0 +2024-07-28 19:55:59,331 INFO [train.py:1114] (1/4) Epoch 15, batch 4500, loss[loss=0.195, simple_loss=0.2925, pruned_loss=0.0488, over 4741.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2723, pruned_loss=0.04588, over 938213.48 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:56:22,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=196856.0, ans=0.025 +2024-07-28 19:56:32,360 INFO [train.py:1114] (1/4) Epoch 15, batch 4550, loss[loss=0.1494, simple_loss=0.2399, pruned_loss=0.02945, over 4893.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2712, pruned_loss=0.04525, over 940473.02 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:56:34,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.94 vs. 
limit=22.5 +2024-07-28 19:56:54,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=196922.66666666666, ans=0.125 +2024-07-28 19:56:54,579 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.447e+01 5.965e+01 6.710e+01 1.037e+02, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:56:56,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196922.66666666666, ans=0.1 +2024-07-28 19:57:00,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=196936.0, ans=0.09899494936611666 +2024-07-28 19:57:04,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=196936.0, ans=0.125 +2024-07-28 19:57:06,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=196936.0, ans=0.125 +2024-07-28 19:57:07,871 INFO [train.py:1114] (1/4) Epoch 15, batch 4600, loss[loss=0.1632, simple_loss=0.2552, pruned_loss=0.03563, over 4570.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2709, pruned_loss=0.04512, over 938666.66 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:57:07,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=196949.33333333334, ans=0.125 +2024-07-28 19:57:14,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.84 vs. limit=22.5 +2024-07-28 19:57:28,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196989.33333333334, ans=0.125 +2024-07-28 19:57:28,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.63 vs. limit=15.0 +2024-07-28 19:57:28,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.13 vs. limit=15.0 +2024-07-28 19:57:31,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0 +2024-07-28 19:57:33,383 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:57:38,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=197002.66666666666, ans=0.0 +2024-07-28 19:57:41,452 INFO [train.py:1114] (1/4) Epoch 15, batch 4650, loss[loss=0.2171, simple_loss=0.3095, pruned_loss=0.06229, over 4812.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2715, pruned_loss=0.04545, over 940219.62 frames. 
], batch size: 16, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:57:47,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=197016.0, ans=0.07 +2024-07-28 19:58:03,991 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.604e+01 6.309e+01 7.191e+01 9.740e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 19:58:05,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.22 vs. limit=12.0 +2024-07-28 19:58:08,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=197056.0, ans=0.0 +2024-07-28 19:58:23,484 INFO [train.py:1114] (1/4) Epoch 15, batch 4700, loss[loss=0.1597, simple_loss=0.2474, pruned_loss=0.03599, over 4699.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2713, pruned_loss=0.04558, over 937288.44 frames. ], batch size: 11, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:58:30,232 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=197096.0, ans=0.0 +2024-07-28 19:58:35,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197096.0, ans=0.125 +2024-07-28 19:58:41,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197109.33333333334, ans=0.125 +2024-07-28 19:58:50,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-07-28 19:58:54,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=197136.0, ans=0.0 +2024-07-28 19:58:57,023 INFO [train.py:1114] (1/4) Epoch 15, batch 4750, loss[loss=0.2122, simple_loss=0.299, pruned_loss=0.06268, over 4493.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2715, pruned_loss=0.04579, over 935297.34 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:59:02,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=197149.33333333334, ans=0.2 +2024-07-28 19:59:06,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=197162.66666666666, ans=0.0 +2024-07-28 19:59:17,880 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.663e+01 6.511e+01 7.507e+01 1.082e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 19:59:30,785 INFO [train.py:1114] (1/4) Epoch 15, batch 4800, loss[loss=0.1947, simple_loss=0.2862, pruned_loss=0.05157, over 4691.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2713, pruned_loss=0.04576, over 933474.13 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:59:35,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.72 vs. limit=15.0 +2024-07-28 19:59:59,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197269.33333333334, ans=0.125 +2024-07-28 20:00:00,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.54 vs. 
limit=10.0 +2024-07-28 20:00:00,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=197269.33333333334, ans=0.125 +2024-07-28 20:00:03,999 INFO [train.py:1114] (1/4) Epoch 15, batch 4850, loss[loss=0.1812, simple_loss=0.2723, pruned_loss=0.04506, over 4743.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2712, pruned_loss=0.04578, over 933061.94 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 20:00:17,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=197296.0, ans=0.0 +2024-07-28 20:00:18,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197296.0, ans=0.125 +2024-07-28 20:00:23,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.41 vs. limit=15.0 +2024-07-28 20:00:27,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=197309.33333333334, ans=0.0 +2024-07-28 20:00:30,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=197322.66666666666, ans=0.125 +2024-07-28 20:00:31,447 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.598e+01 6.100e+01 6.871e+01 9.023e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 20:00:32,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=197322.66666666666, ans=0.125 +2024-07-28 20:00:35,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.99 vs. limit=22.5 +2024-07-28 20:00:40,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=197336.0, ans=0.05 +2024-07-28 20:00:47,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=197349.33333333334, ans=0.1 +2024-07-28 20:00:48,004 INFO [train.py:1114] (1/4) Epoch 15, batch 4900, loss[loss=0.164, simple_loss=0.2639, pruned_loss=0.03211, over 4761.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.271, pruned_loss=0.04581, over 934950.12 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 20:01:06,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197376.0, ans=0.125 +2024-07-28 20:01:15,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=197389.33333333334, ans=0.125 +2024-07-28 20:01:15,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.49 vs. 
limit=15.0 +2024-07-28 20:01:17,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=197389.33333333334, ans=15.0 +2024-07-28 20:01:24,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197402.66666666666, ans=0.125 +2024-07-28 20:01:25,290 INFO [train.py:1114] (1/4) Epoch 15, batch 4950, loss[loss=0.2363, simple_loss=0.3238, pruned_loss=0.07441, over 3376.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2719, pruned_loss=0.0463, over 931807.45 frames. ], batch size: 35, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:01:45,880 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.531e+01 5.983e+01 6.546e+01 1.015e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 20:01:52,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=197469.33333333334, ans=0.2 +2024-07-28 20:01:58,979 INFO [train.py:1114] (1/4) Epoch 15, batch 5000, loss[loss=0.2009, simple_loss=0.2915, pruned_loss=0.05514, over 4647.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2721, pruned_loss=0.04642, over 935723.87 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:01:59,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=197482.66666666666, ans=0.125 +2024-07-28 20:02:06,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197496.0, ans=0.1 +2024-07-28 20:02:12,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=197509.33333333334, ans=0.125 +2024-07-28 20:02:14,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-28 20:02:26,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197522.66666666666, ans=0.1 +2024-07-28 20:02:33,778 INFO [train.py:1114] (1/4) Epoch 15, batch 5050, loss[loss=0.1343, simple_loss=0.2238, pruned_loss=0.02244, over 4848.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2719, pruned_loss=0.04643, over 938002.18 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:02:37,253 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:02:41,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=197562.66666666666, ans=0.0 +2024-07-28 20:02:54,787 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+01 5.690e+01 6.527e+01 7.473e+01 1.062e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 20:02:55,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. 
limit=6.0 +2024-07-28 20:03:01,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=197602.66666666666, ans=0.0 +2024-07-28 20:03:03,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=197602.66666666666, ans=0.125 +2024-07-28 20:03:08,384 INFO [train.py:1114] (1/4) Epoch 15, batch 5100, loss[loss=0.1479, simple_loss=0.2359, pruned_loss=0.02994, over 4776.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2716, pruned_loss=0.04649, over 934849.30 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:03:09,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197616.0, ans=0.125 +2024-07-28 20:03:15,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197629.33333333334, ans=0.125 +2024-07-28 20:03:24,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197642.66666666666, ans=0.125 +2024-07-28 20:03:24,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197642.66666666666, ans=0.1 +2024-07-28 20:03:32,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=197656.0, ans=0.0 +2024-07-28 20:03:40,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=197669.33333333334, ans=0.0 +2024-07-28 20:03:41,945 INFO [train.py:1114] (1/4) Epoch 15, batch 5150, loss[loss=0.1926, simple_loss=0.2727, pruned_loss=0.05624, over 4835.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2724, pruned_loss=0.04712, over 936019.78 frames. ], batch size: 16, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:03:49,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=197696.0, ans=0.2 +2024-07-28 20:04:04,747 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.460e+01 5.595e+01 6.018e+01 6.676e+01 9.613e+01, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 20:04:06,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.19 vs. limit=22.5 +2024-07-28 20:04:07,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=197722.66666666666, ans=0.2 +2024-07-28 20:04:08,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197722.66666666666, ans=0.1 +2024-07-28 20:04:10,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=197736.0, ans=0.07 +2024-07-28 20:04:16,833 INFO [train.py:1114] (1/4) Epoch 15, batch 5200, loss[loss=0.1677, simple_loss=0.2705, pruned_loss=0.03249, over 4655.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2711, pruned_loss=0.04657, over 936232.72 frames. 
], batch size: 14, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:04:20,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=197749.33333333334, ans=0.125 +2024-07-28 20:04:27,677 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=197762.66666666666, ans=0.125 +2024-07-28 20:04:32,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=197776.0, ans=0.0 +2024-07-28 20:04:37,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=197789.33333333334, ans=0.2 +2024-07-28 20:04:45,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.03 vs. limit=15.0 +2024-07-28 20:04:47,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=197802.66666666666, ans=0.04949747468305833 +2024-07-28 20:04:50,332 INFO [train.py:1114] (1/4) Epoch 15, batch 5250, loss[loss=0.1896, simple_loss=0.2724, pruned_loss=0.05337, over 4900.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2708, pruned_loss=0.04638, over 935801.39 frames. ], batch size: 13, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:05:02,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=197829.33333333334, ans=0.0 +2024-07-28 20:05:11,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=197856.0, ans=0.0 +2024-07-28 20:05:12,231 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.761e+01 6.623e+01 7.609e+01 1.184e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 20:05:20,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197869.33333333334, ans=0.125 +2024-07-28 20:05:24,592 INFO [train.py:1114] (1/4) Epoch 15, batch 5300, loss[loss=0.2192, simple_loss=0.3129, pruned_loss=0.06274, over 4624.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2712, pruned_loss=0.04647, over 934150.12 frames. ], batch size: 16, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:05:26,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=197882.66666666666, ans=0.125 +2024-07-28 20:05:26,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.31 vs. limit=15.0 +2024-07-28 20:05:45,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197896.0, ans=0.1 +2024-07-28 20:05:54,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.58 vs. limit=15.0 +2024-07-28 20:06:05,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=197936.0, ans=0.0 +2024-07-28 20:06:07,651 INFO [train.py:1114] (1/4) Epoch 15, batch 5350, loss[loss=0.1695, simple_loss=0.257, pruned_loss=0.04103, over 4551.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2719, pruned_loss=0.04714, over 936764.82 frames. 
], batch size: 10, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:06:07,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=197949.33333333334, ans=0.2 +2024-07-28 20:06:09,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197949.33333333334, ans=0.1 +2024-07-28 20:06:25,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197976.0, ans=0.125 +2024-07-28 20:06:33,131 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.458e+01 6.203e+01 7.042e+01 1.086e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 20:06:45,669 INFO [train.py:1114] (1/4) Epoch 15, batch 5400, loss[loss=0.1783, simple_loss=0.2606, pruned_loss=0.04801, over 4353.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2727, pruned_loss=0.04737, over 930538.71 frames. ], batch size: 26, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:06:46,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.68 vs. limit=15.0 +2024-07-28 20:06:47,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=198016.0, ans=0.125 +2024-07-28 20:06:54,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=198029.33333333334, ans=0.125 +2024-07-28 20:06:57,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=198029.33333333334, ans=0.035 +2024-07-28 20:06:59,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 20:07:10,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=198056.0, ans=0.0 +2024-07-28 20:07:18,649 INFO [train.py:1114] (1/4) Epoch 15, batch 5450, loss[loss=0.1634, simple_loss=0.2583, pruned_loss=0.03424, over 4693.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2719, pruned_loss=0.04644, over 933430.41 frames. ], batch size: 11, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:07:18,702 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=198082.66666666666, ans=0.0 +2024-07-28 20:07:18,949 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.24 vs. limit=6.0 +2024-07-28 20:07:23,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.26 vs. 
limit=22.5
+2024-07-28 20:07:25,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198096.0, ans=0.125
+2024-07-28 20:07:29,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=198096.0, ans=0.2
+2024-07-28 20:07:40,427 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.624e+01 6.275e+01 7.403e+01 1.039e+02, threshold=1.255e+02, percent-clipped=0.0
+2024-07-28 20:07:47,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=198136.0, ans=0.0
+2024-07-28 20:07:48,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=198136.0, ans=0.125
+2024-07-28 20:07:48,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198136.0, ans=0.1
+2024-07-28 20:07:49,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198136.0, ans=0.125
+2024-07-28 20:07:51,597 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.24 vs. limit=15.0
+2024-07-28 20:07:54,576 INFO [train.py:1114] (1/4) Epoch 15, batch 5500, loss[loss=0.1614, simple_loss=0.2609, pruned_loss=0.03096, over 4218.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2709, pruned_loss=0.04626, over 930709.05 frames. ], batch size: 25, lr: 4.98e-03, grad_scale: 32.0
+2024-07-28 20:08:03,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=198162.66666666666, ans=0.5
+2024-07-28 20:08:06,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.03 vs. limit=5.0
+2024-07-28 20:08:09,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=198176.0, ans=0.2
+2024-07-28 20:08:16,451 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.81 vs. limit=12.0
+2024-07-28 20:08:26,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.60 vs. limit=15.0
+2024-07-28 20:08:27,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=198202.66666666666, ans=0.0
+2024-07-28 20:08:29,837 INFO [train.py:1114] (1/4) Epoch 15, batch 5550, loss[loss=0.1981, simple_loss=0.282, pruned_loss=0.0571, over 4705.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2717, pruned_loss=0.04687, over 933090.41 frames. ], batch size: 12, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:08:35,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198216.0, ans=0.125
+2024-07-28 20:08:37,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=198229.33333333334, ans=0.2
+2024-07-28 20:08:50,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=198256.0, ans=0.0
+2024-07-28 20:08:51,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.553e+01 5.843e+01 6.570e+01 7.982e+01 1.258e+02, threshold=1.314e+02, percent-clipped=1.0
+2024-07-28 20:09:03,374 INFO [train.py:1114] (1/4) Epoch 15, batch 5600, loss[loss=0.1819, simple_loss=0.2703, pruned_loss=0.04678, over 4737.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2727, pruned_loss=0.04694, over 933972.84 frames. ], batch size: 14, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:09:19,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198309.33333333334, ans=0.0
+2024-07-28 20:09:19,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=198309.33333333334, ans=0.125
+2024-07-28 20:09:21,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198309.33333333334, ans=0.125
+2024-07-28 20:09:37,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=198336.0, ans=0.1
+2024-07-28 20:09:38,923 INFO [train.py:1114] (1/4) Epoch 15, batch 5650, loss[loss=0.1835, simple_loss=0.2704, pruned_loss=0.04829, over 4497.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2704, pruned_loss=0.04576, over 936775.38 frames. ], batch size: 21, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:09:42,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.24 vs. limit=15.0
+2024-07-28 20:10:00,144 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.802e+01 6.478e+01 7.454e+01 1.007e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 20:10:02,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=198389.33333333334, ans=0.125
+2024-07-28 20:10:12,529 INFO [train.py:1114] (1/4) Epoch 15, batch 5700, loss[loss=0.1649, simple_loss=0.2633, pruned_loss=0.03321, over 4702.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.046, over 937996.67 frames. ], batch size: 13, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:10:14,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198416.0, ans=0.0
+2024-07-28 20:10:18,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0
+2024-07-28 20:10:26,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=198442.66666666666, ans=0.2
+2024-07-28 20:10:34,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198456.0, ans=0.1
+2024-07-28 20:10:40,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=198469.33333333334, ans=0.0
+2024-07-28 20:10:40,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=198469.33333333334, ans=0.125
+2024-07-28 20:10:46,722 INFO [train.py:1114] (1/4) Epoch 15, batch 5750, loss[loss=0.1673, simple_loss=0.265, pruned_loss=0.03481, over 4706.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2717, pruned_loss=0.04625, over 937987.18 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:10:55,531 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:10:55,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198496.0, ans=0.1
+2024-07-28 20:10:57,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198496.0, ans=0.125
+2024-07-28 20:10:59,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=198509.33333333334, ans=0.04949747468305833
+2024-07-28 20:11:00,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198509.33333333334, ans=0.0
+2024-07-28 20:11:06,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.48 vs. limit=15.0
+2024-07-28 20:11:08,231 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.533e+01 6.199e+01 7.119e+01 1.016e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 20:11:09,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=198522.66666666666, ans=0.025
+2024-07-28 20:11:10,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198522.66666666666, ans=0.1
+2024-07-28 20:11:13,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=198536.0, ans=0.125
+2024-07-28 20:11:20,262 INFO [train.py:1114] (1/4) Epoch 15, batch 5800, loss[loss=0.2038, simple_loss=0.2875, pruned_loss=0.06002, over 4664.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2721, pruned_loss=0.04652, over 936846.93 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:11:26,654 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.61 vs. limit=15.0
+2024-07-28 20:11:31,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198562.66666666666, ans=0.125
+2024-07-28 20:11:33,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.00 vs. limit=6.0
+2024-07-28 20:11:34,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=198576.0, ans=0.025
+2024-07-28 20:11:34,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.58 vs. limit=15.0
+2024-07-28 20:11:50,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=198602.66666666666, ans=0.0
+2024-07-28 20:11:55,690 INFO [train.py:1114] (1/4) Epoch 15, batch 5850, loss[loss=0.2067, simple_loss=0.2865, pruned_loss=0.06349, over 4487.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2709, pruned_loss=0.0462, over 937881.99 frames. ], batch size: 21, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:11:56,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198616.0, ans=0.1
+2024-07-28 20:12:01,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198616.0, ans=0.1
+2024-07-28 20:12:10,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=198642.66666666666, ans=0.125
+2024-07-28 20:12:18,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198656.0, ans=0.125
+2024-07-28 20:12:19,163 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 5.725e+01 6.353e+01 6.909e+01 1.131e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 20:12:26,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=198669.33333333334, ans=0.125
+2024-07-28 20:12:29,000 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:12:31,562 INFO [train.py:1114] (1/4) Epoch 15, batch 5900, loss[loss=0.1843, simple_loss=0.2743, pruned_loss=0.0471, over 4682.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2707, pruned_loss=0.04652, over 937949.60 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:12:39,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=198696.0, ans=0.125
+2024-07-28 20:12:50,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=198709.33333333334, ans=0.125
+2024-07-28 20:13:03,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.04 vs. limit=15.0
+2024-07-28 20:13:09,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198736.0, ans=0.1
+2024-07-28 20:13:10,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=198749.33333333334, ans=0.0
+2024-07-28 20:13:11,341 INFO [train.py:1114] (1/4) Epoch 15, batch 5950, loss[loss=0.1988, simple_loss=0.2974, pruned_loss=0.05008, over 4675.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2701, pruned_loss=0.04608, over 939783.34 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:13:11,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=198749.33333333334, ans=0.05
+2024-07-28 20:13:30,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.73 vs. limit=22.5
+2024-07-28 20:13:34,730 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.687e+01 6.210e+01 6.868e+01 1.023e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 20:13:47,277 INFO [train.py:1114] (1/4) Epoch 15, batch 6000, loss[loss=0.1984, simple_loss=0.2826, pruned_loss=0.05712, over 4157.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2707, pruned_loss=0.04613, over 937220.76 frames. ], batch size: 25, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:13:50,111 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 20:14:09,824 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.1637, simple_loss=0.2666, pruned_loss=0.03037, over 944034.00 frames.
+2024-07-28 20:14:09,824 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 20:14:14,221 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.99 vs. limit=15.0
+2024-07-28 20:14:18,173 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.33 vs. limit=22.5
+2024-07-28 20:14:20,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=198829.33333333334, ans=0.2
+2024-07-28 20:14:31,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=198856.0, ans=0.0
+2024-07-28 20:14:36,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=198869.33333333334, ans=0.0
+2024-07-28 20:14:39,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=198869.33333333334, ans=0.05
+2024-07-28 20:14:41,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198869.33333333334, ans=0.1
+2024-07-28 20:14:43,678 INFO [train.py:1114] (1/4) Epoch 15, batch 6050, loss[loss=0.1784, simple_loss=0.2567, pruned_loss=0.05006, over 4776.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2702, pruned_loss=0.04592, over 938468.67 frames. ], batch size: 12, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:14:44,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=198882.66666666666, ans=0.2
+2024-07-28 20:14:45,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=198882.66666666666, ans=0.0
+2024-07-28 20:14:46,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=198882.66666666666, ans=0.125
+2024-07-28 20:14:55,107 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:15:06,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.56 vs. limit=12.0
+2024-07-28 20:15:06,831 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.577e+01 6.176e+01 7.301e+01 1.116e+02, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 20:15:17,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198936.0, ans=0.0
+2024-07-28 20:15:18,930 INFO [train.py:1114] (1/4) Epoch 15, batch 6100, loss[loss=0.1614, simple_loss=0.2566, pruned_loss=0.03312, over 4695.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.27, pruned_loss=0.04581, over 937945.72 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0
+2024-07-28 20:15:25,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=198962.66666666666, ans=15.0
+2024-07-28 20:15:32,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.70 vs. limit=15.0
+2024-07-28 20:15:33,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=198976.0, ans=0.0
+2024-07-28 20:15:37,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=198976.0, ans=0.0
+2024-07-28 20:15:40,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=198989.33333333334, ans=0.125
+2024-07-28 20:15:42,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=198989.33333333334, ans=0.0
+2024-07-28 20:15:52,712 INFO [train.py:1114] (1/4) Epoch 15, batch 6150, loss[loss=0.1781, simple_loss=0.2711, pruned_loss=0.04256, over 3337.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2704, pruned_loss=0.04607, over 936554.05 frames. ], batch size: 35, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:15:54,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=199016.0, ans=0.0
+2024-07-28 20:16:00,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0
+2024-07-28 20:16:03,379 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:16:03,577 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=12.0
+2024-07-28 20:16:09,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199042.66666666666, ans=0.0
+2024-07-28 20:16:10,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199042.66666666666, ans=0.125
+2024-07-28 20:16:14,487 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.430e+01 6.165e+01 7.118e+01 1.181e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 20:16:15,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=199056.0, ans=10.0
+2024-07-28 20:16:21,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=199069.33333333334, ans=0.125
+2024-07-28 20:16:26,465 INFO [train.py:1114] (1/4) Epoch 15, batch 6200, loss[loss=0.2013, simple_loss=0.3046, pruned_loss=0.04904, over 4737.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.271, pruned_loss=0.04642, over 935725.86 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:16:31,295 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:16:34,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=199096.0, ans=0.125
+2024-07-28 20:16:36,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=199096.0, ans=0.0
+2024-07-28 20:16:44,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199109.33333333334, ans=0.1
+2024-07-28 20:16:47,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=199122.66666666666, ans=0.125
+2024-07-28 20:16:49,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=199122.66666666666, ans=0.04949747468305833
+2024-07-28 20:16:54,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=199136.0, ans=0.125
+2024-07-28 20:17:00,525 INFO [train.py:1114] (1/4) Epoch 15, batch 6250, loss[loss=0.1792, simple_loss=0.2628, pruned_loss=0.04782, over 4808.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2708, pruned_loss=0.04631, over 932915.92 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:17:00,959 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0
+2024-07-28 20:17:12,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=199162.66666666666, ans=0.125
+2024-07-28 20:17:21,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=199176.0, ans=0.125
+2024-07-28 20:17:25,819 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.601e+01 6.121e+01 7.200e+01 1.148e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 20:17:32,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0
+2024-07-28 20:17:34,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199202.66666666666, ans=0.1
+2024-07-28 20:17:38,304 INFO [train.py:1114] (1/4) Epoch 15, batch 6300, loss[loss=0.17, simple_loss=0.2528, pruned_loss=0.04357, over 4519.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2711, pruned_loss=0.04644, over 929639.26 frames. ], batch size: 10, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:17:39,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=199216.0, ans=0.125
+2024-07-28 20:17:46,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199229.33333333334, ans=0.1
+2024-07-28 20:17:49,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=199229.33333333334, ans=0.125
+2024-07-28 20:17:57,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=199256.0, ans=0.2
+2024-07-28 20:18:10,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=199269.33333333334, ans=0.125
+2024-07-28 20:18:10,946 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=199282.66666666666, ans=0.125
+2024-07-28 20:18:11,385 INFO [train.py:1114] (1/4) Epoch 15, batch 6350, loss[loss=0.1576, simple_loss=0.2515, pruned_loss=0.03189, over 4556.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2698, pruned_loss=0.04612, over 933789.54 frames. ], batch size: 22, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:18:14,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199282.66666666666, ans=0.125
+2024-07-28 20:18:15,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=199282.66666666666, ans=0.125
+2024-07-28 20:18:21,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199296.0, ans=0.125
+2024-07-28 20:18:21,298 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=199296.0, ans=0.125
+2024-07-28 20:18:33,049 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.687e+01 6.281e+01 7.134e+01 1.278e+02, threshold=1.256e+02, percent-clipped=1.0
+2024-07-28 20:18:35,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=199322.66666666666, ans=0.025
+2024-07-28 20:18:37,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=199322.66666666666, ans=0.2
+2024-07-28 20:18:44,399 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=199349.33333333334, ans=0.07
+2024-07-28 20:18:44,871 INFO [train.py:1114] (1/4) Epoch 15, batch 6400, loss[loss=0.1987, simple_loss=0.2963, pruned_loss=0.05056, over 4635.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2691, pruned_loss=0.04621, over 935084.38 frames. ], batch size: 13, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:18:57,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0
+2024-07-28 20:18:58,125 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:19:00,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=199376.0, ans=0.125
+2024-07-28 20:19:19,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.26 vs. limit=15.0
+2024-07-28 20:19:20,033 INFO [train.py:1114] (1/4) Epoch 15, batch 6450, loss[loss=0.1933, simple_loss=0.2717, pruned_loss=0.05748, over 4469.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2704, pruned_loss=0.04638, over 938614.79 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:19:25,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.59 vs. limit=12.0
+2024-07-28 20:19:28,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=199429.33333333334, ans=0.05
+2024-07-28 20:19:33,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.72 vs. limit=22.5
+2024-07-28 20:19:38,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=199442.66666666666, ans=0.125
+2024-07-28 20:19:41,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.703e+01 6.605e+01 7.565e+01 1.204e+02, threshold=1.321e+02, percent-clipped=0.0
+2024-07-28 20:19:45,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=199456.0, ans=0.2
+2024-07-28 20:19:45,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199456.0, ans=0.0
+2024-07-28 20:19:49,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=199469.33333333334, ans=0.09899494936611666
+2024-07-28 20:19:53,866 INFO [train.py:1114] (1/4) Epoch 15, batch 6500, loss[loss=0.2372, simple_loss=0.3062, pruned_loss=0.08413, over 3375.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2708, pruned_loss=0.04609, over 940118.59 frames. ], batch size: 35, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:19:58,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199482.66666666666, ans=0.125
+2024-07-28 20:19:59,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199482.66666666666, ans=0.125
+2024-07-28 20:20:02,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=199496.0, ans=0.0
+2024-07-28 20:20:09,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=199509.33333333334, ans=0.025
+2024-07-28 20:20:18,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199522.66666666666, ans=0.125
+2024-07-28 20:20:28,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=199536.0, ans=0.0
+2024-07-28 20:20:28,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=199549.33333333334, ans=0.125
+2024-07-28 20:20:29,566 INFO [train.py:1114] (1/4) Epoch 15, batch 6550, loss[loss=0.1966, simple_loss=0.2731, pruned_loss=0.06009, over 4801.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2706, pruned_loss=0.04569, over 942958.35 frames. ], batch size: 11, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:20:30,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199549.33333333334, ans=0.125
+2024-07-28 20:20:35,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=199549.33333333334, ans=0.2
+2024-07-28 20:20:42,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=199576.0, ans=0.125
+2024-07-28 20:20:44,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0
+2024-07-28 20:20:51,051 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.602e+01 6.242e+01 7.548e+01 1.165e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 20:20:53,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199589.33333333334, ans=0.1
+2024-07-28 20:20:54,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=199589.33333333334, ans=0.09899494936611666
+2024-07-28 20:21:03,267 INFO [train.py:1114] (1/4) Epoch 15, batch 6600, loss[loss=0.1645, simple_loss=0.2694, pruned_loss=0.0298, over 4922.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2706, pruned_loss=0.04527, over 944813.41 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:21:25,194 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.73 vs. limit=15.0
+2024-07-28 20:21:32,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.91 vs. limit=6.0
+2024-07-28 20:21:37,069 INFO [train.py:1114] (1/4) Epoch 15, batch 6650, loss[loss=0.2045, simple_loss=0.2975, pruned_loss=0.05577, over 4613.00 frames. ], tot_loss[loss=0.18, simple_loss=0.27, pruned_loss=0.04498, over 943396.81 frames. ], batch size: 17, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:21:50,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=199709.33333333334, ans=0.125
+2024-07-28 20:21:58,838 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.657e+01 6.507e+01 7.358e+01 9.845e+01, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 20:22:00,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199722.66666666666, ans=0.0
+2024-07-28 20:22:01,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=199722.66666666666, ans=0.125
+2024-07-28 20:22:03,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=199722.66666666666, ans=0.125
+2024-07-28 20:22:08,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=6.0
+2024-07-28 20:22:11,046 INFO [train.py:1114] (1/4) Epoch 15, batch 6700, loss[loss=0.1939, simple_loss=0.2713, pruned_loss=0.0582, over 4700.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2703, pruned_loss=0.04513, over 942469.57 frames. ], batch size: 19, lr: 4.96e-03, grad_scale: 32.0
+2024-07-28 20:22:14,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=199749.33333333334, ans=0.0
+2024-07-28 20:22:15,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=199749.33333333334, ans=0.125
+2024-07-28 20:22:18,767 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199762.66666666666, ans=0.125
+2024-07-28 20:22:22,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=199762.66666666666, ans=0.125
+2024-07-28 20:22:24,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=199776.0, ans=0.0
+2024-07-28 20:22:30,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199789.33333333334, ans=0.125
+2024-07-28 20:22:33,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199789.33333333334, ans=0.125
+2024-07-28 20:22:46,721 INFO [train.py:1114] (1/4) Epoch 15, batch 6750, loss[loss=0.1876, simple_loss=0.2773, pruned_loss=0.04891, over 4340.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2701, pruned_loss=0.04528, over 940494.88 frames. ], batch size: 25, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:22:59,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=199829.33333333334, ans=0.04949747468305833
+2024-07-28 20:23:01,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=199842.66666666666, ans=0.125
+2024-07-28 20:23:08,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.51 vs. limit=12.0
+2024-07-28 20:23:09,780 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.776e+01 6.215e+01 6.945e+01 1.166e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 20:23:12,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.80 vs. limit=22.5
+2024-07-28 20:23:15,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.22 vs. limit=8.0
+2024-07-28 20:23:21,869 INFO [train.py:1114] (1/4) Epoch 15, batch 6800, loss[loss=0.1981, simple_loss=0.3031, pruned_loss=0.04657, over 4633.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2707, pruned_loss=0.04539, over 939121.61 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:23:31,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=12.0
+2024-07-28 20:23:37,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=199909.33333333334, ans=0.0
+2024-07-28 20:23:51,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=199936.0, ans=0.2
+2024-07-28 20:23:55,349 INFO [train.py:1114] (1/4) Epoch 15, batch 6850, loss[loss=0.1974, simple_loss=0.2879, pruned_loss=0.05341, over 4690.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2712, pruned_loss=0.04541, over 940801.13 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:24:02,256 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=199962.66666666666, ans=0.0
+2024-07-28 20:24:04,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=199962.66666666666, ans=0.0
+2024-07-28 20:24:05,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=199962.66666666666, ans=0.125
+2024-07-28 20:24:08,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=199976.0, ans=0.04949747468305833
+2024-07-28 20:24:14,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=199976.0, ans=0.2
+2024-07-28 20:24:16,991 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 5.729e+01 6.369e+01 7.119e+01 1.032e+02, threshold=1.274e+02, percent-clipped=0.0
+2024-07-28 20:24:21,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199989.33333333334, ans=0.125
+2024-07-28 20:24:21,722 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=199989.33333333334, ans=0.07
+2024-07-28 20:24:31,052 INFO [train.py:1114] (1/4) Epoch 15, batch 6900, loss[loss=0.2065, simple_loss=0.2961, pruned_loss=0.05846, over 4955.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2712, pruned_loss=0.0456, over 943023.06 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:24:33,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=200016.0, ans=0.0
+2024-07-28 20:24:42,625 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0
+2024-07-28 20:24:49,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200042.66666666666, ans=0.125
+2024-07-28 20:24:49,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200042.66666666666, ans=0.125
+2024-07-28 20:24:53,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=200056.0, ans=0.2
+2024-07-28 20:24:55,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=200056.0, ans=0.0
+2024-07-28 20:24:55,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200056.0, ans=0.125
+2024-07-28 20:24:55,773 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:24:59,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=200069.33333333334, ans=0.125
+2024-07-28 20:25:00,059 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0
+2024-07-28 20:25:04,286 INFO [train.py:1114] (1/4) Epoch 15, batch 6950, loss[loss=0.1488, simple_loss=0.2378, pruned_loss=0.02991, over 4554.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2702, pruned_loss=0.04519, over 940340.93 frames. ], batch size: 10, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:25:05,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=200082.66666666666, ans=0.0
+2024-07-28 20:25:06,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=200082.66666666666, ans=0.125
+2024-07-28 20:25:13,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.19 vs. limit=15.0
+2024-07-28 20:25:23,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=200122.66666666666, ans=0.0
+2024-07-28 20:25:25,330 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.615e+01 6.056e+01 6.911e+01 1.034e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 20:25:37,535 INFO [train.py:1114] (1/4) Epoch 15, batch 7000, loss[loss=0.2123, simple_loss=0.296, pruned_loss=0.06431, over 4632.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2694, pruned_loss=0.04485, over 938803.77 frames. ], batch size: 17, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:25:48,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=200162.66666666666, ans=0.125
+2024-07-28 20:25:54,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.09 vs. limit=6.0
+2024-07-28 20:25:56,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=200176.0, ans=0.0
+2024-07-28 20:25:59,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.67 vs. limit=22.5
+2024-07-28 20:26:03,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=200189.33333333334, ans=0.5
+2024-07-28 20:26:12,401 INFO [train.py:1114] (1/4) Epoch 15, batch 7050, loss[loss=0.184, simple_loss=0.2762, pruned_loss=0.04589, over 4689.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2696, pruned_loss=0.04492, over 941968.40 frames. ], batch size: 19, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:26:20,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=200229.33333333334, ans=0.0
+2024-07-28 20:26:21,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=200229.33333333334, ans=0.125
+2024-07-28 20:26:30,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=200242.66666666666, ans=0.125
+2024-07-28 20:26:35,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=200256.0, ans=0.125
+2024-07-28 20:26:35,598 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.674e+01 6.340e+01 7.118e+01 1.081e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 20:26:37,759 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:26:37,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.96 vs. limit=6.0
+2024-07-28 20:26:47,601 INFO [train.py:1114] (1/4) Epoch 15, batch 7100, loss[loss=0.1958, simple_loss=0.2889, pruned_loss=0.05132, over 4796.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2703, pruned_loss=0.04569, over 937107.23 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:26:52,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=200282.66666666666, ans=0.125
+2024-07-28 20:26:57,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0
+2024-07-28 20:27:04,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=200309.33333333334, ans=0.2
+2024-07-28 20:27:10,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=200322.66666666666, ans=0.125
+2024-07-28 20:27:12,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200322.66666666666, ans=0.1
+2024-07-28 20:27:14,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=200336.0, ans=0.2
+2024-07-28 20:27:19,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200349.33333333334, ans=0.125
+2024-07-28 20:27:20,253 INFO [train.py:1114] (1/4) Epoch 15, batch 7150, loss[loss=0.1988, simple_loss=0.3, pruned_loss=0.0488, over 4511.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2684, pruned_loss=0.04492, over 937824.88 frames. ], batch size: 21, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:27:20,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=200349.33333333334, ans=0.125
+2024-07-28 20:27:25,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.29 vs. limit=10.0
+2024-07-28 20:27:33,738 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:27:41,583 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.495e+01 6.100e+01 6.664e+01 1.254e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 20:27:53,621 INFO [train.py:1114] (1/4) Epoch 15, batch 7200, loss[loss=0.1838, simple_loss=0.2783, pruned_loss=0.0447, over 4808.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2699, pruned_loss=0.04555, over 937985.51 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 64.0
+2024-07-28 20:28:02,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=200429.33333333334, ans=0.2
+2024-07-28 20:28:15,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200456.0, ans=0.125
+2024-07-28 20:28:15,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.54 vs. limit=15.0
+2024-07-28 20:28:18,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=200456.0, ans=0.2
+2024-07-28 20:28:21,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=200469.33333333334, ans=0.2
+2024-07-28 20:28:26,771 INFO [train.py:1114] (1/4) Epoch 15, batch 7250, loss[loss=0.1612, simple_loss=0.2422, pruned_loss=0.04012, over 4851.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2692, pruned_loss=0.04511, over 939270.68 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 64.0
+2024-07-28 20:28:36,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=200496.0, ans=0.125
+2024-07-28 20:28:40,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.05 vs. limit=22.5
+2024-07-28 20:28:41,609 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:28:43,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=200509.33333333334, ans=0.2
+2024-07-28 20:28:45,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200522.66666666666, ans=0.1
+2024-07-28 20:28:47,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.542e+01 5.960e+01 6.678e+01 9.539e+01, threshold=1.192e+02, percent-clipped=0.0
+2024-07-28 20:28:52,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.62 vs. limit=15.0
+2024-07-28 20:28:59,491 INFO [train.py:1114] (1/4) Epoch 15, batch 7300, loss[loss=0.1671, simple_loss=0.2626, pruned_loss=0.0358, over 4861.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2687, pruned_loss=0.04484, over 939521.04 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:28:59,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.42 vs. limit=15.0
+2024-07-28 20:29:01,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=200549.33333333334, ans=0.0
+2024-07-28 20:29:06,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200562.66666666666, ans=0.1
+2024-07-28 20:29:16,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200576.0, ans=0.0
+2024-07-28 20:29:21,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=200589.33333333334, ans=0.125
+2024-07-28 20:29:25,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=200602.66666666666, ans=0.0
+2024-07-28 20:29:31,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.17 vs. limit=22.5
+2024-07-28 20:29:32,744 INFO [train.py:1114] (1/4) Epoch 15, batch 7350, loss[loss=0.1917, simple_loss=0.269, pruned_loss=0.05725, over 4642.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2691, pruned_loss=0.04464, over 938738.78 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:29:33,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=200616.0, ans=0.0
+2024-07-28 20:29:42,188 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:29:51,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=200656.0, ans=0.09899494936611666
+2024-07-28 20:29:54,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.678e+01 6.177e+01 7.167e+01 1.153e+02, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 20:30:05,184 INFO [train.py:1114] (1/4) Epoch 15, batch 7400, loss[loss=0.1832, simple_loss=0.2733, pruned_loss=0.04656, over 4690.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2691, pruned_loss=0.04438, over 940135.08 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:30:16,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=200696.0, ans=0.125
+2024-07-28 20:30:18,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200709.33333333334, ans=0.1
+2024-07-28 20:30:21,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=200709.33333333334, ans=0.015
+2024-07-28 20:30:23,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=200709.33333333334, ans=0.0
+2024-07-28 20:30:23,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=200709.33333333334, ans=0.0
+2024-07-28 20:30:29,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=200722.66666666666, ans=0.2
+2024-07-28 20:30:37,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=200736.0, ans=0.0
+2024-07-28 20:30:38,350 INFO [train.py:1114] (1/4) Epoch 15, batch 7450, loss[loss=0.155, simple_loss=0.2322, pruned_loss=0.03892, over 4618.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2683, pruned_loss=0.04448, over 937191.28 frames. ], batch size: 11, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:30:44,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=200762.66666666666, ans=0.125
+2024-07-28 20:30:49,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=200762.66666666666, ans=0.0
+2024-07-28 20:30:54,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=200776.0, ans=0.025
+2024-07-28 20:30:55,941 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:30:59,743 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.621e+01 5.506e+01 6.120e+01 7.059e+01 1.130e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 20:31:01,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=200789.33333333334, ans=0.125
+2024-07-28 20:31:10,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200816.0, ans=0.1
+2024-07-28 20:31:11,142 INFO [train.py:1114] (1/4) Epoch 15, batch 7500, loss[loss=0.2758, simple_loss=0.338, pruned_loss=0.1068, over 3295.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2692, pruned_loss=0.04459, over 935313.64 frames. ], batch size: 36, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:31:17,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=200829.33333333334, ans=0.0
+2024-07-28 20:31:21,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=200829.33333333334, ans=0.0
+2024-07-28 20:31:38,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=15.0
+2024-07-28 20:31:41,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200869.33333333334, ans=0.125
+2024-07-28 20:31:41,810 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:31:45,665 INFO [train.py:1114] (1/4) Epoch 15, batch 7550, loss[loss=0.2038, simple_loss=0.2857, pruned_loss=0.06092, over 4619.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2702, pruned_loss=0.04513, over 935441.55 frames. ], batch size: 17, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:31:50,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=200882.66666666666, ans=0.0
+2024-07-28 20:32:08,710 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.439e+01 5.885e+01 6.380e+01 8.239e+01, threshold=1.177e+02, percent-clipped=0.0
+2024-07-28 20:32:17,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=200936.0, ans=0.125
+2024-07-28 20:32:18,127 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.43 vs. limit=6.0
+2024-07-28 20:32:19,796 INFO [train.py:1114] (1/4) Epoch 15, batch 7600, loss[loss=0.163, simple_loss=0.2645, pruned_loss=0.03076, over 4823.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2703, pruned_loss=0.04507, over 937446.16 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:32:21,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=200949.33333333334, ans=0.0
+2024-07-28 20:32:22,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=200949.33333333334, ans=0.0
+2024-07-28 20:32:26,764 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.65 vs. limit=15.0
+2024-07-28 20:32:41,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0
+2024-07-28 20:32:55,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=200976.0, ans=0.125
+2024-07-28 20:33:03,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200989.33333333334, ans=0.1
+2024-07-28 20:33:04,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=200989.33333333334, ans=0.02
+2024-07-28 20:33:12,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=201002.66666666666, ans=0.5
+2024-07-28 20:33:13,667 INFO [train.py:1114] (1/4) Epoch 15, batch 7650, loss[loss=0.1425, simple_loss=0.2287, pruned_loss=0.02816, over 4934.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2706, pruned_loss=0.04543, over 936311.50 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:33:31,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=201029.33333333334, ans=0.04949747468305833
+2024-07-28 20:33:36,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=201042.66666666666, ans=0.0
+2024-07-28 20:33:39,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=201042.66666666666, ans=0.5
+2024-07-28 20:33:39,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=201042.66666666666, ans=0.0
+2024-07-28 20:33:52,757 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.492e+01 6.279e+01 7.005e+01 1.015e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 20:33:52,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=201056.0, ans=0.05
+2024-07-28 20:34:17,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201069.33333333334, ans=0.1
+2024-07-28 20:34:21,681 INFO [train.py:1114] (1/4) Epoch 15, batch 7700, loss[loss=0.1742, simple_loss=0.2798, pruned_loss=0.03429, over 4700.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2705, pruned_loss=0.04518, over 933678.52 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:34:25,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=201082.66666666666, ans=0.0
+2024-07-28 20:34:28,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=201082.66666666666, ans=0.0
+2024-07-28 20:34:37,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=201109.33333333334, ans=0.0
+2024-07-28 20:34:47,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=201109.33333333334, ans=0.2
+2024-07-28 20:34:53,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201122.66666666666, ans=0.125
+2024-07-28 20:34:57,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201136.0, ans=0.1
+2024-07-28 20:35:06,316 INFO [train.py:1114] (1/4) Epoch 15, batch 7750, loss[loss=0.2051, simple_loss=0.3063, pruned_loss=0.05194, over 4930.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2714, pruned_loss=0.04575, over 935012.48 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:35:06,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. limit=15.0
+2024-07-28 20:35:08,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=201149.33333333334, ans=0.125
+2024-07-28 20:35:21,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201162.66666666666, ans=0.125
+2024-07-28 20:35:23,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=201162.66666666666, ans=0.1
+2024-07-28 20:35:30,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=201176.0, ans=0.125
+2024-07-28 20:35:34,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201189.33333333334, ans=0.1
+2024-07-28 20:35:37,371 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.525e+01 5.917e+01 6.791e+01 1.166e+02, threshold=1.183e+02, percent-clipped=0.0
+2024-07-28 20:35:38,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201189.33333333334, ans=0.1
+2024-07-28 20:35:39,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=201189.33333333334, ans=0.125
+2024-07-28 20:36:08,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=201202.66666666666, ans=15.0
+2024-07-28 20:36:10,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0
+2024-07-28 20:36:12,992 INFO [train.py:1114] (1/4) Epoch 15, batch 7800, loss[loss=0.1883, simple_loss=0.2832, pruned_loss=0.04674, over 4664.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2714, pruned_loss=0.04563, over 936938.21 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:36:32,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=201216.0, ans=0.0
+2024-07-28 20:36:33,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=201229.33333333334, ans=0.0
+2024-07-28 20:36:45,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.58 vs. limit=10.0
+2024-07-28 20:37:01,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=201269.33333333334, ans=0.015
+2024-07-28 20:37:04,504 INFO [train.py:1114] (1/4) Epoch 15, batch 7850, loss[loss=0.1582, simple_loss=0.2389, pruned_loss=0.03871, over 4545.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2718, pruned_loss=0.04555, over 936078.94 frames. ], batch size: 10, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:37:11,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201296.0, ans=0.125
+2024-07-28 20:37:36,068 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.654e+01 6.198e+01 6.976e+01 9.701e+01, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 20:37:39,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=201322.66666666666, ans=0.05
+2024-07-28 20:37:47,268 INFO [train.py:1114] (1/4) Epoch 15, batch 7900, loss[loss=0.1754, simple_loss=0.2705, pruned_loss=0.04012, over 4873.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2727, pruned_loss=0.04583, over 932563.50 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:38:01,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201376.0, ans=0.1
+2024-07-28 20:38:02,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=201376.0, ans=0.0
+2024-07-28 20:38:07,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=201376.0, ans=0.125
+2024-07-28 20:38:16,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=201402.66666666666, ans=0.0
+2024-07-28 20:38:21,273 INFO [train.py:1114] (1/4) Epoch 15, batch 7950, loss[loss=0.1993, simple_loss=0.2724, pruned_loss=0.06311, over 3114.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2721, pruned_loss=0.0454, over 934756.37 frames. ], batch size: 36, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:38:25,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=201416.0, ans=0.025
+2024-07-28 20:38:33,915 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.90 vs. limit=15.0
+2024-07-28 20:38:35,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0
+2024-07-28 20:38:38,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=201442.66666666666, ans=0.0
+2024-07-28 20:38:42,610 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.203e+01 5.519e+01 6.026e+01 6.724e+01 9.656e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 20:38:44,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0
+2024-07-28 20:38:52,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=201482.66666666666, ans=0.125
+2024-07-28 20:38:53,249 INFO [train.py:1114] (1/4) Epoch 15, batch 8000, loss[loss=0.1868, simple_loss=0.2644, pruned_loss=0.05456, over 4626.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2702, pruned_loss=0.04505, over 934067.40 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:38:55,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=201482.66666666666, ans=0.025
+2024-07-28 20:38:58,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=201482.66666666666, ans=0.125
+2024-07-28 20:39:04,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201496.0, ans=0.1
+2024-07-28 20:39:06,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=201509.33333333334, ans=0.125
+2024-07-28 20:39:10,032 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:39:12,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.19 vs. limit=15.0
+2024-07-28 20:39:25,594 INFO [train.py:1114] (1/4) Epoch 15, batch 8050, loss[loss=0.179, simple_loss=0.2762, pruned_loss=0.04086, over 4817.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2708, pruned_loss=0.04541, over 934271.94 frames. ], batch size: 14, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:39:30,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201549.33333333334, ans=0.1
+2024-07-28 20:39:36,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=201562.66666666666, ans=0.0
+2024-07-28 20:39:42,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.44 vs. 
limit=22.5 +2024-07-28 20:39:46,855 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.002e+01 6.838e+01 8.210e+01 1.277e+02, threshold=1.368e+02, percent-clipped=1.0 +2024-07-28 20:39:48,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201589.33333333334, ans=0.1 +2024-07-28 20:39:49,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201589.33333333334, ans=0.125 +2024-07-28 20:39:57,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201616.0, ans=0.125 +2024-07-28 20:39:58,046 INFO [train.py:1114] (1/4) Epoch 15, batch 8100, loss[loss=0.2255, simple_loss=0.3089, pruned_loss=0.07108, over 4802.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.272, pruned_loss=0.04586, over 933650.44 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:40:20,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201656.0, ans=0.125 +2024-07-28 20:40:22,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=201669.33333333334, ans=0.025 +2024-07-28 20:40:24,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=201669.33333333334, ans=0.025 +2024-07-28 20:40:30,018 INFO [train.py:1114] (1/4) Epoch 15, batch 8150, loss[loss=0.1944, simple_loss=0.2965, pruned_loss=0.04615, over 4799.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2718, pruned_loss=0.046, over 937010.81 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:40:41,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.36 vs. limit=10.0 +2024-07-28 20:40:44,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=201709.33333333334, ans=0.125 +2024-07-28 20:40:44,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=201709.33333333334, ans=0.0 +2024-07-28 20:40:49,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.81 vs. limit=12.0 +2024-07-28 20:40:51,248 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.751e+01 6.330e+01 7.260e+01 1.173e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 20:40:52,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.65 vs. limit=22.5 +2024-07-28 20:40:53,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=201722.66666666666, ans=0.125 +2024-07-28 20:40:56,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=201736.0, ans=0.2 +2024-07-28 20:41:02,469 INFO [train.py:1114] (1/4) Epoch 15, batch 8200, loss[loss=0.1752, simple_loss=0.2604, pruned_loss=0.04503, over 4808.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2721, pruned_loss=0.04595, over 938557.37 frames. 
], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:41:03,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201749.33333333334, ans=0.1 +2024-07-28 20:41:05,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=201749.33333333334, ans=0.0 +2024-07-28 20:41:09,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=201762.66666666666, ans=0.07 +2024-07-28 20:41:14,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=201776.0, ans=0.0 +2024-07-28 20:41:36,030 INFO [train.py:1114] (1/4) Epoch 15, batch 8250, loss[loss=0.1371, simple_loss=0.2284, pruned_loss=0.02284, over 4889.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2718, pruned_loss=0.04593, over 938696.11 frames. ], batch size: 13, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:41:38,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=201816.0, ans=0.0 +2024-07-28 20:41:39,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=201816.0, ans=0.1 +2024-07-28 20:41:53,898 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=201842.66666666666, ans=0.04949747468305833 +2024-07-28 20:41:57,636 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.622e+01 6.090e+01 6.800e+01 1.043e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 20:42:00,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.55 vs. limit=15.0 +2024-07-28 20:42:08,690 INFO [train.py:1114] (1/4) Epoch 15, batch 8300, loss[loss=0.1879, simple_loss=0.2814, pruned_loss=0.04724, over 4896.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2719, pruned_loss=0.04623, over 938560.28 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:42:17,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=201896.0, ans=0.125 +2024-07-28 20:42:21,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=201909.33333333334, ans=0.125 +2024-07-28 20:42:24,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=201909.33333333334, ans=0.125 +2024-07-28 20:42:41,351 INFO [train.py:1114] (1/4) Epoch 15, batch 8350, loss[loss=0.1856, simple_loss=0.2837, pruned_loss=0.04374, over 4788.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2717, pruned_loss=0.04597, over 941686.12 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:42:42,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=201949.33333333334, ans=0.125 +2024-07-28 20:42:45,843 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.99 vs. 
limit=15.0 +2024-07-28 20:42:47,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=201949.33333333334, ans=0.125 +2024-07-28 20:42:51,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=201962.66666666666, ans=0.2 +2024-07-28 20:42:53,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=201962.66666666666, ans=0.125 +2024-07-28 20:42:54,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=201962.66666666666, ans=0.1 +2024-07-28 20:42:56,783 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=201976.0, ans=0.125 +2024-07-28 20:42:59,925 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:43:03,764 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.556e+01 6.243e+01 6.901e+01 1.019e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 20:43:08,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=202002.66666666666, ans=0.125 +2024-07-28 20:43:12,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202002.66666666666, ans=0.1 +2024-07-28 20:43:15,641 INFO [train.py:1114] (1/4) Epoch 15, batch 8400, loss[loss=0.1385, simple_loss=0.2233, pruned_loss=0.02686, over 4778.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2712, pruned_loss=0.04574, over 940030.36 frames. ], batch size: 12, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:43:15,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=202016.0, ans=0.5 +2024-07-28 20:43:17,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=202016.0, ans=10.0 +2024-07-28 20:43:26,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=202029.33333333334, ans=0.0 +2024-07-28 20:43:32,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.04 vs. limit=12.0 +2024-07-28 20:43:36,988 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:43:40,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202056.0, ans=0.125 +2024-07-28 20:43:51,496 INFO [train.py:1114] (1/4) Epoch 15, batch 8450, loss[loss=0.2149, simple_loss=0.3062, pruned_loss=0.06186, over 4786.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2718, pruned_loss=0.04574, over 938867.99 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:43:51,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202082.66666666666, ans=0.125 +2024-07-28 20:43:56,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. 
limit=15.0 +2024-07-28 20:44:02,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202096.0, ans=0.1 +2024-07-28 20:44:02,749 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:44:03,582 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.09 vs. limit=12.0 +2024-07-28 20:44:18,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-07-28 20:44:21,220 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.800e+01 6.456e+01 7.440e+01 1.040e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 20:44:21,711 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.64 vs. limit=15.0 +2024-07-28 20:44:22,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=202122.66666666666, ans=0.125 +2024-07-28 20:44:27,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=202136.0, ans=0.125 +2024-07-28 20:44:34,655 INFO [train.py:1114] (1/4) Epoch 15, batch 8500, loss[loss=0.145, simple_loss=0.2381, pruned_loss=0.02591, over 4614.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2718, pruned_loss=0.04587, over 938822.03 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:44:38,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=202149.33333333334, ans=0.025 +2024-07-28 20:44:47,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=202176.0, ans=0.125 +2024-07-28 20:44:49,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=202176.0, ans=0.025 +2024-07-28 20:44:51,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202176.0, ans=0.125 +2024-07-28 20:44:53,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=15.0 +2024-07-28 20:44:57,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.66 vs. limit=22.5 +2024-07-28 20:44:58,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=202189.33333333334, ans=0.125 +2024-07-28 20:45:05,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.23 vs. limit=22.5 +2024-07-28 20:45:07,511 INFO [train.py:1114] (1/4) Epoch 15, batch 8550, loss[loss=0.1352, simple_loss=0.2253, pruned_loss=0.02255, over 4804.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2712, pruned_loss=0.04572, over 939510.36 frames. 
], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:45:30,957 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.682e+01 6.336e+01 7.358e+01 1.234e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 20:45:41,901 INFO [train.py:1114] (1/4) Epoch 15, batch 8600, loss[loss=0.2113, simple_loss=0.2992, pruned_loss=0.06166, over 4804.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2718, pruned_loss=0.0462, over 939397.47 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:45:42,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=202282.66666666666, ans=0.2 +2024-07-28 20:45:57,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=202309.33333333334, ans=0.0 +2024-07-28 20:46:08,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=202336.0, ans=0.025 +2024-07-28 20:46:15,000 INFO [train.py:1114] (1/4) Epoch 15, batch 8650, loss[loss=0.2109, simple_loss=0.2958, pruned_loss=0.06303, over 4904.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04595, over 940493.07 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:46:15,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=202349.33333333334, ans=0.2 +2024-07-28 20:46:17,910 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-07-28 20:46:18,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=202349.33333333334, ans=0.0 +2024-07-28 20:46:18,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=11.33 vs. limit=15.0 +2024-07-28 20:46:34,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=202389.33333333334, ans=0.125 +2024-07-28 20:46:36,414 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.651e+01 6.077e+01 6.775e+01 1.563e+02, threshold=1.215e+02, percent-clipped=1.0 +2024-07-28 20:46:40,765 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.28 vs. limit=10.0 +2024-07-28 20:46:45,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=202402.66666666666, ans=0.125 +2024-07-28 20:46:45,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=202402.66666666666, ans=0.125 +2024-07-28 20:46:47,376 INFO [train.py:1114] (1/4) Epoch 15, batch 8700, loss[loss=0.1945, simple_loss=0.279, pruned_loss=0.05501, over 4764.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2725, pruned_loss=0.04607, over 938439.65 frames. 
], batch size: 13, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:46:50,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=202416.0, ans=0.0 +2024-07-28 20:47:05,510 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:47:18,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=202482.66666666666, ans=0.0 +2024-07-28 20:47:19,423 INFO [train.py:1114] (1/4) Epoch 15, batch 8750, loss[loss=0.1849, simple_loss=0.2734, pruned_loss=0.04825, over 4665.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2718, pruned_loss=0.04575, over 937147.86 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:47:24,116 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:47:33,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=202509.33333333334, ans=0.0 +2024-07-28 20:47:40,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.196e+01 6.974e+01 1.029e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 20:47:48,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=202536.0, ans=0.1 +2024-07-28 20:47:51,465 INFO [train.py:1114] (1/4) Epoch 15, batch 8800, loss[loss=0.1726, simple_loss=0.2627, pruned_loss=0.04127, over 4923.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.272, pruned_loss=0.04582, over 938131.17 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:48:00,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=202562.66666666666, ans=0.2 +2024-07-28 20:48:01,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=202562.66666666666, ans=0.0 +2024-07-28 20:48:06,903 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.12 vs. limit=15.0 +2024-07-28 20:48:13,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202589.33333333334, ans=0.0 +2024-07-28 20:48:17,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=202602.66666666666, ans=0.0 +2024-07-28 20:48:20,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=202602.66666666666, ans=0.125 +2024-07-28 20:48:22,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=202602.66666666666, ans=0.0 +2024-07-28 20:48:23,825 INFO [train.py:1114] (1/4) Epoch 15, batch 8850, loss[loss=0.1887, simple_loss=0.2808, pruned_loss=0.04825, over 4560.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2719, pruned_loss=0.04634, over 932978.18 frames. 
], batch size: 21, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:48:28,739 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=202616.0, ans=0.125 +2024-07-28 20:48:36,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=202629.33333333334, ans=0.125 +2024-07-28 20:48:41,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202642.66666666666, ans=0.0 +2024-07-28 20:48:44,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202642.66666666666, ans=0.0 +2024-07-28 20:48:50,489 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.661e+01 6.393e+01 7.198e+01 1.179e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 20:49:10,209 INFO [train.py:1114] (1/4) Epoch 15, batch 8900, loss[loss=0.154, simple_loss=0.2315, pruned_loss=0.03824, over 4948.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2728, pruned_loss=0.04697, over 930435.82 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:49:16,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=202696.0, ans=0.125 +2024-07-28 20:49:23,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=202709.33333333334, ans=0.125 +2024-07-28 20:49:32,901 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.15 vs. limit=12.0 +2024-07-28 20:49:35,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=202736.0, ans=0.0 +2024-07-28 20:49:39,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=202736.0, ans=0.025 +2024-07-28 20:49:40,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=202736.0, ans=0.2 +2024-07-28 20:49:42,090 INFO [train.py:1114] (1/4) Epoch 15, batch 8950, loss[loss=0.2023, simple_loss=0.2885, pruned_loss=0.058, over 4419.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2725, pruned_loss=0.04683, over 930943.06 frames. ], batch size: 21, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:49:53,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.98 vs. limit=10.0 +2024-07-28 20:50:00,245 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:50:02,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202789.33333333334, ans=0.0 +2024-07-28 20:50:03,356 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.676e+01 6.111e+01 7.140e+01 9.937e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 20:50:12,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.47 vs. 
limit=15.0 +2024-07-28 20:50:14,239 INFO [train.py:1114] (1/4) Epoch 15, batch 9000, loss[loss=0.1389, simple_loss=0.224, pruned_loss=0.0269, over 4646.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2706, pruned_loss=0.04605, over 933803.45 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:50:14,239 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 20:50:29,447 INFO [train.py:1146] (1/4) Epoch 15, validation: loss=0.164, simple_loss=0.2673, pruned_loss=0.03039, over 944034.00 frames. +2024-07-28 20:50:29,447 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 20:50:32,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=202816.0, ans=0.0 +2024-07-28 20:50:32,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=202816.0, ans=0.125 +2024-07-28 20:50:46,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.14 vs. limit=15.0 +2024-07-28 20:50:48,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=202856.0, ans=0.2 +2024-07-28 20:50:53,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=202856.0, ans=0.2 +2024-07-28 20:51:01,321 INFO [train.py:1114] (1/4) Epoch 15, batch 9050, loss[loss=0.1595, simple_loss=0.2358, pruned_loss=0.04155, over 4528.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2693, pruned_loss=0.04544, over 934522.02 frames. ], batch size: 10, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:51:17,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=202909.33333333334, ans=0.125 +2024-07-28 20:51:20,132 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=202922.66666666666, ans=0.0 +2024-07-28 20:51:21,885 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.604e+01 6.217e+01 7.321e+01 1.269e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 20:51:27,090 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=22.5 +2024-07-28 20:51:31,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202936.0, ans=0.125 +2024-07-28 20:51:32,957 INFO [train.py:1114] (1/4) Epoch 15, batch 9100, loss[loss=0.1907, simple_loss=0.2854, pruned_loss=0.048, over 4925.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2688, pruned_loss=0.04524, over 936805.54 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:51:42,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.48 vs. limit=10.0 +2024-07-28 20:51:43,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=202962.66666666666, ans=0.125 +2024-07-28 20:52:04,335 INFO [train.py:1114] (1/4) Epoch 15, batch 9150, loss[loss=0.1999, simple_loss=0.3031, pruned_loss=0.04833, over 4814.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2696, pruned_loss=0.04499, over 935650.40 frames. 
], batch size: 14, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:52:10,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=203029.33333333334, ans=0.125 +2024-07-28 20:52:16,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0 +2024-07-28 20:52:20,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=203042.66666666666, ans=0.025 +2024-07-28 20:52:25,460 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.523e+01 6.043e+01 6.925e+01 1.017e+02, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 20:52:31,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203069.33333333334, ans=0.125 +2024-07-28 20:52:36,146 INFO [train.py:1114] (1/4) Epoch 15, batch 9200, loss[loss=0.1595, simple_loss=0.2472, pruned_loss=0.03594, over 4850.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2694, pruned_loss=0.0448, over 937251.13 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:52:47,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=203096.0, ans=0.0 +2024-07-28 20:52:53,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203109.33333333334, ans=0.125 +2024-07-28 20:53:01,871 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.88 vs. limit=15.0 +2024-07-28 20:53:07,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=203136.0, ans=0.0 +2024-07-28 20:53:08,403 INFO [train.py:1114] (1/4) Epoch 15, batch 9250, loss[loss=0.1966, simple_loss=0.2846, pruned_loss=0.05429, over 4637.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2697, pruned_loss=0.04501, over 938228.79 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:53:09,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=203149.33333333334, ans=0.125 +2024-07-28 20:53:16,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203162.66666666666, ans=0.125 +2024-07-28 20:53:19,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.82 vs. 
limit=22.5 +2024-07-28 20:53:23,106 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:53:29,341 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.672e+01 6.344e+01 6.747e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 20:53:34,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203202.66666666666, ans=0.125 +2024-07-28 20:53:34,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=203202.66666666666, ans=0.5 +2024-07-28 20:53:34,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=203202.66666666666, ans=0.125 +2024-07-28 20:53:41,023 INFO [train.py:1114] (1/4) Epoch 15, batch 9300, loss[loss=0.1445, simple_loss=0.2306, pruned_loss=0.02918, over 4766.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2701, pruned_loss=0.04545, over 937815.25 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:53:52,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=203229.33333333334, ans=0.0 +2024-07-28 20:53:55,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=203242.66666666666, ans=0.2 +2024-07-28 20:54:13,767 INFO [train.py:1114] (1/4) Epoch 15, batch 9350, loss[loss=0.1584, simple_loss=0.238, pruned_loss=0.03944, over 4797.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2709, pruned_loss=0.04591, over 934535.03 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:54:21,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-07-28 20:54:25,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=203296.0, ans=0.09899494936611666 +2024-07-28 20:54:25,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203296.0, ans=0.125 +2024-07-28 20:54:25,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203296.0, ans=0.125 +2024-07-28 20:54:34,860 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.452e+01 6.189e+01 7.531e+01 9.435e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 20:54:36,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=203322.66666666666, ans=0.025 +2024-07-28 20:54:45,592 INFO [train.py:1114] (1/4) Epoch 15, batch 9400, loss[loss=0.1832, simple_loss=0.2804, pruned_loss=0.04297, over 4698.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04602, over 932698.07 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:54:54,617 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.48 vs. 
limit=15.0 +2024-07-28 20:54:59,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=203376.0, ans=0.125 +2024-07-28 20:55:01,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203376.0, ans=0.0 +2024-07-28 20:55:11,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203402.66666666666, ans=0.125 +2024-07-28 20:55:17,112 INFO [train.py:1114] (1/4) Epoch 15, batch 9450, loss[loss=0.14, simple_loss=0.2288, pruned_loss=0.02563, over 4790.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.271, pruned_loss=0.0457, over 932015.38 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:55:19,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=203416.0, ans=0.125 +2024-07-28 20:55:23,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0 +2024-07-28 20:55:25,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203429.33333333334, ans=0.0 +2024-07-28 20:55:29,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.68 vs. limit=22.5 +2024-07-28 20:55:33,303 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:55:37,632 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+01 5.463e+01 5.974e+01 6.797e+01 9.307e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 20:55:48,428 INFO [train.py:1114] (1/4) Epoch 15, batch 9500, loss[loss=0.155, simple_loss=0.247, pruned_loss=0.03144, over 4708.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2715, pruned_loss=0.04564, over 934623.50 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:55:54,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=203496.0, ans=0.0 +2024-07-28 20:56:02,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=203509.33333333334, ans=0.125 +2024-07-28 20:56:05,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=203509.33333333334, ans=0.025 +2024-07-28 20:56:06,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=203509.33333333334, ans=0.07 +2024-07-28 20:56:07,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.99 vs. limit=6.0 +2024-07-28 20:56:12,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203536.0, ans=0.0 +2024-07-28 20:56:16,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=203536.0, ans=0.0 +2024-07-28 20:56:19,723 INFO [train.py:1114] (1/4) Epoch 15, batch 9550, loss[loss=0.1712, simple_loss=0.2617, pruned_loss=0.04037, over 4769.00 frames. 
], tot_loss[loss=0.1812, simple_loss=0.2709, pruned_loss=0.04574, over 931865.45 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:56:23,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203549.33333333334, ans=0.1 +2024-07-28 20:56:37,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=203576.0, ans=0.09899494936611666 +2024-07-28 20:56:37,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.42 vs. limit=22.5 +2024-07-28 20:56:40,323 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.499e+01 6.112e+01 6.972e+01 9.508e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 20:56:43,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 20:56:43,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.87 vs. limit=22.5 +2024-07-28 20:56:43,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=203589.33333333334, ans=0.025 +2024-07-28 20:56:44,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203602.66666666666, ans=0.1 +2024-07-28 20:56:46,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.26 vs. limit=15.0 +2024-07-28 20:56:50,868 INFO [train.py:1114] (1/4) Epoch 15, batch 9600, loss[loss=0.215, simple_loss=0.2937, pruned_loss=0.06814, over 3411.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2713, pruned_loss=0.04571, over 930567.90 frames. ], batch size: 36, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:56:52,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=203616.0, ans=0.015 +2024-07-28 20:56:56,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=203629.33333333334, ans=0.2 +2024-07-28 20:57:00,808 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.77 vs. limit=15.0 +2024-07-28 20:57:15,587 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.57 vs. limit=15.0 +2024-07-28 20:57:15,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=203669.33333333334, ans=0.2 +2024-07-28 20:57:19,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=203669.33333333334, ans=0.0 +2024-07-28 20:57:22,694 INFO [train.py:1114] (1/4) Epoch 15, batch 9650, loss[loss=0.2064, simple_loss=0.3058, pruned_loss=0.05349, over 4851.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2723, pruned_loss=0.04611, over 926739.65 frames. 
], batch size: 16, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:57:27,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.19 vs. limit=6.0 +2024-07-28 20:57:34,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=203696.0, ans=0.125 +2024-07-28 20:57:39,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=203709.33333333334, ans=0.0 +2024-07-28 20:57:40,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203709.33333333334, ans=0.125 +2024-07-28 20:57:44,440 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.709e+01 6.228e+01 7.235e+01 8.715e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 20:57:44,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203722.66666666666, ans=0.125 +2024-07-28 20:57:55,047 INFO [train.py:1114] (1/4) Epoch 15, batch 9700, loss[loss=0.2269, simple_loss=0.3348, pruned_loss=0.05951, over 4321.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2719, pruned_loss=0.04624, over 925362.59 frames. ], batch size: 26, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:58:07,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=203776.0, ans=0.2 +2024-07-28 20:58:12,471 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0 +2024-07-28 20:58:19,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=203789.33333333334, ans=0.125 +2024-07-28 20:58:19,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203789.33333333334, ans=0.125 +2024-07-28 20:58:27,165 INFO [train.py:1114] (1/4) Epoch 15, batch 9750, loss[loss=0.1845, simple_loss=0.2719, pruned_loss=0.04853, over 4697.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2709, pruned_loss=0.04588, over 925815.38 frames. ], batch size: 15, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:58:36,346 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.97 vs. 
limit=15.0 +2024-07-28 20:58:38,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=203829.33333333334, ans=0.2 +2024-07-28 20:58:38,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203829.33333333334, ans=0.0 +2024-07-28 20:58:39,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=203842.66666666666, ans=0.025 +2024-07-28 20:58:42,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203842.66666666666, ans=0.125 +2024-07-28 20:58:44,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=203842.66666666666, ans=0.125 +2024-07-28 20:58:48,315 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.731e+01 6.608e+01 7.819e+01 1.278e+02, threshold=1.322e+02, percent-clipped=1.0 +2024-07-28 20:58:50,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=203856.0, ans=0.125 +2024-07-28 20:59:03,964 INFO [train.py:1114] (1/4) Epoch 15, batch 9800, loss[loss=0.1423, simple_loss=0.2324, pruned_loss=0.02603, over 4701.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2696, pruned_loss=0.04561, over 925858.70 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:59:05,387 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:59:11,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.40 vs. limit=8.0 +2024-07-28 20:59:15,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=203909.33333333334, ans=0.125 +2024-07-28 20:59:19,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-28 20:59:24,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=203922.66666666666, ans=0.05 +2024-07-28 20:59:27,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.60 vs. limit=15.0 +2024-07-28 20:59:35,230 INFO [train.py:1114] (1/4) Epoch 15, batch 9850, loss[loss=0.2332, simple_loss=0.3047, pruned_loss=0.08087, over 4886.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2702, pruned_loss=0.04579, over 928201.23 frames. 
], batch size: 15, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 20:59:37,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=203949.33333333334, ans=0.0 +2024-07-28 20:59:38,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=203949.33333333334, ans=0.025 +2024-07-28 20:59:39,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=203949.33333333334, ans=0.0 +2024-07-28 20:59:46,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203962.66666666666, ans=0.125 +2024-07-28 20:59:47,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=203976.0, ans=0.125 +2024-07-28 20:59:52,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=203976.0, ans=0.0 +2024-07-28 20:59:56,144 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.690e+01 6.538e+01 7.363e+01 1.082e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 20:59:59,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=204002.66666666666, ans=0.0 +2024-07-28 21:00:06,869 INFO [train.py:1114] (1/4) Epoch 15, batch 9900, loss[loss=0.1997, simple_loss=0.2907, pruned_loss=0.05439, over 4832.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.271, pruned_loss=0.04625, over 927385.98 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:00:09,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=204016.0, ans=0.125 +2024-07-28 21:00:14,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204029.33333333334, ans=0.1 +2024-07-28 21:00:18,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204042.66666666666, ans=0.125 +2024-07-28 21:00:27,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=204056.0, ans=0.025 +2024-07-28 21:00:28,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=204056.0, ans=0.2 +2024-07-28 21:00:37,695 INFO [train.py:1114] (1/4) Epoch 15, batch 9950, loss[loss=0.1739, simple_loss=0.2434, pruned_loss=0.05214, over 4806.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2724, pruned_loss=0.04719, over 930140.76 frames. 
], batch size: 11, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:00:49,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204096.0, ans=0.1 +2024-07-28 21:00:58,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=204122.66666666666, ans=0.125 +2024-07-28 21:00:59,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 6.089e+01 6.834e+01 7.968e+01 1.113e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 21:01:06,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204136.0, ans=0.1 +2024-07-28 21:01:07,952 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:01:08,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0 +2024-07-28 21:01:09,729 INFO [train.py:1114] (1/4) Epoch 15, batch 10000, loss[loss=0.1703, simple_loss=0.2764, pruned_loss=0.03209, over 4643.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2744, pruned_loss=0.04771, over 927287.36 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:01:12,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.0 +2024-07-28 21:01:17,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=204162.66666666666, ans=0.0 +2024-07-28 21:01:20,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=12.0 +2024-07-28 21:01:25,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=204176.0, ans=0.0 +2024-07-28 21:01:29,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=204189.33333333334, ans=15.0 +2024-07-28 21:01:36,699 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.44 vs. limit=15.0 +2024-07-28 21:01:41,139 INFO [train.py:1114] (1/4) Epoch 15, batch 10050, loss[loss=0.2355, simple_loss=0.3194, pruned_loss=0.07574, over 3455.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2773, pruned_loss=0.04909, over 915196.57 frames. 
], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:01:48,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=204229.33333333334, ans=0.07 +2024-07-28 21:01:51,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=204229.33333333334, ans=10.0 +2024-07-28 21:01:57,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=204242.66666666666, ans=0.125 +2024-07-28 21:02:02,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=204256.0, ans=0.0 +2024-07-28 21:02:04,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.990e+01 6.680e+01 7.345e+01 9.959e+01, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 21:02:10,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=204269.33333333334, ans=0.2 +2024-07-28 21:02:15,464 INFO [train.py:1114] (1/4) Epoch 15, batch 10100, loss[loss=0.1959, simple_loss=0.2821, pruned_loss=0.0549, over 3324.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2817, pruned_loss=0.05404, over 862434.03 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:02:17,853 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.17 vs. limit=10.0 +2024-07-28 21:02:18,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=204282.66666666666, ans=0.125 +2024-07-28 21:02:30,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=204309.33333333334, ans=0.125 +2024-07-28 21:02:33,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=204309.33333333334, ans=0.0 +2024-07-28 21:02:40,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204322.66666666666, ans=0.125 +2024-07-28 21:02:40,484 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.53 vs. limit=6.0 +2024-07-28 21:02:44,284 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:02:45,265 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.68 vs. limit=22.5 +2024-07-28 21:02:46,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=204336.0, ans=0.0 +2024-07-28 21:02:48,786 INFO [train.py:1114] (1/4) Epoch 15, batch 10150, loss[loss=0.2692, simple_loss=0.3346, pruned_loss=0.1019, over 3099.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2848, pruned_loss=0.0573, over 822575.01 frames. 
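(Editorial note: in the optim.py warnings above, the reported threshold consistently equals `Clipping_scale` times the median of the grad-norm quartiles, e.g. 2.0 x 6.680e+01 = 1.336e+02 in the entry just above. A sketch of that bookkeeping, with hypothetical names and window size, might look like this; it is not the actual optim.py code.)

```python
from collections import deque

import torch

class GradNormClipper:
    """Clips gradients against clipping_scale x the median of recent norms,
    tracking the quartiles that the warnings above report."""

    def __init__(self, window: int = 1024, clipping_scale: float = 2.0):
        self.history = deque(maxlen=window)
        self.clipping_scale = clipping_scale

    def clip_(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.history.append(norm)
        q = torch.quantile(torch.tensor(list(self.history)),
                           torch.tensor([0.0, 0.25, 0.50, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # 2.0 x the median
        if norm > threshold:
            for g in grads:          # scale gradients down in place
                g.mul_(threshold / norm)
        return norm
```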
], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:02:50,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204349.33333333334, ans=0.0 +2024-07-28 21:02:57,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.83 vs. limit=15.0 +2024-07-28 21:03:01,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=204376.0, ans=0.015 +2024-07-28 21:03:03,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=204376.0, ans=0.0 +2024-07-28 21:03:05,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204376.0, ans=0.125 +2024-07-28 21:03:07,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=204376.0, ans=0.0 +2024-07-28 21:03:12,074 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.640e+01 7.110e+01 7.457e+01 9.149e+01, threshold=1.422e+02, percent-clipped=0.0 +2024-07-28 21:03:16,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.42 vs. limit=15.0 +2024-07-28 21:03:22,475 INFO [train.py:1114] (1/4) Epoch 15, batch 10200, loss[loss=0.2397, simple_loss=0.3073, pruned_loss=0.08601, over 3432.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2882, pruned_loss=0.06113, over 789138.82 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:03:26,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=204416.0, ans=0.0 +2024-07-28 21:04:37,196 INFO [train.py:1114] (1/4) Epoch 16, batch 0, loss[loss=0.1567, simple_loss=0.2547, pruned_loss=0.02938, over 4850.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2547, pruned_loss=0.02938, over 4850.00 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:04:37,197 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 21:04:45,242 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0873, 3.8777, 3.9642, 3.8930, 4.3691, 4.2373, 4.4221, 3.8022], + device='cuda:1') +2024-07-28 21:04:48,651 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1648, simple_loss=0.2693, pruned_loss=0.03017, over 944034.00 frames. +2024-07-28 21:04:48,652 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 21:04:54,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=204445.33333333334, ans=0.0 +2024-07-28 21:04:55,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=204458.66666666666, ans=0.2 +2024-07-28 21:05:13,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204485.33333333334, ans=0.1 +2024-07-28 21:05:23,359 INFO [train.py:1114] (1/4) Epoch 16, batch 50, loss[loss=0.1575, simple_loss=0.251, pruned_loss=0.032, over 4611.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2739, pruned_loss=0.04614, over 206474.77 frames. 
], batch size: 11, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:05:23,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204512.0, ans=0.125 +2024-07-28 21:05:30,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=204512.0, ans=0.0 +2024-07-28 21:05:33,290 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.55 vs. limit=15.0 +2024-07-28 21:05:36,791 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.659e+01 6.518e+01 7.271e+01 1.139e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 21:05:45,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204538.66666666666, ans=0.125 +2024-07-28 21:05:57,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.71 vs. limit=15.0 +2024-07-28 21:06:04,206 INFO [train.py:1114] (1/4) Epoch 16, batch 100, loss[loss=0.1729, simple_loss=0.2611, pruned_loss=0.04236, over 4636.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.275, pruned_loss=0.04695, over 365328.44 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:06:15,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=204592.0, ans=0.2 +2024-07-28 21:06:21,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204605.33333333334, ans=0.125 +2024-07-28 21:06:23,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204605.33333333334, ans=0.1 +2024-07-28 21:06:36,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=204632.0, ans=0.0 +2024-07-28 21:06:39,309 INFO [train.py:1114] (1/4) Epoch 16, batch 150, loss[loss=0.1383, simple_loss=0.2238, pruned_loss=0.02639, over 4607.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2715, pruned_loss=0.04535, over 493952.81 frames. 
], batch size: 11, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:06:47,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204645.33333333334, ans=0.0 +2024-07-28 21:06:48,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204658.66666666666, ans=0.1 +2024-07-28 21:06:50,236 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.360e+01 5.968e+01 6.673e+01 1.001e+02, threshold=1.194e+02, percent-clipped=0.0 +2024-07-28 21:06:52,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=204658.66666666666, ans=0.125 +2024-07-28 21:06:55,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=204672.0, ans=0.125 +2024-07-28 21:07:13,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=204698.66666666666, ans=0.125 +2024-07-28 21:07:18,885 INFO [train.py:1114] (1/4) Epoch 16, batch 200, loss[loss=0.1906, simple_loss=0.281, pruned_loss=0.05012, over 4426.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2707, pruned_loss=0.04525, over 593602.05 frames. ], batch size: 21, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:07:19,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=204712.0, ans=0.0 +2024-07-28 21:07:23,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=204712.0, ans=0.125 +2024-07-28 21:07:28,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-28 21:07:33,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=204738.66666666666, ans=0.2 +2024-07-28 21:07:38,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=204752.0, ans=0.0 +2024-07-28 21:07:48,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=204765.33333333334, ans=0.125 +2024-07-28 21:07:52,282 INFO [train.py:1114] (1/4) Epoch 16, batch 250, loss[loss=0.209, simple_loss=0.3035, pruned_loss=0.05721, over 4631.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2713, pruned_loss=0.04522, over 669989.07 frames. 
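(Editorial note: just below, zipformer.py logs `attn_weights_entropy` for a self-attention module at validation time. Entropy of the attention distribution is a standard diagnostic for how diffuse each head's weights are; a hedged sketch of that computation follows, with the function name and shapes assumed.)

```python
import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    """attn: (num_heads, tgt_len, src_len), each row summing to 1.
    Returns the mean entropy (in nats) of each head's attention distribution."""
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # (num_heads, tgt_len)
    return ent.mean(dim=-1)                         # one value per head

attn = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(attn))  # uniform attention over 50 keys gives log 50 ~ 3.91
```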
], batch size: 16, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:07:57,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=204778.66666666666, ans=0.125 +2024-07-28 21:07:58,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=204778.66666666666, ans=0.5 +2024-07-28 21:08:05,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=204792.0, ans=0.025 +2024-07-28 21:08:05,814 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.773e+01 6.705e+01 7.902e+01 1.167e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 21:08:09,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=204792.0, ans=0.0 +2024-07-28 21:08:09,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.78 vs. limit=15.0 +2024-07-28 21:08:10,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204792.0, ans=0.1 +2024-07-28 21:08:16,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204805.33333333334, ans=0.1 +2024-07-28 21:08:20,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=204818.66666666666, ans=0.0 +2024-07-28 21:08:25,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=204832.0, ans=0.125 +2024-07-28 21:08:28,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204832.0, ans=0.1 +2024-07-28 21:08:36,621 INFO [train.py:1114] (1/4) Epoch 16, batch 300, loss[loss=0.205, simple_loss=0.305, pruned_loss=0.05247, over 4807.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2706, pruned_loss=0.04442, over 729326.67 frames. ], batch size: 15, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:08:40,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204845.33333333334, ans=0.0 +2024-07-28 21:08:57,840 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=204885.33333333334, ans=0.125 +2024-07-28 21:09:09,772 INFO [train.py:1114] (1/4) Epoch 16, batch 350, loss[loss=0.1698, simple_loss=0.2528, pruned_loss=0.04341, over 4948.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2696, pruned_loss=0.0439, over 775557.96 frames. 
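(Editorial note: the entries above show training pausing at the first batch of epoch 16 to compute a validation loss over 944034 frames and to report peak GPU memory. A minimal sketch of that pattern, where the model, loader, and criterion are placeholders rather than the train.py internals: the memory figure maps directly onto `torch.cuda.max_memory_allocated`.)

```python
import torch

@torch.no_grad()
def compute_validation_loss(model, valid_loader, criterion, device="cuda"):
    """Frames-weighted average loss over the validation set."""
    was_training = model.training
    model.eval()
    loss_sum, frames = 0.0, 0.0
    for feats, targets, num_frames in valid_loader:
        loss = criterion(model(feats.to(device)), targets.to(device))
        loss_sum += loss.item() * num_frames
        frames += num_frames
    if was_training:
        model.train()  # resume training mode afterwards
    # peak usage, as in "Maximum memory allocated so far is 4129MB":
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return loss_sum / frames, peak_mb
```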
], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:12,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=204912.0, ans=0.125 +2024-07-28 21:09:17,695 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.458e+01 6.054e+01 6.509e+01 1.036e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 21:09:32,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=204952.0, ans=0.0 +2024-07-28 21:09:44,826 INFO [train.py:1114] (1/4) Epoch 16, batch 400, loss[loss=0.1785, simple_loss=0.2745, pruned_loss=0.04124, over 4692.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.269, pruned_loss=0.0438, over 813471.37 frames. ], batch size: 13, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:46,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=8.0 +2024-07-28 21:09:46,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.97 vs. limit=5.0 +2024-07-28 21:09:57,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=205005.33333333334, ans=0.5 +2024-07-28 21:13:24,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=205005.33333333334, ans=0.025 +2024-07-28 21:13:45,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.77 vs. limit=22.5 +2024-07-28 21:13:45,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=205018.66666666666, ans=0.125 +2024-07-28 21:13:45,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=205018.66666666666, ans=0.0 +2024-07-28 21:13:51,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205032.0, ans=0.125 +2024-07-28 21:13:51,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205032.0, ans=0.125 +2024-07-28 21:13:55,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=205032.0, ans=0.025 +2024-07-28 21:13:59,382 INFO [train.py:1114] (1/4) Epoch 16, batch 450, loss[loss=0.1921, simple_loss=0.2999, pruned_loss=0.04212, over 4639.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2692, pruned_loss=0.044, over 839356.01 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:14:13,707 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.588e+01 6.021e+01 6.553e+01 1.018e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 21:14:21,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205072.0, ans=0.125 +2024-07-28 21:14:24,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.65 vs. 
limit=10.0 +2024-07-28 21:14:25,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. limit=6.0 +2024-07-28 21:14:39,772 INFO [train.py:1114] (1/4) Epoch 16, batch 500, loss[loss=0.2117, simple_loss=0.2852, pruned_loss=0.06909, over 4688.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2687, pruned_loss=0.04384, over 861687.60 frames. ], batch size: 15, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:14:55,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205138.66666666666, ans=0.1 +2024-07-28 21:15:01,782 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.10 vs. limit=22.5 +2024-07-28 21:15:27,932 INFO [train.py:1114] (1/4) Epoch 16, batch 550, loss[loss=0.1894, simple_loss=0.2841, pruned_loss=0.04738, over 4622.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2684, pruned_loss=0.04371, over 877974.35 frames. ], batch size: 17, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:15:28,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205178.66666666666, ans=0.125 +2024-07-28 21:15:34,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=15.0 +2024-07-28 21:15:37,688 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.496e+01 6.135e+01 6.977e+01 1.008e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 21:15:37,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=205192.0, ans=0.025 +2024-07-28 21:15:42,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=205205.33333333334, ans=0.025 +2024-07-28 21:15:54,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=205218.66666666666, ans=0.125 +2024-07-28 21:16:04,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=205245.33333333334, ans=0.09899494936611666 +2024-07-28 21:16:05,155 INFO [train.py:1114] (1/4) Epoch 16, batch 600, loss[loss=0.1873, simple_loss=0.2833, pruned_loss=0.04559, over 4610.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2687, pruned_loss=0.04385, over 892340.19 frames. ], batch size: 16, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:16:15,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=205258.66666666666, ans=0.07 +2024-07-28 21:16:28,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.76 vs. 
limit=15.0 +2024-07-28 21:16:28,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=205285.33333333334, ans=0.125 +2024-07-28 21:16:30,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=205285.33333333334, ans=0.0 +2024-07-28 21:16:32,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205298.66666666666, ans=0.1 +2024-07-28 21:16:38,108 INFO [train.py:1114] (1/4) Epoch 16, batch 650, loss[loss=0.1887, simple_loss=0.2677, pruned_loss=0.05487, over 4756.00 frames. ], tot_loss[loss=0.179, simple_loss=0.269, pruned_loss=0.04449, over 904043.28 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:16:44,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=205325.33333333334, ans=0.0 +2024-07-28 21:16:46,460 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.356e+01 6.014e+01 6.947e+01 8.768e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 21:16:46,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=205325.33333333334, ans=0.125 +2024-07-28 21:16:51,489 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.84 vs. limit=22.5 +2024-07-28 21:16:57,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205338.66666666666, ans=0.125 +2024-07-28 21:17:10,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=205365.33333333334, ans=0.125 +2024-07-28 21:17:12,597 INFO [train.py:1114] (1/4) Epoch 16, batch 700, loss[loss=0.1693, simple_loss=0.2618, pruned_loss=0.03838, over 4645.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2686, pruned_loss=0.04415, over 911522.11 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:17:12,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=205378.66666666666, ans=0.2 +2024-07-28 21:17:36,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=205418.66666666666, ans=0.0 +2024-07-28 21:17:45,815 INFO [train.py:1114] (1/4) Epoch 16, batch 750, loss[loss=0.1721, simple_loss=0.2731, pruned_loss=0.03554, over 4695.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2688, pruned_loss=0.04456, over 917794.70 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:17:45,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205445.33333333334, ans=0.125 +2024-07-28 21:17:46,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=205445.33333333334, ans=0.025 +2024-07-28 21:17:46,834 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.96 vs. 
limit=15.0 +2024-07-28 21:17:53,634 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+01 5.543e+01 6.025e+01 6.972e+01 9.778e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 21:17:56,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=205458.66666666666, ans=0.05 +2024-07-28 21:18:00,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.91 vs. limit=15.0 +2024-07-28 21:18:05,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205485.33333333334, ans=0.1 +2024-07-28 21:18:19,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205498.66666666666, ans=0.125 +2024-07-28 21:18:22,217 INFO [train.py:1114] (1/4) Epoch 16, batch 800, loss[loss=0.1476, simple_loss=0.2276, pruned_loss=0.03379, over 4855.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2696, pruned_loss=0.04523, over 922641.75 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:18:27,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.00 vs. limit=10.0 +2024-07-28 21:18:30,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=205525.33333333334, ans=0.125 +2024-07-28 21:18:35,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=205525.33333333334, ans=0.0 +2024-07-28 21:18:39,355 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:18:48,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.46 vs. limit=15.0 +2024-07-28 21:18:59,938 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.76 vs. limit=15.0 +2024-07-28 21:19:00,044 INFO [train.py:1114] (1/4) Epoch 16, batch 850, loss[loss=0.1775, simple_loss=0.2663, pruned_loss=0.04433, over 4655.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2694, pruned_loss=0.04557, over 926792.44 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:19:07,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=205578.66666666666, ans=0.125 +2024-07-28 21:19:11,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.554e+01 6.346e+01 7.200e+01 1.191e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 21:19:31,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205605.33333333334, ans=0.1 +2024-07-28 21:19:51,892 INFO [train.py:1114] (1/4) Epoch 16, batch 900, loss[loss=0.1934, simple_loss=0.2711, pruned_loss=0.05788, over 4846.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2696, pruned_loss=0.04539, over 927957.06 frames. 
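(Editorial note: the Whitening entries compare a per-module statistic of the activations against a limit; the `metric=10.00 vs. limit=10.0` reading above sits exactly at its bound. One plausible metric, an assumption rather than necessarily what scaling.py computes, is the ratio mean(eig^2) / mean(eig)^2 over the eigenvalues of the per-group channel covariance, which equals 1.0 for perfectly white features and grows as a few directions dominate.)

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels). Returns mean(eig^2) / mean(eig)^2 over
    the eigenvalues of the per-group channel covariance."""
    n, c = x.shape
    assert c % num_groups == 0
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    cov = x.transpose(1, 2) @ x / n              # (groups, cg, cg)
    eigs = torch.linalg.eigvalsh(cov)            # real, ascending
    metric = (eigs ** 2).mean(dim=-1) / eigs.mean(dim=-1) ** 2
    return metric.mean().item()

print(whitening_metric(torch.randn(100_000, 384)))  # close to 1.0 for white noise
```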
], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:20:08,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=205658.66666666666, ans=0.125 +2024-07-28 21:20:22,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205685.33333333334, ans=0.1 +2024-07-28 21:20:22,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=205685.33333333334, ans=0.0 +2024-07-28 21:20:28,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=205698.66666666666, ans=0.2 +2024-07-28 21:20:34,590 INFO [train.py:1114] (1/4) Epoch 16, batch 950, loss[loss=0.1289, simple_loss=0.2209, pruned_loss=0.01851, over 4787.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2692, pruned_loss=0.04506, over 929611.20 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:20:39,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205712.0, ans=0.125 +2024-07-28 21:20:40,160 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205712.0, ans=0.125 +2024-07-28 21:20:42,594 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.442e+01 5.900e+01 6.572e+01 1.088e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 21:20:50,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=205738.66666666666, ans=0.125 +2024-07-28 21:20:51,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=205738.66666666666, ans=0.0 +2024-07-28 21:20:55,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205752.0, ans=0.1 +2024-07-28 21:20:58,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=12.0 +2024-07-28 21:21:03,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=205765.33333333334, ans=0.025 +2024-07-28 21:21:05,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=12.0 +2024-07-28 21:21:07,912 INFO [train.py:1114] (1/4) Epoch 16, batch 1000, loss[loss=0.172, simple_loss=0.2592, pruned_loss=0.0424, over 4965.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2698, pruned_loss=0.04526, over 929815.63 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:21:12,281 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.45 vs. 
limit=15.0 +2024-07-28 21:21:13,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=205778.66666666666, ans=0.125 +2024-07-28 21:21:14,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=205778.66666666666, ans=0.2 +2024-07-28 21:21:22,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=205805.33333333334, ans=0.025 +2024-07-28 21:21:23,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=205805.33333333334, ans=0.125 +2024-07-28 21:21:29,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=205805.33333333334, ans=0.025 +2024-07-28 21:21:51,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=205832.0, ans=0.2 +2024-07-28 21:21:57,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=205832.0, ans=0.125 +2024-07-28 21:22:05,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=205832.0, ans=0.125 +2024-07-28 21:22:08,533 INFO [train.py:1114] (1/4) Epoch 16, batch 1050, loss[loss=0.2002, simple_loss=0.2964, pruned_loss=0.05198, over 4871.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2688, pruned_loss=0.0448, over 932107.56 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:22:08,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=205845.33333333334, ans=0.125 +2024-07-28 21:22:14,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205845.33333333334, ans=0.125 +2024-07-28 21:22:19,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=205858.66666666666, ans=0.2 +2024-07-28 21:22:20,839 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.557e+01 6.013e+01 7.001e+01 9.107e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 21:22:30,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=205872.0, ans=10.0 +2024-07-28 21:22:33,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=205872.0, ans=0.0 +2024-07-28 21:22:37,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205885.33333333334, ans=0.125 +2024-07-28 21:24:11,057 INFO [train.py:1114] (1/4) Epoch 16, batch 1100, loss[loss=0.1547, simple_loss=0.2526, pruned_loss=0.02838, over 4891.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2683, pruned_loss=0.04455, over 934873.66 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:24:15,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=205912.0, ans=0.125 +2024-07-28 21:24:16,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.74 vs. 
limit=15.0 +2024-07-28 21:24:23,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205925.33333333334, ans=0.125 +2024-07-28 21:24:51,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=205952.0, ans=0.1 +2024-07-28 21:24:59,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=205965.33333333334, ans=12.0 +2024-07-28 21:25:48,842 INFO [train.py:1114] (1/4) Epoch 16, batch 1150, loss[loss=0.1781, simple_loss=0.2623, pruned_loss=0.04699, over 4896.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04398, over 935322.74 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:29:10,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=205978.66666666666, ans=0.2 +2024-07-28 21:29:59,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=205992.0, ans=0.125 +2024-07-28 21:30:29,616 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.518e+01 6.042e+01 7.033e+01 1.072e+02, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 21:30:29,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=205992.0, ans=0.025 +2024-07-28 21:30:30,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.14 vs. limit=6.0 +2024-07-28 21:31:12,336 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.59 vs. limit=10.0 +2024-07-28 21:34:06,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=206018.66666666666, ans=0.0 +2024-07-28 21:34:07,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=206018.66666666666, ans=0.125 +2024-07-28 21:34:38,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=206018.66666666666, ans=0.0 +2024-07-28 21:34:40,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=206018.66666666666, ans=0.0 +2024-07-28 21:35:15,734 INFO [train.py:1114] (1/4) Epoch 16, batch 1200, loss[loss=0.1878, simple_loss=0.2869, pruned_loss=0.04434, over 4874.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.0441, over 934146.03 frames. 
], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:35:22,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=206058.66666666666, ans=0.125 +2024-07-28 21:35:38,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206058.66666666666, ans=0.125 +2024-07-28 21:35:38,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206058.66666666666, ans=0.1 +2024-07-28 21:38:19,714 INFO [train.py:1114] (1/4) Epoch 16, batch 1250, loss[loss=0.1924, simple_loss=0.2746, pruned_loss=0.05507, over 4813.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2685, pruned_loss=0.04403, over 938114.61 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:38:33,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.93 vs. limit=15.0 +2024-07-28 21:38:46,735 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.568e+01 5.937e+01 6.680e+01 9.097e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 21:38:55,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=206125.33333333334, ans=0.025 +2024-07-28 21:39:01,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-07-28 21:39:05,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=206138.66666666666, ans=0.05 +2024-07-28 21:39:15,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206152.0, ans=0.0 +2024-07-28 21:39:15,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206152.0, ans=0.125 +2024-07-28 21:39:15,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=206152.0, ans=0.95 +2024-07-28 21:39:17,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=206152.0, ans=0.0 +2024-07-28 21:39:20,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=206152.0, ans=0.125 +2024-07-28 21:39:33,230 INFO [train.py:1114] (1/4) Epoch 16, batch 1300, loss[loss=0.1858, simple_loss=0.2812, pruned_loss=0.04517, over 4658.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2676, pruned_loss=0.04365, over 939232.17 frames. ], batch size: 19, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:39:36,375 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.34 vs. 
limit=5.0 +2024-07-28 21:39:37,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=206178.66666666666, ans=0.0 +2024-07-28 21:39:46,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206192.0, ans=0.1 +2024-07-28 21:39:58,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=206205.33333333334, ans=0.125 +2024-07-28 21:39:59,349 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=206205.33333333334, ans=0.125 +2024-07-28 21:40:08,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=206232.0, ans=0.09899494936611666 +2024-07-28 21:40:15,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206232.0, ans=0.125 +2024-07-28 21:40:17,466 INFO [train.py:1114] (1/4) Epoch 16, batch 1350, loss[loss=0.1697, simple_loss=0.2589, pruned_loss=0.04027, over 4755.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2667, pruned_loss=0.04303, over 941072.74 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:40:26,347 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.922e+01 5.660e+01 6.386e+01 7.583e+01 1.369e+02, threshold=1.277e+02, percent-clipped=2.0 +2024-07-28 21:40:43,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-07-28 21:40:51,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=206285.33333333334, ans=0.0 +2024-07-28 21:40:51,974 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=22.5 +2024-07-28 21:41:32,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=206312.0, ans=0.125 +2024-07-28 21:41:32,872 INFO [train.py:1114] (1/4) Epoch 16, batch 1400, loss[loss=0.1571, simple_loss=0.2489, pruned_loss=0.0326, over 4707.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2664, pruned_loss=0.04278, over 942753.42 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:42:00,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=206325.33333333334, ans=0.2 +2024-07-28 21:42:19,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=206338.66666666666, ans=0.0 +2024-07-28 21:42:23,455 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.09 vs. limit=15.0 +2024-07-28 21:42:24,940 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.22 vs. 
limit=15.0 +2024-07-28 21:42:36,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=206352.0, ans=0.09899494936611666 +2024-07-28 21:45:05,378 INFO [train.py:1114] (1/4) Epoch 16, batch 1450, loss[loss=0.2048, simple_loss=0.2963, pruned_loss=0.05665, over 4682.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2674, pruned_loss=0.04289, over 942692.82 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:45:23,232 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.557e+01 6.212e+01 6.784e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 21:45:24,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=206392.0, ans=0.025 +2024-07-28 21:45:30,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0 +2024-07-28 21:45:36,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=206392.0, ans=0.0 +2024-07-28 21:46:29,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=206405.33333333334, ans=0.2 +2024-07-28 21:46:44,897 INFO [train.py:1114] (1/4) Epoch 16, batch 1500, loss[loss=0.1985, simple_loss=0.2952, pruned_loss=0.0509, over 4810.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2673, pruned_loss=0.04271, over 942427.43 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:46:47,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=206445.33333333334, ans=0.125 +2024-07-28 21:47:19,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=206485.33333333334, ans=0.025 +2024-07-28 21:47:31,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=206485.33333333334, ans=0.0 +2024-07-28 21:47:43,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=206498.66666666666, ans=0.125 +2024-07-28 21:47:44,836 INFO [train.py:1114] (1/4) Epoch 16, batch 1550, loss[loss=0.2187, simple_loss=0.3106, pruned_loss=0.06339, over 4899.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2675, pruned_loss=0.04317, over 939270.89 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:48:20,504 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.574e+01 6.317e+01 7.056e+01 9.850e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 21:49:10,278 INFO [train.py:1114] (1/4) Epoch 16, batch 1600, loss[loss=0.1959, simple_loss=0.2906, pruned_loss=0.05066, over 4876.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2685, pruned_loss=0.04409, over 937161.89 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:49:14,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=206578.66666666666, ans=0.0 +2024-07-28 21:49:14,937 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.89 vs. 
limit=15.0 +2024-07-28 21:49:21,753 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.26 vs. limit=10.0 +2024-07-28 21:49:34,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=206605.33333333334, ans=0.125 +2024-07-28 21:50:27,681 INFO [train.py:1114] (1/4) Epoch 16, batch 1650, loss[loss=0.1944, simple_loss=0.2924, pruned_loss=0.04819, over 4661.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2689, pruned_loss=0.04464, over 937145.40 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:50:29,224 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:50:32,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=206645.33333333334, ans=0.125 +2024-07-28 21:50:35,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. limit=10.0 +2024-07-28 21:50:36,683 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 5.640e+01 6.319e+01 7.228e+01 1.155e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 21:50:39,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=12.0 +2024-07-28 21:50:39,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206658.66666666666, ans=0.1 +2024-07-28 21:50:41,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=206672.0, ans=0.0 +2024-07-28 21:50:43,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.21 vs. limit=15.0 +2024-07-28 21:50:47,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.56 vs. limit=6.0 +2024-07-28 21:50:48,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=206685.33333333334, ans=0.2 +2024-07-28 21:50:52,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=206685.33333333334, ans=0.125 +2024-07-28 21:51:08,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=206698.66666666666, ans=0.2 +2024-07-28 21:51:10,362 INFO [train.py:1114] (1/4) Epoch 16, batch 1700, loss[loss=0.1458, simple_loss=0.2344, pruned_loss=0.02857, over 4708.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2684, pruned_loss=0.04428, over 939022.16 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:51:15,898 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.94 vs. limit=22.5 +2024-07-28 21:51:21,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.23 vs. 
limit=15.0 +2024-07-28 21:51:39,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=206738.66666666666, ans=0.2 +2024-07-28 21:52:37,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=206752.0, ans=0.0 +2024-07-28 21:52:56,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=12.0 +2024-07-28 21:52:59,249 INFO [train.py:1114] (1/4) Epoch 16, batch 1750, loss[loss=0.1683, simple_loss=0.2526, pruned_loss=0.04193, over 4800.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2676, pruned_loss=0.04387, over 939872.09 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:53:02,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=206778.66666666666, ans=0.125 +2024-07-28 21:53:09,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=206792.0, ans=0.0 +2024-07-28 21:53:09,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-07-28 21:53:09,574 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.795e+01 6.698e+01 8.081e+01 1.290e+02, threshold=1.340e+02, percent-clipped=1.0 +2024-07-28 21:53:09,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=206792.0, ans=0.125 +2024-07-28 21:53:10,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=206792.0, ans=0.025 +2024-07-28 21:53:17,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206805.33333333334, ans=0.125 +2024-07-28 21:53:38,141 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0 +2024-07-28 21:53:44,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.29 vs. limit=15.0 +2024-07-28 21:53:44,569 INFO [train.py:1114] (1/4) Epoch 16, batch 1800, loss[loss=0.1732, simple_loss=0.271, pruned_loss=0.03768, over 4631.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2686, pruned_loss=0.04413, over 940510.17 frames. 
], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:53:59,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=206872.0, ans=0.0
+2024-07-28 21:54:06,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=206885.33333333334, ans=0.1
+2024-07-28 21:54:21,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206898.66666666666, ans=0.125
+2024-07-28 21:54:23,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=206898.66666666666, ans=0.125
+2024-07-28 21:54:24,980 INFO [train.py:1114] (1/4) Epoch 16, batch 1850, loss[loss=0.1642, simple_loss=0.2597, pruned_loss=0.03435, over 4812.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2679, pruned_loss=0.04351, over 940441.51 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:54:31,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206912.0, ans=0.1
+2024-07-28 21:54:37,013 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.631e+01 6.106e+01 7.258e+01 1.128e+02, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 21:54:41,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=206925.33333333334, ans=0.125
+2024-07-28 21:55:04,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=206952.0, ans=0.04949747468305833
+2024-07-28 21:55:06,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=206965.33333333334, ans=0.0
+2024-07-28 21:55:13,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=206965.33333333334, ans=0.0
+2024-07-28 21:55:14,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=206965.33333333334, ans=0.125
+2024-07-28 21:55:14,783 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.56 vs. limit=22.5
+2024-07-28 21:55:17,726 INFO [train.py:1114] (1/4) Epoch 16, batch 1900, loss[loss=0.1813, simple_loss=0.2776, pruned_loss=0.04253, over 4660.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2678, pruned_loss=0.04346, over 941566.50 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:55:27,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=206992.0, ans=0.0
+2024-07-28 21:55:36,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=15.0
+2024-07-28 21:55:49,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=207018.66666666666, ans=0.0
+2024-07-28 21:55:58,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=207018.66666666666, ans=0.05
+2024-07-28 21:56:06,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.56 vs. limit=15.0
+2024-07-28 21:56:10,894 INFO [train.py:1114] (1/4) Epoch 16, batch 1950, loss[loss=0.14, simple_loss=0.2317, pruned_loss=0.02413, over 4906.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.269, pruned_loss=0.04386, over 943619.08 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:56:11,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=207045.33333333334, ans=0.2
+2024-07-28 21:56:29,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=207058.66666666666, ans=0.125
+2024-07-28 21:56:30,865 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.561e+01 6.255e+01 6.715e+01 9.914e+01, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 21:56:34,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=207058.66666666666, ans=0.2
+2024-07-28 21:56:45,283 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207072.0, ans=0.1
+2024-07-28 21:56:48,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=207072.0, ans=0.0
+2024-07-28 21:56:49,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=207072.0, ans=0.0
+2024-07-28 21:56:53,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=207085.33333333334, ans=0.1
+2024-07-28 21:56:53,726 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=207085.33333333334, ans=0.125
+2024-07-28 21:56:58,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.29 vs. limit=22.5
+2024-07-28 21:56:59,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207098.66666666666, ans=0.125
+2024-07-28 21:57:00,863 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.43 vs. limit=15.0
+2024-07-28 21:57:05,248 INFO [train.py:1114] (1/4) Epoch 16, batch 2000, loss[loss=0.1853, simple_loss=0.2552, pruned_loss=0.05772, over 4800.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2698, pruned_loss=0.04433, over 940317.77 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:57:22,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207125.33333333334, ans=0.1
+2024-07-28 21:57:25,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=207125.33333333334, ans=0.2
+2024-07-28 21:57:40,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.07 vs. limit=15.0
+2024-07-28 21:57:59,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207165.33333333334, ans=0.125
+2024-07-28 21:58:13,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=207165.33333333334, ans=0.0
+2024-07-28 21:58:20,237 INFO [train.py:1114] (1/4) Epoch 16, batch 2050, loss[loss=0.1658, simple_loss=0.2525, pruned_loss=0.03958, over 4612.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04389, over 938958.26 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:58:23,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=207178.66666666666, ans=0.95
+2024-07-28 21:58:36,910 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.615e+01 6.198e+01 7.046e+01 1.043e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 21:59:17,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=207232.0, ans=0.025
+2024-07-28 21:59:20,887 INFO [train.py:1114] (1/4) Epoch 16, batch 2100, loss[loss=0.1738, simple_loss=0.2581, pruned_loss=0.04475, over 4765.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2679, pruned_loss=0.04359, over 940757.58 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:59:22,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=207245.33333333334, ans=0.1
+2024-07-28 21:59:44,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=207258.66666666666, ans=0.125
+2024-07-28 21:59:49,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=207272.0, ans=0.125
+2024-07-28 22:00:21,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207298.66666666666, ans=0.1
+2024-07-28 22:01:09,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.11 vs. limit=22.5
+2024-07-28 22:01:09,923 INFO [train.py:1114] (1/4) Epoch 16, batch 2150, loss[loss=0.1707, simple_loss=0.2703, pruned_loss=0.03554, over 4897.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2681, pruned_loss=0.04396, over 944044.84 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:01:56,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=207325.33333333334, ans=0.125
+2024-07-28 22:01:58,055 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.463e+01 6.183e+01 7.182e+01 9.894e+01, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 22:02:23,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=207338.66666666666, ans=0.0
+2024-07-28 22:02:31,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=207352.0, ans=0.125
+2024-07-28 22:02:51,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=207365.33333333334, ans=0.0
+2024-07-28 22:02:53,139 INFO [train.py:1114] (1/4) Epoch 16, batch 2200, loss[loss=0.1863, simple_loss=0.2796, pruned_loss=0.04647, over 4820.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2678, pruned_loss=0.04362, over 943463.14 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:03:09,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=207392.0, ans=0.125
+2024-07-28 22:03:09,998 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0
+2024-07-28 22:03:11,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=207392.0, ans=0.0
+2024-07-28 22:03:11,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=207392.0, ans=0.2
+2024-07-28 22:03:47,378 INFO [train.py:1114] (1/4) Epoch 16, batch 2250, loss[loss=0.1783, simple_loss=0.2696, pruned_loss=0.04347, over 4698.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2675, pruned_loss=0.0437, over 942414.52 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:03:57,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=207458.66666666666, ans=0.0
+2024-07-28 22:03:58,321 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.527e+01 6.028e+01 7.010e+01 1.004e+02, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 22:04:01,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207458.66666666666, ans=0.1
+2024-07-28 22:04:07,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=207472.0, ans=0.025
+2024-07-28 22:04:13,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=207472.0, ans=0.125
+2024-07-28 22:04:36,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=207498.66666666666, ans=0.04949747468305833
+2024-07-28 22:04:53,007 INFO [train.py:1114] (1/4) Epoch 16, batch 2300, loss[loss=0.1536, simple_loss=0.2355, pruned_loss=0.03588, over 4947.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2665, pruned_loss=0.04349, over 939588.64 frames. ], batch size: 12, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:05:05,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=207512.0, ans=0.125
+2024-07-28 22:05:13,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=207525.33333333334, ans=6.0
+2024-07-28 22:05:15,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-07-28 22:05:31,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207538.66666666666, ans=0.125
+2024-07-28 22:05:46,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=207552.0, ans=0.025
+2024-07-28 22:05:57,165 INFO [train.py:1114] (1/4) Epoch 16, batch 2350, loss[loss=0.1643, simple_loss=0.2626, pruned_loss=0.03299, over 4642.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2668, pruned_loss=0.04378, over 941869.62 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:06:12,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207592.0, ans=0.125
+2024-07-28 22:06:13,018 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.459e+01 6.024e+01 6.952e+01 8.823e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 22:06:15,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=207592.0, ans=0.125
+2024-07-28 22:06:23,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=207605.33333333334, ans=0.0
+2024-07-28 22:06:29,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207618.66666666666, ans=0.0
+2024-07-28 22:06:37,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=207632.0, ans=0.125
+2024-07-28 22:06:45,790 INFO [train.py:1114] (1/4) Epoch 16, batch 2400, loss[loss=0.1802, simple_loss=0.2843, pruned_loss=0.03809, over 4638.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2674, pruned_loss=0.04394, over 941569.35 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:06:47,250 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:06:56,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=207658.66666666666, ans=0.125
+2024-07-28 22:07:04,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=207672.0, ans=0.125
+2024-07-28 22:07:17,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=207698.66666666666, ans=0.125
+2024-07-28 22:07:33,016 INFO [train.py:1114] (1/4) Epoch 16, batch 2450, loss[loss=0.146, simple_loss=0.2461, pruned_loss=0.0229, over 4696.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.268, pruned_loss=0.04417, over 937682.02 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:07:37,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=207712.0, ans=0.125
+2024-07-28 22:07:41,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.34 vs. limit=22.5
+2024-07-28 22:07:45,691 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.574e+01 6.192e+01 6.939e+01 1.187e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-28 22:07:54,378 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:07:55,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=207738.66666666666, ans=0.015
+2024-07-28 22:08:24,489 INFO [train.py:1114] (1/4) Epoch 16, batch 2500, loss[loss=0.1882, simple_loss=0.2838, pruned_loss=0.04628, over 4801.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2671, pruned_loss=0.04399, over 939571.01 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:08:39,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=207792.0, ans=0.0
+2024-07-28 22:08:44,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=207792.0, ans=0.2
+2024-07-28 22:08:51,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=207792.0, ans=0.125
+2024-07-28 22:08:57,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=207805.33333333334, ans=0.125
+2024-07-28 22:09:10,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0
+2024-07-28 22:09:23,877 INFO [train.py:1114] (1/4) Epoch 16, batch 2550, loss[loss=0.1483, simple_loss=0.2346, pruned_loss=0.03101, over 4810.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2676, pruned_loss=0.04396, over 938837.10 frames. ], batch size: 11, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:09:38,982 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.535e+01 6.272e+01 7.311e+01 1.144e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-28 22:09:54,394 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=207858.66666666666, ans=0.1
+2024-07-28 22:10:40,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=207872.0, ans=0.125
+2024-07-28 22:11:01,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.49 vs. limit=15.0
+2024-07-28 22:11:49,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.81 vs. limit=15.0
+2024-07-28 22:11:55,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=207885.33333333334, ans=0.125
+2024-07-28 22:13:04,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.34 vs. limit=15.0
+2024-07-28 22:13:06,685 INFO [train.py:1114] (1/4) Epoch 16, batch 2600, loss[loss=0.1654, simple_loss=0.2545, pruned_loss=0.03816, over 4892.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2681, pruned_loss=0.04392, over 938355.16 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:13:45,112 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0
+2024-07-28 22:13:45,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.93 vs. limit=15.0
+2024-07-28 22:13:47,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=207912.0, ans=0.0
+2024-07-28 22:13:51,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207925.33333333334, ans=0.1
+2024-07-28 22:14:02,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=207938.66666666666, ans=0.2
+2024-07-28 22:15:00,478 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:15:02,479 INFO [train.py:1114] (1/4) Epoch 16, batch 2650, loss[loss=0.1769, simple_loss=0.262, pruned_loss=0.04587, over 4639.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2688, pruned_loss=0.04426, over 940698.91 frames. ], batch size: 16, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:15:04,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207978.66666666666, ans=0.1
+2024-07-28 22:15:20,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207992.0, ans=0.1
+2024-07-28 22:15:41,444 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.682e+01 6.199e+01 7.227e+01 9.483e+01, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 22:27:28,517 INFO [train.py:1114] (1/4) Epoch 16, batch 2700, loss[loss=0.1948, simple_loss=0.2918, pruned_loss=0.04886, over 4742.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2697, pruned_loss=0.0447, over 940285.87 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:27:38,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208058.66666666666, ans=0.125
+2024-07-28 22:27:45,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208072.0, ans=0.125
+2024-07-28 22:30:44,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=208112.0, ans=0.09899494936611666
+2024-07-28 22:30:45,955 INFO [train.py:1114] (1/4) Epoch 16, batch 2750, loss[loss=0.162, simple_loss=0.2463, pruned_loss=0.0388, over 4708.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2676, pruned_loss=0.04406, over 940466.15 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:30:48,644 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0
+2024-07-28 22:30:57,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=208112.0, ans=0.1
+2024-07-28 22:31:02,417 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.384e+01 5.637e+01 6.771e+01 7.935e+01 1.190e+02, threshold=1.354e+02, percent-clipped=0.0
+2024-07-28 22:31:09,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208125.33333333334, ans=0.125
+2024-07-28 22:31:13,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208125.33333333334, ans=0.1
+2024-07-28 22:31:39,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=208152.0, ans=0.2
+2024-07-28 22:31:47,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=208152.0, ans=0.2
+2024-07-28 22:31:54,096 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:31:57,214 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0
+2024-07-28 22:31:58,120 INFO [train.py:1114] (1/4) Epoch 16, batch 2800, loss[loss=0.2209, simple_loss=0.3003, pruned_loss=0.07069, over 3432.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2684, pruned_loss=0.04421, over 938620.10 frames. ], batch size: 35, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:31:58,522 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0
+2024-07-28 22:31:58,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=208178.66666666666, ans=0.2
+2024-07-28 22:32:16,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.04 vs. limit=22.5
+2024-07-28 22:32:24,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=208205.33333333334, ans=0.125
+2024-07-28 22:32:38,582 INFO [train.py:1114] (1/4) Epoch 16, batch 2850, loss[loss=0.1685, simple_loss=0.2527, pruned_loss=0.04219, over 4975.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2694, pruned_loss=0.04478, over 936542.70 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:32:45,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208258.66666666666, ans=0.1
+2024-07-28 22:32:47,409 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.805e+01 6.352e+01 7.417e+01 1.040e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-28 22:33:29,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=208298.66666666666, ans=0.0
+2024-07-28 22:33:35,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=208312.0, ans=0.125
+2024-07-28 22:33:36,454 INFO [train.py:1114] (1/4) Epoch 16, batch 2900, loss[loss=0.1994, simple_loss=0.2791, pruned_loss=0.05988, over 4831.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2711, pruned_loss=0.04465, over 940270.68 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:33:57,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0
+2024-07-28 22:34:02,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208338.66666666666, ans=0.125
+2024-07-28 22:34:10,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.14 vs. limit=15.0
+2024-07-28 22:34:15,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.75 vs. limit=12.0
+2024-07-28 22:34:22,070 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.28 vs. limit=15.0
+2024-07-28 22:34:26,653 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.19 vs. limit=15.0
+2024-07-28 22:34:36,446 INFO [train.py:1114] (1/4) Epoch 16, batch 2950, loss[loss=0.168, simple_loss=0.2594, pruned_loss=0.03829, over 4706.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2693, pruned_loss=0.04455, over 939138.96 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:34:44,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=208392.0, ans=0.025
+2024-07-28 22:34:46,788 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.287e+01 5.436e+01 5.951e+01 6.814e+01 8.870e+01, threshold=1.190e+02, percent-clipped=0.0
+2024-07-28 22:34:54,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=208405.33333333334, ans=0.07
+2024-07-28 22:34:59,938 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:35:07,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208418.66666666666, ans=0.125
+2024-07-28 22:35:09,405 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.15 vs. limit=15.0
+2024-07-28 22:35:15,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=208432.0, ans=0.125
+2024-07-28 22:35:19,625 INFO [train.py:1114] (1/4) Epoch 16, batch 3000, loss[loss=0.2174, simple_loss=0.3014, pruned_loss=0.06672, over 4753.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2695, pruned_loss=0.04476, over 938430.05 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:35:19,626 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-28 22:37:08,209 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1628, simple_loss=0.2657, pruned_loss=0.02996, over 944034.00 frames.
+2024-07-28 22:37:08,210 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-28 22:37:08,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=208445.33333333334, ans=0.0
+2024-07-28 22:38:09,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208472.0, ans=0.125
+2024-07-28 22:38:10,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=208472.0, ans=0.0
+2024-07-28 22:38:11,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=208472.0, ans=0.125
+2024-07-28 22:38:34,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=208498.66666666666, ans=0.125
+2024-07-28 22:38:37,059 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:38:40,421 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=15.0
+2024-07-28 22:38:42,176 INFO [train.py:1114] (1/4) Epoch 16, batch 3050, loss[loss=0.1639, simple_loss=0.2479, pruned_loss=0.03997, over 4638.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2701, pruned_loss=0.04539, over 937447.40 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:38:51,725 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.977e+01 5.726e+01 6.358e+01 7.092e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-28 22:39:08,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=208552.0, ans=0.0
+2024-07-28 22:39:11,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.67 vs. limit=15.0
+2024-07-28 22:39:40,200 INFO [train.py:1114] (1/4) Epoch 16, batch 3100, loss[loss=0.1894, simple_loss=0.2727, pruned_loss=0.05302, over 4638.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.27, pruned_loss=0.04519, over 937975.07 frames. ], batch size: 16, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:40:04,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=208592.0, ans=0.125
+2024-07-28 22:40:29,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=208605.33333333334, ans=0.125
+2024-07-28 22:40:56,499 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.07 vs. limit=22.5
+2024-07-28 22:41:02,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=208618.66666666666, ans=0.0
+2024-07-28 22:41:13,448 INFO [train.py:1114] (1/4) Epoch 16, batch 3150, loss[loss=0.1857, simple_loss=0.2876, pruned_loss=0.04189, over 4646.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2688, pruned_loss=0.04431, over 937705.93 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:41:13,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0
+2024-07-28 22:41:15,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=12.0
+2024-07-28 22:41:26,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.01 vs. limit=22.5
+2024-07-28 22:41:29,374 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.743e+01 5.555e+01 6.673e+01 7.571e+01 1.321e+02, threshold=1.335e+02, percent-clipped=1.0
+2024-07-28 22:41:29,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=208658.66666666666, ans=0.0
+2024-07-28 22:41:35,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=208672.0, ans=0.0
+2024-07-28 22:42:12,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=208698.66666666666, ans=0.125
+2024-07-28 22:42:14,164 INFO [train.py:1114] (1/4) Epoch 16, batch 3200, loss[loss=0.1713, simple_loss=0.2521, pruned_loss=0.04523, over 4825.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.0445, over 939238.34 frames. ], batch size: 13, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:42:26,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=208725.33333333334, ans=0.0
+2024-07-28 22:42:28,261 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=208738.66666666666, ans=0.5
+2024-07-28 22:42:37,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208738.66666666666, ans=0.1
+2024-07-28 22:43:03,061 INFO [train.py:1114] (1/4) Epoch 16, batch 3250, loss[loss=0.2062, simple_loss=0.3072, pruned_loss=0.0526, over 4928.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2697, pruned_loss=0.04436, over 940394.89 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:43:07,995 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:43:13,632 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.572e+01 5.431e+01 6.056e+01 6.661e+01 1.204e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 22:43:13,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=208792.0, ans=0.05
+2024-07-28 22:43:27,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0
+2024-07-28 22:43:30,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=208805.33333333334, ans=0.05
+2024-07-28 22:43:40,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208818.66666666666, ans=0.125
+2024-07-28 22:43:45,815 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=12.0
+2024-07-28 22:44:04,201 INFO [train.py:1114] (1/4) Epoch 16, batch 3300, loss[loss=0.1977, simple_loss=0.2922, pruned_loss=0.0516, over 4692.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.04449, over 940359.71 frames. ], batch size: 19, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:44:15,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208858.66666666666, ans=0.125
+2024-07-28 22:44:34,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208872.0, ans=0.125
+2024-07-28 22:45:00,563 INFO [train.py:1114] (1/4) Epoch 16, batch 3350, loss[loss=0.2027, simple_loss=0.286, pruned_loss=0.05968, over 4667.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2697, pruned_loss=0.04484, over 938638.90 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:45:11,078 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.569e+01 6.115e+01 6.727e+01 9.175e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 22:45:16,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=208938.66666666666, ans=10.0
+2024-07-28 22:45:41,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=208952.0, ans=0.0
+2024-07-28 22:46:00,442 INFO [train.py:1114] (1/4) Epoch 16, batch 3400, loss[loss=0.1579, simple_loss=0.2454, pruned_loss=0.03519, over 4819.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2702, pruned_loss=0.04532, over 937335.93 frames. ], batch size: 11, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:46:26,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=208992.0, ans=0.0
+2024-07-28 22:46:51,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=209018.66666666666, ans=0.2
+2024-07-28 22:46:54,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209018.66666666666, ans=0.1
+2024-07-28 22:46:59,960 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:47:04,414 INFO [train.py:1114] (1/4) Epoch 16, batch 3450, loss[loss=0.2031, simple_loss=0.2918, pruned_loss=0.05723, over 4719.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2696, pruned_loss=0.04486, over 938227.41 frames. ], batch size: 19, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:47:11,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=209058.66666666666, ans=0.1
+2024-07-28 22:47:12,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=12.0
+2024-07-28 22:47:12,846 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.483e+01 6.084e+01 6.778e+01 9.605e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-28 22:47:40,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=209072.0, ans=0.2
+2024-07-28 22:47:41,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.22 vs. limit=15.0
+2024-07-28 22:47:59,620 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-28 22:48:20,549 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.34 vs. limit=15.0
+2024-07-28 22:48:59,082 INFO [train.py:1114] (1/4) Epoch 16, batch 3500, loss[loss=0.1556, simple_loss=0.2468, pruned_loss=0.03223, over 4941.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2688, pruned_loss=0.04457, over 938382.86 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:48:59,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0
+2024-07-28 22:49:01,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=209112.0, ans=0.0
+2024-07-28 22:49:10,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=15.0
+2024-07-28 22:49:13,383 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.36 vs. limit=22.5
+2024-07-28 22:49:28,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=209165.33333333334, ans=0.0
+2024-07-28 22:49:32,614 INFO [train.py:1114] (1/4) Epoch 16, batch 3550, loss[loss=0.1862, simple_loss=0.2751, pruned_loss=0.04864, over 4657.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2678, pruned_loss=0.04407, over 938865.83 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:49:33,074 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.33 vs. limit=10.0
+2024-07-28 22:49:41,600 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.691e+01 6.213e+01 7.399e+01 9.936e+01, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 22:49:44,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=209192.0, ans=10.0
+2024-07-28 22:49:56,737 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:50:02,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.72 vs. limit=10.0
+2024-07-28 22:50:06,328 INFO [train.py:1114] (1/4) Epoch 16, batch 3600, loss[loss=0.213, simple_loss=0.2852, pruned_loss=0.07036, over 4966.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2686, pruned_loss=0.04455, over 940671.16 frames. ], batch size: 13, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:50:08,702 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=15.0
+2024-07-28 22:50:09,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=209245.33333333334, ans=0.125
+2024-07-28 22:50:11,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209245.33333333334, ans=0.125
+2024-07-28 22:50:16,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209258.66666666666, ans=0.125
+2024-07-28 22:50:17,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=209258.66666666666, ans=0.0
+2024-07-28 22:50:31,643 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:50:31,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=209285.33333333334, ans=0.025
+2024-07-28 22:50:39,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.15 vs. limit=22.5
+2024-07-28 22:50:46,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209312.0, ans=0.0
+2024-07-28 22:50:46,878 INFO [train.py:1114] (1/4) Epoch 16, batch 3650, loss[loss=0.1857, simple_loss=0.2711, pruned_loss=0.05013, over 4910.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2677, pruned_loss=0.04409, over 941027.68 frames. ], batch size: 15, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:50:49,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=209312.0, ans=0.0
+2024-07-28 22:50:57,251 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.574e+01 6.186e+01 7.126e+01 1.218e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 22:50:58,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=209325.33333333334, ans=0.2
+2024-07-28 22:51:02,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=209338.66666666666, ans=0.125
+2024-07-28 22:51:04,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209338.66666666666, ans=0.125
+2024-07-28 22:51:06,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209338.66666666666, ans=0.125
+2024-07-28 22:51:27,273 INFO [train.py:1114] (1/4) Epoch 16, batch 3700, loss[loss=0.1665, simple_loss=0.2623, pruned_loss=0.03532, over 4930.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2672, pruned_loss=0.04361, over 941984.56 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:51:29,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.10 vs. limit=22.5
+2024-07-28 22:51:41,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=209405.33333333334, ans=0.125
+2024-07-28 22:51:41,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=209405.33333333334, ans=0.125
+2024-07-28 22:51:45,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=209405.33333333334, ans=0.125
+2024-07-28 22:51:46,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=209418.66666666666, ans=0.0
+2024-07-28 22:52:02,367 INFO [train.py:1114] (1/4) Epoch 16, batch 3750, loss[loss=0.1903, simple_loss=0.254, pruned_loss=0.06328, over 4805.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2673, pruned_loss=0.04392, over 943562.14 frames. ], batch size: 11, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:52:02,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209445.33333333334, ans=0.125
+2024-07-28 22:52:05,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=209445.33333333334, ans=0.125
+2024-07-28 22:52:17,618 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.513e+01 6.031e+01 6.754e+01 8.866e+01, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 22:52:21,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.95 vs. limit=6.0
+2024-07-28 22:52:35,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209498.66666666666, ans=0.125
+2024-07-28 22:52:41,569 INFO [train.py:1114] (1/4) Epoch 16, batch 3800, loss[loss=0.1687, simple_loss=0.2696, pruned_loss=0.03388, over 4810.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2666, pruned_loss=0.0439, over 941861.63 frames. ], batch size: 14, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:52:45,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0
+2024-07-28 22:52:59,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=209525.33333333334, ans=0.0
+2024-07-28 22:53:00,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=209525.33333333334, ans=0.125
+2024-07-28 22:53:03,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209538.66666666666, ans=0.125
+2024-07-28 22:53:07,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=209538.66666666666, ans=0.0
+2024-07-28 22:53:16,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=209552.0, ans=0.125
+2024-07-28 22:53:22,987 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209565.33333333334, ans=0.125
+2024-07-28 22:53:24,771 INFO [train.py:1114] (1/4) Epoch 16, batch 3850, loss[loss=0.19, simple_loss=0.2795, pruned_loss=0.05025, over 4614.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2665, pruned_loss=0.04365, over 942392.31 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:53:30,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=209578.66666666666, ans=0.0
+2024-07-28 22:53:35,401 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.49 vs. limit=22.5
+2024-07-28 22:53:44,020 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.438e+01 6.014e+01 6.827e+01 9.667e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-28 22:53:45,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=209592.0, ans=0.2
+2024-07-28 22:53:50,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0
+2024-07-28 22:54:04,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=209632.0, ans=0.125
+2024-07-28 22:54:11,537 INFO [train.py:1114] (1/4) Epoch 16, batch 3900, loss[loss=0.1679, simple_loss=0.2686, pruned_loss=0.03357, over 4816.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2669, pruned_loss=0.04386, over 942453.49 frames. ], batch size: 14, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:54:12,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=209645.33333333334, ans=0.125
+2024-07-28 22:54:14,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=209645.33333333334, ans=0.125
+2024-07-28 22:54:22,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209658.66666666666, ans=0.1
+2024-07-28 22:54:36,988 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.42 vs. limit=12.0
+2024-07-28 22:54:37,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=209672.0, ans=0.125
+2024-07-28 22:54:37,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=209672.0, ans=0.2
+2024-07-28 22:54:37,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=209672.0, ans=0.07
+2024-07-28 22:54:40,754 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0
+2024-07-28 22:54:43,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209685.33333333334, ans=0.125
+2024-07-28 22:54:43,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.30 vs. limit=10.0
+2024-07-28 22:54:51,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.88 vs. limit=10.0
+2024-07-28 22:54:57,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=209698.66666666666, ans=0.125
+2024-07-28 22:54:59,408 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0
+2024-07-28 22:55:00,448 INFO [train.py:1114] (1/4) Epoch 16, batch 3950, loss[loss=0.1695, simple_loss=0.266, pruned_loss=0.03649, over 4850.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2667, pruned_loss=0.0437, over 944483.13 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:55:05,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=209712.0, ans=0.125
+2024-07-28 22:55:14,737 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.589e+01 5.934e+01 6.636e+01 9.172e+01, threshold=1.187e+02, percent-clipped=0.0
+2024-07-28 22:55:23,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0
+2024-07-28 22:55:26,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=209738.66666666666, ans=0.0
+2024-07-28 22:55:29,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=209752.0, ans=0.95
+2024-07-28 22:55:38,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209752.0, ans=0.1
+2024-07-28 22:55:43,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.22 vs. limit=22.5
+2024-07-28 22:55:49,229 INFO [train.py:1114] (1/4) Epoch 16, batch 4000, loss[loss=0.1718, simple_loss=0.2508, pruned_loss=0.04644, over 4774.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2668, pruned_loss=0.04405, over 940467.27 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:55:50,708 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209778.66666666666, ans=0.0
+2024-07-28 22:55:52,277 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.57 vs. limit=15.0
+2024-07-28 22:55:56,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.55 vs. limit=12.0
+2024-07-28 22:56:09,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.98 vs. limit=15.0
+2024-07-28 22:56:13,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209818.66666666666, ans=0.125
+2024-07-28 22:56:21,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=209832.0, ans=0.0
+2024-07-28 22:56:22,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=209832.0, ans=0.125
+2024-07-28 22:56:39,771 INFO [train.py:1114] (1/4) Epoch 16, batch 4050, loss[loss=0.2573, simple_loss=0.3146, pruned_loss=0.1, over 3212.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2673, pruned_loss=0.04439, over 938955.07 frames. ], batch size: 35, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:56:48,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209858.66666666666, ans=0.0
+2024-07-28 22:56:51,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=209858.66666666666, ans=0.04949747468305833
+2024-07-28 22:56:51,881 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.462e+01 6.013e+01 7.148e+01 1.181e+02, threshold=1.203e+02, percent-clipped=0.0
+2024-07-28 22:56:54,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=209858.66666666666, ans=0.0
+2024-07-28 22:56:55,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0
+2024-07-28 22:57:24,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=209898.66666666666, ans=0.125
+2024-07-28 22:57:25,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.30 vs. limit=22.5
+2024-07-28 22:57:27,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=209898.66666666666, ans=0.1
+2024-07-28 22:57:32,725 INFO [train.py:1114] (1/4) Epoch 16, batch 4100, loss[loss=0.1966, simple_loss=0.2852, pruned_loss=0.05398, over 4892.00 frames. ], tot_loss[loss=0.179, simple_loss=0.268, pruned_loss=0.04501, over 937723.75 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:57:49,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=209912.0, ans=0.125
+2024-07-28 22:57:50,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=209912.0, ans=0.0
+2024-07-28 22:57:56,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=209925.33333333334, ans=0.2
+2024-07-28 22:57:57,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=209925.33333333334, ans=0.04949747468305833
+2024-07-28 22:58:09,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=209938.66666666666, ans=0.125
+2024-07-28 22:58:14,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209952.0, ans=0.1
+2024-07-28 22:58:15,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=209952.0, ans=0.125
+2024-07-28 22:58:29,580 INFO [train.py:1114] (1/4) Epoch 16, batch 4150, loss[loss=0.1743, simple_loss=0.2584, pruned_loss=0.04512, over 4830.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2677, pruned_loss=0.04469, over 937646.41 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:58:33,185 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:58:40,273 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.817e+01 6.318e+01 7.435e+01 1.178e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 22:58:41,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=209992.0, ans=0.125
+2024-07-28 22:59:06,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210032.0, ans=0.1
+2024-07-28 22:59:08,736 INFO [train.py:1114] (1/4) Epoch 16, batch 4200, loss[loss=0.2148, simple_loss=0.3107, pruned_loss=0.05947, over 4898.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2686, pruned_loss=0.04482, over 939664.65 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:59:13,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=210045.33333333334, ans=0.2
+2024-07-28 22:59:13,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.10 vs. limit=6.0
+2024-07-28 22:59:15,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=210058.66666666666, ans=0.125
+2024-07-28 22:59:17,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=15.0
+2024-07-28 22:59:36,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=210085.33333333334, ans=0.125
+2024-07-28 22:59:46,501 INFO [train.py:1114] (1/4) Epoch 16, batch 4250, loss[loss=0.1467, simple_loss=0.2379, pruned_loss=0.02768, over 4639.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2689, pruned_loss=0.04482, over 940703.47 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:59:53,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210125.33333333334, ans=0.125
+2024-07-28 22:59:53,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=210125.33333333334, ans=0.0
+2024-07-28 22:59:55,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.557e+01 6.153e+01 6.698e+01 1.216e+02, threshold=1.231e+02, percent-clipped=0.0
+2024-07-28 22:59:57,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210125.33333333334, ans=0.1
+2024-07-28 23:00:13,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=210152.0, ans=0.0
+2024-07-28 23:00:21,355 INFO [train.py:1114] (1/4) Epoch 16, batch 4300, loss[loss=0.1694, simple_loss=0.2768, pruned_loss=0.03097, over 4759.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2694, pruned_loss=0.04501, over 940121.64 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 23:00:21,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.69 vs. limit=15.0
+2024-07-28 23:00:27,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=210178.66666666666, ans=0.0
+2024-07-28 23:00:27,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=210178.66666666666, ans=0.07
+2024-07-28 23:00:31,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=210192.0, ans=0.04949747468305833
+2024-07-28 23:00:40,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=210205.33333333334, ans=0.0
+2024-07-28 23:00:40,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=210205.33333333334, ans=0.0
+2024-07-28 23:00:54,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.33 vs. limit=15.0
+2024-07-28 23:00:56,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210232.0, ans=0.1
+2024-07-28 23:00:57,496 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:00:57,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=210245.33333333334, ans=0.025
+2024-07-28 23:00:58,188 INFO [train.py:1114] (1/4) Epoch 16, batch 4350, loss[loss=0.1529, simple_loss=0.2414, pruned_loss=0.03219, over 4757.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04472, over 940786.87 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 23:01:15,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=210258.66666666666, ans=0.015
+2024-07-28 23:01:18,166 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.387e+01 6.031e+01 6.844e+01 1.009e+02, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 23:01:20,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=210258.66666666666, ans=0.0
+2024-07-28 23:01:34,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=210298.66666666666, ans=0.1
+2024-07-28 23:01:36,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=210298.66666666666, ans=0.2
+2024-07-28 23:01:42,105 INFO [train.py:1114] (1/4) Epoch 16, batch 4400, loss[loss=0.1845, simple_loss=0.2926, pruned_loss=0.03816, over 4809.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.0442, over 940733.99 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0
+2024-07-28 23:01:42,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.76 vs. limit=15.0
+2024-07-28 23:01:44,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210312.0, ans=0.1
+2024-07-28 23:01:48,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210312.0, ans=0.1
+2024-07-28 23:01:49,039 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.40 vs. limit=6.0
+2024-07-28 23:02:03,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=210338.66666666666, ans=0.125
+2024-07-28 23:02:10,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=210352.0, ans=0.2
+2024-07-28 23:02:21,301 INFO [train.py:1114] (1/4) Epoch 16, batch 4450, loss[loss=0.154, simple_loss=0.2498, pruned_loss=0.02907, over 4940.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.269, pruned_loss=0.04424, over 939493.47 frames. ], batch size: 12, lr: 4.67e-03, grad_scale: 32.0
+2024-07-28 23:02:26,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=210378.66666666666, ans=0.0
+2024-07-28 23:02:29,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0
+2024-07-28 23:02:30,820 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.581e+01 5.996e+01 6.828e+01 9.558e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 23:02:37,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.70 vs. limit=22.5
+2024-07-28 23:02:38,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=15.0
+2024-07-28 23:02:55,306 INFO [train.py:1114] (1/4) Epoch 16, batch 4500, loss[loss=0.208, simple_loss=0.3051, pruned_loss=0.05546, over 4746.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2692, pruned_loss=0.04416, over 938376.33 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0
+2024-07-28 23:03:05,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.78 vs. limit=6.0
+2024-07-28 23:03:07,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=210458.66666666666, ans=0.125
+2024-07-28 23:03:10,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=210472.0, ans=0.125
+2024-07-28 23:03:18,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=210485.33333333334, ans=0.125
+2024-07-28 23:03:24,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=210498.66666666666, ans=0.125
+2024-07-28 23:03:28,422 INFO [train.py:1114] (1/4) Epoch 16, batch 4550, loss[loss=0.1992, simple_loss=0.2861, pruned_loss=0.05614, over 4907.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2688, pruned_loss=0.04403, over 940190.98 frames.
], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:35,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.14 vs. limit=22.5 +2024-07-28 23:03:39,688 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.876e+01 5.739e+01 6.533e+01 7.196e+01 1.162e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 23:03:43,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.21 vs. limit=22.5 +2024-07-28 23:03:51,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.30 vs. limit=15.0 +2024-07-28 23:04:06,250 INFO [train.py:1114] (1/4) Epoch 16, batch 4600, loss[loss=0.1924, simple_loss=0.2832, pruned_loss=0.05075, over 4459.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2681, pruned_loss=0.04424, over 937965.71 frames. ], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:07,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.39 vs. limit=15.0 +2024-07-28 23:04:12,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=210592.0, ans=0.125 +2024-07-28 23:04:14,995 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. limit=15.0 +2024-07-28 23:04:34,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=210632.0, ans=0.09899494936611666 +2024-07-28 23:04:39,383 INFO [train.py:1114] (1/4) Epoch 16, batch 4650, loss[loss=0.1986, simple_loss=0.2894, pruned_loss=0.05394, over 4844.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.27, pruned_loss=0.04467, over 939918.12 frames. ], batch size: 16, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:41,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=210645.33333333334, ans=0.05 +2024-07-28 23:04:43,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=210645.33333333334, ans=0.0 +2024-07-28 23:04:43,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210645.33333333334, ans=0.1 +2024-07-28 23:04:52,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=210658.66666666666, ans=0.0 +2024-07-28 23:04:52,460 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.652e+01 6.180e+01 7.051e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 23:05:04,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=210685.33333333334, ans=0.125 +2024-07-28 23:05:05,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210685.33333333334, ans=0.1 +2024-07-28 23:05:19,327 INFO [train.py:1114] (1/4) Epoch 16, batch 4700, loss[loss=0.1742, simple_loss=0.2456, pruned_loss=0.0514, over 4703.00 frames. 
], tot_loss[loss=0.178, simple_loss=0.2683, pruned_loss=0.04384, over 936820.72 frames. ], batch size: 11, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:05:22,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=210712.0, ans=0.0 +2024-07-28 23:05:38,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=210738.66666666666, ans=10.0 +2024-07-28 23:05:40,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=210752.0, ans=0.0 +2024-07-28 23:05:42,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=210752.0, ans=0.0 +2024-07-28 23:05:56,316 INFO [train.py:1114] (1/4) Epoch 16, batch 4750, loss[loss=0.1999, simple_loss=0.2963, pruned_loss=0.05171, over 4497.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2689, pruned_loss=0.04407, over 935327.04 frames. ], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:05:58,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=210778.66666666666, ans=0.125 +2024-07-28 23:05:58,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=210778.66666666666, ans=0.025 +2024-07-28 23:06:06,438 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.562e+01 6.169e+01 6.958e+01 1.016e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 23:06:14,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=210805.33333333334, ans=0.125 +2024-07-28 23:06:32,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210832.0, ans=0.1 +2024-07-28 23:06:32,275 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.88 vs. limit=15.0 +2024-07-28 23:06:34,617 INFO [train.py:1114] (1/4) Epoch 16, batch 4800, loss[loss=0.1712, simple_loss=0.2738, pruned_loss=0.03428, over 4693.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2684, pruned_loss=0.04397, over 932708.88 frames. 
], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:06:36,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=210845.33333333334, ans=0.0 +2024-07-28 23:06:44,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=210858.66666666666, ans=0.125 +2024-07-28 23:06:48,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=210872.0, ans=12.0 +2024-07-28 23:06:55,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=210885.33333333334, ans=0.5 +2024-07-28 23:07:00,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=210885.33333333334, ans=0.125 +2024-07-28 23:07:01,388 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=210898.66666666666, ans=0.0 +2024-07-28 23:07:07,829 INFO [train.py:1114] (1/4) Epoch 16, batch 4850, loss[loss=0.1848, simple_loss=0.2833, pruned_loss=0.04313, over 4734.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2686, pruned_loss=0.04389, over 932253.34 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:07:28,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=210925.33333333334, ans=0.2 +2024-07-28 23:07:31,468 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.391e+01 6.068e+01 6.775e+01 1.177e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:07:42,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210952.0, ans=0.1 +2024-07-28 23:07:48,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=210952.0, ans=0.125 +2024-07-28 23:07:56,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.13 vs. limit=15.0 +2024-07-28 23:07:59,284 INFO [train.py:1114] (1/4) Epoch 16, batch 4900, loss[loss=0.1472, simple_loss=0.2461, pruned_loss=0.02422, over 4765.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2702, pruned_loss=0.0443, over 933923.47 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:13,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211005.33333333334, ans=0.125 +2024-07-28 23:08:18,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=211005.33333333334, ans=0.0 +2024-07-28 23:08:20,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211018.66666666666, ans=0.125 +2024-07-28 23:08:22,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.19 vs. limit=8.0 +2024-07-28 23:08:29,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. 
limit=15.0 +2024-07-28 23:08:30,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=211032.0, ans=0.125 +2024-07-28 23:08:33,344 INFO [train.py:1114] (1/4) Epoch 16, batch 4950, loss[loss=0.231, simple_loss=0.3082, pruned_loss=0.07687, over 3654.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.271, pruned_loss=0.04513, over 931079.89 frames. ], batch size: 35, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:40,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=211058.66666666666, ans=0.0 +2024-07-28 23:08:42,919 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.320e+01 5.430e+01 5.977e+01 6.818e+01 1.036e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 23:08:44,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.62 vs. limit=12.0 +2024-07-28 23:09:00,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=211098.66666666666, ans=0.125 +2024-07-28 23:09:04,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211098.66666666666, ans=0.1 +2024-07-28 23:09:04,943 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.89 vs. limit=10.0 +2024-07-28 23:09:08,839 INFO [train.py:1114] (1/4) Epoch 16, batch 5000, loss[loss=0.1825, simple_loss=0.2823, pruned_loss=0.04137, over 4669.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2693, pruned_loss=0.04408, over 934785.17 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:41,974 INFO [train.py:1114] (1/4) Epoch 16, batch 5050, loss[loss=0.1528, simple_loss=0.244, pruned_loss=0.03077, over 4840.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2684, pruned_loss=0.04369, over 937662.47 frames. 
], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:45,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=211178.66666666666, ans=0.125 +2024-07-28 23:09:53,128 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.620e+01 6.064e+01 6.522e+01 1.168e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 23:09:53,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211192.0, ans=0.1 +2024-07-28 23:10:09,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=211218.66666666666, ans=0.2 +2024-07-28 23:10:10,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=211232.0, ans=15.0 +2024-07-28 23:10:11,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=211232.0, ans=0.0 +2024-07-28 23:10:14,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=211232.0, ans=0.2 +2024-07-28 23:10:16,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=211232.0, ans=0.0 +2024-07-28 23:10:16,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=211232.0, ans=0.0 +2024-07-28 23:10:18,014 INFO [train.py:1114] (1/4) Epoch 16, batch 5100, loss[loss=0.1618, simple_loss=0.2569, pruned_loss=0.03338, over 4771.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2692, pruned_loss=0.04403, over 935460.01 frames. ], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:24,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=211258.66666666666, ans=0.0 +2024-07-28 23:10:26,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=211258.66666666666, ans=0.125 +2024-07-28 23:10:26,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=211258.66666666666, ans=0.07 +2024-07-28 23:10:31,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=211272.0, ans=0.1 +2024-07-28 23:10:34,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.75 vs. limit=6.0 +2024-07-28 23:10:40,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.80 vs. limit=15.0 +2024-07-28 23:10:43,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=211285.33333333334, ans=0.0 +2024-07-28 23:10:44,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. 
limit=15.0 +2024-07-28 23:10:50,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=211298.66666666666, ans=0.125 +2024-07-28 23:10:51,318 INFO [train.py:1114] (1/4) Epoch 16, batch 5150, loss[loss=0.1852, simple_loss=0.2585, pruned_loss=0.05599, over 4820.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2703, pruned_loss=0.045, over 936191.45 frames. ], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:11:00,629 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.854e+01 6.432e+01 7.346e+01 1.040e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 23:11:00,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=211325.33333333334, ans=0.2 +2024-07-28 23:11:13,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=211352.0, ans=0.0 +2024-07-28 23:11:16,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211352.0, ans=0.125 +2024-07-28 23:11:20,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.08 vs. limit=22.5 +2024-07-28 23:11:21,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=211365.33333333334, ans=0.125 +2024-07-28 23:11:26,551 INFO [train.py:1114] (1/4) Epoch 16, batch 5200, loss[loss=0.1843, simple_loss=0.2786, pruned_loss=0.04499, over 4663.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.271, pruned_loss=0.0453, over 936481.20 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:11:46,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=211418.66666666666, ans=0.2 +2024-07-28 23:11:54,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=211432.0, ans=0.2 +2024-07-28 23:12:01,689 INFO [train.py:1114] (1/4) Epoch 16, batch 5250, loss[loss=0.1697, simple_loss=0.2623, pruned_loss=0.03859, over 4892.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2696, pruned_loss=0.04462, over 936090.97 frames. ], batch size: 13, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:11,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=211458.66666666666, ans=0.2 +2024-07-28 23:12:12,938 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.256e+01 5.544e+01 6.376e+01 7.640e+01 1.111e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 23:12:14,781 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:12:14,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=211458.66666666666, ans=0.0 +2024-07-28 23:12:14,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.36 vs. 
limit=15.0 +2024-07-28 23:12:22,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211472.0, ans=0.1 +2024-07-28 23:12:24,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=211485.33333333334, ans=0.025 +2024-07-28 23:12:31,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=211498.66666666666, ans=0.0 +2024-07-28 23:12:37,218 INFO [train.py:1114] (1/4) Epoch 16, batch 5300, loss[loss=0.1761, simple_loss=0.2832, pruned_loss=0.03452, over 4627.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2676, pruned_loss=0.04378, over 934646.56 frames. ], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:37,567 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.09 vs. limit=10.0 +2024-07-28 23:12:38,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=211512.0, ans=0.0 +2024-07-28 23:12:41,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=211512.0, ans=0.025 +2024-07-28 23:12:51,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=211538.66666666666, ans=0.2 +2024-07-28 23:12:52,859 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.11 vs. limit=6.0 +2024-07-28 23:12:57,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.43 vs. limit=10.0 +2024-07-28 23:12:59,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=211552.0, ans=0.025 +2024-07-28 23:12:59,838 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=211552.0, ans=0.125 +2024-07-28 23:13:10,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=211578.66666666666, ans=0.0 +2024-07-28 23:13:10,601 INFO [train.py:1114] (1/4) Epoch 16, batch 5350, loss[loss=0.1445, simple_loss=0.2211, pruned_loss=0.03396, over 4590.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.04408, over 936662.01 frames. 
], batch size: 10, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:15,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=211578.66666666666, ans=0.125 +2024-07-28 23:13:19,947 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.493e+01 6.071e+01 6.914e+01 1.248e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:13:22,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=211592.0, ans=0.0 +2024-07-28 23:13:23,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=211605.33333333334, ans=0.125 +2024-07-28 23:13:31,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=211618.66666666666, ans=0.0 +2024-07-28 23:13:41,375 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:13:43,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211632.0, ans=0.125 +2024-07-28 23:13:44,728 INFO [train.py:1114] (1/4) Epoch 16, batch 5400, loss[loss=0.1976, simple_loss=0.2967, pruned_loss=0.04925, over 4185.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2691, pruned_loss=0.04474, over 930848.98 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:45,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.49 vs. limit=22.5 +2024-07-28 23:13:48,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211645.33333333334, ans=0.1 +2024-07-28 23:13:50,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=211645.33333333334, ans=0.0 +2024-07-28 23:13:56,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=211658.66666666666, ans=0.035 +2024-07-28 23:14:11,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.51 vs. limit=12.0 +2024-07-28 23:14:13,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=211698.66666666666, ans=0.0 +2024-07-28 23:14:16,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.06 vs. limit=22.5 +2024-07-28 23:14:17,998 INFO [train.py:1114] (1/4) Epoch 16, batch 5450, loss[loss=0.151, simple_loss=0.2307, pruned_loss=0.03565, over 4699.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2682, pruned_loss=0.04424, over 933386.17 frames. ], batch size: 11, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:14:22,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=211712.0, ans=0.0 +2024-07-28 23:14:26,677 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:14:26,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.42 vs. 
limit=15.0 +2024-07-28 23:14:27,868 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.574e+01 6.234e+01 6.810e+01 1.084e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:14:36,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211738.66666666666, ans=0.1 +2024-07-28 23:14:38,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=211738.66666666666, ans=0.125 +2024-07-28 23:14:42,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211752.0, ans=0.125 +2024-07-28 23:14:43,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=211752.0, ans=0.09899494936611666 +2024-07-28 23:14:53,030 INFO [train.py:1114] (1/4) Epoch 16, batch 5500, loss[loss=0.2016, simple_loss=0.2908, pruned_loss=0.05623, over 4257.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2684, pruned_loss=0.04492, over 930995.90 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:03,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.84 vs. limit=22.5 +2024-07-28 23:15:28,502 INFO [train.py:1114] (1/4) Epoch 16, batch 5550, loss[loss=0.1595, simple_loss=0.2512, pruned_loss=0.03393, over 4706.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2694, pruned_loss=0.04545, over 932900.13 frames. ], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:35,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211858.66666666666, ans=0.0 +2024-07-28 23:15:37,921 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.698e+01 6.304e+01 7.513e+01 1.256e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 23:15:39,416 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:15:46,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.75 vs. limit=15.0 +2024-07-28 23:15:52,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211885.33333333334, ans=0.125 +2024-07-28 23:15:54,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211885.33333333334, ans=0.1 +2024-07-28 23:16:02,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=211912.0, ans=0.0 +2024-07-28 23:16:02,698 INFO [train.py:1114] (1/4) Epoch 16, batch 5600, loss[loss=0.1592, simple_loss=0.2651, pruned_loss=0.02661, over 4743.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2693, pruned_loss=0.04487, over 933996.66 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:16,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=211925.33333333334, ans=0.0 +2024-07-28 23:16:19,876 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.18 vs. 
limit=15.0 +2024-07-28 23:16:21,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.44 vs. limit=15.0 +2024-07-28 23:16:26,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=211938.66666666666, ans=0.125 +2024-07-28 23:16:40,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211965.33333333334, ans=0.125 +2024-07-28 23:16:44,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=211965.33333333334, ans=0.125 +2024-07-28 23:16:49,053 INFO [train.py:1114] (1/4) Epoch 16, batch 5650, loss[loss=0.1622, simple_loss=0.2557, pruned_loss=0.03434, over 4509.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2687, pruned_loss=0.04479, over 936425.12 frames. ], batch size: 21, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:49,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=211978.66666666666, ans=0.0 +2024-07-28 23:16:56,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211992.0, ans=0.125 +2024-07-28 23:16:58,579 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.506e+01 6.230e+01 6.941e+01 1.207e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 23:17:11,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212018.66666666666, ans=0.125 +2024-07-28 23:17:16,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=212032.0, ans=0.2 +2024-07-28 23:17:17,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212032.0, ans=0.1 +2024-07-28 23:17:22,862 INFO [train.py:1114] (1/4) Epoch 16, batch 5700, loss[loss=0.1979, simple_loss=0.2891, pruned_loss=0.05334, over 4694.00 frames. ], tot_loss[loss=0.179, simple_loss=0.269, pruned_loss=0.04455, over 938182.05 frames. 
], batch size: 13, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:17:24,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=212045.33333333334, ans=0.05 +2024-07-28 23:17:27,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=212045.33333333334, ans=0.125 +2024-07-28 23:17:31,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212058.66666666666, ans=0.1 +2024-07-28 23:17:42,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=212072.0, ans=0.0 +2024-07-28 23:17:44,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=212072.0, ans=0.0 +2024-07-28 23:17:46,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=212085.33333333334, ans=0.0 +2024-07-28 23:17:47,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212085.33333333334, ans=0.125 +2024-07-28 23:17:52,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=212085.33333333334, ans=0.125 +2024-07-28 23:17:53,592 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-07-28 23:17:55,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=212098.66666666666, ans=0.125 +2024-07-28 23:17:55,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=212098.66666666666, ans=0.2 +2024-07-28 23:17:59,819 INFO [train.py:1114] (1/4) Epoch 16, batch 5750, loss[loss=0.1779, simple_loss=0.2909, pruned_loss=0.03241, over 4701.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2691, pruned_loss=0.04414, over 938361.40 frames. ], batch size: 19, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:18:06,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.39 vs. limit=15.0 +2024-07-28 23:18:10,179 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.670e+01 6.129e+01 6.618e+01 9.069e+01, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 23:18:12,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0 +2024-07-28 23:18:13,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=212138.66666666666, ans=0.125 +2024-07-28 23:18:36,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=212165.33333333334, ans=0.95 +2024-07-28 23:18:38,582 INFO [train.py:1114] (1/4) Epoch 16, batch 5800, loss[loss=0.2149, simple_loss=0.3072, pruned_loss=0.06129, over 4768.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2703, pruned_loss=0.0448, over 937912.29 frames. 
], batch size: 19, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:18:42,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212178.66666666666, ans=0.125 +2024-07-28 23:18:49,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212192.0, ans=0.125 +2024-07-28 23:18:53,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=212205.33333333334, ans=0.125 +2024-07-28 23:19:02,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=212218.66666666666, ans=0.025 +2024-07-28 23:19:12,011 INFO [train.py:1114] (1/4) Epoch 16, batch 5850, loss[loss=0.1711, simple_loss=0.2669, pruned_loss=0.03767, over 4550.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2701, pruned_loss=0.04507, over 937902.44 frames. ], batch size: 21, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:12,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212245.33333333334, ans=0.125 +2024-07-28 23:19:20,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=212258.66666666666, ans=0.125 +2024-07-28 23:19:21,302 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.632e+01 6.313e+01 6.909e+01 9.080e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 23:19:29,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.23 vs. limit=15.0 +2024-07-28 23:19:32,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=212285.33333333334, ans=0.025 +2024-07-28 23:19:38,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=212298.66666666666, ans=0.0 +2024-07-28 23:19:42,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212298.66666666666, ans=0.125 +2024-07-28 23:19:46,150 INFO [train.py:1114] (1/4) Epoch 16, batch 5900, loss[loss=0.1832, simple_loss=0.2807, pruned_loss=0.04285, over 4695.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2698, pruned_loss=0.045, over 938411.03 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:55,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212325.33333333334, ans=0.125 +2024-07-28 23:19:59,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0 +2024-07-28 23:20:04,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=212338.66666666666, ans=0.0 +2024-07-28 23:20:19,842 INFO [train.py:1114] (1/4) Epoch 16, batch 5950, loss[loss=0.1873, simple_loss=0.2851, pruned_loss=0.04476, over 4682.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.269, pruned_loss=0.04459, over 940093.02 frames. 
], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:23,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=212378.66666666666, ans=0.125 +2024-07-28 23:20:26,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=212392.0, ans=0.025 +2024-07-28 23:20:28,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=212392.0, ans=0.125 +2024-07-28 23:20:29,212 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.558e+01 6.099e+01 6.527e+01 9.669e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 23:20:31,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.51 vs. limit=15.0 +2024-07-28 23:20:34,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=212405.33333333334, ans=0.125 +2024-07-28 23:20:41,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212418.66666666666, ans=0.1 +2024-07-28 23:20:46,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=212432.0, ans=0.0 +2024-07-28 23:20:55,232 INFO [train.py:1114] (1/4) Epoch 16, batch 6000, loss[loss=0.1667, simple_loss=0.2575, pruned_loss=0.03799, over 4298.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2687, pruned_loss=0.04465, over 937616.60 frames. ], batch size: 26, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:55,233 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-28 23:21:04,993 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.7885, 2.5920, 3.1143, 3.4346, 3.4933, 3.0188, 3.5496, 2.6865], + device='cuda:1') +2024-07-28 23:21:07,047 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1625, simple_loss=0.2653, pruned_loss=0.02984, over 944034.00 frames. +2024-07-28 23:21:07,047 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-28 23:21:12,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212445.33333333334, ans=0.125 +2024-07-28 23:21:15,543 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.58 vs. limit=15.0 +2024-07-28 23:21:25,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=212472.0, ans=0.2 +2024-07-28 23:21:31,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=15.0 +2024-07-28 23:21:31,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=212485.33333333334, ans=0.0 +2024-07-28 23:21:32,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.40 vs. 
limit=22.5 +2024-07-28 23:21:33,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212485.33333333334, ans=0.1 +2024-07-28 23:21:41,038 INFO [train.py:1114] (1/4) Epoch 16, batch 6050, loss[loss=0.1545, simple_loss=0.2438, pruned_loss=0.03256, over 4779.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.268, pruned_loss=0.04452, over 938731.11 frames. ], batch size: 12, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:21:43,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.20 vs. limit=6.0 +2024-07-28 23:21:45,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212512.0, ans=0.125 +2024-07-28 23:21:55,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212525.33333333334, ans=0.125 +2024-07-28 23:21:57,154 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.490e+01 6.163e+01 6.956e+01 9.204e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 23:22:17,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212565.33333333334, ans=0.125 +2024-07-28 23:22:19,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212565.33333333334, ans=0.1 +2024-07-28 23:22:22,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.95 vs. limit=6.0 +2024-07-28 23:22:24,106 INFO [train.py:1114] (1/4) Epoch 16, batch 6100, loss[loss=0.1883, simple_loss=0.2861, pruned_loss=0.04525, over 4687.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2681, pruned_loss=0.04456, over 937952.60 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:20,108 INFO [train.py:1114] (1/4) Epoch 16, batch 6150, loss[loss=0.2381, simple_loss=0.3098, pruned_loss=0.08323, over 3317.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2681, pruned_loss=0.04414, over 936706.49 frames. ], batch size: 35, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:23,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=212645.33333333334, ans=0.125 +2024-07-28 23:23:23,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212645.33333333334, ans=0.125 +2024-07-28 23:23:27,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=212658.66666666666, ans=0.0 +2024-07-28 23:23:29,816 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.196e+01 5.601e+01 6.236e+01 7.046e+01 1.205e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:23:33,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=212672.0, ans=0.125 +2024-07-28 23:23:33,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.99 vs. 
limit=22.5 +2024-07-28 23:23:36,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=212672.0, ans=0.0 +2024-07-28 23:23:39,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=212685.33333333334, ans=0.125 +2024-07-28 23:23:43,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=212685.33333333334, ans=0.125 +2024-07-28 23:23:46,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=212698.66666666666, ans=0.125 +2024-07-28 23:23:53,678 INFO [train.py:1114] (1/4) Epoch 16, batch 6200, loss[loss=0.1968, simple_loss=0.2936, pruned_loss=0.04996, over 4748.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.04422, over 936722.75 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:58,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=212712.0, ans=0.0 +2024-07-28 23:24:01,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212725.33333333334, ans=0.1 +2024-07-28 23:24:07,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=212738.66666666666, ans=0.0 +2024-07-28 23:24:29,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=212765.33333333334, ans=0.0 +2024-07-28 23:24:30,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=212765.33333333334, ans=0.125 +2024-07-28 23:24:32,615 INFO [train.py:1114] (1/4) Epoch 16, batch 6250, loss[loss=0.1975, simple_loss=0.2951, pruned_loss=0.04992, over 4812.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2687, pruned_loss=0.04407, over 933203.97 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:24:47,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=212778.66666666666, ans=0.125 +2024-07-28 23:24:53,130 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.825e+01 6.547e+01 7.445e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-28 23:25:10,157 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-28 23:25:10,547 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=212805.33333333334, ans=0.125 +2024-07-28 23:25:15,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212818.66666666666, ans=0.1 +2024-07-28 23:25:28,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=212818.66666666666, ans=0.125 +2024-07-28 23:25:31,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=212832.0, ans=0.2 +2024-07-28 23:25:38,075 INFO [train.py:1114] (1/4) Epoch 16, batch 6300, loss[loss=0.1529, simple_loss=0.2344, pruned_loss=0.03571, over 4497.00 frames. 
], tot_loss[loss=0.1795, simple_loss=0.2695, pruned_loss=0.04478, over 930412.74 frames. ], batch size: 10, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:25:42,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=212845.33333333334, ans=0.125
+2024-07-28 23:25:54,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=212858.66666666666, ans=0.2
+2024-07-28 23:26:04,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=12.0
+2024-07-28 23:26:10,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=212898.66666666666, ans=0.0
+2024-07-28 23:26:14,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=212898.66666666666, ans=0.125
+2024-07-28 23:26:20,513 INFO [train.py:1114] (1/4) Epoch 16, batch 6350, loss[loss=0.2005, simple_loss=0.2916, pruned_loss=0.05466, over 4539.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.04437, over 934233.52 frames. ], batch size: 21, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:26:45,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.666e+01 6.076e+01 6.815e+01 1.142e+02, threshold=1.215e+02, percent-clipped=0.0
+2024-07-28 23:26:48,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212938.66666666666, ans=0.1
+2024-07-28 23:26:51,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=212938.66666666666, ans=10.0
+2024-07-28 23:26:56,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212952.0, ans=0.1
+2024-07-28 23:27:01,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=212952.0, ans=0.125
+2024-07-28 23:27:02,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212952.0, ans=0.125
+2024-07-28 23:27:03,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212965.33333333334, ans=0.1
+2024-07-28 23:27:14,262 INFO [train.py:1114] (1/4) Epoch 16, batch 6400, loss[loss=0.2273, simple_loss=0.3107, pruned_loss=0.07197, over 4636.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2688, pruned_loss=0.04475, over 935894.79 frames. ], batch size: 13, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:27:26,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=212992.0, ans=0.0
+2024-07-28 23:27:29,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=213005.33333333334, ans=0.125
+2024-07-28 23:27:37,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=213005.33333333334, ans=0.125
+2024-07-28 23:27:48,772 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=213032.0, ans=0.125
+2024-07-28 23:27:51,958 INFO [train.py:1114] (1/4) Epoch 16, batch 6450, loss[loss=0.1957, simple_loss=0.2964, pruned_loss=0.04749, over 4479.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2695, pruned_loss=0.04443, over 939312.12 frames. ], batch size: 21, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:27:52,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0
+2024-07-28 23:27:58,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=213045.33333333334, ans=0.2
+2024-07-28 23:28:05,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213058.66666666666, ans=0.125
+2024-07-28 23:28:08,730 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.830e+01 6.533e+01 7.899e+01 1.104e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-28 23:28:26,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=213085.33333333334, ans=0.2
+2024-07-28 23:28:35,507 INFO [train.py:1114] (1/4) Epoch 16, batch 6500, loss[loss=0.2246, simple_loss=0.295, pruned_loss=0.07712, over 3556.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2685, pruned_loss=0.04381, over 940558.87 frames. ], batch size: 36, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:29:06,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=213152.0, ans=0.0
+2024-07-28 23:29:15,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213165.33333333334, ans=0.125
+2024-07-28 23:29:15,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=213165.33333333334, ans=0.035
+2024-07-28 23:29:19,565 INFO [train.py:1114] (1/4) Epoch 16, batch 6550, loss[loss=0.1492, simple_loss=0.2274, pruned_loss=0.03547, over 4802.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2672, pruned_loss=0.0433, over 943352.37 frames. ], batch size: 11, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:29:22,673 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.88 vs. limit=15.0
+2024-07-28 23:29:27,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=213178.66666666666, ans=0.07
+2024-07-28 23:29:30,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=213192.0, ans=0.0
+2024-07-28 23:29:31,657 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.761e+01 6.311e+01 7.241e+01 1.321e+02, threshold=1.262e+02, percent-clipped=1.0
+2024-07-28 23:29:39,831 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.98 vs. limit=12.0
+2024-07-28 23:29:43,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0
+2024-07-28 23:29:59,643 INFO [train.py:1114] (1/4) Epoch 16, batch 6600, loss[loss=0.1655, simple_loss=0.271, pruned_loss=0.03007, over 4936.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2675, pruned_loss=0.04315, over 945007.26 frames. ], batch size: 14, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:30:00,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213245.33333333334, ans=0.1
+2024-07-28 23:30:08,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=213258.66666666666, ans=0.125
+2024-07-28 23:30:13,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=213272.0, ans=0.0
+2024-07-28 23:30:17,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=213272.0, ans=0.0
+2024-07-28 23:30:18,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=213272.0, ans=0.125
+2024-07-28 23:30:21,674 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:30:23,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=213285.33333333334, ans=0.0
+2024-07-28 23:30:23,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=213285.33333333334, ans=0.125
+2024-07-28 23:30:24,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=213285.33333333334, ans=0.125
+2024-07-28 23:30:33,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.65 vs. limit=15.0
+2024-07-28 23:30:35,778 INFO [train.py:1114] (1/4) Epoch 16, batch 6650, loss[loss=0.18, simple_loss=0.2687, pruned_loss=0.04568, over 4620.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2669, pruned_loss=0.04299, over 943433.08 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:30:40,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=213312.0, ans=0.0
+2024-07-28 23:30:46,058 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+01 5.740e+01 6.263e+01 6.841e+01 9.907e+01, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 23:30:50,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.98 vs. limit=6.0
+2024-07-28 23:30:52,534 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.63 vs. limit=22.5
+2024-07-28 23:30:54,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=213338.66666666666, ans=10.0
+2024-07-28 23:31:01,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=213352.0, ans=0.07
+2024-07-28 23:31:05,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0
+2024-07-28 23:31:11,671 INFO [train.py:1114] (1/4) Epoch 16, batch 6700, loss[loss=0.187, simple_loss=0.2764, pruned_loss=0.04883, over 4754.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2678, pruned_loss=0.0433, over 941988.45 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:31:26,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213405.33333333334, ans=0.1
+2024-07-28 23:31:36,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.83 vs. limit=12.0
+2024-07-28 23:31:44,986 INFO [train.py:1114] (1/4) Epoch 16, batch 6750, loss[loss=0.2199, simple_loss=0.307, pruned_loss=0.06637, over 4078.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2676, pruned_loss=0.04317, over 939802.28 frames. ], batch size: 25, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:31:48,086 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.04 vs. limit=15.0
+2024-07-28 23:31:57,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213458.66666666666, ans=0.1
+2024-07-28 23:31:59,718 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.135e+01 5.547e+01 6.307e+01 7.303e+01 1.020e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 23:32:01,208 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:32:09,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=213472.0, ans=12.0
+2024-07-28 23:33:11,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213498.66666666666, ans=0.1
+2024-07-28 23:33:52,026 INFO [train.py:1114] (1/4) Epoch 16, batch 6800, loss[loss=0.2099, simple_loss=0.3058, pruned_loss=0.05699, over 4628.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2682, pruned_loss=0.04374, over 938241.83 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:33:53,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.80 vs. limit=15.0
+2024-07-28 23:33:55,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213512.0, ans=0.1
+2024-07-28 23:34:05,518 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.14 vs. limit=8.0
+2024-07-28 23:34:12,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=213538.66666666666, ans=15.0
+2024-07-28 23:34:12,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. limit=15.0
+2024-07-28 23:34:18,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213552.0, ans=0.1
+2024-07-28 23:34:26,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=213565.33333333334, ans=0.0
+2024-07-28 23:34:31,086 INFO [train.py:1114] (1/4) Epoch 16, batch 6850, loss[loss=0.1927, simple_loss=0.2752, pruned_loss=0.05511, over 4702.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2681, pruned_loss=0.04438, over 940201.86 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:34:32,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213578.66666666666, ans=0.125
+2024-07-28 23:34:33,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=213578.66666666666, ans=0.125
+2024-07-28 23:34:35,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=213578.66666666666, ans=0.0
+2024-07-28 23:34:40,946 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.896e+01 6.305e+01 7.215e+01 1.193e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 23:34:43,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=213592.0, ans=0.02
+2024-07-28 23:34:47,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213605.33333333334, ans=0.1
+2024-07-28 23:34:48,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=213605.33333333334, ans=0.05
+2024-07-28 23:34:51,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=213618.66666666666, ans=0.2
+2024-07-28 23:35:03,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=15.0
+2024-07-28 23:35:04,749 INFO [train.py:1114] (1/4) Epoch 16, batch 6900, loss[loss=0.1685, simple_loss=0.2557, pruned_loss=0.04064, over 4957.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2694, pruned_loss=0.04486, over 942807.53 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:35:12,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=213658.66666666666, ans=0.025
+2024-07-28 23:35:19,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=213672.0, ans=10.0
+2024-07-28 23:35:28,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213685.33333333334, ans=0.1
+2024-07-28 23:35:28,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=213685.33333333334, ans=0.0
+2024-07-28 23:35:38,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=213698.66666666666, ans=0.07
+2024-07-28 23:35:39,960 INFO [train.py:1114] (1/4) Epoch 16, batch 6950, loss[loss=0.1606, simple_loss=0.2421, pruned_loss=0.03952, over 4507.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.269, pruned_loss=0.04445, over 939961.23 frames. ], batch size: 10, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:35:44,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=15.0
+2024-07-28 23:35:46,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213725.33333333334, ans=0.1
+2024-07-28 23:35:46,819 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0
+2024-07-28 23:35:47,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=213725.33333333334, ans=0.0
+2024-07-28 23:35:50,012 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.740e+01 6.194e+01 7.107e+01 9.358e+01, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 23:35:53,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.22 vs. limit=15.0
+2024-07-28 23:35:56,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213738.66666666666, ans=0.125
+2024-07-28 23:35:57,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213738.66666666666, ans=0.1
+2024-07-28 23:36:07,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=213752.0, ans=22.5
+2024-07-28 23:36:10,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=213765.33333333334, ans=0.125
+2024-07-28 23:36:11,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. limit=10.0
+2024-07-28 23:36:16,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=213778.66666666666, ans=0.0
+2024-07-28 23:36:17,389 INFO [train.py:1114] (1/4) Epoch 16, batch 7000, loss[loss=0.2003, simple_loss=0.2896, pruned_loss=0.05552, over 4658.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2684, pruned_loss=0.04444, over 938655.28 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:36:19,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.88 vs. limit=15.0
+2024-07-28 23:36:21,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.51 vs. limit=15.0
+2024-07-28 23:36:24,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=213792.0, ans=0.125
+2024-07-28 23:36:28,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213792.0, ans=0.1
+2024-07-28 23:36:44,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=213832.0, ans=0.125
+2024-07-28 23:36:50,055 INFO [train.py:1114] (1/4) Epoch 16, batch 7050, loss[loss=0.1967, simple_loss=0.2989, pruned_loss=0.04723, over 4700.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.268, pruned_loss=0.04388, over 941869.06 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:36:52,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=213845.33333333334, ans=0.2
+2024-07-28 23:37:00,890 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.658e+01 6.254e+01 7.324e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 23:37:02,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-28 23:37:07,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=213872.0, ans=0.125
+2024-07-28 23:37:11,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213885.33333333334, ans=0.125
+2024-07-28 23:37:13,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=213885.33333333334, ans=0.1
+2024-07-28 23:37:17,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=213898.66666666666, ans=0.0
+2024-07-28 23:37:18,058 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.46 vs. limit=12.0
+2024-07-28 23:37:24,463 INFO [train.py:1114] (1/4) Epoch 16, batch 7100, loss[loss=0.2097, simple_loss=0.2918, pruned_loss=0.06383, over 4806.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2696, pruned_loss=0.04496, over 936872.54 frames. ], batch size: 15, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:37:33,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=213925.33333333334, ans=15.0
+2024-07-28 23:37:46,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=213952.0, ans=0.2
+2024-07-28 23:37:59,200 INFO [train.py:1114] (1/4) Epoch 16, batch 7150, loss[loss=0.2693, simple_loss=0.362, pruned_loss=0.08825, over 4435.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2685, pruned_loss=0.04413, over 937811.27 frames. ], batch size: 21, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:38:08,737 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.615e+01 6.266e+01 7.149e+01 9.915e+01, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 23:38:10,372 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. limit=15.0
+2024-07-28 23:38:14,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214005.33333333334, ans=0.1
+2024-07-28 23:38:16,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=214005.33333333334, ans=0.125
+2024-07-28 23:38:20,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=214018.66666666666, ans=0.2
+2024-07-28 23:38:20,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=214018.66666666666, ans=0.2
+2024-07-28 23:38:24,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=214032.0, ans=0.025
+2024-07-28 23:38:31,552 INFO [train.py:1114] (1/4) Epoch 16, batch 7200, loss[loss=0.1841, simple_loss=0.2646, pruned_loss=0.05181, over 4801.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2686, pruned_loss=0.04432, over 938076.31 frames. ], batch size: 15, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:38:55,271 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:39:05,748 INFO [train.py:1114] (1/4) Epoch 16, batch 7250, loss[loss=0.1418, simple_loss=0.2215, pruned_loss=0.03099, over 4847.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2674, pruned_loss=0.0441, over 939609.26 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:39:10,142 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.03 vs. limit=10.0
+2024-07-28 23:39:15,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.553e+01 6.069e+01 6.578e+01 8.706e+01, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 23:39:15,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=214125.33333333334, ans=0.125
+2024-07-28 23:39:40,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.72 vs. limit=22.5
+2024-07-28 23:39:42,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=214138.66666666666, ans=0.025
+2024-07-28 23:39:43,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=214152.0, ans=0.2
+2024-07-28 23:39:45,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=214152.0, ans=0.125
+2024-07-28 23:39:47,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=214152.0, ans=0.2
+2024-07-28 23:39:50,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=214165.33333333334, ans=0.5
+2024-07-28 23:39:56,632 INFO [train.py:1114] (1/4) Epoch 16, batch 7300, loss[loss=0.1566, simple_loss=0.2422, pruned_loss=0.03555, over 4846.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2672, pruned_loss=0.04419, over 939926.40 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:40:06,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=214192.0, ans=0.04949747468305833
+2024-07-28 23:40:17,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=214218.66666666666, ans=0.07
+2024-07-28 23:40:21,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=22.5
+2024-07-28 23:40:24,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=214232.0, ans=0.025
+2024-07-28 23:40:30,740 INFO [train.py:1114] (1/4) Epoch 16, batch 7350, loss[loss=0.2, simple_loss=0.2983, pruned_loss=0.05087, over 4642.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2677, pruned_loss=0.04417, over 939287.65 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:40:36,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=12.0
+2024-07-28 23:40:37,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=214258.66666666666, ans=0.0
+2024-07-28 23:40:40,777 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.40 vs. limit=10.0
+2024-07-28 23:40:41,009 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.572e+01 6.176e+01 6.846e+01 9.880e+01, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 23:40:41,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214258.66666666666, ans=0.1
+2024-07-28 23:40:56,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=214285.33333333334, ans=0.0
+2024-07-28 23:41:09,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=214298.66666666666, ans=0.09899494936611666
+2024-07-28 23:41:10,932 INFO [train.py:1114] (1/4) Epoch 16, batch 7400, loss[loss=0.1767, simple_loss=0.2641, pruned_loss=0.04464, over 4698.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2684, pruned_loss=0.04463, over 940481.37 frames. ], batch size: 13, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:41:13,332 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=12.0
+2024-07-28 23:41:29,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=214338.66666666666, ans=0.125
+2024-07-28 23:41:30,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.34 vs. limit=15.0
+2024-07-28 23:41:40,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=214352.0, ans=0.2
+2024-07-28 23:41:50,691 INFO [train.py:1114] (1/4) Epoch 16, batch 7450, loss[loss=0.1489, simple_loss=0.2371, pruned_loss=0.03033, over 4606.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2678, pruned_loss=0.04462, over 937676.14 frames. ], batch size: 11, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:41:55,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=214378.66666666666, ans=0.025
+2024-07-28 23:42:01,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=214392.0, ans=0.2
+2024-07-28 23:42:02,403 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.840e+01 6.491e+01 7.591e+01 1.266e+02, threshold=1.298e+02, percent-clipped=1.0
+2024-07-28 23:42:06,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=214405.33333333334, ans=0.95
+2024-07-28 23:42:25,724 INFO [train.py:1114] (1/4) Epoch 16, batch 7500, loss[loss=0.207, simple_loss=0.2991, pruned_loss=0.05738, over 3677.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2675, pruned_loss=0.0441, over 936333.76 frames. ], batch size: 35, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:42:28,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214445.33333333334, ans=0.125
+2024-07-28 23:50:33,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=214472.0, ans=0.2
+2024-07-28 23:50:46,126 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:50:53,142 INFO [train.py:1114] (1/4) Epoch 16, batch 7550, loss[loss=0.1886, simple_loss=0.287, pruned_loss=0.04511, over 4626.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2692, pruned_loss=0.04436, over 936703.01 frames. ], batch size: 17, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:50:59,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=214525.33333333334, ans=0.125
+2024-07-28 23:51:02,809 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.493e+01 6.004e+01 6.763e+01 8.407e+01, threshold=1.201e+02, percent-clipped=0.0
+2024-07-28 23:51:10,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0
+2024-07-28 23:51:14,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=214552.0, ans=0.125
+2024-07-28 23:51:25,729 INFO [train.py:1114] (1/4) Epoch 16, batch 7600, loss[loss=0.1833, simple_loss=0.2854, pruned_loss=0.04062, over 4809.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2691, pruned_loss=0.0439, over 938546.27 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:51:27,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=214578.66666666666, ans=0.0
+2024-07-28 23:51:28,579 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0
+2024-07-28 23:51:30,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=214578.66666666666, ans=0.0
+2024-07-28 23:51:32,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=214592.0, ans=0.04949747468305833
+2024-07-28 23:52:20,201 INFO [train.py:1114] (1/4) Epoch 16, batch 7650, loss[loss=0.1469, simple_loss=0.2264, pruned_loss=0.03371, over 4940.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2678, pruned_loss=0.04347, over 937709.55 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:52:20,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=214645.33333333334, ans=0.07
+2024-07-28 23:52:26,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=214658.66666666666, ans=0.0
+2024-07-28 23:52:27,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=214658.66666666666, ans=0.125
+2024-07-28 23:52:29,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=214658.66666666666, ans=0.1
+2024-07-28 23:52:29,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=214658.66666666666, ans=0.2
+2024-07-28 23:52:30,298 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.652e+01 6.093e+01 6.907e+01 1.144e+02, threshold=1.219e+02, percent-clipped=0.0
+2024-07-28 23:52:43,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214685.33333333334, ans=0.1
+2024-07-28 23:52:44,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=214685.33333333334, ans=0.0
+2024-07-28 23:52:51,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214698.66666666666, ans=0.125
+2024-07-28 23:52:53,659 INFO [train.py:1114] (1/4) Epoch 16, batch 7700, loss[loss=0.1867, simple_loss=0.2864, pruned_loss=0.04353, over 4693.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2691, pruned_loss=0.04394, over 934787.20 frames. ], batch size: 13, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:52:56,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214712.0, ans=0.1
+2024-07-28 23:53:02,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=214725.33333333334, ans=0.0
+2024-07-28 23:53:04,345 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=15.0
+2024-07-28 23:53:07,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=214738.66666666666, ans=0.2
+2024-07-28 23:53:11,148 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:53:12,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=214752.0, ans=0.04949747468305833
+2024-07-28 23:53:25,965 INFO [train.py:1114] (1/4) Epoch 16, batch 7750, loss[loss=0.1744, simple_loss=0.2761, pruned_loss=0.03634, over 4934.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2692, pruned_loss=0.04381, over 936026.95 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:53:43,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.11 vs. limit=15.0
+2024-07-28 23:53:46,521 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.451e+01 5.576e+01 5.953e+01 6.432e+01 8.446e+01, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 23:53:49,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=214805.33333333334, ans=0.125
+2024-07-28 23:54:03,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214832.0, ans=0.1
+2024-07-28 23:54:06,841 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:54:06,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=214832.0, ans=0.0
+2024-07-28 23:54:06,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214832.0, ans=0.125
+2024-07-28 23:54:09,315 INFO [train.py:1114] (1/4) Epoch 16, batch 7800, loss[loss=0.1852, simple_loss=0.2812, pruned_loss=0.04461, over 4670.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2691, pruned_loss=0.04362, over 937720.36 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:54:27,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214885.33333333334, ans=0.125
+2024-07-28 23:54:42,805 INFO [train.py:1114] (1/4) Epoch 16, batch 7850, loss[loss=0.1648, simple_loss=0.2513, pruned_loss=0.03914, over 4901.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.269, pruned_loss=0.04385, over 936592.22 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:54:52,708 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.575e+01 6.196e+01 7.184e+01 1.116e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 23:54:53,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.01 vs. limit=6.0
+2024-07-28 23:55:09,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=214938.66666666666, ans=0.0
+2024-07-28 23:55:22,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214952.0, ans=0.1
+2024-07-28 23:55:40,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=214978.66666666666, ans=0.125
+2024-07-28 23:55:40,815 INFO [train.py:1114] (1/4) Epoch 16, batch 7900, loss[loss=0.1912, simple_loss=0.2819, pruned_loss=0.05021, over 4876.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2703, pruned_loss=0.04434, over 933429.58 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:55:40,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=214978.66666666666, ans=0.2
+2024-07-28 23:55:57,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=215005.33333333334, ans=0.0
+2024-07-28 23:56:02,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=215005.33333333334, ans=0.125
+2024-07-28 23:56:09,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=215018.66666666666, ans=0.025
+2024-07-28 23:56:10,673 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.24 vs. limit=15.0
+2024-07-28 23:56:11,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=215032.0, ans=0.2
+2024-07-28 23:56:18,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0
+2024-07-28 23:56:21,108 INFO [train.py:1114] (1/4) Epoch 16, batch 7950, loss[loss=0.1882, simple_loss=0.2824, pruned_loss=0.04698, over 3362.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2698, pruned_loss=0.04427, over 935516.53 frames. ], batch size: 35, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:56:29,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=22.5
+2024-07-28 23:56:52,123 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.562e+01 6.109e+01 6.836e+01 1.076e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 23:57:05,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215085.33333333334, ans=0.1
+2024-07-28 23:57:35,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=215098.66666666666, ans=0.0
+2024-07-28 23:57:39,460 INFO [train.py:1114] (1/4) Epoch 16, batch 8000, loss[loss=0.1686, simple_loss=0.2453, pruned_loss=0.04593, over 4610.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2684, pruned_loss=0.04366, over 934972.99 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:57:55,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.63 vs. limit=22.5
+2024-07-28 23:58:07,067 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=215165.33333333334, ans=0.125
+2024-07-28 23:58:07,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=215165.33333333334, ans=0.0
+2024-07-28 23:58:13,492 INFO [train.py:1114] (1/4) Epoch 16, batch 8050, loss[loss=0.1638, simple_loss=0.2788, pruned_loss=0.02443, over 4809.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2682, pruned_loss=0.04325, over 934629.15 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:58:24,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.579e+01 6.307e+01 7.164e+01 1.118e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 23:58:27,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0
+2024-07-28 23:58:41,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=215232.0, ans=0.1
+2024-07-28 23:58:45,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=215232.0, ans=0.2
+2024-07-28 23:58:48,136 INFO [train.py:1114] (1/4) Epoch 16, batch 8100, loss[loss=0.2133, simple_loss=0.2973, pruned_loss=0.06459, over 4802.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2688, pruned_loss=0.04359, over 934209.15 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:58:57,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=215258.66666666666, ans=0.125
+2024-07-28 23:59:06,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=215272.0, ans=0.95
+2024-07-28 23:59:07,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=215285.33333333334, ans=0.125
+2024-07-28 23:59:17,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=215298.66666666666, ans=0.025
+2024-07-28 23:59:22,313 INFO [train.py:1114] (1/4) Epoch 16, batch 8150, loss[loss=0.2032, simple_loss=0.2843, pruned_loss=0.06107, over 4791.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2674, pruned_loss=0.04328, over 937764.92 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:59:26,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=215312.0, ans=0.0
+2024-07-28 23:59:27,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=215312.0, ans=0.125
+2024-07-28 23:59:32,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.614e+01 6.330e+01 7.419e+01 1.009e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 23:59:35,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.41 vs. limit=22.5
+2024-07-28 23:59:37,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=215338.66666666666, ans=0.0
+2024-07-28 23:59:37,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215338.66666666666, ans=0.125
+2024-07-28 23:59:47,895 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.51 vs. limit=12.0
+2024-07-28 23:59:54,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215378.66666666666, ans=0.125
+2024-07-28 23:59:54,895 INFO [train.py:1114] (1/4) Epoch 16, batch 8200, loss[loss=0.1968, simple_loss=0.2898, pruned_loss=0.05185, over 4788.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2687, pruned_loss=0.0437, over 939266.57 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:59:59,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=215378.66666666666, ans=0.125
+2024-07-29 00:00:02,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=215392.0, ans=0.95
+2024-07-29 00:00:02,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215392.0, ans=0.1
+2024-07-29 00:00:19,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=215418.66666666666, ans=0.0
+2024-07-29 00:00:23,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=215432.0, ans=0.2
+2024-07-29 00:00:30,609 INFO [train.py:1114] (1/4) Epoch 16, batch 8250, loss[loss=0.1659, simple_loss=0.248, pruned_loss=0.04193, over 4900.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2689, pruned_loss=0.04405, over 938912.52 frames. ], batch size: 13, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:00:48,543 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.664e+01 6.137e+01 6.796e+01 1.110e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 00:00:57,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=215472.0, ans=0.125
+2024-07-29 00:00:58,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215472.0, ans=0.125
+2024-07-29 00:01:02,401 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.99 vs. limit=15.0
+2024-07-29 00:01:03,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=215485.33333333334, ans=0.05
+2024-07-29 00:01:06,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215498.66666666666, ans=0.1
+2024-07-29 00:01:11,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215498.66666666666, ans=0.125
+2024-07-29 00:01:13,398 INFO [train.py:1114] (1/4) Epoch 16, batch 8300, loss[loss=0.171, simple_loss=0.2725, pruned_loss=0.03474, over 4896.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2692, pruned_loss=0.04403, over 938739.80 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:01:14,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=215512.0, ans=0.025
+2024-07-29 00:01:50,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215525.33333333334, ans=0.1
+2024-07-29 00:01:53,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=215525.33333333334, ans=0.125
+2024-07-29 00:02:39,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.20 vs. limit=10.0
+2024-07-29 00:02:39,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=215538.66666666666, ans=0.0
+2024-07-29 00:02:42,301 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=215538.66666666666, ans=0.125
+2024-07-29 00:03:26,029 INFO [train.py:1114] (1/4) Epoch 16, batch 8350, loss[loss=0.1856, simple_loss=0.276, pruned_loss=0.04761, over 4777.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2684, pruned_loss=0.0435, over 941296.65 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:03:26,818 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:03:39,855 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.677e+01 6.151e+01 6.738e+01 9.364e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 00:03:39,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215592.0, ans=0.125
+2024-07-29 00:03:42,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215592.0, ans=0.1
+2024-07-29 00:03:51,601 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=12.0
+2024-07-29 00:04:01,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.91 vs. limit=15.0
+2024-07-29 00:04:04,284 INFO [train.py:1114] (1/4) Epoch 16, batch 8400, loss[loss=0.154, simple_loss=0.2527, pruned_loss=0.02767, over 4776.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2693, pruned_loss=0.044, over 939994.67 frames. ], batch size: 12, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:04:05,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.49 vs. limit=22.5
+2024-07-29 00:04:18,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215672.0, ans=0.125
+2024-07-29 00:04:32,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.37 vs. limit=15.0
+2024-07-29 00:04:36,867 INFO [train.py:1114] (1/4) Epoch 16, batch 8450, loss[loss=0.1909, simple_loss=0.2852, pruned_loss=0.04825, over 4808.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2703, pruned_loss=0.04436, over 938722.30 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:04:37,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=215712.0, ans=0.125
+2024-07-29 00:04:46,409 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.964e+01 6.863e+01 7.657e+01 1.232e+02, threshold=1.373e+02, percent-clipped=1.0
+2024-07-29 00:04:53,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.45 vs. limit=15.0
+2024-07-29 00:04:55,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=215738.66666666666, ans=0.125
+2024-07-29 00:04:55,472 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.52 vs. limit=15.0
+2024-07-29 00:04:55,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=215752.0, ans=0.2
+2024-07-29 00:04:57,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=6.0
+2024-07-29 00:05:00,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.11 vs. limit=15.0
+2024-07-29 00:05:09,548 INFO [train.py:1114] (1/4) Epoch 16, batch 8500, loss[loss=0.1694, simple_loss=0.2443, pruned_loss=0.0473, over 4608.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2682, pruned_loss=0.04379, over 938748.73 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:05:10,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=215778.66666666666, ans=0.0
+2024-07-29 00:05:14,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=215778.66666666666, ans=0.5
+2024-07-29 00:05:18,135 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=215792.0, ans=0.025
+2024-07-29 00:05:18,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=215792.0, ans=0.125
+2024-07-29 00:05:20,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=215792.0, ans=0.125
+2024-07-29 00:05:21,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.80 vs. limit=22.5
+2024-07-29 00:05:23,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=215805.33333333334, ans=0.0
+2024-07-29 00:05:27,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=215805.33333333334, ans=0.0
+2024-07-29 00:05:34,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=215818.66666666666, ans=0.02
+2024-07-29 00:05:40,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=215832.0, ans=10.0
+2024-07-29 00:05:41,906 INFO [train.py:1114] (1/4) Epoch 16, batch 8550, loss[loss=0.1319, simple_loss=0.2153, pruned_loss=0.02429, over 4809.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2678, pruned_loss=0.04366, over 939856.07 frames. ], batch size: 11, lr: 4.61e-03, grad_scale: 64.0
+2024-07-29 00:05:44,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=215845.33333333334, ans=0.2
+2024-07-29 00:05:52,589 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.843e+01 6.495e+01 7.573e+01 1.241e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-29 00:05:59,962 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.56 vs. limit=15.0
+2024-07-29 00:06:02,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=215885.33333333334, ans=0.2
+2024-07-29 00:06:05,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=215885.33333333334, ans=0.0
+2024-07-29 00:06:08,102 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0
+2024-07-29 00:06:16,110 INFO [train.py:1114] (1/4) Epoch 16, batch 8600, loss[loss=0.2199, simple_loss=0.3186, pruned_loss=0.06063, over 4801.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2674, pruned_loss=0.0439, over 939261.01 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 64.0
+2024-07-29 00:06:16,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215912.0, ans=0.125
+2024-07-29 00:06:39,454 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.94 vs. limit=22.5
+2024-07-29 00:06:47,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.03 vs. limit=22.5
+2024-07-29 00:06:57,229 INFO [train.py:1114] (1/4) Epoch 16, batch 8650, loss[loss=0.1958, simple_loss=0.2765, pruned_loss=0.0576, over 4908.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.04441, over 940408.20 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0
+2024-07-29 00:07:00,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215978.66666666666, ans=0.125
+2024-07-29 00:07:04,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=215992.0, ans=0.05
+2024-07-29 00:07:08,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.650e+01 6.263e+01 7.133e+01 1.178e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 00:07:10,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216005.33333333334, ans=0.125
+2024-07-29 00:07:16,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=216018.66666666666, ans=0.025
+2024-07-29 00:07:18,764 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:07:20,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=216018.66666666666, ans=0.125
+2024-07-29 00:07:22,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=216018.66666666666, ans=0.07
+2024-07-29 00:07:30,200 INFO [train.py:1114] (1/4) Epoch 16, batch 8700, loss[loss=0.1655, simple_loss=0.2624, pruned_loss=0.03431, over 4756.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2689, pruned_loss=0.0447, over 937908.59 frames. ], batch size: 13, lr: 4.61e-03, grad_scale: 32.0
], batch size: 13, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:07:38,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216058.66666666666, ans=0.125 +2024-07-29 00:07:41,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216058.66666666666, ans=0.125 +2024-07-29 00:07:41,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=216058.66666666666, ans=0.125 +2024-07-29 00:07:43,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216072.0, ans=0.1 +2024-07-29 00:07:45,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=216072.0, ans=0.0 +2024-07-29 00:07:49,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216085.33333333334, ans=0.125 +2024-07-29 00:07:59,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=216098.66666666666, ans=0.025 +2024-07-29 00:08:01,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=216098.66666666666, ans=0.0 +2024-07-29 00:08:06,546 INFO [train.py:1114] (1/4) Epoch 16, batch 8750, loss[loss=0.2375, simple_loss=0.3199, pruned_loss=0.07757, over 4683.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2695, pruned_loss=0.04499, over 936053.89 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:10,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=216112.0, ans=0.09899494936611666 +2024-07-29 00:08:16,791 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.631e+01 6.456e+01 7.086e+01 1.065e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:08:19,846 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-29 00:08:28,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=216152.0, ans=0.2 +2024-07-29 00:08:32,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216152.0, ans=0.1 +2024-07-29 00:08:35,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=216165.33333333334, ans=0.025 +2024-07-29 00:08:35,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=216165.33333333334, ans=0.0 +2024-07-29 00:08:39,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=216165.33333333334, ans=0.2 +2024-07-29 00:08:39,704 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:08:41,392 INFO [train.py:1114] (1/4) Epoch 16, batch 8800, loss[loss=0.1783, simple_loss=0.2723, pruned_loss=0.04213, over 4928.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2694, pruned_loss=0.04492, over 937016.62 frames. 
], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:43,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=216178.66666666666, ans=6.0 +2024-07-29 00:08:45,435 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.66 vs. limit=22.5 +2024-07-29 00:08:47,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=216192.0, ans=0.125 +2024-07-29 00:08:54,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216205.33333333334, ans=0.125 +2024-07-29 00:09:04,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216218.66666666666, ans=0.125 +2024-07-29 00:09:09,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216232.0, ans=0.125 +2024-07-29 00:09:10,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=216232.0, ans=0.125 +2024-07-29 00:09:12,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.63 vs. limit=5.0 +2024-07-29 00:09:14,338 INFO [train.py:1114] (1/4) Epoch 16, batch 8850, loss[loss=0.1977, simple_loss=0.2991, pruned_loss=0.04816, over 4482.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2682, pruned_loss=0.04464, over 930944.78 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:14,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=216245.33333333334, ans=0.125 +2024-07-29 00:09:14,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216245.33333333334, ans=0.125 +2024-07-29 00:09:18,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=216245.33333333334, ans=0.0 +2024-07-29 00:09:22,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.15 vs. limit=22.5 +2024-07-29 00:09:25,916 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.534e+01 6.492e+01 7.361e+01 1.003e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 00:09:26,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=216258.66666666666, ans=0.125 +2024-07-29 00:09:29,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=216272.0, ans=0.025 +2024-07-29 00:09:29,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216272.0, ans=0.1 +2024-07-29 00:09:40,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216285.33333333334, ans=0.125 +2024-07-29 00:09:43,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. 
limit=6.0 +2024-07-29 00:09:47,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.61 vs. limit=22.5 +2024-07-29 00:09:48,096 INFO [train.py:1114] (1/4) Epoch 16, batch 8900, loss[loss=0.1471, simple_loss=0.2319, pruned_loss=0.03116, over 4949.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2683, pruned_loss=0.04423, over 928942.81 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:02,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.00 vs. limit=15.0 +2024-07-29 00:10:04,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=216338.66666666666, ans=0.125 +2024-07-29 00:10:04,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=216338.66666666666, ans=0.125 +2024-07-29 00:10:21,258 INFO [train.py:1114] (1/4) Epoch 16, batch 8950, loss[loss=0.1823, simple_loss=0.269, pruned_loss=0.04778, over 4417.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2688, pruned_loss=0.04424, over 929756.12 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:25,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=216378.66666666666, ans=0.0 +2024-07-29 00:10:27,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=216392.0, ans=0.0 +2024-07-29 00:10:31,360 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.323e+01 5.469e+01 6.023e+01 7.554e+01 1.113e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 00:10:53,215 INFO [train.py:1114] (1/4) Epoch 16, batch 9000, loss[loss=0.1722, simple_loss=0.2679, pruned_loss=0.03831, over 4640.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2686, pruned_loss=0.04469, over 932729.55 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:53,216 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 00:11:03,575 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.0950, 6.0215, 5.3880, 5.6890], device='cuda:1') +2024-07-29 00:11:10,919 INFO [train.py:1146] (1/4) Epoch 16, validation: loss=0.1631, simple_loss=0.2656, pruned_loss=0.03028, over 944034.00 frames. +2024-07-29 00:11:10,921 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 00:11:23,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216472.0, ans=0.125 +2024-07-29 00:11:41,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216498.66666666666, ans=0.0 +2024-07-29 00:11:41,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=216498.66666666666, ans=0.025 +2024-07-29 00:11:43,595 INFO [train.py:1114] (1/4) Epoch 16, batch 9050, loss[loss=0.1853, simple_loss=0.2674, pruned_loss=0.05161, over 4517.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2678, pruned_loss=0.04457, over 933791.87 frames. ], batch size: 10, lr: 4.61e-03, grad_scale: 32.0
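The `Computing validation loss` / `Epoch 16, validation: loss=0.1631 ... over 944034.00 frames` pair above shows the trainer periodically pausing to score the whole validation set, reporting a frame-weighted loss rather than a single-batch one. The sketch below shows the general shape of such a pass; the batch fields and the model call are illustrative assumptions, not icefall's actual train.py API.

```python
import torch

def compute_validation_loss(model, valid_loader, device="cuda"):
    # Frame-weighted validation pass, in the spirit of the
    # [train.py:1137/1146] lines above. The batch keys ("features",
    # "targets", "num_frames") and the model signature are assumed names.
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():  # no autograd state, so peak memory stays flat
        for batch in valid_loader:
            loss = model(batch["features"].to(device),
                         batch["targets"].to(device))
            frames = batch["num_frames"]
            tot_loss += loss.item() * frames  # weight each batch by its frames
            tot_frames += frames
    model.train()
    # e.g. the "validation: loss=0.1631 ... over 944034.00 frames" figure
    return tot_loss / tot_frames
```

Weighting by frames rather than by batch makes the reported number insensitive to how utterances of different lengths happen to be bucketed into batches.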
+2024-07-29 00:11:54,113 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.894e+01 6.647e+01 7.904e+01 1.086e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-29 00:11:58,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=6.0 +2024-07-29 00:12:05,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=216552.0, ans=0.125 +2024-07-29 00:12:07,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=216552.0, ans=0.5 +2024-07-29 00:12:17,313 INFO [train.py:1114] (1/4) Epoch 16, batch 9100, loss[loss=0.1713, simple_loss=0.2621, pruned_loss=0.04022, over 4932.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2674, pruned_loss=0.04417, over 936474.42 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:20,683 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:12:35,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216605.33333333334, ans=0.125 +2024-07-29 00:12:39,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216618.66666666666, ans=0.125 +2024-07-29 00:12:50,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.49 vs. limit=15.0 +2024-07-29 00:12:51,514 INFO [train.py:1114] (1/4) Epoch 16, batch 9150, loss[loss=0.168, simple_loss=0.2617, pruned_loss=0.03713, over 4803.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2682, pruned_loss=0.04423, over 935825.48 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:54,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216645.33333333334, ans=0.1 +2024-07-29 00:13:00,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=15.0
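The `WARNING [optim.py:487]` lines scattered through this log summarize an adaptive gradient-clipping scheme: the printed quartiles are statistics of recent gradient norms, and the threshold tracks their median, scaled by `Clipping_scale` (on the line above, 1.329e+02 = 2.0 x 6.647e+01). A minimal sketch of that bookkeeping, assuming threshold = clipping_scale x running median; the class and method names are hypothetical, not icefall's actual optimizer:

```python
import statistics
from collections import deque
import torch

class AdaptiveGradClipper:
    # Clip to clipping_scale * median of recent gradient norms, the rule
    # the [optim.py:487] WARNINGs appear to report (threshold ~= 2 * the
    # median quartile). A hypothetical sketch, not icefall's optim.py.
    def __init__(self, clipping_scale=2.0, history=1000):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent global grad norms
        self.clipped = 0
        self.seen = 0

    def clip_(self, parameters):
        grads = [p.grad for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(norm)
        self.seen += 1
        threshold = self.clipping_scale * statistics.median(self.norms)
        if norm > threshold:
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)  # rescale instead of skipping the step
        # the periodic WARNING line's quartiles and percent-clipped can be
        # derived from self.norms and self.clipped / self.seen
        return norm, threshold
```

Tying the threshold to the recent median makes clipping self-calibrating: it only fires on genuine outliers (note `percent-clipped` is almost always 0.0 above), rather than needing a hand-tuned absolute cutoff.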
+2024-07-29 00:13:01,748 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.764e+01 6.440e+01 7.377e+01 1.090e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 00:13:02,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=216658.66666666666, ans=0.125 +2024-07-29 00:13:05,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216672.0, ans=0.125 +2024-07-29 00:13:07,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216672.0, ans=0.125 +2024-07-29 00:13:10,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=216685.33333333334, ans=0.125 +2024-07-29 00:13:13,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=216685.33333333334, ans=0.2 +2024-07-29 00:13:13,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=216685.33333333334, ans=0.2 +2024-07-29 00:13:19,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=216698.66666666666, ans=0.0 +2024-07-29 00:13:23,166 INFO [train.py:1114] (1/4) Epoch 16, batch 9200, loss[loss=0.1864, simple_loss=0.2683, pruned_loss=0.05227, over 4839.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2671, pruned_loss=0.04367, over 937707.84 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:13:30,473 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:13:56,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.74 vs. limit=15.0 +2024-07-29 00:13:59,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=216765.33333333334, ans=0.09899494936611666 +2024-07-29 00:14:01,550 INFO [train.py:1114] (1/4) Epoch 16, batch 9250, loss[loss=0.1896, simple_loss=0.2778, pruned_loss=0.05075, over 4650.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2667, pruned_loss=0.04344, over 938426.70 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:02,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0
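The many `ScheduledFloat` lines above are one mechanism at work: quantities such as `conv_skip_rate`, `dropout_p`, and the balancer probabilities are not fixed hyperparameters but schedules evaluated at the current `batch_count`, with the current value printed as `ans=`. A minimal piecewise-linear version is sketched below; the breakpoints are invented for illustration, and in the real model each named module carries its own schedule.

```python
class PiecewiseSchedule:
    # Linear interpolation between (batch_count, value) breakpoints,
    # clamped at both ends; the behaviour suggested by the
    # [scaling.py:214] "ScheduledFloat ... batch_count=..., ans=..." lines.
    def __init__(self, *points):
        self.points = sorted(points)  # e.g. (0, 0.3), (20000, 0.1)

    def __call__(self, batch_count):
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if batch_count <= x1:
                return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# Invented breakpoints: a dropout that decays from 0.3 to 0.1 over the
# first 20k batches, so by batch_count=216672.0 it sits at its floor,
# matching the "ans=0.1" style of output above.
dropout_p = PiecewiseSchedule((0, 0.3), (20000, 0.1))
assert dropout_p(216672.0) == 0.1
```

Scheduling regularizers this way lets the network train under heavy noise early and settle toward a more deterministic computation later, which is why the same names keep reappearing in the log with slowly drifting `ans=` values.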
+2024-07-29 00:14:06,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=216778.66666666666, ans=0.2 +2024-07-29 00:14:11,592 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.549e+01 6.033e+01 6.747e+01 9.644e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 00:14:11,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=216792.0, ans=0.125 +2024-07-29 00:14:14,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=216805.33333333334, ans=0.125 +2024-07-29 00:14:16,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=15.0 +2024-07-29 00:14:17,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-29 00:14:18,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=216805.33333333334, ans=0.125 +2024-07-29 00:14:25,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=216818.66666666666, ans=0.0 +2024-07-29 00:14:26,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=216832.0, ans=0.125 +2024-07-29 00:14:29,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216832.0, ans=0.1 +2024-07-29 00:14:30,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=216832.0, ans=0.125 +2024-07-29 00:14:32,953 INFO [train.py:1114] (1/4) Epoch 16, batch 9300, loss[loss=0.1672, simple_loss=0.2599, pruned_loss=0.0373, over 4777.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2669, pruned_loss=0.04331, over 938472.07 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:46,706 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=216872.0, ans=0.0 +2024-07-29 00:14:53,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216885.33333333334, ans=0.1 +2024-07-29 00:14:55,360 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.32 vs. limit=12.0 +2024-07-29 00:14:55,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216885.33333333334, ans=0.125 +2024-07-29 00:14:58,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216898.66666666666, ans=0.125 +2024-07-29 00:15:03,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=216898.66666666666, ans=0.5 +2024-07-29 00:15:04,406 INFO [train.py:1114] (1/4) Epoch 16, batch 9350, loss[loss=0.15, simple_loss=0.2431, pruned_loss=0.02845, over 4805.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04306, over 935330.56 frames. ], batch size: 11, lr: 4.60e-03, grad_scale: 32.0
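The `Whitening: ... metric=2.98 vs. limit=15.0` lines compare a per-module statistic against a limit: the metric measures how far the covariance of a group of activations is from a scaled identity (about 1.0 for perfectly "white" features, growing as variance concentrates in a few directions), and a penalty is applied only when it exceeds the limit. One plausible way to compute such a metric, offered as an illustrative reconstruction rather than scaling.py's exact formula:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # Illustrative reconstruction of a whitening metric: roughly 1.0 when
    # each group's covariance is a multiple of the identity, growing toward
    # channels-per-group as variance collapses onto one direction.
    # Not necessarily the exact formula behind the [scaling.py:1024] lines.
    xf = x.reshape(-1, x.shape[-1]).float()            # (frames, channels)
    n, c = xf.shape
    g = c // num_groups                                # channels per group
    xg = xf.reshape(n, num_groups, g).transpose(0, 1)  # (groups, frames, g)
    cov = xg.transpose(1, 2) @ xg / n                  # (groups, g, g)
    scale = cov.diagonal(dim1=1, dim2=2).mean(dim=1)   # per-group mean variance
    metric = (cov ** 2).sum(dim=(1, 2)) / (scale ** 2 * g)
    return metric.mean().item()

# White noise scores close to 1.0, well under typical limits like 15.0 or 22.5:
print(whitening_metric(torch.randn(1000, 384), num_groups=1))
```

Under this reading, the frequent "metric=X vs. limit=Y" lines with X well below Y simply record that most modules' activations are staying comfortably decorrelated and no penalty is being applied.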
+2024-07-29 00:15:08,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=216912.0, ans=0.5 +2024-07-29 00:15:10,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.48 vs. limit=15.0 +2024-07-29 00:15:11,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=216925.33333333334, ans=0.0 +2024-07-29 00:15:14,952 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.736e+01 6.318e+01 7.656e+01 1.489e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 00:15:24,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=216952.0, ans=0.025 +2024-07-29 00:15:29,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=216965.33333333334, ans=0.2 +2024-07-29 00:15:34,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=216965.33333333334, ans=0.125 +2024-07-29 00:15:34,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=216965.33333333334, ans=0.125 +2024-07-29 00:15:35,903 INFO [train.py:1114] (1/4) Epoch 16, batch 9400, loss[loss=0.1599, simple_loss=0.2616, pruned_loss=0.02915, over 4688.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2668, pruned_loss=0.04308, over 933130.56 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:36,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=216978.66666666666, ans=0.125 +2024-07-29 00:15:44,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.91 vs. limit=6.0 +2024-07-29 00:15:47,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=217005.33333333334, ans=0.1 +2024-07-29 00:15:49,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.95 vs. limit=15.0 +2024-07-29 00:15:52,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=217005.33333333334, ans=0.2 +2024-07-29 00:15:54,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=217018.66666666666, ans=0.05 +2024-07-29 00:16:06,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.83 vs. limit=22.5 +2024-07-29 00:16:06,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=217032.0, ans=0.0 +2024-07-29 00:16:08,012 INFO [train.py:1114] (1/4) Epoch 16, batch 9450, loss[loss=0.1551, simple_loss=0.2346, pruned_loss=0.03784, over 4805.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2666, pruned_loss=0.04303, over 932465.58 frames. 
], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:25,646 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.291e+01 5.921e+01 6.735e+01 1.029e+02, threshold=1.184e+02, percent-clipped=0.0 +2024-07-29 00:16:30,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217072.0, ans=0.125 +2024-07-29 00:16:33,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=217085.33333333334, ans=0.125 +2024-07-29 00:16:34,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=217085.33333333334, ans=0.125 +2024-07-29 00:16:39,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=12.0 +2024-07-29 00:16:41,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=217098.66666666666, ans=0.125 +2024-07-29 00:16:46,844 INFO [train.py:1114] (1/4) Epoch 16, batch 9500, loss[loss=0.1575, simple_loss=0.2456, pruned_loss=0.03468, over 4714.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2675, pruned_loss=0.04333, over 934760.98 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:53,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=217125.33333333334, ans=0.125 +2024-07-29 00:16:57,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-29 00:17:02,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.47 vs. limit=10.0 +2024-07-29 00:17:07,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=217152.0, ans=0.125 +2024-07-29 00:17:08,717 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:17:09,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217152.0, ans=0.125 +2024-07-29 00:17:18,852 INFO [train.py:1114] (1/4) Epoch 16, batch 9550, loss[loss=0.1843, simple_loss=0.2631, pruned_loss=0.05271, over 4777.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2671, pruned_loss=0.04322, over 932175.21 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:27,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=217192.0, ans=0.125 +2024-07-29 00:17:28,559 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.662e+01 6.269e+01 6.816e+01 8.303e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:17:35,064 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:17:50,236 INFO [train.py:1114] (1/4) Epoch 16, batch 9600, loss[loss=0.2593, simple_loss=0.3301, pruned_loss=0.09428, over 3394.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2679, pruned_loss=0.04338, over 930889.85 frames. 
], batch size: 35, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:54,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217245.33333333334, ans=0.125 +2024-07-29 00:18:02,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=217272.0, ans=0.0 +2024-07-29 00:18:09,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=217285.33333333334, ans=0.125 +2024-07-29 00:18:17,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=217298.66666666666, ans=0.125 +2024-07-29 00:18:19,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=217298.66666666666, ans=0.0 +2024-07-29 00:18:19,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217298.66666666666, ans=0.125 +2024-07-29 00:18:21,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=217312.0, ans=0.125 +2024-07-29 00:18:22,112 INFO [train.py:1114] (1/4) Epoch 16, batch 9650, loss[loss=0.1991, simple_loss=0.284, pruned_loss=0.0571, over 4836.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2683, pruned_loss=0.04391, over 926834.94 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:22,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217312.0, ans=0.1 +2024-07-29 00:18:24,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=217312.0, ans=0.025 +2024-07-29 00:18:26,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.27 vs. limit=6.0 +2024-07-29 00:18:29,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=217325.33333333334, ans=0.125 +2024-07-29 00:18:32,294 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.687e+01 6.553e+01 7.550e+01 1.146e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:18:38,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=217338.66666666666, ans=0.125 +2024-07-29 00:18:44,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=217352.0, ans=0.125 +2024-07-29 00:18:53,902 INFO [train.py:1114] (1/4) Epoch 16, batch 9700, loss[loss=0.1883, simple_loss=0.2642, pruned_loss=0.05618, over 4091.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2688, pruned_loss=0.04412, over 924729.91 frames. 
], batch size: 25, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:57,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=217378.66666666666, ans=0.0 +2024-07-29 00:19:05,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=217392.0, ans=0.2 +2024-07-29 00:19:05,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=217392.0, ans=0.035 +2024-07-29 00:19:07,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.92 vs. limit=15.0 +2024-07-29 00:19:10,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=217405.33333333334, ans=0.0 +2024-07-29 00:19:25,693 INFO [train.py:1114] (1/4) Epoch 16, batch 9750, loss[loss=0.2026, simple_loss=0.2934, pruned_loss=0.05587, over 4664.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.268, pruned_loss=0.04389, over 925388.92 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:19:27,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=217445.33333333334, ans=0.0 +2024-07-29 00:19:27,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=217445.33333333334, ans=0.025 +2024-07-29 00:19:34,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=217458.66666666666, ans=0.5 +2024-07-29 00:19:35,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=217458.66666666666, ans=0.0 +2024-07-29 00:19:36,074 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+01 5.634e+01 6.378e+01 7.099e+01 1.078e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 00:19:45,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.26 vs. limit=15.0 +2024-07-29 00:19:55,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=217498.66666666666, ans=0.0 +2024-07-29 00:19:57,516 INFO [train.py:1114] (1/4) Epoch 16, batch 9800, loss[loss=0.1604, simple_loss=0.2515, pruned_loss=0.03468, over 4707.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2672, pruned_loss=0.04389, over 925302.74 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:10,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=217512.0, ans=0.0 +2024-07-29 00:20:32,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=217565.33333333334, ans=0.125 +2024-07-29 00:20:32,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.54 vs. limit=22.5 +2024-07-29 00:20:32,857 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:20:36,549 INFO [train.py:1114] (1/4) Epoch 16, batch 9850, loss[loss=0.1942, simple_loss=0.2841, pruned_loss=0.05211, over 4912.00 frames. 
], tot_loss[loss=0.1772, simple_loss=0.2672, pruned_loss=0.04364, over 927525.35 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:50,313 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.769e+01 6.344e+01 7.479e+01 1.066e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-29 00:20:56,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.46 vs. limit=10.0 +2024-07-29 00:21:01,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217618.66666666666, ans=0.1 +2024-07-29 00:21:03,792 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.00 vs. limit=15.0 +2024-07-29 00:21:07,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=217632.0, ans=0.0 +2024-07-29 00:21:11,730 INFO [train.py:1114] (1/4) Epoch 16, batch 9900, loss[loss=0.1736, simple_loss=0.2615, pruned_loss=0.04282, over 4843.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2682, pruned_loss=0.04432, over 926728.01 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:21:32,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=217685.33333333334, ans=0.125 +2024-07-29 00:21:40,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.08 vs. limit=15.0 +2024-07-29 00:21:41,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217698.66666666666, ans=0.125 +2024-07-29 00:21:42,963 INFO [train.py:1114] (1/4) Epoch 16, batch 9950, loss[loss=0.1558, simple_loss=0.2404, pruned_loss=0.03567, over 4819.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2687, pruned_loss=0.04509, over 929649.49 frames. ], batch size: 11, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:21:48,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=217725.33333333334, ans=0.0 +2024-07-29 00:21:49,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=217725.33333333334, ans=0.125 +2024-07-29 00:21:56,426 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.930e+01 6.462e+01 7.578e+01 1.307e+02, threshold=1.292e+02, percent-clipped=1.0 +2024-07-29 00:22:13,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217765.33333333334, ans=0.125 +2024-07-29 00:22:16,939 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.57 vs. limit=15.0 +2024-07-29 00:22:17,189 INFO [train.py:1114] (1/4) Epoch 16, batch 10000, loss[loss=0.1768, simple_loss=0.2773, pruned_loss=0.03814, over 4627.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2706, pruned_loss=0.04539, over 927683.48 frames. 
], batch size: 16, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:22,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=217792.0, ans=0.125 +2024-07-29 00:22:23,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.97 vs. limit=15.0 +2024-07-29 00:22:29,044 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.04 vs. limit=22.5 +2024-07-29 00:22:34,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217818.66666666666, ans=0.1 +2024-07-29 00:22:44,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=217832.0, ans=0.2 +2024-07-29 00:22:49,158 INFO [train.py:1114] (1/4) Epoch 16, batch 10050, loss[loss=0.2252, simple_loss=0.2927, pruned_loss=0.07887, over 3470.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2747, pruned_loss=0.04763, over 915853.24 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:49,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217845.33333333334, ans=0.125 +2024-07-29 00:22:54,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217845.33333333334, ans=0.125 +2024-07-29 00:22:58,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=8.70 vs. limit=12.0 +2024-07-29 00:23:01,093 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.872e+01 6.658e+01 7.418e+01 1.272e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-29 00:23:02,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=217858.66666666666, ans=0.0 +2024-07-29 00:23:07,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217872.0, ans=0.125 +2024-07-29 00:23:13,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=217885.33333333334, ans=0.125 +2024-07-29 00:23:15,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=217898.66666666666, ans=0.0 +2024-07-29 00:23:22,361 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.80 vs. limit=22.5 +2024-07-29 00:23:23,186 INFO [train.py:1114] (1/4) Epoch 16, batch 10100, loss[loss=0.1906, simple_loss=0.2817, pruned_loss=0.04977, over 3213.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2799, pruned_loss=0.05241, over 862511.93 frames. 
], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:27,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=217912.0, ans=0.0 +2024-07-29 00:23:29,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217925.33333333334, ans=0.125 +2024-07-29 00:23:33,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=217925.33333333334, ans=0.025 +2024-07-29 00:23:35,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217925.33333333334, ans=0.1 +2024-07-29 00:23:35,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217938.66666666666, ans=0.125 +2024-07-29 00:23:38,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=217938.66666666666, ans=0.125 +2024-07-29 00:23:38,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=217938.66666666666, ans=0.125 +2024-07-29 00:23:40,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=217938.66666666666, ans=0.0 +2024-07-29 00:23:41,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=217938.66666666666, ans=0.025 +2024-07-29 00:23:42,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.78 vs. limit=15.0 +2024-07-29 00:23:53,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=217965.33333333334, ans=0.0 +2024-07-29 00:23:55,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=15.0 +2024-07-29 00:23:55,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.67 vs. limit=22.5 +2024-07-29 00:23:56,121 INFO [train.py:1114] (1/4) Epoch 16, batch 10150, loss[loss=0.2228, simple_loss=0.3155, pruned_loss=0.06498, over 3243.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2828, pruned_loss=0.05525, over 821950.48 frames. 
], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:58,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217978.66666666666, ans=0.1 +2024-07-29 00:23:58,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=217978.66666666666, ans=0.0 +2024-07-29 00:24:06,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.947e+01 6.782e+01 7.198e+01 7.904e+01 2.355e+02, threshold=1.440e+02, percent-clipped=1.0 +2024-07-29 00:24:22,153 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:24:22,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=218032.0, ans=0.125 +2024-07-29 00:24:24,049 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=218032.0, ans=0.0 +2024-07-29 00:24:24,820 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:24:27,767 INFO [train.py:1114] (1/4) Epoch 16, batch 10200, loss[loss=0.2362, simple_loss=0.3031, pruned_loss=0.08459, over 3225.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2865, pruned_loss=0.05856, over 788966.44 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:24:32,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.95 vs. limit=22.5 +2024-07-29 00:24:38,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=218058.66666666666, ans=0.2 +2024-07-29 00:25:24,909 INFO [train.py:1114] (1/4) Epoch 17, batch 0, loss[loss=0.1329, simple_loss=0.224, pruned_loss=0.02085, over 4847.00 frames. ], tot_loss[loss=0.1329, simple_loss=0.224, pruned_loss=0.02085, over 4847.00 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:25:24,909 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 00:25:30,130 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.3360, 3.1596, 2.7149, 2.5297], device='cuda:1') +2024-07-29 00:25:36,963 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.1632, simple_loss=0.2676, pruned_loss=0.0294, over 944034.00 frames. +2024-07-29 00:25:36,964 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 00:25:43,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218088.0, ans=0.125 +2024-07-29 00:25:50,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=218101.33333333334, ans=0.125 +2024-07-29 00:25:50,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=218101.33333333334, ans=0.125 +2024-07-29 00:25:52,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0 +2024-07-29 00:26:12,486 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.98 vs. 
limit=22.5 +2024-07-29 00:26:23,653 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.974e+01 6.557e+01 7.210e+01 8.434e+01, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:26:27,164 INFO [train.py:1114] (1/4) Epoch 17, batch 50, loss[loss=0.1628, simple_loss=0.2517, pruned_loss=0.03702, over 4618.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.0428, over 206496.55 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:26:27,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218141.33333333334, ans=0.1 +2024-07-29 00:26:31,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=218141.33333333334, ans=0.0 +2024-07-29 00:26:36,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218154.66666666666, ans=0.1 +2024-07-29 00:26:48,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218181.33333333334, ans=0.1 +2024-07-29 00:26:48,775 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:27:00,754 INFO [train.py:1114] (1/4) Epoch 17, batch 100, loss[loss=0.1632, simple_loss=0.2449, pruned_loss=0.0407, over 4647.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2688, pruned_loss=0.04351, over 365621.15 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:05,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=218208.0, ans=0.0 +2024-07-29 00:27:05,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218208.0, ans=0.125 +2024-07-29 00:27:30,360 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.772e+01 6.593e+01 7.419e+01 9.701e+01, threshold=1.319e+02, percent-clipped=0.0 +2024-07-29 00:27:33,592 INFO [train.py:1114] (1/4) Epoch 17, batch 150, loss[loss=0.1253, simple_loss=0.2136, pruned_loss=0.01853, over 4623.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2681, pruned_loss=0.04295, over 494081.11 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:44,294 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=218288.0, ans=0.125 +2024-07-29 00:27:47,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218301.33333333334, ans=0.125 +2024-07-29 00:27:54,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218314.66666666666, ans=0.1 +2024-07-29 00:27:56,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=218314.66666666666, ans=0.125 +2024-07-29 00:28:02,207 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=218328.0, ans=0.2 +2024-07-29 00:28:06,811 INFO [train.py:1114] (1/4) Epoch 17, batch 200, loss[loss=0.2132, simple_loss=0.3066, pruned_loss=0.0599, over 4528.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2682, pruned_loss=0.04305, over 593824.05 frames. 
], batch size: 21, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:28:08,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218341.33333333334, ans=0.1 +2024-07-29 00:28:09,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=218341.33333333334, ans=0.125 +2024-07-29 00:28:10,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218341.33333333334, ans=0.1 +2024-07-29 00:28:10,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=218341.33333333334, ans=0.125 +2024-07-29 00:28:18,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.74 vs. limit=6.0 +2024-07-29 00:28:23,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.85 vs. limit=22.5 +2024-07-29 00:28:24,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=218368.0, ans=0.04949747468305833 +2024-07-29 00:28:34,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=218394.66666666666, ans=0.0 +2024-07-29 00:28:36,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218394.66666666666, ans=0.125 +2024-07-29 00:28:36,800 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.659e+01 6.456e+01 7.215e+01 1.150e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:28:40,259 INFO [train.py:1114] (1/4) Epoch 17, batch 250, loss[loss=0.1876, simple_loss=0.283, pruned_loss=0.04609, over 4604.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2686, pruned_loss=0.04351, over 670725.33 frames. ], batch size: 16, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:28:55,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-29 00:28:58,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.66 vs. limit=10.0 +2024-07-29 00:28:59,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.34 vs. limit=15.0 +2024-07-29 00:29:12,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218461.33333333334, ans=0.1 +2024-07-29 00:29:15,281 INFO [train.py:1114] (1/4) Epoch 17, batch 300, loss[loss=0.1665, simple_loss=0.2567, pruned_loss=0.03817, over 4799.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04303, over 730188.15 frames. ], batch size: 15, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:29,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218501.33333333334, ans=0.1 +2024-07-29 00:29:30,319 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.72 vs. 
limit=10.0 +2024-07-29 00:29:33,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=15.0 +2024-07-29 00:29:42,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=218514.66666666666, ans=0.125 +2024-07-29 00:29:47,005 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.418e+01 5.933e+01 6.484e+01 8.977e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-29 00:29:48,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=218528.0, ans=0.125 +2024-07-29 00:29:50,306 INFO [train.py:1114] (1/4) Epoch 17, batch 350, loss[loss=0.1536, simple_loss=0.2346, pruned_loss=0.03626, over 4951.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2669, pruned_loss=0.04268, over 775998.30 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:53,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=218541.33333333334, ans=0.125 +2024-07-29 00:29:57,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=218554.66666666666, ans=0.0 +2024-07-29 00:30:06,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=15.0 +2024-07-29 00:30:13,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218581.33333333334, ans=0.0 +2024-07-29 00:30:16,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=218594.66666666666, ans=0.05 +2024-07-29 00:30:17,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218594.66666666666, ans=0.125 +2024-07-29 00:30:17,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218594.66666666666, ans=0.125 +2024-07-29 00:30:21,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=218594.66666666666, ans=0.125 +2024-07-29 00:30:22,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=218594.66666666666, ans=0.05 +2024-07-29 00:30:23,657 INFO [train.py:1114] (1/4) Epoch 17, batch 400, loss[loss=0.1956, simple_loss=0.2824, pruned_loss=0.05437, over 4695.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2668, pruned_loss=0.04278, over 813456.27 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:30:39,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=218634.66666666666, ans=0.2 +2024-07-29 00:30:44,800 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.49 vs. limit=22.5 +2024-07-29 00:30:50,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.89 vs. 
limit=12.0 +2024-07-29 00:30:53,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218661.33333333334, ans=0.125 +2024-07-29 00:30:58,240 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.560e+01 5.995e+01 6.560e+01 9.746e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 00:30:59,379 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=218661.33333333334, ans=12.0 +2024-07-29 00:31:01,602 INFO [train.py:1114] (1/4) Epoch 17, batch 450, loss[loss=0.179, simple_loss=0.2695, pruned_loss=0.04421, over 4633.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2673, pruned_loss=0.04316, over 838853.62 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:31:24,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=218714.66666666666, ans=0.025 +2024-07-29 00:31:25,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=218714.66666666666, ans=0.125 +2024-07-29 00:31:26,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218714.66666666666, ans=0.125 +2024-07-29 00:31:33,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218728.0, ans=0.1 +2024-07-29 00:31:34,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=218741.33333333334, ans=0.04949747468305833 +2024-07-29 00:31:34,900 INFO [train.py:1114] (1/4) Epoch 17, batch 500, loss[loss=0.2206, simple_loss=0.3006, pruned_loss=0.07032, over 4672.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2675, pruned_loss=0.04341, over 861299.00 frames. ], batch size: 15, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:31:54,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=218768.0, ans=0.125 +2024-07-29 00:31:57,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.14 vs. limit=22.5 +2024-07-29 00:31:58,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=218781.33333333334, ans=0.125 +2024-07-29 00:32:01,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218781.33333333334, ans=0.125 +2024-07-29 00:32:08,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.31 vs. limit=15.0 +2024-07-29 00:32:09,026 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.408e+01 6.097e+01 6.893e+01 9.871e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 00:32:09,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=218794.66666666666, ans=0.5 +2024-07-29 00:32:11,394 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.09 vs. 
limit=15.0 +2024-07-29 00:32:12,365 INFO [train.py:1114] (1/4) Epoch 17, batch 550, loss[loss=0.1642, simple_loss=0.2712, pruned_loss=0.02861, over 4636.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2673, pruned_loss=0.04326, over 877549.80 frames. ], batch size: 17, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:32:12,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218808.0, ans=0.125 +2024-07-29 00:32:15,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=218808.0, ans=0.2 +2024-07-29 00:32:29,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218834.66666666666, ans=0.125 +2024-07-29 00:32:40,414 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.80 vs. limit=5.0 +2024-07-29 00:32:44,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=218861.33333333334, ans=0.125 +2024-07-29 00:32:44,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=218861.33333333334, ans=0.0 +2024-07-29 00:32:46,145 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:32:46,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218861.33333333334, ans=0.125 +2024-07-29 00:32:50,130 INFO [train.py:1114] (1/4) Epoch 17, batch 600, loss[loss=0.2022, simple_loss=0.3051, pruned_loss=0.04964, over 4618.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2664, pruned_loss=0.04269, over 891864.90 frames. ], batch size: 16, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:32:59,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218874.66666666666, ans=0.125 +2024-07-29 00:33:00,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=218874.66666666666, ans=0.025 +2024-07-29 00:33:13,573 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:33:15,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=218901.33333333334, ans=0.125 +2024-07-29 00:33:16,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=218914.66666666666, ans=0.0 +2024-07-29 00:33:18,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=218914.66666666666, ans=0.125 +2024-07-29 00:33:27,705 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.574e+01 6.190e+01 7.231e+01 1.147e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 00:33:31,217 INFO [train.py:1114] (1/4) Epoch 17, batch 650, loss[loss=0.1608, simple_loss=0.2488, pruned_loss=0.03642, over 4759.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.04251, over 903766.40 frames. 
], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:33:46,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=218968.0, ans=0.1 +2024-07-29 00:33:51,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-07-29 00:33:56,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=218981.33333333334, ans=0.0 +2024-07-29 00:33:59,335 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=15.0 +2024-07-29 00:34:00,085 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.13 vs. limit=22.5 +2024-07-29 00:34:02,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218994.66666666666, ans=0.1 +2024-07-29 00:34:04,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=219008.0, ans=0.125 +2024-07-29 00:34:05,189 INFO [train.py:1114] (1/4) Epoch 17, batch 700, loss[loss=0.1487, simple_loss=0.2334, pruned_loss=0.03201, over 4639.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2658, pruned_loss=0.04252, over 911541.53 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:34:14,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=219021.33333333334, ans=0.125 +2024-07-29 00:34:37,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=219061.33333333334, ans=0.125 +2024-07-29 00:34:41,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=219061.33333333334, ans=0.125 +2024-07-29 00:34:41,478 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.477e+01 6.099e+01 6.897e+01 1.014e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 00:34:44,982 INFO [train.py:1114] (1/4) Epoch 17, batch 750, loss[loss=0.1835, simple_loss=0.2704, pruned_loss=0.04835, over 4691.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2654, pruned_loss=0.04239, over 918777.04 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:34:57,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=12.0 +2024-07-29 00:35:08,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219114.66666666666, ans=0.125 +2024-07-29 00:35:21,496 INFO [train.py:1114] (1/4) Epoch 17, batch 800, loss[loss=0.1471, simple_loss=0.2352, pruned_loss=0.02946, over 4853.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2656, pruned_loss=0.04264, over 924002.34 frames. 
], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:35:24,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=219141.33333333334, ans=0.0 +2024-07-29 00:35:24,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=219141.33333333334, ans=0.025 +2024-07-29 00:35:26,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=219141.33333333334, ans=0.125 +2024-07-29 00:35:30,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=219154.66666666666, ans=0.125 +2024-07-29 00:35:34,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=219168.0, ans=0.025 +2024-07-29 00:35:37,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=219168.0, ans=6.0 +2024-07-29 00:35:44,718 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=12.0 +2024-07-29 00:35:49,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=219194.66666666666, ans=0.125 +2024-07-29 00:35:52,215 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.327e+01 5.596e+01 6.013e+01 6.802e+01 9.397e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-29 00:35:52,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=219194.66666666666, ans=0.0 +2024-07-29 00:35:55,663 INFO [train.py:1114] (1/4) Epoch 17, batch 850, loss[loss=0.1872, simple_loss=0.2916, pruned_loss=0.04144, over 4666.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.266, pruned_loss=0.04277, over 927837.58 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:35:56,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=219208.0, ans=0.015 +2024-07-29 00:36:10,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=219234.66666666666, ans=0.0 +2024-07-29 00:36:20,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219248.0, ans=0.1 +2024-07-29 00:36:31,445 INFO [train.py:1114] (1/4) Epoch 17, batch 900, loss[loss=0.1383, simple_loss=0.2349, pruned_loss=0.0208, over 4863.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2664, pruned_loss=0.04273, over 928319.60 frames. 
], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:36:35,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=219274.66666666666, ans=0.125 +2024-07-29 00:36:36,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=219274.66666666666, ans=0.05 +2024-07-29 00:36:39,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=219288.0, ans=0.0 +2024-07-29 00:36:44,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219301.33333333334, ans=0.1 +2024-07-29 00:36:45,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=219301.33333333334, ans=0.0 +2024-07-29 00:36:45,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=219301.33333333334, ans=0.2 +2024-07-29 00:36:46,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219301.33333333334, ans=0.125 +2024-07-29 00:37:01,135 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.608e+01 6.144e+01 6.799e+01 1.059e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 00:37:04,601 INFO [train.py:1114] (1/4) Epoch 17, batch 950, loss[loss=0.1735, simple_loss=0.2617, pruned_loss=0.04261, over 4781.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2673, pruned_loss=0.04292, over 929733.81 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:37:13,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=219354.66666666666, ans=0.125 +2024-07-29 00:37:16,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=219354.66666666666, ans=0.125 +2024-07-29 00:37:19,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219368.0, ans=0.1 +2024-07-29 00:37:23,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.34 vs. limit=15.0 +2024-07-29 00:37:28,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=219381.33333333334, ans=0.025 +2024-07-29 00:37:34,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=219394.66666666666, ans=0.125 +2024-07-29 00:37:41,928 INFO [train.py:1114] (1/4) Epoch 17, batch 1000, loss[loss=0.1785, simple_loss=0.2691, pruned_loss=0.04397, over 4970.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2676, pruned_loss=0.0433, over 929681.10 frames. 
], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:37:46,000 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:37:49,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219421.33333333334, ans=0.1 +2024-07-29 00:37:51,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219421.33333333334, ans=0.1 +2024-07-29 00:38:08,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=219461.33333333334, ans=0.125 +2024-07-29 00:38:13,341 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.640e+01 5.981e+01 6.813e+01 9.582e+01, threshold=1.196e+02, percent-clipped=0.0 +2024-07-29 00:38:16,859 INFO [train.py:1114] (1/4) Epoch 17, batch 1050, loss[loss=0.1875, simple_loss=0.2816, pruned_loss=0.0467, over 4881.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2668, pruned_loss=0.04263, over 932190.35 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:38:42,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=219501.33333333334, ans=0.125 +2024-07-29 00:38:50,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=219528.0, ans=0.125 +2024-07-29 00:38:57,325 INFO [train.py:1114] (1/4) Epoch 17, batch 1100, loss[loss=0.1734, simple_loss=0.2639, pruned_loss=0.04148, over 4889.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04311, over 934572.87 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:39:01,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=219541.33333333334, ans=0.0 +2024-07-29 00:39:10,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=219568.0, ans=0.0 +2024-07-29 00:39:10,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=219568.0, ans=0.2 +2024-07-29 00:39:11,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=219568.0, ans=0.125 +2024-07-29 00:39:15,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219568.0, ans=0.125 +2024-07-29 00:39:17,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219581.33333333334, ans=0.1 +2024-07-29 00:39:19,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=219581.33333333334, ans=0.125 +2024-07-29 00:39:22,932 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:39:27,540 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.574e+01 5.915e+01 6.747e+01 1.337e+02, threshold=1.183e+02, percent-clipped=1.0 +2024-07-29 00:39:30,884 INFO [train.py:1114] (1/4) Epoch 17, batch 1150, loss[loss=0.1522, simple_loss=0.2452, pruned_loss=0.02958, over 4889.00 frames. 
], tot_loss[loss=0.1771, simple_loss=0.2673, pruned_loss=0.04342, over 934552.00 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:39:33,700 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:39:45,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-29 00:39:52,242 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=219648.0, ans=0.0 +2024-07-29 00:40:04,852 INFO [train.py:1114] (1/4) Epoch 17, batch 1200, loss[loss=0.2017, simple_loss=0.2921, pruned_loss=0.05568, over 4873.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.269, pruned_loss=0.04386, over 933413.60 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:40:25,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=219714.66666666666, ans=0.05 +2024-07-29 00:40:29,339 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:40:31,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=219714.66666666666, ans=0.0 +2024-07-29 00:40:37,451 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.543e+01 6.182e+01 6.957e+01 1.085e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 00:40:41,019 INFO [train.py:1114] (1/4) Epoch 17, batch 1250, loss[loss=0.192, simple_loss=0.2881, pruned_loss=0.04798, over 4802.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.269, pruned_loss=0.04362, over 937451.59 frames. ], batch size: 15, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:40:46,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=219741.33333333334, ans=0.0 +2024-07-29 00:41:17,213 INFO [train.py:1114] (1/4) Epoch 17, batch 1300, loss[loss=0.1998, simple_loss=0.2962, pruned_loss=0.0517, over 4741.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2677, pruned_loss=0.04318, over 938895.51 frames. ], batch size: 19, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:41:28,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=15.0 +2024-07-29 00:41:34,509 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. 
limit=15.0 +2024-07-29 00:41:38,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219834.66666666666, ans=0.0 +2024-07-29 00:41:47,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=219848.0, ans=0.2 +2024-07-29 00:41:49,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=219861.33333333334, ans=0.0 +2024-07-29 00:41:52,950 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.796e+01 6.477e+01 7.611e+01 1.197e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 00:41:56,416 INFO [train.py:1114] (1/4) Epoch 17, batch 1350, loss[loss=0.1828, simple_loss=0.2663, pruned_loss=0.0497, over 4763.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2676, pruned_loss=0.04303, over 940917.21 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:41:58,886 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=219874.66666666666, ans=0.0 +2024-07-29 00:42:01,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.15 vs. limit=22.5 +2024-07-29 00:42:01,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=219874.66666666666, ans=0.125 +2024-07-29 00:42:24,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=219914.66666666666, ans=0.025 +2024-07-29 00:42:28,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=219928.0, ans=0.0 +2024-07-29 00:42:31,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=219928.0, ans=0.125 +2024-07-29 00:42:33,533 INFO [train.py:1114] (1/4) Epoch 17, batch 1400, loss[loss=0.1288, simple_loss=0.2148, pruned_loss=0.02143, over 4705.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2662, pruned_loss=0.04259, over 943061.79 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:42:41,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=219954.66666666666, ans=0.0 +2024-07-29 00:42:47,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=219968.0, ans=0.125 +2024-07-29 00:42:49,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219968.0, ans=0.1 +2024-07-29 00:43:06,594 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.575e+01 5.917e+01 6.621e+01 1.311e+02, threshold=1.183e+02, percent-clipped=1.0 +2024-07-29 00:43:06,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=219994.66666666666, ans=0.0 +2024-07-29 00:43:06,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219994.66666666666, ans=0.1 +2024-07-29 00:43:10,157 INFO [train.py:1114] (1/4) Epoch 17, batch 1450, loss[loss=0.1793, simple_loss=0.2845, pruned_loss=0.03707, over 4677.00 frames. 
], tot_loss[loss=0.1762, simple_loss=0.267, pruned_loss=0.04271, over 943302.66 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:43:18,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=220008.0, ans=0.125 +2024-07-29 00:43:28,873 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=220034.66666666666, ans=0.0 +2024-07-29 00:43:43,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.38 vs. limit=10.0 +2024-07-29 00:43:43,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=6.0 +2024-07-29 00:43:48,450 INFO [train.py:1114] (1/4) Epoch 17, batch 1500, loss[loss=0.1534, simple_loss=0.2404, pruned_loss=0.03319, over 4809.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2676, pruned_loss=0.04306, over 942855.41 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:43:56,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=220088.0, ans=0.0 +2024-07-29 00:44:03,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220101.33333333334, ans=0.1 +2024-07-29 00:44:09,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220114.66666666666, ans=0.125 +2024-07-29 00:44:11,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220114.66666666666, ans=0.0 +2024-07-29 00:44:11,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220114.66666666666, ans=0.125 +2024-07-29 00:44:13,168 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:44:15,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.07 vs. limit=22.5 +2024-07-29 00:44:18,915 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.771e+01 6.251e+01 6.983e+01 1.071e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 00:44:19,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220128.0, ans=0.125 +2024-07-29 00:44:20,427 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=220128.0, ans=0.0 +2024-07-29 00:44:22,217 INFO [train.py:1114] (1/4) Epoch 17, batch 1550, loss[loss=0.1932, simple_loss=0.2998, pruned_loss=0.04333, over 4903.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2677, pruned_loss=0.04321, over 939177.40 frames. 
], batch size: 15, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:44:35,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=220168.0, ans=0.0 +2024-07-29 00:44:35,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220168.0, ans=0.125 +2024-07-29 00:44:55,765 INFO [train.py:1114] (1/4) Epoch 17, batch 1600, loss[loss=0.1609, simple_loss=0.249, pruned_loss=0.03635, over 4870.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2667, pruned_loss=0.04268, over 937644.12 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:44:55,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=220208.0, ans=0.125 +2024-07-29 00:45:03,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.15 vs. limit=15.0 +2024-07-29 00:45:04,591 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=220221.33333333334, ans=0.0 +2024-07-29 00:45:11,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.95 vs. limit=10.0 +2024-07-29 00:45:16,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=220248.0, ans=0.5 +2024-07-29 00:45:19,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.95 vs. limit=15.0 +2024-07-29 00:45:24,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=220261.33333333334, ans=0.05 +2024-07-29 00:45:24,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=220261.33333333334, ans=0.125 +2024-07-29 00:45:26,614 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.909e+01 5.495e+01 6.270e+01 6.960e+01 9.456e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:45:30,189 INFO [train.py:1114] (1/4) Epoch 17, batch 1650, loss[loss=0.1745, simple_loss=0.2624, pruned_loss=0.04324, over 4667.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.267, pruned_loss=0.04333, over 937365.75 frames. 
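The periodic `optim.py:487` warnings summarize the optimizer's gradient-norm clipping statistics. The five numbers after "grad-norm quartiles" are quantiles (min, 25%, 50%, 75%, max) of recently observed gradient norms, and in every warning above the reported threshold is `Clipping_scale` times the median (e.g. 2.0 × 5.995e+01 ≈ 1.199e+02 in the first one); `percent-clipped` is the share of batches whose norm exceeded it. The sketch below reproduces only this logged arithmetic, not the actual ScaledAdam clipping code in `optim.py`.

```python
import numpy as np


def clipping_summary(grad_norms: np.ndarray, clipping_scale: float = 2.0):
    """Quantiles of recent per-batch gradient norms, plus a clipping
    threshold of clipping_scale * median -- the quantities printed in the
    'Clipping_scale=..., grad-norm quartiles ...' warnings."""
    quartiles = np.quantile(grad_norms, [0.0, 0.25, 0.5, 0.75, 1.0])
    threshold = clipping_scale * quartiles[2]  # 2.0 x median
    percent_clipped = 100.0 * np.mean(grad_norms > threshold)
    return quartiles, threshold, percent_clipped


# Synthetic norms with a median near 60, echoing the magnitudes logged above.
rng = np.random.default_rng(0)
quartiles, threshold, pct = clipping_summary(rng.normal(60.0, 10.0, size=128))
print(quartiles, threshold, pct)
```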
], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:45:30,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=220274.66666666666, ans=0.125 +2024-07-29 00:45:35,912 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=220274.66666666666, ans=0.0 +2024-07-29 00:45:40,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220288.0, ans=0.125 +2024-07-29 00:45:53,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220314.66666666666, ans=0.1 +2024-07-29 00:45:55,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220314.66666666666, ans=0.125 +2024-07-29 00:45:58,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=220328.0, ans=0.0 +2024-07-29 00:45:59,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=220328.0, ans=0.0 +2024-07-29 00:46:00,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220328.0, ans=0.125 +2024-07-29 00:46:04,530 INFO [train.py:1114] (1/4) Epoch 17, batch 1700, loss[loss=0.1663, simple_loss=0.2496, pruned_loss=0.04153, over 4688.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2665, pruned_loss=0.04331, over 939118.70 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:46:16,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=15.0 +2024-07-29 00:46:17,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=220354.66666666666, ans=0.125 +2024-07-29 00:46:21,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=220368.0, ans=0.04949747468305833 +2024-07-29 00:46:25,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=220368.0, ans=0.2 +2024-07-29 00:46:30,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=220381.33333333334, ans=0.0 +2024-07-29 00:46:34,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=220394.66666666666, ans=0.0 +2024-07-29 00:46:36,627 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.850e+01 6.496e+01 7.744e+01 1.150e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-29 00:46:40,162 INFO [train.py:1114] (1/4) Epoch 17, batch 1750, loss[loss=0.1421, simple_loss=0.2337, pruned_loss=0.02523, over 4812.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2657, pruned_loss=0.0428, over 939964.20 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:46:41,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.11 vs. 
limit=15.0 +2024-07-29 00:46:48,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=220421.33333333334, ans=0.125 +2024-07-29 00:47:10,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=22.5 +2024-07-29 00:47:13,448 INFO [train.py:1114] (1/4) Epoch 17, batch 1800, loss[loss=0.178, simple_loss=0.2739, pruned_loss=0.04101, over 4637.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2659, pruned_loss=0.043, over 940641.56 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:47:14,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=220474.66666666666, ans=0.125 +2024-07-29 00:47:20,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.66 vs. limit=22.5 +2024-07-29 00:47:25,986 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.52 vs. limit=15.0 +2024-07-29 00:47:27,018 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:47:27,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=220501.33333333334, ans=0.2 +2024-07-29 00:47:31,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=220501.33333333334, ans=0.07 +2024-07-29 00:47:45,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.725e+01 6.271e+01 7.257e+01 1.188e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:47:49,040 INFO [train.py:1114] (1/4) Epoch 17, batch 1850, loss[loss=0.1788, simple_loss=0.2771, pruned_loss=0.04028, over 4814.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.266, pruned_loss=0.0431, over 940814.32 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:47:51,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220541.33333333334, ans=0.1 +2024-07-29 00:47:53,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=220541.33333333334, ans=0.0 +2024-07-29 00:47:57,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.62 vs. limit=22.5 +2024-07-29 00:47:59,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=220554.66666666666, ans=0.0 +2024-07-29 00:48:01,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.73 vs. limit=12.0 +2024-07-29 00:48:02,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.08 vs. 
limit=15.0 +2024-07-29 00:48:08,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=220581.33333333334, ans=0.125 +2024-07-29 00:48:12,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220581.33333333334, ans=0.1 +2024-07-29 00:48:15,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220594.66666666666, ans=0.125 +2024-07-29 00:48:20,235 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.61 vs. limit=15.0 +2024-07-29 00:48:21,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.76 vs. limit=15.0 +2024-07-29 00:48:23,127 INFO [train.py:1114] (1/4) Epoch 17, batch 1900, loss[loss=0.1524, simple_loss=0.2539, pruned_loss=0.02544, over 4657.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2664, pruned_loss=0.04298, over 941835.56 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:48:33,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=220621.33333333334, ans=0.0 +2024-07-29 00:48:34,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=220621.33333333334, ans=0.125 +2024-07-29 00:48:36,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=220634.66666666666, ans=0.125 +2024-07-29 00:48:54,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=220661.33333333334, ans=0.125 +2024-07-29 00:48:55,007 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.883e+01 6.427e+01 8.062e+01 1.126e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-29 00:49:13,497 INFO [train.py:1114] (1/4) Epoch 17, batch 1950, loss[loss=0.16, simple_loss=0.2555, pruned_loss=0.03223, over 4910.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2679, pruned_loss=0.04298, over 943751.39 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:49:13,944 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0 +2024-07-29 00:50:01,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=220714.66666666666, ans=0.05 +2024-07-29 00:50:03,494 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=220714.66666666666, ans=0.0 +2024-07-29 00:50:03,621 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.07 vs. limit=12.0 +2024-07-29 00:50:27,341 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:50:28,631 INFO [train.py:1114] (1/4) Epoch 17, batch 2000, loss[loss=0.1778, simple_loss=0.2605, pruned_loss=0.04756, over 4800.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2688, pruned_loss=0.04335, over 940861.77 frames. 
], batch size: 11, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:51:01,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.55 vs. limit=15.0 +2024-07-29 00:51:02,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220754.66666666666, ans=0.1 +2024-07-29 00:51:08,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=220754.66666666666, ans=0.125 +2024-07-29 00:51:10,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=220754.66666666666, ans=0.0 +2024-07-29 00:51:11,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=220768.0, ans=0.2 +2024-07-29 00:51:15,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.79 vs. limit=15.0 +2024-07-29 00:51:25,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=220781.33333333334, ans=0.125 +2024-07-29 00:51:32,746 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.436e+01 5.997e+01 6.741e+01 1.066e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 00:51:36,126 INFO [train.py:1114] (1/4) Epoch 17, batch 2050, loss[loss=0.1611, simple_loss=0.2408, pruned_loss=0.04072, over 4616.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2688, pruned_loss=0.04342, over 938976.94 frames. ], batch size: 11, lr: 4.42e-03, grad_scale: 64.0 +2024-07-29 00:51:40,571 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=22.5 +2024-07-29 00:51:41,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=220808.0, ans=0.0 +2024-07-29 00:51:43,976 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=220821.33333333334, ans=0.125 +2024-07-29 00:51:44,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=220821.33333333334, ans=0.035 +2024-07-29 00:51:55,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-07-29 00:51:55,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220848.0, ans=0.1 +2024-07-29 00:51:59,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.40 vs. limit=22.5 +2024-07-29 00:52:00,496 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.79 vs. limit=22.5 +2024-07-29 00:52:05,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220861.33333333334, ans=0.1 +2024-07-29 00:52:17,008 INFO [train.py:1114] (1/4) Epoch 17, batch 2100, loss[loss=0.1837, simple_loss=0.2741, pruned_loss=0.0467, over 4761.00 frames. 
], tot_loss[loss=0.1771, simple_loss=0.2682, pruned_loss=0.04305, over 940601.73 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:52:22,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=220874.66666666666, ans=0.125 +2024-07-29 00:52:34,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=220901.33333333334, ans=0.025 +2024-07-29 00:52:43,364 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:52:44,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=220928.0, ans=10.0 +2024-07-29 00:52:48,038 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.567e+01 6.209e+01 7.288e+01 1.074e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 00:52:50,925 INFO [train.py:1114] (1/4) Epoch 17, batch 2150, loss[loss=0.1692, simple_loss=0.2534, pruned_loss=0.04253, over 4896.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2667, pruned_loss=0.04223, over 943593.50 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:53:12,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.14 vs. limit=15.0 +2024-07-29 00:53:22,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.34 vs. limit=22.5 +2024-07-29 00:53:26,709 INFO [train.py:1114] (1/4) Epoch 17, batch 2200, loss[loss=0.199, simple_loss=0.302, pruned_loss=0.04799, over 4815.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2671, pruned_loss=0.04263, over 942839.61 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:53:30,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=221008.0, ans=0.025 +2024-07-29 00:53:33,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=221021.33333333334, ans=0.125 +2024-07-29 00:53:35,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.whiten.whitening_limit, batch_count=221021.33333333334, ans=12.0 +2024-07-29 00:53:40,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=221034.66666666666, ans=0.0 +2024-07-29 00:53:55,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=221061.33333333334, ans=0.2 +2024-07-29 00:53:57,772 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.667e+01 6.562e+01 7.774e+01 1.023e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 00:54:00,464 INFO [train.py:1114] (1/4) Epoch 17, batch 2250, loss[loss=0.1594, simple_loss=0.253, pruned_loss=0.03285, over 4692.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2671, pruned_loss=0.04275, over 941993.95 frames. 
], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:54:01,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221074.66666666666, ans=0.125 +2024-07-29 00:54:02,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=221074.66666666666, ans=0.125 +2024-07-29 00:54:03,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.49 vs. limit=12.0 +2024-07-29 00:54:06,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0 +2024-07-29 00:54:07,973 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=221088.0, ans=0.125 +2024-07-29 00:54:15,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=221101.33333333334, ans=0.035 +2024-07-29 00:54:23,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=221114.66666666666, ans=0.125 +2024-07-29 00:54:23,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221114.66666666666, ans=0.125 +2024-07-29 00:54:23,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221114.66666666666, ans=0.125 +2024-07-29 00:54:24,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221114.66666666666, ans=0.125 +2024-07-29 00:54:33,728 INFO [train.py:1114] (1/4) Epoch 17, batch 2300, loss[loss=0.1465, simple_loss=0.2286, pruned_loss=0.03221, over 4921.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.266, pruned_loss=0.04283, over 939394.49 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:54:50,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=221168.0, ans=0.0 +2024-07-29 00:54:50,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=221168.0, ans=0.07 +2024-07-29 00:54:51,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=221168.0, ans=0.2 +2024-07-29 00:54:52,406 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.68 vs. limit=6.0 +2024-07-29 00:55:01,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221181.33333333334, ans=0.125 +2024-07-29 00:55:05,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=221194.66666666666, ans=0.125 +2024-07-29 00:55:06,975 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.506e+01 6.021e+01 6.838e+01 1.144e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 00:55:09,637 INFO [train.py:1114] (1/4) Epoch 17, batch 2350, loss[loss=0.1746, simple_loss=0.2748, pruned_loss=0.0372, over 4635.00 frames. 
], tot_loss[loss=0.1762, simple_loss=0.2664, pruned_loss=0.04298, over 941479.75 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:55:10,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=221208.0, ans=0.0 +2024-07-29 00:55:14,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221208.0, ans=0.1 +2024-07-29 00:55:15,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221208.0, ans=0.1 +2024-07-29 00:55:16,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=221221.33333333334, ans=0.125 +2024-07-29 00:55:29,541 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:55:43,235 INFO [train.py:1114] (1/4) Epoch 17, batch 2400, loss[loss=0.1728, simple_loss=0.2587, pruned_loss=0.04348, over 4638.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2665, pruned_loss=0.04301, over 941474.47 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:55:55,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=221288.0, ans=0.0 +2024-07-29 00:55:56,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221288.0, ans=0.1 +2024-07-29 00:55:58,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=221301.33333333334, ans=0.125 +2024-07-29 00:56:13,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.97 vs. limit=15.0 +2024-07-29 00:56:17,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 5.694e+01 6.302e+01 6.928e+01 9.959e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 00:56:20,725 INFO [train.py:1114] (1/4) Epoch 17, batch 2450, loss[loss=0.1682, simple_loss=0.2607, pruned_loss=0.03787, over 4691.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2684, pruned_loss=0.04412, over 937054.76 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:56:24,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=221341.33333333334, ans=0.0 +2024-07-29 00:56:24,459 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.96 vs. limit=10.0 +2024-07-29 00:56:29,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.59 vs. limit=6.0 +2024-07-29 00:56:35,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=221368.0, ans=0.0 +2024-07-29 00:56:54,118 INFO [train.py:1114] (1/4) Epoch 17, batch 2500, loss[loss=0.1603, simple_loss=0.2613, pruned_loss=0.02959, over 4805.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04377, over 938721.87 frames. 
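The `scaling.py:1024` "Whitening" lines compare a per-module statistic against a scheduled limit (the whitening penalty engages only once `metric` exceeds `limit`). One plausible reading of that metric, sketched below, is the ratio of the mean squared eigenvalue of the feature covariance to its squared mean eigenvalue: exactly 1.0 when the covariance is a multiple of the identity (fully "white") and larger otherwise. The real module in `scaling.py` works per channel group and on torch tensors; this NumPy version is for intuition only.

```python
import numpy as np


def whitening_metric(x: np.ndarray) -> float:
    """mean(eigenvalue^2) / mean(eigenvalue)^2 of the covariance of x,
    which is >= 1.0 with equality iff the covariance is isotropic."""
    x = x - x.mean(axis=0, keepdims=True)
    cov = (x.T @ x) / x.shape[0]
    d = cov.shape[0]
    # For a symmetric matrix, the sum of squared eigenvalues equals the
    # squared Frobenius norm, so no eigendecomposition is needed.
    mean_sq_eig = float((cov**2).sum()) / d
    sq_mean_eig = float(np.trace(cov) / d) ** 2
    return mean_sq_eig / sq_mean_eig


rng = np.random.default_rng(0)
print(whitening_metric(rng.normal(size=(10_000, 8))))                    # ~1.0
print(whitening_metric(rng.normal(size=(10_000, 8)) * np.arange(1, 9)))  # >1
```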
], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:57:04,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=221421.33333333334, ans=0.09899494936611666 +2024-07-29 00:57:11,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=221434.66666666666, ans=0.025 +2024-07-29 00:57:14,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=221448.0, ans=0.0 +2024-07-29 00:57:21,803 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221461.33333333334, ans=0.125 +2024-07-29 00:57:24,996 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.455e+01 6.019e+01 6.790e+01 9.676e+01, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 00:57:27,772 INFO [train.py:1114] (1/4) Epoch 17, batch 2550, loss[loss=0.1758, simple_loss=0.252, pruned_loss=0.04986, over 4799.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2677, pruned_loss=0.04361, over 938162.64 frames. ], batch size: 11, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:57:27,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=221474.66666666666, ans=0.125 +2024-07-29 00:57:29,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=221474.66666666666, ans=0.125 +2024-07-29 00:57:30,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=221474.66666666666, ans=0.125 +2024-07-29 00:57:33,801 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.32 vs. limit=15.0 +2024-07-29 00:57:51,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=221501.33333333334, ans=0.5 +2024-07-29 00:57:53,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=221501.33333333334, ans=0.07 +2024-07-29 00:58:22,109 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.32 vs. limit=15.0 +2024-07-29 00:58:22,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221514.66666666666, ans=0.1 +2024-07-29 00:58:30,270 INFO [train.py:1114] (1/4) Epoch 17, batch 2600, loss[loss=0.1613, simple_loss=0.2506, pruned_loss=0.036, over 4898.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2677, pruned_loss=0.0432, over 937797.67 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:58:58,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221581.33333333334, ans=0.1 +2024-07-29 00:59:02,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=221581.33333333334, ans=0.2 +2024-07-29 00:59:02,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.32 vs. 
limit=15.0 +2024-07-29 00:59:03,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.23 vs. limit=15.0 +2024-07-29 00:59:06,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221594.66666666666, ans=0.125 +2024-07-29 00:59:07,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=221594.66666666666, ans=0.0 +2024-07-29 00:59:08,387 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.744e+01 6.230e+01 7.123e+01 1.037e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 00:59:21,106 INFO [train.py:1114] (1/4) Epoch 17, batch 2650, loss[loss=0.2021, simple_loss=0.307, pruned_loss=0.04857, over 4594.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2686, pruned_loss=0.04347, over 939604.25 frames. ], batch size: 16, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:59:21,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=221608.0, ans=0.1 +2024-07-29 00:59:22,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=221608.0, ans=0.025 +2024-07-29 00:59:32,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221621.33333333334, ans=0.0 +2024-07-29 00:59:38,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.89 vs. limit=15.0 +2024-07-29 00:59:43,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221648.0, ans=0.1 +2024-07-29 00:59:45,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.19 vs. limit=15.0 +2024-07-29 00:59:49,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.26 vs. limit=15.0 +2024-07-29 00:59:54,776 INFO [train.py:1114] (1/4) Epoch 17, batch 2700, loss[loss=0.1799, simple_loss=0.2638, pruned_loss=0.04801, over 4736.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2675, pruned_loss=0.04324, over 939453.54 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 01:00:03,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221688.0, ans=0.1 +2024-07-29 01:00:09,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=221701.33333333334, ans=0.1 +2024-07-29 01:00:12,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. 
limit=15.0 +2024-07-29 01:00:20,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=221714.66666666666, ans=0.0 +2024-07-29 01:00:33,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221728.0, ans=0.125 +2024-07-29 01:00:37,056 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.539e+01 6.361e+01 7.423e+01 1.026e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 01:00:39,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=221728.0, ans=0.125 +2024-07-29 01:00:41,153 INFO [train.py:1114] (1/4) Epoch 17, batch 2750, loss[loss=0.1763, simple_loss=0.2775, pruned_loss=0.0376, over 4715.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2667, pruned_loss=0.04292, over 939183.21 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 01:00:44,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221741.33333333334, ans=0.125 +2024-07-29 01:00:44,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=221741.33333333334, ans=0.125 +2024-07-29 01:00:44,675 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:00:48,139 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.40 vs. limit=15.0 +2024-07-29 01:00:48,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221754.66666666666, ans=0.1 +2024-07-29 01:00:53,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221754.66666666666, ans=0.125 +2024-07-29 01:01:01,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=221768.0, ans=0.125 +2024-07-29 01:01:10,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221781.33333333334, ans=0.0 +2024-07-29 01:01:11,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221781.33333333334, ans=0.125 +2024-07-29 01:01:22,008 INFO [train.py:1114] (1/4) Epoch 17, batch 2800, loss[loss=0.243, simple_loss=0.3159, pruned_loss=0.08512, over 3405.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.267, pruned_loss=0.0433, over 937600.15 frames. ], batch size: 36, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:01:51,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.10 vs. 
limit=6.0 +2024-07-29 01:01:53,597 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.698e+01 6.511e+01 7.478e+01 1.084e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 01:01:53,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=221861.33333333334, ans=0.0 +2024-07-29 01:01:53,896 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.12 vs. limit=12.0 +2024-07-29 01:01:56,358 INFO [train.py:1114] (1/4) Epoch 17, batch 2850, loss[loss=0.1767, simple_loss=0.2629, pruned_loss=0.04521, over 4964.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2681, pruned_loss=0.04394, over 935835.83 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:02:08,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=221888.0, ans=0.0 +2024-07-29 01:02:12,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=221888.0, ans=0.125 +2024-07-29 01:02:14,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=221901.33333333334, ans=0.2 +2024-07-29 01:02:15,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=221901.33333333334, ans=0.025 +2024-07-29 01:02:20,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=221901.33333333334, ans=0.125 +2024-07-29 01:02:34,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=221928.0, ans=15.0 +2024-07-29 01:02:34,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=221941.33333333334, ans=0.05 +2024-07-29 01:02:35,086 INFO [train.py:1114] (1/4) Epoch 17, batch 2900, loss[loss=0.1595, simple_loss=0.2465, pruned_loss=0.03624, over 4824.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2685, pruned_loss=0.04379, over 939556.11 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:02:35,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221941.33333333334, ans=0.125 +2024-07-29 01:02:44,710 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.60 vs. limit=15.0 +2024-07-29 01:03:06,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=221994.66666666666, ans=0.5 +2024-07-29 01:03:07,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.542e+01 6.312e+01 7.539e+01 1.199e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-29 01:03:09,816 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. 
limit=6.0 +2024-07-29 01:03:10,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=222008.0, ans=0.0 +2024-07-29 01:03:10,782 INFO [train.py:1114] (1/4) Epoch 17, batch 2950, loss[loss=0.1618, simple_loss=0.253, pruned_loss=0.03527, over 4720.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2673, pruned_loss=0.04354, over 938545.11 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:03:18,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=222021.33333333334, ans=0.125 +2024-07-29 01:03:25,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=15.0 +2024-07-29 01:03:29,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=222034.66666666666, ans=0.0 +2024-07-29 01:03:39,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=222061.33333333334, ans=0.0 +2024-07-29 01:03:39,890 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.84 vs. limit=8.0 +2024-07-29 01:03:44,746 INFO [train.py:1114] (1/4) Epoch 17, batch 3000, loss[loss=0.1706, simple_loss=0.2578, pruned_loss=0.0417, over 4758.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2668, pruned_loss=0.04337, over 938547.22 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:03:44,747 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 01:04:05,746 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.1635, simple_loss=0.2655, pruned_loss=0.03068, over 944034.00 frames. +2024-07-29 01:04:05,747 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 01:04:08,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=222074.66666666666, ans=0.0 +2024-07-29 01:04:16,527 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.63 vs. limit=15.0 +2024-07-29 01:04:17,920 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.80 vs. limit=15.0 +2024-07-29 01:04:25,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=222101.33333333334, ans=0.2 +2024-07-29 01:04:34,716 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222128.0, ans=0.1 +2024-07-29 01:04:37,606 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.724e+01 6.244e+01 7.233e+01 1.089e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:04:38,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.49 vs. limit=22.5 +2024-07-29 01:04:40,425 INFO [train.py:1114] (1/4) Epoch 17, batch 3050, loss[loss=0.163, simple_loss=0.2599, pruned_loss=0.03309, over 4642.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2678, pruned_loss=0.04355, over 937641.50 frames. 
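The recurring `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=...` entries above report the min/25%/median/75%/max of recently observed gradient norms; in every such entry the threshold equals `Clipping_scale` times the logged median (e.g. 2.0 × 6.230e+01 = 1.246e+02). Below is a minimal sketch of such an adaptive clipping rule — the window size, the `torch.quantile` call, and the function name are assumptions for illustration, not the repo's actual `optim.py`:

```python
import torch

def adaptive_grad_clip(parameters, norm_history, clipping_scale=2.0, window=128):
    """Illustrative sketch only: clip the global grad norm against
    clipping_scale * median of recently seen norms, reporting the same
    quartiles the log prints."""
    grads = [p.grad for p in parameters if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.detach().norm() for g in grads]))

    norm_history.append(total_norm.item())
    del norm_history[:-window]  # keep only the most recent `window` norms

    quartiles = torch.quantile(torch.tensor(norm_history),
                               torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2]  # 2.0 x median, as logged

    if total_norm > threshold:  # contributes to the logged percent-clipped
        for g in grads:
            g.mul_(threshold / total_norm)
    return quartiles, threshold
```

The logged `percent-clipped` would then be the fraction of batches since the previous report in which the clip actually fired.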
], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:04:47,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=222154.66666666666, ans=0.2 +2024-07-29 01:04:52,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=222154.66666666666, ans=0.2 +2024-07-29 01:04:57,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=222168.0, ans=0.0 +2024-07-29 01:05:03,558 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:05:05,850 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.21 vs. limit=15.0 +2024-07-29 01:05:08,370 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.20 vs. limit=6.0 +2024-07-29 01:05:11,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=222194.66666666666, ans=0.05 +2024-07-29 01:05:13,811 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0 +2024-07-29 01:05:15,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=222208.0, ans=0.125 +2024-07-29 01:05:16,133 INFO [train.py:1114] (1/4) Epoch 17, batch 3100, loss[loss=0.1991, simple_loss=0.2789, pruned_loss=0.05965, over 4621.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2677, pruned_loss=0.0436, over 938032.78 frames. ], batch size: 16, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:05:16,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=222208.0, ans=0.0 +2024-07-29 01:05:17,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=222208.0, ans=0.0 +2024-07-29 01:05:21,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=222208.0, ans=0.2 +2024-07-29 01:05:35,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=222234.66666666666, ans=0.2 +2024-07-29 01:05:40,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222234.66666666666, ans=0.125 +2024-07-29 01:05:51,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 5.691e+01 6.608e+01 7.636e+01 1.029e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-29 01:05:54,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=222274.66666666666, ans=0.125 +2024-07-29 01:05:54,536 INFO [train.py:1114] (1/4) Epoch 17, batch 3150, loss[loss=0.1711, simple_loss=0.2757, pruned_loss=0.03325, over 4615.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2677, pruned_loss=0.0433, over 938759.64 frames. 
], batch size: 17, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:05:59,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.87 vs. limit=15.0 +2024-07-29 01:06:01,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.98 vs. limit=15.0 +2024-07-29 01:06:08,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=222288.0, ans=0.125 +2024-07-29 01:06:10,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=222301.33333333334, ans=0.125 +2024-07-29 01:06:14,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222301.33333333334, ans=0.125 +2024-07-29 01:06:15,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222301.33333333334, ans=0.125 +2024-07-29 01:06:19,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.10 vs. limit=15.0 +2024-07-29 01:06:29,713 INFO [train.py:1114] (1/4) Epoch 17, batch 3200, loss[loss=0.1443, simple_loss=0.2326, pruned_loss=0.02799, over 4819.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2662, pruned_loss=0.04277, over 940091.80 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:06:31,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=222341.33333333334, ans=0.0 +2024-07-29 01:06:34,081 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0 +2024-07-29 01:06:46,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=222368.0, ans=0.125 +2024-07-29 01:06:47,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=222368.0, ans=0.0 +2024-07-29 01:06:51,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=222381.33333333334, ans=0.125 +2024-07-29 01:07:02,015 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.614e+01 6.191e+01 6.817e+01 1.066e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 01:07:05,960 INFO [train.py:1114] (1/4) Epoch 17, batch 3250, loss[loss=0.1937, simple_loss=0.2912, pruned_loss=0.0481, over 4928.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2666, pruned_loss=0.04228, over 941005.23 frames. ], batch size: 14, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:07:24,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=22.5 +2024-07-29 01:07:27,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=222448.0, ans=0.0 +2024-07-29 01:07:30,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.22 vs. 
limit=15.0 +2024-07-29 01:07:39,378 INFO [train.py:1114] (1/4) Epoch 17, batch 3300, loss[loss=0.2027, simple_loss=0.2825, pruned_loss=0.06144, over 4750.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2655, pruned_loss=0.04187, over 941465.67 frames. ], batch size: 19, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:07:46,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=222474.66666666666, ans=0.2 +2024-07-29 01:07:53,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=222488.0, ans=0.025 +2024-07-29 01:07:58,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=222501.33333333334, ans=0.0 +2024-07-29 01:08:08,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=222528.0, ans=0.125 +2024-07-29 01:08:13,577 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.660e+01 6.307e+01 7.257e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 01:08:16,304 INFO [train.py:1114] (1/4) Epoch 17, batch 3350, loss[loss=0.1707, simple_loss=0.2658, pruned_loss=0.03774, over 4634.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.267, pruned_loss=0.04269, over 938999.89 frames. ], batch size: 17, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:08:16,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=222541.33333333334, ans=0.0 +2024-07-29 01:08:17,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.32 vs. limit=15.0 +2024-07-29 01:08:18,042 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0 +2024-07-29 01:08:18,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=222541.33333333334, ans=0.5 +2024-07-29 01:08:26,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=222554.66666666666, ans=0.125 +2024-07-29 01:08:28,489 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=222554.66666666666, ans=0.09899494936611666 +2024-07-29 01:08:37,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=222581.33333333334, ans=0.125 +2024-07-29 01:08:42,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222581.33333333334, ans=0.1 +2024-07-29 01:08:44,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=222594.66666666666, ans=0.125 +2024-07-29 01:08:51,958 INFO [train.py:1114] (1/4) Epoch 17, batch 3400, loss[loss=0.1572, simple_loss=0.2409, pruned_loss=0.03677, over 4799.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2675, pruned_loss=0.04325, over 937919.93 frames. 
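The `Computing validation loss` / `validation: loss=... over 944034.00 frames.` / `Maximum memory allocated so far is 4129MB` entries above bracket a periodic evaluation pass. A hedged sketch of what such a pass looks like — `model`, `valid_loader`, and `compute_loss` are placeholder names, not functions from the repo:

```python
import logging
import torch

def compute_validation_loss(model, valid_loader, compute_loss, device):
    """Sketch of a periodic validation pass; compute_loss is a stand-in
    returning (summed loss tensor, number of frames) for one batch."""
    logging.info("Computing validation loss")
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, frames = compute_loss(model, batch, device)
            tot_loss += loss.item()
            tot_frames += frames
    model.train()
    logging.info(f"validation: loss={tot_loss / tot_frames:.4f}, "
                 f"over {tot_frames:.2f} frames.")
    max_mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    logging.info(f"Maximum memory allocated so far is {max_mem_mb}MB")
```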
], batch size: 11, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:08:52,020 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=222608.0, ans=0.0 +2024-07-29 01:09:00,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=222608.0, ans=0.0 +2024-07-29 01:09:05,014 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=222621.33333333334, ans=0.2 +2024-07-29 01:09:09,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.78 vs. limit=22.5 +2024-07-29 01:09:09,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=222634.66666666666, ans=0.125 +2024-07-29 01:09:11,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=222634.66666666666, ans=0.0 +2024-07-29 01:09:27,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.040e+01 6.022e+01 6.843e+01 8.395e+01 1.350e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-29 01:09:28,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=222661.33333333334, ans=0.2 +2024-07-29 01:09:29,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222674.66666666666, ans=0.1 +2024-07-29 01:09:29,760 INFO [train.py:1114] (1/4) Epoch 17, batch 3450, loss[loss=0.1912, simple_loss=0.2822, pruned_loss=0.05008, over 4650.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2673, pruned_loss=0.04296, over 937919.37 frames. ], batch size: 19, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:09:31,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=222674.66666666666, ans=0.04949747468305833 +2024-07-29 01:09:35,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222674.66666666666, ans=0.1 +2024-07-29 01:10:01,171 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.50 vs. limit=15.0 +2024-07-29 01:10:02,873 INFO [train.py:1114] (1/4) Epoch 17, batch 3500, loss[loss=0.1504, simple_loss=0.2378, pruned_loss=0.03149, over 4936.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2663, pruned_loss=0.04253, over 938740.80 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:10:13,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=222754.66666666666, ans=0.0 +2024-07-29 01:10:16,239 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. limit=6.0 +2024-07-29 01:10:21,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222768.0, ans=0.1 +2024-07-29 01:10:26,050 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.60 vs. 
limit=15.0 +2024-07-29 01:10:26,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=222781.33333333334, ans=0.025 +2024-07-29 01:10:33,019 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:10:35,613 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.391e+01 6.097e+01 6.632e+01 8.722e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 01:10:38,347 INFO [train.py:1114] (1/4) Epoch 17, batch 3550, loss[loss=0.1664, simple_loss=0.2624, pruned_loss=0.03517, over 4676.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.266, pruned_loss=0.04258, over 939010.44 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:10:41,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=222808.0, ans=0.125 +2024-07-29 01:10:47,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222821.33333333334, ans=0.1 +2024-07-29 01:10:49,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=222821.33333333334, ans=0.2 +2024-07-29 01:10:50,824 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:10:50,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=222821.33333333334, ans=0.125 +2024-07-29 01:10:58,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222834.66666666666, ans=0.1 +2024-07-29 01:10:59,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=222834.66666666666, ans=0.2 +2024-07-29 01:11:01,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=222848.0, ans=0.125 +2024-07-29 01:11:06,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=222848.0, ans=0.2 +2024-07-29 01:11:11,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=222861.33333333334, ans=0.125 +2024-07-29 01:11:14,870 INFO [train.py:1114] (1/4) Epoch 17, batch 3600, loss[loss=0.1717, simple_loss=0.2588, pruned_loss=0.04229, over 4967.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2657, pruned_loss=0.04245, over 940681.27 frames. ], batch size: 13, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:11:24,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=222888.0, ans=0.125 +2024-07-29 01:11:47,271 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.818e+01 6.519e+01 7.348e+01 1.094e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-29 01:11:50,098 INFO [train.py:1114] (1/4) Epoch 17, batch 3650, loss[loss=0.209, simple_loss=0.2852, pruned_loss=0.06646, over 4890.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2657, pruned_loss=0.04245, over 940945.58 frames. 
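Nearly every `INFO [scaling.py:214]` entry prints a `ScheduledFloat`: a regularization hyperparameter (dropout probability, skip rate, balancer bound, whitening limit) whose value `ans` is a deterministic function of `batch_count`. A minimal sketch of a piecewise-linear schedule keyed on batch count; the breakpoints below are invented for illustration and are not taken from this run:

```python
class ScheduledFloat:
    """Sketch: piecewise-linear interpolation between (batch_count, value)
    breakpoints, constant outside the first/last breakpoint."""
    def __init__(self, *points):
        self.points = sorted(points)  # (batch_count, value) pairs

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(222768.0))  # past the last breakpoint -> 0.1, like the logged ans=0.1
```

This matches the pattern in the log, where late in training (batch_count > 220000) the dropout-style schedules have settled at their final values (e.g. `out_proj.dropout_p ... ans=0.1`).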
], batch size: 15, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:11:56,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222954.66666666666, ans=0.125 +2024-07-29 01:11:58,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=222954.66666666666, ans=0.0 +2024-07-29 01:12:02,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=222954.66666666666, ans=0.0 +2024-07-29 01:12:07,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222968.0, ans=0.1 +2024-07-29 01:12:08,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=222968.0, ans=0.0 +2024-07-29 01:12:20,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222981.33333333334, ans=0.1 +2024-07-29 01:12:34,074 INFO [train.py:1114] (1/4) Epoch 17, batch 3700, loss[loss=0.1777, simple_loss=0.2725, pruned_loss=0.04145, over 4926.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04231, over 942085.58 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:12:36,136 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=223008.0, ans=0.125 +2024-07-29 01:12:47,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223021.33333333334, ans=0.1 +2024-07-29 01:13:09,041 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.690e+01 6.166e+01 6.901e+01 9.277e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 01:13:14,299 INFO [train.py:1114] (1/4) Epoch 17, batch 3750, loss[loss=0.1842, simple_loss=0.2574, pruned_loss=0.05553, over 4801.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2655, pruned_loss=0.04229, over 943677.79 frames. ], batch size: 11, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:13:37,476 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.04 vs. limit=6.0 +2024-07-29 01:13:38,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-07-29 01:13:54,165 INFO [train.py:1114] (1/4) Epoch 17, batch 3800, loss[loss=0.1648, simple_loss=0.2668, pruned_loss=0.0314, over 4811.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2652, pruned_loss=0.04199, over 941714.41 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:13:56,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=223141.33333333334, ans=0.2 +2024-07-29 01:14:07,223 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.83 vs. 
limit=22.5 +2024-07-29 01:14:07,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223168.0, ans=0.125 +2024-07-29 01:14:09,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=223168.0, ans=0.125 +2024-07-29 01:14:11,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223168.0, ans=0.125 +2024-07-29 01:14:15,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=223168.0, ans=0.2 +2024-07-29 01:14:17,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=223181.33333333334, ans=0.0 +2024-07-29 01:14:20,551 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:14:21,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=223181.33333333334, ans=0.0 +2024-07-29 01:14:27,644 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:14:28,211 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.538e+01 6.338e+01 7.177e+01 1.035e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 01:14:30,991 INFO [train.py:1114] (1/4) Epoch 17, batch 3850, loss[loss=0.1755, simple_loss=0.2827, pruned_loss=0.03416, over 4653.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.265, pruned_loss=0.04128, over 942452.93 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:14:40,909 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=12.0 +2024-07-29 01:14:41,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=223221.33333333334, ans=0.2 +2024-07-29 01:14:48,561 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-07-29 01:14:49,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.88 vs. limit=6.0 +2024-07-29 01:14:59,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=223261.33333333334, ans=0.125 +2024-07-29 01:15:00,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=223261.33333333334, ans=0.125 +2024-07-29 01:15:05,369 INFO [train.py:1114] (1/4) Epoch 17, batch 3900, loss[loss=0.2148, simple_loss=0.3057, pruned_loss=0.06193, over 4815.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2644, pruned_loss=0.04087, over 942779.28 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:15:07,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=223274.66666666666, ans=0.0 +2024-07-29 01:15:20,675 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.71 vs. 
limit=15.0 +2024-07-29 01:15:25,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223301.33333333334, ans=0.1 +2024-07-29 01:15:34,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=223314.66666666666, ans=0.04949747468305833 +2024-07-29 01:15:38,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=223328.0, ans=0.125 +2024-07-29 01:15:42,537 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.533e+01 5.996e+01 6.814e+01 1.002e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 01:15:45,483 INFO [train.py:1114] (1/4) Epoch 17, batch 3950, loss[loss=0.2037, simple_loss=0.282, pruned_loss=0.06269, over 4836.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2647, pruned_loss=0.0411, over 944608.54 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:15:57,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=223341.33333333334, ans=0.125 +2024-07-29 01:16:24,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223368.0, ans=0.125 +2024-07-29 01:17:07,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.52 vs. limit=15.0 +2024-07-29 01:17:23,526 INFO [train.py:1114] (1/4) Epoch 17, batch 4000, loss[loss=0.1714, simple_loss=0.2541, pruned_loss=0.04439, over 4773.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2659, pruned_loss=0.04204, over 941361.46 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:17:28,721 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.84 vs. limit=15.0 +2024-07-29 01:17:46,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=223421.33333333334, ans=0.125 +2024-07-29 01:17:48,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.06 vs. limit=22.5 +2024-07-29 01:18:11,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=223448.0, ans=0.2 +2024-07-29 01:18:12,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=223448.0, ans=0.125 +2024-07-29 01:18:23,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223461.33333333334, ans=0.1 +2024-07-29 01:18:25,017 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.438e+01 5.691e+01 6.092e+01 6.901e+01 9.634e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 01:18:33,081 INFO [train.py:1114] (1/4) Epoch 17, batch 4050, loss[loss=0.2009, simple_loss=0.2811, pruned_loss=0.06034, over 3601.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.266, pruned_loss=0.0422, over 940059.66 frames. 
], batch size: 36, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:18:46,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223474.66666666666, ans=0.1 +2024-07-29 01:19:21,964 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.17 vs. limit=12.0 +2024-07-29 01:25:02,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=223528.0, ans=22.5 +2024-07-29 01:25:14,256 INFO [train.py:1114] (1/4) Epoch 17, batch 4100, loss[loss=0.2033, simple_loss=0.2886, pruned_loss=0.05895, over 4904.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.04297, over 938802.36 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 64.0 +2024-07-29 01:25:14,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=223541.33333333334, ans=0.1 +2024-07-29 01:25:18,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=223541.33333333334, ans=0.0 +2024-07-29 01:25:29,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=223541.33333333334, ans=0.0 +2024-07-29 01:26:16,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223568.0, ans=0.125 +2024-07-29 01:27:01,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=223581.33333333334, ans=0.025 +2024-07-29 01:27:03,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=223581.33333333334, ans=0.125 +2024-07-29 01:27:06,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223581.33333333334, ans=0.1 +2024-07-29 01:27:57,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223581.33333333334, ans=0.1 +2024-07-29 01:28:16,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223594.66666666666, ans=0.125 +2024-07-29 01:28:22,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.689e+01 6.101e+01 7.334e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 01:28:28,312 INFO [train.py:1114] (1/4) Epoch 17, batch 4150, loss[loss=0.1544, simple_loss=0.2563, pruned_loss=0.02624, over 4831.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2661, pruned_loss=0.04262, over 938211.51 frames. 
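Every `train.py:1114` batch summary ends with `grad_scale: 32.0`, the current loss scale of mixed-precision training. A hedged sketch of the standard PyTorch AMP pattern that maintains such a scale — the model/optimizer names are placeholders, and the repo's `train.py` may wrap this differently:

```python
import torch

scaler = torch.cuda.amp.GradScaler()  # maintains the dynamic loss scale

def train_step(model, batch, optimizer, device):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():            # forward in float16 where safe
        loss = model(batch.to(device)).mean()  # placeholder loss computation
    scaler.scale(loss).backward()              # backward on the scaled loss
    scaler.step(optimizer)                     # unscales; skips step on inf/nan
    scaler.update()                            # grows/shrinks the scale
    return loss.item(), scaler.get_scale()     # e.g. 32.0, as in the log
```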
], batch size: 13, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:29:28,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=223621.33333333334, ans=0.2 +2024-07-29 01:29:39,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=223634.66666666666, ans=0.0 +2024-07-29 01:29:41,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=223634.66666666666, ans=0.0 +2024-07-29 01:29:49,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=223634.66666666666, ans=0.04949747468305833 +2024-07-29 01:30:25,517 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.43 vs. limit=15.0 +2024-07-29 01:30:28,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=223661.33333333334, ans=6.0 +2024-07-29 01:30:48,968 INFO [train.py:1114] (1/4) Epoch 17, batch 4200, loss[loss=0.1809, simple_loss=0.2721, pruned_loss=0.04487, over 4908.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2669, pruned_loss=0.04269, over 939627.01 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:30:49,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=223674.66666666666, ans=0.125 +2024-07-29 01:30:53,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.93 vs. limit=15.0 +2024-07-29 01:32:23,740 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:32:43,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=223714.66666666666, ans=0.125 +2024-07-29 01:33:25,471 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.649e+01 6.155e+01 7.132e+01 1.062e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-29 01:33:29,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.08 vs. limit=15.0 +2024-07-29 01:33:32,175 INFO [train.py:1114] (1/4) Epoch 17, batch 4250, loss[loss=0.1643, simple_loss=0.2599, pruned_loss=0.03429, over 4629.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2673, pruned_loss=0.04297, over 940745.53 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:33:35,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=223741.33333333334, ans=0.0 +2024-07-29 01:33:39,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223741.33333333334, ans=0.125 +2024-07-29 01:33:41,184 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.57 vs. 
limit=10.0 +2024-07-29 01:34:03,528 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=223754.66666666666, ans=0.125 +2024-07-29 01:34:29,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.93 vs. limit=22.5 +2024-07-29 01:34:40,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=223781.33333333334, ans=0.0 +2024-07-29 01:34:48,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=223781.33333333334, ans=0.0 +2024-07-29 01:34:48,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223781.33333333334, ans=0.0 +2024-07-29 01:35:05,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.98 vs. limit=15.0 +2024-07-29 01:35:07,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=223794.66666666666, ans=0.2 +2024-07-29 01:35:11,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=223794.66666666666, ans=0.125 +2024-07-29 01:35:25,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=223794.66666666666, ans=0.0 +2024-07-29 01:35:26,169 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.55 vs. limit=15.0 +2024-07-29 01:35:27,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=223808.0, ans=0.0 +2024-07-29 01:35:27,637 INFO [train.py:1114] (1/4) Epoch 17, batch 4300, loss[loss=0.1627, simple_loss=0.2603, pruned_loss=0.03256, over 4756.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2675, pruned_loss=0.04331, over 940387.61 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:35:28,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.80 vs. limit=15.0 +2024-07-29 01:36:31,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.01 vs. 
limit=22.5 +2024-07-29 01:38:09,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223848.0, ans=0.1 +2024-07-29 01:38:09,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=223848.0, ans=0.025 +2024-07-29 01:38:10,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=223848.0, ans=0.035 +2024-07-29 01:38:13,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=223848.0, ans=0.015 +2024-07-29 01:38:17,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=223861.33333333334, ans=0.125 +2024-07-29 01:38:25,969 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.646e+01 6.421e+01 7.211e+01 1.436e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-29 01:38:27,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0 +2024-07-29 01:38:30,616 INFO [train.py:1114] (1/4) Epoch 17, batch 4350, loss[loss=0.1547, simple_loss=0.2499, pruned_loss=0.02978, over 4758.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2672, pruned_loss=0.04263, over 941365.88 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:38:39,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=223874.66666666666, ans=10.0 +2024-07-29 01:39:02,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223901.33333333334, ans=0.125 +2024-07-29 01:39:08,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=223901.33333333334, ans=0.125 +2024-07-29 01:39:23,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=223928.0, ans=0.025 +2024-07-29 01:39:35,678 INFO [train.py:1114] (1/4) Epoch 17, batch 4400, loss[loss=0.1647, simple_loss=0.2693, pruned_loss=0.03001, over 4812.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2668, pruned_loss=0.04224, over 941274.51 frames. 
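The `INFO [scaling.py:1024] Whitening: name=..., num_groups=..., num_channels=..., metric=M vs. limit=L` entries compare how far a layer's activations are from having an isotropic ("white") per-group covariance against a scheduled limit. One plausible formulation of such a metric, normalized so perfectly white features score 1.0 — an assumption for illustration, since the repo's `scaling.py` may differ in details:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Sketch: ~1.0 when each group's feature covariance is a multiple of
    the identity, growing as channels become correlated or unbalanced."""
    num_frames, num_channels = x.shape
    c = num_channels // num_groups
    xg = x.reshape(num_frames, num_groups, c).transpose(0, 1)  # (G, N, c)
    cov = xg.transpose(1, 2) @ xg / num_frames                 # per-group covariance
    mean_diag = cov.diagonal(dim1=1, dim2=2).mean()
    # mean squared entry of cov, scaled so an identity-like covariance gives 1.0
    return (cov ** 2).mean() * c / (mean_diag ** 2 + 1e-20)

x = torch.randn(1000, 384)          # nearly white -> metric close to 1.0
print(whitening_metric(x).item())
x_corr = x @ torch.randn(384, 384)  # correlated -> metric well above 1.0
print(whitening_metric(x_corr).item())
```

Presumably a corrective term is applied only when the logged metric exceeds its limit, which would explain why most entries report metrics comfortably below `limit`.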
], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:39:35,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223941.33333333334, ans=0.125 +2024-07-29 01:39:46,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=223941.33333333334, ans=0.125 +2024-07-29 01:40:12,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=223954.66666666666, ans=0.0 +2024-07-29 01:40:22,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223968.0, ans=0.125 +2024-07-29 01:40:33,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=223981.33333333334, ans=0.04949747468305833 +2024-07-29 01:40:33,991 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.25 vs. limit=15.0 +2024-07-29 01:40:36,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223981.33333333334, ans=0.125 +2024-07-29 01:40:49,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223994.66666666666, ans=0.125 +2024-07-29 01:40:50,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=223994.66666666666, ans=0.1 +2024-07-29 01:40:54,550 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.541e+01 6.235e+01 6.902e+01 1.046e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-29 01:40:59,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.16 vs. limit=22.5 +2024-07-29 01:40:59,853 INFO [train.py:1114] (1/4) Epoch 17, batch 4450, loss[loss=0.1548, simple_loss=0.2413, pruned_loss=0.03411, over 4933.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2673, pruned_loss=0.04291, over 939660.02 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:41:04,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=224008.0, ans=0.0 +2024-07-29 01:41:44,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=224021.33333333334, ans=0.0 +2024-07-29 01:41:46,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=224034.66666666666, ans=0.2 +2024-07-29 01:42:25,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=224048.0, ans=0.0 +2024-07-29 01:42:57,262 INFO [train.py:1114] (1/4) Epoch 17, batch 4500, loss[loss=0.1851, simple_loss=0.2831, pruned_loss=0.04352, over 4745.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2675, pruned_loss=0.04278, over 938644.29 frames. 
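The `*_skip_rate` and `bypass.scale_min` schedules in these entries (e.g. `bypass.skip_rate ... ans=0.04949747468305833`, `bypass.scale_min ... ans=0.2`) suggest residual branches that are stochastically skipped during training, with a learned bypass scale clamped from below. One plausible reading, sketched with invented names and the scheduled values frozen as plain floats:

```python
import torch
import torch.nn as nn

class Bypass(nn.Module):
    """Sketch: y = x + scale * module(x), where the branch is randomly
    skipped with probability skip_rate during training and the learned
    per-channel scale is clamped to at least scale_min."""
    def __init__(self, module, channels, skip_rate=0.05, scale_min=0.2):
        super().__init__()
        self.module = module
        self.scale = nn.Parameter(torch.ones(channels))
        self.skip_rate = skip_rate   # in the run this is a ScheduledFloat
        self.scale_min = scale_min   # likewise scheduled over batch_count

    def forward(self, x):
        if self.training and torch.rand(()) < self.skip_rate:
            return x                 # skip the branch entirely this batch
        scale = self.scale.clamp(min=self.scale_min)
        return x + scale * self.module(x)
```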
], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:43:17,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=224088.0, ans=0.2 +2024-07-29 01:43:17,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.44 vs. limit=10.0 +2024-07-29 01:43:21,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224101.33333333334, ans=0.1 +2024-07-29 01:43:23,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.42 vs. limit=6.0 +2024-07-29 01:43:42,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=224114.66666666666, ans=0.0 +2024-07-29 01:43:48,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.34 vs. limit=10.0 +2024-07-29 01:43:54,426 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.696e+01 6.215e+01 7.468e+01 9.739e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 01:43:54,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224128.0, ans=0.1 +2024-07-29 01:43:56,645 INFO [train.py:1114] (1/4) Epoch 17, batch 4550, loss[loss=0.1544, simple_loss=0.2397, pruned_loss=0.03454, over 4885.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2672, pruned_loss=0.04262, over 940576.61 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:44:11,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=224168.0, ans=0.125 +2024-07-29 01:44:12,071 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.72 vs. limit=15.0 +2024-07-29 01:44:14,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=224168.0, ans=0.125 +2024-07-29 01:44:23,942 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:44:29,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=224194.66666666666, ans=0.0 +2024-07-29 01:44:29,803 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:44:33,959 INFO [train.py:1114] (1/4) Epoch 17, batch 4600, loss[loss=0.1794, simple_loss=0.2713, pruned_loss=0.04381, over 4554.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2667, pruned_loss=0.04242, over 938817.82 frames. 
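Each batch summary reports two losses: `loss[...]` for the current batch and `tot_loss[..., over ~939000 frames. ]`, a frame-weighted aggregate over recent batches whose frame count stays roughly constant. A hedged sketch of how such an aggregate could be maintained — an exponentially decayed, frame-weighted running sum; the decay constant is an assumption chosen so the effective window is on the order of 1e6 frames:

```python
class RunningLoss:
    """Sketch of a frame-weighted running loss like the logged tot_loss."""
    def __init__(self, decay_per_frame: float = 1.0 - 1e-6):
        self.decay = decay_per_frame
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float):
        d = self.decay ** batch_frames  # decay old statistics per frame seen
        self.loss_sum = self.loss_sum * d + batch_loss * batch_frames
        self.frames = self.frames * d + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tot = RunningLoss()
for step in range(2000):
    tot.update(batch_loss=0.175, batch_frames=4700.0)
print(f"tot_loss[loss={tot.value:.4f}, over {tot.frames:.2f} frames.]")
```

With these numbers the steady-state frame count settles near 1e6, the same order as the `over 93xxxx.xx frames` figures in the log.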
], batch size: 21, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:44:40,982 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=224221.33333333334, ans=0.0 +2024-07-29 01:44:41,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=224221.33333333334, ans=0.09899494936611666 +2024-07-29 01:44:45,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224221.33333333334, ans=0.1 +2024-07-29 01:44:54,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=224248.0, ans=0.125 +2024-07-29 01:45:02,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224248.0, ans=0.1 +2024-07-29 01:45:14,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224261.33333333334, ans=0.125 +2024-07-29 01:45:16,117 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.31 vs. limit=10.0 +2024-07-29 01:45:17,103 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.605e+01 6.267e+01 6.922e+01 9.428e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 01:45:19,071 INFO [train.py:1114] (1/4) Epoch 17, batch 4650, loss[loss=0.1991, simple_loss=0.2887, pruned_loss=0.05474, over 4825.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.267, pruned_loss=0.04246, over 940496.23 frames. ], batch size: 16, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:45:42,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=224288.0, ans=0.125 +2024-07-29 01:45:52,873 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.67 vs. limit=22.5 +2024-07-29 01:46:15,066 INFO [train.py:1114] (1/4) Epoch 17, batch 4700, loss[loss=0.1469, simple_loss=0.2336, pruned_loss=0.03008, over 4706.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2654, pruned_loss=0.04204, over 938257.42 frames. ], batch size: 11, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:46:33,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=224368.0, ans=0.0 +2024-07-29 01:46:35,674 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.29 vs. limit=15.0 +2024-07-29 01:46:35,709 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. 
limit=15.0 +2024-07-29 01:46:36,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224368.0, ans=0.1 +2024-07-29 01:46:38,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=224381.33333333334, ans=0.0 +2024-07-29 01:46:42,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224381.33333333334, ans=0.1 +2024-07-29 01:46:48,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.99 vs. limit=6.0 +2024-07-29 01:46:53,328 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.618e+01 6.268e+01 7.126e+01 1.011e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 01:46:56,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.74 vs. limit=22.5 +2024-07-29 01:46:58,153 INFO [train.py:1114] (1/4) Epoch 17, batch 4750, loss[loss=0.1937, simple_loss=0.2838, pruned_loss=0.05181, over 4508.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2664, pruned_loss=0.04282, over 936375.54 frames. ], batch size: 21, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:47:07,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=224421.33333333334, ans=0.125 +2024-07-29 01:47:26,076 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.32 vs. limit=22.5 +2024-07-29 01:47:46,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=224448.0, ans=0.2 +2024-07-29 01:47:53,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-29 01:47:56,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=224461.33333333334, ans=0.025 +2024-07-29 01:48:01,679 INFO [train.py:1114] (1/4) Epoch 17, batch 4800, loss[loss=0.1951, simple_loss=0.2865, pruned_loss=0.05187, over 4700.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2662, pruned_loss=0.04294, over 933080.32 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:48:13,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224488.0, ans=0.125 +2024-07-29 01:48:49,936 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.702e+01 6.152e+01 7.356e+01 9.741e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 01:48:53,603 INFO [train.py:1114] (1/4) Epoch 17, batch 4850, loss[loss=0.1558, simple_loss=0.2452, pruned_loss=0.0332, over 4738.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2664, pruned_loss=0.04341, over 932499.88 frames. 
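The many `balancer*.prob`, `balancer*.min_positive`, and `balancer*.min_abs` schedules in these entries point to an activation "balancer": a layer that is the identity in the forward pass but shapes gradients so activations keep a minimum fraction of positive values and a minimum mean magnitude. A hedged sketch of that idea — the update rule and constants here are invented for illustration, not the repo's actual `Balancer`:

```python
import torch

class BalancerFunction(torch.autograd.Function):
    """Sketch: identity forward; backward adds a small extra gradient that
    nudges offending channels toward the sign/magnitude constraints."""
    @staticmethod
    def forward(ctx, x, min_positive=0.05, min_abs=0.2, strength=0.01):
        ctx.save_for_backward(x)
        ctx.cfg = (min_positive, min_abs, strength)
        return x

    @staticmethod
    def backward(ctx, grad):
        (x,) = ctx.saved_tensors
        min_positive, min_abs, strength = ctx.cfg
        step = strength * grad.abs().mean()      # keep the nudge proportionate
        pos_frac = (x > 0).float().mean(dim=0)   # per-channel sign statistics
        too_negative = (pos_frac < min_positive).float()
        too_small = (x.abs().mean(dim=0) < min_abs).float()
        # a negative extra gradient increases the activation under
        # gradient-descent updates; sign(x) pushes magnitudes away from zero
        extra = -step * (too_negative + too_small * x.sign())
        return grad + extra, None, None, None

x = torch.randn(16, 384, requires_grad=True)
y = BalancerFunction.apply(x, 0.05, 0.2, 0.01)
y.sum().backward()
```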
], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:49:07,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=224554.66666666666, ans=0.0 +2024-07-29 01:49:09,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=224554.66666666666, ans=0.125 +2024-07-29 01:49:17,691 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.55 vs. limit=15.0 +2024-07-29 01:49:36,671 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=224594.66666666666, ans=0.2 +2024-07-29 01:49:37,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=224594.66666666666, ans=0.125 +2024-07-29 01:49:42,565 INFO [train.py:1114] (1/4) Epoch 17, batch 4900, loss[loss=0.1511, simple_loss=0.2428, pruned_loss=0.02968, over 4763.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.265, pruned_loss=0.04297, over 934355.31 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:49:43,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.57 vs. limit=15.0 +2024-07-29 01:49:44,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=224608.0, ans=0.125 +2024-07-29 01:49:55,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=224634.66666666666, ans=0.2 +2024-07-29 01:50:00,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=224634.66666666666, ans=0.125 +2024-07-29 01:50:29,506 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.68 vs. limit=15.0 +2024-07-29 01:50:48,850 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.722e+01 6.197e+01 6.933e+01 1.189e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 01:51:00,385 INFO [train.py:1114] (1/4) Epoch 17, batch 4950, loss[loss=0.1962, simple_loss=0.2802, pruned_loss=0.05613, over 3259.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2655, pruned_loss=0.04271, over 931402.75 frames. ], batch size: 35, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:51:11,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=224674.66666666666, ans=0.05 +2024-07-29 01:51:11,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.51 vs. limit=15.0 +2024-07-29 01:51:20,405 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224688.0, ans=0.125 +2024-07-29 01:51:25,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224701.33333333334, ans=0.1 +2024-07-29 01:51:31,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.87 vs. 
limit=6.0 +2024-07-29 01:51:33,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=224714.66666666666, ans=0.025 +2024-07-29 01:51:41,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224728.0, ans=0.0 +2024-07-29 01:51:42,475 INFO [train.py:1114] (1/4) Epoch 17, batch 5000, loss[loss=0.1923, simple_loss=0.2861, pruned_loss=0.04926, over 4667.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2655, pruned_loss=0.04262, over 935210.46 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:52:01,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=224754.66666666666, ans=0.0 +2024-07-29 01:52:36,559 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.01 vs. limit=22.5 +2024-07-29 01:52:47,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=224794.66666666666, ans=0.025 +2024-07-29 01:52:47,964 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.582e+01 6.302e+01 7.015e+01 1.020e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 01:52:50,184 INFO [train.py:1114] (1/4) Epoch 17, batch 5050, loss[loss=0.1587, simple_loss=0.246, pruned_loss=0.03569, over 4859.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2655, pruned_loss=0.04229, over 937473.42 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:53:00,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=224821.33333333334, ans=0.0 +2024-07-29 01:53:18,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=224848.0, ans=0.125 +2024-07-29 01:53:22,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=224848.0, ans=0.125 +2024-07-29 01:53:34,547 INFO [train.py:1114] (1/4) Epoch 17, batch 5100, loss[loss=0.1686, simple_loss=0.252, pruned_loss=0.04264, over 4785.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2661, pruned_loss=0.04227, over 935083.24 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:53:43,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.08 vs. limit=6.0 +2024-07-29 01:54:01,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=224914.66666666666, ans=0.0 +2024-07-29 01:54:01,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=224914.66666666666, ans=0.125 +2024-07-29 01:54:06,500 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:54:11,313 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.725e+01 6.244e+01 7.275e+01 1.073e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:54:13,314 INFO [train.py:1114] (1/4) Epoch 17, batch 5150, loss[loss=0.1858, simple_loss=0.2719, pruned_loss=0.04986, over 4851.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2671, pruned_loss=0.04276, over 936038.11 frames. 
], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:13,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224941.33333333334, ans=0.125 +2024-07-29 01:54:14,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=224941.33333333334, ans=0.125 +2024-07-29 01:54:17,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224941.33333333334, ans=0.125 +2024-07-29 01:54:26,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224968.0, ans=0.125 +2024-07-29 01:54:26,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-07-29 01:54:32,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=224968.0, ans=0.0 +2024-07-29 01:54:34,883 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-07-29 01:54:47,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=225008.0, ans=0.125 +2024-07-29 01:54:48,428 INFO [train.py:1114] (1/4) Epoch 17, batch 5200, loss[loss=0.1649, simple_loss=0.2578, pruned_loss=0.03599, over 4649.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2676, pruned_loss=0.04308, over 936343.58 frames. ], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:49,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=225008.0, ans=0.0 +2024-07-29 01:55:20,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=225048.0, ans=0.5 +2024-07-29 01:55:29,957 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.70 vs. limit=15.0 +2024-07-29 01:55:37,216 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.840e+01 6.748e+01 7.869e+01 1.303e+02, threshold=1.350e+02, percent-clipped=1.0 +2024-07-29 01:55:39,363 INFO [train.py:1114] (1/4) Epoch 17, batch 5250, loss[loss=0.2212, simple_loss=0.3067, pruned_loss=0.06783, over 4901.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.267, pruned_loss=0.04312, over 935854.84 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:55:41,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.99 vs. limit=6.0 +2024-07-29 01:55:50,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.72 vs. limit=15.0 +2024-07-29 01:55:53,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.81 vs. 
limit=22.5 +2024-07-29 01:56:17,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=225114.66666666666, ans=0.125 +2024-07-29 01:56:25,590 INFO [train.py:1114] (1/4) Epoch 17, batch 5300, loss[loss=0.1657, simple_loss=0.2614, pruned_loss=0.03497, over 4622.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2662, pruned_loss=0.04281, over 934535.51 frames. ], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:56:29,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=225141.33333333334, ans=0.09899494936611666 +2024-07-29 01:56:29,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=225141.33333333334, ans=0.0 +2024-07-29 01:56:31,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225154.66666666666, ans=0.1 +2024-07-29 01:56:46,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0 +2024-07-29 01:56:53,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=225194.66666666666, ans=0.125 +2024-07-29 01:56:57,571 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.754e+01 6.386e+01 7.426e+01 1.100e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 01:56:59,691 INFO [train.py:1114] (1/4) Epoch 17, batch 5350, loss[loss=0.1483, simple_loss=0.2323, pruned_loss=0.03212, over 4532.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2672, pruned_loss=0.0431, over 936512.48 frames. ], batch size: 10, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:05,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=225221.33333333334, ans=0.125 +2024-07-29 01:57:06,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225221.33333333334, ans=0.1 +2024-07-29 01:57:12,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=225221.33333333334, ans=0.125 +2024-07-29 01:57:12,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=225221.33333333334, ans=0.0 +2024-07-29 01:57:13,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=225234.66666666666, ans=0.125 +2024-07-29 01:57:18,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-29 01:57:21,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=225248.0, ans=0.09899494936611666 +2024-07-29 01:57:29,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=225261.33333333334, ans=0.0 +2024-07-29 01:57:34,861 INFO [train.py:1114] (1/4) Epoch 17, batch 5400, loss[loss=0.2058, simple_loss=0.2912, pruned_loss=0.06018, over 4316.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2678, pruned_loss=0.04328, over 930977.12 frames. 
], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:44,175 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:57:48,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=225288.0, ans=0.025 +2024-07-29 01:57:56,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225301.33333333334, ans=0.1 +2024-07-29 01:58:07,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=225328.0, ans=0.125 +2024-07-29 01:58:09,518 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+01 5.738e+01 6.198e+01 6.838e+01 9.669e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 01:58:11,777 INFO [train.py:1114] (1/4) Epoch 17, batch 5450, loss[loss=0.1551, simple_loss=0.2337, pruned_loss=0.03832, over 4703.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2668, pruned_loss=0.04289, over 934139.32 frames. ], batch size: 11, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:19,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225354.66666666666, ans=0.125 +2024-07-29 01:58:25,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=225368.0, ans=0.0 +2024-07-29 01:58:26,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225368.0, ans=0.1 +2024-07-29 01:58:35,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=225381.33333333334, ans=0.125 +2024-07-29 01:58:39,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=225394.66666666666, ans=0.125 +2024-07-29 01:58:43,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=225394.66666666666, ans=0.125 +2024-07-29 01:58:44,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=225394.66666666666, ans=0.125 +2024-07-29 01:58:45,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=225408.0, ans=0.0 +2024-07-29 01:58:45,721 INFO [train.py:1114] (1/4) Epoch 17, batch 5500, loss[loss=0.2269, simple_loss=0.3187, pruned_loss=0.06759, over 4256.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2668, pruned_loss=0.0434, over 931301.00 frames. 
], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:53,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=225421.33333333334, ans=0.125 +2024-07-29 01:58:58,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=225434.66666666666, ans=0.0 +2024-07-29 01:59:02,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=225434.66666666666, ans=0.125 +2024-07-29 01:59:02,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=225434.66666666666, ans=0.125 +2024-07-29 01:59:08,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=225448.0, ans=0.125 +2024-07-29 01:59:13,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.86 vs. limit=15.0 +2024-07-29 01:59:16,897 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.683e+01 6.448e+01 7.775e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 01:59:18,938 INFO [train.py:1114] (1/4) Epoch 17, batch 5550, loss[loss=0.1767, simple_loss=0.2729, pruned_loss=0.04029, over 4700.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2661, pruned_loss=0.04326, over 933208.18 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:33,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=225501.33333333334, ans=0.1 +2024-07-29 01:59:37,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225501.33333333334, ans=0.1 +2024-07-29 01:59:49,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225528.0, ans=0.1 +2024-07-29 01:59:53,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=225528.0, ans=0.125 +2024-07-29 01:59:55,059 INFO [train.py:1114] (1/4) Epoch 17, batch 5600, loss[loss=0.1778, simple_loss=0.2747, pruned_loss=0.04042, over 4734.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2665, pruned_loss=0.04319, over 934400.48 frames. ], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:00:01,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=225554.66666666666, ans=0.125 +2024-07-29 02:00:01,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.44 vs. limit=10.0 +2024-07-29 02:00:02,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=225554.66666666666, ans=0.2 +2024-07-29 02:00:03,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=225554.66666666666, ans=0.125 +2024-07-29 02:00:04,201 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.31 vs. 
limit=15.0 +2024-07-29 02:00:05,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=225554.66666666666, ans=0.125 +2024-07-29 02:00:21,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=225594.66666666666, ans=0.0 +2024-07-29 02:00:22,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=225594.66666666666, ans=0.025 +2024-07-29 02:00:27,275 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.591e+01 6.348e+01 7.500e+01 1.117e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 02:00:29,290 INFO [train.py:1114] (1/4) Epoch 17, batch 5650, loss[loss=0.2311, simple_loss=0.312, pruned_loss=0.0751, over 4505.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.266, pruned_loss=0.0432, over 936830.61 frames. ], batch size: 21, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:01:04,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=225634.66666666666, ans=0.2 +2024-07-29 02:01:10,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225648.0, ans=0.125 +2024-07-29 02:01:12,004 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.42 vs. limit=22.5 +2024-07-29 02:01:20,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225661.33333333334, ans=0.1 +2024-07-29 02:01:23,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=225674.66666666666, ans=0.125 +2024-07-29 02:01:23,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.90 vs. limit=15.0 +2024-07-29 02:01:24,141 INFO [train.py:1114] (1/4) Epoch 17, batch 5700, loss[loss=0.2022, simple_loss=0.285, pruned_loss=0.05966, over 4695.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.267, pruned_loss=0.04363, over 937919.77 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:01:28,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=225674.66666666666, ans=0.125 +2024-07-29 02:01:29,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-07-29 02:01:32,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.07 vs. limit=22.5 +2024-07-29 02:01:34,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-07-29 02:01:57,713 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.493e+01 6.225e+01 7.048e+01 1.096e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 02:02:02,540 INFO [train.py:1114] (1/4) Epoch 17, batch 5750, loss[loss=0.1825, simple_loss=0.2726, pruned_loss=0.04619, over 4772.00 frames. ], tot_loss[loss=0.177, simple_loss=0.267, pruned_loss=0.04354, over 937989.51 frames. 
], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:07,482 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.82 vs. limit=15.0 +2024-07-29 02:02:09,943 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=225754.66666666666, ans=0.04949747468305833 +2024-07-29 02:02:12,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=225754.66666666666, ans=0.5 +2024-07-29 02:02:23,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=225781.33333333334, ans=0.125 +2024-07-29 02:02:25,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=225781.33333333334, ans=0.125 +2024-07-29 02:02:34,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225794.66666666666, ans=0.1 +2024-07-29 02:02:35,949 INFO [train.py:1114] (1/4) Epoch 17, batch 5800, loss[loss=0.1902, simple_loss=0.2852, pruned_loss=0.04757, over 4690.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2672, pruned_loss=0.04371, over 936838.76 frames. ], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:40,341 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.98 vs. limit=22.5 +2024-07-29 02:02:57,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=225821.33333333334, ans=0.2 +2024-07-29 02:03:17,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=225861.33333333334, ans=0.025 +2024-07-29 02:03:18,477 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.618e+01 6.216e+01 6.871e+01 1.068e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 02:03:21,241 INFO [train.py:1114] (1/4) Epoch 17, batch 5850, loss[loss=0.1868, simple_loss=0.2912, pruned_loss=0.04125, over 4506.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04392, over 937421.41 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:03:27,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=225888.0, ans=0.125 +2024-07-29 02:03:28,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225888.0, ans=0.1 +2024-07-29 02:03:29,471 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=225888.0, ans=0.09899494936611666 +2024-07-29 02:03:40,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225901.33333333334, ans=0.1 +2024-07-29 02:03:47,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=225914.66666666666, ans=0.125 +2024-07-29 02:03:50,968 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.35 vs. 
limit=22.5 +2024-07-29 02:03:53,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=225928.0, ans=0.125 +2024-07-29 02:03:55,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=225928.0, ans=0.125 +2024-07-29 02:03:56,487 INFO [train.py:1114] (1/4) Epoch 17, batch 5900, loss[loss=0.1998, simple_loss=0.2879, pruned_loss=0.05585, over 4686.00 frames. ], tot_loss[loss=0.178, simple_loss=0.268, pruned_loss=0.04401, over 937936.07 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:04:01,012 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.20 vs. limit=22.5 +2024-07-29 02:04:05,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.76 vs. limit=10.0 +2024-07-29 02:04:06,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.92 vs. limit=6.0 +2024-07-29 02:04:08,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=225954.66666666666, ans=0.0 +2024-07-29 02:04:11,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=225968.0, ans=0.0 +2024-07-29 02:04:27,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225994.66666666666, ans=0.125 +2024-07-29 02:04:28,164 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.754e+01 6.416e+01 7.190e+01 1.147e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:04:30,242 INFO [train.py:1114] (1/4) Epoch 17, batch 5950, loss[loss=0.193, simple_loss=0.2849, pruned_loss=0.05059, over 4702.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2664, pruned_loss=0.04335, over 940039.45 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:04:30,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=226008.0, ans=0.2 +2024-07-29 02:04:32,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=226008.0, ans=0.07 +2024-07-29 02:04:43,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=226021.33333333334, ans=0.0 +2024-07-29 02:04:57,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=226048.0, ans=0.125 +2024-07-29 02:04:57,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=226048.0, ans=0.0 +2024-07-29 02:05:02,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=226061.33333333334, ans=0.05 +2024-07-29 02:05:06,715 INFO [train.py:1114] (1/4) Epoch 17, batch 6000, loss[loss=0.2275, simple_loss=0.3161, pruned_loss=0.06942, over 4325.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2661, pruned_loss=0.04332, over 937544.82 frames. 
], batch size: 26, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:05:06,716 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 02:05:43,820 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.1623, simple_loss=0.2646, pruned_loss=0.02995, over 944034.00 frames. +2024-07-29 02:05:43,821 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 02:05:45,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=226074.66666666666, ans=0.0 +2024-07-29 02:05:52,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=226088.0, ans=0.07 +2024-07-29 02:05:53,968 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:05:56,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.43 vs. limit=22.5 +2024-07-29 02:06:09,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226114.66666666666, ans=0.125 +2024-07-29 02:06:17,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.832e+01 6.475e+01 7.861e+01 1.037e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 02:06:19,740 INFO [train.py:1114] (1/4) Epoch 17, batch 6050, loss[loss=0.1647, simple_loss=0.2488, pruned_loss=0.04034, over 4774.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.266, pruned_loss=0.0434, over 938617.44 frames. ], batch size: 12, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:06:20,571 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:06:21,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226141.33333333334, ans=0.1 +2024-07-29 02:06:22,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226141.33333333334, ans=0.125 +2024-07-29 02:06:31,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=226154.66666666666, ans=0.0 +2024-07-29 02:06:36,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=226168.0, ans=0.07 +2024-07-29 02:06:41,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=226181.33333333334, ans=0.025 +2024-07-29 02:06:45,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=226181.33333333334, ans=0.2 +2024-07-29 02:06:45,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226181.33333333334, ans=0.125 +2024-07-29 02:06:46,447 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.77 vs. 
limit=10.0 +2024-07-29 02:06:58,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=226194.66666666666, ans=0.125 +2024-07-29 02:07:01,457 INFO [train.py:1114] (1/4) Epoch 17, batch 6100, loss[loss=0.1956, simple_loss=0.2832, pruned_loss=0.05404, over 4679.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2656, pruned_loss=0.04297, over 937925.70 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:07:12,167 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. limit=12.0 +2024-07-29 02:07:17,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=226221.33333333334, ans=0.0 +2024-07-29 02:07:21,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=226234.66666666666, ans=0.0 +2024-07-29 02:07:44,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=226248.0, ans=0.125 +2024-07-29 02:07:53,313 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.857e+01 5.509e+01 6.001e+01 6.915e+01 1.050e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 02:07:55,389 INFO [train.py:1114] (1/4) Epoch 17, batch 6150, loss[loss=0.2209, simple_loss=0.3039, pruned_loss=0.06895, over 3425.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2661, pruned_loss=0.0428, over 936734.24 frames. ], batch size: 35, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:12,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226274.66666666666, ans=0.0 +2024-07-29 02:08:16,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=226288.0, ans=0.2 +2024-07-29 02:08:34,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=226314.66666666666, ans=0.0 +2024-07-29 02:08:36,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=226314.66666666666, ans=0.125 +2024-07-29 02:08:37,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226314.66666666666, ans=0.1 +2024-07-29 02:08:46,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226328.0, ans=0.125 +2024-07-29 02:08:46,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=226328.0, ans=0.0 +2024-07-29 02:08:56,968 INFO [train.py:1114] (1/4) Epoch 17, batch 6200, loss[loss=0.1806, simple_loss=0.2696, pruned_loss=0.04583, over 4738.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2667, pruned_loss=0.04341, over 936932.37 frames. 
], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:58,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=226341.33333333334, ans=0.0 +2024-07-29 02:09:24,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=226368.0, ans=0.07 +2024-07-29 02:09:48,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.60 vs. limit=15.0 +2024-07-29 02:09:51,543 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.563e+01 6.274e+01 7.227e+01 1.075e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 02:09:53,624 INFO [train.py:1114] (1/4) Epoch 17, batch 6250, loss[loss=0.1841, simple_loss=0.2818, pruned_loss=0.04318, over 4806.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2664, pruned_loss=0.0439, over 932802.84 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:10:04,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=226421.33333333334, ans=0.0 +2024-07-29 02:10:08,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226434.66666666666, ans=0.0 +2024-07-29 02:10:08,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=226434.66666666666, ans=0.0 +2024-07-29 02:10:09,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=226434.66666666666, ans=0.125 +2024-07-29 02:10:25,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=226461.33333333334, ans=0.125 +2024-07-29 02:10:30,815 INFO [train.py:1114] (1/4) Epoch 17, batch 6300, loss[loss=0.1481, simple_loss=0.2322, pruned_loss=0.03198, over 4509.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2669, pruned_loss=0.04397, over 930240.19 frames. ], batch size: 10, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:10:46,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-07-29 02:10:52,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=226501.33333333334, ans=0.125 +2024-07-29 02:11:05,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=226514.66666666666, ans=0.0 +2024-07-29 02:11:06,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=226514.66666666666, ans=0.125 +2024-07-29 02:11:15,466 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.120e+01 5.607e+01 6.569e+01 7.954e+01 1.446e+02, threshold=1.314e+02, percent-clipped=2.0 +2024-07-29 02:11:17,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=226541.33333333334, ans=0.125 +2024-07-29 02:11:31,870 INFO [train.py:1114] (1/4) Epoch 17, batch 6350, loss[loss=0.1991, simple_loss=0.2988, pruned_loss=0.04966, over 4494.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2671, pruned_loss=0.04386, over 934153.14 frames. 
], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:12:09,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=226568.0, ans=0.2 +2024-07-29 02:12:10,788 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226568.0, ans=0.125 +2024-07-29 02:12:13,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=226568.0, ans=0.1 +2024-07-29 02:12:16,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=226581.33333333334, ans=0.04949747468305833 +2024-07-29 02:12:30,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.92 vs. limit=15.0 +2024-07-29 02:12:30,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=226608.0, ans=15.0 +2024-07-29 02:12:30,407 INFO [train.py:1114] (1/4) Epoch 17, batch 6400, loss[loss=0.1922, simple_loss=0.2801, pruned_loss=0.05217, over 4635.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2673, pruned_loss=0.04376, over 935309.01 frames. ], batch size: 13, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:13:19,227 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. limit=5.0 +2024-07-29 02:13:23,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=226661.33333333334, ans=0.2 +2024-07-29 02:13:28,827 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 5.819e+01 6.340e+01 7.116e+01 1.046e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 02:13:32,055 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=226661.33333333334, ans=0.125 +2024-07-29 02:13:33,293 INFO [train.py:1114] (1/4) Epoch 17, batch 6450, loss[loss=0.1891, simple_loss=0.2784, pruned_loss=0.04992, over 4410.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2671, pruned_loss=0.04347, over 938703.60 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:13:33,478 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:13:34,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=226674.66666666666, ans=0.125 +2024-07-29 02:13:49,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=226701.33333333334, ans=0.0 +2024-07-29 02:13:55,156 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.40 vs. limit=15.0 +2024-07-29 02:14:01,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0 +2024-07-29 02:14:09,136 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.25 vs. 
limit=22.5 +2024-07-29 02:14:10,839 INFO [train.py:1114] (1/4) Epoch 17, batch 6500, loss[loss=0.2178, simple_loss=0.2989, pruned_loss=0.06831, over 3382.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2667, pruned_loss=0.04259, over 939908.67 frames. ], batch size: 35, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:14:14,741 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.97 vs. limit=10.0 +2024-07-29 02:14:28,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226768.0, ans=0.125 +2024-07-29 02:14:31,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226781.33333333334, ans=0.125 +2024-07-29 02:14:32,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=226781.33333333334, ans=0.2 +2024-07-29 02:14:42,542 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.659e+01 6.416e+01 7.709e+01 1.114e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:14:43,951 INFO [train.py:1114] (1/4) Epoch 17, batch 6550, loss[loss=0.1509, simple_loss=0.2422, pruned_loss=0.02977, over 4815.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.267, pruned_loss=0.04261, over 942794.63 frames. ], batch size: 11, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:14:55,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226821.33333333334, ans=0.125 +2024-07-29 02:15:09,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226848.0, ans=0.125 +2024-07-29 02:15:12,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226861.33333333334, ans=0.0 +2024-07-29 02:15:13,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.41 vs. limit=10.0 +2024-07-29 02:15:15,661 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:15:18,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=226874.66666666666, ans=0.0 +2024-07-29 02:15:18,654 INFO [train.py:1114] (1/4) Epoch 17, batch 6600, loss[loss=0.1744, simple_loss=0.2688, pruned_loss=0.04003, over 4932.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2675, pruned_loss=0.04296, over 944800.25 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:15:20,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=226874.66666666666, ans=0.125 +2024-07-29 02:15:20,841 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226874.66666666666, ans=0.1 +2024-07-29 02:15:33,464 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.00 vs. 
limit=15.0 +2024-07-29 02:15:36,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=226901.33333333334, ans=0.125 +2024-07-29 02:15:37,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=226901.33333333334, ans=0.0 +2024-07-29 02:15:38,593 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=226914.66666666666, ans=0.125 +2024-07-29 02:15:49,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=226928.0, ans=0.125 +2024-07-29 02:15:55,951 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.664e+01 6.465e+01 7.332e+01 1.238e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 02:15:57,323 INFO [train.py:1114] (1/4) Epoch 17, batch 6650, loss[loss=0.1743, simple_loss=0.2831, pruned_loss=0.03275, over 4629.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2674, pruned_loss=0.04261, over 943506.59 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:16:13,987 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.46 vs. limit=22.5 +2024-07-29 02:16:21,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=226981.33333333334, ans=0.125 +2024-07-29 02:16:21,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=226981.33333333334, ans=0.1 +2024-07-29 02:16:23,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=226981.33333333334, ans=0.2 +2024-07-29 02:16:25,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226981.33333333334, ans=0.0 +2024-07-29 02:16:35,769 INFO [train.py:1114] (1/4) Epoch 17, batch 6700, loss[loss=0.1727, simple_loss=0.2601, pruned_loss=0.04259, over 4687.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2673, pruned_loss=0.04266, over 942546.36 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:16:52,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=227034.66666666666, ans=0.2 +2024-07-29 02:16:55,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=227048.0, ans=6.0 +2024-07-29 02:17:08,093 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.902e+01 6.582e+01 7.550e+01 1.119e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-29 02:17:09,554 INFO [train.py:1114] (1/4) Epoch 17, batch 6750, loss[loss=0.1688, simple_loss=0.2706, pruned_loss=0.03344, over 4452.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2673, pruned_loss=0.04286, over 940755.71 frames. 
], batch size: 25, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:17:09,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227074.66666666666, ans=0.125 +2024-07-29 02:17:20,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=227088.0, ans=0.0 +2024-07-29 02:17:29,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.01 vs. limit=15.0 +2024-07-29 02:17:31,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227114.66666666666, ans=0.0 +2024-07-29 02:17:40,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227128.0, ans=0.1 +2024-07-29 02:17:46,134 INFO [train.py:1114] (1/4) Epoch 17, batch 6800, loss[loss=0.1672, simple_loss=0.2598, pruned_loss=0.03725, over 4637.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2684, pruned_loss=0.04349, over 939030.61 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:17:55,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=227154.66666666666, ans=0.125 +2024-07-29 02:18:00,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=227154.66666666666, ans=0.0 +2024-07-29 02:18:02,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=227168.0, ans=0.0 +2024-07-29 02:18:07,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227168.0, ans=0.125 +2024-07-29 02:18:08,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227168.0, ans=0.1 +2024-07-29 02:18:11,824 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0 +2024-07-29 02:18:16,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227181.33333333334, ans=0.125 +2024-07-29 02:18:18,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.23 vs. limit=15.0 +2024-07-29 02:18:19,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=227194.66666666666, ans=0.2 +2024-07-29 02:18:23,158 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.796e+01 6.354e+01 7.528e+01 1.110e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 02:18:24,488 INFO [train.py:1114] (1/4) Epoch 17, batch 6850, loss[loss=0.1617, simple_loss=0.265, pruned_loss=0.02921, over 4698.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2683, pruned_loss=0.04325, over 940702.73 frames. 
], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:18:26,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=227208.0, ans=15.0 +2024-07-29 02:18:29,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=227208.0, ans=0.0 +2024-07-29 02:18:31,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=227221.33333333334, ans=0.0 +2024-07-29 02:18:32,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=227221.33333333334, ans=0.125 +2024-07-29 02:18:33,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=227221.33333333334, ans=0.0 +2024-07-29 02:18:38,278 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.55 vs. limit=15.0 +2024-07-29 02:18:38,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=227234.66666666666, ans=0.125 +2024-07-29 02:18:47,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=227248.0, ans=0.125 +2024-07-29 02:18:52,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=227261.33333333334, ans=0.125 +2024-07-29 02:18:52,823 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.09 vs. limit=22.5 +2024-07-29 02:18:58,428 INFO [train.py:1114] (1/4) Epoch 17, batch 6900, loss[loss=0.1401, simple_loss=0.2268, pruned_loss=0.02671, over 4967.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2678, pruned_loss=0.04298, over 942841.96 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:19:10,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=227288.0, ans=0.1 +2024-07-29 02:19:12,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=227301.33333333334, ans=0.2 +2024-07-29 02:19:19,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227314.66666666666, ans=0.1 +2024-07-29 02:19:20,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.39 vs. limit=10.0 +2024-07-29 02:19:24,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=227314.66666666666, ans=0.125 +2024-07-29 02:19:29,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=227328.0, ans=0.0 +2024-07-29 02:19:30,580 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.673e+01 6.337e+01 7.070e+01 9.910e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 02:19:31,920 INFO [train.py:1114] (1/4) Epoch 17, batch 6950, loss[loss=0.1745, simple_loss=0.2661, pruned_loss=0.04149, over 4543.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2673, pruned_loss=0.04284, over 939720.67 frames. 
], batch size: 10, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:19:43,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=227354.66666666666, ans=0.2 +2024-07-29 02:19:46,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227368.0, ans=0.1 +2024-07-29 02:19:59,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=227394.66666666666, ans=0.2 +2024-07-29 02:20:06,912 INFO [train.py:1114] (1/4) Epoch 17, batch 7000, loss[loss=0.1962, simple_loss=0.2811, pruned_loss=0.05564, over 4604.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2669, pruned_loss=0.04271, over 938387.48 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:20:15,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=227421.33333333334, ans=0.125 +2024-07-29 02:20:20,919 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=227434.66666666666, ans=0.0 +2024-07-29 02:20:21,857 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.03 vs. limit=15.0 +2024-07-29 02:20:24,050 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227434.66666666666, ans=0.1 +2024-07-29 02:20:26,472 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-07-29 02:20:38,598 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.627e+01 5.582e+01 6.064e+01 6.691e+01 1.096e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 02:20:39,942 INFO [train.py:1114] (1/4) Epoch 17, batch 7050, loss[loss=0.2007, simple_loss=0.277, pruned_loss=0.0622, over 4739.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2672, pruned_loss=0.04304, over 941632.21 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:20:41,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=227474.66666666666, ans=0.125 +2024-07-29 02:20:49,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=227488.0, ans=0.125 +2024-07-29 02:21:16,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=227528.0, ans=0.2 +2024-07-29 02:21:16,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=227528.0, ans=0.0 +2024-07-29 02:21:20,633 INFO [train.py:1114] (1/4) Epoch 17, batch 7100, loss[loss=0.1955, simple_loss=0.2817, pruned_loss=0.0547, over 4807.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.267, pruned_loss=0.04338, over 936255.87 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:21:21,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=227541.33333333334, ans=0.125 +2024-07-29 02:21:26,739 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.51 vs. 
limit=15.0 +2024-07-29 02:21:31,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227554.66666666666, ans=0.1 +2024-07-29 02:21:31,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.23 vs. limit=15.0 +2024-07-29 02:21:34,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=227568.0, ans=0.125 +2024-07-29 02:21:39,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=227568.0, ans=0.2 +2024-07-29 02:21:46,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=227581.33333333334, ans=0.2 +2024-07-29 02:21:52,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+01 5.574e+01 6.289e+01 7.294e+01 1.340e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-29 02:21:54,445 INFO [train.py:1114] (1/4) Epoch 17, batch 7150, loss[loss=0.1853, simple_loss=0.2737, pruned_loss=0.0484, over 4547.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2654, pruned_loss=0.04288, over 937443.83 frames. ], batch size: 21, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:22:18,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=227648.0, ans=0.2 +2024-07-29 02:22:27,907 INFO [train.py:1114] (1/4) Epoch 17, batch 7200, loss[loss=0.191, simple_loss=0.2816, pruned_loss=0.05017, over 4799.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2674, pruned_loss=0.04373, over 937611.09 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:22:30,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=227674.66666666666, ans=0.015 +2024-07-29 02:22:31,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=227674.66666666666, ans=0.125 +2024-07-29 02:22:50,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=227714.66666666666, ans=0.2 +2024-07-29 02:22:54,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227728.0, ans=0.125 +2024-07-29 02:22:59,857 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.624e+01 6.163e+01 6.917e+01 1.062e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 02:23:01,133 INFO [train.py:1114] (1/4) Epoch 17, batch 7250, loss[loss=0.1577, simple_loss=0.2507, pruned_loss=0.03233, over 4862.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2675, pruned_loss=0.04379, over 939368.53 frames. 
], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:23:01,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=227741.33333333334, ans=0.0 +2024-07-29 02:23:06,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227741.33333333334, ans=0.1 +2024-07-29 02:23:12,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227754.66666666666, ans=0.1 +2024-07-29 02:23:35,256 INFO [train.py:1114] (1/4) Epoch 17, batch 7300, loss[loss=0.147, simple_loss=0.2354, pruned_loss=0.02935, over 4846.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2668, pruned_loss=0.04327, over 939738.31 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:23:43,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=227821.33333333334, ans=0.125 +2024-07-29 02:23:44,308 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-07-29 02:23:46,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=227821.33333333334, ans=0.125 +2024-07-29 02:23:58,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=227848.0, ans=0.07 +2024-07-29 02:24:02,304 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.08 vs. limit=15.0 +2024-07-29 02:24:02,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=227861.33333333334, ans=0.0 +2024-07-29 02:24:02,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=227861.33333333334, ans=0.0 +2024-07-29 02:24:07,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.670e+01 6.102e+01 6.863e+01 9.457e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 02:24:08,390 INFO [train.py:1114] (1/4) Epoch 17, batch 7350, loss[loss=0.1762, simple_loss=0.2655, pruned_loss=0.04342, over 4649.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2672, pruned_loss=0.0433, over 938883.96 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:24:09,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227874.66666666666, ans=0.125 +2024-07-29 02:24:13,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227874.66666666666, ans=0.125 +2024-07-29 02:24:14,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0 +2024-07-29 02:24:27,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.95 vs. 
limit=15.0 +2024-07-29 02:24:28,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227901.33333333334, ans=0.125 +2024-07-29 02:24:33,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227901.33333333334, ans=0.125 +2024-07-29 02:24:35,634 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:24:46,992 INFO [train.py:1114] (1/4) Epoch 17, batch 7400, loss[loss=0.191, simple_loss=0.2728, pruned_loss=0.05463, over 4702.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2668, pruned_loss=0.04318, over 940078.09 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:24:56,131 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.33 vs. limit=15.0 +2024-07-29 02:25:04,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=227968.0, ans=0.2 +2024-07-29 02:25:05,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=227968.0, ans=0.2 +2024-07-29 02:25:18,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=227994.66666666666, ans=0.125 +2024-07-29 02:25:20,899 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.692e+01 6.442e+01 7.535e+01 1.153e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 02:25:22,142 INFO [train.py:1114] (1/4) Epoch 17, batch 7450, loss[loss=0.136, simple_loss=0.2218, pruned_loss=0.02513, over 4606.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2657, pruned_loss=0.043, over 937530.27 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:25:31,352 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=228021.33333333334, ans=0.125 +2024-07-29 02:25:31,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228021.33333333334, ans=0.1 +2024-07-29 02:25:34,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. 
limit=15.0 +2024-07-29 02:25:36,031 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=228034.66666666666, ans=0.5 +2024-07-29 02:25:41,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=228048.0, ans=0.0 +2024-07-29 02:25:46,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=228048.0, ans=0.05 +2024-07-29 02:25:46,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228048.0, ans=0.125 +2024-07-29 02:25:47,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=228048.0, ans=0.1 +2024-07-29 02:25:50,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=228061.33333333334, ans=0.09899494936611666 +2024-07-29 02:25:55,073 INFO [train.py:1114] (1/4) Epoch 17, batch 7500, loss[loss=0.2534, simple_loss=0.3215, pruned_loss=0.0926, over 3351.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2662, pruned_loss=0.04298, over 935694.36 frames. ], batch size: 35, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:25:59,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=228074.66666666666, ans=0.125 +2024-07-29 02:26:03,472 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.73 vs. limit=22.5 +2024-07-29 02:26:14,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228114.66666666666, ans=0.1 +2024-07-29 02:26:19,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.63 vs. limit=15.0 +2024-07-29 02:26:25,322 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=228128.0, ans=0.125 +2024-07-29 02:26:26,552 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.692e+01 6.151e+01 7.079e+01 1.117e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 02:26:27,910 INFO [train.py:1114] (1/4) Epoch 17, batch 7550, loss[loss=0.1995, simple_loss=0.2919, pruned_loss=0.05356, over 4619.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2672, pruned_loss=0.04317, over 935466.28 frames. 
], batch size: 17, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:26:28,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228141.33333333334, ans=0.125 +2024-07-29 02:26:28,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=228141.33333333334, ans=0.0 +2024-07-29 02:26:30,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=228141.33333333334, ans=0.0 +2024-07-29 02:26:32,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=228141.33333333334, ans=0.0 +2024-07-29 02:26:41,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228168.0, ans=0.125 +2024-07-29 02:26:45,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=228168.0, ans=0.0 +2024-07-29 02:26:47,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=228181.33333333334, ans=0.2 +2024-07-29 02:26:49,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=228181.33333333334, ans=0.125 +2024-07-29 02:27:00,381 INFO [train.py:1114] (1/4) Epoch 17, batch 7600, loss[loss=0.1839, simple_loss=0.2842, pruned_loss=0.04176, over 4814.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04287, over 937167.73 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:27:01,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=228208.0, ans=0.0 +2024-07-29 02:27:07,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=228221.33333333334, ans=0.025 +2024-07-29 02:27:21,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=228248.0, ans=0.025 +2024-07-29 02:27:29,631 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228261.33333333334, ans=0.125 +2024-07-29 02:27:32,118 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.577e+01 6.101e+01 6.985e+01 1.081e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 02:27:33,473 INFO [train.py:1114] (1/4) Epoch 17, batch 7650, loss[loss=0.1588, simple_loss=0.2583, pruned_loss=0.02967, over 4928.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2658, pruned_loss=0.04206, over 936469.35 frames. ], batch size: 12, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:06,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228341.33333333334, ans=0.1 +2024-07-29 02:28:06,775 INFO [train.py:1114] (1/4) Epoch 17, batch 7700, loss[loss=0.1583, simple_loss=0.248, pruned_loss=0.03425, over 4686.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2656, pruned_loss=0.04253, over 934112.48 frames. 
], batch size: 13, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:08,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=228341.33333333334, ans=0.125 +2024-07-29 02:28:09,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228341.33333333334, ans=0.1 +2024-07-29 02:28:11,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.95 vs. limit=6.0 +2024-07-29 02:28:15,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.30 vs. limit=15.0 +2024-07-29 02:28:19,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=228368.0, ans=15.0 +2024-07-29 02:28:28,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228381.33333333334, ans=0.1 +2024-07-29 02:28:29,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=228381.33333333334, ans=0.125 +2024-07-29 02:28:36,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228394.66666666666, ans=0.125 +2024-07-29 02:28:38,551 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.830e+01 5.778e+01 6.221e+01 6.817e+01 1.028e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 02:28:39,812 INFO [train.py:1114] (1/4) Epoch 17, batch 7750, loss[loss=0.1775, simple_loss=0.2812, pruned_loss=0.03688, over 4927.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2668, pruned_loss=0.04273, over 935462.55 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:47,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-07-29 02:28:49,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.33 vs. limit=15.0 +2024-07-29 02:28:56,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=228434.66666666666, ans=0.07 +2024-07-29 02:29:02,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=228448.0, ans=0.95 +2024-07-29 02:29:06,561 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=228461.33333333334, ans=0.125 +2024-07-29 02:29:13,584 INFO [train.py:1114] (1/4) Epoch 17, batch 7800, loss[loss=0.1636, simple_loss=0.2644, pruned_loss=0.03142, over 4670.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2666, pruned_loss=0.04265, over 937034.12 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:29:13,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=228474.66666666666, ans=0.2 +2024-07-29 02:29:20,287 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.38 vs. 
limit=12.0 +2024-07-29 02:29:26,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=228501.33333333334, ans=0.04949747468305833 +2024-07-29 02:29:45,761 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.601e+01 6.061e+01 6.909e+01 9.922e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 02:29:45,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=228528.0, ans=0.025 +2024-07-29 02:29:47,130 INFO [train.py:1114] (1/4) Epoch 17, batch 7850, loss[loss=0.1496, simple_loss=0.2284, pruned_loss=0.0354, over 4521.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2674, pruned_loss=0.04311, over 935731.50 frames. ], batch size: 10, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:29:49,526 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.53 vs. limit=22.5 +2024-07-29 02:29:51,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=228541.33333333334, ans=0.0 +2024-07-29 02:29:52,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=228541.33333333334, ans=0.0 +2024-07-29 02:30:00,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228568.0, ans=0.125 +2024-07-29 02:30:05,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=228568.0, ans=22.5 +2024-07-29 02:30:07,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=228581.33333333334, ans=0.125 +2024-07-29 02:30:12,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=228581.33333333334, ans=0.05 +2024-07-29 02:30:20,613 INFO [train.py:1114] (1/4) Epoch 17, batch 7900, loss[loss=0.1713, simple_loss=0.2679, pruned_loss=0.03736, over 4869.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2679, pruned_loss=0.04321, over 932909.20 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:23,206 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=228608.0, ans=0.05 +2024-07-29 02:30:33,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=228634.66666666666, ans=0.0 +2024-07-29 02:30:48,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=228661.33333333334, ans=0.0 +2024-07-29 02:30:51,949 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.771e+01 6.375e+01 7.176e+01 1.150e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-29 02:30:53,269 INFO [train.py:1114] (1/4) Epoch 17, batch 7950, loss[loss=0.2109, simple_loss=0.2903, pruned_loss=0.06576, over 3368.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2667, pruned_loss=0.0423, over 935171.25 frames. 
], batch size: 36, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:59,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=228688.0, ans=0.125 +2024-07-29 02:31:03,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=228688.0, ans=0.125 +2024-07-29 02:31:19,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=228701.33333333334, ans=0.0 +2024-07-29 02:31:19,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228701.33333333334, ans=0.125 +2024-07-29 02:31:26,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=228714.66666666666, ans=0.125 +2024-07-29 02:31:28,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=228714.66666666666, ans=0.125 +2024-07-29 02:31:43,742 INFO [train.py:1114] (1/4) Epoch 17, batch 8000, loss[loss=0.1674, simple_loss=0.2525, pruned_loss=0.0412, over 4615.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2654, pruned_loss=0.04237, over 934672.21 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:31:52,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=228754.66666666666, ans=0.125 +2024-07-29 02:31:56,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=228768.0, ans=0.0 +2024-07-29 02:32:00,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=228768.0, ans=0.0 +2024-07-29 02:32:17,772 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.673e+01 6.449e+01 7.589e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 02:32:19,117 INFO [train.py:1114] (1/4) Epoch 17, batch 8050, loss[loss=0.1437, simple_loss=0.2364, pruned_loss=0.02548, over 4818.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2665, pruned_loss=0.04233, over 934675.24 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:32:19,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=228808.0, ans=0.2 +2024-07-29 02:32:45,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228848.0, ans=0.1 +2024-07-29 02:32:46,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.19 vs. limit=15.0 +2024-07-29 02:32:53,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228861.33333333334, ans=0.125 +2024-07-29 02:32:54,992 INFO [train.py:1114] (1/4) Epoch 17, batch 8100, loss[loss=0.1791, simple_loss=0.2746, pruned_loss=0.04179, over 4794.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2685, pruned_loss=0.04327, over 934741.28 frames. 
], batch size: 15, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:33:13,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=228901.33333333334, ans=0.125 +2024-07-29 02:33:15,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=228914.66666666666, ans=0.0 +2024-07-29 02:33:19,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.85 vs. limit=22.5 +2024-07-29 02:33:24,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228928.0, ans=0.125 +2024-07-29 02:33:28,141 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.750e+01 6.401e+01 7.734e+01 1.146e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 02:33:29,254 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.23 vs. limit=6.0 +2024-07-29 02:33:29,427 INFO [train.py:1114] (1/4) Epoch 17, batch 8150, loss[loss=0.1481, simple_loss=0.241, pruned_loss=0.02761, over 4804.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2671, pruned_loss=0.04313, over 938107.86 frames. ], batch size: 15, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:33:36,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228954.66666666666, ans=0.125 +2024-07-29 02:33:52,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228981.33333333334, ans=0.125 +2024-07-29 02:34:02,001 INFO [train.py:1114] (1/4) Epoch 17, batch 8200, loss[loss=0.2065, simple_loss=0.29, pruned_loss=0.06149, over 4808.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2665, pruned_loss=0.04276, over 938763.17 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:34:04,880 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.92 vs. limit=15.0 +2024-07-29 02:34:07,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229008.0, ans=0.0 +2024-07-29 02:34:08,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=229021.33333333334, ans=0.125 +2024-07-29 02:34:10,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=229021.33333333334, ans=0.2 +2024-07-29 02:34:11,593 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. 
limit=15.0 +2024-07-29 02:34:19,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229034.66666666666, ans=0.1 +2024-07-29 02:34:20,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=229034.66666666666, ans=0.125 +2024-07-29 02:34:22,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=229048.0, ans=0.2 +2024-07-29 02:34:34,883 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.838e+01 5.522e+01 6.074e+01 7.199e+01 1.525e+02, threshold=1.215e+02, percent-clipped=1.0 +2024-07-29 02:34:36,181 INFO [train.py:1114] (1/4) Epoch 17, batch 8250, loss[loss=0.1516, simple_loss=0.2439, pruned_loss=0.02966, over 4895.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2669, pruned_loss=0.04278, over 939075.66 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:34:39,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.05 vs. limit=15.0 +2024-07-29 02:34:56,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.79 vs. limit=15.0 +2024-07-29 02:34:59,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=229114.66666666666, ans=0.125 +2024-07-29 02:35:03,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0 +2024-07-29 02:35:06,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=229128.0, ans=0.05 +2024-07-29 02:35:10,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229128.0, ans=0.125 +2024-07-29 02:35:11,239 INFO [train.py:1114] (1/4) Epoch 17, batch 8300, loss[loss=0.1984, simple_loss=0.2795, pruned_loss=0.05862, over 4902.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2684, pruned_loss=0.04366, over 938789.35 frames. 
], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:35:12,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=229141.33333333334, ans=0.2 +2024-07-29 02:35:21,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=229154.66666666666, ans=0.0 +2024-07-29 02:35:21,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=229154.66666666666, ans=0.2 +2024-07-29 02:35:26,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=229168.0, ans=22.5 +2024-07-29 02:35:36,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=229194.66666666666, ans=22.5 +2024-07-29 02:35:44,146 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.675e+01 6.316e+01 6.956e+01 1.152e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 02:35:45,458 INFO [train.py:1114] (1/4) Epoch 17, batch 8350, loss[loss=0.1825, simple_loss=0.278, pruned_loss=0.04351, over 4803.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04301, over 941724.27 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:35:55,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=229221.33333333334, ans=0.025 +2024-07-29 02:36:01,240 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.06 vs. limit=10.0 +2024-07-29 02:36:09,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=229248.0, ans=0.0 +2024-07-29 02:36:17,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=229261.33333333334, ans=0.0 +2024-07-29 02:36:22,688 INFO [train.py:1114] (1/4) Epoch 17, batch 8400, loss[loss=0.1571, simple_loss=0.2381, pruned_loss=0.03804, over 4779.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2665, pruned_loss=0.04293, over 940242.33 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:36:26,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229274.66666666666, ans=0.125 +2024-07-29 02:36:33,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=229288.0, ans=0.0 +2024-07-29 02:36:35,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229288.0, ans=0.125 +2024-07-29 02:36:37,259 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.29 vs. 
limit=22.5 +2024-07-29 02:36:40,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229301.33333333334, ans=0.125 +2024-07-29 02:36:56,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=229328.0, ans=0.2 +2024-07-29 02:36:57,642 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.781e+01 6.432e+01 7.454e+01 1.243e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-29 02:36:58,928 INFO [train.py:1114] (1/4) Epoch 17, batch 8450, loss[loss=0.1732, simple_loss=0.2595, pruned_loss=0.04345, over 4789.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2675, pruned_loss=0.0431, over 938909.99 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:37:04,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229341.33333333334, ans=0.125 +2024-07-29 02:37:05,664 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.55 vs. limit=15.0 +2024-07-29 02:37:16,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=229368.0, ans=0.0 +2024-07-29 02:37:18,177 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:37:18,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229381.33333333334, ans=0.1 +2024-07-29 02:37:21,375 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229381.33333333334, ans=0.0 +2024-07-29 02:37:30,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=229408.0, ans=0.0 +2024-07-29 02:37:31,064 INFO [train.py:1114] (1/4) Epoch 17, batch 8500, loss[loss=0.1581, simple_loss=0.2454, pruned_loss=0.03536, over 4611.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2665, pruned_loss=0.04258, over 938968.36 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:37:36,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=229408.0, ans=0.125 +2024-07-29 02:37:41,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.79 vs. limit=15.0 +2024-07-29 02:37:49,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=229434.66666666666, ans=0.0 +2024-07-29 02:37:51,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.95 vs. limit=15.0 +2024-07-29 02:37:59,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.12 vs. 
limit=10.0 +2024-07-29 02:38:04,985 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.595e+01 6.449e+01 7.243e+01 1.266e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 02:38:06,371 INFO [train.py:1114] (1/4) Epoch 17, batch 8550, loss[loss=0.1453, simple_loss=0.2305, pruned_loss=0.03004, over 4816.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2654, pruned_loss=0.04226, over 940033.74 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:38:07,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=229474.66666666666, ans=0.04949747468305833 +2024-07-29 02:38:07,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=229474.66666666666, ans=0.2 +2024-07-29 02:38:07,972 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-07-29 02:38:10,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=229474.66666666666, ans=0.0 +2024-07-29 02:38:11,153 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=15.0 +2024-07-29 02:38:17,936 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-07-29 02:38:22,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=229501.33333333334, ans=0.125 +2024-07-29 02:38:27,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=229514.66666666666, ans=0.0 +2024-07-29 02:38:28,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=229514.66666666666, ans=0.125 +2024-07-29 02:38:39,389 INFO [train.py:1114] (1/4) Epoch 17, batch 8600, loss[loss=0.1833, simple_loss=0.2937, pruned_loss=0.03649, over 4797.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.04195, over 938929.76 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:38:57,120 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.75 vs. limit=15.0 +2024-07-29 02:38:59,672 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.26 vs. 
limit=12.0 +2024-07-29 02:39:00,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=229581.33333333334, ans=0.125 +2024-07-29 02:39:05,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229594.66666666666, ans=0.1 +2024-07-29 02:39:05,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=229594.66666666666, ans=0.5 +2024-07-29 02:39:09,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=229594.66666666666, ans=0.5 +2024-07-29 02:39:13,132 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.824e+01 6.675e+01 7.491e+01 1.199e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-29 02:39:13,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=229594.66666666666, ans=0.0 +2024-07-29 02:39:14,437 INFO [train.py:1114] (1/4) Epoch 17, batch 8650, loss[loss=0.203, simple_loss=0.2977, pruned_loss=0.05419, over 4881.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2654, pruned_loss=0.04264, over 940082.15 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:39:19,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=229608.0, ans=0.125 +2024-07-29 02:39:24,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229621.33333333334, ans=0.1 +2024-07-29 02:39:31,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=229634.66666666666, ans=0.2 +2024-07-29 02:39:32,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=229634.66666666666, ans=0.2 +2024-07-29 02:39:34,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=229648.0, ans=0.0 +2024-07-29 02:39:37,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=229648.0, ans=0.015 +2024-07-29 02:39:43,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229661.33333333334, ans=0.125 +2024-07-29 02:39:46,642 INFO [train.py:1114] (1/4) Epoch 17, batch 8700, loss[loss=0.1619, simple_loss=0.2547, pruned_loss=0.03452, over 4760.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.266, pruned_loss=0.04252, over 937830.85 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:40:00,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229701.33333333334, ans=0.1 +2024-07-29 02:40:00,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=229701.33333333334, ans=0.2 +2024-07-29 02:40:19,048 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.617e+01 6.057e+01 6.881e+01 1.135e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 02:40:20,339 INFO [train.py:1114] (1/4) Epoch 17, batch 8750, loss[loss=0.2143, simple_loss=0.3044, pruned_loss=0.06214, over 4694.00 frames. 
], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04239, over 936341.12 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:40:32,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=229754.66666666666, ans=0.2 +2024-07-29 02:40:38,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229768.0, ans=0.1 +2024-07-29 02:40:41,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=12.0 +2024-07-29 02:40:45,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=229781.33333333334, ans=0.0 +2024-07-29 02:40:48,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229781.33333333334, ans=0.125 +2024-07-29 02:40:49,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229794.66666666666, ans=0.1 +2024-07-29 02:40:50,368 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=229794.66666666666, ans=0.0 +2024-07-29 02:40:56,103 INFO [train.py:1114] (1/4) Epoch 17, batch 8800, loss[loss=0.1667, simple_loss=0.2599, pruned_loss=0.03677, over 4929.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2665, pruned_loss=0.04238, over 937137.70 frames. ], batch size: 14, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:41:01,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=229808.0, ans=0.0 +2024-07-29 02:41:02,225 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-29 02:41:14,955 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.45 vs. limit=22.5 +2024-07-29 02:41:19,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-29 02:41:21,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=229848.0, ans=0.125 +2024-07-29 02:41:28,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=229861.33333333334, ans=0.2 +2024-07-29 02:41:28,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229861.33333333334, ans=0.1 +2024-07-29 02:41:28,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.109e+01 6.683e+01 1.097e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 02:41:29,297 INFO [train.py:1114] (1/4) Epoch 17, batch 8850, loss[loss=0.1917, simple_loss=0.2862, pruned_loss=0.0486, over 4636.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2663, pruned_loss=0.04273, over 931791.67 frames. 
], batch size: 21, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:41:35,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229874.66666666666, ans=0.1 +2024-07-29 02:41:44,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=229888.0, ans=0.5 +2024-07-29 02:42:01,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=229928.0, ans=0.125 +2024-07-29 02:42:07,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=229941.33333333334, ans=0.2 +2024-07-29 02:42:08,015 INFO [train.py:1114] (1/4) Epoch 17, batch 8900, loss[loss=0.1657, simple_loss=0.2561, pruned_loss=0.0377, over 4944.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.267, pruned_loss=0.04329, over 929975.69 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:42:31,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229981.33333333334, ans=0.125 +2024-07-29 02:42:32,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=229981.33333333334, ans=10.0 +2024-07-29 02:42:36,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229994.66666666666, ans=0.125 +2024-07-29 02:42:42,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229994.66666666666, ans=0.1 +2024-07-29 02:42:43,285 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.920e+01 5.712e+01 6.272e+01 7.147e+01 1.085e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 02:42:43,948 INFO [train.py:1114] (1/4) Epoch 17, batch 8950, loss[loss=0.2093, simple_loss=0.3071, pruned_loss=0.05573, over 4542.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2674, pruned_loss=0.04341, over 930887.56 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:42:46,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=230008.0, ans=0.2 +2024-07-29 02:42:49,031 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:42:51,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.53 vs. limit=10.0 +2024-07-29 02:43:00,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230021.33333333334, ans=0.1 +2024-07-29 02:43:01,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=230021.33333333334, ans=0.125 +2024-07-29 02:43:05,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=22.5 +2024-07-29 02:43:26,312 INFO [train.py:1114] (1/4) Epoch 17, batch 9000, loss[loss=0.1535, simple_loss=0.2439, pruned_loss=0.03156, over 4640.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2659, pruned_loss=0.04274, over 934093.16 frames. 
], batch size: 12, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:43:26,313 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-29 02:43:35,537 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.8576, 4.5580, 4.0807, 4.6189], device='cuda:1')
+2024-07-29 02:43:37,870 INFO [train.py:1146] (1/4) Epoch 17, validation: loss=0.1619, simple_loss=0.2644, pruned_loss=0.02967, over 944034.00 frames.
+2024-07-29 02:43:37,870 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-29 02:43:45,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=230088.0, ans=0.0
+2024-07-29 02:43:50,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=230088.0, ans=0.0
+2024-07-29 02:43:52,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=230101.33333333334, ans=0.125
+2024-07-29 02:43:54,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=230101.33333333334, ans=0.025
+2024-07-29 02:43:59,848 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230114.66666666666, ans=0.0
+2024-07-29 02:44:05,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=230128.0, ans=0.0
+2024-07-29 02:44:12,196 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.566e+01 6.347e+01 7.363e+01 1.043e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-29 02:44:12,229 INFO [train.py:1114] (1/4) Epoch 17, batch 9050, loss[loss=0.1631, simple_loss=0.2415, pruned_loss=0.04231, over 4526.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2654, pruned_loss=0.04268, over 934684.57 frames. ], batch size: 10, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:44:17,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.06 vs. limit=22.5
+2024-07-29 02:44:20,862 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230154.66666666666, ans=0.1
+2024-07-29 02:44:23,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=230154.66666666666, ans=0.0
+2024-07-29 02:44:25,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=230168.0, ans=0.125
+2024-07-29 02:44:44,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0
+2024-07-29 02:44:46,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0
+2024-07-29 02:44:46,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=230194.66666666666, ans=0.125
+2024-07-29 02:44:47,571 INFO [train.py:1114] (1/4) Epoch 17, batch 9100, loss[loss=0.14, simple_loss=0.2378, pruned_loss=0.02105, over 4930.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2647, pruned_loss=0.04198, over 936904.44 frames. ], batch size: 14, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:44:48,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=230208.0, ans=0.125
+2024-07-29 02:44:52,712 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=230208.0, ans=0.2
+2024-07-29 02:44:54,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=230221.33333333334, ans=0.125
+2024-07-29 02:45:00,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=230234.66666666666, ans=0.0
+2024-07-29 02:45:09,533 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=230248.0, ans=10.0
+2024-07-29 02:45:17,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0
+2024-07-29 02:45:18,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=230261.33333333334, ans=0.125
+2024-07-29 02:45:18,613 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=230261.33333333334, ans=0.025
+2024-07-29 02:45:19,825 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.656e+01 6.287e+01 6.947e+01 9.623e+01, threshold=1.257e+02, percent-clipped=0.0
+2024-07-29 02:45:19,858 INFO [train.py:1114] (1/4) Epoch 17, batch 9150, loss[loss=0.1687, simple_loss=0.2693, pruned_loss=0.0341, over 4804.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2657, pruned_loss=0.04227, over 935722.25 frames. ], batch size: 14, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:45:19,997 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=230274.66666666666, ans=0.09899494936611666
+2024-07-29 02:45:35,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.39 vs. limit=22.5
+2024-07-29 02:45:40,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230314.66666666666, ans=0.1
+2024-07-29 02:45:52,561 INFO [train.py:1114] (1/4) Epoch 17, batch 9200, loss[loss=0.1722, simple_loss=0.258, pruned_loss=0.04318, over 4848.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2652, pruned_loss=0.0424, over 937672.86 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:45:54,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=15.0
+2024-07-29 02:46:02,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0
+2024-07-29 02:46:09,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=230368.0, ans=0.125
+2024-07-29 02:46:09,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=230368.0, ans=0.0
+2024-07-29 02:46:13,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230381.33333333334, ans=0.1
+2024-07-29 02:46:24,553 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.574e+01 6.025e+01 6.747e+01 8.782e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-29 02:46:24,589 INFO [train.py:1114] (1/4) Epoch 17, batch 9250, loss[loss=0.1846, simple_loss=0.2827, pruned_loss=0.04325, over 4635.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2653, pruned_loss=0.04205, over 938358.62 frames. ], batch size: 13, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:46:39,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=230434.66666666666, ans=0.125
+2024-07-29 02:46:39,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=230434.66666666666, ans=0.125
+2024-07-29 02:46:40,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230434.66666666666, ans=0.1
+2024-07-29 02:46:40,869 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.23 vs. limit=15.0
+2024-07-29 02:46:45,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.72 vs. limit=12.0
+2024-07-29 02:46:56,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.16 vs. limit=15.0
+2024-07-29 02:46:56,560 INFO [train.py:1114] (1/4) Epoch 17, batch 9300, loss[loss=0.1678, simple_loss=0.2663, pruned_loss=0.03459, over 4776.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.265, pruned_loss=0.042, over 938702.14 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:47:03,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=230488.0, ans=0.125
+2024-07-29 02:47:06,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=230488.0, ans=0.025
+2024-07-29 02:47:08,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230488.0, ans=0.1
+2024-07-29 02:47:17,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=230514.66666666666, ans=0.2
+2024-07-29 02:47:20,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=230514.66666666666, ans=0.0
+2024-07-29 02:47:28,549 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.719e+01 6.284e+01 7.337e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0
+2024-07-29 02:47:28,582 INFO [train.py:1114] (1/4) Epoch 17, batch 9350, loss[loss=0.1479, simple_loss=0.2351, pruned_loss=0.03032, over 4788.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.042, over 935449.06 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:47:29,845 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=230541.33333333334, ans=0.125
+2024-07-29 02:47:48,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=230581.33333333334, ans=0.125
+2024-07-29 02:47:50,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230581.33333333334, ans=0.1
+2024-07-29 02:47:52,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230581.33333333334, ans=0.0
+2024-07-29 02:47:54,095 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=230594.66666666666, ans=0.2
+2024-07-29 02:48:00,624 INFO [train.py:1114] (1/4) Epoch 17, batch 9400, loss[loss=0.1846, simple_loss=0.2844, pruned_loss=0.04242, over 4688.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2657, pruned_loss=0.04241, over 932982.31 frames. ], batch size: 13, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:48:00,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=230608.0, ans=0.0
+2024-07-29 02:48:02,954 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.12 vs. limit=15.0
+2024-07-29 02:48:07,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=230608.0, ans=0.0
+2024-07-29 02:48:13,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=230621.33333333334, ans=0.025
+2024-07-29 02:48:13,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=230621.33333333334, ans=0.2
+2024-07-29 02:48:19,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.92 vs. limit=6.0
+2024-07-29 02:48:22,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230648.0, ans=0.1
+2024-07-29 02:48:24,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=230648.0, ans=0.0
+2024-07-29 02:48:24,774 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=230648.0, ans=0.0
+2024-07-29 02:48:34,668 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.567e+01 6.049e+01 6.960e+01 9.210e+01, threshold=1.210e+02, percent-clipped=0.0
+2024-07-29 02:48:34,701 INFO [train.py:1114] (1/4) Epoch 17, batch 9450, loss[loss=0.1723, simple_loss=0.2641, pruned_loss=0.04025, over 4809.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2665, pruned_loss=0.04264, over 932149.89 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:48:38,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=230674.66666666666, ans=0.025
+2024-07-29 02:48:44,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230688.0, ans=0.1
+2024-07-29 02:48:49,600 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:48:57,381 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.73 vs. limit=10.0
+2024-07-29 02:49:04,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=230728.0, ans=0.125
+2024-07-29 02:49:06,174 INFO [train.py:1114] (1/4) Epoch 17, batch 9500, loss[loss=0.1626, simple_loss=0.2507, pruned_loss=0.03731, over 4700.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2665, pruned_loss=0.0425, over 934764.38 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:49:11,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=230754.66666666666, ans=0.125
+2024-07-29 02:49:15,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=230754.66666666666, ans=0.0
+2024-07-29 02:49:21,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=230768.0, ans=0.0
+2024-07-29 02:49:24,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=230768.0, ans=0.125
+2024-07-29 02:49:24,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=230781.33333333334, ans=0.125
+2024-07-29 02:49:27,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=230781.33333333334, ans=0.125
+2024-07-29 02:49:27,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230781.33333333334, ans=0.0
+2024-07-29 02:49:27,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230781.33333333334, ans=0.0
+2024-07-29 02:49:30,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0
+2024-07-29 02:49:37,900 INFO [train.py:1114] (1/4) Epoch 17, batch 9550, loss[loss=0.1486, simple_loss=0.2366, pruned_loss=0.03028, over 4779.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2663, pruned_loss=0.04237, over 932063.30 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:49:39,096 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.645e+01 6.246e+01 7.009e+01 1.042e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 02:49:40,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=230808.0, ans=0.0
+2024-07-29 02:49:44,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=230821.33333333334, ans=0.125
+2024-07-29 02:49:48,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=230821.33333333334, ans=0.125
+2024-07-29 02:49:55,831 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=230834.66666666666, ans=0.125
+2024-07-29 02:49:56,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=230848.0, ans=0.0
+2024-07-29 02:49:59,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230848.0, ans=0.1
+2024-07-29 02:50:01,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.13 vs. limit=22.5
+2024-07-29 02:50:09,770 INFO [train.py:1114] (1/4) Epoch 17, batch 9600, loss[loss=0.2372, simple_loss=0.3131, pruned_loss=0.08063, over 3424.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2664, pruned_loss=0.04232, over 931075.98 frames. ], batch size: 35, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:50:09,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=230874.66666666666, ans=0.2
+2024-07-29 02:50:12,796 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230874.66666666666, ans=0.125
+2024-07-29 02:50:24,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=15.0
+2024-07-29 02:50:36,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=230914.66666666666, ans=0.125
+2024-07-29 02:50:44,358 INFO [train.py:1114] (1/4) Epoch 17, batch 9650, loss[loss=0.1926, simple_loss=0.2843, pruned_loss=0.05051, over 4857.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2676, pruned_loss=0.04301, over 926779.40 frames. ], batch size: 16, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:50:44,985 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.884e+01 6.433e+01 7.222e+01 1.107e+02, threshold=1.287e+02, percent-clipped=0.0
+2024-07-29 02:50:45,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=230941.33333333334, ans=0.0
+2024-07-29 02:50:47,325 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.39 vs. limit=10.0
+2024-07-29 02:50:55,396 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.06 vs. limit=15.0
+2024-07-29 02:50:56,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.56 vs. limit=22.5
+2024-07-29 02:50:57,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=230968.0, ans=0.125
+2024-07-29 02:51:16,185 INFO [train.py:1114] (1/4) Epoch 17, batch 9700, loss[loss=0.1706, simple_loss=0.2624, pruned_loss=0.03939, over 4212.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04301, over 924674.44 frames. ], batch size: 25, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:51:22,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0
+2024-07-29 02:51:29,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=231034.66666666666, ans=0.125
+2024-07-29 02:51:29,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=231034.66666666666, ans=0.125
+2024-07-29 02:51:47,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231074.66666666666, ans=0.1
+2024-07-29 02:51:47,648 INFO [train.py:1114] (1/4) Epoch 17, batch 9750, loss[loss=0.1965, simple_loss=0.2812, pruned_loss=0.0559, over 4666.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2667, pruned_loss=0.04295, over 924686.61 frames. ], batch size: 15, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:51:48,239 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.114e+01 5.556e+01 6.243e+01 6.911e+01 1.115e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 02:51:49,283 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0
+2024-07-29 02:51:49,849 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0
+2024-07-29 02:51:50,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=231074.66666666666, ans=0.125
+2024-07-29 02:52:03,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=231101.33333333334, ans=0.0
+2024-07-29 02:52:07,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.57 vs. limit=12.0
+2024-07-29 02:52:15,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=231128.0, ans=0.125
+2024-07-29 02:52:15,203 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.65 vs. limit=10.0
+2024-07-29 02:52:19,056 INFO [train.py:1114] (1/4) Epoch 17, batch 9800, loss[loss=0.1545, simple_loss=0.2394, pruned_loss=0.03478, over 4710.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2655, pruned_loss=0.04265, over 924615.55 frames. ], batch size: 12, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:52:21,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231141.33333333334, ans=0.125
+2024-07-29 02:52:29,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=231154.66666666666, ans=0.125
+2024-07-29 02:52:30,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231154.66666666666, ans=0.1
+2024-07-29 02:52:32,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=231168.0, ans=0.2
+2024-07-29 02:52:38,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=231181.33333333334, ans=0.125
+2024-07-29 02:52:39,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=231181.33333333334, ans=0.025
+2024-07-29 02:52:43,420 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=6.0
+2024-07-29 02:52:44,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231194.66666666666, ans=0.125
+2024-07-29 02:52:50,077 INFO [train.py:1114] (1/4) Epoch 17, batch 9850, loss[loss=0.1822, simple_loss=0.2728, pruned_loss=0.04585, over 4907.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2647, pruned_loss=0.04251, over 927238.87 frames. ], batch size: 15, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:52:50,654 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.758e+01 6.441e+01 7.212e+01 9.230e+01, threshold=1.288e+02, percent-clipped=0.0
+2024-07-29 02:52:54,177 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-07-29 02:52:54,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=231208.0, ans=0.0
+2024-07-29 02:52:55,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231221.33333333334, ans=0.125
+2024-07-29 02:52:57,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=231221.33333333334, ans=0.0
+2024-07-29 02:52:59,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.41 vs. limit=22.5
+2024-07-29 02:53:22,317 INFO [train.py:1114] (1/4) Epoch 17, batch 9900, loss[loss=0.198, simple_loss=0.2884, pruned_loss=0.05384, over 4828.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.266, pruned_loss=0.04322, over 926932.57 frames. ], batch size: 16, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:53:24,520 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-29 02:53:28,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=231288.0, ans=0.125
+2024-07-29 02:53:39,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=231301.33333333334, ans=0.125
+2024-07-29 02:53:43,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=231314.66666666666, ans=0.0
+2024-07-29 02:53:53,561 INFO [train.py:1114] (1/4) Epoch 17, batch 9950, loss[loss=0.1174, simple_loss=0.1958, pruned_loss=0.01954, over 4798.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2672, pruned_loss=0.0442, over 929471.55 frames. ], batch size: 11, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:53:54,160 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.766e+01 6.356e+01 7.245e+01 1.147e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-29 02:53:59,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=231354.66666666666, ans=0.2
+2024-07-29 02:54:09,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231368.0, ans=0.1
+2024-07-29 02:54:24,959 INFO [train.py:1114] (1/4) Epoch 17, batch 10000, loss[loss=0.1887, simple_loss=0.2797, pruned_loss=0.04885, over 4642.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2711, pruned_loss=0.04534, over 927114.89 frames. ], batch size: 16, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:54:26,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=231408.0, ans=0.0
+2024-07-29 02:54:27,089 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.39 vs. limit=15.0
+2024-07-29 02:54:33,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231421.33333333334, ans=0.125
+2024-07-29 02:54:34,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.29 vs. limit=15.0
+2024-07-29 02:54:46,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231448.0, ans=0.125
+2024-07-29 02:55:00,062 INFO [train.py:1114] (1/4) Epoch 17, batch 10050, loss[loss=0.2013, simple_loss=0.2839, pruned_loss=0.05942, over 3544.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2743, pruned_loss=0.04702, over 916668.24 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:55:00,783 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 5.675e+01 6.187e+01 6.969e+01 9.766e+01, threshold=1.237e+02, percent-clipped=0.0
+2024-07-29 02:55:04,369 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:55:05,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231474.66666666666, ans=0.125
+2024-07-29 02:55:09,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231488.0, ans=0.0
+2024-07-29 02:55:23,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231514.66666666666, ans=0.1
+2024-07-29 02:55:25,909 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231514.66666666666, ans=0.1
+2024-07-29 02:55:27,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231528.0, ans=0.1
+2024-07-29 02:55:27,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=231528.0, ans=0.025
+2024-07-29 02:55:29,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=231528.0, ans=0.0
+2024-07-29 02:55:35,507 INFO [train.py:1114] (1/4) Epoch 17, batch 10100, loss[loss=0.1881, simple_loss=0.2674, pruned_loss=0.05436, over 3311.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2778, pruned_loss=0.05092, over 864304.73 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:55:41,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=231541.33333333334, ans=0.0
+2024-07-29 02:55:42,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=231541.33333333334, ans=0.0
+2024-07-29 02:55:50,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=231554.66666666666, ans=0.125
+2024-07-29 02:55:50,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.90 vs. limit=10.0
+2024-07-29 02:55:52,403 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=231554.66666666666, ans=0.2
+2024-07-29 02:55:52,405 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:55:54,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=231568.0, ans=0.025
+2024-07-29 02:56:09,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231594.66666666666, ans=0.125
+2024-07-29 02:56:12,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=231594.66666666666, ans=0.125
+2024-07-29 02:56:13,982 INFO [train.py:1114] (1/4) Epoch 17, batch 10150, loss[loss=0.217, simple_loss=0.3117, pruned_loss=0.06116, over 3301.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2814, pruned_loss=0.05463, over 821388.75 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:56:14,617 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+01 6.975e+01 7.380e+01 8.032e+01 1.303e+02, threshold=1.476e+02, percent-clipped=1.0
+2024-07-29 02:56:16,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=231608.0, ans=0.2
+2024-07-29 02:56:32,183 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.61 vs. limit=22.5
+2024-07-29 02:56:36,584 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.03 vs. limit=15.0
+2024-07-29 02:56:41,244 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:56:42,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=231661.33333333334, ans=0.125
+2024-07-29 02:56:42,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=231661.33333333334, ans=0.125
+2024-07-29 02:56:45,756 INFO [train.py:1114] (1/4) Epoch 17, batch 10200, loss[loss=0.1806, simple_loss=0.2641, pruned_loss=0.04853, over 3463.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2837, pruned_loss=0.05709, over 789665.05 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0
+2024-07-29 02:56:46,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0
+2024-07-29 02:56:47,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231674.66666666666, ans=0.0
+2024-07-29 02:56:49,120 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:58:12,614 INFO [train.py:1114] (1/4) Epoch 18, batch 0, loss[loss=0.1604, simple_loss=0.2565, pruned_loss=0.03218, over 4842.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2565, pruned_loss=0.03218, over 4842.00 frames. ], batch size: 12, lr: 4.20e-03, grad_scale: 32.0
+2024-07-29 02:58:12,615 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-29 02:58:24,197 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1629, simple_loss=0.2668, pruned_loss=0.02955, over 944034.00 frames.
+2024-07-29 02:58:24,198 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-29 02:58:26,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231705.33333333334, ans=0.1
+2024-07-29 02:58:26,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231705.33333333334, ans=0.125
+2024-07-29 02:58:27,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=231705.33333333334, ans=0.2
+2024-07-29 02:58:31,781 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:58:37,970 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231732.0, ans=0.1
+2024-07-29 02:58:44,123 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 6.224e+01 6.772e+01 7.416e+01 8.385e+01, threshold=1.354e+02, percent-clipped=0.0
+2024-07-29 02:58:49,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=231745.33333333334, ans=0.125
+2024-07-29 02:58:53,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.40 vs. limit=15.0
+2024-07-29 02:58:54,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=231758.66666666666, ans=0.125
+2024-07-29 02:58:57,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231758.66666666666, ans=0.125
+2024-07-29 02:58:59,031 INFO [train.py:1114] (1/4) Epoch 18, batch 50, loss[loss=0.1516, simple_loss=0.2484, pruned_loss=0.0274, over 4606.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2687, pruned_loss=0.04416, over 206074.05 frames. ], batch size: 11, lr: 4.20e-03, grad_scale: 32.0
+2024-07-29 02:59:15,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231798.66666666666, ans=0.0
+2024-07-29 02:59:18,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=231812.0, ans=0.125
+2024-07-29 02:59:20,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=231812.0, ans=0.0
+2024-07-29 02:59:34,494 INFO [train.py:1114] (1/4) Epoch 18, batch 100, loss[loss=0.1696, simple_loss=0.2532, pruned_loss=0.04302, over 4641.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04378, over 365269.70 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 02:59:40,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231852.0, ans=0.0
+2024-07-29 02:59:44,747 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0
+2024-07-29 02:59:51,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=231865.33333333334, ans=0.125
+2024-07-29 02:59:53,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=231865.33333333334, ans=0.025
+2024-07-29 02:59:54,427 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.467e+01 5.995e+01 6.645e+01 8.215e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-29 02:59:54,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=231865.33333333334, ans=0.0
+2024-07-29 03:00:08,834 INFO [train.py:1114] (1/4) Epoch 18, batch 150, loss[loss=0.1486, simple_loss=0.2376, pruned_loss=0.0298, over 4624.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.04281, over 494327.01 frames. ], batch size: 11, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:00:16,233 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=231918.66666666666, ans=0.125
+2024-07-29 03:00:18,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231918.66666666666, ans=0.125
+2024-07-29 03:00:19,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=231918.66666666666, ans=0.0
+2024-07-29 03:00:25,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=231932.0, ans=0.125
+2024-07-29 03:00:28,963 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.37 vs. limit=10.0
+2024-07-29 03:00:42,581 INFO [train.py:1114] (1/4) Epoch 18, batch 200, loss[loss=0.197, simple_loss=0.2943, pruned_loss=0.04982, over 4500.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2656, pruned_loss=0.04211, over 593585.59 frames. ], batch size: 21, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:00:50,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=231985.33333333334, ans=0.125
+2024-07-29 03:01:08,541 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.881e+01 6.844e+01 7.850e+01 1.252e+02, threshold=1.369e+02, percent-clipped=1.0
+2024-07-29 03:01:09,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=232012.0, ans=0.125
+2024-07-29 03:01:11,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=232012.0, ans=0.0
+2024-07-29 03:01:50,240 INFO [train.py:1114] (1/4) Epoch 18, batch 250, loss[loss=0.1738, simple_loss=0.2634, pruned_loss=0.04209, over 4622.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2661, pruned_loss=0.04279, over 670474.72 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:01:57,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=232052.0, ans=0.0
+2024-07-29 03:01:57,425 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:01:58,729 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=232052.0, ans=0.07
+2024-07-29 03:02:23,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=232052.0, ans=0.0
+2024-07-29 03:02:30,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232065.33333333334, ans=0.125
+2024-07-29 03:02:39,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=232078.66666666666, ans=0.05
+2024-07-29 03:02:39,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=232078.66666666666, ans=0.125
+2024-07-29 03:02:58,737 INFO [train.py:1114] (1/4) Epoch 18, batch 300, loss[loss=0.1588, simple_loss=0.2515, pruned_loss=0.0331, over 4796.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2666, pruned_loss=0.04271, over 730126.27 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:03:09,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=232118.66666666666, ans=0.125
+2024-07-29 03:03:17,681 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.434e+01 5.467e+01 6.061e+01 6.995e+01 1.248e+02, threshold=1.212e+02, percent-clipped=0.0
+2024-07-29 03:03:32,388 INFO [train.py:1114] (1/4) Epoch 18, batch 350, loss[loss=0.1655, simple_loss=0.2433, pruned_loss=0.04386, over 4936.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2657, pruned_loss=0.04205, over 775881.08 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:03:33,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=232172.0, ans=0.025
+2024-07-29 03:03:34,956 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.58 vs. limit=15.0
+2024-07-29 03:03:41,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232185.33333333334, ans=0.125
+2024-07-29 03:03:53,247 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 03:04:01,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=232225.33333333334, ans=0.125
+2024-07-29 03:04:05,785 INFO [train.py:1114] (1/4) Epoch 18, batch 400, loss[loss=0.1944, simple_loss=0.2754, pruned_loss=0.05672, over 4692.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2655, pruned_loss=0.04219, over 813198.85 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:04:20,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=15.0
+2024-07-29 03:04:26,839 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.483e+01 6.110e+01 6.835e+01 9.648e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-29 03:04:37,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.64 vs. limit=15.0
+2024-07-29 03:04:39,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=232292.0, ans=0.025
+2024-07-29 03:04:41,609 INFO [train.py:1114] (1/4) Epoch 18, batch 450, loss[loss=0.1625, simple_loss=0.262, pruned_loss=0.03147, over 4641.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04211, over 838688.10 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:04:42,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232305.33333333334, ans=0.125
+2024-07-29 03:04:42,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0
+2024-07-29 03:04:43,344 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. limit=6.0
+2024-07-29 03:04:43,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=232305.33333333334, ans=0.035
+2024-07-29 03:04:51,294 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0
+2024-07-29 03:04:51,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.38 vs. limit=22.5
+2024-07-29 03:04:59,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.40 vs. limit=22.5
+2024-07-29 03:05:00,724 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.35 vs. limit=6.0
+2024-07-29 03:05:05,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=232345.33333333334, ans=0.125
+2024-07-29 03:05:06,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=15.0
+2024-07-29 03:05:15,090 INFO [train.py:1114] (1/4) Epoch 18, batch 500, loss[loss=0.2097, simple_loss=0.2913, pruned_loss=0.064, over 4677.00 frames. ], tot_loss[loss=0.174, simple_loss=0.265, pruned_loss=0.04153, over 861179.68 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:05:16,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232372.0, ans=0.125
+2024-07-29 03:05:20,856 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=232372.0, ans=0.125
+2024-07-29 03:05:24,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=232385.33333333334, ans=0.0
+2024-07-29 03:05:25,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=232385.33333333334, ans=0.2
+2024-07-29 03:05:28,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=232398.66666666666, ans=0.125
+2024-07-29 03:05:34,179 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.530e+01 5.559e+01 6.071e+01 6.831e+01 9.618e+01, threshold=1.214e+02, percent-clipped=0.0
+2024-07-29 03:05:37,030 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=232412.0, ans=0.025
+2024-07-29 03:05:46,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=232425.33333333334, ans=0.125
+2024-07-29 03:05:48,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232438.66666666666, ans=0.125
+2024-07-29 03:05:48,918 INFO [train.py:1114] (1/4) Epoch 18, batch 550, loss[loss=0.1994, simple_loss=0.2875, pruned_loss=0.05567, over 4619.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2648, pruned_loss=0.0416, over 877362.70 frames. ], batch size: 17, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:05:55,859 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=232452.0, ans=0.0
+2024-07-29 03:06:02,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=232465.33333333334, ans=0.0
+2024-07-29 03:06:07,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=232465.33333333334, ans=0.125
+2024-07-29 03:06:10,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=232478.66666666666, ans=0.125
+2024-07-29 03:06:26,514 INFO [train.py:1114] (1/4) Epoch 18, batch 600, loss[loss=0.192, simple_loss=0.2847, pruned_loss=0.04963, over 4613.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2656, pruned_loss=0.04197, over 891922.81 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:06:37,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.24 vs. limit=15.0
+2024-07-29 03:06:44,912 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.582e+01 6.053e+01 7.206e+01 1.079e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-29 03:06:51,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=232545.33333333334, ans=0.125
+2024-07-29 03:07:03,964 INFO [train.py:1114] (1/4) Epoch 18, batch 650, loss[loss=0.1759, simple_loss=0.2669, pruned_loss=0.04248, over 4763.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2654, pruned_loss=0.04183, over 903445.52 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:07:06,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232572.0, ans=0.1
+2024-07-29 03:07:12,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=232585.33333333334, ans=0.0
+2024-07-29 03:07:16,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0
+2024-07-29 03:07:17,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232585.33333333334, ans=0.1
+2024-07-29 03:07:26,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=232598.66666666666, ans=0.09899494936611666
+2024-07-29 03:07:34,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=232612.0, ans=0.2
+2024-07-29 03:07:36,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232612.0, ans=0.1
+2024-07-29 03:07:43,820 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=232625.33333333334, ans=0.0
+2024-07-29 03:07:46,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=232638.66666666666, ans=0.0
+2024-07-29 03:07:47,033 INFO [train.py:1114] (1/4) Epoch 18, batch 700, loss[loss=0.1866, simple_loss=0.2733, pruned_loss=0.05002, over 4645.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2659, pruned_loss=0.04217, over 911029.59 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:07:56,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=232652.0, ans=0.2
+2024-07-29 03:08:03,354 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.76 vs. limit=22.5
+2024-07-29 03:08:03,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=232665.33333333334, ans=0.0
+2024-07-29 03:08:05,612 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.680e+01 6.121e+01 6.839e+01 1.044e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-29 03:08:09,263 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.17 vs. limit=22.5
+2024-07-29 03:08:14,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=15.0
+2024-07-29 03:08:16,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=232692.0, ans=0.2
+2024-07-29 03:08:20,417 INFO [train.py:1114] (1/4) Epoch 18, batch 750, loss[loss=0.1854, simple_loss=0.2851, pruned_loss=0.04286, over 4696.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2661, pruned_loss=0.04214, over 917775.46 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:08:20,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.68 vs. limit=15.0
+2024-07-29 03:08:29,741 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=232718.66666666666, ans=0.2
+2024-07-29 03:08:54,510 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.51 vs. limit=15.0
+2024-07-29 03:09:02,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=232758.66666666666, ans=0.0
+2024-07-29 03:09:10,284 INFO [train.py:1114] (1/4) Epoch 18, batch 800, loss[loss=0.1317, simple_loss=0.219, pruned_loss=0.02216, over 4844.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2655, pruned_loss=0.04228, over 922924.71 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:09:11,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=232772.0, ans=0.125
+2024-07-29 03:09:12,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232772.0, ans=0.125
+2024-07-29 03:09:26,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=232798.66666666666, ans=0.125
+2024-07-29 03:09:28,889 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.634e+01 6.203e+01 6.793e+01 1.019e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-29 03:09:43,906 INFO [train.py:1114] (1/4) Epoch 18, batch 850, loss[loss=0.2067, simple_loss=0.2996, pruned_loss=0.05691, over 4675.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2651, pruned_loss=0.04228, over 927239.42 frames. ], batch size: 14, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:10:03,125 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232865.33333333334, ans=0.125
+2024-07-29 03:10:07,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232878.66666666666, ans=0.125
+2024-07-29 03:10:11,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.15 vs. limit=15.0
+2024-07-29 03:10:12,678 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=232892.0, ans=0.2
+2024-07-29 03:10:13,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=232892.0, ans=0.025
+2024-07-29 03:10:19,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.19 vs. limit=15.0
+2024-07-29 03:10:19,903 INFO [train.py:1114] (1/4) Epoch 18, batch 900, loss[loss=0.197, simple_loss=0.2895, pruned_loss=0.05229, over 4852.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2647, pruned_loss=0.04221, over 928378.99 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0
+2024-07-29 03:10:25,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=232905.33333333334, ans=0.125
+2024-07-29 03:10:27,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.63 vs. limit=15.0
+2024-07-29 03:10:27,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=232905.33333333334, ans=0.0
+2024-07-29 03:10:27,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0
+2024-07-29 03:10:34,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=232918.66666666666, ans=0.125
+2024-07-29 03:10:44,075 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.656e+01 6.090e+01 7.210e+01 1.010e+02, threshold=1.218e+02, percent-clipped=0.0
+2024-07-29 03:11:01,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=232958.66666666666, ans=0.0
+2024-07-29 03:11:04,781 INFO [train.py:1114] (1/4) Epoch 18, batch 950, loss[loss=0.1534, simple_loss=0.2431, pruned_loss=0.03182, over 4775.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.265, pruned_loss=0.04205, over 930045.97 frames. ], batch size: 12, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:11:04,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232972.0, ans=0.1
+2024-07-29 03:11:05,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.63 vs. limit=15.0
+2024-07-29 03:11:27,652 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.97 vs. limit=22.5
+2024-07-29 03:11:29,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=233012.0, ans=0.0
+2024-07-29 03:11:38,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=233025.33333333334, ans=0.2
+2024-07-29 03:11:39,991 INFO [train.py:1114] (1/4) Epoch 18, batch 1000, loss[loss=0.1709, simple_loss=0.2627, pruned_loss=0.0395, over 4963.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2652, pruned_loss=0.04189, over 929529.10 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:11:51,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233052.0, ans=0.125
+2024-07-29 03:11:54,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233065.33333333334, ans=0.125
+2024-07-29 03:11:58,671 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.660e+01 6.268e+01 7.166e+01 1.041e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-29 03:12:00,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=233078.66666666666, ans=0.125
+2024-07-29 03:12:03,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=233078.66666666666, ans=0.125
+2024-07-29 03:12:12,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=233092.0, ans=0.0
+2024-07-29 03:12:12,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=233092.0, ans=0.0
+2024-07-29 03:12:15,360 INFO [train.py:1114] (1/4) Epoch 18, batch 1050, loss[loss=0.173, simple_loss=0.2726, pruned_loss=0.03666, over 4875.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2639, pruned_loss=0.04136, over 932096.39 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:12:15,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=233105.33333333334, ans=0.95
+2024-07-29 03:12:20,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=233105.33333333334, ans=0.2
+2024-07-29 03:13:01,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=233145.33333333334, ans=15.0
+2024-07-29 03:13:24,181 INFO [train.py:1114] (1/4) Epoch 18, batch 1100, loss[loss=0.1573, simple_loss=0.2473, pruned_loss=0.03363, over 4900.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.264, pruned_loss=0.04169, over 934622.84 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:14:26,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.370e+01 5.951e+01 6.699e+01 1.093e+02, threshold=1.190e+02, percent-clipped=0.0
+2024-07-29 03:14:44,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233212.0, ans=0.125
+2024-07-29 03:14:51,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=233225.33333333334, ans=0.0
+2024-07-29 03:14:52,510 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=233225.33333333334, ans=0.025
+2024-07-29 03:15:12,269 INFO [train.py:1114] (1/4) Epoch 18, batch 1150, loss[loss=0.1887, simple_loss=0.2821, pruned_loss=0.04766, over 4903.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2654, pruned_loss=0.04243, over 934401.07 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:15:13,952 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0
+2024-07-29 03:15:24,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0
+2024-07-29 03:15:29,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=233265.33333333334, ans=0.125
+2024-07-29 03:15:30,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=233265.33333333334, ans=0.125
+2024-07-29 03:15:39,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=233278.66666666666, ans=0.0
+2024-07-29 03:15:54,164 INFO [train.py:1114] (1/4) Epoch 18, batch 1200, loss[loss=0.1993, simple_loss=0.2825, pruned_loss=0.05805, over 4872.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.266, pruned_loss=0.04246, over 933150.98 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:16:04,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233318.66666666666, ans=0.1
+2024-07-29 03:16:05,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.92 vs. limit=15.0
+2024-07-29 03:16:25,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=233332.0, ans=0.125
+2024-07-29 03:19:12,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=233332.0, ans=0.125
+2024-07-29 03:19:13,688 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.527e+01 5.938e+01 6.741e+01 1.045e+02, threshold=1.188e+02, percent-clipped=0.0
+2024-07-29 03:19:19,310 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.93 vs. limit=6.0
+2024-07-29 03:19:29,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=233358.66666666666, ans=0.2
+2024-07-29 03:19:29,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=233372.0, ans=0.0
+2024-07-29 03:19:30,339 INFO [train.py:1114] (1/4) Epoch 18, batch 1250, loss[loss=0.2008, simple_loss=0.2921, pruned_loss=0.05476, over 4789.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2663, pruned_loss=0.04224, over 937296.59 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 32.0
+2024-07-29 03:19:37,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=233385.33333333334, ans=0.125
+2024-07-29 03:19:47,333 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.57 vs. limit=12.0
+2024-07-29 03:19:57,700 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=12.0
+2024-07-29 03:19:59,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=233425.33333333334, ans=0.09899494936611666
+2024-07-29 03:20:03,277 INFO [train.py:1114] (1/4) Epoch 18, batch 1300, loss[loss=0.1613, simple_loss=0.2604, pruned_loss=0.03109, over 4706.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2653, pruned_loss=0.04178, over 938434.84 frames. ], batch size: 19, lr: 4.18e-03, grad_scale: 64.0
+2024-07-29 03:20:05,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=233438.66666666666, ans=0.125
+2024-07-29 03:20:21,873 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.468e+01 6.194e+01 6.881e+01 8.786e+01, threshold=1.239e+02, percent-clipped=0.0
+2024-07-29 03:20:23,508 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.91 vs. limit=15.0
+2024-07-29 03:20:28,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=233478.66666666666, ans=0.0
+2024-07-29 03:20:36,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=233492.0, ans=0.125
+2024-07-29 03:20:38,027 INFO [train.py:1114] (1/4) Epoch 18, batch 1350, loss[loss=0.1799, simple_loss=0.2773, pruned_loss=0.04124, over 4760.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2654, pruned_loss=0.04154, over 940431.02 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 64.0
+2024-07-29 03:21:23,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233505.33333333334, ans=0.125
+2024-07-29 03:22:12,835 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.89 vs. limit=22.5
+2024-07-29 03:22:33,505 INFO [train.py:1114] (1/4) Epoch 18, batch 1400, loss[loss=0.1732, simple_loss=0.2585, pruned_loss=0.04397, over 4707.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2655, pruned_loss=0.0418, over 942631.17 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 64.0
+2024-07-29 03:22:39,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=233585.33333333334, ans=0.125
+2024-07-29 03:22:47,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=233598.66666666666, ans=0.2
+2024-07-29 03:22:57,218 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.887e+01 6.413e+01 7.105e+01 1.184e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-29 03:23:33,205 INFO [train.py:1114] (1/4) Epoch 18, batch 1450, loss[loss=0.169, simple_loss=0.2646, pruned_loss=0.03671, over 4682.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2653, pruned_loss=0.04171, over 942722.62 frames.
], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:23:38,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=233638.66666666666, ans=0.2 +2024-07-29 03:23:45,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=233652.0, ans=0.0 +2024-07-29 03:23:48,946 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.04 vs. limit=10.0 +2024-07-29 03:23:50,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=233665.33333333334, ans=0.2 +2024-07-29 03:23:50,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=233665.33333333334, ans=0.2 +2024-07-29 03:23:50,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=233665.33333333334, ans=0.125 +2024-07-29 03:23:58,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.22 vs. limit=15.0 +2024-07-29 03:24:00,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=233692.0, ans=0.0 +2024-07-29 03:24:09,764 INFO [train.py:1114] (1/4) Epoch 18, batch 1500, loss[loss=0.1626, simple_loss=0.2471, pruned_loss=0.03906, over 4818.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2661, pruned_loss=0.04175, over 942068.58 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:24:12,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=233705.33333333334, ans=0.125 +2024-07-29 03:24:13,450 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233705.33333333334, ans=0.125 +2024-07-29 03:24:38,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=233732.0, ans=0.0 +2024-07-29 03:24:41,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=233732.0, ans=0.2 +2024-07-29 03:24:44,218 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.562e+01 6.096e+01 6.763e+01 1.145e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 03:24:44,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=233732.0, ans=0.0 +2024-07-29 03:24:49,921 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=15.0 +2024-07-29 03:24:55,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=233758.66666666666, ans=0.125 +2024-07-29 03:25:14,228 INFO [train.py:1114] (1/4) Epoch 18, batch 1550, loss[loss=0.1927, simple_loss=0.2867, pruned_loss=0.0493, over 4901.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2665, pruned_loss=0.04212, over 938814.64 frames. 
], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:25:27,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=233785.33333333334, ans=0.125 +2024-07-29 03:25:30,870 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.67 vs. limit=15.0 +2024-07-29 03:25:32,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.34 vs. limit=15.0 +2024-07-29 03:25:33,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=233798.66666666666, ans=0.2 +2024-07-29 03:25:34,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=233798.66666666666, ans=0.2 +2024-07-29 03:25:34,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=233798.66666666666, ans=0.035 +2024-07-29 03:25:36,740 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. limit=6.0 +2024-07-29 03:25:37,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=233798.66666666666, ans=10.0 +2024-07-29 03:25:37,698 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=233798.66666666666, ans=0.125 +2024-07-29 03:25:37,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=233798.66666666666, ans=0.0 +2024-07-29 03:25:47,411 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=233825.33333333334, ans=0.025 +2024-07-29 03:25:52,678 INFO [train.py:1114] (1/4) Epoch 18, batch 1600, loss[loss=0.1742, simple_loss=0.2711, pruned_loss=0.03866, over 4870.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2666, pruned_loss=0.04234, over 937577.10 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:26:02,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=233852.0, ans=0.0 +2024-07-29 03:26:04,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=233852.0, ans=0.0 +2024-07-29 03:26:08,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233865.33333333334, ans=0.125 +2024-07-29 03:26:11,513 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233865.33333333334, ans=0.1 +2024-07-29 03:26:12,703 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.724e+01 6.283e+01 7.250e+01 9.354e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 03:26:25,712 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. 
limit=8.0 +2024-07-29 03:26:36,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=233892.0, ans=0.125 +2024-07-29 03:26:37,501 INFO [train.py:1114] (1/4) Epoch 18, batch 1650, loss[loss=0.1708, simple_loss=0.269, pruned_loss=0.03627, over 4672.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2658, pruned_loss=0.04215, over 937268.22 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:26:40,150 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.42 vs. limit=10.0 +2024-07-29 03:26:50,493 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=233932.0, ans=0.125 +2024-07-29 03:27:10,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0 +2024-07-29 03:27:42,555 INFO [train.py:1114] (1/4) Epoch 18, batch 1700, loss[loss=0.1256, simple_loss=0.2059, pruned_loss=0.02268, over 4699.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.265, pruned_loss=0.04165, over 939030.61 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:27:54,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=233985.33333333334, ans=0.125 +2024-07-29 03:28:03,945 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.990e+01 5.769e+01 6.208e+01 7.214e+01 1.058e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 03:28:06,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234012.0, ans=0.125 +2024-07-29 03:28:11,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=234025.33333333334, ans=0.125 +2024-07-29 03:28:13,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=234025.33333333334, ans=0.125 +2024-07-29 03:28:14,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=234025.33333333334, ans=0.125 +2024-07-29 03:28:17,238 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.68 vs. limit=15.0 +2024-07-29 03:28:18,160 INFO [train.py:1114] (1/4) Epoch 18, batch 1750, loss[loss=0.1397, simple_loss=0.2207, pruned_loss=0.02933, over 4796.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2646, pruned_loss=0.04155, over 940288.51 frames. 
], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:28:23,614 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234038.66666666666, ans=0.125 +2024-07-29 03:28:25,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=234052.0, ans=0.2 +2024-07-29 03:28:32,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=234065.33333333334, ans=0.0 +2024-07-29 03:28:39,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=234078.66666666666, ans=0.025 +2024-07-29 03:28:40,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=234078.66666666666, ans=0.0 +2024-07-29 03:28:42,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=234078.66666666666, ans=0.07 +2024-07-29 03:28:51,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-07-29 03:28:51,353 INFO [train.py:1114] (1/4) Epoch 18, batch 1800, loss[loss=0.192, simple_loss=0.2955, pruned_loss=0.04419, over 4642.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2647, pruned_loss=0.04122, over 940574.58 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:28:52,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234105.33333333334, ans=0.125 +2024-07-29 03:29:00,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=234118.66666666666, ans=0.0 +2024-07-29 03:29:07,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=234132.0, ans=0.125 +2024-07-29 03:29:10,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.659e+01 6.366e+01 7.110e+01 1.077e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-29 03:29:12,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234145.33333333334, ans=0.1 +2024-07-29 03:29:13,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=234145.33333333334, ans=0.125 +2024-07-29 03:29:17,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234145.33333333334, ans=0.125 +2024-07-29 03:29:19,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=234158.66666666666, ans=0.125 +2024-07-29 03:29:20,128 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.64 vs. 
limit=12.0 +2024-07-29 03:29:21,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=234158.66666666666, ans=0.07 +2024-07-29 03:29:22,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=234158.66666666666, ans=0.125 +2024-07-29 03:29:27,018 INFO [train.py:1114] (1/4) Epoch 18, batch 1850, loss[loss=0.204, simple_loss=0.3011, pruned_loss=0.05341, over 4814.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2652, pruned_loss=0.04161, over 940297.38 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:29:32,697 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.08 vs. limit=15.0 +2024-07-29 03:29:50,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=234212.0, ans=0.0 +2024-07-29 03:29:58,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=234225.33333333334, ans=0.025 +2024-07-29 03:30:03,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=234238.66666666666, ans=0.0 +2024-07-29 03:30:03,544 INFO [train.py:1114] (1/4) Epoch 18, batch 1900, loss[loss=0.1814, simple_loss=0.2773, pruned_loss=0.04277, over 4667.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.266, pruned_loss=0.0421, over 941821.64 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:30:22,650 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.553e+01 6.272e+01 7.085e+01 9.977e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 03:30:29,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=234292.0, ans=0.025 +2024-07-29 03:30:29,576 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.63 vs. limit=15.0 +2024-07-29 03:30:36,380 INFO [train.py:1114] (1/4) Epoch 18, batch 1950, loss[loss=0.1606, simple_loss=0.2527, pruned_loss=0.03424, over 4897.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2666, pruned_loss=0.04202, over 943983.60 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:30:41,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=234305.33333333334, ans=0.125 +2024-07-29 03:30:45,745 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-07-29 03:30:56,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=234345.33333333334, ans=0.125 +2024-07-29 03:30:57,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234345.33333333334, ans=0.125 +2024-07-29 03:30:57,783 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. 
limit=6.0 +2024-07-29 03:31:02,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-07-29 03:31:05,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=234358.66666666666, ans=0.0 +2024-07-29 03:31:10,160 INFO [train.py:1114] (1/4) Epoch 18, batch 2000, loss[loss=0.168, simple_loss=0.2629, pruned_loss=0.03652, over 4801.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2668, pruned_loss=0.04197, over 941667.40 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:31:11,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=234372.0, ans=0.125 +2024-07-29 03:31:16,369 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=234372.0, ans=0.125 +2024-07-29 03:31:17,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=234372.0, ans=0.125 +2024-07-29 03:31:30,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234398.66666666666, ans=0.125 +2024-07-29 03:31:33,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.658e+01 6.506e+01 7.206e+01 1.041e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-29 03:31:40,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=234412.0, ans=0.1 +2024-07-29 03:31:48,062 INFO [train.py:1114] (1/4) Epoch 18, batch 2050, loss[loss=0.1485, simple_loss=0.2407, pruned_loss=0.02815, over 4609.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2663, pruned_loss=0.04197, over 939760.64 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:32:54,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.65 vs. limit=6.0 +2024-07-29 03:33:00,547 INFO [train.py:1114] (1/4) Epoch 18, batch 2100, loss[loss=0.1535, simple_loss=0.2481, pruned_loss=0.02945, over 4754.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2653, pruned_loss=0.04097, over 941618.38 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:33:05,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=234505.33333333334, ans=0.0 +2024-07-29 03:33:10,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234518.66666666666, ans=0.125 +2024-07-29 03:33:14,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234518.66666666666, ans=0.125 +2024-07-29 03:33:18,036 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.18 vs. 
limit=15.0 +2024-07-29 03:33:21,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=234532.0, ans=0.025 +2024-07-29 03:33:22,959 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.524e+01 6.278e+01 7.367e+01 1.141e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 03:33:30,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=234545.33333333334, ans=0.125 +2024-07-29 03:33:56,463 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. limit=15.0 +2024-07-29 03:33:59,992 INFO [train.py:1114] (1/4) Epoch 18, batch 2150, loss[loss=0.155, simple_loss=0.2471, pruned_loss=0.03148, over 4888.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2651, pruned_loss=0.04119, over 944600.93 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:34:00,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=234572.0, ans=0.125 +2024-07-29 03:34:00,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.88 vs. limit=10.0 +2024-07-29 03:34:02,358 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0 +2024-07-29 03:34:06,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234585.33333333334, ans=0.125 +2024-07-29 03:34:07,878 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.63 vs. limit=15.0 +2024-07-29 03:34:16,906 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:34:19,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234598.66666666666, ans=0.0 +2024-07-29 03:34:21,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=234612.0, ans=0.025 +2024-07-29 03:34:34,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=234638.66666666666, ans=0.125 +2024-07-29 03:34:36,793 INFO [train.py:1114] (1/4) Epoch 18, batch 2200, loss[loss=0.1806, simple_loss=0.2676, pruned_loss=0.04678, over 4807.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2647, pruned_loss=0.04127, over 943522.42 frames. 
], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:34:44,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=234652.0, ans=0.025 +2024-07-29 03:34:57,724 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.703e+01 6.363e+01 7.397e+01 1.281e+02, threshold=1.273e+02, percent-clipped=1.0 +2024-07-29 03:35:03,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=234678.66666666666, ans=0.125 +2024-07-29 03:35:11,869 INFO [train.py:1114] (1/4) Epoch 18, batch 2250, loss[loss=0.2082, simple_loss=0.306, pruned_loss=0.0552, over 4692.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04188, over 941840.24 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:35:14,289 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.56 vs. limit=22.5 +2024-07-29 03:35:20,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234718.66666666666, ans=0.0 +2024-07-29 03:35:26,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=234732.0, ans=0.2 +2024-07-29 03:35:45,217 INFO [train.py:1114] (1/4) Epoch 18, batch 2300, loss[loss=0.1639, simple_loss=0.2561, pruned_loss=0.03584, over 4942.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2646, pruned_loss=0.04173, over 940134.82 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:35:47,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234772.0, ans=0.125 +2024-07-29 03:35:53,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=234785.33333333334, ans=0.025 +2024-07-29 03:36:06,802 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.677e+01 6.195e+01 6.878e+01 1.027e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 03:36:19,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=234825.33333333334, ans=0.0 +2024-07-29 03:36:20,961 INFO [train.py:1114] (1/4) Epoch 18, batch 2350, loss[loss=0.2122, simple_loss=0.3136, pruned_loss=0.05537, over 4643.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2652, pruned_loss=0.04176, over 942138.37 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:36:24,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=234838.66666666666, ans=0.125 +2024-07-29 03:36:29,729 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.93 vs. limit=6.0 +2024-07-29 03:36:38,386 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=234865.33333333334, ans=0.5 +2024-07-29 03:36:38,514 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.18 vs. 
limit=15.0 +2024-07-29 03:36:41,980 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.43 vs. limit=12.0 +2024-07-29 03:36:54,318 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.0 +2024-07-29 03:36:55,204 INFO [train.py:1114] (1/4) Epoch 18, batch 2400, loss[loss=0.172, simple_loss=0.2545, pruned_loss=0.04475, over 4647.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2659, pruned_loss=0.04162, over 941927.22 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:37:02,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=234918.66666666666, ans=0.1 +2024-07-29 03:37:03,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=234918.66666666666, ans=0.125 +2024-07-29 03:37:06,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=234918.66666666666, ans=0.05 +2024-07-29 03:37:08,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234932.0, ans=0.125 +2024-07-29 03:37:15,793 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.568e+01 6.148e+01 7.129e+01 1.066e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 03:37:18,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.60 vs. limit=22.5 +2024-07-29 03:37:19,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=15.0 +2024-07-29 03:37:41,549 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=234958.66666666666, ans=0.125 +2024-07-29 03:37:44,174 INFO [train.py:1114] (1/4) Epoch 18, batch 2450, loss[loss=0.1755, simple_loss=0.2681, pruned_loss=0.04151, over 4693.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2668, pruned_loss=0.04206, over 937515.85 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:38:01,928 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:38:05,983 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.27 vs. limit=15.0 +2024-07-29 03:38:44,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=235012.0, ans=0.125 +2024-07-29 03:38:53,615 INFO [train.py:1114] (1/4) Epoch 18, batch 2500, loss[loss=0.1866, simple_loss=0.2801, pruned_loss=0.0466, over 4816.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2665, pruned_loss=0.04219, over 939359.06 frames. 
], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:39:49,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=235038.66666666666, ans=0.025 +2024-07-29 03:40:08,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.526e+01 6.445e+01 7.148e+01 1.003e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 03:40:09,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235078.66666666666, ans=0.0 +2024-07-29 03:40:25,114 INFO [train.py:1114] (1/4) Epoch 18, batch 2550, loss[loss=0.1812, simple_loss=0.2579, pruned_loss=0.05226, over 4812.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2666, pruned_loss=0.04259, over 938753.68 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:40:31,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=235118.66666666666, ans=0.0 +2024-07-29 03:40:32,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=235118.66666666666, ans=0.02 +2024-07-29 03:40:32,887 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.53 vs. limit=15.0 +2024-07-29 03:40:39,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=235118.66666666666, ans=0.125 +2024-07-29 03:40:47,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=235118.66666666666, ans=0.0 +2024-07-29 03:41:05,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=235158.66666666666, ans=0.125 +2024-07-29 03:41:10,215 INFO [train.py:1114] (1/4) Epoch 18, batch 2600, loss[loss=0.1834, simple_loss=0.2746, pruned_loss=0.04607, over 4898.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2664, pruned_loss=0.04276, over 937673.31 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:41:14,754 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.91 vs. limit=15.0 +2024-07-29 03:41:26,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=12.0 +2024-07-29 03:41:30,248 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.64 vs. limit=22.5 +2024-07-29 03:41:36,741 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.697e+01 6.154e+01 6.937e+01 9.396e+01, threshold=1.231e+02, percent-clipped=0.0 +2024-07-29 03:41:57,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235238.66666666666, ans=0.125 +2024-07-29 03:41:58,071 INFO [train.py:1114] (1/4) Epoch 18, batch 2650, loss[loss=0.191, simple_loss=0.2879, pruned_loss=0.04701, over 4615.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2667, pruned_loss=0.04273, over 939773.91 frames. 
], batch size: 16, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:42:06,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=235252.0, ans=0.125 +2024-07-29 03:42:10,172 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.79 vs. limit=15.0 +2024-07-29 03:42:14,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=235265.33333333334, ans=0.07 +2024-07-29 03:42:25,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=235278.66666666666, ans=10.0 +2024-07-29 03:42:36,013 INFO [train.py:1114] (1/4) Epoch 18, batch 2700, loss[loss=0.1935, simple_loss=0.2846, pruned_loss=0.05123, over 4738.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04307, over 939807.47 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:42:53,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=235318.66666666666, ans=0.125 +2024-07-29 03:43:01,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=235332.0, ans=0.2 +2024-07-29 03:43:06,503 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.534e+01 6.342e+01 7.179e+01 1.053e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 03:43:07,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. limit=10.0 +2024-07-29 03:43:14,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235345.33333333334, ans=0.125 +2024-07-29 03:43:18,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=235358.66666666666, ans=0.2 +2024-07-29 03:43:28,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=235358.66666666666, ans=0.5 +2024-07-29 03:43:29,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=235358.66666666666, ans=0.125 +2024-07-29 03:43:33,214 INFO [train.py:1114] (1/4) Epoch 18, batch 2750, loss[loss=0.1472, simple_loss=0.2497, pruned_loss=0.02236, over 4706.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2661, pruned_loss=0.04291, over 940012.05 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:43:42,232 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.33 vs. 
limit=22.5 +2024-07-29 03:43:45,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=235385.33333333334, ans=0.125 +2024-07-29 03:43:57,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235398.66666666666, ans=0.0 +2024-07-29 03:43:59,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=235412.0, ans=0.2 +2024-07-29 03:44:06,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=235425.33333333334, ans=0.125 +2024-07-29 03:44:06,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=235425.33333333334, ans=0.125 +2024-07-29 03:44:09,511 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235425.33333333334, ans=0.1 +2024-07-29 03:44:15,745 INFO [train.py:1114] (1/4) Epoch 18, batch 2800, loss[loss=0.2352, simple_loss=0.304, pruned_loss=0.08318, over 3302.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2656, pruned_loss=0.04264, over 937993.61 frames. ], batch size: 35, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:44:17,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235438.66666666666, ans=0.125 +2024-07-29 03:44:21,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=235438.66666666666, ans=0.125 +2024-07-29 03:44:26,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=235452.0, ans=0.0 +2024-07-29 03:44:35,786 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.07 vs. limit=15.0 +2024-07-29 03:44:36,876 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.675e+01 6.325e+01 7.095e+01 1.073e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 03:46:30,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=235492.0, ans=0.125 +2024-07-29 03:46:37,205 INFO [train.py:1114] (1/4) Epoch 18, batch 2850, loss[loss=0.189, simple_loss=0.2764, pruned_loss=0.05077, over 4960.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2666, pruned_loss=0.04284, over 936516.53 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:46:39,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.80 vs. limit=15.0 +2024-07-29 03:46:39,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=235505.33333333334, ans=0.0 +2024-07-29 03:46:53,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=235532.0, ans=0.2 +2024-07-29 03:47:04,689 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.70 vs. 
limit=15.0 +2024-07-29 03:47:05,209 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235558.66666666666, ans=0.1 +2024-07-29 03:47:07,117 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=235558.66666666666, ans=0.025 +2024-07-29 03:47:10,076 INFO [train.py:1114] (1/4) Epoch 18, batch 2900, loss[loss=0.1589, simple_loss=0.2608, pruned_loss=0.02849, over 4829.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04282, over 940235.84 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:47:11,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.26 vs. limit=22.5 +2024-07-29 03:47:19,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=235585.33333333334, ans=0.0 +2024-07-29 03:47:29,763 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.690e+01 6.267e+01 7.332e+01 1.125e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 03:47:38,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-07-29 03:47:39,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235625.33333333334, ans=0.125 +2024-07-29 03:47:43,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=235625.33333333334, ans=0.07 +2024-07-29 03:47:45,811 INFO [train.py:1114] (1/4) Epoch 18, batch 2950, loss[loss=0.1418, simple_loss=0.2345, pruned_loss=0.02456, over 4710.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2653, pruned_loss=0.04228, over 938986.42 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:47:52,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=235652.0, ans=0.125 +2024-07-29 03:48:11,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235678.66666666666, ans=0.125 +2024-07-29 03:48:17,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=235692.0, ans=0.025 +2024-07-29 03:48:21,409 INFO [train.py:1114] (1/4) Epoch 18, batch 3000, loss[loss=0.1854, simple_loss=0.2823, pruned_loss=0.04426, over 4763.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2647, pruned_loss=0.04196, over 938178.88 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:48:21,409 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 03:48:44,158 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1624, simple_loss=0.2643, pruned_loss=0.03024, over 944034.00 frames. 
+2024-07-29 03:48:44,159 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 03:48:50,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=235705.33333333334, ans=0.125 +2024-07-29 03:49:04,141 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:49:04,554 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.607e+01 6.067e+01 7.332e+01 1.132e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 03:49:07,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=235745.33333333334, ans=0.0 +2024-07-29 03:49:17,376 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=235758.66666666666, ans=0.125 +2024-07-29 03:49:18,684 INFO [train.py:1114] (1/4) Epoch 18, batch 3050, loss[loss=0.1849, simple_loss=0.2629, pruned_loss=0.05339, over 4635.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.04193, over 936965.24 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:49:20,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235772.0, ans=0.1 +2024-07-29 03:49:53,604 INFO [train.py:1114] (1/4) Epoch 18, batch 3100, loss[loss=0.1978, simple_loss=0.2844, pruned_loss=0.05557, over 4643.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2656, pruned_loss=0.04252, over 937598.22 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:49:55,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=235838.66666666666, ans=0.125 +2024-07-29 03:50:03,101 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:50:14,444 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.311e+01 5.371e+01 5.941e+01 6.939e+01 1.181e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 03:50:28,773 INFO [train.py:1114] (1/4) Epoch 18, batch 3150, loss[loss=0.2054, simple_loss=0.3, pruned_loss=0.05545, over 4598.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2657, pruned_loss=0.04201, over 937595.99 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:50:31,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=235905.33333333334, ans=0.09899494936611666 +2024-07-29 03:50:39,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=235918.66666666666, ans=0.2 +2024-07-29 03:50:44,870 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:50:50,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-07-29 03:51:01,998 INFO [train.py:1114] (1/4) Epoch 18, batch 3200, loss[loss=0.1583, simple_loss=0.2516, pruned_loss=0.03249, over 4831.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2647, pruned_loss=0.04175, over 939125.96 frames. 
], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:51:18,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=235998.66666666666, ans=0.07 +2024-07-29 03:51:23,932 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.694e+01 6.317e+01 7.020e+01 1.050e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 03:51:28,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-07-29 03:51:29,720 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.18 vs. limit=15.0 +2024-07-29 03:51:33,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.47 vs. limit=15.0 +2024-07-29 03:51:37,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0 +2024-07-29 03:51:38,122 INFO [train.py:1114] (1/4) Epoch 18, batch 3250, loss[loss=0.182, simple_loss=0.2806, pruned_loss=0.04164, over 4931.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2646, pruned_loss=0.04131, over 940175.13 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:51:42,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=236038.66666666666, ans=0.0 +2024-07-29 03:51:42,331 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236038.66666666666, ans=0.125 +2024-07-29 03:51:45,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=236052.0, ans=0.0 +2024-07-29 03:51:53,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236065.33333333334, ans=0.1 +2024-07-29 03:52:02,397 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=236078.66666666666, ans=0.0 +2024-07-29 03:52:11,779 INFO [train.py:1114] (1/4) Epoch 18, batch 3300, loss[loss=0.2174, simple_loss=0.3184, pruned_loss=0.05816, over 4716.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2632, pruned_loss=0.04118, over 940629.17 frames. 
], batch size: 19, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:52:26,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=236132.0, ans=0.0 +2024-07-29 03:52:26,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=236132.0, ans=0.0 +2024-07-29 03:52:31,045 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.049e+01 5.536e+01 6.135e+01 6.929e+01 1.182e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 03:52:33,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=236145.33333333334, ans=0.125 +2024-07-29 03:52:33,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236145.33333333334, ans=0.1 +2024-07-29 03:52:41,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.94 vs. limit=15.0 +2024-07-29 03:52:43,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=236158.66666666666, ans=0.07 +2024-07-29 03:52:45,454 INFO [train.py:1114] (1/4) Epoch 18, batch 3350, loss[loss=0.1739, simple_loss=0.267, pruned_loss=0.04044, over 4613.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2647, pruned_loss=0.04165, over 938655.40 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:07,446 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.21 vs. limit=22.5 +2024-07-29 03:53:13,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=236225.33333333334, ans=0.125 +2024-07-29 03:53:15,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236225.33333333334, ans=0.1 +2024-07-29 03:53:23,134 INFO [train.py:1114] (1/4) Epoch 18, batch 3400, loss[loss=0.1546, simple_loss=0.2304, pruned_loss=0.03941, over 4794.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2652, pruned_loss=0.04199, over 937341.27 frames. ], batch size: 11, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:24,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=236238.66666666666, ans=0.0 +2024-07-29 03:53:31,715 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.17 vs. limit=15.0 +2024-07-29 03:53:32,036 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=236252.0, ans=0.125 +2024-07-29 03:53:33,038 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.06 vs. 
limit=15.0 +2024-07-29 03:53:34,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236252.0, ans=0.1 +2024-07-29 03:53:43,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.614e+01 5.661e+01 6.178e+01 6.933e+01 1.009e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 03:53:49,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-07-29 03:53:49,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=236278.66666666666, ans=0.0 +2024-07-29 03:53:53,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236292.0, ans=0.1 +2024-07-29 03:53:57,530 INFO [train.py:1114] (1/4) Epoch 18, batch 3450, loss[loss=0.1993, simple_loss=0.2859, pruned_loss=0.05635, over 4630.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2649, pruned_loss=0.0417, over 937399.49 frames. ], batch size: 19, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:58,319 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=236305.33333333334, ans=0.025 +2024-07-29 03:54:00,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.45 vs. limit=15.0 +2024-07-29 03:54:00,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0 +2024-07-29 03:54:25,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=236358.66666666666, ans=0.5 +2024-07-29 03:54:30,873 INFO [train.py:1114] (1/4) Epoch 18, batch 3500, loss[loss=0.1526, simple_loss=0.238, pruned_loss=0.03359, over 4954.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.265, pruned_loss=0.04163, over 937967.11 frames. 
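The `scaling.py:214` entries record `ScheduledFloat` values: hyperparameters such as skip rates, dropout probabilities, and balancer probabilities that vary with `batch_count` instead of staying constant. A minimal sketch of the idea, a piecewise-linear schedule over batch count; the breakpoints below are illustrative placeholders, not values from this run:

```python
# Piecewise-linear schedule over batch_count, clamped at both ends.
class ScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, sorted by batch_count
        self.points = list(points)

    def value(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

skip_rate = ScheduledFloat((0.0, 0.5), (4000.0, 0.05), (16000.0, 0.0))
print(skip_rate.value(236_000.0))  # past the last breakpoint -> 0.0
```

Well past the last breakpoint a schedule sits at its final value, which is consistent with the many `ans=0.0` skip rates logged at batch_count ≈ 236,000 here.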
], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:54:37,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=236385.33333333334, ans=0.0 +2024-07-29 03:54:39,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=236385.33333333334, ans=0.125 +2024-07-29 03:54:45,799 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=236398.66666666666, ans=0.125 +2024-07-29 03:54:46,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=236398.66666666666, ans=0.025 +2024-07-29 03:54:47,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=236398.66666666666, ans=0.0 +2024-07-29 03:54:49,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=236398.66666666666, ans=0.0 +2024-07-29 03:54:50,372 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.401e+01 5.925e+01 6.709e+01 9.541e+01, threshold=1.185e+02, percent-clipped=0.0 +2024-07-29 03:54:58,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236425.33333333334, ans=0.125 +2024-07-29 03:54:59,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=236425.33333333334, ans=0.1 +2024-07-29 03:55:00,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=236425.33333333334, ans=0.2 +2024-07-29 03:55:04,386 INFO [train.py:1114] (1/4) Epoch 18, batch 3550, loss[loss=0.1833, simple_loss=0.2783, pruned_loss=0.04412, over 4667.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2647, pruned_loss=0.04112, over 938611.13 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:55:11,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=236452.0, ans=0.025 +2024-07-29 03:55:12,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.24 vs. limit=15.0 +2024-07-29 03:55:12,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=236452.0, ans=0.0 +2024-07-29 03:55:26,735 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.51 vs. limit=15.0 +2024-07-29 03:55:33,530 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=236492.0, ans=0.125 +2024-07-29 03:55:39,454 INFO [train.py:1114] (1/4) Epoch 18, batch 3600, loss[loss=0.145, simple_loss=0.2309, pruned_loss=0.02954, over 4966.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2641, pruned_loss=0.04108, over 940356.64 frames. 
], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:55:42,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=236505.33333333334, ans=0.0 +2024-07-29 03:55:43,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236505.33333333334, ans=0.125 +2024-07-29 03:55:45,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=236518.66666666666, ans=0.09899494936611666 +2024-07-29 03:55:59,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.793e+01 6.774e+01 8.193e+01 1.238e+02, threshold=1.355e+02, percent-clipped=1.0 +2024-07-29 03:55:59,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=236545.33333333334, ans=0.09899494936611666 +2024-07-29 03:56:11,419 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:56:13,417 INFO [train.py:1114] (1/4) Epoch 18, batch 3650, loss[loss=0.1955, simple_loss=0.3074, pruned_loss=0.04182, over 4899.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2637, pruned_loss=0.04099, over 940771.74 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:56:25,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=236585.33333333334, ans=0.05 +2024-07-29 03:56:36,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236612.0, ans=0.1 +2024-07-29 03:56:46,964 INFO [train.py:1114] (1/4) Epoch 18, batch 3700, loss[loss=0.1621, simple_loss=0.2535, pruned_loss=0.03538, over 4938.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2634, pruned_loss=0.04094, over 941680.73 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:56:51,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236638.66666666666, ans=0.1 +2024-07-29 03:56:53,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=236638.66666666666, ans=0.025 +2024-07-29 03:57:06,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236665.33333333334, ans=0.125 +2024-07-29 03:57:07,763 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.372e+01 6.083e+01 6.875e+01 9.330e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 03:57:08,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=236678.66666666666, ans=0.0 +2024-07-29 03:57:10,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236678.66666666666, ans=0.125 +2024-07-29 03:57:22,955 INFO [train.py:1114] (1/4) Epoch 18, batch 3750, loss[loss=0.1651, simple_loss=0.2487, pruned_loss=0.0407, over 4790.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.263, pruned_loss=0.04101, over 943345.05 frames. 
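The `optim.py:487` WARNING lines summarize recent per-batch gradient norms as five quantiles (min, 25%, median, 75%, max) alongside a clipping threshold and the percentage of batches clipped. The logged thresholds equal `Clipping_scale` times the median (for example 2.0 × 6.317e+01 ≈ 1.263e+02 above), so a plausible reconstruction of the summary, inferred from the numbers rather than taken from the trainer's actual code, is:

```python
import torch

def grad_norm_summary(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    """Summarize recent per-batch gradient norms as optim.py appears to."""
    q = torch.quantile(recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]            # assumed rule: scale * median
    pct_clipped = 100.0 * (recent_norms > threshold).float().mean()
    return q, threshold, pct_clipped

norms = torch.tensor([43.4, 56.9, 63.2, 70.2, 105.0])
q, thr, pct = grad_norm_summary(norms)
print(q.tolist(), thr.item(), pct.item())        # threshold 126.4, 0% clipped
```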
], batch size: 11, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:57:34,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=236718.66666666666, ans=0.125 +2024-07-29 03:57:42,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.81 vs. limit=15.0 +2024-07-29 03:57:47,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=236732.0, ans=0.0 +2024-07-29 03:57:51,233 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:58:09,061 INFO [train.py:1114] (1/4) Epoch 18, batch 3800, loss[loss=0.1778, simple_loss=0.272, pruned_loss=0.04177, over 4808.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2628, pruned_loss=0.04091, over 941480.14 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:58:13,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=236772.0, ans=0.04949747468305833 +2024-07-29 03:58:17,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.34 vs. limit=15.0 +2024-07-29 03:58:21,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=236798.66666666666, ans=0.125 +2024-07-29 03:58:26,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.42 vs. limit=22.5 +2024-07-29 03:58:28,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.460e+01 5.890e+01 6.474e+01 8.788e+01, threshold=1.178e+02, percent-clipped=0.0 +2024-07-29 03:58:28,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=236812.0, ans=0.0 +2024-07-29 03:58:37,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236825.33333333334, ans=0.1 +2024-07-29 03:58:41,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=236825.33333333334, ans=0.125 +2024-07-29 03:58:42,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=236825.33333333334, ans=0.125 +2024-07-29 03:58:42,862 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. limit=6.0 +2024-07-29 03:58:43,810 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=236838.66666666666, ans=0.125 +2024-07-29 03:58:44,391 INFO [train.py:1114] (1/4) Epoch 18, batch 3850, loss[loss=0.1965, simple_loss=0.2853, pruned_loss=0.05386, over 4621.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2629, pruned_loss=0.04076, over 942173.14 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:58:46,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.66 vs. 
limit=15.0 +2024-07-29 03:59:00,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236865.33333333334, ans=0.1 +2024-07-29 03:59:02,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=236865.33333333334, ans=0.0 +2024-07-29 03:59:13,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0 +2024-07-29 03:59:15,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.60 vs. limit=15.0 +2024-07-29 03:59:19,422 INFO [train.py:1114] (1/4) Epoch 18, batch 3900, loss[loss=0.1696, simple_loss=0.2675, pruned_loss=0.03585, over 4812.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2633, pruned_loss=0.04086, over 942376.68 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:59:22,157 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:59:42,385 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.580e+01 6.101e+01 6.865e+01 9.868e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 03:59:43,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236945.33333333334, ans=0.1 +2024-07-29 03:59:43,152 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=236945.33333333334, ans=0.125 +2024-07-29 03:59:46,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=236945.33333333334, ans=0.125 +2024-07-29 03:59:56,441 INFO [train.py:1114] (1/4) Epoch 18, batch 3950, loss[loss=0.2062, simple_loss=0.2862, pruned_loss=0.06306, over 4832.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04074, over 944377.22 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:59:58,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236972.0, ans=0.125 +2024-07-29 04:00:12,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=236998.66666666666, ans=0.0 +2024-07-29 04:00:16,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=237012.0, ans=0.125 +2024-07-29 04:00:19,191 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.38 vs. limit=10.0 +2024-07-29 04:00:23,761 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=237025.33333333334, ans=0.0 +2024-07-29 04:00:29,756 INFO [train.py:1114] (1/4) Epoch 18, batch 4000, loss[loss=0.1905, simple_loss=0.2771, pruned_loss=0.05191, over 4773.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2637, pruned_loss=0.04132, over 940347.19 frames. 
], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:00:31,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=237038.66666666666, ans=0.0 +2024-07-29 04:00:32,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=237038.66666666666, ans=0.0 +2024-07-29 04:00:39,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=237052.0, ans=0.125 +2024-07-29 04:00:42,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=237065.33333333334, ans=0.0 +2024-07-29 04:00:49,603 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.687e+01 6.252e+01 7.100e+01 1.258e+02, threshold=1.250e+02, percent-clipped=1.0 +2024-07-29 04:01:03,144 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.08 vs. limit=15.0 +2024-07-29 04:01:03,421 INFO [train.py:1114] (1/4) Epoch 18, batch 4050, loss[loss=0.2404, simple_loss=0.3115, pruned_loss=0.08469, over 3627.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2641, pruned_loss=0.04136, over 939285.92 frames. ], batch size: 35, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:01:11,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=237105.33333333334, ans=0.0 +2024-07-29 04:01:14,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237118.66666666666, ans=0.1 +2024-07-29 04:01:18,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=237118.66666666666, ans=0.0 +2024-07-29 04:01:24,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=237132.0, ans=0.0 +2024-07-29 04:01:26,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=15.0 +2024-07-29 04:01:29,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=237145.33333333334, ans=0.025 +2024-07-29 04:01:41,043 INFO [train.py:1114] (1/4) Epoch 18, batch 4100, loss[loss=0.2148, simple_loss=0.3044, pruned_loss=0.06255, over 4910.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2641, pruned_loss=0.04161, over 938537.16 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:01:42,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=237172.0, ans=0.04949747468305833 +2024-07-29 04:01:43,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=237172.0, ans=0.0 +2024-07-29 04:01:46,271 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.91 vs. 
limit=22.5 +2024-07-29 04:01:54,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=237198.66666666666, ans=0.2 +2024-07-29 04:01:58,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=237198.66666666666, ans=0.125 +2024-07-29 04:01:59,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.93 vs. limit=15.0 +2024-07-29 04:02:01,260 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.605e+01 6.193e+01 7.147e+01 1.131e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 04:02:04,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=237212.0, ans=0.125 +2024-07-29 04:02:05,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.52 vs. limit=12.0 +2024-07-29 04:02:25,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=237225.33333333334, ans=0.0 +2024-07-29 04:02:51,889 INFO [train.py:1114] (1/4) Epoch 18, batch 4150, loss[loss=0.2122, simple_loss=0.3061, pruned_loss=0.05913, over 4826.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.265, pruned_loss=0.04214, over 938601.31 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:03:48,806 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=237252.0, ans=0.125 +2024-07-29 04:03:50,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=237252.0, ans=0.125 +2024-07-29 04:04:11,725 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.51 vs. limit=15.0 +2024-07-29 04:04:15,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237278.66666666666, ans=0.1 +2024-07-29 04:04:16,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=237278.66666666666, ans=0.125 +2024-07-29 04:04:20,629 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237278.66666666666, ans=0.125 +2024-07-29 04:04:28,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=237292.0, ans=0.0 +2024-07-29 04:04:32,872 INFO [train.py:1114] (1/4) Epoch 18, batch 4200, loss[loss=0.2033, simple_loss=0.3019, pruned_loss=0.05239, over 4891.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.265, pruned_loss=0.04216, over 939640.25 frames. 
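`tot_loss` is consistently reported "over" roughly 940,000 frames even though each batch contributes only a few thousand, which reads as a decayed, frame-weighted running average rather than a whole-epoch mean: with about 4,700 frames per batch, a per-batch decay of 0.995 gives a steady-state window of 4,700 / 0.005 ≈ 940,000 frames, matching the log. The decay constant is inferred from those numbers, not stated in the log. A sketch:

```python
# Frame-weighted running average of batch losses with per-batch decay.
# decay=0.995 is an inference: steady-state frames ~ frames_per_batch / (1 - decay).
class RunningLoss:
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames
        return self.loss_sum / self.frames   # the reported tot_loss

tracker = RunningLoss()
for _ in range(2000):
    tracker.update(batch_loss=0.174, batch_frames=4700.0)
print(round(tracker.frames))                 # approaches 4700 / 0.005 = 940,000
```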
], batch size: 15, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:04:33,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=237305.33333333334, ans=0.125 +2024-07-29 04:04:35,749 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237305.33333333334, ans=0.1 +2024-07-29 04:04:37,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=237305.33333333334, ans=0.0 +2024-07-29 04:04:44,923 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.35 vs. limit=15.0 +2024-07-29 04:06:02,030 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+01 5.423e+01 5.970e+01 6.521e+01 1.016e+02, threshold=1.194e+02, percent-clipped=0.0 +2024-07-29 04:06:18,672 INFO [train.py:1114] (1/4) Epoch 18, batch 4250, loss[loss=0.1499, simple_loss=0.237, pruned_loss=0.03138, over 4639.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2647, pruned_loss=0.04189, over 940990.75 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:06:22,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=237372.0, ans=0.05 +2024-07-29 04:06:29,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=237385.33333333334, ans=0.125 +2024-07-29 04:06:31,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=237398.66666666666, ans=0.125 +2024-07-29 04:06:44,395 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237412.0, ans=0.1 +2024-07-29 04:06:47,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=237425.33333333334, ans=0.0 +2024-07-29 04:06:52,271 INFO [train.py:1114] (1/4) Epoch 18, batch 4300, loss[loss=0.1614, simple_loss=0.2535, pruned_loss=0.03466, over 4755.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2648, pruned_loss=0.04159, over 940275.76 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:07:17,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=237465.33333333334, ans=0.125 +2024-07-29 04:07:17,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=237465.33333333334, ans=0.025 +2024-07-29 04:07:22,448 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. limit=10.0 +2024-07-29 04:07:24,056 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.877e+01 6.356e+01 7.291e+01 9.513e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 04:07:37,524 INFO [train.py:1114] (1/4) Epoch 18, batch 4350, loss[loss=0.1875, simple_loss=0.2843, pruned_loss=0.04534, over 4750.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.0414, over 941062.58 frames. 
], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:07:37,616 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=237505.33333333334, ans=0.125 +2024-07-29 04:07:41,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=237505.33333333334, ans=0.2 +2024-07-29 04:08:02,070 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237545.33333333334, ans=0.125 +2024-07-29 04:08:05,505 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:08:12,603 INFO [train.py:1114] (1/4) Epoch 18, batch 4400, loss[loss=0.1405, simple_loss=0.2299, pruned_loss=0.02554, over 4812.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2643, pruned_loss=0.04126, over 940940.32 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:08:15,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=237572.0, ans=0.125 +2024-07-29 04:08:29,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=237598.66666666666, ans=0.5 +2024-07-29 04:08:33,225 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+01 5.655e+01 6.397e+01 7.492e+01 1.030e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 04:08:35,608 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0 +2024-07-29 04:08:46,775 INFO [train.py:1114] (1/4) Epoch 18, batch 4450, loss[loss=0.1615, simple_loss=0.2425, pruned_loss=0.04022, over 4944.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2641, pruned_loss=0.04126, over 939387.94 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:08:55,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=237652.0, ans=0.2 +2024-07-29 04:08:57,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=237652.0, ans=0.0 +2024-07-29 04:09:01,154 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.34 vs. limit=22.5 +2024-07-29 04:09:08,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.28 vs. limit=15.0 +2024-07-29 04:09:10,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237678.66666666666, ans=0.1 +2024-07-29 04:09:21,652 INFO [train.py:1114] (1/4) Epoch 18, batch 4500, loss[loss=0.1757, simple_loss=0.2658, pruned_loss=0.04279, over 4742.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2646, pruned_loss=0.04105, over 938386.35 frames. 
], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:09:27,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237718.66666666666, ans=0.125 +2024-07-29 04:09:29,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237718.66666666666, ans=0.125 +2024-07-29 04:09:42,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=237732.0, ans=0.0 +2024-07-29 04:09:45,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=15.0 +2024-07-29 04:09:48,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.95 vs. limit=15.0 +2024-07-29 04:09:50,467 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.651e+01 6.463e+01 7.658e+01 1.183e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 04:10:08,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=237758.66666666666, ans=0.125 +2024-07-29 04:10:10,125 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. limit=15.0 +2024-07-29 04:10:13,158 INFO [train.py:1114] (1/4) Epoch 18, batch 4550, loss[loss=0.2026, simple_loss=0.2925, pruned_loss=0.05629, over 4898.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04129, over 940546.82 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:14,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=237772.0, ans=0.125 +2024-07-29 04:10:21,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=237785.33333333334, ans=0.125 +2024-07-29 04:10:23,323 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=237785.33333333334, ans=0.025 +2024-07-29 04:10:37,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237812.0, ans=0.1 +2024-07-29 04:10:44,629 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.74 vs. limit=15.0 +2024-07-29 04:10:47,599 INFO [train.py:1114] (1/4) Epoch 18, batch 4600, loss[loss=0.1895, simple_loss=0.2827, pruned_loss=0.04811, over 4551.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2646, pruned_loss=0.04135, over 938583.59 frames. ], batch size: 21, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:51,413 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.79 vs. limit=22.5 +2024-07-29 04:10:57,979 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.62 vs. 
limit=22.5 +2024-07-29 04:10:59,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=237852.0, ans=0.125 +2024-07-29 04:11:09,106 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.588e+01 6.056e+01 7.096e+01 1.037e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 04:11:09,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=237878.66666666666, ans=0.125 +2024-07-29 04:11:11,427 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.07 vs. limit=15.0 +2024-07-29 04:11:14,527 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=237878.66666666666, ans=0.125 +2024-07-29 04:11:20,035 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237892.0, ans=0.1 +2024-07-29 04:11:22,405 INFO [train.py:1114] (1/4) Epoch 18, batch 4650, loss[loss=0.1895, simple_loss=0.2766, pruned_loss=0.05124, over 4846.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2656, pruned_loss=0.0417, over 940064.38 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:11:27,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=237905.33333333334, ans=0.125 +2024-07-29 04:11:41,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=237918.66666666666, ans=0.2 +2024-07-29 04:11:42,825 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=15.0 +2024-07-29 04:11:49,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=237932.0, ans=0.125 +2024-07-29 04:11:59,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237958.66666666666, ans=0.1 +2024-07-29 04:12:00,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=237958.66666666666, ans=0.05 +2024-07-29 04:12:04,313 INFO [train.py:1114] (1/4) Epoch 18, batch 4700, loss[loss=0.1562, simple_loss=0.2425, pruned_loss=0.03495, over 4693.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2651, pruned_loss=0.04162, over 937452.48 frames. ], batch size: 11, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:12:04,494 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:12:19,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=237998.66666666666, ans=0.0 +2024-07-29 04:12:24,422 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.692e+01 6.166e+01 6.744e+01 9.680e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 04:12:41,496 INFO [train.py:1114] (1/4) Epoch 18, batch 4750, loss[loss=0.1852, simple_loss=0.2759, pruned_loss=0.04724, over 4533.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2666, pruned_loss=0.04233, over 935538.39 frames. 
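The `scaling.py:1024` Whitening entries fire only when a module's whitening metric exceeds its limit (`metric=... vs. limit=...`); the metric measures how far the feature covariance within each group is from "white", i.e. proportional to the identity. One common form, assumed here purely for illustration (the authoritative definition lives in icefall's `scaling.py`), equals 1.0 for perfectly white features and grows as variance concentrates in fewer directions:

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    """Assumed whitening metric for illustration: sum of squared covariance
    entries, normalized so that a covariance proportional to the identity
    scores exactly 1.0. x has shape (num_frames, num_channels)."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]              # (C, C) feature covariance
    num_channels = cov.shape[0]
    mean_diag = cov.diagonal().mean()
    return (cov ** 2).sum() / (num_channels * mean_diag ** 2)

x = torch.randn(1000, 256)                    # near-white random features
print(whitening_metric(x).item())             # slightly above 1.0
```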
], batch size: 21, lr: 4.14e-03, grad_scale: 16.0 +2024-07-29 04:12:47,513 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.74 vs. limit=8.0 +2024-07-29 04:12:51,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=238052.0, ans=0.2 +2024-07-29 04:12:52,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238052.0, ans=0.125 +2024-07-29 04:12:54,209 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.06 vs. limit=22.5 +2024-07-29 04:12:56,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=238065.33333333334, ans=0.125 +2024-07-29 04:13:02,355 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=238065.33333333334, ans=10.0 +2024-07-29 04:13:10,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238078.66666666666, ans=0.1 +2024-07-29 04:13:28,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=238092.0, ans=0.125 +2024-07-29 04:13:38,945 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=238092.0, ans=0.0 +2024-07-29 04:13:44,027 INFO [train.py:1114] (1/4) Epoch 18, batch 4800, loss[loss=0.1463, simple_loss=0.2364, pruned_loss=0.02809, over 4695.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2654, pruned_loss=0.04228, over 933377.51 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:13:46,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=238105.33333333334, ans=0.125 +2024-07-29 04:13:53,316 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=238118.66666666666, ans=0.125 +2024-07-29 04:13:55,363 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:13:57,504 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=238132.0, ans=0.04949747468305833 +2024-07-29 04:14:01,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=238132.0, ans=0.0 +2024-07-29 04:14:07,344 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.662e+01 6.486e+01 7.810e+01 1.129e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-29 04:14:07,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=238145.33333333334, ans=0.0 +2024-07-29 04:14:09,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.93 vs. 
limit=15.0 +2024-07-29 04:14:14,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238158.66666666666, ans=0.125 +2024-07-29 04:14:25,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238158.66666666666, ans=0.125 +2024-07-29 04:14:25,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=238158.66666666666, ans=0.0 +2024-07-29 04:14:26,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=238172.0, ans=0.2 +2024-07-29 04:14:26,466 INFO [train.py:1114] (1/4) Epoch 18, batch 4850, loss[loss=0.1563, simple_loss=0.2642, pruned_loss=0.02418, over 4745.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2661, pruned_loss=0.04254, over 933045.51 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:14:36,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238185.33333333334, ans=0.0 +2024-07-29 04:14:37,284 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=238185.33333333334, ans=0.035 +2024-07-29 04:15:19,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=238212.0, ans=0.025 +2024-07-29 04:15:31,239 INFO [train.py:1114] (1/4) Epoch 18, batch 4900, loss[loss=0.1942, simple_loss=0.2996, pruned_loss=0.04437, over 4765.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2657, pruned_loss=0.04232, over 934952.69 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:15:52,803 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.541e+01 6.118e+01 7.300e+01 1.058e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 04:15:53,590 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=238278.66666666666, ans=0.0 +2024-07-29 04:16:05,554 INFO [train.py:1114] (1/4) Epoch 18, batch 4950, loss[loss=0.1848, simple_loss=0.2731, pruned_loss=0.04825, over 3503.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.266, pruned_loss=0.04284, over 932217.40 frames. ], batch size: 36, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:13,252 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.05 vs. limit=15.0 +2024-07-29 04:16:32,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238358.66666666666, ans=0.1 +2024-07-29 04:16:40,863 INFO [train.py:1114] (1/4) Epoch 18, batch 5000, loss[loss=0.179, simple_loss=0.2813, pruned_loss=0.03831, over 4654.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2652, pruned_loss=0.04241, over 936156.37 frames. 
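The `grad_scale` field tracks dynamic loss scaling for the mixed-precision training; in this stretch of the log it moves through 32.0, 64.0, 16.0, and back to 32.0. A minimal sketch of the usual update rule, halve on overflow and grow back after a run of clean steps, assuming PyTorch-GradScaler-style behavior rather than this trainer's exact logic:

```python
# Minimal dynamic loss scaler in the style of torch.cuda.amp.GradScaler:
# halve the scale when gradients overflow, double it after enough clean
# steps. The growth interval is illustrative, not this trainer's setting.
class DynamicLossScaler:
    def __init__(self, init_scale: float = 32.0, growth_interval: int = 2000):
        self.scale = init_scale
        self.growth_interval = growth_interval
        self._clean_steps = 0

    def update(self, found_inf: bool) -> None:
        if found_inf:
            self.scale *= 0.5          # back off after an overflow
            self._clean_steps = 0
        else:
            self._clean_steps += 1
            if self._clean_steps == self.growth_interval:
                self.scale *= 2.0      # cautiously grow again
                self._clean_steps = 0

scaler = DynamicLossScaler()
scaler.update(found_inf=True)
print(scaler.scale)                    # 32.0 -> 16.0
```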
], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:41,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=238372.0, ans=0.125 +2024-07-29 04:16:42,238 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238372.0, ans=0.1 +2024-07-29 04:16:47,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.14 vs. limit=15.0 +2024-07-29 04:16:50,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=238385.33333333334, ans=0.125 +2024-07-29 04:17:01,715 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.523e+01 5.940e+01 6.612e+01 9.274e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 04:17:04,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=238412.0, ans=0.125 +2024-07-29 04:17:14,228 INFO [train.py:1114] (1/4) Epoch 18, batch 5050, loss[loss=0.1367, simple_loss=0.2269, pruned_loss=0.02325, over 4841.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2644, pruned_loss=0.04197, over 938272.21 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:17:17,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=238438.66666666666, ans=0.0 +2024-07-29 04:17:19,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=238438.66666666666, ans=0.125 +2024-07-29 04:17:24,662 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.01 vs. limit=15.0 +2024-07-29 04:18:30,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=238465.33333333334, ans=0.125 +2024-07-29 04:18:31,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=238465.33333333334, ans=0.2 +2024-07-29 04:18:43,844 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=15.0 +2024-07-29 04:18:58,346 INFO [train.py:1114] (1/4) Epoch 18, batch 5100, loss[loss=0.1711, simple_loss=0.2534, pruned_loss=0.04438, over 4783.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2653, pruned_loss=0.04233, over 935368.50 frames. 
], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:07,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=238518.66666666666, ans=0.125 +2024-07-29 04:19:10,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=238518.66666666666, ans=0.125 +2024-07-29 04:19:16,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=238532.0, ans=0.125 +2024-07-29 04:19:17,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=238532.0, ans=0.125 +2024-07-29 04:19:19,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=238532.0, ans=0.0 +2024-07-29 04:19:21,953 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.577e+01 6.307e+01 7.309e+01 1.155e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:19:24,481 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.37 vs. limit=10.0 +2024-07-29 04:19:25,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.90 vs. limit=15.0 +2024-07-29 04:19:29,106 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.39 vs. limit=15.0 +2024-07-29 04:19:30,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=238558.66666666666, ans=0.0 +2024-07-29 04:19:34,576 INFO [train.py:1114] (1/4) Epoch 18, batch 5150, loss[loss=0.1715, simple_loss=0.2727, pruned_loss=0.03515, over 4825.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.266, pruned_loss=0.04248, over 936502.63 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:46,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=238585.33333333334, ans=0.025 +2024-07-29 04:19:48,168 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.69 vs. limit=15.0 +2024-07-29 04:20:03,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0 +2024-07-29 04:20:04,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=238625.33333333334, ans=0.0 +2024-07-29 04:20:08,822 INFO [train.py:1114] (1/4) Epoch 18, batch 5200, loss[loss=0.161, simple_loss=0.2648, pruned_loss=0.02865, over 4673.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2655, pruned_loss=0.04182, over 936800.50 frames. 
], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:15,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=238652.0, ans=0.2 +2024-07-29 04:20:15,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=238652.0, ans=0.125 +2024-07-29 04:20:18,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=238652.0, ans=0.125 +2024-07-29 04:20:20,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238652.0, ans=0.125 +2024-07-29 04:20:25,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=238665.33333333334, ans=0.0 +2024-07-29 04:20:26,515 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238665.33333333334, ans=0.125 +2024-07-29 04:20:30,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.651e+01 6.355e+01 7.516e+01 2.460e+02, threshold=1.271e+02, percent-clipped=1.0 +2024-07-29 04:20:38,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.36 vs. limit=15.0 +2024-07-29 04:20:40,642 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238692.0, ans=0.125 +2024-07-29 04:20:44,962 INFO [train.py:1114] (1/4) Epoch 18, batch 5250, loss[loss=0.1804, simple_loss=0.2698, pruned_loss=0.04553, over 4885.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2657, pruned_loss=0.04189, over 936464.97 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:45,064 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=238705.33333333334, ans=0.0 +2024-07-29 04:20:48,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238705.33333333334, ans=0.1 +2024-07-29 04:20:49,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.47 vs. limit=15.0 +2024-07-29 04:20:56,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=238718.66666666666, ans=0.5 +2024-07-29 04:20:56,743 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=21.32 vs. limit=15.0 +2024-07-29 04:20:58,379 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.70 vs. limit=6.0 +2024-07-29 04:21:03,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=238732.0, ans=0.125 +2024-07-29 04:21:18,561 INFO [train.py:1114] (1/4) Epoch 18, batch 5300, loss[loss=0.1876, simple_loss=0.2803, pruned_loss=0.04744, over 4627.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04182, over 934803.95 frames. 
], batch size: 16, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:21:18,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=238772.0, ans=0.025
+2024-07-29 04:21:37,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238798.66666666666, ans=0.125
+2024-07-29 04:21:41,632 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.630e+01 6.188e+01 7.457e+01 1.076e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 04:21:44,708 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.45 vs. limit=22.5
+2024-07-29 04:22:03,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=238825.33333333334, ans=12.0
+2024-07-29 04:22:08,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=238825.33333333334, ans=0.025
+2024-07-29 04:22:09,540 INFO [train.py:1114] (1/4) Epoch 18, batch 5350, loss[loss=0.1384, simple_loss=0.2305, pruned_loss=0.02317, over 4535.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2657, pruned_loss=0.04158, over 936727.03 frames. ], batch size: 10, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:22:25,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.80 vs. limit=15.0
+2024-07-29 04:22:26,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=238852.0, ans=0.025
+2024-07-29 04:22:29,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238852.0, ans=0.0
+2024-07-29 04:22:48,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=238892.0, ans=0.0
+2024-07-29 04:22:54,868 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.32 vs. limit=10.0
+2024-07-29 04:22:58,912 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0
+2024-07-29 04:23:04,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=238905.33333333334, ans=0.025
+2024-07-29 04:23:04,846 INFO [train.py:1114] (1/4) Epoch 18, batch 5400, loss[loss=0.2063, simple_loss=0.2907, pruned_loss=0.06092, over 4282.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2668, pruned_loss=0.04202, over 931578.54 frames. ], batch size: 25, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:23:10,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=238905.33333333334, ans=0.1
+2024-07-29 04:23:15,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=238918.66666666666, ans=0.2
+2024-07-29 04:23:25,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.47 vs. limit=15.0
+2024-07-29 04:23:25,932 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.845e+01 6.439e+01 7.513e+01 9.975e+01, threshold=1.288e+02, percent-clipped=0.0
+2024-07-29 04:23:29,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=238945.33333333334, ans=0.0
+2024-07-29 04:23:30,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.88 vs. limit=15.0
+2024-07-29 04:23:30,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238945.33333333334, ans=0.125
+2024-07-29 04:23:38,118 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:23:38,582 INFO [train.py:1114] (1/4) Epoch 18, batch 5450, loss[loss=0.168, simple_loss=0.2616, pruned_loss=0.03721, over 4710.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2663, pruned_loss=0.04194, over 934420.99 frames. ], batch size: 11, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:23:46,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238985.33333333334, ans=0.1
+2024-07-29 04:23:48,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=238985.33333333334, ans=0.125
+2024-07-29 04:23:53,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.77 vs. limit=6.0
+2024-07-29 04:23:58,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0
+2024-07-29 04:24:02,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=239012.0, ans=0.2
+2024-07-29 04:24:09,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=239025.33333333334, ans=0.2
+2024-07-29 04:24:09,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=239025.33333333334, ans=0.125
+2024-07-29 04:24:11,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239025.33333333334, ans=0.125
+2024-07-29 04:24:12,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0
+2024-07-29 04:24:14,267 INFO [train.py:1114] (1/4) Epoch 18, batch 5500, loss[loss=0.1971, simple_loss=0.2852, pruned_loss=0.05448, over 4071.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2649, pruned_loss=0.04162, over 931194.75 frames. ], batch size: 25, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:24:15,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239038.66666666666, ans=0.0
+2024-07-29 04:24:17,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=239038.66666666666, ans=0.0
+2024-07-29 04:24:22,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239052.0, ans=0.125
+2024-07-29 04:24:25,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=239052.0, ans=0.125
+2024-07-29 04:24:39,145 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.809e+01 5.775e+01 6.562e+01 7.641e+01 1.081e+02, threshold=1.312e+02, percent-clipped=0.0
+2024-07-29 04:25:00,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239092.0, ans=0.125
+2024-07-29 04:25:02,891 INFO [train.py:1114] (1/4) Epoch 18, batch 5550, loss[loss=0.145, simple_loss=0.2369, pruned_loss=0.02653, over 4699.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.04144, over 933394.20 frames. ], batch size: 12, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:25:03,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=239105.33333333334, ans=0.0
+2024-07-29 04:25:07,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=239105.33333333334, ans=0.0
+2024-07-29 04:25:11,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=239118.66666666666, ans=0.035
+2024-07-29 04:25:16,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0
+2024-07-29 04:25:21,349 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.88 vs. limit=6.0
+2024-07-29 04:25:30,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239158.66666666666, ans=0.0
+2024-07-29 04:25:38,199 INFO [train.py:1114] (1/4) Epoch 18, batch 5600, loss[loss=0.1664, simple_loss=0.2616, pruned_loss=0.03565, over 4744.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2655, pruned_loss=0.04154, over 934730.77 frames. ], batch size: 14, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:25:39,723 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:25:41,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=239172.0, ans=0.125
+2024-07-29 04:25:52,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239198.66666666666, ans=0.1
+2024-07-29 04:25:53,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=239198.66666666666, ans=0.025
+2024-07-29 04:25:59,986 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.791e+01 6.471e+01 7.649e+01 1.137e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-29 04:26:01,867 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.73 vs. limit=6.0
+2024-07-29 04:26:04,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.37 vs. limit=12.0
+2024-07-29 04:26:12,727 INFO [train.py:1114] (1/4) Epoch 18, batch 5650, loss[loss=0.1609, simple_loss=0.2541, pruned_loss=0.0339, over 4504.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2651, pruned_loss=0.04175, over 937149.84 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:26:12,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=239238.66666666666, ans=0.0
+2024-07-29 04:26:16,932 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=239238.66666666666, ans=0.5
+2024-07-29 04:26:17,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239238.66666666666, ans=0.1
+2024-07-29 04:26:17,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239238.66666666666, ans=0.1
+2024-07-29 04:26:18,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=239238.66666666666, ans=0.0
+2024-07-29 04:26:25,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239252.0, ans=0.1
+2024-07-29 04:26:26,477 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=239265.33333333334, ans=0.0
+2024-07-29 04:26:37,867 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:26:38,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=239278.66666666666, ans=0.04949747468305833
+2024-07-29 04:26:43,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=239292.0, ans=0.0
+2024-07-29 04:26:47,110 INFO [train.py:1114] (1/4) Epoch 18, batch 5700, loss[loss=0.1534, simple_loss=0.2457, pruned_loss=0.03059, over 4695.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04183, over 937967.01 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:26:47,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=239305.33333333334, ans=0.0
+2024-07-29 04:26:52,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=239305.33333333334, ans=15.0
+2024-07-29 04:27:09,679 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.703e+01 6.369e+01 7.336e+01 1.206e+02, threshold=1.274e+02, percent-clipped=0.0
+2024-07-29 04:27:09,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239345.33333333334, ans=0.125
+2024-07-29 04:27:10,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239345.33333333334, ans=0.125
+2024-07-29 04:27:17,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=239358.66666666666, ans=0.125
+2024-07-29 04:27:21,801 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239372.0, ans=0.1
+2024-07-29 04:27:22,265 INFO [train.py:1114] (1/4) Epoch 18, batch 5750, loss[loss=0.1826, simple_loss=0.2757, pruned_loss=0.04474, over 4732.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2659, pruned_loss=0.04215, over 937781.64 frames. ], batch size: 19, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:27:23,636 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=239372.0, ans=0.2
+2024-07-29 04:27:30,192 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239385.33333333334, ans=0.125
+2024-07-29 04:27:32,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=239385.33333333334, ans=0.0
+2024-07-29 04:27:32,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=239385.33333333334, ans=0.0
+2024-07-29 04:27:50,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=239425.33333333334, ans=0.2
+2024-07-29 04:27:56,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239438.66666666666, ans=0.1
+2024-07-29 04:27:57,370 INFO [train.py:1114] (1/4) Epoch 18, batch 5800, loss[loss=0.1954, simple_loss=0.2864, pruned_loss=0.05217, over 4641.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2671, pruned_loss=0.04272, over 937229.16 frames. ], batch size: 19, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:28:07,010 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.61 vs. limit=15.0
+2024-07-29 04:28:12,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=239465.33333333334, ans=0.015
+2024-07-29 04:28:18,261 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.596e+01 6.128e+01 6.728e+01 1.003e+02, threshold=1.226e+02, percent-clipped=0.0
+2024-07-29 04:28:21,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=239478.66666666666, ans=0.1
+2024-07-29 04:28:21,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=239478.66666666666, ans=0.0
+2024-07-29 04:28:30,893 INFO [train.py:1114] (1/4) Epoch 18, batch 5850, loss[loss=0.1994, simple_loss=0.2876, pruned_loss=0.05554, over 4517.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2667, pruned_loss=0.04272, over 937965.51 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:28:43,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=239518.66666666666, ans=0.0
+2024-07-29 04:28:45,094 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.60 vs. limit=10.0
+2024-07-29 04:28:51,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=239545.33333333334, ans=0.0
+2024-07-29 04:28:58,077 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239558.66666666666, ans=0.125
+2024-07-29 04:29:04,792 INFO [train.py:1114] (1/4) Epoch 18, batch 5900, loss[loss=0.2142, simple_loss=0.2971, pruned_loss=0.06564, over 4697.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2662, pruned_loss=0.04241, over 938373.96 frames. ], batch size: 15, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:29:27,145 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.760e+01 6.303e+01 7.095e+01 1.028e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-29 04:29:53,214 INFO [train.py:1114] (1/4) Epoch 18, batch 5950, loss[loss=0.194, simple_loss=0.2902, pruned_loss=0.04884, over 4686.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2648, pruned_loss=0.04116, over 940418.75 frames. ], batch size: 15, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:30:45,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=239665.33333333334, ans=0.0
+2024-07-29 04:30:49,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=239678.66666666666, ans=0.125
+2024-07-29 04:30:54,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239692.0, ans=0.125
+2024-07-29 04:31:01,587 INFO [train.py:1114] (1/4) Epoch 18, batch 6000, loss[loss=0.1808, simple_loss=0.2718, pruned_loss=0.04487, over 4386.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.265, pruned_loss=0.04158, over 937524.51 frames. ], batch size: 26, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:31:01,588 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-29 04:31:10,902 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.1188, 2.8851, 4.8000, 2.0949], device='cuda:1')
+2024-07-29 04:31:13,506 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1615, simple_loss=0.2636, pruned_loss=0.0297, over 944034.00 frames.
+2024-07-29 04:31:13,507 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-29 04:31:15,302 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=22.5
+2024-07-29 04:31:27,720 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239718.66666666666, ans=0.125
+2024-07-29 04:31:32,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=12.0
+2024-07-29 04:31:33,250 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=239732.0, ans=0.1
+2024-07-29 04:31:36,675 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 5.758e+01 6.298e+01 7.334e+01 1.056e+02, threshold=1.260e+02, percent-clipped=0.0
+2024-07-29 04:31:38,935 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=239745.33333333334, ans=0.125
+2024-07-29 04:31:47,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=239758.66666666666, ans=0.125
+2024-07-29 04:31:49,368 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.59 vs. limit=15.0
+2024-07-29 04:31:49,513 INFO [train.py:1114] (1/4) Epoch 18, batch 6050, loss[loss=0.1414, simple_loss=0.2217, pruned_loss=0.03062, over 4773.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2639, pruned_loss=0.04126, over 938443.88 frames. ], batch size: 12, lr: 4.13e-03, grad_scale: 32.0
+2024-07-29 04:31:51,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=239772.0, ans=0.0
+2024-07-29 04:31:55,138 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-07-29 04:32:11,875 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=239812.0, ans=0.125
+2024-07-29 04:32:15,585 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.60 vs. limit=22.5
+2024-07-29 04:32:39,730 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=15.0
+2024-07-29 04:32:40,016 INFO [train.py:1114] (1/4) Epoch 18, batch 6100, loss[loss=0.2228, simple_loss=0.3061, pruned_loss=0.06971, over 4671.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2635, pruned_loss=0.04114, over 937894.82 frames. ], batch size: 15, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:32:41,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=239838.66666666666, ans=0.125
+2024-07-29 04:33:02,003 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.53 vs. limit=22.5
+2024-07-29 04:33:02,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239852.0, ans=0.0
+2024-07-29 04:33:05,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=239865.33333333334, ans=0.125
+2024-07-29 04:33:07,688 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=239865.33333333334, ans=0.125
+2024-07-29 04:33:10,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.504e+01 6.224e+01 7.220e+01 1.027e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-29 04:33:11,719 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=239878.66666666666, ans=0.125
+2024-07-29 04:33:12,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=239878.66666666666, ans=10.0
+2024-07-29 04:33:13,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.08 vs. limit=15.0
+2024-07-29 04:33:25,613 INFO [train.py:1114] (1/4) Epoch 18, batch 6150, loss[loss=0.2582, simple_loss=0.3316, pruned_loss=0.09238, over 3341.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2648, pruned_loss=0.04185, over 936662.28 frames. ], batch size: 35, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:33:36,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239918.66666666666, ans=0.1
+2024-07-29 04:33:53,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=239958.66666666666, ans=0.0
+2024-07-29 04:33:58,044 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=239958.66666666666, ans=0.09899494936611666
+2024-07-29 04:33:59,192 INFO [train.py:1114] (1/4) Epoch 18, batch 6200, loss[loss=0.1838, simple_loss=0.2743, pruned_loss=0.0467, over 4742.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2647, pruned_loss=0.0419, over 935968.42 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:34:00,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=239972.0, ans=0.0
+2024-07-29 04:34:00,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239972.0, ans=0.125
+2024-07-29 04:34:07,970 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.77 vs. limit=15.0
+2024-07-29 04:34:08,696 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0
+2024-07-29 04:34:41,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=239998.66666666666, ans=0.0
+2024-07-29 04:34:44,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.597e+01 6.193e+01 7.328e+01 9.537e+01, threshold=1.239e+02, percent-clipped=0.0
+2024-07-29 04:34:46,158 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=15.0
+2024-07-29 04:34:46,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=240012.0, ans=0.0
+2024-07-29 04:34:50,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=240025.33333333334, ans=0.0
+2024-07-29 04:34:52,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240025.33333333334, ans=0.1
+2024-07-29 04:34:58,269 INFO [train.py:1114] (1/4) Epoch 18, batch 6250, loss[loss=0.1854, simple_loss=0.289, pruned_loss=0.0409, over 4819.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2642, pruned_loss=0.04167, over 932289.35 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:35:33,222 INFO [train.py:1114] (1/4) Epoch 18, batch 6300, loss[loss=0.1385, simple_loss=0.2203, pruned_loss=0.02831, over 4498.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2638, pruned_loss=0.04179, over 929099.39 frames. ], batch size: 10, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:35:33,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=240105.33333333334, ans=0.2
+2024-07-29 04:35:33,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=240105.33333333334, ans=0.0
+2024-07-29 04:35:46,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240132.0, ans=0.0
+2024-07-29 04:35:58,296 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.96 vs. limit=22.5
+2024-07-29 04:35:58,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.639e+01 6.264e+01 7.118e+01 1.029e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 04:36:08,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=240158.66666666666, ans=0.0
+2024-07-29 04:36:10,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240172.0, ans=0.125
+2024-07-29 04:36:11,344 INFO [train.py:1114] (1/4) Epoch 18, batch 6350, loss[loss=0.1715, simple_loss=0.2734, pruned_loss=0.03478, over 4476.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2639, pruned_loss=0.04136, over 933102.65 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:36:54,715 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=240212.0, ans=0.025
+2024-07-29 04:36:56,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240212.0, ans=0.1
+2024-07-29 04:36:59,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=240225.33333333334, ans=0.125
+2024-07-29 04:37:03,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=240225.33333333334, ans=0.2
+2024-07-29 04:37:03,908 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:37:05,082 INFO [train.py:1114] (1/4) Epoch 18, batch 6400, loss[loss=0.1666, simple_loss=0.2717, pruned_loss=0.03073, over 4637.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2634, pruned_loss=0.0414, over 934253.57 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:37:48,303 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.33 vs. limit=15.0
+2024-07-29 04:37:51,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240265.33333333334, ans=0.125
+2024-07-29 04:37:54,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.80 vs. limit=15.0
+2024-07-29 04:37:55,451 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.669e+01 6.278e+01 7.394e+01 9.691e+01, threshold=1.256e+02, percent-clipped=0.0
+2024-07-29 04:37:59,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=240278.66666666666, ans=0.125
+2024-07-29 04:38:07,966 INFO [train.py:1114] (1/4) Epoch 18, batch 6450, loss[loss=0.2111, simple_loss=0.3007, pruned_loss=0.06073, over 4574.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2643, pruned_loss=0.04159, over 938215.89 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:38:26,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=240332.0, ans=0.125
+2024-07-29 04:38:26,904 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0
+2024-07-29 04:38:29,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=240345.33333333334, ans=0.035
+2024-07-29 04:38:31,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=240345.33333333334, ans=0.04949747468305833
+2024-07-29 04:38:53,612 INFO [train.py:1114] (1/4) Epoch 18, batch 6500, loss[loss=0.2474, simple_loss=0.3096, pruned_loss=0.09256, over 3440.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.265, pruned_loss=0.04127, over 939828.94 frames. ], batch size: 35, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:38:59,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240372.0, ans=0.1
+2024-07-29 04:39:06,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240398.66666666666, ans=0.125
+2024-07-29 04:39:10,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=240398.66666666666, ans=0.125
+2024-07-29 04:39:13,958 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.507e+01 6.062e+01 6.906e+01 9.828e+01, threshold=1.212e+02, percent-clipped=0.0
+2024-07-29 04:39:14,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=240412.0, ans=0.125
+2024-07-29 04:39:27,370 INFO [train.py:1114] (1/4) Epoch 18, batch 6550, loss[loss=0.1309, simple_loss=0.2138, pruned_loss=0.02399, over 4797.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04094, over 942820.74 frames. ], batch size: 11, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:39:50,947 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.96 vs. limit=15.0
+2024-07-29 04:40:06,034 INFO [train.py:1114] (1/4) Epoch 18, batch 6600, loss[loss=0.1635, simple_loss=0.2486, pruned_loss=0.03916, over 4936.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2641, pruned_loss=0.041, over 944827.02 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:40:06,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=240505.33333333334, ans=0.125
+2024-07-29 04:40:12,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=240518.66666666666, ans=0.025
+2024-07-29 04:40:25,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=240518.66666666666, ans=0.2
+2024-07-29 04:40:34,501 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.621e+01 6.292e+01 7.270e+01 1.272e+02, threshold=1.258e+02, percent-clipped=1.0
+2024-07-29 04:40:40,005 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:40:40,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240558.66666666666, ans=0.125
+2024-07-29 04:40:42,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=240558.66666666666, ans=0.125
+2024-07-29 04:40:47,139 INFO [train.py:1114] (1/4) Epoch 18, batch 6650, loss[loss=0.2005, simple_loss=0.2882, pruned_loss=0.05642, over 4631.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2642, pruned_loss=0.04082, over 943178.77 frames. ], batch size: 17, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:40:50,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240572.0, ans=0.125
+2024-07-29 04:40:51,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=240572.0, ans=0.125
+2024-07-29 04:41:02,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.90 vs. limit=6.0
+2024-07-29 04:41:21,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240612.0, ans=0.1
+2024-07-29 04:41:30,160 INFO [train.py:1114] (1/4) Epoch 18, batch 6700, loss[loss=0.2039, simple_loss=0.304, pruned_loss=0.05191, over 4743.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2646, pruned_loss=0.04084, over 942056.48 frames. ], batch size: 19, lr: 4.12e-03, grad_scale: 32.0
+2024-07-29 04:41:31,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=240638.66666666666, ans=0.0
+2024-07-29 04:41:37,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=240652.0, ans=0.0
+2024-07-29 04:41:43,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=240665.33333333334, ans=0.0
+2024-07-29 04:41:51,430 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.784e+01 6.386e+01 7.165e+01 1.123e+02, threshold=1.277e+02, percent-clipped=0.0
+2024-07-29 04:42:04,334 INFO [train.py:1114] (1/4) Epoch 18, batch 6750, loss[loss=0.1572, simple_loss=0.2429, pruned_loss=0.03575, over 4424.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2657, pruned_loss=0.04106, over 940311.57 frames. ], batch size: 25, lr: 4.12e-03, grad_scale: 64.0
+2024-07-29 04:42:05,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=240705.33333333334, ans=0.025
+2024-07-29 04:42:07,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240705.33333333334, ans=0.125
+2024-07-29 04:42:07,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.48 vs. limit=12.0
+2024-07-29 04:42:11,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240718.66666666666, ans=0.125
+2024-07-29 04:42:11,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=240718.66666666666, ans=0.125
+2024-07-29 04:42:13,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=240718.66666666666, ans=0.125
+2024-07-29 04:42:15,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=240718.66666666666, ans=0.125
+2024-07-29 04:42:18,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=240732.0, ans=0.125
+2024-07-29 04:42:27,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=240745.33333333334, ans=0.0
+2024-07-29 04:42:35,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=240758.66666666666, ans=10.0
+2024-07-29 04:42:36,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240758.66666666666, ans=0.125
+2024-07-29 04:42:37,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240758.66666666666, ans=0.125
+2024-07-29 04:42:38,950 INFO [train.py:1114] (1/4) Epoch 18, batch 6800, loss[loss=0.1863, simple_loss=0.2823, pruned_loss=0.04517, over 4635.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2658, pruned_loss=0.04084, over 938797.29 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0
+2024-07-29 04:42:39,753 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:42:42,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=240772.0, ans=0.2
+2024-07-29 04:42:48,445 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0
+2024-07-29 04:42:49,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0
+2024-07-29 04:42:53,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=240798.66666666666, ans=0.0
+2024-07-29 04:42:54,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=240798.66666666666, ans=0.025
+2024-07-29 04:42:57,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240798.66666666666, ans=0.125
+2024-07-29 04:42:58,439 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=240812.0, ans=0.125
+2024-07-29 04:42:59,654 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.569e+01 6.060e+01 6.382e+01 1.017e+02, threshold=1.212e+02, percent-clipped=0.0
+2024-07-29 04:43:04,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=240812.0, ans=0.2
+2024-07-29 04:43:09,106 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:43:11,255 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.20 vs. limit=15.0
+2024-07-29 04:43:14,293 INFO [train.py:1114] (1/4) Epoch 18, batch 6850, loss[loss=0.138, simple_loss=0.2428, pruned_loss=0.01664, over 4701.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2651, pruned_loss=0.04078, over 940774.07 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0
+2024-07-29 04:43:19,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=240838.66666666666, ans=0.02
+2024-07-29 04:43:27,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.94 vs. limit=22.5
+2024-07-29 04:43:41,811 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:43:48,241 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.39 vs. limit=22.5
+2024-07-29 04:43:48,572 INFO [train.py:1114] (1/4) Epoch 18, batch 6900, loss[loss=0.1672, simple_loss=0.2613, pruned_loss=0.03655, over 4967.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2654, pruned_loss=0.04126, over 942952.92 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0
+2024-07-29 04:43:48,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=240905.33333333334, ans=0.125
+2024-07-29 04:43:57,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=240918.66666666666, ans=0.125
+2024-07-29 04:44:04,192 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.19 vs. limit=22.5
+2024-07-29 04:44:11,087 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.810e+01 6.480e+01 7.498e+01 1.027e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-29 04:44:16,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240945.33333333334, ans=0.125
+2024-07-29 04:44:16,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=240945.33333333334, ans=0.125
+2024-07-29 04:44:18,428 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=240958.66666666666, ans=0.95
+2024-07-29 04:44:19,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240958.66666666666, ans=0.125
+2024-07-29 04:44:24,893 INFO [train.py:1114] (1/4) Epoch 18, batch 6950, loss[loss=0.1556, simple_loss=0.2332, pruned_loss=0.03897, over 4505.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2651, pruned_loss=0.04128, over 940747.88 frames. ], batch size: 10, lr: 4.11e-03, grad_scale: 64.0
+2024-07-29 04:44:25,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240972.0, ans=0.125
+2024-07-29 04:44:32,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240985.33333333334, ans=0.125
+2024-07-29 04:44:35,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=240985.33333333334, ans=0.2
+2024-07-29 04:44:35,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240985.33333333334, ans=0.125
+2024-07-29 04:44:43,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=240998.66666666666, ans=0.1
+2024-07-29 04:44:48,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=241012.0, ans=0.0
+2024-07-29 04:44:59,692 INFO [train.py:1114] (1/4) Epoch 18, batch 7000, loss[loss=0.1983, simple_loss=0.3128, pruned_loss=0.04183, over 4601.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2648, pruned_loss=0.04117, over 938595.90 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 64.0
+2024-07-29 04:45:03,072 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:45:03,713 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=241038.66666666666, ans=0.125
+2024-07-29 04:45:06,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.97 vs. limit=15.0
+2024-07-29 04:45:10,975 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241052.0, ans=0.1
+2024-07-29 04:45:15,082 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=241065.33333333334, ans=0.5
+2024-07-29 04:45:20,297 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.764e+01 6.515e+01 7.633e+01 1.207e+02, threshold=1.303e+02, percent-clipped=0.0
+2024-07-29 04:45:23,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241078.66666666666, ans=0.125
+2024-07-29 04:45:32,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=241105.33333333334, ans=0.0
+2024-07-29 04:45:33,133 INFO [train.py:1114] (1/4) Epoch 18, batch 7050, loss[loss=0.1681, simple_loss=0.2635, pruned_loss=0.03635, over 4709.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.265, pruned_loss=0.04121, over 941775.32 frames. ], batch size: 19, lr: 4.11e-03, grad_scale: 64.0
+2024-07-29 04:45:40,312 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0
+2024-07-29 04:45:42,033 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=241118.66666666666, ans=0.0
+2024-07-29 04:45:48,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0
+2024-07-29 04:45:50,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=241132.0, ans=0.025
+2024-07-29 04:45:52,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.67 vs. limit=15.0
+2024-07-29 04:45:54,359 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=241145.33333333334, ans=0.125
+2024-07-29 04:46:03,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241158.66666666666, ans=0.125
+2024-07-29 04:46:04,839 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.49 vs. limit=22.5
+2024-07-29 04:46:05,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=241158.66666666666, ans=0.2
+2024-07-29 04:46:06,963 INFO [train.py:1114] (1/4) Epoch 18, batch 7100, loss[loss=0.1727, simple_loss=0.2733, pruned_loss=0.03606, over 4803.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.265, pruned_loss=0.0416, over 937031.82 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 64.0
+2024-07-29 04:46:10,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=241172.0, ans=0.0
+2024-07-29 04:46:16,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=241185.33333333334, ans=0.2
+2024-07-29 04:46:21,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=241198.66666666666, ans=0.0
+2024-07-29 04:46:21,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=241198.66666666666, ans=0.0
+2024-07-29 04:46:23,721 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241198.66666666666, ans=0.125
+2024-07-29 04:46:28,375 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.554e+01 6.044e+01 6.901e+01 9.600e+01, threshold=1.209e+02, percent-clipped=0.0
+2024-07-29 04:46:29,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.31 vs. limit=22.5
+2024-07-29 04:46:32,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=241212.0, ans=0.0
+2024-07-29 04:46:40,687 INFO [train.py:1114] (1/4) Epoch 18, batch 7150, loss[loss=0.1777, simple_loss=0.2776, pruned_loss=0.03895, over 4513.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04082, over 937601.78 frames. ], batch size: 21, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:46:45,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=241238.66666666666, ans=0.04949747468305833
+2024-07-29 04:46:49,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=241252.0, ans=0.0
+2024-07-29 04:46:49,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=241252.0, ans=0.05
+2024-07-29 04:46:54,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=241265.33333333334, ans=0.0
+2024-07-29 04:46:54,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.33 vs. limit=10.0
+2024-07-29 04:47:23,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=241292.0, ans=0.125
+2024-07-29 04:47:24,560 INFO [train.py:1114] (1/4) Epoch 18, batch 7200, loss[loss=0.186, simple_loss=0.284, pruned_loss=0.044, over 4801.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2645, pruned_loss=0.04135, over 938312.41 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:47:24,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=241305.33333333334, ans=0.0
+2024-07-29 04:47:27,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241305.33333333334, ans=0.125
+2024-07-29 04:47:33,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241318.66666666666, ans=0.125
+2024-07-29 04:47:36,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241332.0, ans=0.125
+2024-07-29 04:47:42,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241332.0, ans=0.1
+2024-07-29 04:47:45,845 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 5.707e+01 6.356e+01 7.350e+01 1.020e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-29 04:47:50,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.58 vs. limit=22.5
+2024-07-29 04:47:57,744 INFO [train.py:1114] (1/4) Epoch 18, batch 7250, loss[loss=0.1796, simple_loss=0.2547, pruned_loss=0.05224, over 4851.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2626, pruned_loss=0.04068, over 939963.38 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:48:16,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.50 vs. limit=15.0
+2024-07-29 04:48:32,909 INFO [train.py:1114] (1/4) Epoch 18, batch 7300, loss[loss=0.1398, simple_loss=0.2292, pruned_loss=0.02517, over 4855.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2622, pruned_loss=0.0402, over 939832.69 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:48:34,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=241438.66666666666, ans=0.125
+2024-07-29 04:48:40,318 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=241452.0, ans=0.125
+2024-07-29 04:48:53,885 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.415e+01 6.086e+01 6.711e+01 9.900e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-29 04:49:12,065 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.80 vs. limit=15.0
+2024-07-29 04:49:14,859 INFO [train.py:1114] (1/4) Epoch 18, batch 7350, loss[loss=0.1337, simple_loss=0.2226, pruned_loss=0.02239, over 4639.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2626, pruned_loss=0.04015, over 939196.80 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:49:21,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241505.33333333334, ans=0.1
+2024-07-29 04:49:29,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=241518.66666666666, ans=0.125
+2024-07-29 04:49:37,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241532.0, ans=0.125
+2024-07-29 04:49:38,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241532.0, ans=0.1
+2024-07-29 04:49:44,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241545.33333333334, ans=0.125
+2024-07-29 04:49:56,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=241558.66666666666, ans=0.125
+2024-07-29 04:49:56,822 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.48 vs. limit=10.0
+2024-07-29 04:49:58,973 INFO [train.py:1114] (1/4) Epoch 18, batch 7400, loss[loss=0.1979, simple_loss=0.2999, pruned_loss=0.04792, over 4689.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2641, pruned_loss=0.04076, over 940278.19 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:49:59,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=241572.0, ans=0.125
+2024-07-29 04:50:15,202 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.34 vs. limit=15.0
+2024-07-29 04:50:16,501 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.61 vs. limit=15.0
+2024-07-29 04:50:22,050 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.627e+01 6.295e+01 7.057e+01 1.550e+02, threshold=1.259e+02, percent-clipped=1.0
+2024-07-29 04:50:33,794 INFO [train.py:1114] (1/4) Epoch 18, batch 7450, loss[loss=0.1457, simple_loss=0.2209, pruned_loss=0.0352, over 4614.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2635, pruned_loss=0.04095, over 937657.93 frames. ], batch size: 11, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:50:34,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241638.66666666666, ans=0.125
+2024-07-29 04:50:36,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241638.66666666666, ans=0.2
+2024-07-29 04:50:36,632 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=241638.66666666666, ans=0.125
+2024-07-29 04:50:39,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=241652.0, ans=0.0
+2024-07-29 04:50:44,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241652.0, ans=0.1
+2024-07-29 04:50:49,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=241665.33333333334, ans=0.0
+2024-07-29 04:50:50,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0
+2024-07-29 04:51:04,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.91 vs. limit=6.0
+2024-07-29 04:51:05,934 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.83 vs. limit=22.5
+2024-07-29 04:51:06,925 INFO [train.py:1114] (1/4) Epoch 18, batch 7500, loss[loss=0.1865, simple_loss=0.2719, pruned_loss=0.0505, over 3460.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2648, pruned_loss=0.04171, over 936236.17 frames. ], batch size: 36, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:51:07,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=241705.33333333334, ans=0.125
+2024-07-29 04:51:11,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=241705.33333333334, ans=0.025
+2024-07-29 04:51:23,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241732.0, ans=0.125
+2024-07-29 04:51:26,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=241745.33333333334, ans=0.0
+2024-07-29 04:51:28,211 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.735e+01 6.333e+01 6.793e+01 1.076e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-29 04:51:33,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.28 vs. limit=12.0
+2024-07-29 04:51:37,233 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0
+2024-07-29 04:51:40,184 INFO [train.py:1114] (1/4) Epoch 18, batch 7550, loss[loss=0.1812, simple_loss=0.2851, pruned_loss=0.03868, over 4615.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.04242, over 935929.79 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:51:42,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=241772.0, ans=0.125
+2024-07-29 04:51:42,903 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241772.0, ans=0.1
+2024-07-29 04:51:43,236 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.40 vs. limit=10.0
+2024-07-29 04:51:55,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=241785.33333333334, ans=0.2
+2024-07-29 04:51:59,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=241785.33333333334, ans=0.2
+2024-07-29 04:52:11,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241812.0, ans=0.125
+2024-07-29 04:52:21,660 INFO [train.py:1114] (1/4) Epoch 18, batch 7600, loss[loss=0.164, simple_loss=0.2577, pruned_loss=0.03512, over 4819.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.04198, over 937873.55 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:52:24,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=241838.66666666666, ans=0.0
+2024-07-29 04:52:26,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=241838.66666666666, ans=0.025
+2024-07-29 04:52:26,686 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.86 vs. limit=22.5
+2024-07-29 04:52:27,178 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0
+2024-07-29 04:52:33,357 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.79 vs. limit=22.5
+2024-07-29 04:52:43,229 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.592e+01 6.086e+01 6.807e+01 8.936e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-29 04:52:43,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=241878.66666666666, ans=0.05
+2024-07-29 04:52:49,411 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0
+2024-07-29 04:52:51,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=241892.0, ans=0.125
+2024-07-29 04:52:52,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=241892.0, ans=0.0
+2024-07-29 04:52:55,130 INFO [train.py:1114] (1/4) Epoch 18, batch 7650, loss[loss=0.1557, simple_loss=0.2477, pruned_loss=0.03189, over 4938.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2648, pruned_loss=0.04172, over 936482.19 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:53:13,889 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.62 vs. limit=15.0
+2024-07-29 04:53:21,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241958.66666666666, ans=0.125
+2024-07-29 04:53:25,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=241958.66666666666, ans=0.2
+2024-07-29 04:53:28,757 INFO [train.py:1114] (1/4) Epoch 18, batch 7700, loss[loss=0.19, simple_loss=0.2791, pruned_loss=0.05041, over 4690.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2652, pruned_loss=0.04139, over 934177.79 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:53:40,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=241985.33333333334, ans=0.2
+2024-07-29 04:53:47,213 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-29 04:53:49,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=241998.66666666666, ans=0.125
+2024-07-29 04:53:50,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=242012.0, ans=0.125
+2024-07-29 04:53:52,077 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.527e+01 6.014e+01 6.715e+01 9.821e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-29 04:53:54,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242012.0, ans=0.1
+2024-07-29 04:54:02,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=242025.33333333334, ans=0.125
+2024-07-29 04:54:03,544 INFO [train.py:1114] (1/4) Epoch 18, batch 7750, loss[loss=0.1735, simple_loss=0.2562, pruned_loss=0.04542, over 4928.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.266, pruned_loss=0.04131, over 935452.23 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:54:15,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242052.0, ans=0.0
+2024-07-29 04:54:20,195 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.82 vs. limit=15.0
+2024-07-29 04:54:28,945 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.55 vs. limit=15.0
+2024-07-29 04:54:29,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=242078.66666666666, ans=0.125
+2024-07-29 04:54:39,126 INFO [train.py:1114] (1/4) Epoch 18, batch 7800, loss[loss=0.1576, simple_loss=0.2562, pruned_loss=0.02951, over 4661.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2651, pruned_loss=0.04037, over 937198.51 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:54:41,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=242105.33333333334, ans=0.125
+2024-07-29 04:54:41,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=242105.33333333334, ans=0.0
+2024-07-29 04:54:48,516 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.78 vs. limit=6.0
+2024-07-29 04:55:00,038 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.583e+01 6.063e+01 6.593e+01 8.807e+01, threshold=1.213e+02, percent-clipped=0.0
+2024-07-29 04:55:02,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=242145.33333333334, ans=0.125
+2024-07-29 04:55:07,015 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:55:26,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=242158.66666666666, ans=0.2
+2024-07-29 04:55:28,052 INFO [train.py:1114] (1/4) Epoch 18, batch 7850, loss[loss=0.1585, simple_loss=0.2397, pruned_loss=0.03872, over 4544.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2651, pruned_loss=0.04073, over 935739.02 frames. ], batch size: 10, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:55:29,067 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0
+2024-07-29 04:55:45,727 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.06 vs. limit=15.0
+2024-07-29 04:55:51,092 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=242172.0, ans=10.0
+2024-07-29 04:55:56,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=242185.33333333334, ans=0.2
+2024-07-29 04:56:00,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=242198.66666666666, ans=0.125
+2024-07-29 04:56:03,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.70 vs. limit=15.0
+2024-07-29 04:56:08,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=242212.0, ans=0.125
+2024-07-29 04:56:10,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242212.0, ans=0.0
+2024-07-29 04:56:12,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0
+2024-07-29 04:56:18,458 INFO [train.py:1114] (1/4) Epoch 18, batch 7900, loss[loss=0.1589, simple_loss=0.257, pruned_loss=0.03042, over 4878.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2658, pruned_loss=0.04082, over 933090.63 frames.
], batch size: 14, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:39,131 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.700e+01 6.249e+01 7.197e+01 1.145e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 04:56:50,147 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=12.0 +2024-07-29 04:56:50,885 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.74 vs. limit=10.0 +2024-07-29 04:56:51,092 INFO [train.py:1114] (1/4) Epoch 18, batch 7950, loss[loss=0.2277, simple_loss=0.2956, pruned_loss=0.0799, over 3320.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2645, pruned_loss=0.04073, over 935206.62 frames. ], batch size: 36, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:55,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=242305.33333333334, ans=0.5 +2024-07-29 04:57:03,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=242332.0, ans=15.0 +2024-07-29 04:57:05,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=242332.0, ans=0.09899494936611666 +2024-07-29 04:57:09,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=242332.0, ans=0.2 +2024-07-29 04:57:14,437 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=242345.33333333334, ans=0.2 +2024-07-29 04:57:24,039 INFO [train.py:1114] (1/4) Epoch 18, batch 8000, loss[loss=0.1656, simple_loss=0.236, pruned_loss=0.0476, over 4619.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.04073, over 934378.05 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:57:29,030 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.18 vs. limit=15.0 +2024-07-29 04:57:39,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=242398.66666666666, ans=0.125 +2024-07-29 04:57:42,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=242398.66666666666, ans=0.0 +2024-07-29 04:57:45,064 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.556e+01 6.379e+01 7.313e+01 1.044e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 04:57:51,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=242425.33333333334, ans=0.125 +2024-07-29 04:57:58,319 INFO [train.py:1114] (1/4) Epoch 18, batch 8050, loss[loss=0.1803, simple_loss=0.2773, pruned_loss=0.04162, over 4815.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.264, pruned_loss=0.04066, over 934148.79 frames. 
], batch size: 14, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:58:11,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=242465.33333333334, ans=0.0 +2024-07-29 04:58:12,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242465.33333333334, ans=0.1 +2024-07-29 04:58:18,348 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.70 vs. limit=10.0 +2024-07-29 04:58:22,151 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-29 04:58:25,055 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.81 vs. limit=15.0 +2024-07-29 04:58:31,318 INFO [train.py:1114] (1/4) Epoch 18, batch 8100, loss[loss=0.1786, simple_loss=0.277, pruned_loss=0.04007, over 4802.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.265, pruned_loss=0.04138, over 934001.20 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:58:39,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=242518.66666666666, ans=0.125 +2024-07-29 04:58:39,997 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:58:44,041 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:58:52,547 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.503e+01 5.727e+01 6.609e+01 7.504e+01 1.146e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-29 04:58:53,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.68 vs. limit=15.0 +2024-07-29 04:59:04,411 INFO [train.py:1114] (1/4) Epoch 18, batch 8150, loss[loss=0.208, simple_loss=0.3089, pruned_loss=0.05354, over 4799.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2641, pruned_loss=0.04116, over 937267.71 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:59:22,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.76 vs. limit=15.0 +2024-07-29 04:59:23,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=242612.0, ans=0.125 +2024-07-29 04:59:28,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=242612.0, ans=0.125 +2024-07-29 04:59:39,528 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:59:41,326 INFO [train.py:1114] (1/4) Epoch 18, batch 8200, loss[loss=0.1932, simple_loss=0.2783, pruned_loss=0.05407, over 4791.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2654, pruned_loss=0.04129, over 938411.56 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:59:49,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=242652.0, ans=0.2 +2024-07-29 05:00:11,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=242665.33333333334, ans=0.125 +2024-07-29 05:00:14,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=242665.33333333334, ans=0.07 +2024-07-29 05:00:20,496 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.660e+01 6.350e+01 7.311e+01 1.182e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 05:00:26,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242692.0, ans=0.0 +2024-07-29 05:00:28,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=242692.0, ans=0.025 +2024-07-29 05:00:31,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242692.0, ans=0.1 +2024-07-29 05:00:33,415 INFO [train.py:1114] (1/4) Epoch 18, batch 8250, loss[loss=0.1613, simple_loss=0.2586, pruned_loss=0.03196, over 4896.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2646, pruned_loss=0.04066, over 938290.19 frames. ], batch size: 13, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:00:34,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=242705.33333333334, ans=0.5 +2024-07-29 05:00:47,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=242732.0, ans=0.0 +2024-07-29 05:00:49,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242732.0, ans=0.125 +2024-07-29 05:00:50,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=242732.0, ans=0.2 +2024-07-29 05:00:53,984 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.33 vs. limit=10.0 +2024-07-29 05:00:56,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=242745.33333333334, ans=0.04949747468305833 +2024-07-29 05:00:59,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.96 vs. limit=6.0 +2024-07-29 05:00:59,971 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=242758.66666666666, ans=0.0 +2024-07-29 05:01:06,375 INFO [train.py:1114] (1/4) Epoch 18, batch 8300, loss[loss=0.1838, simple_loss=0.2715, pruned_loss=0.04802, over 4908.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2646, pruned_loss=0.04069, over 937566.26 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:01:07,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242772.0, ans=0.1 +2024-07-29 05:01:07,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242772.0, ans=0.1 +2024-07-29 05:01:17,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=242785.33333333334, ans=0.0 +2024-07-29 05:01:17,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=242785.33333333334, ans=0.2 +2024-07-29 05:01:21,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=242798.66666666666, ans=0.025 +2024-07-29 05:01:23,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242798.66666666666, ans=0.1 +2024-07-29 05:01:24,665 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.50 vs. limit=12.0 +2024-07-29 05:01:26,869 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.743e+01 6.319e+01 7.194e+01 1.218e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-29 05:01:34,252 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:01:36,449 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.52 vs. limit=15.0 +2024-07-29 05:01:38,531 INFO [train.py:1114] (1/4) Epoch 18, batch 8350, loss[loss=0.197, simple_loss=0.2871, pruned_loss=0.05343, over 4804.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2645, pruned_loss=0.04085, over 940569.62 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:01:50,643 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.95 vs. limit=22.5 +2024-07-29 05:01:53,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=242852.0, ans=0.125 +2024-07-29 05:02:02,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242865.33333333334, ans=0.1 +2024-07-29 05:02:03,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242865.33333333334, ans=0.1 +2024-07-29 05:02:05,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=242878.66666666666, ans=0.0 +2024-07-29 05:02:07,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=242878.66666666666, ans=0.025 +2024-07-29 05:02:11,791 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. 
limit=15.0 +2024-07-29 05:02:14,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=242892.0, ans=0.2 +2024-07-29 05:02:18,747 INFO [train.py:1114] (1/4) Epoch 18, batch 8400, loss[loss=0.1733, simple_loss=0.2594, pruned_loss=0.0436, over 4778.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2644, pruned_loss=0.04082, over 939303.07 frames. ], batch size: 12, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:23,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=242905.33333333334, ans=0.1 +2024-07-29 05:02:32,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=242932.0, ans=0.125 +2024-07-29 05:02:34,820 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.31 vs. limit=15.0 +2024-07-29 05:02:37,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=242932.0, ans=0.5 +2024-07-29 05:02:39,517 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.882e+01 6.556e+01 7.323e+01 1.088e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 05:02:51,082 INFO [train.py:1114] (1/4) Epoch 18, batch 8450, loss[loss=0.1905, simple_loss=0.2803, pruned_loss=0.05036, over 4791.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2653, pruned_loss=0.04096, over 938440.13 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:58,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=242985.33333333334, ans=0.0 +2024-07-29 05:02:59,066 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.80 vs. limit=22.5 +2024-07-29 05:03:01,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242985.33333333334, ans=0.1 +2024-07-29 05:03:40,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=243025.33333333334, ans=0.125 +2024-07-29 05:03:44,951 INFO [train.py:1114] (1/4) Epoch 18, batch 8500, loss[loss=0.1473, simple_loss=0.2323, pruned_loss=0.03113, over 4633.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2647, pruned_loss=0.0409, over 938480.78 frames. 
], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:03:48,930 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=243038.66666666666, ans=0.125 +2024-07-29 05:03:52,837 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=243052.0, ans=0.125 +2024-07-29 05:03:54,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243052.0, ans=0.1 +2024-07-29 05:04:00,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=243065.33333333334, ans=0.125 +2024-07-29 05:04:03,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=243065.33333333334, ans=0.0 +2024-07-29 05:04:07,965 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.501e+01 6.348e+01 7.091e+01 9.836e+01, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 05:04:12,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243092.0, ans=0.1 +2024-07-29 05:04:19,102 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=243105.33333333334, ans=0.0 +2024-07-29 05:04:19,719 INFO [train.py:1114] (1/4) Epoch 18, batch 8550, loss[loss=0.1344, simple_loss=0.2136, pruned_loss=0.02757, over 4800.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2651, pruned_loss=0.04118, over 939089.85 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:04:47,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.26 vs. limit=15.0 +2024-07-29 05:04:52,779 INFO [train.py:1114] (1/4) Epoch 18, batch 8600, loss[loss=0.1928, simple_loss=0.2984, pruned_loss=0.04361, over 4785.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2649, pruned_loss=0.04127, over 939098.51 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:04:52,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=243172.0, ans=0.0 +2024-07-29 05:05:13,978 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 5.639e+01 6.288e+01 7.210e+01 1.078e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 05:05:25,405 INFO [train.py:1114] (1/4) Epoch 18, batch 8650, loss[loss=0.1977, simple_loss=0.2932, pruned_loss=0.05115, over 4894.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2649, pruned_loss=0.04149, over 940387.34 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:05:26,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=243238.66666666666, ans=0.025 +2024-07-29 05:05:39,967 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:05:57,720 INFO [train.py:1114] (1/4) Epoch 18, batch 8700, loss[loss=0.1713, simple_loss=0.2578, pruned_loss=0.04238, over 4753.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2656, pruned_loss=0.04164, over 937915.45 frames. 
], batch size: 13, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:05:58,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-07-29 05:06:18,234 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.797e+01 6.169e+01 6.761e+01 9.579e+01, threshold=1.234e+02, percent-clipped=0.0 +2024-07-29 05:06:26,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243358.66666666666, ans=0.1 +2024-07-29 05:06:29,976 INFO [train.py:1114] (1/4) Epoch 18, batch 8750, loss[loss=0.2011, simple_loss=0.2865, pruned_loss=0.05781, over 4698.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2663, pruned_loss=0.04197, over 936454.63 frames. ], batch size: 15, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:06:35,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=243385.33333333334, ans=0.025 +2024-07-29 05:06:39,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=243385.33333333334, ans=0.0 +2024-07-29 05:06:47,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243398.66666666666, ans=0.1 +2024-07-29 05:06:57,516 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=243412.0, ans=0.0 +2024-07-29 05:06:58,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=243425.33333333334, ans=0.0 +2024-07-29 05:07:05,110 INFO [train.py:1114] (1/4) Epoch 18, batch 8800, loss[loss=0.1964, simple_loss=0.2978, pruned_loss=0.04749, over 4925.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2662, pruned_loss=0.04185, over 937117.60 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:07:06,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243438.66666666666, ans=0.125 +2024-07-29 05:07:07,025 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.88 vs. limit=15.0 +2024-07-29 05:07:13,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=243452.0, ans=0.125 +2024-07-29 05:07:23,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=243465.33333333334, ans=0.5 +2024-07-29 05:07:26,774 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.641e+01 6.387e+01 7.548e+01 9.629e+01, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 05:07:38,885 INFO [train.py:1114] (1/4) Epoch 18, batch 8850, loss[loss=0.1973, simple_loss=0.2966, pruned_loss=0.04898, over 4490.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2661, pruned_loss=0.04219, over 932317.29 frames. 
], batch size: 21, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:07:39,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=243505.33333333334, ans=0.2 +2024-07-29 05:07:41,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=243505.33333333334, ans=0.2 +2024-07-29 05:07:49,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=243518.66666666666, ans=0.2 +2024-07-29 05:07:49,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=243518.66666666666, ans=0.125 +2024-07-29 05:08:05,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=243558.66666666666, ans=0.0 +2024-07-29 05:08:09,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243558.66666666666, ans=0.125 +2024-07-29 05:08:12,256 INFO [train.py:1114] (1/4) Epoch 18, batch 8900, loss[loss=0.1521, simple_loss=0.2396, pruned_loss=0.03232, over 4926.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2659, pruned_loss=0.04229, over 930350.72 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:08:17,152 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.43 vs. limit=15.0 +2024-07-29 05:08:30,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.16 vs. limit=22.5 +2024-07-29 05:08:32,776 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.619e+01 6.277e+01 7.423e+01 9.938e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:08:33,585 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=243612.0, ans=0.125 +2024-07-29 05:08:34,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-29 05:08:34,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=243612.0, ans=0.125 +2024-07-29 05:08:44,473 INFO [train.py:1114] (1/4) Epoch 18, batch 8950, loss[loss=0.1633, simple_loss=0.2553, pruned_loss=0.03567, over 4509.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2651, pruned_loss=0.04177, over 930993.70 frames. ], batch size: 21, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:08:49,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=243638.66666666666, ans=0.025 +2024-07-29 05:08:51,680 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0 +2024-07-29 05:08:54,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.26 vs. 
limit=22.5 +2024-07-29 05:08:58,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=243665.33333333334, ans=0.125 +2024-07-29 05:09:04,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=243678.66666666666, ans=0.0 +2024-07-29 05:09:06,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.43 vs. limit=15.0 +2024-07-29 05:09:17,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.63 vs. limit=10.0 +2024-07-29 05:09:18,123 INFO [train.py:1114] (1/4) Epoch 18, batch 9000, loss[loss=0.1474, simple_loss=0.2318, pruned_loss=0.03153, over 4640.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2644, pruned_loss=0.04159, over 933847.29 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:09:18,123 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 05:09:33,029 INFO [train.py:1146] (1/4) Epoch 18, validation: loss=0.1616, simple_loss=0.2637, pruned_loss=0.02971, over 944034.00 frames. +2024-07-29 05:09:33,029 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 05:09:35,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=243705.33333333334, ans=0.125 +2024-07-29 05:09:35,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=243705.33333333334, ans=0.2 +2024-07-29 05:09:41,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243718.66666666666, ans=0.1 +2024-07-29 05:09:48,090 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=243732.0, ans=0.05 +2024-07-29 05:09:52,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=243745.33333333334, ans=0.0 +2024-07-29 05:09:54,498 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.133e+01 5.845e+01 6.498e+01 7.420e+01 1.015e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 05:10:05,951 INFO [train.py:1114] (1/4) Epoch 18, batch 9050, loss[loss=0.1485, simple_loss=0.2334, pruned_loss=0.0318, over 4530.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2637, pruned_loss=0.04138, over 934425.51 frames. 
], batch size: 10, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:11,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243785.33333333334, ans=0.1 +2024-07-29 05:10:15,528 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:10:17,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=243785.33333333334, ans=0.125 +2024-07-29 05:10:21,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243798.66666666666, ans=0.0 +2024-07-29 05:10:32,843 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=243825.33333333334, ans=0.125 +2024-07-29 05:10:36,800 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=243825.33333333334, ans=0.0 +2024-07-29 05:10:37,839 INFO [train.py:1114] (1/4) Epoch 18, batch 9100, loss[loss=0.1973, simple_loss=0.3094, pruned_loss=0.04262, over 4929.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.263, pruned_loss=0.04107, over 936978.33 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:45,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243852.0, ans=0.1 +2024-07-29 05:10:56,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243878.66666666666, ans=0.125 +2024-07-29 05:10:58,033 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.927e+01 6.725e+01 7.788e+01 1.053e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-29 05:10:58,132 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:11:08,599 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-07-29 05:11:09,471 INFO [train.py:1114] (1/4) Epoch 18, batch 9150, loss[loss=0.1764, simple_loss=0.2689, pruned_loss=0.04197, over 4805.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.264, pruned_loss=0.04142, over 935814.40 frames. 
], batch size: 14, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:13,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=243905.33333333334, ans=0.025 +2024-07-29 05:11:19,484 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=243918.66666666666, ans=0.125 +2024-07-29 05:11:24,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=243932.0, ans=10.0 +2024-07-29 05:11:28,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=243945.33333333334, ans=0.025 +2024-07-29 05:11:37,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=243958.66666666666, ans=0.0 +2024-07-29 05:11:39,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243958.66666666666, ans=0.1 +2024-07-29 05:11:42,339 INFO [train.py:1114] (1/4) Epoch 18, batch 9200, loss[loss=0.17, simple_loss=0.2523, pruned_loss=0.0438, over 4846.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2637, pruned_loss=0.04128, over 937412.00 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:47,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=243972.0, ans=0.5 +2024-07-29 05:11:48,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=243985.33333333334, ans=0.2 +2024-07-29 05:12:03,098 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.677e+01 6.144e+01 6.790e+01 1.037e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 05:12:06,645 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:12:10,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=244025.33333333334, ans=0.125 +2024-07-29 05:12:12,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=244025.33333333334, ans=0.125 +2024-07-29 05:12:14,993 INFO [train.py:1114] (1/4) Epoch 18, batch 9250, loss[loss=0.1971, simple_loss=0.2797, pruned_loss=0.05722, over 4633.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.263, pruned_loss=0.04116, over 938170.22 frames. ], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:21,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.33 vs. 
limit=22.5 +2024-07-29 05:12:22,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=244052.0, ans=0.0 +2024-07-29 05:12:28,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=244065.33333333334, ans=0.0 +2024-07-29 05:12:30,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=244065.33333333334, ans=0.1 +2024-07-29 05:12:42,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=244092.0, ans=0.0 +2024-07-29 05:12:45,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244092.0, ans=0.0 +2024-07-29 05:12:47,059 INFO [train.py:1114] (1/4) Epoch 18, batch 9300, loss[loss=0.1617, simple_loss=0.2541, pruned_loss=0.03466, over 4764.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2637, pruned_loss=0.04136, over 937958.49 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:47,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=244105.33333333334, ans=0.125 +2024-07-29 05:12:54,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=244118.66666666666, ans=0.0 +2024-07-29 05:13:04,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244132.0, ans=0.125 +2024-07-29 05:13:07,075 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.598e+01 6.030e+01 6.861e+01 1.072e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-29 05:13:09,236 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:13:12,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=244158.66666666666, ans=0.125 +2024-07-29 05:13:15,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=244158.66666666666, ans=10.0 +2024-07-29 05:13:19,383 INFO [train.py:1114] (1/4) Epoch 18, batch 9350, loss[loss=0.1362, simple_loss=0.228, pruned_loss=0.02224, over 4799.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2643, pruned_loss=0.04167, over 934584.70 frames. 
], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:25,692 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:13:26,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=244172.0, ans=0.0 +2024-07-29 05:13:36,267 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244198.66666666666, ans=0.125 +2024-07-29 05:13:40,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=244212.0, ans=0.125 +2024-07-29 05:13:41,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=244212.0, ans=0.0 +2024-07-29 05:13:48,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244225.33333333334, ans=0.125 +2024-07-29 05:13:51,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=244225.33333333334, ans=0.125 +2024-07-29 05:13:52,876 INFO [train.py:1114] (1/4) Epoch 18, batch 9400, loss[loss=0.1706, simple_loss=0.2537, pruned_loss=0.04379, over 4691.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2646, pruned_loss=0.04164, over 932536.66 frames. ], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:56,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=244238.66666666666, ans=0.0 +2024-07-29 05:14:01,009 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=244252.0, ans=0.025 +2024-07-29 05:14:03,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=244252.0, ans=0.125 +2024-07-29 05:14:07,142 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244265.33333333334, ans=0.0 +2024-07-29 05:14:08,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.48 vs. limit=15.0 +2024-07-29 05:14:14,128 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.480e+01 6.250e+01 7.248e+01 1.054e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 05:14:15,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=244278.66666666666, ans=0.0 +2024-07-29 05:14:24,919 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.37 vs. limit=22.5 +2024-07-29 05:14:25,262 INFO [train.py:1114] (1/4) Epoch 18, batch 9450, loss[loss=0.1474, simple_loss=0.2351, pruned_loss=0.02983, over 4809.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2642, pruned_loss=0.04132, over 932216.94 frames. ], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:14:26,315 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.30 vs. 
limit=22.5 +2024-07-29 05:14:31,130 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=244318.66666666666, ans=0.125 +2024-07-29 05:14:36,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=244318.66666666666, ans=0.0 +2024-07-29 05:14:39,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=244332.0, ans=0.025 +2024-07-29 05:14:39,922 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-07-29 05:14:42,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=244332.0, ans=0.0 +2024-07-29 05:14:55,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244358.66666666666, ans=0.1 +2024-07-29 05:14:56,512 INFO [train.py:1114] (1/4) Epoch 18, batch 9500, loss[loss=0.2013, simple_loss=0.2897, pruned_loss=0.0565, over 4707.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2644, pruned_loss=0.04137, over 934744.31 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:15:00,824 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:15:12,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=244398.66666666666, ans=0.0 +2024-07-29 05:15:17,053 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.635e+01 6.260e+01 7.098e+01 9.795e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 05:15:19,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=244412.0, ans=0.0 +2024-07-29 05:15:27,755 INFO [train.py:1114] (1/4) Epoch 18, batch 9550, loss[loss=0.1313, simple_loss=0.213, pruned_loss=0.02475, over 4773.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2653, pruned_loss=0.04208, over 931761.74 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:15:35,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=244452.0, ans=0.0 +2024-07-29 05:15:38,088 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.69 vs. limit=15.0 +2024-07-29 05:15:54,514 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=244492.0, ans=0.0 +2024-07-29 05:15:58,130 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-07-29 05:16:00,332 INFO [train.py:1114] (1/4) Epoch 18, batch 9600, loss[loss=0.2165, simple_loss=0.2992, pruned_loss=0.06693, over 3208.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04185, over 930432.08 frames. 
], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:01,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=244505.33333333334, ans=0.025 +2024-07-29 05:16:12,293 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=244532.0, ans=0.125 +2024-07-29 05:16:13,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=244532.0, ans=0.125 +2024-07-29 05:16:17,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=244532.0, ans=0.2 +2024-07-29 05:16:19,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=244545.33333333334, ans=0.025 +2024-07-29 05:16:21,032 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.711e+01 6.305e+01 6.902e+01 1.149e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 05:16:22,350 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=244545.33333333334, ans=0.125 +2024-07-29 05:16:24,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.65 vs. limit=22.5 +2024-07-29 05:16:26,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=244558.66666666666, ans=0.0 +2024-07-29 05:16:31,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244572.0, ans=0.0 +2024-07-29 05:16:31,667 INFO [train.py:1114] (1/4) Epoch 18, batch 9650, loss[loss=0.1703, simple_loss=0.2699, pruned_loss=0.0353, over 4861.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2651, pruned_loss=0.04174, over 927010.43 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:34,470 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=12.0 +2024-07-29 05:16:38,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=244585.33333333334, ans=0.125 +2024-07-29 05:16:43,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=244598.66666666666, ans=0.2 +2024-07-29 05:16:44,145 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=6.0 +2024-07-29 05:16:55,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=244612.0, ans=0.0 +2024-07-29 05:17:03,158 INFO [train.py:1114] (1/4) Epoch 18, batch 9700, loss[loss=0.2172, simple_loss=0.3027, pruned_loss=0.06585, over 4172.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2645, pruned_loss=0.04167, over 924963.72 frames. 
], batch size: 25, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:17:14,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=244652.0, ans=0.0
+2024-07-29 05:17:23,681 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.681e+01 6.275e+01 7.162e+01 1.082e+02, threshold=1.255e+02, percent-clipped=0.0
+2024-07-29 05:17:31,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=244692.0, ans=0.125
+2024-07-29 05:17:34,253 INFO [train.py:1114] (1/4) Epoch 18, batch 9750, loss[loss=0.1891, simple_loss=0.2798, pruned_loss=0.04923, over 4694.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2649, pruned_loss=0.04126, over 925791.61 frames. ], batch size: 15, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:17:38,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244705.33333333334, ans=0.1
+2024-07-29 05:17:44,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=244718.66666666666, ans=0.125
+2024-07-29 05:17:49,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=244732.0, ans=0.05
+2024-07-29 05:18:04,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=244758.66666666666, ans=0.025
+2024-07-29 05:18:06,001 INFO [train.py:1114] (1/4) Epoch 18, batch 9800, loss[loss=0.1628, simple_loss=0.2453, pruned_loss=0.04012, over 4710.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2639, pruned_loss=0.04108, over 925382.68 frames. ], batch size: 12, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:18:08,827 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.80 vs. limit=15.0
+2024-07-29 05:18:09,441 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=12.0
+2024-07-29 05:18:11,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=244785.33333333334, ans=0.125
+2024-07-29 05:18:12,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.86 vs. limit=22.5
+2024-07-29 05:18:17,745 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244798.66666666666, ans=0.125
+2024-07-29 05:18:26,928 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.797e+01 6.276e+01 7.162e+01 9.479e+01, threshold=1.255e+02, percent-clipped=0.0
+2024-07-29 05:18:38,926 INFO [train.py:1114] (1/4) Epoch 18, batch 9850, loss[loss=0.1614, simple_loss=0.2562, pruned_loss=0.03326, over 4908.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2648, pruned_loss=0.04157, over 928032.14 frames. ], batch size: 15, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:18:39,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=244838.66666666666, ans=0.0
+2024-07-29 05:18:48,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244852.0, ans=0.1
+2024-07-29 05:19:01,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244878.66666666666, ans=0.125
+2024-07-29 05:19:02,637 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.93 vs. limit=6.0
+2024-07-29 05:19:06,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244892.0, ans=0.125
+2024-07-29 05:19:09,932 INFO [train.py:1114] (1/4) Epoch 18, batch 9900, loss[loss=0.1885, simple_loss=0.287, pruned_loss=0.04499, over 4833.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2661, pruned_loss=0.04243, over 927030.91 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:19:15,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=244905.33333333334, ans=0.125
+2024-07-29 05:19:22,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=244932.0, ans=0.125
+2024-07-29 05:19:28,803 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.76 vs. limit=15.0
+2024-07-29 05:19:30,945 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.784e+01 6.438e+01 7.578e+01 1.058e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-29 05:19:40,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=244958.66666666666, ans=0.125
+2024-07-29 05:19:40,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=244958.66666666666, ans=0.0
+2024-07-29 05:19:41,553 INFO [train.py:1114] (1/4) Epoch 18, batch 9950, loss[loss=0.1518, simple_loss=0.2422, pruned_loss=0.03066, over 4803.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2666, pruned_loss=0.0428, over 929796.43 frames. ], batch size: 11, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:19:53,041 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244985.33333333334, ans=0.125
+2024-07-29 05:19:58,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244998.66666666666, ans=0.0
+2024-07-29 05:19:58,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244998.66666666666, ans=0.0
+2024-07-29 05:20:12,652 INFO [train.py:1114] (1/4) Epoch 18, batch 10000, loss[loss=0.2112, simple_loss=0.3, pruned_loss=0.06118, over 4610.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2694, pruned_loss=0.04383, over 926911.43 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:20:13,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245038.66666666666, ans=0.0
+2024-07-29 05:20:16,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245038.66666666666, ans=0.1
+2024-07-29 05:20:23,985 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=245052.0, ans=0.125
+2024-07-29 05:20:25,916 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=245065.33333333334, ans=0.0
+2024-07-29 05:20:30,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245078.66666666666, ans=0.125
+2024-07-29 05:20:33,060 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.790e+01 5.781e+01 6.382e+01 8.189e+01 1.255e+02, threshold=1.276e+02, percent-clipped=0.0
+2024-07-29 05:20:35,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=245078.66666666666, ans=0.0
+2024-07-29 05:20:35,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=245078.66666666666, ans=0.125
+2024-07-29 05:20:36,217 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0
+2024-07-29 05:20:45,043 INFO [train.py:1114] (1/4) Epoch 18, batch 10050, loss[loss=0.1979, simple_loss=0.274, pruned_loss=0.06091, over 3212.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.271, pruned_loss=0.04438, over 915687.62 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:20:47,883 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=245105.33333333334, ans=0.125
+2024-07-29 05:20:52,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=245118.66666666666, ans=0.07
+2024-07-29 05:20:52,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=245118.66666666666, ans=0.125
+2024-07-29 05:21:13,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=245158.66666666666, ans=0.2
+2024-07-29 05:21:16,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=245158.66666666666, ans=0.04949747468305833
+2024-07-29 05:21:19,138 INFO [train.py:1114] (1/4) Epoch 18, batch 10100, loss[loss=0.2451, simple_loss=0.3191, pruned_loss=0.0856, over 3359.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2754, pruned_loss=0.04904, over 863071.27 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:21:30,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=245185.33333333334, ans=0.025
+2024-07-29 05:21:41,089 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 6.796e+01 7.277e+01 7.758e+01 1.071e+02, threshold=1.455e+02, percent-clipped=0.0
+2024-07-29 05:21:50,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=245225.33333333334, ans=0.125
+2024-07-29 05:21:52,415 INFO [train.py:1114] (1/4) Epoch 18, batch 10150, loss[loss=0.2462, simple_loss=0.3159, pruned_loss=0.08827, over 3362.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2782, pruned_loss=0.0523, over 821637.56 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:22:10,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=245265.33333333334, ans=0.0
+2024-07-29 05:22:19,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=245292.0, ans=0.0
+2024-07-29 05:22:21,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=245292.0, ans=0.1
+2024-07-29 05:22:24,099 INFO [train.py:1114] (1/4) Epoch 18, batch 10200, loss[loss=0.251, simple_loss=0.3163, pruned_loss=0.09279, over 3411.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2813, pruned_loss=0.05536, over 790889.32 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0
+2024-07-29 05:22:28,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245305.33333333334, ans=0.125
+2024-07-29 05:22:31,398 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.74 vs. limit=15.0
+2024-07-29 05:24:06,502 INFO [train.py:1114] (1/4) Epoch 19, batch 0, loss[loss=0.154, simple_loss=0.248, pruned_loss=0.02996, over 4848.00 frames. ], tot_loss[loss=0.154, simple_loss=0.248, pruned_loss=0.02996, over 4848.00 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0
+2024-07-29 05:24:06,502 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-29 05:24:11,186 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.0673, 4.4478, 4.4031, 4.8724], device='cuda:1')
+2024-07-29 05:24:18,360 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1627, simple_loss=0.2658, pruned_loss=0.02977, over 944034.00 frames.
+2024-07-29 05:24:18,361 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-29 05:24:19,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=245336.0, ans=0.5
+2024-07-29 05:24:20,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=245336.0, ans=0.125
+2024-07-29 05:24:24,836 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.91 vs. limit=22.5
+2024-07-29 05:24:25,124 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.550e+01 7.036e+01 7.426e+01 9.937e+01, threshold=1.407e+02, percent-clipped=0.0
+2024-07-29 05:24:29,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245349.33333333334, ans=0.125
+2024-07-29 05:24:43,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=245376.0, ans=0.125
+2024-07-29 05:24:53,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=245389.33333333334, ans=0.2
+2024-07-29 05:24:55,432 INFO [train.py:1114] (1/4) Epoch 19, batch 50, loss[loss=0.1746, simple_loss=0.2482, pruned_loss=0.05051, over 4606.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2673, pruned_loss=0.04189, over 206450.15 frames. ], batch size: 11, lr: 3.97e-03, grad_scale: 32.0
+2024-07-29 05:24:58,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245402.66666666666, ans=0.1
+2024-07-29 05:24:58,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245402.66666666666, ans=0.1
+2024-07-29 05:25:04,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0
+2024-07-29 05:25:12,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.83 vs. limit=15.0
+2024-07-29 05:25:13,199 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=12.0
+2024-07-29 05:25:15,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=245442.66666666666, ans=0.0
+2024-07-29 05:25:21,487 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.80 vs. limit=15.0
+2024-07-29 05:25:29,105 INFO [train.py:1114] (1/4) Epoch 19, batch 100, loss[loss=0.156, simple_loss=0.2494, pruned_loss=0.03134, over 4639.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2674, pruned_loss=0.04118, over 366047.36 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0
+2024-07-29 05:25:29,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=245469.33333333334, ans=0.035
+2024-07-29 05:25:33,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=245469.33333333334, ans=0.0
+2024-07-29 05:25:34,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=245469.33333333334, ans=0.0
+2024-07-29 05:25:35,846 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.533e+01 6.230e+01 7.043e+01 1.593e+02, threshold=1.246e+02, percent-clipped=1.0
+2024-07-29 05:25:36,151 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. limit=10.0
+2024-07-29 05:25:36,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=245482.66666666666, ans=0.0
+2024-07-29 05:25:48,078 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.37 vs. limit=15.0
+2024-07-29 05:25:49,911 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=245509.33333333334, ans=0.0
+2024-07-29 05:25:53,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=245509.33333333334, ans=0.125
+2024-07-29 05:25:53,523 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.21 vs. limit=12.0
+2024-07-29 05:26:00,660 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=22.5
+2024-07-29 05:26:01,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=245522.66666666666, ans=0.125
+2024-07-29 05:26:02,307 INFO [train.py:1114] (1/4) Epoch 19, batch 150, loss[loss=0.1268, simple_loss=0.2065, pruned_loss=0.02354, over 4612.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2641, pruned_loss=0.03961, over 494686.73 frames. ], batch size: 11, lr: 3.97e-03, grad_scale: 32.0
+2024-07-29 05:26:06,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=245536.0, ans=0.0
+2024-07-29 05:26:20,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=245562.66666666666, ans=0.125
+2024-07-29 05:26:22,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245576.0, ans=0.125
+2024-07-29 05:26:27,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245576.0, ans=0.1
+2024-07-29 05:26:28,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=245589.33333333334, ans=0.0
+2024-07-29 05:26:29,256 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=15.0
+2024-07-29 05:26:29,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=245589.33333333334, ans=0.0
+2024-07-29 05:26:35,603 INFO [train.py:1114] (1/4) Epoch 19, batch 200, loss[loss=0.1634, simple_loss=0.268, pruned_loss=0.02938, over 4470.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2645, pruned_loss=0.04058, over 594115.05 frames. ], batch size: 21, lr: 3.97e-03, grad_scale: 32.0
+2024-07-29 05:26:42,097 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.609e+01 6.216e+01 6.903e+01 1.039e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-29 05:26:47,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=245616.0, ans=0.0
+2024-07-29 05:26:47,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245616.0, ans=0.125
+2024-07-29 05:26:50,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=245629.33333333334, ans=0.0
+2024-07-29 05:26:52,853 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245629.33333333334, ans=0.1
+2024-07-29 05:26:55,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245642.66666666666, ans=0.0
+2024-07-29 05:27:03,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245656.0, ans=0.125
+2024-07-29 05:27:06,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245656.0, ans=0.125
+2024-07-29 05:27:09,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=245656.0, ans=0.04949747468305833
+2024-07-29 05:27:10,908 INFO [train.py:1114] (1/4) Epoch 19, batch 250, loss[loss=0.2041, simple_loss=0.2943, pruned_loss=0.05697, over 4655.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2634, pruned_loss=0.04008, over 670748.21 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:27:31,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245709.33333333334, ans=0.125
+2024-07-29 05:27:33,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=245709.33333333334, ans=0.2
+2024-07-29 05:27:36,407 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=245709.33333333334, ans=0.125
+2024-07-29 05:27:43,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245722.66666666666, ans=0.125
+2024-07-29 05:27:44,366 INFO [train.py:1114] (1/4) Epoch 19, batch 300, loss[loss=0.1937, simple_loss=0.294, pruned_loss=0.04666, over 4800.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2619, pruned_loss=0.03955, over 730194.76 frames. ], batch size: 15, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:27:51,011 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.537e+01 6.057e+01 6.917e+01 1.022e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-29 05:27:58,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=245762.66666666666, ans=0.125
+2024-07-29 05:28:04,610 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.96 vs. limit=15.0
+2024-07-29 05:28:05,100 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=245776.0, ans=0.125
+2024-07-29 05:28:05,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245776.0, ans=0.125
+2024-07-29 05:28:05,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=245776.0, ans=0.0
+2024-07-29 05:28:09,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245776.0, ans=0.125
+2024-07-29 05:28:14,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=245789.33333333334, ans=0.09899494936611666
+2024-07-29 05:28:17,796 INFO [train.py:1114] (1/4) Epoch 19, batch 350, loss[loss=0.155, simple_loss=0.2387, pruned_loss=0.03564, over 4940.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2622, pruned_loss=0.03952, over 776313.06 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:28:27,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=245816.0, ans=0.05
+2024-07-29 05:28:36,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0
+2024-07-29 05:28:42,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=245842.66666666666, ans=0.2
+2024-07-29 05:28:45,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=245856.0, ans=0.125
+2024-07-29 05:28:49,123 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=245856.0, ans=15.0
+2024-07-29 05:28:51,559 INFO [train.py:1114] (1/4) Epoch 19, batch 400, loss[loss=0.1722, simple_loss=0.2668, pruned_loss=0.03876, over 4697.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03983, over 813845.89 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:28:53,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245869.33333333334, ans=0.0
+2024-07-29 05:28:56,599 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:28:58,547 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.341e+01 5.802e+01 6.594e+01 8.688e+01, threshold=1.160e+02, percent-clipped=0.0
+2024-07-29 05:29:04,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245882.66666666666, ans=0.125
+2024-07-29 05:29:04,753 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245896.0, ans=0.125
+2024-07-29 05:29:10,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=245896.0, ans=15.0
+2024-07-29 05:29:13,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245909.33333333334, ans=0.125
+2024-07-29 05:29:14,731 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0
+2024-07-29 05:29:19,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=245909.33333333334, ans=0.025
+2024-07-29 05:29:21,560 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=245922.66666666666, ans=0.0
+2024-07-29 05:29:21,845 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=15.0
+2024-07-29 05:29:27,352 INFO [train.py:1114] (1/4) Epoch 19, batch 450, loss[loss=0.1747, simple_loss=0.2858, pruned_loss=0.0318, over 4627.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.0403, over 839416.98 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:29:30,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245936.0, ans=0.1
+2024-07-29 05:29:35,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245949.33333333334, ans=0.125
+2024-07-29 05:29:36,913 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245949.33333333334, ans=0.125
+2024-07-29 05:29:39,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=245949.33333333334, ans=0.0
+2024-07-29 05:29:41,112 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=245962.66666666666, ans=0.025
+2024-07-29 05:29:50,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0
+2024-07-29 05:29:55,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=245989.33333333334, ans=0.035
+2024-07-29 05:29:58,528 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.94 vs. limit=15.0
+2024-07-29 05:30:02,705 INFO [train.py:1114] (1/4) Epoch 19, batch 500, loss[loss=0.1991, simple_loss=0.2992, pruned_loss=0.04956, over 4666.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2626, pruned_loss=0.0402, over 861611.13 frames. ], batch size: 15, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:30:06,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.15 vs. limit=15.0
+2024-07-29 05:30:06,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246002.66666666666, ans=0.1
+2024-07-29 05:30:11,114 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.84 vs. limit=15.0
+2024-07-29 05:30:11,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.402e+01 5.590e+01 6.119e+01 6.735e+01 9.052e+01, threshold=1.224e+02, percent-clipped=0.0
+2024-07-29 05:30:15,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0
+2024-07-29 05:30:18,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=246029.33333333334, ans=0.0
+2024-07-29 05:30:23,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=246029.33333333334, ans=0.125
+2024-07-29 05:30:25,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=246042.66666666666, ans=0.125
+2024-07-29 05:30:30,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=246042.66666666666, ans=0.125
+2024-07-29 05:30:31,075 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=246056.0, ans=0.2
+2024-07-29 05:30:31,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=246056.0, ans=0.125
+2024-07-29 05:30:38,075 INFO [train.py:1114] (1/4) Epoch 19, batch 550, loss[loss=0.1728, simple_loss=0.2698, pruned_loss=0.03792, over 4618.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.0401, over 877395.82 frames. ], batch size: 17, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:30:41,749 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.93 vs. limit=15.0
+2024-07-29 05:30:44,423 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.83 vs. limit=15.0
+2024-07-29 05:31:00,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=246109.33333333334, ans=0.5
+2024-07-29 05:31:11,617 INFO [train.py:1114] (1/4) Epoch 19, batch 600, loss[loss=0.1731, simple_loss=0.2557, pruned_loss=0.04524, over 4648.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2624, pruned_loss=0.04002, over 891952.31 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:31:12,104 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.35 vs. limit=22.5
+2024-07-29 05:31:15,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246136.0, ans=0.1
+2024-07-29 05:31:18,194 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+01 5.519e+01 6.137e+01 7.010e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 05:31:37,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=246189.33333333334, ans=0.0
+2024-07-29 05:31:44,597 INFO [train.py:1114] (1/4) Epoch 19, batch 650, loss[loss=0.1706, simple_loss=0.257, pruned_loss=0.04206, over 4756.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2624, pruned_loss=0.04003, over 903464.36 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:31:46,900 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0
+2024-07-29 05:31:47,459 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=246202.66666666666, ans=0.125
+2024-07-29 05:32:06,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=246242.66666666666, ans=0.2
+2024-07-29 05:32:06,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=246242.66666666666, ans=0.2
+2024-07-29 05:32:18,688 INFO [train.py:1114] (1/4) Epoch 19, batch 700, loss[loss=0.1423, simple_loss=0.2386, pruned_loss=0.02302, over 4642.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.263, pruned_loss=0.04026, over 911327.13 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:32:23,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246269.33333333334, ans=0.1
+2024-07-29 05:32:25,373 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.672e+01 6.319e+01 7.208e+01 1.301e+02, threshold=1.264e+02, percent-clipped=1.0
+2024-07-29 05:32:39,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246296.0, ans=0.1
+2024-07-29 05:32:41,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=246309.33333333334, ans=0.125
+2024-07-29 05:32:42,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246309.33333333334, ans=0.1
+2024-07-29 05:32:51,326 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0
+2024-07-29 05:32:54,396 INFO [train.py:1114] (1/4) Epoch 19, batch 750, loss[loss=0.2155, simple_loss=0.2967, pruned_loss=0.06719, over 4702.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.263, pruned_loss=0.04037, over 917778.43 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:33:28,100 INFO [train.py:1114] (1/4) Epoch 19, batch 800, loss[loss=0.148, simple_loss=0.2327, pruned_loss=0.03161, over 4856.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2625, pruned_loss=0.04016, over 922867.30 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:33:29,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=246402.66666666666, ans=0.0
+2024-07-29 05:33:31,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=246402.66666666666, ans=0.125
+2024-07-29 05:33:34,594 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.665e+01 6.243e+01 7.363e+01 1.175e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 05:33:48,877 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246442.66666666666, ans=0.0
+2024-07-29 05:33:56,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=246456.0, ans=0.125
+2024-07-29 05:33:58,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=246456.0, ans=0.125
+2024-07-29 05:34:01,527 INFO [train.py:1114] (1/4) Epoch 19, batch 850, loss[loss=0.2042, simple_loss=0.2944, pruned_loss=0.05703, over 4673.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04003, over 927224.85 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:34:01,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=246469.33333333334, ans=0.125
+2024-07-29 05:34:14,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=246496.0, ans=0.125
+2024-07-29 05:34:23,574 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246509.33333333334, ans=0.1
+2024-07-29 05:34:27,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=246509.33333333334, ans=0.07
+2024-07-29 05:34:34,788 INFO [train.py:1114] (1/4) Epoch 19, batch 900, loss[loss=0.1332, simple_loss=0.2287, pruned_loss=0.01888, over 4855.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2626, pruned_loss=0.03994, over 927776.49 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:34:41,421 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.690e+01 6.264e+01 7.142e+01 9.700e+01, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 05:34:53,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=246562.66666666666, ans=0.025
+2024-07-29 05:34:55,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=246562.66666666666, ans=0.07
+2024-07-29 05:34:59,686 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=246576.0, ans=0.0
+2024-07-29 05:35:08,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.38 vs. limit=15.0
+2024-07-29 05:35:10,548 INFO [train.py:1114] (1/4) Epoch 19, batch 950, loss[loss=0.1946, simple_loss=0.2797, pruned_loss=0.0547, over 4778.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2629, pruned_loss=0.04058, over 930102.72 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:35:14,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=246602.66666666666, ans=0.125
+2024-07-29 05:35:20,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246616.0, ans=0.0
+2024-07-29 05:35:24,650 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246616.0, ans=0.1
+2024-07-29 05:35:32,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-29 05:35:37,567 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246642.66666666666, ans=0.125
+2024-07-29 05:35:48,350 INFO [train.py:1114] (1/4) Epoch 19, batch 1000, loss[loss=0.1667, simple_loss=0.2575, pruned_loss=0.03794, over 4970.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2637, pruned_loss=0.04129, over 929837.48 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:35:50,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=246669.33333333334, ans=0.07
+2024-07-29 05:35:51,743 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=246669.33333333334, ans=0.125
+2024-07-29 05:35:54,964 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.789e+01 6.385e+01 7.432e+01 1.004e+02, threshold=1.277e+02, percent-clipped=0.0
+2024-07-29 05:35:55,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246682.66666666666, ans=0.1
+2024-07-29 05:36:10,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246709.33333333334, ans=0.0
+2024-07-29 05:36:17,982 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.91 vs. limit=15.0
+2024-07-29 05:36:21,797 INFO [train.py:1114] (1/4) Epoch 19, batch 1050, loss[loss=0.1904, simple_loss=0.2844, pruned_loss=0.04819, over 4868.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.263, pruned_loss=0.04111, over 932243.69 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:36:24,627 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=246736.0, ans=0.0
+2024-07-29 05:36:43,017 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.97 vs. limit=15.0
+2024-07-29 05:36:48,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.86 vs. limit=22.5
+2024-07-29 05:36:55,276 INFO [train.py:1114] (1/4) Epoch 19, batch 1100, loss[loss=0.1935, simple_loss=0.2905, pruned_loss=0.04824, over 4901.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2626, pruned_loss=0.04086, over 934493.26 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:37:01,943 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.545e+01 5.987e+01 6.620e+01 9.087e+01, threshold=1.197e+02, percent-clipped=0.0
+2024-07-29 05:37:12,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=246829.33333333334, ans=0.025
+2024-07-29 05:37:27,488 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0
+2024-07-29 05:37:28,456 INFO [train.py:1114] (1/4) Epoch 19, batch 1150, loss[loss=0.1603, simple_loss=0.2659, pruned_loss=0.02737, over 4896.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2626, pruned_loss=0.04061, over 934108.24 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0
+2024-07-29 05:37:43,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0
+2024-07-29 05:37:43,495 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.27 vs. limit=15.0
+2024-07-29 05:37:52,374 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.50 vs. limit=12.0
+2024-07-29 05:37:53,611 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.72 vs. limit=10.0
+2024-07-29 05:38:00,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=246922.66666666666, ans=0.125
+2024-07-29 05:38:07,849 INFO [train.py:1114] (1/4) Epoch 19, batch 1200, loss[loss=0.1717, simple_loss=0.2679, pruned_loss=0.03769, over 4876.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2628, pruned_loss=0.04097, over 933686.65 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:38:14,576 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.835e+01 6.415e+01 7.072e+01 9.087e+01, threshold=1.283e+02, percent-clipped=0.0
+2024-07-29 05:38:24,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=246962.66666666666, ans=0.0
+2024-07-29 05:38:30,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246976.0, ans=0.1
+2024-07-29 05:38:31,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246976.0, ans=0.1
+2024-07-29 05:38:33,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=246989.33333333334, ans=0.125
+2024-07-29 05:38:34,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=22.5
+2024-07-29 05:38:39,826 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.21 vs. limit=15.0
+2024-07-29 05:38:40,824 INFO [train.py:1114] (1/4) Epoch 19, batch 1250, loss[loss=0.1949, simple_loss=0.2856, pruned_loss=0.05209, over 4802.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04054, over 937517.58 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:38:41,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.41 vs. limit=22.5
+2024-07-29 05:38:46,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=247002.66666666666, ans=0.0
+2024-07-29 05:38:57,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=247029.33333333334, ans=0.125
+2024-07-29 05:38:59,447 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=247029.33333333334, ans=0.2
+2024-07-29 05:39:00,453 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=22.5
+2024-07-29 05:39:03,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=247042.66666666666, ans=0.05
+2024-07-29 05:39:13,110 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.99 vs. limit=10.0
+2024-07-29 05:39:14,089 INFO [train.py:1114] (1/4) Epoch 19, batch 1300, loss[loss=0.1842, simple_loss=0.2743, pruned_loss=0.0471, over 4732.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.262, pruned_loss=0.04032, over 939088.52 frames. ], batch size: 19, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:39:21,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=247069.33333333334, ans=0.0
+2024-07-29 05:39:24,247 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=247069.33333333334, ans=0.0
+2024-07-29 05:39:25,986 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.578e+01 5.975e+01 6.963e+01 1.137e+02, threshold=1.195e+02, percent-clipped=0.0
+2024-07-29 05:39:34,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247082.66666666666, ans=0.125
+2024-07-29 05:39:53,619 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=247122.66666666666, ans=0.125
+2024-07-29 05:39:56,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=247136.0, ans=0.0
+2024-07-29 05:39:56,774 INFO [train.py:1114] (1/4) Epoch 19, batch 1350, loss[loss=0.1784, simple_loss=0.265, pruned_loss=0.04595, over 4751.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2618, pruned_loss=0.04035, over 941130.18 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:39:59,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=12.0
+2024-07-29 05:40:03,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.17 vs. limit=22.5
+2024-07-29 05:40:21,182 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.78 vs. limit=6.0
+2024-07-29 05:40:21,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=247176.0, ans=0.95
+2024-07-29 05:40:28,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247189.33333333334, ans=0.1
+2024-07-29 05:40:32,019 INFO [train.py:1114] (1/4) Epoch 19, batch 1400, loss[loss=0.1523, simple_loss=0.2389, pruned_loss=0.03282, over 4695.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2612, pruned_loss=0.03956, over 943177.11 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:40:32,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247202.66666666666, ans=0.125
+2024-07-29 05:40:38,797 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.545e+01 5.620e+01 6.318e+01 7.023e+01 1.312e+02, threshold=1.264e+02, percent-clipped=1.0
+2024-07-29 05:40:39,062 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:40:46,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=247229.33333333334, ans=0.2
+2024-07-29 05:40:59,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=247242.66666666666, ans=0.0
+2024-07-29 05:41:00,807 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.62 vs. limit=15.0
+2024-07-29 05:41:07,799 INFO [train.py:1114] (1/4) Epoch 19, batch 1450, loss[loss=0.1966, simple_loss=0.296, pruned_loss=0.0486, over 4672.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03983, over 942767.44 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:41:25,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247296.0, ans=0.125
+2024-07-29 05:41:25,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247296.0, ans=0.0
+2024-07-29 05:41:32,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=247309.33333333334, ans=0.125
+2024-07-29 05:41:34,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247309.33333333334, ans=0.1
+2024-07-29 05:41:37,714 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:41:38,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.99 vs. limit=6.0
+2024-07-29 05:41:42,895 INFO [train.py:1114] (1/4) Epoch 19, batch 1500, loss[loss=0.1713, simple_loss=0.2553, pruned_loss=0.04363, over 4819.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2616, pruned_loss=0.03985, over 942647.96 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:41:49,694 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 5.560e+01 6.078e+01 6.890e+01 1.039e+02, threshold=1.216e+02, percent-clipped=0.0
+2024-07-29 05:42:07,737 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0
+2024-07-29 05:42:16,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247402.66666666666, ans=0.1
+2024-07-29 05:42:16,781 INFO [train.py:1114] (1/4) Epoch 19, batch 1550, loss[loss=0.1666, simple_loss=0.2548, pruned_loss=0.03924, over 4891.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.261, pruned_loss=0.04013, over 939029.47 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:42:17,805 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.34 vs. limit=22.5
+2024-07-29 05:42:22,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247402.66666666666, ans=0.125
+2024-07-29 05:42:23,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=247416.0, ans=0.2
+2024-07-29 05:42:25,164 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.12 vs. limit=15.0
+2024-07-29 05:42:27,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=247416.0, ans=0.125
+2024-07-29 05:42:32,353 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0
+2024-07-29 05:42:50,241 INFO [train.py:1114] (1/4) Epoch 19, batch 1600, loss[loss=0.1681, simple_loss=0.2694, pruned_loss=0.03333, over 4875.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.262, pruned_loss=0.04071, over 938062.69 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:42:50,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=247469.33333333334, ans=0.04949747468305833
+2024-07-29 05:42:57,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247482.66666666666, ans=0.125
+2024-07-29 05:42:58,142 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.562e+01 6.323e+01 7.561e+01 1.065e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 05:43:01,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=247482.66666666666, ans=0.125
+2024-07-29 05:43:02,385 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247482.66666666666, ans=0.125
+2024-07-29 05:43:10,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247509.33333333334, ans=0.125
+2024-07-29 05:43:21,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=247522.66666666666, ans=0.125
+2024-07-29 05:43:24,346 INFO [train.py:1114] (1/4) Epoch 19, batch 1650, loss[loss=0.2011, simple_loss=0.2947, pruned_loss=0.05374, over 4651.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2623, pruned_loss=0.0409, over 937785.23 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:43:30,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.47 vs. limit=15.0
+2024-07-29 05:43:33,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247549.33333333334, ans=0.1
+2024-07-29 05:43:45,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247576.0, ans=0.1
+2024-07-29 05:43:53,317 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=247589.33333333334, ans=0.125
+2024-07-29 05:43:59,824 INFO [train.py:1114] (1/4) Epoch 19, batch 1700, loss[loss=0.1338, simple_loss=0.2241, pruned_loss=0.02179, over 4699.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.262, pruned_loss=0.04056, over 939446.07 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:44:08,233 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.612e+01 6.497e+01 7.246e+01 1.413e+02, threshold=1.299e+02, percent-clipped=1.0
+2024-07-29 05:44:11,833 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.02 vs. limit=22.5
+2024-07-29 05:44:14,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=247629.33333333334, ans=0.0
+2024-07-29 05:44:21,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247642.66666666666, ans=0.125
+2024-07-29 05:44:25,037 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=247642.66666666666, ans=0.125
+2024-07-29 05:44:25,830 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=247642.66666666666, ans=0.125
+2024-07-29 05:44:31,645 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:44:34,125 INFO [train.py:1114] (1/4) Epoch 19, batch 1750, loss[loss=0.1621, simple_loss=0.2479, pruned_loss=0.03814, over 4825.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2621, pruned_loss=0.04077, over 940395.55 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:44:46,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=247682.66666666666, ans=0.125
+2024-07-29 05:44:49,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=247696.0, ans=0.125
+2024-07-29 05:44:52,657 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.57 vs. limit=12.0
+2024-07-29 05:44:58,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=247709.33333333334, ans=0.1
+2024-07-29 05:45:10,875 INFO [train.py:1114] (1/4) Epoch 19, batch 1800, loss[loss=0.1719, simple_loss=0.2625, pruned_loss=0.0407, over 4633.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2617, pruned_loss=0.04066, over 940836.96 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:45:18,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.396e+01 5.835e+01 6.491e+01 8.060e+01 1.072e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-29 05:45:45,498 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.90 vs. limit=10.0
+2024-07-29 05:45:50,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247789.33333333334, ans=0.125
+2024-07-29 05:45:56,312 INFO [train.py:1114] (1/4) Epoch 19, batch 1850, loss[loss=0.2051, simple_loss=0.2987, pruned_loss=0.05573, over 4817.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.262, pruned_loss=0.04071, over 940487.86 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:45:59,084 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=247802.66666666666, ans=0.0
+2024-07-29 05:45:59,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=247802.66666666666, ans=0.0
+2024-07-29 05:46:01,266 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=15.0
+2024-07-29 05:46:04,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=247816.0, ans=0.07
+2024-07-29 05:46:05,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247816.0, ans=0.125
+2024-07-29 05:46:13,828 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.12 vs. limit=15.0
+2024-07-29 05:46:33,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=247856.0, ans=0.0
+2024-07-29 05:46:34,831 INFO [train.py:1114] (1/4) Epoch 19, batch 1900, loss[loss=0.1817, simple_loss=0.2773, pruned_loss=0.04305, over 4666.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.263, pruned_loss=0.04087, over 941458.21 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:46:42,575 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=247882.66666666666, ans=0.0
+2024-07-29 05:46:43,051 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.675e+01 6.450e+01 7.490e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-29 05:46:45,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247882.66666666666, ans=0.125
+2024-07-29 05:46:50,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247896.0, ans=0.125
+2024-07-29 05:46:58,901 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=247909.33333333334, ans=0.125
+2024-07-29 05:47:07,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=247922.66666666666, ans=0.125
+2024-07-29 05:47:08,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247922.66666666666, ans=0.1
+2024-07-29 05:47:11,508 INFO [train.py:1114] (1/4) Epoch 19, batch 1950, loss[loss=0.1364, simple_loss=0.2313, pruned_loss=0.02074, over 4896.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2637, pruned_loss=0.04099, over 943465.48 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:47:16,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=247936.0, ans=0.2
+2024-07-29 05:47:18,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=247949.33333333334, ans=0.0
+2024-07-29 05:47:24,134 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.16 vs. limit=15.0
+2024-07-29 05:47:27,378 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247962.66666666666, ans=0.1
+2024-07-29 05:47:57,263 INFO [train.py:1114] (1/4) Epoch 19, batch 2000, loss[loss=0.1306, simple_loss=0.2276, pruned_loss=0.01682, over 4814.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2661, pruned_loss=0.04181, over 940698.04 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:48:02,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=248002.66666666666, ans=0.125
+2024-07-29 05:48:03,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=248016.0, ans=0.5
+2024-07-29 05:48:04,710 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.516e+01 5.566e+01 6.044e+01 6.728e+01 1.044e+02, threshold=1.209e+02, percent-clipped=0.0
+2024-07-29 05:48:08,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248016.0, ans=0.1
+2024-07-29 05:48:15,060 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=248029.33333333334, ans=0.125
+2024-07-29 05:48:15,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=248029.33333333334, ans=0.125
+2024-07-29 05:48:29,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=248056.0, ans=0.125
+2024-07-29 05:48:31,045 INFO [train.py:1114] (1/4) Epoch 19, batch 2050, loss[loss=0.1503, simple_loss=0.2317, pruned_loss=0.03449, over 4612.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2654, pruned_loss=0.04177, over 938931.31 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:48:38,524 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=248082.66666666666, ans=0.2
+2024-07-29 05:48:43,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=248082.66666666666, ans=0.2
+2024-07-29 05:48:46,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=248096.0, ans=0.07
+2024-07-29 05:48:57,363 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.46 vs. limit=22.5
+2024-07-29 05:48:59,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=12.0
+2024-07-29 05:49:01,104 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=248122.66666666666, ans=0.125
+2024-07-29 05:49:06,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248136.0, ans=0.125
+2024-07-29 05:49:07,023 INFO [train.py:1114] (1/4) Epoch 19, batch 2100, loss[loss=0.1756, simple_loss=0.2722, pruned_loss=0.03944, over 4762.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2641, pruned_loss=0.04094, over 940461.47 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:49:14,318 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.765e+01 5.813e+01 6.323e+01 7.221e+01 1.090e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 05:49:17,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248149.33333333334, ans=0.1
+2024-07-29 05:49:18,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248149.33333333334, ans=0.0
+2024-07-29 05:49:21,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248162.66666666666, ans=0.125
+2024-07-29 05:49:22,559 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248162.66666666666, ans=0.125
+2024-07-29 05:49:34,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=248189.33333333334, ans=0.125
+2024-07-29 05:49:36,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=248189.33333333334, ans=0.0
+2024-07-29 05:49:40,153 INFO [train.py:1114] (1/4) Epoch 19, batch 2150, loss[loss=0.1749, simple_loss=0.2715, pruned_loss=0.03915, over 4899.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.263, pruned_loss=0.04044, over 943858.33 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:49:45,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=248202.66666666666, ans=0.125
+2024-07-29 05:49:47,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=248202.66666666666, ans=0.125
+2024-07-29 05:50:01,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=248229.33333333334, ans=0.0
+2024-07-29 05:50:02,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0
+2024-07-29 05:50:19,691 INFO [train.py:1114] (1/4) Epoch 19, batch 2200, loss[loss=0.1677, simple_loss=0.264, pruned_loss=0.0357, over 4807.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.04077, over 943336.03 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:50:27,104 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.574e+01 6.118e+01 6.873e+01 9.817e+01, threshold=1.224e+02, percent-clipped=0.0
+2024-07-29 05:50:27,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248282.66666666666, ans=0.1
+2024-07-29 05:51:19,350 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.68 vs. limit=15.0
+2024-07-29 05:51:26,222 INFO [train.py:1114] (1/4) Epoch 19, batch 2250, loss[loss=0.1799, simple_loss=0.2743, pruned_loss=0.04271, over 4689.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04074, over 941724.88 frames. 
], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:51:26,282 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:51:35,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=248349.33333333334, ans=0.0 +2024-07-29 05:51:35,604 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=248349.33333333334, ans=0.0 +2024-07-29 05:51:48,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=248376.0, ans=0.2 +2024-07-29 05:51:52,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=248376.0, ans=0.2 +2024-07-29 05:51:57,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=248389.33333333334, ans=0.125 +2024-07-29 05:52:00,493 INFO [train.py:1114] (1/4) Epoch 19, batch 2300, loss[loss=0.1533, simple_loss=0.2376, pruned_loss=0.03453, over 4935.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2616, pruned_loss=0.04023, over 939398.43 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:52:09,133 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.821e+01 6.321e+01 7.286e+01 1.025e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-29 05:52:15,888 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.55 vs. limit=15.0 +2024-07-29 05:52:29,011 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248456.0, ans=0.1 +2024-07-29 05:52:35,422 INFO [train.py:1114] (1/4) Epoch 19, batch 2350, loss[loss=0.1882, simple_loss=0.2803, pruned_loss=0.04806, over 4635.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2622, pruned_loss=0.04017, over 941519.38 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:52:42,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=248469.33333333334, ans=0.0 +2024-07-29 05:52:42,935 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. 
limit=6.0 +2024-07-29 05:52:46,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=248482.66666666666, ans=0.0 +2024-07-29 05:52:46,780 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=248482.66666666666, ans=0.0 +2024-07-29 05:52:47,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248482.66666666666, ans=0.125 +2024-07-29 05:52:51,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248496.0, ans=0.1 +2024-07-29 05:52:52,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248496.0, ans=0.125 +2024-07-29 05:53:00,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=248509.33333333334, ans=0.0 +2024-07-29 05:53:04,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=248509.33333333334, ans=0.125 +2024-07-29 05:53:12,723 INFO [train.py:1114] (1/4) Epoch 19, batch 2400, loss[loss=0.1541, simple_loss=0.2455, pruned_loss=0.03137, over 4641.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2623, pruned_loss=0.03997, over 941379.12 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:53:20,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=248536.0, ans=0.0 +2024-07-29 05:53:21,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.538e+01 5.999e+01 6.676e+01 9.357e+01, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 05:53:24,430 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248549.33333333334, ans=0.125 +2024-07-29 05:53:33,719 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.95 vs. limit=22.5 +2024-07-29 05:53:45,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. limit=15.0 +2024-07-29 05:53:48,248 INFO [train.py:1114] (1/4) Epoch 19, batch 2450, loss[loss=0.1504, simple_loss=0.2512, pruned_loss=0.02476, over 4700.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2629, pruned_loss=0.04032, over 937406.70 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:53:58,578 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.71 vs. limit=15.0 +2024-07-29 05:54:01,343 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. limit=15.0 +2024-07-29 05:54:01,797 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=248629.33333333334, ans=0.125 +2024-07-29 05:54:04,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.68 vs. 
limit=10.0 +2024-07-29 05:54:10,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248642.66666666666, ans=0.125 +2024-07-29 05:54:11,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=248642.66666666666, ans=0.0 +2024-07-29 05:54:18,182 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=248656.0, ans=0.0 +2024-07-29 05:54:21,385 INFO [train.py:1114] (1/4) Epoch 19, batch 2500, loss[loss=0.1722, simple_loss=0.2766, pruned_loss=0.03394, over 4810.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2631, pruned_loss=0.04033, over 939386.62 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:54:23,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=248669.33333333334, ans=0.125 +2024-07-29 05:54:28,633 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.769e+01 6.395e+01 7.394e+01 1.044e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 05:54:37,262 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248696.0, ans=0.1 +2024-07-29 05:54:44,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=248709.33333333334, ans=0.125 +2024-07-29 05:54:52,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248722.66666666666, ans=0.125 +2024-07-29 05:54:54,729 INFO [train.py:1114] (1/4) Epoch 19, batch 2550, loss[loss=0.1483, simple_loss=0.231, pruned_loss=0.03281, over 4802.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2629, pruned_loss=0.04, over 938582.85 frames. ], batch size: 11, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:54:54,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=248736.0, ans=0.0 +2024-07-29 05:54:57,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=248736.0, ans=0.0 +2024-07-29 05:55:16,069 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:55:20,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=248776.0, ans=0.0 +2024-07-29 05:55:25,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=248789.33333333334, ans=0.125 +2024-07-29 05:55:27,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=248789.33333333334, ans=0.025 +2024-07-29 05:55:28,618 INFO [train.py:1114] (1/4) Epoch 19, batch 2600, loss[loss=0.1551, simple_loss=0.2363, pruned_loss=0.03694, over 4886.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2637, pruned_loss=0.04055, over 938093.56 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:55:29,053 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. 
limit=15.0 +2024-07-29 05:55:35,909 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.520e+01 6.096e+01 6.841e+01 9.069e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 05:55:36,676 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248816.0, ans=0.125 +2024-07-29 05:55:44,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=248829.33333333334, ans=0.0 +2024-07-29 05:55:48,565 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.92 vs. limit=6.0 +2024-07-29 05:56:03,689 INFO [train.py:1114] (1/4) Epoch 19, batch 2650, loss[loss=0.1677, simple_loss=0.2552, pruned_loss=0.04013, over 4609.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2636, pruned_loss=0.04063, over 939635.09 frames. ], batch size: 16, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:56:05,854 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248869.33333333334, ans=0.1 +2024-07-29 05:56:07,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=248869.33333333334, ans=0.125 +2024-07-29 05:56:07,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248869.33333333334, ans=0.125 +2024-07-29 05:56:09,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=248869.33333333334, ans=0.0 +2024-07-29 05:56:39,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=248922.66666666666, ans=0.125 +2024-07-29 05:56:42,258 INFO [train.py:1114] (1/4) Epoch 19, batch 2700, loss[loss=0.2096, simple_loss=0.304, pruned_loss=0.05756, over 4741.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04053, over 939210.88 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:57:06,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248936.0, ans=0.1 +2024-07-29 05:57:09,485 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.838e+01 6.361e+01 7.244e+01 1.025e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 05:57:29,066 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248989.33333333334, ans=0.1 +2024-07-29 05:57:35,854 INFO [train.py:1114] (1/4) Epoch 19, batch 2750, loss[loss=0.1623, simple_loss=0.2562, pruned_loss=0.03423, over 4693.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2623, pruned_loss=0.04011, over 939142.32 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 05:57:44,802 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.84 vs. 
limit=22.5 +2024-07-29 05:57:45,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249016.0, ans=0.0 +2024-07-29 05:57:53,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249029.33333333334, ans=0.1 +2024-07-29 05:57:55,929 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:58:00,243 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0 +2024-07-29 05:58:05,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249056.0, ans=0.1 +2024-07-29 05:58:05,658 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.89 vs. limit=6.0 +2024-07-29 05:58:09,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.35 vs. limit=15.0 +2024-07-29 05:58:09,570 INFO [train.py:1114] (1/4) Epoch 19, batch 2800, loss[loss=0.2464, simple_loss=0.3001, pruned_loss=0.0963, over 3403.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.263, pruned_loss=0.0406, over 937331.51 frames. ], batch size: 36, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:58:11,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=249069.33333333334, ans=0.125 +2024-07-29 05:58:15,757 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=249082.66666666666, ans=0.2 +2024-07-29 05:58:17,634 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.861e+01 6.601e+01 8.054e+01 1.135e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-29 05:58:35,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=249109.33333333334, ans=0.125 +2024-07-29 05:58:43,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=249122.66666666666, ans=0.07 +2024-07-29 05:58:47,371 INFO [train.py:1114] (1/4) Epoch 19, batch 2850, loss[loss=0.1678, simple_loss=0.2523, pruned_loss=0.04162, over 4959.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.263, pruned_loss=0.04083, over 935852.95 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:58:54,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=249149.33333333334, ans=0.2 +2024-07-29 05:58:59,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=249149.33333333334, ans=0.125 +2024-07-29 05:59:00,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=249162.66666666666, ans=0.0 +2024-07-29 05:59:11,603 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.06 vs. 
limit=15.0 +2024-07-29 05:59:21,088 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=249189.33333333334, ans=0.0 +2024-07-29 05:59:22,305 INFO [train.py:1114] (1/4) Epoch 19, batch 2900, loss[loss=0.1809, simple_loss=0.2699, pruned_loss=0.04593, over 4829.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2641, pruned_loss=0.04067, over 939615.77 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:59:22,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=249202.66666666666, ans=0.125 +2024-07-29 05:59:24,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=249202.66666666666, ans=0.035 +2024-07-29 05:59:24,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=15.0 +2024-07-29 05:59:26,663 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-29 05:59:30,345 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.762e+01 6.380e+01 7.309e+01 1.230e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 05:59:46,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=249242.66666666666, ans=0.125 +2024-07-29 05:59:49,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=249256.0, ans=0.125 +2024-07-29 05:59:51,019 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.33 vs. limit=15.0 +2024-07-29 05:59:55,833 INFO [train.py:1114] (1/4) Epoch 19, batch 2950, loss[loss=0.1714, simple_loss=0.2636, pruned_loss=0.03958, over 4707.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2629, pruned_loss=0.04072, over 938982.94 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 05:59:58,230 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.44 vs. limit=15.0 +2024-07-29 06:00:04,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=249282.66666666666, ans=0.025 +2024-07-29 06:00:05,512 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=249282.66666666666, ans=0.07 +2024-07-29 06:00:25,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=249322.66666666666, ans=0.2 +2024-07-29 06:00:29,670 INFO [train.py:1114] (1/4) Epoch 19, batch 3000, loss[loss=0.1846, simple_loss=0.28, pruned_loss=0.04461, over 4758.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2628, pruned_loss=0.04, over 938511.46 frames. 
], batch size: 13, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 06:00:29,671 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 06:00:36,550 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6763, 4.5041, 3.6707, 3.2204], device='cuda:1') +2024-07-29 06:00:41,087 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.161, simple_loss=0.2631, pruned_loss=0.02943, over 944034.00 frames. +2024-07-29 06:00:41,088 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 06:00:43,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=249336.0, ans=0.0 +2024-07-29 06:00:48,811 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=249349.33333333334, ans=0.0 +2024-07-29 06:00:50,058 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.637e+01 6.118e+01 7.161e+01 1.064e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 06:00:54,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=249362.66666666666, ans=0.125 +2024-07-29 06:00:55,079 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=249362.66666666666, ans=0.0 +2024-07-29 06:00:55,116 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:00:59,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249362.66666666666, ans=0.1 +2024-07-29 06:01:01,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=249376.0, ans=0.125 +2024-07-29 06:01:04,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=249376.0, ans=0.0 +2024-07-29 06:01:05,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249376.0, ans=0.125 +2024-07-29 06:01:06,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=249376.0, ans=0.125 +2024-07-29 06:01:10,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0 +2024-07-29 06:01:12,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249389.33333333334, ans=0.0 +2024-07-29 06:01:15,364 INFO [train.py:1114] (1/4) Epoch 19, batch 3050, loss[loss=0.173, simple_loss=0.2574, pruned_loss=0.04424, over 4630.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2637, pruned_loss=0.04033, over 937508.15 frames. 
], batch size: 12, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 06:01:17,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=249402.66666666666, ans=10.0 +2024-07-29 06:01:35,890 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:01:43,192 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:01:45,839 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=249456.0, ans=0.125 +2024-07-29 06:01:46,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.51 vs. limit=15.0 +2024-07-29 06:01:46,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=249456.0, ans=0.125 +2024-07-29 06:01:51,070 INFO [train.py:1114] (1/4) Epoch 19, batch 3100, loss[loss=0.2026, simple_loss=0.3008, pruned_loss=0.05224, over 4637.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2638, pruned_loss=0.04054, over 938021.99 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 16.0 +2024-07-29 06:01:53,214 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249469.33333333334, ans=0.125 +2024-07-29 06:01:58,906 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-07-29 06:01:59,699 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.499e+01 6.213e+01 7.046e+01 1.053e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 06:02:24,694 INFO [train.py:1114] (1/4) Epoch 19, batch 3150, loss[loss=0.1468, simple_loss=0.2431, pruned_loss=0.02522, over 4561.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2634, pruned_loss=0.04047, over 938135.40 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 16.0 +2024-07-29 06:02:27,481 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=249536.0, ans=0.2 +2024-07-29 06:02:41,021 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=249562.66666666666, ans=0.125 +2024-07-29 06:02:44,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=249576.0, ans=0.125 +2024-07-29 06:02:50,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=249576.0, ans=0.0 +2024-07-29 06:02:56,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=249589.33333333334, ans=0.0 +2024-07-29 06:03:01,861 INFO [train.py:1114] (1/4) Epoch 19, batch 3200, loss[loss=0.1714, simple_loss=0.2543, pruned_loss=0.04425, over 4824.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03978, over 939583.62 frames. 
], batch size: 13, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:03:10,259 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.924e+01 6.807e+01 8.203e+01 1.254e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-29 06:03:22,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249629.33333333334, ans=0.125 +2024-07-29 06:03:23,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=249629.33333333334, ans=0.2 +2024-07-29 06:03:28,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=249629.33333333334, ans=0.125 +2024-07-29 06:03:38,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=249642.66666666666, ans=0.125 +2024-07-29 06:03:38,918 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.41 vs. limit=22.5 +2024-07-29 06:03:44,595 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=249656.0, ans=0.5 +2024-07-29 06:03:48,466 INFO [train.py:1114] (1/4) Epoch 19, batch 3250, loss[loss=0.1543, simple_loss=0.2444, pruned_loss=0.03204, over 4932.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.263, pruned_loss=0.04001, over 940628.38 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:03:50,914 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0 +2024-07-29 06:03:53,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=249669.33333333334, ans=0.125 +2024-07-29 06:03:53,988 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=249669.33333333334, ans=0.0 +2024-07-29 06:04:00,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=249682.66666666666, ans=0.2 +2024-07-29 06:04:11,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=249696.0, ans=0.0 +2024-07-29 06:07:13,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=249709.33333333334, ans=0.025 +2024-07-29 06:07:20,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=249736.0, ans=0.0 +2024-07-29 06:07:21,176 INFO [train.py:1114] (1/4) Epoch 19, batch 3300, loss[loss=0.1926, simple_loss=0.2776, pruned_loss=0.05379, over 4736.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2616, pruned_loss=0.03956, over 941155.96 frames. ], batch size: 19, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:07:38,744 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 5.786e+01 6.492e+01 7.177e+01 1.036e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 06:07:40,628 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.17 vs. 
limit=15.0 +2024-07-29 06:07:47,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249762.66666666666, ans=0.1 +2024-07-29 06:07:53,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=249776.0, ans=0.0 +2024-07-29 06:08:07,272 INFO [train.py:1114] (1/4) Epoch 19, batch 3350, loss[loss=0.1787, simple_loss=0.2701, pruned_loss=0.04361, over 4621.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2617, pruned_loss=0.03966, over 938752.01 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:08:08,058 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:08:10,149 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=249802.66666666666, ans=0.2 +2024-07-29 06:08:23,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249829.33333333334, ans=0.1 +2024-07-29 06:08:27,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=249842.66666666666, ans=0.125 +2024-07-29 06:08:32,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=249842.66666666666, ans=0.0 +2024-07-29 06:08:40,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249856.0, ans=0.1 +2024-07-29 06:08:41,237 INFO [train.py:1114] (1/4) Epoch 19, batch 3400, loss[loss=0.133, simple_loss=0.216, pruned_loss=0.02495, over 4811.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2613, pruned_loss=0.04001, over 937236.40 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:08:49,838 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.488e+01 5.998e+01 6.910e+01 1.087e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 06:08:54,505 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=249896.0, ans=15.0 +2024-07-29 06:09:08,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249922.66666666666, ans=0.125 +2024-07-29 06:09:15,442 INFO [train.py:1114] (1/4) Epoch 19, batch 3450, loss[loss=0.1821, simple_loss=0.2804, pruned_loss=0.04187, over 4701.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.263, pruned_loss=0.04072, over 937425.65 frames. 
], batch size: 19, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:09:33,418 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=249962.66666666666, ans=0.125 +2024-07-29 06:09:39,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=249976.0, ans=0.0 +2024-07-29 06:09:44,974 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:09:45,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=249989.33333333334, ans=0.025 +2024-07-29 06:09:48,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=250002.66666666666, ans=0.125 +2024-07-29 06:09:48,786 INFO [train.py:1114] (1/4) Epoch 19, batch 3500, loss[loss=0.1582, simple_loss=0.2434, pruned_loss=0.03656, over 4945.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04052, over 937800.96 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:09:56,026 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=250016.0, ans=0.04949747468305833 +2024-07-29 06:09:57,883 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.660e+01 6.096e+01 6.757e+01 8.865e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 06:10:00,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250016.0, ans=0.1 +2024-07-29 06:10:02,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250029.33333333334, ans=0.125 +2024-07-29 06:10:10,029 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250029.33333333334, ans=0.125 +2024-07-29 06:10:13,181 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=250042.66666666666, ans=0.0 +2024-07-29 06:10:24,713 INFO [train.py:1114] (1/4) Epoch 19, batch 3550, loss[loss=0.1794, simple_loss=0.2683, pruned_loss=0.04532, over 4666.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2634, pruned_loss=0.04097, over 938696.48 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:10:29,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0 +2024-07-29 06:10:44,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=250096.0, ans=0.0 +2024-07-29 06:10:45,789 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.26 vs. limit=12.0 +2024-07-29 06:11:05,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=250109.33333333334, ans=0.0 +2024-07-29 06:11:17,173 INFO [train.py:1114] (1/4) Epoch 19, batch 3600, loss[loss=0.1548, simple_loss=0.2433, pruned_loss=0.03313, over 4963.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2627, pruned_loss=0.04041, over 940160.28 frames. 
], batch size: 13, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:11:25,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=250136.0, ans=0.125 +2024-07-29 06:11:28,760 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.577e+01 6.277e+01 7.321e+01 1.396e+02, threshold=1.255e+02, percent-clipped=3.0 +2024-07-29 06:11:35,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250162.66666666666, ans=0.1 +2024-07-29 06:11:40,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250176.0, ans=0.1 +2024-07-29 06:11:42,586 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-07-29 06:11:53,651 INFO [train.py:1114] (1/4) Epoch 19, batch 3650, loss[loss=0.1882, simple_loss=0.2752, pruned_loss=0.05058, over 4899.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2617, pruned_loss=0.04008, over 940350.46 frames. ], batch size: 15, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:11:58,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250202.66666666666, ans=0.1 +2024-07-29 06:12:01,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250216.0, ans=0.125 +2024-07-29 06:12:01,809 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=250216.0, ans=0.025 +2024-07-29 06:12:04,812 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.63 vs. limit=15.0 +2024-07-29 06:12:05,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250216.0, ans=0.125 +2024-07-29 06:12:09,789 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=250229.33333333334, ans=0.0 +2024-07-29 06:12:17,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=250242.66666666666, ans=10.0 +2024-07-29 06:12:18,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=250242.66666666666, ans=0.125 +2024-07-29 06:12:21,894 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=250256.0, ans=0.2 +2024-07-29 06:12:22,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.92 vs. limit=15.0 +2024-07-29 06:12:22,480 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=250256.0, ans=0.0 +2024-07-29 06:12:22,538 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250256.0, ans=0.1 +2024-07-29 06:12:27,244 INFO [train.py:1114] (1/4) Epoch 19, batch 3700, loss[loss=0.1638, simple_loss=0.2745, pruned_loss=0.02655, over 4934.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2617, pruned_loss=0.04004, over 941154.37 frames. 
], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:12:28,723 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=250269.33333333334, ans=0.2 +2024-07-29 06:12:35,698 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.449e+01 6.027e+01 6.709e+01 1.105e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 06:12:36,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.64 vs. limit=22.5 +2024-07-29 06:12:39,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=250282.66666666666, ans=0.2 +2024-07-29 06:12:51,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=250309.33333333334, ans=0.0 +2024-07-29 06:12:54,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.31 vs. limit=6.0 +2024-07-29 06:12:58,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=250322.66666666666, ans=0.125 +2024-07-29 06:13:00,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=250322.66666666666, ans=0.0 +2024-07-29 06:13:04,996 INFO [train.py:1114] (1/4) Epoch 19, batch 3750, loss[loss=0.1541, simple_loss=0.2391, pruned_loss=0.03449, over 4822.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2618, pruned_loss=0.04016, over 942819.38 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:13:05,877 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:13:17,388 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0 +2024-07-29 06:13:20,592 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=250362.66666666666, ans=0.0 +2024-07-29 06:13:24,025 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250362.66666666666, ans=0.125 +2024-07-29 06:13:41,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=250402.66666666666, ans=0.2 +2024-07-29 06:13:41,719 INFO [train.py:1114] (1/4) Epoch 19, batch 3800, loss[loss=0.1748, simple_loss=0.2717, pruned_loss=0.0389, over 4805.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2618, pruned_loss=0.04055, over 941307.77 frames. 
], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:13:42,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=250402.66666666666, ans=0.125 +2024-07-29 06:13:50,553 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.347e+01 5.643e+01 6.466e+01 7.181e+01 9.486e+01, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 06:13:55,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=250429.33333333334, ans=0.0 +2024-07-29 06:13:58,784 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=15.0 +2024-07-29 06:14:00,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=250429.33333333334, ans=0.125 +2024-07-29 06:14:04,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=250442.66666666666, ans=0.125 +2024-07-29 06:14:09,632 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:14:15,721 INFO [train.py:1114] (1/4) Epoch 19, batch 3850, loss[loss=0.1869, simple_loss=0.2914, pruned_loss=0.04114, over 4630.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2619, pruned_loss=0.03988, over 941906.18 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:14:21,507 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250469.33333333334, ans=0.125 +2024-07-29 06:14:30,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=250482.66666666666, ans=0.025 +2024-07-29 06:14:47,905 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=250509.33333333334, ans=0.125 +2024-07-29 06:14:48,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=250509.33333333334, ans=0.125 +2024-07-29 06:15:00,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250522.66666666666, ans=0.1 +2024-07-29 06:15:01,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250536.0, ans=0.125 +2024-07-29 06:15:01,944 INFO [train.py:1114] (1/4) Epoch 19, batch 3900, loss[loss=0.1464, simple_loss=0.2519, pruned_loss=0.02044, over 4810.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2623, pruned_loss=0.04012, over 942299.59 frames. 
], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:15:10,489 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.443e+01 5.935e+01 6.800e+01 9.417e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-29 06:15:10,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=250549.33333333334, ans=0.0 +2024-07-29 06:15:13,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=250549.33333333334, ans=0.1 +2024-07-29 06:15:13,491 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.10 vs. limit=22.5 +2024-07-29 06:15:30,249 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.50 vs. limit=15.0 +2024-07-29 06:15:37,616 INFO [train.py:1114] (1/4) Epoch 19, batch 3950, loss[loss=0.1895, simple_loss=0.2688, pruned_loss=0.05516, over 4842.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2619, pruned_loss=0.04009, over 944329.94 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:15:38,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=250602.66666666666, ans=0.5 +2024-07-29 06:15:58,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250642.66666666666, ans=0.1 +2024-07-29 06:15:58,151 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=250642.66666666666, ans=0.0 +2024-07-29 06:16:12,654 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=250669.33333333334, ans=0.125 +2024-07-29 06:16:13,302 INFO [train.py:1114] (1/4) Epoch 19, batch 4000, loss[loss=0.1676, simple_loss=0.2534, pruned_loss=0.04091, over 4770.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2619, pruned_loss=0.04015, over 940622.33 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:16:21,865 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=250682.66666666666, ans=0.2 +2024-07-29 06:16:23,292 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0 +2024-07-29 06:16:24,304 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.630e+01 6.259e+01 7.111e+01 1.064e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 06:16:35,852 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=250709.33333333334, ans=0.09899494936611666 +2024-07-29 06:16:38,065 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.22 vs. 
limit=6.0 +2024-07-29 06:16:41,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=250722.66666666666, ans=0.0 +2024-07-29 06:16:48,993 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:16:49,146 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.98 vs. limit=22.5 +2024-07-29 06:16:49,518 INFO [train.py:1114] (1/4) Epoch 19, batch 4050, loss[loss=0.2264, simple_loss=0.3058, pruned_loss=0.07353, over 3430.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.262, pruned_loss=0.04037, over 939212.68 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:16:51,692 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250736.0, ans=0.125 +2024-07-29 06:16:57,284 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.58 vs. limit=15.0 +2024-07-29 06:17:00,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=250749.33333333334, ans=15.0 +2024-07-29 06:17:01,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=250749.33333333334, ans=0.0 +2024-07-29 06:17:09,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=250762.66666666666, ans=0.05 +2024-07-29 06:17:11,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=250776.0, ans=0.0 +2024-07-29 06:17:17,322 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-07-29 06:17:23,763 INFO [train.py:1114] (1/4) Epoch 19, batch 4100, loss[loss=0.1808, simple_loss=0.28, pruned_loss=0.04074, over 4902.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2627, pruned_loss=0.0404, over 938075.77 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:17:25,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=250802.66666666666, ans=10.0 +2024-07-29 06:17:32,505 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.808e+01 6.562e+01 7.760e+01 1.349e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-29 06:17:37,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250829.33333333334, ans=0.125 +2024-07-29 06:17:57,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250856.0, ans=0.1 +2024-07-29 06:18:04,072 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=250869.33333333334, ans=0.0 +2024-07-29 06:18:04,511 INFO [train.py:1114] (1/4) Epoch 19, batch 4150, loss[loss=0.1646, simple_loss=0.2569, pruned_loss=0.03617, over 4822.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2611, pruned_loss=0.03976, over 937517.95 frames. 
], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:19:19,625 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=250896.0, ans=0.0 +2024-07-29 06:19:24,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=250909.33333333334, ans=0.125 +2024-07-29 06:19:34,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250922.66666666666, ans=0.1 +2024-07-29 06:19:55,022 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.16 vs. limit=15.0 +2024-07-29 06:19:56,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=250922.66666666666, ans=0.125 +2024-07-29 06:19:59,622 INFO [train.py:1114] (1/4) Epoch 19, batch 4200, loss[loss=0.2007, simple_loss=0.2851, pruned_loss=0.05814, over 4893.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2613, pruned_loss=0.03986, over 939222.09 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:19:59,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=250936.0, ans=0.0 +2024-07-29 06:20:16,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250949.33333333334, ans=0.0 +2024-07-29 06:20:17,255 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.500e+01 5.908e+01 6.556e+01 1.150e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-29 06:20:17,767 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. limit=6.0 +2024-07-29 06:20:20,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0 +2024-07-29 06:20:21,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=250962.66666666666, ans=0.125 +2024-07-29 06:20:37,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=250989.33333333334, ans=0.125 +2024-07-29 06:20:46,992 INFO [train.py:1114] (1/4) Epoch 19, batch 4250, loss[loss=0.1619, simple_loss=0.2583, pruned_loss=0.03272, over 4642.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.04008, over 940558.24 frames. 
], batch size: 12, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:21:01,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=251016.0, ans=0.0 +2024-07-29 06:21:02,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=251016.0, ans=0.2 +2024-07-29 06:21:02,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=251016.0, ans=0.0 +2024-07-29 06:21:08,444 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251016.0, ans=0.1 +2024-07-29 06:21:21,997 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.14 vs. limit=22.5 +2024-07-29 06:22:32,138 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251056.0, ans=0.0 +2024-07-29 06:22:57,465 INFO [train.py:1114] (1/4) Epoch 19, batch 4300, loss[loss=0.1618, simple_loss=0.2581, pruned_loss=0.03274, over 4749.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.263, pruned_loss=0.04033, over 940290.80 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:23:13,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251069.33333333334, ans=0.1 +2024-07-29 06:23:42,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=251082.66666666666, ans=0.0 +2024-07-29 06:23:47,225 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.705e+01 6.376e+01 7.099e+01 1.039e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-29 06:24:42,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251096.0, ans=0.125 +2024-07-29 06:24:42,417 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.86 vs. limit=10.0 +2024-07-29 06:26:49,453 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:26:51,240 INFO [train.py:1114] (1/4) Epoch 19, batch 4350, loss[loss=0.1632, simple_loss=0.2593, pruned_loss=0.03356, over 4767.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04051, over 941068.09 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:27:20,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=251136.0, ans=0.0 +2024-07-29 06:27:42,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=251149.33333333334, ans=0.2 +2024-07-29 06:27:42,422 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0 +2024-07-29 06:28:09,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251162.66666666666, ans=0.125 +2024-07-29 06:28:24,189 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.95 vs. 
limit=15.0 +2024-07-29 06:28:25,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=251162.66666666666, ans=0.07 +2024-07-29 06:28:40,746 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=251189.33333333334, ans=0.0 +2024-07-29 06:28:43,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=251202.66666666666, ans=0.0 +2024-07-29 06:28:43,540 INFO [train.py:1114] (1/4) Epoch 19, batch 4400, loss[loss=0.1797, simple_loss=0.2732, pruned_loss=0.04306, over 4808.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.04073, over 940498.90 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:28:45,056 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=251202.66666666666, ans=0.0 +2024-07-29 06:28:47,313 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0 +2024-07-29 06:28:52,352 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.682e+01 6.192e+01 7.414e+01 9.950e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 06:29:00,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=251229.33333333334, ans=0.125 +2024-07-29 06:29:01,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=251229.33333333334, ans=0.125 +2024-07-29 06:29:33,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=251256.0, ans=0.0 +2024-07-29 06:29:34,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251256.0, ans=0.125 +2024-07-29 06:29:37,348 INFO [train.py:1114] (1/4) Epoch 19, batch 4450, loss[loss=0.1362, simple_loss=0.2248, pruned_loss=0.02382, over 4935.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2638, pruned_loss=0.04101, over 938773.05 frames. ], batch size: 12, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:29:48,419 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.22 vs. limit=22.5 +2024-07-29 06:29:48,830 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.99 vs. limit=15.0 +2024-07-29 06:29:55,902 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=251296.0, ans=0.0 +2024-07-29 06:30:03,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=251309.33333333334, ans=0.1 +2024-07-29 06:30:07,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=251322.66666666666, ans=0.04949747468305833 +2024-07-29 06:30:07,580 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.44 vs. 
limit=15.0 +2024-07-29 06:30:13,622 INFO [train.py:1114] (1/4) Epoch 19, batch 4500, loss[loss=0.1684, simple_loss=0.2783, pruned_loss=0.02922, over 4740.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2645, pruned_loss=0.04084, over 938223.70 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:31:20,187 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.536e+01 6.082e+01 6.951e+01 9.632e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-29 06:31:20,396 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=251349.33333333334, ans=0.0 +2024-07-29 06:31:20,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251349.33333333334, ans=0.125 +2024-07-29 06:31:20,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=251349.33333333334, ans=0.0 +2024-07-29 06:31:22,005 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=15.0 +2024-07-29 06:31:23,206 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.44 vs. limit=15.0 +2024-07-29 06:31:23,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=251349.33333333334, ans=0.125 +2024-07-29 06:32:05,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251362.66666666666, ans=0.125 +2024-07-29 06:32:11,016 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.19 vs. limit=12.0 +2024-07-29 06:32:16,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251389.33333333334, ans=0.1 +2024-07-29 06:32:20,710 INFO [train.py:1114] (1/4) Epoch 19, batch 4550, loss[loss=0.1727, simple_loss=0.2683, pruned_loss=0.03852, over 4880.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.264, pruned_loss=0.04064, over 939942.49 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:32:26,538 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.69 vs. limit=15.0 +2024-07-29 06:32:36,694 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.59 vs. limit=15.0 +2024-07-29 06:32:37,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=251429.33333333334, ans=0.125 +2024-07-29 06:32:37,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=251429.33333333334, ans=0.0 +2024-07-29 06:32:40,393 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.56 vs. limit=15.0 +2024-07-29 06:32:48,647 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.55 vs. 
limit=15.0 +2024-07-29 06:33:01,144 INFO [train.py:1114] (1/4) Epoch 19, batch 4600, loss[loss=0.1711, simple_loss=0.2708, pruned_loss=0.03572, over 4538.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2633, pruned_loss=0.04013, over 938356.14 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:33:12,679 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.756e+01 6.471e+01 7.460e+01 1.091e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-29 06:33:21,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=251496.0, ans=0.05 +2024-07-29 06:33:29,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=251509.33333333334, ans=0.125 +2024-07-29 06:33:42,765 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=251509.33333333334, ans=0.2 +2024-07-29 06:33:56,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=251509.33333333334, ans=0.0 +2024-07-29 06:34:01,543 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=251522.66666666666, ans=0.2 +2024-07-29 06:34:16,610 INFO [train.py:1114] (1/4) Epoch 19, batch 4650, loss[loss=0.1868, simple_loss=0.2719, pruned_loss=0.05085, over 4842.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2638, pruned_loss=0.04017, over 940072.93 frames. ], batch size: 16, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:34:16,652 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=251536.0, ans=0.025 +2024-07-29 06:34:33,951 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.12 vs. limit=15.0 +2024-07-29 06:35:07,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=251562.66666666666, ans=0.0 +2024-07-29 06:35:24,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=251576.0, ans=0.125 +2024-07-29 06:36:00,591 INFO [train.py:1114] (1/4) Epoch 19, batch 4700, loss[loss=0.1751, simple_loss=0.2689, pruned_loss=0.04061, over 4703.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2636, pruned_loss=0.04024, over 937454.30 frames. 
], batch size: 11, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:36:28,168 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.853e+01 6.382e+01 7.357e+01 1.166e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 06:36:47,666 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:38:00,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=251656.0, ans=0.2 +2024-07-29 06:38:01,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=251656.0, ans=0.125 +2024-07-29 06:38:02,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251656.0, ans=0.1 +2024-07-29 06:38:03,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=251656.0, ans=0.2 +2024-07-29 06:38:08,251 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251656.0, ans=0.0 +2024-07-29 06:38:09,494 INFO [train.py:1114] (1/4) Epoch 19, batch 4750, loss[loss=0.1632, simple_loss=0.2623, pruned_loss=0.03205, over 4484.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2635, pruned_loss=0.04022, over 935729.19 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:38:28,875 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-07-29 06:38:48,359 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. limit=6.0 +2024-07-29 06:39:20,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=251696.0, ans=0.0 +2024-07-29 06:39:23,589 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.63 vs. limit=15.0 +2024-07-29 06:39:25,340 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=251696.0, ans=0.025 +2024-07-29 06:39:34,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=251709.33333333334, ans=0.2 +2024-07-29 06:39:45,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=251709.33333333334, ans=0.125 +2024-07-29 06:40:01,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=251722.66666666666, ans=0.0 +2024-07-29 06:40:04,350 INFO [train.py:1114] (1/4) Epoch 19, batch 4800, loss[loss=0.2212, simple_loss=0.3158, pruned_loss=0.06332, over 4697.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.04051, over 933473.78 frames. 
], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:40:05,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=251736.0, ans=0.125 +2024-07-29 06:40:06,421 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=251736.0, ans=0.0 +2024-07-29 06:40:33,250 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.823e+01 6.588e+01 7.932e+01 1.236e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 06:40:37,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=251749.33333333334, ans=0.125 +2024-07-29 06:41:20,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=251776.0, ans=0.2 +2024-07-29 06:41:35,847 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=251789.33333333334, ans=0.125 +2024-07-29 06:41:52,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251802.66666666666, ans=0.125 +2024-07-29 06:41:52,681 INFO [train.py:1114] (1/4) Epoch 19, batch 4850, loss[loss=0.1737, simple_loss=0.2738, pruned_loss=0.03683, over 4732.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2637, pruned_loss=0.041, over 932683.29 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:42:16,962 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.96 vs. limit=12.0 +2024-07-29 06:42:17,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=251816.0, ans=0.0 +2024-07-29 06:42:23,224 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.20 vs. limit=15.0 +2024-07-29 06:42:39,695 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=251842.66666666666, ans=0.025 +2024-07-29 06:42:52,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=251856.0, ans=0.125 +2024-07-29 06:42:55,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=251856.0, ans=0.025 +2024-07-29 06:43:02,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251856.0, ans=0.1 +2024-07-29 06:43:11,734 INFO [train.py:1114] (1/4) Epoch 19, batch 4900, loss[loss=0.1838, simple_loss=0.2715, pruned_loss=0.04798, over 4754.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2644, pruned_loss=0.04129, over 934378.75 frames. 
], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:43:35,794 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251869.33333333334, ans=0.1 +2024-07-29 06:43:55,401 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.611e+01 6.100e+01 6.685e+01 9.009e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 06:43:56,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=251882.66666666666, ans=0.125 +2024-07-29 06:43:58,365 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=251882.66666666666, ans=0.125 +2024-07-29 06:43:59,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251896.0, ans=0.125 +2024-07-29 06:44:11,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=251896.0, ans=22.5 +2024-07-29 06:44:42,170 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=251909.33333333334, ans=15.0 +2024-07-29 06:44:51,787 INFO [train.py:1114] (1/4) Epoch 19, batch 4950, loss[loss=0.1971, simple_loss=0.282, pruned_loss=0.0561, over 3258.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2651, pruned_loss=0.04162, over 931202.93 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 64.0 +2024-07-29 06:44:58,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=251936.0, ans=0.125 +2024-07-29 06:45:11,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=251962.66666666666, ans=0.125 +2024-07-29 06:45:39,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=251976.0, ans=0.0 +2024-07-29 06:45:42,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=251976.0, ans=0.0 +2024-07-29 06:45:53,094 INFO [train.py:1114] (1/4) Epoch 19, batch 5000, loss[loss=0.1874, simple_loss=0.2801, pruned_loss=0.04731, over 4656.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2643, pruned_loss=0.04142, over 935132.99 frames. 
], batch size: 14, lr: 3.91e-03, grad_scale: 64.0 +2024-07-29 06:46:06,043 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=252002.66666666666, ans=0.0 +2024-07-29 06:46:17,619 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.743e+01 6.406e+01 6.805e+01 1.014e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-29 06:46:20,499 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=252016.0, ans=0.125 +2024-07-29 06:46:20,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=252016.0, ans=0.2 +2024-07-29 06:46:31,583 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=252029.33333333334, ans=0.04949747468305833 +2024-07-29 06:46:50,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=252056.0, ans=0.07 +2024-07-29 06:46:51,739 INFO [train.py:1114] (1/4) Epoch 19, batch 5050, loss[loss=0.1645, simple_loss=0.2539, pruned_loss=0.03753, over 4853.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2634, pruned_loss=0.04076, over 937356.88 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 64.0 +2024-07-29 06:46:57,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252069.33333333334, ans=0.1 +2024-07-29 06:47:25,252 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=252096.0, ans=0.0 +2024-07-29 06:47:38,060 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.66 vs. limit=15.0 +2024-07-29 06:47:38,655 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-29 06:47:40,382 INFO [train.py:1114] (1/4) Epoch 19, batch 5100, loss[loss=0.1468, simple_loss=0.2387, pruned_loss=0.02742, over 4771.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2644, pruned_loss=0.04124, over 935273.39 frames. 
], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:47:55,092 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.744e+01 6.473e+01 7.169e+01 1.065e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 06:48:49,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=252176.0, ans=0.025 +2024-07-29 06:49:16,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=252176.0, ans=0.09899494936611666 +2024-07-29 06:49:19,641 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252189.33333333334, ans=0.125 +2024-07-29 06:49:22,880 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=252189.33333333334, ans=0.125 +2024-07-29 06:49:24,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=252202.66666666666, ans=0.0 +2024-07-29 06:49:24,762 INFO [train.py:1114] (1/4) Epoch 19, batch 5150, loss[loss=0.1587, simple_loss=0.2567, pruned_loss=0.03031, over 4836.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2651, pruned_loss=0.04121, over 935880.32 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:49:30,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=252202.66666666666, ans=0.125 +2024-07-29 06:49:36,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.50 vs. limit=22.5 +2024-07-29 06:49:41,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=252216.0, ans=0.125 +2024-07-29 06:49:41,669 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.81 vs. limit=22.5 +2024-07-29 06:49:43,626 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0 +2024-07-29 06:49:49,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252229.33333333334, ans=0.125 +2024-07-29 06:49:57,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=252242.66666666666, ans=0.2 +2024-07-29 06:49:59,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=252256.0, ans=0.025 +2024-07-29 06:50:04,664 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=252256.0, ans=0.2 +2024-07-29 06:50:13,872 INFO [train.py:1114] (1/4) Epoch 19, batch 5200, loss[loss=0.1782, simple_loss=0.2737, pruned_loss=0.04135, over 4677.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2642, pruned_loss=0.04087, over 936155.67 frames. 
], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:16,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=252269.33333333334, ans=0.5 +2024-07-29 06:50:20,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.06 vs. limit=22.5 +2024-07-29 06:50:24,636 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.788e+01 6.281e+01 7.022e+01 9.096e+01, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 06:50:40,986 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=252309.33333333334, ans=0.0 +2024-07-29 06:50:49,260 INFO [train.py:1114] (1/4) Epoch 19, batch 5250, loss[loss=0.1617, simple_loss=0.2466, pruned_loss=0.03835, over 4897.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.263, pruned_loss=0.04065, over 935661.77 frames. ], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:50,007 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=252336.0, ans=0.0 +2024-07-29 06:50:54,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=22.5 +2024-07-29 06:51:02,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=252349.33333333334, ans=0.5 +2024-07-29 06:51:17,266 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252389.33333333334, ans=0.1 +2024-07-29 06:51:24,376 INFO [train.py:1114] (1/4) Epoch 19, batch 5300, loss[loss=0.1772, simple_loss=0.2697, pruned_loss=0.04237, over 4649.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2624, pruned_loss=0.04071, over 934140.55 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:51:30,461 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=252416.0, ans=0.0 +2024-07-29 06:51:31,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=252416.0, ans=0.125 +2024-07-29 06:51:33,495 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.685e+01 6.229e+01 6.963e+01 9.686e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 06:51:34,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.47 vs. limit=15.0 +2024-07-29 06:51:39,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=252429.33333333334, ans=0.0 +2024-07-29 06:51:42,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=252429.33333333334, ans=0.0 +2024-07-29 06:51:57,736 INFO [train.py:1114] (1/4) Epoch 19, batch 5350, loss[loss=0.1413, simple_loss=0.2217, pruned_loss=0.03048, over 4534.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2632, pruned_loss=0.04132, over 936171.06 frames. 
], batch size: 10, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:07,157 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=252482.66666666666, ans=0.0 +2024-07-29 06:52:09,243 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=252482.66666666666, ans=0.125 +2024-07-29 06:52:14,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.65 vs. limit=15.0 +2024-07-29 06:52:28,892 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=252522.66666666666, ans=0.125 +2024-07-29 06:52:32,370 INFO [train.py:1114] (1/4) Epoch 19, batch 5400, loss[loss=0.1586, simple_loss=0.2513, pruned_loss=0.03291, over 4183.00 frames. ], tot_loss[loss=0.175, simple_loss=0.265, pruned_loss=0.04253, over 929791.75 frames. ], batch size: 25, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:33,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0 +2024-07-29 06:52:37,869 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=252536.0, ans=0.07 +2024-07-29 06:52:40,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=252549.33333333334, ans=0.2 +2024-07-29 06:52:42,196 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.716e+01 6.217e+01 6.684e+01 8.948e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 06:52:42,454 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=252549.33333333334, ans=0.0 +2024-07-29 06:52:56,375 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:53:05,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=252589.33333333334, ans=0.0 +2024-07-29 06:53:08,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.91 vs. limit=12.0 +2024-07-29 06:53:08,929 INFO [train.py:1114] (1/4) Epoch 19, batch 5450, loss[loss=0.1417, simple_loss=0.2293, pruned_loss=0.02704, over 4708.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2642, pruned_loss=0.04156, over 932720.65 frames. ], batch size: 11, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:40,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=252656.0, ans=0.2 +2024-07-29 06:53:45,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=252669.33333333334, ans=0.125 +2024-07-29 06:53:45,432 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=252669.33333333334, ans=0.125 +2024-07-29 06:53:45,872 INFO [train.py:1114] (1/4) Epoch 19, batch 5500, loss[loss=0.1786, simple_loss=0.2678, pruned_loss=0.04468, over 4190.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2638, pruned_loss=0.04156, over 930311.73 frames. 
], batch size: 26, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:52,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252682.66666666666, ans=0.1 +2024-07-29 06:53:54,948 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=252682.66666666666, ans=10.0 +2024-07-29 06:53:55,329 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.621e+01 6.177e+01 7.042e+01 9.819e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-29 06:53:58,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.12 vs. limit=15.0 +2024-07-29 06:54:00,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252696.0, ans=0.125 +2024-07-29 06:54:02,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252696.0, ans=0.125 +2024-07-29 06:54:04,678 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.37 vs. limit=15.0 +2024-07-29 06:54:24,294 INFO [train.py:1114] (1/4) Epoch 19, batch 5550, loss[loss=0.1395, simple_loss=0.2285, pruned_loss=0.0253, over 4705.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2634, pruned_loss=0.04123, over 932558.00 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:54:39,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252762.66666666666, ans=0.125 +2024-07-29 06:55:00,394 INFO [train.py:1114] (1/4) Epoch 19, batch 5600, loss[loss=0.1771, simple_loss=0.2745, pruned_loss=0.03989, over 4744.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2648, pruned_loss=0.04178, over 933533.37 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:03,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=252802.66666666666, ans=0.2 +2024-07-29 06:55:04,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=252802.66666666666, ans=0.035 +2024-07-29 06:55:10,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 6.000e+01 7.138e+01 7.919e+01 1.152e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-29 06:55:10,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=252816.0, ans=15.0 +2024-07-29 06:55:31,400 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.08 vs. limit=22.5 +2024-07-29 06:55:37,016 INFO [train.py:1114] (1/4) Epoch 19, batch 5650, loss[loss=0.1705, simple_loss=0.2633, pruned_loss=0.03882, over 4443.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2635, pruned_loss=0.04122, over 936277.39 frames. 
], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:37,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=252869.33333333334, ans=0.125 +2024-07-29 06:55:54,306 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-29 06:55:58,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252896.0, ans=0.1 +2024-07-29 06:55:59,756 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.65 vs. limit=15.0 +2024-07-29 06:56:00,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=252909.33333333334, ans=0.125 +2024-07-29 06:56:05,259 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=252922.66666666666, ans=0.125 +2024-07-29 06:56:13,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=252922.66666666666, ans=0.0 +2024-07-29 06:56:15,650 INFO [train.py:1114] (1/4) Epoch 19, batch 5700, loss[loss=0.1965, simple_loss=0.2867, pruned_loss=0.05312, over 4700.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.04074, over 937254.71 frames. ], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:56:20,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252936.0, ans=0.1 +2024-07-29 06:56:23,473 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.64 vs. limit=15.0 +2024-07-29 06:56:25,029 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.631e+01 6.115e+01 6.862e+01 9.521e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-29 06:56:35,832 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=252962.66666666666, ans=0.125 +2024-07-29 06:56:36,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=252962.66666666666, ans=0.025 +2024-07-29 06:56:41,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=252976.0, ans=0.125 +2024-07-29 06:56:47,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252989.33333333334, ans=0.1 +2024-07-29 06:56:57,572 INFO [train.py:1114] (1/4) Epoch 19, batch 5750, loss[loss=0.2092, simple_loss=0.3077, pruned_loss=0.05539, over 4743.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04057, over 937499.33 frames. ], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:02,023 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.91 vs. 
limit=22.5 +2024-07-29 06:57:02,475 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:57:03,434 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. limit=15.0 +2024-07-29 06:57:06,444 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-07-29 06:57:10,213 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253016.0, ans=0.125 +2024-07-29 06:57:31,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=253056.0, ans=0.125 +2024-07-29 06:57:33,965 INFO [train.py:1114] (1/4) Epoch 19, batch 5800, loss[loss=0.2073, simple_loss=0.3061, pruned_loss=0.05429, over 4739.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04097, over 937340.80 frames. ], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:35,760 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0 +2024-07-29 06:57:43,290 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+01 5.591e+01 6.504e+01 7.272e+01 1.266e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-29 06:57:45,486 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=253082.66666666666, ans=0.2 +2024-07-29 06:57:53,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253109.33333333334, ans=0.125 +2024-07-29 06:57:57,827 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=253109.33333333334, ans=0.125 +2024-07-29 06:57:58,601 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253109.33333333334, ans=0.125 +2024-07-29 06:58:07,002 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-29 06:58:08,033 INFO [train.py:1114] (1/4) Epoch 19, batch 5850, loss[loss=0.1682, simple_loss=0.2589, pruned_loss=0.03875, over 4550.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2642, pruned_loss=0.04113, over 937711.54 frames. ], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:16,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=253149.33333333334, ans=0.0 +2024-07-29 06:58:24,076 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=253162.66666666666, ans=0.125 +2024-07-29 06:58:32,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-29 06:58:46,368 INFO [train.py:1114] (1/4) Epoch 19, batch 5900, loss[loss=0.2009, simple_loss=0.2878, pruned_loss=0.05703, over 4682.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2644, pruned_loss=0.04109, over 937798.02 frames. 
], batch size: 15, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:49,890 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=253202.66666666666, ans=0.5 +2024-07-29 06:58:51,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=253202.66666666666, ans=0.125 +2024-07-29 06:58:54,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=253216.0, ans=0.125 +2024-07-29 06:58:55,596 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.657e+01 6.141e+01 7.066e+01 1.029e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 06:59:03,150 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=253229.33333333334, ans=0.0 +2024-07-29 06:59:19,487 INFO [train.py:1114] (1/4) Epoch 19, batch 5950, loss[loss=0.1915, simple_loss=0.2805, pruned_loss=0.05127, over 4681.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2639, pruned_loss=0.04073, over 939870.98 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:20,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=253269.33333333334, ans=10.0 +2024-07-29 06:59:24,392 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=253269.33333333334, ans=0.125 +2024-07-29 06:59:28,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253282.66666666666, ans=0.125 +2024-07-29 06:59:30,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253282.66666666666, ans=0.1 +2024-07-29 06:59:32,937 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:59:41,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=253296.0, ans=0.0 +2024-07-29 06:59:50,556 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=253309.33333333334, ans=0.2 +2024-07-29 06:59:52,886 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=15.0 +2024-07-29 06:59:58,954 INFO [train.py:1114] (1/4) Epoch 19, batch 6000, loss[loss=0.1754, simple_loss=0.2784, pruned_loss=0.03616, over 4121.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.264, pruned_loss=0.0408, over 936494.70 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:58,955 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 07:00:10,734 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5403, 4.1917, 3.8444, 4.2517], device='cuda:1') +2024-07-29 07:00:15,062 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1606, simple_loss=0.2627, pruned_loss=0.02924, over 944034.00 frames. 
+2024-07-29 07:00:15,081 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 07:00:24,592 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.715e+01 6.299e+01 6.877e+01 1.010e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:00:25,417 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:00:32,860 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=253362.66666666666, ans=0.2 +2024-07-29 07:00:55,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=253389.33333333334, ans=15.0 +2024-07-29 07:00:57,260 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253402.66666666666, ans=0.125 +2024-07-29 07:00:57,729 INFO [train.py:1114] (1/4) Epoch 19, batch 6050, loss[loss=0.154, simple_loss=0.2469, pruned_loss=0.03053, over 4781.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2636, pruned_loss=0.04088, over 937870.22 frames. ], batch size: 12, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:05,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=253402.66666666666, ans=0.0 +2024-07-29 07:01:05,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.36 vs. limit=22.5 +2024-07-29 07:01:15,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=253429.33333333334, ans=0.125 +2024-07-29 07:01:17,540 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253429.33333333334, ans=0.125 +2024-07-29 07:01:23,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=253442.66666666666, ans=0.2 +2024-07-29 07:01:35,546 INFO [train.py:1114] (1/4) Epoch 19, batch 6100, loss[loss=0.1896, simple_loss=0.2725, pruned_loss=0.05334, over 4678.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2635, pruned_loss=0.04078, over 937684.32 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:44,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=253482.66666666666, ans=0.0 +2024-07-29 07:01:46,499 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.746e+01 6.337e+01 7.599e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 07:01:46,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253482.66666666666, ans=0.1 +2024-07-29 07:01:57,840 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. 
limit=15.0 +2024-07-29 07:02:01,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=253509.33333333334, ans=0.125 +2024-07-29 07:02:01,628 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=253509.33333333334, ans=0.5 +2024-07-29 07:02:10,980 INFO [train.py:1114] (1/4) Epoch 19, batch 6150, loss[loss=0.2255, simple_loss=0.308, pruned_loss=0.0715, over 3234.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2635, pruned_loss=0.04066, over 936473.36 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:02:11,468 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.73 vs. limit=15.0 +2024-07-29 07:02:18,826 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=253549.33333333334, ans=0.025 +2024-07-29 07:02:29,129 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=253562.66666666666, ans=0.125 +2024-07-29 07:02:45,975 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.25 vs. limit=15.0 +2024-07-29 07:02:46,250 INFO [train.py:1114] (1/4) Epoch 19, batch 6200, loss[loss=0.1717, simple_loss=0.2663, pruned_loss=0.03852, over 4737.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.04088, over 936447.16 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:03:00,847 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.872e+01 6.274e+01 7.114e+01 1.110e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 07:03:26,824 INFO [train.py:1114] (1/4) Epoch 19, batch 6250, loss[loss=0.1517, simple_loss=0.2427, pruned_loss=0.03034, over 4812.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2635, pruned_loss=0.04091, over 934409.54 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:03:31,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=253669.33333333334, ans=0.125 +2024-07-29 07:03:33,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=253682.66666666666, ans=0.125 +2024-07-29 07:03:47,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253709.33333333334, ans=0.1 +2024-07-29 07:03:53,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=253722.66666666666, ans=0.125 +2024-07-29 07:04:00,507 INFO [train.py:1114] (1/4) Epoch 19, batch 6300, loss[loss=0.1254, simple_loss=0.2101, pruned_loss=0.02038, over 4548.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2628, pruned_loss=0.04069, over 930473.16 frames. 
], batch size: 10, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:08,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=253749.33333333334, ans=0.0 +2024-07-29 07:04:09,765 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.656e+01 6.439e+01 7.394e+01 1.114e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 07:04:15,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=253762.66666666666, ans=0.2 +2024-07-29 07:04:20,766 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253762.66666666666, ans=0.125 +2024-07-29 07:04:47,720 INFO [train.py:1114] (1/4) Epoch 19, batch 6350, loss[loss=0.1905, simple_loss=0.295, pruned_loss=0.04299, over 4507.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2623, pruned_loss=0.04089, over 934242.13 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:59,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=253816.0, ans=0.125 +2024-07-29 07:05:09,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=253842.66666666666, ans=0.125 +2024-07-29 07:05:10,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=253842.66666666666, ans=0.0 +2024-07-29 07:05:15,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=253856.0, ans=0.0 +2024-07-29 07:05:20,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=253869.33333333334, ans=0.2 +2024-07-29 07:05:21,130 INFO [train.py:1114] (1/4) Epoch 19, batch 6400, loss[loss=0.1673, simple_loss=0.2598, pruned_loss=0.03737, over 4648.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2618, pruned_loss=0.04073, over 935904.15 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:05:22,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=253869.33333333334, ans=0.125 +2024-07-29 07:05:25,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=253869.33333333334, ans=0.2 +2024-07-29 07:05:26,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=253869.33333333334, ans=0.125 +2024-07-29 07:05:29,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253882.66666666666, ans=0.125 +2024-07-29 07:05:30,225 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.936e+01 6.680e+01 7.365e+01 1.184e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-29 07:05:32,636 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.93 vs. 
limit=15.0 +2024-07-29 07:05:41,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=253909.33333333334, ans=12.0 +2024-07-29 07:05:55,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=253922.66666666666, ans=0.125 +2024-07-29 07:05:56,609 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=253922.66666666666, ans=0.2 +2024-07-29 07:05:57,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=253922.66666666666, ans=0.2 +2024-07-29 07:06:01,007 INFO [train.py:1114] (1/4) Epoch 19, batch 6450, loss[loss=0.1624, simple_loss=0.2586, pruned_loss=0.03306, over 4495.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2623, pruned_loss=0.04066, over 939129.55 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:08,342 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:06:09,329 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.75 vs. limit=22.5 +2024-07-29 07:06:29,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=253976.0, ans=0.2 +2024-07-29 07:06:36,239 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:06:44,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=253989.33333333334, ans=0.09899494936611666 +2024-07-29 07:06:45,347 INFO [train.py:1114] (1/4) Epoch 19, batch 6500, loss[loss=0.2288, simple_loss=0.2972, pruned_loss=0.08014, over 3465.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2625, pruned_loss=0.0406, over 940370.03 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:50,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=254002.66666666666, ans=0.125 +2024-07-29 07:06:54,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=254016.0, ans=0.0 +2024-07-29 07:06:54,894 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.939e+01 5.824e+01 6.462e+01 7.830e+01 1.082e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-29 07:06:59,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. 
limit=10.0 +2024-07-29 07:07:03,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254029.33333333334, ans=0.0 +2024-07-29 07:07:04,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254029.33333333334, ans=0.1 +2024-07-29 07:07:05,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=254042.66666666666, ans=0.0 +2024-07-29 07:07:11,451 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=254042.66666666666, ans=0.125 +2024-07-29 07:07:20,275 INFO [train.py:1114] (1/4) Epoch 19, batch 6550, loss[loss=0.1605, simple_loss=0.2583, pruned_loss=0.03134, over 4810.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2619, pruned_loss=0.03957, over 943223.53 frames. ], batch size: 11, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:07:21,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=254069.33333333334, ans=0.2 +2024-07-29 07:07:42,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254109.33333333334, ans=0.125 +2024-07-29 07:07:56,772 INFO [train.py:1114] (1/4) Epoch 19, batch 6600, loss[loss=0.1667, simple_loss=0.2657, pruned_loss=0.03385, over 4929.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2616, pruned_loss=0.03929, over 944987.13 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:03,154 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=254149.33333333334, ans=0.125 +2024-07-29 07:08:03,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=254149.33333333334, ans=0.125 +2024-07-29 07:08:06,404 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.577e+01 6.191e+01 6.872e+01 1.333e+02, threshold=1.238e+02, percent-clipped=1.0 +2024-07-29 07:08:07,205 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254149.33333333334, ans=0.0 +2024-07-29 07:08:09,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=254149.33333333334, ans=0.125 +2024-07-29 07:08:26,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=254189.33333333334, ans=0.2 +2024-07-29 07:08:30,608 INFO [train.py:1114] (1/4) Epoch 19, batch 6650, loss[loss=0.1627, simple_loss=0.2597, pruned_loss=0.03279, over 4564.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2611, pruned_loss=0.03941, over 943761.52 frames. 
], batch size: 17, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:30,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=254202.66666666666, ans=0.09899494936611666 +2024-07-29 07:08:44,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254229.33333333334, ans=0.125 +2024-07-29 07:08:44,959 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=254229.33333333334, ans=0.125 +2024-07-29 07:08:47,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=254229.33333333334, ans=0.125 +2024-07-29 07:08:48,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=254229.33333333334, ans=0.035 +2024-07-29 07:08:49,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254229.33333333334, ans=0.125 +2024-07-29 07:09:04,189 INFO [train.py:1114] (1/4) Epoch 19, batch 6700, loss[loss=0.1645, simple_loss=0.2597, pruned_loss=0.03461, over 4724.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2612, pruned_loss=0.03947, over 942642.24 frames. ], batch size: 19, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:13,673 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.681e+01 5.588e+01 6.301e+01 6.767e+01 8.851e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:09:15,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=254282.66666666666, ans=0.2 +2024-07-29 07:09:18,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=254296.0, ans=0.125 +2024-07-29 07:09:21,303 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=254296.0, ans=0.125 +2024-07-29 07:09:36,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=254322.66666666666, ans=0.0 +2024-07-29 07:09:36,954 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=254322.66666666666, ans=0.0 +2024-07-29 07:09:38,291 INFO [train.py:1114] (1/4) Epoch 19, batch 6750, loss[loss=0.1973, simple_loss=0.2948, pruned_loss=0.04993, over 4226.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2618, pruned_loss=0.03942, over 941200.69 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:40,506 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254336.0, ans=0.125 +2024-07-29 07:09:47,616 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.59 vs. 
limit=15.0 +2024-07-29 07:09:48,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254349.33333333334, ans=0.125 +2024-07-29 07:09:52,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254362.66666666666, ans=0.1 +2024-07-29 07:09:54,211 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=254362.66666666666, ans=0.0 +2024-07-29 07:09:55,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=254362.66666666666, ans=0.125 +2024-07-29 07:09:57,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=254362.66666666666, ans=0.125 +2024-07-29 07:10:00,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254376.0, ans=0.125 +2024-07-29 07:10:00,158 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=254376.0, ans=0.125 +2024-07-29 07:10:13,856 INFO [train.py:1114] (1/4) Epoch 19, batch 6800, loss[loss=0.2163, simple_loss=0.3155, pruned_loss=0.05856, over 4639.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2641, pruned_loss=0.04043, over 939175.10 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:15,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254402.66666666666, ans=0.1 +2024-07-29 07:10:19,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=254416.0, ans=0.125 +2024-07-29 07:10:22,884 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.699e+01 6.328e+01 7.077e+01 1.070e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:10:23,734 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254416.0, ans=0.125 +2024-07-29 07:10:31,521 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254429.33333333334, ans=0.125 +2024-07-29 07:10:33,774 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.72 vs. limit=15.0 +2024-07-29 07:10:37,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=254442.66666666666, ans=0.0 +2024-07-29 07:10:44,968 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=254456.0, ans=0.025 +2024-07-29 07:10:46,745 INFO [train.py:1114] (1/4) Epoch 19, batch 6850, loss[loss=0.1648, simple_loss=0.2679, pruned_loss=0.03084, over 4690.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2636, pruned_loss=0.04042, over 940754.49 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:52,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.99 vs. 
limit=15.0 +2024-07-29 07:10:54,644 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:11:00,696 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=254496.0, ans=0.125 +2024-07-29 07:11:03,336 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254496.0, ans=0.125 +2024-07-29 07:11:11,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=254509.33333333334, ans=0.0 +2024-07-29 07:11:20,049 INFO [train.py:1114] (1/4) Epoch 19, batch 6900, loss[loss=0.1729, simple_loss=0.2705, pruned_loss=0.03763, over 4967.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2636, pruned_loss=0.04051, over 943229.36 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:11:24,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=254536.0, ans=0.0 +2024-07-29 07:11:25,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=254536.0, ans=0.2 +2024-07-29 07:11:27,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=254549.33333333334, ans=0.0 +2024-07-29 07:11:29,545 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.808e+01 6.453e+01 7.424e+01 1.237e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 07:11:34,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=254562.66666666666, ans=0.025 +2024-07-29 07:11:35,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=254562.66666666666, ans=0.125 +2024-07-29 07:11:41,155 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0 +2024-07-29 07:11:47,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254589.33333333334, ans=0.125 +2024-07-29 07:11:53,784 INFO [train.py:1114] (1/4) Epoch 19, batch 6950, loss[loss=0.1377, simple_loss=0.2132, pruned_loss=0.0311, over 4559.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.04064, over 940393.33 frames. 
], batch size: 10, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:11:57,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=254602.66666666666, ans=0.2 +2024-07-29 07:12:05,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=254616.0, ans=0.0 +2024-07-29 07:12:09,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=254629.33333333334, ans=0.2 +2024-07-29 07:12:18,564 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=254642.66666666666, ans=0.125 +2024-07-29 07:12:21,140 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=254642.66666666666, ans=0.125 +2024-07-29 07:12:22,468 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254656.0, ans=0.125 +2024-07-29 07:12:22,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=254656.0, ans=0.0 +2024-07-29 07:12:24,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.69 vs. limit=15.0 +2024-07-29 07:12:25,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=254656.0, ans=0.125 +2024-07-29 07:12:29,158 INFO [train.py:1114] (1/4) Epoch 19, batch 7000, loss[loss=0.195, simple_loss=0.2908, pruned_loss=0.04963, over 4632.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04075, over 938491.61 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:12:29,326 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=254669.33333333334, ans=0.0 +2024-07-29 07:12:30,234 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.52 vs. limit=15.0 +2024-07-29 07:12:35,185 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=254682.66666666666, ans=0.2 +2024-07-29 07:12:36,971 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.84 vs. limit=15.0 +2024-07-29 07:12:38,423 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.806e+01 6.455e+01 7.186e+01 1.060e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 07:12:39,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=15.0 +2024-07-29 07:12:53,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=254709.33333333334, ans=0.125 +2024-07-29 07:13:02,118 INFO [train.py:1114] (1/4) Epoch 19, batch 7050, loss[loss=0.1803, simple_loss=0.2813, pruned_loss=0.03967, over 4731.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2631, pruned_loss=0.04052, over 941929.32 frames. 
], batch size: 19, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:13:06,191 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=254736.0, ans=0.05 +2024-07-29 07:13:09,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.15 vs. limit=15.0 +2024-07-29 07:13:13,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=254749.33333333334, ans=0.125 +2024-07-29 07:13:19,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254762.66666666666, ans=0.1 +2024-07-29 07:13:38,636 INFO [train.py:1114] (1/4) Epoch 19, batch 7100, loss[loss=0.1512, simple_loss=0.2469, pruned_loss=0.02776, over 4804.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2634, pruned_loss=0.04085, over 936882.42 frames. ], batch size: 15, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:13:40,673 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=254802.66666666666, ans=0.0 +2024-07-29 07:13:49,300 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.809e+01 6.351e+01 7.232e+01 1.086e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 07:13:54,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.51 vs. limit=12.0 +2024-07-29 07:13:57,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=254829.33333333334, ans=22.5 +2024-07-29 07:14:00,347 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=27.15 vs. limit=22.5 +2024-07-29 07:14:05,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254842.66666666666, ans=0.1 +2024-07-29 07:14:13,150 INFO [train.py:1114] (1/4) Epoch 19, batch 7150, loss[loss=0.1716, simple_loss=0.2641, pruned_loss=0.03954, over 4528.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2621, pruned_loss=0.04044, over 937255.21 frames. ], batch size: 21, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:14:14,548 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=254869.33333333334, ans=0.0 +2024-07-29 07:14:17,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=254869.33333333334, ans=0.04949747468305833 +2024-07-29 07:14:26,245 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=12.0 +2024-07-29 07:14:35,122 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.55 vs. 
limit=22.5 +2024-07-29 07:14:36,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=254909.33333333334, ans=0.0 +2024-07-29 07:14:43,951 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=254922.66666666666, ans=0.125 +2024-07-29 07:14:46,072 INFO [train.py:1114] (1/4) Epoch 19, batch 7200, loss[loss=0.1748, simple_loss=0.2582, pruned_loss=0.04569, over 4807.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.263, pruned_loss=0.04046, over 937765.37 frames. ], batch size: 15, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:14:46,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=254936.0, ans=0.025 +2024-07-29 07:14:52,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=254949.33333333334, ans=0.125 +2024-07-29 07:14:52,747 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=254949.33333333334, ans=0.2 +2024-07-29 07:14:54,694 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=254949.33333333334, ans=0.02 +2024-07-29 07:14:55,077 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.607e+01 6.088e+01 6.745e+01 8.858e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 07:15:06,537 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=254976.0, ans=0.125 +2024-07-29 07:15:18,455 INFO [train.py:1114] (1/4) Epoch 19, batch 7250, loss[loss=0.1618, simple_loss=0.2479, pruned_loss=0.03784, over 4868.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2626, pruned_loss=0.04026, over 939947.34 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:15:25,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=255016.0, ans=0.0 +2024-07-29 07:15:33,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=255029.33333333334, ans=0.2 +2024-07-29 07:15:38,008 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=255042.66666666666, ans=0.0 +2024-07-29 07:15:38,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255042.66666666666, ans=0.1 +2024-07-29 07:15:50,896 INFO [train.py:1114] (1/4) Epoch 19, batch 7300, loss[loss=0.1432, simple_loss=0.2321, pruned_loss=0.02717, over 4853.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2619, pruned_loss=0.03965, over 939918.93 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:15:53,799 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.11 vs. 
limit=15.0 +2024-07-29 07:16:00,098 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.606e+01 6.073e+01 6.714e+01 9.388e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-29 07:16:02,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=255082.66666666666, ans=0.07 +2024-07-29 07:16:17,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255122.66666666666, ans=0.1 +2024-07-29 07:16:23,606 INFO [train.py:1114] (1/4) Epoch 19, batch 7350, loss[loss=0.1712, simple_loss=0.2595, pruned_loss=0.0415, over 4637.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2622, pruned_loss=0.03951, over 939233.61 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:16:25,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255136.0, ans=0.125 +2024-07-29 07:16:26,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=255136.0, ans=0.05 +2024-07-29 07:16:29,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255136.0, ans=0.1 +2024-07-29 07:16:31,465 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.60 vs. limit=22.5 +2024-07-29 07:16:55,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255162.66666666666, ans=0.125 +2024-07-29 07:16:59,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=255176.0, ans=0.0 +2024-07-29 07:17:10,010 INFO [train.py:1114] (1/4) Epoch 19, batch 7400, loss[loss=0.1572, simple_loss=0.2505, pruned_loss=0.03197, over 4700.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2633, pruned_loss=0.04007, over 940573.43 frames. 
], batch size: 13, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:17:13,563 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=255202.66666666666, ans=0.125 +2024-07-29 07:17:14,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255202.66666666666, ans=0.0 +2024-07-29 07:17:19,330 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 5.806e+01 6.617e+01 8.276e+01 1.312e+02, threshold=1.323e+02, percent-clipped=3.0 +2024-07-29 07:17:25,649 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=255229.33333333334, ans=0.0 +2024-07-29 07:17:26,137 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=255229.33333333334, ans=0.1 +2024-07-29 07:17:27,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=255229.33333333334, ans=0.125 +2024-07-29 07:17:31,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255242.66666666666, ans=0.0 +2024-07-29 07:17:34,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=255242.66666666666, ans=0.0 +2024-07-29 07:17:38,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-29 07:17:39,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=255256.0, ans=0.025 +2024-07-29 07:17:42,966 INFO [train.py:1114] (1/4) Epoch 19, batch 7450, loss[loss=0.1689, simple_loss=0.2531, pruned_loss=0.04235, over 4617.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2621, pruned_loss=0.04009, over 937816.11 frames. ], batch size: 11, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:17:54,952 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=255282.66666666666, ans=0.025 +2024-07-29 07:18:02,108 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=255309.33333333334, ans=0.025 +2024-07-29 07:18:02,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255309.33333333334, ans=0.125 +2024-07-29 07:18:15,923 INFO [train.py:1114] (1/4) Epoch 19, batch 7500, loss[loss=0.2184, simple_loss=0.3068, pruned_loss=0.06503, over 3435.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2629, pruned_loss=0.04057, over 936537.95 frames. 
], batch size: 35, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:18:24,065 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=255349.33333333334, ans=0.025 +2024-07-29 07:18:25,193 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.501e+01 5.980e+01 6.814e+01 1.020e+02, threshold=1.196e+02, percent-clipped=0.0 +2024-07-29 07:18:35,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=255376.0, ans=0.125 +2024-07-29 07:18:40,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=255376.0, ans=0.025 +2024-07-29 07:18:48,937 INFO [train.py:1114] (1/4) Epoch 19, batch 7550, loss[loss=0.1753, simple_loss=0.2681, pruned_loss=0.04124, over 4618.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2643, pruned_loss=0.04106, over 936359.73 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:18:56,885 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255402.66666666666, ans=0.1 +2024-07-29 07:19:04,857 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=255429.33333333334, ans=0.125 +2024-07-29 07:19:12,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0 +2024-07-29 07:19:13,621 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=255442.66666666666, ans=0.125 +2024-07-29 07:19:13,679 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255442.66666666666, ans=0.125 +2024-07-29 07:19:30,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-07-29 07:19:31,391 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=255456.0, ans=0.0 +2024-07-29 07:19:32,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255456.0, ans=0.1 +2024-07-29 07:19:34,137 INFO [train.py:1114] (1/4) Epoch 19, batch 7600, loss[loss=0.199, simple_loss=0.2942, pruned_loss=0.05194, over 4811.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2635, pruned_loss=0.04026, over 937644.33 frames. 
], batch size: 14, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:21:09,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=255469.33333333334, ans=0.125 +2024-07-29 07:21:14,852 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.357e+01 5.885e+01 6.503e+01 9.082e+01, threshold=1.177e+02, percent-clipped=0.0 +2024-07-29 07:21:19,019 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=255496.0, ans=0.0 +2024-07-29 07:21:24,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=255496.0, ans=0.025 +2024-07-29 07:21:30,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=255509.33333333334, ans=0.0 +2024-07-29 07:21:35,327 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=255522.66666666666, ans=10.0 +2024-07-29 07:21:37,979 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=255536.0, ans=0.0 +2024-07-29 07:21:38,457 INFO [train.py:1114] (1/4) Epoch 19, batch 7650, loss[loss=0.1294, simple_loss=0.2079, pruned_loss=0.02548, over 4945.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2632, pruned_loss=0.04036, over 936681.33 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:21:45,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255549.33333333334, ans=0.125 +2024-07-29 07:21:48,517 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:21:49,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=255549.33333333334, ans=0.0 +2024-07-29 07:21:54,581 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.36 vs. limit=15.0 +2024-07-29 07:22:04,974 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=255589.33333333334, ans=0.125 +2024-07-29 07:22:06,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255589.33333333334, ans=0.1 +2024-07-29 07:22:06,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=255589.33333333334, ans=0.125 +2024-07-29 07:22:11,504 INFO [train.py:1114] (1/4) Epoch 19, batch 7700, loss[loss=0.1591, simple_loss=0.2535, pruned_loss=0.03239, over 4685.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2639, pruned_loss=0.0405, over 934497.14 frames. 
], batch size: 13, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:22:21,009 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.495e+01 5.903e+01 6.797e+01 9.764e+01, threshold=1.181e+02, percent-clipped=0.0 +2024-07-29 07:22:25,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=255629.33333333334, ans=0.05 +2024-07-29 07:22:34,483 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255642.66666666666, ans=0.1 +2024-07-29 07:22:37,399 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-29 07:22:40,011 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=12.0 +2024-07-29 07:22:41,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=255656.0, ans=15.0 +2024-07-29 07:22:45,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255669.33333333334, ans=0.1 +2024-07-29 07:22:46,044 INFO [train.py:1114] (1/4) Epoch 19, batch 7750, loss[loss=0.1591, simple_loss=0.2554, pruned_loss=0.03145, over 4922.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.264, pruned_loss=0.04083, over 935534.00 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:22:51,018 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.35 vs. limit=15.0 +2024-07-29 07:22:53,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=22.5 +2024-07-29 07:23:41,557 INFO [train.py:1114] (1/4) Epoch 19, batch 7800, loss[loss=0.1662, simple_loss=0.2654, pruned_loss=0.03346, over 4671.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2637, pruned_loss=0.0402, over 937360.29 frames. 
], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:23:41,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=255736.0, ans=0.025 +2024-07-29 07:23:50,429 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=255736.0, ans=0.0 +2024-07-29 07:23:55,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=255749.33333333334, ans=0.0 +2024-07-29 07:23:57,873 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 5.806e+01 6.397e+01 7.223e+01 9.492e+01, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:23:59,908 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=255749.33333333334, ans=0.025 +2024-07-29 07:24:00,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=255749.33333333334, ans=0.0 +2024-07-29 07:24:01,785 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=255762.66666666666, ans=0.2 +2024-07-29 07:24:04,565 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255762.66666666666, ans=0.1 +2024-07-29 07:24:21,832 INFO [train.py:1114] (1/4) Epoch 19, batch 7850, loss[loss=0.1329, simple_loss=0.2253, pruned_loss=0.02022, over 4525.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2639, pruned_loss=0.04085, over 936008.01 frames. ], batch size: 10, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:24:32,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255802.66666666666, ans=0.125 +2024-07-29 07:24:32,290 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=255802.66666666666, ans=0.025 +2024-07-29 07:24:32,308 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=255802.66666666666, ans=0.07 +2024-07-29 07:24:49,313 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=255842.66666666666, ans=0.125 +2024-07-29 07:24:51,440 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=15.0 +2024-07-29 07:24:59,390 INFO [train.py:1114] (1/4) Epoch 19, batch 7900, loss[loss=0.1739, simple_loss=0.2681, pruned_loss=0.03982, over 4866.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2648, pruned_loss=0.04107, over 933329.89 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:25:19,920 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+01 5.757e+01 6.184e+01 6.980e+01 1.069e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:25:28,928 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.44 vs. 
limit=15.0 +2024-07-29 07:25:36,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255922.66666666666, ans=0.125 +2024-07-29 07:26:55,440 INFO [train.py:1114] (1/4) Epoch 19, batch 7950, loss[loss=0.217, simple_loss=0.2998, pruned_loss=0.06714, over 3374.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2646, pruned_loss=0.04068, over 935066.28 frames. ], batch size: 35, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:27:31,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255962.66666666666, ans=0.1 +2024-07-29 07:27:50,151 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:27:50,603 INFO [train.py:1114] (1/4) Epoch 19, batch 8000, loss[loss=0.144, simple_loss=0.2272, pruned_loss=0.03042, over 4618.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2622, pruned_loss=0.04002, over 934473.89 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:27:53,548 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0 +2024-07-29 07:27:57,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=256016.0, ans=0.2 +2024-07-29 07:27:58,529 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=256016.0, ans=0.125 +2024-07-29 07:27:59,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.51 vs. limit=15.0 +2024-07-29 07:28:01,416 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.673e+01 6.447e+01 7.571e+01 1.092e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 07:28:09,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=256029.33333333334, ans=0.0 +2024-07-29 07:28:11,793 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.06 vs. limit=22.5 +2024-07-29 07:28:12,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=256042.66666666666, ans=0.0 +2024-07-29 07:28:15,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256042.66666666666, ans=0.1 +2024-07-29 07:28:15,777 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=256042.66666666666, ans=0.125 +2024-07-29 07:28:19,888 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256056.0, ans=0.0 +2024-07-29 07:28:23,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=256069.33333333334, ans=0.0 +2024-07-29 07:28:24,184 INFO [train.py:1114] (1/4) Epoch 19, batch 8050, loss[loss=0.1513, simple_loss=0.2504, pruned_loss=0.02614, over 4809.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2624, pruned_loss=0.04, over 934494.43 frames. 
], batch size: 14, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:28:34,741 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:28:34,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-07-29 07:28:39,425 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.96 vs. limit=15.0 +2024-07-29 07:28:39,915 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=256096.0, ans=0.2 +2024-07-29 07:28:46,519 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256109.33333333334, ans=0.125 +2024-07-29 07:28:56,864 INFO [train.py:1114] (1/4) Epoch 19, batch 8100, loss[loss=0.1745, simple_loss=0.2696, pruned_loss=0.03971, over 4797.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2631, pruned_loss=0.04014, over 934343.25 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:28:59,021 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.75 vs. limit=22.5 +2024-07-29 07:29:06,405 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.781e+01 6.315e+01 7.245e+01 1.091e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 07:29:18,850 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256176.0, ans=0.125 +2024-07-29 07:29:21,569 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256176.0, ans=0.1 +2024-07-29 07:29:26,987 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:29:29,462 INFO [train.py:1114] (1/4) Epoch 19, batch 8150, loss[loss=0.1653, simple_loss=0.2719, pruned_loss=0.02936, over 4797.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2624, pruned_loss=0.03988, over 937453.41 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:29:40,784 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=256216.0, ans=0.125 +2024-07-29 07:29:40,795 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=256216.0, ans=0.0 +2024-07-29 07:29:53,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256242.66666666666, ans=0.1 +2024-07-29 07:30:00,990 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=256256.0, ans=0.125 +2024-07-29 07:30:03,066 INFO [train.py:1114] (1/4) Epoch 19, batch 8200, loss[loss=0.1947, simple_loss=0.2813, pruned_loss=0.05402, over 4804.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2633, pruned_loss=0.04005, over 938132.17 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:30:12,535 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.702e+01 6.206e+01 7.193e+01 9.525e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 07:30:31,362 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256322.66666666666, ans=0.1 +2024-07-29 07:30:34,181 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-07-29 07:30:34,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=256336.0, ans=0.09899494936611666 +2024-07-29 07:30:35,088 INFO [train.py:1114] (1/4) Epoch 19, batch 8250, loss[loss=0.1496, simple_loss=0.2465, pruned_loss=0.02636, over 4903.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2642, pruned_loss=0.04046, over 938511.10 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:30:38,607 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=256336.0, ans=0.125 +2024-07-29 07:30:39,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=256336.0, ans=0.07 +2024-07-29 07:30:41,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=256349.33333333334, ans=0.2 +2024-07-29 07:30:42,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=256349.33333333334, ans=0.2 +2024-07-29 07:30:47,976 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.77 vs. limit=15.0 +2024-07-29 07:30:49,572 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=256362.66666666666, ans=0.125 +2024-07-29 07:31:05,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=256389.33333333334, ans=0.125 +2024-07-29 07:31:07,549 INFO [train.py:1114] (1/4) Epoch 19, batch 8300, loss[loss=0.1977, simple_loss=0.2931, pruned_loss=0.05112, over 4894.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2645, pruned_loss=0.04031, over 938686.69 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:31:17,051 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.581e+01 6.136e+01 6.669e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 07:31:17,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=256416.0, ans=0.125 +2024-07-29 07:31:17,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=256416.0, ans=0.0 +2024-07-29 07:31:25,389 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=256429.33333333334, ans=0.0 +2024-07-29 07:31:29,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=256442.66666666666, ans=0.125 +2024-07-29 07:31:41,614 INFO [train.py:1114] (1/4) Epoch 19, batch 8350, loss[loss=0.1669, simple_loss=0.2604, pruned_loss=0.03667, over 4793.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.264, pruned_loss=0.04028, over 941548.52 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:31:45,603 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=256469.33333333334, ans=0.5 +2024-07-29 07:31:48,804 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=256482.66666666666, ans=0.0 +2024-07-29 07:31:58,175 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256496.0, ans=0.1 +2024-07-29 07:32:04,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=256509.33333333334, ans=0.04949747468305833 +2024-07-29 07:32:05,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=256509.33333333334, ans=0.0 +2024-07-29 07:32:06,111 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256509.33333333334, ans=0.125 +2024-07-29 07:32:14,722 INFO [train.py:1114] (1/4) Epoch 19, batch 8400, loss[loss=0.1505, simple_loss=0.2446, pruned_loss=0.02825, over 4776.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2638, pruned_loss=0.04013, over 940146.31 frames. 
], batch size: 12, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:32:24,404 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.693e+01 5.636e+01 6.331e+01 6.924e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:32:37,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256576.0, ans=0.125 +2024-07-29 07:32:38,424 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256576.0, ans=0.1 +2024-07-29 07:32:48,605 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256602.66666666666, ans=0.125 +2024-07-29 07:32:48,697 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256602.66666666666, ans=0.0 +2024-07-29 07:32:49,192 INFO [train.py:1114] (1/4) Epoch 19, batch 8450, loss[loss=0.1838, simple_loss=0.2746, pruned_loss=0.04654, over 4804.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2654, pruned_loss=0.04089, over 939171.49 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:32:49,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=256602.66666666666, ans=0.0 +2024-07-29 07:33:08,623 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256629.33333333334, ans=0.125 +2024-07-29 07:33:13,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=256629.33333333334, ans=0.125 +2024-07-29 07:33:16,582 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0 +2024-07-29 07:33:23,597 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=256656.0, ans=0.09899494936611666 +2024-07-29 07:33:28,014 INFO [train.py:1114] (1/4) Epoch 19, batch 8500, loss[loss=0.1475, simple_loss=0.2341, pruned_loss=0.0305, over 4617.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.265, pruned_loss=0.04094, over 938738.15 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:33:35,103 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256682.66666666666, ans=0.1 +2024-07-29 07:33:37,643 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.699e+01 6.220e+01 6.936e+01 1.043e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 07:33:39,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256682.66666666666, ans=0.0 +2024-07-29 07:33:39,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.45 vs. limit=15.0 +2024-07-29 07:33:44,401 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=256696.0, ans=0.2 +2024-07-29 07:33:50,485 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.77 vs. 
limit=6.0 +2024-07-29 07:33:54,779 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=256709.33333333334, ans=10.0 +2024-07-29 07:33:56,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=256722.66666666666, ans=0.125 +2024-07-29 07:34:03,196 INFO [train.py:1114] (1/4) Epoch 19, batch 8550, loss[loss=0.1684, simple_loss=0.2302, pruned_loss=0.05331, over 4784.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2638, pruned_loss=0.04031, over 939407.02 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:34:13,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=256749.33333333334, ans=0.125 +2024-07-29 07:34:44,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=256776.0, ans=0.0 +2024-07-29 07:34:52,555 INFO [train.py:1114] (1/4) Epoch 19, batch 8600, loss[loss=0.1482, simple_loss=0.2538, pruned_loss=0.02133, over 4803.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04018, over 939359.63 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:34:56,895 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=256802.66666666666, ans=0.0 +2024-07-29 07:35:02,024 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.60 vs. limit=22.5 +2024-07-29 07:35:04,656 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.39 vs. limit=22.5 +2024-07-29 07:35:04,896 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.627e+01 6.563e+01 7.545e+01 1.202e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-29 07:35:05,385 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.89 vs. limit=15.0 +2024-07-29 07:35:20,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256842.66666666666, ans=0.125 +2024-07-29 07:35:29,028 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.80 vs. limit=22.5 +2024-07-29 07:36:00,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=256856.0, ans=0.2 +2024-07-29 07:36:02,258 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:36:03,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256869.33333333334, ans=0.1 +2024-07-29 07:36:04,065 INFO [train.py:1114] (1/4) Epoch 19, batch 8650, loss[loss=0.1858, simple_loss=0.2769, pruned_loss=0.04738, over 4915.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2644, pruned_loss=0.04106, over 940470.89 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:36:34,140 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.69 vs. 
limit=15.0 +2024-07-29 07:36:39,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=256922.66666666666, ans=0.0 +2024-07-29 07:36:42,590 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.90 vs. limit=22.5 +2024-07-29 07:36:44,196 INFO [train.py:1114] (1/4) Epoch 19, batch 8700, loss[loss=0.1659, simple_loss=0.2653, pruned_loss=0.03329, over 4748.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2643, pruned_loss=0.0408, over 937997.02 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:36:45,785 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.91 vs. limit=6.0 +2024-07-29 07:36:53,801 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.735e+01 6.299e+01 7.253e+01 1.043e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:36:56,535 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256962.66666666666, ans=0.1 +2024-07-29 07:36:57,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256962.66666666666, ans=0.125 +2024-07-29 07:37:05,106 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=256976.0, ans=0.0 +2024-07-29 07:37:20,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=256989.33333333334, ans=0.0 +2024-07-29 07:37:23,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256989.33333333334, ans=0.125 +2024-07-29 07:37:27,839 INFO [train.py:1114] (1/4) Epoch 19, batch 8750, loss[loss=0.1774, simple_loss=0.2709, pruned_loss=0.04195, over 4674.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2644, pruned_loss=0.04089, over 936693.61 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:37:42,792 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=257002.66666666666, ans=0.0 +2024-07-29 07:37:49,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257016.0, ans=0.125 +2024-07-29 07:37:51,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=257016.0, ans=0.0 +2024-07-29 07:37:51,995 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257016.0, ans=0.125 +2024-07-29 07:37:55,927 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=257029.33333333334, ans=0.2 +2024-07-29 07:37:55,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=257029.33333333334, ans=0.125 +2024-07-29 07:38:02,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257042.66666666666, ans=0.125 +2024-07-29 07:38:05,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=257042.66666666666, ans=0.125 +2024-07-29 07:38:05,493 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.02 vs. limit=10.0 +2024-07-29 07:38:10,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=257056.0, ans=0.125 +2024-07-29 07:38:11,450 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.32 vs. limit=15.0 +2024-07-29 07:38:11,981 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.77 vs. limit=10.0 +2024-07-29 07:38:15,035 INFO [train.py:1114] (1/4) Epoch 19, batch 8800, loss[loss=0.1941, simple_loss=0.287, pruned_loss=0.05058, over 4934.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04052, over 937543.04 frames. ], batch size: 14, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:38:21,856 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.88 vs. limit=15.0 +2024-07-29 07:38:24,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=257069.33333333334, ans=0.0 +2024-07-29 07:38:38,826 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.487e+01 5.702e+01 6.437e+01 7.118e+01 1.132e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 07:38:47,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257096.0, ans=0.125 +2024-07-29 07:38:48,861 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.98 vs. 
limit=15.0 +2024-07-29 07:39:10,024 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=257122.66666666666, ans=0.2 +2024-07-29 07:39:17,544 INFO [train.py:1114] (1/4) Epoch 19, batch 8850, loss[loss=0.1936, simple_loss=0.2866, pruned_loss=0.0503, over 4393.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2628, pruned_loss=0.04068, over 932188.45 frames. ], batch size: 21, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:39:23,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=257136.0, ans=0.125 +2024-07-29 07:39:23,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=257149.33333333334, ans=0.0 +2024-07-29 07:39:33,277 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=257149.33333333334, ans=0.025 +2024-07-29 07:39:41,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=257176.0, ans=0.025 +2024-07-29 07:39:46,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257176.0, ans=0.125 +2024-07-29 07:39:52,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=15.0 +2024-07-29 07:39:53,573 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.27 vs. limit=12.0 +2024-07-29 07:39:54,402 INFO [train.py:1114] (1/4) Epoch 19, batch 8900, loss[loss=0.1457, simple_loss=0.2305, pruned_loss=0.0304, over 4934.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2633, pruned_loss=0.04042, over 930374.33 frames. ], batch size: 12, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:40:03,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257202.66666666666, ans=0.0 +2024-07-29 07:40:04,546 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.66 vs. limit=10.0 +2024-07-29 07:40:04,907 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=257216.0, ans=0.025 +2024-07-29 07:40:06,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=257216.0, ans=0.125 +2024-07-29 07:40:20,223 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.736e+01 6.296e+01 7.033e+01 9.064e+01, threshold=1.259e+02, percent-clipped=0.0 +2024-07-29 07:42:50,994 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.63 vs. limit=15.0 +2024-07-29 07:42:51,602 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=257216.0, ans=0.2 +2024-07-29 07:42:54,961 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.61 vs. limit=22.5 +2024-07-29 07:43:14,244 INFO [train.py:1114] (1/4) Epoch 19, batch 8950, loss[loss=0.1791, simple_loss=0.2715, pruned_loss=0.04337, over 4432.00 frames. 
], tot_loss[loss=0.1724, simple_loss=0.2635, pruned_loss=0.04062, over 930827.11 frames. ], batch size: 21, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:43:24,188 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=257282.66666666666, ans=0.2 +2024-07-29 07:43:26,146 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=257282.66666666666, ans=0.125 +2024-07-29 07:43:27,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=257296.0, ans=0.0 +2024-07-29 07:43:37,727 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=257309.33333333334, ans=0.0 +2024-07-29 07:43:47,528 INFO [train.py:1114] (1/4) Epoch 19, batch 9000, loss[loss=0.1705, simple_loss=0.2454, pruned_loss=0.04775, over 4645.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2619, pruned_loss=0.04024, over 933334.79 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:43:47,528 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 07:43:59,126 INFO [train.py:1146] (1/4) Epoch 19, validation: loss=0.1612, simple_loss=0.2635, pruned_loss=0.02943, over 944034.00 frames. +2024-07-29 07:43:59,127 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 07:44:03,114 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:44:04,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=257336.0, ans=0.0 +2024-07-29 07:44:05,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=257349.33333333334, ans=0.125 +2024-07-29 07:44:08,779 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.623e+01 6.391e+01 7.404e+01 1.117e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 07:44:11,724 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=257362.66666666666, ans=0.025 +2024-07-29 07:44:14,051 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=12.0 +2024-07-29 07:44:15,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=257362.66666666666, ans=0.0 +2024-07-29 07:44:19,938 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.89 vs. limit=22.5 +2024-07-29 07:44:24,225 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257376.0, ans=0.1 +2024-07-29 07:44:28,455 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257389.33333333334, ans=0.125 +2024-07-29 07:44:31,557 INFO [train.py:1114] (1/4) Epoch 19, batch 9050, loss[loss=0.1585, simple_loss=0.2504, pruned_loss=0.03333, over 4514.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2611, pruned_loss=0.03965, over 934028.46 frames. 
], batch size: 10, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:44:32,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257402.66666666666, ans=0.1 +2024-07-29 07:44:34,311 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=257402.66666666666, ans=0.125 +2024-07-29 07:44:35,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=257402.66666666666, ans=0.125 +2024-07-29 07:44:36,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=257402.66666666666, ans=0.2 +2024-07-29 07:44:38,865 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:44:47,335 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257429.33333333334, ans=0.125 +2024-07-29 07:44:49,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=257429.33333333334, ans=0.0 +2024-07-29 07:44:58,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=257442.66666666666, ans=0.035 +2024-07-29 07:45:04,293 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.71 vs. limit=15.0 +2024-07-29 07:45:10,205 INFO [train.py:1114] (1/4) Epoch 19, batch 9100, loss[loss=0.1686, simple_loss=0.2624, pruned_loss=0.03744, over 4936.00 frames. ], tot_loss[loss=0.17, simple_loss=0.261, pruned_loss=0.03945, over 936654.59 frames. ], batch size: 14, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:45:26,409 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.674e+01 6.326e+01 7.504e+01 9.644e+01, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 07:45:26,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257482.66666666666, ans=0.125 +2024-07-29 07:45:28,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=257482.66666666666, ans=0.0 +2024-07-29 07:45:42,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.67 vs. limit=6.0 +2024-07-29 07:45:47,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.81 vs. limit=22.5 +2024-07-29 07:45:57,961 INFO [train.py:1114] (1/4) Epoch 19, batch 9150, loss[loss=0.1507, simple_loss=0.2429, pruned_loss=0.02923, over 4808.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.04006, over 935678.17 frames. 
], batch size: 14, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:45:58,139 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=257536.0, ans=0.07 +2024-07-29 07:46:20,222 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=257576.0, ans=0.05 +2024-07-29 07:46:29,457 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=257589.33333333334, ans=0.04949747468305833 +2024-07-29 07:46:34,021 INFO [train.py:1114] (1/4) Epoch 19, batch 9200, loss[loss=0.1771, simple_loss=0.2599, pruned_loss=0.04713, over 4865.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2624, pruned_loss=0.04032, over 937590.54 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:46:37,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=257602.66666666666, ans=0.0 +2024-07-29 07:46:37,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=257602.66666666666, ans=0.2 +2024-07-29 07:46:43,417 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.777e+01 6.391e+01 7.233e+01 9.749e+01, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 07:46:45,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.19 vs. limit=22.5 +2024-07-29 07:46:50,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257629.33333333334, ans=0.125 +2024-07-29 07:46:50,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=257629.33333333334, ans=0.0 +2024-07-29 07:46:52,204 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=257629.33333333334, ans=0.125 +2024-07-29 07:46:52,828 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=257642.66666666666, ans=0.2 +2024-07-29 07:47:05,829 INFO [train.py:1114] (1/4) Epoch 19, batch 9250, loss[loss=0.1942, simple_loss=0.2911, pruned_loss=0.04865, over 4645.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.262, pruned_loss=0.04013, over 938189.32 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:47:07,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=257669.33333333334, ans=0.0 +2024-07-29 07:47:28,333 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=257709.33333333334, ans=0.0 +2024-07-29 07:47:38,217 INFO [train.py:1114] (1/4) Epoch 19, batch 9300, loss[loss=0.1572, simple_loss=0.249, pruned_loss=0.03272, over 4769.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2615, pruned_loss=0.03966, over 937842.64 frames. 
], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:47:44,735 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=257749.33333333334, ans=0.1 +2024-07-29 07:47:47,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 6.148e+01 7.388e+01 1.007e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 07:47:49,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257749.33333333334, ans=0.125 +2024-07-29 07:47:52,291 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.86 vs. limit=22.5 +2024-07-29 07:47:52,737 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=257762.66666666666, ans=0.2 +2024-07-29 07:48:01,522 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=257776.0, ans=0.125 +2024-07-29 07:48:06,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.47 vs. limit=15.0 +2024-07-29 07:48:06,675 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257789.33333333334, ans=0.125 +2024-07-29 07:48:10,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.88 vs. limit=22.5 +2024-07-29 07:48:12,930 INFO [train.py:1114] (1/4) Epoch 19, batch 9350, loss[loss=0.181, simple_loss=0.2576, pruned_loss=0.05222, over 4789.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.262, pruned_loss=0.03987, over 934482.36 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:48:18,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=257802.66666666666, ans=0.04949747468305833 +2024-07-29 07:48:19,296 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=257816.0, ans=0.0 +2024-07-29 07:48:26,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=257829.33333333334, ans=0.07 +2024-07-29 07:48:35,824 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257842.66666666666, ans=0.125 +2024-07-29 07:48:41,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.61 vs. limit=15.0 +2024-07-29 07:48:44,699 INFO [train.py:1114] (1/4) Epoch 19, batch 9400, loss[loss=0.1716, simple_loss=0.2577, pruned_loss=0.04279, over 4696.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04073, over 932269.18 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:48:54,089 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.728e+01 6.199e+01 7.519e+01 1.174e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 07:48:56,356 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.62 vs. 
limit=15.0 +2024-07-29 07:48:59,190 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=257896.0, ans=0.125 +2024-07-29 07:49:02,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257896.0, ans=0.1 +2024-07-29 07:49:06,436 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-29 07:49:16,041 INFO [train.py:1114] (1/4) Epoch 19, batch 9450, loss[loss=0.1617, simple_loss=0.25, pruned_loss=0.03668, over 4795.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04025, over 931616.40 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:49:41,931 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=257962.66666666666, ans=0.125 +2024-07-29 07:49:43,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=257976.0, ans=0.025 +2024-07-29 07:49:44,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=257976.0, ans=0.2 +2024-07-29 07:49:46,412 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.46 vs. limit=15.0 +2024-07-29 07:49:48,048 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257976.0, ans=0.1 +2024-07-29 07:49:55,422 INFO [train.py:1114] (1/4) Epoch 19, batch 9500, loss[loss=0.1508, simple_loss=0.2399, pruned_loss=0.03087, over 4703.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2638, pruned_loss=0.04053, over 934200.44 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:49:57,446 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258002.66666666666, ans=0.125 +2024-07-29 07:50:04,906 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.577e+01 5.446e+01 5.959e+01 6.735e+01 9.596e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-29 07:50:06,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258016.0, ans=0.125 +2024-07-29 07:50:08,126 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=258029.33333333334, ans=0.2 +2024-07-29 07:50:19,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-29 07:50:21,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=258056.0, ans=0.025 +2024-07-29 07:50:23,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=258056.0, ans=0.125 +2024-07-29 07:50:24,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.77 vs. 
limit=22.5 +2024-07-29 07:50:25,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=258056.0, ans=0.125 +2024-07-29 07:50:29,000 INFO [train.py:1114] (1/4) Epoch 19, batch 9550, loss[loss=0.164, simple_loss=0.2582, pruned_loss=0.03494, over 4766.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2649, pruned_loss=0.041, over 931569.18 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:50:33,008 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.32 vs. limit=15.0 +2024-07-29 07:50:38,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258082.66666666666, ans=0.0 +2024-07-29 07:50:40,768 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=258096.0, ans=0.1 +2024-07-29 07:50:41,398 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=258096.0, ans=0.125 +2024-07-29 07:50:46,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=258109.33333333334, ans=0.125 +2024-07-29 07:50:52,755 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=258109.33333333334, ans=0.125 +2024-07-29 07:51:05,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=258122.66666666666, ans=0.125 +2024-07-29 07:51:07,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=258122.66666666666, ans=0.125 +2024-07-29 07:51:10,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=258136.0, ans=0.0 +2024-07-29 07:51:12,496 INFO [train.py:1114] (1/4) Epoch 19, batch 9600, loss[loss=0.2391, simple_loss=0.3126, pruned_loss=0.0828, over 3558.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2643, pruned_loss=0.04051, over 931040.80 frames. ], batch size: 35, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:17,871 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=258136.0, ans=0.0 +2024-07-29 07:51:21,990 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.937e+01 6.386e+01 7.744e+01 1.025e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 07:51:29,773 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-29 07:51:30,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=258162.66666666666, ans=0.09899494936611666 +2024-07-29 07:51:31,574 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=18.71 vs. 
limit=15.0 +2024-07-29 07:51:37,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=258176.0, ans=0.125 +2024-07-29 07:51:40,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=258189.33333333334, ans=0.2 +2024-07-29 07:51:44,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=258189.33333333334, ans=0.07 +2024-07-29 07:51:46,117 INFO [train.py:1114] (1/4) Epoch 19, batch 9650, loss[loss=0.1678, simple_loss=0.2588, pruned_loss=0.03838, over 4831.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2641, pruned_loss=0.04031, over 927172.86 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:46,876 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:51:53,552 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=258216.0, ans=0.125 +2024-07-29 07:51:54,591 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.70 vs. limit=15.0 +2024-07-29 07:51:54,969 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=258216.0, ans=0.125 +2024-07-29 07:52:01,970 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:52:02,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=258229.33333333334, ans=0.2 +2024-07-29 07:52:12,105 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=258256.0, ans=0.125 +2024-07-29 07:52:17,704 INFO [train.py:1114] (1/4) Epoch 19, batch 9700, loss[loss=0.2026, simple_loss=0.2903, pruned_loss=0.0575, over 4226.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2648, pruned_loss=0.0405, over 924995.17 frames. ], batch size: 25, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:52:20,918 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=258269.33333333334, ans=0.0 +2024-07-29 07:52:26,891 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.780e+01 6.621e+01 7.551e+01 1.114e+02, threshold=1.324e+02, percent-clipped=0.0 +2024-07-29 07:52:27,855 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258282.66666666666, ans=0.1 +2024-07-29 07:52:35,582 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=258296.0, ans=0.07 +2024-07-29 07:52:44,377 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=258322.66666666666, ans=0.125 +2024-07-29 07:52:51,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=258336.0, ans=0.125 +2024-07-29 07:52:52,000 INFO [train.py:1114] (1/4) Epoch 19, batch 9750, loss[loss=0.1834, simple_loss=0.2655, pruned_loss=0.05067, over 4681.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2639, pruned_loss=0.04033, over 925440.25 frames. 
], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:52:52,373 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-29 07:52:52,690 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=258336.0, ans=0.0 +2024-07-29 07:52:54,648 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=258336.0, ans=0.125 +2024-07-29 07:52:55,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=258336.0, ans=0.2 +2024-07-29 07:53:14,425 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=258376.0, ans=0.125 +2024-07-29 07:53:14,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=258376.0, ans=0.025 +2024-07-29 07:54:17,465 INFO [train.py:1114] (1/4) Epoch 19, batch 9800, loss[loss=0.1651, simple_loss=0.2586, pruned_loss=0.03583, over 4706.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2634, pruned_loss=0.04021, over 925290.55 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:54:24,889 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=258416.0, ans=0.0 +2024-07-29 07:54:25,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=258416.0, ans=0.125 +2024-07-29 07:54:27,267 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.598e+01 6.395e+01 7.278e+01 1.117e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:54:27,557 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.43 vs. limit=10.0 +2024-07-29 07:54:28,769 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=258416.0, ans=0.0 +2024-07-29 07:54:34,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.36 vs. limit=22.5 +2024-07-29 07:54:38,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258442.66666666666, ans=0.1 +2024-07-29 07:54:44,518 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=258456.0, ans=0.2 +2024-07-29 07:54:49,137 INFO [train.py:1114] (1/4) Epoch 19, batch 9850, loss[loss=0.1909, simple_loss=0.2819, pruned_loss=0.04995, over 4895.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2636, pruned_loss=0.04059, over 927683.21 frames. 
], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:54:49,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=258469.33333333334, ans=0.09899494936611666 +2024-07-29 07:54:54,096 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=258469.33333333334, ans=0.09899494936611666 +2024-07-29 07:55:14,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=258482.66666666666, ans=0.125 +2024-07-29 07:55:25,948 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.77 vs. limit=12.0 +2024-07-29 07:55:26,864 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=258509.33333333334, ans=0.09899494936611666 +2024-07-29 07:55:29,924 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=258509.33333333334, ans=0.2 +2024-07-29 07:55:31,802 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=258509.33333333334, ans=0.125 +2024-07-29 07:55:34,492 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.83 vs. limit=6.0 +2024-07-29 07:55:36,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=258522.66666666666, ans=0.125 +2024-07-29 07:55:40,083 INFO [train.py:1114] (1/4) Epoch 19, batch 9900, loss[loss=0.1948, simple_loss=0.2779, pruned_loss=0.05587, over 4858.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2646, pruned_loss=0.04139, over 927028.35 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:55:49,455 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.747e+01 6.549e+01 7.522e+01 9.931e+01, threshold=1.310e+02, percent-clipped=0.0 +2024-07-29 07:55:51,409 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258549.33333333334, ans=0.1 +2024-07-29 07:55:55,226 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258562.66666666666, ans=0.1 +2024-07-29 07:55:58,244 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:56:10,860 INFO [train.py:1114] (1/4) Epoch 19, batch 9950, loss[loss=0.147, simple_loss=0.228, pruned_loss=0.03302, over 4815.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2645, pruned_loss=0.04173, over 930069.87 frames. 
], batch size: 11, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:12,776 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=258602.66666666666, ans=0.125 +2024-07-29 07:56:27,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=258629.33333333334, ans=0.0 +2024-07-29 07:56:39,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=258656.0, ans=0.0 +2024-07-29 07:56:42,846 INFO [train.py:1114] (1/4) Epoch 19, batch 10000, loss[loss=0.1904, simple_loss=0.279, pruned_loss=0.05083, over 4629.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2665, pruned_loss=0.04223, over 927280.21 frames. ], batch size: 16, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:51,994 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.927e+01 5.763e+01 6.186e+01 6.988e+01 1.066e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:56:56,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=258696.0, ans=0.2 +2024-07-29 07:56:57,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.44 vs. limit=22.5 +2024-07-29 07:56:58,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258696.0, ans=0.125 +2024-07-29 07:56:59,162 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=258696.0, ans=0.0 +2024-07-29 07:57:13,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=258722.66666666666, ans=0.09899494936611666 +2024-07-29 07:57:14,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258736.0, ans=0.1 +2024-07-29 07:57:14,467 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.48 vs. limit=15.0 +2024-07-29 07:57:14,766 INFO [train.py:1114] (1/4) Epoch 19, batch 10050, loss[loss=0.2627, simple_loss=0.3304, pruned_loss=0.09753, over 3415.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2687, pruned_loss=0.0434, over 917095.18 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:17,989 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=25.61 vs. limit=22.5 +2024-07-29 07:57:18,266 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:57:33,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=258762.66666666666, ans=0.0 +2024-07-29 07:57:37,814 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. 
limit=6.0 +2024-07-29 07:57:41,002 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=258776.0, ans=0.125 +2024-07-29 07:57:48,578 INFO [train.py:1114] (1/4) Epoch 19, batch 10100, loss[loss=0.204, simple_loss=0.2774, pruned_loss=0.06532, over 3309.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2736, pruned_loss=0.0475, over 864117.24 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:51,325 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=258802.66666666666, ans=0.0 +2024-07-29 07:57:54,380 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.01 vs. limit=22.5 +2024-07-29 07:57:58,540 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.634e+01 7.311e+01 7.897e+01 1.171e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-29 07:58:01,029 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5 +2024-07-29 07:58:02,663 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=258829.33333333334, ans=0.125 +2024-07-29 07:58:07,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=258842.66666666666, ans=0.0 +2024-07-29 07:58:08,624 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=258842.66666666666, ans=0.07 +2024-07-29 07:58:21,407 INFO [train.py:1114] (1/4) Epoch 19, batch 10150, loss[loss=0.1963, simple_loss=0.2805, pruned_loss=0.05606, over 3374.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2776, pruned_loss=0.0514, over 822722.24 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 32.0 +2024-07-29 07:58:26,655 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=258869.33333333334, ans=0.125 +2024-07-29 07:58:33,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=258882.66666666666, ans=0.025 +2024-07-29 07:58:40,186 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=258896.0, ans=0.125 +2024-07-29 07:58:52,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. 
limit=6.0 +2024-07-29 07:59:01,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258909.33333333334, ans=0.0 +2024-07-29 07:59:03,101 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=258922.66666666666, ans=0.0 +2024-07-29 07:59:03,879 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258922.66666666666, ans=0.0 +2024-07-29 07:59:04,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=258922.66666666666, ans=0.0 +2024-07-29 07:59:09,559 INFO [train.py:1114] (1/4) Epoch 19, batch 10200, loss[loss=0.2377, simple_loss=0.3067, pruned_loss=0.08431, over 3326.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2811, pruned_loss=0.05449, over 788686.49 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 32.0 +2024-07-29 07:59:12,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=258936.0, ans=0.125 +2024-07-29 07:59:12,891 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258936.0, ans=0.1 +2024-07-29 07:59:17,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258949.33333333334, ans=0.0 +2024-07-29 07:59:19,707 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.024e+01 7.484e+01 8.101e+01 1.029e+02, threshold=1.497e+02, percent-clipped=0.0 +2024-07-29 08:02:27,568 INFO [train.py:1114] (1/4) Epoch 20, batch 0, loss[loss=0.1238, simple_loss=0.2146, pruned_loss=0.01651, over 4849.00 frames. ], tot_loss[loss=0.1238, simple_loss=0.2146, pruned_loss=0.01651, over 4849.00 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:02:27,569 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 08:02:33,786 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0443, 3.9498, 3.5489, 3.6635], device='cuda:1') +2024-07-29 08:02:37,242 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.7705, 5.6890, 5.0912, 5.3303], device='cuda:1') +2024-07-29 08:02:40,769 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.161, simple_loss=0.2644, pruned_loss=0.02883, over 944034.00 frames. +2024-07-29 08:02:40,769 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 08:02:44,363 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258966.66666666666, ans=0.0 +2024-07-29 08:02:47,887 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=258980.0, ans=0.125 +2024-07-29 08:02:49,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=258980.0, ans=0.0 +2024-07-29 08:02:57,781 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.47 vs. 
limit=10.0 +2024-07-29 08:03:07,327 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-07-29 08:03:09,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259020.0, ans=0.125 +2024-07-29 08:03:13,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259020.0, ans=0.1 +2024-07-29 08:03:14,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=259020.0, ans=0.125 +2024-07-29 08:03:17,471 INFO [train.py:1114] (1/4) Epoch 20, batch 50, loss[loss=0.1706, simple_loss=0.2514, pruned_loss=0.04488, over 4607.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2615, pruned_loss=0.03912, over 206343.23 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:03:26,561 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:03:42,387 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=259073.33333333334, ans=0.2 +2024-07-29 08:03:43,884 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=259073.33333333334, ans=0.125 +2024-07-29 08:03:47,269 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.582e+01 6.158e+01 6.826e+01 9.280e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 08:03:51,448 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=259100.0, ans=0.125 +2024-07-29 08:03:51,950 INFO [train.py:1114] (1/4) Epoch 20, batch 100, loss[loss=0.1788, simple_loss=0.2589, pruned_loss=0.04937, over 4638.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2647, pruned_loss=0.04045, over 365094.43 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:03:54,757 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5 +2024-07-29 08:04:00,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259113.33333333334, ans=0.1 +2024-07-29 08:04:11,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=259126.66666666666, ans=0.0 +2024-07-29 08:04:14,728 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=259140.0, ans=0.2 +2024-07-29 08:04:26,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=259166.66666666666, ans=0.0 +2024-07-29 08:04:27,183 INFO [train.py:1114] (1/4) Epoch 20, batch 150, loss[loss=0.1306, simple_loss=0.2212, pruned_loss=0.01999, over 4623.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2624, pruned_loss=0.03929, over 494020.74 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:04:46,572 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.91 vs. 
limit=22.5 +2024-07-29 08:04:57,099 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.593e+01 6.135e+01 6.886e+01 1.305e+02, threshold=1.227e+02, percent-clipped=1.0 +2024-07-29 08:05:01,697 INFO [train.py:1114] (1/4) Epoch 20, batch 200, loss[loss=0.1591, simple_loss=0.2516, pruned_loss=0.03331, over 4506.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2618, pruned_loss=0.03959, over 593473.22 frames. ], batch size: 21, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:15,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=259246.66666666666, ans=0.0 +2024-07-29 08:05:25,046 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.65 vs. limit=6.0 +2024-07-29 08:05:40,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=259273.33333333334, ans=0.0 +2024-07-29 08:05:43,500 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=259286.66666666666, ans=0.125 +2024-07-29 08:05:49,704 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=259286.66666666666, ans=0.125 +2024-07-29 08:05:50,870 INFO [train.py:1114] (1/4) Epoch 20, batch 250, loss[loss=0.1775, simple_loss=0.27, pruned_loss=0.04246, over 4614.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2618, pruned_loss=0.03959, over 669941.86 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:53,355 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.05 vs. limit=22.5 +2024-07-29 08:05:58,133 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.63 vs. limit=22.5 +2024-07-29 08:06:14,200 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:06:51,498 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=259326.66666666666, ans=0.0 +2024-07-29 08:06:55,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:07:30,204 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.734e+01 6.099e+01 7.044e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 08:07:30,750 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.33 vs. limit=22.5 +2024-07-29 08:07:36,796 INFO [train.py:1114] (1/4) Epoch 20, batch 300, loss[loss=0.1628, simple_loss=0.2583, pruned_loss=0.03363, over 4802.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2619, pruned_loss=0.03994, over 729673.10 frames. 
], batch size: 15, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:07:43,645 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=259380.0, ans=0.0 +2024-07-29 08:07:57,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=259406.66666666666, ans=0.0 +2024-07-29 08:08:00,511 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.17 vs. limit=22.5 +2024-07-29 08:08:14,325 INFO [train.py:1114] (1/4) Epoch 20, batch 350, loss[loss=0.1689, simple_loss=0.2546, pruned_loss=0.04157, over 4943.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2626, pruned_loss=0.04004, over 775650.64 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:08:31,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=259460.0, ans=0.125 +2024-07-29 08:08:47,020 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.322e+01 5.507e+01 5.880e+01 6.811e+01 8.968e+01, threshold=1.176e+02, percent-clipped=0.0 +2024-07-29 08:08:47,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=259486.66666666666, ans=10.0 +2024-07-29 08:08:51,654 INFO [train.py:1114] (1/4) Epoch 20, batch 400, loss[loss=0.1742, simple_loss=0.2716, pruned_loss=0.0384, over 4695.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2619, pruned_loss=0.03992, over 813377.11 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:08:53,250 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.81 vs. limit=6.0 +2024-07-29 08:08:54,027 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.95 vs. limit=22.5 +2024-07-29 08:08:58,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=259513.33333333334, ans=0.125 +2024-07-29 08:09:03,087 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=259513.33333333334, ans=15.0 +2024-07-29 08:09:13,705 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=259526.66666666666, ans=0.0 +2024-07-29 08:09:26,900 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=259553.33333333334, ans=0.125 +2024-07-29 08:09:30,918 INFO [train.py:1114] (1/4) Epoch 20, batch 450, loss[loss=0.1642, simple_loss=0.2633, pruned_loss=0.03254, over 4641.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2619, pruned_loss=0.03982, over 839018.21 frames. 
], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:09:32,920 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=259566.66666666666, ans=0.0 +2024-07-29 08:09:36,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=259566.66666666666, ans=0.125 +2024-07-29 08:09:59,967 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=259606.66666666666, ans=0.2 +2024-07-29 08:10:05,752 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.641e+01 6.168e+01 6.736e+01 1.200e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-29 08:10:10,561 INFO [train.py:1114] (1/4) Epoch 20, batch 500, loss[loss=0.1994, simple_loss=0.2974, pruned_loss=0.05065, over 4681.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2614, pruned_loss=0.03978, over 861393.36 frames. ], batch size: 15, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:10:12,307 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.68 vs. limit=10.0 +2024-07-29 08:10:25,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=259660.0, ans=0.125 +2024-07-29 08:10:51,611 INFO [train.py:1114] (1/4) Epoch 20, batch 550, loss[loss=0.2087, simple_loss=0.3032, pruned_loss=0.05709, over 4619.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2615, pruned_loss=0.03936, over 877243.22 frames. ], batch size: 17, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:14:19,016 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=259700.0, ans=0.025 +2024-07-29 08:14:59,612 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-29 08:15:09,269 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.92 vs. limit=15.0 +2024-07-29 08:15:10,998 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=259740.0, ans=0.2 +2024-07-29 08:15:11,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=259740.0, ans=0.125 +2024-07-29 08:15:21,353 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.418e+01 6.036e+01 6.579e+01 9.144e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 08:15:26,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=259753.33333333334, ans=10.0 +2024-07-29 08:15:29,093 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259766.66666666666, ans=0.125 +2024-07-29 08:15:29,553 INFO [train.py:1114] (1/4) Epoch 20, batch 600, loss[loss=0.1908, simple_loss=0.2931, pruned_loss=0.04424, over 4645.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03961, over 891977.79 frames. 
], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:15:32,615 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.65 vs. limit=15.0 +2024-07-29 08:15:32,666 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.33 vs. limit=10.0 +2024-07-29 08:15:38,272 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=259780.0, ans=0.125 +2024-07-29 08:15:40,073 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=259780.0, ans=0.0 +2024-07-29 08:15:57,787 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.16 vs. limit=15.0 +2024-07-29 08:16:10,529 INFO [train.py:1114] (1/4) Epoch 20, batch 650, loss[loss=0.179, simple_loss=0.2678, pruned_loss=0.04513, over 4760.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2615, pruned_loss=0.03985, over 903529.93 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:17:26,135 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-07-29 08:18:30,217 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.565e+01 6.152e+01 6.795e+01 9.682e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 08:18:33,428 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.64 vs. limit=15.0 +2024-07-29 08:18:35,044 INFO [train.py:1114] (1/4) Epoch 20, batch 700, loss[loss=0.1669, simple_loss=0.2657, pruned_loss=0.03408, over 4647.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2621, pruned_loss=0.03998, over 911440.73 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:18:46,448 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:18:52,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=259913.33333333334, ans=0.05 +2024-07-29 08:18:59,939 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=259926.66666666666, ans=0.025 +2024-07-29 08:19:01,317 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.47 vs. limit=15.0 +2024-07-29 08:19:09,033 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.74 vs. limit=12.0 +2024-07-29 08:19:19,934 INFO [train.py:1114] (1/4) Epoch 20, batch 750, loss[loss=0.1752, simple_loss=0.2644, pruned_loss=0.04301, over 4686.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2616, pruned_loss=0.03972, over 917825.90 frames. 
], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:19:21,356 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=259966.66666666666, ans=0.0 +2024-07-29 08:19:35,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259993.33333333334, ans=0.125 +2024-07-29 08:19:47,953 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=260006.66666666666, ans=0.125 +2024-07-29 08:19:51,132 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.579e+01 6.090e+01 6.934e+01 1.125e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 08:19:51,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260020.0, ans=0.0 +2024-07-29 08:19:55,773 INFO [train.py:1114] (1/4) Epoch 20, batch 800, loss[loss=0.1512, simple_loss=0.2447, pruned_loss=0.02881, over 4844.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.04002, over 922641.83 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:20:12,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=260060.0, ans=0.0 +2024-07-29 08:20:16,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=260073.33333333334, ans=0.0 +2024-07-29 08:20:34,474 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=260073.33333333334, ans=0.125 +2024-07-29 08:20:39,312 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260086.66666666666, ans=0.125 +2024-07-29 08:20:43,556 INFO [train.py:1114] (1/4) Epoch 20, batch 850, loss[loss=0.1789, simple_loss=0.2796, pruned_loss=0.03908, over 4661.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2626, pruned_loss=0.04015, over 926933.01 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:20:45,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260100.0, ans=0.125 +2024-07-29 08:20:49,742 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.56 vs. limit=10.0 +2024-07-29 08:20:54,180 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=260113.33333333334, ans=0.125 +2024-07-29 08:21:12,434 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.604e+01 6.314e+01 7.197e+01 9.359e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 08:21:15,996 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=260153.33333333334, ans=0.125 +2024-07-29 08:21:17,233 INFO [train.py:1114] (1/4) Epoch 20, batch 900, loss[loss=0.1728, simple_loss=0.2476, pruned_loss=0.04899, over 4842.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2631, pruned_loss=0.04034, over 927788.94 frames. 
], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:21:21,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=260166.66666666666, ans=0.125 +2024-07-29 08:21:33,536 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.23 vs. limit=15.0 +2024-07-29 08:21:38,842 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=260206.66666666666, ans=0.0 +2024-07-29 08:21:52,559 INFO [train.py:1114] (1/4) Epoch 20, batch 950, loss[loss=0.1384, simple_loss=0.2279, pruned_loss=0.02452, over 4780.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2626, pruned_loss=0.03986, over 929759.58 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:21:55,178 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=260233.33333333334, ans=0.0 +2024-07-29 08:21:55,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=260233.33333333334, ans=0.2 +2024-07-29 08:21:58,531 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=260246.66666666666, ans=0.2 +2024-07-29 08:23:16,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=260246.66666666666, ans=0.2 +2024-07-29 08:23:49,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=260260.0, ans=0.0 +2024-07-29 08:23:51,748 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=260260.0, ans=0.125 +2024-07-29 08:24:02,476 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.740e+01 6.532e+01 7.410e+01 9.580e+01, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 08:24:02,571 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=260286.66666666666, ans=0.125 +2024-07-29 08:24:07,417 INFO [train.py:1114] (1/4) Epoch 20, batch 1000, loss[loss=0.142, simple_loss=0.2217, pruned_loss=0.03108, over 4963.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.263, pruned_loss=0.04024, over 929127.03 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:24:10,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=260300.0, ans=0.0 +2024-07-29 08:24:14,622 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.71 vs. limit=6.0 +2024-07-29 08:24:21,176 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-29 08:24:23,837 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.07 vs. 
limit=12.0 +2024-07-29 08:24:30,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260340.0, ans=0.1 +2024-07-29 08:24:31,526 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260340.0, ans=0.0 +2024-07-29 08:24:41,505 INFO [train.py:1114] (1/4) Epoch 20, batch 1050, loss[loss=0.1802, simple_loss=0.2707, pruned_loss=0.04485, over 4866.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2614, pruned_loss=0.03954, over 931832.62 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:25:12,293 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.624e+01 6.219e+01 7.008e+01 1.029e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 08:25:17,056 INFO [train.py:1114] (1/4) Epoch 20, batch 1100, loss[loss=0.1699, simple_loss=0.259, pruned_loss=0.04034, over 4893.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2623, pruned_loss=0.03974, over 934233.27 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:25:39,687 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=260473.33333333334, ans=0.125 +2024-07-29 08:25:40,393 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=260473.33333333334, ans=0.125 +2024-07-29 08:25:52,967 INFO [train.py:1114] (1/4) Epoch 20, batch 1150, loss[loss=0.1831, simple_loss=0.2724, pruned_loss=0.04686, over 4899.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2623, pruned_loss=0.03995, over 934327.45 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:25:55,153 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260500.0, ans=0.1 +2024-07-29 08:26:07,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260526.66666666666, ans=0.0 +2024-07-29 08:26:18,414 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260540.0, ans=0.125 +2024-07-29 08:26:22,170 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.531e+01 5.714e+01 6.232e+01 6.999e+01 1.113e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 08:26:26,993 INFO [train.py:1114] (1/4) Epoch 20, batch 1200, loss[loss=0.1772, simple_loss=0.2769, pruned_loss=0.03874, over 4875.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2633, pruned_loss=0.04011, over 933894.84 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:28:36,418 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.25 vs. 
limit=15.0 +2024-07-29 08:28:44,342 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=260606.66666666666, ans=0.125 +2024-07-29 08:28:51,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=260620.0, ans=0.025 +2024-07-29 08:28:58,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260633.33333333334, ans=0.1 +2024-07-29 08:28:59,362 INFO [train.py:1114] (1/4) Epoch 20, batch 1250, loss[loss=0.183, simple_loss=0.2721, pruned_loss=0.047, over 4798.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2637, pruned_loss=0.03989, over 937889.82 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:29:00,364 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0 +2024-07-29 08:29:02,270 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.64 vs. limit=22.5 +2024-07-29 08:29:05,433 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260646.66666666666, ans=0.125 +2024-07-29 08:29:16,488 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=260660.0, ans=0.125 +2024-07-29 08:29:16,570 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.90 vs. limit=12.0 +2024-07-29 08:29:25,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260673.33333333334, ans=0.1 +2024-07-29 08:29:29,551 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.565e+01 6.190e+01 6.882e+01 9.944e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 08:29:42,139 INFO [train.py:1114] (1/4) Epoch 20, batch 1300, loss[loss=0.1625, simple_loss=0.2546, pruned_loss=0.03522, over 4692.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2626, pruned_loss=0.03969, over 938890.80 frames. ], batch size: 19, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:29:45,420 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260700.0, ans=0.125 +2024-07-29 08:29:48,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=260713.33333333334, ans=0.0 +2024-07-29 08:29:57,622 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=260726.66666666666, ans=0.0 +2024-07-29 08:30:06,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260740.0, ans=0.1 +2024-07-29 08:30:15,512 INFO [train.py:1114] (1/4) Epoch 20, batch 1350, loss[loss=0.1555, simple_loss=0.2468, pruned_loss=0.03212, over 4763.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2622, pruned_loss=0.03933, over 940793.74 frames. 
], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:30:26,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260780.0, ans=0.1 +2024-07-29 08:30:27,941 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=260780.0, ans=0.125 +2024-07-29 08:30:27,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=260780.0, ans=0.2 +2024-07-29 08:30:29,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=260793.33333333334, ans=0.125 +2024-07-29 08:30:37,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=260806.66666666666, ans=0.2 +2024-07-29 08:30:38,307 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260806.66666666666, ans=0.0 +2024-07-29 08:30:40,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=260806.66666666666, ans=0.2 +2024-07-29 08:30:41,539 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=260806.66666666666, ans=0.2 +2024-07-29 08:30:44,264 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=260820.0, ans=0.09899494936611666 +2024-07-29 08:30:44,740 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.728e+01 5.649e+01 6.305e+01 7.298e+01 1.047e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 08:30:49,524 INFO [train.py:1114] (1/4) Epoch 20, batch 1400, loss[loss=0.1384, simple_loss=0.2306, pruned_loss=0.02307, over 4713.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2627, pruned_loss=0.03978, over 942406.16 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:30:50,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=260833.33333333334, ans=0.2 +2024-07-29 08:30:51,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=260833.33333333334, ans=0.125 +2024-07-29 08:30:54,171 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:31:03,733 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=260860.0, ans=0.125 +2024-07-29 08:31:16,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=260873.33333333334, ans=0.125 +2024-07-29 08:31:16,751 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0 +2024-07-29 08:31:25,043 INFO [train.py:1114] (1/4) Epoch 20, batch 1450, loss[loss=0.1744, simple_loss=0.2664, pruned_loss=0.04119, over 4693.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2627, pruned_loss=0.0395, over 942521.12 frames. 
], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:31:27,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=260900.0, ans=0.04949747468305833 +2024-07-29 08:31:27,846 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260900.0, ans=0.125 +2024-07-29 08:31:35,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=260913.33333333334, ans=0.0 +2024-07-29 08:31:41,116 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260926.66666666666, ans=0.1 +2024-07-29 08:31:42,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260926.66666666666, ans=0.1 +2024-07-29 08:31:44,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.87 vs. limit=15.0 +2024-07-29 08:31:52,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=260953.33333333334, ans=0.2 +2024-07-29 08:31:53,425 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.592e+01 6.212e+01 7.267e+01 9.238e+01, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 08:31:53,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=15.0 +2024-07-29 08:31:58,325 INFO [train.py:1114] (1/4) Epoch 20, batch 1500, loss[loss=0.1536, simple_loss=0.2523, pruned_loss=0.02746, over 4818.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.263, pruned_loss=0.03966, over 942202.27 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:32:00,638 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260966.66666666666, ans=0.0 +2024-07-29 08:32:09,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=260980.0, ans=0.0 +2024-07-29 08:32:15,762 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260993.33333333334, ans=0.125 +2024-07-29 08:32:23,043 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:32:30,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=261020.0, ans=0.125 +2024-07-29 08:32:33,958 INFO [train.py:1114] (1/4) Epoch 20, batch 1550, loss[loss=0.1913, simple_loss=0.2836, pruned_loss=0.04954, over 4898.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2627, pruned_loss=0.03943, over 938837.85 frames. 
], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:32:34,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=261033.33333333334, ans=0.0 +2024-07-29 08:32:39,413 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=261033.33333333334, ans=0.025 +2024-07-29 08:32:55,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261073.33333333334, ans=0.125 +2024-07-29 08:32:55,826 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:33:01,738 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=261086.66666666666, ans=0.125 +2024-07-29 08:33:01,771 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=261086.66666666666, ans=0.0 +2024-07-29 08:33:04,817 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.534e+01 6.134e+01 7.096e+01 1.070e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 08:33:09,548 INFO [train.py:1114] (1/4) Epoch 20, batch 1600, loss[loss=0.1712, simple_loss=0.2687, pruned_loss=0.03686, over 4877.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2626, pruned_loss=0.03948, over 937509.47 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:33:09,752 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261100.0, ans=0.0 +2024-07-29 08:33:17,651 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=261113.33333333334, ans=0.0 +2024-07-29 08:33:21,818 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=261113.33333333334, ans=0.0 +2024-07-29 08:33:22,619 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.90 vs. limit=22.5 +2024-07-29 08:33:30,476 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261140.0, ans=0.0 +2024-07-29 08:33:34,419 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=261140.0, ans=0.0 +2024-07-29 08:33:35,179 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=261140.0, ans=0.0 +2024-07-29 08:33:39,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=261153.33333333334, ans=0.0 +2024-07-29 08:33:44,377 INFO [train.py:1114] (1/4) Epoch 20, batch 1650, loss[loss=0.1478, simple_loss=0.2451, pruned_loss=0.02527, over 4672.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2616, pruned_loss=0.03923, over 937446.24 frames. 
], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:33:46,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261166.66666666666, ans=0.125 +2024-07-29 08:34:02,120 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:34:14,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=261206.66666666666, ans=0.125 +2024-07-29 08:34:18,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=261206.66666666666, ans=0.0 +2024-07-29 08:34:21,297 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=261220.0, ans=0.025 +2024-07-29 08:34:21,372 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=261220.0, ans=0.125 +2024-07-29 08:34:22,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.635e+01 6.098e+01 6.570e+01 1.046e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 08:34:27,325 INFO [train.py:1114] (1/4) Epoch 20, batch 1700, loss[loss=0.1789, simple_loss=0.2635, pruned_loss=0.04717, over 4696.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2622, pruned_loss=0.03943, over 938992.64 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:34:27,478 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=261233.33333333334, ans=0.2 +2024-07-29 08:34:29,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=261233.33333333334, ans=0.0 +2024-07-29 08:34:30,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=261233.33333333334, ans=0.125 +2024-07-29 08:34:30,268 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261233.33333333334, ans=0.125 +2024-07-29 08:34:32,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=261233.33333333334, ans=0.125 +2024-07-29 08:34:38,001 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=261246.66666666666, ans=0.5 +2024-07-29 08:34:40,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261260.0, ans=0.1 +2024-07-29 08:34:45,324 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261260.0, ans=0.125 +2024-07-29 08:34:48,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261260.0, ans=0.1 +2024-07-29 08:34:52,053 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:34:53,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=261273.33333333334, ans=0.025 +2024-07-29 08:35:05,855 INFO [train.py:1114] (1/4) Epoch 20, batch 1750, loss[loss=0.1672, 
simple_loss=0.242, pruned_loss=0.04619, over 4817.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2618, pruned_loss=0.03926, over 939703.50 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:35:20,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=261313.33333333334, ans=0.125 +2024-07-29 08:35:54,910 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=261340.0, ans=0.2 +2024-07-29 08:36:01,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=261340.0, ans=0.125 +2024-07-29 08:36:11,239 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.694e+01 6.446e+01 7.395e+01 1.026e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 08:36:16,133 INFO [train.py:1114] (1/4) Epoch 20, batch 1800, loss[loss=0.1901, simple_loss=0.2822, pruned_loss=0.04903, over 4640.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2617, pruned_loss=0.03915, over 940203.69 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:36:18,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261366.66666666666, ans=0.125 +2024-07-29 08:36:29,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=261380.0, ans=0.125 +2024-07-29 08:36:35,107 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=261393.33333333334, ans=0.0 +2024-07-29 08:37:16,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=261393.33333333334, ans=0.025 +2024-07-29 08:37:16,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=261406.66666666666, ans=0.0 +2024-07-29 08:37:22,031 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.22 vs. limit=22.5 +2024-07-29 08:37:23,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261420.0, ans=0.125 +2024-07-29 08:37:24,253 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=261420.0, ans=0.025 +2024-07-29 08:37:47,130 INFO [train.py:1114] (1/4) Epoch 20, batch 1850, loss[loss=0.1744, simple_loss=0.2695, pruned_loss=0.03963, over 4812.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2611, pruned_loss=0.03914, over 940761.55 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:38:27,606 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=261486.66666666666, ans=0.125 +2024-07-29 08:38:27,832 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.98 vs. 
limit=15.0 +2024-07-29 08:38:30,213 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.603e+01 6.221e+01 6.965e+01 1.039e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 08:38:30,964 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=261486.66666666666, ans=0.0 +2024-07-29 08:38:32,302 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=261486.66666666666, ans=0.2 +2024-07-29 08:38:34,807 INFO [train.py:1114] (1/4) Epoch 20, batch 1900, loss[loss=0.1787, simple_loss=0.2634, pruned_loss=0.04704, over 4657.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03953, over 942013.74 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:38:35,744 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261500.0, ans=0.125 +2024-07-29 08:38:37,668 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261500.0, ans=0.125 +2024-07-29 08:38:54,351 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=261526.66666666666, ans=0.0 +2024-07-29 08:38:55,662 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=261526.66666666666, ans=0.125 +2024-07-29 08:39:11,160 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-07-29 08:39:11,508 INFO [train.py:1114] (1/4) Epoch 20, batch 1950, loss[loss=0.1654, simple_loss=0.2689, pruned_loss=0.03095, over 4895.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2633, pruned_loss=0.03961, over 943819.22 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 64.0 +2024-07-29 08:39:17,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261580.0, ans=0.125 +2024-07-29 08:39:23,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=261580.0, ans=0.125 +2024-07-29 08:39:25,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261593.33333333334, ans=0.1 +2024-07-29 08:39:30,156 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=261593.33333333334, ans=0.1 +2024-07-29 08:39:30,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=261593.33333333334, ans=0.0 +2024-07-29 08:39:32,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=261606.66666666666, ans=0.125 +2024-07-29 08:39:40,864 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.670e+01 6.297e+01 7.133e+01 1.211e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-29 08:39:41,773 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261620.0, ans=0.1 +2024-07-29 08:39:45,715 INFO [train.py:1114] (1/4) Epoch 20, batch 2000, loss[loss=0.149, simple_loss=0.2241, pruned_loss=0.03692, over 4811.00 frames. 
], tot_loss[loss=0.1718, simple_loss=0.2637, pruned_loss=0.03989, over 941225.48 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0 +2024-07-29 08:40:08,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=261660.0, ans=0.125 +2024-07-29 08:40:15,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=261673.33333333334, ans=0.04949747468305833 +2024-07-29 08:40:19,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=261673.33333333334, ans=0.035 +2024-07-29 08:40:25,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.74 vs. limit=8.0 +2024-07-29 08:40:29,402 INFO [train.py:1114] (1/4) Epoch 20, batch 2050, loss[loss=0.1344, simple_loss=0.216, pruned_loss=0.02642, over 4605.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2629, pruned_loss=0.0398, over 939470.25 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0 +2024-07-29 08:40:54,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261726.66666666666, ans=0.1 +2024-07-29 08:41:01,240 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=261726.66666666666, ans=0.0 +2024-07-29 08:41:08,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=261740.0, ans=0.2 +2024-07-29 08:41:14,700 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.787e+01 6.324e+01 7.549e+01 1.272e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-29 08:41:18,632 INFO [train.py:1114] (1/4) Epoch 20, batch 2100, loss[loss=0.1567, simple_loss=0.2541, pruned_loss=0.02965, over 4762.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2622, pruned_loss=0.03946, over 940894.87 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:41:31,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=261793.33333333334, ans=0.0 +2024-07-29 08:41:44,380 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=261820.0, ans=0.0 +2024-07-29 08:41:51,683 INFO [train.py:1114] (1/4) Epoch 20, batch 2150, loss[loss=0.1576, simple_loss=0.2486, pruned_loss=0.03328, over 4890.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2608, pruned_loss=0.03914, over 943950.56 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:42:02,013 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=261846.66666666666, ans=0.125 +2024-07-29 08:42:29,165 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.32 vs. 
limit=15.0 +2024-07-29 08:42:30,786 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=261873.33333333334, ans=0.2 +2024-07-29 08:42:32,165 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=261873.33333333334, ans=0.2 +2024-07-29 08:42:32,758 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=261873.33333333334, ans=0.04949747468305833 +2024-07-29 08:42:43,998 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.512e+01 6.073e+01 7.043e+01 1.112e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-07-29 08:42:53,723 INFO [train.py:1114] (1/4) Epoch 20, batch 2200, loss[loss=0.1571, simple_loss=0.2442, pruned_loss=0.03495, over 4814.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2602, pruned_loss=0.03878, over 943416.26 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:42:57,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=261900.0, ans=0.015 +2024-07-29 08:43:27,860 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.79 vs. limit=15.0 +2024-07-29 08:43:42,630 INFO [train.py:1114] (1/4) Epoch 20, batch 2250, loss[loss=0.1534, simple_loss=0.2572, pruned_loss=0.0248, over 4698.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2609, pruned_loss=0.03912, over 941687.96 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:44:17,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=261993.33333333334, ans=0.025 +2024-07-29 08:44:25,861 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=261993.33333333334, ans=0.125 +2024-07-29 08:44:25,981 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261993.33333333334, ans=0.1 +2024-07-29 08:44:51,829 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.514e+01 6.259e+01 6.946e+01 1.195e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 08:45:13,582 INFO [train.py:1114] (1/4) Epoch 20, batch 2300, loss[loss=0.156, simple_loss=0.2419, pruned_loss=0.03504, over 4941.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2592, pruned_loss=0.03855, over 939492.01 frames. 
], batch size: 12, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:45:15,357 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262033.33333333334, ans=0.0 +2024-07-29 08:45:51,588 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=262046.66666666666, ans=0.125 +2024-07-29 08:45:55,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262046.66666666666, ans=0.1 +2024-07-29 08:45:58,364 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262046.66666666666, ans=0.1 +2024-07-29 08:47:06,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=262060.0, ans=0.125 +2024-07-29 08:47:28,965 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.27 vs. limit=22.5 +2024-07-29 08:47:34,611 INFO [train.py:1114] (1/4) Epoch 20, batch 2350, loss[loss=0.1718, simple_loss=0.2777, pruned_loss=0.03301, over 4634.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2594, pruned_loss=0.03841, over 941397.26 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:48:08,534 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=262113.33333333334, ans=0.5 +2024-07-29 08:48:09,512 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.10 vs. limit=15.0 +2024-07-29 08:48:35,085 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262140.0, ans=0.125 +2024-07-29 08:48:41,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262140.0, ans=0.125 +2024-07-29 08:48:49,366 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.42 vs. limit=15.0 +2024-07-29 08:48:50,827 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.808e+01 6.182e+01 6.944e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 08:48:51,078 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=262153.3333333333, ans=0.0 +2024-07-29 08:48:51,599 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262153.3333333333, ans=0.1 +2024-07-29 08:48:53,558 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262153.3333333333, ans=0.125 +2024-07-29 08:48:54,793 INFO [train.py:1114] (1/4) Epoch 20, batch 2400, loss[loss=0.1583, simple_loss=0.2439, pruned_loss=0.0364, over 4636.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.261, pruned_loss=0.03871, over 940818.55 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:48:56,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. 
limit=15.0 +2024-07-29 08:49:00,470 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=262166.6666666667, ans=0.0 +2024-07-29 08:49:05,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262180.0, ans=0.125 +2024-07-29 08:49:08,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=262180.0, ans=0.05 +2024-07-29 08:49:12,282 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=262193.3333333333, ans=0.125 +2024-07-29 08:49:32,081 INFO [train.py:1114] (1/4) Epoch 20, batch 2450, loss[loss=0.1766, simple_loss=0.2631, pruned_loss=0.04506, over 4697.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2627, pruned_loss=0.03952, over 936720.16 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:49:33,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=262233.3333333333, ans=0.125 +2024-07-29 08:49:39,817 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262233.3333333333, ans=0.0 +2024-07-29 08:50:05,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=262260.0, ans=0.2 +2024-07-29 08:50:10,257 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=262260.0, ans=0.125 +2024-07-29 08:50:10,439 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=15.0 +2024-07-29 08:50:33,799 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.751e+01 6.244e+01 7.182e+01 1.173e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 08:50:38,477 INFO [train.py:1114] (1/4) Epoch 20, batch 2500, loss[loss=0.168, simple_loss=0.2587, pruned_loss=0.03862, over 4809.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2623, pruned_loss=0.03973, over 938691.67 frames. 
], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:50:44,018 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=262300.0, ans=0.125 +2024-07-29 08:50:45,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=262300.0, ans=0.125 +2024-07-29 08:50:46,168 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262300.0, ans=0.0 +2024-07-29 08:50:46,718 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=262300.0, ans=0.2 +2024-07-29 08:50:53,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=262313.3333333333, ans=0.125 +2024-07-29 08:50:54,274 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=262313.3333333333, ans=0.2 +2024-07-29 08:50:54,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=262313.3333333333, ans=10.0 +2024-07-29 08:50:55,276 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.15 vs. limit=15.0 +2024-07-29 08:51:10,688 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.59 vs. limit=10.0 +2024-07-29 08:51:17,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262340.0, ans=0.0 +2024-07-29 08:51:24,134 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=262340.0, ans=0.125 +2024-07-29 08:51:24,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262340.0, ans=0.1 +2024-07-29 08:51:43,402 INFO [train.py:1114] (1/4) Epoch 20, batch 2550, loss[loss=0.1485, simple_loss=0.2282, pruned_loss=0.03437, over 4822.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2622, pruned_loss=0.0395, over 938427.68 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:52:01,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=262393.3333333333, ans=0.0 +2024-07-29 08:52:21,415 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0 +2024-07-29 08:52:23,024 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.541e+01 6.134e+01 6.874e+01 1.013e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 08:52:23,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=262420.0, ans=0.125 +2024-07-29 08:52:26,148 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.96 vs. limit=15.0 +2024-07-29 08:52:27,207 INFO [train.py:1114] (1/4) Epoch 20, batch 2600, loss[loss=0.1571, simple_loss=0.2501, pruned_loss=0.03207, over 4899.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2628, pruned_loss=0.03961, over 937396.40 frames. 
], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:52:41,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=262446.6666666667, ans=0.125 +2024-07-29 08:52:45,504 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.04 vs. limit=15.0 +2024-07-29 08:52:49,428 INFO [scaling.py:1024] (1/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.60 vs. limit=5.0 +2024-07-29 08:53:01,665 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=262473.3333333333, ans=0.125 +2024-07-29 08:53:03,682 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=262473.3333333333, ans=0.125 +2024-07-29 08:53:03,689 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=262473.3333333333, ans=0.0 +2024-07-29 08:53:14,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262486.6666666667, ans=0.1 +2024-07-29 08:53:27,583 INFO [train.py:1114] (1/4) Epoch 20, batch 2650, loss[loss=0.2013, simple_loss=0.2891, pruned_loss=0.0568, over 4644.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2631, pruned_loss=0.0395, over 939635.04 frames. ], batch size: 16, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:53:40,557 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=262513.3333333333, ans=0.125 +2024-07-29 08:53:48,010 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262526.6666666667, ans=0.1 +2024-07-29 08:53:52,659 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262526.6666666667, ans=0.125 +2024-07-29 08:53:55,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262540.0, ans=0.0 +2024-07-29 08:53:58,637 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=262540.0, ans=0.0 +2024-07-29 08:53:59,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=262540.0, ans=0.025 +2024-07-29 08:54:09,626 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.564e+01 6.225e+01 7.006e+01 1.126e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 08:54:14,600 INFO [train.py:1114] (1/4) Epoch 20, batch 2700, loss[loss=0.1546, simple_loss=0.2466, pruned_loss=0.0313, over 4740.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2631, pruned_loss=0.03974, over 939494.57 frames. 
+2024-07-29 08:54:14,600 INFO [train.py:1114] (1/4) Epoch 20, batch 2700, loss[loss=0.1546, simple_loss=0.2466, pruned_loss=0.0313, over 4740.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2631, pruned_loss=0.03974, over 939494.57 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:54:16,173 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=262566.6666666667, ans=0.125
+2024-07-29 08:54:16,245 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=262566.6666666667, ans=0.125
+2024-07-29 08:54:19,074 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=262566.6666666667, ans=0.0
+2024-07-29 08:54:37,570 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=262593.3333333333, ans=0.125
+2024-07-29 08:54:41,530 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.21 vs. limit=22.5
+2024-07-29 08:54:42,453 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=262606.6666666667, ans=0.2
+2024-07-29 08:54:45,026 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.59 vs. limit=10.0
+2024-07-29 08:54:46,083 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0
+2024-07-29 08:54:52,810 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:54:54,030 INFO [train.py:1114] (1/4) Epoch 20, batch 2750, loss[loss=0.1615, simple_loss=0.257, pruned_loss=0.03299, over 4704.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03943, over 939603.47 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:54:55,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262633.3333333333, ans=0.125
+2024-07-29 08:54:58,203 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=262633.3333333333, ans=0.0
+2024-07-29 08:54:58,344 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=262633.3333333333, ans=0.125
+2024-07-29 08:54:58,525 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0
+2024-07-29 08:55:11,914 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=262660.0, ans=0.025
+2024-07-29 08:55:13,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=262660.0, ans=0.2
+2024-07-29 08:55:13,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=262660.0, ans=0.0
+2024-07-29 08:55:13,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=262660.0, ans=0.125
+2024-07-29 08:55:21,057 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.73 vs. limit=15.0
+2024-07-29 08:55:36,086 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=262686.6666666667, ans=0.2
+2024-07-29 08:55:40,166 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.878e+01 6.772e+01 7.962e+01 1.092e+02, threshold=1.354e+02, percent-clipped=0.0
+2024-07-29 08:55:48,979 INFO [train.py:1114] (1/4) Epoch 20, batch 2800, loss[loss=0.2358, simple_loss=0.3132, pruned_loss=0.07917, over 3425.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2626, pruned_loss=0.03963, over 937646.27 frames. ], batch size: 35, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:55:52,113 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.37 vs. limit=22.5
+2024-07-29 08:55:53,145 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262700.0, ans=0.125
+2024-07-29 08:55:56,466 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=262713.3333333333, ans=0.0
+2024-07-29 08:55:59,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=262713.3333333333, ans=0.0
+2024-07-29 08:56:06,474 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.05 vs. limit=6.0
+2024-07-29 08:56:10,984 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262740.0, ans=0.125
+2024-07-29 08:56:16,525 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=262740.0, ans=0.025
+2024-07-29 08:56:21,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=262753.3333333333, ans=0.0
+2024-07-29 08:56:23,957 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262753.3333333333, ans=0.1
+2024-07-29 08:56:26,495 INFO [train.py:1114] (1/4) Epoch 20, batch 2850, loss[loss=0.1633, simple_loss=0.2557, pruned_loss=0.03548, over 4960.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2625, pruned_loss=0.03953, over 935961.79 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:56:26,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=262766.6666666667, ans=0.125
+2024-07-29 08:56:31,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=262766.6666666667, ans=0.2
+2024-07-29 08:56:32,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=262766.6666666667, ans=0.0
+2024-07-29 08:56:33,062 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:56:34,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=262780.0, ans=0.125
+2024-07-29 08:56:37,052 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262780.0, ans=0.1
+2024-07-29 08:57:02,165 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.799e+01 6.410e+01 7.214e+01 1.051e+02, threshold=1.282e+02, percent-clipped=0.0
+2024-07-29 08:57:06,627 INFO [train.py:1114] (1/4) Epoch 20, batch 2900, loss[loss=0.1442, simple_loss=0.2346, pruned_loss=0.02687, over 4820.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2626, pruned_loss=0.03928, over 939647.96 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:57:07,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=262833.3333333333, ans=0.0
+2024-07-29 08:57:27,111 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.42 vs. limit=15.0
+2024-07-29 08:57:40,659 INFO [train.py:1114] (1/4) Epoch 20, batch 2950, loss[loss=0.2176, simple_loss=0.2958, pruned_loss=0.06972, over 4712.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.261, pruned_loss=0.03907, over 938868.55 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:57:44,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=262900.0, ans=0.125
+2024-07-29 08:57:49,089 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=262913.3333333333, ans=0.2
+2024-07-29 08:57:53,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=262926.6666666667, ans=0.09899494936611666
+2024-07-29 08:57:57,546 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262926.6666666667, ans=0.125
+2024-07-29 08:58:05,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262940.0, ans=0.1
+2024-07-29 08:58:12,843 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.161e+01 5.582e+01 5.984e+01 6.557e+01 9.213e+01, threshold=1.197e+02, percent-clipped=0.0
+2024-07-29 08:58:18,635 INFO [train.py:1114] (1/4) Epoch 20, batch 3000, loss[loss=0.1573, simple_loss=0.2539, pruned_loss=0.03032, over 4759.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2611, pruned_loss=0.03891, over 938099.99 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:58:18,635 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-29 08:58:44,396 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1605, simple_loss=0.2625, pruned_loss=0.02922, over 944034.00 frames.
+2024-07-29 08:58:44,397 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-29 08:58:52,810 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.05 vs. limit=12.0
+2024-07-29 08:59:01,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262993.3333333333, ans=0.0
+2024-07-29 08:59:05,147 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=262993.3333333333, ans=0.125
+2024-07-29 08:59:10,646 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262993.3333333333, ans=0.0
+2024-07-29 08:59:12,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=263006.6666666667, ans=0.2
+2024-07-29 08:59:13,299 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263006.6666666667, ans=0.125
+2024-07-29 08:59:13,938 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:59:40,236 INFO [train.py:1114] (1/4) Epoch 20, batch 3050, loss[loss=0.1614, simple_loss=0.2542, pruned_loss=0.03428, over 4646.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2617, pruned_loss=0.03879, over 937160.16 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:59:46,094 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=263033.3333333333, ans=0.125
+2024-07-29 08:59:47,415 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=263033.3333333333, ans=0.0
+2024-07-29 08:59:54,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=263046.6666666667, ans=0.0
+2024-07-29 08:59:56,174 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=263046.6666666667, ans=0.0
+2024-07-29 09:00:01,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=263060.0, ans=0.5
+2024-07-29 09:00:11,216 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=12.0
+2024-07-29 09:00:19,900 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.658e+01 6.248e+01 7.167e+01 1.022e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-29 09:00:33,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263100.0, ans=0.125
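At batch 3000 the trainer pauses to run a full validation pass (the train.py:1137–1147 records above): validation loss 0.1605 against a training tot_loss of roughly 0.1695, plus a peak-memory report. A minimal sketch of such a periodic validation hook, under the assumption of an ordinary PyTorch per-frame loss loop (the model interface below is illustrative, not the repo's actual one):

```python
import torch

def compute_validation_loss(model, valid_loader, device="cuda"):
    """Sketch of the periodic validation pass logged above (illustrative only)."""
    model.eval()
    total_loss, total_frames = 0.0, 0
    with torch.no_grad():  # no gradients needed for evaluation
        for batch in valid_loader:
            feats = batch["features"].to(device)
            loss, num_frames = model(feats, batch["supervisions"])  # assumed interface
            total_loss += loss.item()
            total_frames += num_frames
    model.train()
    print(f"validation: loss={total_loss / total_frames:.4f}, over {total_frames} frames")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // 2**20}MB")
    return total_loss / total_frames
```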
+2024-07-29 09:00:33,575 INFO [train.py:1114] (1/4) Epoch 20, batch 3100, loss[loss=0.1604, simple_loss=0.2561, pruned_loss=0.03237, over 4616.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2615, pruned_loss=0.03931, over 938163.34 frames. ], batch size: 16, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:01:05,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=263140.0, ans=0.0
+2024-07-29 09:01:09,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=263153.3333333333, ans=15.0
+2024-07-29 09:01:10,347 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263153.3333333333, ans=0.1
+2024-07-29 09:01:13,566 INFO [train.py:1114] (1/4) Epoch 20, batch 3150, loss[loss=0.1531, simple_loss=0.2511, pruned_loss=0.02758, over 4607.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2614, pruned_loss=0.0391, over 938606.04 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:01:16,383 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=263166.6666666667, ans=0.125
+2024-07-29 09:01:17,700 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263166.6666666667, ans=0.125
+2024-07-29 09:01:46,061 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.746e+01 6.588e+01 7.668e+01 1.344e+02, threshold=1.318e+02, percent-clipped=1.0
+2024-07-29 09:01:46,197 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263220.0, ans=0.1
+2024-07-29 09:01:50,148 INFO [train.py:1114] (1/4) Epoch 20, batch 3200, loss[loss=0.1716, simple_loss=0.2677, pruned_loss=0.03781, over 4829.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2607, pruned_loss=0.03883, over 939882.82 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:01:50,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263233.3333333333, ans=0.1
+2024-07-29 09:01:54,778 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.27 vs. limit=22.5
+2024-07-29 09:02:03,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=263260.0, ans=0.125
+2024-07-29 09:02:06,435 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=263260.0, ans=0.2
+2024-07-29 09:02:12,942 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=263273.3333333333, ans=0.2
+2024-07-29 09:02:14,321 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263273.3333333333, ans=0.1
+2024-07-29 09:02:19,731 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=263286.6666666667, ans=0.2
+2024-07-29 09:02:21,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=263286.6666666667, ans=0.125
+2024-07-29 09:02:27,326 INFO [train.py:1114] (1/4) Epoch 20, batch 3250, loss[loss=0.1681, simple_loss=0.2625, pruned_loss=0.03688, over 4935.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2614, pruned_loss=0.03898, over 940712.29 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:02:27,545 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=263300.0, ans=0.0
+2024-07-29 09:02:30,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=263300.0, ans=0.2
+2024-07-29 09:02:57,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=263353.3333333333, ans=0.025
+2024-07-29 09:02:59,774 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.543e+01 6.289e+01 7.306e+01 9.331e+01, threshold=1.258e+02, percent-clipped=0.0
+2024-07-29 09:03:02,701 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=263353.3333333333, ans=0.0
+2024-07-29 09:03:03,837 INFO [train.py:1114] (1/4) Epoch 20, batch 3300, loss[loss=0.1597, simple_loss=0.2549, pruned_loss=0.03222, over 4679.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2608, pruned_loss=0.03904, over 941078.39 frames. ], batch size: 19, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:03:05,595 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.13 vs. limit=15.0
+2024-07-29 09:03:15,938 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=263380.0, ans=15.0
+2024-07-29 09:03:24,754 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=263406.6666666667, ans=0.125
+2024-07-29 09:03:27,341 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=263406.6666666667, ans=0.125
+2024-07-29 09:03:37,232 INFO [train.py:1114] (1/4) Epoch 20, batch 3350, loss[loss=0.1718, simple_loss=0.2711, pruned_loss=0.03625, over 4852.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2616, pruned_loss=0.0395, over 938574.16 frames. ], batch size: 18, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:03:37,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=263433.3333333333, ans=0.125
+2024-07-29 09:03:43,703 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.61 vs. limit=22.5
+2024-07-29 09:03:44,691 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:03:46,904 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263446.6666666667, ans=0.1
+2024-07-29 09:03:49,821 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.10 vs. limit=15.0
+2024-07-29 09:04:04,407 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0
+2024-07-29 09:04:07,370 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.699e+01 6.337e+01 7.173e+01 1.148e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-29 09:04:11,615 INFO [train.py:1114] (1/4) Epoch 20, batch 3400, loss[loss=0.1669, simple_loss=0.2466, pruned_loss=0.04359, over 4801.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2617, pruned_loss=0.0397, over 937376.52 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:04:31,469 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=263526.6666666667, ans=0.0
+2024-07-29 09:04:39,881 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=263540.0, ans=0.125
+2024-07-29 09:04:47,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263553.3333333333, ans=0.125
+2024-07-29 09:04:49,169 INFO [train.py:1114] (1/4) Epoch 20, batch 3450, loss[loss=0.1694, simple_loss=0.2678, pruned_loss=0.03547, over 4700.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03976, over 937663.30 frames. ], batch size: 19, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:05:01,925 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=263593.3333333333, ans=0.0
+2024-07-29 09:05:02,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=263593.3333333333, ans=0.125
+2024-07-29 09:05:03,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=263593.3333333333, ans=0.125
+2024-07-29 09:05:18,038 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263620.0, ans=0.1
+2024-07-29 09:05:18,522 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.782e+01 6.590e+01 7.406e+01 1.017e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-29 09:05:22,631 INFO [train.py:1114] (1/4) Epoch 20, batch 3500, loss[loss=0.137, simple_loss=0.2177, pruned_loss=0.02814, over 4938.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2618, pruned_loss=0.03966, over 938156.09 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
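The Whitening records track a per-module diagnostic: metric measures how far the channel covariance of a layer's output is from being "white" (isotropic), and a corrective term only engages when it exceeds the printed limit, which is why most records here sit comfortably below it. As a loose sketch of one way such a metric can be defined (an assumption about the general shape; icefall's Whiten module in scaling.py may differ in detail):

```python
import torch

def whiteness_metric(x: torch.Tensor) -> float:
    """Sketch: mean squared covariance eigenvalue over squared mean eigenvalue.

    x: (num_frames, num_channels) activations. Equals 1.0 for perfectly white
    (isotropic) features and grows as the covariance becomes anisotropic.
    Illustrative definition only, not necessarily the exact one in scaling.py.
    """
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]       # (C, C) channel covariance
    eigs = torch.linalg.eigvalsh(cov)  # real eigenvalues of a symmetric matrix
    return (eigs.pow(2).mean() / eigs.mean().pow(2)).item()

# A module would compare this against its limit (e.g. 15.0) and only apply a
# corrective gradient when metric > limit, as in the log records above.
```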
+2024-07-29 09:05:30,524 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0
+2024-07-29 09:05:30,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=263646.6666666667, ans=0.0
+2024-07-29 09:05:35,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=263646.6666666667, ans=0.07
+2024-07-29 09:05:39,835 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=263660.0, ans=0.125
+2024-07-29 09:05:40,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=263660.0, ans=0.0
+2024-07-29 09:05:46,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=263673.3333333333, ans=0.025
+2024-07-29 09:05:56,337 INFO [train.py:1114] (1/4) Epoch 20, batch 3550, loss[loss=0.1465, simple_loss=0.2438, pruned_loss=0.02459, over 4670.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2615, pruned_loss=0.03968, over 939015.10 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:05:57,299 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.12 vs. limit=12.0
+2024-07-29 09:06:15,367 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:06:17,573 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263726.6666666667, ans=0.1
+2024-07-29 09:06:31,371 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263740.0, ans=0.125
+2024-07-29 09:06:35,175 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.573e+01 6.229e+01 6.741e+01 1.100e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-29 09:06:40,732 INFO [train.py:1114] (1/4) Epoch 20, batch 3600, loss[loss=0.1625, simple_loss=0.2539, pruned_loss=0.03557, over 4967.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2606, pruned_loss=0.03885, over 940526.16 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:06:51,609 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.51 vs. limit=22.5
+2024-07-29 09:06:52,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=263780.0, ans=0.0
+2024-07-29 09:07:02,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263806.6666666667, ans=0.1
+2024-07-29 09:07:02,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0
+2024-07-29 09:07:06,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=263806.6666666667, ans=0.0
+2024-07-29 09:07:11,977 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.08 vs. limit=15.0
+2024-07-29 09:07:14,877 INFO [train.py:1114] (1/4) Epoch 20, batch 3650, loss[loss=0.1998, simple_loss=0.2876, pruned_loss=0.05601, over 4902.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2595, pruned_loss=0.0385, over 941119.87 frames. ], batch size: 15, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:07:17,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=263833.3333333333, ans=0.125
+2024-07-29 09:07:20,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=263833.3333333333, ans=0.025
+2024-07-29 09:07:23,120 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263846.6666666667, ans=0.1
+2024-07-29 09:07:34,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263873.3333333333, ans=0.1
+2024-07-29 09:07:44,531 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.430e+01 6.137e+01 7.012e+01 1.010e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 09:07:45,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=263886.6666666667, ans=0.2
+2024-07-29 09:07:46,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263886.6666666667, ans=0.1
+2024-07-29 09:07:48,573 INFO [train.py:1114] (1/4) Epoch 20, batch 3700, loss[loss=0.1869, simple_loss=0.2842, pruned_loss=0.04481, over 4931.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2605, pruned_loss=0.03883, over 942079.12 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:07:52,390 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=263900.0, ans=0.125
+2024-07-29 09:07:53,443 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-29 09:07:58,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=263913.3333333333, ans=0.0
+2024-07-29 09:07:58,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263913.3333333333, ans=0.1
+2024-07-29 09:08:21,541 INFO [train.py:1114] (1/4) Epoch 20, batch 3750, loss[loss=0.1512, simple_loss=0.2341, pruned_loss=0.03415, over 4797.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2608, pruned_loss=0.03905, over 943554.22 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:08:39,781 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=263993.3333333333, ans=0.2
+2024-07-29 09:08:39,929 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=263993.3333333333, ans=0.2
+2024-07-29 09:08:57,862 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.705e+01 6.410e+01 7.000e+01 1.025e+02, threshold=1.282e+02, percent-clipped=0.0
+2024-07-29 09:08:59,545 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=12.0
+2024-07-29 09:09:02,216 INFO [train.py:1114] (1/4) Epoch 20, batch 3800, loss[loss=0.1753, simple_loss=0.2726, pruned_loss=0.03899, over 4824.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2611, pruned_loss=0.03917, over 941509.61 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:09:44,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264073.3333333333, ans=0.125
+2024-07-29 09:09:46,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264086.6666666667, ans=0.125
+2024-07-29 09:09:47,596 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264086.6666666667, ans=0.125
+2024-07-29 09:09:53,587 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=264100.0, ans=0.125
+2024-07-29 09:09:54,164 INFO [train.py:1114] (1/4) Epoch 20, batch 3850, loss[loss=0.1722, simple_loss=0.2637, pruned_loss=0.04041, over 4644.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2611, pruned_loss=0.03896, over 942335.10 frames. ], batch size: 16, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 09:10:21,426 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264153.3333333333, ans=0.125
+2024-07-29 09:10:24,564 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.626e+01 6.107e+01 6.849e+01 9.588e+01, threshold=1.221e+02, percent-clipped=0.0
+2024-07-29 09:10:31,760 INFO [train.py:1114] (1/4) Epoch 20, batch 3900, loss[loss=0.1599, simple_loss=0.2667, pruned_loss=0.02653, over 4811.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2616, pruned_loss=0.03912, over 943090.46 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 16.0
+2024-07-29 09:10:37,109 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264166.6666666667, ans=0.125
+2024-07-29 09:10:45,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=264193.3333333333, ans=0.2
+2024-07-29 09:10:50,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=264193.3333333333, ans=0.0
+2024-07-29 09:10:59,053 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=264220.0, ans=0.025
+2024-07-29 09:11:02,994 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:11:26,905 INFO [train.py:1114] (1/4) Epoch 20, batch 3950, loss[loss=0.1762, simple_loss=0.2684, pruned_loss=0.04203, over 4861.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2621, pruned_loss=0.03937, over 944911.02 frames. ], batch size: 16, lr: 3.72e-03, grad_scale: 16.0
+2024-07-29 09:11:37,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264246.6666666667, ans=0.125
+2024-07-29 09:11:52,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=264273.3333333333, ans=0.125
+2024-07-29 09:11:52,426 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.94 vs. limit=15.0
+2024-07-29 09:11:59,940 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.612e+01 6.214e+01 7.012e+01 1.031e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-29 09:12:00,990 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.09 vs. limit=15.0
+2024-07-29 09:12:03,449 INFO [train.py:1114] (1/4) Epoch 20, batch 4000, loss[loss=0.1476, simple_loss=0.2388, pruned_loss=0.02821, over 4774.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2624, pruned_loss=0.03983, over 941130.82 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:12:12,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=264300.0, ans=0.125
+2024-07-29 09:12:56,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=264326.6666666667, ans=0.125
+2024-07-29 09:13:01,163 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=264340.0, ans=0.0
+2024-07-29 09:13:42,201 INFO [train.py:1114] (1/4) Epoch 20, batch 4050, loss[loss=0.2263, simple_loss=0.3049, pruned_loss=0.07383, over 3371.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03956, over 939609.89 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:13:42,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264366.6666666667, ans=0.1
+2024-07-29 09:13:51,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264380.0, ans=0.125
+2024-07-29 09:13:53,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=264380.0, ans=0.0
+2024-07-29 09:14:02,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=264393.3333333333, ans=0.0
+2024-07-29 09:14:07,706 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.75 vs. limit=10.0
+2024-07-29 09:14:11,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=264406.6666666667, ans=0.125
+2024-07-29 09:14:14,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=264406.6666666667, ans=0.1
+2024-07-29 09:14:17,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264420.0, ans=0.125
+2024-07-29 09:14:18,286 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264420.0, ans=0.0
+2024-07-29 09:14:18,733 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.925e+01 5.618e+01 6.150e+01 7.099e+01 1.073e+02, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 09:14:22,371 INFO [train.py:1114] (1/4) Epoch 20, batch 4100, loss[loss=0.2196, simple_loss=0.2988, pruned_loss=0.07026, over 4904.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03973, over 938531.81 frames. ], batch size: 15, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:14:25,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264433.3333333333, ans=0.125
+2024-07-29 09:14:32,239 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264446.6666666667, ans=0.1
+2024-07-29 09:14:44,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=264460.0, ans=0.0
+2024-07-29 09:14:46,863 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264473.3333333333, ans=0.0
+2024-07-29 09:14:59,174 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.97 vs. limit=15.0
+2024-07-29 09:14:59,484 INFO [train.py:1114] (1/4) Epoch 20, batch 4150, loss[loss=0.2043, simple_loss=0.2921, pruned_loss=0.05823, over 4820.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2618, pruned_loss=0.03992, over 938037.01 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:14:59,608 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=264500.0, ans=0.125
+2024-07-29 09:15:03,836 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=264500.0, ans=0.125
+2024-07-29 09:15:04,644 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=264500.0, ans=0.0
+2024-07-29 09:15:23,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=264540.0, ans=0.0
+2024-07-29 09:15:27,501 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264553.3333333333, ans=0.125
+2024-07-29 09:15:28,202 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=264553.3333333333, ans=0.0
+2024-07-29 09:15:35,198 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.706e+01 6.359e+01 7.433e+01 1.126e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-29 09:15:39,928 INFO [train.py:1114] (1/4) Epoch 20, batch 4200, loss[loss=0.1554, simple_loss=0.2556, pruned_loss=0.0276, over 4916.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2614, pruned_loss=0.03947, over 939604.01 frames. ], batch size: 15, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:16:24,569 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0
+2024-07-29 09:16:30,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-07-29 09:16:39,845 INFO [train.py:1114] (1/4) Epoch 20, batch 4250, loss[loss=0.1616, simple_loss=0.2504, pruned_loss=0.03642, over 4648.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2615, pruned_loss=0.03939, over 941099.35 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:16:53,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264660.0, ans=0.1
+2024-07-29 09:17:01,279 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=264673.3333333333, ans=0.025
+2024-07-29 09:17:07,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=264686.6666666667, ans=0.035
+2024-07-29 09:17:09,904 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.620e+01 6.275e+01 6.899e+01 1.013e+02, threshold=1.255e+02, percent-clipped=0.0
+2024-07-29 09:17:12,188 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=15.0
+2024-07-29 09:17:13,147 INFO [train.py:1114] (1/4) Epoch 20, batch 4300, loss[loss=0.1461, simple_loss=0.2465, pruned_loss=0.02281, over 4769.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2618, pruned_loss=0.03959, over 940486.06 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:17:27,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=264713.3333333333, ans=0.0
+2024-07-29 09:17:29,228 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=264713.3333333333, ans=0.05
+2024-07-29 09:17:29,875 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:17:30,597 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:17:35,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.32 vs. limit=15.0
+2024-07-29 09:17:36,647 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=264740.0, ans=0.125
+2024-07-29 09:17:38,027 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264740.0, ans=0.125
+2024-07-29 09:17:40,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264740.0, ans=0.125
+2024-07-29 09:17:53,008 INFO [train.py:1114] (1/4) Epoch 20, batch 4350, loss[loss=0.1864, simple_loss=0.272, pruned_loss=0.0504, over 4762.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.263, pruned_loss=0.03986, over 941289.48 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:17:55,752 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.74 vs. limit=22.5
+2024-07-29 09:18:03,866 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=264780.0, ans=0.125
+2024-07-29 09:18:21,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.47 vs. limit=22.5
+2024-07-29 09:18:27,678 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.816e+01 5.702e+01 6.164e+01 6.960e+01 9.569e+01, threshold=1.233e+02, percent-clipped=0.0
+2024-07-29 09:18:31,107 INFO [train.py:1114] (1/4) Epoch 20, batch 4400, loss[loss=0.191, simple_loss=0.2885, pruned_loss=0.04677, over 4818.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2631, pruned_loss=0.03951, over 941034.11 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:18:43,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=264833.3333333333, ans=0.125
+2024-07-29 09:18:43,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=264833.3333333333, ans=0.0
+2024-07-29 09:18:44,212 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264833.3333333333, ans=0.1
+2024-07-29 09:18:46,315 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=264846.6666666667, ans=0.0
+2024-07-29 09:18:54,962 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=264860.0, ans=0.125
+2024-07-29 09:18:56,230 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=264860.0, ans=0.125
+2024-07-29 09:18:58,133 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=264873.3333333333, ans=0.125
+2024-07-29 09:19:00,115 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=264873.3333333333, ans=0.0
+2024-07-29 09:19:01,490 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=264873.3333333333, ans=0.2
+2024-07-29 09:19:10,384 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=264886.6666666667, ans=0.2
+2024-07-29 09:19:12,242 INFO [train.py:1114] (1/4) Epoch 20, batch 4450, loss[loss=0.1699, simple_loss=0.2465, pruned_loss=0.04664, over 4941.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2636, pruned_loss=0.03984, over 938982.39 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:19:14,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264900.0, ans=0.1
+2024-07-29 09:19:29,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=264926.6666666667, ans=0.0
+2024-07-29 09:19:35,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264940.0, ans=0.125
+2024-07-29 09:19:50,127 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=264953.3333333333, ans=0.2
+2024-07-29 09:19:52,183 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=264953.3333333333, ans=0.125
+2024-07-29 09:19:53,315 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.589e+01 6.388e+01 7.277e+01 9.167e+01, threshold=1.278e+02, percent-clipped=0.0
+2024-07-29 09:19:57,879 INFO [train.py:1114] (1/4) Epoch 20, batch 4500, loss[loss=0.1885, simple_loss=0.2841, pruned_loss=0.04645, over 4747.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2632, pruned_loss=0.03962, over 938387.40 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:20:19,711 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=264993.3333333333, ans=0.125
+2024-07-29 09:20:22,505 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0
+2024-07-29 09:20:31,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=265020.0, ans=0.04949747468305833
+2024-07-29 09:20:32,671 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.97 vs. limit=6.0
+2024-07-29 09:20:34,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=265020.0, ans=0.025
+2024-07-29 09:20:35,705 INFO [train.py:1114] (1/4) Epoch 20, batch 4550, loss[loss=0.1662, simple_loss=0.2521, pruned_loss=0.04013, over 4904.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2619, pruned_loss=0.03916, over 940222.39 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:20:36,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265033.3333333333, ans=0.1
+2024-07-29 09:20:36,738 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0
+2024-07-29 09:20:42,417 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=265046.6666666667, ans=0.125
+2024-07-29 09:20:49,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=265060.0, ans=0.0
+2024-07-29 09:21:16,615 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.639e+01 6.516e+01 7.459e+01 1.043e+02, threshold=1.303e+02, percent-clipped=0.0
+2024-07-29 09:21:19,972 INFO [train.py:1114] (1/4) Epoch 20, batch 4600, loss[loss=0.1841, simple_loss=0.2756, pruned_loss=0.0463, over 4414.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2604, pruned_loss=0.03869, over 938522.49 frames. ], batch size: 21, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:21:20,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=265100.0, ans=0.2
+2024-07-29 09:21:31,218 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0
+2024-07-29 09:21:34,172 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=265126.6666666667, ans=0.0
+2024-07-29 09:21:34,736 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=265126.6666666667, ans=0.02
+2024-07-29 09:21:35,714 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=265126.6666666667, ans=22.5
+2024-07-29 09:21:40,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=265140.0, ans=0.07
+2024-07-29 09:21:46,827 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:21:50,851 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=265153.3333333333, ans=0.09899494936611666
+2024-07-29 09:21:51,536 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=265153.3333333333, ans=0.0
+2024-07-29 09:21:53,407 INFO [train.py:1114] (1/4) Epoch 20, batch 4650, loss[loss=0.1586, simple_loss=0.2577, pruned_loss=0.02977, over 4858.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2614, pruned_loss=0.0386, over 940255.35 frames. ], batch size: 16, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:21:59,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=265166.6666666667, ans=0.125
+2024-07-29 09:22:11,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=265193.3333333333, ans=0.5
+2024-07-29 09:22:14,520 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=265206.6666666667, ans=0.035
+2024-07-29 09:22:25,176 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.549e+01 5.991e+01 6.748e+01 1.053e+02, threshold=1.198e+02, percent-clipped=0.0
+2024-07-29 09:22:25,529 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.96 vs. limit=10.0
+2024-07-29 09:22:28,487 INFO [train.py:1114] (1/4) Epoch 20, batch 4700, loss[loss=0.1541, simple_loss=0.2416, pruned_loss=0.03332, over 4706.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2615, pruned_loss=0.03901, over 937920.28 frames. ], batch size: 11, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:22:36,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=265233.3333333333, ans=0.07
+2024-07-29 09:22:37,645 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=15.0
+2024-07-29 09:22:43,543 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:22:55,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=265260.0, ans=0.125
+2024-07-29 09:22:55,639 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.08 vs. limit=22.5
+2024-07-29 09:22:56,155 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265273.3333333333, ans=0.1
+2024-07-29 09:23:01,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=265273.3333333333, ans=0.0
+2024-07-29 09:23:01,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=265273.3333333333, ans=0.04949747468305833
+2024-07-29 09:23:12,636 INFO [train.py:1114] (1/4) Epoch 20, batch 4750, loss[loss=0.2046, simple_loss=0.295, pruned_loss=0.05708, over 4491.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2624, pruned_loss=0.03964, over 935516.58 frames. ], batch size: 21, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:23:53,295 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265353.3333333333, ans=0.1
+2024-07-29 09:23:59,496 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.596e+01 6.192e+01 7.037e+01 1.008e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 09:23:59,683 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=265353.3333333333, ans=0.125
+2024-07-29 09:24:06,736 INFO [train.py:1114] (1/4) Epoch 20, batch 4800, loss[loss=0.1957, simple_loss=0.2958, pruned_loss=0.04778, over 4694.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.262, pruned_loss=0.03963, over 932339.68 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:24:21,958 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=22.5
+2024-07-29 09:24:26,650 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.75 vs. limit=22.5
+2024-07-29 09:24:31,187 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=265393.3333333333, ans=0.09899494936611666
+2024-07-29 09:24:35,219 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265393.3333333333, ans=0.125
+2024-07-29 09:24:35,328 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=265393.3333333333, ans=0.125
+2024-07-29 09:24:46,517 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=265420.0, ans=0.2
+2024-07-29 09:24:47,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=265420.0, ans=0.2
+2024-07-29 09:24:59,665 INFO [train.py:1114] (1/4) Epoch 20, batch 4850, loss[loss=0.1727, simple_loss=0.2674, pruned_loss=0.03894, over 4743.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2616, pruned_loss=0.03943, over 932177.76 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:25:09,532 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.46 vs. limit=10.0
+2024-07-29 09:25:11,373 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=265433.3333333333, ans=0.125
+2024-07-29 09:25:11,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=265433.3333333333, ans=0.125
+2024-07-29 09:25:14,693 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265446.6666666667, ans=0.125
+2024-07-29 09:25:16,402 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.07 vs. limit=15.0
+2024-07-29 09:25:22,023 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265460.0, ans=0.1
+2024-07-29 09:25:30,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=265473.3333333333, ans=0.125
+2024-07-29 09:25:32,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=265473.3333333333, ans=0.0
+2024-07-29 09:25:34,632 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.84 vs. limit=15.0
+2024-07-29 09:25:43,422 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.554e+01 6.055e+01 6.631e+01 1.173e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-29 09:25:46,886 INFO [train.py:1114] (1/4) Epoch 20, batch 4900, loss[loss=0.174, simple_loss=0.2699, pruned_loss=0.03908, over 4757.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2611, pruned_loss=0.0388, over 933877.52 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:26:32,320 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265553.3333333333, ans=0.1
+2024-07-29 09:26:34,043 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.44 vs. limit=15.0
+2024-07-29 09:26:34,952 INFO [train.py:1114] (1/4) Epoch 20, batch 4950, loss[loss=0.204, simple_loss=0.2887, pruned_loss=0.05971, over 3464.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2621, pruned_loss=0.03946, over 931788.03 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:26:38,463 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=265566.6666666667, ans=0.125
+2024-07-29 09:26:38,485 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265566.6666666667, ans=0.0
+2024-07-29 09:26:59,442 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265606.6666666667, ans=0.125
+2024-07-29 09:27:00,605 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.61 vs. limit=22.5
+2024-07-29 09:27:11,245 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.554e+01 6.246e+01 6.923e+01 9.859e+01, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 09:27:14,683 INFO [train.py:1114] (1/4) Epoch 20, batch 5000, loss[loss=0.1755, simple_loss=0.2887, pruned_loss=0.03116, over 4663.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.262, pruned_loss=0.03924, over 935656.62 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:27:25,674 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=265646.6666666667, ans=0.1
+2024-07-29 09:27:29,201 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=265646.6666666667, ans=0.0
+2024-07-29 09:27:30,503 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=265660.0, ans=0.0
+2024-07-29 09:27:34,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=265660.0, ans=0.125
+2024-07-29 09:27:35,058 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265660.0, ans=0.125
+2024-07-29 09:27:35,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=265660.0, ans=0.2
+2024-07-29 09:27:49,084 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.35 vs. limit=15.0
+2024-07-29 09:27:50,271 INFO [train.py:1114] (1/4) Epoch 20, batch 5050, loss[loss=0.1532, simple_loss=0.2464, pruned_loss=0.02999, over 4844.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2612, pruned_loss=0.03891, over 938045.43 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:28:10,960 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265713.3333333333, ans=0.1
+2024-07-29 09:28:25,541 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=265740.0, ans=0.04949747468305833
+2024-07-29 09:28:26,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265740.0, ans=0.0
+2024-07-29 09:28:34,373 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.782e+01 6.489e+01 7.303e+01 1.011e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-29 09:28:38,656 INFO [train.py:1114] (1/4) Epoch 20, batch 5100, loss[loss=0.17, simple_loss=0.2577, pruned_loss=0.04111, over 4772.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2619, pruned_loss=0.03936, over 935789.84 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:28:46,458 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.45 vs. limit=12.0
+2024-07-29 09:28:46,928 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265780.0, ans=0.1
+2024-07-29 09:28:48,369 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.68 vs. limit=22.5
+2024-07-29 09:28:54,381 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:28:55,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=265793.3333333333, ans=0.1
+2024-07-29 09:28:59,667 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265806.6666666667, ans=0.1
+2024-07-29 09:29:01,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=265806.6666666667, ans=0.125
+2024-07-29 09:29:11,709 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=265833.3333333333, ans=0.125
+2024-07-29 09:29:12,309 INFO [train.py:1114] (1/4) Epoch 20, batch 5150, loss[loss=0.2053, simple_loss=0.293, pruned_loss=0.05882, over 4837.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2631, pruned_loss=0.03971, over 936233.68 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:29:21,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=265846.6666666667, ans=15.0
+2024-07-29 09:29:24,282 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.96 vs.
limit=15.0 +2024-07-29 09:29:25,361 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265860.0, ans=0.1 +2024-07-29 09:29:25,896 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265860.0, ans=0.125 +2024-07-29 09:29:35,194 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=265873.3333333333, ans=0.2 +2024-07-29 09:29:44,548 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 5.725e+01 6.279e+01 7.318e+01 1.119e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 09:29:47,796 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.31 vs. limit=10.0 +2024-07-29 09:30:06,162 INFO [train.py:1114] (1/4) Epoch 20, batch 5200, loss[loss=0.1831, simple_loss=0.2795, pruned_loss=0.04332, over 4664.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.263, pruned_loss=0.03963, over 936488.41 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:30:27,749 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:30:29,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=265913.3333333333, ans=0.0 +2024-07-29 09:30:31,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265926.6666666667, ans=0.1 +2024-07-29 09:30:47,436 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=265926.6666666667, ans=0.0 +2024-07-29 09:31:01,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265953.3333333333, ans=0.125 +2024-07-29 09:31:02,526 INFO [train.py:1114] (1/4) Epoch 20, batch 5250, loss[loss=0.1773, simple_loss=0.2723, pruned_loss=0.04115, over 4895.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2616, pruned_loss=0.03916, over 936092.37 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:31:20,634 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265993.3333333333, ans=0.125 +2024-07-29 09:31:25,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=266006.6666666667, ans=0.2 +2024-07-29 09:31:28,367 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.99 vs. 
limit=22.5 +2024-07-29 09:31:30,081 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=266020.0, ans=0.125 +2024-07-29 09:31:32,635 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.588e+01 6.109e+01 7.391e+01 1.107e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 09:31:34,793 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266020.0, ans=0.1 +2024-07-29 09:31:37,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266033.3333333333, ans=0.1 +2024-07-29 09:31:37,706 INFO [train.py:1114] (1/4) Epoch 20, batch 5300, loss[loss=0.1824, simple_loss=0.2861, pruned_loss=0.03933, over 4684.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2614, pruned_loss=0.03912, over 934790.69 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:31:38,491 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=266033.3333333333, ans=0.125 +2024-07-29 09:31:56,141 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=266060.0, ans=0.125 +2024-07-29 09:31:59,360 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=266073.3333333333, ans=0.125 +2024-07-29 09:31:59,443 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266073.3333333333, ans=0.1 +2024-07-29 09:31:59,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=266073.3333333333, ans=0.125 +2024-07-29 09:32:09,884 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.21 vs. limit=15.0 +2024-07-29 09:32:10,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=266086.6666666667, ans=0.0 +2024-07-29 09:32:15,627 INFO [train.py:1114] (1/4) Epoch 20, batch 5350, loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03623, over 4495.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2628, pruned_loss=0.03962, over 936521.70 frames. ], batch size: 10, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:32:58,330 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=266140.0, ans=0.2 +2024-07-29 09:32:58,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=266140.0, ans=0.2 +2024-07-29 09:33:06,389 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.787e+01 6.416e+01 7.278e+01 1.158e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 09:33:09,778 INFO [train.py:1114] (1/4) Epoch 20, batch 5400, loss[loss=0.1853, simple_loss=0.2777, pruned_loss=0.04644, over 4243.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2632, pruned_loss=0.03986, over 930454.49 frames. 
], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:33:14,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266166.6666666667, ans=0.1 +2024-07-29 09:33:15,848 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=15.0 +2024-07-29 09:33:19,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=266180.0, ans=0.0 +2024-07-29 09:33:27,893 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.22 vs. limit=15.0 +2024-07-29 09:33:31,051 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=266193.3333333333, ans=0.125 +2024-07-29 09:33:35,015 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=266206.6666666667, ans=0.125 +2024-07-29 09:33:38,934 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266220.0, ans=0.125 +2024-07-29 09:33:39,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266220.0, ans=0.125 +2024-07-29 09:33:42,412 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=266220.0, ans=0.0 +2024-07-29 09:33:46,288 INFO [train.py:1114] (1/4) Epoch 20, batch 5450, loss[loss=0.1405, simple_loss=0.223, pruned_loss=0.02896, over 4703.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.263, pruned_loss=0.03984, over 933555.40 frames. ], batch size: 11, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:34:28,496 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=266246.6666666667, ans=0.05 +2024-07-29 09:34:30,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=266246.6666666667, ans=0.125 +2024-07-29 09:34:35,503 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-07-29 09:34:50,649 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.730e+01 6.160e+01 6.781e+01 9.375e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 09:34:54,546 INFO [train.py:1114] (1/4) Epoch 20, batch 5500, loss[loss=0.1884, simple_loss=0.2793, pruned_loss=0.04876, over 4169.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2617, pruned_loss=0.03932, over 931302.50 frames. 
], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:34:59,271 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266300.0, ans=0.1 +2024-07-29 09:35:06,882 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=266313.3333333333, ans=0.025 +2024-07-29 09:35:15,124 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=266326.6666666667, ans=0.0 +2024-07-29 09:35:23,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=266340.0, ans=0.05 +2024-07-29 09:35:25,310 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=266340.0, ans=0.0 +2024-07-29 09:35:34,349 INFO [train.py:1114] (1/4) Epoch 20, batch 5550, loss[loss=0.1609, simple_loss=0.2533, pruned_loss=0.0342, over 4708.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2615, pruned_loss=0.03957, over 933368.76 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:35:36,656 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=266366.6666666667, ans=0.125 +2024-07-29 09:35:42,042 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=266380.0, ans=0.0 +2024-07-29 09:35:43,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-29 09:35:59,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=266406.6666666667, ans=0.05 +2024-07-29 09:36:07,404 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.404e+01 7.729e+01 1.135e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-29 09:36:08,460 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266420.0, ans=0.1 +2024-07-29 09:36:10,948 INFO [train.py:1114] (1/4) Epoch 20, batch 5600, loss[loss=0.1701, simple_loss=0.2672, pruned_loss=0.03648, over 4745.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2626, pruned_loss=0.0399, over 934394.27 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:36:46,542 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266486.6666666667, ans=0.125 +2024-07-29 09:36:52,280 INFO [train.py:1114] (1/4) Epoch 20, batch 5650, loss[loss=0.1993, simple_loss=0.2992, pruned_loss=0.04973, over 4516.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03965, over 937224.61 frames. 
], batch size: 21, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:37:09,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=266526.6666666667, ans=0.025 +2024-07-29 09:37:11,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=266540.0, ans=0.2 +2024-07-29 09:37:20,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266553.3333333333, ans=0.125 +2024-07-29 09:37:22,260 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.835e+01 6.614e+01 7.684e+01 1.140e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-29 09:37:24,366 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=266553.3333333333, ans=0.025 +2024-07-29 09:37:25,571 INFO [train.py:1114] (1/4) Epoch 20, batch 5700, loss[loss=0.1553, simple_loss=0.2552, pruned_loss=0.02769, over 4693.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.262, pruned_loss=0.03956, over 938068.12 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:37:25,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=266566.6666666667, ans=0.125 +2024-07-29 09:37:25,929 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0 +2024-07-29 09:37:35,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266566.6666666667, ans=0.1 +2024-07-29 09:37:47,618 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=266580.0, ans=0.1 +2024-07-29 09:37:47,653 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=266580.0, ans=0.0 +2024-07-29 09:37:49,040 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=266593.3333333333, ans=0.0 +2024-07-29 09:37:54,742 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=266593.3333333333, ans=15.0 +2024-07-29 09:37:56,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=266606.6666666667, ans=0.025 +2024-07-29 09:37:57,868 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266606.6666666667, ans=0.125 +2024-07-29 09:38:21,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=266606.6666666667, ans=0.125 +2024-07-29 09:38:29,955 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=266620.0, ans=0.125 +2024-07-29 09:38:31,808 INFO [train.py:1114] (1/4) Epoch 20, batch 5750, loss[loss=0.1666, simple_loss=0.2553, pruned_loss=0.03897, over 4686.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.262, pruned_loss=0.03967, over 938114.72 frames. 
], batch size: 19, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:38:45,771 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-07-29 09:38:48,954 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:38:49,126 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=12.0 +2024-07-29 09:38:49,643 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=266646.6666666667, ans=0.025 +2024-07-29 09:38:55,374 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266646.6666666667, ans=0.125 +2024-07-29 09:39:03,892 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0 +2024-07-29 09:39:17,486 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 5.816e+01 6.291e+01 7.219e+01 1.004e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 09:39:19,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=266686.6666666667, ans=0.0 +2024-07-29 09:39:20,770 INFO [train.py:1114] (1/4) Epoch 20, batch 5800, loss[loss=0.1753, simple_loss=0.264, pruned_loss=0.04332, over 4723.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.263, pruned_loss=0.04025, over 937511.22 frames. ], batch size: 19, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:39:29,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=266713.3333333333, ans=0.0 +2024-07-29 09:39:31,806 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.96 vs. limit=22.5 +2024-07-29 09:39:37,878 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=266726.6666666667, ans=0.025 +2024-07-29 09:39:44,666 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=266740.0, ans=0.125 +2024-07-29 09:39:57,728 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.88 vs. limit=22.5 +2024-07-29 09:40:00,083 INFO [train.py:1114] (1/4) Epoch 20, batch 5850, loss[loss=0.1888, simple_loss=0.274, pruned_loss=0.05176, over 4506.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03969, over 938070.06 frames. ], batch size: 21, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:40:04,858 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=266766.6666666667, ans=0.0 +2024-07-29 09:40:05,804 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.71 vs. 
limit=22.5 +2024-07-29 09:40:16,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=266793.3333333333, ans=0.025 +2024-07-29 09:40:17,553 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=266793.3333333333, ans=0.09899494936611666 +2024-07-29 09:40:22,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=266806.6666666667, ans=0.0 +2024-07-29 09:40:25,562 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=266806.6666666667, ans=0.125 +2024-07-29 09:40:30,193 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266820.0, ans=0.1 +2024-07-29 09:40:31,385 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.785e+01 6.450e+01 7.129e+01 1.228e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 09:40:34,111 INFO [train.py:1114] (1/4) Epoch 20, batch 5900, loss[loss=0.2046, simple_loss=0.2902, pruned_loss=0.0595, over 4667.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2612, pruned_loss=0.03933, over 938306.38 frames. ], batch size: 15, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:40:47,726 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.41 vs. limit=12.0 +2024-07-29 09:40:55,118 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.65 vs. limit=15.0 +2024-07-29 09:41:04,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=266860.0, ans=0.125 +2024-07-29 09:41:11,978 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=266873.3333333333, ans=0.025 +2024-07-29 09:41:20,978 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.36 vs. limit=15.0 +2024-07-29 09:41:21,155 INFO [train.py:1114] (1/4) Epoch 20, batch 5950, loss[loss=0.1953, simple_loss=0.284, pruned_loss=0.05329, over 4660.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2609, pruned_loss=0.03915, over 940186.10 frames. ], batch size: 15, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:41:26,198 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=266900.0, ans=0.125 +2024-07-29 09:41:31,234 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=266913.3333333333, ans=0.5 +2024-07-29 09:41:38,759 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.69 vs. 
limit=12.0 +2024-07-29 09:42:00,039 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=266953.3333333333, ans=0.125 +2024-07-29 09:42:00,473 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.654e+01 6.112e+01 6.775e+01 1.038e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 09:42:28,145 INFO [train.py:1114] (1/4) Epoch 20, batch 6000, loss[loss=0.1757, simple_loss=0.2666, pruned_loss=0.04238, over 4233.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2611, pruned_loss=0.03871, over 937031.34 frames. ], batch size: 26, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:42:28,146 INFO [train.py:1137] (1/4) Computing validation loss +2024-07-29 09:42:41,191 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.6954, 3.2818, 3.6114, 3.9639], device='cuda:1') +2024-07-29 09:42:44,430 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1606, simple_loss=0.2622, pruned_loss=0.02953, over 944034.00 frames. +2024-07-29 09:42:44,431 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB +2024-07-29 09:42:55,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=266980.0, ans=0.025 +2024-07-29 09:43:15,207 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.45 vs. limit=15.0 +2024-07-29 09:43:15,578 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267020.0, ans=0.125 +2024-07-29 09:43:18,760 INFO [train.py:1114] (1/4) Epoch 20, batch 6050, loss[loss=0.1572, simple_loss=0.2489, pruned_loss=0.03273, over 4784.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2604, pruned_loss=0.03831, over 938065.74 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:43:37,635 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=267046.6666666667, ans=0.0 +2024-07-29 09:43:49,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=267073.3333333333, ans=0.0 +2024-07-29 09:44:05,095 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.588e+01 6.267e+01 7.088e+01 1.023e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 09:44:12,289 INFO [train.py:1114] (1/4) Epoch 20, batch 6100, loss[loss=0.221, simple_loss=0.323, pruned_loss=0.05948, over 4660.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2609, pruned_loss=0.03865, over 937475.17 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:44:21,289 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=267113.3333333333, ans=0.125 +2024-07-29 09:44:22,211 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.25 vs. limit=22.5 +2024-07-29 09:44:31,012 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=267126.6666666667, ans=0.0 +2024-07-29 09:44:32,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.62 vs. 
limit=22.5 +2024-07-29 09:44:33,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=267126.6666666667, ans=0.125 +2024-07-29 09:44:40,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=267140.0, ans=0.125 +2024-07-29 09:44:45,208 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=267153.3333333333, ans=0.0 +2024-07-29 09:44:45,877 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:44:50,609 INFO [train.py:1114] (1/4) Epoch 20, batch 6150, loss[loss=0.232, simple_loss=0.3047, pruned_loss=0.07966, over 3367.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2611, pruned_loss=0.03853, over 936370.49 frames. ], batch size: 36, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:44:54,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=267166.6666666667, ans=0.125 +2024-07-29 09:45:01,611 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=267180.0, ans=0.125 +2024-07-29 09:45:14,614 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.65 vs. limit=10.0 +2024-07-29 09:45:18,049 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.14 vs. limit=15.0 +2024-07-29 09:45:21,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=267220.0, ans=0.05 +2024-07-29 09:45:24,829 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-07-29 09:45:26,396 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.902e+01 6.533e+01 7.507e+01 1.268e+02, threshold=1.307e+02, percent-clipped=1.0 +2024-07-29 09:45:29,240 INFO [train.py:1114] (1/4) Epoch 20, batch 6200, loss[loss=0.1788, simple_loss=0.2693, pruned_loss=0.04414, over 4733.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2619, pruned_loss=0.03882, over 936247.06 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:45:30,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=267233.3333333333, ans=0.0 +2024-07-29 09:45:57,012 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:46:01,725 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=267286.6666666667, ans=0.0 +2024-07-29 09:46:01,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=267286.6666666667, ans=0.05 +2024-07-29 09:46:02,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=267286.6666666667, ans=0.125 +2024-07-29 09:46:05,741 INFO [train.py:1114] (1/4) Epoch 20, batch 6250, loss[loss=0.1628, simple_loss=0.2579, pruned_loss=0.03385, over 4798.00 frames. 
], tot_loss[loss=0.1693, simple_loss=0.2611, pruned_loss=0.03872, over 932879.23 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:46:09,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=267300.0, ans=0.0 +2024-07-29 09:46:13,923 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=267313.3333333333, ans=0.125 +2024-07-29 09:46:16,088 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:46:21,110 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=267326.6666666667, ans=0.125 +2024-07-29 09:46:36,635 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-07-29 09:46:43,334 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.00 vs. limit=15.0 +2024-07-29 09:46:44,581 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=267353.3333333333, ans=0.0 +2024-07-29 09:46:45,642 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.746e+01 6.370e+01 7.341e+01 9.825e+01, threshold=1.274e+02, percent-clipped=0.0 +2024-07-29 09:46:56,399 INFO [train.py:1114] (1/4) Epoch 20, batch 6300, loss[loss=0.145, simple_loss=0.2213, pruned_loss=0.03438, over 4518.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2611, pruned_loss=0.03889, over 929624.20 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:46:57,876 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=267366.6666666667, ans=0.125 +2024-07-29 09:47:10,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=267393.3333333333, ans=0.2 +2024-07-29 09:47:10,515 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.40 vs. limit=15.0 +2024-07-29 09:47:11,069 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=15.0 +2024-07-29 09:47:11,658 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=267393.3333333333, ans=0.025 +2024-07-29 09:47:12,956 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=267393.3333333333, ans=0.125 +2024-07-29 09:47:14,148 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=267393.3333333333, ans=0.125 +2024-07-29 09:47:21,983 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=267420.0, ans=0.125 +2024-07-29 09:47:24,034 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.35 vs. 
limit=22.5 +2024-07-29 09:47:31,908 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.53 vs. limit=22.5 +2024-07-29 09:47:32,261 INFO [train.py:1114] (1/4) Epoch 20, batch 6350, loss[loss=0.1949, simple_loss=0.2959, pruned_loss=0.04694, over 4649.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2609, pruned_loss=0.03908, over 934675.14 frames. ], batch size: 22, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:47:33,071 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=267433.3333333333, ans=0.1 +2024-07-29 09:48:06,772 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.677e+01 6.317e+01 7.481e+01 1.107e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 09:48:09,430 INFO [train.py:1114] (1/4) Epoch 20, batch 6400, loss[loss=0.1884, simple_loss=0.287, pruned_loss=0.04494, over 4634.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2615, pruned_loss=0.03966, over 935785.36 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:48:09,502 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=267500.0, ans=0.2 +2024-07-29 09:48:20,844 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=267513.3333333333, ans=0.2 +2024-07-29 09:48:39,579 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267553.3333333333, ans=0.125 +2024-07-29 09:48:42,723 INFO [train.py:1114] (1/4) Epoch 20, batch 6450, loss[loss=0.1833, simple_loss=0.2602, pruned_loss=0.05324, over 4453.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2628, pruned_loss=0.04022, over 939470.14 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:48:45,431 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=267566.6666666667, ans=0.0 +2024-07-29 09:48:48,171 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=267566.6666666667, ans=0.1 +2024-07-29 09:48:58,273 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.26 vs. limit=10.0 +2024-07-29 09:49:11,479 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=267606.6666666667, ans=0.125 +2024-07-29 09:49:11,544 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.48 vs. 
limit=15.0 +2024-07-29 09:49:42,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267620.0, ans=0.1 +2024-07-29 09:49:44,227 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=267620.0, ans=0.125 +2024-07-29 09:49:46,230 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+01 5.798e+01 6.360e+01 7.229e+01 1.035e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 09:49:47,003 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=267620.0, ans=0.0 +2024-07-29 09:49:48,982 INFO [train.py:1114] (1/4) Epoch 20, batch 6500, loss[loss=0.2248, simple_loss=0.3003, pruned_loss=0.07463, over 3258.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2622, pruned_loss=0.04002, over 940408.74 frames. ], batch size: 35, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:50:01,732 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=267633.3333333333, ans=0.0 +2024-07-29 09:50:02,682 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.34 vs. limit=22.5 +2024-07-29 09:50:06,977 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=267646.6666666667, ans=0.125 +2024-07-29 09:50:10,378 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.44 vs. limit=15.0 +2024-07-29 09:50:11,244 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.35 vs. limit=10.0 +2024-07-29 09:50:24,068 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=267686.6666666667, ans=0.2 +2024-07-29 09:50:31,652 INFO [train.py:1114] (1/4) Epoch 20, batch 6550, loss[loss=0.1447, simple_loss=0.2229, pruned_loss=0.03323, over 4808.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2609, pruned_loss=0.03953, over 943151.47 frames. ], batch size: 11, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:51:16,423 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=267740.0, ans=0.125 +2024-07-29 09:51:18,304 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=267740.0, ans=0.125 +2024-07-29 09:51:26,544 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.663e+01 6.386e+01 7.207e+01 1.403e+02, threshold=1.277e+02, percent-clipped=3.0 +2024-07-29 09:51:27,013 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=15.0 +2024-07-29 09:51:29,170 INFO [train.py:1114] (1/4) Epoch 20, batch 6600, loss[loss=0.2122, simple_loss=0.2938, pruned_loss=0.0653, over 4934.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2612, pruned_loss=0.03986, over 944965.22 frames. 
], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:51:44,280 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=267793.3333333333, ans=0.125 +2024-07-29 09:51:50,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=267806.6666666667, ans=0.1 +2024-07-29 09:52:52,695 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.06 vs. limit=15.0 +2024-07-29 09:52:53,921 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267806.6666666667, ans=0.125 +2024-07-29 09:52:55,229 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267806.6666666667, ans=0.125 +2024-07-29 09:52:56,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=267820.0, ans=0.125 +2024-07-29 09:52:57,221 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267820.0, ans=0.1 +2024-07-29 09:52:58,450 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:53:11,987 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:53:12,455 INFO [train.py:1114] (1/4) Epoch 20, batch 6650, loss[loss=0.1867, simple_loss=0.2854, pruned_loss=0.04405, over 4626.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.261, pruned_loss=0.03955, over 943468.87 frames. ], batch size: 17, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:53:26,223 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=267846.6666666667, ans=0.0 +2024-07-29 09:53:38,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=267873.3333333333, ans=15.0 +2024-07-29 09:53:44,856 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.709e+01 6.420e+01 7.242e+01 1.116e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-29 09:53:47,497 INFO [train.py:1114] (1/4) Epoch 20, batch 6700, loss[loss=0.1753, simple_loss=0.2723, pruned_loss=0.03915, over 4693.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2615, pruned_loss=0.0397, over 942296.00 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:53:56,022 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=267900.0, ans=0.125 +2024-07-29 09:55:03,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=267953.3333333333, ans=0.025 +2024-07-29 09:55:05,220 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267953.3333333333, ans=0.1 +2024-07-29 09:55:06,358 INFO [train.py:1114] (1/4) Epoch 20, batch 6750, loss[loss=0.1786, simple_loss=0.2599, pruned_loss=0.04866, over 4198.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2618, pruned_loss=0.03962, over 940522.22 frames. 
], batch size: 25, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:55:36,750 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=267993.3333333333, ans=0.125 +2024-07-29 09:55:49,554 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=268020.0, ans=0.125 +2024-07-29 09:55:53,696 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.999e+01 6.595e+01 7.628e+01 1.756e+02, threshold=1.319e+02, percent-clipped=1.0 +2024-07-29 09:55:56,348 INFO [train.py:1114] (1/4) Epoch 20, batch 6800, loss[loss=0.164, simple_loss=0.2629, pruned_loss=0.03254, over 4636.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2622, pruned_loss=0.03934, over 938620.22 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:56:22,703 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=268060.0, ans=0.2 +2024-07-29 09:56:25,487 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=268073.3333333333, ans=0.2 +2024-07-29 09:56:27,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=268073.3333333333, ans=0.0 +2024-07-29 09:56:34,358 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268086.6666666667, ans=0.125 +2024-07-29 09:56:35,149 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.90 vs. limit=15.0 +2024-07-29 09:56:40,832 INFO [train.py:1114] (1/4) Epoch 20, batch 6850, loss[loss=0.1689, simple_loss=0.2625, pruned_loss=0.03765, over 4694.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2624, pruned_loss=0.03955, over 940544.27 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:56:43,047 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.84 vs. limit=15.0 +2024-07-29 09:56:48,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268113.3333333333, ans=0.1 +2024-07-29 09:56:51,566 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=268113.3333333333, ans=0.0 +2024-07-29 09:57:27,508 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=268140.0, ans=0.125 +2024-07-29 09:57:28,285 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268140.0, ans=0.1 +2024-07-29 09:57:36,086 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.831e+01 6.589e+01 8.147e+01 1.219e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 09:57:38,783 INFO [train.py:1114] (1/4) Epoch 20, batch 6900, loss[loss=0.1671, simple_loss=0.2637, pruned_loss=0.03521, over 4959.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2619, pruned_loss=0.03904, over 942300.76 frames. 
], batch size: 13, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:57:56,034 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=268180.0, ans=0.0
+2024-07-29 09:58:16,215 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=268220.0, ans=0.05
+2024-07-29 09:58:18,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=268220.0, ans=0.0
+2024-07-29 09:58:20,249 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=268220.0, ans=0.125
+2024-07-29 09:58:21,471 INFO [train.py:1114] (1/4) Epoch 20, batch 6950, loss[loss=0.1259, simple_loss=0.2036, pruned_loss=0.02405, over 4530.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2619, pruned_loss=0.03965, over 939860.81 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:58:22,269 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=268233.3333333333, ans=0.05
+2024-07-29 09:58:26,047 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=268233.3333333333, ans=0.125
+2024-07-29 09:58:27,402 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=268246.6666666667, ans=0.05
+2024-07-29 09:59:40,639 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=268286.6666666667, ans=0.2
+2024-07-29 09:59:44,603 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.602e+01 6.145e+01 6.791e+01 9.985e+01, threshold=1.229e+02, percent-clipped=0.0
+2024-07-29 10:00:05,969 INFO [train.py:1114] (1/4) Epoch 20, batch 7000, loss[loss=0.1741, simple_loss=0.2671, pruned_loss=0.04058, over 4639.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.261, pruned_loss=0.03934, over 938161.61 frames. ], batch size: 17, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:00:06,195 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=268300.0, ans=0.125
+2024-07-29 10:00:07,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268300.0, ans=0.1
+2024-07-29 10:00:42,281 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=268326.6666666667, ans=0.125
+2024-07-29 10:00:57,432 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. limit=6.0
+2024-07-29 10:01:02,263 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=268353.3333333333, ans=0.025
+2024-07-29 10:01:03,389 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=15.0
+2024-07-29 10:01:12,960 INFO [train.py:1114] (1/4) Epoch 20, batch 7050, loss[loss=0.1726, simple_loss=0.2733, pruned_loss=0.0359, over 4698.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2609, pruned_loss=0.03889, over 941823.03 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:01:14,467 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=268366.6666666667, ans=0.2
+2024-07-29 10:01:19,045 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=268380.0, ans=0.125
+2024-07-29 10:01:26,963 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=268380.0, ans=0.125
+2024-07-29 10:01:29,576 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=268393.3333333333, ans=0.0
+2024-07-29 10:01:45,957 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.713e+01 6.192e+01 7.192e+01 1.067e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 10:01:49,613 INFO [train.py:1114] (1/4) Epoch 20, batch 7100, loss[loss=0.1827, simple_loss=0.2883, pruned_loss=0.03856, over 4823.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2619, pruned_loss=0.03997, over 936930.47 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:01:50,681 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.52 vs. limit=10.0
+2024-07-29 10:01:51,083 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=268433.3333333333, ans=0.125
+2024-07-29 10:01:51,550 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=268433.3333333333, ans=0.125
+2024-07-29 10:01:59,441 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=268433.3333333333, ans=0.0
+2024-07-29 10:02:01,464 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=268446.6666666667, ans=0.125
+2024-07-29 10:02:18,169 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268473.3333333333, ans=0.125
+2024-07-29 10:02:28,180 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.15 vs. limit=15.0
+2024-07-29 10:02:29,507 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0
+2024-07-29 10:02:29,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=268486.6666666667, ans=0.1
+2024-07-29 10:02:31,663 INFO [train.py:1114] (1/4) Epoch 20, batch 7150, loss[loss=0.1797, simple_loss=0.281, pruned_loss=0.03922, over 4534.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2607, pruned_loss=0.03943, over 938448.22 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:02:45,568 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=268500.0, ans=0.125
+2024-07-29 10:02:50,097 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=268513.3333333333, ans=0.125
+2024-07-29 10:03:04,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=268540.0, ans=0.1
+2024-07-29 10:03:08,494 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.35 vs. limit=22.5
+2024-07-29 10:03:09,440 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=268553.3333333333, ans=0.1
+2024-07-29 10:03:31,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=268553.3333333333, ans=0.125
+2024-07-29 10:03:33,074 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.350e+01 5.664e+01 6.258e+01 7.035e+01 1.192e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 10:03:39,933 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=268566.6666666667, ans=0.025
+2024-07-29 10:03:59,855 INFO [train.py:1114] (1/4) Epoch 20, batch 7200, loss[loss=0.1779, simple_loss=0.2735, pruned_loss=0.04111, over 4799.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2616, pruned_loss=0.03952, over 939103.62 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:06:02,103 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.05 vs. limit=15.0
+2024-07-29 10:07:47,186 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.93 vs. limit=15.0
+2024-07-29 10:08:23,288 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=268606.6666666667, ans=0.0
+2024-07-29 10:09:24,685 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=268606.6666666667, ans=0.0
+2024-07-29 10:09:31,276 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=268620.0, ans=0.05
+2024-07-29 10:09:47,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=268620.0, ans=0.0
+2024-07-29 10:09:49,546 INFO [train.py:1114] (1/4) Epoch 20, batch 7250, loss[loss=0.1625, simple_loss=0.253, pruned_loss=0.03596, over 4846.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2616, pruned_loss=0.03977, over 940560.90 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:10:15,630 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.55 vs. limit=15.0
+2024-07-29 10:10:15,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=268646.6666666667, ans=0.0
+2024-07-29 10:10:27,005 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=268660.0, ans=0.1
+2024-07-29 10:10:39,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=268673.3333333333, ans=0.0
+2024-07-29 10:10:47,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=268686.6666666667, ans=0.125
+2024-07-29 10:10:47,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.79 vs. limit=15.0
+2024-07-29 10:11:35,253 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+01 5.763e+01 6.435e+01 7.253e+01 9.940e+01, threshold=1.287e+02, percent-clipped=0.0
+2024-07-29 10:11:35,449 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=268686.6666666667, ans=0.125
+2024-07-29 10:12:08,051 INFO [train.py:1114] (1/4) Epoch 20, batch 7300, loss[loss=0.1538, simple_loss=0.2395, pruned_loss=0.03403, over 4847.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2615, pruned_loss=0.03961, over 940642.32 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:13:11,958 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=268700.0, ans=0.125
+2024-07-29 10:14:11,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=268713.3333333333, ans=0.0
+2024-07-29 10:14:24,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=268713.3333333333, ans=10.0
+2024-07-29 10:17:20,947 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=268713.3333333333, ans=0.2
+2024-07-29 10:18:47,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=268740.0, ans=0.0
+2024-07-29 10:18:48,224 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=268740.0, ans=0.2
+2024-07-29 10:19:39,279 INFO [train.py:1114] (1/4) Epoch 20, batch 7350, loss[loss=0.1643, simple_loss=0.2533, pruned_loss=0.03766, over 4640.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2622, pruned_loss=0.04013, over 939940.89 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:19:45,764 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=268766.6666666667, ans=0.125
+2024-07-29 10:19:50,167 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=268780.0, ans=0.1
+2024-07-29 10:19:53,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=268793.3333333333, ans=0.0
+2024-07-29 10:22:45,957 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.764e+01 6.635e+01 7.838e+01 1.063e+02, threshold=1.327e+02, percent-clipped=0.0
+2024-07-29 10:22:59,769 INFO [train.py:1114] (1/4) Epoch 20, batch 7400, loss[loss=0.1532, simple_loss=0.2453, pruned_loss=0.03056, over 4698.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.262, pruned_loss=0.04008, over 940912.01 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:23:30,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=268833.3333333333, ans=0.125
+2024-07-29 10:24:01,278 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=268886.6666666667, ans=0.125
+2024-07-29 10:24:06,254 INFO [train.py:1114] (1/4) Epoch 20, batch 7450, loss[loss=0.1505, simple_loss=0.2423, pruned_loss=0.02933, over 4619.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2613, pruned_loss=0.04017, over 937960.68 frames. ], batch size: 11, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:24:19,291 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=268926.6666666667, ans=0.0
+2024-07-29 10:25:36,816 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=268940.0, ans=0.025
+2024-07-29 10:25:38,254 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=268940.0, ans=0.05
+2024-07-29 10:25:39,143 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.64 vs. limit=15.0
+2024-07-29 10:25:39,577 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=268940.0, ans=0.125
+2024-07-29 10:25:42,808 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=268953.3333333333, ans=0.125
+2024-07-29 10:25:43,434 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=268953.3333333333, ans=0.125
+2024-07-29 10:25:46,120 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.622e+01 6.334e+01 7.188e+01 1.210e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-29 10:25:50,598 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=268953.3333333333, ans=0.2
+2024-07-29 10:25:51,754 INFO [train.py:1114] (1/4) Epoch 20, batch 7500, loss[loss=0.2335, simple_loss=0.3042, pruned_loss=0.08146, over 3158.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2617, pruned_loss=0.04024, over 936325.08 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:25:55,248 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=268966.6666666667, ans=0.125
+2024-07-29 10:26:07,693 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.33 vs. limit=22.5
+2024-07-29 10:26:15,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=269006.6666666667, ans=0.125
+2024-07-29 10:26:16,902 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.33 vs. limit=15.0
+2024-07-29 10:26:22,575 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.92 vs. limit=15.0
+2024-07-29 10:26:24,838 INFO [train.py:1114] (1/4) Epoch 20, batch 7550, loss[loss=0.1594, simple_loss=0.2511, pruned_loss=0.03381, over 4620.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2627, pruned_loss=0.04069, over 936283.46 frames. ], batch size: 17, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:26:28,600 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.71 vs. limit=15.0
+2024-07-29 10:26:49,301 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0
+2024-07-29 10:27:05,234 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 10:27:06,367 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.698e+01 6.238e+01 6.861e+01 9.805e+01, threshold=1.248e+02, percent-clipped=0.0
+2024-07-29 10:27:14,583 INFO [train.py:1114] (1/4) Epoch 20, batch 7600, loss[loss=0.1736, simple_loss=0.2681, pruned_loss=0.03957, over 4800.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2622, pruned_loss=0.04005, over 938124.25 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:27:19,218 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=269100.0, ans=0.0
+2024-07-29 10:27:19,807 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=269100.0, ans=0.2
+2024-07-29 10:27:23,113 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=269113.3333333333, ans=0.125
+2024-07-29 10:27:31,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=269126.6666666667, ans=0.125
+2024-07-29 10:27:38,966 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0
+2024-07-29 10:27:51,339 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=269153.3333333333, ans=0.125
+2024-07-29 10:27:56,947 INFO [train.py:1114] (1/4) Epoch 20, batch 7650, loss[loss=0.1446, simple_loss=0.2309, pruned_loss=0.0292, over 4938.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2621, pruned_loss=0.04017, over 937230.37 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:28:21,346 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=269193.3333333333, ans=0.125
+2024-07-29 10:28:55,236 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=269206.6666666667, ans=0.1
+2024-07-29 10:29:04,851 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.62 vs. limit=22.5
+2024-07-29 10:29:06,809 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.70 vs. limit=22.5
+2024-07-29 10:29:09,506 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.550e+01 6.272e+01 7.437e+01 1.310e+02, threshold=1.254e+02, percent-clipped=1.0
+2024-07-29 10:29:11,649 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.30 vs. limit=22.5
+2024-07-29 10:29:13,690 INFO [train.py:1114] (1/4) Epoch 20, batch 7700, loss[loss=0.1519, simple_loss=0.2498, pruned_loss=0.02701, over 4690.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.04014, over 934103.57 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:29:20,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=269246.6666666667, ans=0.05
+2024-07-29 10:29:20,814 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=269246.6666666667, ans=0.125
+2024-07-29 10:29:26,300 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.53 vs. limit=15.0
+2024-07-29 10:29:27,237 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=269260.0, ans=0.125
+2024-07-29 10:29:41,343 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=269273.3333333333, ans=0.0
+2024-07-29 10:29:49,382 INFO [train.py:1114] (1/4) Epoch 20, batch 7750, loss[loss=0.1794, simple_loss=0.2838, pruned_loss=0.03752, over 4928.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04024, over 935145.30 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:29:54,406 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=269300.0, ans=0.125
+2024-07-29 10:29:55,099 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=269300.0, ans=0.0
+2024-07-29 10:29:59,586 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=269313.3333333333, ans=0.125
+2024-07-29 10:30:06,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=269313.3333333333, ans=0.0
+2024-07-29 10:30:21,483 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=15.0
+2024-07-29 10:30:21,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=269340.0, ans=0.0
+2024-07-29 10:30:28,808 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.687e+01 6.141e+01 6.601e+01 8.666e+01, threshold=1.228e+02, percent-clipped=0.0
+2024-07-29 10:30:31,961 INFO [train.py:1114] (1/4) Epoch 20, batch 7800, loss[loss=0.1743, simple_loss=0.267, pruned_loss=0.04086, over 4660.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2634, pruned_loss=0.04004, over 937262.83 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:31:20,306 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=269380.0, ans=0.125
+2024-07-29 10:31:26,775 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=269393.3333333333, ans=0.125
+2024-07-29 10:31:28,615 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=269393.3333333333, ans=0.1
+2024-07-29 10:31:32,872 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.32 vs. limit=10.0
+2024-07-29 10:31:33,763 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.85 vs. limit=15.0
+2024-07-29 10:31:42,681 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=269420.0, ans=0.0
+2024-07-29 10:31:43,623 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0
+2024-07-29 10:31:47,979 INFO [train.py:1114] (1/4) Epoch 20, batch 7850, loss[loss=0.1654, simple_loss=0.2461, pruned_loss=0.04239, over 4580.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2635, pruned_loss=0.03986, over 936227.42 frames. ], batch size: 10, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:31:51,473 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269433.3333333333, ans=0.1
+2024-07-29 10:31:52,328 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.99 vs. limit=15.0
+2024-07-29 10:31:55,007 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.34 vs. limit=15.0
+2024-07-29 10:32:03,006 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=269460.0, ans=0.2
+2024-07-29 10:32:11,940 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=269473.3333333333, ans=0.0
+2024-07-29 10:32:18,445 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 5.765e+01 6.588e+01 7.311e+01 1.076e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-29 10:32:21,164 INFO [train.py:1114] (1/4) Epoch 20, batch 7900, loss[loss=0.168, simple_loss=0.2673, pruned_loss=0.03433, over 4875.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2648, pruned_loss=0.04023, over 933409.17 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:32:22,555 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=269500.0, ans=0.0
+2024-07-29 10:32:59,288 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0
+2024-07-29 10:33:06,744 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.38 vs. limit=15.0
+2024-07-29 10:33:07,231 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=269553.3333333333, ans=0.0
+2024-07-29 10:33:09,092 INFO [train.py:1114] (1/4) Epoch 20, batch 7950, loss[loss=0.2075, simple_loss=0.2866, pruned_loss=0.06419, over 3394.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2639, pruned_loss=0.03965, over 935421.58 frames. ], batch size: 36, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:33:09,949 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=269566.6666666667, ans=0.125
+2024-07-29 10:33:09,972 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269566.6666666667, ans=0.1
+2024-07-29 10:33:25,091 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=269580.0, ans=0.0
+2024-07-29 10:33:33,794 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.81 vs. limit=10.0
+2024-07-29 10:33:36,062 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=269606.6666666667, ans=0.125
+2024-07-29 10:33:40,261 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.00 vs. limit=10.0
+2024-07-29 10:33:44,266 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.756e+01 6.342e+01 7.191e+01 1.019e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-29 10:33:46,949 INFO [train.py:1114] (1/4) Epoch 20, batch 8000, loss[loss=0.143, simple_loss=0.2243, pruned_loss=0.03086, over 4627.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2622, pruned_loss=0.0394, over 934285.35 frames. ], batch size: 11, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:34:14,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=269646.6666666667, ans=0.2
+2024-07-29 10:34:31,457 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.27 vs. limit=10.0
+2024-07-29 10:34:37,497 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=269686.6666666667, ans=0.125
+2024-07-29 10:34:47,684 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=269686.6666666667, ans=0.2
+2024-07-29 10:34:48,973 INFO [train.py:1114] (1/4) Epoch 20, batch 8050, loss[loss=0.223, simple_loss=0.301, pruned_loss=0.07246, over 4817.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2626, pruned_loss=0.0394, over 933844.27 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:34:49,707 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=269700.0, ans=0.05
+2024-07-29 10:34:59,899 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=269713.3333333333, ans=0.07
+2024-07-29 10:35:03,143 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=269713.3333333333, ans=0.125
+2024-07-29 10:35:06,965 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=269726.6666666667, ans=0.025
+2024-07-29 10:35:08,300 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=269726.6666666667, ans=0.2
+2024-07-29 10:35:12,275 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=269740.0, ans=0.125
+2024-07-29 10:35:14,122 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=269740.0, ans=0.125
+2024-07-29 10:35:16,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=269753.3333333333, ans=0.125
+2024-07-29 10:35:17,716 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0
+2024-07-29 10:35:18,128 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=269753.3333333333, ans=0.125
+2024-07-29 10:35:21,085 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.839e+01 5.695e+01 6.260e+01 6.907e+01 1.067e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 10:35:35,896 INFO [train.py:1114] (1/4) Epoch 20, batch 8100, loss[loss=0.1622, simple_loss=0.2524, pruned_loss=0.03603, over 4787.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2626, pruned_loss=0.03928, over 933618.28 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:35:36,669 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=269766.6666666667, ans=0.2
+2024-07-29 10:35:44,558 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.45 vs. limit=15.0
+2024-07-29 10:35:53,770 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=269793.3333333333, ans=0.125
+2024-07-29 10:36:08,434 INFO [train.py:1114] (1/4) Epoch 20, batch 8150, loss[loss=0.1965, simple_loss=0.2894, pruned_loss=0.05178, over 4803.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2619, pruned_loss=0.03906, over 937476.05 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:36:12,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=269833.3333333333, ans=0.125
+2024-07-29 10:36:17,334 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=269833.3333333333, ans=0.0
+2024-07-29 10:36:52,369 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.965e+01 5.540e+01 6.167e+01 6.859e+01 1.030e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-29 10:36:54,633 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=269900.0, ans=0.125
+2024-07-29 10:36:55,164 INFO [train.py:1114] (1/4) Epoch 20, batch 8200, loss[loss=0.1658, simple_loss=0.2532, pruned_loss=0.03923, over 4797.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2613, pruned_loss=0.03852, over 938236.81 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:36:59,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=269900.0, ans=0.0
+2024-07-29 10:37:08,475 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=269926.6666666667, ans=0.125
+2024-07-29 10:37:09,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=269926.6666666667, ans=0.125
+2024-07-29 10:38:04,255 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=269953.3333333333, ans=0.125
+2024-07-29 10:38:11,129 INFO [train.py:1114] (1/4) Epoch 20, batch 8250, loss[loss=0.1704, simple_loss=0.2596, pruned_loss=0.04064, over 4895.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2603, pruned_loss=0.03857, over 938349.02 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 64.0
+2024-07-29 10:38:12,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=269966.6666666667, ans=0.025
+2024-07-29 10:38:15,833 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=269966.6666666667, ans=0.0
+2024-07-29 10:38:23,452 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.11 vs. limit=22.5
+2024-07-29 10:38:26,131 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269980.0, ans=0.125
+2024-07-29 10:38:30,280 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=6.0
+2024-07-29 10:38:31,479 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.46 vs. limit=15.0
+2024-07-29 10:38:36,999 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=269993.3333333333, ans=0.1
+2024-07-29 10:38:42,270 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=270006.6666666667, ans=0.125
+2024-07-29 10:38:49,950 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.571e+01 5.582e+01 6.021e+01 6.658e+01 1.061e+02, threshold=1.204e+02, percent-clipped=0.0
+2024-07-29 10:38:52,537 INFO [train.py:1114] (1/4) Epoch 20, batch 8300, loss[loss=0.1701, simple_loss=0.266, pruned_loss=0.03713, over 4899.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2616, pruned_loss=0.039, over 938119.74 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0
+2024-07-29 10:39:00,874 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=270046.6666666667, ans=0.125
+2024-07-29 10:39:08,210 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=270060.0, ans=0.2
+2024-07-29 10:39:18,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=270073.3333333333, ans=0.125
+2024-07-29 10:39:25,589 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=270086.6666666667, ans=0.125
+2024-07-29 10:39:28,019 INFO [train.py:1114] (1/4) Epoch 20, batch 8350, loss[loss=0.2069, simple_loss=0.2982, pruned_loss=0.05785, over 4819.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2618, pruned_loss=0.03912, over 940975.16 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0
+2024-07-29 10:39:34,004 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=270113.3333333333, ans=0.125
+2024-07-29 10:39:35,823 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=270113.3333333333, ans=0.125
+2024-07-29 10:39:40,059 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=270113.3333333333, ans=10.0
+2024-07-29 10:39:46,580 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=270126.6666666667, ans=0.125
+2024-07-29 10:39:51,080 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=270140.0, ans=0.0
+2024-07-29 10:39:55,620 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270153.3333333333, ans=0.125
+2024-07-29 10:39:57,941 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.732e+01 6.400e+01 7.266e+01 9.706e+01, threshold=1.280e+02, percent-clipped=0.0
+2024-07-29 10:40:00,642 INFO [train.py:1114] (1/4) Epoch 20, batch 8400, loss[loss=0.1626, simple_loss=0.2537, pruned_loss=0.03573, over 4776.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2625, pruned_loss=0.03953, over 940000.35 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 64.0
+2024-07-29 10:40:27,159 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=270206.6666666667, ans=0.125
+2024-07-29 10:40:29,442 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.26 vs. limit=15.0
+2024-07-29 10:40:38,492 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=270220.0, ans=0.0
+2024-07-29 10:40:41,433 INFO [train.py:1114] (1/4) Epoch 20, batch 8450, loss[loss=0.1881, simple_loss=0.2741, pruned_loss=0.05106, over 4804.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2628, pruned_loss=0.03967, over 938583.92 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0
+2024-07-29 10:40:49,456 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=270246.6666666667, ans=0.125
+2024-07-29 10:40:49,992 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=270246.6666666667, ans=0.125
+2024-07-29 10:41:12,716 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.700e+01 6.559e+01 7.490e+01 1.068e+02, threshold=1.312e+02, percent-clipped=0.0
+2024-07-29 10:41:13,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0
+2024-07-29 10:41:22,380 INFO [train.py:1114] (1/4) Epoch 20, batch 8500, loss[loss=0.1825, simple_loss=0.2664, pruned_loss=0.04927, over 4604.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2618, pruned_loss=0.03928, over 938653.34 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0
+2024-07-29 10:41:30,523 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270313.3333333333, ans=0.1
+2024-07-29 10:41:34,046 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270313.3333333333, ans=0.1
+2024-07-29 10:41:46,144 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=270340.0, ans=0.05
+2024-07-29 10:41:53,458 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=270353.3333333333, ans=0.125
+2024-07-29 10:41:54,588 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 10:41:55,438 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=270353.3333333333, ans=0.125
+2024-07-29 10:41:56,528 INFO [train.py:1114] (1/4) Epoch 20, batch 8550, loss[loss=0.1186, simple_loss=0.2011, pruned_loss=0.01802, over 4810.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2617, pruned_loss=0.03918, over 939582.05 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0
+2024-07-29 10:41:59,176 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=270366.6666666667, ans=0.025
+2024-07-29 10:42:01,897 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=270366.6666666667, ans=0.0
+2024-07-29 10:42:10,410 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=270393.3333333333, ans=0.125
+2024-07-29 10:42:16,258 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=270393.3333333333, ans=0.125
+2024-07-29 10:42:28,841 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.753e+01 6.246e+01 7.230e+01 1.151e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 10:42:30,772 INFO [train.py:1114] (1/4) Epoch 20, batch 8600, loss[loss=0.1723, simple_loss=0.2742, pruned_loss=0.03517, over 4797.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2616, pruned_loss=0.03931, over 939230.41 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:42:38,813 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270433.3333333333, ans=0.1
+2024-07-29 10:42:38,867 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=270433.3333333333, ans=0.0
+2024-07-29 10:42:42,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=270446.6666666667, ans=0.0
+2024-07-29 10:42:52,790 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=270460.0, ans=0.1
+2024-07-29 10:42:55,093 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.94 vs. limit=15.0
+2024-07-29 10:43:00,687 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.58 vs. limit=22.5
+2024-07-29 10:43:05,119 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.09 vs. limit=15.0
+2024-07-29 10:43:05,672 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270486.6666666667, ans=0.1
+2024-07-29 10:43:08,641 INFO [train.py:1114] (1/4) Epoch 20, batch 8650, loss[loss=0.1654, simple_loss=0.2542, pruned_loss=0.03829, over 4898.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2615, pruned_loss=0.0393, over 940290.33 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:43:14,273 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=270500.0, ans=0.125
+2024-07-29 10:43:24,922 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=270526.6666666667, ans=0.5
+2024-07-29 10:43:32,670 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=270540.0, ans=0.125
+2024-07-29 10:43:41,992 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.889e+01 6.581e+01 7.578e+01 1.021e+02, threshold=1.316e+02, percent-clipped=0.0
+2024-07-29 10:43:43,901 INFO [train.py:1114] (1/4) Epoch 20, batch 8700, loss[loss=0.1584, simple_loss=0.248, pruned_loss=0.03437, over 4759.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03952, over 938127.09 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:43:46,069 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=270566.6666666667, ans=0.025
+2024-07-29 10:43:50,760 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=270580.0, ans=0.0
+2024-07-29 10:43:50,763 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=270580.0, ans=0.125
+2024-07-29 10:43:52,864 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.37 vs. limit=22.5
+2024-07-29 10:44:03,340 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.16 vs. limit=15.0
+2024-07-29 10:44:04,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=270606.6666666667, ans=0.125
+2024-07-29 10:44:15,332 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=270620.0, ans=0.1
+2024-07-29 10:44:16,437 INFO [train.py:1114] (1/4) Epoch 20, batch 8750, loss[loss=0.1683, simple_loss=0.2613, pruned_loss=0.03761, over 4691.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2615, pruned_loss=0.03933, over 936870.99 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:44:23,630 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270633.3333333333, ans=0.125
+2024-07-29 10:44:26,161 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=270633.3333333333, ans=0.125
+2024-07-29 10:44:31,287 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=270646.6666666667, ans=0.2
+2024-07-29 10:44:42,381 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 10:44:43,063 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=270673.3333333333, ans=0.125
+2024-07-29 10:44:46,691 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=270673.3333333333, ans=0.0
+2024-07-29 10:44:51,381 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=270686.6666666667, ans=0.2
+2024-07-29 10:44:51,531 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.57 vs. limit=15.0
+2024-07-29 10:44:54,917 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=270686.6666666667, ans=0.05
+2024-07-29 10:44:55,915 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 5.802e+01 6.247e+01 7.031e+01 1.068e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 10:44:57,839 INFO [train.py:1114] (1/4) Epoch 20, batch 8800, loss[loss=0.1747, simple_loss=0.2706, pruned_loss=0.03934, over 4927.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2622, pruned_loss=0.03938, over 937665.94 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:45:03,057 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=270700.0, ans=0.125
+2024-07-29 10:45:04,337 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=270713.3333333333, ans=0.125
+2024-07-29 10:45:12,893 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=270713.3333333333, ans=0.0
+2024-07-29 10:45:18,462 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=270726.6666666667, ans=0.125
+2024-07-29 10:45:19,874 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.91 vs. limit=15.0
+2024-07-29 10:45:23,782 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=270740.0, ans=0.0
+2024-07-29 10:45:29,612 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=270753.3333333333, ans=0.125
+2024-07-29 10:45:35,522 INFO [train.py:1114] (1/4) Epoch 20, batch 8850, loss[loss=0.1834, simple_loss=0.2806, pruned_loss=0.04315, over 4461.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2626, pruned_loss=0.0398, over 931836.27 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:45:36,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=270766.6666666667, ans=0.125
+2024-07-29 10:46:03,116 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.94 vs. limit=22.5
+2024-07-29 10:46:04,265 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=270793.3333333333, ans=0.1
+2024-07-29 10:46:26,936 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=270820.0, ans=0.0
+2024-07-29 10:46:30,252 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.751e+01 6.544e+01 7.566e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0
+2024-07-29 10:46:32,203 INFO [train.py:1114] (1/4) Epoch 20, batch 8900, loss[loss=0.1341, simple_loss=0.2156, pruned_loss=0.02633, over 4931.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2622, pruned_loss=0.03952, over 929468.02 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:46:35,551 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=270833.3333333333, ans=0.125
+2024-07-29 10:46:48,260 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.32 vs. limit=10.0
+2024-07-29 10:46:53,740 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=270873.3333333333, ans=0.125
+2024-07-29 10:47:06,557 INFO [train.py:1114] (1/4) Epoch 20, batch 8950, loss[loss=0.1892, simple_loss=0.282, pruned_loss=0.04817, over 4571.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2623, pruned_loss=0.03935, over 930542.55 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:47:22,973 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.25 vs. limit=15.0
+2024-07-29 10:49:27,812 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=270953.3333333333, ans=0.125
+2024-07-29 10:49:33,585 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.554e+01 6.323e+01 6.834e+01 1.028e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 10:49:37,830 INFO [train.py:1114] (1/4) Epoch 20, batch 9000, loss[loss=0.1679, simple_loss=0.2681, pruned_loss=0.03382, over 4643.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2618, pruned_loss=0.03933, over 933498.05 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:49:37,831 INFO [train.py:1137] (1/4) Computing validation loss
+2024-07-29 10:51:29,547 INFO [zipformer.py:1858] (1/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0931, 2.9408, 4.8201, 2.2625], device='cuda:1')
+2024-07-29 10:51:38,763 INFO [train.py:1146] (1/4) Epoch 20, validation: loss=0.1604, simple_loss=0.262, pruned_loss=0.02938, over 944034.00 frames.
+2024-07-29 10:51:38,764 INFO [train.py:1147] (1/4) Maximum memory allocated so far is 4129MB
+2024-07-29 10:52:40,408 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=270993.3333333333, ans=0.125
+2024-07-29 10:52:43,626 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=270993.3333333333, ans=0.0
+2024-07-29 10:53:03,159 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.59 vs. limit=15.0
+2024-07-29 10:53:22,661 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=271020.0, ans=0.5
+2024-07-29 10:53:23,584 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271020.0, ans=0.1
+2024-07-29 10:53:27,063 INFO [train.py:1114] (1/4) Epoch 20, batch 9050, loss[loss=0.161, simple_loss=0.2461, pruned_loss=0.03794, over 4516.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2605, pruned_loss=0.03887, over 933878.53 frames. ], batch size: 10, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:54:18,966 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=271086.6666666667, ans=0.125
+2024-07-29 10:54:22,474 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.730e+01 6.143e+01 7.007e+01 1.074e+02, threshold=1.229e+02, percent-clipped=0.0
+2024-07-29 10:54:30,291 INFO [train.py:1114] (1/4) Epoch 20, batch 9100, loss[loss=0.1731, simple_loss=0.267, pruned_loss=0.0396, over 4930.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2606, pruned_loss=0.03887, over 936566.05 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:54:40,264 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.14 vs. limit=15.0
+2024-07-29 10:54:59,166 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.65 vs. limit=15.0
+2024-07-29 10:55:05,020 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0
+2024-07-29 10:55:06,316 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0
+2024-07-29 10:55:13,829 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271153.3333333333, ans=0.1
+2024-07-29 10:55:15,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=271153.3333333333, ans=0.125
+2024-07-29 10:55:17,114 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=271153.3333333333, ans=0.125
+2024-07-29 10:55:18,292 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=271153.3333333333, ans=0.1
+2024-07-29 10:55:18,465 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=271153.3333333333, ans=0.05
+2024-07-29 10:55:19,601 INFO [train.py:1114] (1/4) Epoch 20, batch 9150, loss[loss=0.1882, simple_loss=0.2689, pruned_loss=0.05369, over 4811.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2613, pruned_loss=0.03896, over 935952.07 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:56:34,834 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=271206.6666666667, ans=0.125
+2024-07-29 10:56:43,321 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 10:56:51,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=271220.0, ans=0.025
+2024-07-29 10:56:52,955 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.764e+01 6.206e+01 7.056e+01 9.843e+01, threshold=1.241e+02, percent-clipped=0.0
+2024-07-29 10:56:55,101 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0
+2024-07-29 10:56:55,338 INFO [train.py:1114] (1/4) Epoch 20, batch 9200, loss[loss=0.1574, simple_loss=0.2437, pruned_loss=0.03552, over 4844.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2605, pruned_loss=0.0388, over 937555.95 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:56:56,032 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271233.3333333333, ans=0.125
+2024-07-29 10:56:57,551 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.02 vs. limit=15.0
+2024-07-29 10:57:07,717 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=271246.6666666667, ans=0.125
+2024-07-29 10:57:11,624 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.28 vs. limit=12.0
+2024-07-29 10:57:12,680 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271260.0, ans=0.1
+2024-07-29 10:57:13,710 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=271260.0, ans=15.0
+2024-07-29 10:57:31,170 INFO [train.py:1114] (1/4) Epoch 20, batch 9250, loss[loss=0.2102, simple_loss=0.3037, pruned_loss=0.05837, over 4635.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2612, pruned_loss=0.03895, over 938155.18 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:57:32,594 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271300.0, ans=0.125
+2024-07-29 10:57:33,166 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=271300.0, ans=0.1
+2024-07-29 10:57:36,400 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=271300.0, ans=6.0
+2024-07-29 10:57:39,199 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=271313.3333333333, ans=0.2
+2024-07-29 10:57:40,382 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271313.3333333333, ans=0.1
+2024-07-29 10:57:40,994 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=271313.3333333333, ans=0.0
+2024-07-29 10:57:47,894 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0
+2024-07-29 10:57:49,061 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=271326.6666666667, ans=0.0
+2024-07-29 10:57:51,338 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=271326.6666666667, ans=0.0
+2024-07-29 10:57:55,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271340.0, ans=0.1
+2024-07-29 10:57:55,354 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=271340.0, ans=0.125
+2024-07-29 10:58:01,944 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=271340.0, ans=0.0
+2024-07-29 10:58:02,699 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271353.3333333333, ans=0.1
+2024-07-29 10:58:11,147 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.724e+01 6.498e+01 7.478e+01 1.094e+02, threshold=1.300e+02, percent-clipped=0.0
+2024-07-29 10:58:14,102 INFO [train.py:1114] (1/4) Epoch 20, batch 9300, loss[loss=0.1515, simple_loss=0.2463, pruned_loss=0.02835, over 4770.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2607, pruned_loss=0.03902, over 937746.83 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 10:58:16,217 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=271366.6666666667, ans=0.0
+2024-07-29 10:58:18,184 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271366.6666666667, ans=0.125
+2024-07-29 10:58:37,759 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=271380.0, ans=0.0
+2024-07-29 10:58:49,054 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=271380.0, ans=0.0
+2024-07-29 10:58:59,879 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. limit=12.0
+2024-07-29 10:59:08,989 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271406.6666666667, ans=0.1
+2024-07-29 11:01:37,035 INFO [train.py:1114] (1/4) Epoch 20, batch 9350, loss[loss=0.1455, simple_loss=0.2275, pruned_loss=0.03177, over 4810.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.261, pruned_loss=0.03886, over 934836.71 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 32.0
+2024-07-29 11:02:04,107 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0
+2024-07-29 11:02:16,475 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.29 vs. limit=6.0
+2024-07-29 11:02:49,550 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. limit=10.0
+2024-07-29 11:02:52,730 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=271460.0, ans=0.125
+2024-07-29 11:03:03,097 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0
+2024-07-29 11:03:07,177 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=271486.6666666667, ans=0.125
+2024-07-29 11:03:21,196 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=271486.6666666667, ans=0.125
+2024-07-29 11:03:22,475 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.751e+01 6.401e+01 7.888e+01 1.207e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-29 11:03:23,460 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0
+2024-07-29 11:03:24,391 INFO [train.py:1114] (1/4) Epoch 20, batch 9400, loss[loss=0.1993, simple_loss=0.2864, pruned_loss=0.0561, over 4694.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2613, pruned_loss=0.03854, over 932556.73 frames. ], batch size: 13, lr: 3.67e-03, grad_scale: 32.0
+2024-07-29 11:03:25,835 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 11:03:32,991 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=271513.3333333333, ans=0.125
+2024-07-29 11:03:43,370 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=271540.0, ans=0.1
+2024-07-29 11:03:49,532 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=271540.0, ans=0.125
+2024-07-29 11:04:02,881 INFO [train.py:1114] (1/4) Epoch 20, batch 9450, loss[loss=0.1622, simple_loss=0.2455, pruned_loss=0.03945, over 4817.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2612, pruned_loss=0.03866, over 931954.36 frames. ], batch size: 11, lr: 3.67e-03, grad_scale: 32.0
+2024-07-29 11:04:11,241 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271580.0, ans=0.1
+2024-07-29 11:04:14,000 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.25 vs. limit=15.0
+2024-07-29 11:04:27,445 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=271606.6666666667, ans=0.125
+2024-07-29 11:04:33,852 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.763e+01 6.160e+01 6.815e+01 1.077e+02, threshold=1.232e+02, percent-clipped=0.0
+2024-07-29 11:04:35,846 INFO [train.py:1114] (1/4) Epoch 20, batch 9500, loss[loss=0.1722, simple_loss=0.2554, pruned_loss=0.04448, over 4711.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2607, pruned_loss=0.03875, over 934518.62 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0
+2024-07-29 11:04:40,959 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 11:04:51,961 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=271660.0, ans=0.0
+2024-07-29 11:04:55,367 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=271673.3333333333, ans=0.125
+2024-07-29 11:04:57,791 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271673.3333333333, ans=0.1
+2024-07-29 11:05:02,660 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271686.6666666667, ans=0.1
+2024-07-29 11:05:05,345 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271686.6666666667, ans=0.1
+2024-07-29 11:05:08,685 INFO [train.py:1114] (1/4) Epoch 20, batch 9550, loss[loss=0.177, simple_loss=0.2681, pruned_loss=0.04296, over 4784.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2614, pruned_loss=0.0395, over 931919.36 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0
+2024-07-29 11:05:10,916 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.72 vs.
limit=22.5 +2024-07-29 11:05:11,164 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=271700.0, ans=0.025 +2024-07-29 11:05:14,189 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=271713.3333333333, ans=0.2 +2024-07-29 11:05:16,610 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=271713.3333333333, ans=0.0 +2024-07-29 11:05:39,600 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=271726.6666666667, ans=0.2 +2024-07-29 11:05:54,353 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=271753.3333333333, ans=0.04949747468305833 +2024-07-29 11:05:54,822 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271753.3333333333, ans=0.1 +2024-07-29 11:05:59,787 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271753.3333333333, ans=0.1 +2024-07-29 11:06:00,808 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.730e+01 6.530e+01 7.456e+01 1.001e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 11:06:02,897 INFO [train.py:1114] (1/4) Epoch 20, batch 9600, loss[loss=0.2445, simple_loss=0.3129, pruned_loss=0.08803, over 3468.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2627, pruned_loss=0.04021, over 930982.38 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:03,999 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.56 vs. limit=6.0 +2024-07-29 11:06:05,657 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=271766.6666666667, ans=0.125 +2024-07-29 11:06:12,235 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271780.0, ans=0.125 +2024-07-29 11:06:16,509 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271793.3333333333, ans=0.125 +2024-07-29 11:06:20,980 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=271793.3333333333, ans=0.0 +2024-07-29 11:06:22,926 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=271806.6666666667, ans=0.025 +2024-07-29 11:06:24,034 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:06:30,452 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271820.0, ans=0.125 +2024-07-29 11:06:37,472 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=271820.0, ans=0.0 +2024-07-29 11:06:39,303 INFO [train.py:1114] (1/4) Epoch 20, batch 9650, loss[loss=0.1864, simple_loss=0.2726, pruned_loss=0.05012, over 4843.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.0405, over 927370.48 frames. 
], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:07:07,596 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.25 vs. limit=15.0 +2024-07-29 11:07:07,651 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.54 vs. limit=15.0 +2024-07-29 11:07:16,162 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.57 vs. limit=15.0 +2024-07-29 11:07:17,751 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=271886.6666666667, ans=0.2 +2024-07-29 11:07:17,776 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:07:21,934 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.811e+01 6.589e+01 7.580e+01 1.190e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 11:07:22,098 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=271886.6666666667, ans=0.125 +2024-07-29 11:07:23,848 INFO [train.py:1114] (1/4) Epoch 20, batch 9700, loss[loss=0.1862, simple_loss=0.2864, pruned_loss=0.04298, over 4241.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2634, pruned_loss=0.04008, over 924781.90 frames. ], batch size: 25, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:07:24,099 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.23 vs. limit=15.0 +2024-07-29 11:07:26,329 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=271900.0, ans=0.125 +2024-07-29 11:07:34,849 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=271913.3333333333, ans=0.125 +2024-07-29 11:07:35,993 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=271913.3333333333, ans=0.0 +2024-07-29 11:07:39,119 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=271913.3333333333, ans=0.2 +2024-07-29 11:07:58,314 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=271953.3333333333, ans=0.0 +2024-07-29 11:08:09,756 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=271953.3333333333, ans=0.0 +2024-07-29 11:08:10,032 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.16 vs. limit=15.0 +2024-07-29 11:08:12,201 INFO [train.py:1114] (1/4) Epoch 20, batch 9750, loss[loss=0.1869, simple_loss=0.2869, pruned_loss=0.04349, over 4675.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2633, pruned_loss=0.04017, over 925306.99 frames. 
], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:08:14,870 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=271966.6666666667, ans=0.2 +2024-07-29 11:08:15,553 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-29 11:08:16,015 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-07-29 11:08:41,000 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271993.3333333333, ans=0.1 +2024-07-29 11:09:06,960 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.621e+01 6.289e+01 7.582e+01 9.528e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 11:09:10,467 INFO [train.py:1114] (1/4) Epoch 20, batch 9800, loss[loss=0.1621, simple_loss=0.258, pruned_loss=0.03311, over 4703.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2615, pruned_loss=0.03921, over 925166.43 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:09:11,376 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.01 vs. limit=22.5 +2024-07-29 11:09:12,404 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=272033.3333333333, ans=0.125 +2024-07-29 11:09:12,544 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=272033.3333333333, ans=0.07 +2024-07-29 11:09:23,073 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:10:08,017 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=272086.6666666667, ans=0.0 +2024-07-29 11:10:13,502 INFO [train.py:1114] (1/4) Epoch 20, batch 9850, loss[loss=0.1787, simple_loss=0.2769, pruned_loss=0.04024, over 4891.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2615, pruned_loss=0.03952, over 927987.60 frames. ], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:43,567 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.014e+01 6.927e+01 8.025e+01 1.186e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-29 11:10:44,348 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=272153.3333333333, ans=0.0 +2024-07-29 11:10:45,486 INFO [train.py:1114] (1/4) Epoch 20, batch 9900, loss[loss=0.1825, simple_loss=0.2819, pruned_loss=0.04157, over 4846.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2621, pruned_loss=0.0399, over 927023.70 frames. 
], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:47,482 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=272166.6666666667, ans=0.0 +2024-07-29 11:10:48,121 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=272166.6666666667, ans=0.2 +2024-07-29 11:11:05,906 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=272206.6666666667, ans=0.0 +2024-07-29 11:11:10,821 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=272220.0, ans=0.1 +2024-07-29 11:11:13,815 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272220.0, ans=0.1 +2024-07-29 11:11:16,892 INFO [train.py:1114] (1/4) Epoch 20, batch 9950, loss[loss=0.1623, simple_loss=0.2435, pruned_loss=0.04053, over 4811.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2624, pruned_loss=0.04058, over 929634.33 frames. ], batch size: 11, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:11:26,819 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=272246.6666666667, ans=0.1 +2024-07-29 11:11:47,598 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.87 vs. limit=22.5 +2024-07-29 11:12:02,028 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=272286.6666666667, ans=0.125 +2024-07-29 11:12:02,406 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.836e+01 6.511e+01 7.365e+01 1.166e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 11:12:04,274 INFO [train.py:1114] (1/4) Epoch 20, batch 10000, loss[loss=0.1921, simple_loss=0.2876, pruned_loss=0.04824, over 4635.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2659, pruned_loss=0.04184, over 927255.90 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:12:39,247 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:12:42,913 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.95 vs. limit=22.5 +2024-07-29 11:12:43,607 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.10 vs. limit=15.0 +2024-07-29 11:12:43,825 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=272353.3333333333, ans=0.0 +2024-07-29 11:12:43,950 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=272353.3333333333, ans=0.125 +2024-07-29 11:12:48,246 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=272366.6666666667, ans=0.025 +2024-07-29 11:12:48,743 INFO [train.py:1114] (1/4) Epoch 20, batch 10050, loss[loss=0.2233, simple_loss=0.2952, pruned_loss=0.07567, over 3537.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2695, pruned_loss=0.04336, over 915983.52 frames. 
], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:12:51,822 INFO [scaling.py:1120] (1/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:13:08,805 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=272380.0, ans=0.07 +2024-07-29 11:14:05,422 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=272406.6666666667, ans=0.125 +2024-07-29 11:14:41,541 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.88 vs. limit=22.5 +2024-07-29 11:14:45,434 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.969e+01 6.772e+01 7.755e+01 1.002e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 11:14:47,459 INFO [train.py:1114] (1/4) Epoch 20, batch 10100, loss[loss=0.203, simple_loss=0.293, pruned_loss=0.05651, over 3147.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2729, pruned_loss=0.04711, over 862947.75 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:14:51,713 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.10 vs. limit=15.0 +2024-07-29 11:14:58,778 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=272446.6666666667, ans=0.125 +2024-07-29 11:14:59,416 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=272446.6666666667, ans=0.2 +2024-07-29 11:15:17,309 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=272486.6666666667, ans=0.0 +2024-07-29 11:15:19,709 INFO [train.py:1114] (1/4) Epoch 20, batch 10150, loss[loss=0.2052, simple_loss=0.2787, pruned_loss=0.06592, over 3285.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2753, pruned_loss=0.04988, over 822151.56 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:15:31,633 INFO [scaling.py:1024] (1/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=6.06 vs. limit=12.0 +2024-07-29 11:15:33,216 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=272526.6666666667, ans=0.125 +2024-07-29 11:15:33,617 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.whiten.whitening_limit, batch_count=272526.6666666667, ans=15.0 +2024-07-29 11:15:44,798 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=272540.0, ans=0.125 +2024-07-29 11:15:52,495 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=272553.3333333333, ans=10.0 +2024-07-29 11:15:53,607 WARNING [optim.py:487] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.562e+01 6.747e+01 7.203e+01 7.565e+01 9.241e+01, threshold=1.441e+02, percent-clipped=0.0 +2024-07-29 11:15:58,177 INFO [train.py:1114] (1/4) Epoch 20, batch 10200, loss[loss=0.1851, simple_loss=0.262, pruned_loss=0.0541, over 3447.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2774, pruned_loss=0.05242, over 789656.70 frames. 
], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:18:56,640 INFO [scaling.py:214] (1/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=272580.0, ans=0.125 +2024-07-29 11:19:22,578 INFO [train.py:1387] (1/4) Done! diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-2 b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-2 new file mode 100644 index 0000000000000000000000000000000000000000..5af60c5d6b35e534709c5d9164da36cf9addc5de --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-2 @@ -0,0 +1,28584 @@ +2024-07-27 09:10:10,827 INFO [train.py:1182] (2/4) Training started +2024-07-27 09:10:10,829 INFO [train.py:1192] (2/4) Device: cuda:2 +2024-07-27 09:10:10,832 INFO [train.py:1210] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2658.int.cedar.computecanada.ca', 'IP address': '172.16.146.95'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('zipformer/libri/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 200.0, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 
'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500} +2024-07-27 09:10:10,832 INFO [train.py:1212] (2/4) About to create model +2024-07-27 09:10:23,800 INFO [train.py:1216] (2/4) Number of model parameters: 65549011 +2024-07-27 09:10:24,714 INFO [train.py:1231] (2/4) Using DDP +2024-07-27 09:11:00,503 INFO [asr_datamodule.py:893] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2024-07-27 09:11:00,828 INFO [asr_datamodule.py:696] (2/4) Disable MUSAN +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:714] (2/4) Enable SpecAugment +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:715] (2/4) Time warp factor: 80 +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:725] (2/4) Num frame mask: 10 +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:738] (2/4) About to create train dataset +2024-07-27 09:11:00,829 INFO [asr_datamodule.py:765] (2/4) Using DynamicBucketingSampler. +2024-07-27 09:11:02,439 INFO [asr_datamodule.py:782] (2/4) About to create train dataloader +2024-07-27 09:11:02,446 INFO [asr_datamodule.py:910] (2/4) About to get dev-clean cuts +2024-07-27 09:11:02,592 INFO [asr_datamodule.py:917] (2/4) About to get dev-other cuts +2024-07-27 09:11:03,488 INFO [asr_datamodule.py:813] (2/4) About to create dev dataset +2024-07-27 09:11:03,805 INFO [asr_datamodule.py:830] (2/4) About to create dev dataloader +2024-07-27 09:11:03,806 INFO [train.py:1435] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2024-07-27 09:17:48,870 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=192, metric=45.97 vs. limit=7.5 +2024-07-27 09:17:49,697 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 3360MB +2024-07-27 09:17:50,297 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 3360MB +2024-07-27 09:17:54,281 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 3360MB +2024-07-27 09:17:55,227 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 3360MB +2024-07-27 09:18:08,384 INFO [scaling.py:1024] (2/4) Whitening: name=None, num_groups=1, num_channels=288, metric=63.81 vs. limit=5.0 +2024-07-27 09:18:08,568 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 3360MB +2024-07-27 09:18:09,383 INFO [train.py:1463] (2/4) Maximum memory allocated so far is 3360MB +2024-07-27 09:18:51,917 INFO [train.py:1114] (2/4) Epoch 1, batch 0, loss[loss=7.77, simple_loss=7.077, pruned_loss=6.916, over 4854.00 frames. ], tot_loss[loss=7.77, simple_loss=7.077, pruned_loss=6.916, over 4854.00 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 2.0 +2024-07-27 09:18:51,917 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 09:19:27,484 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=7.631, simple_loss=6.945, pruned_loss=6.846, over 944034.00 frames. 
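The recurring `ScheduledFloat` lines in these logs report the current value (`ans`) of a training hyperparameter that is interpolated as a function of the global batch count: dropout probabilities, skip rates, balancer probabilities, and whitening limits are all annealed on such schedules as training progresses. Below is a minimal sketch of that piecewise-linear scheduling idea; it is a hypothetical re-implementation for illustration, not the `scaling.py` source, and the breakpoints in the example are assumed values rather than ones taken from this run.

```python
# Sketch of a piecewise-linear hyperparameter schedule keyed on batch count,
# in the spirit of the "ScheduledFloat: name=..., batch_count=..., ans=..."
# log entries above and below. Breakpoints here are assumed, not from the run.
from bisect import bisect_right


class ScheduledFloatSketch:
    def __init__(self, *points):
        # points: (batch_count, value) pairs defining the schedule.
        self.points = sorted(points)

    def value_at(self, batch_count: float) -> float:
        xs = [x for x, _ in self.points]
        ys = [y for _, y in self.points]
        if batch_count <= xs[0]:
            return ys[0]   # clamp before the first breakpoint
        if batch_count >= xs[-1]:
            return ys[-1]  # clamp after the last breakpoint
        i = bisect_right(xs, batch_count)
        t = (batch_count - xs[i - 1]) / (xs[i] - xs[i - 1])
        return ys[i - 1] + t * (ys[i] - ys[i - 1])  # linear interpolation


# e.g. a dropout that anneals from 0.3 to 0.1 over the first 20k batches;
# far past the last breakpoint the value stays clamped, which is why entries
# late in training report a constant value such as "ans=0.1".
dropout_p = ScheduledFloatSketch((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value_at(271153.33))  # 0.1
```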
+2024-07-27 09:19:27,485 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 3377MB +2024-07-27 09:19:29,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=0.0, ans=0.2 +2024-07-27 09:19:31,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=0.0, ans=0.25 +2024-07-27 09:19:40,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=0.0, ans=5.0 +2024-07-27 09:19:48,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13.333333333333334, ans=0.499375 +2024-07-27 09:19:52,383 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+02 9.392e+02 1.009e+03 1.270e+03 1.305e+03, threshold=4.037e+03, percent-clipped=0.0 +2024-07-27 09:19:55,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=13.333333333333334, ans=0.20020000000000002 +2024-07-27 09:19:59,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=13.333333333333334, ans=0.1995 +2024-07-27 09:20:00,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=64.45 vs. limit=5.006666666666667 +2024-07-27 09:20:06,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=344.17 vs. limit=5.006666666666667 +2024-07-27 09:20:09,143 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 2.100e+02 8.784e+02 1.111e+03 1.403e+03, threshold=3.513e+03, percent-clipped=0.0 +2024-07-27 09:20:12,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=26.666666666666668, ans=0.49875 +2024-07-27 09:20:19,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=507.00 vs. limit=7.52 +2024-07-27 09:20:26,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=280.05 vs. limit=7.51 +2024-07-27 09:20:28,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=363.79 vs. limit=7.515 +2024-07-27 09:20:36,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=305.07 vs. limit=7.515 +2024-07-27 09:20:36,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=443.25 vs. limit=7.515 +2024-07-27 09:20:40,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=53.333333333333336, ans=0.4975 +2024-07-27 09:20:41,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=23.12 vs. 
limit=7.52 +2024-07-27 09:20:41,324 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 1.821e+02 2.209e+02 8.784e+02 1.403e+03, threshold=8.837e+02, percent-clipped=0.0 +2024-07-27 09:20:41,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=242.21 vs. limit=7.52 +2024-07-27 09:20:41,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=231.78 vs. limit=5.026666666666666 +2024-07-27 09:20:41,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=395.47 vs. limit=5.026666666666666 +2024-07-27 09:20:50,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=53.333333333333336, ans=0.4975 +2024-07-27 09:20:55,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=336.71 vs. limit=7.52 +2024-07-27 09:21:32,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=66.66666666666667, ans=0.496875 +2024-07-27 09:21:33,269 INFO [train.py:1114] (2/4) Epoch 1, batch 50, loss[loss=1.299, simple_loss=1.151, pruned_loss=1.324, over 4619.00 frames. ], tot_loss[loss=2.99, simple_loss=2.745, pruned_loss=2.381, over 206516.85 frames. ], batch size: 11, lr: 2.48e-02, grad_scale: 1.0 +2024-07-27 09:21:37,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=66.66666666666667, ans=0.8976666666666667 +2024-07-27 09:21:40,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=29.91 vs. limit=7.55 +2024-07-27 09:21:41,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=430.03 vs. limit=5.033333333333333 +2024-07-27 09:21:44,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80.0, ans=0.29919999999999997 +2024-07-27 09:21:47,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=80.0, ans=0.04975 +2024-07-27 09:21:58,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=243.01 vs. limit=7.53 +2024-07-27 09:22:00,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=287.81 vs. limit=7.535 +2024-07-27 09:22:16,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=93.33333333333333, ans=7.535 +2024-07-27 09:22:16,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=270.76 vs. limit=5.046666666666667 +2024-07-27 09:22:19,472 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=151.81 vs. 
limit=5.053333333333334 +2024-07-27 09:22:24,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=41.21 vs. limit=7.58 +2024-07-27 09:22:36,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106.66666666666667, ans=0.495 +2024-07-27 09:22:37,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=120.0, ans=0.494375 +2024-07-27 09:22:37,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=120.0, ans=0.049625 +2024-07-27 09:22:54,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=47.24 vs. limit=7.545 +2024-07-27 09:23:04,785 INFO [train.py:1114] (2/4) Epoch 1, batch 100, loss[loss=1.108, simple_loss=0.9614, pruned_loss=1.172, over 4644.00 frames. ], tot_loss[loss=2.048, simple_loss=1.852, pruned_loss=1.792, over 365023.20 frames. ], batch size: 12, lr: 2.70e-02, grad_scale: 2.0 +2024-07-27 09:23:06,814 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.579e+01 2.513e+01 6.174e+01 1.938e+02 1.403e+03, threshold=1.235e+02, percent-clipped=0.0 +2024-07-27 09:23:09,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=49.95 vs. limit=7.6 +2024-07-27 09:23:13,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=133.33333333333334, ans=0.5 +2024-07-27 09:23:30,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=107.00 vs. limit=7.555 +2024-07-27 09:23:58,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=173.33333333333334, ans=0.491875 +2024-07-27 09:24:01,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=87.87 vs. limit=7.63 +2024-07-27 09:24:03,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=173.33333333333334, ans=0.491875 +2024-07-27 09:24:04,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=173.33333333333334, ans=0.7517333333333334 +2024-07-27 09:24:11,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=238.35 vs. limit=7.57 +2024-07-27 09:24:14,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=186.66666666666666, ans=4.074666666666666 +2024-07-27 09:24:15,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=36.95 vs. limit=4.074666666666666 +2024-07-27 09:24:19,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=54.17 vs. 
limit=7.65 +2024-07-27 09:24:19,869 INFO [train.py:1114] (2/4) Epoch 1, batch 150, loss[loss=0.9691, simple_loss=0.827, pruned_loss=1.032, over 4616.00 frames. ], tot_loss[loss=1.658, simple_loss=1.478, pruned_loss=1.541, over 493620.35 frames. ], batch size: 11, lr: 2.93e-02, grad_scale: 2.0 +2024-07-27 09:24:20,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=200.0, ans=0.09875 +2024-07-27 09:24:25,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=3.03 +2024-07-27 09:24:25,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.01 vs. limit=3.03 +2024-07-27 09:24:26,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=44.47 vs. limit=7.65 +2024-07-27 09:24:29,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=45.67 vs. limit=7.575 +2024-07-27 09:24:40,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=185.57 vs. limit=7.58 +2024-07-27 09:24:48,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=46.67 vs. limit=7.67 +2024-07-27 09:24:51,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=70.40 vs. limit=7.585 +2024-07-27 09:24:54,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=139.04 vs. limit=7.585 +2024-07-27 09:24:54,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=234.48 vs. limit=7.59 +2024-07-27 09:25:06,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.whiten.whitening_limit, batch_count=240.0, ans=4.096 +2024-07-27 09:25:09,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=41.65 vs. limit=7.595 +2024-07-27 09:25:11,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=89.33 vs. limit=5.126666666666667 +2024-07-27 09:25:15,196 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=73.63 vs. limit=7.69 +2024-07-27 09:25:22,974 INFO [train.py:1114] (2/4) Epoch 1, batch 200, loss[loss=1.061, simple_loss=0.9048, pruned_loss=1.057, over 4497.00 frames. ], tot_loss[loss=1.437, simple_loss=1.267, pruned_loss=1.374, over 593200.28 frames. 
], batch size: 21, lr: 3.15e-02, grad_scale: 4.0 +2024-07-27 09:25:23,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266.6666666666667, ans=0.4875 +2024-07-27 09:25:24,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.807e+01 2.398e+01 2.890e+01 3.614e+01 1.455e+02, threshold=5.780e+01, percent-clipped=1.0 +2024-07-27 09:25:30,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266.6666666666667, ans=0.4875 +2024-07-27 09:25:52,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=280.0, ans=0.29719999999999996 +2024-07-27 09:25:53,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=18.47 vs. limit=4.112 +2024-07-27 09:25:56,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=280.0, ans=0.486875 +2024-07-27 09:26:01,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=195.02 vs. limit=7.61 +2024-07-27 09:26:03,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.30 vs. limit=5.1466666666666665 +2024-07-27 09:26:03,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=30.97 vs. limit=5.073333333333333 +2024-07-27 09:26:07,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=144.83 vs. limit=7.61 +2024-07-27 09:26:28,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.22 vs. limit=3.048 +2024-07-27 09:26:29,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=152.00 vs. limit=7.62 +2024-07-27 09:26:34,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=45.59 vs. limit=5.16 +2024-07-27 09:26:35,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=320.0, ans=0.46 +2024-07-27 09:26:42,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=320.0, ans=0.049 +2024-07-27 09:26:44,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.99 vs. limit=7.74 +2024-07-27 09:26:45,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.49 vs. limit=7.62 +2024-07-27 09:26:46,531 INFO [train.py:1114] (2/4) Epoch 1, batch 250, loss[loss=1.033, simple_loss=0.8681, pruned_loss=1.028, over 4650.00 frames. ], tot_loss[loss=1.302, simple_loss=1.137, pruned_loss=1.259, over 670070.85 frames. 
], batch size: 16, lr: 3.38e-02, grad_scale: 4.0 +2024-07-27 09:26:47,965 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=11.22 vs. limit=5.083333333333333 +2024-07-27 09:26:50,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=204.05 vs. limit=7.625 +2024-07-27 09:26:55,748 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=14.59 vs. limit=7.625 +2024-07-27 09:27:03,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=7.63 +2024-07-27 09:27:08,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=360.0, ans=0.2964 +2024-07-27 09:27:14,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.46 vs. limit=5.09 +2024-07-27 09:27:24,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=373.3333333333333, ans=0.4825 +2024-07-27 09:27:25,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=373.3333333333333, ans=0.4825 +2024-07-27 09:27:28,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=386.6666666666667, ans=0.2058 +2024-07-27 09:27:31,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.04 vs. limit=5.193333333333333 +2024-07-27 09:27:31,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.63 vs. limit=7.645 +2024-07-27 09:27:31,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=386.6666666666667, ans=0.481875 +2024-07-27 09:27:33,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=386.6666666666667, ans=0.481875 +2024-07-27 09:27:38,727 INFO [train.py:1114] (2/4) Epoch 1, batch 300, loss[loss=1.022, simple_loss=0.854, pruned_loss=0.9852, over 4791.00 frames. ], tot_loss[loss=1.213, simple_loss=1.049, pruned_loss=1.177, over 729712.69 frames. ], batch size: 15, lr: 3.60e-02, grad_scale: 8.0 +2024-07-27 09:27:40,104 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.723e+01 3.145e+01 3.570e+01 4.574e+01 1.008e+02, threshold=7.140e+01, percent-clipped=16.0 +2024-07-27 09:27:44,196 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=60.28 vs. limit=7.65 +2024-07-27 09:27:45,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=13.31 vs. limit=7.65 +2024-07-27 09:27:53,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=78.07 vs. 
limit=7.655 +2024-07-27 09:27:56,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=22.84 vs. limit=7.655 +2024-07-27 09:28:01,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=413.3333333333333, ans=0.480625 +2024-07-27 09:28:02,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=135.83 vs. limit=5.213333333333333 +2024-07-27 09:28:03,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=31.13 vs. limit=7.82 +2024-07-27 09:28:08,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=81.71 vs. limit=7.66 +2024-07-27 09:28:17,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=211.34 vs. limit=5.22 +2024-07-27 09:28:19,125 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=8.16 vs. limit=4.176 +2024-07-27 09:28:20,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=77.40 vs. limit=7.67 +2024-07-27 09:28:24,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=453.3333333333333, ans=0.47875 +2024-07-27 09:28:26,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.32 vs. limit=4.181333333333333 +2024-07-27 09:28:31,001 INFO [train.py:1114] (2/4) Epoch 1, batch 350, loss[loss=0.8345, simple_loss=0.6874, pruned_loss=0.8043, over 4932.00 frames. ], tot_loss[loss=1.156, simple_loss=0.9899, pruned_loss=1.119, over 775707.85 frames. ], batch size: 12, lr: 3.83e-02, grad_scale: 8.0 +2024-07-27 09:28:32,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=466.6666666666667, ans=0.29533333333333334 +2024-07-27 09:28:33,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=27.24 vs. limit=7.675 +2024-07-27 09:28:37,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=7.675 +2024-07-27 09:28:43,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=480.0, ans=0.8832 +2024-07-27 09:28:54,384 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=22.24 vs. 
limit=7.685 +2024-07-27 09:28:59,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=493.3333333333333, ans=0.2074 +2024-07-27 09:28:59,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=493.3333333333333, ans=0.476875 +2024-07-27 09:29:05,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=10.37 vs. limit=7.69 +2024-07-27 09:29:23,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.38 vs. limit=7.89 +2024-07-27 09:29:25,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=520.0, ans=0.475625 +2024-07-27 09:29:27,557 INFO [train.py:1114] (2/4) Epoch 1, batch 400, loss[loss=1.01, simple_loss=0.8267, pruned_loss=0.9479, over 4702.00 frames. ], tot_loss[loss=1.109, simple_loss=0.9411, pruned_loss=1.068, over 813353.75 frames. ], batch size: 13, lr: 4.05e-02, grad_scale: 16.0 +2024-07-27 09:29:29,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.881e+01 3.675e+01 4.330e+01 5.451e+01 8.565e+01, threshold=8.660e+01, percent-clipped=3.0 +2024-07-27 09:29:31,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=15.02 vs. limit=7.7 +2024-07-27 09:29:38,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=25.83 vs. limit=7.705 +2024-07-27 09:29:47,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.75 vs. limit=7.92 +2024-07-27 09:29:54,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=560.0, ans=0.8804000000000001 +2024-07-27 09:30:04,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=573.3333333333334, ans=0.473125 +2024-07-27 09:30:04,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=35.55 vs. limit=7.93 +2024-07-27 09:30:20,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=586.6666666666666, ans=0.4725 +2024-07-27 09:31:01,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.46 vs. limit=5.293333333333333 +2024-07-27 09:31:09,352 INFO [train.py:1114] (2/4) Epoch 1, batch 450, loss[loss=0.9355, simple_loss=0.7624, pruned_loss=0.8552, over 4637.00 frames. ], tot_loss[loss=1.075, simple_loss=0.9049, pruned_loss=1.025, over 838410.91 frames. 
], batch size: 13, lr: 4.28e-02, grad_scale: 16.0 +2024-07-27 09:31:12,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=600.0, ans=5.375 +2024-07-27 09:31:19,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=613.3333333333334, ans=0.2092 +2024-07-27 09:31:19,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=613.3333333333334, ans=7.73 +2024-07-27 09:31:32,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=43.01 vs. limit=7.73 +2024-07-27 09:36:51,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=17.04 vs. limit=7.735 +2024-07-27 09:36:52,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.17 vs. limit=5.3133333333333335 +2024-07-27 09:36:53,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=626.6666666666666, ans=0.470625 +2024-07-27 09:37:06,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=20.01 vs. limit=7.74 +2024-07-27 09:37:06,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=41.28 vs. limit=5.32 +2024-07-27 09:37:08,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=640.0, ans=0.08560000000000001 +2024-07-27 09:37:11,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=653.3333333333334, ans=0.20980000000000001 +2024-07-27 09:37:18,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=18.88 vs. limit=7.745 +2024-07-27 09:37:19,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=653.3333333333334, ans=0.5 +2024-07-27 09:37:19,583 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.48 vs. limit=5.163333333333333 +2024-07-27 09:37:22,225 INFO [train.py:1114] (2/4) Epoch 1, batch 500, loss[loss=1.036, simple_loss=0.8436, pruned_loss=0.9168, over 4694.00 frames. ], tot_loss[loss=1.049, simple_loss=0.8766, pruned_loss=0.9871, over 860796.66 frames. ], batch size: 15, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:37:25,284 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+01 3.795e+01 4.382e+01 5.151e+01 8.333e+01, threshold=8.764e+01, percent-clipped=0.0 +2024-07-27 09:37:25,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=666.6666666666666, ans=0.46875 +2024-07-27 09:37:39,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=33.05 vs. 
limit=8.0 +2024-07-27 09:38:02,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=680.0, ans=0.2932 +2024-07-27 09:38:06,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.92 vs. limit=3.102 +2024-07-27 09:38:16,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=27.42 vs. limit=7.76 +2024-07-27 09:38:38,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.87 vs. limit=7.77 +2024-07-27 09:38:51,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=18.32 vs. limit=7.775 +2024-07-27 09:38:55,263 INFO [train.py:1114] (2/4) Epoch 1, batch 550, loss[loss=0.9932, simple_loss=0.8064, pruned_loss=0.8561, over 4586.00 frames. ], tot_loss[loss=1.029, simple_loss=0.855, pruned_loss=0.9525, over 876961.13 frames. ], batch size: 17, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:38:57,223 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.580e-01 +2024-07-27 09:38:59,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.70 vs. limit=8.05 +2024-07-27 09:38:59,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.27 vs. limit=7.775 +2024-07-27 09:39:02,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.80 vs. limit=5.0 +2024-07-27 09:39:04,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=36.29 vs. limit=8.06 +2024-07-27 09:39:09,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=746.6666666666666, ans=0.4066666666666667 +2024-07-27 09:39:21,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=13.62 vs. limit=7.785 +2024-07-27 09:39:29,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=52.24 vs. limit=7.79 +2024-07-27 09:39:33,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=35.71 vs. limit=7.795 +2024-07-27 09:39:41,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=800.0, ans=0.17 +2024-07-27 09:39:43,022 INFO [train.py:1114] (2/4) Epoch 1, batch 600, loss[loss=1.011, simple_loss=0.8164, pruned_loss=0.8557, over 4619.00 frames. ], tot_loss[loss=1.012, simple_loss=0.8364, pruned_loss=0.9215, over 891969.96 frames. 
], batch size: 16, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:39:43,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=800.0, ans=0.292 +2024-07-27 09:39:43,868 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+01 6.137e+01 8.087e+01 1.069e+02 3.258e+02, threshold=1.617e+02, percent-clipped=41.0 +2024-07-27 09:39:45,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=800.0, ans=0.082 +2024-07-27 09:39:49,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.37 vs. limit=8.1 +2024-07-27 09:40:07,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=826.6666666666666, ans=0.29173333333333334 +2024-07-27 09:40:07,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=826.6666666666666, ans=0.46125 +2024-07-27 09:40:23,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.17 vs. limit=5.213333333333333 +2024-07-27 09:40:24,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=853.3333333333334, ans=0.46 +2024-07-27 09:40:29,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=866.6666666666666, ans=0.459375 +2024-07-27 09:40:30,099 INFO [train.py:1114] (2/4) Epoch 1, batch 650, loss[loss=1.001, simple_loss=0.804, pruned_loss=0.8335, over 4767.00 frames. ], tot_loss[loss=1.001, simple_loss=0.8221, pruned_loss=0.896, over 903505.04 frames. ], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:40:35,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=12.38 vs. limit=7.825 +2024-07-27 09:40:35,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=866.6666666666666, ans=0.29133333333333333 +2024-07-27 09:40:35,997 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.19 vs. limit=5.433333333333334 +2024-07-27 09:40:37,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=866.6666666666666, ans=8.15 +2024-07-27 09:41:02,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=906.6666666666666, ans=0.4575 +2024-07-27 09:41:04,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=906.6666666666666, ans=0.5 +2024-07-27 09:41:36,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=18.32 vs. limit=7.845 +2024-07-27 09:41:40,637 INFO [train.py:1114] (2/4) Epoch 1, batch 700, loss[loss=0.9388, simple_loss=0.7565, pruned_loss=0.7578, over 4633.00 frames. ], tot_loss[loss=0.9949, simple_loss=0.8127, pruned_loss=0.8747, over 911383.58 frames. 
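The `lr:` values in the `train.py` lines (4.05e-02 at batch 400, rising to 4.49e-02 by batch 500, then decaying slowly) are consistent with an Eden-style schedule from icefall's `optim.py`: smooth decay in both batch and epoch, plus a linear warmup from half the base rate. A sketch; `base_lr`, `lr_batches`, `lr_epochs`, and `warmup_batches` below are assumed defaults, not values read from this run's command line:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5,
            warmup_batches: float = 500.0) -> float:
    """Eden-style learning rate: decays smoothly with batches and epochs,
    with a linear warmup from 0.5 * base_lr over the first warmup batches."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    warmup = 1.0 if batch >= warmup_batches else 0.5 + 0.5 * batch / warmup_batches
    return base_lr * batch_factor * epoch_factor * warmup

# eden_lr(0.045, batch=400, epoch=0) ~= 0.0405, matching "lr: 4.05e-02" at batch 400.
```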
], batch size: 12, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:41:41,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=8.00 vs. limit=4.373333333333333 +2024-07-27 09:41:41,492 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.672e+01 9.322e+01 1.196e+02 1.686e+02 3.909e+02, threshold=2.392e+02, percent-clipped=30.0 +2024-07-27 09:41:46,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=933.3333333333334, ans=0.1975 +2024-07-27 09:41:46,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=11.49 vs. limit=7.85 +2024-07-27 09:41:54,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=19.25 vs. limit=7.85 +2024-07-27 09:41:55,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=29.61 vs. limit=7.85 +2024-07-27 09:41:55,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.62 vs. limit=8.2 +2024-07-27 09:41:56,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=9.81 vs. limit=8.21 +2024-07-27 09:42:17,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.82 vs. limit=4.378666666666667 +2024-07-27 09:42:17,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.91 vs. limit=5.473333333333334 +2024-07-27 09:42:20,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.65 vs. limit=4.384 +2024-07-27 09:42:21,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=960.0, ans=0.164 +2024-07-27 09:42:24,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.10 vs. limit=7.86 +2024-07-27 09:42:28,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=973.3333333333334, ans=0.8659333333333333 +2024-07-27 09:42:30,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=973.3333333333334, ans=0.2902666666666667 +2024-07-27 09:42:30,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=973.3333333333334, ans=0.454375 +2024-07-27 09:42:39,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=986.6666666666666, ans=0.45375 +2024-07-27 09:42:44,011 INFO [train.py:1114] (2/4) Epoch 1, batch 750, loss[loss=0.9748, simple_loss=0.7819, pruned_loss=0.7756, over 4696.00 frames. ], tot_loss[loss=0.9858, simple_loss=0.8031, pruned_loss=0.8486, over 918097.57 frames. 
], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:42:48,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.41 vs. limit=4.4 +2024-07-27 09:42:49,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=6.30 vs. limit=4.4 +2024-07-27 09:42:56,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1013.3333333333334, ans=0.4525 +2024-07-27 09:42:57,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=1013.3333333333334, ans=0.162 +2024-07-27 09:42:58,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1013.3333333333334, ans=0.4525 +2024-07-27 09:43:03,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=1026.6666666666667, ans=0.1615 +2024-07-27 09:43:04,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1026.6666666666667, ans=0.28973333333333334 +2024-07-27 09:43:09,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=4.416 +2024-07-27 09:43:10,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=1040.0, ans=8.28 +2024-07-27 09:43:11,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=1040.0, ans=0.45125 +2024-07-27 09:43:21,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.96 vs. limit=5.26 +2024-07-27 09:43:22,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.99 vs. limit=7.89 +2024-07-27 09:43:23,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=1040.0, ans=0.45125 +2024-07-27 09:43:29,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.79 vs. limit=7.895 +2024-07-27 09:43:33,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=24.94 vs. limit=8.29 +2024-07-27 09:43:35,569 INFO [train.py:1114] (2/4) Epoch 1, batch 800, loss[loss=0.8187, simple_loss=0.67, pruned_loss=0.6143, over 4856.00 frames. ], tot_loss[loss=0.9732, simple_loss=0.7919, pruned_loss=0.8191, over 923479.35 frames. ], batch size: 12, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:43:36,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=31.67 vs. limit=8.3 +2024-07-27 09:43:36,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.88 vs. 
limit=7.9 +2024-07-27 09:43:36,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 7.305e+01 9.106e+01 1.068e+02 1.961e+02, threshold=1.821e+02, percent-clipped=0.0 +2024-07-27 09:43:36,817 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.29 vs. limit=7.9 +2024-07-27 09:43:39,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=1066.6666666666667, ans=5.666666666666667 +2024-07-27 09:43:40,997 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.34 vs. limit=8.3 +2024-07-27 09:43:41,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.41 vs. limit=7.9 +2024-07-27 09:43:42,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=22.45 vs. limit=7.9 +2024-07-27 09:43:44,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1080.0, ans=0.2892 +2024-07-27 09:43:44,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.47 vs. limit=8.31 +2024-07-27 09:43:45,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.55 vs. limit=8.31 +2024-07-27 09:43:47,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.27 vs. limit=5.54 +2024-07-27 09:44:11,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.62 vs. limit=5.546666666666667 +2024-07-27 09:44:38,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.48 vs. limit=8.32 +2024-07-27 09:44:39,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=7.91 +2024-07-27 09:44:41,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=1106.6666666666667, ans=0.448125 +2024-07-27 09:44:45,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=1106.6666666666667, ans=0.23893333333333333 +2024-07-27 09:45:00,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.98 vs. limit=8.34 +2024-07-27 09:45:01,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.92 vs. limit=8.34 +2024-07-27 09:45:04,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=7.925 +2024-07-27 09:45:05,404 INFO [train.py:1114] (2/4) Epoch 1, batch 850, loss[loss=0.9416, simple_loss=0.773, pruned_loss=0.6898, over 4669.00 frames. 
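The `optim.py` warnings report gradient clipping against a window of recent gradient norms: the five numbers are the [min, 25%, 50%, 75%, max] quantiles of that window, and the threshold is `Clipping_scale` times the median (e.g. above, 1.821e+02 = 2.0 × 9.106e+01). A self-contained sketch of that bookkeeping; the class and its names are illustrative, as icefall folds this into its `ScaledAdam` optimizer:

```python
from collections import deque
import torch

class QuartileClipper:
    """Track recent global grad norms, clip to clipping_scale * median,
    and report quantiles / percent of steps clipped, as in the warnings above."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)   # recent global grad norms
        self.clipped = 0
        self.steps = 0

    def clip_(self, params: list) -> float:
        """Clip gradients in place; returns percent of steps clipped so far."""
        norm = torch.cat([p.grad.reshape(-1) for p in params
                          if p.grad is not None]).norm().item()
        self.norms.append(norm)
        self.steps += 1
        quartiles = torch.quantile(torch.tensor(list(self.norms)),
                                   torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * quartiles[2].item()   # clipping_scale * median
        if norm > threshold:
            self.clipped += 1
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return 100.0 * self.clipped / self.steps
```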
], tot_loss[loss=0.9522, simple_loss=0.7757, pruned_loss=0.7819, over 927344.94 frames. ], batch size: 14, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:46:13,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.84 vs. limit=5.573333333333333 +2024-07-27 09:46:15,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=7.93 +2024-07-27 09:46:21,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.65 vs. limit=7.935 +2024-07-27 09:46:22,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1160.0, ans=0.2884 +2024-07-27 09:46:45,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=7.94 +2024-07-27 09:46:47,695 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.48 vs. limit=7.94 +2024-07-27 09:46:52,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.82 vs. limit=8.38 +2024-07-27 09:46:57,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.62 vs. limit=7.945 +2024-07-27 09:47:00,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.67 vs. limit=8.39 +2024-07-27 09:47:01,746 INFO [train.py:1114] (2/4) Epoch 1, batch 900, loss[loss=0.8081, simple_loss=0.6666, pruned_loss=0.5769, over 4855.00 frames. ], tot_loss[loss=0.9294, simple_loss=0.7592, pruned_loss=0.7439, over 928096.27 frames. ], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:02,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.69 vs. limit=4.48 +2024-07-27 09:47:05,082 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.472e+01 6.615e+01 8.339e+01 1.626e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-27 09:47:08,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.54 vs. limit=5.3 +2024-07-27 09:47:11,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=1200.0, ans=0.44375 +2024-07-27 09:47:13,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=1213.3333333333333, ans=0.04620833333333334 +2024-07-27 09:47:18,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.28 vs. limit=5.303333333333334 +2024-07-27 09:47:19,577 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.82 vs. 
limit=5.303333333333334 +2024-07-27 09:47:19,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.24 vs. limit=7.955 +2024-07-27 09:47:26,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.20 vs. limit=8.42 +2024-07-27 09:47:27,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.24 vs. limit=7.96 +2024-07-27 09:47:30,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.53 vs. limit=5.62 +2024-07-27 09:47:35,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=1240.0, ans=0.21860000000000002 +2024-07-27 09:47:37,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=7.965 +2024-07-27 09:47:39,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=1253.3333333333333, ans=0.8561333333333334 +2024-07-27 09:47:43,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.43 vs. limit=7.97 +2024-07-27 09:47:47,817 INFO [train.py:1114] (2/4) Epoch 1, batch 950, loss[loss=0.7302, simple_loss=0.6135, pruned_loss=0.4973, over 4775.00 frames. ], tot_loss[loss=0.9035, simple_loss=0.7408, pruned_loss=0.7044, over 929230.78 frames. ], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:54,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=1266.6666666666667, ans=0.1525 +2024-07-27 09:47:56,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.33 vs. limit=7.98 +2024-07-27 09:48:00,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.95 vs. limit=8.46 +2024-07-27 09:48:02,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1280.0, ans=0.44 +2024-07-27 09:48:04,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.39 vs. limit=7.98 +2024-07-27 09:48:12,803 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.24 vs. limit=8.47 +2024-07-27 09:48:14,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1293.3333333333333, ans=0.439375 +2024-07-27 09:48:19,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.23 vs. 
limit=7.99 +2024-07-27 09:48:21,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=1306.6666666666667, ans=0.151 +2024-07-27 09:48:31,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=7.995 +2024-07-27 09:48:37,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.54 vs. limit=8.5 +2024-07-27 09:48:38,210 INFO [train.py:1114] (2/4) Epoch 1, batch 1000, loss[loss=0.777, simple_loss=0.6535, pruned_loss=0.5215, over 4962.00 frames. ], tot_loss[loss=0.8794, simple_loss=0.7246, pruned_loss=0.6677, over 929145.31 frames. ], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:48:39,237 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.604e+01 7.221e+01 8.711e+01 1.557e+02, threshold=1.444e+02, percent-clipped=4.0 +2024-07-27 09:48:50,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=8.005 +2024-07-27 09:48:52,336 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.47 vs. limit=8.51 +2024-07-27 09:48:54,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.00 vs. limit=5.336666666666667 +2024-07-27 09:48:56,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=1360.0, ans=0.8524 +2024-07-27 09:48:57,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.95 vs. limit=8.52 +2024-07-27 09:48:58,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=1360.0, ans=0.2364 +2024-07-27 09:49:03,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.59 vs. limit=8.015 +2024-07-27 09:49:06,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.53 vs. limit=5.343333333333334 +2024-07-27 09:49:07,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.53 vs. limit=8.53 +2024-07-27 09:49:09,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.40 vs. limit=8.015 +2024-07-27 09:49:09,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.30 vs. limit=8.53 +2024-07-27 09:49:13,826 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.75 vs. limit=5.693333333333333 +2024-07-27 09:49:16,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.72 vs. 
limit=5.346666666666667 +2024-07-27 09:49:21,081 INFO [train.py:1114] (2/4) Epoch 1, batch 1050, loss[loss=0.7416, simple_loss=0.6354, pruned_loss=0.4767, over 4878.00 frames. ], tot_loss[loss=0.849, simple_loss=0.7035, pruned_loss=0.6275, over 931806.34 frames. ], batch size: 14, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:49:28,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=1400.0, ans=0.434375 +2024-07-27 09:49:28,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1413.3333333333333, ans=0.43374999999999997 +2024-07-27 09:49:32,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=1413.3333333333333, ans=0.8505333333333334 +2024-07-27 09:49:37,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1413.3333333333333, ans=0.28586666666666666 +2024-07-27 09:49:54,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.84 vs. limit=8.035 +2024-07-27 09:49:55,583 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.37 vs. limit=8.57 +2024-07-27 09:49:56,867 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:49:58,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. limit=8.04 +2024-07-27 09:50:00,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=1440.0, ans=0.8496 +2024-07-27 09:50:22,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=1453.3333333333333, ans=0.28546666666666665 +2024-07-27 09:50:23,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=1453.3333333333333, ans=0.431875 +2024-07-27 09:50:23,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.10 vs. limit=8.59 +2024-07-27 09:50:24,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=1453.3333333333333, ans=0.431875 +2024-07-27 09:50:27,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.86 vs. limit=8.045 +2024-07-27 09:50:27,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.75 vs. limit=4.581333333333333 +2024-07-27 09:50:28,340 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=8.045 +2024-07-27 09:50:30,373 INFO [train.py:1114] (2/4) Epoch 1, batch 1100, loss[loss=0.7763, simple_loss=0.6604, pruned_loss=0.4999, over 4900.00 frames. ], tot_loss[loss=0.8186, simple_loss=0.6829, pruned_loss=0.5891, over 934299.53 frames. 
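Each `train.py` line reports a pruned-transducer batch loss of the form `loss = s * simple_loss + p * pruned_loss`, where the weights follow a warmup schedule that favors the simple (unpruned) loss early on. Assuming `warm_step=2000` and `simple_loss_scale=0.5` (assumed defaults, but they reproduce the logged values, e.g. batch 1050: 0.7375 * 0.6354 + 0.5725 * 0.4767 ≈ 0.7416), the combination is:

```python
def combine_transducer_losses(simple_loss: float, pruned_loss: float,
                              batch_idx_train: int,
                              warm_step: int = 2000,
                              simple_loss_scale: float = 0.5) -> float:
    """Warmup-weighted pruned-transducer loss: the simple-loss weight decays
    from 1.0 to simple_loss_scale while the pruned-loss weight ramps from
    0.1 to 1.0 over the first warm_step batches."""
    t = min(batch_idx_train / warm_step, 1.0)
    s = 1.0 - t * (1.0 - simple_loss_scale)   # 1.0 -> simple_loss_scale
    p = 0.1 + 0.9 * t                         # 0.1 -> 1.0
    return s * simple_loss + p * pruned_loss

# batch 400: 0.9 * 0.8267 + 0.28 * 0.9479 ~= 1.01, matching the logged loss.
```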
], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:50:31,167 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+01 6.586e+01 7.875e+01 9.417e+01 1.858e+02, threshold=1.575e+02, percent-clipped=4.0 +2024-07-27 09:50:38,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=1480.0, ans=0.16675 +2024-07-27 09:50:38,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.79 vs. limit=8.61 +2024-07-27 09:50:41,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.17 vs. limit=5.74 +2024-07-27 09:50:43,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.49 vs. limit=8.61 +2024-07-27 09:50:54,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.92 vs. limit=4.597333333333333 +2024-07-27 09:50:56,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=1493.3333333333333, ans=0.14400000000000002 +2024-07-27 09:50:59,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1506.6666666666667, ans=0.429375 +2024-07-27 09:51:14,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=1533.3333333333333, ans=3.23 +2024-07-27 09:51:14,875 INFO [train.py:1114] (2/4) Epoch 1, batch 1150, loss[loss=0.6736, simple_loss=0.5884, pruned_loss=0.4119, over 4886.00 frames. ], tot_loss[loss=0.7933, simple_loss=0.666, pruned_loss=0.5567, over 934263.42 frames. ], batch size: 13, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:51:36,307 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.89 vs. limit=8.66 +2024-07-27 09:51:37,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.90 vs. limit=8.66 +2024-07-27 09:51:46,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=8.085 +2024-07-27 09:51:49,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.73 vs. 
limit=8.67 +2024-07-27 09:51:50,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=1560.0, ans=0.426875 +2024-07-27 09:51:53,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=1573.3333333333333, ans=0.8449333333333333 +2024-07-27 09:51:53,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=1573.3333333333333, ans=0.42625 +2024-07-27 09:51:55,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=1573.3333333333333, ans=0.7657333333333334 +2024-07-27 09:51:58,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1573.3333333333333, ans=0.28426666666666667 +2024-07-27 09:52:02,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.74 vs. limit=8.69 +2024-07-27 09:52:09,214 INFO [train.py:1114] (2/4) Epoch 1, batch 1200, loss[loss=0.6899, simple_loss=0.6029, pruned_loss=0.4184, over 4877.00 frames. ], tot_loss[loss=0.7716, simple_loss=0.6518, pruned_loss=0.5285, over 933318.70 frames. ], batch size: 14, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:10,015 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.977e+01 8.267e+01 1.004e+02 1.485e+02, threshold=1.653e+02, percent-clipped=0.0 +2024-07-27 09:52:18,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.19 vs. limit=8.7 +2024-07-27 09:52:25,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=1613.3333333333333, ans=0.8435333333333334 +2024-07-27 09:52:25,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.02 vs. limit=8.105 +2024-07-27 09:52:32,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.41 vs. limit=5.8133333333333335 +2024-07-27 09:52:41,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=1640.0, ans=8.115 +2024-07-27 09:52:44,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=1640.0, ans=0.8426 +2024-07-27 09:52:54,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.61 vs. limit=8.74 +2024-07-27 09:52:55,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=1666.6666666666667, ans=0.1375 +2024-07-27 09:52:55,871 INFO [train.py:1114] (2/4) Epoch 1, batch 1250, loss[loss=0.6852, simple_loss=0.6005, pruned_loss=0.411, over 4806.00 frames. ], tot_loss[loss=0.7473, simple_loss=0.636, pruned_loss=0.4994, over 937480.37 frames. ], batch size: 15, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:58,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.32 vs. 
limit=8.75 +2024-07-27 09:52:59,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.49 vs. limit=8.75 +2024-07-27 09:53:01,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.93 vs. limit=5.416666666666667 +2024-07-27 09:53:07,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1680.0, ans=0.42125 +2024-07-27 09:53:14,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.99 vs. limit=5.846666666666667 +2024-07-27 09:53:22,149 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.71 vs. limit=8.135 +2024-07-27 09:53:27,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=1706.6666666666667, ans=0.42 +2024-07-27 09:53:28,014 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. limit=3.2560000000000002 +2024-07-27 09:53:28,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.67 vs. limit=8.78 +2024-07-27 09:53:34,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.36 vs. limit=5.43 +2024-07-27 09:53:36,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.97 vs. limit=5.43 +2024-07-27 09:53:41,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=1720.0, ans=0.035 +2024-07-27 09:53:57,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1720.0, ans=0.419375 +2024-07-27 09:53:59,474 INFO [train.py:1114] (2/4) Epoch 1, batch 1300, loss[loss=0.6025, simple_loss=0.5427, pruned_loss=0.3448, over 4710.00 frames. ], tot_loss[loss=0.7209, simple_loss=0.6183, pruned_loss=0.4707, over 939067.13 frames. 
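The `Whitening` lines compare a per-module "whitening metric" against a scheduled limit: the metric is 1.0 when the centered covariance of the activations is a multiple of the identity within each channel group, and grows as the activations become more anisotropic; whenever `metric` exceeds `limit`, the module applies a gradient penalty. A sketch of the metric, following the definition in icefall's `scaling.py` with epsilon and dtype handling simplified:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Returns 1.0 iff the centered covariance of x (per channel group) is a
    multiple of the identity; larger values mean less 'white' activations."""
    x = x.reshape(-1, x.shape[-1])                      # (frames, channels)
    num_frames, num_channels = x.shape
    d = num_channels // num_groups
    x = x.reshape(num_frames, num_groups, d).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)                 # center within each group
    cov = x.transpose(1, 2) @ x                         # (groups, d, d) covariances
    mean_diag = cov.diagonal(dim1=1, dim2=2).mean()     # avg trace(C) / d
    mean_sq_diag = (cov ** 2).sum() / (num_groups * d)  # avg trace(C @ C) / d
    return mean_sq_diag / (mean_diag ** 2 + 1e-20)      # >= 1 by Cauchy-Schwarz

# "metric=10.37 vs. limit=7.69" means this value exceeded the scheduled limit,
# so a gradient penalty nudges the activations toward whiter statistics.
```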
], batch size: 19, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:53:59,597 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:54:00,181 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.513e+01 6.459e+01 7.334e+01 8.641e+01 1.550e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 09:54:01,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=1733.3333333333333, ans=0.41875 +2024-07-27 09:54:13,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1746.6666666666667, ans=0.2825333333333333 +2024-07-27 09:54:21,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=1746.6666666666667, ans=0.41812499999999997 +2024-07-27 09:54:30,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=1760.0, ans=0.0604 +2024-07-27 09:54:51,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.95 vs. limit=8.83 +2024-07-27 09:55:14,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=1786.6666666666667, ans=0.133 +2024-07-27 09:55:17,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1786.6666666666667, ans=0.41625 +2024-07-27 09:55:29,125 INFO [train.py:1114] (2/4) Epoch 1, batch 1350, loss[loss=0.6517, simple_loss=0.5806, pruned_loss=0.3775, over 4747.00 frames. ], tot_loss[loss=0.699, simple_loss=0.6041, pruned_loss=0.4464, over 941118.49 frames. ], batch size: 13, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:55:46,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.92 vs. limit=5.45 +2024-07-27 09:55:49,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.23 vs. limit=8.18 +2024-07-27 09:55:52,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=1813.3333333333333, ans=0.14800000000000002 +2024-07-27 09:56:07,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.64 vs. limit=4.730666666666667 +2024-07-27 09:56:10,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.32 vs. limit=8.879999999999999 +2024-07-27 09:56:16,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.43 vs. limit=5.92 +2024-07-27 09:56:17,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.30 vs. 
limit=5.92 +2024-07-27 09:56:25,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=1853.3333333333333, ans=0.413125 +2024-07-27 09:56:26,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.71 vs. limit=5.463333333333333 +2024-07-27 09:56:29,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.07 vs. limit=8.195 +2024-07-27 09:56:32,587 INFO [train.py:1114] (2/4) Epoch 1, batch 1400, loss[loss=0.5168, simple_loss=0.4799, pruned_loss=0.2807, over 4706.00 frames. ], tot_loss[loss=0.6811, simple_loss=0.5926, pruned_loss=0.4262, over 943054.40 frames. ], batch size: 11, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:56:33,350 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 7.358e+01 8.189e+01 9.683e+01 1.850e+02, threshold=1.638e+02, percent-clipped=1.0 +2024-07-27 09:56:34,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=1866.6666666666667, ans=0.14500000000000002 +2024-07-27 09:56:38,522 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.98 vs. limit=8.9 +2024-07-27 09:56:45,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1866.6666666666667, ans=0.4125 +2024-07-27 09:56:47,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.16 vs. limit=8.9 +2024-07-27 09:56:53,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.01 vs. limit=5.47 +2024-07-27 09:57:00,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=1893.3333333333333, ans=0.14350000000000002 +2024-07-27 09:57:06,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.87 vs. limit=4.757333333333333 +2024-07-27 09:57:07,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.02 vs. limit=8.93 +2024-07-27 09:57:11,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=1906.6666666666667, ans=0.410625 +2024-07-27 09:57:13,141 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:57:13,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=1906.6666666666667, ans=0.23093333333333332 +2024-07-27 09:57:24,388 INFO [train.py:1114] (2/4) Epoch 1, batch 1450, loss[loss=0.6843, simple_loss=0.6057, pruned_loss=0.3961, over 4687.00 frames. ], tot_loss[loss=0.6644, simple_loss=0.5817, pruned_loss=0.4082, over 943125.75 frames. 
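The `balancer1`/`balancer2` parameters logged here (`min_abs`, `max_abs`, `min_positive`, `max_positive`, `prob`) belong to icefall's `Balancer` modules, which keep per-channel activation statistics inside a target range by rescaling gradients in the backward pass, applied stochastically with the scheduled `prob`. A conceptual stand-in that expresses the same constraint as an explicit penalty (the real module modifies gradients rather than adding a loss term):

```python
import torch

def balancer_penalty(x: torch.Tensor, min_abs: float, max_abs: float,
                     min_positive: float = 0.0,
                     max_positive: float = 1.0) -> torch.Tensor:
    """Zero when every channel's mean |activation| lies in [min_abs, max_abs]
    and its fraction of positive values lies in [min_positive, max_positive];
    grows linearly outside those ranges."""
    dims = tuple(range(x.dim() - 1))                 # all but the channel dim
    mean_abs = x.abs().mean(dim=dims)
    frac_pos = (x > 0).float().mean(dim=dims)

    def outside(v: torch.Tensor, lo: float, hi: float) -> torch.Tensor:
        return (lo - v).clamp(min=0) + (v - hi).clamp(min=0)

    return (outside(mean_abs, min_abs, max_abs)
            + outside(frac_pos, min_positive, max_positive)).sum()
```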
], batch size: 15, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:57:36,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=1946.6666666666667, ans=0.40875 +2024-07-27 09:57:38,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.66 vs. limit=8.23 +2024-07-27 09:57:54,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=1960.0, ans=0.408125 +2024-07-27 09:57:57,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.90 vs. limit=5.493333333333333 +2024-07-27 09:58:12,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.40 vs. limit=5.986666666666666 +2024-07-27 09:58:16,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1986.6666666666667, ans=0.28013333333333335 +2024-07-27 09:58:16,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.02 vs. limit=8.245 +2024-07-27 09:58:22,748 INFO [train.py:1114] (2/4) Epoch 1, batch 1500, loss[loss=0.6067, simple_loss=0.5555, pruned_loss=0.3348, over 4810.00 frames. ], tot_loss[loss=0.6488, simple_loss=0.5721, pruned_loss=0.3915, over 942665.86 frames. ], batch size: 14, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:58:23,593 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.537e+01 6.985e+01 7.625e+01 8.885e+01 1.224e+02, threshold=1.525e+02, percent-clipped=0.0 +2024-07-27 09:58:32,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.90 vs. limit=8.255 +2024-07-27 09:58:33,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=2013.3333333333333, ans=6.258333333333333 +2024-07-27 09:58:43,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.26 vs. limit=9.01 +2024-07-27 09:58:57,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=2040.0, ans=0.404375 +2024-07-27 09:59:02,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.28 vs. limit=8.27 +2024-07-27 09:59:04,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=2053.3333333333335, ans=0.123 +2024-07-27 09:59:07,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2053.3333333333335, ans=0.7705333333333333 +2024-07-27 09:59:08,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.92 vs. limit=8.27 +2024-07-27 09:59:09,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.73 vs. 
limit=9.040000000000001 +2024-07-27 09:59:10,611 INFO [train.py:1114] (2/4) Epoch 1, batch 1550, loss[loss=0.5273, simple_loss=0.4977, pruned_loss=0.2789, over 4908.00 frames. ], tot_loss[loss=0.635, simple_loss=0.5634, pruned_loss=0.3772, over 939038.43 frames. ], batch size: 15, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 09:59:14,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.96 vs. limit=8.275 +2024-07-27 09:59:29,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2080.0, ans=0.40249999999999997 +2024-07-27 09:59:35,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.50 vs. limit=9.06 +2024-07-27 09:59:37,132 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.80 vs. limit=5.52 +2024-07-27 09:59:37,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=2080.0, ans=0.5 +2024-07-27 09:59:42,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.18 vs. limit=9.07 +2024-07-27 09:59:46,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.78 vs. limit=5.523333333333333 +2024-07-27 09:59:48,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=2106.6666666666665, ans=0.2789333333333333 +2024-07-27 09:59:54,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=2106.6666666666665, ans=0.0526 +2024-07-27 09:59:56,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2120.0, ans=0.2788 +2024-07-27 10:00:04,249 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=9.1 +2024-07-27 10:00:04,814 INFO [train.py:1114] (2/4) Epoch 1, batch 1600, loss[loss=0.5705, simple_loss=0.5335, pruned_loss=0.3054, over 4876.00 frames. ], tot_loss[loss=0.6237, simple_loss=0.5566, pruned_loss=0.3651, over 938112.66 frames. ], batch size: 14, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 10:00:05,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=2133.3333333333335, ans=0.4 +2024-07-27 10:00:05,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.018e+01 7.259e+01 8.235e+01 9.551e+01 1.793e+02, threshold=1.647e+02, percent-clipped=2.0 +2024-07-27 10:00:08,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=2133.3333333333335, ans=0.232 +2024-07-27 10:00:32,289 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.05 vs. 
limit=6.073333333333333
+2024-07-27 10:00:39,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=2160.0, ans=0.119
+2024-07-27 10:00:42,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.17 vs. limit=9.120000000000001
+2024-07-27 10:00:45,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.55 vs. limit=9.120000000000001
+2024-07-27 10:00:46,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=2160.0, ans=0.8244
+2024-07-27 10:00:47,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.02 vs. limit=8.315
+2024-07-27 10:01:01,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.61 vs. limit=6.093333333333334
+2024-07-27 10:01:04,979 INFO [train.py:1114] (2/4) Epoch 1, batch 1650, loss[loss=0.5774, simple_loss=0.5416, pruned_loss=0.3076, over 4659.00 frames. ], tot_loss[loss=0.6129, simple_loss=0.5499, pruned_loss=0.3542, over 937871.12 frames. ], batch size: 14, lr: 4.45e-02, grad_scale: 32.0
+2024-07-27 10:01:06,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=2200.0, ans=0.396875
+2024-07-27 10:01:09,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=2200.0, ans=0.12625
+2024-07-27 10:01:15,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=2213.3333333333335, ans=0.8225333333333333
+2024-07-27 10:01:18,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=2213.3333333333335, ans=0.11699999999999999
+2024-07-27 10:01:27,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=2226.6666666666665, ans=0.04990000000000001
+2024-07-27 10:01:30,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=2226.6666666666665, ans=5.556666666666667
+2024-07-27 10:01:35,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2240.0, ans=0.395
+2024-07-27 10:01:36,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=9.18
+2024-07-27 10:01:37,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=9.18
+2024-07-27 10:01:50,430 INFO [train.py:1114] (2/4) Epoch 1, batch 1700, loss[loss=0.5074, simple_loss=0.4745, pruned_loss=0.2711, over 4717.00 frames. ], tot_loss[loss=0.6004, simple_loss=0.5425, pruned_loss=0.3422, over 939612.87 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0
+2024-07-27 10:01:51,142 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.631e+01 6.759e+01 7.966e+01 9.777e+01 1.760e+02, threshold=1.593e+02, percent-clipped=1.0
+2024-07-27 10:01:53,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.11 vs. limit=8.35
+2024-07-27 10:01:58,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=2280.0, ans=0.393125
+2024-07-27 10:02:00,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=9.21
+2024-07-27 10:02:09,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.58 vs. limit=9.22
+2024-07-27 10:02:33,872 INFO [train.py:1114] (2/4) Epoch 1, batch 1750, loss[loss=0.5084, simple_loss=0.4735, pruned_loss=0.2727, over 4808.00 frames. ], tot_loss[loss=0.5886, simple_loss=0.5354, pruned_loss=0.3314, over 940455.37 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0
+2024-07-27 10:02:37,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=2333.3333333333335, ans=0.8183333333333334
+2024-07-27 10:02:53,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=2346.6666666666665, ans=6.466666666666667
+2024-07-27 10:02:59,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=2360.0, ans=6.475
+2024-07-27 10:03:04,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=2360.0, ans=0.8174
+2024-07-27 10:03:04,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=2373.3333333333335, ans=0.38875
+2024-07-27 10:03:25,576 INFO [train.py:1114] (2/4) Epoch 1, batch 1800, loss[loss=0.5697, simple_loss=0.528, pruned_loss=0.3069, over 4645.00 frames. ], tot_loss[loss=0.5794, simple_loss=0.5301, pruned_loss=0.3227, over 940863.99 frames. ], batch size: 13, lr: 4.44e-02, grad_scale: 32.0
+2024-07-27 10:03:26,391 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.000e+01 7.252e+01 8.218e+01 9.576e+01 1.850e+02, threshold=1.644e+02, percent-clipped=1.0
+2024-07-27 10:03:27,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=2400.0, ans=0.035
+2024-07-27 10:03:28,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=5.6
+2024-07-27 10:03:29,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=9.3
+2024-07-27 10:03:30,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.51 vs. limit=8.4
+2024-07-27 10:03:31,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.93 vs. limit=9.3
+2024-07-27 10:03:35,791 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 10:03:40,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.09 vs. limit=9.31
+2024-07-27 10:03:42,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=2426.6666666666665, ans=0.38625
+2024-07-27 10:03:52,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=2440.0, ans=0.385625
+2024-07-27 10:03:59,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=2440.0, ans=0.1085
+2024-07-27 10:04:00,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2440.0, ans=0.195
+2024-07-27 10:04:08,553 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.41 vs. limit=8.42
+2024-07-27 10:04:13,829 INFO [train.py:1114] (2/4) Epoch 1, batch 1850, loss[loss=0.5825, simple_loss=0.5394, pruned_loss=0.3137, over 4818.00 frames. ], tot_loss[loss=0.5689, simple_loss=0.5243, pruned_loss=0.3132, over 941460.30 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0
+2024-07-27 10:04:14,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.93 vs. limit=5.616666666666666
+2024-07-27 10:04:24,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2480.0, ans=0.38375
+2024-07-27 10:04:30,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=2493.3333333333335, ans=0.043899999999999995
+2024-07-27 10:04:33,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=2493.3333333333335, ans=0.383125
+2024-07-27 10:04:40,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=2506.6666666666665, ans=0.3825
+2024-07-27 10:04:42,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2506.6666666666665, ans=0.2749333333333333
+2024-07-27 10:04:44,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=2506.6666666666665, ans=0.7750666666666667
+2024-07-27 10:04:54,862 INFO [train.py:1114] (2/4) Epoch 1, batch 1900, loss[loss=0.6166, simple_loss=0.5581, pruned_loss=0.3388, over 4665.00 frames. ], tot_loss[loss=0.5606, simple_loss=0.5196, pruned_loss=0.3058, over 942467.65 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0
+2024-07-27 10:04:55,624 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.340e+01 7.620e+01 8.335e+01 9.482e+01 1.510e+02, threshold=1.667e+02, percent-clipped=0.0
+2024-07-27 10:04:58,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2533.3333333333335, ans=0.38125
+2024-07-27 10:04:59,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.87 vs. limit=9.4
+2024-07-27 10:05:05,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.37 vs. limit=8.455
+2024-07-27 10:05:07,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=8.455
+2024-07-27 10:05:18,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=2546.6666666666665, ans=0.380625
+2024-07-27 10:05:36,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=2573.3333333333335, ans=0.27426666666666666
+2024-07-27 10:05:46,645 INFO [train.py:1114] (2/4) Epoch 1, batch 1950, loss[loss=0.5156, simple_loss=0.5032, pruned_loss=0.2637, over 4901.00 frames. ], tot_loss[loss=0.5542, simple_loss=0.5171, pruned_loss=0.2996, over 944511.01 frames. ], batch size: 13, lr: 4.43e-02, grad_scale: 32.0
+2024-07-27 10:05:46,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2600.0, ans=0.378125
+2024-07-27 10:05:47,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=9.45
+2024-07-27 10:05:59,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=2613.3333333333335, ans=0.3775
+2024-07-27 10:06:16,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=9.47
+2024-07-27 10:06:18,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2640.0, ans=0.16999999999999998
+2024-07-27 10:06:32,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=2653.3333333333335, ans=6.658333333333333
+2024-07-27 10:06:33,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=2653.3333333333335, ans=0.375625
+2024-07-27 10:06:34,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2666.6666666666665, ans=0.16666666666666669
+2024-07-27 10:06:42,533 INFO [train.py:1114] (2/4) Epoch 1, batch 2000, loss[loss=0.5496, simple_loss=0.5053, pruned_loss=0.297, over 4799.00 frames. ], tot_loss[loss=0.5472, simple_loss=0.5133, pruned_loss=0.2936, over 941966.91 frames. ], batch size: 11, lr: 4.42e-02, grad_scale: 32.0
+2024-07-27 10:06:42,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2666.6666666666665, ans=0.2733333333333333
+2024-07-27 10:06:43,359 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.554e+01 8.059e+01 9.021e+01 3.573e+02, threshold=1.612e+02, percent-clipped=2.0
+2024-07-27 10:06:51,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375
+2024-07-27 10:06:56,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2680.0, ans=0.2732
+2024-07-27 10:06:59,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=3.402
+2024-07-27 10:07:08,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=2693.3333333333335, ans=0.08316666666666667
+2024-07-27 10:07:10,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2706.6666666666665, ans=0.2729333333333333
+2024-07-27 10:07:12,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.08 vs. limit=6.3533333333333335
+2024-07-27 10:07:18,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=2720.0, ans=0.0415
+2024-07-27 10:07:26,887 INFO [train.py:1114] (2/4) Epoch 1, batch 2050, loss[loss=0.4385, simple_loss=0.4372, pruned_loss=0.2199, over 4617.00 frames. ], tot_loss[loss=0.5391, simple_loss=0.5083, pruned_loss=0.2873, over 939596.82 frames. ], batch size: 11, lr: 4.42e-02, grad_scale: 64.0
+2024-07-27 10:07:26,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2733.3333333333335, ans=0.27266666666666667
+2024-07-27 10:07:29,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.99 vs. limit=6.366666666666667
+2024-07-27 10:07:31,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2733.3333333333335, ans=0.27266666666666667
+2024-07-27 10:07:38,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.04 vs. limit=9.56
+2024-07-27 10:07:46,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.92 vs. limit=5.6899999999999995
+2024-07-27 10:07:54,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=2773.3333333333335, ans=0.037599999999999995
+2024-07-27 10:07:57,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=2773.3333333333335, ans=0.037599999999999995
+2024-07-27 10:07:59,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=2786.6666666666665, ans=0.369375
+2024-07-27 10:08:03,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2786.6666666666665, ans=0.369375
+2024-07-27 10:08:08,453 INFO [train.py:1114] (2/4) Epoch 1, batch 2100, loss[loss=0.4907, simple_loss=0.4825, pruned_loss=0.2495, over 4752.00 frames. ], tot_loss[loss=0.5295, simple_loss=0.5028, pruned_loss=0.2799, over 941327.24 frames. ], batch size: 13, lr: 4.42e-02, grad_scale: 64.0
+2024-07-27 10:08:09,834 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.063e+01 7.785e+01 9.607e+01 1.091e+02 1.489e+02, threshold=1.921e+02, percent-clipped=0.0
+2024-07-27 10:08:10,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.86 vs. limit=6.4
+2024-07-27 10:08:11,039 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.86 vs. limit=9.6
+2024-07-27 10:08:24,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.27 vs. limit=9.61
+2024-07-27 10:08:28,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=2826.6666666666665, ans=9.620000000000001
+2024-07-27 10:08:36,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=2840.0, ans=0.366875
+2024-07-27 10:08:45,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=2840.0, ans=0.09025
+2024-07-27 10:08:52,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=2853.3333333333335, ans=0.035799999999999985
+2024-07-27 10:08:53,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.24 vs. limit=9.64
+2024-07-27 10:08:59,117 INFO [train.py:1114] (2/4) Epoch 1, batch 2150, loss[loss=0.437, simple_loss=0.4495, pruned_loss=0.2122, over 4898.00 frames. ], tot_loss[loss=0.5168, simple_loss=0.4956, pruned_loss=0.2704, over 944323.50 frames. ], batch size: 13, lr: 4.41e-02, grad_scale: 64.0
+2024-07-27 10:09:06,301 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.86 vs. limit=6.433333333333334
+2024-07-27 10:09:08,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.06 vs. limit=5.72
+2024-07-27 10:09:08,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.93 vs. limit=5.72
+2024-07-27 10:09:18,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.74 vs. limit=6.4399999999999995
+2024-07-27 10:09:38,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.16 vs. limit=9.67
+2024-07-27 10:09:41,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=2893.3333333333335, ans=0.5
+2024-07-27 10:09:57,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.92 vs. limit=9.69
+2024-07-27 10:10:00,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.02 vs. limit=6.46
+2024-07-27 10:10:04,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. limit=8.6
+2024-07-27 10:10:05,502 INFO [train.py:1114] (2/4) Epoch 1, batch 2200, loss[loss=0.4918, simple_loss=0.5017, pruned_loss=0.2409, over 4807.00 frames. ], tot_loss[loss=0.5127, simple_loss=0.4942, pruned_loss=0.2667, over 943802.30 frames. ], batch size: 14, lr: 4.41e-02, grad_scale: 64.0
+2024-07-27 10:10:06,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.332e+01 7.672e+01 8.381e+01 9.351e+01 1.723e+02, threshold=1.676e+02, percent-clipped=0.0
+2024-07-27 10:10:07,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=5.173333333333334
+2024-07-27 10:10:10,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.48 vs. limit=8.6
+2024-07-27 10:10:15,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=34.06 vs. limit=9.71
+2024-07-27 10:10:17,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=2946.6666666666665, ans=8.605
+2024-07-27 10:10:18,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.87 vs. limit=9.71
+2024-07-27 10:10:19,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.28 vs. limit=5.736666666666666
+2024-07-27 10:10:20,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=5.1786666666666665
+2024-07-27 10:10:20,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.12 vs. limit=9.71
+2024-07-27 10:10:21,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2946.6666666666665, ans=0.1316666666666667
+2024-07-27 10:10:33,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.34 vs. limit=6.486666666666666
+2024-07-27 10:10:36,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.52 vs. limit=6.486666666666666
+2024-07-27 10:10:37,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.62 vs. limit=5.743333333333333
+2024-07-27 10:10:39,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.05 vs. limit=5.746666666666666
+2024-07-27 10:10:43,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=2986.6666666666665, ans=0.36
+2024-07-27 10:10:43,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2986.6666666666665, ans=0.27013333333333334
+2024-07-27 10:10:47,988 INFO [train.py:1114] (2/4) Epoch 1, batch 2250, loss[loss=0.4941, simple_loss=0.4774, pruned_loss=0.2554, over 4696.00 frames. ], tot_loss[loss=0.5087, simple_loss=0.4923, pruned_loss=0.2634, over 942011.96 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0
+2024-07-27 10:10:48,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=8.625
+2024-07-27 10:10:53,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=3000.0, ans=0.03249999999999999
+2024-07-27 10:10:53,972 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.87 vs. limit=6.5
+2024-07-27 10:10:56,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=3013.3333333333335, ans=0.04058333333333333
+2024-07-27 10:11:23,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=3026.6666666666665, ans=0.358125
+2024-07-27 10:11:26,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.44 vs. limit=8.635
+2024-07-27 10:11:29,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.25 vs. limit=5.76
+2024-07-27 10:11:34,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=3040.0, ans=0.0316
+2024-07-27 10:11:42,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=42.80 vs. limit=9.79
+2024-07-27 10:11:45,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=3053.3333333333335, ans=0.356875
+2024-07-27 10:11:48,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=8.65
+2024-07-27 10:11:48,681 INFO [train.py:1114] (2/4) Epoch 1, batch 2300, loss[loss=0.4736, simple_loss=0.4595, pruned_loss=0.2438, over 4943.00 frames. ], tot_loss[loss=0.5047, simple_loss=0.4901, pruned_loss=0.2603, over 939722.11 frames. ], batch size: 12, lr: 4.40e-02, grad_scale: 64.0
+2024-07-27 10:11:49,442 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.418e+01 7.845e+01 8.717e+01 9.817e+01 1.762e+02, threshold=1.743e+02, percent-clipped=1.0
+2024-07-27 10:11:50,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=3066.6666666666665, ans=0.35625
+2024-07-27 10:11:53,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=3066.6666666666665, ans=0.35625
+2024-07-27 10:11:55,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.93 vs. limit=9.8
+2024-07-27 10:11:56,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.82 vs. limit=5.77
+2024-07-27 10:11:56,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.26 vs. limit=6.54
+2024-07-27 10:12:04,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.39 vs. limit=3.462
+2024-07-27 10:12:10,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=3093.3333333333335, ans=0.08399999999999998
+2024-07-27 10:12:14,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.23 vs. limit=6.546666666666667
+2024-07-27 10:12:18,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.44 vs. limit=6.553333333333333
+2024-07-27 10:12:23,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.70 vs. limit=5.776666666666666
+2024-07-27 10:12:30,874 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=5.248
+2024-07-27 10:12:32,061 INFO [train.py:1114] (2/4) Epoch 1, batch 2350, loss[loss=0.5394, simple_loss=0.5252, pruned_loss=0.2768, over 4633.00 frames. ], tot_loss[loss=0.4992, simple_loss=0.4871, pruned_loss=0.2562, over 941694.79 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0
+2024-07-27 10:12:44,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=3146.6666666666665, ans=0.35250000000000004
+2024-07-27 10:12:46,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.81 vs. limit=6.573333333333333
+2024-07-27 10:12:46,891 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 10:12:49,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.95 vs. limit=5.786666666666667
+2024-07-27 10:12:51,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.28 vs. limit=5.79
+2024-07-27 10:13:06,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.92 vs. limit=6.586666666666667
+2024-07-27 10:13:16,256 INFO [train.py:1114] (2/4) Epoch 1, batch 2400, loss[loss=0.4562, simple_loss=0.4513, pruned_loss=0.2306, over 4632.00 frames. ], tot_loss[loss=0.4941, simple_loss=0.4845, pruned_loss=0.2523, over 941540.19 frames. ], batch size: 12, lr: 4.39e-02, grad_scale: 64.0
+2024-07-27 10:13:16,979 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.405e+01 7.978e+01 8.770e+01 1.032e+02 1.902e+02, threshold=1.754e+02, percent-clipped=2.0
+2024-07-27 10:13:17,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.77 vs. limit=9.9
+2024-07-27 10:13:22,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.74 vs. limit=6.6
+2024-07-27 10:13:26,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.69 vs. limit=5.803333333333334
+2024-07-27 10:13:28,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3213.3333333333335, ans=0.26786666666666664
+2024-07-27 10:13:33,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=3226.6666666666665, ans=0.079
+2024-07-27 10:13:34,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=3226.6666666666665, ans=0.34875
+2024-07-27 10:13:36,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=3226.6666666666665, ans=0.34875
+2024-07-27 10:13:38,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.89 vs. limit=8.71
+2024-07-27 10:13:40,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=3240.0, ans=0.348125
+2024-07-27 10:13:43,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.95 vs. limit=6.62
+2024-07-27 10:13:43,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3240.0, ans=0.2676
+2024-07-27 10:13:44,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=3240.0, ans=0.0785
+2024-07-27 10:13:56,404 INFO [train.py:1114] (2/4) Epoch 1, batch 2450, loss[loss=0.4955, simple_loss=0.5011, pruned_loss=0.2449, over 4686.00 frames. ], tot_loss[loss=0.4926, simple_loss=0.4842, pruned_loss=0.2508, over 937035.56 frames. ], batch size: 13, lr: 4.39e-02, grad_scale: 64.0
+2024-07-27 10:14:01,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3266.6666666666665, ans=0.346875
+2024-07-27 10:14:08,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=3280.0, ans=0.09000000000000002
+2024-07-27 10:14:09,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=3280.0, ans=0.077
+2024-07-27 10:14:38,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.87 vs. limit=5.826666666666666
+2024-07-27 10:14:38,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.73 vs. limit=6.653333333333333
+2024-07-27 10:14:39,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=3306.6666666666665, ans=0.345
+2024-07-27 10:14:45,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.32 vs. limit=8.745000000000001
+2024-07-27 10:14:49,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=3320.0, ans=8.745000000000001
+2024-07-27 10:14:50,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=3320.0, ans=0.344375
+2024-07-27 10:14:53,960 INFO [train.py:1114] (2/4) Epoch 1, batch 2500, loss[loss=0.4751, simple_loss=0.4709, pruned_loss=0.2396, over 4809.00 frames. ], tot_loss[loss=0.4887, simple_loss=0.4824, pruned_loss=0.2477, over 939333.89 frames. ], batch size: 14, lr: 4.38e-02, grad_scale: 64.0
+2024-07-27 10:14:54,655 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.439e+01 7.441e+01 8.140e+01 9.225e+01 1.396e+02, threshold=1.628e+02, percent-clipped=0.0
+2024-07-27 10:14:55,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.24 vs. limit=10.0
+2024-07-27 10:14:57,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=3333.3333333333335, ans=0.34375
+2024-07-27 10:15:00,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=3333.3333333333335, ans=0.34375
+2024-07-27 10:15:01,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.18 vs. limit=6.673333333333333
+2024-07-27 10:15:10,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3346.6666666666665, ans=0.26653333333333334
+2024-07-27 10:15:35,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=8.76
+2024-07-27 10:15:47,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=3373.3333333333335, ans=0.07349999999999998
+2024-07-27 10:16:03,523 INFO [train.py:1114] (2/4) Epoch 1, batch 2550, loss[loss=0.4001, simple_loss=0.4187, pruned_loss=0.1908, over 4805.00 frames. ], tot_loss[loss=0.4832, simple_loss=0.4791, pruned_loss=0.2438, over 938657.93 frames. ], batch size: 11, lr: 4.38e-02, grad_scale: 64.0
+2024-07-27 10:16:09,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.80 vs. limit=10.05
+2024-07-27 10:16:23,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.75 vs. limit=6.713333333333333
+2024-07-27 10:16:39,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=3453.3333333333335, ans=0.055749999999999994
+2024-07-27 10:16:41,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=3453.3333333333335, ans=0.022299999999999986
+2024-07-27 10:16:47,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3453.3333333333335, ans=0.26546666666666663
+2024-07-27 10:16:47,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=3453.3333333333335, ans=0.2518
+2024-07-27 10:16:49,870 INFO [train.py:1114] (2/4) Epoch 1, batch 2600, loss[loss=0.4312, simple_loss=0.4387, pruned_loss=0.2118, over 4902.00 frames. ], tot_loss[loss=0.4806, simple_loss=0.4779, pruned_loss=0.2418, over 938085.14 frames. ], batch size: 13, lr: 4.37e-02, grad_scale: 64.0
+2024-07-27 10:16:50,623 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.396e+01 7.798e+01 8.275e+01 9.472e+01 1.752e+02, threshold=1.655e+02, percent-clipped=1.0
+2024-07-27 10:16:52,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=10.1
+2024-07-27 10:16:59,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=3480.0, ans=0.33687500000000004
+2024-07-27 10:17:12,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=3493.3333333333335, ans=0.021400000000000002
+2024-07-27 10:17:24,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3506.6666666666665, ans=0.335625
+2024-07-27 10:17:25,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.41 vs. limit=6.753333333333333
+2024-07-27 10:17:30,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.11 vs. limit=8.82
+2024-07-27 10:17:35,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=3.528
+2024-07-27 10:17:38,258 INFO [train.py:1114] (2/4) Epoch 1, batch 2650, loss[loss=0.4669, simple_loss=0.4835, pruned_loss=0.2251, over 4622.00 frames. ], tot_loss[loss=0.4767, simple_loss=0.4755, pruned_loss=0.2391, over 940008.73 frames. ], batch size: 16, lr: 4.37e-02, grad_scale: 64.0
+2024-07-27 10:17:38,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=3533.3333333333335, ans=0.334375
+2024-07-27 10:17:39,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.77 vs. limit=8.825
+2024-07-27 10:17:39,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3533.3333333333335, ans=0.334375
+2024-07-27 10:17:41,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=3533.3333333333335, ans=0.7763333333333333
+2024-07-27 10:17:44,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=3533.3333333333335, ans=0.06749999999999998
+2024-07-27 10:17:45,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=3533.3333333333335, ans=0.02049999999999999
+2024-07-27 10:17:45,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.07 vs. limit=6.766666666666667
+2024-07-27 10:17:49,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=10.16
+2024-07-27 10:17:49,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=3546.6666666666665, ans=0.03891666666666667
+2024-07-27 10:17:52,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=3546.6666666666665, ans=0.33375
+2024-07-27 10:18:06,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=3573.3333333333335, ans=0.06599999999999998
+2024-07-27 10:18:40,841 INFO [train.py:1114] (2/4) Epoch 1, batch 2700, loss[loss=0.4189, simple_loss=0.4574, pruned_loss=0.1902, over 4746.00 frames. ], tot_loss[loss=0.4764, simple_loss=0.4763, pruned_loss=0.2383, over 939875.04 frames. ], batch size: 14, lr: 4.36e-02, grad_scale: 64.0
+2024-07-27 10:18:41,566 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.609e+01 7.664e+01 8.465e+01 9.239e+01 1.807e+02, threshold=1.693e+02, percent-clipped=1.0
+2024-07-27 10:18:41,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=3600.0, ans=0.33125
+2024-07-27 10:18:42,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=3600.0, ans=0.04749999999999999
+2024-07-27 10:19:00,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=3613.3333333333335, ans=0.046750000000000014
+2024-07-27 10:19:04,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=29.24 vs. limit=10.22
+2024-07-27 10:19:29,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=10.23
+2024-07-27 10:19:31,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=3653.3333333333335, ans=0.07716666666666666
+2024-07-27 10:19:38,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=3.55
+2024-07-27 10:19:40,869 INFO [train.py:1114] (2/4) Epoch 1, batch 2750, loss[loss=0.5031, simple_loss=0.4987, pruned_loss=0.2537, over 4715.00 frames. ], tot_loss[loss=0.4749, simple_loss=0.4755, pruned_loss=0.2373, over 939451.61 frames. ], batch size: 12, lr: 4.36e-02, grad_scale: 32.0
+2024-07-27 10:19:43,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.72 vs. limit=10.25
+2024-07-27 10:19:58,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.66 vs. limit=8.879999999999999
+2024-07-27 10:19:59,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=3680.0, ans=0.017199999999999993
+2024-07-27 10:19:59,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=3680.0, ans=10.26
+2024-07-27 10:20:06,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=10.27
+2024-07-27 10:20:22,991 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=10.29
+2024-07-27 10:20:23,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.33 vs. limit=10.29
+2024-07-27 10:20:23,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.45 vs. limit=10.29
+2024-07-27 10:20:25,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=3720.0, ans=0.01630000000000001
+2024-07-27 10:20:27,063 INFO [train.py:1114] (2/4) Epoch 1, batch 2800, loss[loss=0.5358, simple_loss=0.4976, pruned_loss=0.287, over 3449.00 frames. ], tot_loss[loss=0.4726, simple_loss=0.4743, pruned_loss=0.2355, over 937280.94 frames. ], batch size: 35, lr: 4.36e-02, grad_scale: 32.0
+2024-07-27 10:20:28,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.54 vs. limit=8.9
+2024-07-27 10:20:28,617 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.137e+01 7.490e+01 8.370e+01 9.871e+01 2.286e+02, threshold=1.674e+02, percent-clipped=1.0
+2024-07-27 10:20:29,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=3733.3333333333335, ans=0.325
+2024-07-27 10:20:51,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=3773.3333333333335, ans=0.07
+2024-07-27 10:20:52,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.14 vs. limit=10.33
+2024-07-27 10:20:54,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=3773.3333333333335, ans=0.323125
+2024-07-27 10:21:12,436 INFO [train.py:1114] (2/4) Epoch 1, batch 2850, loss[loss=0.3925, simple_loss=0.4084, pruned_loss=0.1883, over 4958.00 frames. ], tot_loss[loss=0.4705, simple_loss=0.4729, pruned_loss=0.2341, over 935108.89 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0
+2024-07-27 10:21:14,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=3800.0, ans=10.35
+2024-07-27 10:21:17,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=3800.0, ans=0.057499999999999996
+2024-07-27 10:21:18,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=3800.0, ans=0.321875
+2024-07-27 10:21:18,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.94 vs. limit=6.9
+2024-07-27 10:21:26,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=3.572
+2024-07-27 10:21:31,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=3826.6666666666665, ans=0.2574
+2024-07-27 10:21:33,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=3826.6666666666665, ans=0.7882666666666667
+2024-07-27 10:21:40,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=3840.0, ans=0.3017788888605456
+2024-07-27 10:21:55,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=3853.3333333333335, ans=0.7651333333333333
+2024-07-27 10:21:57,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=3866.6666666666665, ans=0.258
+2024-07-27 10:21:57,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=3866.6666666666665, ans=0.013000000000000012
+2024-07-27 10:21:58,257 INFO [train.py:1114] (2/4) Epoch 1, batch 2900, loss[loss=0.4648, simple_loss=0.4823, pruned_loss=0.2237, over 4823.00 frames. ], tot_loss[loss=0.467, simple_loss=0.4717, pruned_loss=0.2312, over 939072.33 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0
+2024-07-27 10:22:07,178 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.485e+01 7.711e+01 8.512e+01 9.288e+01 5.214e+02, threshold=1.702e+02, percent-clipped=1.0
+2024-07-27 10:22:10,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=3866.6666666666665, ans=0.31875
+2024-07-27 10:22:14,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.04 vs. limit=5.97
+2024-07-27 10:22:17,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=8.955
+2024-07-27 10:22:18,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=3880.0, ans=0.7642
+2024-07-27 10:22:22,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.73 vs. limit=6.946666666666667
+2024-07-27 10:22:26,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=3893.3333333333335, ans=0.012399999999999994
+2024-07-27 10:22:29,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=3893.3333333333335, ans=0.3175
+2024-07-27 10:22:29,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.47 vs. limit=8.96
+2024-07-27 10:22:30,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.82 vs. limit=8.965
+2024-07-27 10:22:43,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=3920.0, ans=0.07550000000000001
+2024-07-27 10:22:47,636 INFO [train.py:1114] (2/4) Epoch 1, batch 2950, loss[loss=0.3892, simple_loss=0.4177, pruned_loss=0.1804, over 4711.00 frames. ], tot_loss[loss=0.4613, simple_loss=0.4674, pruned_loss=0.2276, over 938074.82 frames. ], batch size: 12, lr: 4.34e-02, grad_scale: 32.0
+2024-07-27 10:22:57,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=3933.3333333333335, ans=0.05249999999999999
+2024-07-27 10:22:58,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=3933.3333333333335, ans=0.315625
+2024-07-27 10:23:00,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=3933.3333333333335, ans=0.011499999999999996
+2024-07-27 10:23:03,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=5.578666666666667
+2024-07-27 10:23:07,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=10.46
+2024-07-27 10:23:09,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=3946.6666666666665, ans=0.05199999999999999
+2024-07-27 10:23:11,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.58 vs. limit=6.98
+2024-07-27 10:23:13,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=3960.0, ans=0.31437499999999996
+2024-07-27 10:23:21,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=3973.3333333333335, ans=0.31375
+2024-07-27 10:23:29,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=3986.6666666666665, ans=0.05049999999999999
+2024-07-27 10:23:29,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=10.49
+2024-07-27 10:23:36,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=8.995000000000001
+2024-07-27 10:23:38,733 INFO [train.py:1114] (2/4) Epoch 1, batch 3000, loss[loss=0.4707, simple_loss=0.4695, pruned_loss=0.2359, over 4748.00 frames. ], tot_loss[loss=0.4563, simple_loss=0.4642, pruned_loss=0.2242, over 937533.15 frames. ], batch size: 13, lr: 4.34e-02, grad_scale: 32.0
+2024-07-27 10:23:38,734 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 10:23:52,402 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=0.3584, simple_loss=0.4212, pruned_loss=0.1478, over 944034.00 frames.
+2024-07-27 10:23:52,403 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-27 10:23:54,438 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.536e+01 7.537e+01 8.350e+01 9.496e+01 1.510e+02, threshold=1.670e+02, percent-clipped=0.0
+2024-07-27 10:23:57,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4000.0, ans=0.26
+2024-07-27 10:23:57,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.30 vs. limit=10.5
+2024-07-27 10:24:11,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=4000.0, ans=0.3125
+2024-07-27 10:24:14,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=9.004999999999999
+2024-07-27 10:24:26,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.14 vs. limit=6.006666666666667
+2024-07-27 10:24:30,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4026.6666666666665, ans=0.2597333333333333
+2024-07-27 10:24:32,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4040.0, ans=0.31062500000000004
+2024-07-27 10:24:35,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.12 vs. limit=9.015
+2024-07-27 10:24:38,381 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 10:24:41,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=4053.3333333333335, ans=0.31
+2024-07-27 10:24:44,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.36 vs. limit=9.02
+2024-07-27 10:24:46,246 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.35 vs. limit=10.54
+2024-07-27 10:24:49,756 INFO [train.py:1114] (2/4) Epoch 1, batch 3050, loss[loss=0.3569, simple_loss=0.3909, pruned_loss=0.1615, over 4639.00 frames. ], tot_loss[loss=0.4555, simple_loss=0.4646, pruned_loss=0.2232, over 936564.69 frames. ], batch size: 12, lr: 4.33e-02, grad_scale: 32.0
+2024-07-27 10:25:00,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=4080.0, ans=0.30874999999999997
+2024-07-27 10:25:03,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=4080.0, ans=0.04966666666666667
+2024-07-27 10:25:03,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4080.0, ans=0.30874999999999997
+2024-07-27 10:25:07,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.49 vs. limit=10.57
+2024-07-27 10:25:16,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=3.614
+2024-07-27 10:25:16,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=4106.666666666667, ans=0.3075
+2024-07-27 10:25:19,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=4106.666666666667, ans=0.3075
+2024-07-27 10:25:29,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=9.045
+2024-07-27 10:25:34,560 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 10:25:36,736 INFO [train.py:1114] (2/4) Epoch 1, batch 3100, loss[loss=0.5035, simple_loss=0.5038, pruned_loss=0.2516, over 4654.00 frames. ], tot_loss[loss=0.4518, simple_loss=0.4616, pruned_loss=0.221, over 937640.72 frames. ], batch size: 16, lr: 4.33e-02, grad_scale: 32.0
+2024-07-27 10:25:36,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=4133.333333333333, ans=0.037083333333333336
+2024-07-27 10:25:38,255 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.617e+01 7.727e+01 8.300e+01 9.366e+01 1.573e+02, threshold=1.660e+02, percent-clipped=0.0
+2024-07-27 10:25:46,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=4146.666666666667, ans=0.04938888888888889
+2024-07-27 10:25:54,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=10.61
+2024-07-27 10:26:21,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4186.666666666667, ans=0.2581333333333333
+2024-07-27 10:26:25,183 INFO [train.py:1114] (2/4) Epoch 1, batch 3150, loss[loss=0.3973, simple_loss=0.4323, pruned_loss=0.1812, over 4589.00 frames. ], tot_loss[loss=0.4471, simple_loss=0.4589, pruned_loss=0.2177, over 937849.06 frames. ], batch size: 17, lr: 4.32e-02, grad_scale: 32.0
+2024-07-27 10:26:32,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=10.66
+2024-07-27 10:26:33,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=4213.333333333333, ans=0.0
+2024-07-27 10:26:34,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=4213.333333333333, ans=0.3025
+2024-07-27 10:26:35,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.11 vs. limit=10.66
+2024-07-27 10:26:36,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4213.333333333333, ans=0.3025
+2024-07-27 10:26:43,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=4226.666666666667, ans=0.049055555555555554
+2024-07-27 10:26:43,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.23 vs. limit=6.056666666666667
+2024-07-27 10:26:43,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=4226.666666666667, ans=0.301875
+2024-07-27 10:26:44,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=10.67
+2024-07-27 10:26:54,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.90 vs. limit=6.0600000000000005
+2024-07-27 10:27:05,675 INFO [train.py:1114] (2/4) Epoch 1, batch 3200, loss[loss=0.3887, simple_loss=0.4191, pruned_loss=0.1792, over 4821.00 frames. ], tot_loss[loss=0.4447, simple_loss=0.4572, pruned_loss=0.2161, over 939369.45 frames. ], batch size: 13, lr: 4.32e-02, grad_scale: 32.0
+2024-07-27 10:27:12,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.236e+01 7.498e+01 8.243e+01 8.897e+01 1.348e+02, threshold=1.649e+02, percent-clipped=0.0
+2024-07-27 10:27:13,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=4266.666666666667, ans=0.3
+2024-07-27 10:27:33,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=4280.0, ans=0.009939130434782608
+2024-07-27 10:27:34,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=4280.0, ans=0.04883333333333333
+2024-07-27 10:27:38,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=4280.0, ans=0.299375
+2024-07-27 10:27:38,861 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.78 vs. limit=10.71
+2024-07-27 10:27:43,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=4280.0, ans=0.04883333333333333
+2024-07-27 10:27:58,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=4306.666666666667, ans=0.04872222222222222
+2024-07-27 10:28:02,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=9.115
+2024-07-27 10:28:04,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4306.666666666667, ans=0.298125
+2024-07-27 10:28:18,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=4320.0, ans=0.009930434782608695
+2024-07-27 10:28:29,684 INFO [train.py:1114] (2/4) Epoch 1, batch 3250, loss[loss=0.4395, simple_loss=0.4582, pruned_loss=0.2104, over 4934.00 frames. ], tot_loss[loss=0.4432, simple_loss=0.4567, pruned_loss=0.2149, over 940709.66 frames. ], batch size: 14, lr: 4.31e-02, grad_scale: 32.0
+2024-07-27 10:28:32,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=4333.333333333333, ans=0.025
+2024-07-27 10:28:35,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=52.48 vs. limit=9.125
+2024-07-27 10:28:43,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=10.76
+2024-07-27 10:28:51,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=9.135
+2024-07-27 10:28:57,207 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.01 vs. limit=6.093333333333334
+2024-07-27 10:29:03,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4373.333333333333, ans=0.29500000000000004
+2024-07-27 10:29:04,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.15 vs. limit=9.145
+2024-07-27 10:29:08,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=4386.666666666667, ans=0.294375
+2024-07-27 10:29:12,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=4400.0, ans=7.2
+2024-07-27 10:29:12,738 INFO [train.py:1114] (2/4) Epoch 1, batch 3300, loss[loss=0.4765, simple_loss=0.502, pruned_loss=0.2255, over 4695.00 frames. ], tot_loss[loss=0.4417, simple_loss=0.4549, pruned_loss=0.2143, over 940904.75 frames. ], batch size: 19, lr: 4.31e-02, grad_scale: 32.0
+2024-07-27 10:29:14,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.348e+01 7.414e+01 8.133e+01 9.480e+01 1.579e+02, threshold=1.627e+02, percent-clipped=0.0
+2024-07-27 10:29:30,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4400.0, ans=0.29375
+2024-07-27 10:30:33,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=4440.0, ans=0.009904347826086957
+2024-07-27 10:30:33,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.46 vs. limit=10.83
+2024-07-27 10:30:47,726 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.58 vs. limit=10.84
+2024-07-27 10:30:52,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.71 vs. limit=9.17
+2024-07-27 10:30:53,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=10.85
+2024-07-27 10:30:53,661 INFO [train.py:1114] (2/4) Epoch 1, batch 3350, loss[loss=0.404, simple_loss=0.4267, pruned_loss=0.1907, over 4648.00 frames. ], tot_loss[loss=0.4415, simple_loss=0.4547, pruned_loss=0.2142, over 938277.04 frames. ], batch size: 17, lr: 4.30e-02, grad_scale: 32.0
+2024-07-27 10:31:12,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4466.666666666667, ans=0.2553333333333333
+2024-07-27 10:31:14,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=4466.666666666667, ans=0.7436666666666667
+2024-07-27 10:31:15,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=4480.0, ans=0.048
+2024-07-27 10:31:35,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=4506.666666666667, ans=0.009889855072463769
+2024-07-27 10:31:38,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=4506.666666666667, ans=0.2676
+2024-07-27 10:31:48,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.25 vs. limit=9.195
+2024-07-27 10:31:49,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=4533.333333333333, ans=3.68
+2024-07-27 10:31:55,312 INFO [train.py:1114] (2/4) Epoch 1, batch 3400, loss[loss=0.4385, simple_loss=0.4446, pruned_loss=0.2162, over 4803.00 frames. ], tot_loss[loss=0.4428, simple_loss=0.4556, pruned_loss=0.215, over 937427.01 frames. ], batch size: 11, lr: 4.29e-02, grad_scale: 32.0
+2024-07-27 10:31:56,827 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.521e+01 8.329e+01 9.335e+01 1.968e+02, threshold=1.666e+02, percent-clipped=1.0
+2024-07-27 10:32:18,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=4546.666666666667, ans=0.009881159420289855
+2024-07-27 10:32:38,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=5.8293333333333335
+2024-07-27 10:32:41,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=4573.333333333333, ans=0.04761111111111112
+2024-07-27 10:32:44,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=4586.666666666667, ans=0.04755555555555556
+2024-07-27 10:32:49,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=4586.666666666667, ans=0.7958666666666666
+2024-07-27 10:32:54,912 INFO [train.py:1114] (2/4) Epoch 1, batch 3450, loss[loss=0.5095, simple_loss=0.5104, pruned_loss=0.2543, over 4671.00 frames. ], tot_loss[loss=0.4391, simple_loss=0.4532, pruned_loss=0.2125, over 937584.78 frames.
], batch size: 19, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:33:12,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4613.333333333333, ans=0.28375 +2024-07-27 10:33:18,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=4626.666666666667, ans=0.04738888888888889 +2024-07-27 10:33:21,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=4626.666666666667, ans=0.28312499999999996 +2024-07-27 10:33:22,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=4626.666666666667, ans=0.0 +2024-07-27 10:33:26,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=4626.666666666667, ans=0.26940000000000003 +2024-07-27 10:33:33,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.88 vs. limit=10.98 +2024-07-27 10:33:38,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=4640.0, ans=0.035500000000000004 +2024-07-27 10:33:39,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=4640.0, ans=0.2825 +2024-07-27 10:33:54,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=4653.333333333333, ans=0.281875 +2024-07-27 10:33:58,748 INFO [train.py:1114] (2/4) Epoch 1, batch 3500, loss[loss=0.4207, simple_loss=0.4344, pruned_loss=0.2035, over 4941.00 frames. ], tot_loss[loss=0.4349, simple_loss=0.4502, pruned_loss=0.2098, over 938180.28 frames. ], batch size: 12, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:34:00,618 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.699e+01 7.535e+01 8.121e+01 9.134e+01 1.279e+02, threshold=1.624e+02, percent-clipped=0.0 +2024-07-27 10:34:03,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=4666.666666666667, ans=0.28125 +2024-07-27 10:34:18,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=4680.0, ans=0.07075000000000001 +2024-07-27 10:34:24,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.65 vs. limit=9.26 +2024-07-27 10:34:49,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=11.03 +2024-07-27 10:35:04,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=4720.0, ans=0.047 +2024-07-27 10:35:18,219 INFO [train.py:1114] (2/4) Epoch 1, batch 3550, loss[loss=0.4848, simple_loss=0.4994, pruned_loss=0.235, over 4658.00 frames. ], tot_loss[loss=0.4333, simple_loss=0.4496, pruned_loss=0.2085, over 939136.97 frames. 
], batch size: 14, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:35:36,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=4746.666666666667, ans=0.025 +2024-07-27 10:35:40,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4746.666666666667, ans=0.27749999999999997 +2024-07-27 10:35:50,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=4773.333333333333, ans=0.27625 +2024-07-27 10:35:50,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=4773.333333333333, ans=0.27625 +2024-07-27 10:36:04,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=4786.666666666667, ans=0.009828985507246377 +2024-07-27 10:36:12,105 INFO [train.py:1114] (2/4) Epoch 1, batch 3600, loss[loss=0.3184, simple_loss=0.3672, pruned_loss=0.1347, over 4961.00 frames. ], tot_loss[loss=0.4334, simple_loss=0.4498, pruned_loss=0.2085, over 940358.65 frames. ], batch size: 13, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:36:12,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.01 vs. limit=7.4 +2024-07-27 10:36:13,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=11.1 +2024-07-27 10:36:13,745 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.357e+01 7.358e+01 8.127e+01 9.443e+01 1.425e+02, threshold=1.625e+02, percent-clipped=0.0 +2024-07-27 10:36:17,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.90 vs. 
limit=7.4 +2024-07-27 10:36:26,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=4813.333333333333, ans=0.27437500000000004 +2024-07-27 10:36:27,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4813.333333333333, ans=0.2518666666666667 +2024-07-27 10:36:29,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=4826.666666666667, ans=0.27375 +2024-07-27 10:36:31,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=4826.666666666667, ans=0.035 +2024-07-27 10:36:31,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=4826.666666666667, ans=0.04949747468305833 +2024-07-27 10:36:32,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=4826.666666666667, ans=0.27375 +2024-07-27 10:37:07,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=4853.333333333333, ans=0.034833333333333334 +2024-07-27 10:37:08,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=4853.333333333333, ans=0.27249999999999996 +2024-07-27 10:37:22,215 INFO [train.py:1114] (2/4) Epoch 1, batch 3650, loss[loss=0.438, simple_loss=0.4663, pruned_loss=0.2048, over 4895.00 frames. ], tot_loss[loss=0.4314, simple_loss=0.4486, pruned_loss=0.2071, over 940511.45 frames. ], batch size: 15, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:37:23,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.39 vs. limit=6.216666666666667 +2024-07-27 10:37:57,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=4906.666666666667, ans=0.27 +2024-07-27 10:38:04,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=4920.0, ans=0.26937500000000003 +2024-07-27 10:38:08,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=4933.333333333333, ans=0.0 +2024-07-27 10:38:08,646 INFO [train.py:1114] (2/4) Epoch 1, batch 3700, loss[loss=0.4661, simple_loss=0.4747, pruned_loss=0.2288, over 4927.00 frames. ], tot_loss[loss=0.4291, simple_loss=0.4478, pruned_loss=0.2052, over 941676.14 frames. ], batch size: 14, lr: 4.26e-02, grad_scale: 32.0 +2024-07-27 10:38:10,095 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.394e+01 7.604e+01 8.315e+01 9.088e+01 1.291e+02, threshold=1.663e+02, percent-clipped=0.0 +2024-07-27 10:38:10,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=4933.333333333333, ans=0.25066666666666665 +2024-07-27 10:38:24,530 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:38:25,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.78 vs. 
limit=5.984 +2024-07-27 10:38:31,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=4973.333333333333, ans=0.2746 +2024-07-27 10:38:39,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=4986.666666666667, ans=0.7998666666666666 +2024-07-27 10:38:47,015 INFO [train.py:1114] (2/4) Epoch 1, batch 3750, loss[loss=0.353, simple_loss=0.3934, pruned_loss=0.1563, over 4799.00 frames. ], tot_loss[loss=0.428, simple_loss=0.447, pruned_loss=0.2045, over 943095.37 frames. ], batch size: 11, lr: 4.26e-02, grad_scale: 32.0 +2024-07-27 10:38:48,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5000.0, ans=0.265625 +2024-07-27 10:38:50,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=5000.0, ans=0.7250000000000001 +2024-07-27 10:38:59,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=5013.333333333333, ans=0.265 +2024-07-27 10:39:02,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=5013.333333333333, ans=0.035 +2024-07-27 10:39:03,879 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.64 vs. limit=11.26 +2024-07-27 10:39:14,553 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.14 vs. limit=9.39 +2024-07-27 10:39:16,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.70 vs. limit=9.39 +2024-07-27 10:39:19,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5040.0, ans=0.0 +2024-07-27 10:39:29,993 INFO [train.py:1114] (2/4) Epoch 1, batch 3800, loss[loss=0.4815, simple_loss=0.4763, pruned_loss=0.2433, over 4811.00 frames. ], tot_loss[loss=0.4289, simple_loss=0.4476, pruned_loss=0.2051, over 941305.50 frames. ], batch size: 14, lr: 4.25e-02, grad_scale: 32.0 +2024-07-27 10:39:30,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=5066.666666666667, ans=0.7226666666666667 +2024-07-27 10:39:31,466 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.397e+01 7.848e+01 8.926e+01 1.062e+02 1.659e+02, threshold=1.785e+02, percent-clipped=0.0 +2024-07-27 10:39:35,725 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.75 vs. limit=9.4 +2024-07-27 10:39:41,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.11 vs. limit=9.405 +2024-07-27 10:39:51,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=5093.333333333333, ans=0.26125 +2024-07-27 10:39:51,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.86 vs. 
limit=9.41 +2024-07-27 10:39:59,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.62 vs. limit=11.33 +2024-07-27 10:39:59,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=9.415 +2024-07-27 10:40:00,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.42 vs. limit=11.33 +2024-07-27 10:40:04,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=5120.0, ans=0.26 +2024-07-27 10:40:08,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=5120.0, ans=0.7208 +2024-07-27 10:40:09,708 INFO [train.py:1114] (2/4) Epoch 1, batch 3850, loss[loss=0.4639, simple_loss=0.465, pruned_loss=0.2314, over 4621.00 frames. ], tot_loss[loss=0.4256, simple_loss=0.4461, pruned_loss=0.2026, over 941871.91 frames. ], batch size: 16, lr: 4.24e-02, grad_scale: 32.0 +2024-07-27 10:40:34,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.04 vs. limit=6.286666666666667 +2024-07-27 10:40:34,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=5146.666666666667, ans=0.0 +2024-07-27 10:40:37,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=5160.0, ans=0.258125 +2024-07-27 10:40:38,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=5160.0, ans=3.774 +2024-07-27 10:40:41,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=5160.0, ans=0.258125 +2024-07-27 10:40:46,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=11.379999999999999 +2024-07-27 10:40:48,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=5173.333333333333, ans=0.045111111111111116 +2024-07-27 10:40:50,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=5173.333333333333, ans=0.2575 +2024-07-27 10:40:52,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.30 vs. limit=7.586666666666666 +2024-07-27 10:41:02,115 INFO [train.py:1114] (2/4) Epoch 1, batch 3900, loss[loss=0.4387, simple_loss=0.4677, pruned_loss=0.2049, over 4820.00 frames. ], tot_loss[loss=0.4249, simple_loss=0.4456, pruned_loss=0.2021, over 942194.63 frames. 
], batch size: 14, lr: 4.24e-02, grad_scale: 32.0 +2024-07-27 10:41:05,346 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 7.258e+01 7.897e+01 8.876e+01 1.354e+02, threshold=1.579e+02, percent-clipped=0.0 +2024-07-27 10:41:18,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=5213.333333333333, ans=0.00973623188405797 +2024-07-27 10:41:31,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5240.0, ans=0.254375 +2024-07-27 10:41:39,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5253.333333333333, ans=0.25375000000000003 +2024-07-27 10:41:46,759 INFO [train.py:1114] (2/4) Epoch 1, batch 3950, loss[loss=0.4195, simple_loss=0.4463, pruned_loss=0.1964, over 4826.00 frames. ], tot_loss[loss=0.4202, simple_loss=0.4429, pruned_loss=0.1988, over 944208.36 frames. ], batch size: 16, lr: 4.23e-02, grad_scale: 32.0 +2024-07-27 10:41:49,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=5266.666666666667, ans=0.044722222222222226 +2024-07-27 10:41:49,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=5266.666666666667, ans=0.253125 +2024-07-27 10:41:51,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=5266.666666666667, ans=0.253125 +2024-07-27 10:41:52,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=5266.666666666667, ans=0.279 +2024-07-27 10:42:01,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=5280.0, ans=0.2525 +2024-07-27 10:42:05,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.92 vs. limit=7.640000000000001 +2024-07-27 10:42:34,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.91 vs. limit=11.48 +2024-07-27 10:42:38,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5320.0, ans=0.250625 +2024-07-27 10:42:42,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=9.495000000000001 +2024-07-27 10:42:45,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=5320.0, ans=0.250625 +2024-07-27 10:42:47,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5320.0, ans=0.250625 +2024-07-27 10:43:00,299 INFO [train.py:1114] (2/4) Epoch 1, batch 4000, loss[loss=0.4473, simple_loss=0.4583, pruned_loss=0.2181, over 4779.00 frames. ], tot_loss[loss=0.4209, simple_loss=0.4433, pruned_loss=0.1993, over 940066.98 frames. 
], batch size: 12, lr: 4.23e-02, grad_scale: 32.0 +2024-07-27 10:43:00,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=5333.333333333333, ans=0.25 +2024-07-27 10:43:01,865 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.271e+01 7.652e+01 8.472e+01 9.315e+01 2.163e+02, threshold=1.694e+02, percent-clipped=2.0 +2024-07-27 10:43:09,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.91 vs. limit=9.5 +2024-07-27 10:43:11,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=9.5 +2024-07-27 10:43:12,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.82 vs. limit=11.5 +2024-07-27 10:43:29,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=5360.0, ans=0.24875000000000003 +2024-07-27 10:43:31,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.97 vs. limit=11.530000000000001 +2024-07-27 10:43:34,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=5373.333333333333, ans=0.24812499999999998 +2024-07-27 10:43:34,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=5373.333333333333, ans=0.044277777777777784 +2024-07-27 10:43:36,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=5373.333333333333, ans=0.24812499999999998 +2024-07-27 10:43:48,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.90 vs. limit=6.346666666666667 +2024-07-27 10:43:53,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=5386.666666666667, ans=0.02 +2024-07-27 10:43:55,197 INFO [train.py:1114] (2/4) Epoch 1, batch 4050, loss[loss=0.5142, simple_loss=0.4992, pruned_loss=0.2646, over 3092.00 frames. ], tot_loss[loss=0.4195, simple_loss=0.442, pruned_loss=0.1985, over 938399.17 frames. ], batch size: 36, lr: 4.22e-02, grad_scale: 32.0 +2024-07-27 10:44:04,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5413.333333333333, ans=0.24625000000000002 +2024-07-27 10:44:25,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5440.0, ans=0.245 +2024-07-27 10:44:44,996 INFO [train.py:1114] (2/4) Epoch 1, batch 4100, loss[loss=0.454, simple_loss=0.4723, pruned_loss=0.2178, over 4906.00 frames. ], tot_loss[loss=0.4203, simple_loss=0.4424, pruned_loss=0.1991, over 937507.93 frames. 
], batch size: 15, lr: 4.22e-02, grad_scale: 32.0 +2024-07-27 10:44:46,478 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.345e+01 7.438e+01 7.964e+01 9.010e+01 1.753e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 10:44:49,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5466.666666666667, ans=0.24533333333333332 +2024-07-27 10:45:05,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=5493.333333333333, ans=0.2425 +2024-07-27 10:45:41,327 INFO [train.py:1114] (2/4) Epoch 1, batch 4150, loss[loss=0.3639, simple_loss=0.3965, pruned_loss=0.1656, over 4843.00 frames. ], tot_loss[loss=0.4173, simple_loss=0.4403, pruned_loss=0.1971, over 937458.67 frames. ], batch size: 13, lr: 4.21e-02, grad_scale: 32.0 +2024-07-27 10:45:44,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5533.333333333333, ans=0.0 +2024-07-27 10:45:47,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=5533.333333333333, ans=0.28300000000000003 +2024-07-27 10:45:53,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=5546.666666666667, ans=0.24 +2024-07-27 10:46:03,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5546.666666666667, ans=0.24453333333333332 +2024-07-27 10:46:27,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=5573.333333333333, ans=0.24426666666666666 +2024-07-27 10:46:31,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5586.666666666667, ans=0.23812499999999998 +2024-07-27 10:46:35,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=5586.666666666667, ans=6.3966666666666665 +2024-07-27 10:46:37,478 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.03 vs. limit=11.690000000000001 +2024-07-27 10:46:38,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.62 vs. limit=6.4 +2024-07-27 10:46:38,750 INFO [train.py:1114] (2/4) Epoch 1, batch 4200, loss[loss=0.4607, simple_loss=0.4905, pruned_loss=0.2154, over 4917.00 frames. ], tot_loss[loss=0.4179, simple_loss=0.4412, pruned_loss=0.1973, over 938711.62 frames. ], batch size: 15, lr: 4.20e-02, grad_scale: 32.0 +2024-07-27 10:46:40,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.767e+01 7.218e+01 8.164e+01 9.157e+01 1.293e+02, threshold=1.633e+02, percent-clipped=0.0 +2024-07-27 10:46:48,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.42 vs. 
limit=7.8 +2024-07-27 10:47:03,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=5626.666666666667, ans=0.23625000000000002 +2024-07-27 10:47:05,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=5626.666666666667, ans=0.23625000000000002 +2024-07-27 10:47:14,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=5640.0, ans=0.23562499999999997 +2024-07-27 10:47:14,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=5640.0, ans=0.009643478260869566 +2024-07-27 10:47:27,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=5653.333333333333, ans=0.7021333333333334 +2024-07-27 10:47:33,658 INFO [train.py:1114] (2/4) Epoch 1, batch 4250, loss[loss=0.3829, simple_loss=0.4085, pruned_loss=0.1787, over 4639.00 frames. ], tot_loss[loss=0.4174, simple_loss=0.4412, pruned_loss=0.1968, over 940243.69 frames. ], batch size: 12, lr: 4.20e-02, grad_scale: 32.0 +2024-07-27 10:47:46,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=5666.666666666667, ans=0.7016666666666667 +2024-07-27 10:48:07,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=5693.333333333333, ans=8.558333333333334 +2024-07-27 10:48:07,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=5693.333333333333, ans=0.04294444444444445 +2024-07-27 10:48:07,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=5706.666666666667, ans=0.23249999999999998 +2024-07-27 10:48:08,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=5706.666666666667, ans=0.009628985507246376 +2024-07-27 10:48:18,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.90 vs. limit=11.79 +2024-07-27 10:48:24,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5733.333333333333, ans=0.24266666666666667 +2024-07-27 10:48:24,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=5733.333333333333, ans=0.009623188405797102 +2024-07-27 10:48:25,302 INFO [train.py:1114] (2/4) Epoch 1, batch 4300, loss[loss=0.4151, simple_loss=0.4371, pruned_loss=0.1966, over 4758.00 frames. ], tot_loss[loss=0.4156, simple_loss=0.4396, pruned_loss=0.1958, over 939699.02 frames. 
], batch size: 13, lr: 4.19e-02, grad_scale: 32.0 +2024-07-27 10:48:26,777 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.009e+01 7.278e+01 8.201e+01 9.440e+01 2.695e+02, threshold=1.640e+02, percent-clipped=2.0 +2024-07-27 10:48:30,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=5733.333333333333, ans=0.009623188405797102 +2024-07-27 10:48:34,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5746.666666666667, ans=0.24253333333333332 +2024-07-27 10:48:34,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.72 vs. limit=6.4366666666666665 +2024-07-27 10:48:39,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=5760.0, ans=0.22999999999999998 +2024-07-27 10:48:49,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=11.83 +2024-07-27 10:49:01,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=5786.666666666667, ans=0.009611594202898551 +2024-07-27 10:49:15,559 INFO [train.py:1114] (2/4) Epoch 1, batch 4350, loss[loss=0.4111, simple_loss=0.4456, pruned_loss=0.1883, over 4754.00 frames. ], tot_loss[loss=0.4151, simple_loss=0.4397, pruned_loss=0.1952, over 940521.77 frames. ], batch size: 13, lr: 4.19e-02, grad_scale: 32.0 +2024-07-27 10:49:16,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.98 vs. limit=9.675 +2024-07-27 10:49:25,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5800.0, ans=0.22812500000000002 +2024-07-27 10:49:29,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5813.333333333333, ans=0.24186666666666667 +2024-07-27 10:49:29,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.80 vs. limit=7.906666666666666 +2024-07-27 10:49:43,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.94 vs. limit=3.874 +2024-07-27 10:49:56,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=5840.0, ans=9.69 +2024-07-27 10:50:09,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=5853.333333333333, ans=0.22562500000000002 +2024-07-27 10:50:12,261 INFO [train.py:1114] (2/4) Epoch 1, batch 4400, loss[loss=0.3777, simple_loss=0.4221, pruned_loss=0.1667, over 4809.00 frames. ], tot_loss[loss=0.4151, simple_loss=0.4402, pruned_loss=0.195, over 940235.89 frames. 
], batch size: 14, lr: 4.18e-02, grad_scale: 32.0 +2024-07-27 10:50:13,753 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.875e+01 7.282e+01 8.065e+01 8.793e+01 1.417e+02, threshold=1.613e+02, percent-clipped=0.0 +2024-07-27 10:50:21,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.78 vs. limit=11.91 +2024-07-27 10:50:29,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.73 vs. limit=11.92 +2024-07-27 10:50:39,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=11.93 +2024-07-27 10:50:56,616 INFO [train.py:1114] (2/4) Epoch 1, batch 4450, loss[loss=0.3252, simple_loss=0.3539, pruned_loss=0.1482, over 4935.00 frames. ], tot_loss[loss=0.4155, simple_loss=0.44, pruned_loss=0.1955, over 938856.98 frames. ], batch size: 12, lr: 4.17e-02, grad_scale: 32.0 +2024-07-27 10:51:06,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=5946.666666666667, ans=0.04188888888888889 +2024-07-27 10:51:11,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.30 vs. limit=9.73 +2024-07-27 10:51:16,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=5960.0, ans=0.22062500000000002 +2024-07-27 10:52:02,903 INFO [train.py:1114] (2/4) Epoch 1, batch 4500, loss[loss=0.4036, simple_loss=0.4379, pruned_loss=0.1846, over 4737.00 frames. ], tot_loss[loss=0.4129, simple_loss=0.4385, pruned_loss=0.1937, over 938178.16 frames. ], batch size: 14, lr: 4.17e-02, grad_scale: 32.0 +2024-07-27 10:52:04,385 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.515e+01 7.518e+01 8.133e+01 8.921e+01 1.342e+02, threshold=1.627e+02, percent-clipped=0.0 +2024-07-27 10:52:10,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.09 vs. limit=9.754999999999999 +2024-07-27 10:52:17,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=6026.666666666667, ans=0.041555555555555554 +2024-07-27 10:52:17,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.89 vs. limit=12.02 +2024-07-27 10:52:26,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.56 vs. limit=9.76 +2024-07-27 10:52:29,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.74 vs. 
limit=9.765 +2024-07-27 10:52:30,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=6040.0, ans=0.025 +2024-07-27 10:52:39,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=6053.333333333333, ans=0.009553623188405797 +2024-07-27 10:52:47,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6066.666666666667, ans=0.215625 +2024-07-27 10:52:47,970 INFO [train.py:1114] (2/4) Epoch 1, batch 4550, loss[loss=0.3975, simple_loss=0.4195, pruned_loss=0.1878, over 4897.00 frames. ], tot_loss[loss=0.413, simple_loss=0.4387, pruned_loss=0.1936, over 940137.09 frames. ], batch size: 13, lr: 4.16e-02, grad_scale: 32.0 +2024-07-27 10:52:52,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.73 vs. limit=12.05 +2024-07-27 10:52:53,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=6066.666666666667, ans=0.215625 +2024-07-27 10:53:47,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=12.059999999999999 +2024-07-27 10:55:17,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.53 vs. limit=9.785 +2024-07-27 10:55:30,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6120.0, ans=0.23879999999999998 +2024-07-27 10:55:40,605 INFO [train.py:1114] (2/4) Epoch 1, batch 4600, loss[loss=0.4179, simple_loss=0.4511, pruned_loss=0.1923, over 4483.00 frames. ], tot_loss[loss=0.4118, simple_loss=0.4379, pruned_loss=0.1929, over 938244.99 frames. ], batch size: 21, lr: 4.15e-02, grad_scale: 32.0 +2024-07-27 10:55:47,215 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.044e+01 7.331e+01 8.005e+01 8.983e+01 1.431e+02, threshold=1.601e+02, percent-clipped=0.0 +2024-07-27 10:55:50,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=6133.333333333333, ans=0.21250000000000002 +2024-07-27 10:56:01,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=6146.666666666667, ans=0.041055555555555553 +2024-07-27 10:56:02,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.56 vs. limit=6.458666666666667 +2024-07-27 10:56:16,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.95 vs. limit=9.815 +2024-07-27 10:56:17,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=6173.333333333333, ans=0.210625 +2024-07-27 10:56:19,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=6173.333333333333, ans=0.6839333333333334 +2024-07-27 10:56:22,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.27 vs. 
limit=12.14 +2024-07-27 10:56:27,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=6186.666666666667, ans=0.06133333333333334 +2024-07-27 10:56:29,213 INFO [train.py:1114] (2/4) Epoch 1, batch 4650, loss[loss=0.3747, simple_loss=0.4223, pruned_loss=0.1636, over 4843.00 frames. ], tot_loss[loss=0.4111, simple_loss=0.4375, pruned_loss=0.1923, over 940131.79 frames. ], batch size: 16, lr: 4.15e-02, grad_scale: 32.0 +2024-07-27 10:56:33,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=6200.0, ans=0.683 +2024-07-27 10:56:35,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=6200.0, ans=0.030625000000000003 +2024-07-27 10:56:46,311 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:56:56,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.78 vs. limit=9.84 +2024-07-27 10:56:57,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.71 vs. limit=12.18 +2024-07-27 10:57:05,893 INFO [train.py:1114] (2/4) Epoch 1, batch 4700, loss[loss=0.3195, simple_loss=0.3636, pruned_loss=0.1377, over 4711.00 frames. ], tot_loss[loss=0.4088, simple_loss=0.4352, pruned_loss=0.1912, over 937547.02 frames. ], batch size: 11, lr: 4.14e-02, grad_scale: 32.0 +2024-07-27 10:57:07,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.557e+01 7.394e+01 8.015e+01 9.109e+01 1.664e+02, threshold=1.603e+02, percent-clipped=1.0 +2024-07-27 10:57:16,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=6280.0, ans=0.205625 +2024-07-27 10:57:21,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=6293.333333333333, ans=8.933333333333334 +2024-07-27 10:57:24,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=6293.333333333333, ans=0.04044444444444445 +2024-07-27 10:57:33,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=6306.666666666667, ans=0.04038888888888889 +2024-07-27 10:57:39,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. limit=3.948 +2024-07-27 10:57:41,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=6320.0, ans=0.04033333333333333 +2024-07-27 10:57:44,084 INFO [train.py:1114] (2/4) Epoch 1, batch 4750, loss[loss=0.4189, simple_loss=0.4538, pruned_loss=0.192, over 4474.00 frames. ], tot_loss[loss=0.4114, simple_loss=0.4369, pruned_loss=0.193, over 935713.71 frames. ], batch size: 21, lr: 4.14e-02, grad_scale: 64.0 +2024-07-27 10:57:50,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.64 vs. 
limit=12.25 +2024-07-27 10:57:51,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=6333.333333333333, ans=0.009492753623188407 +2024-07-27 10:58:03,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.28 vs. limit=8.18 +2024-07-27 10:58:04,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=6360.0, ans=0.04016666666666667 +2024-07-27 10:58:04,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.21 vs. limit=12.27 +2024-07-27 10:58:08,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. limit=9.885 +2024-07-27 10:58:21,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6386.666666666667, ans=0.2361333333333333 +2024-07-27 10:58:26,663 INFO [train.py:1114] (2/4) Epoch 1, batch 4800, loss[loss=0.3986, simple_loss=0.4384, pruned_loss=0.1794, over 4680.00 frames. ], tot_loss[loss=0.4107, simple_loss=0.4365, pruned_loss=0.1924, over 933444.30 frames. ], batch size: 13, lr: 4.13e-02, grad_scale: 64.0 +2024-07-27 10:58:28,216 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.395e+01 7.298e+01 7.833e+01 8.734e+01 1.995e+02, threshold=1.567e+02, percent-clipped=2.0 +2024-07-27 10:58:29,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=6400.0, ans=0.04 +2024-07-27 10:58:33,246 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.99 vs. limit=12.3 +2024-07-27 10:58:37,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.70 vs. limit=6.6033333333333335 +2024-07-27 10:58:37,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=6413.333333333333, ans=0.19937500000000002 +2024-07-27 10:58:39,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.67 vs. limit=12.309999999999999 +2024-07-27 10:58:53,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6440.0, ans=0.23559999999999998 +2024-07-27 10:58:57,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.66 vs. limit=6.613333333333333 +2024-07-27 10:59:02,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=6453.333333333333, ans=0.025 +2024-07-27 10:59:05,516 INFO [train.py:1114] (2/4) Epoch 1, batch 4850, loss[loss=0.3698, simple_loss=0.4089, pruned_loss=0.1653, over 4750.00 frames. ], tot_loss[loss=0.4091, simple_loss=0.4359, pruned_loss=0.1912, over 932405.40 frames. ], batch size: 14, lr: 4.12e-02, grad_scale: 64.0 +2024-07-27 10:59:09,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.15 vs. 
limit=12.35 +2024-07-27 10:59:24,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=6493.333333333333, ans=0.03961111111111111 +2024-07-27 10:59:44,819 INFO [train.py:1114] (2/4) Epoch 1, batch 4900, loss[loss=0.3559, simple_loss=0.3903, pruned_loss=0.1608, over 4758.00 frames. ], tot_loss[loss=0.4083, simple_loss=0.4355, pruned_loss=0.1906, over 934147.40 frames. ], batch size: 13, lr: 4.12e-02, grad_scale: 64.0 +2024-07-27 10:59:46,284 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.298e+01 7.338e+01 8.038e+01 8.614e+01 1.106e+02, threshold=1.608e+02, percent-clipped=0.0 +2024-07-27 10:59:46,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=6533.333333333333, ans=0.19374999999999998 +2024-07-27 10:59:50,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.64 vs. limit=12.4 +2024-07-27 10:59:51,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.28 vs. limit=6.613333333333333 +2024-07-27 10:59:53,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=6533.333333333333, ans=0.6713333333333333 +2024-07-27 10:59:54,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=6546.666666666667, ans=0.193125 +2024-07-27 11:00:08,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=6573.333333333333, ans=0.009440579710144928 +2024-07-27 11:00:15,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=6573.333333333333, ans=0.03927777777777778 +2024-07-27 11:00:21,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.72 vs. limit=6.6466666666666665 +2024-07-27 11:00:22,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=6586.666666666667, ans=0.19124999999999998 +2024-07-27 11:00:23,790 INFO [train.py:1114] (2/4) Epoch 1, batch 4950, loss[loss=0.4885, simple_loss=0.4845, pruned_loss=0.2462, over 3515.00 frames. ], tot_loss[loss=0.4088, simple_loss=0.4357, pruned_loss=0.191, over 931371.00 frames. ], batch size: 37, lr: 4.11e-02, grad_scale: 64.0 +2024-07-27 11:00:27,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.61 vs. limit=9.975 +2024-07-27 11:00:44,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=6626.666666666667, ans=0.18937500000000002 +2024-07-27 11:00:56,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=6653.333333333333, ans=0.188125 +2024-07-27 11:00:57,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.87 vs. limit=12.49 +2024-07-27 11:01:00,669 INFO [train.py:1114] (2/4) Epoch 1, batch 5000, loss[loss=0.4412, simple_loss=0.4717, pruned_loss=0.2054, over 4665.00 frames. 
], tot_loss[loss=0.4081, simple_loss=0.4353, pruned_loss=0.1905, over 935368.47 frames. ], batch size: 14, lr: 4.10e-02, grad_scale: 64.0 +2024-07-27 11:01:02,004 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.106e+01 7.393e+01 8.012e+01 9.177e+01 1.350e+02, threshold=1.602e+02, percent-clipped=0.0 +2024-07-27 11:01:17,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=6693.333333333333, ans=0.18625000000000003 +2024-07-27 11:01:26,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6706.666666666667, ans=0.23293333333333333 +2024-07-27 11:01:33,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=6.68 +2024-07-27 11:01:34,698 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=10.02 +2024-07-27 11:01:36,547 INFO [train.py:1114] (2/4) Epoch 1, batch 5050, loss[loss=0.4025, simple_loss=0.4328, pruned_loss=0.1861, over 4844.00 frames. ], tot_loss[loss=0.405, simple_loss=0.4335, pruned_loss=0.1882, over 937774.60 frames. ], batch size: 12, lr: 4.10e-02, grad_scale: 64.0 +2024-07-27 11:01:37,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.55 vs. limit=6.683333333333334 +2024-07-27 11:01:38,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=6733.333333333333, ans=0.03861111111111112 +2024-07-27 11:01:38,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=6733.333333333333, ans=0.184375 +2024-07-27 11:01:41,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.58 vs. limit=6.683333333333334 +2024-07-27 11:01:45,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.44 vs. 
limit=12.559999999999999 +2024-07-27 11:01:47,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=6746.666666666667, ans=0.18375000000000002 +2024-07-27 11:01:48,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=6746.666666666667, ans=0.18375000000000002 +2024-07-27 11:01:49,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=6746.666666666667, ans=0.0 +2024-07-27 11:01:53,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=6760.0, ans=12.57 +2024-07-27 11:01:59,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=6773.333333333333, ans=0.1825 +2024-07-27 11:02:11,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=6786.666666666667, ans=0.181875 +2024-07-27 11:02:14,724 INFO [train.py:1114] (2/4) Epoch 1, batch 5100, loss[loss=0.4085, simple_loss=0.4381, pruned_loss=0.1895, over 4773.00 frames. ], tot_loss[loss=0.4062, simple_loss=0.4344, pruned_loss=0.189, over 935053.64 frames. ], batch size: 12, lr: 4.09e-02, grad_scale: 64.0 +2024-07-27 11:02:16,193 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.973e+01 7.191e+01 7.778e+01 8.421e+01 1.083e+02, threshold=1.556e+02, percent-clipped=0.0 +2024-07-27 11:02:31,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=6826.666666666667, ans=0.18 +2024-07-27 11:02:51,276 INFO [train.py:1114] (2/4) Epoch 1, batch 5150, loss[loss=0.4132, simple_loss=0.4406, pruned_loss=0.1929, over 4805.00 frames. ], tot_loss[loss=0.4055, simple_loss=0.4336, pruned_loss=0.1888, over 935956.71 frames. ], batch size: 16, lr: 4.09e-02, grad_scale: 64.0 +2024-07-27 11:02:55,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.70 vs. limit=10.075 +2024-07-27 11:02:58,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6880.0, ans=0.23120000000000002 +2024-07-27 11:03:01,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.77 vs. limit=10.08 +2024-07-27 11:03:05,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=6880.0, ans=0.038000000000000006 +2024-07-27 11:03:07,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=6893.333333333333, ans=0.03794444444444445 +2024-07-27 11:03:11,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=6893.333333333333, ans=0.6587333333333334 +2024-07-27 11:03:33,709 INFO [train.py:1114] (2/4) Epoch 1, batch 5200, loss[loss=0.3943, simple_loss=0.4295, pruned_loss=0.1796, over 4650.00 frames. ], tot_loss[loss=0.4036, simple_loss=0.4322, pruned_loss=0.1875, over 936061.75 frames. 
], batch size: 14, lr: 4.08e-02, grad_scale: 64.0 +2024-07-27 11:03:35,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.869e+01 7.238e+01 8.043e+01 8.705e+01 1.237e+02, threshold=1.609e+02, percent-clipped=0.0 +2024-07-27 11:03:45,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=6946.666666666667, ans=0.174375 +2024-07-27 11:03:48,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6960.0, ans=0.0 +2024-07-27 11:04:00,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=6973.333333333333, ans=0.17312499999999997 +2024-07-27 11:04:11,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.24 vs. limit=8.493333333333334 +2024-07-27 11:04:13,843 INFO [train.py:1114] (2/4) Epoch 1, batch 5250, loss[loss=0.344, simple_loss=0.3985, pruned_loss=0.1447, over 4902.00 frames. ], tot_loss[loss=0.4019, simple_loss=0.431, pruned_loss=0.1864, over 935858.24 frames. ], batch size: 13, lr: 4.07e-02, grad_scale: 64.0 +2024-07-27 11:04:15,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=7000.0, ans=12.75 +2024-07-27 11:04:16,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=7000.0, ans=8.5 +2024-07-27 11:04:17,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.14 vs. limit=4.05 +2024-07-27 11:04:19,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7000.0, ans=0.22999999999999998 +2024-07-27 11:04:24,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=7013.333333333333, ans=9.383333333333333 +2024-07-27 11:04:47,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=7040.0, ans=9.4 +2024-07-27 11:04:50,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.58 vs. limit=6.8213333333333335 +2024-07-27 11:04:51,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7053.333333333333, ans=0.22946666666666665 +2024-07-27 11:04:52,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.85 vs. limit=6.8213333333333335 +2024-07-27 11:04:58,087 INFO [train.py:1114] (2/4) Epoch 1, batch 5300, loss[loss=0.4679, simple_loss=0.4771, pruned_loss=0.2294, over 4632.00 frames. ], tot_loss[loss=0.4023, simple_loss=0.431, pruned_loss=0.1868, over 934331.29 frames. 
], batch size: 16, lr: 4.07e-02, grad_scale: 64.0 +2024-07-27 11:05:04,097 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+01 7.230e+01 7.839e+01 8.733e+01 1.218e+02, threshold=1.568e+02, percent-clipped=0.0 +2024-07-27 11:05:06,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=7066.666666666667, ans=0.16875 +2024-07-27 11:05:07,728 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:05:09,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.83 vs. limit=12.809999999999999 +2024-07-27 11:05:15,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=7080.0, ans=0.009330434782608695 +2024-07-27 11:05:15,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=7080.0, ans=0.04949747468305833 +2024-07-27 11:05:19,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=7093.333333333333, ans=12.82 +2024-07-27 11:05:19,896 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=10.16 +2024-07-27 11:05:22,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=7093.333333333333, ans=0.037111111111111116 +2024-07-27 11:05:24,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=7106.666666666667, ans=0.8210666666666666 +2024-07-27 11:05:24,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=7106.666666666667, ans=0.166875 +2024-07-27 11:05:34,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7120.0, ans=0.2288 +2024-07-27 11:05:41,460 INFO [train.py:1114] (2/4) Epoch 1, batch 5350, loss[loss=0.4174, simple_loss=0.42, pruned_loss=0.2074, over 4525.00 frames. ], tot_loss[loss=0.4026, simple_loss=0.4319, pruned_loss=0.1867, over 936223.40 frames. ], batch size: 10, lr: 4.06e-02, grad_scale: 64.0 +2024-07-27 11:05:43,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7133.333333333333, ans=0.22866666666666666 +2024-07-27 11:05:48,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=7133.333333333333, ans=0.025 +2024-07-27 11:05:52,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=7146.666666666667, ans=0.03688888888888889 +2024-07-27 11:05:54,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=7146.666666666667, ans=0.0 +2024-07-27 11:06:03,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.45 vs. 
limit=12.870000000000001 +2024-07-27 11:06:04,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=7173.333333333333, ans=9.483333333333334 +2024-07-27 11:06:09,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7173.333333333333, ans=0.0 +2024-07-27 11:06:18,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.97 vs. limit=8.593333333333334 +2024-07-27 11:06:20,240 INFO [train.py:1114] (2/4) Epoch 1, batch 5400, loss[loss=0.4346, simple_loss=0.461, pruned_loss=0.2041, over 4381.00 frames. ], tot_loss[loss=0.4036, simple_loss=0.4324, pruned_loss=0.1874, over 930889.44 frames. ], batch size: 26, lr: 4.05e-02, grad_scale: 64.0 +2024-07-27 11:06:21,690 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.213e+01 7.171e+01 7.909e+01 8.696e+01 2.349e+02, threshold=1.582e+02, percent-clipped=3.0 +2024-07-27 11:06:25,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=7200.0, ans=0.03666666666666667 +2024-07-27 11:06:27,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=12.91 +2024-07-27 11:06:37,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=7226.666666666667, ans=0.16125 +2024-07-27 11:06:40,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=7226.666666666667, ans=0.16125 +2024-07-27 11:06:41,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=7240.0, ans=0.16062500000000002 +2024-07-27 11:06:43,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=7240.0, ans=0.2276 +2024-07-27 11:06:44,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=7240.0, ans=0.16062500000000002 +2024-07-27 11:06:47,305 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:06:54,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=7253.333333333333, ans=0.15999999999999998 +2024-07-27 11:06:55,831 INFO [train.py:1114] (2/4) Epoch 1, batch 5450, loss[loss=0.3971, simple_loss=0.4138, pruned_loss=0.1902, over 4697.00 frames. ], tot_loss[loss=0.3998, simple_loss=0.4293, pruned_loss=0.1852, over 933466.92 frames. 
], batch size: 11, lr: 4.05e-02, grad_scale: 64.0 +2024-07-27 11:07:04,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7280.0, ans=0.2272 +2024-07-27 11:07:16,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=7293.333333333333, ans=0.009284057971014493 +2024-07-27 11:07:17,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7293.333333333333, ans=0.036277777777777784 +2024-07-27 11:07:19,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=7293.333333333333, ans=0.15812500000000002 +2024-07-27 11:07:25,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7306.666666666667, ans=0.22693333333333332 +2024-07-27 11:07:37,160 INFO [train.py:1114] (2/4) Epoch 1, batch 5500, loss[loss=0.4697, simple_loss=0.4783, pruned_loss=0.2305, over 4183.00 frames. ], tot_loss[loss=0.4, simple_loss=0.4295, pruned_loss=0.1853, over 931225.38 frames. ], batch size: 25, lr: 4.04e-02, grad_scale: 64.0 +2024-07-27 11:07:38,669 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.895e+01 7.344e+01 7.791e+01 8.854e+01 1.594e+02, threshold=1.558e+02, percent-clipped=1.0 +2024-07-27 11:07:46,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=7346.666666666667, ans=0.155625 +2024-07-27 11:07:52,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.95 vs. limit=4.104 +2024-07-27 11:07:54,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7360.0, ans=0.2264 +2024-07-27 11:08:03,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.23 vs. limit=13.030000000000001 +2024-07-27 11:08:14,261 INFO [train.py:1114] (2/4) Epoch 1, batch 5550, loss[loss=0.3301, simple_loss=0.3841, pruned_loss=0.138, over 4714.00 frames. ], tot_loss[loss=0.4001, simple_loss=0.43, pruned_loss=0.185, over 933374.03 frames. 
], batch size: 12, lr: 4.03e-02, grad_scale: 64.0 +2024-07-27 11:08:15,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7400.0, ans=0.226 +2024-07-27 11:08:15,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=7400.0, ans=0.153125 +2024-07-27 11:08:31,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=7426.666666666667, ans=0.15187499999999998 +2024-07-27 11:08:39,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=7440.0, ans=0.15125 +2024-07-27 11:08:43,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=7453.333333333333, ans=0.0 +2024-07-27 11:08:45,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=7453.333333333333, ans=0.150625 +2024-07-27 11:08:50,940 INFO [train.py:1114] (2/4) Epoch 1, batch 5600, loss[loss=0.4555, simple_loss=0.4706, pruned_loss=0.2202, over 4731.00 frames. ], tot_loss[loss=0.4005, simple_loss=0.4302, pruned_loss=0.1854, over 934251.15 frames. ], batch size: 14, lr: 4.03e-02, grad_scale: 64.0 +2024-07-27 11:08:52,369 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.970e+01 7.181e+01 7.813e+01 8.583e+01 1.892e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 11:09:12,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.20 vs. limit=10.31 +2024-07-27 11:09:14,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7506.666666666667, ans=0.22493333333333332 +2024-07-27 11:09:27,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=7520.0, ans=0.09899494936611666 +2024-07-27 11:09:29,069 INFO [train.py:1114] (2/4) Epoch 1, batch 5650, loss[loss=0.3873, simple_loss=0.4332, pruned_loss=0.1708, over 4616.00 frames. ], tot_loss[loss=0.3971, simple_loss=0.4278, pruned_loss=0.1832, over 936896.40 frames. ], batch size: 21, lr: 4.02e-02, grad_scale: 64.0 +2024-07-27 11:09:30,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7533.333333333333, ans=0.22466666666666668 +2024-07-27 11:09:36,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.30 vs. limit=10.33 +2024-07-27 11:09:38,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.16 vs. limit=10.33 +2024-07-27 11:09:38,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=7546.666666666667, ans=0.14625 +2024-07-27 11:09:49,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=7560.0, ans=0.035166666666666666 +2024-07-27 11:09:55,694 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.95 vs. 
limit=13.18 +2024-07-27 11:10:05,090 INFO [train.py:1114] (2/4) Epoch 1, batch 5700, loss[loss=0.3598, simple_loss=0.413, pruned_loss=0.1533, over 4693.00 frames. ], tot_loss[loss=0.3962, simple_loss=0.4278, pruned_loss=0.1823, over 937668.14 frames. ], batch size: 13, lr: 4.02e-02, grad_scale: 64.0 +2024-07-27 11:10:06,396 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.074e+01 7.227e+01 8.129e+01 9.173e+01 1.333e+02, threshold=1.626e+02, percent-clipped=0.0 +2024-07-27 11:10:14,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7613.333333333333, ans=0.143125 +2024-07-27 11:10:16,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=7613.333333333333, ans=0.00921449275362319 +2024-07-27 11:10:30,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=10.365 +2024-07-27 11:10:32,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7640.0, ans=0.22360000000000002 +2024-07-27 11:10:35,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7653.333333333333, ans=0.14125 +2024-07-27 11:10:36,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=7653.333333333333, ans=0.14125 +2024-07-27 11:10:38,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.66 vs. limit=8.826666666666666 +2024-07-27 11:10:41,775 INFO [train.py:1114] (2/4) Epoch 1, batch 5750, loss[loss=0.4022, simple_loss=0.4363, pruned_loss=0.184, over 4673.00 frames. ], tot_loss[loss=0.3955, simple_loss=0.4279, pruned_loss=0.1815, over 937840.68 frames. ], batch size: 19, lr: 4.01e-02, grad_scale: 64.0 +2024-07-27 11:10:49,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.53 vs. limit=13.26 +2024-07-27 11:10:55,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.30 vs. limit=13.26 +2024-07-27 11:10:58,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=7693.333333333333, ans=0.13937500000000003 +2024-07-27 11:11:06,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=7706.666666666667, ans=0.034555555555555555 +2024-07-27 11:11:10,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7720.0, ans=0.138125 +2024-07-27 11:11:25,471 INFO [train.py:1114] (2/4) Epoch 1, batch 5800, loss[loss=0.4171, simple_loss=0.437, pruned_loss=0.1986, over 4689.00 frames. ], tot_loss[loss=0.3982, simple_loss=0.4296, pruned_loss=0.1834, over 936870.36 frames. 
], batch size: 19, lr: 4.00e-02, grad_scale: 64.0 +2024-07-27 11:11:26,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.680e+01 7.353e+01 8.081e+01 9.227e+01 1.347e+02, threshold=1.616e+02, percent-clipped=0.0 +2024-07-27 11:11:47,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.06 vs. limit=13.32 +2024-07-27 11:11:54,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.46 vs. limit=8.886666666666667 +2024-07-27 11:11:56,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7786.666666666667, ans=0.135 +2024-07-27 11:11:59,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7786.666666666667, ans=0.0 +2024-07-27 11:12:04,134 INFO [train.py:1114] (2/4) Epoch 1, batch 5850, loss[loss=0.4541, simple_loss=0.4686, pruned_loss=0.2197, over 4464.00 frames. ], tot_loss[loss=0.3965, simple_loss=0.4281, pruned_loss=0.1825, over 937358.09 frames. ], batch size: 21, lr: 4.00e-02, grad_scale: 64.0 +2024-07-27 11:12:15,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=7813.333333333333, ans=0.025 +2024-07-27 11:12:19,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=7.1306666666666665 +2024-07-27 11:12:22,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=7826.666666666667, ans=0.133125 +2024-07-27 11:12:24,204 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:12:32,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=7853.333333333333, ans=0.05091666666666667 +2024-07-27 11:12:37,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.61 vs. limit=10.445 +2024-07-27 11:12:39,524 INFO [train.py:1114] (2/4) Epoch 1, batch 5900, loss[loss=0.3789, simple_loss=0.4162, pruned_loss=0.1708, over 4667.00 frames. ], tot_loss[loss=0.3955, simple_loss=0.4277, pruned_loss=0.1817, over 937775.90 frames. ], batch size: 15, lr: 3.99e-02, grad_scale: 64.0 +2024-07-27 11:12:40,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.265e+01 7.754e+01 8.488e+01 1.052e+02, threshold=1.551e+02, percent-clipped=0.0 +2024-07-27 11:12:41,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=7866.666666666667, ans=0.13124999999999998 +2024-07-27 11:12:46,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=7880.0, ans=0.009156521739130435 +2024-07-27 11:13:02,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=7906.666666666667, ans=0.12937500000000002 +2024-07-27 11:13:13,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.64 vs. 
limit=13.440000000000001 +2024-07-27 11:13:14,914 INFO [train.py:1114] (2/4) Epoch 1, batch 5950, loss[loss=0.3731, simple_loss=0.4205, pruned_loss=0.1629, over 4684.00 frames. ], tot_loss[loss=0.3953, simple_loss=0.4277, pruned_loss=0.1815, over 939667.52 frames. ], batch size: 15, lr: 3.98e-02, grad_scale: 64.0 +2024-07-27 11:13:18,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.71 vs. limit=13.45 +2024-07-27 11:13:22,709 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:13:49,315 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:13:50,908 INFO [train.py:1114] (2/4) Epoch 1, batch 6000, loss[loss=0.3862, simple_loss=0.4256, pruned_loss=0.1734, over 4157.00 frames. ], tot_loss[loss=0.3948, simple_loss=0.4266, pruned_loss=0.1814, over 936747.80 frames. ], batch size: 25, lr: 3.98e-02, grad_scale: 64.0 +2024-07-27 11:13:50,909 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 11:13:58,425 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.9998, 5.8911, 5.6930, 5.7260], device='cuda:2') +2024-07-27 11:14:11,373 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([2.6042, 2.9925, 2.0716, 2.7390, 2.1915, 3.1343, 2.2016, 2.4066], + device='cuda:2') +2024-07-27 11:14:16,118 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=0.3082, simple_loss=0.3886, pruned_loss=0.1139, over 944034.00 frames. +2024-07-27 11:14:16,119 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 11:14:17,457 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.690e+01 7.303e+01 7.945e+01 8.512e+01 1.515e+02, threshold=1.589e+02, percent-clipped=0.0 +2024-07-27 11:14:22,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=8013.333333333333, ans=0.125 +2024-07-27 11:14:39,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.27 vs. limit=4.2059999999999995 +2024-07-27 11:14:40,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=8040.0, ans=0.009121739130434783 +2024-07-27 11:14:42,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.71 vs. limit=10.515 +2024-07-27 11:14:43,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.17 vs. limit=10.515 +2024-07-27 11:14:50,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=8053.333333333333, ans=0.04949747468305833 +2024-07-27 11:14:52,580 INFO [train.py:1114] (2/4) Epoch 1, batch 6050, loss[loss=0.3374, simple_loss=0.3968, pruned_loss=0.139, over 4776.00 frames. ], tot_loss[loss=0.3923, simple_loss=0.4248, pruned_loss=0.1799, over 937980.94 frames. 
], batch size: 12, lr: 3.97e-02, grad_scale: 64.0 +2024-07-27 11:15:02,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=8080.0, ans=0.125 +2024-07-27 11:15:03,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.61 vs. limit=10.53 +2024-07-27 11:15:07,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=8080.0, ans=0.033 +2024-07-27 11:15:20,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.04 vs. limit=10.54 +2024-07-27 11:15:21,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=13.58 +2024-07-27 11:15:26,215 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.40 vs. limit=13.59 +2024-07-27 11:15:30,060 INFO [train.py:1114] (2/4) Epoch 1, batch 6100, loss[loss=0.3699, simple_loss=0.4181, pruned_loss=0.1609, over 4683.00 frames. ], tot_loss[loss=0.3903, simple_loss=0.4238, pruned_loss=0.1784, over 937391.28 frames. ], batch size: 15, lr: 3.96e-02, grad_scale: 64.0 +2024-07-27 11:15:31,488 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.445e+01 6.771e+01 7.517e+01 8.445e+01 1.300e+02, threshold=1.503e+02, percent-clipped=0.0 +2024-07-27 11:15:32,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=8133.333333333333, ans=0.03277777777777778 +2024-07-27 11:15:36,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.04 vs. limit=13.6 +2024-07-27 11:15:46,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.72 vs. limit=10.56 +2024-07-27 11:15:47,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.16 vs. limit=7.04 +2024-07-27 11:16:02,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=8186.666666666667, ans=0.09899494936611666 +2024-07-27 11:16:04,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=8186.666666666667, ans=0.09899494936611666 +2024-07-27 11:16:05,332 INFO [train.py:1114] (2/4) Epoch 1, batch 6150, loss[loss=0.4917, simple_loss=0.4907, pruned_loss=0.2464, over 3539.00 frames. ], tot_loss[loss=0.3894, simple_loss=0.4232, pruned_loss=0.1778, over 936499.45 frames. ], batch size: 35, lr: 3.96e-02, grad_scale: 64.0 +2024-07-27 11:16:06,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=8200.0, ans=0.12415000000000001 +2024-07-27 11:16:09,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=8200.0, ans=0.125 +2024-07-27 11:16:41,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. 
limit=4.2379999999999995 +2024-07-27 11:16:43,780 INFO [train.py:1114] (2/4) Epoch 1, batch 6200, loss[loss=0.373, simple_loss=0.4118, pruned_loss=0.1671, over 4745.00 frames. ], tot_loss[loss=0.3906, simple_loss=0.4244, pruned_loss=0.1784, over 936124.76 frames. ], batch size: 14, lr: 3.95e-02, grad_scale: 64.0 +2024-07-27 11:16:45,340 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+01 7.091e+01 7.789e+01 8.708e+01 1.298e+02, threshold=1.558e+02, percent-clipped=0.0 +2024-07-27 11:16:56,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.51 vs. limit=10.6 +2024-07-27 11:16:58,130 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=5.715e-01 +2024-07-27 11:17:02,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=8280.0, ans=0.125 +2024-07-27 11:17:10,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.06 vs. limit=13.72 +2024-07-27 11:17:13,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=8293.333333333334, ans=0.125 +2024-07-27 11:17:23,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=8306.666666666666, ans=10.0 +2024-07-27 11:17:26,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=8320.0, ans=0.032 +2024-07-27 11:17:33,959 INFO [train.py:1114] (2/4) Epoch 1, batch 6250, loss[loss=0.3433, simple_loss=0.3856, pruned_loss=0.1505, over 4814.00 frames. ], tot_loss[loss=0.3909, simple_loss=0.4244, pruned_loss=0.1787, over 933148.59 frames. ], batch size: 14, lr: 3.94e-02, grad_scale: 64.0 +2024-07-27 11:17:41,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.27 vs. limit=13.759999999999998 +2024-07-27 11:17:41,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.85 vs. limit=7.086666666666666 +2024-07-27 11:17:52,905 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.82 vs. limit=13.77 +2024-07-27 11:18:09,435 INFO [train.py:1114] (2/4) Epoch 1, batch 6300, loss[loss=0.2958, simple_loss=0.3394, pruned_loss=0.1261, over 4555.00 frames. ], tot_loss[loss=0.3907, simple_loss=0.4242, pruned_loss=0.1786, over 929658.76 frames. ], batch size: 10, lr: 3.94e-02, grad_scale: 64.0 +2024-07-27 11:18:09,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.85 vs. limit=10.65 +2024-07-27 11:18:10,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.284e+01 7.148e+01 7.847e+01 8.773e+01 1.332e+02, threshold=1.569e+02, percent-clipped=0.0 +2024-07-27 11:18:20,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.74 vs. 
limit=13.81 +2024-07-27 11:18:33,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=8440.0, ans=0.0315 +2024-07-27 11:18:33,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.85 vs. limit=13.83 +2024-07-27 11:18:34,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=8440.0, ans=0.125 +2024-07-27 11:18:42,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.58 vs. limit=9.226666666666667 +2024-07-27 11:18:43,767 INFO [train.py:1114] (2/4) Epoch 1, batch 6350, loss[loss=0.3974, simple_loss=0.444, pruned_loss=0.1754, over 4501.00 frames. ], tot_loss[loss=0.3886, simple_loss=0.423, pruned_loss=0.1772, over 933729.30 frames. ], batch size: 21, lr: 3.93e-02, grad_scale: 64.0 +2024-07-27 11:18:43,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8466.666666666666, ans=0.21533333333333332 +2024-07-27 11:18:44,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.12 vs. limit=13.85 +2024-07-27 11:18:56,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=8480.0, ans=0.6032 +2024-07-27 11:19:13,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=8520.0, ans=0.125 +2024-07-27 11:19:19,071 INFO [train.py:1114] (2/4) Epoch 1, batch 6400, loss[loss=0.4185, simple_loss=0.4523, pruned_loss=0.1924, over 4642.00 frames. ], tot_loss[loss=0.3905, simple_loss=0.4242, pruned_loss=0.1784, over 935099.15 frames. ], batch size: 13, lr: 3.92e-02, grad_scale: 64.0 +2024-07-27 11:19:20,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.135e+01 7.649e+01 8.994e+01 1.161e+02, threshold=1.530e+02, percent-clipped=0.0 +2024-07-27 11:19:22,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8533.333333333334, ans=0.21466666666666667 +2024-07-27 11:19:37,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.96 vs. limit=9.273333333333333 +2024-07-27 11:19:40,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.83 vs. limit=13.91 +2024-07-27 11:19:43,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=8560.0, ans=0.009008695652173913 +2024-07-27 11:19:46,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=8560.0, ans=0.031000000000000003 +2024-07-27 11:19:54,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. limit=4.288 +2024-07-27 11:20:01,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.51 vs. 
limit=13.94 +2024-07-27 11:20:02,096 INFO [train.py:1114] (2/4) Epoch 1, batch 6450, loss[loss=0.3467, simple_loss=0.394, pruned_loss=0.1498, over 4595.00 frames. ], tot_loss[loss=0.3914, simple_loss=0.4258, pruned_loss=0.1785, over 938918.66 frames. ], batch size: 21, lr: 3.92e-02, grad_scale: 64.0 +2024-07-27 11:20:17,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=4.2940000000000005 +2024-07-27 11:20:24,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8640.0, ans=0.0 +2024-07-27 11:20:38,003 INFO [train.py:1114] (2/4) Epoch 1, batch 6500, loss[loss=0.499, simple_loss=0.483, pruned_loss=0.2575, over 3547.00 frames. ], tot_loss[loss=0.3899, simple_loss=0.4248, pruned_loss=0.1775, over 940261.69 frames. ], batch size: 36, lr: 3.91e-02, grad_scale: 64.0 +2024-07-27 11:20:39,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.881e+01 7.078e+01 7.610e+01 8.619e+01 1.357e+02, threshold=1.522e+02, percent-clipped=0.0 +2024-07-27 11:20:44,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.50 vs. limit=10.754999999999999 +2024-07-27 11:21:02,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=8706.666666666666, ans=0.125 +2024-07-27 11:21:03,132 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.92 vs. limit=9.353333333333332 +2024-07-27 11:21:04,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=8706.666666666666, ans=0.125 +2024-07-27 11:21:14,507 INFO [train.py:1114] (2/4) Epoch 1, batch 6550, loss[loss=0.3457, simple_loss=0.3717, pruned_loss=0.1598, over 4796.00 frames. ], tot_loss[loss=0.3873, simple_loss=0.4233, pruned_loss=0.1756, over 943158.49 frames. ], batch size: 11, lr: 3.91e-02, grad_scale: 64.0 +2024-07-27 11:21:20,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=8746.666666666666, ans=0.025 +2024-07-27 11:21:30,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=8760.0, ans=0.025 +2024-07-27 11:21:37,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=10.79 +2024-07-27 11:21:37,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=8773.333333333334, ans=0.125 +2024-07-27 11:21:40,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=10.79 +2024-07-27 11:21:41,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.89 vs. 
limit=7.196666666666666 +2024-07-27 11:21:46,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8786.666666666666, ans=0.21213333333333334 +2024-07-27 11:21:49,230 INFO [train.py:1114] (2/4) Epoch 1, batch 6600, loss[loss=0.4335, simple_loss=0.4606, pruned_loss=0.2032, over 4929.00 frames. ], tot_loss[loss=0.3861, simple_loss=0.4223, pruned_loss=0.1749, over 945049.76 frames. ], batch size: 14, lr: 3.90e-02, grad_scale: 64.0 +2024-07-27 11:21:50,623 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.093e+01 7.005e+01 7.535e+01 8.213e+01 1.214e+02, threshold=1.507e+02, percent-clipped=0.0 +2024-07-27 11:21:53,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.57 vs. limit=14.1 +2024-07-27 11:22:08,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=8826.666666666666, ans=0.125 +2024-07-27 11:22:21,268 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:22:21,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=8853.333333333334, ans=0.125 +2024-07-27 11:22:25,932 INFO [train.py:1114] (2/4) Epoch 1, batch 6650, loss[loss=0.3849, simple_loss=0.4252, pruned_loss=0.1723, over 4583.00 frames. ], tot_loss[loss=0.3878, simple_loss=0.4236, pruned_loss=0.176, over 943546.83 frames. ], batch size: 17, lr: 3.89e-02, grad_scale: 64.0 +2024-07-27 11:22:35,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=8880.0, ans=0.125 +2024-07-27 11:22:38,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=8880.0, ans=0.2112 +2024-07-27 11:22:39,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.47 vs. limit=7.557333333333334 +2024-07-27 11:22:48,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=8893.333333333334, ans=0.04949747468305833 +2024-07-27 11:22:49,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.78 vs. limit=14.17 +2024-07-27 11:22:49,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=8906.666666666666, ans=0.02955555555555556 +2024-07-27 11:23:05,440 INFO [train.py:1114] (2/4) Epoch 1, batch 6700, loss[loss=0.3863, simple_loss=0.419, pruned_loss=0.1768, over 4735.00 frames. ], tot_loss[loss=0.3881, simple_loss=0.4238, pruned_loss=0.1762, over 942615.54 frames. 
], batch size: 19, lr: 3.89e-02, grad_scale: 64.0 +2024-07-27 11:23:06,710 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.382e+01 7.413e+01 7.948e+01 9.118e+01 1.138e+02, threshold=1.590e+02, percent-clipped=0.0 +2024-07-27 11:23:12,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=8946.666666666666, ans=0.0 +2024-07-27 11:23:13,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.81 vs. limit=9.473333333333333 +2024-07-27 11:23:15,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys.whitening_limit, batch_count=8946.666666666666, ans=4.3420000000000005 +2024-07-27 11:23:21,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=8960.0, ans=0.008921739130434782 +2024-07-27 11:23:26,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=8973.333333333334, ans=0.125 +2024-07-27 11:23:28,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.92 vs. limit=14.23 +2024-07-27 11:23:42,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=8986.666666666666, ans=0.125 +2024-07-27 11:23:44,437 INFO [train.py:1114] (2/4) Epoch 1, batch 6750, loss[loss=0.4229, simple_loss=0.4415, pruned_loss=0.2022, over 4252.00 frames. ], tot_loss[loss=0.385, simple_loss=0.4211, pruned_loss=0.1744, over 940983.84 frames. ], batch size: 26, lr: 3.88e-02, grad_scale: 128.0 +2024-07-27 11:23:56,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=9013.333333333334, ans=0.125 +2024-07-27 11:24:07,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=9040.0, ans=0.029 +2024-07-27 11:24:07,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=19.38 vs. limit=10.89 +2024-07-27 11:24:14,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.85 vs. limit=14.29 +2024-07-27 11:24:17,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9053.333333333334, ans=0.125 +2024-07-27 11:24:18,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9066.666666666666, ans=0.20933333333333334 +2024-07-27 11:24:19,785 INFO [train.py:1114] (2/4) Epoch 1, batch 6800, loss[loss=0.4143, simple_loss=0.4456, pruned_loss=0.1915, over 4642.00 frames. ], tot_loss[loss=0.3844, simple_loss=0.4211, pruned_loss=0.1739, over 939342.13 frames. 
], batch size: 13, lr: 3.87e-02, grad_scale: 128.0 +2024-07-27 11:24:21,073 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.897e+01 7.261e+01 7.946e+01 8.901e+01 1.743e+02, threshold=1.589e+02, percent-clipped=1.0 +2024-07-27 11:24:30,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.22 vs. limit=7.632 +2024-07-27 11:24:31,298 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.343e-02 +2024-07-27 11:24:36,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=9093.333333333334, ans=0.028777777777777777 +2024-07-27 11:24:38,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=9093.333333333334, ans=0.008892753623188407 +2024-07-27 11:24:44,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.42 vs. limit=14.33 +2024-07-27 11:24:51,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=9120.0, ans=0.125 +2024-07-27 11:24:53,741 INFO [train.py:1114] (2/4) Epoch 1, batch 6850, loss[loss=0.4075, simple_loss=0.4453, pruned_loss=0.1849, over 4693.00 frames. ], tot_loss[loss=0.3838, simple_loss=0.4203, pruned_loss=0.1737, over 941185.82 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 64.0 +2024-07-27 11:24:54,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=9133.333333333334, ans=0.125 +2024-07-27 11:24:55,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=9133.333333333334, ans=0.125 +2024-07-27 11:25:02,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.33 vs. limit=7.286666666666667 +2024-07-27 11:25:18,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.92 vs. limit=14.379999999999999 +2024-07-27 11:25:26,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9186.666666666666, ans=0.125 +2024-07-27 11:25:28,388 INFO [train.py:1114] (2/4) Epoch 1, batch 6900, loss[loss=0.3668, simple_loss=0.4087, pruned_loss=0.1625, over 4963.00 frames. ], tot_loss[loss=0.3872, simple_loss=0.4228, pruned_loss=0.1758, over 943406.19 frames. ], batch size: 13, lr: 3.86e-02, grad_scale: 64.0 +2024-07-27 11:25:29,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=9200.0, ans=0.5780000000000001 +2024-07-27 11:25:30,371 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.982e+01 7.530e+01 8.620e+01 1.386e+02, threshold=1.506e+02, percent-clipped=0.0 +2024-07-27 11:25:30,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9200.0, ans=0.125 +2024-07-27 11:25:38,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.27 vs. 
limit=14.41 +2024-07-27 11:25:43,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=9226.666666666666, ans=0.00886376811594203 +2024-07-27 11:25:58,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=9253.333333333334, ans=0.02811111111111111 +2024-07-27 11:26:02,725 INFO [train.py:1114] (2/4) Epoch 1, batch 6950, loss[loss=0.3741, simple_loss=0.4013, pruned_loss=0.1735, over 4530.00 frames. ], tot_loss[loss=0.3855, simple_loss=0.4213, pruned_loss=0.1748, over 940371.81 frames. ], batch size: 10, lr: 3.85e-02, grad_scale: 64.0 +2024-07-27 11:26:15,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=14.46 +2024-07-27 11:26:15,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=9280.0, ans=0.008852173913043479 +2024-07-27 11:26:29,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=9306.666666666666, ans=0.125 +2024-07-27 11:26:37,596 INFO [train.py:1114] (2/4) Epoch 1, batch 7000, loss[loss=0.4293, simple_loss=0.451, pruned_loss=0.2038, over 4665.00 frames. ], tot_loss[loss=0.3847, simple_loss=0.4208, pruned_loss=0.1743, over 938457.96 frames. ], batch size: 17, lr: 3.85e-02, grad_scale: 64.0 +2024-07-27 11:26:38,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=9333.333333333334, ans=0.5733333333333334 +2024-07-27 11:26:39,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9333.333333333334, ans=0.0 +2024-07-27 11:26:39,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.819e+01 7.301e+01 8.158e+01 9.084e+01 2.160e+02, threshold=1.632e+02, percent-clipped=1.0 +2024-07-27 11:26:40,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=9333.333333333334, ans=0.008840579710144927 +2024-07-27 11:26:43,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.46 vs. limit=5.866666666666667 +2024-07-27 11:26:44,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.93 vs. limit=14.51 +2024-07-27 11:27:03,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=9360.0, ans=0.008834782608695652 +2024-07-27 11:27:28,077 INFO [train.py:1114] (2/4) Epoch 1, batch 7050, loss[loss=0.4481, simple_loss=0.4708, pruned_loss=0.2127, over 4671.00 frames. ], tot_loss[loss=0.3827, simple_loss=0.4198, pruned_loss=0.1728, over 941753.57 frames. 
], batch size: 19, lr: 3.84e-02, grad_scale: 64.0
+[ interleaved per-parameter scaling.py diagnostics (ScheduledFloat values, Whitening metrics) omitted for this span; train.py/optim.py summary lines kept ]
+2024-07-27 11:28:15,350 INFO [train.py:1114] (2/4) Epoch 1, batch 7100, loss[loss=0.4114, simple_loss=0.4558, pruned_loss=0.1834, over 4786.00 frames. ], tot_loss[loss=0.3825, simple_loss=0.4202, pruned_loss=0.1724, over 936874.61 frames. ], batch size: 15, lr: 3.83e-02, grad_scale: 64.0
+2024-07-27 11:28:17,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.989e+01 7.688e+01 8.481e+01 1.289e+02, threshold=1.538e+02, percent-clipped=0.0
+2024-07-27 11:28:50,197 INFO [train.py:1114] (2/4) Epoch 1, batch 7150, loss[loss=0.3772, simple_loss=0.4222, pruned_loss=0.1661, over 4503.00 frames. ], tot_loss[loss=0.3792, simple_loss=0.4172, pruned_loss=0.1706, over 937857.54 frames. ], batch size: 21, lr: 3.83e-02, grad_scale: 64.0
+2024-07-27 11:29:26,389 INFO [train.py:1114] (2/4) Epoch 1, batch 7200, loss[loss=0.3892, simple_loss=0.4211, pruned_loss=0.1786, over 4809.00 frames. ], tot_loss[loss=0.38, simple_loss=0.4182, pruned_loss=0.1709, over 938442.64 frames. ], batch size: 15, lr: 3.82e-02, grad_scale: 64.0
+2024-07-27 11:29:28,267 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.182e+01 6.919e+01 7.589e+01 8.160e+01 1.329e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 11:31:01,716 INFO [train.py:1114] (2/4) Epoch 1, batch 7250, loss[loss=0.2911, simple_loss=0.3576, pruned_loss=0.1123, over 4870.00 frames. ], tot_loss[loss=0.3781, simple_loss=0.4165, pruned_loss=0.1698, over 939934.52 frames. ], batch size: 12, lr: 3.82e-02, grad_scale: 64.0
+2024-07-27 11:31:37,466 INFO [train.py:1114] (2/4) Epoch 1, batch 7300, loss[loss=0.3864, simple_loss=0.4069, pruned_loss=0.1829, over 4838.00 frames. ], tot_loss[loss=0.3799, simple_loss=0.4175, pruned_loss=0.1711, over 940394.80 frames. ], batch size: 12, lr: 3.81e-02, grad_scale: 64.0
+2024-07-27 11:31:39,741 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.899e+01 6.987e+01 7.392e+01 8.309e+01 1.190e+02, threshold=1.478e+02, percent-clipped=0.0
+2024-07-27 11:32:17,894 INFO [train.py:1114] (2/4) Epoch 1, batch 7350, loss[loss=0.396, simple_loss=0.4118, pruned_loss=0.1901, over 4633.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.416, pruned_loss=0.1698, over 939354.85 frames. ], batch size: 12, lr: 3.80e-02, grad_scale: 64.0
+2024-07-27 11:32:55,343 INFO [train.py:1114] (2/4) Epoch 1, batch 7400, loss[loss=0.3754, simple_loss=0.4148, pruned_loss=0.1681, over 4699.00 frames. ], tot_loss[loss=0.3771, simple_loss=0.4155, pruned_loss=0.1693, over 940446.77 frames. ], batch size: 13, lr: 3.80e-02, grad_scale: 64.0
+2024-07-27 11:32:57,410 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.927e+01 7.410e+01 8.183e+01 1.194e+02, threshold=1.482e+02, percent-clipped=0.0
+2024-07-27 11:33:37,724 INFO [train.py:1114] (2/4) Epoch 1, batch 7450, loss[loss=0.3217, simple_loss=0.374, pruned_loss=0.1347, over 4621.00 frames. ], tot_loss[loss=0.3766, simple_loss=0.4148, pruned_loss=0.1692, over 937699.04 frames. ], batch size: 11, lr: 3.79e-02, grad_scale: 64.0
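The recurring `ScheduledFloat: name=..., batch_count=..., ans=...` diagnostics summarized above come from the recipe's scaling.py, which anneals regularization knobs (dropout rates, skip rates, balancer probabilities) as piecewise-linear functions of the global batch count. A minimal sketch of that evaluation follows; the class name and breakpoints are illustrative assumptions, chosen so the example schedule reproduces dropout values actually recorded in this log (e.g. ans=0.2036 at batch_count=9640.0).

```python
# Minimal sketch of a piecewise-linear scheduled hyperparameter, in the spirit
# of the ScheduledFloat logged by scaling.py. Class name and breakpoints are
# illustrative assumptions, not values read out of this checkpoint.
import bisect

class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs sorted by batch_count
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count) - 1
        x0, x1 = self.xs[i], self.xs[i + 1]
        y0, y1 = self.ys[i], self.ys[i + 1]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# A dropout annealed from 0.3 to 0.1 over the first 20k batches reproduces the
# dropout_p values this log records:
dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(9640.0))  # 0.2036, matching "dropout_p, batch_count=9640.0, ans=0.2036"
```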
+[ interleaved per-parameter scaling.py diagnostics (ScheduledFloat, Whitening, WithLoss) omitted for this span; train.py/optim.py summary lines kept ]
+2024-07-27 11:34:39,726 INFO [train.py:1114] (2/4) Epoch 1, batch 7500, loss[loss=0.4584, simple_loss=0.4693, pruned_loss=0.2238, over 3625.00 frames. ], tot_loss[loss=0.3782, simple_loss=0.4161, pruned_loss=0.1701, over 936478.11 frames. ], batch size: 37, lr: 3.78e-02, grad_scale: 64.0
+2024-07-27 11:34:41,638 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.970e+01 7.592e+01 8.473e+01 1.449e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 11:35:29,207 INFO [train.py:1114] (2/4) Epoch 1, batch 7550, loss[loss=0.3706, simple_loss=0.4132, pruned_loss=0.164, over 4640.00 frames. ], tot_loss[loss=0.381, simple_loss=0.4187, pruned_loss=0.1717, over 936390.19 frames. ], batch size: 17, lr: 3.78e-02, grad_scale: 64.0
+2024-07-27 11:36:10,613 INFO [train.py:1114] (2/4) Epoch 1, batch 7600, loss[loss=0.3706, simple_loss=0.4211, pruned_loss=0.1601, over 4814.00 frames. ], tot_loss[loss=0.3798, simple_loss=0.4177, pruned_loss=0.171, over 938151.90 frames. ], batch size: 14, lr: 3.77e-02, grad_scale: 64.0
+2024-07-27 11:36:12,585 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.409e+01 6.929e+01 7.591e+01 8.810e+01 1.172e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 11:37:02,936 INFO [train.py:1114] (2/4) Epoch 1, batch 7650, loss[loss=0.3399, simple_loss=0.3746, pruned_loss=0.1526, over 4934.00 frames. ], tot_loss[loss=0.3795, simple_loss=0.4172, pruned_loss=0.1708, over 936930.38 frames. ], batch size: 12, lr: 3.77e-02, grad_scale: 64.0
+2024-07-27 11:37:37,473 INFO [train.py:1114] (2/4) Epoch 1, batch 7700, loss[loss=0.3735, simple_loss=0.4261, pruned_loss=0.1605, over 4690.00 frames. ], tot_loss[loss=0.381, simple_loss=0.4188, pruned_loss=0.1716, over 934216.21 frames. ], batch size: 13, lr: 3.76e-02, grad_scale: 64.0
+2024-07-27 11:37:39,433 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.134e+01 7.002e+01 7.732e+01 8.804e+01 1.160e+02, threshold=1.546e+02, percent-clipped=0.0
+2024-07-27 11:38:10,915 INFO [train.py:1114] (2/4) Epoch 1, batch 7750, loss[loss=0.4103, simple_loss=0.442, pruned_loss=0.1893, over 4926.00 frames. ], tot_loss[loss=0.3806, simple_loss=0.4189, pruned_loss=0.1711, over 935318.73 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0
+2024-07-27 11:38:45,451 INFO [train.py:1114] (2/4) Epoch 1, batch 7800, loss[loss=0.412, simple_loss=0.4528, pruned_loss=0.1855, over 4658.00 frames. ], tot_loss[loss=0.3807, simple_loss=0.4194, pruned_loss=0.171, over 937004.51 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0
+2024-07-27 11:38:47,309 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.890e+01 7.293e+01 8.300e+01 1.085e+02, threshold=1.459e+02, percent-clipped=0.0
+2024-07-27 11:39:20,646 INFO [train.py:1114] (2/4) Epoch 1, batch 7850, loss[loss=0.3281, simple_loss=0.3722, pruned_loss=0.142, over 4520.00 frames. ], tot_loss[loss=0.3775, simple_loss=0.4167, pruned_loss=0.1691, over 936139.29 frames. ], batch size: 10, lr: 3.74e-02, grad_scale: 64.0
+2024-07-27 11:39:54,394 INFO [train.py:1114] (2/4) Epoch 1, batch 7900, loss[loss=0.3554, simple_loss=0.4198, pruned_loss=0.1455, over 4863.00 frames. ], tot_loss[loss=0.3779, simple_loss=0.4176, pruned_loss=0.1691, over 933219.35 frames. ], batch size: 14, lr: 3.73e-02, grad_scale: 64.0
+2024-07-27 11:39:56,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.883e+01 7.101e+01 7.733e+01 8.610e+01 1.628e+02, threshold=1.547e+02, percent-clipped=1.0
+2024-07-27 11:40:28,110 INFO [train.py:1114] (2/4) Epoch 1, batch 7950, loss[loss=0.4918, simple_loss=0.4792, pruned_loss=0.2522, over 3383.00 frames. ], tot_loss[loss=0.3769, simple_loss=0.4173, pruned_loss=0.1683, over 935457.22 frames. ], batch size: 35, lr: 3.73e-02, grad_scale: 64.0
+2024-07-27 11:41:44,738 INFO [train.py:1114] (2/4) Epoch 1, batch 8000, loss[loss=0.3198, simple_loss=0.375, pruned_loss=0.1323, over 4617.00 frames. ], tot_loss[loss=0.3746, simple_loss=0.415, pruned_loss=0.1671, over 934511.22 frames. ], batch size: 11, lr: 3.72e-02, grad_scale: 64.0
+2024-07-27 11:41:46,747 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.949e+01 6.868e+01 7.730e+01 8.687e+01 2.055e+02, threshold=1.546e+02, percent-clipped=1.0
+2024-07-27 11:42:18,057 INFO [train.py:1114] (2/4) Epoch 1, batch 8050, loss[loss=0.3332, simple_loss=0.4012, pruned_loss=0.1326, over 4805.00 frames. ], tot_loss[loss=0.3745, simple_loss=0.4149, pruned_loss=0.167, over 934023.99 frames. ], batch size: 14, lr: 3.72e-02, grad_scale: 64.0
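The optim.py WARNING lines report the min/25%/50%/75%/max quartiles of recently observed gradient norms together with a clipping threshold; in every line above the threshold equals Clipping_scale times the median (e.g. 2.0 x 7.730e+01 ~ 1.546e+02), and percent-clipped appears to track how often the norm exceeded it. A simplified sketch of that bookkeeping, in the spirit of icefall's ScaledAdam but not its exact implementation:

```python
# Sketch of quartile-based gradient clipping matching the optim.py WARNINGs.
# Assumption: threshold = clipping_scale * median of a window of recent global
# gradient norms (simplified relative to the actual ScaledAdam optimizer).
from collections import deque
import torch

def clip_step(params, recent_norms: deque, clipping_scale: float = 2.0):
    grads = [p.grad for p in params if p.grad is not None]
    norm = torch.sqrt(sum(g.pow(2).sum() for g in grads))  # global grad norm
    recent_norms.append(norm.item())
    xs = sorted(recent_norms)
    n = len(xs) - 1
    quartiles = [xs[round(q * n)] for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
    threshold = clipping_scale * quartiles[2]  # 2.0 x median, as in the log
    if norm > threshold:  # this step would contribute to percent-clipped
        for g in grads:
            g.mul_(threshold / norm)
    return quartiles, threshold
```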
+[ interleaved per-parameter scaling.py diagnostics (ScheduledFloat, Whitening, WithLoss) omitted for this span; train.py/optim.py summary lines kept ]
+2024-07-27 11:42:52,708 INFO [train.py:1114] (2/4) Epoch 1, batch 8100, loss[loss=0.4076, simple_loss=0.4381, pruned_loss=0.1886, over 4805.00 frames. ], tot_loss[loss=0.3757, simple_loss=0.4162, pruned_loss=0.1677, over 934007.23 frames. ], batch size: 15, lr: 3.71e-02, grad_scale: 64.0
+2024-07-27 11:42:54,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.005e+01 7.921e+01 8.722e+01 1.648e+02, threshold=1.584e+02, percent-clipped=1.0
+2024-07-27 11:43:26,152 INFO [train.py:1114] (2/4) Epoch 1, batch 8150, loss[loss=0.407, simple_loss=0.4457, pruned_loss=0.1842, over 4796.00 frames. ], tot_loss[loss=0.3748, simple_loss=0.415, pruned_loss=0.1672, over 937567.31 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0
+2024-07-27 11:44:06,716 INFO [train.py:1114] (2/4) Epoch 1, batch 8200, loss[loss=0.4082, simple_loss=0.4549, pruned_loss=0.1807, over 4809.00 frames. ], tot_loss[loss=0.3756, simple_loss=0.416, pruned_loss=0.1676, over 938616.21 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0
+2024-07-27 11:44:08,827 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 6.954e+01 7.394e+01 8.427e+01 2.023e+02, threshold=1.479e+02, percent-clipped=1.0
+2024-07-27 11:44:40,444 INFO [train.py:1114] (2/4) Epoch 1, batch 8250, loss[loss=0.419, simple_loss=0.455, pruned_loss=0.1915, over 4897.00 frames. ], tot_loss[loss=0.3758, simple_loss=0.4161, pruned_loss=0.1678, over 938864.74 frames. ], batch size: 13, lr: 3.69e-02, grad_scale: 64.0
+2024-07-27 11:46:40,202 INFO [train.py:1114] (2/4) Epoch 1, batch 8300, loss[loss=0.3709, simple_loss=0.4186, pruned_loss=0.1616, over 4911.00 frames. ], tot_loss[loss=0.3754, simple_loss=0.4162, pruned_loss=0.1674, over 938459.89 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0
+2024-07-27 11:46:42,161 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 6.941e+01 7.717e+01 8.510e+01 1.243e+02, threshold=1.543e+02, percent-clipped=0.0
+2024-07-27 11:47:28,636 INFO [train.py:1114] (2/4) Epoch 1, batch 8350, loss[loss=0.409, simple_loss=0.459, pruned_loss=0.1795, over 4798.00 frames. ], tot_loss[loss=0.3743, simple_loss=0.4156, pruned_loss=0.1665, over 941582.56 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0
+2024-07-27 11:48:11,887 INFO [train.py:1114] (2/4) Epoch 1, batch 8400, loss[loss=0.3237, simple_loss=0.3684, pruned_loss=0.1395, over 4779.00 frames. ], tot_loss[loss=0.3749, simple_loss=0.4161, pruned_loss=0.1668, over 940168.02 frames. ], batch size: 12, lr: 3.67e-02, grad_scale: 64.0
+2024-07-27 11:48:13,820 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.876e+01 7.080e+01 7.641e+01 8.587e+01 1.412e+02, threshold=1.528e+02, percent-clipped=0.0
+2024-07-27 11:48:45,018 INFO [train.py:1114] (2/4) Epoch 1, batch 8450, loss[loss=0.3798, simple_loss=0.4215, pruned_loss=0.1691, over 4809.00 frames. ], tot_loss[loss=0.3755, simple_loss=0.4166, pruned_loss=0.1672, over 939021.69 frames. ], batch size: 15, lr: 3.67e-02, grad_scale: 64.0
+2024-07-27 11:49:20,003 INFO [train.py:1114] (2/4) Epoch 1, batch 8500, loss[loss=0.3115, simple_loss=0.3533, pruned_loss=0.1349, over 4607.00 frames. ], tot_loss[loss=0.373, simple_loss=0.4142, pruned_loss=0.1659, over 938788.16 frames. ], batch size: 11, lr: 3.66e-02, grad_scale: 64.0
+2024-07-27 11:49:21,905 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.800e+01 6.867e+01 7.338e+01 8.262e+01 1.317e+02, threshold=1.468e+02, percent-clipped=0.0
+2024-07-27 11:49:54,525 INFO [train.py:1114] (2/4) Epoch 1, batch 8550, loss[loss=0.3279, simple_loss=0.3659, pruned_loss=0.1449, over 4800.00 frames. ], tot_loss[loss=0.371, simple_loss=0.4125, pruned_loss=0.1647, over 939428.95 frames. ], batch size: 11, lr: 3.65e-02, grad_scale: 64.0
+2024-07-27 11:50:28,078 INFO [train.py:1114] (2/4) Epoch 1, batch 8600, loss[loss=0.4098, simple_loss=0.4492, pruned_loss=0.1852, over 4795.00 frames. ], tot_loss[loss=0.3704, simple_loss=0.4116, pruned_loss=0.1647, over 938900.46 frames. ], batch size: 15, lr: 3.65e-02, grad_scale: 64.0
+2024-07-27 11:50:31,113 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.520e+01 6.717e+01 7.221e+01 8.025e+01 1.285e+02, threshold=1.444e+02, percent-clipped=0.0
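The train.py:1114 summary lines have a fixed shape, so the loss curve is easy to recover mechanically: over this stretch tot_loss drifts from about 0.3825 at batch 7100 down to about 0.363 by batches 9200-9400 while the learning rate decays from 3.84e-02 to 3.56e-02. A small parser sketch (parse_log and its regex are hypothetical helpers written against the line format shown above):

```python
# Sketch: recover the tot_loss curve from the train.py summary lines, e.g.
# "Epoch 1, batch 8500, ... tot_loss[loss=0.373, ...], batch size: 11, lr: 3.66e-02".
import re

PATTERN = re.compile(
    r"Epoch (\d+), batch (\d+),.*?tot_loss\[loss=([\d.]+).*?lr: ([\d.e-]+)"
)

def parse_log(path):
    points = []
    with open(path) as f:
        for line in f:
            m = PATTERN.search(line)
            if m:
                epoch, batch, tot_loss, lr = m.groups()
                points.append((int(epoch), int(batch), float(tot_loss), float(lr)))
    return points
```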
+[ interleaved per-parameter scaling.py diagnostics (ScheduledFloat, Whitening, WithLoss) omitted for this span; train.py/optim.py/zipformer.py summary lines kept ]
+2024-07-27 11:51:02,923 INFO [train.py:1114] (2/4) Epoch 1, batch 8650, loss[loss=0.4005, simple_loss=0.4309, pruned_loss=0.185, over 4898.00 frames. ], tot_loss[loss=0.3696, simple_loss=0.4113, pruned_loss=0.164, over 940111.76 frames. ], batch size: 15, lr: 3.64e-02, grad_scale: 64.0
+2024-07-27 11:51:44,989 INFO [train.py:1114] (2/4) Epoch 1, batch 8700, loss[loss=0.38, simple_loss=0.4102, pruned_loss=0.1749, over 4753.00 frames. ], tot_loss[loss=0.371, simple_loss=0.4127, pruned_loss=0.1647, over 937560.45 frames. ], batch size: 13, lr: 3.64e-02, grad_scale: 64.0
+2024-07-27 11:51:46,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.502e+01 6.768e+01 7.395e+01 8.572e+01 1.594e+02, threshold=1.479e+02, percent-clipped=2.0
+2024-07-27 11:52:21,396 INFO [train.py:1114] (2/4) Epoch 1, batch 8750, loss[loss=0.4043, simple_loss=0.446, pruned_loss=0.1812, over 4696.00 frames. ], tot_loss[loss=0.3704, simple_loss=0.4125, pruned_loss=0.1641, over 936036.30 frames. ], batch size: 15, lr: 3.63e-02, grad_scale: 64.0
+2024-07-27 11:52:55,481 INFO [train.py:1114] (2/4) Epoch 1, batch 8800, loss[loss=0.3458, simple_loss=0.4051, pruned_loss=0.1433, over 4935.00 frames. ], tot_loss[loss=0.3691, simple_loss=0.412, pruned_loss=0.1632, over 937100.10 frames. ], batch size: 14, lr: 3.62e-02, grad_scale: 64.0
+2024-07-27 11:52:57,640 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 7.237e+01 7.954e+01 8.853e+01 1.433e+02, threshold=1.591e+02, percent-clipped=0.0
+2024-07-27 11:53:29,080 INFO [train.py:1114] (2/4) Epoch 1, batch 8850, loss[loss=0.3785, simple_loss=0.4146, pruned_loss=0.1712, over 4487.00 frames. ], tot_loss[loss=0.3685, simple_loss=0.4107, pruned_loss=0.1632, over 932056.06 frames. ], batch size: 21, lr: 3.62e-02, grad_scale: 128.0
+2024-07-27 11:54:02,399 INFO [train.py:1114] (2/4) Epoch 1, batch 8900, loss[loss=0.3373, simple_loss=0.3896, pruned_loss=0.1425, over 4937.00 frames. ], tot_loss[loss=0.3701, simple_loss=0.4116, pruned_loss=0.1643, over 929882.74 frames. ], batch size: 12, lr: 3.61e-02, grad_scale: 128.0
+2024-07-27 11:54:04,467 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.705e+01 6.769e+01 7.408e+01 8.026e+01 1.011e+02, threshold=1.482e+02, percent-clipped=0.0
+2024-07-27 11:54:48,597 INFO [train.py:1114] (2/4) Epoch 1, batch 8950, loss[loss=0.4639, simple_loss=0.4841, pruned_loss=0.2219, over 4504.00 frames. ], tot_loss[loss=0.37, simple_loss=0.4116, pruned_loss=0.1643, over 930810.06 frames. ], batch size: 21, lr: 3.61e-02, grad_scale: 128.0
+2024-07-27 11:55:31,965 INFO [train.py:1114] (2/4) Epoch 1, batch 9000, loss[loss=0.387, simple_loss=0.4308, pruned_loss=0.1716, over 4637.00 frames. ], tot_loss[loss=0.3676, simple_loss=0.4097, pruned_loss=0.1628, over 933972.99 frames. ], batch size: 12, lr: 3.60e-02, grad_scale: 64.0
+2024-07-27 11:55:31,966 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 11:55:41,819 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.0863, 3.3540, 2.9495, 3.2164], device='cuda:2')
+2024-07-27 11:55:42,023 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.6158, 5.3185, 4.7615, 5.1180], device='cuda:2')
+2024-07-27 11:55:45,436 INFO [train.py:1146] (2/4) Epoch 1, validation: loss=0.2917, simple_loss=0.3779, pruned_loss=0.1028, over 944034.00 frames.
+2024-07-27 11:55:45,437 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-27 11:55:48,959 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.414e+01 6.571e+01 7.230e+01 7.907e+01 1.156e+02, threshold=1.446e+02, percent-clipped=0.0
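During the validation pass at batch 9000 above, zipformer.py:1858 prints one attn_weights_entropy tensor per logged attention module; the four values are presumably per-head averages of the entropy of each head's attention distribution. A sketch under that assumption (the shape convention below is assumed, not taken from the log):

```python
# Sketch of the per-head attention-entropy diagnostic printed during
# validation. Assumed semantics: one mean entropy value per attention head.
import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, batch, query_len, key_len), rows summing to 1
    p = attn_weights.clamp(min=1e-20)
    entropy = -(p * p.log()).sum(dim=-1)  # entropy of each query's distribution
    return entropy.mean(dim=(1, 2))       # one averaged value per head
```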
+[ interleaved per-parameter scaling.py diagnostics (ScheduledFloat, Whitening, WithLoss) omitted for this span; train.py/optim.py summary lines kept ]
+2024-07-27 11:56:21,461 INFO [train.py:1114] (2/4) Epoch 1, batch 9050, loss[loss=0.3216, simple_loss=0.365, pruned_loss=0.1391, over 4548.00 frames. ], tot_loss[loss=0.3671, simple_loss=0.4095, pruned_loss=0.1624, over 934712.63 frames. ], batch size: 10, lr: 3.59e-02, grad_scale: 64.0
+2024-07-27 11:56:54,153 INFO [train.py:1114] (2/4) Epoch 1, batch 9100, loss[loss=0.4031, simple_loss=0.4243, pruned_loss=0.191, over 4931.00 frames. ], tot_loss[loss=0.3657, simple_loss=0.4086, pruned_loss=0.1614, over 937120.32 frames. ], batch size: 14, lr: 3.59e-02, grad_scale: 64.0
+2024-07-27 11:57:01,201 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.077e+01 6.999e+01 7.542e+01 8.527e+01 1.258e+02, threshold=1.508e+02, percent-clipped=0.0
+2024-07-27 11:57:31,654 INFO [train.py:1114] (2/4) Epoch 1, batch 9150, loss[loss=0.3425, simple_loss=0.4072, pruned_loss=0.1389, over 4808.00 frames. ], tot_loss[loss=0.3659, simple_loss=0.4087, pruned_loss=0.1616, over 936081.24 frames. ], batch size: 14, lr: 3.58e-02, grad_scale: 64.0
+2024-07-27 11:58:04,622 INFO [train.py:1114] (2/4) Epoch 1, batch 9200, loss[loss=0.305, simple_loss=0.3553, pruned_loss=0.1273, over 4855.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.4064, pruned_loss=0.16, over 937809.69 frames. ], batch size: 12, lr: 3.58e-02, grad_scale: 64.0
+2024-07-27 11:58:07,229 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.834e+01 6.731e+01 7.265e+01 8.123e+01 1.608e+02, threshold=1.453e+02, percent-clipped=1.0
+2024-07-27 11:58:40,674 INFO [train.py:1114] (2/4) Epoch 1, batch 9250, loss[loss=0.3588, simple_loss=0.421, pruned_loss=0.1483, over 4631.00 frames. ], tot_loss[loss=0.3639, simple_loss=0.4073, pruned_loss=0.1602, over 938519.62 frames. ], batch size: 13, lr: 3.57e-02, grad_scale: 64.0
+2024-07-27 11:59:14,676 INFO [train.py:1114] (2/4) Epoch 1, batch 9300, loss[loss=0.3159, simple_loss=0.3722, pruned_loss=0.1298, over 4779.00 frames. ], tot_loss[loss=0.3629, simple_loss=0.4066, pruned_loss=0.1596, over 938107.38 frames. ], batch size: 12, lr: 3.57e-02, grad_scale: 64.0
+2024-07-27 11:59:17,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.284e+01 6.919e+01 7.820e+01 8.678e+01 1.247e+02, threshold=1.564e+02, percent-clipped=0.0
+2024-07-27 11:59:48,173 INFO [train.py:1114] (2/4) Epoch 1, batch 9350, loss[loss=0.3491, simple_loss=0.3854, pruned_loss=0.1564, over 4793.00 frames. ], tot_loss[loss=0.3632, simple_loss=0.4064, pruned_loss=0.16, over 934666.97 frames. ], batch size: 11, lr: 3.56e-02, grad_scale: 64.0
+2024-07-27 12:00:22,101 INFO [train.py:1114] (2/4) Epoch 1, batch 9400, loss[loss=0.3241, simple_loss=0.3865, pruned_loss=0.1309, over 4688.00 frames. ], tot_loss[loss=0.3627, simple_loss=0.4064, pruned_loss=0.1595, over 932355.37 frames.
], batch size: 13, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:00:24,570 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.848e+01 6.575e+01 7.346e+01 8.658e+01 2.018e+02, threshold=1.469e+02, percent-clipped=2.0 +2024-07-27 12:00:40,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=12560.0, ans=0.008139130434782609 +2024-07-27 12:00:43,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=12573.333333333334, ans=0.0 +2024-07-27 12:00:44,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=12573.333333333334, ans=0.008136231884057972 +2024-07-27 12:00:48,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=12573.333333333334, ans=0.025 +2024-07-27 12:00:50,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=12586.666666666666, ans=0.3888 +2024-07-27 12:00:57,887 INFO [train.py:1114] (2/4) Epoch 1, batch 9450, loss[loss=0.3381, simple_loss=0.3636, pruned_loss=0.1563, over 4798.00 frames. ], tot_loss[loss=0.3617, simple_loss=0.4058, pruned_loss=0.1588, over 931902.60 frames. ], batch size: 11, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:01:04,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=12600.0, ans=0.008130434782608695 +2024-07-27 12:01:04,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12600.0, ans=0.174 +2024-07-27 12:01:13,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12626.666666666666, ans=0.17373333333333335 +2024-07-27 12:01:16,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=12626.666666666666, ans=0.125 +2024-07-27 12:01:16,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=12626.666666666666, ans=0.0 +2024-07-27 12:01:33,363 INFO [train.py:1114] (2/4) Epoch 1, batch 9500, loss[loss=0.3054, simple_loss=0.3666, pruned_loss=0.1221, over 4706.00 frames. ], tot_loss[loss=0.3622, simple_loss=0.4061, pruned_loss=0.1591, over 934086.42 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:01:35,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.67 vs. limit=6.533333333333333 +2024-07-27 12:01:35,890 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.605e+01 6.746e+01 7.341e+01 8.122e+01 1.206e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 12:01:45,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.88 vs. 
limit=9.072 +2024-07-27 12:01:46,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=12680.0, ans=0.45620000000000005 +2024-07-27 12:01:47,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=12680.0, ans=0.125 +2024-07-27 12:02:31,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=12720.0, ans=0.008104347826086957 +2024-07-27 12:02:42,683 INFO [train.py:1114] (2/4) Epoch 1, batch 9550, loss[loss=0.3806, simple_loss=0.4041, pruned_loss=0.1785, over 4768.00 frames. ], tot_loss[loss=0.3637, simple_loss=0.4071, pruned_loss=0.1601, over 931549.33 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:02:59,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=12746.666666666666, ans=0.125 +2024-07-27 12:03:18,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=12786.666666666666, ans=0.025 +2024-07-27 12:03:22,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=12786.666666666666, ans=0.45246666666666674 +2024-07-27 12:03:24,184 INFO [train.py:1114] (2/4) Epoch 1, batch 9600, loss[loss=0.3926, simple_loss=0.4209, pruned_loss=0.1821, over 3584.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.4069, pruned_loss=0.1596, over 931110.64 frames. ], batch size: 35, lr: 3.53e-02, grad_scale: 64.0 +2024-07-27 12:03:30,719 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.771e+01 7.099e+01 8.382e+01 1.458e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 12:03:32,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=12800.0, ans=0.125 +2024-07-27 12:03:43,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=12826.666666666666, ans=0.008081159420289856 +2024-07-27 12:03:45,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.11 vs. limit=8.206666666666667 +2024-07-27 12:03:46,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=12826.666666666666, ans=0.4510666666666667 +2024-07-27 12:03:55,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=12840.0, ans=10.0 +2024-07-27 12:04:03,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=12853.333333333334, ans=0.00807536231884058 +2024-07-27 12:04:11,724 INFO [train.py:1114] (2/4) Epoch 1, batch 9650, loss[loss=0.3267, simple_loss=0.3912, pruned_loss=0.1311, over 4853.00 frames. ], tot_loss[loss=0.362, simple_loss=0.4061, pruned_loss=0.159, over 927355.08 frames. 
], batch size: 16, lr: 3.53e-02, grad_scale: 64.0 +2024-07-27 12:04:12,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=12866.666666666666, ans=0.125 +2024-07-27 12:04:13,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12866.666666666666, ans=0.17133333333333334 +2024-07-27 12:04:13,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=12866.666666666666, ans=0.008072463768115943 +2024-07-27 12:04:21,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12880.0, ans=0.125 +2024-07-27 12:04:21,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=28.01 vs. limit=12.33 +2024-07-27 12:04:30,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.62 vs. limit=17.17 +2024-07-27 12:04:34,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12906.666666666666, ans=0.125 +2024-07-27 12:04:38,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.69 vs. limit=11.453333333333333 +2024-07-27 12:04:39,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12906.666666666666, ans=0.125 +2024-07-27 12:04:41,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=12906.666666666666, ans=0.125 +2024-07-27 12:04:49,101 INFO [train.py:1114] (2/4) Epoch 1, batch 9700, loss[loss=0.3804, simple_loss=0.4156, pruned_loss=0.1725, over 4179.00 frames. ], tot_loss[loss=0.3638, simple_loss=0.4072, pruned_loss=0.1602, over 925569.89 frames. ], batch size: 25, lr: 3.52e-02, grad_scale: 64.0 +2024-07-27 12:04:52,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=12933.333333333334, ans=0.125 +2024-07-27 12:04:52,738 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.794e+01 6.661e+01 7.332e+01 8.273e+01 1.352e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 12:04:59,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=12946.666666666666, ans=0.012722222222222225 +2024-07-27 12:05:19,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=12973.333333333334, ans=0.125 +2024-07-27 12:05:50,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=12986.666666666666, ans=0.035 +2024-07-27 12:05:57,817 INFO [train.py:1114] (2/4) Epoch 1, batch 9750, loss[loss=0.3982, simple_loss=0.4414, pruned_loss=0.1776, over 4687.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.4067, pruned_loss=0.1594, over 925698.62 frames. 
], batch size: 15, lr: 3.51e-02, grad_scale: 64.0 +2024-07-27 12:05:57,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=13000.0, ans=10.0 +2024-07-27 12:06:00,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=13000.0, ans=0.012500000000000004 +2024-07-27 12:06:07,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.90 vs. limit=17.259999999999998 +2024-07-27 12:06:18,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.79 vs. limit=9.205333333333334 +2024-07-27 12:06:21,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=13013.333333333334, ans=0.125 +2024-07-27 12:06:35,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.58 vs. limit=12.39 +2024-07-27 12:06:40,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=13040.0, ans=0.012333333333333335 +2024-07-27 12:06:48,523 INFO [train.py:1114] (2/4) Epoch 1, batch 9800, loss[loss=0.3822, simple_loss=0.4166, pruned_loss=0.1739, over 4701.00 frames. ], tot_loss[loss=0.3603, simple_loss=0.4042, pruned_loss=0.1582, over 925383.76 frames. ], batch size: 12, lr: 3.51e-02, grad_scale: 64.0 +2024-07-27 12:06:49,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=13066.666666666666, ans=0.125 +2024-07-27 12:06:51,239 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.855e+01 7.493e+01 8.291e+01 1.245e+02, threshold=1.499e+02, percent-clipped=0.0 +2024-07-27 12:07:01,737 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=6.928e-02 +2024-07-27 12:07:07,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=13080.0, ans=0.125 +2024-07-27 12:07:12,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=13093.333333333334, ans=0.125 +2024-07-27 12:07:31,392 INFO [train.py:1114] (2/4) Epoch 1, batch 9850, loss[loss=0.3364, simple_loss=0.3985, pruned_loss=0.1371, over 4902.00 frames. ], tot_loss[loss=0.3607, simple_loss=0.4044, pruned_loss=0.1585, over 927756.49 frames. ], batch size: 15, lr: 3.50e-02, grad_scale: 64.0 +2024-07-27 12:07:33,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13133.333333333334, ans=0.16866666666666666 +2024-07-27 12:07:37,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=13133.333333333334, ans=0.035 +2024-07-27 12:07:37,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.75 vs. 
limit=12.425 +2024-07-27 12:07:41,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=13146.666666666666, ans=0.125 +2024-07-27 12:07:42,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13146.666666666666, ans=0.125 +2024-07-27 12:07:48,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=13160.0, ans=0.008008695652173914 +2024-07-27 12:08:15,396 INFO [train.py:1114] (2/4) Epoch 1, batch 9900, loss[loss=0.3489, simple_loss=0.4035, pruned_loss=0.1471, over 4854.00 frames. ], tot_loss[loss=0.3652, simple_loss=0.4082, pruned_loss=0.1611, over 926921.35 frames. ], batch size: 16, lr: 3.50e-02, grad_scale: 64.0 +2024-07-27 12:08:16,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13200.0, ans=0.125 +2024-07-27 12:08:17,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.801e+01 7.469e+01 8.450e+01 1.233e+02, threshold=1.494e+02, percent-clipped=0.0 +2024-07-27 12:08:27,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=13200.0, ans=0.125 +2024-07-27 12:08:29,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=13213.333333333334, ans=0.025 +2024-07-27 12:08:34,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13213.333333333334, ans=0.16786666666666666 +2024-07-27 12:08:42,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=13240.0, ans=0.011500000000000003 +2024-07-27 12:08:47,352 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:08:49,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13253.333333333334, ans=0.125 +2024-07-27 12:08:54,180 INFO [train.py:1114] (2/4) Epoch 1, batch 9950, loss[loss=0.3081, simple_loss=0.3519, pruned_loss=0.1321, over 4806.00 frames. ], tot_loss[loss=0.365, simple_loss=0.4081, pruned_loss=0.1609, over 929358.02 frames. ], batch size: 11, lr: 3.49e-02, grad_scale: 64.0 +2024-07-27 12:08:57,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.33 vs. limit=12.475 +2024-07-27 12:09:00,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=13280.0, ans=0.125 +2024-07-27 12:09:08,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.28 vs. limit=12.485 +2024-07-27 12:09:12,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=13293.333333333334, ans=0.125 +2024-07-27 12:09:27,217 INFO [train.py:1114] (2/4) Epoch 1, batch 10000, loss[loss=0.394, simple_loss=0.4352, pruned_loss=0.1764, over 4618.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4112, pruned_loss=0.1624, over 926419.69 frames. 
], batch size: 16, lr: 3.49e-02, grad_scale: 64.0 +2024-07-27 12:09:27,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=13333.333333333334, ans=0.011111111111111106 +2024-07-27 12:09:28,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.34 vs. limit=11.666666666666668 +2024-07-27 12:09:29,738 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.713e+01 6.862e+01 7.247e+01 8.214e+01 1.240e+02, threshold=1.449e+02, percent-clipped=0.0 +2024-07-27 12:09:33,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13346.666666666666, ans=0.125 +2024-07-27 12:09:35,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=13346.666666666666, ans=0.43286666666666673 +2024-07-27 12:09:37,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=13346.666666666666, ans=0.007968115942028986 +2024-07-27 12:09:45,718 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.96 vs. limit=17.52 +2024-07-27 12:09:51,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.94 vs. limit=17.53 +2024-07-27 12:09:52,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13373.333333333334, ans=0.16626666666666667 +2024-07-27 12:09:56,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=13386.666666666666, ans=0.125 +2024-07-27 12:10:01,330 INFO [train.py:1114] (2/4) Epoch 1, batch 10050, loss[loss=0.4459, simple_loss=0.4635, pruned_loss=0.2142, over 3234.00 frames. ], tot_loss[loss=0.3742, simple_loss=0.4159, pruned_loss=0.1662, over 913716.29 frames. ], batch size: 35, lr: 3.48e-02, grad_scale: 64.0 +2024-07-27 12:10:20,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=13413.333333333334, ans=0.16586666666666666 +2024-07-27 12:10:34,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=13440.0, ans=0.0 +2024-07-27 12:10:43,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.97 vs. limit=12.545 +2024-07-27 12:10:46,746 INFO [train.py:1114] (2/4) Epoch 1, batch 10100, loss[loss=0.476, simple_loss=0.4808, pruned_loss=0.2356, over 3472.00 frames. ], tot_loss[loss=0.3885, simple_loss=0.424, pruned_loss=0.1765, over 858816.15 frames. 
], batch size: 35, lr: 3.47e-02, grad_scale: 64.0 +2024-07-27 12:10:49,398 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.041e+01 6.990e+01 7.547e+01 8.268e+01 1.617e+02, threshold=1.509e+02, percent-clipped=1.0 +2024-07-27 12:10:49,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=13466.666666666666, ans=0.125 +2024-07-27 12:10:53,364 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:10:53,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13480.0, ans=0.16519999999999999 +2024-07-27 12:11:06,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=13493.333333333334, ans=0.0 +2024-07-27 12:11:07,331 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:11:17,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.74 vs. limit=17.64 +2024-07-27 12:11:20,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.26 vs. limit=17.64 +2024-07-27 12:11:23,848 INFO [train.py:1114] (2/4) Epoch 1, batch 10150, loss[loss=0.4771, simple_loss=0.4718, pruned_loss=0.2412, over 3109.00 frames. ], tot_loss[loss=0.3985, simple_loss=0.4296, pruned_loss=0.1837, over 818838.01 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 64.0 +2024-07-27 12:11:45,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.34 vs. limit=12.585 +2024-07-27 12:11:48,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=13573.333333333334, ans=0.1 +2024-07-27 12:12:02,447 INFO [train.py:1114] (2/4) Epoch 1, batch 10200, loss[loss=0.4368, simple_loss=0.4476, pruned_loss=0.213, over 3234.00 frames. ], tot_loss[loss=0.4045, simple_loss=0.4324, pruned_loss=0.1883, over 788458.59 frames. ], batch size: 35, lr: 3.46e-02, grad_scale: 64.0 +2024-07-27 12:12:04,945 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.695e+01 6.612e+01 7.159e+01 7.876e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 12:12:06,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.50 vs. limit=17.7 +2024-07-27 12:12:09,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=13613.333333333334, ans=0.125 +2024-07-27 12:13:02,331 INFO [train.py:1114] (2/4) Epoch 2, batch 0, loss[loss=0.3661, simple_loss=0.4165, pruned_loss=0.1578, over 4854.00 frames. ], tot_loss[loss=0.3661, simple_loss=0.4165, pruned_loss=0.1578, over 4854.00 frames. 
], batch size: 12, lr: 3.39e-02, grad_scale: 64.0 +2024-07-27 12:13:02,331 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 12:13:07,195 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.2784, 2.5570, 2.0802, 1.9003, 2.1474, 2.2313, 2.1319, 2.1380], + device='cuda:2') +2024-07-27 12:13:12,210 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.4718, 3.5910, 4.5956, 5.1060], device='cuda:2') +2024-07-27 12:13:13,917 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.3005, simple_loss=0.3865, pruned_loss=0.1073, over 944034.00 frames. +2024-07-27 12:13:13,917 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 12:13:21,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.70 vs. limit=17.732 +2024-07-27 12:13:24,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=13642.666666666666, ans=0.8864266666666666 +2024-07-27 12:13:35,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=13669.333333333334, ans=17.752000000000002 +2024-07-27 12:13:45,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=13682.666666666666, ans=0.125 +2024-07-27 12:13:49,258 INFO [train.py:1114] (2/4) Epoch 2, batch 50, loss[loss=0.3175, simple_loss=0.3756, pruned_loss=0.1297, over 4626.00 frames. ], tot_loss[loss=0.3721, simple_loss=0.4153, pruned_loss=0.1645, over 206312.74 frames. ], batch size: 11, lr: 3.39e-02, grad_scale: 64.0 +2024-07-27 12:13:51,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=5.054399999999999 +2024-07-27 12:13:53,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=13696.0, ans=0.42064000000000007 +2024-07-27 12:14:05,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=13709.333333333334, ans=0.09899494936611666 +2024-07-27 12:14:16,676 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.870e+01 6.791e+01 7.517e+01 8.543e+01 1.783e+02, threshold=1.503e+02, percent-clipped=1.0 +2024-07-27 12:14:22,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.40 vs. limit=6.749866666666667 +2024-07-27 12:14:23,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.45 vs. limit=12.655999999999999 +2024-07-27 12:14:29,090 INFO [train.py:1114] (2/4) Epoch 2, batch 100, loss[loss=0.3503, simple_loss=0.3975, pruned_loss=0.1516, over 4640.00 frames. ], tot_loss[loss=0.3694, simple_loss=0.4139, pruned_loss=0.1624, over 365314.97 frames. 
], batch size: 12, lr: 3.38e-02, grad_scale: 64.0 +2024-07-27 12:14:32,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=13762.666666666666, ans=0.41830666666666677 +2024-07-27 12:14:35,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=13762.666666666666, ans=0.125 +2024-07-27 12:14:37,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=13776.0, ans=0.025 +2024-07-27 12:14:38,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=13776.0, ans=0.0 +2024-07-27 12:14:46,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13789.333333333334, ans=0.125 +2024-07-27 12:14:51,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=13802.666666666666, ans=0.025 +2024-07-27 12:15:03,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=13829.333333333334, ans=0.125 +2024-07-27 12:15:04,094 INFO [train.py:1114] (2/4) Epoch 2, batch 150, loss[loss=0.2768, simple_loss=0.3282, pruned_loss=0.1127, over 4613.00 frames. ], tot_loss[loss=0.3616, simple_loss=0.4078, pruned_loss=0.1577, over 493862.15 frames. ], batch size: 11, lr: 3.38e-02, grad_scale: 64.0 +2024-07-27 12:15:05,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=13829.333333333334, ans=0.125 +2024-07-27 12:15:08,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.92 vs. limit=11.914666666666667 +2024-07-27 12:15:15,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.85 vs. limit=11.921333333333333 +2024-07-27 12:15:25,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13869.333333333334, ans=0.16130666666666668 +2024-07-27 12:15:26,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.728e+01 6.633e+01 7.371e+01 8.455e+01 1.546e+02, threshold=1.474e+02, percent-clipped=1.0 +2024-07-27 12:15:34,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=13882.666666666666, ans=0.40823999999999994 +2024-07-27 12:15:38,927 INFO [train.py:1114] (2/4) Epoch 2, batch 200, loss[loss=0.364, simple_loss=0.4229, pruned_loss=0.1525, over 4607.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.4071, pruned_loss=0.1583, over 593538.80 frames. ], batch size: 22, lr: 3.37e-02, grad_scale: 64.0 +2024-07-27 12:15:45,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=13909.333333333334, ans=0.025 +2024-07-27 12:15:54,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.41 vs. 
limit=5.0884 +2024-07-27 12:16:03,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.31 vs. limit=12.725999999999999 +2024-07-27 12:16:15,233 INFO [train.py:1114] (2/4) Epoch 2, batch 250, loss[loss=0.4092, simple_loss=0.4553, pruned_loss=0.1815, over 4592.00 frames. ], tot_loss[loss=0.3602, simple_loss=0.4053, pruned_loss=0.1575, over 670070.89 frames. ], batch size: 16, lr: 3.37e-02, grad_scale: 64.0 +2024-07-27 12:16:30,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=13976.0, ans=0.125 +2024-07-27 12:16:30,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=13976.0, ans=0.125 +2024-07-27 12:16:34,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=13989.333333333334, ans=0.41037333333333337 +2024-07-27 12:16:37,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=13989.333333333334, ans=0.008377777777777776 +2024-07-27 12:16:41,800 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.968e+01 6.499e+01 7.152e+01 7.948e+01 1.053e+02, threshold=1.430e+02, percent-clipped=0.0 +2024-07-27 12:16:51,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=14016.0, ans=0.008266666666666665 +2024-07-27 12:16:52,754 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.756 +2024-07-27 12:16:56,552 INFO [train.py:1114] (2/4) Epoch 2, batch 300, loss[loss=0.3504, simple_loss=0.4165, pruned_loss=0.1422, over 4804.00 frames. ], tot_loss[loss=0.3571, simple_loss=0.4033, pruned_loss=0.1554, over 729786.76 frames. ], batch size: 15, lr: 3.36e-02, grad_scale: 64.0 +2024-07-27 12:16:56,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=14029.333333333334, ans=0.07 +2024-07-27 12:16:58,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=14029.333333333334, ans=0.125 +2024-07-27 12:17:00,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=14029.333333333334, ans=0.125 +2024-07-27 12:17:05,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=14042.666666666666, ans=0.008155555555555562 +2024-07-27 12:17:09,627 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=2.510e-03 +2024-07-27 12:17:26,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=14082.666666666666, ans=0.025 +2024-07-27 12:17:26,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.96 vs. 
limit=12.780999999999999 +2024-07-27 12:17:29,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=14082.666666666666, ans=0.05 +2024-07-27 12:17:31,948 INFO [train.py:1114] (2/4) Epoch 2, batch 350, loss[loss=0.4351, simple_loss=0.4423, pruned_loss=0.2139, over 4928.00 frames. ], tot_loss[loss=0.3562, simple_loss=0.4029, pruned_loss=0.1547, over 775932.06 frames. ], batch size: 12, lr: 3.36e-02, grad_scale: 64.0 +2024-07-27 12:17:41,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=14109.333333333334, ans=0.125 +2024-07-27 12:17:51,061 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.70 vs. limit=18.092 +2024-07-27 12:17:53,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.56 vs. limit=12.061333333333334 +2024-07-27 12:17:57,626 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.955e+01 6.715e+01 7.349e+01 8.005e+01 1.409e+02, threshold=1.470e+02, percent-clipped=0.0 +2024-07-27 12:18:08,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=14149.333333333334, ans=0.007711111111111113 +2024-07-27 12:18:10,145 INFO [train.py:1114] (2/4) Epoch 2, batch 400, loss[loss=0.3025, simple_loss=0.3623, pruned_loss=0.1213, over 4687.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.4, pruned_loss=0.1524, over 813324.76 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0 +2024-07-27 12:18:19,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=14176.0, ans=0.1 +2024-07-27 12:18:21,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=14176.0, ans=0.007787826086956521 +2024-07-27 12:18:30,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=14202.666666666666, ans=0.125 +2024-07-27 12:18:36,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.58 vs. limit=12.101333333333333 +2024-07-27 12:18:43,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=14216.0, ans=0.125 +2024-07-27 12:18:43,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=14216.0, ans=0.125 +2024-07-27 12:18:45,069 INFO [train.py:1114] (2/4) Epoch 2, batch 450, loss[loss=0.3105, simple_loss=0.3851, pruned_loss=0.1179, over 4639.00 frames. ], tot_loss[loss=0.3516, simple_loss=0.3996, pruned_loss=0.1518, over 838570.35 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0 +2024-07-27 12:18:48,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.18 vs. 
limit=8.557333333333334 +2024-07-27 12:18:49,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=14229.333333333334, ans=0.125 +2024-07-27 12:19:05,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=14269.333333333334, ans=0.125 +2024-07-27 12:19:07,282 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+01 6.535e+01 7.099e+01 8.060e+01 1.224e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 12:19:08,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=14269.333333333334, ans=0.125 +2024-07-27 12:19:10,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=14269.333333333334, ans=0.125 +2024-07-27 12:19:19,766 INFO [train.py:1114] (2/4) Epoch 2, batch 500, loss[loss=0.4217, simple_loss=0.4576, pruned_loss=0.1929, over 4671.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.399, pruned_loss=0.1522, over 861040.65 frames. ], batch size: 15, lr: 3.34e-02, grad_scale: 64.0 +2024-07-27 12:19:19,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=14296.0, ans=0.125 +2024-07-27 12:19:42,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14322.666666666666, ans=0.15677333333333335 +2024-07-27 12:19:49,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14336.0, ans=0.125 +2024-07-27 12:19:51,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=14336.0, ans=0.125 +2024-07-27 12:19:51,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=14336.0, ans=0.39824000000000004 +2024-07-27 12:20:00,831 INFO [train.py:1114] (2/4) Epoch 2, batch 550, loss[loss=0.3953, simple_loss=0.4324, pruned_loss=0.1791, over 4614.00 frames. ], tot_loss[loss=0.3517, simple_loss=0.3987, pruned_loss=0.1523, over 877210.64 frames. ], batch size: 17, lr: 3.34e-02, grad_scale: 64.0 +2024-07-27 12:20:01,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.67 vs. limit=12.886 +2024-07-27 12:20:24,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=14389.333333333334, ans=0.007741449275362319 +2024-07-27 12:20:26,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=14402.666666666666, ans=0.125 +2024-07-27 12:20:27,928 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.749e+01 6.667e+01 7.400e+01 8.349e+01 1.588e+02, threshold=1.480e+02, percent-clipped=3.0 +2024-07-27 12:20:36,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.89 vs. 
limit=12.905999999999999 +2024-07-27 12:20:38,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=14416.0, ans=0.006599999999999995 +2024-07-27 12:20:39,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=14416.0, ans=0.125 +2024-07-27 12:20:40,340 INFO [train.py:1114] (2/4) Epoch 2, batch 600, loss[loss=0.3574, simple_loss=0.4162, pruned_loss=0.1493, over 4624.00 frames. ], tot_loss[loss=0.3506, simple_loss=0.3986, pruned_loss=0.1513, over 891785.01 frames. ], batch size: 16, lr: 3.33e-02, grad_scale: 64.0 +2024-07-27 12:20:49,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=14442.666666666666, ans=0.007729855072463768 +2024-07-27 12:20:53,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=14456.0, ans=10.0 +2024-07-27 12:21:01,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=14469.333333333334, ans=0.0 +2024-07-27 12:21:12,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=14482.666666666666, ans=0.09899494936611666 +2024-07-27 12:21:12,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.78 vs. limit=12.241333333333333 +2024-07-27 12:21:14,594 INFO [train.py:1114] (2/4) Epoch 2, batch 650, loss[loss=0.3192, simple_loss=0.3817, pruned_loss=0.1283, over 4766.00 frames. ], tot_loss[loss=0.3497, simple_loss=0.3974, pruned_loss=0.1509, over 903411.03 frames. ], batch size: 13, lr: 3.33e-02, grad_scale: 64.0 +2024-07-27 12:21:24,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.68 vs. 
limit=9.803733333333334 +2024-07-27 12:21:26,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=14509.333333333334, ans=0.006211111111111112 +2024-07-27 12:21:29,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=14522.666666666666, ans=0.09899494936611666 +2024-07-27 12:21:31,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=14522.666666666666, ans=0.0077124637681159425 +2024-07-27 12:21:35,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=14536.0, ans=18.402 +2024-07-27 12:21:36,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.647e+01 7.177e+01 7.899e+01 1.481e+02, threshold=1.435e+02, percent-clipped=1.0 +2024-07-27 12:21:41,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=14549.333333333334, ans=0.0060444444444444426 +2024-07-27 12:21:42,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=14549.333333333334, ans=0.0060444444444444426 +2024-07-27 12:21:43,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=14549.333333333334, ans=0.125 +2024-07-27 12:21:49,038 INFO [train.py:1114] (2/4) Epoch 2, batch 700, loss[loss=0.3401, simple_loss=0.3937, pruned_loss=0.1432, over 4632.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.3976, pruned_loss=0.1504, over 911738.65 frames. ], batch size: 12, lr: 3.32e-02, grad_scale: 64.0 +2024-07-27 12:21:49,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=14562.666666666666, ans=0.125 +2024-07-27 12:22:07,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=14589.333333333334, ans=0.007697971014492754 +2024-07-27 12:22:10,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=14589.333333333334, ans=0.125 +2024-07-27 12:22:13,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=12.975999999999999 +2024-07-27 12:22:25,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=14616.0, ans=0.125 +2024-07-27 12:22:29,302 INFO [train.py:1114] (2/4) Epoch 2, batch 750, loss[loss=0.394, simple_loss=0.4326, pruned_loss=0.1777, over 4691.00 frames. ], tot_loss[loss=0.3473, simple_loss=0.3961, pruned_loss=0.1492, over 918231.69 frames. ], batch size: 13, lr: 3.31e-02, grad_scale: 64.0 +2024-07-27 12:22:36,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=14629.333333333334, ans=0.0076892753623188405 +2024-07-27 12:22:41,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=14642.666666666666, ans=0.125 +2024-07-27 12:22:43,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. 
limit=5.196400000000001 +2024-07-27 12:22:53,337 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.645e+01 6.839e+01 7.355e+01 8.149e+01 1.440e+02, threshold=1.471e+02, percent-clipped=1.0 +2024-07-27 12:22:59,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=14682.666666666666, ans=0.025 +2024-07-27 12:23:00,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=14682.666666666666, ans=0.125 +2024-07-27 12:23:05,825 INFO [train.py:1114] (2/4) Epoch 2, batch 800, loss[loss=0.2939, simple_loss=0.3516, pruned_loss=0.1181, over 4853.00 frames. ], tot_loss[loss=0.3471, simple_loss=0.3956, pruned_loss=0.1493, over 923171.36 frames. ], batch size: 12, lr: 3.31e-02, grad_scale: 128.0 +2024-07-27 12:23:06,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=14696.0, ans=0.3856400000000001 +2024-07-27 12:23:15,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=14709.333333333334, ans=0.0 +2024-07-27 12:23:27,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=14736.0, ans=0.38424 +2024-07-27 12:23:40,083 INFO [train.py:1114] (2/4) Epoch 2, batch 850, loss[loss=0.3594, simple_loss=0.4134, pruned_loss=0.1527, over 4681.00 frames. ], tot_loss[loss=0.3461, simple_loss=0.3953, pruned_loss=0.1485, over 927538.50 frames. ], batch size: 14, lr: 3.30e-02, grad_scale: 64.0 +2024-07-27 12:23:41,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=14762.666666666666, ans=0.005155555555555559 +2024-07-27 12:24:02,971 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.532e+01 6.555e+01 7.139e+01 7.731e+01 1.156e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 12:24:10,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=13.056000000000001 +2024-07-27 12:24:15,206 INFO [train.py:1114] (2/4) Epoch 2, batch 900, loss[loss=0.3081, simple_loss=0.3633, pruned_loss=0.1265, over 4849.00 frames. ], tot_loss[loss=0.3481, simple_loss=0.3967, pruned_loss=0.1497, over 928699.52 frames. ], batch size: 12, lr: 3.30e-02, grad_scale: 64.0 +2024-07-27 12:24:27,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=14842.666666666666, ans=10.0 +2024-07-27 12:24:27,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14842.666666666666, ans=0.15157333333333334 +2024-07-27 12:24:46,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=14882.666666666666, ans=0.125 +2024-07-27 12:24:50,048 INFO [train.py:1114] (2/4) Epoch 2, batch 950, loss[loss=0.2852, simple_loss=0.3447, pruned_loss=0.1129, over 4775.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3959, pruned_loss=0.149, over 930226.99 frames. ], batch size: 12, lr: 3.29e-02, grad_scale: 64.0 +2024-07-27 12:24:53,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.70 vs. 
limit=13.086 +2024-07-27 12:24:58,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.93 vs. limit=18.682000000000002 +2024-07-27 12:25:03,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=14922.666666666666, ans=0.125 +2024-07-27 12:25:13,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+01 6.513e+01 7.102e+01 8.226e+01 2.101e+02, threshold=1.420e+02, percent-clipped=1.0 +2024-07-27 12:25:20,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=14949.333333333334, ans=0.125 +2024-07-27 12:25:25,579 INFO [train.py:1114] (2/4) Epoch 2, batch 1000, loss[loss=0.4047, simple_loss=0.4463, pruned_loss=0.1816, over 4963.00 frames. ], tot_loss[loss=0.3484, simple_loss=0.3967, pruned_loss=0.15, over 929932.99 frames. ], batch size: 13, lr: 3.29e-02, grad_scale: 64.0 +2024-07-27 12:25:30,119 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:25:43,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=14989.333333333334, ans=0.37537333333333334 +2024-07-27 12:25:54,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=15016.0, ans=0.125 +2024-07-27 12:26:00,218 INFO [train.py:1114] (2/4) Epoch 2, batch 1050, loss[loss=0.3371, simple_loss=0.382, pruned_loss=0.1461, over 4867.00 frames. ], tot_loss[loss=0.3471, simple_loss=0.3957, pruned_loss=0.1492, over 931990.96 frames. ], batch size: 14, lr: 3.28e-02, grad_scale: 64.0 +2024-07-27 12:26:02,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=15029.333333333334, ans=0.09899494936611666 +2024-07-27 12:26:10,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=15042.666666666666, ans=0.125 +2024-07-27 12:26:22,961 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.480e+01 6.937e+01 7.724e+01 1.151e+02, threshold=1.387e+02, percent-clipped=0.0 +2024-07-27 12:26:23,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=15069.333333333334, ans=0.14930666666666667 +2024-07-27 12:26:29,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=15082.666666666666, ans=0.003822222222222227 +2024-07-27 12:26:35,187 INFO [train.py:1114] (2/4) Epoch 2, batch 1100, loss[loss=0.3988, simple_loss=0.4386, pruned_loss=0.1795, over 4899.00 frames. ], tot_loss[loss=0.3454, simple_loss=0.3943, pruned_loss=0.1482, over 934564.74 frames. 
], batch size: 13, lr: 3.28e-02, grad_scale: 64.0 +2024-07-27 12:26:56,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=15136.0, ans=0.007579130434782609 +2024-07-27 12:27:00,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=15136.0, ans=0.125 +2024-07-27 12:27:08,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=15149.333333333334, ans=0.007576231884057971 +2024-07-27 12:27:09,720 INFO [train.py:1114] (2/4) Epoch 2, batch 1150, loss[loss=0.3123, simple_loss=0.3672, pruned_loss=0.1287, over 4899.00 frames. ], tot_loss[loss=0.344, simple_loss=0.3933, pruned_loss=0.1474, over 934042.53 frames. ], batch size: 13, lr: 3.27e-02, grad_scale: 64.0 +2024-07-27 12:27:18,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.92 vs. limit=18.881999999999998 +2024-07-27 12:27:30,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.79 vs. limit=13.196 +2024-07-27 12:27:32,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.67 vs. limit=18.902 +2024-07-27 12:27:35,092 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.945e+01 6.616e+01 7.321e+01 8.237e+01 1.316e+02, threshold=1.464e+02, percent-clipped=0.0 +2024-07-27 12:27:43,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.35 vs. limit=18.912 +2024-07-27 12:27:48,604 INFO [train.py:1114] (2/4) Epoch 2, batch 1200, loss[loss=0.3512, simple_loss=0.4045, pruned_loss=0.149, over 4877.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.3962, pruned_loss=0.1493, over 932886.66 frames. ], batch size: 14, lr: 3.27e-02, grad_scale: 64.0 +2024-07-27 12:27:48,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15229.333333333334, ans=0.14770666666666668 +2024-07-27 12:28:05,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=15256.0, ans=0.125 +2024-07-27 12:28:11,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=15256.0, ans=0.125 +2024-07-27 12:28:16,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=15269.333333333334, ans=0.05875866666666668 +2024-07-27 12:28:17,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=15269.333333333334, ans=0.025 +2024-07-27 12:28:26,798 INFO [train.py:1114] (2/4) Epoch 2, batch 1250, loss[loss=0.3665, simple_loss=0.4142, pruned_loss=0.1594, over 4793.00 frames. ], tot_loss[loss=0.3445, simple_loss=0.3947, pruned_loss=0.1471, over 937050.00 frames. 
], batch size: 15, lr: 3.26e-02, grad_scale: 64.0 +2024-07-27 12:28:26,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15296.0, ans=0.125 +2024-07-27 12:28:30,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=15296.0, ans=13.236 +2024-07-27 12:28:32,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.49 vs. limit=13.236 +2024-07-27 12:28:43,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=15322.666666666666, ans=0.002822222222222226 +2024-07-27 12:28:46,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=15336.0, ans=0.025 +2024-07-27 12:28:49,359 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.573e+01 7.173e+01 8.198e+01 1.375e+02, threshold=1.435e+02, percent-clipped=0.0 +2024-07-27 12:28:57,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=15349.333333333334, ans=0.0027111111111111086 +2024-07-27 12:28:57,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=15349.333333333334, ans=0.07 +2024-07-27 12:28:59,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=15349.333333333334, ans=0.035 +2024-07-27 12:29:01,002 INFO [train.py:1114] (2/4) Epoch 2, batch 1300, loss[loss=0.4721, simple_loss=0.493, pruned_loss=0.2256, over 4726.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3942, pruned_loss=0.1468, over 938523.20 frames. ], batch size: 19, lr: 3.26e-02, grad_scale: 64.0 +2024-07-27 12:29:30,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.85 vs. limit=19.061999999999998 +2024-07-27 12:29:30,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15416.0, ans=0.14584 +2024-07-27 12:29:34,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=15416.0, ans=0.125 +2024-07-27 12:29:35,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=15429.333333333334, ans=0.35997333333333337 +2024-07-27 12:29:35,613 INFO [train.py:1114] (2/4) Epoch 2, batch 1350, loss[loss=0.3205, simple_loss=0.3833, pruned_loss=0.1289, over 4766.00 frames. ], tot_loss[loss=0.3416, simple_loss=0.3924, pruned_loss=0.1454, over 940491.71 frames. 
], batch size: 13, lr: 3.25e-02, grad_scale: 64.0 +2024-07-27 12:29:36,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=15429.333333333334, ans=0.35997333333333337 +2024-07-27 12:29:37,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=15429.333333333334, ans=0.0023777777777777773 +2024-07-27 12:29:47,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=15442.666666666666, ans=0.35950666666666675 +2024-07-27 12:29:57,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.16 vs. limit=13.301 +2024-07-27 12:29:58,816 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.395e+01 7.183e+01 7.821e+01 1.561e+02, threshold=1.437e+02, percent-clipped=1.0 +2024-07-27 12:30:01,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15469.333333333334, ans=0.14530666666666667 +2024-07-27 12:30:08,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15482.666666666666, ans=0.0 +2024-07-27 12:30:10,904 INFO [train.py:1114] (2/4) Epoch 2, batch 1400, loss[loss=0.2826, simple_loss=0.3325, pruned_loss=0.1163, over 4691.00 frames. ], tot_loss[loss=0.3399, simple_loss=0.3911, pruned_loss=0.1443, over 942589.74 frames. ], batch size: 11, lr: 3.25e-02, grad_scale: 64.0 +2024-07-27 12:30:12,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15496.0, ans=0.125 +2024-07-27 12:30:26,785 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.91 vs. limit=8.880666666666666 +2024-07-27 12:30:28,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=15522.666666666666, ans=0.05 +2024-07-27 12:30:31,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=15536.0, ans=0.3562400000000001 +2024-07-27 12:30:32,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=15536.0, ans=0.3562400000000001 +2024-07-27 12:30:33,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=15536.0, ans=0.007492173913043479 +2024-07-27 12:30:38,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.22 vs. limit=19.152 +2024-07-27 12:30:46,194 INFO [train.py:1114] (2/4) Epoch 2, batch 1450, loss[loss=0.3312, simple_loss=0.3966, pruned_loss=0.1329, over 4688.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3925, pruned_loss=0.1456, over 942598.82 frames. 
], batch size: 15, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:30:46,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=15562.666666666666, ans=0.35530666666666677 +2024-07-27 12:30:49,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=15562.666666666666, ans=0.007486376811594203 +2024-07-27 12:30:49,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=15562.666666666666, ans=0.001822222222222225 +2024-07-27 12:30:50,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.62 vs. limit=19.172 +2024-07-27 12:30:51,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15562.666666666666, ans=0.125 +2024-07-27 12:30:58,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=15576.0, ans=13.341000000000001 +2024-07-27 12:31:04,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=15589.333333333334, ans=0.125 +2024-07-27 12:31:04,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=15589.333333333334, ans=0.125 +2024-07-27 12:31:05,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=15589.333333333334, ans=0.07 +2024-07-27 12:31:08,926 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.669e+01 6.624e+01 7.283e+01 7.925e+01 1.878e+02, threshold=1.457e+02, percent-clipped=2.0 +2024-07-27 12:31:15,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=15616.0, ans=0.3534400000000001 +2024-07-27 12:31:17,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:19,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.89 vs. limit=13.356 +2024-07-27 12:31:20,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=15616.0, ans=0.125 +2024-07-27 12:31:23,974 INFO [train.py:1114] (2/4) Epoch 2, batch 1500, loss[loss=0.3248, simple_loss=0.3855, pruned_loss=0.1321, over 4805.00 frames. ], tot_loss[loss=0.343, simple_loss=0.3935, pruned_loss=0.1462, over 942693.42 frames. ], batch size: 14, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:31:31,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.63 vs. 
limit=10.257066666666667 +2024-07-27 12:31:37,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15656.0, ans=0.125 +2024-07-27 12:31:40,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15656.0, ans=0.125 +2024-07-27 12:31:43,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=15656.0, ans=0.025 +2024-07-27 12:31:52,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=15682.666666666666, ans=0.0013222222222222246 +2024-07-27 12:31:57,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=15682.666666666666, ans=0.14317333333333335 +2024-07-27 12:31:58,845 INFO [train.py:1114] (2/4) Epoch 2, batch 1550, loss[loss=0.3482, simple_loss=0.4057, pruned_loss=0.1454, over 4916.00 frames. ], tot_loss[loss=0.3438, simple_loss=0.394, pruned_loss=0.1468, over 939085.72 frames. ], batch size: 15, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:06,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=15709.333333333334, ans=0.14290666666666668 +2024-07-27 12:32:07,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=15709.333333333334, ans=0.125 +2024-07-27 12:32:22,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=15736.0, ans=0.3492400000000001 +2024-07-27 12:32:22,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.37 vs. limit=13.401 +2024-07-27 12:32:22,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+01 6.571e+01 7.346e+01 8.400e+01 2.303e+02, threshold=1.469e+02, percent-clipped=1.0 +2024-07-27 12:32:25,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15736.0, ans=0.14264 +2024-07-27 12:32:34,710 INFO [train.py:1114] (2/4) Epoch 2, batch 1600, loss[loss=0.3344, simple_loss=0.3904, pruned_loss=0.1393, over 4880.00 frames. ], tot_loss[loss=0.3441, simple_loss=0.3942, pruned_loss=0.147, over 937727.73 frames. 
], batch size: 14, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:39,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=15762.666666666666, ans=0.125 +2024-07-27 12:33:00,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=15802.666666666666, ans=0.0008222222222222242 +2024-07-27 12:33:00,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=15802.666666666666, ans=0.125 +2024-07-27 12:33:01,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=15802.666666666666, ans=0.125 +2024-07-27 12:33:08,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15816.0, ans=0.14184 +2024-07-27 12:33:13,394 INFO [train.py:1114] (2/4) Epoch 2, batch 1650, loss[loss=0.4373, simple_loss=0.4696, pruned_loss=0.2025, over 4667.00 frames. ], tot_loss[loss=0.3442, simple_loss=0.3941, pruned_loss=0.1471, over 937475.87 frames. ], batch size: 14, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:33:18,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=15829.333333333334, ans=0.0007111111111111137 +2024-07-27 12:33:21,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15842.666666666666, ans=0.14157333333333336 +2024-07-27 12:33:25,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=15842.666666666666, ans=0.007425507246376812 +2024-07-27 12:33:42,232 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.520e+01 7.164e+01 7.874e+01 1.221e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 12:33:48,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.47 vs. limit=13.456 +2024-07-27 12:33:48,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15882.666666666666, ans=0.125 +2024-07-27 12:33:59,029 INFO [train.py:1114] (2/4) Epoch 2, batch 1700, loss[loss=0.2893, simple_loss=0.339, pruned_loss=0.1198, over 4707.00 frames. ], tot_loss[loss=0.342, simple_loss=0.3927, pruned_loss=0.1457, over 939470.12 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:09,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=15909.333333333334, ans=0.125 +2024-07-27 12:34:12,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.74 vs. 
limit=13.471 +2024-07-27 12:34:15,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15922.666666666666, ans=0.125 +2024-07-27 12:34:16,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=15922.666666666666, ans=0.007408115942028986 +2024-07-27 12:34:21,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15936.0, ans=0.14064 +2024-07-27 12:34:27,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=15949.333333333334, ans=0.035 +2024-07-27 12:34:28,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.72 vs. limit=19.462 +2024-07-27 12:34:31,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=15949.333333333334, ans=0.025 +2024-07-27 12:34:33,911 INFO [train.py:1114] (2/4) Epoch 2, batch 1750, loss[loss=0.3133, simple_loss=0.3682, pruned_loss=0.1292, over 4794.00 frames. ], tot_loss[loss=0.3401, simple_loss=0.3913, pruned_loss=0.1444, over 940427.14 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:34,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=13.486 +2024-07-27 12:34:38,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=15962.666666666666, ans=0.2 +2024-07-27 12:34:48,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=15989.333333333334, ans=0.125 +2024-07-27 12:34:48,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=5.3984000000000005 +2024-07-27 12:34:59,088 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.489e+01 6.792e+01 7.364e+01 8.042e+01 2.018e+02, threshold=1.473e+02, percent-clipped=1.0 +2024-07-27 12:34:59,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16002.666666666666, ans=0.125 +2024-07-27 12:35:10,709 INFO [train.py:1114] (2/4) Epoch 2, batch 1800, loss[loss=0.3614, simple_loss=0.406, pruned_loss=0.1584, over 4640.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3906, pruned_loss=0.1439, over 940813.06 frames. 
], batch size: 13, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:11,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=16029.333333333334, ans=0.13970666666666667 +2024-07-27 12:35:15,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=16029.333333333334, ans=0.0 +2024-07-27 12:35:18,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=16042.666666666666, ans=0.007382028985507247 +2024-07-27 12:35:28,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16056.0, ans=0.125 +2024-07-27 12:35:36,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=13.526 +2024-07-27 12:35:37,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=16082.666666666666, ans=10.0 +2024-07-27 12:35:44,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16096.0, ans=0.13904 +2024-07-27 12:35:45,374 INFO [train.py:1114] (2/4) Epoch 2, batch 1850, loss[loss=0.3143, simple_loss=0.3817, pruned_loss=0.1235, over 4805.00 frames. ], tot_loss[loss=0.3389, simple_loss=0.3904, pruned_loss=0.1437, over 940860.16 frames. ], batch size: 14, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:49,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.64 vs. limit=13.048 +2024-07-27 12:35:53,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.65 vs. limit=19.582 +2024-07-27 12:36:11,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=16122.666666666666, ans=0.125 +2024-07-27 12:36:17,285 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.413e+01 7.038e+01 7.663e+01 1.052e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 12:36:18,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=16136.0, ans=0.91136 +2024-07-27 12:36:28,704 INFO [train.py:1114] (2/4) Epoch 2, batch 1900, loss[loss=0.3121, simple_loss=0.3844, pruned_loss=0.1199, over 4665.00 frames. ], tot_loss[loss=0.3382, simple_loss=0.3902, pruned_loss=0.1431, over 942204.48 frames. ], batch size: 14, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:36:46,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.50 vs. limit=19.642 +2024-07-27 12:37:05,417 INFO [train.py:1114] (2/4) Epoch 2, batch 1950, loss[loss=0.2864, simple_loss=0.3451, pruned_loss=0.1138, over 4895.00 frames. ], tot_loss[loss=0.3384, simple_loss=0.3908, pruned_loss=0.143, over 944028.85 frames. 
], batch size: 13, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:37:13,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=16242.666666666666, ans=0.0 +2024-07-27 12:37:17,395 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:37:25,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=16269.333333333334, ans=0.125 +2024-07-27 12:37:28,260 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.625e+01 6.630e+01 7.143e+01 8.194e+01 1.176e+02, threshold=1.429e+02, percent-clipped=0.0 +2024-07-27 12:37:28,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=16269.333333333334, ans=0.125 +2024-07-27 12:37:38,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=16269.333333333334, ans=0.09899494936611666 +2024-07-27 12:37:38,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=16269.333333333334, ans=0.007332753623188406 +2024-07-27 12:37:40,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16282.666666666666, ans=0.125 +2024-07-27 12:37:59,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.10 vs. limit=9.070666666666666 +2024-07-27 12:38:00,636 INFO [train.py:1114] (2/4) Epoch 2, batch 2000, loss[loss=0.2958, simple_loss=0.349, pruned_loss=0.1213, over 4802.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3925, pruned_loss=0.1444, over 941455.74 frames. ], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:01,426 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:38:16,301 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.62 vs. limit=13.621 +2024-07-27 12:38:19,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.95 vs. limit=13.621 +2024-07-27 12:38:22,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.41 vs. limit=19.752000000000002 +2024-07-27 12:38:27,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16349.333333333334, ans=0.125 +2024-07-27 12:38:35,113 INFO [train.py:1114] (2/4) Epoch 2, batch 2050, loss[loss=0.3493, simple_loss=0.3847, pruned_loss=0.157, over 4612.00 frames. ], tot_loss[loss=0.3409, simple_loss=0.3922, pruned_loss=0.1448, over 939639.51 frames. 
], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:46,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=16376.0, ans=0.007309565217391304 +2024-07-27 12:38:58,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.444e+01 7.138e+01 8.017e+01 1.723e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 12:39:07,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.81 vs. limit=13.201333333333334 +2024-07-27 12:39:08,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=16402.666666666668, ans=0.125 +2024-07-27 12:39:19,462 INFO [train.py:1114] (2/4) Epoch 2, batch 2100, loss[loss=0.3538, simple_loss=0.4029, pruned_loss=0.1523, over 4763.00 frames. ], tot_loss[loss=0.3388, simple_loss=0.3908, pruned_loss=0.1434, over 940952.54 frames. ], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:39:32,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=16442.666666666668, ans=0.0 +2024-07-27 12:39:36,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=16442.666666666668, ans=0.025 +2024-07-27 12:39:41,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=16456.0, ans=0.0 +2024-07-27 12:39:42,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16456.0, ans=0.0 +2024-07-27 12:39:49,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.27 vs. limit=13.675999999999998 +2024-07-27 12:39:53,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=16469.333333333332, ans=0.007289275362318841 +2024-07-27 12:40:04,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=16482.666666666668, ans=0.44724 +2024-07-27 12:40:05,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.83 vs. limit=10.593066666666667 +2024-07-27 12:40:05,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=16482.666666666668, ans=0.125 +2024-07-27 12:40:06,978 INFO [train.py:1114] (2/4) Epoch 2, batch 2150, loss[loss=0.3722, simple_loss=0.4273, pruned_loss=0.1586, over 4900.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.3885, pruned_loss=0.1416, over 943987.47 frames. ], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:40:13,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. 
limit=13.686 +2024-07-27 12:40:18,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=16509.333333333332, ans=0.007280579710144928 +2024-07-27 12:40:21,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=16509.333333333332, ans=0.025 +2024-07-27 12:40:24,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=16522.666666666668, ans=0.125 +2024-07-27 12:40:25,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=16522.666666666668, ans=0.00727768115942029 +2024-07-27 12:40:25,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=16522.666666666668, ans=0.00727768115942029 +2024-07-27 12:40:33,592 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.279e+01 6.440e+01 7.313e+01 8.077e+01 1.347e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 12:40:38,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.44 vs. limit=19.912 +2024-07-27 12:40:39,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=16549.333333333332, ans=0.125 +2024-07-27 12:40:44,969 INFO [train.py:1114] (2/4) Epoch 2, batch 2200, loss[loss=0.3092, simple_loss=0.371, pruned_loss=0.1237, over 4815.00 frames. ], tot_loss[loss=0.3351, simple_loss=0.3875, pruned_loss=0.1414, over 943414.67 frames. ], batch size: 14, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:40:52,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=16576.0, ans=0.125 +2024-07-27 12:41:22,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16616.0, ans=0.125 +2024-07-27 12:41:23,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=16616.0, ans=0.125 +2024-07-27 12:41:24,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.48 vs. limit=13.731 +2024-07-27 12:41:24,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=16616.0, ans=0.125 +2024-07-27 12:41:25,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=16629.333333333332, ans=0.025 +2024-07-27 12:41:26,214 INFO [train.py:1114] (2/4) Epoch 2, batch 2250, loss[loss=0.3524, simple_loss=0.413, pruned_loss=0.1459, over 4697.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3896, pruned_loss=0.1427, over 942312.53 frames. ], batch size: 13, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:41:28,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16629.333333333332, ans=0.125 +2024-07-27 12:41:35,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.36 vs. 
limit=19.982 +2024-07-27 12:41:35,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=16642.666666666668, ans=0.125 +2024-07-27 12:41:38,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=16642.666666666668, ans=0.0 +2024-07-27 12:41:45,728 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.80 vs. limit=19.992 +2024-07-27 12:41:48,633 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.411e+01 7.130e+01 8.285e+01 1.332e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 12:41:50,972 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:41:51,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.36 vs. limit=9.167333333333332 +2024-07-27 12:41:57,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16682.666666666668, ans=0.0 +2024-07-27 12:42:00,452 INFO [train.py:1114] (2/4) Epoch 2, batch 2300, loss[loss=0.2892, simple_loss=0.3522, pruned_loss=0.1131, over 4929.00 frames. ], tot_loss[loss=0.3337, simple_loss=0.3861, pruned_loss=0.1407, over 939970.74 frames. ], batch size: 12, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:09,788 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.06 vs. limit=13.761 +2024-07-27 12:42:14,222 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:42:15,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=5.5064 +2024-07-27 12:42:16,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=16709.333333333332, ans=0.05 +2024-07-27 12:42:17,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16709.333333333332, ans=0.125 +2024-07-27 12:42:17,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=16709.333333333332, ans=0.3151733333333334 +2024-07-27 12:42:24,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=16722.666666666668, ans=0.025 +2024-07-27 12:42:25,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=10.689066666666667 +2024-07-27 12:42:42,778 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.965e-02 +2024-07-27 12:42:44,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=16749.333333333332, ans=0.0 +2024-07-27 12:42:47,925 INFO [train.py:1114] (2/4) Epoch 2, batch 2350, loss[loss=0.3354, simple_loss=0.3938, pruned_loss=0.1384, over 4639.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3864, pruned_loss=0.1408, over 941609.39 frames. 
], batch size: 13, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:52,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=16762.666666666668, ans=0.007225507246376812 +2024-07-27 12:42:58,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16776.0, ans=0.125 +2024-07-27 12:43:04,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.90 vs. limit=9.197333333333333 +2024-07-27 12:43:13,076 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.842e+01 6.484e+01 7.035e+01 7.953e+01 1.463e+02, threshold=1.407e+02, percent-clipped=1.0 +2024-07-27 12:43:24,673 INFO [train.py:1114] (2/4) Epoch 2, batch 2400, loss[loss=0.3818, simple_loss=0.4279, pruned_loss=0.1678, over 4632.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3883, pruned_loss=0.1419, over 941284.68 frames. ], batch size: 12, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:43:25,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=5.5244 +2024-07-27 12:43:26,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.23 vs. limit=13.811 +2024-07-27 12:43:26,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=16829.333333333332, ans=0.31097333333333343 +2024-07-27 12:43:27,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=16829.333333333332, ans=0.0 +2024-07-27 12:44:05,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.58 vs. limit=20.152 +2024-07-27 12:44:06,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=16869.333333333332, ans=0.0 +2024-07-27 12:44:11,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=16882.666666666668, ans=0.125 +2024-07-27 12:44:13,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16882.666666666668, ans=0.13117333333333334 +2024-07-27 12:44:13,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.88 vs. limit=20.162 +2024-07-27 12:44:14,564 INFO [train.py:1114] (2/4) Epoch 2, batch 2450, loss[loss=0.3146, simple_loss=0.3658, pruned_loss=0.1318, over 4698.00 frames. ], tot_loss[loss=0.338, simple_loss=0.3895, pruned_loss=0.1432, over 937690.66 frames. ], batch size: 13, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:44:20,174 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.69 vs. 
limit=13.836 +2024-07-27 12:44:23,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=16896.0, ans=0.0 +2024-07-27 12:44:25,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=16909.333333333332, ans=0.125 +2024-07-27 12:44:26,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.13 vs. limit=20.182 +2024-07-27 12:44:29,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.58 vs. limit=13.841 +2024-07-27 12:44:30,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=20.182 +2024-07-27 12:44:43,827 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.416e+01 7.061e+01 7.801e+01 1.253e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 12:44:45,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=16936.0, ans=0.125 +2024-07-27 12:44:46,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16936.0, ans=0.13064 +2024-07-27 12:44:58,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=16949.333333333332, ans=0.0 +2024-07-27 12:44:58,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16949.333333333332, ans=0.13050666666666666 +2024-07-27 12:44:59,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=16962.666666666668, ans=0.125 +2024-07-27 12:45:00,018 INFO [train.py:1114] (2/4) Epoch 2, batch 2500, loss[loss=0.343, simple_loss=0.3875, pruned_loss=0.1493, over 4806.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3892, pruned_loss=0.1431, over 939540.02 frames. ], batch size: 14, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:04,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16962.666666666668, ans=0.125 +2024-07-27 12:45:06,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=16976.0, ans=0.0 +2024-07-27 12:45:12,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.41 vs. limit=9.244 +2024-07-27 12:45:12,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=16989.333333333332, ans=0.0 +2024-07-27 12:45:15,274 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=13.870999999999999 +2024-07-27 12:45:16,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=16989.333333333332, ans=0.3053733333333335 +2024-07-27 12:45:20,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.29 vs. 
limit=13.876000000000001 +2024-07-27 12:45:26,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=17002.666666666668, ans=0.125 +2024-07-27 12:45:27,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=17002.666666666668, ans=10.0 +2024-07-27 12:45:35,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.28 vs. limit=20.262 +2024-07-27 12:45:38,909 INFO [train.py:1114] (2/4) Epoch 2, batch 2550, loss[loss=0.3255, simple_loss=0.3712, pruned_loss=0.1399, over 4798.00 frames. ], tot_loss[loss=0.3362, simple_loss=0.3886, pruned_loss=0.1419, over 939117.71 frames. ], batch size: 11, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:42,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=17029.333333333332, ans=0.30397333333333343 +2024-07-27 12:46:05,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17056.0, ans=0.12944 +2024-07-27 12:46:09,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=17056.0, ans=0.0 +2024-07-27 12:46:12,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=17069.333333333332, ans=0.125 +2024-07-27 12:46:13,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.481e+01 6.949e+01 7.902e+01 1.029e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 12:46:26,457 INFO [train.py:1114] (2/4) Epoch 2, batch 2600, loss[loss=0.3734, simple_loss=0.4111, pruned_loss=0.1679, over 4898.00 frames. ], tot_loss[loss=0.3355, simple_loss=0.3879, pruned_loss=0.1415, over 938146.77 frames. ], batch size: 13, lr: 3.14e-02, grad_scale: 32.0 +2024-07-27 12:46:37,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.87 vs. limit=13.554666666666666 +2024-07-27 12:46:56,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=20.352 +2024-07-27 12:47:23,249 INFO [train.py:1114] (2/4) Epoch 2, batch 2650, loss[loss=0.3193, simple_loss=0.3803, pruned_loss=0.1292, over 4629.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.3884, pruned_loss=0.1414, over 939993.63 frames. ], batch size: 16, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:47:25,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.03 vs. limit=13.936 +2024-07-27 12:47:40,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=5.5764 +2024-07-27 12:47:51,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.937e+01 6.612e+01 7.199e+01 8.016e+01 1.169e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 12:48:02,474 INFO [train.py:1114] (2/4) Epoch 2, batch 2700, loss[loss=0.3226, simple_loss=0.3766, pruned_loss=0.1343, over 4730.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3875, pruned_loss=0.1409, over 940005.23 frames. 
], batch size: 14, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:48:02,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=17229.333333333332, ans=0.04949747468305833 +2024-07-27 12:48:20,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=17256.0, ans=0.05 +2024-07-27 12:48:21,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=17256.0, ans=0.125 +2024-07-27 12:48:22,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=17269.333333333332, ans=0.125 +2024-07-27 12:48:29,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=17282.666666666668, ans=0.007112463768115942 +2024-07-27 12:48:30,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=17282.666666666668, ans=0.125 +2024-07-27 12:48:36,980 INFO [train.py:1114] (2/4) Epoch 2, batch 2750, loss[loss=0.2854, simple_loss=0.3355, pruned_loss=0.1176, over 4706.00 frames. ], tot_loss[loss=0.3317, simple_loss=0.385, pruned_loss=0.1392, over 940161.81 frames. ], batch size: 12, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:48:52,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17322.666666666668, ans=0.12677333333333332 +2024-07-27 12:48:57,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=17322.666666666668, ans=0.125 +2024-07-27 12:49:02,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=17336.0, ans=0.0 +2024-07-27 12:49:02,558 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.446e+01 6.464e+01 7.074e+01 8.489e+01 1.052e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-27 12:49:13,956 INFO [train.py:1114] (2/4) Epoch 2, batch 2800, loss[loss=0.5406, simple_loss=0.5162, pruned_loss=0.2825, over 3292.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3855, pruned_loss=0.1399, over 937885.95 frames. ], batch size: 36, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:49:19,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=17362.666666666668, ans=0.125 +2024-07-27 12:49:45,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17416.0, ans=0.125 +2024-07-27 12:49:48,565 INFO [train.py:1114] (2/4) Epoch 2, batch 2850, loss[loss=0.2965, simple_loss=0.3467, pruned_loss=0.1231, over 4957.00 frames. ], tot_loss[loss=0.3325, simple_loss=0.3853, pruned_loss=0.1398, over 936927.82 frames. 
], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:49:48,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=17429.333333333332, ans=0.46143999999999996 +2024-07-27 12:49:49,379 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=3.279e-02 +2024-07-27 12:49:50,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=17429.333333333332, ans=0.125 +2024-07-27 12:49:50,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17429.333333333332, ans=0.12570666666666666 +2024-07-27 12:50:04,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=17456.0, ans=0.0 +2024-07-27 12:50:11,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.590e+01 7.080e+01 8.267e+01 4.948e+02, threshold=1.416e+02, percent-clipped=1.0 +2024-07-27 12:50:11,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=17469.333333333332, ans=0.0 +2024-07-27 12:50:12,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=17469.333333333332, ans=0.28857333333333346 +2024-07-27 12:50:20,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.33 vs. limit=20.612000000000002 +2024-07-27 12:50:21,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.64 vs. limit=14.056000000000001 +2024-07-27 12:50:32,065 INFO [train.py:1114] (2/4) Epoch 2, batch 2900, loss[loss=0.3552, simple_loss=0.398, pruned_loss=0.1562, over 4824.00 frames. ], tot_loss[loss=0.3346, simple_loss=0.3878, pruned_loss=0.1407, over 940627.65 frames. ], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:50:32,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.27 vs. 
limit=20.622 +2024-07-27 12:50:34,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=17496.0, ans=0.0070660869565217395 +2024-07-27 12:50:38,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=17509.333333333332, ans=0.2871733333333335 +2024-07-27 12:50:45,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=17509.333333333332, ans=0.125 +2024-07-27 12:50:59,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=17536.0, ans=0.125 +2024-07-27 12:51:01,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=17536.0, ans=0.07 +2024-07-27 12:51:07,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=17549.333333333332, ans=0.125 +2024-07-27 12:51:09,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=17549.333333333332, ans=0.025 +2024-07-27 12:51:10,565 INFO [train.py:1114] (2/4) Epoch 2, batch 2950, loss[loss=0.3104, simple_loss=0.3649, pruned_loss=0.1279, over 4709.00 frames. ], tot_loss[loss=0.3344, simple_loss=0.3865, pruned_loss=0.1412, over 939566.57 frames. ], batch size: 12, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:29,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.43 vs. limit=14.096 +2024-07-27 12:51:36,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=17602.666666666668, ans=0.0 +2024-07-27 12:51:38,400 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.523e+01 7.161e+01 8.021e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 12:51:43,441 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:51:49,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=17629.333333333332, ans=0.09899494936611666 +2024-07-27 12:51:49,480 INFO [train.py:1114] (2/4) Epoch 2, batch 3000, loss[loss=0.3387, simple_loss=0.3993, pruned_loss=0.1391, over 4759.00 frames. ], tot_loss[loss=0.3321, simple_loss=0.3851, pruned_loss=0.1395, over 938895.02 frames. ], batch size: 13, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:49,480 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 12:52:02,751 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.2667, simple_loss=0.3583, pruned_loss=0.0876, over 944034.00 frames. 
+2024-07-27 12:52:02,752 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 12:52:16,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17629.333333333332, ans=0.12370666666666666 +2024-07-27 12:52:17,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17629.333333333332, ans=0.12370666666666666 +2024-07-27 12:52:28,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=17642.666666666668, ans=0.025 +2024-07-27 12:52:30,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=17656.0, ans=0.125 +2024-07-27 12:52:52,629 INFO [train.py:1114] (2/4) Epoch 2, batch 3050, loss[loss=0.3381, simple_loss=0.3766, pruned_loss=0.1498, over 4637.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3856, pruned_loss=0.1399, over 937771.71 frames. ], batch size: 12, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:53:01,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.17 vs. limit=9.427333333333333 +2024-07-27 12:53:02,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.35 vs. limit=20.782 +2024-07-27 12:53:18,157 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.442e+01 7.179e+01 7.661e+01 1.033e+02, threshold=1.436e+02, percent-clipped=0.0 +2024-07-27 12:53:18,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=17736.0, ans=0.0 +2024-07-27 12:53:18,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=17736.0, ans=0.27924000000000004 +2024-07-27 12:53:25,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=14.155999999999999 +2024-07-27 12:53:27,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.03 vs. limit=14.155999999999999 +2024-07-27 12:53:28,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=17749.333333333332, ans=0.125 +2024-07-27 12:53:29,184 INFO [train.py:1114] (2/4) Epoch 2, batch 3100, loss[loss=0.3682, simple_loss=0.4233, pruned_loss=0.1565, over 4594.00 frames. ], tot_loss[loss=0.3316, simple_loss=0.3846, pruned_loss=0.1393, over 938191.20 frames. 
], batch size: 16, lr: 3.09e-02, grad_scale: 32.0
+2024-07-27 12:53:30,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=17762.666666666668, ans=0.0
+2024-07-27 12:53:30,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=17762.666666666668, ans=0.125
+2024-07-27 12:53:31,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=17762.666666666668, ans=0.04949747468305833
+2024-07-27 12:53:38,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=17776.0, ans=0.125
+2024-07-27 12:53:39,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=17776.0, ans=0.035
+2024-07-27 12:53:48,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.99 vs. limit=20.842
+2024-07-27 12:53:51,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=17802.666666666668, ans=0.025
+2024-07-27 12:53:52,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17802.666666666668, ans=0.125
+2024-07-27 12:54:03,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.28 vs. limit=14.186
+2024-07-27 12:54:03,869 INFO [train.py:1114] (2/4) Epoch 2, batch 3150, loss[loss=0.2921, simple_loss=0.3546, pruned_loss=0.1147, over 4645.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.3844, pruned_loss=0.1391, over 937999.12 frames. ], batch size: 17, lr: 3.09e-02, grad_scale: 32.0
+2024-07-27 12:54:05,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.56 vs. limit=20.872
+2024-07-27 12:54:06,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=17829.333333333332, ans=0.125
+2024-07-27 12:54:08,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=17829.333333333332, ans=0.04949747468305833
+2024-07-27 12:54:11,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=11.137066666666668
+2024-07-27 12:54:17,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=17856.0, ans=0.125
+2024-07-27 12:54:18,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.61 vs. limit=9.464
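
The Whitening entries above and below compare a per-module "metric" against a scheduled "limit". A plausible decorrelation measure of this kind is the ratio of the arithmetic to the geometric mean of the feature-covariance eigenvalues, which is 1.0 for perfectly white features and grows with correlation; the formulation below is an assumed sketch for illustration, not the exact scaling.py code:

# Sketch of a whitening metric (assumed formulation, illustrative only).
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (num_frames, num_channels) activations
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.t() @ x) / x.shape[0]                # (C, C) covariance
    eigs = torch.linalg.eigvalsh(cov).clamp(min=1e-10)
    arith = eigs.mean()
    geom = eigs.log().mean().exp()
    return arith / geom                           # >= 1.0; 1.0 when white

x = torch.randn(1000, 256) @ torch.randn(256, 256)  # correlated features
metric = whitening_metric(x)
# a whitening module would apply a corrective gradient only when the
# measured metric exceeds its (scheduled) limit, as in the log lines:
penalize = metric > 9.464
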
+2024-07-27 12:54:27,255 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.385e+01 6.845e+01 7.954e+01 1.765e+02, threshold=1.369e+02, percent-clipped=1.0
+2024-07-27 12:54:28,101 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.514e-03
+2024-07-27 12:54:28,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17869.333333333332, ans=0.12130666666666667
+2024-07-27 12:54:32,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17882.666666666668, ans=0.12117333333333333
+2024-07-27 12:54:36,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=17882.666666666668, ans=0.125
+2024-07-27 12:54:36,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17882.666666666668, ans=0.12117333333333333
+2024-07-27 12:54:38,096 INFO [train.py:1114] (2/4) Epoch 2, batch 3200, loss[loss=0.3163, simple_loss=0.3769, pruned_loss=0.1278, over 4823.00 frames. ], tot_loss[loss=0.3298, simple_loss=0.3835, pruned_loss=0.1381, over 939439.30 frames. ], batch size: 13, lr: 3.08e-02, grad_scale: 32.0
+2024-07-27 12:54:48,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=17896.0, ans=0.125
+2024-07-27 12:54:51,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=17909.333333333332, ans=0.0
+2024-07-27 12:54:59,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=17922.666666666668, ans=0.0
+2024-07-27 12:55:03,378 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=1.401e-02
+2024-07-27 12:55:04,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=17936.0, ans=0.125
+2024-07-27 12:55:08,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=17936.0, ans=0.006970434782608696
+2024-07-27 12:55:15,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.29 vs. limit=14.231
+2024-07-27 12:55:16,947 INFO [train.py:1114] (2/4) Epoch 2, batch 3250, loss[loss=0.312, simple_loss=0.3704, pruned_loss=0.1268, over 4936.00 frames. ], tot_loss[loss=0.3306, simple_loss=0.3839, pruned_loss=0.1386, over 940657.90 frames.
], batch size: 14, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:55:39,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=17962.666666666668, ans=0.125 +2024-07-27 12:55:41,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=17962.666666666668, ans=0.125 +2024-07-27 12:55:44,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=17976.0, ans=0.0 +2024-07-27 12:55:50,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.19 vs. limit=13.994666666666665 +2024-07-27 12:55:52,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=17989.333333333332, ans=0.025 +2024-07-27 12:56:00,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.105e+01 6.706e+01 7.327e+01 8.227e+01 1.129e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 12:56:11,188 INFO [train.py:1114] (2/4) Epoch 2, batch 3300, loss[loss=0.3633, simple_loss=0.4132, pruned_loss=0.1568, over 4704.00 frames. ], tot_loss[loss=0.3289, simple_loss=0.3821, pruned_loss=0.1378, over 940951.11 frames. ], batch size: 19, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:56:14,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.98 vs. limit=9.507333333333332 +2024-07-27 12:56:19,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=18042.666666666668, ans=0.05 +2024-07-27 12:56:20,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.07 vs. limit=14.266 +2024-07-27 12:56:24,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18056.0, ans=0.11943999999999999 +2024-07-27 12:56:33,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=18069.333333333332, ans=0.006941449275362319 +2024-07-27 12:56:39,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=18082.666666666668, ans=0.0 +2024-07-27 12:56:47,406 INFO [train.py:1114] (2/4) Epoch 2, batch 3350, loss[loss=0.331, simple_loss=0.3862, pruned_loss=0.1379, over 4611.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3824, pruned_loss=0.1383, over 939169.58 frames. 
], batch size: 17, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:56:56,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18109.333333333332, ans=0.11890666666666666 +2024-07-27 12:57:10,884 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.417e+01 6.714e+01 7.318e+01 8.136e+01 2.148e+02, threshold=1.464e+02, percent-clipped=2.0 +2024-07-27 12:57:11,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=18136.0, ans=0.006926956521739131 +2024-07-27 12:57:13,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18136.0, ans=0.125 +2024-07-27 12:57:22,056 INFO [train.py:1114] (2/4) Epoch 2, batch 3400, loss[loss=0.2944, simple_loss=0.3422, pruned_loss=0.1233, over 4808.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.383, pruned_loss=0.1382, over 937660.21 frames. ], batch size: 11, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:57:22,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=18162.666666666668, ans=0.125 +2024-07-27 12:57:23,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18162.666666666668, ans=0.125 +2024-07-27 12:57:32,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.55 vs. limit=21.131999999999998 +2024-07-27 12:57:39,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=18189.333333333332, ans=0.00691536231884058 +2024-07-27 12:57:57,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18202.666666666668, ans=0.11797333333333332 +2024-07-27 12:58:00,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=18216.0, ans=0.125 +2024-07-27 12:58:03,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.34 vs. limit=21.162 +2024-07-27 12:58:04,140 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:58:07,999 INFO [train.py:1114] (2/4) Epoch 2, batch 3450, loss[loss=0.3143, simple_loss=0.3895, pruned_loss=0.1196, over 4702.00 frames. ], tot_loss[loss=0.3307, simple_loss=0.3839, pruned_loss=0.1388, over 937628.91 frames. ], batch size: 19, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:58:20,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.69 vs. limit=21.182000000000002 +2024-07-27 12:58:21,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18242.666666666668, ans=0.11757333333333334 +2024-07-27 12:58:35,293 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.586e+01 6.989e+01 7.796e+01 1.302e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 12:58:50,959 INFO [train.py:1114] (2/4) Epoch 2, batch 3500, loss[loss=0.254, simple_loss=0.3213, pruned_loss=0.09334, over 4940.00 frames. 
], tot_loss[loss=0.3304, simple_loss=0.3835, pruned_loss=0.1387, over 938357.25 frames. ], batch size: 12, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:58:51,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=18296.0, ans=0.2596400000000001 +2024-07-27 12:58:56,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.whiten.whitening_limit, batch_count=18296.0, ans=14.361 +2024-07-27 12:59:08,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.46 vs. limit=14.371 +2024-07-27 12:59:16,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=18336.0, ans=0.006883478260869566 +2024-07-27 12:59:20,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=18336.0, ans=0.0 +2024-07-27 12:59:21,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.97 vs. limit=21.252000000000002 +2024-07-27 12:59:23,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18349.333333333332, ans=0.0 +2024-07-27 12:59:25,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=11.339733333333333 +2024-07-27 12:59:28,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18362.666666666668, ans=0.125 +2024-07-27 12:59:29,552 INFO [train.py:1114] (2/4) Epoch 2, batch 3550, loss[loss=0.3973, simple_loss=0.4325, pruned_loss=0.1811, over 4659.00 frames. ], tot_loss[loss=0.3299, simple_loss=0.3833, pruned_loss=0.1382, over 938975.35 frames. ], batch size: 14, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 12:59:42,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=19.47 vs. limit=14.188 +2024-07-27 12:59:53,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+01 6.416e+01 6.884e+01 7.445e+01 1.050e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 12:59:54,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18402.666666666668, ans=0.125 +2024-07-27 13:00:00,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=11.366399999999999 +2024-07-27 13:00:04,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=18429.333333333332, ans=0.125 +2024-07-27 13:00:04,823 INFO [train.py:1114] (2/4) Epoch 2, batch 3600, loss[loss=0.3209, simple_loss=0.3879, pruned_loss=0.1269, over 4961.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3836, pruned_loss=0.1377, over 940220.23 frames. 
], batch size: 13, lr: 3.05e-02, grad_scale: 32.0
+2024-07-27 13:00:23,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18456.0, ans=0.125
+2024-07-27 13:00:28,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18469.333333333332, ans=0.125
+2024-07-27 13:00:29,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=18469.333333333332, ans=0.25357333333333343
+2024-07-27 13:00:42,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=18482.666666666668, ans=0.125
+2024-07-27 13:00:46,944 INFO [train.py:1114] (2/4) Epoch 2, batch 3650, loss[loss=0.321, simple_loss=0.3801, pruned_loss=0.1309, over 4903.00 frames. ], tot_loss[loss=0.3297, simple_loss=0.3835, pruned_loss=0.1379, over 940448.94 frames. ], batch size: 15, lr: 3.04e-02, grad_scale: 32.0
+2024-07-27 13:00:47,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=18496.0, ans=0.2526400000000001
+2024-07-27 13:00:55,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=18509.333333333332, ans=0.07
+2024-07-27 13:00:57,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18509.333333333332, ans=0.125
+2024-07-27 13:01:05,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=18522.666666666668, ans=0.025
+2024-07-27 13:01:15,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18522.666666666668, ans=0.11477333333333331
+2024-07-27 13:01:15,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18522.666666666668, ans=0.125
+2024-07-27 13:01:20,529 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.247e+01 6.612e+01 7.129e+01 7.786e+01 1.024e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 13:01:22,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18536.0, ans=0.11463999999999999
+2024-07-27 13:01:23,025 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:01:30,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=18549.333333333332, ans=0.0
+2024-07-27 13:01:31,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.75 vs. limit=14.456
+2024-07-27 13:01:32,249 INFO [train.py:1114] (2/4) Epoch 2, batch 3700, loss[loss=0.3661, simple_loss=0.4152, pruned_loss=0.1585, over 4936.00 frames. ], tot_loss[loss=0.3274, simple_loss=0.3823, pruned_loss=0.1363, over 941400.15 frames. ], batch size: 14, lr: 3.04e-02, grad_scale: 32.0
+2024-07-27 13:01:35,307 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=5.7844
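
The periodic optim.py WARNING lines report quartiles of recent global gradient norms together with a clipping threshold; in the numbers logged here the threshold tracks roughly 2x the middle quartile, consistent with the logged Clipping_scale=2.0. A minimal sketch of that policy, assuming a running window of norms (class and method names are illustrative, not the optim.py source):

# Sketch of median-scaled gradient clipping with quartile diagnostics
# (assumed policy: threshold = clipping_scale * running median).
import torch
from collections import deque

class GradNormMonitor:
    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)

    def clip_(self, params) -> float:
        grads = [p.grad.norm() for p in params if p.grad is not None]
        norm = torch.norm(torch.stack(grads)).item()  # global grad norm
        self.norms.append(norm)
        ordered = sorted(self.norms)
        median = ordered[len(ordered) // 2]
        threshold = self.scale * median
        if norm > threshold:                 # shrink all grads in place
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return threshold

p = torch.nn.Parameter(torch.randn(10))
p.grad = torch.randn(10)
thr = GradNormMonitor().clip_([p])
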
+2024-07-27 13:01:38,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=14.466000000000001
+2024-07-27 13:01:54,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.51 vs. limit=11.435733333333332
+2024-07-27 13:01:57,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=18602.666666666668, ans=0.125
+2024-07-27 13:02:11,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=18602.666666666668, ans=0.125
+2024-07-27 13:02:18,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=18616.0, ans=0.006822608695652174
+2024-07-27 13:02:25,793 INFO [train.py:1114] (2/4) Epoch 2, batch 3750, loss[loss=0.2549, simple_loss=0.3087, pruned_loss=0.1006, over 4812.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3819, pruned_loss=0.136, over 943155.53 frames. ], batch size: 11, lr: 3.03e-02, grad_scale: 32.0
+2024-07-27 13:02:26,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.19 vs. limit=21.472
+2024-07-27 13:02:31,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=18629.333333333332, ans=0.11370666666666668
+2024-07-27 13:02:59,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=18656.0, ans=0.0068139130434782605
+2024-07-27 13:03:38,404 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.486e+01 7.051e+01 7.963e+01 1.237e+02, threshold=1.410e+02, percent-clipped=0.0
+2024-07-27 13:03:38,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=18669.333333333332, ans=0.24657333333333342
+2024-07-27 13:03:40,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=18669.333333333332, ans=0.006811014492753623
+2024-07-27 13:03:40,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.98 vs. limit=14.501000000000001
+2024-07-27 13:03:42,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=18669.333333333332, ans=0.0
+2024-07-27 13:04:17,941 INFO [train.py:1114] (2/4) Epoch 2, batch 3800, loss[loss=0.3184, simple_loss=0.3863, pruned_loss=0.1252, over 4818.00 frames. ], tot_loss[loss=0.3269, simple_loss=0.3812, pruned_loss=0.1363, over 941574.21 frames. ], batch size: 14, lr: 3.03e-02, grad_scale: 32.0
+2024-07-27 13:04:30,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.40 vs. limit=14.516
+2024-07-27 13:04:35,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18709.333333333332, ans=0.125
+2024-07-27 13:05:15,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.68 vs.
limit=21.542 +2024-07-27 13:06:59,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=18749.333333333332, ans=0.125 +2024-07-27 13:07:10,276 INFO [train.py:1114] (2/4) Epoch 2, batch 3850, loss[loss=0.2929, simple_loss=0.3521, pruned_loss=0.1169, over 4634.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3805, pruned_loss=0.1359, over 942498.65 frames. ], batch size: 16, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:08:14,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.59 vs. limit=21.592 +2024-07-27 13:08:18,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.77 vs. limit=21.592 +2024-07-27 13:08:21,245 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:08:23,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.367e+01 6.538e+01 7.102e+01 7.754e+01 1.153e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 13:08:41,642 INFO [train.py:1114] (2/4) Epoch 2, batch 3900, loss[loss=0.2809, simple_loss=0.3672, pruned_loss=0.09729, over 4817.00 frames. ], tot_loss[loss=0.3264, simple_loss=0.3813, pruned_loss=0.1357, over 942781.36 frames. ], batch size: 14, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:09:02,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=18842.666666666668, ans=0.0 +2024-07-27 13:09:28,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=18856.0, ans=0.125 +2024-07-27 13:09:52,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=18896.0, ans=21.672 +2024-07-27 13:09:52,406 INFO [train.py:1114] (2/4) Epoch 2, batch 3950, loss[loss=0.3646, simple_loss=0.4106, pruned_loss=0.1593, over 4855.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3811, pruned_loss=0.1349, over 944830.17 frames. ], batch size: 16, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:09:52,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=18896.0, ans=0.23864000000000007 +2024-07-27 13:09:57,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=18896.0, ans=0.0 +2024-07-27 13:10:07,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18909.333333333332, ans=0.125 +2024-07-27 13:10:11,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=18922.666666666668, ans=0.23770666666666673 +2024-07-27 13:10:28,278 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.314e+01 6.596e+01 7.241e+01 7.988e+01 1.615e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 13:10:30,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18936.0, ans=0.125 +2024-07-27 13:10:31,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.51 vs. 
limit=14.600999999999999 +2024-07-27 13:10:34,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=18949.333333333332, ans=0.125 +2024-07-27 13:11:05,264 INFO [train.py:1114] (2/4) Epoch 2, batch 4000, loss[loss=0.3559, simple_loss=0.3993, pruned_loss=0.1563, over 4770.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3825, pruned_loss=0.1367, over 941363.81 frames. ], batch size: 12, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:11:15,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=18976.0, ans=0.125 +2024-07-27 13:11:24,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=18989.333333333332, ans=0.025 +2024-07-27 13:11:35,161 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:11:37,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=19016.0, ans=0.07 +2024-07-27 13:11:59,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=19016.0, ans=0.125 +2024-07-27 13:12:02,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=19016.0, ans=0.125 +2024-07-27 13:12:03,989 INFO [train.py:1114] (2/4) Epoch 2, batch 4050, loss[loss=0.3548, simple_loss=0.3905, pruned_loss=0.1595, over 3443.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3812, pruned_loss=0.136, over 939962.48 frames. ], batch size: 35, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:14,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=19042.666666666668, ans=0.9404266666666666 +2024-07-27 13:12:26,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=19069.333333333332, ans=0.025 +2024-07-27 13:12:27,060 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.071e+01 6.599e+01 7.309e+01 8.116e+01 1.221e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 13:12:39,185 INFO [train.py:1114] (2/4) Epoch 2, batch 4100, loss[loss=0.3454, simple_loss=0.3939, pruned_loss=0.1485, over 4902.00 frames. ], tot_loss[loss=0.3294, simple_loss=0.3832, pruned_loss=0.1379, over 938729.32 frames. ], batch size: 15, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:59,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=19122.666666666668, ans=0.0 +2024-07-27 13:13:11,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=19149.333333333332, ans=0.0 +2024-07-27 13:13:12,902 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=5.69 vs. limit=14.681000000000001 +2024-07-27 13:13:15,007 INFO [train.py:1114] (2/4) Epoch 2, batch 4150, loss[loss=0.3202, simple_loss=0.3767, pruned_loss=0.1319, over 4828.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3805, pruned_loss=0.1356, over 938238.29 frames. 
], batch size: 13, lr: 3.00e-02, grad_scale: 32.0 +2024-07-27 13:13:15,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=19162.666666666668, ans=0.2 +2024-07-27 13:13:16,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.45 vs. limit=21.872 +2024-07-27 13:13:21,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=19176.0, ans=0.22884000000000004 +2024-07-27 13:13:22,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.98 vs. limit=21.881999999999998 +2024-07-27 13:13:30,888 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:13:31,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=19189.333333333332, ans=0.125 +2024-07-27 13:13:32,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=19189.333333333332, ans=0.125 +2024-07-27 13:13:44,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=19202.666666666668, ans=0.04949747468305833 +2024-07-27 13:13:44,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.337e+01 6.945e+01 7.844e+01 2.237e+02, threshold=1.389e+02, percent-clipped=1.0 +2024-07-27 13:13:58,217 INFO [train.py:1114] (2/4) Epoch 2, batch 4200, loss[loss=0.4078, simple_loss=0.4454, pruned_loss=0.1851, over 4907.00 frames. ], tot_loss[loss=0.3247, simple_loss=0.3798, pruned_loss=0.1348, over 940114.81 frames. ], batch size: 15, lr: 3.00e-02, grad_scale: 32.0 +2024-07-27 13:14:18,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=19269.333333333332, ans=0.125 +2024-07-27 13:14:32,752 INFO [train.py:1114] (2/4) Epoch 2, batch 4250, loss[loss=0.2799, simple_loss=0.3479, pruned_loss=0.1059, over 4636.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3785, pruned_loss=0.1336, over 941115.50 frames. ], batch size: 12, lr: 2.99e-02, grad_scale: 32.0 +2024-07-27 13:14:49,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=14.746 +2024-07-27 13:14:55,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.180e+01 6.301e+01 6.853e+01 7.797e+01 1.151e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-27 13:14:56,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=19336.0, ans=0.2232400000000001 +2024-07-27 13:14:58,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=19336.0, ans=0.94336 +2024-07-27 13:15:01,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=19349.333333333332, ans=0.2 +2024-07-27 13:15:06,762 INFO [train.py:1114] (2/4) Epoch 2, batch 4300, loss[loss=0.3298, simple_loss=0.3911, pruned_loss=0.1342, over 4766.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3785, pruned_loss=0.1333, over 939936.84 frames. 
], batch size: 13, lr: 2.99e-02, grad_scale: 32.0
+2024-07-27 13:15:07,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.86 vs. limit=22.022
+2024-07-27 13:15:14,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=19362.666666666668, ans=0.0
+2024-07-27 13:15:19,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=19376.0, ans=0.125
+2024-07-27 13:15:20,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=19376.0, ans=0.0
+2024-07-27 13:15:21,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.79 vs. limit=14.766
+2024-07-27 13:15:23,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=19376.0, ans=0.125
+2024-07-27 13:15:25,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19389.333333333332, ans=0.10610666666666668
+2024-07-27 13:15:27,333 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:15:36,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=19402.666666666668, ans=0.0
+2024-07-27 13:15:42,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=19416.0, ans=0.0
+2024-07-27 13:15:43,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=19416.0, ans=0.09899494936611666
+2024-07-27 13:15:45,784 INFO [train.py:1114] (2/4) Epoch 2, batch 4350, loss[loss=0.3032, simple_loss=0.3569, pruned_loss=0.1247, over 4755.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3789, pruned_loss=0.1334, over 940891.92 frames. ], batch size: 13, lr: 2.98e-02, grad_scale: 32.0
+2024-07-27 13:15:47,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.58 vs. limit=22.072
+2024-07-27 13:15:51,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.55 vs. limit=14.714666666666666
+2024-07-27 13:15:52,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=19442.666666666668, ans=0.49164
+2024-07-27 13:15:54,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.06 vs. limit=14.721333333333334
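
The lr values in the train.py lines decay smoothly with the batch count (3.10e-02 down to about 2.9e-02 across this span). A sketch of an Eden-style schedule of the kind used in this recipe family; the parameter names and constants below are assumptions for illustration, not the values used in this run:

# Illustrative Eden-style learning-rate schedule (assumed constants).
def eden_lr(base_lr: float, step: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    # decays smoothly in both the batch and the epoch dimension
    batch_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# around batch ~19400 of epoch 2 this yields lr on the order of the
# logged 3e-02 values, given a suitable base_lr:
print(eden_lr(0.06, 19400, 2.0))
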
+2024-07-27 13:15:57,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=19442.666666666668, ans=0.125
+2024-07-27 13:15:58,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=19456.0, ans=0.21904000000000012
+2024-07-27 13:16:01,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19456.0, ans=0.125
+2024-07-27 13:16:05,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=19456.0, ans=0.125
+2024-07-27 13:16:09,151 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.368e+01 6.866e+01 7.654e+01 1.225e+02, threshold=1.373e+02, percent-clipped=0.0
+2024-07-27 13:16:11,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.03 vs. limit=22.102
+2024-07-27 13:16:21,960 INFO [train.py:1114] (2/4) Epoch 2, batch 4400, loss[loss=0.3114, simple_loss=0.369, pruned_loss=0.1269, over 4811.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3796, pruned_loss=0.1333, over 940742.25 frames. ], batch size: 14, lr: 2.98e-02, grad_scale: 32.0
+2024-07-27 13:16:23,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=19496.0, ans=0.21764000000000006
+2024-07-27 13:16:34,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.65 vs. limit=14.815999999999999
+2024-07-27 13:16:36,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.95 vs. limit=22.142
+2024-07-27 13:16:37,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=19522.666666666668, ans=7.904533333333333
+2024-07-27 13:16:46,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=19536.0, ans=0.125
+2024-07-27 13:16:48,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=14.826
+2024-07-27 13:16:49,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19536.0, ans=0.125
+2024-07-27 13:16:57,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=19562.666666666668, ans=0.0
+2024-07-27 13:16:58,329 INFO [train.py:1114] (2/4) Epoch 2, batch 4450, loss[loss=0.3211, simple_loss=0.3744, pruned_loss=0.1339, over 4937.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.3811, pruned_loss=0.1347, over 938879.44 frames.
], batch size: 12, lr: 2.98e-02, grad_scale: 32.0 +2024-07-27 13:16:59,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=19562.666666666668, ans=0.006616811594202898 +2024-07-27 13:16:59,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=19562.666666666668, ans=0.125 +2024-07-27 13:17:00,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.26 vs. limit=14.836 +2024-07-27 13:17:13,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=19589.333333333332, ans=0.125 +2024-07-27 13:17:20,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19589.333333333332, ans=0.125 +2024-07-27 13:17:29,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=19602.666666666668, ans=0.125 +2024-07-27 13:17:31,925 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.227e+01 6.384e+01 6.851e+01 7.779e+01 1.148e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-27 13:17:36,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=19616.0, ans=0.0 +2024-07-27 13:17:37,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=19616.0, ans=0.10383999999999999 +2024-07-27 13:17:40,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19616.0, ans=0.10383999999999999 +2024-07-27 13:17:42,986 INFO [train.py:1114] (2/4) Epoch 2, batch 4500, loss[loss=0.3315, simple_loss=0.3945, pruned_loss=0.1343, over 4746.00 frames. ], tot_loss[loss=0.3262, simple_loss=0.3819, pruned_loss=0.1353, over 938004.10 frames. ], batch size: 14, lr: 2.97e-02, grad_scale: 32.0 +2024-07-27 13:18:01,134 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=14.871 +2024-07-27 13:18:05,549 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.287e+00 +2024-07-27 13:18:10,352 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:18:17,186 INFO [train.py:1114] (2/4) Epoch 2, batch 4550, loss[loss=0.3008, simple_loss=0.3746, pruned_loss=0.1135, over 4892.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.3815, pruned_loss=0.1349, over 940009.74 frames. ], batch size: 13, lr: 2.97e-02, grad_scale: 32.0 +2024-07-27 13:18:34,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=14.896 +2024-07-27 13:18:36,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=19722.666666666668, ans=0.125 +2024-07-27 13:18:36,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.98 vs. 
limit=9.930666666666667 +2024-07-27 13:18:43,568 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 6.563e+01 7.303e+01 8.334e+01 1.051e+02, threshold=1.461e+02, percent-clipped=0.0 +2024-07-27 13:18:47,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.44 vs. limit=22.302 +2024-07-27 13:18:54,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=19749.333333333332, ans=0.125 +2024-07-27 13:18:57,903 INFO [train.py:1114] (2/4) Epoch 2, batch 4600, loss[loss=0.3072, simple_loss=0.3625, pruned_loss=0.126, over 4500.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3793, pruned_loss=0.1338, over 937961.67 frames. ], batch size: 21, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:19:03,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=19762.666666666668, ans=0.125 +2024-07-27 13:19:07,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=19776.0, ans=0.006570434782608697 +2024-07-27 13:19:10,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19789.333333333332, ans=0.125 +2024-07-27 13:19:14,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=19789.333333333332, ans=0.025 +2024-07-27 13:19:25,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19816.0, ans=0.10183999999999999 +2024-07-27 13:19:30,442 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:19:31,756 INFO [train.py:1114] (2/4) Epoch 2, batch 4650, loss[loss=0.3161, simple_loss=0.3908, pruned_loss=0.1207, over 4867.00 frames. ], tot_loss[loss=0.3239, simple_loss=0.3803, pruned_loss=0.1337, over 939865.46 frames. 
], batch size: 16, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:19:31,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=19829.333333333332, ans=0.20597333333333345 +2024-07-27 13:19:36,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=19829.333333333332, ans=0.125 +2024-07-27 13:19:51,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=19869.333333333332, ans=0.006550144927536233 +2024-07-27 13:19:54,897 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+01 6.585e+01 7.200e+01 8.002e+01 1.335e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 13:19:55,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=19869.333333333332, ans=0.125 +2024-07-27 13:19:57,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=19869.333333333332, ans=0.125 +2024-07-27 13:20:02,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=19882.666666666668, ans=0.0 +2024-07-27 13:20:06,203 INFO [train.py:1114] (2/4) Epoch 2, batch 4700, loss[loss=0.2604, simple_loss=0.3254, pruned_loss=0.0977, over 4703.00 frames. ], tot_loss[loss=0.325, simple_loss=0.381, pruned_loss=0.1345, over 937428.52 frames. ], batch size: 11, lr: 2.96e-02, grad_scale: 64.0 +2024-07-27 13:20:32,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.88 vs. limit=14.966000000000001 +2024-07-27 13:20:35,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19922.666666666668, ans=0.10077333333333333 +2024-07-27 13:20:42,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=19936.0, ans=0.2022400000000001 +2024-07-27 13:20:51,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=19949.333333333332, ans=0.125 +2024-07-27 13:20:55,540 INFO [train.py:1114] (2/4) Epoch 2, batch 4750, loss[loss=0.4216, simple_loss=0.4651, pruned_loss=0.189, over 4537.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3815, pruned_loss=0.1348, over 936117.76 frames. 
], batch size: 21, lr: 2.95e-02, grad_scale: 64.0
+2024-07-27 13:20:57,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=19962.666666666668, ans=0.125
+2024-07-27 13:21:02,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19976.0, ans=0.125
+2024-07-27 13:21:19,836 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.354e+01 6.910e+01 7.839e+01 1.849e+02, threshold=1.382e+02, percent-clipped=1.0
+2024-07-27 13:21:20,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20002.666666666668, ans=0.1
+2024-07-27 13:21:24,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=20016.0, ans=0.125
+2024-07-27 13:21:25,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=20016.0, ans=0.125
+2024-07-27 13:21:31,069 INFO [train.py:1114] (2/4) Epoch 2, batch 4800, loss[loss=0.3209, simple_loss=0.3707, pruned_loss=0.1355, over 4699.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3815, pruned_loss=0.1359, over 933593.18 frames. ], batch size: 13, lr: 2.95e-02, grad_scale: 64.0
+2024-07-27 13:21:34,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=20029.333333333332, ans=0.125
+2024-07-27 13:21:40,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20042.666666666668, ans=0.1
+2024-07-27 13:21:48,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=20056.0, ans=0.0
+2024-07-27 13:21:48,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.93 vs. limit=22.5
+2024-07-27 13:21:54,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=20069.333333333332, ans=0.04949747468305833
+2024-07-27 13:22:05,288 INFO [train.py:1114] (2/4) Epoch 2, batch 4850, loss[loss=0.3147, simple_loss=0.3753, pruned_loss=0.1271, over 4746.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3805, pruned_loss=0.1348, over 933063.64 frames. ], batch size: 14, lr: 2.95e-02, grad_scale: 64.0
+2024-07-27 13:22:09,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0
+2024-07-27 13:22:11,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20096.0, ans=0.0
+2024-07-27 13:22:12,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=20096.0, ans=0.125
+2024-07-27 13:22:18,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.37 vs. limit=22.5
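
The tot_loss[...] figures in the train.py lines are running aggregates weighted by frame count (hence "over ~935000 frames"), not simple batch means. A minimal sketch of that bookkeeping; the class, the decay constant, and the exact smoothing are assumptions for illustration (icefall tracks this with its own MetricsTracker-style helper):

# Illustrative frame-weighted running loss (assumed structure and decay).
class RunningLoss:
    def __init__(self, decay: float = 0.99):
        self.decay = decay      # assumed smoothing constant
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        # decaying the history keeps the aggregate responsive to recent
        # batches while the effective frame count stays roughly constant
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tot = RunningLoss()
tot.update(batch_loss=0.3209, batch_frames=4699.0)
print(tot.value, tot.frames)
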
+2024-07-27 13:22:33,557 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.340e+01 6.424e+01 6.890e+01 7.552e+01 1.246e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 13:22:46,567 INFO [train.py:1114] (2/4) Epoch 2, batch 4900, loss[loss=0.3184, simple_loss=0.3979, pruned_loss=0.1194, over 4757.00 frames. ], tot_loss[loss=0.3245, simple_loss=0.3802, pruned_loss=0.1344, over 934460.70 frames. ], batch size: 13, lr: 2.94e-02, grad_scale: 64.0
+2024-07-27 13:22:52,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.81 vs. limit=15.0
+2024-07-27 13:22:58,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=20176.0, ans=0.125
+2024-07-27 13:23:03,532 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.63 vs. limit=12.0
+2024-07-27 13:23:09,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20202.666666666668, ans=0.1
+2024-07-27 13:23:23,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.18 vs. limit=15.0
+2024-07-27 13:23:27,084 INFO [train.py:1114] (2/4) Epoch 2, batch 4950, loss[loss=0.4134, simple_loss=0.4294, pruned_loss=0.1987, over 3345.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3818, pruned_loss=0.1358, over 931377.94 frames. ], batch size: 36, lr: 2.94e-02, grad_scale: 64.0
+2024-07-27 13:23:38,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=20242.666666666668, ans=0.125
+2024-07-27 13:23:46,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=20256.0, ans=0.125
+2024-07-27 13:23:51,053 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.541e+01 7.146e+01 7.949e+01 1.013e+02, threshold=1.429e+02, percent-clipped=0.0
+2024-07-27 13:24:01,984 INFO [train.py:1114] (2/4) Epoch 2, batch 5000, loss[loss=0.374, simple_loss=0.4351, pruned_loss=0.1565, over 4659.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.3795, pruned_loss=0.1338, over 935132.29 frames. ], batch size: 14, lr: 2.93e-02, grad_scale: 64.0
+2024-07-27 13:24:22,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.41 vs. limit=15.0
+2024-07-27 13:24:36,477 INFO [train.py:1114] (2/4) Epoch 2, batch 5050, loss[loss=0.2777, simple_loss=0.3457, pruned_loss=0.1049, over 4852.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3781, pruned_loss=0.1332, over 937821.66 frames.
], batch size: 12, lr: 2.93e-02, grad_scale: 64.0
+2024-07-27 13:24:39,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=20362.666666666668, ans=0.006442898550724637
+2024-07-27 13:24:43,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20376.0, ans=0.1
+2024-07-27 13:24:45,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=20376.0, ans=0.0
+2024-07-27 13:24:45,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=20376.0, ans=0.125
+2024-07-27 13:24:47,189 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.774e+00
+2024-07-27 13:24:51,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=20389.333333333332, ans=0.0
+2024-07-27 13:24:53,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.94 vs. limit=15.0
+2024-07-27 13:24:56,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.50 vs. limit=15.0
+2024-07-27 13:25:00,642 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.416e+01 7.117e+01 7.818e+01 1.344e+02, threshold=1.423e+02, percent-clipped=0.0
+2024-07-27 13:25:02,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.18 vs. limit=15.0
+2024-07-27 13:25:04,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20416.0, ans=0.1
+2024-07-27 13:25:04,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=20416.0, ans=0.125
+2024-07-27 13:25:11,750 INFO [train.py:1114] (2/4) Epoch 2, batch 5100, loss[loss=0.2679, simple_loss=0.321, pruned_loss=0.1074, over 4776.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3802, pruned_loss=0.1355, over 935116.17 frames. ], batch size: 12, lr: 2.93e-02, grad_scale: 64.0
+2024-07-27 13:25:11,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=20429.333333333332, ans=0.2
+2024-07-27 13:25:13,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20429.333333333332, ans=0.1
+2024-07-27 13:25:14,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=20429.333333333332, ans=0.125
+2024-07-27 13:25:15,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20429.333333333332, ans=0.1
+2024-07-27 13:25:21,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20442.666666666668, ans=0.1
+2024-07-27 13:25:25,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=20456.0, ans=0.125
+2024-07-27 13:25:33,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0
+2024-07-27 13:25:41,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=20482.666666666668, ans=0.125
+2024-07-27 13:25:46,239 INFO [train.py:1114] (2/4) Epoch 2, batch 5150, loss[loss=0.339, simple_loss=0.3928, pruned_loss=0.1426, over 4832.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3813, pruned_loss=0.1361, over 936153.90 frames. ], batch size: 16, lr: 2.92e-02, grad_scale: 64.0
+2024-07-27 13:25:49,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20496.0, ans=0.0
+2024-07-27 13:25:53,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=20509.333333333332, ans=0.125
+2024-07-27 13:26:06,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=20536.0, ans=0.006405217391304348
+2024-07-27 13:26:07,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=20536.0, ans=0.125
+2024-07-27 13:26:07,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=20536.0, ans=0.125
+2024-07-27 13:26:09,644 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.469e+01 6.536e+01 7.424e+01 8.253e+01 1.032e+02, threshold=1.485e+02, percent-clipped=0.0
+2024-07-27 13:26:14,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=20549.333333333332, ans=0.04949747468305833
+2024-07-27 13:26:18,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=20549.333333333332, ans=0.006402318840579711
+2024-07-27 13:26:21,110 INFO [train.py:1114] (2/4) Epoch 2, batch 5200, loss[loss=0.3435, simple_loss=0.3952, pruned_loss=0.1459, over 4665.00 frames. ], tot_loss[loss=0.324, simple_loss=0.3799, pruned_loss=0.1341, over 936580.80 frames. ], batch size: 14, lr: 2.92e-02, grad_scale: 64.0
+2024-07-27 13:26:22,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.78 vs. limit=6.0
+2024-07-27 13:26:25,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.62 vs. limit=15.0
+2024-07-27 13:26:31,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.12 vs. limit=15.0
+2024-07-27 13:26:33,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.48 vs. limit=15.0
+2024-07-27 13:26:34,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=20589.333333333332, ans=0.05
+2024-07-27 13:26:39,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=20589.333333333332, ans=0.0
+2024-07-27 13:26:57,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=20616.0, ans=0.0
+2024-07-27 13:26:57,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.04 vs. limit=15.0
+2024-07-27 13:27:00,329 INFO [train.py:1114] (2/4) Epoch 2, batch 5250, loss[loss=0.3258, simple_loss=0.3749, pruned_loss=0.1384, over 4897.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3781, pruned_loss=0.1329, over 935927.39 frames. ], batch size: 13, lr: 2.91e-02, grad_scale: 64.0
+2024-07-27 13:27:02,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=20629.333333333332, ans=0.07
+2024-07-27 13:27:04,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.66 vs. limit=10.0
+2024-07-27 13:27:06,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20629.333333333332, ans=0.1
+2024-07-27 13:27:14,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=20642.666666666668, ans=0.125
+2024-07-27 13:27:17,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=20656.0, ans=0.0
+2024-07-27 13:27:19,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=20656.0, ans=0.95
+2024-07-27 13:27:27,952 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 6.506e+01 7.005e+01 7.765e+01 1.418e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 13:27:30,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=20669.333333333332, ans=0.0
+2024-07-27 13:27:40,061 INFO [train.py:1114] (2/4) Epoch 2, batch 5300, loss[loss=0.3724, simple_loss=0.4073, pruned_loss=0.1687, over 4625.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3787, pruned_loss=0.1334, over 934300.55 frames. ], batch size: 16, lr: 2.91e-02, grad_scale: 64.0
+2024-07-27 13:27:43,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0
+2024-07-27 13:27:46,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=20709.333333333332, ans=0.2
+2024-07-27 13:28:01,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=20736.0, ans=0.006361739130434783
+2024-07-27 13:28:15,713 INFO [train.py:1114] (2/4) Epoch 2, batch 5350, loss[loss=0.2549, simple_loss=0.3163, pruned_loss=0.09673, over 4499.00 frames. ], tot_loss[loss=0.3233, simple_loss=0.3794, pruned_loss=0.1336, over 936341.55 frames. ], batch size: 10, lr: 2.91e-02, grad_scale: 64.0
+2024-07-27 13:28:27,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=20776.0, ans=0.05
+2024-07-27 13:28:30,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=20776.0, ans=0.1
+2024-07-27 13:28:34,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=20789.333333333332, ans=0.125
+2024-07-27 13:28:41,457 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.375e+01 6.982e+01 7.841e+01 1.512e+02, threshold=1.396e+02, percent-clipped=1.0
+2024-07-27 13:28:54,558 INFO [train.py:1114] (2/4) Epoch 2, batch 5400, loss[loss=0.3842, simple_loss=0.4108, pruned_loss=0.1788, over 4358.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.3806, pruned_loss=0.1349, over 931364.51 frames. ], batch size: 26, lr: 2.90e-02, grad_scale: 64.0
+2024-07-27 13:29:09,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=20842.666666666668, ans=0.125
+2024-07-27 13:29:10,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=20856.0, ans=0.125
+2024-07-27 13:29:12,020 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:29:13,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.83 vs. limit=22.5
+2024-07-27 13:29:31,762 INFO [train.py:1114] (2/4) Epoch 2, batch 5450, loss[loss=0.265, simple_loss=0.3268, pruned_loss=0.1016, over 4706.00 frames. ], tot_loss[loss=0.3222, simple_loss=0.3787, pruned_loss=0.1329, over 933930.46 frames. ], batch size: 11, lr: 2.90e-02, grad_scale: 64.0
+2024-07-27 13:29:39,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=20909.333333333332, ans=0.0
+2024-07-27 13:29:55,875 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.377e+01 6.925e+01 7.766e+01 1.521e+02, threshold=1.385e+02, percent-clipped=1.0
+2024-07-27 13:29:56,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=20936.0, ans=0.04949747468305833
+2024-07-27 13:30:03,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.87 vs. limit=22.5
+2024-07-27 13:30:06,862 INFO [train.py:1114] (2/4) Epoch 2, batch 5500, loss[loss=0.3336, simple_loss=0.3852, pruned_loss=0.141, over 4279.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3784, pruned_loss=0.1329, over 931453.70 frames. ], batch size: 25, lr: 2.90e-02, grad_scale: 64.0
+2024-07-27 13:30:14,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.26 vs. limit=22.5
+2024-07-27 13:30:19,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=20976.0, ans=0.125
+2024-07-27 13:30:21,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=20989.333333333332, ans=0.2
+2024-07-27 13:30:40,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=21029.333333333332, ans=0.125
+2024-07-27 13:30:41,368 INFO [train.py:1114] (2/4) Epoch 2, batch 5550, loss[loss=0.3237, simple_loss=0.3993, pruned_loss=0.1241, over 4709.00 frames. ], tot_loss[loss=0.3211, simple_loss=0.3772, pruned_loss=0.1325, over 933548.15 frames. ], batch size: 12, lr: 2.89e-02, grad_scale: 64.0
+2024-07-27 13:30:47,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=21042.666666666668, ans=0.0
+2024-07-27 13:30:51,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21042.666666666668, ans=0.125
+2024-07-27 13:30:54,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21056.0, ans=0.1
+2024-07-27 13:31:04,810 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.613e+01 7.499e+01 8.477e+01 2.130e+02, threshold=1.500e+02, percent-clipped=3.0
+2024-07-27 13:31:07,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21069.333333333332, ans=0.125
+2024-07-27 13:31:11,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21082.666666666668, ans=0.1
+2024-07-27 13:31:15,967 INFO [train.py:1114] (2/4) Epoch 2, batch 5600, loss[loss=0.3386, simple_loss=0.3983, pruned_loss=0.1395, over 4735.00 frames. ], tot_loss[loss=0.3217, simple_loss=0.3775, pruned_loss=0.133, over 934933.65 frames. ], batch size: 14, lr: 2.89e-02, grad_scale: 64.0
+2024-07-27 13:31:18,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=21096.0, ans=0.015
+2024-07-27 13:31:23,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=21109.333333333332, ans=0.125
+2024-07-27 13:31:23,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=21109.333333333332, ans=0.025
+2024-07-27 13:31:27,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21109.333333333332, ans=0.1
+2024-07-27 13:31:31,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0
+2024-07-27 13:31:39,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=21136.0, ans=0.125
+2024-07-27 13:31:44,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21149.333333333332, ans=0.125
+2024-07-27 13:31:50,429 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.53 vs. limit=22.5
+2024-07-27 13:31:50,656 INFO [train.py:1114] (2/4) Epoch 2, batch 5650, loss[loss=0.3601, simple_loss=0.3846, pruned_loss=0.1678, over 4538.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3765, pruned_loss=0.1316, over 937503.94 frames. ], batch size: 21, lr: 2.88e-02, grad_scale: 64.0
+2024-07-27 13:31:53,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0
+2024-07-27 13:31:57,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=21176.0, ans=0.125
+2024-07-27 13:32:01,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=21176.0, ans=0.125
+2024-07-27 13:32:06,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21189.333333333332, ans=0.1
+2024-07-27 13:32:06,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=21189.333333333332, ans=10.0
+2024-07-27 13:32:10,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21189.333333333332, ans=0.125
+2024-07-27 13:32:13,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=21202.666666666668, ans=0.125
+2024-07-27 13:32:14,254 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.325e+01 6.816e+01 7.626e+01 1.168e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-27 13:32:22,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=21216.0, ans=0.125
+2024-07-27 13:32:25,414 INFO [train.py:1114] (2/4) Epoch 2, batch 5700, loss[loss=0.3306, simple_loss=0.3931, pruned_loss=0.134, over 4700.00 frames. ], tot_loss[loss=0.3196, simple_loss=0.3761, pruned_loss=0.1315, over 938542.45 frames. ], batch size: 13, lr: 2.88e-02, grad_scale: 64.0
+2024-07-27 13:32:26,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21229.333333333332, ans=0.125
+2024-07-27 13:32:40,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.53 vs. limit=15.0
+2024-07-27 13:32:42,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.95 vs. limit=22.5
+2024-07-27 13:32:49,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=21256.0, ans=0.125
+2024-07-27 13:32:52,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21269.333333333332, ans=0.1
+2024-07-27 13:32:56,948 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.24 vs. limit=22.5
+2024-07-27 13:32:58,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=21269.333333333332, ans=0.125
+2024-07-27 13:32:58,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=21282.666666666668, ans=0.2
+2024-07-27 13:33:05,787 INFO [train.py:1114] (2/4) Epoch 2, batch 5750, loss[loss=0.2812, simple_loss=0.3661, pruned_loss=0.0982, over 4740.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3784, pruned_loss=0.1331, over 938769.66 frames. ], batch size: 19, lr: 2.88e-02, grad_scale: 64.0
+2024-07-27 13:33:07,387 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:33:10,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=21296.0, ans=0.00624
+2024-07-27 13:33:15,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.18 vs. limit=22.5
+2024-07-27 13:33:31,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=21336.0, ans=0.125
+2024-07-27 13:33:34,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.471e+01 6.485e+01 7.135e+01 7.978e+01 1.224e+02, threshold=1.427e+02, percent-clipped=0.0
+2024-07-27 13:33:38,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21349.333333333332, ans=0.125
+2024-07-27 13:33:45,477 INFO [train.py:1114] (2/4) Epoch 2, batch 5800, loss[loss=0.3499, simple_loss=0.3991, pruned_loss=0.1503, over 4773.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3783, pruned_loss=0.1331, over 938454.38 frames. ], batch size: 19, lr: 2.87e-02, grad_scale: 64.0
+2024-07-27 13:33:54,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.33 vs. limit=15.0
+2024-07-27 13:34:02,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=21389.333333333332, ans=0.125
+2024-07-27 13:34:17,555 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-27 13:34:24,459 INFO [train.py:1114] (2/4) Epoch 2, batch 5850, loss[loss=0.3667, simple_loss=0.4061, pruned_loss=0.1637, over 4427.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3784, pruned_loss=0.1324, over 938442.43 frames. ], batch size: 21, lr: 2.87e-02, grad_scale: 64.0
+2024-07-27 13:34:29,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.43 vs. limit=22.5
+2024-07-27 13:34:49,984 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 6.249e+01 6.870e+01 7.832e+01 1.003e+02, threshold=1.374e+02, percent-clipped=0.0
+2024-07-27 13:34:50,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21469.333333333332, ans=0.1
+2024-07-27 13:34:54,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=21482.666666666668, ans=0.0
+2024-07-27 13:35:01,015 INFO [train.py:1114] (2/4) Epoch 2, batch 5900, loss[loss=0.3762, simple_loss=0.4312, pruned_loss=0.1606, over 4682.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3789, pruned_loss=0.1326, over 938358.41 frames. ], batch size: 15, lr: 2.87e-02, grad_scale: 64.0
+2024-07-27 13:35:06,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21496.0, ans=0.125
+2024-07-27 13:35:13,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.24 vs. limit=6.0
+2024-07-27 13:35:20,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.02 vs. limit=15.0
+2024-07-27 13:35:28,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=21549.333333333332, ans=0.006184927536231885
+2024-07-27 13:35:29,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=21549.333333333332, ans=0.125
+2024-07-27 13:35:33,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0
+2024-07-27 13:35:35,450 INFO [train.py:1114] (2/4) Epoch 2, batch 5950, loss[loss=0.3503, simple_loss=0.4037, pruned_loss=0.1485, over 4680.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3774, pruned_loss=0.1313, over 940242.77 frames. ], batch size: 15, lr: 2.86e-02, grad_scale: 64.0
+2024-07-27 13:35:49,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=15.0
+2024-07-27 13:35:53,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=21589.333333333332, ans=0.09899494936611666
+2024-07-27 13:35:55,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=21589.333333333332, ans=0.2
+2024-07-27 13:35:59,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.345e+01 7.134e+01 8.050e+01 1.843e+02, threshold=1.427e+02, percent-clipped=1.0
+2024-07-27 13:36:02,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21602.666666666668, ans=0.1
+2024-07-27 13:36:10,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=21616.0, ans=0.125
+2024-07-27 13:36:20,606 INFO [train.py:1114] (2/4) Epoch 2, batch 6000, loss[loss=0.3237, simple_loss=0.381, pruned_loss=0.1332, over 4161.00 frames. ], tot_loss[loss=0.3197, simple_loss=0.3765, pruned_loss=0.1314, over 937917.74 frames. ], batch size: 25, lr: 2.86e-02, grad_scale: 64.0
+2024-07-27 13:36:20,607 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 13:36:36,192 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.2564, simple_loss=0.3503, pruned_loss=0.08121, over 944034.00 frames. 
+2024-07-27 13:36:36,192 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-27 13:36:44,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=21642.666666666668, ans=0.2
+2024-07-27 13:36:55,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.95 vs. limit=15.0
+2024-07-27 13:37:09,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21682.666666666668, ans=0.125
+2024-07-27 13:37:11,101 INFO [train.py:1114] (2/4) Epoch 2, batch 6050, loss[loss=0.3131, simple_loss=0.3507, pruned_loss=0.1377, over 4776.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.3746, pruned_loss=0.1305, over 939262.35 frames. ], batch size: 12, lr: 2.85e-02, grad_scale: 64.0
+2024-07-27 13:37:13,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=21696.0, ans=0.125
+2024-07-27 13:37:14,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=21696.0, ans=0.0
+2024-07-27 13:37:24,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21722.666666666668, ans=0.125
+2024-07-27 13:37:26,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=21722.666666666668, ans=0.0
+2024-07-27 13:37:27,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=21722.666666666668, ans=0.125
+2024-07-27 13:37:27,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=21722.666666666668, ans=0.025
+2024-07-27 13:37:32,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.19 vs. limit=22.5
+2024-07-27 13:37:34,503 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.397e+01 6.133e+01 6.810e+01 7.852e+01 1.499e+02, threshold=1.362e+02, percent-clipped=2.0
+2024-07-27 13:37:38,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=21749.333333333332, ans=0.0
+2024-07-27 13:37:44,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21762.666666666668, ans=0.125
+2024-07-27 13:37:45,483 INFO [train.py:1114] (2/4) Epoch 2, batch 6100, loss[loss=0.3258, simple_loss=0.3868, pruned_loss=0.1324, over 4689.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3738, pruned_loss=0.1291, over 938564.24 frames. ], batch size: 15, lr: 2.85e-02, grad_scale: 64.0
+2024-07-27 13:37:47,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=21762.666666666668, ans=0.2
+2024-07-27 13:38:05,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=21802.666666666668, ans=0.0
+2024-07-27 13:38:07,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0
+2024-07-27 13:38:08,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=21802.666666666668, ans=0.125
+2024-07-27 13:38:09,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=21802.666666666668, ans=0.025
+2024-07-27 13:38:20,141 INFO [train.py:1114] (2/4) Epoch 2, batch 6150, loss[loss=0.5019, simple_loss=0.486, pruned_loss=0.2589, over 3030.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3749, pruned_loss=0.1296, over 936812.36 frames. ], batch size: 35, lr: 2.85e-02, grad_scale: 64.0
+2024-07-27 13:38:27,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21842.666666666668, ans=0.125
+2024-07-27 13:38:33,353 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.98 vs. limit=15.0
+2024-07-27 13:38:36,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.33 vs. limit=15.0
+2024-07-27 13:38:42,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.55 vs. limit=15.0
+2024-07-27 13:38:44,320 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.693e+01 6.434e+01 7.098e+01 7.748e+01 1.262e+02, threshold=1.420e+02, percent-clipped=0.0
+2024-07-27 13:38:45,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=12.0
+2024-07-27 13:38:51,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21882.666666666668, ans=0.125
+2024-07-27 13:38:53,713 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
+2024-07-27 13:38:56,916 INFO [train.py:1114] (2/4) Epoch 2, batch 6200, loss[loss=0.3229, simple_loss=0.3914, pruned_loss=0.1272, over 4747.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3761, pruned_loss=0.1305, over 936153.65 frames. ], batch size: 14, lr: 2.84e-02, grad_scale: 64.0
+2024-07-27 13:38:59,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0
+2024-07-27 13:39:03,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21909.333333333332, ans=0.125
+2024-07-27 13:39:12,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=21922.666666666668, ans=0.125
+2024-07-27 13:39:20,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=21922.666666666668, ans=0.0
+2024-07-27 13:39:26,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=21936.0, ans=0.0
+2024-07-27 13:39:31,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=21949.333333333332, ans=0.125
+2024-07-27 13:39:38,440 INFO [train.py:1114] (2/4) Epoch 2, batch 6250, loss[loss=0.3068, simple_loss=0.353, pruned_loss=0.1303, over 4802.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3769, pruned_loss=0.1318, over 932415.98 frames. ], batch size: 14, lr: 2.84e-02, grad_scale: 64.0
+2024-07-27 13:39:42,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21962.666666666668, ans=0.1
+2024-07-27 13:39:48,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21976.0, ans=0.1
+2024-07-27 13:39:54,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=21989.333333333332, ans=0.125
+2024-07-27 13:39:55,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=21989.333333333332, ans=10.0
+2024-07-27 13:40:02,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=22002.666666666668, ans=0.0
+2024-07-27 13:40:04,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.093e+01 6.216e+01 6.990e+01 7.888e+01 1.132e+02, threshold=1.398e+02, percent-clipped=0.0
+2024-07-27 13:40:08,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=22016.0, ans=0.125
+2024-07-27 13:40:08,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=22016.0, ans=0.09899494936611666
+2024-07-27 13:40:12,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.72 vs. limit=15.0
+2024-07-27 13:40:12,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=22016.0, ans=0.125
+2024-07-27 13:40:15,173 INFO [train.py:1114] (2/4) Epoch 2, batch 6300, loss[loss=0.2929, simple_loss=0.358, pruned_loss=0.1139, over 4490.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3775, pruned_loss=0.1323, over 928827.61 frames. ], batch size: 10, lr: 2.84e-02, grad_scale: 64.0
+2024-07-27 13:40:16,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=22029.333333333332, ans=0.125
+2024-07-27 13:40:19,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.30 vs. limit=15.0
+2024-07-27 13:40:29,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=22056.0, ans=0.125
+2024-07-27 13:40:40,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=22069.333333333332, ans=0.07
+2024-07-27 13:40:58,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22082.666666666668, ans=0.1
+2024-07-27 13:41:01,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.27 vs. limit=22.5
+2024-07-27 13:41:02,156 INFO [train.py:1114] (2/4) Epoch 2, batch 6350, loss[loss=0.2869, simple_loss=0.3637, pruned_loss=0.1051, over 4570.00 frames. ], tot_loss[loss=0.3184, simple_loss=0.3755, pruned_loss=0.1307, over 933120.76 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0
+2024-07-27 13:41:04,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=22096.0, ans=0.125
+2024-07-27 13:41:25,746 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.585e+01 6.300e+01 6.631e+01 7.435e+01 1.313e+02, threshold=1.326e+02, percent-clipped=0.0
+2024-07-27 13:41:25,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22136.0, ans=0.125
+2024-07-27 13:41:36,434 INFO [train.py:1114] (2/4) Epoch 2, batch 6400, loss[loss=0.3471, simple_loss=0.4045, pruned_loss=0.1449, over 4639.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3763, pruned_loss=0.131, over 934519.32 frames. ], batch size: 13, lr: 2.83e-02, grad_scale: 64.0
+2024-07-27 13:41:48,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22176.0, ans=0.125
+2024-07-27 13:41:53,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22189.333333333332, ans=0.1
+2024-07-27 13:42:10,992 INFO [train.py:1114] (2/4) Epoch 2, batch 6450, loss[loss=0.3642, simple_loss=0.4139, pruned_loss=0.1573, over 4483.00 frames. ], tot_loss[loss=0.3205, simple_loss=0.3778, pruned_loss=0.1316, over 938303.44 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0
+2024-07-27 13:42:13,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=22229.333333333332, ans=0.125
+2024-07-27 13:42:18,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22242.666666666668, ans=0.0
+2024-07-27 13:42:30,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=22256.0, ans=0.125
+2024-07-27 13:42:31,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=22269.333333333332, ans=0.125
+2024-07-27 13:42:33,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=22269.333333333332, ans=0.125
+2024-07-27 13:42:34,028 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.221e+01 6.785e+01 7.657e+01 1.359e+02, threshold=1.357e+02, percent-clipped=1.0
+2024-07-27 13:42:45,088 INFO [train.py:1114] (2/4) Epoch 2, batch 6500, loss[loss=0.402, simple_loss=0.4266, pruned_loss=0.1887, over 3614.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3756, pruned_loss=0.1299, over 939644.10 frames. ], batch size: 37, lr: 2.82e-02, grad_scale: 64.0
+2024-07-27 13:43:04,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=22336.0, ans=0.125
+2024-07-27 13:43:09,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=22336.0, ans=0.006013913043478261
+2024-07-27 13:43:11,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=22349.333333333332, ans=0.2
+2024-07-27 13:43:12,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22349.333333333332, ans=0.0
+2024-07-27 13:43:19,934 INFO [train.py:1114] (2/4) Epoch 2, batch 6550, loss[loss=0.3029, simple_loss=0.3638, pruned_loss=0.121, over 4822.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.375, pruned_loss=0.1288, over 942648.39 frames. ], batch size: 11, lr: 2.82e-02, grad_scale: 64.0
+2024-07-27 13:43:21,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=22362.666666666668, ans=0.125
+2024-07-27 13:43:33,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.98 vs. limit=10.0
+2024-07-27 13:43:41,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.33 vs. limit=15.0
+2024-07-27 13:43:43,163 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.181e+01 6.196e+01 6.780e+01 7.401e+01 1.122e+02, threshold=1.356e+02, percent-clipped=0.0
+2024-07-27 13:43:43,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=22402.666666666668, ans=0.125
+2024-07-27 13:43:50,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=22416.0, ans=0.5
+2024-07-27 13:43:56,089 INFO [train.py:1114] (2/4) Epoch 2, batch 6600, loss[loss=0.3308, simple_loss=0.3912, pruned_loss=0.1352, over 4942.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3743, pruned_loss=0.128, over 944531.38 frames. ], batch size: 14, lr: 2.82e-02, grad_scale: 128.0
+2024-07-27 13:44:07,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=22442.666666666668, ans=0.0
+2024-07-27 13:44:13,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22456.0, ans=0.1
+2024-07-27 13:44:18,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=22469.333333333332, ans=0.125
+2024-07-27 13:44:28,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=22482.666666666668, ans=0.125
+2024-07-27 13:44:31,240 INFO [train.py:1114] (2/4) Epoch 2, batch 6650, loss[loss=0.3666, simple_loss=0.4134, pruned_loss=0.1599, over 4614.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3742, pruned_loss=0.1279, over 943640.15 frames. ], batch size: 17, lr: 2.81e-02, grad_scale: 128.0
+2024-07-27 13:44:33,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=22496.0, ans=0.2
+2024-07-27 13:44:37,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=22509.333333333332, ans=0.04949747468305833
+2024-07-27 13:44:40,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22509.333333333332, ans=0.1
+2024-07-27 13:44:58,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.864e+01 6.602e+01 7.128e+01 7.971e+01 1.702e+02, threshold=1.426e+02, percent-clipped=1.0
+2024-07-27 13:45:14,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=22549.333333333332, ans=0.125
+2024-07-27 13:45:17,362 INFO [train.py:1114] (2/4) Epoch 2, batch 6700, loss[loss=0.3477, simple_loss=0.4085, pruned_loss=0.1435, over 4710.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3752, pruned_loss=0.1288, over 942360.15 frames. ], batch size: 19, lr: 2.81e-02, grad_scale: 128.0
+2024-07-27 13:45:24,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=22576.0, ans=0.0
+2024-07-27 13:45:28,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.03 vs. limit=6.0
+2024-07-27 13:45:48,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.86 vs. limit=15.0
+2024-07-27 13:45:49,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=22616.0, ans=0.125
+2024-07-27 13:45:53,900 INFO [train.py:1114] (2/4) Epoch 2, batch 6750, loss[loss=0.3026, simple_loss=0.3574, pruned_loss=0.1239, over 4270.00 frames. ], tot_loss[loss=0.3149, simple_loss=0.3743, pruned_loss=0.1277, over 940512.15 frames. ], batch size: 25, lr: 2.81e-02, grad_scale: 128.0
+2024-07-27 13:46:17,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=22669.333333333332, ans=0.07
+2024-07-27 13:46:19,139 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 6.419e+01 6.907e+01 8.025e+01 1.154e+02, threshold=1.381e+02, percent-clipped=0.0
+2024-07-27 13:46:27,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22682.666666666668, ans=0.1
+2024-07-27 13:46:31,969 INFO [train.py:1114] (2/4) Epoch 2, batch 6800, loss[loss=0.2929, simple_loss=0.3778, pruned_loss=0.104, over 4638.00 frames. ], tot_loss[loss=0.3149, simple_loss=0.3741, pruned_loss=0.1279, over 939007.19 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0
+2024-07-27 13:46:37,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=22696.0, ans=0.125
+2024-07-27 13:46:43,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=22709.333333333332, ans=0.2
+2024-07-27 13:46:46,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.94 vs. limit=15.0
+2024-07-27 13:46:50,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=22722.666666666668, ans=0.07
+2024-07-27 13:46:54,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=22736.0, ans=0.2
+2024-07-27 13:46:54,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.54 vs. limit=22.5
+2024-07-27 13:46:57,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=22736.0, ans=0.025
+2024-07-27 13:46:58,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=22749.333333333332, ans=0.04949747468305833
+2024-07-27 13:46:58,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=22749.333333333332, ans=0.125
+2024-07-27 13:47:01,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22749.333333333332, ans=0.1
+2024-07-27 13:47:03,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=22749.333333333332, ans=0.2
+2024-07-27 13:47:06,033 INFO [train.py:1114] (2/4) Epoch 2, batch 6850, loss[loss=0.2836, simple_loss=0.3641, pruned_loss=0.1015, over 4703.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3738, pruned_loss=0.1276, over 940572.84 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0
+2024-07-27 13:47:13,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=22776.0, ans=0.125
+2024-07-27 13:47:18,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=22776.0, ans=0.2
+2024-07-27 13:47:30,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22802.666666666668, ans=0.1
+2024-07-27 13:47:32,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-27 13:47:32,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=22802.666666666668, ans=0.2
+2024-07-27 13:47:33,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 6.353e+01 6.914e+01 7.942e+01 1.137e+02, threshold=1.383e+02, percent-clipped=0.0
+2024-07-27 13:47:39,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22816.0, ans=0.1
+2024-07-27 13:47:40,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=22816.0, ans=0.125
+2024-07-27 13:47:40,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22816.0, ans=0.125
+2024-07-27 13:47:44,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=22816.0, ans=0.0
+2024-07-27 13:47:44,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=22816.0, ans=0.125
+2024-07-27 13:47:45,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22829.333333333332, ans=0.1
+2024-07-27 13:47:46,052 INFO [train.py:1114] (2/4) Epoch 2, batch 6900, loss[loss=0.2599, simple_loss=0.328, pruned_loss=0.0959, over 4971.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3738, pruned_loss=0.1276, over 942945.19 frames. ], batch size: 13, lr: 2.79e-02, grad_scale: 128.0
+2024-07-27 13:47:49,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.89 vs. limit=15.0
+2024-07-27 13:47:53,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.33 vs. limit=15.0
+2024-07-27 13:48:21,664 INFO [train.py:1114] (2/4) Epoch 2, batch 6950, loss[loss=0.3114, simple_loss=0.3628, pruned_loss=0.13, over 4549.00 frames. ], tot_loss[loss=0.3153, simple_loss=0.3742, pruned_loss=0.1282, over 940702.87 frames. ], batch size: 10, lr: 2.79e-02, grad_scale: 128.0
+2024-07-27 13:48:28,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=22896.0, ans=0.125
+2024-07-27 13:48:29,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=22896.0, ans=0.125
+2024-07-27 13:48:29,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=22896.0, ans=0.125
+2024-07-27 13:48:30,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=22896.0, ans=0.005892173913043478
+2024-07-27 13:48:53,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.36 vs. limit=15.0
+2024-07-27 13:48:54,771 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.446e+01 7.112e+01 7.644e+01 1.059e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-27 13:49:03,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=22949.333333333332, ans=0.95
+2024-07-27 13:49:03,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.25 vs. limit=15.0
+2024-07-27 13:49:05,666 INFO [train.py:1114] (2/4) Epoch 2, batch 7000, loss[loss=0.3235, simple_loss=0.3844, pruned_loss=0.1313, over 4622.00 frames. ], tot_loss[loss=0.3139, simple_loss=0.3732, pruned_loss=0.1273, over 938846.28 frames. ], batch size: 17, lr: 2.79e-02, grad_scale: 128.0
+2024-07-27 13:49:05,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=22962.666666666668, ans=0.0
+2024-07-27 13:49:08,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=12.0
+2024-07-27 13:49:22,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=22989.333333333332, ans=0.2
+2024-07-27 13:49:28,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=23002.666666666668, ans=0.07
+2024-07-27 13:49:30,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23002.666666666668, ans=0.125
+2024-07-27 13:49:42,716 INFO [train.py:1114] (2/4) Epoch 2, batch 7050, loss[loss=0.3682, simple_loss=0.4128, pruned_loss=0.1618, over 4711.00 frames. ], tot_loss[loss=0.3141, simple_loss=0.3736, pruned_loss=0.1273, over 942074.86 frames. ], batch size: 19, lr: 2.78e-02, grad_scale: 128.0
+2024-07-27 13:50:05,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=23042.666666666668, ans=0.125
+2024-07-27 13:50:17,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+01 6.903e+01 7.811e+01 8.989e+01 1.248e+02, threshold=1.562e+02, percent-clipped=0.0
+2024-07-27 13:50:21,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=23082.666666666668, ans=0.125
+2024-07-27 13:50:22,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23082.666666666668, ans=0.125
+2024-07-27 13:50:27,351 INFO [train.py:1114] (2/4) Epoch 2, batch 7100, loss[loss=0.2936, simple_loss=0.3663, pruned_loss=0.1104, over 4807.00 frames. ], tot_loss[loss=0.3156, simple_loss=0.375, pruned_loss=0.1281, over 937277.33 frames. ], batch size: 15, lr: 2.78e-02, grad_scale: 64.0
+2024-07-27 13:50:29,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.22 vs. limit=15.0
+2024-07-27 13:50:36,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=23109.333333333332, ans=0.125
+2024-07-27 13:50:41,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.05 vs. limit=15.0
+2024-07-27 13:50:47,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=23122.666666666668, ans=0.0
+2024-07-27 13:51:01,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=23149.333333333332, ans=0.0
+2024-07-27 13:51:02,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0
+2024-07-27 13:51:07,814 INFO [train.py:1114] (2/4) Epoch 2, batch 7150, loss[loss=0.3517, simple_loss=0.3996, pruned_loss=0.1518, over 4470.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3727, pruned_loss=0.1271, over 938064.68 frames. ], batch size: 21, lr: 2.78e-02, grad_scale: 64.0
+2024-07-27 13:51:08,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=23162.666666666668, ans=0.1
+2024-07-27 13:51:18,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.25 vs. limit=22.5
+2024-07-27 13:51:20,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.52 vs. limit=15.0
+2024-07-27 13:51:30,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23189.333333333332, ans=0.1
+2024-07-27 13:51:32,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=23189.333333333332, ans=0.125
+2024-07-27 13:51:34,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=23202.666666666668, ans=0.0
+2024-07-27 13:51:38,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=23202.666666666668, ans=0.125
+2024-07-27 13:51:41,522 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.182e+01 6.431e+01 7.159e+01 7.939e+01 1.328e+02, threshold=1.432e+02, percent-clipped=0.0
+2024-07-27 13:51:44,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=23202.666666666668, ans=0.125
+2024-07-27 13:51:50,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=23216.0, ans=0.125
+2024-07-27 13:51:52,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=23216.0, ans=0.125
+2024-07-27 13:51:53,991 INFO [train.py:1114] (2/4) Epoch 2, batch 7200, loss[loss=0.3931, simple_loss=0.4322, pruned_loss=0.177, over 4811.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3723, pruned_loss=0.1268, over 938464.50 frames. ], batch size: 15, lr: 2.77e-02, grad_scale: 64.0
+2024-07-27 13:52:00,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=23242.666666666668, ans=0.125
+2024-07-27 13:52:09,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=19.30 vs. limit=15.0
+2024-07-27 13:52:09,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=23256.0, ans=0.0
+2024-07-27 13:52:11,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=23256.0, ans=0.125
+2024-07-27 13:52:14,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=23256.0, ans=0.125
+2024-07-27 13:52:28,641 INFO [train.py:1114] (2/4) Epoch 2, batch 7250, loss[loss=0.2449, simple_loss=0.319, pruned_loss=0.08539, over 4841.00 frames. ], tot_loss[loss=0.3119, simple_loss=0.3711, pruned_loss=0.1263, over 940100.42 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0
+2024-07-27 13:52:33,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23296.0, ans=0.125
+2024-07-27 13:52:35,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=23309.333333333332, ans=0.125
+2024-07-27 13:52:42,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.02 vs. limit=15.0
+2024-07-27 13:52:43,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=23322.666666666668, ans=0.0
+2024-07-27 13:52:50,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=23336.0, ans=0.125
+2024-07-27 13:52:54,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=23336.0, ans=0.025
+2024-07-27 13:52:54,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.237e+01 6.919e+01 7.525e+01 1.117e+02, threshold=1.384e+02, percent-clipped=0.0
+2024-07-27 13:53:07,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=23349.333333333332, ans=0.2
+2024-07-27 13:53:14,209 INFO [train.py:1114] (2/4) Epoch 2, batch 7300, loss[loss=0.303, simple_loss=0.3539, pruned_loss=0.1261, over 4835.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3716, pruned_loss=0.1263, over 940231.27 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0
+2024-07-27 13:53:14,647 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.01 vs. limit=10.0
+2024-07-27 13:53:15,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.71 vs. limit=22.5
+2024-07-27 13:53:21,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=23376.0, ans=0.125
+2024-07-27 13:53:24,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=23376.0, ans=0.2
+2024-07-27 13:53:27,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=23389.333333333332, ans=0.005784927536231885
+2024-07-27 13:53:27,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=23389.333333333332, ans=0.125
+2024-07-27 13:53:49,000 INFO [train.py:1114] (2/4) Epoch 2, batch 7350, loss[loss=0.2667, simple_loss=0.3552, pruned_loss=0.08913, over 4632.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3722, pruned_loss=0.127, over 939516.45 frames. ], batch size: 12, lr: 2.76e-02, grad_scale: 64.0
+2024-07-27 13:53:56,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23442.666666666668, ans=0.1
+2024-07-27 13:54:00,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=23442.666666666668, ans=0.025
+2024-07-27 13:54:03,509 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:54:13,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=23469.333333333332, ans=0.1
+2024-07-27 13:54:14,586 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.547e+01 7.387e+01 8.600e+01 1.543e+02, threshold=1.477e+02, percent-clipped=1.0
+2024-07-27 13:54:20,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=23469.333333333332, ans=0.125
+2024-07-27 13:54:24,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.32 vs. limit=15.0
+2024-07-27 13:54:45,307 INFO [train.py:1114] (2/4) Epoch 2, batch 7400, loss[loss=0.3136, simple_loss=0.3887, pruned_loss=0.1192, over 4687.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3719, pruned_loss=0.1264, over 940940.42 frames. ], batch size: 13, lr: 2.76e-02, grad_scale: 64.0
+2024-07-27 13:54:53,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.84 vs. limit=15.0
+2024-07-27 13:54:58,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0
+2024-07-27 13:55:19,720 INFO [train.py:1114] (2/4) Epoch 2, batch 7450, loss[loss=0.2451, simple_loss=0.3116, pruned_loss=0.08935, over 4611.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3693, pruned_loss=0.125, over 938231.77 frames. ], batch size: 11, lr: 2.76e-02, grad_scale: 64.0
+2024-07-27 13:55:25,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=23562.666666666668, ans=0.125
+2024-07-27 13:55:30,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. limit=6.0
+2024-07-27 13:55:34,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=23589.333333333332, ans=0.2
+2024-07-27 13:55:36,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=23589.333333333332, ans=0.09899494936611666
+2024-07-27 13:55:37,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=23589.333333333332, ans=0.04949747468305833
+2024-07-27 13:55:45,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=23602.666666666668, ans=0.125
+2024-07-27 13:55:47,792 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.335e+01 6.373e+01 7.113e+01 7.806e+01 1.283e+02, threshold=1.423e+02, percent-clipped=0.0
+2024-07-27 13:55:58,106 INFO [train.py:1114] (2/4) Epoch 2, batch 7500, loss[loss=0.4728, simple_loss=0.4745, pruned_loss=0.2356, over 3313.00 frames. ], tot_loss[loss=0.3111, simple_loss=0.37, pruned_loss=0.1261, over 936413.57 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 64.0
+2024-07-27 13:56:25,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=23656.0, ans=0.125
+2024-07-27 13:56:30,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=23656.0, ans=0.07
+2024-07-27 13:56:30,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=23656.0, ans=0.00572695652173913
+2024-07-27 13:56:32,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=23656.0, ans=0.2
+2024-07-27 13:56:47,506 INFO [train.py:1114] (2/4) Epoch 2, batch 7550, loss[loss=0.2951, simple_loss=0.3591, pruned_loss=0.1156, over 4644.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3722, pruned_loss=0.1273, over 936381.88 frames. ], batch size: 17, lr: 2.75e-02, grad_scale: 64.0
+2024-07-27 13:56:58,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=23709.333333333332, ans=0.00571536231884058
+2024-07-27 13:56:59,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23709.333333333332, ans=0.125
+2024-07-27 13:57:06,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.99 vs. limit=10.0
+2024-07-27 13:57:15,320 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.494e+01 6.851e+01 7.705e+01 1.471e+02, threshold=1.370e+02, percent-clipped=1.0
+2024-07-27 13:57:16,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23736.0, ans=0.1
+2024-07-27 13:57:21,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=23749.333333333332, ans=0.125
+2024-07-27 13:57:25,063 INFO [train.py:1114] (2/4) Epoch 2, batch 7600, loss[loss=0.3246, simple_loss=0.3755, pruned_loss=0.1368, over 4816.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3719, pruned_loss=0.1262, over 937896.31 frames. 
], batch size: 14, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:57:27,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=23762.666666666668, ans=0.125 +2024-07-27 13:57:29,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=23762.666666666668, ans=0.09899494936611666 +2024-07-27 13:57:50,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=23802.666666666668, ans=0.125 +2024-07-27 13:57:58,707 INFO [train.py:1114] (2/4) Epoch 2, batch 7650, loss[loss=0.3362, simple_loss=0.3988, pruned_loss=0.1368, over 4923.00 frames. ], tot_loss[loss=0.3111, simple_loss=0.3709, pruned_loss=0.1257, over 936740.10 frames. ], batch size: 12, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 13:59:35,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.17 vs. limit=8.0 +2024-07-27 13:59:42,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=15.0 +2024-07-27 13:59:42,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23856.0, ans=0.125 +2024-07-27 13:59:45,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=23856.0, ans=0.0 +2024-07-27 13:59:46,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=23856.0, ans=0.07 +2024-07-27 13:59:50,985 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.430e+01 6.487e+01 6.980e+01 8.234e+01 1.140e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 14:00:00,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=23896.0, ans=0.1 +2024-07-27 14:00:01,133 INFO [train.py:1114] (2/4) Epoch 2, batch 7700, loss[loss=0.3005, simple_loss=0.3649, pruned_loss=0.1181, over 4692.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3725, pruned_loss=0.1269, over 933910.54 frames. ], batch size: 13, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:03,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=23896.0, ans=0.05 +2024-07-27 14:00:04,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.55 vs. limit=15.0 +2024-07-27 14:00:10,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=23909.333333333332, ans=0.125 +2024-07-27 14:00:21,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=23936.0, ans=0.125 +2024-07-27 14:00:23,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.09 vs. limit=10.0 +2024-07-27 14:00:34,016 INFO [train.py:1114] (2/4) Epoch 2, batch 7750, loss[loss=0.3068, simple_loss=0.3745, pruned_loss=0.1196, over 4923.00 frames. ], tot_loss[loss=0.3144, simple_loss=0.3733, pruned_loss=0.1278, over 935305.93 frames. 
], batch size: 14, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:42,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23976.0, ans=0.125 +2024-07-27 14:00:45,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.96 vs. limit=10.0 +2024-07-27 14:00:48,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=23989.333333333332, ans=0.2 +2024-07-27 14:00:51,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.73 vs. limit=15.0 +2024-07-27 14:00:58,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0 +2024-07-27 14:00:59,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24002.666666666668, ans=0.1 +2024-07-27 14:00:59,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=24002.666666666668, ans=0.0 +2024-07-27 14:01:01,474 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.212e+01 6.407e+01 7.069e+01 7.682e+01 1.137e+02, threshold=1.414e+02, percent-clipped=0.0 +2024-07-27 14:01:09,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0 +2024-07-27 14:01:11,462 INFO [train.py:1114] (2/4) Epoch 2, batch 7800, loss[loss=0.3265, simple_loss=0.3872, pruned_loss=0.1329, over 4670.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3728, pruned_loss=0.1266, over 937161.24 frames. ], batch size: 14, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:01:16,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=24029.333333333332, ans=0.2 +2024-07-27 14:01:28,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24056.0, ans=0.125 +2024-07-27 14:01:30,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=24056.0, ans=0.05 +2024-07-27 14:01:33,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=21.77 vs. limit=15.0 +2024-07-27 14:01:44,914 INFO [train.py:1114] (2/4) Epoch 2, batch 7850, loss[loss=0.3449, simple_loss=0.3751, pruned_loss=0.1574, over 4516.00 frames. ], tot_loss[loss=0.3132, simple_loss=0.3727, pruned_loss=0.1268, over 936034.31 frames. ], batch size: 10, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:01:46,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.81 vs. 
limit=22.5 +2024-07-27 14:01:49,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=24096.0, ans=0.0 +2024-07-27 14:01:56,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24109.333333333332, ans=0.0 +2024-07-27 14:02:05,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24122.666666666668, ans=0.125 +2024-07-27 14:02:10,324 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+01 6.475e+01 7.021e+01 7.812e+01 1.156e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 14:02:18,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=24149.333333333332, ans=0.05 +2024-07-27 14:02:18,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.11 vs. limit=10.0 +2024-07-27 14:02:20,251 INFO [train.py:1114] (2/4) Epoch 2, batch 7900, loss[loss=0.3701, simple_loss=0.4321, pruned_loss=0.154, over 4872.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3748, pruned_loss=0.1278, over 933535.51 frames. ], batch size: 14, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:02:20,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24162.666666666668, ans=0.1 +2024-07-27 14:02:25,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.78 vs. limit=10.0 +2024-07-27 14:02:28,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24176.0, ans=0.1 +2024-07-27 14:02:32,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=24176.0, ans=0.125 +2024-07-27 14:02:37,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=24189.333333333332, ans=0.025 +2024-07-27 14:02:42,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=24202.666666666668, ans=0.125 +2024-07-27 14:02:53,686 INFO [train.py:1114] (2/4) Epoch 2, batch 7950, loss[loss=0.382, simple_loss=0.4167, pruned_loss=0.1736, over 3412.00 frames. ], tot_loss[loss=0.3159, simple_loss=0.3755, pruned_loss=0.1282, over 935562.85 frames. 
], batch size: 36, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:02:55,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten.whitening_limit, batch_count=24229.333333333332, ans=15.0 +2024-07-27 14:02:55,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24229.333333333332, ans=0.1 +2024-07-27 14:03:00,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=24242.666666666668, ans=0.125 +2024-07-27 14:03:07,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=24256.0, ans=0.2 +2024-07-27 14:03:16,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=24269.333333333332, ans=0.125 +2024-07-27 14:03:17,184 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.361e+01 6.433e+01 7.086e+01 8.045e+01 1.490e+02, threshold=1.417e+02, percent-clipped=1.0 +2024-07-27 14:03:20,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=24282.666666666668, ans=0.07 +2024-07-27 14:03:26,868 INFO [train.py:1114] (2/4) Epoch 2, batch 8000, loss[loss=0.2922, simple_loss=0.3325, pruned_loss=0.126, over 4612.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.3739, pruned_loss=0.1276, over 933939.94 frames. ], batch size: 11, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:03:30,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=24296.0, ans=0.2 +2024-07-27 14:03:49,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=24336.0, ans=0.125 +2024-07-27 14:03:49,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=24336.0, ans=0.125 +2024-07-27 14:03:54,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=24349.333333333332, ans=0.125 +2024-07-27 14:04:00,349 INFO [train.py:1114] (2/4) Epoch 2, batch 8050, loss[loss=0.3158, simple_loss=0.3844, pruned_loss=0.1237, over 4807.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3742, pruned_loss=0.1279, over 934246.39 frames. 
], batch size: 14, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:04:04,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=24362.666666666668, ans=0.005573333333333333 +2024-07-27 14:04:16,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=24389.333333333332, ans=0.0055675362318840585 +2024-07-27 14:04:18,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=24389.333333333332, ans=0.0055675362318840585 +2024-07-27 14:04:23,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=24402.666666666668, ans=0.025 +2024-07-27 14:04:23,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.126e+01 6.809e+01 7.483e+01 1.319e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-27 14:04:24,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.01 vs. limit=22.5 +2024-07-27 14:04:27,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=24416.0, ans=0.125 +2024-07-27 14:04:27,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=24416.0, ans=0.125 +2024-07-27 14:04:28,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=24416.0, ans=0.125 +2024-07-27 14:04:29,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=24416.0, ans=0.005561739130434783 +2024-07-27 14:04:33,617 INFO [train.py:1114] (2/4) Epoch 2, batch 8100, loss[loss=0.3463, simple_loss=0.4109, pruned_loss=0.1408, over 4801.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3744, pruned_loss=0.1273, over 933253.39 frames. ], batch size: 15, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:04:48,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-27 14:04:49,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.36 vs. limit=15.0 +2024-07-27 14:05:04,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=24442.666666666668, ans=0.125 +2024-07-27 14:05:17,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=11.56 vs. limit=15.0 +2024-07-27 14:05:18,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.14 vs. limit=10.0 +2024-07-27 14:05:56,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24482.666666666668, ans=0.1 +2024-07-27 14:06:01,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24482.666666666668, ans=0.1 +2024-07-27 14:06:02,894 INFO [train.py:1114] (2/4) Epoch 2, batch 8150, loss[loss=0.312, simple_loss=0.3952, pruned_loss=0.1144, over 4800.00 frames. 
], tot_loss[loss=0.3131, simple_loss=0.3728, pruned_loss=0.1267, over 936657.06 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:06:11,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=24509.333333333332, ans=0.1 +2024-07-27 14:06:46,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=24536.0, ans=0.02 +2024-07-27 14:06:50,921 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.617e+01 6.763e+01 7.386e+01 8.088e+01 1.261e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 14:06:54,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.30 vs. limit=15.0 +2024-07-27 14:06:56,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=24549.333333333332, ans=0.125 +2024-07-27 14:06:58,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=24549.333333333332, ans=0.125 +2024-07-27 14:06:59,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=24549.333333333332, ans=0.125 +2024-07-27 14:07:00,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=24549.333333333332, ans=12.0 +2024-07-27 14:07:01,479 INFO [train.py:1114] (2/4) Epoch 2, batch 8200, loss[loss=0.3176, simple_loss=0.3677, pruned_loss=0.1338, over 4791.00 frames. ], tot_loss[loss=0.3127, simple_loss=0.3729, pruned_loss=0.1262, over 937602.30 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:07:06,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=24562.666666666668, ans=0.2 +2024-07-27 14:07:21,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.36 vs. limit=22.5 +2024-07-27 14:07:24,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.40 vs. limit=15.0 +2024-07-27 14:07:25,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=24602.666666666668, ans=0.5 +2024-07-27 14:07:27,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=25.92 vs. limit=15.0 +2024-07-27 14:07:31,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=24602.666666666668, ans=0.0 +2024-07-27 14:07:39,097 INFO [train.py:1114] (2/4) Epoch 2, batch 8250, loss[loss=0.2863, simple_loss=0.3549, pruned_loss=0.1088, over 4885.00 frames. ], tot_loss[loss=0.3128, simple_loss=0.3725, pruned_loss=0.1266, over 938105.02 frames. ], batch size: 13, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:07:39,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=24629.333333333332, ans=0.125 +2024-07-27 14:07:42,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.70 vs. 
limit=15.0 +2024-07-27 14:07:44,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=24629.333333333332, ans=0.125 +2024-07-27 14:08:00,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24656.0, ans=0.1 +2024-07-27 14:08:00,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=24656.0, ans=0.125 +2024-07-27 14:08:04,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=24669.333333333332, ans=0.125 +2024-07-27 14:08:07,827 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.272e+01 7.057e+01 7.926e+01 1.070e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 14:08:12,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=24682.666666666668, ans=0.0 +2024-07-27 14:08:32,473 INFO [train.py:1114] (2/4) Epoch 2, batch 8300, loss[loss=0.3622, simple_loss=0.4236, pruned_loss=0.1504, over 4914.00 frames. ], tot_loss[loss=0.3148, simple_loss=0.3742, pruned_loss=0.1277, over 938124.16 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:08:39,898 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:08:46,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=24722.666666666668, ans=0.005495072463768116 +2024-07-27 14:08:49,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24722.666666666668, ans=0.1 +2024-07-27 14:08:51,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=24722.666666666668, ans=0.125 +2024-07-27 14:08:56,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.28 vs. limit=22.5 +2024-07-27 14:09:06,257 INFO [train.py:1114] (2/4) Epoch 2, batch 8350, loss[loss=0.2983, simple_loss=0.3693, pruned_loss=0.1136, over 4798.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3745, pruned_loss=0.1277, over 940870.31 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:09:13,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=12.0 +2024-07-27 14:09:18,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=24789.333333333332, ans=0.125 +2024-07-27 14:09:34,714 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.372e+01 7.103e+01 7.786e+01 1.162e+02, threshold=1.421e+02, percent-clipped=0.0 +2024-07-27 14:09:37,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=24816.0, ans=0.2 +2024-07-27 14:09:40,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=24816.0, ans=0.005474782608695652 +2024-07-27 14:09:44,916 INFO [train.py:1114] (2/4) Epoch 2, batch 8400, loss[loss=0.2764, simple_loss=0.3418, pruned_loss=0.1055, over 4790.00 frames. 
], tot_loss[loss=0.3145, simple_loss=0.374, pruned_loss=0.1274, over 939973.39 frames. ], batch size: 12, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:09:51,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.47 vs. limit=22.5 +2024-07-27 14:09:51,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=24842.666666666668, ans=0.0 +2024-07-27 14:09:53,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=24842.666666666668, ans=0.2 +2024-07-27 14:09:56,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=24842.666666666668, ans=0.2 +2024-07-27 14:09:57,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=24856.0, ans=0.125 +2024-07-27 14:09:58,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=24856.0, ans=0.125 +2024-07-27 14:10:01,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=15.0 +2024-07-27 14:10:04,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=24869.333333333332, ans=0.0 +2024-07-27 14:10:10,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=24882.666666666668, ans=10.0 +2024-07-27 14:10:17,803 INFO [train.py:1114] (2/4) Epoch 2, batch 8450, loss[loss=0.3192, simple_loss=0.382, pruned_loss=0.1282, over 4810.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3743, pruned_loss=0.1274, over 938930.37 frames. ], batch size: 15, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:10:18,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0 +2024-07-27 14:10:25,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=24909.333333333332, ans=0.125 +2024-07-27 14:10:25,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.69 vs. limit=15.0 +2024-07-27 14:10:49,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24936.0, ans=0.1 +2024-07-27 14:10:51,590 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.225e+01 6.363e+01 6.808e+01 7.563e+01 1.440e+02, threshold=1.362e+02, percent-clipped=1.0 +2024-07-27 14:11:01,385 INFO [train.py:1114] (2/4) Epoch 2, batch 8500, loss[loss=0.2639, simple_loss=0.3231, pruned_loss=0.1023, over 4620.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3722, pruned_loss=0.126, over 938770.74 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:11:03,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.38 vs. limit=12.0 +2024-07-27 14:11:04,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.11 vs. 
limit=12.0 +2024-07-27 14:11:13,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24989.333333333332, ans=0.1 +2024-07-27 14:11:28,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.80 vs. limit=22.5 +2024-07-27 14:11:29,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25016.0, ans=0.125 +2024-07-27 14:11:34,236 INFO [train.py:1114] (2/4) Epoch 2, batch 8550, loss[loss=0.2516, simple_loss=0.3244, pruned_loss=0.08936, over 4814.00 frames. ], tot_loss[loss=0.3102, simple_loss=0.3708, pruned_loss=0.1248, over 939952.83 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:11:43,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0 +2024-07-27 14:11:51,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=25056.0, ans=0.0 +2024-07-27 14:12:01,372 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.174e+01 6.782e+01 7.598e+01 1.715e+02, threshold=1.356e+02, percent-clipped=1.0 +2024-07-27 14:12:06,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.59 vs. limit=6.0 +2024-07-27 14:12:10,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25082.666666666668, ans=0.1 +2024-07-27 14:12:12,542 INFO [train.py:1114] (2/4) Epoch 2, batch 8600, loss[loss=0.29, simple_loss=0.3663, pruned_loss=0.1069, over 4809.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3698, pruned_loss=0.1247, over 939753.54 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:12:12,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=25096.0, ans=0.0 +2024-07-27 14:12:23,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.84 vs. 
limit=12.0 +2024-07-27 14:12:23,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=25109.333333333332, ans=12.0 +2024-07-27 14:12:24,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25109.333333333332, ans=0.0 +2024-07-27 14:12:25,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=25122.666666666668, ans=0.0 +2024-07-27 14:12:30,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25122.666666666668, ans=0.1 +2024-07-27 14:12:35,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=25136.0, ans=0.005405217391304348 +2024-07-27 14:12:40,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25149.333333333332, ans=0.1 +2024-07-27 14:12:47,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.30 vs. limit=22.5 +2024-07-27 14:12:50,779 INFO [train.py:1114] (2/4) Epoch 2, batch 8650, loss[loss=0.3829, simple_loss=0.42, pruned_loss=0.1729, over 4903.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.3701, pruned_loss=0.1249, over 940946.68 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:12:56,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.36 vs. limit=15.0 +2024-07-27 14:12:56,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=25162.666666666668, ans=0.125 +2024-07-27 14:13:00,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=25176.0, ans=0.125 +2024-07-27 14:13:01,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=25176.0, ans=0.125 +2024-07-27 14:13:10,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=25202.666666666668, ans=0.05 +2024-07-27 14:13:13,965 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.621e+01 7.393e+01 8.155e+01 1.216e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 14:13:14,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=25202.666666666668, ans=0.125 +2024-07-27 14:13:23,801 INFO [train.py:1114] (2/4) Epoch 2, batch 8700, loss[loss=0.2597, simple_loss=0.3231, pruned_loss=0.09818, over 4749.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3704, pruned_loss=0.1251, over 937943.83 frames. ], batch size: 13, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:13:26,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=25229.333333333332, ans=0.025 +2024-07-27 14:13:28,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.07 vs. 
limit=22.5 +2024-07-27 14:13:28,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.74 vs. limit=10.0 +2024-07-27 14:13:31,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=25242.666666666668, ans=0.5 +2024-07-27 14:13:33,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.22 vs. limit=15.0 +2024-07-27 14:13:45,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=25256.0, ans=0.2 +2024-07-27 14:14:00,753 INFO [train.py:1114] (2/4) Epoch 2, batch 8750, loss[loss=0.3002, simple_loss=0.3575, pruned_loss=0.1215, over 4678.00 frames. ], tot_loss[loss=0.3105, simple_loss=0.3705, pruned_loss=0.1252, over 936445.41 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:14:36,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=25322.666666666668, ans=0.2 +2024-07-27 14:14:41,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=25336.0, ans=0.125 +2024-07-27 14:14:43,963 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.590e+01 6.384e+01 6.883e+01 7.910e+01 1.074e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 14:14:49,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=25349.333333333332, ans=0.2 +2024-07-27 14:14:53,579 INFO [train.py:1114] (2/4) Epoch 2, batch 8800, loss[loss=0.3074, simple_loss=0.3722, pruned_loss=0.1213, over 4928.00 frames. ], tot_loss[loss=0.3099, simple_loss=0.3703, pruned_loss=0.1248, over 937256.15 frames. ], batch size: 14, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:15:09,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=25389.333333333332, ans=0.125 +2024-07-27 14:15:15,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=25402.666666666668, ans=0.0 +2024-07-27 14:15:17,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=25402.666666666668, ans=10.0 +2024-07-27 14:15:18,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=25402.666666666668, ans=0.125 +2024-07-27 14:15:25,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=25416.0, ans=10.0 +2024-07-27 14:15:27,654 INFO [train.py:1114] (2/4) Epoch 2, batch 8850, loss[loss=0.2897, simple_loss=0.353, pruned_loss=0.1132, over 4536.00 frames. ], tot_loss[loss=0.3102, simple_loss=0.3701, pruned_loss=0.1251, over 931517.48 frames. ], batch size: 21, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:15:35,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.50 vs. 
limit=22.5 +2024-07-27 14:15:39,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=25442.666666666668, ans=0.0 +2024-07-27 14:15:40,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=25456.0, ans=0.125 +2024-07-27 14:15:44,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25456.0, ans=0.1 +2024-07-27 14:15:55,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=25456.0, ans=0.125 +2024-07-27 14:15:55,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=25456.0, ans=0.125 +2024-07-27 14:16:00,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=25469.333333333332, ans=0.125 +2024-07-27 14:16:01,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.518e+01 6.996e+01 7.988e+01 1.039e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 14:16:11,083 INFO [train.py:1114] (2/4) Epoch 2, batch 8900, loss[loss=0.2813, simple_loss=0.3466, pruned_loss=0.108, over 4935.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3697, pruned_loss=0.1249, over 929531.71 frames. ], batch size: 12, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:16:11,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=25496.0, ans=0.0 +2024-07-27 14:16:14,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=25496.0, ans=0.0 +2024-07-27 14:16:22,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.96 vs. limit=15.0 +2024-07-27 14:16:24,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=25522.666666666668, ans=0.2 +2024-07-27 14:16:28,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=25522.666666666668, ans=15.0 +2024-07-27 14:16:30,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=25536.0, ans=0.95 +2024-07-27 14:16:31,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.56 vs. limit=10.0 +2024-07-27 14:16:44,023 INFO [train.py:1114] (2/4) Epoch 2, batch 8950, loss[loss=0.3396, simple_loss=0.4079, pruned_loss=0.1356, over 4529.00 frames. ], tot_loss[loss=0.3092, simple_loss=0.3693, pruned_loss=0.1245, over 930452.04 frames. 
], batch size: 21, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:16:46,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=25562.666666666668, ans=10.0 +2024-07-27 14:16:52,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=25576.0, ans=0.125 +2024-07-27 14:16:55,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=25576.0, ans=0.0 +2024-07-27 14:16:56,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25576.0, ans=0.125 +2024-07-27 14:16:58,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25589.333333333332, ans=0.1 +2024-07-27 14:17:00,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25589.333333333332, ans=0.0 +2024-07-27 14:17:00,712 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.60 vs. limit=22.5 +2024-07-27 14:17:01,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=25589.333333333332, ans=0.125 +2024-07-27 14:17:05,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=25602.666666666668, ans=0.125 +2024-07-27 14:17:08,259 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.427e+01 6.847e+01 7.354e+01 1.255e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-27 14:17:14,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.80 vs. limit=22.5 +2024-07-27 14:17:17,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=25629.333333333332, ans=0.0052979710144927545 +2024-07-27 14:17:18,092 INFO [train.py:1114] (2/4) Epoch 2, batch 9000, loss[loss=0.262, simple_loss=0.325, pruned_loss=0.09946, over 4642.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.3676, pruned_loss=0.123, over 933461.07 frames. ], batch size: 12, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:17:18,092 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 14:17:33,886 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.5388, 2.7336, 3.6422, 4.1886], device='cuda:2') +2024-07-27 14:17:37,004 INFO [train.py:1146] (2/4) Epoch 2, validation: loss=0.2471, simple_loss=0.3424, pruned_loss=0.07587, over 944034.00 frames. +2024-07-27 14:17:37,005 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 14:17:53,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=25656.0, ans=0.005292173913043478 +2024-07-27 14:17:58,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=15.0 +2024-07-27 14:18:10,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.95 vs. 
limit=15.0 +2024-07-27 14:18:18,643 INFO [train.py:1114] (2/4) Epoch 2, batch 9050, loss[loss=0.3075, simple_loss=0.3581, pruned_loss=0.1284, over 4556.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3664, pruned_loss=0.1228, over 934118.89 frames. ], batch size: 10, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:18:19,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25696.0, ans=0.1 +2024-07-27 14:18:19,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-27 14:18:23,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=25696.0, ans=0.125 +2024-07-27 14:18:25,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=25709.333333333332, ans=0.125 +2024-07-27 14:18:36,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=25709.333333333332, ans=0.125 +2024-07-27 14:18:36,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=25709.333333333332, ans=0.125 +2024-07-27 14:18:36,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=25709.333333333332, ans=0.125 +2024-07-27 14:18:44,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=25736.0, ans=0.5 +2024-07-27 14:18:48,200 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.394e+01 6.425e+01 6.926e+01 7.624e+01 1.076e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 14:18:49,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=25736.0, ans=0.125 +2024-07-27 14:18:54,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=25749.333333333332, ans=0.005271884057971014 +2024-07-27 14:18:58,054 INFO [train.py:1114] (2/4) Epoch 2, batch 9100, loss[loss=0.3018, simple_loss=0.365, pruned_loss=0.1193, over 4936.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3662, pruned_loss=0.1224, over 936672.17 frames. ], batch size: 14, lr: 2.65e-02, grad_scale: 128.0 +2024-07-27 14:19:13,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25789.333333333332, ans=0.125 +2024-07-27 14:19:14,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=25789.333333333332, ans=0.2 +2024-07-27 14:19:30,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=25816.0, ans=0.125 +2024-07-27 14:19:32,198 INFO [train.py:1114] (2/4) Epoch 2, batch 9150, loss[loss=0.3257, simple_loss=0.3889, pruned_loss=0.1312, over 4813.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.3683, pruned_loss=0.1236, over 934901.12 frames. 
], batch size: 14, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:19:32,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=25829.333333333332, ans=0.0052544927536231885 +2024-07-27 14:19:48,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=25842.666666666668, ans=0.0 +2024-07-27 14:19:52,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25856.0, ans=0.125 +2024-07-27 14:19:53,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25856.0, ans=0.1 +2024-07-27 14:19:54,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=25856.0, ans=0.2 +2024-07-27 14:20:02,448 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.426e+01 6.682e+01 7.261e+01 8.100e+01 1.344e+02, threshold=1.452e+02, percent-clipped=0.0 +2024-07-27 14:20:04,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25882.666666666668, ans=0.1 +2024-07-27 14:20:11,599 INFO [train.py:1114] (2/4) Epoch 2, batch 9200, loss[loss=0.3015, simple_loss=0.3563, pruned_loss=0.1234, over 4852.00 frames. ], tot_loss[loss=0.3083, simple_loss=0.3684, pruned_loss=0.1241, over 936821.61 frames. ], batch size: 12, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:20:14,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=25896.0, ans=15.0 +2024-07-27 14:20:16,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=25896.0, ans=0.0 +2024-07-27 14:20:18,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=25909.333333333332, ans=0.125 +2024-07-27 14:20:23,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=25909.333333333332, ans=0.0 +2024-07-27 14:20:25,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.08 vs. limit=12.0 +2024-07-27 14:20:26,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.66 vs. limit=22.5 +2024-07-27 14:20:32,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.55 vs. limit=15.0 +2024-07-27 14:20:34,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.75 vs. limit=15.0 +2024-07-27 14:20:40,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=25949.333333333332, ans=0.125 +2024-07-27 14:20:44,394 INFO [train.py:1114] (2/4) Epoch 2, batch 9250, loss[loss=0.3369, simple_loss=0.4063, pruned_loss=0.1338, over 4633.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3674, pruned_loss=0.1232, over 937723.65 frames. 
], batch size: 13, lr: 2.65e-02, grad_scale: 64.0
+2024-07-27 14:21:17,303 INFO [train.py:1114] (2/4) Epoch 2, batch 9300, loss[loss=0.2877, simple_loss=0.3517, pruned_loss=0.1118, over 4772.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3665, pruned_loss=0.1226, over 937604.52 frames. ], batch size: 12, lr: 2.64e-02, grad_scale: 64.0
+[... log records elided: per-batch scaling.py ScheduledFloat / Whitening / WithLoss diagnostics interleaved with train.py progress lines and periodic optim.py grad-norm clipping warnings (thresholds 1.325e+02 to 1.509e+02, percent-clipped 0.0 to 2.0). Epoch 2, batches 9300 to 10200: tot_loss 0.3059 -> 0.3144, rising to 0.3505 over the short final batches (batch size 35); lr decays 2.64e-02 -> 2.59e-02. Epoch 3 validation at batch 0: loss=0.2558, simple_loss=0.3526, pruned_loss=0.07947, over 944034.00 frames; maximum memory allocated so far is 4179MB. Epoch 3, batches 50 to 1700: tot_loss 0.3154 -> 0.2958; lr 2.46e-02 -> 2.37e-02; grad_scale moves between 32.0 and 128.0 ...]
+2024-07-27 15:04:14,463 INFO [train.py:1114] (2/4) Epoch 3, batch 1750, loss[loss=0.2743, simple_loss=0.3387, pruned_loss=0.1049, over 4803.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.359, pruned_loss=0.1153, over 940383.66 frames. 
], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:04:29,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=29606.666666666668, ans=0.035 +2024-07-27 15:04:30,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.45 vs. limit=10.0 +2024-07-27 15:04:30,958 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.973e+01 6.164e+01 6.649e+01 7.575e+01 1.168e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 15:04:44,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=29620.0, ans=0.0 +2024-07-27 15:05:02,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.22 vs. limit=22.5 +2024-07-27 15:05:15,312 INFO [train.py:1114] (2/4) Epoch 3, batch 1800, loss[loss=0.3051, simple_loss=0.3739, pruned_loss=0.1181, over 4640.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3602, pruned_loss=0.1162, over 940871.91 frames. ], batch size: 13, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:05:22,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=29673.333333333332, ans=0.125 +2024-07-27 15:05:25,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=29673.333333333332, ans=0.025 +2024-07-27 15:05:36,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.16 vs. limit=22.5 +2024-07-27 15:05:42,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.22 vs. limit=10.0 +2024-07-27 15:05:49,940 INFO [train.py:1114] (2/4) Epoch 3, batch 1850, loss[loss=0.286, simple_loss=0.3668, pruned_loss=0.1026, over 4807.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3584, pruned_loss=0.1144, over 940546.62 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:05:52,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=29726.666666666668, ans=0.0 +2024-07-27 15:06:06,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.35 vs. 
limit=10.0 +2024-07-27 15:06:07,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29726.666666666668, ans=0.1 +2024-07-27 15:06:13,606 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 6.411e+01 6.989e+01 8.311e+01 1.252e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 15:06:25,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=29753.333333333332, ans=0.2 +2024-07-27 15:06:26,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=29766.666666666668, ans=0.125 +2024-07-27 15:06:27,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29766.666666666668, ans=0.1 +2024-07-27 15:06:38,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=29766.666666666668, ans=0.125 +2024-07-27 15:06:39,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=29766.666666666668, ans=0.125 +2024-07-27 15:06:57,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29780.0, ans=0.1 +2024-07-27 15:06:58,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.07 vs. limit=12.0 +2024-07-27 15:07:01,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=29793.333333333332, ans=0.0043927536231884055 +2024-07-27 15:07:02,458 INFO [train.py:1114] (2/4) Epoch 3, batch 1900, loss[loss=0.3424, simple_loss=0.4087, pruned_loss=0.1381, over 4667.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3586, pruned_loss=0.1146, over 941983.42 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:07:23,631 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:07:33,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=29833.333333333332, ans=0.2 +2024-07-27 15:07:33,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=29833.333333333332, ans=0.025 +2024-07-27 15:07:40,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=29846.666666666668, ans=0.125 +2024-07-27 15:07:41,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29846.666666666668, ans=0.1 +2024-07-27 15:08:06,600 INFO [train.py:1114] (2/4) Epoch 3, batch 1950, loss[loss=0.277, simple_loss=0.3521, pruned_loss=0.101, over 4888.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.36, pruned_loss=0.1151, over 944038.67 frames. 
], batch size: 13, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:08:15,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=29873.333333333332, ans=0.0 +2024-07-27 15:08:17,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=29873.333333333332, ans=0.125 +2024-07-27 15:08:18,976 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.436e+01 6.844e+01 7.392e+01 3.834e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-27 15:08:27,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.31 vs. limit=15.0 +2024-07-27 15:08:37,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=29900.0, ans=0.0 +2024-07-27 15:08:38,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.18 vs. limit=22.5 +2024-07-27 15:08:56,177 INFO [train.py:1114] (2/4) Epoch 3, batch 2000, loss[loss=0.3073, simple_loss=0.3585, pruned_loss=0.128, over 4811.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3592, pruned_loss=0.1154, over 941369.43 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:08:59,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.69 vs. limit=22.5 +2024-07-27 15:09:20,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29966.666666666668, ans=0.1 +2024-07-27 15:09:20,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=29966.666666666668, ans=0.004355072463768116 +2024-07-27 15:09:42,736 INFO [train.py:1114] (2/4) Epoch 3, batch 2050, loss[loss=0.2105, simple_loss=0.2779, pruned_loss=0.07156, over 4601.00 frames. ], tot_loss[loss=0.2947, simple_loss=0.3587, pruned_loss=0.1154, over 939667.37 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:09:42,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=29993.333333333332, ans=0.0 +2024-07-27 15:09:47,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=29993.333333333332, ans=0.2 +2024-07-27 15:09:49,342 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.13 vs. limit=10.0 +2024-07-27 15:09:52,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.283e+01 7.092e+01 8.463e+01 1.553e+02, threshold=1.418e+02, percent-clipped=1.0 +2024-07-27 15:10:02,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.30 vs. limit=15.0 +2024-07-27 15:10:06,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.05 vs. 
limit=10.0 +2024-07-27 15:10:07,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=30033.333333333332, ans=0.2 +2024-07-27 15:10:15,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=30046.666666666668, ans=0.125 +2024-07-27 15:10:16,478 INFO [train.py:1114] (2/4) Epoch 3, batch 2100, loss[loss=0.2708, simple_loss=0.3485, pruned_loss=0.09657, over 4752.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3581, pruned_loss=0.1148, over 941401.24 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:10:20,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=30060.0, ans=0.2 +2024-07-27 15:10:25,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=30073.333333333332, ans=0.025 +2024-07-27 15:10:26,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=30073.333333333332, ans=0.025 +2024-07-27 15:10:32,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=30086.666666666668, ans=0.125 +2024-07-27 15:10:46,097 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.02 vs. limit=15.0 +2024-07-27 15:10:50,475 INFO [train.py:1114] (2/4) Epoch 3, batch 2150, loss[loss=0.2379, simple_loss=0.3171, pruned_loss=0.07934, over 4901.00 frames. ], tot_loss[loss=0.2912, simple_loss=0.3563, pruned_loss=0.1131, over 944444.08 frames. ], batch size: 13, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:11:01,330 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+01 6.161e+01 6.566e+01 7.305e+01 9.854e+01, threshold=1.313e+02, percent-clipped=0.0 +2024-07-27 15:11:04,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.58 vs. limit=15.0 +2024-07-27 15:11:10,315 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:11:22,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=30180.0, ans=0.07 +2024-07-27 15:11:26,682 INFO [train.py:1114] (2/4) Epoch 3, batch 2200, loss[loss=0.2844, simple_loss=0.362, pruned_loss=0.1034, over 4800.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3556, pruned_loss=0.113, over 943852.64 frames. 
], batch size: 14, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:11:31,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=30193.333333333332, ans=0.1 +2024-07-27 15:11:41,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30220.0, ans=0.1 +2024-07-27 15:11:42,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30220.0, ans=0.125 +2024-07-27 15:11:42,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=30220.0, ans=0.0 +2024-07-27 15:11:56,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=31.78 vs. limit=15.0 +2024-07-27 15:12:00,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30246.666666666668, ans=0.125 +2024-07-27 15:12:05,474 INFO [train.py:1114] (2/4) Epoch 3, batch 2250, loss[loss=0.2673, simple_loss=0.3361, pruned_loss=0.09922, over 4698.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3559, pruned_loss=0.1129, over 942213.86 frames. ], batch size: 13, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:12:09,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=30260.0, ans=0.125 +2024-07-27 15:12:12,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30273.333333333332, ans=0.1 +2024-07-27 15:12:16,108 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.175e+01 6.906e+01 7.852e+01 1.345e+02, threshold=1.381e+02, percent-clipped=1.0 +2024-07-27 15:12:41,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=30300.0, ans=0.004282608695652174 +2024-07-27 15:12:50,202 INFO [train.py:1114] (2/4) Epoch 3, batch 2300, loss[loss=0.2621, simple_loss=0.3265, pruned_loss=0.09882, over 4933.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3547, pruned_loss=0.1127, over 940246.01 frames. ], batch size: 12, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:12:54,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30326.666666666668, ans=0.125 +2024-07-27 15:13:14,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=30366.666666666668, ans=0.0 +2024-07-27 15:13:14,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=30366.666666666668, ans=0.125 +2024-07-27 15:13:16,831 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:13:20,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=30366.666666666668, ans=0.0 +2024-07-27 15:13:35,760 INFO [train.py:1114] (2/4) Epoch 3, batch 2350, loss[loss=0.3223, simple_loss=0.4, pruned_loss=0.1223, over 4636.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3555, pruned_loss=0.1129, over 941836.31 frames. 
], batch size: 13, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:13:54,299 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+01 6.422e+01 7.140e+01 8.022e+01 1.675e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 15:14:00,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=30420.0, ans=0.125 +2024-07-27 15:14:02,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30420.0, ans=0.125 +2024-07-27 15:14:08,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=30433.333333333332, ans=0.2 +2024-07-27 15:14:18,264 INFO [train.py:1114] (2/4) Epoch 3, batch 2400, loss[loss=0.2504, simple_loss=0.3258, pruned_loss=0.08749, over 4636.00 frames. ], tot_loss[loss=0.2922, simple_loss=0.3573, pruned_loss=0.1136, over 941769.91 frames. ], batch size: 12, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:14:45,359 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.74 vs. limit=15.0 +2024-07-27 15:15:02,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=30513.333333333332, ans=0.125 +2024-07-27 15:15:02,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.31 vs. limit=15.0 +2024-07-27 15:15:04,237 INFO [train.py:1114] (2/4) Epoch 3, batch 2450, loss[loss=0.2731, simple_loss=0.3521, pruned_loss=0.09705, over 4695.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3589, pruned_loss=0.1144, over 937381.29 frames. ], batch size: 13, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:15:22,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.262e+01 6.488e+01 7.535e+01 9.077e+01 1.631e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 15:15:41,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=30580.0, ans=0.125 +2024-07-27 15:15:46,348 INFO [train.py:1114] (2/4) Epoch 3, batch 2500, loss[loss=0.295, simple_loss=0.3659, pruned_loss=0.1121, over 4819.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.3581, pruned_loss=0.1138, over 939451.83 frames. ], batch size: 14, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:16:07,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=30620.0, ans=0.025 +2024-07-27 15:16:09,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.27 vs. limit=22.5 +2024-07-27 15:16:15,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30633.333333333332, ans=0.1 +2024-07-27 15:16:23,928 INFO [train.py:1114] (2/4) Epoch 3, batch 2550, loss[loss=0.2659, simple_loss=0.3331, pruned_loss=0.09932, over 4821.00 frames. ], tot_loss[loss=0.2923, simple_loss=0.3576, pruned_loss=0.1135, over 938792.11 frames. 
], batch size: 11, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:16:26,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=30660.0, ans=0.0 +2024-07-27 15:16:56,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=30673.333333333332, ans=0.0 +2024-07-27 15:16:59,989 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.098e+01 6.217e+01 6.996e+01 7.708e+01 1.283e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 15:17:00,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30673.333333333332, ans=0.1 +2024-07-27 15:17:09,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=30673.333333333332, ans=0.125 +2024-07-27 15:17:20,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=30700.0, ans=0.125 +2024-07-27 15:17:32,006 INFO [train.py:1114] (2/4) Epoch 3, batch 2600, loss[loss=0.2634, simple_loss=0.3455, pruned_loss=0.09064, over 4895.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3592, pruned_loss=0.1143, over 938101.08 frames. ], batch size: 13, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:17:32,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30726.666666666668, ans=0.1 +2024-07-27 15:17:48,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30753.333333333332, ans=0.1 +2024-07-27 15:18:07,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=30780.0, ans=0.2 +2024-07-27 15:18:08,636 INFO [train.py:1114] (2/4) Epoch 3, batch 2650, loss[loss=0.3533, simple_loss=0.3934, pruned_loss=0.1566, over 4626.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3594, pruned_loss=0.1143, over 940578.38 frames. ], batch size: 16, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:18:12,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=30793.333333333332, ans=0.125 +2024-07-27 15:18:13,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.57 vs. limit=15.0 +2024-07-27 15:18:28,837 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.212e+01 6.736e+01 7.183e+01 9.052e+01, threshold=1.347e+02, percent-clipped=0.0 +2024-07-27 15:18:34,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=30820.0, ans=0.004169565217391305 +2024-07-27 15:18:40,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=30833.333333333332, ans=0.0041666666666666675 +2024-07-27 15:18:43,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.18 vs. 
limit=15.0 +2024-07-27 15:18:46,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=30846.666666666668, ans=0.0 +2024-07-27 15:18:49,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=30846.666666666668, ans=0.035 +2024-07-27 15:18:53,062 INFO [train.py:1114] (2/4) Epoch 3, batch 2700, loss[loss=0.2853, simple_loss=0.3663, pruned_loss=0.1022, over 4743.00 frames. ], tot_loss[loss=0.2934, simple_loss=0.3587, pruned_loss=0.114, over 940459.04 frames. ], batch size: 14, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:19:02,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.13 vs. limit=22.5 +2024-07-27 15:19:03,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.58 vs. limit=6.0 +2024-07-27 15:19:20,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.45 vs. limit=12.0 +2024-07-27 15:19:28,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=30913.333333333332, ans=0.0 +2024-07-27 15:19:28,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=30913.333333333332, ans=0.0 +2024-07-27 15:19:28,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=30913.333333333332, ans=0.125 +2024-07-27 15:19:29,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=30913.333333333332, ans=0.025 +2024-07-27 15:19:34,144 INFO [train.py:1114] (2/4) Epoch 3, batch 2750, loss[loss=0.3066, simple_loss=0.3514, pruned_loss=0.1309, over 4710.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3573, pruned_loss=0.1134, over 940199.16 frames. ], batch size: 12, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:19:44,300 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 6.371e+01 6.868e+01 7.779e+01 1.190e+02, threshold=1.374e+02, percent-clipped=0.0 +2024-07-27 15:19:47,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=30953.333333333332, ans=0.2 +2024-07-27 15:19:48,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.71 vs. limit=10.0 +2024-07-27 15:19:55,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=30966.666666666668, ans=0.125 +2024-07-27 15:19:55,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.02 vs. limit=15.0 +2024-07-27 15:19:56,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=30966.666666666668, ans=0.0 +2024-07-27 15:20:08,678 INFO [train.py:1114] (2/4) Epoch 3, batch 2800, loss[loss=0.4414, simple_loss=0.4502, pruned_loss=0.2163, over 3386.00 frames. ], tot_loss[loss=0.2937, simple_loss=0.3585, pruned_loss=0.1144, over 938198.31 frames. 
], batch size: 35, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:20:11,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-07-27 15:20:20,803 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=15.0 +2024-07-27 15:20:30,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=31033.333333333332, ans=0.004123188405797102 +2024-07-27 15:20:30,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=31033.333333333332, ans=0.125 +2024-07-27 15:20:49,918 INFO [train.py:1114] (2/4) Epoch 3, batch 2850, loss[loss=0.2382, simple_loss=0.3018, pruned_loss=0.08729, over 4962.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3582, pruned_loss=0.1144, over 936020.48 frames. ], batch size: 13, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:20:52,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=31060.0, ans=0.004117391304347826 +2024-07-27 15:20:59,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=31073.333333333332, ans=0.2 +2024-07-27 15:21:01,743 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.366e+01 6.414e+01 6.981e+01 8.121e+01 1.632e+02, threshold=1.396e+02, percent-clipped=1.0 +2024-07-27 15:21:14,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=31086.666666666668, ans=0.125 +2024-07-27 15:21:14,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=31086.666666666668, ans=0.125 +2024-07-27 15:21:26,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=31100.0, ans=0.125 +2024-07-27 15:21:28,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=31113.333333333332, ans=0.125 +2024-07-27 15:21:37,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=31113.333333333332, ans=0.0 +2024-07-27 15:21:43,052 INFO [train.py:1114] (2/4) Epoch 3, batch 2900, loss[loss=0.2821, simple_loss=0.3592, pruned_loss=0.1026, over 4830.00 frames. ], tot_loss[loss=0.2941, simple_loss=0.3594, pruned_loss=0.1144, over 939868.92 frames. 
], batch size: 13, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:21:54,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=31126.666666666668, ans=0.125 +2024-07-27 15:22:02,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=31140.0, ans=0.125 +2024-07-27 15:22:08,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=31153.333333333332, ans=0.2 +2024-07-27 15:22:11,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=31166.666666666668, ans=0.0 +2024-07-27 15:22:35,883 INFO [train.py:1114] (2/4) Epoch 3, batch 2950, loss[loss=0.2511, simple_loss=0.3123, pruned_loss=0.09494, over 4706.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.3564, pruned_loss=0.113, over 938520.65 frames. ], batch size: 12, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:22:45,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.01 vs. limit=22.5 +2024-07-27 15:22:49,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=23.95 vs. limit=15.0 +2024-07-27 15:22:52,196 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.239e+01 6.722e+01 7.619e+01 1.818e+02, threshold=1.344e+02, percent-clipped=1.0 +2024-07-27 15:23:00,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=31220.0, ans=0.2 +2024-07-27 15:23:00,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.45 vs. limit=22.5 +2024-07-27 15:23:10,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=31233.333333333332, ans=0.125 +2024-07-27 15:23:14,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=31246.666666666668, ans=0.2 +2024-07-27 15:23:18,259 INFO [train.py:1114] (2/4) Epoch 3, batch 3000, loss[loss=0.2795, simple_loss=0.3413, pruned_loss=0.1088, over 4756.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3556, pruned_loss=0.1119, over 938078.02 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:23:18,260 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 15:23:33,136 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2358, simple_loss=0.3336, pruned_loss=0.06904, over 944034.00 frames. +2024-07-27 15:23:33,137 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 15:23:38,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=31260.0, ans=0.125 +2024-07-27 15:23:42,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=31273.333333333332, ans=0.125 +2024-07-27 15:23:43,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.33 vs. 
limit=15.0 +2024-07-27 15:23:54,628 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.64 vs. limit=10.0 +2024-07-27 15:24:09,323 INFO [train.py:1114] (2/4) Epoch 3, batch 3050, loss[loss=0.2917, simple_loss=0.3548, pruned_loss=0.1143, over 4643.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3572, pruned_loss=0.1135, over 937054.99 frames. ], batch size: 12, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:24:19,670 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 6.122e+01 6.753e+01 7.490e+01 1.166e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-27 15:24:29,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31353.333333333332, ans=0.1 +2024-07-27 15:24:43,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 15:24:45,637 INFO [train.py:1114] (2/4) Epoch 3, batch 3100, loss[loss=0.2623, simple_loss=0.3379, pruned_loss=0.09332, over 4636.00 frames. ], tot_loss[loss=0.2927, simple_loss=0.3575, pruned_loss=0.1139, over 937845.15 frames. ], batch size: 16, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:24:50,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=22.5 +2024-07-27 15:24:53,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=31406.666666666668, ans=0.004042028985507247 +2024-07-27 15:25:02,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=31420.0, ans=0.07 +2024-07-27 15:25:04,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=31420.0, ans=0.125 +2024-07-27 15:25:21,450 INFO [train.py:1114] (2/4) Epoch 3, batch 3150, loss[loss=0.2864, simple_loss=0.3545, pruned_loss=0.1091, over 4630.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3567, pruned_loss=0.1131, over 938243.93 frames. 
], batch size: 17, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:25:22,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=31460.0, ans=0.025 +2024-07-27 15:25:22,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=31460.0, ans=0.025 +2024-07-27 15:25:24,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=31460.0, ans=0.2 +2024-07-27 15:25:31,679 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.198e+01 6.919e+01 7.574e+01 1.132e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 15:25:34,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=31486.666666666668, ans=0.004024637681159421 +2024-07-27 15:25:36,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=31486.666666666668, ans=0.2 +2024-07-27 15:25:38,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=31486.666666666668, ans=0.004024637681159421 +2024-07-27 15:25:41,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=31486.666666666668, ans=0.125 +2024-07-27 15:25:42,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=31486.666666666668, ans=0.2 +2024-07-27 15:25:44,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=31500.0, ans=0.004021739130434783 +2024-07-27 15:25:54,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=12.0 +2024-07-27 15:25:56,881 INFO [train.py:1114] (2/4) Epoch 3, batch 3200, loss[loss=0.2928, simple_loss=0.3544, pruned_loss=0.1155, over 4836.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3566, pruned_loss=0.1131, over 939767.59 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:26:01,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=31526.666666666668, ans=0.125 +2024-07-27 15:26:03,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=31540.0, ans=0.1 +2024-07-27 15:26:15,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=31553.333333333332, ans=0.125 +2024-07-27 15:26:26,531 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.24 vs. limit=22.5 +2024-07-27 15:26:31,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31566.666666666668, ans=0.1 +2024-07-27 15:26:38,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=31566.666666666668, ans=0.125 +2024-07-27 15:26:49,792 INFO [train.py:1114] (2/4) Epoch 3, batch 3250, loss[loss=0.2619, simple_loss=0.3476, pruned_loss=0.08811, over 4934.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3565, pruned_loss=0.1125, over 940876.03 frames. 
], batch size: 14, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:26:57,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.11 vs. limit=12.0 +2024-07-27 15:27:01,546 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.545e+01 6.278e+01 6.797e+01 7.554e+01 1.103e+02, threshold=1.359e+02, percent-clipped=0.0 +2024-07-27 15:27:01,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=31606.666666666668, ans=0.125 +2024-07-27 15:27:14,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=31633.333333333332, ans=0.1 +2024-07-27 15:27:14,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=31633.333333333332, ans=0.125 +2024-07-27 15:27:20,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=31633.333333333332, ans=0.0 +2024-07-27 15:27:30,559 INFO [train.py:1114] (2/4) Epoch 3, batch 3300, loss[loss=0.3293, simple_loss=0.3882, pruned_loss=0.1352, over 4705.00 frames. ], tot_loss[loss=0.291, simple_loss=0.3558, pruned_loss=0.1131, over 941216.73 frames. ], batch size: 19, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:27:31,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=31660.0, ans=0.0 +2024-07-27 15:27:38,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=31660.0, ans=0.00398695652173913 +2024-07-27 15:27:38,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=31660.0, ans=0.125 +2024-07-27 15:27:48,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=31673.333333333332, ans=0.125 +2024-07-27 15:28:01,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.24 vs. limit=22.5 +2024-07-27 15:28:02,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31700.0, ans=0.1 +2024-07-27 15:28:06,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=31713.333333333332, ans=0.125 +2024-07-27 15:28:13,930 INFO [train.py:1114] (2/4) Epoch 3, batch 3350, loss[loss=0.3222, simple_loss=0.3827, pruned_loss=0.1308, over 4628.00 frames. ], tot_loss[loss=0.2933, simple_loss=0.3574, pruned_loss=0.1146, over 938569.36 frames. 
], batch size: 17, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:28:15,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=31726.666666666668, ans=0.0 +2024-07-27 15:28:20,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=31740.0, ans=0.09899494936611666 +2024-07-27 15:28:24,380 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.313e+01 6.716e+01 7.505e+01 1.231e+02, threshold=1.343e+02, percent-clipped=0.0 +2024-07-27 15:28:27,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=31753.333333333332, ans=0.125 +2024-07-27 15:28:50,132 INFO [train.py:1114] (2/4) Epoch 3, batch 3400, loss[loss=0.2292, simple_loss=0.2893, pruned_loss=0.08461, over 4811.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3566, pruned_loss=0.1138, over 937411.79 frames. ], batch size: 11, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:28:57,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=31793.333333333332, ans=0.2 +2024-07-27 15:29:02,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.49 vs. limit=22.5 +2024-07-27 15:29:20,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=31820.0, ans=0.2 +2024-07-27 15:29:30,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=31846.666666666668, ans=0.125 +2024-07-27 15:29:31,821 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.16 vs. limit=22.5 +2024-07-27 15:29:35,524 INFO [train.py:1114] (2/4) Epoch 3, batch 3450, loss[loss=0.3197, simple_loss=0.3749, pruned_loss=0.1323, over 4707.00 frames. ], tot_loss[loss=0.292, simple_loss=0.3567, pruned_loss=0.1136, over 937656.01 frames. ], batch size: 19, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:29:40,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=31860.0, ans=0.2 +2024-07-27 15:29:52,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31873.333333333332, ans=0.1 +2024-07-27 15:29:55,353 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 6.313e+01 6.956e+01 7.933e+01 1.220e+02, threshold=1.391e+02, percent-clipped=0.0 +2024-07-27 15:30:00,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=31886.666666666668, ans=0.0 +2024-07-27 15:30:07,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=31900.0, ans=0.0 +2024-07-27 15:30:16,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=31913.333333333332, ans=0.125 +2024-07-27 15:30:20,199 INFO [train.py:1114] (2/4) Epoch 3, batch 3500, loss[loss=0.282, simple_loss=0.3369, pruned_loss=0.1135, over 4952.00 frames. ], tot_loss[loss=0.292, simple_loss=0.3565, pruned_loss=0.1138, over 938660.14 frames. 
], batch size: 12, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:30:21,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=31926.666666666668, ans=0.0 +2024-07-27 15:30:24,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=31926.666666666668, ans=0.1 +2024-07-27 15:30:27,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=31940.0, ans=0.1 +2024-07-27 15:30:34,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.13 vs. limit=6.0 +2024-07-27 15:30:46,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=31966.666666666668, ans=0.2 +2024-07-27 15:30:54,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=31993.333333333332, ans=0.125 +2024-07-27 15:30:55,137 INFO [train.py:1114] (2/4) Epoch 3, batch 3550, loss[loss=0.3287, simple_loss=0.3923, pruned_loss=0.1326, over 4662.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3566, pruned_loss=0.1132, over 939261.87 frames. ], batch size: 14, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:31:08,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32006.666666666668, ans=0.125 +2024-07-27 15:31:08,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=32006.666666666668, ans=0.125 +2024-07-27 15:31:10,781 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 6.203e+01 6.849e+01 7.664e+01 1.472e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-27 15:31:18,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32020.0, ans=0.0 +2024-07-27 15:31:19,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=32020.0, ans=0.5 +2024-07-27 15:31:23,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32033.333333333332, ans=0.125 +2024-07-27 15:31:24,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=32033.333333333332, ans=0.0 +2024-07-27 15:31:34,540 INFO [train.py:1114] (2/4) Epoch 3, batch 3600, loss[loss=0.2864, simple_loss=0.3495, pruned_loss=0.1116, over 4965.00 frames. ], tot_loss[loss=0.291, simple_loss=0.3563, pruned_loss=0.1128, over 940680.18 frames. ], batch size: 13, lr: 2.29e-02, grad_scale: 64.0 +2024-07-27 15:31:46,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=32073.333333333332, ans=0.025 +2024-07-27 15:31:48,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=32073.333333333332, ans=0.0 +2024-07-27 15:31:49,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.70 vs. 
limit=22.5 +2024-07-27 15:31:52,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=32086.666666666668, ans=0.125 +2024-07-27 15:31:56,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=32100.0, ans=0.0 +2024-07-27 15:31:57,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=32100.0, ans=0.125 +2024-07-27 15:31:59,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=32100.0, ans=0.125 +2024-07-27 15:31:59,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=32100.0, ans=0.0038913043478260873 +2024-07-27 15:32:06,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=32113.333333333332, ans=0.2 +2024-07-27 15:32:10,763 INFO [train.py:1114] (2/4) Epoch 3, batch 3650, loss[loss=0.3328, simple_loss=0.3758, pruned_loss=0.1449, over 4910.00 frames. ], tot_loss[loss=0.2906, simple_loss=0.3556, pruned_loss=0.1128, over 940756.56 frames. ], batch size: 15, lr: 2.29e-02, grad_scale: 64.0 +2024-07-27 15:32:21,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.293e+01 6.817e+01 8.019e+01 9.949e+01 1.573e+02, threshold=1.604e+02, percent-clipped=3.0 +2024-07-27 15:32:37,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=32180.0, ans=0.125 +2024-07-27 15:32:45,070 INFO [train.py:1114] (2/4) Epoch 3, batch 3700, loss[loss=0.3322, simple_loss=0.3951, pruned_loss=0.1346, over 4936.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3548, pruned_loss=0.1114, over 941848.54 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:32:50,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=32193.333333333332, ans=0.125 +2024-07-27 15:32:57,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=32206.666666666668, ans=0.125 +2024-07-27 15:33:16,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=32246.666666666668, ans=0.0 +2024-07-27 15:33:17,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=32246.666666666668, ans=0.125 +2024-07-27 15:33:20,707 INFO [train.py:1114] (2/4) Epoch 3, batch 3750, loss[loss=0.2247, simple_loss=0.2963, pruned_loss=0.07651, over 4831.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.354, pruned_loss=0.1108, over 943191.75 frames. ], batch size: 11, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:33:31,093 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.187e+01 6.972e+01 7.768e+01 2.543e+02, threshold=1.394e+02, percent-clipped=1.0 +2024-07-27 15:33:39,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32286.666666666668, ans=0.1 +2024-07-27 15:33:44,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.58 vs. 
limit=22.5 +2024-07-27 15:33:52,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=32313.333333333332, ans=0.2 +2024-07-27 15:33:54,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=32313.333333333332, ans=0.003844927536231884 +2024-07-27 15:33:55,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=32313.333333333332, ans=0.125 +2024-07-27 15:33:56,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=32313.333333333332, ans=0.5 +2024-07-27 15:34:01,595 INFO [train.py:1114] (2/4) Epoch 3, batch 3800, loss[loss=0.3475, simple_loss=0.3936, pruned_loss=0.1507, over 4818.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.355, pruned_loss=0.1118, over 941758.31 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:34:16,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-27 15:34:25,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.47 vs. limit=12.0 +2024-07-27 15:34:26,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.65 vs. limit=22.5 +2024-07-27 15:34:33,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=32380.0, ans=0.125 +2024-07-27 15:34:36,459 INFO [train.py:1114] (2/4) Epoch 3, batch 3850, loss[loss=0.2715, simple_loss=0.3595, pruned_loss=0.09175, over 4639.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.354, pruned_loss=0.1103, over 942275.09 frames. ], batch size: 16, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:34:43,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.13 vs. limit=5.0 +2024-07-27 15:34:46,756 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.304e+01 7.059e+01 8.148e+01 1.168e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 15:34:51,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32420.0, ans=0.1 +2024-07-27 15:35:01,470 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:35:02,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=32433.333333333332, ans=0.0 +2024-07-27 15:35:06,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=32446.666666666668, ans=0.035 +2024-07-27 15:35:12,178 INFO [train.py:1114] (2/4) Epoch 3, batch 3900, loss[loss=0.2957, simple_loss=0.3596, pruned_loss=0.1159, over 4811.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3536, pruned_loss=0.11, over 942575.59 frames. 
], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:35:17,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=32460.0, ans=0.09899494936611666 +2024-07-27 15:35:36,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=32500.0, ans=0.125 +2024-07-27 15:35:43,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=32513.333333333332, ans=0.125 +2024-07-27 15:35:43,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.20 vs. limit=22.5 +2024-07-27 15:35:46,911 INFO [train.py:1114] (2/4) Epoch 3, batch 3950, loss[loss=0.2949, simple_loss=0.3696, pruned_loss=0.1101, over 4842.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3527, pruned_loss=0.1096, over 944450.63 frames. ], batch size: 16, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:35:53,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=32526.666666666668, ans=0.00379855072463768 +2024-07-27 15:35:58,548 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 6.366e+01 6.864e+01 8.017e+01 1.947e+02, threshold=1.373e+02, percent-clipped=1.0 +2024-07-27 15:36:03,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.96 vs. limit=12.0 +2024-07-27 15:36:11,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=32566.666666666668, ans=10.0 +2024-07-27 15:36:17,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=32580.0, ans=0.0 +2024-07-27 15:36:17,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=32580.0, ans=0.05 +2024-07-27 15:36:22,715 INFO [train.py:1114] (2/4) Epoch 3, batch 4000, loss[loss=0.2014, simple_loss=0.2854, pruned_loss=0.05869, over 4772.00 frames. ], tot_loss[loss=0.2881, simple_loss=0.3542, pruned_loss=0.111, over 941335.51 frames. ], batch size: 12, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:36:22,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=32593.333333333332, ans=0.125 +2024-07-27 15:36:27,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.77 vs. limit=12.0 +2024-07-27 15:36:30,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=32593.333333333332, ans=15.0 +2024-07-27 15:36:31,725 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.48 vs. limit=10.0 +2024-07-27 15:36:44,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.34 vs. 
limit=22.5 +2024-07-27 15:36:51,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=32646.666666666668, ans=0.025 +2024-07-27 15:36:54,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.29 vs. limit=10.0 +2024-07-27 15:36:58,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=32660.0, ans=0.0 +2024-07-27 15:36:59,121 INFO [train.py:1114] (2/4) Epoch 3, batch 4050, loss[loss=0.3378, simple_loss=0.3716, pruned_loss=0.152, over 3498.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3532, pruned_loss=0.1115, over 940276.12 frames. ], batch size: 35, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:36:59,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.57 vs. limit=15.0 +2024-07-27 15:37:05,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=32660.0, ans=0.125 +2024-07-27 15:37:11,299 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.463e+01 6.459e+01 6.983e+01 7.697e+01 1.084e+02, threshold=1.397e+02, percent-clipped=0.0 +2024-07-27 15:37:13,014 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.71 vs. limit=15.0 +2024-07-27 15:37:17,574 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.52 vs. limit=10.0 +2024-07-27 15:37:25,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=32700.0, ans=0.07 +2024-07-27 15:37:26,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32700.0, ans=0.0 +2024-07-27 15:37:30,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=32713.333333333332, ans=0.2 +2024-07-27 15:37:34,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=32713.333333333332, ans=0.125 +2024-07-27 15:37:37,963 INFO [train.py:1114] (2/4) Epoch 3, batch 4100, loss[loss=0.2672, simple_loss=0.3383, pruned_loss=0.09808, over 4906.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.3534, pruned_loss=0.1117, over 938754.71 frames. 
], batch size: 15, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:37:45,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=32740.0, ans=0.09899494936611666 +2024-07-27 15:37:48,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32740.0, ans=0.125 +2024-07-27 15:37:57,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=32753.333333333332, ans=0.0 +2024-07-27 15:37:57,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=32753.333333333332, ans=0.125 +2024-07-27 15:38:00,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=32766.666666666668, ans=0.125 +2024-07-27 15:38:14,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.11 vs. limit=15.0 +2024-07-27 15:38:15,398 INFO [train.py:1114] (2/4) Epoch 3, batch 4150, loss[loss=0.2879, simple_loss=0.3585, pruned_loss=0.1087, over 4819.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3533, pruned_loss=0.111, over 938091.28 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:38:16,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=32793.333333333336, ans=0.09899494936611666 +2024-07-27 15:38:20,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.12 vs. limit=15.0 +2024-07-27 15:38:25,904 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+01 6.227e+01 6.781e+01 8.028e+01 1.229e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 15:38:29,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=32820.0, ans=0.1 +2024-07-27 15:38:33,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.84 vs. limit=15.0 +2024-07-27 15:38:37,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32833.333333333336, ans=0.125 +2024-07-27 15:38:41,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=32833.333333333336, ans=0.003731884057971014 +2024-07-27 15:38:54,828 INFO [train.py:1114] (2/4) Epoch 3, batch 4200, loss[loss=0.261, simple_loss=0.3412, pruned_loss=0.0904, over 4899.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.354, pruned_loss=0.1116, over 939556.67 frames. 
], batch size: 15, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:39:05,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=32873.333333333336, ans=0.0 +2024-07-27 15:39:12,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=32886.666666666664, ans=0.05 +2024-07-27 15:39:13,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=32886.666666666664, ans=0.125 +2024-07-27 15:39:26,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32913.333333333336, ans=0.1 +2024-07-27 15:39:31,229 INFO [train.py:1114] (2/4) Epoch 3, batch 4250, loss[loss=0.2341, simple_loss=0.3045, pruned_loss=0.08183, over 4645.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3538, pruned_loss=0.111, over 940521.75 frames. ], batch size: 12, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:39:33,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=32926.666666666664, ans=0.2 +2024-07-27 15:39:41,111 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.439e+01 6.186e+01 6.763e+01 7.704e+01 1.140e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-27 15:39:47,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=32953.333333333336, ans=0.125 +2024-07-27 15:39:51,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=32953.333333333336, ans=0.125 +2024-07-27 15:39:56,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=32966.666666666664, ans=0.125 +2024-07-27 15:40:06,797 INFO [train.py:1114] (2/4) Epoch 3, batch 4300, loss[loss=0.2961, simple_loss=0.3706, pruned_loss=0.1108, over 4763.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3547, pruned_loss=0.1115, over 939925.65 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:40:25,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33020.0, ans=0.1 +2024-07-27 15:40:29,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=33033.333333333336, ans=0.2 +2024-07-27 15:40:31,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33033.333333333336, ans=0.125 +2024-07-27 15:40:41,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=33046.666666666664, ans=0.125 +2024-07-27 15:40:42,785 INFO [train.py:1114] (2/4) Epoch 3, batch 4350, loss[loss=0.3118, simple_loss=0.3764, pruned_loss=0.1236, over 4742.00 frames. ], tot_loss[loss=0.2883, simple_loss=0.3548, pruned_loss=0.1109, over 940942.33 frames. 
], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:40:50,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=33073.333333333336, ans=0.125 +2024-07-27 15:40:51,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=33073.333333333336, ans=0.125 +2024-07-27 15:40:54,690 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.277e+01 6.236e+01 6.804e+01 7.780e+01 1.356e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-27 15:41:13,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.20 vs. limit=15.0 +2024-07-27 15:41:13,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=15.0 +2024-07-27 15:41:19,011 INFO [train.py:1114] (2/4) Epoch 3, batch 4400, loss[loss=0.2969, simple_loss=0.3641, pruned_loss=0.1148, over 4810.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3551, pruned_loss=0.1107, over 941020.88 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:41:33,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=33153.333333333336, ans=0.125 +2024-07-27 15:41:40,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=33166.666666666664, ans=0.0 +2024-07-27 15:41:44,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=33166.666666666664, ans=0.2 +2024-07-27 15:41:58,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=15.0 +2024-07-27 15:41:58,402 INFO [train.py:1114] (2/4) Epoch 3, batch 4450, loss[loss=0.2354, simple_loss=0.3126, pruned_loss=0.07909, over 4938.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3559, pruned_loss=0.1115, over 939282.55 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:42:05,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=33206.666666666664, ans=0.125 +2024-07-27 15:42:05,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=33206.666666666664, ans=0.0036507246376811598 +2024-07-27 15:42:07,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=33206.666666666664, ans=0.0036507246376811598 +2024-07-27 15:42:07,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.68 vs. 
limit=12.0 +2024-07-27 15:42:08,448 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 6.763e+01 7.448e+01 8.954e+01 1.362e+02, threshold=1.490e+02, percent-clipped=1.0 +2024-07-27 15:42:10,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=33206.666666666664, ans=0.0 +2024-07-27 15:42:17,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33220.0, ans=0.125 +2024-07-27 15:42:24,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=33233.333333333336, ans=0.0036449275362318836 +2024-07-27 15:42:29,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=33246.666666666664, ans=0.0036420289855072473 +2024-07-27 15:42:44,727 INFO [train.py:1114] (2/4) Epoch 3, batch 4500, loss[loss=0.2736, simple_loss=0.3542, pruned_loss=0.09655, over 4739.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3564, pruned_loss=0.1115, over 938640.63 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:42:50,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=33260.0, ans=0.0 +2024-07-27 15:42:51,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0 +2024-07-27 15:42:53,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=33273.333333333336, ans=0.0 +2024-07-27 15:43:08,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33300.0, ans=0.1 +2024-07-27 15:43:09,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=33300.0, ans=0.1 +2024-07-27 15:43:18,851 INFO [train.py:1114] (2/4) Epoch 3, batch 4550, loss[loss=0.2862, simple_loss=0.351, pruned_loss=0.1107, over 4893.00 frames. ], tot_loss[loss=0.2896, simple_loss=0.3562, pruned_loss=0.1115, over 940381.60 frames. ], batch size: 13, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:43:26,454 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:43:29,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=33340.0, ans=0.0036217391304347825 +2024-07-27 15:43:30,999 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.619e+01 7.429e+01 8.895e+01 1.429e+02, threshold=1.486e+02, percent-clipped=0.0 +2024-07-27 15:43:34,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=33353.333333333336, ans=0.0036188405797101444 +2024-07-27 15:43:38,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=33353.333333333336, ans=0.125 +2024-07-27 15:43:45,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=33366.666666666664, ans=0.003615942028985508 +2024-07-27 15:43:57,981 INFO [train.py:1114] (2/4) Epoch 3, batch 4600, loss[loss=0.3105, simple_loss=0.379, pruned_loss=0.121, over 4496.00 frames. 
], tot_loss[loss=0.2888, simple_loss=0.3552, pruned_loss=0.1112, over 938698.94 frames. ], batch size: 21, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:44:24,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=33446.666666666664, ans=0.0 +2024-07-27 15:44:32,059 INFO [train.py:1114] (2/4) Epoch 3, batch 4650, loss[loss=0.2802, simple_loss=0.3622, pruned_loss=0.09911, over 4841.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3556, pruned_loss=0.1107, over 940240.74 frames. ], batch size: 16, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:44:32,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=33460.0, ans=0.0035956521739130432 +2024-07-27 15:44:42,684 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.996e+01 6.580e+01 7.328e+01 8.938e+01 2.315e+02, threshold=1.466e+02, percent-clipped=1.0 +2024-07-27 15:44:43,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=33473.333333333336, ans=0.0 +2024-07-27 15:44:44,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=33473.333333333336, ans=0.07 +2024-07-27 15:44:58,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=33500.0, ans=0.2 +2024-07-27 15:45:05,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=33513.333333333336, ans=0.125 +2024-07-27 15:45:08,280 INFO [train.py:1114] (2/4) Epoch 3, batch 4700, loss[loss=0.2317, simple_loss=0.2923, pruned_loss=0.08557, over 4710.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3546, pruned_loss=0.1102, over 937601.22 frames. ], batch size: 11, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:08,534 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:45:10,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-07-27 15:45:21,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=33553.333333333336, ans=10.0 +2024-07-27 15:45:21,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=33553.333333333336, ans=0.0035753623188405793 +2024-07-27 15:45:22,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=33553.333333333336, ans=0.125 +2024-07-27 15:45:46,782 INFO [train.py:1114] (2/4) Epoch 3, batch 4750, loss[loss=0.3344, simple_loss=0.3892, pruned_loss=0.1398, over 4503.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3555, pruned_loss=0.1111, over 935585.50 frames. 
], batch size: 21, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:50,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=33593.333333333336, ans=0.025 +2024-07-27 15:45:57,485 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.473e+01 7.371e+01 8.571e+01 1.233e+02, threshold=1.474e+02, percent-clipped=0.0 +2024-07-27 15:46:17,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=33646.666666666664, ans=0.125 +2024-07-27 15:46:21,154 INFO [train.py:1114] (2/4) Epoch 3, batch 4800, loss[loss=0.3115, simple_loss=0.3953, pruned_loss=0.1139, over 4686.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3543, pruned_loss=0.1107, over 932714.87 frames. ], batch size: 13, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:46:21,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.44 vs. limit=15.0 +2024-07-27 15:46:24,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=33660.0, ans=0.0 +2024-07-27 15:46:26,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=33660.0, ans=0.003552173913043478 +2024-07-27 15:46:26,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=33660.0, ans=0.003552173913043478 +2024-07-27 15:46:32,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.09 vs. limit=15.0 +2024-07-27 15:46:47,727 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:46:56,843 INFO [train.py:1114] (2/4) Epoch 3, batch 4850, loss[loss=0.2892, simple_loss=0.3561, pruned_loss=0.1112, over 4745.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3558, pruned_loss=0.1115, over 932640.35 frames. ], batch size: 14, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:47:01,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=33726.666666666664, ans=0.035 +2024-07-27 15:47:07,315 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.462e+01 7.308e+01 8.577e+01 1.443e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 15:47:15,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33753.333333333336, ans=0.1 +2024-07-27 15:47:18,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0 +2024-07-27 15:47:31,290 INFO [train.py:1114] (2/4) Epoch 3, batch 4900, loss[loss=0.2538, simple_loss=0.3292, pruned_loss=0.08919, over 4767.00 frames. ], tot_loss[loss=0.2893, simple_loss=0.3557, pruned_loss=0.1114, over 934611.53 frames. 
], batch size: 13, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:47:35,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=33793.333333333336, ans=0.2 +2024-07-27 15:47:43,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=33806.666666666664, ans=0.125 +2024-07-27 15:47:49,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.02 vs. limit=22.5 +2024-07-27 15:48:01,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=33846.666666666664, ans=0.125 +2024-07-27 15:48:01,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=33846.666666666664, ans=0.125 +2024-07-27 15:48:04,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=33846.666666666664, ans=0.125 +2024-07-27 15:48:06,511 INFO [train.py:1114] (2/4) Epoch 3, batch 4950, loss[loss=0.3773, simple_loss=0.4212, pruned_loss=0.1667, over 3312.00 frames. ], tot_loss[loss=0.2909, simple_loss=0.3571, pruned_loss=0.1124, over 932180.05 frames. ], batch size: 35, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:10,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33860.0, ans=0.125 +2024-07-27 15:48:13,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.75 vs. limit=15.0 +2024-07-27 15:48:14,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33873.333333333336, ans=0.1 +2024-07-27 15:48:15,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=33873.333333333336, ans=0.0 +2024-07-27 15:48:15,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=33873.333333333336, ans=0.0 +2024-07-27 15:48:16,792 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.224e+01 6.458e+01 7.350e+01 8.583e+01 1.982e+02, threshold=1.470e+02, percent-clipped=1.0 +2024-07-27 15:48:25,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=33886.666666666664, ans=0.125 +2024-07-27 15:48:30,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=33900.0, ans=0.125 +2024-07-27 15:48:31,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=33900.0, ans=0.0035000000000000005 +2024-07-27 15:48:31,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=33900.0, ans=0.125 +2024-07-27 15:48:41,077 INFO [train.py:1114] (2/4) Epoch 3, batch 5000, loss[loss=0.298, simple_loss=0.3631, pruned_loss=0.1165, over 4662.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3563, pruned_loss=0.1119, over 935791.42 frames. 
], batch size: 14, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:42,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=33926.666666666664, ans=0.2 +2024-07-27 15:48:47,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33940.0, ans=0.1 +2024-07-27 15:48:48,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.52 vs. limit=22.5 +2024-07-27 15:48:49,523 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 15:48:52,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=33940.0, ans=0.125 +2024-07-27 15:48:58,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=33953.333333333336, ans=0.0 +2024-07-27 15:48:58,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.88 vs. limit=6.0 +2024-07-27 15:49:09,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=33980.0, ans=0.125 +2024-07-27 15:49:20,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33980.0, ans=0.1 +2024-07-27 15:49:21,836 INFO [train.py:1114] (2/4) Epoch 3, batch 5050, loss[loss=0.2675, simple_loss=0.334, pruned_loss=0.1005, over 4862.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3557, pruned_loss=0.1115, over 937915.18 frames. ], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:49:34,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=33993.333333333336, ans=0.05 +2024-07-27 15:49:40,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=34006.666666666664, ans=0.2 +2024-07-27 15:49:40,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.504e+01 6.490e+01 6.878e+01 7.828e+01 1.247e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 15:49:44,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=34020.0, ans=0.125 +2024-07-27 15:49:57,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=34033.333333333336, ans=15.0 +2024-07-27 15:49:59,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34046.666666666664, ans=0.0 +2024-07-27 15:50:05,323 INFO [train.py:1114] (2/4) Epoch 3, batch 5100, loss[loss=0.2614, simple_loss=0.3295, pruned_loss=0.09669, over 4771.00 frames. ], tot_loss[loss=0.29, simple_loss=0.356, pruned_loss=0.112, over 935852.15 frames. 
], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:50:07,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=34060.0, ans=0.0 +2024-07-27 15:50:09,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=34060.0, ans=0.003465217391304348 +2024-07-27 15:50:13,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=34073.333333333336, ans=0.0 +2024-07-27 15:50:26,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=34086.666666666664, ans=0.0034594202898550726 +2024-07-27 15:50:34,944 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:50:35,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=34100.0, ans=0.125 +2024-07-27 15:50:35,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=34100.0, ans=0.2 +2024-07-27 15:50:35,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=34100.0, ans=0.0034565217391304354 +2024-07-27 15:50:39,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=34113.333333333336, ans=0.0 +2024-07-27 15:50:45,835 INFO [train.py:1114] (2/4) Epoch 3, batch 5150, loss[loss=0.273, simple_loss=0.3416, pruned_loss=0.1021, over 4836.00 frames. ], tot_loss[loss=0.2901, simple_loss=0.3563, pruned_loss=0.112, over 936622.75 frames. ], batch size: 16, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:50:51,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=15.0 +2024-07-27 15:50:56,023 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.638e+01 7.768e+01 8.989e+01 1.373e+02, threshold=1.554e+02, percent-clipped=0.0 +2024-07-27 15:51:01,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.85 vs. limit=15.0 +2024-07-27 15:51:05,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=34166.666666666664, ans=0.125 +2024-07-27 15:51:06,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=34166.666666666664, ans=0.0034420289855072476 +2024-07-27 15:51:10,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34166.666666666664, ans=0.1 +2024-07-27 15:51:13,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=34180.0, ans=0.2 +2024-07-27 15:51:22,079 INFO [train.py:1114] (2/4) Epoch 3, batch 5200, loss[loss=0.2746, simple_loss=0.3503, pruned_loss=0.09947, over 4661.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.3561, pruned_loss=0.1111, over 936525.22 frames. 
], batch size: 14, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:51:27,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=34193.333333333336, ans=0.07 +2024-07-27 15:51:40,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=34220.0, ans=0.2 +2024-07-27 15:51:42,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34220.0, ans=0.125 +2024-07-27 15:51:45,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34220.0, ans=0.1 +2024-07-27 15:51:51,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34233.333333333336, ans=0.1 +2024-07-27 15:51:58,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=34246.666666666664, ans=0.2 +2024-07-27 15:52:05,530 INFO [train.py:1114] (2/4) Epoch 3, batch 5250, loss[loss=0.2405, simple_loss=0.3182, pruned_loss=0.08141, over 4902.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3547, pruned_loss=0.1103, over 935737.18 frames. ], batch size: 13, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:52:06,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34260.0, ans=0.1 +2024-07-27 15:52:12,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=34273.333333333336, ans=0.05 +2024-07-27 15:52:18,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.312e+01 6.590e+01 7.442e+01 8.415e+01 1.347e+02, threshold=1.488e+02, percent-clipped=0.0 +2024-07-27 15:52:36,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=34300.0, ans=0.125 +2024-07-27 15:52:38,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34313.333333333336, ans=0.125 +2024-07-27 15:52:45,537 INFO [train.py:1114] (2/4) Epoch 3, batch 5300, loss[loss=0.2953, simple_loss=0.3583, pruned_loss=0.1161, over 4642.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3543, pruned_loss=0.1101, over 934107.16 frames. ], batch size: 16, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:52:50,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=34326.666666666664, ans=0.125 +2024-07-27 15:52:52,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34340.0, ans=0.125 +2024-07-27 15:52:58,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=34353.333333333336, ans=0.04949747468305833 +2024-07-27 15:53:13,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34366.666666666664, ans=0.125 +2024-07-27 15:53:21,081 INFO [train.py:1114] (2/4) Epoch 3, batch 5350, loss[loss=0.2441, simple_loss=0.3101, pruned_loss=0.08907, over 4506.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3531, pruned_loss=0.1091, over 936145.85 frames. 
], batch size: 10, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:53:27,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=34393.333333333336, ans=0.0 +2024-07-27 15:53:32,210 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.392e+01 6.457e+01 7.092e+01 8.534e+01 1.457e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 15:53:39,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-07-27 15:53:44,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.97 vs. limit=6.0 +2024-07-27 15:53:53,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=34446.666666666664, ans=0.0 +2024-07-27 15:53:56,312 INFO [train.py:1114] (2/4) Epoch 3, batch 5400, loss[loss=0.3368, simple_loss=0.3889, pruned_loss=0.1424, over 4329.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3539, pruned_loss=0.1102, over 930301.25 frames. ], batch size: 25, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:22,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.65 vs. limit=22.5 +2024-07-27 15:54:30,037 INFO [train.py:1114] (2/4) Epoch 3, batch 5450, loss[loss=0.2773, simple_loss=0.3303, pruned_loss=0.1122, over 4717.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3533, pruned_loss=0.1092, over 933205.44 frames. ], batch size: 11, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:38,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=34540.0, ans=0.125 +2024-07-27 15:54:40,908 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.485e+01 6.790e+01 7.649e+01 9.479e+01 1.674e+02, threshold=1.530e+02, percent-clipped=4.0 +2024-07-27 15:55:00,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.16 vs. limit=15.0 +2024-07-27 15:55:07,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=34580.0, ans=0.125 +2024-07-27 15:55:09,458 INFO [train.py:1114] (2/4) Epoch 3, batch 5500, loss[loss=0.2674, simple_loss=0.3326, pruned_loss=0.1011, over 4174.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3529, pruned_loss=0.1093, over 930481.08 frames. ], batch size: 25, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:55:13,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34593.333333333336, ans=0.125 +2024-07-27 15:55:16,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34606.666666666664, ans=0.1 +2024-07-27 15:55:19,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=34606.666666666664, ans=0.0 +2024-07-27 15:55:19,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.51 vs. 
limit=22.5 +2024-07-27 15:55:23,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=34620.0, ans=10.0 +2024-07-27 15:55:42,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=34646.666666666664, ans=0.125 +2024-07-27 15:55:45,404 INFO [train.py:1114] (2/4) Epoch 3, batch 5550, loss[loss=0.263, simple_loss=0.3375, pruned_loss=0.0942, over 4717.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3531, pruned_loss=0.1094, over 932515.10 frames. ], batch size: 12, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:56:05,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.078e+01 6.918e+01 7.816e+01 8.981e+01 2.239e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 15:56:18,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=34700.0, ans=0.125 +2024-07-27 15:56:22,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=34713.333333333336, ans=0.125 +2024-07-27 15:56:28,880 INFO [train.py:1114] (2/4) Epoch 3, batch 5600, loss[loss=0.2603, simple_loss=0.3336, pruned_loss=0.09351, over 4750.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3534, pruned_loss=0.1095, over 933691.12 frames. ], batch size: 14, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:56:30,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34726.666666666664, ans=0.125 +2024-07-27 15:56:31,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34726.666666666664, ans=0.125 +2024-07-27 15:56:33,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=34726.666666666664, ans=0.025 +2024-07-27 15:56:44,483 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.000e+01 +2024-07-27 15:56:58,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.01 vs. limit=12.0 +2024-07-27 15:57:00,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34780.0, ans=0.1 +2024-07-27 15:57:04,494 INFO [train.py:1114] (2/4) Epoch 3, batch 5650, loss[loss=0.3274, simple_loss=0.394, pruned_loss=0.1304, over 4486.00 frames. ], tot_loss[loss=0.2845, simple_loss=0.3521, pruned_loss=0.1084, over 936434.13 frames. ], batch size: 21, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:57:04,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=34793.333333333336, ans=0.2 +2024-07-27 15:57:19,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+01 6.421e+01 6.946e+01 8.141e+01 1.354e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-27 15:57:21,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=34806.666666666664, ans=0.125 +2024-07-27 15:57:31,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.05 vs. 
limit=22.5 +2024-07-27 15:57:31,826 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.41 vs. limit=22.5 +2024-07-27 15:57:36,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=34846.666666666664, ans=0.125 +2024-07-27 15:57:39,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=34846.666666666664, ans=0.1 +2024-07-27 15:57:43,147 INFO [train.py:1114] (2/4) Epoch 3, batch 5700, loss[loss=0.2412, simple_loss=0.3248, pruned_loss=0.07884, over 4697.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3528, pruned_loss=0.1088, over 937598.99 frames. ], batch size: 13, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:57:46,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=34860.0, ans=0.125 +2024-07-27 15:57:51,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.47 vs. limit=15.0 +2024-07-27 15:58:05,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.93 vs. limit=15.0 +2024-07-27 15:58:06,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.03 vs. limit=22.5 +2024-07-27 15:58:06,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=34900.0, ans=0.0 +2024-07-27 15:58:16,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=34913.333333333336, ans=0.1 +2024-07-27 15:58:18,959 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:58:19,560 INFO [train.py:1114] (2/4) Epoch 3, batch 5750, loss[loss=0.2908, simple_loss=0.3525, pruned_loss=0.1145, over 4725.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3543, pruned_loss=0.1096, over 938026.91 frames. ], batch size: 19, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:58:26,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=34940.0, ans=0.0 +2024-07-27 15:58:29,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=34940.0, ans=0.125 +2024-07-27 15:58:30,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=34940.0, ans=10.0 +2024-07-27 15:58:30,754 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.375e+01 6.773e+01 7.385e+01 8.434e+01 1.352e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 15:58:38,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=34953.333333333336, ans=0.0032710144927536227 +2024-07-27 15:58:56,480 INFO [train.py:1114] (2/4) Epoch 3, batch 5800, loss[loss=0.3312, simple_loss=0.3883, pruned_loss=0.137, over 4656.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3547, pruned_loss=0.11, over 937136.78 frames. 
], batch size: 19, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:59:01,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=34993.333333333336, ans=0.125 +2024-07-27 15:59:08,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35006.666666666664, ans=0.1 +2024-07-27 15:59:10,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=35020.0, ans=0.125 +2024-07-27 15:59:17,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35033.333333333336, ans=0.1 +2024-07-27 15:59:17,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35033.333333333336, ans=0.125 +2024-07-27 15:59:17,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=35033.333333333336, ans=0.2 +2024-07-27 15:59:27,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35046.666666666664, ans=0.0 +2024-07-27 15:59:30,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35060.0, ans=0.1 +2024-07-27 15:59:30,563 INFO [train.py:1114] (2/4) Epoch 3, batch 5850, loss[loss=0.2784, simple_loss=0.3411, pruned_loss=0.1078, over 4570.00 frames. ], tot_loss[loss=0.2862, simple_loss=0.3531, pruned_loss=0.1096, over 937471.94 frames. ], batch size: 21, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:59:42,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35073.333333333336, ans=0.125 +2024-07-27 15:59:45,118 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.773e+01 7.644e+01 9.466e+01 1.883e+02, threshold=1.529e+02, percent-clipped=1.0 +2024-07-27 15:59:46,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.81 vs. limit=10.0 +2024-07-27 15:59:47,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.08 vs. limit=10.0 +2024-07-27 15:59:49,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=35086.666666666664, ans=0.0 +2024-07-27 15:59:58,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=35100.0, ans=0.125 +2024-07-27 16:00:09,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=35113.333333333336, ans=0.025 +2024-07-27 16:00:10,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=21.94 vs. limit=15.0 +2024-07-27 16:00:12,752 INFO [train.py:1114] (2/4) Epoch 3, batch 5900, loss[loss=0.3057, simple_loss=0.3718, pruned_loss=0.1198, over 4679.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3529, pruned_loss=0.1102, over 937472.87 frames. 
], batch size: 15, lr: 2.19e-02, grad_scale: 16.0 +2024-07-27 16:00:27,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35153.333333333336, ans=0.1 +2024-07-27 16:00:39,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=35166.666666666664, ans=0.5 +2024-07-27 16:00:42,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.16 vs. limit=15.0 +2024-07-27 16:00:50,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.99 vs. limit=10.0 +2024-07-27 16:00:52,881 INFO [train.py:1114] (2/4) Epoch 3, batch 5950, loss[loss=0.2892, simple_loss=0.3579, pruned_loss=0.1102, over 4681.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3542, pruned_loss=0.1101, over 939327.45 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 16.0 +2024-07-27 16:00:54,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.70 vs. limit=15.0 +2024-07-27 16:01:06,210 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.498e+01 6.928e+01 7.896e+01 9.145e+01 1.429e+02, threshold=1.579e+02, percent-clipped=0.0 +2024-07-27 16:01:09,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=35220.0, ans=0.95 +2024-07-27 16:01:13,165 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:01:13,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=35220.0, ans=0.0 +2024-07-27 16:01:16,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.73 vs. limit=15.0 +2024-07-27 16:01:19,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=35233.333333333336, ans=0.125 +2024-07-27 16:01:26,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.65 vs. limit=15.0 +2024-07-27 16:01:27,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=35246.666666666664, ans=0.0 +2024-07-27 16:01:29,016 INFO [train.py:1114] (2/4) Epoch 3, batch 6000, loss[loss=0.3122, simple_loss=0.3654, pruned_loss=0.1295, over 4177.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3525, pruned_loss=0.109, over 936658.60 frames. ], batch size: 25, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:01:29,017 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 16:01:40,727 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2286, simple_loss=0.328, pruned_loss=0.06459, over 944034.00 frames. 
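The three train.py entries just above ("Computing validation loss", the "Epoch 3, validation: ..." summary, and the peak-memory report that follows below) mark the periodic held-out evaluation the trainer runs every few thousand batches. As a rough sketch of the kind of code that emits them — not the actual icefall train.py, whose model API, batch schema, and loss weighting differ — assuming a generic PyTorch recipe where the transducer returns a (simple_loss, pruned_loss) pair:

import logging
import torch

def compute_validation_loss(model, dev_loader, epoch, device="cuda"):
    # Mimics the "Computing validation loss" / "Epoch N, validation: ..."
    # log entries above. Illustrative sketch only.
    logging.info("Computing validation loss")
    model.eval()
    totals = {"loss": 0.0, "simple_loss": 0.0, "pruned_loss": 0.0}
    total_frames = 0.0
    with torch.no_grad():
        for batch in dev_loader:
            features = batch["features"].to(device)   # hypothetical schema
            targets = batch["targets"].to(device)
            num_frames = float(batch["num_frames"])
            # Hypothetical model API: the transducer head returns the two
            # loss terms that the log tracks separately.
            simple_loss, pruned_loss = model(features, targets)
            # Assumed combination of the two terms; the real weighting
            # lives in the training script.
            loss = 0.5 * simple_loss + pruned_loss
            # Weight each batch by its acoustic frames so the summary can
            # report an average "over N frames".
            totals["loss"] += loss.item() * num_frames
            totals["simple_loss"] += simple_loss.item() * num_frames
            totals["pruned_loss"] += pruned_loss.item() * num_frames
            total_frames += num_frames
    model.train()
    stats = ", ".join(f"{k}={v / total_frames:.4g}" for k, v in totals.items())
    logging.info(f"Epoch {epoch}, validation: {stats}, over {total_frames:.2f} frames.")
    if torch.cuda.is_available():
        peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        logging.info(f"Maximum memory allocated so far is {peak_mb}MB")

The frame weighting is why the summary reads "over 944034.00 frames." rather than a batch count: each batch's losses are averaged in proportion to how many acoustic frames it covers, so long utterances count for more than short ones.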
+2024-07-27 16:01:40,728 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 16:01:56,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=35286.666666666664, ans=0.035 +2024-07-27 16:02:05,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35300.0, ans=0.125 +2024-07-27 16:02:09,419 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0 +2024-07-27 16:02:10,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.78 vs. limit=12.0 +2024-07-27 16:02:13,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=12.0 +2024-07-27 16:02:16,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35326.666666666664, ans=0.125 +2024-07-27 16:02:17,548 INFO [train.py:1114] (2/4) Epoch 3, batch 6050, loss[loss=0.2549, simple_loss=0.3197, pruned_loss=0.09501, over 4784.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3514, pruned_loss=0.1082, over 938232.22 frames. ], batch size: 12, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:02:28,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=35340.0, ans=0.2 +2024-07-27 16:02:29,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.303e+01 6.741e+01 7.557e+01 8.762e+01 1.550e+02, threshold=1.511e+02, percent-clipped=0.0 +2024-07-27 16:02:29,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=35340.0, ans=0.0 +2024-07-27 16:02:30,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35353.333333333336, ans=0.125 +2024-07-27 16:02:30,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35353.333333333336, ans=0.1 +2024-07-27 16:02:34,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=35353.333333333336, ans=10.0 +2024-07-27 16:02:37,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.83 vs. limit=15.0 +2024-07-27 16:02:53,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35380.0, ans=0.125 +2024-07-27 16:02:56,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=35380.0, ans=0.125 +2024-07-27 16:02:59,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=35393.333333333336, ans=0.125 +2024-07-27 16:02:59,368 INFO [train.py:1114] (2/4) Epoch 3, batch 6100, loss[loss=0.2414, simple_loss=0.3343, pruned_loss=0.07424, over 4694.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3505, pruned_loss=0.108, over 937585.86 frames. 
], batch size: 15, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:03:02,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=35393.333333333336, ans=0.0 +2024-07-27 16:03:11,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=35406.666666666664, ans=0.0 +2024-07-27 16:03:13,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=35420.0, ans=0.0 +2024-07-27 16:03:21,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=35433.333333333336, ans=15.0 +2024-07-27 16:03:21,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-07-27 16:03:33,313 INFO [train.py:1114] (2/4) Epoch 3, batch 6150, loss[loss=0.3349, simple_loss=0.3707, pruned_loss=0.1495, over 3764.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3519, pruned_loss=0.1086, over 937290.03 frames. ], batch size: 37, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:03:34,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=35460.0, ans=0.125 +2024-07-27 16:03:37,899 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=19.96 vs. limit=15.0 +2024-07-27 16:03:42,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.05 vs. limit=15.0 +2024-07-27 16:03:46,863 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.689e+01 7.561e+01 9.895e+01 1.847e+02, threshold=1.512e+02, percent-clipped=5.0 +2024-07-27 16:03:48,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=35486.666666666664, ans=0.003155072463768116 +2024-07-27 16:03:49,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.75 vs. limit=10.0 +2024-07-27 16:03:49,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35486.666666666664, ans=0.125 +2024-07-27 16:03:51,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=35486.666666666664, ans=0.04949747468305833 +2024-07-27 16:04:08,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35526.666666666664, ans=0.1 +2024-07-27 16:04:09,293 INFO [train.py:1114] (2/4) Epoch 3, batch 6200, loss[loss=0.2518, simple_loss=0.3356, pruned_loss=0.08405, over 4734.00 frames. ], tot_loss[loss=0.2844, simple_loss=0.3519, pruned_loss=0.1085, over 936887.06 frames. 
], batch size: 14, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:04:11,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=35526.666666666664, ans=0.125 +2024-07-27 16:04:18,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=35540.0, ans=0.0 +2024-07-27 16:04:22,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35553.333333333336, ans=0.1 +2024-07-27 16:04:32,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35566.666666666664, ans=0.1 +2024-07-27 16:04:39,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35580.0, ans=0.125 +2024-07-27 16:04:42,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=35580.0, ans=0.0 +2024-07-27 16:04:43,465 INFO [train.py:1114] (2/4) Epoch 3, batch 6250, loss[loss=0.2707, simple_loss=0.3522, pruned_loss=0.09457, over 4819.00 frames. ], tot_loss[loss=0.285, simple_loss=0.3522, pruned_loss=0.1089, over 933256.17 frames. ], batch size: 14, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:04:53,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35606.666666666664, ans=0.125 +2024-07-27 16:04:54,893 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.517e+01 6.327e+01 7.433e+01 8.878e+01 1.317e+02, threshold=1.487e+02, percent-clipped=0.0 +2024-07-27 16:04:57,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=35606.666666666664, ans=0.125 +2024-07-27 16:05:08,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=35633.333333333336, ans=12.0 +2024-07-27 16:05:09,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=35633.333333333336, ans=0.125 +2024-07-27 16:05:10,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=14.52 vs. limit=15.0 +2024-07-27 16:05:13,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.85 vs. limit=22.5 +2024-07-27 16:05:27,310 INFO [train.py:1114] (2/4) Epoch 3, batch 6300, loss[loss=0.2294, simple_loss=0.3021, pruned_loss=0.07834, over 4552.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3522, pruned_loss=0.1088, over 930158.03 frames. 
], batch size: 10, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:05:28,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35660.0, ans=0.125 +2024-07-27 16:05:28,909 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:05:34,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35673.333333333336, ans=0.125 +2024-07-27 16:05:43,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=35686.666666666664, ans=0.025 +2024-07-27 16:05:46,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.06 vs. limit=22.5 +2024-07-27 16:05:51,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=35700.0, ans=0.125 +2024-07-27 16:05:53,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=12.35 vs. limit=15.0 +2024-07-27 16:05:58,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=35713.333333333336, ans=0.07 +2024-07-27 16:05:59,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35713.333333333336, ans=0.125 +2024-07-27 16:06:00,853 INFO [train.py:1114] (2/4) Epoch 3, batch 6350, loss[loss=0.3071, simple_loss=0.3565, pruned_loss=0.1288, over 4538.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3513, pruned_loss=0.1079, over 934539.58 frames. ], batch size: 21, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:06:12,644 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.099e+01 7.124e+01 7.949e+01 9.215e+01 1.375e+02, threshold=1.590e+02, percent-clipped=0.0 +2024-07-27 16:06:14,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=35753.333333333336, ans=0.1 +2024-07-27 16:06:24,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=35766.666666666664, ans=0.5 +2024-07-27 16:06:26,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=35766.666666666664, ans=0.2 +2024-07-27 16:06:36,562 INFO [train.py:1114] (2/4) Epoch 3, batch 6400, loss[loss=0.3115, simple_loss=0.3645, pruned_loss=0.1293, over 4641.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3514, pruned_loss=0.1083, over 936130.47 frames. ], batch size: 13, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:06:38,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35793.333333333336, ans=0.1 +2024-07-27 16:06:42,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35806.666666666664, ans=0.125 +2024-07-27 16:06:45,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.04 vs. 
limit=15.0 +2024-07-27 16:06:52,133 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:07:07,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0 +2024-07-27 16:07:12,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=35846.666666666664, ans=0.0 +2024-07-27 16:07:15,632 INFO [train.py:1114] (2/4) Epoch 3, batch 6450, loss[loss=0.2896, simple_loss=0.355, pruned_loss=0.1121, over 4448.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3501, pruned_loss=0.1071, over 939543.31 frames. ], batch size: 21, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:07:25,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=35873.333333333336, ans=0.09899494936611666 +2024-07-27 16:07:25,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.06 vs. limit=10.0 +2024-07-27 16:07:32,075 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 7.043e+01 8.051e+01 9.807e+01 1.613e+02, threshold=1.610e+02, percent-clipped=2.0 +2024-07-27 16:07:34,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=35886.666666666664, ans=0.125 +2024-07-27 16:07:43,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35900.0, ans=0.1 +2024-07-27 16:07:44,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=35900.0, ans=0.0030652173913043477 +2024-07-27 16:07:45,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=35900.0, ans=0.2 +2024-07-27 16:07:46,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.94 vs. limit=22.5 +2024-07-27 16:07:46,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=35913.333333333336, ans=0.0 +2024-07-27 16:07:47,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=35913.333333333336, ans=0.0030623188405797096 +2024-07-27 16:07:49,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35913.333333333336, ans=0.125 +2024-07-27 16:07:53,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=35913.333333333336, ans=0.125 +2024-07-27 16:07:55,528 INFO [train.py:1114] (2/4) Epoch 3, batch 6500, loss[loss=0.473, simple_loss=0.479, pruned_loss=0.2335, over 3329.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3497, pruned_loss=0.1069, over 940510.59 frames. 
], batch size: 36, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:08:00,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=35926.666666666664, ans=0.05 +2024-07-27 16:08:01,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=35940.0, ans=10.0 +2024-07-27 16:08:08,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35953.333333333336, ans=0.125 +2024-07-27 16:08:13,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=35953.333333333336, ans=0.125 +2024-07-27 16:08:16,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.19 vs. limit=15.0 +2024-07-27 16:08:17,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=35966.666666666664, ans=0.125 +2024-07-27 16:08:19,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.61 vs. limit=22.5 +2024-07-27 16:08:22,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. limit=15.0 +2024-07-27 16:08:28,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=35993.333333333336, ans=0.125 +2024-07-27 16:08:29,033 INFO [train.py:1114] (2/4) Epoch 3, batch 6550, loss[loss=0.2371, simple_loss=0.312, pruned_loss=0.08106, over 4809.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3488, pruned_loss=0.1056, over 943283.57 frames. ], batch size: 11, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:08:38,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. 
limit=15.0 +2024-07-27 16:08:41,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36006.666666666664, ans=0.125 +2024-07-27 16:08:44,821 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.305e+01 6.734e+01 7.453e+01 8.745e+01 1.645e+02, threshold=1.491e+02, percent-clipped=1.0 +2024-07-27 16:08:46,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=36020.0, ans=0.0 +2024-07-27 16:08:46,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=36020.0, ans=0.125 +2024-07-27 16:08:48,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=36020.0, ans=0.125 +2024-07-27 16:08:50,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=36020.0, ans=0.125 +2024-07-27 16:08:52,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36020.0, ans=0.1 +2024-07-27 16:08:52,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=36020.0, ans=0.125 +2024-07-27 16:09:02,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=36046.666666666664, ans=0.125 +2024-07-27 16:09:07,364 INFO [train.py:1114] (2/4) Epoch 3, batch 6600, loss[loss=0.2854, simple_loss=0.3612, pruned_loss=0.1048, over 4932.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3493, pruned_loss=0.1058, over 945150.83 frames. ], batch size: 14, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:09:12,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36060.0, ans=0.0 +2024-07-27 16:09:29,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=36100.0, ans=0.0030217391304347826 +2024-07-27 16:09:39,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=36113.333333333336, ans=0.09899494936611666 +2024-07-27 16:09:42,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.61 vs. limit=22.5 +2024-07-27 16:09:43,503 INFO [train.py:1114] (2/4) Epoch 3, batch 6650, loss[loss=0.3116, simple_loss=0.3684, pruned_loss=0.1274, over 4606.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3493, pruned_loss=0.1062, over 943727.91 frames. 
], batch size: 17, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:09:44,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36126.666666666664, ans=0.125 +2024-07-27 16:09:47,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36126.666666666664, ans=0.1 +2024-07-27 16:09:54,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=36140.0, ans=0.125 +2024-07-27 16:09:54,999 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.831e+01 8.168e+01 1.025e+02 1.593e+02, threshold=1.634e+02, percent-clipped=2.0 +2024-07-27 16:10:00,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=36140.0, ans=0.125 +2024-07-27 16:10:04,341 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0 +2024-07-27 16:10:11,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=36166.666666666664, ans=0.125 +2024-07-27 16:10:13,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=36166.666666666664, ans=0.2 +2024-07-27 16:10:21,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36180.0, ans=0.0 +2024-07-27 16:10:24,376 INFO [train.py:1114] (2/4) Epoch 3, batch 6700, loss[loss=0.2419, simple_loss=0.3226, pruned_loss=0.08063, over 4759.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3507, pruned_loss=0.1071, over 942810.46 frames. ], batch size: 19, lr: 2.16e-02, grad_scale: 32.0 +2024-07-27 16:10:25,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36193.333333333336, ans=0.125 +2024-07-27 16:10:30,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36206.666666666664, ans=0.125 +2024-07-27 16:10:35,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=36206.666666666664, ans=0.125 +2024-07-27 16:10:39,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=22.03 vs. limit=22.5 +2024-07-27 16:10:40,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36220.0, ans=0.125 +2024-07-27 16:10:48,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.31 vs. limit=15.0 +2024-07-27 16:10:52,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=36246.666666666664, ans=0.025 +2024-07-27 16:10:59,152 INFO [train.py:1114] (2/4) Epoch 3, batch 6750, loss[loss=0.2913, simple_loss=0.3624, pruned_loss=0.1101, over 4192.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3509, pruned_loss=0.1073, over 940578.99 frames. 
], batch size: 25, lr: 2.16e-02, grad_scale: 32.0 +2024-07-27 16:11:09,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=36273.333333333336, ans=0.09899494936611666 +2024-07-27 16:11:10,904 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.207e+01 6.852e+01 7.798e+01 8.780e+01 1.253e+02, threshold=1.560e+02, percent-clipped=0.0 +2024-07-27 16:11:11,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=36273.333333333336, ans=0.125 +2024-07-27 16:11:19,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36300.0, ans=0.125 +2024-07-27 16:11:27,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36313.333333333336, ans=0.0 +2024-07-27 16:11:33,088 INFO [train.py:1114] (2/4) Epoch 3, batch 6800, loss[loss=0.2955, simple_loss=0.3426, pruned_loss=0.1242, over 4637.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3514, pruned_loss=0.1074, over 939010.92 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0 +2024-07-27 16:11:36,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=36326.666666666664, ans=0.0 +2024-07-27 16:11:41,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=36340.0, ans=0.0 +2024-07-27 16:11:48,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=36353.333333333336, ans=0.2 +2024-07-27 16:11:52,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=36353.333333333336, ans=0.2 +2024-07-27 16:11:54,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36366.666666666664, ans=0.1 +2024-07-27 16:11:55,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36366.666666666664, ans=0.125 +2024-07-27 16:11:59,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.75 vs. limit=15.0 +2024-07-27 16:12:08,429 INFO [train.py:1114] (2/4) Epoch 3, batch 6850, loss[loss=0.2992, simple_loss=0.3801, pruned_loss=0.1091, over 4688.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3525, pruned_loss=0.1084, over 940549.81 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0 +2024-07-27 16:12:09,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.11 vs. 
limit=6.0 +2024-07-27 16:12:16,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36406.666666666664, ans=0.125 +2024-07-27 16:12:19,974 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.863e+01 7.550e+01 8.711e+01 1.509e+02, threshold=1.510e+02, percent-clipped=0.0 +2024-07-27 16:12:24,543 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:12:28,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36420.0, ans=0.1 +2024-07-27 16:12:31,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.58 vs. limit=15.0 +2024-07-27 16:12:42,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36446.666666666664, ans=0.125 +2024-07-27 16:12:42,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36446.666666666664, ans=0.125 +2024-07-27 16:12:44,068 INFO [train.py:1114] (2/4) Epoch 3, batch 6900, loss[loss=0.2738, simple_loss=0.3421, pruned_loss=0.1027, over 4954.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.352, pruned_loss=0.108, over 942715.24 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0 +2024-07-27 16:12:46,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=36460.0, ans=0.002943478260869565 +2024-07-27 16:12:46,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=36460.0, ans=0.125 +2024-07-27 16:12:52,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=36473.333333333336, ans=0.2 +2024-07-27 16:13:14,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.14 vs. limit=15.0 +2024-07-27 16:13:18,526 INFO [train.py:1114] (2/4) Epoch 3, batch 6950, loss[loss=0.2206, simple_loss=0.2813, pruned_loss=0.07998, over 4522.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.351, pruned_loss=0.1079, over 940407.37 frames. ], batch size: 10, lr: 2.15e-02, grad_scale: 16.0 +2024-07-27 16:13:22,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.34 vs. 
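limit=15.0

The `Whitening` lines report a per-module statistic compared against a scheduled limit; when the metric stays under the limit (as in the entry just above, 10.34 vs. 15.0) no penalty applies, while entries such as "metric=26.06 vs. limit=22.5" indicate the module is being pushed back toward whiter activations. One plausible reading of the metric, assumed here rather than taken from scaling.py, is the ratio of the mean squared eigenvalue of the feature covariance to its squared mean eigenvalue, which equals 1.0 when the covariance is a multiple of the identity:

```python
# Hedged sketch of a whiteness metric of the kind these lines report.
# Assumption: metric = mean(eig(cov)^2) / mean(eig(cov))^2 per channel group;
# this is 1.0 for a perfectly white (isotropic) covariance and grows as the
# spectrum becomes more anisotropic. Not copied from scaling.py.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """x: (num_frames, num_channels); returns the mean metric over groups."""
    n, _ = x.shape
    metrics = []
    for g in x.chunk(num_groups, dim=1):
        g = g - g.mean(dim=0)            # zero-mean each channel
        cov = (g.T @ g) / n              # per-group covariance
        d = cov.shape[0]
        mean_eig_sq = torch.diagonal(cov @ cov).sum() / d   # mean of eig^2
        mean_eig = torch.diagonal(cov).sum() / d            # mean of eig
        metrics.append(mean_eig_sq / (mean_eig ** 2 + 1e-20))
    return torch.stack(metrics).mean().item()

x = torch.randn(2000, 384)               # nearly white features
print(whitening_metric(x))               # ~1.2 (sampling noise); correlated
                                         # channels push this far higher
```
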
+2024-07-27 16:13:27,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=36540.0, ans=0.002926086956521738
+2024-07-27 16:13:30,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=36540.0, ans=0.0
+2024-07-27 16:13:32,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=36540.0, ans=0.002926086956521738
+2024-07-27 16:13:32,728 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.730e+01 8.056e+01 9.531e+01 1.380e+02, threshold=1.611e+02, percent-clipped=0.0
+2024-07-27 16:13:41,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=36566.666666666664, ans=0.0
+2024-07-27 16:13:42,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=36566.666666666664, ans=0.125
+2024-07-27 16:13:54,235 INFO [train.py:1114] (2/4) Epoch 3, batch 7000, loss[loss=0.3153, simple_loss=0.3861, pruned_loss=0.1223, over 4603.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3494, pruned_loss=0.1063, over 939117.46 frames. ], batch size: 17, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:13:56,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36593.333333333336, ans=0.1
+2024-07-27 16:14:19,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=36633.333333333336, ans=0.0
+2024-07-27 16:14:21,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=36646.666666666664, ans=0.035
+2024-07-27 16:14:28,483 INFO [train.py:1114] (2/4) Epoch 3, batch 7050, loss[loss=0.3068, simple_loss=0.3696, pruned_loss=0.122, over 4684.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3493, pruned_loss=0.1059, over 942328.87 frames. ], batch size: 19, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:14:41,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=15.0
+2024-07-27 16:14:47,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=36673.333333333336, ans=0.07
+2024-07-27 16:14:50,687 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.466e+01 6.926e+01 7.603e+01 8.954e+01 1.226e+02, threshold=1.521e+02, percent-clipped=0.0
+2024-07-27 16:14:58,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=36686.666666666664, ans=0.025
+2024-07-27 16:15:10,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=36713.333333333336, ans=0.05
+2024-07-27 16:15:13,291 INFO [train.py:1114] (2/4) Epoch 3, batch 7100, loss[loss=0.2688, simple_loss=0.3329, pruned_loss=0.1024, over 4815.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3513, pruned_loss=0.1074, over 936799.34 frames. 
], batch size: 15, lr: 2.15e-02, grad_scale: 16.0 +2024-07-27 16:15:32,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=36766.666666666664, ans=0.2 +2024-07-27 16:15:51,824 INFO [train.py:1114] (2/4) Epoch 3, batch 7150, loss[loss=0.277, simple_loss=0.3505, pruned_loss=0.1018, over 4489.00 frames. ], tot_loss[loss=0.2811, simple_loss=0.3494, pruned_loss=0.1064, over 937862.66 frames. ], batch size: 21, lr: 2.15e-02, grad_scale: 16.0 +2024-07-27 16:15:52,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.37 vs. limit=22.5 +2024-07-27 16:16:07,965 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.541e+01 6.708e+01 7.597e+01 9.458e+01 1.380e+02, threshold=1.519e+02, percent-clipped=0.0 +2024-07-27 16:16:10,145 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:16:15,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.58 vs. limit=22.5 +2024-07-27 16:16:16,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=36833.333333333336, ans=0.125 +2024-07-27 16:16:29,439 INFO [train.py:1114] (2/4) Epoch 3, batch 7200, loss[loss=0.2886, simple_loss=0.3557, pruned_loss=0.1107, over 4799.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3505, pruned_loss=0.1072, over 938042.21 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 32.0 +2024-07-27 16:16:31,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=36860.0, ans=0.125 +2024-07-27 16:16:34,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=36860.0, ans=0.125 +2024-07-27 16:16:46,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36886.666666666664, ans=0.1 +2024-07-27 16:16:47,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=36886.666666666664, ans=0.2 +2024-07-27 16:17:11,409 INFO [train.py:1114] (2/4) Epoch 3, batch 7250, loss[loss=0.2631, simple_loss=0.3244, pruned_loss=0.1009, over 4838.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3496, pruned_loss=0.1068, over 939888.92 frames. 
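], batch size: 12, lr: 2.14e-02, grad_scale: 32.0

Each `ScheduledFloat` line reports a regularization hyperparameter (`ans`) evaluated at the current `batch_count`: dropout rates, skip/bypass rates, and balancer probabilities that are annealed as training progresses. A minimal sketch of such a batch-count-indexed schedule, assuming piecewise-linear interpolation between breakpoints; the breakpoints below are invented for illustration, not the values used in this run:

```python
# Minimal sketch of a float scheduled on batch_count, assuming piecewise-
# linear interpolation between (batch_count, value) breakpoints. The
# breakpoints here are invented for illustration only.
from bisect import bisect_right

class ScheduledFloat:
    def __init__(self, *points):
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def __call__(self, batch_count: float) -> float:
        i = bisect_right(self.xs, batch_count)
        if i == 0:
            return self.ys[0]                    # before first breakpoint
        if i == len(self.xs):
            return self.ys[-1]                   # past last breakpoint
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# e.g. a dropout_p decaying from 0.3 to a floor of 0.1 by batch 20000:
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(36940.0))                        # 0.1, matching "ans=0.1" entries
```
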
+2024-07-27 16:17:13,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=36926.666666666664, ans=0.07
+2024-07-27 16:17:13,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=36926.666666666664, ans=0.2
+2024-07-27 16:17:16,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=36926.666666666664, ans=0.0
+2024-07-27 16:17:19,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=36940.0, ans=0.125
+2024-07-27 16:17:21,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=36940.0, ans=0.0
+2024-07-27 16:17:23,000 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.548e+01 7.607e+01 9.272e+01 1.593e+02, threshold=1.521e+02, percent-clipped=2.0
+2024-07-27 16:17:34,393 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:17:35,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.81 vs. limit=12.0
+2024-07-27 16:17:44,567 INFO [train.py:1114] (2/4) Epoch 3, batch 7300, loss[loss=0.2724, simple_loss=0.3405, pruned_loss=0.1021, over 4862.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3502, pruned_loss=0.107, over 939822.32 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:55,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.68 vs. limit=15.0
+2024-07-27 16:18:00,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=37020.0, ans=0.2
+2024-07-27 16:18:06,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.90 vs. limit=15.0
+2024-07-27 16:18:13,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.30 vs. limit=22.5
+2024-07-27 16:18:17,359 INFO [train.py:1114] (2/4) Epoch 3, batch 7350, loss[loss=0.2956, simple_loss=0.3572, pruned_loss=0.1171, over 4642.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3498, pruned_loss=0.1068, over 939266.07 frames. 
], batch size: 12, lr: 2.14e-02, grad_scale: 32.0 +2024-07-27 16:18:18,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=37060.0, ans=0.125 +2024-07-27 16:18:23,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=37073.333333333336, ans=0.0 +2024-07-27 16:18:29,222 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.542e+01 6.884e+01 7.906e+01 1.038e+02 1.585e+02, threshold=1.581e+02, percent-clipped=4.0 +2024-07-27 16:18:30,105 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:18:36,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=37100.0, ans=0.002804347826086957 +2024-07-27 16:18:42,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.51 vs. limit=22.5 +2024-07-27 16:18:43,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37113.333333333336, ans=0.125 +2024-07-27 16:18:46,843 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:18:49,937 INFO [train.py:1114] (2/4) Epoch 3, batch 7400, loss[loss=0.2355, simple_loss=0.3234, pruned_loss=0.07384, over 4693.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3492, pruned_loss=0.1063, over 940599.04 frames. ], batch size: 13, lr: 2.14e-02, grad_scale: 32.0 +2024-07-27 16:18:54,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-27 16:18:58,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 16:19:08,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=37153.333333333336, ans=0.0027927536231884056 +2024-07-27 16:19:21,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=37180.0, ans=0.0027869565217391304 +2024-07-27 16:19:22,924 INFO [train.py:1114] (2/4) Epoch 3, batch 7450, loss[loss=0.2519, simple_loss=0.3255, pruned_loss=0.0891, over 4610.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3484, pruned_loss=0.1058, over 937863.69 frames. 
], batch size: 11, lr: 2.14e-02, grad_scale: 32.0 +2024-07-27 16:19:34,463 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.772e+01 7.758e+01 9.808e+01 2.086e+02, threshold=1.552e+02, percent-clipped=2.0 +2024-07-27 16:19:35,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=37220.0, ans=0.125 +2024-07-27 16:19:39,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=37220.0, ans=0.125 +2024-07-27 16:19:39,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37220.0, ans=0.0 +2024-07-27 16:19:41,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=37233.333333333336, ans=0.125 +2024-07-27 16:19:41,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=37233.333333333336, ans=0.125 +2024-07-27 16:19:43,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=37233.333333333336, ans=0.2 +2024-07-27 16:19:49,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=37246.666666666664, ans=0.125 +2024-07-27 16:19:50,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=37246.666666666664, ans=0.0 +2024-07-27 16:19:52,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.01 vs. limit=15.0 +2024-07-27 16:19:55,864 INFO [train.py:1114] (2/4) Epoch 3, batch 7500, loss[loss=0.3309, simple_loss=0.3815, pruned_loss=0.1401, over 3677.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3494, pruned_loss=0.1062, over 936630.71 frames. ], batch size: 35, lr: 2.13e-02, grad_scale: 16.0 +2024-07-27 16:19:58,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.95 vs. limit=15.0 +2024-07-27 16:19:59,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37260.0, ans=0.0 +2024-07-27 16:20:04,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-27 16:20:14,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=37286.666666666664, ans=0.125 +2024-07-27 16:20:14,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.69 vs. limit=15.0 +2024-07-27 16:20:23,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=37313.333333333336, ans=0.125 +2024-07-27 16:20:29,139 INFO [train.py:1114] (2/4) Epoch 3, batch 7550, loss[loss=0.2929, simple_loss=0.3622, pruned_loss=0.1118, over 4602.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3515, pruned_loss=0.1078, over 936240.35 frames. 
], batch size: 17, lr: 2.13e-02, grad_scale: 16.0 +2024-07-27 16:21:33,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=37340.0, ans=0.025 +2024-07-27 16:21:35,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=37340.0, ans=0.0 +2024-07-27 16:21:36,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=37340.0, ans=0.125 +2024-07-27 16:21:39,447 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.630e+01 6.808e+01 7.761e+01 9.046e+01 1.679e+02, threshold=1.552e+02, percent-clipped=1.0 +2024-07-27 16:21:39,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=37353.333333333336, ans=0.2 +2024-07-27 16:21:44,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=37353.333333333336, ans=0.125 +2024-07-27 16:21:46,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=37366.666666666664, ans=0.95 +2024-07-27 16:21:46,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=37366.666666666664, ans=0.125 +2024-07-27 16:21:51,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37366.666666666664, ans=0.1 +2024-07-27 16:21:54,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=12.0 +2024-07-27 16:21:56,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=37380.0, ans=0.2 +2024-07-27 16:21:56,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37380.0, ans=0.1 +2024-07-27 16:21:59,552 INFO [train.py:1114] (2/4) Epoch 3, batch 7600, loss[loss=0.287, simple_loss=0.3649, pruned_loss=0.1046, over 4809.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3506, pruned_loss=0.1073, over 937946.80 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0 +2024-07-27 16:22:06,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=37406.666666666664, ans=0.00273768115942029 +2024-07-27 16:22:14,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=37420.0, ans=0.0 +2024-07-27 16:22:31,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=37433.333333333336, ans=0.05 +2024-07-27 16:22:32,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=37446.666666666664, ans=0.0027289855072463775 +2024-07-27 16:22:37,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37446.666666666664, ans=0.125 +2024-07-27 16:22:40,209 INFO [train.py:1114] (2/4) Epoch 3, batch 7650, loss[loss=0.2562, simple_loss=0.329, pruned_loss=0.09173, over 4940.00 frames. 
], tot_loss[loss=0.2817, simple_loss=0.3497, pruned_loss=0.1068, over 937090.55 frames. ], batch size: 12, lr: 2.13e-02, grad_scale: 32.0 +2024-07-27 16:22:53,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=37460.0, ans=0.125 +2024-07-27 16:23:02,039 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.597e+01 7.183e+01 8.812e+01 1.036e+02 1.540e+02, threshold=1.762e+02, percent-clipped=0.0 +2024-07-27 16:23:12,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=37486.666666666664, ans=0.5 +2024-07-27 16:23:14,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.58 vs. limit=22.5 +2024-07-27 16:23:39,139 INFO [train.py:1114] (2/4) Epoch 3, batch 7700, loss[loss=0.2811, simple_loss=0.3485, pruned_loss=0.1068, over 4696.00 frames. ], tot_loss[loss=0.2824, simple_loss=0.3509, pruned_loss=0.107, over 934817.47 frames. ], batch size: 13, lr: 2.13e-02, grad_scale: 32.0 +2024-07-27 16:23:50,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=37526.666666666664, ans=0.2 +2024-07-27 16:23:53,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37540.0, ans=0.1 +2024-07-27 16:23:55,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=37540.0, ans=0.125 +2024-07-27 16:23:57,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.04 vs. limit=10.0 +2024-07-27 16:24:05,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=37553.333333333336, ans=0.125 +2024-07-27 16:24:09,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37566.666666666664, ans=0.125 +2024-07-27 16:24:10,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37566.666666666664, ans=0.1 +2024-07-27 16:24:17,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=37580.0, ans=0.2 +2024-07-27 16:24:22,435 INFO [train.py:1114] (2/4) Epoch 3, batch 7750, loss[loss=0.3167, simple_loss=0.376, pruned_loss=0.1287, over 4940.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.353, pruned_loss=0.1081, over 936265.69 frames. 
], batch size: 14, lr: 2.13e-02, grad_scale: 32.0 +2024-07-27 16:24:42,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=37593.333333333336, ans=0.5 +2024-07-27 16:24:49,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=37606.666666666664, ans=0.025 +2024-07-27 16:24:52,945 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.584e+01 7.302e+01 8.614e+01 1.487e+02, threshold=1.460e+02, percent-clipped=0.0 +2024-07-27 16:24:57,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=37620.0, ans=0.2 +2024-07-27 16:25:00,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=37633.333333333336, ans=0.035 +2024-07-27 16:25:09,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37633.333333333336, ans=0.125 +2024-07-27 16:25:14,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37646.666666666664, ans=0.1 +2024-07-27 16:25:24,448 INFO [train.py:1114] (2/4) Epoch 3, batch 7800, loss[loss=0.3091, simple_loss=0.3804, pruned_loss=0.1189, over 4665.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.353, pruned_loss=0.1081, over 937683.00 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0 +2024-07-27 16:25:27,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37660.0, ans=0.1 +2024-07-27 16:25:29,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.80 vs. limit=15.0 +2024-07-27 16:25:33,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=37673.333333333336, ans=0.2 +2024-07-27 16:25:36,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=37673.333333333336, ans=0.2 +2024-07-27 16:25:51,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37700.0, ans=0.1 +2024-07-27 16:25:55,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37713.333333333336, ans=0.1 +2024-07-27 16:25:58,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37713.333333333336, ans=0.1 +2024-07-27 16:26:06,471 INFO [train.py:1114] (2/4) Epoch 3, batch 7850, loss[loss=0.2398, simple_loss=0.3091, pruned_loss=0.08521, over 4510.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3521, pruned_loss=0.1076, over 936341.97 frames. ], batch size: 10, lr: 2.12e-02, grad_scale: 32.0 +2024-07-27 16:26:17,729 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.18 vs. 
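limit=6.0

The periodic `optim.py` WARNING lines summarize recent gradient norms as five quantiles (min, 25%, median, 75%, max) plus the active clipping threshold. In every such entry in this section the threshold equals Clipping_scale (2.0) times the logged median (e.g. 2.0 · 7.302e+01 = 1.460e+02 above), so the cut-off appears to track a running median of gradient norms, with percent-clipped reporting how often recent steps exceeded it. A hedged sketch of that bookkeeping; the window size and quantile estimator are assumptions, not taken from optim.py:

```python
# Hedged sketch of median-tracking gradient clipping consistent with the
# WARNING lines: threshold = clipping_scale * median(recent grad norms).
# Window size and quantile estimator are assumptions.
import torch

class MedianClipper:
    def __init__(self, clipping_scale: float = 2.0, window: int = 100):
        self.clipping_scale = clipping_scale
        self.window = window
        self.norms = []

    def clip_(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        # global norm over all parameter gradients
        norm = torch.linalg.vector_norm(
            torch.stack([torch.linalg.vector_norm(g) for g in grads])).item()
        self.norms = (self.norms + [norm])[-self.window:]
        quartiles = torch.quantile(torch.tensor(self.norms),
                                   torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * quartiles[2].item()  # 2.0 * median
        if norm > threshold:                     # rescale gradients in place
            for g in grads:
                g.mul_(threshold / norm)
        return threshold
```
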
+2024-07-27 16:26:19,923 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+01 6.634e+01 7.796e+01 9.040e+01 1.354e+02, threshold=1.559e+02, percent-clipped=0.0
+2024-07-27 16:26:37,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=37766.666666666664, ans=0.0
+2024-07-27 16:26:51,026 INFO [train.py:1114] (2/4) Epoch 3, batch 7900, loss[loss=0.2776, simple_loss=0.3618, pruned_loss=0.09667, over 4862.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3534, pruned_loss=0.1086, over 933550.88 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:27:03,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37793.333333333336, ans=0.0
+2024-07-27 16:27:04,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=37793.333333333336, ans=0.0
+2024-07-27 16:27:08,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=37806.666666666664, ans=0.125
+2024-07-27 16:27:14,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37806.666666666664, ans=0.1
+2024-07-27 16:27:18,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=37820.0, ans=0.2
+2024-07-27 16:27:57,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37846.666666666664, ans=0.1
+2024-07-27 16:27:58,711 INFO [train.py:1114] (2/4) Epoch 3, batch 7950, loss[loss=0.411, simple_loss=0.4493, pruned_loss=0.1863, over 3056.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3519, pruned_loss=0.1072, over 935396.68 frames. ], batch size: 35, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:28:13,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=37873.333333333336, ans=0.1
+2024-07-27 16:28:14,338 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.890e+01 6.860e+01 7.518e+01 9.206e+01 1.306e+02, threshold=1.504e+02, percent-clipped=0.0
+2024-07-27 16:28:19,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=37886.666666666664, ans=0.125
+2024-07-27 16:28:20,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=37886.666666666664, ans=0.125
+2024-07-27 16:28:51,989 INFO [train.py:1114] (2/4) Epoch 3, batch 8000, loss[loss=0.2865, simple_loss=0.3533, pruned_loss=0.1099, over 4616.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3504, pruned_loss=0.1073, over 934893.08 frames. 
], batch size: 11, lr: 2.12e-02, grad_scale: 32.0 +2024-07-27 16:29:20,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=37940.0, ans=0.125 +2024-07-27 16:29:25,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=37940.0, ans=0.0 +2024-07-27 16:29:38,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=37953.333333333336, ans=0.0 +2024-07-27 16:30:12,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. limit=6.0 +2024-07-27 16:30:16,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=37980.0, ans=0.05 +2024-07-27 16:30:21,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=37980.0, ans=0.2 +2024-07-27 16:30:39,456 INFO [train.py:1114] (2/4) Epoch 3, batch 8050, loss[loss=0.2637, simple_loss=0.3481, pruned_loss=0.08959, over 4812.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3507, pruned_loss=0.1068, over 933985.52 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0 +2024-07-27 16:31:03,283 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.744e+01 7.005e+01 8.059e+01 9.966e+01 1.848e+02, threshold=1.612e+02, percent-clipped=3.0 +2024-07-27 16:32:02,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=38033.333333333336, ans=0.0 +2024-07-27 16:32:05,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=38033.333333333336, ans=0.2 +2024-07-27 16:32:07,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=38033.333333333336, ans=0.035 +2024-07-27 16:32:07,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=38033.333333333336, ans=0.125 +2024-07-27 16:32:37,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.44 vs. limit=22.5 +2024-07-27 16:32:37,859 INFO [train.py:1114] (2/4) Epoch 3, batch 8100, loss[loss=0.276, simple_loss=0.352, pruned_loss=0.1, over 4813.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3518, pruned_loss=0.1079, over 933562.28 frames. 
], batch size: 15, lr: 2.11e-02, grad_scale: 32.0 +2024-07-27 16:33:13,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38073.333333333336, ans=0.125 +2024-07-27 16:33:29,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=38100.0, ans=0.2 +2024-07-27 16:33:31,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38100.0, ans=0.0 +2024-07-27 16:33:32,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38100.0, ans=0.1 +2024-07-27 16:33:36,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=38100.0, ans=0.125 +2024-07-27 16:33:44,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.80 vs. limit=22.5 +2024-07-27 16:33:47,669 INFO [train.py:1114] (2/4) Epoch 3, batch 8150, loss[loss=0.3163, simple_loss=0.3894, pruned_loss=0.1216, over 4804.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3504, pruned_loss=0.1073, over 936926.20 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0 +2024-07-27 16:34:10,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38140.0, ans=0.125 +2024-07-27 16:34:10,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-27 16:34:19,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.808e+01 6.778e+01 7.869e+01 9.669e+01 1.901e+02, threshold=1.574e+02, percent-clipped=1.0 +2024-07-27 16:34:24,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=38153.333333333336, ans=0.125 +2024-07-27 16:34:47,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=38166.666666666664, ans=0.125 +2024-07-27 16:34:48,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-07-27 16:34:49,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38180.0, ans=0.1 +2024-07-27 16:34:52,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=38180.0, ans=0.2 +2024-07-27 16:34:56,442 INFO [train.py:1114] (2/4) Epoch 3, batch 8200, loss[loss=0.3051, simple_loss=0.3564, pruned_loss=0.1269, over 4804.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3497, pruned_loss=0.1067, over 937964.21 frames. 
], batch size: 15, lr: 2.11e-02, grad_scale: 32.0 +2024-07-27 16:35:01,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=38193.333333333336, ans=0.0 +2024-07-27 16:35:11,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=38206.666666666664, ans=0.0 +2024-07-27 16:35:17,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38220.0, ans=0.1 +2024-07-27 16:35:19,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5 +2024-07-27 16:35:23,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-07-27 16:35:24,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-07-27 16:35:36,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.10 vs. limit=15.0 +2024-07-27 16:35:45,649 INFO [train.py:1114] (2/4) Epoch 3, batch 8250, loss[loss=0.2511, simple_loss=0.3296, pruned_loss=0.08628, over 4881.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3493, pruned_loss=0.106, over 938243.06 frames. ], batch size: 13, lr: 2.11e-02, grad_scale: 16.0 +2024-07-27 16:35:46,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=38260.0, ans=0.002552173913043478 +2024-07-27 16:35:47,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=38260.0, ans=0.125 +2024-07-27 16:35:48,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=38260.0, ans=0.125 +2024-07-27 16:36:00,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=38273.333333333336, ans=0.125 +2024-07-27 16:36:02,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0 +2024-07-27 16:36:04,215 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.777e+01 7.463e+01 9.374e+01 1.482e+02, threshold=1.493e+02, percent-clipped=0.0 +2024-07-27 16:36:13,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=38300.0, ans=0.125 +2024-07-27 16:36:16,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=38300.0, ans=0.05 +2024-07-27 16:36:20,912 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:36:24,685 INFO [train.py:1114] (2/4) Epoch 3, batch 8300, loss[loss=0.3237, simple_loss=0.4077, pruned_loss=0.1198, over 4892.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3521, pruned_loss=0.1072, over 938278.85 frames. 
], batch size: 15, lr: 2.11e-02, grad_scale: 16.0 +2024-07-27 16:36:29,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38326.666666666664, ans=0.1 +2024-07-27 16:36:42,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38353.333333333336, ans=0.0 +2024-07-27 16:37:01,101 INFO [train.py:1114] (2/4) Epoch 3, batch 8350, loss[loss=0.3239, simple_loss=0.3917, pruned_loss=0.128, over 4812.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3523, pruned_loss=0.1073, over 941191.23 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 16.0 +2024-07-27 16:37:01,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=38393.333333333336, ans=0.2 +2024-07-27 16:37:11,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=38393.333333333336, ans=22.5 +2024-07-27 16:38:23,651 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.458e+01 6.838e+01 7.813e+01 8.986e+01 1.214e+02, threshold=1.563e+02, percent-clipped=0.0 +2024-07-27 16:38:25,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=38420.0, ans=0.2 +2024-07-27 16:38:25,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=38420.0, ans=0.2 +2024-07-27 16:38:43,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=38433.333333333336, ans=0.125 +2024-07-27 16:38:43,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.05 vs. limit=22.5 +2024-07-27 16:38:44,695 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0 +2024-07-27 16:38:47,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=38433.333333333336, ans=0.125 +2024-07-27 16:38:50,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=38433.333333333336, ans=0.0 +2024-07-27 16:38:52,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=38446.666666666664, ans=0.002511594202898552 +2024-07-27 16:38:56,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=38446.666666666664, ans=0.125 +2024-07-27 16:39:01,313 INFO [train.py:1114] (2/4) Epoch 3, batch 8400, loss[loss=0.2142, simple_loss=0.2904, pruned_loss=0.06895, over 4768.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3513, pruned_loss=0.107, over 939766.29 frames. 
], batch size: 12, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:39:07,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=38460.0, ans=0.125 +2024-07-27 16:39:12,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38473.333333333336, ans=0.1 +2024-07-27 16:39:12,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38473.333333333336, ans=0.125 +2024-07-27 16:39:13,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=38473.333333333336, ans=0.0 +2024-07-27 16:39:25,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=38500.0, ans=0.07 +2024-07-27 16:39:26,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=38500.0, ans=0.125 +2024-07-27 16:39:29,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=38500.0, ans=0.125 +2024-07-27 16:39:46,493 INFO [train.py:1114] (2/4) Epoch 3, batch 8450, loss[loss=0.3043, simple_loss=0.3613, pruned_loss=0.1237, over 4797.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3519, pruned_loss=0.1072, over 938946.84 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:39:57,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38540.0, ans=0.1 +2024-07-27 16:39:58,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38540.0, ans=0.1 +2024-07-27 16:40:00,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38540.0, ans=0.0 +2024-07-27 16:40:03,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.960e+01 7.996e+01 9.204e+01 1.346e+02, threshold=1.599e+02, percent-clipped=0.0 +2024-07-27 16:40:22,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=38580.0, ans=0.125 +2024-07-27 16:40:34,575 INFO [train.py:1114] (2/4) Epoch 3, batch 8500, loss[loss=0.2643, simple_loss=0.3213, pruned_loss=0.1036, over 4624.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3504, pruned_loss=0.1061, over 938591.07 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:40:36,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=38593.333333333336, ans=0.2 +2024-07-27 16:40:47,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.87 vs. 
limit=15.0 +2024-07-27 16:40:48,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=38606.666666666664, ans=0.025 +2024-07-27 16:40:48,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=38620.0, ans=0.95 +2024-07-27 16:40:51,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=38620.0, ans=0.1 +2024-07-27 16:41:17,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=38646.666666666664, ans=0.125 +2024-07-27 16:41:19,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=38646.666666666664, ans=0.2 +2024-07-27 16:41:22,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=38660.0, ans=0.002465217391304348 +2024-07-27 16:41:22,943 INFO [train.py:1114] (2/4) Epoch 3, batch 8550, loss[loss=0.25, simple_loss=0.3314, pruned_loss=0.08431, over 4818.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3491, pruned_loss=0.1054, over 939759.28 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:41:48,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=38673.333333333336, ans=0.2 +2024-07-27 16:41:56,828 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:41:59,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=38686.666666666664, ans=0.125 +2024-07-27 16:41:59,823 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.027e+01 6.868e+01 7.768e+01 9.567e+01 1.448e+02, threshold=1.554e+02, percent-clipped=0.0 +2024-07-27 16:42:01,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.35 vs. limit=22.5 +2024-07-27 16:42:37,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=38713.333333333336, ans=0.0 +2024-07-27 16:42:39,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=38713.333333333336, ans=0.125 +2024-07-27 16:42:40,831 INFO [train.py:1114] (2/4) Epoch 3, batch 8600, loss[loss=0.284, simple_loss=0.3548, pruned_loss=0.1066, over 4808.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3481, pruned_loss=0.105, over 939196.30 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0 +2024-07-27 16:42:44,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=38726.666666666664, ans=0.125 +2024-07-27 16:43:10,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=38780.0, ans=0.025 +2024-07-27 16:43:13,334 INFO [train.py:1114] (2/4) Epoch 3, batch 8650, loss[loss=0.2714, simple_loss=0.339, pruned_loss=0.1019, over 4912.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3477, pruned_loss=0.1045, over 940326.51 frames. 
], batch size: 15, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:43:13,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=38793.333333333336, ans=0.0 +2024-07-27 16:44:00,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.909e+01 7.732e+01 9.254e+01 1.585e+02, threshold=1.546e+02, percent-clipped=1.0 +2024-07-27 16:44:27,637 INFO [train.py:1114] (2/4) Epoch 3, batch 8700, loss[loss=0.2829, simple_loss=0.3617, pruned_loss=0.1021, over 4757.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3493, pruned_loss=0.106, over 937639.85 frames. ], batch size: 13, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:44:31,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38860.0, ans=0.1 +2024-07-27 16:44:35,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38873.333333333336, ans=0.125 +2024-07-27 16:44:43,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=38886.666666666664, ans=0.125 +2024-07-27 16:45:09,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38913.333333333336, ans=0.125 +2024-07-27 16:45:11,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0 +2024-07-27 16:45:13,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=38913.333333333336, ans=0.0 +2024-07-27 16:45:15,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=38926.666666666664, ans=0.2 +2024-07-27 16:45:15,429 INFO [train.py:1114] (2/4) Epoch 3, batch 8750, loss[loss=0.282, simple_loss=0.3568, pruned_loss=0.1036, over 4678.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.35, pruned_loss=0.1065, over 936160.14 frames. ], batch size: 15, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:45:35,872 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.721e+01 7.628e+01 9.057e+01 1.548e+02, threshold=1.526e+02, percent-clipped=1.0 +2024-07-27 16:45:36,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.45 vs. limit=22.5 +2024-07-27 16:45:40,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=38953.333333333336, ans=0.002401449275362319 +2024-07-27 16:45:43,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=38966.666666666664, ans=0.125 +2024-07-27 16:45:48,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. 
limit=15.0 +2024-07-27 16:45:49,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38980.0, ans=0.1 +2024-07-27 16:45:53,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38980.0, ans=0.1 +2024-07-27 16:45:55,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=38980.0, ans=0.05 +2024-07-27 16:45:57,594 INFO [train.py:1114] (2/4) Epoch 3, batch 8800, loss[loss=0.2615, simple_loss=0.3424, pruned_loss=0.09032, over 4931.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3494, pruned_loss=0.106, over 936891.12 frames. ], batch size: 14, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:46:23,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.05 vs. limit=15.0 +2024-07-27 16:46:36,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39006.666666666664, ans=0.1 +2024-07-27 16:46:44,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=39020.0, ans=0.125 +2024-07-27 16:47:18,771 INFO [train.py:1114] (2/4) Epoch 3, batch 8850, loss[loss=0.3531, simple_loss=0.4023, pruned_loss=0.1519, over 4568.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3478, pruned_loss=0.1055, over 932271.26 frames. ], batch size: 21, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:47:29,291 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.66 vs. limit=15.0 +2024-07-27 16:47:32,062 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.718e+01 6.978e+01 8.333e+01 9.846e+01 2.201e+02, threshold=1.667e+02, percent-clipped=2.0 +2024-07-27 16:47:36,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=39086.666666666664, ans=0.025 +2024-07-27 16:48:06,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.45 vs. limit=15.0 +2024-07-27 16:48:07,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=39100.0, ans=0.05 +2024-07-27 16:48:10,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39113.333333333336, ans=0.125 +2024-07-27 16:48:16,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39113.333333333336, ans=0.1 +2024-07-27 16:48:17,166 INFO [train.py:1114] (2/4) Epoch 3, batch 8900, loss[loss=0.2205, simple_loss=0.2927, pruned_loss=0.0742, over 4937.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3484, pruned_loss=0.1058, over 930189.17 frames. 
], batch size: 12, lr: 2.09e-02, grad_scale: 32.0 +2024-07-27 16:48:23,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=39126.666666666664, ans=0.2 +2024-07-27 16:48:25,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39140.0, ans=0.1 +2024-07-27 16:48:50,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=39166.666666666664, ans=10.0 +2024-07-27 16:48:57,544 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:49:00,625 INFO [train.py:1114] (2/4) Epoch 3, batch 8950, loss[loss=0.3026, simple_loss=0.3698, pruned_loss=0.1177, over 4506.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3476, pruned_loss=0.1054, over 931235.10 frames. ], batch size: 21, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:49:04,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39193.333333333336, ans=0.1 +2024-07-27 16:49:18,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=39206.666666666664, ans=0.025 +2024-07-27 16:49:23,812 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.972e+01 6.809e+01 7.609e+01 8.972e+01 1.358e+02, threshold=1.522e+02, percent-clipped=0.0 +2024-07-27 16:49:29,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39233.333333333336, ans=0.0 +2024-07-27 16:49:37,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=39233.333333333336, ans=0.125 +2024-07-27 16:49:39,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.68 vs. limit=15.0 +2024-07-27 16:49:51,639 INFO [train.py:1114] (2/4) Epoch 3, batch 9000, loss[loss=0.2435, simple_loss=0.3217, pruned_loss=0.08261, over 4641.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.346, pruned_loss=0.104, over 934060.11 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:49:51,640 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 16:50:01,282 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0195, 3.4478, 3.5668, 3.3442], device='cuda:2') +2024-07-27 16:50:05,981 INFO [train.py:1146] (2/4) Epoch 3, validation: loss=0.2254, simple_loss=0.3252, pruned_loss=0.06281, over 944034.00 frames. 
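+
+The validation line above also pins down how the logged losses relate: in both the running tot_loss entries and the validation reports, loss matches 0.5 * simple_loss + pruned_loss (here 0.5 * 0.3252 + 0.06281 = 0.2254), consistent with pruned-transducer training where simple_loss comes from the simple joiner and pruned_loss from the pruned lattice. A minimal sketch that checks this decomposition against the logged numbers; the 0.5 scale on simple_loss is inferred from this log, not read from the training config:
+
+```python
+# Check that logged losses decompose as
+#   loss ~= SIMPLE_LOSS_SCALE * simple_loss + pruned_loss
+# The 0.5 scale is an assumption recovered from the logged numbers,
+# not taken from the training configuration.
+
+SIMPLE_LOSS_SCALE = 0.5
+
+def consistent(loss: float, simple_loss: float, pruned_loss: float,
+               tol: float = 5e-4) -> bool:
+    """True if a logged (loss, simple_loss, pruned_loss) triple fits the weighting."""
+    return abs(SIMPLE_LOSS_SCALE * simple_loss + pruned_loss - loss) < tol
+
+# Epoch 3 validation: loss=0.2254, simple_loss=0.3252, pruned_loss=0.06281
+assert consistent(0.2254, 0.3252, 0.06281)
+# Epoch 3, batch 9050 running average: loss=0.2771, simple_loss=0.3461, pruned_loss=0.1041
+assert consistent(0.2771, 0.3461, 0.1041)
+```
+
+Note the difference in scope: the "over N frames" figure on tot_loss lines appears to be the size of the running-average window, while each validation report covers the full 944034-frame validation set.
+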
+2024-07-27 16:50:05,982 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 16:50:33,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=39286.666666666664, ans=0.125 +2024-07-27 16:50:35,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39286.666666666664, ans=0.1 +2024-07-27 16:50:42,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=39300.0, ans=0.125 +2024-07-27 16:50:51,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=39313.333333333336, ans=0.0 +2024-07-27 16:50:52,319 INFO [train.py:1114] (2/4) Epoch 3, batch 9050, loss[loss=0.274, simple_loss=0.3266, pruned_loss=0.1107, over 4492.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3461, pruned_loss=0.1041, over 934268.49 frames. ], batch size: 10, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:51:04,888 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.807e+01 7.856e+01 8.861e+01 3.440e+02, threshold=1.571e+02, percent-clipped=1.0 +2024-07-27 16:51:49,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=39380.0, ans=0.125 +2024-07-27 16:52:04,351 INFO [train.py:1114] (2/4) Epoch 3, batch 9100, loss[loss=0.3084, simple_loss=0.3736, pruned_loss=0.1216, over 4929.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3453, pruned_loss=0.1031, over 936717.68 frames. ], batch size: 14, lr: 2.08e-02, grad_scale: 16.0 +2024-07-27 16:52:10,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=39393.333333333336, ans=0.025 +2024-07-27 16:52:19,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39406.666666666664, ans=0.125 +2024-07-27 16:52:19,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=39406.666666666664, ans=0.002302898550724638 +2024-07-27 16:52:24,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=39406.666666666664, ans=0.07 +2024-07-27 16:52:28,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=39420.0, ans=0.002300000000000001 +2024-07-27 16:52:35,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1.whitening_limit, batch_count=39420.0, ans=10.0 +2024-07-27 16:52:40,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39433.333333333336, ans=0.125 +2024-07-27 16:52:45,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=39446.666666666664, ans=0.2 +2024-07-27 16:52:57,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39446.666666666664, ans=0.1 +2024-07-27 16:53:06,167 INFO [train.py:1114] (2/4) Epoch 3, batch 9150, loss[loss=0.2885, simple_loss=0.3636, pruned_loss=0.1067, over 4815.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3474, pruned_loss=0.104, over 935171.46 frames. 
], batch size: 14, lr: 2.08e-02, grad_scale: 16.0 +2024-07-27 16:53:19,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39460.0, ans=0.125 +2024-07-27 16:53:22,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39460.0, ans=0.125 +2024-07-27 16:53:24,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=39460.0, ans=0.0022913043478260866 +2024-07-27 16:53:30,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39473.333333333336, ans=0.125 +2024-07-27 16:53:31,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.63 vs. limit=15.0 +2024-07-27 16:53:32,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=39473.333333333336, ans=0.025 +2024-07-27 16:53:34,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.44 vs. limit=15.0 +2024-07-27 16:53:38,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.526e+01 6.919e+01 8.427e+01 9.572e+01 1.552e+02, threshold=1.685e+02, percent-clipped=0.0 +2024-07-27 16:53:54,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=39500.0, ans=0.125 +2024-07-27 16:54:03,878 INFO [train.py:1114] (2/4) Epoch 3, batch 9200, loss[loss=0.2909, simple_loss=0.3578, pruned_loss=0.112, over 4849.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3469, pruned_loss=0.1033, over 937427.94 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:54:17,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=15.0 +2024-07-27 16:54:31,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39580.0, ans=0.0 +2024-07-27 16:54:36,154 INFO [train.py:1114] (2/4) Epoch 3, batch 9250, loss[loss=0.3175, simple_loss=0.3749, pruned_loss=0.1301, over 4633.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3468, pruned_loss=0.1034, over 938256.92 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:54:36,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=39593.333333333336, ans=0.00226231884057971 +2024-07-27 16:54:49,934 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.796e+01 6.391e+01 6.941e+01 8.054e+01 1.289e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 16:54:53,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39620.0, ans=0.1 +2024-07-27 16:54:57,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=39633.333333333336, ans=0.2 +2024-07-27 16:55:02,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.28 vs. 
limit=15.0 +2024-07-27 16:55:08,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=39646.666666666664, ans=0.0 +2024-07-27 16:55:08,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=39646.666666666664, ans=0.0022507246376811604 +2024-07-27 16:55:10,143 INFO [train.py:1114] (2/4) Epoch 3, batch 9300, loss[loss=0.2176, simple_loss=0.295, pruned_loss=0.07012, over 4782.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3462, pruned_loss=0.1036, over 938020.79 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:55:12,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=39660.0, ans=0.125 +2024-07-27 16:55:33,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=39686.666666666664, ans=0.0 +2024-07-27 16:55:45,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=39700.0, ans=0.125 +2024-07-27 16:56:05,287 INFO [train.py:1114] (2/4) Epoch 3, batch 9350, loss[loss=0.2133, simple_loss=0.2893, pruned_loss=0.06865, over 4811.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3464, pruned_loss=0.1036, over 935050.96 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:12,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=39740.0, ans=0.125 +2024-07-27 16:56:20,302 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.575e+01 6.744e+01 7.337e+01 8.957e+01 1.228e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 16:56:21,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=39753.333333333336, ans=0.0 +2024-07-27 16:56:22,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=39753.333333333336, ans=0.0 +2024-07-27 16:56:36,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=39780.0, ans=0.125 +2024-07-27 16:56:36,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39780.0, ans=0.0 +2024-07-27 16:56:39,562 INFO [train.py:1114] (2/4) Epoch 3, batch 9400, loss[loss=0.2753, simple_loss=0.3501, pruned_loss=0.1003, over 4683.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3477, pruned_loss=0.1046, over 932815.40 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:39,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=39793.333333333336, ans=0.2 +2024-07-27 16:56:46,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=39806.666666666664, ans=0.125 +2024-07-27 16:56:49,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=39806.666666666664, ans=0.125 +2024-07-27 16:57:04,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.45 vs. 
limit=22.5 +2024-07-27 16:57:12,027 INFO [train.py:1114] (2/4) Epoch 3, batch 9450, loss[loss=0.2786, simple_loss=0.3391, pruned_loss=0.109, over 4803.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3485, pruned_loss=0.1058, over 932830.89 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:57:16,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=39860.0, ans=0.1 +2024-07-27 16:57:25,847 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.734e+01 7.503e+01 8.983e+01 1.272e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 16:57:30,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.11 vs. limit=22.5 +2024-07-27 16:57:37,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=39900.0, ans=0.125 +2024-07-27 16:57:45,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=39913.333333333336, ans=0.002192753623188405 +2024-07-27 16:57:47,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39913.333333333336, ans=0.125 +2024-07-27 16:58:03,401 INFO [train.py:1114] (2/4) Epoch 3, batch 9500, loss[loss=0.2452, simple_loss=0.3189, pruned_loss=0.08574, over 4695.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3488, pruned_loss=0.1054, over 934712.20 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:58:30,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.45 vs. limit=22.5 +2024-07-27 16:58:30,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.12 vs. limit=15.0 +2024-07-27 16:58:35,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=39953.333333333336, ans=0.0 +2024-07-27 16:58:40,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39953.333333333336, ans=0.125 +2024-07-27 16:58:54,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=39966.666666666664, ans=0.0 +2024-07-27 16:59:12,993 INFO [train.py:1114] (2/4) Epoch 3, batch 9550, loss[loss=0.2862, simple_loss=0.3384, pruned_loss=0.117, over 4781.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3486, pruned_loss=0.1056, over 932753.54 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:59:21,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=40006.666666666664, ans=0.002172463768115942 +2024-07-27 16:59:26,039 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.187e+01 6.649e+01 7.565e+01 8.321e+01 1.560e+02, threshold=1.513e+02, percent-clipped=2.0 +2024-07-27 16:59:37,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.55 vs. 
limit=10.0 +2024-07-27 16:59:40,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40046.666666666664, ans=0.1 +2024-07-27 16:59:44,507 INFO [train.py:1114] (2/4) Epoch 3, batch 9600, loss[loss=0.4165, simple_loss=0.4196, pruned_loss=0.2067, over 3357.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3477, pruned_loss=0.1052, over 931575.46 frames. ], batch size: 35, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 16:59:50,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40073.333333333336, ans=0.1 +2024-07-27 17:00:20,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.24 vs. limit=10.0 +2024-07-27 17:00:30,274 INFO [train.py:1114] (2/4) Epoch 3, batch 9650, loss[loss=0.3334, simple_loss=0.3933, pruned_loss=0.1368, over 4844.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.349, pruned_loss=0.1062, over 927910.57 frames. ], batch size: 16, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:00:37,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=40140.0, ans=0.09899494936611666 +2024-07-27 17:00:43,025 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.82 vs. limit=15.0 +2024-07-27 17:00:44,533 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.272e+01 6.641e+01 7.549e+01 8.923e+01 1.361e+02, threshold=1.510e+02, percent-clipped=0.0 +2024-07-27 17:00:45,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=40153.333333333336, ans=0.0 +2024-07-27 17:00:47,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=40153.333333333336, ans=0.125 +2024-07-27 17:00:48,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=40153.333333333336, ans=0.125 +2024-07-27 17:00:48,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=40153.333333333336, ans=0.1 +2024-07-27 17:00:51,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.01 vs. limit=15.0 +2024-07-27 17:01:04,420 INFO [train.py:1114] (2/4) Epoch 3, batch 9700, loss[loss=0.3366, simple_loss=0.3942, pruned_loss=0.1395, over 4283.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3484, pruned_loss=0.1056, over 925435.86 frames. ], batch size: 25, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:18,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.99 vs. 
limit=22.5 +2024-07-27 17:01:19,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40206.666666666664, ans=0.1 +2024-07-27 17:01:21,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40220.0, ans=0.1 +2024-07-27 17:01:38,007 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:01:39,651 INFO [train.py:1114] (2/4) Epoch 3, batch 9750, loss[loss=0.349, simple_loss=0.4096, pruned_loss=0.1442, over 4663.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3479, pruned_loss=0.1051, over 925841.08 frames. ], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:43,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40260.0, ans=0.125 +2024-07-27 17:01:53,663 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.325e+01 6.567e+01 7.224e+01 8.540e+01 1.142e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 17:01:57,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.87 vs. limit=15.0 +2024-07-27 17:02:04,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=40300.0, ans=22.5 +2024-07-27 17:02:12,034 INFO [train.py:1114] (2/4) Epoch 3, batch 9800, loss[loss=0.2936, simple_loss=0.3497, pruned_loss=0.1188, over 4707.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3471, pruned_loss=0.1047, over 925473.35 frames. ], batch size: 12, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:27,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0 +2024-07-27 17:02:32,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=40353.333333333336, ans=0.125 +2024-07-27 17:02:34,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=40353.333333333336, ans=0.035 +2024-07-27 17:02:37,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.09 vs. limit=15.0 +2024-07-27 17:02:46,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=40380.0, ans=10.0 +2024-07-27 17:02:50,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.47 vs. limit=15.0 +2024-07-27 17:02:51,230 INFO [train.py:1114] (2/4) Epoch 3, batch 9850, loss[loss=0.2869, simple_loss=0.3577, pruned_loss=0.1081, over 4900.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3473, pruned_loss=0.1042, over 927686.15 frames. ], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:56,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=40393.333333333336, ans=0.015 +2024-07-27 17:03:05,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.17 vs. 
limit=15.0 +2024-07-27 17:03:08,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=40406.666666666664, ans=0.125 +2024-07-27 17:03:12,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=40406.666666666664, ans=0.5 +2024-07-27 17:03:15,223 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.644e+01 7.357e+01 1.003e+02 1.564e+02, threshold=1.471e+02, percent-clipped=2.0 +2024-07-27 17:03:18,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=40420.0, ans=0.0 +2024-07-27 17:03:19,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=40420.0, ans=0.125 +2024-07-27 17:03:20,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40433.333333333336, ans=0.125 +2024-07-27 17:03:20,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.18 vs. limit=12.0 +2024-07-27 17:03:27,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=40446.666666666664, ans=0.0 +2024-07-27 17:03:32,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.21 vs. limit=15.0 +2024-07-27 17:03:33,888 INFO [train.py:1114] (2/4) Epoch 3, batch 9900, loss[loss=0.2673, simple_loss=0.3317, pruned_loss=0.1014, over 4853.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3495, pruned_loss=0.1061, over 926779.49 frames. ], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:04:05,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=40486.666666666664, ans=0.1 +2024-07-27 17:04:13,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.08 vs. limit=22.5 +2024-07-27 17:04:22,285 INFO [train.py:1114] (2/4) Epoch 3, batch 9950, loss[loss=0.2227, simple_loss=0.2859, pruned_loss=0.07979, over 4802.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3499, pruned_loss=0.1065, over 930008.56 frames. ], batch size: 11, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:04:22,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=40526.666666666664, ans=10.0 +2024-07-27 17:04:34,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=34.35 vs. 
limit=22.5 +2024-07-27 17:04:35,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40540.0, ans=0.1 +2024-07-27 17:04:39,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=40553.333333333336, ans=0.125 +2024-07-27 17:04:42,176 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.432e+01 7.065e+01 7.952e+01 9.840e+01 1.527e+02, threshold=1.590e+02, percent-clipped=1.0 +2024-07-27 17:04:47,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=40566.666666666664, ans=0.2 +2024-07-27 17:04:53,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=40580.0, ans=0.002047826086956521 +2024-07-27 17:04:55,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40580.0, ans=0.125 +2024-07-27 17:04:57,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=40580.0, ans=0.0 +2024-07-27 17:04:59,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.43 vs. limit=15.0 +2024-07-27 17:05:00,003 INFO [train.py:1114] (2/4) Epoch 3, batch 10000, loss[loss=0.2961, simple_loss=0.3716, pruned_loss=0.1103, over 4635.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3531, pruned_loss=0.108, over 927023.79 frames. ], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:16,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.53 vs. limit=10.0 +2024-07-27 17:05:32,230 INFO [train.py:1114] (2/4) Epoch 3, batch 10050, loss[loss=0.3278, simple_loss=0.3715, pruned_loss=0.1421, over 3309.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3572, pruned_loss=0.1112, over 914703.00 frames. ], batch size: 35, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:47,414 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.930e+01 7.073e+01 7.900e+01 8.546e+01 1.194e+02, threshold=1.580e+02, percent-clipped=0.0 +2024-07-27 17:05:59,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=40700.0, ans=10.0 +2024-07-27 17:06:07,146 INFO [train.py:1114] (2/4) Epoch 3, batch 10100, loss[loss=0.3212, simple_loss=0.3753, pruned_loss=0.1336, over 3343.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3645, pruned_loss=0.1199, over 862713.95 frames. 
], batch size: 35, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:09,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=40726.666666666664, ans=0.125 +2024-07-27 17:06:15,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40740.0, ans=0.1 +2024-07-27 17:06:26,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=40753.333333333336, ans=0.125 +2024-07-27 17:06:27,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=40766.666666666664, ans=0.002007246376811595 +2024-07-27 17:06:31,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=40766.666666666664, ans=0.1 +2024-07-27 17:06:31,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=40766.666666666664, ans=0.125 +2024-07-27 17:06:34,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=40780.0, ans=0.2 +2024-07-27 17:06:40,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=40780.0, ans=0.125 +2024-07-27 17:06:41,345 INFO [train.py:1114] (2/4) Epoch 3, batch 10150, loss[loss=0.3187, simple_loss=0.3694, pruned_loss=0.1341, over 3477.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3688, pruned_loss=0.1251, over 822860.56 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:43,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40793.333333333336, ans=0.125 +2024-07-27 17:07:09,201 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.948e+01 6.999e+01 7.537e+01 8.281e+01 1.738e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 17:07:15,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=40833.333333333336, ans=0.125 +2024-07-27 17:07:23,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40833.333333333336, ans=0.1 +2024-07-27 17:07:32,467 INFO [train.py:1114] (2/4) Epoch 3, batch 10200, loss[loss=0.3419, simple_loss=0.373, pruned_loss=0.1554, over 3225.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3722, pruned_loss=0.1293, over 789858.08 frames. ], batch size: 35, lr: 2.04e-02, grad_scale: 16.0 +2024-07-27 17:07:35,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=40860.0, ans=0.001986956521739131 +2024-07-27 17:07:36,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=40860.0, ans=0.025 +2024-07-27 17:08:50,761 INFO [train.py:1114] (2/4) Epoch 4, batch 0, loss[loss=0.227, simple_loss=0.3085, pruned_loss=0.07271, over 4855.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3085, pruned_loss=0.07271, over 4855.00 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:08:50,762 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 17:09:02,630 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2303, simple_loss=0.3319, pruned_loss=0.06433, over 944034.00 frames. 
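+
+The WARNING [optim.py:487] lines scattered through this log summarize an adaptive gradient-clipping scheme: the five "grad-norm quartiles" values are quantiles (min, 25%, median, 75%, max) of recent per-step gradient norms, "threshold" matches Clipping_scale=2.0 times the logged median (for example 2 x 7.537e+01 = 1.507e+02 in the warning above), and "percent-clipped" is the share of recent steps whose gradient norm exceeded the threshold. A minimal sketch of that rule, assuming a fixed-size window of recent norms; the window length and reporting cadence are not visible in the log:
+
+```python
+# Sketch of median-based adaptive gradient clipping, matching the
+# "Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=..."
+# warnings. Only the threshold rule (clipping_scale x median of recent norms)
+# is taken from the logged numbers; the window size is an assumption.
+from collections import deque
+
+import torch
+
+class MedianGradClipper:
+    """Clip gradients to clipping_scale x the median of recent gradient norms."""
+
+    def __init__(self, clipping_scale: float = 2.0, window: int = 200):
+        self.clipping_scale = clipping_scale
+        self.norms = deque(maxlen=window)   # recent global gradient norms
+        self.num_clipped = 0
+        self.num_steps = 0
+
+    def step(self, params) -> None:
+        grads = [p.grad for p in params if p.grad is not None]
+        # Global 2-norm over all parameter gradients.
+        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
+        self.norms.append(norm)
+        self.num_steps += 1
+        median = sorted(self.norms)[len(self.norms) // 2]
+        threshold = self.clipping_scale * median
+        if norm > threshold:
+            self.num_clipped += 1
+            for g in grads:
+                g.mul_(threshold / norm)   # rescale in place, as clip_grad_norm_ does
+
+    def percent_clipped(self) -> float:
+        # Corresponds to the "percent-clipped" figure in the warnings.
+        return 100.0 * self.num_clipped / max(self.num_steps, 1)
+```
+
+Unlike a fixed clip value, this threshold tracks the optimizer's own statistics, which is why the logged threshold drifts between roughly 1.4e+02 and 1.7e+02 across these warnings as the gradient-norm distribution shifts.
+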
+2024-07-27 17:09:02,630 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 17:09:32,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=40917.333333333336, ans=0.125 +2024-07-27 17:09:45,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40944.0, ans=0.0 +2024-07-27 17:09:48,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=40944.0, ans=0.125 +2024-07-27 17:09:56,713 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.703e+01 7.240e+01 7.919e+01 1.564e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 17:09:57,450 INFO [train.py:1114] (2/4) Epoch 4, batch 50, loss[loss=0.225, simple_loss=0.2959, pruned_loss=0.07703, over 4602.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.353, pruned_loss=0.1082, over 206440.24 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:06,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=40970.666666666664, ans=0.09899494936611666 +2024-07-27 17:10:14,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40984.0, ans=0.125 +2024-07-27 17:10:18,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=40997.333333333336, ans=0.0019571014492753627 +2024-07-27 17:10:20,812 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.67 vs. limit=22.5 +2024-07-27 17:10:24,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41010.666666666664, ans=0.0 +2024-07-27 17:10:30,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41010.666666666664, ans=0.1 +2024-07-27 17:10:31,381 INFO [train.py:1114] (2/4) Epoch 4, batch 100, loss[loss=0.3061, simple_loss=0.379, pruned_loss=0.1166, over 4646.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3537, pruned_loss=0.1066, over 364891.94 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:32,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41024.0, ans=0.125 +2024-07-27 17:10:36,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=41024.0, ans=0.125 +2024-07-27 17:10:39,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41037.333333333336, ans=0.125 +2024-07-27 17:10:42,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=41037.333333333336, ans=0.0 +2024-07-27 17:10:46,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=41050.666666666664, ans=0.025 +2024-07-27 17:10:47,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.10 vs. 
limit=15.0 +2024-07-27 17:11:04,709 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.318e+01 6.667e+01 8.145e+01 9.581e+01 1.407e+02, threshold=1.629e+02, percent-clipped=0.0 +2024-07-27 17:15:47,220 INFO [train.py:1114] (2/4) Epoch 4, batch 150, loss[loss=0.214, simple_loss=0.2956, pruned_loss=0.06622, over 4613.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3475, pruned_loss=0.1024, over 493603.16 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:04,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41090.666666666664, ans=0.1 +2024-07-27 17:16:05,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41090.666666666664, ans=0.0 +2024-07-27 17:16:10,668 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.63 vs. limit=22.5 +2024-07-27 17:16:26,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=41130.666666666664, ans=0.0019281159420289854 +2024-07-27 17:16:36,435 INFO [train.py:1114] (2/4) Epoch 4, batch 200, loss[loss=0.2728, simple_loss=0.3448, pruned_loss=0.1004, over 4432.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3443, pruned_loss=0.1019, over 592970.57 frames. ], batch size: 21, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:37,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=41157.333333333336, ans=0.0 +2024-07-27 17:16:37,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=41157.333333333336, ans=0.025 +2024-07-27 17:16:46,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=41170.666666666664, ans=0.125 +2024-07-27 17:16:49,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=41184.0, ans=0.025 +2024-07-27 17:16:51,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.20 vs. limit=15.0 +2024-07-27 17:16:51,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.42 vs. limit=15.0 +2024-07-27 17:16:57,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=41197.333333333336, ans=0.025 +2024-07-27 17:17:01,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41197.333333333336, ans=0.1 +2024-07-27 17:17:09,100 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+01 6.403e+01 7.504e+01 8.893e+01 1.315e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 17:17:09,904 INFO [train.py:1114] (2/4) Epoch 4, batch 250, loss[loss=0.2976, simple_loss=0.3699, pruned_loss=0.1127, over 4640.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3433, pruned_loss=0.101, over 669856.79 frames. 
], batch size: 16, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:17:16,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=41237.333333333336, ans=0.125 +2024-07-27 17:17:20,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=41237.333333333336, ans=0.0 +2024-07-27 17:17:25,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=41250.666666666664, ans=0.125 +2024-07-27 17:17:42,652 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-27 17:17:43,509 INFO [train.py:1114] (2/4) Epoch 4, batch 300, loss[loss=0.2609, simple_loss=0.3426, pruned_loss=0.08963, over 4797.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3426, pruned_loss=0.1011, over 729204.75 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:13,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=41344.0, ans=0.04949747468305833 +2024-07-27 17:18:18,363 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.327e+01 6.726e+01 7.955e+01 9.020e+01 1.256e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 17:18:18,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=41357.333333333336, ans=0.2 +2024-07-27 17:18:19,069 INFO [train.py:1114] (2/4) Epoch 4, batch 350, loss[loss=0.2884, simple_loss=0.3532, pruned_loss=0.1117, over 4932.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3437, pruned_loss=0.1012, over 775461.22 frames. ], batch size: 12, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:19,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=41357.333333333336, ans=0.0 +2024-07-27 17:18:27,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41370.666666666664, ans=0.1 +2024-07-27 17:18:52,193 INFO [train.py:1114] (2/4) Epoch 4, batch 400, loss[loss=0.3087, simple_loss=0.3804, pruned_loss=0.1185, over 4688.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3433, pruned_loss=0.1003, over 813094.88 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:59,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=41437.333333333336, ans=0.0 +2024-07-27 17:19:03,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=41437.333333333336, ans=0.0 +2024-07-27 17:19:15,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=41464.0, ans=0.2 +2024-07-27 17:19:24,315 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:19:24,669 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.564e+01 7.397e+01 8.870e+01 1.499e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 17:19:25,346 INFO [train.py:1114] (2/4) Epoch 4, batch 450, loss[loss=0.2986, simple_loss=0.3725, pruned_loss=0.1124, over 4633.00 frames. 
], tot_loss[loss=0.2721, simple_loss=0.343, pruned_loss=0.1006, over 838587.04 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:19:33,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=41504.0, ans=0.125 +2024-07-27 17:19:34,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=41504.0, ans=0.5 +2024-07-27 17:19:47,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=41530.666666666664, ans=0.025 +2024-07-27 17:19:48,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=41530.666666666664, ans=0.09899494936611666 +2024-07-27 17:19:56,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=41530.666666666664, ans=0.125 +2024-07-27 17:20:04,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41544.0, ans=0.125 +2024-07-27 17:20:09,643 INFO [train.py:1114] (2/4) Epoch 4, batch 500, loss[loss=0.3074, simple_loss=0.3758, pruned_loss=0.1195, over 4674.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3411, pruned_loss=0.09933, over 861031.94 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:13,759 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:20:19,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41570.666666666664, ans=0.125 +2024-07-27 17:20:30,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.56 vs. limit=6.0 +2024-07-27 17:20:41,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=41610.666666666664, ans=0.125 +2024-07-27 17:20:45,340 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.267e+01 7.385e+01 9.027e+01 1.460e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 17:20:46,085 INFO [train.py:1114] (2/4) Epoch 4, batch 550, loss[loss=0.3061, simple_loss=0.3502, pruned_loss=0.1311, over 4644.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3406, pruned_loss=0.0994, over 877138.62 frames. ], batch size: 17, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:49,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=41624.0, ans=0.2 +2024-07-27 17:20:52,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=41637.333333333336, ans=0.025 +2024-07-27 17:20:57,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.02 vs. 
limit=12.0 +2024-07-27 17:21:00,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=41650.666666666664, ans=0.0 +2024-07-27 17:21:07,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=41664.0, ans=0.09899494936611666 +2024-07-27 17:21:09,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=41664.0, ans=0.125 +2024-07-27 17:21:17,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.03 vs. limit=15.0 +2024-07-27 17:21:18,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=41677.333333333336, ans=0.0018092753623188407 +2024-07-27 17:21:19,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=41677.333333333336, ans=0.125 +2024-07-27 17:21:21,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=41690.666666666664, ans=0.125 +2024-07-27 17:21:21,546 INFO [train.py:1114] (2/4) Epoch 4, batch 600, loss[loss=0.3042, simple_loss=0.3653, pruned_loss=0.1215, over 4610.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3423, pruned_loss=0.1001, over 892166.96 frames. ], batch size: 16, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:21:26,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=41690.666666666664, ans=0.125 +2024-07-27 17:21:29,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=41704.0, ans=0.125 +2024-07-27 17:21:32,419 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.13 vs. limit=12.0 +2024-07-27 17:21:34,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=41717.333333333336, ans=0.0018005797101449273 +2024-07-27 17:21:37,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=41717.333333333336, ans=0.125 +2024-07-27 17:21:46,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=41730.666666666664, ans=0.00179768115942029 +2024-07-27 17:21:53,706 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.519e+01 6.252e+01 7.090e+01 7.980e+01 1.452e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 17:21:54,399 INFO [train.py:1114] (2/4) Epoch 4, batch 650, loss[loss=0.2544, simple_loss=0.3302, pruned_loss=0.08928, over 4749.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3408, pruned_loss=0.09923, over 903642.96 frames. 
], batch size: 13, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:21:55,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=41757.333333333336, ans=0.0 +2024-07-27 17:22:01,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=41770.666666666664, ans=0.125 +2024-07-27 17:22:11,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=41784.0, ans=0.125 +2024-07-27 17:22:18,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=41797.333333333336, ans=0.2 +2024-07-27 17:22:18,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=41797.333333333336, ans=0.04949747468305833 +2024-07-27 17:22:22,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.45 vs. limit=10.0 +2024-07-27 17:22:25,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=41810.666666666664, ans=0.125 +2024-07-27 17:22:26,544 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:22:27,765 INFO [train.py:1114] (2/4) Epoch 4, batch 700, loss[loss=0.216, simple_loss=0.3021, pruned_loss=0.06492, over 4640.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3399, pruned_loss=0.09874, over 911712.50 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:22:35,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=41837.333333333336, ans=0.125 +2024-07-27 17:22:55,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=41877.333333333336, ans=0.2 +2024-07-27 17:22:57,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=41877.333333333336, ans=0.0017657971014492756 +2024-07-27 17:22:59,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.29 vs. limit=15.0 +2024-07-27 17:23:02,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.371e+01 6.772e+01 7.672e+01 9.334e+01 1.432e+02, threshold=1.534e+02, percent-clipped=1.0 +2024-07-27 17:23:02,193 INFO [train.py:1114] (2/4) Epoch 4, batch 750, loss[loss=0.2661, simple_loss=0.3428, pruned_loss=0.09475, over 4703.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3399, pruned_loss=0.09855, over 918037.62 frames. 
], batch size: 13, lr: 1.89e-02, grad_scale: 16.0 +2024-07-27 17:23:02,266 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:23:08,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41904.0, ans=0.1 +2024-07-27 17:23:15,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=41917.333333333336, ans=0.125 +2024-07-27 17:23:17,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=41917.333333333336, ans=0.5 +2024-07-27 17:23:23,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=41930.666666666664, ans=0.0 +2024-07-27 17:23:37,969 INFO [train.py:1114] (2/4) Epoch 4, batch 800, loss[loss=0.2597, simple_loss=0.313, pruned_loss=0.1032, over 4833.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3409, pruned_loss=0.09968, over 923464.90 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:23:57,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=41984.0, ans=0.025 +2024-07-27 17:24:05,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=41997.333333333336, ans=0.2 +2024-07-27 17:24:13,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=42010.666666666664, ans=0.0017368115942029 +2024-07-27 17:24:18,031 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.573e+01 6.422e+01 7.236e+01 8.133e+01 1.458e+02, threshold=1.447e+02, percent-clipped=0.0 +2024-07-27 17:24:18,262 INFO [train.py:1114] (2/4) Epoch 4, batch 850, loss[loss=0.2979, simple_loss=0.3805, pruned_loss=0.1076, over 4673.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3402, pruned_loss=0.09934, over 927252.39 frames. ], batch size: 14, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:24:40,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=42050.666666666664, ans=0.0 +2024-07-27 17:24:42,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=42050.666666666664, ans=0.2 +2024-07-27 17:24:44,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=42050.666666666664, ans=0.125 +2024-07-27 17:24:51,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42077.333333333336, ans=0.1 +2024-07-27 17:24:52,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=42077.333333333336, ans=0.025 +2024-07-27 17:24:58,796 INFO [train.py:1114] (2/4) Epoch 4, batch 900, loss[loss=0.2708, simple_loss=0.3314, pruned_loss=0.1051, over 4855.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3403, pruned_loss=0.09977, over 927945.73 frames. 
], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:25:06,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=42104.0, ans=0.125 +2024-07-27 17:25:07,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=42104.0, ans=0.025 +2024-07-27 17:25:12,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=42117.333333333336, ans=0.125 +2024-07-27 17:25:13,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=42117.333333333336, ans=0.2 +2024-07-27 17:25:34,686 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.294e+01 6.285e+01 6.831e+01 7.468e+01 1.764e+02, threshold=1.366e+02, percent-clipped=2.0 +2024-07-27 17:25:34,719 INFO [train.py:1114] (2/4) Epoch 4, batch 950, loss[loss=0.2528, simple_loss=0.3282, pruned_loss=0.08874, over 4773.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3401, pruned_loss=0.09903, over 929406.68 frames. ], batch size: 12, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:25:41,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.99 vs. limit=22.5 +2024-07-27 17:26:02,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42197.333333333336, ans=0.1 +2024-07-27 17:26:12,295 INFO [train.py:1114] (2/4) Epoch 4, batch 1000, loss[loss=0.2418, simple_loss=0.3078, pruned_loss=0.08789, over 4963.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3416, pruned_loss=0.09985, over 929337.04 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:26:16,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=42224.0, ans=22.5 +2024-07-27 17:26:45,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=42277.333333333336, ans=0.1 +2024-07-27 17:26:51,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=42277.333333333336, ans=0.125 +2024-07-27 17:26:52,348 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.274e+01 6.992e+01 7.907e+01 1.150e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 17:26:52,381 INFO [train.py:1114] (2/4) Epoch 4, batch 1050, loss[loss=0.2754, simple_loss=0.3535, pruned_loss=0.09866, over 4869.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3399, pruned_loss=0.09864, over 931944.23 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:04,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42304.0, ans=0.1 +2024-07-27 17:27:05,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.19 vs. 
limit=22.5 +2024-07-27 17:27:13,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=42317.333333333336, ans=0.2 +2024-07-27 17:27:24,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42330.666666666664, ans=0.1 +2024-07-27 17:27:35,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=42330.666666666664, ans=0.125 +2024-07-27 17:27:36,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.63 vs. limit=22.5 +2024-07-27 17:27:44,196 INFO [train.py:1114] (2/4) Epoch 4, batch 1100, loss[loss=0.2793, simple_loss=0.3543, pruned_loss=0.1022, over 4895.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3405, pruned_loss=0.09909, over 934461.94 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:44,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=42357.333333333336, ans=0.0 +2024-07-27 17:27:49,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42357.333333333336, ans=0.125 +2024-07-27 17:27:59,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=42370.666666666664, ans=0.0 +2024-07-27 17:28:01,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=42384.0, ans=0.2 +2024-07-27 17:28:15,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=42410.666666666664, ans=0.0 +2024-07-27 17:28:22,188 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.208e+01 6.982e+01 7.743e+01 1.395e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 17:28:22,221 INFO [train.py:1114] (2/4) Epoch 4, batch 1150, loss[loss=0.271, simple_loss=0.3531, pruned_loss=0.09442, over 4897.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3411, pruned_loss=0.09938, over 934627.91 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:28:22,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=42424.0, ans=0.0 +2024-07-27 17:28:23,729 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=8.529e+00 +2024-07-27 17:28:44,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=42424.0, ans=0.0 +2024-07-27 17:28:47,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.55 vs. 
limit=15.0 +2024-07-27 17:28:48,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=42437.333333333336, ans=0.0 +2024-07-27 17:28:48,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=42437.333333333336, ans=0.5 +2024-07-27 17:29:06,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=42464.0, ans=0.0 +2024-07-27 17:29:13,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42477.333333333336, ans=0.1 +2024-07-27 17:29:24,984 INFO [train.py:1114] (2/4) Epoch 4, batch 1200, loss[loss=0.243, simple_loss=0.3351, pruned_loss=0.07547, over 4883.00 frames. ], tot_loss[loss=0.2714, simple_loss=0.3425, pruned_loss=0.1002, over 933667.54 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:29:26,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.14 vs. limit=22.5 +2024-07-27 17:29:28,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.65 vs. limit=22.5 +2024-07-27 17:29:44,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=42517.333333333336, ans=0.125 +2024-07-27 17:29:47,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=42517.333333333336, ans=0.0 +2024-07-27 17:29:57,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-07-27 17:30:02,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=42530.666666666664, ans=0.2 +2024-07-27 17:30:03,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=42544.0, ans=0.0 +2024-07-27 17:30:11,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=42557.333333333336, ans=0.2 +2024-07-27 17:30:12,128 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.401e+01 6.877e+01 7.526e+01 8.642e+01 1.436e+02, threshold=1.505e+02, percent-clipped=1.0 +2024-07-27 17:30:12,161 INFO [train.py:1114] (2/4) Epoch 4, batch 1250, loss[loss=0.2744, simple_loss=0.3492, pruned_loss=0.09978, over 4803.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3423, pruned_loss=0.09952, over 937585.93 frames. ], batch size: 15, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:30:21,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=42570.666666666664, ans=0.125 +2024-07-27 17:30:23,567 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.67 vs. 
limit=22.5 +2024-07-27 17:30:24,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=42570.666666666664, ans=0.125 +2024-07-27 17:30:36,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=42584.0, ans=0.0 +2024-07-27 17:30:37,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=42597.333333333336, ans=0.125 +2024-07-27 17:30:39,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=42597.333333333336, ans=0.2 +2024-07-27 17:30:41,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-27 17:30:55,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42610.666666666664, ans=0.1 +2024-07-27 17:30:57,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=42624.0, ans=0.125 +2024-07-27 17:30:57,772 INFO [train.py:1114] (2/4) Epoch 4, batch 1300, loss[loss=0.3146, simple_loss=0.3902, pruned_loss=0.1195, over 4676.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3409, pruned_loss=0.09881, over 939154.23 frames. ], batch size: 19, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:30:57,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42624.0, ans=0.1 +2024-07-27 17:31:08,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.16 vs. limit=12.0 +2024-07-27 17:31:09,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=42637.333333333336, ans=0.0016005797101449268 +2024-07-27 17:31:58,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=42677.333333333336, ans=0.125 +2024-07-27 17:31:58,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=42677.333333333336, ans=0.2 +2024-07-27 17:31:59,663 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.286e+01 6.475e+01 6.974e+01 8.075e+01 1.412e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 17:31:59,696 INFO [train.py:1114] (2/4) Epoch 4, batch 1350, loss[loss=0.2762, simple_loss=0.3432, pruned_loss=0.1046, over 4758.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3394, pruned_loss=0.0981, over 941207.21 frames. ], batch size: 13, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:32:04,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.39 vs. 
limit=22.5 +2024-07-27 17:32:05,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=42690.666666666664, ans=0.05 +2024-07-27 17:32:09,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=42704.0, ans=0.05 +2024-07-27 17:32:26,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=42704.0, ans=0.0 +2024-07-27 17:32:35,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=42730.666666666664, ans=0.125 +2024-07-27 17:32:36,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.24 vs. limit=22.5 +2024-07-27 17:32:49,803 INFO [train.py:1114] (2/4) Epoch 4, batch 1400, loss[loss=0.2417, simple_loss=0.3026, pruned_loss=0.09043, over 4723.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.34, pruned_loss=0.0982, over 943206.12 frames. ], batch size: 11, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:32:51,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42757.333333333336, ans=0.125 +2024-07-27 17:32:59,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=42770.666666666664, ans=0.125 +2024-07-27 17:33:59,440 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.546e+01 6.502e+01 7.039e+01 8.275e+01 1.312e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 17:33:59,473 INFO [train.py:1114] (2/4) Epoch 4, batch 1450, loss[loss=0.2599, simple_loss=0.3387, pruned_loss=0.09059, over 4668.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3408, pruned_loss=0.09831, over 943285.77 frames. ], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:34:15,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=42837.333333333336, ans=0.125 +2024-07-27 17:34:20,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0 +2024-07-27 17:34:30,430 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.53 vs. limit=15.0 +2024-07-27 17:34:31,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=42864.0, ans=0.0015513043478260873 +2024-07-27 17:34:54,666 INFO [train.py:1114] (2/4) Epoch 4, batch 1500, loss[loss=0.224, simple_loss=0.3093, pruned_loss=0.06938, over 4809.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3408, pruned_loss=0.09862, over 942550.56 frames. 
], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:35:00,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=42890.666666666664, ans=0.04949747468305833 +2024-07-27 17:35:21,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=42917.333333333336, ans=0.125 +2024-07-27 17:35:26,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=42917.333333333336, ans=0.1 +2024-07-27 17:35:43,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.74 vs. limit=15.0 +2024-07-27 17:35:46,844 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.518e+01 6.513e+01 7.459e+01 8.473e+01 1.359e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 17:35:46,877 INFO [train.py:1114] (2/4) Epoch 4, batch 1550, loss[loss=0.2524, simple_loss=0.3302, pruned_loss=0.08725, over 4894.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3411, pruned_loss=0.09927, over 938489.76 frames. ], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:36:00,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.39 vs. limit=22.5 +2024-07-27 17:36:16,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=42997.333333333336, ans=0.125 +2024-07-27 17:36:18,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43010.666666666664, ans=0.1 +2024-07-27 17:36:19,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=43010.666666666664, ans=0.0 +2024-07-27 17:36:22,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.42 vs. limit=15.0 +2024-07-27 17:36:25,514 INFO [train.py:1114] (2/4) Epoch 4, batch 1600, loss[loss=0.2779, simple_loss=0.3495, pruned_loss=0.1031, over 4869.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3406, pruned_loss=0.09904, over 937312.61 frames. ], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:36:31,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.39 vs. limit=15.0 +2024-07-27 17:36:36,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43024.0, ans=0.1 +2024-07-27 17:36:40,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43037.333333333336, ans=0.125 +2024-07-27 17:36:57,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=43077.333333333336, ans=0.035 +2024-07-27 17:37:04,656 INFO [train.py:1114] (2/4) Epoch 4, batch 1650, loss[loss=0.2448, simple_loss=0.3229, pruned_loss=0.08336, over 4665.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.341, pruned_loss=0.0997, over 937187.64 frames. 
], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:05,272 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.309e+01 6.450e+01 7.502e+01 9.535e+01 1.419e+02, threshold=1.500e+02, percent-clipped=0.0 +2024-07-27 17:37:29,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.85 vs. limit=15.0 +2024-07-27 17:37:37,803 INFO [train.py:1114] (2/4) Epoch 4, batch 1700, loss[loss=0.2223, simple_loss=0.2979, pruned_loss=0.07334, over 4694.00 frames. ], tot_loss[loss=0.2693, simple_loss=0.3404, pruned_loss=0.09908, over 938960.75 frames. ], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:38,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=15.0 +2024-07-27 17:37:42,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=43157.333333333336, ans=0.0014875362318840565 +2024-07-27 17:37:43,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.03 vs. limit=15.0 +2024-07-27 17:37:54,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=43184.0, ans=0.0 +2024-07-27 17:37:54,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=43184.0, ans=0.02 +2024-07-27 17:38:01,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=43197.333333333336, ans=0.0 +2024-07-27 17:38:03,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=43197.333333333336, ans=0.0014788405797101448 +2024-07-27 17:38:04,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.84 vs. limit=22.5 +2024-07-27 17:38:05,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=43210.666666666664, ans=0.125 +2024-07-27 17:38:06,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=43210.666666666664, ans=0.125 +2024-07-27 17:38:07,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=43210.666666666664, ans=0.125 +2024-07-27 17:38:12,055 INFO [train.py:1114] (2/4) Epoch 4, batch 1750, loss[loss=0.2139, simple_loss=0.282, pruned_loss=0.07287, over 4806.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3396, pruned_loss=0.09867, over 940129.94 frames. 
], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:14,588 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.524e+01 6.769e+01 7.815e+01 9.643e+01 1.575e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 17:38:24,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=43237.333333333336, ans=0.2 +2024-07-27 17:38:46,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=43277.333333333336, ans=0.001461449275362318 +2024-07-27 17:38:49,162 INFO [train.py:1114] (2/4) Epoch 4, batch 1800, loss[loss=0.2831, simple_loss=0.3594, pruned_loss=0.1034, over 4638.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.3394, pruned_loss=0.09883, over 941003.51 frames. ], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:52,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43290.666666666664, ans=0.1 +2024-07-27 17:38:52,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=43290.666666666664, ans=0.0014585507246376826 +2024-07-27 17:39:05,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=43317.333333333336, ans=0.015 +2024-07-27 17:39:10,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=43330.666666666664, ans=0.125 +2024-07-27 17:39:23,346 INFO [train.py:1114] (2/4) Epoch 4, batch 1850, loss[loss=0.3576, simple_loss=0.4141, pruned_loss=0.1505, over 4821.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.341, pruned_loss=0.09997, over 940955.94 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:39:23,918 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.740e+01 7.721e+01 9.480e+01 1.911e+02, threshold=1.544e+02, percent-clipped=3.0 +2024-07-27 17:39:28,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=43357.333333333336, ans=0.0 +2024-07-27 17:39:48,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=43397.333333333336, ans=0.05 +2024-07-27 17:39:53,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=43410.666666666664, ans=0.2 +2024-07-27 17:39:54,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=43410.666666666664, ans=0.125 +2024-07-27 17:39:54,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43410.666666666664, ans=0.125 +2024-07-27 17:39:54,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=43410.666666666664, ans=0.125 +2024-07-27 17:39:58,491 INFO [train.py:1114] (2/4) Epoch 4, batch 1900, loss[loss=0.2903, simple_loss=0.3565, pruned_loss=0.112, over 4664.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3407, pruned_loss=0.09987, over 942071.11 frames. 
], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:12,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=43437.333333333336, ans=0.125 +2024-07-27 17:40:12,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.54 vs. limit=15.0 +2024-07-27 17:40:17,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43450.666666666664, ans=0.1 +2024-07-27 17:40:22,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43464.0, ans=0.1 +2024-07-27 17:40:27,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43477.333333333336, ans=0.1 +2024-07-27 17:40:33,681 INFO [train.py:1114] (2/4) Epoch 4, batch 1950, loss[loss=0.2307, simple_loss=0.324, pruned_loss=0.0687, over 4900.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3419, pruned_loss=0.09998, over 944040.90 frames. ], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:34,313 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 6.498e+01 7.387e+01 8.650e+01 1.667e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 17:40:43,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=43490.666666666664, ans=0.125 +2024-07-27 17:41:02,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-07-27 17:41:06,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=43544.0, ans=0.125 +2024-07-27 17:41:17,159 INFO [train.py:1114] (2/4) Epoch 4, batch 2000, loss[loss=0.2747, simple_loss=0.3241, pruned_loss=0.1126, over 4813.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.342, pruned_loss=0.1004, over 941185.36 frames. ], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:24,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=43570.666666666664, ans=0.125 +2024-07-27 17:41:27,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43570.666666666664, ans=0.125 +2024-07-27 17:41:30,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=43584.0, ans=0.0013947826086956518 +2024-07-27 17:41:51,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=43610.666666666664, ans=0.125 +2024-07-27 17:41:52,683 INFO [train.py:1114] (2/4) Epoch 4, batch 2050, loss[loss=0.2174, simple_loss=0.2862, pruned_loss=0.0743, over 4620.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3403, pruned_loss=0.09925, over 939471.92 frames. 
], batch size: 11, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:41:53,323 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.522e+01 6.397e+01 6.971e+01 8.145e+01 1.317e+02, threshold=1.394e+02, percent-clipped=0.0
+2024-07-27 17:41:56,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-27 17:42:15,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0
+2024-07-27 17:42:19,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=43677.333333333336, ans=0.2
+2024-07-27 17:42:20,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=12.0
+2024-07-27 17:42:20,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.94 vs. limit=22.5
+2024-07-27 17:42:20,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=43677.333333333336, ans=0.1
+2024-07-27 17:42:22,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0
+2024-07-27 17:42:23,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.88 vs. limit=22.5
+2024-07-27 17:42:25,817 INFO [train.py:1114] (2/4) Epoch 4, batch 2100, loss[loss=0.2652, simple_loss=0.3423, pruned_loss=0.09401, over 4753.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3401, pruned_loss=0.09889, over 941109.07 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:42:27,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=43690.666666666664, ans=0.125
+2024-07-27 17:42:33,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=43704.0, ans=0.2
+2024-07-27 17:42:40,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43717.333333333336, ans=0.1
+2024-07-27 17:42:45,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43730.666666666664, ans=0.125
+2024-07-27 17:42:49,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.24 vs. limit=22.5
+2024-07-27 17:42:50,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=43730.666666666664, ans=0.0
+2024-07-27 17:42:55,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=43744.0, ans=0.025
+2024-07-27 17:42:58,984 INFO [train.py:1114] (2/4) Epoch 4, batch 2150, loss[loss=0.2567, simple_loss=0.3356, pruned_loss=0.08892, over 4894.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3395, pruned_loss=0.09811, over 944276.47 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:42:59,573 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.474e+01 6.533e+01 7.336e+01 8.956e+01 1.647e+02, threshold=1.467e+02, percent-clipped=5.0
+2024-07-27 17:43:01,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43757.333333333336, ans=0.1
+2024-07-27 17:43:05,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=43770.666666666664, ans=0.025
+2024-07-27 17:43:17,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=43784.0, ans=0.2
+2024-07-27 17:43:19,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-07-27 17:43:30,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=43810.666666666664, ans=0.125
+2024-07-27 17:43:32,597 INFO [train.py:1114] (2/4) Epoch 4, batch 2200, loss[loss=0.248, simple_loss=0.3228, pruned_loss=0.08655, over 4817.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3395, pruned_loss=0.09815, over 943629.80 frames. ], batch size: 14, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:43:33,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.92 vs. limit=15.0
+2024-07-27 17:43:40,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43837.333333333336, ans=0.125
+2024-07-27 17:43:45,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.13 vs. limit=15.0
+2024-07-27 17:44:09,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=43877.333333333336, ans=0.0013310144927536228
+2024-07-27 17:44:16,761 INFO [train.py:1114] (2/4) Epoch 4, batch 2250, loss[loss=0.2515, simple_loss=0.3232, pruned_loss=0.0899, over 4689.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3394, pruned_loss=0.09788, over 942417.95 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:44:17,406 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.403e+01 7.459e+01 9.142e+01 2.382e+02, threshold=1.492e+02, percent-clipped=1.0
+2024-07-27 17:44:25,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43904.0, ans=0.1
+2024-07-27 17:44:37,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=43904.0, ans=0.0
+2024-07-27 17:44:38,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=43917.333333333336, ans=0.125
+2024-07-27 17:44:59,210 INFO [train.py:1114] (2/4) Epoch 4, batch 2300, loss[loss=0.2408, simple_loss=0.3101, pruned_loss=0.08578, over 4942.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3384, pruned_loss=0.09716, over 939735.59 frames. ], batch size: 12, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:45:11,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=43957.333333333336, ans=0.125
+2024-07-27 17:45:11,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43957.333333333336, ans=0.1
+2024-07-27 17:45:11,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=43957.333333333336, ans=0.125
+2024-07-27 17:45:13,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=43970.666666666664, ans=0.125
+2024-07-27 17:45:14,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=43970.666666666664, ans=0.04949747468305833
+2024-07-27 17:45:23,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.09 vs. limit=15.0
+2024-07-27 17:45:24,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=43984.0, ans=0.1
+2024-07-27 17:45:34,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=44010.666666666664, ans=0.125
+2024-07-27 17:45:41,418 INFO [train.py:1114] (2/4) Epoch 4, batch 2350, loss[loss=0.3282, simple_loss=0.3947, pruned_loss=0.1309, over 4641.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.338, pruned_loss=0.0968, over 941480.60 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0
+2024-07-27 17:45:41,989 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 6.786e+01 8.508e+01 1.044e+02 1.776e+02, threshold=1.702e+02, percent-clipped=2.0
+2024-07-27 17:45:50,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=44037.333333333336, ans=0.0012962318840579693
+2024-07-27 17:46:04,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=44050.666666666664, ans=0.0
+2024-07-27 17:46:08,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=44064.0, ans=0.125
+2024-07-27 17:46:10,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=44064.0, ans=0.125
+2024-07-27 17:46:26,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=44077.333333333336, ans=0.2
+2024-07-27 17:46:28,013 INFO [train.py:1114] (2/4) Epoch 4, batch 2400, loss[loss=0.2305, simple_loss=0.2997, pruned_loss=0.08063, over 4631.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3379, pruned_loss=0.09649, over 941464.22 frames. ], batch size: 12, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:46:28,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=44090.666666666664, ans=0.0012846376811594205
+2024-07-27 17:46:29,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=15.0
+2024-07-27 17:46:32,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=44090.666666666664, ans=0.0
+2024-07-27 17:46:32,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44090.666666666664, ans=0.125
+2024-07-27 17:46:55,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0
+2024-07-27 17:46:57,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=44144.0, ans=10.0
+2024-07-27 17:47:01,648 INFO [train.py:1114] (2/4) Epoch 4, batch 2450, loss[loss=0.2738, simple_loss=0.3346, pruned_loss=0.1065, over 4692.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3391, pruned_loss=0.0975, over 937328.61 frames. ], batch size: 13, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:47:02,263 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.348e+01 7.314e+01 8.641e+01 1.426e+02, threshold=1.463e+02, percent-clipped=0.0
+2024-07-27 17:47:04,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=44157.333333333336, ans=0.125
+2024-07-27 17:47:04,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=44157.333333333336, ans=0.2
+2024-07-27 17:47:14,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=44170.666666666664, ans=0.125
+2024-07-27 17:47:14,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=44170.666666666664, ans=0.0
+2024-07-27 17:47:21,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44184.0, ans=0.1
+2024-07-27 17:47:25,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=44184.0, ans=0.2
+2024-07-27 17:47:39,912 INFO [train.py:1114] (2/4) Epoch 4, batch 2500, loss[loss=0.2691, simple_loss=0.3458, pruned_loss=0.09618, over 4800.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3389, pruned_loss=0.09769, over 939015.76 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:47:54,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=44250.666666666664, ans=0.2
+2024-07-27 17:47:54,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=44250.666666666664, ans=0.125
+2024-07-27 17:48:14,563 INFO [train.py:1114] (2/4) Epoch 4, batch 2550, loss[loss=0.2249, simple_loss=0.2785, pruned_loss=0.0856, over 4793.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3369, pruned_loss=0.09645, over 938512.28 frames. ], batch size: 11, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:48:15,139 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.441e+01 6.325e+01 6.836e+01 7.764e+01 1.443e+02, threshold=1.367e+02, percent-clipped=0.0
+2024-07-27 17:48:18,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=44290.666666666664, ans=0.2
+2024-07-27 17:48:24,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=44304.0, ans=0.09899494936611666
+2024-07-27 17:48:26,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.80 vs. limit=15.0
+2024-07-27 17:48:35,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=44317.333333333336, ans=0.0012353623188405792
+2024-07-27 17:48:39,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.74 vs. limit=12.0
+2024-07-27 17:48:40,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.48 vs. limit=15.0
+2024-07-27 17:48:49,580 INFO [train.py:1114] (2/4) Epoch 4, batch 2600, loss[loss=0.2296, simple_loss=0.3119, pruned_loss=0.07362, over 4891.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3372, pruned_loss=0.09669, over 937791.86 frames. ], batch size: 13, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:49:01,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=12.0
+2024-07-27 17:49:10,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=44397.333333333336, ans=0.125
+2024-07-27 17:49:11,579 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 17:49:24,928 INFO [train.py:1114] (2/4) Epoch 4, batch 2650, loss[loss=0.2545, simple_loss=0.3314, pruned_loss=0.0888, over 4621.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3373, pruned_loss=0.09654, over 939830.08 frames. ], batch size: 16, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:49:25,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.678e+01 7.695e+01 9.100e+01 1.480e+02, threshold=1.539e+02, percent-clipped=3.0
+2024-07-27 17:49:29,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.72 vs. limit=15.0
+2024-07-27 17:49:32,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=44437.333333333336, ans=0.07
+2024-07-27 17:49:38,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=44437.333333333336, ans=0.0
+2024-07-27 17:49:41,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=44450.666666666664, ans=0.0012063768115942036
+2024-07-27 17:49:46,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.03 vs. limit=22.5
+2024-07-27 17:49:52,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=44464.0, ans=0.2
+2024-07-27 17:50:01,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.19 vs. limit=12.0
+2024-07-27 17:50:06,104 INFO [train.py:1114] (2/4) Epoch 4, batch 2700, loss[loss=0.2647, simple_loss=0.3454, pruned_loss=0.09201, over 4734.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3388, pruned_loss=0.09735, over 939745.54 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0
+2024-07-27 17:50:14,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.16 vs. limit=15.0
+2024-07-27 17:50:24,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=44517.333333333336, ans=0.125
+2024-07-27 17:50:26,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=44530.666666666664, ans=0.125
+2024-07-27 17:50:41,735 INFO [train.py:1114] (2/4) Epoch 4, batch 2750, loss[loss=0.2141, simple_loss=0.2914, pruned_loss=0.06841, over 4703.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3387, pruned_loss=0.0976, over 939678.32 frames. ], batch size: 12, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:50:42,307 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.612e+01 7.573e+01 9.586e+01 1.480e+02, threshold=1.515e+02, percent-clipped=0.0
+2024-07-27 17:51:03,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=44597.333333333336, ans=0.0011744927536231874
+2024-07-27 17:51:05,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44597.333333333336, ans=0.1
+2024-07-27 17:51:06,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44597.333333333336, ans=0.1
+2024-07-27 17:51:18,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44610.666666666664, ans=0.125
+2024-07-27 17:51:19,254 INFO [train.py:1114] (2/4) Epoch 4, batch 2800, loss[loss=0.3633, simple_loss=0.4034, pruned_loss=0.1616, over 3426.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3395, pruned_loss=0.0979, over 938084.97 frames. ], batch size: 35, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:51:39,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=44650.666666666664, ans=0.0
+2024-07-27 17:51:45,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=44664.0, ans=0.125
+2024-07-27 17:51:51,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.41 vs. limit=22.5
+2024-07-27 17:51:54,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.16 vs. limit=22.5
+2024-07-27 17:51:55,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=44677.333333333336, ans=0.125
+2024-07-27 17:51:55,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=44677.333333333336, ans=0.025
+2024-07-27 17:51:55,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.51 vs. limit=15.0
+2024-07-27 17:51:57,093 INFO [train.py:1114] (2/4) Epoch 4, batch 2850, loss[loss=0.252, simple_loss=0.3218, pruned_loss=0.09109, over 4958.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3394, pruned_loss=0.09811, over 936826.80 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:51:57,803 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.785e+01 7.509e+01 8.652e+01 1.296e+02, threshold=1.502e+02, percent-clipped=0.0
+2024-07-27 17:52:00,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=44690.666666666664, ans=0.125
+2024-07-27 17:52:17,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.82 vs. limit=22.5
+2024-07-27 17:52:32,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=44744.0, ans=0.125
+2024-07-27 17:52:33,419 INFO [train.py:1114] (2/4) Epoch 4, batch 2900, loss[loss=0.281, simple_loss=0.3545, pruned_loss=0.1037, over 4828.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3406, pruned_loss=0.09793, over 940433.10 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:52:42,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=44770.666666666664, ans=0.125
+2024-07-27 17:52:44,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=44770.666666666664, ans=0.2
+2024-07-27 17:52:49,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=44784.0, ans=0.125
+2024-07-27 17:52:49,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=44784.0, ans=0.0
+2024-07-27 17:52:58,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=44797.333333333336, ans=10.0
+2024-07-27 17:53:07,408 INFO [train.py:1114] (2/4) Epoch 4, batch 2950, loss[loss=0.238, simple_loss=0.3116, pruned_loss=0.08219, over 4697.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3397, pruned_loss=0.09794, over 939127.20 frames. ], batch size: 12, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:53:07,999 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 6.448e+01 7.326e+01 8.943e+01 1.391e+02, threshold=1.465e+02, percent-clipped=0.0
+2024-07-27 17:53:13,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=44837.333333333336, ans=0.125
+2024-07-27 17:53:16,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=44837.333333333336, ans=0.1
+2024-07-27 17:53:19,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=44837.333333333336, ans=0.125
+2024-07-27 17:53:34,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=44877.333333333336, ans=0.125
+2024-07-27 17:53:41,153 INFO [train.py:1114] (2/4) Epoch 4, batch 3000, loss[loss=0.2646, simple_loss=0.3518, pruned_loss=0.08865, over 4759.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3393, pruned_loss=0.09732, over 937897.41 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:53:41,154 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 17:53:48,573 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.9300, 3.7577, 2.2741, 2.3679], device='cuda:2')
+2024-07-27 17:53:48,903 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.9244, 3.0400, 3.6752, 2.5330], device='cuda:2')
+2024-07-27 17:53:50,697 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9299, 5.1306, 5.0563, 5.6837], device='cuda:2')
+2024-07-27 17:53:52,965 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2168, simple_loss=0.3177, pruned_loss=0.05793, over 944034.00 frames.
+2024-07-27 17:53:52,966 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-27 17:54:01,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=44904.0, ans=0.001107826086956521
+2024-07-27 17:54:34,924 INFO [train.py:1114] (2/4) Epoch 4, batch 3050, loss[loss=0.27, simple_loss=0.3342, pruned_loss=0.1029, over 4636.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3392, pruned_loss=0.09761, over 937436.89 frames. ], batch size: 12, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:54:42,784 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.188e+01 6.571e+01 7.374e+01 8.801e+01 1.359e+02, threshold=1.475e+02, percent-clipped=0.0
+2024-07-27 17:54:49,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=44970.666666666664, ans=0.1
+2024-07-27 17:54:49,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=44970.666666666664, ans=0.125
+2024-07-27 17:54:53,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=44970.666666666664, ans=0.125
+2024-07-27 17:57:24,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=44997.333333333336, ans=0.2
+2024-07-27 17:57:42,511 INFO [train.py:1114] (2/4) Epoch 4, batch 3100, loss[loss=0.302, simple_loss=0.3713, pruned_loss=0.1164, over 4663.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.338, pruned_loss=0.09712, over 937845.94 frames. ], batch size: 16, lr: 1.83e-02, grad_scale: 32.0
+2024-07-27 17:57:54,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=45037.333333333336, ans=0.125
+2024-07-27 17:57:58,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=45037.333333333336, ans=0.0010788405797101438
+2024-07-27 17:58:17,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=45077.333333333336, ans=0.2
+2024-07-27 17:58:20,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=45077.333333333336, ans=0.05
+2024-07-27 17:58:45,994 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.25 vs. limit=12.0
+2024-07-27 17:58:46,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45090.666666666664, ans=0.1
+2024-07-27 17:58:47,038 INFO [train.py:1114] (2/4) Epoch 4, batch 3150, loss[loss=0.3165, simple_loss=0.3767, pruned_loss=0.1282, over 4614.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3382, pruned_loss=0.09664, over 937898.73 frames. ], batch size: 17, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 17:58:47,641 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+01 6.605e+01 7.303e+01 8.284e+01 1.349e+02, threshold=1.461e+02, percent-clipped=0.0
+2024-07-27 17:59:02,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=45104.0, ans=0.0
+2024-07-27 17:59:24,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45144.0, ans=0.125
+2024-07-27 17:59:25,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.32 vs. limit=22.5
+2024-07-27 17:59:29,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.72 vs. limit=15.0
+2024-07-27 17:59:31,925 INFO [train.py:1114] (2/4) Epoch 4, batch 3200, loss[loss=0.2705, simple_loss=0.347, pruned_loss=0.09702, over 4820.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3372, pruned_loss=0.09614, over 939532.32 frames. ], batch size: 13, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 17:59:33,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=45157.333333333336, ans=0.125
+2024-07-27 17:59:41,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.68 vs. limit=22.5
+2024-07-27 18:00:03,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.84 vs. limit=15.0
+2024-07-27 18:00:26,302 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:00:54,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45197.333333333336, ans=0.125
+2024-07-27 18:01:01,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=45210.666666666664, ans=0.125
+2024-07-27 18:01:03,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=45210.666666666664, ans=0.125
+2024-07-27 18:01:12,059 INFO [train.py:1114] (2/4) Epoch 4, batch 3250, loss[loss=0.3101, simple_loss=0.3836, pruned_loss=0.1183, over 4926.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3372, pruned_loss=0.09552, over 940394.78 frames. ], batch size: 14, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 18:01:12,745 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.665e+01 7.646e+01 9.547e+01 1.516e+02, threshold=1.529e+02, percent-clipped=1.0
+2024-07-27 18:01:17,111 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.50 vs. limit=15.0
+2024-07-27 18:01:21,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=45237.333333333336, ans=0.125
+2024-07-27 18:01:31,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=45250.666666666664, ans=0.125
+2024-07-27 18:01:35,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.20 vs. limit=22.5
+2024-07-27 18:01:46,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45277.333333333336, ans=0.1
+2024-07-27 18:01:50,280 INFO [train.py:1114] (2/4) Epoch 4, batch 3300, loss[loss=0.3205, simple_loss=0.3757, pruned_loss=0.1326, over 4674.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3371, pruned_loss=0.09637, over 940636.03 frames. ], batch size: 19, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 18:01:59,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.80 vs. limit=22.5
+2024-07-27 18:02:03,909 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:02:24,083 INFO [train.py:1114] (2/4) Epoch 4, batch 3350, loss[loss=0.2776, simple_loss=0.3463, pruned_loss=0.1044, over 4587.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3378, pruned_loss=0.09705, over 938333.52 frames. ], batch size: 17, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 18:02:24,711 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.140e+01 6.495e+01 7.490e+01 8.565e+01 1.368e+02, threshold=1.498e+02, percent-clipped=0.0
+2024-07-27 18:02:24,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=45357.333333333336, ans=0.125
+2024-07-27 18:02:31,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=45370.666666666664, ans=0.025
+2024-07-27 18:02:35,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=45370.666666666664, ans=0.125
+2024-07-27 18:02:40,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=45384.0, ans=0.125
+2024-07-27 18:02:46,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=45397.333333333336, ans=0.001000579710144927
+2024-07-27 18:02:57,873 INFO [train.py:1114] (2/4) Epoch 4, batch 3400, loss[loss=0.2425, simple_loss=0.3066, pruned_loss=0.08919, over 4816.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3386, pruned_loss=0.09783, over 937176.45 frames. ], batch size: 11, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 18:03:00,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0
+2024-07-27 18:03:03,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=45424.0, ans=0.125
+2024-07-27 18:03:34,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45477.333333333336, ans=0.1
+2024-07-27 18:03:40,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=45477.333333333336, ans=0.125
+2024-07-27 18:03:42,181 INFO [train.py:1114] (2/4) Epoch 4, batch 3450, loss[loss=0.2851, simple_loss=0.3669, pruned_loss=0.1016, over 4718.00 frames. ], tot_loss[loss=0.265, simple_loss=0.3373, pruned_loss=0.09637, over 937060.61 frames. ], batch size: 19, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 18:03:42,790 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.545e+01 7.401e+01 8.660e+01 1.564e+02, threshold=1.480e+02, percent-clipped=3.0
+2024-07-27 18:03:45,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45490.666666666664, ans=0.1
+2024-07-27 18:04:01,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=45517.333333333336, ans=0.2
+2024-07-27 18:04:04,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0
+2024-07-27 18:04:07,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=45517.333333333336, ans=0.125
+2024-07-27 18:04:10,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45530.666666666664, ans=0.1
+2024-07-27 18:04:21,762 INFO [train.py:1114] (2/4) Epoch 4, batch 3500, loss[loss=0.2786, simple_loss=0.3435, pruned_loss=0.1068, over 4950.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3378, pruned_loss=0.0964, over 938183.69 frames. ], batch size: 12, lr: 1.82e-02, grad_scale: 32.0
+2024-07-27 18:04:22,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45557.333333333336, ans=0.1
+2024-07-27 18:04:28,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45570.666666666664, ans=0.1
+2024-07-27 18:04:29,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.87 vs. limit=10.0
+2024-07-27 18:04:30,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=45570.666666666664, ans=0.0
+2024-07-27 18:04:52,278 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:04:55,588 INFO [train.py:1114] (2/4) Epoch 4, batch 3550, loss[loss=0.2145, simple_loss=0.2924, pruned_loss=0.06833, over 4671.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3378, pruned_loss=0.0962, over 938672.38 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0
+2024-07-27 18:04:56,223 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.373e+01 7.017e+01 7.924e+01 1.305e+02, threshold=1.403e+02, percent-clipped=0.0
+2024-07-27 18:05:03,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=45637.333333333336, ans=0.0
+2024-07-27 18:05:24,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=45664.0, ans=0.125
+2024-07-27 18:05:35,439 INFO [train.py:1114] (2/4) Epoch 4, batch 3600, loss[loss=0.2406, simple_loss=0.3207, pruned_loss=0.08026, over 4960.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3388, pruned_loss=0.09693, over 940504.02 frames. ], batch size: 13, lr: 1.81e-02, grad_scale: 32.0
+2024-07-27 18:05:35,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=45690.666666666664, ans=0.125
+2024-07-27 18:05:36,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=45690.666666666664, ans=0.0
+2024-07-27 18:05:41,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=27.66 vs. limit=22.5
+2024-07-27 18:05:42,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.76 vs. limit=15.0
+2024-07-27 18:05:43,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.48 vs. limit=15.0
+2024-07-27 18:05:51,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=45717.333333333336, ans=0.1
+2024-07-27 18:06:01,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=45730.666666666664, ans=0.2
+2024-07-27 18:06:02,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=45730.666666666664, ans=0.125
+2024-07-27 18:06:08,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=45744.0, ans=0.125
+2024-07-27 18:06:11,485 INFO [train.py:1114] (2/4) Epoch 4, batch 3650, loss[loss=0.2918, simple_loss=0.3592, pruned_loss=0.1122, over 4895.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3382, pruned_loss=0.0968, over 941565.45 frames. ], batch size: 15, lr: 1.81e-02, grad_scale: 64.0
+2024-07-27 18:06:12,145 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.653e+01 7.624e+01 9.000e+01 1.438e+02, threshold=1.525e+02, percent-clipped=1.0
+2024-07-27 18:06:25,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=45784.0, ans=0.025
+2024-07-27 18:06:35,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=45797.333333333336, ans=0.125
+2024-07-27 18:06:35,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=45797.333333333336, ans=0.0
+2024-07-27 18:06:44,752 INFO [train.py:1114] (2/4) Epoch 4, batch 3700, loss[loss=0.3101, simple_loss=0.3711, pruned_loss=0.1245, over 4934.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3382, pruned_loss=0.09667, over 942430.33 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0
+2024-07-27 18:06:51,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45837.333333333336, ans=0.125
+2024-07-27 18:07:04,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=45850.666666666664, ans=0.0009020289855072462
+2024-07-27 18:07:21,637 INFO [train.py:1114] (2/4) Epoch 4, batch 3750, loss[loss=0.2367, simple_loss=0.3048, pruned_loss=0.08433, over 4814.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3367, pruned_loss=0.09555, over 943668.95 frames. ], batch size: 11, lr: 1.81e-02, grad_scale: 64.0
+2024-07-27 18:07:22,321 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.296e+01 6.507e+01 7.242e+01 8.300e+01 1.182e+02, threshold=1.448e+02, percent-clipped=0.0
+2024-07-27 18:07:42,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45930.666666666664, ans=0.1
+2024-07-27 18:07:52,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=45944.0, ans=0.025
+2024-07-27 18:07:54,669 INFO [train.py:1114] (2/4) Epoch 4, batch 3800, loss[loss=0.2696, simple_loss=0.3405, pruned_loss=0.09933, over 4822.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3368, pruned_loss=0.09575, over 942012.74 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0
+2024-07-27 18:08:04,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0
+2024-07-27 18:08:11,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45984.0, ans=0.1
+2024-07-27 18:08:13,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=45984.0, ans=0.0
+2024-07-27 18:08:18,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=45997.333333333336, ans=0.0
+2024-07-27 18:08:28,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=46024.0, ans=0.2
+2024-07-27 18:08:28,681 INFO [train.py:1114] (2/4) Epoch 4, batch 3850, loss[loss=0.3007, simple_loss=0.3727, pruned_loss=0.1144, over 4610.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3364, pruned_loss=0.09528, over 942503.32 frames. ], batch size: 16, lr: 1.81e-02, grad_scale: 32.0
+2024-07-27 18:08:30,035 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.355e+01 6.600e+01 7.617e+01 8.935e+01 1.540e+02, threshold=1.523e+02, percent-clipped=1.0
+2024-07-27 18:08:30,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=46024.0, ans=0.0008643478260869572
+2024-07-27 18:08:34,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.24 vs. limit=15.0
+2024-07-27 18:08:42,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=46050.666666666664, ans=0.125
+2024-07-27 18:08:43,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.23 vs. limit=12.0
+2024-07-27 18:09:03,316 INFO [train.py:1114] (2/4) Epoch 4, batch 3900, loss[loss=0.3187, simple_loss=0.3853, pruned_loss=0.126, over 4816.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3375, pruned_loss=0.09561, over 942541.08 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0
+2024-07-27 18:09:14,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=46104.0, ans=0.5
+2024-07-27 18:09:16,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=46117.333333333336, ans=0.125
+2024-07-27 18:09:20,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=46117.333333333336, ans=0.95
+2024-07-27 18:09:22,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.07 vs. limit=15.0
+2024-07-27 18:09:37,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=46144.0, ans=0.125
+2024-07-27 18:09:42,261 INFO [train.py:1114] (2/4) Epoch 4, batch 3950, loss[loss=0.3285, simple_loss=0.3906, pruned_loss=0.1332, over 4848.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3368, pruned_loss=0.09497, over 944483.21 frames. ], batch size: 16, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:09:44,106 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.796e+01 7.722e+01 1.006e+02 1.504e+02, threshold=1.544e+02, percent-clipped=0.0
+2024-07-27 18:09:44,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.53 vs. limit=22.5
+2024-07-27 18:09:50,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46170.666666666664, ans=0.1
+2024-07-27 18:09:52,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=46170.666666666664, ans=0.125
+2024-07-27 18:10:04,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.48 vs. limit=6.0
+2024-07-27 18:10:16,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.18 vs. limit=6.0
+2024-07-27 18:10:21,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=15.0
+2024-07-27 18:10:29,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=46210.666666666664, ans=0.0
+2024-07-27 18:10:30,498 INFO [train.py:1114] (2/4) Epoch 4, batch 4000, loss[loss=0.2712, simple_loss=0.3353, pruned_loss=0.1036, over 4779.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.3359, pruned_loss=0.09465, over 940875.09 frames. ], batch size: 12, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:10:31,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46224.0, ans=0.1
+2024-07-27 18:10:32,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.51 vs. limit=22.5
+2024-07-27 18:10:40,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=46237.333333333336, ans=0.125
+2024-07-27 18:10:47,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=46250.666666666664, ans=0.0
+2024-07-27 18:10:53,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=46264.0, ans=0.125
+2024-07-27 18:11:06,047 INFO [train.py:1114] (2/4) Epoch 4, batch 4050, loss[loss=0.2985, simple_loss=0.3684, pruned_loss=0.1143, over 3557.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3363, pruned_loss=0.09524, over 939887.53 frames. ], batch size: 35, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:11:07,321 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.344e+01 6.516e+01 7.339e+01 8.508e+01 1.190e+02, threshold=1.468e+02, percent-clipped=0.0
+2024-07-27 18:11:28,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=46317.333333333336, ans=10.0
+2024-07-27 18:11:30,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=46317.333333333336, ans=0.125
+2024-07-27 18:11:30,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=46317.333333333336, ans=0.125
+2024-07-27 18:11:37,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=46330.666666666664, ans=0.0007976811594202892
+2024-07-27 18:11:37,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0
+2024-07-27 18:11:42,776 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:11:47,335 INFO [train.py:1114] (2/4) Epoch 4, batch 4100, loss[loss=0.2711, simple_loss=0.3381, pruned_loss=0.1021, over 4910.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.337, pruned_loss=0.09598, over 938737.45 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:11:50,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46357.333333333336, ans=0.1
+2024-07-27 18:11:53,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=46370.666666666664, ans=0.2
+2024-07-27 18:11:54,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46370.666666666664, ans=0.125
+2024-07-27 18:11:59,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=46370.666666666664, ans=0.0
+2024-07-27 18:12:00,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=46384.0, ans=0.125
+2024-07-27 18:12:02,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=46384.0, ans=0.125
+2024-07-27 18:12:06,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46384.0, ans=0.125
+2024-07-27 18:12:07,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=46397.333333333336, ans=0.125
+2024-07-27 18:12:21,694 INFO [train.py:1114] (2/4) Epoch 4, batch 4150, loss[loss=0.2363, simple_loss=0.3268, pruned_loss=0.07287, over 4830.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.337, pruned_loss=0.09542, over 938403.78 frames. ], batch size: 13, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:12:21,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.16 vs. limit=12.0
+2024-07-27 18:12:22,998 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.950e+01 8.086e+01 1.014e+02 1.411e+02, threshold=1.617e+02, percent-clipped=0.0
+2024-07-27 18:12:23,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=46424.0, ans=0.125
+2024-07-27 18:12:25,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=46424.0, ans=0.125
+2024-07-27 18:12:26,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=46424.0, ans=0.025
+2024-07-27 18:12:27,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=46424.0, ans=0.0007773913043478253
+2024-07-27 18:12:31,971 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:12:41,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=12.0
+2024-07-27 18:13:02,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46477.333333333336, ans=0.125
+2024-07-27 18:13:02,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=46477.333333333336, ans=0.0
+2024-07-27 18:13:03,919 INFO [train.py:1114] (2/4) Epoch 4, batch 4200, loss[loss=0.2654, simple_loss=0.3336, pruned_loss=0.09863, over 4908.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3386, pruned_loss=0.09689, over 939942.31 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:13:05,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=46490.666666666664, ans=0.125
+2024-07-27 18:13:21,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=6.0
+2024-07-27 18:13:25,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=46530.666666666664, ans=0.1
+2024-07-27 18:13:30,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=46530.666666666664, ans=0.0007542028985507241
+2024-07-27 18:13:38,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=46544.0, ans=0.2
+2024-07-27 18:13:39,617 INFO [train.py:1114] (2/4) Epoch 4, batch 4250, loss[loss=0.2178, simple_loss=0.3026, pruned_loss=0.0665, over 4646.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3383, pruned_loss=0.097, over 940669.36 frames. ], batch size: 12, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:13:40,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0
+2024-07-27 18:13:40,928 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.597e+01 7.169e+01 7.931e+01 1.247e+02, threshold=1.434e+02, percent-clipped=0.0
+2024-07-27 18:14:03,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=46584.0, ans=0.125
+2024-07-27 18:14:07,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46584.0, ans=0.1
+2024-07-27 18:14:19,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=46610.666666666664, ans=0.2
+2024-07-27 18:14:24,151 INFO [train.py:1114] (2/4) Epoch 4, batch 4300, loss[loss=0.2622, simple_loss=0.3381, pruned_loss=0.09317, over 4761.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3371, pruned_loss=0.09627, over 939960.05 frames. ], batch size: 13, lr: 1.80e-02, grad_scale: 32.0
+2024-07-27 18:14:24,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46624.0, ans=0.125
+2024-07-27 18:14:26,950 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:14:28,156 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:14:40,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46650.666666666664, ans=0.1
+2024-07-27 18:14:51,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=46677.333333333336, ans=0.2
+2024-07-27 18:14:57,375 INFO [train.py:1114] (2/4) Epoch 4, batch 4350, loss[loss=0.2584, simple_loss=0.3329, pruned_loss=0.09197, over 4756.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3372, pruned_loss=0.09577, over 940425.69 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 32.0
+2024-07-27 18:14:58,634 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.647e+01 7.749e+01 8.957e+01 1.514e+02, threshold=1.550e+02, percent-clipped=2.0
+2024-07-27 18:15:19,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.91 vs. limit=22.5
+2024-07-27 18:15:24,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=46744.0, ans=0.125
+2024-07-27 18:15:27,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=46744.0, ans=0.2
+2024-07-27 18:15:30,906 INFO [train.py:1114] (2/4) Epoch 4, batch 4400, loss[loss=0.2325, simple_loss=0.3276, pruned_loss=0.06871, over 4804.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3371, pruned_loss=0.09534, over 940375.12 frames. ], batch size: 14, lr: 1.79e-02, grad_scale: 32.0
+2024-07-27 18:15:41,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46770.666666666664, ans=0.125
+2024-07-27 18:15:45,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=46784.0, ans=0.125
+2024-07-27 18:15:53,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=46797.333333333336, ans=0.125
+2024-07-27 18:16:04,539 INFO [train.py:1114] (2/4) Epoch 4, batch 4450, loss[loss=0.2132, simple_loss=0.2834, pruned_loss=0.07155, over 4945.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3367, pruned_loss=0.09533, over 938866.00 frames. ], batch size: 12, lr: 1.79e-02, grad_scale: 32.0
+2024-07-27 18:16:05,844 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.574e+01 7.932e+01 1.004e+02 1.651e+02, threshold=1.586e+02, percent-clipped=3.0
+2024-07-27 18:16:05,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=46824.0, ans=0.0
+2024-07-27 18:16:06,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=46824.0, ans=0.125
+2024-07-27 18:16:17,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.83 vs. limit=6.0
+2024-07-27 18:16:23,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=46850.666666666664, ans=0.2
+2024-07-27 18:16:25,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=46864.0, ans=0.125
+2024-07-27 18:16:38,382 INFO [train.py:1114] (2/4) Epoch 4, batch 4500, loss[loss=0.2554, simple_loss=0.3333, pruned_loss=0.08882, over 4746.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3364, pruned_loss=0.09496, over 938131.56 frames. ], batch size: 14, lr: 1.79e-02, grad_scale: 32.0
+2024-07-27 18:16:39,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46890.666666666664, ans=0.1
+2024-07-27 18:16:40,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0
+2024-07-27 18:16:43,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46890.666666666664, ans=0.1
+2024-07-27 18:16:52,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=46917.333333333336, ans=0.125
+2024-07-27 18:16:58,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=46917.333333333336, ans=0.0006701449275362311
+2024-07-27 18:17:02,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.47 vs. limit=15.0
+2024-07-27 18:17:06,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=46930.666666666664, ans=0.2
+2024-07-27 18:17:08,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=46944.0, ans=0.0
+2024-07-27 18:17:13,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=46944.0, ans=0.0
+2024-07-27 18:17:15,575 INFO [train.py:1114] (2/4) Epoch 4, batch 4550, loss[loss=0.2456, simple_loss=0.3133, pruned_loss=0.08897, over 4898.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3347, pruned_loss=0.09392, over 940184.33 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 16.0
+2024-07-27 18:17:15,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.64 vs. limit=15.0
+2024-07-27 18:17:17,514 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.369e+01 6.640e+01 7.268e+01 8.274e+01 1.292e+02, threshold=1.454e+02, percent-clipped=0.0
+2024-07-27 18:17:39,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=46997.333333333336, ans=0.035
+2024-07-27 18:17:39,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=46997.333333333336, ans=0.0006527536231884061
+2024-07-27 18:17:49,277 INFO [train.py:1114] (2/4) Epoch 4, batch 4600, loss[loss=0.262, simple_loss=0.3324, pruned_loss=0.09582, over 4565.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3345, pruned_loss=0.09397, over 938737.87 frames. ], batch size: 21, lr: 1.79e-02, grad_scale: 16.0
+2024-07-27 18:18:00,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=47037.333333333336, ans=0.0
+2024-07-27 18:18:21,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=47077.333333333336, ans=0.2
+2024-07-27 18:18:22,633 INFO [train.py:1114] (2/4) Epoch 4, batch 4650, loss[loss=0.2598, simple_loss=0.3329, pruned_loss=0.09335, over 4831.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3357, pruned_loss=0.09432, over 940351.37 frames. ], batch size: 16, lr: 1.79e-02, grad_scale: 8.0
+2024-07-27 18:18:25,333 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.570e+01 7.431e+01 9.301e+01 1.835e+02, threshold=1.486e+02, percent-clipped=1.0
+2024-07-27 18:18:49,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=47117.333333333336, ans=0.125
+2024-07-27 18:18:56,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=47130.666666666664, ans=0.0
+2024-07-27 18:18:59,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47130.666666666664, ans=0.0
+2024-07-27 18:19:09,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=47144.0, ans=0.0006208695652173916
+2024-07-27 18:19:10,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0
+2024-07-27 18:19:10,954 INFO [train.py:1114] (2/4) Epoch 4, batch 4700, loss[loss=0.2275, simple_loss=0.3045, pruned_loss=0.07522, over 4694.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3339, pruned_loss=0.09374, over 937558.38 frames. ], batch size: 11, lr: 1.79e-02, grad_scale: 8.0
+2024-07-27 18:19:13,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=47157.333333333336, ans=0.0
+2024-07-27 18:19:16,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47157.333333333336, ans=0.125
+2024-07-27 18:19:17,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=47170.666666666664, ans=0.0
+2024-07-27 18:19:21,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=47170.666666666664, ans=0.125
+2024-07-27 18:19:25,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=47184.0, ans=0.025
+2024-07-27 18:19:25,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=47184.0, ans=0.0006121739130434782
+2024-07-27 18:19:38,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.96 vs. limit=12.0
+2024-07-27 18:19:40,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=47210.666666666664, ans=0.05
+2024-07-27 18:19:45,069 INFO [train.py:1114] (2/4) Epoch 4, batch 4750, loss[loss=0.2856, simple_loss=0.3553, pruned_loss=0.108, over 4470.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3356, pruned_loss=0.09524, over 935870.43 frames. ], batch size: 21, lr: 1.78e-02, grad_scale: 8.0
+2024-07-27 18:19:47,746 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.439e+01 7.166e+01 9.768e+01 1.474e+02, threshold=1.433e+02, percent-clipped=0.0
+2024-07-27 18:19:48,614 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:19:49,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=47224.0, ans=0.07
+2024-07-27 18:19:51,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=47237.333333333336, ans=0.2
+2024-07-27 18:19:51,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=15.0
+2024-07-27 18:19:55,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=47237.333333333336, ans=0.07
+2024-07-27 18:19:56,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=47237.333333333336, ans=0.125
+2024-07-27 18:20:05,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=47264.0, ans=0.05
+2024-07-27 18:20:10,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=47264.0, ans=0.025
+2024-07-27 18:20:15,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=47277.333333333336, ans=0.2
+2024-07-27 18:20:17,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=47277.333333333336, ans=0.125
+2024-07-27 18:20:19,551 INFO [train.py:1114] (2/4) Epoch 4, batch 4800, loss[loss=0.2802, simple_loss=0.3526, pruned_loss=0.1039, over 4690.00 frames. ], tot_loss[loss=0.2626, simple_loss=0.3347, pruned_loss=0.09525, over 932954.25 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:20:21,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=47290.666666666664, ans=0.0
+2024-07-27 18:20:45,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=47330.666666666664, ans=0.035
+2024-07-27 18:20:59,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47344.0, ans=0.1
+2024-07-27 18:21:03,409 INFO [train.py:1114] (2/4) Epoch 4, batch 4850, loss[loss=0.2976, simple_loss=0.3638, pruned_loss=0.1157, over 4740.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3354, pruned_loss=0.09549, over 932235.37 frames. ], batch size: 14, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:21:06,064 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.442e+01 7.162e+01 7.877e+01 1.649e+02, threshold=1.432e+02, percent-clipped=2.0
+2024-07-27 18:21:16,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=47384.0, ans=0.125
+2024-07-27 18:21:35,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47410.666666666664, ans=0.1
+2024-07-27 18:21:37,086 INFO [train.py:1114] (2/4) Epoch 4, batch 4900, loss[loss=0.2641, simple_loss=0.338, pruned_loss=0.09508, over 4753.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3358, pruned_loss=0.09566, over 934159.32 frames. 
+2024-07-27 18:21:47,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=47437.333333333336, ans=0.125
+2024-07-27 18:21:55,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=47450.666666666664, ans=0.09899494936611666
+2024-07-27 18:21:55,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47450.666666666664, ans=0.1
+2024-07-27 18:22:01,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.48 vs. limit=22.5
+2024-07-27 18:22:03,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47477.333333333336, ans=0.125
+2024-07-27 18:22:05,516 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.94 vs. limit=22.5
+2024-07-27 18:22:14,200 INFO [train.py:1114] (2/4) Epoch 4, batch 4950, loss[loss=0.4122, simple_loss=0.4329, pruned_loss=0.1958, over 3466.00 frames. ], tot_loss[loss=0.2661, simple_loss=0.3382, pruned_loss=0.09701, over 931435.92 frames. ], batch size: 36, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:22:16,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 6.647e+01 7.619e+01 9.936e+01 1.671e+02, threshold=1.524e+02, percent-clipped=3.0
+2024-07-27 18:22:31,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=47517.333333333336, ans=0.125
+2024-07-27 18:22:35,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=47530.666666666664, ans=0.2
+2024-07-27 18:22:35,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=47530.666666666664, ans=0.0005368115942028986
+2024-07-27 18:22:45,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=47544.0, ans=0.125
+2024-07-27 18:22:52,601 INFO [train.py:1114] (2/4) Epoch 4, batch 5000, loss[loss=0.2489, simple_loss=0.3394, pruned_loss=0.07919, over 4674.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3371, pruned_loss=0.09605, over 935175.31 frames. ], batch size: 14, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:23:03,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=47570.666666666664, ans=0.125
+2024-07-27 18:23:04,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=47570.666666666664, ans=0.0005281159420289869
+2024-07-27 18:23:04,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=47570.666666666664, ans=0.0
+2024-07-27 18:23:08,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=47584.0, ans=15.0
+2024-07-27 18:23:26,401 INFO [train.py:1114] (2/4) Epoch 4, batch 5050, loss[loss=0.23, simple_loss=0.3022, pruned_loss=0.07887, over 4836.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3356, pruned_loss=0.0951, over 937541.65 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:23:29,105 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.671e+01 7.390e+01 9.030e+01 1.584e+02, threshold=1.478e+02, percent-clipped=1.0
+2024-07-27 18:23:34,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=47637.333333333336, ans=0.2
+2024-07-27 18:23:39,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0
+2024-07-27 18:23:51,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47664.0, ans=0.1
+2024-07-27 18:23:54,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=47677.333333333336, ans=0.0
+2024-07-27 18:24:01,842 INFO [train.py:1114] (2/4) Epoch 4, batch 5100, loss[loss=0.2211, simple_loss=0.2925, pruned_loss=0.07482, over 4767.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3379, pruned_loss=0.09642, over 935324.12 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:24:14,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=12.0
+2024-07-27 18:24:19,570 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:24:19,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.41 vs. limit=15.0
+2024-07-27 18:24:30,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=15.0
+2024-07-27 18:24:31,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=47744.0, ans=0.0004904347826086963
+2024-07-27 18:24:37,632 INFO [train.py:1114] (2/4) Epoch 4, batch 5150, loss[loss=0.2532, simple_loss=0.3346, pruned_loss=0.0859, over 4853.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.3375, pruned_loss=0.09605, over 936507.84 frames. ], batch size: 16, lr: 1.78e-02, grad_scale: 16.0
+2024-07-27 18:24:40,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.251e+01 6.747e+01 7.591e+01 8.914e+01 1.388e+02, threshold=1.518e+02, percent-clipped=0.0
+2024-07-27 18:24:49,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=47770.666666666664, ans=0.125
+2024-07-27 18:24:49,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.68 vs. limit=15.0
+2024-07-27 18:24:51,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.31 vs. limit=22.5
+2024-07-27 18:25:02,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=47797.333333333336, ans=0.125
+2024-07-27 18:25:13,266 INFO [train.py:1114] (2/4) Epoch 4, batch 5200, loss[loss=0.2502, simple_loss=0.3279, pruned_loss=0.08627, over 4674.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3362, pruned_loss=0.09517, over 936223.58 frames. ], batch size: 14, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:25:22,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=47837.333333333336, ans=0.125
+2024-07-27 18:25:22,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.08 vs. limit=12.0
+2024-07-27 18:25:26,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=47850.666666666664, ans=0.1
+2024-07-27 18:25:29,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=47850.666666666664, ans=0.125
+2024-07-27 18:25:35,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=47864.0, ans=0.125
+2024-07-27 18:25:43,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47877.333333333336, ans=0.125
+2024-07-27 18:25:47,787 INFO [train.py:1114] (2/4) Epoch 4, batch 5250, loss[loss=0.3515, simple_loss=0.4104, pruned_loss=0.1463, over 4902.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3355, pruned_loss=0.09495, over 935909.69 frames. ], batch size: 13, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:25:50,447 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.549e+01 7.419e+01 9.087e+01 1.892e+02, threshold=1.484e+02, percent-clipped=1.0
+2024-07-27 18:25:54,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.56 vs. limit=15.0
+2024-07-27 18:26:06,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=47917.333333333336, ans=0.05
+2024-07-27 18:26:13,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=47930.666666666664, ans=0.125
+2024-07-27 18:26:15,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=47944.0, ans=0.125
+2024-07-27 18:26:21,681 INFO [train.py:1114] (2/4) Epoch 4, batch 5300, loss[loss=0.3123, simple_loss=0.3832, pruned_loss=0.1207, over 4639.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3361, pruned_loss=0.09548, over 934599.74 frames. ], batch size: 16, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:26:28,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=47970.666666666664, ans=0.125
+2024-07-27 18:26:28,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=47970.666666666664, ans=0.0
+2024-07-27 18:26:33,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=47970.666666666664, ans=0.05
+2024-07-27 18:26:35,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=47984.0, ans=0.05
+2024-07-27 18:26:57,392 INFO [train.py:1114] (2/4) Epoch 4, batch 5350, loss[loss=0.2441, simple_loss=0.3036, pruned_loss=0.09231, over 4501.00 frames. ], tot_loss[loss=0.2632, simple_loss=0.3359, pruned_loss=0.09524, over 936575.54 frames. ], batch size: 10, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:27:00,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.313e+01 6.419e+01 7.171e+01 7.752e+01 1.208e+02, threshold=1.434e+02, percent-clipped=0.0
+2024-07-27 18:27:00,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.79 vs. limit=15.0
+2024-07-27 18:27:00,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=48024.0, ans=0.2
+2024-07-27 18:27:08,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=48037.333333333336, ans=0.0
+2024-07-27 18:27:14,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=48050.666666666664, ans=0.125
+2024-07-27 18:27:22,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=48064.0, ans=0.00042086956521739105
+2024-07-27 18:27:22,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=48064.0, ans=0.0
+2024-07-27 18:27:23,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=48077.333333333336, ans=0.125
+2024-07-27 18:27:28,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=48077.333333333336, ans=0.125
+2024-07-27 18:27:31,380 INFO [train.py:1114] (2/4) Epoch 4, batch 5400, loss[loss=0.3141, simple_loss=0.3659, pruned_loss=0.1312, over 4378.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.337, pruned_loss=0.09599, over 930496.57 frames. ], batch size: 26, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:27:33,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=48090.666666666664, ans=0.2
+2024-07-27 18:27:47,896 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.66 vs. limit=22.5
+2024-07-27 18:27:56,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=48130.666666666664, ans=0.1
+2024-07-27 18:28:05,835 INFO [train.py:1114] (2/4) Epoch 4, batch 5450, loss[loss=0.2128, simple_loss=0.2845, pruned_loss=0.07053, over 4708.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3355, pruned_loss=0.09522, over 933576.29 frames. ], batch size: 11, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:28:14,666 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.320e+01 7.105e+01 8.639e+01 1.249e+02, threshold=1.421e+02, percent-clipped=0.0
+2024-07-27 18:28:26,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=48170.666666666664, ans=0.125
+2024-07-27 18:28:27,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=48170.666666666664, ans=0.2
+2024-07-27 18:28:36,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=48197.333333333336, ans=0.09899494936611666
+2024-07-27 18:28:38,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=48197.333333333336, ans=0.125
+2024-07-27 18:28:39,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.59 vs. limit=15.0
+2024-07-27 18:28:50,543 INFO [train.py:1114] (2/4) Epoch 4, batch 5500, loss[loss=0.3385, simple_loss=0.4001, pruned_loss=0.1385, over 4220.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3346, pruned_loss=0.09493, over 931409.19 frames. ], batch size: 25, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:29:00,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=48237.333333333336, ans=0.0
+2024-07-27 18:29:11,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48264.0, ans=0.125
+2024-07-27 18:29:14,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48264.0, ans=0.1
+2024-07-27 18:29:15,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=48264.0, ans=0.025
+2024-07-27 18:29:16,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=48264.0, ans=0.2
+2024-07-27 18:29:22,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.36 vs. limit=15.0
+2024-07-27 18:29:24,390 INFO [train.py:1114] (2/4) Epoch 4, batch 5550, loss[loss=0.2983, simple_loss=0.3531, pruned_loss=0.1217, over 4712.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3342, pruned_loss=0.09476, over 933223.51 frames. ], batch size: 12, lr: 1.77e-02, grad_scale: 32.0
+2024-07-27 18:29:27,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.976e+01 8.822e+01 1.148e+02 2.032e+02, threshold=1.764e+02, percent-clipped=8.0
+2024-07-27 18:29:33,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=48304.0, ans=0.125
+2024-07-27 18:29:37,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48317.333333333336, ans=0.1
+2024-07-27 18:29:40,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=48317.333333333336, ans=0.125
+2024-07-27 18:29:49,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=48330.666666666664, ans=0.125
+2024-07-27 18:29:49,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=48330.666666666664, ans=0.1
+2024-07-27 18:29:52,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=48330.666666666664, ans=0.125
+2024-07-27 18:29:58,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=48344.0, ans=0.2
+2024-07-27 18:30:01,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=48357.333333333336, ans=0.025
+2024-07-27 18:30:02,097 INFO [train.py:1114] (2/4) Epoch 4, batch 5600, loss[loss=0.2727, simple_loss=0.3497, pruned_loss=0.09784, over 4741.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3342, pruned_loss=0.09452, over 934513.94 frames. ], batch size: 14, lr: 1.76e-02, grad_scale: 32.0
+2024-07-27 18:30:03,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48357.333333333336, ans=0.1
+2024-07-27 18:30:07,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=21.42 vs. limit=15.0
+2024-07-27 18:30:08,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=48357.333333333336, ans=0.125
+2024-07-27 18:30:09,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=48370.666666666664, ans=0.0003542028985507248
+2024-07-27 18:30:12,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=48370.666666666664, ans=0.1
+2024-07-27 18:30:12,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.41 vs. limit=12.0
+2024-07-27 18:30:19,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=6.0
+2024-07-27 18:30:24,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=48397.333333333336, ans=0.125
+2024-07-27 18:30:38,208 INFO [train.py:1114] (2/4) Epoch 4, batch 5650, loss[loss=0.3032, simple_loss=0.3619, pruned_loss=0.1223, over 4472.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3329, pruned_loss=0.09345, over 936957.17 frames. ], batch size: 21, lr: 1.76e-02, grad_scale: 32.0
+2024-07-27 18:30:41,016 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.980e+01 6.257e+01 6.942e+01 8.186e+01 1.408e+02, threshold=1.388e+02, percent-clipped=0.0
+2024-07-27 18:30:41,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=48424.0, ans=0.0003426086956521742
+2024-07-27 18:30:58,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=48464.0, ans=0.0
+2024-07-27 18:30:59,540 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.90 vs. limit=15.0
+2024-07-27 18:31:05,223 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:31:11,775 INFO [train.py:1114] (2/4) Epoch 4, batch 5700, loss[loss=0.2246, simple_loss=0.2996, pruned_loss=0.07482, over 4690.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3332, pruned_loss=0.09347, over 938158.23 frames. ], batch size: 13, lr: 1.76e-02, grad_scale: 32.0
+2024-07-27 18:31:22,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=48504.0, ans=0.0
+2024-07-27 18:31:31,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48530.666666666664, ans=0.1
+2024-07-27 18:31:34,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.01 vs. limit=15.0
+2024-07-27 18:31:39,212 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:31:39,975 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0
+2024-07-27 18:31:42,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=48544.0, ans=0.125
+2024-07-27 18:31:45,699 INFO [train.py:1114] (2/4) Epoch 4, batch 5750, loss[loss=0.2881, simple_loss=0.3654, pruned_loss=0.1054, over 4738.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3351, pruned_loss=0.09422, over 937908.05 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0
+2024-07-27 18:31:51,402 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.612e+01 7.726e+01 1.001e+02 1.887e+02, threshold=1.545e+02, percent-clipped=6.0
+2024-07-27 18:32:02,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.21 vs. limit=15.0
+2024-07-27 18:32:05,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48584.0, ans=0.0
+2024-07-27 18:32:08,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.17 vs. limit=15.0
+2024-07-27 18:32:17,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=48610.666666666664, ans=0.1
+2024-07-27 18:32:21,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48610.666666666664, ans=0.1
+2024-07-27 18:32:22,606 INFO [train.py:1114] (2/4) Epoch 4, batch 5800, loss[loss=0.271, simple_loss=0.3281, pruned_loss=0.107, over 4766.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3359, pruned_loss=0.09455, over 936846.15 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0
+2024-07-27 18:32:24,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=48624.0, ans=0.02
+2024-07-27 18:32:41,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=48650.666666666664, ans=0.025
+2024-07-27 18:32:42,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48664.0, ans=0.0
+2024-07-27 18:32:56,501 INFO [train.py:1114] (2/4) Epoch 4, batch 5850, loss[loss=0.3154, simple_loss=0.3843, pruned_loss=0.1233, over 4627.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3359, pruned_loss=0.0948, over 937413.25 frames. ], batch size: 21, lr: 1.76e-02, grad_scale: 16.0
+2024-07-27 18:32:58,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=48690.666666666664, ans=0.125
+2024-07-27 18:32:59,846 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 6.444e+01 7.225e+01 8.494e+01 1.330e+02, threshold=1.445e+02, percent-clipped=0.0
+2024-07-27 18:33:09,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0
+2024-07-27 18:33:11,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=48704.0, ans=0.125
+2024-07-27 18:33:12,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=48704.0, ans=0.125
+2024-07-27 18:33:13,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.47 vs. limit=10.0
+2024-07-27 18:33:21,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.42 vs. limit=15.0
+2024-07-27 18:33:23,051 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:33:25,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=48730.666666666664, ans=0.2
+2024-07-27 18:33:27,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=48744.0, ans=0.0
+2024-07-27 18:33:34,401 INFO [train.py:1114] (2/4) Epoch 4, batch 5900, loss[loss=0.2721, simple_loss=0.3411, pruned_loss=0.1015, over 4670.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3361, pruned_loss=0.09465, over 937824.75 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0
+2024-07-27 18:33:35,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.95 vs. limit=15.0
+2024-07-27 18:33:36,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=48757.333333333336, ans=0.95
+2024-07-27 18:33:37,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48757.333333333336, ans=0.1
+2024-07-27 18:33:54,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=48784.0, ans=0.125
+2024-07-27 18:34:13,827 INFO [train.py:1114] (2/4) Epoch 4, batch 5950, loss[loss=0.2349, simple_loss=0.3231, pruned_loss=0.07336, over 4694.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3352, pruned_loss=0.09377, over 939766.74 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0
+2024-07-27 18:34:13,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=48824.0, ans=0.0
+2024-07-27 18:34:14,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.99 vs. limit=15.0
+2024-07-27 18:34:17,273 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.376e+01 6.577e+01 7.476e+01 8.958e+01 1.675e+02, threshold=1.495e+02, percent-clipped=2.0
+2024-07-27 18:34:24,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=48837.333333333336, ans=0.00025275362318840505
+2024-07-27 18:34:26,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=48837.333333333336, ans=0.125
+2024-07-27 18:34:28,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=48850.666666666664, ans=0.0
+2024-07-27 18:34:34,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=48864.0, ans=0.2
+2024-07-27 18:34:47,626 INFO [train.py:1114] (2/4) Epoch 4, batch 6000, loss[loss=0.2759, simple_loss=0.3452, pruned_loss=0.1033, over 4107.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3343, pruned_loss=0.09352, over 936773.33 frames. ], batch size: 25, lr: 1.76e-02, grad_scale: 32.0
+2024-07-27 18:34:47,626 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 18:35:03,514 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2107, simple_loss=0.3128, pruned_loss=0.05435, over 944034.00 frames.
+2024-07-27 18:35:03,515 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-27 18:35:06,941 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:35:14,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=48904.0, ans=0.00023826086956521726
+2024-07-27 18:35:14,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=48904.0, ans=0.125
+2024-07-27 18:35:23,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.28 vs. limit=15.0
+2024-07-27 18:35:26,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.17 vs. limit=22.5
+2024-07-27 18:35:27,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=48930.666666666664, ans=0.125
+2024-07-27 18:35:35,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.62 vs. limit=12.0
+2024-07-27 18:35:37,386 INFO [train.py:1114] (2/4) Epoch 4, batch 6050, loss[loss=0.2327, simple_loss=0.305, pruned_loss=0.08017, over 4787.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3342, pruned_loss=0.09344, over 938192.07 frames. ], batch size: 12, lr: 1.75e-02, grad_scale: 32.0
+2024-07-27 18:35:37,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=48957.333333333336, ans=0.0
+2024-07-27 18:35:38,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=48957.333333333336, ans=0.2
+2024-07-27 18:35:42,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.393e+01 7.329e+01 8.400e+01 1.158e+02, threshold=1.466e+02, percent-clipped=0.0
+2024-07-27 18:35:45,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=48970.666666666664, ans=0.0
+2024-07-27 18:35:49,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=48970.666666666664, ans=15.0
+2024-07-27 18:35:56,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=48984.0, ans=0.125
+2024-07-27 18:35:58,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=48984.0, ans=0.125
+2024-07-27 18:35:58,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=48984.0, ans=0.00022086956521739053
+2024-07-27 18:36:12,771 INFO [train.py:1114] (2/4) Epoch 4, batch 6100, loss[loss=0.2399, simple_loss=0.3224, pruned_loss=0.07875, over 4693.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.333, pruned_loss=0.09312, over 937982.11 frames. ], batch size: 15, lr: 1.75e-02, grad_scale: 32.0
+2024-07-27 18:36:14,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=49024.0, ans=0.05
+2024-07-27 18:36:17,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=49024.0, ans=0.125
+2024-07-27 18:36:21,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=49037.333333333336, ans=0.125
+2024-07-27 18:36:26,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49050.666666666664, ans=0.1
+2024-07-27 18:36:32,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.43 vs. limit=15.0
+2024-07-27 18:36:46,622 INFO [train.py:1114] (2/4) Epoch 4, batch 6150, loss[loss=0.3424, simple_loss=0.3898, pruned_loss=0.1475, over 3405.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3331, pruned_loss=0.09289, over 936452.98 frames. ], batch size: 35, lr: 1.75e-02, grad_scale: 32.0
+2024-07-27 18:36:46,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=49090.666666666664, ans=0.125
+2024-07-27 18:36:50,105 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.101e+01 6.312e+01 7.204e+01 8.554e+01 1.450e+02, threshold=1.441e+02, percent-clipped=0.0
+2024-07-27 18:36:54,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=49104.0, ans=0.00019478260869565216
+2024-07-27 18:36:56,528 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:37:05,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=49117.333333333336, ans=15.0
+2024-07-27 18:37:08,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=49130.666666666664, ans=0.125
+2024-07-27 18:37:12,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49130.666666666664, ans=0.1
+2024-07-27 18:37:20,684 INFO [train.py:1114] (2/4) Epoch 4, batch 6200, loss[loss=0.3148, simple_loss=0.3903, pruned_loss=0.1197, over 4743.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3336, pruned_loss=0.09322, over 936094.89 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0
+2024-07-27 18:37:22,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=49157.333333333336, ans=0.125
+2024-07-27 18:37:54,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=49197.333333333336, ans=0.025
+2024-07-27 18:37:58,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=49210.666666666664, ans=0.125
+2024-07-27 18:37:58,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49210.666666666664, ans=0.1
+2024-07-27 18:37:58,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49210.666666666664, ans=0.1
+2024-07-27 18:38:05,393 INFO [train.py:1114] (2/4) Epoch 4, batch 6250, loss[loss=0.2744, simple_loss=0.3387, pruned_loss=0.1051, over 4816.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3332, pruned_loss=0.09351, over 933131.28 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0
+2024-07-27 18:38:08,828 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+01 6.578e+01 7.418e+01 8.909e+01 1.704e+02, threshold=1.484e+02, percent-clipped=3.0
+2024-07-27 18:38:11,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=49237.333333333336, ans=0.125
+2024-07-27 18:38:14,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.27 vs. limit=22.5
+2024-07-27 18:38:22,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0
+2024-07-27 18:38:23,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=49250.666666666664, ans=0.1
+2024-07-27 18:38:25,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=49250.666666666664, ans=0.125
+2024-07-27 18:38:40,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=49277.333333333336, ans=0.0
+2024-07-27 18:39:02,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=49277.333333333336, ans=0.2
+2024-07-27 18:39:03,631 INFO [train.py:1114] (2/4) Epoch 4, batch 6300, loss[loss=0.2354, simple_loss=0.3072, pruned_loss=0.08184, over 4508.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3343, pruned_loss=0.09458, over 929615.28 frames. ], batch size: 10, lr: 1.75e-02, grad_scale: 16.0
+2024-07-27 18:39:17,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=49317.333333333336, ans=0.025
+2024-07-27 18:39:23,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.23 vs. limit=22.5
+2024-07-27 18:39:28,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=49330.666666666664, ans=0.125
+2024-07-27 18:39:29,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49330.666666666664, ans=0.1
+2024-07-27 18:39:29,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=49330.666666666664, ans=0.0
+2024-07-27 18:39:49,715 INFO [train.py:1114] (2/4) Epoch 4, batch 6350, loss[loss=0.3269, simple_loss=0.3783, pruned_loss=0.1378, over 4464.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3335, pruned_loss=0.09407, over 933581.78 frames. ], batch size: 21, lr: 1.75e-02, grad_scale: 16.0
+2024-07-27 18:40:02,487 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.143e+01 6.766e+01 7.753e+01 2.111e+02, threshold=1.353e+02, percent-clipped=1.0
+2024-07-27 18:40:15,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.87 vs. limit=15.0
+2024-07-27 18:40:15,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.13 vs. limit=22.5
+2024-07-27 18:40:19,840 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:40:21,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0
+2024-07-27 18:40:31,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=49397.333333333336, ans=0.0001310144927536231
+2024-07-27 18:41:05,489 INFO [train.py:1114] (2/4) Epoch 4, batch 6400, loss[loss=0.2287, simple_loss=0.3024, pruned_loss=0.07751, over 4633.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3344, pruned_loss=0.0946, over 935248.31 frames. ], batch size: 13, lr: 1.75e-02, grad_scale: 32.0
+2024-07-27 18:41:08,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49424.0, ans=0.1
+2024-07-27 18:41:26,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0
+2024-07-27 18:41:26,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=49464.0, ans=0.125
+2024-07-27 18:41:31,384 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.25 vs. limit=15.0
+2024-07-27 18:41:39,085 INFO [train.py:1114] (2/4) Epoch 4, batch 6450, loss[loss=0.2749, simple_loss=0.3381, pruned_loss=0.1058, over 4476.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3359, pruned_loss=0.09531, over 938865.91 frames. ], batch size: 21, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:41:42,983 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.447e+01 6.416e+01 7.153e+01 7.876e+01 1.277e+02, threshold=1.431e+02, percent-clipped=0.0
+2024-07-27 18:41:43,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49490.666666666664, ans=0.125
+2024-07-27 18:41:46,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49504.0, ans=0.125
+2024-07-27 18:41:47,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49504.0, ans=0.1
+2024-07-27 18:41:50,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.42 vs. limit=22.5
+2024-07-27 18:41:51,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=49517.333333333336, ans=0.125
+2024-07-27 18:42:06,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49530.666666666664, ans=0.125
+2024-07-27 18:42:19,506 INFO [train.py:1114] (2/4) Epoch 4, batch 6500, loss[loss=0.3758, simple_loss=0.3926, pruned_loss=0.1795, over 3412.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3336, pruned_loss=0.09389, over 940135.98 frames. ], batch size: 35, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:42:25,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=49557.333333333336, ans=0.025
+2024-07-27 18:42:29,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=49570.666666666664, ans=0.125
+2024-07-27 18:42:31,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=49570.666666666664, ans=0.125
+2024-07-27 18:42:40,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.38 vs. limit=12.0
+2024-07-27 18:42:43,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=49584.0, ans=0.125
+2024-07-27 18:42:44,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=49584.0, ans=0.025
+2024-07-27 18:42:44,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=15.0
+2024-07-27 18:42:52,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=49597.333333333336, ans=0.125
+2024-07-27 18:42:52,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=49597.333333333336, ans=8.7536231884058e-05
+2024-07-27 18:43:10,166 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:43:14,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=49610.666666666664, ans=0.1
+2024-07-27 18:43:19,881 INFO [train.py:1114] (2/4) Epoch 4, batch 6550, loss[loss=0.2334, simple_loss=0.3035, pruned_loss=0.08166, over 4820.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3325, pruned_loss=0.09288, over 943056.47 frames. ], batch size: 11, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:43:23,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=49624.0, ans=0.0
+2024-07-27 18:43:23,931 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.133e+01 6.247e+01 6.814e+01 7.966e+01 1.482e+02, threshold=1.363e+02, percent-clipped=1.0
+2024-07-27 18:43:30,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49637.333333333336, ans=0.125
+2024-07-27 18:43:37,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=49650.666666666664, ans=0.2
+2024-07-27 18:43:56,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49677.333333333336, ans=0.125
+2024-07-27 18:44:02,858 INFO [train.py:1114] (2/4) Epoch 4, batch 6600, loss[loss=0.2326, simple_loss=0.3229, pruned_loss=0.07112, over 4933.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3324, pruned_loss=0.09256, over 944945.10 frames. ], batch size: 14, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:44:17,333 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 18:44:26,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=49717.333333333336, ans=0.025
+2024-07-27 18:44:27,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=49717.333333333336, ans=0.125
+2024-07-27 18:44:33,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=49730.666666666664, ans=0.0
+2024-07-27 18:44:41,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49730.666666666664, ans=0.125
+2024-07-27 18:44:47,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=49744.0, ans=0.2
+2024-07-27 18:44:51,861 INFO [train.py:1114] (2/4) Epoch 4, batch 6650, loss[loss=0.2498, simple_loss=0.324, pruned_loss=0.08779, over 4643.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3333, pruned_loss=0.09349, over 943617.22 frames. ], batch size: 17, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:44:59,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=49757.333333333336, ans=0.0
+2024-07-27 18:45:00,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.08 vs. limit=15.0
+2024-07-27 18:45:01,625 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.604e+01 6.574e+01 7.387e+01 9.385e+01 1.471e+02, threshold=1.477e+02, percent-clipped=2.0
+2024-07-27 18:45:06,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5
+2024-07-27 18:45:06,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49770.666666666664, ans=0.125
+2024-07-27 18:45:07,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=49770.666666666664, ans=0.125
+2024-07-27 18:45:23,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=49797.333333333336, ans=0.125
+2024-07-27 18:45:24,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=49797.333333333336, ans=0.0
+2024-07-27 18:45:29,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=49797.333333333336, ans=0.05
+2024-07-27 18:45:37,469 INFO [train.py:1114] (2/4) Epoch 4, batch 6700, loss[loss=0.2598, simple_loss=0.3455, pruned_loss=0.08707, over 4664.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3341, pruned_loss=0.09346, over 942109.59 frames. ], batch size: 19, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:45:42,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49824.0, ans=0.1
+2024-07-27 18:45:45,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=49837.333333333336, ans=0.0
+2024-07-27 18:45:48,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=49837.333333333336, ans=0.125
+2024-07-27 18:45:54,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=49850.666666666664, ans=0.0
+2024-07-27 18:46:26,240 INFO [train.py:1114] (2/4) Epoch 4, batch 6750, loss[loss=0.3093, simple_loss=0.3671, pruned_loss=0.1258, over 4239.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3338, pruned_loss=0.09314, over 940522.07 frames. ], batch size: 25, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:46:30,212 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.238e+01 6.545e+01 7.445e+01 9.250e+01 1.508e+02, threshold=1.489e+02, percent-clipped=1.0
+2024-07-27 18:46:31,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=49890.666666666664, ans=0.125
+2024-07-27 18:46:31,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49890.666666666664, ans=0.125
+2024-07-27 18:46:39,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=49904.0, ans=2.0869565217391736e-05
+2024-07-27 18:46:41,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0
+2024-07-27 18:46:48,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49930.666666666664, ans=0.1
+2024-07-27 18:46:50,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.88 vs. limit=22.5
+2024-07-27 18:46:59,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=49944.0, ans=0.05
+2024-07-27 18:47:00,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=49957.333333333336, ans=0.125
+2024-07-27 18:47:01,349 INFO [train.py:1114] (2/4) Epoch 4, batch 6800, loss[loss=0.279, simple_loss=0.3591, pruned_loss=0.09944, over 4630.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.334, pruned_loss=0.09273, over 938597.66 frames. ], batch size: 13, lr: 1.74e-02, grad_scale: 32.0
+2024-07-27 18:47:22,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.98 vs. limit=15.0
+2024-07-27 18:47:32,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.63 vs. limit=15.0
+2024-07-27 18:47:33,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=49970.666666666664, ans=0.2
+2024-07-27 18:47:34,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.91 vs. limit=6.0
+2024-07-27 18:48:37,390 INFO [train.py:1114] (2/4) Epoch 4, batch 6850, loss[loss=0.2784, simple_loss=0.3534, pruned_loss=0.1017, over 4692.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3323, pruned_loss=0.09166, over 940549.39 frames. ], batch size: 13, lr: 1.74e-02, grad_scale: 16.0
+2024-07-27 18:48:37,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=50024.0, ans=0.2
+2024-07-27 18:48:42,393 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.490e+01 7.044e+01 8.185e+01 1.640e+02, threshold=1.409e+02, percent-clipped=3.0
+2024-07-27 18:49:11,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=50050.666666666664, ans=0.2
+2024-07-27 18:49:19,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=50064.0, ans=0.025
+2024-07-27 18:49:29,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=50077.333333333336, ans=0.125
+2024-07-27 18:49:31,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=50077.333333333336, ans=0.125
+2024-07-27 18:49:31,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=50077.333333333336, ans=0.1
+2024-07-27 18:49:43,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=50077.333333333336, ans=0.2
+2024-07-27 18:49:46,657 INFO [train.py:1114] (2/4) Epoch 4, batch 6900, loss[loss=0.2564, simple_loss=0.3328, pruned_loss=0.09005, over 4968.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3317, pruned_loss=0.09182, over 942643.34 frames. ], batch size: 13, lr: 1.73e-02, grad_scale: 16.0
+2024-07-27 18:49:48,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0
+2024-07-27 18:49:51,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=50090.666666666664, ans=0.0
+2024-07-27 18:50:44,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=50130.666666666664, ans=0.0
+2024-07-27 18:50:49,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.54 vs. limit=15.0
+2024-07-27 18:50:50,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=50144.0, ans=0.0
+2024-07-27 18:50:56,892 INFO [train.py:1114] (2/4) Epoch 4, batch 6950, loss[loss=0.2122, simple_loss=0.2775, pruned_loss=0.07344, over 4482.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3315, pruned_loss=0.09234, over 939707.89 frames. ], batch size: 10, lr: 1.73e-02, grad_scale: 16.0
+2024-07-27 18:51:01,452 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.625e+01 7.241e+01 8.326e+01 1.274e+02, threshold=1.448e+02, percent-clipped=0.0
+2024-07-27 18:51:07,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=50170.666666666664, ans=15.0
+2024-07-27 18:51:14,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50184.0, ans=0.1
+2024-07-27 18:51:25,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=50210.666666666664, ans=0.0
+2024-07-27 18:51:26,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=50210.666666666664, ans=0.125
+2024-07-27 18:51:27,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=50210.666666666664, ans=0.125
+2024-07-27 18:51:31,308 INFO [train.py:1114] (2/4) Epoch 4, batch 7000, loss[loss=0.3556, simple_loss=0.4068, pruned_loss=0.1523, over 4632.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3317, pruned_loss=0.09247, over 938876.72 frames. ], batch size: 17, lr: 1.73e-02, grad_scale: 16.0
+2024-07-27 18:51:39,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50237.333333333336, ans=0.1
+2024-07-27 18:51:45,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50250.666666666664, ans=0.1
+2024-07-27 18:51:52,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=50264.0, ans=0.125
+2024-07-27 18:52:05,903 INFO [train.py:1114] (2/4) Epoch 4, batch 7050, loss[loss=0.2737, simple_loss=0.3525, pruned_loss=0.09747, over 4679.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3326, pruned_loss=0.09231, over 942211.42 frames. ], batch size: 19, lr: 1.73e-02, grad_scale: 16.0
+2024-07-27 18:52:10,852 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.665e+01 7.548e+01 9.503e+01 1.584e+02, threshold=1.510e+02, percent-clipped=1.0
+2024-07-27 18:52:17,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.39 vs. limit=22.5
+2024-07-27 18:52:19,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=50317.333333333336, ans=10.0
+2024-07-27 18:52:31,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=50330.666666666664, ans=10.0
+2024-07-27 18:52:41,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.72 vs. limit=22.5
+2024-07-27 18:52:41,455 INFO [train.py:1114] (2/4) Epoch 4, batch 7100, loss[loss=0.237, simple_loss=0.3117, pruned_loss=0.08116, over 4800.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3331, pruned_loss=0.09296, over 937080.62 frames. 
], batch size: 15, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:52:44,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=50357.333333333336, ans=0.125 +2024-07-27 18:52:48,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.62 vs. limit=15.0 +2024-07-27 18:53:09,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=50410.666666666664, ans=0.07 +2024-07-27 18:53:10,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=15.0 +2024-07-27 18:53:14,556 INFO [train.py:1114] (2/4) Epoch 4, batch 7150, loss[loss=0.2835, simple_loss=0.3531, pruned_loss=0.107, over 4553.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3307, pruned_loss=0.09182, over 938119.41 frames. ], batch size: 21, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:53:17,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=50424.0, ans=0.125 +2024-07-27 18:53:18,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 6.686e+01 7.675e+01 9.181e+01 1.338e+02, threshold=1.535e+02, percent-clipped=0.0 +2024-07-27 18:53:20,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=50437.333333333336, ans=0.125 +2024-07-27 18:53:31,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=50450.666666666664, ans=0.0 +2024-07-27 18:53:36,817 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0 +2024-07-27 18:53:46,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=50490.666666666664, ans=0.125 +2024-07-27 18:53:47,308 INFO [train.py:1114] (2/4) Epoch 4, batch 7200, loss[loss=0.2808, simple_loss=0.3518, pruned_loss=0.1049, over 4786.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3323, pruned_loss=0.09231, over 938579.56 frames. ], batch size: 15, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:53:48,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=50490.666666666664, ans=0.125 +2024-07-27 18:54:06,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=50530.666666666664, ans=0.0 +2024-07-27 18:54:13,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=50544.0, ans=0.125 +2024-07-27 18:54:18,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50544.0, ans=0.1 +2024-07-27 18:54:19,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.87 vs. limit=10.0 +2024-07-27 18:54:20,077 INFO [train.py:1114] (2/4) Epoch 4, batch 7250, loss[loss=0.2433, simple_loss=0.3134, pruned_loss=0.08655, over 4842.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3312, pruned_loss=0.09203, over 939778.29 frames. 
], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:54:24,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.173e+01 6.374e+01 7.128e+01 8.077e+01 1.230e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 18:54:30,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-07-27 18:54:41,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=50597.333333333336, ans=0.2 +2024-07-27 18:54:51,540 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.01 vs. limit=22.5 +2024-07-27 18:54:52,629 INFO [train.py:1114] (2/4) Epoch 4, batch 7300, loss[loss=0.2411, simple_loss=0.3011, pruned_loss=0.09053, over 4860.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3312, pruned_loss=0.09226, over 939315.57 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:54:53,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=50624.0, ans=0.125 +2024-07-27 18:54:59,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=50637.333333333336, ans=0.0 +2024-07-27 18:55:13,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=50664.0, ans=0.125 +2024-07-27 18:55:14,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.46 vs. limit=22.5 +2024-07-27 18:55:14,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=50664.0, ans=0.0 +2024-07-27 18:55:25,736 INFO [train.py:1114] (2/4) Epoch 4, batch 7350, loss[loss=0.267, simple_loss=0.3422, pruned_loss=0.09592, over 4645.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3309, pruned_loss=0.09202, over 938904.59 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:55:30,228 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.562e+01 7.152e+01 9.266e+01 1.352e+02, threshold=1.430e+02, percent-clipped=0.0 +2024-07-27 18:55:40,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.33 vs. limit=6.0 +2024-07-27 18:55:45,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=50730.666666666664, ans=0.125 +2024-07-27 18:55:47,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.31 vs. limit=15.0 +2024-07-27 18:55:58,374 INFO [train.py:1114] (2/4) Epoch 4, batch 7400, loss[loss=0.2553, simple_loss=0.3337, pruned_loss=0.08848, over 4691.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3307, pruned_loss=0.09197, over 940095.05 frames. 
], batch size: 13, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:56:05,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=50770.666666666664, ans=0.1 +2024-07-27 18:56:12,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=50784.0, ans=0.125 +2024-07-27 18:56:17,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=50784.0, ans=0.125 +2024-07-27 18:56:19,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=50797.333333333336, ans=0.125 +2024-07-27 18:56:20,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=50797.333333333336, ans=0.0 +2024-07-27 18:56:32,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=50824.0, ans=0.09899494936611666 +2024-07-27 18:56:32,550 INFO [train.py:1114] (2/4) Epoch 4, batch 7450, loss[loss=0.2521, simple_loss=0.3259, pruned_loss=0.08917, over 4622.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3305, pruned_loss=0.09173, over 937264.52 frames. ], batch size: 11, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:56:37,129 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.488e+01 6.489e+01 7.278e+01 8.154e+01 1.203e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 18:56:38,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=50824.0, ans=0.125 +2024-07-27 18:56:57,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=50864.0, ans=0.0 +2024-07-27 18:57:01,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=50877.333333333336, ans=0.125 +2024-07-27 18:57:06,486 INFO [train.py:1114] (2/4) Epoch 4, batch 7500, loss[loss=0.2756, simple_loss=0.3348, pruned_loss=0.1082, over 3108.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.331, pruned_loss=0.09202, over 935041.06 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:57:16,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=50904.0, ans=0.125 +2024-07-27 18:57:18,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.11 vs. limit=15.0 +2024-07-27 18:57:27,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=50930.666666666664, ans=0.125 +2024-07-27 18:57:32,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.17 vs. 
limit=10.0 +2024-07-27 18:57:33,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=50944.0, ans=0.025 +2024-07-27 18:57:36,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=50944.0, ans=0.0 +2024-07-27 18:57:36,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=50944.0, ans=0.2 +2024-07-27 18:57:37,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=50944.0, ans=0.125 +2024-07-27 18:57:39,304 INFO [train.py:1114] (2/4) Epoch 4, batch 7550, loss[loss=0.2742, simple_loss=0.3516, pruned_loss=0.09843, over 4627.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3325, pruned_loss=0.09255, over 934954.84 frames. ], batch size: 17, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:57:39,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=50957.333333333336, ans=0.125 +2024-07-27 18:57:46,171 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.76 vs. limit=15.0 +2024-07-27 18:57:46,435 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.472e+01 6.522e+01 7.335e+01 8.635e+01 1.380e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 18:57:50,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=50970.666666666664, ans=0.0 +2024-07-27 18:57:50,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=50970.666666666664, ans=0.0 +2024-07-27 18:57:52,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.32 vs. limit=12.0 +2024-07-27 18:58:06,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=50984.0, ans=0.2 +2024-07-27 18:58:07,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=22.5 +2024-07-27 18:58:14,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.04 vs. limit=6.0 +2024-07-27 18:58:23,626 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:58:24,740 INFO [train.py:1114] (2/4) Epoch 4, batch 7600, loss[loss=0.2284, simple_loss=0.3105, pruned_loss=0.07315, over 4821.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3321, pruned_loss=0.09258, over 936954.37 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:58:34,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.74 vs. 
limit=22.5 +2024-07-27 18:58:49,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=51050.666666666664, ans=0.025 +2024-07-27 18:58:53,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=51064.0, ans=0.0 +2024-07-27 18:58:54,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=51064.0, ans=0.0 +2024-07-27 18:59:06,954 INFO [train.py:1114] (2/4) Epoch 4, batch 7650, loss[loss=0.2185, simple_loss=0.2935, pruned_loss=0.07181, over 4944.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3316, pruned_loss=0.09241, over 935846.73 frames. ], batch size: 12, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:59:09,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51090.666666666664, ans=0.125 +2024-07-27 18:59:11,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=51090.666666666664, ans=0.125 +2024-07-27 18:59:13,318 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.235e+01 6.494e+01 7.893e+01 8.811e+01 1.540e+02, threshold=1.579e+02, percent-clipped=3.0 +2024-07-27 18:59:23,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=51104.0, ans=0.125 +2024-07-27 18:59:26,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=51117.333333333336, ans=0.2 +2024-07-27 18:59:34,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=51130.666666666664, ans=0.0 +2024-07-27 18:59:44,103 INFO [train.py:1114] (2/4) Epoch 4, batch 7700, loss[loss=0.2602, simple_loss=0.3246, pruned_loss=0.09788, over 4698.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3329, pruned_loss=0.0929, over 933855.50 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:59:46,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=51157.333333333336, ans=0.0 +2024-07-27 18:59:50,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=51170.666666666664, ans=0.0 +2024-07-27 18:59:55,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=51170.666666666664, ans=0.2 +2024-07-27 19:00:03,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51184.0, ans=0.125 +2024-07-27 19:00:05,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=51197.333333333336, ans=0.0 +2024-07-27 19:00:09,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51197.333333333336, ans=0.1 +2024-07-27 19:00:15,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.63 vs. limit=15.0 +2024-07-27 19:00:19,740 INFO [train.py:1114] (2/4) Epoch 4, batch 7750, loss[loss=0.2486, simple_loss=0.3418, pruned_loss=0.07769, over 4925.00 frames. 
], tot_loss[loss=0.2616, simple_loss=0.3354, pruned_loss=0.09391, over 935253.29 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 19:00:19,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51224.0, ans=0.125 +2024-07-27 19:00:24,703 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.378e+01 6.531e+01 7.452e+01 8.452e+01 1.344e+02, threshold=1.490e+02, percent-clipped=0.0 +2024-07-27 19:00:25,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=51224.0, ans=0.025 +2024-07-27 19:00:35,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=51250.666666666664, ans=0.1 +2024-07-27 19:00:53,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=51250.666666666664, ans=0.125 +2024-07-27 19:01:01,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51264.0, ans=0.125 +2024-07-27 19:01:02,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.74 vs. limit=10.0 +2024-07-27 19:01:09,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=51264.0, ans=0.125 +2024-07-27 19:01:21,786 INFO [train.py:1114] (2/4) Epoch 4, batch 7800, loss[loss=0.2962, simple_loss=0.3738, pruned_loss=0.1093, over 4662.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3361, pruned_loss=0.09445, over 937126.18 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 19:01:25,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.84 vs. limit=22.5 +2024-07-27 19:01:26,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=51290.666666666664, ans=0.125 +2024-07-27 19:01:33,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=51304.0, ans=0.125 +2024-07-27 19:01:50,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=51330.666666666664, ans=0.125 +2024-07-27 19:01:59,103 INFO [train.py:1114] (2/4) Epoch 4, batch 7850, loss[loss=0.2194, simple_loss=0.2869, pruned_loss=0.07592, over 4534.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.337, pruned_loss=0.09478, over 935459.16 frames. ], batch size: 10, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:02:00,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.04 vs. 
limit=22.5 +2024-07-27 19:02:04,740 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.164e+01 6.243e+01 7.019e+01 7.976e+01 1.332e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 19:02:22,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=51384.0, ans=0.125 +2024-07-27 19:02:22,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=51384.0, ans=0.125 +2024-07-27 19:02:30,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51397.333333333336, ans=0.0 +2024-07-27 19:02:31,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=51397.333333333336, ans=0.1 +2024-07-27 19:02:37,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51410.666666666664, ans=0.125 +2024-07-27 19:02:39,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=51410.666666666664, ans=0.0 +2024-07-27 19:02:39,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=51410.666666666664, ans=0.2 +2024-07-27 19:02:43,538 INFO [train.py:1114] (2/4) Epoch 4, batch 7900, loss[loss=0.307, simple_loss=0.3711, pruned_loss=0.1214, over 4878.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3377, pruned_loss=0.0951, over 933154.24 frames. ], batch size: 14, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:02:46,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0 +2024-07-27 19:02:50,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51437.333333333336, ans=0.125 +2024-07-27 19:02:55,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=51437.333333333336, ans=0.02 +2024-07-27 19:02:58,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=51450.666666666664, ans=0.0 +2024-07-27 19:03:00,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=51450.666666666664, ans=0.2 +2024-07-27 19:03:06,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=51464.0, ans=0.0 +2024-07-27 19:03:20,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=51477.333333333336, ans=0.025 +2024-07-27 19:03:23,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=12.0 +2024-07-27 19:03:25,611 INFO [train.py:1114] (2/4) Epoch 4, batch 7950, loss[loss=0.3583, simple_loss=0.4035, pruned_loss=0.1565, over 3008.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3365, pruned_loss=0.09405, over 934912.37 frames. 
], batch size: 35, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:03:30,105 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.275e+01 6.617e+01 8.169e+01 1.040e+02 2.019e+02, threshold=1.634e+02, percent-clipped=10.0 +2024-07-27 19:03:32,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=51504.0, ans=0.0 +2024-07-27 19:03:39,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=51517.333333333336, ans=0.125 +2024-07-27 19:03:41,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=51517.333333333336, ans=0.0 +2024-07-27 19:03:42,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.28 vs. limit=15.0 +2024-07-27 19:03:48,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=51530.666666666664, ans=0.0 +2024-07-27 19:04:01,617 INFO [train.py:1114] (2/4) Epoch 4, batch 8000, loss[loss=0.2485, simple_loss=0.3054, pruned_loss=0.09578, over 4617.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.334, pruned_loss=0.09314, over 934069.10 frames. ], batch size: 11, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:04:28,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51597.333333333336, ans=0.0 +2024-07-27 19:04:33,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51597.333333333336, ans=0.0 +2024-07-27 19:04:34,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=51597.333333333336, ans=0.2 +2024-07-27 19:04:35,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=51597.333333333336, ans=0.0 +2024-07-27 19:04:40,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=51610.666666666664, ans=0.0 +2024-07-27 19:04:43,579 INFO [train.py:1114] (2/4) Epoch 4, batch 8050, loss[loss=0.2813, simple_loss=0.3605, pruned_loss=0.1011, over 4816.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3341, pruned_loss=0.09322, over 933263.15 frames. ], batch size: 14, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:04:48,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.358e+01 7.394e+01 8.578e+01 1.528e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 19:05:06,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51664.0, ans=0.0 +2024-07-27 19:05:07,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=51664.0, ans=15.0 +2024-07-27 19:05:08,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.27 vs. 
limit=22.5 +2024-07-27 19:05:17,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51677.333333333336, ans=0.0 +2024-07-27 19:05:18,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=51677.333333333336, ans=0.0 +2024-07-27 19:05:20,002 INFO [train.py:1114] (2/4) Epoch 4, batch 8100, loss[loss=0.2709, simple_loss=0.3369, pruned_loss=0.1024, over 4817.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3336, pruned_loss=0.09279, over 933224.99 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:05:28,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=51704.0, ans=0.125 +2024-07-27 19:05:34,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.32 vs. limit=15.0 +2024-07-27 19:05:34,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=51717.333333333336, ans=0.0 +2024-07-27 19:05:36,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.11 vs. limit=15.0 +2024-07-27 19:05:41,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=15.0 +2024-07-27 19:05:53,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.73 vs. limit=15.0 +2024-07-27 19:05:57,494 INFO [train.py:1114] (2/4) Epoch 4, batch 8150, loss[loss=0.2711, simple_loss=0.3492, pruned_loss=0.09646, over 4798.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3316, pruned_loss=0.09181, over 936978.46 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:06:02,176 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.306e+01 6.341e+01 7.110e+01 7.968e+01 1.215e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 19:06:05,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51770.666666666664, ans=0.125 +2024-07-27 19:06:09,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=51770.666666666664, ans=0.0 +2024-07-27 19:06:10,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=51784.0, ans=0.2 +2024-07-27 19:06:11,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=51784.0, ans=0.0 +2024-07-27 19:06:31,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=51824.0, ans=0.5 +2024-07-27 19:06:32,396 INFO [train.py:1114] (2/4) Epoch 4, batch 8200, loss[loss=0.2659, simple_loss=0.3332, pruned_loss=0.09925, over 4801.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3319, pruned_loss=0.09152, over 938160.42 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:06:34,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.32 vs. 
limit=15.0 +2024-07-27 19:06:34,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=51824.0, ans=0.2 +2024-07-27 19:06:50,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=51850.666666666664, ans=0.125 +2024-07-27 19:06:55,084 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:06:58,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=51877.333333333336, ans=0.025 +2024-07-27 19:06:58,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=51877.333333333336, ans=0.125 +2024-07-27 19:07:03,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=51877.333333333336, ans=0.125 +2024-07-27 19:07:04,515 INFO [train.py:1114] (2/4) Epoch 4, batch 8250, loss[loss=0.2716, simple_loss=0.3363, pruned_loss=0.1034, over 4888.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3325, pruned_loss=0.09182, over 938622.97 frames. ], batch size: 13, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:07:09,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.190e+01 7.037e+01 8.392e+01 1.258e+02, threshold=1.407e+02, percent-clipped=0.0 +2024-07-27 19:07:10,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=51904.0, ans=0.125 +2024-07-27 19:07:15,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.13 vs. limit=15.0 +2024-07-27 19:07:19,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=51917.333333333336, ans=0.025 +2024-07-27 19:07:28,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=51930.666666666664, ans=0.125 +2024-07-27 19:07:29,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=51930.666666666664, ans=0.125 +2024-07-27 19:07:33,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=51944.0, ans=0.125 +2024-07-27 19:07:37,257 INFO [train.py:1114] (2/4) Epoch 4, batch 8300, loss[loss=0.2765, simple_loss=0.3588, pruned_loss=0.09712, over 4907.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3337, pruned_loss=0.0926, over 938275.08 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:07:44,840 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:07:54,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51984.0, ans=0.1 +2024-07-27 19:07:55,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=51984.0, ans=0.0 +2024-07-27 19:08:00,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.07 vs. 
limit=15.0 +2024-07-27 19:08:03,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51997.333333333336, ans=0.1 +2024-07-27 19:08:11,095 INFO [train.py:1114] (2/4) Epoch 4, batch 8350, loss[loss=0.3018, simple_loss=0.3628, pruned_loss=0.1205, over 4803.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3337, pruned_loss=0.09251, over 941157.11 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:08:11,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=52024.0, ans=0.125 +2024-07-27 19:08:15,732 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 6.472e+01 7.036e+01 8.315e+01 1.538e+02, threshold=1.407e+02, percent-clipped=2.0 +2024-07-27 19:08:20,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=52037.333333333336, ans=0.125 +2024-07-27 19:08:24,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=52050.666666666664, ans=0.95 +2024-07-27 19:08:24,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=52050.666666666664, ans=0.0 +2024-07-27 19:08:35,664 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:08:44,798 INFO [train.py:1114] (2/4) Epoch 4, batch 8400, loss[loss=0.2029, simple_loss=0.2812, pruned_loss=0.06232, over 4777.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3349, pruned_loss=0.09325, over 939612.57 frames. ], batch size: 12, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:08:51,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=52104.0, ans=0.02 +2024-07-27 19:08:52,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0 +2024-07-27 19:09:03,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.21 vs. limit=10.0 +2024-07-27 19:09:04,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52117.333333333336, ans=0.125 +2024-07-27 19:09:09,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.14 vs. limit=22.5 +2024-07-27 19:09:09,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=52130.666666666664, ans=0.125 +2024-07-27 19:09:09,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=52130.666666666664, ans=0.125 +2024-07-27 19:09:11,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=52130.666666666664, ans=0.05 +2024-07-27 19:09:19,575 INFO [train.py:1114] (2/4) Epoch 4, batch 8450, loss[loss=0.2732, simple_loss=0.3576, pruned_loss=0.09441, over 4778.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.335, pruned_loss=0.09295, over 938391.18 frames. 
], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:09:22,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52157.333333333336, ans=0.1 +2024-07-27 19:09:24,026 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.588e+01 7.394e+01 8.228e+01 1.463e+02, threshold=1.479e+02, percent-clipped=1.0 +2024-07-27 19:09:31,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0 +2024-07-27 19:09:34,826 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.61 vs. limit=22.5 +2024-07-27 19:09:38,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0 +2024-07-27 19:09:42,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0 +2024-07-27 19:09:46,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=52210.666666666664, ans=0.0 +2024-07-27 19:09:49,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=52210.666666666664, ans=0.125 +2024-07-27 19:09:51,711 INFO [train.py:1114] (2/4) Epoch 4, batch 8500, loss[loss=0.2204, simple_loss=0.2945, pruned_loss=0.07309, over 4595.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3343, pruned_loss=0.09277, over 938217.56 frames. ], batch size: 11, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:09:51,911 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:09:53,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52224.0, ans=0.1 +2024-07-27 19:09:53,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=52224.0, ans=0.0 +2024-07-27 19:10:02,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.67 vs. limit=15.0 +2024-07-27 19:10:02,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52237.333333333336, ans=0.1 +2024-07-27 19:10:05,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.41 vs. limit=22.5 +2024-07-27 19:10:22,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=52264.0, ans=0.05 +2024-07-27 19:10:25,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=52277.333333333336, ans=0.0 +2024-07-27 19:10:35,613 INFO [train.py:1114] (2/4) Epoch 4, batch 8550, loss[loss=0.2327, simple_loss=0.3024, pruned_loss=0.08154, over 4808.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3342, pruned_loss=0.09293, over 939152.16 frames. 
], batch size: 11, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:10:38,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=52290.666666666664, ans=0.09899494936611666 +2024-07-27 19:10:40,189 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.270e+01 6.908e+01 7.613e+01 1.129e+02, threshold=1.382e+02, percent-clipped=0.0 +2024-07-27 19:10:46,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.92 vs. limit=22.5 +2024-07-27 19:11:09,081 INFO [train.py:1114] (2/4) Epoch 4, batch 8600, loss[loss=0.2957, simple_loss=0.376, pruned_loss=0.1077, over 4805.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3342, pruned_loss=0.09304, over 938708.41 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:11:14,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52357.333333333336, ans=0.125 +2024-07-27 19:11:15,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=52370.666666666664, ans=0.0 +2024-07-27 19:11:18,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0 +2024-07-27 19:11:23,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52384.0, ans=0.1 +2024-07-27 19:11:25,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=52384.0, ans=0.1 +2024-07-27 19:11:25,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52384.0, ans=0.125 +2024-07-27 19:11:31,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=52397.333333333336, ans=0.125 +2024-07-27 19:11:43,032 INFO [train.py:1114] (2/4) Epoch 4, batch 8650, loss[loss=0.3043, simple_loss=0.3714, pruned_loss=0.1186, over 4909.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3352, pruned_loss=0.09407, over 940153.43 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:11:45,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.21 vs. 
limit=15.0 +2024-07-27 19:11:54,072 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.219e+01 6.477e+01 7.280e+01 8.362e+01 1.223e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 19:12:02,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=52437.333333333336, ans=0.2 +2024-07-27 19:12:02,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=52437.333333333336, ans=0.125 +2024-07-27 19:12:05,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=52450.666666666664, ans=0.0 +2024-07-27 19:12:07,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52450.666666666664, ans=0.1 +2024-07-27 19:12:12,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=22.5 +2024-07-27 19:12:13,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=52464.0, ans=0.0 +2024-07-27 19:12:14,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52464.0, ans=0.125 +2024-07-27 19:12:18,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.55 vs. limit=15.0 +2024-07-27 19:12:23,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.65 vs. limit=10.0 +2024-07-27 19:12:24,878 INFO [train.py:1114] (2/4) Epoch 4, batch 8700, loss[loss=0.2949, simple_loss=0.3558, pruned_loss=0.117, over 4756.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3373, pruned_loss=0.0955, over 937775.73 frames. ], batch size: 13, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:12:26,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52490.666666666664, ans=0.1 +2024-07-27 19:12:28,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52490.666666666664, ans=0.1 +2024-07-27 19:12:32,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0 +2024-07-27 19:12:38,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=52517.333333333336, ans=0.125 +2024-07-27 19:12:40,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=52517.333333333336, ans=0.125 +2024-07-27 19:13:03,653 INFO [train.py:1114] (2/4) Epoch 4, batch 8750, loss[loss=0.2883, simple_loss=0.3646, pruned_loss=0.106, over 4674.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3363, pruned_loss=0.09469, over 936602.65 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:16:50,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.77 vs. 
limit=8.0 +2024-07-27 19:16:51,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.508e+01 7.367e+01 8.337e+01 1.242e+02, threshold=1.473e+02, percent-clipped=0.0 +2024-07-27 19:17:27,556 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:17:39,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=52597.333333333336, ans=0.0 +2024-07-27 19:17:48,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=52624.0, ans=0.125 +2024-07-27 19:17:48,789 INFO [train.py:1114] (2/4) Epoch 4, batch 8800, loss[loss=0.263, simple_loss=0.3395, pruned_loss=0.09326, over 4931.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3363, pruned_loss=0.09453, over 937365.57 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 32.0 +2024-07-27 19:17:54,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=52624.0, ans=0.0 +2024-07-27 19:17:59,340 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:18:03,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=52650.666666666664, ans=0.0 +2024-07-27 19:18:30,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=52677.333333333336, ans=0.0 +2024-07-27 19:18:32,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=52677.333333333336, ans=0.2 +2024-07-27 19:18:33,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52690.666666666664, ans=0.1 +2024-07-27 19:18:34,298 INFO [train.py:1114] (2/4) Epoch 4, batch 8850, loss[loss=0.3442, simple_loss=0.4022, pruned_loss=0.1431, over 4494.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3353, pruned_loss=0.09397, over 932077.52 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:18:38,871 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.151e+01 6.204e+01 6.999e+01 8.264e+01 1.249e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 19:18:43,135 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:18:55,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=52704.0, ans=0.125 +2024-07-27 19:18:55,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.35 vs. limit=10.0 +2024-07-27 19:18:57,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=52717.333333333336, ans=0.05 +2024-07-27 19:19:07,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.36 vs. 
limit=12.0 +2024-07-27 19:19:14,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=52757.333333333336, ans=0.125 +2024-07-27 19:19:15,450 INFO [train.py:1114] (2/4) Epoch 4, batch 8900, loss[loss=0.2054, simple_loss=0.2848, pruned_loss=0.06301, over 4934.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3347, pruned_loss=0.09396, over 930219.99 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:19:18,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52757.333333333336, ans=0.125 +2024-07-27 19:19:24,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=52770.666666666664, ans=0.2 +2024-07-27 19:19:34,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=52784.0, ans=0.025 +2024-07-27 19:19:34,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=52784.0, ans=0.125 +2024-07-27 19:19:39,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=52797.333333333336, ans=0.015 +2024-07-27 19:19:40,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=15.0 +2024-07-27 19:19:49,277 INFO [train.py:1114] (2/4) Epoch 4, batch 8950, loss[loss=0.316, simple_loss=0.3834, pruned_loss=0.1243, over 4559.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3342, pruned_loss=0.0935, over 931355.22 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:19:51,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=52824.0, ans=0.125 +2024-07-27 19:19:52,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=12.0 +2024-07-27 19:19:59,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.556e+01 6.546e+01 7.266e+01 8.543e+01 1.301e+02, threshold=1.453e+02, percent-clipped=0.0 +2024-07-27 19:20:00,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52837.333333333336, ans=0.125 +2024-07-27 19:20:10,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=52850.666666666664, ans=0.2 +2024-07-27 19:20:12,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=52850.666666666664, ans=0.125 +2024-07-27 19:20:14,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=52850.666666666664, ans=0.0 +2024-07-27 19:20:14,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=52850.666666666664, ans=0.025 +2024-07-27 19:20:15,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.99 vs. 
limit=22.5 +2024-07-27 19:20:23,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=52877.333333333336, ans=0.05 +2024-07-27 19:20:24,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=52877.333333333336, ans=0.125 +2024-07-27 19:20:25,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=52877.333333333336, ans=0.05 +2024-07-27 19:20:29,459 INFO [train.py:1114] (2/4) Epoch 4, batch 9000, loss[loss=0.2443, simple_loss=0.3228, pruned_loss=0.08291, over 4633.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3326, pruned_loss=0.09288, over 934242.50 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:20:29,459 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 19:20:36,224 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.2226, 2.0732, 2.8394, 3.0327, 2.8969, 2.7481, 2.8586, 1.8203], + device='cuda:2') +2024-07-27 19:20:48,915 INFO [train.py:1146] (2/4) Epoch 4, validation: loss=0.2088, simple_loss=0.3114, pruned_loss=0.05305, over 944034.00 frames. +2024-07-27 19:20:48,915 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 19:20:56,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=52904.0, ans=0.95 +2024-07-27 19:20:58,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.56 vs. limit=10.0 +2024-07-27 19:20:58,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=52904.0, ans=0.125 +2024-07-27 19:21:00,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=52904.0, ans=0.0 +2024-07-27 19:21:27,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=52944.0, ans=0.125 +2024-07-27 19:21:41,740 INFO [train.py:1114] (2/4) Epoch 4, batch 9050, loss[loss=0.2351, simple_loss=0.2997, pruned_loss=0.08525, over 4555.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3311, pruned_loss=0.09217, over 934679.12 frames. ], batch size: 10, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:21:46,105 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 6.460e+01 7.493e+01 8.562e+01 1.240e+02, threshold=1.499e+02, percent-clipped=0.0 +2024-07-27 19:21:50,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52970.666666666664, ans=0.125 +2024-07-27 19:21:53,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=52970.666666666664, ans=0.0 +2024-07-27 19:22:01,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.65 vs. 
limit=15.0 +2024-07-27 19:22:01,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52984.0, ans=0.1 +2024-07-27 19:22:03,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52997.333333333336, ans=0.125 +2024-07-27 19:22:04,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=52997.333333333336, ans=0.125 +2024-07-27 19:22:08,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=52997.333333333336, ans=0.0 +2024-07-27 19:22:16,420 INFO [train.py:1114] (2/4) Epoch 4, batch 9100, loss[loss=0.2365, simple_loss=0.3132, pruned_loss=0.07994, over 4927.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3301, pruned_loss=0.09145, over 937106.90 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:22:48,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53050.666666666664, ans=0.1 +2024-07-27 19:22:52,854 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.61 vs. limit=22.5 +2024-07-27 19:22:58,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.55 vs. limit=15.0 +2024-07-27 19:22:58,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=53077.333333333336, ans=0.125 +2024-07-27 19:23:03,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53077.333333333336, ans=0.1 +2024-07-27 19:23:04,850 INFO [train.py:1114] (2/4) Epoch 4, batch 9150, loss[loss=0.2606, simple_loss=0.3312, pruned_loss=0.095, over 4811.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3323, pruned_loss=0.09293, over 936000.62 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:23:11,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.280e+01 7.131e+01 8.307e+01 1.469e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 19:23:19,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53117.333333333336, ans=0.1 +2024-07-27 19:23:22,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=53117.333333333336, ans=0.5 +2024-07-27 19:23:25,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=53130.666666666664, ans=0.125 +2024-07-27 19:23:29,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=53130.666666666664, ans=0.2 +2024-07-27 19:23:38,881 INFO [train.py:1114] (2/4) Epoch 4, batch 9200, loss[loss=0.2376, simple_loss=0.3139, pruned_loss=0.08071, over 4848.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3311, pruned_loss=0.09145, over 937820.73 frames. 
], batch size: 12, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:23:44,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=53170.666666666664, ans=0.125 +2024-07-27 19:23:52,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-07-27 19:23:54,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53184.0, ans=0.125 +2024-07-27 19:23:58,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.23 vs. limit=15.0 +2024-07-27 19:24:00,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53197.333333333336, ans=0.1 +2024-07-27 19:24:05,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.09 vs. limit=15.0 +2024-07-27 19:24:07,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53210.666666666664, ans=0.1 +2024-07-27 19:24:10,846 INFO [train.py:1114] (2/4) Epoch 4, batch 9250, loss[loss=0.3152, simple_loss=0.3817, pruned_loss=0.1243, over 4639.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3313, pruned_loss=0.0916, over 938725.79 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:24:15,281 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.609e+01 7.603e+01 9.259e+01 1.699e+02, threshold=1.521e+02, percent-clipped=1.0 +2024-07-27 19:24:34,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=53250.666666666664, ans=0.025 +2024-07-27 19:24:43,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=53264.0, ans=10.0 +2024-07-27 19:24:47,324 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:24:48,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.04 vs. limit=10.0 +2024-07-27 19:24:51,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=53277.333333333336, ans=0.125 +2024-07-27 19:24:53,697 INFO [train.py:1114] (2/4) Epoch 4, batch 9300, loss[loss=0.2087, simple_loss=0.2871, pruned_loss=0.0652, over 4776.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3307, pruned_loss=0.09094, over 937957.85 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:25:01,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=53304.0, ans=0.125 +2024-07-27 19:25:06,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=53317.333333333336, ans=0.2 +2024-07-27 19:25:07,430 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.88 vs. 
limit=15.0 +2024-07-27 19:25:07,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=53317.333333333336, ans=0.125 +2024-07-27 19:25:23,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=53344.0, ans=0.0 +2024-07-27 19:25:38,981 INFO [train.py:1114] (2/4) Epoch 4, batch 9350, loss[loss=0.1923, simple_loss=0.2663, pruned_loss=0.05912, over 4802.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3317, pruned_loss=0.09192, over 934760.67 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:25:43,191 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.072e+01 6.213e+01 6.915e+01 8.745e+01 1.555e+02, threshold=1.383e+02, percent-clipped=1.0 +2024-07-27 19:25:43,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=53357.333333333336, ans=0.0 +2024-07-27 19:25:43,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=53357.333333333336, ans=0.09899494936611666 +2024-07-27 19:25:46,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=53370.666666666664, ans=0.0 +2024-07-27 19:26:07,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.34 vs. limit=15.0 +2024-07-27 19:26:14,175 INFO [train.py:1114] (2/4) Epoch 4, batch 9400, loss[loss=0.2905, simple_loss=0.3624, pruned_loss=0.1093, over 4693.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3316, pruned_loss=0.09176, over 932494.20 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:26:14,207 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:26:24,947 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:26:43,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=53477.333333333336, ans=0.0 +2024-07-27 19:26:43,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=53477.333333333336, ans=0.125 +2024-07-27 19:26:47,569 INFO [train.py:1114] (2/4) Epoch 4, batch 9450, loss[loss=0.2002, simple_loss=0.2718, pruned_loss=0.06434, over 4795.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3325, pruned_loss=0.09192, over 932103.92 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:26:50,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.67 vs. limit=10.0 +2024-07-27 19:26:53,646 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.978e+01 6.050e+01 6.667e+01 7.624e+01 1.196e+02, threshold=1.333e+02, percent-clipped=0.0 +2024-07-27 19:26:54,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.43 vs. 
limit=22.5 +2024-07-27 19:26:56,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53504.0, ans=0.1 +2024-07-27 19:27:05,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=53517.333333333336, ans=0.0 +2024-07-27 19:27:21,671 INFO [train.py:1114] (2/4) Epoch 4, batch 9500, loss[loss=0.2357, simple_loss=0.3137, pruned_loss=0.07886, over 4711.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3325, pruned_loss=0.09195, over 934607.54 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:27:36,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=53584.0, ans=0.2 +2024-07-27 19:27:41,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=53597.333333333336, ans=0.125 +2024-07-27 19:27:47,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=53610.666666666664, ans=0.125 +2024-07-27 19:27:54,673 INFO [train.py:1114] (2/4) Epoch 4, batch 9550, loss[loss=0.2349, simple_loss=0.3183, pruned_loss=0.0758, over 4777.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3321, pruned_loss=0.09191, over 932145.33 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:27:58,966 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 6.641e+01 7.346e+01 8.353e+01 1.240e+02, threshold=1.469e+02, percent-clipped=0.0 +2024-07-27 19:28:00,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=53637.333333333336, ans=10.0 +2024-07-27 19:28:02,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53637.333333333336, ans=0.1 +2024-07-27 19:28:02,413 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.55 vs. limit=22.5 +2024-07-27 19:28:18,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=53650.666666666664, ans=0.2 +2024-07-27 19:28:24,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.91 vs. limit=15.0 +2024-07-27 19:28:39,652 INFO [train.py:1114] (2/4) Epoch 4, batch 9600, loss[loss=0.4037, simple_loss=0.4212, pruned_loss=0.1931, over 3209.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.332, pruned_loss=0.09214, over 931326.67 frames. ], batch size: 35, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:28:41,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=53690.666666666664, ans=0.1 +2024-07-27 19:28:44,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53690.666666666664, ans=0.1 +2024-07-27 19:28:44,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.99 vs. limit=12.0 +2024-07-27 19:28:51,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.96 vs. 
limit=15.0 +2024-07-27 19:29:04,754 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.96 vs. limit=6.0 +2024-07-27 19:29:10,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=53744.0, ans=0.0 +2024-07-27 19:29:11,840 INFO [train.py:1114] (2/4) Epoch 4, batch 9650, loss[loss=0.2661, simple_loss=0.35, pruned_loss=0.09107, over 4840.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3323, pruned_loss=0.09261, over 926693.61 frames. ], batch size: 16, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:29:17,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=53757.333333333336, ans=0.125 +2024-07-27 19:29:17,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53757.333333333336, ans=0.1 +2024-07-27 19:29:19,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.349e+01 7.028e+01 7.935e+01 1.425e+02, threshold=1.406e+02, percent-clipped=0.0 +2024-07-27 19:29:20,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=53770.666666666664, ans=0.125 +2024-07-27 19:29:33,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=53770.666666666664, ans=0.0 +2024-07-27 19:29:36,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=53770.666666666664, ans=0.1 +2024-07-27 19:29:39,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=53784.0, ans=0.125 +2024-07-27 19:29:49,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=53797.333333333336, ans=0.2 +2024-07-27 19:29:51,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53810.666666666664, ans=0.125 +2024-07-27 19:29:52,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=53810.666666666664, ans=0.125 +2024-07-27 19:29:54,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=53810.666666666664, ans=0.125 +2024-07-27 19:29:57,637 INFO [train.py:1114] (2/4) Epoch 4, batch 9700, loss[loss=0.2812, simple_loss=0.3417, pruned_loss=0.1104, over 4145.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3322, pruned_loss=0.09246, over 924959.95 frames. ], batch size: 25, lr: 1.68e-02, grad_scale: 32.0 +2024-07-27 19:30:00,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.56 vs. limit=10.0 +2024-07-27 19:30:07,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.81 vs. 
limit=22.5 +2024-07-27 19:30:18,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=53850.666666666664, ans=0.125 +2024-07-27 19:30:19,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0 +2024-07-27 19:30:22,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=53864.0, ans=0.2 +2024-07-27 19:30:22,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=53864.0, ans=0.5 +2024-07-27 19:30:25,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53877.333333333336, ans=0.125 +2024-07-27 19:30:31,242 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.09 vs. limit=15.0 +2024-07-27 19:30:33,404 INFO [train.py:1114] (2/4) Epoch 4, batch 9750, loss[loss=0.2861, simple_loss=0.3573, pruned_loss=0.1074, over 4676.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3324, pruned_loss=0.09275, over 925587.64 frames. ], batch size: 15, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:30:41,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=53890.666666666664, ans=0.0 +2024-07-27 19:30:42,342 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.435e+01 7.103e+01 8.018e+01 1.499e+02, threshold=1.421e+02, percent-clipped=1.0 +2024-07-27 19:30:43,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=53904.0, ans=0.2 +2024-07-27 19:30:53,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53917.333333333336, ans=0.125 +2024-07-27 19:31:01,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 19:31:09,153 INFO [train.py:1114] (2/4) Epoch 4, batch 9800, loss[loss=0.2092, simple_loss=0.2958, pruned_loss=0.06134, over 4722.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3313, pruned_loss=0.09195, over 925213.58 frames. ], batch size: 12, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:31:17,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=53970.666666666664, ans=0.125 +2024-07-27 19:31:19,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53970.666666666664, ans=0.1 +2024-07-27 19:31:40,669 INFO [train.py:1114] (2/4) Epoch 4, batch 9850, loss[loss=0.277, simple_loss=0.3598, pruned_loss=0.09711, over 4913.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.332, pruned_loss=0.09204, over 927767.39 frames. 
], batch size: 15, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:31:40,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=54024.0, ans=0.0 +2024-07-27 19:31:46,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.505e+01 6.777e+01 7.966e+01 9.401e+01 1.769e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 19:31:55,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=54037.333333333336, ans=0.0 +2024-07-27 19:32:01,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54050.666666666664, ans=0.1 +2024-07-27 19:32:09,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.33 vs. limit=15.0 +2024-07-27 19:32:10,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0 +2024-07-27 19:32:17,534 INFO [train.py:1114] (2/4) Epoch 4, batch 9900, loss[loss=0.2317, simple_loss=0.3141, pruned_loss=0.07461, over 4858.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3315, pruned_loss=0.09194, over 926684.05 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 16.0 +2024-07-27 19:32:19,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54090.666666666664, ans=0.1 +2024-07-27 19:32:21,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=54090.666666666664, ans=0.0 +2024-07-27 19:32:28,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=54104.0, ans=0.0 +2024-07-27 19:32:31,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=54104.0, ans=0.0 +2024-07-27 19:32:33,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=54117.333333333336, ans=0.0 +2024-07-27 19:33:15,504 INFO [train.py:1114] (2/4) Epoch 4, batch 9950, loss[loss=0.2111, simple_loss=0.2809, pruned_loss=0.07071, over 4805.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3322, pruned_loss=0.09254, over 929122.09 frames. 
], batch size: 11, lr: 1.67e-02, grad_scale: 16.0 +2024-07-27 19:33:18,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=54157.333333333336, ans=0.025 +2024-07-27 19:33:19,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54157.333333333336, ans=0.125 +2024-07-27 19:33:20,987 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.075e+01 6.568e+01 7.447e+01 8.780e+01 1.338e+02, threshold=1.489e+02, percent-clipped=0.0 +2024-07-27 19:33:32,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=54184.0, ans=0.2 +2024-07-27 19:33:38,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54197.333333333336, ans=0.0 +2024-07-27 19:33:43,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=54197.333333333336, ans=0.0 +2024-07-27 19:33:50,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.18 vs. limit=22.5 +2024-07-27 19:33:51,643 INFO [train.py:1114] (2/4) Epoch 4, batch 10000, loss[loss=0.241, simple_loss=0.3202, pruned_loss=0.08092, over 4641.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3356, pruned_loss=0.09413, over 927231.87 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:33:56,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-07-27 19:33:57,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.98 vs. limit=15.0 +2024-07-27 19:33:58,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=54224.0, ans=0.125 +2024-07-27 19:34:30,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54264.0, ans=0.0 +2024-07-27 19:34:32,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.89 vs. limit=6.0 +2024-07-27 19:34:35,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=54277.333333333336, ans=0.0 +2024-07-27 19:34:39,361 INFO [train.py:1114] (2/4) Epoch 4, batch 10050, loss[loss=0.3603, simple_loss=0.3834, pruned_loss=0.1686, over 3222.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3404, pruned_loss=0.09739, over 914676.63 frames. 
], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:34:39,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=54290.666666666664, ans=0.0 +2024-07-27 19:34:45,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=54290.666666666664, ans=0.0 +2024-07-27 19:34:45,411 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 6.968e+01 7.682e+01 9.310e+01 1.537e+02, threshold=1.536e+02, percent-clipped=1.0 +2024-07-27 19:34:45,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=54304.0, ans=10.0 +2024-07-27 19:34:51,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=54304.0, ans=0.0 +2024-07-27 19:35:02,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.01 vs. limit=22.5 +2024-07-27 19:35:05,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=54330.666666666664, ans=0.0 +2024-07-27 19:35:07,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54344.0, ans=0.1 +2024-07-27 19:35:08,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=54344.0, ans=0.125 +2024-07-27 19:35:13,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.21 vs. limit=22.5 +2024-07-27 19:35:14,123 INFO [train.py:1114] (2/4) Epoch 4, batch 10100, loss[loss=0.3431, simple_loss=0.3918, pruned_loss=0.1472, over 3419.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3477, pruned_loss=0.1053, over 863137.11 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:35:17,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54357.333333333336, ans=0.125 +2024-07-27 19:35:17,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54357.333333333336, ans=0.125 +2024-07-27 19:35:18,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54357.333333333336, ans=0.1 +2024-07-27 19:35:23,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54370.666666666664, ans=0.1 +2024-07-27 19:35:24,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.65 vs. limit=15.0 +2024-07-27 19:35:42,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.27 vs. limit=22.5 +2024-07-27 19:35:45,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0 +2024-07-27 19:35:49,235 INFO [train.py:1114] (2/4) Epoch 4, batch 10150, loss[loss=0.3228, simple_loss=0.3744, pruned_loss=0.1356, over 3588.00 frames. 
], tot_loss[loss=0.2867, simple_loss=0.3522, pruned_loss=0.1106, over 820008.27 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:35:52,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=54424.0, ans=0.2 +2024-07-27 19:36:00,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=54424.0, ans=0.125 +2024-07-27 19:36:02,454 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.057e+01 7.252e+01 7.644e+01 8.757e+01 1.198e+02, threshold=1.529e+02, percent-clipped=0.0 +2024-07-27 19:36:28,234 INFO [train.py:1114] (2/4) Epoch 4, batch 10200, loss[loss=0.3632, simple_loss=0.4044, pruned_loss=0.161, over 3384.00 frames. ], tot_loss[loss=0.2926, simple_loss=0.3556, pruned_loss=0.1148, over 788723.68 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:36:29,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=54490.666666666664, ans=0.0 +2024-07-27 19:36:42,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.29 vs. limit=15.0 +2024-07-27 19:37:32,061 INFO [train.py:1114] (2/4) Epoch 5, batch 0, loss[loss=0.1961, simple_loss=0.2887, pruned_loss=0.05179, over 4853.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2887, pruned_loss=0.05179, over 4853.00 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:37:32,062 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 19:37:43,745 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.2167, simple_loss=0.3194, pruned_loss=0.05704, over 944034.00 frames. +2024-07-27 19:37:43,746 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 19:37:49,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=54521.333333333336, ans=0.125 +2024-07-27 19:37:53,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=54534.666666666664, ans=0.1 +2024-07-27 19:37:56,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54548.0, ans=0.1 +2024-07-27 19:38:01,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.79 vs. limit=22.5 +2024-07-27 19:38:02,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=54548.0, ans=15.0 +2024-07-27 19:38:05,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=54561.333333333336, ans=0.0 +2024-07-27 19:38:08,552 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.667e+01 7.198e+01 8.159e+01 1.101e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 19:38:16,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.74 vs. 
limit=22.5 +2024-07-27 19:43:23,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=54588.0, ans=0.125 +2024-07-27 19:43:25,058 INFO [train.py:1114] (2/4) Epoch 5, batch 50, loss[loss=0.2353, simple_loss=0.3097, pruned_loss=0.08049, over 4603.00 frames. ], tot_loss[loss=0.2653, simple_loss=0.3413, pruned_loss=0.09465, over 206549.56 frames. ], batch size: 11, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:44:00,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=27.90 vs. limit=22.5 +2024-07-27 19:44:14,924 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:44:16,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=54628.0, ans=0.025 +2024-07-27 19:44:43,615 INFO [train.py:1114] (2/4) Epoch 5, batch 100, loss[loss=0.2785, simple_loss=0.3427, pruned_loss=0.1071, over 4637.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.339, pruned_loss=0.09298, over 365478.34 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:45:06,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=54654.666666666664, ans=0.0 +2024-07-27 19:45:28,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=54668.0, ans=0.05 +2024-07-27 19:45:29,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.51 vs. limit=22.5 +2024-07-27 19:45:33,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=54681.333333333336, ans=0.0 +2024-07-27 19:45:45,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.64 vs. limit=22.5 +2024-07-27 19:45:46,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=54681.333333333336, ans=0.025 +2024-07-27 19:45:50,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54694.666666666664, ans=0.125 +2024-07-27 19:45:52,466 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 6.028e+01 6.816e+01 7.937e+01 1.219e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 19:45:53,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.69 vs. limit=15.0 +2024-07-27 19:46:10,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=12.0 +2024-07-27 19:46:20,037 INFO [train.py:1114] (2/4) Epoch 5, batch 150, loss[loss=0.1872, simple_loss=0.2692, pruned_loss=0.05257, over 4606.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3345, pruned_loss=0.09128, over 494005.38 frames. 
], batch size: 11, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:46:31,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=54721.333333333336, ans=0.0 +2024-07-27 19:46:49,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=54734.666666666664, ans=0.125 +2024-07-27 19:47:05,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=54748.0, ans=0.0 +2024-07-27 19:47:21,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=54761.333333333336, ans=0.2 +2024-07-27 19:47:43,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=54774.666666666664, ans=0.2 +2024-07-27 19:47:50,698 INFO [train.py:1114] (2/4) Epoch 5, batch 200, loss[loss=0.2773, simple_loss=0.3536, pruned_loss=0.1005, over 4495.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3297, pruned_loss=0.08949, over 593458.24 frames. ], batch size: 21, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:47:52,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=54788.0, ans=0.125 +2024-07-27 19:47:53,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=54788.0, ans=22.5 +2024-07-27 19:48:12,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=54801.333333333336, ans=0.125 +2024-07-27 19:48:41,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.041e+01 6.282e+01 6.864e+01 7.827e+01 1.211e+02, threshold=1.373e+02, percent-clipped=0.0 +2024-07-27 19:49:02,705 INFO [train.py:1114] (2/4) Epoch 5, batch 250, loss[loss=0.257, simple_loss=0.3234, pruned_loss=0.09535, over 4613.00 frames. ], tot_loss[loss=0.2529, simple_loss=0.3284, pruned_loss=0.08868, over 670517.13 frames. ], batch size: 16, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:49:19,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=54881.333333333336, ans=0.07 +2024-07-27 19:49:49,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54921.333333333336, ans=0.1 +2024-07-27 19:49:50,295 INFO [train.py:1114] (2/4) Epoch 5, batch 300, loss[loss=0.2539, simple_loss=0.3349, pruned_loss=0.08647, over 4812.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3275, pruned_loss=0.08843, over 730343.76 frames. 
], batch size: 15, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:49:57,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=54934.666666666664, ans=0.125 +2024-07-27 19:50:16,111 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.053e+01 6.343e+01 7.108e+01 8.248e+01 1.263e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 19:50:22,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54974.666666666664, ans=0.1 +2024-07-27 19:50:25,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=54974.666666666664, ans=0.125 +2024-07-27 19:50:27,359 INFO [train.py:1114] (2/4) Epoch 5, batch 350, loss[loss=0.201, simple_loss=0.2798, pruned_loss=0.0611, over 4932.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3275, pruned_loss=0.08752, over 776357.44 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:50:33,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=54988.0, ans=0.0 +2024-07-27 19:50:35,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=55001.333333333336, ans=0.125 +2024-07-27 19:50:45,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=55014.666666666664, ans=0.0 +2024-07-27 19:50:45,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55014.666666666664, ans=0.125 +2024-07-27 19:51:04,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.55 vs. limit=10.0 +2024-07-27 19:51:12,131 INFO [train.py:1114] (2/4) Epoch 5, batch 400, loss[loss=0.2412, simple_loss=0.3254, pruned_loss=0.0785, over 4694.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3271, pruned_loss=0.08714, over 813635.82 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:51:38,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=55081.333333333336, ans=0.125 +2024-07-27 19:51:43,473 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.118e+01 6.055e+01 6.518e+01 7.484e+01 1.056e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-27 19:51:48,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=55108.0, ans=0.0 +2024-07-27 19:52:02,004 INFO [train.py:1114] (2/4) Epoch 5, batch 450, loss[loss=0.2408, simple_loss=0.3188, pruned_loss=0.08141, over 4626.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3276, pruned_loss=0.08748, over 839177.54 frames. 
], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:52:11,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=55134.666666666664, ans=0.125 +2024-07-27 19:52:16,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55134.666666666664, ans=0.1 +2024-07-27 19:52:17,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=55148.0, ans=0.025 +2024-07-27 19:52:19,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=55148.0, ans=0.125 +2024-07-27 19:52:19,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=55148.0, ans=0.0 +2024-07-27 19:52:39,052 INFO [train.py:1114] (2/4) Epoch 5, batch 500, loss[loss=0.2933, simple_loss=0.3613, pruned_loss=0.1126, over 4691.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3255, pruned_loss=0.08641, over 861501.74 frames. ], batch size: 15, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:52:44,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=55188.0, ans=0.2 +2024-07-27 19:52:53,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.01 vs. limit=22.5 +2024-07-27 19:52:55,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-27 19:52:56,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=55214.666666666664, ans=0.0 +2024-07-27 19:53:00,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55228.0, ans=0.125 +2024-07-27 19:53:04,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.118e+01 6.781e+01 7.848e+01 1.133e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 19:53:04,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=55228.0, ans=0.0 +2024-07-27 19:53:04,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=55228.0, ans=0.025 +2024-07-27 19:53:14,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=55241.333333333336, ans=0.0 +2024-07-27 19:53:19,189 INFO [train.py:1114] (2/4) Epoch 5, batch 550, loss[loss=0.3027, simple_loss=0.3701, pruned_loss=0.1176, over 4622.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3246, pruned_loss=0.0859, over 877509.96 frames. 
], batch size: 17, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:53:20,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=55254.666666666664, ans=0.125 +2024-07-27 19:53:25,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=55268.0, ans=0.09899494936611666 +2024-07-27 19:53:51,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=55294.666666666664, ans=0.09899494936611666 +2024-07-27 19:53:59,350 INFO [train.py:1114] (2/4) Epoch 5, batch 600, loss[loss=0.282, simple_loss=0.3421, pruned_loss=0.111, over 4602.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3255, pruned_loss=0.08652, over 891804.99 frames. ], batch size: 16, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:53:59,679 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.11 vs. limit=12.0 +2024-07-27 19:54:00,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=55321.333333333336, ans=0.0 +2024-07-27 19:54:01,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=55321.333333333336, ans=0.125 +2024-07-27 19:54:08,421 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:54:17,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55348.0, ans=0.1 +2024-07-27 19:54:23,115 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.126e+01 6.489e+01 7.020e+01 8.216e+01 1.209e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 19:54:31,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.86 vs. limit=15.0 +2024-07-27 19:54:32,213 INFO [train.py:1114] (2/4) Epoch 5, batch 650, loss[loss=0.2836, simple_loss=0.3484, pruned_loss=0.1094, over 4753.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3252, pruned_loss=0.08648, over 903303.15 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:54:34,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=55388.0, ans=0.0 +2024-07-27 19:54:47,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=55414.666666666664, ans=10.0 +2024-07-27 19:54:53,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55428.0, ans=0.125 +2024-07-27 19:54:57,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=55428.0, ans=0.0 +2024-07-27 19:55:07,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=55441.333333333336, ans=0.0 +2024-07-27 19:55:07,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.09 vs. 
limit=22.5 +2024-07-27 19:55:09,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55454.666666666664, ans=0.1 +2024-07-27 19:55:10,106 INFO [train.py:1114] (2/4) Epoch 5, batch 700, loss[loss=0.2172, simple_loss=0.2922, pruned_loss=0.07104, over 4642.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3257, pruned_loss=0.08619, over 911209.09 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:55:29,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=55481.333333333336, ans=0.0 +2024-07-27 19:55:34,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=55494.666666666664, ans=0.0 +2024-07-27 19:55:35,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=55494.666666666664, ans=0.025 +2024-07-27 19:55:37,269 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:55:37,846 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.381e+01 6.482e+01 7.754e+01 9.297e+01 1.843e+02, threshold=1.551e+02, percent-clipped=6.0 +2024-07-27 19:55:47,639 INFO [train.py:1114] (2/4) Epoch 5, batch 750, loss[loss=0.2283, simple_loss=0.308, pruned_loss=0.07427, over 4695.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.325, pruned_loss=0.086, over 917505.04 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:55:49,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.83 vs. limit=15.0 +2024-07-27 19:55:49,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55521.333333333336, ans=0.0 +2024-07-27 19:55:59,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=55534.666666666664, ans=0.2 +2024-07-27 19:56:04,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=55548.0, ans=0.0 +2024-07-27 19:56:06,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=55548.0, ans=0.0 +2024-07-27 19:56:25,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.49 vs. limit=10.0 +2024-07-27 19:56:31,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=55574.666666666664, ans=0.125 +2024-07-27 19:56:32,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=55574.666666666664, ans=0.0 +2024-07-27 19:56:37,353 INFO [train.py:1114] (2/4) Epoch 5, batch 800, loss[loss=0.2246, simple_loss=0.3, pruned_loss=0.07457, over 4855.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3252, pruned_loss=0.08633, over 922619.66 frames. 
], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:57:05,535 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.583e+01 6.253e+01 7.054e+01 8.487e+01 1.181e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 19:57:06,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55628.0, ans=0.1 +2024-07-27 19:57:17,160 INFO [train.py:1114] (2/4) Epoch 5, batch 850, loss[loss=0.2469, simple_loss=0.3311, pruned_loss=0.08136, over 4664.00 frames. ], tot_loss[loss=0.248, simple_loss=0.324, pruned_loss=0.08597, over 927067.73 frames. ], batch size: 14, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:57:46,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=55694.666666666664, ans=0.0 +2024-07-27 19:57:55,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55708.0, ans=0.1 +2024-07-27 19:57:56,361 INFO [train.py:1114] (2/4) Epoch 5, batch 900, loss[loss=0.2677, simple_loss=0.3354, pruned_loss=0.1, over 4833.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.325, pruned_loss=0.08713, over 928328.83 frames. ], batch size: 12, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:57:57,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=55721.333333333336, ans=0.0 +2024-07-27 19:58:05,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=55734.666666666664, ans=0.0 +2024-07-27 19:58:09,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=55748.0, ans=0.2 +2024-07-27 19:58:18,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=55761.333333333336, ans=0.2 +2024-07-27 19:58:32,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.75 vs. limit=15.0 +2024-07-27 19:58:32,496 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.369e+01 7.320e+01 8.500e+01 1.312e+02, threshold=1.464e+02, percent-clipped=0.0 +2024-07-27 19:58:53,873 INFO [train.py:1114] (2/4) Epoch 5, batch 950, loss[loss=0.215, simple_loss=0.2888, pruned_loss=0.07066, over 4787.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3263, pruned_loss=0.08783, over 929972.11 frames. 
], batch size: 12, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:58:58,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55788.0, ans=0.0 +2024-07-27 19:59:02,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=55801.333333333336, ans=0.125 +2024-07-27 19:59:03,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=55801.333333333336, ans=0.0 +2024-07-27 19:59:12,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=55801.333333333336, ans=0.0 +2024-07-27 19:59:13,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55814.666666666664, ans=0.1 +2024-07-27 19:59:15,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=55814.666666666664, ans=0.025 +2024-07-27 19:59:31,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55841.333333333336, ans=0.0 +2024-07-27 19:59:34,237 INFO [train.py:1114] (2/4) Epoch 5, batch 1000, loss[loss=0.2358, simple_loss=0.3205, pruned_loss=0.07554, over 4964.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3266, pruned_loss=0.08759, over 929588.42 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:59:35,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=15.0 +2024-07-27 19:59:37,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.37 vs. limit=22.5 +2024-07-27 19:59:39,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=55854.666666666664, ans=0.07 +2024-07-27 19:59:44,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=55854.666666666664, ans=0.125 +2024-07-27 19:59:46,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=55868.0, ans=0.125 +2024-07-27 19:59:48,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=55868.0, ans=0.95 +2024-07-27 19:59:52,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.80 vs. limit=15.0 +2024-07-27 19:59:53,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=55881.333333333336, ans=0.0 +2024-07-27 20:00:00,930 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=27.26 vs. limit=15.0 +2024-07-27 20:00:03,951 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.099e+01 6.760e+01 7.878e+01 1.806e+02, threshold=1.352e+02, percent-clipped=1.0 +2024-07-27 20:00:13,323 INFO [train.py:1114] (2/4) Epoch 5, batch 1050, loss[loss=0.2991, simple_loss=0.3802, pruned_loss=0.109, over 4874.00 frames. 
], tot_loss[loss=0.2508, simple_loss=0.3266, pruned_loss=0.08755, over 932036.34 frames. ], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:18,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-27 20:00:22,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=55934.666666666664, ans=0.0 +2024-07-27 20:00:26,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=55948.0, ans=0.2 +2024-07-27 20:00:43,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55974.666666666664, ans=0.0 +2024-07-27 20:00:47,864 INFO [train.py:1114] (2/4) Epoch 5, batch 1100, loss[loss=0.22, simple_loss=0.2957, pruned_loss=0.07216, over 4892.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3264, pruned_loss=0.08757, over 934195.47 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:47,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=55988.0, ans=0.125 +2024-07-27 20:00:50,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=55988.0, ans=0.125 +2024-07-27 20:00:54,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=56001.333333333336, ans=0.0 +2024-07-27 20:00:56,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56001.333333333336, ans=0.1 +2024-07-27 20:00:56,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=56001.333333333336, ans=0.125 +2024-07-27 20:00:59,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.63 vs. limit=15.0 +2024-07-27 20:01:04,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.90 vs. limit=22.5 +2024-07-27 20:01:05,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=56014.666666666664, ans=0.125 +2024-07-27 20:01:07,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=56028.0, ans=0.0 +2024-07-27 20:01:11,708 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.284e+01 6.917e+01 8.137e+01 1.279e+02, threshold=1.383e+02, percent-clipped=0.0 +2024-07-27 20:01:14,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=56041.333333333336, ans=0.0 +2024-07-27 20:01:22,333 INFO [train.py:1114] (2/4) Epoch 5, batch 1150, loss[loss=0.2297, simple_loss=0.3047, pruned_loss=0.07738, over 4899.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3256, pruned_loss=0.08731, over 933894.17 frames. 
], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:01:22,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56054.666666666664, ans=0.1 +2024-07-27 20:01:23,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=56054.666666666664, ans=0.2 +2024-07-27 20:01:26,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56054.666666666664, ans=0.125 +2024-07-27 20:01:29,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=56068.0, ans=0.125 +2024-07-27 20:01:34,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=56068.0, ans=0.025 +2024-07-27 20:01:43,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0 +2024-07-27 20:01:45,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=56094.666666666664, ans=0.0 +2024-07-27 20:01:47,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56094.666666666664, ans=0.125 +2024-07-27 20:01:57,047 INFO [train.py:1114] (2/4) Epoch 5, batch 1200, loss[loss=0.2882, simple_loss=0.3565, pruned_loss=0.1099, over 4869.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3265, pruned_loss=0.08753, over 933135.10 frames. ], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:02:28,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=56148.0, ans=15.0 +2024-07-27 20:02:34,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56161.333333333336, ans=0.1 +2024-07-27 20:02:40,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.29 vs. limit=15.0 +2024-07-27 20:02:40,442 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.476e+01 6.643e+01 8.181e+01 1.020e+02 1.586e+02, threshold=1.636e+02, percent-clipped=2.0 +2024-07-27 20:02:43,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.24 vs. limit=22.5 +2024-07-27 20:02:43,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=56174.666666666664, ans=0.2 +2024-07-27 20:02:52,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=56174.666666666664, ans=0.125 +2024-07-27 20:02:53,269 INFO [train.py:1114] (2/4) Epoch 5, batch 1250, loss[loss=0.2869, simple_loss=0.3565, pruned_loss=0.1087, over 4792.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3272, pruned_loss=0.08708, over 937149.98 frames. 
], batch size: 15, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:03:09,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=56214.666666666664, ans=10.0 +2024-07-27 20:03:19,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56228.0, ans=0.125 +2024-07-27 20:03:24,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=56241.333333333336, ans=0.125 +2024-07-27 20:03:30,280 INFO [train.py:1114] (2/4) Epoch 5, batch 1300, loss[loss=0.243, simple_loss=0.3262, pruned_loss=0.07988, over 4778.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3267, pruned_loss=0.08717, over 938693.84 frames. ], batch size: 19, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:03:46,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.46 vs. limit=12.0 +2024-07-27 20:03:48,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=56281.333333333336, ans=0.07 +2024-07-27 20:03:48,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.40 vs. limit=15.0 +2024-07-27 20:03:50,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=56281.333333333336, ans=0.125 +2024-07-27 20:03:54,803 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:03:55,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.88 vs. limit=15.0 +2024-07-27 20:04:00,106 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.264e+01 7.458e+01 8.643e+01 1.456e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 20:04:10,208 INFO [train.py:1114] (2/4) Epoch 5, batch 1350, loss[loss=0.2234, simple_loss=0.308, pruned_loss=0.06943, over 4748.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3254, pruned_loss=0.08597, over 940578.04 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:04:31,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=56348.0, ans=0.07 +2024-07-27 20:04:34,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.99 vs. limit=15.0 +2024-07-27 20:04:35,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.33 vs. 
limit=22.5 +2024-07-27 20:04:38,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=56361.333333333336, ans=0.0 +2024-07-27 20:04:39,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=56361.333333333336, ans=0.0 +2024-07-27 20:04:44,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=56374.666666666664, ans=0.0 +2024-07-27 20:04:47,667 INFO [train.py:1114] (2/4) Epoch 5, batch 1400, loss[loss=0.2121, simple_loss=0.2917, pruned_loss=0.06623, over 4694.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3263, pruned_loss=0.08667, over 942704.77 frames. ], batch size: 11, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:04:50,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=56388.0, ans=0.2 +2024-07-27 20:04:51,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56388.0, ans=0.125 +2024-07-27 20:04:56,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56401.333333333336, ans=0.125 +2024-07-27 20:05:47,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56428.0, ans=0.125 +2024-07-27 20:05:48,353 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.394e+01 7.108e+01 8.417e+01 1.153e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 20:05:48,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=56428.0, ans=0.07 +2024-07-27 20:05:49,858 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:05:53,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=56441.333333333336, ans=0.125 +2024-07-27 20:05:57,868 INFO [train.py:1114] (2/4) Epoch 5, batch 1450, loss[loss=0.2796, simple_loss=0.3585, pruned_loss=0.1003, over 4699.00 frames. ], tot_loss[loss=0.252, simple_loss=0.328, pruned_loss=0.08795, over 942563.63 frames. ], batch size: 15, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:06:23,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=56494.666666666664, ans=0.0 +2024-07-27 20:06:23,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56494.666666666664, ans=0.1 +2024-07-27 20:06:25,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=56508.0, ans=0.025 +2024-07-27 20:06:32,454 INFO [train.py:1114] (2/4) Epoch 5, batch 1500, loss[loss=0.239, simple_loss=0.3099, pruned_loss=0.08406, over 4814.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.328, pruned_loss=0.08763, over 942501.80 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0 +2024-07-27 20:06:35,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.65 vs. 
limit=22.5 +2024-07-27 20:06:39,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=56521.333333333336, ans=0.125 +2024-07-27 20:06:42,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=56534.666666666664, ans=0.125 +2024-07-27 20:06:54,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=56548.0, ans=0.125 +2024-07-27 20:06:59,210 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.351e+01 6.555e+01 7.313e+01 8.345e+01 1.115e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 20:07:08,540 INFO [train.py:1114] (2/4) Epoch 5, batch 1550, loss[loss=0.2234, simple_loss=0.3195, pruned_loss=0.06366, over 4902.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3269, pruned_loss=0.08696, over 938885.06 frames. ], batch size: 15, lr: 1.52e-02, grad_scale: 32.0 +2024-07-27 20:07:20,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=56601.333333333336, ans=0.025 +2024-07-27 20:07:30,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=56628.0, ans=0.125 +2024-07-27 20:07:35,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56641.333333333336, ans=0.125 +2024-07-27 20:07:42,160 INFO [train.py:1114] (2/4) Epoch 5, batch 1600, loss[loss=0.2592, simple_loss=0.3328, pruned_loss=0.09278, over 4871.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3261, pruned_loss=0.08686, over 937262.89 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0 +2024-07-27 20:07:42,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=56654.666666666664, ans=0.5 +2024-07-27 20:07:43,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=56654.666666666664, ans=0.1 +2024-07-27 20:08:06,108 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.974e+01 6.291e+01 7.006e+01 7.974e+01 1.110e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 20:08:15,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=56721.333333333336, ans=0.125 +2024-07-27 20:08:15,493 INFO [train.py:1114] (2/4) Epoch 5, batch 1650, loss[loss=0.2293, simple_loss=0.3226, pruned_loss=0.06804, over 4673.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3263, pruned_loss=0.08768, over 937394.98 frames. 
], batch size: 14, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:08:17,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=56721.333333333336, ans=0.0 +2024-07-27 20:08:21,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=56721.333333333336, ans=0.1 +2024-07-27 20:08:25,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=56734.666666666664, ans=0.125 +2024-07-27 20:08:26,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=56734.666666666664, ans=0.125 +2024-07-27 20:08:33,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=56748.0, ans=0.125 +2024-07-27 20:08:47,614 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:08:50,058 INFO [train.py:1114] (2/4) Epoch 5, batch 1700, loss[loss=0.2412, simple_loss=0.3069, pruned_loss=0.08778, over 4688.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.326, pruned_loss=0.08743, over 939043.55 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:08:50,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=56788.0, ans=0.125 +2024-07-27 20:08:53,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=56788.0, ans=0.025 +2024-07-27 20:08:54,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=56788.0, ans=10.0 +2024-07-27 20:08:56,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=56788.0, ans=0.125 +2024-07-27 20:09:09,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.36 vs. limit=22.5 +2024-07-27 20:09:16,489 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.475e+01 7.223e+01 8.445e+01 1.275e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 20:09:17,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=56828.0, ans=0.125 +2024-07-27 20:09:20,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.36 vs. limit=22.5 +2024-07-27 20:09:23,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=56841.333333333336, ans=0.95 +2024-07-27 20:09:23,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=56841.333333333336, ans=0.0 +2024-07-27 20:09:24,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.43 vs. limit=10.0 +2024-07-27 20:09:25,626 INFO [train.py:1114] (2/4) Epoch 5, batch 1750, loss[loss=0.2126, simple_loss=0.2822, pruned_loss=0.07154, over 4823.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3254, pruned_loss=0.08699, over 940091.08 frames. 
], batch size: 11, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:09:26,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=15.0 +2024-07-27 20:09:29,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=56854.666666666664, ans=0.0 +2024-07-27 20:09:37,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=56868.0, ans=0.0 +2024-07-27 20:09:42,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.17 vs. limit=22.5 +2024-07-27 20:09:47,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=56894.666666666664, ans=0.025 +2024-07-27 20:10:02,668 INFO [train.py:1114] (2/4) Epoch 5, batch 1800, loss[loss=0.2662, simple_loss=0.3494, pruned_loss=0.09148, over 4633.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3254, pruned_loss=0.08665, over 941155.57 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:10:06,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-27 20:10:13,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=56934.666666666664, ans=0.125 +2024-07-27 20:10:18,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56948.0, ans=0.125 +2024-07-27 20:10:18,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=56948.0, ans=0.0 +2024-07-27 20:10:20,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.29 vs. limit=22.5 +2024-07-27 20:10:21,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=56961.333333333336, ans=0.5 +2024-07-27 20:10:26,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.038e+01 6.233e+01 6.949e+01 8.152e+01 1.410e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 20:10:27,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.04 vs. limit=15.0 +2024-07-27 20:10:30,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=56974.666666666664, ans=0.0 +2024-07-27 20:10:35,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=56974.666666666664, ans=0.025 +2024-07-27 20:10:37,642 INFO [train.py:1114] (2/4) Epoch 5, batch 1850, loss[loss=0.2696, simple_loss=0.3579, pruned_loss=0.09066, over 4815.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3249, pruned_loss=0.08671, over 940977.59 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:10:38,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.31 vs. 
limit=12.0 +2024-07-27 20:10:45,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57001.333333333336, ans=0.0 +2024-07-27 20:10:50,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=57001.333333333336, ans=0.025 +2024-07-27 20:10:50,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=57014.666666666664, ans=0.125 +2024-07-27 20:10:54,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57014.666666666664, ans=0.1 +2024-07-27 20:11:11,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57041.333333333336, ans=0.125 +2024-07-27 20:11:12,475 INFO [train.py:1114] (2/4) Epoch 5, batch 1900, loss[loss=0.2561, simple_loss=0.3407, pruned_loss=0.08572, over 4650.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3258, pruned_loss=0.08704, over 942181.03 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:11:29,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.30 vs. limit=22.5 +2024-07-27 20:11:36,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.88 vs. limit=15.0 +2024-07-27 20:11:36,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.851e+01 6.078e+01 6.608e+01 7.914e+01 1.166e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-27 20:11:39,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=57108.0, ans=0.2 +2024-07-27 20:11:40,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57108.0, ans=0.125 +2024-07-27 20:11:46,530 INFO [train.py:1114] (2/4) Epoch 5, batch 1950, loss[loss=0.2317, simple_loss=0.3097, pruned_loss=0.07685, over 4901.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3281, pruned_loss=0.08791, over 944064.95 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:11:46,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.77 vs. 
limit=10.0 +2024-07-27 20:11:54,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57134.666666666664, ans=0.1 +2024-07-27 20:11:58,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57134.666666666664, ans=0.0 +2024-07-27 20:12:00,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57134.666666666664, ans=0.1 +2024-07-27 20:12:08,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=57161.333333333336, ans=0.07 +2024-07-27 20:12:15,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57174.666666666664, ans=0.1 +2024-07-27 20:12:16,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=57174.666666666664, ans=0.2 +2024-07-27 20:12:22,150 INFO [train.py:1114] (2/4) Epoch 5, batch 2000, loss[loss=0.2578, simple_loss=0.3153, pruned_loss=0.1001, over 4809.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3288, pruned_loss=0.08812, over 941738.82 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0 +2024-07-27 20:12:23,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57188.0, ans=0.125 +2024-07-27 20:12:24,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-27 20:12:29,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=57201.333333333336, ans=0.07 +2024-07-27 20:12:35,919 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:12:39,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57214.666666666664, ans=0.125 +2024-07-27 20:12:42,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=57228.0, ans=0.0 +2024-07-27 20:12:46,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=57228.0, ans=0.2 +2024-07-27 20:12:46,755 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.356e+01 7.460e+01 8.642e+01 1.315e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 20:12:50,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=57241.333333333336, ans=0.0 +2024-07-27 20:12:55,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.35 vs. limit=22.5 +2024-07-27 20:12:56,140 INFO [train.py:1114] (2/4) Epoch 5, batch 2050, loss[loss=0.2158, simple_loss=0.2922, pruned_loss=0.06969, over 4604.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3279, pruned_loss=0.08788, over 939757.72 frames. 
], batch size: 11, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:13:01,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57254.666666666664, ans=0.1 +2024-07-27 20:13:02,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57268.0, ans=0.1 +2024-07-27 20:13:17,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=57294.666666666664, ans=0.025 +2024-07-27 20:13:24,545 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:13:29,962 INFO [train.py:1114] (2/4) Epoch 5, batch 2100, loss[loss=0.2212, simple_loss=0.3029, pruned_loss=0.06976, over 4762.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.326, pruned_loss=0.08683, over 941620.27 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:13:30,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57321.333333333336, ans=0.125 +2024-07-27 20:13:34,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0 +2024-07-27 20:13:39,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.38 vs. limit=22.5 +2024-07-27 20:13:42,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.90 vs. limit=10.0 +2024-07-27 20:13:48,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=57348.0, ans=0.0 +2024-07-27 20:13:50,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=57361.333333333336, ans=0.04949747468305833 +2024-07-27 20:13:52,874 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:13:53,940 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.957e+01 6.234e+01 6.918e+01 8.302e+01 1.274e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 20:13:54,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=57361.333333333336, ans=0.0 +2024-07-27 20:13:56,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=57374.666666666664, ans=0.125 +2024-07-27 20:14:01,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57374.666666666664, ans=0.125 +2024-07-27 20:14:02,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=57388.0, ans=0.125 +2024-07-27 20:14:03,278 INFO [train.py:1114] (2/4) Epoch 5, batch 2150, loss[loss=0.2535, simple_loss=0.332, pruned_loss=0.08749, over 4903.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3257, pruned_loss=0.08638, over 944603.87 frames. 
], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:14:09,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57401.333333333336, ans=0.1 +2024-07-27 20:14:11,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.12 vs. limit=22.5 +2024-07-27 20:14:15,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=57401.333333333336, ans=0.04949747468305833 +2024-07-27 20:14:15,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=57401.333333333336, ans=0.0 +2024-07-27 20:14:16,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=57414.666666666664, ans=0.025 +2024-07-27 20:14:31,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.66 vs. limit=22.5 +2024-07-27 20:14:35,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57441.333333333336, ans=0.1 +2024-07-27 20:14:38,764 INFO [train.py:1114] (2/4) Epoch 5, batch 2200, loss[loss=0.2246, simple_loss=0.2951, pruned_loss=0.07704, over 4801.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3256, pruned_loss=0.08664, over 943957.13 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:14:43,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=57454.666666666664, ans=0.125 +2024-07-27 20:14:54,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=22.73 vs. limit=15.0 +2024-07-27 20:14:57,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=57481.333333333336, ans=0.2 +2024-07-27 20:14:57,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=57481.333333333336, ans=0.125 +2024-07-27 20:15:03,289 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.386e+01 7.473e+01 9.024e+01 1.169e+02, threshold=1.495e+02, percent-clipped=0.0 +2024-07-27 20:15:03,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=57494.666666666664, ans=0.125 +2024-07-27 20:15:14,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.39 vs. limit=12.0 +2024-07-27 20:15:14,972 INFO [train.py:1114] (2/4) Epoch 5, batch 2250, loss[loss=0.2492, simple_loss=0.3287, pruned_loss=0.08488, over 4702.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3251, pruned_loss=0.08679, over 942792.92 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:15:35,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.14 vs. 
limit=15.0 +2024-07-27 20:15:35,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57548.0, ans=0.1 +2024-07-27 20:15:37,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57561.333333333336, ans=0.125 +2024-07-27 20:15:37,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57561.333333333336, ans=0.1 +2024-07-27 20:15:37,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.36 vs. limit=15.0 +2024-07-27 20:15:50,351 INFO [train.py:1114] (2/4) Epoch 5, batch 2300, loss[loss=0.204, simple_loss=0.2863, pruned_loss=0.06088, over 4955.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3237, pruned_loss=0.0861, over 940314.48 frames. ], batch size: 12, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:16:00,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=57601.333333333336, ans=0.125 +2024-07-27 20:16:04,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=57614.666666666664, ans=0.2 +2024-07-27 20:16:16,283 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.130e+01 6.014e+01 6.647e+01 7.772e+01 1.123e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-27 20:16:19,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57641.333333333336, ans=0.125 +2024-07-27 20:16:29,313 INFO [train.py:1114] (2/4) Epoch 5, batch 2350, loss[loss=0.2472, simple_loss=0.3237, pruned_loss=0.08534, over 4643.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.324, pruned_loss=0.08585, over 941830.48 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:16:31,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0 +2024-07-27 20:16:32,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=57654.666666666664, ans=0.0 +2024-07-27 20:16:41,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57668.0, ans=0.125 +2024-07-27 20:16:42,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=57668.0, ans=0.125 +2024-07-27 20:16:58,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=57708.0, ans=0.125 +2024-07-27 20:17:03,185 INFO [train.py:1114] (2/4) Epoch 5, batch 2400, loss[loss=0.2252, simple_loss=0.2983, pruned_loss=0.07608, over 4643.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3251, pruned_loss=0.08659, over 941500.10 frames. 
], batch size: 12, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:17:08,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57721.333333333336, ans=0.1 +2024-07-27 20:17:10,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57734.666666666664, ans=0.125 +2024-07-27 20:17:10,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57734.666666666664, ans=0.1 +2024-07-27 20:17:13,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=57734.666666666664, ans=0.125 +2024-07-27 20:17:27,277 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.252e+01 6.682e+01 7.735e+01 1.071e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-27 20:17:36,700 INFO [train.py:1114] (2/4) Epoch 5, batch 2450, loss[loss=0.2372, simple_loss=0.3213, pruned_loss=0.07654, over 4691.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3255, pruned_loss=0.08688, over 936724.28 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:17:42,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=15.0 +2024-07-27 20:17:55,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=57814.666666666664, ans=0.0 +2024-07-27 20:18:20,672 INFO [train.py:1114] (2/4) Epoch 5, batch 2500, loss[loss=0.319, simple_loss=0.3807, pruned_loss=0.1286, over 4801.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3237, pruned_loss=0.08561, over 938918.01 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:18:22,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57854.666666666664, ans=0.1 +2024-07-27 20:18:32,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=57868.0, ans=0.0 +2024-07-27 20:18:39,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=15.0 +2024-07-27 20:18:51,017 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 6.442e+01 7.418e+01 9.024e+01 1.336e+02, threshold=1.484e+02, percent-clipped=0.0 +2024-07-27 20:18:58,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.62 vs. limit=22.5 +2024-07-27 20:19:01,043 INFO [train.py:1114] (2/4) Epoch 5, batch 2550, loss[loss=0.1755, simple_loss=0.2542, pruned_loss=0.0484, over 4807.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3232, pruned_loss=0.08502, over 938908.17 frames. ], batch size: 11, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:19:08,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=57934.666666666664, ans=0.125 +2024-07-27 20:19:13,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.61 vs. 
limit=10.0 +2024-07-27 20:19:21,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57961.333333333336, ans=0.1 +2024-07-27 20:19:25,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.18 vs. limit=10.0 +2024-07-27 20:19:34,663 INFO [train.py:1114] (2/4) Epoch 5, batch 2600, loss[loss=0.2111, simple_loss=0.2865, pruned_loss=0.06787, over 4886.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3243, pruned_loss=0.08528, over 937396.15 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0 +2024-07-27 20:19:42,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=12.0 +2024-07-27 20:19:45,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-07-27 20:19:46,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=58001.333333333336, ans=0.2 +2024-07-27 20:19:56,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=58028.0, ans=0.125 +2024-07-27 20:19:58,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58028.0, ans=0.125 +2024-07-27 20:19:58,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-07-27 20:19:58,984 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 6.417e+01 7.272e+01 8.306e+01 1.432e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-27 20:19:59,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=58028.0, ans=0.025 +2024-07-27 20:20:02,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58041.333333333336, ans=0.1 +2024-07-27 20:20:06,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=58041.333333333336, ans=0.025 +2024-07-27 20:20:09,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=58041.333333333336, ans=0.0 +2024-07-27 20:20:11,711 INFO [train.py:1114] (2/4) Epoch 5, batch 2650, loss[loss=0.2231, simple_loss=0.3101, pruned_loss=0.06807, over 4646.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3258, pruned_loss=0.08629, over 939710.25 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:20:13,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=58054.666666666664, ans=0.125 +2024-07-27 20:20:20,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=58054.666666666664, ans=0.1 +2024-07-27 20:20:33,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.63 vs. 
limit=22.5 +2024-07-27 20:20:41,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0 +2024-07-27 20:20:41,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=58094.666666666664, ans=0.125 +2024-07-27 20:20:47,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.27 vs. limit=6.0 +2024-07-27 20:20:55,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58108.0, ans=0.0 +2024-07-27 20:20:57,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=58108.0, ans=0.125 +2024-07-27 20:20:58,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=58108.0, ans=12.0 +2024-07-27 20:20:59,477 INFO [train.py:1114] (2/4) Epoch 5, batch 2700, loss[loss=0.2392, simple_loss=0.3224, pruned_loss=0.07794, over 4741.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3256, pruned_loss=0.08592, over 939660.30 frames. ], batch size: 14, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:21:00,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=58121.333333333336, ans=0.035 +2024-07-27 20:21:15,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58148.0, ans=0.1 +2024-07-27 20:21:19,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.09 vs. limit=15.0 +2024-07-27 20:21:22,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58161.333333333336, ans=0.1 +2024-07-27 20:21:25,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=58161.333333333336, ans=0.2 +2024-07-27 20:21:27,044 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.186e+01 6.835e+01 7.719e+01 1.191e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 20:21:27,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=58161.333333333336, ans=0.0 +2024-07-27 20:21:38,089 INFO [train.py:1114] (2/4) Epoch 5, batch 2750, loss[loss=0.2133, simple_loss=0.2944, pruned_loss=0.06615, over 4706.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3235, pruned_loss=0.08486, over 939730.04 frames. 
], batch size: 12, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:21:40,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=58188.0, ans=0.2 +2024-07-27 20:21:42,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=58188.0, ans=0.0 +2024-07-27 20:21:55,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=58214.666666666664, ans=0.2 +2024-07-27 20:22:17,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=58241.333333333336, ans=0.07 +2024-07-27 20:22:19,863 INFO [train.py:1114] (2/4) Epoch 5, batch 2800, loss[loss=0.3947, simple_loss=0.4174, pruned_loss=0.186, over 3543.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3224, pruned_loss=0.08412, over 938215.50 frames. ], batch size: 35, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:22:32,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=58268.0, ans=0.0 +2024-07-27 20:22:33,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58281.333333333336, ans=0.1 +2024-07-27 20:22:35,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=58281.333333333336, ans=0.0 +2024-07-27 20:22:44,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 6.174e+01 6.624e+01 7.261e+01 1.719e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-27 20:22:51,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=58308.0, ans=0.015 +2024-07-27 20:22:53,251 INFO [train.py:1114] (2/4) Epoch 5, batch 2850, loss[loss=0.2421, simple_loss=0.3142, pruned_loss=0.08496, over 4961.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3236, pruned_loss=0.08536, over 936447.58 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:22:53,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=58321.333333333336, ans=0.125 +2024-07-27 20:23:04,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58334.666666666664, ans=0.1 +2024-07-27 20:23:11,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58348.0, ans=0.1 +2024-07-27 20:23:15,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=58361.333333333336, ans=0.125 +2024-07-27 20:23:22,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=58374.666666666664, ans=0.125 +2024-07-27 20:23:26,092 INFO [train.py:1114] (2/4) Epoch 5, batch 2900, loss[loss=0.2409, simple_loss=0.3244, pruned_loss=0.07873, over 4824.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3236, pruned_loss=0.08452, over 940054.80 frames. 
], batch size: 13, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:23:26,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58388.0, ans=0.125 +2024-07-27 20:23:30,354 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:23:31,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=58388.0, ans=0.07 +2024-07-27 20:23:34,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=58401.333333333336, ans=0.0 +2024-07-27 20:23:35,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=58401.333333333336, ans=0.2 +2024-07-27 20:23:41,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=58414.666666666664, ans=0.0 +2024-07-27 20:23:51,046 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.205e+01 6.873e+01 7.885e+01 1.448e+02, threshold=1.375e+02, percent-clipped=1.0 +2024-07-27 20:24:01,548 INFO [train.py:1114] (2/4) Epoch 5, batch 2950, loss[loss=0.2152, simple_loss=0.2986, pruned_loss=0.06586, over 4708.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.322, pruned_loss=0.08436, over 938917.28 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0 +2024-07-27 20:24:06,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=58454.666666666664, ans=0.2 +2024-07-27 20:24:11,950 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:24:19,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58481.333333333336, ans=0.125 +2024-07-27 20:24:21,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=58494.666666666664, ans=0.125 +2024-07-27 20:24:28,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=58508.0, ans=0.125 +2024-07-27 20:24:30,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58508.0, ans=0.1 +2024-07-27 20:24:39,797 INFO [train.py:1114] (2/4) Epoch 5, batch 3000, loss[loss=0.2633, simple_loss=0.3407, pruned_loss=0.09297, over 4771.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3221, pruned_loss=0.08437, over 938555.83 frames. 
], batch size: 13, lr: 1.50e-02, grad_scale: 32.0 +2024-07-27 20:24:39,797 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 20:25:01,871 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.1985, 5.7261, 5.9447, 5.7592], device='cuda:2') +2024-07-27 20:25:04,232 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5139, 4.2553, 3.5585, 3.1232], device='cuda:2') +2024-07-27 20:25:04,263 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.4616, 3.1955, 3.5182, 4.1164], device='cuda:2') +2024-07-27 20:25:07,124 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.2018, simple_loss=0.3051, pruned_loss=0.04931, over 944034.00 frames. +2024-07-27 20:25:07,223 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 20:25:10,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=58521.333333333336, ans=0.0 +2024-07-27 20:25:14,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.80 vs. limit=15.0 +2024-07-27 20:25:29,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58548.0, ans=0.1 +2024-07-27 20:25:32,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58561.333333333336, ans=0.1 +2024-07-27 20:25:35,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=58561.333333333336, ans=0.125 +2024-07-27 20:25:51,052 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 6.112e+01 6.899e+01 7.724e+01 1.072e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-27 20:25:57,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58574.666666666664, ans=0.125 +2024-07-27 20:26:00,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=58574.666666666664, ans=0.2 +2024-07-27 20:26:01,527 INFO [train.py:1114] (2/4) Epoch 5, batch 3050, loss[loss=0.2382, simple_loss=0.3179, pruned_loss=0.07922, over 4635.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.323, pruned_loss=0.08501, over 937372.05 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 32.0 +2024-07-27 20:26:02,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=58588.0, ans=0.125 +2024-07-27 20:26:08,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=58601.333333333336, ans=0.125 +2024-07-27 20:26:38,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=58641.333333333336, ans=0.125 +2024-07-27 20:26:40,789 INFO [train.py:1114] (2/4) Epoch 5, batch 3100, loss[loss=0.2869, simple_loss=0.3691, pruned_loss=0.1023, over 4666.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3225, pruned_loss=0.08442, over 937924.55 frames. 
], batch size: 16, lr: 1.50e-02, grad_scale: 16.0 +2024-07-27 20:26:49,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=58654.666666666664, ans=0.0 +2024-07-27 20:27:13,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=58668.0, ans=0.125 +2024-07-27 20:28:33,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=58681.333333333336, ans=0.95 +2024-07-27 20:28:43,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=58694.666666666664, ans=0.2 +2024-07-27 20:29:03,171 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 6.229e+01 6.955e+01 7.996e+01 1.498e+02, threshold=1.391e+02, percent-clipped=1.0 +2024-07-27 20:29:36,763 INFO [train.py:1114] (2/4) Epoch 5, batch 3150, loss[loss=0.2632, simple_loss=0.35, pruned_loss=0.08813, over 4635.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3234, pruned_loss=0.08489, over 938055.53 frames. ], batch size: 17, lr: 1.50e-02, grad_scale: 16.0 +2024-07-27 20:29:52,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.98 vs. limit=15.0 +2024-07-27 20:29:53,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58748.0, ans=0.1 +2024-07-27 20:30:20,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=58788.0, ans=0.125 +2024-07-27 20:30:20,978 INFO [train.py:1114] (2/4) Epoch 5, batch 3200, loss[loss=0.2401, simple_loss=0.3148, pruned_loss=0.08271, over 4835.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3235, pruned_loss=0.08507, over 939774.50 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0 +2024-07-27 20:30:22,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=58788.0, ans=0.0 +2024-07-27 20:30:25,087 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:30:25,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=58788.0, ans=0.2 +2024-07-27 20:30:56,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=58814.666666666664, ans=0.0 +2024-07-27 20:31:00,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=58814.666666666664, ans=0.125 +2024-07-27 20:31:09,394 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 6.414e+01 7.232e+01 8.731e+01 1.300e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 20:31:13,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.52 vs. limit=15.0 +2024-07-27 20:31:13,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=58841.333333333336, ans=0.0 +2024-07-27 20:31:17,446 INFO [train.py:1114] (2/4) Epoch 5, batch 3250, loss[loss=0.2367, simple_loss=0.3112, pruned_loss=0.08106, over 4930.00 frames. 
], tot_loss[loss=0.2476, simple_loss=0.3243, pruned_loss=0.08547, over 940695.59 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:31:20,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=58854.666666666664, ans=0.2 +2024-07-27 20:31:22,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=58854.666666666664, ans=0.025 +2024-07-27 20:31:25,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58868.0, ans=0.1 +2024-07-27 20:31:28,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=58868.0, ans=0.125 +2024-07-27 20:31:31,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.28 vs. limit=10.0 +2024-07-27 20:31:46,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-07-27 20:31:53,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58894.666666666664, ans=0.1 +2024-07-27 20:31:59,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58908.0, ans=0.125 +2024-07-27 20:32:06,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=58921.333333333336, ans=0.125 +2024-07-27 20:32:06,914 INFO [train.py:1114] (2/4) Epoch 5, batch 3300, loss[loss=0.2654, simple_loss=0.3299, pruned_loss=0.1005, over 4740.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3229, pruned_loss=0.0855, over 940845.07 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:32:07,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=58921.333333333336, ans=0.025 +2024-07-27 20:32:45,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.62 vs. limit=15.0 +2024-07-27 20:32:46,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58948.0, ans=0.0 +2024-07-27 20:32:46,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58948.0, ans=0.125 +2024-07-27 20:32:54,494 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 6.806e+01 7.832e+01 9.289e+01 1.732e+02, threshold=1.566e+02, percent-clipped=1.0 +2024-07-27 20:33:05,646 INFO [train.py:1114] (2/4) Epoch 5, batch 3350, loss[loss=0.2733, simple_loss=0.3422, pruned_loss=0.1022, over 4599.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3245, pruned_loss=0.08722, over 938984.48 frames. ], batch size: 17, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:33:20,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=59014.666666666664, ans=0.2 +2024-07-27 20:33:33,901 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. 
limit=15.0 +2024-07-27 20:33:39,078 INFO [train.py:1114] (2/4) Epoch 5, batch 3400, loss[loss=0.1866, simple_loss=0.2703, pruned_loss=0.05146, over 4813.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3245, pruned_loss=0.08697, over 937620.82 frames. ], batch size: 11, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:33:39,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59054.666666666664, ans=0.1 +2024-07-27 20:33:40,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0 +2024-07-27 20:33:51,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=59068.0, ans=0.0 +2024-07-27 20:33:51,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59068.0, ans=0.1 +2024-07-27 20:33:51,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0 +2024-07-27 20:33:52,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=59081.333333333336, ans=0.0 +2024-07-27 20:33:57,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.24 vs. limit=12.0 +2024-07-27 20:34:01,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-27 20:34:04,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.358e+01 7.066e+01 8.502e+01 1.252e+02, threshold=1.413e+02, percent-clipped=0.0 +2024-07-27 20:34:12,094 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:34:12,599 INFO [train.py:1114] (2/4) Epoch 5, batch 3450, loss[loss=0.2628, simple_loss=0.3441, pruned_loss=0.09071, over 4760.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3248, pruned_loss=0.0867, over 937873.77 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:34:18,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.02 vs. limit=6.0 +2024-07-27 20:34:19,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=59134.666666666664, ans=0.125 +2024-07-27 20:34:20,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0 +2024-07-27 20:34:25,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.59 vs. 
limit=15.0 +2024-07-27 20:34:27,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=59148.0, ans=0.2 +2024-07-27 20:34:30,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=59148.0, ans=0.0 +2024-07-27 20:34:38,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=59161.333333333336, ans=0.025 +2024-07-27 20:34:46,108 INFO [train.py:1114] (2/4) Epoch 5, batch 3500, loss[loss=0.2089, simple_loss=0.2991, pruned_loss=0.05935, over 4954.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3233, pruned_loss=0.08567, over 938349.42 frames. ], batch size: 12, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:34:46,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=59188.0, ans=0.05 +2024-07-27 20:34:58,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.64 vs. limit=15.0 +2024-07-27 20:35:03,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59214.666666666664, ans=0.1 +2024-07-27 20:35:07,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=59228.0, ans=0.025 +2024-07-27 20:35:07,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59228.0, ans=0.125 +2024-07-27 20:35:13,407 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.758e+01 6.287e+01 6.647e+01 7.437e+01 1.544e+02, threshold=1.329e+02, percent-clipped=1.0 +2024-07-27 20:35:21,583 INFO [train.py:1114] (2/4) Epoch 5, batch 3550, loss[loss=0.2633, simple_loss=0.3402, pruned_loss=0.09327, over 4664.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3229, pruned_loss=0.08553, over 939148.62 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:35:23,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59254.666666666664, ans=0.125 +2024-07-27 20:35:25,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=59254.666666666664, ans=0.125 +2024-07-27 20:35:33,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59268.0, ans=0.1 +2024-07-27 20:35:38,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=59281.333333333336, ans=0.0 +2024-07-27 20:35:44,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=59294.666666666664, ans=0.125 +2024-07-27 20:35:45,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=59294.666666666664, ans=0.125 +2024-07-27 20:35:45,644 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.51 vs. 
limit=22.5 +2024-07-27 20:35:54,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=59308.0, ans=0.125 +2024-07-27 20:35:55,985 INFO [train.py:1114] (2/4) Epoch 5, batch 3600, loss[loss=0.1987, simple_loss=0.2798, pruned_loss=0.05877, over 4961.00 frames. ], tot_loss[loss=0.2474, simple_loss=0.3236, pruned_loss=0.08558, over 940811.83 frames. ], batch size: 13, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:36:06,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=59334.666666666664, ans=0.125 +2024-07-27 20:36:19,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.75 vs. limit=15.0 +2024-07-27 20:36:26,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=59361.333333333336, ans=0.2 +2024-07-27 20:36:26,688 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.226e+01 6.920e+01 7.848e+01 1.341e+02, threshold=1.384e+02, percent-clipped=1.0 +2024-07-27 20:36:30,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=59374.666666666664, ans=10.0 +2024-07-27 20:36:31,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=59374.666666666664, ans=0.125 +2024-07-27 20:36:34,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=59374.666666666664, ans=0.125 +2024-07-27 20:36:35,259 INFO [train.py:1114] (2/4) Epoch 5, batch 3650, loss[loss=0.2605, simple_loss=0.3479, pruned_loss=0.08661, over 4917.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3209, pruned_loss=0.08362, over 941009.06 frames. ], batch size: 15, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:36:51,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=59414.666666666664, ans=0.0 +2024-07-27 20:36:56,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=59428.0, ans=0.0 +2024-07-27 20:37:11,924 INFO [train.py:1114] (2/4) Epoch 5, batch 3700, loss[loss=0.2328, simple_loss=0.3149, pruned_loss=0.07532, over 4934.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3215, pruned_loss=0.08402, over 941848.98 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:37:31,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59494.666666666664, ans=0.1 +2024-07-27 20:37:37,467 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.383e+01 7.266e+01 8.369e+01 1.200e+02, threshold=1.453e+02, percent-clipped=0.0 +2024-07-27 20:37:38,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59508.0, ans=0.1 +2024-07-27 20:37:45,344 INFO [train.py:1114] (2/4) Epoch 5, batch 3750, loss[loss=0.1995, simple_loss=0.2655, pruned_loss=0.06676, over 4808.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3222, pruned_loss=0.08423, over 943601.06 frames. 
], batch size: 11, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:37:46,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=59521.333333333336, ans=0.2 +2024-07-27 20:37:51,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=59534.666666666664, ans=15.0 +2024-07-27 20:37:52,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=59534.666666666664, ans=0.2 +2024-07-27 20:37:57,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=59534.666666666664, ans=0.0 +2024-07-27 20:38:02,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=59548.0, ans=10.0 +2024-07-27 20:38:05,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=59561.333333333336, ans=0.025 +2024-07-27 20:38:10,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=59561.333333333336, ans=0.125 +2024-07-27 20:38:15,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=59574.666666666664, ans=0.0 +2024-07-27 20:38:18,244 INFO [train.py:1114] (2/4) Epoch 5, batch 3800, loss[loss=0.2265, simple_loss=0.3232, pruned_loss=0.06496, over 4808.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3221, pruned_loss=0.08415, over 941585.98 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:38:24,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=59601.333333333336, ans=0.0 +2024-07-27 20:38:44,251 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 6.382e+01 7.291e+01 8.683e+01 1.605e+02, threshold=1.458e+02, percent-clipped=1.0 +2024-07-27 20:38:45,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=59641.333333333336, ans=0.125 +2024-07-27 20:38:51,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59641.333333333336, ans=0.125 +2024-07-27 20:38:52,569 INFO [train.py:1114] (2/4) Epoch 5, batch 3850, loss[loss=0.2637, simple_loss=0.3476, pruned_loss=0.08994, over 4612.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.322, pruned_loss=0.08367, over 942291.34 frames. 
], batch size: 16, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:39:03,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=59668.0, ans=0.025 +2024-07-27 20:39:07,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=59681.333333333336, ans=0.2 +2024-07-27 20:39:17,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59694.666666666664, ans=0.125 +2024-07-27 20:39:21,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59708.0, ans=0.1 +2024-07-27 20:39:25,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=59721.333333333336, ans=0.125 +2024-07-27 20:39:26,362 INFO [train.py:1114] (2/4) Epoch 5, batch 3900, loss[loss=0.2684, simple_loss=0.3461, pruned_loss=0.0954, over 4813.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3229, pruned_loss=0.08436, over 942540.37 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:39:37,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.43 vs. limit=10.0 +2024-07-27 20:39:37,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=59734.666666666664, ans=0.125 +2024-07-27 20:39:38,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=59748.0, ans=0.025 +2024-07-27 20:39:40,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=59748.0, ans=0.125 +2024-07-27 20:39:41,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=59748.0, ans=0.2 +2024-07-27 20:39:42,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=59748.0, ans=0.2 +2024-07-27 20:39:48,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59761.333333333336, ans=0.1 +2024-07-27 20:39:51,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.368e+01 7.161e+01 8.539e+01 1.176e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 20:39:59,277 INFO [train.py:1114] (2/4) Epoch 5, batch 3950, loss[loss=0.3089, simple_loss=0.3869, pruned_loss=0.1155, over 4833.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3231, pruned_loss=0.08422, over 944397.92 frames. ], batch size: 16, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:40:17,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=59814.666666666664, ans=0.0 +2024-07-27 20:40:26,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.58 vs. 
limit=15.0 +2024-07-27 20:40:26,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=59841.333333333336, ans=0.05 +2024-07-27 20:40:33,154 INFO [train.py:1114] (2/4) Epoch 5, batch 4000, loss[loss=0.1999, simple_loss=0.2815, pruned_loss=0.05916, over 4786.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3237, pruned_loss=0.08476, over 940876.65 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:40:39,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59868.0, ans=0.1 +2024-07-27 20:40:47,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=59881.333333333336, ans=0.125 +2024-07-27 20:40:48,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=59881.333333333336, ans=0.1 +2024-07-27 20:40:50,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=59881.333333333336, ans=0.0 +2024-07-27 20:40:50,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=59881.333333333336, ans=0.0 +2024-07-27 20:40:51,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.32 vs. limit=15.0 +2024-07-27 20:40:57,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=59894.666666666664, ans=0.125 +2024-07-27 20:40:59,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=59894.666666666664, ans=0.0 +2024-07-27 20:41:01,637 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.270e+01 7.255e+01 8.485e+01 1.075e+02, threshold=1.451e+02, percent-clipped=0.0 +2024-07-27 20:41:09,836 INFO [train.py:1114] (2/4) Epoch 5, batch 4050, loss[loss=0.3254, simple_loss=0.3751, pruned_loss=0.1378, over 3228.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3223, pruned_loss=0.08421, over 938945.02 frames. ], batch size: 36, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:41:11,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=59921.333333333336, ans=0.1 +2024-07-27 20:41:13,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=59921.333333333336, ans=0.0 +2024-07-27 20:41:26,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=59948.0, ans=0.125 +2024-07-27 20:41:28,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=59948.0, ans=0.07 +2024-07-27 20:41:30,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=59961.333333333336, ans=0.05 +2024-07-27 20:41:45,374 INFO [train.py:1114] (2/4) Epoch 5, batch 4100, loss[loss=0.233, simple_loss=0.3286, pruned_loss=0.06867, over 4891.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3236, pruned_loss=0.08492, over 938198.75 frames. 
], batch size: 15, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:41:58,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.66 vs. limit=22.5 +2024-07-27 20:42:10,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60028.0, ans=0.1 +2024-07-27 20:42:12,620 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.593e+01 8.156e+01 1.046e+02 1.897e+02, threshold=1.631e+02, percent-clipped=3.0 +2024-07-27 20:42:16,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=60041.333333333336, ans=0.125 +2024-07-27 20:42:18,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=60041.333333333336, ans=0.125 +2024-07-27 20:42:20,627 INFO [train.py:1114] (2/4) Epoch 5, batch 4150, loss[loss=0.2191, simple_loss=0.2959, pruned_loss=0.07115, over 4831.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3221, pruned_loss=0.08423, over 937744.50 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:42:20,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0 +2024-07-27 20:42:21,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60054.666666666664, ans=0.1 +2024-07-27 20:42:21,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=15.0 +2024-07-27 20:42:22,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60054.666666666664, ans=0.0 +2024-07-27 20:42:24,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-27 20:42:25,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=60054.666666666664, ans=0.2 +2024-07-27 20:42:42,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=60094.666666666664, ans=0.125 +2024-07-27 20:42:51,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60108.0, ans=0.125 +2024-07-27 20:42:52,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=60108.0, ans=0.0 +2024-07-27 20:42:55,751 INFO [train.py:1114] (2/4) Epoch 5, batch 4200, loss[loss=0.2749, simple_loss=0.3526, pruned_loss=0.09865, over 4901.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3227, pruned_loss=0.08463, over 939511.44 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:42:56,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.34 vs. 
limit=15.0 +2024-07-27 20:42:57,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60121.333333333336, ans=0.125 +2024-07-27 20:42:58,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=60121.333333333336, ans=15.0 +2024-07-27 20:43:01,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=60121.333333333336, ans=0.0 +2024-07-27 20:43:04,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=60134.666666666664, ans=0.125 +2024-07-27 20:43:14,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=60161.333333333336, ans=0.0 +2024-07-27 20:43:19,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60161.333333333336, ans=0.0 +2024-07-27 20:43:20,328 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.148e+01 7.735e+01 9.943e+01 1.461e+02, threshold=1.547e+02, percent-clipped=0.0 +2024-07-27 20:43:28,468 INFO [train.py:1114] (2/4) Epoch 5, batch 4250, loss[loss=0.1903, simple_loss=0.2734, pruned_loss=0.05363, over 4641.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.323, pruned_loss=0.08471, over 941026.65 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:43:42,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=60214.666666666664, ans=0.125 +2024-07-27 20:43:43,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.98 vs. limit=15.0 +2024-07-27 20:43:51,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=60228.0, ans=0.2 +2024-07-27 20:44:01,221 INFO [train.py:1114] (2/4) Epoch 5, batch 4300, loss[loss=0.2483, simple_loss=0.3214, pruned_loss=0.08758, over 4757.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3248, pruned_loss=0.08568, over 940250.87 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:44:10,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=60268.0, ans=0.2 +2024-07-27 20:44:12,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=60268.0, ans=0.05 +2024-07-27 20:44:18,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.87 vs. limit=15.0 +2024-07-27 20:44:26,307 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.170e+01 6.762e+01 7.364e+01 1.372e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-27 20:44:29,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60308.0, ans=0.1 +2024-07-27 20:44:34,437 INFO [train.py:1114] (2/4) Epoch 5, batch 4350, loss[loss=0.296, simple_loss=0.3739, pruned_loss=0.1091, over 4750.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3254, pruned_loss=0.08587, over 941113.20 frames. 
], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:44:38,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=60321.333333333336, ans=0.0 +2024-07-27 20:44:44,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=60334.666666666664, ans=0.0 +2024-07-27 20:44:44,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60334.666666666664, ans=0.1 +2024-07-27 20:44:49,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=60348.0, ans=0.025 +2024-07-27 20:44:50,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=60348.0, ans=0.95 +2024-07-27 20:44:52,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=60348.0, ans=0.0 +2024-07-27 20:44:52,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=60348.0, ans=0.2 +2024-07-27 20:44:56,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=60361.333333333336, ans=0.2 +2024-07-27 20:45:01,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.79 vs. limit=15.0 +2024-07-27 20:45:04,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=60374.666666666664, ans=0.025 +2024-07-27 20:45:08,016 INFO [train.py:1114] (2/4) Epoch 5, batch 4400, loss[loss=0.2485, simple_loss=0.3389, pruned_loss=0.07908, over 4805.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3248, pruned_loss=0.08529, over 941263.97 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:45:12,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=60388.0, ans=0.125 +2024-07-27 20:45:16,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=12.0 +2024-07-27 20:45:24,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=60414.666666666664, ans=0.125 +2024-07-27 20:45:27,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=60428.0, ans=0.125 +2024-07-27 20:45:30,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.70 vs. limit=15.0 +2024-07-27 20:45:31,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=60428.0, ans=0.0 +2024-07-27 20:45:33,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.88 vs. 
limit=10.0 +2024-07-27 20:45:33,397 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.353e+01 6.947e+01 8.100e+01 1.220e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-27 20:45:34,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=60441.333333333336, ans=0.125 +2024-07-27 20:45:35,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=60441.333333333336, ans=0.0 +2024-07-27 20:45:41,665 INFO [train.py:1114] (2/4) Epoch 5, batch 4450, loss[loss=0.2103, simple_loss=0.2828, pruned_loss=0.06884, over 4943.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.325, pruned_loss=0.08597, over 939478.94 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:46:00,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=12.0 +2024-07-27 20:46:02,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=60494.666666666664, ans=0.125 +2024-07-27 20:46:06,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=60494.666666666664, ans=0.0 +2024-07-27 20:46:07,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.02 vs. limit=15.0 +2024-07-27 20:46:13,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=60508.0, ans=0.0 +2024-07-27 20:46:13,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60508.0, ans=0.1 +2024-07-27 20:46:14,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=60508.0, ans=0.125 +2024-07-27 20:46:16,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=60521.333333333336, ans=0.05 +2024-07-27 20:46:16,939 INFO [train.py:1114] (2/4) Epoch 5, batch 4500, loss[loss=0.2488, simple_loss=0.3285, pruned_loss=0.08456, over 4744.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3254, pruned_loss=0.08596, over 938294.12 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:46:26,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60534.666666666664, ans=0.1 +2024-07-27 20:46:28,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60534.666666666664, ans=0.125 +2024-07-27 20:46:35,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.40 vs. 
limit=5.0 +2024-07-27 20:46:37,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=60561.333333333336, ans=0.0 +2024-07-27 20:46:38,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=60561.333333333336, ans=0.025 +2024-07-27 20:46:38,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0 +2024-07-27 20:46:41,935 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.144e+01 7.215e+01 8.358e+01 1.180e+02, threshold=1.443e+02, percent-clipped=0.0 +2024-07-27 20:46:48,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60574.666666666664, ans=0.1 +2024-07-27 20:46:49,976 INFO [train.py:1114] (2/4) Epoch 5, batch 4550, loss[loss=0.2166, simple_loss=0.2899, pruned_loss=0.07166, over 4898.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3252, pruned_loss=0.08586, over 940341.58 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:46:59,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=60601.333333333336, ans=6.0 +2024-07-27 20:47:07,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=60614.666666666664, ans=0.0 +2024-07-27 20:47:21,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60641.333333333336, ans=0.0 +2024-07-27 20:47:25,721 INFO [train.py:1114] (2/4) Epoch 5, batch 4600, loss[loss=0.2421, simple_loss=0.3195, pruned_loss=0.08235, over 4481.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3235, pruned_loss=0.08507, over 938545.12 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:47:31,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=60654.666666666664, ans=0.0 +2024-07-27 20:47:35,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=60668.0, ans=0.125 +2024-07-27 20:47:50,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.48 vs. limit=15.0 +2024-07-27 20:47:53,335 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 6.426e+01 7.546e+01 8.603e+01 1.273e+02, threshold=1.509e+02, percent-clipped=0.0 +2024-07-27 20:47:55,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=60708.0, ans=0.125 +2024-07-27 20:48:02,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=60721.333333333336, ans=0.125 +2024-07-27 20:48:03,051 INFO [train.py:1114] (2/4) Epoch 5, batch 4650, loss[loss=0.267, simple_loss=0.3518, pruned_loss=0.09105, over 4848.00 frames. ], tot_loss[loss=0.247, simple_loss=0.324, pruned_loss=0.08504, over 940415.35 frames. 
], batch size: 16, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:48:23,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.02 vs. limit=22.5 +2024-07-27 20:48:24,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=60761.333333333336, ans=0.125 +2024-07-27 20:48:34,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=60774.666666666664, ans=0.0 +2024-07-27 20:48:36,513 INFO [train.py:1114] (2/4) Epoch 5, batch 4700, loss[loss=0.1832, simple_loss=0.2605, pruned_loss=0.05295, over 4712.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3232, pruned_loss=0.08488, over 937585.64 frames. ], batch size: 11, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:48:41,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.15 vs. limit=22.5 +2024-07-27 20:48:48,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60801.333333333336, ans=0.0 +2024-07-27 20:48:54,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.55 vs. limit=10.0 +2024-07-27 20:48:59,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=60828.0, ans=0.0 +2024-07-27 20:49:00,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=60828.0, ans=0.125 +2024-07-27 20:49:02,025 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+01 6.344e+01 7.380e+01 9.406e+01 1.591e+02, threshold=1.476e+02, percent-clipped=1.0 +2024-07-27 20:49:02,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=60841.333333333336, ans=0.125 +2024-07-27 20:49:07,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=60841.333333333336, ans=0.0 +2024-07-27 20:49:10,659 INFO [train.py:1114] (2/4) Epoch 5, batch 4750, loss[loss=0.2742, simple_loss=0.3441, pruned_loss=0.1021, over 4481.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3226, pruned_loss=0.08449, over 935415.75 frames. 
], batch size: 21, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:49:10,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=60854.666666666664, ans=0.125 +2024-07-27 20:49:19,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=60868.0, ans=0.125 +2024-07-27 20:49:29,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=60881.333333333336, ans=0.125 +2024-07-27 20:49:38,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=60908.0, ans=0.125 +2024-07-27 20:49:43,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=60908.0, ans=0.125 +2024-07-27 20:49:45,213 INFO [train.py:1114] (2/4) Epoch 5, batch 4800, loss[loss=0.2737, simple_loss=0.3547, pruned_loss=0.09634, over 4691.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3228, pruned_loss=0.08511, over 932986.32 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:49:46,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=60921.333333333336, ans=0.0 +2024-07-27 20:49:56,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=60934.666666666664, ans=0.025 +2024-07-27 20:49:58,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=60948.0, ans=0.125 +2024-07-27 20:50:10,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.152e+01 6.705e+01 7.633e+01 9.767e+01, threshold=1.341e+02, percent-clipped=0.0 +2024-07-27 20:50:18,673 INFO [train.py:1114] (2/4) Epoch 5, batch 4850, loss[loss=0.2188, simple_loss=0.2995, pruned_loss=0.06902, over 4743.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3222, pruned_loss=0.08439, over 932541.17 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:50:20,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.79 vs. limit=22.5 +2024-07-27 20:50:51,570 INFO [train.py:1114] (2/4) Epoch 5, batch 4900, loss[loss=0.245, simple_loss=0.3189, pruned_loss=0.08559, over 4760.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3216, pruned_loss=0.08372, over 934455.21 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:50:55,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.96 vs. limit=15.0 +2024-07-27 20:50:57,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.74 vs. limit=15.0 +2024-07-27 20:50:58,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.72 vs. 
limit=15.0 +2024-07-27 20:51:01,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=61068.0, ans=0.125 +2024-07-27 20:51:02,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0 +2024-07-27 20:51:03,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=61068.0, ans=0.125 +2024-07-27 20:51:06,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61081.333333333336, ans=0.1 +2024-07-27 20:51:14,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=61094.666666666664, ans=0.125 +2024-07-27 20:51:20,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.112e+01 6.910e+01 8.321e+01 1.535e+02, threshold=1.382e+02, percent-clipped=5.0 +2024-07-27 20:51:35,818 INFO [train.py:1114] (2/4) Epoch 5, batch 4950, loss[loss=0.3168, simple_loss=0.3662, pruned_loss=0.1337, over 3230.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3244, pruned_loss=0.08553, over 931412.37 frames. ], batch size: 35, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:52:06,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61161.333333333336, ans=0.1 +2024-07-27 20:52:07,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=61161.333333333336, ans=0.2 +2024-07-27 20:52:15,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=61174.666666666664, ans=0.0 +2024-07-27 20:52:19,436 INFO [train.py:1114] (2/4) Epoch 5, batch 5000, loss[loss=0.2847, simple_loss=0.3671, pruned_loss=0.1011, over 4659.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3246, pruned_loss=0.08548, over 935389.79 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:52:24,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=61188.0, ans=0.125 +2024-07-27 20:52:26,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=61201.333333333336, ans=0.125 +2024-07-27 20:52:33,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=61214.666666666664, ans=0.1 +2024-07-27 20:52:39,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.74 vs. 
[Training log, rank (2/4), 2024-07-27 20:52–21:27: Epoch 5, batches 5050–7700. Per-batch summaries (train.py:1114) show tot_loss hovering around 0.242–0.247 (simple_loss ≈ 0.320–0.324, pruned_loss ≈ 0.083–0.086) over ~930k–945k frames, with the learning rate decaying from 1.47e-02 to 1.43e-02 and grad_scale moving between 16.0 and 64.0. Periodic optim.py:487 warnings report Clipping_scale=2.0 together with grad-norm quartiles and a clipping threshold in the range ~1.32e+02–1.59e+02 (percent-clipped 0.0–3.0). Interleaved scaling.py records log ScheduledFloat parameter values (skip rates, dropout probabilities, balancer probabilities, etc., keyed by batch_count) and Whitening metrics (measured metric vs. whitening limit) for individual encoder layers. Validation at batch 6000: loss=0.1984, simple_loss=0.3025, pruned_loss=0.04714 over 944034.00 frames; maximum memory allocated so far: 4179MB. The span ends at the batch 7700 summary: loss=0.2871, simple_loss=0.3702, pruned_loss=0.1021 over 4692.00 frames; tot_loss=0.2442, simple_loss=0.3217, pruned_loss=0.08332 over 934148.27 frames.]
], batch size: 13, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:27:54,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=64788.0, ans=0.125 +2024-07-27 21:27:54,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=64788.0, ans=0.2 +2024-07-27 21:27:57,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=64801.333333333336, ans=0.0 +2024-07-27 21:28:05,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=21.59 vs. limit=15.0 +2024-07-27 21:28:06,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=64814.666666666664, ans=0.2 +2024-07-27 21:28:10,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.87 vs. limit=10.0 +2024-07-27 21:28:12,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.87 vs. limit=15.0 +2024-07-27 21:28:14,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.64 vs. limit=15.0 +2024-07-27 21:28:16,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64828.0, ans=0.1 +2024-07-27 21:28:17,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=64828.0, ans=0.125 +2024-07-27 21:28:17,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.31 vs. limit=15.0 +2024-07-27 21:28:19,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=64841.333333333336, ans=0.125 +2024-07-27 21:28:20,011 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.292e+01 7.097e+01 8.458e+01 1.099e+02, threshold=1.419e+02, percent-clipped=0.0 +2024-07-27 21:28:21,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=64841.333333333336, ans=0.125 +2024-07-27 21:28:25,231 INFO [train.py:1114] (2/4) Epoch 5, batch 7750, loss[loss=0.2464, simple_loss=0.3298, pruned_loss=0.08149, over 4926.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3231, pruned_loss=0.08388, over 935681.84 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:28:37,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=64868.0, ans=10.0 +2024-07-27 21:28:50,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.56 vs. limit=22.5 +2024-07-27 21:28:53,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. 
limit=15.0 +2024-07-27 21:28:55,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64908.0, ans=0.125 +2024-07-27 21:28:55,802 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:29:02,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=64921.333333333336, ans=0.025 +2024-07-27 21:29:02,993 INFO [train.py:1114] (2/4) Epoch 5, batch 7800, loss[loss=0.2338, simple_loss=0.3183, pruned_loss=0.07465, over 4654.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3237, pruned_loss=0.08385, over 937311.25 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:29:21,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=64948.0, ans=0.125 +2024-07-27 21:29:23,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64961.333333333336, ans=0.125 +2024-07-27 21:29:25,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0 +2024-07-27 21:29:27,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=64961.333333333336, ans=0.0 +2024-07-27 21:29:30,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=64974.666666666664, ans=0.0 +2024-07-27 21:29:30,981 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.312e+01 7.129e+01 8.364e+01 1.154e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 21:29:36,490 INFO [train.py:1114] (2/4) Epoch 5, batch 7850, loss[loss=0.219, simple_loss=0.2799, pruned_loss=0.079, over 4488.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3236, pruned_loss=0.08376, over 936089.09 frames. ], batch size: 10, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:29:36,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=64988.0, ans=0.125 +2024-07-27 21:29:49,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=65001.333333333336, ans=0.0 +2024-07-27 21:30:10,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65028.0, ans=0.125 +2024-07-27 21:30:22,619 INFO [train.py:1114] (2/4) Epoch 5, batch 7900, loss[loss=0.2577, simple_loss=0.3424, pruned_loss=0.0865, over 4880.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3246, pruned_loss=0.08444, over 932766.14 frames. 
], batch size: 14, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:30:24,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=65054.666666666664, ans=0.125 +2024-07-27 21:30:33,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=65068.0, ans=0.5 +2024-07-27 21:30:40,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65068.0, ans=0.125 +2024-07-27 21:30:44,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=65081.333333333336, ans=0.125 +2024-07-27 21:30:55,206 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.363e+01 7.228e+01 8.012e+01 1.089e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 21:31:03,071 INFO [train.py:1114] (2/4) Epoch 5, batch 7950, loss[loss=0.3214, simple_loss=0.3771, pruned_loss=0.1329, over 3428.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3242, pruned_loss=0.08432, over 935016.22 frames. ], batch size: 35, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:31:03,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=65121.333333333336, ans=0.0 +2024-07-27 21:31:16,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=65148.0, ans=0.125 +2024-07-27 21:31:26,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=65161.333333333336, ans=0.0 +2024-07-27 21:31:37,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=6.0 +2024-07-27 21:31:38,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.27 vs. limit=15.0 +2024-07-27 21:31:43,111 INFO [train.py:1114] (2/4) Epoch 5, batch 8000, loss[loss=0.2102, simple_loss=0.2849, pruned_loss=0.06776, over 4621.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3233, pruned_loss=0.08391, over 934617.27 frames. 
], batch size: 11, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:31:43,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=65188.0, ans=0.125 +2024-07-27 21:31:45,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=65188.0, ans=0.125 +2024-07-27 21:31:53,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=65188.0, ans=0.05 +2024-07-27 21:32:00,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=65201.333333333336, ans=0.0 +2024-07-27 21:32:12,557 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:32:15,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65241.333333333336, ans=0.1 +2024-07-27 21:32:17,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 6.447e+01 7.209e+01 8.816e+01 1.330e+02, threshold=1.442e+02, percent-clipped=0.0 +2024-07-27 21:32:22,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=65254.666666666664, ans=0.2 +2024-07-27 21:32:23,351 INFO [train.py:1114] (2/4) Epoch 5, batch 8050, loss[loss=0.2408, simple_loss=0.3323, pruned_loss=0.07465, over 4809.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3232, pruned_loss=0.0838, over 934711.42 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:32:23,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=65254.666666666664, ans=0.125 +2024-07-27 21:32:26,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=65254.666666666664, ans=0.025 +2024-07-27 21:32:26,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=65254.666666666664, ans=0.0 +2024-07-27 21:32:29,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=65268.0, ans=0.125 +2024-07-27 21:32:32,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.04 vs. limit=15.0 +2024-07-27 21:32:43,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=65281.333333333336, ans=0.2 +2024-07-27 21:32:49,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=65294.666666666664, ans=0.0 +2024-07-27 21:33:00,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=65308.0, ans=0.05 +2024-07-27 21:33:02,322 INFO [train.py:1114] (2/4) Epoch 5, batch 8100, loss[loss=0.275, simple_loss=0.3562, pruned_loss=0.09686, over 4803.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3236, pruned_loss=0.08394, over 934034.29 frames. 
], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:33:13,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=65334.666666666664, ans=0.0 +2024-07-27 21:33:21,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=65361.333333333336, ans=0.95 +2024-07-27 21:33:23,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65361.333333333336, ans=0.125 +2024-07-27 21:33:30,389 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 6.276e+01 6.776e+01 7.896e+01 1.142e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-27 21:33:33,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.65 vs. limit=12.0 +2024-07-27 21:33:35,662 INFO [train.py:1114] (2/4) Epoch 5, batch 8150, loss[loss=0.2263, simple_loss=0.3034, pruned_loss=0.07462, over 4791.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3221, pruned_loss=0.0835, over 937930.37 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:33:37,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=65388.0, ans=0.05 +2024-07-27 21:33:52,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65414.666666666664, ans=0.125 +2024-07-27 21:33:58,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=65428.0, ans=0.125 +2024-07-27 21:33:58,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=65428.0, ans=0.0 +2024-07-27 21:34:03,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=65428.0, ans=0.125 +2024-07-27 21:34:03,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=65428.0, ans=0.0 +2024-07-27 21:34:11,746 INFO [train.py:1114] (2/4) Epoch 5, batch 8200, loss[loss=0.2425, simple_loss=0.3157, pruned_loss=0.0847, over 4809.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3218, pruned_loss=0.0831, over 938840.48 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:34:12,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=65454.666666666664, ans=0.07 +2024-07-27 21:34:13,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=65454.666666666664, ans=0.025 +2024-07-27 21:34:34,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=65468.0, ans=0.0 +2024-07-27 21:34:38,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65468.0, ans=0.125 +2024-07-27 21:34:45,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=65481.333333333336, ans=0.2 +2024-07-27 21:34:51,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.06 vs. 
limit=15.0 +2024-07-27 21:34:56,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65494.666666666664, ans=0.1 +2024-07-27 21:35:00,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=65508.0, ans=0.04949747468305833 +2024-07-27 21:35:00,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=65508.0, ans=0.2 +2024-07-27 21:35:01,187 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.058e+01 6.820e+01 7.758e+01 1.671e+02, threshold=1.364e+02, percent-clipped=1.0 +2024-07-27 21:35:06,489 INFO [train.py:1114] (2/4) Epoch 5, batch 8250, loss[loss=0.2216, simple_loss=0.3106, pruned_loss=0.06625, over 4896.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3226, pruned_loss=0.08345, over 938737.61 frames. ], batch size: 13, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:35:09,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=65521.333333333336, ans=0.2 +2024-07-27 21:35:15,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.64 vs. limit=10.0 +2024-07-27 21:35:26,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=65548.0, ans=0.2 +2024-07-27 21:35:27,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-27 21:35:28,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=65548.0, ans=0.07 +2024-07-27 21:35:33,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.41 vs. limit=15.0 +2024-07-27 21:35:58,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=15.0 +2024-07-27 21:36:00,904 INFO [train.py:1114] (2/4) Epoch 5, batch 8300, loss[loss=0.2819, simple_loss=0.3555, pruned_loss=0.1041, over 4904.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3248, pruned_loss=0.08486, over 938706.03 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:36:05,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=65588.0, ans=0.125 +2024-07-27 21:36:07,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=65588.0, ans=0.125 +2024-07-27 21:36:09,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=65601.33333333333, ans=0.125 +2024-07-27 21:36:12,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.00 vs. limit=15.0 +2024-07-27 21:36:14,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. 
limit=15.0 +2024-07-27 21:36:32,863 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.223e+01 6.833e+01 7.614e+01 1.184e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 21:36:44,523 INFO [train.py:1114] (2/4) Epoch 5, batch 8350, loss[loss=0.2667, simple_loss=0.3475, pruned_loss=0.09298, over 4808.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3223, pruned_loss=0.08365, over 941643.08 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:36:47,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.89 vs. limit=15.0 +2024-07-27 21:36:49,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=65654.66666666667, ans=0.0 +2024-07-27 21:36:55,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.17 vs. limit=12.0 +2024-07-27 21:36:56,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0 +2024-07-27 21:36:57,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=65668.0, ans=0.125 +2024-07-27 21:36:59,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=65681.33333333333, ans=0.2 +2024-07-27 21:37:00,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=65681.33333333333, ans=0.07 +2024-07-27 21:37:06,567 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-27 21:37:13,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=65708.0, ans=0.125 +2024-07-27 21:37:21,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65708.0, ans=0.0 +2024-07-27 21:37:24,208 INFO [train.py:1114] (2/4) Epoch 5, batch 8400, loss[loss=0.2303, simple_loss=0.2993, pruned_loss=0.08063, over 4782.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3219, pruned_loss=0.08323, over 940250.39 frames. ], batch size: 12, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:37:24,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.64 vs. 
limit=15.0 +2024-07-27 21:37:25,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=65721.33333333333, ans=0.025 +2024-07-27 21:37:38,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=65748.0, ans=0.025 +2024-07-27 21:37:42,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=65748.0, ans=0.125 +2024-07-27 21:37:43,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=65748.0, ans=0.02 +2024-07-27 21:37:49,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=65761.33333333333, ans=0.0 +2024-07-27 21:37:57,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=65761.33333333333, ans=0.125 +2024-07-27 21:38:02,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.300e+01 6.390e+01 7.817e+01 9.094e+01 1.508e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 21:38:07,416 INFO [train.py:1114] (2/4) Epoch 5, batch 8450, loss[loss=0.2629, simple_loss=0.3414, pruned_loss=0.09217, over 4807.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3222, pruned_loss=0.08346, over 939334.10 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:38:17,020 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.52 vs. limit=10.0 +2024-07-27 21:38:28,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=65814.66666666667, ans=0.125 +2024-07-27 21:38:43,068 INFO [train.py:1114] (2/4) Epoch 5, batch 8500, loss[loss=0.2185, simple_loss=0.297, pruned_loss=0.06997, over 4627.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3218, pruned_loss=0.0835, over 939009.43 frames. ], batch size: 11, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:38:43,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=65854.66666666667, ans=0.125 +2024-07-27 21:38:44,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=65854.66666666667, ans=0.0 +2024-07-27 21:38:48,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=65854.66666666667, ans=0.2 +2024-07-27 21:39:14,109 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 6.095e+01 6.601e+01 7.527e+01 1.077e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-27 21:39:15,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0 +2024-07-27 21:39:19,324 INFO [train.py:1114] (2/4) Epoch 5, batch 8550, loss[loss=0.197, simple_loss=0.2816, pruned_loss=0.05623, over 4795.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3206, pruned_loss=0.08306, over 939968.43 frames. 
], batch size: 11, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:39:22,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=65921.33333333333, ans=15.0 +2024-07-27 21:39:43,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.22 vs. limit=22.5 +2024-07-27 21:39:47,505 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:39:53,159 INFO [train.py:1114] (2/4) Epoch 5, batch 8600, loss[loss=0.2535, simple_loss=0.3255, pruned_loss=0.09076, over 4793.00 frames. ], tot_loss[loss=0.243, simple_loss=0.32, pruned_loss=0.08301, over 939539.24 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:39:56,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=65988.0, ans=0.0 +2024-07-27 21:40:01,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66001.33333333333, ans=0.0 +2024-07-27 21:40:03,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0 +2024-07-27 21:40:10,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=66014.66666666667, ans=0.2 +2024-07-27 21:40:18,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=66041.33333333333, ans=0.025 +2024-07-27 21:40:19,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=66041.33333333333, ans=0.04949747468305833 +2024-07-27 21:40:19,966 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.202e+01 6.597e+01 7.756e+01 9.469e+01 1.243e+02, threshold=1.551e+02, percent-clipped=0.0 +2024-07-27 21:40:22,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=66041.33333333333, ans=0.125 +2024-07-27 21:40:25,122 INFO [train.py:1114] (2/4) Epoch 5, batch 8650, loss[loss=0.232, simple_loss=0.3184, pruned_loss=0.0728, over 4893.00 frames. ], tot_loss[loss=0.2427, simple_loss=0.32, pruned_loss=0.08274, over 940787.37 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:40:25,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=66054.66666666667, ans=0.025 +2024-07-27 21:40:42,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-07-27 21:40:50,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=66108.0, ans=0.0 +2024-07-27 21:40:52,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=66108.0, ans=0.125 +2024-07-27 21:40:58,366 INFO [train.py:1114] (2/4) Epoch 5, batch 8700, loss[loss=0.2644, simple_loss=0.3421, pruned_loss=0.0933, over 4756.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3212, pruned_loss=0.08376, over 938627.05 frames. 
], batch size: 13, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:40:58,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=66121.33333333333, ans=0.05 +2024-07-27 21:40:58,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=66121.33333333333, ans=0.07 +2024-07-27 21:41:00,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66121.33333333333, ans=0.125 +2024-07-27 21:41:05,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=66134.66666666667, ans=0.125 +2024-07-27 21:41:11,966 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:41:14,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=66148.0, ans=0.125 +2024-07-27 21:41:17,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66148.0, ans=0.125 +2024-07-27 21:41:27,644 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 6.098e+01 6.655e+01 7.583e+01 1.149e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 21:41:30,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66174.66666666667, ans=0.125 +2024-07-27 21:41:31,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=66174.66666666667, ans=0.07 +2024-07-27 21:41:32,142 INFO [train.py:1114] (2/4) Epoch 5, batch 8750, loss[loss=0.2662, simple_loss=0.3515, pruned_loss=0.09047, over 4681.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3209, pruned_loss=0.08338, over 936755.75 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:41:42,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=66188.0, ans=0.5 +2024-07-27 21:42:04,933 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:42:06,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66241.33333333333, ans=0.0 +2024-07-27 21:42:07,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66241.33333333333, ans=0.0 +2024-07-27 21:42:07,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=66241.33333333333, ans=0.125 +2024-07-27 21:42:14,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.94 vs. limit=10.0 +2024-07-27 21:42:21,961 INFO [train.py:1114] (2/4) Epoch 5, batch 8800, loss[loss=0.2839, simple_loss=0.3584, pruned_loss=0.1048, over 4930.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3211, pruned_loss=0.08415, over 937741.15 frames. 
], batch size: 14, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:42:26,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=66254.66666666667, ans=0.1 +2024-07-27 21:42:34,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=66268.0, ans=0.125 +2024-07-27 21:42:47,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=66294.66666666667, ans=0.125 +2024-07-27 21:42:54,235 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.254e+01 7.129e+01 8.198e+01 1.307e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 21:42:58,856 INFO [train.py:1114] (2/4) Epoch 5, batch 8850, loss[loss=0.2289, simple_loss=0.3115, pruned_loss=0.07312, over 4487.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3199, pruned_loss=0.08319, over 932648.88 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:43:04,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=66321.33333333333, ans=0.0 +2024-07-27 21:43:04,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.14 vs. limit=12.0 +2024-07-27 21:43:16,994 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.32 vs. limit=15.0 +2024-07-27 21:43:18,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=66348.0, ans=0.2 +2024-07-27 21:43:33,893 INFO [train.py:1114] (2/4) Epoch 5, batch 8900, loss[loss=0.2022, simple_loss=0.2864, pruned_loss=0.059, over 4937.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3196, pruned_loss=0.08282, over 930922.45 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:43:36,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=66388.0, ans=0.0 +2024-07-27 21:43:40,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=66401.33333333333, ans=0.125 +2024-07-27 21:43:40,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=66401.33333333333, ans=0.2 +2024-07-27 21:43:47,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66414.66666666667, ans=0.125 +2024-07-27 21:43:55,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=66428.0, ans=0.125 +2024-07-27 21:43:56,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. 
limit=15.0 +2024-07-27 21:43:58,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=66428.0, ans=0.0 +2024-07-27 21:44:03,543 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 6.558e+01 7.585e+01 9.378e+01 1.606e+02, threshold=1.517e+02, percent-clipped=2.0 +2024-07-27 21:44:03,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=15.0 +2024-07-27 21:44:08,115 INFO [train.py:1114] (2/4) Epoch 5, batch 8950, loss[loss=0.225, simple_loss=0.3093, pruned_loss=0.0704, over 4584.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.319, pruned_loss=0.08262, over 931896.20 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:44:12,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=66454.66666666667, ans=0.125 +2024-07-27 21:44:16,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=66454.66666666667, ans=0.1 +2024-07-27 21:44:41,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=66508.0, ans=0.2 +2024-07-27 21:44:43,391 INFO [train.py:1114] (2/4) Epoch 5, batch 9000, loss[loss=0.2036, simple_loss=0.2888, pruned_loss=0.05925, over 4638.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3183, pruned_loss=0.08246, over 934546.62 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:44:43,392 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 21:44:50,970 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.9094, 2.0706, 3.5882, 2.6427], device='cuda:2') +2024-07-27 21:44:55,830 INFO [train.py:1146] (2/4) Epoch 5, validation: loss=0.197, simple_loss=0.3006, pruned_loss=0.04666, over 944034.00 frames. +2024-07-27 21:44:55,830 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 21:44:59,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=66521.33333333333, ans=0.2 +2024-07-27 21:45:07,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=66534.66666666667, ans=0.2 +2024-07-27 21:45:21,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=66561.33333333333, ans=0.0 +2024-07-27 21:45:36,646 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.268e+01 6.311e+01 7.147e+01 8.276e+01 1.860e+02, threshold=1.429e+02, percent-clipped=1.0 +2024-07-27 21:45:41,886 INFO [train.py:1114] (2/4) Epoch 5, batch 9050, loss[loss=0.2518, simple_loss=0.3038, pruned_loss=0.09989, over 4530.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3177, pruned_loss=0.0818, over 935117.03 frames. ], batch size: 10, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:46:04,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=66628.0, ans=0.0 +2024-07-27 21:46:16,350 INFO [train.py:1114] (2/4) Epoch 5, batch 9100, loss[loss=0.2391, simple_loss=0.3264, pruned_loss=0.07592, over 4924.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3182, pruned_loss=0.0817, over 937458.54 frames. 
], batch size: 14, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:46:20,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.88 vs. limit=10.0 +2024-07-27 21:46:39,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=66694.66666666667, ans=0.2 +2024-07-27 21:46:40,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=66694.66666666667, ans=0.125 +2024-07-27 21:46:44,167 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.191e+01 6.256e+01 6.952e+01 8.323e+01 1.113e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 21:46:46,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=66708.0, ans=0.125 +2024-07-27 21:46:47,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=66721.33333333333, ans=0.2 +2024-07-27 21:46:48,052 INFO [train.py:1114] (2/4) Epoch 5, batch 9150, loss[loss=0.2371, simple_loss=0.3275, pruned_loss=0.07336, over 4809.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3189, pruned_loss=0.08209, over 936410.18 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:46:48,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=66721.33333333333, ans=0.0 +2024-07-27 21:46:54,995 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.06 vs. limit=10.0 +2024-07-27 21:47:01,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=66734.66666666667, ans=0.0 +2024-07-27 21:47:07,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-27 21:47:09,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66761.33333333333, ans=0.125 +2024-07-27 21:47:20,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=66774.66666666667, ans=0.0 +2024-07-27 21:47:21,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0 +2024-07-27 21:47:21,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=66774.66666666667, ans=0.0 +2024-07-27 21:47:22,576 INFO [train.py:1114] (2/4) Epoch 5, batch 9200, loss[loss=0.2247, simple_loss=0.2961, pruned_loss=0.07667, over 4845.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.3187, pruned_loss=0.082, over 938011.01 frames. 
], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:47:28,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=66801.33333333333, ans=0.0 +2024-07-27 21:47:33,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=66801.33333333333, ans=0.1 +2024-07-27 21:47:41,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=66828.0, ans=0.125 +2024-07-27 21:47:50,765 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 6.022e+01 6.976e+01 8.483e+01 1.676e+02, threshold=1.395e+02, percent-clipped=4.0 +2024-07-27 21:47:52,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=66841.33333333333, ans=0.0 +2024-07-27 21:47:54,616 INFO [train.py:1114] (2/4) Epoch 5, batch 9250, loss[loss=0.2358, simple_loss=0.3228, pruned_loss=0.07439, over 4639.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3187, pruned_loss=0.08164, over 938799.24 frames. ], batch size: 13, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:48:17,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=66894.66666666667, ans=0.0 +2024-07-27 21:48:23,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=66908.0, ans=0.125 +2024-07-27 21:48:26,549 INFO [train.py:1114] (2/4) Epoch 5, batch 9300, loss[loss=0.2059, simple_loss=0.2825, pruned_loss=0.06463, over 4779.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3188, pruned_loss=0.08165, over 938661.52 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:48:30,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=66921.33333333333, ans=0.0 +2024-07-27 21:48:37,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=66934.66666666667, ans=0.125 +2024-07-27 21:48:39,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-27 21:48:42,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.02 vs. 
limit=15.0 +2024-07-27 21:48:49,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=66961.33333333333, ans=0.09899494936611666 +2024-07-27 21:48:50,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=66961.33333333333, ans=0.04949747468305833 +2024-07-27 21:48:54,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=66961.33333333333, ans=0.125 +2024-07-27 21:48:57,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=66974.66666666667, ans=0.0 +2024-07-27 21:48:58,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=66974.66666666667, ans=0.1 +2024-07-27 21:48:59,173 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.113e+01 6.261e+01 7.073e+01 8.470e+01 1.590e+02, threshold=1.415e+02, percent-clipped=1.0 +2024-07-27 21:49:02,814 INFO [train.py:1114] (2/4) Epoch 5, batch 9350, loss[loss=0.2239, simple_loss=0.2918, pruned_loss=0.07796, over 4798.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3179, pruned_loss=0.08147, over 936195.61 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:49:06,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=66988.0, ans=0.125 +2024-07-27 21:49:10,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.14 vs. limit=15.0 +2024-07-27 21:49:11,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=67001.33333333333, ans=0.0 +2024-07-27 21:49:20,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=15.0 +2024-07-27 21:49:28,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=67028.0, ans=0.0 +2024-07-27 21:49:29,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.51 vs. limit=15.0 +2024-07-27 21:49:31,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67041.33333333333, ans=0.125 +2024-07-27 21:49:35,472 INFO [train.py:1114] (2/4) Epoch 5, batch 9400, loss[loss=0.1978, simple_loss=0.2859, pruned_loss=0.05487, over 4690.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3174, pruned_loss=0.08116, over 933547.02 frames. 
], batch size: 13, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:49:38,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67054.66666666667, ans=0.125 +2024-07-27 21:49:43,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67068.0, ans=0.125 +2024-07-27 21:49:46,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=67068.0, ans=0.95 +2024-07-27 21:49:54,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=67094.66666666667, ans=0.125 +2024-07-27 21:49:59,948 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 21:50:01,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=67108.0, ans=0.0 +2024-07-27 21:50:03,182 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.980e+01 6.533e+01 7.095e+01 1.005e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-27 21:50:05,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=67108.0, ans=0.07 +2024-07-27 21:50:07,169 INFO [train.py:1114] (2/4) Epoch 5, batch 9450, loss[loss=0.1941, simple_loss=0.2773, pruned_loss=0.0555, over 4799.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3182, pruned_loss=0.08135, over 932599.64 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:50:08,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=67121.33333333333, ans=0.125 +2024-07-27 21:50:10,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.96 vs. limit=15.0 +2024-07-27 21:50:10,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. limit=6.0 +2024-07-27 21:50:19,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67148.0, ans=0.1 +2024-07-27 21:50:22,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=67148.0, ans=0.2 +2024-07-27 21:50:24,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=67148.0, ans=0.5 +2024-07-27 21:50:37,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=67174.66666666667, ans=0.125 +2024-07-27 21:50:39,307 INFO [train.py:1114] (2/4) Epoch 5, batch 9500, loss[loss=0.1993, simple_loss=0.2838, pruned_loss=0.05742, over 4708.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3186, pruned_loss=0.08135, over 934648.08 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:50:41,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.55 vs. 
limit=22.5 +2024-07-27 21:50:46,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.09 vs. limit=10.0 +2024-07-27 21:51:02,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67228.0, ans=0.125 +2024-07-27 21:51:08,217 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.271e+01 6.540e+01 7.330e+01 8.472e+01 1.165e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 21:51:11,964 INFO [train.py:1114] (2/4) Epoch 5, batch 9550, loss[loss=0.2586, simple_loss=0.3314, pruned_loss=0.09292, over 4777.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3198, pruned_loss=0.0823, over 932240.65 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:51:12,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=67254.66666666667, ans=0.2 +2024-07-27 21:51:27,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=67254.66666666667, ans=0.2 +2024-07-27 21:51:29,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.38 vs. limit=15.0 +2024-07-27 21:51:44,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=67281.33333333333, ans=0.0 +2024-07-27 21:51:58,328 INFO [train.py:1114] (2/4) Epoch 5, batch 9600, loss[loss=0.2868, simple_loss=0.3376, pruned_loss=0.118, over 3398.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3202, pruned_loss=0.08222, over 931406.76 frames. ], batch size: 35, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:07,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=67321.33333333333, ans=0.0 +2024-07-27 21:52:20,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=67348.0, ans=0.125 +2024-07-27 21:52:24,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=12.0 +2024-07-27 21:52:32,049 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.609e+01 6.845e+01 7.921e+01 9.332e+01 1.441e+02, threshold=1.584e+02, percent-clipped=0.0 +2024-07-27 21:52:34,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.44 vs. limit=15.0 +2024-07-27 21:52:35,928 INFO [train.py:1114] (2/4) Epoch 5, batch 9650, loss[loss=0.2053, simple_loss=0.2903, pruned_loss=0.06016, over 4818.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3219, pruned_loss=0.08304, over 927391.68 frames. 
], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:46,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=67401.33333333333, ans=0.07 +2024-07-27 21:52:52,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=67414.66666666667, ans=15.0 +2024-07-27 21:53:02,610 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:53:09,961 INFO [train.py:1114] (2/4) Epoch 5, batch 9700, loss[loss=0.2705, simple_loss=0.3306, pruned_loss=0.1052, over 4427.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3211, pruned_loss=0.08267, over 925635.60 frames. ], batch size: 26, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:10,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=67454.66666666667, ans=0.125 +2024-07-27 21:53:10,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67454.66666666667, ans=0.125 +2024-07-27 21:53:12,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=67454.66666666667, ans=0.0 +2024-07-27 21:53:14,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=67454.66666666667, ans=0.035 +2024-07-27 21:53:17,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=67468.0, ans=0.125 +2024-07-27 21:53:20,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67468.0, ans=0.125 +2024-07-27 21:53:20,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=67468.0, ans=0.025 +2024-07-27 21:53:28,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=67494.66666666667, ans=0.2 +2024-07-27 21:53:32,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=67494.66666666667, ans=0.05 +2024-07-27 21:53:37,143 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.756e+01 6.215e+01 7.059e+01 7.921e+01 1.151e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 21:53:38,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.07 vs. limit=15.0 +2024-07-27 21:53:41,377 INFO [train.py:1114] (2/4) Epoch 5, batch 9750, loss[loss=0.2662, simple_loss=0.3551, pruned_loss=0.08862, over 4679.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3208, pruned_loss=0.08275, over 925984.39 frames. 
], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:42,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=67521.33333333333, ans=0.0 +2024-07-27 21:53:47,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=67534.66666666667, ans=0.025 +2024-07-27 21:53:52,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=67534.66666666667, ans=0.05 +2024-07-27 21:53:56,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.02 vs. limit=15.0 +2024-07-27 21:53:58,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=15.0 +2024-07-27 21:53:58,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67548.0, ans=0.125 +2024-07-27 21:54:09,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=67574.66666666667, ans=0.2 +2024-07-27 21:54:13,366 INFO [train.py:1114] (2/4) Epoch 5, batch 9800, loss[loss=0.2452, simple_loss=0.3259, pruned_loss=0.08226, over 4705.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3183, pruned_loss=0.08142, over 925980.66 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:14,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=67588.0, ans=0.0 +2024-07-27 21:54:18,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=67588.0, ans=0.0 +2024-07-27 21:54:36,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=67628.0, ans=0.125 +2024-07-27 21:54:40,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 6.437e+01 7.516e+01 8.874e+01 1.109e+02, threshold=1.503e+02, percent-clipped=0.0 +2024-07-27 21:54:42,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67641.33333333333, ans=0.1 +2024-07-27 21:54:44,136 INFO [train.py:1114] (2/4) Epoch 5, batch 9850, loss[loss=0.2165, simple_loss=0.3117, pruned_loss=0.06066, over 4909.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.3196, pruned_loss=0.08183, over 928599.32 frames. 
], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:44,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67654.66666666667, ans=0.125 +2024-07-27 21:54:49,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67668.0, ans=0.1 +2024-07-27 21:54:56,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=67681.33333333333, ans=0.0 +2024-07-27 21:55:03,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=67694.66666666667, ans=0.0 +2024-07-27 21:55:06,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67694.66666666667, ans=0.125 +2024-07-27 21:55:15,351 INFO [train.py:1114] (2/4) Epoch 5, batch 9900, loss[loss=0.2737, simple_loss=0.353, pruned_loss=0.09719, over 4828.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3205, pruned_loss=0.08289, over 927338.00 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:55:27,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=67734.66666666667, ans=0.0 +2024-07-27 21:55:31,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=67748.0, ans=10.0 +2024-07-27 21:55:35,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.57 vs. limit=6.0 +2024-07-27 21:55:44,750 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.621e+01 7.499e+01 8.431e+01 1.516e+02, threshold=1.500e+02, percent-clipped=1.0 +2024-07-27 21:55:47,785 INFO [train.py:1114] (2/4) Epoch 5, batch 9950, loss[loss=0.2223, simple_loss=0.2848, pruned_loss=0.07988, over 4825.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3213, pruned_loss=0.08368, over 929354.75 frames. ], batch size: 11, lr: 1.39e-02, grad_scale: 16.0 +2024-07-27 21:55:47,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=67788.0, ans=0.125 +2024-07-27 21:55:54,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67801.33333333333, ans=0.125 +2024-07-27 21:56:05,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67814.66666666667, ans=0.1 +2024-07-27 21:56:22,650 INFO [train.py:1114] (2/4) Epoch 5, batch 10000, loss[loss=0.2407, simple_loss=0.3236, pruned_loss=0.07888, over 4637.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3251, pruned_loss=0.08536, over 926598.14 frames. ], batch size: 16, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:56:22,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=67854.66666666667, ans=0.025 +2024-07-27 21:56:22,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. 
limit=15.0 +2024-07-27 21:56:27,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67854.66666666667, ans=0.125 +2024-07-27 21:56:30,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=67868.0, ans=0.1 +2024-07-27 21:56:41,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.73 vs. limit=22.5 +2024-07-27 21:56:44,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67894.66666666667, ans=0.125 +2024-07-27 21:56:44,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=67894.66666666667, ans=0.125 +2024-07-27 21:56:48,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=67894.66666666667, ans=0.0 +2024-07-27 21:56:53,464 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.323e+01 6.858e+01 7.699e+01 1.357e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 21:56:56,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67921.33333333333, ans=0.125 +2024-07-27 21:56:57,388 INFO [train.py:1114] (2/4) Epoch 5, batch 10050, loss[loss=0.2789, simple_loss=0.3462, pruned_loss=0.1058, over 3255.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3296, pruned_loss=0.08817, over 914138.33 frames. ], batch size: 37, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:57:02,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=15.0 +2024-07-27 21:57:04,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.31 vs. limit=15.0 +2024-07-27 21:57:11,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=67948.0, ans=0.125 +2024-07-27 21:57:16,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=67948.0, ans=0.0 +2024-07-27 21:57:20,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=15.0 +2024-07-27 21:57:22,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=67961.33333333333, ans=0.125 +2024-07-27 21:57:26,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=20.61 vs. limit=15.0 +2024-07-27 21:57:27,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.16 vs. limit=15.0 +2024-07-27 21:57:31,361 INFO [train.py:1114] (2/4) Epoch 5, batch 10100, loss[loss=0.2964, simple_loss=0.3594, pruned_loss=0.1167, over 3339.00 frames. ], tot_loss[loss=0.2648, simple_loss=0.337, pruned_loss=0.09633, over 862726.69 frames. 
], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:57:41,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68001.33333333333, ans=0.1 +2024-07-27 21:58:01,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.071e+01 6.809e+01 7.405e+01 8.060e+01 1.302e+02, threshold=1.481e+02, percent-clipped=0.0 +2024-07-27 21:58:01,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=68041.33333333333, ans=0.125 +2024-07-27 21:58:04,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.01 vs. limit=12.0 +2024-07-27 21:58:04,299 INFO [train.py:1114] (2/4) Epoch 5, batch 10150, loss[loss=0.3242, simple_loss=0.3717, pruned_loss=0.1383, over 3296.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3407, pruned_loss=0.1011, over 821700.08 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:15,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=68068.0, ans=0.5 +2024-07-27 21:58:27,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=68094.66666666667, ans=0.0 +2024-07-27 21:58:31,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68108.0, ans=0.1 +2024-07-27 21:58:32,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=68108.0, ans=0.125 +2024-07-27 21:58:32,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=68108.0, ans=0.0 +2024-07-27 21:58:34,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=68108.0, ans=15.0 +2024-07-27 21:58:35,557 INFO [train.py:1114] (2/4) Epoch 5, batch 10200, loss[loss=0.2849, simple_loss=0.3474, pruned_loss=0.1112, over 3161.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3442, pruned_loss=0.1053, over 789096.22 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:59:31,813 INFO [train.py:1114] (2/4) Epoch 6, batch 0, loss[loss=0.1969, simple_loss=0.2846, pruned_loss=0.0546, over 4837.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2846, pruned_loss=0.0546, over 4837.00 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 21:59:31,813 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 21:59:42,094 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.0101, 5.8248, 5.4887, 5.9199], device='cuda:2') +2024-07-27 21:59:43,347 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.203, simple_loss=0.3084, pruned_loss=0.04884, over 944034.00 frames. +2024-07-27 21:59:43,347 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 21:59:48,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.39 vs. limit=15.0 +2024-07-27 21:59:51,288 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.67 vs. 
limit=22.5 +2024-07-27 21:59:51,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=68164.0, ans=0.125 +2024-07-27 21:59:58,961 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.074e+01 6.594e+01 7.055e+01 7.805e+01 1.292e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 21:59:59,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68177.33333333333, ans=0.1 +2024-07-27 22:00:00,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.77 vs. limit=15.0 +2024-07-27 22:00:01,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.67 vs. limit=6.0 +2024-07-27 22:00:10,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68190.66666666667, ans=0.125 +2024-07-27 22:00:10,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68190.66666666667, ans=0.1 +2024-07-27 22:00:12,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=68204.0, ans=0.125 +2024-07-27 22:00:18,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=68217.33333333333, ans=0.07 +2024-07-27 22:00:18,904 INFO [train.py:1114] (2/4) Epoch 6, batch 50, loss[loss=0.1757, simple_loss=0.265, pruned_loss=0.04325, over 4610.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3231, pruned_loss=0.08436, over 206054.45 frames. ], batch size: 11, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:00:24,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=12.0 +2024-07-27 22:00:29,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68230.66666666667, ans=0.0 +2024-07-27 22:00:29,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68230.66666666667, ans=0.1 +2024-07-27 22:00:38,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68244.0, ans=0.1 +2024-07-27 22:00:50,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=68270.66666666667, ans=0.2 +2024-07-27 22:00:52,634 INFO [train.py:1114] (2/4) Epoch 6, batch 100, loss[loss=0.2661, simple_loss=0.3246, pruned_loss=0.1038, over 4643.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3259, pruned_loss=0.08603, over 364914.81 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:00:55,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.52 vs. 
limit=15.0 +2024-07-27 22:01:00,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=68297.33333333333, ans=0.025 +2024-07-27 22:01:09,854 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.212e+01 6.939e+01 8.250e+01 1.265e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 22:01:14,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=68324.0, ans=0.125 +2024-07-27 22:01:16,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=68324.0, ans=0.125 +2024-07-27 22:01:27,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=68350.66666666667, ans=0.0 +2024-07-27 22:01:27,712 INFO [train.py:1114] (2/4) Epoch 6, batch 150, loss[loss=0.1987, simple_loss=0.2811, pruned_loss=0.05814, over 4610.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3228, pruned_loss=0.08374, over 493522.77 frames. ], batch size: 11, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:01:41,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=68377.33333333333, ans=0.0 +2024-07-27 22:01:46,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=68377.33333333333, ans=0.125 +2024-07-27 22:01:49,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.13 vs. limit=10.0 +2024-07-27 22:02:01,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=68404.0, ans=0.0 +2024-07-27 22:02:04,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.92 vs. limit=22.5 +2024-07-27 22:02:05,152 INFO [train.py:1114] (2/4) Epoch 6, batch 200, loss[loss=0.2327, simple_loss=0.3188, pruned_loss=0.07327, over 4586.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3205, pruned_loss=0.08225, over 593210.96 frames. ], batch size: 21, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:02:05,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=68417.33333333333, ans=0.05 +2024-07-27 22:02:07,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=68417.33333333333, ans=0.0 +2024-07-27 22:02:16,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68430.66666666667, ans=0.1 +2024-07-27 22:02:20,259 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.270e+01 7.736e+01 9.618e+01 1.930e+02, threshold=1.547e+02, percent-clipped=5.0 +2024-07-27 22:02:30,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0 +2024-07-27 22:02:36,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68470.66666666667, ans=0.1 +2024-07-27 22:02:38,663 INFO [train.py:1114] (2/4) Epoch 6, batch 250, loss[loss=0.2893, simple_loss=0.3676, pruned_loss=0.1055, over 4615.00 frames. 
], tot_loss[loss=0.2415, simple_loss=0.3198, pruned_loss=0.08161, over 670086.00 frames. ], batch size: 16, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:02:41,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-07-27 22:02:48,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=68497.33333333333, ans=0.0 +2024-07-27 22:02:53,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=68510.66666666667, ans=0.0 +2024-07-27 22:02:56,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=68510.66666666667, ans=0.2 +2024-07-27 22:03:00,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=68524.0, ans=0.025 +2024-07-27 22:03:10,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=68537.33333333333, ans=0.0 +2024-07-27 22:03:14,341 INFO [train.py:1114] (2/4) Epoch 6, batch 300, loss[loss=0.2239, simple_loss=0.3123, pruned_loss=0.06774, over 4802.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.319, pruned_loss=0.08156, over 730132.22 frames. ], batch size: 15, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:03:19,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=68550.66666666667, ans=0.125 +2024-07-27 22:03:22,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.78 vs. limit=22.5 +2024-07-27 22:03:29,807 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.141e+01 6.927e+01 8.037e+01 1.226e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 22:03:29,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=68577.33333333333, ans=0.2 +2024-07-27 22:03:34,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=68590.66666666667, ans=0.025 +2024-07-27 22:03:41,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=68604.0, ans=0.1 +2024-07-27 22:03:49,912 INFO [train.py:1114] (2/4) Epoch 6, batch 350, loss[loss=0.2249, simple_loss=0.2906, pruned_loss=0.0796, over 4934.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3177, pruned_loss=0.08054, over 776381.73 frames. 
], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:03:50,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=68617.33333333333, ans=0.0 +2024-07-27 22:04:06,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=68644.0, ans=10.0 +2024-07-27 22:04:09,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=68657.33333333333, ans=0.035 +2024-07-27 22:04:12,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68657.33333333333, ans=0.1 +2024-07-27 22:04:20,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.61 vs. limit=15.0 +2024-07-27 22:04:23,111 INFO [train.py:1114] (2/4) Epoch 6, batch 400, loss[loss=0.2401, simple_loss=0.3193, pruned_loss=0.08049, over 4694.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3169, pruned_loss=0.07962, over 813645.31 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:04:27,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=68684.0, ans=10.0 +2024-07-27 22:04:29,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=68684.0, ans=0.0 +2024-07-27 22:04:36,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68697.33333333333, ans=0.125 +2024-07-27 22:04:39,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.83 vs. limit=22.5 +2024-07-27 22:04:42,399 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 6.252e+01 7.226e+01 8.425e+01 1.439e+02, threshold=1.445e+02, percent-clipped=1.0 +2024-07-27 22:05:00,649 INFO [train.py:1114] (2/4) Epoch 6, batch 450, loss[loss=0.1981, simple_loss=0.2819, pruned_loss=0.05722, over 4634.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.3179, pruned_loss=0.08056, over 839003.88 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:05:05,380 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:05:08,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=68764.0, ans=0.2 +2024-07-27 22:05:12,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=68764.0, ans=0.0 +2024-07-27 22:05:21,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68790.66666666667, ans=0.1 +2024-07-27 22:05:33,917 INFO [train.py:1114] (2/4) Epoch 6, batch 500, loss[loss=0.2515, simple_loss=0.3308, pruned_loss=0.08612, over 4690.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3166, pruned_loss=0.07993, over 861384.07 frames. 
], batch size: 15, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:05:51,014 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 6.191e+01 6.809e+01 7.735e+01 1.328e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-27 22:05:55,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=68857.33333333333, ans=0.125 +2024-07-27 22:06:09,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.22 vs. limit=15.0 +2024-07-27 22:06:09,499 INFO [train.py:1114] (2/4) Epoch 6, batch 550, loss[loss=0.2916, simple_loss=0.3611, pruned_loss=0.111, over 4610.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3163, pruned_loss=0.07974, over 877755.70 frames. ], batch size: 17, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:06:11,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=68884.0, ans=0.125 +2024-07-27 22:06:20,117 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=4.191e-01 +2024-07-27 22:06:35,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=68924.0, ans=0.0 +2024-07-27 22:06:37,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.87 vs. limit=15.0 +2024-07-27 22:06:38,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68937.33333333333, ans=0.125 +2024-07-27 22:06:39,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.10 vs. limit=8.0 +2024-07-27 22:06:39,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=68937.33333333333, ans=0.125 +2024-07-27 22:06:41,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=68937.33333333333, ans=0.125 +2024-07-27 22:06:43,657 INFO [train.py:1114] (2/4) Epoch 6, batch 600, loss[loss=0.2922, simple_loss=0.3625, pruned_loss=0.111, over 4598.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3163, pruned_loss=0.07986, over 891949.96 frames. ], batch size: 16, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:06:49,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68950.66666666667, ans=0.1 +2024-07-27 22:06:58,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=68977.33333333333, ans=0.125 +2024-07-27 22:06:59,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=68977.33333333333, ans=0.0 +2024-07-27 22:07:00,810 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.226e+01 6.771e+01 7.767e+01 1.130e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-27 22:07:01,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. 
limit=15.0 +2024-07-27 22:07:03,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=68977.33333333333, ans=0.0 +2024-07-27 22:07:18,951 INFO [train.py:1114] (2/4) Epoch 6, batch 650, loss[loss=0.2317, simple_loss=0.3084, pruned_loss=0.07749, over 4758.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3155, pruned_loss=0.07913, over 903577.24 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:07:43,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.24 vs. limit=15.0 +2024-07-27 22:07:52,605 INFO [train.py:1114] (2/4) Epoch 6, batch 700, loss[loss=0.2151, simple_loss=0.2824, pruned_loss=0.07396, over 4641.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.316, pruned_loss=0.07888, over 911495.69 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:07:54,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=69084.0, ans=0.05 +2024-07-27 22:08:07,875 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.900e+01 6.634e+01 8.042e+01 1.194e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-27 22:08:16,095 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:08:27,948 INFO [train.py:1114] (2/4) Epoch 6, batch 750, loss[loss=0.1921, simple_loss=0.2975, pruned_loss=0.04336, over 4696.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3153, pruned_loss=0.0784, over 918165.74 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:08:36,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=6.0 +2024-07-27 22:08:44,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69177.33333333333, ans=0.1 +2024-07-27 22:09:00,974 INFO [train.py:1114] (2/4) Epoch 6, batch 800, loss[loss=0.2206, simple_loss=0.2977, pruned_loss=0.07174, over 4863.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3166, pruned_loss=0.07924, over 923309.60 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:09:01,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=69217.33333333333, ans=0.2 +2024-07-27 22:09:18,018 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.094e+01 6.022e+01 6.607e+01 7.761e+01 1.209e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-27 22:09:20,334 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:09:37,884 INFO [train.py:1114] (2/4) Epoch 6, batch 850, loss[loss=0.2305, simple_loss=0.3276, pruned_loss=0.06671, over 4664.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3169, pruned_loss=0.07932, over 927364.65 frames. 
], batch size: 14, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:09:48,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=69297.33333333333, ans=0.125 +2024-07-27 22:09:50,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69310.66666666667, ans=0.125 +2024-07-27 22:09:52,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=69310.66666666667, ans=0.0 +2024-07-27 22:09:53,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=69310.66666666667, ans=0.2 +2024-07-27 22:09:57,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=69324.0, ans=0.025 +2024-07-27 22:10:12,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.47 vs. limit=12.0 +2024-07-27 22:10:13,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=69337.33333333333, ans=0.125 +2024-07-27 22:10:15,142 INFO [train.py:1114] (2/4) Epoch 6, batch 900, loss[loss=0.2156, simple_loss=0.2751, pruned_loss=0.07807, over 4847.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3164, pruned_loss=0.07865, over 928432.81 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:10:24,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69364.0, ans=0.1 +2024-07-27 22:10:29,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=69377.33333333333, ans=0.0 +2024-07-27 22:10:30,381 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.354e+01 7.008e+01 8.406e+01 1.301e+02, threshold=1.402e+02, percent-clipped=0.0 +2024-07-27 22:10:31,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=69377.33333333333, ans=0.0 +2024-07-27 22:10:32,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=69377.33333333333, ans=0.025 +2024-07-27 22:10:48,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=69417.33333333333, ans=0.0 +2024-07-27 22:10:48,668 INFO [train.py:1114] (2/4) Epoch 6, batch 950, loss[loss=0.2123, simple_loss=0.2901, pruned_loss=0.06726, over 4771.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3165, pruned_loss=0.07905, over 930401.71 frames. 
], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:10:49,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69417.33333333333, ans=0.1 +2024-07-27 22:10:57,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=69430.66666666667, ans=0.05 +2024-07-27 22:10:58,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=69430.66666666667, ans=0.125 +2024-07-27 22:11:20,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=69470.66666666667, ans=0.125 +2024-07-27 22:11:23,654 INFO [train.py:1114] (2/4) Epoch 6, batch 1000, loss[loss=0.2404, simple_loss=0.3292, pruned_loss=0.07575, over 4961.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3183, pruned_loss=0.07989, over 929865.14 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:11:23,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=69484.0, ans=0.125 +2024-07-27 22:11:27,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=69484.0, ans=0.025 +2024-07-27 22:11:32,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69497.33333333333, ans=0.1 +2024-07-27 22:11:37,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=69510.66666666667, ans=0.07 +2024-07-27 22:11:39,443 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+01 6.209e+01 6.779e+01 8.145e+01 1.211e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 22:11:40,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=69510.66666666667, ans=0.125 +2024-07-27 22:11:41,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.90 vs. limit=22.5 +2024-07-27 22:11:42,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69510.66666666667, ans=0.1 +2024-07-27 22:11:44,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=69524.0, ans=0.125 +2024-07-27 22:11:49,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=69524.0, ans=0.0 +2024-07-27 22:11:57,412 INFO [train.py:1114] (2/4) Epoch 6, batch 1050, loss[loss=0.2327, simple_loss=0.3206, pruned_loss=0.07245, over 4879.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3161, pruned_loss=0.07877, over 932208.53 frames. 
], batch size: 14, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:11:58,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=69550.66666666667, ans=0.125 +2024-07-27 22:12:04,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=69564.0, ans=0.09899494936611666 +2024-07-27 22:12:25,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0 +2024-07-27 22:12:27,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69604.0, ans=0.1 +2024-07-27 22:12:32,787 INFO [train.py:1114] (2/4) Epoch 6, batch 1100, loss[loss=0.2035, simple_loss=0.2871, pruned_loss=0.05999, over 4909.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3154, pruned_loss=0.07824, over 934487.73 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:12:40,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=69630.66666666667, ans=0.07 +2024-07-27 22:12:40,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=69630.66666666667, ans=0.125 +2024-07-27 22:12:41,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=69630.66666666667, ans=10.0 +2024-07-27 22:12:48,101 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.350e+01 6.961e+01 9.139e+01, threshold=1.270e+02, percent-clipped=0.0 +2024-07-27 22:12:49,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=69644.0, ans=0.0 +2024-07-27 22:13:05,923 INFO [train.py:1114] (2/4) Epoch 6, batch 1150, loss[loss=0.2284, simple_loss=0.303, pruned_loss=0.07691, over 4886.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3146, pruned_loss=0.07824, over 933928.51 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:13:11,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0 +2024-07-27 22:13:18,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=69697.33333333333, ans=0.125 +2024-07-27 22:13:23,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.83 vs. limit=15.0 +2024-07-27 22:13:25,739 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:13:40,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.74 vs. 
limit=15.0 +2024-07-27 22:13:41,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=69724.0, ans=0.2 +2024-07-27 22:13:48,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=69737.33333333333, ans=0.0 +2024-07-27 22:13:50,317 INFO [train.py:1114] (2/4) Epoch 6, batch 1200, loss[loss=0.2475, simple_loss=0.3257, pruned_loss=0.08466, over 4873.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3157, pruned_loss=0.07906, over 932887.81 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:13:53,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=69750.66666666667, ans=0.0 +2024-07-27 22:14:07,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.960e+01 6.565e+01 7.380e+01 1.067e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-27 22:14:07,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69777.33333333333, ans=0.1 +2024-07-27 22:14:18,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=69804.0, ans=0.125 +2024-07-27 22:14:25,339 INFO [train.py:1114] (2/4) Epoch 6, batch 1250, loss[loss=0.2646, simple_loss=0.3394, pruned_loss=0.09491, over 4799.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3163, pruned_loss=0.0788, over 936894.30 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:14:41,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=69844.0, ans=0.0 +2024-07-27 22:14:52,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=69870.66666666667, ans=0.2 +2024-07-27 22:14:56,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=69870.66666666667, ans=0.0 +2024-07-27 22:14:57,765 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:14:58,322 INFO [train.py:1114] (2/4) Epoch 6, batch 1300, loss[loss=0.2712, simple_loss=0.3456, pruned_loss=0.09836, over 4698.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3142, pruned_loss=0.07782, over 938432.36 frames. 
], batch size: 19, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:15:03,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69884.0, ans=0.1 +2024-07-27 22:15:09,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=69897.33333333333, ans=0.0 +2024-07-27 22:15:14,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=69910.66666666667, ans=0.0 +2024-07-27 22:15:15,341 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.914e+01 6.589e+01 7.357e+01 1.015e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-27 22:15:16,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=69910.66666666667, ans=0.05 +2024-07-27 22:15:25,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=69924.0, ans=0.125 +2024-07-27 22:15:25,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=69924.0, ans=0.0 +2024-07-27 22:15:26,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=69937.33333333333, ans=0.125 +2024-07-27 22:15:29,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=69937.33333333333, ans=0.125 +2024-07-27 22:15:33,821 INFO [train.py:1114] (2/4) Epoch 6, batch 1350, loss[loss=0.2383, simple_loss=0.3286, pruned_loss=0.07396, over 4757.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.314, pruned_loss=0.07732, over 940422.53 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:15:44,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=69964.0, ans=0.1 +2024-07-27 22:15:50,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=69977.33333333333, ans=0.125 +2024-07-27 22:16:07,368 INFO [train.py:1114] (2/4) Epoch 6, batch 1400, loss[loss=0.1964, simple_loss=0.2769, pruned_loss=0.05798, over 4707.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3139, pruned_loss=0.07763, over 942316.43 frames. 
], batch size: 11, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:16:07,472 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:16:10,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=70017.33333333333, ans=0.2 +2024-07-27 22:16:14,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=70030.66666666667, ans=0.125 +2024-07-27 22:16:17,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=70030.66666666667, ans=0.2 +2024-07-27 22:16:22,918 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 6.116e+01 6.900e+01 7.787e+01 1.307e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-27 22:17:12,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70044.0, ans=0.1 +2024-07-27 22:17:28,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.63 vs. limit=15.0 +2024-07-27 22:17:29,173 INFO [train.py:1114] (2/4) Epoch 6, batch 1450, loss[loss=0.2689, simple_loss=0.3496, pruned_loss=0.09407, over 4682.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3149, pruned_loss=0.0779, over 942289.56 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:17:38,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=70097.33333333333, ans=0.125 +2024-07-27 22:17:42,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=70097.33333333333, ans=0.125 +2024-07-27 22:17:49,260 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:17:49,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=70110.66666666667, ans=0.0 +2024-07-27 22:17:58,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70137.33333333333, ans=0.125 +2024-07-27 22:18:01,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.09 vs. limit=10.0 +2024-07-27 22:18:04,305 INFO [train.py:1114] (2/4) Epoch 6, batch 1500, loss[loss=0.236, simple_loss=0.3165, pruned_loss=0.07774, over 4807.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3147, pruned_loss=0.07781, over 941771.00 frames. 
], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:18:08,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70150.66666666667, ans=0.1
+2024-07-27 22:18:20,393 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.883e+01 6.851e+01 7.584e+01 1.194e+02, threshold=1.370e+02, percent-clipped=0.0
+2024-07-27 22:18:24,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=70190.66666666667, ans=0.025
+2024-07-27 22:18:40,399 INFO [train.py:1114] (2/4) Epoch 6, batch 1550, loss[loss=0.2545, simple_loss=0.342, pruned_loss=0.08349, over 4902.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3154, pruned_loss=0.07791, over 938106.27 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:18:48,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=70230.66666666667, ans=0.0
+2024-07-27 22:18:49,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70230.66666666667, ans=0.1
+2024-07-27 22:18:49,880 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:18:50,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=70230.66666666667, ans=0.0
+2024-07-27 22:18:51,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=70230.66666666667, ans=0.07
+2024-07-27 22:18:51,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70230.66666666667, ans=0.1
+2024-07-27 22:18:53,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70244.0, ans=0.125
+2024-07-27 22:19:00,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.90 vs. limit=15.0
+2024-07-27 22:19:07,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70270.66666666667, ans=0.125
+2024-07-27 22:19:13,499 INFO [train.py:1114] (2/4) Epoch 6, batch 1600, loss[loss=0.2585, simple_loss=0.3395, pruned_loss=0.08877, over 4872.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3159, pruned_loss=0.07851, over 936896.05 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:19:17,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70284.0, ans=0.1
+2024-07-27 22:19:17,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=70284.0, ans=0.125
+2024-07-27 22:19:17,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=70284.0, ans=0.0
+2024-07-27 22:19:31,156 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.952e+01 6.615e+01 7.870e+01 9.186e+01 1.944e+02, threshold=1.574e+02, percent-clipped=2.0
+2024-07-27 22:19:32,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=70310.66666666667, ans=0.125
+2024-07-27 22:19:44,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=70337.33333333333, ans=0.125
+2024-07-27 22:19:49,371 INFO [train.py:1114] (2/4) Epoch 6, batch 1650, loss[loss=0.2152, simple_loss=0.3048, pruned_loss=0.06283, over 4664.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3155, pruned_loss=0.07854, over 936773.27 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:20:01,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=70364.0, ans=0.2
+2024-07-27 22:20:04,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=70377.33333333333, ans=0.1
+2024-07-27 22:20:08,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=70390.66666666667, ans=0.0
+2024-07-27 22:20:23,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70404.0, ans=0.125
+2024-07-27 22:20:24,491 INFO [train.py:1114] (2/4) Epoch 6, batch 1700, loss[loss=0.2235, simple_loss=0.2881, pruned_loss=0.07943, over 4698.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3164, pruned_loss=0.07858, over 938633.10 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:20:25,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=70417.33333333333, ans=0.125
+2024-07-27 22:20:26,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=70417.33333333333, ans=0.025
+2024-07-27 22:20:31,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=70430.66666666667, ans=0.5
+2024-07-27 22:20:35,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=70430.66666666667, ans=0.125
+2024-07-27 22:20:39,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 6.237e+01 7.615e+01 9.161e+01 1.409e+02, threshold=1.523e+02, percent-clipped=0.0
+2024-07-27 22:20:39,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=70444.0, ans=0.125
+2024-07-27 22:20:53,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=70470.66666666667, ans=0.0
+2024-07-27 22:20:58,127 INFO [train.py:1114] (2/4) Epoch 6, batch 1750, loss[loss=0.1998, simple_loss=0.2806, pruned_loss=0.05951, over 4804.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3158, pruned_loss=0.07832, over 939563.40 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 64.0
+2024-07-27 22:21:09,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70497.33333333333, ans=0.1
+2024-07-27 22:21:11,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70510.66666666667, ans=0.1
+2024-07-27 22:21:37,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.80 vs. limit=15.0
+2024-07-27 22:21:38,399 INFO [train.py:1114] (2/4) Epoch 6, batch 1800, loss[loss=0.2764, simple_loss=0.3614, pruned_loss=0.09575, over 4644.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3152, pruned_loss=0.07821, over 940206.05 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:21:43,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0
+2024-07-27 22:21:45,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.06 vs. limit=15.0
+2024-07-27 22:21:49,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70564.0, ans=0.1
+2024-07-27 22:21:54,694 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 6.220e+01 7.110e+01 8.756e+01 1.676e+02, threshold=1.422e+02, percent-clipped=1.0
+2024-07-27 22:21:56,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=70577.33333333333, ans=0.2
+2024-07-27 22:21:59,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=70590.66666666667, ans=0.125
+2024-07-27 22:22:04,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=70590.66666666667, ans=0.125
+2024-07-27 22:22:12,103 INFO [train.py:1114] (2/4) Epoch 6, batch 1850, loss[loss=0.207, simple_loss=0.296, pruned_loss=0.05895, over 4816.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3141, pruned_loss=0.07728, over 940095.50 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:22:15,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=70617.33333333333, ans=0.0
+2024-07-27 22:22:23,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=70630.66666666667, ans=0.0
+2024-07-27 22:22:45,978 INFO [train.py:1114] (2/4) Epoch 6, batch 1900, loss[loss=0.2274, simple_loss=0.3259, pruned_loss=0.06449, over 4675.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3146, pruned_loss=0.07782, over 941663.91 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:22:50,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=70684.0, ans=0.125
+2024-07-27 22:22:55,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=70697.33333333333, ans=0.0
+2024-07-27 22:23:00,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=70710.66666666667, ans=0.025
+2024-07-27 22:23:01,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70710.66666666667, ans=0.1
+2024-07-27 22:23:01,912 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.167e+01 7.357e+01 8.960e+01 1.368e+02, threshold=1.471e+02, percent-clipped=0.0
+2024-07-27 22:23:02,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=70710.66666666667, ans=0.0
+2024-07-27 22:23:13,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70737.33333333333, ans=0.1
+2024-07-27 22:23:21,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=70737.33333333333, ans=0.95
+2024-07-27 22:23:23,066 INFO [train.py:1114] (2/4) Epoch 6, batch 1950, loss[loss=0.2239, simple_loss=0.304, pruned_loss=0.07196, over 4891.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3156, pruned_loss=0.07828, over 943789.50 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:23:38,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0
+2024-07-27 22:23:43,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=70790.66666666667, ans=0.0
+2024-07-27 22:23:47,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=70790.66666666667, ans=0.125
+2024-07-27 22:23:51,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70804.0, ans=0.125
+2024-07-27 22:23:56,789 INFO [train.py:1114] (2/4) Epoch 6, batch 2000, loss[loss=0.1901, simple_loss=0.2629, pruned_loss=0.05869, over 4796.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.317, pruned_loss=0.07871, over 940774.91 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:24:01,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=70817.33333333333, ans=0.125
+2024-07-27 22:24:15,306 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.102e+01 6.803e+01 8.517e+01 1.833e+02, threshold=1.361e+02, percent-clipped=3.0
+2024-07-27 22:24:16,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=70844.0, ans=0.2
+2024-07-27 22:24:26,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70870.66666666667, ans=0.125
+2024-07-27 22:24:32,983 INFO [train.py:1114] (2/4) Epoch 6, batch 2050, loss[loss=0.2163, simple_loss=0.2824, pruned_loss=0.07505, over 4612.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3148, pruned_loss=0.07772, over 938589.01 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:24:34,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=70884.0, ans=0.125
+2024-07-27 22:24:40,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=70897.33333333333, ans=0.2
+2024-07-27 22:24:56,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.61 vs. limit=15.0
+2024-07-27 22:24:59,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=70937.33333333333, ans=0.125
+2024-07-27 22:25:03,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70937.33333333333, ans=0.125
+2024-07-27 22:25:07,023 INFO [train.py:1114] (2/4) Epoch 6, batch 2100, loss[loss=0.2366, simple_loss=0.3247, pruned_loss=0.07428, over 4762.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3134, pruned_loss=0.07697, over 940444.86 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:25:09,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=70950.66666666667, ans=0.025
+2024-07-27 22:25:17,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.48 vs. limit=22.5
+2024-07-27 22:25:22,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70977.33333333333, ans=0.0
+2024-07-27 22:25:23,163 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 6.024e+01 6.945e+01 8.681e+01 1.626e+02, threshold=1.389e+02, percent-clipped=3.0
+2024-07-27 22:25:30,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=70990.66666666667, ans=0.125
+2024-07-27 22:25:36,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71004.0, ans=0.125
+2024-07-27 22:25:40,258 INFO [train.py:1114] (2/4) Epoch 6, batch 2150, loss[loss=0.2166, simple_loss=0.2975, pruned_loss=0.06782, over 4900.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3129, pruned_loss=0.07676, over 943509.87 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:25:46,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=71017.33333333333, ans=0.0
+2024-07-27 22:25:49,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=71030.66666666667, ans=0.0
+2024-07-27 22:25:49,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.20 vs. limit=10.0
+2024-07-27 22:25:54,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=71030.66666666667, ans=0.125
+2024-07-27 22:25:54,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=71030.66666666667, ans=0.025
+2024-07-27 22:26:11,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=71070.66666666667, ans=0.125
+2024-07-27 22:26:13,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71070.66666666667, ans=0.125
+2024-07-27 22:26:14,882 INFO [train.py:1114] (2/4) Epoch 6, batch 2200, loss[loss=0.2251, simple_loss=0.3157, pruned_loss=0.06722, over 4813.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3129, pruned_loss=0.077, over 943073.85 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:26:17,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=71084.0, ans=0.0
+2024-07-27 22:26:23,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71097.33333333333, ans=0.0
+2024-07-27 22:26:30,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.032e+01 5.977e+01 6.533e+01 7.474e+01 1.096e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-27 22:26:47,958 INFO [train.py:1114] (2/4) Epoch 6, batch 2250, loss[loss=0.2484, simple_loss=0.3171, pruned_loss=0.08985, over 4693.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3124, pruned_loss=0.07709, over 942120.03 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:27:08,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.51 vs. limit=12.0
+2024-07-27 22:27:15,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71190.66666666667, ans=0.125
+2024-07-27 22:27:17,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=71204.0, ans=0.125
+2024-07-27 22:27:22,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71204.0, ans=0.125
+2024-07-27 22:27:24,054 INFO [train.py:1114] (2/4) Epoch 6, batch 2300, loss[loss=0.1977, simple_loss=0.265, pruned_loss=0.06524, over 4936.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3111, pruned_loss=0.07702, over 939531.25 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:27:24,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0
+2024-07-27 22:27:26,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=71217.33333333333, ans=0.0
+2024-07-27 22:27:38,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71244.0, ans=0.125
+2024-07-27 22:27:39,867 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.045e+01 6.668e+01 7.674e+01 1.080e+02, threshold=1.334e+02, percent-clipped=0.0
+2024-07-27 22:27:42,284 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.81 vs. limit=15.0
+2024-07-27 22:27:44,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.59 vs. limit=15.0
+2024-07-27 22:27:50,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=71270.66666666667, ans=0.025
+2024-07-27 22:27:51,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.68 vs. limit=22.5
+2024-07-27 22:27:57,312 INFO [train.py:1114] (2/4) Epoch 6, batch 2350, loss[loss=0.2551, simple_loss=0.3304, pruned_loss=0.08995, over 4638.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3121, pruned_loss=0.07705, over 941413.47 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:28:02,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=71284.0, ans=0.125
+2024-07-27 22:28:04,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=71297.33333333333, ans=0.07
+2024-07-27 22:28:08,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=71297.33333333333, ans=0.0
+2024-07-27 22:28:09,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.79 vs. limit=12.0
+2024-07-27 22:28:14,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71310.66666666667, ans=0.1
+2024-07-27 22:28:27,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71337.33333333333, ans=0.125
+2024-07-27 22:28:27,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71337.33333333333, ans=0.1
+2024-07-27 22:28:30,516 INFO [train.py:1114] (2/4) Epoch 6, batch 2400, loss[loss=0.2226, simple_loss=0.3045, pruned_loss=0.07031, over 4640.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3132, pruned_loss=0.0772, over 941164.04 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:28:36,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71350.66666666667, ans=0.0
+2024-07-27 22:28:44,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=71364.0, ans=0.0
+2024-07-27 22:28:46,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=71377.33333333333, ans=0.0
+2024-07-27 22:28:48,416 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.375e+01 7.289e+01 8.298e+01 1.037e+02, threshold=1.458e+02, percent-clipped=0.0
+2024-07-27 22:28:57,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71390.66666666667, ans=0.125
+2024-07-27 22:29:00,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=71404.0, ans=0.125
+2024-07-27 22:29:05,457 INFO [train.py:1114] (2/4) Epoch 6, batch 2450, loss[loss=0.2618, simple_loss=0.3278, pruned_loss=0.09783, over 4702.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3153, pruned_loss=0.07789, over 937079.95 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:29:06,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71417.33333333333, ans=0.1
+2024-07-27 22:29:08,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=71417.33333333333, ans=0.125
+2024-07-27 22:29:10,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71417.33333333333, ans=0.125
+2024-07-27 22:29:10,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=71417.33333333333, ans=0.0
+2024-07-27 22:29:19,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.77 vs. limit=10.0
+2024-07-27 22:29:20,948 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:29:31,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=71470.66666666667, ans=0.0
+2024-07-27 22:29:34,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71470.66666666667, ans=0.125
+2024-07-27 22:29:41,005 INFO [train.py:1114] (2/4) Epoch 6, batch 2500, loss[loss=0.2646, simple_loss=0.3539, pruned_loss=0.08767, over 4811.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3162, pruned_loss=0.07836, over 938926.96 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:29:43,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71484.0, ans=0.0
+2024-07-27 22:29:44,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.39 vs. limit=10.0
+2024-07-27 22:29:45,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0
+2024-07-27 22:29:56,875 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.200e+01 6.677e+01 7.747e+01 1.498e+02, threshold=1.335e+02, percent-clipped=1.0
+2024-07-27 22:29:57,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=71510.66666666667, ans=0.2
+2024-07-27 22:30:05,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71524.0, ans=0.0
+2024-07-27 22:30:14,586 INFO [train.py:1114] (2/4) Epoch 6, batch 2550, loss[loss=0.1859, simple_loss=0.2678, pruned_loss=0.05202, over 4803.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3148, pruned_loss=0.07756, over 938550.59 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:30:14,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=71550.66666666667, ans=0.125
+2024-07-27 22:30:29,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.87 vs. limit=12.0
+2024-07-27 22:30:33,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=71577.33333333333, ans=0.125
+2024-07-27 22:30:35,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=71590.66666666667, ans=0.0
+2024-07-27 22:30:47,966 INFO [train.py:1114] (2/4) Epoch 6, batch 2600, loss[loss=0.1879, simple_loss=0.2784, pruned_loss=0.04869, over 4893.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3136, pruned_loss=0.07683, over 937625.65 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:30:50,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=71617.33333333333, ans=0.125
+2024-07-27 22:30:51,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=71617.33333333333, ans=22.5
+2024-07-27 22:30:53,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0
+2024-07-27 22:30:57,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.41 vs. limit=22.5
+2024-07-27 22:31:05,415 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 6.119e+01 7.086e+01 8.200e+01 1.372e+02, threshold=1.417e+02, percent-clipped=1.0
+2024-07-27 22:31:07,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=71644.0, ans=0.125
+2024-07-27 22:31:12,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71657.33333333333, ans=0.1
+2024-07-27 22:31:22,648 INFO [train.py:1114] (2/4) Epoch 6, batch 2650, loss[loss=0.2482, simple_loss=0.3418, pruned_loss=0.07731, over 4624.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3137, pruned_loss=0.07688, over 939851.50 frames. ], batch size: 16, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:31:40,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71710.66666666667, ans=0.1
+2024-07-27 22:31:40,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.79 vs. limit=15.0
+2024-07-27 22:31:53,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=71737.33333333333, ans=10.0
+2024-07-27 22:31:56,599 INFO [train.py:1114] (2/4) Epoch 6, batch 2700, loss[loss=0.217, simple_loss=0.308, pruned_loss=0.06296, over 4739.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3139, pruned_loss=0.07649, over 939833.24 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:32:06,821 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.55 vs. limit=22.5
+2024-07-27 22:32:12,947 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.944e+01 6.253e+01 7.142e+01 8.501e+01 1.377e+02, threshold=1.428e+02, percent-clipped=0.0
+2024-07-27 22:32:19,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71790.66666666667, ans=0.125
+2024-07-27 22:32:24,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=71790.66666666667, ans=0.04949747468305833
+2024-07-27 22:32:31,822 INFO [train.py:1114] (2/4) Epoch 6, batch 2750, loss[loss=0.2352, simple_loss=0.3043, pruned_loss=0.08303, over 4704.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3133, pruned_loss=0.07667, over 940081.28 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 16.0
+2024-07-27 22:32:45,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71844.0, ans=0.1
+2024-07-27 22:32:56,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.18 vs. limit=22.5
+2024-07-27 22:33:03,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=71870.66666666667, ans=0.125
+2024-07-27 22:33:03,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71870.66666666667, ans=0.125
+2024-07-27 22:33:05,086 INFO [train.py:1114] (2/4) Epoch 6, batch 2800, loss[loss=0.3618, simple_loss=0.4015, pruned_loss=0.161, over 3291.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3127, pruned_loss=0.07658, over 938041.56 frames. ], batch size: 35, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:33:16,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.05 vs. limit=10.0
+2024-07-27 22:33:18,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=71910.66666666667, ans=0.0
+2024-07-27 22:33:22,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.346e+01 7.274e+01 8.194e+01 1.245e+02, threshold=1.455e+02, percent-clipped=0.0
+2024-07-27 22:33:24,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=71910.66666666667, ans=0.0
+2024-07-27 22:33:24,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.20 vs. limit=22.5
+2024-07-27 22:33:38,926 INFO [train.py:1114] (2/4) Epoch 6, batch 2850, loss[loss=0.2008, simple_loss=0.2886, pruned_loss=0.05655, over 4970.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.313, pruned_loss=0.0767, over 936302.42 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:33:43,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=71950.66666666667, ans=0.0
+2024-07-27 22:33:49,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71964.0, ans=0.125
+2024-07-27 22:33:54,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=71977.33333333333, ans=0.0
+2024-07-27 22:34:03,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=71990.66666666667, ans=0.125
+2024-07-27 22:34:03,465 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:34:09,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.59 vs. limit=12.0
+2024-07-27 22:34:14,083 INFO [train.py:1114] (2/4) Epoch 6, batch 2900, loss[loss=0.2949, simple_loss=0.3741, pruned_loss=0.1079, over 4821.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3156, pruned_loss=0.07754, over 940065.24 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:34:20,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72030.66666666667, ans=0.1
+2024-07-27 22:34:29,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=72044.0, ans=0.0
+2024-07-27 22:34:31,186 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.900e+01 6.392e+01 7.089e+01 1.311e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-27 22:34:32,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=72044.0, ans=0.125
+2024-07-27 22:34:36,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=72057.33333333333, ans=0.2
+2024-07-27 22:34:39,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72057.33333333333, ans=0.125
+2024-07-27 22:34:39,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=72057.33333333333, ans=0.125
+2024-07-27 22:34:47,736 INFO [train.py:1114] (2/4) Epoch 6, batch 2950, loss[loss=0.2179, simple_loss=0.2931, pruned_loss=0.07136, over 4703.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3146, pruned_loss=0.07695, over 938946.56 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:34:50,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=72084.0, ans=0.125
+2024-07-27 22:34:50,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.77 vs. limit=15.0
+2024-07-27 22:34:52,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.75 vs. limit=15.0
+2024-07-27 22:35:03,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=72110.66666666667, ans=0.2
+2024-07-27 22:35:21,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=72137.33333333333, ans=0.0
+2024-07-27 22:35:23,050 INFO [train.py:1114] (2/4) Epoch 6, batch 3000, loss[loss=0.2415, simple_loss=0.3247, pruned_loss=0.07918, over 4759.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3141, pruned_loss=0.0765, over 938534.36 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:35:23,050 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 22:35:35,849 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.194, simple_loss=0.2973, pruned_loss=0.04533, over 944034.00 frames.
+2024-07-27 22:35:35,849 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-27 22:35:38,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=72150.66666666667, ans=0.125
+2024-07-27 22:35:39,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.24 vs. limit=15.0
+2024-07-27 22:35:42,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=6.0
+2024-07-27 22:35:46,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72164.0, ans=0.125
+2024-07-27 22:35:49,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72177.33333333333, ans=0.1
+2024-07-27 22:35:53,040 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.896e+01 6.493e+01 7.360e+01 1.026e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-27 22:36:02,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=72190.66666666667, ans=0.125
+2024-07-27 22:36:13,118 INFO [train.py:1114] (2/4) Epoch 6, batch 3050, loss[loss=0.22, simple_loss=0.2843, pruned_loss=0.07787, over 4649.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3141, pruned_loss=0.07673, over 937150.52 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:36:22,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=72230.66666666667, ans=0.035
+2024-07-27 22:36:33,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=72257.33333333333, ans=0.125
+2024-07-27 22:36:35,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=72257.33333333333, ans=0.125
+2024-07-27 22:36:39,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=72257.33333333333, ans=0.07
+2024-07-27 22:36:46,922 INFO [train.py:1114] (2/4) Epoch 6, batch 3100, loss[loss=0.2347, simple_loss=0.3162, pruned_loss=0.07661, over 4634.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3148, pruned_loss=0.07736, over 937984.71 frames. ], batch size: 16, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:36:48,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=72284.0, ans=0.2
+2024-07-27 22:36:51,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72284.0, ans=0.1
+2024-07-27 22:36:53,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72297.33333333333, ans=0.0
+2024-07-27 22:36:55,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=72297.33333333333, ans=0.5
+2024-07-27 22:36:57,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.53 vs. limit=6.0
+2024-07-27 22:36:58,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72297.33333333333, ans=0.125
+2024-07-27 22:37:03,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+01 6.089e+01 6.786e+01 8.344e+01 1.227e+02, threshold=1.357e+02, percent-clipped=0.0
+2024-07-27 22:37:07,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=72324.0, ans=15.0
+2024-07-27 22:37:18,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=72337.33333333333, ans=0.07
+2024-07-27 22:37:19,960 INFO [train.py:1114] (2/4) Epoch 6, batch 3150, loss[loss=0.2722, simple_loss=0.3458, pruned_loss=0.09929, over 4620.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3141, pruned_loss=0.07708, over 938229.35 frames. ], batch size: 17, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:37:24,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=72350.66666666667, ans=0.0
+2024-07-27 22:37:28,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72364.0, ans=0.125
+2024-07-27 22:37:50,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=72404.0, ans=15.0
+2024-07-27 22:37:55,035 INFO [train.py:1114] (2/4) Epoch 6, batch 3200, loss[loss=0.2364, simple_loss=0.3235, pruned_loss=0.07458, over 4829.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3129, pruned_loss=0.07627, over 939688.44 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:38:04,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=72430.66666666667, ans=0.0
+2024-07-27 22:38:07,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=72444.0, ans=0.125
+2024-07-27 22:38:07,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=72444.0, ans=0.025
+2024-07-27 22:38:11,653 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.899e+01 6.448e+01 7.393e+01 1.095e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-27 22:38:28,284 INFO [train.py:1114] (2/4) Epoch 6, batch 3250, loss[loss=0.2707, simple_loss=0.3428, pruned_loss=0.09932, over 4936.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3126, pruned_loss=0.07628, over 940440.07 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:38:32,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=72484.0, ans=0.0
+2024-07-27 22:39:00,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72537.33333333333, ans=0.125
+2024-07-27 22:39:01,898 INFO [train.py:1114] (2/4) Epoch 6, batch 3300, loss[loss=0.234, simple_loss=0.3093, pruned_loss=0.07936, over 4724.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3114, pruned_loss=0.07585, over 940636.43 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:39:05,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.24 vs. limit=22.5
+2024-07-27 22:39:11,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72564.0, ans=0.125
+2024-07-27 22:39:20,239 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.037e+01 6.381e+01 7.466e+01 1.307e+02, threshold=1.276e+02, percent-clipped=1.0
+2024-07-27 22:39:23,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=72590.66666666667, ans=0.125
+2024-07-27 22:39:25,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=72590.66666666667, ans=0.0
+2024-07-27 22:39:36,897 INFO [train.py:1114] (2/4) Epoch 6, batch 3350, loss[loss=0.2983, simple_loss=0.369, pruned_loss=0.1138, over 4601.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3133, pruned_loss=0.07735, over 938475.86 frames. ], batch size: 17, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:39:49,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=72644.0, ans=0.025
+2024-07-27 22:40:01,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=72657.33333333333, ans=0.05
+2024-07-27 22:40:01,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72657.33333333333, ans=0.1
+2024-07-27 22:40:12,004 INFO [train.py:1114] (2/4) Epoch 6, batch 3400, loss[loss=0.1934, simple_loss=0.2789, pruned_loss=0.05399, over 4812.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3133, pruned_loss=0.07748, over 936973.94 frames. ], batch size: 11, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:40:13,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72684.0, ans=0.1
+2024-07-27 22:40:17,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72684.0, ans=0.1
+2024-07-27 22:40:17,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72684.0, ans=0.0
+2024-07-27 22:40:18,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72697.33333333333, ans=0.0
+2024-07-27 22:40:25,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=72710.66666666667, ans=0.1
+2024-07-27 22:40:28,632 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.919e+01 6.608e+01 7.688e+01 1.157e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-27 22:40:30,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=72710.66666666667, ans=0.0
+2024-07-27 22:40:38,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=72737.33333333333, ans=0.0
+2024-07-27 22:40:45,207 INFO [train.py:1114] (2/4) Epoch 6, batch 3450, loss[loss=0.2906, simple_loss=0.3671, pruned_loss=0.1071, over 4708.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3135, pruned_loss=0.07721, over 936952.31 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:40:48,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.94 vs. limit=12.0
+2024-07-27 22:40:51,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.16 vs. limit=15.0
+2024-07-27 22:40:55,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=72764.0, ans=0.0
+2024-07-27 22:41:01,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=72777.33333333333, ans=0.2
+2024-07-27 22:41:08,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=72790.66666666667, ans=0.0
+2024-07-27 22:41:09,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.28 vs. limit=22.5
+2024-07-27 22:41:18,868 INFO [train.py:1114] (2/4) Epoch 6, batch 3500, loss[loss=0.2289, simple_loss=0.3028, pruned_loss=0.07753, over 4928.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3127, pruned_loss=0.07685, over 937499.08 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:41:23,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72817.33333333333, ans=0.125
+2024-07-27 22:41:33,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=72844.0, ans=0.125
+2024-07-27 22:41:36,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=72844.0, ans=0.125
+2024-07-27 22:41:37,344 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.111e+01 6.112e+01 6.695e+01 8.214e+01 1.239e+02, threshold=1.339e+02, percent-clipped=0.0
+2024-07-27 22:41:37,530 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.775e-02
+2024-07-27 22:41:51,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=72870.66666666667, ans=0.05
+2024-07-27 22:41:52,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=72870.66666666667, ans=0.125
+2024-07-27 22:41:53,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=72884.0, ans=0.125
+2024-07-27 22:41:58,733 INFO [train.py:1114] (2/4) Epoch 6, batch 3550, loss[loss=0.2277, simple_loss=0.309, pruned_loss=0.07321, over 4663.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3136, pruned_loss=0.07705, over 937798.17 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:42:01,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.19 vs. limit=10.0
+2024-07-27 22:42:04,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=72884.0, ans=0.015
+2024-07-27 22:42:07,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=12.0
+2024-07-27 22:42:09,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=72897.33333333333, ans=0.125
+2024-07-27 22:42:11,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=72910.66666666667, ans=0.2
+2024-07-27 22:42:11,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.32 vs. limit=22.5
+2024-07-27 22:42:24,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=72937.33333333333, ans=0.05
+2024-07-27 22:42:31,752 INFO [train.py:1114] (2/4) Epoch 6, batch 3600, loss[loss=0.2668, simple_loss=0.3371, pruned_loss=0.09822, over 4965.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3123, pruned_loss=0.07631, over 939630.97 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:42:34,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.11 vs. limit=15.0
+2024-07-27 22:42:48,883 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.822e+01 6.148e+01 6.891e+01 7.768e+01 1.144e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 22:42:56,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72990.66666666667, ans=0.125
+2024-07-27 22:43:06,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=15.0
+2024-07-27 22:43:06,986 INFO [train.py:1114] (2/4) Epoch 6, batch 3650, loss[loss=0.261, simple_loss=0.3234, pruned_loss=0.09931, over 4905.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3117, pruned_loss=0.07604, over 940139.75 frames. ], batch size: 15, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:43:09,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=73017.33333333333, ans=0.125
+2024-07-27 22:43:11,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73017.33333333333, ans=0.1
+2024-07-27 22:43:23,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=73044.0, ans=0.125
+2024-07-27 22:43:24,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=73044.0, ans=0.02
+2024-07-27 22:43:25,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=73044.0, ans=0.2
+2024-07-27 22:43:32,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=73057.33333333333, ans=15.0
+2024-07-27 22:43:40,164 INFO [train.py:1114] (2/4) Epoch 6, batch 3700, loss[loss=0.2432, simple_loss=0.3271, pruned_loss=0.07958, over 4927.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3117, pruned_loss=0.07605, over 941293.39 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:43:51,021 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.67 vs. limit=15.0
+2024-07-27 22:43:55,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=73110.66666666667, ans=0.2
+2024-07-27 22:43:56,980 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.084e+01 6.656e+01 7.917e+01 1.226e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 22:44:03,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=73124.0, ans=0.025
+2024-07-27 22:44:12,702 INFO [train.py:1114] (2/4) Epoch 6, batch 3750, loss[loss=0.1917, simple_loss=0.2771, pruned_loss=0.05315, over 4802.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3111, pruned_loss=0.07583, over 942666.45 frames. ], batch size: 11, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:44:15,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0
+2024-07-27 22:44:24,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73164.0, ans=0.125
+2024-07-27 22:44:28,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=73177.33333333333, ans=0.025
+2024-07-27 22:44:30,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=73177.33333333333, ans=0.1
+2024-07-27 22:44:37,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=73190.66666666667, ans=0.0
+2024-07-27 22:44:46,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. limit=6.0
+2024-07-27 22:44:47,708 INFO [train.py:1114] (2/4) Epoch 6, batch 3800, loss[loss=0.2451, simple_loss=0.3207, pruned_loss=0.08473, over 4813.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3115, pruned_loss=0.07691, over 941137.50 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:44:54,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=73230.66666666667, ans=0.025
+2024-07-27 22:45:00,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=73244.0, ans=0.125
+2024-07-27 22:45:02,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73244.0, ans=0.1
+2024-07-27 22:45:04,714 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 6.031e+01 6.654e+01 7.619e+01 1.236e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 22:45:18,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=12.0
+2024-07-27 22:45:20,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0
+2024-07-27 22:45:20,973 INFO [train.py:1114] (2/4) Epoch 6, batch 3850, loss[loss=0.2493, simple_loss=0.3434, pruned_loss=0.07764, over 4657.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3108, pruned_loss=0.07629, over 941767.61 frames. ], batch size: 16, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:45:22,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73284.0, ans=0.1
+2024-07-27 22:45:45,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73324.0, ans=0.1
+2024-07-27 22:45:51,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=73337.33333333333, ans=0.025
+2024-07-27 22:45:52,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=73337.33333333333, ans=0.0
+2024-07-27 22:45:56,552 INFO [train.py:1114] (2/4) Epoch 6, batch 3900, loss[loss=0.2734, simple_loss=0.3425, pruned_loss=0.1021, over 4820.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3119, pruned_loss=0.07653, over 942104.67 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:46:00,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=73350.66666666667, ans=0.2
+2024-07-27 22:46:03,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=73364.0, ans=0.95
+2024-07-27 22:46:04,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=73364.0, ans=0.125
+2024-07-27 22:46:13,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.00 vs. limit=15.0
+2024-07-27 22:46:13,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.979e+01 6.121e+01 6.587e+01 7.635e+01 1.146e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-27 22:46:21,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=73390.66666666667, ans=0.125
+2024-07-27 22:46:29,896 INFO [train.py:1114] (2/4) Epoch 6, batch 3950, loss[loss=0.2822, simple_loss=0.3547, pruned_loss=0.1049, over 4836.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3122, pruned_loss=0.07685, over 944103.34 frames. ], batch size: 16, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:46:35,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=73430.66666666667, ans=0.125
+2024-07-27 22:46:39,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=73430.66666666667, ans=0.125
+2024-07-27 22:46:45,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=73444.0, ans=0.0
+2024-07-27 22:46:49,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.79 vs. limit=22.5
+2024-07-27 22:46:51,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.61 vs. limit=22.5
+2024-07-27 22:47:05,404 INFO [train.py:1114] (2/4) Epoch 6, batch 4000, loss[loss=0.1855, simple_loss=0.2665, pruned_loss=0.05227, over 4773.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3127, pruned_loss=0.07747, over 940591.10 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:47:10,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=73484.0, ans=0.125
+2024-07-27 22:47:22,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=73510.66666666667, ans=0.125
+2024-07-27 22:47:22,997 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 6.236e+01 6.803e+01 7.982e+01 1.360e+02, threshold=1.361e+02, percent-clipped=1.0
+2024-07-27 22:47:27,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=73524.0, ans=0.2
+2024-07-27 22:47:39,834 INFO [train.py:1114] (2/4) Epoch 6, batch 4050, loss[loss=0.3362, simple_loss=0.372, pruned_loss=0.1502, over 3216.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3129, pruned_loss=0.07776, over 939493.71 frames. ], batch size: 35, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:47:40,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=73550.66666666667, ans=0.0
+2024-07-27 22:47:48,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73564.0, ans=0.125
+2024-07-27 22:47:53,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=73577.33333333333, ans=0.125
+2024-07-27 22:48:11,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=73604.0, ans=0.1
+2024-07-27 22:48:14,293 INFO [train.py:1114] (2/4) Epoch 6, batch 4100, loss[loss=0.2572, simple_loss=0.3429, pruned_loss=0.08582, over 4896.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3135, pruned_loss=0.07788, over 939144.14 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:48:14,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=73617.33333333333, ans=0.0
+2024-07-27 22:48:18,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=73617.33333333333, ans=0.125
+2024-07-27 22:48:25,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=73630.66666666667, ans=10.0
+2024-07-27 22:48:25,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.93 vs. limit=15.0
+2024-07-27 22:48:31,606 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 6.159e+01 6.782e+01 8.525e+01 1.477e+02, threshold=1.356e+02, percent-clipped=2.0
+2024-07-27 22:48:45,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=73670.66666666667, ans=0.2
+2024-07-27 22:48:46,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=73670.66666666667, ans=0.0
+2024-07-27 22:48:49,548 INFO [train.py:1114] (2/4) Epoch 6, batch 4150, loss[loss=0.2313, simple_loss=0.3114, pruned_loss=0.07557, over 4837.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.312, pruned_loss=0.07727, over 938719.15 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:49:02,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73710.66666666667, ans=0.1
+2024-07-27 22:49:10,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=73724.0, ans=0.125
+2024-07-27 22:49:14,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=73724.0, ans=0.025
+2024-07-27 22:49:23,810 INFO [train.py:1114] (2/4) Epoch 6, batch 4200, loss[loss=0.2255, simple_loss=0.3051, pruned_loss=0.07297, over 4900.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3121, pruned_loss=0.07662, over 940110.79 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:49:25,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.81 vs. limit=10.0
+2024-07-27 22:49:27,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=73750.66666666667, ans=0.125
+2024-07-27 22:49:40,729 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.760e+01 6.554e+01 7.096e+01 1.149e+02, threshold=1.311e+02, percent-clipped=0.0
+2024-07-27 22:49:44,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=73790.66666666667, ans=0.125
+2024-07-27 22:49:57,298 INFO [train.py:1114] (2/4) Epoch 6, batch 4250, loss[loss=0.2411, simple_loss=0.3288, pruned_loss=0.07672, over 4629.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.314, pruned_loss=0.07737, over 941016.43 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:50:12,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=73844.0, ans=0.0
+2024-07-27 22:50:27,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=73870.66666666667, ans=0.125
+2024-07-27 22:50:32,519 INFO [train.py:1114] (2/4) Epoch 6, batch 4300, loss[loss=0.2145, simple_loss=0.298, pruned_loss=0.06545, over 4754.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3138, pruned_loss=0.0771, over 939906.37 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:50:33,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=73884.0, ans=0.0
+2024-07-27 22:50:47,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=73910.66666666667, ans=0.125
+2024-07-27 22:50:49,607 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.051e+01 7.094e+01 8.613e+01 1.493e+02, threshold=1.419e+02, percent-clipped=5.0
+2024-07-27 22:50:51,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=73910.66666666667, ans=0.04949747468305833
+2024-07-27 22:51:08,575 INFO [train.py:1114] (2/4) Epoch 6, batch 4350, loss[loss=0.2217, simple_loss=0.3095, pruned_loss=0.06694, over 4761.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3123, pruned_loss=0.07637, over 940961.59 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:51:12,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73950.66666666667, ans=0.1
+2024-07-27 22:51:18,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.80 vs. limit=22.5
+2024-07-27 22:51:23,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=73977.33333333333, ans=0.125
+2024-07-27 22:51:34,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=73990.66666666667, ans=0.125
+2024-07-27 22:51:42,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=74017.33333333333, ans=0.0
+2024-07-27 22:51:43,388 INFO [train.py:1114] (2/4) Epoch 6, batch 4400, loss[loss=0.2244, simple_loss=0.3187, pruned_loss=0.0651, over 4805.00 frames.
], tot_loss[loss=0.2339, simple_loss=0.3138, pruned_loss=0.07697, over 940826.23 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:51:51,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=74030.66666666667, ans=0.025 +2024-07-27 22:51:52,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.17 vs. limit=10.0 +2024-07-27 22:51:52,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74030.66666666667, ans=0.1 +2024-07-27 22:52:00,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 5.949e+01 6.629e+01 7.705e+01 1.284e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 22:52:06,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=74057.33333333333, ans=0.0 +2024-07-27 22:52:14,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=74070.66666666667, ans=0.0 +2024-07-27 22:52:17,026 INFO [train.py:1114] (2/4) Epoch 6, batch 4450, loss[loss=0.2344, simple_loss=0.3068, pruned_loss=0.08096, over 4938.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3146, pruned_loss=0.07803, over 939080.20 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:52:23,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=74097.33333333333, ans=0.05 +2024-07-27 22:52:44,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74137.33333333333, ans=0.1 +2024-07-27 22:52:49,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=74137.33333333333, ans=0.125 +2024-07-27 22:52:51,884 INFO [train.py:1114] (2/4) Epoch 6, batch 4500, loss[loss=0.213, simple_loss=0.3075, pruned_loss=0.05928, over 4739.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3137, pruned_loss=0.07695, over 938076.78 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:52:55,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=74150.66666666667, ans=0.125 +2024-07-27 22:53:08,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.811e+01 6.353e+01 6.989e+01 9.336e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-27 22:53:16,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=74190.66666666667, ans=0.2 +2024-07-27 22:53:18,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=74204.0, ans=0.0 +2024-07-27 22:53:19,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=74204.0, ans=0.0 +2024-07-27 22:53:24,768 INFO [train.py:1114] (2/4) Epoch 6, batch 4550, loss[loss=0.2487, simple_loss=0.3222, pruned_loss=0.08763, over 4881.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.313, pruned_loss=0.0763, over 939937.96 frames. 
], batch size: 13, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:53:25,098 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.12 vs. limit=15.0 +2024-07-27 22:53:31,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=74230.66666666667, ans=0.2 +2024-07-27 22:53:43,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74244.0, ans=0.1 +2024-07-27 22:53:51,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=74270.66666666667, ans=0.125 +2024-07-27 22:53:51,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=74270.66666666667, ans=0.025 +2024-07-27 22:53:55,858 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.70 vs. limit=15.0 +2024-07-27 22:53:56,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=74270.66666666667, ans=0.125 +2024-07-27 22:53:58,237 INFO [train.py:1114] (2/4) Epoch 6, batch 4600, loss[loss=0.2166, simple_loss=0.2988, pruned_loss=0.06724, over 4438.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3133, pruned_loss=0.07691, over 938544.82 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:53:59,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=74284.0, ans=0.2 +2024-07-27 22:54:05,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-27 22:54:08,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=74297.33333333333, ans=0.125 +2024-07-27 22:54:12,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=74297.33333333333, ans=0.125 +2024-07-27 22:54:14,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=74310.66666666667, ans=0.125 +2024-07-27 22:54:17,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74310.66666666667, ans=0.125 +2024-07-27 22:54:18,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.07 vs. 
limit=12.0 +2024-07-27 22:54:18,334 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 6.152e+01 6.770e+01 8.392e+01 1.380e+02, threshold=1.354e+02, percent-clipped=1.0 +2024-07-27 22:54:19,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=74310.66666666667, ans=0.025 +2024-07-27 22:54:19,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=74310.66666666667, ans=0.125 +2024-07-27 22:54:19,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=74310.66666666667, ans=0.0 +2024-07-27 22:54:23,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.60 vs. limit=15.0 +2024-07-27 22:54:26,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=74337.33333333333, ans=0.0 +2024-07-27 22:54:30,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=74337.33333333333, ans=0.025 +2024-07-27 22:54:34,076 INFO [train.py:1114] (2/4) Epoch 6, batch 4650, loss[loss=0.2623, simple_loss=0.346, pruned_loss=0.08927, over 4840.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3138, pruned_loss=0.07658, over 940437.30 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:54:39,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=74350.66666666667, ans=0.0 +2024-07-27 22:54:40,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=74364.0, ans=0.125 +2024-07-27 22:55:17,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=74404.0, ans=0.125 +2024-07-27 22:55:38,797 INFO [train.py:1114] (2/4) Epoch 6, batch 4700, loss[loss=0.2009, simple_loss=0.2739, pruned_loss=0.06394, over 4710.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3137, pruned_loss=0.07701, over 938121.88 frames. ], batch size: 11, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:55:39,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=74417.33333333333, ans=0.0 +2024-07-27 22:55:41,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=74417.33333333333, ans=15.0 +2024-07-27 22:55:42,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74417.33333333333, ans=0.0 +2024-07-27 22:55:54,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=74444.0, ans=0.125 +2024-07-27 22:55:57,188 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.070e+01 6.903e+01 7.937e+01 1.102e+02, threshold=1.381e+02, percent-clipped=0.0 +2024-07-27 22:57:01,147 INFO [train.py:1114] (2/4) Epoch 6, batch 4750, loss[loss=0.2552, simple_loss=0.3338, pruned_loss=0.08828, over 4540.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.315, pruned_loss=0.07824, over 936793.08 frames. 
], batch size: 21, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:57:05,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=74484.0, ans=0.125 +2024-07-27 22:57:07,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.99 vs. limit=15.0 +2024-07-27 22:57:13,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74497.33333333333, ans=0.125 +2024-07-27 22:57:48,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=74524.0, ans=0.0 +2024-07-27 22:57:53,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=74537.33333333333, ans=0.0 +2024-07-27 22:58:00,109 INFO [train.py:1114] (2/4) Epoch 6, batch 4800, loss[loss=0.2269, simple_loss=0.3022, pruned_loss=0.07574, over 4691.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3141, pruned_loss=0.07811, over 933597.77 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:58:01,905 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0 +2024-07-27 22:58:10,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=74564.0, ans=0.035 +2024-07-27 22:58:10,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74564.0, ans=0.1 +2024-07-27 22:58:14,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.44 vs. limit=15.0 +2024-07-27 22:58:22,634 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:58:27,450 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.139e+01 5.953e+01 6.774e+01 8.357e+01 1.268e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-27 22:58:29,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=74577.33333333333, ans=0.0 +2024-07-27 22:58:29,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.45 vs. limit=22.5 +2024-07-27 22:58:32,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=74590.66666666667, ans=0.2 +2024-07-27 22:58:42,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=74604.0, ans=0.05 +2024-07-27 22:58:43,663 INFO [train.py:1114] (2/4) Epoch 6, batch 4850, loss[loss=0.2932, simple_loss=0.3795, pruned_loss=0.1034, over 4744.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3139, pruned_loss=0.07794, over 932579.65 frames. 
], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:58:44,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74617.33333333333, ans=0.1 +2024-07-27 22:58:47,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-07-27 22:58:53,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=74630.66666666667, ans=0.125 +2024-07-27 22:58:54,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=24.14 vs. limit=15.0 +2024-07-27 22:59:07,332 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:59:29,830 INFO [train.py:1114] (2/4) Epoch 6, batch 4900, loss[loss=0.2, simple_loss=0.2975, pruned_loss=0.05129, over 4756.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3137, pruned_loss=0.07739, over 934137.65 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:59:36,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0 +2024-07-27 22:59:47,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=12.0 +2024-07-27 22:59:48,781 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.911e+01 6.095e+01 6.974e+01 8.315e+01 1.441e+02, threshold=1.395e+02, percent-clipped=3.0 +2024-07-27 23:00:01,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=74737.33333333333, ans=0.0 +2024-07-27 23:00:07,422 INFO [train.py:1114] (2/4) Epoch 6, batch 4950, loss[loss=0.3507, simple_loss=0.3845, pruned_loss=0.1584, over 3397.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3154, pruned_loss=0.07862, over 931567.59 frames. ], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:00:12,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=74750.66666666667, ans=0.2 +2024-07-27 23:00:15,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74764.0, ans=0.1 +2024-07-27 23:00:47,383 INFO [train.py:1114] (2/4) Epoch 6, batch 5000, loss[loss=0.2403, simple_loss=0.3363, pruned_loss=0.07211, over 4659.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.313, pruned_loss=0.077, over 935654.09 frames. 
], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:00:50,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=74817.33333333333, ans=0.125 +2024-07-27 23:00:50,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=74817.33333333333, ans=0.125 +2024-07-27 23:00:50,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=74817.33333333333, ans=0.0 +2024-07-27 23:00:52,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74817.33333333333, ans=0.1 +2024-07-27 23:00:56,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=74830.66666666667, ans=0.125 +2024-07-27 23:01:05,416 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.345e+01 7.582e+01 9.212e+01 1.315e+02, threshold=1.516e+02, percent-clipped=0.0 +2024-07-27 23:01:12,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=74857.33333333333, ans=0.0 +2024-07-27 23:01:24,731 INFO [train.py:1114] (2/4) Epoch 6, batch 5050, loss[loss=0.1916, simple_loss=0.2678, pruned_loss=0.05775, over 4864.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3118, pruned_loss=0.07635, over 938081.96 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:01:30,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74884.0, ans=0.1 +2024-07-27 23:01:54,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74937.33333333333, ans=0.1 +2024-07-27 23:01:57,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74937.33333333333, ans=0.1 +2024-07-27 23:02:00,492 INFO [train.py:1114] (2/4) Epoch 6, batch 5100, loss[loss=0.226, simple_loss=0.3081, pruned_loss=0.07199, over 4774.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3124, pruned_loss=0.07698, over 935219.55 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:02:02,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=74950.66666666667, ans=0.125 +2024-07-27 23:02:10,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=74950.66666666667, ans=0.125 +2024-07-27 23:02:23,234 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.910e+01 5.981e+01 6.894e+01 7.665e+01 1.178e+02, threshold=1.379e+02, percent-clipped=0.0 +2024-07-27 23:02:27,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.43 vs. limit=15.0 +2024-07-27 23:02:33,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.36 vs. 
limit=22.5 +2024-07-27 23:02:38,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=75004.0, ans=0.125 +2024-07-27 23:02:39,329 INFO [train.py:1114] (2/4) Epoch 6, batch 5150, loss[loss=0.2449, simple_loss=0.3267, pruned_loss=0.0815, over 4865.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3135, pruned_loss=0.0773, over 936355.20 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:02:48,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=75030.66666666667, ans=0.0 +2024-07-27 23:02:50,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=75030.66666666667, ans=0.07 +2024-07-27 23:02:54,574 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.79 vs. limit=22.5 +2024-07-27 23:02:54,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75044.0, ans=0.125 +2024-07-27 23:02:57,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.19 vs. limit=12.0 +2024-07-27 23:02:58,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=75044.0, ans=0.125 +2024-07-27 23:03:12,936 INFO [train.py:1114] (2/4) Epoch 6, batch 5200, loss[loss=0.2047, simple_loss=0.3053, pruned_loss=0.05201, over 4664.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3135, pruned_loss=0.07695, over 936165.18 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:03:13,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=75084.0, ans=0.0 +2024-07-27 23:03:19,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=75097.33333333333, ans=0.125 +2024-07-27 23:03:23,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=75097.33333333333, ans=0.0 +2024-07-27 23:03:26,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=75097.33333333333, ans=0.04949747468305833 +2024-07-27 23:03:32,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=12.0 +2024-07-27 23:03:32,616 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.053e+01 6.671e+01 7.847e+01 1.456e+02, threshold=1.334e+02, percent-clipped=1.0 +2024-07-27 23:03:38,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75124.0, ans=0.1 +2024-07-27 23:03:39,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=75124.0, ans=0.0 +2024-07-27 23:03:41,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=75137.33333333333, ans=0.125 +2024-07-27 23:03:44,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.86 vs. 
limit=22.5 +2024-07-27 23:03:48,770 INFO [train.py:1114] (2/4) Epoch 6, batch 5250, loss[loss=0.2438, simple_loss=0.316, pruned_loss=0.08576, over 4891.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3122, pruned_loss=0.0763, over 935818.43 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:03:57,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=75164.0, ans=0.1 +2024-07-27 23:03:58,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.39 vs. limit=10.0 +2024-07-27 23:04:08,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75190.66666666667, ans=0.1 +2024-07-27 23:04:09,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=75190.66666666667, ans=0.05 +2024-07-27 23:04:09,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=75190.66666666667, ans=0.0 +2024-07-27 23:04:11,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75190.66666666667, ans=0.0 +2024-07-27 23:04:17,438 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.41 vs. limit=15.0 +2024-07-27 23:04:24,673 INFO [train.py:1114] (2/4) Epoch 6, batch 5300, loss[loss=0.2658, simple_loss=0.3372, pruned_loss=0.09724, over 4617.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3121, pruned_loss=0.07667, over 933879.98 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:04:29,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=75217.33333333333, ans=0.025 +2024-07-27 23:04:37,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=75244.0, ans=0.0 +2024-07-27 23:04:41,942 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.955e+01 6.651e+01 7.573e+01 1.282e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 23:04:43,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.49 vs. limit=22.5 +2024-07-27 23:04:44,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=15.0 +2024-07-27 23:04:44,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.54 vs. limit=22.5 +2024-07-27 23:04:53,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-07-27 23:04:57,836 INFO [train.py:1114] (2/4) Epoch 6, batch 5350, loss[loss=0.2526, simple_loss=0.3295, pruned_loss=0.08782, over 4527.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3136, pruned_loss=0.07718, over 936073.14 frames. 
], batch size: 10, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:05:06,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=75297.33333333333, ans=0.035 +2024-07-27 23:05:12,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=75310.66666666667, ans=0.125 +2024-07-27 23:05:14,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=75310.66666666667, ans=0.0 +2024-07-27 23:05:33,160 INFO [train.py:1114] (2/4) Epoch 6, batch 5400, loss[loss=0.27, simple_loss=0.332, pruned_loss=0.104, over 4263.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3141, pruned_loss=0.07775, over 930428.83 frames. ], batch size: 25, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:05:45,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=75377.33333333333, ans=0.2 +2024-07-27 23:05:50,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.962e+01 6.573e+01 7.607e+01 1.590e+02, threshold=1.315e+02, percent-clipped=1.0 +2024-07-27 23:05:52,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=75377.33333333333, ans=15.0 +2024-07-27 23:05:52,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=75390.66666666667, ans=0.0 +2024-07-27 23:05:55,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=75390.66666666667, ans=0.0 +2024-07-27 23:05:55,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75390.66666666667, ans=0.125 +2024-07-27 23:05:57,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=75390.66666666667, ans=0.125 +2024-07-27 23:05:58,507 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:05:59,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75404.0, ans=0.125 +2024-07-27 23:06:08,298 INFO [train.py:1114] (2/4) Epoch 6, batch 5450, loss[loss=0.2241, simple_loss=0.297, pruned_loss=0.07558, over 4698.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3132, pruned_loss=0.07757, over 933202.48 frames. ], batch size: 11, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:06:14,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=75430.66666666667, ans=0.07 +2024-07-27 23:06:18,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.99 vs. limit=6.0 +2024-07-27 23:06:31,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75457.33333333333, ans=0.125 +2024-07-27 23:06:37,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.75 vs. 
limit=10.0 +2024-07-27 23:06:39,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=75470.66666666667, ans=0.0 +2024-07-27 23:06:42,303 INFO [train.py:1114] (2/4) Epoch 6, batch 5500, loss[loss=0.2354, simple_loss=0.3144, pruned_loss=0.07821, over 4175.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3111, pruned_loss=0.07616, over 930576.42 frames. ], batch size: 25, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:06:45,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.84 vs. limit=15.0 +2024-07-27 23:06:45,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=75484.0, ans=0.0 +2024-07-27 23:06:54,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75497.33333333333, ans=0.1 +2024-07-27 23:06:54,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=12.0 +2024-07-27 23:06:59,681 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 6.092e+01 6.682e+01 7.913e+01 1.212e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-27 23:07:07,177 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:07:21,734 INFO [train.py:1114] (2/4) Epoch 6, batch 5550, loss[loss=0.1779, simple_loss=0.2725, pruned_loss=0.04168, over 4710.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3113, pruned_loss=0.07661, over 933015.32 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:07:30,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=75564.0, ans=0.0 +2024-07-27 23:07:33,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75564.0, ans=0.1 +2024-07-27 23:07:35,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=75577.33333333333, ans=0.0 +2024-07-27 23:07:39,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=75577.33333333333, ans=0.125 +2024-07-27 23:07:41,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=75590.66666666667, ans=0.07 +2024-07-27 23:07:43,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=75590.66666666667, ans=0.025 +2024-07-27 23:07:44,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.36 vs. limit=15.0 +2024-07-27 23:07:46,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75590.66666666667, ans=0.0 +2024-07-27 23:07:49,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=75604.0, ans=0.0 +2024-07-27 23:07:54,736 INFO [train.py:1114] (2/4) Epoch 6, batch 5600, loss[loss=0.2371, simple_loss=0.3166, pruned_loss=0.07885, over 4736.00 frames. 
], tot_loss[loss=0.2318, simple_loss=0.3117, pruned_loss=0.07589, over 934340.23 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 64.0 +2024-07-27 23:07:57,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75617.33333333333, ans=0.1 +2024-07-27 23:08:03,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=75617.33333333333, ans=0.0 +2024-07-27 23:08:07,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75630.66666666667, ans=0.125 +2024-07-27 23:08:09,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75630.66666666667, ans=0.125 +2024-07-27 23:08:11,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=75630.66666666667, ans=0.125 +2024-07-27 23:08:16,465 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.881e+01 6.141e+01 6.729e+01 7.455e+01 1.025e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-27 23:08:27,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=75670.66666666667, ans=0.0 +2024-07-27 23:08:28,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=15.0 +2024-07-27 23:08:32,404 INFO [train.py:1114] (2/4) Epoch 6, batch 5650, loss[loss=0.2681, simple_loss=0.3541, pruned_loss=0.09105, over 4574.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3126, pruned_loss=0.07646, over 937055.39 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 64.0 +2024-07-27 23:08:35,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.95 vs. limit=10.0 +2024-07-27 23:08:43,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.77 vs. limit=12.0 +2024-07-27 23:08:51,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75710.66666666667, ans=0.1 +2024-07-27 23:08:51,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=75710.66666666667, ans=0.125 +2024-07-27 23:09:03,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=75737.33333333333, ans=0.125 +2024-07-27 23:09:08,124 INFO [train.py:1114] (2/4) Epoch 6, batch 5700, loss[loss=0.2231, simple_loss=0.3039, pruned_loss=0.07111, over 4691.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3121, pruned_loss=0.07624, over 938007.06 frames. 
], batch size: 13, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:09:20,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=75764.0, ans=0.0 +2024-07-27 23:09:26,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.584e+01 7.686e+01 8.929e+01 1.310e+02, threshold=1.537e+02, percent-clipped=0.0 +2024-07-27 23:09:26,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=75777.33333333333, ans=0.0 +2024-07-27 23:09:30,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=75790.66666666667, ans=0.0 +2024-07-27 23:09:41,554 INFO [train.py:1114] (2/4) Epoch 6, batch 5750, loss[loss=0.264, simple_loss=0.3453, pruned_loss=0.0914, over 4678.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3127, pruned_loss=0.07659, over 937619.39 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:09:41,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=75817.33333333333, ans=0.2 +2024-07-27 23:09:57,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=75844.0, ans=0.125 +2024-07-27 23:09:58,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75844.0, ans=0.125 +2024-07-27 23:09:58,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.13 vs. limit=15.0 +2024-07-27 23:09:59,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=75844.0, ans=0.125 +2024-07-27 23:10:14,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=75870.66666666667, ans=0.0 +2024-07-27 23:10:16,864 INFO [train.py:1114] (2/4) Epoch 6, batch 5800, loss[loss=0.2317, simple_loss=0.2988, pruned_loss=0.08234, over 4660.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3125, pruned_loss=0.07677, over 936861.95 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:10:17,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75884.0, ans=0.1 +2024-07-27 23:10:34,697 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.114e+01 6.081e+01 6.996e+01 7.790e+01 1.543e+02, threshold=1.399e+02, percent-clipped=1.0 +2024-07-27 23:10:37,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=75924.0, ans=0.125 +2024-07-27 23:10:42,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=75924.0, ans=0.125 +2024-07-27 23:10:50,839 INFO [train.py:1114] (2/4) Epoch 6, batch 5850, loss[loss=0.2776, simple_loss=0.3456, pruned_loss=0.1048, over 4565.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3129, pruned_loss=0.07684, over 937473.53 frames. 
], batch size: 21, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:11:02,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=75964.0, ans=0.2 +2024-07-27 23:11:03,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=75964.0, ans=0.0 +2024-07-27 23:11:07,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=75977.33333333333, ans=0.125 +2024-07-27 23:11:14,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-27 23:11:25,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=76004.0, ans=0.025 +2024-07-27 23:11:30,107 INFO [train.py:1114] (2/4) Epoch 6, batch 5900, loss[loss=0.2632, simple_loss=0.3345, pruned_loss=0.0959, over 4674.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3128, pruned_loss=0.07668, over 937548.31 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:11:38,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-07-27 23:11:41,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76030.66666666667, ans=0.0 +2024-07-27 23:11:42,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.95 vs. limit=22.5 +2024-07-27 23:11:48,175 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 6.028e+01 6.783e+01 7.450e+01 1.132e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-27 23:12:03,425 INFO [train.py:1114] (2/4) Epoch 6, batch 5950, loss[loss=0.2514, simple_loss=0.3338, pruned_loss=0.08452, over 4677.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3127, pruned_loss=0.07602, over 939655.01 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:12:04,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.12 vs. limit=15.0 +2024-07-27 23:12:17,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=76110.66666666667, ans=0.125 +2024-07-27 23:12:17,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=76110.66666666667, ans=0.0 +2024-07-27 23:12:29,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=76137.33333333333, ans=0.2 +2024-07-27 23:12:34,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=19.16 vs. limit=15.0 +2024-07-27 23:12:35,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76150.66666666667, ans=0.1 +2024-07-27 23:12:36,360 INFO [train.py:1114] (2/4) Epoch 6, batch 6000, loss[loss=0.2723, simple_loss=0.3371, pruned_loss=0.1038, over 4212.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3126, pruned_loss=0.07619, over 936842.57 frames. 
], batch size: 25, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:12:36,360 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-27 23:12:42,524 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.9035, 4.5673, 4.1798, 3.9228], device='cuda:2') +2024-07-27 23:12:42,996 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.7951, 6.0419, 6.0339, 6.6076], device='cuda:2') +2024-07-27 23:12:50,096 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.1905, simple_loss=0.2947, pruned_loss=0.04318, over 944034.00 frames. +2024-07-27 23:12:50,097 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 23:12:50,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=76150.66666666667, ans=0.2 +2024-07-27 23:12:52,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76150.66666666667, ans=0.2 +2024-07-27 23:13:07,972 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 6.230e+01 7.142e+01 8.647e+01 1.308e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 23:13:24,142 INFO [train.py:1114] (2/4) Epoch 6, batch 6050, loss[loss=0.2296, simple_loss=0.3021, pruned_loss=0.07859, over 4782.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3114, pruned_loss=0.07604, over 938377.33 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:13:31,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-27 23:13:45,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. limit=6.0 +2024-07-27 23:13:57,301 INFO [train.py:1114] (2/4) Epoch 6, batch 6100, loss[loss=0.2013, simple_loss=0.2798, pruned_loss=0.06139, over 4690.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3107, pruned_loss=0.07572, over 937609.98 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:14:03,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=76297.33333333333, ans=0.0 +2024-07-27 23:14:17,099 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 6.126e+01 6.655e+01 7.850e+01 1.418e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 23:14:19,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=76324.0, ans=0.125 +2024-07-27 23:14:24,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.39 vs. limit=22.5 +2024-07-27 23:14:31,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=76337.33333333333, ans=0.1 +2024-07-27 23:14:32,317 INFO [train.py:1114] (2/4) Epoch 6, batch 6150, loss[loss=0.2626, simple_loss=0.333, pruned_loss=0.09606, over 3277.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3106, pruned_loss=0.07566, over 936366.79 frames. 
], batch size: 35, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:14:33,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76350.66666666667, ans=0.2
+2024-07-27 23:14:40,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=76364.0, ans=0.125
+2024-07-27 23:14:43,124 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=15.0
+2024-07-27 23:15:01,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76404.0, ans=0.1
+2024-07-27 23:15:07,870 INFO [train.py:1114] (2/4) Epoch 6, batch 6200, loss[loss=0.2209, simple_loss=0.2946, pruned_loss=0.07363, over 4749.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3119, pruned_loss=0.07613, over 935995.44 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:15:15,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=76417.33333333333, ans=0.125
+2024-07-27 23:15:20,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=76430.66666666667, ans=0.0
+2024-07-27 23:15:20,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=76430.66666666667, ans=0.025
+2024-07-27 23:15:26,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=76444.0, ans=0.015
+2024-07-27 23:15:26,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=76444.0, ans=0.0
+2024-07-27 23:15:30,095 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.920e+01 6.889e+01 8.181e+01 1.186e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 23:15:42,365 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:15:45,637 INFO [train.py:1114] (2/4) Epoch 6, batch 6250, loss[loss=0.2209, simple_loss=0.3127, pruned_loss=0.06452, over 4807.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3118, pruned_loss=0.07599, over 932411.94 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:15:49,211 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:15:50,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.53 vs. limit=15.0
+2024-07-27 23:16:09,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=76524.0, ans=0.125
+2024-07-27 23:16:11,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.17 vs. limit=15.0
+2024-07-27 23:16:16,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76537.33333333333, ans=0.1
+2024-07-27 23:16:21,061 INFO [train.py:1114] (2/4) Epoch 6, batch 6300, loss[loss=0.1662, simple_loss=0.2402, pruned_loss=0.04614, over 4560.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3116, pruned_loss=0.07614, over 929215.59 frames. ], batch size: 10, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:16:23,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76550.66666666667, ans=0.125
+2024-07-27 23:16:25,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=76550.66666666667, ans=0.0
+2024-07-27 23:16:26,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.01 vs. limit=15.0
+2024-07-27 23:16:36,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.60 vs. limit=22.5
+2024-07-27 23:16:38,844 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.917e+01 6.519e+01 7.440e+01 1.686e+02, threshold=1.304e+02, percent-clipped=1.0
+2024-07-27 23:16:47,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.65 vs. limit=15.0
+2024-07-27 23:16:52,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76604.0, ans=0.1
+2024-07-27 23:16:53,925 INFO [train.py:1114] (2/4) Epoch 6, batch 6350, loss[loss=0.2459, simple_loss=0.3278, pruned_loss=0.08207, over 4488.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3111, pruned_loss=0.07584, over 933187.34 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:17:00,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=76630.66666666667, ans=0.2
+2024-07-27 23:17:10,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.58 vs. limit=15.0
+2024-07-27 23:17:10,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=22.5
+2024-07-27 23:17:23,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.40 vs. limit=15.0
+2024-07-27 23:17:25,073 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.63 vs. limit=22.5
+2024-07-27 23:17:26,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76684.0, ans=0.125
+2024-07-27 23:17:27,179 INFO [train.py:1114] (2/4) Epoch 6, batch 6400, loss[loss=0.2534, simple_loss=0.3123, pruned_loss=0.09729, over 4638.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3107, pruned_loss=0.07597, over 934555.42 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0
+2024-07-27 23:17:28,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=76684.0, ans=0.125
+2024-07-27 23:17:35,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=76697.33333333333, ans=0.07
+2024-07-27 23:17:36,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=76697.33333333333, ans=0.125
+2024-07-27 23:17:38,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.45 vs. limit=12.0
+2024-07-27 23:17:42,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76710.66666666667, ans=0.1
+2024-07-27 23:17:44,982 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 6.138e+01 6.927e+01 7.775e+01 1.168e+02, threshold=1.385e+02, percent-clipped=0.0
+2024-07-27 23:17:52,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=76724.0, ans=0.125
+2024-07-27 23:18:00,340 INFO [train.py:1114] (2/4) Epoch 6, batch 6450, loss[loss=0.2452, simple_loss=0.3185, pruned_loss=0.08596, over 4529.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.312, pruned_loss=0.07633, over 938374.11 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:18:23,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.05 vs. limit=22.5
+2024-07-27 23:18:27,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=76790.66666666667, ans=10.0
+2024-07-27 23:18:40,299 INFO [train.py:1114] (2/4) Epoch 6, batch 6500, loss[loss=0.2956, simple_loss=0.344, pruned_loss=0.1236, over 3367.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3119, pruned_loss=0.07637, over 939709.95 frames. ], batch size: 35, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:18:43,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=76817.33333333333, ans=0.125
+2024-07-27 23:18:52,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=76830.66666666667, ans=0.125
+2024-07-27 23:18:53,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=76844.0, ans=0.125
+2024-07-27 23:18:57,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.45 vs. limit=10.0
+2024-07-27 23:18:58,077 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.177e+01 7.054e+01 8.466e+01 1.519e+02, threshold=1.411e+02, percent-clipped=2.0
+2024-07-27 23:19:04,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=76857.33333333333, ans=0.1
+2024-07-27 23:19:13,512 INFO [train.py:1114] (2/4) Epoch 6, batch 6550, loss[loss=0.1681, simple_loss=0.2495, pruned_loss=0.04339, over 4829.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3109, pruned_loss=0.07534, over 942710.04 frames. ], batch size: 11, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:19:37,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.77 vs. limit=15.0
+2024-07-27 23:19:44,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=76937.33333333333, ans=12.0
+2024-07-27 23:19:47,608 INFO [train.py:1114] (2/4) Epoch 6, batch 6600, loss[loss=0.2616, simple_loss=0.3485, pruned_loss=0.08731, over 4938.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3107, pruned_loss=0.07511, over 944801.60 frames. ], batch size: 14, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:19:51,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=76950.66666666667, ans=0.2
+2024-07-27 23:19:57,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=76964.0, ans=0.2
+2024-07-27 23:20:05,794 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 6.034e+01 7.063e+01 8.869e+01 1.315e+02, threshold=1.413e+02, percent-clipped=0.0
+2024-07-27 23:20:08,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.86 vs. limit=22.5
+2024-07-27 23:20:10,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0
+2024-07-27 23:20:21,203 INFO [train.py:1114] (2/4) Epoch 6, batch 6650, loss[loss=0.2659, simple_loss=0.3476, pruned_loss=0.09208, over 4588.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3099, pruned_loss=0.07499, over 943231.43 frames. ], batch size: 17, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:20:24,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=77017.33333333333, ans=15.0
+2024-07-27 23:20:27,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=77030.66666666667, ans=0.04949747468305833
+2024-07-27 23:20:31,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=77030.66666666667, ans=0.2
+2024-07-27 23:20:37,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=77044.0, ans=0.2
+2024-07-27 23:20:47,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77057.33333333333, ans=0.1
+2024-07-27 23:20:47,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=77057.33333333333, ans=0.025
+2024-07-27 23:20:57,087 INFO [train.py:1114] (2/4) Epoch 6, batch 6700, loss[loss=0.2379, simple_loss=0.3198, pruned_loss=0.07807, over 4688.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3098, pruned_loss=0.07503, over 942015.13 frames. ], batch size: 19, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:21:05,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77097.33333333333, ans=0.0
+2024-07-27 23:21:15,127 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.982e+01 6.173e+01 6.934e+01 8.423e+01 1.268e+02, threshold=1.387e+02, percent-clipped=0.0
+2024-07-27 23:21:27,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=77124.0, ans=0.025
+2024-07-27 23:21:31,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0
+2024-07-27 23:21:34,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77137.33333333333, ans=0.1
+2024-07-27 23:21:41,302 INFO [train.py:1114] (2/4) Epoch 6, batch 6750, loss[loss=0.2491, simple_loss=0.3411, pruned_loss=0.07852, over 4246.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3099, pruned_loss=0.07559, over 940171.27 frames. ], batch size: 25, lr: 1.22e-02, grad_scale: 16.0
+2024-07-27 23:21:43,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.20 vs. limit=15.0
+2024-07-27 23:21:56,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77177.33333333333, ans=0.1
+2024-07-27 23:22:15,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=77204.0, ans=0.125
+2024-07-27 23:22:16,834 INFO [train.py:1114] (2/4) Epoch 6, batch 6800, loss[loss=0.2276, simple_loss=0.3222, pruned_loss=0.06647, over 4637.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3093, pruned_loss=0.07511, over 938639.71 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:22:22,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=77217.33333333333, ans=0.0
+2024-07-27 23:22:23,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=77217.33333333333, ans=0.125
+2024-07-27 23:23:09,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0
+2024-07-27 23:23:14,540 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 5.858e+01 6.351e+01 7.283e+01 1.199e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-27 23:23:25,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=77270.66666666667, ans=0.0
+2024-07-27 23:23:29,481 INFO [train.py:1114] (2/4) Epoch 6, batch 6850, loss[loss=0.2054, simple_loss=0.2992, pruned_loss=0.05578, over 4691.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3095, pruned_loss=0.0751, over 940379.12 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:23:41,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=77297.33333333333, ans=0.015
+2024-07-27 23:23:45,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77310.66666666667, ans=0.1
+2024-07-27 23:24:03,351 INFO [train.py:1114] (2/4) Epoch 6, batch 6900, loss[loss=0.2271, simple_loss=0.2997, pruned_loss=0.07727, over 4967.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3091, pruned_loss=0.07498, over 942770.71 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:24:13,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=77364.0, ans=0.05
+2024-07-27 23:24:14,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=77364.0, ans=0.125
+2024-07-27 23:24:21,825 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.966e+01 6.630e+01 7.138e+01 1.259e+02, threshold=1.326e+02, percent-clipped=0.0
+2024-07-27 23:24:21,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77377.33333333333, ans=0.1
+2024-07-27 23:24:21,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=77377.33333333333, ans=0.0
+2024-07-27 23:24:23,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=77390.66666666667, ans=0.125
+2024-07-27 23:24:32,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=77404.0, ans=0.0
+2024-07-27 23:24:38,560 INFO [train.py:1114] (2/4) Epoch 6, batch 6950, loss[loss=0.2195, simple_loss=0.2889, pruned_loss=0.07502, over 4520.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3077, pruned_loss=0.07432, over 939610.35 frames. ], batch size: 10, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:25:03,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=6.0
+2024-07-27 23:25:11,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77484.0, ans=0.1
+2024-07-27 23:25:12,291 INFO [train.py:1114] (2/4) Epoch 6, batch 7000, loss[loss=0.257, simple_loss=0.3482, pruned_loss=0.08289, over 4639.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3068, pruned_loss=0.0736, over 938543.58 frames. ], batch size: 17, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:25:12,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=77484.0, ans=0.05
+2024-07-27 23:25:27,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77510.66666666667, ans=0.125
+2024-07-27 23:25:29,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=77510.66666666667, ans=0.125
+2024-07-27 23:25:30,327 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.154e+01 6.184e+01 7.015e+01 8.119e+01 1.355e+02, threshold=1.403e+02, percent-clipped=1.0
+2024-07-27 23:25:36,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=77524.0, ans=0.125
+2024-07-27 23:25:40,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=77537.33333333333, ans=0.125
+2024-07-27 23:25:44,777 INFO [train.py:1114] (2/4) Epoch 6, batch 7050, loss[loss=0.2328, simple_loss=0.3135, pruned_loss=0.07599, over 4653.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3068, pruned_loss=0.07312, over 941767.31 frames. ], batch size: 19, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:26:12,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=6.0
+2024-07-27 23:26:18,231 INFO [train.py:1114] (2/4) Epoch 6, batch 7100, loss[loss=0.2928, simple_loss=0.3661, pruned_loss=0.1097, over 4788.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3084, pruned_loss=0.07456, over 937404.05 frames. ], batch size: 15, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:26:26,516 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:26:28,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=77630.66666666667, ans=0.125
+2024-07-27 23:26:29,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=77630.66666666667, ans=0.07
+2024-07-27 23:26:34,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=77630.66666666667, ans=0.0
+2024-07-27 23:26:40,769 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.696e+01 6.046e+01 6.711e+01 7.848e+01 1.418e+02, threshold=1.342e+02, percent-clipped=1.0
+2024-07-27 23:26:43,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=77657.33333333333, ans=0.125
+2024-07-27 23:26:44,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=77657.33333333333, ans=0.0
+2024-07-27 23:26:53,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.03 vs. limit=10.0
+2024-07-27 23:26:55,142 INFO [train.py:1114] (2/4) Epoch 6, batch 7150, loss[loss=0.2322, simple_loss=0.3012, pruned_loss=0.08157, over 4488.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.3065, pruned_loss=0.0735, over 938070.86 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:26:55,972 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:27:03,559 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:27:11,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=77710.66666666667, ans=0.0
+2024-07-27 23:27:15,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=77710.66666666667, ans=0.0
+2024-07-27 23:27:20,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=77724.0, ans=0.0
+2024-07-27 23:27:20,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=77724.0, ans=0.0
+2024-07-27 23:27:21,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=77724.0, ans=0.125
+2024-07-27 23:27:26,932 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.37 vs. limit=15.0
+2024-07-27 23:27:27,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77737.33333333333, ans=0.1
+2024-07-27 23:27:29,643 INFO [train.py:1114] (2/4) Epoch 6, batch 7200, loss[loss=0.231, simple_loss=0.3198, pruned_loss=0.07106, over 4809.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.308, pruned_loss=0.07419, over 938151.10 frames. ], batch size: 15, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:27:29,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=77750.66666666667, ans=0.025
+2024-07-27 23:27:35,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=77764.0, ans=0.125
+2024-07-27 23:27:37,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=77764.0, ans=0.0
+2024-07-27 23:27:39,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0
+2024-07-27 23:27:40,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=77764.0, ans=0.0
+2024-07-27 23:27:44,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=77777.33333333333, ans=0.125
+2024-07-27 23:27:47,991 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 6.035e+01 6.773e+01 8.115e+01 1.390e+02, threshold=1.355e+02, percent-clipped=1.0
+2024-07-27 23:27:50,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77790.66666666667, ans=0.1
+2024-07-27 23:27:56,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.39 vs. limit=22.5
+2024-07-27 23:27:59,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=77804.0, ans=0.0
+2024-07-27 23:28:02,492 INFO [train.py:1114] (2/4) Epoch 6, batch 7250, loss[loss=0.2147, simple_loss=0.2862, pruned_loss=0.0716, over 4844.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3084, pruned_loss=0.07426, over 940135.60 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:28:06,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=77817.33333333333, ans=0.2
+2024-07-27 23:28:08,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.29 vs. limit=15.0
+2024-07-27 23:28:23,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77857.33333333333, ans=0.1
+2024-07-27 23:28:25,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=77857.33333333333, ans=0.0
+2024-07-27 23:28:31,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=77870.66666666667, ans=0.07
+2024-07-27 23:28:34,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=77870.66666666667, ans=0.09899494936611666
+2024-07-27 23:28:35,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=77870.66666666667, ans=0.125
+2024-07-27 23:28:35,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=77870.66666666667, ans=0.125
+2024-07-27 23:28:36,955 INFO [train.py:1114] (2/4) Epoch 6, batch 7300, loss[loss=0.1761, simple_loss=0.2688, pruned_loss=0.0417, over 4846.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.3091, pruned_loss=0.07458, over 940249.21 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:28:42,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=77884.0, ans=0.2
+2024-07-27 23:28:43,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=77897.33333333333, ans=0.0
+2024-07-27 23:28:47,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.53 vs. limit=22.5
+2024-07-27 23:28:47,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=77897.33333333333, ans=0.125
+2024-07-27 23:28:49,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.45 vs. limit=15.0
+2024-07-27 23:28:55,458 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.159e+01 6.187e+01 6.781e+01 8.208e+01 1.800e+02, threshold=1.356e+02, percent-clipped=4.0
+2024-07-27 23:29:09,768 INFO [train.py:1114] (2/4) Epoch 6, batch 7350, loss[loss=0.2313, simple_loss=0.3192, pruned_loss=0.0717, over 4643.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.3096, pruned_loss=0.07447, over 939591.90 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:29:22,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77977.33333333333, ans=0.125
+2024-07-27 23:29:24,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=77977.33333333333, ans=0.125
+2024-07-27 23:29:27,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=77977.33333333333, ans=0.125
+2024-07-27 23:29:39,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=78004.0, ans=0.125
+2024-07-27 23:29:42,401 INFO [train.py:1114] (2/4) Epoch 6, batch 7400, loss[loss=0.2446, simple_loss=0.332, pruned_loss=0.07859, over 4697.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3098, pruned_loss=0.07399, over 940712.83 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0
+2024-07-27 23:29:57,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=78044.0, ans=0.125
+2024-07-27 23:30:00,722 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 6.318e+01 7.281e+01 8.792e+01 1.336e+02, threshold=1.456e+02, percent-clipped=0.0
+2024-07-27 23:30:26,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=78070.66666666667, ans=0.09899494936611666
+2024-07-27 23:30:31,771 INFO [train.py:1114] (2/4) Epoch 6, batch 7450, loss[loss=0.2051, simple_loss=0.2838, pruned_loss=0.06322, over 4628.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.3087, pruned_loss=0.07423, over 938509.43 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:30:42,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=78097.33333333333, ans=0.125
+2024-07-27 23:30:43,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=78097.33333333333, ans=0.125
+2024-07-27 23:30:44,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=78110.66666666667, ans=0.0
+2024-07-27 23:30:47,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=78110.66666666667, ans=0.2
+2024-07-27 23:30:49,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=78110.66666666667, ans=0.125
+2024-07-27 23:31:04,800 INFO [train.py:1114] (2/4) Epoch 6, batch 7500, loss[loss=0.2342, simple_loss=0.3136, pruned_loss=0.07744, over 3399.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.31, pruned_loss=0.07523, over 936565.32 frames. ], batch size: 36, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:31:05,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.07 vs. limit=12.0
+2024-07-27 23:31:24,015 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.209e+01 6.853e+01 7.670e+01 1.087e+02, threshold=1.371e+02, percent-clipped=0.0
+2024-07-27 23:31:24,792 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:31:33,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=78204.0, ans=0.125
+2024-07-27 23:31:33,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=78204.0, ans=0.07
+2024-07-27 23:31:35,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=78204.0, ans=0.125
+2024-07-27 23:31:36,536 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:31:38,275 INFO [train.py:1114] (2/4) Epoch 6, batch 7550, loss[loss=0.2207, simple_loss=0.3072, pruned_loss=0.06709, over 4603.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3119, pruned_loss=0.0761, over 936613.37 frames. ], batch size: 17, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:32:01,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78257.33333333333, ans=0.1
+2024-07-27 23:32:01,679 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:32:11,964 INFO [train.py:1114] (2/4) Epoch 6, batch 7600, loss[loss=0.2176, simple_loss=0.3054, pruned_loss=0.06493, over 4805.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3111, pruned_loss=0.07569, over 938471.45 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:32:25,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=78297.33333333333, ans=0.2
+2024-07-27 23:32:33,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.092e+01 6.628e+01 7.251e+01 1.124e+02, threshold=1.326e+02, percent-clipped=0.0
+2024-07-27 23:32:40,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78324.0, ans=0.1
+2024-07-27 23:32:40,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78324.0, ans=0.1
+2024-07-27 23:32:52,071 INFO [train.py:1114] (2/4) Epoch 6, batch 7650, loss[loss=0.2023, simple_loss=0.2852, pruned_loss=0.05965, over 4940.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3107, pruned_loss=0.07609, over 937163.92 frames. ], batch size: 12, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:33:05,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78364.0, ans=0.1
+2024-07-27 23:33:10,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=78364.0, ans=0.0
+2024-07-27 23:33:14,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78377.33333333333, ans=0.1
+2024-07-27 23:33:16,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=78377.33333333333, ans=0.2
+2024-07-27 23:33:37,408 INFO [train.py:1114] (2/4) Epoch 6, batch 7700, loss[loss=0.2091, simple_loss=0.303, pruned_loss=0.05761, over 4698.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3111, pruned_loss=0.07629, over 934579.21 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:34:05,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=78444.0, ans=0.2
+2024-07-27 23:34:11,047 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 6.189e+01 6.836e+01 7.774e+01 1.390e+02, threshold=1.367e+02, percent-clipped=1.0
+2024-07-27 23:34:28,171 INFO [train.py:1114] (2/4) Epoch 6, batch 7750, loss[loss=0.2454, simple_loss=0.3246, pruned_loss=0.08305, over 4935.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3136, pruned_loss=0.07701, over 935562.29 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:34:52,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78524.0, ans=0.125
+2024-07-27 23:34:55,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78537.33333333333, ans=0.1
+2024-07-27 23:35:02,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=78537.33333333333, ans=0.0
+2024-07-27 23:35:03,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=78550.66666666667, ans=0.0
+2024-07-27 23:35:03,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.79 vs. limit=15.0
+2024-07-27 23:35:04,334 INFO [train.py:1114] (2/4) Epoch 6, batch 7800, loss[loss=0.2243, simple_loss=0.3223, pruned_loss=0.06314, over 4661.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.313, pruned_loss=0.07606, over 937510.53 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:35:05,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=78550.66666666667, ans=0.0
+2024-07-27 23:35:05,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=78550.66666666667, ans=0.07
+2024-07-27 23:35:12,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=78564.0, ans=0.125
+2024-07-27 23:35:15,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=78564.0, ans=0.2
+2024-07-27 23:35:22,303 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.050e+01 6.523e+01 7.521e+01 9.871e+01, threshold=1.305e+02, percent-clipped=0.0
+2024-07-27 23:35:33,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=78604.0, ans=0.0
+2024-07-27 23:35:36,954 INFO [train.py:1114] (2/4) Epoch 6, batch 7850, loss[loss=0.2255, simple_loss=0.2944, pruned_loss=0.07827, over 4521.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3136, pruned_loss=0.07626, over 936405.77 frames. ], batch size: 10, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:35:38,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=78617.33333333333, ans=0.125
+2024-07-27 23:35:38,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=78617.33333333333, ans=0.5
+2024-07-27 23:35:39,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=78617.33333333333, ans=0.125
+2024-07-27 23:35:44,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.54 vs. limit=10.0
+2024-07-27 23:35:54,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.67 vs. limit=10.0
+2024-07-27 23:36:00,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=78657.33333333333, ans=0.2
+2024-07-27 23:36:11,571 INFO [train.py:1114] (2/4) Epoch 6, batch 7900, loss[loss=0.2434, simple_loss=0.3251, pruned_loss=0.08081, over 4871.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3138, pruned_loss=0.07607, over 933267.80 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:36:11,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78684.0, ans=0.1
+2024-07-27 23:36:13,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=78684.0, ans=0.125
+2024-07-27 23:36:20,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=78697.33333333333, ans=0.125
+2024-07-27 23:36:25,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=15.0
+2024-07-27 23:36:29,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.141e+01 6.160e+01 7.004e+01 8.333e+01 1.233e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 23:36:38,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=78737.33333333333, ans=0.0
+2024-07-27 23:36:42,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=12.0
+2024-07-27 23:36:43,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=78750.66666666667, ans=0.0
+2024-07-27 23:36:44,075 INFO [train.py:1114] (2/4) Epoch 6, batch 7950, loss[loss=0.2421, simple_loss=0.3323, pruned_loss=0.076, over 3305.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3134, pruned_loss=0.07584, over 935373.44 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:36:44,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=78750.66666666667, ans=0.1
+2024-07-27 23:36:49,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.29 vs. limit=22.5
+2024-07-27 23:36:56,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=78777.33333333333, ans=0.125
+2024-07-27 23:36:59,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78777.33333333333, ans=0.1
+2024-07-27 23:37:00,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=78777.33333333333, ans=0.04949747468305833
+2024-07-27 23:37:04,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.95 vs. limit=22.5
+2024-07-27 23:37:05,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.46 vs. limit=15.0
+2024-07-27 23:37:15,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78817.33333333333, ans=0.1
+2024-07-27 23:37:16,345 INFO [train.py:1114] (2/4) Epoch 6, batch 8000, loss[loss=0.2116, simple_loss=0.2882, pruned_loss=0.06747, over 4627.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3112, pruned_loss=0.0756, over 934146.05 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:37:19,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78817.33333333333, ans=0.1
+2024-07-27 23:37:24,268 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:37:32,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78844.0, ans=0.125
+2024-07-27 23:37:33,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=6.0
+2024-07-27 23:37:34,336 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.441e+01 5.938e+01 6.564e+01 7.603e+01 1.476e+02, threshold=1.313e+02, percent-clipped=1.0
+2024-07-27 23:37:38,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.97 vs. limit=10.0
+2024-07-27 23:37:48,733 INFO [train.py:1114] (2/4) Epoch 6, batch 8050, loss[loss=0.2168, simple_loss=0.3051, pruned_loss=0.06421, over 4804.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3116, pruned_loss=0.07552, over 933767.22 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:37:54,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=78897.33333333333, ans=0.1
+2024-07-27 23:37:57,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=78897.33333333333, ans=0.0
+2024-07-27 23:38:03,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=78910.66666666667, ans=0.025
+2024-07-27 23:38:10,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=78924.0, ans=0.2
+2024-07-27 23:38:19,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=78937.33333333333, ans=0.0
+2024-07-27 23:38:21,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=78937.33333333333, ans=0.125
+2024-07-27 23:38:23,518 INFO [train.py:1114] (2/4) Epoch 6, batch 8100, loss[loss=0.2387, simple_loss=0.3237, pruned_loss=0.07684, over 4821.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3117, pruned_loss=0.07576, over 934152.30 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:38:23,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78950.66666666667, ans=0.1
+2024-07-27 23:38:29,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78964.0, ans=0.1
+2024-07-27 23:38:35,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=78977.33333333333, ans=0.05
+2024-07-27 23:38:35,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78977.33333333333, ans=0.125
+2024-07-27 23:38:41,455 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.051e+01 5.969e+01 6.429e+01 6.997e+01 9.390e+01, threshold=1.286e+02, percent-clipped=0.0
+2024-07-27 23:38:48,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.62 vs. limit=15.0
+2024-07-27 23:38:55,581 INFO [train.py:1114] (2/4) Epoch 6, batch 8150, loss[loss=0.2393, simple_loss=0.3257, pruned_loss=0.07647, over 4801.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3109, pruned_loss=0.07599, over 937642.36 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:39:10,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.79 vs. limit=15.0
+2024-07-27 23:39:10,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=79044.0, ans=0.2
+2024-07-27 23:39:22,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.87 vs. limit=22.5
+2024-07-27 23:39:28,487 INFO [train.py:1114] (2/4) Epoch 6, batch 8200, loss[loss=0.271, simple_loss=0.3447, pruned_loss=0.09863, over 4798.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.3117, pruned_loss=0.07628, over 938735.26 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:39:31,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=79084.0, ans=0.07
+2024-07-27 23:39:41,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=79110.66666666667, ans=0.025
+2024-07-27 23:39:47,310 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 5.934e+01 6.554e+01 7.415e+01 1.580e+02, threshold=1.311e+02, percent-clipped=1.0
+2024-07-27 23:39:48,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=79124.0, ans=0.0
+2024-07-27 23:39:54,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=79137.33333333333, ans=0.125
+2024-07-27 23:39:56,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.41 vs. limit=10.0
+2024-07-27 23:40:01,169 INFO [train.py:1114] (2/4) Epoch 6, batch 8250, loss[loss=0.2416, simple_loss=0.3027, pruned_loss=0.09023, over 4893.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3116, pruned_loss=0.07588, over 939001.05 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:40:05,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=79150.66666666667, ans=0.025
+2024-07-27 23:40:11,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.09 vs. limit=15.0
+2024-07-27 23:40:12,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. limit=15.0
+2024-07-27 23:40:26,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=79190.66666666667, ans=0.125
+2024-07-27 23:40:30,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=79204.0, ans=0.125
+2024-07-27 23:40:33,944 INFO [train.py:1114] (2/4) Epoch 6, batch 8300, loss[loss=0.2346, simple_loss=0.3274, pruned_loss=0.07092, over 4910.00 frames. ], tot_loss[loss=0.231, simple_loss=0.3112, pruned_loss=0.07539, over 938909.39 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:40:38,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.25 vs. limit=15.0
+2024-07-27 23:40:39,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=79230.66666666667, ans=0.125
+2024-07-27 23:40:44,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.62 vs. limit=15.0
+2024-07-27 23:40:54,346 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.146e+01 5.976e+01 6.704e+01 7.897e+01 1.175e+02, threshold=1.341e+02, percent-clipped=0.0
+2024-07-27 23:41:03,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=15.0
+2024-07-27 23:41:07,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=79270.66666666667, ans=0.0
+2024-07-27 23:41:07,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=79284.0, ans=0.125
+2024-07-27 23:41:08,394 INFO [train.py:1114] (2/4) Epoch 6, batch 8350, loss[loss=0.2433, simple_loss=0.3205, pruned_loss=0.08305, over 4791.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.3093, pruned_loss=0.07422, over 941445.86 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:41:20,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79297.33333333333, ans=0.1
+2024-07-27 23:41:20,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.60 vs. limit=6.0
+2024-07-27 23:41:22,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.85 vs. limit=15.0
+2024-07-27 23:41:35,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=79337.33333333333, ans=0.2
+2024-07-27 23:41:40,604 INFO [train.py:1114] (2/4) Epoch 6, batch 8400, loss[loss=0.1845, simple_loss=0.2561, pruned_loss=0.05649, over 4783.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3107, pruned_loss=0.07497, over 940207.42 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:41:41,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=79350.66666666667, ans=0.035
+2024-07-27 23:41:41,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=79350.66666666667, ans=0.0
+2024-07-27 23:41:58,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=79377.33333333333, ans=0.125
+2024-07-27 23:41:58,571 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 6.271e+01 7.007e+01 8.306e+01 1.253e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 23:42:00,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.01 vs. limit=15.0
+2024-07-27 23:42:02,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=79390.66666666667, ans=0.125
+2024-07-27 23:42:06,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79404.0, ans=0.1
+2024-07-27 23:42:07,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79404.0, ans=0.1
+2024-07-27 23:42:12,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=79417.33333333333, ans=0.0
+2024-07-27 23:42:12,518 INFO [train.py:1114] (2/4) Epoch 6, batch 8450, loss[loss=0.2398, simple_loss=0.3139, pruned_loss=0.08289, over 4801.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3109, pruned_loss=0.07477, over 939008.47 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:42:15,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=79417.33333333333, ans=0.025
+2024-07-27 23:42:30,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=79444.0, ans=0.2
+2024-07-27 23:42:43,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79470.66666666667, ans=0.125
+2024-07-27 23:42:45,639 INFO [train.py:1114] (2/4) Epoch 6, batch 8500, loss[loss=0.1974, simple_loss=0.2858, pruned_loss=0.05451, over 4615.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3108, pruned_loss=0.07542, over 938791.77 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:42:56,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79497.33333333333, ans=0.125
+2024-07-27 23:43:01,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79510.66666666667, ans=0.125
+2024-07-27 23:43:02,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=79510.66666666667, ans=0.125
+2024-07-27 23:43:02,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=79510.66666666667, ans=0.125
+2024-07-27 23:43:04,884 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.862e+01 6.704e+01 7.850e+01 1.312e+02, threshold=1.341e+02, percent-clipped=0.0
+2024-07-27 23:43:09,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.16 vs. limit=15.0
+2024-07-27 23:43:18,110 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.90 vs. limit=15.0
+2024-07-27 23:43:19,091 INFO [train.py:1114] (2/4) Epoch 6, batch 8550, loss[loss=0.1898, simple_loss=0.2768, pruned_loss=0.05137, over 4798.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3103, pruned_loss=0.07548, over 939734.17 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:43:20,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79550.66666666667, ans=0.125
+2024-07-27 23:43:20,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.77 vs. limit=22.5
+2024-07-27 23:43:23,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.64 vs. limit=15.0
+2024-07-27 23:43:30,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=79564.0, ans=0.0
+2024-07-27 23:43:34,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=79577.33333333333, ans=0.125
+2024-07-27 23:43:40,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=79590.66666666667, ans=0.125
+2024-07-27 23:43:50,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.08 vs. limit=15.0
+2024-07-27 23:43:51,144 INFO [train.py:1114] (2/4) Epoch 6, batch 8600, loss[loss=0.2083, simple_loss=0.2919, pruned_loss=0.06235, over 4794.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3098, pruned_loss=0.07512, over 939465.36 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:43:54,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=79617.33333333333, ans=0.035
+2024-07-27 23:44:05,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5
+2024-07-27 23:44:09,907 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.199e+01 6.001e+01 6.460e+01 7.651e+01 1.281e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-27 23:44:14,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=79657.33333333333, ans=0.0
+2024-07-27 23:44:18,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=79670.66666666667, ans=0.0
+2024-07-27 23:44:19,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=79670.66666666667, ans=0.0
+2024-07-27 23:44:24,676 INFO [train.py:1114] (2/4) Epoch 6, batch 8650, loss[loss=0.2358, simple_loss=0.3153, pruned_loss=0.07813, over 4906.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3089, pruned_loss=0.0751, over 940407.27 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:44:56,636 INFO [train.py:1114] (2/4) Epoch 6, batch 8700, loss[loss=0.259, simple_loss=0.326, pruned_loss=0.09603, over 4759.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3098, pruned_loss=0.0756, over 937874.83 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:45:02,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79764.0, ans=0.1
+2024-07-27 23:45:13,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=79777.33333333333, ans=0.025
+2024-07-27 23:45:14,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.110e+01 6.862e+01 8.564e+01 1.344e+02, threshold=1.372e+02, percent-clipped=1.0
+2024-07-27 23:45:18,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=79790.66666666667, ans=0.125
+2024-07-27 23:45:28,840 INFO [train.py:1114] (2/4) Epoch 6, batch 8750, loss[loss=0.2999, simple_loss=0.3828, pruned_loss=0.1085, over 4689.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3101, pruned_loss=0.07532, over 936511.44 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:45:29,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.95 vs. limit=15.0
+2024-07-27 23:45:29,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.07 vs. limit=10.0
+2024-07-27 23:45:37,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79830.66666666667, ans=0.1
+2024-07-27 23:45:42,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79844.0, ans=0.1
+2024-07-27 23:45:51,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.56 vs. limit=22.5
+2024-07-27 23:45:59,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=79870.66666666667, ans=0.2
+2024-07-27 23:46:00,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=79884.0, ans=0.025
+2024-07-27 23:46:01,321 INFO [train.py:1114] (2/4) Epoch 6, batch 8800, loss[loss=0.2004, simple_loss=0.2886, pruned_loss=0.05608, over 4935.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3114, pruned_loss=0.07579, over 937392.03 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:46:01,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79884.0, ans=0.125
+2024-07-27 23:46:07,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79897.33333333333, ans=0.1
+2024-07-27 23:46:17,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=79910.66666666667, ans=0.2
+2024-07-27 23:46:19,210 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.815e+01 6.538e+01 7.322e+01 9.632e+01, threshold=1.308e+02, percent-clipped=0.0
+2024-07-27 23:46:23,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=79924.0, ans=0.125
+2024-07-27 23:46:33,571 INFO [train.py:1114] (2/4) Epoch 6, batch 8850, loss[loss=0.2904, simple_loss=0.3524, pruned_loss=0.1142, over 4446.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3125, pruned_loss=0.07674, over 931930.57 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:46:33,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=79950.66666666667, ans=0.125
+2024-07-27 23:46:34,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79950.66666666667, ans=0.125
+2024-07-27 23:47:04,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79977.33333333333, ans=0.1
+2024-07-27 23:47:05,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=79977.33333333333, ans=0.125
+2024-07-27 23:47:06,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.20 vs. limit=6.0
+2024-07-27 23:47:07,966 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=15.0
+2024-07-27 23:47:09,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=79990.66666666667, ans=0.0
+2024-07-27 23:47:12,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=79990.66666666667, ans=0.125
+2024-07-27 23:47:34,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=80004.0, ans=0.125
+2024-07-27 23:47:34,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=80017.33333333333, ans=0.2
+2024-07-27 23:47:34,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=80017.33333333333, ans=0.2
+2024-07-27 23:47:35,387 INFO [train.py:1114] (2/4) Epoch 6, batch 8900, loss[loss=0.2698, simple_loss=0.3407, pruned_loss=0.09945, over 4946.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3134, pruned_loss=0.07725, over 930250.19 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:47:43,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80030.66666666667, ans=0.1
+2024-07-27 23:47:47,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=80030.66666666667, ans=0.125
+2024-07-27 23:47:53,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.33 vs. limit=15.0
+2024-07-27 23:47:53,362 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 6.167e+01 6.816e+01 7.855e+01 1.273e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-27 23:47:53,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=80044.0, ans=0.05
+2024-07-27 23:47:54,947 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0
+2024-07-27 23:47:57,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80057.33333333333, ans=0.125
+2024-07-27 23:48:06,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=80070.66666666667, ans=0.125
+2024-07-27 23:48:07,401 INFO [train.py:1114] (2/4) Epoch 6, batch 8950, loss[loss=0.2285, simple_loss=0.3179, pruned_loss=0.06953, over 4517.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.312, pruned_loss=0.0762, over 930926.47 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:48:09,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=80084.0, ans=0.125
+2024-07-27 23:48:10,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=80084.0, ans=0.2
+2024-07-27 23:48:20,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=80110.66666666667, ans=0.2
+2024-07-27 23:48:25,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=80110.66666666667, ans=0.125
+2024-07-27 23:48:29,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=80124.0, ans=0.0
+2024-07-27 23:48:40,063 INFO [train.py:1114] (2/4) Epoch 6, batch 9000, loss[loss=0.2071, simple_loss=0.2944, pruned_loss=0.05992, over 4639.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.3104, pruned_loss=0.07539, over 933816.93 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:48:40,064 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-27 23:48:52,411 INFO [train.py:1146] (2/4) Epoch 6, validation: loss=0.1898, simple_loss=0.2938, pruned_loss=0.0429, over 944034.00 frames. 
+2024-07-27 23:48:52,412 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-27 23:48:56,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=80150.66666666667, ans=0.09899494936611666 +2024-07-27 23:49:08,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=15.0 +2024-07-27 23:49:10,603 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.150e+01 6.230e+01 7.342e+01 8.976e+01 1.203e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 23:49:14,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=80190.66666666667, ans=0.0 +2024-07-27 23:49:18,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80204.0, ans=0.125 +2024-07-27 23:49:24,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.11 vs. limit=15.0 +2024-07-27 23:49:25,568 INFO [train.py:1114] (2/4) Epoch 6, batch 9050, loss[loss=0.1828, simple_loss=0.256, pruned_loss=0.05474, over 4550.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.31, pruned_loss=0.07505, over 934232.91 frames. ], batch size: 10, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:49:25,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80217.33333333333, ans=0.125 +2024-07-27 23:49:34,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=80230.66666666667, ans=0.2 +2024-07-27 23:49:35,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=80230.66666666667, ans=0.125 +2024-07-27 23:49:54,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80257.33333333333, ans=0.125 +2024-07-27 23:50:03,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=80284.0, ans=0.09899494936611666 +2024-07-27 23:50:03,559 INFO [train.py:1114] (2/4) Epoch 6, batch 9100, loss[loss=0.1987, simple_loss=0.2917, pruned_loss=0.05285, over 4925.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3088, pruned_loss=0.07399, over 936836.80 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:50:12,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-27 23:50:20,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80310.66666666667, ans=0.1 +2024-07-27 23:50:21,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.88 vs. 
limit=22.5 +2024-07-27 23:50:21,450 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.154e+01 7.130e+01 8.632e+01 1.081e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 23:50:32,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80337.33333333333, ans=0.125 +2024-07-27 23:50:35,557 INFO [train.py:1114] (2/4) Epoch 6, batch 9150, loss[loss=0.2119, simple_loss=0.3109, pruned_loss=0.05648, over 4809.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3099, pruned_loss=0.07486, over 935421.28 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:50:40,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=80350.66666666667, ans=0.0 +2024-07-27 23:50:48,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=80377.33333333333, ans=0.125 +2024-07-27 23:50:50,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=80377.33333333333, ans=0.125 +2024-07-27 23:50:52,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=80377.33333333333, ans=0.125 +2024-07-27 23:51:10,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=80390.66666666667, ans=0.1 +2024-07-27 23:51:11,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80390.66666666667, ans=0.125 +2024-07-27 23:51:15,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.58 vs. limit=15.0 +2024-07-27 23:51:17,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.19 vs. limit=15.0 +2024-07-27 23:51:18,732 INFO [train.py:1114] (2/4) Epoch 6, batch 9200, loss[loss=0.2057, simple_loss=0.285, pruned_loss=0.06318, over 4852.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3088, pruned_loss=0.07408, over 937176.64 frames. 
], batch size: 12, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:51:21,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=80417.33333333333, ans=0.025 +2024-07-27 23:51:27,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80430.66666666667, ans=0.125 +2024-07-27 23:51:40,372 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.086e+01 6.690e+01 8.259e+01 1.289e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-27 23:51:41,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=80457.33333333333, ans=0.0 +2024-07-27 23:51:44,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=80457.33333333333, ans=0.0 +2024-07-27 23:51:49,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=80470.66666666667, ans=0.125 +2024-07-27 23:51:54,184 INFO [train.py:1114] (2/4) Epoch 6, batch 9250, loss[loss=0.2367, simple_loss=0.3188, pruned_loss=0.07735, over 4632.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3102, pruned_loss=0.07479, over 937867.80 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 64.0 +2024-07-27 23:52:16,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80524.0, ans=0.1 +2024-07-27 23:52:18,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=80524.0, ans=0.0 +2024-07-27 23:52:26,177 INFO [train.py:1114] (2/4) Epoch 6, batch 9300, loss[loss=0.2106, simple_loss=0.288, pruned_loss=0.06654, over 4784.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3101, pruned_loss=0.07484, over 938192.63 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:52:26,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=80550.66666666667, ans=0.0 +2024-07-27 23:52:29,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=80550.66666666667, ans=0.125 +2024-07-27 23:52:36,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=80564.0, ans=0.125 +2024-07-27 23:52:43,896 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.063e+01 5.901e+01 6.419e+01 7.368e+01 1.271e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-27 23:52:51,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80604.0, ans=0.1 +2024-07-27 23:52:55,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80604.0, ans=0.0 +2024-07-27 23:52:58,699 INFO [train.py:1114] (2/4) Epoch 6, batch 9350, loss[loss=0.2112, simple_loss=0.2879, pruned_loss=0.06724, over 4802.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3098, pruned_loss=0.07486, over 935091.20 frames. 
], batch size: 11, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:53:07,129 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:53:07,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=80630.66666666667, ans=0.0 +2024-07-27 23:53:16,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=80644.0, ans=0.125 +2024-07-27 23:53:17,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=80657.33333333333, ans=0.025 +2024-07-27 23:53:18,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-07-27 23:53:30,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=80670.66666666667, ans=0.07 +2024-07-27 23:53:31,231 INFO [train.py:1114] (2/4) Epoch 6, batch 9400, loss[loss=0.2513, simple_loss=0.3338, pruned_loss=0.08435, over 4692.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3088, pruned_loss=0.07439, over 932668.81 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0 +2024-07-27 23:53:37,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=80697.33333333333, ans=0.0 +2024-07-27 23:53:45,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=12.0 +2024-07-27 23:53:49,493 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.055e+01 7.065e+01 8.211e+01 1.397e+02, threshold=1.413e+02, percent-clipped=1.0 +2024-07-27 23:53:51,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80724.0, ans=0.1 +2024-07-27 23:53:51,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=80724.0, ans=0.2 +2024-07-27 23:53:55,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80724.0, ans=0.1 +2024-07-27 23:53:55,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.37 vs. limit=15.0 +2024-07-27 23:53:56,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=80737.33333333333, ans=0.0 +2024-07-27 23:54:02,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=80750.66666666667, ans=0.125 +2024-07-27 23:54:02,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=80750.66666666667, ans=15.0 +2024-07-27 23:54:02,795 INFO [train.py:1114] (2/4) Epoch 6, batch 9450, loss[loss=0.1543, simple_loss=0.2223, pruned_loss=0.04311, over 4802.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.3082, pruned_loss=0.07399, over 932654.17 frames. 
], batch size: 11, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:54:04,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.06 vs. limit=15.0 +2024-07-27 23:54:08,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=80764.0, ans=0.0 +2024-07-27 23:54:11,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=80764.0, ans=15.0 +2024-07-27 23:54:17,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-27 23:54:17,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.71 vs. limit=15.0 +2024-07-27 23:54:17,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=80777.33333333333, ans=0.5 +2024-07-27 23:54:28,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=12.0 +2024-07-27 23:54:28,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=80804.0, ans=0.2 +2024-07-27 23:54:31,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=80804.0, ans=0.2 +2024-07-27 23:54:34,511 INFO [train.py:1114] (2/4) Epoch 6, batch 9500, loss[loss=0.2075, simple_loss=0.2936, pruned_loss=0.06066, over 4716.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3089, pruned_loss=0.07382, over 934846.28 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:54:34,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=80817.33333333333, ans=0.125 +2024-07-27 23:54:40,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=80830.66666666667, ans=0.025 +2024-07-27 23:54:40,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=80830.66666666667, ans=0.09899494936611666 +2024-07-27 23:54:49,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=80844.0, ans=0.0 +2024-07-27 23:54:49,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.01 vs. 
limit=10.0 +2024-07-27 23:54:52,209 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 6.030e+01 6.974e+01 8.015e+01 1.181e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 23:54:52,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=80857.33333333333, ans=0.2 +2024-07-27 23:54:54,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=80857.33333333333, ans=0.025 +2024-07-27 23:54:57,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80857.33333333333, ans=0.1 +2024-07-27 23:55:03,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-07-27 23:55:05,225 INFO [train.py:1114] (2/4) Epoch 6, batch 9550, loss[loss=0.2413, simple_loss=0.3149, pruned_loss=0.08381, over 4775.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3096, pruned_loss=0.07468, over 932127.95 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:55:06,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=80884.0, ans=0.2 +2024-07-27 23:55:09,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.14 vs. limit=15.0 +2024-07-27 23:55:18,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.88 vs. limit=10.0 +2024-07-27 23:55:27,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=80924.0, ans=0.2 +2024-07-27 23:55:32,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=80937.33333333333, ans=0.2 +2024-07-27 23:55:36,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80937.33333333333, ans=0.1 +2024-07-27 23:55:38,083 INFO [train.py:1114] (2/4) Epoch 6, batch 9600, loss[loss=0.2639, simple_loss=0.3301, pruned_loss=0.09884, over 3273.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3105, pruned_loss=0.07488, over 931023.95 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:55:39,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.30 vs. limit=22.5 +2024-07-27 23:55:48,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=80964.0, ans=0.125 +2024-07-27 23:55:50,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.70 vs. 
limit=6.0 +2024-07-27 23:55:53,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80977.33333333333, ans=0.1 +2024-07-27 23:55:56,584 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.004e+01 6.228e+01 7.001e+01 7.870e+01 1.117e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 23:56:02,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=80990.66666666667, ans=0.1 +2024-07-27 23:56:03,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=81004.0, ans=0.0 +2024-07-27 23:58:41,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81004.0, ans=0.125 +2024-07-27 23:58:43,520 INFO [train.py:1114] (2/4) Epoch 6, batch 9650, loss[loss=0.2455, simple_loss=0.3275, pruned_loss=0.08174, over 4838.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3105, pruned_loss=0.07496, over 927103.54 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:58:49,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81030.66666666667, ans=0.1 +2024-07-27 23:58:54,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.41 vs. limit=15.0 +2024-07-27 23:59:15,242 INFO [train.py:1114] (2/4) Epoch 6, batch 9700, loss[loss=0.2693, simple_loss=0.336, pruned_loss=0.1013, over 4209.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3108, pruned_loss=0.07549, over 924464.21 frames. ], batch size: 25, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:59:27,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=81110.66666666667, ans=0.0 +2024-07-27 23:59:33,224 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 6.355e+01 7.161e+01 8.228e+01 1.300e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 23:59:46,573 INFO [train.py:1114] (2/4) Epoch 6, batch 9750, loss[loss=0.2159, simple_loss=0.3062, pruned_loss=0.06284, over 4679.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.311, pruned_loss=0.07542, over 925244.92 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0 +2024-07-27 23:59:52,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=81164.0, ans=0.07 +2024-07-27 23:59:52,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=81164.0, ans=0.2 +2024-07-28 00:00:15,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=81177.33333333333, ans=0.0 +2024-07-28 00:00:23,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.18 vs. 
limit=15.0 +2024-07-28 00:00:33,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=81204.0, ans=10.0 +2024-07-28 00:00:33,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=81204.0, ans=0.0 +2024-07-28 00:00:35,047 INFO [train.py:1114] (2/4) Epoch 6, batch 9800, loss[loss=0.1765, simple_loss=0.266, pruned_loss=0.04354, over 4706.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.309, pruned_loss=0.07463, over 924985.72 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:00:36,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.40 vs. limit=10.0 +2024-07-28 00:00:42,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.27 vs. limit=22.5 +2024-07-28 00:00:47,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=81244.0, ans=0.125 +2024-07-28 00:00:48,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=81244.0, ans=0.125 +2024-07-28 00:00:52,768 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.096e+01 6.416e+01 7.275e+01 8.758e+01 1.346e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-28 00:00:53,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=81257.33333333333, ans=0.0 +2024-07-28 00:00:54,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=81257.33333333333, ans=0.0 +2024-07-28 00:01:05,410 INFO [train.py:1114] (2/4) Epoch 6, batch 9850, loss[loss=0.2465, simple_loss=0.3232, pruned_loss=0.08492, over 4902.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3092, pruned_loss=0.07508, over 927406.16 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:01:11,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=81297.33333333333, ans=0.0 +2024-07-28 00:01:36,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=81337.33333333333, ans=0.0 +2024-07-28 00:01:37,335 INFO [train.py:1114] (2/4) Epoch 6, batch 9900, loss[loss=0.2509, simple_loss=0.335, pruned_loss=0.0834, over 4850.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.311, pruned_loss=0.07609, over 926693.06 frames. 
], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:01:50,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=81377.33333333333, ans=0.125 +2024-07-28 00:01:54,972 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.847e+01 6.249e+01 6.784e+01 7.688e+01 1.136e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-28 00:01:56,395 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:01:57,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=81390.66666666667, ans=0.125 +2024-07-28 00:01:59,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=12.0 +2024-07-28 00:02:02,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.00 vs. limit=10.0 +2024-07-28 00:02:07,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=81417.33333333333, ans=0.125 +2024-07-28 00:02:07,895 INFO [train.py:1114] (2/4) Epoch 6, batch 9950, loss[loss=0.214, simple_loss=0.2838, pruned_loss=0.07213, over 4813.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3126, pruned_loss=0.07722, over 928998.50 frames. ], batch size: 11, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:02:18,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=81430.66666666667, ans=0.0 +2024-07-28 00:02:22,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=81444.0, ans=0.0 +2024-07-28 00:02:27,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81457.33333333333, ans=0.1 +2024-07-28 00:02:31,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=81457.33333333333, ans=0.05 +2024-07-28 00:02:36,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=81470.66666666667, ans=15.0 +2024-07-28 00:02:37,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=81470.66666666667, ans=0.0 +2024-07-28 00:02:39,504 INFO [train.py:1114] (2/4) Epoch 6, batch 10000, loss[loss=0.2521, simple_loss=0.3265, pruned_loss=0.08884, over 4609.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3149, pruned_loss=0.07789, over 926043.61 frames. 
], batch size: 16, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:02:50,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=81497.33333333333, ans=0.125 +2024-07-28 00:02:52,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=81510.66666666667, ans=0.0 +2024-07-28 00:02:57,853 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.998e+01 6.471e+01 7.600e+01 1.218e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 00:02:57,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=81524.0, ans=10.0 +2024-07-28 00:03:00,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=81524.0, ans=0.0 +2024-07-28 00:03:05,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=81537.33333333333, ans=0.125 +2024-07-28 00:03:06,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=81537.33333333333, ans=0.125 +2024-07-28 00:03:11,404 INFO [train.py:1114] (2/4) Epoch 6, batch 10050, loss[loss=0.2655, simple_loss=0.327, pruned_loss=0.102, over 3254.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3189, pruned_loss=0.08019, over 914200.62 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:03:12,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81550.66666666667, ans=0.125 +2024-07-28 00:03:13,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=81550.66666666667, ans=0.0 +2024-07-28 00:03:16,956 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:03:21,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81564.0, ans=0.125 +2024-07-28 00:03:25,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=15.0 +2024-07-28 00:03:28,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=81577.33333333333, ans=0.0 +2024-07-28 00:03:34,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81590.66666666667, ans=0.1 +2024-07-28 00:03:42,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=81604.0, ans=0.0 +2024-07-28 00:03:45,278 INFO [train.py:1114] (2/4) Epoch 6, batch 10100, loss[loss=0.2274, simple_loss=0.2942, pruned_loss=0.0803, over 3533.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3251, pruned_loss=0.08728, over 861766.94 frames. 
], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:03:47,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=81617.33333333333, ans=0.125 +2024-07-28 00:04:03,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=81644.0, ans=0.025 +2024-07-28 00:04:04,055 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.601e+01 6.841e+01 7.276e+01 7.854e+01 1.337e+02, threshold=1.455e+02, percent-clipped=1.0 +2024-07-28 00:04:08,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=81657.33333333333, ans=0.0 +2024-07-28 00:04:17,456 INFO [train.py:1114] (2/4) Epoch 6, batch 10150, loss[loss=0.2385, simple_loss=0.3234, pruned_loss=0.07686, over 3392.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3292, pruned_loss=0.09203, over 821322.02 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:04:20,064 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:04:21,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.60 vs. limit=15.0 +2024-07-28 00:04:30,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=81710.66666666667, ans=0.025 +2024-07-28 00:04:34,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=81710.66666666667, ans=0.125 +2024-07-28 00:04:48,172 INFO [train.py:1114] (2/4) Epoch 6, batch 10200, loss[loss=0.2434, simple_loss=0.3103, pruned_loss=0.08825, over 3496.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3314, pruned_loss=0.09529, over 790664.26 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0 +2024-07-28 00:04:50,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=8.06 vs. limit=12.0 +2024-07-28 00:05:45,913 INFO [train.py:1114] (2/4) Epoch 7, batch 0, loss[loss=0.1959, simple_loss=0.2862, pruned_loss=0.05282, over 4848.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2862, pruned_loss=0.05282, over 4848.00 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:05:45,914 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 00:05:57,486 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.1928, simple_loss=0.2981, pruned_loss=0.04372, over 944034.00 frames. +2024-07-28 00:05:57,487 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 00:05:59,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81780.0, ans=0.1 +2024-07-28 00:06:04,566 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.706e+01 6.568e+01 7.074e+01 7.483e+01 1.038e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 00:06:26,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-07-28 00:06:33,963 INFO [train.py:1114] (2/4) Epoch 7, batch 50, loss[loss=0.2046, simple_loss=0.2773, pruned_loss=0.06597, over 4623.00 frames. 
], tot_loss[loss=0.2318, simple_loss=0.3136, pruned_loss=0.07505, over 206733.77 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:06:37,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=81846.66666666667, ans=0.025 +2024-07-28 00:06:48,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.49 vs. limit=6.0 +2024-07-28 00:07:07,543 INFO [train.py:1114] (2/4) Epoch 7, batch 100, loss[loss=0.2102, simple_loss=0.2891, pruned_loss=0.06566, over 4645.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3145, pruned_loss=0.07506, over 365859.60 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:07:12,083 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.914e+01 6.777e+01 7.920e+01 1.192e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-28 00:07:16,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=81926.66666666667, ans=0.1 +2024-07-28 00:07:26,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=81940.0, ans=0.125 +2024-07-28 00:07:30,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=81953.33333333333, ans=0.0 +2024-07-28 00:07:33,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=81966.66666666667, ans=0.125 +2024-07-28 00:07:40,165 INFO [train.py:1114] (2/4) Epoch 7, batch 150, loss[loss=0.1699, simple_loss=0.2579, pruned_loss=0.04092, over 4618.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3091, pruned_loss=0.07239, over 494468.96 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:07:40,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=81980.0, ans=0.125 +2024-07-28 00:07:52,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.67 vs. limit=10.0 +2024-07-28 00:07:56,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-28 00:07:57,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=82006.66666666667, ans=0.0 +2024-07-28 00:08:00,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=82020.0, ans=0.2 +2024-07-28 00:08:02,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.52 vs. limit=15.0 +2024-07-28 00:08:06,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82033.33333333333, ans=0.125 +2024-07-28 00:08:12,787 INFO [train.py:1114] (2/4) Epoch 7, batch 200, loss[loss=0.304, simple_loss=0.3776, pruned_loss=0.1152, over 4513.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3097, pruned_loss=0.07388, over 593962.66 frames. 
], batch size: 21, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:08:12,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=82046.66666666667, ans=0.0 +2024-07-28 00:08:17,411 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.956e+01 6.544e+01 7.409e+01 1.468e+02, threshold=1.309e+02, percent-clipped=1.0 +2024-07-28 00:08:17,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=82046.66666666667, ans=0.0 +2024-07-28 00:08:17,803 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.12 vs. limit=15.0 +2024-07-28 00:08:46,286 INFO [train.py:1114] (2/4) Epoch 7, batch 250, loss[loss=0.2477, simple_loss=0.3362, pruned_loss=0.07961, over 4625.00 frames. ], tot_loss[loss=0.2272, simple_loss=0.3085, pruned_loss=0.07293, over 670493.89 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:08:51,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82113.33333333333, ans=0.1 +2024-07-28 00:08:57,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=82126.66666666667, ans=0.125 +2024-07-28 00:09:06,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=82153.33333333333, ans=0.0 +2024-07-28 00:09:08,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0 +2024-07-28 00:09:09,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82153.33333333333, ans=0.1 +2024-07-28 00:09:10,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82153.33333333333, ans=0.1 +2024-07-28 00:09:12,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=82166.66666666667, ans=0.125 +2024-07-28 00:09:16,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82166.66666666667, ans=0.125 +2024-07-28 00:09:19,515 INFO [train.py:1114] (2/4) Epoch 7, batch 300, loss[loss=0.2185, simple_loss=0.3113, pruned_loss=0.06285, over 4805.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3068, pruned_loss=0.07182, over 730188.03 frames. 
], batch size: 15, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:09:21,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82180.0, ans=0.125 +2024-07-28 00:09:24,031 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.988e+01 6.705e+01 7.891e+01 1.591e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 00:09:35,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82206.66666666667, ans=0.125 +2024-07-28 00:09:39,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=82206.66666666667, ans=0.0 +2024-07-28 00:09:42,279 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.75 vs. limit=22.5 +2024-07-28 00:09:49,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=82233.33333333333, ans=0.0 +2024-07-28 00:09:54,199 INFO [train.py:1114] (2/4) Epoch 7, batch 350, loss[loss=0.2368, simple_loss=0.3044, pruned_loss=0.08461, over 4949.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.307, pruned_loss=0.07212, over 776485.75 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:09:57,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=82246.66666666667, ans=0.0 +2024-07-28 00:10:12,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.36 vs. limit=22.5 +2024-07-28 00:10:13,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.16 vs. limit=15.0 +2024-07-28 00:10:21,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=82286.66666666667, ans=0.0 +2024-07-28 00:10:25,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.58 vs. limit=15.0 +2024-07-28 00:10:26,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=82300.0, ans=0.0 +2024-07-28 00:10:27,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=82300.0, ans=0.125 +2024-07-28 00:10:29,132 INFO [train.py:1114] (2/4) Epoch 7, batch 400, loss[loss=0.2289, simple_loss=0.3183, pruned_loss=0.06979, over 4702.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3059, pruned_loss=0.07142, over 813853.42 frames. 
], batch size: 13, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:10:33,746 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.839e+01 6.182e+01 6.903e+01 9.738e+01, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 00:10:36,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82326.66666666667, ans=0.125 +2024-07-28 00:10:39,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=82326.66666666667, ans=0.125 +2024-07-28 00:10:44,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=82340.0, ans=0.09899494936611666 +2024-07-28 00:10:49,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=82340.0, ans=0.0 +2024-07-28 00:10:53,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=82353.33333333333, ans=0.125 +2024-07-28 00:10:57,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=82366.66666666667, ans=0.025 +2024-07-28 00:11:00,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=82366.66666666667, ans=0.2 +2024-07-28 00:11:04,437 INFO [train.py:1114] (2/4) Epoch 7, batch 450, loss[loss=0.2821, simple_loss=0.3646, pruned_loss=0.09984, over 4628.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.307, pruned_loss=0.07203, over 838733.71 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:11:04,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82380.0, ans=0.1 +2024-07-28 00:11:07,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82380.0, ans=0.1 +2024-07-28 00:11:08,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=82380.0, ans=0.05 +2024-07-28 00:11:11,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=82393.33333333333, ans=0.2 +2024-07-28 00:11:16,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=82393.33333333333, ans=0.125 +2024-07-28 00:11:17,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=82406.66666666667, ans=0.0 +2024-07-28 00:11:27,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=82420.0, ans=0.125 +2024-07-28 00:11:32,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=82433.33333333333, ans=0.2 +2024-07-28 00:11:39,263 INFO [train.py:1114] (2/4) Epoch 7, batch 500, loss[loss=0.2584, simple_loss=0.3432, pruned_loss=0.0868, over 4683.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3061, pruned_loss=0.07168, over 861194.82 frames. 
], batch size: 15, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:11:41,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82446.66666666667, ans=0.1 +2024-07-28 00:11:44,370 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.805e+01 6.520e+01 7.491e+01 1.046e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:11:44,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=82446.66666666667, ans=0.125 +2024-07-28 00:12:03,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=82486.66666666667, ans=0.0 +2024-07-28 00:12:12,285 INFO [train.py:1114] (2/4) Epoch 7, batch 550, loss[loss=0.2199, simple_loss=0.2997, pruned_loss=0.07005, over 4637.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3071, pruned_loss=0.0721, over 877405.86 frames. ], batch size: 17, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:12:14,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=82513.33333333333, ans=0.025 +2024-07-28 00:12:40,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=82553.33333333333, ans=0.125 +2024-07-28 00:12:40,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=82566.66666666667, ans=0.0 +2024-07-28 00:12:44,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=82566.66666666667, ans=0.95 +2024-07-28 00:12:45,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=82566.66666666667, ans=0.0 +2024-07-28 00:12:47,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82566.66666666667, ans=0.1 +2024-07-28 00:12:48,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. limit=6.0 +2024-07-28 00:12:51,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=82580.0, ans=0.0 +2024-07-28 00:12:52,480 INFO [train.py:1114] (2/4) Epoch 7, batch 600, loss[loss=0.2414, simple_loss=0.3244, pruned_loss=0.07917, over 4640.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3072, pruned_loss=0.07191, over 892038.58 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:13:01,197 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.825e+01 6.471e+01 7.822e+01 1.372e+02, threshold=1.294e+02, percent-clipped=1.0 +2024-07-28 00:13:02,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=82593.33333333333, ans=0.2 +2024-07-28 00:13:17,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=82620.0, ans=0.0 +2024-07-28 00:13:22,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.95 vs. 
limit=10.0 +2024-07-28 00:13:28,961 INFO [train.py:1114] (2/4) Epoch 7, batch 650, loss[loss=0.193, simple_loss=0.2787, pruned_loss=0.05368, over 4759.00 frames. ], tot_loss[loss=0.226, simple_loss=0.3074, pruned_loss=0.07227, over 903563.85 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:13:56,133 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0 +2024-07-28 00:14:02,612 INFO [train.py:1114] (2/4) Epoch 7, batch 700, loss[loss=0.2095, simple_loss=0.2952, pruned_loss=0.06194, over 4642.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3067, pruned_loss=0.07196, over 911751.19 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:07,885 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.955e+01 6.627e+01 7.908e+01 1.237e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 00:14:09,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=82726.66666666667, ans=0.1 +2024-07-28 00:14:09,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=82726.66666666667, ans=0.0 +2024-07-28 00:14:30,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=82766.66666666667, ans=0.125 +2024-07-28 00:14:34,791 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.79 vs. limit=22.5 +2024-07-28 00:14:36,833 INFO [train.py:1114] (2/4) Epoch 7, batch 750, loss[loss=0.2322, simple_loss=0.3316, pruned_loss=0.06643, over 4687.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3071, pruned_loss=0.07189, over 918245.77 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:38,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82780.0, ans=0.1 +2024-07-28 00:14:40,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=82780.0, ans=0.0 +2024-07-28 00:14:57,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=82820.0, ans=0.025 +2024-07-28 00:14:59,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=82820.0, ans=0.0 +2024-07-28 00:15:03,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=82833.33333333333, ans=0.0 +2024-07-28 00:15:05,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82833.33333333333, ans=0.1 +2024-07-28 00:15:10,167 INFO [train.py:1114] (2/4) Epoch 7, batch 800, loss[loss=0.1916, simple_loss=0.268, pruned_loss=0.0576, over 4850.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3056, pruned_loss=0.07115, over 923291.22 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:10,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.79 vs. 
limit=15.0 +2024-07-28 00:15:17,228 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.902e+01 6.465e+01 7.413e+01 1.020e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 00:15:30,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=82873.33333333333, ans=0.0 +2024-07-28 00:15:33,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=82886.66666666667, ans=0.2 +2024-07-28 00:15:33,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=82886.66666666667, ans=0.0 +2024-07-28 00:15:34,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=82886.66666666667, ans=0.125 +2024-07-28 00:15:39,026 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:15:39,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=82900.0, ans=0.125 +2024-07-28 00:15:46,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.70 vs. limit=15.0 +2024-07-28 00:15:46,837 INFO [train.py:1114] (2/4) Epoch 7, batch 850, loss[loss=0.2165, simple_loss=0.3097, pruned_loss=0.06165, over 4673.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3056, pruned_loss=0.07131, over 927691.18 frames. ], batch size: 14, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:58,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=82926.66666666667, ans=0.05 +2024-07-28 00:16:02,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=82940.0, ans=0.125 +2024-07-28 00:16:06,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=82953.33333333333, ans=0.015 +2024-07-28 00:16:08,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=82953.33333333333, ans=0.125 +2024-07-28 00:16:08,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.24 vs. limit=10.0 +2024-07-28 00:16:09,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.52 vs. limit=8.0 +2024-07-28 00:16:14,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82966.66666666667, ans=0.1 +2024-07-28 00:16:21,804 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:16:22,225 INFO [train.py:1114] (2/4) Epoch 7, batch 900, loss[loss=0.187, simple_loss=0.2611, pruned_loss=0.05643, over 4848.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3067, pruned_loss=0.07204, over 928338.48 frames. 
], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:16:26,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82980.0, ans=0.1 +2024-07-28 00:16:27,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.904e+01 6.297e+01 6.765e+01 1.145e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 00:16:30,412 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.75 vs. limit=22.5 +2024-07-28 00:16:46,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=79.77 vs. limit=15.0 +2024-07-28 00:16:48,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=83033.33333333333, ans=0.0 +2024-07-28 00:16:50,756 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:16:52,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.38 vs. limit=22.5 +2024-07-28 00:16:57,799 INFO [train.py:1114] (2/4) Epoch 7, batch 950, loss[loss=0.204, simple_loss=0.2867, pruned_loss=0.06059, over 4782.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3071, pruned_loss=0.07163, over 930154.89 frames. ], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:04,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=83060.0, ans=0.0 +2024-07-28 00:17:31,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.05 vs. limit=15.0 +2024-07-28 00:17:31,224 INFO [train.py:1114] (2/4) Epoch 7, batch 1000, loss[loss=0.1948, simple_loss=0.2823, pruned_loss=0.05367, over 4962.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3078, pruned_loss=0.0723, over 930127.45 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:34,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=83113.33333333333, ans=0.0 +2024-07-28 00:17:34,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. 
limit=15.0 +2024-07-28 00:17:36,723 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+01 6.185e+01 7.251e+01 8.642e+01 1.358e+02, threshold=1.450e+02, percent-clipped=3.0 +2024-07-28 00:17:36,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=83113.33333333333, ans=0.125 +2024-07-28 00:17:39,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=83126.66666666667, ans=0.025 +2024-07-28 00:17:41,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=83126.66666666667, ans=0.2 +2024-07-28 00:17:53,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=83153.33333333333, ans=0.125 +2024-07-28 00:17:57,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=83166.66666666667, ans=0.125 +2024-07-28 00:18:03,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=83166.66666666667, ans=0.0 +2024-07-28 00:18:04,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=83166.66666666667, ans=0.0 +2024-07-28 00:18:05,101 INFO [train.py:1114] (2/4) Epoch 7, batch 1050, loss[loss=0.2133, simple_loss=0.3031, pruned_loss=0.0618, over 4876.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.306, pruned_loss=0.07211, over 932842.52 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:07,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=83180.0, ans=0.1 +2024-07-28 00:18:28,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83193.33333333333, ans=0.1 +2024-07-28 00:18:29,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=83193.33333333333, ans=0.125 +2024-07-28 00:18:30,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=83206.66666666667, ans=0.09899494936611666 +2024-07-28 00:18:33,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83206.66666666667, ans=0.0 +2024-07-28 00:18:38,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=83220.0, ans=0.125 +2024-07-28 00:18:38,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=83220.0, ans=0.025 +2024-07-28 00:18:42,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.82 vs. limit=12.0 +2024-07-28 00:18:48,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83233.33333333333, ans=0.1 +2024-07-28 00:18:50,780 INFO [train.py:1114] (2/4) Epoch 7, batch 1100, loss[loss=0.2536, simple_loss=0.3269, pruned_loss=0.09016, over 4895.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3064, pruned_loss=0.07201, over 935716.08 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:50,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=83246.66666666667, ans=0.07 +2024-07-28 00:18:52,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:18:53,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:18:53,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=83246.66666666667, ans=0.0 +2024-07-28 00:18:56,152 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.849e+01 5.958e+01 6.479e+01 7.755e+01 1.091e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:18:56,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:19:11,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=83273.33333333333, ans=0.125 +2024-07-28 00:19:13,201 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:19:28,119 INFO [train.py:1114] (2/4) Epoch 7, batch 1150, loss[loss=0.2107, simple_loss=0.296, pruned_loss=0.06272, over 4900.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3056, pruned_loss=0.07162, over 935440.68 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:19:30,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=83313.33333333333, ans=0.0 +2024-07-28 00:19:34,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83326.66666666667, ans=0.125 +2024-07-28 00:19:46,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83340.0, ans=0.125 +2024-07-28 00:19:49,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=83340.0, ans=0.125 +2024-07-28 00:19:51,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=83353.33333333333, ans=0.2 +2024-07-28 00:19:55,156 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.66 vs. limit=15.0 +2024-07-28 00:19:55,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83353.33333333333, ans=0.1 +2024-07-28 00:20:02,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=83366.66666666667, ans=0.125 +2024-07-28 00:20:02,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-07-28 00:20:05,257 INFO [train.py:1114] (2/4) Epoch 7, batch 1200, loss[loss=0.246, simple_loss=0.3339, pruned_loss=0.079, over 4880.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3071, pruned_loss=0.07267, over 934446.53 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:20:09,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=83380.0, ans=0.125 +2024-07-28 00:20:10,449 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.660e+01 6.364e+01 7.390e+01 1.227e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 00:20:37,997 INFO [train.py:1114] (2/4) Epoch 7, batch 1250, loss[loss=0.2439, simple_loss=0.3276, pruned_loss=0.08008, over 4796.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3069, pruned_loss=0.07177, over 938099.44 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:20:50,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=83460.0, ans=0.5 +2024-07-28 00:20:52,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83473.33333333333, ans=0.1 +2024-07-28 00:21:16,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83500.0, ans=0.1 +2024-07-28 00:21:21,940 INFO [train.py:1114] (2/4) Epoch 7, batch 1300, loss[loss=0.244, simple_loss=0.3164, pruned_loss=0.08578, over 4757.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3065, pruned_loss=0.07172, over 939599.20 frames. ], batch size: 19, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:22,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=83513.33333333333, ans=0.125 +2024-07-28 00:21:25,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=83513.33333333333, ans=0.2 +2024-07-28 00:21:26,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.788e+01 6.480e+01 7.663e+01 1.256e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:21:31,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=83526.66666666667, ans=0.025 +2024-07-28 00:21:44,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=83553.33333333333, ans=0.2 +2024-07-28 00:21:45,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=12.0 +2024-07-28 00:21:55,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83566.66666666667, ans=0.1 +2024-07-28 00:21:56,798 INFO [train.py:1114] (2/4) Epoch 7, batch 1350, loss[loss=0.2515, simple_loss=0.3287, pruned_loss=0.08714, over 4761.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3053, pruned_loss=0.07079, over 941402.01 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:57,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83580.0, ans=0.1 +2024-07-28 00:21:58,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.48 vs. 
limit=12.0 +2024-07-28 00:22:04,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=83593.33333333333, ans=0.125 +2024-07-28 00:22:13,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83606.66666666667, ans=0.125 +2024-07-28 00:22:15,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=83606.66666666667, ans=0.0 +2024-07-28 00:22:18,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=83620.0, ans=0.125 +2024-07-28 00:22:22,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=83620.0, ans=0.125 +2024-07-28 00:22:23,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=83620.0, ans=0.025 +2024-07-28 00:22:29,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=83633.33333333333, ans=0.2 +2024-07-28 00:22:31,680 INFO [train.py:1114] (2/4) Epoch 7, batch 1400, loss[loss=0.168, simple_loss=0.2533, pruned_loss=0.04138, over 4703.00 frames. ], tot_loss[loss=0.223, simple_loss=0.305, pruned_loss=0.0705, over 942997.87 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:22:36,882 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.900e+01 5.949e+01 6.637e+01 7.853e+01 1.145e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 00:22:38,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=83660.0, ans=0.035 +2024-07-28 00:22:39,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.03 vs. limit=10.0 +2024-07-28 00:22:49,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 00:22:49,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.38 vs. limit=10.0 +2024-07-28 00:22:55,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=83686.66666666667, ans=0.125 +2024-07-28 00:22:55,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83686.66666666667, ans=0.0 +2024-07-28 00:22:57,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=83686.66666666667, ans=10.0 +2024-07-28 00:23:06,296 INFO [train.py:1114] (2/4) Epoch 7, batch 1450, loss[loss=0.2271, simple_loss=0.3128, pruned_loss=0.07066, over 4674.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3057, pruned_loss=0.07083, over 942694.96 frames. 
], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:13,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=83726.66666666667, ans=0.2 +2024-07-28 00:23:25,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.91 vs. limit=15.0 +2024-07-28 00:23:27,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=83753.33333333333, ans=15.0 +2024-07-28 00:23:35,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=83766.66666666667, ans=0.0 +2024-07-28 00:23:37,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=83766.66666666667, ans=0.125 +2024-07-28 00:23:39,632 INFO [train.py:1114] (2/4) Epoch 7, batch 1500, loss[loss=0.2167, simple_loss=0.3126, pruned_loss=0.06043, over 4806.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3057, pruned_loss=0.07037, over 942195.43 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:41,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.04 vs. limit=6.0 +2024-07-28 00:23:42,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=83780.0, ans=0.07 +2024-07-28 00:23:45,189 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.882e+01 6.521e+01 7.412e+01 1.092e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:23:50,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=83793.33333333333, ans=0.125 +2024-07-28 00:23:55,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=83806.66666666667, ans=0.125 +2024-07-28 00:23:58,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.10 vs. limit=10.0 +2024-07-28 00:24:00,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=83820.0, ans=0.125 +2024-07-28 00:24:12,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83833.33333333333, ans=0.1 +2024-07-28 00:24:15,242 INFO [train.py:1114] (2/4) Epoch 7, batch 1550, loss[loss=0.245, simple_loss=0.3223, pruned_loss=0.08381, over 4918.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3052, pruned_loss=0.07046, over 938819.89 frames. 
], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:24:24,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=83860.0, ans=0.0 +2024-07-28 00:24:43,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=83860.0, ans=0.0 +2024-07-28 00:24:44,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=83873.33333333333, ans=0.125 +2024-07-28 00:24:51,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.36 vs. limit=22.5 +2024-07-28 00:24:51,307 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.00 vs. limit=6.0 +2024-07-28 00:24:55,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-07-28 00:24:56,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=83886.66666666667, ans=0.125 +2024-07-28 00:25:03,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.21 vs. limit=15.0 +2024-07-28 00:25:09,013 INFO [train.py:1114] (2/4) Epoch 7, batch 1600, loss[loss=0.1984, simple_loss=0.286, pruned_loss=0.05536, over 4875.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3036, pruned_loss=0.07002, over 938051.25 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:11,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=83913.33333333333, ans=0.125 +2024-07-28 00:25:11,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=83913.33333333333, ans=0.2 +2024-07-28 00:25:17,695 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.865e+01 6.513e+01 7.777e+01 1.353e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 00:25:20,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=83926.66666666667, ans=0.0 +2024-07-28 00:25:20,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=83926.66666666667, ans=0.1 +2024-07-28 00:25:24,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=83926.66666666667, ans=0.5 +2024-07-28 00:25:37,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.07 vs. limit=12.0 +2024-07-28 00:25:39,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.00 vs. limit=22.5 +2024-07-28 00:25:45,981 INFO [train.py:1114] (2/4) Epoch 7, batch 1650, loss[loss=0.2109, simple_loss=0.3114, pruned_loss=0.05522, over 4671.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3034, pruned_loss=0.06997, over 937820.42 frames. 
], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:46,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=83980.0, ans=0.125 +2024-07-28 00:25:50,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=83980.0, ans=0.125 +2024-07-28 00:26:02,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=15.0 +2024-07-28 00:26:03,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=84006.66666666667, ans=0.125 +2024-07-28 00:26:10,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84020.0, ans=0.0 +2024-07-28 00:26:21,405 INFO [train.py:1114] (2/4) Epoch 7, batch 1700, loss[loss=0.1841, simple_loss=0.2643, pruned_loss=0.05196, over 4709.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3032, pruned_loss=0.06974, over 939426.91 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:23,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=84046.66666666667, ans=0.025 +2024-07-28 00:26:25,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.26 vs. limit=15.0 +2024-07-28 00:26:26,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84046.66666666667, ans=0.0 +2024-07-28 00:26:26,711 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.250e+01 6.932e+01 8.047e+01 1.262e+02, threshold=1.386e+02, percent-clipped=0.0 +2024-07-28 00:26:30,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=84060.0, ans=0.95 +2024-07-28 00:26:32,735 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:26:34,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=84073.33333333333, ans=0.125 +2024-07-28 00:26:45,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.88 vs. limit=12.0 +2024-07-28 00:26:53,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=84100.0, ans=0.125 +2024-07-28 00:26:54,581 INFO [train.py:1114] (2/4) Epoch 7, batch 1750, loss[loss=0.1692, simple_loss=0.2441, pruned_loss=0.04715, over 4798.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3029, pruned_loss=0.06987, over 940470.91 frames. 
], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:54,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=84113.33333333333, ans=0.125 +2024-07-28 00:27:09,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84140.0, ans=0.0 +2024-07-28 00:27:33,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=84166.66666666667, ans=0.125 +2024-07-28 00:27:34,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84166.66666666667, ans=0.0 +2024-07-28 00:27:35,783 INFO [train.py:1114] (2/4) Epoch 7, batch 1800, loss[loss=0.2598, simple_loss=0.3422, pruned_loss=0.08875, over 4636.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3037, pruned_loss=0.07044, over 941398.79 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:27:36,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=84180.0, ans=0.125 +2024-07-28 00:27:39,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=84180.0, ans=0.125 +2024-07-28 00:27:40,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=84180.0, ans=0.0 +2024-07-28 00:27:41,157 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 5.927e+01 6.951e+01 8.175e+01 1.232e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-28 00:27:43,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.11 vs. limit=15.0 +2024-07-28 00:27:45,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=84193.33333333333, ans=0.0 +2024-07-28 00:27:47,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=84193.33333333333, ans=0.0 +2024-07-28 00:27:56,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84206.66666666667, ans=0.1 +2024-07-28 00:28:00,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=84220.0, ans=0.04949747468305833 +2024-07-28 00:28:14,938 INFO [train.py:1114] (2/4) Epoch 7, batch 1850, loss[loss=0.232, simple_loss=0.3213, pruned_loss=0.07138, over 4813.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3044, pruned_loss=0.07058, over 940927.47 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:18,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=84246.66666666667, ans=0.07 +2024-07-28 00:28:20,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.03 vs. 
limit=22.5 +2024-07-28 00:28:44,482 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:28:45,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.83 vs. limit=15.0 +2024-07-28 00:28:50,168 INFO [train.py:1114] (2/4) Epoch 7, batch 1900, loss[loss=0.2111, simple_loss=0.3047, pruned_loss=0.05879, over 4662.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3048, pruned_loss=0.07016, over 941998.36 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:55,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=84313.33333333333, ans=15.0 +2024-07-28 00:28:55,322 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.987e+01 6.515e+01 7.725e+01 1.148e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 00:28:59,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=84326.66666666667, ans=0.0 +2024-07-28 00:29:13,691 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:29:22,773 INFO [train.py:1114] (2/4) Epoch 7, batch 1950, loss[loss=0.2237, simple_loss=0.2988, pruned_loss=0.07428, over 4895.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3056, pruned_loss=0.0706, over 943996.64 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:29:29,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.70 vs. limit=15.0 +2024-07-28 00:29:30,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=84393.33333333333, ans=0.035 +2024-07-28 00:29:30,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84393.33333333333, ans=0.0 +2024-07-28 00:29:30,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84393.33333333333, ans=0.125 +2024-07-28 00:29:36,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=84406.66666666667, ans=0.125 +2024-07-28 00:29:56,356 INFO [train.py:1114] (2/4) Epoch 7, batch 2000, loss[loss=0.2022, simple_loss=0.2826, pruned_loss=0.06095, over 4809.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3065, pruned_loss=0.07107, over 941684.32 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:29:57,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=84446.66666666667, ans=0.125 +2024-07-28 00:30:01,591 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 6.163e+01 6.683e+01 7.706e+01 1.195e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 00:30:27,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=84486.66666666667, ans=0.125 +2024-07-28 00:30:31,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.80 vs. 
limit=15.0 +2024-07-28 00:30:32,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=84500.0, ans=0.0 +2024-07-28 00:30:32,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.90 vs. limit=22.5 +2024-07-28 00:30:36,703 INFO [train.py:1114] (2/4) Epoch 7, batch 2050, loss[loss=0.1909, simple_loss=0.2624, pruned_loss=0.05973, over 4606.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3055, pruned_loss=0.07071, over 940071.49 frames. ], batch size: 11, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:30:36,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=84513.33333333333, ans=0.2 +2024-07-28 00:30:48,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=84526.66666666667, ans=0.125 +2024-07-28 00:30:48,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0 +2024-07-28 00:30:49,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=84540.0, ans=0.125 +2024-07-28 00:31:05,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=84566.66666666667, ans=0.125 +2024-07-28 00:31:05,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=84566.66666666667, ans=0.2 +2024-07-28 00:31:06,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=84566.66666666667, ans=0.0 +2024-07-28 00:31:09,869 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:31:11,091 INFO [train.py:1114] (2/4) Epoch 7, batch 2100, loss[loss=0.2062, simple_loss=0.29, pruned_loss=0.06114, over 4751.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3048, pruned_loss=0.07048, over 941488.95 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:14,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.29 vs. limit=15.0 +2024-07-28 00:31:16,339 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.891e+01 6.506e+01 7.465e+01 1.283e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 00:31:17,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.03 vs. 
limit=15.0 +2024-07-28 00:31:21,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=84593.33333333333, ans=0.0 +2024-07-28 00:31:36,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=84620.0, ans=0.125 +2024-07-28 00:31:38,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=84633.33333333333, ans=0.125 +2024-07-28 00:31:39,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=84633.33333333333, ans=0.09899494936611666 +2024-07-28 00:31:43,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=84633.33333333333, ans=0.025 +2024-07-28 00:31:43,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84633.33333333333, ans=0.1 +2024-07-28 00:31:44,326 INFO [train.py:1114] (2/4) Epoch 7, batch 2150, loss[loss=0.2158, simple_loss=0.2914, pruned_loss=0.07008, over 4906.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3035, pruned_loss=0.06992, over 944628.71 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:45,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=84646.66666666667, ans=0.125 +2024-07-28 00:31:54,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=84660.0, ans=0.125 +2024-07-28 00:32:05,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=84686.66666666667, ans=0.125 +2024-07-28 00:32:06,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=84686.66666666667, ans=0.0 +2024-07-28 00:32:07,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.41 vs. limit=10.0 +2024-07-28 00:32:15,129 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0 +2024-07-28 00:32:17,326 INFO [train.py:1114] (2/4) Epoch 7, batch 2200, loss[loss=0.2241, simple_loss=0.3062, pruned_loss=0.07104, over 4811.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3046, pruned_loss=0.07039, over 943729.63 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:32:22,577 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.863e+01 5.835e+01 6.281e+01 7.163e+01 1.109e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 00:32:25,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=84726.66666666667, ans=0.0 +2024-07-28 00:32:31,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=84740.0, ans=0.2 +2024-07-28 00:32:32,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.91 vs. 
limit=15.0 +2024-07-28 00:32:38,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=84753.33333333333, ans=0.125 +2024-07-28 00:32:52,120 INFO [train.py:1114] (2/4) Epoch 7, batch 2250, loss[loss=0.1897, simple_loss=0.2827, pruned_loss=0.0483, over 4692.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.305, pruned_loss=0.07063, over 942122.78 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:33:26,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=84833.33333333333, ans=0.0 +2024-07-28 00:33:27,450 INFO [train.py:1114] (2/4) Epoch 7, batch 2300, loss[loss=0.1997, simple_loss=0.2847, pruned_loss=0.05734, over 4942.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.304, pruned_loss=0.07055, over 939571.67 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:33:31,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=84846.66666666667, ans=0.025 +2024-07-28 00:33:32,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 5.907e+01 7.082e+01 8.177e+01 1.156e+02, threshold=1.416e+02, percent-clipped=0.0 +2024-07-28 00:33:33,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.63 vs. limit=22.5 +2024-07-28 00:33:35,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.97 vs. limit=15.0 +2024-07-28 00:33:37,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=84860.0, ans=0.0 +2024-07-28 00:33:42,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=84873.33333333333, ans=0.125 +2024-07-28 00:33:44,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=84873.33333333333, ans=0.05 +2024-07-28 00:33:56,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 00:34:03,230 INFO [train.py:1114] (2/4) Epoch 7, batch 2350, loss[loss=0.2325, simple_loss=0.3147, pruned_loss=0.07518, over 4637.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3035, pruned_loss=0.07007, over 941787.11 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:06,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=84913.33333333333, ans=0.125 +2024-07-28 00:34:08,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=84913.33333333333, ans=0.125 +2024-07-28 00:34:29,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=84953.33333333333, ans=0.0 +2024-07-28 00:34:38,017 INFO [train.py:1114] (2/4) Epoch 7, batch 2400, loss[loss=0.2277, simple_loss=0.3027, pruned_loss=0.07637, over 4644.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3034, pruned_loss=0.06992, over 941544.59 frames. 
], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:40,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.25 vs. limit=15.0 +2024-07-28 00:34:43,147 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 6.102e+01 6.788e+01 7.615e+01 1.111e+02, threshold=1.358e+02, percent-clipped=0.0 +2024-07-28 00:34:50,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=85006.66666666667, ans=0.125 +2024-07-28 00:34:51,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=85006.66666666667, ans=0.025 +2024-07-28 00:34:51,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=85006.66666666667, ans=0.0 +2024-07-28 00:35:11,392 INFO [train.py:1114] (2/4) Epoch 7, batch 2450, loss[loss=0.2449, simple_loss=0.3275, pruned_loss=0.08112, over 4699.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3059, pruned_loss=0.07112, over 936866.51 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:13,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.58 vs. limit=15.0 +2024-07-28 00:35:15,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=85046.66666666667, ans=0.2 +2024-07-28 00:35:18,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=85060.0, ans=0.95 +2024-07-28 00:35:18,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=85060.0, ans=0.125 +2024-07-28 00:35:19,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85060.0, ans=0.125 +2024-07-28 00:35:44,986 INFO [train.py:1114] (2/4) Epoch 7, batch 2500, loss[loss=0.2229, simple_loss=0.3075, pruned_loss=0.06911, over 4812.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3049, pruned_loss=0.0705, over 939135.70 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:50,133 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.036e+01 6.265e+01 6.846e+01 8.137e+01 1.168e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 00:35:58,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=85140.0, ans=0.0 +2024-07-28 00:36:07,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85153.33333333333, ans=0.1 +2024-07-28 00:36:09,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.48 vs. limit=22.5 +2024-07-28 00:36:15,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=85166.66666666667, ans=0.0 +2024-07-28 00:36:17,875 INFO [train.py:1114] (2/4) Epoch 7, batch 2550, loss[loss=0.199, simple_loss=0.2648, pruned_loss=0.06665, over 4800.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3038, pruned_loss=0.07034, over 938992.11 frames. 
], batch size: 11, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:18,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=85180.0, ans=0.0 +2024-07-28 00:36:22,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=85180.0, ans=0.125 +2024-07-28 00:36:28,812 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0 +2024-07-28 00:36:30,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=85206.66666666667, ans=0.2 +2024-07-28 00:36:43,454 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.016e-02 +2024-07-28 00:36:51,085 INFO [train.py:1114] (2/4) Epoch 7, batch 2600, loss[loss=0.2006, simple_loss=0.2881, pruned_loss=0.05653, over 4905.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3039, pruned_loss=0.07056, over 937932.13 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:56,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=85246.66666666667, ans=0.0 +2024-07-28 00:36:56,532 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+01 5.684e+01 6.063e+01 6.727e+01 1.050e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 00:37:01,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.96 vs. limit=22.5 +2024-07-28 00:37:02,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=85260.0, ans=0.0 +2024-07-28 00:37:05,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=85273.33333333333, ans=0.125 +2024-07-28 00:37:08,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=15.0 +2024-07-28 00:37:17,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85300.0, ans=0.125 +2024-07-28 00:37:21,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.72 vs. limit=15.0 +2024-07-28 00:37:24,483 INFO [train.py:1114] (2/4) Epoch 7, batch 2650, loss[loss=0.2449, simple_loss=0.3152, pruned_loss=0.0873, over 4614.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3044, pruned_loss=0.07067, over 939799.73 frames. ], batch size: 16, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:37:58,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=85366.66666666667, ans=0.125 +2024-07-28 00:37:58,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85366.66666666667, ans=0.1 +2024-07-28 00:37:59,899 INFO [train.py:1114] (2/4) Epoch 7, batch 2700, loss[loss=0.2187, simple_loss=0.3099, pruned_loss=0.06376, over 4738.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3051, pruned_loss=0.07089, over 939725.89 frames. 
], batch size: 14, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:05,160 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 5.828e+01 6.522e+01 7.194e+01 9.710e+01, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:38:13,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.73 vs. limit=15.0 +2024-07-28 00:38:14,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=85393.33333333333, ans=0.2 +2024-07-28 00:38:16,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=85406.66666666667, ans=0.0 +2024-07-28 00:38:26,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.54 vs. limit=22.5 +2024-07-28 00:38:27,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=85420.0, ans=0.09899494936611666 +2024-07-28 00:38:29,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=85433.33333333333, ans=0.0 +2024-07-28 00:38:38,361 INFO [train.py:1114] (2/4) Epoch 7, batch 2750, loss[loss=0.1973, simple_loss=0.2753, pruned_loss=0.05963, over 4706.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3041, pruned_loss=0.07067, over 939655.28 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:38,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=85446.66666666667, ans=0.125 +2024-07-28 00:38:48,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.87 vs. limit=15.0 +2024-07-28 00:38:50,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=85473.33333333333, ans=0.125 +2024-07-28 00:38:52,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=85473.33333333333, ans=0.025 +2024-07-28 00:39:06,870 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:39:13,524 INFO [train.py:1114] (2/4) Epoch 7, batch 2800, loss[loss=0.2724, simple_loss=0.3446, pruned_loss=0.1001, over 3165.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3051, pruned_loss=0.07105, over 937703.78 frames. ], batch size: 35, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:17,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.28 vs. limit=15.0 +2024-07-28 00:39:18,793 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.969e+01 6.581e+01 7.409e+01 1.159e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 00:39:26,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.91 vs. 
limit=15.0 +2024-07-28 00:39:31,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=85540.0, ans=0.125 +2024-07-28 00:39:39,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=85553.33333333333, ans=0.0 +2024-07-28 00:39:42,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=85566.66666666667, ans=0.02 +2024-07-28 00:39:45,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=85566.66666666667, ans=0.125 +2024-07-28 00:39:49,480 INFO [train.py:1114] (2/4) Epoch 7, batch 2850, loss[loss=0.2185, simple_loss=0.2993, pruned_loss=0.06885, over 4960.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3064, pruned_loss=0.07178, over 936134.56 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:40:06,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=85606.66666666667, ans=0.125 +2024-07-28 00:40:14,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=85620.0, ans=0.0 +2024-07-28 00:40:15,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=85620.0, ans=0.125 +2024-07-28 00:40:19,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=85633.33333333333, ans=0.125 +2024-07-28 00:40:23,190 INFO [train.py:1114] (2/4) Epoch 7, batch 2900, loss[loss=0.2648, simple_loss=0.3401, pruned_loss=0.09475, over 4833.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3072, pruned_loss=0.0718, over 939961.77 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:40:28,621 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.206e+01 7.013e+01 8.326e+01 1.461e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-28 00:40:31,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=85660.0, ans=0.125 +2024-07-28 00:40:41,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.13 vs. limit=22.5 +2024-07-28 00:40:43,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=85686.66666666667, ans=0.0 +2024-07-28 00:40:43,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=85686.66666666667, ans=0.0 +2024-07-28 00:40:44,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85686.66666666667, ans=0.125 +2024-07-28 00:40:50,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85686.66666666667, ans=0.0 +2024-07-28 00:41:00,423 INFO [train.py:1114] (2/4) Epoch 7, batch 2950, loss[loss=0.1928, simple_loss=0.2718, pruned_loss=0.05687, over 4711.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3057, pruned_loss=0.07138, over 938861.27 frames. 
[00:41:02 – 00:41:33: scaling.py ScheduledFloat, Whitening, and WithLoss telemetry elided.]
+2024-07-28 00:41:33,795 INFO [train.py:1114] (2/4) Epoch 7, batch 3000, loss[loss=0.2478, simple_loss=0.3276, pruned_loss=0.08395, over 4765.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3049, pruned_loss=0.07085, over 938277.71 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0
+2024-07-28 00:41:33,795 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 00:41:45,440 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.2568, 3.8207, 5.8424, 3.2891], device='cuda:2')
+2024-07-28 00:41:46,474 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2896, pruned_loss=0.04088, over 944034.00 frames.
+2024-07-28 00:41:46,475 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 00:41:51,998 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 6.009e+01 6.936e+01 8.242e+01 1.252e+02, threshold=1.387e+02, percent-clipped=0.0
[00:41:59 – 00:42:54: ScheduledFloat/Whitening telemetry and train.py:1114 summaries for batches 3050 (tot_loss loss=0.2252) and 3100 (tot_loss loss=0.2249) elided.]
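The optim.py WARNING lines above report five statistics (min, first quartile, median, third quartile, max) over recently observed gradient norms, and in these records the threshold consistently equals Clipping_scale times the median (e.g., 2.0 × 6.936e+01 = 1.387e+02). As a hedged illustration of that bookkeeping, not the actual optim.py code, the sketch below keeps a window of recent norms and produces a report in the same style; the class name `GradNormMonitor` and the window mechanics are assumptions of mine.

```python
import numpy as np

class GradNormMonitor:
    """Hypothetical tracker mimicking the style of the WARNING lines above:
    it keeps a window of recent gradient norms and reports their quartiles,
    a clipping threshold, and the percentage of norms that exceeded it."""

    def __init__(self, clipping_scale=2.0, window=128):
        self.clipping_scale = clipping_scale
        self.window = window
        self.norms = []

    def update(self, grad_norm):
        self.norms.append(float(grad_norm))
        self.norms = self.norms[-self.window:]  # keep only the recent window

    def report(self):
        q = np.percentile(self.norms, [0, 25, 50, 75, 100])
        threshold = self.clipping_scale * q[2]  # scale times the median norm
        clipped = 100.0 * np.mean(np.asarray(self.norms) > threshold)
        return ("grad-norm quartiles "
                + " ".join(f"{v:.3e}" for v in q)
                + f", threshold={threshold:.3e}, percent-clipped={clipped:.1f}")

monitor = GradNormMonitor()
for norm in np.random.default_rng(0).gamma(8.0, 8.0, size=200):
    monitor.update(norm)
print(monitor.report())
```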
[2024-07-28 00:42:57 – 01:09:13, rank (2/4): the log continues in the same pattern through Epoch 7, batch 5350. Recoverable highlights from the elided records: train.py:1114 summaries every 50 batches hold tot_loss loss ≈ 0.220–0.225 (simple_loss ≈ 0.302–0.307, pruned_loss ≈ 0.069–0.072, over ≈ 931k–944k frames); the learning rate steps from 1.09e-02 to 1.08e-02 at batch 3250 and to 1.07e-02 at batch 4500; grad_scale moves 64.0 → 32.0 (batch 3250) → 16.0 (batch 4850) → 32.0 (batch 5200); optim.py:487 WARNING lines report Clipping_scale=2.0 with thresholds between 1.233e+02 and 1.414e+02 and percent-clipped mostly 0.0 (occasionally 1.0, once 2.0 after a 1.851e+02 outlier at 01:01:32); scaling.py ScheduledFloat updates, Whitening metric checks, and WithLoss loss-sum=0.000e+00 records run over batch_count ≈ 85913–88913.]
+2024-07-28 01:09:13,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. 
], batch size: 13, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:07:51,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=88793.33333333333, ans=0.0 +2024-07-28 01:08:01,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.38 vs. limit=15.0 +2024-07-28 01:08:08,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88820.0, ans=0.1 +2024-07-28 01:08:09,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=88820.0, ans=0.05 +2024-07-28 01:08:18,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=88846.66666666667, ans=22.5 +2024-07-28 01:08:18,676 INFO [train.py:1114] (2/4) Epoch 7, batch 5300, loss[loss=0.2271, simple_loss=0.3083, pruned_loss=0.07291, over 4612.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3029, pruned_loss=0.06891, over 933926.56 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:08:18,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=88846.66666666667, ans=0.125 +2024-07-28 01:08:20,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=88846.66666666667, ans=0.2 +2024-07-28 01:08:20,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.50 vs. limit=22.5 +2024-07-28 01:08:22,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=88846.66666666667, ans=0.0 +2024-07-28 01:08:23,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=88846.66666666667, ans=0.025 +2024-07-28 01:08:24,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88860.0, ans=0.1 +2024-07-28 01:08:25,201 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.926e+01 6.505e+01 7.271e+01 1.034e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 01:08:27,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=88860.0, ans=0.025 +2024-07-28 01:08:36,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=88873.33333333333, ans=0.2 +2024-07-28 01:08:51,784 INFO [train.py:1114] (2/4) Epoch 7, batch 5350, loss[loss=0.2032, simple_loss=0.2801, pruned_loss=0.06319, over 4487.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3036, pruned_loss=0.0695, over 935845.47 frames. ], batch size: 10, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:08:55,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=88913.33333333333, ans=0.0 +2024-07-28 01:09:13,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. 
limit=15.0 +2024-07-28 01:09:13,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88953.33333333333, ans=0.1 +2024-07-28 01:09:14,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88953.33333333333, ans=0.0 +2024-07-28 01:09:30,877 INFO [train.py:1114] (2/4) Epoch 7, batch 5400, loss[loss=0.2229, simple_loss=0.3012, pruned_loss=0.07224, over 4381.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3027, pruned_loss=0.06932, over 930717.36 frames. ], batch size: 26, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:09:34,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-28 01:09:35,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=88980.0, ans=0.125 +2024-07-28 01:09:37,777 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.891e+01 6.586e+01 7.274e+01 1.067e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 01:09:38,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=88993.33333333333, ans=0.025 +2024-07-28 01:09:40,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=88993.33333333333, ans=0.0 +2024-07-28 01:09:43,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88993.33333333333, ans=0.1 +2024-07-28 01:09:43,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.02 vs. limit=15.0 +2024-07-28 01:09:51,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.06 vs. limit=15.0 +2024-07-28 01:09:51,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89020.0, ans=0.125 +2024-07-28 01:09:56,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=89020.0, ans=0.025 +2024-07-28 01:09:59,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=89033.33333333333, ans=0.125 +2024-07-28 01:10:01,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=89033.33333333333, ans=0.125 +2024-07-28 01:10:04,296 INFO [train.py:1114] (2/4) Epoch 7, batch 5450, loss[loss=0.2072, simple_loss=0.2715, pruned_loss=0.07144, over 4701.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3017, pruned_loss=0.06868, over 933724.70 frames. ], batch size: 11, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:10:06,727 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.61 vs. 
limit=15.0 +2024-07-28 01:10:08,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=89046.66666666667, ans=0.0 +2024-07-28 01:10:28,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=89086.66666666667, ans=0.125 +2024-07-28 01:10:32,264 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:10:45,388 INFO [train.py:1114] (2/4) Epoch 7, batch 5500, loss[loss=0.2247, simple_loss=0.3112, pruned_loss=0.06914, over 4316.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3021, pruned_loss=0.06907, over 931467.10 frames. ], batch size: 26, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:11:09,589 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 6.102e+01 6.764e+01 7.655e+01 1.015e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-28 01:11:13,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=89126.66666666667, ans=0.0 +2024-07-28 01:11:36,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=89140.0, ans=0.125 +2024-07-28 01:11:37,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=89140.0, ans=0.0 +2024-07-28 01:11:37,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.55 vs. limit=22.5 +2024-07-28 01:12:22,823 INFO [train.py:1114] (2/4) Epoch 7, batch 5550, loss[loss=0.1961, simple_loss=0.2725, pruned_loss=0.05987, over 4701.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3015, pruned_loss=0.06931, over 933276.79 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:12:33,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=89193.33333333333, ans=0.2 +2024-07-28 01:12:37,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89206.66666666667, ans=0.125 +2024-07-28 01:12:39,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.45 vs. limit=15.0 +2024-07-28 01:12:39,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89206.66666666667, ans=0.125 +2024-07-28 01:12:39,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=89206.66666666667, ans=0.125 +2024-07-28 01:12:41,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=89206.66666666667, ans=0.025 +2024-07-28 01:13:01,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.42 vs. 
limit=15.0 +2024-07-28 01:13:05,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=89233.33333333333, ans=0.0 +2024-07-28 01:13:07,199 INFO [train.py:1114] (2/4) Epoch 7, batch 5600, loss[loss=0.2351, simple_loss=0.3158, pruned_loss=0.07715, over 4734.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3032, pruned_loss=0.06996, over 934404.01 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:13:09,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.40 vs. limit=22.5 +2024-07-28 01:13:14,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89260.0, ans=0.1 +2024-07-28 01:13:14,837 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.099e+01 6.958e+01 8.233e+01 1.047e+02, threshold=1.392e+02, percent-clipped=0.0 +2024-07-28 01:13:21,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=89260.0, ans=0.0 +2024-07-28 01:13:25,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.20 vs. limit=15.0 +2024-07-28 01:13:31,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=89286.66666666667, ans=0.125 +2024-07-28 01:13:40,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.09 vs. limit=15.0 +2024-07-28 01:13:42,984 INFO [train.py:1114] (2/4) Epoch 7, batch 5650, loss[loss=0.2404, simple_loss=0.3197, pruned_loss=0.08053, over 4448.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3025, pruned_loss=0.06961, over 936965.63 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:13:59,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=89340.0, ans=0.125 +2024-07-28 01:14:04,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89353.33333333333, ans=0.125 +2024-07-28 01:14:05,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=89353.33333333333, ans=0.125 +2024-07-28 01:14:12,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-07-28 01:14:15,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.49 vs. limit=22.5 +2024-07-28 01:14:16,571 INFO [train.py:1114] (2/4) Epoch 7, batch 5700, loss[loss=0.2214, simple_loss=0.3062, pruned_loss=0.06833, over 4704.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3027, pruned_loss=0.0696, over 938098.80 frames. 
], batch size: 13, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:14:22,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=89393.33333333333, ans=0.5 +2024-07-28 01:14:23,483 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.798e+01 6.210e+01 7.158e+01 1.197e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 01:14:25,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=89393.33333333333, ans=0.04949747468305833 +2024-07-28 01:14:32,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=89406.66666666667, ans=0.125 +2024-07-28 01:14:36,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=89420.0, ans=0.125 +2024-07-28 01:14:38,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=89420.0, ans=0.05 +2024-07-28 01:14:43,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.56 vs. limit=22.5 +2024-07-28 01:14:50,060 INFO [train.py:1114] (2/4) Epoch 7, batch 5750, loss[loss=0.2325, simple_loss=0.3139, pruned_loss=0.07558, over 4793.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3027, pruned_loss=0.06953, over 937984.63 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:15:02,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 01:15:18,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=15.0 +2024-07-28 01:15:23,595 INFO [train.py:1114] (2/4) Epoch 7, batch 5800, loss[loss=0.2488, simple_loss=0.323, pruned_loss=0.08731, over 4750.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3036, pruned_loss=0.06967, over 937148.25 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:15:24,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.00 vs. limit=15.0 +2024-07-28 01:15:31,063 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.066e+01 6.586e+01 7.704e+01 1.621e+02, threshold=1.317e+02, percent-clipped=1.0 +2024-07-28 01:15:57,474 INFO [train.py:1114] (2/4) Epoch 7, batch 5850, loss[loss=0.2389, simple_loss=0.3244, pruned_loss=0.07668, over 4611.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3051, pruned_loss=0.07097, over 937943.77 frames. 
], batch size: 21, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:16:06,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=89593.33333333333, ans=0.2 +2024-07-28 01:16:12,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=89606.66666666667, ans=0.0 +2024-07-28 01:16:16,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=89606.66666666667, ans=0.04949747468305833 +2024-07-28 01:16:23,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=89620.0, ans=0.125 +2024-07-28 01:16:32,049 INFO [train.py:1114] (2/4) Epoch 7, batch 5900, loss[loss=0.2316, simple_loss=0.33, pruned_loss=0.06662, over 4687.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3051, pruned_loss=0.07099, over 938199.32 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:16:39,540 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 6.380e+01 7.370e+01 9.045e+01 1.525e+02, threshold=1.474e+02, percent-clipped=5.0 +2024-07-28 01:16:47,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89673.33333333333, ans=0.1 +2024-07-28 01:16:48,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=89673.33333333333, ans=0.125 +2024-07-28 01:16:50,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=89673.33333333333, ans=0.2 +2024-07-28 01:17:01,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89700.0, ans=0.125 +2024-07-28 01:17:07,212 INFO [train.py:1114] (2/4) Epoch 7, batch 5950, loss[loss=0.2856, simple_loss=0.3595, pruned_loss=0.1058, over 4688.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3047, pruned_loss=0.07033, over 940210.17 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:17:07,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0 +2024-07-28 01:17:13,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.78 vs. 
limit=15.0 +2024-07-28 01:17:16,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=89726.66666666667, ans=10.0 +2024-07-28 01:17:18,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=89726.66666666667, ans=0.0 +2024-07-28 01:17:22,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=89740.0, ans=0.125 +2024-07-28 01:17:26,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=89753.33333333333, ans=0.025 +2024-07-28 01:17:33,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=89766.66666666667, ans=0.0 +2024-07-28 01:17:38,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.88 vs. limit=15.0 +2024-07-28 01:17:42,384 INFO [train.py:1114] (2/4) Epoch 7, batch 6000, loss[loss=0.2758, simple_loss=0.358, pruned_loss=0.09676, over 4362.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3036, pruned_loss=0.06976, over 937128.25 frames. ], batch size: 26, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:17:42,384 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 01:17:48,659 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.6288, 5.9403, 5.8922, 6.4268], device='cuda:2') +2024-07-28 01:17:53,515 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.7689, 3.5198, 3.9559, 4.5360], device='cuda:2') +2024-07-28 01:17:53,603 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([4.8864, 3.2500, 3.9428, 4.8732, 4.8024, 3.8763, 4.3635, 3.4872], + device='cuda:2') +2024-07-28 01:17:54,532 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2893, pruned_loss=0.04109, over 944034.00 frames. +2024-07-28 01:17:54,532 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 01:18:03,856 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.859e+01 6.415e+01 7.407e+01 1.156e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 01:18:04,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=89793.33333333333, ans=0.0 +2024-07-28 01:18:08,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=89793.33333333333, ans=0.04949747468305833 +2024-07-28 01:18:20,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=89820.0, ans=0.125 +2024-07-28 01:18:31,921 INFO [train.py:1114] (2/4) Epoch 7, batch 6050, loss[loss=0.2103, simple_loss=0.2999, pruned_loss=0.06037, over 4781.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.3024, pruned_loss=0.06903, over 938670.02 frames. 
], batch size: 12, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:18:34,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89846.66666666667, ans=0.1 +2024-07-28 01:18:39,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=22.5 +2024-07-28 01:18:42,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-07-28 01:18:47,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=89873.33333333333, ans=0.0 +2024-07-28 01:18:58,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.75 vs. limit=15.0 +2024-07-28 01:19:02,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=89900.0, ans=0.125 +2024-07-28 01:19:05,233 INFO [train.py:1114] (2/4) Epoch 7, batch 6100, loss[loss=0.2207, simple_loss=0.2998, pruned_loss=0.07082, over 4677.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3014, pruned_loss=0.06881, over 937885.93 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:19:05,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89913.33333333333, ans=0.1 +2024-07-28 01:19:07,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=89913.33333333333, ans=0.125 +2024-07-28 01:19:12,436 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.958e+01 6.611e+01 7.776e+01 1.081e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 01:19:17,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=89926.66666666667, ans=0.125 +2024-07-28 01:19:18,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=89940.0, ans=0.0 +2024-07-28 01:19:31,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=89966.66666666667, ans=0.0 +2024-07-28 01:19:36,057 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:19:38,544 INFO [train.py:1114] (2/4) Epoch 7, batch 6150, loss[loss=0.2475, simple_loss=0.3151, pruned_loss=0.08991, over 3539.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3018, pruned_loss=0.06896, over 937094.29 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:19:39,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0 +2024-07-28 01:19:41,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89980.0, ans=0.125 +2024-07-28 01:19:43,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. 
limit=15.0 +2024-07-28 01:19:47,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=89993.33333333333, ans=0.2 +2024-07-28 01:19:49,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.08 vs. limit=10.0 +2024-07-28 01:19:59,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=90006.66666666667, ans=0.125 +2024-07-28 01:20:04,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=90020.0, ans=0.125 +2024-07-28 01:20:05,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90020.0, ans=0.1 +2024-07-28 01:20:15,038 INFO [train.py:1114] (2/4) Epoch 7, batch 6200, loss[loss=0.1999, simple_loss=0.2897, pruned_loss=0.05505, over 4737.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3026, pruned_loss=0.06925, over 936454.20 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:20:22,611 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.692e+01 6.027e+01 6.497e+01 7.393e+01 1.206e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 01:20:27,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=90060.0, ans=0.2 +2024-07-28 01:20:48,990 INFO [train.py:1114] (2/4) Epoch 7, batch 6250, loss[loss=0.2625, simple_loss=0.3542, pruned_loss=0.08546, over 4814.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3032, pruned_loss=0.06984, over 932992.39 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:20:55,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=90126.66666666667, ans=0.0 +2024-07-28 01:21:08,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=90153.33333333333, ans=0.125 +2024-07-28 01:21:15,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-07-28 01:21:20,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=90166.66666666667, ans=0.07 +2024-07-28 01:21:22,797 INFO [train.py:1114] (2/4) Epoch 7, batch 6300, loss[loss=0.21, simple_loss=0.2863, pruned_loss=0.06682, over 4550.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3037, pruned_loss=0.07032, over 930414.35 frames. ], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:21:24,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90180.0, ans=0.125 +2024-07-28 01:21:28,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=90193.33333333333, ans=0.125 +2024-07-28 01:21:29,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.979e+01 7.188e+01 8.735e+01 1.314e+02, threshold=1.438e+02, percent-clipped=1.0 +2024-07-28 01:21:33,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.13 vs. 
limit=10.0 +2024-07-28 01:21:36,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=90206.66666666667, ans=0.125 +2024-07-28 01:21:37,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=90206.66666666667, ans=0.125 +2024-07-28 01:21:54,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90233.33333333333, ans=0.1 +2024-07-28 01:21:55,885 INFO [train.py:1114] (2/4) Epoch 7, batch 6350, loss[loss=0.2642, simple_loss=0.3473, pruned_loss=0.09054, over 4527.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3034, pruned_loss=0.06991, over 934270.29 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:22:08,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90273.33333333333, ans=0.1 +2024-07-28 01:22:11,038 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. limit=6.0 +2024-07-28 01:22:28,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90313.33333333333, ans=0.1 +2024-07-28 01:22:29,298 INFO [train.py:1114] (2/4) Epoch 7, batch 6400, loss[loss=0.2239, simple_loss=0.3094, pruned_loss=0.0692, over 4639.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3031, pruned_loss=0.06977, over 935149.45 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:22:29,511 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:22:29,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=12.0 +2024-07-28 01:22:34,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=90313.33333333333, ans=0.05 +2024-07-28 01:22:36,603 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.990e+01 6.724e+01 8.012e+01 1.042e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-28 01:22:36,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=90326.66666666667, ans=0.2 +2024-07-28 01:22:47,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.79 vs. limit=12.0 +2024-07-28 01:22:56,035 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:22:56,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=90353.33333333333, ans=0.2 +2024-07-28 01:22:57,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90366.66666666667, ans=0.1 +2024-07-28 01:23:05,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90380.0, ans=0.125 +2024-07-28 01:23:06,112 INFO [train.py:1114] (2/4) Epoch 7, batch 6450, loss[loss=0.2363, simple_loss=0.3061, pruned_loss=0.08323, over 4471.00 frames. 
], tot_loss[loss=0.2219, simple_loss=0.3038, pruned_loss=0.07002, over 938575.43 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:23:15,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=90393.33333333333, ans=0.125 +2024-07-28 01:23:21,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=90406.66666666667, ans=0.125 +2024-07-28 01:23:25,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=90406.66666666667, ans=0.025 +2024-07-28 01:23:42,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.25 vs. limit=6.0 +2024-07-28 01:23:43,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=90433.33333333333, ans=0.0 +2024-07-28 01:23:46,189 INFO [train.py:1114] (2/4) Epoch 7, batch 6500, loss[loss=0.2972, simple_loss=0.3545, pruned_loss=0.12, over 3310.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3032, pruned_loss=0.06947, over 939861.15 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:23:49,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=90446.66666666667, ans=0.09899494936611666 +2024-07-28 01:23:57,750 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.820e+01 6.453e+01 7.206e+01 1.081e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 01:24:03,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.32 vs. limit=15.0 +2024-07-28 01:24:04,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=90460.0, ans=0.035 +2024-07-28 01:24:05,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90460.0, ans=0.125 +2024-07-28 01:24:09,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=90473.33333333333, ans=0.125 +2024-07-28 01:24:10,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=90473.33333333333, ans=0.0 +2024-07-28 01:24:11,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=90473.33333333333, ans=0.09899494936611666 +2024-07-28 01:24:11,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=90473.33333333333, ans=0.025 +2024-07-28 01:24:38,480 INFO [train.py:1114] (2/4) Epoch 7, batch 6550, loss[loss=0.16, simple_loss=0.2453, pruned_loss=0.03739, over 4812.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.302, pruned_loss=0.06873, over 943020.10 frames. ], batch size: 11, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:24:38,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.18 vs. 
limit=12.0 +2024-07-28 01:24:43,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=90513.33333333333, ans=0.125 +2024-07-28 01:24:53,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90540.0, ans=0.125 +2024-07-28 01:25:21,615 INFO [train.py:1114] (2/4) Epoch 7, batch 6600, loss[loss=0.3205, simple_loss=0.3978, pruned_loss=0.1216, over 4947.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3035, pruned_loss=0.06917, over 944963.31 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:25:24,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=90580.0, ans=0.125 +2024-07-28 01:25:29,267 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 6.007e+01 7.132e+01 8.613e+01 1.294e+02, threshold=1.426e+02, percent-clipped=1.0 +2024-07-28 01:25:29,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=90593.33333333333, ans=0.2 +2024-07-28 01:25:49,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=90620.0, ans=0.09899494936611666 +2024-07-28 01:25:50,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90633.33333333333, ans=0.125 +2024-07-28 01:25:50,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=12.0 +2024-07-28 01:25:55,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=90633.33333333333, ans=0.125 +2024-07-28 01:25:57,361 INFO [train.py:1114] (2/4) Epoch 7, batch 6650, loss[loss=0.199, simple_loss=0.2935, pruned_loss=0.05225, over 4642.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3025, pruned_loss=0.06869, over 943768.89 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:26:02,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=90646.66666666667, ans=0.125 +2024-07-28 01:26:02,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90646.66666666667, ans=0.125 +2024-07-28 01:26:06,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=90660.0, ans=0.125 +2024-07-28 01:26:10,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=90660.0, ans=0.0 +2024-07-28 01:26:19,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=90686.66666666667, ans=0.125 +2024-07-28 01:26:22,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=90686.66666666667, ans=0.2 +2024-07-28 01:26:33,329 INFO [train.py:1114] (2/4) Epoch 7, batch 6700, loss[loss=0.2577, simple_loss=0.3395, pruned_loss=0.08795, over 4726.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.3047, pruned_loss=0.0698, over 942357.88 frames. 
], batch size: 19, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:26:33,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90713.33333333333, ans=0.125 +2024-07-28 01:26:38,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=90713.33333333333, ans=0.125 +2024-07-28 01:26:40,650 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 5.978e+01 6.873e+01 8.305e+01 1.151e+02, threshold=1.375e+02, percent-clipped=0.0 +2024-07-28 01:27:07,209 INFO [train.py:1114] (2/4) Epoch 7, batch 6750, loss[loss=0.2472, simple_loss=0.3348, pruned_loss=0.0798, over 4376.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3045, pruned_loss=0.06979, over 940747.68 frames. ], batch size: 26, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:27:15,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=90793.33333333333, ans=0.2 +2024-07-28 01:27:20,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=90806.66666666667, ans=0.0 +2024-07-28 01:27:22,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90806.66666666667, ans=0.0 +2024-07-28 01:27:26,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.27 vs. limit=15.0 +2024-07-28 01:27:27,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=90820.0, ans=0.1 +2024-07-28 01:27:28,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90820.0, ans=0.125 +2024-07-28 01:27:33,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=90820.0, ans=0.125 +2024-07-28 01:27:40,991 INFO [train.py:1114] (2/4) Epoch 7, batch 6800, loss[loss=0.2338, simple_loss=0.3171, pruned_loss=0.07528, over 4639.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3046, pruned_loss=0.06921, over 939171.68 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:27:43,829 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:27:43,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90846.66666666667, ans=0.1 +2024-07-28 01:27:48,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.741e+01 6.354e+01 7.079e+01 9.743e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 01:27:58,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90873.33333333333, ans=0.1 +2024-07-28 01:27:58,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=90873.33333333333, ans=0.125 +2024-07-28 01:28:00,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.40 vs. 
limit=12.0 +2024-07-28 01:28:00,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90886.66666666667, ans=0.1 +2024-07-28 01:28:01,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.44 vs. limit=15.0 +2024-07-28 01:28:12,286 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.100e-01 +2024-07-28 01:28:13,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=90913.33333333333, ans=0.125 +2024-07-28 01:28:14,144 INFO [train.py:1114] (2/4) Epoch 7, batch 6850, loss[loss=0.2303, simple_loss=0.3162, pruned_loss=0.07218, over 4685.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3038, pruned_loss=0.06933, over 940749.52 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:28:15,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0 +2024-07-28 01:28:29,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=90940.0, ans=0.125 +2024-07-28 01:28:32,353 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.98 vs. limit=8.0 +2024-07-28 01:28:35,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=90953.33333333333, ans=0.0 +2024-07-28 01:28:48,215 INFO [train.py:1114] (2/4) Epoch 7, batch 6900, loss[loss=0.1712, simple_loss=0.2609, pruned_loss=0.04074, over 4962.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3039, pruned_loss=0.06933, over 942985.95 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:28:50,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. limit=15.0 +2024-07-28 01:28:57,320 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.878e+01 5.899e+01 6.510e+01 7.129e+01 1.062e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 01:29:08,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91006.66666666667, ans=0.1 +2024-07-28 01:29:14,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=91020.0, ans=0.025 +2024-07-28 01:29:17,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=91020.0, ans=0.07 +2024-07-28 01:29:17,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.71 vs. limit=15.0 +2024-07-28 01:29:18,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.01 vs. limit=15.0 +2024-07-28 01:29:25,543 INFO [train.py:1114] (2/4) Epoch 7, batch 6950, loss[loss=0.2052, simple_loss=0.2831, pruned_loss=0.06366, over 4509.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3039, pruned_loss=0.06954, over 940182.85 frames. 
], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:29:39,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.00 vs. limit=10.0 +2024-07-28 01:29:42,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=91073.33333333333, ans=0.125 +2024-07-28 01:29:47,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=91073.33333333333, ans=0.0 +2024-07-28 01:29:47,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=91073.33333333333, ans=0.125 +2024-07-28 01:29:56,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.70 vs. limit=22.5 +2024-07-28 01:30:02,418 INFO [train.py:1114] (2/4) Epoch 7, batch 7000, loss[loss=0.2511, simple_loss=0.3376, pruned_loss=0.08228, over 4596.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3039, pruned_loss=0.06953, over 938310.41 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:30:04,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.54 vs. limit=15.0 +2024-07-28 01:30:08,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. limit=6.0 +2024-07-28 01:30:09,587 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.067e+01 5.877e+01 6.787e+01 8.210e+01 1.500e+02, threshold=1.357e+02, percent-clipped=1.0 +2024-07-28 01:30:15,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-07-28 01:30:26,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91153.33333333333, ans=0.125 +2024-07-28 01:30:35,444 INFO [train.py:1114] (2/4) Epoch 7, batch 7050, loss[loss=0.26, simple_loss=0.3417, pruned_loss=0.0891, over 4786.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3043, pruned_loss=0.0695, over 942188.71 frames. ], batch size: 19, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:30:43,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=91193.33333333333, ans=6.0 +2024-07-28 01:30:46,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=91193.33333333333, ans=0.025 +2024-07-28 01:30:59,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=91220.0, ans=0.04949747468305833 +2024-07-28 01:31:00,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=91220.0, ans=0.0 +2024-07-28 01:31:04,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=91233.33333333333, ans=0.125 +2024-07-28 01:31:09,039 INFO [train.py:1114] (2/4) Epoch 7, batch 7100, loss[loss=0.2838, simple_loss=0.3555, pruned_loss=0.1061, over 4794.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3054, pruned_loss=0.07052, over 937090.13 frames. 
], batch size: 15, lr: 1.05e-02, grad_scale: 16.0
+2024-07-28 01:31:10,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91246.66666666667, ans=0.1
+2024-07-28 01:31:16,782 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.674e+01 6.634e+01 7.600e+01 1.129e+02, threshold=1.327e+02, percent-clipped=0.0
+2024-07-28 01:31:25,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=91273.33333333333, ans=0.125
+2024-07-28 01:31:27,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=91273.33333333333, ans=0.125
+2024-07-28 01:31:38,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91300.0, ans=0.125
+2024-07-28 01:31:41,777 INFO [train.py:1114] (2/4) Epoch 7, batch 7150, loss[loss=0.2189, simple_loss=0.303, pruned_loss=0.06735, over 4534.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3034, pruned_loss=0.06989, over 937993.18 frames. ], batch size: 21, lr: 1.05e-02, grad_scale: 16.0
+2024-07-28 01:31:43,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=91313.33333333333, ans=10.0
+2024-07-28 01:31:49,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=91326.66666666667, ans=0.125
+2024-07-28 01:31:57,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=91340.0, ans=0.0
+2024-07-28 01:31:57,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=91340.0, ans=0.025
+2024-07-28 01:32:04,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=91353.33333333333, ans=0.125
+2024-07-28 01:32:07,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=91353.33333333333, ans=0.1
+2024-07-28 01:32:12,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.86 vs. limit=15.0
+2024-07-28 01:32:13,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91366.66666666667, ans=0.125
+2024-07-28 01:32:14,603 INFO [train.py:1114] (2/4) Epoch 7, batch 7200, loss[loss=0.2223, simple_loss=0.3067, pruned_loss=0.06897, over 4792.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3024, pruned_loss=0.06918, over 938930.20 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:32:16,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=91380.0, ans=0.125
+2024-07-28 01:32:21,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91393.33333333333, ans=0.125
+2024-07-28 01:32:22,359 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 5.919e+01 6.755e+01 7.806e+01 1.038e+02, threshold=1.351e+02, percent-clipped=0.0
+2024-07-28 01:32:23,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=91393.33333333333, ans=0.0
+2024-07-28 01:32:29,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=91406.66666666667, ans=0.2
+2024-07-28 01:32:46,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=91433.33333333333, ans=0.125
+2024-07-28 01:32:49,503 INFO [train.py:1114] (2/4) Epoch 7, batch 7250, loss[loss=0.1964, simple_loss=0.2938, pruned_loss=0.04946, over 4845.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3025, pruned_loss=0.0695, over 940417.76 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:32:57,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91460.0, ans=0.1
+2024-07-28 01:33:11,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=91486.66666666667, ans=0.0
+2024-07-28 01:33:22,443 INFO [train.py:1114] (2/4) Epoch 7, batch 7300, loss[loss=0.176, simple_loss=0.2699, pruned_loss=0.04102, over 4854.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.301, pruned_loss=0.0688, over 940446.54 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:33:22,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=91513.33333333333, ans=0.125
+2024-07-28 01:33:24,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.69 vs. limit=12.0
+2024-07-28 01:33:26,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.57 vs. limit=15.0
+2024-07-28 01:33:28,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91526.66666666667, ans=0.125
+2024-07-28 01:33:30,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.315e+01 6.274e+01 7.077e+01 8.324e+01 1.199e+02, threshold=1.415e+02, percent-clipped=0.0
+2024-07-28 01:33:34,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.50 vs. limit=15.0
+2024-07-28 01:33:36,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=91540.0, ans=0.125
+2024-07-28 01:33:54,590 INFO [train.py:1114] (2/4) Epoch 7, batch 7350, loss[loss=0.2078, simple_loss=0.2942, pruned_loss=0.06075, over 4641.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.301, pruned_loss=0.06886, over 940016.64 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:33:54,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=91580.0, ans=0.0
+2024-07-28 01:33:55,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.45 vs. limit=10.0
+2024-07-28 01:34:00,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=91593.33333333333, ans=0.2
+2024-07-28 01:34:04,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=91593.33333333333, ans=0.125
+2024-07-28 01:34:14,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=12.0
+2024-07-28 01:34:16,039 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0
+2024-07-28 01:34:18,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=91620.0, ans=0.0
+2024-07-28 01:34:27,611 INFO [train.py:1114] (2/4) Epoch 7, batch 7400, loss[loss=0.1899, simple_loss=0.2964, pruned_loss=0.04167, over 4695.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3016, pruned_loss=0.06853, over 941080.12 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:34:29,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.86 vs. limit=15.0
+2024-07-28 01:34:33,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=91660.0, ans=0.0
+2024-07-28 01:34:35,812 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.824e+01 5.881e+01 6.822e+01 8.435e+01 1.377e+02, threshold=1.364e+02, percent-clipped=0.0
+2024-07-28 01:34:44,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91673.33333333333, ans=0.1
+2024-07-28 01:34:49,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=91686.66666666667, ans=0.2
+2024-07-28 01:34:50,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.46 vs. limit=15.0
+2024-07-28 01:34:57,913 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:35:01,067 INFO [train.py:1114] (2/4) Epoch 7, batch 7450, loss[loss=0.1963, simple_loss=0.2773, pruned_loss=0.05764, over 4612.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2996, pruned_loss=0.06774, over 938466.15 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:35:08,442 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:35:11,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=91726.66666666667, ans=0.125
+2024-07-28 01:35:12,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=91726.66666666667, ans=0.025
+2024-07-28 01:35:15,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=91740.0, ans=0.125
+2024-07-28 01:35:17,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=91740.0, ans=0.025
+2024-07-28 01:35:20,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=91753.33333333333, ans=0.125
+2024-07-28 01:35:32,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.49 vs. limit=6.0
+2024-07-28 01:35:34,019 INFO [train.py:1114] (2/4) Epoch 7, batch 7500, loss[loss=0.2783, simple_loss=0.3519, pruned_loss=0.1023, over 3363.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.3008, pruned_loss=0.06875, over 936789.91 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:35:35,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91780.0, ans=0.125
+2024-07-28 01:35:41,662 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.899e+01 6.430e+01 7.635e+01 1.398e+02, threshold=1.286e+02, percent-clipped=1.0
+2024-07-28 01:35:45,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=91793.33333333333, ans=0.2
+2024-07-28 01:35:53,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=91820.0, ans=0.1
+2024-07-28 01:35:54,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=91820.0, ans=0.0
+2024-07-28 01:36:07,906 INFO [train.py:1114] (2/4) Epoch 7, batch 7550, loss[loss=0.24, simple_loss=0.3101, pruned_loss=0.08499, over 4698.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3022, pruned_loss=0.06889, over 936561.05 frames. ], batch size: 17, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:36:11,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.77 vs. limit=22.5
+2024-07-28 01:36:19,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=91860.0, ans=15.0
+2024-07-28 01:36:23,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=91873.33333333333, ans=0.2
+2024-07-28 01:36:40,626 INFO [train.py:1114] (2/4) Epoch 7, batch 7600, loss[loss=0.1923, simple_loss=0.2735, pruned_loss=0.05557, over 4810.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3019, pruned_loss=0.06875, over 938249.80 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:36:46,338 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:36:49,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=91926.66666666667, ans=0.5
+2024-07-28 01:36:49,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91926.66666666667, ans=0.125
+2024-07-28 01:36:50,223 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.825e+01 5.658e+01 6.042e+01 7.178e+01 9.793e+01, threshold=1.208e+02, percent-clipped=0.0
+2024-07-28 01:36:51,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=91926.66666666667, ans=0.125
+2024-07-28 01:36:57,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=91940.0, ans=0.04949747468305833
+2024-07-28 01:36:58,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=91940.0, ans=0.125
+2024-07-28 01:36:59,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=91940.0, ans=0.2
+2024-07-28 01:37:04,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=91953.33333333333, ans=0.2
+2024-07-28 01:37:09,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=91966.66666666667, ans=0.125
+2024-07-28 01:37:14,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=91980.0, ans=0.125
+2024-07-28 01:37:15,246 INFO [train.py:1114] (2/4) Epoch 7, batch 7650, loss[loss=0.1846, simple_loss=0.2522, pruned_loss=0.05854, over 4928.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3027, pruned_loss=0.06935, over 937447.19 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:37:35,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=92020.0, ans=0.0
+2024-07-28 01:37:49,617 INFO [train.py:1114] (2/4) Epoch 7, batch 7700, loss[loss=0.2398, simple_loss=0.3167, pruned_loss=0.08151, over 4686.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3033, pruned_loss=0.06984, over 934578.79 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:37:51,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=92046.66666666667, ans=0.125
+2024-07-28 01:37:53,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.78 vs. limit=22.5
+2024-07-28 01:37:55,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=92060.0, ans=0.125
+2024-07-28 01:37:57,210 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.877e+01 6.503e+01 7.905e+01 1.085e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 01:38:08,211 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:38:13,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=92086.66666666667, ans=0.125
+2024-07-28 01:38:17,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=92100.0, ans=0.0
+2024-07-28 01:38:19,295 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:38:21,627 INFO [train.py:1114] (2/4) Epoch 7, batch 7750, loss[loss=0.2222, simple_loss=0.3063, pruned_loss=0.069, over 4932.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3042, pruned_loss=0.06994, over 935906.77 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:38:37,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=92140.0, ans=0.125
+2024-07-28 01:38:57,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.66 vs. limit=22.5
+2024-07-28 01:38:58,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=92166.66666666667, ans=0.015
+2024-07-28 01:38:59,296 INFO [train.py:1114] (2/4) Epoch 7, batch 7800, loss[loss=0.2307, simple_loss=0.317, pruned_loss=0.07226, over 4660.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3045, pruned_loss=0.06974, over 937617.94 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:39:06,963 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.790e+01 6.287e+01 7.177e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 01:39:39,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=92220.0, ans=0.2
+2024-07-28 01:39:41,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=92233.33333333333, ans=0.0
+2024-07-28 01:39:52,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=92246.66666666667, ans=0.0
+2024-07-28 01:39:53,517 INFO [train.py:1114] (2/4) Epoch 7, batch 7850, loss[loss=0.1924, simple_loss=0.2661, pruned_loss=0.05938, over 4496.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3045, pruned_loss=0.07005, over 935874.92 frames. ], batch size: 10, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:40:08,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=92273.33333333333, ans=0.1
+2024-07-28 01:40:22,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=92300.0, ans=0.125
+2024-07-28 01:40:23,001 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.66 vs. limit=15.0
+2024-07-28 01:40:23,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92300.0, ans=0.125
+2024-07-28 01:40:31,975 INFO [train.py:1114] (2/4) Epoch 7, batch 7900, loss[loss=0.2188, simple_loss=0.2997, pruned_loss=0.069, over 4872.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3067, pruned_loss=0.07092, over 933214.95 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:40:34,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0
+2024-07-28 01:40:35,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=92313.33333333333, ans=0.0
+2024-07-28 01:40:39,613 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 6.059e+01 6.486e+01 7.471e+01 1.043e+02, threshold=1.297e+02, percent-clipped=0.0
+2024-07-28 01:40:55,647 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=12.0
+2024-07-28 01:40:58,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=92353.33333333333, ans=0.025
+2024-07-28 01:40:59,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=92353.33333333333, ans=0.125
+2024-07-28 01:41:03,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92366.66666666667, ans=0.125
+2024-07-28 01:41:14,330 INFO [train.py:1114] (2/4) Epoch 7, batch 7950, loss[loss=0.2804, simple_loss=0.3485, pruned_loss=0.1061, over 3554.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3059, pruned_loss=0.07037, over 935408.82 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0
+2024-07-28 01:41:18,534 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:41:22,647 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. limit=15.0
+2024-07-28 01:41:38,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92420.0, ans=0.125
+2024-07-28 01:41:45,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=92433.33333333333, ans=0.125
+2024-07-28 01:41:47,113 INFO [train.py:1114] (2/4) Epoch 7, batch 8000, loss[loss=0.2042, simple_loss=0.2754, pruned_loss=0.06649, over 4621.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3037, pruned_loss=0.0692, over 934947.44 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:41:52,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=92446.66666666667, ans=0.125
+2024-07-28 01:41:55,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.079e+01 6.641e+01 7.975e+01 1.086e+02, threshold=1.328e+02, percent-clipped=0.0
+2024-07-28 01:42:04,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=92473.33333333333, ans=0.0
+2024-07-28 01:42:05,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=92473.33333333333, ans=0.0
+2024-07-28 01:42:20,850 INFO [train.py:1114] (2/4) Epoch 7, batch 8050, loss[loss=0.2121, simple_loss=0.2977, pruned_loss=0.06326, over 4808.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3038, pruned_loss=0.06946, over 934757.27 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:42:20,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=92513.33333333333, ans=0.0
+2024-07-28 01:42:31,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92526.66666666667, ans=0.125
+2024-07-28 01:42:36,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=92540.0, ans=0.025
+2024-07-28 01:42:43,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92553.33333333333, ans=0.1
+2024-07-28 01:42:49,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.28 vs. limit=22.5
+2024-07-28 01:42:53,557 INFO [train.py:1114] (2/4) Epoch 7, batch 8100, loss[loss=0.2469, simple_loss=0.3278, pruned_loss=0.08297, over 4790.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3052, pruned_loss=0.07029, over 934557.10 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:43:01,848 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 5.903e+01 6.479e+01 7.411e+01 1.026e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 01:43:02,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=92593.33333333333, ans=0.125
+2024-07-28 01:43:07,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92606.66666666667, ans=0.125
+2024-07-28 01:43:18,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92620.0, ans=0.1
+2024-07-28 01:43:27,019 INFO [train.py:1114] (2/4) Epoch 7, batch 8150, loss[loss=0.2108, simple_loss=0.3057, pruned_loss=0.05796, over 4812.00 frames. ], tot_loss[loss=0.221, simple_loss=0.303, pruned_loss=0.06955, over 937739.44 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:43:33,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=92660.0, ans=0.07
+2024-07-28 01:43:47,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. limit=15.0
+2024-07-28 01:43:49,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=92686.66666666667, ans=0.025
+2024-07-28 01:43:56,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=92700.0, ans=0.025
+2024-07-28 01:44:00,244 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0
+2024-07-28 01:44:00,495 INFO [train.py:1114] (2/4) Epoch 7, batch 8200, loss[loss=0.2534, simple_loss=0.3386, pruned_loss=0.08407, over 4786.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3026, pruned_loss=0.06878, over 938498.30 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:44:03,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=92713.33333333333, ans=0.125
+2024-07-28 01:44:06,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=92726.66666666667, ans=0.125
+2024-07-28 01:44:08,973 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 6.053e+01 7.008e+01 8.416e+01 1.296e+02, threshold=1.402e+02, percent-clipped=1.0
+2024-07-28 01:44:17,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=92740.0, ans=0.025
+2024-07-28 01:44:19,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92740.0, ans=0.125
+2024-07-28 01:44:26,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=92753.33333333333, ans=0.0
+2024-07-28 01:44:32,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.71 vs. limit=10.0
+2024-07-28 01:44:34,199 INFO [train.py:1114] (2/4) Epoch 7, batch 8250, loss[loss=0.2064, simple_loss=0.2968, pruned_loss=0.05796, over 4908.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3029, pruned_loss=0.06881, over 938796.76 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:44:43,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92793.33333333333, ans=0.125
+2024-07-28 01:44:48,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=92806.66666666667, ans=0.125
+2024-07-28 01:44:48,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92806.66666666667, ans=0.125
+2024-07-28 01:44:52,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=92806.66666666667, ans=0.125
+2024-07-28 01:44:57,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=92820.0, ans=0.1
+2024-07-28 01:45:06,704 INFO [train.py:1114] (2/4) Epoch 7, batch 8300, loss[loss=0.2415, simple_loss=0.3136, pruned_loss=0.08472, over 4894.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3042, pruned_loss=0.06972, over 938640.97 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0
+2024-07-28 01:45:08,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=92846.66666666667, ans=0.95
+2024-07-28 01:45:10,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=92846.66666666667, ans=0.0
+2024-07-28 01:45:15,032 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 6.122e+01 6.815e+01 8.383e+01 1.214e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-28 01:45:17,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=92860.0, ans=0.0
+2024-07-28 01:45:38,733 INFO [train.py:1114] (2/4) Epoch 7, batch 8350, loss[loss=0.2465, simple_loss=0.3193, pruned_loss=0.08682, over 4807.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3028, pruned_loss=0.06866, over 941268.39 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:45:40,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=92913.33333333333, ans=0.04949747468305833
+2024-07-28 01:45:42,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=92913.33333333333, ans=0.0
+2024-07-28 01:45:44,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=92913.33333333333, ans=0.0
+2024-07-28 01:45:49,485 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.48 vs. limit=22.5
+2024-07-28 01:45:52,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=92940.0, ans=0.125
+2024-07-28 01:45:56,473 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:45:57,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=92940.0, ans=0.125
+2024-07-28 01:45:57,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=92940.0, ans=0.125
+2024-07-28 01:46:00,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0
+2024-07-28 01:46:11,399 INFO [train.py:1114] (2/4) Epoch 7, batch 8400, loss[loss=0.2031, simple_loss=0.2952, pruned_loss=0.05553, over 4771.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3035, pruned_loss=0.06911, over 939804.66 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:46:14,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=92980.0, ans=0.025
+2024-07-28 01:46:20,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 5.850e+01 6.401e+01 7.146e+01 1.045e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-28 01:46:20,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=92993.33333333333, ans=0.125
+2024-07-28 01:46:28,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=93006.66666666667, ans=0.125
+2024-07-28 01:46:37,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=93033.33333333333, ans=0.2
+2024-07-28 01:46:37,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.94 vs. limit=10.0
+2024-07-28 01:46:37,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=15.0
+2024-07-28 01:46:43,920 INFO [train.py:1114] (2/4) Epoch 7, batch 8450, loss[loss=0.2307, simple_loss=0.3105, pruned_loss=0.07544, over 4789.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3048, pruned_loss=0.06957, over 938912.32 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:46:45,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=93046.66666666667, ans=0.125
+2024-07-28 01:46:52,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=93060.0, ans=0.1
+2024-07-28 01:47:11,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=93086.66666666667, ans=0.025
+2024-07-28 01:47:11,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=93086.66666666667, ans=0.0
+2024-07-28 01:47:19,793 INFO [train.py:1114] (2/4) Epoch 7, batch 8500, loss[loss=0.2229, simple_loss=0.2927, pruned_loss=0.0766, over 4608.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3042, pruned_loss=0.06964, over 938730.76 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:47:24,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=93113.33333333333, ans=0.04949747468305833
+2024-07-28 01:47:29,189 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 5.964e+01 6.473e+01 7.597e+01 1.017e+02, threshold=1.295e+02, percent-clipped=0.0
+2024-07-28 01:47:32,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.76 vs. limit=12.0
+2024-07-28 01:47:33,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93140.0, ans=0.1
+2024-07-28 01:47:37,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=93140.0, ans=0.125
+2024-07-28 01:47:44,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=93153.33333333333, ans=0.125
+2024-07-28 01:47:53,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0
+2024-07-28 01:47:53,744 INFO [train.py:1114] (2/4) Epoch 7, batch 8550, loss[loss=0.1705, simple_loss=0.2507, pruned_loss=0.04518, over 4787.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3025, pruned_loss=0.0693, over 939352.42 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:48:02,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93193.33333333333, ans=0.1
+2024-07-28 01:48:03,448 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:48:04,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93193.33333333333, ans=0.0
+2024-07-28 01:48:10,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=93206.66666666667, ans=0.0
+2024-07-28 01:48:22,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93233.33333333333, ans=0.125
+2024-07-28 01:48:22,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=93233.33333333333, ans=0.2
+2024-07-28 01:48:24,388 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:48:25,460 INFO [train.py:1114] (2/4) Epoch 7, batch 8600, loss[loss=0.2237, simple_loss=0.3086, pruned_loss=0.06943, over 4793.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3019, pruned_loss=0.06919, over 939284.50 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:48:26,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93246.66666666667, ans=0.1
+2024-07-28 01:48:27,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.86 vs. limit=22.5
+2024-07-28 01:48:29,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.92 vs. limit=10.0
+2024-07-28 01:48:33,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=93260.0, ans=0.125
+2024-07-28 01:48:35,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.089e+01 7.126e+01 9.182e+01 1.339e+02, threshold=1.425e+02, percent-clipped=2.0
+2024-07-28 01:48:51,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.66 vs. limit=15.0
+2024-07-28 01:48:54,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=93286.66666666667, ans=0.07
+2024-07-28 01:48:57,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=93300.0, ans=0.025
+2024-07-28 01:49:01,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=93300.0, ans=0.125
+2024-07-28 01:49:04,762 INFO [train.py:1114] (2/4) Epoch 7, batch 8650, loss[loss=0.2663, simple_loss=0.345, pruned_loss=0.09375, over 4895.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3018, pruned_loss=0.06944, over 940381.10 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:49:21,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=93326.66666666667, ans=0.125
+2024-07-28 01:49:30,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93340.0, ans=0.0
+2024-07-28 01:49:31,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0
+2024-07-28 01:49:36,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0
+2024-07-28 01:49:38,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93366.66666666667, ans=0.125
+2024-07-28 01:49:40,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=93366.66666666667, ans=0.125
+2024-07-28 01:49:46,878 INFO [train.py:1114] (2/4) Epoch 7, batch 8700, loss[loss=0.2096, simple_loss=0.2877, pruned_loss=0.06577, over 4761.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3029, pruned_loss=0.06999, over 938088.94 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:49:54,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.33 vs. limit=15.0
+2024-07-28 01:49:58,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.695e+01 6.363e+01 6.862e+01 1.009e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-28 01:50:05,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.16 vs. limit=10.0
+2024-07-28 01:50:13,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.47 vs. limit=22.5
+2024-07-28 01:50:14,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=93433.33333333333, ans=0.025
+2024-07-28 01:50:18,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=93433.33333333333, ans=0.125
+2024-07-28 01:50:21,411 INFO [train.py:1114] (2/4) Epoch 7, batch 8750, loss[loss=0.2001, simple_loss=0.2885, pruned_loss=0.05581, over 4680.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3032, pruned_loss=0.06988, over 935940.20 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:50:39,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93473.33333333333, ans=0.1
+2024-07-28 01:50:53,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=93500.0, ans=0.125
+2024-07-28 01:50:55,005 INFO [train.py:1114] (2/4) Epoch 7, batch 8800, loss[loss=0.2108, simple_loss=0.3105, pruned_loss=0.0556, over 4930.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3039, pruned_loss=0.07011, over 936867.99 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:51:02,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.05 vs. limit=15.0
+2024-07-28 01:51:03,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=93526.66666666667, ans=0.0
+2024-07-28 01:51:04,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93526.66666666667, ans=0.125
+2024-07-28 01:51:05,115 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.962e+01 6.661e+01 7.820e+01 1.016e+02, threshold=1.332e+02, percent-clipped=0.0
+2024-07-28 01:51:10,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=93540.0, ans=0.125
+2024-07-28 01:51:11,638 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:51:20,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=7.69 vs. limit=12.0
+2024-07-28 01:51:25,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=93566.66666666667, ans=0.0
+2024-07-28 01:51:26,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=93566.66666666667, ans=0.0
+2024-07-28 01:51:26,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=93566.66666666667, ans=0.1
+2024-07-28 01:51:28,578 INFO [train.py:1114] (2/4) Epoch 7, batch 8850, loss[loss=0.2732, simple_loss=0.3419, pruned_loss=0.1022, over 4416.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3038, pruned_loss=0.07004, over 931522.49 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:51:32,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=93580.0, ans=0.05
+2024-07-28 01:51:44,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=93606.66666666667, ans=0.125
+2024-07-28 01:51:47,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=93620.0, ans=0.025
+2024-07-28 01:51:49,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=93620.0, ans=0.2
+2024-07-28 01:51:53,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=93633.33333333333, ans=0.0
+2024-07-28 01:52:00,234 INFO [train.py:1114] (2/4) Epoch 7, batch 8900, loss[loss=0.193, simple_loss=0.2759, pruned_loss=0.05504, over 4934.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3038, pruned_loss=0.07004, over 929485.58 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:52:01,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=93646.66666666667, ans=0.125
+2024-07-28 01:52:09,251 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 6.236e+01 6.887e+01 8.483e+01 1.202e+02, threshold=1.377e+02, percent-clipped=0.0
+2024-07-28 01:52:10,742 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:52:12,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=93660.0, ans=10.0
+2024-07-28 01:52:15,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=93673.33333333333, ans=0.125
+2024-07-28 01:52:25,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=93700.0, ans=0.125
+2024-07-28 01:52:31,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.76 vs. limit=10.0
+2024-07-28 01:52:32,126 INFO [train.py:1114] (2/4) Epoch 7, batch 8950, loss[loss=0.2169, simple_loss=0.3057, pruned_loss=0.0641, over 4461.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3036, pruned_loss=0.06965, over 930994.99 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:52:34,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=93713.33333333333, ans=0.09899494936611666
+2024-07-28 01:52:42,523 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.81 vs. limit=15.0
+2024-07-28 01:52:51,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=93753.33333333333, ans=0.0
+2024-07-28 01:52:53,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=93753.33333333333, ans=0.09899494936611666
+2024-07-28 01:52:54,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93753.33333333333, ans=0.125
+2024-07-28 01:52:56,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93766.66666666667, ans=0.0
+2024-07-28 01:52:57,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=93766.66666666667, ans=0.2
+2024-07-28 01:52:58,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=93766.66666666667, ans=0.125
+2024-07-28 01:53:03,479 INFO [train.py:1114] (2/4) Epoch 7, batch 9000, loss[loss=0.2371, simple_loss=0.321, pruned_loss=0.07659, over 4637.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3021, pruned_loss=0.06864, over 933598.49 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:53:03,479 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 01:53:15,550 INFO [train.py:1146] (2/4) Epoch 7, validation: loss=0.1831, simple_loss=0.2876, pruned_loss=0.03931, over 944034.00 frames.
+2024-07-28 01:53:15,550 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 01:53:25,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.776e+01 6.458e+01 7.441e+01 1.035e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 01:53:39,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.15 vs. limit=6.0
+2024-07-28 01:53:54,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=93833.33333333333, ans=15.0
+2024-07-28 01:53:56,186 INFO [train.py:1114] (2/4) Epoch 7, batch 9050, loss[loss=0.1521, simple_loss=0.2373, pruned_loss=0.03342, over 4514.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.3005, pruned_loss=0.06788, over 934170.55 frames. ], batch size: 10, lr: 1.04e-02, grad_scale: 16.0
+2024-07-28 01:54:02,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=15.0
+2024-07-28 01:54:02,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=93860.0, ans=0.2
+2024-07-28 01:54:04,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=93860.0, ans=0.125
+2024-07-28 01:54:08,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=93873.33333333333, ans=0.05
+2024-07-28 01:54:11,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=93873.33333333333, ans=0.125
+2024-07-28 01:54:12,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=93873.33333333333, ans=0.2
+2024-07-28 01:54:19,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=93886.66666666667, ans=0.125
+2024-07-28 01:54:28,103 INFO [train.py:1114] (2/4) Epoch 7, batch 9100, loss[loss=0.2062, simple_loss=0.2974, pruned_loss=0.05752, over 4924.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2998, pruned_loss=0.06691, over 936570.40 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 16.0
+2024-07-28 01:54:28,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=93913.33333333333, ans=0.125
+2024-07-28 01:54:34,950 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 01:54:37,212 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+01 5.684e+01 6.462e+01 7.112e+01 1.033e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 01:54:46,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0
+2024-07-28 01:54:55,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=93966.66666666667, ans=0.0
+2024-07-28 01:54:55,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=93966.66666666667, ans=0.04949747468305833
+2024-07-28 01:54:58,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=93980.0, ans=0.0
+2024-07-28 01:54:59,361 INFO [train.py:1114] (2/4) Epoch 7, batch 9150, loss[loss=0.224, simple_loss=0.3104, pruned_loss=0.06878, over 4816.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.3009, pruned_loss=0.06734, over 935131.94 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 16.0
+2024-07-28 01:55:04,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0
+2024-07-28 01:55:16,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94006.66666666667, ans=0.0
+2024-07-28 01:55:30,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94033.33333333333, ans=0.125
+2024-07-28 01:55:33,115 INFO [train.py:1114] (2/4) Epoch 7, batch 9200, loss[loss=0.165, simple_loss=0.2447, pruned_loss=0.04262, over 4853.00 frames. ], tot_loss[loss=0.218, simple_loss=0.3006, pruned_loss=0.06775, over 936965.33 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:55:42,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.866e+01 6.542e+01 7.562e+01 1.078e+02, threshold=1.308e+02, percent-clipped=0.0
+2024-07-28 01:55:47,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=94073.33333333333, ans=0.2
+2024-07-28 01:55:50,874 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.29 vs. limit=15.0
+2024-07-28 01:55:52,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=60.08 vs. limit=15.0
+2024-07-28 01:56:00,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0
+2024-07-28 01:56:00,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=94100.0, ans=0.125
+2024-07-28 01:56:01,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=94100.0, ans=0.125
+2024-07-28 01:56:02,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=94100.0, ans=0.05
+2024-07-28 01:56:04,076 INFO [train.py:1114] (2/4) Epoch 7, batch 9250, loss[loss=0.2553, simple_loss=0.3446, pruned_loss=0.08302, over 4636.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.3003, pruned_loss=0.06738, over 937501.81 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:56:07,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94113.33333333333, ans=0.1
+2024-07-28 01:56:24,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=94153.33333333333, ans=0.0
+2024-07-28 01:56:26,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=94153.33333333333, ans=0.125
+2024-07-28 01:56:27,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=94153.33333333333, ans=0.125
+2024-07-28 01:56:35,474 INFO [train.py:1114] (2/4) Epoch 7, batch 9300, loss[loss=0.173, simple_loss=0.2496, pruned_loss=0.04823, over 4778.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.3, pruned_loss=0.06732, over 937634.79 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:56:41,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94193.33333333333, ans=0.125
+2024-07-28 01:56:44,613 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.679e+01 6.402e+01 7.728e+01 1.178e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-28 01:57:00,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=94233.33333333333, ans=0.125
+2024-07-28 01:57:03,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=94233.33333333333, ans=0.125
+2024-07-28 01:57:06,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=94246.66666666667, ans=0.2
+2024-07-28 01:57:07,395 INFO [train.py:1114] (2/4) Epoch 7, batch 9350, loss[loss=0.2075, simple_loss=0.2901, pruned_loss=0.06248, over 4803.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.3022, pruned_loss=0.06873, over 934419.42 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:57:08,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.87 vs. limit=22.5
+2024-07-28 01:57:12,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.47 vs. limit=6.0
+2024-07-28 01:57:27,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=94286.66666666667, ans=0.125
+2024-07-28 01:57:29,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=94286.66666666667, ans=0.0
+2024-07-28 01:57:33,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=94300.0, ans=0.0
+2024-07-28 01:57:38,746 INFO [train.py:1114] (2/4) Epoch 7, batch 9400, loss[loss=0.2154, simple_loss=0.3012, pruned_loss=0.06476, over 4686.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3025, pruned_loss=0.06902, over 932440.76 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:57:48,108 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 5.906e+01 6.522e+01 7.564e+01 1.110e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 01:57:51,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=94340.0, ans=0.125
+2024-07-28 01:57:59,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94353.33333333333, ans=0.1
+2024-07-28 01:57:59,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=94353.33333333333, ans=0.07
+2024-07-28 01:58:09,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.20 vs. limit=15.0
+2024-07-28 01:58:09,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=94380.0, ans=0.07
+2024-07-28 01:58:09,842 INFO [train.py:1114] (2/4) Epoch 7, batch 9450, loss[loss=0.1998, simple_loss=0.2751, pruned_loss=0.06228, over 4794.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3028, pruned_loss=0.06943, over 931848.28 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:58:16,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=94393.33333333333, ans=0.0
+2024-07-28 01:58:47,098 INFO [train.py:1114] (2/4) Epoch 7, batch 9500, loss[loss=0.2215, simple_loss=0.2906, pruned_loss=0.0762, over 4706.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.303, pruned_loss=0.06927, over 934102.21 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:58:48,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=94446.66666666667, ans=0.125
+2024-07-28 01:58:56,880 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 5.944e+01 6.483e+01 7.199e+01 9.045e+01, threshold=1.297e+02, percent-clipped=0.0
+2024-07-28 01:59:01,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=94473.33333333333, ans=0.05
+2024-07-28 01:59:10,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=94486.66666666667, ans=0.125
+2024-07-28 01:59:14,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=94500.0, ans=0.0
+2024-07-28 01:59:18,807 INFO [train.py:1114] (2/4) Epoch 7, batch 9550, loss[loss=0.2299, simple_loss=0.308, pruned_loss=0.07585, over 4786.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3023, pruned_loss=0.06886, over 931413.90 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:59:19,773 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.72 vs. limit=15.0
+2024-07-28 01:59:21,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=94513.33333333333, ans=0.0
+2024-07-28 01:59:22,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94513.33333333333, ans=0.125
+2024-07-28 01:59:23,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94513.33333333333, ans=0.125
+2024-07-28 01:59:37,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.60 vs. limit=15.0
+2024-07-28 01:59:44,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=94566.66666666667, ans=0.125
+2024-07-28 01:59:49,839 INFO [train.py:1114] (2/4) Epoch 7, batch 9600, loss[loss=0.3248, simple_loss=0.376, pruned_loss=0.1368, over 3232.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3026, pruned_loss=0.0688, over 930119.74 frames. ], batch size: 35, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 01:59:51,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=94580.0, ans=0.2
+2024-07-28 01:59:53,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94580.0, ans=0.125
+2024-07-28 01:59:59,439 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.136e+01 6.787e+01 7.890e+01 1.161e+02, threshold=1.357e+02, percent-clipped=0.0
+2024-07-28 02:00:11,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94620.0, ans=0.125
+2024-07-28 02:00:17,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0
+2024-07-28 02:00:18,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94633.33333333333, ans=0.125
+2024-07-28 02:00:21,635 INFO [train.py:1114] (2/4) Epoch 7, batch 9650, loss[loss=0.223, simple_loss=0.3119, pruned_loss=0.06707, over 4846.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3043, pruned_loss=0.0703, over 926379.08 frames. ], batch size: 16, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 02:00:27,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=94660.0, ans=0.0
+2024-07-28 02:00:27,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94660.0, ans=0.1
+2024-07-28 02:00:32,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94660.0, ans=0.125
+2024-07-28 02:00:34,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=94673.33333333333, ans=0.0
+2024-07-28 02:00:49,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=94700.0, ans=0.2
+2024-07-28 02:00:52,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=94713.33333333333, ans=0.125
+2024-07-28 02:00:52,843 INFO [train.py:1114] (2/4) Epoch 7, batch 9700, loss[loss=0.2585, simple_loss=0.3294, pruned_loss=0.09383, over 4291.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3043, pruned_loss=0.06995, over 924282.33 frames. ], batch size: 26, lr: 1.04e-02, grad_scale: 32.0
+2024-07-28 02:00:57,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=94713.33333333333, ans=0.0
+2024-07-28 02:00:59,661 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 02:00:59,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=94726.66666666667, ans=0.125
+2024-07-28 02:01:02,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+01 6.194e+01 6.881e+01 8.155e+01 1.257e+02, threshold=1.376e+02, percent-clipped=0.0
+2024-07-28 02:01:04,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=94726.66666666667, ans=0.025
+2024-07-28 02:01:04,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=94740.0, ans=0.1
+2024-07-28 02:01:09,339 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 02:01:24,057 INFO [train.py:1114] (2/4) Epoch 7, batch 9750, loss[loss=0.2754, simple_loss=0.3386, pruned_loss=0.1061, over 4667.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3036, pruned_loss=0.06968, over 924851.29 frames. ], batch size: 15, lr: 1.03e-02, grad_scale: 32.0
+2024-07-28 02:01:27,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=94780.0, ans=0.09899494936611666
+2024-07-28 02:01:28,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=94780.0, ans=0.125
+2024-07-28 02:01:36,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94793.33333333333, ans=0.125
+2024-07-28 02:01:45,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=94820.0, ans=0.125
+2024-07-28 02:01:48,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=94820.0, ans=0.2
+2024-07-28 02:01:51,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94833.33333333333, ans=0.0
+2024-07-28 02:01:56,369 INFO [train.py:1114] (2/4) Epoch 7, batch 9800, loss[loss=0.2095, simple_loss=0.2805, pruned_loss=0.06923, over 4705.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3019, pruned_loss=0.06878, over 924201.37 frames.
], batch size: 12, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:03,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=94860.0, ans=0.125 +2024-07-28 02:02:05,937 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.759e+01 6.678e+01 8.256e+01 1.240e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:02:07,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94860.0, ans=0.0 +2024-07-28 02:02:12,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94873.33333333333, ans=0.125 +2024-07-28 02:02:13,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.97 vs. limit=12.0 +2024-07-28 02:02:15,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.45 vs. limit=22.5 +2024-07-28 02:02:20,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=94886.66666666667, ans=0.2 +2024-07-28 02:02:24,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=94900.0, ans=0.0 +2024-07-28 02:02:27,306 INFO [train.py:1114] (2/4) Epoch 7, batch 9850, loss[loss=0.2497, simple_loss=0.338, pruned_loss=0.08071, over 4894.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3022, pruned_loss=0.069, over 926603.35 frames. ], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:27,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94913.33333333333, ans=0.1 +2024-07-28 02:02:32,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.26 vs. limit=10.0 +2024-07-28 02:02:47,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=94953.33333333333, ans=0.125 +2024-07-28 02:02:57,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=94966.66666666667, ans=0.0 +2024-07-28 02:02:58,786 INFO [train.py:1114] (2/4) Epoch 7, batch 9900, loss[loss=0.2254, simple_loss=0.2994, pruned_loss=0.07572, over 4838.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3025, pruned_loss=0.06939, over 925723.43 frames. ], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:08,108 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.950e+01 6.593e+01 7.492e+01 1.029e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:03:08,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94993.33333333333, ans=0.1 +2024-07-28 02:03:11,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=95006.66666666667, ans=0.0 +2024-07-28 02:03:13,125 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.55 vs. 
limit=22.5 +2024-07-28 02:03:19,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=95020.0, ans=0.2 +2024-07-28 02:03:28,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.40 vs. limit=8.0 +2024-07-28 02:03:29,265 INFO [train.py:1114] (2/4) Epoch 7, batch 9950, loss[loss=0.1754, simple_loss=0.2569, pruned_loss=0.04701, over 4789.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3034, pruned_loss=0.07001, over 928384.62 frames. ], batch size: 11, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:30,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=95046.66666666667, ans=0.2 +2024-07-28 02:03:31,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=95046.66666666667, ans=0.0 +2024-07-28 02:03:33,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95046.66666666667, ans=0.125 +2024-07-28 02:03:35,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=95060.0, ans=0.2 +2024-07-28 02:03:48,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95086.66666666667, ans=0.125 +2024-07-28 02:03:51,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=95086.66666666667, ans=0.125 +2024-07-28 02:04:00,315 INFO [train.py:1114] (2/4) Epoch 7, batch 10000, loss[loss=0.213, simple_loss=0.3119, pruned_loss=0.05706, over 4643.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3068, pruned_loss=0.07154, over 926716.23 frames. ], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:01,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=95113.33333333333, ans=0.0 +2024-07-28 02:04:06,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.76 vs. 
limit=22.5 +2024-07-28 02:04:09,349 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 5.825e+01 6.191e+01 6.916e+01 9.527e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:04:11,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=95126.66666666667, ans=0.0 +2024-07-28 02:04:13,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=95140.0, ans=0.2 +2024-07-28 02:04:15,709 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:04:18,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=95153.33333333333, ans=0.0 +2024-07-28 02:04:21,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=95153.33333333333, ans=0.125 +2024-07-28 02:04:22,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=95153.33333333333, ans=0.125 +2024-07-28 02:04:32,325 INFO [train.py:1114] (2/4) Epoch 7, batch 10050, loss[loss=0.2584, simple_loss=0.3276, pruned_loss=0.09459, over 3480.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3096, pruned_loss=0.07305, over 914600.52 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:43,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=95193.33333333333, ans=0.125 +2024-07-28 02:04:45,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=95193.33333333333, ans=0.07 +2024-07-28 02:05:08,116 INFO [train.py:1114] (2/4) Epoch 7, batch 10100, loss[loss=0.255, simple_loss=0.3293, pruned_loss=0.0903, over 3486.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3162, pruned_loss=0.08002, over 862054.86 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:17,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=95260.0, ans=0.125 +2024-07-28 02:05:17,908 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.232e+01 6.813e+01 7.421e+01 7.882e+01 1.006e+02, threshold=1.484e+02, percent-clipped=0.0 +2024-07-28 02:05:28,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=95286.66666666667, ans=0.07 +2024-07-28 02:05:32,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=95286.66666666667, ans=0.125 +2024-07-28 02:05:40,554 INFO [train.py:1114] (2/4) Epoch 7, batch 10150, loss[loss=0.2589, simple_loss=0.3253, pruned_loss=0.09624, over 3184.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.321, pruned_loss=0.08537, over 819608.39 frames. ], batch size: 36, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:51,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=95326.66666666667, ans=0.125 +2024-07-28 02:05:51,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=8.50 vs. 
limit=12.0 +2024-07-28 02:05:54,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=95326.66666666667, ans=0.125 +2024-07-28 02:06:02,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95353.33333333333, ans=0.125 +2024-07-28 02:06:05,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95353.33333333333, ans=0.1 +2024-07-28 02:06:14,353 INFO [train.py:1114] (2/4) Epoch 7, batch 10200, loss[loss=0.2771, simple_loss=0.3423, pruned_loss=0.106, over 3336.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3244, pruned_loss=0.08936, over 788023.38 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:06:18,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95380.0, ans=0.125 +2024-07-28 02:06:20,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=95393.33333333333, ans=0.0 +2024-07-28 02:06:24,489 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.931e+01 6.733e+01 7.121e+01 8.042e+01 1.219e+02, threshold=1.424e+02, percent-clipped=0.0 +2024-07-28 02:06:25,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=95393.33333333333, ans=0.125 +2024-07-28 02:07:11,987 INFO [train.py:1114] (2/4) Epoch 8, batch 0, loss[loss=0.1877, simple_loss=0.2759, pruned_loss=0.04971, over 4835.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2759, pruned_loss=0.04971, over 4835.00 frames. ], batch size: 12, lr: 9.72e-03, grad_scale: 32.0 +2024-07-28 02:07:11,988 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 02:07:22,455 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.3357, 4.0114, 2.9104, 2.6995], device='cuda:2') +2024-07-28 02:07:23,602 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.1876, simple_loss=0.2932, pruned_loss=0.04099, over 944034.00 frames. +2024-07-28 02:07:23,602 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 02:07:28,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.08 vs. limit=15.0 +2024-07-28 02:07:41,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-28 02:07:44,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-07-28 02:08:13,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95464.0, ans=0.125 +2024-07-28 02:08:23,641 INFO [train.py:1114] (2/4) Epoch 8, batch 50, loss[loss=0.2022, simple_loss=0.2785, pruned_loss=0.06297, over 4640.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3049, pruned_loss=0.07094, over 205918.13 frames. 
], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:08:33,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.07 vs. limit=15.0 +2024-07-28 02:08:50,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=6.0 +2024-07-28 02:08:54,397 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.904e+01 6.447e+01 7.403e+01 1.012e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 02:08:59,011 INFO [train.py:1114] (2/4) Epoch 8, batch 100, loss[loss=0.2345, simple_loss=0.3148, pruned_loss=0.07707, over 4631.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3045, pruned_loss=0.06929, over 364951.37 frames. ], batch size: 12, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:02,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=95544.0, ans=0.0 +2024-07-28 02:09:11,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=95570.66666666667, ans=0.0 +2024-07-28 02:09:19,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95584.0, ans=0.125 +2024-07-28 02:09:31,890 INFO [train.py:1114] (2/4) Epoch 8, batch 150, loss[loss=0.1771, simple_loss=0.2722, pruned_loss=0.04099, over 4621.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3024, pruned_loss=0.06815, over 493544.55 frames. ], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:38,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=95624.0, ans=0.0 +2024-07-28 02:09:43,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=95624.0, ans=0.125 +2024-07-28 02:09:51,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=95650.66666666667, ans=0.125 +2024-07-28 02:09:57,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95664.0, ans=0.0 +2024-07-28 02:09:57,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=95664.0, ans=0.0 +2024-07-28 02:10:00,299 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.617e+01 5.653e+01 6.192e+01 6.799e+01 9.993e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:10:05,092 INFO [train.py:1114] (2/4) Epoch 8, batch 200, loss[loss=0.2077, simple_loss=0.2881, pruned_loss=0.06364, over 4484.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3028, pruned_loss=0.06904, over 593132.51 frames. ], batch size: 21, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:13,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=95690.66666666667, ans=0.2 +2024-07-28 02:10:21,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=15.0 +2024-07-28 02:10:38,249 INFO [train.py:1114] (2/4) Epoch 8, batch 250, loss[loss=0.1871, simple_loss=0.2897, pruned_loss=0.0423, over 4622.00 frames. 
], tot_loss[loss=0.2203, simple_loss=0.3033, pruned_loss=0.06861, over 670241.03 frames. ], batch size: 16, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:46,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.96 vs. limit=10.0 +2024-07-28 02:10:47,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=95757.33333333333, ans=0.0 +2024-07-28 02:10:56,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=95770.66666666667, ans=0.125 +2024-07-28 02:11:01,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=95784.0, ans=0.95 +2024-07-28 02:11:06,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=95797.33333333333, ans=0.125 +2024-07-28 02:11:08,826 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.464e+01 5.845e+01 7.036e+01 8.606e+01 1.725e+02, threshold=1.407e+02, percent-clipped=4.0 +2024-07-28 02:11:11,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=95797.33333333333, ans=0.05 +2024-07-28 02:11:13,555 INFO [train.py:1114] (2/4) Epoch 8, batch 300, loss[loss=0.209, simple_loss=0.2993, pruned_loss=0.0593, over 4808.00 frames. ], tot_loss[loss=0.218, simple_loss=0.3012, pruned_loss=0.06742, over 729866.33 frames. ], batch size: 15, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:11:20,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=95824.0, ans=0.2 +2024-07-28 02:11:23,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=95824.0, ans=0.125 +2024-07-28 02:11:27,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95837.33333333333, ans=0.0 +2024-07-28 02:11:28,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=95837.33333333333, ans=0.025 +2024-07-28 02:11:35,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.56 vs. limit=12.0 +2024-07-28 02:11:38,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.20 vs. limit=10.0 +2024-07-28 02:11:39,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=95864.0, ans=0.0 +2024-07-28 02:11:46,922 INFO [train.py:1114] (2/4) Epoch 8, batch 350, loss[loss=0.2131, simple_loss=0.288, pruned_loss=0.06913, over 4939.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.3007, pruned_loss=0.06695, over 775702.25 frames. 
], batch size: 12, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:11:57,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=95890.66666666667, ans=0.0 +2024-07-28 02:12:01,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95890.66666666667, ans=0.125 +2024-07-28 02:12:01,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=95904.0, ans=0.04949747468305833 +2024-07-28 02:12:06,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=95904.0, ans=0.0 +2024-07-28 02:12:18,890 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.562e+01 6.059e+01 7.082e+01 1.101e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 02:12:23,531 INFO [train.py:1114] (2/4) Epoch 8, batch 400, loss[loss=0.2412, simple_loss=0.3293, pruned_loss=0.0766, over 4693.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.3005, pruned_loss=0.06625, over 813388.49 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:12:33,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95957.33333333333, ans=0.125 +2024-07-28 02:12:36,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=95970.66666666667, ans=0.1 +2024-07-28 02:12:40,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=95970.66666666667, ans=0.05 +2024-07-28 02:13:05,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=95997.33333333333, ans=0.125 +2024-07-28 02:13:06,710 INFO [train.py:1114] (2/4) Epoch 8, batch 450, loss[loss=0.2238, simple_loss=0.3244, pruned_loss=0.06158, over 4636.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.3015, pruned_loss=0.06689, over 838909.24 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:13:07,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=96010.66666666667, ans=0.125 +2024-07-28 02:13:08,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=96010.66666666667, ans=0.125 +2024-07-28 02:13:13,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=96024.0, ans=0.125 +2024-07-28 02:13:15,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=6.0 +2024-07-28 02:13:23,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=96037.33333333333, ans=0.025 +2024-07-28 02:13:38,592 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.894e+01 6.679e+01 8.075e+01 1.208e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:13:43,466 INFO [train.py:1114] (2/4) Epoch 8, batch 500, loss[loss=0.2563, simple_loss=0.3294, pruned_loss=0.0916, over 4677.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.3007, pruned_loss=0.06638, over 861442.30 frames. 
], batch size: 15, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:13:53,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.10 vs. limit=15.0 +2024-07-28 02:14:00,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=96104.0, ans=0.2 +2024-07-28 02:14:07,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.97 vs. limit=15.0 +2024-07-28 02:14:10,235 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:14:15,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96130.66666666667, ans=0.125 +2024-07-28 02:14:16,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=96144.0, ans=0.5 +2024-07-28 02:14:16,578 INFO [train.py:1114] (2/4) Epoch 8, batch 550, loss[loss=0.2685, simple_loss=0.3509, pruned_loss=0.0931, over 4637.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.3005, pruned_loss=0.06663, over 877913.59 frames. ], batch size: 17, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:38,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=96184.0, ans=0.125 +2024-07-28 02:14:47,414 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.729e+01 6.322e+01 7.437e+01 1.078e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 02:14:48,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=96197.33333333333, ans=0.0 +2024-07-28 02:14:52,253 INFO [train.py:1114] (2/4) Epoch 8, batch 600, loss[loss=0.2296, simple_loss=0.3185, pruned_loss=0.07038, over 4649.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.3001, pruned_loss=0.06655, over 892689.03 frames. ], batch size: 16, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:55,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.73 vs. limit=15.0 +2024-07-28 02:14:58,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0 +2024-07-28 02:15:04,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=96224.0, ans=0.0 +2024-07-28 02:15:16,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=96250.66666666667, ans=0.0 +2024-07-28 02:15:18,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=96264.0, ans=0.125 +2024-07-28 02:15:25,430 INFO [train.py:1114] (2/4) Epoch 8, batch 650, loss[loss=0.2545, simple_loss=0.3476, pruned_loss=0.08075, over 4749.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.3003, pruned_loss=0.06662, over 904114.48 frames. 
], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:15:28,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96277.33333333333, ans=0.125 +2024-07-28 02:15:53,648 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.095e+01 6.758e+01 8.122e+01 1.148e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 02:15:54,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96330.66666666667, ans=0.1 +2024-07-28 02:15:56,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=96330.66666666667, ans=0.025 +2024-07-28 02:15:58,368 INFO [train.py:1114] (2/4) Epoch 8, batch 700, loss[loss=0.1773, simple_loss=0.2591, pruned_loss=0.04772, over 4638.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.3, pruned_loss=0.06646, over 912089.54 frames. ], batch size: 12, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:13,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=96370.66666666667, ans=0.0 +2024-07-28 02:16:24,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=96397.33333333333, ans=0.125 +2024-07-28 02:16:32,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.71 vs. limit=15.0 +2024-07-28 02:16:33,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.81 vs. limit=22.5 +2024-07-28 02:16:33,900 INFO [train.py:1114] (2/4) Epoch 8, batch 750, loss[loss=0.2314, simple_loss=0.3225, pruned_loss=0.0701, over 4688.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2989, pruned_loss=0.0657, over 918739.07 frames. ], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:36,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.94 vs. limit=15.0 +2024-07-28 02:16:39,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.14 vs. limit=22.5 +2024-07-28 02:16:40,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.49 vs. limit=12.0 +2024-07-28 02:16:54,630 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5 +2024-07-28 02:17:02,888 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.806e+01 6.357e+01 7.174e+01 1.221e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 02:17:07,418 INFO [train.py:1114] (2/4) Epoch 8, batch 800, loss[loss=0.1929, simple_loss=0.2722, pruned_loss=0.05674, over 4855.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2978, pruned_loss=0.06537, over 924085.23 frames. 
], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:17:11,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96477.33333333333, ans=0.125 +2024-07-28 02:17:18,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.43 vs. limit=10.0 +2024-07-28 02:17:24,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=96504.0, ans=0.125 +2024-07-28 02:17:57,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=96530.66666666667, ans=0.125 +2024-07-28 02:17:58,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.38 vs. limit=22.5 +2024-07-28 02:17:58,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=96530.66666666667, ans=0.0 +2024-07-28 02:18:02,123 INFO [train.py:1114] (2/4) Epoch 8, batch 850, loss[loss=0.222, simple_loss=0.3209, pruned_loss=0.06158, over 4661.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2976, pruned_loss=0.06564, over 928489.40 frames. ], batch size: 14, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:06,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96544.0, ans=0.125 +2024-07-28 02:18:16,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=96570.66666666667, ans=0.025 +2024-07-28 02:18:28,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.14 vs. limit=15.0 +2024-07-28 02:18:28,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96584.0, ans=0.125 +2024-07-28 02:18:32,790 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.816e+01 6.612e+01 7.766e+01 1.010e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:18:35,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=96597.33333333333, ans=0.0 +2024-07-28 02:18:36,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.57 vs. limit=15.0 +2024-07-28 02:18:37,396 INFO [train.py:1114] (2/4) Epoch 8, batch 900, loss[loss=0.1986, simple_loss=0.2704, pruned_loss=0.06336, over 4865.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2986, pruned_loss=0.06635, over 928862.73 frames. ], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:43,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=96624.0, ans=0.0 +2024-07-28 02:18:45,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.85 vs. limit=15.0 +2024-07-28 02:18:45,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.98 vs. 
limit=12.0 +2024-07-28 02:18:46,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=96624.0, ans=0.05 +2024-07-28 02:19:01,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96650.66666666667, ans=0.125 +2024-07-28 02:19:05,606 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:19:10,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=96677.33333333333, ans=0.0 +2024-07-28 02:19:11,066 INFO [train.py:1114] (2/4) Epoch 8, batch 950, loss[loss=0.2006, simple_loss=0.2794, pruned_loss=0.06086, over 4781.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2989, pruned_loss=0.06616, over 930381.86 frames. ], batch size: 12, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:20,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=96690.66666666667, ans=0.125 +2024-07-28 02:19:21,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=96690.66666666667, ans=0.025 +2024-07-28 02:19:22,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=96690.66666666667, ans=0.125 +2024-07-28 02:19:30,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.34 vs. limit=15.0 +2024-07-28 02:19:32,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.15 vs. limit=15.0 +2024-07-28 02:19:40,111 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.894e+01 6.010e+01 6.768e+01 8.162e+01 1.047e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-28 02:19:42,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=96730.66666666667, ans=0.1 +2024-07-28 02:19:42,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=96730.66666666667, ans=0.025 +2024-07-28 02:19:44,741 INFO [train.py:1114] (2/4) Epoch 8, batch 1000, loss[loss=0.1872, simple_loss=0.2654, pruned_loss=0.05448, over 4965.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2992, pruned_loss=0.0666, over 930077.89 frames. 
], batch size: 13, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:45,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96744.0, ans=0.125 +2024-07-28 02:19:54,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96757.33333333333, ans=0.125 +2024-07-28 02:19:56,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96757.33333333333, ans=0.125 +2024-07-28 02:20:07,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=96784.0, ans=0.125 +2024-07-28 02:20:19,452 INFO [train.py:1114] (2/4) Epoch 8, batch 1050, loss[loss=0.2297, simple_loss=0.3225, pruned_loss=0.0684, over 4866.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.299, pruned_loss=0.06671, over 932315.98 frames. ], batch size: 14, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:20:24,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96810.66666666667, ans=0.1 +2024-07-28 02:20:34,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=96824.0, ans=0.0 +2024-07-28 02:20:42,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=96837.33333333333, ans=0.125 +2024-07-28 02:20:45,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.52 vs. limit=6.0 +2024-07-28 02:20:50,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=96850.66666666667, ans=0.0 +2024-07-28 02:21:00,904 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.947e+01 5.815e+01 6.423e+01 7.080e+01 9.595e+01, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:21:28,314 INFO [train.py:1114] (2/4) Epoch 8, batch 1100, loss[loss=0.2178, simple_loss=0.3031, pruned_loss=0.06626, over 4904.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2981, pruned_loss=0.06588, over 935116.22 frames. ], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:21:29,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=96877.33333333333, ans=0.125 +2024-07-28 02:25:34,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=96890.66666666667, ans=0.125 +2024-07-28 02:25:51,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=96930.66666666667, ans=0.0 +2024-07-28 02:25:56,431 INFO [train.py:1114] (2/4) Epoch 8, batch 1150, loss[loss=0.208, simple_loss=0.2932, pruned_loss=0.06138, over 4893.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2988, pruned_loss=0.06613, over 935056.54 frames. ], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:00,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.84 vs. 
limit=6.0 +2024-07-28 02:26:19,543 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:26:21,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=96984.0, ans=0.0 +2024-07-28 02:26:22,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=96984.0, ans=15.0 +2024-07-28 02:26:30,182 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.022e+01 6.608e+01 7.492e+01 1.273e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:26:41,671 INFO [train.py:1114] (2/4) Epoch 8, batch 1200, loss[loss=0.2106, simple_loss=0.3032, pruned_loss=0.05897, over 4872.00 frames. ], tot_loss[loss=0.217, simple_loss=0.3003, pruned_loss=0.06683, over 933597.19 frames. ], batch size: 14, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:41,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=97010.66666666667, ans=0.05 +2024-07-28 02:27:02,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=97050.66666666667, ans=0.0 +2024-07-28 02:27:09,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=97050.66666666667, ans=0.2 +2024-07-28 02:27:14,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97064.0, ans=0.1 +2024-07-28 02:27:16,981 INFO [train.py:1114] (2/4) Epoch 8, batch 1250, loss[loss=0.2683, simple_loss=0.3428, pruned_loss=0.0969, over 4803.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.3008, pruned_loss=0.06681, over 937801.61 frames. ], batch size: 15, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:27:20,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=97077.33333333333, ans=0.125 +2024-07-28 02:27:29,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=97090.66666666667, ans=0.0 +2024-07-28 02:27:47,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.611e+01 6.251e+01 6.902e+01 9.769e+01, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 02:27:52,328 INFO [train.py:1114] (2/4) Epoch 8, batch 1300, loss[loss=0.2395, simple_loss=0.318, pruned_loss=0.08045, over 4746.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2995, pruned_loss=0.0661, over 939038.42 frames. ], batch size: 19, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:28:00,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.11 vs. limit=15.0 +2024-07-28 02:28:05,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=15.0 +2024-07-28 02:28:23,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97184.0, ans=0.125 +2024-07-28 02:28:24,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.39 vs. 
limit=22.5 +2024-07-28 02:28:31,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.66 vs. limit=15.0 +2024-07-28 02:28:33,049 INFO [train.py:1114] (2/4) Epoch 8, batch 1350, loss[loss=0.2046, simple_loss=0.293, pruned_loss=0.05812, over 4763.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2982, pruned_loss=0.06538, over 940851.88 frames. ], batch size: 13, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:28:34,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=97210.66666666667, ans=0.125 +2024-07-28 02:28:44,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97224.0, ans=0.125 +2024-07-28 02:28:46,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=97224.0, ans=0.0 +2024-07-28 02:28:49,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=97237.33333333333, ans=0.0 +2024-07-28 02:28:51,193 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:28:54,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=97237.33333333333, ans=0.2 +2024-07-28 02:29:04,454 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.671e+01 8.189e+01 1.142e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:29:09,255 INFO [train.py:1114] (2/4) Epoch 8, batch 1400, loss[loss=0.1879, simple_loss=0.2619, pruned_loss=0.05689, over 4700.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2974, pruned_loss=0.0652, over 942710.87 frames. ], batch size: 11, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:10,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=97277.33333333333, ans=0.0 +2024-07-28 02:29:18,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=97290.66666666667, ans=0.0 +2024-07-28 02:29:19,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=97290.66666666667, ans=0.07 +2024-07-28 02:29:23,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=97304.0, ans=0.125 +2024-07-28 02:29:23,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97304.0, ans=0.125 +2024-07-28 02:29:23,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97304.0, ans=0.1 +2024-07-28 02:29:38,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=97330.66666666667, ans=0.5 +2024-07-28 02:29:43,482 INFO [train.py:1114] (2/4) Epoch 8, batch 1450, loss[loss=0.2238, simple_loss=0.31, pruned_loss=0.06877, over 4693.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2983, pruned_loss=0.06546, over 942464.68 frames. 
], batch size: 15, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:30:12,516 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.678e+01 6.336e+01 6.902e+01 9.292e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 02:30:16,527 INFO [train.py:1114] (2/4) Epoch 8, batch 1500, loss[loss=0.2353, simple_loss=0.3087, pruned_loss=0.08093, over 4810.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2993, pruned_loss=0.06641, over 942472.52 frames. ], batch size: 14, lr: 9.62e-03, grad_scale: 16.0 +2024-07-28 02:30:22,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=97410.66666666667, ans=0.1 +2024-07-28 02:30:24,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.66 vs. limit=15.0 +2024-07-28 02:30:42,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=97450.66666666667, ans=0.125 +2024-07-28 02:30:45,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97464.0, ans=0.125 +2024-07-28 02:30:50,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=97464.0, ans=0.1 +2024-07-28 02:30:51,717 INFO [train.py:1114] (2/4) Epoch 8, batch 1550, loss[loss=0.2236, simple_loss=0.3169, pruned_loss=0.06511, over 4910.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.3007, pruned_loss=0.06733, over 938569.32 frames. ], batch size: 15, lr: 9.61e-03, grad_scale: 16.0 +2024-07-28 02:30:53,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.65 vs. limit=22.5 +2024-07-28 02:30:54,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.38 vs. limit=22.5 +2024-07-28 02:31:13,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=97517.33333333333, ans=0.0 +2024-07-28 02:31:21,198 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.890e+01 6.503e+01 7.700e+01 2.674e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-28 02:31:21,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=97530.66666666667, ans=0.0 +2024-07-28 02:31:25,113 INFO [train.py:1114] (2/4) Epoch 8, batch 1600, loss[loss=0.1761, simple_loss=0.265, pruned_loss=0.04355, over 4878.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.3009, pruned_loss=0.06766, over 937651.04 frames. 
], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:31:31,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97557.33333333333, ans=0.1 +2024-07-28 02:31:38,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=97570.66666666667, ans=0.09899494936611666 +2024-07-28 02:31:39,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=97570.66666666667, ans=0.0 +2024-07-28 02:31:48,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=97584.0, ans=0.0 +2024-07-28 02:31:51,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=97584.0, ans=0.125 +2024-07-28 02:31:53,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-07-28 02:31:59,478 INFO [train.py:1114] (2/4) Epoch 8, batch 1650, loss[loss=0.1993, simple_loss=0.299, pruned_loss=0.04984, over 4666.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.3002, pruned_loss=0.06756, over 937694.34 frames. ], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:32:05,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.21 vs. limit=15.0 +2024-07-28 02:32:07,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=15.0 +2024-07-28 02:32:18,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=97637.33333333333, ans=0.05 +2024-07-28 02:32:21,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=97650.66666666667, ans=0.125 +2024-07-28 02:32:30,781 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.779e+01 6.597e+01 7.631e+01 1.276e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:32:31,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=97664.0, ans=0.125 +2024-07-28 02:32:32,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=97664.0, ans=0.125 +2024-07-28 02:32:32,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=97664.0, ans=0.125 +2024-07-28 02:32:34,705 INFO [train.py:1114] (2/4) Epoch 8, batch 1700, loss[loss=0.2075, simple_loss=0.2762, pruned_loss=0.06944, over 4697.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2997, pruned_loss=0.06683, over 939355.41 frames. ], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:32:39,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.98 vs. 
limit=15.0 +2024-07-28 02:32:42,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=97690.66666666667, ans=0.2 +2024-07-28 02:33:03,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97730.66666666667, ans=0.125 +2024-07-28 02:33:09,491 INFO [train.py:1114] (2/4) Epoch 8, batch 1750, loss[loss=0.1647, simple_loss=0.2372, pruned_loss=0.04607, over 4814.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2985, pruned_loss=0.06634, over 940013.89 frames. ], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:13,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-28 02:33:26,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0 +2024-07-28 02:33:42,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=97797.33333333333, ans=0.5 +2024-07-28 02:33:46,445 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.043e+01 5.863e+01 6.439e+01 7.161e+01 1.257e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 02:33:52,174 INFO [train.py:1114] (2/4) Epoch 8, batch 1800, loss[loss=0.2144, simple_loss=0.3005, pruned_loss=0.06418, over 4641.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2988, pruned_loss=0.06609, over 940718.80 frames. ], batch size: 13, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:34:18,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=97850.66666666667, ans=0.0 +2024-07-28 02:34:19,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=97850.66666666667, ans=0.0 +2024-07-28 02:34:25,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.16 vs. limit=22.5 +2024-07-28 02:34:34,882 INFO [train.py:1114] (2/4) Epoch 8, batch 1850, loss[loss=0.2337, simple_loss=0.3204, pruned_loss=0.07344, over 4807.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2985, pruned_loss=0.06598, over 940530.04 frames. ], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:34:36,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=97877.33333333333, ans=0.125 +2024-07-28 02:34:49,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=97904.0, ans=0.0 +2024-07-28 02:34:51,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. 
limit=6.0 +2024-07-28 02:34:54,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=97917.33333333333, ans=0.125 +2024-07-28 02:35:04,085 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.869e+01 6.668e+01 7.730e+01 1.207e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:35:04,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=97930.66666666667, ans=0.0 +2024-07-28 02:35:08,078 INFO [train.py:1114] (2/4) Epoch 8, batch 1900, loss[loss=0.2099, simple_loss=0.2988, pruned_loss=0.06053, over 4669.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2985, pruned_loss=0.06529, over 941771.18 frames. ], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:08,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97944.0, ans=0.125 +2024-07-28 02:35:09,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.96 vs. limit=12.0 +2024-07-28 02:35:10,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=97944.0, ans=0.125 +2024-07-28 02:35:13,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.59 vs. limit=12.0 +2024-07-28 02:35:14,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=97957.33333333333, ans=0.07 +2024-07-28 02:35:15,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=97957.33333333333, ans=0.0 +2024-07-28 02:35:20,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.63 vs. limit=8.0 +2024-07-28 02:35:31,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=97984.0, ans=0.125 +2024-07-28 02:35:32,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0 +2024-07-28 02:35:35,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=97997.33333333333, ans=0.125 +2024-07-28 02:35:41,031 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:35:41,416 INFO [train.py:1114] (2/4) Epoch 8, batch 1950, loss[loss=0.2199, simple_loss=0.3022, pruned_loss=0.06882, over 4890.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2988, pruned_loss=0.06531, over 943778.02 frames. ], batch size: 13, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:43,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-07-28 02:35:55,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=98037.33333333333, ans=0.025 +2024-07-28 02:36:06,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.20 vs. 
limit=15.0 +2024-07-28 02:36:09,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.71 vs. limit=15.0 +2024-07-28 02:36:12,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+01 5.706e+01 6.313e+01 6.898e+01 1.010e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 02:36:13,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=98064.0, ans=0.125 +2024-07-28 02:36:16,766 INFO [train.py:1114] (2/4) Epoch 8, batch 2000, loss[loss=0.2007, simple_loss=0.2742, pruned_loss=0.06365, over 4800.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2984, pruned_loss=0.0656, over 941473.78 frames. ], batch size: 11, lr: 9.58e-03, grad_scale: 32.0 +2024-07-28 02:36:24,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=98090.66666666667, ans=0.025 +2024-07-28 02:36:31,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0 +2024-07-28 02:36:34,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=98104.0, ans=0.0 +2024-07-28 02:36:37,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.85 vs. limit=10.0 +2024-07-28 02:36:50,206 INFO [train.py:1114] (2/4) Epoch 8, batch 2050, loss[loss=0.1945, simple_loss=0.2727, pruned_loss=0.05816, over 4620.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2981, pruned_loss=0.06553, over 939598.57 frames. ], batch size: 11, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:37:00,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=98157.33333333333, ans=0.0 +2024-07-28 02:37:01,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98157.33333333333, ans=0.1 +2024-07-28 02:37:02,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98157.33333333333, ans=0.125 +2024-07-28 02:37:04,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=98170.66666666667, ans=0.0 +2024-07-28 02:37:15,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=98184.0, ans=0.125 +2024-07-28 02:37:16,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=98197.33333333333, ans=0.025 +2024-07-28 02:37:20,056 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 6.128e+01 6.881e+01 8.380e+01 1.718e+02, threshold=1.376e+02, percent-clipped=3.0 +2024-07-28 02:37:20,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=98197.33333333333, ans=0.0 +2024-07-28 02:37:23,360 INFO [train.py:1114] (2/4) Epoch 8, batch 2100, loss[loss=0.2166, simple_loss=0.2997, pruned_loss=0.06676, over 4761.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2973, pruned_loss=0.06502, over 941258.13 frames. 
], batch size: 13, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:37:30,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=98224.0, ans=0.0 +2024-07-28 02:37:36,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=98237.33333333333, ans=0.2 +2024-07-28 02:37:37,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=98237.33333333333, ans=0.0 +2024-07-28 02:37:38,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98237.33333333333, ans=0.125 +2024-07-28 02:37:40,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=98237.33333333333, ans=0.0 +2024-07-28 02:37:46,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98250.66666666667, ans=0.125 +2024-07-28 02:37:50,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.02 vs. limit=22.5 +2024-07-28 02:37:54,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=98264.0, ans=0.125 +2024-07-28 02:37:56,404 INFO [train.py:1114] (2/4) Epoch 8, batch 2150, loss[loss=0.1914, simple_loss=0.272, pruned_loss=0.05538, over 4911.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2969, pruned_loss=0.06491, over 944424.76 frames. ], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:38:14,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.66 vs. limit=22.5 +2024-07-28 02:38:16,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.28 vs. limit=10.0 +2024-07-28 02:38:28,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.621e+01 6.298e+01 7.456e+01 1.063e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 02:38:31,419 INFO [train.py:1114] (2/4) Epoch 8, batch 2200, loss[loss=0.2511, simple_loss=0.3373, pruned_loss=0.08241, over 4808.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2964, pruned_loss=0.06487, over 943480.50 frames. 
], batch size: 14, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:38:31,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=98344.0, ans=0.125 +2024-07-28 02:38:46,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=98370.66666666667, ans=0.2 +2024-07-28 02:38:54,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98384.0, ans=0.1 +2024-07-28 02:38:57,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=98397.33333333333, ans=0.0 +2024-07-28 02:38:58,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=98397.33333333333, ans=0.125 +2024-07-28 02:39:06,718 INFO [train.py:1114] (2/4) Epoch 8, batch 2250, loss[loss=0.2217, simple_loss=0.3026, pruned_loss=0.07035, over 4697.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2966, pruned_loss=0.06493, over 942047.91 frames. ], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:09,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=98410.66666666667, ans=0.04949747468305833 +2024-07-28 02:39:19,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=98437.33333333333, ans=0.0 +2024-07-28 02:39:21,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=15.0 +2024-07-28 02:39:21,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98437.33333333333, ans=0.1 +2024-07-28 02:39:26,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98450.66666666667, ans=0.1 +2024-07-28 02:39:35,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.847e+01 6.592e+01 7.483e+01 1.040e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 02:39:39,170 INFO [train.py:1114] (2/4) Epoch 8, batch 2300, loss[loss=0.1736, simple_loss=0.2471, pruned_loss=0.0501, over 4941.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2954, pruned_loss=0.0648, over 939926.15 frames. ], batch size: 12, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:40:57,550 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-07-28 02:41:00,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=98490.66666666667, ans=0.2 +2024-07-28 02:41:00,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=98490.66666666667, ans=0.125 +2024-07-28 02:41:06,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=98504.0, ans=0.125 +2024-07-28 02:41:08,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.10 vs. 
limit=6.0 +2024-07-28 02:41:22,959 INFO [train.py:1114] (2/4) Epoch 8, batch 2350, loss[loss=0.2419, simple_loss=0.3233, pruned_loss=0.08021, over 4641.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2955, pruned_loss=0.06457, over 941908.38 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 16.0 +2024-07-28 02:41:41,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=98570.66666666667, ans=0.07 +2024-07-28 02:41:46,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98584.0, ans=0.125 +2024-07-28 02:41:53,309 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.834e+01 6.332e+01 7.540e+01 1.064e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 02:41:56,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=98610.66666666667, ans=0.125 +2024-07-28 02:41:56,623 INFO [train.py:1114] (2/4) Epoch 8, batch 2400, loss[loss=0.1825, simple_loss=0.2662, pruned_loss=0.04943, over 4631.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2961, pruned_loss=0.06457, over 941308.10 frames. ], batch size: 12, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:41:59,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=98610.66666666667, ans=0.0 +2024-07-28 02:42:00,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=98610.66666666667, ans=0.125 +2024-07-28 02:42:25,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0 +2024-07-28 02:42:26,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=98650.66666666667, ans=0.0 +2024-07-28 02:42:37,991 INFO [train.py:1114] (2/4) Epoch 8, batch 2450, loss[loss=0.2175, simple_loss=0.3121, pruned_loss=0.06145, over 4701.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2974, pruned_loss=0.06485, over 936839.10 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:44,583 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:42:44,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=98677.33333333333, ans=0.125 +2024-07-28 02:42:44,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0 +2024-07-28 02:42:45,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=98677.33333333333, ans=0.125 +2024-07-28 02:42:51,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=98690.66666666667, ans=0.125 +2024-07-28 02:43:03,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=98704.0, ans=0.0 +2024-07-28 02:43:05,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.77 vs. 
limit=22.5 +2024-07-28 02:43:06,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.54 vs. limit=6.0 +2024-07-28 02:43:21,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=98730.66666666667, ans=0.125 +2024-07-28 02:43:25,564 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.058e+01 6.704e+01 7.921e+01 1.237e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 02:43:28,909 INFO [train.py:1114] (2/4) Epoch 8, batch 2500, loss[loss=0.2409, simple_loss=0.3178, pruned_loss=0.082, over 4822.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.298, pruned_loss=0.06524, over 938796.25 frames. ], batch size: 14, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:43:30,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.21 vs. limit=8.0 +2024-07-28 02:43:35,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=98757.33333333333, ans=0.2 +2024-07-28 02:44:05,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=98770.66666666667, ans=0.1 +2024-07-28 02:44:17,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=15.0 +2024-07-28 02:44:21,339 INFO [train.py:1114] (2/4) Epoch 8, batch 2550, loss[loss=0.179, simple_loss=0.2595, pruned_loss=0.04931, over 4807.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2982, pruned_loss=0.06513, over 938468.39 frames. ], batch size: 11, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:44:21,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98810.66666666667, ans=0.1 +2024-07-28 02:44:44,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.45 vs. limit=22.5 +2024-07-28 02:44:44,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=98837.33333333333, ans=0.0 +2024-07-28 02:44:58,728 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.841e+01 6.423e+01 7.700e+01 1.142e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:45:02,081 INFO [train.py:1114] (2/4) Epoch 8, batch 2600, loss[loss=0.2084, simple_loss=0.2922, pruned_loss=0.06226, over 4894.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2985, pruned_loss=0.06514, over 937374.10 frames. ], batch size: 13, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:45:07,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.41 vs. 
limit=15.0 +2024-07-28 02:45:15,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=98904.0, ans=0.125 +2024-07-28 02:45:16,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=98904.0, ans=0.125 +2024-07-28 02:45:25,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.05 vs. limit=22.5 +2024-07-28 02:45:31,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98930.66666666667, ans=0.1 +2024-07-28 02:45:37,240 INFO [train.py:1114] (2/4) Epoch 8, batch 2650, loss[loss=0.19, simple_loss=0.2872, pruned_loss=0.04638, over 4658.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2989, pruned_loss=0.06521, over 939573.14 frames. ], batch size: 16, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:45:38,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=98944.0, ans=0.125 +2024-07-28 02:45:40,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=98944.0, ans=0.2 +2024-07-28 02:45:42,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98944.0, ans=0.1 +2024-07-28 02:45:45,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=15.0 +2024-07-28 02:45:49,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98970.66666666667, ans=0.125 +2024-07-28 02:46:03,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=98984.0, ans=10.0 +2024-07-28 02:46:08,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=98997.33333333333, ans=0.125 +2024-07-28 02:46:09,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=98997.33333333333, ans=0.0 +2024-07-28 02:46:09,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=98997.33333333333, ans=0.2 +2024-07-28 02:46:11,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.93 vs. limit=15.0 +2024-07-28 02:46:12,206 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.757e+01 6.469e+01 7.162e+01 1.151e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 02:46:18,785 INFO [train.py:1114] (2/4) Epoch 8, batch 2700, loss[loss=0.2627, simple_loss=0.3472, pruned_loss=0.08912, over 4740.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2995, pruned_loss=0.06573, over 939267.06 frames. ], batch size: 14, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:25,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=99024.0, ans=0.125 +2024-07-28 02:46:28,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.83 vs. 
limit=15.0 +2024-07-28 02:46:37,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=99037.33333333333, ans=15.0 +2024-07-28 02:46:40,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0 +2024-07-28 02:46:41,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=99050.66666666667, ans=0.2 +2024-07-28 02:46:45,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=99050.66666666667, ans=0.125 +2024-07-28 02:46:47,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99064.0, ans=0.1 +2024-07-28 02:46:53,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99064.0, ans=0.1 +2024-07-28 02:46:54,447 INFO [train.py:1114] (2/4) Epoch 8, batch 2750, loss[loss=0.1988, simple_loss=0.2734, pruned_loss=0.06211, over 4706.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2988, pruned_loss=0.06564, over 939313.04 frames. ], batch size: 12, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:56,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=12.0 +2024-07-28 02:47:17,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=99104.0, ans=0.95 +2024-07-28 02:47:17,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=99104.0, ans=0.025 +2024-07-28 02:47:31,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 6.000e+01 6.844e+01 8.152e+01 1.229e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 02:47:40,080 INFO [train.py:1114] (2/4) Epoch 8, batch 2800, loss[loss=0.2863, simple_loss=0.3523, pruned_loss=0.1101, over 3262.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2982, pruned_loss=0.06561, over 937241.86 frames. ], batch size: 35, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:47:43,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=99144.0, ans=0.035 +2024-07-28 02:47:44,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=99144.0, ans=0.125 +2024-07-28 02:47:47,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99157.33333333333, ans=0.1 +2024-07-28 02:47:48,177 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:47:48,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.00 vs. limit=15.0 +2024-07-28 02:47:49,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. 
limit=10.0 +2024-07-28 02:47:56,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=99170.66666666667, ans=0.0 +2024-07-28 02:47:57,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.98 vs. limit=12.0 +2024-07-28 02:48:07,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99184.0, ans=0.1 +2024-07-28 02:48:09,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=99184.0, ans=0.125 +2024-07-28 02:48:09,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=99197.33333333333, ans=0.09899494936611666 +2024-07-28 02:48:24,679 INFO [train.py:1114] (2/4) Epoch 8, batch 2850, loss[loss=0.1928, simple_loss=0.2788, pruned_loss=0.05346, over 4965.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2991, pruned_loss=0.06596, over 934992.44 frames. ], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:30,266 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:48:40,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=99237.33333333333, ans=0.0 +2024-07-28 02:48:42,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=99237.33333333333, ans=0.0 +2024-07-28 02:48:47,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=99250.66666666667, ans=0.2 +2024-07-28 02:48:49,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=99250.66666666667, ans=0.04949747468305833 +2024-07-28 02:48:52,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99264.0, ans=0.1 +2024-07-28 02:48:54,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.619e+01 6.304e+01 7.225e+01 1.077e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 02:48:54,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=99264.0, ans=0.125 +2024-07-28 02:48:57,361 INFO [train.py:1114] (2/4) Epoch 8, batch 2900, loss[loss=0.1985, simple_loss=0.2794, pruned_loss=0.05882, over 4824.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2998, pruned_loss=0.06613, over 938912.02 frames. ], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:58,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=99277.33333333333, ans=0.125 +2024-07-28 02:49:06,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=99290.66666666667, ans=0.125 +2024-07-28 02:49:13,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.06 vs. 
limit=10.0 +2024-07-28 02:49:19,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=99317.33333333333, ans=0.015 +2024-07-28 02:49:26,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.45 vs. limit=15.0 +2024-07-28 02:49:33,154 INFO [train.py:1114] (2/4) Epoch 8, batch 2950, loss[loss=0.2326, simple_loss=0.3085, pruned_loss=0.07832, over 4715.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2986, pruned_loss=0.06576, over 938085.37 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:49:41,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=99357.33333333333, ans=0.05 +2024-07-28 02:49:55,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99384.0, ans=0.1 +2024-07-28 02:50:01,128 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:50:04,307 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.988e+01 6.681e+01 8.290e+01 1.259e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:50:07,637 INFO [train.py:1114] (2/4) Epoch 8, batch 3000, loss[loss=0.1915, simple_loss=0.2699, pruned_loss=0.05655, over 4755.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2981, pruned_loss=0.06523, over 938024.25 frames. ], batch size: 13, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:50:07,637 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 02:50:54,529 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.1802, simple_loss=0.2848, pruned_loss=0.03781, over 944034.00 frames. +2024-07-28 02:50:54,530 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 02:50:56,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=99410.66666666667, ans=0.0 +2024-07-28 02:51:00,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=99410.66666666667, ans=0.125 +2024-07-28 02:51:00,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=99424.0, ans=0.125 +2024-07-28 02:51:03,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99424.0, ans=0.125 +2024-07-28 02:51:04,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=99424.0, ans=0.0 +2024-07-28 02:51:11,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=99437.33333333333, ans=0.05 +2024-07-28 02:51:14,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=99450.66666666667, ans=0.2 +2024-07-28 02:51:14,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=15.0 +2024-07-28 02:51:29,237 INFO [train.py:1114] (2/4) Epoch 8, batch 3050, loss[loss=0.1792, simple_loss=0.2709, pruned_loss=0.04374, over 4631.00 frames. 
], tot_loss[loss=0.215, simple_loss=0.2991, pruned_loss=0.06544, over 936857.82 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:51:32,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=99477.33333333333, ans=0.0 +2024-07-28 02:51:35,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.12 vs. limit=12.0 +2024-07-28 02:52:14,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=99517.33333333333, ans=0.2 +2024-07-28 02:52:15,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=99517.33333333333, ans=0.125 +2024-07-28 02:52:16,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=99517.33333333333, ans=0.125 +2024-07-28 02:52:22,248 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.732e+01 6.156e+01 7.183e+01 1.083e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 02:52:25,479 INFO [train.py:1114] (2/4) Epoch 8, batch 3100, loss[loss=0.2237, simple_loss=0.3186, pruned_loss=0.06441, over 4637.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2996, pruned_loss=0.06597, over 937324.62 frames. ], batch size: 16, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:52:33,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=99557.33333333333, ans=0.95 +2024-07-28 02:52:39,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.67 vs. limit=6.0 +2024-07-28 02:52:41,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.03 vs. limit=15.0 +2024-07-28 02:52:46,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=99584.0, ans=0.125 +2024-07-28 02:52:53,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.41 vs. limit=10.0 +2024-07-28 02:53:00,724 INFO [train.py:1114] (2/4) Epoch 8, batch 3150, loss[loss=0.2098, simple_loss=0.3068, pruned_loss=0.05634, over 4617.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.3001, pruned_loss=0.06645, over 936981.59 frames. ], batch size: 17, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:07,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 02:53:21,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=99637.33333333333, ans=0.2 +2024-07-28 02:53:22,548 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.17 vs. 
limit=15.0 +2024-07-28 02:53:33,336 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+01 5.840e+01 6.506e+01 7.424e+01 1.196e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 02:53:36,732 INFO [train.py:1114] (2/4) Epoch 8, batch 3200, loss[loss=0.2151, simple_loss=0.3041, pruned_loss=0.063, over 4824.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.299, pruned_loss=0.06574, over 938990.87 frames. ], batch size: 13, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:41,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=99677.33333333333, ans=0.0 +2024-07-28 02:53:42,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=99690.66666666667, ans=0.05 +2024-07-28 02:53:46,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=99690.66666666667, ans=0.025 +2024-07-28 02:53:51,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=99704.0, ans=0.0 +2024-07-28 02:54:07,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=99730.66666666667, ans=0.0 +2024-07-28 02:54:11,573 INFO [train.py:1114] (2/4) Epoch 8, batch 3250, loss[loss=0.222, simple_loss=0.3029, pruned_loss=0.07049, over 4926.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2991, pruned_loss=0.0656, over 939980.83 frames. ], batch size: 14, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:22,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=99757.33333333333, ans=0.125 +2024-07-28 02:54:32,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=99784.0, ans=0.125 +2024-07-28 02:54:41,674 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.887e+01 6.598e+01 7.799e+01 2.167e+02, threshold=1.320e+02, percent-clipped=1.0 +2024-07-28 02:54:45,026 INFO [train.py:1114] (2/4) Epoch 8, batch 3300, loss[loss=0.2576, simple_loss=0.341, pruned_loss=0.08711, over 4682.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2979, pruned_loss=0.06555, over 940240.56 frames. ], batch size: 19, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:51,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=99824.0, ans=0.0 +2024-07-28 02:54:52,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=99824.0, ans=0.125 +2024-07-28 02:54:58,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=99837.33333333333, ans=0.125 +2024-07-28 02:55:00,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=99837.33333333333, ans=0.125 +2024-07-28 02:55:05,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.00 vs. 
limit=15.0 +2024-07-28 02:55:06,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=99850.66666666667, ans=0.125 +2024-07-28 02:55:15,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=99864.0, ans=0.0 +2024-07-28 02:55:18,715 INFO [train.py:1114] (2/4) Epoch 8, batch 3350, loss[loss=0.2142, simple_loss=0.2982, pruned_loss=0.06508, over 4642.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2987, pruned_loss=0.06609, over 938536.93 frames. ], batch size: 17, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:29,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99890.66666666667, ans=0.125 +2024-07-28 02:55:31,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=99890.66666666667, ans=0.125 +2024-07-28 02:55:45,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=99917.33333333333, ans=0.0 +2024-07-28 02:55:50,391 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.817e+01 6.427e+01 7.197e+01 1.127e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:55:53,713 INFO [train.py:1114] (2/4) Epoch 8, batch 3400, loss[loss=0.1622, simple_loss=0.2434, pruned_loss=0.04053, over 4822.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2977, pruned_loss=0.06614, over 937144.82 frames. ], batch size: 11, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:53,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=99944.0, ans=0.04949747468305833 +2024-07-28 02:55:54,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99944.0, ans=0.125 +2024-07-28 02:55:56,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=99944.0, ans=0.125 +2024-07-28 02:55:57,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0 +2024-07-28 02:56:12,101 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.45 vs. 
limit=15.0 +2024-07-28 02:56:12,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=99970.66666666667, ans=0.125 +2024-07-28 02:56:19,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=99984.0, ans=0.125 +2024-07-28 02:56:20,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=99997.33333333333, ans=0.5 +2024-07-28 02:56:21,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=99997.33333333333, ans=0.125 +2024-07-28 02:56:25,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=99997.33333333333, ans=0.0 +2024-07-28 02:56:28,006 INFO [train.py:1114] (2/4) Epoch 8, batch 3450, loss[loss=0.2455, simple_loss=0.3366, pruned_loss=0.07714, over 4691.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2986, pruned_loss=0.06626, over 937528.39 frames. ], batch size: 19, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:56:33,619 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:56:38,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=100024.0, ans=0.125 +2024-07-28 02:56:44,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=100037.33333333333, ans=0.0 +2024-07-28 02:56:50,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=100050.66666666667, ans=0.2 +2024-07-28 02:56:58,309 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.830e+01 6.643e+01 7.875e+01 1.454e+02, threshold=1.329e+02, percent-clipped=3.0 +2024-07-28 02:57:01,712 INFO [train.py:1114] (2/4) Epoch 8, batch 3500, loss[loss=0.1891, simple_loss=0.2727, pruned_loss=0.05278, over 4950.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2983, pruned_loss=0.06607, over 938477.68 frames. ], batch size: 12, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:05,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=100077.33333333333, ans=0.0 +2024-07-28 02:57:07,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.16 vs. limit=22.5 +2024-07-28 02:57:26,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=100117.33333333333, ans=0.07 +2024-07-28 02:57:37,853 INFO [train.py:1114] (2/4) Epoch 8, batch 3550, loss[loss=0.2619, simple_loss=0.3443, pruned_loss=0.08974, over 4678.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2974, pruned_loss=0.06552, over 939461.35 frames. 
], batch size: 14, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:58:07,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100197.33333333333, ans=0.125 +2024-07-28 02:58:07,550 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.623e+01 5.767e+01 6.398e+01 7.244e+01 1.008e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 02:58:24,531 INFO [train.py:1114] (2/4) Epoch 8, batch 3600, loss[loss=0.2185, simple_loss=0.3044, pruned_loss=0.06627, over 4955.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.297, pruned_loss=0.06542, over 941340.08 frames. ], batch size: 13, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:58:34,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100210.66666666667, ans=0.1 +2024-07-28 02:58:48,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=100210.66666666667, ans=0.0 +2024-07-28 02:58:54,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0 +2024-07-28 02:59:18,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.04 vs. limit=22.5 +2024-07-28 02:59:25,350 INFO [train.py:1114] (2/4) Epoch 8, batch 3650, loss[loss=0.2362, simple_loss=0.3196, pruned_loss=0.07639, over 4892.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2967, pruned_loss=0.06524, over 941750.65 frames. ], batch size: 15, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:59:25,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100277.33333333333, ans=0.1 +2024-07-28 02:59:26,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100277.33333333333, ans=0.125 +2024-07-28 02:59:27,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=100277.33333333333, ans=0.1 +2024-07-28 02:59:27,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=100277.33333333333, ans=0.2 +2024-07-28 02:59:32,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=100290.66666666667, ans=0.2 +2024-07-28 02:59:34,407 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:59:42,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=100304.0, ans=0.125 +2024-07-28 02:59:45,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=100317.33333333333, ans=0.0 +2024-07-28 02:59:48,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=100317.33333333333, ans=0.09899494936611666 +2024-07-28 02:59:51,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.23 vs. 
limit=22.5 +2024-07-28 02:59:55,980 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.900e+01 6.500e+01 7.963e+01 1.457e+02, threshold=1.300e+02, percent-clipped=1.0 +2024-07-28 02:59:59,598 INFO [train.py:1114] (2/4) Epoch 8, batch 3700, loss[loss=0.2272, simple_loss=0.3147, pruned_loss=0.06983, over 4940.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2954, pruned_loss=0.0639, over 942887.80 frames. ], batch size: 14, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 03:00:05,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=100344.0, ans=0.0 +2024-07-28 03:00:18,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=100370.66666666667, ans=0.0 +2024-07-28 03:00:19,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.45 vs. limit=22.5 +2024-07-28 03:00:26,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100384.0, ans=0.125 +2024-07-28 03:00:26,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=100384.0, ans=0.2 +2024-07-28 03:00:34,746 INFO [train.py:1114] (2/4) Epoch 8, batch 3750, loss[loss=0.1939, simple_loss=0.2719, pruned_loss=0.05795, over 4793.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2952, pruned_loss=0.06374, over 944617.58 frames. ], batch size: 11, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:00:44,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100424.0, ans=0.1 +2024-07-28 03:00:47,731 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:00:50,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=100437.33333333333, ans=0.0 +2024-07-28 03:00:53,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.73 vs. limit=22.5 +2024-07-28 03:00:59,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=100450.66666666667, ans=0.125 +2024-07-28 03:01:01,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=100464.0, ans=0.125 +2024-07-28 03:01:04,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=15.0 +2024-07-28 03:01:05,686 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.636e+01 6.396e+01 7.360e+01 1.035e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 03:01:06,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=100464.0, ans=0.125 +2024-07-28 03:01:08,363 INFO [train.py:1114] (2/4) Epoch 8, batch 3800, loss[loss=0.2187, simple_loss=0.3041, pruned_loss=0.0666, over 4807.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2955, pruned_loss=0.06362, over 942725.34 frames. 
], batch size: 14, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:01:09,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=12.0 +2024-07-28 03:01:19,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=100490.66666666667, ans=22.5 +2024-07-28 03:01:33,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=100504.0, ans=0.04949747468305833 +2024-07-28 03:01:44,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=100517.33333333333, ans=0.2 +2024-07-28 03:01:44,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=100517.33333333333, ans=0.1 +2024-07-28 03:01:46,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=100517.33333333333, ans=0.0 +2024-07-28 03:01:47,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=100530.66666666667, ans=0.125 +2024-07-28 03:01:55,206 INFO [train.py:1114] (2/4) Epoch 8, batch 3850, loss[loss=0.2165, simple_loss=0.3005, pruned_loss=0.06629, over 4650.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2938, pruned_loss=0.06261, over 942975.24 frames. ], batch size: 16, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:02:04,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=100557.33333333333, ans=0.0 +2024-07-28 03:02:07,704 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.13 vs. limit=15.0 +2024-07-28 03:02:14,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100584.0, ans=0.125 +2024-07-28 03:02:21,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=100597.33333333333, ans=0.1 +2024-07-28 03:02:29,300 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.806e+01 6.374e+01 7.296e+01 1.382e+02, threshold=1.275e+02, percent-clipped=1.0 +2024-07-28 03:02:36,098 INFO [train.py:1114] (2/4) Epoch 8, batch 3900, loss[loss=0.1989, simple_loss=0.2821, pruned_loss=0.05783, over 4806.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2944, pruned_loss=0.06289, over 943047.23 frames. ], batch size: 14, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:02:44,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=100624.0, ans=0.125 +2024-07-28 03:03:01,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=100650.66666666667, ans=0.125 +2024-07-28 03:03:06,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=100650.66666666667, ans=0.125 +2024-07-28 03:03:14,064 INFO [train.py:1114] (2/4) Epoch 8, batch 3950, loss[loss=0.2465, simple_loss=0.3302, pruned_loss=0.08138, over 4839.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2952, pruned_loss=0.06321, over 944833.63 frames. 
], batch size: 16, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:03:18,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=100677.33333333333, ans=0.125 +2024-07-28 03:03:26,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=100690.66666666667, ans=0.025 +2024-07-28 03:03:27,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=100704.0, ans=0.125 +2024-07-28 03:03:44,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.725e+01 6.427e+01 7.427e+01 2.052e+02, threshold=1.285e+02, percent-clipped=1.0 +2024-07-28 03:04:02,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=100744.0, ans=0.0 +2024-07-28 03:04:03,424 INFO [train.py:1114] (2/4) Epoch 8, batch 4000, loss[loss=0.1704, simple_loss=0.2493, pruned_loss=0.04577, over 4766.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2957, pruned_loss=0.06403, over 940317.89 frames. ], batch size: 12, lr: 9.46e-03, grad_scale: 32.0 +2024-07-28 03:04:27,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100770.66666666667, ans=0.125 +2024-07-28 03:04:32,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=100784.0, ans=0.125 +2024-07-28 03:04:39,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.28 vs. limit=22.5 +2024-07-28 03:04:44,266 INFO [train.py:1114] (2/4) Epoch 8, batch 4050, loss[loss=0.2773, simple_loss=0.3477, pruned_loss=0.1035, over 3110.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2955, pruned_loss=0.06383, over 938786.45 frames. ], batch size: 35, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:04:51,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=100824.0, ans=0.125 +2024-07-28 03:04:55,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=100824.0, ans=0.0 +2024-07-28 03:05:09,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=100850.66666666667, ans=0.0 +2024-07-28 03:05:11,073 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.12 vs. limit=15.0 +2024-07-28 03:05:12,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=100850.66666666667, ans=0.07 +2024-07-28 03:05:17,170 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.984e+01 6.561e+01 7.849e+01 1.305e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-28 03:05:18,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.12 vs. limit=10.0 +2024-07-28 03:05:19,919 INFO [train.py:1114] (2/4) Epoch 8, batch 4100, loss[loss=0.2112, simple_loss=0.3014, pruned_loss=0.06049, over 4911.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2961, pruned_loss=0.06448, over 937973.59 frames. 
], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:06:40,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=100930.66666666667, ans=0.125 +2024-07-28 03:06:43,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.58 vs. limit=15.0 +2024-07-28 03:06:43,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=100930.66666666667, ans=0.95 +2024-07-28 03:06:46,157 INFO [train.py:1114] (2/4) Epoch 8, batch 4150, loss[loss=0.171, simple_loss=0.2643, pruned_loss=0.0389, over 4826.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2962, pruned_loss=0.06458, over 937452.03 frames. ], batch size: 13, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:06:52,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.63 vs. limit=15.0 +2024-07-28 03:06:58,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=100970.66666666667, ans=0.025 +2024-07-28 03:07:26,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=100997.33333333333, ans=0.0 +2024-07-28 03:07:28,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=100997.33333333333, ans=0.125 +2024-07-28 03:07:29,975 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 6.007e+01 6.703e+01 7.835e+01 1.474e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 03:07:52,645 INFO [train.py:1114] (2/4) Epoch 8, batch 4200, loss[loss=0.2615, simple_loss=0.3461, pruned_loss=0.08847, over 4913.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2965, pruned_loss=0.06444, over 939439.06 frames. ], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:07:52,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.94 vs. limit=22.5 +2024-07-28 03:07:58,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101010.66666666667, ans=0.0 +2024-07-28 03:07:59,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=101010.66666666667, ans=0.2 +2024-07-28 03:08:44,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=101050.66666666667, ans=0.025 +2024-07-28 03:09:14,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=101064.0, ans=0.125 +2024-07-28 03:09:17,425 INFO [train.py:1114] (2/4) Epoch 8, batch 4250, loss[loss=0.1625, simple_loss=0.2513, pruned_loss=0.03685, over 4646.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.298, pruned_loss=0.0652, over 940627.59 frames. 
], batch size: 12, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:20,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=101077.33333333333, ans=0.05 +2024-07-28 03:09:20,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=101077.33333333333, ans=0.0 +2024-07-28 03:09:31,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101104.0, ans=0.0 +2024-07-28 03:09:41,107 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:09:43,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=101117.33333333333, ans=0.0 +2024-07-28 03:09:49,641 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.910e+01 6.569e+01 7.778e+01 1.465e+02, threshold=1.314e+02, percent-clipped=1.0 +2024-07-28 03:09:52,207 INFO [train.py:1114] (2/4) Epoch 8, batch 4300, loss[loss=0.2091, simple_loss=0.3064, pruned_loss=0.05594, over 4752.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2989, pruned_loss=0.06583, over 940358.45 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:10:07,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101170.66666666667, ans=0.125 +2024-07-28 03:10:24,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101197.33333333333, ans=0.0 +2024-07-28 03:10:24,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=12.0 +2024-07-28 03:10:27,459 INFO [train.py:1114] (2/4) Epoch 8, batch 4350, loss[loss=0.2059, simple_loss=0.2864, pruned_loss=0.06274, over 4767.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.298, pruned_loss=0.06495, over 941008.65 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:10:45,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101237.33333333333, ans=0.125 +2024-07-28 03:10:49,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=101250.66666666667, ans=0.2 +2024-07-28 03:10:53,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=101250.66666666667, ans=0.125 +2024-07-28 03:10:58,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+01 5.773e+01 6.336e+01 7.369e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 03:11:01,106 INFO [train.py:1114] (2/4) Epoch 8, batch 4400, loss[loss=0.2053, simple_loss=0.31, pruned_loss=0.05032, over 4803.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2981, pruned_loss=0.06448, over 940656.45 frames. 
], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:05,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=101277.33333333333, ans=0.0 +2024-07-28 03:11:19,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.whiten.whitening_limit, batch_count=101304.0, ans=12.0 +2024-07-28 03:11:21,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=101317.33333333333, ans=0.09899494936611666 +2024-07-28 03:11:31,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0 +2024-07-28 03:11:36,963 INFO [train.py:1114] (2/4) Epoch 8, batch 4450, loss[loss=0.1865, simple_loss=0.2678, pruned_loss=0.05262, over 4945.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.298, pruned_loss=0.0648, over 939202.45 frames. ], batch size: 12, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:41,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101344.0, ans=0.1 +2024-07-28 03:11:48,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=101357.33333333333, ans=0.1 +2024-07-28 03:12:06,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=101397.33333333333, ans=0.125 +2024-07-28 03:12:06,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=101397.33333333333, ans=0.2 +2024-07-28 03:12:09,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.991e+01 6.173e+01 7.006e+01 8.907e+01 1.361e+02, threshold=1.401e+02, percent-clipped=3.0 +2024-07-28 03:12:12,346 INFO [train.py:1114] (2/4) Epoch 8, batch 4500, loss[loss=0.215, simple_loss=0.306, pruned_loss=0.06203, over 4737.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2984, pruned_loss=0.06496, over 938313.03 frames. ], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:12:21,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=101424.0, ans=0.2 +2024-07-28 03:12:22,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101424.0, ans=0.1 +2024-07-28 03:12:24,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=101424.0, ans=0.125 +2024-07-28 03:12:26,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101437.33333333333, ans=0.125 +2024-07-28 03:12:46,593 INFO [train.py:1114] (2/4) Epoch 8, batch 4550, loss[loss=0.1721, simple_loss=0.2593, pruned_loss=0.04242, over 4904.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2977, pruned_loss=0.06492, over 940047.58 frames. 
], batch size: 13, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:12:53,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=101477.33333333333, ans=0.125 +2024-07-28 03:12:55,389 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:12:58,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=101490.66666666667, ans=0.125 +2024-07-28 03:13:03,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=101504.0, ans=0.125 +2024-07-28 03:13:04,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.66 vs. limit=15.0 +2024-07-28 03:13:11,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=101517.33333333333, ans=0.0 +2024-07-28 03:13:19,424 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.796e+01 6.389e+01 7.358e+01 1.083e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 03:13:22,135 INFO [train.py:1114] (2/4) Epoch 8, batch 4600, loss[loss=0.2396, simple_loss=0.3166, pruned_loss=0.08129, over 4640.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2981, pruned_loss=0.0652, over 938485.23 frames. ], batch size: 22, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:22,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=101544.0, ans=0.125 +2024-07-28 03:13:23,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101544.0, ans=0.1 +2024-07-28 03:13:27,761 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:13:36,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101570.66666666667, ans=0.1 +2024-07-28 03:13:42,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=101584.0, ans=0.125 +2024-07-28 03:13:46,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=101584.0, ans=0.0 +2024-07-28 03:13:52,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=101597.33333333333, ans=0.025 +2024-07-28 03:13:55,114 INFO [train.py:1114] (2/4) Epoch 8, batch 4650, loss[loss=0.2252, simple_loss=0.3139, pruned_loss=0.06821, over 4838.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2983, pruned_loss=0.06496, over 940393.52 frames. ], batch size: 16, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:56,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=101610.66666666667, ans=0.125 +2024-07-28 03:13:58,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=101610.66666666667, ans=0.0 +2024-07-28 03:14:04,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. 
limit=15.0 +2024-07-28 03:14:05,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=101624.0, ans=0.125 +2024-07-28 03:14:19,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.65 vs. limit=22.5 +2024-07-28 03:14:27,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.774e+01 6.444e+01 7.624e+01 1.056e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:14:30,290 INFO [train.py:1114] (2/4) Epoch 8, batch 4700, loss[loss=0.1985, simple_loss=0.2816, pruned_loss=0.05768, over 4719.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2975, pruned_loss=0.0648, over 938172.58 frames. ], batch size: 11, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:14:33,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=101677.33333333333, ans=0.125 +2024-07-28 03:14:39,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=101690.66666666667, ans=0.2 +2024-07-28 03:14:52,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=101717.33333333333, ans=0.125 +2024-07-28 03:15:04,168 INFO [train.py:1114] (2/4) Epoch 8, batch 4750, loss[loss=0.2503, simple_loss=0.3243, pruned_loss=0.08812, over 4531.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2978, pruned_loss=0.06516, over 935938.06 frames. ], batch size: 21, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:13,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101757.33333333333, ans=0.1 +2024-07-28 03:15:24,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=101770.66666666667, ans=0.125 +2024-07-28 03:15:32,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=101797.33333333333, ans=0.2 +2024-07-28 03:15:34,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=101797.33333333333, ans=0.2 +2024-07-28 03:15:37,287 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.718e+01 6.515e+01 7.341e+01 9.928e+01, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 03:15:40,386 INFO [train.py:1114] (2/4) Epoch 8, batch 4800, loss[loss=0.235, simple_loss=0.3105, pruned_loss=0.07976, over 4689.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2974, pruned_loss=0.06563, over 933027.09 frames. ], batch size: 13, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:44,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=101810.66666666667, ans=0.0 +2024-07-28 03:16:10,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=101864.0, ans=0.125 +2024-07-28 03:16:14,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=101864.0, ans=0.0 +2024-07-28 03:16:17,665 INFO [train.py:1114] (2/4) Epoch 8, batch 4850, loss[loss=0.2493, simple_loss=0.3373, pruned_loss=0.08065, over 4747.00 frames. 
], tot_loss[loss=0.2147, simple_loss=0.2978, pruned_loss=0.06577, over 933314.23 frames. ], batch size: 14, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:16:29,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101890.66666666667, ans=0.125 +2024-07-28 03:16:31,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=101890.66666666667, ans=0.1 +2024-07-28 03:16:31,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=101890.66666666667, ans=0.125 +2024-07-28 03:16:39,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=101904.0, ans=0.1 +2024-07-28 03:16:41,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101917.33333333333, ans=0.125 +2024-07-28 03:16:42,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=101917.33333333333, ans=0.04949747468305833 +2024-07-28 03:16:49,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-07-28 03:16:51,217 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.836e+01 5.718e+01 6.267e+01 6.950e+01 1.595e+02, threshold=1.253e+02, percent-clipped=1.0 +2024-07-28 03:17:01,442 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.76 vs. limit=22.5 +2024-07-28 03:17:01,612 INFO [train.py:1114] (2/4) Epoch 8, batch 4900, loss[loss=0.2248, simple_loss=0.3063, pruned_loss=0.07159, over 4760.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2971, pruned_loss=0.06544, over 934647.62 frames. ], batch size: 13, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:04,105 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.32 vs. limit=10.0 +2024-07-28 03:17:09,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=101957.33333333333, ans=0.0 +2024-07-28 03:17:12,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=101957.33333333333, ans=0.125 +2024-07-28 03:17:23,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=101984.0, ans=0.2 +2024-07-28 03:17:25,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101984.0, ans=0.125 +2024-07-28 03:17:26,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=101984.0, ans=0.125 +2024-07-28 03:17:33,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.42 vs. limit=10.0 +2024-07-28 03:17:35,667 INFO [train.py:1114] (2/4) Epoch 8, batch 4950, loss[loss=0.2586, simple_loss=0.3216, pruned_loss=0.09784, over 3514.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2983, pruned_loss=0.0661, over 932060.11 frames. 
], batch size: 35, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:39,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=102010.66666666667, ans=0.04949747468305833 +2024-07-28 03:17:50,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=102037.33333333333, ans=0.125 +2024-07-28 03:18:02,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=102050.66666666667, ans=0.125 +2024-07-28 03:18:08,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 5.855e+01 6.357e+01 7.218e+01 9.647e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 03:18:10,643 INFO [train.py:1114] (2/4) Epoch 8, batch 5000, loss[loss=0.2084, simple_loss=0.294, pruned_loss=0.06142, over 4666.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2975, pruned_loss=0.06544, over 935774.21 frames. ], batch size: 14, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:18:35,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102117.33333333333, ans=0.0 +2024-07-28 03:18:37,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=15.0 +2024-07-28 03:18:45,844 INFO [train.py:1114] (2/4) Epoch 8, batch 5050, loss[loss=0.1556, simple_loss=0.2358, pruned_loss=0.03776, over 4852.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2973, pruned_loss=0.06493, over 938361.91 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:18:46,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=102144.0, ans=0.07 +2024-07-28 03:18:57,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=102157.33333333333, ans=0.125 +2024-07-28 03:19:03,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=102170.66666666667, ans=0.125 +2024-07-28 03:19:05,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=102170.66666666667, ans=0.95 +2024-07-28 03:19:07,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=102184.0, ans=0.0 +2024-07-28 03:19:10,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.85 vs. 
limit=10.0 +2024-07-28 03:19:11,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102184.0, ans=0.1 +2024-07-28 03:19:14,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=102197.33333333333, ans=0.125 +2024-07-28 03:19:15,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=102197.33333333333, ans=0.05 +2024-07-28 03:19:17,811 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.666e+01 5.803e+01 6.269e+01 7.279e+01 1.149e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 03:19:19,905 INFO [train.py:1114] (2/4) Epoch 8, batch 5100, loss[loss=0.1743, simple_loss=0.2579, pruned_loss=0.04535, over 4763.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2981, pruned_loss=0.06579, over 935793.62 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:33,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=102237.33333333333, ans=0.125 +2024-07-28 03:19:49,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=102264.0, ans=0.07 +2024-07-28 03:19:54,792 INFO [train.py:1114] (2/4) Epoch 8, batch 5150, loss[loss=0.2159, simple_loss=0.3031, pruned_loss=0.06441, over 4845.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2992, pruned_loss=0.06615, over 936446.79 frames. ], batch size: 16, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:20:08,900 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=3.662e-02 +2024-07-28 03:20:10,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=102304.0, ans=0.0 +2024-07-28 03:20:21,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.29 vs. limit=15.0 +2024-07-28 03:20:23,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.46 vs. limit=22.5 +2024-07-28 03:20:26,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.815e+01 6.319e+01 7.025e+01 9.950e+01, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 03:20:30,188 INFO [train.py:1114] (2/4) Epoch 8, batch 5200, loss[loss=0.2161, simple_loss=0.3186, pruned_loss=0.05676, over 4673.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2977, pruned_loss=0.06506, over 936664.74 frames. ], batch size: 14, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:20:32,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.92 vs. limit=22.5 +2024-07-28 03:20:42,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=12.0 +2024-07-28 03:20:43,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.59 vs. 
limit=12.0 +2024-07-28 03:20:44,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=102370.66666666667, ans=0.1 +2024-07-28 03:20:49,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=102370.66666666667, ans=0.125 +2024-07-28 03:21:05,423 INFO [train.py:1114] (2/4) Epoch 8, batch 5250, loss[loss=0.1999, simple_loss=0.2916, pruned_loss=0.05412, over 4903.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2963, pruned_loss=0.06463, over 936165.29 frames. ], batch size: 13, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:10,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=102410.66666666667, ans=0.0 +2024-07-28 03:21:10,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=102410.66666666667, ans=0.025 +2024-07-28 03:21:11,409 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:21:12,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=102424.0, ans=0.125 +2024-07-28 03:21:16,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102424.0, ans=0.1 +2024-07-28 03:21:20,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=102437.33333333333, ans=0.025 +2024-07-28 03:21:21,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.15 vs. limit=10.0 +2024-07-28 03:21:22,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=102437.33333333333, ans=0.125 +2024-07-28 03:21:32,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102464.0, ans=0.125 +2024-07-28 03:21:36,698 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.809e+01 6.446e+01 7.224e+01 1.154e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:21:38,712 INFO [train.py:1114] (2/4) Epoch 8, batch 5300, loss[loss=0.1945, simple_loss=0.2875, pruned_loss=0.0508, over 4649.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2962, pruned_loss=0.06515, over 934137.42 frames. 
], batch size: 16, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:40,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=102477.33333333333, ans=0.0 +2024-07-28 03:21:41,495 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:21:54,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=102504.0, ans=0.0 +2024-07-28 03:21:59,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102517.33333333333, ans=0.0 +2024-07-28 03:22:00,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102517.33333333333, ans=0.1 +2024-07-28 03:22:01,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102517.33333333333, ans=0.1 +2024-07-28 03:22:04,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=102530.66666666667, ans=0.2 +2024-07-28 03:22:11,878 INFO [train.py:1114] (2/4) Epoch 8, batch 5350, loss[loss=0.2003, simple_loss=0.2746, pruned_loss=0.06301, over 4482.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2973, pruned_loss=0.06543, over 936192.44 frames. ], batch size: 10, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:22:23,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=102557.33333333333, ans=0.5 +2024-07-28 03:22:25,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=102570.66666666667, ans=0.09899494936611666 +2024-07-28 03:22:36,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102584.0, ans=0.0 +2024-07-28 03:22:41,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=102597.33333333333, ans=0.0 +2024-07-28 03:22:43,567 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 6.073e+01 6.739e+01 7.548e+01 1.442e+02, threshold=1.348e+02, percent-clipped=1.0 +2024-07-28 03:22:45,669 INFO [train.py:1114] (2/4) Epoch 8, batch 5400, loss[loss=0.2425, simple_loss=0.338, pruned_loss=0.0735, over 4374.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2988, pruned_loss=0.06631, over 930162.94 frames. ], batch size: 26, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:22:48,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=102610.66666666667, ans=0.125 +2024-07-28 03:22:52,833 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.53 vs. limit=12.0 +2024-07-28 03:22:54,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=102624.0, ans=0.0 +2024-07-28 03:23:05,420 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:23:07,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.01 vs. 
limit=15.0 +2024-07-28 03:23:20,683 INFO [train.py:1114] (2/4) Epoch 8, batch 5450, loss[loss=0.1785, simple_loss=0.2533, pruned_loss=0.05182, over 4709.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2965, pruned_loss=0.06511, over 933017.32 frames. ], batch size: 11, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:23:24,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=102677.33333333333, ans=0.0 +2024-07-28 03:23:26,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.80 vs. limit=6.0 +2024-07-28 03:23:30,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=102690.66666666667, ans=0.0 +2024-07-28 03:23:33,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=102704.0, ans=0.125 +2024-07-28 03:23:45,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.99 vs. limit=15.0 +2024-07-28 03:23:48,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=102717.33333333333, ans=0.125 +2024-07-28 03:23:48,896 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.49 vs. limit=10.0 +2024-07-28 03:23:54,556 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.203e+01 6.756e+01 7.672e+01 1.108e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 03:23:55,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=12.0 +2024-07-28 03:23:56,683 INFO [train.py:1114] (2/4) Epoch 8, batch 5500, loss[loss=0.2397, simple_loss=0.3213, pruned_loss=0.07909, over 4184.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.296, pruned_loss=0.0649, over 930362.05 frames. ], batch size: 25, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:24:12,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=102770.66666666667, ans=0.2 +2024-07-28 03:24:16,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.48 vs. limit=15.0 +2024-07-28 03:24:29,631 INFO [train.py:1114] (2/4) Epoch 8, batch 5550, loss[loss=0.175, simple_loss=0.2622, pruned_loss=0.04393, over 4712.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.297, pruned_loss=0.06511, over 932673.73 frames. 
], batch size: 12, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:24:36,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=102824.0, ans=0.125 +2024-07-28 03:24:43,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=102837.33333333333, ans=0.125 +2024-07-28 03:24:45,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=102837.33333333333, ans=0.2 +2024-07-28 03:24:45,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102837.33333333333, ans=0.1 +2024-07-28 03:24:51,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102850.66666666667, ans=0.1 +2024-07-28 03:24:51,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=14.87 vs. limit=15.0 +2024-07-28 03:25:01,401 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.969e+01 5.947e+01 6.604e+01 7.771e+01 1.160e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 03:25:03,448 INFO [train.py:1114] (2/4) Epoch 8, batch 5600, loss[loss=0.2216, simple_loss=0.3121, pruned_loss=0.06555, over 4733.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2969, pruned_loss=0.0652, over 934171.58 frames. ], batch size: 14, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:25:03,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=102877.33333333333, ans=0.025 +2024-07-28 03:25:06,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=102877.33333333333, ans=0.05 +2024-07-28 03:25:11,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=102890.66666666667, ans=0.0 +2024-07-28 03:25:25,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=102917.33333333333, ans=0.125 +2024-07-28 03:25:38,401 INFO [train.py:1114] (2/4) Epoch 8, batch 5650, loss[loss=0.2369, simple_loss=0.3241, pruned_loss=0.07491, over 4562.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2961, pruned_loss=0.06428, over 936557.73 frames. ], batch size: 21, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:25:38,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.14 vs. limit=10.0 +2024-07-28 03:25:44,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=102957.33333333333, ans=0.125 +2024-07-28 03:25:45,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=102957.33333333333, ans=0.2 +2024-07-28 03:25:45,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102957.33333333333, ans=0.1 +2024-07-28 03:25:51,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. 
limit=6.0 +2024-07-28 03:25:51,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=102970.66666666667, ans=10.0 +2024-07-28 03:25:59,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.85 vs. limit=10.0 +2024-07-28 03:26:09,441 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.666e+01 6.096e+01 6.693e+01 9.432e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 03:26:11,508 INFO [train.py:1114] (2/4) Epoch 8, batch 5700, loss[loss=0.2329, simple_loss=0.3072, pruned_loss=0.07925, over 4687.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2971, pruned_loss=0.06487, over 937633.93 frames. ], batch size: 13, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:26:11,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=103010.66666666667, ans=0.0 +2024-07-28 03:26:15,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=103010.66666666667, ans=0.125 +2024-07-28 03:26:15,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=12.0 +2024-07-28 03:26:22,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103024.0, ans=0.125 +2024-07-28 03:26:40,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=103064.0, ans=0.125 +2024-07-28 03:26:42,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=6.0 +2024-07-28 03:26:46,909 INFO [train.py:1114] (2/4) Epoch 8, batch 5750, loss[loss=0.2324, simple_loss=0.32, pruned_loss=0.07238, over 4754.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2978, pruned_loss=0.06537, over 937628.52 frames. ], batch size: 19, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:27:00,705 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.86 vs. limit=12.0 +2024-07-28 03:27:03,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=103104.0, ans=0.125 +2024-07-28 03:27:05,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=103104.0, ans=0.2 +2024-07-28 03:27:14,817 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=15.0 +2024-07-28 03:27:18,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.76 vs. limit=10.0 +2024-07-28 03:27:18,489 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.742e+01 5.884e+01 6.600e+01 7.288e+01 1.127e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-28 03:27:20,735 INFO [train.py:1114] (2/4) Epoch 8, batch 5800, loss[loss=0.2316, simple_loss=0.3064, pruned_loss=0.07834, over 4686.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2993, pruned_loss=0.06671, over 936719.15 frames. 
], batch size: 19, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:27:29,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103144.0, ans=0.125 +2024-07-28 03:27:35,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0 +2024-07-28 03:27:40,449 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. limit=6.0 +2024-07-28 03:27:46,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.91 vs. limit=22.5 +2024-07-28 03:27:59,746 INFO [train.py:1114] (2/4) Epoch 8, batch 5850, loss[loss=0.2265, simple_loss=0.3121, pruned_loss=0.0704, over 4511.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2983, pruned_loss=0.06571, over 937792.92 frames. ], batch size: 21, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:27:59,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103210.66666666667, ans=0.125 +2024-07-28 03:28:04,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.06 vs. limit=15.0 +2024-07-28 03:28:13,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=103237.33333333333, ans=0.125 +2024-07-28 03:28:20,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.71 vs. limit=22.5 +2024-07-28 03:28:30,668 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.776e+01 6.352e+01 7.126e+01 1.312e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 03:28:38,754 INFO [train.py:1114] (2/4) Epoch 8, batch 5900, loss[loss=0.267, simple_loss=0.3471, pruned_loss=0.0935, over 4690.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2975, pruned_loss=0.06555, over 938079.77 frames. ], batch size: 15, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:28:47,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103290.66666666667, ans=0.0 +2024-07-28 03:28:50,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=103290.66666666667, ans=0.125 +2024-07-28 03:28:57,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=103304.0, ans=0.0 +2024-07-28 03:29:14,044 INFO [train.py:1114] (2/4) Epoch 8, batch 5950, loss[loss=0.2423, simple_loss=0.3202, pruned_loss=0.08223, over 4674.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2959, pruned_loss=0.06485, over 939986.05 frames. 
], batch size: 15, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:29:24,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=103357.33333333333, ans=0.125 +2024-07-28 03:29:28,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=103370.66666666667, ans=0.0 +2024-07-28 03:29:30,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=103370.66666666667, ans=0.125 +2024-07-28 03:29:37,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=103384.0, ans=0.2 +2024-07-28 03:29:39,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.59 vs. limit=12.0 +2024-07-28 03:29:43,125 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-07-28 03:29:45,391 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+01 5.778e+01 6.625e+01 7.689e+01 1.053e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 03:29:47,504 INFO [train.py:1114] (2/4) Epoch 8, batch 6000, loss[loss=0.2332, simple_loss=0.3123, pruned_loss=0.0771, over 4212.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2963, pruned_loss=0.06526, over 937201.66 frames. ], batch size: 25, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:29:47,505 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 03:30:06,860 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.1796, simple_loss=0.2837, pruned_loss=0.03775, over 944034.00 frames. +2024-07-28 03:30:06,861 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 03:30:24,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=103437.33333333333, ans=0.125 +2024-07-28 03:30:25,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.92 vs. limit=22.5 +2024-07-28 03:30:26,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.90 vs. limit=5.0 +2024-07-28 03:30:26,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103450.66666666667, ans=0.0 +2024-07-28 03:30:30,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.57 vs. limit=12.0 +2024-07-28 03:30:35,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=103464.0, ans=0.5 +2024-07-28 03:30:42,223 INFO [train.py:1114] (2/4) Epoch 8, batch 6050, loss[loss=0.2073, simple_loss=0.2899, pruned_loss=0.0623, over 4775.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2959, pruned_loss=0.06458, over 938996.56 frames. 
], batch size: 12, lr: 9.33e-03, grad_scale: 32.0
+2024-07-28 03:31:09,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=103530.66666666667, ans=0.025
+2024-07-28 03:31:14,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.635e+01 6.111e+01 6.957e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 03:31:16,090 INFO [train.py:1114] (2/4) Epoch 8, batch 6100, loss[loss=0.2263, simple_loss=0.3195, pruned_loss=0.06656, over 4670.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2945, pruned_loss=0.06375, over 938245.21 frames. ], batch size: 15, lr: 9.33e-03, grad_scale: 32.0
+2024-07-28 03:31:20,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.68 vs. limit=15.0
+2024-07-28 03:31:20,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.59 vs. limit=22.5
+2024-07-28 03:31:22,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=103557.33333333333, ans=0.0
+2024-07-28 03:31:27,615 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.69 vs. limit=15.0
+2024-07-28 03:31:39,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.63 vs. limit=22.5
+2024-07-28 03:31:46,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.17 vs. limit=12.0
+2024-07-28 03:31:53,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=103597.33333333333, ans=0.125
+2024-07-28 03:31:54,403 INFO [train.py:1114] (2/4) Epoch 8, batch 6150, loss[loss=0.2618, simple_loss=0.3367, pruned_loss=0.09342, over 3683.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.295, pruned_loss=0.06389, over 937277.53 frames. ], batch size: 35, lr: 9.33e-03, grad_scale: 32.0
+2024-07-28 03:31:56,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=103610.66666666667, ans=0.125
+2024-07-28 03:32:02,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=103624.0, ans=0.0
+2024-07-28 03:32:10,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=103624.0, ans=0.125
+2024-07-28 03:32:11,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103637.33333333333, ans=0.125
+2024-07-28 03:32:12,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=103637.33333333333, ans=0.0
+2024-07-28 03:32:19,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=103650.66666666667, ans=0.2
+2024-07-28 03:32:30,320 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.158e+01 5.994e+01 6.634e+01 7.988e+01 1.219e+02, threshold=1.327e+02, percent-clipped=0.0
+2024-07-28 03:32:32,390 INFO [train.py:1114] (2/4) Epoch 8, batch 6200, loss[loss=0.2074, simple_loss=0.301, pruned_loss=0.05685, over 4748.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2944, pruned_loss=0.06381, over 936894.42 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:32:34,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103677.33333333333, ans=0.1
+2024-07-28 03:32:36,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=103677.33333333333, ans=0.2
+2024-07-28 03:32:55,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=103717.33333333333, ans=0.025
+2024-07-28 03:33:03,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.67 vs. limit=15.0
+2024-07-28 03:33:06,939 INFO [train.py:1114] (2/4) Epoch 8, batch 6250, loss[loss=0.2486, simple_loss=0.3289, pruned_loss=0.08411, over 4818.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2936, pruned_loss=0.06392, over 933489.89 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:33:14,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=103744.0, ans=0.0
+2024-07-28 03:33:25,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=103770.66666666667, ans=0.025
+2024-07-28 03:33:31,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.06 vs. limit=15.0
+2024-07-28 03:33:37,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=103784.0, ans=0.025
+2024-07-28 03:33:56,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=103784.0, ans=0.0
+2024-07-28 03:34:05,279 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.622e+01 6.181e+01 7.164e+01 1.267e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 03:34:06,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=103810.66666666667, ans=0.2
+2024-07-28 03:34:07,329 INFO [train.py:1114] (2/4) Epoch 8, batch 6300, loss[loss=0.2007, simple_loss=0.2863, pruned_loss=0.05756, over 4517.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2942, pruned_loss=0.06411, over 929612.38 frames. ], batch size: 10, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:34:10,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=103810.66666666667, ans=0.95
+2024-07-28 03:34:13,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. limit=10.0
+2024-07-28 03:34:16,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=103824.0, ans=0.125
+2024-07-28 03:34:21,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103837.33333333333, ans=0.1
+2024-07-28 03:34:31,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=103850.66666666667, ans=0.125
+2024-07-28 03:34:31,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.08 vs. limit=15.0
+2024-07-28 03:34:47,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103864.0, ans=0.1
+2024-07-28 03:34:47,993 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.45 vs. limit=10.0
+2024-07-28 03:34:48,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=103864.0, ans=0.2
+2024-07-28 03:34:53,144 INFO [train.py:1114] (2/4) Epoch 8, batch 6350, loss[loss=0.2315, simple_loss=0.3266, pruned_loss=0.0682, over 4340.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2933, pruned_loss=0.06287, over 933513.60 frames. ], batch size: 21, lr: 9.32e-03, grad_scale: 32.0
+2024-07-28 03:34:56,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=103877.33333333333, ans=0.0
+2024-07-28 03:35:05,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=103890.66666666667, ans=0.0
+2024-07-28 03:35:06,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=103890.66666666667, ans=0.05
+2024-07-28 03:35:12,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=103904.0, ans=0.2
+2024-07-28 03:35:23,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=103930.66666666667, ans=0.2
+2024-07-28 03:35:23,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=103930.66666666667, ans=0.1
+2024-07-28 03:35:24,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=15.0
+2024-07-28 03:35:37,028 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.697e+01 6.431e+01 7.734e+01 1.122e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 03:35:38,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.14 vs. limit=22.5
+2024-07-28 03:35:39,024 INFO [train.py:1114] (2/4) Epoch 8, batch 6400, loss[loss=0.1824, simple_loss=0.2692, pruned_loss=0.04778, over 4643.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2943, pruned_loss=0.06336, over 935452.08 frames. ], batch size: 13, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:35:43,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.07 vs. limit=15.0
+2024-07-28 03:35:44,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103957.33333333333, ans=0.1
+2024-07-28 03:35:46,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0
+2024-07-28 03:35:50,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103957.33333333333, ans=0.125
+2024-07-28 03:35:55,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.35 vs. limit=15.0
+2024-07-28 03:36:08,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=103984.0, ans=0.2
+2024-07-28 03:36:16,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=103997.33333333333, ans=0.025
+2024-07-28 03:36:18,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=103997.33333333333, ans=10.0
+2024-07-28 03:36:20,336 INFO [train.py:1114] (2/4) Epoch 8, batch 6450, loss[loss=0.229, simple_loss=0.3152, pruned_loss=0.07144, over 4471.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2949, pruned_loss=0.06357, over 938801.26 frames. ], batch size: 21, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:36:23,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=104010.66666666667, ans=0.0
+2024-07-28 03:36:24,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.02 vs. limit=15.0
+2024-07-28 03:36:27,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=104024.0, ans=0.05
+2024-07-28 03:36:27,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=104024.0, ans=0.125
+2024-07-28 03:36:33,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=104037.33333333333, ans=0.2
+2024-07-28 03:36:38,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=104037.33333333333, ans=0.125
+2024-07-28 03:36:52,547 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.901e+01 6.090e+01 6.968e+01 8.127e+01 1.259e+02, threshold=1.394e+02, percent-clipped=0.0
+2024-07-28 03:36:54,647 INFO [train.py:1114] (2/4) Epoch 8, batch 6500, loss[loss=0.3452, simple_loss=0.3882, pruned_loss=0.1511, over 3210.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2956, pruned_loss=0.0635, over 940141.28 frames. ], batch size: 35, lr: 9.31e-03, grad_scale: 32.0
+2024-07-28 03:37:09,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0
+2024-07-28 03:37:09,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=104104.0, ans=0.05
+2024-07-28 03:37:10,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=104104.0, ans=0.0
+2024-07-28 03:37:18,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104117.33333333333, ans=0.125
+2024-07-28 03:37:19,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104117.33333333333, ans=0.1
+2024-07-28 03:37:19,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104117.33333333333, ans=0.1
+2024-07-28 03:37:21,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=104117.33333333333, ans=0.0
+2024-07-28 03:37:27,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=104130.66666666667, ans=0.09899494936611666
+2024-07-28 03:37:30,150 INFO [train.py:1114] (2/4) Epoch 8, batch 6550, loss[loss=0.1989, simple_loss=0.2811, pruned_loss=0.05834, over 4811.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2951, pruned_loss=0.06315, over 943211.56 frames. ], batch size: 11, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:38:00,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=104197.33333333333, ans=0.0
+2024-07-28 03:38:02,250 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.695e+01 6.284e+01 7.396e+01 1.281e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 03:38:04,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.20 vs. limit=22.5
+2024-07-28 03:38:04,218 INFO [train.py:1114] (2/4) Epoch 8, batch 6600, loss[loss=0.2288, simple_loss=0.3259, pruned_loss=0.06584, over 4932.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.296, pruned_loss=0.06415, over 944960.41 frames. ], batch size: 14, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:38:10,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=104224.0, ans=0.125
+2024-07-28 03:38:11,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104224.0, ans=0.125
+2024-07-28 03:38:36,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.55 vs. limit=15.0
+2024-07-28 03:38:40,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=104250.66666666667, ans=0.0
+2024-07-28 03:38:56,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104264.0, ans=0.0
+2024-07-28 03:38:58,593 INFO [train.py:1114] (2/4) Epoch 8, batch 6650, loss[loss=0.2542, simple_loss=0.3429, pruned_loss=0.08281, over 4641.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2964, pruned_loss=0.06458, over 943761.98 frames. ], batch size: 17, lr: 9.30e-03, grad_scale: 32.0
+2024-07-28 03:39:02,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=104277.33333333333, ans=0.125
+2024-07-28 03:39:04,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=104290.66666666667, ans=0.125
+2024-07-28 03:39:11,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=104304.0, ans=0.125
+2024-07-28 03:39:12,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=104304.0, ans=0.125
+2024-07-28 03:39:13,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=104304.0, ans=0.125
+2024-07-28 03:39:22,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104317.33333333333, ans=0.1
+2024-07-28 03:39:34,786 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+01 5.792e+01 6.278e+01 6.949e+01 1.059e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 03:39:44,718 INFO [train.py:1114] (2/4) Epoch 8, batch 6700, loss[loss=0.224, simple_loss=0.3064, pruned_loss=0.07078, over 4720.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2974, pruned_loss=0.06524, over 942802.20 frames. ], batch size: 19, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:39:53,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104344.0, ans=0.1
+2024-07-28 03:39:53,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=104344.0, ans=0.2
+2024-07-28 03:39:53,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=104344.0, ans=0.2
+2024-07-28 03:39:56,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.47 vs. limit=15.0
+2024-07-28 03:40:05,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=25.75 vs. limit=22.5
+2024-07-28 03:40:24,518 INFO [train.py:1114] (2/4) Epoch 8, batch 6750, loss[loss=0.2624, simple_loss=0.3405, pruned_loss=0.0922, over 4210.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2971, pruned_loss=0.06518, over 940968.48 frames. ], batch size: 25, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:40:29,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104410.66666666667, ans=0.1
+2024-07-28 03:40:32,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=104410.66666666667, ans=0.0
+2024-07-28 03:40:44,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=104424.0, ans=0.125
+2024-07-28 03:40:45,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=104424.0, ans=0.0
+2024-07-28 03:40:47,907 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:40:51,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=104437.33333333333, ans=0.025
+2024-07-28 03:41:04,957 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.597e+01 6.085e+01 6.894e+01 1.207e+02, threshold=1.217e+02, percent-clipped=0.0
+2024-07-28 03:41:14,476 INFO [train.py:1114] (2/4) Epoch 8, batch 6800, loss[loss=0.2053, simple_loss=0.3029, pruned_loss=0.05389, over 4637.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2978, pruned_loss=0.06526, over 939124.24 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:41:32,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=104504.0, ans=0.0
+2024-07-28 03:41:36,548 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.21 vs. limit=22.5
+2024-07-28 03:41:43,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=104530.66666666667, ans=0.125
+2024-07-28 03:41:50,247 INFO [train.py:1114] (2/4) Epoch 8, batch 6850, loss[loss=0.2087, simple_loss=0.288, pruned_loss=0.06468, over 4697.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2972, pruned_loss=0.0649, over 940947.12 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0
+2024-07-28 03:41:56,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104557.33333333333, ans=0.125
+2024-07-28 03:42:19,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.85 vs. limit=15.0
+2024-07-28 03:42:21,577 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.042e+01 6.902e+01 8.247e+01 1.133e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-28 03:42:22,905 INFO [train.py:1114] (2/4) Epoch 8, batch 6900, loss[loss=0.1825, simple_loss=0.2673, pruned_loss=0.04882, over 4961.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2975, pruned_loss=0.06488, over 943046.71 frames. ], batch size: 13, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:42:35,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=104637.33333333333, ans=0.125
+2024-07-28 03:42:38,193 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.35 vs. limit=15.0
+2024-07-28 03:42:41,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.73 vs. limit=15.0
+2024-07-28 03:42:41,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104650.66666666667, ans=0.1
+2024-07-28 03:42:55,951 INFO [train.py:1114] (2/4) Epoch 8, batch 6950, loss[loss=0.1927, simple_loss=0.264, pruned_loss=0.06066, over 4542.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2981, pruned_loss=0.06521, over 940775.99 frames. ], batch size: 10, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:42:56,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=104677.33333333333, ans=0.2
+2024-07-28 03:42:56,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=104677.33333333333, ans=0.125
+2024-07-28 03:42:57,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=104677.33333333333, ans=0.2
+2024-07-28 03:42:58,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=104677.33333333333, ans=0.2
+2024-07-28 03:43:02,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=104690.66666666667, ans=0.0
+2024-07-28 03:43:05,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0
+2024-07-28 03:43:09,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=104704.0, ans=0.07
+2024-07-28 03:43:10,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=104704.0, ans=0.125
+2024-07-28 03:43:12,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.21 vs. limit=15.0
+2024-07-28 03:43:16,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=104717.33333333333, ans=0.2
+2024-07-28 03:43:22,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.17 vs. limit=22.5
+2024-07-28 03:43:25,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=104730.66666666667, ans=0.0
+2024-07-28 03:43:25,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104730.66666666667, ans=0.0
+2024-07-28 03:43:28,347 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.937e+01 5.764e+01 6.206e+01 6.980e+01 1.236e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 03:43:29,715 INFO [train.py:1114] (2/4) Epoch 8, batch 7000, loss[loss=0.215, simple_loss=0.2983, pruned_loss=0.06579, over 4606.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2975, pruned_loss=0.06527, over 938987.09 frames. ], batch size: 17, lr: 9.28e-03, grad_scale: 16.0
+2024-07-28 03:43:30,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0
+2024-07-28 03:43:35,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=104744.0, ans=0.0
+2024-07-28 03:43:37,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=104757.33333333333, ans=0.0
+2024-07-28 03:43:38,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=104757.33333333333, ans=0.125
+2024-07-28 03:43:45,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=104770.66666666667, ans=0.025
+2024-07-28 03:44:09,221 INFO [train.py:1114] (2/4) Epoch 8, batch 7050, loss[loss=0.2297, simple_loss=0.3229, pruned_loss=0.06819, over 4679.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2968, pruned_loss=0.06447, over 942179.70 frames. ], batch size: 19, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:44:09,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0
+2024-07-28 03:44:20,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.61 vs. limit=15.0
+2024-07-28 03:44:22,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=104837.33333333333, ans=0.0
+2024-07-28 03:44:23,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=104837.33333333333, ans=0.0
+2024-07-28 03:44:41,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.673e+01 6.225e+01 7.440e+01 1.294e+02, threshold=1.245e+02, percent-clipped=1.0
+2024-07-28 03:44:42,669 INFO [train.py:1114] (2/4) Epoch 8, batch 7100, loss[loss=0.1882, simple_loss=0.277, pruned_loss=0.04967, over 4798.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.297, pruned_loss=0.06506, over 936448.28 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:44:42,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=104877.33333333333, ans=0.2
+2024-07-28 03:44:44,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104877.33333333333, ans=0.1
+2024-07-28 03:44:53,939 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:45:05,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.29 vs. limit=15.0
+2024-07-28 03:45:15,083 INFO [train.py:1114] (2/4) Epoch 8, batch 7150, loss[loss=0.236, simple_loss=0.3215, pruned_loss=0.07527, over 4474.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2957, pruned_loss=0.06494, over 937455.15 frames. ], batch size: 21, lr: 9.27e-03, grad_scale: 16.0
+2024-07-28 03:45:24,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.24 vs. limit=22.5
+2024-07-28 03:45:30,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=104970.66666666667, ans=0.025
+2024-07-28 03:45:30,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=104970.66666666667, ans=0.125
+2024-07-28 03:45:31,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104970.66666666667, ans=0.1
+2024-07-28 03:45:40,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=104984.0, ans=0.2
+2024-07-28 03:45:44,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=104997.33333333333, ans=0.125
+2024-07-28 03:45:46,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.94 vs. limit=15.0
+2024-07-28 03:45:46,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=104997.33333333333, ans=0.5
+2024-07-28 03:45:48,071 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.987e+01 7.110e+01 8.384e+01 1.191e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-28 03:45:49,419 INFO [train.py:1114] (2/4) Epoch 8, batch 7200, loss[loss=0.2417, simple_loss=0.3259, pruned_loss=0.07874, over 4812.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2973, pruned_loss=0.06553, over 937701.30 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 32.0
+2024-07-28 03:46:01,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=105037.33333333333, ans=0.125
+2024-07-28 03:46:03,893 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.284e-02
+2024-07-28 03:46:13,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=105050.66666666667, ans=0.125
+2024-07-28 03:46:13,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=105050.66666666667, ans=0.2
+2024-07-28 03:46:13,659 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:46:21,924 INFO [train.py:1114] (2/4) Epoch 8, batch 7250, loss[loss=0.2155, simple_loss=0.2724, pruned_loss=0.07929, over 4864.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2961, pruned_loss=0.06515, over 939285.80 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:46:30,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=105090.66666666667, ans=0.025
+2024-07-28 03:46:32,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=105090.66666666667, ans=0.125
+2024-07-28 03:46:53,133 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.010e+01 6.491e+01 7.289e+01 9.989e+01, threshold=1.298e+02, percent-clipped=0.0
+2024-07-28 03:46:54,405 INFO [train.py:1114] (2/4) Epoch 8, batch 7300, loss[loss=0.2218, simple_loss=0.3045, pruned_loss=0.06951, over 4851.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2964, pruned_loss=0.06478, over 939494.14 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:46:58,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105144.0, ans=0.125
+2024-07-28 03:47:19,726 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.52 vs. limit=15.0
+2024-07-28 03:47:20,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=105197.33333333333, ans=0.125
+2024-07-28 03:47:22,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=105197.33333333333, ans=0.125
+2024-07-28 03:47:27,035 INFO [train.py:1114] (2/4) Epoch 8, batch 7350, loss[loss=0.2163, simple_loss=0.3151, pruned_loss=0.05877, over 4642.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2965, pruned_loss=0.06468, over 939219.53 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0
+2024-07-28 03:47:27,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=105210.66666666667, ans=0.125
+2024-07-28 03:47:29,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105210.66666666667, ans=0.125
+2024-07-28 03:47:39,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0
+2024-07-28 03:47:46,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=105250.66666666667, ans=0.0
+2024-07-28 03:47:58,208 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+01 5.796e+01 6.371e+01 7.883e+01 1.311e+02, threshold=1.274e+02, percent-clipped=1.0
+2024-07-28 03:47:58,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105264.0, ans=0.125
+2024-07-28 03:47:59,458 INFO [train.py:1114] (2/4) Epoch 8, batch 7400, loss[loss=0.2327, simple_loss=0.3108, pruned_loss=0.07737, over 4687.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2962, pruned_loss=0.06415, over 940329.32 frames. ], batch size: 13, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:47:59,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.10 vs. limit=22.5
+2024-07-28 03:48:10,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=105290.66666666667, ans=0.2
+2024-07-28 03:48:14,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=105304.0, ans=0.125
+2024-07-28 03:48:23,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105317.33333333333, ans=0.125
+2024-07-28 03:48:30,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=105330.66666666667, ans=0.125
+2024-07-28 03:48:30,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105330.66666666667, ans=0.1
+2024-07-28 03:48:32,432 INFO [train.py:1114] (2/4) Epoch 8, batch 7450, loss[loss=0.2101, simple_loss=0.288, pruned_loss=0.06609, over 4599.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.295, pruned_loss=0.06383, over 937841.21 frames. ], batch size: 11, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:48:33,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=105344.0, ans=0.0
+2024-07-28 03:48:50,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=12.0
+2024-07-28 03:48:53,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=105384.0, ans=0.0
+2024-07-28 03:49:03,912 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.959e+01 6.584e+01 7.550e+01 1.203e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-28 03:49:05,209 INFO [train.py:1114] (2/4) Epoch 8, batch 7500, loss[loss=0.2619, simple_loss=0.3248, pruned_loss=0.09953, over 3457.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2966, pruned_loss=0.06483, over 936573.06 frames. ], batch size: 36, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:49:09,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=105410.66666666667, ans=0.125
+2024-07-28 03:49:09,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=105410.66666666667, ans=0.125
+2024-07-28 03:49:16,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105424.0, ans=0.125
+2024-07-28 03:49:24,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=105450.66666666667, ans=0.035
+2024-07-28 03:49:34,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=105464.0, ans=0.125
+2024-07-28 03:49:39,666 INFO [train.py:1114] (2/4) Epoch 8, batch 7550, loss[loss=0.2517, simple_loss=0.3212, pruned_loss=0.09106, over 4637.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.299, pruned_loss=0.06571, over 936571.39 frames. ], batch size: 17, lr: 9.25e-03, grad_scale: 32.0
+2024-07-28 03:49:47,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105490.66666666667, ans=0.1
+2024-07-28 03:49:48,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105490.66666666667, ans=0.1
+2024-07-28 03:49:52,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=105504.0, ans=0.125
+2024-07-28 03:49:57,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=105504.0, ans=0.0
+2024-07-28 03:50:09,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=105530.66666666667, ans=0.0
+2024-07-28 03:50:09,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=105530.66666666667, ans=0.0
+2024-07-28 03:50:10,698 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 5.717e+01 6.338e+01 7.144e+01 8.798e+01, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 03:50:12,744 INFO [train.py:1114] (2/4) Epoch 8, batch 7600, loss[loss=0.212, simple_loss=0.3007, pruned_loss=0.06163, over 4811.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2967, pruned_loss=0.06463, over 938404.85 frames. ], batch size: 14, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:50:35,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105584.0, ans=0.1
+2024-07-28 03:50:45,955 INFO [train.py:1114] (2/4) Epoch 8, batch 7650, loss[loss=0.1742, simple_loss=0.2594, pruned_loss=0.0445, over 4929.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2953, pruned_loss=0.06426, over 937403.86 frames. ], batch size: 12, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:50:49,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=105610.66666666667, ans=0.07
+2024-07-28 03:50:52,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.92 vs. limit=15.0
+2024-07-28 03:51:01,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0
+2024-07-28 03:51:17,768 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.951e+01 6.499e+01 7.761e+01 1.442e+02, threshold=1.300e+02, percent-clipped=1.0
+2024-07-28 03:51:19,113 INFO [train.py:1114] (2/4) Epoch 8, batch 7700, loss[loss=0.2014, simple_loss=0.2988, pruned_loss=0.05203, over 4695.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2968, pruned_loss=0.06514, over 934406.50 frames. ], batch size: 13, lr: 9.24e-03, grad_scale: 32.0
+2024-07-28 03:51:25,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=105690.66666666667, ans=0.0
+2024-07-28 03:51:27,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=105690.66666666667, ans=0.0
+2024-07-28 03:51:30,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=105690.66666666667, ans=0.125
+2024-07-28 03:51:45,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=105730.66666666667, ans=0.2
+2024-07-28 03:51:47,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105730.66666666667, ans=0.125
+2024-07-28 03:51:51,773 INFO [train.py:1114] (2/4) Epoch 8, batch 7750, loss[loss=0.2237, simple_loss=0.313, pruned_loss=0.06721, over 4929.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2977, pruned_loss=0.06535, over 935791.86 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:52:05,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=105770.66666666667, ans=0.125
+2024-07-28 03:52:23,027 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.861e+01 6.500e+01 7.436e+01 9.708e+01, threshold=1.300e+02, percent-clipped=0.0
+2024-07-28 03:52:24,794 INFO [train.py:1114] (2/4) Epoch 8, batch 7800, loss[loss=0.2221, simple_loss=0.3265, pruned_loss=0.05884, over 4666.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2976, pruned_loss=0.06468, over 937415.42 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:52:25,683 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.48 vs. limit=22.5
+2024-07-28 03:52:26,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=105810.66666666667, ans=0.0
+2024-07-28 03:52:26,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=105810.66666666667, ans=0.07
+2024-07-28 03:52:28,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=105810.66666666667, ans=10.0
+2024-07-28 03:52:38,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0
+2024-07-28 03:52:43,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105837.33333333333, ans=0.125
+2024-07-28 03:52:56,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=105864.0, ans=0.2
+2024-07-28 03:52:56,693 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0
+2024-07-28 03:52:57,146 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:52:59,048 INFO [train.py:1114] (2/4) Epoch 8, batch 7850, loss[loss=0.2039, simple_loss=0.2708, pruned_loss=0.0685, over 4546.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2969, pruned_loss=0.0645, over 936114.09 frames. ], batch size: 10, lr: 9.23e-03, grad_scale: 32.0
+2024-07-28 03:53:14,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=105904.0, ans=0.125
+2024-07-28 03:53:22,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=105917.33333333333, ans=0.125
+2024-07-28 03:53:30,113 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.073e+01 5.939e+01 6.689e+01 8.282e+01 1.225e+02, threshold=1.338e+02, percent-clipped=0.0
+2024-07-28 03:53:31,397 INFO [train.py:1114] (2/4) Epoch 8, batch 7900, loss[loss=0.2366, simple_loss=0.3122, pruned_loss=0.08045, over 4882.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2987, pruned_loss=0.06515, over 933472.55 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:53:33,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=105944.0, ans=0.125
+2024-07-28 03:53:51,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105984.0, ans=0.1
+2024-07-28 03:53:51,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105984.0, ans=0.125
+2024-07-28 03:54:00,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=105997.33333333333, ans=0.05
+2024-07-28 03:54:03,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=105997.33333333333, ans=0.0
+2024-07-28 03:54:04,739 INFO [train.py:1114] (2/4) Epoch 8, batch 7950, loss[loss=0.2713, simple_loss=0.3447, pruned_loss=0.09893, over 3203.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.297, pruned_loss=0.06438, over 935399.57 frames. ], batch size: 35, lr: 9.22e-03, grad_scale: 16.0
+2024-07-28 03:54:07,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.87 vs. limit=15.0
+2024-07-28 03:54:10,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.05 vs. limit=22.5
+2024-07-28 03:54:11,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106010.66666666667, ans=0.125
+2024-07-28 03:54:12,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106024.0, ans=0.125
+2024-07-28 03:54:12,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.31 vs. limit=10.0
+2024-07-28 03:54:15,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=106024.0, ans=0.125
+2024-07-28 03:54:16,348 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:54:30,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=106050.66666666667, ans=10.0
+2024-07-28 03:54:34,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=106064.0, ans=0.125
+2024-07-28 03:54:36,741 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=12.0
+2024-07-28 03:54:39,423 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.772e+01 6.445e+01 7.166e+01 9.685e+01, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 03:54:40,293 INFO [train.py:1114] (2/4) Epoch 8, batch 8000, loss[loss=0.1769, simple_loss=0.249, pruned_loss=0.05239, over 4613.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2958, pruned_loss=0.06429, over 934496.36 frames. ], batch size: 11, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:55:03,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.24 vs. limit=15.0
+2024-07-28 03:55:06,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=106130.66666666667, ans=10.0
+2024-07-28 03:55:08,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.67 vs. limit=22.5
+2024-07-28 03:55:09,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=106130.66666666667, ans=0.125
+2024-07-28 03:55:09,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=106130.66666666667, ans=0.09899494936611666
+2024-07-28 03:55:13,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.35 vs. limit=15.0
+2024-07-28 03:55:14,599 INFO [train.py:1114] (2/4) Epoch 8, batch 8050, loss[loss=0.2, simple_loss=0.2965, pruned_loss=0.05171, over 4803.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2953, pruned_loss=0.06382, over 934215.90 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0
+2024-07-28 03:55:28,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=106170.66666666667, ans=0.2
+2024-07-28 03:55:42,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106197.33333333333, ans=0.125
+2024-07-28 03:55:45,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106197.33333333333, ans=0.0
+2024-07-28 03:55:46,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=106197.33333333333, ans=0.0
+2024-07-28 03:55:47,759 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.934e+01 5.620e+01 6.153e+01 6.973e+01 1.002e+02, threshold=1.231e+02, percent-clipped=0.0
+2024-07-28 03:55:48,434 INFO [train.py:1114] (2/4) Epoch 8, batch 8100, loss[loss=0.2383, simple_loss=0.3269, pruned_loss=0.07487, over 4816.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2958, pruned_loss=0.06392, over 934324.31 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:55:50,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=106210.66666666667, ans=0.2
+2024-07-28 03:55:57,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=106224.0, ans=0.125
+2024-07-28 03:55:58,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=106224.0, ans=0.125
+2024-07-28 03:56:00,339 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=12.0
+2024-07-28 03:56:04,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=106237.33333333333, ans=0.0
+2024-07-28 03:56:04,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=106237.33333333333, ans=0.0
+2024-07-28 03:56:05,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=106237.33333333333, ans=0.0
+2024-07-28 03:56:06,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=106237.33333333333, ans=0.125
+2024-07-28 03:56:08,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.81 vs. limit=22.5
+2024-07-28 03:56:09,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.44 vs. limit=15.0
+2024-07-28 03:56:20,979 INFO [train.py:1114] (2/4) Epoch 8, batch 8150, loss[loss=0.2201, simple_loss=0.3122, pruned_loss=0.06404, over 4814.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2955, pruned_loss=0.06377, over 937547.39 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:56:28,478 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.12 vs. limit=15.0
+2024-07-28 03:56:31,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=106290.66666666667, ans=0.125
+2024-07-28 03:56:39,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106317.33333333333, ans=0.1
+2024-07-28 03:56:52,818 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.056e+01 5.810e+01 6.420e+01 7.411e+01 1.127e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 03:56:53,443 INFO [train.py:1114] (2/4) Epoch 8, batch 8200, loss[loss=0.1956, simple_loss=0.2879, pruned_loss=0.05161, over 4799.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2948, pruned_loss=0.06302, over 938262.44 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:56:57,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.92 vs. limit=15.0
+2024-07-28 03:57:01,965 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.423e-02
+2024-07-28 03:57:07,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106370.66666666667, ans=0.0
+2024-07-28 03:57:11,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106370.66666666667, ans=0.125
+2024-07-28 03:57:16,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=106384.0, ans=0.0
+2024-07-28 03:57:18,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.54 vs. limit=22.5
+2024-07-28 03:57:26,388 INFO [train.py:1114] (2/4) Epoch 8, batch 8250, loss[loss=0.2131, simple_loss=0.2948, pruned_loss=0.06566, over 4898.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.296, pruned_loss=0.06362, over 938299.92 frames. ], batch size: 13, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:57:30,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=106410.66666666667, ans=0.125
+2024-07-28 03:57:34,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=24.70 vs. limit=15.0
+2024-07-28 03:57:37,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=106424.0, ans=0.125
+2024-07-28 03:57:38,092 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0
+2024-07-28 03:57:41,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=106437.33333333333, ans=0.2
+2024-07-28 03:57:45,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=106450.66666666667, ans=0.0
+2024-07-28 03:57:46,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=106450.66666666667, ans=0.125
+2024-07-28 03:57:53,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=106464.0, ans=0.0
+2024-07-28 03:57:57,943 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.787e+01 6.260e+01 6.993e+01 1.105e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 03:57:58,645 INFO [train.py:1114] (2/4) Epoch 8, batch 8300, loss[loss=0.2488, simple_loss=0.3335, pruned_loss=0.08206, over 4903.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2963, pruned_loss=0.06357, over 938506.07 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:58:33,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=106517.33333333333, ans=0.125
+2024-07-28 03:58:45,423 INFO [train.py:1114] (2/4) Epoch 8, batch 8350, loss[loss=0.2203, simple_loss=0.2954, pruned_loss=0.07259, over 4799.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2956, pruned_loss=0.06304, over 941306.16 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:58:54,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=106557.33333333333, ans=0.09899494936611666
+2024-07-28 03:58:57,132 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 03:58:58,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106570.66666666667, ans=0.1
+2024-07-28 03:59:02,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.17 vs. limit=15.0
+2024-07-28 03:59:14,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=106597.33333333333, ans=0.025
+2024-07-28 03:59:15,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=106597.33333333333, ans=0.125
+2024-07-28 03:59:17,895 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.040e+01 6.810e+01 8.092e+01 1.142e+02, threshold=1.362e+02, percent-clipped=0.0
+2024-07-28 03:59:18,626 INFO [train.py:1114] (2/4) Epoch 8, batch 8400, loss[loss=0.2101, simple_loss=0.3036, pruned_loss=0.05833, over 4776.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2956, pruned_loss=0.06301, over 939429.04 frames. ], batch size: 12, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:59:18,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.64 vs. limit=10.0
+2024-07-28 03:59:22,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.44 vs. limit=15.0
+2024-07-28 03:59:24,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0
+2024-07-28 03:59:25,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=106624.0, ans=0.125
+2024-07-28 03:59:33,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=106637.33333333333, ans=0.025
+2024-07-28 03:59:36,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106637.33333333333, ans=0.125
+2024-07-28 03:59:40,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=106650.66666666667, ans=0.2
+2024-07-28 03:59:45,422 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.76 vs. limit=10.0
+2024-07-28 03:59:48,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=106664.0, ans=0.125
+2024-07-28 03:59:52,393 INFO [train.py:1114] (2/4) Epoch 8, batch 8450, loss[loss=0.23, simple_loss=0.3157, pruned_loss=0.0722, over 4804.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2964, pruned_loss=0.06341, over 938408.92 frames. ], batch size: 15, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 03:59:55,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=106677.33333333333, ans=0.0
+2024-07-28 04:00:01,098 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.25 vs. limit=22.5
+2024-07-28 04:00:03,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=106690.66666666667, ans=0.0
+2024-07-28 04:00:06,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=106704.0, ans=0.0
+2024-07-28 04:00:07,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106704.0, ans=0.125
+2024-07-28 04:00:08,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=106704.0, ans=0.2
+2024-07-28 04:00:23,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106730.66666666667, ans=0.1
+2024-07-28 04:00:25,076 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.985e+01 6.391e+01 7.364e+01 1.076e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-28 04:00:25,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=106744.0, ans=0.125
+2024-07-28 04:00:25,751 INFO [train.py:1114] (2/4) Epoch 8, batch 8500, loss[loss=0.1952, simple_loss=0.2808, pruned_loss=0.05482, over 4619.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2958, pruned_loss=0.06342, over 938418.84 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 04:00:28,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.45 vs. limit=22.5
+2024-07-28 04:00:34,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=106757.33333333333, ans=0.0
+2024-07-28 04:00:40,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=106770.66666666667, ans=0.125
+2024-07-28 04:00:45,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=106784.0, ans=0.0
+2024-07-28 04:00:47,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106784.0, ans=0.0
+2024-07-28 04:00:47,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106784.0, ans=0.125
+2024-07-28 04:00:58,741 INFO [train.py:1114] (2/4) Epoch 8, batch 8550, loss[loss=0.181, simple_loss=0.2437, pruned_loss=0.0591, over 4796.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2958, pruned_loss=0.06357, over 939453.53 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 04:01:03,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=106810.66666666667, ans=10.0
+2024-07-28 04:01:06,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106824.0, ans=0.125
+2024-07-28 04:01:10,536 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.23 vs. limit=15.0
+2024-07-28 04:01:20,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=106850.66666666667, ans=0.0
+2024-07-28 04:01:20,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=106850.66666666667, ans=0.025
+2024-07-28 04:01:28,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.69 vs. limit=15.0
+2024-07-28 04:01:31,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.840e+01 6.821e+01 7.770e+01 1.284e+02, threshold=1.364e+02, percent-clipped=1.0
+2024-07-28 04:01:32,069 INFO [train.py:1114] (2/4) Epoch 8, batch 8600, loss[loss=0.229, simple_loss=0.3226, pruned_loss=0.0677, over 4797.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.296, pruned_loss=0.06422, over 938570.52 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:01:50,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=106904.0, ans=0.2
+2024-07-28 04:01:59,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106930.66666666667, ans=0.0
+2024-07-28 04:02:02,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5
+2024-07-28 04:02:04,638 INFO [train.py:1114] (2/4) Epoch 8, batch 8650, loss[loss=0.2408, simple_loss=0.3215, pruned_loss=0.08005, over 4891.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2956, pruned_loss=0.06412, over 940174.58 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:02:15,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106957.33333333333, ans=0.0
+2024-07-28 04:02:20,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=106970.66666666667, ans=0.125
+2024-07-28 04:02:25,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=106984.0, ans=0.125
+2024-07-28 04:02:25,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. limit=15.0
+2024-07-28 04:02:32,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.01 vs. limit=22.5
+2024-07-28 04:02:37,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.17 vs. limit=12.0
+2024-07-28 04:02:37,622 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.079e+01 7.020e+01 8.285e+01 1.215e+02, threshold=1.404e+02, percent-clipped=0.0
+2024-07-28 04:02:38,287 INFO [train.py:1114] (2/4) Epoch 8, batch 8700, loss[loss=0.2112, simple_loss=0.3012, pruned_loss=0.06063, over 4757.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2965, pruned_loss=0.06493, over 938085.19 frames. ], batch size: 13, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:02:38,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=107010.66666666667, ans=0.0
+2024-07-28 04:02:49,195 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.97 vs. limit=15.0
+2024-07-28 04:02:51,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=107024.0, ans=0.2
+2024-07-28 04:03:05,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107064.0, ans=0.125
+2024-07-28 04:03:11,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=107064.0, ans=0.0
+2024-07-28 04:03:12,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=107077.33333333333, ans=0.2
+2024-07-28 04:03:13,343 INFO [train.py:1114] (2/4) Epoch 8, batch 8750, loss[loss=0.2278, simple_loss=0.3233, pruned_loss=0.06617, over 4693.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2964, pruned_loss=0.06503, over 936615.08 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:03:19,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=107077.33333333333, ans=0.2
+2024-07-28 04:03:25,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107090.66666666667, ans=0.125
+2024-07-28 04:03:27,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.45 vs. 
limit=10.0 +2024-07-28 04:03:30,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.71 vs. limit=22.5 +2024-07-28 04:03:32,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=107104.0, ans=0.07 +2024-07-28 04:03:35,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=107117.33333333333, ans=0.0 +2024-07-28 04:03:41,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=107130.66666666667, ans=0.125 +2024-07-28 04:03:41,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. limit=6.0 +2024-07-28 04:03:45,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=107130.66666666667, ans=0.2 +2024-07-28 04:03:46,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107130.66666666667, ans=0.1 +2024-07-28 04:03:46,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.010e+01 6.887e+01 8.098e+01 1.294e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 04:03:47,351 INFO [train.py:1114] (2/4) Epoch 8, batch 8800, loss[loss=0.1751, simple_loss=0.2733, pruned_loss=0.03849, over 4938.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2965, pruned_loss=0.06451, over 937616.46 frames. ], batch size: 14, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:03:54,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=12.0 +2024-07-28 04:04:04,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=107170.66666666667, ans=0.125 +2024-07-28 04:04:04,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=107170.66666666667, ans=0.0 +2024-07-28 04:04:17,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=107197.33333333333, ans=0.0 +2024-07-28 04:04:20,448 INFO [train.py:1114] (2/4) Epoch 8, batch 8850, loss[loss=0.2375, simple_loss=0.3341, pruned_loss=0.0704, over 4553.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2955, pruned_loss=0.06409, over 932269.35 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:04:30,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=107224.0, ans=0.0 +2024-07-28 04:04:33,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=107224.0, ans=0.0 +2024-07-28 04:04:40,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=107250.66666666667, ans=0.2 +2024-07-28 04:04:40,804 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:04:47,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.64 vs. 
limit=22.5 +2024-07-28 04:04:53,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.819e+01 6.564e+01 7.832e+01 1.170e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 04:04:53,898 INFO [train.py:1114] (2/4) Epoch 8, batch 8900, loss[loss=0.1955, simple_loss=0.2706, pruned_loss=0.06025, over 4944.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2961, pruned_loss=0.0641, over 930378.89 frames. ], batch size: 12, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:05:06,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107304.0, ans=0.125 +2024-07-28 04:05:11,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=107304.0, ans=10.0 +2024-07-28 04:05:19,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107330.66666666667, ans=0.125 +2024-07-28 04:05:26,186 INFO [train.py:1114] (2/4) Epoch 8, batch 8950, loss[loss=0.2366, simple_loss=0.32, pruned_loss=0.0766, over 4548.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.296, pruned_loss=0.06411, over 930901.84 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0 +2024-07-28 04:05:27,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=107344.0, ans=0.125 +2024-07-28 04:05:28,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=107344.0, ans=0.125 +2024-07-28 04:05:30,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107344.0, ans=0.125 +2024-07-28 04:05:34,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=107357.33333333333, ans=22.5 +2024-07-28 04:05:35,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=107357.33333333333, ans=0.2 +2024-07-28 04:05:50,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. limit=15.0 +2024-07-28 04:05:55,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=107397.33333333333, ans=0.0 +2024-07-28 04:05:58,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.74 vs. limit=22.5 +2024-07-28 04:06:00,000 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.334e+01 5.823e+01 6.551e+01 7.649e+01 1.257e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 04:06:00,703 INFO [train.py:1114] (2/4) Epoch 8, batch 9000, loss[loss=0.2194, simple_loss=0.2978, pruned_loss=0.07052, over 4640.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2951, pruned_loss=0.06404, over 934080.00 frames. ], batch size: 12, lr: 9.16e-03, grad_scale: 32.0 +2024-07-28 04:06:00,704 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 04:06:12,615 INFO [train.py:1146] (2/4) Epoch 8, validation: loss=0.1781, simple_loss=0.2826, pruned_loss=0.03685, over 944034.00 frames. 
+2024-07-28 04:06:12,618 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 04:06:17,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.16 vs. limit=15.0 +2024-07-28 04:06:23,191 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:06:28,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=107437.33333333333, ans=0.025 +2024-07-28 04:06:28,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107437.33333333333, ans=0.1 +2024-07-28 04:06:30,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107437.33333333333, ans=0.1 +2024-07-28 04:06:33,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=107450.66666666667, ans=0.0 +2024-07-28 04:06:35,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107450.66666666667, ans=0.125 +2024-07-28 04:06:36,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=107450.66666666667, ans=0.0 +2024-07-28 04:06:37,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 04:06:41,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=107464.0, ans=0.125 +2024-07-28 04:06:44,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107464.0, ans=0.1 +2024-07-28 04:06:45,367 INFO [train.py:1114] (2/4) Epoch 8, batch 9050, loss[loss=0.1938, simple_loss=0.2658, pruned_loss=0.06089, over 4498.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2949, pruned_loss=0.06442, over 934478.31 frames. ], batch size: 10, lr: 9.16e-03, grad_scale: 32.0 +2024-07-28 04:06:49,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=107477.33333333333, ans=0.2 +2024-07-28 04:06:49,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=107477.33333333333, ans=10.0 +2024-07-28 04:06:54,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=107490.66666666667, ans=15.0 +2024-07-28 04:06:54,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.07 vs. limit=12.0 +2024-07-28 04:07:05,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.33 vs. limit=22.5 +2024-07-28 04:07:14,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=107530.66666666667, ans=0.125 +2024-07-28 04:07:14,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.45 vs. 
limit=22.5 +2024-07-28 04:07:15,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=107530.66666666667, ans=0.125 +2024-07-28 04:07:16,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.793e+01 6.353e+01 7.194e+01 9.869e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 04:07:17,491 INFO [train.py:1114] (2/4) Epoch 8, batch 9100, loss[loss=0.22, simple_loss=0.2978, pruned_loss=0.07111, over 4934.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2955, pruned_loss=0.06447, over 936974.84 frames. ], batch size: 14, lr: 9.16e-03, grad_scale: 32.0 +2024-07-28 04:07:31,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107570.66666666667, ans=0.1 +2024-07-28 04:07:32,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=107570.66666666667, ans=0.0 +2024-07-28 04:07:35,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=107584.0, ans=0.0 +2024-07-28 04:07:38,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107584.0, ans=0.1 +2024-07-28 04:07:39,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107584.0, ans=0.125 +2024-07-28 04:07:49,001 INFO [train.py:1114] (2/4) Epoch 8, batch 9150, loss[loss=0.2054, simple_loss=0.3002, pruned_loss=0.05528, over 4811.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.297, pruned_loss=0.0649, over 935775.32 frames. ], batch size: 14, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:07:51,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.52 vs. limit=22.5 +2024-07-28 04:07:59,817 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.71 vs. limit=15.0 +2024-07-28 04:08:04,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=107637.33333333333, ans=0.0 +2024-07-28 04:08:07,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107650.66666666667, ans=0.125 +2024-07-28 04:08:17,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107664.0, ans=0.125 +2024-07-28 04:08:19,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=6.0 +2024-07-28 04:08:19,833 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.934e+01 6.513e+01 7.499e+01 1.086e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 04:08:20,440 INFO [train.py:1114] (2/4) Epoch 8, batch 9200, loss[loss=0.2233, simple_loss=0.3052, pruned_loss=0.07075, over 4854.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2962, pruned_loss=0.06439, over 937355.73 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:08:35,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. 
limit=15.0 +2024-07-28 04:08:35,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=107704.0, ans=0.125 +2024-07-28 04:08:38,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0 +2024-07-28 04:08:43,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=107717.33333333333, ans=0.125 +2024-07-28 04:08:46,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=107730.66666666667, ans=10.0 +2024-07-28 04:08:51,961 INFO [train.py:1114] (2/4) Epoch 8, batch 9250, loss[loss=0.204, simple_loss=0.2977, pruned_loss=0.05516, over 4634.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2944, pruned_loss=0.0631, over 938228.59 frames. ], batch size: 13, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:08:52,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=107744.0, ans=0.0 +2024-07-28 04:08:57,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107757.33333333333, ans=0.1 +2024-07-28 04:09:00,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=107757.33333333333, ans=0.125 +2024-07-28 04:09:13,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107784.0, ans=0.125 +2024-07-28 04:09:22,904 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.836e+01 6.420e+01 7.069e+01 1.211e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 04:09:23,544 INFO [train.py:1114] (2/4) Epoch 8, batch 9300, loss[loss=0.2063, simple_loss=0.2833, pruned_loss=0.06461, over 4790.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.294, pruned_loss=0.06277, over 938131.12 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0 +2024-07-28 04:09:25,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107810.66666666667, ans=0.0 +2024-07-28 04:09:34,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=107824.0, ans=0.125 +2024-07-28 04:09:41,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107850.66666666667, ans=0.1 +2024-07-28 04:09:52,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=107864.0, ans=0.125 +2024-07-28 04:09:55,755 INFO [train.py:1114] (2/4) Epoch 8, batch 9350, loss[loss=0.1812, simple_loss=0.2642, pruned_loss=0.0491, over 4804.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2948, pruned_loss=0.06313, over 935680.42 frames. 
], batch size: 11, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:10:00,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107877.33333333333, ans=0.125 +2024-07-28 04:10:13,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=107904.0, ans=0.125 +2024-07-28 04:10:22,526 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.36 vs. limit=15.0 +2024-07-28 04:10:22,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107930.66666666667, ans=0.125 +2024-07-28 04:10:27,326 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.454e+01 5.997e+01 6.849e+01 9.161e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 04:10:27,985 INFO [train.py:1114] (2/4) Epoch 8, batch 9400, loss[loss=0.1985, simple_loss=0.2858, pruned_loss=0.05556, over 4694.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2953, pruned_loss=0.06345, over 933405.07 frames. ], batch size: 13, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:10:47,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=107970.66666666667, ans=0.0 +2024-07-28 04:10:49,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=107970.66666666667, ans=0.0 +2024-07-28 04:11:02,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=108010.66666666667, ans=0.0 +2024-07-28 04:11:03,417 INFO [train.py:1114] (2/4) Epoch 8, batch 9450, loss[loss=0.1843, simple_loss=0.2664, pruned_loss=0.05116, over 4804.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2951, pruned_loss=0.06331, over 932846.79 frames. ], batch size: 11, lr: 9.14e-03, grad_scale: 32.0 +2024-07-28 04:11:04,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108010.66666666667, ans=0.1 +2024-07-28 04:11:11,207 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.44 vs. limit=10.0 +2024-07-28 04:11:17,880 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.40 vs. limit=15.0 +2024-07-28 04:11:34,697 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 5.745e+01 6.311e+01 7.517e+01 1.007e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 04:11:34,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=108077.33333333333, ans=0.0 +2024-07-28 04:11:35,353 INFO [train.py:1114] (2/4) Epoch 8, batch 9500, loss[loss=0.199, simple_loss=0.2791, pruned_loss=0.05946, over 4707.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2963, pruned_loss=0.0638, over 934887.49 frames. 
], batch size: 12, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:11:39,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108077.33333333333, ans=0.125 +2024-07-28 04:11:45,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=108090.66666666667, ans=0.0 +2024-07-28 04:11:52,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=108104.0, ans=0.95 +2024-07-28 04:12:00,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=108130.66666666667, ans=0.125 +2024-07-28 04:12:06,610 INFO [train.py:1114] (2/4) Epoch 8, batch 9550, loss[loss=0.1905, simple_loss=0.2719, pruned_loss=0.0546, over 4776.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2958, pruned_loss=0.06373, over 931871.28 frames. ], batch size: 12, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:12:07,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108144.0, ans=0.1 +2024-07-28 04:12:10,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108144.0, ans=0.125 +2024-07-28 04:12:20,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=108170.66666666667, ans=0.025 +2024-07-28 04:12:23,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108170.66666666667, ans=0.1 +2024-07-28 04:12:29,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=108184.0, ans=0.025 +2024-07-28 04:12:31,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=108197.33333333333, ans=0.125 +2024-07-28 04:12:31,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108197.33333333333, ans=0.1 +2024-07-28 04:12:36,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=108197.33333333333, ans=0.125 +2024-07-28 04:12:37,466 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 5.899e+01 6.832e+01 8.942e+01 1.153e+02, threshold=1.366e+02, percent-clipped=0.0 +2024-07-28 04:12:38,160 INFO [train.py:1114] (2/4) Epoch 8, batch 9600, loss[loss=0.2428, simple_loss=0.302, pruned_loss=0.09175, over 3115.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2955, pruned_loss=0.0633, over 930644.92 frames. 
], batch size: 35, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:12:42,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=108210.66666666667, ans=0.125 +2024-07-28 04:12:50,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=108237.33333333333, ans=0.125 +2024-07-28 04:12:52,595 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:12:52,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.32 vs. limit=15.0 +2024-07-28 04:12:59,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=108250.66666666667, ans=0.125 +2024-07-28 04:13:07,044 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:13:07,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.32 vs. limit=15.0 +2024-07-28 04:13:10,120 INFO [train.py:1114] (2/4) Epoch 8, batch 9650, loss[loss=0.2206, simple_loss=0.3217, pruned_loss=0.05979, over 4835.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2965, pruned_loss=0.06403, over 926060.26 frames. ], batch size: 16, lr: 9.13e-03, grad_scale: 32.0 +2024-07-28 04:13:14,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=12.0 +2024-07-28 04:13:15,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108277.33333333333, ans=0.1 +2024-07-28 04:13:16,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.52 vs. limit=15.0 +2024-07-28 04:13:21,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-07-28 04:13:23,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=108304.0, ans=0.125 +2024-07-28 04:13:27,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=108304.0, ans=0.04949747468305833 +2024-07-28 04:13:28,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=108317.33333333333, ans=0.0 +2024-07-28 04:13:35,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.44 vs. limit=22.5 +2024-07-28 04:13:35,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.75 vs. limit=15.0 +2024-07-28 04:13:40,241 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.895e+01 5.890e+01 6.394e+01 7.383e+01 1.171e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 04:13:40,943 INFO [train.py:1114] (2/4) Epoch 8, batch 9700, loss[loss=0.2346, simple_loss=0.3163, pruned_loss=0.07641, over 4483.00 frames. 
], tot_loss[loss=0.2132, simple_loss=0.2975, pruned_loss=0.06447, over 924288.98 frames. ], batch size: 26, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:13:46,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=108344.0, ans=0.125 +2024-07-28 04:13:52,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108370.66666666667, ans=0.125 +2024-07-28 04:13:59,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108384.0, ans=0.125 +2024-07-28 04:14:02,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=108384.0, ans=0.0 +2024-07-28 04:14:11,904 INFO [train.py:1114] (2/4) Epoch 8, batch 9750, loss[loss=0.2082, simple_loss=0.2909, pruned_loss=0.06277, over 4687.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2971, pruned_loss=0.06426, over 924730.56 frames. ], batch size: 15, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:14,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=108410.66666666667, ans=0.025 +2024-07-28 04:14:15,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=108410.66666666667, ans=0.0 +2024-07-28 04:14:20,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=108424.0, ans=0.05 +2024-07-28 04:14:32,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.18 vs. limit=22.5 +2024-07-28 04:14:34,449 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.68 vs. limit=22.5 +2024-07-28 04:14:42,362 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 6.068e+01 7.067e+01 8.452e+01 1.289e+02, threshold=1.413e+02, percent-clipped=1.0 +2024-07-28 04:14:42,979 INFO [train.py:1114] (2/4) Epoch 8, batch 9800, loss[loss=0.2244, simple_loss=0.3103, pruned_loss=0.06922, over 4716.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2963, pruned_loss=0.06433, over 924192.74 frames. ], batch size: 12, lr: 9.12e-03, grad_scale: 32.0 +2024-07-28 04:14:45,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=108477.33333333333, ans=0.025 +2024-07-28 04:14:55,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=108504.0, ans=0.0 +2024-07-28 04:14:57,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=108504.0, ans=0.125 +2024-07-28 04:15:00,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=108504.0, ans=0.2 +2024-07-28 04:15:11,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108530.66666666667, ans=0.125 +2024-07-28 04:15:13,626 INFO [train.py:1114] (2/4) Epoch 8, batch 9850, loss[loss=0.234, simple_loss=0.3352, pruned_loss=0.06639, over 4895.00 frames. 
], tot_loss[loss=0.2128, simple_loss=0.2967, pruned_loss=0.06446, over 926825.70 frames. ], batch size: 15, lr: 9.11e-03, grad_scale: 32.0 +2024-07-28 04:15:14,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108544.0, ans=0.1 +2024-07-28 04:15:23,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.21 vs. limit=22.5 +2024-07-28 04:15:26,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=108570.66666666667, ans=0.015 +2024-07-28 04:15:27,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.18 vs. limit=15.0 +2024-07-28 04:15:44,369 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.942e+01 6.515e+01 7.439e+01 1.419e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 04:15:45,062 INFO [train.py:1114] (2/4) Epoch 8, batch 9900, loss[loss=0.232, simple_loss=0.3216, pruned_loss=0.07122, over 4849.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2977, pruned_loss=0.06555, over 925930.53 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 32.0 +2024-07-28 04:15:59,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. limit=15.0 +2024-07-28 04:16:03,343 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:16:08,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108650.66666666667, ans=0.125 +2024-07-28 04:16:11,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=108664.0, ans=0.95 +2024-07-28 04:16:15,788 INFO [train.py:1114] (2/4) Epoch 8, batch 9950, loss[loss=0.1664, simple_loss=0.262, pruned_loss=0.03543, over 4790.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2983, pruned_loss=0.06611, over 928485.26 frames. ], batch size: 11, lr: 9.11e-03, grad_scale: 64.0 +2024-07-28 04:16:23,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=108690.66666666667, ans=0.0 +2024-07-28 04:16:25,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108690.66666666667, ans=0.1 +2024-07-28 04:16:27,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108690.66666666667, ans=0.125 +2024-07-28 04:16:27,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=108704.0, ans=0.2 +2024-07-28 04:16:32,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.49 vs. limit=15.0 +2024-07-28 04:16:37,038 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.39 vs. 
limit=15.0 +2024-07-28 04:16:38,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=108717.33333333333, ans=0.025 +2024-07-28 04:16:38,339 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=15.0 +2024-07-28 04:16:38,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108717.33333333333, ans=0.1 +2024-07-28 04:16:39,859 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:16:43,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108730.66666666667, ans=0.125 +2024-07-28 04:16:46,386 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.881e+01 6.237e+01 7.241e+01 1.097e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 04:16:46,977 INFO [train.py:1114] (2/4) Epoch 8, batch 10000, loss[loss=0.2206, simple_loss=0.3017, pruned_loss=0.06975, over 4646.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.3006, pruned_loss=0.06666, over 926011.79 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 64.0 +2024-07-28 04:16:58,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=108770.66666666667, ans=0.0 +2024-07-28 04:17:02,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=108770.66666666667, ans=0.025 +2024-07-28 04:17:03,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=108770.66666666667, ans=0.0 +2024-07-28 04:17:04,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=108784.0, ans=0.125 +2024-07-28 04:17:04,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=108784.0, ans=0.125 +2024-07-28 04:17:21,343 INFO [train.py:1114] (2/4) Epoch 8, batch 10050, loss[loss=0.335, simple_loss=0.3794, pruned_loss=0.1453, over 3640.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.304, pruned_loss=0.0686, over 914343.50 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 64.0 +2024-07-28 04:17:24,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=108810.66666666667, ans=10.0 +2024-07-28 04:17:26,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.65 vs. limit=15.0 +2024-07-28 04:17:28,140 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.18 vs. limit=10.0 +2024-07-28 04:17:35,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. 
limit=15.0 +2024-07-28 04:17:50,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108864.0, ans=0.0 +2024-07-28 04:17:52,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=108864.0, ans=0.04949747468305833 +2024-07-28 04:17:52,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=108864.0, ans=0.0 +2024-07-28 04:17:55,197 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.508e+01 7.175e+01 7.748e+01 1.103e+02, threshold=1.435e+02, percent-clipped=0.0 +2024-07-28 04:17:55,230 INFO [train.py:1114] (2/4) Epoch 8, batch 10100, loss[loss=0.2878, simple_loss=0.349, pruned_loss=0.1133, over 3368.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3085, pruned_loss=0.07398, over 863653.04 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:05,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.90 vs. limit=15.0 +2024-07-28 04:18:11,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=108904.0, ans=0.125 +2024-07-28 04:18:15,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108917.33333333333, ans=0.1 +2024-07-28 04:18:18,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108917.33333333333, ans=0.1 +2024-07-28 04:18:24,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=108930.66666666667, ans=0.0 +2024-07-28 04:18:27,873 INFO [train.py:1114] (2/4) Epoch 8, batch 10150, loss[loss=0.2546, simple_loss=0.3274, pruned_loss=0.09088, over 3238.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3127, pruned_loss=0.07868, over 820647.79 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:18:29,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=108944.0, ans=0.0 +2024-07-28 04:18:33,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108957.33333333333, ans=0.1 +2024-07-28 04:18:37,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=108957.33333333333, ans=0.2 +2024-07-28 04:18:39,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=108957.33333333333, ans=0.04949747468305833 +2024-07-28 04:18:42,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=108970.66666666667, ans=0.02 +2024-07-28 04:18:44,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.78 vs. 
limit=22.5 +2024-07-28 04:18:50,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=108984.0, ans=0.125 +2024-07-28 04:18:52,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=108997.33333333333, ans=0.2 +2024-07-28 04:18:54,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108997.33333333333, ans=0.125 +2024-07-28 04:18:58,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=109010.66666666667, ans=0.025 +2024-07-28 04:18:59,463 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.833e+01 6.827e+01 7.269e+01 7.768e+01 1.197e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-28 04:18:59,496 INFO [train.py:1114] (2/4) Epoch 8, batch 10200, loss[loss=0.313, simple_loss=0.3694, pruned_loss=0.1283, over 3351.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3156, pruned_loss=0.08196, over 787775.40 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0 +2024-07-28 04:19:49,762 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:19:59,116 INFO [train.py:1114] (2/4) Epoch 9, batch 0, loss[loss=0.2069, simple_loss=0.2852, pruned_loss=0.0643, over 4848.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2852, pruned_loss=0.0643, over 4848.00 frames. ], batch size: 12, lr: 8.61e-03, grad_scale: 32.0 +2024-07-28 04:19:59,116 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 04:20:10,795 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.1818, simple_loss=0.2877, pruned_loss=0.03795, over 944034.00 frames. +2024-07-28 04:20:10,796 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 04:20:23,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=109053.33333333333, ans=0.05 +2024-07-28 04:20:29,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=109066.66666666667, ans=0.125 +2024-07-28 04:20:30,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=109066.66666666667, ans=0.025 +2024-07-28 04:20:37,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109080.0, ans=0.125 +2024-07-28 04:20:45,163 INFO [train.py:1114] (2/4) Epoch 9, batch 50, loss[loss=0.1966, simple_loss=0.2811, pruned_loss=0.0561, over 4608.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2992, pruned_loss=0.06371, over 206342.27 frames. 
], batch size: 11, lr: 8.61e-03, grad_scale: 32.0 +2024-07-28 04:20:47,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109106.66666666667, ans=0.1 +2024-07-28 04:20:49,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=109106.66666666667, ans=0.0 +2024-07-28 04:21:01,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=109133.33333333333, ans=0.025 +2024-07-28 04:21:04,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=109133.33333333333, ans=0.95 +2024-07-28 04:21:05,385 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 5.804e+01 6.519e+01 7.318e+01 1.022e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 04:21:16,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109160.0, ans=0.0 +2024-07-28 04:21:16,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=109160.0, ans=0.125 +2024-07-28 04:21:22,072 INFO [train.py:1114] (2/4) Epoch 9, batch 100, loss[loss=0.2021, simple_loss=0.2804, pruned_loss=0.06186, over 4645.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2988, pruned_loss=0.06383, over 365286.04 frames. ], batch size: 12, lr: 8.60e-03, grad_scale: 32.0 +2024-07-28 04:21:24,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=109173.33333333333, ans=0.025 +2024-07-28 04:21:35,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109200.0, ans=0.125 +2024-07-28 04:21:36,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=109200.0, ans=10.0 +2024-07-28 04:21:42,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109200.0, ans=0.0 +2024-07-28 04:21:45,867 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.15 vs. limit=15.0 +2024-07-28 04:21:55,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=6.0 +2024-07-28 04:21:59,533 INFO [train.py:1114] (2/4) Epoch 9, batch 150, loss[loss=0.1537, simple_loss=0.2442, pruned_loss=0.03163, over 4610.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2972, pruned_loss=0.06255, over 493980.84 frames. 
], batch size: 11, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:22:03,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff2.min_abs, batch_count=109240.0, ans=0.1
+2024-07-28 04:22:17,897 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.760e+01 6.227e+01 6.826e+01 1.008e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 04:22:24,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=109280.0, ans=0.125
+2024-07-28 04:22:32,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109306.66666666667, ans=0.1
+2024-07-28 04:22:32,692 INFO [train.py:1114] (2/4) Epoch 9, batch 200, loss[loss=0.2334, simple_loss=0.3208, pruned_loss=0.07296, over 4473.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2954, pruned_loss=0.06203, over 593685.25 frames. ], batch size: 21, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:22:34,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109306.66666666667, ans=0.125
+2024-07-28 04:22:41,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=109320.0, ans=0.125
+2024-07-28 04:22:42,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0
+2024-07-28 04:22:43,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=109320.0, ans=0.0
+2024-07-28 04:22:43,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=109320.0, ans=0.125
+2024-07-28 04:22:44,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=109320.0, ans=0.125
+2024-07-28 04:23:02,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=109360.0, ans=0.2
+2024-07-28 04:23:02,958 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0
+2024-07-28 04:23:05,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109373.33333333333, ans=0.1
+2024-07-28 04:23:05,831 INFO [train.py:1114] (2/4) Epoch 9, batch 250, loss[loss=0.2556, simple_loss=0.3316, pruned_loss=0.08976, over 4609.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2938, pruned_loss=0.061, over 670812.77 frames. ], batch size: 16, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:23:06,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.48 vs. limit=12.0
+2024-07-28 04:23:11,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. limit=6.0
+2024-07-28 04:23:16,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=109386.66666666667, ans=0.125
+2024-07-28 04:23:26,179 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.084e+01 6.743e+01 8.358e+01 1.381e+02, threshold=1.349e+02, percent-clipped=2.0
+2024-07-28 04:23:26,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=109400.0, ans=0.09899494936611666
+2024-07-28 04:23:33,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=109413.33333333333, ans=0.125
+2024-07-28 04:23:33,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109426.66666666667, ans=0.2
+2024-07-28 04:23:34,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.68 vs. limit=15.0
+2024-07-28 04:23:40,752 INFO [train.py:1114] (2/4) Epoch 9, batch 300, loss[loss=0.1894, simple_loss=0.2793, pruned_loss=0.04974, over 4800.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2934, pruned_loss=0.06125, over 730552.02 frames. ], batch size: 15, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:23:44,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=109440.0, ans=10.0
+2024-07-28 04:23:50,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.13 vs. limit=15.0
+2024-07-28 04:23:51,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109453.33333333333, ans=0.125
+2024-07-28 04:23:52,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=109453.33333333333, ans=0.125
+2024-07-28 04:24:00,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=109480.0, ans=0.0
+2024-07-28 04:24:01,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109480.0, ans=0.1
+2024-07-28 04:24:02,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=109480.0, ans=0.025
+2024-07-28 04:24:02,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109480.0, ans=0.1
+2024-07-28 04:24:14,380 INFO [train.py:1114] (2/4) Epoch 9, batch 350, loss[loss=0.2533, simple_loss=0.3224, pruned_loss=0.09207, over 4940.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2939, pruned_loss=0.06168, over 776150.64 frames. ], batch size: 12, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:24:23,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.76 vs. limit=6.0
+2024-07-28 04:24:27,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=109533.33333333333, ans=0.125
+2024-07-28 04:24:32,535 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.878e+01 6.356e+01 6.901e+01 1.235e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 04:24:40,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109560.0, ans=0.0
+2024-07-28 04:24:41,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=109560.0, ans=0.0
+2024-07-28 04:24:47,188 INFO [train.py:1114] (2/4) Epoch 9, batch 400, loss[loss=0.2016, simple_loss=0.2942, pruned_loss=0.05455, over 4695.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2919, pruned_loss=0.06058, over 813497.50 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:24:47,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=109573.33333333333, ans=0.0
+2024-07-28 04:25:02,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=12.0
+2024-07-28 04:25:08,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=109613.33333333333, ans=0.2
+2024-07-28 04:25:19,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109626.66666666667, ans=0.125
+2024-07-28 04:25:20,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=109640.0, ans=0.125
+2024-07-28 04:25:21,007 INFO [train.py:1114] (2/4) Epoch 9, batch 450, loss[loss=0.1736, simple_loss=0.2628, pruned_loss=0.04217, over 4630.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2919, pruned_loss=0.06055, over 839077.66 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:25:32,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=109653.33333333333, ans=0.125
+2024-07-28 04:25:33,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.74 vs. limit=5.0
+2024-07-28 04:25:33,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109666.66666666667, ans=0.125
+2024-07-28 04:25:33,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0
+2024-07-28 04:25:39,081 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.801e+01 6.257e+01 7.055e+01 9.311e+01, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 04:25:47,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=109693.33333333333, ans=0.125
+2024-07-28 04:25:48,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=109693.33333333333, ans=0.05
+2024-07-28 04:25:49,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109693.33333333333, ans=0.1
+2024-07-28 04:25:53,415 INFO [train.py:1114] (2/4) Epoch 9, batch 500, loss[loss=0.2755, simple_loss=0.367, pruned_loss=0.092, over 4689.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2925, pruned_loss=0.06111, over 861786.84 frames. ], batch size: 15, lr: 8.58e-03, grad_scale: 32.0
+2024-07-28 04:26:33,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=109733.33333333333, ans=0.04949747468305833
+2024-07-28 04:26:43,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109746.66666666667, ans=0.0
+2024-07-28 04:26:49,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=109760.0, ans=0.0
+2024-07-28 04:26:51,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=109760.0, ans=0.125
+2024-07-28 04:26:52,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=109760.0, ans=0.2
+2024-07-28 04:26:55,067 INFO [train.py:1114] (2/4) Epoch 9, batch 550, loss[loss=0.2007, simple_loss=0.2924, pruned_loss=0.05448, over 4627.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2925, pruned_loss=0.06077, over 878131.99 frames. ], batch size: 17, lr: 8.58e-03, grad_scale: 32.0
+2024-07-28 04:26:58,201 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.21 vs. limit=15.0
+2024-07-28 04:27:00,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.43 vs. limit=15.0
+2024-07-28 04:27:02,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109786.66666666667, ans=0.1
+2024-07-28 04:27:15,816 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.880e+01 6.464e+01 7.237e+01 1.061e+02, threshold=1.293e+02, percent-clipped=0.0
+2024-07-28 04:27:16,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=109800.0, ans=0.2
+2024-07-28 04:27:19,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=109813.33333333333, ans=10.0
+2024-07-28 04:27:22,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=109813.33333333333, ans=0.0
+2024-07-28 04:27:47,012 INFO [train.py:1114] (2/4) Epoch 9, batch 600, loss[loss=0.2145, simple_loss=0.3056, pruned_loss=0.06174, over 4644.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2936, pruned_loss=0.06144, over 892528.93 frames. ], batch size: 16, lr: 8.58e-03, grad_scale: 32.0
+2024-07-28 04:27:52,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0
+2024-07-28 04:27:53,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109853.33333333333, ans=0.125
+2024-07-28 04:28:01,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109866.66666666667, ans=0.125
+2024-07-28 04:28:05,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=109866.66666666667, ans=0.0
+2024-07-28 04:28:12,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=109880.0, ans=0.125
+2024-07-28 04:28:13,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.01 vs. limit=22.5
+2024-07-28 04:28:19,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109906.66666666667, ans=0.1
+2024-07-28 04:28:20,243 INFO [train.py:1114] (2/4) Epoch 9, batch 650, loss[loss=0.2382, simple_loss=0.3238, pruned_loss=0.07626, over 4755.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2934, pruned_loss=0.06138, over 903996.58 frames. ], batch size: 13, lr: 8.58e-03, grad_scale: 32.0
+2024-07-28 04:28:33,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109933.33333333333, ans=0.1
+2024-07-28 04:28:38,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.14 vs. limit=22.5
+2024-07-28 04:28:38,940 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.739e+01 6.277e+01 6.982e+01 1.071e+02, threshold=1.255e+02, percent-clipped=0.0
+2024-07-28 04:28:53,378 INFO [train.py:1114] (2/4) Epoch 9, batch 700, loss[loss=0.1801, simple_loss=0.2685, pruned_loss=0.04586, over 4640.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2935, pruned_loss=0.06146, over 911695.60 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0
+2024-07-28 04:29:00,739 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:29:09,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.77 vs. limit=15.0
+2024-07-28 04:29:27,308 INFO [train.py:1114] (2/4) Epoch 9, batch 750, loss[loss=0.2044, simple_loss=0.2773, pruned_loss=0.06574, over 4697.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2921, pruned_loss=0.06071, over 918415.38 frames. ], batch size: 13, lr: 8.57e-03, grad_scale: 32.0
+2024-07-28 04:29:33,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.79 vs. limit=22.5
+2024-07-28 04:29:37,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=110053.33333333333, ans=0.05
+2024-07-28 04:29:41,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110053.33333333333, ans=0.1
+2024-07-28 04:29:48,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.674e+01 6.132e+01 7.146e+01 1.139e+02, threshold=1.226e+02, percent-clipped=0.0
+2024-07-28 04:29:52,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=110080.0, ans=0.125
+2024-07-28 04:30:03,389 INFO [train.py:1114] (2/4) Epoch 9, batch 800, loss[loss=0.1811, simple_loss=0.261, pruned_loss=0.05057, over 4865.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2922, pruned_loss=0.06113, over 923306.49 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0
+2024-07-28 04:30:14,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=110120.0, ans=0.1
+2024-07-28 04:30:14,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.79 vs. limit=15.0
+2024-07-28 04:30:32,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=110160.0, ans=0.125
+2024-07-28 04:30:37,058 INFO [train.py:1114] (2/4) Epoch 9, batch 850, loss[loss=0.2452, simple_loss=0.3388, pruned_loss=0.0758, over 4663.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.293, pruned_loss=0.06171, over 927479.44 frames. ], batch size: 14, lr: 8.57e-03, grad_scale: 32.0
+2024-07-28 04:30:41,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=110173.33333333333, ans=0.0
+2024-07-28 04:30:41,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110173.33333333333, ans=0.125
+2024-07-28 04:30:46,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.03 vs. limit=15.0
+2024-07-28 04:30:47,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110186.66666666667, ans=0.1
+2024-07-28 04:30:49,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110186.66666666667, ans=0.125
+2024-07-28 04:30:55,618 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.250e+01 5.670e+01 6.591e+01 7.214e+01 1.079e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-28 04:31:10,553 INFO [train.py:1114] (2/4) Epoch 9, batch 900, loss[loss=0.2338, simple_loss=0.3044, pruned_loss=0.08159, over 4839.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2942, pruned_loss=0.06245, over 928213.25 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0
+2024-07-28 04:31:20,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=110253.33333333333, ans=0.125
+2024-07-28 04:31:44,082 INFO [train.py:1114] (2/4) Epoch 9, batch 950, loss[loss=0.1784, simple_loss=0.259, pruned_loss=0.04892, over 4772.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2933, pruned_loss=0.06221, over 929784.72 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0
+2024-07-28 04:31:46,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=110306.66666666667, ans=0.1
+2024-07-28 04:31:46,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=110306.66666666667, ans=0.0
+2024-07-28 04:31:48,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=110306.66666666667, ans=0.2
+2024-07-28 04:31:50,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=110320.0, ans=10.0
+2024-07-28 04:31:53,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110320.0, ans=0.125
+2024-07-28 04:31:57,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=110333.33333333333, ans=0.125
+2024-07-28 04:31:57,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=110333.33333333333, ans=0.025
+2024-07-28 04:32:00,206 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:32:02,682 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.804e+01 6.637e+01 7.593e+01 9.914e+01, threshold=1.327e+02, percent-clipped=0.0
+2024-07-28 04:32:07,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=110346.66666666667, ans=0.125
+2024-07-28 04:32:17,301 INFO [train.py:1114] (2/4) Epoch 9, batch 1000, loss[loss=0.1854, simple_loss=0.2637, pruned_loss=0.05352, over 4968.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2929, pruned_loss=0.06176, over 929880.52 frames. ], batch size: 13, lr: 8.56e-03, grad_scale: 32.0
+2024-07-28 04:32:35,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=110400.0, ans=0.0
+2024-07-28 04:32:46,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0
+2024-07-28 04:32:52,792 INFO [train.py:1114] (2/4) Epoch 9, batch 1050, loss[loss=0.2319, simple_loss=0.328, pruned_loss=0.06792, over 4871.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2923, pruned_loss=0.06168, over 932093.22 frames. ], batch size: 14, lr: 8.56e-03, grad_scale: 32.0
+2024-07-28 04:33:07,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110440.0, ans=0.1
+2024-07-28 04:33:08,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.14 vs. limit=22.5
+2024-07-28 04:33:19,464 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:33:22,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110466.66666666667, ans=0.125
+2024-07-28 04:33:36,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=110466.66666666667, ans=0.125
+2024-07-28 04:33:37,207 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 5.646e+01 6.301e+01 7.018e+01 9.967e+01, threshold=1.260e+02, percent-clipped=0.0
+2024-07-28 04:33:39,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=110480.0, ans=0.125
+2024-07-28 04:33:49,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=110480.0, ans=0.0
+2024-07-28 04:33:50,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=110493.33333333333, ans=0.0
+2024-07-28 04:33:58,446 INFO [train.py:1114] (2/4) Epoch 9, batch 1100, loss[loss=0.2334, simple_loss=0.3052, pruned_loss=0.08083, over 4906.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2923, pruned_loss=0.06182, over 934390.43 frames. ], batch size: 13, lr: 8.55e-03, grad_scale: 32.0
+2024-07-28 04:34:12,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=110533.33333333333, ans=0.0
+2024-07-28 04:34:22,803 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.14 vs. limit=6.0
+2024-07-28 04:34:23,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=110546.66666666667, ans=0.0
+2024-07-28 04:34:32,503 INFO [train.py:1114] (2/4) Epoch 9, batch 1150, loss[loss=0.1883, simple_loss=0.2814, pruned_loss=0.04764, over 4905.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2924, pruned_loss=0.06238, over 934349.70 frames. ], batch size: 13, lr: 8.55e-03, grad_scale: 32.0
+2024-07-28 04:34:38,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=110573.33333333333, ans=0.2
+2024-07-28 04:34:39,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=110586.66666666667, ans=0.0
+2024-07-28 04:34:43,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0
+2024-07-28 04:34:49,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=110600.0, ans=0.125
+2024-07-28 04:34:51,455 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.662e+01 6.289e+01 6.921e+01 1.035e+02, threshold=1.258e+02, percent-clipped=0.0
+2024-07-28 04:34:57,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=110613.33333333333, ans=0.0
+2024-07-28 04:35:07,032 INFO [train.py:1114] (2/4) Epoch 9, batch 1200, loss[loss=0.2273, simple_loss=0.3149, pruned_loss=0.06989, over 4874.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2934, pruned_loss=0.06234, over 933346.73 frames. ], batch size: 14, lr: 8.55e-03, grad_scale: 32.0
+2024-07-28 04:35:10,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=110640.0, ans=0.025
+2024-07-28 04:35:11,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=110640.0, ans=0.2
+2024-07-28 04:35:14,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=110653.33333333333, ans=0.125
+2024-07-28 04:35:16,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=110653.33333333333, ans=0.125
+2024-07-28 04:35:29,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=110680.0, ans=0.0
+2024-07-28 04:35:29,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=110680.0, ans=0.0
+2024-07-28 04:35:39,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=110693.33333333333, ans=0.125
+2024-07-28 04:35:42,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=110693.33333333333, ans=0.125
+2024-07-28 04:35:43,353 INFO [train.py:1114] (2/4) Epoch 9, batch 1250, loss[loss=0.2052, simple_loss=0.2907, pruned_loss=0.0599, over 4803.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.294, pruned_loss=0.06255, over 937216.00 frames. ], batch size: 15, lr: 8.55e-03, grad_scale: 32.0
+2024-07-28 04:35:46,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=110706.66666666667, ans=0.04949747468305833
+2024-07-28 04:35:49,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.52 vs. limit=6.0
+2024-07-28 04:36:00,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0
+2024-07-28 04:36:07,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.896e+01 5.807e+01 6.256e+01 7.154e+01 1.109e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 04:36:08,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=110746.66666666667, ans=0.5
+2024-07-28 04:36:11,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110746.66666666667, ans=0.1
+2024-07-28 04:36:21,792 INFO [train.py:1114] (2/4) Epoch 9, batch 1300, loss[loss=0.2065, simple_loss=0.3009, pruned_loss=0.05604, over 4686.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2937, pruned_loss=0.06258, over 938802.75 frames. ], batch size: 19, lr: 8.54e-03, grad_scale: 32.0
+2024-07-28 04:36:31,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=110786.66666666667, ans=0.125
+2024-07-28 04:36:34,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=110800.0, ans=0.125
+2024-07-28 04:36:36,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.42 vs. limit=22.5
+2024-07-28 04:36:37,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.19 vs. limit=12.0
+2024-07-28 04:36:39,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=110800.0, ans=0.125
+2024-07-28 04:37:00,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=110813.33333333333, ans=0.02
+2024-07-28 04:37:23,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=110826.66666666667, ans=0.0
+2024-07-28 04:37:25,166 INFO [train.py:1114] (2/4) Epoch 9, batch 1350, loss[loss=0.2223, simple_loss=0.307, pruned_loss=0.06879, over 4754.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2928, pruned_loss=0.06188, over 940752.32 frames. ], batch size: 13, lr: 8.54e-03, grad_scale: 32.0
+2024-07-28 04:37:33,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=110853.33333333333, ans=0.025
+2024-07-28 04:37:36,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=110853.33333333333, ans=0.125
+2024-07-28 04:37:39,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=110866.66666666667, ans=0.125
+2024-07-28 04:37:43,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.724e+01 6.443e+01 7.516e+01 1.167e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 04:37:51,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=110880.0, ans=0.125
+2024-07-28 04:37:55,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=110880.0, ans=0.2
+2024-07-28 04:38:02,870 INFO [train.py:1114] (2/4) Epoch 9, batch 1400, loss[loss=0.2196, simple_loss=0.289, pruned_loss=0.07505, over 4693.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2931, pruned_loss=0.06185, over 942496.35 frames. ], batch size: 11, lr: 8.54e-03, grad_scale: 32.0
+2024-07-28 04:38:21,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=110920.0, ans=0.0
+2024-07-28 04:38:24,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0
+2024-07-28 04:38:29,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0
+2024-07-28 04:38:44,515 INFO [train.py:1114] (2/4) Epoch 9, batch 1450, loss[loss=0.216, simple_loss=0.3108, pruned_loss=0.06063, over 4656.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2936, pruned_loss=0.06165, over 942287.64 frames. ], batch size: 15, lr: 8.53e-03, grad_scale: 32.0
+2024-07-28 04:38:46,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=110973.33333333333, ans=0.95
+2024-07-28 04:38:51,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=110986.66666666667, ans=0.125
+2024-07-28 04:38:53,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=110986.66666666667, ans=22.5
+2024-07-28 04:38:53,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=110986.66666666667, ans=0.0
+2024-07-28 04:39:03,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 5.882e+01 6.432e+01 7.495e+01 9.959e+01, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 04:39:13,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=111026.66666666667, ans=0.0
+2024-07-28 04:39:14,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=111026.66666666667, ans=0.04949747468305833
+2024-07-28 04:39:19,218 INFO [train.py:1114] (2/4) Epoch 9, batch 1500, loss[loss=0.2064, simple_loss=0.3064, pruned_loss=0.05323, over 4814.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2944, pruned_loss=0.06174, over 941581.85 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0
+2024-07-28 04:39:22,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=111040.0, ans=0.0
+2024-07-28 04:39:28,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=111053.33333333333, ans=0.025
+2024-07-28 04:39:38,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=111066.66666666667, ans=0.025
+2024-07-28 04:39:56,265 INFO [train.py:1114] (2/4) Epoch 9, batch 1550, loss[loss=0.1764, simple_loss=0.2654, pruned_loss=0.0437, over 4908.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2939, pruned_loss=0.06173, over 938321.15 frames. ], batch size: 15, lr: 8.53e-03, grad_scale: 32.0
+2024-07-28 04:40:10,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=111133.33333333333, ans=0.0
+2024-07-28 04:40:12,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=111133.33333333333, ans=0.09899494936611666
+2024-07-28 04:40:14,625 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.781e+01 6.614e+01 7.335e+01 1.076e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-28 04:40:16,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111146.66666666667, ans=0.1
+2024-07-28 04:40:29,183 INFO [train.py:1114] (2/4) Epoch 9, batch 1600, loss[loss=0.2011, simple_loss=0.2973, pruned_loss=0.05238, over 4871.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2936, pruned_loss=0.06209, over 937090.46 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0
+2024-07-28 04:40:33,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111173.33333333333, ans=0.125
+2024-07-28 04:40:34,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=111173.33333333333, ans=0.0
+2024-07-28 04:40:50,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111213.33333333333, ans=0.125
+2024-07-28 04:41:03,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.67 vs. limit=15.0
+2024-07-28 04:41:03,437 INFO [train.py:1114] (2/4) Epoch 9, batch 1650, loss[loss=0.2277, simple_loss=0.3291, pruned_loss=0.0632, over 4657.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2936, pruned_loss=0.06218, over 937429.28 frames. ], batch size: 14, lr: 8.52e-03, grad_scale: 32.0
+2024-07-28 04:41:11,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0
+2024-07-28 04:41:21,886 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.810e+01 6.591e+01 7.411e+01 1.241e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-28 04:41:29,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=111293.33333333333, ans=0.025
+2024-07-28 04:41:32,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111293.33333333333, ans=0.125
+2024-07-28 04:41:38,618 INFO [train.py:1114] (2/4) Epoch 9, batch 1700, loss[loss=0.1871, simple_loss=0.2637, pruned_loss=0.05527, over 4704.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2927, pruned_loss=0.06107, over 938949.72 frames. ], batch size: 11, lr: 8.52e-03, grad_scale: 32.0
+2024-07-28 04:41:46,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111320.0, ans=0.125
+2024-07-28 04:41:51,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=111320.0, ans=0.1
+2024-07-28 04:41:54,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=111333.33333333333, ans=0.5
+2024-07-28 04:41:57,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111333.33333333333, ans=0.125
+2024-07-28 04:42:00,421 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:42:12,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=111360.0, ans=0.125
+2024-07-28 04:42:13,666 INFO [train.py:1114] (2/4) Epoch 9, batch 1750, loss[loss=0.1538, simple_loss=0.2374, pruned_loss=0.03514, over 4789.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2921, pruned_loss=0.06072, over 940065.89 frames. ], batch size: 11, lr: 8.52e-03, grad_scale: 32.0
+2024-07-28 04:42:32,291 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.540e+01 6.107e+01 6.918e+01 9.511e+01, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 04:42:34,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0
+2024-07-28 04:42:34,762 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.88 vs. limit=22.5
+2024-07-28 04:42:45,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=111426.66666666667, ans=0.125
+2024-07-28 04:42:45,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=111426.66666666667, ans=0.125
+2024-07-28 04:42:47,089 INFO [train.py:1114] (2/4) Epoch 9, batch 1800, loss[loss=0.2399, simple_loss=0.3251, pruned_loss=0.07739, over 4645.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2921, pruned_loss=0.06083, over 940630.78 frames. ], batch size: 13, lr: 8.52e-03, grad_scale: 32.0
+2024-07-28 04:42:55,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.45 vs. limit=15.0
+2024-07-28 04:43:00,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=111453.33333333333, ans=0.2
+2024-07-28 04:43:19,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111493.33333333333, ans=0.0
+2024-07-28 04:43:22,189 INFO [train.py:1114] (2/4) Epoch 9, batch 1850, loss[loss=0.1957, simple_loss=0.2828, pruned_loss=0.05427, over 4814.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2918, pruned_loss=0.06067, over 940383.77 frames. ], batch size: 14, lr: 8.51e-03, grad_scale: 32.0
+2024-07-28 04:43:22,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=111506.66666666667, ans=0.125
+2024-07-28 04:43:34,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111520.0, ans=0.1
+2024-07-28 04:43:38,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.96 vs. limit=15.0
+2024-07-28 04:43:41,628 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.789e+01 6.622e+01 8.000e+01 1.293e+02, threshold=1.324e+02, percent-clipped=1.0
+2024-07-28 04:43:43,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=111546.66666666667, ans=0.025
+2024-07-28 04:43:47,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=111546.66666666667, ans=0.0
+2024-07-28 04:43:48,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=111546.66666666667, ans=0.125
+2024-07-28 04:43:56,514 INFO [train.py:1114] (2/4) Epoch 9, batch 1900, loss[loss=0.1924, simple_loss=0.2842, pruned_loss=0.05032, over 4655.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2927, pruned_loss=0.06097, over 941564.53 frames. ], batch size: 14, lr: 8.51e-03, grad_scale: 64.0
+2024-07-28 04:44:08,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=111586.66666666667, ans=0.05
+2024-07-28 04:44:12,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=111600.0, ans=0.0
+2024-07-28 04:44:17,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.81 vs. limit=15.0
+2024-07-28 04:44:26,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=111626.66666666667, ans=0.0
+2024-07-28 04:44:27,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=111626.66666666667, ans=0.125
+2024-07-28 04:44:29,973 INFO [train.py:1114] (2/4) Epoch 9, batch 1950, loss[loss=0.2221, simple_loss=0.307, pruned_loss=0.0686, over 4887.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2943, pruned_loss=0.06178, over 943753.78 frames. ], batch size: 13, lr: 8.51e-03, grad_scale: 64.0
+2024-07-28 04:44:43,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111653.33333333333, ans=0.0
+2024-07-28 04:44:48,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=111666.66666666667, ans=0.0
+2024-07-28 04:44:50,745 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 5.906e+01 6.292e+01 6.984e+01 1.022e+02, threshold=1.258e+02, percent-clipped=0.0
+2024-07-28 04:44:51,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111680.0, ans=0.125
+2024-07-28 04:45:04,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=111693.33333333333, ans=0.125
+2024-07-28 04:45:04,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111693.33333333333, ans=0.125
+2024-07-28 04:45:05,295 INFO [train.py:1114] (2/4) Epoch 9, batch 2000, loss[loss=0.2012, simple_loss=0.2739, pruned_loss=0.06425, over 4818.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2951, pruned_loss=0.0624, over 940901.53 frames. ], batch size: 11, lr: 8.51e-03, grad_scale: 64.0
+2024-07-28 04:45:16,566 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:45:19,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.18 vs. limit=10.0
+2024-07-28 04:45:23,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111733.33333333333, ans=0.0
+2024-07-28 04:45:24,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=111733.33333333333, ans=0.0
+2024-07-28 04:45:32,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=111746.66666666667, ans=0.0
+2024-07-28 04:45:42,824 INFO [train.py:1114] (2/4) Epoch 9, batch 2050, loss[loss=0.1629, simple_loss=0.2424, pruned_loss=0.04173, over 4615.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2937, pruned_loss=0.06236, over 938937.46 frames. ], batch size: 11, lr: 8.50e-03, grad_scale: 64.0
+2024-07-28 04:45:54,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=111786.66666666667, ans=0.05
+2024-07-28 04:45:55,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=111786.66666666667, ans=0.2
+2024-07-28 04:46:01,613 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.743e+01 6.420e+01 7.803e+01 1.541e+02, threshold=1.284e+02, percent-clipped=1.0
+2024-07-28 04:46:15,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.79 vs. limit=22.5
+2024-07-28 04:46:16,247 INFO [train.py:1114] (2/4) Epoch 9, batch 2100, loss[loss=0.1797, simple_loss=0.2766, pruned_loss=0.04146, over 4762.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2923, pruned_loss=0.06142, over 940831.50 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0
+2024-07-28 04:46:21,011 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:46:32,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111866.66666666667, ans=0.1
+2024-07-28 04:46:39,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111880.0, ans=0.125
+2024-07-28 04:46:42,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111880.0, ans=0.125
+2024-07-28 04:46:51,245 INFO [train.py:1114] (2/4) Epoch 9, batch 2150, loss[loss=0.2409, simple_loss=0.3281, pruned_loss=0.07688, over 4890.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.06073, over 943986.12 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0
+2024-07-28 04:46:53,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111906.66666666667, ans=0.125
+2024-07-28 04:46:56,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111906.66666666667, ans=0.1
+2024-07-28 04:46:57,159 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.97 vs. limit=12.0
+2024-07-28 04:47:01,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=111920.0, ans=0.0
+2024-07-28 04:47:01,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0
+2024-07-28 04:47:10,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=111933.33333333333, ans=0.2
+2024-07-28 04:47:11,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=111933.33333333333, ans=0.025
+2024-07-28 04:47:12,514 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.843e+01 7.038e+01 8.009e+01 1.104e+02, threshold=1.408e+02, percent-clipped=0.0
+2024-07-28 04:47:14,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111946.66666666667, ans=0.125
+2024-07-28 04:47:20,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=111960.0, ans=0.0
+2024-07-28 04:47:23,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=111960.0, ans=0.125
+2024-07-28 04:47:25,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=111960.0, ans=0.0
+2024-07-28 04:47:25,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111960.0, ans=0.1
+2024-07-28 04:47:26,787 INFO [train.py:1114] (2/4) Epoch 9, batch 2200, loss[loss=0.2243, simple_loss=0.2986, pruned_loss=0.07505, over 4815.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.29, pruned_loss=0.06049, over 943654.00 frames. ], batch size: 14, lr: 8.50e-03, grad_scale: 32.0
+2024-07-28 04:47:30,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111973.33333333333, ans=0.0
+2024-07-28 04:47:36,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=111986.66666666667, ans=0.125
+2024-07-28 04:48:00,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112026.66666666667, ans=0.125
+2024-07-28 04:48:01,874 INFO [train.py:1114] (2/4) Epoch 9, batch 2250, loss[loss=0.1728, simple_loss=0.2611, pruned_loss=0.04221, over 4690.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2903, pruned_loss=0.06069, over 942362.69 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0
+2024-07-28 04:48:05,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=112040.0, ans=0.125
+2024-07-28 04:48:15,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112066.66666666667, ans=0.125
+2024-07-28 04:48:19,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=112066.66666666667, ans=0.035
+2024-07-28 04:48:20,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 6.028e+01 6.824e+01 8.191e+01 1.096e+02, threshold=1.365e+02, percent-clipped=0.0
+2024-07-28 04:48:34,840 INFO [train.py:1114] (2/4) Epoch 9, batch 2300, loss[loss=0.1972, simple_loss=0.2809, pruned_loss=0.05677, over 4940.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2894, pruned_loss=0.0606, over 939963.66 frames. ], batch size: 12, lr: 8.49e-03, grad_scale: 32.0
+2024-07-28 04:48:35,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.07 vs. limit=22.5
+2024-07-28 04:48:42,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=112120.0, ans=0.2
+2024-07-28 04:49:34,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112146.66666666667, ans=0.125
+2024-07-28 04:49:52,136 INFO [train.py:1114] (2/4) Epoch 9, batch 2350, loss[loss=0.205, simple_loss=0.2926, pruned_loss=0.0587, over 4628.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2905, pruned_loss=0.06128, over 941693.10 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0
+2024-07-28 04:49:52,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=112173.33333333333, ans=0.0
+2024-07-28 04:49:54,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=112173.33333333333, ans=0.125
+2024-07-28 04:49:55,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=112173.33333333333, ans=0.025
+2024-07-28 04:50:01,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=112186.66666666667, ans=0.0
+2024-07-28 04:50:11,755 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 5.463e+01 6.108e+01 6.939e+01 1.035e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 04:50:15,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=112213.33333333333, ans=0.0
+2024-07-28 04:50:15,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=112213.33333333333, ans=0.1
+2024-07-28 04:50:18,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112226.66666666667, ans=0.1
+2024-07-28 04:50:19,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=112226.66666666667, ans=0.0
+2024-07-28 04:50:21,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=112226.66666666667, ans=0.0
+2024-07-28 04:50:25,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=112240.0, ans=0.0
+2024-07-28 04:50:25,455 INFO [train.py:1114] (2/4) Epoch 9, batch 2400, loss[loss=0.2079, simple_loss=0.2928, pruned_loss=0.06152, over 4638.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2899, pruned_loss=0.06054, over 941631.89 frames. ], batch size: 12, lr: 8.49e-03, grad_scale: 32.0
+2024-07-28 04:50:35,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=112240.0, ans=0.125
+2024-07-28 04:50:38,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112253.33333333333, ans=0.125
+2024-07-28 04:50:44,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=112266.66666666667, ans=0.125
+2024-07-28 04:50:58,081 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:50:58,968 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.67 vs. limit=15.0
+2024-07-28 04:51:05,885 INFO [train.py:1114] (2/4) Epoch 9, batch 2450, loss[loss=0.2182, simple_loss=0.2984, pruned_loss=0.06905, over 4694.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2914, pruned_loss=0.06105, over 937845.87 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0
+2024-07-28 04:51:09,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112306.66666666667, ans=0.125
+2024-07-28 04:51:12,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-28 04:51:16,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112320.0, ans=0.1
+2024-07-28 04:51:25,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.551e+01 5.862e+01 6.434e+01 7.688e+01 1.164e+02, threshold=1.287e+02, percent-clipped=0.0
+2024-07-28 04:51:27,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112346.66666666667, ans=0.0
+2024-07-28 04:51:43,153 INFO [train.py:1114] (2/4) Epoch 9, batch 2500, loss[loss=0.2049, simple_loss=0.2944, pruned_loss=0.05766, over 4795.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2915, pruned_loss=0.06064, over 939319.91 frames. ], batch size: 14, lr: 8.48e-03, grad_scale: 32.0
+2024-07-28 04:51:46,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=112373.33333333333, ans=0.125
+2024-07-28 04:52:00,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112400.0, ans=0.125
+2024-07-28 04:52:05,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=112413.33333333333, ans=0.025
+2024-07-28 04:52:05,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=112413.33333333333, ans=0.0
+2024-07-28 04:52:16,730 INFO [train.py:1114] (2/4) Epoch 9, batch 2550, loss[loss=0.1791, simple_loss=0.2692, pruned_loss=0.04447, over 4801.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2915, pruned_loss=0.06011, over 938990.13 frames. ], batch size: 11, lr: 8.48e-03, grad_scale: 32.0
+2024-07-28 04:52:24,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112453.33333333333, ans=0.1
+2024-07-28 04:52:30,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=112466.66666666667, ans=10.0
+2024-07-28 04:52:31,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112466.66666666667, ans=0.125
+2024-07-28 04:52:35,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=112466.66666666667, ans=0.125
+2024-07-28 04:52:36,358 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.845e+01 6.430e+01 7.273e+01 1.102e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 04:52:46,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0
+2024-07-28 04:52:46,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.22 vs. limit=15.0
+2024-07-28 04:52:48,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112493.33333333333, ans=0.1
+2024-07-28 04:52:50,639 INFO [train.py:1114] (2/4) Epoch 9, batch 2600, loss[loss=0.1806, simple_loss=0.2762, pruned_loss=0.04251, over 4902.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2921, pruned_loss=0.06029, over 938309.95 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0
+2024-07-28 04:52:56,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=112520.0, ans=0.125
+2024-07-28 04:52:59,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=112520.0, ans=0.0
+2024-07-28 04:53:04,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0
+2024-07-28 04:53:10,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112546.66666666667, ans=0.125
+2024-07-28 04:53:23,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=112560.0, ans=0.125
+2024-07-28 04:53:25,567 INFO [train.py:1114] (2/4) Epoch 9, batch 2650, loss[loss=0.2551, simple_loss=0.3401, pruned_loss=0.08509, over 4607.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2922, pruned_loss=0.06054, over 940375.31 frames. ], batch size: 16, lr: 8.47e-03, grad_scale: 32.0
+2024-07-28 04:53:30,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0
+2024-07-28 04:53:40,274 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.31 vs. limit=15.0
+2024-07-28 04:53:44,510 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.626e+01 6.228e+01 7.272e+01 1.238e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 04:53:44,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=112613.33333333333, ans=0.0
+2024-07-28 04:53:49,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=112613.33333333333, ans=0.0
+2024-07-28 04:53:55,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=112626.66666666667, ans=0.05
+2024-07-28 04:53:58,568 INFO [train.py:1114] (2/4) Epoch 9, batch 2700, loss[loss=0.2012, simple_loss=0.3081, pruned_loss=0.04712, over 4743.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2925, pruned_loss=0.06026, over 940114.74 frames. ], batch size: 14, lr: 8.47e-03, grad_scale: 32.0
+2024-07-28 04:54:02,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=112640.0, ans=0.07
+2024-07-28 04:54:06,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=112653.33333333333, ans=0.5
+2024-07-28 04:54:19,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=112680.0, ans=0.125
+2024-07-28 04:54:24,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.58 vs. limit=8.0
+2024-07-28 04:54:28,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112693.33333333333, ans=0.1
+2024-07-28 04:54:29,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=112693.33333333333, ans=0.0
+2024-07-28 04:54:32,643 INFO [train.py:1114] (2/4) Epoch 9, batch 2750, loss[loss=0.1909, simple_loss=0.2752, pruned_loss=0.05328, over 4712.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2914, pruned_loss=0.06041, over 939798.62 frames. ], batch size: 12, lr: 8.47e-03, grad_scale: 32.0
+2024-07-28 04:54:43,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112720.0, ans=0.1
+2024-07-28 04:54:45,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112733.33333333333, ans=0.125
+2024-07-28 04:54:51,674 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 6.173e+01 6.885e+01 7.984e+01 1.102e+02, threshold=1.377e+02, percent-clipped=0.0
+2024-07-28 04:55:01,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.38 vs. limit=10.0
+2024-07-28 04:55:05,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.40 vs. limit=15.0
+2024-07-28 04:55:05,595 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.45 vs. limit=15.0
+2024-07-28 04:55:05,842 INFO [train.py:1114] (2/4) Epoch 9, batch 2800, loss[loss=0.3181, simple_loss=0.3666, pruned_loss=0.1348, over 3440.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2926, pruned_loss=0.06117, over 937580.41 frames. ], batch size: 36, lr: 8.47e-03, grad_scale: 32.0
+2024-07-28 04:55:09,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=112773.33333333333, ans=0.2
+2024-07-28 04:55:12,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=112786.66666666667, ans=0.1
+2024-07-28 04:55:27,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=112813.33333333333, ans=0.0
+2024-07-28 04:55:28,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.22 vs. limit=22.5
+2024-07-28 04:55:29,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.16 vs. limit=15.0
+2024-07-28 04:55:39,182 INFO [train.py:1114] (2/4) Epoch 9, batch 2850, loss[loss=0.2036, simple_loss=0.289, pruned_loss=0.05909, over 4963.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2925, pruned_loss=0.06124, over 935953.45 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0
+2024-07-28 04:55:42,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112840.0, ans=0.125
+2024-07-28 04:55:46,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.50 vs. limit=22.5
+2024-07-28 04:55:51,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112853.33333333333, ans=0.125
+2024-07-28 04:55:53,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=112866.66666666667, ans=0.125
+2024-07-28 04:55:58,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.36 vs. limit=15.0
+2024-07-28 04:55:58,249 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.781e+01 6.339e+01 7.378e+01 1.144e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 04:55:59,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=112880.0, ans=0.025
+2024-07-28 04:56:00,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=112880.0, ans=0.2
+2024-07-28 04:56:05,637 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:56:12,178 INFO [train.py:1114] (2/4) Epoch 9, batch 2900, loss[loss=0.1801, simple_loss=0.262, pruned_loss=0.0491, over 4824.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2933, pruned_loss=0.06079, over 939900.64 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0
+2024-07-28 04:56:12,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=112906.66666666667, ans=0.05
+2024-07-28 04:56:26,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=112920.0, ans=0.125
+2024-07-28 04:56:29,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=112933.33333333333, ans=0.0
+2024-07-28 04:56:31,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112933.33333333333, ans=0.0
+2024-07-28 04:56:34,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=112946.66666666667, ans=0.0
+2024-07-28 04:56:34,654 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. 
limit=15.0 +2024-07-28 04:56:47,479 INFO [train.py:1114] (2/4) Epoch 9, batch 2950, loss[loss=0.2138, simple_loss=0.2997, pruned_loss=0.06392, over 4712.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.292, pruned_loss=0.06079, over 938792.86 frames. ], batch size: 12, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:54,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.22 vs. limit=15.0 +2024-07-28 04:57:08,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=113000.0, ans=0.2 +2024-07-28 04:57:09,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.whiten.whitening_limit, batch_count=113000.0, ans=12.0 +2024-07-28 04:57:10,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.914e+01 6.880e+01 7.946e+01 1.236e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 04:57:16,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.57 vs. limit=15.0 +2024-07-28 04:57:16,280 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:57:19,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=113026.66666666667, ans=0.125 +2024-07-28 04:57:19,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=113026.66666666667, ans=0.0 +2024-07-28 04:57:19,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=113026.66666666667, ans=0.05 +2024-07-28 04:57:24,717 INFO [train.py:1114] (2/4) Epoch 9, batch 3000, loss[loss=0.2156, simple_loss=0.2968, pruned_loss=0.06717, over 4762.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2909, pruned_loss=0.06033, over 937741.58 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:57:24,717 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 04:57:37,296 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.1766, simple_loss=0.2807, pruned_loss=0.03626, over 944034.00 frames. +2024-07-28 04:57:37,296 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 04:57:38,898 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:57:45,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=113053.33333333333, ans=0.125 +2024-07-28 04:57:51,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=113066.66666666667, ans=0.5 +2024-07-28 04:57:51,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=113066.66666666667, ans=0.025 +2024-07-28 04:57:53,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=113066.66666666667, ans=0.0 +2024-07-28 04:58:01,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.15 vs. 
limit=15.0 +2024-07-28 04:58:06,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=113093.33333333333, ans=0.0 +2024-07-28 04:58:11,132 INFO [train.py:1114] (2/4) Epoch 9, batch 3050, loss[loss=0.2117, simple_loss=0.2974, pruned_loss=0.06295, over 4634.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2909, pruned_loss=0.0602, over 936690.74 frames. ], batch size: 12, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:15,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-07-28 04:58:21,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113120.0, ans=0.125 +2024-07-28 04:58:23,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113133.33333333333, ans=0.1 +2024-07-28 04:58:32,587 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.528e+01 6.161e+01 6.934e+01 1.105e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 04:58:40,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=113160.0, ans=0.0 +2024-07-28 04:58:44,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=113160.0, ans=0.0 +2024-07-28 04:58:47,568 INFO [train.py:1114] (2/4) Epoch 9, batch 3100, loss[loss=0.2583, simple_loss=0.3277, pruned_loss=0.0944, over 4635.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2919, pruned_loss=0.06075, over 937381.52 frames. ], batch size: 16, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:48,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=113173.33333333333, ans=0.2 +2024-07-28 04:58:49,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.33 vs. limit=8.0 +2024-07-28 04:58:50,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=113173.33333333333, ans=0.125 +2024-07-28 04:58:55,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.31 vs. limit=15.0 +2024-07-28 04:59:04,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.56 vs. limit=15.0 +2024-07-28 04:59:06,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=113200.0, ans=0.2 +2024-07-28 04:59:22,222 INFO [train.py:1114] (2/4) Epoch 9, batch 3150, loss[loss=0.1927, simple_loss=0.2912, pruned_loss=0.0471, over 4594.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2916, pruned_loss=0.0602, over 937795.88 frames. 
], batch size: 17, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:41,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.794e+01 6.244e+01 6.965e+01 1.084e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 04:59:41,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=113280.0, ans=0.125 +2024-07-28 04:59:50,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.71 vs. limit=15.0 +2024-07-28 04:59:51,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=113293.33333333333, ans=0.0 +2024-07-28 04:59:54,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=113293.33333333333, ans=0.0 +2024-07-28 04:59:55,508 INFO [train.py:1114] (2/4) Epoch 9, batch 3200, loss[loss=0.1698, simple_loss=0.2569, pruned_loss=0.04137, over 4826.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2912, pruned_loss=0.05997, over 939224.87 frames. ], batch size: 13, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:58,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=113306.66666666667, ans=0.125 +2024-07-28 05:00:09,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.08 vs. limit=15.0 +2024-07-28 05:00:21,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113346.66666666667, ans=0.125 +2024-07-28 05:00:25,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.02 vs. limit=12.0 +2024-07-28 05:00:32,257 INFO [train.py:1114] (2/4) Epoch 9, batch 3250, loss[loss=0.2589, simple_loss=0.3331, pruned_loss=0.0923, over 4936.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2916, pruned_loss=0.06049, over 940306.25 frames. ], batch size: 14, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:00:51,420 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.809e+01 6.527e+01 7.156e+01 1.090e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 05:00:54,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=113413.33333333333, ans=0.0 +2024-07-28 05:00:59,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113426.66666666667, ans=0.125 +2024-07-28 05:01:05,602 INFO [train.py:1114] (2/4) Epoch 9, batch 3300, loss[loss=0.2499, simple_loss=0.3315, pruned_loss=0.0841, over 4764.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2902, pruned_loss=0.0606, over 940703.04 frames. 
], batch size: 19, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:07,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=113440.0, ans=0.0 +2024-07-28 05:01:07,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=113440.0, ans=0.0 +2024-07-28 05:01:09,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=113440.0, ans=0.025 +2024-07-28 05:01:13,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=113453.33333333333, ans=0.125 +2024-07-28 05:01:22,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=113466.66666666667, ans=0.125 +2024-07-28 05:01:25,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.81 vs. limit=15.0 +2024-07-28 05:01:40,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113506.66666666667, ans=0.125 +2024-07-28 05:01:40,567 INFO [train.py:1114] (2/4) Epoch 9, batch 3350, loss[loss=0.2463, simple_loss=0.3263, pruned_loss=0.08314, over 4613.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.292, pruned_loss=0.0612, over 938353.48 frames. ], batch size: 17, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:47,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=113520.0, ans=0.0 +2024-07-28 05:02:00,181 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.762e+01 6.208e+01 6.963e+01 1.151e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 05:02:08,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113546.66666666667, ans=0.125 +2024-07-28 05:02:22,829 INFO [train.py:1114] (2/4) Epoch 9, batch 3400, loss[loss=0.1608, simple_loss=0.2418, pruned_loss=0.03988, over 4820.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2917, pruned_loss=0.06095, over 937101.23 frames. ], batch size: 11, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:02:39,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=113600.0, ans=0.2 +2024-07-28 05:02:40,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:40,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:47,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-28 05:02:50,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-07-28 05:02:56,780 INFO [train.py:1114] (2/4) Epoch 9, batch 3450, loss[loss=0.234, simple_loss=0.3086, pruned_loss=0.07967, over 4723.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2919, pruned_loss=0.06069, over 937556.13 frames. 
], batch size: 19, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:02:57,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.00 vs. limit=22.5 +2024-07-28 05:02:57,057 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.71 vs. limit=15.0 +2024-07-28 05:03:00,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=113640.0, ans=0.125 +2024-07-28 05:03:16,034 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 6.050e+01 6.762e+01 7.636e+01 1.132e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 05:03:18,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=113680.0, ans=0.0 +2024-07-28 05:03:24,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=113693.33333333333, ans=0.0 +2024-07-28 05:03:29,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-07-28 05:03:29,935 INFO [train.py:1114] (2/4) Epoch 9, batch 3500, loss[loss=0.1787, simple_loss=0.2521, pruned_loss=0.05267, over 4938.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2904, pruned_loss=0.06035, over 938195.25 frames. ], batch size: 12, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:32,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113706.66666666667, ans=0.125 +2024-07-28 05:03:36,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=113706.66666666667, ans=0.0 +2024-07-28 05:03:37,049 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:03:40,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=113720.0, ans=0.125 +2024-07-28 05:03:40,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=113720.0, ans=0.125 +2024-07-28 05:03:41,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=113720.0, ans=0.2 +2024-07-28 05:03:45,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=113733.33333333333, ans=0.125 +2024-07-28 05:03:49,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=113733.33333333333, ans=0.2 +2024-07-28 05:04:06,621 INFO [train.py:1114] (2/4) Epoch 9, batch 3550, loss[loss=0.2661, simple_loss=0.3471, pruned_loss=0.09258, over 4654.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2916, pruned_loss=0.06125, over 939423.20 frames. 
], batch size: 14, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:09,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113773.33333333333, ans=0.1 +2024-07-28 05:04:22,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-07-28 05:04:26,149 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.609e+01 6.345e+01 7.145e+01 1.049e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 05:04:28,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=113813.33333333333, ans=0.125 +2024-07-28 05:04:28,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113813.33333333333, ans=0.125 +2024-07-28 05:04:29,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.57 vs. limit=22.5 +2024-07-28 05:04:33,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.07 vs. limit=15.0 +2024-07-28 05:04:37,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=113826.66666666667, ans=0.0 +2024-07-28 05:04:38,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=113826.66666666667, ans=0.0 +2024-07-28 05:04:40,149 INFO [train.py:1114] (2/4) Epoch 9, batch 3600, loss[loss=0.1931, simple_loss=0.2857, pruned_loss=0.05022, over 4965.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2922, pruned_loss=0.06134, over 940895.76 frames. ], batch size: 13, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:40,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=113840.0, ans=0.05 +2024-07-28 05:04:43,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113840.0, ans=0.1 +2024-07-28 05:04:47,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=113853.33333333333, ans=0.125 +2024-07-28 05:05:06,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.37 vs. limit=15.0 +2024-07-28 05:05:07,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=113893.33333333333, ans=0.0 +2024-07-28 05:05:13,701 INFO [train.py:1114] (2/4) Epoch 9, batch 3650, loss[loss=0.2274, simple_loss=0.3076, pruned_loss=0.07361, over 4905.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2917, pruned_loss=0.06103, over 941250.95 frames. 
], batch size: 15, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:05:18,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=113906.66666666667, ans=0.125 +2024-07-28 05:05:19,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=113906.66666666667, ans=0.125 +2024-07-28 05:05:23,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=113920.0, ans=0.125 +2024-07-28 05:05:29,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=113933.33333333333, ans=0.125 +2024-07-28 05:05:32,931 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.862e+01 6.678e+01 8.090e+01 1.321e+02, threshold=1.336e+02, percent-clipped=1.0 +2024-07-28 05:05:47,169 INFO [train.py:1114] (2/4) Epoch 9, batch 3700, loss[loss=0.21, simple_loss=0.3061, pruned_loss=0.05697, over 4928.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2916, pruned_loss=0.06088, over 942319.12 frames. ], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:06:16,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114013.33333333333, ans=0.1 +2024-07-28 05:06:20,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114026.66666666667, ans=0.125 +2024-07-28 05:06:24,416 INFO [train.py:1114] (2/4) Epoch 9, batch 3750, loss[loss=0.1877, simple_loss=0.2566, pruned_loss=0.05939, over 4804.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2919, pruned_loss=0.06113, over 943764.04 frames. ], batch size: 11, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:06:26,017 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:06:34,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=114053.33333333333, ans=0.2 +2024-07-28 05:06:39,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=114066.66666666667, ans=0.0 +2024-07-28 05:06:44,318 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.778e+01 6.645e+01 7.408e+01 1.039e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:06:44,726 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.97 vs. limit=10.0 +2024-07-28 05:06:45,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.92 vs. 
limit=10.0 +2024-07-28 05:06:45,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=114080.0, ans=0.125 +2024-07-28 05:06:49,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=114080.0, ans=0.2 +2024-07-28 05:06:55,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114093.33333333333, ans=0.1 +2024-07-28 05:06:58,228 INFO [train.py:1114] (2/4) Epoch 9, batch 3800, loss[loss=0.2347, simple_loss=0.325, pruned_loss=0.07226, over 4812.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2912, pruned_loss=0.06119, over 942127.12 frames. ], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:04,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=114120.0, ans=0.125 +2024-07-28 05:07:08,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.72 vs. limit=6.0 +2024-07-28 05:07:10,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.15 vs. limit=6.0 +2024-07-28 05:07:12,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114120.0, ans=0.125 +2024-07-28 05:07:12,658 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:07:29,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.16 vs. limit=15.0 +2024-07-28 05:07:29,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=114160.0, ans=0.0 +2024-07-28 05:07:32,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114160.0, ans=0.125 +2024-07-28 05:07:33,842 INFO [train.py:1114] (2/4) Epoch 9, batch 3850, loss[loss=0.1957, simple_loss=0.2851, pruned_loss=0.05312, over 4657.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2905, pruned_loss=0.06038, over 942635.31 frames. 
], batch size: 16, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:37,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=114173.33333333333, ans=0.07 +2024-07-28 05:07:41,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=114173.33333333333, ans=0.125 +2024-07-28 05:07:43,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=114186.66666666667, ans=0.07 +2024-07-28 05:07:43,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=114186.66666666667, ans=0.025 +2024-07-28 05:07:45,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=114186.66666666667, ans=0.0 +2024-07-28 05:07:46,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=114186.66666666667, ans=0.0 +2024-07-28 05:07:49,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=114200.0, ans=0.0 +2024-07-28 05:07:50,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=114200.0, ans=0.125 +2024-07-28 05:07:53,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.95 vs. limit=22.5 +2024-07-28 05:07:56,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=114200.0, ans=0.125 +2024-07-28 05:07:57,332 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.654e+01 6.534e+01 7.463e+01 1.189e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 05:07:59,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=12.0 +2024-07-28 05:08:02,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.87 vs. limit=15.0 +2024-07-28 05:08:06,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114226.66666666667, ans=0.1 +2024-07-28 05:08:08,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=114226.66666666667, ans=0.0 +2024-07-28 05:08:11,617 INFO [train.py:1114] (2/4) Epoch 9, batch 3900, loss[loss=0.2362, simple_loss=0.3326, pruned_loss=0.06988, over 4817.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2917, pruned_loss=0.06063, over 942771.19 frames. 
], batch size: 14, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:17,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=114240.0, ans=0.0 +2024-07-28 05:08:19,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=114253.33333333333, ans=0.025 +2024-07-28 05:08:26,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114266.66666666667, ans=0.125 +2024-07-28 05:08:27,792 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.26 vs. limit=22.5 +2024-07-28 05:08:28,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=114266.66666666667, ans=0.0 +2024-07-28 05:08:33,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0 +2024-07-28 05:08:34,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114280.0, ans=0.1 +2024-07-28 05:08:39,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114293.33333333333, ans=0.1 +2024-07-28 05:08:40,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=114293.33333333333, ans=0.125 +2024-07-28 05:08:45,015 INFO [train.py:1114] (2/4) Epoch 9, batch 3950, loss[loss=0.1992, simple_loss=0.2969, pruned_loss=0.05072, over 4835.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2917, pruned_loss=0.06099, over 944796.82 frames. ], batch size: 16, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:04,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.786e+01 6.190e+01 6.950e+01 9.125e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 05:09:11,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=114360.0, ans=0.2 +2024-07-28 05:09:15,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0 +2024-07-28 05:09:16,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.34 vs. limit=15.0 +2024-07-28 05:09:18,006 INFO [train.py:1114] (2/4) Epoch 9, batch 4000, loss[loss=0.1754, simple_loss=0.2711, pruned_loss=0.03985, over 4785.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2923, pruned_loss=0.06092, over 940740.16 frames. 
], batch size: 12, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:21,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=114373.33333333333, ans=0.2 +2024-07-28 05:09:27,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=114386.66666666667, ans=0.125 +2024-07-28 05:09:31,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=114400.0, ans=0.125 +2024-07-28 05:09:32,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114400.0, ans=0.1 +2024-07-28 05:09:34,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.46 vs. limit=22.5 +2024-07-28 05:09:45,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=114426.66666666667, ans=0.125 +2024-07-28 05:09:53,453 INFO [train.py:1114] (2/4) Epoch 9, batch 4050, loss[loss=0.2615, simple_loss=0.3213, pruned_loss=0.1009, over 3388.00 frames. ], tot_loss[loss=0.207, simple_loss=0.292, pruned_loss=0.06101, over 939007.86 frames. ], batch size: 36, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:54,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=114440.0, ans=0.2 +2024-07-28 05:10:01,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.19 vs. limit=22.5 +2024-07-28 05:10:03,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.69 vs. limit=15.0 +2024-07-28 05:10:07,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=114466.66666666667, ans=0.125 +2024-07-28 05:10:08,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=114466.66666666667, ans=0.2 +2024-07-28 05:10:12,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.800e+01 6.025e+01 6.921e+01 7.969e+01 1.217e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 05:10:23,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=114493.33333333333, ans=0.2 +2024-07-28 05:10:26,750 INFO [train.py:1114] (2/4) Epoch 9, batch 4100, loss[loss=0.2318, simple_loss=0.3202, pruned_loss=0.07168, over 4923.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2918, pruned_loss=0.06115, over 938150.10 frames. 
], batch size: 15, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:10:34,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=114520.0, ans=0.125 +2024-07-28 05:10:42,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=114533.33333333333, ans=0.2 +2024-07-28 05:10:42,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=114533.33333333333, ans=0.125 +2024-07-28 05:10:59,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=114560.0, ans=0.1 +2024-07-28 05:10:59,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114560.0, ans=0.125 +2024-07-28 05:10:59,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.71 vs. limit=22.5 +2024-07-28 05:11:01,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114560.0, ans=0.125 +2024-07-28 05:11:02,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=114560.0, ans=0.0 +2024-07-28 05:11:10,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=114560.0, ans=0.125 +2024-07-28 05:11:11,946 INFO [train.py:1114] (2/4) Epoch 9, batch 4150, loss[loss=0.1946, simple_loss=0.2833, pruned_loss=0.05297, over 4825.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2912, pruned_loss=0.06094, over 937752.92 frames. ], batch size: 13, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:11:31,357 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.561e+01 6.118e+01 6.990e+01 1.145e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 05:11:43,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=114626.66666666667, ans=0.2 +2024-07-28 05:12:05,253 INFO [train.py:1114] (2/4) Epoch 9, batch 4200, loss[loss=0.2127, simple_loss=0.3066, pruned_loss=0.05938, over 4906.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2919, pruned_loss=0.06119, over 939743.55 frames. 
], batch size: 15, lr: 8.40e-03, grad_scale: 64.0 +2024-07-28 05:12:29,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=114640.0, ans=0.1 +2024-07-28 05:12:34,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=114653.33333333333, ans=0.0 +2024-07-28 05:12:35,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114653.33333333333, ans=0.125 +2024-07-28 05:12:36,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114653.33333333333, ans=0.125 +2024-07-28 05:12:47,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=114680.0, ans=0.125 +2024-07-28 05:12:58,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=114706.66666666667, ans=0.125 +2024-07-28 05:12:58,968 INFO [train.py:1114] (2/4) Epoch 9, batch 4250, loss[loss=0.1814, simple_loss=0.2726, pruned_loss=0.04515, over 4637.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2917, pruned_loss=0.06101, over 941573.76 frames. ], batch size: 12, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:12:59,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=114706.66666666667, ans=15.0 +2024-07-28 05:13:08,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.98 vs. limit=15.0 +2024-07-28 05:13:47,032 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=12.0 +2024-07-28 05:13:48,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=114746.66666666667, ans=0.025 +2024-07-28 05:13:49,192 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.532e+01 6.240e+01 7.121e+01 1.493e+02, threshold=1.248e+02, percent-clipped=1.0 +2024-07-28 05:13:57,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114760.0, ans=0.125 +2024-07-28 05:14:04,261 INFO [train.py:1114] (2/4) Epoch 9, batch 4300, loss[loss=0.2013, simple_loss=0.296, pruned_loss=0.05332, over 4766.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2911, pruned_loss=0.06064, over 940938.12 frames. ], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:17,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.36 vs. limit=12.0 +2024-07-28 05:14:19,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114800.0, ans=0.125 +2024-07-28 05:14:39,641 INFO [train.py:1114] (2/4) Epoch 9, batch 4350, loss[loss=0.2185, simple_loss=0.3049, pruned_loss=0.06602, over 4757.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2912, pruned_loss=0.06039, over 941502.07 frames. 
], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:40,428 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:14:47,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=114840.0, ans=0.0 +2024-07-28 05:14:48,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=114853.33333333333, ans=0.125 +2024-07-28 05:14:53,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=114853.33333333333, ans=0.0 +2024-07-28 05:14:54,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=114866.66666666667, ans=0.125 +2024-07-28 05:14:55,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=114866.66666666667, ans=0.2 +2024-07-28 05:15:01,420 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.655e+01 6.124e+01 6.925e+01 1.522e+02, threshold=1.225e+02, percent-clipped=1.0 +2024-07-28 05:15:03,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=114880.0, ans=0.125 +2024-07-28 05:15:04,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=114880.0, ans=0.025 +2024-07-28 05:15:06,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=114880.0, ans=0.125 +2024-07-28 05:15:09,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=114893.33333333333, ans=0.2 +2024-07-28 05:15:10,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=114893.33333333333, ans=0.2 +2024-07-28 05:15:14,980 INFO [train.py:1114] (2/4) Epoch 9, batch 4400, loss[loss=0.192, simple_loss=0.2723, pruned_loss=0.05586, over 4801.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2921, pruned_loss=0.06107, over 941057.69 frames. ], batch size: 14, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:31,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.92 vs. limit=10.0 +2024-07-28 05:15:32,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=114933.33333333333, ans=0.0 +2024-07-28 05:15:45,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=114960.0, ans=0.125 +2024-07-28 05:15:49,346 INFO [train.py:1114] (2/4) Epoch 9, batch 4450, loss[loss=0.1714, simple_loss=0.2563, pruned_loss=0.04322, over 4937.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.291, pruned_loss=0.06042, over 939270.24 frames. 
], batch size: 12, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:57,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=114986.66666666667, ans=0.0 +2024-07-28 05:16:02,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=115000.0, ans=0.125 +2024-07-28 05:16:06,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115000.0, ans=0.0 +2024-07-28 05:16:08,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115013.33333333333, ans=0.125 +2024-07-28 05:16:09,217 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.739e+01 6.410e+01 7.552e+01 1.027e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:16:14,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115013.33333333333, ans=0.125 +2024-07-28 05:16:17,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=115026.66666666667, ans=0.0 +2024-07-28 05:16:24,260 INFO [train.py:1114] (2/4) Epoch 9, batch 4500, loss[loss=0.1642, simple_loss=0.2574, pruned_loss=0.03551, over 4743.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.291, pruned_loss=0.06023, over 938245.78 frames. ], batch size: 14, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:25,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=115040.0, ans=0.05 +2024-07-28 05:16:29,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=115040.0, ans=0.125 +2024-07-28 05:16:32,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115053.33333333333, ans=0.1 +2024-07-28 05:16:34,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=115053.33333333333, ans=0.125 +2024-07-28 05:16:35,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=115053.33333333333, ans=0.0 +2024-07-28 05:16:49,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.95 vs. limit=6.0 +2024-07-28 05:16:58,145 INFO [train.py:1114] (2/4) Epoch 9, batch 4550, loss[loss=0.1913, simple_loss=0.2831, pruned_loss=0.04971, over 4901.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2913, pruned_loss=0.06041, over 940320.10 frames. ], batch size: 13, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:58,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. 
limit=15.0 +2024-07-28 05:17:06,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=115120.0, ans=0.025 +2024-07-28 05:17:07,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=115120.0, ans=0.125 +2024-07-28 05:17:09,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115120.0, ans=0.1 +2024-07-28 05:17:37,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=115133.33333333333, ans=0.125 +2024-07-28 05:17:38,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 05:17:38,915 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.875e+01 5.713e+01 6.359e+01 7.183e+01 1.180e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 05:17:42,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.11 vs. limit=22.5 +2024-07-28 05:17:52,583 INFO [train.py:1114] (2/4) Epoch 9, batch 4600, loss[loss=0.2226, simple_loss=0.3161, pruned_loss=0.06459, over 4472.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2909, pruned_loss=0.06025, over 937866.30 frames. ], batch size: 21, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:17:58,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=15.0 +2024-07-28 05:18:04,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115186.66666666667, ans=0.1 +2024-07-28 05:18:05,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=115186.66666666667, ans=0.025 +2024-07-28 05:18:20,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=115213.33333333333, ans=0.125 +2024-07-28 05:19:46,529 INFO [train.py:1114] (2/4) Epoch 9, batch 4650, loss[loss=0.2183, simple_loss=0.3002, pruned_loss=0.06817, over 4807.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2913, pruned_loss=0.06002, over 939558.61 frames. ], batch size: 16, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:19:55,001 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.01 vs. 
limit=15.0 +2024-07-28 05:19:56,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=115253.33333333333, ans=0.2 +2024-07-28 05:19:58,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=115253.33333333333, ans=0.0 +2024-07-28 05:20:01,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115253.33333333333, ans=0.1 +2024-07-28 05:20:02,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=115253.33333333333, ans=0.0 +2024-07-28 05:20:16,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=115266.66666666667, ans=0.1 +2024-07-28 05:20:20,851 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.631e+01 6.409e+01 7.272e+01 9.674e+01, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:20:24,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.13 vs. limit=15.0 +2024-07-28 05:20:24,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=115280.0, ans=0.125 +2024-07-28 05:20:27,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115280.0, ans=0.1 +2024-07-28 05:20:28,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.98 vs. limit=12.0 +2024-07-28 05:20:31,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115293.33333333333, ans=0.125 +2024-07-28 05:20:32,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115293.33333333333, ans=0.1 +2024-07-28 05:20:39,136 INFO [train.py:1114] (2/4) Epoch 9, batch 4700, loss[loss=0.1614, simple_loss=0.2438, pruned_loss=0.03951, over 4711.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.291, pruned_loss=0.0602, over 937050.71 frames. ], batch size: 11, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:20:40,991 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.16 vs. limit=22.5 +2024-07-28 05:20:44,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=115306.66666666667, ans=0.04949747468305833 +2024-07-28 05:20:56,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.41 vs. 
limit=15.0 +2024-07-28 05:21:00,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=115346.66666666667, ans=0.125 +2024-07-28 05:21:01,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=115346.66666666667, ans=0.0 +2024-07-28 05:21:02,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115346.66666666667, ans=0.1 +2024-07-28 05:21:03,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.70 vs. limit=15.0 +2024-07-28 05:21:10,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=115360.0, ans=0.0 +2024-07-28 05:21:12,819 INFO [train.py:1114] (2/4) Epoch 9, batch 4750, loss[loss=0.2501, simple_loss=0.3232, pruned_loss=0.08847, over 4473.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2928, pruned_loss=0.06147, over 935158.49 frames. ], batch size: 21, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:30,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=115400.0, ans=0.125 +2024-07-28 05:21:34,803 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.742e+01 6.606e+01 7.346e+01 1.206e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 05:21:35,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=115413.33333333333, ans=0.0 +2024-07-28 05:21:40,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=115426.66666666667, ans=0.025 +2024-07-28 05:21:41,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=115426.66666666667, ans=0.125 +2024-07-28 05:21:50,061 INFO [train.py:1114] (2/4) Epoch 9, batch 4800, loss[loss=0.2334, simple_loss=0.3195, pruned_loss=0.07372, over 4695.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2925, pruned_loss=0.06171, over 933017.55 frames. ], batch size: 13, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:22:07,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0 +2024-07-28 05:22:09,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=115453.33333333333, ans=0.0 +2024-07-28 05:22:41,485 INFO [train.py:1114] (2/4) Epoch 9, batch 4850, loss[loss=0.1999, simple_loss=0.2936, pruned_loss=0.05315, over 4740.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2926, pruned_loss=0.06161, over 932765.67 frames. ], batch size: 14, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:22:45,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=115506.66666666667, ans=0.0 +2024-07-28 05:23:03,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.65 vs. 
limit=15.0 +2024-07-28 05:23:07,385 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.778e+01 6.431e+01 7.298e+01 1.043e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 05:23:09,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=115546.66666666667, ans=0.125 +2024-07-28 05:23:14,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=115560.0, ans=0.125 +2024-07-28 05:23:18,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=115560.0, ans=0.05 +2024-07-28 05:23:19,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=115560.0, ans=0.2 +2024-07-28 05:23:20,780 INFO [train.py:1114] (2/4) Epoch 9, batch 4900, loss[loss=0.2372, simple_loss=0.3221, pruned_loss=0.07614, over 4759.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2912, pruned_loss=0.06042, over 934628.06 frames. ], batch size: 13, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:23:34,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115586.66666666667, ans=0.125 +2024-07-28 05:23:40,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=115600.0, ans=0.95 +2024-07-28 05:23:50,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0 +2024-07-28 05:23:51,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=115613.33333333333, ans=10.0 +2024-07-28 05:23:51,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=115613.33333333333, ans=0.125 +2024-07-28 05:23:53,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=115613.33333333333, ans=0.025 +2024-07-28 05:23:55,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=115626.66666666667, ans=0.125 +2024-07-28 05:24:12,935 INFO [train.py:1114] (2/4) Epoch 9, batch 4950, loss[loss=0.2942, simple_loss=0.3602, pruned_loss=0.1141, over 3276.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2922, pruned_loss=0.06132, over 931840.18 frames. ], batch size: 35, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:24:23,926 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.77 vs. 
limit=22.5 +2024-07-28 05:24:33,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=115666.66666666667, ans=0.125 +2024-07-28 05:24:35,818 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+01 5.657e+01 6.231e+01 6.947e+01 1.249e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 05:24:43,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115680.0, ans=0.1 +2024-07-28 05:25:13,963 INFO [train.py:1114] (2/4) Epoch 9, batch 5000, loss[loss=0.2027, simple_loss=0.2916, pruned_loss=0.05692, over 4664.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2918, pruned_loss=0.06074, over 935721.71 frames. ], batch size: 14, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:25:26,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=115706.66666666667, ans=0.025 +2024-07-28 05:25:51,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=115720.0, ans=22.5 +2024-07-28 05:25:51,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=115720.0, ans=0.125 +2024-07-28 05:25:52,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=115720.0, ans=0.0 +2024-07-28 05:25:55,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=115733.33333333333, ans=0.0 +2024-07-28 05:26:01,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=115733.33333333333, ans=0.125 +2024-07-28 05:26:01,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=115733.33333333333, ans=0.2 +2024-07-28 05:26:19,882 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:26:28,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=115760.0, ans=0.1 +2024-07-28 05:26:44,473 INFO [train.py:1114] (2/4) Epoch 9, batch 5050, loss[loss=0.2101, simple_loss=0.2857, pruned_loss=0.0673, over 4849.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2915, pruned_loss=0.06043, over 938346.07 frames. 
], batch size: 12, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:26:44,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=115773.33333333333, ans=0.125 +2024-07-28 05:26:46,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115773.33333333333, ans=0.1 +2024-07-28 05:27:05,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115786.66666666667, ans=0.1 +2024-07-28 05:27:07,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=115786.66666666667, ans=0.04949747468305833 +2024-07-28 05:27:29,892 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 5.915e+01 6.647e+01 7.788e+01 1.077e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:27:30,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115813.33333333333, ans=0.1 +2024-07-28 05:27:32,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115813.33333333333, ans=0.1 +2024-07-28 05:27:37,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=115813.33333333333, ans=0.125 +2024-07-28 05:27:39,146 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=15.0 +2024-07-28 05:27:41,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=115826.66666666667, ans=0.125 +2024-07-28 05:27:48,483 INFO [train.py:1114] (2/4) Epoch 9, batch 5100, loss[loss=0.202, simple_loss=0.2862, pruned_loss=0.05888, over 4779.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2922, pruned_loss=0.06106, over 935600.85 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:28:14,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=115853.33333333333, ans=0.0 +2024-07-28 05:28:20,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=115853.33333333333, ans=0.0 +2024-07-28 05:28:26,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=115866.66666666667, ans=0.05 +2024-07-28 05:28:26,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=115866.66666666667, ans=0.0 +2024-07-28 05:28:44,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=115880.0, ans=0.2 +2024-07-28 05:28:59,107 INFO [train.py:1114] (2/4) Epoch 9, batch 5150, loss[loss=0.2275, simple_loss=0.3125, pruned_loss=0.07128, over 4844.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2929, pruned_loss=0.0613, over 936189.72 frames. 
], batch size: 16, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:29:02,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=115906.66666666667, ans=0.125 +2024-07-28 05:29:34,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=6.0 +2024-07-28 05:29:35,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=115933.33333333333, ans=0.125 +2024-07-28 05:29:37,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.59 vs. limit=15.0 +2024-07-28 05:29:37,799 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 5.668e+01 6.329e+01 7.486e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 05:29:46,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=115946.66666666667, ans=0.125 +2024-07-28 05:29:50,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115946.66666666667, ans=0.125 +2024-07-28 05:29:51,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=115960.0, ans=0.0 +2024-07-28 05:30:01,859 INFO [train.py:1114] (2/4) Epoch 9, batch 5200, loss[loss=0.2144, simple_loss=0.3045, pruned_loss=0.06217, over 4672.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2943, pruned_loss=0.06169, over 936384.19 frames. ], batch size: 14, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:30:05,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=115973.33333333333, ans=0.5 +2024-07-28 05:30:16,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=115986.66666666667, ans=0.125 +2024-07-28 05:30:46,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=116026.66666666667, ans=0.0 +2024-07-28 05:30:47,211 INFO [train.py:1114] (2/4) Epoch 9, batch 5250, loss[loss=0.2078, simple_loss=0.2862, pruned_loss=0.06471, over 4894.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2933, pruned_loss=0.06106, over 936246.73 frames. ], batch size: 13, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:30:47,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. limit=10.0 +2024-07-28 05:30:50,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116040.0, ans=0.1 +2024-07-28 05:30:50,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116040.0, ans=0.1 +2024-07-28 05:31:08,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.83 vs. limit=10.0 +2024-07-28 05:31:25,410 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.15 vs. 
limit=15.0 +2024-07-28 05:31:30,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=116080.0, ans=0.2 +2024-07-28 05:31:31,301 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+01 5.667e+01 6.856e+01 8.237e+01 1.145e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-28 05:31:33,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=116080.0, ans=0.125 +2024-07-28 05:31:37,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=116093.33333333333, ans=0.125 +2024-07-28 05:32:01,286 INFO [train.py:1114] (2/4) Epoch 9, batch 5300, loss[loss=0.2069, simple_loss=0.2899, pruned_loss=0.062, over 4657.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2926, pruned_loss=0.06098, over 934171.08 frames. ], batch size: 16, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:32:08,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.65 vs. limit=15.0 +2024-07-28 05:32:10,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116120.0, ans=0.1 +2024-07-28 05:32:17,930 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:32:18,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116120.0, ans=0.1 +2024-07-28 05:32:22,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=116133.33333333333, ans=0.0 +2024-07-28 05:32:23,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=116133.33333333333, ans=0.5 +2024-07-28 05:32:24,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116133.33333333333, ans=0.1 +2024-07-28 05:32:24,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116133.33333333333, ans=0.125 +2024-07-28 05:32:24,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.47 vs. limit=15.0 +2024-07-28 05:32:35,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=116146.66666666667, ans=10.0 +2024-07-28 05:32:41,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116160.0, ans=0.125 +2024-07-28 05:32:44,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=116160.0, ans=0.2 +2024-07-28 05:32:46,256 INFO [train.py:1114] (2/4) Epoch 9, batch 5350, loss[loss=0.201, simple_loss=0.2732, pruned_loss=0.0644, over 4533.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2937, pruned_loss=0.06163, over 936348.40 frames. 
], batch size: 10, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:33:01,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=116186.66666666667, ans=0.0 +2024-07-28 05:33:13,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116213.33333333333, ans=0.125 +2024-07-28 05:33:14,384 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.909e+01 6.357e+01 7.144e+01 1.044e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 05:33:14,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=116213.33333333333, ans=0.2 +2024-07-28 05:33:16,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=116213.33333333333, ans=0.125 +2024-07-28 05:33:35,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=116226.66666666667, ans=0.0 +2024-07-28 05:33:36,870 INFO [train.py:1114] (2/4) Epoch 9, batch 5400, loss[loss=0.2083, simple_loss=0.292, pruned_loss=0.0623, over 4126.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2943, pruned_loss=0.0623, over 930207.61 frames. ], batch size: 25, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:33:38,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=116240.0, ans=0.035 +2024-07-28 05:33:38,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=116240.0, ans=0.125 +2024-07-28 05:33:57,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116253.33333333333, ans=0.1 +2024-07-28 05:34:02,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.69 vs. limit=15.0 +2024-07-28 05:34:18,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=116293.33333333333, ans=0.125 +2024-07-28 05:34:22,033 INFO [train.py:1114] (2/4) Epoch 9, batch 5450, loss[loss=0.1824, simple_loss=0.2606, pruned_loss=0.05214, over 4691.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2923, pruned_loss=0.06107, over 932913.35 frames. ], batch size: 11, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:34:45,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=116320.0, ans=0.125 +2024-07-28 05:34:55,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=116333.33333333333, ans=0.2 +2024-07-28 05:34:58,370 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.949e+01 6.805e+01 7.625e+01 9.971e+01, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 05:35:22,113 INFO [train.py:1114] (2/4) Epoch 9, batch 5500, loss[loss=0.2398, simple_loss=0.3288, pruned_loss=0.07538, over 4152.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2925, pruned_loss=0.0616, over 930687.07 frames. 
], batch size: 25, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:35:23,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116373.33333333333, ans=0.125 +2024-07-28 05:35:31,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116386.66666666667, ans=0.125 +2024-07-28 05:35:40,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-28 05:35:49,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=116426.66666666667, ans=0.0 +2024-07-28 05:35:51,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.78 vs. limit=15.0 +2024-07-28 05:35:56,269 INFO [train.py:1114] (2/4) Epoch 9, batch 5550, loss[loss=0.1594, simple_loss=0.255, pruned_loss=0.03194, over 4708.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2922, pruned_loss=0.06163, over 933062.73 frames. ], batch size: 12, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:36:01,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=116440.0, ans=0.125 +2024-07-28 05:36:04,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.49 vs. limit=15.0 +2024-07-28 05:36:09,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=116453.33333333333, ans=0.125 +2024-07-28 05:36:15,924 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:36:15,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.88 vs. limit=15.0 +2024-07-28 05:36:17,000 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.683e+01 6.413e+01 7.380e+01 1.098e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 05:36:17,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=116480.0, ans=0.125 +2024-07-28 05:36:18,562 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:36:20,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=116480.0, ans=0.0 +2024-07-28 05:36:35,084 INFO [train.py:1114] (2/4) Epoch 9, batch 5600, loss[loss=0.2135, simple_loss=0.3104, pruned_loss=0.05824, over 4736.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2922, pruned_loss=0.06141, over 934193.90 frames. ], batch size: 14, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:36:35,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.01 vs. 
limit=15.0 +2024-07-28 05:37:02,300 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:37:08,962 INFO [train.py:1114] (2/4) Epoch 9, batch 5650, loss[loss=0.2064, simple_loss=0.2885, pruned_loss=0.06216, over 4422.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2906, pruned_loss=0.06066, over 936439.30 frames. ], batch size: 21, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:37:09,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.73 vs. limit=22.5 +2024-07-28 05:37:09,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=116573.33333333333, ans=0.0 +2024-07-28 05:37:19,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=116586.66666666667, ans=0.07 +2024-07-28 05:37:28,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=116613.33333333333, ans=0.0 +2024-07-28 05:37:28,749 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+01 5.748e+01 6.232e+01 7.231e+01 1.019e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 05:37:35,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.01 vs. limit=15.0 +2024-07-28 05:37:35,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=116626.66666666667, ans=0.0 +2024-07-28 05:37:43,686 INFO [train.py:1114] (2/4) Epoch 9, batch 5700, loss[loss=0.2298, simple_loss=0.3141, pruned_loss=0.07272, over 4697.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2911, pruned_loss=0.0606, over 937845.00 frames. ], batch size: 13, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:37:50,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=116653.33333333333, ans=0.125 +2024-07-28 05:38:05,893 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.68 vs. limit=22.5 +2024-07-28 05:38:18,397 INFO [train.py:1114] (2/4) Epoch 9, batch 5750, loss[loss=0.2133, simple_loss=0.2992, pruned_loss=0.06374, over 4742.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2908, pruned_loss=0.06072, over 938024.31 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:38:19,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=116706.66666666667, ans=0.2 +2024-07-28 05:38:34,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=116720.0, ans=0.125 +2024-07-28 05:38:38,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=116720.0, ans=0.0 +2024-07-28 05:38:47,984 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.019e+01 5.728e+01 6.287e+01 7.264e+01 1.232e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 05:39:17,885 INFO [train.py:1114] (2/4) Epoch 9, batch 5800, loss[loss=0.2423, simple_loss=0.3248, pruned_loss=0.0799, over 4708.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2925, pruned_loss=0.06177, over 937526.88 frames. 
], batch size: 19, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:39:28,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=116786.66666666667, ans=0.025 +2024-07-28 05:39:39,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-07-28 05:39:44,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=116813.33333333333, ans=0.2 +2024-07-28 05:39:57,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.71 vs. limit=15.0 +2024-07-28 05:40:00,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=116826.66666666667, ans=0.0 +2024-07-28 05:40:02,722 INFO [train.py:1114] (2/4) Epoch 9, batch 5850, loss[loss=0.2394, simple_loss=0.3191, pruned_loss=0.07982, over 4541.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2916, pruned_loss=0.0613, over 938314.12 frames. ], batch size: 21, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:40:06,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=116840.0, ans=0.2 +2024-07-28 05:40:25,339 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.305e+01 6.200e+01 6.975e+01 8.009e+01 1.394e+02, threshold=1.395e+02, percent-clipped=2.0 +2024-07-28 05:40:38,638 INFO [train.py:1114] (2/4) Epoch 9, batch 5900, loss[loss=0.2031, simple_loss=0.2944, pruned_loss=0.0559, over 4689.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.291, pruned_loss=0.06074, over 938345.69 frames. ], batch size: 15, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:40:44,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116906.66666666667, ans=0.1 +2024-07-28 05:40:57,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-07-28 05:41:11,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=116960.0, ans=0.0 +2024-07-28 05:41:17,906 INFO [train.py:1114] (2/4) Epoch 9, batch 5950, loss[loss=0.2138, simple_loss=0.2952, pruned_loss=0.06621, over 4687.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2906, pruned_loss=0.0599, over 940329.29 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:41:22,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116973.33333333333, ans=0.0 +2024-07-28 05:41:37,805 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.843e+01 6.530e+01 7.569e+01 1.342e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 05:41:46,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=117026.66666666667, ans=0.0 +2024-07-28 05:41:51,467 INFO [train.py:1114] (2/4) Epoch 9, batch 6000, loss[loss=0.209, simple_loss=0.2982, pruned_loss=0.05992, over 4234.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2901, pruned_loss=0.05958, over 937166.76 frames. 
], batch size: 25, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:41:51,467 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 05:42:05,184 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.175, simple_loss=0.2796, pruned_loss=0.03521, over 944034.00 frames. +2024-07-28 05:42:05,185 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 05:42:18,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=117053.33333333333, ans=0.125 +2024-07-28 05:42:19,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117066.66666666667, ans=0.1 +2024-07-28 05:42:22,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=117066.66666666667, ans=0.0 +2024-07-28 05:42:24,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=117066.66666666667, ans=0.125 +2024-07-28 05:42:29,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117080.0, ans=0.125 +2024-07-28 05:42:32,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=117093.33333333333, ans=0.125 +2024-07-28 05:42:39,968 INFO [train.py:1114] (2/4) Epoch 9, batch 6050, loss[loss=0.173, simple_loss=0.2596, pruned_loss=0.04324, over 4775.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2897, pruned_loss=0.05972, over 938322.18 frames. ], batch size: 12, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:42:42,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=117106.66666666667, ans=0.125 +2024-07-28 05:42:46,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117120.0, ans=0.1 +2024-07-28 05:42:57,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=117120.0, ans=0.0 +2024-07-28 05:43:05,323 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.753e+01 6.307e+01 7.312e+01 1.282e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 05:43:09,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.41 vs. limit=15.0 +2024-07-28 05:43:18,564 INFO [train.py:1114] (2/4) Epoch 9, batch 6100, loss[loss=0.2161, simple_loss=0.3013, pruned_loss=0.06542, over 4688.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2898, pruned_loss=0.05973, over 937829.76 frames. 
], batch size: 15, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:43:30,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=117186.66666666667, ans=0.2 +2024-07-28 05:43:31,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=117200.0, ans=0.0 +2024-07-28 05:43:34,839 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:43:36,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=117200.0, ans=0.2 +2024-07-28 05:43:36,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=117200.0, ans=0.125 +2024-07-28 05:43:41,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=15.0 +2024-07-28 05:43:47,341 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:43:53,988 INFO [train.py:1114] (2/4) Epoch 9, batch 6150, loss[loss=0.2696, simple_loss=0.339, pruned_loss=0.1001, over 3266.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2903, pruned_loss=0.06016, over 936405.42 frames. ], batch size: 36, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:44:11,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=15.0 +2024-07-28 05:44:15,654 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.523e+01 6.022e+01 6.962e+01 1.002e+02, threshold=1.204e+02, percent-clipped=1.0 +2024-07-28 05:44:32,084 INFO [train.py:1114] (2/4) Epoch 9, batch 6200, loss[loss=0.1728, simple_loss=0.2689, pruned_loss=0.03835, over 4741.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2906, pruned_loss=0.06022, over 936174.27 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 32.0 +2024-07-28 05:44:37,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=117306.66666666667, ans=15.0 +2024-07-28 05:44:47,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117320.0, ans=0.1 +2024-07-28 05:44:47,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=117320.0, ans=0.125 +2024-07-28 05:44:51,774 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:45:05,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117346.66666666667, ans=0.1 +2024-07-28 05:45:15,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=117360.0, ans=0.125 +2024-07-28 05:45:22,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=117373.33333333333, ans=0.0 +2024-07-28 05:45:22,588 INFO [train.py:1114] (2/4) Epoch 9, batch 6250, loss[loss=0.1926, simple_loss=0.2834, pruned_loss=0.0509, over 4805.00 frames. 
], tot_loss[loss=0.205, simple_loss=0.2903, pruned_loss=0.05988, over 932624.14 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:45:26,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117373.33333333333, ans=0.1 +2024-07-28 05:45:30,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=117373.33333333333, ans=0.125 +2024-07-28 05:45:30,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.97 vs. limit=22.5 +2024-07-28 05:45:30,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=117373.33333333333, ans=0.0 +2024-07-28 05:45:57,919 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.368e+01 5.807e+01 6.495e+01 7.426e+01 1.051e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 05:45:58,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=117413.33333333333, ans=0.125 +2024-07-28 05:46:07,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=117426.66666666667, ans=0.0 +2024-07-28 05:46:07,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117426.66666666667, ans=0.125 +2024-07-28 05:46:11,022 INFO [train.py:1114] (2/4) Epoch 9, batch 6300, loss[loss=0.1926, simple_loss=0.2608, pruned_loss=0.06223, over 4512.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2909, pruned_loss=0.06081, over 929840.68 frames. ], batch size: 10, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:46:17,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117453.33333333333, ans=0.1 +2024-07-28 05:46:18,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=117453.33333333333, ans=0.2 +2024-07-28 05:46:24,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=117466.66666666667, ans=0.125 +2024-07-28 05:46:40,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.38 vs. limit=15.0 +2024-07-28 05:46:45,005 INFO [train.py:1114] (2/4) Epoch 9, batch 6350, loss[loss=0.2142, simple_loss=0.2977, pruned_loss=0.06533, over 4520.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2898, pruned_loss=0.06037, over 933849.35 frames. 
], batch size: 21, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:46:50,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=117506.66666666667, ans=0.0 +2024-07-28 05:47:05,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.641e+01 6.337e+01 7.331e+01 1.035e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 05:47:13,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=117560.0, ans=0.125 +2024-07-28 05:47:13,821 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=15.0 +2024-07-28 05:47:14,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=117560.0, ans=0.2 +2024-07-28 05:47:18,819 INFO [train.py:1114] (2/4) Epoch 9, batch 6400, loss[loss=0.2085, simple_loss=0.3037, pruned_loss=0.05659, over 4632.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2894, pruned_loss=0.05987, over 934898.92 frames. ], batch size: 13, lr: 8.29e-03, grad_scale: 64.0 +2024-07-28 05:47:38,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=117613.33333333333, ans=0.0 +2024-07-28 05:47:51,903 INFO [train.py:1114] (2/4) Epoch 9, batch 6450, loss[loss=0.2015, simple_loss=0.2883, pruned_loss=0.05729, over 4585.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2897, pruned_loss=0.05991, over 938595.36 frames. ], batch size: 21, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:47:52,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=117640.0, ans=0.0 +2024-07-28 05:48:11,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0 +2024-07-28 05:48:12,207 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.777e+01 6.265e+01 7.458e+01 1.073e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 05:48:19,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.75 vs. limit=10.0 +2024-07-28 05:48:20,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=117693.33333333333, ans=0.04949747468305833 +2024-07-28 05:48:24,566 INFO [train.py:1114] (2/4) Epoch 9, batch 6500, loss[loss=0.3357, simple_loss=0.3816, pruned_loss=0.1449, over 3415.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2903, pruned_loss=0.06052, over 939937.45 frames. ], batch size: 35, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:48:29,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117706.66666666667, ans=0.1 +2024-07-28 05:48:30,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=117720.0, ans=0.0 +2024-07-28 05:48:31,226 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=6.0 +2024-07-28 05:48:33,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=117720.0, ans=0.125 +2024-07-28 05:48:41,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117733.33333333333, ans=0.1 +2024-07-28 05:48:53,630 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=15.0 +2024-07-28 05:48:54,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=117760.0, ans=0.125 +2024-07-28 05:49:03,007 INFO [train.py:1114] (2/4) Epoch 9, batch 6550, loss[loss=0.1671, simple_loss=0.2513, pruned_loss=0.04143, over 4797.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2892, pruned_loss=0.05923, over 942738.97 frames. ], batch size: 11, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:49:13,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117786.66666666667, ans=0.1 +2024-07-28 05:49:13,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=117786.66666666667, ans=0.0 +2024-07-28 05:49:21,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.02 vs. limit=6.0 +2024-07-28 05:49:23,500 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.652e+01 6.284e+01 7.270e+01 1.094e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 05:49:26,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=117813.33333333333, ans=0.0 +2024-07-28 05:49:34,657 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:49:35,935 INFO [train.py:1114] (2/4) Epoch 9, batch 6600, loss[loss=0.1761, simple_loss=0.2661, pruned_loss=0.043, over 4933.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2884, pruned_loss=0.05892, over 944658.66 frames. 
], batch size: 14, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:49:41,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=117840.0, ans=0.0 +2024-07-28 05:49:48,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117853.33333333333, ans=0.1 +2024-07-28 05:49:50,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=117866.66666666667, ans=0.125 +2024-07-28 05:49:53,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=117866.66666666667, ans=0.95 +2024-07-28 05:50:04,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=117880.0, ans=0.0 +2024-07-28 05:50:09,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=117893.33333333333, ans=0.125 +2024-07-28 05:50:12,823 INFO [train.py:1114] (2/4) Epoch 9, batch 6650, loss[loss=0.2125, simple_loss=0.3008, pruned_loss=0.06209, over 4615.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2885, pruned_loss=0.05901, over 943150.88 frames. ], batch size: 17, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:50:18,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=117920.0, ans=0.0 +2024-07-28 05:50:26,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=117933.33333333333, ans=0.1 +2024-07-28 05:50:35,494 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.735e+01 6.176e+01 7.286e+01 9.615e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 05:50:37,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=117946.66666666667, ans=0.125 +2024-07-28 05:50:47,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=117973.33333333333, ans=0.0 +2024-07-28 05:50:48,321 INFO [train.py:1114] (2/4) Epoch 9, batch 6700, loss[loss=0.207, simple_loss=0.2949, pruned_loss=0.05951, over 4658.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2894, pruned_loss=0.05921, over 942066.55 frames. ], batch size: 19, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:50:58,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.41 vs. limit=15.0 +2024-07-28 05:51:20,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=118026.66666666667, ans=0.0 +2024-07-28 05:51:22,054 INFO [train.py:1114] (2/4) Epoch 9, batch 6750, loss[loss=0.2189, simple_loss=0.3034, pruned_loss=0.06713, over 4192.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2889, pruned_loss=0.059, over 939905.45 frames. ], batch size: 25, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:51:25,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.10 vs. 
limit=10.0 +2024-07-28 05:51:39,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=118066.66666666667, ans=0.0 +2024-07-28 05:51:42,739 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.833e+01 6.338e+01 7.124e+01 1.183e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 05:51:49,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=118093.33333333333, ans=0.0 +2024-07-28 05:51:49,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=118093.33333333333, ans=0.0 +2024-07-28 05:51:53,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=118093.33333333333, ans=0.0 +2024-07-28 05:51:55,771 INFO [train.py:1114] (2/4) Epoch 9, batch 6800, loss[loss=0.2376, simple_loss=0.3167, pruned_loss=0.07922, over 4634.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2892, pruned_loss=0.05944, over 938176.57 frames. ], batch size: 13, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:52:06,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118120.0, ans=0.125 +2024-07-28 05:52:18,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=118146.66666666667, ans=0.125 +2024-07-28 05:52:29,594 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:52:32,167 INFO [train.py:1114] (2/4) Epoch 9, batch 6850, loss[loss=0.2111, simple_loss=0.3097, pruned_loss=0.05619, over 4691.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2895, pruned_loss=0.05958, over 940065.02 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:52:40,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff2.min_abs, batch_count=118186.66666666667, ans=0.1 +2024-07-28 05:52:40,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118186.66666666667, ans=0.0 +2024-07-28 05:52:42,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=118186.66666666667, ans=0.125 +2024-07-28 05:52:42,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=118186.66666666667, ans=0.0 +2024-07-28 05:52:51,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.55 vs. limit=22.5 +2024-07-28 05:52:53,782 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.745e+01 6.443e+01 7.368e+01 1.069e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 05:53:08,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=118226.66666666667, ans=0.0 +2024-07-28 05:53:10,619 INFO [train.py:1114] (2/4) Epoch 9, batch 6900, loss[loss=0.1886, simple_loss=0.2782, pruned_loss=0.04949, over 4969.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2911, pruned_loss=0.06009, over 942288.85 frames. 
], batch size: 13, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:53:13,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.05 vs. limit=6.0 +2024-07-28 05:53:16,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=118240.0, ans=0.2 +2024-07-28 05:53:26,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0 +2024-07-28 05:53:33,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=118280.0, ans=0.0 +2024-07-28 05:53:39,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=118293.33333333333, ans=0.125 +2024-07-28 05:53:42,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118293.33333333333, ans=0.0 +2024-07-28 05:53:44,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=118293.33333333333, ans=0.0 +2024-07-28 05:53:45,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=118293.33333333333, ans=0.125 +2024-07-28 05:53:46,432 INFO [train.py:1114] (2/4) Epoch 9, batch 6950, loss[loss=0.1966, simple_loss=0.2707, pruned_loss=0.06123, over 4527.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.292, pruned_loss=0.06085, over 940189.66 frames. ], batch size: 10, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:53:59,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=118333.33333333333, ans=0.125 +2024-07-28 05:54:04,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118333.33333333333, ans=0.1 +2024-07-28 05:54:06,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.744e+01 6.460e+01 7.316e+01 1.273e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 05:54:08,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=118346.66666666667, ans=0.0 +2024-07-28 05:54:19,633 INFO [train.py:1114] (2/4) Epoch 9, batch 7000, loss[loss=0.2234, simple_loss=0.3067, pruned_loss=0.0701, over 4624.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2913, pruned_loss=0.06091, over 938935.93 frames. ], batch size: 17, lr: 8.27e-03, grad_scale: 32.0 +2024-07-28 05:54:19,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118373.33333333333, ans=0.1 +2024-07-28 05:54:39,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=118413.33333333333, ans=0.0 +2024-07-28 05:54:40,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118413.33333333333, ans=0.125 +2024-07-28 05:54:53,262 INFO [train.py:1114] (2/4) Epoch 9, batch 7050, loss[loss=0.2055, simple_loss=0.2957, pruned_loss=0.05766, over 4706.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2907, pruned_loss=0.06024, over 942173.65 frames. 
], batch size: 19, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:54:54,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=118440.0, ans=0.125 +2024-07-28 05:54:54,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=118440.0, ans=0.0 +2024-07-28 05:54:55,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=21.77 vs. limit=22.5 +2024-07-28 05:54:55,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=118440.0, ans=0.125 +2024-07-28 05:55:14,449 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.222e+01 6.949e+01 1.042e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 05:55:14,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=118480.0, ans=0.0 +2024-07-28 05:55:14,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=118480.0, ans=0.1 +2024-07-28 05:55:21,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.64 vs. limit=15.0 +2024-07-28 05:55:21,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118493.33333333333, ans=0.125 +2024-07-28 05:55:21,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=118493.33333333333, ans=0.0 +2024-07-28 05:55:26,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0 +2024-07-28 05:55:26,697 INFO [train.py:1114] (2/4) Epoch 9, batch 7100, loss[loss=0.2362, simple_loss=0.3202, pruned_loss=0.07605, over 4810.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2911, pruned_loss=0.06052, over 936921.22 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:55:41,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=118533.33333333333, ans=0.125 +2024-07-28 05:55:46,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=118546.66666666667, ans=0.07 +2024-07-28 05:55:55,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=118560.0, ans=0.0 +2024-07-28 05:55:56,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118560.0, ans=0.1 +2024-07-28 05:55:57,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=118560.0, ans=0.02 +2024-07-28 05:55:59,523 INFO [train.py:1114] (2/4) Epoch 9, batch 7150, loss[loss=0.2434, simple_loss=0.3201, pruned_loss=0.08333, over 4480.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2887, pruned_loss=0.05959, over 937703.53 frames. 
], batch size: 21, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:56:01,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=118573.33333333333, ans=0.125 +2024-07-28 05:56:08,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=118586.66666666667, ans=0.025 +2024-07-28 05:56:12,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118600.0, ans=0.125 +2024-07-28 05:56:17,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=118600.0, ans=0.2 +2024-07-28 05:56:19,959 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.871e+01 5.611e+01 6.289e+01 7.655e+01 1.013e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 05:56:28,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=118626.66666666667, ans=0.0 +2024-07-28 05:56:30,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=118626.66666666667, ans=0.0 +2024-07-28 05:56:32,651 INFO [train.py:1114] (2/4) Epoch 9, batch 7200, loss[loss=0.1867, simple_loss=0.2815, pruned_loss=0.04593, over 4797.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2902, pruned_loss=0.05977, over 938706.55 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0 +2024-07-28 05:56:35,407 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:57:06,749 INFO [train.py:1114] (2/4) Epoch 9, batch 7250, loss[loss=0.1883, simple_loss=0.285, pruned_loss=0.04574, over 4858.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2888, pruned_loss=0.05938, over 940185.72 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:57:07,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=118706.66666666667, ans=0.125 +2024-07-28 05:57:08,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=118706.66666666667, ans=0.0 +2024-07-28 05:57:26,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.398e+01 5.726e+01 6.433e+01 7.236e+01 9.812e+01, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 05:57:37,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=118760.0, ans=0.125 +2024-07-28 05:57:39,736 INFO [train.py:1114] (2/4) Epoch 9, batch 7300, loss[loss=0.1962, simple_loss=0.2763, pruned_loss=0.05807, over 4851.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2883, pruned_loss=0.05926, over 940173.20 frames. 
], batch size: 12, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:57:40,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118773.33333333333, ans=0.125 +2024-07-28 05:57:41,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=118773.33333333333, ans=0.025 +2024-07-28 05:57:42,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=118773.33333333333, ans=0.125 +2024-07-28 05:57:43,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=12.0 +2024-07-28 05:57:51,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=118786.66666666667, ans=0.125 +2024-07-28 05:57:58,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=118800.0, ans=0.125 +2024-07-28 05:58:09,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=118813.33333333333, ans=0.0 +2024-07-28 05:58:12,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118826.66666666667, ans=0.125 +2024-07-28 05:58:13,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118826.66666666667, ans=0.1 +2024-07-28 05:58:13,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=118826.66666666667, ans=0.125 +2024-07-28 05:58:16,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=118840.0, ans=0.125 +2024-07-28 05:58:17,007 INFO [train.py:1114] (2/4) Epoch 9, batch 7350, loss[loss=0.1788, simple_loss=0.2593, pruned_loss=0.04918, over 4635.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2889, pruned_loss=0.05912, over 939710.10 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:58:19,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118840.0, ans=0.0 +2024-07-28 05:58:28,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=118853.33333333333, ans=0.5 +2024-07-28 05:58:30,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.68 vs. limit=22.5 +2024-07-28 05:58:33,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=118866.66666666667, ans=0.2 +2024-07-28 05:58:34,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 05:58:37,725 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+01 5.606e+01 6.103e+01 6.789e+01 9.069e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 05:58:39,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.00 vs. 
limit=15.0 +2024-07-28 05:58:47,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=118893.33333333333, ans=0.0 +2024-07-28 05:58:48,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=118893.33333333333, ans=0.125 +2024-07-28 05:58:50,115 INFO [train.py:1114] (2/4) Epoch 9, batch 7400, loss[loss=0.1849, simple_loss=0.2797, pruned_loss=0.04504, over 4694.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2893, pruned_loss=0.05935, over 940726.97 frames. ], batch size: 13, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:59:02,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=118933.33333333333, ans=0.125 +2024-07-28 05:59:02,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=118933.33333333333, ans=0.5 +2024-07-28 05:59:08,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118933.33333333333, ans=0.1 +2024-07-28 05:59:15,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=118946.66666666667, ans=0.125 +2024-07-28 05:59:15,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=15.0 +2024-07-28 05:59:22,843 INFO [train.py:1114] (2/4) Epoch 9, batch 7450, loss[loss=0.2058, simple_loss=0.2945, pruned_loss=0.05854, over 4619.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2885, pruned_loss=0.05925, over 938426.83 frames. ], batch size: 11, lr: 8.25e-03, grad_scale: 32.0 +2024-07-28 05:59:32,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=118986.66666666667, ans=0.2 +2024-07-28 05:59:46,254 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.870e+01 5.815e+01 6.542e+01 7.746e+01 1.541e+02, threshold=1.308e+02, percent-clipped=5.0 +2024-07-28 05:59:54,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=119026.66666666667, ans=0.125 +2024-07-28 05:59:59,612 INFO [train.py:1114] (2/4) Epoch 9, batch 7500, loss[loss=0.2943, simple_loss=0.3494, pruned_loss=0.1195, over 3224.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2889, pruned_loss=0.05971, over 936609.91 frames. ], batch size: 36, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:00:01,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=119040.0, ans=0.125 +2024-07-28 06:00:07,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119053.33333333333, ans=0.125 +2024-07-28 06:00:22,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=119080.0, ans=0.125 +2024-07-28 06:00:22,846 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:00:35,132 INFO [train.py:1114] (2/4) Epoch 9, batch 7550, loss[loss=0.2394, simple_loss=0.3371, pruned_loss=0.07082, over 4599.00 frames. 
], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.06044, over 936109.79 frames. ], batch size: 17, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:00:35,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=119106.66666666667, ans=0.2 +2024-07-28 06:00:54,943 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:00:55,461 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.364e+01 5.884e+01 6.441e+01 7.385e+01 1.107e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 06:01:05,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=119160.0, ans=0.0 +2024-07-28 06:01:07,528 INFO [train.py:1114] (2/4) Epoch 9, batch 7600, loss[loss=0.1956, simple_loss=0.2942, pruned_loss=0.04852, over 4817.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2904, pruned_loss=0.06019, over 937782.82 frames. ], batch size: 14, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:01:11,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=119173.33333333333, ans=0.07 +2024-07-28 06:01:12,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=119173.33333333333, ans=0.025 +2024-07-28 06:01:14,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=119186.66666666667, ans=15.0 +2024-07-28 06:01:33,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.67 vs. limit=22.5 +2024-07-28 06:01:38,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=119226.66666666667, ans=0.125 +2024-07-28 06:01:39,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=119226.66666666667, ans=0.0 +2024-07-28 06:01:40,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=119226.66666666667, ans=0.0 +2024-07-28 06:01:41,982 INFO [train.py:1114] (2/4) Epoch 9, batch 7650, loss[loss=0.1744, simple_loss=0.256, pruned_loss=0.04636, over 4926.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2903, pruned_loss=0.06058, over 936934.43 frames. 
], batch size: 12, lr: 8.24e-03, grad_scale: 32.0 +2024-07-28 06:01:42,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=119240.0, ans=0.0 +2024-07-28 06:01:42,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=119240.0, ans=0.1 +2024-07-28 06:01:48,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=119253.33333333333, ans=0.125 +2024-07-28 06:02:04,509 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.140e+01 5.757e+01 6.386e+01 7.107e+01 1.097e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 06:02:10,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=119293.33333333333, ans=0.125 +2024-07-28 06:02:11,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=119293.33333333333, ans=0.125 +2024-07-28 06:02:18,630 INFO [train.py:1114] (2/4) Epoch 9, batch 7700, loss[loss=0.2135, simple_loss=0.3032, pruned_loss=0.06189, over 4700.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2902, pruned_loss=0.06037, over 934256.04 frames. ], batch size: 13, lr: 8.23e-03, grad_scale: 16.0 +2024-07-28 06:02:21,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=119306.66666666667, ans=0.04949747468305833 +2024-07-28 06:02:34,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=119320.0, ans=0.125 +2024-07-28 06:03:18,527 INFO [train.py:1114] (2/4) Epoch 9, batch 7750, loss[loss=0.2017, simple_loss=0.2996, pruned_loss=0.05193, over 4924.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2916, pruned_loss=0.06023, over 935677.19 frames. ], batch size: 14, lr: 8.23e-03, grad_scale: 16.0 +2024-07-28 06:03:30,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=119386.66666666667, ans=0.2 +2024-07-28 06:03:31,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-07-28 06:03:34,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.22 vs. limit=15.0 +2024-07-28 06:03:50,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=119413.33333333333, ans=0.025 +2024-07-28 06:03:50,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.52 vs. 
limit=15.0 +2024-07-28 06:03:51,265 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.505e+01 6.110e+01 6.941e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 06:03:55,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119413.33333333333, ans=0.1 +2024-07-28 06:04:01,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=119426.66666666667, ans=0.2 +2024-07-28 06:04:03,702 INFO [train.py:1114] (2/4) Epoch 9, batch 7800, loss[loss=0.2047, simple_loss=0.2906, pruned_loss=0.05942, over 4664.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2923, pruned_loss=0.06043, over 937348.11 frames. ], batch size: 14, lr: 8.23e-03, grad_scale: 8.0 +2024-07-28 06:04:04,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119440.0, ans=0.1 +2024-07-28 06:04:20,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=119440.0, ans=0.0 +2024-07-28 06:04:51,736 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:05:20,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=119493.33333333333, ans=22.5 +2024-07-28 06:05:21,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=119493.33333333333, ans=0.2 +2024-07-28 06:05:26,847 INFO [train.py:1114] (2/4) Epoch 9, batch 7850, loss[loss=0.1679, simple_loss=0.2523, pruned_loss=0.04175, over 4573.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2918, pruned_loss=0.06039, over 936320.63 frames. ], batch size: 10, lr: 8.23e-03, grad_scale: 8.0 +2024-07-28 06:05:29,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=119506.66666666667, ans=0.0 +2024-07-28 06:05:29,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.58 vs. limit=22.5 +2024-07-28 06:05:44,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0 +2024-07-28 06:06:33,867 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 5.758e+01 6.163e+01 6.826e+01 1.029e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 06:06:52,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.08 vs. limit=15.0 +2024-07-28 06:07:12,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=119573.33333333333, ans=0.0 +2024-07-28 06:07:13,113 INFO [train.py:1114] (2/4) Epoch 9, batch 7900, loss[loss=0.2186, simple_loss=0.3006, pruned_loss=0.06828, over 4875.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2939, pruned_loss=0.0614, over 933204.22 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 8.0 +2024-07-28 06:07:18,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.89 vs. 
limit=6.0 +2024-07-28 06:07:19,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=119586.66666666667, ans=0.0 +2024-07-28 06:07:42,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=15.0 +2024-07-28 06:07:43,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=119586.66666666667, ans=0.125 +2024-07-28 06:07:44,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=119600.0, ans=0.05 +2024-07-28 06:07:50,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=119600.0, ans=0.0 +2024-07-28 06:07:50,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=119613.33333333333, ans=0.125 +2024-07-28 06:07:50,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=119613.33333333333, ans=0.2 +2024-07-28 06:08:05,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=119626.66666666667, ans=0.0 +2024-07-28 06:08:16,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=119640.0, ans=0.125 +2024-07-28 06:08:16,887 INFO [train.py:1114] (2/4) Epoch 9, batch 7950, loss[loss=0.2485, simple_loss=0.3253, pruned_loss=0.08586, over 3367.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2939, pruned_loss=0.0614, over 935228.69 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 8.0 +2024-07-28 06:08:17,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=119640.0, ans=0.1 +2024-07-28 06:08:30,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=119653.33333333333, ans=0.2 +2024-07-28 06:08:31,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119653.33333333333, ans=0.125 +2024-07-28 06:08:40,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=119653.33333333333, ans=0.0 +2024-07-28 06:08:44,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.36 vs. limit=15.0 +2024-07-28 06:08:47,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=119666.66666666667, ans=0.125 +2024-07-28 06:08:54,860 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 5.779e+01 6.459e+01 7.298e+01 1.141e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 06:08:56,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=119680.0, ans=0.0 +2024-07-28 06:08:57,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.74 vs. 
limit=15.0 +2024-07-28 06:09:24,212 INFO [train.py:1114] (2/4) Epoch 9, batch 8000, loss[loss=0.2311, simple_loss=0.2856, pruned_loss=0.08831, over 4605.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.292, pruned_loss=0.06077, over 933890.42 frames. ], batch size: 11, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:09:30,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119706.66666666667, ans=0.125 +2024-07-28 06:09:30,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.18 vs. limit=15.0 +2024-07-28 06:09:44,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=119733.33333333333, ans=0.125 +2024-07-28 06:09:45,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=119733.33333333333, ans=10.0 +2024-07-28 06:09:58,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=119733.33333333333, ans=0.0 +2024-07-28 06:09:59,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119746.66666666667, ans=0.1 +2024-07-28 06:10:01,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=119746.66666666667, ans=0.125 +2024-07-28 06:10:01,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=12.0 +2024-07-28 06:10:13,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=119746.66666666667, ans=10.0 +2024-07-28 06:10:28,782 INFO [train.py:1114] (2/4) Epoch 9, batch 8050, loss[loss=0.2297, simple_loss=0.3218, pruned_loss=0.06883, over 4808.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2914, pruned_loss=0.06066, over 934000.19 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:10:32,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=119773.33333333333, ans=0.125 +2024-07-28 06:10:47,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.83 vs. limit=12.0 +2024-07-28 06:10:47,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=119813.33333333333, ans=0.0 +2024-07-28 06:10:49,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.138e+01 7.014e+01 8.220e+01 1.277e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-28 06:11:03,031 INFO [train.py:1114] (2/4) Epoch 9, batch 8100, loss[loss=0.2323, simple_loss=0.3133, pruned_loss=0.0757, over 4801.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2914, pruned_loss=0.06079, over 933715.87 frames. ], batch size: 15, lr: 8.22e-03, grad_scale: 16.0 +2024-07-28 06:11:04,098 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.67 vs. 
limit=15.0 +2024-07-28 06:11:24,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.90 vs. limit=15.0 +2024-07-28 06:11:25,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119880.0, ans=0.125 +2024-07-28 06:11:31,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.55 vs. limit=22.5 +2024-07-28 06:11:33,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=119893.33333333333, ans=0.2 +2024-07-28 06:11:35,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119893.33333333333, ans=0.125 +2024-07-28 06:11:36,510 INFO [train.py:1114] (2/4) Epoch 9, batch 8150, loss[loss=0.2146, simple_loss=0.3015, pruned_loss=0.06384, over 4798.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2908, pruned_loss=0.06049, over 936995.73 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:11:41,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=119906.66666666667, ans=0.0 +2024-07-28 06:11:47,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.05 vs. limit=10.0 +2024-07-28 06:11:48,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 06:11:49,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=119933.33333333333, ans=0.125 +2024-07-28 06:11:50,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0 +2024-07-28 06:11:53,102 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=9.159e-02 +2024-07-28 06:11:57,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.730e+01 6.295e+01 7.311e+01 1.625e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 06:11:58,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=119946.66666666667, ans=10.0 +2024-07-28 06:12:05,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.55 vs. limit=22.5 +2024-07-28 06:12:08,485 INFO [train.py:1114] (2/4) Epoch 9, batch 8200, loss[loss=0.2111, simple_loss=0.3095, pruned_loss=0.05641, over 4789.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2911, pruned_loss=0.05984, over 937951.14 frames. 
], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:12:08,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=119973.33333333333, ans=0.95 +2024-07-28 06:12:20,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=119986.66666666667, ans=0.0 +2024-07-28 06:12:32,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=120013.33333333333, ans=0.1 +2024-07-28 06:12:37,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=120013.33333333333, ans=0.125 +2024-07-28 06:12:47,771 INFO [train.py:1114] (2/4) Epoch 9, batch 8250, loss[loss=0.1777, simple_loss=0.2705, pruned_loss=0.04244, over 4884.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2919, pruned_loss=0.05989, over 938151.13 frames. ], batch size: 13, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:12:55,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.71 vs. limit=15.0 +2024-07-28 06:13:08,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.01 vs. limit=22.5 +2024-07-28 06:13:10,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=120080.0, ans=0.0 +2024-07-28 06:13:12,021 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.840e+01 6.472e+01 7.401e+01 1.114e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 06:13:15,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=120080.0, ans=0.0 +2024-07-28 06:13:16,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.23 vs. limit=15.0 +2024-07-28 06:13:31,393 INFO [train.py:1114] (2/4) Epoch 9, batch 8300, loss[loss=0.2247, simple_loss=0.309, pruned_loss=0.07018, over 4907.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2932, pruned_loss=0.06036, over 938466.78 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0 +2024-07-28 06:13:36,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5 +2024-07-28 06:13:42,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.97 vs. 
limit=10.0 +2024-07-28 06:13:43,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=120120.0, ans=0.125 +2024-07-28 06:13:53,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=120146.66666666667, ans=0.025 +2024-07-28 06:13:57,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=120146.66666666667, ans=0.0 +2024-07-28 06:13:57,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=120146.66666666667, ans=0.125 +2024-07-28 06:14:00,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=120160.0, ans=0.025 +2024-07-28 06:14:01,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.24 vs. limit=22.5 +2024-07-28 06:14:06,597 INFO [train.py:1114] (2/4) Epoch 9, batch 8350, loss[loss=0.2498, simple_loss=0.336, pruned_loss=0.08178, over 4798.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2925, pruned_loss=0.06015, over 941302.65 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 16.0 +2024-07-28 06:14:11,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=120173.33333333333, ans=0.125 +2024-07-28 06:14:11,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=120173.33333333333, ans=0.1 +2024-07-28 06:14:25,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=120200.0, ans=0.125 +2024-07-28 06:14:29,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=120213.33333333333, ans=0.125 +2024-07-28 06:14:32,034 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.725e+01 6.523e+01 7.692e+01 9.570e+01, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 06:14:33,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=120213.33333333333, ans=0.125 +2024-07-28 06:14:35,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=120213.33333333333, ans=0.125 +2024-07-28 06:14:42,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=12.0 +2024-07-28 06:14:42,954 INFO [train.py:1114] (2/4) Epoch 9, batch 8400, loss[loss=0.1971, simple_loss=0.2879, pruned_loss=0.05317, over 4773.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2929, pruned_loss=0.0606, over 939871.16 frames. ], batch size: 12, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:14:51,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.00 vs. 
limit=10.0 +2024-07-28 06:14:55,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=120266.66666666667, ans=0.025 +2024-07-28 06:14:59,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=120266.66666666667, ans=0.0 +2024-07-28 06:14:59,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=120266.66666666667, ans=0.0 +2024-07-28 06:15:11,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=120293.33333333333, ans=0.125 +2024-07-28 06:15:11,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=120293.33333333333, ans=0.025 +2024-07-28 06:15:12,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=15.0 +2024-07-28 06:15:15,455 INFO [train.py:1114] (2/4) Epoch 9, batch 8450, loss[loss=0.218, simple_loss=0.3143, pruned_loss=0.06085, over 4798.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2922, pruned_loss=0.05992, over 939249.85 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:15:19,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=120306.66666666667, ans=0.0 +2024-07-28 06:15:19,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=120306.66666666667, ans=0.05 +2024-07-28 06:15:20,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.39 vs. limit=15.0 +2024-07-28 06:15:20,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=120306.66666666667, ans=0.0 +2024-07-28 06:15:38,203 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.798e+01 6.423e+01 7.347e+01 1.111e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 06:15:48,430 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.44 vs. limit=22.5 +2024-07-28 06:15:49,306 INFO [train.py:1114] (2/4) Epoch 9, batch 8500, loss[loss=0.1786, simple_loss=0.2597, pruned_loss=0.04874, over 4617.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.291, pruned_loss=0.05983, over 939282.43 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 32.0 +2024-07-28 06:16:00,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=120386.66666666667, ans=0.0 +2024-07-28 06:16:01,552 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.75 vs. 
limit=15.0 +2024-07-28 06:16:06,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=120400.0, ans=0.07 +2024-07-28 06:16:07,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120400.0, ans=0.1 +2024-07-28 06:16:15,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=120426.66666666667, ans=0.0 +2024-07-28 06:16:17,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=120426.66666666667, ans=0.02 +2024-07-28 06:16:21,537 INFO [train.py:1114] (2/4) Epoch 9, batch 8550, loss[loss=0.1764, simple_loss=0.2553, pruned_loss=0.04872, over 4806.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2913, pruned_loss=0.06013, over 940035.80 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 16.0 +2024-07-28 06:16:28,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.70 vs. limit=22.5 +2024-07-28 06:16:30,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.75 vs. limit=22.5 +2024-07-28 06:16:31,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120453.33333333333, ans=0.125 +2024-07-28 06:16:35,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=120466.66666666667, ans=0.05 +2024-07-28 06:16:43,552 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.723e+01 6.764e+01 8.281e+01 1.171e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-28 06:16:54,074 INFO [train.py:1114] (2/4) Epoch 9, batch 8600, loss[loss=0.2347, simple_loss=0.3338, pruned_loss=0.0678, over 4781.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2909, pruned_loss=0.06012, over 939049.68 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:17:00,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=120520.0, ans=0.05 +2024-07-28 06:17:03,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.12 vs. limit=22.5 +2024-07-28 06:17:08,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=120533.33333333333, ans=0.0 +2024-07-28 06:17:26,338 INFO [train.py:1114] (2/4) Epoch 9, batch 8650, loss[loss=0.2225, simple_loss=0.3003, pruned_loss=0.07238, over 4893.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2905, pruned_loss=0.06001, over 939959.99 frames. 
], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:17:28,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120573.33333333333, ans=0.125 +2024-07-28 06:17:31,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=120573.33333333333, ans=0.125 +2024-07-28 06:17:39,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=120600.0, ans=0.125 +2024-07-28 06:17:48,025 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.743e+01 6.194e+01 7.423e+01 1.120e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 06:17:50,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.37 vs. limit=6.0 +2024-07-28 06:17:59,182 INFO [train.py:1114] (2/4) Epoch 9, batch 8700, loss[loss=0.239, simple_loss=0.3077, pruned_loss=0.08516, over 4765.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2912, pruned_loss=0.06048, over 937750.72 frames. ], batch size: 13, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:18:02,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120640.0, ans=0.1 +2024-07-28 06:18:02,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=120640.0, ans=0.125 +2024-07-28 06:18:07,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0 +2024-07-28 06:18:14,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=120666.66666666667, ans=0.5 +2024-07-28 06:18:14,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120666.66666666667, ans=0.1 +2024-07-28 06:18:20,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=120666.66666666667, ans=0.125 +2024-07-28 06:18:22,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=120680.0, ans=0.125 +2024-07-28 06:18:26,471 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:18:26,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=120680.0, ans=0.0 +2024-07-28 06:18:34,225 INFO [train.py:1114] (2/4) Epoch 9, batch 8750, loss[loss=0.2566, simple_loss=0.3344, pruned_loss=0.08938, over 4688.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2919, pruned_loss=0.06119, over 936225.20 frames. 
], batch size: 15, lr: 8.19e-03, grad_scale: 16.0 +2024-07-28 06:18:38,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=120706.66666666667, ans=0.125 +2024-07-28 06:18:46,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=120720.0, ans=0.2 +2024-07-28 06:18:56,339 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.820e+01 6.301e+01 7.114e+01 1.037e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 06:19:02,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.20 vs. limit=22.5 +2024-07-28 06:19:06,432 INFO [train.py:1114] (2/4) Epoch 9, batch 8800, loss[loss=0.1694, simple_loss=0.2736, pruned_loss=0.03254, over 4932.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2915, pruned_loss=0.06079, over 936948.60 frames. ], batch size: 14, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:19:11,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=120773.33333333333, ans=0.04949747468305833 +2024-07-28 06:19:39,900 INFO [train.py:1114] (2/4) Epoch 9, batch 8850, loss[loss=0.2473, simple_loss=0.3271, pruned_loss=0.08377, over 4542.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.06045, over 931564.68 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:19:42,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.20 vs. limit=22.5 +2024-07-28 06:19:44,554 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:19:57,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=120866.66666666667, ans=0.09899494936611666 +2024-07-28 06:20:02,412 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.678e+01 6.367e+01 7.332e+01 1.676e+02, threshold=1.273e+02, percent-clipped=2.0 +2024-07-28 06:20:02,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120880.0, ans=0.1 +2024-07-28 06:20:13,208 INFO [train.py:1114] (2/4) Epoch 9, batch 8900, loss[loss=0.1997, simple_loss=0.2795, pruned_loss=0.05996, over 4937.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2908, pruned_loss=0.06066, over 929648.07 frames. ], batch size: 12, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:20:13,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120906.66666666667, ans=0.1 +2024-07-28 06:20:17,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.29 vs. 
limit=8.0 +2024-07-28 06:20:20,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=120920.0, ans=0.2 +2024-07-28 06:20:36,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120946.66666666667, ans=0.1 +2024-07-28 06:20:47,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120960.0, ans=0.1 +2024-07-28 06:20:52,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.79 vs. limit=22.5 +2024-07-28 06:20:58,024 INFO [train.py:1114] (2/4) Epoch 9, batch 8950, loss[loss=0.2173, simple_loss=0.3065, pruned_loss=0.06405, over 4614.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2918, pruned_loss=0.06118, over 930459.23 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0 +2024-07-28 06:20:58,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=120973.33333333333, ans=10.0 +2024-07-28 06:21:02,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=120973.33333333333, ans=0.04949747468305833 +2024-07-28 06:21:10,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=121000.0, ans=0.0 +2024-07-28 06:21:19,656 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.816e+01 6.215e+01 7.468e+01 1.036e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 06:21:21,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.72 vs. limit=12.0 +2024-07-28 06:21:27,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=121026.66666666667, ans=0.125 +2024-07-28 06:21:28,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.81 vs. limit=15.0 +2024-07-28 06:21:29,709 INFO [train.py:1114] (2/4) Epoch 9, batch 9000, loss[loss=0.1952, simple_loss=0.2809, pruned_loss=0.0547, over 4643.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2909, pruned_loss=0.06095, over 933345.98 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:21:29,709 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 06:21:45,146 INFO [train.py:1146] (2/4) Epoch 9, validation: loss=0.1749, simple_loss=0.2792, pruned_loss=0.03531, over 944034.00 frames. 
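[Editor's note] The validation entry above reports a frame-weighted average, the same bookkeeping behind the `tot_loss[... over N frames. ]` entries that recur throughout this log. The following is a minimal, hypothetical sketch inferred from the log format alone, not the actual icefall train.py code; the fractional cumulative frame counts (e.g. 938176.57) suggest the real tracker also applies an exponential decay to old batches, which is omitted here. The class name `FrameWeightedTracker` is an illustration, not a real API.

# Hypothetical illustration only: not the actual icefall/k2 implementation.
from collections import defaultdict

class FrameWeightedTracker:
    """Running frame-weighted averages, like `tot_loss[loss=..., over N frames]`."""

    def __init__(self):
        self.sums = defaultdict(float)  # per-metric sum of (batch average * frames)
        self.frames = 0.0               # total frames accumulated so far

    def update(self, losses, num_frames):
        # losses: per-batch average metrics, e.g. {"loss": 0.2376, ...}
        for name, value in losses.items():
            self.sums[name] += value * num_frames
        self.frames += num_frames

    def averages(self):
        # Frame-weighted mean of each metric over everything seen so far.
        return {name: s / self.frames for name, s in self.sums.items()}

# Example with numbers from the "Epoch 9, batch 6800" entry earlier in this log:
tracker = FrameWeightedTracker()
tracker.update({"loss": 0.2376, "simple_loss": 0.3167, "pruned_loss": 0.07922}, 4634.0)
print(tracker.averages())  # after one update, equals that batch's own averages

With decay, each `update` would first scale `self.sums` and `self.frames` by a factor slightly below 1, which is one way the logged "over N frames" totals could hover near ~940k rather than growing without bound.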
+2024-07-28 06:21:45,147 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 06:21:45,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=121040.0, ans=0.2 +2024-07-28 06:21:45,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=121040.0, ans=0.0 +2024-07-28 06:21:58,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121053.33333333333, ans=0.125 +2024-07-28 06:21:59,663 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.23 vs. limit=15.0 +2024-07-28 06:22:00,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=121053.33333333333, ans=0.125 +2024-07-28 06:22:00,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-07-28 06:22:17,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=121066.66666666667, ans=0.95 +2024-07-28 06:22:19,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.90 vs. limit=15.0 +2024-07-28 06:22:25,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121093.33333333333, ans=0.1 +2024-07-28 06:22:35,221 INFO [train.py:1114] (2/4) Epoch 9, batch 9050, loss[loss=0.1689, simple_loss=0.2489, pruned_loss=0.04443, over 4543.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2902, pruned_loss=0.06008, over 933740.94 frames. ], batch size: 10, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:22:50,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.53 vs. limit=15.0 +2024-07-28 06:23:12,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.01 vs. limit=8.0 +2024-07-28 06:23:15,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=121146.66666666667, ans=0.125 +2024-07-28 06:23:15,946 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.675e+01 6.570e+01 7.797e+01 1.121e+02, threshold=1.314e+02, percent-clipped=0.0 +2024-07-28 06:23:32,979 INFO [train.py:1114] (2/4) Epoch 9, batch 9100, loss[loss=0.2117, simple_loss=0.3056, pruned_loss=0.05892, over 4936.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2898, pruned_loss=0.05973, over 936504.49 frames. 
], batch size: 14, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:23:33,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121173.33333333333, ans=0.0 +2024-07-28 06:23:40,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121173.33333333333, ans=0.1 +2024-07-28 06:23:44,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121186.66666666667, ans=0.1 +2024-07-28 06:23:47,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=121186.66666666667, ans=0.0 +2024-07-28 06:24:02,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121213.33333333333, ans=0.125 +2024-07-28 06:24:05,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=121226.66666666667, ans=0.125 +2024-07-28 06:24:09,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121226.66666666667, ans=0.0 +2024-07-28 06:24:12,179 INFO [train.py:1114] (2/4) Epoch 9, batch 9150, loss[loss=0.2339, simple_loss=0.3161, pruned_loss=0.07584, over 4795.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2909, pruned_loss=0.06014, over 935349.99 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:24:16,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.99 vs. limit=10.0 +2024-07-28 06:24:47,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121266.66666666667, ans=0.0 +2024-07-28 06:24:54,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=121266.66666666667, ans=0.125 +2024-07-28 06:24:57,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=121280.0, ans=0.125 +2024-07-28 06:24:59,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.215e+01 7.054e+01 1.564e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 06:25:01,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=121280.0, ans=0.1 +2024-07-28 06:25:10,708 INFO [train.py:1114] (2/4) Epoch 9, batch 9200, loss[loss=0.1726, simple_loss=0.2531, pruned_loss=0.04603, over 4844.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2894, pruned_loss=0.05985, over 937313.19 frames. 
], batch size: 12, lr: 8.17e-03, grad_scale: 32.0 +2024-07-28 06:25:19,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=121306.66666666667, ans=0.2 +2024-07-28 06:25:29,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=121320.0, ans=0.125 +2024-07-28 06:25:37,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=121333.33333333333, ans=0.0 +2024-07-28 06:25:38,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=121333.33333333333, ans=0.2 +2024-07-28 06:25:39,429 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.61 vs. limit=22.5 +2024-07-28 06:25:41,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121333.33333333333, ans=0.125 +2024-07-28 06:25:42,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=121346.66666666667, ans=0.2 +2024-07-28 06:25:47,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121346.66666666667, ans=0.125 +2024-07-28 06:25:51,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=121360.0, ans=0.125 +2024-07-28 06:25:54,949 INFO [train.py:1114] (2/4) Epoch 9, batch 9250, loss[loss=0.2123, simple_loss=0.3036, pruned_loss=0.06055, over 4633.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2886, pruned_loss=0.05909, over 938118.66 frames. ], batch size: 13, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:26:01,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=121386.66666666667, ans=0.0 +2024-07-28 06:26:02,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=121386.66666666667, ans=22.5 +2024-07-28 06:26:16,172 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.765e+01 6.275e+01 7.273e+01 1.016e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 06:26:16,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=121413.33333333333, ans=0.2 +2024-07-28 06:26:26,993 INFO [train.py:1114] (2/4) Epoch 9, batch 9300, loss[loss=0.169, simple_loss=0.2571, pruned_loss=0.04042, over 4772.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2883, pruned_loss=0.05883, over 938380.78 frames. ], batch size: 12, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:26:30,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=121440.0, ans=0.2 +2024-07-28 06:26:49,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=121453.33333333333, ans=0.0 +2024-07-28 06:26:50,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. 
limit=12.0 +2024-07-28 06:26:57,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=121466.66666666667, ans=0.125 +2024-07-28 06:27:04,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=121493.33333333333, ans=0.0 +2024-07-28 06:27:31,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121493.33333333333, ans=0.1 +2024-07-28 06:27:34,640 INFO [train.py:1114] (2/4) Epoch 9, batch 9350, loss[loss=0.1928, simple_loss=0.268, pruned_loss=0.05877, over 4796.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2893, pruned_loss=0.05919, over 935094.77 frames. ], batch size: 11, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:27:41,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=121506.66666666667, ans=0.95 +2024-07-28 06:27:41,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121506.66666666667, ans=0.1 +2024-07-28 06:27:42,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=121506.66666666667, ans=0.0 +2024-07-28 06:27:50,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=121520.0, ans=0.125 +2024-07-28 06:27:50,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=121520.0, ans=0.09899494936611666 +2024-07-28 06:28:00,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.59 vs. limit=15.0 +2024-07-28 06:28:01,021 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.677e+01 6.203e+01 7.268e+01 1.059e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 06:28:11,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=121546.66666666667, ans=0.125 +2024-07-28 06:28:14,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=121560.0, ans=0.125 +2024-07-28 06:28:20,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=121560.0, ans=0.09899494936611666 +2024-07-28 06:28:21,359 INFO [train.py:1114] (2/4) Epoch 9, batch 9400, loss[loss=0.2112, simple_loss=0.3046, pruned_loss=0.05889, over 4692.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2899, pruned_loss=0.05952, over 933115.41 frames. ], batch size: 13, lr: 8.16e-03, grad_scale: 32.0 +2024-07-28 06:28:26,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=121573.33333333333, ans=0.0 +2024-07-28 06:28:41,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=121613.33333333333, ans=0.125 +2024-07-28 06:28:47,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-07-28 06:28:52,904 INFO [train.py:1114] (2/4) Epoch 9, batch 9450, loss[loss=0.1497, simple_loss=0.2365, pruned_loss=0.03144, over 4817.00 frames. 
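The `Whitening` lines pair a measured statistic with a ceiling (`metric=11.23 vs. limit=15.0`): the module tracks how far a layer's activations are from being "white" (decorrelated channels with equal variance) and only intervenes when the metric drifts past the limit. A plausible reading of the metric, sketched below, is the ratio E[λ²] / E[λ]² over the eigenvalues of the activation covariance, which is 1.0 for perfectly white features and roughly C/r for rank-r features in C channels; this mirrors the idea rather than the recipe's exact code.

```python
import torch


def whitening_metric(x: torch.Tensor) -> float:
    """Hedged sketch of a whitening diagnostic: close to 1.0 when channels
    are decorrelated with equal variance, larger as the covariance
    spectrum becomes lopsided. Not necessarily the exact logged formula."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]                     # (C, C) covariance
    mean_eig = torch.diagonal(cov).mean()            # E[lambda]
    mean_eig_sq = torch.diagonal(cov @ cov).mean()   # E[lambda^2]
    return float(mean_eig_sq / mean_eig.clamp(min=1e-20) ** 2)


white = torch.randn(4000, 384)                           # metric close to 1
lowrank = torch.randn(4000, 16) @ torch.randn(16, 384)   # metric >> 1, ~384/16
print(whitening_metric(white), whitening_metric(lowrank))
```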
], tot_loss[loss=0.2049, simple_loss=0.2902, pruned_loss=0.05982, over 932709.41 frames. ], batch size: 11, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:28:57,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121640.0, ans=0.125 +2024-07-28 06:28:58,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=121653.33333333333, ans=0.2 +2024-07-28 06:29:07,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=121666.66666666667, ans=0.125 +2024-07-28 06:29:07,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=121666.66666666667, ans=0.025 +2024-07-28 06:29:13,879 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.626e+01 6.016e+01 7.236e+01 1.280e+02, threshold=1.203e+02, percent-clipped=1.0 +2024-07-28 06:29:22,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=121693.33333333333, ans=0.125 +2024-07-28 06:29:24,049 INFO [train.py:1114] (2/4) Epoch 9, batch 9500, loss[loss=0.2036, simple_loss=0.2941, pruned_loss=0.05651, over 4710.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2902, pruned_loss=0.05971, over 934698.38 frames. ], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:29:29,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=121706.66666666667, ans=0.125 +2024-07-28 06:29:31,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=121720.0, ans=0.125 +2024-07-28 06:29:33,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.68 vs. limit=10.0 +2024-07-28 06:29:35,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=121720.0, ans=0.0 +2024-07-28 06:29:41,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=121733.33333333333, ans=0.0 +2024-07-28 06:29:52,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=121760.0, ans=0.0 +2024-07-28 06:30:02,261 INFO [train.py:1114] (2/4) Epoch 9, batch 9550, loss[loss=0.1754, simple_loss=0.2621, pruned_loss=0.04438, over 4771.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2913, pruned_loss=0.06013, over 932058.72 frames. ], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:08,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=121786.66666666667, ans=0.0 +2024-07-28 06:30:12,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.75 vs. limit=15.0 +2024-07-28 06:30:16,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.46 vs. 
limit=22.5 +2024-07-28 06:30:18,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121800.0, ans=0.1 +2024-07-28 06:30:20,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=121813.33333333333, ans=0.0 +2024-07-28 06:30:23,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=121813.33333333333, ans=0.0 +2024-07-28 06:30:23,661 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.774e+01 6.473e+01 7.553e+01 1.235e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 06:30:25,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=121813.33333333333, ans=0.0 +2024-07-28 06:30:26,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=121813.33333333333, ans=0.0 +2024-07-28 06:30:27,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.46 vs. limit=10.0 +2024-07-28 06:30:35,269 INFO [train.py:1114] (2/4) Epoch 9, batch 9600, loss[loss=0.2993, simple_loss=0.3672, pruned_loss=0.1157, over 3296.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2909, pruned_loss=0.06, over 930933.19 frames. ], batch size: 36, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:38,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=121840.0, ans=0.0 +2024-07-28 06:30:47,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121853.33333333333, ans=0.1 +2024-07-28 06:30:51,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.73 vs. limit=12.0 +2024-07-28 06:30:52,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121866.66666666667, ans=0.125 +2024-07-28 06:31:13,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=10.0 +2024-07-28 06:31:18,866 INFO [train.py:1114] (2/4) Epoch 9, batch 9650, loss[loss=0.2016, simple_loss=0.2866, pruned_loss=0.05833, over 4836.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2918, pruned_loss=0.06072, over 927137.41 frames. ], batch size: 16, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:31:20,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.50 vs. 
limit=22.5 +2024-07-28 06:31:31,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121906.66666666667, ans=0.1 +2024-07-28 06:31:43,673 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:31:53,654 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.799e+01 6.411e+01 7.286e+01 1.019e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 06:32:09,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121960.0, ans=0.125 +2024-07-28 06:32:15,514 INFO [train.py:1114] (2/4) Epoch 9, batch 9700, loss[loss=0.2187, simple_loss=0.2961, pruned_loss=0.07064, over 4281.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2914, pruned_loss=0.06051, over 925136.74 frames. ], batch size: 25, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:22,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-07-28 06:32:23,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=121986.66666666667, ans=0.0 +2024-07-28 06:32:24,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=121986.66666666667, ans=0.125 +2024-07-28 06:32:26,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=121986.66666666667, ans=0.0 +2024-07-28 06:32:27,092 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=15.0 +2024-07-28 06:32:51,286 INFO [train.py:1114] (2/4) Epoch 9, batch 9750, loss[loss=0.1854, simple_loss=0.267, pruned_loss=0.05194, over 4676.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2903, pruned_loss=0.05995, over 925442.30 frames. 
], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:51,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=122040.0, ans=0.125 +2024-07-28 06:32:52,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122040.0, ans=0.1 +2024-07-28 06:32:52,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=122040.0, ans=0.0 +2024-07-28 06:33:08,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=122053.33333333333, ans=0.025 +2024-07-28 06:33:18,531 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.645e+01 5.595e+01 6.071e+01 7.420e+01 1.003e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 06:33:19,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=122080.0, ans=0.0 +2024-07-28 06:33:19,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=122080.0, ans=0.2 +2024-07-28 06:33:20,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=122080.0, ans=0.2 +2024-07-28 06:33:24,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=122093.33333333333, ans=0.035 +2024-07-28 06:33:28,406 INFO [train.py:1114] (2/4) Epoch 9, batch 9800, loss[loss=0.1973, simple_loss=0.2709, pruned_loss=0.0619, over 4708.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2902, pruned_loss=0.06007, over 925043.35 frames. ], batch size: 12, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:33:40,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=122120.0, ans=0.125 +2024-07-28 06:33:40,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=122120.0, ans=0.125 +2024-07-28 06:33:48,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=122133.33333333333, ans=0.0 +2024-07-28 06:33:52,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=122146.66666666667, ans=0.125 +2024-07-28 06:34:09,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=122160.0, ans=0.125 +2024-07-28 06:34:10,943 INFO [train.py:1114] (2/4) Epoch 9, batch 9850, loss[loss=0.2291, simple_loss=0.3202, pruned_loss=0.06898, over 4894.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2912, pruned_loss=0.06063, over 927553.57 frames. 
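The periodic `optim.py` WARNINGs summarize gradient-norm statistics over a recent window: the five values after `grad-norm quartiles` read as the 0/25/50/75/100% quantiles, and the threshold is consistently `Clipping_scale` times the median (2.0 × 6.570e+01 = 1.314e+02 in the first warning of this stretch), with `percent-clipped` the fraction of recent batches that exceeded it. Below is a self-contained sketch of that policy; the class name, window size, and API are invented for illustration.

```python
from collections import deque

import torch


class QuartileClipper:
    """Hedged sketch of median-based gradient clipping as the WARNINGs
    above suggest: threshold = clipping_scale * median of recent norms."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent global gradient norms

    def clip_(self, params) -> float:
        grads = [p.grad.flatten() for p in params if p.grad is not None]
        norm = torch.linalg.vector_norm(torch.cat(grads)).item()
        self.norms.append(norm)
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.clipping_scale * median
        if norm > threshold:  # scale every gradient down to the threshold
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
        return threshold


p = torch.nn.Parameter(torch.randn(10))
p.grad = torch.randn(10)
print(QuartileClipper().clip_([p]))  # first call: threshold = 2 * own norm
```

Clipping against a multiple of the recent median, rather than a fixed constant, keeps the threshold tracking the natural scale of the gradients; that is why `percent-clipped` stays near 0-1% throughout this log even as the raw norms drift.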
], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:34:10,993 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:34:15,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=122173.33333333333, ans=0.0 +2024-07-28 06:34:16,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122173.33333333333, ans=0.1 +2024-07-28 06:34:32,423 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.834e+01 6.555e+01 7.421e+01 1.036e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-28 06:34:42,492 INFO [train.py:1114] (2/4) Epoch 9, batch 9900, loss[loss=0.2237, simple_loss=0.3149, pruned_loss=0.06625, over 4854.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.292, pruned_loss=0.06107, over 927099.22 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:34:50,727 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:34:54,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=122266.66666666667, ans=0.125 +2024-07-28 06:35:10,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=122293.33333333333, ans=0.2 +2024-07-28 06:35:26,877 INFO [train.py:1114] (2/4) Epoch 9, batch 9950, loss[loss=0.1881, simple_loss=0.2766, pruned_loss=0.04983, over 4807.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2924, pruned_loss=0.06148, over 929558.34 frames. ], batch size: 11, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:35:36,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.29 vs. limit=22.5 +2024-07-28 06:35:36,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=122320.0, ans=0.125 +2024-07-28 06:35:43,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=122333.33333333333, ans=0.125 +2024-07-28 06:35:49,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.887e+01 6.567e+01 7.886e+01 1.035e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 06:35:56,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=122360.0, ans=0.0 +2024-07-28 06:36:00,348 INFO [train.py:1114] (2/4) Epoch 9, batch 10000, loss[loss=0.2426, simple_loss=0.3373, pruned_loss=0.07399, over 4640.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2955, pruned_loss=0.0631, over 926950.26 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:07,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-07-28 06:36:10,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.21 vs. 
limit=22.5 +2024-07-28 06:36:20,702 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.34 vs. limit=15.0 +2024-07-28 06:36:21,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122400.0, ans=0.1 +2024-07-28 06:36:26,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122400.0, ans=0.1 +2024-07-28 06:36:42,688 INFO [train.py:1114] (2/4) Epoch 9, batch 10050, loss[loss=0.2424, simple_loss=0.3077, pruned_loss=0.08855, over 3494.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2985, pruned_loss=0.06491, over 914894.43 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:47,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=122440.0, ans=0.0 +2024-07-28 06:36:55,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.73 vs. limit=10.0 +2024-07-28 06:36:59,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.46 vs. limit=15.0 +2024-07-28 06:37:02,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122466.66666666667, ans=0.125 +2024-07-28 06:37:04,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122480.0, ans=0.1 +2024-07-28 06:37:06,688 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.481e+01 7.122e+01 8.299e+01 1.409e+02, threshold=1.424e+02, percent-clipped=1.0 +2024-07-28 06:37:06,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=122480.0, ans=0.125 +2024-07-28 06:37:18,575 INFO [train.py:1114] (2/4) Epoch 9, batch 10100, loss[loss=0.2441, simple_loss=0.3069, pruned_loss=0.09064, over 3444.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3057, pruned_loss=0.07207, over 860637.92 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:37:18,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=122506.66666666667, ans=0.125 +2024-07-28 06:37:31,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=122533.33333333333, ans=0.09899494936611666 +2024-07-28 06:37:38,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=122546.66666666667, ans=0.125 +2024-07-28 06:37:49,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=122560.0, ans=0.0 +2024-07-28 06:37:49,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=122560.0, ans=0.2 +2024-07-28 06:37:52,175 INFO [train.py:1114] (2/4) Epoch 9, batch 10150, loss[loss=0.2645, simple_loss=0.3346, pruned_loss=0.09717, over 3441.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3087, pruned_loss=0.07532, over 819612.21 frames. 
], batch size: 37, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:38:01,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=122586.66666666667, ans=0.0 +2024-07-28 06:38:02,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=122586.66666666667, ans=0.015 +2024-07-28 06:38:15,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=122600.0, ans=0.2 +2024-07-28 06:38:17,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=122613.33333333333, ans=0.2 +2024-07-28 06:38:18,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=122613.33333333333, ans=0.0 +2024-07-28 06:38:18,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=122613.33333333333, ans=0.125 +2024-07-28 06:38:19,557 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.853e+01 6.506e+01 6.978e+01 7.406e+01 9.051e+01, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 06:39:22,413 INFO [train.py:1114] (2/4) Epoch 9, batch 10200, loss[loss=0.261, simple_loss=0.3256, pruned_loss=0.09816, over 3186.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3113, pruned_loss=0.07867, over 787513.10 frames. ], batch size: 35, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:39:28,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=122653.33333333333, ans=0.1 +2024-07-28 06:39:29,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=122653.33333333333, ans=0.2 +2024-07-28 06:39:31,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=9.52 vs. limit=15.0 +2024-07-28 06:41:02,960 INFO [train.py:1114] (2/4) Epoch 10, batch 0, loss[loss=0.1821, simple_loss=0.2712, pruned_loss=0.04647, over 4850.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2712, pruned_loss=0.04647, over 4850.00 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:41:02,961 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 06:41:14,704 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.1773, simple_loss=0.2829, pruned_loss=0.03584, over 944034.00 frames. +2024-07-28 06:41:14,705 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 06:41:19,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.92 vs. limit=22.5 +2024-07-28 06:41:26,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=122682.66666666667, ans=0.125 +2024-07-28 06:41:34,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122696.0, ans=0.0 +2024-07-28 06:41:53,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.88 vs. 
limit=15.0 +2024-07-28 06:41:58,942 INFO [train.py:1114] (2/4) Epoch 10, batch 50, loss[loss=0.1591, simple_loss=0.2408, pruned_loss=0.03866, over 4622.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2948, pruned_loss=0.06321, over 206484.86 frames. ], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:06,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.950e+01 6.646e+01 7.258e+01 1.106e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 06:42:09,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=122749.33333333333, ans=0.125 +2024-07-28 06:42:21,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=122749.33333333333, ans=0.125 +2024-07-28 06:42:21,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=122749.33333333333, ans=0.125 +2024-07-28 06:42:21,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 06:42:27,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=15.0 +2024-07-28 06:42:32,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=122776.0, ans=0.0 +2024-07-28 06:42:33,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=122776.0, ans=0.125 +2024-07-28 06:42:46,653 INFO [train.py:1114] (2/4) Epoch 10, batch 100, loss[loss=0.1878, simple_loss=0.2678, pruned_loss=0.05387, over 4637.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.293, pruned_loss=0.06168, over 365840.73 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:47,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=122802.66666666667, ans=0.0 +2024-07-28 06:42:50,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=122802.66666666667, ans=0.125 +2024-07-28 06:43:27,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=122842.66666666667, ans=0.2 +2024-07-28 06:43:38,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.47 vs. limit=22.5 +2024-07-28 06:43:40,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=122856.0, ans=0.125 +2024-07-28 06:43:42,689 INFO [train.py:1114] (2/4) Epoch 10, batch 150, loss[loss=0.1834, simple_loss=0.2599, pruned_loss=0.05349, over 4611.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2892, pruned_loss=0.05968, over 494333.15 frames. 
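At the epoch 9 to 10 boundary above, training pauses to compute a validation loss (loss=0.1773, simple_loss=0.2829, pruned_loss=0.03584 over 944034 frames) before batch 0 of the new epoch, and the learning rate steps down from 8.12e-03 to 7.72e-03 at the same point, consistent with a schedule that decays with epoch count as well as batch count. Below is a minimal sketch of such a frame-weighted validation pass; `model`, `criterion`, and `valid_loader` are placeholders, not names taken from this codebase.

```python
import torch


@torch.no_grad()
def compute_validation_loss(model, criterion, valid_loader, device):
    """Hedged sketch: frame-weighted average of the training objective
    over held-out data, matching the "over N frames" bookkeeping above."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in valid_loader:
        feats = batch["inputs"].to(device)        # (N, T, F) fbank features
        num_frames = float(feats.shape[0] * feats.shape[1])
        loss = criterion(model(feats), batch["targets"].to(device))
        tot_loss += loss.item() * num_frames      # assumes per-frame mean loss
        tot_frames += num_frames
    model.train()
    return tot_loss / tot_frames                  # e.g. 0.1773 over 944034 frames
```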
], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:43:44,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=122869.33333333333, ans=0.0 +2024-07-28 06:43:46,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=122869.33333333333, ans=0.125 +2024-07-28 06:43:46,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=122869.33333333333, ans=0.125 +2024-07-28 06:43:51,031 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.790e+01 6.360e+01 7.461e+01 1.069e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:44:04,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.79 vs. limit=10.0 +2024-07-28 06:44:11,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122909.33333333333, ans=0.125 +2024-07-28 06:44:32,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=122922.66666666667, ans=0.0 +2024-07-28 06:44:33,905 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.50 vs. limit=15.0 +2024-07-28 06:44:37,852 INFO [train.py:1114] (2/4) Epoch 10, batch 200, loss[loss=0.1941, simple_loss=0.2957, pruned_loss=0.04629, over 4495.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2903, pruned_loss=0.05977, over 593707.75 frames. ], batch size: 21, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:44:49,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=122936.0, ans=0.125 +2024-07-28 06:44:50,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122936.0, ans=0.0 +2024-07-28 06:44:52,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=122949.33333333333, ans=0.2 +2024-07-28 06:45:00,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-07-28 06:45:04,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=122949.33333333333, ans=0.2 +2024-07-28 06:45:07,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=122962.66666666667, ans=0.0 +2024-07-28 06:45:17,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=122976.0, ans=0.0 +2024-07-28 06:45:28,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=123002.66666666667, ans=10.0 +2024-07-28 06:45:28,953 INFO [train.py:1114] (2/4) Epoch 10, batch 250, loss[loss=0.221, simple_loss=0.3063, pruned_loss=0.06785, over 4643.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2908, pruned_loss=0.05972, over 670870.48 frames. 
], batch size: 16, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:45:29,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=123002.66666666667, ans=0.035 +2024-07-28 06:45:31,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=123002.66666666667, ans=0.125 +2024-07-28 06:45:36,593 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.49 vs. limit=15.0 +2024-07-28 06:45:37,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123016.0, ans=0.1 +2024-07-28 06:45:38,196 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.690e+01 6.559e+01 7.773e+01 1.314e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 06:45:38,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123016.0, ans=0.125 +2024-07-28 06:45:43,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=123016.0, ans=0.125 +2024-07-28 06:45:45,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123029.33333333333, ans=0.125 +2024-07-28 06:45:54,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=123042.66666666667, ans=0.025 +2024-07-28 06:45:54,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=123042.66666666667, ans=0.0 +2024-07-28 06:46:08,672 INFO [train.py:1114] (2/4) Epoch 10, batch 300, loss[loss=0.2027, simple_loss=0.2885, pruned_loss=0.0584, over 4808.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2892, pruned_loss=0.05873, over 730069.42 frames. ], batch size: 15, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:46:09,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0 +2024-07-28 06:46:26,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=123096.0, ans=0.125 +2024-07-28 06:46:27,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=123096.0, ans=0.0 +2024-07-28 06:46:31,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=123109.33333333333, ans=0.1 +2024-07-28 06:46:39,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.88 vs. limit=12.0 +2024-07-28 06:46:40,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=123122.66666666667, ans=0.125 +2024-07-28 06:46:41,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=123122.66666666667, ans=0.0 +2024-07-28 06:46:44,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.77 vs. 
limit=15.0 +2024-07-28 06:46:46,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=123136.0, ans=0.2 +2024-07-28 06:46:48,124 INFO [train.py:1114] (2/4) Epoch 10, batch 350, loss[loss=0.211, simple_loss=0.2888, pruned_loss=0.06657, over 4939.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2892, pruned_loss=0.05842, over 775902.33 frames. ], batch size: 12, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:47:00,719 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.536e+01 6.033e+01 6.929e+01 1.043e+02, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 06:47:00,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123149.33333333333, ans=0.1 +2024-07-28 06:50:15,405 INFO [train.py:1114] (2/4) Epoch 10, batch 400, loss[loss=0.1955, simple_loss=0.2848, pruned_loss=0.05307, over 4691.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2886, pruned_loss=0.05773, over 813432.51 frames. ], batch size: 13, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:50:16,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=123202.66666666667, ans=0.125 +2024-07-28 06:50:27,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=123202.66666666667, ans=0.2 +2024-07-28 06:50:39,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=123216.0, ans=0.0 +2024-07-28 06:50:58,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.62 vs. limit=22.5 +2024-07-28 06:51:13,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=123242.66666666667, ans=0.125 +2024-07-28 06:51:17,724 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:52:21,018 INFO [train.py:1114] (2/4) Epoch 10, batch 450, loss[loss=0.2099, simple_loss=0.3045, pruned_loss=0.05769, over 4648.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2899, pruned_loss=0.05869, over 838437.13 frames. ], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:52:46,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.561e+01 6.292e+01 7.345e+01 1.157e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 06:52:55,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=123282.66666666667, ans=0.125 +2024-07-28 06:54:42,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0 +2024-07-28 06:54:48,146 INFO [train.py:1114] (2/4) Epoch 10, batch 500, loss[loss=0.2201, simple_loss=0.3197, pruned_loss=0.06028, over 4685.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2888, pruned_loss=0.05819, over 861354.68 frames. 
], batch size: 15, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:54:48,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=123336.0, ans=0.035 +2024-07-28 06:54:50,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=123336.0, ans=0.5 +2024-07-28 06:55:10,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=123349.33333333333, ans=0.2 +2024-07-28 06:55:38,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=123362.66666666667, ans=0.0 +2024-07-28 06:55:39,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=123362.66666666667, ans=0.2 +2024-07-28 06:55:51,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=123376.0, ans=0.125 +2024-07-28 06:55:55,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.53 vs. limit=15.0 +2024-07-28 06:55:55,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123376.0, ans=0.1 +2024-07-28 06:55:55,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=123376.0, ans=0.125 +2024-07-28 06:56:03,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=123389.33333333333, ans=0.0 +2024-07-28 06:56:05,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=123389.33333333333, ans=0.125 +2024-07-28 06:56:13,174 INFO [train.py:1114] (2/4) Epoch 10, batch 550, loss[loss=0.2133, simple_loss=0.2946, pruned_loss=0.06597, over 4630.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2888, pruned_loss=0.05829, over 877701.71 frames. ], batch size: 17, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:56:13,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123402.66666666667, ans=0.125 +2024-07-28 06:56:15,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=123402.66666666667, ans=0.09899494936611666 +2024-07-28 06:56:20,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=123416.0, ans=0.0 +2024-07-28 06:56:21,996 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.657e+01 6.359e+01 7.249e+01 1.002e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:56:24,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=123416.0, ans=0.125 +2024-07-28 06:56:39,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. 
limit=15.0 +2024-07-28 06:56:41,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=123429.33333333333, ans=0.0 +2024-07-28 06:57:03,607 INFO [train.py:1114] (2/4) Epoch 10, batch 600, loss[loss=0.1933, simple_loss=0.2799, pruned_loss=0.05331, over 4644.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2889, pruned_loss=0.05843, over 892417.05 frames. ], batch size: 16, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:15,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=123482.66666666667, ans=0.0 +2024-07-28 06:57:23,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=123496.0, ans=15.0 +2024-07-28 06:57:27,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.55 vs. limit=15.0 +2024-07-28 06:57:28,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=123509.33333333333, ans=0.0 +2024-07-28 06:57:29,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=123509.33333333333, ans=0.0 +2024-07-28 06:57:41,756 INFO [train.py:1114] (2/4) Epoch 10, batch 650, loss[loss=0.2316, simple_loss=0.3161, pruned_loss=0.07356, over 4760.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2879, pruned_loss=0.05784, over 903898.53 frames. ], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:49,741 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.335e+01 5.819e+01 6.416e+01 7.118e+01 9.444e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 06:57:57,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0 +2024-07-28 06:58:24,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123576.0, ans=0.1 +2024-07-28 06:58:24,957 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.999e-01 +2024-07-28 06:58:34,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.58 vs. limit=22.5 +2024-07-28 06:58:37,456 INFO [train.py:1114] (2/4) Epoch 10, batch 700, loss[loss=0.1769, simple_loss=0.2639, pruned_loss=0.04499, over 4633.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2892, pruned_loss=0.05823, over 912131.66 frames. 
], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 06:58:46,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=123602.66666666667, ans=0.125 +2024-07-28 06:58:48,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=123616.0, ans=0.0 +2024-07-28 06:58:59,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=123629.33333333333, ans=0.125 +2024-07-28 06:59:01,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=123629.33333333333, ans=0.125 +2024-07-28 06:59:35,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.22 vs. limit=15.0 +2024-07-28 06:59:40,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=6.0 +2024-07-28 06:59:56,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=123656.0, ans=0.07 +2024-07-28 06:59:59,349 INFO [train.py:1114] (2/4) Epoch 10, batch 750, loss[loss=0.19, simple_loss=0.2831, pruned_loss=0.04847, over 4694.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2881, pruned_loss=0.05815, over 918352.93 frames. ], batch size: 13, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:00:01,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=123669.33333333333, ans=0.125 +2024-07-28 07:00:07,408 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.598e+01 6.088e+01 6.743e+01 1.006e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:00:08,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=123682.66666666667, ans=0.125 +2024-07-28 07:01:01,986 INFO [train.py:1114] (2/4) Epoch 10, batch 800, loss[loss=0.1643, simple_loss=0.2395, pruned_loss=0.04459, over 4859.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2872, pruned_loss=0.0576, over 923260.24 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:08,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=123749.33333333333, ans=0.125 +2024-07-28 07:01:17,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=123762.66666666667, ans=0.2 +2024-07-28 07:01:20,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123762.66666666667, ans=0.1 +2024-07-28 07:01:22,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.24 vs. limit=12.0 +2024-07-28 07:01:24,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.46 vs. 
limit=15.0 +2024-07-28 07:01:28,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=123776.0, ans=0.125 +2024-07-28 07:01:29,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.70 vs. limit=15.0 +2024-07-28 07:01:31,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=123789.33333333333, ans=0.125 +2024-07-28 07:01:33,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123789.33333333333, ans=0.0 +2024-07-28 07:01:38,559 INFO [train.py:1114] (2/4) Epoch 10, batch 850, loss[loss=0.2347, simple_loss=0.3188, pruned_loss=0.07531, over 4667.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2868, pruned_loss=0.05765, over 927144.47 frames. ], batch size: 14, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:40,924 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5 +2024-07-28 07:01:43,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=123802.66666666667, ans=0.0 +2024-07-28 07:01:48,402 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.695e+01 6.333e+01 6.870e+01 1.740e+02, threshold=1.267e+02, percent-clipped=1.0 +2024-07-28 07:01:48,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=123816.0, ans=0.0 +2024-07-28 07:01:51,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=123816.0, ans=0.02 +2024-07-28 07:01:55,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-28 07:02:21,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=123869.33333333333, ans=0.125 +2024-07-28 07:02:22,338 INFO [train.py:1114] (2/4) Epoch 10, batch 900, loss[loss=0.1732, simple_loss=0.2578, pruned_loss=0.04432, over 4848.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2882, pruned_loss=0.05799, over 928106.16 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:02:22,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=123869.33333333333, ans=0.02 +2024-07-28 07:02:30,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-28 07:02:32,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=123882.66666666667, ans=0.125 +2024-07-28 07:02:40,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=123896.0, ans=0.0 +2024-07-28 07:02:50,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=123922.66666666667, ans=0.0 +2024-07-28 07:02:56,038 INFO [train.py:1114] (2/4) Epoch 10, batch 950, loss[loss=0.1902, simple_loss=0.2762, pruned_loss=0.05205, over 4774.00 frames. 
], tot_loss[loss=0.2019, simple_loss=0.2887, pruned_loss=0.05759, over 930044.24 frames. ], batch size: 12, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:03:04,117 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.603e+01 6.108e+01 6.683e+01 9.503e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 07:03:13,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=123962.66666666667, ans=0.07 +2024-07-28 07:03:21,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=123976.0, ans=0.125 +2024-07-28 07:03:22,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=123989.33333333333, ans=0.025 +2024-07-28 07:03:23,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=123989.33333333333, ans=0.125 +2024-07-28 07:03:27,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=123989.33333333333, ans=0.0 +2024-07-28 07:03:29,739 INFO [train.py:1114] (2/4) Epoch 10, batch 1000, loss[loss=0.1705, simple_loss=0.2541, pruned_loss=0.04344, over 4959.00 frames. ], tot_loss[loss=0.203, simple_loss=0.289, pruned_loss=0.05846, over 929321.70 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:03:49,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=124002.66666666667, ans=0.125 +2024-07-28 07:04:02,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=124029.33333333333, ans=0.2 +2024-07-28 07:04:19,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.51 vs. limit=22.5 +2024-07-28 07:04:21,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=124056.0, ans=0.025 +2024-07-28 07:04:25,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=15.0 +2024-07-28 07:04:25,366 INFO [train.py:1114] (2/4) Epoch 10, batch 1050, loss[loss=0.1811, simple_loss=0.2748, pruned_loss=0.04374, over 4876.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2879, pruned_loss=0.05811, over 931524.26 frames. 
], batch size: 14, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:04:55,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.602e+01 6.129e+01 7.252e+01 1.285e+02, threshold=1.226e+02, percent-clipped=1.0 +2024-07-28 07:04:58,416 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:05:05,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=124096.0, ans=0.0 +2024-07-28 07:05:09,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=124109.33333333333, ans=0.125 +2024-07-28 07:05:25,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=124122.66666666667, ans=0.0 +2024-07-28 07:05:27,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=124122.66666666667, ans=0.025 +2024-07-28 07:05:39,256 INFO [train.py:1114] (2/4) Epoch 10, batch 1100, loss[loss=0.2167, simple_loss=0.2868, pruned_loss=0.07327, over 4891.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2886, pruned_loss=0.05885, over 933966.33 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 32.0 +2024-07-28 07:05:46,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=15.0 +2024-07-28 07:05:52,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-28 07:05:58,972 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.59 vs. limit=15.0 +2024-07-28 07:06:08,771 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:06:21,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124189.33333333333, ans=0.125 +2024-07-28 07:06:23,653 INFO [train.py:1114] (2/4) Epoch 10, batch 1150, loss[loss=0.1852, simple_loss=0.267, pruned_loss=0.05173, over 4896.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2879, pruned_loss=0.05858, over 934016.88 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:06:44,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=124202.66666666667, ans=0.0 +2024-07-28 07:07:00,540 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.669e+01 6.088e+01 6.784e+01 1.007e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:07:14,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=124242.66666666667, ans=0.125 +2024-07-28 07:07:19,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0 +2024-07-28 07:07:44,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.13 vs. 
limit=12.0 +2024-07-28 07:07:48,873 INFO [train.py:1114] (2/4) Epoch 10, batch 1200, loss[loss=0.1726, simple_loss=0.2612, pruned_loss=0.04197, over 4868.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2879, pruned_loss=0.05788, over 933514.38 frames. ], batch size: 14, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:07:49,964 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=12.0 +2024-07-28 07:07:53,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124269.33333333333, ans=0.1 +2024-07-28 07:08:08,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=124282.66666666667, ans=0.125 +2024-07-28 07:08:11,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=124296.0, ans=0.0 +2024-07-28 07:08:24,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=124309.33333333333, ans=0.125 +2024-07-28 07:08:25,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124309.33333333333, ans=0.1 +2024-07-28 07:08:41,872 INFO [train.py:1114] (2/4) Epoch 10, batch 1250, loss[loss=0.2187, simple_loss=0.3025, pruned_loss=0.06741, over 4801.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.287, pruned_loss=0.05733, over 937214.01 frames. ], batch size: 15, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:08:49,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=124349.33333333333, ans=0.025 +2024-07-28 07:08:50,578 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.912e+01 6.433e+01 7.478e+01 1.098e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 07:08:50,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=124349.33333333333, ans=0.125 +2024-07-28 07:08:56,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=124362.66666666667, ans=0.05 +2024-07-28 07:09:06,394 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:09:08,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=124376.0, ans=0.125 +2024-07-28 07:09:17,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=124389.33333333333, ans=0.125 +2024-07-28 07:09:20,434 INFO [train.py:1114] (2/4) Epoch 10, batch 1300, loss[loss=0.2249, simple_loss=0.3169, pruned_loss=0.06646, over 4724.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2862, pruned_loss=0.05688, over 938779.22 frames. ], batch size: 19, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:09:22,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.81 vs. 
limit=10.0
+2024-07-28 07:09:23,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=124402.66666666667, ans=0.125
+2024-07-28 07:09:24,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=124402.66666666667, ans=0.125
+2024-07-28 07:09:24,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=124402.66666666667, ans=0.125
+2024-07-28 07:09:25,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124402.66666666667, ans=0.1
+2024-07-28 07:09:54,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=124429.33333333333, ans=0.125
+2024-07-28 07:10:15,058 INFO [train.py:1114] (2/4) Epoch 10, batch 1350, loss[loss=0.1781, simple_loss=0.2725, pruned_loss=0.04185, over 4765.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2855, pruned_loss=0.05608, over 940892.47 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0
+2024-07-28 07:10:15,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=124469.33333333333, ans=0.125
+2024-07-28 07:10:23,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=124482.66666666667, ans=0.2
+2024-07-28 07:10:23,761 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.516e+01 6.216e+01 7.014e+01 1.025e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 07:10:27,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124496.0, ans=0.1
+2024-07-28 07:10:29,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=124496.0, ans=0.2
+2024-07-28 07:10:43,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=124522.66666666667, ans=0.125
+2024-07-28 07:10:46,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.02 vs. limit=15.0
+2024-07-28 07:10:48,590 INFO [train.py:1114] (2/4) Epoch 10, batch 1400, loss[loss=0.1974, simple_loss=0.2692, pruned_loss=0.0628, over 4724.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2862, pruned_loss=0.05718, over 943074.70 frames. ], batch size: 11, lr: 7.66e-03, grad_scale: 32.0
+2024-07-28 07:10:49,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=124536.0, ans=0.125
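Each scaling.py:214 ScheduledFloat line records the current value (ans=...) of a hyperparameter, such as a skip rate or dropout probability, that is scheduled as a function of batch_count. A toy sketch of a piecewise-linear batch-count schedule that would behave this way; the class, its breakpoints, and the example values are assumptions for illustration, not the actual scaling.py code:

```python
# Sketch only: a float hyperparameter interpolated piecewise-linearly
# between (batch_count, value) breakpoints, in the spirit of the
# ScheduledFloat log lines above. Breakpoints here are invented.
class PiecewiseScheduledFloat:
    def __init__(self, *points):
        # points: (batch_count, value) pairs; kept sorted by batch_count
        self.points = sorted(points)

    def value_at(self, batch_count):
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# e.g. a skip rate that decays from 0.5 to 0.0 over the first 20k batches:
conv_skip_rate = PiecewiseScheduledFloat((0.0, 0.5), (20000.0, 0.0))
print(conv_skip_rate.value_at(10000.0))   # -> 0.25, mid-schedule
print(conv_skip_rate.value_at(123816.0))  # -> 0.0, matching ans=0.0 this late in training
```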
+2024-07-28 07:11:01,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.86 vs. limit=15.0
+2024-07-28 07:11:06,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=124562.66666666667, ans=0.125
+2024-07-28 07:11:11,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=124576.0, ans=0.0
+2024-07-28 07:11:13,000 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5
+2024-07-28 07:11:21,989 INFO [train.py:1114] (2/4) Epoch 10, batch 1450, loss[loss=0.185, simple_loss=0.2847, pruned_loss=0.0427, over 4676.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2864, pruned_loss=0.05698, over 943211.30 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0
+2024-07-28 07:11:29,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=124616.0, ans=0.125
+2024-07-28 07:11:30,552 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.685e+01 6.213e+01 7.325e+01 1.109e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 07:11:32,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=124616.0, ans=0.2
+2024-07-28 07:11:37,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.29 vs. limit=22.5
+2024-07-28 07:11:44,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=124642.66666666667, ans=0.125
+2024-07-28 07:11:45,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=124642.66666666667, ans=0.015
+2024-07-28 07:11:59,749 INFO [train.py:1114] (2/4) Epoch 10, batch 1500, loss[loss=0.1993, simple_loss=0.2962, pruned_loss=0.05119, over 4811.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2864, pruned_loss=0.05708, over 942724.43 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0
+2024-07-28 07:12:00,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=124669.33333333333, ans=0.0
+2024-07-28 07:12:00,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=124669.33333333333, ans=0.125
+2024-07-28 07:12:03,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124669.33333333333, ans=0.1
+2024-07-28 07:12:09,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.69 vs. limit=22.5
+2024-07-28 07:12:10,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=124682.66666666667, ans=0.0
+2024-07-28 07:12:14,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124696.0, ans=0.125
+2024-07-28 07:12:17,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.06 vs. 
limit=15.0 +2024-07-28 07:12:28,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124722.66666666667, ans=0.1 +2024-07-28 07:12:33,113 INFO [train.py:1114] (2/4) Epoch 10, batch 1550, loss[loss=0.1979, simple_loss=0.2917, pruned_loss=0.05204, over 4901.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2868, pruned_loss=0.05769, over 938729.89 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:36,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.37 vs. limit=22.5 +2024-07-28 07:12:41,765 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.520e+01 6.164e+01 6.899e+01 9.824e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:12:43,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=124749.33333333333, ans=0.0 +2024-07-28 07:12:48,222 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.31 vs. limit=15.0 +2024-07-28 07:12:54,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124776.0, ans=0.125 +2024-07-28 07:12:54,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.85 vs. limit=6.0 +2024-07-28 07:13:06,580 INFO [train.py:1114] (2/4) Epoch 10, batch 1600, loss[loss=0.2082, simple_loss=0.2989, pruned_loss=0.05869, over 4878.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2864, pruned_loss=0.05775, over 937482.97 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:13:06,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=124802.66666666667, ans=0.04949747468305833 +2024-07-28 07:13:06,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=124802.66666666667, ans=0.0 +2024-07-28 07:13:16,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=124816.0, ans=0.125 +2024-07-28 07:13:17,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=124816.0, ans=0.125 +2024-07-28 07:13:33,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=124856.0, ans=0.125 +2024-07-28 07:13:40,222 INFO [train.py:1114] (2/4) Epoch 10, batch 1650, loss[loss=0.2296, simple_loss=0.3223, pruned_loss=0.0685, over 4652.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2874, pruned_loss=0.05874, over 937429.26 frames. ], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:13:45,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. 
limit=6.0 +2024-07-28 07:13:48,853 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.790e+01 6.415e+01 7.555e+01 1.180e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 07:14:07,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=124909.33333333333, ans=0.125 +2024-07-28 07:14:13,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=124922.66666666667, ans=0.125 +2024-07-28 07:14:16,925 INFO [train.py:1114] (2/4) Epoch 10, batch 1700, loss[loss=0.1989, simple_loss=0.2699, pruned_loss=0.06401, over 4699.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2872, pruned_loss=0.05838, over 938994.71 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:14:21,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=124936.0, ans=0.2 +2024-07-28 07:14:22,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=124936.0, ans=0.125 +2024-07-28 07:14:35,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=124962.66666666667, ans=0.09899494936611666 +2024-07-28 07:14:38,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.11 vs. limit=15.0 +2024-07-28 07:14:49,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=124989.33333333333, ans=0.025 +2024-07-28 07:14:53,743 INFO [train.py:1114] (2/4) Epoch 10, batch 1750, loss[loss=0.1578, simple_loss=0.2364, pruned_loss=0.03966, over 4815.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2864, pruned_loss=0.05791, over 939873.28 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:15:04,249 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.615e+01 6.197e+01 6.752e+01 9.322e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 07:15:11,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=125029.33333333333, ans=0.025 +2024-07-28 07:15:11,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125029.33333333333, ans=0.1 +2024-07-28 07:15:16,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125042.66666666667, ans=0.1 +2024-07-28 07:15:31,051 INFO [train.py:1114] (2/4) Epoch 10, batch 1800, loss[loss=0.2058, simple_loss=0.3012, pruned_loss=0.05522, over 4633.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2868, pruned_loss=0.05785, over 940569.54 frames. 
], batch size: 13, lr: 7.65e-03, grad_scale: 32.0
+2024-07-28 07:15:37,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=125082.66666666667, ans=0.2
+2024-07-28 07:15:43,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=125082.66666666667, ans=0.125
+2024-07-28 07:15:55,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=125109.33333333333, ans=0.0
+2024-07-28 07:16:04,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=125136.0, ans=0.0
+2024-07-28 07:16:05,161 INFO [train.py:1114] (2/4) Epoch 10, batch 1850, loss[loss=0.2285, simple_loss=0.3102, pruned_loss=0.0734, over 4813.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.287, pruned_loss=0.05779, over 940679.97 frames. ], batch size: 14, lr: 7.65e-03, grad_scale: 32.0
+2024-07-28 07:16:10,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0
+2024-07-28 07:16:14,243 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:16:14,694 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+01 5.824e+01 6.671e+01 8.109e+01 1.121e+02, threshold=1.334e+02, percent-clipped=0.0
+2024-07-28 07:16:28,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=125176.0, ans=0.125
+2024-07-28 07:16:29,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=125176.0, ans=0.2
+2024-07-28 07:16:38,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=125189.33333333333, ans=0.125
+2024-07-28 07:16:43,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125189.33333333333, ans=0.125
+2024-07-28 07:16:45,566 INFO [train.py:1114] (2/4) Epoch 10, batch 1900, loss[loss=0.1878, simple_loss=0.2853, pruned_loss=0.04519, over 4665.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2872, pruned_loss=0.05783, over 941653.97 frames. ], batch size: 14, lr: 7.64e-03, grad_scale: 32.0
+2024-07-28 07:17:09,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125229.33333333333, ans=0.1
+2024-07-28 07:17:12,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=125229.33333333333, ans=0.2
+2024-07-28 07:17:18,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=125242.66666666667, ans=0.125
+2024-07-28 07:17:28,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125269.33333333333, ans=0.125
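The train.py:1114 progress lines report the current batch's loss (split into simple_loss and pruned_loss terms) followed by tot_loss, the same metrics aggregated over a much larger count of recent frames. For post-hoc plotting, these lines can be parsed directly; a sketch whose regex is written against the exact format visible in this log (the surrounding helper names are invented):

```python
# Sketch only: extract (epoch, batch, tot_loss) from the train.py
# progress lines above so the training curve can be plotted.
# The regex mirrors the log format; this is not the training code.
import re

PROGRESS = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
    r"loss\[loss=(?P<loss>[\d.]+), simple_loss=(?P<simple>[\d.]+), "
    r"pruned_loss=(?P<pruned>[\d.]+), over [\d.]+ frames\. \], "
    r"tot_loss\[loss=(?P<tot>[\d.]+)"
)

def parse_progress(lines):
    for line in lines:
        m = PROGRESS.search(line)
        if m:
            yield int(m["epoch"]), int(m["batch"]), float(m["tot"])

sample = ("2024-07-28 07:16:45,566 INFO [train.py:1114] (2/4) Epoch 10, "
          "batch 1900, loss[loss=0.1878, simple_loss=0.2853, "
          "pruned_loss=0.04519, over 4665.00 frames. ], tot_loss[loss=0.2014, "
          "simple_loss=0.2872, pruned_loss=0.05783, over 941653.97 frames. ], "
          "batch size: 14, lr: 7.64e-03, grad_scale: 32.0")
print(list(parse_progress([sample])))  # [(10, 1900, 0.2014)]
```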
+2024-07-28 07:17:28,515 INFO [train.py:1114] (2/4) Epoch 10, batch 1950, loss[loss=0.1764, simple_loss=0.27, pruned_loss=0.04136, over 4895.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2887, pruned_loss=0.05811, over 943811.11 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0
+2024-07-28 07:17:36,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0
+2024-07-28 07:17:37,351 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.662e+01 6.185e+01 7.189e+01 1.102e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 07:17:38,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=125282.66666666667, ans=0.125
+2024-07-28 07:17:39,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=125282.66666666667, ans=0.2
+2024-07-28 07:17:39,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=125282.66666666667, ans=0.2
+2024-07-28 07:17:57,102 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:18:04,797 INFO [train.py:1114] (2/4) Epoch 10, batch 2000, loss[loss=0.1555, simple_loss=0.232, pruned_loss=0.03951, over 4807.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2883, pruned_loss=0.05781, over 941262.66 frames. ], batch size: 11, lr: 7.64e-03, grad_scale: 32.0
+2024-07-28 07:18:09,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=125336.0, ans=0.125
+2024-07-28 07:18:31,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.98 vs. limit=15.0
+2024-07-28 07:18:38,347 INFO [train.py:1114] (2/4) Epoch 10, batch 2050, loss[loss=0.2049, simple_loss=0.2833, pruned_loss=0.06324, over 4613.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2871, pruned_loss=0.05739, over 939728.70 frames. ], batch size: 11, lr: 7.64e-03, grad_scale: 32.0
+2024-07-28 07:18:38,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=125402.66666666667, ans=0.125
+2024-07-28 07:18:42,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=125402.66666666667, ans=0.125
+2024-07-28 07:18:44,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=125416.0, ans=0.125
+2024-07-28 07:18:47,041 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+01 5.685e+01 6.326e+01 7.286e+01 1.205e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-28 07:18:54,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=125429.33333333333, ans=0.07
+2024-07-28 07:19:01,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=125442.66666666667, ans=0.025
+2024-07-28 07:19:08,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=125456.0, ans=0.125
+2024-07-28 07:19:08,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.27 vs. 
limit=22.5 +2024-07-28 07:19:09,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=125456.0, ans=0.0 +2024-07-28 07:19:13,213 INFO [train.py:1114] (2/4) Epoch 10, batch 2100, loss[loss=0.1874, simple_loss=0.2686, pruned_loss=0.05311, over 4761.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2851, pruned_loss=0.05629, over 941377.72 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:19:15,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=125469.33333333333, ans=0.0 +2024-07-28 07:19:30,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125496.0, ans=0.125 +2024-07-28 07:19:47,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=125536.0, ans=0.125 +2024-07-28 07:19:47,877 INFO [train.py:1114] (2/4) Epoch 10, batch 2150, loss[loss=0.1839, simple_loss=0.2746, pruned_loss=0.04659, over 4895.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2858, pruned_loss=0.05619, over 944422.27 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:19:48,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.31 vs. limit=22.5 +2024-07-28 07:19:49,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=125536.0, ans=0.09899494936611666 +2024-07-28 07:19:56,664 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.831e+01 5.697e+01 6.227e+01 7.381e+01 1.023e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:20:02,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=125549.33333333333, ans=0.125 +2024-07-28 07:20:08,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=125562.66666666667, ans=0.04949747468305833 +2024-07-28 07:20:24,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=125589.33333333333, ans=0.125 +2024-07-28 07:20:30,844 INFO [train.py:1114] (2/4) Epoch 10, batch 2200, loss[loss=0.2005, simple_loss=0.293, pruned_loss=0.05402, over 4805.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2855, pruned_loss=0.05626, over 943782.13 frames. ], batch size: 14, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:20:31,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=125602.66666666667, ans=0.125 +2024-07-28 07:20:38,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=125616.0, ans=0.05 +2024-07-28 07:20:50,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.47 vs. 
limit=15.0 +2024-07-28 07:21:06,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=125629.33333333333, ans=0.125 +2024-07-28 07:21:06,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=125629.33333333333, ans=0.025 +2024-07-28 07:21:09,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=125629.33333333333, ans=0.125 +2024-07-28 07:21:12,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125642.66666666667, ans=0.1 +2024-07-28 07:21:14,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.84 vs. limit=15.0 +2024-07-28 07:21:26,428 INFO [train.py:1114] (2/4) Epoch 10, batch 2250, loss[loss=0.2002, simple_loss=0.2998, pruned_loss=0.05031, over 4700.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2856, pruned_loss=0.05603, over 942532.09 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:21:35,186 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.513e+01 5.590e+01 6.237e+01 6.942e+01 1.306e+02, threshold=1.247e+02, percent-clipped=1.0 +2024-07-28 07:21:53,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=125696.0, ans=0.125 +2024-07-28 07:21:58,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=125709.33333333333, ans=0.125 +2024-07-28 07:22:02,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=125709.33333333333, ans=0.125 +2024-07-28 07:22:09,993 INFO [train.py:1114] (2/4) Epoch 10, batch 2300, loss[loss=0.1736, simple_loss=0.2709, pruned_loss=0.03811, over 4930.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2849, pruned_loss=0.05583, over 939978.05 frames. ], batch size: 12, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:17,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.16 vs. 
limit=15.0 +2024-07-28 07:22:20,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125749.33333333333, ans=0.1 +2024-07-28 07:22:21,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=125749.33333333333, ans=0.0 +2024-07-28 07:22:23,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=125762.66666666667, ans=0.025 +2024-07-28 07:22:25,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125762.66666666667, ans=0.125 +2024-07-28 07:22:39,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=125776.0, ans=0.125 +2024-07-28 07:22:40,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125776.0, ans=0.1 +2024-07-28 07:22:46,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=125789.33333333333, ans=0.2 +2024-07-28 07:22:47,877 INFO [train.py:1114] (2/4) Epoch 10, batch 2350, loss[loss=0.1803, simple_loss=0.2718, pruned_loss=0.0444, over 4631.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2848, pruned_loss=0.0556, over 941829.78 frames. ], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:50,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.80 vs. limit=15.0 +2024-07-28 07:22:51,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=125802.66666666667, ans=0.125 +2024-07-28 07:22:55,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-07-28 07:22:56,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.493e+01 6.004e+01 6.754e+01 1.065e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 07:22:59,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=125816.0, ans=0.0 +2024-07-28 07:23:04,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=125829.33333333333, ans=0.0 +2024-07-28 07:23:11,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=125842.66666666667, ans=0.2 +2024-07-28 07:23:17,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125856.0, ans=0.1 +2024-07-28 07:23:18,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=125856.0, ans=0.2 +2024-07-28 07:23:20,991 INFO [train.py:1114] (2/4) Epoch 10, batch 2400, loss[loss=0.1816, simple_loss=0.2709, pruned_loss=0.04619, over 4646.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2854, pruned_loss=0.05626, over 941252.70 frames. 
], batch size: 12, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:23:26,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.84 vs. limit=15.0 +2024-07-28 07:23:36,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=125896.0, ans=0.125 +2024-07-28 07:23:50,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.62 vs. limit=22.5 +2024-07-28 07:23:54,406 INFO [train.py:1114] (2/4) Epoch 10, batch 2450, loss[loss=0.1942, simple_loss=0.2789, pruned_loss=0.0547, over 4692.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2873, pruned_loss=0.0575, over 937178.10 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:24:01,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=125949.33333333333, ans=0.125 +2024-07-28 07:24:03,001 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.824e+01 6.375e+01 7.344e+01 1.011e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 07:24:05,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.14 vs. limit=22.5 +2024-07-28 07:24:11,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=125962.66666666667, ans=0.125 +2024-07-28 07:24:17,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=125976.0, ans=0.025 +2024-07-28 07:24:24,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=125989.33333333333, ans=0.125 +2024-07-28 07:24:27,206 INFO [train.py:1114] (2/4) Epoch 10, batch 2500, loss[loss=0.1905, simple_loss=0.2811, pruned_loss=0.04993, over 4817.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2867, pruned_loss=0.05732, over 938935.21 frames. ], batch size: 14, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:24:38,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126002.66666666667, ans=0.125 +2024-07-28 07:25:01,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-07-28 07:25:02,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126056.0, ans=0.1 +2024-07-28 07:25:03,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126056.0, ans=0.125 +2024-07-28 07:25:05,724 INFO [train.py:1114] (2/4) Epoch 10, batch 2550, loss[loss=0.1844, simple_loss=0.2639, pruned_loss=0.05249, over 4802.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.287, pruned_loss=0.05744, over 938460.96 frames. 
], batch size: 11, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:09,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=126069.33333333333, ans=0.125 +2024-07-28 07:25:14,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.571e+01 6.137e+01 7.112e+01 1.171e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:25:18,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=126096.0, ans=0.125 +2024-07-28 07:25:23,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=126096.0, ans=0.125 +2024-07-28 07:25:27,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=126109.33333333333, ans=0.95 +2024-07-28 07:25:30,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126109.33333333333, ans=0.125 +2024-07-28 07:25:31,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=126122.66666666667, ans=0.2 +2024-07-28 07:25:38,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.71 vs. limit=6.0 +2024-07-28 07:25:38,948 INFO [train.py:1114] (2/4) Epoch 10, batch 2600, loss[loss=0.1777, simple_loss=0.2624, pruned_loss=0.04647, over 4897.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2869, pruned_loss=0.05762, over 937421.69 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:41,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126136.0, ans=0.0 +2024-07-28 07:25:41,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=126136.0, ans=0.125 +2024-07-28 07:25:45,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.52 vs. limit=15.0 +2024-07-28 07:25:50,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=126149.33333333333, ans=0.2 +2024-07-28 07:25:51,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=126149.33333333333, ans=0.125 +2024-07-28 07:26:04,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=126176.0, ans=0.125 +2024-07-28 07:26:12,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126189.33333333333, ans=0.125 +2024-07-28 07:26:15,539 INFO [train.py:1114] (2/4) Epoch 10, batch 2650, loss[loss=0.2106, simple_loss=0.3016, pruned_loss=0.0598, over 4632.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2878, pruned_loss=0.05784, over 939631.55 frames. 
], batch size: 16, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:15,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126202.66666666667, ans=0.1 +2024-07-28 07:26:16,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=126202.66666666667, ans=0.0 +2024-07-28 07:26:22,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=126202.66666666667, ans=0.125 +2024-07-28 07:26:22,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=126202.66666666667, ans=0.0 +2024-07-28 07:26:25,844 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.752e+01 6.121e+01 6.935e+01 9.272e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 07:26:27,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=126216.0, ans=0.0 +2024-07-28 07:26:30,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=126229.33333333333, ans=0.125 +2024-07-28 07:26:51,027 INFO [train.py:1114] (2/4) Epoch 10, batch 2700, loss[loss=0.2538, simple_loss=0.3269, pruned_loss=0.09033, over 4732.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2879, pruned_loss=0.05815, over 940091.49 frames. ], batch size: 14, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:51,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=126269.33333333333, ans=0.0 +2024-07-28 07:26:54,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=126269.33333333333, ans=10.0 +2024-07-28 07:27:10,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=126296.0, ans=0.0 +2024-07-28 07:27:19,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=126322.66666666667, ans=0.0 +2024-07-28 07:27:23,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=126322.66666666667, ans=0.1 +2024-07-28 07:27:24,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.44 vs. limit=22.5 +2024-07-28 07:27:28,534 INFO [train.py:1114] (2/4) Epoch 10, batch 2750, loss[loss=0.186, simple_loss=0.2734, pruned_loss=0.04924, over 4709.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2874, pruned_loss=0.05788, over 939769.31 frames. 
], batch size: 12, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:27:35,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=126349.33333333333, ans=0.125 +2024-07-28 07:27:37,080 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.293e+01 5.804e+01 6.361e+01 7.427e+01 1.283e+02, threshold=1.272e+02, percent-clipped=1.0 +2024-07-28 07:27:43,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=126362.66666666667, ans=0.0 +2024-07-28 07:27:52,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126376.0, ans=0.1 +2024-07-28 07:27:53,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126376.0, ans=0.125 +2024-07-28 07:27:56,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126389.33333333333, ans=0.125 +2024-07-28 07:27:57,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=126389.33333333333, ans=0.125 +2024-07-28 07:28:02,094 INFO [train.py:1114] (2/4) Epoch 10, batch 2800, loss[loss=0.2334, simple_loss=0.3171, pruned_loss=0.07491, over 3372.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.05778, over 937284.51 frames. ], batch size: 35, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:05,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=126402.66666666667, ans=0.0 +2024-07-28 07:28:15,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=126429.33333333333, ans=0.0 +2024-07-28 07:28:15,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=126429.33333333333, ans=0.2 +2024-07-28 07:28:17,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=126429.33333333333, ans=0.125 +2024-07-28 07:28:29,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=126456.0, ans=0.0 +2024-07-28 07:28:30,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.87 vs. limit=15.0 +2024-07-28 07:28:31,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=126456.0, ans=0.125 +2024-07-28 07:28:32,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.41 vs. limit=15.0 +2024-07-28 07:28:35,596 INFO [train.py:1114] (2/4) Epoch 10, batch 2850, loss[loss=0.1855, simple_loss=0.27, pruned_loss=0.05053, over 4963.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2878, pruned_loss=0.0584, over 935558.56 frames. 
], batch size: 13, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:37,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=126469.33333333333, ans=0.0 +2024-07-28 07:28:44,293 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.758e+01 6.530e+01 7.801e+01 1.215e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 07:28:44,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=126482.66666666667, ans=0.0 +2024-07-28 07:28:47,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=126482.66666666667, ans=0.125 +2024-07-28 07:28:56,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126509.33333333333, ans=0.1 +2024-07-28 07:28:56,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126509.33333333333, ans=0.1 +2024-07-28 07:29:05,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=126522.66666666667, ans=0.125 +2024-07-28 07:29:07,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=126522.66666666667, ans=22.5 +2024-07-28 07:29:08,558 INFO [train.py:1114] (2/4) Epoch 10, batch 2900, loss[loss=0.2035, simple_loss=0.2823, pruned_loss=0.06232, over 4829.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2884, pruned_loss=0.05823, over 939428.66 frames. ], batch size: 13, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:12,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126536.0, ans=0.1 +2024-07-28 07:29:23,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=126562.66666666667, ans=0.125 +2024-07-28 07:29:25,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=126562.66666666667, ans=0.1 +2024-07-28 07:29:35,886 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:29:38,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=126589.33333333333, ans=0.2 +2024-07-28 07:29:40,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=126589.33333333333, ans=0.125 +2024-07-28 07:29:42,698 INFO [train.py:1114] (2/4) Epoch 10, batch 2950, loss[loss=0.1855, simple_loss=0.2761, pruned_loss=0.04751, over 4705.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2873, pruned_loss=0.05806, over 938132.75 frames. 
], batch size: 12, lr: 7.60e-03, grad_scale: 32.0
+2024-07-28 07:29:51,643 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.710e+01 6.450e+01 7.485e+01 1.036e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-28 07:29:57,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=126629.33333333333, ans=0.125
+2024-07-28 07:30:03,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0
+2024-07-28 07:30:09,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=126656.0, ans=0.0
+2024-07-28 07:30:11,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126656.0, ans=0.1
+2024-07-28 07:30:16,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=126656.0, ans=0.125
+2024-07-28 07:30:17,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.77 vs. limit=15.0
+2024-07-28 07:30:24,803 INFO [train.py:1114] (2/4) Epoch 10, batch 3000, loss[loss=0.1731, simple_loss=0.2704, pruned_loss=0.03786, over 4757.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2872, pruned_loss=0.0576, over 937896.40 frames. ], batch size: 13, lr: 7.60e-03, grad_scale: 32.0
+2024-07-28 07:30:24,803 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 07:30:42,396 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.173, simple_loss=0.277, pruned_loss=0.03444, over 944034.00 frames.
+2024-07-28 07:30:42,397 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 07:30:48,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0
+2024-07-28 07:31:03,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=126709.33333333333, ans=0.125
+2024-07-28 07:31:04,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=126709.33333333333, ans=0.125
+2024-07-28 07:31:08,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=126709.33333333333, ans=0.0
+2024-07-28 07:31:09,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=126709.33333333333, ans=0.125
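The three train.py:1137/1146/1147 lines just above show the training loop pausing at batch 3000 to compute the loss over the full validation set (0.173 here, against a training tot_loss around 0.20) and to report peak GPU memory. A compact sketch of that pattern; the interval, helper names, and signatures are illustrative assumptions, not the actual train.py:

```python
# Sketch only: periodic validation inside a training loop, echoing the
# "Computing validation loss" / "Maximum memory allocated" lines above.
# VALID_INTERVAL and compute_loss are invented; the real interval is not
# shown in this log.
import torch

VALID_INTERVAL = 3000

def maybe_validate(model, valid_loader, device, batch_idx, compute_loss):
    if batch_idx == 0 or batch_idx % VALID_INTERVAL != 0:
        return
    model.eval()
    tot, frames = 0.0, 0
    with torch.no_grad():
        for batch in valid_loader:
            # compute_loss is assumed to return (loss summed over n frames, n)
            loss, n = compute_loss(model, batch, device)
            tot, frames = tot + float(loss), frames + n
    print(f"validation: loss={tot / frames:.4g}, over {frames} frames.")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated(device) // 2**20}MB")
    model.train()
```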
+2024-07-28 07:31:17,816 INFO [train.py:1114] (2/4) Epoch 10, batch 3050, loss[loss=0.1931, simple_loss=0.2797, pruned_loss=0.05325, over 4638.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2874, pruned_loss=0.05793, over 937102.74 frames. ], batch size: 12, lr: 7.60e-03, grad_scale: 32.0
+2024-07-28 07:31:20,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=126736.0, ans=0.125
+2024-07-28 07:31:23,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=126736.0, ans=0.0
+2024-07-28 07:31:38,389 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.667e+01 6.279e+01 7.137e+01 1.004e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 07:31:43,962 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0
+2024-07-28 07:31:54,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=126762.66666666667, ans=0.5
+2024-07-28 07:32:15,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=126789.33333333333, ans=0.125
+2024-07-28 07:32:16,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=126802.66666666667, ans=0.125
+2024-07-28 07:32:17,206 INFO [train.py:1114] (2/4) Epoch 10, batch 3100, loss[loss=0.2197, simple_loss=0.3018, pruned_loss=0.06879, over 4624.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2875, pruned_loss=0.0582, over 937740.30 frames. ], batch size: 16, lr: 7.60e-03, grad_scale: 64.0
+2024-07-28 07:32:17,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126802.66666666667, ans=0.1
+2024-07-28 07:32:22,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=126802.66666666667, ans=0.0
+2024-07-28 07:32:22,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.19 vs. limit=22.5
+2024-07-28 07:32:23,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=126816.0, ans=0.125
+2024-07-28 07:32:28,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.62 vs. limit=15.0
+2024-07-28 07:32:36,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=126842.66666666667, ans=0.0
+2024-07-28 07:32:52,364 INFO [train.py:1114] (2/4) Epoch 10, batch 3150, loss[loss=0.1767, simple_loss=0.2598, pruned_loss=0.04676, over 4617.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2876, pruned_loss=0.05801, over 937916.95 frames. 
], batch size: 17, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:32:52,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=126869.33333333333, ans=0.0 +2024-07-28 07:32:53,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=126869.33333333333, ans=0.125 +2024-07-28 07:32:53,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=126869.33333333333, ans=0.1 +2024-07-28 07:32:58,231 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.53 vs. limit=12.0 +2024-07-28 07:33:01,116 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.563e+01 5.962e+01 7.006e+01 9.323e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 07:33:18,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=126909.33333333333, ans=0.025 +2024-07-28 07:33:27,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=126922.66666666667, ans=0.125 +2024-07-28 07:33:29,773 INFO [train.py:1114] (2/4) Epoch 10, batch 3200, loss[loss=0.2195, simple_loss=0.3141, pruned_loss=0.06248, over 4820.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2861, pruned_loss=0.057, over 939410.24 frames. ], batch size: 13, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:33:31,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=126936.0, ans=0.0 +2024-07-28 07:33:44,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.03 vs. limit=10.0 +2024-07-28 07:33:47,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.87 vs. limit=22.5 +2024-07-28 07:33:56,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=126989.33333333333, ans=10.0 +2024-07-28 07:34:02,611 INFO [train.py:1114] (2/4) Epoch 10, batch 3250, loss[loss=0.2209, simple_loss=0.3152, pruned_loss=0.0633, over 4932.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2866, pruned_loss=0.05696, over 940692.80 frames. ], batch size: 14, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:05,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=127002.66666666667, ans=0.04949747468305833 +2024-07-28 07:34:11,321 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 5.496e+01 6.167e+01 6.993e+01 1.063e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:34:11,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127016.0, ans=0.1 +2024-07-28 07:34:21,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127042.66666666667, ans=0.125 +2024-07-28 07:34:27,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.80 vs. 
limit=15.0 +2024-07-28 07:34:28,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.88 vs. limit=15.0 +2024-07-28 07:34:29,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=127056.0, ans=0.2 +2024-07-28 07:34:36,025 INFO [train.py:1114] (2/4) Epoch 10, batch 3300, loss[loss=0.2049, simple_loss=0.2853, pruned_loss=0.06226, over 4692.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2859, pruned_loss=0.05744, over 940834.27 frames. ], batch size: 19, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:34:51,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=127096.0, ans=0.2 +2024-07-28 07:34:52,436 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.95 vs. limit=15.0 +2024-07-28 07:34:56,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127109.33333333333, ans=0.125 +2024-07-28 07:34:57,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=127109.33333333333, ans=0.0 +2024-07-28 07:35:05,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=127122.66666666667, ans=0.0 +2024-07-28 07:35:09,178 INFO [train.py:1114] (2/4) Epoch 10, batch 3350, loss[loss=0.2177, simple_loss=0.3, pruned_loss=0.06768, over 4630.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2874, pruned_loss=0.05839, over 938529.98 frames. ], batch size: 17, lr: 7.59e-03, grad_scale: 64.0 +2024-07-28 07:35:15,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.85 vs. limit=10.0 +2024-07-28 07:35:17,675 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+01 5.618e+01 6.272e+01 7.252e+01 1.069e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 07:35:19,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=127149.33333333333, ans=0.0 +2024-07-28 07:35:29,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=127176.0, ans=0.1 +2024-07-28 07:35:34,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.67 vs. limit=15.0 +2024-07-28 07:35:38,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=127189.33333333333, ans=0.0 +2024-07-28 07:35:41,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=127189.33333333333, ans=0.125 +2024-07-28 07:35:42,763 INFO [train.py:1114] (2/4) Epoch 10, batch 3400, loss[loss=0.1759, simple_loss=0.2514, pruned_loss=0.0502, over 4804.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2873, pruned_loss=0.05859, over 936733.78 frames. 
], batch size: 11, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:35:45,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127202.66666666667, ans=0.125 +2024-07-28 07:35:55,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=127229.33333333333, ans=0.0 +2024-07-28 07:35:58,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.18 vs. limit=15.0 +2024-07-28 07:36:00,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0 +2024-07-28 07:36:08,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=127242.66666666667, ans=0.125 +2024-07-28 07:36:10,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=12.0 +2024-07-28 07:36:16,677 INFO [train.py:1114] (2/4) Epoch 10, batch 3450, loss[loss=0.2301, simple_loss=0.3158, pruned_loss=0.07217, over 4732.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2875, pruned_loss=0.05822, over 936825.85 frames. ], batch size: 19, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:36:25,812 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.619e+01 6.055e+01 6.552e+01 2.053e+02, threshold=1.211e+02, percent-clipped=1.0 +2024-07-28 07:36:31,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=127296.0, ans=0.0 +2024-07-28 07:36:34,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=127296.0, ans=0.0 +2024-07-28 07:36:45,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=127322.66666666667, ans=0.05 +2024-07-28 07:36:53,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127322.66666666667, ans=0.125 +2024-07-28 07:36:53,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=127322.66666666667, ans=0.0 +2024-07-28 07:36:53,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.44 vs. limit=22.5 +2024-07-28 07:36:56,186 INFO [train.py:1114] (2/4) Epoch 10, batch 3500, loss[loss=0.1694, simple_loss=0.2604, pruned_loss=0.03914, over 4926.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2859, pruned_loss=0.05736, over 937429.65 frames. ], batch size: 12, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:36:56,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=127336.0, ans=0.125 +2024-07-28 07:36:58,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=127336.0, ans=0.0 +2024-07-28 07:37:12,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.13 vs. 
limit=22.5 +2024-07-28 07:37:23,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=127362.66666666667, ans=0.0 +2024-07-28 07:37:26,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.63 vs. limit=22.5 +2024-07-28 07:37:36,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=127389.33333333333, ans=0.0 +2024-07-28 07:37:38,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.10 vs. limit=22.5 +2024-07-28 07:37:46,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.82 vs. limit=15.0 +2024-07-28 07:37:47,036 INFO [train.py:1114] (2/4) Epoch 10, batch 3550, loss[loss=0.2058, simple_loss=0.296, pruned_loss=0.05781, over 4663.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2861, pruned_loss=0.05718, over 938094.70 frames. ], batch size: 14, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:38:16,055 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.635e+01 6.291e+01 7.462e+01 1.218e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-28 07:38:16,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=127416.0, ans=0.0 +2024-07-28 07:38:16,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=127416.0, ans=0.0 +2024-07-28 07:38:20,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127429.33333333333, ans=0.1 +2024-07-28 07:38:21,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=127429.33333333333, ans=0.2 +2024-07-28 07:38:23,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=127429.33333333333, ans=0.2 +2024-07-28 07:38:30,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=127442.66666666667, ans=0.2 +2024-07-28 07:38:40,231 INFO [train.py:1114] (2/4) Epoch 10, batch 3600, loss[loss=0.1987, simple_loss=0.2797, pruned_loss=0.05883, over 4960.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2872, pruned_loss=0.0579, over 940053.75 frames. ], batch size: 13, lr: 7.58e-03, grad_scale: 32.0 +2024-07-28 07:38:40,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=127469.33333333333, ans=0.0 +2024-07-28 07:38:42,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=127469.33333333333, ans=0.2 +2024-07-28 07:38:43,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=127469.33333333333, ans=0.1 +2024-07-28 07:38:44,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.16 vs. 
limit=15.0 +2024-07-28 07:38:46,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=127482.66666666667, ans=0.125 +2024-07-28 07:39:08,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=127496.0, ans=0.125 +2024-07-28 07:39:23,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=127522.66666666667, ans=0.04949747468305833 +2024-07-28 07:39:25,757 INFO [train.py:1114] (2/4) Epoch 10, batch 3650, loss[loss=0.1943, simple_loss=0.2778, pruned_loss=0.05539, over 4914.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2859, pruned_loss=0.05778, over 940687.43 frames. ], batch size: 15, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:39:38,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=127536.0, ans=0.025 +2024-07-28 07:39:51,676 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.725e+01 6.100e+01 7.132e+01 1.043e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 07:39:53,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127549.33333333333, ans=0.1 +2024-07-28 07:40:02,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127549.33333333333, ans=0.125 +2024-07-28 07:40:05,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127562.66666666667, ans=0.1 +2024-07-28 07:40:50,444 INFO [train.py:1114] (2/4) Epoch 10, batch 3700, loss[loss=0.2047, simple_loss=0.2987, pruned_loss=0.05535, over 4923.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2859, pruned_loss=0.0575, over 941564.66 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:40:50,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=127602.66666666667, ans=0.2 +2024-07-28 07:41:03,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.27 vs. limit=22.5 +2024-07-28 07:41:04,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=127616.0, ans=0.0 +2024-07-28 07:41:24,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=127656.0, ans=0.2 +2024-07-28 07:41:28,282 INFO [train.py:1114] (2/4) Epoch 10, batch 3750, loss[loss=0.1742, simple_loss=0.2539, pruned_loss=0.04724, over 4811.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2854, pruned_loss=0.05741, over 942792.54 frames. 
], batch size: 11, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:41:48,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127682.66666666667, ans=0.1 +2024-07-28 07:41:51,638 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+01 5.968e+01 6.692e+01 7.910e+01 1.742e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-28 07:41:53,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=127682.66666666667, ans=0.125 +2024-07-28 07:42:03,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=127709.33333333333, ans=0.0 +2024-07-28 07:42:12,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=127722.66666666667, ans=0.125 +2024-07-28 07:42:21,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=127722.66666666667, ans=0.125 +2024-07-28 07:42:22,850 INFO [train.py:1114] (2/4) Epoch 10, batch 3800, loss[loss=0.227, simple_loss=0.3212, pruned_loss=0.06642, over 4813.00 frames. ], tot_loss[loss=0.201, simple_loss=0.286, pruned_loss=0.05804, over 941335.24 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:42:44,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=127762.66666666667, ans=0.125 +2024-07-28 07:42:49,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=127762.66666666667, ans=0.125 +2024-07-28 07:42:50,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=127762.66666666667, ans=0.025 +2024-07-28 07:43:10,552 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.55 vs. limit=6.0 +2024-07-28 07:43:11,570 INFO [train.py:1114] (2/4) Epoch 10, batch 3850, loss[loss=0.1971, simple_loss=0.2914, pruned_loss=0.05136, over 4639.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2852, pruned_loss=0.05705, over 941914.26 frames. ], batch size: 16, lr: 7.57e-03, grad_scale: 32.0 +2024-07-28 07:43:17,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=127816.0, ans=0.2 +2024-07-28 07:43:19,521 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.65 vs. limit=22.5 +2024-07-28 07:43:21,989 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.662e+01 6.521e+01 7.617e+01 1.192e+02, threshold=1.304e+02, percent-clipped=1.0 +2024-07-28 07:43:29,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.16 vs. limit=15.0 +2024-07-28 07:43:30,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.68 vs. 
limit=12.0 +2024-07-28 07:43:39,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=127842.66666666667, ans=0.125 +2024-07-28 07:43:41,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=127842.66666666667, ans=0.125 +2024-07-28 07:43:50,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=127856.0, ans=0.125 +2024-07-28 07:43:51,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127856.0, ans=0.125 +2024-07-28 07:43:51,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=127869.33333333333, ans=0.0 +2024-07-28 07:43:51,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=127869.33333333333, ans=0.09899494936611666 +2024-07-28 07:43:52,482 INFO [train.py:1114] (2/4) Epoch 10, batch 3900, loss[loss=0.1786, simple_loss=0.2618, pruned_loss=0.04768, over 4808.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2857, pruned_loss=0.05728, over 942118.66 frames. ], batch size: 14, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:44:02,757 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-07-28 07:44:08,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.39 vs. limit=22.5 +2024-07-28 07:44:16,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127896.0, ans=0.1 +2024-07-28 07:44:21,996 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:44:24,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=127909.33333333333, ans=0.04949747468305833 +2024-07-28 07:44:31,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=127922.66666666667, ans=0.025 +2024-07-28 07:44:33,705 INFO [train.py:1114] (2/4) Epoch 10, batch 3950, loss[loss=0.2214, simple_loss=0.3107, pruned_loss=0.06604, over 4830.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2852, pruned_loss=0.05668, over 944229.40 frames. ], batch size: 16, lr: 7.56e-03, grad_scale: 16.0 +2024-07-28 07:44:57,176 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.715e+01 6.133e+01 6.852e+01 1.045e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:45:09,787 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. 
limit=15.0 +2024-07-28 07:45:11,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127976.0, ans=0.1 +2024-07-28 07:45:16,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127989.33333333333, ans=0.1 +2024-07-28 07:45:23,676 INFO [train.py:1114] (2/4) Epoch 10, batch 4000, loss[loss=0.202, simple_loss=0.2968, pruned_loss=0.05356, over 4782.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.287, pruned_loss=0.05787, over 940384.30 frames. ], batch size: 12, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:45:25,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=128002.66666666667, ans=0.125 +2024-07-28 07:45:29,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=128002.66666666667, ans=0.0 +2024-07-28 07:45:55,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=128056.0, ans=0.025 +2024-07-28 07:46:07,089 INFO [train.py:1114] (2/4) Epoch 10, batch 4050, loss[loss=0.247, simple_loss=0.317, pruned_loss=0.08844, over 3254.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2879, pruned_loss=0.05848, over 938973.39 frames. ], batch size: 36, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:46:08,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=128069.33333333333, ans=0.0 +2024-07-28 07:46:10,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=128069.33333333333, ans=0.125 +2024-07-28 07:46:16,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=128082.66666666667, ans=0.025 +2024-07-28 07:46:16,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128082.66666666667, ans=0.1 +2024-07-28 07:46:17,343 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.919e+01 6.572e+01 7.473e+01 1.130e+02, threshold=1.314e+02, percent-clipped=0.0 +2024-07-28 07:46:17,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=128082.66666666667, ans=0.125 +2024-07-28 07:46:21,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=128096.0, ans=0.0 +2024-07-28 07:46:26,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=128096.0, ans=0.025 +2024-07-28 07:46:38,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=128122.66666666667, ans=0.0 +2024-07-28 07:46:42,250 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.02 vs. limit=15.0 +2024-07-28 07:46:42,601 INFO [train.py:1114] (2/4) Epoch 10, batch 4100, loss[loss=0.2171, simple_loss=0.2953, pruned_loss=0.06944, over 4906.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2883, pruned_loss=0.05879, over 938199.78 frames. 
], batch size: 15, lr: 7.56e-03, grad_scale: 32.0 +2024-07-28 07:46:44,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128136.0, ans=0.1 +2024-07-28 07:46:44,899 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0 +2024-07-28 07:46:48,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128136.0, ans=0.1 +2024-07-28 07:46:52,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=128149.33333333333, ans=0.0 +2024-07-28 07:47:18,166 INFO [train.py:1114] (2/4) Epoch 10, batch 4150, loss[loss=0.2004, simple_loss=0.2912, pruned_loss=0.05479, over 4827.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2879, pruned_loss=0.05816, over 937764.23 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:47:23,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=128202.66666666667, ans=0.125 +2024-07-28 07:47:27,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=128216.0, ans=0.125 +2024-07-28 07:47:28,110 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.846e+01 6.728e+01 7.607e+01 1.158e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-28 07:47:40,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=128242.66666666667, ans=0.2 +2024-07-28 07:47:51,069 INFO [train.py:1114] (2/4) Epoch 10, batch 4200, loss[loss=0.1982, simple_loss=0.2909, pruned_loss=0.05272, over 4901.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2873, pruned_loss=0.05762, over 939567.77 frames. ], batch size: 15, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:47:58,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=128282.66666666667, ans=0.125 +2024-07-28 07:48:03,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=128296.0, ans=0.125 +2024-07-28 07:48:11,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=128309.33333333333, ans=0.025 +2024-07-28 07:48:23,681 INFO [train.py:1114] (2/4) Epoch 10, batch 4250, loss[loss=0.1664, simple_loss=0.2648, pruned_loss=0.03407, over 4641.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2882, pruned_loss=0.05829, over 940450.93 frames. 
], batch size: 12, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:48:23,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=128336.0, ans=0.0 +2024-07-28 07:48:25,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=128336.0, ans=0.0 +2024-07-28 07:48:29,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=128349.33333333333, ans=0.025 +2024-07-28 07:48:31,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=128349.33333333333, ans=0.125 +2024-07-28 07:48:33,351 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.567e+01 6.071e+01 6.705e+01 1.236e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 07:48:47,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=128376.0, ans=0.125 +2024-07-28 07:48:49,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=128376.0, ans=0.125 +2024-07-28 07:48:49,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=128389.33333333333, ans=0.125 +2024-07-28 07:48:55,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=128389.33333333333, ans=0.125 +2024-07-28 07:48:56,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=128389.33333333333, ans=0.2 +2024-07-28 07:48:57,117 INFO [train.py:1114] (2/4) Epoch 10, batch 4300, loss[loss=0.1703, simple_loss=0.2612, pruned_loss=0.03972, over 4767.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2877, pruned_loss=0.05787, over 939881.76 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:49:14,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=128429.33333333333, ans=0.5 +2024-07-28 07:49:26,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=128456.0, ans=0.125 +2024-07-28 07:49:30,499 INFO [train.py:1114] (2/4) Epoch 10, batch 4350, loss[loss=0.18, simple_loss=0.2724, pruned_loss=0.04385, over 4754.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2883, pruned_loss=0.05792, over 940625.61 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:49:40,763 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.507e+01 6.201e+01 7.013e+01 1.119e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 07:49:47,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=128496.0, ans=0.125 +2024-07-28 07:50:04,259 INFO [train.py:1114] (2/4) Epoch 10, batch 4400, loss[loss=0.1832, simple_loss=0.2837, pruned_loss=0.04129, over 4815.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2881, pruned_loss=0.05758, over 940339.20 frames. ], batch size: 14, lr: 7.55e-03, grad_scale: 32.0 +2024-07-28 07:50:08,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. 
limit=15.0 +2024-07-28 07:50:12,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.98 vs. limit=22.5 +2024-07-28 07:50:28,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128576.0, ans=0.125 +2024-07-28 07:50:31,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=128589.33333333333, ans=0.0 +2024-07-28 07:50:33,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=128589.33333333333, ans=0.0 +2024-07-28 07:50:34,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.31 vs. limit=12.0 +2024-07-28 07:50:35,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=128589.33333333333, ans=0.2 +2024-07-28 07:50:37,961 INFO [train.py:1114] (2/4) Epoch 10, batch 4450, loss[loss=0.1669, simple_loss=0.2487, pruned_loss=0.04255, over 4942.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2874, pruned_loss=0.05762, over 939136.24 frames. ], batch size: 12, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:50:41,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.84 vs. limit=15.0 +2024-07-28 07:50:43,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=128602.66666666667, ans=0.125 +2024-07-28 07:50:43,718 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.57 vs. limit=12.0 +2024-07-28 07:50:47,721 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.603e+01 6.224e+01 7.010e+01 9.776e+01, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:50:55,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.58 vs. limit=15.0 +2024-07-28 07:51:10,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=128656.0, ans=0.125 +2024-07-28 07:51:12,773 INFO [train.py:1114] (2/4) Epoch 10, batch 4500, loss[loss=0.2038, simple_loss=0.297, pruned_loss=0.05531, over 4735.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2876, pruned_loss=0.05687, over 938244.78 frames. ], batch size: 14, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:51:30,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=128696.0, ans=0.025 +2024-07-28 07:51:32,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.65 vs. limit=15.0 +2024-07-28 07:51:35,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.57 vs. limit=15.0 +2024-07-28 07:51:47,665 INFO [train.py:1114] (2/4) Epoch 10, batch 4550, loss[loss=0.2274, simple_loss=0.3092, pruned_loss=0.07273, over 4896.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2867, pruned_loss=0.05654, over 940252.66 frames. 
], batch size: 13, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:51:49,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=128736.0, ans=0.2 +2024-07-28 07:51:53,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=128736.0, ans=0.125 +2024-07-28 07:51:57,768 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.839e+01 6.410e+01 7.232e+01 1.296e+02, threshold=1.282e+02, percent-clipped=2.0 +2024-07-28 07:52:04,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=128762.66666666667, ans=10.0 +2024-07-28 07:52:04,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.57 vs. limit=15.0 +2024-07-28 07:52:16,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=128789.33333333333, ans=0.0 +2024-07-28 07:52:17,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=128789.33333333333, ans=0.125 +2024-07-28 07:52:19,085 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:52:24,747 INFO [train.py:1114] (2/4) Epoch 10, batch 4600, loss[loss=0.1821, simple_loss=0.2778, pruned_loss=0.04324, over 4544.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2858, pruned_loss=0.05644, over 938594.96 frames. ], batch size: 21, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:52:30,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=128816.0, ans=0.125 +2024-07-28 07:52:36,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=128816.0, ans=0.0 +2024-07-28 07:52:36,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=128816.0, ans=0.2 +2024-07-28 07:52:40,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=128829.33333333333, ans=0.0 +2024-07-28 07:52:46,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=128842.66666666667, ans=0.125 +2024-07-28 07:52:54,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128856.0, ans=0.1 +2024-07-28 07:52:57,426 INFO [train.py:1114] (2/4) Epoch 10, batch 4650, loss[loss=0.1778, simple_loss=0.2805, pruned_loss=0.03754, over 4849.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2855, pruned_loss=0.05599, over 940011.06 frames. 
], batch size: 16, lr: 7.54e-03, grad_scale: 32.0 +2024-07-28 07:53:00,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=128869.33333333333, ans=0.0 +2024-07-28 07:53:05,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=128882.66666666667, ans=0.0 +2024-07-28 07:53:07,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=128882.66666666667, ans=0.125 +2024-07-28 07:53:07,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.595e+01 6.179e+01 7.275e+01 1.134e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 07:53:18,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=128909.33333333333, ans=0.0 +2024-07-28 07:53:27,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=128922.66666666667, ans=0.1 +2024-07-28 07:53:30,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=128936.0, ans=0.07 +2024-07-28 07:53:31,106 INFO [train.py:1114] (2/4) Epoch 10, batch 4700, loss[loss=0.1628, simple_loss=0.2383, pruned_loss=0.04361, over 4705.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2852, pruned_loss=0.05602, over 937911.75 frames. ], batch size: 11, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:53:34,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=128936.0, ans=0.125 +2024-07-28 07:53:38,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=128949.33333333333, ans=15.0 +2024-07-28 07:53:47,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=128962.66666666667, ans=0.025 +2024-07-28 07:53:50,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=128962.66666666667, ans=0.0 +2024-07-28 07:53:51,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=128976.0, ans=0.0 +2024-07-28 07:53:56,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.33 vs. limit=6.0 +2024-07-28 07:53:58,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=128989.33333333333, ans=10.0 +2024-07-28 07:54:00,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128989.33333333333, ans=0.1 +2024-07-28 07:54:05,545 INFO [train.py:1114] (2/4) Epoch 10, batch 4750, loss[loss=0.262, simple_loss=0.3389, pruned_loss=0.09258, over 4500.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2863, pruned_loss=0.05648, over 935864.29 frames. 
], batch size: 21, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:54:15,560 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.636e+01 6.177e+01 7.080e+01 9.506e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 07:54:17,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129016.0, ans=0.1 +2024-07-28 07:54:30,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129042.66666666667, ans=0.125 +2024-07-28 07:54:40,012 INFO [train.py:1114] (2/4) Epoch 10, batch 4800, loss[loss=0.2301, simple_loss=0.3156, pruned_loss=0.07229, over 4701.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2857, pruned_loss=0.05659, over 933287.77 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:54:41,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=129069.33333333333, ans=0.0 +2024-07-28 07:54:41,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129069.33333333333, ans=0.125 +2024-07-28 07:54:56,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=129096.0, ans=0.05 +2024-07-28 07:54:56,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-07-28 07:54:57,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=129096.0, ans=10.0 +2024-07-28 07:55:06,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129122.66666666667, ans=0.125 +2024-07-28 07:55:08,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=15.0 +2024-07-28 07:55:08,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=129122.66666666667, ans=0.07 +2024-07-28 07:55:09,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.06 vs. limit=22.5 +2024-07-28 07:55:10,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=129122.66666666667, ans=0.125 +2024-07-28 07:55:12,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=129122.66666666667, ans=0.125 +2024-07-28 07:55:13,157 INFO [train.py:1114] (2/4) Epoch 10, batch 4850, loss[loss=0.194, simple_loss=0.2815, pruned_loss=0.05322, over 4748.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2861, pruned_loss=0.05651, over 932595.37 frames. 
], batch size: 14, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:55:13,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=129136.0, ans=0.125 +2024-07-28 07:55:18,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=129136.0, ans=0.1 +2024-07-28 07:55:23,234 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.570e+01 6.105e+01 6.787e+01 9.790e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 07:55:25,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=129149.33333333333, ans=0.2 +2024-07-28 07:55:35,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=129176.0, ans=0.025 +2024-07-28 07:55:46,409 INFO [train.py:1114] (2/4) Epoch 10, batch 4900, loss[loss=0.1821, simple_loss=0.2671, pruned_loss=0.0486, over 4771.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2865, pruned_loss=0.05687, over 934387.11 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0 +2024-07-28 07:55:49,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=129202.66666666667, ans=0.125 +2024-07-28 07:55:50,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=129202.66666666667, ans=0.0 +2024-07-28 07:55:53,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0 +2024-07-28 07:55:59,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=129216.0, ans=0.0 +2024-07-28 07:56:06,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=129242.66666666667, ans=0.125 +2024-07-28 07:56:12,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=129242.66666666667, ans=0.2 +2024-07-28 07:56:15,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=129256.0, ans=0.125 +2024-07-28 07:56:16,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=129256.0, ans=0.125 +2024-07-28 07:56:16,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=129256.0, ans=0.125 +2024-07-28 07:56:20,960 INFO [train.py:1114] (2/4) Epoch 10, batch 4950, loss[loss=0.2374, simple_loss=0.3075, pruned_loss=0.08364, over 3406.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2879, pruned_loss=0.0579, over 931691.07 frames. 
], batch size: 35, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:56:27,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129269.33333333333, ans=0.125 +2024-07-28 07:56:29,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=129282.66666666667, ans=0.05 +2024-07-28 07:56:33,111 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.675e+01 6.169e+01 7.226e+01 1.073e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 07:56:43,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=129309.33333333333, ans=0.125 +2024-07-28 07:56:44,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.24 vs. limit=22.5 +2024-07-28 07:57:00,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=129322.66666666667, ans=0.0 +2024-07-28 07:57:01,459 INFO [train.py:1114] (2/4) Epoch 10, batch 5000, loss[loss=0.2106, simple_loss=0.3059, pruned_loss=0.05769, over 4654.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2873, pruned_loss=0.05721, over 935502.37 frames. ], batch size: 14, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:57:02,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129336.0, ans=0.125 +2024-07-28 07:57:09,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129349.33333333333, ans=0.1 +2024-07-28 07:57:14,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129362.66666666667, ans=0.125 +2024-07-28 07:57:16,197 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:57:18,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129362.66666666667, ans=0.125 +2024-07-28 07:57:19,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=129362.66666666667, ans=0.95 +2024-07-28 07:57:21,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=129376.0, ans=0.125 +2024-07-28 07:57:23,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129376.0, ans=0.1 +2024-07-28 07:57:23,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=129376.0, ans=0.125 +2024-07-28 07:57:25,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129376.0, ans=0.0 +2024-07-28 07:57:34,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=129389.33333333333, ans=0.125 +2024-07-28 07:57:37,478 INFO [train.py:1114] (2/4) Epoch 10, batch 5050, loss[loss=0.2009, simple_loss=0.2895, pruned_loss=0.0561, over 4856.00 frames. 
], tot_loss[loss=0.2, simple_loss=0.2862, pruned_loss=0.05694, over 937931.63 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:57:45,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129416.0, ans=0.125 +2024-07-28 07:57:47,628 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+01 5.711e+01 6.360e+01 7.128e+01 1.073e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 07:57:50,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129416.0, ans=0.0 +2024-07-28 07:58:08,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=129456.0, ans=0.0 +2024-07-28 07:58:13,497 INFO [train.py:1114] (2/4) Epoch 10, batch 5100, loss[loss=0.1975, simple_loss=0.2753, pruned_loss=0.05985, over 4777.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.287, pruned_loss=0.05763, over 935684.38 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:58:15,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=129469.33333333333, ans=0.125 +2024-07-28 07:58:19,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=129482.66666666667, ans=0.125 +2024-07-28 07:58:21,242 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.72 vs. limit=22.5 +2024-07-28 07:58:26,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129496.0, ans=0.125 +2024-07-28 07:58:31,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0 +2024-07-28 07:58:38,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.87 vs. limit=15.0 +2024-07-28 07:58:46,343 INFO [train.py:1114] (2/4) Epoch 10, batch 5150, loss[loss=0.202, simple_loss=0.2945, pruned_loss=0.0547, over 4836.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.288, pruned_loss=0.0581, over 936613.24 frames. ], batch size: 16, lr: 7.52e-03, grad_scale: 32.0 +2024-07-28 07:58:48,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129536.0, ans=0.125 +2024-07-28 07:58:52,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.27 vs. 
limit=12.0 +2024-07-28 07:58:53,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=129549.33333333333, ans=0.0 +2024-07-28 07:58:56,278 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.874e+01 5.650e+01 6.455e+01 7.114e+01 1.167e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 07:59:00,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=129562.66666666667, ans=0.0 +2024-07-28 07:59:02,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129562.66666666667, ans=0.1 +2024-07-28 07:59:02,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=129562.66666666667, ans=0.0 +2024-07-28 07:59:05,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=129576.0, ans=0.025 +2024-07-28 07:59:07,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129576.0, ans=0.1 +2024-07-28 07:59:11,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.15 vs. limit=15.0 +2024-07-28 07:59:12,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.64 vs. limit=15.0 +2024-07-28 07:59:20,218 INFO [train.py:1114] (2/4) Epoch 10, batch 5200, loss[loss=0.2405, simple_loss=0.3273, pruned_loss=0.0769, over 4665.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2876, pruned_loss=0.05729, over 936611.69 frames. ], batch size: 14, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 07:59:21,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129602.66666666667, ans=0.1 +2024-07-28 07:59:42,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=129642.66666666667, ans=0.125 +2024-07-28 07:59:49,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=129656.0, ans=0.2 +2024-07-28 07:59:53,605 INFO [train.py:1114] (2/4) Epoch 10, batch 5250, loss[loss=0.1911, simple_loss=0.264, pruned_loss=0.05906, over 4902.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2864, pruned_loss=0.05691, over 936658.07 frames. 
], batch size: 13, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:00:03,740 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 5.858e+01 6.971e+01 8.204e+01 1.196e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-28 08:00:04,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129682.66666666667, ans=0.1 +2024-07-28 08:00:08,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=129696.0, ans=0.125 +2024-07-28 08:00:09,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=129696.0, ans=0.125 +2024-07-28 08:00:10,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=129696.0, ans=0.2 +2024-07-28 08:00:11,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=129696.0, ans=0.125 +2024-07-28 08:00:13,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=129709.33333333333, ans=0.025 +2024-07-28 08:00:17,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=6.0 +2024-07-28 08:00:18,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.21 vs. limit=22.5 +2024-07-28 08:00:24,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=129722.66666666667, ans=0.07 +2024-07-28 08:00:27,508 INFO [train.py:1114] (2/4) Epoch 10, batch 5300, loss[loss=0.1959, simple_loss=0.2917, pruned_loss=0.05006, over 4623.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.286, pruned_loss=0.0571, over 934542.76 frames. ], batch size: 16, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:00:29,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.25 vs. limit=12.0 +2024-07-28 08:00:39,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=129749.33333333333, ans=0.0 +2024-07-28 08:00:39,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=129749.33333333333, ans=0.0 +2024-07-28 08:00:44,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129762.66666666667, ans=0.0 +2024-07-28 08:00:51,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129776.0, ans=0.0 +2024-07-28 08:00:53,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.29 vs. limit=15.0 +2024-07-28 08:00:57,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.57 vs. 
limit=15.0 +2024-07-28 08:00:58,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=129789.33333333333, ans=0.125 +2024-07-28 08:01:00,918 INFO [train.py:1114] (2/4) Epoch 10, batch 5350, loss[loss=0.157, simple_loss=0.2266, pruned_loss=0.0437, over 4501.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2855, pruned_loss=0.05657, over 936381.98 frames. ], batch size: 10, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:01:03,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129802.66666666667, ans=0.125 +2024-07-28 08:01:07,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129816.0, ans=0.1 +2024-07-28 08:01:07,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=129816.0, ans=0.0 +2024-07-28 08:01:10,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=129816.0, ans=0.05 +2024-07-28 08:01:11,102 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.483e+01 5.986e+01 6.738e+01 1.016e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 08:01:31,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=129856.0, ans=0.1 +2024-07-28 08:01:33,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=129856.0, ans=0.0 +2024-07-28 08:01:36,307 INFO [train.py:1114] (2/4) Epoch 10, batch 5400, loss[loss=0.1882, simple_loss=0.284, pruned_loss=0.04618, over 4245.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2866, pruned_loss=0.0571, over 930523.79 frames. 
], batch size: 25, lr: 7.51e-03, grad_scale: 32.0 +2024-07-28 08:01:40,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=129869.33333333333, ans=0.025 +2024-07-28 08:01:41,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=129869.33333333333, ans=0.0 +2024-07-28 08:01:46,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=129882.66666666667, ans=0.1 +2024-07-28 08:01:53,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=129896.0, ans=0.125 +2024-07-28 08:01:57,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=129896.0, ans=0.07 +2024-07-28 08:02:00,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=129909.33333333333, ans=0.125 +2024-07-28 08:02:04,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=129922.66666666667, ans=0.0 +2024-07-28 08:02:07,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=129922.66666666667, ans=0.125 +2024-07-28 08:02:11,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=129936.0, ans=0.0 +2024-07-28 08:02:12,902 INFO [train.py:1114] (2/4) Epoch 10, batch 5450, loss[loss=0.1496, simple_loss=0.2361, pruned_loss=0.03158, over 4687.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2849, pruned_loss=0.05628, over 933130.17 frames. ], batch size: 11, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:02:22,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=129949.33333333333, ans=0.125 +2024-07-28 08:02:24,709 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.695e+01 6.364e+01 7.750e+01 1.165e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 08:02:32,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=129962.66666666667, ans=0.1 +2024-07-28 08:02:42,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=129989.33333333333, ans=0.125 +2024-07-28 08:02:45,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=129989.33333333333, ans=0.95 +2024-07-28 08:02:45,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=129989.33333333333, ans=0.0 +2024-07-28 08:02:48,503 INFO [train.py:1114] (2/4) Epoch 10, batch 5500, loss[loss=0.185, simple_loss=0.2841, pruned_loss=0.04293, over 4152.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2861, pruned_loss=0.05694, over 930920.87 frames. 
], batch size: 25, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:03:18,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=130042.66666666667, ans=0.0 +2024-07-28 08:03:19,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=130056.0, ans=0.0 +2024-07-28 08:03:22,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=130056.0, ans=0.07 +2024-07-28 08:03:23,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=130056.0, ans=0.2 +2024-07-28 08:03:25,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-28 08:03:26,089 INFO [train.py:1114] (2/4) Epoch 10, batch 5550, loss[loss=0.1811, simple_loss=0.2683, pruned_loss=0.04693, over 4710.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2869, pruned_loss=0.05743, over 933398.99 frames. ], batch size: 12, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:03:32,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=130082.66666666667, ans=0.125 +2024-07-28 08:03:35,930 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.857e+01 6.242e+01 7.417e+01 1.070e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 08:03:37,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130082.66666666667, ans=0.1 +2024-07-28 08:03:49,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=130109.33333333333, ans=0.125 +2024-07-28 08:03:49,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=130109.33333333333, ans=0.125 +2024-07-28 08:03:54,906 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.97 vs. limit=22.5 +2024-07-28 08:03:59,474 INFO [train.py:1114] (2/4) Epoch 10, batch 5600, loss[loss=0.2135, simple_loss=0.2979, pruned_loss=0.06455, over 4732.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2877, pruned_loss=0.05791, over 933996.94 frames. ], batch size: 14, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:07,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=130149.33333333333, ans=0.2 +2024-07-28 08:04:07,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=130149.33333333333, ans=0.2 +2024-07-28 08:04:26,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=130176.0, ans=0.0 +2024-07-28 08:04:29,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130176.0, ans=0.1 +2024-07-28 08:04:40,122 INFO [train.py:1114] (2/4) Epoch 10, batch 5650, loss[loss=0.219, simple_loss=0.3012, pruned_loss=0.06844, over 4523.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2867, pruned_loss=0.05742, over 936670.73 frames. 
], batch size: 21, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:40,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=130202.66666666667, ans=0.125 +2024-07-28 08:04:45,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=130202.66666666667, ans=0.125 +2024-07-28 08:04:50,453 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.620e+01 6.091e+01 7.074e+01 1.306e+02, threshold=1.218e+02, percent-clipped=1.0 +2024-07-28 08:05:00,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=130242.66666666667, ans=0.125 +2024-07-28 08:05:01,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=130242.66666666667, ans=0.025 +2024-07-28 08:05:05,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.76 vs. limit=15.0 +2024-07-28 08:05:06,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=130256.0, ans=0.025 +2024-07-28 08:05:13,680 INFO [train.py:1114] (2/4) Epoch 10, batch 5700, loss[loss=0.2513, simple_loss=0.3285, pruned_loss=0.08708, over 4702.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.288, pruned_loss=0.05811, over 937779.55 frames. ], batch size: 13, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:05:19,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=130282.66666666667, ans=0.125 +2024-07-28 08:05:29,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.63 vs. limit=22.5 +2024-07-28 08:05:35,894 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.61 vs. limit=15.0 +2024-07-28 08:05:39,034 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=15.0 +2024-07-28 08:05:39,718 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.88 vs. limit=22.5 +2024-07-28 08:05:40,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=130309.33333333333, ans=0.125 +2024-07-28 08:05:47,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=130322.66666666667, ans=0.07 +2024-07-28 08:05:47,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.04 vs. limit=15.0 +2024-07-28 08:05:49,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=130322.66666666667, ans=0.0 +2024-07-28 08:05:50,245 INFO [train.py:1114] (2/4) Epoch 10, batch 5750, loss[loss=0.2039, simple_loss=0.2869, pruned_loss=0.06042, over 4720.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2876, pruned_loss=0.05813, over 937920.44 frames. 
], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:05:51,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=130336.0, ans=0.125 +2024-07-28 08:05:53,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.73 vs. limit=15.0 +2024-07-28 08:05:55,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=15.0 +2024-07-28 08:05:57,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=130349.33333333333, ans=0.125 +2024-07-28 08:06:00,023 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.735e+01 6.185e+01 6.687e+01 9.991e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 08:06:06,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=130362.66666666667, ans=0.125 +2024-07-28 08:06:08,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.03 vs. limit=10.0 +2024-07-28 08:06:11,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=130376.0, ans=0.125 +2024-07-28 08:06:24,161 INFO [train.py:1114] (2/4) Epoch 10, batch 5800, loss[loss=0.2278, simple_loss=0.3074, pruned_loss=0.07411, over 4676.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2883, pruned_loss=0.05859, over 936770.22 frames. ], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:06:27,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=130402.66666666667, ans=0.125 +2024-07-28 08:06:27,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.59 vs. limit=6.0 +2024-07-28 08:06:28,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130402.66666666667, ans=0.125 +2024-07-28 08:06:29,894 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-07-28 08:06:38,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=130429.33333333333, ans=0.0 +2024-07-28 08:06:39,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-07-28 08:06:40,302 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:06:49,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.97 vs. 
limit=15.0 +2024-07-28 08:06:55,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=130456.0, ans=0.125 +2024-07-28 08:06:56,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=130456.0, ans=0.2 +2024-07-28 08:06:59,075 INFO [train.py:1114] (2/4) Epoch 10, batch 5850, loss[loss=0.2637, simple_loss=0.3573, pruned_loss=0.08506, over 4463.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2873, pruned_loss=0.0578, over 937028.85 frames. ], batch size: 21, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:01,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=130469.33333333333, ans=0.1 +2024-07-28 08:07:01,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=130469.33333333333, ans=0.1 +2024-07-28 08:07:09,043 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.762e+01 6.655e+01 7.927e+01 1.283e+02, threshold=1.331e+02, percent-clipped=2.0 +2024-07-28 08:07:26,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-28 08:07:29,212 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:07:33,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.78 vs. limit=15.0 +2024-07-28 08:07:34,278 INFO [train.py:1114] (2/4) Epoch 10, batch 5900, loss[loss=0.223, simple_loss=0.3073, pruned_loss=0.06936, over 4680.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2872, pruned_loss=0.0575, over 937439.00 frames. ], batch size: 15, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:42,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=130549.33333333333, ans=0.125 +2024-07-28 08:07:47,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=130562.66666666667, ans=0.125 +2024-07-28 08:07:53,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=130562.66666666667, ans=0.0 +2024-07-28 08:08:03,901 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-07-28 08:08:08,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.31 vs. limit=22.5 +2024-07-28 08:08:09,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.12 vs. limit=22.5 +2024-07-28 08:08:10,410 INFO [train.py:1114] (2/4) Epoch 10, batch 5950, loss[loss=0.2406, simple_loss=0.3144, pruned_loss=0.08338, over 4676.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.287, pruned_loss=0.05709, over 939428.39 frames. 
], batch size: 15, lr: 7.49e-03, grad_scale: 64.0 +2024-07-28 08:08:13,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130602.66666666667, ans=0.125 +2024-07-28 08:08:20,243 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.913e+01 5.602e+01 6.081e+01 6.794e+01 9.729e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 08:08:20,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130616.0, ans=0.1 +2024-07-28 08:08:30,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. limit=15.0 +2024-07-28 08:08:31,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=130642.66666666667, ans=10.0 +2024-07-28 08:08:45,948 INFO [train.py:1114] (2/4) Epoch 10, batch 6000, loss[loss=0.1853, simple_loss=0.2725, pruned_loss=0.04907, over 4237.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2861, pruned_loss=0.05683, over 936512.02 frames. ], batch size: 26, lr: 7.48e-03, grad_scale: 64.0 +2024-07-28 08:08:45,949 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 08:08:58,310 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.1713, simple_loss=0.2758, pruned_loss=0.03335, over 944034.00 frames. +2024-07-28 08:08:58,310 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 08:09:02,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=130669.33333333333, ans=0.025 +2024-07-28 08:09:15,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130696.0, ans=0.125 +2024-07-28 08:09:20,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=130709.33333333333, ans=0.125 +2024-07-28 08:09:28,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=130722.66666666667, ans=0.125 +2024-07-28 08:09:32,087 INFO [train.py:1114] (2/4) Epoch 10, batch 6050, loss[loss=0.2072, simple_loss=0.2771, pruned_loss=0.06863, over 4789.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2853, pruned_loss=0.05666, over 937775.62 frames. ], batch size: 12, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:09:33,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.14 vs. limit=15.0 +2024-07-28 08:09:42,573 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.819e+01 6.565e+01 7.638e+01 1.917e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-28 08:09:45,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=130762.66666666667, ans=0.0 +2024-07-28 08:09:46,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=130762.66666666667, ans=0.0 +2024-07-28 08:09:54,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. 
limit=15.0 +2024-07-28 08:09:58,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=130789.33333333333, ans=0.035 +2024-07-28 08:09:59,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=130789.33333333333, ans=0.04949747468305833 +2024-07-28 08:10:04,891 INFO [train.py:1114] (2/4) Epoch 10, batch 6100, loss[loss=0.1908, simple_loss=0.2822, pruned_loss=0.04973, over 4672.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2843, pruned_loss=0.05585, over 937336.13 frames. ], batch size: 15, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:16,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=130816.0, ans=0.0 +2024-07-28 08:10:17,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=130829.33333333333, ans=0.0 +2024-07-28 08:10:24,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=130842.66666666667, ans=0.125 +2024-07-28 08:10:25,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=130842.66666666667, ans=0.025 +2024-07-28 08:10:25,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=130842.66666666667, ans=0.125 +2024-07-28 08:10:31,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=130856.0, ans=10.0 +2024-07-28 08:10:35,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=130856.0, ans=0.125 +2024-07-28 08:10:38,565 INFO [train.py:1114] (2/4) Epoch 10, batch 6150, loss[loss=0.2982, simple_loss=0.3516, pruned_loss=0.1224, over 3501.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2854, pruned_loss=0.05661, over 936574.46 frames. ], batch size: 35, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:40,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=130869.33333333333, ans=0.025 +2024-07-28 08:10:44,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=130869.33333333333, ans=0.2 +2024-07-28 08:10:47,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=130882.66666666667, ans=0.0 +2024-07-28 08:10:49,591 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.804e+01 6.352e+01 7.086e+01 1.134e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 08:10:55,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=130896.0, ans=0.125 +2024-07-28 08:10:59,078 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:11:05,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=130922.66666666667, ans=0.125 +2024-07-28 08:11:12,357 INFO [train.py:1114] (2/4) Epoch 10, batch 6200, loss[loss=0.2112, simple_loss=0.3163, pruned_loss=0.05302, over 4745.00 frames. 
], tot_loss[loss=0.2005, simple_loss=0.2866, pruned_loss=0.05721, over 936032.74 frames. ], batch size: 14, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:11:13,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=130936.0, ans=0.0 +2024-07-28 08:11:19,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130949.33333333333, ans=0.125 +2024-07-28 08:11:26,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.79 vs. limit=15.0 +2024-07-28 08:11:29,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=130962.66666666667, ans=0.125 +2024-07-28 08:11:46,564 INFO [train.py:1114] (2/4) Epoch 10, batch 6250, loss[loss=0.2275, simple_loss=0.3096, pruned_loss=0.07269, over 4819.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2866, pruned_loss=0.05743, over 932853.67 frames. ], batch size: 14, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:11:50,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=131002.66666666667, ans=0.125 +2024-07-28 08:11:57,348 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.378e+01 5.979e+01 6.836e+01 8.576e+01 1.211e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 08:12:03,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=131029.33333333333, ans=0.2 +2024-07-28 08:12:05,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.51 vs. limit=15.0 +2024-07-28 08:12:18,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131056.0, ans=0.1 +2024-07-28 08:12:19,828 INFO [train.py:1114] (2/4) Epoch 10, batch 6300, loss[loss=0.1945, simple_loss=0.2754, pruned_loss=0.05683, over 4598.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2869, pruned_loss=0.05779, over 929532.81 frames. ], batch size: 10, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:27,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.04 vs. limit=15.0 +2024-07-28 08:12:29,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131082.66666666666, ans=0.125 +2024-07-28 08:12:35,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=131096.0, ans=0.0 +2024-07-28 08:12:54,183 INFO [train.py:1114] (2/4) Epoch 10, batch 6350, loss[loss=0.2389, simple_loss=0.3144, pruned_loss=0.08172, over 4479.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2861, pruned_loss=0.05682, over 933581.56 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:59,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=131136.0, ans=0.125 +2024-07-28 08:13:05,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. 
limit=15.0 +2024-07-28 08:13:07,208 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.570e+01 6.150e+01 7.348e+01 9.033e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 08:13:10,335 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0 +2024-07-28 08:13:11,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=131162.66666666666, ans=0.2 +2024-07-28 08:13:11,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=131162.66666666666, ans=0.05 +2024-07-28 08:13:21,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=131176.0, ans=0.2 +2024-07-28 08:13:29,576 INFO [train.py:1114] (2/4) Epoch 10, batch 6400, loss[loss=0.1834, simple_loss=0.2692, pruned_loss=0.04879, over 4630.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2856, pruned_loss=0.05683, over 934506.78 frames. ], batch size: 13, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:13:39,547 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:13:39,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=131216.0, ans=0.5 +2024-07-28 08:13:41,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.33 vs. limit=15.0 +2024-07-28 08:14:06,269 INFO [train.py:1114] (2/4) Epoch 10, batch 6450, loss[loss=0.2206, simple_loss=0.3111, pruned_loss=0.06504, over 4552.00 frames. ], tot_loss[loss=0.2, simple_loss=0.286, pruned_loss=0.05696, over 938371.90 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:14:11,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.74 vs. limit=12.0 +2024-07-28 08:14:13,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.89 vs. limit=12.0 +2024-07-28 08:14:16,714 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.892e+01 6.683e+01 7.805e+01 1.062e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 08:14:16,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=131282.66666666666, ans=0.125 +2024-07-28 08:14:20,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=131296.0, ans=0.2 +2024-07-28 08:14:22,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=131296.0, ans=0.025 +2024-07-28 08:14:24,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.90 vs. limit=15.0 +2024-07-28 08:14:29,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=131309.33333333334, ans=0.0 +2024-07-28 08:14:31,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.59 vs. 
limit=22.5 +2024-07-28 08:14:32,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=131322.66666666666, ans=0.035 +2024-07-28 08:14:32,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=131322.66666666666, ans=0.0 +2024-07-28 08:14:37,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=131322.66666666666, ans=0.2 +2024-07-28 08:14:38,895 INFO [train.py:1114] (2/4) Epoch 10, batch 6500, loss[loss=0.2658, simple_loss=0.3313, pruned_loss=0.1001, over 3177.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2853, pruned_loss=0.05639, over 939580.05 frames. ], batch size: 35, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:14:40,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=131336.0, ans=0.125 +2024-07-28 08:14:42,022 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.11 vs. limit=15.0 +2024-07-28 08:14:43,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=131336.0, ans=0.0 +2024-07-28 08:15:10,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131389.33333333334, ans=0.125 +2024-07-28 08:15:11,953 INFO [train.py:1114] (2/4) Epoch 10, batch 6550, loss[loss=0.1766, simple_loss=0.2518, pruned_loss=0.05073, over 4808.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2848, pruned_loss=0.0562, over 942405.55 frames. ], batch size: 11, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:15:14,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=131402.66666666666, ans=0.125 +2024-07-28 08:15:32,284 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.455e+01 5.898e+01 6.813e+01 1.235e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 08:15:44,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=131429.33333333334, ans=0.0 +2024-07-28 08:15:47,563 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.71 vs. limit=10.0 +2024-07-28 08:15:49,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=131442.66666666666, ans=0.2 +2024-07-28 08:16:03,313 INFO [train.py:1114] (2/4) Epoch 10, batch 6600, loss[loss=0.1902, simple_loss=0.2896, pruned_loss=0.04538, over 4930.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2851, pruned_loss=0.05637, over 944298.11 frames. ], batch size: 14, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:09,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131482.66666666666, ans=0.125 +2024-07-28 08:16:11,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. 
limit=6.0 +2024-07-28 08:16:18,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=131496.0, ans=0.125 +2024-07-28 08:16:21,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=131496.0, ans=0.125 +2024-07-28 08:16:21,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=131496.0, ans=0.125 +2024-07-28 08:16:26,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.31 vs. limit=15.0 +2024-07-28 08:16:35,149 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-28 08:16:37,430 INFO [train.py:1114] (2/4) Epoch 10, batch 6650, loss[loss=0.1922, simple_loss=0.2735, pruned_loss=0.05543, over 4635.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2851, pruned_loss=0.05666, over 943002.27 frames. ], batch size: 17, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:48,321 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.716e+01 6.391e+01 7.041e+01 1.048e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 08:17:11,449 INFO [train.py:1114] (2/4) Epoch 10, batch 6700, loss[loss=0.1913, simple_loss=0.2809, pruned_loss=0.05088, over 4752.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2848, pruned_loss=0.05634, over 942012.89 frames. ], batch size: 19, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:17:32,597 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:17:33,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=131629.33333333334, ans=0.07 +2024-07-28 08:17:36,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=131629.33333333334, ans=0.0 +2024-07-28 08:17:40,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=131642.66666666666, ans=0.125 +2024-07-28 08:17:45,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=131642.66666666666, ans=0.2 +2024-07-28 08:17:46,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=131656.0, ans=0.1 +2024-07-28 08:17:49,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.37 vs. limit=22.5 +2024-07-28 08:17:52,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=131656.0, ans=0.0 +2024-07-28 08:17:54,068 INFO [train.py:1114] (2/4) Epoch 10, batch 6750, loss[loss=0.1987, simple_loss=0.2892, pruned_loss=0.05411, over 4254.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2852, pruned_loss=0.0564, over 940330.31 frames. 
], batch size: 25, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:18:04,617 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.726e+01 6.534e+01 7.091e+01 1.095e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 08:18:08,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=131696.0, ans=0.025 +2024-07-28 08:18:09,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0 +2024-07-28 08:18:19,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=131709.33333333334, ans=0.2 +2024-07-28 08:18:23,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=131722.66666666666, ans=0.125 +2024-07-28 08:18:29,036 INFO [train.py:1114] (2/4) Epoch 10, batch 6800, loss[loss=0.1866, simple_loss=0.272, pruned_loss=0.05061, over 4638.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2858, pruned_loss=0.05659, over 938813.06 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:18:30,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.81 vs. limit=22.5 +2024-07-28 08:18:32,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 08:18:34,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.64 vs. limit=15.0 +2024-07-28 08:18:34,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=131736.0, ans=0.125 +2024-07-28 08:18:39,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131749.33333333334, ans=0.1 +2024-07-28 08:18:41,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=131762.66666666666, ans=0.0 +2024-07-28 08:18:47,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=15.0 +2024-07-28 08:18:49,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=131776.0, ans=0.0 +2024-07-28 08:18:52,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=131776.0, ans=0.2 +2024-07-28 08:18:56,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.04 vs. limit=6.0 +2024-07-28 08:19:01,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=131789.33333333334, ans=0.2 +2024-07-28 08:19:03,589 INFO [train.py:1114] (2/4) Epoch 10, batch 6850, loss[loss=0.2013, simple_loss=0.292, pruned_loss=0.0553, over 4694.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2857, pruned_loss=0.05662, over 940793.80 frames. 
], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:09,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=131802.66666666666, ans=0.125 +2024-07-28 08:19:09,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131802.66666666666, ans=0.125 +2024-07-28 08:19:13,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=131816.0, ans=0.125 +2024-07-28 08:19:13,973 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.736e+01 6.428e+01 7.691e+01 1.005e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 08:19:19,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=131829.33333333334, ans=0.025 +2024-07-28 08:19:26,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=131842.66666666666, ans=0.125 +2024-07-28 08:19:38,304 INFO [train.py:1114] (2/4) Epoch 10, batch 6900, loss[loss=0.1744, simple_loss=0.2608, pruned_loss=0.04405, over 4968.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2862, pruned_loss=0.05693, over 942968.54 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:51,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=131882.66666666666, ans=0.125 +2024-07-28 08:19:54,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=12.0 +2024-07-28 08:20:13,677 INFO [train.py:1114] (2/4) Epoch 10, batch 6950, loss[loss=0.1645, simple_loss=0.2473, pruned_loss=0.04082, over 4511.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2859, pruned_loss=0.05663, over 939974.04 frames. ], batch size: 10, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:16,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=15.0 +2024-07-28 08:20:18,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=131936.0, ans=0.0 +2024-07-28 08:20:19,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=131949.33333333334, ans=0.125 +2024-07-28 08:20:24,313 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+01 5.671e+01 6.271e+01 7.214e+01 1.195e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 08:20:30,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131962.66666666666, ans=0.1 +2024-07-28 08:20:31,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=131962.66666666666, ans=0.125 +2024-07-28 08:20:35,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=131976.0, ans=0.2 +2024-07-28 08:20:46,957 INFO [train.py:1114] (2/4) Epoch 10, batch 7000, loss[loss=0.2097, simple_loss=0.2985, pruned_loss=0.06046, over 4634.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2859, pruned_loss=0.05703, over 938970.82 frames. 
], batch size: 17, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:52,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=132016.0, ans=0.125 +2024-07-28 08:21:19,411 INFO [train.py:1114] (2/4) Epoch 10, batch 7050, loss[loss=0.2223, simple_loss=0.3143, pruned_loss=0.0652, over 4690.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2863, pruned_loss=0.05725, over 942065.48 frames. ], batch size: 19, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:23,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=132069.33333333334, ans=0.125 +2024-07-28 08:21:30,147 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.787e+01 6.450e+01 7.707e+01 1.222e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 08:21:32,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=132096.0, ans=0.025 +2024-07-28 08:21:33,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=132096.0, ans=0.025 +2024-07-28 08:21:38,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0 +2024-07-28 08:21:45,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=132122.66666666666, ans=0.125 +2024-07-28 08:21:52,623 INFO [train.py:1114] (2/4) Epoch 10, batch 7100, loss[loss=0.2175, simple_loss=0.3074, pruned_loss=0.06383, over 4809.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2869, pruned_loss=0.05774, over 937601.50 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:57,882 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:22:03,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.05 vs. limit=22.5 +2024-07-28 08:22:07,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=132162.66666666666, ans=10.0 +2024-07-28 08:22:10,088 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0 +2024-07-28 08:22:10,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=132162.66666666666, ans=0.0 +2024-07-28 08:22:11,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=132176.0, ans=0.125 +2024-07-28 08:22:12,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132176.0, ans=0.1 +2024-07-28 08:22:25,454 INFO [train.py:1114] (2/4) Epoch 10, batch 7150, loss[loss=0.2363, simple_loss=0.3127, pruned_loss=0.07999, over 4598.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2851, pruned_loss=0.0573, over 938673.12 frames. 
], batch size: 21, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:22:26,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132202.66666666666, ans=0.0 +2024-07-28 08:22:35,937 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.833e+01 6.423e+01 7.127e+01 1.033e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 08:22:47,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=132242.66666666666, ans=0.125 +2024-07-28 08:22:48,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=132242.66666666666, ans=0.0 +2024-07-28 08:22:54,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=132256.0, ans=0.5 +2024-07-28 08:22:55,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.37 vs. limit=22.5 +2024-07-28 08:22:58,316 INFO [train.py:1114] (2/4) Epoch 10, batch 7200, loss[loss=0.1909, simple_loss=0.2856, pruned_loss=0.04812, over 4803.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2857, pruned_loss=0.05744, over 938393.11 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:22:58,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=132269.33333333334, ans=0.0 +2024-07-28 08:22:59,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=132269.33333333334, ans=0.2 +2024-07-28 08:23:06,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=132269.33333333334, ans=0.125 +2024-07-28 08:23:06,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=132269.33333333334, ans=0.0 +2024-07-28 08:23:07,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=132269.33333333334, ans=0.125 +2024-07-28 08:23:15,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=132296.0, ans=0.2 +2024-07-28 08:23:26,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=132296.0, ans=0.0 +2024-07-28 08:23:38,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=132322.66666666666, ans=0.0 +2024-07-28 08:23:42,278 INFO [train.py:1114] (2/4) Epoch 10, batch 7250, loss[loss=0.1828, simple_loss=0.2598, pruned_loss=0.05287, over 4849.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2855, pruned_loss=0.05748, over 939824.30 frames. 
], batch size: 12, lr: 7.44e-03, grad_scale: 32.0
+2024-07-28 08:23:48,231 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:23:48,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=132349.33333333334, ans=0.125
+2024-07-28 08:23:52,714 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.603e+01 6.257e+01 7.383e+01 1.105e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 08:23:59,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=132362.66666666666, ans=0.025
+2024-07-28 08:24:00,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.18 vs. limit=15.0
+2024-07-28 08:24:02,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0
+2024-07-28 08:24:02,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=132376.0, ans=0.125
+2024-07-28 08:24:13,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=132389.33333333334, ans=0.2
+2024-07-28 08:24:15,236 INFO [train.py:1114] (2/4) Epoch 10, batch 7300, loss[loss=0.1915, simple_loss=0.2803, pruned_loss=0.05131, over 4841.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2841, pruned_loss=0.05663, over 939794.07 frames. ], batch size: 12, lr: 7.43e-03, grad_scale: 32.0
+2024-07-28 08:24:30,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. limit=15.0
+2024-07-28 08:24:34,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.77 vs. limit=22.5
+2024-07-28 08:24:38,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=132442.66666666666, ans=0.025
+2024-07-28 08:24:39,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=132442.66666666666, ans=0.125
+2024-07-28 08:24:48,300 INFO [train.py:1114] (2/4) Epoch 10, batch 7350, loss[loss=0.2073, simple_loss=0.2949, pruned_loss=0.0598, over 4639.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2848, pruned_loss=0.05692, over 939316.31 frames. ], batch size: 12, lr: 7.43e-03, grad_scale: 32.0
+2024-07-28 08:24:58,620 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.561e+01 5.989e+01 6.823e+01 9.799e+01, threshold=1.198e+02, percent-clipped=0.0
+2024-07-28 08:25:00,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132496.0, ans=0.125
+2024-07-28 08:25:06,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=132496.0, ans=0.125
+2024-07-28 08:25:07,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=132509.33333333334, ans=0.125
+2024-07-28 08:25:20,775 INFO [train.py:1114] (2/4) Epoch 10, batch 7400, loss[loss=0.1821, simple_loss=0.2707, pruned_loss=0.04673, over 4697.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2851, pruned_loss=0.05662, over 940429.96 frames. ], batch size: 13, lr: 7.43e-03, grad_scale: 32.0
+2024-07-28 08:25:21,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=132536.0, ans=0.95
+2024-07-28 08:25:22,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132536.0, ans=0.1
+2024-07-28 08:25:26,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=132536.0, ans=0.125
+2024-07-28 08:25:35,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.56 vs. limit=22.5
+2024-07-28 08:25:35,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=132562.66666666666, ans=0.2
+2024-07-28 08:25:44,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.19 vs. limit=22.5
+2024-07-28 08:25:53,283 INFO [train.py:1114] (2/4) Epoch 10, batch 7450, loss[loss=0.1928, simple_loss=0.2686, pruned_loss=0.05845, over 4599.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2846, pruned_loss=0.05663, over 938043.53 frames. ], batch size: 11, lr: 7.43e-03, grad_scale: 32.0
+2024-07-28 08:25:59,808 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:26:05,099 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.617e+01 6.160e+01 7.093e+01 9.986e+01, threshold=1.232e+02, percent-clipped=0.0
+2024-07-28 08:26:08,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=132629.33333333334, ans=0.0
+2024-07-28 08:26:10,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=132629.33333333334, ans=0.05
+2024-07-28 08:26:22,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=132656.0, ans=0.125
+2024-07-28 08:26:27,378 INFO [train.py:1114] (2/4) Epoch 10, batch 7500, loss[loss=0.2606, simple_loss=0.3099, pruned_loss=0.1057, over 3510.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2842, pruned_loss=0.0564, over 936812.33 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0
+2024-07-28 08:26:40,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=132682.66666666666, ans=0.125
+2024-07-28 08:26:53,705 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0
+2024-07-28 08:27:00,936 INFO [train.py:1114] (2/4) Epoch 10, batch 7550, loss[loss=0.1998, simple_loss=0.2797, pruned_loss=0.05991, over 4618.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2862, pruned_loss=0.05736, over 936509.56 frames. ], batch size: 17, lr: 7.43e-03, grad_scale: 32.0
+2024-07-28 08:27:03,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. limit=15.0
+2024-07-28 08:27:09,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=132749.33333333334, ans=0.2
+2024-07-28 08:27:11,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.843e+01 6.500e+01 7.580e+01 1.303e+02, threshold=1.300e+02, percent-clipped=2.0
+2024-07-28 08:27:12,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132749.33333333334, ans=0.1
+2024-07-28 08:27:28,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=132776.0, ans=0.04949747468305833
+2024-07-28 08:27:45,644 INFO [train.py:1114] (2/4) Epoch 10, batch 7600, loss[loss=0.2157, simple_loss=0.2952, pruned_loss=0.06814, over 4813.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2854, pruned_loss=0.05692, over 937815.92 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:27:59,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132829.33333333334, ans=0.1
+2024-07-28 08:28:00,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=132829.33333333334, ans=0.0
+2024-07-28 08:28:03,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=132829.33333333334, ans=0.125
+2024-07-28 08:28:10,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=132842.66666666666, ans=0.125
+2024-07-28 08:28:19,598 INFO [train.py:1114] (2/4) Epoch 10, batch 7650, loss[loss=0.193, simple_loss=0.2727, pruned_loss=0.05668, over 4939.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2866, pruned_loss=0.05799, over 936566.52 frames. ], batch size: 12, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:28:29,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132882.66666666666, ans=0.1
+2024-07-28 08:28:30,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.574e+01 6.113e+01 7.353e+01 1.031e+02, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 08:28:38,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132896.0, ans=0.1
+2024-07-28 08:28:41,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132909.33333333334, ans=0.1
+2024-07-28 08:28:46,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=132922.66666666666, ans=0.125
+2024-07-28 08:28:55,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=132922.66666666666, ans=0.125
+2024-07-28 08:28:58,470 INFO [train.py:1114] (2/4) Epoch 10, batch 7700, loss[loss=0.1828, simple_loss=0.2761, pruned_loss=0.04475, over 4689.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2866, pruned_loss=0.05802, over 934032.73 frames. ], batch size: 13, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:29:04,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=132949.33333333334, ans=0.0
+2024-07-28 08:29:19,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=132962.66666666666, ans=0.125
+2024-07-28 08:29:22,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=132976.0, ans=0.125
+2024-07-28 08:29:35,860 INFO [train.py:1114] (2/4) Epoch 10, batch 7750, loss[loss=0.2053, simple_loss=0.3048, pruned_loss=0.05289, over 4927.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2879, pruned_loss=0.05829, over 935136.55 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:29:44,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=133016.0, ans=0.0
+2024-07-28 08:29:46,091 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.644e+01 6.328e+01 7.366e+01 9.654e+01, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 08:30:02,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=133056.0, ans=0.1
+2024-07-28 08:30:08,512 INFO [train.py:1114] (2/4) Epoch 10, batch 7800, loss[loss=0.1939, simple_loss=0.2895, pruned_loss=0.04921, over 4671.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2887, pruned_loss=0.05873, over 936812.29 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:30:09,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=133069.33333333334, ans=0.125
+2024-07-28 08:30:20,033 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:30:24,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133096.0, ans=0.1
+2024-07-28 08:30:25,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133096.0, ans=0.1
+2024-07-28 08:30:35,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=133122.66666666666, ans=0.025
+2024-07-28 08:30:48,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=133122.66666666666, ans=0.07
+2024-07-28 08:30:50,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=133136.0, ans=0.125
+2024-07-28 08:30:51,380 INFO [train.py:1114] (2/4) Epoch 10, batch 7850, loss[loss=0.1792, simple_loss=0.2557, pruned_loss=0.05132, over 4508.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2872, pruned_loss=0.05799, over 935429.51 frames. ], batch size: 10, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:31:11,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=133136.0, ans=0.125
+2024-07-28 08:31:13,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=133149.33333333334, ans=10.0
+2024-07-28 08:31:14,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.45 vs. limit=15.0
+2024-07-28 08:31:16,885 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.726e+01 6.171e+01 6.913e+01 1.107e+02, threshold=1.234e+02, percent-clipped=0.0
+2024-07-28 08:31:21,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133162.66666666666, ans=0.0
+2024-07-28 08:31:22,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=133162.66666666666, ans=0.2
+2024-07-28 08:31:38,774 INFO [train.py:1114] (2/4) Epoch 10, batch 7900, loss[loss=0.205, simple_loss=0.3036, pruned_loss=0.05319, over 4877.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2874, pruned_loss=0.05791, over 932717.30 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:31:38,804 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:31:38,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133202.66666666666, ans=0.1
+2024-07-28 08:31:57,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=133216.0, ans=0.125
+2024-07-28 08:32:01,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133229.33333333334, ans=0.1
+2024-07-28 08:32:10,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133229.33333333334, ans=0.1
+2024-07-28 08:32:10,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133229.33333333334, ans=0.125
+2024-07-28 08:32:12,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=133229.33333333334, ans=0.0
+2024-07-28 08:32:15,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=133242.66666666666, ans=0.2
+2024-07-28 08:32:26,511 INFO [train.py:1114] (2/4) Epoch 10, batch 7950, loss[loss=0.2777, simple_loss=0.3525, pruned_loss=0.1014, over 3822.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2875, pruned_loss=0.05777, over 935241.84 frames. ], batch size: 37, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:32:28,491 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:32:31,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133269.33333333334, ans=0.125
+2024-07-28 08:32:37,007 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.725e+01 6.303e+01 6.935e+01 1.035e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 08:32:42,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=133296.0, ans=0.025
+2024-07-28 08:32:42,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133296.0, ans=0.1
+2024-07-28 08:32:46,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=15.0
+2024-07-28 08:32:51,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=133309.33333333334, ans=0.0
+2024-07-28 08:32:54,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=133322.66666666666, ans=0.025
+2024-07-28 08:33:00,923 INFO [train.py:1114] (2/4) Epoch 10, batch 8000, loss[loss=0.1936, simple_loss=0.2639, pruned_loss=0.06168, over 4626.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2856, pruned_loss=0.0571, over 934466.23 frames. ], batch size: 11, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:33:25,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=133389.33333333334, ans=0.125
+2024-07-28 08:33:26,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=133389.33333333334, ans=0.0
+2024-07-28 08:33:28,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.67 vs. limit=15.0
+2024-07-28 08:33:30,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133389.33333333334, ans=0.125
+2024-07-28 08:33:32,893 INFO [train.py:1114] (2/4) Epoch 10, batch 8050, loss[loss=0.1783, simple_loss=0.2765, pruned_loss=0.04005, over 4806.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2865, pruned_loss=0.05722, over 934205.84 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 64.0
+2024-07-28 08:33:34,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=133402.66666666666, ans=0.0
+2024-07-28 08:33:43,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.682e+01 6.216e+01 7.101e+01 1.040e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 08:33:43,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.40 vs. limit=22.5
+2024-07-28 08:33:55,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.03 vs. limit=22.5
+2024-07-28 08:34:02,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=133456.0, ans=0.0
+2024-07-28 08:34:06,168 INFO [train.py:1114] (2/4) Epoch 10, batch 8100, loss[loss=0.2114, simple_loss=0.2943, pruned_loss=0.06424, over 4804.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2873, pruned_loss=0.05738, over 933692.69 frames. ], batch size: 15, lr: 7.41e-03, grad_scale: 64.0
+2024-07-28 08:34:29,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=133509.33333333334, ans=0.0
+2024-07-28 08:34:30,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=133509.33333333334, ans=0.0
+2024-07-28 08:34:38,561 INFO [train.py:1114] (2/4) Epoch 10, batch 8150, loss[loss=0.236, simple_loss=0.3251, pruned_loss=0.07342, over 4800.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2872, pruned_loss=0.05726, over 937206.97 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:34:43,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=133536.0, ans=0.05
+2024-07-28 08:34:48,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.775e+01 6.372e+01 7.050e+01 1.046e+02, threshold=1.274e+02, percent-clipped=0.0
+2024-07-28 08:34:53,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=133562.66666666666, ans=0.0
+2024-07-28 08:35:00,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0
+2024-07-28 08:35:11,101 INFO [train.py:1114] (2/4) Epoch 10, batch 8200, loss[loss=0.2362, simple_loss=0.3217, pruned_loss=0.07535, over 4791.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.288, pruned_loss=0.05754, over 938356.02 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:35:21,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=133616.0, ans=0.0
+2024-07-28 08:35:22,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=133616.0, ans=0.125
+2024-07-28 08:35:28,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=133629.33333333334, ans=0.2
+2024-07-28 08:35:30,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=133642.66666666666, ans=0.0
+2024-07-28 08:35:34,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133642.66666666666, ans=0.1
+2024-07-28 08:35:42,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133656.0, ans=0.125
+2024-07-28 08:35:53,137 INFO [train.py:1114] (2/4) Epoch 10, batch 8250, loss[loss=0.1925, simple_loss=0.2806, pruned_loss=0.0522, over 4894.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2873, pruned_loss=0.05739, over 938835.43 frames. ], batch size: 13, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:36:10,118 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.694e+01 6.323e+01 7.329e+01 1.024e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-28 08:36:10,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=133682.66666666666, ans=0.0
+2024-07-28 08:36:32,392 INFO [train.py:1114] (2/4) Epoch 10, batch 8300, loss[loss=0.1796, simple_loss=0.2778, pruned_loss=0.04072, over 4899.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2878, pruned_loss=0.05714, over 938791.54 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:36:33,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=133736.0, ans=0.0
+2024-07-28 08:36:37,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=133736.0, ans=0.125
+2024-07-28 08:36:50,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.54 vs. limit=15.0
+2024-07-28 08:36:51,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=133776.0, ans=0.125
+2024-07-28 08:36:51,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=133776.0, ans=0.125
+2024-07-28 08:36:52,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=133776.0, ans=0.025
+2024-07-28 08:37:00,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=133789.33333333334, ans=0.125
+2024-07-28 08:37:05,227 INFO [train.py:1114] (2/4) Epoch 10, batch 8350, loss[loss=0.2013, simple_loss=0.302, pruned_loss=0.05026, over 4802.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.288, pruned_loss=0.05691, over 941636.46 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:37:05,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=133802.66666666666, ans=0.125
+2024-07-28 08:37:10,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=133802.66666666666, ans=0.05
+2024-07-28 08:37:17,104 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.721e+01 6.167e+01 6.839e+01 1.069e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 08:37:19,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.67 vs. limit=15.0
+2024-07-28 08:37:20,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=133829.33333333334, ans=0.2
+2024-07-28 08:37:26,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=133842.66666666666, ans=0.0
+2024-07-28 08:37:38,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=133856.0, ans=0.09899494936611666
+2024-07-28 08:37:44,277 INFO [train.py:1114] (2/4) Epoch 10, batch 8400, loss[loss=0.1978, simple_loss=0.2889, pruned_loss=0.05331, over 4775.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2881, pruned_loss=0.05719, over 940254.11 frames. ], batch size: 12, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:37:51,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=133882.66666666666, ans=0.125
+2024-07-28 08:38:13,132 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.92 vs. limit=22.5
+2024-07-28 08:38:18,209 INFO [train.py:1114] (2/4) Epoch 10, batch 8450, loss[loss=0.2256, simple_loss=0.3163, pruned_loss=0.06746, over 4795.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2888, pruned_loss=0.05747, over 938789.30 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:38:23,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.70 vs. limit=10.0
+2024-07-28 08:38:24,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=133949.33333333334, ans=0.0
+2024-07-28 08:38:28,299 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.805e+01 6.479e+01 7.666e+01 1.044e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 08:38:34,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=133962.66666666666, ans=0.09899494936611666
+2024-07-28 08:38:35,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=133962.66666666666, ans=0.125
+2024-07-28 08:38:45,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.53 vs. limit=22.5
+2024-07-28 08:38:57,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=133989.33333333334, ans=0.2
+2024-07-28 08:39:02,046 INFO [train.py:1114] (2/4) Epoch 10, batch 8500, loss[loss=0.1974, simple_loss=0.2866, pruned_loss=0.0541, over 4616.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2879, pruned_loss=0.05713, over 938521.00 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:39:02,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.27 vs. limit=22.5
+2024-07-28 08:39:29,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=134016.0, ans=0.125
+2024-07-28 08:39:29,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=134016.0, ans=0.125
+2024-07-28 08:39:35,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134029.33333333334, ans=0.0
+2024-07-28 08:39:56,590 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.75 vs. limit=15.0
+2024-07-28 08:40:24,354 INFO [train.py:1114] (2/4) Epoch 10, batch 8550, loss[loss=0.1594, simple_loss=0.2409, pruned_loss=0.03897, over 4806.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2875, pruned_loss=0.05719, over 939322.31 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:40:44,212 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.946e+01 6.615e+01 7.789e+01 1.197e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-28 08:40:45,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=134082.66666666666, ans=0.125
+2024-07-28 08:40:48,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=134096.0, ans=0.125
+2024-07-28 08:40:54,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=134109.33333333334, ans=0.0
+2024-07-28 08:41:19,432 INFO [train.py:1114] (2/4) Epoch 10, batch 8600, loss[loss=0.1892, simple_loss=0.2859, pruned_loss=0.04632, over 4812.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2864, pruned_loss=0.05649, over 938978.99 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:41:28,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=134149.33333333334, ans=0.025
+2024-07-28 08:41:36,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=134162.66666666666, ans=0.125
+2024-07-28 08:41:47,467 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:41:48,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=134189.33333333334, ans=0.125
+2024-07-28 08:41:50,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=134189.33333333334, ans=0.2
+2024-07-28 08:41:55,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0
+2024-07-28 08:41:59,312 INFO [train.py:1114] (2/4) Epoch 10, batch 8650, loss[loss=0.2176, simple_loss=0.3003, pruned_loss=0.06748, over 4908.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2862, pruned_loss=0.05675, over 940174.21 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:42:03,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=134202.66666666666, ans=0.2
+2024-07-28 08:42:09,610 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.967e+01 5.912e+01 6.591e+01 7.425e+01 1.041e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-28 08:42:09,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=134216.0, ans=0.07
+2024-07-28 08:42:12,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=134229.33333333334, ans=0.0
+2024-07-28 08:42:34,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.90 vs. limit=15.0
+2024-07-28 08:42:39,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134256.0, ans=0.125
+2024-07-28 08:42:42,709 INFO [train.py:1114] (2/4) Epoch 10, batch 8700, loss[loss=0.1863, simple_loss=0.2805, pruned_loss=0.046, over 4747.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2866, pruned_loss=0.05732, over 937782.07 frames. ], batch size: 13, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:05,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134309.33333333334, ans=0.1
+2024-07-28 08:43:08,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=134322.66666666666, ans=0.04949747468305833
+2024-07-28 08:43:09,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=134322.66666666666, ans=0.0
+2024-07-28 08:43:09,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=134322.66666666666, ans=0.125
+2024-07-28 08:43:14,599 INFO [train.py:1114] (2/4) Epoch 10, batch 8750, loss[loss=0.2211, simple_loss=0.3074, pruned_loss=0.0674, over 4684.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2853, pruned_loss=0.05679, over 936337.16 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:23,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=134349.33333333334, ans=0.125
+2024-07-28 08:43:24,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.734e+01 6.452e+01 7.346e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-28 08:43:25,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=134349.33333333334, ans=0.0
+2024-07-28 08:43:31,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=134362.66666666666, ans=0.125
+2024-07-28 08:43:37,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134376.0, ans=0.0
+2024-07-28 08:43:48,714 INFO [train.py:1114] (2/4) Epoch 10, batch 8800, loss[loss=0.2134, simple_loss=0.3081, pruned_loss=0.05935, over 4931.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2863, pruned_loss=0.05733, over 937467.03 frames. ], batch size: 14, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:58,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=134416.0, ans=0.125
+2024-07-28 08:44:01,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=134429.33333333334, ans=0.2
+2024-07-28 08:44:02,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=134429.33333333334, ans=0.0
+2024-07-28 08:44:03,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=134429.33333333334, ans=0.125
+2024-07-28 08:44:13,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=134442.66666666666, ans=0.125
+2024-07-28 08:44:19,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=134456.0, ans=0.0
+2024-07-28 08:44:22,023 INFO [train.py:1114] (2/4) Epoch 10, batch 8850, loss[loss=0.2295, simple_loss=0.3144, pruned_loss=0.07229, over 4566.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2866, pruned_loss=0.05792, over 931840.63 frames. ], batch size: 21, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:44:34,666 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.576e+01 6.226e+01 7.150e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 08:44:34,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=134482.66666666666, ans=0.125
+2024-07-28 08:44:40,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134496.0, ans=0.1
+2024-07-28 08:44:50,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.29 vs. limit=15.0
+2024-07-28 08:44:52,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=134522.66666666666, ans=0.0
+2024-07-28 08:44:55,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=134522.66666666666, ans=0.5
+2024-07-28 08:44:56,774 INFO [train.py:1114] (2/4) Epoch 10, batch 8900, loss[loss=0.1861, simple_loss=0.2648, pruned_loss=0.05377, over 4941.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2878, pruned_loss=0.05866, over 930313.44 frames. ], batch size: 12, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:44:57,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0
+2024-07-28 08:45:00,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=12.0
+2024-07-28 08:45:11,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=134549.33333333334, ans=0.05
+2024-07-28 08:45:34,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=134576.0, ans=0.0
+2024-07-28 08:45:41,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=134589.33333333334, ans=0.125
+2024-07-28 08:45:43,838 INFO [train.py:1114] (2/4) Epoch 10, batch 8950, loss[loss=0.2022, simple_loss=0.2932, pruned_loss=0.05563, over 4534.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2875, pruned_loss=0.05816, over 930854.72 frames. ], batch size: 21, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:45:44,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.01 vs. limit=10.0
+2024-07-28 08:45:53,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=134616.0, ans=0.125
+2024-07-28 08:45:53,854 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.674e+01 5.627e+01 6.283e+01 7.444e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 08:45:54,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.03 vs. limit=15.0
+2024-07-28 08:46:09,317 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:46:15,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=134669.33333333334, ans=0.125
+2024-07-28 08:46:15,697 INFO [train.py:1114] (2/4) Epoch 10, batch 9000, loss[loss=0.1829, simple_loss=0.2771, pruned_loss=0.04436, over 4647.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2864, pruned_loss=0.05802, over 933824.84 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:46:15,698 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 08:46:28,323 INFO [train.py:1146] (2/4) Epoch 10, validation: loss=0.1719, simple_loss=0.2766, pruned_loss=0.0336, over 944034.00 frames.
+2024-07-28 08:46:28,323 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 08:46:44,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0
+2024-07-28 08:46:45,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134696.0, ans=0.1
+2024-07-28 08:46:45,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=134696.0, ans=0.125
+2024-07-28 08:46:49,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=134709.33333333334, ans=0.125
+2024-07-28 08:47:00,431 INFO [train.py:1114] (2/4) Epoch 10, batch 9050, loss[loss=0.1567, simple_loss=0.2333, pruned_loss=0.04005, over 4538.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2862, pruned_loss=0.05798, over 934374.16 frames. ], batch size: 10, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:47:10,430 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 6.275e+01 7.546e+01 8.998e+01 1.332e+02, threshold=1.509e+02, percent-clipped=1.0
+2024-07-28 08:47:17,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=134762.66666666666, ans=0.2
+2024-07-28 08:47:24,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=134776.0, ans=0.025
+2024-07-28 08:47:28,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=134789.33333333334, ans=0.0
+2024-07-28 08:47:31,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=134789.33333333334, ans=0.125
+2024-07-28 08:47:32,973 INFO [train.py:1114] (2/4) Epoch 10, batch 9100, loss[loss=0.1943, simple_loss=0.2918, pruned_loss=0.04841, over 4925.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2852, pruned_loss=0.05733, over 937009.43 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:47:35,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0
+2024-07-28 08:47:40,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=134816.0, ans=0.125
+2024-07-28 08:47:46,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=134829.33333333334, ans=0.025
+2024-07-28 08:47:52,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134842.66666666666, ans=0.1
+2024-07-28 08:48:06,625 INFO [train.py:1114] (2/4) Epoch 10, batch 9150, loss[loss=0.2105, simple_loss=0.3186, pruned_loss=0.05127, over 4809.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2861, pruned_loss=0.05779, over 936053.89 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:48:08,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=134869.33333333334, ans=0.125
+2024-07-28 08:48:16,569 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.926e+01 6.660e+01 7.545e+01 1.146e+02, threshold=1.332e+02, percent-clipped=0.0
+2024-07-28 08:48:23,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134896.0, ans=0.0
+2024-07-28 08:48:25,945 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:48:27,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=134909.33333333334, ans=0.1
+2024-07-28 08:48:38,489 INFO [train.py:1114] (2/4) Epoch 10, batch 9200, loss[loss=0.1594, simple_loss=0.2531, pruned_loss=0.0328, over 4851.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2856, pruned_loss=0.05716, over 937644.10 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:48:40,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134936.0, ans=0.125
+2024-07-28 08:48:47,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=134949.33333333334, ans=0.0
+2024-07-28 08:48:50,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=134962.66666666666, ans=0.125
+2024-07-28 08:49:04,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0
+2024-07-28 08:49:08,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=134989.33333333334, ans=0.0
+2024-07-28 08:49:10,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=134989.33333333334, ans=0.04949747468305833
+2024-07-28 08:49:11,334 INFO [train.py:1114] (2/4) Epoch 10, batch 9250, loss[loss=0.1857, simple_loss=0.2865, pruned_loss=0.04246, over 4638.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2846, pruned_loss=0.0564, over 938454.29 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:49:16,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=135002.66666666666, ans=0.125
+2024-07-28 08:49:17,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=135016.0, ans=0.015
+2024-07-28 08:49:21,514 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.719e+01 6.236e+01 6.936e+01 9.849e+01, threshold=1.247e+02, percent-clipped=0.0
+2024-07-28 08:49:28,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135029.33333333334, ans=0.125
+2024-07-28 08:49:39,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135056.0, ans=0.1
+2024-07-28 08:49:42,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=135056.0, ans=0.125
+2024-07-28 08:49:43,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.41 vs. limit=15.0
+2024-07-28 08:49:46,098 INFO [train.py:1114] (2/4) Epoch 10, batch 9300, loss[loss=0.174, simple_loss=0.2549, pruned_loss=0.04655, over 4782.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2842, pruned_loss=0.0563, over 938268.13 frames. ], batch size: 12, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:49:48,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=135069.33333333334, ans=0.07
+2024-07-28 08:49:49,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.87 vs. limit=12.0
+2024-07-28 08:49:53,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=135082.66666666666, ans=0.125
+2024-07-28 08:49:55,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.10 vs. limit=15.0
+2024-07-28 08:50:15,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0
+2024-07-28 08:50:15,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.52 vs. limit=15.0
+2024-07-28 08:50:17,993 INFO [train.py:1114] (2/4) Epoch 10, batch 9350, loss[loss=0.1599, simple_loss=0.2422, pruned_loss=0.03881, over 4791.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2845, pruned_loss=0.05651, over 935535.05 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:50:29,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.89 vs. limit=10.0
+2024-07-28 08:50:35,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135149.33333333334, ans=0.0
+2024-07-28 08:50:36,248 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.628e+01 6.269e+01 7.143e+01 1.097e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-28 08:50:39,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=135162.66666666666, ans=0.2
+2024-07-28 08:50:41,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=135162.66666666666, ans=0.125
+2024-07-28 08:50:44,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0
+2024-07-28 08:50:46,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=135176.0, ans=0.0
+2024-07-28 08:50:58,072 INFO [train.py:1114] (2/4) Epoch 10, batch 9400, loss[loss=0.174, simple_loss=0.2739, pruned_loss=0.03708, over 4707.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.285, pruned_loss=0.05668, over 933362.00 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:51:01,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135202.66666666666, ans=0.1
+2024-07-28 08:51:13,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=135229.33333333334, ans=0.125
+2024-07-28 08:51:15,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=135229.33333333334, ans=0.2
+2024-07-28 08:51:24,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.37 vs. limit=22.5
+2024-07-28 08:51:28,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135256.0, ans=0.1
+2024-07-28 08:51:29,448 INFO [train.py:1114] (2/4) Epoch 10, batch 9450, loss[loss=0.1804, simple_loss=0.2507, pruned_loss=0.05501, over 4805.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2855, pruned_loss=0.05665, over 932398.13 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 32.0
+2024-07-28 08:51:29,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=135269.33333333334, ans=0.125
+2024-07-28 08:51:33,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=135269.33333333334, ans=0.0
+2024-07-28 08:51:35,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135282.66666666666, ans=0.1
+2024-07-28 08:51:37,729 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:51:39,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0
+2024-07-28 08:51:39,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.423e+01 5.630e+01 6.223e+01 7.000e+01 1.011e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 08:51:42,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135296.0, ans=0.1
+2024-07-28 08:51:44,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.05 vs. limit=15.0
+2024-07-28 08:51:48,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=135309.33333333334, ans=0.125
+2024-07-28 08:51:59,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0
+2024-07-28 08:52:01,575 INFO [train.py:1114] (2/4) Epoch 10, batch 9500, loss[loss=0.1905, simple_loss=0.2751, pruned_loss=0.05293, over 4702.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2855, pruned_loss=0.0564, over 934635.95 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:52:15,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.18 vs. limit=15.0
+2024-07-28 08:52:19,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135362.66666666666, ans=0.1
+2024-07-28 08:52:27,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135389.33333333334, ans=0.125
+2024-07-28 08:52:34,231 INFO [train.py:1114] (2/4) Epoch 10, batch 9550, loss[loss=0.1895, simple_loss=0.2785, pruned_loss=0.05025, over 4774.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2859, pruned_loss=0.05665, over 931609.24 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 16.0
+2024-07-28 08:52:48,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.16 vs. limit=15.0
+2024-07-28 08:52:48,487 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.659e+01 6.121e+01 6.852e+01 1.035e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 08:52:52,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=135429.33333333334, ans=0.025
+2024-07-28 08:52:56,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=135442.66666666666, ans=0.125
+2024-07-28 08:52:57,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=135442.66666666666, ans=0.0
+2024-07-28 08:52:59,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=135442.66666666666, ans=0.125
+2024-07-28 08:52:59,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=135442.66666666666, ans=0.125
+2024-07-28 08:53:03,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.45 vs. limit=22.5
+2024-07-28 08:53:09,502 INFO [train.py:1114] (2/4) Epoch 10, batch 9600, loss[loss=0.2076, simple_loss=0.2842, pruned_loss=0.06552, over 3461.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2855, pruned_loss=0.05652, over 930709.12 frames. ], batch size: 35, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:53:09,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=135469.33333333334, ans=0.0
+2024-07-28 08:53:19,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135482.66666666666, ans=0.125
+2024-07-28 08:53:24,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0
+2024-07-28 08:53:28,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=135496.0, ans=0.125
+2024-07-28 08:53:35,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=135509.33333333334, ans=0.05
+2024-07-28 08:53:36,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=135509.33333333334, ans=15.0
+2024-07-28 08:53:39,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135522.66666666666, ans=0.125
+2024-07-28 08:53:43,548 INFO [train.py:1114] (2/4) Epoch 10, batch 9650, loss[loss=0.2147, simple_loss=0.3006, pruned_loss=0.06444, over 4833.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2864, pruned_loss=0.05692, over 926822.98 frames. ], batch size: 16, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:53:49,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135549.33333333334, ans=0.1
+2024-07-28 08:53:50,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=135549.33333333334, ans=0.125
+2024-07-28 08:53:51,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=135549.33333333334, ans=0.125
+2024-07-28 08:53:54,688 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.653e+01 6.117e+01 7.383e+01 9.422e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 08:53:56,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=135562.66666666666, ans=0.04949747468305833
+2024-07-28 08:54:01,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=135562.66666666666, ans=0.2
+2024-07-28 08:54:04,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.49 vs. limit=12.0
+2024-07-28 08:54:06,844 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.79 vs. limit=15.0
+2024-07-28 08:54:07,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=135589.33333333334, ans=0.125
+2024-07-28 08:54:09,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=135589.33333333334, ans=0.2
+2024-07-28 08:54:11,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135589.33333333334, ans=0.125
+2024-07-28 08:54:12,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135589.33333333334, ans=0.125
+2024-07-28 08:54:15,217 INFO [train.py:1114] (2/4) Epoch 10, batch 9700, loss[loss=0.2251, simple_loss=0.299, pruned_loss=0.0756, over 4100.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2866, pruned_loss=0.05737, over 924543.15 frames. ], batch size: 25, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:54:18,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=135602.66666666666, ans=0.0
+2024-07-28 08:54:34,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0
+2024-07-28 08:54:51,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=135656.0, ans=0.125
+2024-07-28 08:54:54,874 INFO [train.py:1114] (2/4) Epoch 10, batch 9750, loss[loss=0.2287, simple_loss=0.317, pruned_loss=0.07019, over 4684.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2859, pruned_loss=0.0572, over 925508.62 frames. ], batch size: 15, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:54:55,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135669.33333333334, ans=0.1
+2024-07-28 08:55:09,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.808e+01 6.506e+01 7.716e+01 1.140e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 08:55:09,970 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:55:14,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.72 vs. limit=22.5
+2024-07-28 08:55:20,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0
+2024-07-28 08:55:26,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.47 vs. limit=22.5
+2024-07-28 08:55:28,985 INFO [train.py:1114] (2/4) Epoch 10, batch 9800, loss[loss=0.1938, simple_loss=0.2824, pruned_loss=0.05255, over 4694.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2845, pruned_loss=0.05646, over 924757.10 frames. ], batch size: 12, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:55:29,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=135736.0, ans=0.125
+2024-07-28 08:55:30,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=135736.0, ans=0.0
+2024-07-28 08:55:35,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=135749.33333333334, ans=0.2
+2024-07-28 08:55:46,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135762.66666666666, ans=0.1
+2024-07-28 08:55:46,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=135762.66666666666, ans=0.2
+2024-07-28 08:55:47,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.00 vs. limit=15.0
+2024-07-28 08:55:47,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=135762.66666666666, ans=0.125
+2024-07-28 08:56:01,761 INFO [train.py:1114] (2/4) Epoch 10, batch 9850, loss[loss=0.2084, simple_loss=0.299, pruned_loss=0.05888, over 4909.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2857, pruned_loss=0.05681, over 927220.33 frames. ], batch size: 15, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:56:05,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=135802.66666666666, ans=0.0
+2024-07-28 08:56:06,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.05 vs. limit=12.0
+2024-07-28 08:56:11,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.62 vs. limit=15.0
+2024-07-28 08:56:14,547 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.916e+01 6.813e+01 8.007e+01 1.183e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-28 08:56:19,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=135829.33333333334, ans=0.0
+2024-07-28 08:56:26,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135842.66666666666, ans=0.1
+2024-07-28 08:56:27,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=135842.66666666666, ans=0.125
+2024-07-28 08:56:29,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135856.0, ans=0.0
+2024-07-28 08:56:32,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=135856.0, ans=0.125
+2024-07-28 08:56:34,478 INFO [train.py:1114] (2/4) Epoch 10, batch 9900, loss[loss=0.2239, simple_loss=0.3122, pruned_loss=0.06776, over 4844.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2868, pruned_loss=0.05762, over 926335.01 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:56:52,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=135896.0, ans=0.035
+2024-07-28 08:56:59,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=135922.66666666666, ans=10.0
+2024-07-28 08:57:05,750 INFO [train.py:1114] (2/4) Epoch 10, batch 9950, loss[loss=0.1858, simple_loss=0.2697, pruned_loss=0.05091, over 4793.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2877, pruned_loss=0.05846, over 929045.72 frames. ], batch size: 11, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:57:06,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.23 vs. limit=10.0
+2024-07-28 08:57:15,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=135949.33333333334, ans=0.07
+2024-07-28 08:57:17,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.30 vs. limit=15.0
+2024-07-28 08:57:17,158 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 6.153e+01 7.007e+01 8.060e+01 1.036e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-28 08:57:18,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=135962.66666666666, ans=0.025
+2024-07-28 08:57:27,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135976.0, ans=0.1
+2024-07-28 08:57:28,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=135976.0, ans=0.0
+2024-07-28 08:57:30,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135976.0, ans=0.125
+2024-07-28 08:57:33,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=135989.33333333334, ans=0.125
+2024-07-28 08:57:37,845 INFO [train.py:1114] (2/4) Epoch 10, batch 10000, loss[loss=0.2018, simple_loss=0.2941, pruned_loss=0.05478, over 4625.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2897, pruned_loss=0.05833, over 926896.32 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:57:39,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.93 vs. limit=22.5
+2024-07-28 08:57:50,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.90 vs. limit=22.5
+2024-07-28 08:57:50,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=136029.33333333334, ans=0.125
+2024-07-28 08:57:53,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=136029.33333333334, ans=0.125
+2024-07-28 08:57:56,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.28 vs.
limit=15.0 +2024-07-28 08:57:59,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=136042.66666666666, ans=0.0 +2024-07-28 08:58:09,776 INFO [train.py:1114] (2/4) Epoch 10, batch 10050, loss[loss=0.2576, simple_loss=0.324, pruned_loss=0.09555, over 3492.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2939, pruned_loss=0.0606, over 915665.97 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:58:11,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=136069.33333333334, ans=0.125 +2024-07-28 08:58:14,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=136069.33333333334, ans=0.2 +2024-07-28 08:58:22,174 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.772e+01 6.455e+01 7.428e+01 1.276e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 08:58:22,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=136082.66666666666, ans=0.2 +2024-07-28 08:58:29,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136096.0, ans=0.1 +2024-07-28 08:58:37,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.32 vs. limit=12.0 +2024-07-28 08:58:40,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.82 vs. limit=22.5 +2024-07-28 08:58:43,288 INFO [train.py:1114] (2/4) Epoch 10, batch 10100, loss[loss=0.2623, simple_loss=0.3403, pruned_loss=0.0921, over 3114.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2985, pruned_loss=0.0659, over 863185.98 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:58:43,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.65 vs. limit=5.0 +2024-07-28 08:58:59,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=136162.66666666666, ans=0.125 +2024-07-28 08:59:07,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136176.0, ans=0.1 +2024-07-28 08:59:15,621 INFO [train.py:1114] (2/4) Epoch 10, batch 10150, loss[loss=0.2519, simple_loss=0.3203, pruned_loss=0.09179, over 3221.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3025, pruned_loss=0.07038, over 821834.72 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:59:20,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=136202.66666666666, ans=0.125 +2024-07-28 08:59:22,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. 
limit=6.0 +2024-07-28 08:59:22,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=136216.0, ans=0.025 +2024-07-28 08:59:27,026 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.570e+01 6.618e+01 7.074e+01 7.488e+01 9.490e+01, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 08:59:29,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=136229.33333333334, ans=0.125 +2024-07-28 08:59:33,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=136229.33333333334, ans=0.1 +2024-07-28 08:59:47,609 INFO [train.py:1114] (2/4) Epoch 10, batch 10200, loss[loss=0.2542, simple_loss=0.3199, pruned_loss=0.09424, over 3350.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.306, pruned_loss=0.0741, over 791068.63 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:59:49,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.99 vs. limit=15.0 +2024-07-28 08:59:57,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=136282.66666666666, ans=0.125 +2024-07-28 09:00:46,199 INFO [train.py:1114] (2/4) Epoch 11, batch 0, loss[loss=0.1791, simple_loss=0.2743, pruned_loss=0.04198, over 4855.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2743, pruned_loss=0.04198, over 4855.00 frames. ], batch size: 12, lr: 7.00e-03, grad_scale: 32.0 +2024-07-28 09:00:46,199 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 09:00:54,604 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.2450, 3.7532, 3.9077, 3.5767, 3.7686, 3.9678, 4.1253, 3.5942], + device='cuda:2') +2024-07-28 09:00:57,970 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1737, simple_loss=0.279, pruned_loss=0.03421, over 944034.00 frames. +2024-07-28 09:00:57,971 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 09:01:03,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.61 vs. limit=15.0 +2024-07-28 09:01:13,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:16,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:16,891 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.31 vs. 
limit=10.0 +2024-07-28 09:01:20,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=136338.66666666666, ans=0.125 +2024-07-28 09:01:21,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=136338.66666666666, ans=0.125 +2024-07-28 09:01:29,224 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 6.307e+01 6.804e+01 7.464e+01 1.172e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:01:32,018 INFO [train.py:1114] (2/4) Epoch 11, batch 50, loss[loss=0.1894, simple_loss=0.2826, pruned_loss=0.04808, over 4609.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2911, pruned_loss=0.05935, over 206747.03 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:01:33,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=136365.33333333334, ans=0.125 +2024-07-28 09:01:34,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136365.33333333334, ans=0.125 +2024-07-28 09:01:36,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.32 vs. limit=15.0 +2024-07-28 09:01:37,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136365.33333333334, ans=0.125 +2024-07-28 09:01:50,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136392.0, ans=0.1 +2024-07-28 09:01:57,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=136405.33333333334, ans=0.025 +2024-07-28 09:01:57,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=136405.33333333334, ans=0.2 +2024-07-28 09:02:05,900 INFO [train.py:1114] (2/4) Epoch 11, batch 100, loss[loss=0.1987, simple_loss=0.2871, pruned_loss=0.0552, over 4639.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2906, pruned_loss=0.05756, over 365838.40 frames. ], batch size: 12, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:02:15,015 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:02:28,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=136472.0, ans=0.0 +2024-07-28 09:02:36,700 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.385e+01 5.958e+01 6.972e+01 1.024e+02, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:02:39,365 INFO [train.py:1114] (2/4) Epoch 11, batch 150, loss[loss=0.1769, simple_loss=0.2572, pruned_loss=0.0483, over 4615.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2864, pruned_loss=0.05551, over 494471.17 frames. 
], batch size: 11, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:02:54,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136525.33333333334, ans=0.1 +2024-07-28 09:03:01,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=136538.66666666666, ans=10.0 +2024-07-28 09:03:08,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=136552.0, ans=0.125 +2024-07-28 09:03:08,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-07-28 09:03:14,120 INFO [train.py:1114] (2/4) Epoch 11, batch 200, loss[loss=0.2266, simple_loss=0.3019, pruned_loss=0.07566, over 4441.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2848, pruned_loss=0.0553, over 594151.82 frames. ], batch size: 21, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:03:28,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=136592.0, ans=0.2 +2024-07-28 09:03:36,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.71 vs. limit=15.0 +2024-07-28 09:03:45,024 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.746e+01 6.330e+01 7.204e+01 1.314e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 09:03:46,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=136618.66666666666, ans=0.0 +2024-07-28 09:03:47,807 INFO [train.py:1114] (2/4) Epoch 11, batch 250, loss[loss=0.1945, simple_loss=0.2887, pruned_loss=0.05014, over 4621.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.285, pruned_loss=0.05504, over 671006.04 frames. ], batch size: 16, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:03:49,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136632.0, ans=0.1 +2024-07-28 09:03:58,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=136645.33333333334, ans=0.0 +2024-07-28 09:03:58,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=136645.33333333334, ans=0.125 +2024-07-28 09:04:02,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=136645.33333333334, ans=0.1 +2024-07-28 09:04:07,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=136658.66666666666, ans=0.125 +2024-07-28 09:04:14,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.70 vs. 
limit=15.0 +2024-07-28 09:04:15,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136672.0, ans=0.125 +2024-07-28 09:04:21,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=136685.33333333334, ans=0.125 +2024-07-28 09:04:22,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=136685.33333333334, ans=0.0 +2024-07-28 09:04:22,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.20 vs. limit=10.0 +2024-07-28 09:04:25,276 INFO [train.py:1114] (2/4) Epoch 11, batch 300, loss[loss=0.1879, simple_loss=0.2858, pruned_loss=0.04502, over 4799.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2838, pruned_loss=0.0544, over 730526.45 frames. ], batch size: 15, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:04:49,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.63 vs. limit=15.0 +2024-07-28 09:04:56,685 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.546e+01 5.956e+01 6.746e+01 1.009e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 09:04:57,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.37 vs. limit=22.5 +2024-07-28 09:04:59,414 INFO [train.py:1114] (2/4) Epoch 11, batch 350, loss[loss=0.1898, simple_loss=0.2713, pruned_loss=0.05419, over 4944.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2844, pruned_loss=0.05466, over 776571.99 frames. ], batch size: 12, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:04:59,574 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:05:02,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-07-28 09:05:04,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=136765.33333333334, ans=0.0 +2024-07-28 09:05:05,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=136778.66666666666, ans=0.0 +2024-07-28 09:05:10,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136778.66666666666, ans=0.125 +2024-07-28 09:05:18,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=136792.0, ans=0.125 +2024-07-28 09:05:21,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=136805.33333333334, ans=0.0 +2024-07-28 09:05:31,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=136818.66666666666, ans=0.125 +2024-07-28 09:05:33,210 INFO [train.py:1114] (2/4) Epoch 11, batch 400, loss[loss=0.1922, simple_loss=0.284, pruned_loss=0.05017, over 4694.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2845, pruned_loss=0.05487, over 813865.29 frames. 
], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:05:43,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=136832.0, ans=0.0 +2024-07-28 09:05:52,945 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0 +2024-07-28 09:06:00,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=136872.0, ans=0.0 +2024-07-28 09:06:12,505 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.682e+01 6.253e+01 7.367e+01 1.050e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:06:15,204 INFO [train.py:1114] (2/4) Epoch 11, batch 450, loss[loss=0.1969, simple_loss=0.2866, pruned_loss=0.05366, over 4633.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2844, pruned_loss=0.05513, over 839649.72 frames. ], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:06:15,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=136898.66666666666, ans=0.05 +2024-07-28 09:06:17,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136898.66666666666, ans=0.1 +2024-07-28 09:06:37,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.30 vs. limit=12.0 +2024-07-28 09:06:37,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136925.33333333334, ans=0.125 +2024-07-28 09:06:43,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=136938.66666666666, ans=0.0 +2024-07-28 09:06:55,243 INFO [train.py:1114] (2/4) Epoch 11, batch 500, loss[loss=0.1852, simple_loss=0.2887, pruned_loss=0.04079, over 4693.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2831, pruned_loss=0.05476, over 861885.49 frames. ], batch size: 15, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:07:22,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=137018.66666666666, ans=0.5 +2024-07-28 09:07:25,819 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.492e+01 6.007e+01 6.943e+01 8.543e+01, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 09:07:28,418 INFO [train.py:1114] (2/4) Epoch 11, batch 550, loss[loss=0.2098, simple_loss=0.2976, pruned_loss=0.06096, over 4609.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2831, pruned_loss=0.05449, over 877408.28 frames. 
], batch size: 17, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:07:30,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=137032.0, ans=0.1 +2024-07-28 09:07:32,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=137032.0, ans=0.125 +2024-07-28 09:07:33,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=137032.0, ans=0.125 +2024-07-28 09:07:35,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=137045.33333333334, ans=0.025 +2024-07-28 09:07:42,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137058.66666666666, ans=0.125 +2024-07-28 09:07:44,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=137058.66666666666, ans=0.0 +2024-07-28 09:07:48,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=137058.66666666666, ans=0.125 +2024-07-28 09:07:55,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137085.33333333334, ans=0.1 +2024-07-28 09:08:00,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.47 vs. limit=22.5 +2024-07-28 09:08:02,924 INFO [train.py:1114] (2/4) Epoch 11, batch 600, loss[loss=0.2241, simple_loss=0.3223, pruned_loss=0.06295, over 4622.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2836, pruned_loss=0.05485, over 892218.67 frames. ], batch size: 16, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:08:19,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.40 vs. limit=6.0 +2024-07-28 09:08:21,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.44 vs. limit=15.0 +2024-07-28 09:08:25,948 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0 +2024-07-28 09:08:26,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137138.66666666666, ans=0.1 +2024-07-28 09:08:33,003 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.574e+01 6.202e+01 6.752e+01 1.007e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 09:08:35,648 INFO [train.py:1114] (2/4) Epoch 11, batch 650, loss[loss=0.1642, simple_loss=0.2574, pruned_loss=0.03548, over 4762.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.282, pruned_loss=0.05429, over 904102.34 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:08:40,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.02 vs. limit=15.0 +2024-07-28 09:08:48,910 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. 
limit=15.0 +2024-07-28 09:08:53,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=137192.0, ans=0.125 +2024-07-28 09:08:55,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=137205.33333333334, ans=0.125 +2024-07-28 09:09:01,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=137205.33333333334, ans=0.07 +2024-07-28 09:09:09,631 INFO [train.py:1114] (2/4) Epoch 11, batch 700, loss[loss=0.1701, simple_loss=0.2571, pruned_loss=0.04149, over 4636.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2818, pruned_loss=0.05437, over 912078.26 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:15,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=137245.33333333334, ans=0.0 +2024-07-28 09:09:21,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=137245.33333333334, ans=0.125 +2024-07-28 09:09:22,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.16 vs. limit=12.0 +2024-07-28 09:09:28,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=137258.66666666666, ans=0.0 +2024-07-28 09:09:34,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=137272.0, ans=0.05 +2024-07-28 09:09:38,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=137285.33333333334, ans=0.2 +2024-07-28 09:09:40,616 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+01 5.602e+01 6.234e+01 6.972e+01 9.125e+01, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 09:09:45,987 INFO [train.py:1114] (2/4) Epoch 11, batch 750, loss[loss=0.1825, simple_loss=0.266, pruned_loss=0.04953, over 4696.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2816, pruned_loss=0.05432, over 918676.92 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:51,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137298.66666666666, ans=0.125 +2024-07-28 09:09:56,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137312.0, ans=0.1 +2024-07-28 09:09:57,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=137312.0, ans=0.2 +2024-07-28 09:09:59,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=137325.33333333334, ans=0.025 +2024-07-28 09:10:05,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=137325.33333333334, ans=0.2 +2024-07-28 09:10:13,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.35 vs. 
limit=22.5 +2024-07-28 09:10:19,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=137352.0, ans=0.0 +2024-07-28 09:10:21,548 INFO [train.py:1114] (2/4) Epoch 11, batch 800, loss[loss=0.1967, simple_loss=0.2866, pruned_loss=0.05344, over 4859.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2818, pruned_loss=0.05429, over 923367.63 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:10:24,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=137365.33333333334, ans=0.125 +2024-07-28 09:10:28,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.55 vs. limit=15.0 +2024-07-28 09:10:30,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=137378.66666666666, ans=0.025 +2024-07-28 09:10:38,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=137392.0, ans=0.125 +2024-07-28 09:10:40,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=137405.33333333334, ans=0.125 +2024-07-28 09:11:00,614 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.597e+01 6.070e+01 6.795e+01 9.040e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 09:11:02,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=137432.0, ans=0.125 +2024-07-28 09:11:03,292 INFO [train.py:1114] (2/4) Epoch 11, batch 850, loss[loss=0.1867, simple_loss=0.29, pruned_loss=0.04169, over 4659.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2818, pruned_loss=0.05455, over 927422.23 frames. ], batch size: 14, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:10,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=137445.33333333334, ans=0.125 +2024-07-28 09:11:11,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137445.33333333334, ans=0.2 +2024-07-28 09:11:19,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=137458.66666666666, ans=0.125 +2024-07-28 09:11:26,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=137472.0, ans=0.125 +2024-07-28 09:11:30,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=137485.33333333334, ans=0.0 +2024-07-28 09:11:35,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=137498.66666666666, ans=0.125 +2024-07-28 09:11:36,216 INFO [train.py:1114] (2/4) Epoch 11, batch 900, loss[loss=0.1692, simple_loss=0.2634, pruned_loss=0.03754, over 4849.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2832, pruned_loss=0.05474, over 928174.06 frames. 
], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:37,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137498.66666666666, ans=0.1 +2024-07-28 09:11:55,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=137525.33333333334, ans=0.025 +2024-07-28 09:11:56,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137538.66666666666, ans=0.125 +2024-07-28 09:11:59,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137538.66666666666, ans=0.1 +2024-07-28 09:12:00,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=137538.66666666666, ans=0.5 +2024-07-28 09:12:09,245 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.732e+01 6.294e+01 7.433e+01 1.155e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 09:12:12,079 INFO [train.py:1114] (2/4) Epoch 11, batch 950, loss[loss=0.18, simple_loss=0.2641, pruned_loss=0.04795, over 4773.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2836, pruned_loss=0.05456, over 929625.33 frames. ], batch size: 12, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:14,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.18 vs. limit=15.0 +2024-07-28 09:12:20,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.87 vs. limit=15.0 +2024-07-28 09:12:21,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=137578.66666666666, ans=0.5 +2024-07-28 09:12:34,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.31 vs. limit=15.0 +2024-07-28 09:12:35,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.34 vs. limit=22.5 +2024-07-28 09:12:46,526 INFO [train.py:1114] (2/4) Epoch 11, batch 1000, loss[loss=0.2058, simple_loss=0.2962, pruned_loss=0.05766, over 4962.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2844, pruned_loss=0.05487, over 929088.70 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:56,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=137645.33333333334, ans=0.125 +2024-07-28 09:13:00,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=137658.66666666666, ans=0.0 +2024-07-28 09:13:04,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0 +2024-07-28 09:13:18,722 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.919e+01 5.562e+01 6.150e+01 7.152e+01 9.857e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:13:21,487 INFO [train.py:1114] (2/4) Epoch 11, batch 1050, loss[loss=0.2284, simple_loss=0.3137, pruned_loss=0.07151, over 4880.00 frames. 
], tot_loss[loss=0.1971, simple_loss=0.2838, pruned_loss=0.05513, over 931700.10 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:13:26,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=137698.66666666666, ans=0.125 +2024-07-28 09:13:26,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.87 vs. limit=22.5 +2024-07-28 09:13:32,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=137712.0, ans=0.125 +2024-07-28 09:13:34,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=137712.0, ans=0.125 +2024-07-28 09:13:37,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=137712.0, ans=0.0 +2024-07-28 09:13:38,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=137725.33333333334, ans=0.0 +2024-07-28 09:13:39,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137725.33333333334, ans=0.125 +2024-07-28 09:13:53,145 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.28 vs. limit=22.5 +2024-07-28 09:13:58,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-07-28 09:13:58,594 INFO [train.py:1114] (2/4) Epoch 11, batch 1100, loss[loss=0.1526, simple_loss=0.2408, pruned_loss=0.03215, over 4892.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2827, pruned_loss=0.05437, over 934337.20 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:14:03,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.34 vs. limit=22.5 +2024-07-28 09:14:23,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=137805.33333333334, ans=0.125 +2024-07-28 09:14:26,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137805.33333333334, ans=0.1 +2024-07-28 09:14:33,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=137818.66666666666, ans=0.1 +2024-07-28 09:14:40,322 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.240e+01 5.580e+01 6.119e+01 6.842e+01 1.423e+02, threshold=1.224e+02, percent-clipped=1.0 +2024-07-28 09:14:42,905 INFO [train.py:1114] (2/4) Epoch 11, batch 1150, loss[loss=0.2059, simple_loss=0.2888, pruned_loss=0.06153, over 4902.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.283, pruned_loss=0.05433, over 934035.04 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:14:45,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.00 vs. 
limit=15.0 +2024-07-28 09:14:51,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.89 vs. limit=22.5 +2024-07-28 09:14:54,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=137845.33333333334, ans=0.125 +2024-07-28 09:15:11,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=137885.33333333334, ans=0.2 +2024-07-28 09:15:14,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=137885.33333333334, ans=0.125 +2024-07-28 09:15:15,289 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:15:16,426 INFO [train.py:1114] (2/4) Epoch 11, batch 1200, loss[loss=0.1985, simple_loss=0.2925, pruned_loss=0.05222, over 4875.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2824, pruned_loss=0.05384, over 932901.71 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:15:26,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=137912.0, ans=0.0 +2024-07-28 09:15:37,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=137925.33333333334, ans=0.035 +2024-07-28 09:15:50,323 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.564e+01 6.259e+01 7.036e+01 9.371e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 09:15:53,050 INFO [train.py:1114] (2/4) Epoch 11, batch 1250, loss[loss=0.2428, simple_loss=0.3197, pruned_loss=0.08292, over 4797.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2832, pruned_loss=0.05448, over 936956.31 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:15:53,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=137965.33333333334, ans=0.125 +2024-07-28 09:16:06,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=137992.0, ans=0.05 +2024-07-28 09:16:19,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138018.66666666666, ans=0.1 +2024-07-28 09:16:26,264 INFO [train.py:1114] (2/4) Epoch 11, batch 1300, loss[loss=0.225, simple_loss=0.3037, pruned_loss=0.07319, over 4645.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2824, pruned_loss=0.05422, over 938548.48 frames. 
], batch size: 19, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:16:26,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=138032.0, ans=0.125 +2024-07-28 09:16:28,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138032.0, ans=0.1 +2024-07-28 09:16:36,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138045.33333333334, ans=0.125 +2024-07-28 09:16:46,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=138072.0, ans=0.0 +2024-07-28 09:16:57,019 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.412e+01 5.624e+01 6.382e+01 7.662e+01 1.173e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-28 09:16:57,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=138085.33333333334, ans=0.0 +2024-07-28 09:16:59,903 INFO [train.py:1114] (2/4) Epoch 11, batch 1350, loss[loss=0.1918, simple_loss=0.2859, pruned_loss=0.04883, over 4756.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2823, pruned_loss=0.05418, over 940567.34 frames. ], batch size: 13, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:04,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=138098.66666666666, ans=0.125 +2024-07-28 09:17:05,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138098.66666666666, ans=0.1 +2024-07-28 09:17:32,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=138152.0, ans=0.125 +2024-07-28 09:17:33,293 INFO [train.py:1114] (2/4) Epoch 11, batch 1400, loss[loss=0.1955, simple_loss=0.2684, pruned_loss=0.06128, over 4711.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2826, pruned_loss=0.05434, over 942689.15 frames. ], batch size: 11, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:34,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=138165.33333333334, ans=0.0 +2024-07-28 09:17:40,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=138178.66666666666, ans=0.025 +2024-07-28 09:17:53,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=138192.0, ans=0.125 +2024-07-28 09:17:56,975 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.81 vs. limit=10.0 +2024-07-28 09:17:59,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.90 vs. 
limit=15.0 +2024-07-28 09:18:03,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=138218.66666666666, ans=0.0 +2024-07-28 09:18:06,173 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.725e+01 6.807e+01 7.781e+01 1.138e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:18:08,969 INFO [train.py:1114] (2/4) Epoch 11, batch 1450, loss[loss=0.1931, simple_loss=0.2829, pruned_loss=0.05162, over 4685.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2831, pruned_loss=0.05424, over 942536.44 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:09,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-07-28 09:18:14,277 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:18:19,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=138245.33333333334, ans=0.125 +2024-07-28 09:18:23,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=138245.33333333334, ans=0.0 +2024-07-28 09:18:27,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=138258.66666666666, ans=0.125 +2024-07-28 09:18:27,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=138258.66666666666, ans=0.125 +2024-07-28 09:18:30,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=138258.66666666666, ans=0.0 +2024-07-28 09:18:42,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=138285.33333333334, ans=0.0 +2024-07-28 09:18:52,748 INFO [train.py:1114] (2/4) Epoch 11, batch 1500, loss[loss=0.1895, simple_loss=0.2803, pruned_loss=0.04936, over 4808.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2845, pruned_loss=0.05506, over 942156.48 frames. ], batch size: 14, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:56,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=138298.66666666666, ans=0.09899494936611666 +2024-07-28 09:18:59,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=138312.0, ans=0.125 +2024-07-28 09:19:17,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138325.33333333334, ans=0.1 +2024-07-28 09:19:18,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138338.66666666666, ans=0.1 +2024-07-28 09:19:31,305 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.052e+01 5.776e+01 6.231e+01 7.086e+01 9.841e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:19:33,308 INFO [train.py:1114] (2/4) Epoch 11, batch 1550, loss[loss=0.2411, simple_loss=0.3209, pruned_loss=0.08064, over 4897.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2839, pruned_loss=0.05494, over 938388.68 frames. 
], batch size: 15, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:19:34,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=138365.33333333334, ans=0.1 +2024-07-28 09:19:37,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138365.33333333334, ans=0.125 +2024-07-28 09:19:43,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.54 vs. limit=22.5 +2024-07-28 09:19:47,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138392.0, ans=0.1 +2024-07-28 09:19:50,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=138392.0, ans=0.125 +2024-07-28 09:19:53,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138405.33333333334, ans=0.125 +2024-07-28 09:20:09,041 INFO [train.py:1114] (2/4) Epoch 11, batch 1600, loss[loss=0.2021, simple_loss=0.3019, pruned_loss=0.05114, over 4869.00 frames. ], tot_loss[loss=0.197, simple_loss=0.284, pruned_loss=0.05498, over 937737.17 frames. ], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:12,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.15 vs. limit=15.0 +2024-07-28 09:20:31,605 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.12 vs. limit=15.0 +2024-07-28 09:20:36,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=138485.33333333334, ans=0.5 +2024-07-28 09:20:46,017 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.538e+01 5.961e+01 6.813e+01 9.879e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:20:47,970 INFO [train.py:1114] (2/4) Epoch 11, batch 1650, loss[loss=0.1797, simple_loss=0.2803, pruned_loss=0.03959, over 4667.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2839, pruned_loss=0.05507, over 937098.32 frames. ], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:21:08,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138525.33333333334, ans=0.125 +2024-07-28 09:21:08,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138538.66666666666, ans=0.125 +2024-07-28 09:21:24,965 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.38 vs. limit=6.0 +2024-07-28 09:21:27,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0 +2024-07-28 09:21:29,792 INFO [train.py:1114] (2/4) Epoch 11, batch 1700, loss[loss=0.1806, simple_loss=0.26, pruned_loss=0.05054, over 4719.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2838, pruned_loss=0.05482, over 938629.19 frames. 
], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:21:37,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=138578.66666666666, ans=0.025 +2024-07-28 09:21:47,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=138592.0, ans=0.0 +2024-07-28 09:22:06,853 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.454e+01 5.772e+01 6.333e+01 7.541e+01 1.576e+02, threshold=1.267e+02, percent-clipped=2.0 +2024-07-28 09:22:08,895 INFO [train.py:1114] (2/4) Epoch 11, batch 1750, loss[loss=0.2177, simple_loss=0.2843, pruned_loss=0.07556, over 4821.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2835, pruned_loss=0.05448, over 939845.30 frames. ], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:28,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=138658.66666666666, ans=0.125 +2024-07-28 09:22:35,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138658.66666666666, ans=0.1 +2024-07-28 09:22:45,544 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:22:50,656 INFO [train.py:1114] (2/4) Epoch 11, batch 1800, loss[loss=0.2384, simple_loss=0.3238, pruned_loss=0.07654, over 4642.00 frames. ], tot_loss[loss=0.196, simple_loss=0.283, pruned_loss=0.05451, over 940401.55 frames. ], batch size: 13, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:55,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-07-28 09:22:57,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.61 vs. limit=15.0 +2024-07-28 09:23:09,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138725.33333333334, ans=0.1 +2024-07-28 09:23:16,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.82 vs. limit=12.0 +2024-07-28 09:23:19,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=138752.0, ans=0.2 +2024-07-28 09:23:24,317 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+01 5.944e+01 6.989e+01 8.458e+01 1.208e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-28 09:23:28,451 INFO [train.py:1114] (2/4) Epoch 11, batch 1850, loss[loss=0.2086, simple_loss=0.2907, pruned_loss=0.06318, over 4814.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2823, pruned_loss=0.05418, over 940487.87 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:01,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=138818.66666666666, ans=0.05 +2024-07-28 09:24:06,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=138818.66666666666, ans=0.07 +2024-07-28 09:24:07,712 INFO [train.py:1114] (2/4) Epoch 11, batch 1900, loss[loss=0.1901, simple_loss=0.2874, pruned_loss=0.04635, over 4671.00 frames. 
], tot_loss[loss=0.1948, simple_loss=0.2819, pruned_loss=0.0538, over 941955.64 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:19,291 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:24:28,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5 +2024-07-28 09:24:31,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.70 vs. limit=12.0 +2024-07-28 09:24:33,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138872.0, ans=0.1 +2024-07-28 09:24:39,499 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.648e+01 6.210e+01 7.045e+01 1.018e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 09:24:40,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.97 vs. limit=22.5 +2024-07-28 09:24:41,674 INFO [train.py:1114] (2/4) Epoch 11, batch 1950, loss[loss=0.2022, simple_loss=0.3, pruned_loss=0.05223, over 4892.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2834, pruned_loss=0.0542, over 943942.66 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:44,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=138898.66666666666, ans=0.025 +2024-07-28 09:25:01,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138925.33333333334, ans=0.1 +2024-07-28 09:25:03,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=138925.33333333334, ans=10.0 +2024-07-28 09:25:07,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.68 vs. limit=15.0 +2024-07-28 09:25:10,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=138952.0, ans=0.0 +2024-07-28 09:25:19,148 INFO [train.py:1114] (2/4) Epoch 11, batch 2000, loss[loss=0.1741, simple_loss=0.2591, pruned_loss=0.04457, over 4805.00 frames. ], tot_loss[loss=0.197, simple_loss=0.284, pruned_loss=0.05495, over 941112.91 frames. ], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:25:23,188 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:32:26,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.57 vs. limit=10.0 +2024-07-28 09:32:40,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0 +2024-07-28 09:32:42,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.46 vs. 
limit=6.0 +2024-07-28 09:32:47,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=139018.66666666666, ans=0.0 +2024-07-28 09:32:48,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=139018.66666666666, ans=0.0 +2024-07-28 09:32:50,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=139018.66666666666, ans=0.125 +2024-07-28 09:32:51,185 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.827e+01 6.350e+01 7.381e+01 1.146e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:32:53,237 INFO [train.py:1114] (2/4) Epoch 11, batch 2050, loss[loss=0.1521, simple_loss=0.2435, pruned_loss=0.03037, over 4626.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2835, pruned_loss=0.05511, over 938828.22 frames. ], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:32:58,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=139032.0, ans=0.125 +2024-07-28 09:32:59,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139045.33333333334, ans=0.1 +2024-07-28 09:33:00,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.29 vs. limit=15.0 +2024-07-28 09:33:13,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139058.66666666666, ans=0.1 +2024-07-28 09:33:14,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=139058.66666666666, ans=0.125 +2024-07-28 09:33:21,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=139085.33333333334, ans=0.0 +2024-07-28 09:33:22,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0 +2024-07-28 09:33:23,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=139085.33333333334, ans=0.2 +2024-07-28 09:33:28,479 INFO [train.py:1114] (2/4) Epoch 11, batch 2100, loss[loss=0.1989, simple_loss=0.3016, pruned_loss=0.04813, over 4756.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2824, pruned_loss=0.0545, over 940750.31 frames. 
], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:33:31,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=139098.66666666666, ans=0.025 +2024-07-28 09:33:32,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=139098.66666666666, ans=0.0 +2024-07-28 09:33:43,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=139125.33333333334, ans=0.07 +2024-07-28 09:33:45,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139125.33333333334, ans=0.1 +2024-07-28 09:33:46,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=139125.33333333334, ans=0.0 +2024-07-28 09:33:48,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=139138.66666666666, ans=0.0 +2024-07-28 09:34:03,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=139152.0, ans=0.0 +2024-07-28 09:34:05,601 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.652e+01 6.255e+01 7.375e+01 9.920e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:34:06,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=139165.33333333334, ans=0.125 +2024-07-28 09:34:06,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=139165.33333333334, ans=0.125 +2024-07-28 09:34:06,905 INFO [train.py:1114] (2/4) Epoch 11, batch 2150, loss[loss=0.1919, simple_loss=0.2789, pruned_loss=0.05243, over 4891.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2817, pruned_loss=0.05412, over 943865.95 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:34:13,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=139165.33333333334, ans=0.125 +2024-07-28 09:34:23,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139178.66666666666, ans=0.125 +2024-07-28 09:34:33,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.34 vs. limit=15.0 +2024-07-28 09:34:55,048 INFO [train.py:1114] (2/4) Epoch 11, batch 2200, loss[loss=0.1512, simple_loss=0.2463, pruned_loss=0.02804, over 4807.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2821, pruned_loss=0.05421, over 943201.78 frames. 
], batch size: 14, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:01,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=139245.33333333334, ans=0.0 +2024-07-28 09:35:05,998 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:35:07,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=139245.33333333334, ans=0.0 +2024-07-28 09:35:07,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=139258.66666666666, ans=0.0 +2024-07-28 09:35:27,038 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.559e+01 6.152e+01 7.200e+01 1.019e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:35:28,418 INFO [train.py:1114] (2/4) Epoch 11, batch 2250, loss[loss=0.1901, simple_loss=0.2725, pruned_loss=0.05387, over 4687.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2827, pruned_loss=0.05466, over 942142.21 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:44,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=139325.33333333334, ans=0.0 +2024-07-28 09:35:53,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 09:36:01,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=139352.0, ans=0.0 +2024-07-28 09:36:27,454 INFO [train.py:1114] (2/4) Epoch 11, batch 2300, loss[loss=0.1607, simple_loss=0.2493, pruned_loss=0.03603, over 4949.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2813, pruned_loss=0.05418, over 939260.90 frames. ], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:36:28,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=139365.33333333334, ans=0.0 +2024-07-28 09:36:32,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139365.33333333334, ans=0.125 +2024-07-28 09:36:43,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=139392.0, ans=0.0 +2024-07-28 09:36:50,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=139405.33333333334, ans=0.0 +2024-07-28 09:36:59,896 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.544e+01 6.088e+01 7.000e+01 1.026e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 09:37:01,177 INFO [train.py:1114] (2/4) Epoch 11, batch 2350, loss[loss=0.1901, simple_loss=0.2854, pruned_loss=0.04739, over 4639.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2816, pruned_loss=0.0542, over 941300.96 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:22,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.13 vs. 
limit=22.5 +2024-07-28 09:37:23,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=139472.0, ans=0.125 +2024-07-28 09:37:26,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.89 vs. limit=15.0 +2024-07-28 09:37:30,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=139485.33333333334, ans=0.0 +2024-07-28 09:37:36,626 INFO [train.py:1114] (2/4) Epoch 11, batch 2400, loss[loss=0.1816, simple_loss=0.2651, pruned_loss=0.04906, over 4644.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2821, pruned_loss=0.05408, over 940935.16 frames. ], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:54,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=139512.0, ans=0.0 +2024-07-28 09:38:13,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139552.0, ans=0.1 +2024-07-28 09:38:17,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=139552.0, ans=0.125 +2024-07-28 09:38:18,419 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.707e+01 6.350e+01 6.927e+01 1.167e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:38:19,109 INFO [train.py:1114] (2/4) Epoch 11, batch 2450, loss[loss=0.1924, simple_loss=0.2743, pruned_loss=0.05524, over 4695.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.283, pruned_loss=0.05496, over 937181.42 frames. ], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:38:20,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=139565.33333333334, ans=0.0 +2024-07-28 09:38:22,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139565.33333333334, ans=0.1 +2024-07-28 09:38:47,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=139605.33333333334, ans=0.2 +2024-07-28 09:38:48,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139605.33333333334, ans=0.1 +2024-07-28 09:38:49,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139605.33333333334, ans=0.1 +2024-07-28 09:38:57,051 INFO [train.py:1114] (2/4) Epoch 11, batch 2500, loss[loss=0.1561, simple_loss=0.2491, pruned_loss=0.03155, over 4813.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2832, pruned_loss=0.05476, over 938901.01 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:07,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=139645.33333333334, ans=0.125 +2024-07-28 09:39:17,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=139672.0, ans=0.015 +2024-07-28 09:39:23,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.25 vs. 
limit=15.0 +2024-07-28 09:39:32,203 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+01 5.568e+01 6.165e+01 6.885e+01 1.396e+02, threshold=1.233e+02, percent-clipped=2.0 +2024-07-28 09:39:32,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=139698.66666666666, ans=0.2 +2024-07-28 09:39:32,992 INFO [train.py:1114] (2/4) Epoch 11, batch 2550, loss[loss=0.1434, simple_loss=0.2238, pruned_loss=0.03147, over 4803.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2826, pruned_loss=0.05443, over 938335.28 frames. ], batch size: 11, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:45,994 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:39:52,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=139738.66666666666, ans=0.0 +2024-07-28 09:39:58,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=139738.66666666666, ans=0.125 +2024-07-28 09:40:08,274 INFO [train.py:1114] (2/4) Epoch 11, batch 2600, loss[loss=0.2182, simple_loss=0.2862, pruned_loss=0.07509, over 4896.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2829, pruned_loss=0.05461, over 937343.02 frames. ], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:09,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=139765.33333333334, ans=0.125 +2024-07-28 09:40:11,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=139765.33333333334, ans=0.2 +2024-07-28 09:40:12,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=15.0 +2024-07-28 09:40:12,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139765.33333333334, ans=0.1 +2024-07-28 09:40:23,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139792.0, ans=0.125 +2024-07-28 09:40:28,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. limit=10.0 +2024-07-28 09:40:44,007 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.632e+01 6.432e+01 7.757e+01 1.315e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 09:40:44,696 INFO [train.py:1114] (2/4) Epoch 11, batch 2650, loss[loss=0.2017, simple_loss=0.2946, pruned_loss=0.05446, over 4615.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2834, pruned_loss=0.05451, over 939343.48 frames. 
], batch size: 16, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:57,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=139845.33333333334, ans=0.2 +2024-07-28 09:40:59,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=139845.33333333334, ans=0.0 +2024-07-28 09:41:27,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=139885.33333333334, ans=0.0 +2024-07-28 09:41:28,892 INFO [train.py:1114] (2/4) Epoch 11, batch 2700, loss[loss=0.2227, simple_loss=0.3067, pruned_loss=0.0693, over 4735.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2835, pruned_loss=0.0547, over 939343.12 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:41:28,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=139898.66666666666, ans=10.0 +2024-07-28 09:41:44,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.86 vs. limit=10.0 +2024-07-28 09:42:07,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=139952.0, ans=0.125 +2024-07-28 09:42:10,946 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.187e+01 5.691e+01 6.358e+01 7.173e+01 9.845e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 09:42:11,633 INFO [train.py:1114] (2/4) Epoch 11, batch 2750, loss[loss=0.1771, simple_loss=0.2716, pruned_loss=0.04134, over 4709.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2823, pruned_loss=0.05409, over 939665.13 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 16.0 +2024-07-28 09:42:12,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=139965.33333333334, ans=0.0 +2024-07-28 09:42:14,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=139965.33333333334, ans=0.125 +2024-07-28 09:42:16,340 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:44:58,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139978.66666666666, ans=0.1 +2024-07-28 09:45:22,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=140005.33333333334, ans=0.2 +2024-07-28 09:45:25,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=140018.66666666666, ans=0.125 +2024-07-28 09:45:28,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=140018.66666666666, ans=0.0 +2024-07-28 09:45:30,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.41 vs. limit=15.0 +2024-07-28 09:45:31,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.32 vs. 
limit=15.0 +2024-07-28 09:45:33,260 INFO [train.py:1114] (2/4) Epoch 11, batch 2800, loss[loss=0.2216, simple_loss=0.306, pruned_loss=0.06861, over 3488.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2835, pruned_loss=0.0547, over 938055.65 frames. ], batch size: 35, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:45:35,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=140032.0, ans=0.125 +2024-07-28 09:45:39,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.81 vs. limit=22.5 +2024-07-28 09:45:40,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=140045.33333333334, ans=0.125 +2024-07-28 09:45:41,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=140045.33333333334, ans=22.5 +2024-07-28 09:45:48,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140058.66666666666, ans=0.1 +2024-07-28 09:45:57,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.26 vs. limit=10.0 +2024-07-28 09:45:58,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=140072.0, ans=0.0 +2024-07-28 09:45:59,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140085.33333333334, ans=0.125 +2024-07-28 09:46:03,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140085.33333333334, ans=0.1 +2024-07-28 09:46:07,735 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.521e+01 6.232e+01 7.025e+01 9.705e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:46:08,488 INFO [train.py:1114] (2/4) Epoch 11, batch 2850, loss[loss=0.1744, simple_loss=0.2578, pruned_loss=0.0455, over 4958.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2834, pruned_loss=0.05465, over 936072.02 frames. ], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:11,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=140098.66666666666, ans=0.0 +2024-07-28 09:46:13,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140098.66666666666, ans=0.125 +2024-07-28 09:46:19,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=140112.0, ans=0.125 +2024-07-28 09:46:28,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.35 vs. limit=15.0 +2024-07-28 09:46:29,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=140138.66666666666, ans=0.2 +2024-07-28 09:46:42,412 INFO [train.py:1114] (2/4) Epoch 11, batch 2900, loss[loss=0.2396, simple_loss=0.3113, pruned_loss=0.08396, over 4831.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2838, pruned_loss=0.05412, over 939836.44 frames. 
], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:42,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.37 vs. limit=15.0 +2024-07-28 09:46:56,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=140192.0, ans=0.125 +2024-07-28 09:46:57,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140192.0, ans=0.1 +2024-07-28 09:47:00,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140192.0, ans=0.125 +2024-07-28 09:47:21,184 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.615e+01 6.138e+01 7.226e+01 1.097e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 09:47:22,500 INFO [train.py:1114] (2/4) Epoch 11, batch 2950, loss[loss=0.1831, simple_loss=0.2758, pruned_loss=0.04523, over 4714.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2829, pruned_loss=0.05402, over 938484.76 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:47:22,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=140232.0, ans=0.125 +2024-07-28 09:47:25,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140232.0, ans=0.1 +2024-07-28 09:47:29,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=140245.33333333334, ans=0.0 +2024-07-28 09:47:41,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=140245.33333333334, ans=0.125 +2024-07-28 09:47:47,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=140258.66666666666, ans=0.125 +2024-07-28 09:47:53,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140272.0, ans=0.1 +2024-07-28 09:47:54,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=140272.0, ans=0.125 +2024-07-28 09:47:59,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140285.33333333334, ans=0.1 +2024-07-28 09:48:05,766 INFO [train.py:1114] (2/4) Epoch 11, batch 3000, loss[loss=0.209, simple_loss=0.3019, pruned_loss=0.05807, over 4765.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2829, pruned_loss=0.05427, over 938486.05 frames. 
], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:48:05,767 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 09:48:10,284 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.3680, 4.0932, 3.5994, 3.7786], device='cuda:2') +2024-07-28 09:48:11,765 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.5819, 3.8936, 4.5747, 3.6501], device='cuda:2') +2024-07-28 09:48:12,712 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.1515, 4.8927, 4.4245, 3.9216], device='cuda:2') +2024-07-28 09:48:15,718 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9119, 5.0142, 4.8934, 5.6254], device='cuda:2') +2024-07-28 09:48:19,193 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1714, simple_loss=0.2749, pruned_loss=0.03396, over 944034.00 frames. +2024-07-28 09:48:19,194 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 09:48:21,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=140298.66666666666, ans=0.125 +2024-07-28 09:48:28,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.95 vs. limit=10.0 +2024-07-28 09:48:31,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140312.0, ans=0.125 +2024-07-28 09:48:42,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140338.66666666666, ans=0.125 +2024-07-28 09:48:42,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=140338.66666666666, ans=0.125 +2024-07-28 09:48:52,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=140352.0, ans=0.2 +2024-07-28 09:48:53,328 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.535e+01 6.032e+01 6.917e+01 1.051e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 09:48:53,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=140365.33333333334, ans=0.125 +2024-07-28 09:48:54,515 INFO [train.py:1114] (2/4) Epoch 11, batch 3050, loss[loss=0.1875, simple_loss=0.2751, pruned_loss=0.04992, over 4631.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2834, pruned_loss=0.05473, over 937033.04 frames. ], batch size: 12, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:49:08,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=140392.0, ans=0.0 +2024-07-28 09:49:13,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=140392.0, ans=0.0 +2024-07-28 09:49:15,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=140392.0, ans=0.125 +2024-07-28 09:49:32,128 INFO [train.py:1114] (2/4) Epoch 11, batch 3100, loss[loss=0.181, simple_loss=0.2658, pruned_loss=0.0481, over 4630.00 frames. 
], tot_loss[loss=0.1952, simple_loss=0.2822, pruned_loss=0.05406, over 938078.26 frames. ], batch size: 16, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:49:41,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.44 vs. limit=22.5 +2024-07-28 09:49:54,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=140472.0, ans=0.125 +2024-07-28 09:49:57,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.27 vs. limit=15.0 +2024-07-28 09:50:01,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=140485.33333333334, ans=0.2 +2024-07-28 09:50:01,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.42 vs. limit=22.5 +2024-07-28 09:50:02,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140485.33333333334, ans=0.1 +2024-07-28 09:50:02,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=140485.33333333334, ans=0.125 +2024-07-28 09:50:03,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=140485.33333333334, ans=0.07 +2024-07-28 09:50:07,163 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.405e+01 6.178e+01 7.390e+01 1.037e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 09:50:07,876 INFO [train.py:1114] (2/4) Epoch 11, batch 3150, loss[loss=0.2229, simple_loss=0.3116, pruned_loss=0.06716, over 4649.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2824, pruned_loss=0.05432, over 938331.10 frames. ], batch size: 17, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:09,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140498.66666666666, ans=0.125 +2024-07-28 09:50:13,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=140512.0, ans=0.2 +2024-07-28 09:50:14,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=140512.0, ans=0.125 +2024-07-28 09:50:21,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=140525.33333333334, ans=0.125 +2024-07-28 09:50:25,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140525.33333333334, ans=0.1 +2024-07-28 09:50:39,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=140552.0, ans=0.125 +2024-07-28 09:50:43,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=140565.33333333334, ans=0.2 +2024-07-28 09:50:43,667 INFO [train.py:1114] (2/4) Epoch 11, batch 3200, loss[loss=0.2042, simple_loss=0.2901, pruned_loss=0.05913, over 4824.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2816, pruned_loss=0.05384, over 939718.60 frames. 
], batch size: 13, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:53,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=140578.66666666666, ans=0.125 +2024-07-28 09:50:58,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=140592.0, ans=0.02 +2024-07-28 09:51:13,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=140592.0, ans=0.05 +2024-07-28 09:51:18,641 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:51:19,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=140605.33333333334, ans=0.2 +2024-07-28 09:51:32,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140605.33333333334, ans=0.125 +2024-07-28 09:51:34,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.86 vs. limit=22.5 +2024-07-28 09:51:36,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.66 vs. limit=22.5 +2024-07-28 09:51:48,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140618.66666666666, ans=0.1 +2024-07-28 09:52:00,433 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.714e+01 6.190e+01 6.678e+01 8.069e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 09:52:01,127 INFO [train.py:1114] (2/4) Epoch 11, batch 3250, loss[loss=0.2135, simple_loss=0.2984, pruned_loss=0.0643, over 4925.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2821, pruned_loss=0.05366, over 941027.58 frames. ], batch size: 14, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:52:20,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=140632.0, ans=0.125 +2024-07-28 09:52:21,036 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-07-28 09:52:56,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=140658.66666666666, ans=0.0 +2024-07-28 09:53:44,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140685.33333333334, ans=0.125 +2024-07-28 09:53:46,909 INFO [train.py:1114] (2/4) Epoch 11, batch 3300, loss[loss=0.2744, simple_loss=0.3438, pruned_loss=0.1025, over 4752.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2818, pruned_loss=0.05414, over 940987.90 frames. 
], batch size: 19, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:53:58,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=140698.66666666666, ans=0.0 +2024-07-28 09:54:04,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140712.0, ans=0.1 +2024-07-28 09:54:27,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140725.33333333334, ans=0.125 +2024-07-28 09:54:37,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=140738.66666666666, ans=0.125 +2024-07-28 09:54:44,369 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.59 vs. limit=10.0 +2024-07-28 09:54:51,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140752.0, ans=0.125 +2024-07-28 09:54:52,380 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.701e+01 6.395e+01 7.330e+01 1.076e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 09:54:53,119 INFO [train.py:1114] (2/4) Epoch 11, batch 3350, loss[loss=0.2191, simple_loss=0.2922, pruned_loss=0.073, over 4612.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2819, pruned_loss=0.0541, over 939234.58 frames. ], batch size: 17, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:54:54,538 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:55:00,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=140778.66666666666, ans=0.125 +2024-07-28 09:55:15,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=140805.33333333334, ans=0.2 +2024-07-28 09:55:17,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=140805.33333333334, ans=0.125 +2024-07-28 09:55:28,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=140832.0, ans=0.125 +2024-07-28 09:55:29,118 INFO [train.py:1114] (2/4) Epoch 11, batch 3400, loss[loss=0.2075, simple_loss=0.2782, pruned_loss=0.06843, over 4792.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2826, pruned_loss=0.05479, over 937745.92 frames. ], batch size: 11, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:55:30,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.26 vs. limit=22.5 +2024-07-28 09:55:41,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140858.66666666666, ans=0.125 +2024-07-28 09:55:46,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=140858.66666666666, ans=10.0 +2024-07-28 09:55:50,056 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.52 vs. 
limit=15.0 +2024-07-28 09:55:59,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=140885.33333333334, ans=0.0 +2024-07-28 09:56:04,024 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.505e+01 5.604e+01 6.128e+01 6.821e+01 1.006e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 09:56:04,709 INFO [train.py:1114] (2/4) Epoch 11, batch 3450, loss[loss=0.2392, simple_loss=0.3213, pruned_loss=0.07861, over 4729.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2821, pruned_loss=0.05437, over 937996.67 frames. ], batch size: 19, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:12,739 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:56:14,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=140912.0, ans=0.125 +2024-07-28 09:56:14,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=140912.0, ans=0.0 +2024-07-28 09:56:21,458 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.29 vs. limit=22.5 +2024-07-28 09:56:23,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=140938.66666666666, ans=0.0 +2024-07-28 09:56:32,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=140952.0, ans=0.125 +2024-07-28 09:56:38,862 INFO [train.py:1114] (2/4) Epoch 11, batch 3500, loss[loss=0.1894, simple_loss=0.2654, pruned_loss=0.05674, over 4948.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2819, pruned_loss=0.05395, over 938097.95 frames. ], batch size: 12, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:39,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=140965.33333333334, ans=0.2 +2024-07-28 09:56:54,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=140992.0, ans=0.0 +2024-07-28 09:56:55,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=140992.0, ans=0.0 +2024-07-28 09:57:00,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=140992.0, ans=0.125 +2024-07-28 09:57:04,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141005.33333333334, ans=0.0 +2024-07-28 09:57:15,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=141018.66666666666, ans=0.0 +2024-07-28 09:57:16,585 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+01 5.451e+01 6.238e+01 7.293e+01 9.971e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 09:57:17,326 INFO [train.py:1114] (2/4) Epoch 11, batch 3550, loss[loss=0.1817, simple_loss=0.277, pruned_loss=0.04322, over 4674.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2824, pruned_loss=0.05402, over 938516.64 frames. 
], batch size: 14, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:57:18,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=141032.0, ans=0.125 +2024-07-28 09:57:25,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=141045.33333333334, ans=0.025 +2024-07-28 09:57:41,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.68 vs. limit=10.0 +2024-07-28 09:57:42,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=141072.0, ans=0.125 +2024-07-28 09:57:44,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=141085.33333333334, ans=0.0 +2024-07-28 09:57:59,193 INFO [train.py:1114] (2/4) Epoch 11, batch 3600, loss[loss=0.2034, simple_loss=0.2921, pruned_loss=0.05734, over 4968.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2816, pruned_loss=0.0536, over 940357.63 frames. ], batch size: 13, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:02,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=141098.66666666666, ans=0.125 +2024-07-28 09:58:03,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=141098.66666666666, ans=0.125 +2024-07-28 09:58:03,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.07 vs. limit=22.5 +2024-07-28 09:58:22,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=141138.66666666666, ans=0.0 +2024-07-28 09:58:27,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=141138.66666666666, ans=0.125 +2024-07-28 09:58:35,668 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.527e+01 6.114e+01 7.370e+01 1.148e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 09:58:37,552 INFO [train.py:1114] (2/4) Epoch 11, batch 3650, loss[loss=0.2153, simple_loss=0.3015, pruned_loss=0.06454, over 4902.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.281, pruned_loss=0.05325, over 940499.99 frames. ], batch size: 15, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:50,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.70 vs. limit=15.0 +2024-07-28 09:58:57,532 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.18 vs. limit=22.5 +2024-07-28 09:59:11,456 INFO [train.py:1114] (2/4) Epoch 11, batch 3700, loss[loss=0.1822, simple_loss=0.2801, pruned_loss=0.04213, over 4935.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2807, pruned_loss=0.05264, over 941675.53 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:17,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.73 vs. 
limit=10.0 +2024-07-28 09:59:24,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.60 vs. limit=15.0 +2024-07-28 09:59:33,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141258.66666666666, ans=0.1 +2024-07-28 09:59:38,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=141272.0, ans=0.125 +2024-07-28 09:59:38,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=141272.0, ans=0.95 +2024-07-28 09:59:39,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=141272.0, ans=0.0 +2024-07-28 09:59:41,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-07-28 09:59:46,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=141285.33333333334, ans=0.05 +2024-07-28 09:59:47,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.62 vs. limit=10.0 +2024-07-28 09:59:50,251 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.396e+01 5.987e+01 6.537e+01 9.206e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 09:59:50,946 INFO [train.py:1114] (2/4) Epoch 11, batch 3750, loss[loss=0.1729, simple_loss=0.2629, pruned_loss=0.04147, over 4812.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2805, pruned_loss=0.05258, over 943230.19 frames. ], batch size: 11, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:57,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141298.66666666666, ans=0.1 +2024-07-28 10:00:04,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.66 vs. limit=10.0 +2024-07-28 10:00:31,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=141352.0, ans=0.0 +2024-07-28 10:00:33,296 INFO [train.py:1114] (2/4) Epoch 11, batch 3800, loss[loss=0.1835, simple_loss=0.2721, pruned_loss=0.0474, over 4812.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2799, pruned_loss=0.0528, over 941111.87 frames. 
], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:00:37,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=141365.33333333334, ans=0.07 +2024-07-28 10:00:50,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=141392.0, ans=0.0 +2024-07-28 10:00:51,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=141392.0, ans=0.0 +2024-07-28 10:01:03,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=141418.66666666666, ans=0.0 +2024-07-28 10:01:07,864 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.570e+01 6.150e+01 7.131e+01 1.072e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:01:08,501 INFO [train.py:1114] (2/4) Epoch 11, batch 3850, loss[loss=0.1762, simple_loss=0.2641, pruned_loss=0.04416, over 4624.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.281, pruned_loss=0.05356, over 941951.69 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:11,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=141432.0, ans=0.125 +2024-07-28 10:01:12,616 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:01:18,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141445.33333333334, ans=0.1 +2024-07-28 10:01:19,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=141445.33333333334, ans=0.0 +2024-07-28 10:01:22,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141458.66666666666, ans=0.1 +2024-07-28 10:01:25,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141458.66666666666, ans=0.1 +2024-07-28 10:01:29,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.18 vs. limit=15.0 +2024-07-28 10:01:42,029 INFO [train.py:1114] (2/4) Epoch 11, batch 3900, loss[loss=0.2178, simple_loss=0.3099, pruned_loss=0.06284, over 4808.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2821, pruned_loss=0.05356, over 941844.48 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:46,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.49 vs. limit=15.0 +2024-07-28 10:01:55,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=141512.0, ans=0.125 +2024-07-28 10:01:57,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=141525.33333333334, ans=0.125 +2024-07-28 10:02:01,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.13 vs. 
limit=22.5 +2024-07-28 10:02:09,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=141538.66666666666, ans=0.125 +2024-07-28 10:02:14,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=141552.0, ans=0.025 +2024-07-28 10:02:16,439 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.644e+01 6.231e+01 6.992e+01 1.031e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 10:02:17,224 INFO [train.py:1114] (2/4) Epoch 11, batch 3950, loss[loss=0.2007, simple_loss=0.2906, pruned_loss=0.05536, over 4858.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2826, pruned_loss=0.05373, over 944087.96 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:02:17,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=141565.33333333334, ans=0.0 +2024-07-28 10:02:18,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141565.33333333334, ans=0.1 +2024-07-28 10:02:22,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=141565.33333333334, ans=0.5 +2024-07-28 10:02:26,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=141578.66666666666, ans=0.125 +2024-07-28 10:02:36,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141592.0, ans=0.1 +2024-07-28 10:02:38,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=141605.33333333334, ans=0.125 +2024-07-28 10:02:52,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=141618.66666666666, ans=0.125 +2024-07-28 10:02:54,309 INFO [train.py:1114] (2/4) Epoch 11, batch 4000, loss[loss=0.1896, simple_loss=0.2742, pruned_loss=0.05247, over 4783.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2827, pruned_loss=0.05422, over 940278.34 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:02:55,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=141632.0, ans=0.0 +2024-07-28 10:03:15,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=141645.33333333334, ans=0.125 +2024-07-28 10:03:35,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=141672.0, ans=0.125 +2024-07-28 10:03:47,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.35 vs. limit=15.0 +2024-07-28 10:03:53,271 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.441e+01 6.028e+01 6.961e+01 9.604e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 10:03:53,948 INFO [train.py:1114] (2/4) Epoch 11, batch 4050, loss[loss=0.2268, simple_loss=0.3062, pruned_loss=0.07365, over 3318.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2823, pruned_loss=0.05425, over 938722.47 frames. 
], batch size: 35, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:04,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141698.66666666666, ans=0.1 +2024-07-28 10:04:17,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=12.0 +2024-07-28 10:04:22,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=141738.66666666666, ans=0.0 +2024-07-28 10:04:26,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=141738.66666666666, ans=15.0 +2024-07-28 10:04:33,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=141752.0, ans=0.09899494936611666 +2024-07-28 10:04:34,717 INFO [train.py:1114] (2/4) Epoch 11, batch 4100, loss[loss=0.189, simple_loss=0.2757, pruned_loss=0.05117, over 4899.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2839, pruned_loss=0.05557, over 937812.67 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:43,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=141778.66666666666, ans=0.035 +2024-07-28 10:04:59,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=141778.66666666666, ans=0.09899494936611666 +2024-07-28 10:05:04,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141792.0, ans=0.1 +2024-07-28 10:05:07,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=141805.33333333334, ans=0.125 +2024-07-28 10:05:14,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=141805.33333333334, ans=0.0 +2024-07-28 10:05:19,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.41 vs. limit=22.5 +2024-07-28 10:05:36,363 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.886e+01 6.549e+01 7.693e+01 1.193e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 10:05:37,302 INFO [train.py:1114] (2/4) Epoch 11, batch 4150, loss[loss=0.1992, simple_loss=0.2765, pruned_loss=0.06094, over 4829.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2843, pruned_loss=0.05578, over 937939.54 frames. ], batch size: 13, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:06:05,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=141845.33333333334, ans=0.2 +2024-07-28 10:06:06,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=141845.33333333334, ans=0.09899494936611666 +2024-07-28 10:06:42,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=141858.66666666666, ans=0.125 +2024-07-28 10:06:43,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. 
limit=15.0 +2024-07-28 10:06:58,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=141872.0, ans=0.2 +2024-07-28 10:07:15,450 INFO [train.py:1114] (2/4) Epoch 11, batch 4200, loss[loss=0.232, simple_loss=0.3101, pruned_loss=0.07699, over 4904.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2833, pruned_loss=0.055, over 939365.87 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:07:32,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=141912.0, ans=0.0 +2024-07-28 10:07:44,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141925.33333333334, ans=0.1 +2024-07-28 10:07:49,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=141925.33333333334, ans=0.5 +2024-07-28 10:07:58,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=141938.66666666666, ans=0.2 +2024-07-28 10:08:19,279 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:08:25,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=141952.0, ans=0.0 +2024-07-28 10:08:41,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.645e+01 6.237e+01 6.874e+01 1.098e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 10:08:41,702 INFO [train.py:1114] (2/4) Epoch 11, batch 4250, loss[loss=0.1592, simple_loss=0.2498, pruned_loss=0.03424, over 4644.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2833, pruned_loss=0.05496, over 940587.06 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:08:43,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=141965.33333333334, ans=0.125 +2024-07-28 10:08:51,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-28 10:08:55,550 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.34 vs. limit=22.5 +2024-07-28 10:09:12,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=142005.33333333334, ans=0.0 +2024-07-28 10:09:24,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=142018.66666666666, ans=0.0 +2024-07-28 10:09:25,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=142018.66666666666, ans=0.0 +2024-07-28 10:09:30,548 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.28 vs. limit=15.0 +2024-07-28 10:09:31,297 INFO [train.py:1114] (2/4) Epoch 11, batch 4300, loss[loss=0.1489, simple_loss=0.2427, pruned_loss=0.02758, over 4761.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2832, pruned_loss=0.05474, over 940509.65 frames. 
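The recurring `WARNING [optim.py:487] ... Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=...` lines report gradient-norm statistics over recent batches; note that the logged threshold consistently lands at about `Clipping_scale` times the median quartile (for example 2 × 5.948e+01 ≈ 1.190e+02 above). A minimal sketch of that kind of adaptive clipping; the window size and bookkeeping here are assumptions, not the real `optim.py` logic:

```python
import torch
from collections import deque

class QuartileGradClipper:
    """Hypothetical adaptive clipper: track recent gradient norms, clip
    against clipping_scale * median, and report quartiles plus the fraction
    of recently clipped batches (the "percent-clipped" field)."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.clipped = deque(maxlen=window)

    def clip_(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.sqrt(sum((g ** 2).sum() for g in grads)).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # 2x the median norm
        self.clipped.append(norm > threshold)
        if norm > threshold:  # rescale all gradients in place
            for g in grads:
                g.mul_(threshold / norm)
        return threshold  # the value logged as "threshold=..."
```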
], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:09:46,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=142045.33333333334, ans=0.125 +2024-07-28 10:09:48,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=142045.33333333334, ans=0.2 +2024-07-28 10:09:52,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.92 vs. limit=15.0 +2024-07-28 10:09:52,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=142045.33333333334, ans=0.0 +2024-07-28 10:09:52,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0 +2024-07-28 10:09:56,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=142058.66666666666, ans=0.125 +2024-07-28 10:10:06,860 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:10:13,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=142058.66666666666, ans=0.125 +2024-07-28 10:10:25,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=142085.33333333334, ans=0.125 +2024-07-28 10:10:27,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.309e+01 5.443e+01 5.948e+01 6.522e+01 9.090e+01, threshold=1.190e+02, percent-clipped=0.0 +2024-07-28 10:10:28,346 INFO [train.py:1114] (2/4) Epoch 11, batch 4350, loss[loss=0.1913, simple_loss=0.2803, pruned_loss=0.05111, over 4758.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2838, pruned_loss=0.05461, over 940952.30 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:10:30,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=142098.66666666666, ans=0.2 +2024-07-28 10:10:44,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=142125.33333333334, ans=0.0 +2024-07-28 10:10:48,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=142125.33333333334, ans=0.0 +2024-07-28 10:10:58,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142152.0, ans=0.1 +2024-07-28 10:11:05,767 INFO [train.py:1114] (2/4) Epoch 11, batch 4400, loss[loss=0.2153, simple_loss=0.3231, pruned_loss=0.05377, over 4810.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2837, pruned_loss=0.05442, over 940967.00 frames. 
], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:11:07,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=142165.33333333334, ans=0.125 +2024-07-28 10:11:11,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=142165.33333333334, ans=0.0 +2024-07-28 10:11:33,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.52 vs. limit=22.5 +2024-07-28 10:11:37,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.59 vs. limit=15.0 +2024-07-28 10:11:38,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.755e+01 6.372e+01 7.291e+01 1.018e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 10:11:39,287 INFO [train.py:1114] (2/4) Epoch 11, batch 4450, loss[loss=0.1469, simple_loss=0.2355, pruned_loss=0.0292, over 4954.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2842, pruned_loss=0.05493, over 938813.70 frames. ], batch size: 12, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:11:40,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=142232.0, ans=0.0 +2024-07-28 10:12:20,688 INFO [train.py:1114] (2/4) Epoch 11, batch 4500, loss[loss=0.2091, simple_loss=0.3105, pruned_loss=0.05381, over 4749.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2855, pruned_loss=0.05532, over 938527.99 frames. ], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:57,412 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.457e+01 5.934e+01 6.532e+01 9.481e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 10:12:58,145 INFO [train.py:1114] (2/4) Epoch 11, batch 4550, loss[loss=0.185, simple_loss=0.267, pruned_loss=0.05151, over 4907.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2847, pruned_loss=0.05496, over 940416.75 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:13:00,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=142365.33333333334, ans=0.125 +2024-07-28 10:13:08,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=142378.66666666666, ans=0.2 +2024-07-28 10:13:12,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=142392.0, ans=0.125 +2024-07-28 10:13:16,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.08 vs. 
limit=15.0 +2024-07-28 10:13:16,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=142392.0, ans=0.125 +2024-07-28 10:13:16,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=142392.0, ans=0.2 +2024-07-28 10:13:18,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=142405.33333333334, ans=0.125 +2024-07-28 10:13:25,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=142418.66666666666, ans=0.125 +2024-07-28 10:13:33,036 INFO [train.py:1114] (2/4) Epoch 11, batch 4600, loss[loss=0.1655, simple_loss=0.2697, pruned_loss=0.03068, over 4445.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2838, pruned_loss=0.0549, over 938512.29 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:13:33,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=142432.0, ans=0.0 +2024-07-28 10:13:35,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=142432.0, ans=0.2 +2024-07-28 10:13:42,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=142432.0, ans=0.025 +2024-07-28 10:14:09,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142472.0, ans=0.1 +2024-07-28 10:14:16,898 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.751e+01 6.441e+01 7.092e+01 1.186e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 10:14:22,123 INFO [train.py:1114] (2/4) Epoch 11, batch 4650, loss[loss=0.1877, simple_loss=0.2722, pruned_loss=0.05161, over 4844.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2844, pruned_loss=0.05461, over 940087.93 frames. ], batch size: 16, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:14:30,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.63 vs. limit=12.0 +2024-07-28 10:14:41,323 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.45 vs. limit=15.0 +2024-07-28 10:14:48,285 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:15:05,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=142552.0, ans=0.0 +2024-07-28 10:15:09,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142565.33333333334, ans=0.0 +2024-07-28 10:15:09,642 INFO [train.py:1114] (2/4) Epoch 11, batch 4700, loss[loss=0.1859, simple_loss=0.2692, pruned_loss=0.05134, over 4703.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2826, pruned_loss=0.05377, over 937550.38 frames. 
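The `grad_scale` field in the `train.py` entries steps from 32.0 to 64.0 around batch 4400 and back to 32.0 near batch 5450, which looks like standard dynamic loss scaling for mixed-precision training: the scale doubles after a long overflow-free run and is halved on overflow. A sketch of that pattern using PyTorch's stock `GradScaler` (the actual train.py may manage the scale itself; `compute_loss` is a hypothetical stand-in):

```python
import torch

# Stock PyTorch dynamic loss scaling; init_scale=32.0 mirrors the logged
# starting value. growth_interval and backoff_factor are illustrative.
scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=2000)

def training_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)   # hypothetical loss helper
    scaler.scale(loss).backward()           # backward on the scaled loss
    scaler.step(optimizer)                  # unscales; skips step on overflow
    scaler.update()                         # doubles or halves the scale
    return scaler.get_scale()               # the value logged as grad_scale
```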
], batch size: 11, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:15:20,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=142578.66666666666, ans=0.125 +2024-07-28 10:15:30,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142592.0, ans=0.125 +2024-07-28 10:15:49,436 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.868e+01 6.350e+01 7.061e+01 1.022e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 10:15:50,091 INFO [train.py:1114] (2/4) Epoch 11, batch 4750, loss[loss=0.2588, simple_loss=0.3404, pruned_loss=0.0886, over 4486.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2833, pruned_loss=0.05473, over 935181.18 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:15:51,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=142632.0, ans=0.125 +2024-07-28 10:15:58,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=142645.33333333334, ans=0.125 +2024-07-28 10:16:02,906 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=142645.33333333334, ans=0.0 +2024-07-28 10:16:03,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.42 vs. limit=15.0 +2024-07-28 10:16:05,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=142645.33333333334, ans=0.0 +2024-07-28 10:16:07,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=142658.66666666666, ans=0.1 +2024-07-28 10:16:09,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=142658.66666666666, ans=0.0 +2024-07-28 10:16:09,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=142658.66666666666, ans=0.05 +2024-07-28 10:16:24,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=142685.33333333334, ans=0.0 +2024-07-28 10:16:28,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=142685.33333333334, ans=0.125 +2024-07-28 10:16:42,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=142698.66666666666, ans=0.2 +2024-07-28 10:16:42,920 INFO [train.py:1114] (2/4) Epoch 11, batch 4800, loss[loss=0.1995, simple_loss=0.29, pruned_loss=0.05455, over 4705.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2826, pruned_loss=0.05456, over 932718.51 frames. 
], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:17:21,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=142752.0, ans=0.125 +2024-07-28 10:17:27,025 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.658e+01 6.076e+01 6.872e+01 9.188e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 10:17:35,867 INFO [train.py:1114] (2/4) Epoch 11, batch 4850, loss[loss=0.2259, simple_loss=0.3071, pruned_loss=0.07236, over 4740.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2835, pruned_loss=0.0549, over 932067.70 frames. ], batch size: 14, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:17:37,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=142765.33333333334, ans=0.125 +2024-07-28 10:17:40,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=142765.33333333334, ans=0.0 +2024-07-28 10:17:43,025 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.20 vs. limit=15.0 +2024-07-28 10:17:49,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142792.0, ans=0.1 +2024-07-28 10:18:08,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=142818.66666666666, ans=0.125 +2024-07-28 10:18:15,437 INFO [train.py:1114] (2/4) Epoch 11, batch 4900, loss[loss=0.1958, simple_loss=0.2903, pruned_loss=0.05067, over 4760.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2819, pruned_loss=0.05398, over 933887.79 frames. ], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:18:34,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=142858.66666666666, ans=0.0 +2024-07-28 10:18:34,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=142858.66666666666, ans=0.125 +2024-07-28 10:18:35,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=142858.66666666666, ans=0.1 +2024-07-28 10:18:37,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=142858.66666666666, ans=0.125 +2024-07-28 10:18:42,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=142872.0, ans=0.0 +2024-07-28 10:18:52,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=142885.33333333334, ans=0.0 +2024-07-28 10:18:53,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.15 vs. limit=15.0 +2024-07-28 10:18:54,125 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.557e+01 6.177e+01 6.945e+01 1.051e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 10:18:54,887 INFO [train.py:1114] (2/4) Epoch 11, batch 4950, loss[loss=0.2278, simple_loss=0.2945, pruned_loss=0.08053, over 3325.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2837, pruned_loss=0.05512, over 931114.44 frames. 
], batch size: 35, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:18:59,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=142898.66666666666, ans=0.0 +2024-07-28 10:19:04,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.69 vs. limit=12.0 +2024-07-28 10:19:05,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142912.0, ans=0.1 +2024-07-28 10:19:18,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=142938.66666666666, ans=0.125 +2024-07-28 10:19:19,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=142938.66666666666, ans=0.0 +2024-07-28 10:19:32,853 INFO [train.py:1114] (2/4) Epoch 11, batch 5000, loss[loss=0.2126, simple_loss=0.3176, pruned_loss=0.05374, over 4663.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2828, pruned_loss=0.05458, over 935030.45 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:19:44,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=142965.33333333334, ans=0.125 +2024-07-28 10:19:44,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=142965.33333333334, ans=0.07 +2024-07-28 10:19:53,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=142978.66666666666, ans=0.125 +2024-07-28 10:20:02,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=143005.33333333334, ans=0.0 +2024-07-28 10:20:08,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143018.66666666666, ans=0.0 +2024-07-28 10:20:17,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.560e+01 5.974e+01 6.425e+01 8.960e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 10:20:18,546 INFO [train.py:1114] (2/4) Epoch 11, batch 5050, loss[loss=0.1983, simple_loss=0.2669, pruned_loss=0.06481, over 4851.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2819, pruned_loss=0.05368, over 937677.93 frames. 
], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:20:22,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=143032.0, ans=0.125 +2024-07-28 10:20:28,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143045.33333333334, ans=0.125 +2024-07-28 10:20:33,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=143058.66666666666, ans=0.125 +2024-07-28 10:20:47,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=143085.33333333334, ans=0.2 +2024-07-28 10:20:49,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143085.33333333334, ans=0.1 +2024-07-28 10:20:50,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.02 vs. limit=12.0 +2024-07-28 10:20:53,951 INFO [train.py:1114] (2/4) Epoch 11, batch 5100, loss[loss=0.1807, simple_loss=0.2858, pruned_loss=0.03785, over 4773.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2825, pruned_loss=0.05438, over 934823.51 frames. ], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:20:55,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=143098.66666666666, ans=0.125 +2024-07-28 10:20:58,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143098.66666666666, ans=0.125 +2024-07-28 10:21:08,625 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:21:10,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143125.33333333334, ans=0.125 +2024-07-28 10:21:12,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. limit=6.0 +2024-07-28 10:21:16,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=143138.66666666666, ans=0.04949747468305833 +2024-07-28 10:21:16,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.51 vs. limit=15.0 +2024-07-28 10:21:31,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=143138.66666666666, ans=0.2 +2024-07-28 10:21:31,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=143138.66666666666, ans=0.0 +2024-07-28 10:21:33,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=143138.66666666666, ans=0.125 +2024-07-28 10:21:46,393 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.691e+01 6.335e+01 6.758e+01 9.887e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 10:21:47,062 INFO [train.py:1114] (2/4) Epoch 11, batch 5150, loss[loss=0.1961, simple_loss=0.2887, pruned_loss=0.05177, over 4835.00 frames. 
], tot_loss[loss=0.1962, simple_loss=0.2832, pruned_loss=0.05458, over 935704.94 frames. ], batch size: 16, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:22:00,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=15.0 +2024-07-28 10:22:01,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=143192.0, ans=0.0 +2024-07-28 10:22:11,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143205.33333333334, ans=0.125 +2024-07-28 10:22:22,790 INFO [train.py:1114] (2/4) Epoch 11, batch 5200, loss[loss=0.213, simple_loss=0.2974, pruned_loss=0.0643, over 4653.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.283, pruned_loss=0.05462, over 935882.15 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:22:33,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=143245.33333333334, ans=0.2 +2024-07-28 10:22:40,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143258.66666666666, ans=0.125 +2024-07-28 10:22:40,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=143258.66666666666, ans=0.07 +2024-07-28 10:22:40,999 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.41 vs. limit=6.0 +2024-07-28 10:22:48,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.46 vs. limit=12.0 +2024-07-28 10:22:56,374 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.779e+01 6.416e+01 7.170e+01 1.127e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 10:22:57,060 INFO [train.py:1114] (2/4) Epoch 11, batch 5250, loss[loss=0.173, simple_loss=0.2529, pruned_loss=0.04661, over 4888.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2816, pruned_loss=0.0537, over 935215.54 frames. ], batch size: 13, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:01,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=143298.66666666666, ans=0.035 +2024-07-28 10:23:02,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=143298.66666666666, ans=0.0 +2024-07-28 10:23:05,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.31 vs. limit=12.0 +2024-07-28 10:23:10,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143325.33333333334, ans=0.125 +2024-07-28 10:23:11,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=143325.33333333334, ans=0.125 +2024-07-28 10:23:15,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143325.33333333334, ans=0.1 +2024-07-28 10:23:30,544 INFO [train.py:1114] (2/4) Epoch 11, batch 5300, loss[loss=0.2158, simple_loss=0.3153, pruned_loss=0.05814, over 4636.00 frames. 
], tot_loss[loss=0.195, simple_loss=0.2822, pruned_loss=0.0539, over 933663.18 frames. ], batch size: 16, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:43,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=143378.66666666666, ans=0.125 +2024-07-28 10:23:46,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.97 vs. limit=10.0 +2024-07-28 10:24:00,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143405.33333333334, ans=0.1 +2024-07-28 10:24:08,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=143418.66666666666, ans=0.125 +2024-07-28 10:24:13,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.558e+01 6.072e+01 7.139e+01 1.045e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 10:24:14,195 INFO [train.py:1114] (2/4) Epoch 11, batch 5350, loss[loss=0.1555, simple_loss=0.238, pruned_loss=0.03644, over 4516.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2825, pruned_loss=0.05347, over 935813.30 frames. ], batch size: 10, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:15,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=143432.0, ans=0.0 +2024-07-28 10:24:17,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=143432.0, ans=0.0 +2024-07-28 10:24:19,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=143432.0, ans=0.0 +2024-07-28 10:24:26,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=143445.33333333334, ans=0.0 +2024-07-28 10:24:38,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=143472.0, ans=0.125 +2024-07-28 10:24:46,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143485.33333333334, ans=0.1 +2024-07-28 10:24:48,765 INFO [train.py:1114] (2/4) Epoch 11, batch 5400, loss[loss=0.2412, simple_loss=0.3137, pruned_loss=0.08439, over 4113.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2837, pruned_loss=0.05473, over 929323.30 frames. 
], batch size: 25, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:49,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=143498.66666666666, ans=0.0 +2024-07-28 10:24:53,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=143498.66666666666, ans=0.0 +2024-07-28 10:24:54,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=143512.0, ans=0.025 +2024-07-28 10:25:17,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=143525.33333333334, ans=0.125 +2024-07-28 10:25:20,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=143525.33333333334, ans=0.025 +2024-07-28 10:25:36,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=143552.0, ans=0.125 +2024-07-28 10:25:40,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.36 vs. limit=15.0 +2024-07-28 10:25:40,463 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.583e+01 6.179e+01 6.977e+01 1.082e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 10:25:40,510 INFO [train.py:1114] (2/4) Epoch 11, batch 5450, loss[loss=0.1536, simple_loss=0.2352, pruned_loss=0.03601, over 4705.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2835, pruned_loss=0.05479, over 932337.58 frames. ], batch size: 11, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:25:45,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=143565.33333333334, ans=0.0 +2024-07-28 10:25:49,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=143578.66666666666, ans=0.2 +2024-07-28 10:25:52,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=143578.66666666666, ans=0.125 +2024-07-28 10:25:54,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.01 vs. limit=22.5 +2024-07-28 10:25:55,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.64 vs. limit=15.0 +2024-07-28 10:26:06,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143605.33333333334, ans=0.125 +2024-07-28 10:26:09,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-28 10:26:13,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=143618.66666666666, ans=0.0 +2024-07-28 10:26:21,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=143618.66666666666, ans=0.125 +2024-07-28 10:26:23,927 INFO [train.py:1114] (2/4) Epoch 11, batch 5500, loss[loss=0.2124, simple_loss=0.299, pruned_loss=0.0629, over 4448.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2834, pruned_loss=0.0554, over 930145.22 frames. 
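The `Whitening: name=..., metric=M vs. limit=L` lines track how far a module's activation covariance is from a scaled identity: a metric near 1.0 means the channels are already decorrelated ("white"), and the `vs. limit` comparison suggests a penalty is engaged only when the metric exceeds the limit. The formula below, the eigenvalue ratio E[λ²]/E[λ]², is an assumption chosen because it equals 1.0 exactly for an isotropic covariance; it is not necessarily the `scaling.py` definition:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Assumed metric: mean squared eigenvalue of the channel covariance
    divided by the squared mean eigenvalue, per channel group; 1.0 when
    the covariance is a multiple of the identity."""
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    metrics = []
    for g in range(num_groups):
        cov = (x[:, g, :].T @ x[:, g, :]) / num_frames
        eigs = torch.linalg.eigvalsh(cov)
        metrics.append((eigs ** 2).mean() / eigs.mean() ** 2)
    return float(torch.stack(metrics).mean())

x = torch.randn(1000, 384)      # already nearly white activations
print(whitening_metric(x))      # close to 1, far below a limit like 15.0
```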
], batch size: 26, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:26:24,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=143632.0, ans=0.125 +2024-07-28 10:26:27,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=143632.0, ans=0.125 +2024-07-28 10:32:01,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=143658.66666666666, ans=0.0 +2024-07-28 10:32:02,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=143658.66666666666, ans=0.125 +2024-07-28 10:32:04,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=143672.0, ans=0.125 +2024-07-28 10:32:04,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=143672.0, ans=0.0 +2024-07-28 10:32:07,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143672.0, ans=0.125 +2024-07-28 10:32:18,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=143685.33333333334, ans=0.025 +2024-07-28 10:32:21,537 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.05 vs. limit=15.0 +2024-07-28 10:32:22,312 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.760e+01 6.498e+01 7.825e+01 1.226e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 10:32:22,345 INFO [train.py:1114] (2/4) Epoch 11, batch 5550, loss[loss=0.2303, simple_loss=0.3081, pruned_loss=0.07623, over 4718.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2824, pruned_loss=0.05512, over 932696.54 frames. ], batch size: 12, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:32:29,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143698.66666666666, ans=0.125 +2024-07-28 10:32:43,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.80 vs. limit=10.0 +2024-07-28 10:32:45,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=143738.66666666666, ans=0.0 +2024-07-28 10:32:49,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=143738.66666666666, ans=10.0 +2024-07-28 10:32:51,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=143752.0, ans=0.125 +2024-07-28 10:32:58,923 INFO [train.py:1114] (2/4) Epoch 11, batch 5600, loss[loss=0.224, simple_loss=0.3016, pruned_loss=0.07315, over 4739.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2825, pruned_loss=0.05497, over 934132.63 frames. ], batch size: 14, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:33:00,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. 
limit=12.0 +2024-07-28 10:33:04,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143765.33333333334, ans=0.125 +2024-07-28 10:33:07,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143778.66666666666, ans=0.125 +2024-07-28 10:33:11,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=143778.66666666666, ans=0.1 +2024-07-28 10:33:29,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143805.33333333334, ans=0.125 +2024-07-28 10:33:42,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143818.66666666666, ans=0.1 +2024-07-28 10:33:43,363 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.077e+01 6.890e+01 8.236e+01 1.387e+02, threshold=1.378e+02, percent-clipped=1.0 +2024-07-28 10:33:44,790 INFO [train.py:1114] (2/4) Epoch 11, batch 5650, loss[loss=0.2255, simple_loss=0.2983, pruned_loss=0.07637, over 4480.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2818, pruned_loss=0.05472, over 936967.60 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:33:50,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. limit=6.0 +2024-07-28 10:33:57,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=143845.33333333334, ans=0.0 +2024-07-28 10:34:00,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=143858.66666666666, ans=0.2 +2024-07-28 10:34:07,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143872.0, ans=0.125 +2024-07-28 10:34:07,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=143872.0, ans=0.2 +2024-07-28 10:34:10,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143872.0, ans=0.1 +2024-07-28 10:34:12,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143872.0, ans=0.125 +2024-07-28 10:34:15,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=143885.33333333334, ans=0.125 +2024-07-28 10:34:16,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143885.33333333334, ans=0.1 +2024-07-28 10:34:21,306 INFO [train.py:1114] (2/4) Epoch 11, batch 5700, loss[loss=0.2123, simple_loss=0.2944, pruned_loss=0.06506, over 4694.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2826, pruned_loss=0.0549, over 938005.58 frames. 
], batch size: 13, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:34:22,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143898.66666666666, ans=0.1 +2024-07-28 10:34:34,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=143912.0, ans=0.05 +2024-07-28 10:34:42,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=143938.66666666666, ans=0.07 +2024-07-28 10:34:49,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=143952.0, ans=0.2 +2024-07-28 10:34:52,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=143952.0, ans=0.125 +2024-07-28 10:34:56,156 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.558e+01 6.017e+01 6.629e+01 9.464e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 10:34:56,189 INFO [train.py:1114] (2/4) Epoch 11, batch 5750, loss[loss=0.2212, simple_loss=0.3064, pruned_loss=0.06797, over 4782.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2831, pruned_loss=0.05506, over 938111.26 frames. ], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:34:59,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=143965.33333333334, ans=0.09899494936611666 +2024-07-28 10:35:14,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0 +2024-07-28 10:35:16,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=143992.0, ans=0.125 +2024-07-28 10:35:27,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144005.33333333334, ans=0.125 +2024-07-28 10:35:30,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.85 vs. limit=10.0 +2024-07-28 10:35:37,664 INFO [train.py:1114] (2/4) Epoch 11, batch 5800, loss[loss=0.2022, simple_loss=0.2913, pruned_loss=0.05654, over 4752.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2832, pruned_loss=0.05526, over 937068.56 frames. 
], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:35:37,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=144032.0, ans=0.0 +2024-07-28 10:35:44,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=144045.33333333334, ans=0.125 +2024-07-28 10:35:59,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=144072.0, ans=10.0 +2024-07-28 10:36:04,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=144085.33333333334, ans=0.09899494936611666 +2024-07-28 10:36:11,533 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.791e+01 6.490e+01 7.663e+01 1.100e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 10:36:11,580 INFO [train.py:1114] (2/4) Epoch 11, batch 5850, loss[loss=0.2284, simple_loss=0.3247, pruned_loss=0.06604, over 4512.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2832, pruned_loss=0.05533, over 937700.57 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:36:13,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=144098.66666666666, ans=0.125 +2024-07-28 10:36:44,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.77 vs. limit=10.0 +2024-07-28 10:36:45,263 INFO [train.py:1114] (2/4) Epoch 11, batch 5900, loss[loss=0.2167, simple_loss=0.3072, pruned_loss=0.06314, over 4680.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2824, pruned_loss=0.05466, over 938014.63 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:37:23,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=144192.0, ans=0.0 +2024-07-28 10:37:25,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.20 vs. limit=15.0 +2024-07-28 10:37:40,535 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.784e+01 6.286e+01 7.070e+01 1.230e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 10:37:40,568 INFO [train.py:1114] (2/4) Epoch 11, batch 5950, loss[loss=0.1957, simple_loss=0.2821, pruned_loss=0.05465, over 4689.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2829, pruned_loss=0.05458, over 940099.09 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:37:40,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.55 vs. limit=15.0 +2024-07-28 10:37:52,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144245.33333333334, ans=0.1 +2024-07-28 10:37:58,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-28 10:38:25,029 INFO [train.py:1114] (2/4) Epoch 11, batch 6000, loss[loss=0.175, simple_loss=0.2579, pruned_loss=0.04603, over 4361.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.282, pruned_loss=0.05422, over 937748.14 frames. 
], batch size: 25, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:38:25,029 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 10:39:08,753 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1692, simple_loss=0.2732, pruned_loss=0.03262, over 944034.00 frames. +2024-07-28 10:39:08,754 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 10:39:09,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.35 vs. limit=6.0 +2024-07-28 10:39:18,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=144312.0, ans=0.0 +2024-07-28 10:39:45,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.724e+01 6.626e+01 8.238e+01 1.220e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 10:39:45,839 INFO [train.py:1114] (2/4) Epoch 11, batch 6050, loss[loss=0.2066, simple_loss=0.2882, pruned_loss=0.0625, over 4778.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2816, pruned_loss=0.05426, over 938736.00 frames. ], batch size: 12, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:39:47,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=144365.33333333334, ans=0.025 +2024-07-28 10:40:04,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144378.66666666666, ans=0.125 +2024-07-28 10:40:12,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=144405.33333333334, ans=0.125 +2024-07-28 10:40:12,147 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:40:12,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=144405.33333333334, ans=0.2 +2024-07-28 10:40:16,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=144405.33333333334, ans=0.025 +2024-07-28 10:40:16,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144405.33333333334, ans=0.125 +2024-07-28 10:40:22,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=144418.66666666666, ans=0.0 +2024-07-28 10:40:28,023 INFO [train.py:1114] (2/4) Epoch 11, batch 6100, loss[loss=0.2523, simple_loss=0.3212, pruned_loss=0.09172, over 4687.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2818, pruned_loss=0.05393, over 938384.47 frames. 
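The block above at batch 6000 ("Computing validation loss", the `validation: loss=0.1692 ... over 944034.00 frames` entry, and the memory report) shows the training loop pausing at a fixed batch interval to evaluate on the dev set and report peak GPU memory. A minimal sketch of that pattern; `valid_interval` and `compute_loss` are hypothetical names, while `torch.cuda.max_memory_allocated` is the real API behind the "Maximum memory allocated" line:

```python
import logging
import torch

def maybe_validate(model, valid_dl, batch_idx, compute_loss,
                   valid_interval=6000, device="cuda:0"):
    # Run validation every valid_interval batches, as at batch 6000 above.
    if batch_idx == 0 or batch_idx % valid_interval != 0:
        return
    logging.info("Computing validation loss")
    model.eval()
    with torch.no_grad():
        losses = [compute_loss(model, batch).item() for batch in valid_dl]
    model.train()
    logging.info(f"validation: loss={sum(losses) / len(losses):.4f}")
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    logging.info(f"Maximum memory allocated so far is {peak_mb}MB")
```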
], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:40:38,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=144445.33333333334, ans=0.0 +2024-07-28 10:40:42,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=144458.66666666666, ans=0.125 +2024-07-28 10:40:49,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=144472.0, ans=0.0 +2024-07-28 10:40:56,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=144485.33333333334, ans=0.0 +2024-07-28 10:40:58,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=144485.33333333334, ans=0.025 +2024-07-28 10:40:59,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=144485.33333333334, ans=0.125 +2024-07-28 10:41:01,435 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.304e+01 5.350e+01 6.027e+01 7.047e+01 1.301e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 10:41:01,468 INFO [train.py:1114] (2/4) Epoch 11, batch 6150, loss[loss=0.2448, simple_loss=0.3191, pruned_loss=0.08524, over 3483.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2816, pruned_loss=0.05369, over 937229.80 frames. ], batch size: 36, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:41:01,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=144498.66666666666, ans=0.125 +2024-07-28 10:41:05,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=144498.66666666666, ans=0.125 +2024-07-28 10:41:14,654 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-28 10:41:21,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=144525.33333333334, ans=0.0 +2024-07-28 10:41:24,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=144525.33333333334, ans=0.0 +2024-07-28 10:41:39,925 INFO [train.py:1114] (2/4) Epoch 11, batch 6200, loss[loss=0.1865, simple_loss=0.2879, pruned_loss=0.0425, over 4748.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2824, pruned_loss=0.05415, over 936433.15 frames. 
], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:41:45,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=144565.33333333334, ans=0.0 +2024-07-28 10:41:58,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=144592.0, ans=0.0 +2024-07-28 10:42:04,256 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:42:11,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=144618.66666666666, ans=0.2 +2024-07-28 10:42:15,875 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.697e+01 6.150e+01 7.002e+01 1.067e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:42:15,908 INFO [train.py:1114] (2/4) Epoch 11, batch 6250, loss[loss=0.1942, simple_loss=0.2756, pruned_loss=0.05633, over 4818.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2832, pruned_loss=0.05488, over 933166.22 frames. ], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:18,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=144632.0, ans=0.025 +2024-07-28 10:42:38,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=144658.66666666666, ans=0.0 +2024-07-28 10:42:41,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144672.0, ans=0.1 +2024-07-28 10:42:52,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=144685.33333333334, ans=0.125 +2024-07-28 10:42:53,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=6.0 +2024-07-28 10:42:53,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144685.33333333334, ans=0.1 +2024-07-28 10:42:55,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=144685.33333333334, ans=0.2 +2024-07-28 10:42:58,538 INFO [train.py:1114] (2/4) Epoch 11, batch 6300, loss[loss=0.1778, simple_loss=0.2553, pruned_loss=0.05018, over 4507.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2826, pruned_loss=0.05505, over 930247.02 frames. ], batch size: 10, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:59,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.80 vs. 
limit=15.0 +2024-07-28 10:43:03,501 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:43:13,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=144725.33333333334, ans=0.125 +2024-07-28 10:43:25,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144738.66666666666, ans=0.0 +2024-07-28 10:43:36,749 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.336e+01 5.612e+01 6.120e+01 6.711e+01 9.743e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 10:43:36,782 INFO [train.py:1114] (2/4) Epoch 11, batch 6350, loss[loss=0.1967, simple_loss=0.2895, pruned_loss=0.052, over 4519.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2821, pruned_loss=0.05459, over 933947.97 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:43:37,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.45 vs. limit=22.5 +2024-07-28 10:43:42,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=144765.33333333334, ans=0.125 +2024-07-28 10:43:49,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=144778.66666666666, ans=0.2 +2024-07-28 10:43:55,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.10 vs. limit=22.5 +2024-07-28 10:43:57,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=144792.0, ans=0.2 +2024-07-28 10:43:58,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144805.33333333334, ans=0.1 +2024-07-28 10:44:11,964 INFO [train.py:1114] (2/4) Epoch 11, batch 6400, loss[loss=0.1918, simple_loss=0.2882, pruned_loss=0.04771, over 4644.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2828, pruned_loss=0.05483, over 934805.74 frames. 
], batch size: 13, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:14,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=144832.0, ans=0.0 +2024-07-28 10:44:17,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=144832.0, ans=0.025 +2024-07-28 10:44:21,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=144845.33333333334, ans=0.0 +2024-07-28 10:44:23,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=144845.33333333334, ans=0.125 +2024-07-28 10:44:26,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144858.66666666666, ans=0.1 +2024-07-28 10:44:41,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=144885.33333333334, ans=0.125 +2024-07-28 10:44:41,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=144885.33333333334, ans=0.2 +2024-07-28 10:44:41,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.11 vs. limit=15.0 +2024-07-28 10:44:45,102 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.883e+01 6.533e+01 7.974e+01 1.055e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 10:44:45,135 INFO [train.py:1114] (2/4) Epoch 11, batch 6450, loss[loss=0.2482, simple_loss=0.3333, pruned_loss=0.08155, over 4494.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2832, pruned_loss=0.05457, over 938524.53 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:46,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=144898.66666666666, ans=0.125 +2024-07-28 10:44:52,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=144912.0, ans=0.07 +2024-07-28 10:44:52,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=144912.0, ans=0.025 +2024-07-28 10:45:16,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=144952.0, ans=0.0 +2024-07-28 10:45:18,202 INFO [train.py:1114] (2/4) Epoch 11, batch 6500, loss[loss=0.2247, simple_loss=0.3053, pruned_loss=0.072, over 3341.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2822, pruned_loss=0.05389, over 939700.74 frames. 
], batch size: 35, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:45:25,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144978.66666666666, ans=0.125 +2024-07-28 10:45:31,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=144992.0, ans=0.015 +2024-07-28 10:45:31,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=144992.0, ans=0.0 +2024-07-28 10:45:37,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=145005.33333333334, ans=0.2 +2024-07-28 10:45:38,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=145005.33333333334, ans=0.025 +2024-07-28 10:45:39,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=145005.33333333334, ans=0.035 +2024-07-28 10:45:43,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=145005.33333333334, ans=0.125 +2024-07-28 10:45:51,643 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.465e+01 5.571e+01 6.263e+01 7.370e+01 1.165e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 10:45:51,690 INFO [train.py:1114] (2/4) Epoch 11, batch 6550, loss[loss=0.1757, simple_loss=0.2595, pruned_loss=0.04594, over 4813.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.282, pruned_loss=0.05335, over 942998.58 frames. ], batch size: 11, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:46:03,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.71 vs. limit=22.5 +2024-07-28 10:46:24,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=145085.33333333334, ans=0.09899494936611666 +2024-07-28 10:46:25,936 INFO [train.py:1114] (2/4) Epoch 11, batch 6600, loss[loss=0.1938, simple_loss=0.2766, pruned_loss=0.05552, over 4934.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2814, pruned_loss=0.05293, over 944870.93 frames. 
], batch size: 14, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:46:37,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145112.0, ans=0.125 +2024-07-28 10:46:38,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=145112.0, ans=0.125 +2024-07-28 10:46:51,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=145138.66666666666, ans=0.125 +2024-07-28 10:46:54,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=145152.0, ans=0.125 +2024-07-28 10:46:55,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=145152.0, ans=0.0 +2024-07-28 10:46:57,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=145152.0, ans=0.2 +2024-07-28 10:46:59,238 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.651e+01 6.150e+01 6.851e+01 8.170e+01 1.263e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-28 10:46:59,302 INFO [train.py:1114] (2/4) Epoch 11, batch 6650, loss[loss=0.223, simple_loss=0.315, pruned_loss=0.06552, over 4604.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2817, pruned_loss=0.05355, over 943320.41 frames. ], batch size: 17, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:47:06,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-07-28 10:47:18,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=145178.66666666666, ans=0.125 +2024-07-28 10:47:27,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=145192.0, ans=0.0 +2024-07-28 10:47:41,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=145218.66666666666, ans=0.0 +2024-07-28 10:47:43,297 INFO [train.py:1114] (2/4) Epoch 11, batch 6700, loss[loss=0.218, simple_loss=0.3087, pruned_loss=0.06365, over 4682.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2826, pruned_loss=0.05398, over 942414.20 frames. ], batch size: 19, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:47:47,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145232.0, ans=0.125 +2024-07-28 10:47:56,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=145258.66666666666, ans=0.125 +2024-07-28 10:48:18,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-07-28 10:48:18,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=145285.33333333334, ans=0.0 +2024-07-28 10:48:21,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.50 vs. 
limit=15.0 +2024-07-28 10:48:21,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+01 5.801e+01 6.276e+01 7.380e+01 1.183e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 10:48:21,974 INFO [train.py:1114] (2/4) Epoch 11, batch 6750, loss[loss=0.1812, simple_loss=0.276, pruned_loss=0.04325, over 4259.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2828, pruned_loss=0.05416, over 940295.25 frames. ], batch size: 25, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:48:26,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=145298.66666666666, ans=0.07 +2024-07-28 10:48:27,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=145298.66666666666, ans=0.0 +2024-07-28 10:48:53,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=145338.66666666666, ans=0.125 +2024-07-28 10:49:00,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=145352.0, ans=0.0 +2024-07-28 10:49:02,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=145352.0, ans=0.125 +2024-07-28 10:49:06,043 INFO [train.py:1114] (2/4) Epoch 11, batch 6800, loss[loss=0.2029, simple_loss=0.2956, pruned_loss=0.05514, over 4635.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2837, pruned_loss=0.0548, over 938619.08 frames. ], batch size: 13, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:49:07,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.95 vs. limit=10.0 +2024-07-28 10:49:19,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=145378.66666666666, ans=0.125 +2024-07-28 10:49:27,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=145405.33333333334, ans=0.0 +2024-07-28 10:49:33,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 10:49:34,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=145405.33333333334, ans=0.125 +2024-07-28 10:49:43,880 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.530e+01 6.115e+01 7.020e+01 1.132e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 10:49:43,913 INFO [train.py:1114] (2/4) Epoch 11, batch 6850, loss[loss=0.1922, simple_loss=0.2862, pruned_loss=0.04912, over 4693.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2833, pruned_loss=0.05432, over 940182.61 frames. ], batch size: 13, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:50:14,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=145472.0, ans=0.05 +2024-07-28 10:50:16,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.78 vs. limit=15.0 +2024-07-28 10:50:22,677 INFO [train.py:1114] (2/4) Epoch 11, batch 6900, loss[loss=0.1829, simple_loss=0.2738, pruned_loss=0.046, over 4970.00 frames. 
], tot_loss[loss=0.196, simple_loss=0.2828, pruned_loss=0.0546, over 942359.84 frames. ], batch size: 13, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:50:29,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=15.0 +2024-07-28 10:50:30,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145512.0, ans=0.1 +2024-07-28 10:50:33,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=145512.0, ans=0.0 +2024-07-28 10:50:48,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=145538.66666666666, ans=0.125 +2024-07-28 10:50:49,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=145552.0, ans=0.0 +2024-07-28 10:50:56,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.55 vs. limit=22.5 +2024-07-28 10:50:57,127 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+01 5.560e+01 6.281e+01 7.160e+01 1.002e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 10:50:57,162 INFO [train.py:1114] (2/4) Epoch 11, batch 6950, loss[loss=0.1734, simple_loss=0.2492, pruned_loss=0.04877, over 4543.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2827, pruned_loss=0.05497, over 939884.72 frames. ], batch size: 10, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:51:06,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=145578.66666666666, ans=0.125 +2024-07-28 10:51:09,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145578.66666666666, ans=0.1 +2024-07-28 10:51:12,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=145592.0, ans=0.0 +2024-07-28 10:51:15,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=145592.0, ans=0.0 +2024-07-28 10:51:18,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=145605.33333333334, ans=0.125 +2024-07-28 10:51:20,967 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:51:22,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=145605.33333333334, ans=0.0 +2024-07-28 10:51:27,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=145618.66666666666, ans=0.0 +2024-07-28 10:51:29,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.43 vs. limit=15.0 +2024-07-28 10:51:30,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=145618.66666666666, ans=0.125 +2024-07-28 10:51:31,225 INFO [train.py:1114] (2/4) Epoch 11, batch 7000, loss[loss=0.224, simple_loss=0.3048, pruned_loss=0.07156, over 4608.00 frames. 
], tot_loss[loss=0.1965, simple_loss=0.283, pruned_loss=0.05504, over 938161.25 frames. ], batch size: 17, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:51:33,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=145632.0, ans=0.125 +2024-07-28 10:51:37,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=145645.33333333334, ans=0.07 +2024-07-28 10:51:48,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=145658.66666666666, ans=0.0 +2024-07-28 10:51:57,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=15.0 +2024-07-28 10:52:00,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145685.33333333334, ans=0.1 +2024-07-28 10:52:03,914 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.703e+01 6.345e+01 7.288e+01 1.132e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 10:52:03,947 INFO [train.py:1114] (2/4) Epoch 11, batch 7050, loss[loss=0.2103, simple_loss=0.3086, pruned_loss=0.05601, over 4699.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.283, pruned_loss=0.05462, over 941600.31 frames. ], batch size: 19, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:52:07,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.15 vs. limit=10.0 +2024-07-28 10:52:08,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145698.66666666666, ans=0.1 +2024-07-28 10:52:18,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.54 vs. limit=12.0 +2024-07-28 10:52:19,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=145725.33333333334, ans=0.0 +2024-07-28 10:52:23,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=145725.33333333334, ans=0.125 +2024-07-28 10:52:38,925 INFO [train.py:1114] (2/4) Epoch 11, batch 7100, loss[loss=0.1981, simple_loss=0.2984, pruned_loss=0.04891, over 4785.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2837, pruned_loss=0.05482, over 936298.03 frames. ], batch size: 15, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:52:49,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145778.66666666666, ans=0.125 +2024-07-28 10:53:11,362 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.413e+01 6.227e+01 7.503e+01 1.030e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 10:53:11,409 INFO [train.py:1114] (2/4) Epoch 11, batch 7150, loss[loss=0.1675, simple_loss=0.26, pruned_loss=0.03748, over 4512.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2816, pruned_loss=0.05423, over 937104.00 frames. ], batch size: 21, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:53:11,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.68 vs. 
limit=10.0 +2024-07-28 10:53:19,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=145845.33333333334, ans=0.125 +2024-07-28 10:53:23,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=145845.33333333334, ans=0.125 +2024-07-28 10:53:33,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=145872.0, ans=0.035 +2024-07-28 10:53:44,202 INFO [train.py:1114] (2/4) Epoch 11, batch 7200, loss[loss=0.1929, simple_loss=0.2949, pruned_loss=0.04542, over 4812.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2828, pruned_loss=0.05421, over 938017.38 frames. ], batch size: 15, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:53:46,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=145898.66666666666, ans=0.125 +2024-07-28 10:53:49,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.86 vs. limit=15.0 +2024-07-28 10:54:03,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=145925.33333333334, ans=0.0 +2024-07-28 10:54:09,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.10 vs. limit=22.5 +2024-07-28 10:54:10,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=145952.0, ans=0.025 +2024-07-28 10:54:11,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=145952.0, ans=0.0 +2024-07-28 10:54:14,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=145952.0, ans=0.0 +2024-07-28 10:54:23,292 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.502e+01 5.961e+01 6.542e+01 9.167e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 10:54:23,339 INFO [train.py:1114] (2/4) Epoch 11, batch 7250, loss[loss=0.1682, simple_loss=0.2611, pruned_loss=0.03764, over 4851.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2819, pruned_loss=0.05429, over 939676.10 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:54:24,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=145965.33333333334, ans=10.0 +2024-07-28 10:54:24,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.76 vs. 
limit=15.0 +2024-07-28 10:54:26,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=145965.33333333334, ans=0.125 +2024-07-28 10:54:31,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=145978.66666666666, ans=0.025 +2024-07-28 10:54:32,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=145978.66666666666, ans=0.0 +2024-07-28 10:54:58,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145992.0, ans=0.1 +2024-07-28 10:55:03,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=146005.33333333334, ans=0.0 +2024-07-28 10:55:13,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=146018.66666666666, ans=0.125 +2024-07-28 10:55:13,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=146018.66666666666, ans=0.0 +2024-07-28 10:55:14,512 INFO [train.py:1114] (2/4) Epoch 11, batch 7300, loss[loss=0.1569, simple_loss=0.2413, pruned_loss=0.03621, over 4858.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2811, pruned_loss=0.05378, over 940079.17 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:55:21,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=146045.33333333334, ans=0.125 +2024-07-28 10:55:29,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146058.66666666666, ans=0.125 +2024-07-28 10:55:36,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=146072.0, ans=0.0 +2024-07-28 10:55:43,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=146085.33333333334, ans=0.0 +2024-07-28 10:55:44,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-28 10:55:49,207 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.651e+01 6.063e+01 6.776e+01 1.053e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 10:55:49,240 INFO [train.py:1114] (2/4) Epoch 11, batch 7350, loss[loss=0.1831, simple_loss=0.2695, pruned_loss=0.04837, over 4640.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2807, pruned_loss=0.05349, over 939024.03 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:56:04,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.55 vs. 
limit=22.5 +2024-07-28 10:56:14,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146112.0, ans=0.0 +2024-07-28 10:56:22,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=146138.66666666666, ans=0.0 +2024-07-28 10:56:27,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=146138.66666666666, ans=0.5 +2024-07-28 10:56:28,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.91 vs. limit=12.0 +2024-07-28 10:56:29,726 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.41 vs. limit=22.5 +2024-07-28 10:56:30,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.33 vs. limit=22.5 +2024-07-28 10:56:33,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=146152.0, ans=0.1 +2024-07-28 10:56:36,372 INFO [train.py:1114] (2/4) Epoch 11, batch 7400, loss[loss=0.1855, simple_loss=0.2716, pruned_loss=0.04967, over 4693.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2818, pruned_loss=0.05397, over 940371.71 frames. ], batch size: 13, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:56:37,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=146165.33333333334, ans=0.0 +2024-07-28 10:56:45,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=146178.66666666666, ans=0.0 +2024-07-28 10:56:48,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=146178.66666666666, ans=0.0 +2024-07-28 10:57:07,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=146218.66666666666, ans=0.0 +2024-07-28 10:57:11,528 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.675e+01 6.306e+01 7.270e+01 1.053e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 10:57:11,561 INFO [train.py:1114] (2/4) Epoch 11, batch 7450, loss[loss=0.1793, simple_loss=0.2658, pruned_loss=0.04639, over 4610.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2804, pruned_loss=0.05371, over 937785.49 frames. ], batch size: 11, lr: 6.76e-03, grad_scale: 64.0 +2024-07-28 10:57:35,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=146272.0, ans=0.125 +2024-07-28 10:57:37,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=146272.0, ans=0.5 +2024-07-28 10:57:45,300 INFO [train.py:1114] (2/4) Epoch 11, batch 7500, loss[loss=0.2164, simple_loss=0.291, pruned_loss=0.0709, over 3476.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2809, pruned_loss=0.05418, over 936495.91 frames. 
], batch size: 35, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:57:52,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=146312.0, ans=0.0 +2024-07-28 10:57:57,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=146312.0, ans=0.0 +2024-07-28 10:58:10,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=146338.66666666666, ans=0.125 +2024-07-28 10:58:10,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=146338.66666666666, ans=0.025 +2024-07-28 10:58:20,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=146352.0, ans=0.2 +2024-07-28 10:58:21,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146352.0, ans=0.1 +2024-07-28 10:58:24,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=146352.0, ans=0.125 +2024-07-28 10:58:36,014 INFO [train.py:1114] (2/4) Epoch 11, batch 7550, loss[loss=0.2342, simple_loss=0.3235, pruned_loss=0.07242, over 4591.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2822, pruned_loss=0.05491, over 936108.52 frames. ], batch size: 17, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:58:37,336 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.706e+01 6.227e+01 6.985e+01 1.230e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 10:58:39,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.81 vs. limit=12.0 +2024-07-28 10:58:42,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=146365.33333333334, ans=0.0 +2024-07-28 10:58:46,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146378.66666666666, ans=0.125 +2024-07-28 10:58:52,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=146392.0, ans=0.125 +2024-07-28 10:58:57,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=146405.33333333334, ans=0.0 +2024-07-28 10:58:59,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.37 vs. limit=22.5 +2024-07-28 10:59:02,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=146405.33333333334, ans=0.125 +2024-07-28 10:59:10,208 INFO [train.py:1114] (2/4) Epoch 11, batch 7600, loss[loss=0.2005, simple_loss=0.2944, pruned_loss=0.05331, over 4810.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2818, pruned_loss=0.05432, over 937812.79 frames. 
], batch size: 14, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:59:25,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146445.33333333334, ans=0.125 +2024-07-28 10:59:45,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=146472.0, ans=0.125 +2024-07-28 10:59:56,750 INFO [train.py:1114] (2/4) Epoch 11, batch 7650, loss[loss=0.1863, simple_loss=0.2725, pruned_loss=0.04998, over 4949.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2815, pruned_loss=0.05425, over 936952.38 frames. ], batch size: 12, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:59:57,328 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.694e+01 6.162e+01 7.312e+01 1.050e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 11:00:02,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=146498.66666666666, ans=0.025 +2024-07-28 11:00:02,317 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:00:03,972 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.01 vs. limit=15.0 +2024-07-28 11:00:12,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=146512.0, ans=0.125 +2024-07-28 11:00:14,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=146512.0, ans=0.0 +2024-07-28 11:00:20,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=146525.33333333334, ans=0.0 +2024-07-28 11:00:27,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=146538.66666666666, ans=0.125 +2024-07-28 11:00:52,408 INFO [train.py:1114] (2/4) Epoch 11, batch 7700, loss[loss=0.1796, simple_loss=0.2589, pruned_loss=0.05017, over 4696.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2814, pruned_loss=0.05429, over 933689.86 frames. ], batch size: 13, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 11:00:56,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=146565.33333333334, ans=0.0 +2024-07-28 11:00:58,606 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 11:00:59,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=146578.66666666666, ans=0.125 +2024-07-28 11:01:02,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=146578.66666666666, ans=0.0 +2024-07-28 11:01:07,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=146592.0, ans=0.125 +2024-07-28 11:01:10,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.67 vs. 
limit=10.0 +2024-07-28 11:01:11,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=146605.33333333334, ans=0.125 +2024-07-28 11:01:13,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=146605.33333333334, ans=0.05 +2024-07-28 11:01:13,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=146605.33333333334, ans=0.125 +2024-07-28 11:01:21,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146618.66666666666, ans=0.125 +2024-07-28 11:01:24,494 INFO [train.py:1114] (2/4) Epoch 11, batch 7750, loss[loss=0.1894, simple_loss=0.2808, pruned_loss=0.04903, over 4937.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2835, pruned_loss=0.05519, over 935554.57 frames. ], batch size: 14, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 11:01:25,061 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.502e+01 5.839e+01 6.536e+01 9.660e+01, threshold=1.168e+02, percent-clipped=0.0 +2024-07-28 11:01:41,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=146658.66666666666, ans=0.0 +2024-07-28 11:01:43,070 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:01:57,064 INFO [train.py:1114] (2/4) Epoch 11, batch 7800, loss[loss=0.2027, simple_loss=0.2962, pruned_loss=0.05457, over 4657.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2843, pruned_loss=0.05537, over 937287.91 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:02:01,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=146698.66666666666, ans=0.2 +2024-07-28 11:02:01,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146698.66666666666, ans=0.125 +2024-07-28 11:02:05,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=146712.0, ans=0.125 +2024-07-28 11:02:15,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=146725.33333333334, ans=0.0 +2024-07-28 11:02:15,753 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-07-28 11:02:29,897 INFO [train.py:1114] (2/4) Epoch 11, batch 7850, loss[loss=0.1788, simple_loss=0.2499, pruned_loss=0.05384, over 4572.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2841, pruned_loss=0.05514, over 936424.31 frames. 
], batch size: 10, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:02:30,521 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.817e+01 6.561e+01 7.399e+01 1.277e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 11:02:31,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=146765.33333333334, ans=0.025 +2024-07-28 11:02:32,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=146765.33333333334, ans=0.025 +2024-07-28 11:02:36,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=146778.66666666666, ans=0.125 +2024-07-28 11:02:47,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=146792.0, ans=0.2 +2024-07-28 11:02:49,908 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.38 vs. limit=22.5 +2024-07-28 11:02:59,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146818.66666666666, ans=0.125 +2024-07-28 11:03:02,660 INFO [train.py:1114] (2/4) Epoch 11, batch 7900, loss[loss=0.216, simple_loss=0.3092, pruned_loss=0.06138, over 4870.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2855, pruned_loss=0.05567, over 933457.53 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:03:05,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=146832.0, ans=0.1 +2024-07-28 11:03:08,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.76 vs. limit=10.0 +2024-07-28 11:03:10,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.05 vs. limit=10.0 +2024-07-28 11:03:11,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=146845.33333333334, ans=0.125 +2024-07-28 11:03:22,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146858.66666666666, ans=0.1 +2024-07-28 11:03:33,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=146872.0, ans=0.0 +2024-07-28 11:03:35,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 11:03:36,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff2.min_abs, batch_count=146885.33333333334, ans=0.1 +2024-07-28 11:03:41,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-07-28 11:03:43,603 INFO [train.py:1114] (2/4) Epoch 11, batch 7950, loss[loss=0.2179, simple_loss=0.2978, pruned_loss=0.06903, over 3500.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2846, pruned_loss=0.0551, over 935954.74 frames. 
], batch size: 36, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:03:44,167 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.704e+01 6.229e+01 6.685e+01 9.610e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 11:03:46,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=146898.66666666666, ans=0.0 +2024-07-28 11:03:55,395 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:04:03,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=146925.33333333334, ans=0.2 +2024-07-28 11:04:19,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=146952.0, ans=0.0 +2024-07-28 11:04:23,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=146952.0, ans=0.0 +2024-07-28 11:04:24,577 INFO [train.py:1114] (2/4) Epoch 11, batch 8000, loss[loss=0.1783, simple_loss=0.2497, pruned_loss=0.05338, over 4619.00 frames. ], tot_loss[loss=0.195, simple_loss=0.282, pruned_loss=0.05397, over 935291.09 frames. ], batch size: 11, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:04:44,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=147005.33333333334, ans=0.125 +2024-07-28 11:04:55,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=147018.66666666666, ans=0.125 +2024-07-28 11:04:55,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147018.66666666666, ans=0.0 +2024-07-28 11:04:55,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=147018.66666666666, ans=0.125 +2024-07-28 11:04:57,162 INFO [train.py:1114] (2/4) Epoch 11, batch 8050, loss[loss=0.1827, simple_loss=0.2741, pruned_loss=0.04569, over 4821.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.282, pruned_loss=0.0539, over 934795.71 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:04:57,754 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.054e+01 5.507e+01 6.263e+01 7.215e+01 1.111e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 11:04:59,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=147032.0, ans=0.09899494936611666 +2024-07-28 11:05:05,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.89 vs. 
limit=15.0 +2024-07-28 11:05:09,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147058.66666666666, ans=0.125 +2024-07-28 11:05:10,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=147058.66666666666, ans=0.125 +2024-07-28 11:05:28,596 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:05:28,975 INFO [train.py:1114] (2/4) Epoch 11, batch 8100, loss[loss=0.1697, simple_loss=0.2701, pruned_loss=0.03464, over 4817.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2821, pruned_loss=0.05342, over 934642.42 frames. ], batch size: 15, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:05:32,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147098.66666666666, ans=0.1 +2024-07-28 11:05:39,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147112.0, ans=0.125 +2024-07-28 11:05:55,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=147125.33333333334, ans=0.0 +2024-07-28 11:06:05,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=147125.33333333334, ans=0.0 +2024-07-28 11:06:12,002 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.72 vs. limit=15.0 +2024-07-28 11:06:19,669 INFO [train.py:1114] (2/4) Epoch 11, batch 8150, loss[loss=0.2265, simple_loss=0.3077, pruned_loss=0.07266, over 4809.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2812, pruned_loss=0.05335, over 937903.74 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:06:20,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.553e+01 6.182e+01 6.972e+01 1.059e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 11:06:33,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=147192.0, ans=0.125 +2024-07-28 11:06:50,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147218.66666666666, ans=0.1 +2024-07-28 11:06:50,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=147218.66666666666, ans=0.125 +2024-07-28 11:06:51,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=147218.66666666666, ans=0.2 +2024-07-28 11:06:51,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.94 vs. limit=10.0 +2024-07-28 11:06:54,243 INFO [train.py:1114] (2/4) Epoch 11, batch 8200, loss[loss=0.1849, simple_loss=0.2866, pruned_loss=0.04162, over 4799.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2818, pruned_loss=0.05316, over 938793.06 frames. 
], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:06:55,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=147232.0, ans=0.0 +2024-07-28 11:06:56,201 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:07:04,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=147245.33333333334, ans=0.0 +2024-07-28 11:07:08,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147258.66666666666, ans=0.1 +2024-07-28 11:07:21,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=147272.0, ans=0.125 +2024-07-28 11:07:34,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=147285.33333333334, ans=0.125 +2024-07-28 11:07:38,664 INFO [train.py:1114] (2/4) Epoch 11, batch 8250, loss[loss=0.2001, simple_loss=0.2791, pruned_loss=0.06059, over 4896.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2825, pruned_loss=0.05359, over 939332.06 frames. ], batch size: 13, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:07:39,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.575e+01 5.968e+01 7.239e+01 1.462e+02, threshold=1.194e+02, percent-clipped=1.0 +2024-07-28 11:07:52,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.38 vs. limit=15.0 +2024-07-28 11:07:56,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=147325.33333333334, ans=0.125 +2024-07-28 11:07:58,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147325.33333333334, ans=0.125 +2024-07-28 11:07:59,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147325.33333333334, ans=0.1 +2024-07-28 11:08:00,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=147325.33333333334, ans=0.2 +2024-07-28 11:08:02,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=147338.66666666666, ans=0.0 +2024-07-28 11:08:06,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147338.66666666666, ans=0.0 +2024-07-28 11:08:07,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=147338.66666666666, ans=0.125 +2024-07-28 11:08:09,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=147338.66666666666, ans=0.1 +2024-07-28 11:08:15,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=147352.0, ans=0.125 +2024-07-28 11:08:17,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.20 vs. 
limit=12.0 +2024-07-28 11:08:17,937 INFO [train.py:1114] (2/4) Epoch 11, batch 8300, loss[loss=0.2316, simple_loss=0.3195, pruned_loss=0.0718, over 4896.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2833, pruned_loss=0.05381, over 939146.66 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:08:18,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=147365.33333333334, ans=0.0 +2024-07-28 11:08:23,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=147365.33333333334, ans=0.2 +2024-07-28 11:08:23,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=147378.66666666666, ans=0.0 +2024-07-28 11:08:35,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=147392.0, ans=0.0 +2024-07-28 11:08:51,033 INFO [train.py:1114] (2/4) Epoch 11, batch 8350, loss[loss=0.2071, simple_loss=0.301, pruned_loss=0.05654, over 4812.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2832, pruned_loss=0.05414, over 941700.39 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:08:51,649 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.686e+01 6.163e+01 6.949e+01 9.683e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 11:08:53,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=147432.0, ans=0.125 +2024-07-28 11:08:55,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=147432.0, ans=0.125 +2024-07-28 11:08:57,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=147445.33333333334, ans=0.0 +2024-07-28 11:09:02,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=147445.33333333334, ans=0.125 +2024-07-28 11:09:05,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=147458.66666666666, ans=0.125 +2024-07-28 11:09:08,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=147458.66666666666, ans=0.2 +2024-07-28 11:09:21,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147485.33333333334, ans=0.125 +2024-07-28 11:09:25,430 INFO [train.py:1114] (2/4) Epoch 11, batch 8400, loss[loss=0.1958, simple_loss=0.2793, pruned_loss=0.05617, over 4777.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2826, pruned_loss=0.05384, over 940550.83 frames. ], batch size: 12, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:09:27,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=147498.66666666666, ans=0.125 +2024-07-28 11:09:38,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.43 vs. limit=6.0 +2024-07-28 11:09:40,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. 
limit=15.0 +2024-07-28 11:09:41,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=147525.33333333334, ans=0.2 +2024-07-28 11:09:45,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.67 vs. limit=15.0 +2024-07-28 11:09:46,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=147538.66666666666, ans=0.2 +2024-07-28 11:09:49,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=147538.66666666666, ans=0.125 +2024-07-28 11:09:56,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=147552.0, ans=0.0 +2024-07-28 11:09:57,654 INFO [train.py:1114] (2/4) Epoch 11, batch 8450, loss[loss=0.2113, simple_loss=0.3106, pruned_loss=0.05594, over 4812.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2841, pruned_loss=0.05445, over 938799.26 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:09:58,229 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.711e+01 6.250e+01 7.138e+01 1.059e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 11:09:58,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=147565.33333333334, ans=0.025 +2024-07-28 11:09:59,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147565.33333333334, ans=0.1 +2024-07-28 11:10:01,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=147565.33333333334, ans=0.0 +2024-07-28 11:10:04,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=147578.66666666666, ans=0.125 +2024-07-28 11:10:06,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=147578.66666666666, ans=0.0 +2024-07-28 11:10:09,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=147578.66666666666, ans=0.0 +2024-07-28 11:10:15,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=147592.0, ans=0.0 +2024-07-28 11:10:16,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=147605.33333333334, ans=10.0 +2024-07-28 11:10:20,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=147605.33333333334, ans=0.05 +2024-07-28 11:10:22,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=147618.66666666666, ans=0.0 +2024-07-28 11:10:29,742 INFO [train.py:1114] (2/4) Epoch 11, batch 8500, loss[loss=0.1457, simple_loss=0.2352, pruned_loss=0.02812, over 4615.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2823, pruned_loss=0.05378, over 938982.34 frames. 
], batch size: 11, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:10:35,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147632.0, ans=0.125 +2024-07-28 11:10:41,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147645.33333333334, ans=0.125 +2024-07-28 11:10:46,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=147658.66666666666, ans=0.0 +2024-07-28 11:10:47,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=147658.66666666666, ans=0.125 +2024-07-28 11:10:58,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147685.33333333334, ans=0.1 +2024-07-28 11:11:00,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=147685.33333333334, ans=0.125 +2024-07-28 11:11:01,942 INFO [train.py:1114] (2/4) Epoch 11, batch 8550, loss[loss=0.1978, simple_loss=0.2671, pruned_loss=0.06422, over 4801.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2831, pruned_loss=0.05426, over 939861.34 frames. ], batch size: 11, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:11:02,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147698.66666666666, ans=0.1 +2024-07-28 11:11:03,200 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.482e+01 7.355e+01 1.079e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 11:11:07,188 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.67 vs. limit=15.0 +2024-07-28 11:11:12,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=147712.0, ans=0.125 +2024-07-28 11:11:22,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=147738.66666666666, ans=0.125 +2024-07-28 11:11:31,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. limit=15.0 +2024-07-28 11:11:32,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=147752.0, ans=0.0 +2024-07-28 11:11:34,245 INFO [train.py:1114] (2/4) Epoch 11, batch 8600, loss[loss=0.2111, simple_loss=0.3126, pruned_loss=0.05478, over 4797.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2818, pruned_loss=0.05352, over 939598.16 frames. 
], batch size: 15, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:11:37,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=147765.33333333334, ans=0.125 +2024-07-28 11:11:45,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=147778.66666666666, ans=0.5 +2024-07-28 11:11:49,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147792.0, ans=0.125 +2024-07-28 11:11:57,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147805.33333333334, ans=0.125 +2024-07-28 11:12:00,133 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.29 vs. limit=15.0 +2024-07-28 11:12:01,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=147818.66666666666, ans=0.125 +2024-07-28 11:12:03,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=147818.66666666666, ans=0.125 +2024-07-28 11:12:05,740 INFO [train.py:1114] (2/4) Epoch 11, batch 8650, loss[loss=0.2008, simple_loss=0.2956, pruned_loss=0.05304, over 4905.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.282, pruned_loss=0.05363, over 940593.79 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:12:06,998 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.868e+01 5.716e+01 6.623e+01 8.030e+01 1.303e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-28 11:12:17,401 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.38 vs. limit=15.0 +2024-07-28 11:12:18,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=147845.33333333334, ans=0.2 +2024-07-28 11:12:18,886 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.66 vs. limit=5.0 +2024-07-28 11:12:22,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=147858.66666666666, ans=0.0 +2024-07-28 11:12:37,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=147885.33333333334, ans=0.025 +2024-07-28 11:12:39,099 INFO [train.py:1114] (2/4) Epoch 11, batch 8700, loss[loss=0.1573, simple_loss=0.2448, pruned_loss=0.03494, over 4761.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2826, pruned_loss=0.05405, over 937616.89 frames. ], batch size: 13, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:12:39,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=147898.66666666666, ans=0.0 +2024-07-28 11:12:54,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.41 vs. limit=15.0 +2024-07-28 11:13:11,404 INFO [train.py:1114] (2/4) Epoch 11, batch 8750, loss[loss=0.1933, simple_loss=0.2823, pruned_loss=0.05213, over 4692.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.282, pruned_loss=0.05354, over 936028.57 frames. 
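Each `train.py:1114` line reports the current batch's loss over its own frame count alongside a `tot_loss` over roughly 930k-940k frames, which reads as a frame-weighted running average of recent batches. A sketch of that bookkeeping follows; the exponential-decay formulation and the decay constant are assumptions made for illustration.

```python
# Hedged sketch of frame-weighted running loss tracking, consistent with
# "loss[... over 4896.00 frames], tot_loss[... over 939146.66 frames]".
# The decay factor is an assumed illustration, not taken from this run.

class RunningLoss:
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        # Decay old statistics, then fold in the new batch, weighting each
        # batch by how many acoustic frames it contained.
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def tot_loss(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```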
], batch size: 15, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:13:12,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-07-28 11:13:12,641 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.668e+01 6.375e+01 7.547e+01 1.367e+02, threshold=1.275e+02, percent-clipped=1.0 +2024-07-28 11:13:12,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147965.33333333334, ans=0.125 +2024-07-28 11:13:20,686 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:13:25,593 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:13:25,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=6.0 +2024-07-28 11:13:51,483 INFO [train.py:1114] (2/4) Epoch 11, batch 8800, loss[loss=0.1977, simple_loss=0.2896, pruned_loss=0.05293, over 4938.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2829, pruned_loss=0.05399, over 936849.22 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:13:51,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=148032.0, ans=0.125 +2024-07-28 11:13:57,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148045.33333333334, ans=0.1 +2024-07-28 11:14:11,106 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:14:13,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=148058.66666666666, ans=0.07 +2024-07-28 11:14:17,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.12 vs. limit=15.0 +2024-07-28 11:14:19,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=148072.0, ans=0.0 +2024-07-28 11:14:28,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.13 vs. limit=15.0 +2024-07-28 11:14:32,884 INFO [train.py:1114] (2/4) Epoch 11, batch 8850, loss[loss=0.2328, simple_loss=0.3248, pruned_loss=0.07038, over 4515.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2836, pruned_loss=0.05449, over 931128.05 frames. 
], batch size: 21, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:14:34,134 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.683e+01 6.364e+01 7.220e+01 1.136e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 11:14:35,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=148098.66666666666, ans=0.125 +2024-07-28 11:14:37,448 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:14:38,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=148112.0, ans=0.025 +2024-07-28 11:14:40,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=148112.0, ans=0.025 +2024-07-28 11:14:47,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=148125.33333333334, ans=0.0 +2024-07-28 11:15:00,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=148152.0, ans=0.025 +2024-07-28 11:15:05,041 INFO [train.py:1114] (2/4) Epoch 11, batch 8900, loss[loss=0.1963, simple_loss=0.269, pruned_loss=0.06179, over 4935.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2829, pruned_loss=0.05397, over 929346.44 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:15:05,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=148165.33333333334, ans=0.0 +2024-07-28 11:15:05,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148165.33333333334, ans=0.1 +2024-07-28 11:15:13,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=148178.66666666666, ans=0.2 +2024-07-28 11:15:13,750 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=15.0 +2024-07-28 11:15:13,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.82 vs. limit=22.5 +2024-07-28 11:15:22,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=148192.0, ans=0.0 +2024-07-28 11:15:33,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148218.66666666666, ans=0.125 +2024-07-28 11:15:37,387 INFO [train.py:1114] (2/4) Epoch 11, batch 8950, loss[loss=0.1836, simple_loss=0.2708, pruned_loss=0.04822, over 4559.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2835, pruned_loss=0.05442, over 930278.26 frames. 
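The `optim.py:487` warnings summarize the recent distribution of gradient norms (min, quartiles, max), the clipping threshold derived from it, and the fraction of recent batches that were clipped; in every warning above the logged threshold equals `Clipping_scale` (2.0) times the median norm. The sketch below reproduces that scheme; the history window size, and treating "scale times median" as the rule, are inferences from the log rather than the optimizer's verified source.

```python
import torch
from collections import deque

# Hedged sketch of adaptive gradient clipping driven by recent grad-norm
# statistics, in the spirit of the "grad-norm quartiles ... threshold=...,
# percent-clipped=..." warnings. Window size is an assumption.

class AdaptiveClipper:
    def __init__(self, window: int = 1000, scale: float = 2.0):
        self.norms = deque(maxlen=window)
        self.scale = scale  # cf. "Clipping_scale=2.0" in the log
        self.clipped = 0
        self.seen = 0

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        q = torch.quantile(torch.tensor(list(self.norms)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()  # inferred: scale x median norm
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)  # rescale grads into range
        print(f"grad-norm quartiles {q.tolist()}, threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.clipped / self.seen:.1f}")
        return norm
```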
], batch size: 21, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:15:38,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+01 5.642e+01 6.194e+01 7.205e+01 1.181e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 11:15:50,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=148258.66666666666, ans=0.2 +2024-07-28 11:15:54,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=148258.66666666666, ans=0.025 +2024-07-28 11:16:02,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=148285.33333333334, ans=0.2 +2024-07-28 11:16:09,528 INFO [train.py:1114] (2/4) Epoch 11, batch 9000, loss[loss=0.1664, simple_loss=0.2548, pruned_loss=0.03901, over 4652.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2821, pruned_loss=0.05364, over 933412.32 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:16:09,528 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 11:16:21,142 INFO [train.py:1146] (2/4) Epoch 11, validation: loss=0.1703, simple_loss=0.274, pruned_loss=0.03325, over 944034.00 frames. +2024-07-28 11:16:21,143 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 11:16:45,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.67 vs. limit=5.0 +2024-07-28 11:16:47,917 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:16:53,516 INFO [train.py:1114] (2/4) Epoch 11, batch 9050, loss[loss=0.1852, simple_loss=0.2657, pruned_loss=0.05239, over 4536.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2814, pruned_loss=0.05315, over 934209.28 frames. ], batch size: 10, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:16:53,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-07-28 11:16:54,783 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.677e+01 6.450e+01 7.430e+01 1.132e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 11:17:02,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148378.66666666666, ans=0.1 +2024-07-28 11:17:06,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148392.0, ans=0.125 +2024-07-28 11:17:06,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148392.0, ans=0.1 +2024-07-28 11:17:09,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=148392.0, ans=0.0 +2024-07-28 11:17:10,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=148392.0, ans=0.0 +2024-07-28 11:17:12,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=148405.33333333334, ans=0.125 +2024-07-28 11:17:25,697 INFO [train.py:1114] (2/4) Epoch 11, batch 9100, loss[loss=0.2265, simple_loss=0.3215, pruned_loss=0.06577, over 4929.00 frames. 
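At batch 9000 above, the trainer pauses to compute a validation loss (`train.py:1137-1147`) and reports peak GPU memory (4179MB). A minimal sketch of that periodic evaluation follows; `compute_loss`, the batch interval, and the loader shape are hypothetical stand-ins, not this recipe's actual API.

```python
import torch

# Hedged sketch of the periodic validation pass suggested by the
# "Computing validation loss" / "Maximum memory allocated" log lines.
# compute_loss and the 1000-batch interval are illustrative assumptions.

def maybe_validate(model, valid_loader, batch_idx, device, interval=1000):
    if batch_idx % interval != 0:
        return None
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, frames = compute_loss(model, batch, device)  # hypothetical
            tot_loss += loss.item() * frames
            tot_frames += frames
    model.train()
    mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={tot_loss / tot_frames:.4f}, "
          f"over {tot_frames:.2f} frames. Max memory so far is {mem_mb}MB")
    return tot_loss / tot_frames
```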
], tot_loss[loss=0.1935, simple_loss=0.281, pruned_loss=0.05299, over 936819.67 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:17:30,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=148432.0, ans=0.0 +2024-07-28 11:17:31,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.50 vs. limit=22.5 +2024-07-28 11:17:34,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=148445.33333333334, ans=0.125 +2024-07-28 11:17:34,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.95 vs. limit=22.5 +2024-07-28 11:17:38,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=15.0 +2024-07-28 11:17:39,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148458.66666666666, ans=0.125 +2024-07-28 11:17:44,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=148472.0, ans=0.125 +2024-07-28 11:17:47,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=148472.0, ans=0.2 +2024-07-28 11:17:57,380 INFO [train.py:1114] (2/4) Epoch 11, batch 9150, loss[loss=0.1869, simple_loss=0.2719, pruned_loss=0.05094, over 4805.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2819, pruned_loss=0.05364, over 935563.78 frames. ], batch size: 14, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:17:58,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=148498.66666666666, ans=0.1 +2024-07-28 11:17:58,680 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.452e+01 6.035e+01 6.657e+01 8.728e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 11:18:10,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148512.0, ans=0.125 +2024-07-28 11:18:16,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=148525.33333333334, ans=0.125 +2024-07-28 11:18:17,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=148525.33333333334, ans=0.0 +2024-07-28 11:18:19,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148538.66666666666, ans=0.0 +2024-07-28 11:18:31,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=148565.33333333334, ans=0.02 +2024-07-28 11:18:32,055 INFO [train.py:1114] (2/4) Epoch 11, batch 9200, loss[loss=0.1649, simple_loss=0.252, pruned_loss=0.03884, over 4850.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2805, pruned_loss=0.0534, over 937361.59 frames. 
], batch size: 12, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:18:51,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148592.0, ans=0.1 +2024-07-28 11:18:52,025 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.99 vs. limit=15.0 +2024-07-28 11:18:57,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148605.33333333334, ans=0.1 +2024-07-28 11:19:10,161 INFO [train.py:1114] (2/4) Epoch 11, batch 9250, loss[loss=0.171, simple_loss=0.2619, pruned_loss=0.04004, over 4633.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2798, pruned_loss=0.05305, over 938064.81 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:19:11,548 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.571e+01 5.944e+01 7.071e+01 9.935e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 11:19:18,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=148645.33333333334, ans=0.04949747468305833 +2024-07-28 11:19:18,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.60 vs. limit=12.0 +2024-07-28 11:19:20,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.78 vs. limit=15.0 +2024-07-28 11:19:25,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148658.66666666666, ans=0.1 +2024-07-28 11:19:33,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148672.0, ans=0.1 +2024-07-28 11:19:39,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=148685.33333333334, ans=0.125 +2024-07-28 11:19:41,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.35 vs. limit=15.0 +2024-07-28 11:19:42,416 INFO [train.py:1114] (2/4) Epoch 11, batch 9300, loss[loss=0.2061, simple_loss=0.293, pruned_loss=0.05962, over 4772.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2792, pruned_loss=0.05315, over 938224.15 frames. 
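The `scaling.py:1024` `Whitening` lines compare a whiteness metric of a layer's activations against a scheduled limit (e.g. `metric=4.99 vs. limit=15.0`): the metric is 1.0 when the within-group feature covariance is a multiple of the identity and grows as the features become less white. The function below is one plausible reading of such a metric, shown for intuition; the exact normalization in scaling.py may differ.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """Whiteness of features x of shape (num_frames, num_channels): 1.0 iff
    the per-group covariance is c*I, larger otherwise. A hedged sketch of
    the quantity behind the "metric=... vs. limit=..." log lines."""
    (num_frames, num_channels) = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    metrics = []
    for g in range(num_groups):
        cov = x[:, g, :].T @ x[:, g, :] / num_frames
        d = cov.shape[0]
        # mean squared eigenvalue over squared mean eigenvalue:
        # trace(C^2)/d divided by (trace(C)/d)^2, which is 1.0 for C = c*I.
        num = (cov @ cov).diagonal().sum() / d
        den = (cov.diagonal().sum() / d) ** 2
        metrics.append((num / den).item())
    return sum(metrics) / num_groups

x = torch.randn(1000, 256)   # roughly white features
print(whitening_metric(x))   # ~1.2-1.3 here; above 1.0 only by sampling noise
```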
], batch size: 12, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:19:43,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=148698.66666666666, ans=0.2 +2024-07-28 11:19:46,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=148698.66666666666, ans=0.2 +2024-07-28 11:19:47,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=148698.66666666666, ans=0.025 +2024-07-28 11:19:49,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=148712.0, ans=0.0 +2024-07-28 11:19:52,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148712.0, ans=0.125 +2024-07-28 11:19:56,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=148725.33333333334, ans=0.0 +2024-07-28 11:20:03,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.56 vs. limit=15.0 +2024-07-28 11:20:10,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148752.0, ans=0.1 +2024-07-28 11:20:14,403 INFO [train.py:1114] (2/4) Epoch 11, batch 9350, loss[loss=0.1857, simple_loss=0.2641, pruned_loss=0.05363, over 4787.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2801, pruned_loss=0.05319, over 934876.77 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:20:15,614 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.532e+01 6.030e+01 6.752e+01 9.117e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 11:20:28,566 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0 +2024-07-28 11:20:31,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=148792.0, ans=0.0 +2024-07-28 11:20:38,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-07-28 11:20:41,107 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:20:45,874 INFO [train.py:1114] (2/4) Epoch 11, batch 9400, loss[loss=0.1819, simple_loss=0.2711, pruned_loss=0.04634, over 4694.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2803, pruned_loss=0.0534, over 933108.22 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:20:51,644 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.70 vs. 
limit=22.5 +2024-07-28 11:20:55,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148845.33333333334, ans=0.1 +2024-07-28 11:20:56,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=148845.33333333334, ans=0.025 +2024-07-28 11:20:57,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=148845.33333333334, ans=0.09899494936611666 +2024-07-28 11:21:08,358 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.59 vs. limit=15.0 +2024-07-28 11:21:13,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=148885.33333333334, ans=0.125 +2024-07-28 11:21:18,917 INFO [train.py:1114] (2/4) Epoch 11, batch 9450, loss[loss=0.2026, simple_loss=0.2768, pruned_loss=0.06417, over 4806.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2805, pruned_loss=0.0534, over 932457.73 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:21:20,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.492e+01 5.833e+01 6.605e+01 9.079e+01, threshold=1.167e+02, percent-clipped=0.0 +2024-07-28 11:21:22,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=148898.66666666666, ans=0.125 +2024-07-28 11:21:37,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.38 vs. limit=15.0 +2024-07-28 11:21:46,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=148952.0, ans=0.125 +2024-07-28 11:21:50,282 INFO [train.py:1114] (2/4) Epoch 11, batch 9500, loss[loss=0.1795, simple_loss=0.2638, pruned_loss=0.04764, over 4709.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.281, pruned_loss=0.05361, over 934569.46 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:21:52,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=148965.33333333334, ans=0.0 +2024-07-28 11:22:11,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=149005.33333333334, ans=0.09899494936611666 +2024-07-28 11:22:16,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.67 vs. limit=22.5 +2024-07-28 11:22:22,557 INFO [train.py:1114] (2/4) Epoch 11, batch 9550, loss[loss=0.2167, simple_loss=0.2944, pruned_loss=0.06947, over 4783.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2813, pruned_loss=0.05346, over 931934.31 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:22:23,739 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.584e+01 6.073e+01 6.801e+01 9.660e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 11:22:32,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.41 vs. 
limit=15.0 +2024-07-28 11:22:33,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=149045.33333333334, ans=0.0 +2024-07-28 11:22:33,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149045.33333333334, ans=0.125 +2024-07-28 11:22:34,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=149058.66666666666, ans=0.0 +2024-07-28 11:22:36,816 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.56 vs. limit=22.5 +2024-07-28 11:22:41,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149072.0, ans=0.125 +2024-07-28 11:22:53,735 INFO [train.py:1114] (2/4) Epoch 11, batch 9600, loss[loss=0.2607, simple_loss=0.3262, pruned_loss=0.09757, over 3394.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2815, pruned_loss=0.05329, over 930619.18 frames. ], batch size: 35, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:23:00,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=149112.0, ans=0.2 +2024-07-28 11:23:06,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.50 vs. limit=15.0 +2024-07-28 11:23:13,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=149138.66666666666, ans=0.125 +2024-07-28 11:23:15,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=149138.66666666666, ans=0.02 +2024-07-28 11:23:23,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=149152.0, ans=0.125 +2024-07-28 11:23:25,448 INFO [train.py:1114] (2/4) Epoch 11, batch 9650, loss[loss=0.2113, simple_loss=0.3025, pruned_loss=0.06007, over 4863.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2823, pruned_loss=0.05391, over 926552.85 frames. ], batch size: 16, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:23:26,665 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.812e+01 6.472e+01 7.420e+01 1.092e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 11:23:30,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=149165.33333333334, ans=0.125 +2024-07-28 11:23:30,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=149165.33333333334, ans=0.0 +2024-07-28 11:23:33,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=149178.66666666666, ans=0.1 +2024-07-28 11:24:03,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=149218.66666666666, ans=0.125 +2024-07-28 11:24:08,282 INFO [train.py:1114] (2/4) Epoch 11, batch 9700, loss[loss=0.2009, simple_loss=0.2979, pruned_loss=0.05193, over 4156.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2813, pruned_loss=0.05339, over 923777.09 frames. 
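Every training line also records `grad_scale` (16.0 around batches 8500-8750 above, 32.0 otherwise), the dynamic loss-scaling factor used for mixed-precision training: it halves when an overflow is detected and grows back after a run of clean steps. Below is a generic sketch of that mechanism using PyTorch's stock `GradScaler`; the batch keys and hyper-parameters are illustrative, and this recipe's optimizer wrapper may manage the scale itself.

```python
import torch

# Generic AMP step with dynamic loss scaling; the grad_scale values in the
# log (32.0 / 16.0) correspond to this scaler's current scale.

scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_interval=2000)

def train_step(model, optimizer, batch, criterion, device):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = criterion(model(batch["inputs"].to(device)),   # keys assumed
                         batch["targets"].to(device))
    scaler.scale(loss).backward()   # backprop the scaled loss
    scaler.step(optimizer)          # unscales grads, skips step on inf/nan
    scaler.update()                 # halve scale on overflow, else grow it
    return loss.item(), scaler.get_scale()
```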
], batch size: 25, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:24:08,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149232.0, ans=0.125 +2024-07-28 11:24:11,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=149232.0, ans=0.125 +2024-07-28 11:24:14,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149245.33333333334, ans=0.1 +2024-07-28 11:24:15,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=149245.33333333334, ans=0.05 +2024-07-28 11:24:15,630 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.42 vs. limit=15.0 +2024-07-28 11:24:17,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=149245.33333333334, ans=0.0 +2024-07-28 11:24:28,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=149272.0, ans=0.05 +2024-07-28 11:24:28,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149272.0, ans=0.125 +2024-07-28 11:24:32,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=149272.0, ans=0.0 +2024-07-28 11:24:34,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=149285.33333333334, ans=0.125 +2024-07-28 11:24:39,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=149298.66666666666, ans=0.5 +2024-07-28 11:24:39,667 INFO [train.py:1114] (2/4) Epoch 11, batch 9750, loss[loss=0.1545, simple_loss=0.2406, pruned_loss=0.03424, over 4693.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2813, pruned_loss=0.05332, over 924585.42 frames. ], batch size: 15, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:24:40,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.600e+01 6.430e+01 7.398e+01 1.191e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 11:24:42,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=149298.66666666666, ans=10.0 +2024-07-28 11:24:44,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149298.66666666666, ans=0.1 +2024-07-28 11:24:49,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=149312.0, ans=0.2 +2024-07-28 11:24:53,057 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0 +2024-07-28 11:24:59,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.60 vs. 
limit=22.5 +2024-07-28 11:25:03,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149338.66666666666, ans=0.125 +2024-07-28 11:25:06,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=149352.0, ans=0.0 +2024-07-28 11:25:13,042 INFO [train.py:1114] (2/4) Epoch 11, batch 9800, loss[loss=0.1574, simple_loss=0.248, pruned_loss=0.0334, over 4705.00 frames. ], tot_loss[loss=0.192, simple_loss=0.279, pruned_loss=0.05253, over 924359.40 frames. ], batch size: 12, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:25:14,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149365.33333333334, ans=0.1 +2024-07-28 11:25:16,479 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.17 vs. limit=15.0 +2024-07-28 11:25:19,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-07-28 11:25:29,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149392.0, ans=0.1 +2024-07-28 11:25:35,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-07-28 11:25:35,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=15.0 +2024-07-28 11:25:38,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=149418.66666666666, ans=0.2 +2024-07-28 11:25:43,719 INFO [train.py:1114] (2/4) Epoch 11, batch 9850, loss[loss=0.2035, simple_loss=0.2896, pruned_loss=0.0587, over 4899.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.28, pruned_loss=0.0534, over 926921.05 frames. ], batch size: 15, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:25:44,876 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.760e+01 6.754e+01 7.559e+01 1.117e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 11:25:46,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=149432.0, ans=0.0 +2024-07-28 11:26:05,316 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.28 vs. limit=22.5 +2024-07-28 11:26:10,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=149485.33333333334, ans=0.125 +2024-07-28 11:26:10,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=149485.33333333334, ans=0.125 +2024-07-28 11:26:14,557 INFO [train.py:1114] (2/4) Epoch 11, batch 9900, loss[loss=0.2118, simple_loss=0.3088, pruned_loss=0.05734, over 4840.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2814, pruned_loss=0.05377, over 926021.58 frames. 
], batch size: 16, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:26:14,983 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.30 vs. limit=22.5 +2024-07-28 11:26:17,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=149498.66666666666, ans=0.125 +2024-07-28 11:26:18,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=149498.66666666666, ans=0.0 +2024-07-28 11:26:19,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=149498.66666666666, ans=0.2 +2024-07-28 11:26:23,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.77 vs. limit=15.0 +2024-07-28 11:26:24,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=149512.0, ans=0.025 +2024-07-28 11:26:25,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=149512.0, ans=0.0 +2024-07-28 11:26:25,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=149512.0, ans=0.125 +2024-07-28 11:26:28,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=149525.33333333334, ans=0.2 +2024-07-28 11:26:35,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.42 vs. limit=22.5 +2024-07-28 11:26:43,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=149552.0, ans=0.125 +2024-07-28 11:26:44,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=149552.0, ans=0.0 +2024-07-28 11:26:45,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.34 vs. limit=15.0 +2024-07-28 11:26:45,851 INFO [train.py:1114] (2/4) Epoch 11, batch 9950, loss[loss=0.2151, simple_loss=0.3019, pruned_loss=0.06422, over 4799.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2823, pruned_loss=0.05437, over 929254.77 frames. 
], batch size: 11, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:26:47,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.848e+01 6.460e+01 7.731e+01 1.083e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 11:26:52,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149578.66666666666, ans=0.125 +2024-07-28 11:26:53,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=149578.66666666666, ans=0.0 +2024-07-28 11:26:57,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149578.66666666666, ans=0.1 +2024-07-28 11:27:03,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149592.0, ans=0.125 +2024-07-28 11:27:03,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149592.0, ans=0.125 +2024-07-28 11:27:12,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=149618.66666666666, ans=0.0 +2024-07-28 11:27:14,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=149618.66666666666, ans=0.0 +2024-07-28 11:27:18,157 INFO [train.py:1114] (2/4) Epoch 11, batch 10000, loss[loss=0.1895, simple_loss=0.2811, pruned_loss=0.0489, over 4676.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2849, pruned_loss=0.05521, over 926887.80 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:27:19,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=149632.0, ans=0.125 +2024-07-28 11:27:20,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=149632.0, ans=0.0 +2024-07-28 11:27:22,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.51 vs. limit=15.0 +2024-07-28 11:27:33,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=149658.66666666666, ans=0.0 +2024-07-28 11:27:40,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0 +2024-07-28 11:27:46,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.18 vs. limit=15.0 +2024-07-28 11:27:49,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.54 vs. limit=22.5 +2024-07-28 11:27:50,526 INFO [train.py:1114] (2/4) Epoch 11, batch 10050, loss[loss=0.2772, simple_loss=0.3389, pruned_loss=0.1078, over 3542.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2888, pruned_loss=0.05726, over 914670.92 frames. 
], batch size: 36, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:27:51,874 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.828e+01 6.328e+01 6.971e+01 1.016e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 11:27:52,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=149698.66666666666, ans=0.5 +2024-07-28 11:27:52,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=149698.66666666666, ans=0.2 +2024-07-28 11:27:53,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=149698.66666666666, ans=0.125 +2024-07-28 11:27:56,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=149698.66666666666, ans=0.125 +2024-07-28 11:28:05,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149725.33333333334, ans=0.125 +2024-07-28 11:28:21,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.54 vs. limit=15.0 +2024-07-28 11:28:24,951 INFO [train.py:1114] (2/4) Epoch 11, batch 10100, loss[loss=0.2488, simple_loss=0.3261, pruned_loss=0.08576, over 3324.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.294, pruned_loss=0.0627, over 859901.23 frames. ], batch size: 35, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:28:36,731 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.99 vs. limit=15.0 +2024-07-28 11:28:55,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=149818.66666666666, ans=0.125 +2024-07-28 11:28:57,736 INFO [train.py:1114] (2/4) Epoch 11, batch 10150, loss[loss=0.2415, simple_loss=0.3156, pruned_loss=0.08371, over 3545.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2977, pruned_loss=0.06677, over 820370.34 frames. ], batch size: 35, lr: 6.67e-03, grad_scale: 32.0 +2024-07-28 11:28:59,001 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.659e+01 6.801e+01 7.178e+01 7.670e+01 2.138e+02, threshold=1.436e+02, percent-clipped=2.0 +2024-07-28 11:29:08,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=149845.33333333334, ans=0.125 +2024-07-28 11:29:17,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=149872.0, ans=0.0 +2024-07-28 11:29:23,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=149885.33333333334, ans=0.125 +2024-07-28 11:29:29,434 INFO [train.py:1114] (2/4) Epoch 11, batch 10200, loss[loss=0.239, simple_loss=0.3179, pruned_loss=0.08001, over 3390.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3009, pruned_loss=0.07046, over 787725.59 frames. ], batch size: 35, lr: 6.67e-03, grad_scale: 32.0 +2024-07-28 11:29:36,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.39 vs. 
limit=15.0 +2024-07-28 11:29:40,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.65 vs. limit=15.0 +2024-07-28 11:30:30,086 INFO [train.py:1114] (2/4) Epoch 12, batch 0, loss[loss=0.1654, simple_loss=0.2554, pruned_loss=0.03767, over 4853.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2554, pruned_loss=0.03767, over 4853.00 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:30:30,086 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 11:30:35,056 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8422, 5.1798, 5.0843, 5.6304], device='cuda:2') +2024-07-28 11:30:38,522 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.8452, 4.7102, 4.3249, 4.4803], device='cuda:2') +2024-07-28 11:30:49,834 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.171, simple_loss=0.2765, pruned_loss=0.03276, over 944034.00 frames. +2024-07-28 11:30:49,836 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 11:31:23,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=149954.66666666666, ans=0.0 +2024-07-28 11:31:27,527 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 6.296e+01 6.981e+01 7.560e+01 1.062e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 11:31:31,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=149968.0, ans=0.0 +2024-07-28 11:31:37,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0 +2024-07-28 11:31:40,985 INFO [train.py:1114] (2/4) Epoch 12, batch 50, loss[loss=0.1809, simple_loss=0.2668, pruned_loss=0.04753, over 4613.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2824, pruned_loss=0.05297, over 206147.21 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:31:42,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.23 vs. limit=22.5 +2024-07-28 11:31:46,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=149994.66666666666, ans=0.2 +2024-07-28 11:31:49,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150008.0, ans=0.125 +2024-07-28 11:31:50,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150008.0, ans=0.125 +2024-07-28 11:31:56,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-07-28 11:32:01,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=150021.33333333334, ans=0.0 +2024-07-28 11:32:06,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.04 vs. 
limit=12.0 +2024-07-28 11:32:14,225 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.77 vs. limit=22.5 +2024-07-28 11:32:16,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=150061.33333333334, ans=10.0 +2024-07-28 11:32:17,177 INFO [train.py:1114] (2/4) Epoch 12, batch 100, loss[loss=0.2345, simple_loss=0.311, pruned_loss=0.07902, over 4647.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2819, pruned_loss=0.05343, over 365016.47 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:32:20,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=150061.33333333334, ans=0.1 +2024-07-28 11:32:24,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150074.66666666666, ans=0.1 +2024-07-28 11:32:26,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=150074.66666666666, ans=0.0 +2024-07-28 11:32:31,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.11 vs. limit=6.0 +2024-07-28 11:32:32,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.65 vs. limit=15.0 +2024-07-28 11:32:35,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=150088.0, ans=0.125 +2024-07-28 11:32:39,034 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+01 5.482e+01 5.996e+01 6.450e+01 1.001e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 11:32:43,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0 +2024-07-28 11:32:52,011 INFO [train.py:1114] (2/4) Epoch 12, batch 150, loss[loss=0.1652, simple_loss=0.2529, pruned_loss=0.03879, over 4601.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2814, pruned_loss=0.05221, over 493795.48 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:33:03,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0 +2024-07-28 11:33:05,794 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. 
limit=15.0 +2024-07-28 11:33:10,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=150154.66666666666, ans=0.125 +2024-07-28 11:33:11,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=150154.66666666666, ans=0.0 +2024-07-28 11:33:13,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=150168.0, ans=0.0 +2024-07-28 11:33:16,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150168.0, ans=0.1 +2024-07-28 11:33:29,133 INFO [train.py:1114] (2/4) Epoch 12, batch 200, loss[loss=0.1934, simple_loss=0.2774, pruned_loss=0.05469, over 4518.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2816, pruned_loss=0.05287, over 593613.69 frames. ], batch size: 21, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:33:29,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=150194.66666666666, ans=0.0 +2024-07-28 11:33:32,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=150194.66666666666, ans=15.0 +2024-07-28 11:33:35,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=150208.0, ans=0.125 +2024-07-28 11:33:48,810 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.805e+01 6.723e+01 7.880e+01 1.326e+02, threshold=1.345e+02, percent-clipped=1.0 +2024-07-28 11:34:02,306 INFO [train.py:1114] (2/4) Epoch 12, batch 250, loss[loss=0.2051, simple_loss=0.2987, pruned_loss=0.05579, over 4623.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2816, pruned_loss=0.05287, over 670384.21 frames. ], batch size: 16, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:34:16,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=150288.0, ans=0.0 +2024-07-28 11:34:16,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=150288.0, ans=0.0 +2024-07-28 11:34:17,983 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:34:20,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=150288.0, ans=0.0 +2024-07-28 11:34:21,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=150288.0, ans=0.2 +2024-07-28 11:34:28,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150314.66666666666, ans=0.1 +2024-07-28 11:34:29,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=150314.66666666666, ans=0.2 +2024-07-28 11:34:35,839 INFO [train.py:1114] (2/4) Epoch 12, batch 300, loss[loss=0.2022, simple_loss=0.2999, pruned_loss=0.05228, over 4791.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.281, pruned_loss=0.0526, over 729859.75 frames. 
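During the epoch-12 validation pass above, the trainer also dumps per-module attention-weight entropies (`zipformer.py:1858`), e.g. `tensor([5.8422, 5.1798, 5.0843, 5.6304])`: one scalar per head, with higher values meaning more diffuse attention. The sketch below computes such a diagnostic; the tensor layout is an assumption and the real zipformer convention may differ.

```python
import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """Average entropy of attention distributions, one value per head.
    Assumes attn_weights of shape (num_heads, batch, query, key) summing
    to 1 over the key axis; a hedged sketch of the zipformer diagnostic."""
    p = attn_weights.clamp_min(1e-20)
    entropy = -(p * p.log()).sum(dim=-1)   # entropy per (head, batch, query)
    return entropy.mean(dim=(1, 2))        # average over batch and queries

# Uniform attention over 512 keys gives entropy log(512) ~= 6.24 per head,
# the same order of magnitude as the values printed in the log above.
w = torch.full((8, 2, 10, 512), 1.0 / 512)
print(attn_weights_entropy(w))
```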
], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:34:36,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150328.0, ans=0.125 +2024-07-28 11:34:53,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=150354.66666666666, ans=0.125 +2024-07-28 11:34:56,215 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.570e+01 6.129e+01 6.973e+01 1.064e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 11:34:59,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=150368.0, ans=0.0 +2024-07-28 11:35:01,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150368.0, ans=0.1 +2024-07-28 11:35:09,569 INFO [train.py:1114] (2/4) Epoch 12, batch 350, loss[loss=0.213, simple_loss=0.2992, pruned_loss=0.06341, over 4939.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2802, pruned_loss=0.05186, over 776121.88 frames. ], batch size: 12, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:35:12,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=150394.66666666666, ans=0.025 +2024-07-28 11:35:14,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.25 vs. limit=10.0 +2024-07-28 11:35:15,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.82 vs. limit=10.0 +2024-07-28 11:35:24,190 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.78 vs. limit=15.0 +2024-07-28 11:35:24,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.12 vs. limit=15.0 +2024-07-28 11:35:28,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150421.33333333334, ans=0.125 +2024-07-28 11:35:30,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=150434.66666666666, ans=0.125 +2024-07-28 11:35:34,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=150434.66666666666, ans=0.125 +2024-07-28 11:35:34,729 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.26 vs. limit=22.5 +2024-07-28 11:35:35,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=150434.66666666666, ans=0.0 +2024-07-28 11:35:35,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=150448.0, ans=0.025 +2024-07-28 11:35:42,768 INFO [train.py:1114] (2/4) Epoch 12, batch 400, loss[loss=0.1757, simple_loss=0.2743, pruned_loss=0.03855, over 4684.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2806, pruned_loss=0.05242, over 813344.88 frames. 
], batch size: 13, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:35:51,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.63 vs. limit=10.0 +2024-07-28 11:36:04,833 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.661e+01 6.256e+01 7.189e+01 1.032e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 11:36:05,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=150501.33333333334, ans=0.05 +2024-07-28 11:36:15,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=150514.66666666666, ans=0.0 +2024-07-28 11:36:18,210 INFO [train.py:1114] (2/4) Epoch 12, batch 450, loss[loss=0.1825, simple_loss=0.2733, pruned_loss=0.04582, over 4638.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2805, pruned_loss=0.05249, over 838317.70 frames. ], batch size: 13, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:36:25,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=150541.33333333334, ans=0.0 +2024-07-28 11:36:43,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=150568.0, ans=0.025 +2024-07-28 11:36:46,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=150581.33333333334, ans=0.0 +2024-07-28 11:36:48,419 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=12.0 +2024-07-28 11:36:49,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=150581.33333333334, ans=0.025 +2024-07-28 11:36:50,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=150581.33333333334, ans=0.0 +2024-07-28 11:36:51,747 INFO [train.py:1114] (2/4) Epoch 12, batch 500, loss[loss=0.21, simple_loss=0.3095, pruned_loss=0.05528, over 4695.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.28, pruned_loss=0.05214, over 860675.07 frames. ], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:37:01,483 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:37:14,901 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.527e+01 6.124e+01 7.195e+01 1.120e+02, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 11:37:17,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=150634.66666666666, ans=0.0 +2024-07-28 11:37:24,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=150648.0, ans=0.0 +2024-07-28 11:37:28,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150648.0, ans=0.125 +2024-07-28 11:37:30,001 INFO [train.py:1114] (2/4) Epoch 12, batch 550, loss[loss=0.2026, simple_loss=0.2983, pruned_loss=0.05349, over 4593.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2799, pruned_loss=0.05183, over 877014.16 frames. 
], batch size: 17, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:37:43,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-07-28 11:37:44,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=150688.0, ans=0.125 +2024-07-28 11:37:46,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=150688.0, ans=0.0 +2024-07-28 11:38:02,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150714.66666666666, ans=0.1 +2024-07-28 11:38:02,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=150714.66666666666, ans=0.09899494936611666 +2024-07-28 11:38:08,514 INFO [train.py:1114] (2/4) Epoch 12, batch 600, loss[loss=0.2365, simple_loss=0.3134, pruned_loss=0.07978, over 4625.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2804, pruned_loss=0.05222, over 891759.09 frames. ], batch size: 16, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:38:09,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=150728.0, ans=0.0 +2024-07-28 11:38:17,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. limit=15.0 +2024-07-28 11:38:20,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.22 vs. limit=10.0 +2024-07-28 11:38:25,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=150754.66666666666, ans=0.2 +2024-07-28 11:38:30,309 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.714e+01 6.286e+01 7.173e+01 1.255e+02, threshold=1.257e+02, percent-clipped=1.0 +2024-07-28 11:38:51,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150781.33333333334, ans=0.125 +2024-07-28 11:38:53,100 INFO [train.py:1114] (2/4) Epoch 12, batch 650, loss[loss=0.1808, simple_loss=0.2758, pruned_loss=0.04286, over 4756.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2791, pruned_loss=0.05141, over 903174.46 frames. 
], batch size: 13, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:39:00,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=150794.66666666666, ans=0.0 +2024-07-28 11:39:01,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=150794.66666666666, ans=0.2 +2024-07-28 11:39:04,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=150794.66666666666, ans=0.125 +2024-07-28 11:39:05,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=150808.0, ans=0.125 +2024-07-28 11:39:11,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150808.0, ans=0.1 +2024-07-28 11:39:22,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=150834.66666666666, ans=0.05 +2024-07-28 11:39:25,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.09 vs. limit=15.0 +2024-07-28 11:39:29,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=150834.66666666666, ans=0.125 +2024-07-28 11:39:30,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=150848.0, ans=0.0 +2024-07-28 11:39:30,657 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:39:34,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=150848.0, ans=0.0 +2024-07-28 11:39:37,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=150848.0, ans=0.125 +2024-07-28 11:39:38,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=150848.0, ans=0.125 +2024-07-28 11:39:39,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=150861.33333333334, ans=0.125 +2024-07-28 11:39:40,042 INFO [train.py:1114] (2/4) Epoch 12, batch 700, loss[loss=0.1907, simple_loss=0.2699, pruned_loss=0.05573, over 4639.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2787, pruned_loss=0.05109, over 911061.10 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:39:42,100 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:39:45,444 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:39:46,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. 
limit=15.0 +2024-07-28 11:39:58,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=150888.0, ans=0.2 +2024-07-28 11:40:00,051 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+01 5.630e+01 6.208e+01 7.148e+01 1.083e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 11:40:09,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=150914.66666666666, ans=0.025 +2024-07-28 11:40:11,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150914.66666666666, ans=0.125 +2024-07-28 11:40:13,786 INFO [train.py:1114] (2/4) Epoch 12, batch 750, loss[loss=0.174, simple_loss=0.2631, pruned_loss=0.04241, over 4682.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2783, pruned_loss=0.05122, over 917950.35 frames. ], batch size: 13, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:40:16,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=150928.0, ans=0.04949747468305833 +2024-07-28 11:40:18,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0 +2024-07-28 11:40:21,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=150941.33333333334, ans=0.0 +2024-07-28 11:40:25,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=150941.33333333334, ans=0.0 +2024-07-28 11:40:30,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150954.66666666666, ans=0.125 +2024-07-28 11:40:33,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=150968.0, ans=0.2 +2024-07-28 11:40:36,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=150968.0, ans=0.2 +2024-07-28 11:40:47,001 INFO [train.py:1114] (2/4) Epoch 12, batch 800, loss[loss=0.2051, simple_loss=0.2927, pruned_loss=0.05874, over 4851.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2777, pruned_loss=0.05123, over 923424.06 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:40:52,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=150994.66666666666, ans=0.125 +2024-07-28 11:41:01,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=151021.33333333334, ans=0.125 +2024-07-28 11:41:06,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.726e+01 6.208e+01 6.822e+01 1.017e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 11:41:20,186 INFO [train.py:1114] (2/4) Epoch 12, batch 850, loss[loss=0.2292, simple_loss=0.3156, pruned_loss=0.07142, over 4658.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2774, pruned_loss=0.05138, over 927646.37 frames. 
], batch size: 14, lr: 6.37e-03, grad_scale: 64.0 +2024-07-28 11:41:23,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151061.33333333334, ans=0.125 +2024-07-28 11:41:35,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=151088.0, ans=0.125 +2024-07-28 11:41:37,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=151088.0, ans=0.0 +2024-07-28 11:41:40,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=151101.33333333334, ans=0.07 +2024-07-28 11:41:47,567 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.78 vs. limit=15.0 +2024-07-28 11:41:47,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=151114.66666666666, ans=0.1 +2024-07-28 11:41:47,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=151114.66666666666, ans=0.125 +2024-07-28 11:41:55,130 INFO [train.py:1114] (2/4) Epoch 12, batch 900, loss[loss=0.1747, simple_loss=0.2552, pruned_loss=0.04706, over 4860.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2787, pruned_loss=0.05213, over 928215.96 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0 +2024-07-28 11:42:02,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=151141.33333333334, ans=0.025 +2024-07-28 11:42:10,291 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.72 vs. limit=5.0 +2024-07-28 11:42:15,739 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.655e+01 6.355e+01 7.195e+01 9.950e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 11:42:20,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151168.0, ans=0.125 +2024-07-28 11:42:26,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.68 vs. limit=15.0 +2024-07-28 11:42:28,522 INFO [train.py:1114] (2/4) Epoch 12, batch 950, loss[loss=0.1585, simple_loss=0.252, pruned_loss=0.03249, over 4775.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2791, pruned_loss=0.05165, over 929949.41 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0 +2024-07-28 11:42:30,981 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.20 vs. 
limit=15.0 +2024-07-28 11:42:34,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=151194.66666666666, ans=0.0 +2024-07-28 11:42:47,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=151221.33333333334, ans=0.125 +2024-07-28 11:42:51,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=151234.66666666666, ans=15.0 +2024-07-28 11:42:58,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.03 vs. limit=15.0 +2024-07-28 11:43:02,360 INFO [train.py:1114] (2/4) Epoch 12, batch 1000, loss[loss=0.1792, simple_loss=0.2562, pruned_loss=0.05108, over 4962.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2807, pruned_loss=0.0526, over 929445.38 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:43:08,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=151274.66666666666, ans=0.0 +2024-07-28 11:43:08,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=151274.66666666666, ans=0.125 +2024-07-28 11:43:10,844 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:43:14,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=151274.66666666666, ans=0.125 +2024-07-28 11:43:15,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=151288.0, ans=0.125 +2024-07-28 11:43:15,858 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.15 vs. limit=22.5 +2024-07-28 11:43:18,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151288.0, ans=0.125 +2024-07-28 11:43:20,301 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.26 vs. limit=15.0 +2024-07-28 11:43:25,150 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.535e+01 6.224e+01 7.277e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 11:43:37,741 INFO [train.py:1114] (2/4) Epoch 12, batch 1050, loss[loss=0.1793, simple_loss=0.274, pruned_loss=0.04228, over 4877.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2797, pruned_loss=0.05208, over 932087.79 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:43:39,928 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.51 vs. 
limit=15.0 +2024-07-28 11:43:44,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151328.0, ans=0.125 +2024-07-28 11:43:58,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=151354.66666666666, ans=0.0 +2024-07-28 11:44:14,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=151381.33333333334, ans=0.0 +2024-07-28 11:44:15,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=151381.33333333334, ans=0.125 +2024-07-28 11:44:17,155 INFO [train.py:1114] (2/4) Epoch 12, batch 1100, loss[loss=0.1965, simple_loss=0.2824, pruned_loss=0.05532, over 4901.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2803, pruned_loss=0.05217, over 934835.39 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:44:17,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=151394.66666666666, ans=0.125 +2024-07-28 11:44:23,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=151408.0, ans=0.0 +2024-07-28 11:44:38,701 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.539e+01 6.009e+01 6.753e+01 8.123e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 11:44:40,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151434.66666666666, ans=0.1 +2024-07-28 11:44:40,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=151434.66666666666, ans=0.125 +2024-07-28 11:44:42,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.64 vs. limit=22.5 +2024-07-28 11:44:48,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151448.0, ans=0.125 +2024-07-28 11:44:53,968 INFO [train.py:1114] (2/4) Epoch 12, batch 1150, loss[loss=0.159, simple_loss=0.2432, pruned_loss=0.03744, over 4894.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2803, pruned_loss=0.05208, over 934409.87 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:44:58,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151461.33333333334, ans=0.0 +2024-07-28 11:45:03,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=151474.66666666666, ans=0.125 +2024-07-28 11:45:04,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.11 vs. 
limit=15.0 +2024-07-28 11:45:06,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=151474.66666666666, ans=0.0 +2024-07-28 11:45:10,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=151488.0, ans=0.125 +2024-07-28 11:45:10,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151488.0, ans=0.1 +2024-07-28 11:45:11,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=151488.0, ans=0.0 +2024-07-28 11:45:36,564 INFO [train.py:1114] (2/4) Epoch 12, batch 1200, loss[loss=0.1926, simple_loss=0.2846, pruned_loss=0.05029, over 4872.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2813, pruned_loss=0.05221, over 933769.11 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:45:40,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=151528.0, ans=0.2 +2024-07-28 11:45:42,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151541.33333333334, ans=0.1 +2024-07-28 11:45:54,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151554.66666666666, ans=0.1 +2024-07-28 11:45:57,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.539e+01 6.207e+01 7.047e+01 1.080e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 11:45:57,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=151568.0, ans=0.125 +2024-07-28 11:46:01,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0 +2024-07-28 11:46:10,284 INFO [train.py:1114] (2/4) Epoch 12, batch 1250, loss[loss=0.2335, simple_loss=0.3162, pruned_loss=0.07545, over 4803.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2811, pruned_loss=0.05205, over 937813.45 frames. ], batch size: 15, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:46:14,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.27 vs. limit=22.5 +2024-07-28 11:46:31,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.78 vs. limit=15.0 +2024-07-28 11:46:33,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.45 vs. limit=10.0 +2024-07-28 11:46:43,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.13 vs. limit=15.0 +2024-07-28 11:46:43,326 INFO [train.py:1114] (2/4) Epoch 12, batch 1300, loss[loss=0.1878, simple_loss=0.2863, pruned_loss=0.04468, over 4710.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2796, pruned_loss=0.0514, over 939116.96 frames. 
], batch size: 19, lr: 6.36e-03, grad_scale: 32.0 +2024-07-28 11:46:53,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=151674.66666666666, ans=0.95 +2024-07-28 11:46:59,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.54 vs. limit=15.0 +2024-07-28 11:47:03,823 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.669e+01 6.218e+01 7.134e+01 9.799e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 11:47:07,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=151701.33333333334, ans=0.125 +2024-07-28 11:47:16,372 INFO [train.py:1114] (2/4) Epoch 12, batch 1350, loss[loss=0.1778, simple_loss=0.2723, pruned_loss=0.0417, over 4749.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2792, pruned_loss=0.05137, over 941074.10 frames. ], batch size: 13, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:47:34,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=151754.66666666666, ans=0.125 +2024-07-28 11:47:37,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=151754.66666666666, ans=0.05 +2024-07-28 11:47:39,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=151754.66666666666, ans=0.125 +2024-07-28 11:47:42,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151768.0, ans=0.1 +2024-07-28 11:47:44,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.94 vs. limit=15.0 +2024-07-28 11:47:48,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=151781.33333333334, ans=0.1 +2024-07-28 11:47:51,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=151781.33333333334, ans=0.125 +2024-07-28 11:47:54,339 INFO [train.py:1114] (2/4) Epoch 12, batch 1400, loss[loss=0.1266, simple_loss=0.2226, pruned_loss=0.01531, over 4703.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.279, pruned_loss=0.05132, over 942764.41 frames. ], batch size: 11, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:47:55,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=151794.66666666666, ans=0.0 +2024-07-28 11:47:55,366 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.46 vs. limit=15.0 +2024-07-28 11:47:58,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.48 vs. limit=22.5 +2024-07-28 11:48:03,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=151808.0, ans=0.0 +2024-07-28 11:48:13,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.28 vs. 
limit=15.0 +2024-07-28 11:48:16,697 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.713e+01 6.249e+01 7.424e+01 1.107e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 11:48:21,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=151834.66666666666, ans=0.0 +2024-07-28 11:48:29,486 INFO [train.py:1114] (2/4) Epoch 12, batch 1450, loss[loss=0.1925, simple_loss=0.283, pruned_loss=0.05094, over 4685.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2797, pruned_loss=0.05173, over 942967.76 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:48:55,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=151914.66666666666, ans=0.125 +2024-07-28 11:49:04,470 INFO [train.py:1114] (2/4) Epoch 12, batch 1500, loss[loss=0.2127, simple_loss=0.3072, pruned_loss=0.05908, over 4803.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2797, pruned_loss=0.05128, over 942941.72 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:49:08,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=15.0 +2024-07-28 11:49:08,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=151928.0, ans=0.025 +2024-07-28 11:49:11,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=151941.33333333334, ans=0.0 +2024-07-28 11:49:19,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151941.33333333334, ans=0.125 +2024-07-28 11:49:20,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=151941.33333333334, ans=0.125 +2024-07-28 11:49:21,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. limit=15.0 +2024-07-28 11:49:23,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151954.66666666666, ans=0.1 +2024-07-28 11:49:29,098 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.582e+01 5.945e+01 6.654e+01 9.521e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 11:49:33,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=151968.0, ans=0.2 +2024-07-28 11:49:59,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=151994.66666666666, ans=0.125 +2024-07-28 11:49:59,053 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:49:59,627 INFO [train.py:1114] (2/4) Epoch 12, batch 1550, loss[loss=0.1847, simple_loss=0.2815, pruned_loss=0.04389, over 4910.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2794, pruned_loss=0.05113, over 938534.46 frames. 
], batch size: 15, lr: 6.35e-03, grad_scale: 16.0 +2024-07-28 11:49:59,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=151994.66666666666, ans=0.025 +2024-07-28 11:50:04,105 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:50:08,443 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.75 vs. limit=10.0 +2024-07-28 11:50:16,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.52 vs. limit=10.0 +2024-07-28 11:50:17,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=152021.33333333334, ans=0.0 +2024-07-28 11:50:18,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=152021.33333333334, ans=0.0 +2024-07-28 11:50:27,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.34 vs. limit=6.0 +2024-07-28 11:50:29,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=152048.0, ans=0.0 +2024-07-28 11:50:35,418 INFO [train.py:1114] (2/4) Epoch 12, batch 1600, loss[loss=0.1848, simple_loss=0.2719, pruned_loss=0.04884, over 4875.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2792, pruned_loss=0.05166, over 937171.40 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:50:38,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=152061.33333333334, ans=0.0 +2024-07-28 11:50:44,220 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:50:45,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=152074.66666666666, ans=0.125 +2024-07-28 11:50:54,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=152088.0, ans=0.125 +2024-07-28 11:50:59,240 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.721e+01 6.309e+01 7.092e+01 1.066e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 11:51:01,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=152101.33333333334, ans=0.025 +2024-07-28 11:51:16,031 INFO [train.py:1114] (2/4) Epoch 12, batch 1650, loss[loss=0.1894, simple_loss=0.2816, pruned_loss=0.04858, over 4670.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2795, pruned_loss=0.05223, over 937007.85 frames. 
], batch size: 14, lr: 6.35e-03, grad_scale: 32.0 +2024-07-28 11:51:16,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=152128.0, ans=0.0 +2024-07-28 11:51:16,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=152128.0, ans=0.2 +2024-07-28 11:51:18,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152128.0, ans=0.1 +2024-07-28 11:51:19,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.31 vs. limit=15.0 +2024-07-28 11:51:26,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152141.33333333334, ans=0.1 +2024-07-28 11:51:26,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=152141.33333333334, ans=0.0 +2024-07-28 11:51:30,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152154.66666666666, ans=0.125 +2024-07-28 11:51:48,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152154.66666666666, ans=0.125 +2024-07-28 11:51:53,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=152168.0, ans=0.125 +2024-07-28 11:51:58,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=152181.33333333334, ans=0.125 +2024-07-28 11:52:05,269 INFO [train.py:1114] (2/4) Epoch 12, batch 1700, loss[loss=0.1769, simple_loss=0.2584, pruned_loss=0.04769, over 4685.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.279, pruned_loss=0.05198, over 938741.67 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:52:05,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=152194.66666666666, ans=0.125 +2024-07-28 11:52:05,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=152194.66666666666, ans=0.125 +2024-07-28 11:52:15,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=152208.0, ans=0.2 +2024-07-28 11:52:26,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+01 5.663e+01 6.309e+01 7.408e+01 1.033e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 11:52:30,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=152234.66666666666, ans=0.125 +2024-07-28 11:52:38,672 INFO [train.py:1114] (2/4) Epoch 12, batch 1750, loss[loss=0.1721, simple_loss=0.252, pruned_loss=0.04606, over 4816.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.279, pruned_loss=0.05174, over 940086.49 frames. 
], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:52:44,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=152261.33333333334, ans=0.125 +2024-07-28 11:52:50,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=152274.66666666666, ans=0.125 +2024-07-28 11:52:51,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=152288.0, ans=0.0 +2024-07-28 11:52:59,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.73 vs. limit=15.0 +2024-07-28 11:53:00,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.10 vs. limit=15.0 +2024-07-28 11:53:01,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=152301.33333333334, ans=10.0 +2024-07-28 11:53:06,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-07-28 11:53:06,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=152314.66666666666, ans=0.125 +2024-07-28 11:53:08,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=152314.66666666666, ans=0.125 +2024-07-28 11:53:12,227 INFO [train.py:1114] (2/4) Epoch 12, batch 1800, loss[loss=0.1966, simple_loss=0.2889, pruned_loss=0.0522, over 4635.00 frames. ], tot_loss[loss=0.192, simple_loss=0.28, pruned_loss=0.05203, over 940792.41 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:53:12,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=152328.0, ans=0.125 +2024-07-28 11:53:17,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=152328.0, ans=0.125 +2024-07-28 11:53:22,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=152341.33333333334, ans=0.2 +2024-07-28 11:53:35,387 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+01 5.693e+01 6.293e+01 7.294e+01 9.358e+01, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 11:53:35,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=152368.0, ans=0.2 +2024-07-28 11:53:43,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=152381.33333333334, ans=0.125 +2024-07-28 11:53:44,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=152381.33333333334, ans=0.2 +2024-07-28 11:53:49,303 INFO [train.py:1114] (2/4) Epoch 12, batch 1850, loss[loss=0.1955, simple_loss=0.2884, pruned_loss=0.05134, over 4818.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2787, pruned_loss=0.05188, over 940967.46 frames. 
], batch size: 14, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:54:02,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=152421.33333333334, ans=0.125 +2024-07-28 11:54:06,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=152421.33333333334, ans=0.1 +2024-07-28 11:54:20,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0 +2024-07-28 11:54:23,282 INFO [train.py:1114] (2/4) Epoch 12, batch 1900, loss[loss=0.1947, simple_loss=0.2907, pruned_loss=0.04933, over 4659.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2787, pruned_loss=0.05139, over 941988.23 frames. ], batch size: 14, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:54:38,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=152488.0, ans=0.125 +2024-07-28 11:54:40,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=152488.0, ans=0.125 +2024-07-28 11:54:42,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.03 vs. limit=15.0 +2024-07-28 11:54:44,569 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.599e+01 6.321e+01 7.441e+01 1.076e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 11:54:44,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=152501.33333333334, ans=0.125 +2024-07-28 11:54:56,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. limit=12.0 +2024-07-28 11:54:56,299 INFO [train.py:1114] (2/4) Epoch 12, batch 1950, loss[loss=0.2316, simple_loss=0.3218, pruned_loss=0.07063, over 4897.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2804, pruned_loss=0.0521, over 943943.76 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:54:57,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.41 vs. limit=22.5 +2024-07-28 11:54:57,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=152528.0, ans=0.125 +2024-07-28 11:55:00,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=152528.0, ans=0.125 +2024-07-28 11:55:01,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152528.0, ans=0.1 +2024-07-28 11:55:03,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=152541.33333333334, ans=0.0 +2024-07-28 11:55:11,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=152554.66666666666, ans=0.125 +2024-07-28 11:55:18,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.83 vs. 
limit=15.0 +2024-07-28 11:55:31,699 INFO [train.py:1114] (2/4) Epoch 12, batch 2000, loss[loss=0.1705, simple_loss=0.2577, pruned_loss=0.04164, over 4792.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2813, pruned_loss=0.05231, over 941581.43 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:55:33,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=152594.66666666666, ans=0.2 +2024-07-28 11:55:35,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=152594.66666666666, ans=0.125 +2024-07-28 11:55:37,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152594.66666666666, ans=0.125 +2024-07-28 11:55:52,947 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.820e+01 6.521e+01 7.809e+01 1.085e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 11:56:04,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=152648.0, ans=0.0 +2024-07-28 11:56:05,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=152648.0, ans=0.125 +2024-07-28 11:56:07,048 INFO [train.py:1114] (2/4) Epoch 12, batch 2050, loss[loss=0.1535, simple_loss=0.2312, pruned_loss=0.03787, over 4634.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2812, pruned_loss=0.05243, over 939890.21 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0 +2024-07-28 11:56:10,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=152661.33333333334, ans=0.035 +2024-07-28 11:56:22,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=152688.0, ans=0.0 +2024-07-28 11:56:32,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=152701.33333333334, ans=0.0 +2024-07-28 11:56:34,608 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:56:34,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=152701.33333333334, ans=0.2 +2024-07-28 11:56:35,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=152714.66666666666, ans=0.125 +2024-07-28 11:56:42,395 INFO [train.py:1114] (2/4) Epoch 12, batch 2100, loss[loss=0.1669, simple_loss=0.2613, pruned_loss=0.03623, over 4767.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2805, pruned_loss=0.05201, over 941477.10 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:56:42,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=152728.0, ans=0.0 +2024-07-28 11:56:47,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=152728.0, ans=0.125 +2024-07-28 11:56:48,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=152741.33333333334, ans=0.0 +2024-07-28 11:56:58,335 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.82 vs. 
limit=10.0 +2024-07-28 11:57:03,674 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.175e+01 5.578e+01 6.172e+01 6.931e+01 1.014e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 11:57:12,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=152781.33333333334, ans=0.04949747468305833 +2024-07-28 11:57:19,550 INFO [train.py:1114] (2/4) Epoch 12, batch 2150, loss[loss=0.185, simple_loss=0.2781, pruned_loss=0.04591, over 4894.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2783, pruned_loss=0.05117, over 944630.23 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:57:19,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=152794.66666666666, ans=0.2 +2024-07-28 11:57:24,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.27 vs. limit=10.0 +2024-07-28 11:57:25,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=152808.0, ans=0.2 +2024-07-28 11:57:27,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=152808.0, ans=0.125 +2024-07-28 11:57:30,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0 +2024-07-28 11:57:33,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152821.33333333334, ans=0.1 +2024-07-28 11:57:44,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=152834.66666666666, ans=0.0 +2024-07-28 11:57:52,750 INFO [train.py:1114] (2/4) Epoch 12, batch 2200, loss[loss=0.2079, simple_loss=0.2925, pruned_loss=0.06164, over 4817.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2783, pruned_loss=0.05121, over 943570.03 frames. 
], batch size: 14, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:57:59,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=152874.66666666666, ans=0.04949747468305833 +2024-07-28 11:58:00,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=152874.66666666666, ans=0.0 +2024-07-28 11:58:01,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=152874.66666666666, ans=0.125 +2024-07-28 11:58:10,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=152888.0, ans=0.125 +2024-07-28 11:58:11,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=152888.0, ans=0.0 +2024-07-28 11:58:12,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=152901.33333333334, ans=0.125 +2024-07-28 11:58:13,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=152901.33333333334, ans=0.125 +2024-07-28 11:58:14,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.791e+01 6.232e+01 7.216e+01 1.117e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 11:58:19,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=152914.66666666666, ans=0.125 +2024-07-28 11:58:24,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=15.0 +2024-07-28 11:58:25,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=152914.66666666666, ans=0.125 +2024-07-28 11:58:27,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152928.0, ans=0.1 +2024-07-28 11:58:27,608 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.30 vs. limit=6.0 +2024-07-28 11:58:27,779 INFO [train.py:1114] (2/4) Epoch 12, batch 2250, loss[loss=0.215, simple_loss=0.3179, pruned_loss=0.05605, over 4685.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2777, pruned_loss=0.05103, over 942065.74 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:58:30,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152928.0, ans=0.0 +2024-07-28 11:58:39,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=152941.33333333334, ans=0.0 +2024-07-28 11:58:41,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152954.66666666666, ans=0.1 +2024-07-28 11:58:44,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=152954.66666666666, ans=0.125 +2024-07-28 11:58:51,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.09 vs. 
limit=12.0 +2024-07-28 11:59:00,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152981.33333333334, ans=0.0 +2024-07-28 11:59:03,227 INFO [train.py:1114] (2/4) Epoch 12, batch 2300, loss[loss=0.1629, simple_loss=0.2445, pruned_loss=0.04068, over 4933.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.277, pruned_loss=0.05163, over 939563.20 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:59:07,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=152994.66666666666, ans=0.04949747468305833 +2024-07-28 11:59:07,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=152994.66666666666, ans=0.125 +2024-07-28 11:59:19,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153021.33333333334, ans=0.125 +2024-07-28 11:59:21,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=153021.33333333334, ans=0.2 +2024-07-28 11:59:24,486 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.617e+01 6.257e+01 7.219e+01 1.104e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 11:59:31,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=153048.0, ans=0.2 +2024-07-28 11:59:36,844 INFO [train.py:1114] (2/4) Epoch 12, batch 2350, loss[loss=0.2163, simple_loss=0.3039, pruned_loss=0.06436, over 4644.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2767, pruned_loss=0.05122, over 941241.57 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 11:59:37,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.32 vs. limit=15.0 +2024-07-28 11:59:43,341 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:59:48,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=153074.66666666666, ans=0.0 +2024-07-28 11:59:49,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0 +2024-07-28 11:59:53,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=153088.0, ans=0.125 +2024-07-28 11:59:55,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153088.0, ans=0.1 +2024-07-28 12:00:08,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=153114.66666666666, ans=0.125 +2024-07-28 12:00:10,593 INFO [train.py:1114] (2/4) Epoch 12, batch 2400, loss[loss=0.1824, simple_loss=0.2729, pruned_loss=0.04592, over 4648.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2771, pruned_loss=0.05125, over 941047.41 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0 +2024-07-28 12:00:21,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.30 vs. 
limit=15.0 +2024-07-28 12:00:26,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=153154.66666666666, ans=0.0 +2024-07-28 12:00:27,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=153154.66666666666, ans=0.0 +2024-07-28 12:00:31,902 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.703e+01 6.200e+01 6.966e+01 9.820e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 12:00:32,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=153168.0, ans=0.125 +2024-07-28 12:00:38,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=153181.33333333334, ans=0.125 +2024-07-28 12:00:45,567 INFO [train.py:1114] (2/4) Epoch 12, batch 2450, loss[loss=0.213, simple_loss=0.3138, pruned_loss=0.05615, over 4698.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2785, pruned_loss=0.05202, over 937317.32 frames. ], batch size: 13, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:00:50,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=153194.66666666666, ans=0.07 +2024-07-28 12:00:59,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153221.33333333334, ans=0.125 +2024-07-28 12:01:09,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=153234.66666666666, ans=0.125 +2024-07-28 12:01:17,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=153248.0, ans=0.125 +2024-07-28 12:01:18,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=153261.33333333334, ans=0.125 +2024-07-28 12:01:18,878 INFO [train.py:1114] (2/4) Epoch 12, batch 2500, loss[loss=0.2273, simple_loss=0.3248, pruned_loss=0.06491, over 4813.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2799, pruned_loss=0.05244, over 939244.47 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:01:21,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=153261.33333333334, ans=0.125 +2024-07-28 12:01:30,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=153274.66666666666, ans=0.125 +2024-07-28 12:01:41,953 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.651e+01 6.166e+01 7.013e+01 1.450e+02, threshold=1.233e+02, percent-clipped=1.0 +2024-07-28 12:01:46,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153301.33333333334, ans=0.125 +2024-07-28 12:01:54,190 INFO [train.py:1114] (2/4) Epoch 12, batch 2550, loss[loss=0.1772, simple_loss=0.2622, pruned_loss=0.04609, over 4799.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2791, pruned_loss=0.05192, over 939097.49 frames. 
], batch size: 11, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:01:55,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.22 vs. limit=15.0 +2024-07-28 12:02:12,535 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.81 vs. limit=10.0 +2024-07-28 12:02:13,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.43 vs. limit=15.0 +2024-07-28 12:02:24,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=153381.33333333334, ans=0.0 +2024-07-28 12:02:24,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=153381.33333333334, ans=0.125 +2024-07-28 12:02:28,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.28 vs. limit=22.5 +2024-07-28 12:02:29,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=153381.33333333334, ans=0.125 +2024-07-28 12:02:32,198 INFO [train.py:1114] (2/4) Epoch 12, batch 2600, loss[loss=0.1794, simple_loss=0.2722, pruned_loss=0.04335, over 4893.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2807, pruned_loss=0.05254, over 937785.44 frames. ], batch size: 13, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:02:32,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=153394.66666666666, ans=0.2 +2024-07-28 12:02:47,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=153394.66666666666, ans=0.125 +2024-07-28 12:02:47,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=153394.66666666666, ans=0.125 +2024-07-28 12:02:51,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153408.0, ans=0.125 +2024-07-28 12:03:07,391 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.682e+01 6.373e+01 7.145e+01 1.030e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:03:23,893 INFO [train.py:1114] (2/4) Epoch 12, batch 2650, loss[loss=0.2174, simple_loss=0.3011, pruned_loss=0.06691, over 4662.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2801, pruned_loss=0.05163, over 939875.86 frames. 
], batch size: 16, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:03:25,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=153461.33333333334, ans=0.0 +2024-07-28 12:03:26,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=153461.33333333334, ans=0.125 +2024-07-28 12:03:35,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=153474.66666666666, ans=0.0 +2024-07-28 12:03:38,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=153488.0, ans=0.125 +2024-07-28 12:03:40,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=22.5 +2024-07-28 12:03:40,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=153488.0, ans=0.2 +2024-07-28 12:03:45,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=153501.33333333334, ans=0.0 +2024-07-28 12:03:46,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=153501.33333333334, ans=0.125 +2024-07-28 12:03:47,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.86 vs. limit=15.0 +2024-07-28 12:03:56,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=153514.66666666666, ans=0.125 +2024-07-28 12:03:57,518 INFO [train.py:1114] (2/4) Epoch 12, batch 2700, loss[loss=0.2009, simple_loss=0.2928, pruned_loss=0.05451, over 4740.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.28, pruned_loss=0.05187, over 940305.73 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:04:06,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.79 vs. limit=15.0 +2024-07-28 12:04:16,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=153554.66666666666, ans=0.125 +2024-07-28 12:04:18,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=153554.66666666666, ans=0.125 +2024-07-28 12:04:21,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=153568.0, ans=0.125 +2024-07-28 12:04:22,346 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 5.642e+01 6.087e+01 6.756e+01 9.576e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 12:04:29,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=153581.33333333334, ans=0.125 +2024-07-28 12:04:34,318 INFO [train.py:1114] (2/4) Epoch 12, batch 2750, loss[loss=0.1649, simple_loss=0.2623, pruned_loss=0.03379, over 4707.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2786, pruned_loss=0.05148, over 940363.55 frames. 
], batch size: 12, lr: 6.32e-03, grad_scale: 32.0 +2024-07-28 12:04:38,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=153594.66666666666, ans=0.125 +2024-07-28 12:04:49,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=153621.33333333334, ans=0.2 +2024-07-28 12:04:53,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=153621.33333333334, ans=0.125 +2024-07-28 12:04:56,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=153634.66666666666, ans=0.125 +2024-07-28 12:04:58,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=153634.66666666666, ans=0.0 +2024-07-28 12:05:10,495 INFO [train.py:1114] (2/4) Epoch 12, batch 2800, loss[loss=0.2515, simple_loss=0.3249, pruned_loss=0.08905, over 3281.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.279, pruned_loss=0.05201, over 938450.06 frames. ], batch size: 35, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:05:19,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=153674.66666666666, ans=0.2 +2024-07-28 12:05:22,134 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.65 vs. limit=15.0 +2024-07-28 12:05:31,818 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.464e+01 6.125e+01 7.070e+01 1.105e+02, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:05:36,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=12.0 +2024-07-28 12:05:42,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=153714.66666666666, ans=0.125 +2024-07-28 12:05:43,859 INFO [train.py:1114] (2/4) Epoch 12, batch 2850, loss[loss=0.1712, simple_loss=0.2533, pruned_loss=0.04452, over 4964.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2793, pruned_loss=0.05225, over 936666.63 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:05:44,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=153728.0, ans=0.025 +2024-07-28 12:06:09,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=153768.0, ans=0.1 +2024-07-28 12:06:11,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153781.33333333334, ans=0.1 +2024-07-28 12:06:16,608 INFO [train.py:1114] (2/4) Epoch 12, batch 2900, loss[loss=0.2192, simple_loss=0.3024, pruned_loss=0.06797, over 4831.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2803, pruned_loss=0.05199, over 940444.53 frames. 
], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:06:22,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153794.66666666666, ans=0.125 +2024-07-28 12:06:31,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=153821.33333333334, ans=0.125 +2024-07-28 12:06:39,898 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.688e+01 6.255e+01 7.399e+01 1.060e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 12:06:42,181 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.22 vs. limit=22.5 +2024-07-28 12:06:44,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=153834.66666666666, ans=0.2 +2024-07-28 12:06:51,636 INFO [train.py:1114] (2/4) Epoch 12, batch 2950, loss[loss=0.1636, simple_loss=0.2589, pruned_loss=0.0341, over 4710.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2789, pruned_loss=0.05129, over 939167.36 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:06:51,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=153861.33333333334, ans=0.125 +2024-07-28 12:07:03,670 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.47 vs. limit=15.0 +2024-07-28 12:07:12,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=153901.33333333334, ans=0.05 +2024-07-28 12:07:24,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153914.66666666666, ans=0.1 +2024-07-28 12:07:25,356 INFO [train.py:1114] (2/4) Epoch 12, batch 3000, loss[loss=0.1876, simple_loss=0.2722, pruned_loss=0.05152, over 4754.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2787, pruned_loss=0.05113, over 938532.83 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:07:25,357 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 12:07:33,974 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9809, 3.7716, 3.3488, 3.5941], device='cuda:2') +2024-07-28 12:07:46,424 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.1682, simple_loss=0.272, pruned_loss=0.03224, over 944034.00 frames. 
+2024-07-28 12:07:46,425 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 12:07:52,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=153941.33333333334, ans=0.0 +2024-07-28 12:08:03,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=153954.66666666666, ans=0.025 +2024-07-28 12:08:08,185 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.599e+01 6.354e+01 7.168e+01 1.019e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 12:08:17,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=153981.33333333334, ans=0.1 +2024-07-28 12:08:17,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153981.33333333334, ans=0.125 +2024-07-28 12:08:20,401 INFO [train.py:1114] (2/4) Epoch 12, batch 3050, loss[loss=0.1614, simple_loss=0.2524, pruned_loss=0.03517, over 4646.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2792, pruned_loss=0.0513, over 937757.04 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:24,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=153994.66666666666, ans=0.0 +2024-07-28 12:08:35,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=154021.33333333334, ans=0.125 +2024-07-28 12:08:40,896 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:08:41,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.45 vs. limit=15.0 +2024-07-28 12:08:42,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=154034.66666666666, ans=0.0 +2024-07-28 12:08:49,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=154048.0, ans=0.0 +2024-07-28 12:08:53,626 INFO [train.py:1114] (2/4) Epoch 12, batch 3100, loss[loss=0.2148, simple_loss=0.3004, pruned_loss=0.06457, over 4628.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2793, pruned_loss=0.05188, over 938488.43 frames. 
], batch size: 16, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:55,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=154061.33333333334, ans=0.0 +2024-07-28 12:08:55,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=154061.33333333334, ans=0.125 +2024-07-28 12:09:14,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=154101.33333333334, ans=0.125 +2024-07-28 12:09:14,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.719e+01 6.483e+01 7.749e+01 1.294e+02, threshold=1.297e+02, percent-clipped=1.0 +2024-07-28 12:09:15,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=154101.33333333334, ans=0.5 +2024-07-28 12:09:24,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=154114.66666666666, ans=0.0 +2024-07-28 12:09:26,874 INFO [train.py:1114] (2/4) Epoch 12, batch 3150, loss[loss=0.2056, simple_loss=0.2949, pruned_loss=0.05819, over 4634.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2796, pruned_loss=0.0518, over 938512.67 frames. ], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:09:32,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154128.0, ans=0.0 +2024-07-28 12:09:42,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=154154.66666666666, ans=0.125 +2024-07-28 12:09:46,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154154.66666666666, ans=0.0 +2024-07-28 12:09:53,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=154168.0, ans=0.125 +2024-07-28 12:09:53,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154168.0, ans=0.125 +2024-07-28 12:09:54,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=154168.0, ans=0.125 +2024-07-28 12:09:55,911 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-07-28 12:10:02,169 INFO [train.py:1114] (2/4) Epoch 12, batch 3200, loss[loss=0.1928, simple_loss=0.284, pruned_loss=0.05081, over 4829.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2785, pruned_loss=0.05104, over 939647.52 frames. ], batch size: 13, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:04,576 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.34 vs. 
limit=15.0 +2024-07-28 12:10:15,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=154221.33333333334, ans=0.2 +2024-07-28 12:10:23,356 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.738e+01 6.146e+01 6.845e+01 1.156e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 12:10:28,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=154248.0, ans=0.0 +2024-07-28 12:10:32,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=154248.0, ans=0.125 +2024-07-28 12:10:34,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=154261.33333333334, ans=0.025 +2024-07-28 12:10:35,338 INFO [train.py:1114] (2/4) Epoch 12, batch 3250, loss[loss=0.195, simple_loss=0.2883, pruned_loss=0.05086, over 4930.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2787, pruned_loss=0.05091, over 940599.50 frames. ], batch size: 14, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:42,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=154274.66666666666, ans=0.025 +2024-07-28 12:10:52,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=154288.0, ans=0.0 +2024-07-28 12:10:59,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=154301.33333333334, ans=0.025 +2024-07-28 12:11:00,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=154301.33333333334, ans=0.0 +2024-07-28 12:11:00,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=154301.33333333334, ans=0.0 +2024-07-28 12:11:02,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=154314.66666666666, ans=0.125 +2024-07-28 12:11:05,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=154314.66666666666, ans=0.09899494936611666 +2024-07-28 12:11:09,146 INFO [train.py:1114] (2/4) Epoch 12, batch 3300, loss[loss=0.2009, simple_loss=0.298, pruned_loss=0.05189, over 4645.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2774, pruned_loss=0.05055, over 940839.70 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:16,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=154341.33333333334, ans=0.04949747468305833 +2024-07-28 12:11:26,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=154354.66666666666, ans=0.025 +2024-07-28 12:11:31,596 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.622e+01 6.140e+01 6.825e+01 9.627e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 12:11:41,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=154381.33333333334, ans=0.1 +2024-07-28 12:11:45,529 INFO [train.py:1114] (2/4) Epoch 12, batch 3350, loss[loss=0.2291, simple_loss=0.3151, pruned_loss=0.07149, over 4597.00 frames. 
], tot_loss[loss=0.1907, simple_loss=0.2789, pruned_loss=0.05128, over 938602.14 frames. ], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:47,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154394.66666666666, ans=0.125 +2024-07-28 12:11:53,822 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:12:01,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=154408.0, ans=0.125 +2024-07-28 12:12:08,229 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:12:13,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154434.66666666666, ans=0.1 +2024-07-28 12:12:28,958 INFO [train.py:1114] (2/4) Epoch 12, batch 3400, loss[loss=0.1683, simple_loss=0.2435, pruned_loss=0.04656, over 4805.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.279, pruned_loss=0.05132, over 937104.22 frames. ], batch size: 11, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:12:32,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154461.33333333334, ans=0.125 +2024-07-28 12:12:35,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154474.66666666666, ans=0.1 +2024-07-28 12:12:37,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=154474.66666666666, ans=0.125 +2024-07-28 12:12:37,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=154474.66666666666, ans=0.5 +2024-07-28 12:12:50,518 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.633e+01 6.152e+01 6.788e+01 1.015e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:13:04,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.59 vs. limit=22.5 +2024-07-28 12:13:04,235 INFO [train.py:1114] (2/4) Epoch 12, batch 3450, loss[loss=0.2266, simple_loss=0.2982, pruned_loss=0.07749, over 4671.00 frames. ], tot_loss[loss=0.192, simple_loss=0.28, pruned_loss=0.052, over 937132.04 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:16,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=154541.33333333334, ans=0.125 +2024-07-28 12:13:18,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154541.33333333334, ans=0.1 +2024-07-28 12:13:39,610 INFO [train.py:1114] (2/4) Epoch 12, batch 3500, loss[loss=0.177, simple_loss=0.2669, pruned_loss=0.0436, over 4937.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05151, over 937543.59 frames. 
], batch size: 12, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:44,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=154594.66666666666, ans=0.125 +2024-07-28 12:13:45,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-28 12:13:51,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.13 vs. limit=22.5 +2024-07-28 12:13:52,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=154608.0, ans=0.025 +2024-07-28 12:14:00,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154634.66666666666, ans=0.125 +2024-07-28 12:14:01,209 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+01 5.546e+01 6.148e+01 6.737e+01 9.893e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:14:10,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.59 vs. limit=10.0 +2024-07-28 12:14:12,959 INFO [train.py:1114] (2/4) Epoch 12, batch 3550, loss[loss=0.1649, simple_loss=0.2554, pruned_loss=0.03723, over 4659.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2784, pruned_loss=0.05148, over 938216.40 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 64.0 +2024-07-28 12:14:25,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.89 vs. limit=10.0 +2024-07-28 12:14:32,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154674.66666666666, ans=0.1 +2024-07-28 12:14:39,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=154688.0, ans=0.125 +2024-07-28 12:14:47,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=154714.66666666666, ans=0.2 +2024-07-28 12:14:54,257 INFO [train.py:1114] (2/4) Epoch 12, batch 3600, loss[loss=0.1734, simple_loss=0.2599, pruned_loss=0.04342, over 4953.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.278, pruned_loss=0.05137, over 940422.94 frames. ], batch size: 13, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:14:57,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154728.0, ans=0.1 +2024-07-28 12:15:11,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.94 vs. limit=15.0 +2024-07-28 12:15:14,317 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:15:16,128 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.873e+01 6.627e+01 7.814e+01 1.281e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-28 12:15:24,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.83 vs. 
limit=15.0 +2024-07-28 12:15:26,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=154781.33333333334, ans=0.0 +2024-07-28 12:15:27,725 INFO [train.py:1114] (2/4) Epoch 12, batch 3650, loss[loss=0.2117, simple_loss=0.302, pruned_loss=0.06075, over 4924.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2781, pruned_loss=0.05128, over 940440.07 frames. ], batch size: 15, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:15:33,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=154794.66666666666, ans=0.2 +2024-07-28 12:15:37,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=154808.0, ans=0.0 +2024-07-28 12:15:42,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=154821.33333333334, ans=0.025 +2024-07-28 12:15:46,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=154821.33333333334, ans=0.125 +2024-07-28 12:15:49,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=154834.66666666666, ans=0.025 +2024-07-28 12:15:49,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=154834.66666666666, ans=0.0 +2024-07-28 12:15:50,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=154834.66666666666, ans=0.125 +2024-07-28 12:15:56,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=15.0 +2024-07-28 12:16:02,907 INFO [train.py:1114] (2/4) Epoch 12, batch 3700, loss[loss=0.1839, simple_loss=0.273, pruned_loss=0.04743, over 4926.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2791, pruned_loss=0.05167, over 941667.25 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:11,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=154874.66666666666, ans=0.2 +2024-07-28 12:16:12,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=154874.66666666666, ans=0.0 +2024-07-28 12:16:12,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=154874.66666666666, ans=0.0 +2024-07-28 12:16:22,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=154901.33333333334, ans=0.125 +2024-07-28 12:16:22,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=154901.33333333334, ans=0.0 +2024-07-28 12:16:24,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.998e+01 5.499e+01 5.998e+01 6.974e+01 1.210e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-28 12:16:28,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=154901.33333333334, ans=0.0 +2024-07-28 12:16:35,614 INFO [train.py:1114] (2/4) Epoch 12, batch 3750, loss[loss=0.1791, simple_loss=0.2502, pruned_loss=0.05396, over 4792.00 frames. 
], tot_loss[loss=0.1908, simple_loss=0.2786, pruned_loss=0.0515, over 943441.08 frames. ], batch size: 11, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:43,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-07-28 12:16:47,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=154941.33333333334, ans=0.125 +2024-07-28 12:16:47,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=154941.33333333334, ans=0.0 +2024-07-28 12:16:52,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0 +2024-07-28 12:16:54,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=154954.66666666666, ans=0.125 +2024-07-28 12:16:58,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154968.0, ans=0.1 +2024-07-28 12:17:03,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.40 vs. limit=22.5 +2024-07-28 12:17:04,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=154981.33333333334, ans=0.125 +2024-07-28 12:17:09,302 INFO [train.py:1114] (2/4) Epoch 12, batch 3800, loss[loss=0.1968, simple_loss=0.2858, pruned_loss=0.05393, over 4809.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2779, pruned_loss=0.05145, over 941523.14 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:16,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=155008.0, ans=0.125 +2024-07-28 12:17:30,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-07-28 12:17:32,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=155034.66666666666, ans=0.125 +2024-07-28 12:17:32,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.749e+01 6.230e+01 7.169e+01 2.120e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-28 12:17:48,210 INFO [train.py:1114] (2/4) Epoch 12, batch 3850, loss[loss=0.2101, simple_loss=0.3066, pruned_loss=0.05679, over 4609.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2773, pruned_loss=0.05094, over 942090.90 frames. ], batch size: 16, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:59,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=155061.33333333334, ans=0.0 +2024-07-28 12:18:00,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=155061.33333333334, ans=0.2 +2024-07-28 12:18:03,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.49 vs. 
limit=12.0 +2024-07-28 12:18:07,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155074.66666666666, ans=0.125 +2024-07-28 12:18:13,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.43 vs. limit=10.0 +2024-07-28 12:18:14,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=155088.0, ans=0.1 +2024-07-28 12:18:20,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=155101.33333333334, ans=0.0 +2024-07-28 12:18:27,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=155114.66666666666, ans=0.125 +2024-07-28 12:18:29,371 INFO [train.py:1114] (2/4) Epoch 12, batch 3900, loss[loss=0.2026, simple_loss=0.2846, pruned_loss=0.06034, over 4806.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2771, pruned_loss=0.05055, over 942497.94 frames. ], batch size: 14, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:18:33,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=155128.0, ans=0.0 +2024-07-28 12:18:35,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=155141.33333333334, ans=0.2 +2024-07-28 12:18:46,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=155154.66666666666, ans=0.125 +2024-07-28 12:18:51,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=155168.0, ans=0.2 +2024-07-28 12:18:53,191 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+01 5.496e+01 6.124e+01 6.680e+01 9.090e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:18:54,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=155168.0, ans=10.0 +2024-07-28 12:19:07,243 INFO [train.py:1114] (2/4) Epoch 12, batch 3950, loss[loss=0.2015, simple_loss=0.2983, pruned_loss=0.05233, over 4821.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2772, pruned_loss=0.051, over 944359.44 frames. ], batch size: 16, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:08,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=155194.66666666666, ans=0.125 +2024-07-28 12:19:27,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=155234.66666666666, ans=0.0 +2024-07-28 12:19:41,099 INFO [train.py:1114] (2/4) Epoch 12, batch 4000, loss[loss=0.1585, simple_loss=0.2477, pruned_loss=0.03466, over 4774.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2781, pruned_loss=0.05163, over 940896.55 frames. ], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:44,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.06 vs. 
limit=15.0 +2024-07-28 12:19:51,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155274.66666666666, ans=0.1 +2024-07-28 12:19:51,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=155274.66666666666, ans=0.125 +2024-07-28 12:20:02,780 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.540e+01 6.444e+01 7.146e+01 1.519e+02, threshold=1.289e+02, percent-clipped=1.0 +2024-07-28 12:20:14,415 INFO [train.py:1114] (2/4) Epoch 12, batch 4050, loss[loss=0.2534, simple_loss=0.3289, pruned_loss=0.08895, over 3227.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2782, pruned_loss=0.05158, over 939279.96 frames. ], batch size: 35, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:17,603 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. limit=15.0 +2024-07-28 12:20:26,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=155341.33333333334, ans=0.125 +2024-07-28 12:20:26,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155341.33333333334, ans=0.1 +2024-07-28 12:20:30,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. limit=6.0 +2024-07-28 12:20:30,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=155354.66666666666, ans=0.125 +2024-07-28 12:20:33,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=155354.66666666666, ans=0.0 +2024-07-28 12:20:48,475 INFO [train.py:1114] (2/4) Epoch 12, batch 4100, loss[loss=0.2098, simple_loss=0.2978, pruned_loss=0.0609, over 4903.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2772, pruned_loss=0.05135, over 938775.15 frames. ], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:53,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155394.66666666666, ans=0.1 +2024-07-28 12:20:54,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=155394.66666666666, ans=0.125 +2024-07-28 12:20:54,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=155394.66666666666, ans=0.2 +2024-07-28 12:21:03,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=155421.33333333334, ans=0.025 +2024-07-28 12:21:12,173 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.731e+01 6.585e+01 8.286e+01 1.195e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 12:21:15,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155434.66666666666, ans=0.0 +2024-07-28 12:21:16,548 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.03 vs. 
limit=12.0 +2024-07-28 12:21:21,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=155448.0, ans=0.125 +2024-07-28 12:21:23,650 INFO [train.py:1114] (2/4) Epoch 12, batch 4150, loss[loss=0.1855, simple_loss=0.2812, pruned_loss=0.04487, over 4837.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2776, pruned_loss=0.05119, over 938748.84 frames. ], batch size: 13, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:25,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155461.33333333334, ans=0.1 +2024-07-28 12:21:57,094 INFO [train.py:1114] (2/4) Epoch 12, batch 4200, loss[loss=0.208, simple_loss=0.293, pruned_loss=0.06152, over 4899.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2777, pruned_loss=0.0512, over 939949.25 frames. ], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:57,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=155528.0, ans=0.125 +2024-07-28 12:21:58,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155528.0, ans=0.1 +2024-07-28 12:22:04,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=155541.33333333334, ans=0.125 +2024-07-28 12:22:13,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=155554.66666666666, ans=0.025 +2024-07-28 12:22:16,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0 +2024-07-28 12:22:18,896 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.397e+01 5.594e+01 6.177e+01 7.434e+01 1.256e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 12:22:19,925 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=6.0 +2024-07-28 12:22:23,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=155581.33333333334, ans=0.125 +2024-07-28 12:22:30,563 INFO [train.py:1114] (2/4) Epoch 12, batch 4250, loss[loss=0.1386, simple_loss=0.2235, pruned_loss=0.02686, over 4636.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.279, pruned_loss=0.05179, over 940859.02 frames. ], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:22:34,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. 
limit=15.0 +2024-07-28 12:22:34,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=155594.66666666666, ans=0.0 +2024-07-28 12:22:37,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=155608.0, ans=0.125 +2024-07-28 12:22:53,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155634.66666666666, ans=0.1 +2024-07-28 12:22:56,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.41 vs. limit=15.0 +2024-07-28 12:23:03,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=155648.0, ans=0.125 +2024-07-28 12:23:06,447 INFO [train.py:1114] (2/4) Epoch 12, batch 4300, loss[loss=0.1872, simple_loss=0.2832, pruned_loss=0.04556, over 4764.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.279, pruned_loss=0.05177, over 940255.06 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:22,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=155688.0, ans=0.07 +2024-07-28 12:23:27,977 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.634e+01 6.193e+01 6.969e+01 9.578e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 12:23:28,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=155701.33333333334, ans=0.125 +2024-07-28 12:23:29,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=155701.33333333334, ans=0.125 +2024-07-28 12:23:33,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155714.66666666666, ans=0.1 +2024-07-28 12:23:39,247 INFO [train.py:1114] (2/4) Epoch 12, batch 4350, loss[loss=0.1673, simple_loss=0.2421, pruned_loss=0.0462, over 4760.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2791, pruned_loss=0.05161, over 940978.24 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:58,022 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.71 vs. limit=22.5 +2024-07-28 12:24:21,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=155781.33333333334, ans=0.0 +2024-07-28 12:24:28,592 INFO [train.py:1114] (2/4) Epoch 12, batch 4400, loss[loss=0.1891, simple_loss=0.2856, pruned_loss=0.04628, over 4810.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2798, pruned_loss=0.05189, over 940887.41 frames. 
], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:24:42,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155808.0, ans=0.1 +2024-07-28 12:24:50,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=155834.66666666666, ans=0.0 +2024-07-28 12:24:52,801 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.483e+01 6.215e+01 6.857e+01 1.527e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 12:24:58,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.57 vs. limit=12.0 +2024-07-28 12:25:07,423 INFO [train.py:1114] (2/4) Epoch 12, batch 4450, loss[loss=0.1499, simple_loss=0.2364, pruned_loss=0.03167, over 4940.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2794, pruned_loss=0.05206, over 938901.83 frames. ], batch size: 12, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:08,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.33 vs. limit=15.0 +2024-07-28 12:25:17,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=155861.33333333334, ans=0.125 +2024-07-28 12:25:20,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=155874.66666666666, ans=0.125 +2024-07-28 12:25:31,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=155888.0, ans=0.125 +2024-07-28 12:25:36,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155901.33333333334, ans=0.125 +2024-07-28 12:25:43,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=155914.66666666666, ans=0.125 +2024-07-28 12:25:46,638 INFO [train.py:1114] (2/4) Epoch 12, batch 4500, loss[loss=0.1978, simple_loss=0.301, pruned_loss=0.0473, over 4730.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2806, pruned_loss=0.05242, over 938398.11 frames. ], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:51,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.06 vs. 
limit=12.0 +2024-07-28 12:25:51,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=155928.0, ans=0.025 +2024-07-28 12:26:07,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155968.0, ans=0.125 +2024-07-28 12:26:08,388 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.459e+01 6.375e+01 7.469e+01 1.021e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:26:09,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=155968.0, ans=0.1 +2024-07-28 12:26:09,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=155968.0, ans=0.0 +2024-07-28 12:26:13,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=155981.33333333334, ans=0.125 +2024-07-28 12:26:19,640 INFO [train.py:1114] (2/4) Epoch 12, batch 4550, loss[loss=0.1768, simple_loss=0.2623, pruned_loss=0.04559, over 4896.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2797, pruned_loss=0.05195, over 940343.51 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:21,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=155994.66666666666, ans=0.125 +2024-07-28 12:26:21,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=155994.66666666666, ans=0.2 +2024-07-28 12:26:23,955 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:26:35,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=156021.33333333334, ans=0.0 +2024-07-28 12:26:54,974 INFO [train.py:1114] (2/4) Epoch 12, batch 4600, loss[loss=0.193, simple_loss=0.2754, pruned_loss=0.05529, over 4493.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.278, pruned_loss=0.05146, over 938146.45 frames. ], batch size: 21, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:27:02,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.58 vs. limit=15.0 +2024-07-28 12:27:14,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=156101.33333333334, ans=0.2 +2024-07-28 12:27:16,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.90 vs. limit=15.0 +2024-07-28 12:27:16,928 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.723e+01 6.384e+01 7.730e+01 1.121e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 12:27:18,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=156101.33333333334, ans=0.2 +2024-07-28 12:27:26,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=156114.66666666666, ans=0.025 +2024-07-28 12:27:28,350 INFO [train.py:1114] (2/4) Epoch 12, batch 4650, loss[loss=0.2434, simple_loss=0.3193, pruned_loss=0.0838, over 4828.00 frames. 
], tot_loss[loss=0.1907, simple_loss=0.2787, pruned_loss=0.05134, over 940084.24 frames. ], batch size: 16, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:27:32,694 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.73 vs. limit=15.0 +2024-07-28 12:27:47,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=156168.0, ans=0.125 +2024-07-28 12:27:48,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.27 vs. limit=10.0 +2024-07-28 12:27:49,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.60 vs. limit=15.0 +2024-07-28 12:28:01,576 INFO [train.py:1114] (2/4) Epoch 12, batch 4700, loss[loss=0.1609, simple_loss=0.2366, pruned_loss=0.0426, over 4703.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2783, pruned_loss=0.0518, over 937388.44 frames. ], batch size: 11, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:02,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=156194.66666666666, ans=0.025 +2024-07-28 12:28:09,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=156208.0, ans=0.0 +2024-07-28 12:28:13,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=156208.0, ans=0.0 +2024-07-28 12:28:23,591 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.553e+01 6.034e+01 6.597e+01 9.759e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 12:28:25,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156234.66666666666, ans=0.1 +2024-07-28 12:28:29,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=156248.0, ans=0.0 +2024-07-28 12:28:30,083 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:28:35,500 INFO [train.py:1114] (2/4) Epoch 12, batch 4750, loss[loss=0.2043, simple_loss=0.3013, pruned_loss=0.05368, over 4440.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2786, pruned_loss=0.05235, over 934799.27 frames. 
], batch size: 21, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:35,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=156261.33333333334, ans=0.125 +2024-07-28 12:28:39,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=156261.33333333334, ans=0.0 +2024-07-28 12:28:40,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156261.33333333334, ans=0.1 +2024-07-28 12:28:47,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=156274.66666666666, ans=0.0 +2024-07-28 12:28:51,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=156288.0, ans=0.125 +2024-07-28 12:29:04,886 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:29:05,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=156314.66666666666, ans=0.125 +2024-07-28 12:29:07,350 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.78 vs. limit=15.0 +2024-07-28 12:29:11,594 INFO [train.py:1114] (2/4) Epoch 12, batch 4800, loss[loss=0.2101, simple_loss=0.2986, pruned_loss=0.06077, over 4701.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2788, pruned_loss=0.05258, over 932315.96 frames. ], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:19,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=156341.33333333334, ans=0.125 +2024-07-28 12:29:27,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=156354.66666666666, ans=0.125 +2024-07-28 12:29:29,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=156354.66666666666, ans=0.125 +2024-07-28 12:29:35,342 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.583e+01 6.047e+01 7.018e+01 9.420e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 12:29:38,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156368.0, ans=0.125 +2024-07-28 12:29:46,700 INFO [train.py:1114] (2/4) Epoch 12, batch 4850, loss[loss=0.1772, simple_loss=0.2578, pruned_loss=0.04828, over 4734.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2789, pruned_loss=0.05255, over 932164.58 frames. ], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:48,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=156394.66666666666, ans=0.0 +2024-07-28 12:29:51,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=156394.66666666666, ans=0.0 +2024-07-28 12:29:52,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.38 vs. 
limit=22.5 +2024-07-28 12:30:08,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.91 vs. limit=15.0 +2024-07-28 12:30:09,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156434.66666666666, ans=0.0 +2024-07-28 12:30:12,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=156434.66666666666, ans=0.125 +2024-07-28 12:30:22,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=156448.0, ans=0.125 +2024-07-28 12:30:25,574 INFO [train.py:1114] (2/4) Epoch 12, batch 4900, loss[loss=0.2113, simple_loss=0.3025, pruned_loss=0.06004, over 4757.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.278, pruned_loss=0.05162, over 934117.68 frames. ], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:30:25,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=156461.33333333334, ans=0.0 +2024-07-28 12:30:29,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=156461.33333333334, ans=0.05 +2024-07-28 12:30:34,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=156474.66666666666, ans=0.025 +2024-07-28 12:30:38,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=156474.66666666666, ans=0.0 +2024-07-28 12:30:43,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=156488.0, ans=0.2 +2024-07-28 12:30:48,388 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.179e+01 5.700e+01 6.377e+01 7.192e+01 1.081e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:30:52,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=156514.66666666666, ans=0.125 +2024-07-28 12:30:56,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-07-28 12:30:59,635 INFO [train.py:1114] (2/4) Epoch 12, batch 4950, loss[loss=0.2226, simple_loss=0.3033, pruned_loss=0.0709, over 3452.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2792, pruned_loss=0.05186, over 931498.39 frames. 
], batch size: 35, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:02,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=156528.0, ans=0.0 +2024-07-28 12:31:05,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=156528.0, ans=0.0 +2024-07-28 12:31:10,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=156541.33333333334, ans=0.125 +2024-07-28 12:31:17,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156554.66666666666, ans=0.1 +2024-07-28 12:31:29,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=156581.33333333334, ans=0.125 +2024-07-28 12:31:30,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=156581.33333333334, ans=0.125 +2024-07-28 12:31:33,251 INFO [train.py:1114] (2/4) Epoch 12, batch 5000, loss[loss=0.2137, simple_loss=0.3085, pruned_loss=0.05944, over 4676.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2785, pruned_loss=0.05145, over 935446.96 frames. ], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:45,396 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:32:00,481 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.699e+01 6.190e+01 6.580e+01 9.599e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 12:32:03,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=156634.66666666666, ans=0.125 +2024-07-28 12:32:05,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156648.0, ans=0.125 +2024-07-28 12:32:06,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.45 vs. limit=12.0 +2024-07-28 12:32:11,982 INFO [train.py:1114] (2/4) Epoch 12, batch 5050, loss[loss=0.1311, simple_loss=0.2133, pruned_loss=0.02443, over 4856.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2776, pruned_loss=0.051, over 937778.37 frames. ], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:26,586 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0 +2024-07-28 12:32:27,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.54 vs. limit=15.0 +2024-07-28 12:32:37,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=156701.33333333334, ans=0.125 +2024-07-28 12:32:46,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=22.5 +2024-07-28 12:32:48,321 INFO [train.py:1114] (2/4) Epoch 12, batch 5100, loss[loss=0.1699, simple_loss=0.2586, pruned_loss=0.0406, over 4777.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2787, pruned_loss=0.05165, over 935626.40 frames. 
], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:49,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=156728.0, ans=0.2 +2024-07-28 12:32:49,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=156728.0, ans=0.0 +2024-07-28 12:32:51,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=156728.0, ans=0.2 +2024-07-28 12:32:55,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.40 vs. limit=12.0 +2024-07-28 12:32:56,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=156741.33333333334, ans=0.0 +2024-07-28 12:32:57,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156741.33333333334, ans=0.125 +2024-07-28 12:32:59,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.19 vs. limit=15.0 +2024-07-28 12:33:19,271 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.805e+01 5.671e+01 6.012e+01 6.981e+01 1.009e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 12:33:31,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=156781.33333333334, ans=0.025 +2024-07-28 12:33:36,872 INFO [train.py:1114] (2/4) Epoch 12, batch 5150, loss[loss=0.2371, simple_loss=0.3126, pruned_loss=0.08078, over 4830.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2792, pruned_loss=0.05176, over 936323.88 frames. ], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:33:38,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-07-28 12:33:48,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156808.0, ans=0.1 +2024-07-28 12:34:10,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156848.0, ans=0.1 +2024-07-28 12:34:11,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=156848.0, ans=0.0 +2024-07-28 12:34:12,736 INFO [train.py:1114] (2/4) Epoch 12, batch 5200, loss[loss=0.166, simple_loss=0.2569, pruned_loss=0.03756, over 4668.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2785, pruned_loss=0.05169, over 936553.82 frames. 
], batch size: 14, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:19,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=156874.66666666666, ans=0.125 +2024-07-28 12:34:22,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=156874.66666666666, ans=0.125 +2024-07-28 12:34:23,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=156874.66666666666, ans=0.125 +2024-07-28 12:34:26,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=156888.0, ans=0.125 +2024-07-28 12:34:28,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.84 vs. limit=15.0 +2024-07-28 12:34:39,947 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+01 5.675e+01 6.398e+01 7.446e+01 1.094e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 12:34:48,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.09 vs. limit=22.5 +2024-07-28 12:34:50,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=156928.0, ans=0.2 +2024-07-28 12:34:51,271 INFO [train.py:1114] (2/4) Epoch 12, batch 5250, loss[loss=0.1995, simple_loss=0.2882, pruned_loss=0.05542, over 4890.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2777, pruned_loss=0.05149, over 935870.44 frames. ], batch size: 13, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:55,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.29 vs. limit=12.0 +2024-07-28 12:35:05,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=156954.66666666666, ans=0.0 +2024-07-28 12:35:09,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156954.66666666666, ans=0.125 +2024-07-28 12:35:13,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156968.0, ans=0.125 +2024-07-28 12:35:22,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=156981.33333333334, ans=0.125 +2024-07-28 12:35:24,840 INFO [train.py:1114] (2/4) Epoch 12, batch 5300, loss[loss=0.1949, simple_loss=0.2818, pruned_loss=0.05398, over 4616.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2781, pruned_loss=0.05187, over 934879.21 frames. 
], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:35:32,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=157008.0, ans=0.125 +2024-07-28 12:35:41,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=157021.33333333334, ans=0.125 +2024-07-28 12:35:44,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=157034.66666666666, ans=0.0 +2024-07-28 12:35:46,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.552e+01 6.428e+01 7.649e+01 1.141e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 12:35:49,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=157034.66666666666, ans=0.025 +2024-07-28 12:36:00,588 INFO [train.py:1114] (2/4) Epoch 12, batch 5350, loss[loss=0.2154, simple_loss=0.2928, pruned_loss=0.069, over 4496.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2789, pruned_loss=0.05174, over 936694.35 frames. ], batch size: 10, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:36:03,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=157061.33333333334, ans=0.125 +2024-07-28 12:36:09,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157074.66666666666, ans=0.125 +2024-07-28 12:36:10,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=157074.66666666666, ans=0.1 +2024-07-28 12:36:13,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=157074.66666666666, ans=0.05 +2024-07-28 12:36:25,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.70 vs. limit=12.0 +2024-07-28 12:36:27,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157101.33333333334, ans=0.0 +2024-07-28 12:36:30,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=157114.66666666666, ans=10.0 +2024-07-28 12:36:34,799 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:36:36,742 INFO [train.py:1114] (2/4) Epoch 12, batch 5400, loss[loss=0.1867, simple_loss=0.2892, pruned_loss=0.0421, over 4286.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2791, pruned_loss=0.05181, over 930620.19 frames. ], batch size: 25, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:36:50,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=157154.66666666666, ans=0.125 +2024-07-28 12:36:58,604 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 6.108e+01 6.944e+01 7.812e+01 1.147e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-28 12:37:09,775 INFO [train.py:1114] (2/4) Epoch 12, batch 5450, loss[loss=0.1779, simple_loss=0.2555, pruned_loss=0.05013, over 4702.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2787, pruned_loss=0.05174, over 933404.08 frames. 
], batch size: 11, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:37:19,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.83 vs. limit=12.0 +2024-07-28 12:37:21,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=157208.0, ans=0.125 +2024-07-28 12:37:21,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=157208.0, ans=0.125 +2024-07-28 12:37:22,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.42 vs. limit=15.0 +2024-07-28 12:37:23,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=157221.33333333334, ans=0.125 +2024-07-28 12:37:41,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=157248.0, ans=0.125 +2024-07-28 12:37:43,636 INFO [train.py:1114] (2/4) Epoch 12, batch 5500, loss[loss=0.2036, simple_loss=0.2962, pruned_loss=0.05551, over 4243.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2782, pruned_loss=0.05101, over 931430.58 frames. ], batch size: 25, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:37:45,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.01 vs. limit=15.0 +2024-07-28 12:37:49,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=157261.33333333334, ans=0.0 +2024-07-28 12:37:54,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.98 vs. limit=22.5 +2024-07-28 12:37:55,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157274.66666666666, ans=0.1 +2024-07-28 12:38:05,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=157301.33333333334, ans=0.95 +2024-07-28 12:38:07,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.744e+01 6.392e+01 7.523e+01 1.431e+02, threshold=1.278e+02, percent-clipped=1.0 +2024-07-28 12:38:10,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=157301.33333333334, ans=0.2 +2024-07-28 12:38:15,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=157314.66666666666, ans=0.125 +2024-07-28 12:38:16,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0 +2024-07-28 12:38:19,239 INFO [train.py:1114] (2/4) Epoch 12, batch 5550, loss[loss=0.1703, simple_loss=0.2594, pruned_loss=0.04058, over 4707.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2783, pruned_loss=0.05133, over 933482.89 frames. 
], batch size: 12, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:38:23,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=157328.0, ans=0.025 +2024-07-28 12:38:34,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157354.66666666666, ans=0.0 +2024-07-28 12:38:38,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157354.66666666666, ans=0.1 +2024-07-28 12:38:39,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=157368.0, ans=0.0 +2024-07-28 12:38:44,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157368.0, ans=0.1 +2024-07-28 12:38:47,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157381.33333333334, ans=0.1 +2024-07-28 12:38:52,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=157394.66666666666, ans=0.125 +2024-07-28 12:38:52,989 INFO [train.py:1114] (2/4) Epoch 12, batch 5600, loss[loss=0.2378, simple_loss=0.3262, pruned_loss=0.07476, over 4748.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2798, pruned_loss=0.05195, over 934470.72 frames. ], batch size: 14, lr: 6.24e-03, grad_scale: 64.0 +2024-07-28 12:39:09,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157421.33333333334, ans=0.1 +2024-07-28 12:39:10,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=157421.33333333334, ans=0.0 +2024-07-28 12:39:15,770 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.521e+01 6.342e+01 7.244e+01 1.033e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 12:39:26,383 INFO [train.py:1114] (2/4) Epoch 12, batch 5650, loss[loss=0.2184, simple_loss=0.2939, pruned_loss=0.07141, over 4492.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2789, pruned_loss=0.05101, over 937199.96 frames. ], batch size: 21, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:39:31,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.95 vs. limit=15.0 +2024-07-28 12:39:31,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=157461.33333333334, ans=0.2 +2024-07-28 12:39:36,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=157474.66666666666, ans=0.0 +2024-07-28 12:39:38,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157474.66666666666, ans=0.1 +2024-07-28 12:39:43,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.43 vs. limit=12.0 +2024-07-28 12:39:53,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.44 vs. 
limit=15.0 +2024-07-28 12:39:54,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=157514.66666666666, ans=0.125 +2024-07-28 12:39:55,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=157514.66666666666, ans=0.125 +2024-07-28 12:39:58,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.89 vs. limit=22.5 +2024-07-28 12:40:00,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=157528.0, ans=0.025 +2024-07-28 12:40:01,552 INFO [train.py:1114] (2/4) Epoch 12, batch 5700, loss[loss=0.1927, simple_loss=0.2852, pruned_loss=0.05007, over 4702.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2801, pruned_loss=0.05158, over 938178.89 frames. ], batch size: 13, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:40:06,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157528.0, ans=0.1 +2024-07-28 12:40:09,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=157541.33333333334, ans=0.015 +2024-07-28 12:40:20,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=157554.66666666666, ans=10.0 +2024-07-28 12:40:26,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.907e+01 5.849e+01 6.761e+01 7.551e+01 1.061e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 12:40:36,677 INFO [train.py:1114] (2/4) Epoch 12, batch 5750, loss[loss=0.1853, simple_loss=0.279, pruned_loss=0.04577, over 4693.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2796, pruned_loss=0.05138, over 938037.29 frames. ], batch size: 19, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:40:43,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=157608.0, ans=0.125 +2024-07-28 12:41:03,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=157634.66666666666, ans=0.125 +2024-07-28 12:41:03,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157634.66666666666, ans=0.1 +2024-07-28 12:41:05,449 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.23 vs. limit=22.5 +2024-07-28 12:41:06,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.58 vs. limit=15.0 +2024-07-28 12:41:07,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=157634.66666666666, ans=0.125 +2024-07-28 12:41:14,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.79 vs. 
limit=15.0 +2024-07-28 12:41:16,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157661.33333333334, ans=0.1 +2024-07-28 12:41:16,563 INFO [train.py:1114] (2/4) Epoch 12, batch 5800, loss[loss=0.1951, simple_loss=0.2894, pruned_loss=0.05037, over 4754.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2801, pruned_loss=0.05174, over 937107.32 frames. ], batch size: 19, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:41:30,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157674.66666666666, ans=0.1 +2024-07-28 12:41:30,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=157674.66666666666, ans=0.5 +2024-07-28 12:41:40,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=15.0 +2024-07-28 12:41:43,099 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.542e+01 6.072e+01 7.218e+01 1.008e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 12:41:54,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.65 vs. limit=22.5 +2024-07-28 12:41:54,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=157714.66666666666, ans=0.0 +2024-07-28 12:41:57,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=157714.66666666666, ans=0.125 +2024-07-28 12:41:59,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=157728.0, ans=0.025 +2024-07-28 12:41:59,708 INFO [train.py:1114] (2/4) Epoch 12, batch 5850, loss[loss=0.2269, simple_loss=0.3143, pruned_loss=0.06978, over 4603.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2801, pruned_loss=0.05196, over 938166.55 frames. ], batch size: 21, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:42:00,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-28 12:42:04,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 12:42:06,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.03 vs. limit=15.0 +2024-07-28 12:42:13,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=157754.66666666666, ans=0.0 +2024-07-28 12:42:15,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.45 vs. limit=22.5 +2024-07-28 12:42:33,097 INFO [train.py:1114] (2/4) Epoch 12, batch 5900, loss[loss=0.1886, simple_loss=0.2872, pruned_loss=0.04499, over 4697.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2803, pruned_loss=0.05209, over 938276.31 frames. 
], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:42:47,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=157821.33333333334, ans=0.0 +2024-07-28 12:42:47,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=157821.33333333334, ans=0.125 +2024-07-28 12:42:49,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=157821.33333333334, ans=0.0 +2024-07-28 12:42:56,390 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.671e+01 6.327e+01 7.319e+01 1.125e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 12:43:07,206 INFO [train.py:1114] (2/4) Epoch 12, batch 5950, loss[loss=0.2041, simple_loss=0.3075, pruned_loss=0.05036, over 4697.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2803, pruned_loss=0.05181, over 940178.76 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:43:15,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=157874.66666666666, ans=0.025 +2024-07-28 12:43:42,909 INFO [train.py:1114] (2/4) Epoch 12, batch 6000, loss[loss=0.1921, simple_loss=0.2778, pruned_loss=0.05325, over 4184.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2799, pruned_loss=0.0521, over 936871.83 frames. ], batch size: 25, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:43:42,909 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 12:43:54,398 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.1672, simple_loss=0.2713, pruned_loss=0.03161, over 944034.00 frames. +2024-07-28 12:43:54,398 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 12:43:58,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157928.0, ans=0.1 +2024-07-28 12:43:58,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=157928.0, ans=0.125 +2024-07-28 12:43:59,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157928.0, ans=0.1 +2024-07-28 12:44:17,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.694e+01 6.318e+01 7.255e+01 1.160e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 12:44:28,517 INFO [train.py:1114] (2/4) Epoch 12, batch 6050, loss[loss=0.1652, simple_loss=0.2412, pruned_loss=0.04454, over 4778.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2797, pruned_loss=0.05223, over 938035.99 frames. ], batch size: 12, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:44:30,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=157994.66666666666, ans=0.0 +2024-07-28 12:44:33,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.14 vs. 
limit=12.0 +2024-07-28 12:44:34,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=157994.66666666666, ans=0.0 +2024-07-28 12:44:36,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158008.0, ans=0.1 +2024-07-28 12:44:38,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.14 vs. limit=15.0 +2024-07-28 12:44:39,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=158008.0, ans=0.025 +2024-07-28 12:44:55,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=158048.0, ans=0.0 +2024-07-28 12:44:58,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=158048.0, ans=0.0 +2024-07-28 12:45:01,913 INFO [train.py:1114] (2/4) Epoch 12, batch 6100, loss[loss=0.1917, simple_loss=0.2871, pruned_loss=0.04821, over 4692.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2792, pruned_loss=0.05189, over 937713.48 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:45:02,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0 +2024-07-28 12:45:20,098 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:45:22,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=158101.33333333334, ans=0.0 +2024-07-28 12:45:26,349 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.720e+01 6.414e+01 7.144e+01 1.177e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 12:45:27,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=158101.33333333334, ans=0.125 +2024-07-28 12:45:32,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=158114.66666666666, ans=0.04949747468305833 +2024-07-28 12:45:36,993 INFO [train.py:1114] (2/4) Epoch 12, batch 6150, loss[loss=0.2739, simple_loss=0.3358, pruned_loss=0.106, over 3566.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2788, pruned_loss=0.05141, over 937221.30 frames. ], batch size: 35, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:45:40,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=158128.0, ans=0.125 +2024-07-28 12:45:44,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.20 vs. 
limit=12.0 +2024-07-28 12:45:45,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=158141.33333333334, ans=0.125 +2024-07-28 12:45:49,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=158141.33333333334, ans=0.05 +2024-07-28 12:45:54,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=158154.66666666666, ans=0.125 +2024-07-28 12:45:55,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.04 vs. limit=15.0 +2024-07-28 12:46:03,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158181.33333333334, ans=0.1 +2024-07-28 12:46:03,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=158181.33333333334, ans=0.0 +2024-07-28 12:46:08,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=158181.33333333334, ans=0.125 +2024-07-28 12:46:11,261 INFO [train.py:1114] (2/4) Epoch 12, batch 6200, loss[loss=0.1847, simple_loss=0.2782, pruned_loss=0.04564, over 4738.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2789, pruned_loss=0.05156, over 936523.88 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:46:23,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=6.0 +2024-07-28 12:46:25,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=158221.33333333334, ans=0.125 +2024-07-28 12:46:34,428 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.111e+01 6.861e+01 1.032e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 12:46:41,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=158248.0, ans=0.125 +2024-07-28 12:46:47,172 INFO [train.py:1114] (2/4) Epoch 12, batch 6250, loss[loss=0.2209, simple_loss=0.3105, pruned_loss=0.06567, over 4807.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2789, pruned_loss=0.05171, over 932972.96 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:46:51,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=158261.33333333334, ans=0.0 +2024-07-28 12:47:05,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.69 vs. limit=10.0 +2024-07-28 12:47:22,603 INFO [train.py:1114] (2/4) Epoch 12, batch 6300, loss[loss=0.1606, simple_loss=0.2333, pruned_loss=0.04389, over 4521.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2788, pruned_loss=0.05198, over 929621.54 frames. 
], batch size: 10, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:47:24,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=158328.0, ans=0.025 +2024-07-28 12:47:31,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=158341.33333333334, ans=0.07 +2024-07-28 12:47:39,130 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:47:44,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=158368.0, ans=0.125 +2024-07-28 12:47:44,770 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.769e+01 6.275e+01 7.297e+01 9.885e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 12:47:45,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=158368.0, ans=0.2 +2024-07-28 12:47:48,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=158381.33333333334, ans=0.0 +2024-07-28 12:47:49,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=158381.33333333334, ans=0.125 +2024-07-28 12:47:55,516 INFO [train.py:1114] (2/4) Epoch 12, batch 6350, loss[loss=0.2231, simple_loss=0.3081, pruned_loss=0.0691, over 4429.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2784, pruned_loss=0.05165, over 933536.22 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:48:14,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=158421.33333333334, ans=0.2 +2024-07-28 12:48:16,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=158434.66666666666, ans=0.2 +2024-07-28 12:48:16,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158434.66666666666, ans=0.1 +2024-07-28 12:48:27,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=158448.0, ans=0.125 +2024-07-28 12:48:29,205 INFO [train.py:1114] (2/4) Epoch 12, batch 6400, loss[loss=0.1746, simple_loss=0.2635, pruned_loss=0.04289, over 4629.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2791, pruned_loss=0.05188, over 934976.86 frames. 
], batch size: 13, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:48:34,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158461.33333333334, ans=0.1 +2024-07-28 12:48:34,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158461.33333333334, ans=0.125 +2024-07-28 12:48:43,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=158474.66666666666, ans=0.0 +2024-07-28 12:48:53,532 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.709e+01 6.303e+01 7.389e+01 1.106e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 12:49:03,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=158514.66666666666, ans=0.05 +2024-07-28 12:49:04,249 INFO [train.py:1114] (2/4) Epoch 12, batch 6450, loss[loss=0.1822, simple_loss=0.2733, pruned_loss=0.04556, over 4558.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2787, pruned_loss=0.05129, over 938667.82 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:49:07,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158528.0, ans=0.1 +2024-07-28 12:49:16,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=158541.33333333334, ans=0.125 +2024-07-28 12:49:28,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=158568.0, ans=0.04949747468305833 +2024-07-28 12:49:34,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158581.33333333334, ans=0.125 +2024-07-28 12:49:35,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.68 vs. limit=15.0 +2024-07-28 12:49:37,227 INFO [train.py:1114] (2/4) Epoch 12, batch 6500, loss[loss=0.2392, simple_loss=0.3042, pruned_loss=0.08715, over 3398.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2785, pruned_loss=0.0513, over 939973.83 frames. 
], batch size: 35, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:49:44,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=158608.0, ans=0.0 +2024-07-28 12:49:47,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=158608.0, ans=0.07 +2024-07-28 12:49:54,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=158621.33333333334, ans=0.0 +2024-07-28 12:49:55,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=158621.33333333334, ans=0.1 +2024-07-28 12:49:59,307 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.679e+01 6.205e+01 7.346e+01 1.316e+02, threshold=1.241e+02, percent-clipped=1.0 +2024-07-28 12:50:02,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=158648.0, ans=0.0 +2024-07-28 12:50:10,237 INFO [train.py:1114] (2/4) Epoch 12, batch 6550, loss[loss=0.1583, simple_loss=0.2417, pruned_loss=0.03747, over 4813.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2781, pruned_loss=0.05126, over 942974.01 frames. ], batch size: 11, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:50:15,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=158661.33333333334, ans=0.125 +2024-07-28 12:50:23,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158688.0, ans=0.1 +2024-07-28 12:50:29,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=158701.33333333334, ans=0.09899494936611666 +2024-07-28 12:50:39,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.85 vs. limit=15.0 +2024-07-28 12:50:43,191 INFO [train.py:1114] (2/4) Epoch 12, batch 6600, loss[loss=0.2052, simple_loss=0.3035, pruned_loss=0.05349, over 4929.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2789, pruned_loss=0.05161, over 944968.21 frames. 
], batch size: 14, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:50:49,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158741.33333333334, ans=0.125 +2024-07-28 12:50:50,041 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:51:02,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=158754.66666666666, ans=0.125 +2024-07-28 12:51:07,919 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.716e+01 6.452e+01 7.100e+01 1.307e+02, threshold=1.290e+02, percent-clipped=2.0 +2024-07-28 12:51:09,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=158768.0, ans=10.0 +2024-07-28 12:51:12,186 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:51:19,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=158781.33333333334, ans=0.125 +2024-07-28 12:51:23,538 INFO [train.py:1114] (2/4) Epoch 12, batch 6650, loss[loss=0.2024, simple_loss=0.2871, pruned_loss=0.05884, over 4603.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2791, pruned_loss=0.05157, over 944046.79 frames. ], batch size: 17, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:51:36,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.38 vs. limit=15.0 +2024-07-28 12:51:40,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. limit=6.0 +2024-07-28 12:51:46,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=158821.33333333334, ans=0.2 +2024-07-28 12:51:57,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.72 vs. limit=12.0 +2024-07-28 12:52:04,749 INFO [train.py:1114] (2/4) Epoch 12, batch 6700, loss[loss=0.1829, simple_loss=0.2835, pruned_loss=0.04115, over 4658.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2794, pruned_loss=0.05187, over 942551.77 frames. ], batch size: 19, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:52:05,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=158861.33333333334, ans=0.0 +2024-07-28 12:52:10,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=15.0 +2024-07-28 12:52:18,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=15.0 +2024-07-28 12:52:31,271 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 5.714e+01 6.445e+01 7.279e+01 1.274e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 12:52:42,433 INFO [train.py:1114] (2/4) Epoch 12, batch 6750, loss[loss=0.1887, simple_loss=0.2784, pruned_loss=0.04956, over 4114.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2789, pruned_loss=0.05164, over 940423.53 frames. 
], batch size: 25, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:52:50,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=158941.33333333334, ans=0.125 +2024-07-28 12:52:51,523 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:52:59,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=158954.66666666666, ans=0.07 +2024-07-28 12:53:07,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.52 vs. limit=6.0 +2024-07-28 12:53:09,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=158981.33333333334, ans=0.02 +2024-07-28 12:53:09,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=12.0 +2024-07-28 12:53:11,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=158981.33333333334, ans=0.0 +2024-07-28 12:53:11,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=158981.33333333334, ans=0.125 +2024-07-28 12:53:12,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=158981.33333333334, ans=0.125 +2024-07-28 12:53:13,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=158981.33333333334, ans=0.0 +2024-07-28 12:53:13,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=158981.33333333334, ans=0.125 +2024-07-28 12:53:14,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=158981.33333333334, ans=0.0 +2024-07-28 12:53:14,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=15.0 +2024-07-28 12:53:15,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=158981.33333333334, ans=0.0 +2024-07-28 12:53:16,214 INFO [train.py:1114] (2/4) Epoch 12, batch 6800, loss[loss=0.2149, simple_loss=0.2857, pruned_loss=0.07208, over 4636.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2796, pruned_loss=0.05176, over 938777.34 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:53:17,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.60 vs. limit=15.0 +2024-07-28 12:53:18,027 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.81 vs. limit=5.0 +2024-07-28 12:53:19,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=158994.66666666666, ans=0.125 +2024-07-28 12:53:21,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.54 vs. 
limit=15.0 +2024-07-28 12:53:31,351 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:53:32,286 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. limit=15.0 +2024-07-28 12:53:35,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=12.0 +2024-07-28 12:53:38,557 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.632e+01 6.105e+01 6.995e+01 1.094e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 12:53:44,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159048.0, ans=0.1 +2024-07-28 12:53:49,465 INFO [train.py:1114] (2/4) Epoch 12, batch 6850, loss[loss=0.1976, simple_loss=0.2944, pruned_loss=0.05035, over 4700.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2788, pruned_loss=0.05119, over 940688.32 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:53:51,822 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.14 vs. limit=12.0 +2024-07-28 12:54:00,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159074.66666666666, ans=0.125 +2024-07-28 12:54:04,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0 +2024-07-28 12:54:10,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=159101.33333333334, ans=0.5 +2024-07-28 12:54:24,209 INFO [train.py:1114] (2/4) Epoch 12, batch 6900, loss[loss=0.1826, simple_loss=0.2802, pruned_loss=0.04248, over 4960.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2787, pruned_loss=0.05095, over 943041.79 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:54:30,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=159141.33333333334, ans=0.125 +2024-07-28 12:54:42,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=159154.66666666666, ans=0.0 +2024-07-28 12:54:44,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=159168.0, ans=0.05 +2024-07-28 12:54:46,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=159168.0, ans=0.125 +2024-07-28 12:54:46,740 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.739e+01 5.528e+01 6.156e+01 7.028e+01 9.720e+01, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 12:54:50,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=159181.33333333334, ans=0.0 +2024-07-28 12:54:54,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=159181.33333333334, ans=0.125 +2024-07-28 12:54:57,592 INFO [train.py:1114] (2/4) Epoch 12, batch 6950, loss[loss=0.1982, simple_loss=0.2676, pruned_loss=0.06445, over 4520.00 frames. 
], tot_loss[loss=0.1912, simple_loss=0.2793, pruned_loss=0.05156, over 940729.43 frames. ], batch size: 10, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:55:08,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-28 12:55:10,706 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:55:17,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=159234.66666666666, ans=0.0 +2024-07-28 12:55:27,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=159248.0, ans=0.035 +2024-07-28 12:55:27,618 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:55:30,915 INFO [train.py:1114] (2/4) Epoch 12, batch 7000, loss[loss=0.2142, simple_loss=0.2986, pruned_loss=0.0649, over 4626.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.279, pruned_loss=0.05168, over 939034.73 frames. ], batch size: 17, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:55:32,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159261.33333333334, ans=0.125 +2024-07-28 12:55:44,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.50 vs. limit=10.0 +2024-07-28 12:55:47,194 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.50 vs. limit=15.0 +2024-07-28 12:55:53,276 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.935e+01 5.641e+01 6.482e+01 7.445e+01 1.063e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 12:55:59,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.86 vs. limit=22.5 +2024-07-28 12:56:02,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.71 vs. limit=15.0 +2024-07-28 12:56:03,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159314.66666666666, ans=0.0 +2024-07-28 12:56:07,636 INFO [train.py:1114] (2/4) Epoch 12, batch 7050, loss[loss=0.1924, simple_loss=0.2881, pruned_loss=0.04833, over 4743.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2802, pruned_loss=0.05181, over 942183.70 frames. ], batch size: 19, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:56:16,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.39 vs. limit=22.5 +2024-07-28 12:56:16,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten.whitening_limit, batch_count=159341.33333333334, ans=22.5 +2024-07-28 12:56:26,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=159354.66666666666, ans=0.125 +2024-07-28 12:56:26,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. 
limit=15.0 +2024-07-28 12:56:27,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159354.66666666666, ans=0.125 +2024-07-28 12:56:28,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=159354.66666666666, ans=0.125 +2024-07-28 12:56:28,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=159354.66666666666, ans=0.0 +2024-07-28 12:56:32,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159368.0, ans=0.125 +2024-07-28 12:56:42,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=159394.66666666666, ans=0.0 +2024-07-28 12:56:42,912 INFO [train.py:1114] (2/4) Epoch 12, batch 7100, loss[loss=0.2214, simple_loss=0.3075, pruned_loss=0.06764, over 4810.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2817, pruned_loss=0.05324, over 936520.41 frames. ], batch size: 15, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:56:43,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0 +2024-07-28 12:56:45,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.84 vs. limit=15.0 +2024-07-28 12:57:03,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=159434.66666666666, ans=0.0 +2024-07-28 12:57:04,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.57 vs. limit=15.0 +2024-07-28 12:57:06,531 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.638e+01 6.257e+01 7.591e+01 1.588e+02, threshold=1.251e+02, percent-clipped=2.0 +2024-07-28 12:57:09,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=159434.66666666666, ans=0.125 +2024-07-28 12:57:09,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=159448.0, ans=10.0 +2024-07-28 12:57:16,936 INFO [train.py:1114] (2/4) Epoch 12, batch 7150, loss[loss=0.2077, simple_loss=0.2856, pruned_loss=0.06488, over 4607.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.279, pruned_loss=0.05188, over 937498.21 frames. ], batch size: 21, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:57:17,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=159461.33333333334, ans=0.0 +2024-07-28 12:57:17,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159461.33333333334, ans=0.1 +2024-07-28 12:57:19,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=159461.33333333334, ans=0.025 +2024-07-28 12:57:22,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=3.91 vs. 
limit=12.0 +2024-07-28 12:57:42,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159501.33333333334, ans=0.1 +2024-07-28 12:57:46,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=159514.66666666666, ans=0.125 +2024-07-28 12:57:47,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159514.66666666666, ans=0.125 +2024-07-28 12:57:51,490 INFO [train.py:1114] (2/4) Epoch 12, batch 7200, loss[loss=0.2356, simple_loss=0.3302, pruned_loss=0.07055, over 4796.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2796, pruned_loss=0.05233, over 937544.81 frames. ], batch size: 15, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:57:53,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=159528.0, ans=0.025 +2024-07-28 12:57:55,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.95 vs. limit=10.0 +2024-07-28 12:58:05,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.54 vs. limit=10.0 +2024-07-28 12:58:08,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=159554.66666666666, ans=0.125 +2024-07-28 12:58:08,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=159554.66666666666, ans=0.025 +2024-07-28 12:58:11,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=159554.66666666666, ans=0.0 +2024-07-28 12:58:12,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=159554.66666666666, ans=0.0 +2024-07-28 12:58:17,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=159568.0, ans=0.125 +2024-07-28 12:58:18,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=159568.0, ans=0.125 +2024-07-28 12:58:18,798 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.813e+01 6.361e+01 7.395e+01 9.715e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 12:58:21,289 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0 +2024-07-28 12:58:24,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.34 vs. limit=15.0 +2024-07-28 12:58:27,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=159581.33333333334, ans=0.1 +2024-07-28 12:58:29,555 INFO [train.py:1114] (2/4) Epoch 12, batch 7250, loss[loss=0.172, simple_loss=0.2646, pruned_loss=0.0397, over 4851.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2794, pruned_loss=0.05236, over 939261.08 frames. 
], batch size: 12, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:58:34,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159594.66666666666, ans=0.1 +2024-07-28 12:58:35,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=159608.0, ans=0.1 +2024-07-28 12:58:42,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=159621.33333333334, ans=0.125 +2024-07-28 12:58:48,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=159621.33333333334, ans=15.0 +2024-07-28 12:58:58,939 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.71 vs. limit=15.0 +2024-07-28 12:59:02,452 INFO [train.py:1114] (2/4) Epoch 12, batch 7300, loss[loss=0.1576, simple_loss=0.2491, pruned_loss=0.033, over 4841.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2786, pruned_loss=0.05174, over 939303.97 frames. ], batch size: 12, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:59:03,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=159661.33333333334, ans=0.125 +2024-07-28 12:59:04,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159661.33333333334, ans=0.1 +2024-07-28 12:59:14,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.29 vs. limit=22.5 +2024-07-28 12:59:15,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=159688.0, ans=0.09899494936611666 +2024-07-28 12:59:26,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=159701.33333333334, ans=0.0 +2024-07-28 12:59:27,313 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.393e+01 5.789e+01 6.409e+01 1.096e+02, threshold=1.158e+02, percent-clipped=0.0 +2024-07-28 12:59:29,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=159701.33333333334, ans=0.05 +2024-07-28 12:59:35,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=159714.66666666666, ans=0.125 +2024-07-28 12:59:36,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.95 vs. limit=15.0 +2024-07-28 12:59:36,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=159714.66666666666, ans=0.125 +2024-07-28 12:59:37,728 INFO [train.py:1114] (2/4) Epoch 12, batch 7350, loss[loss=0.2366, simple_loss=0.3065, pruned_loss=0.08339, over 4646.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2791, pruned_loss=0.05203, over 938978.77 frames. 
], batch size: 12, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 12:59:41,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159728.0, ans=0.1 +2024-07-28 12:59:58,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=159754.66666666666, ans=0.0 +2024-07-28 13:00:00,580 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:00:03,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=159768.0, ans=0.0 +2024-07-28 13:00:07,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=159768.0, ans=0.125 +2024-07-28 13:00:07,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159768.0, ans=0.1 +2024-07-28 13:00:09,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=159768.0, ans=0.0 +2024-07-28 13:00:23,196 INFO [train.py:1114] (2/4) Epoch 12, batch 7400, loss[loss=0.1825, simple_loss=0.2724, pruned_loss=0.04629, over 4691.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2784, pruned_loss=0.05114, over 940263.79 frames. ], batch size: 13, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:00:26,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=159794.66666666666, ans=0.0 +2024-07-28 13:00:27,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=159794.66666666666, ans=0.0 +2024-07-28 13:00:31,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=159808.0, ans=0.0 +2024-07-28 13:00:40,197 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:00:48,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=159821.33333333334, ans=0.125 +2024-07-28 13:00:55,281 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.910e+01 6.477e+01 7.704e+01 1.281e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 13:00:58,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159848.0, ans=0.1 +2024-07-28 13:01:03,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=159848.0, ans=0.0 +2024-07-28 13:01:05,645 INFO [train.py:1114] (2/4) Epoch 12, batch 7450, loss[loss=0.2007, simple_loss=0.2814, pruned_loss=0.05995, over 4611.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2772, pruned_loss=0.05114, over 938256.84 frames. 
], batch size: 11, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:01:05,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=159861.33333333334, ans=0.0 +2024-07-28 13:01:06,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=159861.33333333334, ans=0.0 +2024-07-28 13:01:17,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159874.66666666666, ans=0.0 +2024-07-28 13:01:18,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159888.0, ans=0.125 +2024-07-28 13:01:18,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=159888.0, ans=0.125 +2024-07-28 13:01:24,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.89 vs. limit=10.0 +2024-07-28 13:01:29,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=159901.33333333334, ans=0.0 +2024-07-28 13:01:38,271 INFO [train.py:1114] (2/4) Epoch 12, batch 7500, loss[loss=0.2474, simple_loss=0.3173, pruned_loss=0.08882, over 3298.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2779, pruned_loss=0.05142, over 936493.59 frames. ], batch size: 36, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:02:12,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=159954.66666666666, ans=0.125 +2024-07-28 13:02:14,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.42 vs. limit=22.5 +2024-07-28 13:02:17,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.84 vs. limit=22.5 +2024-07-28 13:05:16,505 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.752e+01 6.223e+01 6.904e+01 1.181e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 13:05:26,369 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:05:26,966 INFO [train.py:1114] (2/4) Epoch 12, batch 7550, loss[loss=0.2266, simple_loss=0.3162, pruned_loss=0.06852, over 4598.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2793, pruned_loss=0.05179, over 936248.35 frames. 
], batch size: 17, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:05:36,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159994.66666666666, ans=0.125 +2024-07-28 13:05:38,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=160008.0, ans=0.0 +2024-07-28 13:05:44,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=160021.33333333334, ans=0.125 +2024-07-28 13:05:45,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=160021.33333333334, ans=0.125 +2024-07-28 13:05:47,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160021.33333333334, ans=0.1 +2024-07-28 13:05:50,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=160021.33333333334, ans=0.0 +2024-07-28 13:06:04,283 INFO [train.py:1114] (2/4) Epoch 12, batch 7600, loss[loss=0.1856, simple_loss=0.2818, pruned_loss=0.04466, over 4804.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2787, pruned_loss=0.05139, over 938083.38 frames. ], batch size: 14, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:06:26,386 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.580e+01 6.028e+01 7.060e+01 1.012e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 13:06:27,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=160101.33333333334, ans=0.2 +2024-07-28 13:06:32,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160114.66666666666, ans=0.1 +2024-07-28 13:06:33,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=160114.66666666666, ans=0.125 +2024-07-28 13:06:35,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=160114.66666666666, ans=0.125 +2024-07-28 13:06:36,731 INFO [train.py:1114] (2/4) Epoch 12, batch 7650, loss[loss=0.206, simple_loss=0.2833, pruned_loss=0.06435, over 4934.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2793, pruned_loss=0.05173, over 937207.64 frames. ], batch size: 12, lr: 6.19e-03, grad_scale: 64.0 +2024-07-28 13:06:41,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=160128.0, ans=0.125 +2024-07-28 13:06:53,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160154.66666666666, ans=0.1 +2024-07-28 13:07:06,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0 +2024-07-28 13:07:08,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160181.33333333334, ans=0.125 +2024-07-28 13:07:10,992 INFO [train.py:1114] (2/4) Epoch 12, batch 7700, loss[loss=0.2068, simple_loss=0.2967, pruned_loss=0.05847, over 4696.00 frames. 
], tot_loss[loss=0.1911, simple_loss=0.2789, pruned_loss=0.05167, over 934682.29 frames. ], batch size: 13, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:07:29,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.72 vs. limit=22.5 +2024-07-28 13:07:31,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160234.66666666666, ans=0.125 +2024-07-28 13:07:32,963 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.585e+01 6.116e+01 6.946e+01 9.555e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 13:07:33,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=160234.66666666666, ans=0.125 +2024-07-28 13:07:37,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=160248.0, ans=0.125 +2024-07-28 13:07:42,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.88 vs. limit=22.5 +2024-07-28 13:07:43,395 INFO [train.py:1114] (2/4) Epoch 12, batch 7750, loss[loss=0.1985, simple_loss=0.2875, pruned_loss=0.05472, over 4934.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2797, pruned_loss=0.05153, over 935957.05 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:07:43,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.61 vs. limit=15.0 +2024-07-28 13:07:49,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=160274.66666666666, ans=0.125 +2024-07-28 13:07:50,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=160274.66666666666, ans=0.0 +2024-07-28 13:07:52,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=160274.66666666666, ans=0.2 +2024-07-28 13:07:54,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160274.66666666666, ans=0.125 +2024-07-28 13:08:12,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=160314.66666666666, ans=0.05 +2024-07-28 13:08:16,040 INFO [train.py:1114] (2/4) Epoch 12, batch 7800, loss[loss=0.1863, simple_loss=0.2846, pruned_loss=0.04402, over 4673.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2798, pruned_loss=0.05128, over 937710.87 frames. 
], batch size: 14, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:08:29,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160354.66666666666, ans=0.125 +2024-07-28 13:08:38,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.545e+01 6.012e+01 6.981e+01 9.442e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 13:08:39,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=160368.0, ans=0.125 +2024-07-28 13:08:43,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160381.33333333334, ans=0.1 +2024-07-28 13:08:45,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160381.33333333334, ans=0.0 +2024-07-28 13:08:48,344 INFO [train.py:1114] (2/4) Epoch 12, batch 7850, loss[loss=0.1927, simple_loss=0.2747, pruned_loss=0.05534, over 4554.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2797, pruned_loss=0.0515, over 936501.38 frames. ], batch size: 10, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:03,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=160421.33333333334, ans=0.125 +2024-07-28 13:09:05,026 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:09:13,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160434.66666666666, ans=0.125 +2024-07-28 13:09:21,154 INFO [train.py:1114] (2/4) Epoch 12, batch 7900, loss[loss=0.1553, simple_loss=0.2508, pruned_loss=0.02991, over 4875.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2799, pruned_loss=0.05136, over 933535.61 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:25,905 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0 +2024-07-28 13:09:30,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=160474.66666666666, ans=0.07 +2024-07-28 13:09:41,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.64 vs. limit=22.5 +2024-07-28 13:09:43,192 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.515e+01 6.026e+01 6.730e+01 9.606e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 13:09:48,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.72 vs. limit=15.0 +2024-07-28 13:09:53,231 INFO [train.py:1114] (2/4) Epoch 12, batch 7950, loss[loss=0.2849, simple_loss=0.333, pruned_loss=0.1184, over 3633.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2797, pruned_loss=0.05175, over 935719.93 frames. 
], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:56,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160528.0, ans=0.0 +2024-07-28 13:09:58,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160528.0, ans=0.0 +2024-07-28 13:09:59,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160541.33333333334, ans=0.1 +2024-07-28 13:10:07,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=160554.66666666666, ans=0.125 +2024-07-28 13:10:17,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=160568.0, ans=0.125 +2024-07-28 13:10:24,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=160581.33333333334, ans=0.025 +2024-07-28 13:10:25,970 INFO [train.py:1114] (2/4) Epoch 12, batch 8000, loss[loss=0.1945, simple_loss=0.2676, pruned_loss=0.06072, over 4620.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2784, pruned_loss=0.0515, over 934677.76 frames. ], batch size: 11, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:10:40,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=160621.33333333334, ans=0.04949747468305833 +2024-07-28 13:10:46,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=160634.66666666666, ans=0.2 +2024-07-28 13:10:46,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=160634.66666666666, ans=0.0 +2024-07-28 13:10:50,017 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.959e+01 6.918e+01 8.297e+01 1.204e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 13:10:50,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=160634.66666666666, ans=0.125 +2024-07-28 13:11:00,326 INFO [train.py:1114] (2/4) Epoch 12, batch 8050, loss[loss=0.1835, simple_loss=0.2789, pruned_loss=0.04405, over 4796.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2794, pruned_loss=0.05185, over 934347.67 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:11:03,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=160661.33333333334, ans=0.035 +2024-07-28 13:11:08,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160674.66666666666, ans=0.0 +2024-07-28 13:11:12,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.77 vs. limit=12.0 +2024-07-28 13:11:28,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=160714.66666666666, ans=0.125 +2024-07-28 13:11:32,819 INFO [train.py:1114] (2/4) Epoch 12, batch 8100, loss[loss=0.195, simple_loss=0.282, pruned_loss=0.05398, over 4792.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2794, pruned_loss=0.05169, over 934008.81 frames. 
], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:11:35,429 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:11:42,048 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:11:49,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160754.66666666666, ans=0.1 +2024-07-28 13:11:55,383 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.710e+01 6.581e+01 7.221e+01 1.063e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 13:12:05,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.32 vs. limit=22.5 +2024-07-28 13:12:06,448 INFO [train.py:1114] (2/4) Epoch 12, batch 8150, loss[loss=0.2065, simple_loss=0.2926, pruned_loss=0.0602, over 4813.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2779, pruned_loss=0.05105, over 937436.05 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:08,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=160794.66666666666, ans=0.1 +2024-07-28 13:12:09,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=160794.66666666666, ans=0.125 +2024-07-28 13:12:16,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.79 vs. limit=10.0 +2024-07-28 13:12:21,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.36 vs. limit=22.5 +2024-07-28 13:12:22,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=160821.33333333334, ans=10.0 +2024-07-28 13:12:23,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff3.min_abs, batch_count=160821.33333333334, ans=0.2 +2024-07-28 13:12:31,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.86 vs. limit=15.0 +2024-07-28 13:12:32,573 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:12:39,524 INFO [train.py:1114] (2/4) Epoch 12, batch 8200, loss[loss=0.2364, simple_loss=0.3279, pruned_loss=0.07245, over 4809.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2786, pruned_loss=0.05122, over 938456.44 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:51,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=160888.0, ans=0.2 +2024-07-28 13:12:54,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.60 vs. 
limit=10.0 +2024-07-28 13:12:57,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=160888.0, ans=10.0 +2024-07-28 13:13:03,299 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.545e+01 6.398e+01 7.080e+01 1.151e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:13:09,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=160914.66666666666, ans=0.125 +2024-07-28 13:13:12,840 INFO [train.py:1114] (2/4) Epoch 12, batch 8250, loss[loss=0.2015, simple_loss=0.2731, pruned_loss=0.06495, over 4899.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2785, pruned_loss=0.05095, over 938814.34 frames. ], batch size: 13, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:13,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=160928.0, ans=0.125 +2024-07-28 13:13:22,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=160941.33333333334, ans=0.125 +2024-07-28 13:13:22,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.96 vs. limit=15.0 +2024-07-28 13:13:33,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=160968.0, ans=0.2 +2024-07-28 13:13:33,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=160968.0, ans=0.1 +2024-07-28 13:13:37,789 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:13:39,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=160981.33333333334, ans=0.0 +2024-07-28 13:13:45,321 INFO [train.py:1114] (2/4) Epoch 12, batch 8300, loss[loss=0.1987, simple_loss=0.2911, pruned_loss=0.05317, over 4899.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2792, pruned_loss=0.05118, over 938927.62 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:45,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=160994.66666666666, ans=0.125 +2024-07-28 13:13:53,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=161008.0, ans=0.025 +2024-07-28 13:13:59,167 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.40 vs. 
limit=12.0 +2024-07-28 13:14:03,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161021.33333333334, ans=0.125 +2024-07-28 13:14:07,779 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.767e+01 6.201e+01 7.053e+01 1.187e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 13:14:09,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=161034.66666666666, ans=0.0 +2024-07-28 13:14:14,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=161048.0, ans=0.1 +2024-07-28 13:14:15,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=161048.0, ans=0.0 +2024-07-28 13:14:17,543 INFO [train.py:1114] (2/4) Epoch 12, batch 8350, loss[loss=0.239, simple_loss=0.3279, pruned_loss=0.07505, over 4794.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2798, pruned_loss=0.05125, over 941697.47 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:19,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=161061.33333333334, ans=0.125 +2024-07-28 13:14:27,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=161074.66666666666, ans=0.125 +2024-07-28 13:14:32,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=161088.0, ans=0.125 +2024-07-28 13:14:33,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=161088.0, ans=0.125 +2024-07-28 13:14:37,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=161101.33333333334, ans=0.125 +2024-07-28 13:14:46,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161114.66666666666, ans=0.1 +2024-07-28 13:14:50,103 INFO [train.py:1114] (2/4) Epoch 12, batch 8400, loss[loss=0.1965, simple_loss=0.2882, pruned_loss=0.05239, over 4771.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2794, pruned_loss=0.05091, over 940376.39 frames. ], batch size: 12, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:55,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=161128.0, ans=0.0 +2024-07-28 13:14:56,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=161141.33333333334, ans=0.0 +2024-07-28 13:14:58,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.42 vs. 
limit=15.0 +2024-07-28 13:15:13,431 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.899e+01 6.443e+01 7.292e+01 9.298e+01, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 13:15:15,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=161168.0, ans=0.125 +2024-07-28 13:15:20,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=161181.33333333334, ans=0.125 +2024-07-28 13:15:23,167 INFO [train.py:1114] (2/4) Epoch 12, batch 8450, loss[loss=0.1927, simple_loss=0.2808, pruned_loss=0.05231, over 4806.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2798, pruned_loss=0.05133, over 938860.19 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:15:23,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.18 vs. limit=22.5 +2024-07-28 13:15:45,536 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:15:55,156 INFO [train.py:1114] (2/4) Epoch 12, batch 8500, loss[loss=0.1827, simple_loss=0.2586, pruned_loss=0.05337, over 4613.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2795, pruned_loss=0.05163, over 938594.27 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:01,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=161274.66666666666, ans=0.07 +2024-07-28 13:16:03,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=161274.66666666666, ans=0.0 +2024-07-28 13:16:08,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=161288.0, ans=0.125 +2024-07-28 13:16:11,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=161288.0, ans=0.035 +2024-07-28 13:16:17,534 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.769e+01 6.331e+01 7.345e+01 1.019e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 13:16:27,218 INFO [train.py:1114] (2/4) Epoch 12, batch 8550, loss[loss=0.1483, simple_loss=0.2414, pruned_loss=0.02762, over 4795.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.279, pruned_loss=0.05126, over 939167.97 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:35,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=161341.33333333334, ans=0.0 +2024-07-28 13:16:52,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=161368.0, ans=0.125 +2024-07-28 13:16:58,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=161381.33333333334, ans=0.0 +2024-07-28 13:17:00,034 INFO [train.py:1114] (2/4) Epoch 12, batch 8600, loss[loss=0.2068, simple_loss=0.2937, pruned_loss=0.05997, over 4797.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2792, pruned_loss=0.05135, over 939045.59 frames. 
], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:06,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=161408.0, ans=0.07 +2024-07-28 13:17:08,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=161408.0, ans=0.0 +2024-07-28 13:17:09,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=20.43 vs. limit=15.0 +2024-07-28 13:17:17,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=161421.33333333334, ans=0.0 +2024-07-28 13:17:18,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=161421.33333333334, ans=0.0 +2024-07-28 13:17:18,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161421.33333333334, ans=0.1 +2024-07-28 13:17:23,231 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.734e+01 6.405e+01 7.244e+01 9.929e+01, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 13:17:32,664 INFO [train.py:1114] (2/4) Epoch 12, batch 8650, loss[loss=0.2175, simple_loss=0.308, pruned_loss=0.06347, over 4906.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2787, pruned_loss=0.05108, over 939896.07 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:34,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=161461.33333333334, ans=0.125 +2024-07-28 13:17:36,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=161461.33333333334, ans=0.0 +2024-07-28 13:17:36,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0 +2024-07-28 13:17:39,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=161461.33333333334, ans=0.125 +2024-07-28 13:17:48,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0 +2024-07-28 13:17:56,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=161488.0, ans=0.125 +2024-07-28 13:18:00,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161501.33333333334, ans=0.1 +2024-07-28 13:18:06,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=161514.66666666666, ans=0.5 +2024-07-28 13:18:11,229 INFO [train.py:1114] (2/4) Epoch 12, batch 8700, loss[loss=0.1775, simple_loss=0.2645, pruned_loss=0.04521, over 4765.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.28, pruned_loss=0.05182, over 937892.65 frames. 
], batch size: 13, lr: 6.16e-03, grad_scale: 32.0
+2024-07-28 13:18:11,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=161528.0, ans=0.0
+2024-07-28 13:18:13,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=161528.0, ans=0.0
+2024-07-28 13:18:19,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.29 vs. limit=15.0
+2024-07-28 13:18:25,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=161554.66666666666, ans=0.04949747468305833
+2024-07-28 13:18:34,019 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.645e+01 6.105e+01 7.078e+01 1.033e+02, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 13:18:43,675 INFO [train.py:1114] (2/4) Epoch 12, batch 8750, loss[loss=0.2091, simple_loss=0.3016, pruned_loss=0.05828, over 4691.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2789, pruned_loss=0.05147, over 936424.24 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0
+2024-07-28 13:18:47,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=161594.66666666666, ans=0.0
+2024-07-28 13:18:50,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=161608.0, ans=0.2
+2024-07-28 13:18:51,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=161608.0, ans=0.2
+2024-07-28 13:19:00,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=161621.33333333334, ans=0.0
+2024-07-28 13:19:15,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161648.0, ans=0.125
+2024-07-28 13:19:16,559 INFO [train.py:1114] (2/4) Epoch 12, batch 8800, loss[loss=0.2169, simple_loss=0.3116, pruned_loss=0.06111, over 4926.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2797, pruned_loss=0.05189, over 937582.85 frames. ], batch size: 14, lr: 6.16e-03, grad_scale: 32.0
+2024-07-28 13:19:27,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=161674.66666666666, ans=0.0
+2024-07-28 13:19:28,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=161674.66666666666, ans=0.0
+2024-07-28 13:19:30,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=161688.0, ans=0.1
+2024-07-28 13:19:38,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=161701.33333333334, ans=0.125
+2024-07-28 13:19:40,338 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.675e+01 6.216e+01 7.145e+01 9.386e+01, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 13:19:50,085 INFO [train.py:1114] (2/4) Epoch 12, batch 8850, loss[loss=0.2184, simple_loss=0.321, pruned_loss=0.05786, over 4625.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2791, pruned_loss=0.0519, over 932708.04 frames. ], batch size: 21, lr: 6.16e-03, grad_scale: 32.0
+2024-07-28 13:19:52,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-28 13:19:52,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=161728.0, ans=0.09899494936611666
+2024-07-28 13:19:54,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=161728.0, ans=0.0
+2024-07-28 13:19:56,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=161728.0, ans=0.2
+2024-07-28 13:20:04,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=161754.66666666666, ans=0.0
+2024-07-28 13:20:09,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.34 vs. limit=15.0
+2024-07-28 13:20:14,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=161768.0, ans=0.0
+2024-07-28 13:20:17,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161781.33333333334, ans=0.125
+2024-07-28 13:20:20,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=161781.33333333334, ans=0.025
+2024-07-28 13:20:20,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=161781.33333333334, ans=0.125
+2024-07-28 13:20:23,582 INFO [train.py:1114] (2/4) Epoch 12, batch 8900, loss[loss=0.1649, simple_loss=0.2532, pruned_loss=0.03824, over 4935.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2791, pruned_loss=0.05201, over 930686.78 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:20:28,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=161794.66666666666, ans=0.125
+2024-07-28 13:20:32,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0
+2024-07-28 13:20:36,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=161821.33333333334, ans=0.125
+2024-07-28 13:20:38,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=161821.33333333334, ans=0.125
+2024-07-28 13:20:39,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0
+2024-07-28 13:20:45,938 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 5.749e+01 6.497e+01 7.319e+01 1.057e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-28 13:20:47,409 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.99 vs. limit=22.5
+2024-07-28 13:20:50,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=161848.0, ans=0.125
+2024-07-28 13:20:52,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.16 vs. limit=15.0
+2024-07-28 13:20:55,340 INFO [train.py:1114] (2/4) Epoch 12, batch 8950, loss[loss=0.1852, simple_loss=0.2663, pruned_loss=0.05203, over 4464.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2781, pruned_loss=0.05167, over 931382.60 frames. ], batch size: 21, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:20:56,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0
+2024-07-28 13:20:58,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161861.33333333334, ans=0.125
+2024-07-28 13:21:01,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.47 vs. limit=15.0
+2024-07-28 13:21:08,882 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:21:11,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=161888.0, ans=0.2
+2024-07-28 13:21:11,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=161888.0, ans=0.2
+2024-07-28 13:21:13,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=161901.33333333334, ans=0.0
+2024-07-28 13:21:23,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=161914.66666666666, ans=0.2
+2024-07-28 13:21:25,209 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:21:26,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=161928.0, ans=0.125
+2024-07-28 13:21:26,985 INFO [train.py:1114] (2/4) Epoch 12, batch 9000, loss[loss=0.189, simple_loss=0.2732, pruned_loss=0.0524, over 4644.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2768, pruned_loss=0.05155, over 934233.67 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:21:26,985 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 13:21:39,259 INFO [train.py:1146] (2/4) Epoch 12, validation: loss=0.1673, simple_loss=0.2713, pruned_loss=0.03166, over 944034.00 frames.
+2024-07-28 13:21:39,259 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 13:21:41,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=161928.0, ans=0.125
+2024-07-28 13:21:43,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=161928.0, ans=0.125
+2024-07-28 13:21:59,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=161968.0, ans=0.125
+2024-07-28 13:22:02,100 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.644e+01 6.027e+01 6.782e+01 9.850e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 13:22:05,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.00 vs. limit=15.0
+2024-07-28 13:22:11,808 INFO [train.py:1114] (2/4) Epoch 12, batch 9050, loss[loss=0.185, simple_loss=0.2603, pruned_loss=0.05485, over 4507.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2761, pruned_loss=0.05122, over 934596.34 frames. ], batch size: 10, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:22:15,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=161994.66666666666, ans=0.1
+2024-07-28 13:22:19,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=162008.0, ans=0.0
+2024-07-28 13:22:43,297 INFO [train.py:1114] (2/4) Epoch 12, batch 9100, loss[loss=0.1925, simple_loss=0.2758, pruned_loss=0.05457, over 4925.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2762, pruned_loss=0.05052, over 936936.72 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:22:44,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=162061.33333333334, ans=0.2
+2024-07-28 13:22:51,720 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.64 vs. limit=22.5
+2024-07-28 13:22:56,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=162088.0, ans=0.125
+2024-07-28 13:23:02,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=162101.33333333334, ans=0.035
+2024-07-28 13:23:06,065 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.681e+01 6.344e+01 7.391e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 13:23:10,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162114.66666666666, ans=0.125
+2024-07-28 13:23:15,550 INFO [train.py:1114] (2/4) Epoch 12, batch 9150, loss[loss=0.1653, simple_loss=0.2648, pruned_loss=0.0329, over 4802.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2772, pruned_loss=0.05079, over 936230.15 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:23:23,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.90 vs. limit=22.5
+2024-07-28 13:23:31,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=162154.66666666666, ans=0.0
+2024-07-28 13:23:32,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162154.66666666666, ans=0.125
+2024-07-28 13:23:42,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=162181.33333333334, ans=0.2
+2024-07-28 13:23:43,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.03 vs. limit=22.5
+2024-07-28 13:23:44,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=162181.33333333334, ans=12.0
+2024-07-28 13:23:44,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=162181.33333333334, ans=0.0
+2024-07-28 13:23:47,216 INFO [train.py:1114] (2/4) Epoch 12, batch 9200, loss[loss=0.175, simple_loss=0.2742, pruned_loss=0.03791, over 4855.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2776, pruned_loss=0.05074, over 937638.29 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:24:09,765 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.530e+01 6.301e+01 7.507e+01 1.119e+02, threshold=1.260e+02, percent-clipped=0.0
+2024-07-28 13:24:11,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162234.66666666666, ans=0.125
+2024-07-28 13:24:18,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=162261.33333333334, ans=0.0
+2024-07-28 13:24:19,322 INFO [train.py:1114] (2/4) Epoch 12, batch 9250, loss[loss=0.227, simple_loss=0.3291, pruned_loss=0.06246, over 4630.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2772, pruned_loss=0.05045, over 938427.86 frames. ], batch size: 13, lr: 6.15e-03, grad_scale: 32.0
+2024-07-28 13:24:19,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=162261.33333333334, ans=0.125
+2024-07-28 13:24:23,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.57 vs. limit=12.0
+2024-07-28 13:24:32,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=162288.0, ans=0.0
+2024-07-28 13:24:32,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=162288.0, ans=0.125
+2024-07-28 13:24:51,029 INFO [train.py:1114] (2/4) Epoch 12, batch 9300, loss[loss=0.1862, simple_loss=0.268, pruned_loss=0.05226, over 4778.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2764, pruned_loss=0.05036, over 938201.70 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:24:52,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=162328.0, ans=0.0
+2024-07-28 13:24:56,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=162341.33333333334, ans=0.1
+2024-07-28 13:24:58,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.22 vs. limit=22.5
+2024-07-28 13:25:02,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162341.33333333334, ans=0.125
+2024-07-28 13:25:03,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162354.66666666666, ans=0.125
+2024-07-28 13:25:07,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=162354.66666666666, ans=0.0
+2024-07-28 13:25:11,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162368.0, ans=0.125
+2024-07-28 13:25:13,005 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+01 5.656e+01 6.395e+01 7.099e+01 1.199e+02, threshold=1.279e+02, percent-clipped=0.0
+2024-07-28 13:25:17,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162381.33333333334, ans=0.125
+2024-07-28 13:25:18,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.07 vs. limit=6.0
+2024-07-28 13:25:22,441 INFO [train.py:1114] (2/4) Epoch 12, batch 9350, loss[loss=0.1677, simple_loss=0.2498, pruned_loss=0.04279, over 4797.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2771, pruned_loss=0.05082, over 934824.45 frames. ], batch size: 11, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:25:41,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=162434.66666666666, ans=0.07
+2024-07-28 13:25:42,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=162434.66666666666, ans=0.5
+2024-07-28 13:25:43,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=162434.66666666666, ans=0.125
+2024-07-28 13:25:43,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=162434.66666666666, ans=0.125
+2024-07-28 13:25:44,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=162434.66666666666, ans=0.125
+2024-07-28 13:25:46,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=162434.66666666666, ans=0.2
+2024-07-28 13:25:53,648 INFO [train.py:1114] (2/4) Epoch 12, batch 9400, loss[loss=0.208, simple_loss=0.2995, pruned_loss=0.05825, over 4701.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2779, pruned_loss=0.05149, over 932742.85 frames. ], batch size: 13, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:25:54,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=162461.33333333334, ans=0.125
+2024-07-28 13:26:06,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0
+2024-07-28 13:26:08,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=162488.0, ans=0.0
+2024-07-28 13:26:15,656 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.567e+01 6.093e+01 7.292e+01 1.222e+02, threshold=1.219e+02, percent-clipped=0.0
+2024-07-28 13:26:18,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=162514.66666666666, ans=0.2
+2024-07-28 13:26:25,617 INFO [train.py:1114] (2/4) Epoch 12, batch 9450, loss[loss=0.1701, simple_loss=0.2432, pruned_loss=0.04849, over 4800.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2778, pruned_loss=0.05124, over 932065.34 frames. ], batch size: 11, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:26:45,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162568.0, ans=0.1
+2024-07-28 13:26:48,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162568.0, ans=0.1
+2024-07-28 13:26:49,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0
+2024-07-28 13:26:51,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=162581.33333333334, ans=0.0
+2024-07-28 13:26:55,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=162581.33333333334, ans=0.125
+2024-07-28 13:26:56,310 INFO [train.py:1114] (2/4) Epoch 12, batch 9500, loss[loss=0.1965, simple_loss=0.271, pruned_loss=0.061, over 4705.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2775, pruned_loss=0.05081, over 934276.03 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:26:59,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-28 13:26:59,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=162594.66666666666, ans=0.125
+2024-07-28 13:27:02,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.89 vs. limit=22.5
+2024-07-28 13:27:04,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=162608.0, ans=0.125
+2024-07-28 13:27:17,798 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.541e+01 6.151e+01 7.043e+01 9.368e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-28 13:27:21,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=162648.0, ans=0.0
+2024-07-28 13:27:27,295 INFO [train.py:1114] (2/4) Epoch 12, batch 9550, loss[loss=0.1787, simple_loss=0.2663, pruned_loss=0.04559, over 4775.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.278, pruned_loss=0.05137, over 931231.30 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:27:34,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.93 vs. limit=15.0
+2024-07-28 13:27:43,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=162688.0, ans=0.5
+2024-07-28 13:27:48,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=162701.33333333334, ans=0.125
+2024-07-28 13:27:56,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=162714.66666666666, ans=0.035
+2024-07-28 13:27:59,580 INFO [train.py:1114] (2/4) Epoch 12, batch 9600, loss[loss=0.3126, simple_loss=0.3635, pruned_loss=0.1309, over 3354.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2792, pruned_loss=0.05166, over 930254.76 frames. ], batch size: 35, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:28:12,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162754.66666666666, ans=0.1
+2024-07-28 13:28:14,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.33 vs. limit=15.0
+2024-07-28 13:28:15,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=162754.66666666666, ans=0.09899494936611666
+2024-07-28 13:28:15,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162754.66666666666, ans=0.125
+2024-07-28 13:28:18,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=162754.66666666666, ans=0.125
+2024-07-28 13:28:22,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=162768.0, ans=0.125
+2024-07-28 13:28:22,485 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.832e+01 6.811e+01 8.204e+01 1.211e+02, threshold=1.362e+02, percent-clipped=0.0
+2024-07-28 13:28:28,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=162781.33333333334, ans=0.0
+2024-07-28 13:28:31,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=162794.66666666666, ans=0.125
+2024-07-28 13:28:31,977 INFO [train.py:1114] (2/4) Epoch 12, batch 9650, loss[loss=0.1995, simple_loss=0.2973, pruned_loss=0.0508, over 4845.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2791, pruned_loss=0.05156, over 926627.71 frames. ], batch size: 16, lr: 6.14e-03, grad_scale: 32.0
+2024-07-28 13:28:32,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162794.66666666666, ans=0.125
+2024-07-28 13:28:33,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=162794.66666666666, ans=0.2
+2024-07-28 13:28:41,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162808.0, ans=0.125
+2024-07-28 13:28:41,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=162808.0, ans=0.0
+2024-07-28 13:28:47,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=162821.33333333334, ans=0.125
+2024-07-28 13:29:02,791 INFO [train.py:1114] (2/4) Epoch 12, batch 9700, loss[loss=0.2275, simple_loss=0.3088, pruned_loss=0.07303, over 4188.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2788, pruned_loss=0.05166, over 924423.36 frames. ], batch size: 25, lr: 6.13e-03, grad_scale: 32.0
+2024-07-28 13:29:07,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162861.33333333334, ans=0.0
+2024-07-28 13:29:14,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=22.5
+2024-07-28 13:29:14,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=162874.66666666666, ans=0.0
+2024-07-28 13:29:15,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=162888.0, ans=0.2
+2024-07-28 13:29:17,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.09 vs. limit=15.0
+2024-07-28 13:29:19,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=162888.0, ans=0.1
+2024-07-28 13:29:19,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=162888.0, ans=0.125
+2024-07-28 13:29:22,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=162901.33333333334, ans=0.025
+2024-07-28 13:29:25,021 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.692e+01 6.242e+01 7.537e+01 1.052e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 13:29:34,185 INFO [train.py:1114] (2/4) Epoch 12, batch 9750, loss[loss=0.2129, simple_loss=0.3145, pruned_loss=0.0557, over 4676.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2785, pruned_loss=0.05132, over 925163.72 frames. ], batch size: 15, lr: 6.13e-03, grad_scale: 32.0
+2024-07-28 13:29:34,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.24 vs. limit=10.0
+2024-07-28 13:29:38,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=162928.0, ans=0.2
+2024-07-28 13:29:40,098 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:29:45,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=15.0
+2024-07-28 13:30:05,303 INFO [train.py:1114] (2/4) Epoch 12, batch 9800, loss[loss=0.1434, simple_loss=0.231, pruned_loss=0.02787, over 4698.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2764, pruned_loss=0.05025, over 924765.09 frames. ], batch size: 12, lr: 6.13e-03, grad_scale: 32.0
+2024-07-28 13:30:18,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=163021.33333333334, ans=0.125
+2024-07-28 13:30:22,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0
+2024-07-28 13:30:23,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=163034.66666666666, ans=0.125
+2024-07-28 13:30:26,616 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.828e+01 6.429e+01 7.275e+01 1.013e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-28 13:30:31,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=163048.0, ans=0.0
+2024-07-28 13:30:33,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=163048.0, ans=0.125
+2024-07-28 13:30:36,208 INFO [train.py:1114] (2/4) Epoch 12, batch 9850, loss[loss=0.2214, simple_loss=0.2999, pruned_loss=0.07144, over 4892.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2768, pruned_loss=0.05078, over 927478.47 frames. ], batch size: 15, lr: 6.13e-03, grad_scale: 64.0
+2024-07-28 13:30:40,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=163061.33333333334, ans=0.125
+2024-07-28 13:30:44,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163074.66666666666, ans=0.125
+2024-07-28 13:30:57,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.33 vs. limit=5.0
+2024-07-28 13:31:03,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163114.66666666666, ans=0.1
+2024-07-28 13:31:06,848 INFO [train.py:1114] (2/4) Epoch 12, batch 9900, loss[loss=0.2133, simple_loss=0.2868, pruned_loss=0.06995, over 4851.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2783, pruned_loss=0.05181, over 926558.07 frames. ], batch size: 16, lr: 6.13e-03, grad_scale: 64.0
+2024-07-28 13:31:10,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=163128.0, ans=0.125
+2024-07-28 13:31:12,344 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:31:21,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0
+2024-07-28 13:31:25,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=163168.0, ans=0.125
+2024-07-28 13:31:29,356 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.809e+01 6.400e+01 7.583e+01 1.176e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-28 13:31:35,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=163181.33333333334, ans=0.125
+2024-07-28 13:31:38,452 INFO [train.py:1114] (2/4) Epoch 12, batch 9950, loss[loss=0.172, simple_loss=0.2495, pruned_loss=0.04728, over 4795.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2787, pruned_loss=0.05217, over 929082.52 frames. ], batch size: 11, lr: 6.13e-03, grad_scale: 32.0
+2024-07-28 13:31:51,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=163221.33333333334, ans=0.125
+2024-07-28 13:31:53,061 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.26 vs. limit=10.0
+2024-07-28 13:31:56,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163234.66666666666, ans=0.1
+2024-07-28 13:31:59,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=163234.66666666666, ans=0.2
+2024-07-28 13:32:01,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=163234.66666666666, ans=0.0
+2024-07-28 13:32:09,637 INFO [train.py:1114] (2/4) Epoch 12, batch 10000, loss[loss=0.2484, simple_loss=0.3447, pruned_loss=0.07605, over 4626.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2818, pruned_loss=0.05323, over 926767.69 frames. ], batch size: 16, lr: 6.13e-03, grad_scale: 32.0
+2024-07-28 13:32:13,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.53 vs. limit=15.0
+2024-07-28 13:32:22,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.39 vs. limit=8.0
+2024-07-28 13:32:24,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163288.0, ans=0.1
+2024-07-28 13:32:28,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=163301.33333333334, ans=0.0
+2024-07-28 13:32:31,312 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.999e+01 5.883e+01 6.345e+01 7.076e+01 8.600e+01, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 13:32:33,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.88 vs. limit=15.0
+2024-07-28 13:32:41,187 INFO [train.py:1114] (2/4) Epoch 12, batch 10050, loss[loss=0.2674, simple_loss=0.3347, pruned_loss=0.1, over 3383.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2848, pruned_loss=0.05482, over 914708.82 frames. ], batch size: 35, lr: 6.13e-03, grad_scale: 32.0
+2024-07-28 13:32:46,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=163328.0, ans=0.07
+2024-07-28 13:32:49,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=163341.33333333334, ans=0.125
+2024-07-28 13:32:52,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=163341.33333333334, ans=0.0
+2024-07-28 13:32:57,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=163354.66666666666, ans=0.2
+2024-07-28 13:33:01,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=163368.0, ans=0.2
+2024-07-28 13:33:14,549 INFO [train.py:1114] (2/4) Epoch 12, batch 10100, loss[loss=0.2494, simple_loss=0.3157, pruned_loss=0.09153, over 3678.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2903, pruned_loss=0.06032, over 862321.57 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0
+2024-07-28 13:33:14,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=15.0
+2024-07-28 13:33:37,797 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.859e+01 6.715e+01 7.355e+01 7.791e+01 1.006e+02, threshold=1.471e+02, percent-clipped=0.0
+2024-07-28 13:33:46,743 INFO [train.py:1114] (2/4) Epoch 12, batch 10150, loss[loss=0.2503, simple_loss=0.3209, pruned_loss=0.08983, over 3562.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2948, pruned_loss=0.06468, over 821252.55 frames. ], batch size: 36, lr: 6.12e-03, grad_scale: 32.0
+2024-07-28 13:33:49,975 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.92 vs. limit=10.0
+2024-07-28 13:33:54,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=163474.66666666666, ans=0.125
+2024-07-28 13:33:54,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163474.66666666666, ans=0.125
+2024-07-28 13:33:58,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=163474.66666666666, ans=0.125
+2024-07-28 13:34:02,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163488.0, ans=0.125
+2024-07-28 13:34:06,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.05 vs. limit=22.5
+2024-07-28 13:34:08,460 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0
+2024-07-28 13:34:13,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=163514.66666666666, ans=0.0
+2024-07-28 13:34:14,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.99 vs. limit=12.0
+2024-07-28 13:34:18,689 INFO [train.py:1114] (2/4) Epoch 12, batch 10200, loss[loss=0.2243, simple_loss=0.2975, pruned_loss=0.07551, over 3223.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2964, pruned_loss=0.06707, over 788943.29 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0
+2024-07-28 13:34:28,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.48 vs. limit=22.5
+2024-07-28 13:34:31,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=163554.66666666666, ans=0.0
+2024-07-28 13:35:14,738 INFO [train.py:1114] (2/4) Epoch 13, batch 0, loss[loss=0.1578, simple_loss=0.2481, pruned_loss=0.03376, over 4846.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2481, pruned_loss=0.03376, over 4846.00 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:35:14,738 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 13:35:20,252 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8490, 5.1763, 5.0170, 5.6054], device='cuda:2')
+2024-07-28 13:35:26,208 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1689, simple_loss=0.2745, pruned_loss=0.03167, over 944034.00 frames.
+2024-07-28 13:35:26,209 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 13:35:26,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163557.33333333334, ans=0.125
+2024-07-28 13:35:26,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163557.33333333334, ans=0.125
+2024-07-28 13:35:27,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163557.33333333334, ans=0.125
+2024-07-28 13:35:27,985 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.71 vs. limit=10.0
+2024-07-28 13:35:28,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163557.33333333334, ans=0.1
+2024-07-28 13:35:35,876 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.887e+01 6.365e+01 6.777e+01 7.332e+01 9.562e+01, threshold=1.355e+02, percent-clipped=0.0
+2024-07-28 13:35:38,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=163570.66666666666, ans=0.125
+2024-07-28 13:35:43,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=163584.0, ans=0.125
+2024-07-28 13:35:53,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=163610.66666666666, ans=0.0
+2024-07-28 13:35:53,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.82 vs. limit=22.5
+2024-07-28 13:35:59,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163610.66666666666, ans=0.125
+2024-07-28 13:36:00,566 INFO [train.py:1114] (2/4) Epoch 13, batch 50, loss[loss=0.1673, simple_loss=0.2599, pruned_loss=0.03729, over 4632.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2816, pruned_loss=0.05223, over 206570.85 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:36:06,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0
+2024-07-28 13:36:08,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.98 vs. limit=15.0
+2024-07-28 13:36:09,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0
+2024-07-28 13:36:13,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163650.66666666666, ans=0.1
+2024-07-28 13:36:14,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=163650.66666666666, ans=0.0
+2024-07-28 13:36:37,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=163677.33333333334, ans=0.125
+2024-07-28 13:36:41,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=163690.66666666666, ans=0.0
+2024-07-28 13:36:42,413 INFO [train.py:1114] (2/4) Epoch 13, batch 100, loss[loss=0.1735, simple_loss=0.255, pruned_loss=0.04602, over 4640.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2828, pruned_loss=0.05231, over 365724.46 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:36:43,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163690.66666666666, ans=0.125
+2024-07-28 13:36:44,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=163690.66666666666, ans=0.125
+2024-07-28 13:36:44,815 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=12.0
+2024-07-28 13:36:46,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163690.66666666666, ans=0.125
+2024-07-28 13:36:51,946 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.407e+01 6.133e+01 6.720e+01 8.973e+01, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 13:36:52,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=163704.0, ans=0.2
+2024-07-28 13:36:59,454 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:37:06,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=163717.33333333334, ans=0.2
+2024-07-28 13:37:10,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.36 vs. limit=22.5
+2024-07-28 13:37:10,863 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0
+2024-07-28 13:37:17,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163730.66666666666, ans=0.125
+2024-07-28 13:37:22,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=163744.0, ans=0.125
+2024-07-28 13:37:23,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=163744.0, ans=0.125
+2024-07-28 13:37:29,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=163744.0, ans=0.09899494936611666
+2024-07-28 13:37:30,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=163744.0, ans=0.025
+2024-07-28 13:37:32,109 INFO [train.py:1114] (2/4) Epoch 13, batch 150, loss[loss=0.1457, simple_loss=0.244, pruned_loss=0.02374, over 4621.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2776, pruned_loss=0.04981, over 494033.13 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:37:39,260 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.63 vs. limit=6.0
+2024-07-28 13:37:47,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=163784.0, ans=0.0
+2024-07-28 13:38:08,740 INFO [train.py:1114] (2/4) Epoch 13, batch 200, loss[loss=0.2414, simple_loss=0.3305, pruned_loss=0.07612, over 4462.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2779, pruned_loss=0.05118, over 593871.48 frames. ], batch size: 21, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:38:13,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=163824.0, ans=0.025
+2024-07-28 13:38:18,038 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.615e+01 6.251e+01 7.683e+01 1.063e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-28 13:38:19,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=163837.33333333334, ans=0.0
+2024-07-28 13:38:41,291 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.52 vs. limit=6.0
+2024-07-28 13:38:42,114 INFO [train.py:1114] (2/4) Epoch 13, batch 250, loss[loss=0.2071, simple_loss=0.3086, pruned_loss=0.05277, over 4620.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2788, pruned_loss=0.05129, over 670583.34 frames. ], batch size: 16, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:38:42,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=163890.66666666666, ans=0.125
+2024-07-28 13:38:43,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.16 vs. limit=15.0
+2024-07-28 13:38:45,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=163890.66666666666, ans=0.5
+2024-07-28 13:38:46,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=163890.66666666666, ans=0.5
+2024-07-28 13:38:51,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=163904.0, ans=0.0
+2024-07-28 13:38:54,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=163904.0, ans=0.025
+2024-07-28 13:39:02,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=163930.66666666666, ans=0.05
+2024-07-28 13:39:02,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163930.66666666666, ans=0.1
+2024-07-28 13:39:04,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163930.66666666666, ans=0.125
+2024-07-28 13:39:16,264 INFO [train.py:1114] (2/4) Epoch 13, batch 300, loss[loss=0.1835, simple_loss=0.2842, pruned_loss=0.04143, over 4800.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2783, pruned_loss=0.05141, over 730213.86 frames. ], batch size: 15, lr: 5.88e-03, grad_scale: 32.0
+2024-07-28 13:39:17,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=163957.33333333334, ans=0.2
+2024-07-28 13:39:25,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.595e+01 6.354e+01 7.540e+01 1.026e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 13:39:40,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0
+2024-07-28 13:39:42,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164010.66666666666, ans=0.1
+2024-07-28 13:39:45,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=164010.66666666666, ans=0.0
+2024-07-28 13:39:49,705 INFO [train.py:1114] (2/4) Epoch 13, batch 350, loss[loss=0.1998, simple_loss=0.2791, pruned_loss=0.06023, over 4942.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2779, pruned_loss=0.05051, over 776242.89 frames. ], batch size: 12, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:39:57,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=164037.33333333334, ans=0.125
+2024-07-28 13:39:57,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.67 vs. limit=12.0
+2024-07-28 13:40:01,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.02 vs. limit=22.5
+2024-07-28 13:40:13,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=164064.0, ans=10.0
+2024-07-28 13:40:18,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=164077.33333333334, ans=0.2
+2024-07-28 13:40:21,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.99 vs. limit=15.0
+2024-07-28 13:40:24,301 INFO [train.py:1114] (2/4) Epoch 13, batch 400, loss[loss=0.1825, simple_loss=0.2845, pruned_loss=0.04024, over 4700.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2782, pruned_loss=0.05009, over 813683.76 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:40:28,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=164090.66666666666, ans=0.0
+2024-07-28 13:40:29,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=164090.66666666666, ans=0.0
+2024-07-28 13:40:30,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=164104.0, ans=0.2
+2024-07-28 13:40:35,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.430e+01 5.754e+01 6.889e+01 9.909e+01, threshold=1.151e+02, percent-clipped=0.0
+2024-07-28 13:40:46,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=164130.66666666666, ans=0.0
+2024-07-28 13:40:50,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=164130.66666666666, ans=0.125
+2024-07-28 13:40:54,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=164144.0, ans=0.125
+2024-07-28 13:40:55,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=164144.0, ans=0.0
+2024-07-28 13:40:55,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164144.0, ans=0.0
+2024-07-28 13:40:59,533 INFO [train.py:1114] (2/4) Epoch 13, batch 450, loss[loss=0.2241, simple_loss=0.3204, pruned_loss=0.0639, over 4635.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2787, pruned_loss=0.05037, over 838858.51 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:41:04,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0
+2024-07-28 13:41:14,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=164184.0, ans=0.125
+2024-07-28 13:41:32,426 INFO [train.py:1114] (2/4) Epoch 13, batch 500, loss[loss=0.2034, simple_loss=0.293, pruned_loss=0.05696, over 4669.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2777, pruned_loss=0.05017, over 861718.45 frames. ], batch size: 15, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:41:33,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=164224.0, ans=0.125
+2024-07-28 13:41:38,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=164237.33333333334, ans=0.0
+2024-07-28 13:41:40,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=164237.33333333334, ans=0.125
+2024-07-28 13:41:41,732 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.521e+01 6.089e+01 6.841e+01 9.670e+01, threshold=1.218e+02, percent-clipped=0.0
+2024-07-28 13:41:50,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164250.66666666666, ans=0.1
+2024-07-28 13:41:57,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=164264.0, ans=0.2
+2024-07-28 13:42:02,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0
+2024-07-28 13:42:06,174 INFO [train.py:1114] (2/4) Epoch 13, batch 550, loss[loss=0.2064, simple_loss=0.2985, pruned_loss=0.05713, over 4595.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2783, pruned_loss=0.05027, over 877669.24 frames. ], batch size: 17, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:42:15,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164304.0, ans=0.1
+2024-07-28 13:42:16,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0
+2024-07-28 13:42:22,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164317.33333333334, ans=0.1
+2024-07-28 13:42:39,323 INFO [train.py:1114] (2/4) Epoch 13, batch 600, loss[loss=0.2133, simple_loss=0.3172, pruned_loss=0.05467, over 4632.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.278, pruned_loss=0.04955, over 892388.51 frames. ], batch size: 16, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:42:42,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164357.33333333334, ans=0.1
+2024-07-28 13:42:43,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164357.33333333334, ans=0.1
+2024-07-28 13:42:44,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=164357.33333333334, ans=0.0
+2024-07-28 13:42:48,629 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.528e+01 6.337e+01 7.273e+01 1.055e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-28 13:42:50,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164370.66666666666, ans=0.125
+2024-07-28 13:43:01,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=164397.33333333334, ans=0.0
+2024-07-28 13:43:01,854 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.01 vs. limit=15.0
+2024-07-28 13:43:11,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.16 vs. limit=15.0
+2024-07-28 13:43:12,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.67 vs. limit=10.0
+2024-07-28 13:43:18,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164410.66666666666, ans=0.0
+2024-07-28 13:43:19,291 INFO [train.py:1114] (2/4) Epoch 13, batch 650, loss[loss=0.1818, simple_loss=0.2798, pruned_loss=0.04196, over 4758.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2761, pruned_loss=0.04881, over 903942.90 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:43:34,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=164450.66666666666, ans=0.125
+2024-07-28 13:43:37,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=164450.66666666666, ans=0.125
+2024-07-28 13:43:44,094 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:43:47,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=164477.33333333334, ans=0.2
+2024-07-28 13:43:50,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=164477.33333333334, ans=0.125
+2024-07-28 13:43:55,039 INFO [train.py:1114] (2/4) Epoch 13, batch 700, loss[loss=0.1621, simple_loss=0.2601, pruned_loss=0.0321, over 4640.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2765, pruned_loss=0.04882, over 911989.76 frames. ], batch size: 12, lr: 5.87e-03, grad_scale: 32.0
+2024-07-28 13:43:55,193 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:44:04,379 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.621e+01 6.058e+01 7.095e+01 1.199e+02, threshold=1.212e+02, percent-clipped=0.0
+2024-07-28 13:44:11,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=164504.0, ans=0.1
+2024-07-28 13:44:13,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=164517.33333333334, ans=0.125
+2024-07-28 13:44:25,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=164530.66666666666, ans=0.125
+2024-07-28 13:44:38,231 INFO [train.py:1114] (2/4) Epoch 13, batch 750, loss[loss=0.1897, simple_loss=0.2778, pruned_loss=0.05082, over 4696.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.276, pruned_loss=0.04848, over 918713.79 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:44:42,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=164557.33333333334, ans=10.0
+2024-07-28 13:44:59,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164584.0, ans=0.125
+2024-07-28 13:45:00,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=164597.33333333334, ans=0.0
+2024-07-28 13:45:01,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=164597.33333333334, ans=0.125
+2024-07-28 13:45:08,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164610.66666666666, ans=0.1
+2024-07-28 13:45:13,510 INFO [train.py:1114] (2/4) Epoch 13, batch 800, loss[loss=0.1699, simple_loss=0.254, pruned_loss=0.04286, over 4862.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2758, pruned_loss=0.04871, over 923276.04 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:45:22,571 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+01 5.509e+01 5.892e+01 6.560e+01 1.053e+02, threshold=1.178e+02, percent-clipped=0.0
+2024-07-28 13:45:38,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164650.66666666666, ans=0.1
+2024-07-28 13:45:39,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=164650.66666666666, ans=0.0
+2024-07-28 13:45:45,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.84 vs. limit=10.0
+2024-07-28 13:45:48,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=164664.0, ans=0.0
+2024-07-28 13:45:58,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164677.33333333334, ans=0.125
+2024-07-28 13:45:59,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=164677.33333333334, ans=0.2
+2024-07-28 13:46:01,009 INFO [train.py:1114] (2/4) Epoch 13, batch 850, loss[loss=0.1986, simple_loss=0.294, pruned_loss=0.05162, over 4663.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2754, pruned_loss=0.04868, over 927349.44 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:46:21,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=164704.0, ans=0.0
+2024-07-28 13:46:23,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=164717.33333333334, ans=10.0
+2024-07-28 13:46:53,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=164730.66666666666, ans=0.0
+2024-07-28 13:46:54,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164730.66666666666, ans=0.0
+2024-07-28 13:47:04,274 INFO [train.py:1114] (2/4) Epoch 13, batch 900, loss[loss=0.202, simple_loss=0.2755, pruned_loss=0.06424, over 4863.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.276, pruned_loss=0.04905, over 928399.01 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:47:10,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.53 vs. limit=10.0
+2024-07-28 13:47:11,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.54 vs. limit=10.0
+2024-07-28 13:47:13,471 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.792e+01 6.438e+01 7.268e+01 1.084e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-28 13:47:34,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=164810.66666666666, ans=0.025
+2024-07-28 13:47:36,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164810.66666666666, ans=0.1
+2024-07-28 13:47:37,999 INFO [train.py:1114] (2/4) Epoch 13, batch 950, loss[loss=0.2155, simple_loss=0.2965, pruned_loss=0.06728, over 4778.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2763, pruned_loss=0.04927, over 930325.70 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:47:54,999 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 13:47:59,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=164864.0, ans=0.125
+2024-07-28 13:48:03,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=164864.0, ans=0.125
+2024-07-28 13:48:10,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=164890.66666666666, ans=0.125
+2024-07-28 13:48:11,292 INFO [train.py:1114] (2/4) Epoch 13, batch 1000, loss[loss=0.1993, simple_loss=0.2898, pruned_loss=0.05439, over 4963.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2775, pruned_loss=0.04985, over 929652.57 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:48:20,581 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.622e+01 6.136e+01 7.218e+01 8.877e+01, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 13:48:30,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=164930.66666666666, ans=0.0
+2024-07-28 13:48:34,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=164930.66666666666, ans=0.125
+2024-07-28 13:48:44,488 INFO [train.py:1114] (2/4) Epoch 13, batch 1050, loss[loss=0.179, simple_loss=0.2819, pruned_loss=0.03804, over 4876.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2768, pruned_loss=0.04947, over 931940.04 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:48:54,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=164957.33333333334, ans=0.2
+2024-07-28 13:48:54,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164957.33333333334, ans=0.1
+2024-07-28 13:48:55,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=164957.33333333334, ans=0.125
+2024-07-28 13:49:13,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=164997.33333333334, ans=0.125
+2024-07-28 13:49:23,126 INFO [train.py:1114] (2/4) Epoch 13, batch 1100, loss[loss=0.1603, simple_loss=0.2665, pruned_loss=0.02703, over 4896.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2766, pruned_loss=0.04975, over 934509.14 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0
+2024-07-28 13:49:45,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=165037.33333333334, ans=0.09899494936611666
+2024-07-28 13:49:50,759 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+01 5.557e+01 6.150e+01 6.948e+01 9.915e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-28 13:49:55,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.72 vs.
limit=22.5 +2024-07-28 13:50:18,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=165050.66666666666, ans=0.125 +2024-07-28 13:50:18,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-07-28 13:50:20,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=165064.0, ans=0.125 +2024-07-28 13:50:25,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=165064.0, ans=0.07 +2024-07-28 13:50:40,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=165077.33333333334, ans=0.125 +2024-07-28 13:50:46,017 INFO [train.py:1114] (2/4) Epoch 13, batch 1150, loss[loss=0.1799, simple_loss=0.2618, pruned_loss=0.04903, over 4890.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2764, pruned_loss=0.04982, over 934583.86 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:50:58,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=165117.33333333334, ans=0.125 +2024-07-28 13:50:59,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=22.5 +2024-07-28 13:51:04,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=165117.33333333334, ans=0.04949747468305833 +2024-07-28 13:51:04,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=165117.33333333334, ans=0.2 +2024-07-28 13:51:12,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165130.66666666666, ans=0.1 +2024-07-28 13:51:16,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=165144.0, ans=0.125 +2024-07-28 13:51:20,625 INFO [train.py:1114] (2/4) Epoch 13, batch 1200, loss[loss=0.1939, simple_loss=0.2794, pruned_loss=0.05422, over 4868.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2772, pruned_loss=0.04973, over 933729.92 frames. ], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:51:23,767 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.54 vs. limit=15.0 +2024-07-28 13:51:27,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0 +2024-07-28 13:51:27,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.94 vs. 
limit=15.0 +2024-07-28 13:51:30,267 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.602e+01 6.215e+01 7.036e+01 9.353e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:51:33,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=165184.0, ans=0.125 +2024-07-28 13:51:40,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=165197.33333333334, ans=0.125 +2024-07-28 13:51:44,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165197.33333333334, ans=0.125 +2024-07-28 13:51:50,871 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.92 vs. limit=10.0 +2024-07-28 13:51:55,902 INFO [train.py:1114] (2/4) Epoch 13, batch 1250, loss[loss=0.1571, simple_loss=0.2517, pruned_loss=0.03127, over 4817.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.277, pruned_loss=0.04966, over 937571.73 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:52:09,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.06 vs. limit=12.0 +2024-07-28 13:52:13,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=165250.66666666666, ans=0.0 +2024-07-28 13:52:19,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.73 vs. limit=15.0 +2024-07-28 13:52:37,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=165290.66666666666, ans=0.125 +2024-07-28 13:52:37,828 INFO [train.py:1114] (2/4) Epoch 13, batch 1300, loss[loss=0.1853, simple_loss=0.2633, pruned_loss=0.05365, over 4690.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2761, pruned_loss=0.04908, over 939199.85 frames. ], batch size: 19, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:52:38,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=165290.66666666666, ans=0.0 +2024-07-28 13:52:43,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.95 vs. limit=15.0 +2024-07-28 13:52:47,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=165304.0, ans=0.0 +2024-07-28 13:52:48,771 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.215e+01 5.537e+01 6.038e+01 6.682e+01 9.542e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 13:53:07,229 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.67 vs. limit=10.0 +2024-07-28 13:53:16,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=165344.0, ans=0.2 +2024-07-28 13:53:19,166 INFO [train.py:1114] (2/4) Epoch 13, batch 1350, loss[loss=0.1419, simple_loss=0.2335, pruned_loss=0.02515, over 4764.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2755, pruned_loss=0.04872, over 941133.86 frames. 
], batch size: 13, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:53:21,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=165357.33333333334, ans=0.2 +2024-07-28 13:53:38,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=165384.0, ans=0.125 +2024-07-28 13:53:45,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165397.33333333334, ans=0.1 +2024-07-28 13:53:45,735 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.36 vs. limit=6.0 +2024-07-28 13:53:51,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0 +2024-07-28 13:53:54,753 INFO [train.py:1114] (2/4) Epoch 13, batch 1400, loss[loss=0.158, simple_loss=0.2503, pruned_loss=0.03287, over 4711.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2759, pruned_loss=0.04901, over 942747.49 frames. ], batch size: 11, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:53:57,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=165424.0, ans=0.125 +2024-07-28 13:54:04,065 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.870e+01 6.563e+01 8.092e+01 1.108e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 13:54:07,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165450.66666666666, ans=0.1 +2024-07-28 13:54:15,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=165464.0, ans=0.05 +2024-07-28 13:54:16,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=165464.0, ans=0.0 +2024-07-28 13:54:20,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=165464.0, ans=0.015 +2024-07-28 13:54:24,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=165477.33333333334, ans=0.09899494936611666 +2024-07-28 13:54:28,177 INFO [train.py:1114] (2/4) Epoch 13, batch 1450, loss[loss=0.2005, simple_loss=0.2891, pruned_loss=0.05592, over 4672.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2759, pruned_loss=0.049, over 942449.21 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:54:41,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.02 vs. limit=12.0 +2024-07-28 13:54:43,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=165517.33333333334, ans=0.07 +2024-07-28 13:54:55,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=165544.0, ans=0.1 +2024-07-28 13:55:01,061 INFO [train.py:1114] (2/4) Epoch 13, batch 1500, loss[loss=0.1617, simple_loss=0.2548, pruned_loss=0.03428, over 4808.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2758, pruned_loss=0.04871, over 942403.18 frames. 
], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:10,414 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.997e+01 5.672e+01 6.060e+01 6.827e+01 9.493e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:55:16,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.88 vs. limit=15.0 +2024-07-28 13:55:16,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=165584.0, ans=0.125 +2024-07-28 13:55:29,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165610.66666666666, ans=0.1 +2024-07-28 13:55:34,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165624.0, ans=0.1 +2024-07-28 13:55:34,841 INFO [train.py:1114] (2/4) Epoch 13, batch 1550, loss[loss=0.2086, simple_loss=0.2963, pruned_loss=0.06048, over 4903.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2763, pruned_loss=0.04905, over 939312.26 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:41,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=165637.33333333334, ans=0.0 +2024-07-28 13:55:43,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=165637.33333333334, ans=0.125 +2024-07-28 13:55:48,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=165650.66666666666, ans=0.0 +2024-07-28 13:55:54,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165664.0, ans=0.1 +2024-07-28 13:56:06,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=165677.33333333334, ans=0.125 +2024-07-28 13:56:08,033 INFO [train.py:1114] (2/4) Epoch 13, batch 1600, loss[loss=0.1878, simple_loss=0.2862, pruned_loss=0.04469, over 4864.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2758, pruned_loss=0.04894, over 938144.36 frames. 
], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:56:15,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=165690.66666666666, ans=0.0 +2024-07-28 13:56:15,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=165690.66666666666, ans=0.0 +2024-07-28 13:56:19,117 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.268e+01 7.174e+01 9.497e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:56:24,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=165717.33333333334, ans=0.0 +2024-07-28 13:56:29,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=165730.66666666666, ans=0.07 +2024-07-28 13:56:37,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=165744.0, ans=0.125 +2024-07-28 13:56:42,659 INFO [train.py:1114] (2/4) Epoch 13, batch 1650, loss[loss=0.2093, simple_loss=0.2987, pruned_loss=0.05994, over 4673.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2758, pruned_loss=0.04927, over 937876.03 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:56:58,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=165784.0, ans=0.0 +2024-07-28 13:57:10,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=165810.66666666666, ans=0.125 +2024-07-28 13:57:15,592 INFO [train.py:1114] (2/4) Epoch 13, batch 1700, loss[loss=0.1547, simple_loss=0.2396, pruned_loss=0.03487, over 4696.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2764, pruned_loss=0.04934, over 939348.38 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:25,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165837.33333333334, ans=0.1 +2024-07-28 13:57:26,860 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.803e+01 6.268e+01 7.328e+01 1.138e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:57:35,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=165850.66666666666, ans=0.025 +2024-07-28 13:57:35,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=165850.66666666666, ans=0.025 +2024-07-28 13:57:51,035 INFO [train.py:1114] (2/4) Epoch 13, batch 1750, loss[loss=0.1641, simple_loss=0.2405, pruned_loss=0.04386, over 4795.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.276, pruned_loss=0.04896, over 940334.29 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:57,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=165904.0, ans=0.125 +2024-07-28 13:57:59,386 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.75 vs. 
limit=15.0 +2024-07-28 13:58:00,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.00 vs. limit=15.0 +2024-07-28 13:58:18,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=165930.66666666666, ans=0.125 +2024-07-28 13:58:22,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=165944.0, ans=0.2 +2024-07-28 13:58:26,065 INFO [train.py:1114] (2/4) Epoch 13, batch 1800, loss[loss=0.1804, simple_loss=0.2725, pruned_loss=0.04409, over 4636.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2769, pruned_loss=0.04943, over 941248.94 frames. ], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:58:27,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=165957.33333333334, ans=0.0 +2024-07-28 13:58:35,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165970.66666666666, ans=0.1 +2024-07-28 13:58:35,515 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.623e+01 6.283e+01 7.470e+01 1.047e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 13:58:49,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=165997.33333333334, ans=0.0 +2024-07-28 13:58:53,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165997.33333333334, ans=0.1 +2024-07-28 13:58:56,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=166010.66666666666, ans=0.125 +2024-07-28 13:59:01,624 INFO [train.py:1114] (2/4) Epoch 13, batch 1850, loss[loss=0.197, simple_loss=0.2794, pruned_loss=0.05732, over 4815.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2763, pruned_loss=0.04929, over 941278.73 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:35,563 INFO [train.py:1114] (2/4) Epoch 13, batch 1900, loss[loss=0.2, simple_loss=0.2937, pruned_loss=0.05311, over 4662.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.277, pruned_loss=0.04961, over 942471.13 frames. 
], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:40,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=166090.66666666666, ans=0.0 +2024-07-28 13:59:43,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=166104.0, ans=0.0 +2024-07-28 13:59:44,628 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.588e+01 6.157e+01 7.144e+01 1.104e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 13:59:51,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=166117.33333333334, ans=0.2 +2024-07-28 13:59:52,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=166117.33333333334, ans=0.125 +2024-07-28 13:59:57,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=166130.66666666666, ans=0.125 +2024-07-28 14:00:05,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=166144.0, ans=0.125 +2024-07-28 14:00:08,384 INFO [train.py:1114] (2/4) Epoch 13, batch 1950, loss[loss=0.1883, simple_loss=0.272, pruned_loss=0.0523, over 4897.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2789, pruned_loss=0.05032, over 944291.82 frames. ], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:14,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166170.66666666666, ans=0.1 +2024-07-28 14:00:20,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166170.66666666666, ans=0.1 +2024-07-28 14:00:42,316 INFO [train.py:1114] (2/4) Epoch 13, batch 2000, loss[loss=0.161, simple_loss=0.2421, pruned_loss=0.03993, over 4798.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2789, pruned_loss=0.0508, over 941689.69 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:51,861 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.856e+01 6.495e+01 7.461e+01 1.148e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 14:00:55,141 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.17 vs. limit=10.0 +2024-07-28 14:00:56,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166250.66666666666, ans=0.1 +2024-07-28 14:01:00,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=166250.66666666666, ans=0.0 +2024-07-28 14:01:02,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=166264.0, ans=0.015 +2024-07-28 14:01:08,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166264.0, ans=0.1 +2024-07-28 14:01:16,521 INFO [train.py:1114] (2/4) Epoch 13, batch 2050, loss[loss=0.1508, simple_loss=0.2359, pruned_loss=0.0328, over 4609.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2776, pruned_loss=0.0504, over 939281.90 frames. 
], batch size: 11, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:01:25,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=166304.0, ans=0.125 +2024-07-28 14:01:25,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166304.0, ans=0.1 +2024-07-28 14:01:25,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=166304.0, ans=0.0 +2024-07-28 14:01:43,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5 +2024-07-28 14:01:46,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=166344.0, ans=0.07 +2024-07-28 14:01:50,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166344.0, ans=0.1 +2024-07-28 14:01:51,261 INFO [train.py:1114] (2/4) Epoch 13, batch 2100, loss[loss=0.1624, simple_loss=0.2518, pruned_loss=0.03653, over 4757.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2765, pruned_loss=0.04975, over 940894.68 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:00,482 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.528e+01 6.162e+01 7.061e+01 9.278e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 14:02:00,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=166370.66666666666, ans=0.0 +2024-07-28 14:02:05,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=166384.0, ans=0.0 +2024-07-28 14:02:06,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=166384.0, ans=0.0 +2024-07-28 14:02:21,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=166410.66666666666, ans=0.0 +2024-07-28 14:02:23,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166410.66666666666, ans=0.1 +2024-07-28 14:02:23,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.26 vs. limit=22.5 +2024-07-28 14:02:24,075 INFO [train.py:1114] (2/4) Epoch 13, batch 2150, loss[loss=0.1835, simple_loss=0.2905, pruned_loss=0.03829, over 4892.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2758, pruned_loss=0.04976, over 944068.16 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:44,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=166450.66666666666, ans=0.0 +2024-07-28 14:02:51,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=166464.0, ans=0.125 +2024-07-28 14:02:59,573 INFO [train.py:1114] (2/4) Epoch 13, batch 2200, loss[loss=0.1741, simple_loss=0.277, pruned_loss=0.03566, over 4808.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2756, pruned_loss=0.04936, over 943021.55 frames. 
], batch size: 14, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:04,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166490.66666666666, ans=0.1 +2024-07-28 14:03:08,837 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.698e+01 6.654e+01 7.833e+01 2.383e+02, threshold=1.331e+02, percent-clipped=1.0 +2024-07-28 14:03:09,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166504.0, ans=0.125 +2024-07-28 14:03:32,644 INFO [train.py:1114] (2/4) Epoch 13, batch 2250, loss[loss=0.1865, simple_loss=0.2858, pruned_loss=0.04361, over 4703.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2758, pruned_loss=0.04956, over 941824.21 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:49,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.88 vs. limit=10.0 +2024-07-28 14:03:53,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=166584.0, ans=0.0 +2024-07-28 14:04:07,928 INFO [train.py:1114] (2/4) Epoch 13, batch 2300, loss[loss=0.1537, simple_loss=0.239, pruned_loss=0.03419, over 4940.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2741, pruned_loss=0.04889, over 940381.85 frames. ], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:11,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=166624.0, ans=0.2 +2024-07-28 14:04:13,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=166624.0, ans=0.025 +2024-07-28 14:04:19,882 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.399e+01 5.798e+01 6.898e+01 9.306e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-28 14:04:22,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=166650.66666666666, ans=0.07 +2024-07-28 14:04:24,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=166650.66666666666, ans=0.0 +2024-07-28 14:04:27,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.28 vs. limit=15.0 +2024-07-28 14:04:30,304 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.59 vs. limit=22.5 +2024-07-28 14:04:37,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=166677.33333333334, ans=0.2 +2024-07-28 14:04:44,039 INFO [train.py:1114] (2/4) Epoch 13, batch 2350, loss[loss=0.2011, simple_loss=0.2902, pruned_loss=0.056, over 4643.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2738, pruned_loss=0.04873, over 942080.12 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:44,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=166690.66666666666, ans=0.125 +2024-07-28 14:04:45,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. 
limit=6.0 +2024-07-28 14:04:48,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=166690.66666666666, ans=0.2 +2024-07-28 14:05:01,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=166717.33333333334, ans=0.125 +2024-07-28 14:05:02,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.88 vs. limit=15.0 +2024-07-28 14:05:09,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=166730.66666666666, ans=0.125 +2024-07-28 14:05:10,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.92 vs. limit=15.0 +2024-07-28 14:05:14,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.87 vs. limit=15.0 +2024-07-28 14:05:17,419 INFO [train.py:1114] (2/4) Epoch 13, batch 2400, loss[loss=0.1809, simple_loss=0.2674, pruned_loss=0.04722, over 4639.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2742, pruned_loss=0.04846, over 941738.84 frames. ], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:05:19,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=166757.33333333334, ans=0.025 +2024-07-28 14:05:20,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=166757.33333333334, ans=0.2 +2024-07-28 14:05:23,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.40 vs. limit=15.0 +2024-07-28 14:05:27,489 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.555e+01 6.337e+01 7.554e+01 1.093e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 14:05:32,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=166784.0, ans=0.2 +2024-07-28 14:05:47,788 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.98 vs. limit=15.0 +2024-07-28 14:05:47,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.68 vs. limit=10.0 +2024-07-28 14:05:47,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.58 vs. limit=15.0 +2024-07-28 14:05:48,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=166810.66666666666, ans=0.0 +2024-07-28 14:05:49,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=166810.66666666666, ans=0.125 +2024-07-28 14:05:50,658 INFO [train.py:1114] (2/4) Epoch 13, batch 2450, loss[loss=0.1747, simple_loss=0.2582, pruned_loss=0.04558, over 4691.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2754, pruned_loss=0.04945, over 937695.59 frames. 
], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:05:51,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.55 vs. limit=10.0 +2024-07-28 14:06:06,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166850.66666666666, ans=0.1 +2024-07-28 14:06:10,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=166864.0, ans=0.125 +2024-07-28 14:06:14,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=166864.0, ans=0.125 +2024-07-28 14:06:17,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=166877.33333333334, ans=0.2 +2024-07-28 14:06:18,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=166877.33333333334, ans=0.025 +2024-07-28 14:06:20,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=166877.33333333334, ans=0.125 +2024-07-28 14:06:23,975 INFO [train.py:1114] (2/4) Epoch 13, batch 2500, loss[loss=0.2024, simple_loss=0.3021, pruned_loss=0.05136, over 4809.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2761, pruned_loss=0.04951, over 939375.02 frames. ], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:06:24,452 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=15.0 +2024-07-28 14:06:31,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=166904.0, ans=0.025 +2024-07-28 14:06:33,898 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.445e+01 5.909e+01 6.665e+01 1.016e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-28 14:06:39,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=166917.33333333334, ans=0.125 +2024-07-28 14:06:47,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=166930.66666666666, ans=0.09899494936611666 +2024-07-28 14:06:50,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166944.0, ans=0.1 +2024-07-28 14:06:57,756 INFO [train.py:1114] (2/4) Epoch 13, batch 2550, loss[loss=0.1701, simple_loss=0.2568, pruned_loss=0.04169, over 4804.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2767, pruned_loss=0.04967, over 938835.85 frames. 
], batch size: 11, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:04,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=166957.33333333334, ans=0.125 +2024-07-28 14:07:07,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=166970.66666666666, ans=0.125 +2024-07-28 14:07:28,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=167010.66666666666, ans=0.035 +2024-07-28 14:07:29,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.13 vs. limit=10.0 +2024-07-28 14:07:29,093 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.37 vs. limit=6.0 +2024-07-28 14:07:32,611 INFO [train.py:1114] (2/4) Epoch 13, batch 2600, loss[loss=0.1577, simple_loss=0.2398, pruned_loss=0.03783, over 4906.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2768, pruned_loss=0.04962, over 937647.64 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:42,338 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.608e+01 6.313e+01 7.050e+01 1.090e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 14:07:44,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167037.33333333334, ans=0.1 +2024-07-28 14:07:45,667 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:07:55,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=167064.0, ans=0.025 +2024-07-28 14:08:00,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=167077.33333333334, ans=0.125 +2024-07-28 14:08:01,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=167077.33333333334, ans=0.125 +2024-07-28 14:08:07,478 INFO [train.py:1114] (2/4) Epoch 13, batch 2650, loss[loss=0.2233, simple_loss=0.3132, pruned_loss=0.0667, over 4627.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2778, pruned_loss=0.05006, over 939403.42 frames. ], batch size: 16, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:07,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=167090.66666666666, ans=0.0 +2024-07-28 14:08:10,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167090.66666666666, ans=0.1 +2024-07-28 14:08:17,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=167104.0, ans=0.125 +2024-07-28 14:08:17,969 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.16 vs. 
limit=6.0 +2024-07-28 14:08:23,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=167117.33333333334, ans=0.125 +2024-07-28 14:08:24,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=167117.33333333334, ans=0.125 +2024-07-28 14:08:28,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=167130.66666666666, ans=0.0 +2024-07-28 14:08:40,461 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:08:40,498 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:08:41,004 INFO [train.py:1114] (2/4) Epoch 13, batch 2700, loss[loss=0.1833, simple_loss=0.2659, pruned_loss=0.05031, over 4734.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2776, pruned_loss=0.05019, over 939476.83 frames. ], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:44,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167157.33333333334, ans=0.125 +2024-07-28 14:08:51,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.482e+01 5.925e+01 6.824e+01 1.004e+02, threshold=1.185e+02, percent-clipped=0.0 +2024-07-28 14:08:58,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0 +2024-07-28 14:08:59,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=167184.0, ans=0.2 +2024-07-28 14:09:01,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167197.33333333334, ans=0.1 +2024-07-28 14:09:10,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=167210.66666666666, ans=0.2 +2024-07-28 14:09:11,676 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.32 vs. limit=15.0 +2024-07-28 14:09:17,199 INFO [train.py:1114] (2/4) Epoch 13, batch 2750, loss[loss=0.2153, simple_loss=0.2985, pruned_loss=0.06607, over 4725.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2765, pruned_loss=0.04987, over 939409.12 frames. ], batch size: 12, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:25,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=167237.33333333334, ans=0.0 +2024-07-28 14:09:26,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.11 vs. limit=15.0 +2024-07-28 14:09:39,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=167264.0, ans=0.09899494936611666 +2024-07-28 14:09:41,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=167264.0, ans=0.125 +2024-07-28 14:09:50,352 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.17 vs. 
limit=15.0 +2024-07-28 14:09:51,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=167277.33333333334, ans=0.05 +2024-07-28 14:09:52,625 INFO [train.py:1114] (2/4) Epoch 13, batch 2800, loss[loss=0.2025, simple_loss=0.2843, pruned_loss=0.06038, over 3466.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.04957, over 937339.14 frames. ], batch size: 35, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:59,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=167304.0, ans=0.0 +2024-07-28 14:10:02,560 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.817e+01 5.664e+01 6.211e+01 7.205e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 14:10:12,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=167330.66666666666, ans=0.015 +2024-07-28 14:10:14,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167330.66666666666, ans=0.125 +2024-07-28 14:10:15,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=167330.66666666666, ans=0.125 +2024-07-28 14:10:18,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167344.0, ans=0.0 +2024-07-28 14:10:20,396 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.92 vs. limit=22.5 +2024-07-28 14:10:20,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167344.0, ans=0.125 +2024-07-28 14:10:23,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167344.0, ans=0.125 +2024-07-28 14:10:26,075 INFO [train.py:1114] (2/4) Epoch 13, batch 2850, loss[loss=0.1921, simple_loss=0.2654, pruned_loss=0.05934, over 4961.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04944, over 935578.35 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:10:30,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.72 vs. limit=22.5 +2024-07-28 14:10:45,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=167397.33333333334, ans=0.2 +2024-07-28 14:10:54,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=167410.66666666666, ans=0.2 +2024-07-28 14:10:55,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=167410.66666666666, ans=0.09899494936611666 +2024-07-28 14:10:59,349 INFO [train.py:1114] (2/4) Epoch 13, batch 2900, loss[loss=0.1876, simple_loss=0.2831, pruned_loss=0.04607, over 4826.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2768, pruned_loss=0.04958, over 939377.90 frames. 
], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:09,573 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.851e+01 6.550e+01 7.504e+01 1.142e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 14:11:17,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=167450.66666666666, ans=0.0 +2024-07-28 14:11:22,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=167464.0, ans=0.0 +2024-07-28 14:11:29,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=167477.33333333334, ans=0.0 +2024-07-28 14:11:31,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-28 14:11:33,210 INFO [train.py:1114] (2/4) Epoch 13, batch 2950, loss[loss=0.1855, simple_loss=0.2743, pruned_loss=0.04834, over 4696.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.276, pruned_loss=0.04991, over 938646.53 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:40,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=167504.0, ans=0.125 +2024-07-28 14:11:44,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=167504.0, ans=0.0 +2024-07-28 14:11:54,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.62 vs. limit=5.0 +2024-07-28 14:12:01,167 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.72 vs. limit=15.0 +2024-07-28 14:12:06,234 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:12:06,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=167557.33333333334, ans=0.2 +2024-07-28 14:12:06,766 INFO [train.py:1114] (2/4) Epoch 13, batch 3000, loss[loss=0.1899, simple_loss=0.2896, pruned_loss=0.04507, over 4753.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.275, pruned_loss=0.04911, over 938429.17 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:06,767 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 14:12:13,464 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.0389, 4.5793, 4.7801, 4.6439], device='cuda:2') +2024-07-28 14:12:17,039 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.7554, 3.8724, 4.1145, 3.9377], device='cuda:2') +2024-07-28 14:12:18,644 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1663, simple_loss=0.2701, pruned_loss=0.0312, over 944034.00 frames. 
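(Editorial aside on reading the recurring optim.py WARNING lines above: each one summarizes the recent per-batch gradient norms as quartiles and reports a clipping threshold plus the fraction of batches clipped. The sketch below is an illustrative reconstruction only, not the actual icefall optim.py code; the scale-times-median threshold rule and the function name `quartile_clipping_report` are assumptions made for the example.)

```python
# Minimal sketch (assumption): one plausible way the logged quantities
# "grad-norm quartiles ... threshold=... percent-clipped=..." could be
# computed. The real icefall implementation may differ.
import torch

def quartile_clipping_report(grad_norms: torch.Tensor, clipping_scale: float = 2.0) -> None:
    """grad_norms: 1-D tensor of recent per-batch gradient norms."""
    # Quartiles of the recent gradient norms (min, 25%, median, 75%, max).
    q = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    # Hypothetical rule: threshold = clipping_scale * median grad norm.
    threshold = clipping_scale * q[2]
    # Fraction of recent batches whose norm exceeded the threshold.
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    print(
        f"Clipping_scale={clipping_scale}, grad-norm quartiles "
        + " ".join(f"{v:.3e}" for v in q.tolist())
        + f", threshold={threshold:.3e}, percent-clipped={percent_clipped:.1f}"
    )

quartile_clipping_report(torch.tensor([44.6, 55.3, 63.4, 72.7, 105.5]))
```

Read this way, `percent-clipped=0.0` in the surrounding log suggests that no batch in the reporting window exceeded the derived threshold.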
+2024-07-28 14:12:18,645 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 14:12:29,080 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.635e+01 6.154e+01 7.337e+01 1.248e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 14:12:31,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167570.66666666666, ans=0.125 +2024-07-28 14:12:36,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=167584.0, ans=0.125 +2024-07-28 14:12:39,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.64 vs. limit=15.0 +2024-07-28 14:12:40,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167597.33333333334, ans=0.1 +2024-07-28 14:12:48,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167610.66666666666, ans=0.1 +2024-07-28 14:12:52,947 INFO [train.py:1114] (2/4) Epoch 13, batch 3050, loss[loss=0.1797, simple_loss=0.2765, pruned_loss=0.04139, over 4644.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.0493, over 937449.68 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:56,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.90 vs. limit=15.0 +2024-07-28 14:13:02,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=167637.33333333334, ans=0.125 +2024-07-28 14:13:09,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=167650.66666666666, ans=0.2 +2024-07-28 14:13:12,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=167650.66666666666, ans=0.0 +2024-07-28 14:13:14,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167664.0, ans=0.1 +2024-07-28 14:13:20,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0 +2024-07-28 14:13:22,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=167677.33333333334, ans=0.125 +2024-07-28 14:13:27,843 INFO [train.py:1114] (2/4) Epoch 13, batch 3100, loss[loss=0.1951, simple_loss=0.2879, pruned_loss=0.0512, over 4649.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2757, pruned_loss=0.0493, over 938302.14 frames. 
], batch size: 16, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:13:33,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167690.66666666666, ans=0.125 +2024-07-28 14:13:37,637 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.158e+01 5.544e+01 6.108e+01 7.072e+01 9.683e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 14:13:41,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=167717.33333333334, ans=0.5 +2024-07-28 14:13:51,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.99 vs. limit=22.5 +2024-07-28 14:13:53,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.26 vs. limit=22.5 +2024-07-28 14:14:01,404 INFO [train.py:1114] (2/4) Epoch 13, batch 3150, loss[loss=0.2435, simple_loss=0.3335, pruned_loss=0.07673, over 4614.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2767, pruned_loss=0.04955, over 938681.12 frames. ], batch size: 17, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:12,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167770.66666666666, ans=0.125 +2024-07-28 14:14:25,502 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.85 vs. limit=15.0 +2024-07-28 14:14:31,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-28 14:14:37,495 INFO [train.py:1114] (2/4) Epoch 13, batch 3200, loss[loss=0.2109, simple_loss=0.2993, pruned_loss=0.06128, over 4830.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2769, pruned_loss=0.04948, over 939963.22 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:47,194 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+01 5.665e+01 6.377e+01 7.022e+01 9.065e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:14:49,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=167837.33333333334, ans=0.07 +2024-07-28 14:14:50,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0 +2024-07-28 14:14:56,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=167837.33333333334, ans=0.125 +2024-07-28 14:15:18,621 INFO [train.py:1114] (2/4) Epoch 13, batch 3250, loss[loss=0.1963, simple_loss=0.2904, pruned_loss=0.05107, over 4925.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2766, pruned_loss=0.04892, over 941064.95 frames. 
], batch size: 14, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:18,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=167890.66666666666, ans=0.0 +2024-07-28 14:15:22,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167890.66666666666, ans=0.125 +2024-07-28 14:15:24,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=167890.66666666666, ans=0.125 +2024-07-28 14:15:24,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=167904.0, ans=0.0 +2024-07-28 14:15:25,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=167904.0, ans=0.125 +2024-07-28 14:15:26,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167904.0, ans=0.1 +2024-07-28 14:15:30,369 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:15:42,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=167930.66666666666, ans=0.5 +2024-07-28 14:15:52,625 INFO [train.py:1114] (2/4) Epoch 13, batch 3300, loss[loss=0.2262, simple_loss=0.3069, pruned_loss=0.0727, over 4717.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2745, pruned_loss=0.04871, over 941026.75 frames. ], batch size: 19, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:58,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=167957.33333333334, ans=0.0 +2024-07-28 14:16:02,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=167970.66666666666, ans=0.125 +2024-07-28 14:16:02,800 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.327e+01 5.938e+01 6.571e+01 1.063e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 14:16:26,156 INFO [train.py:1114] (2/4) Epoch 13, batch 3350, loss[loss=0.1836, simple_loss=0.2804, pruned_loss=0.04338, over 4663.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2757, pruned_loss=0.04964, over 938810.17 frames. ], batch size: 17, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:16:33,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.95 vs. limit=15.0 +2024-07-28 14:16:40,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168050.66666666666, ans=0.0 +2024-07-28 14:16:40,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168050.66666666666, ans=0.125 +2024-07-28 14:16:40,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=168050.66666666666, ans=0.0 +2024-07-28 14:16:43,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.81 vs. 
limit=15.0 +2024-07-28 14:17:00,155 INFO [train.py:1114] (2/4) Epoch 13, batch 3400, loss[loss=0.1528, simple_loss=0.2356, pruned_loss=0.03501, over 4802.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2759, pruned_loss=0.05019, over 937542.37 frames. ], batch size: 11, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:04,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=168090.66666666666, ans=0.125 +2024-07-28 14:17:06,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. limit=6.0 +2024-07-28 14:17:10,163 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.911e+01 6.407e+01 7.548e+01 1.179e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 14:17:16,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=168117.33333333334, ans=0.125 +2024-07-28 14:17:27,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168144.0, ans=0.125 +2024-07-28 14:17:33,665 INFO [train.py:1114] (2/4) Epoch 13, batch 3450, loss[loss=0.1902, simple_loss=0.2762, pruned_loss=0.05208, over 4676.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2767, pruned_loss=0.05088, over 937603.25 frames. ], batch size: 19, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:36,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.74 vs. limit=10.0 +2024-07-28 14:17:36,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. limit=6.0 +2024-07-28 14:17:37,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168157.33333333334, ans=0.0 +2024-07-28 14:17:49,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0 +2024-07-28 14:17:55,187 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.10 vs. limit=22.5 +2024-07-28 14:17:59,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=168197.33333333334, ans=0.2 +2024-07-28 14:18:05,858 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.85 vs. limit=10.0 +2024-07-28 14:18:08,626 INFO [train.py:1114] (2/4) Epoch 13, batch 3500, loss[loss=0.1539, simple_loss=0.2358, pruned_loss=0.03599, over 4940.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2752, pruned_loss=0.04997, over 938524.59 frames. 
], batch size: 12, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:18:12,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=168224.0, ans=0.0 +2024-07-28 14:18:18,529 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+01 5.616e+01 6.376e+01 7.329e+01 9.586e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:18:18,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.83 vs. limit=15.0 +2024-07-28 14:18:22,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=168250.66666666666, ans=0.025 +2024-07-28 14:18:27,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168264.0, ans=0.125 +2024-07-28 14:18:39,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.82 vs. limit=15.0 +2024-07-28 14:18:44,200 INFO [train.py:1114] (2/4) Epoch 13, batch 3550, loss[loss=0.2005, simple_loss=0.2985, pruned_loss=0.05129, over 4662.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2764, pruned_loss=0.05031, over 938828.66 frames. ], batch size: 14, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:18:51,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.08 vs. limit=12.0 +2024-07-28 14:18:57,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=168317.33333333334, ans=0.0 +2024-07-28 14:19:10,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=168344.0, ans=0.0 +2024-07-28 14:19:17,402 INFO [train.py:1114] (2/4) Epoch 13, batch 3600, loss[loss=0.1796, simple_loss=0.2727, pruned_loss=0.04329, over 4962.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2758, pruned_loss=0.05037, over 940773.40 frames. ], batch size: 13, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:19:27,245 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.903e+01 6.553e+01 7.584e+01 1.363e+02, threshold=1.311e+02, percent-clipped=1.0 +2024-07-28 14:19:28,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=168370.66666666666, ans=0.05 +2024-07-28 14:19:34,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=168384.0, ans=0.2 +2024-07-28 14:19:38,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=168397.33333333334, ans=0.125 +2024-07-28 14:19:43,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.05 vs. limit=22.5 +2024-07-28 14:19:50,620 INFO [train.py:1114] (2/4) Epoch 13, batch 3650, loss[loss=0.2174, simple_loss=0.3133, pruned_loss=0.06076, over 4897.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2754, pruned_loss=0.04978, over 941153.64 frames. 
], batch size: 15, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:19:57,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=168424.0, ans=0.0 +2024-07-28 14:19:59,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.32 vs. limit=15.0 +2024-07-28 14:20:02,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.17 vs. limit=15.0 +2024-07-28 14:20:23,919 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.57 vs. limit=10.0 +2024-07-28 14:20:25,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=168477.33333333334, ans=15.0 +2024-07-28 14:20:27,696 INFO [train.py:1114] (2/4) Epoch 13, batch 3700, loss[loss=0.1922, simple_loss=0.2857, pruned_loss=0.04935, over 4923.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2758, pruned_loss=0.04979, over 941932.76 frames. ], batch size: 14, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:20:34,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=168504.0, ans=0.025 +2024-07-28 14:20:37,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.274e+01 5.551e+01 6.034e+01 6.765e+01 1.404e+02, threshold=1.207e+02, percent-clipped=1.0 +2024-07-28 14:20:43,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168517.33333333334, ans=0.125 +2024-07-28 14:20:45,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=168517.33333333334, ans=0.95 +2024-07-28 14:21:01,039 INFO [train.py:1114] (2/4) Epoch 13, batch 3750, loss[loss=0.1533, simple_loss=0.2369, pruned_loss=0.03486, over 4789.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2757, pruned_loss=0.0499, over 943284.72 frames. ], batch size: 11, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:21:05,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=168557.33333333334, ans=0.125 +2024-07-28 14:21:07,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=168570.66666666666, ans=0.07 +2024-07-28 14:21:15,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=168584.0, ans=0.05 +2024-07-28 14:21:30,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=168610.66666666666, ans=0.0 +2024-07-28 14:21:34,567 INFO [train.py:1114] (2/4) Epoch 13, batch 3800, loss[loss=0.1737, simple_loss=0.2647, pruned_loss=0.04131, over 4815.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.276, pruned_loss=0.0503, over 941764.18 frames. 
], batch size: 14, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:21:36,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=168624.0, ans=0.0 +2024-07-28 14:21:39,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=168624.0, ans=0.125 +2024-07-28 14:21:40,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168624.0, ans=0.125 +2024-07-28 14:21:40,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168637.33333333334, ans=0.1 +2024-07-28 14:21:40,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168637.33333333334, ans=0.125 +2024-07-28 14:21:44,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.768e+01 5.664e+01 6.425e+01 7.356e+01 1.029e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 14:21:46,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=168637.33333333334, ans=0.2 +2024-07-28 14:21:47,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=168650.66666666666, ans=0.125 +2024-07-28 14:21:47,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=168650.66666666666, ans=0.025 +2024-07-28 14:21:59,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0 +2024-07-28 14:21:59,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=168664.0, ans=0.125 +2024-07-28 14:22:08,478 INFO [train.py:1114] (2/4) Epoch 13, batch 3850, loss[loss=0.201, simple_loss=0.2892, pruned_loss=0.05642, over 4613.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2755, pruned_loss=0.04994, over 942492.85 frames. ], batch size: 16, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:22:14,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=168690.66666666666, ans=0.0 +2024-07-28 14:22:34,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=168744.0, ans=0.0 +2024-07-28 14:22:41,756 INFO [train.py:1114] (2/4) Epoch 13, batch 3900, loss[loss=0.2097, simple_loss=0.3015, pruned_loss=0.05897, over 4807.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2755, pruned_loss=0.04964, over 942905.44 frames. 
], batch size: 14, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:22:48,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168770.66666666666, ans=0.125 +2024-07-28 14:22:51,514 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.611e+01 6.115e+01 6.716e+01 9.720e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 14:22:56,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=168784.0, ans=0.0 +2024-07-28 14:22:57,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=168784.0, ans=0.2 +2024-07-28 14:23:01,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0 +2024-07-28 14:23:03,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=168797.33333333334, ans=0.2 +2024-07-28 14:23:03,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=168797.33333333334, ans=0.125 +2024-07-28 14:23:08,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=168810.66666666666, ans=0.125 +2024-07-28 14:23:13,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=168810.66666666666, ans=0.2 +2024-07-28 14:23:13,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=168810.66666666666, ans=0.125 +2024-07-28 14:23:17,170 INFO [train.py:1114] (2/4) Epoch 13, batch 3950, loss[loss=0.2331, simple_loss=0.3129, pruned_loss=0.07659, over 4850.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2744, pruned_loss=0.04898, over 944841.81 frames. ], batch size: 16, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:23:32,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0 +2024-07-28 14:23:36,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.14 vs. limit=10.0 +2024-07-28 14:23:37,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168864.0, ans=0.1 +2024-07-28 14:23:38,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168864.0, ans=0.1 +2024-07-28 14:23:39,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=15.0 +2024-07-28 14:23:43,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=168877.33333333334, ans=0.025 +2024-07-28 14:23:50,577 INFO [train.py:1114] (2/4) Epoch 13, batch 4000, loss[loss=0.1847, simple_loss=0.2671, pruned_loss=0.05113, over 4770.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2753, pruned_loss=0.04975, over 941711.68 frames. 
], batch size: 12, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:23:55,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=168890.66666666666, ans=0.0 +2024-07-28 14:24:00,469 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.269e+01 5.777e+01 6.304e+01 7.103e+01 1.026e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 14:24:08,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=168917.33333333334, ans=0.125 +2024-07-28 14:24:15,805 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0 +2024-07-28 14:24:18,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.69 vs. limit=15.0 +2024-07-28 14:24:25,719 INFO [train.py:1114] (2/4) Epoch 13, batch 4050, loss[loss=0.2513, simple_loss=0.3183, pruned_loss=0.09213, over 3468.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2759, pruned_loss=0.04977, over 940303.31 frames. ], batch size: 35, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:24:27,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=168957.33333333334, ans=0.09899494936611666 +2024-07-28 14:24:58,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169010.66666666666, ans=0.1 +2024-07-28 14:24:59,972 INFO [train.py:1114] (2/4) Epoch 13, batch 4100, loss[loss=0.2213, simple_loss=0.3134, pruned_loss=0.06456, over 4910.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2762, pruned_loss=0.04995, over 938992.13 frames. ], batch size: 15, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:25:09,988 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.994e+01 6.398e+01 7.649e+01 1.244e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 14:25:11,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=169037.33333333334, ans=0.0 +2024-07-28 14:25:11,730 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0 +2024-07-28 14:25:33,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=169077.33333333334, ans=0.0 +2024-07-28 14:25:35,779 INFO [train.py:1114] (2/4) Epoch 13, batch 4150, loss[loss=0.2242, simple_loss=0.3097, pruned_loss=0.06939, over 4830.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2744, pruned_loss=0.04914, over 938682.11 frames. ], batch size: 13, lr: 5.79e-03, grad_scale: 32.0 +2024-07-28 14:25:36,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=169090.66666666666, ans=0.125 +2024-07-28 14:25:43,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.29 vs. limit=10.0 +2024-07-28 14:25:51,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.35 vs. 
limit=12.0 +2024-07-28 14:25:52,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0 +2024-07-28 14:25:55,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=169117.33333333334, ans=0.025 +2024-07-28 14:25:59,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=169130.66666666666, ans=0.125 +2024-07-28 14:26:09,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=169144.0, ans=0.025 +2024-07-28 14:26:11,037 INFO [train.py:1114] (2/4) Epoch 13, batch 4200, loss[loss=0.1901, simple_loss=0.2847, pruned_loss=0.0478, over 4891.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2747, pruned_loss=0.04905, over 939914.36 frames. ], batch size: 15, lr: 5.78e-03, grad_scale: 32.0 +2024-07-28 14:26:11,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169157.33333333334, ans=0.125 +2024-07-28 14:26:11,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169157.33333333334, ans=0.1 +2024-07-28 14:26:20,875 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.568e+01 6.158e+01 7.068e+01 9.655e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 14:26:27,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=169184.0, ans=0.125 +2024-07-28 14:26:44,493 INFO [train.py:1114] (2/4) Epoch 13, batch 4250, loss[loss=0.1546, simple_loss=0.2496, pruned_loss=0.02982, over 4636.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2747, pruned_loss=0.04905, over 941116.06 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 32.0 +2024-07-28 14:26:45,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=169224.0, ans=10.0 +2024-07-28 14:26:48,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=169224.0, ans=0.125 +2024-07-28 14:26:56,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=169237.33333333334, ans=0.125 +2024-07-28 14:26:59,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=169250.66666666666, ans=0.125 +2024-07-28 14:27:04,707 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.80 vs. 
limit=15.0 +2024-07-28 14:27:08,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169264.0, ans=0.1 +2024-07-28 14:27:10,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=169264.0, ans=0.125 +2024-07-28 14:27:12,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=169277.33333333334, ans=0.125 +2024-07-28 14:27:17,722 INFO [train.py:1114] (2/4) Epoch 13, batch 4300, loss[loss=0.1882, simple_loss=0.286, pruned_loss=0.04524, over 4759.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.275, pruned_loss=0.04921, over 940192.55 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:27:20,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=169290.66666666666, ans=0.025 +2024-07-28 14:27:27,634 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.556e+01 6.095e+01 6.767e+01 1.249e+02, threshold=1.219e+02, percent-clipped=1.0 +2024-07-28 14:27:31,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=169317.33333333334, ans=0.025 +2024-07-28 14:27:42,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169330.66666666666, ans=0.1 +2024-07-28 14:27:50,965 INFO [train.py:1114] (2/4) Epoch 13, batch 4350, loss[loss=0.1809, simple_loss=0.2834, pruned_loss=0.03917, over 4760.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2767, pruned_loss=0.04992, over 941089.78 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:27:55,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169357.33333333334, ans=0.125 +2024-07-28 14:28:02,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169370.66666666666, ans=0.1 +2024-07-28 14:28:02,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=169370.66666666666, ans=0.025 +2024-07-28 14:28:24,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.97 vs. limit=15.0 +2024-07-28 14:28:24,283 INFO [train.py:1114] (2/4) Epoch 13, batch 4400, loss[loss=0.1525, simple_loss=0.2477, pruned_loss=0.02864, over 4814.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2763, pruned_loss=0.04941, over 940732.91 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:28:28,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=169424.0, ans=0.0 +2024-07-28 14:28:34,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.00 vs. 
limit=15.0 +2024-07-28 14:28:36,398 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 5.545e+01 6.054e+01 6.710e+01 1.195e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 14:28:48,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.84 vs. limit=15.0 +2024-07-28 14:28:56,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=169477.33333333334, ans=0.125 +2024-07-28 14:28:57,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=169477.33333333334, ans=0.125 +2024-07-28 14:29:00,300 INFO [train.py:1114] (2/4) Epoch 13, batch 4450, loss[loss=0.1761, simple_loss=0.2763, pruned_loss=0.03789, over 4942.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2765, pruned_loss=0.04978, over 938965.16 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:29:06,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=169504.0, ans=0.0 +2024-07-28 14:29:10,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169504.0, ans=0.125 +2024-07-28 14:29:10,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=169504.0, ans=0.125 +2024-07-28 14:29:11,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=169504.0, ans=0.125 +2024-07-28 14:29:14,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=169517.33333333334, ans=0.0 +2024-07-28 14:29:37,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169544.0, ans=0.1 +2024-07-28 14:29:38,960 INFO [train.py:1114] (2/4) Epoch 13, batch 4500, loss[loss=0.1794, simple_loss=0.2731, pruned_loss=0.04286, over 4736.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2769, pruned_loss=0.04963, over 938205.80 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:29:45,294 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-07-28 14:29:48,691 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.617e+01 6.099e+01 7.289e+01 9.992e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 14:29:50,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=169570.66666666666, ans=0.2 +2024-07-28 14:29:55,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=169584.0, ans=0.125 +2024-07-28 14:31:40,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=169584.0, ans=0.2 +2024-07-28 14:31:41,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.52 vs. 
limit=12.0 +2024-07-28 14:31:52,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=169610.66666666666, ans=0.125 +2024-07-28 14:31:59,578 INFO [train.py:1114] (2/4) Epoch 13, batch 4550, loss[loss=0.1851, simple_loss=0.2689, pruned_loss=0.05059, over 4903.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2771, pruned_loss=0.04957, over 940473.31 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:32:18,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=169650.66666666666, ans=0.125 +2024-07-28 14:32:28,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=169677.33333333334, ans=0.0 +2024-07-28 14:32:32,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.96 vs. limit=22.5 +2024-07-28 14:32:34,422 INFO [train.py:1114] (2/4) Epoch 13, batch 4600, loss[loss=0.1775, simple_loss=0.274, pruned_loss=0.04051, over 4556.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2756, pruned_loss=0.04874, over 938524.70 frames. ], batch size: 21, lr: 5.78e-03, grad_scale: 64.0 +2024-07-28 14:32:34,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.17 vs. limit=22.5 +2024-07-28 14:32:41,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=169690.66666666666, ans=0.0 +2024-07-28 14:32:48,089 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.248e+01 5.789e+01 6.719e+01 7.977e+01 1.194e+02, threshold=1.344e+02, percent-clipped=0.0 +2024-07-28 14:32:53,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.57 vs. limit=15.0 +2024-07-28 14:33:04,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=169730.66666666666, ans=0.125 +2024-07-28 14:33:12,868 INFO [train.py:1114] (2/4) Epoch 13, batch 4650, loss[loss=0.1838, simple_loss=0.2788, pruned_loss=0.04439, over 4852.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2775, pruned_loss=0.04925, over 940326.01 frames. ], batch size: 16, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:33:12,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169757.33333333334, ans=0.1 +2024-07-28 14:33:16,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.40 vs. 
limit=22.5 +2024-07-28 14:33:18,592 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:33:21,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=169770.66666666666, ans=0.125 +2024-07-28 14:33:25,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=169770.66666666666, ans=0.125 +2024-07-28 14:33:27,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=169784.0, ans=0.0 +2024-07-28 14:33:37,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169797.33333333334, ans=0.1 +2024-07-28 14:33:38,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=169797.33333333334, ans=0.025 +2024-07-28 14:33:40,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=169810.66666666666, ans=0.125 +2024-07-28 14:33:40,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=169810.66666666666, ans=0.05 +2024-07-28 14:33:46,634 INFO [train.py:1114] (2/4) Epoch 13, batch 4700, loss[loss=0.1491, simple_loss=0.2354, pruned_loss=0.03135, over 4698.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2767, pruned_loss=0.04916, over 937518.18 frames. ], batch size: 11, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:33:53,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.09 vs. limit=15.0 +2024-07-28 14:33:56,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.422e+01 6.008e+01 7.035e+01 1.017e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 14:34:03,711 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.95 vs. limit=15.0 +2024-07-28 14:34:09,079 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.66 vs. limit=10.0 +2024-07-28 14:34:14,886 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:34:18,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=169877.33333333334, ans=0.2 +2024-07-28 14:34:20,165 INFO [train.py:1114] (2/4) Epoch 13, batch 4750, loss[loss=0.2079, simple_loss=0.2826, pruned_loss=0.06656, over 4452.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2759, pruned_loss=0.04928, over 935502.93 frames. 
], batch size: 21, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:34:26,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=169904.0, ans=0.125 +2024-07-28 14:34:40,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=169930.66666666666, ans=0.04949747468305833 +2024-07-28 14:34:50,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=169944.0, ans=0.125 +2024-07-28 14:34:53,980 INFO [train.py:1114] (2/4) Epoch 13, batch 4800, loss[loss=0.1657, simple_loss=0.2616, pruned_loss=0.03488, over 4692.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2759, pruned_loss=0.04921, over 932657.74 frames. ], batch size: 13, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:35:03,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=169970.66666666666, ans=0.2 +2024-07-28 14:35:03,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.668e+01 6.259e+01 7.420e+01 1.160e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 14:35:08,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=169984.0, ans=0.0 +2024-07-28 14:35:24,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=170010.66666666666, ans=0.125 +2024-07-28 14:35:31,481 INFO [train.py:1114] (2/4) Epoch 13, batch 4850, loss[loss=0.1844, simple_loss=0.2814, pruned_loss=0.04369, over 4748.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2766, pruned_loss=0.04969, over 932143.48 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:35:49,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=170050.66666666666, ans=0.0 +2024-07-28 14:35:49,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=170050.66666666666, ans=0.2 +2024-07-28 14:36:04,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.44 vs. limit=6.0 +2024-07-28 14:36:14,100 INFO [train.py:1114] (2/4) Epoch 13, batch 4900, loss[loss=0.1769, simple_loss=0.2692, pruned_loss=0.04233, over 4757.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2752, pruned_loss=0.04902, over 933703.41 frames. ], batch size: 13, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:36:27,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=170090.66666666666, ans=0.125 +2024-07-28 14:36:37,689 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.628e+01 6.419e+01 7.139e+01 1.048e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 14:36:39,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170104.0, ans=0.1 +2024-07-28 14:36:40,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=170117.33333333334, ans=0.0 +2024-07-28 14:36:42,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.72 vs. 
limit=12.0 +2024-07-28 14:36:44,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170117.33333333334, ans=0.125 +2024-07-28 14:36:47,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=170117.33333333334, ans=0.07 +2024-07-28 14:36:47,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170117.33333333334, ans=0.125 +2024-07-28 14:36:59,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=170144.0, ans=0.0 +2024-07-28 14:37:04,268 INFO [train.py:1114] (2/4) Epoch 13, batch 4950, loss[loss=0.2669, simple_loss=0.3395, pruned_loss=0.09714, over 3471.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.04929, over 931475.12 frames. ], batch size: 36, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:37:04,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=170157.33333333334, ans=0.125 +2024-07-28 14:37:05,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=170157.33333333334, ans=0.125 +2024-07-28 14:37:12,963 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.11 vs. limit=22.5 +2024-07-28 14:37:15,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.11 vs. limit=22.5 +2024-07-28 14:37:20,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=170184.0, ans=0.0 +2024-07-28 14:37:25,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=170184.0, ans=0.0 +2024-07-28 14:37:38,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.12 vs. limit=22.5 +2024-07-28 14:37:39,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=170224.0, ans=0.0 +2024-07-28 14:37:40,490 INFO [train.py:1114] (2/4) Epoch 13, batch 5000, loss[loss=0.1695, simple_loss=0.2656, pruned_loss=0.03667, over 4662.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2743, pruned_loss=0.04849, over 935641.63 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:37:44,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.59 vs. 
limit=22.5 +2024-07-28 14:37:51,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=170237.33333333334, ans=0.125 +2024-07-28 14:37:52,211 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.707e+01 6.178e+01 6.994e+01 1.058e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 14:37:57,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=170250.66666666666, ans=0.0 +2024-07-28 14:38:15,701 INFO [train.py:1114] (2/4) Epoch 13, batch 5050, loss[loss=0.1599, simple_loss=0.2507, pruned_loss=0.03461, over 4843.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2742, pruned_loss=0.04826, over 938268.70 frames. ], batch size: 12, lr: 5.77e-03, grad_scale: 64.0 +2024-07-28 14:38:50,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=170344.0, ans=0.0 +2024-07-28 14:38:51,494 INFO [train.py:1114] (2/4) Epoch 13, batch 5100, loss[loss=0.1776, simple_loss=0.2718, pruned_loss=0.04171, over 4768.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2747, pruned_loss=0.04869, over 935589.78 frames. ], batch size: 12, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:38:51,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=170357.33333333334, ans=0.125 +2024-07-28 14:38:56,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.80 vs. limit=22.5 +2024-07-28 14:39:03,334 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:39:04,402 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.671e+01 6.468e+01 7.600e+01 1.076e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 14:39:04,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170370.66666666666, ans=0.1 +2024-07-28 14:39:08,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=170384.0, ans=0.025 +2024-07-28 14:39:15,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=170397.33333333334, ans=0.125 +2024-07-28 14:39:21,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.85 vs. limit=15.0 +2024-07-28 14:39:27,931 INFO [train.py:1114] (2/4) Epoch 13, batch 5150, loss[loss=0.2078, simple_loss=0.2849, pruned_loss=0.06536, over 4833.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2758, pruned_loss=0.04883, over 936578.95 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:39:28,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=170424.0, ans=0.2 +2024-07-28 14:39:36,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.44 vs. 
limit=10.0 +2024-07-28 14:39:38,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=170437.33333333334, ans=0.0 +2024-07-28 14:39:44,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=170450.66666666666, ans=0.125 +2024-07-28 14:39:50,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170464.0, ans=0.125 +2024-07-28 14:39:58,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=170477.33333333334, ans=0.0 +2024-07-28 14:39:59,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=170477.33333333334, ans=0.125 +2024-07-28 14:39:59,299 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=15.0 +2024-07-28 14:40:01,718 INFO [train.py:1114] (2/4) Epoch 13, batch 5200, loss[loss=0.196, simple_loss=0.2954, pruned_loss=0.04828, over 4673.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.276, pruned_loss=0.0491, over 936670.49 frames. ], batch size: 14, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:40:09,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=170504.0, ans=0.0 +2024-07-28 14:40:11,858 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.593e+01 6.249e+01 7.313e+01 1.397e+02, threshold=1.250e+02, percent-clipped=1.0 +2024-07-28 14:40:18,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0 +2024-07-28 14:40:18,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=170517.33333333334, ans=0.125 +2024-07-28 14:40:35,428 INFO [train.py:1114] (2/4) Epoch 13, batch 5250, loss[loss=0.1706, simple_loss=0.2626, pruned_loss=0.03928, over 4902.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2744, pruned_loss=0.04873, over 936552.18 frames. ], batch size: 13, lr: 5.76e-03, grad_scale: 64.0 +2024-07-28 14:40:36,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=170557.33333333334, ans=0.0 +2024-07-28 14:40:44,097 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0 +2024-07-28 14:40:53,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=170584.0, ans=0.0 +2024-07-28 14:40:54,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=170584.0, ans=0.125 +2024-07-28 14:40:54,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.54 vs. 
limit=22.5 +2024-07-28 14:40:54,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=170584.0, ans=0.125 +2024-07-28 14:41:09,255 INFO [train.py:1114] (2/4) Epoch 13, batch 5300, loss[loss=0.1913, simple_loss=0.2905, pruned_loss=0.04599, over 4637.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2746, pruned_loss=0.04891, over 935012.76 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:41:13,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170624.0, ans=0.125 +2024-07-28 14:41:19,627 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.756e+01 6.384e+01 7.054e+01 9.587e+01, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 14:41:19,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=170637.33333333334, ans=0.125 +2024-07-28 14:41:21,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=170650.66666666666, ans=0.0 +2024-07-28 14:41:28,187 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.01 vs. limit=12.0 +2024-07-28 14:41:47,981 INFO [train.py:1114] (2/4) Epoch 13, batch 5350, loss[loss=0.1545, simple_loss=0.2428, pruned_loss=0.03305, over 4524.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2753, pruned_loss=0.04917, over 936989.94 frames. ], batch size: 10, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:41:58,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=170704.0, ans=0.0 +2024-07-28 14:42:00,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=170704.0, ans=0.025 +2024-07-28 14:42:02,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170717.33333333334, ans=0.1 +2024-07-28 14:42:03,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=170717.33333333334, ans=0.0 +2024-07-28 14:42:34,763 INFO [train.py:1114] (2/4) Epoch 13, batch 5400, loss[loss=0.1955, simple_loss=0.2703, pruned_loss=0.06034, over 4170.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2754, pruned_loss=0.04924, over 930989.93 frames. 
], batch size: 25, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:42:43,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=170770.66666666666, ans=0.125 +2024-07-28 14:42:47,124 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.692e+01 6.413e+01 7.093e+01 1.081e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 14:42:49,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=170784.0, ans=0.0 +2024-07-28 14:42:59,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=170797.33333333334, ans=0.0 +2024-07-28 14:42:59,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=170797.33333333334, ans=0.05 +2024-07-28 14:43:03,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=170810.66666666666, ans=0.025 +2024-07-28 14:43:04,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170810.66666666666, ans=0.1 +2024-07-28 14:43:09,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=170824.0, ans=0.125 +2024-07-28 14:43:09,421 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0 +2024-07-28 14:43:09,613 INFO [train.py:1114] (2/4) Epoch 13, batch 5450, loss[loss=0.1798, simple_loss=0.2569, pruned_loss=0.05136, over 4702.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2753, pruned_loss=0.0493, over 933461.67 frames. ], batch size: 11, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:43:19,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.78 vs. limit=15.0 +2024-07-28 14:43:30,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=170850.66666666666, ans=0.0 +2024-07-28 14:43:38,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=170864.0, ans=0.1 +2024-07-28 14:43:43,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=170877.33333333334, ans=0.0 +2024-07-28 14:43:44,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=6.0 +2024-07-28 14:43:46,361 INFO [train.py:1114] (2/4) Epoch 13, batch 5500, loss[loss=0.2074, simple_loss=0.294, pruned_loss=0.06039, over 4215.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2759, pruned_loss=0.05024, over 931036.34 frames. ], batch size: 25, lr: 5.76e-03, grad_scale: 32.0 +2024-07-28 14:43:46,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=170890.66666666666, ans=0.125 +2024-07-28 14:43:56,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.95 vs. 
limit=15.0 +2024-07-28 14:43:57,654 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.576e+01 6.394e+01 7.172e+01 9.673e+01, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 14:44:10,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=170930.66666666666, ans=0.0 +2024-07-28 14:44:32,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170930.66666666666, ans=0.125 +2024-07-28 14:44:33,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170930.66666666666, ans=0.1 +2024-07-28 14:44:34,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=170930.66666666666, ans=0.125 +2024-07-28 14:44:42,496 INFO [train.py:1114] (2/4) Epoch 13, batch 5550, loss[loss=0.1888, simple_loss=0.2708, pruned_loss=0.05342, over 4711.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2753, pruned_loss=0.04997, over 933743.29 frames. ], batch size: 12, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:45:05,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170984.0, ans=0.125 +2024-07-28 14:45:09,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170984.0, ans=0.0 +2024-07-28 14:45:10,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.42 vs. limit=22.5 +2024-07-28 14:45:28,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.55 vs. limit=22.5 +2024-07-28 14:45:34,106 INFO [train.py:1114] (2/4) Epoch 13, batch 5600, loss[loss=0.1919, simple_loss=0.2868, pruned_loss=0.04849, over 4737.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2771, pruned_loss=0.05039, over 934855.49 frames. ], batch size: 14, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:45:36,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.58 vs. 
limit=22.5 +2024-07-28 14:45:38,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=171024.0, ans=0.2 +2024-07-28 14:45:44,821 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.519e+01 5.953e+01 6.683e+01 8.989e+01, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 14:45:46,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=171037.33333333334, ans=0.125 +2024-07-28 14:45:47,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171050.66666666666, ans=0.1 +2024-07-28 14:45:50,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=171050.66666666666, ans=0.125 +2024-07-28 14:45:58,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=171064.0, ans=0.125 +2024-07-28 14:46:00,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=171077.33333333334, ans=0.125 +2024-07-28 14:46:07,382 INFO [train.py:1114] (2/4) Epoch 13, batch 5650, loss[loss=0.1925, simple_loss=0.2875, pruned_loss=0.04878, over 4491.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2763, pruned_loss=0.04991, over 937106.81 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:46:16,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=171104.0, ans=0.2 +2024-07-28 14:46:17,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=171104.0, ans=0.125 +2024-07-28 14:46:23,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171117.33333333334, ans=0.1 +2024-07-28 14:46:25,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=171117.33333333334, ans=0.125 +2024-07-28 14:46:26,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=12.0 +2024-07-28 14:46:34,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=171130.66666666666, ans=0.2 +2024-07-28 14:46:40,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=171144.0, ans=0.0 +2024-07-28 14:46:41,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=171144.0, ans=0.1 +2024-07-28 14:46:42,391 INFO [train.py:1114] (2/4) Epoch 13, batch 5700, loss[loss=0.1476, simple_loss=0.2357, pruned_loss=0.02982, over 4689.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2767, pruned_loss=0.04966, over 938127.65 frames. 
], batch size: 13, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:46:46,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=171157.33333333334, ans=0.0 +2024-07-28 14:46:58,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=171170.66666666666, ans=0.125 +2024-07-28 14:46:58,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 5.340e+01 5.994e+01 6.863e+01 1.115e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 14:47:00,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=171184.0, ans=0.125 +2024-07-28 14:47:06,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.66 vs. limit=15.0 +2024-07-28 14:47:23,393 INFO [train.py:1114] (2/4) Epoch 13, batch 5750, loss[loss=0.2317, simple_loss=0.3128, pruned_loss=0.07531, over 4645.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.277, pruned_loss=0.04962, over 938064.78 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:47:24,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=171224.0, ans=0.125 +2024-07-28 14:47:25,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=171224.0, ans=0.125 +2024-07-28 14:47:56,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=171290.66666666666, ans=0.0 +2024-07-28 14:47:56,856 INFO [train.py:1114] (2/4) Epoch 13, batch 5800, loss[loss=0.202, simple_loss=0.2911, pruned_loss=0.05645, over 4711.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2776, pruned_loss=0.0498, over 937641.43 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:47:57,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171290.66666666666, ans=0.125 +2024-07-28 14:48:00,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=171290.66666666666, ans=0.07 +2024-07-28 14:48:00,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171290.66666666666, ans=0.125 +2024-07-28 14:48:04,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=171304.0, ans=0.125 +2024-07-28 14:48:07,659 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.852e+01 6.546e+01 7.322e+01 1.389e+02, threshold=1.309e+02, percent-clipped=1.0 +2024-07-28 14:48:19,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=171330.66666666666, ans=0.0 +2024-07-28 14:48:24,495 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. 
limit=6.0 +2024-07-28 14:48:28,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=171344.0, ans=0.0 +2024-07-28 14:48:29,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.30 vs. limit=15.0 +2024-07-28 14:48:35,957 INFO [train.py:1114] (2/4) Epoch 13, batch 5850, loss[loss=0.2169, simple_loss=0.3102, pruned_loss=0.06181, over 4441.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2772, pruned_loss=0.04977, over 938138.47 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:48:37,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171357.33333333334, ans=0.0 +2024-07-28 14:48:42,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=171357.33333333334, ans=0.05 +2024-07-28 14:48:42,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.77 vs. limit=10.0 +2024-07-28 14:48:46,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-07-28 14:48:49,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=171370.66666666666, ans=0.125 +2024-07-28 14:48:49,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=171370.66666666666, ans=0.2 +2024-07-28 14:48:59,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=171397.33333333334, ans=0.125 +2024-07-28 14:49:07,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=171410.66666666666, ans=10.0 +2024-07-28 14:49:09,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=171410.66666666666, ans=0.2 +2024-07-28 14:49:11,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=15.0 +2024-07-28 14:49:13,156 INFO [train.py:1114] (2/4) Epoch 13, batch 5900, loss[loss=0.2109, simple_loss=0.3056, pruned_loss=0.05812, over 4694.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2768, pruned_loss=0.04952, over 938453.31 frames. 
], batch size: 15, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:49:18,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=171424.0, ans=0.07 +2024-07-28 14:49:19,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=171437.33333333334, ans=0.125 +2024-07-28 14:49:44,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=171437.33333333334, ans=0.125 +2024-07-28 14:49:46,008 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.643e+01 6.441e+01 7.134e+01 1.016e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 14:49:47,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=171437.33333333334, ans=0.0 +2024-07-28 14:49:52,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=171450.66666666666, ans=0.0 +2024-07-28 14:49:54,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=171450.66666666666, ans=0.0 +2024-07-28 14:49:55,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171464.0, ans=0.0 +2024-07-28 14:49:56,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=171464.0, ans=0.125 +2024-07-28 14:49:59,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=171464.0, ans=0.025 +2024-07-28 14:49:59,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=171464.0, ans=0.0 +2024-07-28 14:50:09,013 INFO [train.py:1114] (2/4) Epoch 13, batch 5950, loss[loss=0.1979, simple_loss=0.2778, pruned_loss=0.05899, over 4690.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.04964, over 940286.37 frames. ], batch size: 15, lr: 5.75e-03, grad_scale: 32.0 +2024-07-28 14:50:18,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.09 vs. limit=22.5 +2024-07-28 14:50:29,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171517.33333333334, ans=0.1 +2024-07-28 14:50:32,740 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.0 +2024-07-28 14:50:47,157 INFO [train.py:1114] (2/4) Epoch 13, batch 6000, loss[loss=0.197, simple_loss=0.2793, pruned_loss=0.05738, over 4149.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2775, pruned_loss=0.05037, over 937503.71 frames. ], batch size: 25, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:50:47,158 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 14:51:12,227 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1644, simple_loss=0.2689, pruned_loss=0.02993, over 944034.00 frames. 
+2024-07-28 14:51:12,228 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 14:51:22,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=171570.66666666666, ans=0.125 +2024-07-28 14:51:25,689 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.656e+01 6.363e+01 7.172e+01 1.139e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 14:51:26,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171570.66666666666, ans=0.125 +2024-07-28 14:51:35,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=171597.33333333334, ans=0.0 +2024-07-28 14:51:38,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=171597.33333333334, ans=0.125 +2024-07-28 14:52:00,226 INFO [train.py:1114] (2/4) Epoch 13, batch 6050, loss[loss=0.1625, simple_loss=0.2549, pruned_loss=0.03504, over 4778.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2766, pruned_loss=0.04996, over 938673.08 frames. ], batch size: 12, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:52:05,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171624.0, ans=0.1 +2024-07-28 14:52:07,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=171624.0, ans=0.125 +2024-07-28 14:52:10,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=171637.33333333334, ans=0.2 +2024-07-28 14:52:21,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171650.66666666666, ans=0.125 +2024-07-28 14:52:22,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=171650.66666666666, ans=0.5 +2024-07-28 14:52:35,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=171677.33333333334, ans=0.125 +2024-07-28 14:52:36,818 INFO [train.py:1114] (2/4) Epoch 13, batch 6100, loss[loss=0.1993, simple_loss=0.311, pruned_loss=0.04376, over 4690.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2757, pruned_loss=0.0492, over 937985.98 frames. ], batch size: 15, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:52:51,828 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.506e+01 6.070e+01 6.932e+01 1.254e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 14:53:06,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171730.66666666666, ans=0.1 +2024-07-28 14:53:06,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171730.66666666666, ans=0.1 +2024-07-28 14:53:07,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-07-28 14:53:20,085 INFO [train.py:1114] (2/4) Epoch 13, batch 6150, loss[loss=0.2561, simple_loss=0.3241, pruned_loss=0.09409, over 3323.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04928, over 936400.69 frames. 
], batch size: 35, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:53:26,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=171770.66666666666, ans=0.125 +2024-07-28 14:53:51,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=171810.66666666666, ans=0.2 +2024-07-28 14:53:53,579 INFO [train.py:1114] (2/4) Epoch 13, batch 6200, loss[loss=0.1693, simple_loss=0.2683, pruned_loss=0.03509, over 4745.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.277, pruned_loss=0.04972, over 935826.61 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:54:06,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171837.33333333334, ans=0.0 +2024-07-28 14:54:07,622 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.672e+01 6.206e+01 7.275e+01 9.803e+01, threshold=1.241e+02, percent-clipped=1.0 +2024-07-28 14:54:11,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171850.66666666666, ans=0.0 +2024-07-28 14:54:32,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.96 vs. limit=12.0 +2024-07-28 14:54:34,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=171877.33333333334, ans=6.0 +2024-07-28 14:54:35,791 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.80 vs. limit=15.0 +2024-07-28 14:54:35,976 INFO [train.py:1114] (2/4) Epoch 13, batch 6250, loss[loss=0.2361, simple_loss=0.3274, pruned_loss=0.07241, over 4804.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2776, pruned_loss=0.05045, over 932955.81 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:54:36,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=171890.66666666666, ans=0.125 +2024-07-28 14:54:38,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=171890.66666666666, ans=0.0 +2024-07-28 14:54:41,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171890.66666666666, ans=0.125 +2024-07-28 14:54:47,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171904.0, ans=0.1 +2024-07-28 14:54:59,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=171930.66666666666, ans=0.2 +2024-07-28 14:54:59,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=171930.66666666666, ans=0.0 +2024-07-28 14:55:10,113 INFO [train.py:1114] (2/4) Epoch 13, batch 6300, loss[loss=0.1476, simple_loss=0.2523, pruned_loss=0.02141, over 4542.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2769, pruned_loss=0.05005, over 929698.10 frames. 
], batch size: 10, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:55:10,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171957.33333333334, ans=0.1 +2024-07-28 14:55:11,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.76 vs. limit=15.0 +2024-07-28 14:55:16,061 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=15.0 +2024-07-28 14:55:17,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=171957.33333333334, ans=0.2 +2024-07-28 14:55:17,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=171970.66666666666, ans=0.0 +2024-07-28 14:55:26,613 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.859e+01 6.673e+01 7.738e+01 1.141e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-28 14:55:32,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=171984.0, ans=0.0 +2024-07-28 14:55:33,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=171984.0, ans=0.025 +2024-07-28 14:55:33,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171984.0, ans=0.125 +2024-07-28 14:55:42,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=172010.66666666666, ans=0.125 +2024-07-28 14:55:44,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172010.66666666666, ans=0.1 +2024-07-28 14:55:49,044 INFO [train.py:1114] (2/4) Epoch 13, batch 6350, loss[loss=0.216, simple_loss=0.3128, pruned_loss=0.05961, over 4455.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2759, pruned_loss=0.04921, over 933794.41 frames. ], batch size: 21, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:55:53,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0 +2024-07-28 14:55:53,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=172024.0, ans=0.0 +2024-07-28 14:55:56,251 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.00 vs. limit=15.0 +2024-07-28 14:56:08,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.63 vs. limit=15.0 +2024-07-28 14:56:14,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=172064.0, ans=0.125 +2024-07-28 14:56:14,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172064.0, ans=0.1 +2024-07-28 14:56:18,680 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.06 vs. 
limit=15.0 +2024-07-28 14:56:23,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.45 vs. limit=22.5 +2024-07-28 14:56:26,785 INFO [train.py:1114] (2/4) Epoch 13, batch 6400, loss[loss=0.2023, simple_loss=0.291, pruned_loss=0.05678, over 4638.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.276, pruned_loss=0.04922, over 934927.99 frames. ], batch size: 13, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:56:30,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172090.66666666666, ans=0.1 +2024-07-28 14:56:31,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5 +2024-07-28 14:56:34,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=172104.0, ans=10.0 +2024-07-28 14:56:36,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=172104.0, ans=0.125 +2024-07-28 14:56:37,084 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+01 5.588e+01 6.261e+01 7.317e+01 1.038e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 14:56:38,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=172104.0, ans=0.0 +2024-07-28 14:56:51,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=172130.66666666666, ans=0.0 +2024-07-28 14:56:57,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=172144.0, ans=0.0 +2024-07-28 14:57:00,246 INFO [train.py:1114] (2/4) Epoch 13, batch 6450, loss[loss=0.1978, simple_loss=0.2936, pruned_loss=0.05097, over 4526.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.276, pruned_loss=0.04891, over 938687.97 frames. ], batch size: 21, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:57:02,038 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.75 vs. limit=22.5 +2024-07-28 14:57:03,960 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:57:10,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=172170.66666666666, ans=0.0 +2024-07-28 14:57:17,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=172184.0, ans=0.0 +2024-07-28 14:57:18,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.60 vs. limit=15.0 +2024-07-28 14:57:35,421 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:57:38,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172210.66666666666, ans=0.125 +2024-07-28 14:57:39,250 INFO [train.py:1114] (2/4) Epoch 13, batch 6500, loss[loss=0.2462, simple_loss=0.3199, pruned_loss=0.08631, over 3325.00 frames. 
], tot_loss[loss=0.1865, simple_loss=0.2757, pruned_loss=0.04865, over 939817.61 frames. ], batch size: 36, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:57:46,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=172237.33333333334, ans=0.0 +2024-07-28 14:57:48,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172237.33333333334, ans=0.0 +2024-07-28 14:57:49,796 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.677e+01 6.560e+01 8.086e+01 1.120e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-28 14:58:14,166 INFO [train.py:1114] (2/4) Epoch 13, batch 6550, loss[loss=0.1698, simple_loss=0.2615, pruned_loss=0.03906, over 4798.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2755, pruned_loss=0.04842, over 942715.86 frames. ], batch size: 11, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:58:19,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.90 vs. limit=15.0 +2024-07-28 14:58:20,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=172304.0, ans=0.125 +2024-07-28 14:58:25,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172304.0, ans=0.125 +2024-07-28 14:58:26,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=172304.0, ans=0.125 +2024-07-28 14:58:29,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=172317.33333333334, ans=0.0 +2024-07-28 14:58:36,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=172330.66666666666, ans=0.025 +2024-07-28 14:58:43,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=172344.0, ans=10.0 +2024-07-28 14:58:43,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=172344.0, ans=0.0 +2024-07-28 14:58:44,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172344.0, ans=0.0 +2024-07-28 14:58:47,965 INFO [train.py:1114] (2/4) Epoch 13, batch 6600, loss[loss=0.1581, simple_loss=0.2522, pruned_loss=0.03196, over 4918.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2751, pruned_loss=0.04822, over 944525.52 frames. ], batch size: 14, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:58:58,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.699e+01 6.105e+01 6.926e+01 1.138e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 14:59:03,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=172384.0, ans=0.125 +2024-07-28 14:59:09,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=172397.33333333334, ans=0.125 +2024-07-28 14:59:22,884 INFO [train.py:1114] (2/4) Epoch 13, batch 6650, loss[loss=0.2383, simple_loss=0.327, pruned_loss=0.07475, over 4624.00 frames. 
], tot_loss[loss=0.1848, simple_loss=0.2742, pruned_loss=0.04772, over 943451.95 frames. ], batch size: 17, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:59:44,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.13 vs. limit=15.0 +2024-07-28 14:59:50,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=172477.33333333334, ans=0.125 +2024-07-28 14:59:56,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=172490.66666666666, ans=0.0 +2024-07-28 14:59:56,802 INFO [train.py:1114] (2/4) Epoch 13, batch 6700, loss[loss=0.2031, simple_loss=0.3023, pruned_loss=0.05194, over 4724.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2748, pruned_loss=0.04791, over 941938.27 frames. ], batch size: 19, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 14:59:57,269 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.20 vs. limit=22.5 +2024-07-28 14:59:58,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=172490.66666666666, ans=0.0 +2024-07-28 15:00:01,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.04 vs. limit=15.0 +2024-07-28 15:00:02,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=172490.66666666666, ans=0.0 +2024-07-28 15:00:07,462 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.630e+01 6.292e+01 7.000e+01 1.303e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-28 15:00:16,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=172517.33333333334, ans=0.0 +2024-07-28 15:00:19,788 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.89 vs. limit=22.5 +2024-07-28 15:00:32,543 INFO [train.py:1114] (2/4) Epoch 13, batch 6750, loss[loss=0.2213, simple_loss=0.3083, pruned_loss=0.06713, over 4236.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2752, pruned_loss=0.04798, over 939768.77 frames. ], batch size: 25, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:00:55,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=172597.33333333334, ans=0.0 +2024-07-28 15:01:08,804 INFO [train.py:1114] (2/4) Epoch 13, batch 6800, loss[loss=0.2129, simple_loss=0.3232, pruned_loss=0.05133, over 4640.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2762, pruned_loss=0.04839, over 938822.53 frames. 
], batch size: 13, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:01:17,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=172637.33333333334, ans=0.0 +2024-07-28 15:01:19,460 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.597e+01 6.324e+01 7.266e+01 1.591e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-28 15:01:28,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=172664.0, ans=0.125 +2024-07-28 15:01:35,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=172677.33333333334, ans=0.5 +2024-07-28 15:01:35,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=172677.33333333334, ans=0.0 +2024-07-28 15:01:39,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=172677.33333333334, ans=0.2 +2024-07-28 15:01:41,600 INFO [train.py:1114] (2/4) Epoch 13, batch 6850, loss[loss=0.2087, simple_loss=0.2952, pruned_loss=0.06111, over 4688.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2761, pruned_loss=0.04825, over 940489.39 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0 +2024-07-28 15:02:13,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172730.66666666666, ans=0.1 +2024-07-28 15:02:23,643 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=15.0 +2024-07-28 15:02:27,241 INFO [train.py:1114] (2/4) Epoch 13, batch 6900, loss[loss=0.1751, simple_loss=0.2708, pruned_loss=0.03973, over 4966.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2764, pruned_loss=0.04897, over 942758.67 frames. ], batch size: 13, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:02:27,532 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=15.0 +2024-07-28 15:02:35,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=172770.66666666666, ans=10.0 +2024-07-28 15:02:38,168 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.650e+01 5.997e+01 6.576e+01 8.900e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 15:02:49,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.55 vs. 
limit=15.0 +2024-07-28 15:02:51,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=172797.33333333334, ans=0.125 +2024-07-28 15:02:52,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172797.33333333334, ans=0.1 +2024-07-28 15:02:56,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=172810.66666666666, ans=0.125 +2024-07-28 15:02:57,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172810.66666666666, ans=0.125 +2024-07-28 15:02:58,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.43 vs. limit=15.0 +2024-07-28 15:03:01,335 INFO [train.py:1114] (2/4) Epoch 13, batch 6950, loss[loss=0.1728, simple_loss=0.2496, pruned_loss=0.04804, over 4511.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2766, pruned_loss=0.04991, over 940102.53 frames. ], batch size: 10, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:03:01,439 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:03:01,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.61 vs. limit=22.5 +2024-07-28 15:03:03,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=172824.0, ans=0.125 +2024-07-28 15:03:07,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172824.0, ans=0.1 +2024-07-28 15:03:09,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172837.33333333334, ans=0.125 +2024-07-28 15:03:13,921 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.05 vs. limit=22.5 +2024-07-28 15:03:15,254 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.06 vs. limit=22.5 +2024-07-28 15:03:17,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.91 vs. limit=15.0 +2024-07-28 15:03:27,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=172864.0, ans=0.09899494936611666 +2024-07-28 15:03:29,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=172864.0, ans=0.125 +2024-07-28 15:03:38,301 INFO [train.py:1114] (2/4) Epoch 13, batch 7000, loss[loss=0.1901, simple_loss=0.2789, pruned_loss=0.05064, over 4609.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2766, pruned_loss=0.04987, over 938168.93 frames. 
], batch size: 17, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:03:38,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=172890.66666666666, ans=0.0 +2024-07-28 15:03:41,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=172890.66666666666, ans=0.125 +2024-07-28 15:03:44,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=172904.0, ans=0.0 +2024-07-28 15:03:46,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=172904.0, ans=0.125 +2024-07-28 15:03:48,574 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.631e+01 6.423e+01 7.992e+01 1.097e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 15:03:48,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=172904.0, ans=0.09899494936611666 +2024-07-28 15:03:57,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172930.66666666666, ans=0.1 +2024-07-28 15:04:02,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172930.66666666666, ans=0.125 +2024-07-28 15:04:02,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=172930.66666666666, ans=0.2 +2024-07-28 15:04:05,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=172944.0, ans=0.0 +2024-07-28 15:04:08,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172944.0, ans=0.1 +2024-07-28 15:04:10,934 INFO [train.py:1114] (2/4) Epoch 13, batch 7050, loss[loss=0.1757, simple_loss=0.2701, pruned_loss=0.0406, over 4767.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2751, pruned_loss=0.04863, over 941995.57 frames. ], batch size: 19, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:04:21,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=172970.66666666666, ans=0.125 +2024-07-28 15:04:29,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.38 vs. limit=22.5 +2024-07-28 15:04:33,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=172997.33333333334, ans=0.125 +2024-07-28 15:04:43,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=173024.0, ans=0.125 +2024-07-28 15:04:44,124 INFO [train.py:1114] (2/4) Epoch 13, batch 7100, loss[loss=0.2023, simple_loss=0.3003, pruned_loss=0.05219, over 4809.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2755, pruned_loss=0.04911, over 937470.91 frames. ], batch size: 15, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:04:44,543 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.88 vs. 
limit=15.0 +2024-07-28 15:04:46,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=173024.0, ans=0.0 +2024-07-28 15:04:50,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=173037.33333333334, ans=0.0 +2024-07-28 15:04:54,282 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.692e+01 6.139e+01 7.289e+01 1.294e+02, threshold=1.228e+02, percent-clipped=1.0 +2024-07-28 15:05:12,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=173077.33333333334, ans=0.125 +2024-07-28 15:05:13,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=173077.33333333334, ans=0.125 +2024-07-28 15:05:15,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.98 vs. limit=15.0 +2024-07-28 15:05:16,965 INFO [train.py:1114] (2/4) Epoch 13, batch 7150, loss[loss=0.2432, simple_loss=0.3264, pruned_loss=0.07994, over 4432.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2733, pruned_loss=0.04848, over 938207.94 frames. ], batch size: 21, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:05:17,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=173090.66666666666, ans=0.0 +2024-07-28 15:05:33,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-07-28 15:05:42,913 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173144.0, ans=0.125 +2024-07-28 15:05:46,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. limit=10.0 +2024-07-28 15:05:48,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=173144.0, ans=0.125 +2024-07-28 15:05:50,047 INFO [train.py:1114] (2/4) Epoch 13, batch 7200, loss[loss=0.175, simple_loss=0.2727, pruned_loss=0.03872, over 4802.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2738, pruned_loss=0.0482, over 938593.24 frames. 
], batch size: 15, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:05:52,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=173157.33333333334, ans=0.125 +2024-07-28 15:05:56,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=173170.66666666666, ans=0.125 +2024-07-28 15:05:57,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=173170.66666666666, ans=0.125 +2024-07-28 15:05:59,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173170.66666666666, ans=0.1 +2024-07-28 15:06:00,404 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.641e+01 6.340e+01 7.110e+01 1.006e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 15:06:03,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=173184.0, ans=0.025 +2024-07-28 15:06:18,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=173210.66666666666, ans=0.0 +2024-07-28 15:06:19,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.93 vs. limit=22.5 +2024-07-28 15:06:22,752 INFO [train.py:1114] (2/4) Epoch 13, batch 7250, loss[loss=0.1687, simple_loss=0.2431, pruned_loss=0.0471, over 4857.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2735, pruned_loss=0.04855, over 939818.45 frames. ], batch size: 12, lr: 5.72e-03, grad_scale: 32.0 +2024-07-28 15:06:26,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=173224.0, ans=0.2 +2024-07-28 15:06:33,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173237.33333333334, ans=0.125 +2024-07-28 15:06:35,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=173250.66666666666, ans=0.0 +2024-07-28 15:06:37,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=173250.66666666666, ans=0.0 +2024-07-28 15:06:47,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173264.0, ans=0.125 +2024-07-28 15:06:53,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=173277.33333333334, ans=0.5 +2024-07-28 15:06:55,532 INFO [train.py:1114] (2/4) Epoch 13, batch 7300, loss[loss=0.1704, simple_loss=0.2495, pruned_loss=0.04562, over 4849.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2727, pruned_loss=0.04783, over 940119.59 frames. 
], batch size: 12, lr: 5.72e-03, grad_scale: 64.0 +2024-07-28 15:06:57,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=173290.66666666666, ans=0.025 +2024-07-28 15:07:02,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173304.0, ans=0.125 +2024-07-28 15:07:04,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=173304.0, ans=0.125 +2024-07-28 15:07:05,992 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.468e+01 5.985e+01 6.770e+01 9.344e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 15:07:22,640 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:07:24,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173344.0, ans=0.1 +2024-07-28 15:07:25,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=173344.0, ans=0.0 +2024-07-28 15:07:28,106 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.55 vs. limit=10.0 +2024-07-28 15:07:28,308 INFO [train.py:1114] (2/4) Epoch 13, batch 7350, loss[loss=0.1905, simple_loss=0.2831, pruned_loss=0.04891, over 4635.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2735, pruned_loss=0.04826, over 939312.55 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0 +2024-07-28 15:07:28,451 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:07:32,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=173357.33333333334, ans=0.125 +2024-07-28 15:07:47,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=173397.33333333334, ans=0.125 +2024-07-28 15:07:49,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.95 vs. limit=15.0 +2024-07-28 15:07:55,449 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.73 vs. limit=15.0 +2024-07-28 15:08:00,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.71 vs. limit=15.0 +2024-07-28 15:08:02,220 INFO [train.py:1114] (2/4) Epoch 13, batch 7400, loss[loss=0.1937, simple_loss=0.2776, pruned_loss=0.0549, over 4691.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2734, pruned_loss=0.04803, over 940361.40 frames. 
], batch size: 13, lr: 5.71e-03, grad_scale: 64.0
[training log, rank 2/4, Epoch 13, batches 7450-10000: per-parameter scaling.py diagnostics (ScheduledFloat updates, Whitening metrics, WithLoss records) condensed to the train.py batch summaries, validation results, and optim.py gradient-clipping warnings]
+2024-07-28 15:08:12,819 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+01 5.640e+01 6.317e+01 7.601e+01 1.154e+02, threshold=1.263e+02, percent-clipped=0.0
+2024-07-28 15:08:36,749 INFO [train.py:1114] (2/4) Epoch 13, batch 7450, loss[loss=0.1774, simple_loss=0.2563, pruned_loss=0.04926, over 4626.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2733, pruned_loss=0.04858, over 937799.74 frames. ], batch size: 11, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:09:09,545 INFO [train.py:1114] (2/4) Epoch 13, batch 7500, loss[loss=0.2632, simple_loss=0.3235, pruned_loss=0.1015, over 3524.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2741, pruned_loss=0.04907, over 935921.29 frames. ], batch size: 38, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:09:20,245 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.712e+01 6.192e+01 7.126e+01 1.284e+02, threshold=1.238e+02, percent-clipped=1.0
+2024-07-28 15:09:56,860 INFO [train.py:1114] (2/4) Epoch 13, batch 7550, loss[loss=0.2274, simple_loss=0.31, pruned_loss=0.07245, over 4621.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2752, pruned_loss=0.04943, over 935489.69 frames. ], batch size: 17, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:10:57,558 INFO [train.py:1114] (2/4) Epoch 13, batch 7600, loss[loss=0.194, simple_loss=0.2867, pruned_loss=0.05063, over 4811.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2747, pruned_loss=0.04847, over 937453.47 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:11:08,021 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.498e+01 5.988e+01 6.691e+01 9.239e+01, threshold=1.198e+02, percent-clipped=0.0
+2024-07-28 15:12:06,101 INFO [train.py:1114] (2/4) Epoch 13, batch 7650, loss[loss=0.1484, simple_loss=0.2285, pruned_loss=0.03417, over 4934.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2752, pruned_loss=0.0491, over 936305.41 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:12:41,860 INFO [train.py:1114] (2/4) Epoch 13, batch 7700, loss[loss=0.1651, simple_loss=0.2546, pruned_loss=0.03778, over 4703.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04925, over 934191.95 frames. ], batch size: 13, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:12:52,772 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.534e+01 6.118e+01 6.663e+01 8.734e+01, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 15:13:14,198 INFO [train.py:1114] (2/4) Epoch 13, batch 7750, loss[loss=0.1903, simple_loss=0.2854, pruned_loss=0.04755, over 4925.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2769, pruned_loss=0.04932, over 935175.65 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 32.0
+2024-07-28 15:13:49,763 INFO [train.py:1114] (2/4) Epoch 13, batch 7800, loss[loss=0.1725, simple_loss=0.2775, pruned_loss=0.03378, over 4662.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.278, pruned_loss=0.04947, over 937164.24 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:14:01,086 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.796e+01 5.555e+01 6.069e+01 6.471e+01 9.594e+01, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 15:14:35,067 INFO [train.py:1114] (2/4) Epoch 13, batch 7850, loss[loss=0.1685, simple_loss=0.2557, pruned_loss=0.04068, over 4512.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2774, pruned_loss=0.04937, over 936487.77 frames. ], batch size: 10, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:15:09,968 INFO [train.py:1114] (2/4) Epoch 13, batch 7900, loss[loss=0.1936, simple_loss=0.2869, pruned_loss=0.05014, over 4876.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2782, pruned_loss=0.04981, over 933828.55 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:15:20,620 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.632e+01 6.110e+01 7.084e+01 9.814e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 15:15:41,938 INFO [train.py:1114] (2/4) Epoch 13, batch 7950, loss[loss=0.2197, simple_loss=0.304, pruned_loss=0.06771, over 3295.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2785, pruned_loss=0.04963, over 935635.25 frames. ], batch size: 35, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:16:14,590 INFO [train.py:1114] (2/4) Epoch 13, batch 8000, loss[loss=0.1648, simple_loss=0.2498, pruned_loss=0.03994, over 4606.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2767, pruned_loss=0.04942, over 934693.59 frames. ], batch size: 11, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:16:25,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.751e+01 6.184e+01 6.866e+01 1.059e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 15:16:47,953 INFO [train.py:1114] (2/4) Epoch 13, batch 8050, loss[loss=0.1864, simple_loss=0.2881, pruned_loss=0.04236, over 4819.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2776, pruned_loss=0.04962, over 934403.49 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:17:23,928 INFO [train.py:1114] (2/4) Epoch 13, batch 8100, loss[loss=0.209, simple_loss=0.2887, pruned_loss=0.06466, over 4802.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2774, pruned_loss=0.04951, over 934067.54 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:17:34,676 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.712e+01 6.251e+01 7.311e+01 9.756e+01, threshold=1.250e+02, percent-clipped=0.0
+2024-07-28 15:18:25,030 INFO [train.py:1114] (2/4) Epoch 13, batch 8150, loss[loss=0.1999, simple_loss=0.2896, pruned_loss=0.05514, over 4807.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2773, pruned_loss=0.04966, over 937687.46 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:21:34,267 INFO [train.py:1114] (2/4) Epoch 13, batch 8200, loss[loss=0.1825, simple_loss=0.2836, pruned_loss=0.04074, over 4806.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2779, pruned_loss=0.04972, over 938764.28 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:21:57,442 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 5.624e+01 6.115e+01 7.227e+01 1.322e+02, threshold=1.223e+02, percent-clipped=1.0
+2024-07-28 15:23:17,765 INFO [train.py:1114] (2/4) Epoch 13, batch 8250, loss[loss=0.1911, simple_loss=0.2829, pruned_loss=0.04959, over 4891.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2774, pruned_loss=0.04952, over 938884.94 frames. ], batch size: 13, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:24:00,799 INFO [train.py:1114] (2/4) Epoch 13, batch 8300, loss[loss=0.186, simple_loss=0.2669, pruned_loss=0.05257, over 4901.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2774, pruned_loss=0.04901, over 938681.74 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:24:11,814 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.640e+01 5.984e+01 6.893e+01 9.803e+01, threshold=1.197e+02, percent-clipped=0.0
+2024-07-28 15:24:38,262 INFO [train.py:1114] (2/4) Epoch 13, batch 8350, loss[loss=0.2062, simple_loss=0.2941, pruned_loss=0.05914, over 4797.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2775, pruned_loss=0.04891, over 941511.69 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:25:12,894 INFO [train.py:1114] (2/4) Epoch 13, batch 8400, loss[loss=0.1489, simple_loss=0.2435, pruned_loss=0.02719, over 4773.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2767, pruned_loss=0.0487, over 940182.76 frames. ], batch size: 12, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:25:23,776 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.833e+01 6.092e+01 7.413e+01 1.221e+02, threshold=1.218e+02, percent-clipped=1.0
+2024-07-28 15:25:51,139 INFO [train.py:1114] (2/4) Epoch 13, batch 8450, loss[loss=0.2026, simple_loss=0.2799, pruned_loss=0.06268, over 4803.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2778, pruned_loss=0.04898, over 939156.21 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:26:29,382 INFO [train.py:1114] (2/4) Epoch 13, batch 8500, loss[loss=0.1972, simple_loss=0.2745, pruned_loss=0.05997, over 4628.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2763, pruned_loss=0.04852, over 938836.33 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:29:35,083 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.221e+01 5.689e+01 6.230e+01 7.373e+01 1.057e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 15:30:03,282 INFO [train.py:1114] (2/4) Epoch 13, batch 8550, loss[loss=0.1485, simple_loss=0.2356, pruned_loss=0.03063, over 4796.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2747, pruned_loss=0.04794, over 939301.03 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:30:41,168 INFO [train.py:1114] (2/4) Epoch 13, batch 8600, loss[loss=0.2082, simple_loss=0.3021, pruned_loss=0.05721, over 4799.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2745, pruned_loss=0.04814, over 939125.62 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:30:54,283 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.590e+01 5.714e+01 6.617e+01 7.604e+01 1.022e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-28 15:31:16,116 INFO [train.py:1114] (2/4) Epoch 13, batch 8650, loss[loss=0.19, simple_loss=0.2811, pruned_loss=0.04947, over 4913.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2745, pruned_loss=0.04862, over 940407.86 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0
+2024-07-28 15:36:43,569 INFO [train.py:1114] (2/4) Epoch 13, batch 8700, loss[loss=0.1821, simple_loss=0.265, pruned_loss=0.0496, over 4758.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2755, pruned_loss=0.04922, over 937867.90 frames. ], batch size: 13, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:36:59,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.561e+01 6.137e+01 6.917e+01 9.151e+01, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 15:37:34,701 INFO [train.py:1114] (2/4) Epoch 13, batch 8750, loss[loss=0.2187, simple_loss=0.3098, pruned_loss=0.06385, over 4672.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2751, pruned_loss=0.04936, over 937106.54 frames. ], batch size: 15, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:38:09,909 INFO [train.py:1114] (2/4) Epoch 13, batch 8800, loss[loss=0.194, simple_loss=0.2918, pruned_loss=0.04812, over 4934.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2761, pruned_loss=0.04965, over 937913.88 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:38:21,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.841e+01 6.340e+01 7.291e+01 9.820e+01, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 15:38:43,123 INFO [train.py:1114] (2/4) Epoch 13, batch 8850, loss[loss=0.1883, simple_loss=0.28, pruned_loss=0.04829, over 4599.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2766, pruned_loss=0.05025, over 932463.84 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:39:15,586 INFO [train.py:1114] (2/4) Epoch 13, batch 8900, loss[loss=0.1628, simple_loss=0.2554, pruned_loss=0.03512, over 4951.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2772, pruned_loss=0.05032, over 930287.78 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:39:32,297 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.752e+01 6.427e+01 7.462e+01 1.101e+02, threshold=1.285e+02, percent-clipped=0.0
+2024-07-28 15:39:53,969 INFO [train.py:1114] (2/4) Epoch 13, batch 8950, loss[loss=0.1732, simple_loss=0.2676, pruned_loss=0.0394, over 4563.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2763, pruned_loss=0.05021, over 930903.77 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:40:38,250 INFO [train.py:1114] (2/4) Epoch 13, batch 9000, loss[loss=0.1529, simple_loss=0.2405, pruned_loss=0.03264, over 4648.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2748, pruned_loss=0.04969, over 933794.43 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:40:38,250 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 15:42:48,985 INFO [train.py:1146] (2/4) Epoch 13, validation: loss=0.1657, simple_loss=0.2696, pruned_loss=0.03096, over 944034.00 frames.
+2024-07-28 15:42:48,986 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 15:45:15,803 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.562e+01 6.322e+01 7.112e+01 1.143e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 15:45:39,153 INFO [train.py:1114] (2/4) Epoch 13, batch 9050, loss[loss=0.1608, simple_loss=0.2389, pruned_loss=0.0414, over 4510.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2745, pruned_loss=0.04921, over 934292.26 frames. ], batch size: 10, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:46:11,972 INFO [train.py:1114] (2/4) Epoch 13, batch 9100, loss[loss=0.1666, simple_loss=0.2591, pruned_loss=0.03699, over 4934.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2741, pruned_loss=0.0495, over 936978.70 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:46:22,479 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.613e+01 6.012e+01 6.953e+01 8.806e+01, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 15:47:19,812 INFO [train.py:1114] (2/4) Epoch 13, batch 9150, loss[loss=0.1884, simple_loss=0.2897, pruned_loss=0.04355, over 4816.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2744, pruned_loss=0.04911, over 935711.81 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0
+2024-07-28 15:48:41,189 INFO [train.py:1114] (2/4) Epoch 13, batch 9200, loss[loss=0.1666, simple_loss=0.2551, pruned_loss=0.03908, over 4861.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2741, pruned_loss=0.04881, over 937679.47 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:48:51,963 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.167e+01 6.927e+01 1.004e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 15:49:12,554 INFO [train.py:1114] (2/4) Epoch 13, batch 9250, loss[loss=0.1848, simple_loss=0.2851, pruned_loss=0.0422, over 4643.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2748, pruned_loss=0.0489, over 938524.82 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:52:56,047 INFO [train.py:1114] (2/4) Epoch 13, batch 9300, loss[loss=0.1763, simple_loss=0.2598, pruned_loss=0.04638, over 4783.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2742, pruned_loss=0.04881, over 937968.62 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:53:06,731 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.420e+01 5.839e+01 6.596e+01 1.003e+02, threshold=1.168e+02, percent-clipped=0.0
+2024-07-28 15:53:36,044 INFO [train.py:1114] (2/4) Epoch 13, batch 9350, loss[loss=0.1658, simple_loss=0.2531, pruned_loss=0.03923, over 4811.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2745, pruned_loss=0.04877, over 934953.13 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:54:08,556 INFO [train.py:1114] (2/4) Epoch 13, batch 9400, loss[loss=0.186, simple_loss=0.2752, pruned_loss=0.04837, over 4682.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2755, pruned_loss=0.04955, over 932733.10 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:54:19,728 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.585e+01 5.549e+01 6.208e+01 6.780e+01 1.030e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 15:54:40,623 INFO [train.py:1114] (2/4) Epoch 13, batch 9450, loss[loss=0.1534, simple_loss=0.239, pruned_loss=0.03394, over 4799.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2748, pruned_loss=0.04872, over 932146.81 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:55:11,718 INFO [train.py:1114] (2/4) Epoch 13, batch 9500, loss[loss=0.201, simple_loss=0.2841, pruned_loss=0.05899, over 4703.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04942, over 934214.80 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:55:22,346 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+01 5.491e+01 5.977e+01 6.811e+01 8.816e+01, threshold=1.195e+02, percent-clipped=0.0
+2024-07-28 15:55:43,373 INFO [train.py:1114] (2/4) Epoch 13, batch 9550, loss[loss=0.1853, simple_loss=0.2691, pruned_loss=0.0508, over 4770.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2759, pruned_loss=0.04945, over 931348.48 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:56:15,431 INFO [train.py:1114] (2/4) Epoch 13, batch 9600, loss[loss=0.287, simple_loss=0.3327, pruned_loss=0.1207, over 3229.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2766, pruned_loss=0.04973, over 929984.12 frames. ], batch size: 35, lr: 5.67e-03, grad_scale: 32.0
+2024-07-28 15:56:26,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+01 5.951e+01 6.565e+01 7.484e+01 1.008e+02, threshold=1.313e+02, percent-clipped=0.0
+2024-07-28 15:56:47,960 INFO [train.py:1114] (2/4) Epoch 13, batch 9650, loss[loss=0.1986, simple_loss=0.2798, pruned_loss=0.05865, over 4848.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2773, pruned_loss=0.05025, over 926107.40 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 15:57:24,273 INFO [train.py:1114] (2/4) Epoch 13, batch 9700, loss[loss=0.1725, simple_loss=0.2638, pruned_loss=0.04059, over 4106.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2779, pruned_loss=0.05068, over 923479.11 frames. ], batch size: 25, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 15:57:34,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.912e+01 5.596e+01 6.037e+01 6.865e+01 8.980e+01, threshold=1.207e+02, percent-clipped=0.0
+2024-07-28 15:57:59,696 INFO [train.py:1114] (2/4) Epoch 13, batch 9750, loss[loss=0.2128, simple_loss=0.2921, pruned_loss=0.06676, over 4671.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.277, pruned_loss=0.04998, over 924438.37 frames. ], batch size: 15, lr: 5.66e-03, grad_scale: 64.0
+2024-07-28 16:01:45,799 INFO [train.py:1114] (2/4) Epoch 13, batch 9800, loss[loss=0.1547, simple_loss=0.2478, pruned_loss=0.03081, over 4715.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2755, pruned_loss=0.04924, over 924185.26 frames. ], batch size: 12, lr: 5.66e-03, grad_scale: 64.0
+2024-07-28 16:02:08,122 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.529e+01 5.638e+01 6.459e+01 7.664e+01 1.106e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 16:03:21,597 INFO [train.py:1114] (2/4) Epoch 13, batch 9850, loss[loss=0.178, simple_loss=0.2697, pruned_loss=0.04313, over 4909.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2767, pruned_loss=0.05005, over 927100.74 frames. ], batch size: 15, lr: 5.66e-03, grad_scale: 64.0
+2024-07-28 16:05:30,411 INFO [train.py:1114] (2/4) Epoch 13, batch 9900, loss[loss=0.2145, simple_loss=0.2982, pruned_loss=0.06543, over 4829.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2768, pruned_loss=0.05021, over 926535.68 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:06:49,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+01 5.715e+01 6.519e+01 7.339e+01 1.147e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 16:07:11,626 INFO [train.py:1114] (2/4) Epoch 13, batch 9950, loss[loss=0.1713, simple_loss=0.2549, pruned_loss=0.0439, over 4803.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2766, pruned_loss=0.05052, over 929217.74 frames. ], batch size: 11, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:07:50,042 INFO [train.py:1114] (2/4) Epoch 13, batch 10000, loss[loss=0.2128, simple_loss=0.2955, pruned_loss=0.06508, over 4669.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2791, pruned_loss=0.05107, over 927208.54 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0
+2024-07-28 16:07:58,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs.
limit=15.0 +2024-07-28 16:08:01,260 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.743e+01 6.303e+01 7.198e+01 1.105e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:08:16,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=176944.0, ans=0.025 +2024-07-28 16:08:21,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176957.33333333334, ans=0.125 +2024-07-28 16:08:22,354 INFO [train.py:1114] (2/4) Epoch 13, batch 10050, loss[loss=0.2557, simple_loss=0.3306, pruned_loss=0.09043, over 3552.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.283, pruned_loss=0.05335, over 915983.59 frames. ], batch size: 36, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:08:32,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=176970.66666666666, ans=0.0 +2024-07-28 16:08:51,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=177010.66666666666, ans=0.125 +2024-07-28 16:08:55,886 INFO [train.py:1114] (2/4) Epoch 13, batch 10100, loss[loss=0.2642, simple_loss=0.3396, pruned_loss=0.09446, over 3159.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2884, pruned_loss=0.05835, over 863443.23 frames. ], batch size: 36, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:09:02,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=177037.33333333334, ans=0.125 +2024-07-28 16:09:05,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=177037.33333333334, ans=0.125 +2024-07-28 16:09:05,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=177037.33333333334, ans=0.0 +2024-07-28 16:09:07,530 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+01 6.562e+01 7.156e+01 7.782e+01 1.093e+02, threshold=1.431e+02, percent-clipped=0.0 +2024-07-28 16:09:19,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=177064.0, ans=0.0 +2024-07-28 16:09:20,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=177064.0, ans=0.0 +2024-07-28 16:09:20,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.34 vs. limit=6.0 +2024-07-28 16:09:28,320 INFO [train.py:1114] (2/4) Epoch 13, batch 10150, loss[loss=0.2941, simple_loss=0.3547, pruned_loss=0.1167, over 3228.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2925, pruned_loss=0.06258, over 823118.44 frames. ], batch size: 35, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:09:31,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=177090.66666666666, ans=0.0 +2024-07-28 16:09:31,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177090.66666666666, ans=0.1 +2024-07-28 16:09:34,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.78 vs. 
limit=6.0 +2024-07-28 16:09:34,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.17 vs. limit=22.5 +2024-07-28 16:09:44,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177117.33333333334, ans=0.125 +2024-07-28 16:10:15,116 INFO [train.py:1114] (2/4) Epoch 13, batch 10200, loss[loss=0.2477, simple_loss=0.3047, pruned_loss=0.09535, over 3249.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2961, pruned_loss=0.06619, over 790899.66 frames. ], batch size: 36, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:10:16,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177157.33333333334, ans=0.1 +2024-07-28 16:10:21,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=177170.66666666666, ans=0.125 +2024-07-28 16:10:26,155 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.992e+01 6.629e+01 7.003e+01 7.390e+01 9.064e+01, threshold=1.401e+02, percent-clipped=0.0 +2024-07-28 16:11:16,417 INFO [train.py:1114] (2/4) Epoch 14, batch 0, loss[loss=0.151, simple_loss=0.2455, pruned_loss=0.02828, over 4852.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2455, pruned_loss=0.02828, over 4852.00 frames. ], batch size: 12, lr: 5.45e-03, grad_scale: 32.0 +2024-07-28 16:11:16,418 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 16:14:51,230 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.3091, 4.7039, 4.6293, 5.0869], device='cuda:2') +2024-07-28 16:14:55,767 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1673, simple_loss=0.2724, pruned_loss=0.03104, over 944034.00 frames. +2024-07-28 16:14:55,768 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 16:15:07,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=177186.66666666666, ans=0.0 +2024-07-28 16:15:07,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177186.66666666666, ans=0.125 +2024-07-28 16:15:35,847 INFO [train.py:1114] (2/4) Epoch 14, batch 50, loss[loss=0.1623, simple_loss=0.2431, pruned_loss=0.0408, over 4607.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2732, pruned_loss=0.04616, over 206553.03 frames. 
], batch size: 11, lr: 5.45e-03, grad_scale: 32.0 +2024-07-28 16:15:37,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=177253.33333333334, ans=0.125 +2024-07-28 16:15:45,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=177253.33333333334, ans=0.025 +2024-07-28 16:15:50,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=177266.66666666666, ans=0.0 +2024-07-28 16:15:51,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=177266.66666666666, ans=0.09899494936611666 +2024-07-28 16:15:52,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=177266.66666666666, ans=0.2 +2024-07-28 16:16:02,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=177280.0, ans=0.2 +2024-07-28 16:16:09,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.69 vs. limit=6.0 +2024-07-28 16:16:17,034 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.504e+01 5.430e+01 5.954e+01 6.690e+01 1.022e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 16:16:19,823 INFO [train.py:1114] (2/4) Epoch 14, batch 100, loss[loss=0.1599, simple_loss=0.2458, pruned_loss=0.03704, over 4645.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2745, pruned_loss=0.0463, over 365514.31 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:16:21,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177320.0, ans=0.125 +2024-07-28 16:16:23,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.83 vs. limit=22.5 +2024-07-28 16:16:29,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=177333.33333333334, ans=0.02 +2024-07-28 16:17:05,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=177360.0, ans=0.2 +2024-07-28 16:17:10,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=177373.33333333334, ans=0.125 +2024-07-28 16:17:13,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=177373.33333333334, ans=0.125 +2024-07-28 16:17:14,956 INFO [train.py:1114] (2/4) Epoch 14, batch 150, loss[loss=0.1684, simple_loss=0.2437, pruned_loss=0.04649, over 4616.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2724, pruned_loss=0.04553, over 494265.93 frames. 
], batch size: 11, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:17:28,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=177400.0, ans=15.0 +2024-07-28 16:17:37,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177426.66666666666, ans=0.125 +2024-07-28 16:17:37,748 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:17:45,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=177426.66666666666, ans=0.2 +2024-07-28 16:17:52,158 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.424e+01 5.956e+01 7.040e+01 1.129e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 16:17:57,482 INFO [train.py:1114] (2/4) Epoch 14, batch 200, loss[loss=0.2023, simple_loss=0.2989, pruned_loss=0.05282, over 4594.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2723, pruned_loss=0.04704, over 593764.81 frames. ], batch size: 21, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:18:31,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=177506.66666666666, ans=0.125 +2024-07-28 16:18:35,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=177506.66666666666, ans=0.125 +2024-07-28 16:18:37,134 INFO [train.py:1114] (2/4) Epoch 14, batch 250, loss[loss=0.1829, simple_loss=0.2712, pruned_loss=0.04724, over 4645.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.273, pruned_loss=0.04783, over 670410.27 frames. ], batch size: 16, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:18:41,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.16 vs. limit=6.0 +2024-07-28 16:18:55,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=177546.66666666666, ans=22.5 +2024-07-28 16:18:55,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.22 vs. limit=22.5 +2024-07-28 16:19:00,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=177560.0, ans=0.125 +2024-07-28 16:19:03,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=177573.33333333334, ans=0.125 +2024-07-28 16:19:08,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.662e+01 6.232e+01 7.449e+01 1.133e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 16:19:11,395 INFO [train.py:1114] (2/4) Epoch 14, batch 300, loss[loss=0.2, simple_loss=0.3001, pruned_loss=0.04998, over 4802.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.04791, over 730147.59 frames. 
], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:11,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177586.66666666666, ans=0.1 +2024-07-28 16:19:12,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177586.66666666666, ans=0.125 +2024-07-28 16:19:31,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177613.33333333334, ans=0.1 +2024-07-28 16:19:37,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177626.66666666666, ans=0.125 +2024-07-28 16:19:48,718 INFO [train.py:1114] (2/4) Epoch 14, batch 350, loss[loss=0.1539, simple_loss=0.2526, pruned_loss=0.02756, over 4921.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2746, pruned_loss=0.04756, over 776164.20 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:51,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=177653.33333333334, ans=0.125 +2024-07-28 16:19:53,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.48 vs. limit=15.0 +2024-07-28 16:20:34,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=177693.33333333334, ans=0.035 +2024-07-28 16:20:40,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=177706.66666666666, ans=0.1 +2024-07-28 16:20:40,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-07-28 16:20:44,600 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.533e+01 5.994e+01 6.845e+01 1.570e+02, threshold=1.199e+02, percent-clipped=1.0 +2024-07-28 16:20:47,257 INFO [train.py:1114] (2/4) Epoch 14, batch 400, loss[loss=0.1591, simple_loss=0.2497, pruned_loss=0.03422, over 4699.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2733, pruned_loss=0.04717, over 813347.03 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:20:57,107 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.37 vs. 
limit=6.0 +2024-07-28 16:20:57,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=177733.33333333334, ans=0.025 +2024-07-28 16:21:04,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=177746.66666666666, ans=0.125 +2024-07-28 16:21:06,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=177746.66666666666, ans=0.2 +2024-07-28 16:21:19,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177773.33333333334, ans=0.1 +2024-07-28 16:21:20,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177773.33333333334, ans=0.125 +2024-07-28 16:21:22,189 INFO [train.py:1114] (2/4) Epoch 14, batch 450, loss[loss=0.182, simple_loss=0.2688, pruned_loss=0.04764, over 4636.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2729, pruned_loss=0.04712, over 838631.50 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:22,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=177786.66666666666, ans=0.2 +2024-07-28 16:21:28,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0 +2024-07-28 16:21:49,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=177840.0, ans=0.1 +2024-07-28 16:21:52,674 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.509e+01 6.147e+01 6.796e+01 9.434e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 16:21:55,365 INFO [train.py:1114] (2/4) Epoch 14, batch 500, loss[loss=0.22, simple_loss=0.3067, pruned_loss=0.06667, over 4689.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2721, pruned_loss=0.04674, over 861126.63 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:22:14,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=177880.0, ans=0.125 +2024-07-28 16:22:23,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=177906.66666666666, ans=0.0 +2024-07-28 16:22:29,015 INFO [train.py:1114] (2/4) Epoch 14, batch 550, loss[loss=0.1959, simple_loss=0.295, pruned_loss=0.04836, over 4626.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2713, pruned_loss=0.04626, over 877086.16 frames. 
], batch size: 17, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:22:29,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=177920.0, ans=0.125 +2024-07-28 16:22:37,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177933.33333333334, ans=0.125 +2024-07-28 16:22:37,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=177933.33333333334, ans=0.2 +2024-07-28 16:22:37,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=177933.33333333334, ans=0.125 +2024-07-28 16:22:52,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=177960.0, ans=0.0 +2024-07-28 16:22:57,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=177973.33333333334, ans=0.04949747468305833 +2024-07-28 16:22:59,820 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.791e+01 6.101e+01 6.506e+01 8.521e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 16:23:02,687 INFO [train.py:1114] (2/4) Epoch 14, batch 600, loss[loss=0.2216, simple_loss=0.311, pruned_loss=0.06613, over 4619.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2717, pruned_loss=0.04633, over 891607.98 frames. ], batch size: 16, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:23:03,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=177986.66666666666, ans=0.0 +2024-07-28 16:23:05,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-07-28 16:23:06,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=177986.66666666666, ans=0.0 +2024-07-28 16:23:11,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=178000.0, ans=0.05 +2024-07-28 16:23:15,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. limit=6.0 +2024-07-28 16:23:46,284 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:23:47,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178040.0, ans=0.125 +2024-07-28 16:23:53,582 INFO [train.py:1114] (2/4) Epoch 14, batch 650, loss[loss=0.1776, simple_loss=0.2681, pruned_loss=0.04355, over 4760.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2716, pruned_loss=0.04683, over 903607.37 frames. 
], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:19,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=178093.33333333334, ans=10.0 +2024-07-28 16:24:24,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178106.66666666666, ans=0.0 +2024-07-28 16:24:27,864 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.659e+01 6.159e+01 7.127e+01 1.309e+02, threshold=1.232e+02, percent-clipped=1.0 +2024-07-28 16:24:30,519 INFO [train.py:1114] (2/4) Epoch 14, batch 700, loss[loss=0.1757, simple_loss=0.2695, pruned_loss=0.04098, over 4638.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2724, pruned_loss=0.04702, over 911617.57 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:31,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=178120.0, ans=0.125 +2024-07-28 16:24:32,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178120.0, ans=0.0 +2024-07-28 16:24:33,003 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.09 vs. limit=15.0 +2024-07-28 16:24:45,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=178146.66666666666, ans=0.2 +2024-07-28 16:24:47,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178146.66666666666, ans=0.1 +2024-07-28 16:24:48,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=178146.66666666666, ans=0.0 +2024-07-28 16:26:48,787 INFO [train.py:1114] (2/4) Epoch 14, batch 750, loss[loss=0.1819, simple_loss=0.2678, pruned_loss=0.04799, over 4687.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2717, pruned_loss=0.0463, over 918282.23 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:26:50,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.91 vs. limit=22.5 +2024-07-28 16:26:53,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178186.66666666666, ans=0.1 +2024-07-28 16:26:54,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=178186.66666666666, ans=0.025 +2024-07-28 16:26:56,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=178200.0, ans=0.125 +2024-07-28 16:26:58,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=178200.0, ans=0.125 +2024-07-28 16:27:18,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.80 vs. 
limit=22.5 +2024-07-28 16:27:18,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178240.0, ans=0.125 +2024-07-28 16:27:20,241 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.515e+01 6.007e+01 6.700e+01 1.144e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:27:22,822 INFO [train.py:1114] (2/4) Epoch 14, batch 800, loss[loss=0.1865, simple_loss=0.2765, pruned_loss=0.04828, over 4854.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.272, pruned_loss=0.04667, over 923433.85 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:43,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.85 vs. limit=10.0 +2024-07-28 16:27:50,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178306.66666666666, ans=0.125 +2024-07-28 16:27:56,262 INFO [train.py:1114] (2/4) Epoch 14, batch 850, loss[loss=0.1628, simple_loss=0.2572, pruned_loss=0.03422, over 4658.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2721, pruned_loss=0.0471, over 927871.62 frames. ], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:59,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=178320.0, ans=0.07 +2024-07-28 16:28:02,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178333.33333333334, ans=0.125 +2024-07-28 16:28:13,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=178346.66666666666, ans=0.0 +2024-07-28 16:28:18,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=178360.0, ans=0.125 +2024-07-28 16:28:20,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178360.0, ans=0.1 +2024-07-28 16:28:27,737 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.522e+01 6.221e+01 7.119e+01 8.769e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 16:28:30,549 INFO [train.py:1114] (2/4) Epoch 14, batch 900, loss[loss=0.1691, simple_loss=0.2612, pruned_loss=0.03855, over 4850.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2733, pruned_loss=0.04801, over 928450.05 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:28:30,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=178386.66666666666, ans=0.125 +2024-07-28 16:28:31,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178386.66666666666, ans=0.0 +2024-07-28 16:28:45,836 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. limit=15.0 +2024-07-28 16:29:01,257 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. 
limit=15.0 +2024-07-28 16:29:03,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=178440.0, ans=0.125 +2024-07-28 16:29:03,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178440.0, ans=0.125 +2024-07-28 16:29:03,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178440.0, ans=0.125 +2024-07-28 16:29:04,397 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=178440.0, ans=0.0 +2024-07-28 16:29:05,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=178453.33333333334, ans=0.125 +2024-07-28 16:29:06,310 INFO [train.py:1114] (2/4) Epoch 14, batch 950, loss[loss=0.165, simple_loss=0.2666, pruned_loss=0.03171, over 4784.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2736, pruned_loss=0.04785, over 930410.56 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:11,485 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.02 vs. limit=22.5 +2024-07-28 16:29:16,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=12.0 +2024-07-28 16:29:24,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178480.0, ans=0.0 +2024-07-28 16:29:30,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=178493.33333333334, ans=0.0 +2024-07-28 16:29:37,160 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.573e+01 6.236e+01 6.880e+01 1.050e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 16:29:38,641 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:29:39,833 INFO [train.py:1114] (2/4) Epoch 14, batch 1000, loss[loss=0.1736, simple_loss=0.2711, pruned_loss=0.03802, over 4959.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2744, pruned_loss=0.04847, over 930184.46 frames. 
], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:42,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=178520.0, ans=0.125 +2024-07-28 16:29:42,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=178520.0, ans=0.0 +2024-07-28 16:30:01,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=178560.0, ans=22.5 +2024-07-28 16:30:03,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=178560.0, ans=0.025 +2024-07-28 16:30:05,362 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:30:10,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2.whitening_limit, batch_count=178573.33333333334, ans=15.0 +2024-07-28 16:30:16,064 INFO [train.py:1114] (2/4) Epoch 14, batch 1050, loss[loss=0.2039, simple_loss=0.2939, pruned_loss=0.05696, over 4874.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.274, pruned_loss=0.04829, over 932482.59 frames. ], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:30:25,055 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.68 vs. limit=10.0 +2024-07-28 16:30:28,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=178600.0, ans=0.0 +2024-07-28 16:30:34,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178613.33333333334, ans=0.1 +2024-07-28 16:30:38,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178626.66666666666, ans=0.125 +2024-07-28 16:30:39,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178626.66666666666, ans=0.1 +2024-07-28 16:30:48,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-07-28 16:30:48,750 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.778e+01 6.259e+01 7.627e+01 1.146e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 16:30:53,961 INFO [train.py:1114] (2/4) Epoch 14, batch 1100, loss[loss=0.1914, simple_loss=0.2781, pruned_loss=0.05232, over 4891.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.274, pruned_loss=0.04856, over 934985.18 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:30:59,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=178653.33333333334, ans=0.0 +2024-07-28 16:31:12,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=178680.0, ans=0.0 +2024-07-28 16:31:26,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=178706.66666666666, ans=0.125 +2024-07-28 16:31:29,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.96 vs. 
limit=12.0 +2024-07-28 16:31:31,674 INFO [train.py:1114] (2/4) Epoch 14, batch 1150, loss[loss=0.2002, simple_loss=0.2906, pruned_loss=0.05488, over 4911.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2744, pruned_loss=0.04896, over 934615.20 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:31:34,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178720.0, ans=0.125 +2024-07-28 16:31:40,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=178733.33333333334, ans=0.0 +2024-07-28 16:31:43,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=178733.33333333334, ans=0.125 +2024-07-28 16:31:44,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=178733.33333333334, ans=0.0 +2024-07-28 16:31:49,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=15.0 +2024-07-28 16:31:50,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=178746.66666666666, ans=0.2 +2024-07-28 16:31:50,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=178746.66666666666, ans=0.04949747468305833 +2024-07-28 16:31:55,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178760.0, ans=0.125 +2024-07-28 16:32:02,873 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.551e+01 6.026e+01 6.659e+01 1.121e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 16:32:05,593 INFO [train.py:1114] (2/4) Epoch 14, batch 1200, loss[loss=0.1896, simple_loss=0.2807, pruned_loss=0.04922, over 4875.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.276, pruned_loss=0.04934, over 934083.22 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:19,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=178813.33333333334, ans=0.125 +2024-07-28 16:32:22,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=178813.33333333334, ans=0.0 +2024-07-28 16:32:32,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178840.0, ans=0.125 +2024-07-28 16:32:33,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=178840.0, ans=0.0 +2024-07-28 16:32:38,591 INFO [train.py:1114] (2/4) Epoch 14, batch 1250, loss[loss=0.1723, simple_loss=0.2635, pruned_loss=0.0405, over 4801.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2758, pruned_loss=0.04862, over 937849.14 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:42,357 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.73 vs. 
limit=15.0 +2024-07-28 16:32:47,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=178866.66666666666, ans=0.125 +2024-07-28 16:32:49,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.06 vs. limit=10.0 +2024-07-28 16:32:52,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178880.0, ans=0.125 +2024-07-28 16:33:09,551 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.596e+01 6.109e+01 6.927e+01 8.665e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:33:10,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=178906.66666666666, ans=0.0 +2024-07-28 16:33:12,230 INFO [train.py:1114] (2/4) Epoch 14, batch 1300, loss[loss=0.185, simple_loss=0.2632, pruned_loss=0.05337, over 4719.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2752, pruned_loss=0.04814, over 938967.45 frames. ], batch size: 19, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:13,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=178920.0, ans=0.125 +2024-07-28 16:33:15,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=2.91 vs. limit=12.0 +2024-07-28 16:33:16,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=178920.0, ans=0.0 +2024-07-28 16:33:19,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.86 vs. limit=15.0 +2024-07-28 16:33:20,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=178933.33333333334, ans=0.2 +2024-07-28 16:33:26,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=178946.66666666666, ans=0.0 +2024-07-28 16:33:26,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=178946.66666666666, ans=0.125 +2024-07-28 16:33:26,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178946.66666666666, ans=0.1 +2024-07-28 16:33:32,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=178960.0, ans=0.025 +2024-07-28 16:33:32,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178960.0, ans=0.0 +2024-07-28 16:33:44,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=178986.66666666666, ans=0.0 +2024-07-28 16:33:45,452 INFO [train.py:1114] (2/4) Epoch 14, batch 1350, loss[loss=0.1671, simple_loss=0.2567, pruned_loss=0.03878, over 4762.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2735, pruned_loss=0.04718, over 940993.67 frames. 
], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:49,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=178986.66666666666, ans=0.0 +2024-07-28 16:34:00,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=179013.33333333334, ans=0.025 +2024-07-28 16:34:05,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=179026.66666666666, ans=0.0 +2024-07-28 16:34:13,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=179040.0, ans=0.125 +2024-07-28 16:34:18,362 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.737e+01 5.767e+01 6.518e+01 7.803e+01 1.206e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 16:34:21,085 INFO [train.py:1114] (2/4) Epoch 14, batch 1400, loss[loss=0.1459, simple_loss=0.2366, pruned_loss=0.0276, over 4719.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2731, pruned_loss=0.04734, over 942980.52 frames. ], batch size: 11, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:22,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.13 vs. limit=22.5 +2024-07-28 16:34:28,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179066.66666666666, ans=0.0 +2024-07-28 16:34:29,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.39 vs. limit=10.0 +2024-07-28 16:34:33,380 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.88 vs. limit=15.0 +2024-07-28 16:34:46,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.20 vs. limit=22.5 +2024-07-28 16:34:54,883 INFO [train.py:1114] (2/4) Epoch 14, batch 1450, loss[loss=0.2083, simple_loss=0.2988, pruned_loss=0.05885, over 4696.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2726, pruned_loss=0.04698, over 942871.30 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:56,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179120.0, ans=0.125 +2024-07-28 16:34:59,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=179120.0, ans=0.2 +2024-07-28 16:35:04,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=179133.33333333334, ans=0.125 +2024-07-28 16:35:18,572 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.37 vs. 
limit=15.0 +2024-07-28 16:35:18,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179160.0, ans=0.1 +2024-07-28 16:35:25,328 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+01 5.717e+01 6.158e+01 6.700e+01 8.649e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 16:35:28,157 INFO [train.py:1114] (2/4) Epoch 14, batch 1500, loss[loss=0.1943, simple_loss=0.2896, pruned_loss=0.04945, over 4803.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2744, pruned_loss=0.04774, over 942568.13 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:35:33,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=179186.66666666666, ans=0.2 +2024-07-28 16:35:36,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179200.0, ans=0.125 +2024-07-28 16:35:37,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=179200.0, ans=0.0 +2024-07-28 16:35:40,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=179200.0, ans=0.125 +2024-07-28 16:35:47,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=179213.33333333334, ans=0.125 +2024-07-28 16:35:49,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-07-28 16:35:53,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 16:35:58,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179240.0, ans=0.1 +2024-07-28 16:36:02,054 INFO [train.py:1114] (2/4) Epoch 14, batch 1550, loss[loss=0.1787, simple_loss=0.2728, pruned_loss=0.04233, over 4903.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2758, pruned_loss=0.04859, over 939472.24 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:36:03,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=179253.33333333334, ans=0.025 +2024-07-28 16:36:12,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179266.66666666666, ans=0.125 +2024-07-28 16:36:15,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.51 vs. 
limit=15.0 +2024-07-28 16:36:24,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=179293.33333333334, ans=0.07 +2024-07-28 16:36:33,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=179306.66666666666, ans=0.1 +2024-07-28 16:36:34,738 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.675e+01 6.307e+01 6.776e+01 1.138e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:36:35,809 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.11 vs. limit=15.0 +2024-07-28 16:36:39,295 INFO [train.py:1114] (2/4) Epoch 14, batch 1600, loss[loss=0.1842, simple_loss=0.2861, pruned_loss=0.04119, over 4878.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.275, pruned_loss=0.04842, over 938545.56 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 32.0 +2024-07-28 16:36:43,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179320.0, ans=0.125 +2024-07-28 16:37:02,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=179360.0, ans=0.2 +2024-07-28 16:37:14,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.77 vs. limit=22.5 +2024-07-28 16:37:14,611 INFO [train.py:1114] (2/4) Epoch 14, batch 1650, loss[loss=0.2136, simple_loss=0.3097, pruned_loss=0.05878, over 4660.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2744, pruned_loss=0.04807, over 937780.31 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:37:32,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=179413.33333333334, ans=0.025 +2024-07-28 16:37:45,308 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.723e+01 6.070e+01 6.636e+01 1.142e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 16:37:48,015 INFO [train.py:1114] (2/4) Epoch 14, batch 1700, loss[loss=0.1949, simple_loss=0.2733, pruned_loss=0.05818, over 4709.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2747, pruned_loss=0.04829, over 939525.10 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:37:55,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=179466.66666666666, ans=0.0 +2024-07-28 16:38:00,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.98 vs. limit=12.0 +2024-07-28 16:38:04,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=179480.0, ans=0.125 +2024-07-28 16:38:12,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=12.0 +2024-07-28 16:38:12,871 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:38:21,301 INFO [train.py:1114] (2/4) Epoch 14, batch 1750, loss[loss=0.1734, simple_loss=0.2535, pruned_loss=0.04669, over 4802.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2741, pruned_loss=0.04843, over 940217.90 frames. 
], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:38:25,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=179520.0, ans=0.125 +2024-07-28 16:38:26,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=179520.0, ans=0.125 +2024-07-28 16:38:30,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179533.33333333334, ans=0.125 +2024-07-28 16:38:33,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=179533.33333333334, ans=0.0 +2024-07-28 16:38:49,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=179573.33333333334, ans=0.2 +2024-07-28 16:38:52,732 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.501e+01 6.182e+01 7.069e+01 1.179e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 16:38:55,450 INFO [train.py:1114] (2/4) Epoch 14, batch 1800, loss[loss=0.1811, simple_loss=0.2636, pruned_loss=0.04933, over 4635.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.274, pruned_loss=0.04828, over 940604.53 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:39:14,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179613.33333333334, ans=0.0 +2024-07-28 16:39:26,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179640.0, ans=0.1 +2024-07-28 16:39:29,077 INFO [train.py:1114] (2/4) Epoch 14, batch 1850, loss[loss=0.1869, simple_loss=0.2719, pruned_loss=0.05099, over 4820.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2757, pruned_loss=0.04905, over 940574.30 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:39:33,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179653.33333333334, ans=0.1 +2024-07-28 16:39:34,289 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.32 vs. limit=15.0 +2024-07-28 16:39:41,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=179666.66666666666, ans=0.025 +2024-07-28 16:39:41,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=179666.66666666666, ans=0.125 +2024-07-28 16:39:44,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=179680.0, ans=0.125 +2024-07-28 16:39:53,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.69 vs. limit=22.5 +2024-07-28 16:40:01,785 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.632e+01 6.282e+01 7.683e+01 1.282e+02, threshold=1.256e+02, percent-clipped=1.0 +2024-07-28 16:42:23,066 INFO [train.py:1114] (2/4) Epoch 14, batch 1900, loss[loss=0.1876, simple_loss=0.2788, pruned_loss=0.04819, over 4662.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2756, pruned_loss=0.04864, over 941720.42 frames. 
], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:42:57,505 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0 +2024-07-28 16:43:19,252 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.82 vs. limit=15.0 +2024-07-28 16:43:22,169 INFO [train.py:1114] (2/4) Epoch 14, batch 1950, loss[loss=0.1618, simple_loss=0.2408, pruned_loss=0.04141, over 4896.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2773, pruned_loss=0.04915, over 943533.25 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:43:49,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=179840.0, ans=15.0 +2024-07-28 16:43:52,887 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.659e+01 6.185e+01 6.876e+01 9.171e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 16:43:53,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=179840.0, ans=0.125 +2024-07-28 16:43:53,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=179840.0, ans=0.125 +2024-07-28 16:43:55,618 INFO [train.py:1114] (2/4) Epoch 14, batch 2000, loss[loss=0.1756, simple_loss=0.2544, pruned_loss=0.04842, over 4795.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2771, pruned_loss=0.04881, over 941018.46 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:44:10,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=179880.0, ans=0.125 +2024-07-28 16:44:18,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=179893.33333333334, ans=0.04949747468305833 +2024-07-28 16:44:28,718 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.20 vs. limit=6.0 +2024-07-28 16:44:35,738 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.17 vs. limit=15.0 +2024-07-28 16:44:39,839 INFO [train.py:1114] (2/4) Epoch 14, batch 2050, loss[loss=0.1755, simple_loss=0.2505, pruned_loss=0.05023, over 4616.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2751, pruned_loss=0.04833, over 939021.07 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:45:07,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179960.0, ans=0.1 +2024-07-28 16:45:14,161 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.698e+01 6.586e+01 7.906e+01 1.162e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 16:45:16,944 INFO [train.py:1114] (2/4) Epoch 14, batch 2100, loss[loss=0.1711, simple_loss=0.2668, pruned_loss=0.03767, over 4754.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2739, pruned_loss=0.04779, over 940932.49 frames. 
], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:45:34,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=180013.33333333334, ans=0.0 +2024-07-28 16:45:35,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.38 vs. limit=15.0 +2024-07-28 16:45:38,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=180026.66666666666, ans=0.125 +2024-07-28 16:45:49,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=180040.0, ans=0.95 +2024-07-28 16:45:50,471 INFO [train.py:1114] (2/4) Epoch 14, batch 2150, loss[loss=0.1523, simple_loss=0.2424, pruned_loss=0.03108, over 4895.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2732, pruned_loss=0.04757, over 944172.20 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:45:52,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=180053.33333333334, ans=0.2 +2024-07-28 16:45:58,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180066.66666666666, ans=0.1 +2024-07-28 16:46:05,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=180080.0, ans=0.0 +2024-07-28 16:46:08,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180080.0, ans=0.1 +2024-07-28 16:46:13,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=180093.33333333334, ans=0.0 +2024-07-28 16:46:22,337 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.458e+01 6.111e+01 6.832e+01 1.017e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:46:36,559 INFO [train.py:1114] (2/4) Epoch 14, batch 2200, loss[loss=0.199, simple_loss=0.2924, pruned_loss=0.05285, over 4809.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2733, pruned_loss=0.04763, over 943567.54 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:46:36,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=180120.0, ans=0.125 +2024-07-28 16:46:48,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180120.0, ans=0.125 +2024-07-28 16:46:50,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180133.33333333334, ans=0.125 +2024-07-28 16:46:54,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.20 vs. 
limit=8.0 +2024-07-28 16:46:59,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=180146.66666666666, ans=0.125 +2024-07-28 16:47:09,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=180160.0, ans=0.125 +2024-07-28 16:47:16,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=180173.33333333334, ans=0.125 +2024-07-28 16:47:16,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=180173.33333333334, ans=0.125 +2024-07-28 16:47:17,909 INFO [train.py:1114] (2/4) Epoch 14, batch 2250, loss[loss=0.1998, simple_loss=0.2955, pruned_loss=0.05204, over 4697.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2732, pruned_loss=0.04747, over 941808.46 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:47:39,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180213.33333333334, ans=0.125 +2024-07-28 16:47:43,064 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:47:45,826 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:47:54,627 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:47:57,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=180240.0, ans=0.125 +2024-07-28 16:48:09,496 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.321e+01 5.481e+01 6.034e+01 6.798e+01 1.360e+02, threshold=1.207e+02, percent-clipped=1.0 +2024-07-28 16:48:20,576 INFO [train.py:1114] (2/4) Epoch 14, batch 2300, loss[loss=0.1537, simple_loss=0.2363, pruned_loss=0.03548, over 4936.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2721, pruned_loss=0.04727, over 939587.65 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:49:20,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=180253.33333333334, ans=15.0 +2024-07-28 16:49:24,960 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:49:27,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=180266.66666666666, ans=0.125 +2024-07-28 16:49:39,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180293.33333333334, ans=0.125 +2024-07-28 16:49:46,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=180306.66666666666, ans=0.125 +2024-07-28 16:49:54,917 INFO [train.py:1114] (2/4) Epoch 14, batch 2350, loss[loss=0.1936, simple_loss=0.2801, pruned_loss=0.0536, over 4634.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2723, pruned_loss=0.0473, over 941442.35 frames. 
], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:49:54,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=180320.0, ans=0.125 +2024-07-28 16:50:00,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=180320.0, ans=0.0 +2024-07-28 16:50:04,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=180333.33333333334, ans=0.07 +2024-07-28 16:50:07,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=180346.66666666666, ans=0.125 +2024-07-28 16:50:07,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=180346.66666666666, ans=0.125 +2024-07-28 16:50:12,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=180346.66666666666, ans=0.0 +2024-07-28 16:50:12,586 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:50:18,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=180360.0, ans=0.125 +2024-07-28 16:50:22,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=180373.33333333334, ans=0.125 +2024-07-28 16:50:26,415 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.752e+01 6.311e+01 7.505e+01 9.885e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 16:50:29,053 INFO [train.py:1114] (2/4) Epoch 14, batch 2400, loss[loss=0.149, simple_loss=0.239, pruned_loss=0.02948, over 4635.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04731, over 941083.11 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:50:44,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=180413.33333333334, ans=0.0 +2024-07-28 16:51:01,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.58 vs. limit=15.0 +2024-07-28 16:51:03,436 INFO [train.py:1114] (2/4) Epoch 14, batch 2450, loss[loss=0.1783, simple_loss=0.2708, pruned_loss=0.04293, over 4698.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2742, pruned_loss=0.04828, over 936325.07 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:04,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=180453.33333333334, ans=0.05 +2024-07-28 16:51:09,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0 +2024-07-28 16:51:22,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180480.0, ans=0.0 +2024-07-28 16:51:31,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.63 vs. 
limit=15.0 +2024-07-28 16:51:34,062 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.381e+01 5.494e+01 6.004e+01 6.734e+01 1.227e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:51:36,692 INFO [train.py:1114] (2/4) Epoch 14, batch 2500, loss[loss=0.1885, simple_loss=0.2752, pruned_loss=0.05094, over 4809.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2741, pruned_loss=0.04811, over 938359.25 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:38,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=180520.0, ans=0.07 +2024-07-28 16:51:57,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.08 vs. limit=22.5 +2024-07-28 16:52:08,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.91 vs. limit=10.0 +2024-07-28 16:52:11,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0 +2024-07-28 16:52:16,714 INFO [train.py:1114] (2/4) Epoch 14, batch 2550, loss[loss=0.1373, simple_loss=0.2255, pruned_loss=0.02453, over 4798.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2739, pruned_loss=0.04771, over 937863.29 frames. ], batch size: 11, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:52:29,998 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=6.0 +2024-07-28 16:52:30,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=180600.0, ans=0.05 +2024-07-28 16:52:33,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=180613.33333333334, ans=0.07 +2024-07-28 16:52:38,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=180626.66666666666, ans=0.0 +2024-07-28 16:52:48,649 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.545e+01 6.227e+01 6.776e+01 1.046e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 16:52:51,360 INFO [train.py:1114] (2/4) Epoch 14, batch 2600, loss[loss=0.146, simple_loss=0.2438, pruned_loss=0.02415, over 4905.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2738, pruned_loss=0.04753, over 937564.40 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:52:58,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=180666.66666666666, ans=0.09899494936611666 +2024-07-28 16:53:01,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=180666.66666666666, ans=0.125 +2024-07-28 16:53:09,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180680.0, ans=0.1 +2024-07-28 16:53:09,883 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.46 vs. 
limit=15.0 +2024-07-28 16:53:16,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=180693.33333333334, ans=0.125 +2024-07-28 16:53:19,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=180706.66666666666, ans=0.2 +2024-07-28 16:53:25,247 INFO [train.py:1114] (2/4) Epoch 14, batch 2650, loss[loss=0.2026, simple_loss=0.2911, pruned_loss=0.05704, over 4640.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2748, pruned_loss=0.04794, over 939691.40 frames. ], batch size: 16, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:53:26,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=180720.0, ans=0.125 +2024-07-28 16:53:31,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 16:53:48,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=180760.0, ans=0.125 +2024-07-28 16:53:48,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180760.0, ans=0.125 +2024-07-28 16:53:56,288 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.582e+01 5.983e+01 6.716e+01 1.150e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 16:53:58,987 INFO [train.py:1114] (2/4) Epoch 14, batch 2700, loss[loss=0.2068, simple_loss=0.3031, pruned_loss=0.05528, over 4739.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2747, pruned_loss=0.04799, over 939631.99 frames. ], batch size: 14, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:54:02,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=180786.66666666666, ans=15.0 +2024-07-28 16:54:03,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180786.66666666666, ans=0.1 +2024-07-28 16:54:03,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=180786.66666666666, ans=0.0 +2024-07-28 16:54:26,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=180840.0, ans=0.0 +2024-07-28 16:54:31,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=180840.0, ans=0.0 +2024-07-28 16:54:32,673 INFO [train.py:1114] (2/4) Epoch 14, batch 2750, loss[loss=0.1963, simple_loss=0.2838, pruned_loss=0.0544, over 4700.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2735, pruned_loss=0.04763, over 939616.70 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:54:38,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180853.33333333334, ans=0.0 +2024-07-28 16:54:42,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.83 vs. 
limit=15.0 +2024-07-28 16:54:52,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=180893.33333333334, ans=0.2 +2024-07-28 16:54:57,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=180893.33333333334, ans=0.05 +2024-07-28 16:55:03,429 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.739e+01 6.573e+01 7.646e+01 1.098e+02, threshold=1.315e+02, percent-clipped=0.0 +2024-07-28 16:55:06,170 INFO [train.py:1114] (2/4) Epoch 14, batch 2800, loss[loss=0.2729, simple_loss=0.3386, pruned_loss=0.1036, over 3515.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2741, pruned_loss=0.04807, over 938548.71 frames. ], batch size: 37, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:55:15,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=180933.33333333334, ans=0.125 +2024-07-28 16:55:19,643 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:55:30,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=180960.0, ans=0.125 +2024-07-28 16:55:37,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180973.33333333334, ans=0.125 +2024-07-28 16:55:42,101 INFO [train.py:1114] (2/4) Epoch 14, batch 2850, loss[loss=0.1768, simple_loss=0.2695, pruned_loss=0.0421, over 4966.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2745, pruned_loss=0.04839, over 936827.77 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:55:42,580 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.91 vs. limit=15.0 +2024-07-28 16:56:04,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=181013.33333333334, ans=0.5 +2024-07-28 16:56:04,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=181026.66666666666, ans=0.125 +2024-07-28 16:56:15,625 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.822e+01 6.351e+01 7.357e+01 1.031e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 16:56:16,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181040.0, ans=0.1 +2024-07-28 16:56:17,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=15.0 +2024-07-28 16:56:18,127 INFO [train.py:1114] (2/4) Epoch 14, batch 2900, loss[loss=0.1643, simple_loss=0.2533, pruned_loss=0.03767, over 4831.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2748, pruned_loss=0.04798, over 940495.03 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:56:31,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.46 vs. 
limit=15.0 +2024-07-28 16:56:39,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=181080.0, ans=0.125 +2024-07-28 16:56:57,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181093.33333333334, ans=0.1 +2024-07-28 16:57:04,976 INFO [train.py:1114] (2/4) Epoch 14, batch 2950, loss[loss=0.1612, simple_loss=0.2466, pruned_loss=0.03791, over 4709.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2738, pruned_loss=0.04836, over 939503.18 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:57:16,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=181120.0, ans=0.125 +2024-07-28 16:57:20,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=181133.33333333334, ans=0.2 +2024-07-28 16:57:26,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.72 vs. limit=15.0 +2024-07-28 16:57:27,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=181133.33333333334, ans=0.125 +2024-07-28 16:57:27,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.65 vs. limit=15.0 +2024-07-28 16:57:30,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=181146.66666666666, ans=0.125 +2024-07-28 16:57:55,639 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.574e+01 6.305e+01 7.129e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:57:58,336 INFO [train.py:1114] (2/4) Epoch 14, batch 3000, loss[loss=0.1868, simple_loss=0.2749, pruned_loss=0.04938, over 4762.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2728, pruned_loss=0.04777, over 938995.43 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:57:58,336 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 16:58:17,164 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1652, simple_loss=0.2685, pruned_loss=0.03098, over 944034.00 frames. +2024-07-28 16:58:17,165 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 16:58:25,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.52 vs. limit=6.0 +2024-07-28 16:58:35,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-28 16:58:41,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=181226.66666666666, ans=0.2 +2024-07-28 16:59:00,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=181240.0, ans=0.2 +2024-07-28 16:59:04,090 INFO [train.py:1114] (2/4) Epoch 14, batch 3050, loss[loss=0.1939, simple_loss=0.2755, pruned_loss=0.05615, over 4645.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2735, pruned_loss=0.04815, over 937642.49 frames. 
], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:59:07,007 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-28 16:59:07,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=181253.33333333334, ans=0.125 +2024-07-28 16:59:08,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=181253.33333333334, ans=0.125 +2024-07-28 16:59:13,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181266.66666666666, ans=0.1 +2024-07-28 16:59:23,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=181280.0, ans=0.0 +2024-07-28 16:59:33,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=181293.33333333334, ans=0.2 +2024-07-28 16:59:41,680 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.765e+01 6.488e+01 7.325e+01 1.172e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 16:59:48,315 INFO [train.py:1114] (2/4) Epoch 14, batch 3100, loss[loss=0.2479, simple_loss=0.3317, pruned_loss=0.08204, over 4641.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2729, pruned_loss=0.04792, over 938279.76 frames. ], batch size: 16, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:00:46,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=12.0 +2024-07-28 17:01:07,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181373.33333333334, ans=0.125 +2024-07-28 17:01:10,571 INFO [train.py:1114] (2/4) Epoch 14, batch 3150, loss[loss=0.1989, simple_loss=0.2908, pruned_loss=0.05351, over 4856.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2734, pruned_loss=0.04793, over 938166.84 frames. ], batch size: 18, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:01:13,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=181386.66666666666, ans=0.0 +2024-07-28 17:01:34,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=181413.33333333334, ans=0.125 +2024-07-28 17:01:47,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181440.0, ans=0.125 +2024-07-28 17:01:50,906 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.747e+01 6.201e+01 6.953e+01 1.061e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 17:01:53,635 INFO [train.py:1114] (2/4) Epoch 14, batch 3200, loss[loss=0.1819, simple_loss=0.2747, pruned_loss=0.04451, over 4823.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2723, pruned_loss=0.04711, over 939878.09 frames. 
], batch size: 13, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:01:53,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=181453.33333333334, ans=0.125 +2024-07-28 17:01:57,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=181453.33333333334, ans=0.0 +2024-07-28 17:02:06,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=181480.0, ans=0.0 +2024-07-28 17:02:07,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=181480.0, ans=0.025 +2024-07-28 17:02:13,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=181493.33333333334, ans=0.04949747468305833 +2024-07-28 17:02:14,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=181493.33333333334, ans=0.125 +2024-07-28 17:02:15,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=181493.33333333334, ans=0.0 +2024-07-28 17:02:28,097 INFO [train.py:1114] (2/4) Epoch 14, batch 3250, loss[loss=0.1979, simple_loss=0.2812, pruned_loss=0.05728, over 4939.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2737, pruned_loss=0.04769, over 941214.31 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:02:34,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=181520.0, ans=0.025 +2024-07-28 17:02:38,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=181533.33333333334, ans=0.0 +2024-07-28 17:02:51,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=181546.66666666666, ans=0.0 +2024-07-28 17:03:16,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=181573.33333333334, ans=0.125 +2024-07-28 17:03:17,416 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.561e+01 6.069e+01 6.754e+01 1.054e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 17:03:19,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=181586.66666666666, ans=0.125 +2024-07-28 17:04:11,093 INFO [train.py:1114] (2/4) Epoch 14, batch 3300, loss[loss=0.2242, simple_loss=0.309, pruned_loss=0.06966, over 4698.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2738, pruned_loss=0.0484, over 941530.06 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:04:29,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=181600.0, ans=0.0 +2024-07-28 17:04:33,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.41 vs. 
limit=15.0 +2024-07-28 17:04:33,442 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:04:34,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=181613.33333333334, ans=0.0 +2024-07-28 17:04:34,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=181613.33333333334, ans=0.1 +2024-07-28 17:04:40,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.47 vs. limit=15.0 +2024-07-28 17:04:46,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=181640.0, ans=0.125 +2024-07-28 17:04:50,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=181640.0, ans=0.0 +2024-07-28 17:04:54,332 INFO [train.py:1114] (2/4) Epoch 14, batch 3350, loss[loss=0.2229, simple_loss=0.3248, pruned_loss=0.06046, over 4619.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2745, pruned_loss=0.04835, over 939172.90 frames. ], batch size: 17, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:04:59,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181653.33333333334, ans=0.1 +2024-07-28 17:05:08,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=181680.0, ans=0.025 +2024-07-28 17:05:11,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=181680.0, ans=0.09899494936611666 +2024-07-28 17:05:19,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=15.0 +2024-07-28 17:05:25,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.23 vs. limit=12.0 +2024-07-28 17:05:25,876 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.702e+01 6.286e+01 7.207e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 17:05:29,886 INFO [train.py:1114] (2/4) Epoch 14, batch 3400, loss[loss=0.1894, simple_loss=0.2594, pruned_loss=0.05971, over 4811.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2744, pruned_loss=0.04899, over 937744.81 frames. ], batch size: 11, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:05:45,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=181746.66666666666, ans=0.0 +2024-07-28 17:05:49,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.60 vs. limit=15.0 +2024-07-28 17:05:51,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181760.0, ans=0.1 +2024-07-28 17:05:56,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=181760.0, ans=0.0 +2024-07-28 17:06:07,189 INFO [train.py:1114] (2/4) Epoch 14, batch 3450, loss[loss=0.1955, simple_loss=0.2838, pruned_loss=0.05355, over 4763.00 frames. 
], tot_loss[loss=0.1861, simple_loss=0.2745, pruned_loss=0.04888, over 938056.99 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:06:21,158 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.55 vs. limit=15.0 +2024-07-28 17:06:27,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=181813.33333333334, ans=0.125 +2024-07-28 17:06:31,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=181826.66666666666, ans=0.125 +2024-07-28 17:06:33,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=181826.66666666666, ans=0.125 +2024-07-28 17:06:41,511 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.614e+01 6.099e+01 6.810e+01 1.220e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:06:44,268 INFO [train.py:1114] (2/4) Epoch 14, batch 3500, loss[loss=0.1712, simple_loss=0.2621, pruned_loss=0.04011, over 4926.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2733, pruned_loss=0.04829, over 938476.65 frames. ], batch size: 12, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:07:07,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=181893.33333333334, ans=0.125 +2024-07-28 17:07:13,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=181906.66666666666, ans=0.05 +2024-07-28 17:07:16,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=181906.66666666666, ans=0.0 +2024-07-28 17:07:17,703 INFO [train.py:1114] (2/4) Epoch 14, batch 3550, loss[loss=0.1859, simple_loss=0.2798, pruned_loss=0.04603, over 4655.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2729, pruned_loss=0.04802, over 938838.23 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:07:17,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=181920.0, ans=0.2 +2024-07-28 17:07:18,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0 +2024-07-28 17:07:28,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=181933.33333333334, ans=0.2 +2024-07-28 17:07:47,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=181973.33333333334, ans=0.2 +2024-07-28 17:07:47,923 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.433e+01 6.095e+01 6.753e+01 1.044e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 17:07:50,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181986.66666666666, ans=0.1 +2024-07-28 17:07:50,515 INFO [train.py:1114] (2/4) Epoch 14, batch 3600, loss[loss=0.1735, simple_loss=0.2649, pruned_loss=0.04107, over 4963.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2739, pruned_loss=0.04837, over 940263.00 frames. 
], batch size: 13, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:07:51,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=181986.66666666666, ans=0.0 +2024-07-28 17:07:52,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=181986.66666666666, ans=0.0 +2024-07-28 17:07:52,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=181986.66666666666, ans=0.0 +2024-07-28 17:07:57,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=182000.0, ans=0.125 +2024-07-28 17:07:58,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=182000.0, ans=6.0 +2024-07-28 17:08:08,427 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:08:11,683 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:08:13,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.48 vs. limit=12.0 +2024-07-28 17:08:17,884 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.55 vs. limit=15.0 +2024-07-28 17:08:21,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=182040.0, ans=0.2 +2024-07-28 17:08:26,139 INFO [train.py:1114] (2/4) Epoch 14, batch 3650, loss[loss=0.2197, simple_loss=0.3116, pruned_loss=0.06391, over 4900.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2722, pruned_loss=0.04757, over 940660.08 frames. ], batch size: 15, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:08:26,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182053.33333333334, ans=0.125 +2024-07-28 17:08:42,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=182080.0, ans=0.2 +2024-07-28 17:08:48,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=182093.33333333334, ans=0.0 +2024-07-28 17:08:53,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=182106.66666666666, ans=0.125 +2024-07-28 17:08:57,487 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.626e+01 5.717e+01 6.299e+01 7.471e+01 1.089e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 17:08:59,530 INFO [train.py:1114] (2/4) Epoch 14, batch 3700, loss[loss=0.167, simple_loss=0.2657, pruned_loss=0.03413, over 4942.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2722, pruned_loss=0.04694, over 941768.73 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:09:14,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=182120.0, ans=0.0 +2024-07-28 17:09:23,610 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.77 vs. 
limit=10.0 +2024-07-28 17:09:37,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=182160.0, ans=0.2 +2024-07-28 17:09:40,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=182160.0, ans=0.125 +2024-07-28 17:09:42,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=182173.33333333334, ans=0.125 +2024-07-28 17:09:46,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=12.0 +2024-07-28 17:09:48,370 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:09:48,832 INFO [train.py:1114] (2/4) Epoch 14, batch 3750, loss[loss=0.1754, simple_loss=0.26, pruned_loss=0.04539, over 4802.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2723, pruned_loss=0.04712, over 943093.61 frames. ], batch size: 11, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:09:59,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=182200.0, ans=0.125 +2024-07-28 17:10:04,318 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:10:06,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=182213.33333333334, ans=0.125 +2024-07-28 17:10:17,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=182240.0, ans=0.0 +2024-07-28 17:10:19,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182240.0, ans=0.1 +2024-07-28 17:10:20,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=182240.0, ans=0.125 +2024-07-28 17:10:20,835 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.520e+01 5.513e+01 6.095e+01 6.820e+01 9.830e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 17:10:22,248 INFO [train.py:1114] (2/4) Epoch 14, batch 3800, loss[loss=0.2062, simple_loss=0.3004, pruned_loss=0.05601, over 4804.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2722, pruned_loss=0.04726, over 941543.24 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:10:23,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=182253.33333333334, ans=0.125 +2024-07-28 17:10:27,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.59 vs. 
limit=15.0 +2024-07-28 17:10:40,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182280.0, ans=0.1 +2024-07-28 17:10:42,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=182293.33333333334, ans=0.125 +2024-07-28 17:10:46,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=182293.33333333334, ans=0.0 +2024-07-28 17:10:53,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182306.66666666666, ans=0.1 +2024-07-28 17:10:55,057 INFO [train.py:1114] (2/4) Epoch 14, batch 3850, loss[loss=0.1752, simple_loss=0.2563, pruned_loss=0.04705, over 4628.00 frames. ], tot_loss[loss=0.183, simple_loss=0.272, pruned_loss=0.04705, over 942381.46 frames. ], batch size: 16, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:11:03,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=182333.33333333334, ans=0.07 +2024-07-28 17:11:04,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.75 vs. limit=15.0 +2024-07-28 17:11:08,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=182346.66666666666, ans=0.125 +2024-07-28 17:11:16,747 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.69 vs. limit=22.5 +2024-07-28 17:11:19,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=182360.0, ans=0.2 +2024-07-28 17:11:21,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=182373.33333333334, ans=0.2 +2024-07-28 17:11:23,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182373.33333333334, ans=0.1 +2024-07-28 17:11:29,005 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.612e+01 6.160e+01 6.955e+01 1.058e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 17:11:30,352 INFO [train.py:1114] (2/4) Epoch 14, batch 3900, loss[loss=0.2196, simple_loss=0.3016, pruned_loss=0.06874, over 4813.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2729, pruned_loss=0.04749, over 942712.48 frames. 
], batch size: 14, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:11:32,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=182386.66666666666, ans=0.025 +2024-07-28 17:11:33,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=182386.66666666666, ans=0.125 +2024-07-28 17:11:35,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=182386.66666666666, ans=0.2 +2024-07-28 17:11:35,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=182386.66666666666, ans=0.0 +2024-07-28 17:11:39,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=182400.0, ans=0.125 +2024-07-28 17:11:40,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.60 vs. limit=12.0 +2024-07-28 17:11:46,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=182413.33333333334, ans=10.0 +2024-07-28 17:11:51,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=182413.33333333334, ans=0.025 +2024-07-28 17:11:52,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=182426.66666666666, ans=0.125 +2024-07-28 17:11:58,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=182440.0, ans=0.0 +2024-07-28 17:12:01,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=182440.0, ans=0.125 +2024-07-28 17:12:06,181 INFO [train.py:1114] (2/4) Epoch 14, batch 3950, loss[loss=0.1917, simple_loss=0.2839, pruned_loss=0.04974, over 4852.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2729, pruned_loss=0.04729, over 944689.31 frames. ], batch size: 16, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:12:17,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=182466.66666666666, ans=0.125 +2024-07-28 17:12:24,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=182480.0, ans=0.0 +2024-07-28 17:12:37,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=182493.33333333334, ans=0.125 +2024-07-28 17:12:39,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=182506.66666666666, ans=0.05 +2024-07-28 17:12:43,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182506.66666666666, ans=0.125 +2024-07-28 17:12:44,493 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.506e+01 6.184e+01 7.058e+01 1.004e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 17:12:54,531 INFO [train.py:1114] (2/4) Epoch 14, batch 4000, loss[loss=0.1781, simple_loss=0.2624, pruned_loss=0.04688, over 4770.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2735, pruned_loss=0.04787, over 940755.20 frames. 
], batch size: 12, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:12:55,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=182520.0, ans=0.2 +2024-07-28 17:13:07,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=182533.33333333334, ans=0.0 +2024-07-28 17:13:10,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=182533.33333333334, ans=0.1 +2024-07-28 17:14:59,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=182573.33333333334, ans=0.0 +2024-07-28 17:15:01,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=182586.66666666666, ans=0.125 +2024-07-28 17:15:02,344 INFO [train.py:1114] (2/4) Epoch 14, batch 4050, loss[loss=0.2812, simple_loss=0.3327, pruned_loss=0.1149, over 3366.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2737, pruned_loss=0.04824, over 939417.05 frames. ], batch size: 37, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:15:04,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.06 vs. limit=12.0 +2024-07-28 17:15:05,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=182586.66666666666, ans=0.0 +2024-07-28 17:15:22,253 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.66 vs. limit=15.0 +2024-07-28 17:15:45,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=182613.33333333334, ans=0.0 +2024-07-28 17:16:07,185 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.211e+01 7.334e+01 1.251e+02, threshold=1.242e+02, percent-clipped=2.0 +2024-07-28 17:16:08,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=182653.33333333334, ans=0.0 +2024-07-28 17:16:08,568 INFO [train.py:1114] (2/4) Epoch 14, batch 4100, loss[loss=0.2181, simple_loss=0.3075, pruned_loss=0.06436, over 4909.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2733, pruned_loss=0.04814, over 938706.58 frames. 
], batch size: 15, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:16:08,786 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=182653.33333333334, ans=0.05 +2024-07-28 17:16:08,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=182653.33333333334, ans=0.0 +2024-07-28 17:16:10,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=182653.33333333334, ans=0.07 +2024-07-28 17:16:13,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=182653.33333333334, ans=0.0 +2024-07-28 17:16:13,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=182653.33333333334, ans=0.125 +2024-07-28 17:16:22,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=182666.66666666666, ans=0.125 +2024-07-28 17:16:40,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=182706.66666666666, ans=0.125 +2024-07-28 17:16:49,307 INFO [train.py:1114] (2/4) Epoch 14, batch 4150, loss[loss=0.1716, simple_loss=0.2581, pruned_loss=0.04253, over 4827.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2733, pruned_loss=0.0482, over 938412.93 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:16:49,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=182720.0, ans=0.2 +2024-07-28 17:16:59,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.71 vs. limit=10.0 +2024-07-28 17:17:00,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=182733.33333333334, ans=0.07 +2024-07-28 17:17:41,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=182733.33333333334, ans=0.035 +2024-07-28 17:17:53,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.23 vs. limit=15.0 +2024-07-28 17:17:54,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=182746.66666666666, ans=0.125 +2024-07-28 17:18:31,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=182760.0, ans=0.125 +2024-07-28 17:18:39,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0 +2024-07-28 17:18:46,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.37 vs. 
limit=15.0 +2024-07-28 17:18:46,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182773.33333333334, ans=0.1 +2024-07-28 17:18:46,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=20.13 vs. limit=15.0 +2024-07-28 17:18:48,508 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.631e+01 6.207e+01 7.543e+01 1.114e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 17:18:49,845 INFO [train.py:1114] (2/4) Epoch 14, batch 4200, loss[loss=0.2271, simple_loss=0.3093, pruned_loss=0.07246, over 4897.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2728, pruned_loss=0.04807, over 939582.08 frames. ], batch size: 15, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:19:05,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=182786.66666666666, ans=0.2 +2024-07-28 17:19:32,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182813.33333333334, ans=0.1 +2024-07-28 17:19:35,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=182813.33333333334, ans=0.125 +2024-07-28 17:19:38,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.24 vs. limit=15.0 +2024-07-28 17:19:49,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=182840.0, ans=0.0 +2024-07-28 17:19:52,369 INFO [train.py:1114] (2/4) Epoch 14, batch 4250, loss[loss=0.1677, simple_loss=0.2577, pruned_loss=0.03884, over 4641.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2731, pruned_loss=0.04796, over 940383.12 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:20:28,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=182906.66666666666, ans=0.04949747468305833 +2024-07-28 17:20:30,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=182906.66666666666, ans=0.2 +2024-07-28 17:20:33,150 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.602e+01 6.327e+01 7.435e+01 1.299e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-28 17:20:34,467 INFO [train.py:1114] (2/4) Epoch 14, batch 4300, loss[loss=0.1744, simple_loss=0.2747, pruned_loss=0.03706, over 4766.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2734, pruned_loss=0.04826, over 939946.82 frames. 
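The optim.py:487 WARNING lines decode as follows: the five grad-norm quartiles are the min, 25th, 50th, 75th percentile, and max of recently observed gradient norms; the clipping threshold is Clipping_scale times the running median (in the entry above, 2.0 * 6.207e+01 ≈ 1.241e+02); and percent-clipped is the percentage of recent steps whose norm exceeded that threshold. A sketch of such a median-based clipping policy follows, assuming a simple sliding window; the names below are illustrative, not the optimizer's internals.

    import collections
    import numpy as np

    # Illustrative median-based gradient clipping with a sliding window of
    # recent gradient norms; an assumed simplification, not optim.py itself.
    class MedianClipper:
        def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
            self.clipping_scale = clipping_scale
            self.norms = collections.deque(maxlen=window)

        def clip_factor(self, grad_norm: float) -> float:
            """Factor (<= 1.0) to multiply the gradient by."""
            self.norms.append(grad_norm)
            threshold = self.clipping_scale * float(np.median(self.norms))
            return min(1.0, threshold / max(grad_norm, 1e-20))

        def report(self):
            """Quartiles, threshold, and percent of window above threshold."""
            q = np.quantile(self.norms, [0.0, 0.25, 0.5, 0.75, 1.0])
            threshold = self.clipping_scale * q[2]
            pct = 100.0 * float(np.mean(np.asarray(self.norms) > threshold))
            return q, threshold, pct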
], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:20:37,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=182920.0, ans=10.0 +2024-07-28 17:20:46,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=182933.33333333334, ans=0.0 +2024-07-28 17:21:02,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=182973.33333333334, ans=0.0 +2024-07-28 17:21:02,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=182973.33333333334, ans=0.025 +2024-07-28 17:21:04,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=182973.33333333334, ans=0.125 +2024-07-28 17:21:09,266 INFO [train.py:1114] (2/4) Epoch 14, batch 4350, loss[loss=0.1982, simple_loss=0.2824, pruned_loss=0.05695, over 4760.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2742, pruned_loss=0.04811, over 940763.80 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:25,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=183013.33333333334, ans=0.125 +2024-07-28 17:21:32,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-07-28 17:21:44,670 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.657e+01 6.269e+01 7.008e+01 1.088e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:21:46,164 INFO [train.py:1114] (2/4) Epoch 14, batch 4400, loss[loss=0.1733, simple_loss=0.266, pruned_loss=0.04032, over 4813.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.274, pruned_loss=0.04783, over 940600.21 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:50,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=183053.33333333334, ans=0.025 +2024-07-28 17:22:22,159 INFO [train.py:1114] (2/4) Epoch 14, batch 4450, loss[loss=0.1645, simple_loss=0.2382, pruned_loss=0.04542, over 4952.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2741, pruned_loss=0.04822, over 938902.66 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:22:24,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0 +2024-07-28 17:22:26,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183120.0, ans=0.1 +2024-07-28 17:22:28,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183133.33333333334, ans=0.1 +2024-07-28 17:22:30,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=183133.33333333334, ans=0.0 +2024-07-28 17:22:32,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.80 vs. 
limit=12.0 +2024-07-28 17:22:35,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=183146.66666666666, ans=0.125 +2024-07-28 17:22:40,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.84 vs. limit=15.0 +2024-07-28 17:22:49,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.05 vs. limit=15.0 +2024-07-28 17:26:48,552 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.470e+01 5.943e+01 6.622e+01 1.092e+02, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 17:26:49,864 INFO [train.py:1114] (2/4) Epoch 14, batch 4500, loss[loss=0.2038, simple_loss=0.304, pruned_loss=0.05175, over 4747.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2747, pruned_loss=0.04838, over 938180.79 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:27:02,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=183200.0, ans=0.0 +2024-07-28 17:27:04,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=183200.0, ans=0.125 +2024-07-28 17:27:07,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=183213.33333333334, ans=0.05 +2024-07-28 17:27:24,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=183240.0, ans=0.125 +2024-07-28 17:27:28,602 INFO [train.py:1114] (2/4) Epoch 14, batch 4550, loss[loss=0.18, simple_loss=0.2733, pruned_loss=0.04332, over 4895.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.274, pruned_loss=0.04783, over 940128.62 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:27:30,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=183253.33333333334, ans=0.0 +2024-07-28 17:27:30,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=183253.33333333334, ans=0.2 +2024-07-28 17:27:33,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=183253.33333333334, ans=0.125 +2024-07-28 17:27:37,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=183266.66666666666, ans=0.125 +2024-07-28 17:27:40,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=183266.66666666666, ans=0.125 +2024-07-28 17:28:01,176 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.634e+01 6.361e+01 7.770e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 17:28:02,546 INFO [train.py:1114] (2/4) Epoch 14, batch 4600, loss[loss=0.1945, simple_loss=0.2865, pruned_loss=0.05126, over 4427.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.04753, over 938168.48 frames. 
], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:06,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=183320.0, ans=0.0 +2024-07-28 17:28:07,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183320.0, ans=0.1 +2024-07-28 17:28:17,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=183346.66666666666, ans=0.035 +2024-07-28 17:28:18,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=183346.66666666666, ans=0.0 +2024-07-28 17:28:20,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=183346.66666666666, ans=0.125 +2024-07-28 17:28:26,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=183360.0, ans=0.125 +2024-07-28 17:28:35,614 INFO [train.py:1114] (2/4) Epoch 14, batch 4650, loss[loss=0.1858, simple_loss=0.2825, pruned_loss=0.04451, over 4836.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2748, pruned_loss=0.04745, over 940090.93 frames. ], batch size: 16, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:41,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=183400.0, ans=0.0 +2024-07-28 17:28:49,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=183413.33333333334, ans=0.0 +2024-07-28 17:28:53,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=183413.33333333334, ans=0.0 +2024-07-28 17:29:09,676 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.803e+01 6.288e+01 7.232e+01 1.102e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 17:29:11,000 INFO [train.py:1114] (2/4) Epoch 14, batch 4700, loss[loss=0.1557, simple_loss=0.2419, pruned_loss=0.03477, over 4706.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2738, pruned_loss=0.0472, over 937733.24 frames. ], batch size: 11, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:20,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=183466.66666666666, ans=0.05 +2024-07-28 17:29:22,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=183466.66666666666, ans=12.0 +2024-07-28 17:29:42,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=183506.66666666666, ans=0.2 +2024-07-28 17:29:45,337 INFO [train.py:1114] (2/4) Epoch 14, batch 4750, loss[loss=0.2215, simple_loss=0.2987, pruned_loss=0.07213, over 4516.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.275, pruned_loss=0.04829, over 935952.54 frames. 
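The dense scaling.py:214 traffic above records ScheduledFloat values: training hyperparameters (attention/conv/ff skip rates, dropout probabilities, balancer probabilities, bypass scale floors) that are functions of batch_count rather than constants, so regularization can start strong and relax as training progresses; `ans` is the value in effect at the logged batch_count. A minimal sketch of a piecewise-linear schedule of this kind follows; the class and the example endpoints are illustrative stand-ins, not the scaling.py implementation.

    # Illustrative piecewise-linear schedule keyed on batch count; an assumed
    # stand-in for the ScheduledFloat values that scaling.py logs.
    class PiecewiseLinearFloat:
        def __init__(self, *points: tuple):
            # points: (batch_count, value) pairs.
            self.points = sorted(points)

        def __call__(self, batch_count: float) -> float:
            pts = self.points
            if batch_count <= pts[0][0]:
                return pts[0][1]
            for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
                if batch_count <= x1:
                    t = (batch_count - x0) / (x1 - x0)
                    return y0 + t * (y1 - y0)
            return pts[-1][1]

    # e.g. a skip rate that starts at 0.2 and decays to 0.0 by batch 16000
    # (endpoints chosen for illustration only):
    conv_skip_rate = PiecewiseLinearFloat((0.0, 0.2), (16000.0, 0.0))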
], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:30:04,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183546.66666666666, ans=0.125 +2024-07-28 17:30:07,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=183560.0, ans=0.0 +2024-07-28 17:30:16,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=183573.33333333334, ans=0.125 +2024-07-28 17:30:17,970 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.592e+01 6.256e+01 7.365e+01 1.010e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 17:30:19,297 INFO [train.py:1114] (2/4) Epoch 14, batch 4800, loss[loss=0.1626, simple_loss=0.252, pruned_loss=0.03657, over 4696.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2737, pruned_loss=0.04786, over 932745.58 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:30:28,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=183600.0, ans=0.025 +2024-07-28 17:30:30,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=183600.0, ans=0.035 +2024-07-28 17:30:31,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=183600.0, ans=0.05 +2024-07-28 17:30:34,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=183613.33333333334, ans=0.125 +2024-07-28 17:30:34,540 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-28 17:30:54,681 INFO [train.py:1114] (2/4) Epoch 14, batch 4850, loss[loss=0.169, simple_loss=0.2575, pruned_loss=0.04022, over 4738.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2745, pruned_loss=0.04848, over 932662.33 frames. ], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:08,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183666.66666666666, ans=0.1 +2024-07-28 17:31:13,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=183680.0, ans=0.0 +2024-07-28 17:31:24,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183706.66666666666, ans=0.125 +2024-07-28 17:31:24,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.81 vs. limit=10.0 +2024-07-28 17:31:30,321 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.421e+01 5.850e+01 6.499e+01 1.354e+02, threshold=1.170e+02, percent-clipped=1.0 +2024-07-28 17:31:31,725 INFO [train.py:1114] (2/4) Epoch 14, batch 4900, loss[loss=0.1823, simple_loss=0.2818, pruned_loss=0.04136, over 4770.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2737, pruned_loss=0.04768, over 934395.40 frames. 
], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:31,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=183720.0, ans=0.125 +2024-07-28 17:31:37,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=183720.0, ans=0.125 +2024-07-28 17:31:44,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=183733.33333333334, ans=0.0 +2024-07-28 17:31:44,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.42 vs. limit=15.0 +2024-07-28 17:31:45,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.62 vs. limit=6.0 +2024-07-28 17:31:50,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=183746.66666666666, ans=0.125 +2024-07-28 17:31:53,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=183760.0, ans=0.2 +2024-07-28 17:31:54,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=183760.0, ans=0.0 +2024-07-28 17:32:02,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.87 vs. limit=22.5 +2024-07-28 17:32:06,232 INFO [train.py:1114] (2/4) Epoch 14, batch 4950, loss[loss=0.2178, simple_loss=0.296, pruned_loss=0.06983, over 3339.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2746, pruned_loss=0.04814, over 931612.37 frames. ], batch size: 35, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:12,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=183800.0, ans=0.2 +2024-07-28 17:32:16,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.13 vs. limit=12.0 +2024-07-28 17:32:27,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=183826.66666666666, ans=0.0 +2024-07-28 17:32:31,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=183826.66666666666, ans=0.125 +2024-07-28 17:32:36,001 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.32 vs. limit=15.0 +2024-07-28 17:32:38,287 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+01 5.530e+01 6.017e+01 6.862e+01 9.810e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 17:32:39,707 INFO [train.py:1114] (2/4) Epoch 14, batch 5000, loss[loss=0.2009, simple_loss=0.2978, pruned_loss=0.05204, over 4674.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2739, pruned_loss=0.04787, over 935173.30 frames. 
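Many of the scheduled values above belong to balancer modules (balancer1.prob, balancer2.min_positive, min_abs, max_abs): these appear to constrain per-channel activation statistics, such as the minimum fraction of positive values or bounds on the mean absolute value, applied stochastically with the logged prob. A soft, differentiable stand-in for such a constraint is sketched below; it is an assumed simplification (the real module may enforce its limits quite differently, e.g. by modifying gradients), and all names and defaults are illustrative.

    import torch

    # Illustrative soft penalty standing in for a balancer-style constraint:
    # push each channel's (softened) positive fraction above min_positive and
    # its mean |activation| below max_abs. Not the actual balancer code.
    def balancer_penalty(x: torch.Tensor,
                         min_positive: float = 0.05,
                         max_abs: float = 10.0,
                         tau: float = 0.1) -> torch.Tensor:
        x2 = x.reshape(-1, x.shape[-1])                 # (frames, channels)
        frac_pos = torch.sigmoid(x2 / tau).mean(dim=0)  # soft positive rate
        mean_abs = x2.abs().mean(dim=0)
        return (torch.relu(min_positive - frac_pos).sum()
                + torch.relu(mean_abs - max_abs).sum())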
], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:40,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=183853.33333333334, ans=0.5 +2024-07-28 17:32:41,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=183853.33333333334, ans=0.125 +2024-07-28 17:32:45,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=183866.66666666666, ans=0.0 +2024-07-28 17:32:50,209 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.84 vs. limit=10.0 +2024-07-28 17:32:51,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=183866.66666666666, ans=0.0 +2024-07-28 17:33:12,763 INFO [train.py:1114] (2/4) Epoch 14, batch 5050, loss[loss=0.1784, simple_loss=0.2654, pruned_loss=0.04572, over 4859.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2732, pruned_loss=0.0475, over 938186.38 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:13,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=183920.0, ans=0.0 +2024-07-28 17:33:17,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=183920.0, ans=0.0 +2024-07-28 17:33:19,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=183933.33333333334, ans=0.125 +2024-07-28 17:33:19,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=183933.33333333334, ans=0.0 +2024-07-28 17:33:20,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=183933.33333333334, ans=0.0 +2024-07-28 17:33:21,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=183933.33333333334, ans=0.2 +2024-07-28 17:33:22,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=183933.33333333334, ans=0.125 +2024-07-28 17:33:27,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183946.66666666666, ans=0.125 +2024-07-28 17:33:29,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-28 17:33:30,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=183946.66666666666, ans=0.0 +2024-07-28 17:33:31,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.38 vs. limit=6.0 +2024-07-28 17:33:34,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.19 vs. 
limit=22.5 +2024-07-28 17:33:40,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=183973.33333333334, ans=0.125 +2024-07-28 17:33:42,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=183973.33333333334, ans=0.125 +2024-07-28 17:33:45,611 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.607e+01 6.225e+01 6.953e+01 1.020e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 17:33:46,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=183986.66666666666, ans=0.125 +2024-07-28 17:33:47,349 INFO [train.py:1114] (2/4) Epoch 14, batch 5100, loss[loss=0.154, simple_loss=0.2465, pruned_loss=0.03075, over 4770.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2732, pruned_loss=0.0475, over 935774.06 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:51,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=183986.66666666666, ans=0.125 +2024-07-28 17:34:20,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184053.33333333334, ans=0.125 +2024-07-28 17:34:21,110 INFO [train.py:1114] (2/4) Epoch 14, batch 5150, loss[loss=0.2001, simple_loss=0.2865, pruned_loss=0.05687, over 4848.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2742, pruned_loss=0.0482, over 936419.73 frames. ], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:34:21,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184053.33333333334, ans=0.0 +2024-07-28 17:34:27,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=184053.33333333334, ans=0.0 +2024-07-28 17:34:38,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=184080.0, ans=0.025 +2024-07-28 17:34:45,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184093.33333333334, ans=0.125 +2024-07-28 17:34:54,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.666e+01 6.187e+01 7.169e+01 1.415e+02, threshold=1.237e+02, percent-clipped=1.0 +2024-07-28 17:34:56,316 INFO [train.py:1114] (2/4) Epoch 14, batch 5200, loss[loss=0.1582, simple_loss=0.2451, pruned_loss=0.0356, over 4664.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2741, pruned_loss=0.04803, over 936420.08 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:34:56,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. 
limit=6.0 +2024-07-28 17:34:57,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=184120.0, ans=0.125 +2024-07-28 17:35:03,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=184133.33333333334, ans=0.125 +2024-07-28 17:35:22,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=184160.0, ans=0.125 +2024-07-28 17:35:27,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=184173.33333333334, ans=0.025 +2024-07-28 17:35:44,106 INFO [train.py:1114] (2/4) Epoch 14, batch 5250, loss[loss=0.1739, simple_loss=0.2604, pruned_loss=0.04374, over 4893.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2733, pruned_loss=0.04779, over 935953.74 frames. ], batch size: 13, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:35:44,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0 +2024-07-28 17:36:01,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184186.66666666666, ans=0.0 +2024-07-28 17:36:17,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184200.0, ans=0.125 +2024-07-28 17:36:36,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=184213.33333333334, ans=0.2 +2024-07-28 17:37:38,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184226.66666666666, ans=0.1 +2024-07-28 17:37:39,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=184226.66666666666, ans=0.025 +2024-07-28 17:37:41,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=184226.66666666666, ans=0.025 +2024-07-28 17:37:43,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184240.0, ans=0.1 +2024-07-28 17:37:46,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0 +2024-07-28 17:37:48,429 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.478e+01 6.210e+01 7.367e+01 1.027e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 17:37:49,769 INFO [train.py:1114] (2/4) Epoch 14, batch 5300, loss[loss=0.1976, simple_loss=0.2866, pruned_loss=0.0543, over 4634.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2732, pruned_loss=0.04782, over 934007.82 frames. ], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:37:55,490 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.08 vs. 
limit=22.5 +2024-07-28 17:37:58,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184266.66666666666, ans=0.125 +2024-07-28 17:38:00,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=184266.66666666666, ans=0.125 +2024-07-28 17:38:07,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=184280.0, ans=0.125 +2024-07-28 17:38:18,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=184293.33333333334, ans=0.0 +2024-07-28 17:38:26,491 INFO [train.py:1114] (2/4) Epoch 14, batch 5350, loss[loss=0.1526, simple_loss=0.2424, pruned_loss=0.03145, over 4545.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2741, pruned_loss=0.04783, over 936198.34 frames. ], batch size: 10, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:38:32,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=184333.33333333334, ans=0.025 +2024-07-28 17:38:43,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184346.66666666666, ans=0.1 +2024-07-28 17:38:44,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=184346.66666666666, ans=0.025 +2024-07-28 17:38:45,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184346.66666666666, ans=0.125 +2024-07-28 17:38:47,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=184346.66666666666, ans=0.125 +2024-07-28 17:38:50,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=184360.0, ans=0.125 +2024-07-28 17:38:54,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.79 vs. limit=15.0 +2024-07-28 17:38:59,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184373.33333333334, ans=0.1 +2024-07-28 17:39:00,811 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:39:01,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.641e+01 6.338e+01 7.374e+01 1.167e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 17:39:02,098 INFO [train.py:1114] (2/4) Epoch 14, batch 5400, loss[loss=0.273, simple_loss=0.353, pruned_loss=0.09649, over 4168.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2743, pruned_loss=0.0477, over 930279.99 frames. 
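The scaling.py:1024 Whitening lines compare a measured whiteness metric of a module's activations against a limit (metric=20.08 vs. limit=22.5 in the entry above); a module whose feature covariance drifts too far from a scaled identity gets nudged back. One plausible such metric is sketched below, under the assumption that it equals 1.0 for perfectly white features and grows with the spread of covariance eigenvalues; this is an illustration, not the actual scaling.py formula.

    import torch

    # Illustrative whiteness metric: d * trace(C @ C) / trace(C)^2 for the
    # feature covariance C over d channels. Equals 1.0 when C is a multiple
    # of the identity and grows with eigenvalue spread. An assumption, not
    # the logged module's exact formula.
    def whitening_metric(x: torch.Tensor) -> torch.Tensor:
        x = x.reshape(-1, x.shape[-1])       # (frames, channels)
        x = x - x.mean(dim=0, keepdim=True)
        cov = (x.T @ x) / x.shape[0]         # (channels, channels)
        d = cov.shape[0]
        return d * (cov ** 2).sum() / (cov.diagonal().sum() ** 2 + 1e-20)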
], batch size: 25, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:02,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=184386.66666666666, ans=0.05 +2024-07-28 17:39:07,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184386.66666666666, ans=0.1 +2024-07-28 17:39:11,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.80 vs. limit=12.0 +2024-07-28 17:39:17,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.39 vs. limit=10.0 +2024-07-28 17:39:22,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=184426.66666666666, ans=0.0 +2024-07-28 17:39:23,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=184426.66666666666, ans=0.025 +2024-07-28 17:39:24,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=184426.66666666666, ans=0.125 +2024-07-28 17:39:28,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184440.0, ans=0.1 +2024-07-28 17:39:32,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184440.0, ans=0.125 +2024-07-28 17:39:32,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=184440.0, ans=0.0 +2024-07-28 17:39:35,487 INFO [train.py:1114] (2/4) Epoch 14, batch 5450, loss[loss=0.156, simple_loss=0.2461, pruned_loss=0.03295, over 4697.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2734, pruned_loss=0.04717, over 932951.87 frames. ], batch size: 11, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:36,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184453.33333333334, ans=0.1 +2024-07-28 17:39:41,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.44 vs. limit=12.0 +2024-07-28 17:39:47,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=184466.66666666666, ans=0.2 +2024-07-28 17:39:50,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=184480.0, ans=0.125 +2024-07-28 17:39:54,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.96 vs. 
limit=15.0 +2024-07-28 17:39:56,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=184493.33333333334, ans=0.125 +2024-07-28 17:40:04,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=184506.66666666666, ans=0.125 +2024-07-28 17:40:08,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.352e+01 5.587e+01 6.313e+01 7.261e+01 1.072e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 17:40:09,250 INFO [train.py:1114] (2/4) Epoch 14, batch 5500, loss[loss=0.1966, simple_loss=0.2842, pruned_loss=0.05451, over 4299.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2731, pruned_loss=0.04711, over 930846.28 frames. ], batch size: 25, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:09,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=184520.0, ans=0.2 +2024-07-28 17:40:11,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184520.0, ans=0.1 +2024-07-28 17:40:16,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=184533.33333333334, ans=0.125 +2024-07-28 17:40:17,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=184533.33333333334, ans=0.125 +2024-07-28 17:40:27,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184546.66666666666, ans=0.1 +2024-07-28 17:40:33,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184560.0, ans=0.125 +2024-07-28 17:40:38,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=184573.33333333334, ans=0.125 +2024-07-28 17:40:45,344 INFO [train.py:1114] (2/4) Epoch 14, batch 5550, loss[loss=0.197, simple_loss=0.2823, pruned_loss=0.05581, over 4718.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.273, pruned_loss=0.04702, over 933334.20 frames. ], batch size: 12, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:46,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=184586.66666666666, ans=0.125 +2024-07-28 17:40:55,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=184600.0, ans=0.125 +2024-07-28 17:41:19,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 6.017e+01 6.994e+01 8.294e+01 1.224e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-28 17:41:19,728 INFO [train.py:1114] (2/4) Epoch 14, batch 5600, loss[loss=0.2045, simple_loss=0.2846, pruned_loss=0.0622, over 4738.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2735, pruned_loss=0.0476, over 934072.38 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:41:26,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.88 vs. 
limit=22.5 +2024-07-28 17:41:29,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=184666.66666666666, ans=0.0 +2024-07-28 17:41:29,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=184666.66666666666, ans=0.125 +2024-07-28 17:41:31,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=184666.66666666666, ans=0.0 +2024-07-28 17:41:35,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=184680.0, ans=0.0 +2024-07-28 17:41:45,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=184693.33333333334, ans=0.125 +2024-07-28 17:41:50,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=184706.66666666666, ans=0.125 +2024-07-28 17:41:55,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=184706.66666666666, ans=0.0 +2024-07-28 17:42:53,630 INFO [train.py:1114] (2/4) Epoch 14, batch 5650, loss[loss=0.215, simple_loss=0.3047, pruned_loss=0.06263, over 4592.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2734, pruned_loss=0.0478, over 936521.69 frames. ], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:42:53,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=184720.0, ans=0.0 +2024-07-28 17:43:05,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184720.0, ans=0.0 +2024-07-28 17:43:14,338 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:43:24,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.76 vs. limit=15.0 +2024-07-28 17:43:35,138 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.575e+01 6.312e+01 7.151e+01 9.820e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 17:43:35,893 INFO [train.py:1114] (2/4) Epoch 14, batch 5700, loss[loss=0.1653, simple_loss=0.258, pruned_loss=0.03632, over 4696.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2733, pruned_loss=0.04808, over 937656.67 frames. ], batch size: 13, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:43:46,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=184800.0, ans=0.025 +2024-07-28 17:43:53,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184813.33333333334, ans=0.125 +2024-07-28 17:44:12,517 INFO [train.py:1114] (2/4) Epoch 14, batch 5750, loss[loss=0.1733, simple_loss=0.2648, pruned_loss=0.04093, over 4664.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2735, pruned_loss=0.04783, over 937673.60 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:44:27,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. 
limit=15.0 +2024-07-28 17:44:48,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.73 vs. limit=15.0 +2024-07-28 17:44:51,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184906.66666666666, ans=0.1 +2024-07-28 17:44:56,653 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.552e+01 6.040e+01 6.826e+01 9.653e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 17:44:57,317 INFO [train.py:1114] (2/4) Epoch 14, batch 5800, loss[loss=0.21, simple_loss=0.3067, pruned_loss=0.05665, over 4727.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2745, pruned_loss=0.04825, over 937086.35 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:01,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=184920.0, ans=0.125 +2024-07-28 17:45:23,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=184960.0, ans=0.0 +2024-07-28 17:45:26,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=184973.33333333334, ans=0.02 +2024-07-28 17:45:32,381 INFO [train.py:1114] (2/4) Epoch 14, batch 5850, loss[loss=0.1897, simple_loss=0.2874, pruned_loss=0.04597, over 4535.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2749, pruned_loss=0.04846, over 937676.45 frames. ], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:42,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=185000.0, ans=0.5 +2024-07-28 17:45:53,583 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.02 vs. limit=15.0 +2024-07-28 17:45:57,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=185026.66666666666, ans=0.025 +2024-07-28 17:45:59,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.17 vs. limit=22.5 +2024-07-28 17:46:05,084 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.675e+01 6.318e+01 7.157e+01 1.040e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 17:46:05,801 INFO [train.py:1114] (2/4) Epoch 14, batch 5900, loss[loss=0.2071, simple_loss=0.2885, pruned_loss=0.06283, over 4674.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2742, pruned_loss=0.04839, over 938146.04 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:08,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.91 vs. 
limit=6.0 +2024-07-28 17:46:09,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=185053.33333333334, ans=0.2 +2024-07-28 17:46:09,631 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=185053.33333333334, ans=0.125 +2024-07-28 17:46:18,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185066.66666666666, ans=0.1 +2024-07-28 17:46:22,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=185080.0, ans=0.125 +2024-07-28 17:46:23,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=185080.0, ans=0.0 +2024-07-28 17:46:24,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=185080.0, ans=0.125 +2024-07-28 17:46:25,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=185080.0, ans=0.125 +2024-07-28 17:46:30,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=185093.33333333334, ans=0.125 +2024-07-28 17:46:31,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185093.33333333334, ans=0.125 +2024-07-28 17:46:33,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=185106.66666666666, ans=0.025 +2024-07-28 17:46:35,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185106.66666666666, ans=0.0 +2024-07-28 17:46:38,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185106.66666666666, ans=0.0 +2024-07-28 17:46:40,169 INFO [train.py:1114] (2/4) Epoch 14, batch 5950, loss[loss=0.1822, simple_loss=0.2709, pruned_loss=0.04679, over 4681.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2746, pruned_loss=0.04832, over 940205.62 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:40,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=185120.0, ans=0.0 +2024-07-28 17:47:04,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.36 vs. limit=15.0 +2024-07-28 17:47:12,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=185173.33333333334, ans=0.0 +2024-07-28 17:47:15,499 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.29 vs. limit=15.0 +2024-07-28 17:47:18,302 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.153e+01 5.664e+01 6.270e+01 7.000e+01 1.010e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:47:19,017 INFO [train.py:1114] (2/4) Epoch 14, batch 6000, loss[loss=0.2193, simple_loss=0.2965, pruned_loss=0.07102, over 4119.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2739, pruned_loss=0.0483, over 937320.59 frames. 
], batch size: 25, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:47:19,017 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 17:48:55,377 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.3907, 2.9821, 2.7856, 2.7646], device='cuda:2') +2024-07-28 17:48:55,839 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.0576, 3.5172, 3.3995, 3.0060, 3.7094, 3.4503, 3.6726, 3.2266], + device='cuda:2') +2024-07-28 17:49:17,897 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1656, simple_loss=0.2686, pruned_loss=0.03133, over 944034.00 frames. +2024-07-28 17:49:17,898 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 17:49:18,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=185186.66666666666, ans=0.125 +2024-07-28 17:49:22,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=185186.66666666666, ans=0.0 +2024-07-28 17:49:27,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=185200.0, ans=0.0 +2024-07-28 17:49:30,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=185200.0, ans=0.125 +2024-07-28 17:49:37,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185226.66666666666, ans=0.1 +2024-07-28 17:49:42,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=185226.66666666666, ans=0.2 +2024-07-28 17:49:50,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=185240.0, ans=0.125 +2024-07-28 17:49:51,985 INFO [train.py:1114] (2/4) Epoch 14, batch 6050, loss[loss=0.1359, simple_loss=0.2237, pruned_loss=0.02406, over 4771.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2727, pruned_loss=0.04787, over 938410.09 frames. ], batch size: 12, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:49:54,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=185253.33333333334, ans=0.125 +2024-07-28 17:49:58,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=185266.66666666666, ans=0.0 +2024-07-28 17:50:03,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185266.66666666666, ans=0.125 +2024-07-28 17:50:08,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.49 vs. limit=12.0 +2024-07-28 17:50:10,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.96 vs. 
limit=10.0 +2024-07-28 17:50:15,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=185293.33333333334, ans=0.125 +2024-07-28 17:50:26,095 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.501e+01 6.141e+01 7.204e+01 9.755e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 17:50:26,793 INFO [train.py:1114] (2/4) Epoch 14, batch 6100, loss[loss=0.1927, simple_loss=0.2875, pruned_loss=0.04891, over 4677.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2722, pruned_loss=0.04755, over 938182.08 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:50:31,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=185320.0, ans=0.0 +2024-07-28 17:50:42,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=185346.66666666666, ans=0.0 +2024-07-28 17:50:44,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185346.66666666666, ans=0.1 +2024-07-28 17:50:48,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=185360.0, ans=0.025 +2024-07-28 17:50:53,464 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:51:00,625 INFO [train.py:1114] (2/4) Epoch 14, batch 6150, loss[loss=0.2152, simple_loss=0.3028, pruned_loss=0.06376, over 3364.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2731, pruned_loss=0.04781, over 936800.04 frames. ], batch size: 38, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:51:11,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=185400.0, ans=0.2 +2024-07-28 17:51:24,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=185426.66666666666, ans=0.0 +2024-07-28 17:51:33,269 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.520e+01 6.468e+01 7.669e+01 1.156e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 17:51:33,902 INFO [train.py:1114] (2/4) Epoch 14, batch 6200, loss[loss=0.1818, simple_loss=0.2641, pruned_loss=0.04977, over 4731.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04736, over 935989.62 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:51:44,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=185466.66666666666, ans=0.125 +2024-07-28 17:51:48,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=185480.0, ans=0.0 +2024-07-28 17:52:08,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=185506.66666666666, ans=0.125 +2024-07-28 17:52:08,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=185506.66666666666, ans=0.0 +2024-07-28 17:52:11,501 INFO [train.py:1114] (2/4) Epoch 14, batch 6250, loss[loss=0.1643, simple_loss=0.2594, pruned_loss=0.0346, over 4808.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2725, pruned_loss=0.04772, over 932762.83 frames. 
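The validation pass logged a few entries above (train.py:1137 and train.py:1146, around batch 6000) reports loss=0.1656 over 944034 frames plus the peak CUDA memory so far, and the zipformer.py:1858 lines alongside it dump attn_weights_entropy tensors, one value per attention module, summarizing how diffuse the self-attention weights are. A sketch of that diagnostic follows, assuming weights that form a probability distribution over the last axis; the shape convention is an assumption, not taken from zipformer.py.

    import torch

    # Illustrative attention-entropy diagnostic: mean Shannon entropy of the
    # attention distribution, one value per head. Assumes attn of shape
    # (num_heads, query_len, key_len) normalized over the last axis.
    def attn_weights_entropy(attn: torch.Tensor,
                             eps: float = 1e-20) -> torch.Tensor:
        ent = -(attn * (attn + eps).log()).sum(dim=-1)  # (heads, queries)
        return ent.mean(dim=-1)                         # (heads,)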
], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:18,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.09 vs. limit=10.0 +2024-07-28 17:52:26,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5 +2024-07-28 17:52:35,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=185560.0, ans=0.125 +2024-07-28 17:52:35,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=185560.0, ans=0.0 +2024-07-28 17:52:38,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=185560.0, ans=0.2 +2024-07-28 17:52:44,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185573.33333333334, ans=0.1 +2024-07-28 17:52:46,202 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.580e+01 6.337e+01 7.212e+01 1.101e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 17:52:46,921 INFO [train.py:1114] (2/4) Epoch 14, batch 6300, loss[loss=0.1822, simple_loss=0.2716, pruned_loss=0.04638, over 4535.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2732, pruned_loss=0.04803, over 929505.14 frames. ], batch size: 10, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:47,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=185586.66666666666, ans=0.0 +2024-07-28 17:52:50,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=185586.66666666666, ans=0.0 +2024-07-28 17:52:57,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185600.0, ans=0.1 +2024-07-28 17:53:01,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=185613.33333333334, ans=0.125 +2024-07-28 17:53:03,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.35 vs. limit=15.0 +2024-07-28 17:53:05,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185626.66666666666, ans=0.125 +2024-07-28 17:53:06,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=185626.66666666666, ans=0.0 +2024-07-28 17:53:10,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.58 vs. limit=15.0 +2024-07-28 17:53:19,507 INFO [train.py:1114] (2/4) Epoch 14, batch 6350, loss[loss=0.1858, simple_loss=0.2772, pruned_loss=0.04719, over 4449.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2721, pruned_loss=0.04767, over 933605.99 frames. 
], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:53:20,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185653.33333333334, ans=0.1 +2024-07-28 17:53:36,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=185680.0, ans=0.0 +2024-07-28 17:54:02,939 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.778e+01 6.430e+01 7.550e+01 1.026e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 17:54:03,650 INFO [train.py:1114] (2/4) Epoch 14, batch 6400, loss[loss=0.185, simple_loss=0.2827, pruned_loss=0.04363, over 4639.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2731, pruned_loss=0.04799, over 934871.28 frames. ], batch size: 13, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:54:25,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185760.0, ans=0.125 +2024-07-28 17:54:27,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=185760.0, ans=0.2 +2024-07-28 17:54:39,385 INFO [train.py:1114] (2/4) Epoch 14, batch 6450, loss[loss=0.2199, simple_loss=0.3044, pruned_loss=0.06771, over 4464.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2745, pruned_loss=0.0484, over 938734.99 frames. ], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:09,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185840.0, ans=0.125 +2024-07-28 17:55:10,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.62 vs. limit=5.0 +2024-07-28 17:55:11,351 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.736e+01 6.499e+01 7.740e+01 1.076e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 17:55:12,079 INFO [train.py:1114] (2/4) Epoch 14, batch 6500, loss[loss=0.2427, simple_loss=0.3122, pruned_loss=0.08661, over 3523.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2729, pruned_loss=0.04789, over 940040.79 frames. ], batch size: 35, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:19,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=185866.66666666666, ans=0.125 +2024-07-28 17:55:20,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=185866.66666666666, ans=0.025 +2024-07-28 17:55:47,200 INFO [train.py:1114] (2/4) Epoch 14, batch 6550, loss[loss=0.1424, simple_loss=0.2286, pruned_loss=0.02807, over 4802.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2723, pruned_loss=0.04722, over 942992.06 frames. 
], batch size: 11, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:03,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185946.66666666666, ans=0.125 +2024-07-28 17:56:07,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185960.0, ans=0.1 +2024-07-28 17:56:17,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=185973.33333333334, ans=0.0 +2024-07-28 17:56:19,459 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.641e+01 6.098e+01 6.852e+01 1.074e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:56:20,138 INFO [train.py:1114] (2/4) Epoch 14, batch 6600, loss[loss=0.1723, simple_loss=0.2604, pruned_loss=0.04211, over 4930.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2723, pruned_loss=0.04718, over 944908.24 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:24,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185986.66666666666, ans=0.125 +2024-07-28 17:56:41,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=186026.66666666666, ans=0.2 +2024-07-28 17:56:50,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=186040.0, ans=0.0 +2024-07-28 17:56:53,744 INFO [train.py:1114] (2/4) Epoch 14, batch 6650, loss[loss=0.2033, simple_loss=0.2941, pruned_loss=0.05626, over 4659.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2721, pruned_loss=0.04657, over 944077.34 frames. ], batch size: 17, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:55,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186053.33333333334, ans=0.125 +2024-07-28 17:56:59,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=186066.66666666666, ans=0.0 +2024-07-28 17:57:08,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=186080.0, ans=0.0 +2024-07-28 17:57:13,234 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.40 vs. limit=12.0 +2024-07-28 17:57:27,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=186106.66666666666, ans=0.0 +2024-07-28 17:57:30,076 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 5.766e+01 6.384e+01 7.192e+01 1.160e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 17:57:30,770 INFO [train.py:1114] (2/4) Epoch 14, batch 6700, loss[loss=0.239, simple_loss=0.3175, pruned_loss=0.08023, over 4737.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2722, pruned_loss=0.04674, over 942901.37 frames. 
], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:57:36,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=186120.0, ans=0.125 +2024-07-28 17:57:40,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=186133.33333333334, ans=0.1 +2024-07-28 17:57:42,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=186133.33333333334, ans=0.125 +2024-07-28 17:57:53,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.96 vs. limit=15.0 +2024-07-28 17:57:59,318 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.62 vs. limit=22.5 +2024-07-28 17:58:06,972 INFO [train.py:1114] (2/4) Epoch 14, batch 6750, loss[loss=0.2114, simple_loss=0.3072, pruned_loss=0.05785, over 4254.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2722, pruned_loss=0.04727, over 940970.40 frames. ], batch size: 26, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:10,210 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:58:10,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=186186.66666666666, ans=0.125 +2024-07-28 17:58:28,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=186226.66666666666, ans=0.125 +2024-07-28 17:58:30,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.10 vs. limit=15.0 +2024-07-28 17:58:31,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=186226.66666666666, ans=0.125 +2024-07-28 17:58:34,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0 +2024-07-28 17:58:34,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.32 vs. limit=6.0 +2024-07-28 17:58:38,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.62 vs. limit=15.0 +2024-07-28 17:58:40,455 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.569e+01 6.146e+01 7.020e+01 9.338e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 17:58:41,135 INFO [train.py:1114] (2/4) Epoch 14, batch 6800, loss[loss=0.2057, simple_loss=0.2903, pruned_loss=0.06061, over 4638.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2727, pruned_loss=0.04756, over 939128.60 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:44,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186253.33333333334, ans=0.125 +2024-07-28 17:58:45,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.13 vs. 
limit=22.5 +2024-07-28 17:58:56,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=186280.0, ans=0.125 +2024-07-28 17:58:56,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=186280.0, ans=0.2 +2024-07-28 17:59:10,239 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0 +2024-07-28 17:59:11,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=186306.66666666666, ans=0.05 +2024-07-28 17:59:11,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=186306.66666666666, ans=0.125 +2024-07-28 17:59:12,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=186306.66666666666, ans=0.0 +2024-07-28 17:59:13,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186306.66666666666, ans=0.125 +2024-07-28 17:59:14,775 INFO [train.py:1114] (2/4) Epoch 14, batch 6850, loss[loss=0.1957, simple_loss=0.2866, pruned_loss=0.05241, over 4686.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2727, pruned_loss=0.04726, over 940820.88 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:59:17,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.48 vs. limit=22.5 +2024-07-28 17:59:18,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=186320.0, ans=0.0 +2024-07-28 17:59:19,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.52 vs. limit=15.0 +2024-07-28 17:59:20,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=186333.33333333334, ans=0.0 +2024-07-28 17:59:22,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=186333.33333333334, ans=0.2 +2024-07-28 17:59:25,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=186333.33333333334, ans=0.125 +2024-07-28 17:59:30,579 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.61 vs. limit=10.0 +2024-07-28 17:59:32,363 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:59:36,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.05 vs. limit=15.0 +2024-07-28 17:59:47,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.85 vs. 
limit=15.0 +2024-07-28 17:59:47,796 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.718e+01 6.199e+01 6.949e+01 1.067e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 17:59:48,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186386.66666666666, ans=0.125 +2024-07-28 17:59:48,490 INFO [train.py:1114] (2/4) Epoch 14, batch 6900, loss[loss=0.1953, simple_loss=0.2865, pruned_loss=0.05208, over 4963.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2734, pruned_loss=0.0476, over 943477.77 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:59:51,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=186386.66666666666, ans=0.125 +2024-07-28 17:59:52,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=186386.66666666666, ans=0.0 +2024-07-28 17:59:53,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=186386.66666666666, ans=0.2 +2024-07-28 17:59:53,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.12 vs. limit=6.0 +2024-07-28 18:00:03,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=186413.33333333334, ans=0.125 +2024-07-28 18:00:06,496 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:00:13,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=186426.66666666666, ans=0.09899494936611666 +2024-07-28 18:00:22,515 INFO [train.py:1114] (2/4) Epoch 14, batch 6950, loss[loss=0.1483, simple_loss=0.2338, pruned_loss=0.03141, over 4500.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2733, pruned_loss=0.04796, over 940498.86 frames. ], batch size: 10, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:24,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=186453.33333333334, ans=0.0 +2024-07-28 18:00:35,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=186480.0, ans=0.0 +2024-07-28 18:00:37,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=186480.0, ans=0.2 +2024-07-28 18:00:40,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=186480.0, ans=0.2 +2024-07-28 18:00:43,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=186493.33333333334, ans=0.125 +2024-07-28 18:00:54,795 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.637e+01 6.195e+01 7.111e+01 9.946e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 18:00:55,476 INFO [train.py:1114] (2/4) Epoch 14, batch 7000, loss[loss=0.1716, simple_loss=0.2703, pruned_loss=0.0364, over 4614.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2723, pruned_loss=0.04786, over 938722.53 frames. 
], batch size: 17, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:59,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=186520.0, ans=0.025 +2024-07-28 18:01:03,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186533.33333333334, ans=0.0 +2024-07-28 18:01:16,738 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:01:21,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186573.33333333334, ans=0.1 +2024-07-28 18:01:21,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=186573.33333333334, ans=0.0 +2024-07-28 18:01:25,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=186573.33333333334, ans=0.125 +2024-07-28 18:01:28,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186586.66666666666, ans=0.1 +2024-07-28 18:01:28,536 INFO [train.py:1114] (2/4) Epoch 14, batch 7050, loss[loss=0.2002, simple_loss=0.3027, pruned_loss=0.04885, over 4682.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2723, pruned_loss=0.04754, over 941804.96 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:01:30,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.54 vs. limit=22.5 +2024-07-28 18:01:32,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186586.66666666666, ans=0.1 +2024-07-28 18:01:33,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=186586.66666666666, ans=0.125 +2024-07-28 18:01:40,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=186600.0, ans=0.025 +2024-07-28 18:01:57,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=186640.0, ans=0.125 +2024-07-28 18:02:02,020 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.31 vs. limit=22.5 +2024-07-28 18:02:03,025 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.702e+01 6.224e+01 7.168e+01 1.076e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 18:02:03,701 INFO [train.py:1114] (2/4) Epoch 14, batch 7100, loss[loss=0.179, simple_loss=0.2784, pruned_loss=0.03983, over 4801.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2726, pruned_loss=0.04748, over 937042.88 frames. 
], batch size: 15, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:02:25,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=186666.66666666666, ans=0.125 +2024-07-28 18:02:28,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=186680.0, ans=0.125 +2024-07-28 18:02:33,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=186693.33333333334, ans=0.0 +2024-07-28 18:02:38,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186693.33333333334, ans=0.1 +2024-07-28 18:02:46,761 INFO [train.py:1114] (2/4) Epoch 14, batch 7150, loss[loss=0.1775, simple_loss=0.2641, pruned_loss=0.04546, over 4464.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2706, pruned_loss=0.04684, over 938005.69 frames. ], batch size: 21, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:03:26,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=186733.33333333334, ans=0.125 +2024-07-28 18:03:26,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186733.33333333334, ans=0.1 +2024-07-28 18:03:50,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186746.66666666666, ans=0.1 +2024-07-28 18:03:53,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=186746.66666666666, ans=0.0 +2024-07-28 18:04:03,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.89 vs. limit=22.5 +2024-07-28 18:04:08,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=25.96 vs. limit=15.0 +2024-07-28 18:04:08,748 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0 +2024-07-28 18:04:20,570 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.638e+01 6.134e+01 6.924e+01 9.250e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 18:04:21,222 INFO [train.py:1114] (2/4) Epoch 14, batch 7200, loss[loss=0.1792, simple_loss=0.2717, pruned_loss=0.04339, over 4799.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2716, pruned_loss=0.0472, over 938197.45 frames. 
], batch size: 15, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:04:27,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=186786.66666666666, ans=0.125 +2024-07-28 18:04:45,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=186800.0, ans=0.125 +2024-07-28 18:04:52,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=186813.33333333334, ans=0.0 +2024-07-28 18:05:00,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=186826.66666666666, ans=0.2 +2024-07-28 18:05:25,087 INFO [train.py:1114] (2/4) Epoch 14, batch 7250, loss[loss=0.1861, simple_loss=0.2733, pruned_loss=0.04945, over 4851.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2715, pruned_loss=0.04713, over 939851.98 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 32.0 +2024-07-28 18:05:25,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=186853.33333333334, ans=0.0 +2024-07-28 18:05:45,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186866.66666666666, ans=0.125 +2024-07-28 18:05:47,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=186880.0, ans=0.125 +2024-07-28 18:05:51,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=186880.0, ans=0.0 +2024-07-28 18:05:52,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=186880.0, ans=0.0 +2024-07-28 18:06:09,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=186906.66666666666, ans=0.125 +2024-07-28 18:06:11,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=186906.66666666666, ans=0.0 +2024-07-28 18:06:13,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.624e+01 6.211e+01 6.890e+01 1.048e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 18:06:13,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186920.0, ans=0.125 +2024-07-28 18:06:14,138 INFO [train.py:1114] (2/4) Epoch 14, batch 7300, loss[loss=0.1346, simple_loss=0.2159, pruned_loss=0.02664, over 4846.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2712, pruned_loss=0.04697, over 940214.13 frames. 
], batch size: 12, lr: 5.30e-03, grad_scale: 32.0 +2024-07-28 18:06:15,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186920.0, ans=0.1 +2024-07-28 18:06:26,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186933.33333333334, ans=0.1 +2024-07-28 18:06:27,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=186946.66666666666, ans=0.125 +2024-07-28 18:06:34,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=186960.0, ans=0.125 +2024-07-28 18:06:36,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0 +2024-07-28 18:06:38,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186960.0, ans=0.1 +2024-07-28 18:06:42,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=186973.33333333334, ans=0.025 +2024-07-28 18:06:44,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=186973.33333333334, ans=0.2 +2024-07-28 18:06:46,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=186986.66666666666, ans=0.125 +2024-07-28 18:06:46,845 INFO [train.py:1114] (2/4) Epoch 14, batch 7350, loss[loss=0.174, simple_loss=0.2576, pruned_loss=0.04521, over 4640.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2713, pruned_loss=0.0468, over 939292.93 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:06:55,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=187000.0, ans=0.125 +2024-07-28 18:06:59,947 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.45 vs. limit=15.0 +2024-07-28 18:07:06,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187013.33333333334, ans=0.1 +2024-07-28 18:07:07,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.35 vs. limit=15.0 +2024-07-28 18:07:12,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=187026.66666666666, ans=0.1 +2024-07-28 18:07:21,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.480e+01 5.943e+01 6.743e+01 9.456e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 18:07:22,482 INFO [train.py:1114] (2/4) Epoch 14, batch 7400, loss[loss=0.2066, simple_loss=0.2934, pruned_loss=0.05989, over 4694.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2721, pruned_loss=0.04679, over 940431.99 frames. 
], batch size: 13, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:07:24,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=187053.33333333334, ans=0.0 +2024-07-28 18:07:39,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 18:07:40,235 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:07:48,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=187106.66666666666, ans=0.1 +2024-07-28 18:07:49,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187106.66666666666, ans=0.1 +2024-07-28 18:07:54,557 INFO [train.py:1114] (2/4) Epoch 14, batch 7450, loss[loss=0.1673, simple_loss=0.2469, pruned_loss=0.0439, over 4608.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2716, pruned_loss=0.04688, over 937486.90 frames. ], batch size: 11, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:07:56,395 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.45 vs. limit=15.0 +2024-07-28 18:08:03,862 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:08:04,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=187133.33333333334, ans=10.0 +2024-07-28 18:08:07,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0 +2024-07-28 18:08:11,147 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.11 vs. limit=10.0 +2024-07-28 18:08:18,909 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.02 vs. limit=22.5 +2024-07-28 18:08:20,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=187160.0, ans=0.125 +2024-07-28 18:08:26,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=187173.33333333334, ans=0.015 +2024-07-28 18:08:26,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=187173.33333333334, ans=0.125 +2024-07-28 18:08:26,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187173.33333333334, ans=0.125 +2024-07-28 18:08:28,505 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.589e+01 6.200e+01 6.910e+01 1.220e+02, threshold=1.240e+02, percent-clipped=1.0 +2024-07-28 18:08:29,222 INFO [train.py:1114] (2/4) Epoch 14, batch 7500, loss[loss=0.2464, simple_loss=0.3145, pruned_loss=0.08911, over 3512.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2736, pruned_loss=0.04759, over 935812.13 frames. 
], batch size: 35, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:08:31,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=187186.66666666666, ans=0.025 +2024-07-28 18:08:40,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187200.0, ans=0.1 +2024-07-28 18:09:17,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187240.0, ans=0.125 +2024-07-28 18:09:19,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=187240.0, ans=0.0 +2024-07-28 18:09:23,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=187253.33333333334, ans=0.2 +2024-07-28 18:09:23,924 INFO [train.py:1114] (2/4) Epoch 14, batch 7550, loss[loss=0.2148, simple_loss=0.3051, pruned_loss=0.06225, over 4609.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2756, pruned_loss=0.04828, over 936131.40 frames. ], batch size: 17, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:09:35,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.23 vs. limit=15.0 +2024-07-28 18:09:50,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=187306.66666666666, ans=0.125 +2024-07-28 18:09:53,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=187306.66666666666, ans=0.5 +2024-07-28 18:09:56,071 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.167e+01 5.585e+01 6.021e+01 6.660e+01 1.005e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 18:09:56,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=187320.0, ans=0.0 +2024-07-28 18:09:56,708 INFO [train.py:1114] (2/4) Epoch 14, batch 7600, loss[loss=0.1854, simple_loss=0.2873, pruned_loss=0.04178, over 4805.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2749, pruned_loss=0.04814, over 937778.04 frames. ], batch size: 14, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:10:01,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187320.0, ans=0.0 +2024-07-28 18:10:03,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=187333.33333333334, ans=0.2 +2024-07-28 18:10:04,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=187333.33333333334, ans=0.0 +2024-07-28 18:10:05,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=187333.33333333334, ans=0.2 +2024-07-28 18:10:05,479 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:10:16,667 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.41 vs. 
limit=22.5 +2024-07-28 18:10:18,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=187360.0, ans=0.0 +2024-07-28 18:10:19,538 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:10:21,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=187360.0, ans=0.125 +2024-07-28 18:10:29,869 INFO [train.py:1114] (2/4) Epoch 14, batch 7650, loss[loss=0.1558, simple_loss=0.2341, pruned_loss=0.0388, over 4946.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2742, pruned_loss=0.04822, over 936706.26 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:10:36,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.60 vs. limit=15.0 +2024-07-28 18:10:43,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187413.33333333334, ans=0.125 +2024-07-28 18:10:47,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=187413.33333333334, ans=0.125 +2024-07-28 18:10:54,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187426.66666666666, ans=0.125 +2024-07-28 18:11:02,119 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.638e+01 6.341e+01 7.114e+01 1.063e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 18:11:02,796 INFO [train.py:1114] (2/4) Epoch 14, batch 7700, loss[loss=0.1994, simple_loss=0.3003, pruned_loss=0.04922, over 4689.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2739, pruned_loss=0.04777, over 934142.58 frames. ], batch size: 13, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:11:09,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=187453.33333333334, ans=0.0 +2024-07-28 18:11:48,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=187520.0, ans=0.2 +2024-07-28 18:11:48,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=187520.0, ans=0.125 +2024-07-28 18:11:48,693 INFO [train.py:1114] (2/4) Epoch 14, batch 7750, loss[loss=0.1866, simple_loss=0.2791, pruned_loss=0.04702, over 4935.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2751, pruned_loss=0.04845, over 935237.72 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:11:57,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.31 vs. limit=22.5 +2024-07-28 18:11:58,493 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. 
limit=15.0 +2024-07-28 18:12:05,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=187546.66666666666, ans=0.0 +2024-07-28 18:12:22,672 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 5.694e+01 6.124e+01 6.801e+01 8.564e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 18:12:24,027 INFO [train.py:1114] (2/4) Epoch 14, batch 7800, loss[loss=0.1953, simple_loss=0.2889, pruned_loss=0.05085, over 4675.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2754, pruned_loss=0.04848, over 936878.71 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:12:29,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=187600.0, ans=0.04949747468305833 +2024-07-28 18:13:17,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=187653.33333333334, ans=0.2 +2024-07-28 18:13:18,100 INFO [train.py:1114] (2/4) Epoch 14, batch 7850, loss[loss=0.1647, simple_loss=0.2398, pruned_loss=0.04477, over 4517.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2751, pruned_loss=0.04804, over 936020.19 frames. ], batch size: 10, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:13:28,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=187666.66666666666, ans=0.0 +2024-07-28 18:13:33,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=187680.0, ans=0.1 +2024-07-28 18:13:39,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=187693.33333333334, ans=0.025 +2024-07-28 18:13:51,320 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.677e+01 6.181e+01 6.848e+01 9.012e+01, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 18:13:51,944 INFO [train.py:1114] (2/4) Epoch 14, batch 7900, loss[loss=0.1828, simple_loss=0.2725, pruned_loss=0.04652, over 4871.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2756, pruned_loss=0.04826, over 933456.43 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:14:06,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=187746.66666666666, ans=0.0 +2024-07-28 18:14:08,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=187746.66666666666, ans=0.125 +2024-07-28 18:14:25,562 INFO [train.py:1114] (2/4) Epoch 14, batch 7950, loss[loss=0.2389, simple_loss=0.316, pruned_loss=0.08088, over 3325.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2749, pruned_loss=0.04758, over 935124.64 frames. ], batch size: 35, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:14:31,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187800.0, ans=0.125 +2024-07-28 18:14:40,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.61 vs. 
limit=15.0 +2024-07-28 18:14:43,343 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:14:45,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=187826.66666666666, ans=0.0 +2024-07-28 18:15:01,012 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.795e+01 6.761e+01 7.899e+01 1.107e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 18:15:01,060 INFO [train.py:1114] (2/4) Epoch 14, batch 8000, loss[loss=0.1555, simple_loss=0.2287, pruned_loss=0.04119, over 4624.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2738, pruned_loss=0.04725, over 934462.41 frames. ], batch size: 11, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:15:04,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=187853.33333333334, ans=0.0 +2024-07-28 18:15:20,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187880.0, ans=0.1 +2024-07-28 18:15:25,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=187893.33333333334, ans=0.125 +2024-07-28 18:15:26,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=187893.33333333334, ans=0.1 +2024-07-28 18:15:27,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=187893.33333333334, ans=0.2 +2024-07-28 18:16:05,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=187906.66666666666, ans=0.05 +2024-07-28 18:16:08,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=187906.66666666666, ans=0.0 +2024-07-28 18:16:09,699 INFO [train.py:1114] (2/4) Epoch 14, batch 8050, loss[loss=0.1507, simple_loss=0.2424, pruned_loss=0.02945, over 4809.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2735, pruned_loss=0.04712, over 934232.71 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:16:10,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=187920.0, ans=0.125 +2024-07-28 18:16:21,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=187933.33333333334, ans=0.0 +2024-07-28 18:16:22,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. limit=15.0 +2024-07-28 18:16:27,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187946.66666666666, ans=0.0 +2024-07-28 18:16:30,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=187960.0, ans=0.125 +2024-07-28 18:16:38,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.73 vs. 
limit=15.0 +2024-07-28 18:16:39,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=187973.33333333334, ans=0.0 +2024-07-28 18:16:41,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.834e+01 6.714e+01 7.875e+01 1.229e+02, threshold=1.343e+02, percent-clipped=0.0 +2024-07-28 18:16:41,863 INFO [train.py:1114] (2/4) Epoch 14, batch 8100, loss[loss=0.197, simple_loss=0.2836, pruned_loss=0.05516, over 4793.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2744, pruned_loss=0.04701, over 933870.91 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:16:51,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=188000.0, ans=0.0 +2024-07-28 18:17:08,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=188040.0, ans=10.0 +2024-07-28 18:17:14,588 INFO [train.py:1114] (2/4) Epoch 14, batch 8150, loss[loss=0.1891, simple_loss=0.2879, pruned_loss=0.04511, over 4806.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2743, pruned_loss=0.04735, over 937353.85 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:17:31,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188080.0, ans=0.1 +2024-07-28 18:17:31,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=188080.0, ans=0.0 +2024-07-28 18:17:33,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=188080.0, ans=0.2 +2024-07-28 18:17:46,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188106.66666666666, ans=0.1 +2024-07-28 18:17:49,786 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.661e+01 6.103e+01 6.886e+01 9.464e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 18:17:49,819 INFO [train.py:1114] (2/4) Epoch 14, batch 8200, loss[loss=0.1687, simple_loss=0.2506, pruned_loss=0.04339, over 4798.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2735, pruned_loss=0.04668, over 938428.55 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:18:02,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=188146.66666666666, ans=0.125 +2024-07-28 18:18:09,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=188160.0, ans=0.0 +2024-07-28 18:18:14,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=188160.0, ans=0.0 +2024-07-28 18:18:17,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=188173.33333333334, ans=0.2 +2024-07-28 18:18:21,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=12.0 +2024-07-28 18:18:22,388 INFO [train.py:1114] (2/4) Epoch 14, batch 8250, loss[loss=0.1808, simple_loss=0.2693, pruned_loss=0.04618, over 4900.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.274, pruned_loss=0.04747, over 938845.87 frames. 
], batch size: 13, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:18:29,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=188200.0, ans=0.025 +2024-07-28 18:18:30,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=188200.0, ans=0.025 +2024-07-28 18:18:34,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=188200.0, ans=0.125 +2024-07-28 18:18:52,544 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 18:18:54,601 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.616e+01 6.253e+01 7.427e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 18:18:54,634 INFO [train.py:1114] (2/4) Epoch 14, batch 8300, loss[loss=0.2192, simple_loss=0.2962, pruned_loss=0.07109, over 4909.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.275, pruned_loss=0.04837, over 938720.51 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:19:00,207 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.68 vs. limit=10.0 +2024-07-28 18:19:02,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=188266.66666666666, ans=0.0 +2024-07-28 18:19:09,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=188280.0, ans=0.125 +2024-07-28 18:19:13,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.87 vs. limit=15.0 +2024-07-28 18:19:25,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=188306.66666666666, ans=0.0 +2024-07-28 18:19:28,364 INFO [train.py:1114] (2/4) Epoch 14, batch 8350, loss[loss=0.1746, simple_loss=0.2646, pruned_loss=0.04235, over 4803.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2748, pruned_loss=0.04781, over 941411.75 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:19:29,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=188320.0, ans=0.0 +2024-07-28 18:19:35,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=188333.33333333334, ans=0.0 +2024-07-28 18:19:52,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=188360.0, ans=0.025 +2024-07-28 18:19:56,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=188373.33333333334, ans=0.125 +2024-07-28 18:19:58,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.51 vs. 
limit=15.0 +2024-07-28 18:20:00,912 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 5.977e+01 6.680e+01 9.102e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 18:20:00,960 INFO [train.py:1114] (2/4) Epoch 14, batch 8400, loss[loss=0.1835, simple_loss=0.2666, pruned_loss=0.05018, over 4775.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2749, pruned_loss=0.04807, over 940007.59 frames. ], batch size: 12, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:20:10,889 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:20:20,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.95 vs. limit=15.0 +2024-07-28 18:20:22,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.04 vs. limit=22.5 +2024-07-28 18:20:23,207 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.05 vs. limit=15.0 +2024-07-28 18:20:24,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.49 vs. limit=12.0 +2024-07-28 18:20:26,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188426.66666666666, ans=0.125 +2024-07-28 18:20:33,550 INFO [train.py:1114] (2/4) Epoch 14, batch 8450, loss[loss=0.1741, simple_loss=0.2776, pruned_loss=0.03524, over 4802.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2754, pruned_loss=0.04824, over 938684.98 frames. 
], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:20:35,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188453.33333333334, ans=0.1 +2024-07-28 18:20:36,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=188453.33333333334, ans=0.125 +2024-07-28 18:20:37,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=188453.33333333334, ans=0.02 +2024-07-28 18:20:38,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=188453.33333333334, ans=0.2 +2024-07-28 18:20:48,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=188480.0, ans=0.125 +2024-07-28 18:20:49,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188480.0, ans=0.125 +2024-07-28 18:20:50,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=188480.0, ans=0.125 +2024-07-28 18:21:00,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188506.66666666666, ans=0.125 +2024-07-28 18:21:05,575 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 6.107e+01 6.939e+01 8.222e+01 1.191e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-28 18:21:05,608 INFO [train.py:1114] (2/4) Epoch 14, batch 8500, loss[loss=0.1902, simple_loss=0.2654, pruned_loss=0.0575, over 4612.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2741, pruned_loss=0.04785, over 938585.65 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:21:05,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188520.0, ans=0.125 +2024-07-28 18:21:06,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=188520.0, ans=0.1 +2024-07-28 18:21:11,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=188533.33333333334, ans=0.125 +2024-07-28 18:21:18,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.16 vs. 
limit=12.0 +2024-07-28 18:21:26,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=188560.0, ans=0.125 +2024-07-28 18:21:29,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188560.0, ans=0.125 +2024-07-28 18:21:29,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=188560.0, ans=0.125 +2024-07-28 18:21:34,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=188573.33333333334, ans=0.125 +2024-07-28 18:21:37,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=188586.66666666666, ans=0.2 +2024-07-28 18:21:38,568 INFO [train.py:1114] (2/4) Epoch 14, batch 8550, loss[loss=0.1736, simple_loss=0.2546, pruned_loss=0.04624, over 4792.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2731, pruned_loss=0.04735, over 939465.38 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:21:44,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.40 vs. limit=15.0 +2024-07-28 18:22:08,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=188613.33333333334, ans=0.2 +2024-07-28 18:22:09,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=188613.33333333334, ans=0.125 +2024-07-28 18:22:11,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.68 vs. limit=10.0 +2024-07-28 18:22:12,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188626.66666666666, ans=0.1 +2024-07-28 18:22:13,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=188626.66666666666, ans=0.025 +2024-07-28 18:22:26,144 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.708e+01 6.188e+01 7.242e+01 1.269e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 18:22:26,192 INFO [train.py:1114] (2/4) Epoch 14, batch 8600, loss[loss=0.1878, simple_loss=0.2779, pruned_loss=0.04888, over 4795.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2725, pruned_loss=0.04712, over 939043.28 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:22:54,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=188693.33333333334, ans=0.0 +2024-07-28 18:22:56,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=188693.33333333334, ans=0.0 +2024-07-28 18:23:06,723 INFO [train.py:1114] (2/4) Epoch 14, batch 8650, loss[loss=0.2225, simple_loss=0.3063, pruned_loss=0.06938, over 4903.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2717, pruned_loss=0.04658, over 940083.78 frames. 
], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:23:07,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=188720.0, ans=0.0 +2024-07-28 18:23:11,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=188720.0, ans=0.2 +2024-07-28 18:23:23,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=188746.66666666666, ans=0.0 +2024-07-28 18:23:23,994 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0 +2024-07-28 18:23:24,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=15.0 +2024-07-28 18:23:26,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188760.0, ans=0.125 +2024-07-28 18:23:27,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=188760.0, ans=0.2 +2024-07-28 18:23:27,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=188760.0, ans=0.05 +2024-07-28 18:23:38,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.50 vs. limit=15.0 +2024-07-28 18:23:38,964 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.747e+01 6.354e+01 7.150e+01 1.051e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 18:23:39,011 INFO [train.py:1114] (2/4) Epoch 14, batch 8700, loss[loss=0.1602, simple_loss=0.2508, pruned_loss=0.03482, over 4768.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2721, pruned_loss=0.04674, over 937820.64 frames. ], batch size: 13, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:23:44,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=188786.66666666666, ans=0.125 +2024-07-28 18:23:45,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=188800.0, ans=0.125 +2024-07-28 18:24:11,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188840.0, ans=0.125 +2024-07-28 18:24:13,590 INFO [train.py:1114] (2/4) Epoch 14, batch 8750, loss[loss=0.1719, simple_loss=0.2645, pruned_loss=0.03972, over 4689.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.272, pruned_loss=0.04674, over 936138.99 frames. 
], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:24:20,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=188866.66666666666, ans=0.125 +2024-07-28 18:24:24,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=188866.66666666666, ans=0.025 +2024-07-28 18:24:26,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=188880.0, ans=0.2 +2024-07-28 18:24:36,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=188893.33333333334, ans=0.05 +2024-07-28 18:24:37,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=188893.33333333334, ans=0.0 +2024-07-28 18:24:45,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.401e+01 5.577e+01 5.996e+01 6.718e+01 9.459e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 18:24:45,740 INFO [train.py:1114] (2/4) Epoch 14, batch 8800, loss[loss=0.2081, simple_loss=0.3114, pruned_loss=0.05238, over 4933.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2728, pruned_loss=0.04665, over 937161.68 frames. ], batch size: 14, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:24:51,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=188933.33333333334, ans=0.04949747468305833 +2024-07-28 18:24:51,901 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.13 vs. limit=10.0 +2024-07-28 18:24:59,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=188946.66666666666, ans=0.125 +2024-07-28 18:25:01,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=188946.66666666666, ans=0.0 +2024-07-28 18:25:04,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188946.66666666666, ans=0.1 +2024-07-28 18:25:11,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=188973.33333333334, ans=0.125 +2024-07-28 18:25:14,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=188973.33333333334, ans=0.0 +2024-07-28 18:25:16,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=188973.33333333334, ans=0.2 +2024-07-28 18:25:17,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=188973.33333333334, ans=0.025 +2024-07-28 18:25:18,764 INFO [train.py:1114] (2/4) Epoch 14, batch 8850, loss[loss=0.2423, simple_loss=0.3289, pruned_loss=0.07783, over 4557.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2727, pruned_loss=0.04651, over 931749.34 frames. 
], batch size: 21, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:25:25,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=189000.0, ans=0.2 +2024-07-28 18:25:30,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=189000.0, ans=0.0 +2024-07-28 18:25:37,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189026.66666666666, ans=0.125 +2024-07-28 18:25:51,402 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.385e+01 5.694e+01 6.232e+01 7.298e+01 9.650e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 18:25:51,436 INFO [train.py:1114] (2/4) Epoch 14, batch 8900, loss[loss=0.1668, simple_loss=0.2466, pruned_loss=0.04355, over 4923.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2731, pruned_loss=0.04688, over 929035.74 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:25:54,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=189053.33333333334, ans=0.125 +2024-07-28 18:25:56,230 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.89 vs. limit=6.0 +2024-07-28 18:26:05,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189080.0, ans=0.125 +2024-07-28 18:26:14,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=189093.33333333334, ans=0.0 +2024-07-28 18:26:21,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0 +2024-07-28 18:26:22,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=189106.66666666666, ans=0.015 +2024-07-28 18:26:22,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=189106.66666666666, ans=0.0 +2024-07-28 18:26:24,556 INFO [train.py:1114] (2/4) Epoch 14, batch 8950, loss[loss=0.2002, simple_loss=0.294, pruned_loss=0.05316, over 4523.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2731, pruned_loss=0.04653, over 929847.97 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:26:25,020 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.15 vs. limit=6.0 +2024-07-28 18:26:26,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=189120.0, ans=0.95 +2024-07-28 18:26:44,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.99 vs. 
limit=15.0 +2024-07-28 18:26:44,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=189160.0, ans=0.2 +2024-07-28 18:26:46,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=189160.0, ans=0.0 +2024-07-28 18:26:57,062 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.595e+01 6.149e+01 7.157e+01 9.804e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 18:26:57,109 INFO [train.py:1114] (2/4) Epoch 14, batch 9000, loss[loss=0.165, simple_loss=0.2593, pruned_loss=0.03539, over 4640.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.272, pruned_loss=0.04613, over 932887.45 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:26:57,109 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 18:27:02,196 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.8633, 2.0534, 3.6871, 2.2066], device='cuda:2') +2024-07-28 18:27:02,477 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.1900, 2.2890, 3.9868, 1.9720], device='cuda:2') +2024-07-28 18:27:08,999 INFO [train.py:1146] (2/4) Epoch 14, validation: loss=0.1644, simple_loss=0.2676, pruned_loss=0.03058, over 944034.00 frames. +2024-07-28 18:27:09,000 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 18:27:16,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=189200.0, ans=0.125 +2024-07-28 18:27:17,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=189200.0, ans=0.0 +2024-07-28 18:27:36,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=189240.0, ans=0.125 +2024-07-28 18:27:37,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=189240.0, ans=0.0 +2024-07-28 18:27:38,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.50 vs. limit=15.0 +2024-07-28 18:27:42,004 INFO [train.py:1114] (2/4) Epoch 14, batch 9050, loss[loss=0.1642, simple_loss=0.247, pruned_loss=0.0407, over 4523.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.271, pruned_loss=0.04595, over 933914.15 frames. ], batch size: 10, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:27:53,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189266.66666666666, ans=0.1 +2024-07-28 18:27:57,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.18 vs. 
limit=15.0 +2024-07-28 18:27:58,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=189280.0, ans=0.95 +2024-07-28 18:28:04,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189293.33333333334, ans=0.1 +2024-07-28 18:28:05,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=189293.33333333334, ans=0.5 +2024-07-28 18:28:14,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.696e+01 6.239e+01 6.974e+01 1.014e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 18:28:14,877 INFO [train.py:1114] (2/4) Epoch 14, batch 9100, loss[loss=0.1919, simple_loss=0.2833, pruned_loss=0.05028, over 4934.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2714, pruned_loss=0.04602, over 936586.65 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:28:19,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=189320.0, ans=0.125 +2024-07-28 18:28:19,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=189320.0, ans=0.07 +2024-07-28 18:28:43,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=189360.0, ans=0.035 +2024-07-28 18:28:48,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189373.33333333334, ans=0.125 +2024-07-28 18:28:50,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=189373.33333333334, ans=0.025 +2024-07-28 18:28:51,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.98 vs. limit=15.0 +2024-07-28 18:28:54,496 INFO [train.py:1114] (2/4) Epoch 14, batch 9150, loss[loss=0.1849, simple_loss=0.284, pruned_loss=0.04291, over 4814.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2737, pruned_loss=0.04704, over 935430.46 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:29:02,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.91 vs. limit=15.0 +2024-07-28 18:29:03,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=189400.0, ans=0.0 +2024-07-28 18:29:04,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189400.0, ans=0.125 +2024-07-28 18:29:27,779 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.765e+01 6.391e+01 7.042e+01 1.009e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 18:29:27,827 INFO [train.py:1114] (2/4) Epoch 14, batch 9200, loss[loss=0.1692, simple_loss=0.2554, pruned_loss=0.04149, over 4852.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2737, pruned_loss=0.04742, over 936970.54 frames. 
], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:29:29,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=189453.33333333334, ans=0.0 +2024-07-28 18:29:33,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=189453.33333333334, ans=0.125 +2024-07-28 18:29:52,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=189506.66666666666, ans=0.125 +2024-07-28 18:30:02,100 INFO [train.py:1114] (2/4) Epoch 14, batch 9250, loss[loss=0.1849, simple_loss=0.2839, pruned_loss=0.04296, over 4638.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2738, pruned_loss=0.04742, over 937577.90 frames. ], batch size: 13, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:30:07,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=189520.0, ans=0.2 +2024-07-28 18:30:15,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189546.66666666666, ans=0.125 +2024-07-28 18:30:19,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.10 vs. limit=6.0 +2024-07-28 18:30:20,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=189546.66666666666, ans=0.2 +2024-07-28 18:30:20,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=189546.66666666666, ans=0.125 +2024-07-28 18:30:23,981 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.96 vs. limit=5.0 +2024-07-28 18:30:31,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=189573.33333333334, ans=0.2 +2024-07-28 18:30:38,955 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.722e+01 6.128e+01 6.836e+01 1.013e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 18:30:39,002 INFO [train.py:1114] (2/4) Epoch 14, batch 9300, loss[loss=0.1878, simple_loss=0.2579, pruned_loss=0.05883, over 4776.00 frames. ], tot_loss[loss=0.184, simple_loss=0.273, pruned_loss=0.04746, over 937235.54 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0 +2024-07-28 18:30:44,832 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=12.0 +2024-07-28 18:30:50,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=189600.0, ans=0.2 +2024-07-28 18:30:52,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=189613.33333333334, ans=0.125 +2024-07-28 18:35:01,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=189640.0, ans=0.0 +2024-07-28 18:35:05,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. 
limit=15.0 +2024-07-28 18:35:05,509 INFO [train.py:1114] (2/4) Epoch 14, batch 9350, loss[loss=0.1539, simple_loss=0.2355, pruned_loss=0.03619, over 4803.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2728, pruned_loss=0.04689, over 934260.33 frames. ], batch size: 11, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:35:17,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=189666.66666666666, ans=0.0 +2024-07-28 18:35:17,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189666.66666666666, ans=0.125 +2024-07-28 18:35:21,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189680.0, ans=0.125 +2024-07-28 18:35:26,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=189693.33333333334, ans=0.125 +2024-07-28 18:35:33,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=189706.66666666666, ans=0.125 +2024-07-28 18:35:38,438 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.700e+01 6.300e+01 7.033e+01 1.050e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 18:35:39,330 INFO [train.py:1114] (2/4) Epoch 14, batch 9400, loss[loss=0.181, simple_loss=0.2767, pruned_loss=0.04263, over 4690.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2732, pruned_loss=0.04752, over 932715.29 frames. ], batch size: 13, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:35:41,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=189720.0, ans=0.0 +2024-07-28 18:35:43,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.88 vs. limit=15.0 +2024-07-28 18:35:44,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=189720.0, ans=0.125 +2024-07-28 18:35:48,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=189733.33333333334, ans=0.0 +2024-07-28 18:35:53,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189746.66666666666, ans=0.125 +2024-07-28 18:36:03,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=189760.0, ans=0.2 +2024-07-28 18:36:05,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.42 vs. limit=15.0 +2024-07-28 18:36:09,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.46 vs. limit=15.0 +2024-07-28 18:36:12,504 INFO [train.py:1114] (2/4) Epoch 14, batch 9450, loss[loss=0.144, simple_loss=0.2222, pruned_loss=0.03284, over 4810.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2733, pruned_loss=0.04751, over 932176.52 frames. 
], batch size: 11, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:36:18,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189800.0, ans=0.125 +2024-07-28 18:36:19,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=189800.0, ans=0.0 +2024-07-28 18:36:21,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 18:36:21,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=189800.0, ans=0.1 +2024-07-28 18:36:22,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189800.0, ans=0.1 +2024-07-28 18:36:22,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189800.0, ans=0.0 +2024-07-28 18:36:25,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=189813.33333333334, ans=0.2 +2024-07-28 18:36:43,736 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.560e+01 6.240e+01 6.918e+01 1.034e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 18:36:43,784 INFO [train.py:1114] (2/4) Epoch 14, batch 9500, loss[loss=0.1631, simple_loss=0.2604, pruned_loss=0.03289, over 4705.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2727, pruned_loss=0.04695, over 934424.24 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:36:43,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=189853.33333333334, ans=0.2 +2024-07-28 18:36:43,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=189853.33333333334, ans=0.025 +2024-07-28 18:36:46,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0 +2024-07-28 18:37:05,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=189893.33333333334, ans=0.125 +2024-07-28 18:37:07,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=189893.33333333334, ans=0.2 +2024-07-28 18:37:14,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=189920.0, ans=0.95 +2024-07-28 18:37:15,309 INFO [train.py:1114] (2/4) Epoch 14, batch 9550, loss[loss=0.1677, simple_loss=0.2552, pruned_loss=0.04009, over 4765.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2722, pruned_loss=0.04711, over 931206.72 frames. 
], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:37:20,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=189920.0, ans=0.0 +2024-07-28 18:37:24,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=189933.33333333334, ans=0.0 +2024-07-28 18:37:27,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.66 vs. limit=15.0 +2024-07-28 18:37:29,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=189946.66666666666, ans=0.125 +2024-07-28 18:37:38,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=189960.0, ans=0.125 +2024-07-28 18:37:44,087 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:37:46,551 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+01 5.773e+01 6.422e+01 7.521e+01 1.253e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-28 18:37:46,598 INFO [train.py:1114] (2/4) Epoch 14, batch 9600, loss[loss=0.1917, simple_loss=0.2675, pruned_loss=0.05791, over 3270.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2722, pruned_loss=0.04706, over 930185.41 frames. ], batch size: 35, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:37:48,039 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=189986.66666666666, ans=0.125 +2024-07-28 18:37:50,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=189986.66666666666, ans=0.0 +2024-07-28 18:37:50,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=189986.66666666666, ans=0.2 +2024-07-28 18:37:58,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.82 vs. limit=15.0 +2024-07-28 18:38:08,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=190026.66666666666, ans=0.125 +2024-07-28 18:38:12,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.28 vs. limit=12.0 +2024-07-28 18:38:17,667 INFO [train.py:1114] (2/4) Epoch 14, batch 9650, loss[loss=0.1789, simple_loss=0.2748, pruned_loss=0.0415, over 4840.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2722, pruned_loss=0.04723, over 925987.36 frames. 
], batch size: 16, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:38:19,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=190053.33333333334, ans=0.125 +2024-07-28 18:38:30,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190080.0, ans=0.125 +2024-07-28 18:38:41,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=190093.33333333334, ans=0.125 +2024-07-28 18:38:43,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190106.66666666666, ans=0.125 +2024-07-28 18:38:44,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=190106.66666666666, ans=0.07 +2024-07-28 18:38:48,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.24 vs. limit=22.5 +2024-07-28 18:38:49,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=190120.0, ans=0.125 +2024-07-28 18:38:49,747 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.692e+01 6.329e+01 7.195e+01 1.065e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 18:38:49,780 INFO [train.py:1114] (2/4) Epoch 14, batch 9700, loss[loss=0.2149, simple_loss=0.2998, pruned_loss=0.06503, over 4283.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2728, pruned_loss=0.04718, over 924415.25 frames. ], batch size: 25, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:38:52,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=15.0 +2024-07-28 18:38:56,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=190133.33333333334, ans=0.125 +2024-07-28 18:38:59,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=190133.33333333334, ans=0.0 +2024-07-28 18:39:02,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=190146.66666666666, ans=0.2 +2024-07-28 18:39:06,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.36 vs. limit=22.5 +2024-07-28 18:39:08,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.18 vs. limit=15.0 +2024-07-28 18:39:14,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=190173.33333333334, ans=0.125 +2024-07-28 18:39:14,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.91 vs. limit=10.0 +2024-07-28 18:39:20,732 INFO [train.py:1114] (2/4) Epoch 14, batch 9750, loss[loss=0.1937, simple_loss=0.2819, pruned_loss=0.05276, over 4687.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2729, pruned_loss=0.04712, over 925403.58 frames. 
], batch size: 15, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:39:28,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=190200.0, ans=0.0 +2024-07-28 18:39:28,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=190200.0, ans=0.125 +2024-07-28 18:39:28,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=190200.0, ans=0.1 +2024-07-28 18:39:44,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=190226.66666666666, ans=0.125 +2024-07-28 18:39:50,467 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.02 vs. limit=22.5 +2024-07-28 18:39:51,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.96 vs. limit=15.0 +2024-07-28 18:39:51,824 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.577e+01 6.285e+01 7.276e+01 9.873e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 18:39:51,857 INFO [train.py:1114] (2/4) Epoch 14, batch 9800, loss[loss=0.163, simple_loss=0.2597, pruned_loss=0.03318, over 4710.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2722, pruned_loss=0.04708, over 925275.59 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:39:56,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=190253.33333333334, ans=0.0 +2024-07-28 18:40:10,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=12.0 +2024-07-28 18:40:10,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=190293.33333333334, ans=0.2 +2024-07-28 18:40:14,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=190293.33333333334, ans=6.0 +2024-07-28 18:40:23,795 INFO [train.py:1114] (2/4) Epoch 14, batch 9850, loss[loss=0.171, simple_loss=0.2727, pruned_loss=0.03468, over 4900.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2717, pruned_loss=0.04657, over 927979.55 frames. 
], batch size: 15, lr: 5.26e-03, grad_scale: 32.0 +2024-07-28 18:40:28,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=190320.0, ans=0.0 +2024-07-28 18:40:32,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=190333.33333333334, ans=22.5 +2024-07-28 18:40:38,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190346.66666666666, ans=0.0 +2024-07-28 18:40:42,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=190360.0, ans=0.05 +2024-07-28 18:40:54,571 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.802e+01 6.503e+01 7.443e+01 1.103e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 18:40:54,604 INFO [train.py:1114] (2/4) Epoch 14, batch 9900, loss[loss=0.1851, simple_loss=0.2763, pruned_loss=0.04693, over 4838.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2727, pruned_loss=0.04725, over 927354.49 frames. ], batch size: 16, lr: 5.25e-03, grad_scale: 32.0 +2024-07-28 18:40:55,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190386.66666666666, ans=0.1 +2024-07-28 18:41:00,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=190400.0, ans=0.125 +2024-07-28 18:41:04,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=190400.0, ans=0.0 +2024-07-28 18:41:09,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=190413.33333333334, ans=0.025 +2024-07-28 18:41:15,992 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.23 vs. limit=15.0 +2024-07-28 18:41:17,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=190426.66666666666, ans=0.1 +2024-07-28 18:41:17,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0 +2024-07-28 18:41:18,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190440.0, ans=0.1 +2024-07-28 18:41:25,169 INFO [train.py:1114] (2/4) Epoch 14, batch 9950, loss[loss=0.1547, simple_loss=0.2378, pruned_loss=0.03581, over 4800.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2722, pruned_loss=0.04735, over 930004.75 frames. 
], batch size: 11, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:41:25,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=190453.33333333334, ans=0.125 +2024-07-28 18:41:28,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=190453.33333333334, ans=0.0 +2024-07-28 18:41:28,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=190453.33333333334, ans=0.125 +2024-07-28 18:41:32,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190466.66666666666, ans=0.1 +2024-07-28 18:41:35,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=190466.66666666666, ans=0.0 +2024-07-28 18:41:41,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=190480.0, ans=0.0 +2024-07-28 18:41:44,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=190493.33333333334, ans=0.125 +2024-07-28 18:41:44,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.58 vs. limit=22.5 +2024-07-28 18:41:48,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190493.33333333334, ans=0.1 +2024-07-28 18:41:51,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=190506.66666666666, ans=0.125 +2024-07-28 18:41:56,000 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 6.024e+01 6.660e+01 7.558e+01 1.184e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 18:41:56,033 INFO [train.py:1114] (2/4) Epoch 14, batch 10000, loss[loss=0.2129, simple_loss=0.3214, pruned_loss=0.05221, over 4632.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2754, pruned_loss=0.0486, over 927820.49 frames. ], batch size: 16, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:42:02,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=190533.33333333334, ans=0.0 +2024-07-28 18:42:03,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=190533.33333333334, ans=0.2 +2024-07-28 18:42:05,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=190533.33333333334, ans=0.0 +2024-07-28 18:42:26,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=190573.33333333334, ans=0.125 +2024-07-28 18:42:28,112 INFO [train.py:1114] (2/4) Epoch 14, batch 10050, loss[loss=0.2344, simple_loss=0.3196, pruned_loss=0.07455, over 3115.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2796, pruned_loss=0.05084, over 915963.90 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:42:28,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190586.66666666666, ans=0.1 +2024-07-28 18:42:36,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.07 vs. 
limit=15.0 +2024-07-28 18:42:47,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=190613.33333333334, ans=0.2 +2024-07-28 18:42:57,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=190640.0, ans=0.0 +2024-07-28 18:42:59,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=190640.0, ans=0.125 +2024-07-28 18:43:02,249 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.109e+01 6.099e+01 7.025e+01 7.577e+01 1.043e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-28 18:43:02,282 INFO [train.py:1114] (2/4) Epoch 14, batch 10100, loss[loss=0.253, simple_loss=0.31, pruned_loss=0.09802, over 3487.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2843, pruned_loss=0.05589, over 864000.30 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:43:09,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190666.66666666666, ans=0.125 +2024-07-28 18:43:15,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190680.0, ans=0.1 +2024-07-28 18:43:24,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=190693.33333333334, ans=0.125 +2024-07-28 18:43:34,826 INFO [train.py:1114] (2/4) Epoch 14, batch 10150, loss[loss=0.2278, simple_loss=0.2993, pruned_loss=0.07818, over 3555.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2881, pruned_loss=0.05977, over 823898.29 frames. ], batch size: 37, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:43:37,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=190720.0, ans=0.125 +2024-07-28 18:43:40,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=190720.0, ans=0.125 +2024-07-28 18:43:48,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=190746.66666666666, ans=0.025 +2024-07-28 18:44:01,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=190773.33333333334, ans=0.125 +2024-07-28 18:44:06,123 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.761e+01 6.519e+01 6.916e+01 7.399e+01 9.914e+01, threshold=1.383e+02, percent-clipped=0.0 +2024-07-28 18:44:06,156 INFO [train.py:1114] (2/4) Epoch 14, batch 10200, loss[loss=0.2336, simple_loss=0.3185, pruned_loss=0.07431, over 3228.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2903, pruned_loss=0.06248, over 791279.28 frames. 
], batch size: 36, lr: 5.25e-03, grad_scale: 64.0 +2024-07-28 18:44:09,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=190786.66666666666, ans=15.0 +2024-07-28 18:44:15,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190800.0, ans=0.125 +2024-07-28 18:44:17,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=190800.0, ans=0.02 +2024-07-28 18:45:00,512 INFO [train.py:1114] (2/4) Epoch 15, batch 0, loss[loss=0.1452, simple_loss=0.2386, pruned_loss=0.02585, over 4852.00 frames. ], tot_loss[loss=0.1452, simple_loss=0.2386, pruned_loss=0.02585, over 4852.00 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:45:00,512 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 18:45:12,053 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1655, simple_loss=0.2703, pruned_loss=0.03031, over 944034.00 frames. +2024-07-28 18:45:12,054 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 18:45:12,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=190816.0, ans=0.2 +2024-07-28 18:45:13,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=190816.0, ans=0.125 +2024-07-28 18:45:17,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=190816.0, ans=0.125 +2024-07-28 18:45:27,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=190842.66666666666, ans=0.125 +2024-07-28 18:45:37,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190856.0, ans=0.125 +2024-07-28 18:45:44,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=190869.33333333334, ans=0.125 +2024-07-28 18:45:49,205 INFO [train.py:1114] (2/4) Epoch 15, batch 50, loss[loss=0.1631, simple_loss=0.2523, pruned_loss=0.03696, over 4617.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2747, pruned_loss=0.04756, over 206418.86 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:45:51,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=190882.66666666666, ans=0.125 +2024-07-28 18:45:53,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=190882.66666666666, ans=0.125 +2024-07-28 18:46:07,817 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.683e+01 6.465e+01 7.180e+01 1.067e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 18:46:33,574 INFO [train.py:1114] (2/4) Epoch 15, batch 100, loss[loss=0.1858, simple_loss=0.2688, pruned_loss=0.05138, over 4636.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.276, pruned_loss=0.04735, over 365169.39 frames. 
], batch size: 12, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:46:35,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190949.33333333334, ans=0.1 +2024-07-28 18:46:43,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.93 vs. limit=15.0 +2024-07-28 18:46:46,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=15.0 +2024-07-28 18:47:07,601 INFO [train.py:1114] (2/4) Epoch 15, batch 150, loss[loss=0.139, simple_loss=0.2184, pruned_loss=0.02974, over 4615.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2729, pruned_loss=0.04717, over 493880.88 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:47:26,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191042.66666666666, ans=0.1 +2024-07-28 18:47:28,159 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.436e+01 5.988e+01 6.579e+01 9.241e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 18:47:42,918 INFO [train.py:1114] (2/4) Epoch 15, batch 200, loss[loss=0.1828, simple_loss=0.2778, pruned_loss=0.04387, over 4529.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2725, pruned_loss=0.04647, over 593489.38 frames. ], batch size: 21, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:47:44,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191082.66666666666, ans=0.0 +2024-07-28 18:47:53,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.44 vs. limit=15.0 +2024-07-28 18:47:56,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=191096.0, ans=0.95 +2024-07-28 18:48:17,654 INFO [train.py:1114] (2/4) Epoch 15, batch 250, loss[loss=0.1959, simple_loss=0.2918, pruned_loss=0.05003, over 4644.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2721, pruned_loss=0.04698, over 670304.45 frames. ], batch size: 16, lr: 5.07e-03, grad_scale: 64.0 +2024-07-28 18:48:23,025 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.56 vs. limit=5.0 +2024-07-28 18:48:31,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.62 vs. 
limit=15.0 +2024-07-28 18:48:34,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=191176.0, ans=0.05 +2024-07-28 18:48:35,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=191176.0, ans=10.0 +2024-07-28 18:48:36,565 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.893e+01 6.659e+01 7.264e+01 1.310e+02, threshold=1.332e+02, percent-clipped=1.0 +2024-07-28 18:48:40,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=191189.33333333334, ans=0.125 +2024-07-28 18:48:43,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191189.33333333334, ans=0.125 +2024-07-28 18:48:51,305 INFO [train.py:1114] (2/4) Epoch 15, batch 300, loss[loss=0.1795, simple_loss=0.2628, pruned_loss=0.0481, over 4815.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2724, pruned_loss=0.04705, over 729729.67 frames. ], batch size: 15, lr: 5.06e-03, grad_scale: 64.0 +2024-07-28 18:48:53,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=191216.0, ans=0.025 +2024-07-28 18:48:57,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191229.33333333334, ans=0.125 +2024-07-28 18:49:01,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-28 18:49:06,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=191242.66666666666, ans=0.0 +2024-07-28 18:49:07,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=191242.66666666666, ans=0.07 +2024-07-28 18:49:14,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=191256.0, ans=0.0 +2024-07-28 18:49:21,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=191269.33333333334, ans=0.125 +2024-07-28 18:49:26,596 INFO [train.py:1114] (2/4) Epoch 15, batch 350, loss[loss=0.1534, simple_loss=0.2303, pruned_loss=0.03826, over 4946.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.273, pruned_loss=0.04697, over 775929.54 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 64.0 +2024-07-28 18:49:37,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=191296.0, ans=0.04949747468305833 +2024-07-28 18:49:43,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=191296.0, ans=0.025 +2024-07-28 18:49:46,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=191309.33333333334, ans=0.125 +2024-07-28 18:49:47,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.78 vs. 
limit=22.5 +2024-07-28 18:49:53,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=191309.33333333334, ans=0.5 +2024-07-28 18:49:56,401 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.550e+01 6.008e+01 7.215e+01 1.087e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 18:50:08,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=191336.0, ans=0.125 +2024-07-28 18:50:10,488 INFO [train.py:1114] (2/4) Epoch 15, batch 400, loss[loss=0.1684, simple_loss=0.2669, pruned_loss=0.03493, over 4700.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2717, pruned_loss=0.04634, over 813512.22 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:50:11,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191349.33333333334, ans=0.125 +2024-07-28 18:50:14,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.68 vs. limit=15.0 +2024-07-28 18:50:15,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=191349.33333333334, ans=0.2 +2024-07-28 18:50:18,546 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.87 vs. limit=15.0 +2024-07-28 18:50:18,630 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.67 vs. limit=22.5 +2024-07-28 18:50:41,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=191402.66666666666, ans=0.0 +2024-07-28 18:50:44,731 INFO [train.py:1114] (2/4) Epoch 15, batch 450, loss[loss=0.183, simple_loss=0.2668, pruned_loss=0.04959, over 4637.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2717, pruned_loss=0.04698, over 839209.84 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:50:49,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=191416.0, ans=0.2 +2024-07-28 18:50:49,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.73 vs. limit=12.0 +2024-07-28 18:50:58,178 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:50:58,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.67 vs. limit=22.5 +2024-07-28 18:50:59,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.09 vs. 
limit=10.0 +2024-07-28 18:51:00,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=191442.66666666666, ans=0.125 +2024-07-28 18:51:03,916 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.595e+01 6.045e+01 6.958e+01 9.344e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 18:51:06,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.19 vs. limit=22.5 +2024-07-28 18:51:11,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191469.33333333334, ans=0.0 +2024-07-28 18:51:11,951 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.89 vs. limit=15.0 +2024-07-28 18:51:18,093 INFO [train.py:1114] (2/4) Epoch 15, batch 500, loss[loss=0.1865, simple_loss=0.2815, pruned_loss=0.04571, over 4683.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2703, pruned_loss=0.0462, over 861588.86 frames. ], batch size: 15, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:51:24,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191482.66666666666, ans=0.1 +2024-07-28 18:51:31,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=191496.0, ans=0.125 +2024-07-28 18:51:31,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=191496.0, ans=0.125 +2024-07-28 18:51:32,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=191496.0, ans=0.5 +2024-07-28 18:51:45,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=191522.66666666666, ans=0.125 +2024-07-28 18:51:46,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191522.66666666666, ans=0.0 +2024-07-28 18:51:53,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.88 vs. limit=15.0 +2024-07-28 18:51:53,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=191549.33333333334, ans=0.0 +2024-07-28 18:51:54,344 INFO [train.py:1114] (2/4) Epoch 15, batch 550, loss[loss=0.1829, simple_loss=0.2696, pruned_loss=0.04815, over 4661.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2701, pruned_loss=0.04594, over 877657.31 frames. ], batch size: 17, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:51:54,786 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.97 vs. 
limit=15.0 +2024-07-28 18:52:11,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=191576.0, ans=0.2 +2024-07-28 18:52:13,586 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.498e+01 5.942e+01 6.485e+01 9.965e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 18:52:23,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=191602.66666666666, ans=0.1 +2024-07-28 18:52:27,548 INFO [train.py:1114] (2/4) Epoch 15, batch 600, loss[loss=0.1843, simple_loss=0.2701, pruned_loss=0.04923, over 4637.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2696, pruned_loss=0.04542, over 892041.63 frames. ], batch size: 16, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:52:34,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=191629.33333333334, ans=0.2 +2024-07-28 18:52:35,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=191629.33333333334, ans=0.125 +2024-07-28 18:52:36,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191629.33333333334, ans=0.1 +2024-07-28 18:52:38,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=191629.33333333334, ans=0.125 +2024-07-28 18:52:38,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=191629.33333333334, ans=0.125 +2024-07-28 18:52:48,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.47 vs. limit=15.0 +2024-07-28 18:52:51,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191656.0, ans=0.125 +2024-07-28 18:52:57,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=191669.33333333334, ans=0.04949747468305833 +2024-07-28 18:53:03,044 INFO [train.py:1114] (2/4) Epoch 15, batch 650, loss[loss=0.1641, simple_loss=0.2644, pruned_loss=0.03193, over 4768.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2692, pruned_loss=0.04539, over 903440.02 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:53:15,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=191709.33333333334, ans=10.0 +2024-07-28 18:53:22,455 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.431e+01 6.039e+01 6.829e+01 9.137e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 18:53:23,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=191722.66666666666, ans=0.125 +2024-07-28 18:53:38,722 INFO [train.py:1114] (2/4) Epoch 15, batch 700, loss[loss=0.1402, simple_loss=0.2368, pruned_loss=0.02175, over 4638.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.27, pruned_loss=0.04553, over 911596.97 frames. 
], batch size: 12, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:53:38,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191749.33333333334, ans=0.0 +2024-07-28 18:53:41,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191749.33333333334, ans=0.125 +2024-07-28 18:53:44,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191762.66666666666, ans=0.1 +2024-07-28 18:53:50,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=191762.66666666666, ans=0.125 +2024-07-28 18:53:52,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=191776.0, ans=0.125 +2024-07-28 18:53:54,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=191776.0, ans=0.125 +2024-07-28 18:53:57,810 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.64 vs. limit=15.0 +2024-07-28 18:54:07,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=191802.66666666666, ans=0.0 +2024-07-28 18:54:11,819 INFO [train.py:1114] (2/4) Epoch 15, batch 750, loss[loss=0.1741, simple_loss=0.2723, pruned_loss=0.03797, over 4697.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2704, pruned_loss=0.04561, over 918038.10 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:54:11,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191816.0, ans=0.1 +2024-07-28 18:54:15,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=191816.0, ans=0.125 +2024-07-28 18:54:17,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=191829.33333333334, ans=0.125 +2024-07-28 18:54:25,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191842.66666666666, ans=0.1 +2024-07-28 18:54:25,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=191842.66666666666, ans=0.0 +2024-07-28 18:54:31,057 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.222e+01 5.661e+01 6.305e+01 7.556e+01 1.211e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 18:54:38,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.41 vs. limit=15.0 +2024-07-28 18:54:45,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=191869.33333333334, ans=0.025 +2024-07-28 18:54:47,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=191882.66666666666, ans=0.125 +2024-07-28 18:54:48,307 INFO [train.py:1114] (2/4) Epoch 15, batch 800, loss[loss=0.1832, simple_loss=0.2709, pruned_loss=0.04772, over 4855.00 frames. 
], tot_loss[loss=0.1826, simple_loss=0.2722, pruned_loss=0.04653, over 922954.51 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 32.0 +2024-07-28 18:54:59,112 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191896.0, ans=0.0 +2024-07-28 18:55:20,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=191936.0, ans=0.0 +2024-07-28 18:55:23,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.74 vs. limit=22.5 +2024-07-28 18:55:23,933 INFO [train.py:1114] (2/4) Epoch 15, batch 850, loss[loss=0.1819, simple_loss=0.2769, pruned_loss=0.04347, over 4654.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2715, pruned_loss=0.04625, over 927221.60 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 18:55:38,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=191976.0, ans=0.025 +2024-07-28 18:55:43,399 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.257e+01 5.494e+01 6.079e+01 6.525e+01 1.058e+02, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 19:01:37,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=192002.66666666666, ans=0.0 +2024-07-28 19:01:39,934 INFO [train.py:1114] (2/4) Epoch 15, batch 900, loss[loss=0.1673, simple_loss=0.2462, pruned_loss=0.04423, over 4865.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2722, pruned_loss=0.04684, over 927985.76 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:07,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=192069.33333333334, ans=0.125 +2024-07-28 19:02:13,183 INFO [train.py:1114] (2/4) Epoch 15, batch 950, loss[loss=0.1772, simple_loss=0.2541, pruned_loss=0.0501, over 4787.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2725, pruned_loss=0.04712, over 929770.10 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:15,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.91 vs. limit=15.0 +2024-07-28 19:02:25,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=192109.33333333334, ans=0.07 +2024-07-28 19:02:34,367 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.740e+01 6.341e+01 7.414e+01 2.683e+02, threshold=1.268e+02, percent-clipped=1.0 +2024-07-28 19:02:36,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-07-28 19:02:38,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=192122.66666666666, ans=0.025 +2024-07-28 19:02:42,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.87 vs. 
limit=15.0 +2024-07-28 19:02:45,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=192136.0, ans=0.2 +2024-07-28 19:02:47,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.38 vs. limit=15.0 +2024-07-28 19:02:48,025 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:02:48,492 INFO [train.py:1114] (2/4) Epoch 15, batch 1000, loss[loss=0.1686, simple_loss=0.2682, pruned_loss=0.03451, over 4960.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2725, pruned_loss=0.04695, over 929462.55 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:04,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=192176.0, ans=0.125 +2024-07-28 19:03:05,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=192176.0, ans=0.125 +2024-07-28 19:03:22,088 INFO [train.py:1114] (2/4) Epoch 15, batch 1050, loss[loss=0.166, simple_loss=0.2652, pruned_loss=0.03338, over 4873.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2718, pruned_loss=0.04695, over 931842.45 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:33,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192229.33333333334, ans=0.1 +2024-07-28 19:03:33,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192229.33333333334, ans=0.0 +2024-07-28 19:03:34,822 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:03:36,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=192242.66666666666, ans=0.0 +2024-07-28 19:03:36,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=192242.66666666666, ans=0.125 +2024-07-28 19:03:39,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.34 vs. limit=15.0 +2024-07-28 19:03:41,200 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.436e+01 6.028e+01 6.736e+01 8.653e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:03:48,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=192269.33333333334, ans=0.2 +2024-07-28 19:03:50,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=192269.33333333334, ans=0.07 +2024-07-28 19:03:54,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=192282.66666666666, ans=0.5 +2024-07-28 19:03:55,266 INFO [train.py:1114] (2/4) Epoch 15, batch 1100, loss[loss=0.1818, simple_loss=0.2601, pruned_loss=0.05178, over 4892.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2714, pruned_loss=0.04654, over 934574.91 frames. 
], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:58,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=192282.66666666666, ans=0.125 +2024-07-28 19:04:00,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0 +2024-07-28 19:04:05,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=192296.0, ans=0.125 +2024-07-28 19:04:09,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=192309.33333333334, ans=0.125 +2024-07-28 19:04:25,173 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:04:28,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=192336.0, ans=0.0 +2024-07-28 19:04:30,211 INFO [train.py:1114] (2/4) Epoch 15, batch 1150, loss[loss=0.1756, simple_loss=0.261, pruned_loss=0.04514, over 4903.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2718, pruned_loss=0.04696, over 934798.10 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:04:31,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=192349.33333333334, ans=0.2 +2024-07-28 19:04:32,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=192349.33333333334, ans=0.125 +2024-07-28 19:04:51,446 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.625e+01 6.208e+01 7.192e+01 1.002e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 19:05:02,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-07-28 19:05:05,560 INFO [train.py:1114] (2/4) Epoch 15, batch 1200, loss[loss=0.1579, simple_loss=0.2557, pruned_loss=0.03005, over 4872.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.273, pruned_loss=0.04714, over 934041.25 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:09,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=192416.0, ans=0.125 +2024-07-28 19:05:11,335 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.86 vs. limit=22.5 +2024-07-28 19:05:36,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192469.33333333334, ans=0.0 +2024-07-28 19:05:37,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=192469.33333333334, ans=0.125 +2024-07-28 19:05:38,619 INFO [train.py:1114] (2/4) Epoch 15, batch 1250, loss[loss=0.2741, simple_loss=0.3492, pruned_loss=0.09952, over 4813.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2729, pruned_loss=0.04674, over 938025.57 frames. 
], batch size: 15, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:49,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=192496.0, ans=0.2 +2024-07-28 19:05:54,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192509.33333333334, ans=0.125 +2024-07-28 19:05:57,778 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.598e+01 6.184e+01 7.240e+01 1.147e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 19:06:02,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.44 vs. limit=15.0 +2024-07-28 19:06:12,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.34 vs. limit=15.0 +2024-07-28 19:06:13,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=192536.0, ans=0.2 +2024-07-28 19:06:14,167 INFO [train.py:1114] (2/4) Epoch 15, batch 1300, loss[loss=0.1838, simple_loss=0.274, pruned_loss=0.0468, over 4779.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2714, pruned_loss=0.04615, over 939405.84 frames. ], batch size: 19, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:37,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=192589.33333333334, ans=0.0 +2024-07-28 19:06:47,773 INFO [train.py:1114] (2/4) Epoch 15, batch 1350, loss[loss=0.1806, simple_loss=0.282, pruned_loss=0.03964, over 4760.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2706, pruned_loss=0.04542, over 941501.24 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:47,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=192616.0, ans=0.125 +2024-07-28 19:06:51,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=192616.0, ans=0.125 +2024-07-28 19:06:57,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=192629.33333333334, ans=0.1 +2024-07-28 19:07:07,203 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.559e+01 6.171e+01 7.538e+01 1.379e+02, threshold=1.234e+02, percent-clipped=1.0 +2024-07-28 19:07:08,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=12.0 +2024-07-28 19:07:14,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.59 vs. limit=15.0 +2024-07-28 19:07:14,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=10.0 +2024-07-28 19:07:20,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=192669.33333333334, ans=0.125 +2024-07-28 19:07:21,226 INFO [train.py:1114] (2/4) Epoch 15, batch 1400, loss[loss=0.1872, simple_loss=0.2804, pruned_loss=0.04705, over 4698.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2712, pruned_loss=0.04572, over 943249.80 frames. 
], batch size: 11, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:07:28,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=192696.0, ans=0.125 +2024-07-28 19:07:30,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.40 vs. limit=15.0 +2024-07-28 19:07:37,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=192709.33333333334, ans=0.05 +2024-07-28 19:07:37,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=192709.33333333334, ans=0.0 +2024-07-28 19:07:44,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=192722.66666666666, ans=0.0 +2024-07-28 19:07:53,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=192736.0, ans=0.125 +2024-07-28 19:07:56,461 INFO [train.py:1114] (2/4) Epoch 15, batch 1450, loss[loss=0.1793, simple_loss=0.2881, pruned_loss=0.03526, over 4688.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2718, pruned_loss=0.04597, over 943107.55 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:08:02,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192762.66666666666, ans=0.1 +2024-07-28 19:08:15,580 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+01 5.633e+01 5.995e+01 6.598e+01 8.860e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 19:08:28,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=192802.66666666666, ans=0.025 +2024-07-28 19:08:29,690 INFO [train.py:1114] (2/4) Epoch 15, batch 1500, loss[loss=0.1668, simple_loss=0.2635, pruned_loss=0.03507, over 4811.00 frames. ], tot_loss[loss=0.182, simple_loss=0.272, pruned_loss=0.04603, over 942319.30 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:08:31,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.14 vs. limit=22.5 +2024-07-28 19:08:42,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=192829.33333333334, ans=10.0 +2024-07-28 19:09:03,674 INFO [train.py:1114] (2/4) Epoch 15, batch 1550, loss[loss=0.1733, simple_loss=0.2753, pruned_loss=0.03565, over 4916.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.272, pruned_loss=0.04609, over 938689.38 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:08,654 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.42 vs. 
limit=22.5 +2024-07-28 19:09:11,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=192896.0, ans=0.0 +2024-07-28 19:09:14,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=192896.0, ans=0.0 +2024-07-28 19:09:21,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=192909.33333333334, ans=0.125 +2024-07-28 19:09:23,056 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.711e+01 6.284e+01 7.073e+01 1.043e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 19:09:23,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=192922.66666666666, ans=0.0 +2024-07-28 19:09:34,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.10 vs. limit=15.0 +2024-07-28 19:09:39,406 INFO [train.py:1114] (2/4) Epoch 15, batch 1600, loss[loss=0.2098, simple_loss=0.3185, pruned_loss=0.05059, over 4870.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.271, pruned_loss=0.04592, over 937173.36 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:49,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=192962.66666666666, ans=0.125 +2024-07-28 19:09:55,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=192976.0, ans=0.125 +2024-07-28 19:10:05,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=192989.33333333334, ans=0.025 +2024-07-28 19:10:05,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.57 vs. limit=22.5 +2024-07-28 19:10:07,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.31 vs. limit=22.5 +2024-07-28 19:10:10,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. limit=6.0 +2024-07-28 19:10:14,697 INFO [train.py:1114] (2/4) Epoch 15, batch 1650, loss[loss=0.1723, simple_loss=0.2808, pruned_loss=0.03193, over 4676.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2704, pruned_loss=0.04583, over 937217.29 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:10:17,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.50 vs. 
limit=22.5 +2024-07-28 19:10:28,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193042.66666666666, ans=0.125 +2024-07-28 19:10:28,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=193042.66666666666, ans=0.0 +2024-07-28 19:10:32,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=193042.66666666666, ans=0.0 +2024-07-28 19:10:33,849 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 5.642e+01 6.032e+01 7.016e+01 1.079e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:10:36,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=193056.0, ans=0.125 +2024-07-28 19:10:47,894 INFO [train.py:1114] (2/4) Epoch 15, batch 1700, loss[loss=0.1699, simple_loss=0.246, pruned_loss=0.0469, over 4706.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2703, pruned_loss=0.04566, over 938891.52 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:10:58,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193096.0, ans=0.0 +2024-07-28 19:10:58,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=193096.0, ans=0.125 +2024-07-28 19:11:06,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=193109.33333333334, ans=0.125 +2024-07-28 19:11:13,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=193122.66666666666, ans=0.0 +2024-07-28 19:11:17,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=193136.0, ans=0.2 +2024-07-28 19:11:21,431 INFO [train.py:1114] (2/4) Epoch 15, batch 1750, loss[loss=0.1614, simple_loss=0.2476, pruned_loss=0.03758, over 4807.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.271, pruned_loss=0.04619, over 940181.20 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:23,182 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.59 vs. 
limit=22.5 +2024-07-28 19:11:28,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193162.66666666666, ans=0.1 +2024-07-28 19:11:42,917 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.964e+01 5.710e+01 6.387e+01 7.487e+01 1.072e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 19:11:43,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=193189.33333333334, ans=0.1 +2024-07-28 19:11:45,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193189.33333333334, ans=0.125 +2024-07-28 19:11:47,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=193189.33333333334, ans=0.04949747468305833 +2024-07-28 19:11:52,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=193202.66666666666, ans=0.125 +2024-07-28 19:11:56,820 INFO [train.py:1114] (2/4) Epoch 15, batch 1800, loss[loss=0.2013, simple_loss=0.3022, pruned_loss=0.05018, over 4639.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2715, pruned_loss=0.0464, over 941119.39 frames. ], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:59,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=193216.0, ans=15.0 +2024-07-28 19:12:11,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=193242.66666666666, ans=0.125 +2024-07-28 19:12:20,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=193256.0, ans=0.0 +2024-07-28 19:12:29,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=193282.66666666666, ans=0.125 +2024-07-28 19:12:30,406 INFO [train.py:1114] (2/4) Epoch 15, batch 1850, loss[loss=0.2213, simple_loss=0.3057, pruned_loss=0.06842, over 4811.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2714, pruned_loss=0.04626, over 940953.76 frames. 
], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:12:40,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=193296.0, ans=0.125 +2024-07-28 19:12:41,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193296.0, ans=0.125 +2024-07-28 19:12:42,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=193296.0, ans=0.0 +2024-07-28 19:12:44,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193309.33333333334, ans=0.125 +2024-07-28 19:12:48,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=193309.33333333334, ans=0.0 +2024-07-28 19:12:50,213 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.579e+01 6.374e+01 7.062e+01 1.422e+02, threshold=1.275e+02, percent-clipped=2.0 +2024-07-28 19:12:53,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=193322.66666666666, ans=0.0 +2024-07-28 19:13:23,614 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.16 vs. limit=22.5 +2024-07-28 19:13:26,482 INFO [train.py:1114] (2/4) Epoch 15, batch 1900, loss[loss=0.1728, simple_loss=0.2697, pruned_loss=0.03795, over 4672.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2724, pruned_loss=0.04681, over 942181.63 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:13:30,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=193349.33333333334, ans=0.2 +2024-07-28 19:13:39,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=193362.66666666666, ans=0.0 +2024-07-28 19:13:41,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=193376.0, ans=0.5 +2024-07-28 19:13:50,108 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:14:01,212 INFO [train.py:1114] (2/4) Epoch 15, batch 1950, loss[loss=0.1619, simple_loss=0.256, pruned_loss=0.03391, over 4900.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2736, pruned_loss=0.04708, over 944258.41 frames. 
], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:14:04,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=193416.0, ans=0.125 +2024-07-28 19:14:14,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=193442.66666666666, ans=0.125 +2024-07-28 19:14:16,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=193442.66666666666, ans=0.0 +2024-07-28 19:14:20,687 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.675e+01 6.115e+01 6.828e+01 9.814e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 19:14:26,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193456.0, ans=0.125 +2024-07-28 19:14:34,658 INFO [train.py:1114] (2/4) Epoch 15, batch 2000, loss[loss=0.1569, simple_loss=0.2473, pruned_loss=0.03324, over 4808.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2745, pruned_loss=0.04743, over 941317.49 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:14:39,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=193482.66666666666, ans=0.2 +2024-07-28 19:14:40,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193482.66666666666, ans=0.125 +2024-07-28 19:15:05,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=193536.0, ans=22.5 +2024-07-28 19:15:08,846 INFO [train.py:1114] (2/4) Epoch 15, batch 2050, loss[loss=0.1349, simple_loss=0.2201, pruned_loss=0.02487, over 4617.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2733, pruned_loss=0.04762, over 939555.58 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:20,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193562.66666666666, ans=0.125 +2024-07-28 19:15:27,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=193576.0, ans=0.125 +2024-07-28 19:15:31,316 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.698e+01 6.282e+01 7.137e+01 1.040e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 19:15:34,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=193589.33333333334, ans=0.0 +2024-07-28 19:15:35,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193589.33333333334, ans=0.125 +2024-07-28 19:15:45,947 INFO [train.py:1114] (2/4) Epoch 15, batch 2100, loss[loss=0.2042, simple_loss=0.3018, pruned_loss=0.05324, over 4757.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2722, pruned_loss=0.04658, over 941656.31 frames. 
], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:58,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193629.33333333334, ans=0.0 +2024-07-28 19:16:01,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=193642.66666666666, ans=0.025 +2024-07-28 19:16:08,752 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.28 vs. limit=15.0 +2024-07-28 19:16:11,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=193656.0, ans=0.1 +2024-07-28 19:16:13,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=193656.0, ans=0.125 +2024-07-28 19:16:20,810 INFO [train.py:1114] (2/4) Epoch 15, batch 2150, loss[loss=0.1738, simple_loss=0.2721, pruned_loss=0.03775, over 4887.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2707, pruned_loss=0.0459, over 944545.53 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:24,961 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:16:29,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=193696.0, ans=0.0 +2024-07-28 19:16:31,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=193696.0, ans=0.025 +2024-07-28 19:16:34,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=193709.33333333334, ans=0.125 +2024-07-28 19:16:34,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=193709.33333333334, ans=0.2 +2024-07-28 19:16:38,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=193709.33333333334, ans=0.125 +2024-07-28 19:16:40,107 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.589e+01 6.207e+01 7.234e+01 9.865e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 19:16:43,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=193722.66666666666, ans=0.125 +2024-07-28 19:16:47,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=193736.0, ans=0.125 +2024-07-28 19:16:49,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.21 vs. limit=12.0 +2024-07-28 19:16:54,161 INFO [train.py:1114] (2/4) Epoch 15, batch 2200, loss[loss=0.1667, simple_loss=0.2664, pruned_loss=0.03356, over 4801.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2703, pruned_loss=0.04524, over 943267.09 frames. 
], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:54,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=193749.33333333334, ans=0.0 +2024-07-28 19:17:05,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=193762.66666666666, ans=0.125 +2024-07-28 19:17:12,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=193776.0, ans=0.125 +2024-07-28 19:17:13,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=193789.33333333334, ans=0.0 +2024-07-28 19:17:14,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=193789.33333333334, ans=0.125 +2024-07-28 19:17:43,136 INFO [train.py:1114] (2/4) Epoch 15, batch 2250, loss[loss=0.179, simple_loss=0.2793, pruned_loss=0.03935, over 4682.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2705, pruned_loss=0.04503, over 941967.19 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:17:43,976 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:17:52,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.14 vs. limit=22.5 +2024-07-28 19:18:00,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5 +2024-07-28 19:18:02,480 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.434e+01 5.887e+01 6.714e+01 1.189e+02, threshold=1.177e+02, percent-clipped=0.0 +2024-07-28 19:18:08,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.44 vs. limit=15.0 +2024-07-28 19:18:10,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193869.33333333334, ans=0.1 +2024-07-28 19:18:14,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=193869.33333333334, ans=0.125 +2024-07-28 19:18:16,481 INFO [train.py:1114] (2/4) Epoch 15, batch 2300, loss[loss=0.178, simple_loss=0.2626, pruned_loss=0.04674, over 4945.00 frames. ], tot_loss[loss=0.18, simple_loss=0.27, pruned_loss=0.04504, over 940038.07 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:18:17,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193882.66666666666, ans=0.125 +2024-07-28 19:18:26,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.52 vs. limit=22.5 +2024-07-28 19:18:31,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=193909.33333333334, ans=0.0 +2024-07-28 19:18:40,703 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=6.0 +2024-07-28 19:18:48,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=193936.0, ans=0.5 +2024-07-28 19:18:49,564 INFO [train.py:1114] (2/4) Epoch 15, batch 2350, loss[loss=0.1937, simple_loss=0.2789, pruned_loss=0.05429, over 4634.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2696, pruned_loss=0.04515, over 942057.55 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:18:49,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=193949.33333333334, ans=0.2 +2024-07-28 19:19:02,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=193962.66666666666, ans=0.125 +2024-07-28 19:19:04,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=193962.66666666666, ans=0.025 +2024-07-28 19:19:10,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193976.0, ans=0.125 +2024-07-28 19:19:13,026 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.729e+01 6.304e+01 7.186e+01 9.939e+01, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 19:19:16,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=193989.33333333334, ans=0.125 +2024-07-28 19:19:25,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=194002.66666666666, ans=0.0 +2024-07-28 19:19:27,911 INFO [train.py:1114] (2/4) Epoch 15, batch 2400, loss[loss=0.1417, simple_loss=0.2352, pruned_loss=0.02412, over 4635.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.271, pruned_loss=0.04558, over 941468.52 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:28,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.47 vs. limit=15.0 +2024-07-28 19:19:30,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=194016.0, ans=0.0 +2024-07-28 19:19:38,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=194029.33333333334, ans=0.125 +2024-07-28 19:19:42,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194042.66666666666, ans=0.125 +2024-07-28 19:19:44,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=194042.66666666666, ans=0.2 +2024-07-28 19:19:47,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=194056.0, ans=0.125 +2024-07-28 19:19:48,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=194056.0, ans=0.2 +2024-07-28 19:20:01,303 INFO [train.py:1114] (2/4) Epoch 15, batch 2450, loss[loss=0.1583, simple_loss=0.2661, pruned_loss=0.02529, over 4689.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2725, pruned_loss=0.04593, over 937640.05 frames. 
], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:03,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.89 vs. limit=15.0 +2024-07-28 19:20:06,157 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:20:17,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.42 vs. limit=15.0 +2024-07-28 19:20:21,699 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.395e+01 5.722e+01 6.164e+01 6.844e+01 9.609e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 19:20:24,261 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.56 vs. limit=15.0 +2024-07-28 19:20:26,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=194122.66666666666, ans=0.125 +2024-07-28 19:20:27,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=194122.66666666666, ans=0.2 +2024-07-28 19:20:28,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=194136.0, ans=0.0 +2024-07-28 19:20:31,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.03 vs. limit=10.0 +2024-07-28 19:20:32,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=194136.0, ans=0.2 +2024-07-28 19:20:35,261 INFO [train.py:1114] (2/4) Epoch 15, batch 2500, loss[loss=0.1908, simple_loss=0.2764, pruned_loss=0.05262, over 4813.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2727, pruned_loss=0.04612, over 939462.07 frames. 
], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:37,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=194149.33333333334, ans=0.125 +2024-07-28 19:20:39,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=194149.33333333334, ans=0.125 +2024-07-28 19:20:48,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=194176.0, ans=0.125 +2024-07-28 19:20:50,333 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:20:50,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=194176.0, ans=0.125 +2024-07-28 19:20:53,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=194176.0, ans=0.025 +2024-07-28 19:20:55,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=194189.33333333334, ans=0.125 +2024-07-28 19:20:56,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=194189.33333333334, ans=0.0 +2024-07-28 19:20:57,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=194189.33333333334, ans=0.0 +2024-07-28 19:20:58,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0 +2024-07-28 19:21:00,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=194202.66666666666, ans=0.0 +2024-07-28 19:21:08,113 INFO [train.py:1114] (2/4) Epoch 15, batch 2550, loss[loss=0.1984, simple_loss=0.2744, pruned_loss=0.06117, over 4815.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2728, pruned_loss=0.04609, over 938874.95 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:21:11,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=194216.0, ans=0.125 +2024-07-28 19:21:23,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=194242.66666666666, ans=0.125 +2024-07-28 19:21:23,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=194242.66666666666, ans=0.0 +2024-07-28 19:21:28,597 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. 
limit=15.0 +2024-07-28 19:21:30,733 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.497e+01 6.084e+01 7.068e+01 9.259e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 19:21:39,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=194269.33333333334, ans=0.125 +2024-07-28 19:21:41,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=194269.33333333334, ans=0.0 +2024-07-28 19:21:42,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.21 vs. limit=12.0 +2024-07-28 19:21:43,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-28 19:21:45,671 INFO [train.py:1114] (2/4) Epoch 15, batch 2600, loss[loss=0.205, simple_loss=0.292, pruned_loss=0.05896, over 4894.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2732, pruned_loss=0.04635, over 937934.02 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:21:48,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.17 vs. limit=22.5 +2024-07-28 19:22:14,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=194336.0, ans=0.125 +2024-07-28 19:22:19,342 INFO [train.py:1114] (2/4) Epoch 15, batch 2650, loss[loss=0.1859, simple_loss=0.2778, pruned_loss=0.04701, over 4597.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.273, pruned_loss=0.04631, over 940117.49 frames. ], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:22:20,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194349.33333333334, ans=0.1 +2024-07-28 19:22:23,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=194349.33333333334, ans=0.125 +2024-07-28 19:22:29,793 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-07-28 19:22:30,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=194362.66666666666, ans=0.0 +2024-07-28 19:22:33,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 19:22:40,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=194376.0, ans=0.2 +2024-07-28 19:22:43,425 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.437e+01 6.145e+01 6.904e+01 9.658e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 19:22:54,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.42 vs. 
limit=15.0 +2024-07-28 19:22:54,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194402.66666666666, ans=0.1 +2024-07-28 19:23:00,536 INFO [train.py:1114] (2/4) Epoch 15, batch 2700, loss[loss=0.1772, simple_loss=0.2753, pruned_loss=0.0396, over 4740.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2724, pruned_loss=0.04615, over 940123.52 frames. ], batch size: 14, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:32,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=194469.33333333334, ans=0.125 +2024-07-28 19:23:35,794 INFO [train.py:1114] (2/4) Epoch 15, batch 2750, loss[loss=0.1717, simple_loss=0.2633, pruned_loss=0.04003, over 4696.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2712, pruned_loss=0.04625, over 939951.39 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:44,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=194496.0, ans=0.125 +2024-07-28 19:23:48,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-07-28 19:23:56,140 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.701e+01 6.363e+01 7.329e+01 1.129e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 19:24:08,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0 +2024-07-28 19:24:10,235 INFO [train.py:1114] (2/4) Epoch 15, batch 2800, loss[loss=0.2223, simple_loss=0.2851, pruned_loss=0.07977, over 3240.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2715, pruned_loss=0.04685, over 937463.52 frames. ], batch size: 35, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:15,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.60 vs. limit=10.0 +2024-07-28 19:24:19,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.51 vs. limit=15.0 +2024-07-28 19:24:25,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194576.0, ans=0.0 +2024-07-28 19:24:48,380 INFO [train.py:1114] (2/4) Epoch 15, batch 2850, loss[loss=0.1865, simple_loss=0.2744, pruned_loss=0.04926, over 4957.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04736, over 935494.23 frames. 
], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:53,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=194616.0, ans=0.125 +2024-07-28 19:24:56,424 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:25:09,891 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.952e+01 6.499e+01 7.318e+01 1.007e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 19:25:18,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=194669.33333333334, ans=0.125 +2024-07-28 19:25:18,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194669.33333333334, ans=0.0 +2024-07-28 19:25:23,263 INFO [train.py:1114] (2/4) Epoch 15, batch 2900, loss[loss=0.1814, simple_loss=0.2717, pruned_loss=0.04551, over 4824.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2725, pruned_loss=0.04702, over 939466.94 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:25:32,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=194696.0, ans=0.2 +2024-07-28 19:25:33,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.40 vs. limit=15.0 +2024-07-28 19:25:46,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=194722.66666666666, ans=10.0 +2024-07-28 19:25:51,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=194736.0, ans=0.125 +2024-07-28 19:25:57,087 INFO [train.py:1114] (2/4) Epoch 15, batch 2950, loss[loss=0.1963, simple_loss=0.2863, pruned_loss=0.05315, over 4714.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2716, pruned_loss=0.04694, over 938565.28 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:26:04,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-07-28 19:26:06,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=194762.66666666666, ans=0.025 +2024-07-28 19:26:10,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=194776.0, ans=0.125 +2024-07-28 19:26:13,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0 +2024-07-28 19:26:17,151 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+01 5.428e+01 5.966e+01 6.720e+01 8.904e+01, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:27:12,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=194802.66666666666, ans=0.125 +2024-07-28 19:27:15,597 INFO [train.py:1114] (2/4) Epoch 15, batch 3000, loss[loss=0.1862, simple_loss=0.278, pruned_loss=0.04725, over 4762.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2722, pruned_loss=0.0472, over 938004.73 frames. 
], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:27:15,597 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 19:27:27,985 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1635, simple_loss=0.2667, pruned_loss=0.03013, over 944034.00 frames. +2024-07-28 19:27:27,986 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 19:27:28,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=194816.0, ans=0.0 +2024-07-28 19:27:42,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=194829.33333333334, ans=0.125 +2024-07-28 19:27:50,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=194856.0, ans=0.0 +2024-07-28 19:27:54,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=194856.0, ans=0.125 +2024-07-28 19:28:02,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.14 vs. limit=10.0 +2024-07-28 19:28:04,529 INFO [train.py:1114] (2/4) Epoch 15, batch 3050, loss[loss=0.1563, simple_loss=0.244, pruned_loss=0.03425, over 4638.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2726, pruned_loss=0.04721, over 937286.89 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:06,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=194882.66666666666, ans=0.125 +2024-07-28 19:28:13,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=194896.0, ans=0.125 +2024-07-28 19:28:16,630 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0 +2024-07-28 19:28:19,124 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0 +2024-07-28 19:28:21,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=194909.33333333334, ans=0.2 +2024-07-28 19:28:24,841 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.642e+01 6.422e+01 7.764e+01 9.574e+01, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 19:28:25,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=194922.66666666666, ans=0.0 +2024-07-28 19:28:40,211 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:28:41,476 INFO [train.py:1114] (2/4) Epoch 15, batch 3100, loss[loss=0.2026, simple_loss=0.3003, pruned_loss=0.05246, over 4649.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2717, pruned_loss=0.047, over 938221.97 frames. 
], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:42,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=194949.33333333334, ans=0.125 +2024-07-28 19:28:50,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=194962.66666666666, ans=0.125 +2024-07-28 19:29:06,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.77 vs. limit=6.0 +2024-07-28 19:29:08,863 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:29:17,410 INFO [train.py:1114] (2/4) Epoch 15, batch 3150, loss[loss=0.1992, simple_loss=0.2925, pruned_loss=0.05295, over 4616.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2717, pruned_loss=0.04668, over 938276.79 frames. ], batch size: 17, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:29:20,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=195016.0, ans=0.2 +2024-07-28 19:29:24,485 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.63 vs. limit=12.0 +2024-07-28 19:29:26,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=195029.33333333334, ans=0.025 +2024-07-28 19:29:27,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=195029.33333333334, ans=0.125 +2024-07-28 19:29:30,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=195029.33333333334, ans=0.09899494936611666 +2024-07-28 19:29:47,869 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.711e+01 6.349e+01 7.434e+01 1.242e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 19:29:56,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=195069.33333333334, ans=0.0 +2024-07-28 19:29:58,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=195069.33333333334, ans=0.2 +2024-07-28 19:30:01,497 INFO [train.py:1114] (2/4) Epoch 15, batch 3200, loss[loss=0.127, simple_loss=0.2203, pruned_loss=0.0168, over 4834.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2712, pruned_loss=0.04621, over 940103.06 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:07,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=195082.66666666666, ans=0.125 +2024-07-28 19:30:07,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=195096.0, ans=0.0 +2024-07-28 19:30:26,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.33 vs. limit=15.0 +2024-07-28 19:30:35,985 INFO [train.py:1114] (2/4) Epoch 15, batch 3250, loss[loss=0.1726, simple_loss=0.2728, pruned_loss=0.03618, over 4942.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2717, pruned_loss=0.04583, over 940944.34 frames. 
], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:50,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=195176.0, ans=0.2 +2024-07-28 19:30:56,189 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.478e+01 5.941e+01 6.673e+01 9.852e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 19:30:56,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.33 vs. limit=15.0 +2024-07-28 19:30:57,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195189.33333333334, ans=0.125 +2024-07-28 19:31:11,844 INFO [train.py:1114] (2/4) Epoch 15, batch 3300, loss[loss=0.2201, simple_loss=0.3054, pruned_loss=0.06734, over 4700.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2706, pruned_loss=0.0455, over 940834.63 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:14,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195216.0, ans=0.125 +2024-07-28 19:31:23,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=195229.33333333334, ans=0.0 +2024-07-28 19:31:26,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=195242.66666666666, ans=10.0 +2024-07-28 19:31:36,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=195256.0, ans=0.0 +2024-07-28 19:31:38,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=195269.33333333334, ans=0.125 +2024-07-28 19:31:44,974 INFO [train.py:1114] (2/4) Epoch 15, batch 3350, loss[loss=0.2308, simple_loss=0.309, pruned_loss=0.07631, over 4619.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2711, pruned_loss=0.0459, over 938551.98 frames. ], batch size: 17, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:32:19,700 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.781e+01 6.264e+01 6.966e+01 9.522e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 19:32:21,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=195322.66666666666, ans=0.0 +2024-07-28 19:32:24,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=195322.66666666666, ans=0.025 +2024-07-28 19:32:29,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195336.0, ans=0.1 +2024-07-28 19:32:33,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=195336.0, ans=0.125 +2024-07-28 19:32:34,344 INFO [train.py:1114] (2/4) Epoch 15, batch 3400, loss[loss=0.1419, simple_loss=0.2221, pruned_loss=0.03087, over 4797.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.04595, over 936950.73 frames. 
], batch size: 11, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:32:37,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=195349.33333333334, ans=0.125 +2024-07-28 19:32:42,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-07-28 19:32:43,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.56 vs. limit=15.0 +2024-07-28 19:32:43,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=195362.66666666666, ans=0.0 +2024-07-28 19:32:49,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=195376.0, ans=0.1 +2024-07-28 19:32:50,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.69 vs. limit=6.0 +2024-07-28 19:32:57,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0 +2024-07-28 19:33:01,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195389.33333333334, ans=0.1 +2024-07-28 19:33:03,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=195389.33333333334, ans=0.0 +2024-07-28 19:33:10,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=195402.66666666666, ans=0.0 +2024-07-28 19:33:11,734 INFO [train.py:1114] (2/4) Epoch 15, batch 3450, loss[loss=0.1914, simple_loss=0.2745, pruned_loss=0.05419, over 4702.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2706, pruned_loss=0.04551, over 937391.68 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:17,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=195416.0, ans=0.125 +2024-07-28 19:33:20,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=15.0 +2024-07-28 19:33:21,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=195429.33333333334, ans=0.025 +2024-07-28 19:33:25,429 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.48 vs. limit=12.0 +2024-07-28 19:33:27,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=195442.66666666666, ans=0.0 +2024-07-28 19:33:31,965 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 19:33:32,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.21 vs. 
limit=15.0 +2024-07-28 19:33:33,577 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.667e+01 6.148e+01 6.825e+01 9.914e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 19:33:48,992 INFO [train.py:1114] (2/4) Epoch 15, batch 3500, loss[loss=0.1602, simple_loss=0.2447, pruned_loss=0.03783, over 4944.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2697, pruned_loss=0.0456, over 937837.33 frames. ], batch size: 12, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:53,961 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=195482.66666666666, ans=0.125 +2024-07-28 19:34:04,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195509.33333333334, ans=0.125 +2024-07-28 19:34:05,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=195509.33333333334, ans=0.125 +2024-07-28 19:34:15,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=195536.0, ans=0.0 +2024-07-28 19:34:20,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=195536.0, ans=0.0 +2024-07-28 19:34:21,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=195536.0, ans=0.025 +2024-07-28 19:34:23,069 INFO [train.py:1114] (2/4) Epoch 15, batch 3550, loss[loss=0.1856, simple_loss=0.2843, pruned_loss=0.04347, over 4655.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2699, pruned_loss=0.0458, over 938782.36 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:34:27,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=195549.33333333334, ans=0.09899494936611666 +2024-07-28 19:34:27,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195549.33333333334, ans=0.125 +2024-07-28 19:34:28,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=195549.33333333334, ans=0.025 +2024-07-28 19:34:40,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=195576.0, ans=15.0 +2024-07-28 19:34:43,332 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.567e+01 6.296e+01 7.208e+01 1.241e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 19:34:56,849 INFO [train.py:1114] (2/4) Epoch 15, batch 3600, loss[loss=0.1631, simple_loss=0.2543, pruned_loss=0.03593, over 4968.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2705, pruned_loss=0.04572, over 940796.87 frames. 
], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:34:58,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=195616.0, ans=0.125 +2024-07-28 19:35:10,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=195629.33333333334, ans=0.0 +2024-07-28 19:35:10,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=195629.33333333334, ans=0.125 +2024-07-28 19:35:18,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=195642.66666666666, ans=0.025 +2024-07-28 19:35:21,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=195642.66666666666, ans=0.125 +2024-07-28 19:35:24,709 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.57 vs. limit=15.0 +2024-07-28 19:35:25,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=195656.0, ans=0.0 +2024-07-28 19:35:33,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=195669.33333333334, ans=0.125 +2024-07-28 19:35:35,850 INFO [train.py:1114] (2/4) Epoch 15, batch 3650, loss[loss=0.1827, simple_loss=0.2789, pruned_loss=0.04325, over 4898.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2699, pruned_loss=0.04532, over 940960.07 frames. ], batch size: 15, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:56,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=195696.0, ans=0.125 +2024-07-28 19:36:03,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195709.33333333334, ans=0.1 +2024-07-28 19:36:03,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=195709.33333333334, ans=0.0 +2024-07-28 19:36:07,032 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.599e+01 5.794e+01 6.353e+01 7.232e+01 1.193e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 19:36:26,501 INFO [train.py:1114] (2/4) Epoch 15, batch 3700, loss[loss=0.1885, simple_loss=0.294, pruned_loss=0.04151, over 4939.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2702, pruned_loss=0.04581, over 941773.80 frames. 
], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:36:27,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=195749.33333333334, ans=0.125 +2024-07-28 19:36:33,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=195762.66666666666, ans=0.025 +2024-07-28 19:36:57,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=195789.33333333334, ans=0.0 +2024-07-28 19:37:01,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195802.66666666666, ans=0.125 +2024-07-28 19:37:06,815 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:37:12,729 INFO [train.py:1114] (2/4) Epoch 15, batch 3750, loss[loss=0.1568, simple_loss=0.247, pruned_loss=0.03333, over 4803.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2696, pruned_loss=0.04527, over 943186.04 frames. ], batch size: 11, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:37:43,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.50 vs. limit=22.5 +2024-07-28 19:37:44,960 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-07-28 19:37:47,140 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.420e+01 5.433e+01 6.069e+01 6.768e+01 1.859e+02, threshold=1.214e+02, percent-clipped=1.0 +2024-07-28 19:37:50,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=195856.0, ans=0.125 +2024-07-28 19:37:55,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=195869.33333333334, ans=0.125 +2024-07-28 19:38:00,628 INFO [train.py:1114] (2/4) Epoch 15, batch 3800, loss[loss=0.1707, simple_loss=0.2667, pruned_loss=0.0374, over 4802.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2692, pruned_loss=0.04516, over 941591.88 frames. ], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:10,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195896.0, ans=0.125 +2024-07-28 19:38:13,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=195896.0, ans=0.125 +2024-07-28 19:38:16,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=195909.33333333334, ans=0.125 +2024-07-28 19:38:19,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=195909.33333333334, ans=0.95 +2024-07-28 19:38:25,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195922.66666666666, ans=0.1 +2024-07-28 19:38:27,969 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.67 vs. 
limit=10.0 +2024-07-28 19:38:29,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=195936.0, ans=0.025 +2024-07-28 19:38:30,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=195936.0, ans=0.0 +2024-07-28 19:38:35,887 INFO [train.py:1114] (2/4) Epoch 15, batch 3850, loss[loss=0.1542, simple_loss=0.2528, pruned_loss=0.02777, over 4618.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2688, pruned_loss=0.04456, over 941968.67 frames. ], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:37,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=195949.33333333334, ans=0.0 +2024-07-28 19:38:52,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=195962.66666666666, ans=0.125 +2024-07-28 19:38:55,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=195962.66666666666, ans=0.0 +2024-07-28 19:38:55,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.41 vs. limit=15.0 +2024-07-28 19:39:02,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=195976.0, ans=0.125 +2024-07-28 19:39:04,047 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.600e+01 6.154e+01 6.941e+01 1.032e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 19:39:12,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=195989.33333333334, ans=0.2 +2024-07-28 19:39:15,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=196002.66666666666, ans=0.125 +2024-07-28 19:39:18,932 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:39:55,325 INFO [train.py:1114] (2/4) Epoch 15, batch 3900, loss[loss=0.1543, simple_loss=0.2437, pruned_loss=0.0325, over 4808.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2696, pruned_loss=0.04478, over 942285.07 frames. ], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:40:02,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196029.33333333334, ans=0.125 +2024-07-28 19:40:06,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=196029.33333333334, ans=0.125 +2024-07-28 19:42:00,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.67 vs. limit=15.0 +2024-07-28 19:42:22,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196069.33333333334, ans=0.1 +2024-07-28 19:42:26,993 INFO [train.py:1114] (2/4) Epoch 15, batch 3950, loss[loss=0.2176, simple_loss=0.3094, pruned_loss=0.06286, over 4837.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2698, pruned_loss=0.04481, over 944124.58 frames. 
], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:42:27,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=196082.66666666666, ans=0.0 +2024-07-28 19:42:31,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.15 vs. limit=15.0 +2024-07-28 19:42:38,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=196082.66666666666, ans=0.0 +2024-07-28 19:42:47,965 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.66 vs. limit=15.0 +2024-07-28 19:42:53,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=196109.33333333334, ans=0.125 +2024-07-28 19:42:55,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=196122.66666666666, ans=0.0 +2024-07-28 19:42:56,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.595e+01 6.291e+01 6.996e+01 9.236e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 19:43:04,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196122.66666666666, ans=0.1 +2024-07-28 19:44:10,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=196136.0, ans=0.125 +2024-07-28 19:44:10,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196136.0, ans=0.1 +2024-07-28 19:44:13,298 INFO [train.py:1114] (2/4) Epoch 15, batch 4000, loss[loss=0.1614, simple_loss=0.2424, pruned_loss=0.04014, over 4770.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.27, pruned_loss=0.04534, over 940278.58 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:44:29,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=196176.0, ans=0.125 +2024-07-28 19:44:31,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196176.0, ans=0.0 +2024-07-28 19:44:31,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=196176.0, ans=0.0 +2024-07-28 19:44:32,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=196176.0, ans=0.1 +2024-07-28 19:44:41,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=196189.33333333334, ans=0.025 +2024-07-28 19:44:49,540 INFO [train.py:1114] (2/4) Epoch 15, batch 4050, loss[loss=0.2532, simple_loss=0.3255, pruned_loss=0.09046, over 3786.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2703, pruned_loss=0.04546, over 939160.41 frames. 
], batch size: 35, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:44:58,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=196216.0, ans=0.2 +2024-07-28 19:45:06,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=196229.33333333334, ans=0.125 +2024-07-28 19:48:20,685 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.678e+01 6.345e+01 7.266e+01 1.118e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 19:48:31,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=196269.33333333334, ans=0.0 +2024-07-28 19:48:32,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=196269.33333333334, ans=0.0 +2024-07-28 19:48:39,020 INFO [train.py:1114] (2/4) Epoch 15, batch 4100, loss[loss=0.2085, simple_loss=0.2905, pruned_loss=0.0632, over 4908.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.271, pruned_loss=0.04609, over 938228.47 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:48:42,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.11 vs. limit=15.0 +2024-07-28 19:48:46,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196296.0, ans=0.125 +2024-07-28 19:48:55,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=196309.33333333334, ans=0.125 +2024-07-28 19:49:13,372 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=2.502e-03 +2024-07-28 19:49:15,621 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.24 vs. limit=15.0 +2024-07-28 19:49:22,863 INFO [train.py:1114] (2/4) Epoch 15, batch 4150, loss[loss=0.1694, simple_loss=0.2689, pruned_loss=0.03492, over 4822.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.27, pruned_loss=0.04551, over 938032.93 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:49:47,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=196376.0, ans=0.0 +2024-07-28 19:49:52,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=12.0 +2024-07-28 19:49:52,756 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+01 5.670e+01 6.330e+01 7.256e+01 1.542e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 19:50:01,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=196389.33333333334, ans=0.1 +2024-07-28 19:50:04,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196402.66666666666, ans=0.1 +2024-07-28 19:50:09,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=196416.0, ans=0.125 +2024-07-28 19:50:10,382 INFO [train.py:1114] (2/4) Epoch 15, batch 4200, loss[loss=0.2055, simple_loss=0.3026, pruned_loss=0.0542, over 4903.00 frames. 
], tot_loss[loss=0.1805, simple_loss=0.2701, pruned_loss=0.04541, over 939725.01 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:50:37,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=196429.33333333334, ans=0.0 +2024-07-28 19:50:49,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=196456.0, ans=0.2 +2024-07-28 19:50:52,184 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.93 vs. limit=22.5 +2024-07-28 19:50:53,575 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.80 vs. limit=15.0 +2024-07-28 19:51:00,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196469.33333333334, ans=0.1 +2024-07-28 19:51:03,978 INFO [train.py:1114] (2/4) Epoch 15, batch 4250, loss[loss=0.1548, simple_loss=0.2464, pruned_loss=0.03164, over 4643.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04564, over 940658.57 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:09,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0 +2024-07-28 19:51:13,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196496.0, ans=0.1 +2024-07-28 19:51:22,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=196509.33333333334, ans=0.025 +2024-07-28 19:51:23,977 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.654e+01 5.709e+01 6.318e+01 7.581e+01 1.158e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 19:51:27,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=196522.66666666666, ans=0.125 +2024-07-28 19:51:32,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196536.0, ans=0.1 +2024-07-28 19:51:37,907 INFO [train.py:1114] (2/4) Epoch 15, batch 4300, loss[loss=0.162, simple_loss=0.2574, pruned_loss=0.03336, over 4767.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04595, over 940079.18 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:42,223 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-07-28 19:51:47,653 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.68 vs. limit=15.0 +2024-07-28 19:52:11,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=196602.66666666666, ans=0.0 +2024-07-28 19:52:13,576 INFO [train.py:1114] (2/4) Epoch 15, batch 4350, loss[loss=0.1882, simple_loss=0.2954, pruned_loss=0.04045, over 4757.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2718, pruned_loss=0.04592, over 940987.66 frames. 
], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:52:21,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196629.33333333334, ans=0.125 +2024-07-28 19:52:31,349 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:52:33,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=196642.66666666666, ans=0.125 +2024-07-28 19:52:34,516 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.717e+01 6.359e+01 7.024e+01 1.032e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 19:52:36,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=196656.0, ans=0.0 +2024-07-28 19:52:42,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=196669.33333333334, ans=0.0 +2024-07-28 19:52:48,048 INFO [train.py:1114] (2/4) Epoch 15, batch 4400, loss[loss=0.1763, simple_loss=0.2712, pruned_loss=0.0407, over 4810.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2716, pruned_loss=0.04572, over 940840.38 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:52:52,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196682.66666666666, ans=0.125 +2024-07-28 19:52:55,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=196696.0, ans=0.0 +2024-07-28 19:53:05,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196709.33333333334, ans=0.125 +2024-07-28 19:53:11,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196722.66666666666, ans=0.1 +2024-07-28 19:53:21,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=196736.0, ans=0.025 +2024-07-28 19:53:23,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=196736.0, ans=0.125 +2024-07-28 19:53:25,045 INFO [train.py:1114] (2/4) Epoch 15, batch 4450, loss[loss=0.1743, simple_loss=0.261, pruned_loss=0.04382, over 4937.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2719, pruned_loss=0.04592, over 938410.64 frames. ], batch size: 12, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:53:29,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=196749.33333333334, ans=0.0 +2024-07-28 19:55:23,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196749.33333333334, ans=0.0 +2024-07-28 19:55:44,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-07-28 19:55:45,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.88 vs. 
limit=22.5 +2024-07-28 19:55:45,663 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.676e+01 6.225e+01 6.763e+01 9.651e+01, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 19:55:52,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=196802.66666666666, ans=0.05 +2024-07-28 19:55:58,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=196816.0, ans=0.025 +2024-07-28 19:55:59,332 INFO [train.py:1114] (2/4) Epoch 15, batch 4500, loss[loss=0.1715, simple_loss=0.271, pruned_loss=0.03596, over 4736.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2723, pruned_loss=0.04621, over 937722.70 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:55:59,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.77 vs. limit=10.0 +2024-07-28 19:56:05,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0 +2024-07-28 19:56:26,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=196869.33333333334, ans=0.2 +2024-07-28 19:56:31,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=196869.33333333334, ans=0.125 +2024-07-28 19:56:32,378 INFO [train.py:1114] (2/4) Epoch 15, batch 4550, loss[loss=0.178, simple_loss=0.2598, pruned_loss=0.04805, over 4892.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2722, pruned_loss=0.04637, over 939770.17 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:56:35,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196882.66666666666, ans=0.1 +2024-07-28 19:56:39,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=196896.0, ans=0.2 +2024-07-28 19:56:41,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=196896.0, ans=0.125 +2024-07-28 19:56:42,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=196896.0, ans=0.0 +2024-07-28 19:56:51,600 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.39 vs. limit=22.5 +2024-07-28 19:56:54,583 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.447e+01 5.965e+01 6.710e+01 1.037e+02, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:56:57,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=196922.66666666666, ans=0.125 +2024-07-28 19:57:06,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=196936.0, ans=0.125 +2024-07-28 19:57:06,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.07 vs. 
limit=15.0 +2024-07-28 19:57:06,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.24 vs. limit=15.0 +2024-07-28 19:57:07,872 INFO [train.py:1114] (2/4) Epoch 15, batch 4600, loss[loss=0.1827, simple_loss=0.2696, pruned_loss=0.0479, over 4512.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2716, pruned_loss=0.04608, over 937908.20 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:57:08,664 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:57:10,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=196949.33333333334, ans=0.125 +2024-07-28 19:57:14,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0 +2024-07-28 19:57:23,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=196976.0, ans=0.0 +2024-07-28 19:57:25,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=196976.0, ans=0.0 +2024-07-28 19:57:41,457 INFO [train.py:1114] (2/4) Epoch 15, batch 4650, loss[loss=0.1845, simple_loss=0.2675, pruned_loss=0.0508, over 4836.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2718, pruned_loss=0.04615, over 940087.82 frames. ], batch size: 16, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:57:42,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197016.0, ans=0.1 +2024-07-28 19:57:44,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=197016.0, ans=0.2 +2024-07-28 19:57:51,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197029.33333333334, ans=0.1 +2024-07-28 19:57:53,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=197029.33333333334, ans=0.025 +2024-07-28 19:58:00,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=197042.66666666666, ans=0.0 +2024-07-28 19:58:03,991 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.604e+01 6.309e+01 7.191e+01 9.740e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 19:58:05,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=197056.0, ans=0.025 +2024-07-28 19:58:23,485 INFO [train.py:1114] (2/4) Epoch 15, batch 4700, loss[loss=0.1529, simple_loss=0.23, pruned_loss=0.03792, over 4697.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.271, pruned_loss=0.04589, over 937142.98 frames. ], batch size: 11, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:58:24,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=197082.66666666666, ans=0.0 +2024-07-28 19:58:25,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.55 vs. 
limit=15.0 +2024-07-28 19:58:39,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=197109.33333333334, ans=0.2 +2024-07-28 19:58:57,023 INFO [train.py:1114] (2/4) Epoch 15, batch 4750, loss[loss=0.1613, simple_loss=0.2528, pruned_loss=0.03491, over 4526.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.272, pruned_loss=0.0463, over 935787.25 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:59:17,884 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.663e+01 6.511e+01 7.507e+01 1.082e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 19:59:27,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=197202.66666666666, ans=0.125 +2024-07-28 19:59:30,785 INFO [train.py:1114] (2/4) Epoch 15, batch 4800, loss[loss=0.1981, simple_loss=0.2933, pruned_loss=0.05143, over 4696.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.272, pruned_loss=0.04658, over 933354.88 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:59:32,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=197216.0, ans=0.025 +2024-07-28 19:59:36,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=12.0 +2024-07-28 19:59:39,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=197229.33333333334, ans=0.0 +2024-07-28 19:59:40,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=197229.33333333334, ans=0.125 +2024-07-28 19:59:41,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=197229.33333333334, ans=0.2 +2024-07-28 19:59:42,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=15.0 +2024-07-28 19:59:47,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=197242.66666666666, ans=0.125 +2024-07-28 19:59:52,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=197256.0, ans=0.025 +2024-07-28 20:00:00,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=197269.33333333334, ans=0.0 +2024-07-28 20:00:02,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197269.33333333334, ans=0.125 +2024-07-28 20:00:04,017 INFO [train.py:1114] (2/4) Epoch 15, batch 4850, loss[loss=0.185, simple_loss=0.282, pruned_loss=0.04404, over 4742.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2723, pruned_loss=0.04667, over 933058.55 frames. 
], batch size: 14, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 20:00:13,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=197282.66666666666, ans=0.0 +2024-07-28 20:00:13,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=197282.66666666666, ans=0.2 +2024-07-28 20:00:19,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=197296.0, ans=0.1 +2024-07-28 20:00:31,450 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.598e+01 6.100e+01 6.871e+01 9.023e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 20:00:33,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197322.66666666666, ans=0.1 +2024-07-28 20:00:46,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197336.0, ans=0.125 +2024-07-28 20:00:48,004 INFO [train.py:1114] (2/4) Epoch 15, batch 4900, loss[loss=0.1629, simple_loss=0.2737, pruned_loss=0.026, over 4759.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2714, pruned_loss=0.04588, over 934427.93 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 20:00:56,192 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.03 vs. limit=22.5 +2024-07-28 20:00:56,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=22.5 +2024-07-28 20:00:59,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=197362.66666666666, ans=0.125 +2024-07-28 20:01:10,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=197376.0, ans=0.0 +2024-07-28 20:01:25,291 INFO [train.py:1114] (2/4) Epoch 15, batch 4950, loss[loss=0.2345, simple_loss=0.3028, pruned_loss=0.08312, over 3224.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2717, pruned_loss=0.04632, over 931247.41 frames. ], batch size: 35, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:01:30,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=197416.0, ans=0.2 +2024-07-28 20:01:31,470 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197429.33333333334, ans=0.1 +2024-07-28 20:01:41,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197442.66666666666, ans=0.125 +2024-07-28 20:01:45,885 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.531e+01 5.983e+01 6.546e+01 1.015e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 20:01:47,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=197456.0, ans=0.125 +2024-07-28 20:01:56,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.12 vs. 
limit=15.0 +2024-07-28 20:01:56,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=197469.33333333334, ans=0.125 +2024-07-28 20:01:58,980 INFO [train.py:1114] (2/4) Epoch 15, batch 5000, loss[loss=0.1643, simple_loss=0.2531, pruned_loss=0.03769, over 4666.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2717, pruned_loss=0.04605, over 935162.57 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:02:08,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=197496.0, ans=0.125 +2024-07-28 20:02:14,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=197509.33333333334, ans=0.2 +2024-07-28 20:02:16,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=197509.33333333334, ans=0.0 +2024-07-28 20:02:16,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=197509.33333333334, ans=0.125 +2024-07-28 20:02:26,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=197522.66666666666, ans=0.125 +2024-07-28 20:02:31,835 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:02:33,796 INFO [train.py:1114] (2/4) Epoch 15, batch 5050, loss[loss=0.172, simple_loss=0.2594, pruned_loss=0.04227, over 4852.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2716, pruned_loss=0.04596, over 938274.58 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:02:37,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=197549.33333333334, ans=0.035 +2024-07-28 20:02:42,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=197562.66666666666, ans=0.0 +2024-07-28 20:02:44,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=197562.66666666666, ans=0.2 +2024-07-28 20:02:53,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=197589.33333333334, ans=0.04949747468305833 +2024-07-28 20:02:54,791 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+01 5.690e+01 6.527e+01 7.473e+01 1.062e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 20:03:06,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=197602.66666666666, ans=0.07 +2024-07-28 20:03:08,384 INFO [train.py:1114] (2/4) Epoch 15, batch 5100, loss[loss=0.154, simple_loss=0.2467, pruned_loss=0.03063, over 4770.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2712, pruned_loss=0.04567, over 936039.78 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:03:08,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=197616.0, ans=0.125 +2024-07-28 20:03:10,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.23 vs. 
limit=22.5 +2024-07-28 20:03:11,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.46 vs. limit=15.0 +2024-07-28 20:03:16,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=197629.33333333334, ans=0.125 +2024-07-28 20:03:22,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=197642.66666666666, ans=0.0 +2024-07-28 20:03:34,757 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:03:35,553 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.33 vs. limit=15.0 +2024-07-28 20:03:41,975 INFO [train.py:1114] (2/4) Epoch 15, batch 5150, loss[loss=0.2158, simple_loss=0.3012, pruned_loss=0.06522, over 4832.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2724, pruned_loss=0.04621, over 936747.33 frames. ], batch size: 16, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:03:42,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=197682.66666666666, ans=0.125 +2024-07-28 20:03:45,637 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0 +2024-07-28 20:03:47,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.30 vs. limit=15.0 +2024-07-28 20:03:54,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=197696.0, ans=0.5 +2024-07-28 20:04:04,751 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.460e+01 5.595e+01 6.018e+01 6.676e+01 9.613e+01, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 20:04:16,851 INFO [train.py:1114] (2/4) Epoch 15, batch 5200, loss[loss=0.1819, simple_loss=0.2778, pruned_loss=0.04304, over 4666.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2718, pruned_loss=0.04571, over 936551.85 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:04:24,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.75 vs. limit=15.0 +2024-07-28 20:04:25,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=197762.66666666666, ans=0.2 +2024-07-28 20:04:28,654 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.19 vs. limit=15.0 +2024-07-28 20:04:31,846 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:04:39,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=197789.33333333334, ans=0.0 +2024-07-28 20:04:50,351 INFO [train.py:1114] (2/4) Epoch 15, batch 5250, loss[loss=0.1727, simple_loss=0.2591, pruned_loss=0.04314, over 4902.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2711, pruned_loss=0.04551, over 935799.98 frames. 
], batch size: 13, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:04:52,167 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.05 vs. limit=15.0 +2024-07-28 20:04:57,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=197829.33333333334, ans=0.125 +2024-07-28 20:05:08,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=197842.66666666666, ans=0.2 +2024-07-28 20:05:09,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197842.66666666666, ans=0.125 +2024-07-28 20:05:11,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=197856.0, ans=0.125 +2024-07-28 20:05:12,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.761e+01 6.623e+01 7.609e+01 1.184e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 20:05:13,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197856.0, ans=0.125 +2024-07-28 20:05:19,695 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.14 vs. limit=15.0 +2024-07-28 20:05:24,592 INFO [train.py:1114] (2/4) Epoch 15, batch 5300, loss[loss=0.1942, simple_loss=0.2836, pruned_loss=0.05239, over 4620.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.0457, over 934425.69 frames. ], batch size: 16, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:05:41,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=197896.0, ans=0.0 +2024-07-28 20:05:41,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=197896.0, ans=0.125 +2024-07-28 20:05:45,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=197896.0, ans=0.125 +2024-07-28 20:05:51,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0 +2024-07-28 20:06:07,652 INFO [train.py:1114] (2/4) Epoch 15, batch 5350, loss[loss=0.1643, simple_loss=0.2486, pruned_loss=0.04003, over 4514.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2716, pruned_loss=0.04599, over 936480.35 frames. ], batch size: 10, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:06:09,510 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.49 vs. 
limit=22.5 +2024-07-28 20:06:10,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197949.33333333334, ans=0.1 +2024-07-28 20:06:28,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197976.0, ans=0.1 +2024-07-28 20:06:33,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.458e+01 6.203e+01 7.042e+01 1.086e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 20:06:45,668 INFO [train.py:1114] (2/4) Epoch 15, batch 5400, loss[loss=0.1971, simple_loss=0.2905, pruned_loss=0.05186, over 4146.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2728, pruned_loss=0.04674, over 930691.20 frames. ], batch size: 25, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:06:45,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=198016.0, ans=0.125 +2024-07-28 20:06:48,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=198016.0, ans=0.0 +2024-07-28 20:06:53,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=198029.33333333334, ans=0.2 +2024-07-28 20:06:53,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=198029.33333333334, ans=0.0 +2024-07-28 20:07:00,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=198042.66666666666, ans=0.025 +2024-07-28 20:07:05,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=198056.0, ans=0.0 +2024-07-28 20:07:18,654 INFO [train.py:1114] (2/4) Epoch 15, batch 5450, loss[loss=0.1747, simple_loss=0.2518, pruned_loss=0.04883, over 4708.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2717, pruned_loss=0.04611, over 933457.38 frames. ], batch size: 11, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:07:18,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=198082.66666666666, ans=0.0 +2024-07-28 20:07:19,430 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:07:23,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198082.66666666666, ans=0.0 +2024-07-28 20:07:35,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=198109.33333333334, ans=0.0 +2024-07-28 20:07:39,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-07-28 20:07:40,427 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.624e+01 6.275e+01 7.403e+01 1.039e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 20:07:40,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=198122.66666666666, ans=0.2 +2024-07-28 20:07:54,595 INFO [train.py:1114] (2/4) Epoch 15, batch 5500, loss[loss=0.2051, simple_loss=0.2943, pruned_loss=0.05792, over 4269.00 frames. 
], tot_loss[loss=0.1816, simple_loss=0.2711, pruned_loss=0.04611, over 931231.65 frames. ], batch size: 25, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:07:58,420 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.02 vs. limit=15.0 +2024-07-28 20:08:04,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198162.66666666666, ans=0.0 +2024-07-28 20:08:10,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=198176.0, ans=0.1 +2024-07-28 20:08:16,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198189.33333333334, ans=0.1 +2024-07-28 20:08:20,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=198189.33333333334, ans=0.0 +2024-07-28 20:08:25,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198202.66666666666, ans=0.125 +2024-07-28 20:08:29,838 INFO [train.py:1114] (2/4) Epoch 15, batch 5550, loss[loss=0.1486, simple_loss=0.2292, pruned_loss=0.03405, over 4707.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.27, pruned_loss=0.04558, over 933186.60 frames. ], batch size: 12, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:08:47,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198242.66666666666, ans=0.0 +2024-07-28 20:08:51,163 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.553e+01 5.843e+01 6.570e+01 7.982e+01 1.258e+02, threshold=1.314e+02, percent-clipped=1.0 +2024-07-28 20:08:56,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=198269.33333333334, ans=0.015 +2024-07-28 20:09:03,373 INFO [train.py:1114] (2/4) Epoch 15, batch 5600, loss[loss=0.1889, simple_loss=0.2827, pruned_loss=0.04752, over 4730.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2703, pruned_loss=0.04558, over 933904.45 frames. ], batch size: 14, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:09:06,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=198282.66666666666, ans=0.0 +2024-07-28 20:09:07,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=198282.66666666666, ans=0.125 +2024-07-28 20:09:08,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=198282.66666666666, ans=0.125 +2024-07-28 20:09:10,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198296.0, ans=0.1 +2024-07-28 20:09:10,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198296.0, ans=0.125 +2024-07-28 20:09:38,940 INFO [train.py:1114] (2/4) Epoch 15, batch 5650, loss[loss=0.1597, simple_loss=0.2579, pruned_loss=0.03081, over 4511.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2684, pruned_loss=0.04482, over 936549.39 frames. 
], batch size: 21, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:09:41,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=198349.33333333334, ans=0.125 +2024-07-28 20:09:43,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.74 vs. limit=10.0 +2024-07-28 20:09:59,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=198389.33333333334, ans=0.2 +2024-07-28 20:10:00,148 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.802e+01 6.478e+01 7.454e+01 1.007e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 20:10:06,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=198402.66666666666, ans=0.2 +2024-07-28 20:10:12,547 INFO [train.py:1114] (2/4) Epoch 15, batch 5700, loss[loss=0.1951, simple_loss=0.2767, pruned_loss=0.05675, over 4701.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2691, pruned_loss=0.04523, over 937953.44 frames. ], batch size: 13, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:10:12,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=198416.0, ans=0.0 +2024-07-28 20:10:12,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198416.0, ans=0.1 +2024-07-28 20:10:21,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=198429.33333333334, ans=0.2 +2024-07-28 20:10:28,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=198442.66666666666, ans=0.2 +2024-07-28 20:10:32,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198456.0, ans=0.1 +2024-07-28 20:10:33,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198456.0, ans=0.1 +2024-07-28 20:10:34,918 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:10:42,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=198469.33333333334, ans=0.0 +2024-07-28 20:10:46,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=198482.66666666666, ans=0.0 +2024-07-28 20:10:46,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=198482.66666666666, ans=0.2 +2024-07-28 20:10:46,721 INFO [train.py:1114] (2/4) Epoch 15, batch 5750, loss[loss=0.2036, simple_loss=0.293, pruned_loss=0.05708, over 4681.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2702, pruned_loss=0.04596, over 937977.11 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:10:47,698 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.19 vs. 
limit=6.0 +2024-07-28 20:10:48,152 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:10:50,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=198482.66666666666, ans=0.125 +2024-07-28 20:11:03,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.04 vs. limit=15.0 +2024-07-28 20:11:05,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=198509.33333333334, ans=0.05 +2024-07-28 20:11:07,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=198522.66666666666, ans=0.025 +2024-07-28 20:11:08,231 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.533e+01 6.199e+01 7.119e+01 1.016e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 20:11:12,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.06 vs. limit=15.0 +2024-07-28 20:11:13,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=198536.0, ans=0.025 +2024-07-28 20:11:16,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0 +2024-07-28 20:11:16,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198536.0, ans=0.0 +2024-07-28 20:11:20,261 INFO [train.py:1114] (2/4) Epoch 15, batch 5800, loss[loss=0.2025, simple_loss=0.2963, pruned_loss=0.05438, over 4718.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.272, pruned_loss=0.04695, over 937073.71 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:11:21,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=198549.33333333334, ans=0.125 +2024-07-28 20:11:30,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=198562.66666666666, ans=0.2 +2024-07-28 20:11:41,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=12.0 +2024-07-28 20:11:49,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=198602.66666666666, ans=0.0 +2024-07-28 20:11:55,708 INFO [train.py:1114] (2/4) Epoch 15, batch 5850, loss[loss=0.1799, simple_loss=0.2751, pruned_loss=0.0423, over 4478.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2712, pruned_loss=0.04616, over 937512.82 frames. 
], batch size: 21, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:12:11,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=198642.66666666666, ans=0.2 +2024-07-28 20:12:16,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198642.66666666666, ans=0.125 +2024-07-28 20:12:19,163 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 5.725e+01 6.353e+01 6.909e+01 1.131e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 20:12:22,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=198656.0, ans=0.0 +2024-07-28 20:12:26,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=198669.33333333334, ans=0.125 +2024-07-28 20:12:31,580 INFO [train.py:1114] (2/4) Epoch 15, batch 5900, loss[loss=0.2046, simple_loss=0.3026, pruned_loss=0.05326, over 4686.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2717, pruned_loss=0.04656, over 937509.78 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:12:37,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=198696.0, ans=0.025 +2024-07-28 20:12:39,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198696.0, ans=0.1 +2024-07-28 20:12:43,988 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0 +2024-07-28 20:12:47,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=198709.33333333334, ans=0.035 +2024-07-28 20:12:51,620 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.26 vs. limit=10.0 +2024-07-28 20:12:58,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=198722.66666666666, ans=0.125 +2024-07-28 20:13:11,340 INFO [train.py:1114] (2/4) Epoch 15, batch 5950, loss[loss=0.216, simple_loss=0.3094, pruned_loss=0.0613, over 4684.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2714, pruned_loss=0.04638, over 939509.40 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:13:16,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-07-28 20:13:21,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.55 vs. 
limit=22.5 +2024-07-28 20:13:23,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198762.66666666666, ans=0.0 +2024-07-28 20:13:27,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=198776.0, ans=0.125 +2024-07-28 20:13:34,730 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.687e+01 6.210e+01 6.868e+01 1.023e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 20:13:41,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=198802.66666666666, ans=0.125 +2024-07-28 20:13:42,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-07-28 20:13:47,295 INFO [train.py:1114] (2/4) Epoch 15, batch 6000, loss[loss=0.1893, simple_loss=0.2778, pruned_loss=0.05038, over 4232.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.271, pruned_loss=0.046, over 937107.34 frames. ], batch size: 25, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:13:50,111 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 20:14:07,948 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.6155, 5.7050, 5.3430, 6.3794], device='cuda:2') +2024-07-28 20:14:09,824 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.1637, simple_loss=0.2666, pruned_loss=0.03037, over 944034.00 frames. +2024-07-28 20:14:09,824 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 20:14:11,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=198816.0, ans=0.125 +2024-07-28 20:14:15,673 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.61 vs. limit=10.0 +2024-07-28 20:14:39,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198869.33333333334, ans=0.1 +2024-07-28 20:14:41,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=198869.33333333334, ans=0.125 +2024-07-28 20:14:43,678 INFO [train.py:1114] (2/4) Epoch 15, batch 6050, loss[loss=0.1904, simple_loss=0.2766, pruned_loss=0.05217, over 4776.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2713, pruned_loss=0.04602, over 938424.30 frames. 
], batch size: 12, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:14:46,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=198882.66666666666, ans=0.125 +2024-07-28 20:14:48,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=198882.66666666666, ans=0.0 +2024-07-28 20:14:57,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=198896.0, ans=0.0 +2024-07-28 20:14:58,886 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:15:06,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.577e+01 6.176e+01 7.301e+01 1.116e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 20:15:13,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=198936.0, ans=0.025 +2024-07-28 20:15:18,936 INFO [train.py:1114] (2/4) Epoch 15, batch 6100, loss[loss=0.2017, simple_loss=0.2961, pruned_loss=0.05363, over 4680.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2706, pruned_loss=0.04543, over 938305.14 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:15:24,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=198949.33333333334, ans=0.125 +2024-07-28 20:15:31,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=198962.66666666666, ans=0.2 +2024-07-28 20:15:35,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=198976.0, ans=0.125 +2024-07-28 20:15:38,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198976.0, ans=0.125 +2024-07-28 20:15:39,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=198989.33333333334, ans=0.125 +2024-07-28 20:15:40,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=198989.33333333334, ans=0.035 +2024-07-28 20:15:52,717 INFO [train.py:1114] (2/4) Epoch 15, batch 6150, loss[loss=0.2133, simple_loss=0.2861, pruned_loss=0.07023, over 3383.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2707, pruned_loss=0.0454, over 936932.14 frames. ], batch size: 35, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:15:58,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.49 vs. 
limit=15.0 +2024-07-28 20:16:14,491 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.430e+01 6.165e+01 7.118e+01 1.181e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 20:16:15,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=199056.0, ans=0.0 +2024-07-28 20:16:17,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199056.0, ans=0.1 +2024-07-28 20:16:20,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=199069.33333333334, ans=0.0 +2024-07-28 20:16:25,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=199069.33333333334, ans=0.125 +2024-07-28 20:16:26,465 INFO [train.py:1114] (2/4) Epoch 15, batch 6200, loss[loss=0.2094, simple_loss=0.2901, pruned_loss=0.06433, over 4737.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2712, pruned_loss=0.04557, over 936514.81 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:16:29,399 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199082.66666666666, ans=0.1 +2024-07-28 20:16:48,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199122.66666666666, ans=0.125 +2024-07-28 20:17:00,529 INFO [train.py:1114] (2/4) Epoch 15, batch 6250, loss[loss=0.189, simple_loss=0.2813, pruned_loss=0.04836, over 4805.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2715, pruned_loss=0.04615, over 932958.55 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:17:02,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199149.33333333334, ans=0.1 +2024-07-28 20:17:25,820 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.601e+01 6.121e+01 7.200e+01 1.148e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 20:17:30,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199189.33333333334, ans=0.1 +2024-07-28 20:17:35,147 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:17:38,304 INFO [train.py:1114] (2/4) Epoch 15, batch 6300, loss[loss=0.19, simple_loss=0.2715, pruned_loss=0.05429, over 4589.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2717, pruned_loss=0.04659, over 929414.12 frames. 
], batch size: 10, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:17:40,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=199216.0, ans=0.2 +2024-07-28 20:17:42,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=199216.0, ans=0.125 +2024-07-28 20:17:45,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199229.33333333334, ans=0.125 +2024-07-28 20:17:47,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199229.33333333334, ans=0.125 +2024-07-28 20:17:53,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=199242.66666666666, ans=0.2 +2024-07-28 20:18:07,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=199269.33333333334, ans=0.125 +2024-07-28 20:18:08,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=199269.33333333334, ans=0.125 +2024-07-28 20:18:11,403 INFO [train.py:1114] (2/4) Epoch 15, batch 6350, loss[loss=0.1776, simple_loss=0.2725, pruned_loss=0.0414, over 4474.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2714, pruned_loss=0.04597, over 933564.35 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:18:23,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199296.0, ans=0.1 +2024-07-28 20:18:29,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=199309.33333333334, ans=0.0 +2024-07-28 20:18:33,048 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.687e+01 6.281e+01 7.134e+01 1.278e+02, threshold=1.256e+02, percent-clipped=1.0 +2024-07-28 20:18:35,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.34 vs. limit=15.0 +2024-07-28 20:18:44,888 INFO [train.py:1114] (2/4) Epoch 15, batch 6400, loss[loss=0.1717, simple_loss=0.2779, pruned_loss=0.03273, over 4636.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.27, pruned_loss=0.04539, over 934534.35 frames. ], batch size: 13, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:18:48,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=199349.33333333334, ans=0.0 +2024-07-28 20:18:52,506 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.70 vs. 
limit=10.0 +2024-07-28 20:18:55,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199362.66666666666, ans=0.1 +2024-07-28 20:18:57,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199362.66666666666, ans=0.1 +2024-07-28 20:18:57,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199362.66666666666, ans=0.125 +2024-07-28 20:19:00,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=199376.0, ans=0.2 +2024-07-28 20:19:16,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=199402.66666666666, ans=0.2 +2024-07-28 20:19:20,050 INFO [train.py:1114] (2/4) Epoch 15, batch 6450, loss[loss=0.1692, simple_loss=0.2668, pruned_loss=0.03584, over 4568.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2705, pruned_loss=0.04545, over 938459.49 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:19:20,173 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=199416.0, ans=0.125 +2024-07-28 20:19:28,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=199429.33333333334, ans=0.025 +2024-07-28 20:19:30,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199429.33333333334, ans=0.1 +2024-07-28 20:19:34,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.60 vs. limit=10.0 +2024-07-28 20:19:37,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=199442.66666666666, ans=0.0 +2024-07-28 20:19:40,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199456.0, ans=0.0 +2024-07-28 20:19:41,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.703e+01 6.605e+01 7.565e+01 1.204e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 20:19:53,870 INFO [train.py:1114] (2/4) Epoch 15, batch 6500, loss[loss=0.2188, simple_loss=0.2879, pruned_loss=0.07485, over 3471.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2707, pruned_loss=0.04557, over 940272.17 frames. ], batch size: 36, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:19:57,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=199482.66666666666, ans=0.0 +2024-07-28 20:19:59,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=199482.66666666666, ans=0.125 +2024-07-28 20:20:04,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=199496.0, ans=0.2 +2024-07-28 20:20:29,583 INFO [train.py:1114] (2/4) Epoch 15, batch 6550, loss[loss=0.1507, simple_loss=0.2383, pruned_loss=0.03156, over 4801.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2702, pruned_loss=0.04519, over 943165.64 frames. 
], batch size: 11, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:20:51,056 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.602e+01 6.242e+01 7.548e+01 1.165e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 20:21:00,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=199602.66666666666, ans=0.125 +2024-07-28 20:21:03,271 INFO [train.py:1114] (2/4) Epoch 15, batch 6600, loss[loss=0.2149, simple_loss=0.2951, pruned_loss=0.06732, over 4939.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2705, pruned_loss=0.0453, over 944924.62 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:21:20,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=199642.66666666666, ans=0.125 +2024-07-28 20:21:27,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=199656.0, ans=0.0 +2024-07-28 20:21:31,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=199669.33333333334, ans=0.025 +2024-07-28 20:21:33,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=199669.33333333334, ans=0.025 +2024-07-28 20:21:37,069 INFO [train.py:1114] (2/4) Epoch 15, batch 6650, loss[loss=0.1705, simple_loss=0.2569, pruned_loss=0.0421, over 4592.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2707, pruned_loss=0.04555, over 943420.10 frames. ], batch size: 17, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:21:46,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=199696.0, ans=0.125 +2024-07-28 20:21:55,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=199709.33333333334, ans=0.2 +2024-07-28 20:21:55,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=199709.33333333334, ans=0.125 +2024-07-28 20:21:58,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.79 vs. limit=22.5 +2024-07-28 20:21:58,842 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.657e+01 6.507e+01 7.358e+01 9.845e+01, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 20:22:04,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=199736.0, ans=0.125 +2024-07-28 20:22:11,047 INFO [train.py:1114] (2/4) Epoch 15, batch 6700, loss[loss=0.1808, simple_loss=0.2803, pruned_loss=0.04062, over 4672.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2716, pruned_loss=0.04619, over 942148.84 frames. ], batch size: 19, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:22:16,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.76 vs. 
limit=22.5 +2024-07-28 20:22:26,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=199776.0, ans=0.025 +2024-07-28 20:22:26,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199776.0, ans=0.1 +2024-07-28 20:22:28,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199776.0, ans=0.125 +2024-07-28 20:22:31,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.38 vs. limit=22.5 +2024-07-28 20:22:33,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=199789.33333333334, ans=0.025 +2024-07-28 20:22:35,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.03 vs. limit=6.0 +2024-07-28 20:22:37,411 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.14 vs. limit=15.0 +2024-07-28 20:22:46,739 INFO [train.py:1114] (2/4) Epoch 15, batch 6750, loss[loss=0.1795, simple_loss=0.2681, pruned_loss=0.04545, over 4377.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2718, pruned_loss=0.0465, over 940282.64 frames. ], batch size: 26, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:22:49,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0 +2024-07-28 20:23:09,780 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.776e+01 6.215e+01 6.945e+01 1.166e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 20:23:12,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.92 vs. limit=15.0 +2024-07-28 20:23:21,873 INFO [train.py:1114] (2/4) Epoch 15, batch 6800, loss[loss=0.1984, simple_loss=0.2779, pruned_loss=0.05949, over 4626.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2721, pruned_loss=0.0467, over 938462.56 frames. 
], batch size: 13, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:23:32,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199896.0, ans=0.125 +2024-07-28 20:23:34,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199909.33333333334, ans=0.125 +2024-07-28 20:23:34,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=199909.33333333334, ans=0.125 +2024-07-28 20:23:34,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=199909.33333333334, ans=0.125 +2024-07-28 20:23:37,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=199909.33333333334, ans=0.125 +2024-07-28 20:23:38,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199909.33333333334, ans=0.125 +2024-07-28 20:23:40,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=199909.33333333334, ans=0.125 +2024-07-28 20:23:43,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=199922.66666666666, ans=0.125 +2024-07-28 20:23:44,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.22 vs. limit=12.0 +2024-07-28 20:23:55,349 INFO [train.py:1114] (2/4) Epoch 15, batch 6850, loss[loss=0.1882, simple_loss=0.2845, pruned_loss=0.04595, over 4692.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2711, pruned_loss=0.04637, over 940374.57 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:23:59,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=199949.33333333334, ans=0.0 +2024-07-28 20:24:07,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=199962.66666666666, ans=0.125 +2024-07-28 20:24:16,992 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 5.729e+01 6.369e+01 7.119e+01 1.032e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 20:24:17,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=199989.33333333334, ans=0.0 +2024-07-28 20:24:23,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200002.66666666666, ans=0.1 +2024-07-28 20:24:24,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=200002.66666666666, ans=0.2 +2024-07-28 20:24:24,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.82 vs. limit=12.0 +2024-07-28 20:24:28,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.47 vs. limit=22.5 +2024-07-28 20:24:31,069 INFO [train.py:1114] (2/4) Epoch 15, batch 6900, loss[loss=0.1425, simple_loss=0.2353, pruned_loss=0.02485, over 4963.00 frames. 
], tot_loss[loss=0.1825, simple_loss=0.2718, pruned_loss=0.04662, over 942693.69 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:24:52,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=200056.0, ans=0.2 +2024-07-28 20:24:55,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=200056.0, ans=0.0 +2024-07-28 20:25:01,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200069.33333333334, ans=0.1 +2024-07-28 20:25:04,304 INFO [train.py:1114] (2/4) Epoch 15, batch 6950, loss[loss=0.1499, simple_loss=0.2376, pruned_loss=0.03106, over 4513.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2712, pruned_loss=0.04644, over 939904.94 frames. ], batch size: 10, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:25:06,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.84 vs. limit=15.0 +2024-07-28 20:25:10,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=200096.0, ans=0.125 +2024-07-28 20:25:15,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=200096.0, ans=0.0 +2024-07-28 20:25:23,594 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:25:23,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200122.66666666666, ans=0.125 +2024-07-28 20:25:25,326 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.615e+01 6.056e+01 6.911e+01 1.034e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 20:25:26,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=200122.66666666666, ans=0.0 +2024-07-28 20:25:26,829 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:25:30,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=200136.0, ans=0.125 +2024-07-28 20:25:37,552 INFO [train.py:1114] (2/4) Epoch 15, batch 7000, loss[loss=0.1826, simple_loss=0.274, pruned_loss=0.04564, over 4637.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2701, pruned_loss=0.04577, over 938700.04 frames. 
], batch size: 17, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:25:48,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=200162.66666666666, ans=0.025 +2024-07-28 20:25:49,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=200162.66666666666, ans=0.0 +2024-07-28 20:25:51,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=200162.66666666666, ans=0.0 +2024-07-28 20:25:58,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200189.33333333334, ans=0.1 +2024-07-28 20:26:01,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=200189.33333333334, ans=0.125 +2024-07-28 20:26:12,405 INFO [train.py:1114] (2/4) Epoch 15, batch 7050, loss[loss=0.1652, simple_loss=0.2652, pruned_loss=0.03258, over 4753.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2704, pruned_loss=0.04603, over 942138.74 frames. ], batch size: 19, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:26:20,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200229.33333333334, ans=0.1 +2024-07-28 20:26:20,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=200229.33333333334, ans=0.0 +2024-07-28 20:26:24,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=200229.33333333334, ans=0.09899494936611666 +2024-07-28 20:26:35,598 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.674e+01 6.340e+01 7.118e+01 1.081e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 20:26:37,670 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=200256.0, ans=0.0 +2024-07-28 20:26:39,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=200256.0, ans=0.025 +2024-07-28 20:26:41,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=200269.33333333334, ans=0.04949747468305833 +2024-07-28 20:26:42,638 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:26:47,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=200282.66666666666, ans=0.2 +2024-07-28 20:26:47,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.50 vs. limit=12.0 +2024-07-28 20:26:47,601 INFO [train.py:1114] (2/4) Epoch 15, batch 7100, loss[loss=0.248, simple_loss=0.3274, pruned_loss=0.0843, over 4810.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2717, pruned_loss=0.04696, over 937068.57 frames. 
], batch size: 15, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:26:47,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=200282.66666666666, ans=0.125
+2024-07-28 20:26:57,484 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:27:01,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=200309.33333333334, ans=0.0
+2024-07-28 20:27:16,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.91 vs. limit=22.5
+2024-07-28 20:27:19,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=200336.0, ans=0.0
+2024-07-28 20:27:20,253 INFO [train.py:1114] (2/4) Epoch 15, batch 7150, loss[loss=0.1991, simple_loss=0.3, pruned_loss=0.04911, over 4539.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2703, pruned_loss=0.04626, over 937781.44 frames. ], batch size: 21, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:27:25,092 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=15.0
+2024-07-28 20:27:41,583 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.495e+01 6.100e+01 6.664e+01 1.254e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 20:27:43,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200389.33333333334, ans=0.1
+2024-07-28 20:27:50,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0
+2024-07-28 20:27:53,638 INFO [train.py:1114] (2/4) Epoch 15, batch 7200, loss[loss=0.2065, simple_loss=0.2917, pruned_loss=0.0607, over 4804.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.271, pruned_loss=0.04629, over 938472.40 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 64.0
+2024-07-28 20:28:03,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200429.33333333334, ans=0.1
+2024-07-28 20:28:04,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=200429.33333333334, ans=0.125
+2024-07-28 20:28:07,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=200442.66666666666, ans=0.2
+2024-07-28 20:28:13,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200456.0, ans=0.125
+2024-07-28 20:28:14,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=200456.0, ans=0.125
+2024-07-28 20:28:23,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=200469.33333333334, ans=0.125
+2024-07-28 20:28:26,775 INFO [train.py:1114] (2/4) Epoch 15, batch 7250, loss[loss=0.1564, simple_loss=0.2426, pruned_loss=0.03514, over 4861.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2705, pruned_loss=0.04602, over 940259.72 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 64.0
+2024-07-28 20:28:33,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=200496.0, ans=0.0
+2024-07-28 20:28:33,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=200496.0, ans=0.125
+2024-07-28 20:28:34,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=200496.0, ans=0.0
+2024-07-28 20:28:46,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=200522.66666666666, ans=0.0
+2024-07-28 20:28:47,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.84 vs. limit=15.0
+2024-07-28 20:28:47,958 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.542e+01 5.960e+01 6.678e+01 9.539e+01, threshold=1.192e+02, percent-clipped=0.0
+2024-07-28 20:28:48,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=200522.66666666666, ans=0.125
+2024-07-28 20:28:53,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=200536.0, ans=0.125
+2024-07-28 20:28:59,491 INFO [train.py:1114] (2/4) Epoch 15, batch 7300, loss[loss=0.1928, simple_loss=0.2731, pruned_loss=0.05623, over 4860.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2704, pruned_loss=0.04607, over 940511.39 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 32.0
+2024-07-28 20:29:10,500 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:29:32,744 INFO [train.py:1114] (2/4) Epoch 15, batch 7350, loss[loss=0.1868, simple_loss=0.2796, pruned_loss=0.04696, over 4639.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2707, pruned_loss=0.04604, over 939911.44 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:29:38,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=200629.33333333334, ans=0.04949747468305833
+2024-07-28 20:29:43,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200629.33333333334, ans=0.1
+2024-07-28 20:29:46,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=200642.66666666666, ans=0.0
+2024-07-28 20:29:49,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200642.66666666666, ans=0.125
+2024-07-28 20:29:52,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=200656.0, ans=0.025
+2024-07-28 20:29:52,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=200656.0, ans=0.125
+2024-07-28 20:29:54,109 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.678e+01 6.177e+01 7.167e+01 1.153e+02, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 20:30:04,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200682.66666666666, ans=0.1
+2024-07-28 20:30:05,184 INFO [train.py:1114] (2/4) Epoch 15, batch 7400, loss[loss=0.1658, simple_loss=0.2573, pruned_loss=0.03711, over 4690.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2706, pruned_loss=0.04595, over 940929.13 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:30:12,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=200696.0, ans=0.125
+2024-07-28 20:30:22,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=200709.33333333334, ans=0.125
+2024-07-28 20:30:30,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=200722.66666666666, ans=0.125
+2024-07-28 20:30:30,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.11 vs. limit=15.0
+2024-07-28 20:30:38,350 INFO [train.py:1114] (2/4) Epoch 15, batch 7450, loss[loss=0.1583, simple_loss=0.2341, pruned_loss=0.04127, over 4612.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2687, pruned_loss=0.04532, over 938119.58 frames. ], batch size: 11, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:30:39,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=200749.33333333334, ans=0.2
+2024-07-28 20:30:46,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=200762.66666666666, ans=0.0
+2024-07-28 20:30:47,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=200762.66666666666, ans=0.0
+2024-07-28 20:30:47,771 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0
+2024-07-28 20:30:59,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=200789.33333333334, ans=0.0
+2024-07-28 20:30:59,743 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.621e+01 5.506e+01 6.120e+01 7.059e+01 1.130e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 20:31:00,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=200789.33333333334, ans=0.125
+2024-07-28 20:31:04,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=200802.66666666666, ans=0.07
+2024-07-28 20:31:10,176 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.60 vs. limit=22.5
+2024-07-28 20:31:11,142 INFO [train.py:1114] (2/4) Epoch 15, batch 7500, loss[loss=0.2456, simple_loss=0.3168, pruned_loss=0.08723, over 3363.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2697, pruned_loss=0.0456, over 936087.51 frames. ], batch size: 35, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:31:33,448 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0
+2024-07-28 20:31:33,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=12.0
+2024-07-28 20:31:35,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=200856.0, ans=0.125
+2024-07-28 20:31:41,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=200869.33333333334, ans=0.2
+2024-07-28 20:31:45,695 INFO [train.py:1114] (2/4) Epoch 15, batch 7550, loss[loss=0.1791, simple_loss=0.2726, pruned_loss=0.04284, over 4597.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2713, pruned_loss=0.0458, over 936101.79 frames. ], batch size: 17, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:31:46,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=200882.66666666666, ans=0.125
+2024-07-28 20:32:04,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200909.33333333334, ans=0.1
+2024-07-28 20:32:08,710 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.439e+01 5.885e+01 6.380e+01 8.239e+01, threshold=1.177e+02, percent-clipped=0.0
+2024-07-28 20:32:10,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=200922.66666666666, ans=0.125
+2024-07-28 20:32:11,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=200922.66666666666, ans=0.05
+2024-07-28 20:32:12,895 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:32:19,797 INFO [train.py:1114] (2/4) Epoch 15, batch 7600, loss[loss=0.1607, simple_loss=0.2533, pruned_loss=0.03404, over 4813.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2699, pruned_loss=0.04484, over 937858.27 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:32:20,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200949.33333333334, ans=0.1
+2024-07-28 20:32:22,520 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.43 vs. limit=15.0
+2024-07-28 20:32:41,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=200962.66666666666, ans=0.125
+2024-07-28 20:32:48,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=200962.66666666666, ans=0.0
+2024-07-28 20:32:49,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=200976.0, ans=0.0
+2024-07-28 20:32:59,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=200989.33333333334, ans=0.125
+2024-07-28 20:33:03,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=200989.33333333334, ans=0.2
+2024-07-28 20:33:13,672 INFO [train.py:1114] (2/4) Epoch 15, batch 7650, loss[loss=0.1446, simple_loss=0.2352, pruned_loss=0.02702, over 4945.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2699, pruned_loss=0.04476, over 936830.30 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:33:18,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=201016.0, ans=0.0
+2024-07-28 20:33:21,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=201016.0, ans=0.0
+2024-07-28 20:33:25,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=201029.33333333334, ans=10.0
+2024-07-28 20:33:30,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=201029.33333333334, ans=0.125
+2024-07-28 20:33:50,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=201056.0, ans=0.07
+2024-07-28 20:33:52,758 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.492e+01 6.279e+01 7.005e+01 1.015e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 20:33:53,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=201056.0, ans=0.95
+2024-07-28 20:34:02,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=201069.33333333334, ans=0.125
+2024-07-28 20:34:05,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=201069.33333333334, ans=0.0
+2024-07-28 20:34:21,700 INFO [train.py:1114] (2/4) Epoch 15, batch 7700, loss[loss=0.1727, simple_loss=0.2791, pruned_loss=0.03314, over 4689.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2713, pruned_loss=0.0457, over 934335.96 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:34:32,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=201096.0, ans=0.2
+2024-07-28 20:34:35,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.28 vs. limit=15.0
+2024-07-28 20:34:37,124 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.16 vs. limit=15.0
+2024-07-28 20:34:46,148 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0
+2024-07-28 20:34:56,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201122.66666666666, ans=0.125
+2024-07-28 20:35:01,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=201136.0, ans=0.125
+2024-07-28 20:35:06,321 INFO [train.py:1114] (2/4) Epoch 15, batch 7750, loss[loss=0.2164, simple_loss=0.3125, pruned_loss=0.06012, over 4928.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2723, pruned_loss=0.04573, over 935910.30 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:35:20,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=201162.66666666666, ans=0.0
+2024-07-28 20:35:20,834 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:35:21,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=201162.66666666666, ans=0.125
+2024-07-28 20:35:27,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=201176.0, ans=0.125
+2024-07-28 20:35:29,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=201176.0, ans=0.0
+2024-07-28 20:35:31,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=201176.0, ans=0.2
+2024-07-28 20:35:37,375 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.525e+01 5.917e+01 6.791e+01 1.166e+02, threshold=1.183e+02, percent-clipped=0.0
+2024-07-28 20:36:13,010 INFO [train.py:1114] (2/4) Epoch 15, batch 7800, loss[loss=0.1735, simple_loss=0.2745, pruned_loss=0.0363, over 4662.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2722, pruned_loss=0.04554, over 937414.75 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:36:30,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=201216.0, ans=0.125
+2024-07-28 20:36:36,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=201229.33333333334, ans=0.125
+2024-07-28 20:36:52,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0
+2024-07-28 20:37:04,532 INFO [train.py:1114] (2/4) Epoch 15, batch 7850, loss[loss=0.1545, simple_loss=0.2257, pruned_loss=0.04164, over 4502.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2722, pruned_loss=0.04592, over 936502.65 frames. ], batch size: 10, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:37:19,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=201309.33333333334, ans=0.2
+2024-07-28 20:37:23,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=201309.33333333334, ans=0.025
+2024-07-28 20:37:36,068 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.654e+01 6.198e+01 6.976e+01 9.701e+01, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 20:37:42,775 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.15 vs. limit=6.0
+2024-07-28 20:37:46,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.95 vs. limit=15.0
+2024-07-28 20:37:47,267 INFO [train.py:1114] (2/4) Epoch 15, batch 7900, loss[loss=0.1618, simple_loss=0.2561, pruned_loss=0.03372, over 4880.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2735, pruned_loss=0.0464, over 933310.91 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0
+2024-07-28 20:37:50,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201349.33333333334, ans=0.1
+2024-07-28 20:37:57,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.56 vs. limit=15.0
+2024-07-28 20:38:07,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=201376.0, ans=0.125
+2024-07-28 20:38:14,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=201402.66666666666, ans=0.125
+2024-07-28 20:38:21,274 INFO [train.py:1114] (2/4) Epoch 15, batch 7950, loss[loss=0.248, simple_loss=0.3105, pruned_loss=0.0927, over 3362.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2734, pruned_loss=0.04664, over 935327.52 frames. ], batch size: 35, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:38:23,238 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:38:42,610 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.203e+01 5.519e+01 6.026e+01 6.724e+01 9.656e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 20:38:53,249 INFO [train.py:1114] (2/4) Epoch 15, batch 8000, loss[loss=0.155, simple_loss=0.2442, pruned_loss=0.03285, over 4611.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2712, pruned_loss=0.04576, over 934613.87 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:38:57,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.69 vs. limit=22.5
+2024-07-28 20:39:03,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=201496.0, ans=0.025
+2024-07-28 20:39:16,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.84 vs. limit=10.0
+2024-07-28 20:39:16,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201522.66666666666, ans=0.125
+2024-07-28 20:39:23,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=201536.0, ans=0.125
+2024-07-28 20:39:25,593 INFO [train.py:1114] (2/4) Epoch 15, batch 8050, loss[loss=0.1867, simple_loss=0.2872, pruned_loss=0.04311, over 4812.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2711, pruned_loss=0.04574, over 934384.40 frames. ], batch size: 14, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:39:25,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=201549.33333333334, ans=0.2
+2024-07-28 20:39:30,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=201549.33333333334, ans=0.025
+2024-07-28 20:39:46,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201589.33333333334, ans=0.0
+2024-07-28 20:39:46,854 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.002e+01 6.838e+01 8.210e+01 1.277e+02, threshold=1.368e+02, percent-clipped=1.0
+2024-07-28 20:39:51,191 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:39:58,064 INFO [train.py:1114] (2/4) Epoch 15, batch 8100, loss[loss=0.2075, simple_loss=0.3078, pruned_loss=0.05363, over 4813.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2718, pruned_loss=0.04568, over 934139.67 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:40:00,375 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=12.0
+2024-07-28 20:40:04,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.22 vs. limit=15.0
+2024-07-28 20:40:04,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201629.33333333334, ans=0.125
+2024-07-28 20:40:08,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.69 vs. limit=12.0
+2024-07-28 20:40:10,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=201642.66666666666, ans=0.125
+2024-07-28 20:40:10,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=201642.66666666666, ans=0.0
+2024-07-28 20:40:18,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=201656.0, ans=0.125
+2024-07-28 20:40:22,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=201656.0, ans=0.0
+2024-07-28 20:40:25,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201669.33333333334, ans=0.1
+2024-07-28 20:40:30,019 INFO [train.py:1114] (2/4) Epoch 15, batch 8150, loss[loss=0.2082, simple_loss=0.2976, pruned_loss=0.05943, over 4803.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.271, pruned_loss=0.0457, over 937631.37 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:40:32,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=201682.66666666666, ans=0.0
+2024-07-28 20:40:35,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.33 vs. limit=15.0
+2024-07-28 20:40:37,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=201696.0, ans=0.0
+2024-07-28 20:40:51,252 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.751e+01 6.330e+01 7.260e+01 1.173e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 20:41:02,487 INFO [train.py:1114] (2/4) Epoch 15, batch 8200, loss[loss=0.1771, simple_loss=0.2719, pruned_loss=0.04118, over 4799.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2704, pruned_loss=0.04517, over 939098.16 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:41:04,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.01 vs. limit=12.0
+2024-07-28 20:41:17,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=201776.0, ans=0.025
+2024-07-28 20:41:17,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0
+2024-07-28 20:41:21,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=201776.0, ans=0.025
+2024-07-28 20:41:34,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=201802.66666666666, ans=0.1
+2024-07-28 20:41:35,843 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.51 vs. limit=10.0
+2024-07-28 20:41:36,030 INFO [train.py:1114] (2/4) Epoch 15, batch 8250, loss[loss=0.1518, simple_loss=0.2461, pruned_loss=0.02873, over 4892.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2703, pruned_loss=0.04494, over 939497.67 frames. ], batch size: 13, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:41:52,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201842.66666666666, ans=0.125
+2024-07-28 20:41:53,500 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=12.0
+2024-07-28 20:41:53,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201842.66666666666, ans=0.1
+2024-07-28 20:41:54,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=201842.66666666666, ans=0.2
+2024-07-28 20:41:57,640 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.622e+01 6.090e+01 6.800e+01 1.043e+02, threshold=1.218e+02, percent-clipped=0.0
+2024-07-28 20:42:00,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=201856.0, ans=0.2
+2024-07-28 20:42:04,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=201869.33333333334, ans=0.125
+2024-07-28 20:42:06,835 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:42:07,102 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=15.0
+2024-07-28 20:42:08,669 INFO [train.py:1114] (2/4) Epoch 15, batch 8300, loss[loss=0.2262, simple_loss=0.2907, pruned_loss=0.08087, over 4889.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2713, pruned_loss=0.04558, over 939253.67 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:42:11,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201882.66666666666, ans=0.0
+2024-07-28 20:42:19,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=201896.0, ans=0.0
+2024-07-28 20:42:21,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=201909.33333333334, ans=0.2
+2024-07-28 20:42:31,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=201922.66666666666, ans=0.125
+2024-07-28 20:42:36,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201936.0, ans=0.125
+2024-07-28 20:42:41,380 INFO [train.py:1114] (2/4) Epoch 15, batch 8350, loss[loss=0.1949, simple_loss=0.2836, pruned_loss=0.05306, over 4809.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2707, pruned_loss=0.04526, over 941853.29 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:42:44,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201949.33333333334, ans=0.125
+2024-07-28 20:42:46,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=201949.33333333334, ans=0.0
+2024-07-28 20:42:47,150 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0
+2024-07-28 20:42:57,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=201976.0, ans=0.05
+2024-07-28 20:43:00,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=201976.0, ans=0.2
+2024-07-28 20:43:02,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=201989.33333333334, ans=0.125
+2024-07-28 20:43:03,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.556e+01 6.243e+01 6.901e+01 1.019e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-28 20:43:13,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=202002.66666666666, ans=0.0
+2024-07-28 20:43:15,646 INFO [train.py:1114] (2/4) Epoch 15, batch 8400, loss[loss=0.1421, simple_loss=0.2376, pruned_loss=0.02326, over 4769.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2714, pruned_loss=0.04568, over 940455.68 frames. ], batch size: 12, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:43:27,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=202029.33333333334, ans=0.0
+2024-07-28 20:43:33,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=202042.66666666666, ans=0.125
+2024-07-28 20:43:34,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=202042.66666666666, ans=0.0
+2024-07-28 20:43:37,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=202056.0, ans=0.0
+2024-07-28 20:43:47,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=202069.33333333334, ans=0.0
+2024-07-28 20:43:49,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=202069.33333333334, ans=0.0
+2024-07-28 20:43:51,497 INFO [train.py:1114] (2/4) Epoch 15, batch 8450, loss[loss=0.1763, simple_loss=0.2588, pruned_loss=0.04691, over 4803.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2727, pruned_loss=0.0461, over 939126.39 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:43:53,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.35 vs. limit=15.0
+2024-07-28 20:43:55,088 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0
+2024-07-28 20:44:03,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=202096.0, ans=0.2
+2024-07-28 20:44:03,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=202109.33333333334, ans=0.0
+2024-07-28 20:44:20,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=202122.66666666666, ans=0.125
+2024-07-28 20:44:20,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.86 vs. limit=22.5
+2024-07-28 20:44:20,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=202122.66666666666, ans=0.125
+2024-07-28 20:44:21,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.800e+01 6.456e+01 7.440e+01 1.040e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-28 20:44:21,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=202122.66666666666, ans=0.2
+2024-07-28 20:44:34,655 INFO [train.py:1114] (2/4) Epoch 15, batch 8500, loss[loss=0.1603, simple_loss=0.2411, pruned_loss=0.03973, over 4612.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2714, pruned_loss=0.04548, over 938607.96 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:44:39,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.13 vs. limit=15.0
+2024-07-28 20:44:54,931 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.66 vs. limit=10.0
+2024-07-28 20:45:05,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=202202.66666666666, ans=0.0
+2024-07-28 20:45:07,517 INFO [train.py:1114] (2/4) Epoch 15, batch 8550, loss[loss=0.1324, simple_loss=0.2183, pruned_loss=0.02328, over 4820.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2709, pruned_loss=0.04539, over 939742.75 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0
+2024-07-28 20:45:11,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.35 vs. limit=15.0
+2024-07-28 20:45:30,957 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.682e+01 6.336e+01 7.358e+01 1.234e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-28 20:45:31,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=202256.0, ans=0.025
+2024-07-28 20:45:41,901 INFO [train.py:1114] (2/4) Epoch 15, batch 8600, loss[loss=0.1745, simple_loss=0.2686, pruned_loss=0.04024, over 4800.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2707, pruned_loss=0.04547, over 939163.03 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:45:45,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=202282.66666666666, ans=0.2
+2024-07-28 20:45:53,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0
+2024-07-28 20:46:08,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=202336.0, ans=0.125
+2024-07-28 20:46:14,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=202349.33333333334, ans=0.125
+2024-07-28 20:46:14,999 INFO [train.py:1114] (2/4) Epoch 15, batch 8650, loss[loss=0.1875, simple_loss=0.283, pruned_loss=0.04601, over 4904.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2707, pruned_loss=0.04559, over 940274.19 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:46:18,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202349.33333333334, ans=0.1
+2024-07-28 20:46:20,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=202362.66666666666, ans=0.125
+2024-07-28 20:46:34,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.32 vs. limit=10.0
+2024-07-28 20:46:36,418 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.651e+01 6.077e+01 6.775e+01 1.563e+02, threshold=1.215e+02, percent-clipped=1.0
+2024-07-28 20:46:43,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=202402.66666666666, ans=0.0
+2024-07-28 20:46:47,380 INFO [train.py:1114] (2/4) Epoch 15, batch 8700, loss[loss=0.154, simple_loss=0.2526, pruned_loss=0.02772, over 4750.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2711, pruned_loss=0.04582, over 937567.06 frames. ], batch size: 13, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:46:56,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=202429.33333333334, ans=0.125
+2024-07-28 20:46:58,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202429.33333333334, ans=0.1
+2024-07-28 20:47:13,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=202469.33333333334, ans=0.04949747468305833
+2024-07-28 20:47:19,423 INFO [train.py:1114] (2/4) Epoch 15, batch 8750, loss[loss=0.1993, simple_loss=0.29, pruned_loss=0.05432, over 4674.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2706, pruned_loss=0.04522, over 936291.97 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:47:19,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=202482.66666666666, ans=0.0
+2024-07-28 20:47:21,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=202482.66666666666, ans=0.2
+2024-07-28 20:47:29,542 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.84 vs. limit=22.5
+2024-07-28 20:47:38,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=202522.66666666666, ans=0.0
+2024-07-28 20:47:40,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.196e+01 6.974e+01 1.029e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 20:47:41,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=202522.66666666666, ans=0.0
+2024-07-28 20:47:46,801 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.11 vs. limit=6.0
+2024-07-28 20:47:49,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=202536.0, ans=0.0
+2024-07-28 20:47:51,483 INFO [train.py:1114] (2/4) Epoch 15, batch 8800, loss[loss=0.228, simple_loss=0.3201, pruned_loss=0.06794, over 4931.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2704, pruned_loss=0.04514, over 936881.72 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:47:56,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=202549.33333333334, ans=0.0
+2024-07-28 20:48:03,666 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:48:11,117 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.72 vs. limit=12.0
+2024-07-28 20:48:23,830 INFO [train.py:1114] (2/4) Epoch 15, batch 8850, loss[loss=0.1766, simple_loss=0.2591, pruned_loss=0.04707, over 4488.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.27, pruned_loss=0.04512, over 931755.48 frames. ], batch size: 21, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:48:24,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=202616.0, ans=0.0
+2024-07-28 20:48:27,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=202616.0, ans=0.125
+2024-07-28 20:48:34,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202629.33333333334, ans=0.1
+2024-07-28 20:48:36,890 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5
+2024-07-28 20:48:50,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.661e+01 6.393e+01 7.198e+01 1.179e+02, threshold=1.279e+02, percent-clipped=0.0
+2024-07-28 20:48:50,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=202656.0, ans=0.025
+2024-07-28 20:48:51,015 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.98 vs. limit=15.0
+2024-07-28 20:49:07,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202669.33333333334, ans=0.1
+2024-07-28 20:49:10,209 INFO [train.py:1114] (2/4) Epoch 15, batch 8900, loss[loss=0.1404, simple_loss=0.2226, pruned_loss=0.02912, over 4930.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.27, pruned_loss=0.04535, over 929880.15 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:49:33,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=202722.66666666666, ans=0.05
+2024-07-28 20:49:34,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.68 vs. limit=12.0
+2024-07-28 20:49:42,088 INFO [train.py:1114] (2/4) Epoch 15, batch 8950, loss[loss=0.1785, simple_loss=0.2556, pruned_loss=0.05071, over 4561.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2709, pruned_loss=0.04541, over 930825.64 frames. ], batch size: 21, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:49:43,846 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.28 vs. limit=22.5
+2024-07-28 20:49:53,993 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.79 vs. limit=15.0
+2024-07-28 20:49:57,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=202776.0, ans=0.125
+2024-07-28 20:49:58,527 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.79 vs. limit=22.5
+2024-07-28 20:49:59,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=202776.0, ans=0.125
+2024-07-28 20:50:03,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.676e+01 6.111e+01 7.140e+01 9.937e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 20:50:05,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.16 vs. limit=22.5
+2024-07-28 20:50:14,257 INFO [train.py:1114] (2/4) Epoch 15, batch 9000, loss[loss=0.1607, simple_loss=0.2462, pruned_loss=0.0376, over 4641.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.269, pruned_loss=0.04479, over 933568.18 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:50:14,257 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-28 20:50:29,447 INFO [train.py:1146] (2/4) Epoch 15, validation: loss=0.164, simple_loss=0.2673, pruned_loss=0.03039, over 944034.00 frames.
+2024-07-28 20:50:29,448 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-28 20:50:41,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=202842.66666666666, ans=0.125
+2024-07-28 20:50:45,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=202842.66666666666, ans=0.2
+2024-07-28 20:51:01,320 INFO [train.py:1114] (2/4) Epoch 15, batch 9050, loss[loss=0.1629, simple_loss=0.2387, pruned_loss=0.04354, over 4523.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2682, pruned_loss=0.04418, over 934461.01 frames. ], batch size: 10, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:51:04,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=202882.66666666666, ans=0.2
+2024-07-28 20:51:09,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=202896.0, ans=0.125
+2024-07-28 20:51:21,894 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.604e+01 6.217e+01 7.321e+01 1.269e+02, threshold=1.243e+02, percent-clipped=1.0
+2024-07-28 20:51:24,272 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.38 vs. limit=12.0
+2024-07-28 20:51:32,976 INFO [train.py:1114] (2/4) Epoch 15, batch 9100, loss[loss=0.163, simple_loss=0.2633, pruned_loss=0.03142, over 4940.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2677, pruned_loss=0.04416, over 937085.93 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:51:35,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=202949.33333333334, ans=0.025
+2024-07-28 20:51:38,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0
+2024-07-28 20:51:44,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202962.66666666666, ans=0.125
+2024-07-28 20:51:52,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=202989.33333333334, ans=0.1
+2024-07-28 20:51:54,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=202989.33333333334, ans=0.09899494936611666
+2024-07-28 20:51:57,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=203002.66666666666, ans=0.2
+2024-07-28 20:51:58,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203002.66666666666, ans=0.125
+2024-07-28 20:52:01,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=203002.66666666666, ans=0.125
+2024-07-28 20:52:02,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.54 vs. limit=10.0
+2024-07-28 20:52:04,365 INFO [train.py:1114] (2/4) Epoch 15, batch 9150, loss[loss=0.1864, simple_loss=0.2823, pruned_loss=0.04525, over 4816.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2685, pruned_loss=0.04435, over 935764.33 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:52:19,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=203042.66666666666, ans=0.125
+2024-07-28 20:52:25,461 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.523e+01 6.043e+01 6.925e+01 1.017e+02, threshold=1.209e+02, percent-clipped=0.0
+2024-07-28 20:52:32,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=203069.33333333334, ans=0.2
+2024-07-28 20:52:36,147 INFO [train.py:1114] (2/4) Epoch 15, batch 9200, loss[loss=0.1648, simple_loss=0.2501, pruned_loss=0.03979, over 4853.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2675, pruned_loss=0.04411, over 937634.84 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:52:40,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=203082.66666666666, ans=0.0
+2024-07-28 20:52:40,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203082.66666666666, ans=0.125
+2024-07-28 20:52:42,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.54 vs. limit=15.0
+2024-07-28 20:52:47,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.53 vs. limit=22.5
+2024-07-28 20:52:48,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=203109.33333333334, ans=0.04949747468305833
+2024-07-28 20:52:51,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.11 vs. limit=15.0
+2024-07-28 20:52:53,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203109.33333333334, ans=0.125
+2024-07-28 20:52:58,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=203122.66666666666, ans=0.125
+2024-07-28 20:52:59,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=203122.66666666666, ans=0.125
+2024-07-28 20:53:07,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=203136.0, ans=0.035
+2024-07-28 20:53:07,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=203136.0, ans=0.05
+2024-07-28 20:53:08,434 INFO [train.py:1114] (2/4) Epoch 15, batch 9250, loss[loss=0.1922, simple_loss=0.2975, pruned_loss=0.04344, over 4630.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.04446, over 938663.50 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:53:29,344 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.672e+01 6.344e+01 6.747e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-28 20:53:30,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203189.33333333334, ans=0.1
+2024-07-28 20:53:35,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203202.66666666666, ans=0.125
+2024-07-28 20:53:41,027 INFO [train.py:1114] (2/4) Epoch 15, batch 9300, loss[loss=0.1873, simple_loss=0.2712, pruned_loss=0.05168, over 4778.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2691, pruned_loss=0.04471, over 938138.89 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:53:41,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=203216.0, ans=0.0
+2024-07-28 20:53:45,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203216.0, ans=0.1
+2024-07-28 20:53:46,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=203216.0, ans=0.125
+2024-07-28 20:53:46,995 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:53:47,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=203229.33333333334, ans=0.125
+2024-07-28 20:53:49,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203229.33333333334, ans=0.0
+2024-07-28 20:53:50,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=203229.33333333334, ans=0.125
+2024-07-28 20:53:50,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=203229.33333333334, ans=0.5
+2024-07-28 20:53:54,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.95 vs. limit=15.0
+2024-07-28 20:53:55,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203242.66666666666, ans=0.1
+2024-07-28 20:54:02,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=203256.0, ans=0.125
+2024-07-28 20:54:13,799 INFO [train.py:1114] (2/4) Epoch 15, batch 9350, loss[loss=0.1459, simple_loss=0.2359, pruned_loss=0.02798, over 4814.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2699, pruned_loss=0.04524, over 934468.90 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:54:19,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=203282.66666666666, ans=0.125
+2024-07-28 20:54:24,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=203296.0, ans=0.025
+2024-07-28 20:54:34,860 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.452e+01 6.189e+01 7.531e+01 9.435e+01, threshold=1.238e+02, percent-clipped=0.0
+2024-07-28 20:54:37,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=203322.66666666666, ans=0.07
+2024-07-28 20:54:38,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=203322.66666666666, ans=0.025
+2024-07-28 20:54:45,610 INFO [train.py:1114] (2/4) Epoch 15, batch 9400, loss[loss=0.1868, simple_loss=0.2863, pruned_loss=0.0437, over 4701.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.27, pruned_loss=0.04555, over 932671.27 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:54:55,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0
+2024-07-28 20:54:56,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=203362.66666666666, ans=0.07
+2024-07-28 20:55:06,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.37 vs. limit=15.0
+2024-07-28 20:55:17,111 INFO [train.py:1114] (2/4) Epoch 15, batch 9450, loss[loss=0.1542, simple_loss=0.243, pruned_loss=0.03265, over 4785.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2703, pruned_loss=0.04534, over 931874.96 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:55:17,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=203416.0, ans=0.125
+2024-07-28 20:55:19,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203416.0, ans=0.125
+2024-07-28 20:55:21,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=203416.0, ans=0.125
+2024-07-28 20:55:21,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=203416.0, ans=0.125
+2024-07-28 20:55:30,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=203442.66666666666, ans=0.125
+2024-07-28 20:55:37,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+01 5.463e+01 5.974e+01 6.797e+01 9.307e+01, threshold=1.195e+02, percent-clipped=0.0
+2024-07-28 20:55:41,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=203456.0, ans=0.0
+2024-07-28 20:55:48,428 INFO [train.py:1114] (2/4) Epoch 15, batch 9500, loss[loss=0.1735, simple_loss=0.2648, pruned_loss=0.04107, over 4712.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2706, pruned_loss=0.04486, over 934192.27 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:55:59,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=203496.0, ans=0.125
+2024-07-28 20:56:02,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=203509.33333333334, ans=0.04949747468305833
+2024-07-28 20:56:09,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=203522.66666666666, ans=0.025
+2024-07-28 20:56:19,753 INFO [train.py:1114] (2/4) Epoch 15, batch 9550, loss[loss=0.17, simple_loss=0.2541, pruned_loss=0.04294, over 4773.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2713, pruned_loss=0.04565, over 931839.74 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:56:26,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=203562.66666666666, ans=0.0
+2024-07-28 20:56:40,323 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.499e+01 6.112e+01 6.972e+01 9.508e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 20:56:41,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=203589.33333333334, ans=0.125
+2024-07-28 20:56:41,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203589.33333333334, ans=0.1
+2024-07-28 20:56:47,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=203602.66666666666, ans=0.125
+2024-07-28 20:56:50,886 INFO [train.py:1114] (2/4) Epoch 15, batch 9600, loss[loss=0.2331, simple_loss=0.299, pruned_loss=0.08358, over 3393.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2709, pruned_loss=0.04576, over 930858.02 frames. ], batch size: 35, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:56:54,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.55 vs. limit=22.5
+2024-07-28 20:57:05,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=203642.66666666666, ans=0.0
+2024-07-28 20:57:07,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=203642.66666666666, ans=0.0
+2024-07-28 20:57:14,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.43 vs. limit=22.5
+2024-07-28 20:57:20,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=203669.33333333334, ans=6.0
+2024-07-28 20:57:22,693 INFO [train.py:1114] (2/4) Epoch 15, batch 9650, loss[loss=0.1797, simple_loss=0.2777, pruned_loss=0.04083, over 4857.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2707, pruned_loss=0.04619, over 926234.22 frames. ], batch size: 16, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:57:27,494 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.74 vs. limit=22.5
+2024-07-28 20:57:32,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203696.0, ans=0.1
+2024-07-28 20:57:44,440 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.709e+01 6.228e+01 7.235e+01 8.715e+01, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 20:57:46,727 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.38 vs. limit=15.0
+2024-07-28 20:57:47,212 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 20:57:55,047 INFO [train.py:1114] (2/4) Epoch 15, batch 9700, loss[loss=0.2577, simple_loss=0.3332, pruned_loss=0.09107, over 4286.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2709, pruned_loss=0.04623, over 924115.60 frames. ], batch size: 25, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:58:14,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203789.33333333334, ans=0.0
+2024-07-28 20:58:16,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0
+2024-07-28 20:58:18,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.91 vs. limit=15.0
+2024-07-28 20:58:26,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203802.66666666666, ans=0.125
+2024-07-28 20:58:27,166 INFO [train.py:1114] (2/4) Epoch 15, batch 9750, loss[loss=0.1846, simple_loss=0.2794, pruned_loss=0.04488, over 4673.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2702, pruned_loss=0.04589, over 924471.36 frames. ], batch size: 15, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:58:27,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.84 vs. limit=22.5
+2024-07-28 20:58:31,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=203816.0, ans=0.2
+2024-07-28 20:58:41,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=203842.66666666666, ans=0.2
+2024-07-28 20:58:48,315 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.731e+01 6.608e+01 7.819e+01 1.278e+02, threshold=1.322e+02, percent-clipped=1.0
+2024-07-28 20:58:51,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=203856.0, ans=0.125
+2024-07-28 20:58:56,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=203869.33333333334, ans=0.0
+2024-07-28 20:59:03,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203882.66666666666, ans=0.1
+2024-07-28 20:59:03,969 INFO [train.py:1114] (2/4) Epoch 15, batch 9800, loss[loss=0.1802, simple_loss=0.2629, pruned_loss=0.04879, over 4707.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2695, pruned_loss=0.04562, over 924169.39 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0
+2024-07-28 20:59:04,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=203882.66666666666, ans=0.125
+2024-07-28 20:59:11,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203896.0, ans=0.1
+2024-07-28 20:59:12,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203896.0, ans=0.1
+2024-07-28 20:59:16,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=203909.33333333334, ans=0.2
+2024-07-28 20:59:17,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=203909.33333333334, ans=0.2
+2024-07-28 20:59:18,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.41 vs. limit=12.0
+2024-07-28 20:59:21,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=203909.33333333334, ans=0.125
+2024-07-28 20:59:26,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203922.66666666666, ans=0.125
+2024-07-28 20:59:27,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.38 vs. limit=15.0
+2024-07-28 20:59:29,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=203936.0, ans=0.125
+2024-07-28 20:59:35,249 INFO [train.py:1114] (2/4) Epoch 15, batch 9850, loss[loss=0.1823, simple_loss=0.2792, pruned_loss=0.04271, over 4888.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.27, pruned_loss=0.04574, over 927264.56 frames. ], batch size: 15, lr: 4.90e-03, grad_scale: 32.0
+2024-07-28 20:59:38,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=203949.33333333334, ans=0.025
+2024-07-28 20:59:38,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=203949.33333333334, ans=0.025
+2024-07-28 20:59:38,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. 
limit=15.0 +2024-07-28 20:59:43,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=203962.66666666666, ans=0.125 +2024-07-28 20:59:49,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=203976.0, ans=0.125 +2024-07-28 20:59:52,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203976.0, ans=0.1 +2024-07-28 20:59:52,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203976.0, ans=0.125 +2024-07-28 20:59:56,144 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.690e+01 6.538e+01 7.363e+01 1.082e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 20:59:57,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=203989.33333333334, ans=0.0 +2024-07-28 21:00:06,870 INFO [train.py:1114] (2/4) Epoch 15, batch 9900, loss[loss=0.2189, simple_loss=0.3004, pruned_loss=0.06871, over 4846.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2701, pruned_loss=0.04599, over 926372.28 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:00:27,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=204056.0, ans=0.0 +2024-07-28 21:00:30,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-07-28 21:00:37,694 INFO [train.py:1114] (2/4) Epoch 15, batch 9950, loss[loss=0.1717, simple_loss=0.2572, pruned_loss=0.0431, over 4808.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2699, pruned_loss=0.04608, over 929501.95 frames. ], batch size: 11, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:00:46,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=204096.0, ans=0.025 +2024-07-28 21:00:49,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=204096.0, ans=0.0 +2024-07-28 21:00:55,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=204109.33333333334, ans=0.0 +2024-07-28 21:00:59,864 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 6.089e+01 6.834e+01 7.968e+01 1.113e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 21:01:00,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=204122.66666666666, ans=0.125 +2024-07-28 21:01:07,552 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.57 vs. limit=10.0 +2024-07-28 21:01:09,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204149.33333333334, ans=0.125 +2024-07-28 21:01:09,747 INFO [train.py:1114] (2/4) Epoch 15, batch 10000, loss[loss=0.1918, simple_loss=0.2874, pruned_loss=0.04803, over 4629.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2732, pruned_loss=0.04715, over 927139.82 frames. 
], batch size: 16, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:01:12,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=204149.33333333334, ans=0.0 +2024-07-28 21:01:14,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=204149.33333333334, ans=0.125 +2024-07-28 21:01:20,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=204162.66666666666, ans=0.1 +2024-07-28 21:01:32,367 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=204189.33333333334, ans=0.0 +2024-07-28 21:01:39,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=204202.66666666666, ans=0.125 +2024-07-28 21:01:41,157 INFO [train.py:1114] (2/4) Epoch 15, batch 10050, loss[loss=0.233, simple_loss=0.321, pruned_loss=0.07249, over 3406.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2769, pruned_loss=0.04916, over 913676.38 frames. ], batch size: 36, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:01:45,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.11 vs. limit=22.5 +2024-07-28 21:01:48,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=204229.33333333334, ans=0.125 +2024-07-28 21:01:54,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.10 vs. limit=22.5 +2024-07-28 21:01:55,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=204242.66666666666, ans=0.0 +2024-07-28 21:02:04,619 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.990e+01 6.680e+01 7.345e+01 9.959e+01, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 21:02:06,807 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:02:08,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=204269.33333333334, ans=0.125 +2024-07-28 21:02:13,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.29 vs. limit=15.0 +2024-07-28 21:02:15,482 INFO [train.py:1114] (2/4) Epoch 15, batch 10100, loss[loss=0.2058, simple_loss=0.2835, pruned_loss=0.06407, over 3256.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2815, pruned_loss=0.05412, over 859566.32 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:02:24,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.32 vs. 
limit=15.0 +2024-07-28 21:02:25,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=204296.0, ans=0.125 +2024-07-28 21:02:26,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204296.0, ans=0.1 +2024-07-28 21:02:30,171 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:02:44,468 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=15.0 +2024-07-28 21:02:48,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=204349.33333333334, ans=0.125 +2024-07-28 21:02:48,790 INFO [train.py:1114] (2/4) Epoch 15, batch 10150, loss[loss=0.194, simple_loss=0.2825, pruned_loss=0.05274, over 3427.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2851, pruned_loss=0.05774, over 817092.77 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:02:52,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=204349.33333333334, ans=0.0 +2024-07-28 21:02:56,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=204362.66666666666, ans=0.015 +2024-07-28 21:03:02,932 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.59 vs. limit=10.0 +2024-07-28 21:03:12,070 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.640e+01 7.110e+01 7.457e+01 9.149e+01, threshold=1.422e+02, percent-clipped=0.0 +2024-07-28 21:03:15,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-28 21:03:18,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=204402.66666666666, ans=0.0 +2024-07-28 21:03:22,493 INFO [train.py:1114] (2/4) Epoch 15, batch 10200, loss[loss=0.2259, simple_loss=0.314, pruned_loss=0.06892, over 3283.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2877, pruned_loss=0.06012, over 785905.49 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:03:22,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.47 vs. limit=10.0 +2024-07-28 21:04:37,201 INFO [train.py:1114] (2/4) Epoch 16, batch 0, loss[loss=0.1515, simple_loss=0.2477, pruned_loss=0.02759, over 4854.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2477, pruned_loss=0.02759, over 4854.00 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:04:37,201 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 21:04:45,300 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.1986, 4.0019, 4.1260, 3.9437, 4.5132, 4.4064, 4.5412, 4.0181], + device='cuda:2') +2024-07-28 21:04:48,652 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1648, simple_loss=0.2693, pruned_loss=0.03017, over 944034.00 frames. 
+2024-07-28 21:04:48,652 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 21:04:53,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=204445.33333333334, ans=0.125 +2024-07-28 21:05:02,047 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=204472.0, ans=0.125 +2024-07-28 21:05:17,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.97 vs. limit=10.0 +2024-07-28 21:05:22,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204512.0, ans=0.125 +2024-07-28 21:05:23,359 INFO [train.py:1114] (2/4) Epoch 16, batch 50, loss[loss=0.1535, simple_loss=0.2404, pruned_loss=0.03332, over 4626.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2721, pruned_loss=0.04536, over 206477.52 frames. ], batch size: 11, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:05:29,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204512.0, ans=0.1 +2024-07-28 21:05:36,794 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.659e+01 6.518e+01 7.271e+01 1.139e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 21:05:44,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=6.0 +2024-07-28 21:05:56,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=204552.0, ans=0.125 +2024-07-28 21:06:04,207 INFO [train.py:1114] (2/4) Epoch 16, batch 100, loss[loss=0.1994, simple_loss=0.2822, pruned_loss=0.05827, over 4640.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2732, pruned_loss=0.04631, over 365690.84 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:06:04,584 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 21:06:13,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=204592.0, ans=0.125 +2024-07-28 21:06:20,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=204605.33333333334, ans=0.0 +2024-07-28 21:06:38,078 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:06:39,310 INFO [train.py:1114] (2/4) Epoch 16, batch 150, loss[loss=0.1605, simple_loss=0.2514, pruned_loss=0.03483, over 4605.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2705, pruned_loss=0.04538, over 494262.02 frames. ], batch size: 11, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:06:46,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=204645.33333333334, ans=0.125 +2024-07-28 21:06:50,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.99 vs. 
limit=15.0 +2024-07-28 21:06:50,235 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.360e+01 5.968e+01 6.673e+01 1.001e+02, threshold=1.194e+02, percent-clipped=0.0 +2024-07-28 21:06:51,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204658.66666666666, ans=0.125 +2024-07-28 21:06:54,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=204658.66666666666, ans=0.0 +2024-07-28 21:07:11,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.39 vs. limit=22.5 +2024-07-28 21:07:18,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=204712.0, ans=0.125 +2024-07-28 21:07:18,889 INFO [train.py:1114] (2/4) Epoch 16, batch 200, loss[loss=0.1921, simple_loss=0.2879, pruned_loss=0.04817, over 4546.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2688, pruned_loss=0.04452, over 594048.56 frames. ], batch size: 21, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:07:21,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=204712.0, ans=0.125 +2024-07-28 21:07:26,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 21:07:32,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=204738.66666666666, ans=0.125 +2024-07-28 21:07:41,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.42 vs. limit=15.0 +2024-07-28 21:07:41,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=204752.0, ans=0.1 +2024-07-28 21:07:52,299 INFO [train.py:1114] (2/4) Epoch 16, batch 250, loss[loss=0.16, simple_loss=0.258, pruned_loss=0.03103, over 4603.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2681, pruned_loss=0.0441, over 670498.38 frames. ], batch size: 16, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:07:56,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=204778.66666666666, ans=0.125 +2024-07-28 21:07:56,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=204778.66666666666, ans=0.125 +2024-07-28 21:08:03,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=204792.0, ans=0.025 +2024-07-28 21:08:05,814 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.773e+01 6.705e+01 7.902e+01 1.167e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 21:08:09,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=204792.0, ans=0.0 +2024-07-28 21:08:10,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.65 vs. 
limit=6.0 +2024-07-28 21:08:17,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204818.66666666666, ans=0.125 +2024-07-28 21:08:21,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=204818.66666666666, ans=0.05 +2024-07-28 21:08:26,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=204832.0, ans=0.125 +2024-07-28 21:08:36,620 INFO [train.py:1114] (2/4) Epoch 16, batch 300, loss[loss=0.1931, simple_loss=0.284, pruned_loss=0.0511, over 4816.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2692, pruned_loss=0.04475, over 730473.66 frames. ], batch size: 15, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:00,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=204885.33333333334, ans=0.025 +2024-07-28 21:09:05,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=204898.66666666666, ans=0.125 +2024-07-28 21:09:09,772 INFO [train.py:1114] (2/4) Epoch 16, batch 350, loss[loss=0.1617, simple_loss=0.2459, pruned_loss=0.03878, over 4936.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2693, pruned_loss=0.0448, over 776465.28 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:17,695 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.458e+01 6.054e+01 6.509e+01 1.036e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 21:09:44,865 INFO [train.py:1114] (2/4) Epoch 16, batch 400, loss[loss=0.1491, simple_loss=0.2286, pruned_loss=0.0348, over 4705.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2686, pruned_loss=0.04392, over 813638.63 frames. ], batch size: 13, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:46,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204978.66666666666, ans=0.1 +2024-07-28 21:09:47,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.72 vs. limit=15.0 +2024-07-28 21:09:55,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=204992.0, ans=10.0 +2024-07-28 21:13:32,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.46 vs. limit=22.5 +2024-07-28 21:13:43,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=205018.66666666666, ans=0.125 +2024-07-28 21:13:46,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=205018.66666666666, ans=0.125 +2024-07-28 21:13:46,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=205018.66666666666, ans=0.125 +2024-07-28 21:13:59,382 INFO [train.py:1114] (2/4) Epoch 16, batch 450, loss[loss=0.2147, simple_loss=0.3118, pruned_loss=0.05884, over 4633.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2692, pruned_loss=0.04405, over 838726.07 frames. 
], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:14:10,784 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.94 vs. limit=15.0 +2024-07-28 21:14:13,711 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.588e+01 6.021e+01 6.553e+01 1.018e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 21:14:24,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=205072.0, ans=0.0 +2024-07-28 21:14:38,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=205098.66666666666, ans=0.0 +2024-07-28 21:14:39,771 INFO [train.py:1114] (2/4) Epoch 16, batch 500, loss[loss=0.2193, simple_loss=0.3104, pruned_loss=0.06406, over 4681.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.04369, over 861279.91 frames. ], batch size: 15, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:14:47,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=205125.33333333334, ans=0.025 +2024-07-28 21:14:53,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205138.66666666666, ans=0.1 +2024-07-28 21:14:56,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0 +2024-07-28 21:14:59,774 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.72 vs. limit=10.0 +2024-07-28 21:15:02,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205152.0, ans=0.0 +2024-07-28 21:15:27,950 INFO [train.py:1114] (2/4) Epoch 16, batch 550, loss[loss=0.1962, simple_loss=0.2909, pruned_loss=0.05075, over 4602.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2679, pruned_loss=0.0436, over 877436.44 frames. ], batch size: 17, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:15:37,689 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.496e+01 6.135e+01 6.977e+01 1.008e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 21:15:40,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=205192.0, ans=0.025 +2024-07-28 21:15:54,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=205218.66666666666, ans=0.2 +2024-07-28 21:15:56,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205232.0, ans=0.125 +2024-07-28 21:16:05,156 INFO [train.py:1114] (2/4) Epoch 16, batch 600, loss[loss=0.183, simple_loss=0.2638, pruned_loss=0.05104, over 4623.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2685, pruned_loss=0.04413, over 891870.96 frames. 
], batch size: 16, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:16:14,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205258.66666666666, ans=0.125 +2024-07-28 21:16:21,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=205272.0, ans=0.0 +2024-07-28 21:16:26,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0 +2024-07-28 21:16:29,675 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:16:34,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=15.0 +2024-07-28 21:16:37,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=205312.0, ans=0.2 +2024-07-28 21:16:38,126 INFO [train.py:1114] (2/4) Epoch 16, batch 650, loss[loss=0.1742, simple_loss=0.2699, pruned_loss=0.03926, over 4758.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2684, pruned_loss=0.04343, over 903593.89 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:16:38,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205312.0, ans=0.1 +2024-07-28 21:16:46,460 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.356e+01 6.014e+01 6.947e+01 8.768e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 21:16:55,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=205338.66666666666, ans=0.125 +2024-07-28 21:17:09,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=205365.33333333334, ans=0.0 +2024-07-28 21:17:10,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205365.33333333334, ans=0.125 +2024-07-28 21:17:12,615 INFO [train.py:1114] (2/4) Epoch 16, batch 700, loss[loss=0.1632, simple_loss=0.2475, pruned_loss=0.03944, over 4644.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2681, pruned_loss=0.04353, over 911823.92 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:17:20,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=205392.0, ans=0.125 +2024-07-28 21:17:23,340 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:17:30,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=205405.33333333334, ans=15.0 +2024-07-28 21:17:37,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205418.66666666666, ans=0.1 +2024-07-28 21:17:39,119 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.11 vs. 
limit=15.0 +2024-07-28 21:17:42,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205432.0, ans=0.125 +2024-07-28 21:17:42,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=205432.0, ans=0.025 +2024-07-28 21:17:45,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=205445.33333333334, ans=0.025 +2024-07-28 21:17:45,814 INFO [train.py:1114] (2/4) Epoch 16, batch 750, loss[loss=0.1818, simple_loss=0.2804, pruned_loss=0.04162, over 4683.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2682, pruned_loss=0.04359, over 917845.44 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:17:53,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+01 5.543e+01 6.025e+01 6.972e+01 9.778e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 21:18:00,850 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.44 vs. limit=6.0 +2024-07-28 21:18:02,224 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=15.0 +2024-07-28 21:18:02,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=205472.0, ans=0.0 +2024-07-28 21:18:08,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.77 vs. limit=15.0 +2024-07-28 21:18:16,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=205498.66666666666, ans=0.0 +2024-07-28 21:18:17,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205498.66666666666, ans=0.125 +2024-07-28 21:18:22,234 INFO [train.py:1114] (2/4) Epoch 16, batch 800, loss[loss=0.1635, simple_loss=0.2552, pruned_loss=0.03595, over 4849.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2689, pruned_loss=0.04405, over 922954.81 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:18:30,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.16 vs. limit=6.0 +2024-07-28 21:18:31,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=205525.33333333334, ans=0.125 +2024-07-28 21:18:32,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=205525.33333333334, ans=0.125 +2024-07-28 21:18:34,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0 +2024-07-28 21:18:36,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=205525.33333333334, ans=0.125 +2024-07-28 21:19:00,063 INFO [train.py:1114] (2/4) Epoch 16, batch 850, loss[loss=0.1838, simple_loss=0.2855, pruned_loss=0.04103, over 4678.00 frames. 
], tot_loss[loss=0.1775, simple_loss=0.2678, pruned_loss=0.04357, over 927356.89 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:19:11,256 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.554e+01 6.346e+01 7.200e+01 1.191e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 21:19:26,311 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.59 vs. limit=10.0 +2024-07-28 21:19:35,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=205618.66666666666, ans=0.0 +2024-07-28 21:19:51,903 INFO [train.py:1114] (2/4) Epoch 16, batch 900, loss[loss=0.1486, simple_loss=0.2401, pruned_loss=0.02857, over 4859.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.04368, over 928371.76 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:20:19,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=205685.33333333334, ans=0.125 +2024-07-28 21:20:20,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=205685.33333333334, ans=0.0 +2024-07-28 21:20:20,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=205685.33333333334, ans=0.025 +2024-07-28 21:20:28,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=205698.66666666666, ans=0.0 +2024-07-28 21:20:34,591 INFO [train.py:1114] (2/4) Epoch 16, batch 950, loss[loss=0.1633, simple_loss=0.2507, pruned_loss=0.038, over 4772.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2679, pruned_loss=0.04382, over 929711.15 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:20:37,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.32 vs. limit=10.0 +2024-07-28 21:20:38,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=205712.0, ans=0.125 +2024-07-28 21:20:42,595 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.442e+01 5.900e+01 6.572e+01 1.088e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 21:20:48,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205738.66666666666, ans=0.125 +2024-07-28 21:21:07,912 INFO [train.py:1114] (2/4) Epoch 16, batch 1000, loss[loss=0.1958, simple_loss=0.2844, pruned_loss=0.05355, over 4956.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.268, pruned_loss=0.0433, over 929356.26 frames. 
], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:21:11,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=205778.66666666666, ans=0.025 +2024-07-28 21:21:12,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=205778.66666666666, ans=10.0 +2024-07-28 21:21:16,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205792.0, ans=0.125 +2024-07-28 21:21:21,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=205805.33333333334, ans=0.125 +2024-07-28 21:21:24,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=205805.33333333334, ans=0.125 +2024-07-28 21:21:29,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.46 vs. limit=15.0 +2024-07-28 21:21:52,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-07-28 21:21:56,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=205832.0, ans=0.0 +2024-07-28 21:21:57,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=205832.0, ans=0.125 +2024-07-28 21:22:07,915 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:22:08,533 INFO [train.py:1114] (2/4) Epoch 16, batch 1050, loss[loss=0.1771, simple_loss=0.2629, pruned_loss=0.04568, over 4882.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2686, pruned_loss=0.04361, over 932246.39 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:22:15,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205845.33333333334, ans=0.1 +2024-07-28 21:22:17,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.54 vs. limit=12.0 +2024-07-28 21:22:18,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.81 vs. limit=12.0 +2024-07-28 21:22:20,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205858.66666666666, ans=0.1 +2024-07-28 21:22:20,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.557e+01 6.013e+01 7.001e+01 9.107e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 21:22:32,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=205872.0, ans=0.125 +2024-07-28 21:22:35,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205885.33333333334, ans=0.1 +2024-07-28 21:24:11,063 INFO [train.py:1114] (2/4) Epoch 16, batch 1100, loss[loss=0.1951, simple_loss=0.2807, pruned_loss=0.05481, over 4897.00 frames. 
], tot_loss[loss=0.1782, simple_loss=0.2686, pruned_loss=0.04386, over 934844.31 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:24:14,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=205912.0, ans=0.1 +2024-07-28 21:24:22,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205925.33333333334, ans=0.0 +2024-07-28 21:24:24,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=205938.66666666666, ans=0.125 +2024-07-28 21:24:50,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=205952.0, ans=0.025 +2024-07-28 21:25:48,845 INFO [train.py:1114] (2/4) Epoch 16, batch 1150, loss[loss=0.2047, simple_loss=0.2843, pruned_loss=0.06252, over 4898.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2691, pruned_loss=0.04428, over 934262.17 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:29:48,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=205978.66666666666, ans=0.125 +2024-07-28 21:29:49,957 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=205978.66666666666, ans=0.125 +2024-07-28 21:30:29,622 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.518e+01 6.042e+01 7.033e+01 1.072e+02, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 21:30:33,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=205992.0, ans=0.125 +2024-07-28 21:34:10,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=206018.66666666666, ans=0.025 +2024-07-28 21:34:40,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206018.66666666666, ans=0.1 +2024-07-28 21:34:41,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.58 vs. limit=22.5 +2024-07-28 21:35:01,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=206032.0, ans=0.125 +2024-07-28 21:35:01,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=206032.0, ans=0.125 +2024-07-28 21:35:02,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=206032.0, ans=0.125 +2024-07-28 21:35:15,736 INFO [train.py:1114] (2/4) Epoch 16, batch 1200, loss[loss=0.1644, simple_loss=0.2576, pruned_loss=0.03558, over 4872.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2695, pruned_loss=0.04467, over 932973.61 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:35:17,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.83 vs. 
limit=6.0 +2024-07-28 21:36:07,354 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.49 vs. limit=22.5 +2024-07-28 21:36:07,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=206072.0, ans=0.025 +2024-07-28 21:36:09,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206085.33333333334, ans=0.1 +2024-07-28 21:36:25,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=206085.33333333334, ans=0.125 +2024-07-28 21:38:19,733 INFO [train.py:1114] (2/4) Epoch 16, batch 1250, loss[loss=0.1725, simple_loss=0.2641, pruned_loss=0.04046, over 4803.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2703, pruned_loss=0.04492, over 936997.94 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:38:46,733 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.568e+01 5.937e+01 6.680e+01 9.097e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 21:39:11,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206138.66666666666, ans=0.1 +2024-07-28 21:39:13,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=206138.66666666666, ans=0.025 +2024-07-28 21:39:13,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=206138.66666666666, ans=0.125 +2024-07-28 21:39:15,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206152.0, ans=0.125 +2024-07-28 21:39:17,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=206152.0, ans=0.5 +2024-07-28 21:39:33,234 INFO [train.py:1114] (2/4) Epoch 16, batch 1300, loss[loss=0.2073, simple_loss=0.2996, pruned_loss=0.05755, over 4708.00 frames. ], tot_loss[loss=0.18, simple_loss=0.27, pruned_loss=0.04504, over 938666.15 frames. ], batch size: 19, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:39:35,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.24 vs. limit=22.5 +2024-07-28 21:39:38,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.84 vs. limit=15.0 +2024-07-28 21:39:56,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206205.33333333334, ans=0.125 +2024-07-28 21:39:57,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=206205.33333333334, ans=0.0 +2024-07-28 21:39:59,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=206205.33333333334, ans=0.125 +2024-07-28 21:40:17,466 INFO [train.py:1114] (2/4) Epoch 16, batch 1350, loss[loss=0.1579, simple_loss=0.2542, pruned_loss=0.03075, over 4757.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2694, pruned_loss=0.04466, over 940524.59 frames. 
], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:40:26,296 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.68 vs. limit=15.0 +2024-07-28 21:40:26,352 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.922e+01 5.660e+01 6.386e+01 7.583e+01 1.369e+02, threshold=1.277e+02, percent-clipped=2.0 +2024-07-28 21:40:32,594 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.43 vs. limit=15.0 +2024-07-28 21:40:32,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=206258.66666666666, ans=0.125 +2024-07-28 21:40:40,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206258.66666666666, ans=0.125 +2024-07-28 21:40:51,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=206285.33333333334, ans=0.0 +2024-07-28 21:40:52,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=22.5 +2024-07-28 21:41:18,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.41 vs. limit=22.5 +2024-07-28 21:41:32,872 INFO [train.py:1114] (2/4) Epoch 16, batch 1400, loss[loss=0.1751, simple_loss=0.2547, pruned_loss=0.0477, over 4711.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.269, pruned_loss=0.0446, over 942636.37 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:41:32,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206312.0, ans=0.0 +2024-07-28 21:41:42,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=206325.33333333334, ans=0.0 +2024-07-28 21:41:43,423 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0 +2024-07-28 21:42:00,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=206325.33333333334, ans=0.125 +2024-07-28 21:42:32,319 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.50 vs. limit=10.0 +2024-07-28 21:42:32,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=206352.0, ans=22.5 +2024-07-28 21:42:36,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=206352.0, ans=0.0 +2024-07-28 21:45:05,378 INFO [train.py:1114] (2/4) Epoch 16, batch 1450, loss[loss=0.2039, simple_loss=0.2849, pruned_loss=0.06144, over 4672.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2693, pruned_loss=0.04436, over 942856.01 frames. 
], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:45:23,236 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.557e+01 6.212e+01 6.784e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 21:45:36,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=206392.0, ans=0.0 +2024-07-28 21:46:29,553 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.52 vs. limit=15.0 +2024-07-28 21:46:43,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206432.0, ans=0.1 +2024-07-28 21:46:44,915 INFO [train.py:1114] (2/4) Epoch 16, batch 1500, loss[loss=0.169, simple_loss=0.2633, pruned_loss=0.03738, over 4820.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2693, pruned_loss=0.04433, over 942359.76 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:47:10,881 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.98 vs. limit=15.0 +2024-07-28 21:47:31,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=206485.33333333334, ans=22.5 +2024-07-28 21:47:41,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 21:47:42,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=206498.66666666666, ans=0.125 +2024-07-28 21:47:44,865 INFO [train.py:1114] (2/4) Epoch 16, batch 1550, loss[loss=0.2401, simple_loss=0.317, pruned_loss=0.08154, over 4914.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2693, pruned_loss=0.04492, over 938573.73 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:48:20,505 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.574e+01 6.317e+01 7.056e+01 9.850e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 21:48:29,183 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.34 vs. limit=15.0 +2024-07-28 21:49:10,281 INFO [train.py:1114] (2/4) Epoch 16, batch 1600, loss[loss=0.1606, simple_loss=0.2615, pruned_loss=0.02987, over 4876.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2696, pruned_loss=0.04502, over 937374.10 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:49:12,445 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 21:49:13,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206578.66666666666, ans=0.1 +2024-07-28 21:50:24,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=206645.33333333334, ans=0.0 +2024-07-28 21:50:27,683 INFO [train.py:1114] (2/4) Epoch 16, batch 1650, loss[loss=0.2042, simple_loss=0.292, pruned_loss=0.05824, over 4664.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2691, pruned_loss=0.0451, over 937422.56 frames. 
], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:50:32,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=206645.33333333334, ans=0.025 +2024-07-28 21:50:36,682 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 5.640e+01 6.319e+01 7.228e+01 1.155e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 21:50:39,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206658.66666666666, ans=0.1 +2024-07-28 21:50:42,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=206672.0, ans=0.1 +2024-07-28 21:50:44,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206672.0, ans=0.1 +2024-07-28 21:50:55,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=206685.33333333334, ans=0.125 +2024-07-28 21:51:09,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=206698.66666666666, ans=0.125 +2024-07-28 21:51:10,362 INFO [train.py:1114] (2/4) Epoch 16, batch 1700, loss[loss=0.1546, simple_loss=0.2334, pruned_loss=0.03786, over 4704.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2694, pruned_loss=0.04494, over 938961.61 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:51:30,276 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.49 vs. limit=15.0 +2024-07-28 21:52:36,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=206752.0, ans=0.2 +2024-07-28 21:52:38,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=206752.0, ans=0.07 +2024-07-28 21:52:56,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=206765.33333333334, ans=0.0 +2024-07-28 21:52:59,278 INFO [train.py:1114] (2/4) Epoch 16, batch 1750, loss[loss=0.1447, simple_loss=0.2272, pruned_loss=0.03108, over 4792.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2696, pruned_loss=0.04473, over 940109.64 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:53:00,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206778.66666666666, ans=0.125 +2024-07-28 21:53:05,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=206778.66666666666, ans=0.125 +2024-07-28 21:53:07,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.03 vs. 
limit=10.0 +2024-07-28 21:53:09,573 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.795e+01 6.698e+01 8.081e+01 1.290e+02, threshold=1.340e+02, percent-clipped=1.0 +2024-07-28 21:53:10,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=206792.0, ans=0.0 +2024-07-28 21:53:11,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=206792.0, ans=0.0 +2024-07-28 21:53:13,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=206805.33333333334, ans=0.0 +2024-07-28 21:53:18,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=206805.33333333334, ans=0.0 +2024-07-28 21:53:44,570 INFO [train.py:1114] (2/4) Epoch 16, batch 1800, loss[loss=0.1693, simple_loss=0.2601, pruned_loss=0.03931, over 4634.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.04455, over 940666.13 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:53:45,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206845.33333333334, ans=0.1 +2024-07-28 21:53:49,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=206845.33333333334, ans=0.125 +2024-07-28 21:54:01,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=206872.0, ans=0.125 +2024-07-28 21:54:24,980 INFO [train.py:1114] (2/4) Epoch 16, batch 1850, loss[loss=0.1905, simple_loss=0.2828, pruned_loss=0.04916, over 4813.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2693, pruned_loss=0.04422, over 940656.77 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:54:29,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=206912.0, ans=0.125 +2024-07-28 21:54:33,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=206912.0, ans=0.2 +2024-07-28 21:54:33,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=206912.0, ans=0.125 +2024-07-28 21:54:37,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.631e+01 6.106e+01 7.258e+01 1.128e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 21:54:43,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=206925.33333333334, ans=0.2 +2024-07-28 21:55:08,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=206965.33333333334, ans=0.07 +2024-07-28 21:55:15,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=206965.33333333334, ans=0.025 +2024-07-28 21:55:16,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=206965.33333333334, ans=0.0 +2024-07-28 21:55:17,730 INFO [train.py:1114] (2/4) Epoch 16, batch 1900, loss[loss=0.171, simple_loss=0.2747, pruned_loss=0.0337, over 4662.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2698, pruned_loss=0.04405, over 942063.31 frames. 
], batch size: 14, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:55:25,489 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:55:31,471 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:55:38,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=207005.33333333334, ans=0.125 +2024-07-28 21:56:10,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=207045.33333333334, ans=0.125 +2024-07-28 21:56:10,894 INFO [train.py:1114] (2/4) Epoch 16, batch 1950, loss[loss=0.1841, simple_loss=0.276, pruned_loss=0.04614, over 4895.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2701, pruned_loss=0.04395, over 943936.20 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:56:27,111 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-07-28 21:56:30,869 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.561e+01 6.255e+01 6.715e+01 9.914e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 21:56:36,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=207072.0, ans=0.05 +2024-07-28 21:56:45,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=207072.0, ans=0.0 +2024-07-28 21:56:50,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207085.33333333334, ans=0.125 +2024-07-28 21:56:53,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207085.33333333334, ans=0.125 +2024-07-28 21:56:53,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. limit=15.0 +2024-07-28 21:56:55,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207085.33333333334, ans=0.1 +2024-07-28 21:57:02,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207098.66666666666, ans=0.1 +2024-07-28 21:57:03,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207098.66666666666, ans=0.0 +2024-07-28 21:57:05,253 INFO [train.py:1114] (2/4) Epoch 16, batch 2000, loss[loss=0.1745, simple_loss=0.2608, pruned_loss=0.04408, over 4806.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2703, pruned_loss=0.04416, over 940331.24 frames. 
], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:57:32,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=207138.66666666666, ans=0.025 +2024-07-28 21:57:39,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=207138.66666666666, ans=0.2 +2024-07-28 21:57:45,013 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.30 vs. limit=6.0 +2024-07-28 21:58:01,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207165.33333333334, ans=0.125 +2024-07-28 21:58:20,238 INFO [train.py:1114] (2/4) Epoch 16, batch 2050, loss[loss=0.1743, simple_loss=0.2514, pruned_loss=0.04857, over 4600.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2698, pruned_loss=0.04405, over 938716.16 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:58:24,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=207178.66666666666, ans=0.125 +2024-07-28 21:58:26,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207178.66666666666, ans=0.0 +2024-07-28 21:58:27,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207192.0, ans=0.125 +2024-07-28 21:58:36,966 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.615e+01 6.198e+01 7.046e+01 1.043e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 21:59:20,887 INFO [train.py:1114] (2/4) Epoch 16, batch 2100, loss[loss=0.1473, simple_loss=0.2569, pruned_loss=0.01888, over 4769.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.269, pruned_loss=0.04341, over 940534.85 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 21:59:34,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=207245.33333333334, ans=0.025 +2024-07-28 21:59:47,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=207258.66666666666, ans=0.125 +2024-07-28 21:59:50,037 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.93 vs. limit=15.0 +2024-07-28 21:59:59,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=207272.0, ans=0.125 +2024-07-28 22:01:09,954 INFO [train.py:1114] (2/4) Epoch 16, batch 2150, loss[loss=0.1573, simple_loss=0.2507, pruned_loss=0.03194, over 4888.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2695, pruned_loss=0.04378, over 943741.51 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:01:14,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.11 vs. 
limit=22.5 +2024-07-28 22:01:58,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.463e+01 6.183e+01 7.182e+01 9.894e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 22:02:04,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=207325.33333333334, ans=0.125 +2024-07-28 22:02:31,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=207352.0, ans=0.125 +2024-07-28 22:02:33,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207352.0, ans=0.125 +2024-07-28 22:02:33,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=207352.0, ans=0.09899494936611666 +2024-07-28 22:02:53,140 INFO [train.py:1114] (2/4) Epoch 16, batch 2200, loss[loss=0.1791, simple_loss=0.2676, pruned_loss=0.04526, over 4799.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2692, pruned_loss=0.04391, over 943150.10 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:02:54,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=207378.66666666666, ans=0.125 +2024-07-28 22:02:59,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207378.66666666666, ans=0.125 +2024-07-28 22:03:22,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=207405.33333333334, ans=0.025 +2024-07-28 22:03:23,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0 +2024-07-28 22:03:25,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=207405.33333333334, ans=0.125 +2024-07-28 22:03:45,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=207432.0, ans=0.125 +2024-07-28 22:03:47,378 INFO [train.py:1114] (2/4) Epoch 16, batch 2250, loss[loss=0.177, simple_loss=0.272, pruned_loss=0.04104, over 4694.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2688, pruned_loss=0.04415, over 941882.37 frames. 
], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:03:50,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=207445.33333333334, ans=0.0 +2024-07-28 22:03:58,324 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.527e+01 6.028e+01 7.010e+01 1.004e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 22:04:02,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=207458.66666666666, ans=0.125 +2024-07-28 22:04:06,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207472.0, ans=0.1 +2024-07-28 22:04:15,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=207472.0, ans=0.125 +2024-07-28 22:04:17,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=207485.33333333334, ans=0.125 +2024-07-28 22:04:32,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=207498.66666666666, ans=0.09899494936611666 +2024-07-28 22:04:35,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=207498.66666666666, ans=0.125 +2024-07-28 22:04:53,006 INFO [train.py:1114] (2/4) Epoch 16, batch 2300, loss[loss=0.1642, simple_loss=0.244, pruned_loss=0.04217, over 4934.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2679, pruned_loss=0.04423, over 939621.22 frames. ], batch size: 12, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:05:14,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=207525.33333333334, ans=0.0 +2024-07-28 22:05:27,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.14 vs. limit=22.5 +2024-07-28 22:05:37,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207538.66666666666, ans=0.125 +2024-07-28 22:05:43,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-28 22:05:49,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=207565.33333333334, ans=0.025 +2024-07-28 22:05:52,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=207565.33333333334, ans=0.035 +2024-07-28 22:05:57,192 INFO [train.py:1114] (2/4) Epoch 16, batch 2350, loss[loss=0.1804, simple_loss=0.278, pruned_loss=0.04142, over 4635.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2667, pruned_loss=0.04351, over 941739.77 frames. 
], batch size: 13, lr: 4.71e-03, grad_scale: 32.0 +2024-07-28 22:06:09,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=207578.66666666666, ans=0.04949747468305833 +2024-07-28 22:06:13,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.459e+01 6.024e+01 6.952e+01 8.823e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 22:06:17,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=207605.33333333334, ans=0.125 +2024-07-28 22:06:19,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207605.33333333334, ans=0.125 +2024-07-28 22:06:25,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207618.66666666666, ans=0.125 +2024-07-28 22:06:31,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207618.66666666666, ans=0.125 +2024-07-28 22:06:35,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207632.0, ans=0.1 +2024-07-28 22:06:45,795 INFO [train.py:1114] (2/4) Epoch 16, batch 2400, loss[loss=0.1501, simple_loss=0.2482, pruned_loss=0.02599, over 4646.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2673, pruned_loss=0.04378, over 941698.90 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:06:53,214 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.52 vs. limit=15.0 +2024-07-28 22:06:53,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=207658.66666666666, ans=0.125 +2024-07-28 22:07:00,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=207672.0, ans=0.0 +2024-07-28 22:07:06,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207685.33333333334, ans=0.0 +2024-07-28 22:07:17,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207698.66666666666, ans=0.125 +2024-07-28 22:07:33,021 INFO [train.py:1114] (2/4) Epoch 16, batch 2450, loss[loss=0.1522, simple_loss=0.2418, pruned_loss=0.03127, over 4703.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2679, pruned_loss=0.04382, over 937177.24 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:07:41,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=207725.33333333334, ans=0.125 +2024-07-28 22:07:44,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.18 vs. limit=15.0 +2024-07-28 22:07:45,692 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.574e+01 6.192e+01 6.939e+01 1.187e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 22:07:46,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.78 vs. 
limit=15.0 +2024-07-28 22:08:24,491 INFO [train.py:1114] (2/4) Epoch 16, batch 2500, loss[loss=0.1933, simple_loss=0.3012, pruned_loss=0.0427, over 4802.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2674, pruned_loss=0.04352, over 939106.73 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:08:26,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=207778.66666666666, ans=0.0 +2024-07-28 22:08:28,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207778.66666666666, ans=0.125 +2024-07-28 22:08:37,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=207778.66666666666, ans=0.125 +2024-07-28 22:08:58,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=207805.33333333334, ans=10.0 +2024-07-28 22:09:04,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=15.0 +2024-07-28 22:09:05,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207805.33333333334, ans=0.125 +2024-07-28 22:09:06,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=207805.33333333334, ans=0.125 +2024-07-28 22:09:08,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.82 vs. limit=15.0 +2024-07-28 22:09:23,881 INFO [train.py:1114] (2/4) Epoch 16, batch 2550, loss[loss=0.1431, simple_loss=0.2387, pruned_loss=0.02376, over 4806.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2669, pruned_loss=0.04292, over 938855.41 frames. ], batch size: 11, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:09:38,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.535e+01 6.272e+01 7.311e+01 1.144e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 22:11:48,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.47 vs. limit=22.5 +2024-07-28 22:12:41,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-07-28 22:13:06,684 INFO [train.py:1114] (2/4) Epoch 16, batch 2600, loss[loss=0.2001, simple_loss=0.2887, pruned_loss=0.0557, over 4894.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04382, over 937962.19 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:13:52,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=207925.33333333334, ans=0.0 +2024-07-28 22:15:02,482 INFO [train.py:1114] (2/4) Epoch 16, batch 2650, loss[loss=0.1796, simple_loss=0.2724, pruned_loss=0.04344, over 4620.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2687, pruned_loss=0.04407, over 939806.81 frames. 
], batch size: 16, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:15:41,444 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.682e+01 6.199e+01 7.227e+01 9.483e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 22:15:41,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-28 22:22:43,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=208005.33333333334, ans=0.125 +2024-07-28 22:24:23,902 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5 +2024-07-28 22:27:28,518 INFO [train.py:1114] (2/4) Epoch 16, batch 2700, loss[loss=0.1583, simple_loss=0.2576, pruned_loss=0.02954, over 4734.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2687, pruned_loss=0.0438, over 939678.23 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:27:36,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208058.66666666666, ans=0.1 +2024-07-28 22:28:15,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=208085.33333333334, ans=0.0 +2024-07-28 22:28:21,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=208085.33333333334, ans=0.125 +2024-07-28 22:30:45,983 INFO [train.py:1114] (2/4) Epoch 16, batch 2750, loss[loss=0.1746, simple_loss=0.2657, pruned_loss=0.04179, over 4719.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2675, pruned_loss=0.04378, over 939030.73 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:30:52,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=208112.0, ans=0.125 +2024-07-28 22:30:57,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208112.0, ans=0.125 +2024-07-28 22:31:02,419 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.384e+01 5.637e+01 6.771e+01 7.935e+01 1.190e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-28 22:31:19,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=208138.66666666666, ans=0.125 +2024-07-28 22:31:58,119 INFO [train.py:1114] (2/4) Epoch 16, batch 2800, loss[loss=0.1843, simple_loss=0.2626, pruned_loss=0.05297, over 3384.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04375, over 936683.93 frames. 
], batch size: 35, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:31:59,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208178.66666666666, ans=0.125 +2024-07-28 22:32:01,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=208178.66666666666, ans=0.125 +2024-07-28 22:32:04,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=208178.66666666666, ans=0.125 +2024-07-28 22:32:23,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=208205.33333333334, ans=0.0 +2024-07-28 22:32:24,306 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.39 vs. limit=15.0 +2024-07-28 22:32:26,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208218.66666666666, ans=0.125 +2024-07-28 22:32:28,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208218.66666666666, ans=0.125 +2024-07-28 22:32:29,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=208218.66666666666, ans=0.125 +2024-07-28 22:32:32,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=208232.0, ans=0.2 +2024-07-28 22:32:38,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=208245.33333333334, ans=0.04949747468305833 +2024-07-28 22:32:38,611 INFO [train.py:1114] (2/4) Epoch 16, batch 2850, loss[loss=0.1535, simple_loss=0.2482, pruned_loss=0.02946, over 4970.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04392, over 935073.00 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:32:39,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=208245.33333333334, ans=0.025 +2024-07-28 22:32:47,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=208258.66666666666, ans=0.0 +2024-07-28 22:32:47,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.805e+01 6.352e+01 7.417e+01 1.040e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 22:32:53,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.67 vs. limit=15.0 +2024-07-28 22:33:35,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=208312.0, ans=0.2 +2024-07-28 22:33:36,454 INFO [train.py:1114] (2/4) Epoch 16, batch 2900, loss[loss=0.1787, simple_loss=0.269, pruned_loss=0.04414, over 4833.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04386, over 939065.95 frames. 
], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:33:50,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=208312.0, ans=15.0 +2024-07-28 22:34:08,372 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.13 vs. limit=6.0 +2024-07-28 22:34:09,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=208338.66666666666, ans=0.125 +2024-07-28 22:34:28,264 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.04 vs. limit=12.0 +2024-07-28 22:34:36,451 INFO [train.py:1114] (2/4) Epoch 16, batch 2950, loss[loss=0.1826, simple_loss=0.2834, pruned_loss=0.04086, over 4699.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2674, pruned_loss=0.04328, over 938490.52 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:34:40,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=208378.66666666666, ans=0.025 +2024-07-28 22:34:46,788 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.287e+01 5.436e+01 5.951e+01 6.814e+01 8.870e+01, threshold=1.190e+02, percent-clipped=0.0 +2024-07-28 22:35:00,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=208418.66666666666, ans=0.04949747468305833 +2024-07-28 22:35:15,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208432.0, ans=0.125 +2024-07-28 22:35:19,629 INFO [train.py:1114] (2/4) Epoch 16, batch 3000, loss[loss=0.1655, simple_loss=0.2583, pruned_loss=0.03633, over 4759.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2677, pruned_loss=0.04329, over 938062.29 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0 +2024-07-28 22:35:19,629 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 22:36:16,842 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.3384, 3.3828, 5.6919, 3.4276], device='cuda:2') +2024-07-28 22:37:08,209 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1628, simple_loss=0.2657, pruned_loss=0.02996, over 944034.00 frames. +2024-07-28 22:37:08,209 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 22:38:11,549 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.48 vs. limit=22.5 +2024-07-28 22:38:15,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208485.33333333334, ans=0.125 +2024-07-28 22:38:17,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=208485.33333333334, ans=0.125 +2024-07-28 22:38:41,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=208512.0, ans=0.125 +2024-07-28 22:38:42,206 INFO [train.py:1114] (2/4) Epoch 16, batch 3050, loss[loss=0.1896, simple_loss=0.2855, pruned_loss=0.04682, over 4641.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2689, pruned_loss=0.04386, over 937079.37 frames. 
], batch size: 12, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:38:42,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.60 vs. limit=15.0 +2024-07-28 22:38:44,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=208512.0, ans=0.0 +2024-07-28 22:38:51,725 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.977e+01 5.726e+01 6.358e+01 7.092e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 22:39:00,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=208538.66666666666, ans=0.125 +2024-07-28 22:39:04,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208538.66666666666, ans=0.1 +2024-07-28 22:39:15,025 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 22:39:26,847 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0 +2024-07-28 22:39:40,200 INFO [train.py:1114] (2/4) Epoch 16, batch 3100, loss[loss=0.2254, simple_loss=0.3298, pruned_loss=0.06052, over 4653.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2692, pruned_loss=0.04391, over 937480.25 frames. ], batch size: 16, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:40:04,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=208592.0, ans=0.0 +2024-07-28 22:40:52,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208605.33333333334, ans=0.125 +2024-07-28 22:40:52,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=208605.33333333334, ans=0.2 +2024-07-28 22:40:55,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.15 vs. limit=15.0 +2024-07-28 22:41:09,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=208632.0, ans=0.5 +2024-07-28 22:41:13,448 INFO [train.py:1114] (2/4) Epoch 16, batch 3150, loss[loss=0.1742, simple_loss=0.2649, pruned_loss=0.04177, over 4603.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2691, pruned_loss=0.04405, over 937595.45 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:41:29,378 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.743e+01 5.555e+01 6.673e+01 7.571e+01 1.321e+02, threshold=1.335e+02, percent-clipped=1.0 +2024-07-28 22:41:41,106 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=208672.0, ans=0.2 +2024-07-28 22:42:12,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208698.66666666666, ans=0.1 +2024-07-28 22:42:14,164 INFO [train.py:1114] (2/4) Epoch 16, batch 3200, loss[loss=0.173, simple_loss=0.272, pruned_loss=0.037, over 4824.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04379, over 939321.46 frames. 
], batch size: 13, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:42:18,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=208712.0, ans=0.2 +2024-07-28 22:42:18,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208712.0, ans=0.1 +2024-07-28 22:42:42,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208752.0, ans=0.1 +2024-07-28 22:42:45,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=208752.0, ans=0.2 +2024-07-28 22:43:03,061 INFO [train.py:1114] (2/4) Epoch 16, batch 3250, loss[loss=0.1581, simple_loss=0.2571, pruned_loss=0.02954, over 4922.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2685, pruned_loss=0.04377, over 940157.63 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:43:13,632 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.572e+01 5.431e+01 6.056e+01 6.661e+01 1.204e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 22:43:21,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=208792.0, ans=0.0 +2024-07-28 22:43:29,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208805.33333333334, ans=0.125 +2024-07-28 22:43:53,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208818.66666666666, ans=0.1 +2024-07-28 22:43:56,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=208832.0, ans=0.0 +2024-07-28 22:43:58,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=208832.0, ans=0.125 +2024-07-28 22:44:00,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208832.0, ans=0.125 +2024-07-28 22:44:04,206 INFO [train.py:1114] (2/4) Epoch 16, batch 3300, loss[loss=0.1898, simple_loss=0.2768, pruned_loss=0.05136, over 4712.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2673, pruned_loss=0.04348, over 941065.02 frames. ], batch size: 19, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:44:05,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-07-28 22:44:10,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. 
limit=15.0 +2024-07-28 22:44:31,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=208872.0, ans=0.125 +2024-07-28 22:44:35,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=208872.0, ans=0.125 +2024-07-28 22:44:39,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=208885.33333333334, ans=0.125 +2024-07-28 22:44:40,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.49 vs. limit=12.0 +2024-07-28 22:45:00,569 INFO [train.py:1114] (2/4) Epoch 16, batch 3350, loss[loss=0.1923, simple_loss=0.2899, pruned_loss=0.04738, over 4631.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2685, pruned_loss=0.04423, over 939197.80 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:45:08,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=208925.33333333334, ans=0.125 +2024-07-28 22:45:11,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.569e+01 6.115e+01 6.727e+01 9.175e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 22:45:26,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=208952.0, ans=0.125 +2024-07-28 22:45:26,432 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=208952.0, ans=0.0 +2024-07-28 22:45:57,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.49 vs. limit=22.5 +2024-07-28 22:45:59,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=208965.33333333334, ans=0.0 +2024-07-28 22:46:00,028 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.10 vs. limit=6.0 +2024-07-28 22:46:00,444 INFO [train.py:1114] (2/4) Epoch 16, batch 3400, loss[loss=0.1368, simple_loss=0.2169, pruned_loss=0.02837, over 4816.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.269, pruned_loss=0.04456, over 938245.71 frames. ], batch size: 11, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:46:01,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=208978.66666666666, ans=0.0 +2024-07-28 22:46:05,870 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.94 vs. limit=22.5 +2024-07-28 22:46:39,570 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.54 vs. limit=6.0 +2024-07-28 22:47:00,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=209032.0, ans=0.04949747468305833 +2024-07-28 22:47:04,443 INFO [train.py:1114] (2/4) Epoch 16, batch 3450, loss[loss=0.1801, simple_loss=0.277, pruned_loss=0.04161, over 4717.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2691, pruned_loss=0.04475, over 938081.38 frames. 
], batch size: 19, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:47:08,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209045.33333333334, ans=0.125 +2024-07-28 22:47:11,404 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.20 vs. limit=6.0 +2024-07-28 22:47:12,850 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.483e+01 6.084e+01 6.778e+01 9.605e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 22:47:39,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=209058.66666666666, ans=0.125 +2024-07-28 22:47:39,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=209058.66666666666, ans=0.05 +2024-07-28 22:48:01,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=209072.0, ans=0.07 +2024-07-28 22:48:38,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209085.33333333334, ans=0.125 +2024-07-28 22:48:39,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209085.33333333334, ans=0.125 +2024-07-28 22:48:54,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=209098.66666666666, ans=0.07 +2024-07-28 22:48:58,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=209112.0, ans=0.025 +2024-07-28 22:48:59,100 INFO [train.py:1114] (2/4) Epoch 16, batch 3500, loss[loss=0.175, simple_loss=0.2558, pruned_loss=0.04711, over 4935.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2688, pruned_loss=0.0451, over 938797.54 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:49:08,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=12.0 +2024-07-28 22:49:30,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=209165.33333333334, ans=0.035 +2024-07-28 22:49:32,614 INFO [train.py:1114] (2/4) Epoch 16, batch 3550, loss[loss=0.1812, simple_loss=0.276, pruned_loss=0.04324, over 4660.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2686, pruned_loss=0.04476, over 939313.20 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 64.0 +2024-07-28 22:49:41,605 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.691e+01 6.213e+01 7.399e+01 9.936e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 22:49:52,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=209205.33333333334, ans=0.125 +2024-07-28 22:50:06,347 INFO [train.py:1114] (2/4) Epoch 16, batch 3600, loss[loss=0.143, simple_loss=0.2335, pruned_loss=0.02622, over 4963.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2686, pruned_loss=0.04413, over 941056.13 frames. 
], batch size: 13, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:50:13,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=209258.66666666666, ans=0.125 +2024-07-28 22:50:29,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209272.0, ans=0.125 +2024-07-28 22:50:46,899 INFO [train.py:1114] (2/4) Epoch 16, batch 3650, loss[loss=0.1779, simple_loss=0.2678, pruned_loss=0.04399, over 4889.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2675, pruned_loss=0.0441, over 941411.84 frames. ], batch size: 15, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:50:51,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=209312.0, ans=0.2 +2024-07-28 22:50:52,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209312.0, ans=0.125 +2024-07-28 22:50:57,257 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.574e+01 6.186e+01 7.126e+01 1.218e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 22:50:59,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.00 vs. limit=6.0 +2024-07-28 22:51:01,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.67 vs. limit=15.0 +2024-07-28 22:51:02,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=209338.66666666666, ans=0.2 +2024-07-28 22:51:14,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209352.0, ans=0.125 +2024-07-28 22:51:27,271 INFO [train.py:1114] (2/4) Epoch 16, batch 3700, loss[loss=0.1683, simple_loss=0.2642, pruned_loss=0.03622, over 4935.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2672, pruned_loss=0.04389, over 942422.45 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0 +2024-07-28 22:51:30,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=209378.66666666666, ans=0.2 +2024-07-28 22:51:34,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209392.0, ans=0.0 +2024-07-28 22:51:39,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.59 vs. limit=12.0 +2024-07-28 22:51:44,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=209405.33333333334, ans=0.0 +2024-07-28 22:51:57,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=209432.0, ans=0.125 +2024-07-28 22:51:58,302 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.58 vs. limit=15.0 +2024-07-28 22:51:58,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.23 vs. 
limit=22.5 +2024-07-28 22:52:02,386 INFO [train.py:1114] (2/4) Epoch 16, batch 3750, loss[loss=0.1406, simple_loss=0.2267, pruned_loss=0.02726, over 4809.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2671, pruned_loss=0.04364, over 943752.49 frames. ], batch size: 11, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:52:13,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=209445.33333333334, ans=0.2 +2024-07-28 22:52:17,618 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.513e+01 6.031e+01 6.754e+01 8.866e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 22:52:38,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209498.66666666666, ans=0.125 +2024-07-28 22:52:41,586 INFO [train.py:1114] (2/4) Epoch 16, batch 3800, loss[loss=0.175, simple_loss=0.273, pruned_loss=0.03855, over 4819.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2662, pruned_loss=0.04316, over 941876.52 frames. ], batch size: 14, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:52:43,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209512.0, ans=0.1 +2024-07-28 22:53:06,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.99 vs. limit=15.0 +2024-07-28 22:53:08,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=209538.66666666666, ans=0.0 +2024-07-28 22:53:10,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=209552.0, ans=0.0 +2024-07-28 22:53:16,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=209552.0, ans=0.125 +2024-07-28 22:53:19,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=209565.33333333334, ans=0.0 +2024-07-28 22:53:20,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=209565.33333333334, ans=0.125 +2024-07-28 22:53:20,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=209565.33333333334, ans=0.125 +2024-07-28 22:53:24,788 INFO [train.py:1114] (2/4) Epoch 16, batch 3850, loss[loss=0.2098, simple_loss=0.3081, pruned_loss=0.05568, over 4630.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2667, pruned_loss=0.04278, over 942549.71 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:53:35,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209592.0, ans=0.125 +2024-07-28 22:53:44,019 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.438e+01 6.014e+01 6.827e+01 9.667e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 22:53:50,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=209605.33333333334, ans=0.125 +2024-07-28 22:53:52,668 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.71 vs. 
limit=15.0 +2024-07-28 22:53:59,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=209618.66666666666, ans=0.025 +2024-07-28 22:54:11,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=209645.33333333334, ans=0.025 +2024-07-28 22:54:11,537 INFO [train.py:1114] (2/4) Epoch 16, batch 3900, loss[loss=0.1666, simple_loss=0.266, pruned_loss=0.03355, over 4808.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2676, pruned_loss=0.04338, over 942823.35 frames. ], batch size: 14, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:54:17,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=209658.66666666666, ans=0.0 +2024-07-28 22:54:17,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=15.0 +2024-07-28 22:54:23,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209658.66666666666, ans=0.0 +2024-07-28 22:54:24,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=209672.0, ans=0.1 +2024-07-28 22:54:44,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209685.33333333334, ans=0.1 +2024-07-28 22:54:54,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209698.66666666666, ans=0.125 +2024-07-28 22:54:54,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209698.66666666666, ans=0.125 +2024-07-28 22:54:59,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=209698.66666666666, ans=0.0 +2024-07-28 22:55:00,453 INFO [train.py:1114] (2/4) Epoch 16, batch 3950, loss[loss=0.2093, simple_loss=0.307, pruned_loss=0.05584, over 4820.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2681, pruned_loss=0.04357, over 944750.89 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:55:12,723 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-07-28 22:55:13,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=209725.33333333334, ans=0.125 +2024-07-28 22:55:14,737 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.589e+01 5.934e+01 6.636e+01 9.172e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 22:55:17,348 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.63 vs. 
limit=15.0 +2024-07-28 22:55:19,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209725.33333333334, ans=0.125 +2024-07-28 22:55:30,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=209752.0, ans=0.125 +2024-07-28 22:55:32,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=209752.0, ans=0.125 +2024-07-28 22:55:40,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=209765.33333333334, ans=0.07 +2024-07-28 22:55:42,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=209765.33333333334, ans=0.125 +2024-07-28 22:55:48,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209778.66666666666, ans=0.125 +2024-07-28 22:55:49,229 INFO [train.py:1114] (2/4) Epoch 16, batch 4000, loss[loss=0.1765, simple_loss=0.2717, pruned_loss=0.04059, over 4774.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2684, pruned_loss=0.04395, over 941354.08 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:55:58,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=209792.0, ans=0.025 +2024-07-28 22:56:05,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=209805.33333333334, ans=0.125 +2024-07-28 22:56:24,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=209832.0, ans=0.0 +2024-07-28 22:56:27,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=209832.0, ans=0.04949747468305833 +2024-07-28 22:56:39,791 INFO [train.py:1114] (2/4) Epoch 16, batch 4050, loss[loss=0.2309, simple_loss=0.3094, pruned_loss=0.07622, over 3325.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2675, pruned_loss=0.04362, over 940287.17 frames. ], batch size: 35, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:56:48,108 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.63 vs. limit=22.5 +2024-07-28 22:56:51,881 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.462e+01 6.013e+01 7.148e+01 1.181e+02, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 22:56:59,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209872.0, ans=0.0 +2024-07-28 22:57:11,246 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.93 vs. limit=15.0 +2024-07-28 22:57:12,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=209885.33333333334, ans=0.035 +2024-07-28 22:57:26,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.13 vs. limit=15.0 +2024-07-28 22:57:32,743 INFO [train.py:1114] (2/4) Epoch 16, batch 4100, loss[loss=0.2078, simple_loss=0.2968, pruned_loss=0.0594, over 4909.00 frames. 
], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04397, over 939219.10 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:57:55,679 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=209925.33333333334, ans=0.0 +2024-07-28 22:57:56,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=209925.33333333334, ans=0.0 +2024-07-28 22:57:57,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=209925.33333333334, ans=0.0 +2024-07-28 22:57:59,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209925.33333333334, ans=0.1 +2024-07-28 22:58:00,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.38 vs. limit=15.0 +2024-07-28 22:58:01,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209925.33333333334, ans=0.1 +2024-07-28 22:58:03,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=209938.66666666666, ans=0.2 +2024-07-28 22:58:04,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209938.66666666666, ans=0.125 +2024-07-28 22:58:05,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=209938.66666666666, ans=0.2 +2024-07-28 22:58:09,719 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.16 vs. limit=6.0 +2024-07-28 22:58:10,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=209952.0, ans=0.0 +2024-07-28 22:58:10,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=209952.0, ans=0.05 +2024-07-28 22:58:27,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=209965.33333333334, ans=0.125 +2024-07-28 22:58:29,580 INFO [train.py:1114] (2/4) Epoch 16, batch 4150, loss[loss=0.2097, simple_loss=0.2919, pruned_loss=0.06373, over 4833.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2668, pruned_loss=0.04385, over 938967.47 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:58:37,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.74 vs. 
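[Editor's note] The per-batch `loss`, `simple_loss`, and `pruned_loss` fields are consistent with a pruned-transducer objective in which the total is the pruned loss plus half the simple loss: for batch 4150 above, 0.5 · 0.2668 + 0.04385 ≈ 0.1773, the logged value. The 0.5 weight is inferred from the logged numbers, not read from the training config; a quick arithmetic check:

```python
# Verify that logged tot_loss values match
# loss ≈ 0.5 * simple_loss + pruned_loss (weight inferred from the logs).
rows = [
    # (loss, simple_loss, pruned_loss) from the tot_loss[...] fields above
    (0.1772, 0.2676, 0.04338),  # batch 3900
    (0.1776, 0.2681, 0.04357),  # batch 3950
    (0.1773, 0.2668, 0.04385),  # batch 4150
]
for loss, simple, pruned in rows:
    recon = 0.5 * simple + pruned
    assert abs(recon - loss) < 5e-4, (loss, recon)
    print(f"logged={loss:.4f} reconstructed={recon:.4f}")
```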
limit=15.0 +2024-07-28 22:58:40,277 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.817e+01 6.318e+01 7.435e+01 1.178e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 22:58:41,245 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=209992.0, ans=0.125 +2024-07-28 22:59:07,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=210032.0, ans=0.125 +2024-07-28 22:59:08,737 INFO [train.py:1114] (2/4) Epoch 16, batch 4200, loss[loss=0.2249, simple_loss=0.3089, pruned_loss=0.0704, over 4901.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2673, pruned_loss=0.04396, over 940050.30 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:59:10,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210045.33333333334, ans=0.1 +2024-07-28 22:59:14,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=210045.33333333334, ans=0.125 +2024-07-28 22:59:14,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=210045.33333333334, ans=0.0 +2024-07-28 22:59:19,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=210058.66666666666, ans=0.125 +2024-07-28 22:59:19,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. limit=10.0 +2024-07-28 22:59:32,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=210085.33333333334, ans=0.125 +2024-07-28 22:59:35,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=210085.33333333334, ans=0.025 +2024-07-28 22:59:42,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=210098.66666666666, ans=0.125 +2024-07-28 22:59:46,518 INFO [train.py:1114] (2/4) Epoch 16, batch 4250, loss[loss=0.1674, simple_loss=0.254, pruned_loss=0.04043, over 4632.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2675, pruned_loss=0.04394, over 941452.92 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 22:59:49,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=210112.0, ans=0.125 +2024-07-28 22:59:55,623 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.557e+01 6.153e+01 6.698e+01 1.216e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 23:00:06,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=210138.66666666666, ans=0.2 +2024-07-28 23:00:08,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=210152.0, ans=0.04949747468305833 +2024-07-28 23:00:21,373 INFO [train.py:1114] (2/4) Epoch 16, batch 4300, loss[loss=0.1509, simple_loss=0.234, pruned_loss=0.03389, over 4757.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2665, pruned_loss=0.0436, over 940678.93 frames. 
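[Editor's note] The `optim.py` WARNING lines report quartiles (min, 25%, median, 75%, max) of recently observed gradient norms together with a clipping threshold and the fraction of batches clipped. With `Clipping_scale=2.0`, the logged threshold is exactly twice the median: e.g. 2 × 6.318e+01 = 1.264e+02 above, and 2 × 5.934e+01 = 1.187e+02 earlier. A sketch of threshold selection in that spirit — the history length and update rule are assumptions, not the optimizer's actual code:

```python
# Sketch: clip gradients at clipping_scale times the median of recently
# observed gradient norms, reporting quartiles as in the WARNING lines.
import torch

class GradNormClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.history = history
        self.norms = []

    def clip_(self, parameters) -> float:
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])
        ).item()
        self.norms = (self.norms + [norm])[-self.history:]
        q = torch.quantile(
            torch.tensor(self.norms),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
        )
        threshold = self.clipping_scale * q[2].item()  # 2x the median
        if norm > threshold:
            for p in params:
                p.grad.mul_(threshold / norm)
        print(f"grad-norm quartiles {q.tolist()}, threshold={threshold:.3e}")
        return norm
```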
], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 23:00:28,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.88 vs. limit=12.0 +2024-07-28 23:00:44,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=210218.66666666666, ans=0.025 +2024-07-28 23:00:57,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=210245.33333333334, ans=0.0 +2024-07-28 23:00:57,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-07-28 23:00:58,189 INFO [train.py:1114] (2/4) Epoch 16, batch 4350, loss[loss=0.1994, simple_loss=0.2842, pruned_loss=0.0573, over 4760.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2669, pruned_loss=0.04353, over 941432.18 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 23:01:18,170 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.387e+01 6.031e+01 6.844e+01 1.009e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 23:01:19,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=210258.66666666666, ans=0.0 +2024-07-28 23:01:22,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=210272.0, ans=0.09899494936611666 +2024-07-28 23:01:23,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210272.0, ans=0.1 +2024-07-28 23:01:24,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210272.0, ans=0.125 +2024-07-28 23:01:26,470 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.80 vs. limit=15.0 +2024-07-28 23:01:26,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=210272.0, ans=0.0 +2024-07-28 23:01:30,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=210285.33333333334, ans=0.07 +2024-07-28 23:01:36,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210298.66666666666, ans=0.1 +2024-07-28 23:01:39,505 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:01:40,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=12.0 +2024-07-28 23:01:42,123 INFO [train.py:1114] (2/4) Epoch 16, batch 4400, loss[loss=0.2008, simple_loss=0.2956, pruned_loss=0.05303, over 4812.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2672, pruned_loss=0.0431, over 941440.44 frames. 
], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:01:50,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=210325.33333333334, ans=0.125 +2024-07-28 23:01:57,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=210325.33333333334, ans=0.2 +2024-07-28 23:01:58,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0 +2024-07-28 23:01:58,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.55 vs. limit=15.0 +2024-07-28 23:02:08,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210352.0, ans=0.1 +2024-07-28 23:02:16,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=210365.33333333334, ans=0.2 +2024-07-28 23:02:16,842 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.94 vs. limit=15.0 +2024-07-28 23:02:21,301 INFO [train.py:1114] (2/4) Epoch 16, batch 4450, loss[loss=0.1635, simple_loss=0.2616, pruned_loss=0.03267, over 4937.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04395, over 939291.12 frames. ], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:02:21,618 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.83 vs. limit=15.0 +2024-07-28 23:02:26,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=210378.66666666666, ans=0.125 +2024-07-28 23:02:28,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.10 vs. limit=15.0 +2024-07-28 23:02:30,824 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.581e+01 5.996e+01 6.828e+01 9.558e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 23:02:42,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=210418.66666666666, ans=0.0 +2024-07-28 23:02:55,310 INFO [train.py:1114] (2/4) Epoch 16, batch 4500, loss[loss=0.1573, simple_loss=0.2573, pruned_loss=0.02867, over 4747.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2696, pruned_loss=0.04407, over 938822.37 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:04,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.55 vs. limit=22.5 +2024-07-28 23:03:26,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=210498.66666666666, ans=0.0 +2024-07-28 23:03:26,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=210498.66666666666, ans=0.125 +2024-07-28 23:03:28,422 INFO [train.py:1114] (2/4) Epoch 16, batch 4550, loss[loss=0.1492, simple_loss=0.2306, pruned_loss=0.03387, over 4900.00 frames. 
], tot_loss[loss=0.1799, simple_loss=0.2702, pruned_loss=0.0448, over 940703.47 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:37,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=210525.33333333334, ans=0.125 +2024-07-28 23:03:39,692 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.876e+01 5.739e+01 6.533e+01 7.196e+01 1.162e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 23:04:04,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=210565.33333333334, ans=0.2 +2024-07-28 23:04:06,256 INFO [train.py:1114] (2/4) Epoch 16, batch 4600, loss[loss=0.1736, simple_loss=0.2757, pruned_loss=0.03576, over 4518.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2695, pruned_loss=0.04446, over 938870.02 frames. ], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:12,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=210592.0, ans=0.2 +2024-07-28 23:04:15,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=210592.0, ans=0.2 +2024-07-28 23:04:37,004 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:04:39,401 INFO [train.py:1114] (2/4) Epoch 16, batch 4650, loss[loss=0.2083, simple_loss=0.3083, pruned_loss=0.05421, over 4845.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2698, pruned_loss=0.0446, over 940473.35 frames. ], batch size: 16, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:42,248 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:04:52,464 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.652e+01 6.180e+01 7.051e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 23:04:54,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=210658.66666666666, ans=0.0 +2024-07-28 23:05:04,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=12.0 +2024-07-28 23:05:04,388 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=210685.33333333334, ans=0.2 +2024-07-28 23:05:06,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=210685.33333333334, ans=0.125 +2024-07-28 23:05:06,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210685.33333333334, ans=0.0 +2024-07-28 23:05:12,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.12 vs. limit=6.0 +2024-07-28 23:05:13,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.22 vs. limit=15.0 +2024-07-28 23:05:19,327 INFO [train.py:1114] (2/4) Epoch 16, batch 4700, loss[loss=0.1609, simple_loss=0.2422, pruned_loss=0.03983, over 4715.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2692, pruned_loss=0.04423, over 937802.45 frames. 
], batch size: 11, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:05:26,156 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=210725.33333333334, ans=0.125 +2024-07-28 23:05:27,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210725.33333333334, ans=0.1 +2024-07-28 23:05:28,322 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.58 vs. limit=22.5 +2024-07-28 23:05:31,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-07-28 23:05:38,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=210738.66666666666, ans=10.0 +2024-07-28 23:05:43,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=210752.0, ans=0.2 +2024-07-28 23:05:44,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=210752.0, ans=0.125 +2024-07-28 23:05:54,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210765.33333333334, ans=0.1 +2024-07-28 23:05:56,333 INFO [train.py:1114] (2/4) Epoch 16, batch 4750, loss[loss=0.1851, simple_loss=0.275, pruned_loss=0.04762, over 4465.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2704, pruned_loss=0.0447, over 935578.33 frames. ], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:05:59,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-28 23:06:01,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=210778.66666666666, ans=0.02 +2024-07-28 23:06:02,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=210792.0, ans=0.125 +2024-07-28 23:06:06,443 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.562e+01 6.169e+01 6.958e+01 1.016e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 23:06:09,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=210792.0, ans=0.2 +2024-07-28 23:06:20,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=210818.66666666666, ans=0.125 +2024-07-28 23:06:27,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=210832.0, ans=0.125 +2024-07-28 23:06:34,270 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.64 vs. limit=15.0 +2024-07-28 23:06:34,617 INFO [train.py:1114] (2/4) Epoch 16, batch 4800, loss[loss=0.145, simple_loss=0.2449, pruned_loss=0.02253, over 4694.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2695, pruned_loss=0.04456, over 933084.02 frames. 
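[Editor's note] The `Whitening` lines compare a per-module statistic of the feature covariance against a `whitening_limit` (the `metric=... vs. limit=...` pairs); when the metric exceeds the limit, the module pushes activations back toward a whiter, less-correlated distribution. The exact definition lives in `scaling.py`; as a rough, assumed stand-in, one can measure how unevenly variance is spread across the covariance eigenvalues:

```python
# Rough stand-in for a whitening diagnostic: mean squared covariance
# eigenvalue over the squared mean eigenvalue. This is ~1.0 for a white
# feature distribution and grows as variance concentrates in a few
# directions. The true metric in scaling.py may differ.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)  # real eigenvalues, ascending
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

white = torch.randn(1000, 256)
print(whitening_metric(white))  # near 1 (sampling noise pushes it up a bit)
skewed = white * torch.linspace(0.1, 3.0, 256)  # uneven per-channel variance
print(whitening_metric(skewed))  # noticeably above 1
```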
], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:06:40,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=210845.33333333334, ans=0.2 +2024-07-28 23:06:45,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=210858.66666666666, ans=10.0 +2024-07-28 23:06:46,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=210858.66666666666, ans=0.0 +2024-07-28 23:06:50,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=210872.0, ans=0.0 +2024-07-28 23:06:55,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=210885.33333333334, ans=0.0 +2024-07-28 23:07:07,847 INFO [train.py:1114] (2/4) Epoch 16, batch 4850, loss[loss=0.1784, simple_loss=0.2859, pruned_loss=0.03548, over 4738.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2696, pruned_loss=0.0446, over 932785.85 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:07:28,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=210925.33333333334, ans=0.0 +2024-07-28 23:07:31,468 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.391e+01 6.068e+01 6.775e+01 1.177e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:07:36,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=210938.66666666666, ans=0.125 +2024-07-28 23:07:54,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=210965.33333333334, ans=0.125 +2024-07-28 23:07:58,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210965.33333333334, ans=0.1 +2024-07-28 23:07:59,302 INFO [train.py:1114] (2/4) Epoch 16, batch 4900, loss[loss=0.1583, simple_loss=0.2604, pruned_loss=0.02811, over 4760.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2685, pruned_loss=0.0441, over 934540.00 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:04,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=210978.66666666666, ans=0.125 +2024-07-28 23:08:06,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=210992.0, ans=0.2 +2024-07-28 23:08:07,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.53 vs. limit=12.0 +2024-07-28 23:08:11,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.94 vs. limit=15.0 +2024-07-28 23:08:31,132 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.87 vs. limit=15.0 +2024-07-28 23:08:33,345 INFO [train.py:1114] (2/4) Epoch 16, batch 4950, loss[loss=0.3122, simple_loss=0.3465, pruned_loss=0.1389, over 3131.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2697, pruned_loss=0.0448, over 931552.99 frames. 
], batch size: 35, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:37,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=211045.33333333334, ans=0.025 +2024-07-28 23:08:41,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=211058.66666666666, ans=0.0 +2024-07-28 23:08:42,924 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.320e+01 5.430e+01 5.977e+01 6.818e+01 1.036e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 23:08:52,234 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:09:05,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=211098.66666666666, ans=0.0 +2024-07-28 23:09:08,840 INFO [train.py:1114] (2/4) Epoch 16, batch 5000, loss[loss=0.1677, simple_loss=0.2716, pruned_loss=0.0319, over 4660.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2687, pruned_loss=0.04468, over 935415.82 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:12,491 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.43 vs. limit=15.0 +2024-07-28 23:09:23,783 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.87 vs. limit=22.5 +2024-07-28 23:09:24,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=211138.66666666666, ans=0.0 +2024-07-28 23:09:27,477 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:09:29,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=211152.0, ans=0.125 +2024-07-28 23:09:41,991 INFO [train.py:1114] (2/4) Epoch 16, batch 5050, loss[loss=0.161, simple_loss=0.246, pruned_loss=0.03801, over 4842.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2682, pruned_loss=0.04433, over 937931.84 frames. ], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:53,128 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.620e+01 6.064e+01 6.522e+01 1.168e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 23:09:53,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=211192.0, ans=0.1 +2024-07-28 23:10:08,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=211218.66666666666, ans=0.09899494936611666 +2024-07-28 23:10:15,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211232.0, ans=0.1 +2024-07-28 23:10:18,019 INFO [train.py:1114] (2/4) Epoch 16, batch 5100, loss[loss=0.169, simple_loss=0.265, pruned_loss=0.03655, over 4776.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2685, pruned_loss=0.04461, over 935601.35 frames. 
], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:24,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=211258.66666666666, ans=0.125 +2024-07-28 23:10:24,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.71 vs. limit=10.0 +2024-07-28 23:10:25,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.56 vs. limit=10.0 +2024-07-28 23:10:33,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.95 vs. limit=10.0 +2024-07-28 23:10:36,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=211272.0, ans=0.0 +2024-07-28 23:10:38,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211285.33333333334, ans=0.1 +2024-07-28 23:10:43,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=211285.33333333334, ans=0.0 +2024-07-28 23:10:46,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211298.66666666666, ans=0.125 +2024-07-28 23:10:48,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=211298.66666666666, ans=0.125 +2024-07-28 23:10:48,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211298.66666666666, ans=0.125 +2024-07-28 23:10:51,321 INFO [train.py:1114] (2/4) Epoch 16, batch 5150, loss[loss=0.2326, simple_loss=0.3448, pruned_loss=0.06026, over 4841.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.271, pruned_loss=0.04563, over 936372.88 frames. 
], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:56,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=211312.0, ans=0.025 +2024-07-28 23:10:58,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=211325.33333333334, ans=0.125 +2024-07-28 23:11:00,633 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.854e+01 6.432e+01 7.346e+01 1.040e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 23:11:00,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=211325.33333333334, ans=0.2 +2024-07-28 23:11:07,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=211338.66666666666, ans=0.0 +2024-07-28 23:11:11,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211338.66666666666, ans=0.0 +2024-07-28 23:11:13,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=211352.0, ans=0.0 +2024-07-28 23:11:21,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=211365.33333333334, ans=0.0 +2024-07-28 23:11:24,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=211365.33333333334, ans=0.125 +2024-07-28 23:11:26,552 INFO [train.py:1114] (2/4) Epoch 16, batch 5200, loss[loss=0.2028, simple_loss=0.2968, pruned_loss=0.05437, over 4669.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2701, pruned_loss=0.04506, over 936449.34 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:11:34,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211392.0, ans=0.1 +2024-07-28 23:11:38,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=211392.0, ans=0.95 +2024-07-28 23:12:01,708 INFO [train.py:1114] (2/4) Epoch 16, batch 5250, loss[loss=0.1799, simple_loss=0.2825, pruned_loss=0.03866, over 4905.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2692, pruned_loss=0.04452, over 936716.10 frames. ], batch size: 13, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:12,943 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.256e+01 5.544e+01 6.376e+01 7.640e+01 1.111e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 23:12:22,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211472.0, ans=0.125 +2024-07-28 23:12:34,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=211498.66666666666, ans=0.2 +2024-07-28 23:12:37,236 INFO [train.py:1114] (2/4) Epoch 16, batch 5300, loss[loss=0.1827, simple_loss=0.2785, pruned_loss=0.04345, over 4643.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2685, pruned_loss=0.04435, over 934949.19 frames. 
], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:38,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=211512.0, ans=0.125 +2024-07-28 23:12:45,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211525.33333333334, ans=0.1 +2024-07-28 23:12:46,197 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.30 vs. limit=15.0 +2024-07-28 23:12:53,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=211538.66666666666, ans=0.0 +2024-07-28 23:12:53,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=211538.66666666666, ans=0.0 +2024-07-28 23:13:06,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=211565.33333333334, ans=0.125 +2024-07-28 23:13:10,601 INFO [train.py:1114] (2/4) Epoch 16, batch 5350, loss[loss=0.1778, simple_loss=0.2544, pruned_loss=0.05057, over 4506.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2688, pruned_loss=0.04425, over 936580.70 frames. ], batch size: 10, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:15,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=211578.66666666666, ans=0.05 +2024-07-28 23:13:19,948 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.493e+01 6.071e+01 6.914e+01 1.248e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:13:22,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=211592.0, ans=0.125 +2024-07-28 23:13:39,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=211632.0, ans=0.2 +2024-07-28 23:13:43,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=211632.0, ans=0.04949747468305833 +2024-07-28 23:13:44,728 INFO [train.py:1114] (2/4) Epoch 16, batch 5400, loss[loss=0.1885, simple_loss=0.2842, pruned_loss=0.04634, over 4147.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2694, pruned_loss=0.04444, over 931401.84 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:48,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.93 vs. limit=15.0 +2024-07-28 23:13:54,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211658.66666666666, ans=0.1 +2024-07-28 23:14:00,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=211672.0, ans=0.0 +2024-07-28 23:14:18,017 INFO [train.py:1114] (2/4) Epoch 16, batch 5450, loss[loss=0.1858, simple_loss=0.2714, pruned_loss=0.05009, over 4720.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2686, pruned_loss=0.04384, over 934068.79 frames. 
], batch size: 11, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:14:18,255 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:14:20,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=211712.0, ans=0.025 +2024-07-28 23:14:26,733 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:14:27,872 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.574e+01 6.234e+01 6.810e+01 1.084e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:14:29,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=211725.33333333334, ans=0.0 +2024-07-28 23:14:41,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.26 vs. limit=15.0 +2024-07-28 23:14:45,785 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:14:49,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=211765.33333333334, ans=0.125 +2024-07-28 23:14:53,048 INFO [train.py:1114] (2/4) Epoch 16, batch 5500, loss[loss=0.1983, simple_loss=0.2907, pruned_loss=0.05292, over 4253.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2678, pruned_loss=0.04407, over 931209.09 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:02,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=211792.0, ans=10.0 +2024-07-28 23:15:09,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211805.33333333334, ans=0.1 +2024-07-28 23:15:26,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=211832.0, ans=0.2 +2024-07-28 23:15:27,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.68 vs. limit=15.0 +2024-07-28 23:15:28,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211845.33333333334, ans=0.1 +2024-07-28 23:15:28,521 INFO [train.py:1114] (2/4) Epoch 16, batch 5550, loss[loss=0.1779, simple_loss=0.2528, pruned_loss=0.05151, over 4706.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2672, pruned_loss=0.04372, over 933320.69 frames. ], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:37,921 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.698e+01 6.304e+01 7.513e+01 1.256e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 23:15:54,344 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.13 vs. limit=15.0 +2024-07-28 23:16:00,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=211898.66666666666, ans=0.125 +2024-07-28 23:16:02,703 INFO [train.py:1114] (2/4) Epoch 16, batch 5600, loss[loss=0.1834, simple_loss=0.2725, pruned_loss=0.04716, over 4738.00 frames. 
], tot_loss[loss=0.1778, simple_loss=0.2678, pruned_loss=0.04394, over 934275.84 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:19,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-28 23:16:38,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=211952.0, ans=0.125 +2024-07-28 23:16:45,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=211965.33333333334, ans=0.2 +2024-07-28 23:16:49,034 INFO [train.py:1114] (2/4) Epoch 16, batch 5650, loss[loss=0.1995, simple_loss=0.2895, pruned_loss=0.05473, over 4551.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2673, pruned_loss=0.04369, over 936811.24 frames. ], batch size: 21, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:56,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=211992.0, ans=0.025 +2024-07-28 23:16:57,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=211992.0, ans=0.035 +2024-07-28 23:16:58,583 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.506e+01 6.230e+01 6.941e+01 1.207e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 23:17:02,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.79 vs. limit=22.5 +2024-07-28 23:17:08,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=212005.33333333334, ans=0.125 +2024-07-28 23:17:10,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=212018.66666666666, ans=0.2 +2024-07-28 23:17:18,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212032.0, ans=0.125 +2024-07-28 23:17:19,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=212032.0, ans=0.025 +2024-07-28 23:17:22,862 INFO [train.py:1114] (2/4) Epoch 16, batch 5700, loss[loss=0.1859, simple_loss=0.2922, pruned_loss=0.0398, over 4697.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2684, pruned_loss=0.0442, over 937653.18 frames. 
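[Editor's note] The `grad_scale` field in the batch summaries is the AMP dynamic loss scale: it doubles from 32 to 64 at batch 5600 after a long run of overflow-free steps, and an inf/nan step later halves it again (it is back to 32 by batch 6250 below). This matches the standard PyTorch mechanism; the growth interval below is an assumption, since the run's actual setting is not logged:

```python
# Standard dynamic loss scaling with torch.cuda.amp: the scale doubles after
# growth_interval overflow-free steps and halves when a step produces
# inf/nan gradients, matching the 32 -> 64 -> 32 grad_scale seen in the log.
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=32.0,       # matches the grad_scale logged earlier
    growth_factor=2.0,
    backoff_factor=0.5,
    growth_interval=2000,  # assumed; not recorded in this log
)

def training_step(model, optimizer, batch, loss_fn):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = loss_fn(model(batch))
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # skipped internally if grads contain inf/nan
    scaler.update()         # grows or backs off the scale
    return loss.detach()
```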
], batch size: 13, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:17:22,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=212045.33333333334, ans=0.0 +2024-07-28 23:17:32,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212058.66666666666, ans=0.1 +2024-07-28 23:17:33,167 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=212058.66666666666, ans=0.0 +2024-07-28 23:17:36,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=212058.66666666666, ans=0.2 +2024-07-28 23:17:38,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212072.0, ans=0.125 +2024-07-28 23:17:45,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212072.0, ans=0.1 +2024-07-28 23:17:53,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=212098.66666666666, ans=0.125 +2024-07-28 23:17:55,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=212098.66666666666, ans=0.05 +2024-07-28 23:17:59,824 INFO [train.py:1114] (2/4) Epoch 16, batch 5750, loss[loss=0.176, simple_loss=0.2666, pruned_loss=0.0427, over 4745.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2693, pruned_loss=0.04443, over 937599.78 frames. ], batch size: 19, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:18:04,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=212112.0, ans=0.125 +2024-07-28 23:18:08,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=212125.33333333334, ans=0.0 +2024-07-28 23:18:10,183 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.670e+01 6.129e+01 6.618e+01 9.069e+01, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 23:18:10,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=212125.33333333334, ans=0.025 +2024-07-28 23:18:11,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=212125.33333333334, ans=0.125 +2024-07-28 23:18:13,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=212125.33333333334, ans=0.2 +2024-07-28 23:18:15,194 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-28 23:18:27,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=212152.0, ans=0.0 +2024-07-28 23:18:38,582 INFO [train.py:1114] (2/4) Epoch 16, batch 5800, loss[loss=0.2273, simple_loss=0.3081, pruned_loss=0.07329, over 4729.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.27, pruned_loss=0.04433, over 936695.21 frames. 
], batch size: 19, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:18:39,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212178.66666666666, ans=0.1 +2024-07-28 23:18:41,412 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=212178.66666666666, ans=0.125 +2024-07-28 23:19:12,030 INFO [train.py:1114] (2/4) Epoch 16, batch 5850, loss[loss=0.1941, simple_loss=0.2736, pruned_loss=0.05725, over 4486.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.27, pruned_loss=0.04475, over 937151.94 frames. ], batch size: 21, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:16,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=212245.33333333334, ans=0.125 +2024-07-28 23:19:21,306 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.632e+01 6.313e+01 6.909e+01 9.080e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 23:19:31,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.50 vs. limit=15.0 +2024-07-28 23:19:43,312 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.68 vs. limit=22.5 +2024-07-28 23:19:46,168 INFO [train.py:1114] (2/4) Epoch 16, batch 5900, loss[loss=0.1821, simple_loss=0.2863, pruned_loss=0.0389, over 4678.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2689, pruned_loss=0.04431, over 937728.89 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:53,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=212325.33333333334, ans=0.2 +2024-07-28 23:20:00,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=212338.66666666666, ans=0.2 +2024-07-28 23:20:06,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=12.0 +2024-07-28 23:20:07,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212352.0, ans=0.125 +2024-07-28 23:20:12,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=212365.33333333334, ans=0.025 +2024-07-28 23:20:19,860 INFO [train.py:1114] (2/4) Epoch 16, batch 5950, loss[loss=0.206, simple_loss=0.2978, pruned_loss=0.05711, over 4677.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2688, pruned_loss=0.04415, over 939606.30 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:22,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.22 vs. 
limit=15.0 +2024-07-28 23:20:24,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212378.66666666666, ans=0.125 +2024-07-28 23:20:25,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=212378.66666666666, ans=0.0 +2024-07-28 23:20:29,217 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.558e+01 6.099e+01 6.527e+01 9.669e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 23:20:32,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212392.0, ans=0.1 +2024-07-28 23:20:35,175 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.10 vs. limit=22.5 +2024-07-28 23:20:36,279 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.74 vs. limit=15.0 +2024-07-28 23:20:54,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=212445.33333333334, ans=0.125 +2024-07-28 23:20:55,233 INFO [train.py:1114] (2/4) Epoch 16, batch 6000, loss[loss=0.1816, simple_loss=0.2671, pruned_loss=0.04807, over 4298.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2679, pruned_loss=0.04372, over 937048.60 frames. ], batch size: 25, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:55,233 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-28 23:21:07,047 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1625, simple_loss=0.2653, pruned_loss=0.02984, over 944034.00 frames. +2024-07-28 23:21:07,048 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-28 23:21:09,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212445.33333333334, ans=0.125 +2024-07-28 23:21:11,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=212445.33333333334, ans=0.2 +2024-07-28 23:21:34,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.55 vs. limit=22.5 +2024-07-28 23:21:36,096 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-07-28 23:21:39,361 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.34 vs. limit=22.5 +2024-07-28 23:21:41,038 INFO [train.py:1114] (2/4) Epoch 16, batch 6050, loss[loss=0.1744, simple_loss=0.2664, pruned_loss=0.0412, over 4783.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.04412, over 938435.63 frames. 
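[Editor's note] At batch 6000 above, the trainer pauses to run the full validation set (loss 0.1625, below the ~0.178 running training loss, likely because dropout and the scheduled regularizers are disabled in eval mode) and reports peak GPU memory. A minimal version of that pattern — function and variable names here are illustrative, not `train.py`'s:

```python
# Minimal validation pass in the spirit of the "Computing validation loss"
# step above: eval mode, no gradients, loss averaged over frames, then a
# peak-memory report. Names are illustrative.
import torch

def compute_validation_loss(model, valid_loader, loss_fn, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = loss_fn(model, batch, device)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={tot_loss / tot_frames:.4f}, "
          f"Maximum memory allocated so far is {peak_mb}MB")
    return tot_loss / tot_frames
```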
], batch size: 12, lr: 4.65e-03, grad_scale: 64.0
+2024-07-28 23:21:44,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212512.0, ans=0.1
+2024-07-28 23:21:56,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=212525.33333333334, ans=0.0
+2024-07-28 23:21:57,154 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.490e+01 6.163e+01 6.956e+01 9.204e+01, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 23:22:05,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212538.66666666666, ans=0.125
+2024-07-28 23:22:11,144 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.31 vs. limit=15.0
+2024-07-28 23:22:21,153 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0
+2024-07-28 23:22:24,122 INFO [train.py:1114] (2/4) Epoch 16, batch 6100, loss[loss=0.2076, simple_loss=0.2946, pruned_loss=0.06036, over 4690.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2686, pruned_loss=0.04466, over 938718.99 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0
+2024-07-28 23:22:27,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=212578.66666666666, ans=0.0
+2024-07-28 23:22:30,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=212592.0, ans=0.125
+2024-07-28 23:22:52,271 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:23:09,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=212632.0, ans=0.0
+2024-07-28 23:23:11,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=212632.0, ans=0.0
+2024-07-28 23:23:20,107 INFO [train.py:1114] (2/4) Epoch 16, batch 6150, loss[loss=0.2143, simple_loss=0.3007, pruned_loss=0.0639, over 3241.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2682, pruned_loss=0.04428, over 937181.83 frames. ], batch size: 35, lr: 4.65e-03, grad_scale: 64.0
+2024-07-28 23:23:20,176 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:23:24,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=212645.33333333334, ans=0.125
+2024-07-28 23:23:29,817 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.196e+01 5.601e+01 6.236e+01 7.046e+01 1.205e+02, threshold=1.247e+02, percent-clipped=0.0
+2024-07-28 23:23:30,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.47 vs. limit=15.0
+2024-07-28 23:23:30,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.96 vs. limit=15.0
+2024-07-28 23:23:39,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212685.33333333334, ans=0.125
+2024-07-28 23:23:53,679 INFO [train.py:1114] (2/4) Epoch 16, batch 6200, loss[loss=0.2177, simple_loss=0.3074, pruned_loss=0.06401, over 4732.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2684, pruned_loss=0.04402, over 936642.32 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 64.0
+2024-07-28 23:23:54,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=212712.0, ans=0.125
+2024-07-28 23:23:56,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=212712.0, ans=0.025
+2024-07-28 23:24:00,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=212725.33333333334, ans=0.0
+2024-07-28 23:24:07,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=212738.66666666666, ans=0.125
+2024-07-28 23:24:08,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212738.66666666666, ans=0.1
+2024-07-28 23:24:10,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=212738.66666666666, ans=0.125
+2024-07-28 23:24:12,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=212738.66666666666, ans=0.025
+2024-07-28 23:24:19,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212752.0, ans=0.125
+2024-07-28 23:24:24,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=212752.0, ans=0.125
+2024-07-28 23:24:32,633 INFO [train.py:1114] (2/4) Epoch 16, batch 6250, loss[loss=0.1838, simple_loss=0.2786, pruned_loss=0.04452, over 4815.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2686, pruned_loss=0.04397, over 932408.14 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:24:46,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=212778.66666666666, ans=0.0
+2024-07-28 23:24:48,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=212792.0, ans=0.025
+2024-07-28 23:24:53,134 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.825e+01 6.547e+01 7.445e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0
+2024-07-28 23:24:56,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=212805.33333333334, ans=0.07
+2024-07-28 23:25:30,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=212832.0, ans=0.025
+2024-07-28 23:25:38,104 INFO [train.py:1114] (2/4) Epoch 16, batch 6300, loss[loss=0.1525, simple_loss=0.2395, pruned_loss=0.0327, over 4528.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2688, pruned_loss=0.04417, over 929556.84 frames. ], batch size: 10, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:25:40,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.55 vs. limit=15.0
+2024-07-28 23:25:48,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=212845.33333333334, ans=0.125
+2024-07-28 23:25:55,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.76 vs. limit=10.0
+2024-07-28 23:25:59,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212872.0, ans=0.125
+2024-07-28 23:26:02,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=212872.0, ans=10.0
+2024-07-28 23:26:04,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212885.33333333334, ans=0.1
+2024-07-28 23:26:20,514 INFO [train.py:1114] (2/4) Epoch 16, batch 6350, loss[loss=0.1775, simple_loss=0.2687, pruned_loss=0.04318, over 4381.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2683, pruned_loss=0.04381, over 933505.03 frames. ], batch size: 21, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:26:21,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=212912.0, ans=0.125
+2024-07-28 23:26:35,563 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:26:45,158 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.666e+01 6.076e+01 6.815e+01 1.142e+02, threshold=1.215e+02, percent-clipped=0.0
+2024-07-28 23:26:47,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212925.33333333334, ans=0.125
+2024-07-28 23:26:57,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212952.0, ans=0.1
+2024-07-28 23:26:59,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=212952.0, ans=0.1
+2024-07-28 23:27:10,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=212965.33333333334, ans=0.0
+2024-07-28 23:27:12,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=212965.33333333334, ans=0.125
+2024-07-28 23:27:13,405 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.78 vs. limit=22.5
+2024-07-28 23:27:14,263 INFO [train.py:1114] (2/4) Epoch 16, batch 6400, loss[loss=0.1952, simple_loss=0.2892, pruned_loss=0.05058, over 4633.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2687, pruned_loss=0.04422, over 934778.59 frames. ], batch size: 13, lr: 4.65e-03, grad_scale: 32.0
+2024-07-28 23:27:17,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=212978.66666666666, ans=0.0
+2024-07-28 23:27:25,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=212992.0, ans=0.025
+2024-07-28 23:27:28,640 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.88 vs. limit=15.0
+2024-07-28 23:27:39,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=213018.66666666666, ans=0.0
+2024-07-28 23:27:51,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.55 vs. limit=15.0
+2024-07-28 23:27:51,974 INFO [train.py:1114] (2/4) Epoch 16, batch 6450, loss[loss=0.1719, simple_loss=0.2564, pruned_loss=0.04377, over 4486.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2698, pruned_loss=0.04475, over 938394.20 frames. ], batch size: 21, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:28:08,732 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.830e+01 6.533e+01 7.899e+01 1.104e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-28 23:28:10,742 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.87 vs. limit=22.5
+2024-07-28 23:28:12,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0
+2024-07-28 23:28:19,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=213085.33333333334, ans=0.125
+2024-07-28 23:28:35,507 INFO [train.py:1114] (2/4) Epoch 16, batch 6500, loss[loss=0.2084, simple_loss=0.3039, pruned_loss=0.05649, over 3416.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2695, pruned_loss=0.04445, over 939798.35 frames. ], batch size: 35, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:29:05,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=213152.0, ans=0.125
+2024-07-28 23:29:19,583 INFO [train.py:1114] (2/4) Epoch 16, batch 6550, loss[loss=0.1605, simple_loss=0.2473, pruned_loss=0.03681, over 4816.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2688, pruned_loss=0.04402, over 942846.50 frames. ], batch size: 11, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:29:31,661 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.761e+01 6.311e+01 7.241e+01 1.321e+02, threshold=1.262e+02, percent-clipped=1.0
+2024-07-28 23:29:35,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.85 vs. limit=22.5
+2024-07-28 23:29:43,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=213218.66666666666, ans=0.2
+2024-07-28 23:29:46,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=213218.66666666666, ans=0.5
+2024-07-28 23:29:55,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213232.0, ans=0.125
+2024-07-28 23:29:59,642 INFO [train.py:1114] (2/4) Epoch 16, batch 6600, loss[loss=0.1992, simple_loss=0.2864, pruned_loss=0.05598, over 4934.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2683, pruned_loss=0.0442, over 944907.51 frames. ], batch size: 14, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:30:02,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213245.33333333334, ans=0.1
+2024-07-28 23:30:09,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=213258.66666666666, ans=0.125
+2024-07-28 23:30:25,589 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.83 vs. limit=15.0
+2024-07-28 23:30:27,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=213285.33333333334, ans=0.025
+2024-07-28 23:30:30,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213298.66666666666, ans=0.125
+2024-07-28 23:30:32,765 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=213298.66666666666, ans=0.0
+2024-07-28 23:30:33,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=213298.66666666666, ans=0.125
+2024-07-28 23:30:35,798 INFO [train.py:1114] (2/4) Epoch 16, batch 6650, loss[loss=0.1772, simple_loss=0.2651, pruned_loss=0.04463, over 4592.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2677, pruned_loss=0.04416, over 943382.29 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:30:46,058 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+01 5.740e+01 6.263e+01 6.841e+01 9.907e+01, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 23:30:46,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=213325.33333333334, ans=0.95
+2024-07-28 23:30:46,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=213325.33333333334, ans=0.2
+2024-07-28 23:30:54,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.80 vs. limit=22.5
+2024-07-28 23:30:55,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213338.66666666666, ans=0.1
+2024-07-28 23:30:59,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=213352.0, ans=0.025
+2024-07-28 23:31:00,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=15.0
+2024-07-28 23:31:01,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213352.0, ans=0.0
+2024-07-28 23:31:01,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=213352.0, ans=0.125
+2024-07-28 23:31:02,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=213352.0, ans=0.125
+2024-07-28 23:31:03,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=213352.0, ans=0.0
+2024-07-28 23:31:11,670 INFO [train.py:1114] (2/4) Epoch 16, batch 6700, loss[loss=0.2034, simple_loss=0.2906, pruned_loss=0.05807, over 4693.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2678, pruned_loss=0.04414, over 942169.82 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:31:29,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213405.33333333334, ans=0.0
+2024-07-28 23:31:36,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=213418.66666666666, ans=0.0
+2024-07-28 23:31:38,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=213432.0, ans=0.2
+2024-07-28 23:31:44,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.97 vs. limit=15.0
+2024-07-28 23:31:44,985 INFO [train.py:1114] (2/4) Epoch 16, batch 6750, loss[loss=0.1728, simple_loss=0.256, pruned_loss=0.04482, over 4158.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2678, pruned_loss=0.0443, over 940028.93 frames. ], batch size: 25, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:31:46,042 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.14 vs. limit=15.0
+2024-07-28 23:31:47,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=213445.33333333334, ans=0.05
+2024-07-28 23:31:59,733 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.135e+01 5.547e+01 6.307e+01 7.303e+01 1.020e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 23:32:14,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.54 vs. limit=10.0
+2024-07-28 23:33:12,646 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=7.00 vs. limit=12.0
+2024-07-28 23:33:52,055 INFO [train.py:1114] (2/4) Epoch 16, batch 6800, loss[loss=0.1836, simple_loss=0.29, pruned_loss=0.03856, over 4637.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2689, pruned_loss=0.04442, over 938843.14 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:33:59,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=12.0
+2024-07-28 23:34:01,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=213525.33333333334, ans=0.2
+2024-07-28 23:34:02,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213525.33333333334, ans=0.125
+2024-07-28 23:34:05,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=213525.33333333334, ans=0.09899494936611666
+2024-07-28 23:34:08,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.25 vs. limit=15.0
+2024-07-28 23:34:27,996 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:34:31,104 INFO [train.py:1114] (2/4) Epoch 16, batch 6850, loss[loss=0.1565, simple_loss=0.2583, pruned_loss=0.02732, over 4688.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2685, pruned_loss=0.04415, over 940365.84 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:34:40,949 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.896e+01 6.305e+01 7.215e+01 1.193e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 23:34:41,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.42 vs. limit=15.0
+2024-07-28 23:34:58,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213632.0, ans=0.125
+2024-07-28 23:35:04,778 INFO [train.py:1114] (2/4) Epoch 16, batch 6900, loss[loss=0.2184, simple_loss=0.3035, pruned_loss=0.06667, over 4970.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2688, pruned_loss=0.04415, over 942374.81 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:35:09,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.08 vs. limit=6.0
+2024-07-28 23:35:15,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213658.66666666666, ans=0.1
+2024-07-28 23:35:17,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=213658.66666666666, ans=0.0
+2024-07-28 23:35:22,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213672.0, ans=0.0
+2024-07-28 23:35:25,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213672.0, ans=0.0
+2024-07-28 23:35:30,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=213685.33333333334, ans=0.05
+2024-07-28 23:35:39,959 INFO [train.py:1114] (2/4) Epoch 16, batch 6950, loss[loss=0.1577, simple_loss=0.2462, pruned_loss=0.0346, over 4508.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2692, pruned_loss=0.04405, over 939751.12 frames. ], batch size: 10, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:35:50,013 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.740e+01 6.194e+01 7.107e+01 9.358e+01, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 23:35:54,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=213738.66666666666, ans=0.0
+2024-07-28 23:35:58,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=213738.66666666666, ans=0.125
+2024-07-28 23:36:10,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=213765.33333333334, ans=0.2
+2024-07-28 23:36:14,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=213765.33333333334, ans=0.125
+2024-07-28 23:36:14,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=213765.33333333334, ans=0.125
+2024-07-28 23:36:17,420 INFO [train.py:1114] (2/4) Epoch 16, batch 7000, loss[loss=0.1833, simple_loss=0.2849, pruned_loss=0.04086, over 4663.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2687, pruned_loss=0.04376, over 938330.96 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:36:23,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=213792.0, ans=0.125
+2024-07-28 23:36:29,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=213792.0, ans=0.0
+2024-07-28 23:36:30,204 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.64 vs. limit=15.0
+2024-07-28 23:36:32,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=213805.33333333334, ans=0.025
+2024-07-28 23:36:32,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.52 vs. limit=15.0
+2024-07-28 23:36:42,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=213818.66666666666, ans=0.0
+2024-07-28 23:36:42,776 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.51 vs. limit=15.0
+2024-07-28 23:36:43,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=213832.0, ans=6.0
+2024-07-28 23:36:44,057 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0
+2024-07-28 23:36:44,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=213832.0, ans=0.125
+2024-07-28 23:36:44,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=213832.0, ans=0.125
+2024-07-28 23:36:50,055 INFO [train.py:1114] (2/4) Epoch 16, batch 7050, loss[loss=0.1973, simple_loss=0.2913, pruned_loss=0.05163, over 4647.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2688, pruned_loss=0.04358, over 941663.30 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:36:50,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=213845.33333333334, ans=0.125
+2024-07-28 23:36:52,311 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=213845.33333333334, ans=0.125
+2024-07-28 23:37:00,893 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.658e+01 6.254e+01 7.324e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 23:37:07,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213872.0, ans=0.1
+2024-07-28 23:37:09,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=213872.0, ans=0.2
+2024-07-28 23:37:11,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213885.33333333334, ans=0.125
+2024-07-28 23:37:16,866 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.42 vs. limit=15.0
+2024-07-28 23:37:24,466 INFO [train.py:1114] (2/4) Epoch 16, batch 7100, loss[loss=0.1748, simple_loss=0.2663, pruned_loss=0.04166, over 4789.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04389, over 936997.06 frames. ], batch size: 15, lr: 4.64e-03, grad_scale: 32.0
+2024-07-28 23:37:45,163 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.98 vs. limit=15.0
+2024-07-28 23:37:51,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=213952.0, ans=0.0
+2024-07-28 23:37:52,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=12.0
+2024-07-28 23:37:54,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=213965.33333333334, ans=0.2
+2024-07-28 23:37:58,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=213978.66666666666, ans=0.125
+2024-07-28 23:37:59,200 INFO [train.py:1114] (2/4) Epoch 16, batch 7150, loss[loss=0.1915, simple_loss=0.2879, pruned_loss=0.04756, over 4533.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2673, pruned_loss=0.04373, over 937776.15 frames. ], batch size: 21, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:37:59,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=213978.66666666666, ans=0.0
+2024-07-28 23:38:02,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213978.66666666666, ans=0.1
+2024-07-28 23:38:07,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=213992.0, ans=15.0
+2024-07-28 23:38:08,741 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.615e+01 6.266e+01 7.149e+01 9.915e+01, threshold=1.253e+02, percent-clipped=0.0
+2024-07-28 23:38:16,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.40 vs. limit=15.0
+2024-07-28 23:38:18,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214018.66666666666, ans=0.0
+2024-07-28 23:38:31,557 INFO [train.py:1114] (2/4) Epoch 16, batch 7200, loss[loss=0.2015, simple_loss=0.2938, pruned_loss=0.05465, over 4795.00 frames. ], tot_loss[loss=0.178, simple_loss=0.268, pruned_loss=0.04394, over 937956.41 frames. ], batch size: 15, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:38:41,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=214058.66666666666, ans=0.2
+2024-07-28 23:38:54,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0
+2024-07-28 23:38:59,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=214098.66666666666, ans=0.2
+2024-07-28 23:39:01,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214098.66666666666, ans=0.0
+2024-07-28 23:39:05,749 INFO [train.py:1114] (2/4) Epoch 16, batch 7250, loss[loss=0.1566, simple_loss=0.2404, pruned_loss=0.03638, over 4849.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2668, pruned_loss=0.04362, over 940042.12 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:39:09,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214112.0, ans=0.125
+2024-07-28 23:39:15,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.553e+01 6.069e+01 6.578e+01 8.706e+01, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 23:39:39,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=214138.66666666666, ans=0.0
+2024-07-28 23:39:56,161 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=214178.66666666666, ans=0.125
+2024-07-28 23:39:56,633 INFO [train.py:1114] (2/4) Epoch 16, batch 7300, loss[loss=0.1753, simple_loss=0.2535, pruned_loss=0.04857, over 4861.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2657, pruned_loss=0.04315, over 939892.06 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:40:26,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.14 vs. limit=15.0
+2024-07-28 23:40:29,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=214232.0, ans=0.0
+2024-07-28 23:40:30,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=214245.33333333334, ans=0.125
+2024-07-28 23:40:30,740 INFO [train.py:1114] (2/4) Epoch 16, batch 7350, loss[loss=0.167, simple_loss=0.2513, pruned_loss=0.04136, over 4648.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2663, pruned_loss=0.04329, over 938963.10 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:40:33,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=214245.33333333334, ans=0.0
+2024-07-28 23:40:33,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=214245.33333333334, ans=0.07
+2024-07-28 23:40:41,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.572e+01 6.176e+01 6.846e+01 9.880e+01, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 23:40:51,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214285.33333333334, ans=0.125
+2024-07-28 23:40:53,064 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:40:56,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.77 vs. limit=10.0
+2024-07-28 23:41:08,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=214298.66666666666, ans=0.0
+2024-07-28 23:41:09,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214298.66666666666, ans=0.1
+2024-07-28 23:41:10,932 INFO [train.py:1114] (2/4) Epoch 16, batch 7400, loss[loss=0.1837, simple_loss=0.2855, pruned_loss=0.04095, over 4687.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2665, pruned_loss=0.0432, over 939943.30 frames. ], batch size: 13, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:41:13,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=214312.0, ans=0.0
+2024-07-28 23:41:15,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=214312.0, ans=0.125
+2024-07-28 23:41:35,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=214352.0, ans=0.125
+2024-07-28 23:41:40,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=214352.0, ans=0.0
+2024-07-28 23:41:43,113 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:41:50,709 INFO [train.py:1114] (2/4) Epoch 16, batch 7450, loss[loss=0.1786, simple_loss=0.2668, pruned_loss=0.04523, over 4608.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2661, pruned_loss=0.04355, over 937069.63 frames. ], batch size: 11, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:41:57,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214378.66666666666, ans=0.125
+2024-07-28 23:42:01,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214392.0, ans=0.125
+2024-07-28 23:42:02,402 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.840e+01 6.491e+01 7.591e+01 1.266e+02, threshold=1.298e+02, percent-clipped=1.0
+2024-07-28 23:42:02,869 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.83 vs. limit=10.0
+2024-07-28 23:42:25,750 INFO [train.py:1114] (2/4) Epoch 16, batch 7500, loss[loss=0.2082, simple_loss=0.2888, pruned_loss=0.06379, over 3347.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2672, pruned_loss=0.04383, over 935300.40 frames. ], batch size: 35, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:42:27,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.26 vs. limit=22.5
+2024-07-28 23:50:18,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=214445.33333333334, ans=0.0
+2024-07-28 23:50:32,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.30 vs. limit=15.0
+2024-07-28 23:50:36,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=12.0
+2024-07-28 23:50:42,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214485.33333333334, ans=0.125
+2024-07-28 23:50:43,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=214485.33333333334, ans=0.125
+2024-07-28 23:50:46,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=214498.66666666666, ans=0.2
+2024-07-28 23:50:47,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=214498.66666666666, ans=0.95
+2024-07-28 23:50:53,146 INFO [train.py:1114] (2/4) Epoch 16, batch 7550, loss[loss=0.1904, simple_loss=0.2842, pruned_loss=0.04827, over 4596.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2684, pruned_loss=0.04427, over 935251.22 frames. ], batch size: 17, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:50:57,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=214512.0, ans=0.0
+2024-07-28 23:51:02,700 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.06 vs. limit=8.0
+2024-07-28 23:51:02,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.493e+01 6.004e+01 6.763e+01 8.407e+01, threshold=1.201e+02, percent-clipped=0.0
+2024-07-28 23:51:18,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214565.33333333334, ans=0.1
+2024-07-28 23:51:19,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=214565.33333333334, ans=0.2
+2024-07-28 23:51:20,161 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.50 vs. limit=15.0
+2024-07-28 23:51:23,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214565.33333333334, ans=0.125
+2024-07-28 23:51:25,725 INFO [train.py:1114] (2/4) Epoch 16, batch 7600, loss[loss=0.2146, simple_loss=0.3019, pruned_loss=0.0637, over 4808.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.04411, over 937359.09 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:51:27,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=214578.66666666666, ans=0.2
+2024-07-28 23:51:33,014 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:51:41,096 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:51:44,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=214605.33333333334, ans=0.025
+2024-07-28 23:51:48,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214618.66666666666, ans=0.125
+2024-07-28 23:52:20,202 INFO [train.py:1114] (2/4) Epoch 16, batch 7650, loss[loss=0.1898, simple_loss=0.2811, pruned_loss=0.04926, over 4952.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.0444, over 936716.44 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:52:27,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0
+2024-07-28 23:52:29,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=214658.66666666666, ans=0.0
+2024-07-28 23:52:30,302 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.652e+01 6.093e+01 6.907e+01 1.144e+02, threshold=1.219e+02, percent-clipped=0.0
+2024-07-28 23:52:36,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=214672.0, ans=0.125
+2024-07-28 23:52:37,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=214672.0, ans=0.125
+2024-07-28 23:52:39,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=214685.33333333334, ans=0.025
+2024-07-28 23:52:45,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214685.33333333334, ans=0.0
+2024-07-28 23:52:49,996 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.23 vs. limit=12.0
+2024-07-28 23:52:53,659 INFO [train.py:1114] (2/4) Epoch 16, batch 7700, loss[loss=0.1801, simple_loss=0.279, pruned_loss=0.04061, over 4694.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2694, pruned_loss=0.04462, over 934719.97 frames. ], batch size: 13, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:52:53,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=214712.0, ans=0.0
+2024-07-28 23:53:01,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214725.33333333334, ans=0.125
+2024-07-28 23:53:05,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214725.33333333334, ans=0.125
+2024-07-28 23:53:08,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=214738.66666666666, ans=0.0
+2024-07-28 23:53:10,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.81 vs. limit=15.0
+2024-07-28 23:53:14,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214752.0, ans=0.125
+2024-07-28 23:53:17,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=214752.0, ans=10.0
+2024-07-28 23:53:17,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=214752.0, ans=0.0
+2024-07-28 23:53:25,969 INFO [train.py:1114] (2/4) Epoch 16, batch 7750, loss[loss=0.1873, simple_loss=0.2822, pruned_loss=0.04616, over 4929.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2702, pruned_loss=0.04474, over 935903.86 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:53:26,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.85 vs. limit=15.0
+2024-07-28 23:53:38,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=214778.66666666666, ans=0.125
+2024-07-28 23:53:39,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=214792.0, ans=0.1
+2024-07-28 23:53:45,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214792.0, ans=0.1
+2024-07-28 23:53:46,524 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.451e+01 5.576e+01 5.953e+01 6.432e+01 8.446e+01, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 23:53:49,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214805.33333333334, ans=0.1
+2024-07-28 23:53:49,066 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=214805.33333333334, ans=0.125
+2024-07-28 23:54:01,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=214818.66666666666, ans=0.125
+2024-07-28 23:54:02,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=214832.0, ans=0.125
+2024-07-28 23:54:03,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=214832.0, ans=0.2
+2024-07-28 23:54:06,399 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=12.0
+2024-07-28 23:54:09,316 INFO [train.py:1114] (2/4) Epoch 16, batch 7800, loss[loss=0.1723, simple_loss=0.2784, pruned_loss=0.03316, over 4663.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2698, pruned_loss=0.04437, over 937792.54 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0
+2024-07-28 23:54:11,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=214845.33333333334, ans=10.0
+2024-07-28 23:54:19,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214858.66666666666, ans=0.1
+2024-07-28 23:54:36,275 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:54:39,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214898.66666666666, ans=0.125
+2024-07-28 23:54:42,805 INFO [train.py:1114] (2/4) Epoch 16, batch 7850, loss[loss=0.1674, simple_loss=0.2483, pruned_loss=0.04327, over 4502.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2698, pruned_loss=0.04436, over 936310.59 frames. ], batch size: 10, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:54:51,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214925.33333333334, ans=0.1
+2024-07-28 23:54:52,704 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.575e+01 6.196e+01 7.184e+01 1.116e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 23:54:55,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=214925.33333333334, ans=0.125
+2024-07-28 23:55:27,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=214952.0, ans=0.0
+2024-07-28 23:55:35,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=214965.33333333334, ans=0.2
+2024-07-28 23:55:40,815 INFO [train.py:1114] (2/4) Epoch 16, batch 7900, loss[loss=0.1772, simple_loss=0.2846, pruned_loss=0.03488, over 4877.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2712, pruned_loss=0.04504, over 932932.90 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:55:56,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=214992.0, ans=0.2
+2024-07-28 23:56:05,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=215018.66666666666, ans=0.035
+2024-07-28 23:56:10,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=215032.0, ans=0.0
+2024-07-28 23:56:20,541 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:56:21,112 INFO [train.py:1114] (2/4) Epoch 16, batch 7950, loss[loss=0.27, simple_loss=0.3317, pruned_loss=0.1041, over 3150.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2706, pruned_loss=0.04469, over 934821.81 frames. ], batch size: 35, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:56:48,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=215058.66666666666, ans=0.025
+2024-07-28 23:56:52,128 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.562e+01 6.109e+01 6.836e+01 1.076e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 23:56:58,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215072.0, ans=0.1
+2024-07-28 23:56:59,562 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.51 vs. limit=22.5
+2024-07-28 23:57:36,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=215098.66666666666, ans=0.125
+2024-07-28 23:57:39,461 INFO [train.py:1114] (2/4) Epoch 16, batch 8000, loss[loss=0.1719, simple_loss=0.2581, pruned_loss=0.04286, over 4616.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2697, pruned_loss=0.04448, over 934128.84 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:57:46,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215125.33333333334, ans=0.125
+2024-07-28 23:57:49,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215125.33333333334, ans=0.125
+2024-07-28 23:57:57,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=215138.66666666666, ans=0.05
+2024-07-28 23:58:04,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.30 vs. limit=22.5
+2024-07-28 23:58:13,509 INFO [train.py:1114] (2/4) Epoch 16, batch 8050, loss[loss=0.1778, simple_loss=0.2725, pruned_loss=0.04157, over 4806.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2699, pruned_loss=0.04447, over 933808.91 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:58:15,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215178.66666666666, ans=0.0
+2024-07-28 23:58:24,552 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.579e+01 6.307e+01 7.164e+01 1.118e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 23:58:48,136 INFO [train.py:1114] (2/4) Epoch 16, batch 8100, loss[loss=0.1968, simple_loss=0.2833, pruned_loss=0.05513, over 4792.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2713, pruned_loss=0.04496, over 933846.44 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:58:50,690 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:58:54,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215258.66666666666, ans=0.0
+2024-07-28 23:59:05,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.81 vs. limit=15.0
+2024-07-28 23:59:21,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215298.66666666666, ans=0.125
+2024-07-28 23:59:22,314 INFO [train.py:1114] (2/4) Epoch 16, batch 8150, loss[loss=0.2044, simple_loss=0.2942, pruned_loss=0.05723, over 4812.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2696, pruned_loss=0.04426, over 937311.42 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:59:26,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.22 vs. limit=22.5
+2024-07-28 23:59:27,120 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=215312.0, ans=0.05
+2024-07-28 23:59:32,150 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.614e+01 6.330e+01 7.419e+01 1.009e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 23:59:32,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215325.33333333334, ans=0.125
+2024-07-28 23:59:33,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215325.33333333334, ans=0.1
+2024-07-28 23:59:54,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=215378.66666666666, ans=0.5
+2024-07-28 23:59:54,925 INFO [train.py:1114] (2/4) Epoch 16, batch 8200, loss[loss=0.2297, simple_loss=0.3086, pruned_loss=0.07541, over 4822.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2699, pruned_loss=0.04442, over 938613.38 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0
+2024-07-28 23:59:55,476 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.46 vs. limit=15.0
+2024-07-29 00:00:19,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=215432.0, ans=0.09899494936611666
+2024-07-29 00:00:25,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=215432.0, ans=0.125
+2024-07-29 00:00:30,608 INFO [train.py:1114] (2/4) Epoch 16, batch 8250, loss[loss=0.1844, simple_loss=0.2715, pruned_loss=0.04866, over 4893.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2703, pruned_loss=0.04441, over 938804.66 frames. ], batch size: 13, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:00:34,806 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:00:48,547 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.664e+01 6.137e+01 6.796e+01 1.110e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 00:00:55,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=215472.0, ans=0.0
+2024-07-29 00:01:02,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=215485.33333333334, ans=0.125
+2024-07-29 00:01:04,501 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=12.0
+2024-07-29 00:01:04,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=215485.33333333334, ans=0.0
+2024-07-29 00:01:13,428 INFO [train.py:1114] (2/4) Epoch 16, batch 8300, loss[loss=0.1614, simple_loss=0.2538, pruned_loss=0.0345, over 4907.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2708, pruned_loss=0.04461, over 939018.53 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:01:13,749 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.33 vs. limit=15.0
+2024-07-29 00:01:15,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. limit=10.0
+2024-07-29 00:01:52,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215525.33333333334, ans=0.125
+2024-07-29 00:02:40,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=215538.66666666666, ans=0.125
+2024-07-29 00:02:41,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=215538.66666666666, ans=0.0
+2024-07-29 00:02:59,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=215538.66666666666, ans=0.2
+2024-07-29 00:02:59,893 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0
+2024-07-29 00:03:08,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=215565.33333333334, ans=0.125
+2024-07-29 00:03:26,030 INFO [train.py:1114] (2/4) Epoch 16, batch 8350, loss[loss=0.1651, simple_loss=0.2533, pruned_loss=0.03843, over 4795.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2699, pruned_loss=0.04459, over 941735.88 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:03:27,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215578.66666666666, ans=0.1
+2024-07-29 00:03:38,684 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:03:39,857 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.677e+01 6.151e+01 6.738e+01 9.364e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 00:03:45,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0
+2024-07-29 00:03:47,498 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=215605.33333333334, ans=0.07
+2024-07-29 00:03:48,957 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.97 vs. limit=22.5
+2024-07-29 00:03:57,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=215632.0, ans=0.025
+2024-07-29 00:04:00,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=215632.0, ans=0.0
+2024-07-29 00:04:04,314 INFO [train.py:1114] (2/4) Epoch 16, batch 8400, loss[loss=0.1615, simple_loss=0.2413, pruned_loss=0.04084, over 4785.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2699, pruned_loss=0.04469, over 940594.09 frames. ], batch size: 12, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:04:05,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=215645.33333333334, ans=0.125
+2024-07-29 00:04:15,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.88 vs. limit=15.0
+2024-07-29 00:04:19,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. limit=6.0
+2024-07-29 00:04:24,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215685.33333333334, ans=0.1
+2024-07-29 00:04:27,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=215685.33333333334, ans=0.2
+2024-07-29 00:04:29,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.11 vs. limit=22.5
+2024-07-29 00:04:32,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=215698.66666666666, ans=0.125
+2024-07-29 00:04:32,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=215698.66666666666, ans=0.125
+2024-07-29 00:04:34,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=215698.66666666666, ans=0.2
+2024-07-29 00:04:36,884 INFO [train.py:1114] (2/4) Epoch 16, batch 8450, loss[loss=0.2084, simple_loss=0.2991, pruned_loss=0.05882, over 4810.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2706, pruned_loss=0.0446, over 939289.70 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:04:46,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.964e+01 6.863e+01 7.657e+01 1.232e+02, threshold=1.373e+02, percent-clipped=1.0
+2024-07-29 00:04:57,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215752.0, ans=0.0
+2024-07-29 00:05:00,005 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=15.0
+2024-07-29 00:05:09,547 INFO [train.py:1114] (2/4) Epoch 16, batch 8500, loss[loss=0.1529, simple_loss=0.2366, pruned_loss=0.03455, over 4603.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2695, pruned_loss=0.04442, over 939223.55 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 64.0
+2024-07-29 00:05:17,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=215792.0, ans=0.125
+2024-07-29 00:05:18,157 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=215792.0, ans=0.0
+2024-07-29 00:05:25,484 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=15.0
+2024-07-29 00:05:29,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=215818.66666666666, ans=0.125
+2024-07-29 00:05:38,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=215832.0, ans=0.125
+2024-07-29 00:05:40,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=215832.0, ans=0.125
+2024-07-29 00:05:41,924 INFO [train.py:1114] (2/4) Epoch 16, batch 8550, loss[loss=0.1806, simple_loss=0.2585, pruned_loss=0.05137, over 4784.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.04454, over 940241.23 frames. ], batch size: 11, lr: 4.61e-03, grad_scale: 64.0
+2024-07-29 00:05:47,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=215845.33333333334, ans=0.2
+2024-07-29 00:05:48,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=215858.66666666666, ans=0.0
+2024-07-29 00:05:52,589 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.843e+01 6.495e+01 7.573e+01 1.241e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-29 00:05:57,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=215872.0, ans=0.0
+2024-07-29 00:06:01,333 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.29 vs. limit=15.0
+2024-07-29 00:06:01,390 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.62 vs. limit=15.0
+2024-07-29 00:06:03,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=215885.33333333334, ans=0.025
+2024-07-29 00:06:09,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=215898.66666666666, ans=10.0
+2024-07-29 00:06:16,110 INFO [train.py:1114] (2/4) Epoch 16, batch 8600, loss[loss=0.2027, simple_loss=0.3037, pruned_loss=0.05081, over 4794.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2693, pruned_loss=0.04459, over 939939.09 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 64.0
+2024-07-29 00:06:23,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=215912.0, ans=0.0
+2024-07-29 00:06:30,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215925.33333333334, ans=0.125
+2024-07-29 00:06:32,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=215925.33333333334, ans=0.0
+2024-07-29 00:06:37,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=215938.66666666666, ans=0.025
+2024-07-29 00:06:42,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=215938.66666666666, ans=0.025
+2024-07-29 00:06:45,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=215952.0, ans=0.125
+2024-07-29 00:06:46,948 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:06:53,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=215965.33333333334, ans=0.09899494936611666
+2024-07-29 00:06:55,354 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:06:57,241 INFO [train.py:1114] (2/4) Epoch 16, batch 8650, loss[loss=0.194, simple_loss=0.2842, pruned_loss=0.0519, over 4890.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2692, pruned_loss=0.04487, over 940810.54 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0
+2024-07-29 00:07:08,158 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.650e+01 6.263e+01 7.133e+01 1.178e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 00:07:08,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215992.0, ans=0.1
+2024-07-29 00:07:18,770 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=216018.66666666666, ans=0.07
+2024-07-29 00:07:18,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=216018.66666666666, ans=0.0
+2024-07-29 00:07:19,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216018.66666666666, ans=0.1
+2024-07-29 00:07:22,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=216018.66666666666, ans=15.0
+2024-07-29 00:07:30,199 INFO [train.py:1114] (2/4) Epoch 16, batch 8700, loss[loss=0.1871, simple_loss=0.2855, pruned_loss=0.04432, over 4756.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2705, pruned_loss=0.04517, over 938167.82 frames. 
], batch size: 13, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:07:31,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216045.33333333334, ans=0.125 +2024-07-29 00:07:40,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216058.66666666666, ans=0.125 +2024-07-29 00:07:42,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.19 vs. limit=15.0 +2024-07-29 00:07:51,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=216085.33333333334, ans=0.125 +2024-07-29 00:08:06,546 INFO [train.py:1114] (2/4) Epoch 16, batch 8750, loss[loss=0.216, simple_loss=0.3, pruned_loss=0.06603, over 4681.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2694, pruned_loss=0.04491, over 936503.09 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:16,792 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.631e+01 6.456e+01 7.086e+01 1.065e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:08:17,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=216125.33333333334, ans=0.125 +2024-07-29 00:08:35,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=216165.33333333334, ans=0.125 +2024-07-29 00:08:37,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=216165.33333333334, ans=0.0 +2024-07-29 00:08:41,395 INFO [train.py:1114] (2/4) Epoch 16, batch 8800, loss[loss=0.2091, simple_loss=0.2977, pruned_loss=0.06026, over 4933.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2697, pruned_loss=0.045, over 937588.09 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:42,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=216178.66666666666, ans=0.0 +2024-07-29 00:08:46,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=216178.66666666666, ans=0.125 +2024-07-29 00:08:54,382 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:08:59,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=216205.33333333334, ans=0.1 +2024-07-29 00:09:03,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216218.66666666666, ans=0.125 +2024-07-29 00:09:14,358 INFO [train.py:1114] (2/4) Epoch 16, batch 8850, loss[loss=0.1853, simple_loss=0.2765, pruned_loss=0.04699, over 4616.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2695, pruned_loss=0.04551, over 932736.72 frames. ], batch size: 22, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:15,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.05 vs. 
limit=6.0 +2024-07-29 00:09:25,920 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.534e+01 6.492e+01 7.361e+01 1.003e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 00:09:26,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=216258.66666666666, ans=0.125 +2024-07-29 00:09:26,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216258.66666666666, ans=0.125 +2024-07-29 00:09:29,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216272.0, ans=0.1 +2024-07-29 00:09:32,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216272.0, ans=0.1 +2024-07-29 00:09:32,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=216272.0, ans=0.0 +2024-07-29 00:09:35,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216285.33333333334, ans=0.0 +2024-07-29 00:09:39,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=216285.33333333334, ans=0.2 +2024-07-29 00:09:46,081 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.37 vs. limit=15.0 +2024-07-29 00:09:48,095 INFO [train.py:1114] (2/4) Epoch 16, batch 8900, loss[loss=0.1658, simple_loss=0.2639, pruned_loss=0.03389, over 4923.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2695, pruned_loss=0.0457, over 930270.66 frames. 
], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:48,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216312.0, ans=0.1 +2024-07-29 00:09:51,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=216312.0, ans=0.125 +2024-07-29 00:09:52,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=216312.0, ans=0.2 +2024-07-29 00:09:55,590 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=216325.33333333334, ans=15.0 +2024-07-29 00:09:57,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=216325.33333333334, ans=0.1 +2024-07-29 00:09:57,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=216325.33333333334, ans=0.125 +2024-07-29 00:09:57,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=216325.33333333334, ans=10.0 +2024-07-29 00:10:06,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=216338.66666666666, ans=0.0 +2024-07-29 00:10:06,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=216352.0, ans=0.025 +2024-07-29 00:10:10,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=216352.0, ans=0.07 +2024-07-29 00:10:10,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216352.0, ans=0.1 +2024-07-29 00:10:13,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216352.0, ans=0.1 +2024-07-29 00:10:18,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.63 vs. limit=15.0 +2024-07-29 00:10:21,262 INFO [train.py:1114] (2/4) Epoch 16, batch 8950, loss[loss=0.1852, simple_loss=0.273, pruned_loss=0.04872, over 4503.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.0459, over 931318.76 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:22,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=216378.66666666666, ans=0.2 +2024-07-29 00:10:31,360 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.323e+01 5.469e+01 6.023e+01 7.554e+01 1.113e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 00:10:31,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216392.0, ans=0.125 +2024-07-29 00:10:32,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.40 vs. 
limit=15.0 +2024-07-29 00:10:36,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=216405.33333333334, ans=0.0 +2024-07-29 00:10:40,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=216418.66666666666, ans=0.125 +2024-07-29 00:10:43,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=216418.66666666666, ans=0.2 +2024-07-29 00:10:53,215 INFO [train.py:1114] (2/4) Epoch 16, batch 9000, loss[loss=0.1433, simple_loss=0.2325, pruned_loss=0.027, over 4646.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2683, pruned_loss=0.0451, over 934231.33 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:53,215 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 00:11:09,713 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.1365, 3.5010, 3.9514, 3.9941], device='cuda:2') +2024-07-29 00:11:09,936 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.4272, 2.2522, 2.8879, 3.0840, 2.9959, 2.7718, 3.1065, 2.1812], + device='cuda:2') +2024-07-29 00:11:10,918 INFO [train.py:1146] (2/4) Epoch 16, validation: loss=0.1631, simple_loss=0.2656, pruned_loss=0.03028, over 944034.00 frames. +2024-07-29 00:11:10,918 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 00:11:18,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=216458.66666666666, ans=0.125 +2024-07-29 00:11:18,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=216458.66666666666, ans=0.0 +2024-07-29 00:11:23,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=216472.0, ans=0.2 +2024-07-29 00:11:27,246 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-07-29 00:11:29,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.48 vs. limit=15.0 +2024-07-29 00:11:39,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=216498.66666666666, ans=0.2 +2024-07-29 00:11:43,595 INFO [train.py:1114] (2/4) Epoch 16, batch 9050, loss[loss=0.1577, simple_loss=0.2353, pruned_loss=0.04003, over 4473.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2671, pruned_loss=0.04438, over 935208.29 frames. ], batch size: 10, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:11:52,337 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.11 vs. limit=15.0 +2024-07-29 00:11:53,092 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. 
limit=6.0 +2024-07-29 00:11:54,114 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.894e+01 6.647e+01 7.904e+01 1.086e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-29 00:12:12,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.73 vs. limit=15.0 +2024-07-29 00:12:17,313 INFO [train.py:1114] (2/4) Epoch 16, batch 9100, loss[loss=0.1793, simple_loss=0.2705, pruned_loss=0.04409, over 4932.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2677, pruned_loss=0.04442, over 937484.69 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:21,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=216578.66666666666, ans=0.125 +2024-07-29 00:12:32,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=216605.33333333334, ans=0.025 +2024-07-29 00:12:42,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=216618.66666666666, ans=0.0 +2024-07-29 00:12:51,532 INFO [train.py:1114] (2/4) Epoch 16, batch 9150, loss[loss=0.1716, simple_loss=0.2651, pruned_loss=0.039, over 4806.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.269, pruned_loss=0.04489, over 936037.38 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:57,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=216658.66666666666, ans=0.015 +2024-07-29 00:12:58,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=216658.66666666666, ans=0.0 +2024-07-29 00:13:01,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.764e+01 6.440e+01 7.377e+01 1.090e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 00:13:07,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216672.0, ans=0.1 +2024-07-29 00:13:08,511 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.47 vs. limit=22.5 +2024-07-29 00:13:17,112 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:13:23,166 INFO [train.py:1114] (2/4) Epoch 16, batch 9200, loss[loss=0.1617, simple_loss=0.2484, pruned_loss=0.0375, over 4861.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2676, pruned_loss=0.04437, over 937809.24 frames. 
], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:13:26,633 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:13:27,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=216712.0, ans=0.125 +2024-07-29 00:13:30,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=216725.33333333334, ans=0.2 +2024-07-29 00:13:31,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=216725.33333333334, ans=0.125 +2024-07-29 00:13:34,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216725.33333333334, ans=0.0 +2024-07-29 00:13:43,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=216738.66666666666, ans=0.2 +2024-07-29 00:13:52,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216752.0, ans=0.1 +2024-07-29 00:13:58,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216765.33333333334, ans=0.125 +2024-07-29 00:13:59,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.48 vs. limit=15.0 +2024-07-29 00:14:01,550 INFO [train.py:1114] (2/4) Epoch 16, batch 9250, loss[loss=0.1944, simple_loss=0.2899, pruned_loss=0.04949, over 4640.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2682, pruned_loss=0.0446, over 938236.37 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:04,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=216778.66666666666, ans=0.0 +2024-07-29 00:14:04,971 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.65 vs. limit=15.0 +2024-07-29 00:14:07,820 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:14:11,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.549e+01 6.033e+01 6.747e+01 9.644e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 00:14:13,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216805.33333333334, ans=0.1 +2024-07-29 00:14:16,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=216805.33333333334, ans=0.0 +2024-07-29 00:14:16,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216805.33333333334, ans=0.125 +2024-07-29 00:14:22,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=216818.66666666666, ans=0.0 +2024-07-29 00:14:32,972 INFO [train.py:1114] (2/4) Epoch 16, batch 9300, loss[loss=0.157, simple_loss=0.2428, pruned_loss=0.03564, over 4770.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.268, pruned_loss=0.04423, over 938262.41 frames. 
], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:37,053 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.50 vs. limit=22.5 +2024-07-29 00:14:43,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=216858.66666666666, ans=0.07 +2024-07-29 00:15:04,406 INFO [train.py:1114] (2/4) Epoch 16, batch 9350, loss[loss=0.1524, simple_loss=0.2299, pruned_loss=0.03747, over 4815.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2683, pruned_loss=0.04423, over 935491.75 frames. ], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:08,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216912.0, ans=0.125 +2024-07-29 00:15:12,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=216925.33333333334, ans=0.125 +2024-07-29 00:15:14,956 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.736e+01 6.318e+01 7.656e+01 1.489e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 00:15:17,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=216938.66666666666, ans=0.0 +2024-07-29 00:15:23,199 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=216952.0, ans=0.0 +2024-07-29 00:15:24,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=216952.0, ans=0.125 +2024-07-29 00:15:27,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216952.0, ans=0.1 +2024-07-29 00:15:28,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=216952.0, ans=0.125 +2024-07-29 00:15:35,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=216978.66666666666, ans=0.04949747468305833 +2024-07-29 00:15:35,903 INFO [train.py:1114] (2/4) Epoch 16, batch 9400, loss[loss=0.1785, simple_loss=0.2712, pruned_loss=0.04289, over 4694.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04468, over 933281.54 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:44,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216992.0, ans=0.125 +2024-07-29 00:15:53,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=217005.33333333334, ans=0.2 +2024-07-29 00:15:57,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=217018.66666666666, ans=0.125 +2024-07-29 00:15:58,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.09 vs. limit=12.0 +2024-07-29 00:16:08,012 INFO [train.py:1114] (2/4) Epoch 16, batch 9450, loss[loss=0.1473, simple_loss=0.2229, pruned_loss=0.03584, over 4792.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2688, pruned_loss=0.04451, over 932387.08 frames. 
], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:18,006 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.72 vs. limit=22.5 +2024-07-29 00:16:25,651 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.291e+01 5.921e+01 6.735e+01 1.029e+02, threshold=1.184e+02, percent-clipped=0.0 +2024-07-29 00:16:26,751 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.51 vs. limit=10.0 +2024-07-29 00:16:30,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.98 vs. limit=22.5 +2024-07-29 00:16:30,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217072.0, ans=0.1 +2024-07-29 00:16:30,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=217072.0, ans=0.125 +2024-07-29 00:16:35,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=217085.33333333334, ans=0.2 +2024-07-29 00:16:35,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=217085.33333333334, ans=0.125 +2024-07-29 00:16:40,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=217098.66666666666, ans=0.0 +2024-07-29 00:16:46,862 INFO [train.py:1114] (2/4) Epoch 16, batch 9500, loss[loss=0.1788, simple_loss=0.2711, pruned_loss=0.04325, over 4718.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2683, pruned_loss=0.04406, over 934540.91 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:49,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=217112.0, ans=0.07 +2024-07-29 00:16:51,753 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:16:55,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=217125.33333333334, ans=0.0 +2024-07-29 00:16:57,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=217125.33333333334, ans=0.2 +2024-07-29 00:17:03,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=217138.66666666666, ans=0.125 +2024-07-29 00:17:17,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.22 vs. limit=15.0 +2024-07-29 00:17:18,852 INFO [train.py:1114] (2/4) Epoch 16, batch 9550, loss[loss=0.1558, simple_loss=0.2417, pruned_loss=0.03496, over 4777.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2689, pruned_loss=0.04424, over 932110.79 frames. 
], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:28,559 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.662e+01 6.269e+01 6.816e+01 8.303e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:17:29,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217192.0, ans=0.1 +2024-07-29 00:17:31,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-29 00:17:31,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=217205.33333333334, ans=0.2 +2024-07-29 00:17:31,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=217205.33333333334, ans=0.0 +2024-07-29 00:17:32,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=217205.33333333334, ans=0.0 +2024-07-29 00:17:48,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=217232.0, ans=0.125 +2024-07-29 00:17:50,236 INFO [train.py:1114] (2/4) Epoch 16, batch 9600, loss[loss=0.2338, simple_loss=0.3098, pruned_loss=0.07893, over 3357.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2697, pruned_loss=0.04442, over 931069.71 frames. ], batch size: 36, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:53,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=217245.33333333334, ans=10.0 +2024-07-29 00:17:55,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.96 vs. limit=6.0 +2024-07-29 00:17:58,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=217258.66666666666, ans=0.1 +2024-07-29 00:18:02,058 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.44 vs. limit=15.0 +2024-07-29 00:18:06,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=217272.0, ans=0.0 +2024-07-29 00:18:13,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=217285.33333333334, ans=0.125 +2024-07-29 00:18:22,131 INFO [train.py:1114] (2/4) Epoch 16, batch 9650, loss[loss=0.1732, simple_loss=0.275, pruned_loss=0.03571, over 4845.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2692, pruned_loss=0.04409, over 926956.65 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:28,038 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.80 vs. 
limit=15.0 +2024-07-29 00:18:32,295 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.687e+01 6.553e+01 7.550e+01 1.146e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:18:35,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=217338.66666666666, ans=0.04949747468305833 +2024-07-29 00:18:53,901 INFO [train.py:1114] (2/4) Epoch 16, batch 9700, loss[loss=0.2102, simple_loss=0.2984, pruned_loss=0.06098, over 4175.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2687, pruned_loss=0.04419, over 924938.08 frames. ], batch size: 25, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:58,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.27 vs. limit=12.0 +2024-07-29 00:19:09,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217405.33333333334, ans=0.125 +2024-07-29 00:19:09,243 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.58 vs. limit=15.0 +2024-07-29 00:19:13,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.21 vs. limit=22.5 +2024-07-29 00:19:14,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=217418.66666666666, ans=0.125 +2024-07-29 00:19:17,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=217418.66666666666, ans=0.0 +2024-07-29 00:19:21,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=217432.0, ans=0.125 +2024-07-29 00:19:22,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=217432.0, ans=0.125 +2024-07-29 00:19:24,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=217432.0, ans=0.05 +2024-07-29 00:19:25,711 INFO [train.py:1114] (2/4) Epoch 16, batch 9750, loss[loss=0.1548, simple_loss=0.253, pruned_loss=0.02826, over 4693.00 frames. ], tot_loss[loss=0.178, simple_loss=0.268, pruned_loss=0.04399, over 925223.43 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:19:25,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=217445.33333333334, ans=0.05 +2024-07-29 00:19:30,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.30 vs. limit=15.0 +2024-07-29 00:19:30,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217445.33333333334, ans=0.1 +2024-07-29 00:19:36,074 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+01 5.634e+01 6.378e+01 7.099e+01 1.078e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 00:19:36,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.54 vs. 
limit=22.5 +2024-07-29 00:19:37,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=217458.66666666666, ans=0.09899494936611666 +2024-07-29 00:19:45,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=217485.33333333334, ans=0.0 +2024-07-29 00:19:57,522 INFO [train.py:1114] (2/4) Epoch 16, batch 9800, loss[loss=0.1575, simple_loss=0.2576, pruned_loss=0.0287, over 4705.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2673, pruned_loss=0.04409, over 925352.04 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:19:58,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=217512.0, ans=0.05 +2024-07-29 00:20:14,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=217525.33333333334, ans=0.0 +2024-07-29 00:20:18,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=217538.66666666666, ans=0.125 +2024-07-29 00:20:19,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=217538.66666666666, ans=0.125 +2024-07-29 00:20:20,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=217538.66666666666, ans=0.0 +2024-07-29 00:20:30,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=217565.33333333334, ans=0.0 +2024-07-29 00:20:35,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217578.66666666666, ans=0.1 +2024-07-29 00:20:36,554 INFO [train.py:1114] (2/4) Epoch 16, batch 9850, loss[loss=0.1983, simple_loss=0.303, pruned_loss=0.04685, over 4894.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2684, pruned_loss=0.04438, over 927926.34 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:47,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=217592.0, ans=0.1 +2024-07-29 00:20:49,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=217592.0, ans=0.125 +2024-07-29 00:20:50,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.769e+01 6.344e+01 7.479e+01 1.066e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-29 00:20:56,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=217605.33333333334, ans=0.05 +2024-07-29 00:20:56,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=217605.33333333334, ans=0.0 +2024-07-29 00:21:05,116 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.20 vs. limit=22.5 +2024-07-29 00:21:11,749 INFO [train.py:1114] (2/4) Epoch 16, batch 9900, loss[loss=0.2078, simple_loss=0.2973, pruned_loss=0.05919, over 4825.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2697, pruned_loss=0.04507, over 926933.55 frames. 
], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:21:12,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217645.33333333334, ans=0.1 +2024-07-29 00:21:21,018 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-29 00:21:22,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=217658.66666666666, ans=0.125 +2024-07-29 00:21:23,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=217672.0, ans=0.0 +2024-07-29 00:21:25,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=217672.0, ans=0.0 +2024-07-29 00:21:42,968 INFO [train.py:1114] (2/4) Epoch 16, batch 9950, loss[loss=0.1461, simple_loss=0.2418, pruned_loss=0.0252, over 4782.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2695, pruned_loss=0.04517, over 929661.58 frames. ], batch size: 11, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:21:48,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=217712.0, ans=0.0 +2024-07-29 00:21:56,426 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.930e+01 6.462e+01 7.578e+01 1.307e+02, threshold=1.292e+02, percent-clipped=1.0 +2024-07-29 00:22:05,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=217752.0, ans=0.125 +2024-07-29 00:22:08,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217752.0, ans=0.125 +2024-07-29 00:22:14,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=217765.33333333334, ans=0.0 +2024-07-29 00:22:17,190 INFO [train.py:1114] (2/4) Epoch 16, batch 10000, loss[loss=0.2263, simple_loss=0.3172, pruned_loss=0.06768, over 4648.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2732, pruned_loss=0.04669, over 926514.66 frames. ], batch size: 16, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:27,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=217792.0, ans=0.125 +2024-07-29 00:22:35,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=217818.66666666666, ans=0.125 +2024-07-29 00:22:36,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=217818.66666666666, ans=0.0 +2024-07-29 00:22:37,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=217818.66666666666, ans=0.0 +2024-07-29 00:22:48,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=217845.33333333334, ans=0.05 +2024-07-29 00:22:49,163 INFO [train.py:1114] (2/4) Epoch 16, batch 10050, loss[loss=0.2092, simple_loss=0.2944, pruned_loss=0.06198, over 3405.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2764, pruned_loss=0.04828, over 914530.38 frames. 
], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:54,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=217845.33333333334, ans=0.07 +2024-07-29 00:22:54,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=217845.33333333334, ans=0.125 +2024-07-29 00:22:56,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.73 vs. limit=15.0 +2024-07-29 00:23:01,095 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.872e+01 6.658e+01 7.418e+01 1.272e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-29 00:23:01,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217858.66666666666, ans=0.125 +2024-07-29 00:23:23,204 INFO [train.py:1114] (2/4) Epoch 16, batch 10100, loss[loss=0.2194, simple_loss=0.2971, pruned_loss=0.07083, over 3375.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2808, pruned_loss=0.05248, over 862295.62 frames. ], batch size: 36, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:24,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=217912.0, ans=0.125 +2024-07-29 00:23:26,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=15.0 +2024-07-29 00:23:27,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=217912.0, ans=0.0 +2024-07-29 00:23:29,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.54 vs. limit=6.0 +2024-07-29 00:23:39,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=217938.66666666666, ans=0.125 +2024-07-29 00:23:40,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.53 vs. limit=12.0 +2024-07-29 00:23:45,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=217952.0, ans=0.04949747468305833 +2024-07-29 00:23:48,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=217965.33333333334, ans=0.125 +2024-07-29 00:23:49,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=217965.33333333334, ans=0.125 +2024-07-29 00:23:50,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=217965.33333333334, ans=0.1 +2024-07-29 00:23:56,121 INFO [train.py:1114] (2/4) Epoch 16, batch 10150, loss[loss=0.2252, simple_loss=0.2968, pruned_loss=0.07679, over 3373.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2843, pruned_loss=0.05617, over 821139.89 frames. 
], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:24:04,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=217992.0, ans=0.02 +2024-07-29 00:24:05,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=217992.0, ans=0.0 +2024-07-29 00:24:06,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.947e+01 6.782e+01 7.198e+01 7.904e+01 2.355e+02, threshold=1.440e+02, percent-clipped=1.0 +2024-07-29 00:24:10,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=218005.33333333334, ans=0.125 +2024-07-29 00:24:14,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=218018.66666666666, ans=0.125 +2024-07-29 00:24:15,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218018.66666666666, ans=0.0 +2024-07-29 00:24:17,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.78 vs. limit=15.0 +2024-07-29 00:24:18,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=218018.66666666666, ans=0.2 +2024-07-29 00:24:27,767 INFO [train.py:1114] (2/4) Epoch 16, batch 10200, loss[loss=0.1911, simple_loss=0.2889, pruned_loss=0.04666, over 3610.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2865, pruned_loss=0.05889, over 789512.95 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:24:29,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=218045.33333333334, ans=0.125 +2024-07-29 00:24:30,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.11 vs. limit=15.0 +2024-07-29 00:24:30,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=218045.33333333334, ans=0.125 +2024-07-29 00:24:36,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=218058.66666666666, ans=0.125 +2024-07-29 00:25:24,910 INFO [train.py:1114] (2/4) Epoch 17, batch 0, loss[loss=0.139, simple_loss=0.2356, pruned_loss=0.02123, over 4854.00 frames. ], tot_loss[loss=0.139, simple_loss=0.2356, pruned_loss=0.02123, over 4854.00 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:25:24,911 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 00:25:30,279 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9710, 3.5638, 3.5116, 3.7241], device='cuda:2') +2024-07-29 00:25:36,963 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.1632, simple_loss=0.2676, pruned_loss=0.0294, over 944034.00 frames. 
+2024-07-29 00:25:36,964 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 00:25:43,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218088.0, ans=0.125 +2024-07-29 00:25:50,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=218101.33333333334, ans=0.125 +2024-07-29 00:25:50,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=218101.33333333334, ans=0.125 +2024-07-29 00:25:52,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.81 vs. limit=15.0 +2024-07-29 00:26:23,658 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.974e+01 6.557e+01 7.210e+01 8.434e+01, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:26:27,164 INFO [train.py:1114] (2/4) Epoch 17, batch 50, loss[loss=0.1452, simple_loss=0.2203, pruned_loss=0.035, over 4620.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2687, pruned_loss=0.04401, over 206366.48 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:26:31,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218141.33333333334, ans=0.1 +2024-07-29 00:26:33,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218154.66666666666, ans=0.1 +2024-07-29 00:26:39,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=218168.0, ans=0.0 +2024-07-29 00:26:42,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=218168.0, ans=0.0 +2024-07-29 00:26:48,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=218181.33333333334, ans=0.09899494936611666 +2024-07-29 00:26:53,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=218194.66666666666, ans=0.0 +2024-07-29 00:26:54,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=218194.66666666666, ans=0.125 +2024-07-29 00:27:00,753 INFO [train.py:1114] (2/4) Epoch 17, batch 100, loss[loss=0.1696, simple_loss=0.2537, pruned_loss=0.04276, over 4627.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2708, pruned_loss=0.04362, over 365289.94 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:01,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-07-29 00:27:01,554 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:27:05,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=218208.0, ans=0.125 +2024-07-29 00:27:26,977 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.98 vs. 
limit=22.5 +2024-07-29 00:27:29,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=218261.33333333334, ans=0.0 +2024-07-29 00:27:30,361 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.772e+01 6.593e+01 7.419e+01 9.701e+01, threshold=1.319e+02, percent-clipped=0.0 +2024-07-29 00:27:33,609 INFO [train.py:1114] (2/4) Epoch 17, batch 150, loss[loss=0.1394, simple_loss=0.2266, pruned_loss=0.02607, over 4608.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2667, pruned_loss=0.04183, over 494006.33 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:34,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=218274.66666666666, ans=0.0 +2024-07-29 00:27:45,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=218288.0, ans=0.125 +2024-07-29 00:27:55,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.90 vs. limit=22.5 +2024-07-29 00:27:58,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=218314.66666666666, ans=0.125 +2024-07-29 00:28:06,840 INFO [train.py:1114] (2/4) Epoch 17, batch 200, loss[loss=0.1681, simple_loss=0.2655, pruned_loss=0.0354, over 4439.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2666, pruned_loss=0.04228, over 593398.28 frames. ], batch size: 21, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:28:10,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218341.33333333334, ans=0.1 +2024-07-29 00:28:11,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=218341.33333333334, ans=0.125 +2024-07-29 00:28:12,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=218341.33333333334, ans=0.07 +2024-07-29 00:28:16,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=218354.66666666666, ans=0.125 +2024-07-29 00:28:34,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218394.66666666666, ans=0.125 +2024-07-29 00:28:36,800 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.659e+01 6.456e+01 7.215e+01 1.150e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:28:39,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=218394.66666666666, ans=0.2 +2024-07-29 00:28:39,996 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.57 vs. limit=12.0 +2024-07-29 00:28:40,259 INFO [train.py:1114] (2/4) Epoch 17, batch 250, loss[loss=0.2222, simple_loss=0.3093, pruned_loss=0.06756, over 4620.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2668, pruned_loss=0.04266, over 669886.88 frames. 
], batch size: 16, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:01,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=218448.0, ans=0.125 +2024-07-29 00:29:15,281 INFO [train.py:1114] (2/4) Epoch 17, batch 300, loss[loss=0.1907, simple_loss=0.2854, pruned_loss=0.04806, over 4784.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2664, pruned_loss=0.04274, over 729391.39 frames. ], batch size: 15, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:22,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218474.66666666666, ans=0.1 +2024-07-29 00:29:24,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=218488.0, ans=0.125 +2024-07-29 00:29:46,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=218528.0, ans=0.125 +2024-07-29 00:29:47,006 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.418e+01 5.933e+01 6.484e+01 8.977e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-29 00:29:50,306 INFO [train.py:1114] (2/4) Epoch 17, batch 350, loss[loss=0.2063, simple_loss=0.287, pruned_loss=0.06274, over 4933.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2682, pruned_loss=0.04334, over 776187.07 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:29:51,978 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:30:06,151 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.56 vs. limit=10.0 +2024-07-29 00:30:15,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=218581.33333333334, ans=0.125 +2024-07-29 00:30:19,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=218594.66666666666, ans=0.125 +2024-07-29 00:30:19,789 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=218594.66666666666, ans=0.95 +2024-07-29 00:30:19,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=218594.66666666666, ans=0.0 +2024-07-29 00:30:19,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=218594.66666666666, ans=0.2 +2024-07-29 00:30:23,687 INFO [train.py:1114] (2/4) Epoch 17, batch 400, loss[loss=0.1908, simple_loss=0.2864, pruned_loss=0.04764, over 4700.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2684, pruned_loss=0.04358, over 813786.16 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:30:37,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=218621.33333333334, ans=0.125 +2024-07-29 00:30:38,839 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.71 vs. 
limit=15.0 +2024-07-29 00:30:46,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=218648.0, ans=0.0 +2024-07-29 00:30:50,327 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.32 vs. limit=15.0 +2024-07-29 00:30:58,044 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0 +2024-07-29 00:30:58,240 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.560e+01 5.995e+01 6.560e+01 9.746e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 00:30:58,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=218661.33333333334, ans=0.0 +2024-07-29 00:31:01,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218674.66666666666, ans=0.0 +2024-07-29 00:31:01,620 INFO [train.py:1114] (2/4) Epoch 17, batch 450, loss[loss=0.1915, simple_loss=0.2722, pruned_loss=0.05543, over 4636.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2688, pruned_loss=0.04396, over 839202.14 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:31:09,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=218688.0, ans=0.0 +2024-07-29 00:31:14,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.80 vs. limit=22.5 +2024-07-29 00:31:15,640 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:31:27,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=218728.0, ans=0.0 +2024-07-29 00:31:28,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218728.0, ans=0.1 +2024-07-29 00:31:30,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=218728.0, ans=0.0 +2024-07-29 00:31:34,900 INFO [train.py:1114] (2/4) Epoch 17, batch 500, loss[loss=0.2075, simple_loss=0.2927, pruned_loss=0.06113, over 4677.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2683, pruned_loss=0.04369, over 861440.44 frames. ], batch size: 15, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:31:38,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=218741.33333333334, ans=0.05 +2024-07-29 00:31:38,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=218741.33333333334, ans=0.125 +2024-07-29 00:31:42,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=218741.33333333334, ans=0.125 +2024-07-29 00:31:59,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.03 vs. 
limit=15.0 +2024-07-29 00:32:02,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218781.33333333334, ans=0.125 +2024-07-29 00:32:08,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=218794.66666666666, ans=0.025 +2024-07-29 00:32:09,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.408e+01 6.097e+01 6.893e+01 9.871e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 00:32:12,364 INFO [train.py:1114] (2/4) Epoch 17, batch 550, loss[loss=0.1985, simple_loss=0.286, pruned_loss=0.05551, over 4603.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2678, pruned_loss=0.04331, over 877719.40 frames. ], batch size: 17, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:32:23,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=218821.33333333334, ans=0.2 +2024-07-29 00:32:33,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=218834.66666666666, ans=0.125 +2024-07-29 00:32:48,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218861.33333333334, ans=0.1 +2024-07-29 00:32:49,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-07-29 00:32:50,131 INFO [train.py:1114] (2/4) Epoch 17, batch 600, loss[loss=0.1982, simple_loss=0.2792, pruned_loss=0.05867, over 4651.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2675, pruned_loss=0.04303, over 892183.33 frames. ], batch size: 16, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:32:59,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=218874.66666666666, ans=0.125 +2024-07-29 00:33:04,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218888.0, ans=0.125 +2024-07-29 00:33:05,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=218888.0, ans=0.0 +2024-07-29 00:33:27,705 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.574e+01 6.190e+01 7.231e+01 1.147e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 00:33:31,222 INFO [train.py:1114] (2/4) Epoch 17, batch 650, loss[loss=0.1735, simple_loss=0.2576, pruned_loss=0.04472, over 4760.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2668, pruned_loss=0.04304, over 903850.64 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:33:40,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=12.0 +2024-07-29 00:33:51,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=218981.33333333334, ans=0.0 +2024-07-29 00:33:55,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218981.33333333334, ans=0.125 +2024-07-29 00:34:05,189 INFO [train.py:1114] (2/4) Epoch 17, batch 700, loss[loss=0.1635, simple_loss=0.2492, pruned_loss=0.03897, over 4640.00 frames. 
], tot_loss[loss=0.175, simple_loss=0.2658, pruned_loss=0.0421, over 912054.77 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:34:06,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=219008.0, ans=0.125 +2024-07-29 00:34:08,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219008.0, ans=0.1 +2024-07-29 00:34:10,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219008.0, ans=0.125 +2024-07-29 00:34:19,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=219034.66666666666, ans=0.125 +2024-07-29 00:34:34,406 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.75 vs. limit=15.0 +2024-07-29 00:34:35,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219048.0, ans=0.0 +2024-07-29 00:34:41,478 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.477e+01 6.099e+01 6.897e+01 1.014e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 00:34:44,982 INFO [train.py:1114] (2/4) Epoch 17, batch 750, loss[loss=0.1837, simple_loss=0.2813, pruned_loss=0.04307, over 4694.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2662, pruned_loss=0.04235, over 918443.11 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:34:45,856 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:34:47,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=219074.66666666666, ans=0.125 +2024-07-29 00:34:50,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=219074.66666666666, ans=0.025 +2024-07-29 00:35:14,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=219128.0, ans=0.5 +2024-07-29 00:35:15,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=219128.0, ans=10.0 +2024-07-29 00:35:18,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.75 vs. limit=22.5 +2024-07-29 00:35:21,497 INFO [train.py:1114] (2/4) Epoch 17, batch 800, loss[loss=0.1526, simple_loss=0.2397, pruned_loss=0.03282, over 4857.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2675, pruned_loss=0.04303, over 923067.08 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:35:34,364 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.59 vs. 
limit=12.0 +2024-07-29 00:35:36,272 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=219168.0, ans=0.2 +2024-07-29 00:35:41,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=219181.33333333334, ans=0.125 +2024-07-29 00:35:43,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=219181.33333333334, ans=0.0 +2024-07-29 00:35:52,215 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.327e+01 5.596e+01 6.013e+01 6.802e+01 9.397e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-29 00:35:54,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=15.0 +2024-07-29 00:35:55,663 INFO [train.py:1114] (2/4) Epoch 17, batch 850, loss[loss=0.1688, simple_loss=0.2613, pruned_loss=0.0382, over 4663.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2671, pruned_loss=0.04336, over 927636.99 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:35:57,394 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.75 vs. limit=10.0 +2024-07-29 00:36:02,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=219221.33333333334, ans=0.125 +2024-07-29 00:36:03,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=219221.33333333334, ans=0.0 +2024-07-29 00:36:08,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=219221.33333333334, ans=0.125 +2024-07-29 00:36:18,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=219248.0, ans=0.0 +2024-07-29 00:36:19,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=219248.0, ans=0.0 +2024-07-29 00:36:23,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219261.33333333334, ans=0.1 +2024-07-29 00:36:31,473 INFO [train.py:1114] (2/4) Epoch 17, batch 900, loss[loss=0.1761, simple_loss=0.2607, pruned_loss=0.04569, over 4847.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2669, pruned_loss=0.04348, over 928416.22 frames. 
], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:36:34,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=219274.66666666666, ans=0.0 +2024-07-29 00:36:41,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=219288.0, ans=0.125 +2024-07-29 00:36:42,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=219288.0, ans=0.125 +2024-07-29 00:36:46,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219301.33333333334, ans=0.1 +2024-07-29 00:36:50,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=219314.66666666666, ans=0.2 +2024-07-29 00:36:52,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=219314.66666666666, ans=0.125 +2024-07-29 00:36:53,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.74 vs. limit=15.0 +2024-07-29 00:36:56,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.70 vs. limit=15.0 +2024-07-29 00:36:59,953 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:37:01,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.608e+01 6.144e+01 6.799e+01 1.059e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 00:37:04,606 INFO [train.py:1114] (2/4) Epoch 17, batch 950, loss[loss=0.1617, simple_loss=0.247, pruned_loss=0.0382, over 4768.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2675, pruned_loss=0.04373, over 930251.46 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:37:14,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219354.66666666666, ans=0.1 +2024-07-29 00:37:19,317 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=15.0 +2024-07-29 00:37:24,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219381.33333333334, ans=0.1 +2024-07-29 00:37:29,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=219381.33333333334, ans=0.0 +2024-07-29 00:37:41,928 INFO [train.py:1114] (2/4) Epoch 17, batch 1000, loss[loss=0.1579, simple_loss=0.25, pruned_loss=0.03286, over 4972.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2679, pruned_loss=0.04372, over 929575.07 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:37:44,676 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=219408.0, ans=0.125 +2024-07-29 00:37:50,903 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.57 vs. 
limit=15.0 +2024-07-29 00:38:01,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=219448.0, ans=0.125 +2024-07-29 00:38:13,345 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.640e+01 5.981e+01 6.813e+01 9.582e+01, threshold=1.196e+02, percent-clipped=0.0 +2024-07-29 00:38:16,859 INFO [train.py:1114] (2/4) Epoch 17, batch 1050, loss[loss=0.1781, simple_loss=0.2855, pruned_loss=0.03535, over 4874.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2677, pruned_loss=0.04353, over 932385.40 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:38:25,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219474.66666666666, ans=0.125 +2024-07-29 00:38:34,833 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.96 vs. limit=22.5 +2024-07-29 00:38:35,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=219488.0, ans=0.2 +2024-07-29 00:38:38,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219501.33333333334, ans=0.125 +2024-07-29 00:38:57,325 INFO [train.py:1114] (2/4) Epoch 17, batch 1100, loss[loss=0.1975, simple_loss=0.2931, pruned_loss=0.05093, over 4894.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2677, pruned_loss=0.04367, over 934555.26 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:38:57,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=219541.33333333334, ans=0.0 +2024-07-29 00:38:59,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=219541.33333333334, ans=0.125 +2024-07-29 00:39:14,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=219568.0, ans=0.2 +2024-07-29 00:39:19,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219581.33333333334, ans=0.1 +2024-07-29 00:39:19,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=219581.33333333334, ans=0.0 +2024-07-29 00:39:27,544 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.574e+01 5.915e+01 6.747e+01 1.337e+02, threshold=1.183e+02, percent-clipped=1.0 +2024-07-29 00:39:28,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=219594.66666666666, ans=0.025 +2024-07-29 00:39:30,885 INFO [train.py:1114] (2/4) Epoch 17, batch 1150, loss[loss=0.1839, simple_loss=0.2797, pruned_loss=0.04405, over 4897.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2673, pruned_loss=0.04352, over 934404.15 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:39:46,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219634.66666666666, ans=0.1 +2024-07-29 00:39:54,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.60 vs. 
limit=15.0 +2024-07-29 00:40:01,482 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=219661.33333333334, ans=0.0 +2024-07-29 00:40:04,857 INFO [train.py:1114] (2/4) Epoch 17, batch 1200, loss[loss=0.1717, simple_loss=0.2626, pruned_loss=0.04039, over 4867.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2667, pruned_loss=0.04287, over 933519.66 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:40:05,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.70 vs. limit=15.0 +2024-07-29 00:40:19,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.72 vs. limit=15.0 +2024-07-29 00:40:35,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=219728.0, ans=0.95 +2024-07-29 00:40:37,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.543e+01 6.182e+01 6.957e+01 1.085e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 00:40:41,025 INFO [train.py:1114] (2/4) Epoch 17, batch 1250, loss[loss=0.1928, simple_loss=0.2835, pruned_loss=0.05102, over 4815.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2667, pruned_loss=0.04253, over 937543.27 frames. ], batch size: 15, lr: 4.44e-03, grad_scale: 32.0 +2024-07-29 00:41:05,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.05 vs. limit=10.0 +2024-07-29 00:41:14,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.06 vs. limit=6.0 +2024-07-29 00:41:17,242 INFO [train.py:1114] (2/4) Epoch 17, batch 1300, loss[loss=0.1707, simple_loss=0.2683, pruned_loss=0.0365, over 4711.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2661, pruned_loss=0.04234, over 939274.23 frames. ], batch size: 19, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:41:28,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219821.33333333334, ans=0.1 +2024-07-29 00:41:35,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=219834.66666666666, ans=0.2 +2024-07-29 00:41:52,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.796e+01 6.477e+01 7.611e+01 1.197e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 00:41:56,434 INFO [train.py:1114] (2/4) Epoch 17, batch 1350, loss[loss=0.162, simple_loss=0.2591, pruned_loss=0.03249, over 4760.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2656, pruned_loss=0.04205, over 941594.76 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:41:59,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=219874.66666666666, ans=0.125 +2024-07-29 00:42:00,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=219874.66666666666, ans=0.0 +2024-07-29 00:42:01,831 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.94 vs. 
limit=22.5 +2024-07-29 00:42:11,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=219901.33333333334, ans=0.125 +2024-07-29 00:42:28,520 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.22 vs. limit=22.5 +2024-07-29 00:42:33,533 INFO [train.py:1114] (2/4) Epoch 17, batch 1400, loss[loss=0.1219, simple_loss=0.2029, pruned_loss=0.02048, over 4703.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2647, pruned_loss=0.04163, over 943362.35 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:42:37,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=219941.33333333334, ans=15.0 +2024-07-29 00:42:39,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=219941.33333333334, ans=0.2 +2024-07-29 00:42:41,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=15.0 +2024-07-29 00:42:59,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=219994.66666666666, ans=0.0 +2024-07-29 00:43:06,594 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.575e+01 5.917e+01 6.621e+01 1.311e+02, threshold=1.183e+02, percent-clipped=1.0 +2024-07-29 00:43:10,157 INFO [train.py:1114] (2/4) Epoch 17, batch 1450, loss[loss=0.1954, simple_loss=0.2926, pruned_loss=0.0491, over 4698.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2655, pruned_loss=0.04204, over 943177.17 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:43:20,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=220021.33333333334, ans=0.125 +2024-07-29 00:43:25,202 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0 +2024-07-29 00:43:41,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=220061.33333333334, ans=0.0 +2024-07-29 00:43:43,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=220061.33333333334, ans=0.125 +2024-07-29 00:43:44,782 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. limit=6.0 +2024-07-29 00:43:48,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.46 vs. limit=22.5 +2024-07-29 00:43:48,453 INFO [train.py:1114] (2/4) Epoch 17, batch 1500, loss[loss=0.1661, simple_loss=0.2631, pruned_loss=0.03449, over 4812.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2671, pruned_loss=0.0425, over 942603.00 frames. 
], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:43:53,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=220074.66666666666, ans=0.025 +2024-07-29 00:44:07,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220101.33333333334, ans=0.1 +2024-07-29 00:44:08,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=220114.66666666666, ans=0.2 +2024-07-29 00:44:15,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=220128.0, ans=0.0 +2024-07-29 00:44:18,919 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.771e+01 6.251e+01 6.983e+01 1.071e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 00:44:21,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=220128.0, ans=0.0 +2024-07-29 00:44:22,217 INFO [train.py:1114] (2/4) Epoch 17, batch 1550, loss[loss=0.1909, simple_loss=0.274, pruned_loss=0.05393, over 4907.00 frames. ], tot_loss[loss=0.176, simple_loss=0.267, pruned_loss=0.04246, over 939480.98 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:44:23,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=220141.33333333334, ans=0.05 +2024-07-29 00:44:32,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=220154.66666666666, ans=0.015 +2024-07-29 00:44:32,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=220154.66666666666, ans=0.125 +2024-07-29 00:44:33,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=220154.66666666666, ans=0.0 +2024-07-29 00:44:48,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=220194.66666666666, ans=0.0 +2024-07-29 00:44:49,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=220194.66666666666, ans=0.2 +2024-07-29 00:44:55,793 INFO [train.py:1114] (2/4) Epoch 17, batch 1600, loss[loss=0.164, simple_loss=0.2567, pruned_loss=0.03561, over 4877.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2668, pruned_loss=0.04244, over 938200.06 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:45:01,657 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.46 vs. limit=10.0 +2024-07-29 00:45:05,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=220221.33333333334, ans=0.07 +2024-07-29 00:45:06,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=220221.33333333334, ans=0.125 +2024-07-29 00:45:08,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.25 vs. 
limit=10.0 +2024-07-29 00:45:12,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220234.66666666666, ans=0.125 +2024-07-29 00:45:26,614 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.909e+01 5.495e+01 6.270e+01 6.960e+01 9.456e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:45:29,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=220274.66666666666, ans=0.5 +2024-07-29 00:45:30,189 INFO [train.py:1114] (2/4) Epoch 17, batch 1650, loss[loss=0.1866, simple_loss=0.282, pruned_loss=0.04563, over 4654.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2668, pruned_loss=0.04306, over 937541.48 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:45:34,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.38 vs. limit=22.5 +2024-07-29 00:45:42,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=220288.0, ans=0.125 +2024-07-29 00:45:48,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220301.33333333334, ans=0.125 +2024-07-29 00:45:52,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=220314.66666666666, ans=0.125 +2024-07-29 00:46:01,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=220328.0, ans=0.2 +2024-07-29 00:46:04,529 INFO [train.py:1114] (2/4) Epoch 17, batch 1700, loss[loss=0.1579, simple_loss=0.2446, pruned_loss=0.03564, over 4703.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2661, pruned_loss=0.04254, over 938966.26 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:46:12,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=220354.66666666666, ans=0.0 +2024-07-29 00:46:29,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=220381.33333333334, ans=0.0 +2024-07-29 00:46:33,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=220394.66666666666, ans=0.0 +2024-07-29 00:46:36,251 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:46:36,631 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.850e+01 6.496e+01 7.744e+01 1.150e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-29 00:46:37,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=220394.66666666666, ans=0.125 +2024-07-29 00:46:38,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=220394.66666666666, ans=0.125 +2024-07-29 00:46:38,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=220394.66666666666, ans=0.2 +2024-07-29 00:46:40,162 INFO [train.py:1114] (2/4) Epoch 17, batch 1750, loss[loss=0.1283, simple_loss=0.2153, pruned_loss=0.02067, over 4823.00 frames. 
], tot_loss[loss=0.1756, simple_loss=0.2658, pruned_loss=0.04272, over 939943.62 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0 +2024-07-29 00:46:42,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220408.0, ans=0.1 +2024-07-29 00:46:43,577 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=220408.0, ans=0.125 +2024-07-29 00:47:10,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=220461.33333333334, ans=0.0 +2024-07-29 00:47:13,477 INFO [train.py:1114] (2/4) Epoch 17, batch 1800, loss[loss=0.1736, simple_loss=0.271, pruned_loss=0.03808, over 4634.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2655, pruned_loss=0.04248, over 940157.93 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:47:13,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=220474.66666666666, ans=0.125 +2024-07-29 00:47:13,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=220474.66666666666, ans=0.125 +2024-07-29 00:47:14,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=220474.66666666666, ans=0.125 +2024-07-29 00:47:19,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=220488.0, ans=0.0 +2024-07-29 00:47:20,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=220488.0, ans=0.2 +2024-07-29 00:47:26,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220501.33333333334, ans=0.1 +2024-07-29 00:47:45,638 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.725e+01 6.271e+01 7.257e+01 1.188e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:47:49,040 INFO [train.py:1114] (2/4) Epoch 17, batch 1850, loss[loss=0.1936, simple_loss=0.2871, pruned_loss=0.05002, over 4812.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2667, pruned_loss=0.04292, over 940181.02 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:48:00,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=220554.66666666666, ans=0.025 +2024-07-29 00:48:03,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.61 vs. limit=10.0 +2024-07-29 00:48:07,220 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. 
limit=6.0 +2024-07-29 00:48:11,505 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=220581.33333333334, ans=0.015 +2024-07-29 00:48:21,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=220594.66666666666, ans=0.125 +2024-07-29 00:48:21,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=220594.66666666666, ans=0.0 +2024-07-29 00:48:23,127 INFO [train.py:1114] (2/4) Epoch 17, batch 1900, loss[loss=0.1583, simple_loss=0.2584, pruned_loss=0.02915, over 4669.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04282, over 941272.68 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:48:38,012 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0 +2024-07-29 00:48:46,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0 +2024-07-29 00:48:47,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=220648.0, ans=0.5 +2024-07-29 00:48:49,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.25 vs. limit=22.5 +2024-07-29 00:48:50,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220648.0, ans=0.125 +2024-07-29 00:48:55,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.883e+01 6.427e+01 8.062e+01 1.126e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-29 00:48:55,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220661.33333333334, ans=0.125 +2024-07-29 00:49:10,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=220661.33333333334, ans=0.0 +2024-07-29 00:49:13,516 INFO [train.py:1114] (2/4) Epoch 17, batch 1950, loss[loss=0.1588, simple_loss=0.2517, pruned_loss=0.03295, over 4896.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2679, pruned_loss=0.04281, over 943522.84 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:49:16,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.16 vs. 
limit=12.0 +2024-07-29 00:49:23,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=220688.0, ans=0.0 +2024-07-29 00:49:51,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220701.33333333334, ans=0.1 +2024-07-29 00:49:55,097 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220701.33333333334, ans=0.0 +2024-07-29 00:50:28,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=220741.33333333334, ans=15.0 +2024-07-29 00:50:28,661 INFO [train.py:1114] (2/4) Epoch 17, batch 2000, loss[loss=0.1608, simple_loss=0.2392, pruned_loss=0.04122, over 4804.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2693, pruned_loss=0.04382, over 940391.64 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 64.0 +2024-07-29 00:50:57,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220741.33333333334, ans=0.125 +2024-07-29 00:51:03,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220754.66666666666, ans=0.1 +2024-07-29 00:51:20,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=220768.0, ans=0.125 +2024-07-29 00:51:25,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=220781.33333333334, ans=0.125 +2024-07-29 00:51:25,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=220781.33333333334, ans=0.0 +2024-07-29 00:51:26,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=220781.33333333334, ans=0.0 +2024-07-29 00:51:27,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220781.33333333334, ans=0.125 +2024-07-29 00:51:32,746 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.436e+01 5.997e+01 6.741e+01 1.066e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 00:51:36,130 INFO [train.py:1114] (2/4) Epoch 17, batch 2050, loss[loss=0.1471, simple_loss=0.227, pruned_loss=0.03355, over 4609.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2683, pruned_loss=0.04368, over 939288.70 frames. ], batch size: 11, lr: 4.42e-03, grad_scale: 64.0 +2024-07-29 00:51:54,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=220834.66666666666, ans=0.0 +2024-07-29 00:51:55,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=220834.66666666666, ans=0.0 +2024-07-29 00:51:56,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.72 vs. 
limit=22.5 +2024-07-29 00:51:56,487 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220848.0, ans=0.125 +2024-07-29 00:51:56,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220848.0, ans=0.1 +2024-07-29 00:52:01,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.77 vs. limit=15.0 +2024-07-29 00:52:17,008 INFO [train.py:1114] (2/4) Epoch 17, batch 2100, loss[loss=0.1777, simple_loss=0.2721, pruned_loss=0.04164, over 4753.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04305, over 941035.22 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:52:34,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=220901.33333333334, ans=0.125 +2024-07-29 00:52:35,678 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.51 vs. limit=12.0 +2024-07-29 00:52:46,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=220928.0, ans=0.025 +2024-07-29 00:52:48,042 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.567e+01 6.209e+01 7.288e+01 1.074e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 00:52:49,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=220928.0, ans=0.125 +2024-07-29 00:52:50,929 INFO [train.py:1114] (2/4) Epoch 17, batch 2150, loss[loss=0.1521, simple_loss=0.2465, pruned_loss=0.02881, over 4901.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2663, pruned_loss=0.04225, over 944050.56 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:52:54,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=220941.33333333334, ans=0.125 +2024-07-29 00:52:57,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220954.66666666666, ans=0.125 +2024-07-29 00:52:58,726 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:53:00,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.72 vs. limit=22.5 +2024-07-29 00:53:26,727 INFO [train.py:1114] (2/4) Epoch 17, batch 2200, loss[loss=0.2093, simple_loss=0.3065, pruned_loss=0.056, over 4814.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2672, pruned_loss=0.04279, over 943352.12 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:53:33,247 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.79 vs. limit=22.5 +2024-07-29 00:53:57,772 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.667e+01 6.562e+01 7.774e+01 1.023e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 00:54:00,483 INFO [train.py:1114] (2/4) Epoch 17, batch 2250, loss[loss=0.1602, simple_loss=0.2582, pruned_loss=0.03104, over 4693.00 frames. 
], tot_loss[loss=0.1766, simple_loss=0.2675, pruned_loss=0.0429, over 941979.75 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:54:06,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221088.0, ans=0.125 +2024-07-29 00:54:14,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=221101.33333333334, ans=0.125 +2024-07-29 00:54:15,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=221101.33333333334, ans=0.2 +2024-07-29 00:54:23,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.41 vs. limit=15.0 +2024-07-29 00:54:27,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=221128.0, ans=0.125 +2024-07-29 00:54:33,746 INFO [train.py:1114] (2/4) Epoch 17, batch 2300, loss[loss=0.16, simple_loss=0.2543, pruned_loss=0.03289, over 4924.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2661, pruned_loss=0.04259, over 939578.14 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:54:37,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=221141.33333333334, ans=0.2 +2024-07-29 00:54:37,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221141.33333333334, ans=0.1 +2024-07-29 00:54:38,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=221141.33333333334, ans=0.025 +2024-07-29 00:54:46,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.19 vs. limit=22.5 +2024-07-29 00:54:48,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221168.0, ans=0.1 +2024-07-29 00:54:56,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=221181.33333333334, ans=0.2 +2024-07-29 00:55:03,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=221194.66666666666, ans=0.025 +2024-07-29 00:55:05,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=221194.66666666666, ans=0.0 +2024-07-29 00:55:06,975 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.506e+01 6.021e+01 6.838e+01 1.144e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 00:55:09,637 INFO [train.py:1114] (2/4) Epoch 17, batch 2350, loss[loss=0.2029, simple_loss=0.3025, pruned_loss=0.05167, over 4642.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2666, pruned_loss=0.04271, over 941736.29 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0 +2024-07-29 00:55:10,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.50 vs. 
limit=22.5
+2024-07-29 00:55:20,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221221.33333333334, ans=0.125
+2024-07-29 00:55:23,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=221234.66666666666, ans=0.125
+2024-07-29 00:55:29,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=221248.0, ans=0.1
+2024-07-29 00:55:43,253 INFO [train.py:1114] (2/4) Epoch 17, batch 2400, loss[loss=0.179, simple_loss=0.2673, pruned_loss=0.04534, over 4635.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2676, pruned_loss=0.04311, over 941376.84 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:55:43,389 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:55:44,989 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0
+2024-07-29 00:55:52,328 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.36 vs. limit=15.0
+2024-07-29 00:56:08,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=221314.66666666666, ans=0.125
+2024-07-29 00:56:12,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0
+2024-07-29 00:56:14,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221328.0, ans=0.125
+2024-07-29 00:56:17,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 5.694e+01 6.302e+01 6.928e+01 9.959e+01, threshold=1.260e+02, percent-clipped=0.0
+2024-07-29 00:56:20,730 INFO [train.py:1114] (2/4) Epoch 17, batch 2450, loss[loss=0.152, simple_loss=0.2432, pruned_loss=0.03042, over 4694.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2688, pruned_loss=0.04427, over 937038.37 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:56:25,697 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:56:28,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=221354.66666666666, ans=0.5
+2024-07-29 00:56:31,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221354.66666666666, ans=0.0
+2024-07-29 00:56:38,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221368.0, ans=0.125
+2024-07-29 00:56:43,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=221381.33333333334, ans=0.125
+2024-07-29 00:56:44,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0
+2024-07-29 00:56:54,117 INFO [train.py:1114] (2/4) Epoch 17, batch 2500, loss[loss=0.1651, simple_loss=0.2529, pruned_loss=0.03866, over 4810.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2684, pruned_loss=0.04411, over 939449.29 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:56:54,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221408.0, ans=0.125
+2024-07-29 00:57:13,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221448.0, ans=0.1
+2024-07-29 00:57:18,017 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0
+2024-07-29 00:57:19,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221448.0, ans=0.125
+2024-07-29 00:57:19,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.00 vs. limit=22.5
+2024-07-29 00:57:25,001 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.455e+01 6.019e+01 6.790e+01 9.676e+01, threshold=1.204e+02, percent-clipped=0.0
+2024-07-29 00:57:27,790 INFO [train.py:1114] (2/4) Epoch 17, batch 2550, loss[loss=0.1353, simple_loss=0.2182, pruned_loss=0.02622, over 4817.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2683, pruned_loss=0.04365, over 938681.20 frames. ], batch size: 11, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:57:44,444 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.08 vs. limit=22.5
+2024-07-29 00:57:48,994 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0
+2024-07-29 00:57:49,338 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=221501.33333333334, ans=0.125
+2024-07-29 00:58:21,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=221514.66666666666, ans=0.125
+2024-07-29 00:58:23,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=221528.0, ans=0.125
+2024-07-29 00:58:29,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=221528.0, ans=0.125
+2024-07-29 00:58:29,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=221541.33333333334, ans=0.125
+2024-07-29 00:58:30,288 INFO [train.py:1114] (2/4) Epoch 17, batch 2600, loss[loss=0.1777, simple_loss=0.2629, pruned_loss=0.04623, over 4898.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04384, over 937861.12 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:58:34,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=221541.33333333334, ans=0.125
+2024-07-29 00:58:51,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=221568.0, ans=0.125
+2024-07-29 00:59:08,392 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.744e+01 6.230e+01 7.123e+01 1.037e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-29 00:59:09,271 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=221594.66666666666, ans=0.125
+2024-07-29 00:59:21,123 INFO [train.py:1114] (2/4) Epoch 17, batch 2650, loss[loss=0.208, simple_loss=0.2961, pruned_loss=0.05997, over 4608.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2684, pruned_loss=0.04419, over 939558.46 frames. ], batch size: 16, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:59:23,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=221608.0, ans=0.125
+2024-07-29 00:59:32,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221621.33333333334, ans=0.125
+2024-07-29 00:59:35,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221634.66666666666, ans=0.0
+2024-07-29 00:59:40,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=221648.0, ans=0.125
+2024-07-29 00:59:45,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.49 vs. limit=15.0
+2024-07-29 00:59:46,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=221648.0, ans=0.0
+2024-07-29 00:59:51,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=221661.33333333334, ans=0.0
+2024-07-29 00:59:54,775 INFO [train.py:1114] (2/4) Epoch 17, batch 2700, loss[loss=0.1977, simple_loss=0.2958, pruned_loss=0.04979, over 4744.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.268, pruned_loss=0.04408, over 939703.41 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 01:00:05,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=221688.0, ans=0.0
+2024-07-29 01:00:27,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=221714.66666666666, ans=10.0
+2024-07-29 01:00:33,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221728.0, ans=0.1
+2024-07-29 01:00:37,060 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.539e+01 6.361e+01 7.423e+01 1.026e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-29 01:00:39,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221728.0, ans=0.125
+2024-07-29 01:00:41,173 INFO [train.py:1114] (2/4) Epoch 17, batch 2750, loss[loss=0.1587, simple_loss=0.2462, pruned_loss=0.03557, over 4714.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2672, pruned_loss=0.04385, over 939669.45 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 01:00:44,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=221741.33333333334, ans=0.125
+2024-07-29 01:01:03,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221768.0, ans=0.125
+2024-07-29 01:01:08,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-29 01:01:08,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=221781.33333333334, ans=0.025
+2024-07-29 01:01:17,791 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.37 vs. limit=22.5
+2024-07-29 01:01:22,026 INFO [train.py:1114] (2/4) Epoch 17, batch 2800, loss[loss=0.2627, simple_loss=0.3355, pruned_loss=0.09498, over 3265.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2678, pruned_loss=0.0442, over 937199.09 frames. ], batch size: 35, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:01:22,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=221808.0, ans=0.0
+2024-07-29 01:01:26,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221808.0, ans=0.1
+2024-07-29 01:01:33,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=221821.33333333334, ans=0.0
+2024-07-29 01:01:39,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=11.34 vs. limit=15.0
+2024-07-29 01:01:48,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=221848.0, ans=0.125
+2024-07-29 01:01:53,602 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.698e+01 6.511e+01 7.478e+01 1.084e+02, threshold=1.302e+02, percent-clipped=0.0
+2024-07-29 01:01:56,363 INFO [train.py:1114] (2/4) Epoch 17, batch 2850, loss[loss=0.1558, simple_loss=0.2465, pruned_loss=0.03253, over 4959.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2677, pruned_loss=0.04386, over 935607.15 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:02:20,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=221901.33333333334, ans=0.125
+2024-07-29 01:02:26,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221914.66666666666, ans=0.125
+2024-07-29 01:02:32,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221928.0, ans=0.1
+2024-07-29 01:02:33,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=221928.0, ans=0.2
+2024-07-29 01:02:35,104 INFO [train.py:1114] (2/4) Epoch 17, batch 2900, loss[loss=0.1864, simple_loss=0.279, pruned_loss=0.04689, over 4828.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2681, pruned_loss=0.04384, over 939353.63 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:02:38,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.18 vs. limit=15.0
+2024-07-29 01:02:45,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=221954.66666666666, ans=0.125
+2024-07-29 01:02:47,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=221954.66666666666, ans=0.2
+2024-07-29 01:02:51,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=221968.0, ans=0.0
+2024-07-29 01:02:58,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221981.33333333334, ans=0.1
+2024-07-29 01:03:01,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.60 vs. limit=22.5
+2024-07-29 01:03:07,963 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.542e+01 6.312e+01 7.539e+01 1.199e+02, threshold=1.262e+02, percent-clipped=0.0
+2024-07-29 01:03:10,782 INFO [train.py:1114] (2/4) Epoch 17, batch 2950, loss[loss=0.1927, simple_loss=0.2692, pruned_loss=0.05807, over 4713.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2668, pruned_loss=0.04338, over 938323.07 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:03:11,267 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.49 vs. limit=8.0
+2024-07-29 01:03:17,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.56 vs. limit=15.0
+2024-07-29 01:03:21,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=222021.33333333334, ans=0.5
+2024-07-29 01:03:25,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=222034.66666666666, ans=0.0
+2024-07-29 01:03:44,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=222074.66666666666, ans=0.125
+2024-07-29 01:03:44,745 INFO [train.py:1114] (2/4) Epoch 17, batch 3000, loss[loss=0.1704, simple_loss=0.2727, pruned_loss=0.034, over 4757.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2664, pruned_loss=0.04315, over 937249.54 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:03:44,745 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-29 01:04:05,746 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.1635, simple_loss=0.2655, pruned_loss=0.03068, over 944034.00 frames.
+2024-07-29 01:04:05,747 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-29 01:04:27,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222114.66666666666, ans=0.1
+2024-07-29 01:04:37,611 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.724e+01 6.244e+01 7.233e+01 1.089e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 01:04:40,430 INFO [train.py:1114] (2/4) Epoch 17, batch 3050, loss[loss=0.1859, simple_loss=0.2703, pruned_loss=0.0508, over 4638.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2668, pruned_loss=0.04323, over 936790.91 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:04:40,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=222141.33333333334, ans=0.2
+2024-07-29 01:04:49,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=222154.66666666666, ans=0.2
+2024-07-29 01:05:02,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=222181.33333333334, ans=0.0
+2024-07-29 01:05:05,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=222181.33333333334, ans=0.125
+2024-07-29 01:05:09,834 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=15.0
+2024-07-29 01:05:11,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=222194.66666666666, ans=0.125
+2024-07-29 01:05:16,134 INFO [train.py:1114] (2/4) Epoch 17, batch 3100, loss[loss=0.1945, simple_loss=0.2975, pruned_loss=0.0458, over 4611.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2669, pruned_loss=0.04296, over 937895.29 frames. ], batch size: 16, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:05:29,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=222221.33333333334, ans=0.025
+2024-07-29 01:05:30,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=222221.33333333334, ans=0.05
+2024-07-29 01:05:33,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222234.66666666666, ans=0.1
+2024-07-29 01:05:35,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222234.66666666666, ans=0.125
+2024-07-29 01:05:36,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=222234.66666666666, ans=0.2
+2024-07-29 01:05:39,473 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=222234.66666666666, ans=0.0
+2024-07-29 01:05:44,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=222248.0, ans=0.125
+2024-07-29 01:05:46,203 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=15.0
+2024-07-29 01:05:51,829 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 5.691e+01 6.608e+01 7.636e+01 1.029e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-29 01:05:54,536 INFO [train.py:1114] (2/4) Epoch 17, batch 3150, loss[loss=0.1734, simple_loss=0.2651, pruned_loss=0.04081, over 4617.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04284, over 937921.70 frames. ], batch size: 17, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:05:54,675 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:06:20,187 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.46 vs. limit=22.5
+2024-07-29 01:06:24,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=222328.0, ans=0.025
+2024-07-29 01:06:25,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=222328.0, ans=0.2
+2024-07-29 01:06:29,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=222341.33333333334, ans=0.125
+2024-07-29 01:06:29,741 INFO [train.py:1114] (2/4) Epoch 17, batch 3200, loss[loss=0.1583, simple_loss=0.2551, pruned_loss=0.0308, over 4814.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2661, pruned_loss=0.04242, over 939559.44 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:06:29,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222341.33333333334, ans=0.125
+2024-07-29 01:06:48,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=222368.0, ans=0.05
+2024-07-29 01:07:02,020 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.614e+01 6.191e+01 6.817e+01 1.066e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 01:07:02,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=15.0
+2024-07-29 01:07:05,136 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.56 vs. limit=15.0
+2024-07-29 01:07:05,959 INFO [train.py:1114] (2/4) Epoch 17, batch 3250, loss[loss=0.1504, simple_loss=0.257, pruned_loss=0.02191, over 4933.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2658, pruned_loss=0.04208, over 940442.82 frames. ], batch size: 14, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:07:08,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=222408.0, ans=0.125
+2024-07-29 01:07:11,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=222421.33333333334, ans=0.0
+2024-07-29 01:07:13,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=222421.33333333334, ans=0.0
+2024-07-29 01:07:16,770 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.64 vs. limit=15.0
+2024-07-29 01:07:21,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=222434.66666666666, ans=0.2
+2024-07-29 01:07:33,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=222461.33333333334, ans=0.0
+2024-07-29 01:07:39,396 INFO [train.py:1114] (2/4) Epoch 17, batch 3300, loss[loss=0.1758, simple_loss=0.2738, pruned_loss=0.03893, over 4676.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2646, pruned_loss=0.04201, over 940765.36 frames. ], batch size: 19, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:08:05,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=222514.66666666666, ans=0.0
+2024-07-29 01:08:07,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222528.0, ans=0.125
+2024-07-29 01:08:08,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222528.0, ans=0.1
+2024-07-29 01:08:13,581 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.660e+01 6.307e+01 7.257e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-29 01:08:15,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=222541.33333333334, ans=0.05
+2024-07-29 01:08:16,323 INFO [train.py:1114] (2/4) Epoch 17, batch 3350, loss[loss=0.2162, simple_loss=0.2944, pruned_loss=0.06901, over 4648.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2662, pruned_loss=0.0432, over 939034.83 frames. ], batch size: 17, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:08:24,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=222554.66666666666, ans=0.125
+2024-07-29 01:08:25,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=222554.66666666666, ans=0.025
+2024-07-29 01:08:29,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=222568.0, ans=0.125
+2024-07-29 01:08:33,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=222568.0, ans=0.5
+2024-07-29 01:08:42,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=222581.33333333334, ans=0.2
+2024-07-29 01:08:51,963 INFO [train.py:1114] (2/4) Epoch 17, batch 3400, loss[loss=0.149, simple_loss=0.2253, pruned_loss=0.03634, over 4794.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2651, pruned_loss=0.04243, over 937758.68 frames. ], batch size: 11, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:08:58,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=222608.0, ans=0.125
+2024-07-29 01:09:00,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222608.0, ans=0.1
+2024-07-29 01:09:03,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=222621.33333333334, ans=0.0
+2024-07-29 01:09:05,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=222621.33333333334, ans=0.125
+2024-07-29 01:09:20,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222648.0, ans=0.1
+2024-07-29 01:09:20,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222648.0, ans=0.125
+2024-07-29 01:09:25,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. limit=10.0
+2024-07-29 01:09:25,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=222661.33333333334, ans=0.0
+2024-07-29 01:09:27,091 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.040e+01 6.022e+01 6.843e+01 8.395e+01 1.350e+02, threshold=1.369e+02, percent-clipped=1.0
+2024-07-29 01:09:29,254 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:09:29,760 INFO [train.py:1114] (2/4) Epoch 17, batch 3450, loss[loss=0.2314, simple_loss=0.3171, pruned_loss=0.07283, over 4669.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2661, pruned_loss=0.04289, over 937989.09 frames. ], batch size: 19, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:09:33,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222674.66666666666, ans=0.1
+2024-07-29 01:09:35,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.75 vs. limit=10.0
+2024-07-29 01:09:40,806 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0
+2024-07-29 01:09:44,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=222701.33333333334, ans=0.0
+2024-07-29 01:09:45,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=222701.33333333334, ans=0.0
+2024-07-29 01:09:45,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=222701.33333333334, ans=0.07
+2024-07-29 01:09:47,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222701.33333333334, ans=0.125
+2024-07-29 01:09:50,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=222714.66666666666, ans=0.125
+2024-07-29 01:09:55,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=222728.0, ans=0.05
+2024-07-29 01:10:02,873 INFO [train.py:1114] (2/4) Epoch 17, batch 3500, loss[loss=0.1642, simple_loss=0.2497, pruned_loss=0.03939, over 4933.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2651, pruned_loss=0.04223, over 938931.06 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0
+2024-07-29 01:10:04,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.73 vs. limit=15.0
+2024-07-29 01:10:18,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222768.0, ans=0.125
+2024-07-29 01:10:35,612 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.391e+01 6.097e+01 6.632e+01 8.722e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-29 01:10:38,347 INFO [train.py:1114] (2/4) Epoch 17, batch 3550, loss[loss=0.161, simple_loss=0.2548, pruned_loss=0.03359, over 4661.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.266, pruned_loss=0.04283, over 939205.15 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:10:47,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222821.33333333334, ans=0.125
+2024-07-29 01:10:58,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=222834.66666666666, ans=10.0
+2024-07-29 01:11:03,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222848.0, ans=0.1
+2024-07-29 01:11:05,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=222848.0, ans=0.2
+2024-07-29 01:11:07,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222848.0, ans=0.1
+2024-07-29 01:11:14,888 INFO [train.py:1114] (2/4) Epoch 17, batch 3600, loss[loss=0.1554, simple_loss=0.2471, pruned_loss=0.03188, over 4970.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2656, pruned_loss=0.04284, over 941107.68 frames. ], batch size: 13, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:11:15,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=222874.66666666666, ans=0.125
+2024-07-29 01:11:17,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222874.66666666666, ans=0.125
+2024-07-29 01:11:33,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222901.33333333334, ans=0.1
+2024-07-29 01:11:36,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=222914.66666666666, ans=0.0
+2024-07-29 01:11:40,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222914.66666666666, ans=0.125
+2024-07-29 01:11:47,276 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.818e+01 6.519e+01 7.348e+01 1.094e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-29 01:11:50,097 INFO [train.py:1114] (2/4) Epoch 17, batch 3650, loss[loss=0.2022, simple_loss=0.2874, pruned_loss=0.05853, over 4906.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2651, pruned_loss=0.04233, over 941398.84 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:11:52,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.99 vs. limit=15.0
+2024-07-29 01:12:08,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.70 vs. limit=12.0
+2024-07-29 01:12:26,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=222994.66666666666, ans=0.125
+2024-07-29 01:12:27,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.44 vs. limit=15.0
+2024-07-29 01:12:33,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=223008.0, ans=0.0
+2024-07-29 01:12:34,092 INFO [train.py:1114] (2/4) Epoch 17, batch 3700, loss[loss=0.1877, simple_loss=0.2654, pruned_loss=0.05497, over 4934.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2648, pruned_loss=0.04169, over 942147.47 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:12:45,978 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.51 vs. limit=22.5
+2024-07-29 01:12:47,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.63 vs. limit=22.5
+2024-07-29 01:12:50,612 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.01 vs. limit=15.0
+2024-07-29 01:13:04,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.08 vs. limit=15.0
+2024-07-29 01:13:05,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=223061.33333333334, ans=0.125
+2024-07-29 01:13:07,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=223061.33333333334, ans=0.0
+2024-07-29 01:13:09,041 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.690e+01 6.166e+01 6.901e+01 9.277e+01, threshold=1.233e+02, percent-clipped=0.0
+2024-07-29 01:13:14,303 INFO [train.py:1114] (2/4) Epoch 17, batch 3750, loss[loss=0.1413, simple_loss=0.2231, pruned_loss=0.02981, over 4805.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2642, pruned_loss=0.04179, over 943272.45 frames. ], batch size: 11, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:13:27,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=223088.0, ans=0.125
+2024-07-29 01:13:54,183 INFO [train.py:1114] (2/4) Epoch 17, batch 3800, loss[loss=0.1797, simple_loss=0.2883, pruned_loss=0.03551, over 4813.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2644, pruned_loss=0.04194, over 941262.53 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:14:14,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223168.0, ans=0.0
+2024-07-29 01:14:25,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=223194.66666666666, ans=0.125
+2024-07-29 01:14:26,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223194.66666666666, ans=0.125
+2024-07-29 01:14:27,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223194.66666666666, ans=0.1
+2024-07-29 01:14:28,211 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.538e+01 6.338e+01 7.177e+01 1.035e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-29 01:14:30,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0
+2024-07-29 01:14:31,011 INFO [train.py:1114] (2/4) Epoch 17, batch 3850, loss[loss=0.1866, simple_loss=0.2829, pruned_loss=0.04515, over 4618.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2647, pruned_loss=0.04167, over 942678.41 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:14:31,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223208.0, ans=0.125
+2024-07-29 01:14:33,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=223208.0, ans=0.125
+2024-07-29 01:14:47,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=223234.66666666666, ans=0.2
+2024-07-29 01:15:02,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=223261.33333333334, ans=0.0
+2024-07-29 01:15:05,387 INFO [train.py:1114] (2/4) Epoch 17, batch 3900, loss[loss=0.1592, simple_loss=0.2493, pruned_loss=0.03456, over 4812.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.265, pruned_loss=0.04176, over 942787.87 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:15:15,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=223288.0, ans=0.0
+2024-07-29 01:15:34,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.17 vs. limit=22.5
+2024-07-29 01:15:42,541 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.533e+01 5.996e+01 6.814e+01 1.002e+02, threshold=1.199e+02, percent-clipped=0.0
+2024-07-29 01:15:44,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223341.33333333334, ans=0.125
+2024-07-29 01:15:45,484 INFO [train.py:1114] (2/4) Epoch 17, batch 3950, loss[loss=0.2003, simple_loss=0.2847, pruned_loss=0.05797, over 4832.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2654, pruned_loss=0.04211, over 944650.64 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:16:00,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=223354.66666666666, ans=0.025
+2024-07-29 01:16:06,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.27 vs. limit=22.5
+2024-07-29 01:16:27,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0
+2024-07-29 01:16:48,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=223381.33333333334, ans=15.0
+2024-07-29 01:17:18,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=223394.66666666666, ans=0.125
+2024-07-29 01:17:20,064 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=223394.66666666666, ans=0.125
+2024-07-29 01:17:23,526 INFO [train.py:1114] (2/4) Epoch 17, batch 4000, loss[loss=0.1687, simple_loss=0.2456, pruned_loss=0.04587, over 4787.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2661, pruned_loss=0.04305, over 941308.85 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:17:47,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=223421.33333333334, ans=0.0
+2024-07-29 01:17:54,429 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:17:54,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.02 vs. limit=22.5
+2024-07-29 01:18:19,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=223461.33333333334, ans=0.0
+2024-07-29 01:18:25,022 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.438e+01 5.691e+01 6.092e+01 6.901e+01 9.634e+01, threshold=1.218e+02, percent-clipped=0.0
+2024-07-29 01:18:33,097 INFO [train.py:1114] (2/4) Epoch 17, batch 4050, loss[loss=0.2285, simple_loss=0.3009, pruned_loss=0.07809, over 3180.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2654, pruned_loss=0.04252, over 940028.92 frames. ], batch size: 35, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:18:33,433 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.97 vs. limit=12.0
+2024-07-29 01:18:41,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223474.66666666666, ans=0.1
+2024-07-29 01:18:46,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=223474.66666666666, ans=0.125
+2024-07-29 01:19:23,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=223501.33333333334, ans=0.0
+2024-07-29 01:19:27,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=223514.66666666666, ans=0.0
+2024-07-29 01:19:27,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.70 vs. limit=15.0
+2024-07-29 01:25:14,254 INFO [train.py:1114] (2/4) Epoch 17, batch 4100, loss[loss=0.1832, simple_loss=0.2714, pruned_loss=0.04752, over 4896.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2654, pruned_loss=0.04281, over 938812.53 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 64.0
+2024-07-29 01:25:17,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.45 vs. limit=22.5
+2024-07-29 01:26:07,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=223554.66666666666, ans=0.125
+2024-07-29 01:26:15,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=223554.66666666666, ans=0.0
+2024-07-29 01:26:27,509 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.23 vs. limit=22.5
+2024-07-29 01:26:32,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=223568.0, ans=0.125
+2024-07-29 01:27:45,363 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.85 vs. limit=15.0
+2024-07-29 01:28:00,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223594.66666666666, ans=0.125
+2024-07-29 01:28:22,403 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.689e+01 6.101e+01 7.334e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 01:28:28,312 INFO [train.py:1114] (2/4) Epoch 17, batch 4150, loss[loss=0.148, simple_loss=0.2517, pruned_loss=0.02215, over 4822.00 frames. ], tot_loss[loss=0.175, simple_loss=0.265, pruned_loss=0.04254, over 938095.74 frames. ], batch size: 13, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:28:41,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=223608.0, ans=0.125
+2024-07-29 01:29:40,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=223634.66666666666, ans=0.0
+2024-07-29 01:29:50,520 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.85 vs. limit=15.0
+2024-07-29 01:30:09,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=223648.0, ans=0.035
+2024-07-29 01:30:22,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=223648.0, ans=0.2
+2024-07-29 01:30:22,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=223648.0, ans=0.2
+2024-07-29 01:30:23,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=223648.0, ans=0.025
+2024-07-29 01:30:27,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223661.33333333334, ans=0.1
+2024-07-29 01:30:39,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=223661.33333333334, ans=0.2
+2024-07-29 01:30:39,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223661.33333333334, ans=0.1
+2024-07-29 01:30:48,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=223674.66666666666, ans=0.07
+2024-07-29 01:30:48,826 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.35 vs. limit=15.0
+2024-07-29 01:30:48,972 INFO [train.py:1114] (2/4) Epoch 17, batch 4200, loss[loss=0.1997, simple_loss=0.2961, pruned_loss=0.05162, over 4882.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2664, pruned_loss=0.04305, over 939748.62 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:30:57,934 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=22.5
+2024-07-29 01:31:23,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=223688.0, ans=0.125
+2024-07-29 01:31:45,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=223688.0, ans=0.125
+2024-07-29 01:32:05,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=223701.33333333334, ans=0.0
+2024-07-29 01:32:13,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=223701.33333333334, ans=0.0
+2024-07-29 01:32:29,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=223714.66666666666, ans=0.0
+2024-07-29 01:33:18,862 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.78 vs. limit=22.5
+2024-07-29 01:33:25,473 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.649e+01 6.155e+01 7.132e+01 1.062e+02, threshold=1.231e+02, percent-clipped=0.0
+2024-07-29 01:33:32,168 INFO [train.py:1114] (2/4) Epoch 17, batch 4250, loss[loss=0.1486, simple_loss=0.2328, pruned_loss=0.03226, over 4636.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.04246, over 940263.62 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0
+2024-07-29 01:33:48,216 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:33:49,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223754.66666666666, ans=0.125
+2024-07-29 01:34:33,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=223768.0, ans=0.02
+2024-07-29 01:34:56,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=223781.33333333334, ans=0.125
+2024-07-29 01:35:27,636 INFO [train.py:1114] (2/4) Epoch 17, batch 4300, loss[loss=0.1671, simple_loss=0.2632, pruned_loss=0.03546, over 4756.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2665, pruned_loss=0.04263, over 939811.07 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:35:47,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.22 vs. limit=12.0
+2024-07-29 01:35:50,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=223808.0, ans=0.125
+2024-07-29 01:35:51,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223808.0, ans=0.1
+2024-07-29 01:36:26,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223834.66666666666, ans=0.1
+2024-07-29 01:36:27,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=223834.66666666666, ans=0.125
+2024-07-29 01:36:27,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=223834.66666666666, ans=0.025
+2024-07-29 01:36:29,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=223834.66666666666, ans=0.5
+2024-07-29 01:36:31,859 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=223834.66666666666, ans=0.0
+2024-07-29 01:38:17,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223848.0, ans=0.125
+2024-07-29 01:38:23,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.92 vs. limit=15.0
+2024-07-29 01:38:25,970 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.646e+01 6.421e+01 7.211e+01 1.436e+02, threshold=1.284e+02, percent-clipped=1.0
+2024-07-29 01:38:30,646 INFO [train.py:1114] (2/4) Epoch 17, batch 4350, loss[loss=0.1445, simple_loss=0.2391, pruned_loss=0.02493, over 4763.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2664, pruned_loss=0.04271, over 940407.62 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:38:35,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223874.66666666666, ans=0.1
+2024-07-29 01:38:35,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=223874.66666666666, ans=0.125
+2024-07-29 01:38:35,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=223874.66666666666, ans=0.0
+2024-07-29 01:38:36,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=223874.66666666666, ans=0.125
+2024-07-29 01:38:42,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223874.66666666666, ans=0.1
+2024-07-29 01:38:44,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=223888.0, ans=0.125
+2024-07-29 01:39:06,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=223901.33333333334, ans=0.0
+2024-07-29 01:39:24,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=223928.0, ans=0.025
+2024-07-29 01:39:35,677 INFO [train.py:1114] (2/4) Epoch 17, batch 4400, loss[loss=0.1574, simple_loss=0.2573, pruned_loss=0.02872, over 4811.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2656, pruned_loss=0.04222, over 940195.73 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:39:35,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=223941.33333333334, ans=0.125
+2024-07-29 01:40:10,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=223954.66666666666, ans=0.125
+2024-07-29 01:40:28,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=223968.0, ans=0.125
+2024-07-29 01:40:49,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=223994.66666666666, ans=0.0
+2024-07-29 01:40:54,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.541e+01 6.235e+01 6.902e+01 1.046e+02, threshold=1.247e+02, percent-clipped=0.0
+2024-07-29 01:40:59,855 INFO [train.py:1114] (2/4) Epoch 17, batch 4450, loss[loss=0.1691, simple_loss=0.2442, pruned_loss=0.04707, over 4941.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2664, pruned_loss=0.04256, over 938418.70 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:41:01,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=224008.0, ans=0.125
+2024-07-29 01:41:27,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=224021.33333333334, ans=0.125
+2024-07-29 01:41:28,538 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.01 vs. limit=15.0
+2024-07-29 01:41:43,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=224021.33333333334, ans=0.2
+2024-07-29 01:41:51,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224034.66666666666, ans=0.125
+2024-07-29 01:42:13,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=224034.66666666666, ans=0.125
+2024-07-29 01:42:41,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=224061.33333333334, ans=10.0
+2024-07-29 01:42:57,261 INFO [train.py:1114] (2/4) Epoch 17, batch 4500, loss[loss=0.1722, simple_loss=0.268, pruned_loss=0.03818, over 4732.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2665, pruned_loss=0.04228, over 938079.85 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:43:00,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=224074.66666666666, ans=0.0
+2024-07-29 01:43:10,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224088.0, ans=0.1
+2024-07-29 01:43:23,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=224101.33333333334, ans=0.125
+2024-07-29 01:43:44,473 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=12.0
+2024-07-29 01:43:51,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.26 vs. limit=15.0
+2024-07-29 01:43:52,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.08 vs. limit=15.0
+2024-07-29 01:43:53,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=224128.0, ans=0.125
+2024-07-29 01:43:54,427 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.696e+01 6.215e+01 7.468e+01 9.739e+01, threshold=1.243e+02, percent-clipped=0.0
+2024-07-29 01:43:56,645 INFO [train.py:1114] (2/4) Epoch 17, batch 4550, loss[loss=0.1764, simple_loss=0.2786, pruned_loss=0.03704, over 4893.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2667, pruned_loss=0.0423, over 939819.17 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:44:01,826 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.07 vs. limit=15.0
+2024-07-29 01:44:13,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.68 vs. limit=15.0
+2024-07-29 01:44:24,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.25 vs. limit=22.5
+2024-07-29 01:44:26,235 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.42 vs. limit=22.5
+2024-07-29 01:44:28,186 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.22 vs. limit=10.0
+2024-07-29 01:44:29,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=224194.66666666666, ans=0.125
+2024-07-29 01:44:31,488 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.73 vs. limit=10.0
+2024-07-29 01:44:33,968 INFO [train.py:1114] (2/4) Epoch 17, batch 4600, loss[loss=0.151, simple_loss=0.2484, pruned_loss=0.02679, over 4473.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2655, pruned_loss=0.04175, over 938165.87 frames. ], batch size: 21, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:44:34,274 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.94 vs. limit=6.0
+2024-07-29 01:44:38,235 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:44:38,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=224208.0, ans=0.2
+2024-07-29 01:44:41,283 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.71 vs. limit=22.5
+2024-07-29 01:44:55,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.34 vs. limit=10.0
+2024-07-29 01:44:59,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224248.0, ans=0.1
+2024-07-29 01:45:02,967 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:45:13,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=224261.33333333334, ans=0.0
+2024-07-29 01:45:17,107 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.605e+01 6.267e+01 6.922e+01 9.428e+01, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 01:45:19,073 INFO [train.py:1114] (2/4) Epoch 17, batch 4650, loss[loss=0.2047, simple_loss=0.2965, pruned_loss=0.05649, over 4863.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.266, pruned_loss=0.04196, over 939730.84 frames. ], batch size: 16, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:45:19,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=224274.66666666666, ans=0.0
+2024-07-29 01:45:26,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=224274.66666666666, ans=0.025
+2024-07-29 01:46:01,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=224314.66666666666, ans=0.125
+2024-07-29 01:46:15,069 INFO [train.py:1114] (2/4) Epoch 17, batch 4700, loss[loss=0.1494, simple_loss=0.2225, pruned_loss=0.03815, over 4701.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2648, pruned_loss=0.04192, over 936447.72 frames. ], batch size: 11, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:46:31,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=224368.0, ans=0.0
+2024-07-29 01:46:52,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=224394.66666666666, ans=0.2
+2024-07-29 01:46:53,332 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.618e+01 6.268e+01 7.126e+01 1.011e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-29 01:46:58,183 INFO [train.py:1114] (2/4) Epoch 17, batch 4750, loss[loss=0.1875, simple_loss=0.2705, pruned_loss=0.05224, over 4463.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2653, pruned_loss=0.04223, over 934556.30 frames. ], batch size: 21, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:46:58,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=224408.0, ans=0.0
+2024-07-29 01:47:27,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=224434.66666666666, ans=0.125
+2024-07-29 01:47:44,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=224434.66666666666, ans=0.025
+2024-07-29 01:47:47,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=224448.0, ans=0.025
+2024-07-29 01:48:01,697 INFO [train.py:1114] (2/4) Epoch 17, batch 4800, loss[loss=0.1546, simple_loss=0.246, pruned_loss=0.0316, over 4692.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2653, pruned_loss=0.04254, over 932146.98 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:48:09,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=224488.0, ans=0.125
+2024-07-29 01:48:31,232 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:48:41,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=224514.66666666666, ans=0.2
+2024-07-29 01:48:48,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=224528.0, ans=0.015
+2024-07-29 01:48:48,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=224528.0, ans=0.0
+2024-07-29 01:48:49,138 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.61 vs. limit=12.0
+2024-07-29 01:48:49,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.702e+01 6.152e+01 7.356e+01 9.741e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 01:48:53,603 INFO [train.py:1114] (2/4) Epoch 17, batch 4850, loss[loss=0.1797, simple_loss=0.2754, pruned_loss=0.04197, over 4735.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2668, pruned_loss=0.04329, over 932410.41 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:48:56,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=224541.33333333334, ans=0.09899494936611666
+2024-07-29 01:49:22,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=224581.33333333334, ans=0.09899494936611666
+2024-07-29 01:49:22,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=224581.33333333334, ans=0.0
+2024-07-29 01:49:35,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=224594.66666666666, ans=0.125
+2024-07-29 01:49:36,885 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.09 vs. limit=22.5
+2024-07-29 01:49:40,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=224594.66666666666, ans=0.2
+2024-07-29 01:49:42,570 INFO [train.py:1114] (2/4) Epoch 17, batch 4900, loss[loss=0.1778, simple_loss=0.2727, pruned_loss=0.04144, over 4766.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2665, pruned_loss=0.04328, over 934303.43 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:49:49,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=224621.33333333334, ans=0.125
+2024-07-29 01:49:56,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=224634.66666666666, ans=0.125
+2024-07-29 01:49:56,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224634.66666666666, ans=0.1
+2024-07-29 01:50:29,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=224661.33333333334, ans=0.2
+2024-07-29 01:50:38,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=224661.33333333334, ans=0.0
+2024-07-29 01:50:48,850 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.722e+01 6.197e+01 6.933e+01 1.189e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-29 01:50:49,263 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0
+2024-07-29 01:51:00,385 INFO [train.py:1114] (2/4) Epoch 17, batch 4950, loss[loss=0.2269, simple_loss=0.3109, pruned_loss=0.07145, over 3640.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2673, pruned_loss=0.04376, over 931430.81 frames. ], batch size: 35, lr: 4.39e-03, grad_scale: 32.0
+2024-07-29 01:51:02,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224674.66666666666, ans=0.125
+2024-07-29 01:51:02,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs.
limit=15.0 +2024-07-29 01:51:04,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=224674.66666666666, ans=0.0 +2024-07-29 01:51:04,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=12.0 +2024-07-29 01:51:12,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=224688.0, ans=0.125 +2024-07-29 01:51:25,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=224701.33333333334, ans=0.0 +2024-07-29 01:51:25,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.89 vs. limit=15.0 +2024-07-29 01:51:32,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=224714.66666666666, ans=0.0 +2024-07-29 01:51:33,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=224714.66666666666, ans=0.0 +2024-07-29 01:51:37,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224728.0, ans=0.0 +2024-07-29 01:51:37,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=224728.0, ans=0.5 +2024-07-29 01:51:42,476 INFO [train.py:1114] (2/4) Epoch 17, batch 5000, loss[loss=0.1663, simple_loss=0.2674, pruned_loss=0.03265, over 4663.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2667, pruned_loss=0.04338, over 935251.28 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:51:43,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=224741.33333333334, ans=0.0 +2024-07-29 01:51:46,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=224741.33333333334, ans=0.2 +2024-07-29 01:51:55,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=224754.66666666666, ans=0.025 +2024-07-29 01:52:10,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224781.33333333334, ans=0.125 +2024-07-29 01:52:24,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=224781.33333333334, ans=0.125 +2024-07-29 01:52:35,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=224794.66666666666, ans=0.125 +2024-07-29 01:52:47,969 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.582e+01 6.302e+01 7.015e+01 1.020e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 01:52:50,213 INFO [train.py:1114] (2/4) Epoch 17, batch 5050, loss[loss=0.191, simple_loss=0.279, pruned_loss=0.05146, over 4864.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2666, pruned_loss=0.04333, over 937638.38 frames. 
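
Editor's note: each `train.py:1114` entry reports both the current batch's loss ("over N frames") and a `tot_loss` aggregated over roughly the last ~930k frames. One way to reproduce that kind of frame-weighted running average is sketched below; this is an illustration under assumptions (exponential decay, hypothetical class name), not the recipe's actual bookkeeping:

```python
class FrameWeightedAverage:
    """Running loss average weighted by frames per batch, with exponential
    decay so old batches gradually drop out of the statistic."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.loss_sum = 0.0   # decayed sum of loss * frames
        self.frames = 0.0     # decayed sum of frames

    def update(self, loss: float, num_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

avg = FrameWeightedAverage()
for loss, frames in [(0.1722, 4732), (0.1764, 4893), (0.151, 4473)]:
    avg.update(loss, frames)
print(f"tot_loss={avg.value:.4f} over {avg.frames:.2f} frames")
```
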
], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:52:53,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=224808.0, ans=0.2 +2024-07-29 01:52:57,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=224821.33333333334, ans=0.125 +2024-07-29 01:53:17,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=224848.0, ans=0.125 +2024-07-29 01:53:32,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=224861.33333333334, ans=0.035 +2024-07-29 01:53:34,566 INFO [train.py:1114] (2/4) Epoch 17, batch 5100, loss[loss=0.1614, simple_loss=0.2537, pruned_loss=0.03457, over 4775.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2667, pruned_loss=0.0431, over 935261.54 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:53:35,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224874.66666666666, ans=0.125 +2024-07-29 01:53:55,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=224901.33333333334, ans=0.2 +2024-07-29 01:54:07,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224928.0, ans=0.125 +2024-07-29 01:54:08,806 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:54:10,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=224928.0, ans=0.0 +2024-07-29 01:54:11,314 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.725e+01 6.244e+01 7.275e+01 1.073e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:54:13,342 INFO [train.py:1114] (2/4) Epoch 17, batch 5150, loss[loss=0.2107, simple_loss=0.304, pruned_loss=0.05875, over 4858.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2672, pruned_loss=0.04313, over 936456.41 frames. ], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:19,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=224954.66666666666, ans=0.125 +2024-07-29 01:54:23,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224954.66666666666, ans=0.125 +2024-07-29 01:54:26,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224968.0, ans=0.125 +2024-07-29 01:54:48,428 INFO [train.py:1114] (2/4) Epoch 17, batch 5200, loss[loss=0.1683, simple_loss=0.2634, pruned_loss=0.03654, over 4663.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2665, pruned_loss=0.04244, over 936677.75 frames. 
], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:52,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=225008.0, ans=0.5 +2024-07-29 01:54:58,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225021.33333333334, ans=0.1 +2024-07-29 01:55:01,733 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-07-29 01:55:18,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=225048.0, ans=0.125 +2024-07-29 01:55:37,216 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.840e+01 6.748e+01 7.869e+01 1.303e+02, threshold=1.350e+02, percent-clipped=1.0 +2024-07-29 01:55:39,368 INFO [train.py:1114] (2/4) Epoch 17, batch 5250, loss[loss=0.1788, simple_loss=0.2594, pruned_loss=0.04911, over 4896.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04253, over 936183.77 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:55:45,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=225074.66666666666, ans=0.09899494936611666 +2024-07-29 01:55:47,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=225088.0, ans=0.125 +2024-07-29 01:56:07,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225101.33333333334, ans=0.1 +2024-07-29 01:56:20,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=225128.0, ans=10.0 +2024-07-29 01:56:22,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.65 vs. limit=10.0 +2024-07-29 01:56:25,591 INFO [train.py:1114] (2/4) Epoch 17, batch 5300, loss[loss=0.1824, simple_loss=0.2751, pruned_loss=0.04488, over 4633.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2663, pruned_loss=0.04277, over 934554.37 frames. 
], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:56:32,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=225154.66666666666, ans=0.125 +2024-07-29 01:56:39,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=225168.0, ans=0.0 +2024-07-29 01:56:49,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=225181.33333333334, ans=0.125 +2024-07-29 01:56:51,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=225181.33333333334, ans=0.125 +2024-07-29 01:56:57,572 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.754e+01 6.386e+01 7.426e+01 1.100e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 01:56:59,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=225208.0, ans=0.0 +2024-07-29 01:56:59,722 INFO [train.py:1114] (2/4) Epoch 17, batch 5350, loss[loss=0.1438, simple_loss=0.225, pruned_loss=0.0313, over 4496.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2669, pruned_loss=0.0429, over 936457.24 frames. ], batch size: 10, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:01,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=225208.0, ans=0.125 +2024-07-29 01:57:02,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.36 vs. limit=22.5 +2024-07-29 01:57:13,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=225234.66666666666, ans=0.0 +2024-07-29 01:57:14,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=225234.66666666666, ans=0.125 +2024-07-29 01:57:16,661 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:57:18,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=225234.66666666666, ans=0.2 +2024-07-29 01:57:20,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=225248.0, ans=0.0 +2024-07-29 01:57:20,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=225248.0, ans=0.125 +2024-07-29 01:57:26,882 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. 
limit=15.0 +2024-07-29 01:57:29,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=225261.33333333334, ans=0.125 +2024-07-29 01:57:30,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225261.33333333334, ans=0.1 +2024-07-29 01:57:30,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=225261.33333333334, ans=0.2 +2024-07-29 01:57:30,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=225261.33333333334, ans=0.125 +2024-07-29 01:57:34,861 INFO [train.py:1114] (2/4) Epoch 17, batch 5400, loss[loss=0.1945, simple_loss=0.2783, pruned_loss=0.0554, over 4376.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2677, pruned_loss=0.04368, over 930815.81 frames. ], batch size: 26, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:48,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225288.0, ans=0.1 +2024-07-29 01:57:48,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=225288.0, ans=0.2 +2024-07-29 01:57:57,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=225314.66666666666, ans=0.025 +2024-07-29 01:58:04,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225328.0, ans=0.125 +2024-07-29 01:58:08,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225328.0, ans=0.125 +2024-07-29 01:58:09,523 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+01 5.738e+01 6.198e+01 6.838e+01 9.669e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 01:58:10,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=225328.0, ans=0.125 +2024-07-29 01:58:11,781 INFO [train.py:1114] (2/4) Epoch 17, batch 5450, loss[loss=0.1711, simple_loss=0.2559, pruned_loss=0.04314, over 4705.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2663, pruned_loss=0.04311, over 933402.64 frames. 
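
Editor's note: the `Whitening` lines compare a per-module "how non-white are these activations" statistic against a limit, and the module only pushes back when the metric exceeds it. The exact statistic lives in `scaling.py`; as one plausible proxy (an assumption, not the actual formula), the eigenvalue spread of the channel covariance behaves similarly: it is 1.0 for perfectly white features and grows as a few directions dominate.

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns mean(lambda^2) / mean(lambda)^2
    over eigenvalues of the channel covariance; equals 1.0 iff all
    eigenvalues are equal (features white up to an overall scale)."""
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]           # (C, C) channel covariance
    eigs = torch.linalg.eigvalsh(cov)      # real, ascending eigenvalues
    return float((eigs ** 2).mean() / eigs.mean() ** 2)

white = torch.randn(4000, 384)                   # ~white: metric stays small
skewed = white * torch.linspace(0.1, 3.0, 384)   # a few loud channels
print(whitening_metric(white), whitening_metric(skewed))
```
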
], batch size: 11, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:15,975 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=225341.33333333334, ans=0.025 +2024-07-29 01:58:24,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=225368.0, ans=0.125 +2024-07-29 01:58:26,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=225368.0, ans=0.025 +2024-07-29 01:58:33,827 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:58:34,392 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:58:42,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=225394.66666666666, ans=0.0 +2024-07-29 01:58:45,721 INFO [train.py:1114] (2/4) Epoch 17, batch 5500, loss[loss=0.2185, simple_loss=0.2947, pruned_loss=0.07114, over 4211.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2657, pruned_loss=0.04321, over 930688.80 frames. ], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:47,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=225408.0, ans=0.2 +2024-07-29 01:58:52,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=225421.33333333334, ans=0.0 +2024-07-29 01:58:55,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=225421.33333333334, ans=0.0 +2024-07-29 01:58:56,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.73 vs. limit=15.0 +2024-07-29 01:59:01,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=225434.66666666666, ans=0.125 +2024-07-29 01:59:11,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=225448.0, ans=0.5 +2024-07-29 01:59:16,900 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.683e+01 6.448e+01 7.775e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 01:59:17,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225461.33333333334, ans=0.125 +2024-07-29 01:59:18,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=225474.66666666666, ans=0.125 +2024-07-29 01:59:18,938 INFO [train.py:1114] (2/4) Epoch 17, batch 5550, loss[loss=0.1753, simple_loss=0.2572, pruned_loss=0.04671, over 4698.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2654, pruned_loss=0.04277, over 933517.05 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:25,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225488.0, ans=0.125 +2024-07-29 01:59:32,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.21 vs. 
limit=10.0 +2024-07-29 01:59:33,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=225501.33333333334, ans=0.125 +2024-07-29 01:59:43,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=225514.66666666666, ans=0.0 +2024-07-29 01:59:46,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=225514.66666666666, ans=0.09899494936611666 +2024-07-29 01:59:55,059 INFO [train.py:1114] (2/4) Epoch 17, batch 5600, loss[loss=0.1725, simple_loss=0.2628, pruned_loss=0.04112, over 4731.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2661, pruned_loss=0.04336, over 934817.18 frames. ], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:58,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=225541.33333333334, ans=0.2 +2024-07-29 02:00:09,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=225568.0, ans=0.125 +2024-07-29 02:00:10,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=225568.0, ans=0.0 +2024-07-29 02:00:11,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=225568.0, ans=0.025 +2024-07-29 02:00:13,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=225568.0, ans=0.125 +2024-07-29 02:00:18,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=225581.33333333334, ans=0.2 +2024-07-29 02:00:27,275 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.591e+01 6.348e+01 7.500e+01 1.117e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 02:00:29,319 INFO [train.py:1114] (2/4) Epoch 17, batch 5650, loss[loss=0.2001, simple_loss=0.2845, pruned_loss=0.05784, over 4504.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2653, pruned_loss=0.04276, over 937000.76 frames. ], batch size: 21, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:00:30,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=225608.0, ans=0.025 +2024-07-29 02:00:37,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.68 vs. limit=22.5 +2024-07-29 02:00:37,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=225621.33333333334, ans=0.04949747468305833 +2024-07-29 02:01:12,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=225648.0, ans=0.0 +2024-07-29 02:01:18,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=225661.33333333334, ans=0.0 +2024-07-29 02:01:20,552 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.99 vs. 
limit=15.0 +2024-07-29 02:01:20,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=225661.33333333334, ans=0.0 +2024-07-29 02:01:24,141 INFO [train.py:1114] (2/4) Epoch 17, batch 5700, loss[loss=0.1487, simple_loss=0.2375, pruned_loss=0.02991, over 4703.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2649, pruned_loss=0.0424, over 938153.27 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:01:26,541 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.50 vs. limit=15.0 +2024-07-29 02:01:27,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225674.66666666666, ans=0.1 +2024-07-29 02:01:52,645 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=225728.0, ans=0.125 +2024-07-29 02:01:56,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225728.0, ans=0.1 +2024-07-29 02:01:57,713 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.493e+01 6.225e+01 7.048e+01 1.096e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 02:02:02,541 INFO [train.py:1114] (2/4) Epoch 17, batch 5750, loss[loss=0.1974, simple_loss=0.3026, pruned_loss=0.04606, over 4730.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04234, over 938491.48 frames. ], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:14,052 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:02:14,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=225754.66666666666, ans=0.0 +2024-07-29 02:02:18,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=225768.0, ans=0.125 +2024-07-29 02:02:18,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225768.0, ans=0.125 +2024-07-29 02:02:35,967 INFO [train.py:1114] (2/4) Epoch 17, batch 5800, loss[loss=0.1926, simple_loss=0.2805, pruned_loss=0.05238, over 4699.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2677, pruned_loss=0.04284, over 937800.97 frames. ], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:38,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=225808.0, ans=10.0 +2024-07-29 02:03:10,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=225848.0, ans=0.0 +2024-07-29 02:03:16,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=225861.33333333334, ans=0.125 +2024-07-29 02:03:18,477 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.618e+01 6.216e+01 6.871e+01 1.068e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 02:03:21,271 INFO [train.py:1114] (2/4) Epoch 17, batch 5850, loss[loss=0.1817, simple_loss=0.2831, pruned_loss=0.04013, over 4485.00 frames. 
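
Editor's note: the recurring `optim.py:487` warnings summarize the recent distribution of gradient norms (five quantiles from min to max), the clipping threshold, and the fraction of batches actually clipped; in the entries above the threshold tracks roughly `Clipping_scale` times the median (e.g. 2.0 x 6.216e+01 = 1.243e+02). A minimal sketch of that pattern follows; the median-based threshold rule and class name are assumptions for illustration, not the actual optimizer:

```python
import torch
from collections import deque

class GradNormMonitor:
    """Track recent gradient norms; clip at clipping_scale * running median
    and report quantiles plus the fraction of clipped batches."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
        self.scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.clipped = 0
        self.seen = 0

    def step(self, params) -> float:
        norm = torch.norm(
            torch.stack([p.grad.norm() for p in params if p.grad is not None])
        ).item()
        self.norms.append(norm)
        self.seen += 1
        median = torch.tensor(list(self.norms)).median().item()
        threshold = self.scale * median
        if norm > threshold:                 # rescale gradients in place
            for p in params:
                if p.grad is not None:
                    p.grad.mul_(threshold / norm)
            self.clipped += 1
        return norm

    def report(self) -> str:
        t = torch.tensor(list(self.norms))
        q = [t.quantile(x).item() for x in (0.0, 0.25, 0.5, 0.75, 1.0)]
        pct = 100.0 * self.clipped / max(self.seen, 1)
        return (f"grad-norm quartiles {' '.join(f'{v:.3e}' for v in q)}, "
                f"threshold={self.scale * q[2]:.3e}, percent-clipped={pct}")
```
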
], tot_loss[loss=0.1771, simple_loss=0.2678, pruned_loss=0.0432, over 938061.38 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:03:25,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=225874.66666666666, ans=0.0 +2024-07-29 02:03:30,377 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.54 vs. limit=15.0 +2024-07-29 02:03:34,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225901.33333333334, ans=0.125 +2024-07-29 02:03:52,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=225928.0, ans=0.125 +2024-07-29 02:03:56,486 INFO [train.py:1114] (2/4) Epoch 17, batch 5900, loss[loss=0.1619, simple_loss=0.2482, pruned_loss=0.03786, over 4684.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2677, pruned_loss=0.04331, over 937956.23 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:03:59,830 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.20 vs. limit=15.0 +2024-07-29 02:04:01,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=225941.33333333334, ans=0.125 +2024-07-29 02:04:01,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=225941.33333333334, ans=0.125 +2024-07-29 02:04:20,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=225981.33333333334, ans=0.125 +2024-07-29 02:04:24,968 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:04:28,164 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.754e+01 6.416e+01 7.190e+01 1.147e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:04:30,242 INFO [train.py:1114] (2/4) Epoch 17, batch 5950, loss[loss=0.187, simple_loss=0.2825, pruned_loss=0.04577, over 4694.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2669, pruned_loss=0.04267, over 939849.91 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:04:33,261 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=226008.0, ans=0.2 +2024-07-29 02:04:40,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226021.33333333334, ans=0.125 +2024-07-29 02:04:43,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226021.33333333334, ans=0.0 +2024-07-29 02:04:47,415 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.23 vs. 
limit=12.0 +2024-07-29 02:04:49,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=226034.66666666666, ans=0.0 +2024-07-29 02:04:55,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=226048.0, ans=0.0 +2024-07-29 02:05:04,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226061.33333333334, ans=0.1 +2024-07-29 02:05:06,716 INFO [train.py:1114] (2/4) Epoch 17, batch 6000, loss[loss=0.1846, simple_loss=0.2811, pruned_loss=0.04411, over 4047.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2673, pruned_loss=0.04281, over 937434.62 frames. ], batch size: 25, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:05:06,716 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 02:05:33,750 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.3667, 5.1763, 4.6109, 4.4500], device='cuda:2') +2024-07-29 02:05:43,820 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.1623, simple_loss=0.2646, pruned_loss=0.02995, over 944034.00 frames. +2024-07-29 02:05:43,820 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 02:05:57,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=226088.0, ans=0.125 +2024-07-29 02:05:58,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=226101.33333333334, ans=0.0 +2024-07-29 02:05:59,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.09 vs. limit=10.0 +2024-07-29 02:06:11,237 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.45 vs. limit=22.5 +2024-07-29 02:06:12,213 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:06:13,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=226128.0, ans=0.0 +2024-07-29 02:06:14,636 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-07-29 02:06:16,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=226128.0, ans=0.0 +2024-07-29 02:06:17,673 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.832e+01 6.475e+01 7.861e+01 1.037e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 02:06:19,769 INFO [train.py:1114] (2/4) Epoch 17, batch 6050, loss[loss=0.1622, simple_loss=0.2696, pruned_loss=0.0274, over 4772.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2669, pruned_loss=0.0425, over 939059.78 frames. 
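
Editor's note: during the validation pass at batch 6000 above, the recipe also logs the entropy of each attention head's weight distribution (the `zipformer.py:1858` line), a cheap diagnostic for heads collapsing onto a single frame. Computing it is straightforward; a sketch, with shapes assumed for illustration:

```python
import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """attn_weights: (num_heads, query_len, key_len), rows summing to 1.
    Returns mean entropy per head in nats; values near log(key_len) mean
    the head attends broadly, values near 0 mean it has collapsed."""
    eps = 1e-20
    h = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return h.mean(dim=-1)  # average over query positions

w = torch.softmax(torch.randn(4, 100, 300), dim=-1)
print(attention_entropy(w))  # diffuse heads: close to log(300) ~ 5.7
```
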
], batch size: 12, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:06:23,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226141.33333333334, ans=0.1 +2024-07-29 02:06:26,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226154.66666666666, ans=0.0 +2024-07-29 02:06:28,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=226154.66666666666, ans=0.025 +2024-07-29 02:06:29,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226154.66666666666, ans=0.125 +2024-07-29 02:06:45,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=226181.33333333334, ans=0.0 +2024-07-29 02:06:50,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=226194.66666666666, ans=0.025 +2024-07-29 02:06:59,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226194.66666666666, ans=0.0 +2024-07-29 02:07:01,457 INFO [train.py:1114] (2/4) Epoch 17, batch 6100, loss[loss=0.2042, simple_loss=0.2973, pruned_loss=0.05557, over 4684.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.0424, over 939222.49 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:07:04,798 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-29 02:07:07,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=226208.0, ans=0.125 +2024-07-29 02:07:42,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-29 02:07:42,461 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226248.0, ans=0.0 +2024-07-29 02:07:49,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226261.33333333334, ans=0.125 +2024-07-29 02:07:50,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=226261.33333333334, ans=0.125 +2024-07-29 02:07:50,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226261.33333333334, ans=0.1 +2024-07-29 02:07:53,313 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.857e+01 5.509e+01 6.001e+01 6.915e+01 1.050e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 02:07:55,389 INFO [train.py:1114] (2/4) Epoch 17, batch 6150, loss[loss=0.1827, simple_loss=0.275, pruned_loss=0.04521, over 3406.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2665, pruned_loss=0.04272, over 937673.44 frames. 
], batch size: 36, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:18,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=226288.0, ans=0.125 +2024-07-29 02:08:22,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=226301.33333333334, ans=0.125 +2024-07-29 02:08:24,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=226301.33333333334, ans=0.025 +2024-07-29 02:08:30,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-07-29 02:08:40,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226328.0, ans=0.1 +2024-07-29 02:08:42,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226328.0, ans=0.125 +2024-07-29 02:08:44,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=226328.0, ans=0.125 +2024-07-29 02:08:46,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.11 vs. limit=15.0 +2024-07-29 02:08:56,968 INFO [train.py:1114] (2/4) Epoch 17, batch 6200, loss[loss=0.1578, simple_loss=0.263, pruned_loss=0.02627, over 4734.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.267, pruned_loss=0.04273, over 936775.81 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:09:01,378 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.52 vs. limit=8.0 +2024-07-29 02:09:02,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226341.33333333334, ans=0.125 +2024-07-29 02:09:24,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226368.0, ans=0.125 +2024-07-29 02:09:33,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226368.0, ans=0.1 +2024-07-29 02:09:33,811 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=226368.0, ans=0.025 +2024-07-29 02:09:36,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=226381.33333333334, ans=0.125 +2024-07-29 02:09:38,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=226381.33333333334, ans=0.125 +2024-07-29 02:09:41,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-07-29 02:09:51,547 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.563e+01 6.274e+01 7.227e+01 1.075e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 02:09:53,653 INFO [train.py:1114] (2/4) Epoch 17, batch 6250, loss[loss=0.1874, simple_loss=0.2886, pruned_loss=0.04308, over 4807.00 frames. 
], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.04247, over 933568.86 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:09:57,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=226408.0, ans=0.125 +2024-07-29 02:10:13,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=226448.0, ans=0.125 +2024-07-29 02:10:14,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=226448.0, ans=0.1 +2024-07-29 02:10:15,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226448.0, ans=0.125 +2024-07-29 02:10:27,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=226461.33333333334, ans=0.125 +2024-07-29 02:10:30,833 INFO [train.py:1114] (2/4) Epoch 17, batch 6300, loss[loss=0.1304, simple_loss=0.2243, pruned_loss=0.01825, over 4532.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2669, pruned_loss=0.04313, over 930500.80 frames. ], batch size: 10, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:10:40,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=226474.66666666666, ans=0.0 +2024-07-29 02:10:53,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=226501.33333333334, ans=0.125 +2024-07-29 02:11:01,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=226501.33333333334, ans=0.125 +2024-07-29 02:11:07,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=226514.66666666666, ans=0.125 +2024-07-29 02:11:10,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=226528.0, ans=0.125 +2024-07-29 02:11:15,466 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.120e+01 5.607e+01 6.569e+01 7.954e+01 1.446e+02, threshold=1.314e+02, percent-clipped=2.0 +2024-07-29 02:11:31,873 INFO [train.py:1114] (2/4) Epoch 17, batch 6350, loss[loss=0.1828, simple_loss=0.2761, pruned_loss=0.04473, over 4542.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2667, pruned_loss=0.0432, over 934420.49 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:11:40,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=226541.33333333334, ans=0.125 +2024-07-29 02:12:14,077 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=226568.0, ans=0.0 +2024-07-29 02:12:28,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=226594.66666666666, ans=0.0 +2024-07-29 02:12:30,411 INFO [train.py:1114] (2/4) Epoch 17, batch 6400, loss[loss=0.1836, simple_loss=0.2818, pruned_loss=0.0427, over 4631.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2666, pruned_loss=0.04353, over 936072.41 frames. 
], batch size: 13, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:12:51,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=226608.0, ans=0.035 +2024-07-29 02:13:18,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226648.0, ans=0.125 +2024-07-29 02:13:27,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=226661.33333333334, ans=0.025 +2024-07-29 02:13:28,827 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 5.819e+01 6.340e+01 7.116e+01 1.046e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 02:13:28,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=226661.33333333334, ans=0.07 +2024-07-29 02:13:33,293 INFO [train.py:1114] (2/4) Epoch 17, batch 6450, loss[loss=0.1793, simple_loss=0.2722, pruned_loss=0.04321, over 4436.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2666, pruned_loss=0.04318, over 939429.86 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:13:34,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226674.66666666666, ans=0.0 +2024-07-29 02:13:36,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=226674.66666666666, ans=0.05 +2024-07-29 02:13:42,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226688.0, ans=0.125 +2024-07-29 02:14:10,843 INFO [train.py:1114] (2/4) Epoch 17, batch 6500, loss[loss=0.2555, simple_loss=0.3274, pruned_loss=0.09176, over 3292.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2662, pruned_loss=0.04283, over 940298.21 frames. ], batch size: 37, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:14:12,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.34 vs. limit=10.0 +2024-07-29 02:14:21,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=226754.66666666666, ans=0.125 +2024-07-29 02:14:33,897 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.35 vs. limit=10.0 +2024-07-29 02:14:34,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=226781.33333333334, ans=0.125 +2024-07-29 02:14:36,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=226781.33333333334, ans=0.025 +2024-07-29 02:14:39,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=226794.66666666666, ans=0.125 +2024-07-29 02:14:42,547 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.659e+01 6.416e+01 7.709e+01 1.114e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:14:43,951 INFO [train.py:1114] (2/4) Epoch 17, batch 6550, loss[loss=0.1703, simple_loss=0.2387, pruned_loss=0.05099, over 4803.00 frames. 
], tot_loss[loss=0.1749, simple_loss=0.2652, pruned_loss=0.04232, over 943482.42 frames. ], batch size: 11, lr: 4.37e-03, grad_scale: 32.0
+2024-07-29 02:14:59,160 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226834.66666666666, ans=0.0
+2024-07-29 02:15:01,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0
+2024-07-29 02:15:09,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226848.0, ans=0.1
+2024-07-29 02:15:17,690 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.58 vs. limit=15.0
+2024-07-29 02:15:18,655 INFO [train.py:1114] (2/4) Epoch 17, batch 6600, loss[loss=0.1634, simple_loss=0.2623, pruned_loss=0.03231, over 4935.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.265, pruned_loss=0.04239, over 945498.84 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 32.0
+2024-07-29 02:15:26,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.90 vs. limit=15.0
+2024-07-29 02:15:28,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226888.0, ans=0.0
+2024-07-29 02:15:28,513 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.36 vs. limit=10.0
+2024-07-29 02:15:33,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=226901.33333333334, ans=0.125
+2024-07-29 02:15:53,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=226928.0, ans=0.125
+2024-07-29 02:15:55,951 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.664e+01 6.465e+01 7.332e+01 1.238e+02, threshold=1.293e+02, percent-clipped=0.0
+2024-07-29 02:15:57,324 INFO [train.py:1114] (2/4) Epoch 17, batch 6650, loss[loss=0.2071, simple_loss=0.2971, pruned_loss=0.05851, over 4648.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2648, pruned_loss=0.04217, over 943607.77 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:16:17,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=226968.0, ans=0.0
+2024-07-29 02:16:29,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=226994.66666666666, ans=0.125
+2024-07-29 02:16:32,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226994.66666666666, ans=0.0
+2024-07-29 02:16:32,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=226994.66666666666, ans=0.95
+2024-07-29 02:16:32,860 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0
+2024-07-29 02:16:35,773 INFO [train.py:1114] (2/4) Epoch 17, batch 6700, loss[loss=0.193, simple_loss=0.2844, pruned_loss=0.05078, over 4729.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04234, over 942516.70 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:16:37,520 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.23 vs. limit=22.5
+2024-07-29 02:16:45,283 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227021.33333333334, ans=0.1
+2024-07-29 02:16:52,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.97 vs. limit=22.5
+2024-07-29 02:16:55,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=227048.0, ans=0.07
+2024-07-29 02:17:07,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=227061.33333333334, ans=0.0
+2024-07-29 02:17:08,098 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.902e+01 6.582e+01 7.550e+01 1.119e+02, threshold=1.316e+02, percent-clipped=0.0
+2024-07-29 02:17:08,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0
+2024-07-29 02:17:09,559 INFO [train.py:1114] (2/4) Epoch 17, batch 6750, loss[loss=0.2002, simple_loss=0.3044, pruned_loss=0.048, over 4160.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2664, pruned_loss=0.0424, over 940130.19 frames. ], batch size: 25, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:17:11,013 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:17:12,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.00 vs. limit=15.0
+2024-07-29 02:17:25,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227101.33333333334, ans=0.125
+2024-07-29 02:17:36,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227114.66666666666, ans=0.125
+2024-07-29 02:17:44,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=227128.0, ans=0.0
+2024-07-29 02:17:46,133 INFO [train.py:1114] (2/4) Epoch 17, batch 6800, loss[loss=0.1857, simple_loss=0.2725, pruned_loss=0.04942, over 4635.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2671, pruned_loss=0.04295, over 938603.59 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:17:47,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=227141.33333333334, ans=0.2
+2024-07-29 02:18:04,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=227168.0, ans=0.125
+2024-07-29 02:18:07,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227168.0, ans=0.125
+2024-07-29 02:18:18,282 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:18:23,162 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.796e+01 6.354e+01 7.528e+01 1.110e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-29 02:18:24,488 INFO [train.py:1114] (2/4) Epoch 17, batch 6850, loss[loss=0.1806, simple_loss=0.2798, pruned_loss=0.04067, over 4690.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04282, over 940549.54 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:18:30,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=227221.33333333334, ans=0.2
+2024-07-29 02:18:35,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=227221.33333333334, ans=0.125
+2024-07-29 02:18:37,668 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=12.0
+2024-07-29 02:18:43,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=227234.66666666666, ans=0.125
+2024-07-29 02:18:47,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.25 vs. limit=15.0
+2024-07-29 02:18:57,213 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227261.33333333334, ans=0.1
+2024-07-29 02:18:58,428 INFO [train.py:1114] (2/4) Epoch 17, batch 6900, loss[loss=0.1876, simple_loss=0.283, pruned_loss=0.04605, over 4971.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2671, pruned_loss=0.04268, over 942917.58 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:19:13,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=227301.33333333334, ans=0.0
+2024-07-29 02:19:16,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=227301.33333333334, ans=0.125
+2024-07-29 02:19:20,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227314.66666666666, ans=0.0
+2024-07-29 02:19:23,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=227314.66666666666, ans=0.125
+2024-07-29 02:19:24,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=227328.0, ans=0.125
+2024-07-29 02:19:28,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=227328.0, ans=0.0
+2024-07-29 02:19:28,426 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.65 vs. limit=15.0
+2024-07-29 02:19:29,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=227328.0, ans=0.125
+2024-07-29 02:19:30,584 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.673e+01 6.337e+01 7.070e+01 9.910e+01, threshold=1.267e+02, percent-clipped=0.0
+2024-07-29 02:19:31,920 INFO [train.py:1114] (2/4) Epoch 17, batch 6950, loss[loss=0.1727, simple_loss=0.2502, pruned_loss=0.04759, over 4511.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2676, pruned_loss=0.04335, over 940098.97 frames. ], batch size: 10, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:19:31,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227341.33333333334, ans=0.125
+2024-07-29 02:19:32,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=227341.33333333334, ans=0.0
+2024-07-29 02:19:33,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227341.33333333334, ans=0.125
+2024-07-29 02:19:38,080 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.54 vs. limit=15.0
+2024-07-29 02:19:46,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227368.0, ans=0.1
+2024-07-29 02:19:57,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0
+2024-07-29 02:20:02,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=227394.66666666666, ans=0.125
+2024-07-29 02:20:03,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=227394.66666666666, ans=0.2
+2024-07-29 02:20:06,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=227408.0, ans=0.0
+2024-07-29 02:20:06,911 INFO [train.py:1114] (2/4) Epoch 17, batch 7000, loss[loss=0.1866, simple_loss=0.2819, pruned_loss=0.04563, over 4607.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2672, pruned_loss=0.04335, over 938951.04 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:20:13,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=227421.33333333334, ans=0.04949747468305833
+2024-07-29 02:20:19,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=227434.66666666666, ans=0.2
+2024-07-29 02:20:20,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=227434.66666666666, ans=0.2
+2024-07-29 02:20:36,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=227461.33333333334, ans=0.125
+2024-07-29 02:20:38,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.627e+01 5.582e+01 6.064e+01 6.691e+01 1.096e+02, threshold=1.213e+02, percent-clipped=0.0
+2024-07-29 02:20:39,961 INFO [train.py:1114] (2/4) Epoch 17, batch 7050, loss[loss=0.1683, simple_loss=0.2686, pruned_loss=0.03395, over 4693.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2668, pruned_loss=0.04308, over 942160.28 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:20:46,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=227488.0, ans=0.0
+2024-07-29 02:20:58,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.45 vs. limit=10.0
+2024-07-29 02:21:20,633 INFO [train.py:1114] (2/4) Epoch 17, batch 7100, loss[loss=0.1664, simple_loss=0.2657, pruned_loss=0.03355, over 4803.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04391, over 937191.54 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:21:23,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=227541.33333333334, ans=0.025
+2024-07-29 02:21:25,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=227541.33333333334, ans=0.125
+2024-07-29 02:21:26,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. limit=15.0
+2024-07-29 02:21:34,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=227568.0, ans=0.125
+2024-07-29 02:21:39,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=227568.0, ans=0.125
+2024-07-29 02:21:43,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=227581.33333333334, ans=0.125
+2024-07-29 02:21:53,002 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+01 5.574e+01 6.289e+01 7.294e+01 1.340e+02, threshold=1.258e+02, percent-clipped=1.0
+2024-07-29 02:21:54,464 INFO [train.py:1114] (2/4) Epoch 17, batch 7150, loss[loss=0.2044, simple_loss=0.2969, pruned_loss=0.05599, over 4532.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2667, pruned_loss=0.0433, over 938141.06 frames. ], batch size: 21, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:21:59,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=227608.0, ans=0.125
+2024-07-29 02:22:17,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=227648.0, ans=0.025
+2024-07-29 02:22:21,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=227661.33333333334, ans=0.0
+2024-07-29 02:22:27,908 INFO [train.py:1114] (2/4) Epoch 17, batch 7200, loss[loss=0.1714, simple_loss=0.2637, pruned_loss=0.03954, over 4802.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2678, pruned_loss=0.0436, over 938154.91 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:22:33,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=227674.66666666666, ans=0.0
+2024-07-29 02:22:34,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=227688.0, ans=0.0
+2024-07-29 02:22:34,692 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:22:47,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=227714.66666666666, ans=0.125
+2024-07-29 02:22:59,857 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.624e+01 6.163e+01 6.917e+01 1.062e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-29 02:23:01,134 INFO [train.py:1114] (2/4) Epoch 17, batch 7250, loss[loss=0.1649, simple_loss=0.2394, pruned_loss=0.04517, over 4853.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2674, pruned_loss=0.04371, over 939528.76 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:23:04,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227741.33333333334, ans=0.125
+2024-07-29 02:23:13,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=227754.66666666666, ans=0.0
+2024-07-29 02:23:14,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.50 vs. limit=22.5
+2024-07-29 02:23:15,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227768.0, ans=0.125
+2024-07-29 02:23:35,256 INFO [train.py:1114] (2/4) Epoch 17, batch 7300, loss[loss=0.1434, simple_loss=0.2274, pruned_loss=0.02974, over 4847.00 frames. ], tot_loss[loss=0.177, simple_loss=0.267, pruned_loss=0.04347, over 939835.32 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:23:36,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=227808.0, ans=10.0
+2024-07-29 02:23:40,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=227808.0, ans=0.0
+2024-07-29 02:24:01,340 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:24:05,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227861.33333333334, ans=0.125
+2024-07-29 02:24:05,745 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227861.33333333334, ans=0.125
+2024-07-29 02:24:07,033 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.670e+01 6.102e+01 6.863e+01 9.457e+01, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 02:24:08,409 INFO [train.py:1114] (2/4) Epoch 17, batch 7350, loss[loss=0.1524, simple_loss=0.2447, pruned_loss=0.03003, over 4642.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2662, pruned_loss=0.04289, over 938793.18 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:24:08,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=227874.66666666666, ans=0.0
+2024-07-29 02:24:08,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0
+2024-07-29 02:24:09,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=227874.66666666666, ans=0.125
+2024-07-29 02:24:16,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.69 vs. limit=15.0
+2024-07-29 02:24:17,124 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:24:28,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=227901.33333333334, ans=0.125
+2024-07-29 02:24:33,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=227901.33333333334, ans=0.125
+2024-07-29 02:24:34,611 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.73 vs. limit=15.0
+2024-07-29 02:24:36,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=227914.66666666666, ans=0.125
+2024-07-29 02:24:38,911 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227914.66666666666, ans=0.0
+2024-07-29 02:24:40,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.70 vs. limit=6.0
+2024-07-29 02:24:47,010 INFO [train.py:1114] (2/4) Epoch 17, batch 7400, loss[loss=0.1824, simple_loss=0.2644, pruned_loss=0.05019, over 4690.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04309, over 939936.25 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0
+2024-07-29 02:24:49,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.10 vs. limit=15.0
+2024-07-29 02:24:55,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=227954.66666666666, ans=0.125
+2024-07-29 02:25:07,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=227968.0, ans=0.2
+2024-07-29 02:25:07,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=227981.33333333334, ans=0.2
+2024-07-29 02:25:20,903 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.692e+01 6.442e+01 7.535e+01 1.153e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-29 02:25:22,160 INFO [train.py:1114] (2/4) Epoch 17, batch 7450, loss[loss=0.1476, simple_loss=0.2257, pruned_loss=0.03472, over 4616.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2661, pruned_loss=0.04291, over 937698.27 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:25:26,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=228008.0, ans=0.0
+2024-07-29 02:25:31,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=228021.33333333334, ans=0.95
+2024-07-29 02:25:38,718 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=228034.66666666666, ans=0.125
+2024-07-29 02:25:43,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228048.0, ans=0.1
+2024-07-29 02:25:48,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=228061.33333333334, ans=0.025
+2024-07-29 02:25:49,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228061.33333333334, ans=0.1
+2024-07-29 02:25:49,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=228061.33333333334, ans=0.2
+2024-07-29 02:25:49,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=228061.33333333334, ans=0.125
+2024-07-29 02:25:53,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=228061.33333333334, ans=0.05
+2024-07-29 02:25:55,073 INFO [train.py:1114] (2/4) Epoch 17, batch 7500, loss[loss=0.2107, simple_loss=0.303, pruned_loss=0.05918, over 3393.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.266, pruned_loss=0.04288, over 935703.40 frames. ], batch size: 36, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:25:58,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.64 vs. limit=10.0
+2024-07-29 02:26:10,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=228101.33333333334, ans=0.025
+2024-07-29 02:26:26,557 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.692e+01 6.151e+01 7.079e+01 1.117e+02, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 02:26:27,928 INFO [train.py:1114] (2/4) Epoch 17, batch 7550, loss[loss=0.1925, simple_loss=0.2842, pruned_loss=0.05039, over 4597.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.268, pruned_loss=0.04379, over 935818.40 frames. ], batch size: 17, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:26:27,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=228141.33333333334, ans=0.0
+2024-07-29 02:26:31,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228141.33333333334, ans=0.125
+2024-07-29 02:26:31,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228141.33333333334, ans=0.125
+2024-07-29 02:26:35,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=228154.66666666666, ans=0.125
+2024-07-29 02:26:43,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=228168.0, ans=0.0
+2024-07-29 02:26:52,768 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=228181.33333333334, ans=0.025
+2024-07-29 02:26:55,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.82 vs. limit=15.0
+2024-07-29 02:27:00,400 INFO [train.py:1114] (2/4) Epoch 17, batch 7600, loss[loss=0.224, simple_loss=0.2937, pruned_loss=0.07713, over 4812.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2668, pruned_loss=0.04354, over 937640.71 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:27:00,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=228208.0, ans=0.0
+2024-07-29 02:27:02,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=228208.0, ans=0.0
+2024-07-29 02:27:03,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=228208.0, ans=0.125
+2024-07-29 02:27:03,275 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.00 vs. limit=22.5
+2024-07-29 02:27:09,670 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:27:17,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=228234.66666666666, ans=0.0
+2024-07-29 02:27:20,682 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.02 vs. limit=15.0
+2024-07-29 02:27:24,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=228248.0, ans=0.0
+2024-07-29 02:27:26,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=228261.33333333334, ans=0.09899494936611666
+2024-07-29 02:27:32,118 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.577e+01 6.101e+01 6.985e+01 1.081e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 02:27:33,472 INFO [train.py:1114] (2/4) Epoch 17, batch 7650, loss[loss=0.1549, simple_loss=0.2382, pruned_loss=0.03582, over 4934.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2665, pruned_loss=0.04338, over 937141.71 frames. ], batch size: 12, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:27:36,837 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.44 vs. limit=15.0
+2024-07-29 02:28:06,775 INFO [train.py:1114] (2/4) Epoch 17, batch 7700, loss[loss=0.2061, simple_loss=0.3069, pruned_loss=0.0527, over 4695.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2672, pruned_loss=0.04352, over 934508.14 frames. ], batch size: 13, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:28:08,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=228341.33333333334, ans=0.1
+2024-07-29 02:28:10,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228341.33333333334, ans=0.1
+2024-07-29 02:28:11,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.94 vs. limit=6.0
+2024-07-29 02:28:15,972 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.14 vs. limit=15.0
+2024-07-29 02:28:21,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=228368.0, ans=0.2
+2024-07-29 02:28:25,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.46 vs. limit=22.5
+2024-07-29 02:28:29,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=228381.33333333334, ans=0.125
+2024-07-29 02:28:30,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=228381.33333333334, ans=0.09899494936611666
+2024-07-29 02:28:33,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228394.66666666666, ans=0.125
+2024-07-29 02:28:37,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=228394.66666666666, ans=0.125
+2024-07-29 02:28:38,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.830e+01 5.778e+01 6.221e+01 6.817e+01 1.028e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-29 02:28:39,829 INFO [train.py:1114] (2/4) Epoch 17, batch 7750, loss[loss=0.1673, simple_loss=0.275, pruned_loss=0.02985, over 4932.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.04363, over 935663.79 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:29:02,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=228448.0, ans=0.125
+2024-07-29 02:29:06,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=228461.33333333334, ans=0.2
+2024-07-29 02:29:09,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228461.33333333334, ans=0.1
+2024-07-29 02:29:13,370 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0
+2024-07-29 02:29:13,589 INFO [train.py:1114] (2/4) Epoch 17, batch 7800, loss[loss=0.1799, simple_loss=0.2929, pruned_loss=0.03342, over 4664.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2685, pruned_loss=0.04332, over 937406.43 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:29:19,671 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.00 vs. limit=12.0
+2024-07-29 02:29:25,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=228488.0, ans=0.0
+2024-07-29 02:29:37,455 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.64 vs. limit=15.0
+2024-07-29 02:29:39,661 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.75 vs. limit=12.0
+2024-07-29 02:29:43,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=228528.0, ans=0.125
+2024-07-29 02:29:45,761 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.601e+01 6.061e+01 6.909e+01 9.922e+01, threshold=1.212e+02, percent-clipped=0.0
+2024-07-29 02:29:47,131 INFO [train.py:1114] (2/4) Epoch 17, batch 7850, loss[loss=0.1709, simple_loss=0.2504, pruned_loss=0.04569, over 4535.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2684, pruned_loss=0.04339, over 936152.52 frames. ], batch size: 10, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:29:49,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=228541.33333333334, ans=0.09899494936611666
+2024-07-29 02:29:57,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=228554.66666666666, ans=0.2
+2024-07-29 02:29:58,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=228554.66666666666, ans=0.125
+2024-07-29 02:30:02,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=228568.0, ans=0.025
+2024-07-29 02:30:02,852 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=12.0
+2024-07-29 02:30:04,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=228568.0, ans=0.0
+2024-07-29 02:30:04,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=228568.0, ans=0.0
+2024-07-29 02:30:19,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.22 vs. limit=22.5
+2024-07-29 02:30:20,613 INFO [train.py:1114] (2/4) Epoch 17, batch 7900, loss[loss=0.1827, simple_loss=0.2734, pruned_loss=0.04601, over 4877.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.27, pruned_loss=0.04413, over 933440.64 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:30:29,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=228621.33333333334, ans=0.0
+2024-07-29 02:30:33,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=228634.66666666666, ans=0.2
+2024-07-29 02:30:40,360 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=228648.0, ans=0.0
+2024-07-29 02:30:44,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=228648.0, ans=0.125
+2024-07-29 02:30:48,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=228661.33333333334, ans=0.0
+2024-07-29 02:30:51,954 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.771e+01 6.375e+01 7.176e+01 1.150e+02, threshold=1.275e+02, percent-clipped=0.0
+2024-07-29 02:30:53,287 INFO [train.py:1114] (2/4) Epoch 17, batch 7950, loss[loss=0.2178, simple_loss=0.3098, pruned_loss=0.06291, over 3588.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2697, pruned_loss=0.04379, over 935785.24 frames. ], batch size: 36, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:30:54,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=228674.66666666666, ans=0.0
+2024-07-29 02:31:00,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=228688.0, ans=0.2
+2024-07-29 02:31:02,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=228688.0, ans=0.2
+2024-07-29 02:31:02,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=228688.0, ans=0.0
+2024-07-29 02:31:02,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=228688.0, ans=0.0
+2024-07-29 02:31:02,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=228688.0, ans=0.125
+2024-07-29 02:31:34,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=228728.0, ans=0.125
+2024-07-29 02:31:41,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=228728.0, ans=0.125
+2024-07-29 02:31:43,743 INFO [train.py:1114] (2/4) Epoch 17, batch 8000, loss[loss=0.1737, simple_loss=0.2491, pruned_loss=0.04916, over 4607.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2682, pruned_loss=0.04329, over 934794.67 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:31:52,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=228754.66666666666, ans=0.125
+2024-07-29 02:31:53,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=228754.66666666666, ans=0.0
+2024-07-29 02:31:53,975 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.71 vs. limit=22.5
+2024-07-29 02:32:10,550 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:32:17,773 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.673e+01 6.449e+01 7.589e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-29 02:32:19,118 INFO [train.py:1114] (2/4) Epoch 17, batch 8050, loss[loss=0.1635, simple_loss=0.2682, pruned_loss=0.02937, over 4816.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2683, pruned_loss=0.04336, over 934071.50 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:32:33,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=228821.33333333334, ans=0.2
+2024-07-29 02:32:36,826 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:32:45,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=228848.0, ans=0.0
+2024-07-29 02:32:51,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.04 vs. limit=15.0
+2024-07-29 02:32:54,996 INFO [train.py:1114] (2/4) Epoch 17, batch 8100, loss[loss=0.2252, simple_loss=0.3153, pruned_loss=0.06755, over 4804.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2681, pruned_loss=0.04321, over 933899.41 frames. ], batch size: 15, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:33:07,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=228888.0, ans=0.05
+2024-07-29 02:33:09,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=228901.33333333334, ans=0.125
+2024-07-29 02:33:17,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=228914.66666666666, ans=0.0
+2024-07-29 02:33:20,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=228914.66666666666, ans=0.0
+2024-07-29 02:33:25,123 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0
+2024-07-29 02:33:28,141 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.750e+01 6.401e+01 7.734e+01 1.146e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-29 02:33:28,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=228941.33333333334, ans=0.1
+2024-07-29 02:33:29,428 INFO [train.py:1114] (2/4) Epoch 17, batch 8150, loss[loss=0.1814, simple_loss=0.2837, pruned_loss=0.03952, over 4798.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.04256, over 937378.16 frames. ], batch size: 15, lr: 4.35e-03, grad_scale: 32.0
+2024-07-29 02:33:29,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=228941.33333333334, ans=0.125
+2024-07-29 02:33:37,799 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.51 vs. limit=10.0
+2024-07-29 02:34:00,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=228994.66666666666, ans=0.125
+2024-07-29 02:34:02,005 INFO [train.py:1114] (2/4) Epoch 17, batch 8200, loss[loss=0.2004, simple_loss=0.2883, pruned_loss=0.05623, over 4804.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2665, pruned_loss=0.04257, over 938343.53 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:34:02,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=229008.0, ans=0.025
+2024-07-29 02:34:03,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229008.0, ans=0.1
+2024-07-29 02:34:11,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=229021.33333333334, ans=0.125
+2024-07-29 02:34:13,808 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.82 vs. limit=22.5
+2024-07-29 02:34:14,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=229034.66666666666, ans=0.0
+2024-07-29 02:34:34,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=229061.33333333334, ans=0.0
+2024-07-29 02:34:34,883 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.838e+01 5.522e+01 6.074e+01 7.199e+01 1.525e+02, threshold=1.215e+02, percent-clipped=1.0
+2024-07-29 02:34:36,181 INFO [train.py:1114] (2/4) Epoch 17, batch 8250, loss[loss=0.1674, simple_loss=0.2647, pruned_loss=0.03512, over 4896.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2666, pruned_loss=0.04259, over 938799.15 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:34:57,118 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:34:57,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=229101.33333333334, ans=0.0
+2024-07-29 02:35:08,498 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.53 vs. limit=15.0
+2024-07-29 02:35:11,239 INFO [train.py:1114] (2/4) Epoch 17, batch 8300, loss[loss=0.2051, simple_loss=0.2871, pruned_loss=0.06153, over 4908.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2677, pruned_loss=0.04304, over 938779.69 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:35:18,990 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=229154.66666666666, ans=0.2
+2024-07-29 02:35:20,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0
+2024-07-29 02:35:39,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229194.66666666666, ans=0.125
+2024-07-29 02:35:41,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=229194.66666666666, ans=0.0
+2024-07-29 02:35:44,151 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.675e+01 6.316e+01 6.956e+01 1.152e+02, threshold=1.263e+02, percent-clipped=0.0
+2024-07-29 02:35:45,458 INFO [train.py:1114] (2/4) Epoch 17, batch 8350, loss[loss=0.178, simple_loss=0.2763, pruned_loss=0.03987, over 4805.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04283, over 941536.07 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:36:11,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=229248.0, ans=0.0
+2024-07-29 02:36:12,437 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=229248.0, ans=0.125
+2024-07-29 02:36:18,241 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:36:22,693 INFO [train.py:1114] (2/4) Epoch 17, batch 8400, loss[loss=0.1673, simple_loss=0.2663, pruned_loss=0.03411, over 4777.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.268, pruned_loss=0.04353, over 940215.00 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:36:36,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229288.0, ans=0.125
+2024-07-29 02:36:38,259 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=229301.33333333334, ans=0.125
+2024-07-29 02:36:46,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=229314.66666666666, ans=0.2
+2024-07-29 02:36:57,643 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.781e+01 6.432e+01 7.454e+01 1.243e+02, threshold=1.286e+02, percent-clipped=0.0
+2024-07-29 02:36:58,928 INFO [train.py:1114] (2/4) Epoch 17, batch 8450, loss[loss=0.1895, simple_loss=0.2795, pruned_loss=0.04977, over 4809.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2695, pruned_loss=0.04404, over 939110.28 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:37:01,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=229341.33333333334, ans=15.0
+2024-07-29 02:37:03,298 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.88 vs. limit=22.5
+2024-07-29 02:37:04,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229354.66666666666, ans=0.1
+2024-07-29 02:37:19,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=229381.33333333334, ans=0.0
+2024-07-29 02:37:25,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=229394.66666666666, ans=0.0
+2024-07-29 02:37:31,064 INFO [train.py:1114] (2/4) Epoch 17, batch 8500, loss[loss=0.1431, simple_loss=0.2267, pruned_loss=0.02977, over 4606.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2682, pruned_loss=0.04323, over 938932.95 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:37:38,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=229408.0, ans=0.0
+2024-07-29 02:37:50,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=229434.66666666666, ans=0.0
+2024-07-29 02:37:58,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=229448.0, ans=0.0
+2024-07-29 02:37:59,545 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0
+2024-07-29 02:38:04,985 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.595e+01 6.449e+01 7.243e+01 1.266e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-29 02:38:06,372 INFO [train.py:1114] (2/4) Epoch 17, batch 8550, loss[loss=0.1355, simple_loss=0.218, pruned_loss=0.02647, over 4813.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.268, pruned_loss=0.04338, over 939801.89 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 64.0
+2024-07-29 02:38:08,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=229474.66666666666, ans=0.125
+2024-07-29 02:38:10,507 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.30 vs. limit=15.0
+2024-07-29 02:38:15,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229488.0, ans=0.125
+2024-07-29 02:38:28,872 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. limit=15.0
+2024-07-29 02:38:29,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=229514.66666666666, ans=0.125
+2024-07-29 02:38:31,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=229514.66666666666, ans=0.2
+2024-07-29 02:38:33,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=229528.0, ans=0.125
+2024-07-29 02:38:39,390 INFO [train.py:1114] (2/4) Epoch 17, batch 8600, loss[loss=0.187, simple_loss=0.2944, pruned_loss=0.03975, over 4792.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2674, pruned_loss=0.04366, over 939329.56 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0
+2024-07-29 02:38:45,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229554.66666666666, ans=0.1
+2024-07-29 02:38:54,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=229568.0, ans=0.07
+2024-07-29 02:38:57,647 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=229568.0, ans=0.95
+2024-07-29 02:38:58,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=22.5
+2024-07-29 02:39:13,135 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.824e+01 6.675e+01 7.491e+01 1.199e+02, threshold=1.335e+02, percent-clipped=0.0
+2024-07-29 02:39:14,437 INFO [train.py:1114] (2/4) Epoch 17, batch 8650, loss[loss=0.2053, simple_loss=0.2889, pruned_loss=0.06088, over 4895.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2671, pruned_loss=0.04357, over 940693.08 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0
+2024-07-29 02:39:19,850 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=229608.0, ans=0.125
+2024-07-29 02:39:20,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229621.33333333334, ans=0.125
+2024-07-29 02:39:25,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=229621.33333333334, ans=0.025
+2024-07-29 02:39:32,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229634.66666666666, ans=0.125
+2024-07-29 02:39:36,033 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.00 vs. limit=6.0
+2024-07-29 02:39:36,569 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 02:39:46,643 INFO [train.py:1114] (2/4) Epoch 17, batch 8700, loss[loss=0.1821, simple_loss=0.2738, pruned_loss=0.04518, over 4760.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.04412, over 938623.55 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 64.0
+2024-07-29 02:39:49,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=229674.66666666666, ans=0.125
+2024-07-29 02:40:07,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229714.66666666666, ans=0.125
+2024-07-29 02:40:19,053 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.617e+01 6.057e+01 6.881e+01 1.135e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-29 02:40:20,343 INFO [train.py:1114] (2/4) Epoch 17, batch 8750, loss[loss=0.1943, simple_loss=0.2869, pruned_loss=0.05083, over 4680.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2675, pruned_loss=0.04386, over 937084.61 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0
+2024-07-29 02:40:22,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=229741.33333333334, ans=0.0
+2024-07-29 02:40:24,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=229741.33333333334, ans=0.125
+2024-07-29 02:40:32,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=229754.66666666666, ans=0.0
+2024-07-29 02:40:36,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229768.0, ans=0.125
+2024-07-29 02:40:37,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229768.0, ans=0.1
+2024-07-29 02:40:38,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=229768.0, ans=0.0
+2024-07-29 02:40:49,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229794.66666666666, ans=0.125
+2024-07-29 02:40:56,135 INFO [train.py:1114] (2/4) Epoch 17, batch 8800, loss[loss=0.1803, simple_loss=0.271, pruned_loss=0.0448, over 4933.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2675, pruned_loss=0.04297, over 937709.65 frames. ], batch size: 14, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:41:14,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=229834.66666666666, ans=0.0
+2024-07-29 02:41:17,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=229848.0, ans=0.0
+2024-07-29 02:41:18,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=229848.0, ans=0.125
+2024-07-29 02:41:18,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=229848.0, ans=0.0
+2024-07-29 02:41:28,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229861.33333333334, ans=0.1
+2024-07-29 02:41:28,618 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.109e+01 6.683e+01 1.097e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-29 02:41:29,297 INFO [train.py:1114] (2/4) Epoch 17, batch 8850, loss[loss=0.1759, simple_loss=0.279, pruned_loss=0.03637, over 4499.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2673, pruned_loss=0.04322, over 932801.28 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:41:43,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=229888.0, ans=0.015
+2024-07-29 02:41:44,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=229888.0, ans=0.0
+2024-07-29 02:41:45,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=229901.33333333334, ans=0.0
+2024-07-29 02:41:57,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.71 vs. limit=15.0
+2024-07-29 02:42:08,015 INFO [train.py:1114] (2/4) Epoch 17, batch 8900, loss[loss=0.178, simple_loss=0.2684, pruned_loss=0.04386, over 4937.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.04367, over 930555.15 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:42:11,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=229941.33333333334, ans=0.025
+2024-07-29 02:42:16,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=229954.66666666666, ans=0.5
+2024-07-29 02:42:16,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=229954.66666666666, ans=0.125
+2024-07-29 02:42:22,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.50 vs. limit=12.0
+2024-07-29 02:42:23,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=229968.0, ans=0.0
+2024-07-29 02:42:35,916 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.02 vs. limit=15.0
+2024-07-29 02:42:43,289 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.920e+01 5.712e+01 6.272e+01 7.147e+01 1.085e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-29 02:42:43,967 INFO [train.py:1114] (2/4) Epoch 17, batch 8950, loss[loss=0.1603, simple_loss=0.2591, pruned_loss=0.03074, over 4527.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2673, pruned_loss=0.04326, over 931621.66 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0
+2024-07-29 02:42:49,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=230021.33333333334, ans=0.125
+2024-07-29 02:42:50,552 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.13 vs. limit=22.5
+2024-07-29 02:43:12,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=22.5
+2024-07-29 02:43:24,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=230061.33333333334, ans=0.0
+2024-07-29 02:43:26,341 INFO [train.py:1114] (2/4) Epoch 17, batch 9000, loss[loss=0.1706, simple_loss=0.2547, pruned_loss=0.04322, over 4645.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2657, pruned_loss=0.04273, over 934435.02 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:43:26,342 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-29 02:43:32,560 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.5788, 4.0875, 4.6294, 3.5762], device='cuda:2')
+2024-07-29 02:43:37,870 INFO [train.py:1146] (2/4) Epoch 17, validation: loss=0.1619, simple_loss=0.2644, pruned_loss=0.02967, over 944034.00 frames.
+2024-07-29 02:43:37,871 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-29 02:43:37,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=230074.66666666666, ans=0.125
+2024-07-29 02:43:44,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230074.66666666666, ans=0.0
+2024-07-29 02:43:48,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=230088.0, ans=0.0
+2024-07-29 02:43:50,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0
+2024-07-29 02:43:52,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230101.33333333334, ans=0.1
+2024-07-29 02:43:52,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=27.49 vs. limit=22.5
+2024-07-29 02:44:07,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=230128.0, ans=0.125
+2024-07-29 02:44:07,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=230128.0, ans=0.025
+2024-07-29 02:44:07,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=230128.0, ans=0.2
+2024-07-29 02:44:12,201 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.566e+01 6.347e+01 7.363e+01 1.043e+02, threshold=1.269e+02, percent-clipped=0.0
+2024-07-29 02:44:12,234 INFO [train.py:1114] (2/4) Epoch 17, batch 9050, loss[loss=0.1574, simple_loss=0.243, pruned_loss=0.03589, over 4531.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2652, pruned_loss=0.04225, over 934728.69 frames. ], batch size: 10, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:44:15,658 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230141.33333333334, ans=0.125
+2024-07-29 02:44:32,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=230168.0, ans=0.0
+2024-07-29 02:44:35,605 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=230181.33333333334, ans=0.0
+2024-07-29 02:44:40,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=230181.33333333334, ans=0.0
+2024-07-29 02:44:42,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=230194.66666666666, ans=0.0
+2024-07-29 02:44:43,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230194.66666666666, ans=0.1
+2024-07-29 02:44:47,589 INFO [train.py:1114] (2/4) Epoch 17, batch 9100, loss[loss=0.1906, simple_loss=0.298, pruned_loss=0.04157, over 4932.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2662, pruned_loss=0.04234, over 937162.83 frames. ], batch size: 14, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:44:48,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=230208.0, ans=0.0
+2024-07-29 02:44:57,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=230221.33333333334, ans=0.025
+2024-07-29 02:45:03,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=230234.66666666666, ans=0.025
+2024-07-29 02:45:04,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=230234.66666666666, ans=0.125
+2024-07-29 02:45:06,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=230248.0, ans=10.0
+2024-07-29 02:45:14,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=230261.33333333334, ans=0.125
+2024-07-29 02:45:17,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230261.33333333334, ans=0.0
+2024-07-29 02:45:19,825 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.656e+01 6.287e+01 6.947e+01 9.623e+01, threshold=1.257e+02, percent-clipped=0.0
+2024-07-29 02:45:19,858 INFO [train.py:1114] (2/4) Epoch 17, batch 9150, loss[loss=0.1924, simple_loss=0.2925, pruned_loss=0.0461, over 4809.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2685, pruned_loss=0.04332, over 935874.02 frames. ], batch size: 14, lr: 4.33e-03, grad_scale: 16.0
+2024-07-29 02:45:26,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230288.0, ans=0.0
+2024-07-29 02:45:48,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=230328.0, ans=0.035
+2024-07-29 02:45:52,565 INFO [train.py:1114] (2/4) Epoch 17, batch 9200, loss[loss=0.1464, simple_loss=0.2296, pruned_loss=0.03155, over 4853.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.04252, over 937576.92 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0
+2024-07-29 02:45:56,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=230341.33333333334, ans=0.0
+2024-07-29 02:45:59,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=230354.66666666666, ans=0.125
+2024-07-29 02:46:18,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230394.66666666666, ans=0.125
+2024-07-29 02:46:24,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.574e+01 6.025e+01 6.747e+01 8.782e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-29 02:46:24,601 INFO [train.py:1114] (2/4) Epoch 17, batch 9250, loss[loss=0.1937, simple_loss=0.302, pruned_loss=0.04271, over 4631.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.266, pruned_loss=0.04266, over 938418.66 frames. 
], batch size: 13, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:46:25,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230408.0, ans=0.1 +2024-07-29 02:46:32,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.96 vs. limit=22.5 +2024-07-29 02:46:42,796 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=12.0 +2024-07-29 02:46:43,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=230448.0, ans=0.125 +2024-07-29 02:46:43,440 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.43 vs. limit=22.5 +2024-07-29 02:46:47,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=230448.0, ans=0.125 +2024-07-29 02:46:52,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=230461.33333333334, ans=0.125 +2024-07-29 02:46:52,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=230461.33333333334, ans=0.125 +2024-07-29 02:46:56,560 INFO [train.py:1114] (2/4) Epoch 17, batch 9300, loss[loss=0.154, simple_loss=0.2441, pruned_loss=0.03194, over 4766.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2652, pruned_loss=0.04248, over 937571.37 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:46:58,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=230474.66666666666, ans=0.125 +2024-07-29 02:47:06,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=230488.0, ans=0.025 +2024-07-29 02:47:26,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230528.0, ans=0.1 +2024-07-29 02:47:28,549 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.719e+01 6.284e+01 7.337e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 02:47:28,582 INFO [train.py:1114] (2/4) Epoch 17, batch 9350, loss[loss=0.1901, simple_loss=0.2686, pruned_loss=0.05581, over 4800.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2659, pruned_loss=0.04275, over 934632.12 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:47:33,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=230541.33333333334, ans=0.0 +2024-07-29 02:47:41,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=230568.0, ans=0.2 +2024-07-29 02:47:46,087 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.13 vs. 
limit=15.0 +2024-07-29 02:47:49,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230581.33333333334, ans=0.1 +2024-07-29 02:47:52,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=230581.33333333334, ans=0.025 +2024-07-29 02:48:00,624 INFO [train.py:1114] (2/4) Epoch 17, batch 9400, loss[loss=0.148, simple_loss=0.2417, pruned_loss=0.02713, over 4690.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2659, pruned_loss=0.04273, over 931953.17 frames. ], batch size: 13, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:48:08,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=230621.33333333334, ans=0.0 +2024-07-29 02:48:10,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=230621.33333333334, ans=0.09899494936611666 +2024-07-29 02:48:16,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=230634.66666666666, ans=0.0 +2024-07-29 02:48:24,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=230648.0, ans=0.025 +2024-07-29 02:48:30,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230661.33333333334, ans=0.1 +2024-07-29 02:48:34,672 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.567e+01 6.049e+01 6.960e+01 9.210e+01, threshold=1.210e+02, percent-clipped=0.0 +2024-07-29 02:48:34,720 INFO [train.py:1114] (2/4) Epoch 17, batch 9450, loss[loss=0.14, simple_loss=0.2267, pruned_loss=0.02669, over 4818.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2656, pruned_loss=0.04239, over 931324.48 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:48:47,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=230701.33333333334, ans=0.125 +2024-07-29 02:48:51,515 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:48:51,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=230701.33333333334, ans=0.125 +2024-07-29 02:48:54,918 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=22.5 +2024-07-29 02:48:58,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230714.66666666666, ans=0.1 +2024-07-29 02:49:04,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=230728.0, ans=0.2 +2024-07-29 02:49:06,174 INFO [train.py:1114] (2/4) Epoch 17, batch 9500, loss[loss=0.158, simple_loss=0.2459, pruned_loss=0.03502, over 4705.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2665, pruned_loss=0.04251, over 933677.74 frames. 
], batch size: 12, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:49:08,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=230741.33333333334, ans=0.125 +2024-07-29 02:49:08,749 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=230741.33333333334, ans=0.07 +2024-07-29 02:49:10,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230741.33333333334, ans=0.125 +2024-07-29 02:49:11,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.whiten.whitening_limit, batch_count=230741.33333333334, ans=12.0 +2024-07-29 02:49:12,258 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.76 vs. limit=22.5 +2024-07-29 02:49:12,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=230754.66666666666, ans=0.2 +2024-07-29 02:49:18,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=230768.0, ans=0.125 +2024-07-29 02:49:19,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=230768.0, ans=0.125 +2024-07-29 02:49:26,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=230781.33333333334, ans=0.125 +2024-07-29 02:49:37,931 INFO [train.py:1114] (2/4) Epoch 17, batch 9550, loss[loss=0.1536, simple_loss=0.2391, pruned_loss=0.03403, over 4776.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.04242, over 930870.01 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:49:39,097 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.645e+01 6.246e+01 7.009e+01 1.042e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 02:49:43,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230808.0, ans=0.125 +2024-07-29 02:49:45,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=230821.33333333334, ans=0.0 +2024-07-29 02:49:50,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=230834.66666666666, ans=0.2 +2024-07-29 02:49:55,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230834.66666666666, ans=0.0 +2024-07-29 02:50:09,787 INFO [train.py:1114] (2/4) Epoch 17, batch 9600, loss[loss=0.2556, simple_loss=0.3203, pruned_loss=0.09542, over 3048.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2664, pruned_loss=0.04259, over 929900.31 frames. ], batch size: 35, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:50:20,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.20 vs. 
limit=15.0 +2024-07-29 02:50:31,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230914.66666666666, ans=0.1 +2024-07-29 02:50:32,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230914.66666666666, ans=0.125 +2024-07-29 02:50:43,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=230928.0, ans=0.0 +2024-07-29 02:50:43,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=230941.33333333334, ans=0.2 +2024-07-29 02:50:44,389 INFO [train.py:1114] (2/4) Epoch 17, batch 9650, loss[loss=0.1681, simple_loss=0.2573, pruned_loss=0.03938, over 4847.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.0425, over 926134.06 frames. ], batch size: 16, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:50:44,989 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.884e+01 6.433e+01 7.222e+01 1.107e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 02:51:03,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.54 vs. limit=12.0 +2024-07-29 02:51:05,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.19 vs. limit=15.0 +2024-07-29 02:51:08,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=230981.33333333334, ans=0.07 +2024-07-29 02:51:15,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=230994.66666666666, ans=0.0 +2024-07-29 02:51:16,204 INFO [train.py:1114] (2/4) Epoch 17, batch 9700, loss[loss=0.2246, simple_loss=0.3011, pruned_loss=0.07405, over 4217.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04306, over 924403.99 frames. ], batch size: 25, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:51:22,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=231021.33333333334, ans=0.125 +2024-07-29 02:51:23,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231021.33333333334, ans=0.1 +2024-07-29 02:51:24,591 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.57 vs. limit=10.0 +2024-07-29 02:51:26,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-07-29 02:51:32,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=231034.66666666666, ans=0.125 +2024-07-29 02:51:37,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=231048.0, ans=0.125 +2024-07-29 02:51:47,679 INFO [train.py:1114] (2/4) Epoch 17, batch 9750, loss[loss=0.2126, simple_loss=0.308, pruned_loss=0.0586, over 4696.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2672, pruned_loss=0.04303, over 925133.96 frames. 
], batch size: 15, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:51:48,244 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.114e+01 5.556e+01 6.243e+01 6.911e+01 1.115e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 02:51:55,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=231088.0, ans=0.125 +2024-07-29 02:52:00,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=231101.33333333334, ans=0.125 +2024-07-29 02:52:16,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=231128.0, ans=0.125 +2024-07-29 02:52:18,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=231141.33333333334, ans=0.1 +2024-07-29 02:52:19,075 INFO [train.py:1114] (2/4) Epoch 17, batch 9800, loss[loss=0.1872, simple_loss=0.2708, pruned_loss=0.05177, over 4709.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2669, pruned_loss=0.04342, over 924758.82 frames. ], batch size: 12, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:52:21,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.82 vs. limit=15.0 +2024-07-29 02:52:24,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=231154.66666666666, ans=0.125 +2024-07-29 02:52:25,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.94 vs. limit=22.5 +2024-07-29 02:52:29,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=231154.66666666666, ans=0.125 +2024-07-29 02:52:35,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=231168.0, ans=0.125 +2024-07-29 02:52:35,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=231168.0, ans=0.2 +2024-07-29 02:52:36,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.99 vs. limit=10.0 +2024-07-29 02:52:40,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231181.33333333334, ans=0.125 +2024-07-29 02:52:40,763 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.95 vs. limit=15.0 +2024-07-29 02:52:44,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231194.66666666666, ans=0.1 +2024-07-29 02:52:50,088 INFO [train.py:1114] (2/4) Epoch 17, batch 9850, loss[loss=0.2105, simple_loss=0.303, pruned_loss=0.059, over 4896.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2673, pruned_loss=0.04355, over 927258.58 frames. 
], batch size: 15, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:52:50,658 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.758e+01 6.441e+01 7.212e+01 9.230e+01, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 02:53:05,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=231234.66666666666, ans=0.125 +2024-07-29 02:53:12,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=231248.0, ans=0.2 +2024-07-29 02:53:12,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.22 vs. limit=15.0 +2024-07-29 02:53:13,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=231248.0, ans=0.0 +2024-07-29 02:53:20,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=231261.33333333334, ans=0.125 +2024-07-29 02:53:22,322 INFO [train.py:1114] (2/4) Epoch 17, batch 9900, loss[loss=0.1902, simple_loss=0.2809, pruned_loss=0.04977, over 4820.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2679, pruned_loss=0.044, over 926861.02 frames. ], batch size: 16, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:53:44,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0 +2024-07-29 02:53:47,760 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0 +2024-07-29 02:53:48,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231328.0, ans=0.125 +2024-07-29 02:53:50,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=231328.0, ans=0.09899494936611666 +2024-07-29 02:53:53,560 INFO [train.py:1114] (2/4) Epoch 17, batch 9950, loss[loss=0.1596, simple_loss=0.2387, pruned_loss=0.04025, over 4787.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.269, pruned_loss=0.04503, over 930004.99 frames. ], batch size: 11, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:53:54,161 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.766e+01 6.356e+01 7.245e+01 1.147e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 02:53:54,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=231341.33333333334, ans=0.0 +2024-07-29 02:54:12,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=231381.33333333334, ans=0.2 +2024-07-29 02:54:22,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=231394.66666666666, ans=0.2 +2024-07-29 02:54:24,959 INFO [train.py:1114] (2/4) Epoch 17, batch 10000, loss[loss=0.1552, simple_loss=0.2581, pruned_loss=0.02618, over 4639.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2715, pruned_loss=0.04563, over 927320.67 frames. 
], batch size: 16, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:54:29,625 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.26 vs. limit=15.0 +2024-07-29 02:54:32,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=231421.33333333334, ans=15.0 +2024-07-29 02:54:49,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231461.33333333334, ans=0.0 +2024-07-29 02:55:00,062 INFO [train.py:1114] (2/4) Epoch 17, batch 10050, loss[loss=0.1817, simple_loss=0.2699, pruned_loss=0.0467, over 3530.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2747, pruned_loss=0.0472, over 914902.01 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:55:00,783 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 5.675e+01 6.187e+01 6.969e+01 9.766e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 02:55:08,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=231488.0, ans=0.025 +2024-07-29 02:55:09,273 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=231488.0, ans=0.125 +2024-07-29 02:55:11,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=231488.0, ans=0.125 +2024-07-29 02:55:13,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=231488.0, ans=0.04949747468305833 +2024-07-29 02:55:13,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=231488.0, ans=0.0 +2024-07-29 02:55:16,164 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.69 vs. limit=15.0 +2024-07-29 02:55:17,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=231501.33333333334, ans=0.125 +2024-07-29 02:55:24,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=231514.66666666666, ans=0.125 +2024-07-29 02:55:27,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231528.0, ans=0.1 +2024-07-29 02:55:30,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=231528.0, ans=0.125 +2024-07-29 02:55:30,707 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:55:35,513 INFO [train.py:1114] (2/4) Epoch 17, batch 10100, loss[loss=0.1987, simple_loss=0.2791, pruned_loss=0.05918, over 3629.00 frames. ], tot_loss[loss=0.19, simple_loss=0.278, pruned_loss=0.05096, over 861759.85 frames. 
], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:55:38,721 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:55:55,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=231568.0, ans=0.04949747468305833 +2024-07-29 02:55:55,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.94 vs. limit=22.5 +2024-07-29 02:55:58,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=231568.0, ans=0.125 +2024-07-29 02:56:00,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=231581.33333333334, ans=0.0 +2024-07-29 02:56:01,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=231581.33333333334, ans=0.0 +2024-07-29 02:56:10,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231594.66666666666, ans=0.125 +2024-07-29 02:56:14,011 INFO [train.py:1114] (2/4) Epoch 17, batch 10150, loss[loss=0.2213, simple_loss=0.3006, pruned_loss=0.07098, over 3456.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2814, pruned_loss=0.05436, over 820578.67 frames. ], batch size: 36, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:56:14,590 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+01 6.975e+01 7.380e+01 8.032e+01 1.303e+02, threshold=1.476e+02, percent-clipped=1.0 +2024-07-29 02:56:18,121 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.27 vs. limit=22.5 +2024-07-29 02:56:28,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231634.66666666666, ans=0.1 +2024-07-29 02:56:28,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=231634.66666666666, ans=0.125 +2024-07-29 02:56:32,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-07-29 02:56:33,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231648.0, ans=0.0 +2024-07-29 02:56:37,613 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:56:45,757 INFO [train.py:1114] (2/4) Epoch 17, batch 10200, loss[loss=0.2201, simple_loss=0.2925, pruned_loss=0.0739, over 3253.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2845, pruned_loss=0.05718, over 789161.23 frames. 
], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:56:45,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=231674.66666666666, ans=0.09899494936611666 +2024-07-29 02:56:47,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=231674.66666666666, ans=0.125 +2024-07-29 02:56:47,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0 +2024-07-29 02:56:49,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=231674.66666666666, ans=0.0 +2024-07-29 02:56:49,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=231674.66666666666, ans=0.125 +2024-07-29 02:56:57,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=231688.0, ans=0.0 +2024-07-29 02:58:12,641 INFO [train.py:1114] (2/4) Epoch 18, batch 0, loss[loss=0.1588, simple_loss=0.2537, pruned_loss=0.03195, over 4840.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2537, pruned_loss=0.03195, over 4840.00 frames. ], batch size: 12, lr: 4.20e-03, grad_scale: 32.0 +2024-07-29 02:58:12,642 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 02:58:24,198 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1629, simple_loss=0.2668, pruned_loss=0.02955, over 944034.00 frames. +2024-07-29 02:58:24,198 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 02:58:26,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231705.33333333334, ans=0.1 +2024-07-29 02:58:26,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231705.33333333334, ans=0.125 +2024-07-29 02:58:27,757 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=231705.33333333334, ans=0.2 +2024-07-29 02:58:31,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=231718.66666666666, ans=0.05 +2024-07-29 02:58:37,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=231732.0, ans=0.0 +2024-07-29 02:58:41,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0 +2024-07-29 02:58:42,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231732.0, ans=0.0 +2024-07-29 02:58:44,126 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 6.224e+01 6.772e+01 7.416e+01 8.385e+01, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 02:58:46,561 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.88 vs. 
limit=15.0 +2024-07-29 02:58:49,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=231745.33333333334, ans=0.125 +2024-07-29 02:58:55,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231758.66666666666, ans=0.125 +2024-07-29 02:58:57,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=231758.66666666666, ans=0.04949747468305833 +2024-07-29 02:58:59,049 INFO [train.py:1114] (2/4) Epoch 18, batch 50, loss[loss=0.1484, simple_loss=0.225, pruned_loss=0.03585, over 4615.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2694, pruned_loss=0.04506, over 206352.23 frames. ], batch size: 11, lr: 4.20e-03, grad_scale: 32.0 +2024-07-29 02:59:11,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=231785.33333333334, ans=0.125 +2024-07-29 02:59:15,234 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=231798.66666666666, ans=0.125 +2024-07-29 02:59:20,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231812.0, ans=0.0 +2024-07-29 02:59:20,681 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:59:28,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=231825.33333333334, ans=0.125 +2024-07-29 02:59:34,495 INFO [train.py:1114] (2/4) Epoch 18, batch 100, loss[loss=0.1884, simple_loss=0.2733, pruned_loss=0.05168, over 4633.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2696, pruned_loss=0.04376, over 365609.96 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 02:59:34,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231838.66666666666, ans=0.125 +2024-07-29 02:59:39,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=231838.66666666666, ans=0.95 +2024-07-29 02:59:50,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=231865.33333333334, ans=0.125 +2024-07-29 02:59:52,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=231865.33333333334, ans=0.125 +2024-07-29 02:59:53,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=231865.33333333334, ans=0.0 +2024-07-29 02:59:54,427 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.467e+01 5.995e+01 6.645e+01 8.215e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 03:00:04,217 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.27 vs. limit=15.0 +2024-07-29 03:00:07,755 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:00:08,834 INFO [train.py:1114] (2/4) Epoch 18, batch 150, loss[loss=0.1635, simple_loss=0.2469, pruned_loss=0.04004, over 4623.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2659, pruned_loss=0.04196, over 494018.27 frames. 
], batch size: 11, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:00:17,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=231918.66666666666, ans=0.0 +2024-07-29 03:00:19,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=231918.66666666666, ans=0.125 +2024-07-29 03:00:25,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=231932.0, ans=0.125 +2024-07-29 03:00:33,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=231945.33333333334, ans=0.025 +2024-07-29 03:00:37,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231958.66666666666, ans=0.0 +2024-07-29 03:00:42,582 INFO [train.py:1114] (2/4) Epoch 18, batch 200, loss[loss=0.1752, simple_loss=0.2685, pruned_loss=0.04088, over 4483.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.266, pruned_loss=0.04213, over 593489.52 frames. ], batch size: 21, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:00:48,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231985.33333333334, ans=0.1 +2024-07-29 03:01:01,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.92 vs. limit=10.0 +2024-07-29 03:01:08,541 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.881e+01 6.844e+01 7.850e+01 1.252e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-29 03:01:13,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=232012.0, ans=0.125 +2024-07-29 03:01:50,241 INFO [train.py:1114] (2/4) Epoch 18, batch 250, loss[loss=0.177, simple_loss=0.2689, pruned_loss=0.04253, over 4646.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2666, pruned_loss=0.0426, over 670541.73 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:01:55,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=232038.66666666666, ans=0.125 +2024-07-29 03:01:56,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=232052.0, ans=0.0 +2024-07-29 03:02:22,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=232052.0, ans=0.0 +2024-07-29 03:02:34,607 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.76 vs. limit=10.0 +2024-07-29 03:02:37,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=232078.66666666666, ans=0.2 +2024-07-29 03:02:38,814 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=232078.66666666666, ans=0.125 +2024-07-29 03:02:58,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.93 vs. 
limit=15.0 +2024-07-29 03:02:58,741 INFO [train.py:1114] (2/4) Epoch 18, batch 300, loss[loss=0.1884, simple_loss=0.2799, pruned_loss=0.04842, over 4808.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2648, pruned_loss=0.04171, over 730185.27 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:03:07,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=232118.66666666666, ans=0.125 +2024-07-29 03:03:14,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.44 vs. limit=10.0 +2024-07-29 03:03:17,681 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.434e+01 5.467e+01 6.061e+01 6.995e+01 1.248e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 03:03:21,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=232145.33333333334, ans=0.2 +2024-07-29 03:03:29,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=232158.66666666666, ans=0.125 +2024-07-29 03:03:31,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232158.66666666666, ans=0.125 +2024-07-29 03:03:32,388 INFO [train.py:1114] (2/4) Epoch 18, batch 350, loss[loss=0.1719, simple_loss=0.2599, pruned_loss=0.04197, over 4936.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04128, over 776401.01 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:03:37,417 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:03:40,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=232185.33333333334, ans=0.125 +2024-07-29 03:03:50,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=232198.66666666666, ans=0.0 +2024-07-29 03:03:54,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232212.0, ans=0.125 +2024-07-29 03:03:55,977 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232212.0, ans=0.125 +2024-07-29 03:04:01,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=232225.33333333334, ans=0.2 +2024-07-29 03:04:05,785 INFO [train.py:1114] (2/4) Epoch 18, batch 400, loss[loss=0.1676, simple_loss=0.2556, pruned_loss=0.03983, over 4687.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.04148, over 814021.65 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:04:10,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232238.66666666666, ans=0.125 +2024-07-29 03:04:12,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.10 vs. 
limit=15.0 +2024-07-29 03:04:26,840 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.483e+01 6.110e+01 6.835e+01 9.648e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 03:04:40,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=232292.0, ans=0.125 +2024-07-29 03:04:41,609 INFO [train.py:1114] (2/4) Epoch 18, batch 450, loss[loss=0.1868, simple_loss=0.2844, pruned_loss=0.04462, over 4631.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04211, over 839536.68 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:04:41,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232305.33333333334, ans=0.125 +2024-07-29 03:04:53,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=232318.66666666666, ans=0.125 +2024-07-29 03:04:56,463 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.17 vs. limit=22.5 +2024-07-29 03:05:02,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=232345.33333333334, ans=0.09899494936611666 +2024-07-29 03:05:14,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=232372.0, ans=0.5 +2024-07-29 03:05:15,108 INFO [train.py:1114] (2/4) Epoch 18, batch 500, loss[loss=0.2397, simple_loss=0.3207, pruned_loss=0.07936, over 4686.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2649, pruned_loss=0.04207, over 862091.00 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:05:20,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232372.0, ans=0.1 +2024-07-29 03:05:27,821 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.48 vs. limit=12.0 +2024-07-29 03:05:31,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=12.0 +2024-07-29 03:05:34,183 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.530e+01 5.559e+01 6.071e+01 6.831e+01 9.618e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-29 03:05:44,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=232425.33333333334, ans=0.025 +2024-07-29 03:05:48,922 INFO [train.py:1114] (2/4) Epoch 18, batch 550, loss[loss=0.1783, simple_loss=0.2679, pruned_loss=0.04431, over 4639.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2648, pruned_loss=0.04218, over 878555.69 frames. ], batch size: 17, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:05:51,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.79 vs. 
limit=15.0 +2024-07-29 03:05:53,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=232438.66666666666, ans=0.2 +2024-07-29 03:05:58,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-07-29 03:06:00,564 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=232452.0, ans=0.1 +2024-07-29 03:06:03,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=232465.33333333334, ans=0.0 +2024-07-29 03:06:08,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=232465.33333333334, ans=0.0 +2024-07-29 03:06:26,532 INFO [train.py:1114] (2/4) Epoch 18, batch 600, loss[loss=0.2074, simple_loss=0.3002, pruned_loss=0.0573, over 4625.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2651, pruned_loss=0.04208, over 892935.35 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:06:40,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=232532.0, ans=0.0 +2024-07-29 03:06:41,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=232532.0, ans=0.125 +2024-07-29 03:06:44,911 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.582e+01 6.053e+01 7.206e+01 1.079e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 03:06:51,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=232545.33333333334, ans=0.2 +2024-07-29 03:06:57,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-07-29 03:07:03,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=232572.0, ans=0.07 +2024-07-29 03:07:03,983 INFO [train.py:1114] (2/4) Epoch 18, batch 650, loss[loss=0.1475, simple_loss=0.2363, pruned_loss=0.02933, over 4756.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2658, pruned_loss=0.04272, over 904518.96 frames. 
], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:07:08,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=232572.0, ans=0.2 +2024-07-29 03:07:28,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232598.66666666666, ans=0.125 +2024-07-29 03:07:29,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=232598.66666666666, ans=0.125 +2024-07-29 03:07:32,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=232612.0, ans=0.035 +2024-07-29 03:07:35,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232612.0, ans=0.125 +2024-07-29 03:07:37,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232612.0, ans=0.125 +2024-07-29 03:07:47,051 INFO [train.py:1114] (2/4) Epoch 18, batch 700, loss[loss=0.1716, simple_loss=0.2716, pruned_loss=0.03586, over 4635.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2661, pruned_loss=0.04256, over 912196.17 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:07:49,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=232638.66666666666, ans=0.0 +2024-07-29 03:07:49,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=232638.66666666666, ans=0.2 +2024-07-29 03:07:51,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=232638.66666666666, ans=0.2 +2024-07-29 03:07:52,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=232638.66666666666, ans=0.025 +2024-07-29 03:08:03,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=232665.33333333334, ans=0.0 +2024-07-29 03:08:05,613 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.680e+01 6.121e+01 6.839e+01 1.044e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 03:08:12,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232678.66666666666, ans=0.125 +2024-07-29 03:08:19,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=232705.33333333334, ans=0.125 +2024-07-29 03:08:20,435 INFO [train.py:1114] (2/4) Epoch 18, batch 750, loss[loss=0.2084, simple_loss=0.2963, pruned_loss=0.06023, over 4693.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2654, pruned_loss=0.04211, over 918955.71 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:08:47,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232732.0, ans=0.125 +2024-07-29 03:08:57,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.54 vs. 
limit=15.0 +2024-07-29 03:08:58,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232745.33333333334, ans=0.125 +2024-07-29 03:08:58,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=232745.33333333334, ans=0.2 +2024-07-29 03:09:08,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=232758.66666666666, ans=0.0 +2024-07-29 03:09:10,289 INFO [train.py:1114] (2/4) Epoch 18, batch 800, loss[loss=0.1762, simple_loss=0.2691, pruned_loss=0.04168, over 4848.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2654, pruned_loss=0.04219, over 923834.11 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:09:10,981 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=232772.0, ans=0.125 +2024-07-29 03:09:20,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232785.33333333334, ans=0.125 +2024-07-29 03:09:22,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=232785.33333333334, ans=0.125 +2024-07-29 03:09:22,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232798.66666666666, ans=0.125 +2024-07-29 03:09:28,889 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.634e+01 6.203e+01 6.793e+01 1.019e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 03:09:29,864 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.69 vs. limit=22.5 +2024-07-29 03:09:42,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=232825.33333333334, ans=0.125 +2024-07-29 03:09:43,923 INFO [train.py:1114] (2/4) Epoch 18, batch 850, loss[loss=0.1664, simple_loss=0.2682, pruned_loss=0.03224, over 4648.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2652, pruned_loss=0.04213, over 927865.37 frames. ], batch size: 14, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:09:50,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=232852.0, ans=0.0 +2024-07-29 03:10:05,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=232878.66666666666, ans=0.2 +2024-07-29 03:10:08,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232878.66666666666, ans=0.1 +2024-07-29 03:10:19,909 INFO [train.py:1114] (2/4) Epoch 18, batch 900, loss[loss=0.1307, simple_loss=0.2123, pruned_loss=0.02455, over 4854.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2654, pruned_loss=0.04244, over 929075.66 frames. 
], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:10:31,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=232918.66666666666, ans=0.125 +2024-07-29 03:10:44,079 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.656e+01 6.090e+01 7.210e+01 1.010e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 03:10:44,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232932.0, ans=0.125 +2024-07-29 03:10:56,203 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232945.33333333334, ans=0.1 +2024-07-29 03:11:00,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=232958.66666666666, ans=0.125 +2024-07-29 03:11:04,785 INFO [train.py:1114] (2/4) Epoch 18, batch 950, loss[loss=0.1663, simple_loss=0.2582, pruned_loss=0.03721, over 4778.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2658, pruned_loss=0.04222, over 930043.18 frames. ], batch size: 12, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:11:24,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.68 vs. limit=15.0 +2024-07-29 03:11:27,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233012.0, ans=0.125 +2024-07-29 03:11:32,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=233012.0, ans=0.2 +2024-07-29 03:11:36,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=233025.33333333334, ans=0.0 +2024-07-29 03:11:37,960 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:11:39,990 INFO [train.py:1114] (2/4) Epoch 18, batch 1000, loss[loss=0.166, simple_loss=0.2602, pruned_loss=0.03591, over 4959.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2669, pruned_loss=0.04247, over 929408.39 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:11:50,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=233052.0, ans=0.125 +2024-07-29 03:11:52,227 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.00 vs. limit=6.0 +2024-07-29 03:11:52,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=233065.33333333334, ans=0.0 +2024-07-29 03:11:54,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=233065.33333333334, ans=0.0 +2024-07-29 03:11:58,671 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.660e+01 6.268e+01 7.166e+01 1.041e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 03:11:59,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.88 vs. 
limit=22.5 +2024-07-29 03:12:14,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=233092.0, ans=0.95 +2024-07-29 03:12:15,360 INFO [train.py:1114] (2/4) Epoch 18, batch 1050, loss[loss=0.196, simple_loss=0.2814, pruned_loss=0.05535, over 4867.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04249, over 931851.15 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:12:32,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=233118.66666666666, ans=0.2 +2024-07-29 03:12:57,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=233145.33333333334, ans=0.1 +2024-07-29 03:12:59,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=233145.33333333334, ans=0.125 +2024-07-29 03:12:59,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.82 vs. limit=22.5 +2024-07-29 03:13:06,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.85 vs. limit=12.0 +2024-07-29 03:13:12,967 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.66 vs. limit=6.0 +2024-07-29 03:13:24,182 INFO [train.py:1114] (2/4) Epoch 18, batch 1100, loss[loss=0.1613, simple_loss=0.252, pruned_loss=0.03528, over 4903.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2644, pruned_loss=0.04203, over 934756.40 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:13:26,330 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=233172.0, ans=0.015 +2024-07-29 03:13:26,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=233172.0, ans=0.0 +2024-07-29 03:13:52,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=233185.33333333334, ans=0.0 +2024-07-29 03:14:26,849 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.370e+01 5.951e+01 6.699e+01 1.093e+02, threshold=1.190e+02, percent-clipped=0.0 +2024-07-29 03:14:42,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=233212.0, ans=0.125 +2024-07-29 03:14:45,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=233212.0, ans=0.0 +2024-07-29 03:15:12,287 INFO [train.py:1114] (2/4) Epoch 18, batch 1150, loss[loss=0.1708, simple_loss=0.2635, pruned_loss=0.03911, over 4886.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04209, over 934605.83 frames. 
], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:15:25,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=233252.0, ans=0.125 +2024-07-29 03:15:27,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=233252.0, ans=0.2 +2024-07-29 03:15:43,933 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.47 vs. limit=22.5 +2024-07-29 03:15:54,164 INFO [train.py:1114] (2/4) Epoch 18, batch 1200, loss[loss=0.1706, simple_loss=0.2686, pruned_loss=0.03634, over 4869.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2661, pruned_loss=0.04273, over 933128.30 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:15:55,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233305.33333333334, ans=0.125 +2024-07-29 03:15:55,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=15.0 +2024-07-29 03:16:02,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=233318.66666666666, ans=0.125 +2024-07-29 03:19:13,688 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.527e+01 5.938e+01 6.741e+01 1.045e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 03:19:15,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=233345.33333333334, ans=0.0 +2024-07-29 03:19:21,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=233358.66666666666, ans=0.125 +2024-07-29 03:19:25,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=233358.66666666666, ans=0.0 +2024-07-29 03:19:30,339 INFO [train.py:1114] (2/4) Epoch 18, batch 1250, loss[loss=0.1796, simple_loss=0.2732, pruned_loss=0.04298, over 4809.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2653, pruned_loss=0.04177, over 937123.02 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:19:33,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=233372.0, ans=0.125 +2024-07-29 03:19:46,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.87 vs. limit=22.5 +2024-07-29 03:19:53,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.31 vs. limit=6.0 +2024-07-29 03:20:03,282 INFO [train.py:1114] (2/4) Epoch 18, batch 1300, loss[loss=0.1883, simple_loss=0.2822, pruned_loss=0.04724, over 4756.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2649, pruned_loss=0.04143, over 938499.08 frames. 
], batch size: 19, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:20:04,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=233438.66666666666, ans=0.0 +2024-07-29 03:20:20,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=233465.33333333334, ans=0.125 +2024-07-29 03:20:21,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.468e+01 6.194e+01 6.881e+01 8.786e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 03:20:30,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=233492.0, ans=0.0 +2024-07-29 03:20:32,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233492.0, ans=0.1 +2024-07-29 03:20:35,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=233492.0, ans=0.125 +2024-07-29 03:20:38,025 INFO [train.py:1114] (2/4) Epoch 18, batch 1350, loss[loss=0.1438, simple_loss=0.2437, pruned_loss=0.02198, over 4761.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2645, pruned_loss=0.04079, over 940701.26 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:20:40,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=233505.33333333334, ans=0.0 +2024-07-29 03:20:48,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233505.33333333334, ans=0.125 +2024-07-29 03:21:39,170 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0 +2024-07-29 03:22:33,522 INFO [train.py:1114] (2/4) Epoch 18, batch 1400, loss[loss=0.1446, simple_loss=0.2192, pruned_loss=0.03494, over 4708.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2635, pruned_loss=0.04058, over 942774.03 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:22:37,644 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=233572.0, ans=0.125 +2024-07-29 03:22:38,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.01 vs. 
limit=15.0 +2024-07-29 03:22:39,023 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=233572.0, ans=0.125 +2024-07-29 03:22:45,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=233585.33333333334, ans=0.2 +2024-07-29 03:22:45,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=233585.33333333334, ans=0.0 +2024-07-29 03:22:57,219 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.887e+01 6.413e+01 7.105e+01 1.184e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 03:23:28,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233625.33333333334, ans=0.125 +2024-07-29 03:23:33,204 INFO [train.py:1114] (2/4) Epoch 18, batch 1450, loss[loss=0.188, simple_loss=0.2765, pruned_loss=0.04978, over 4689.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2649, pruned_loss=0.04111, over 943192.16 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:23:34,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=233638.66666666666, ans=0.125 +2024-07-29 03:23:41,927 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=233652.0, ans=0.0 +2024-07-29 03:23:46,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=233652.0, ans=0.1 +2024-07-29 03:23:47,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233665.33333333334, ans=0.125 +2024-07-29 03:23:49,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=233665.33333333334, ans=0.125 +2024-07-29 03:24:04,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=233692.0, ans=0.2 +2024-07-29 03:24:08,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=233692.0, ans=0.125 +2024-07-29 03:24:09,291 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233705.33333333334, ans=0.125 +2024-07-29 03:24:09,783 INFO [train.py:1114] (2/4) Epoch 18, batch 1500, loss[loss=0.175, simple_loss=0.2675, pruned_loss=0.04126, over 4815.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2647, pruned_loss=0.04081, over 942693.64 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:24:28,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233718.66666666666, ans=0.125 +2024-07-29 03:24:39,868 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.75 vs. 
limit=15.0 +2024-07-29 03:24:44,218 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.562e+01 6.096e+01 6.763e+01 1.145e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 03:24:59,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=233758.66666666666, ans=0.125 +2024-07-29 03:24:59,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=233758.66666666666, ans=0.125 +2024-07-29 03:25:14,247 INFO [train.py:1114] (2/4) Epoch 18, batch 1550, loss[loss=0.1759, simple_loss=0.2773, pruned_loss=0.03725, over 4916.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2647, pruned_loss=0.04095, over 939219.04 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:25:17,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-29 03:25:23,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233772.0, ans=0.125 +2024-07-29 03:25:29,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233785.33333333334, ans=0.125 +2024-07-29 03:25:31,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=233785.33333333334, ans=0.125 +2024-07-29 03:25:34,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=233798.66666666666, ans=0.125 +2024-07-29 03:25:35,367 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.86 vs. limit=22.5 +2024-07-29 03:25:37,009 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:25:38,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=233812.0, ans=0.125 +2024-07-29 03:25:42,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=233812.0, ans=0.125 +2024-07-29 03:25:52,677 INFO [train.py:1114] (2/4) Epoch 18, batch 1600, loss[loss=0.1758, simple_loss=0.2726, pruned_loss=0.03953, over 4890.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2652, pruned_loss=0.04114, over 937921.37 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:25:53,210 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.95 vs. limit=15.0 +2024-07-29 03:25:56,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233838.66666666666, ans=0.125 +2024-07-29 03:25:58,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233838.66666666666, ans=0.125 +2024-07-29 03:26:00,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.86 vs. 
limit=15.0 +2024-07-29 03:26:00,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=233852.0, ans=0.0 +2024-07-29 03:26:07,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=233865.33333333334, ans=0.015 +2024-07-29 03:26:12,708 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.724e+01 6.283e+01 7.250e+01 9.354e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 03:26:14,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=233878.66666666666, ans=0.125 +2024-07-29 03:26:25,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=233878.66666666666, ans=0.025 +2024-07-29 03:26:31,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=233892.0, ans=0.125 +2024-07-29 03:26:32,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=233892.0, ans=0.2 +2024-07-29 03:26:37,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=233905.33333333334, ans=0.0 +2024-07-29 03:26:37,507 INFO [train.py:1114] (2/4) Epoch 18, batch 1650, loss[loss=0.1661, simple_loss=0.2658, pruned_loss=0.0332, over 4668.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2657, pruned_loss=0.0418, over 938067.18 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:26:38,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=233905.33333333334, ans=0.025 +2024-07-29 03:26:40,076 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.83 vs. limit=10.0 +2024-07-29 03:26:40,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=233905.33333333334, ans=0.025 +2024-07-29 03:26:47,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=233918.66666666666, ans=0.2 +2024-07-29 03:27:06,427 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=233945.33333333334, ans=0.0 +2024-07-29 03:27:25,060 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.91 vs. limit=12.0 +2024-07-29 03:27:42,556 INFO [train.py:1114] (2/4) Epoch 18, batch 1700, loss[loss=0.1471, simple_loss=0.2236, pruned_loss=0.03534, over 4719.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2656, pruned_loss=0.04141, over 939582.92 frames. 
], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:27:49,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=233985.33333333334, ans=0.1 +2024-07-29 03:28:01,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=233998.66666666666, ans=0.125 +2024-07-29 03:28:03,948 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.990e+01 5.769e+01 6.208e+01 7.214e+01 1.058e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 03:28:08,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234012.0, ans=0.125 +2024-07-29 03:28:10,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=234012.0, ans=0.025 +2024-07-29 03:28:10,407 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:28:12,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=234025.33333333334, ans=0.0 +2024-07-29 03:28:18,160 INFO [train.py:1114] (2/4) Epoch 18, batch 1750, loss[loss=0.1835, simple_loss=0.2696, pruned_loss=0.0487, over 4783.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2648, pruned_loss=0.04089, over 940282.42 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:28:18,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=234038.66666666666, ans=0.2 +2024-07-29 03:28:27,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=234052.0, ans=0.125 +2024-07-29 03:28:32,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234065.33333333334, ans=0.125 +2024-07-29 03:28:33,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=234065.33333333334, ans=0.2 +2024-07-29 03:28:37,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=234065.33333333334, ans=0.07 +2024-07-29 03:28:43,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=234078.66666666666, ans=0.125 +2024-07-29 03:28:51,353 INFO [train.py:1114] (2/4) Epoch 18, batch 1800, loss[loss=0.1537, simple_loss=0.2466, pruned_loss=0.03039, over 4638.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2654, pruned_loss=0.04158, over 940977.03 frames. 
], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:28:55,503 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234105.33333333334, ans=0.125 +2024-07-29 03:29:01,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=234118.66666666666, ans=0.125 +2024-07-29 03:29:07,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234132.0, ans=0.125 +2024-07-29 03:29:10,855 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.659e+01 6.366e+01 7.110e+01 1.077e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-29 03:29:11,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=234145.33333333334, ans=0.2 +2024-07-29 03:29:13,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=234145.33333333334, ans=0.0 +2024-07-29 03:29:17,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=234158.66666666666, ans=0.0 +2024-07-29 03:29:21,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=234158.66666666666, ans=0.0 +2024-07-29 03:29:27,018 INFO [train.py:1114] (2/4) Epoch 18, batch 1850, loss[loss=0.1936, simple_loss=0.2898, pruned_loss=0.0487, over 4801.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2658, pruned_loss=0.04193, over 940719.06 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:29:37,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234185.33333333334, ans=0.125 +2024-07-29 03:29:45,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=234198.66666666666, ans=0.0 +2024-07-29 03:29:52,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234212.0, ans=0.1 +2024-07-29 03:29:56,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=234225.33333333334, ans=0.125 +2024-07-29 03:30:01,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=234225.33333333334, ans=0.125 +2024-07-29 03:30:03,546 INFO [train.py:1114] (2/4) Epoch 18, batch 1900, loss[loss=0.1911, simple_loss=0.2916, pruned_loss=0.04529, over 4654.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2671, pruned_loss=0.04241, over 941794.44 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:30:19,714 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.69 vs. limit=15.0 +2024-07-29 03:30:22,650 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.553e+01 6.272e+01 7.085e+01 9.977e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 03:30:23,688 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.77 vs. 
limit=12.0 +2024-07-29 03:30:28,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.62 vs. limit=10.0 +2024-07-29 03:30:30,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=234292.0, ans=0.0 +2024-07-29 03:30:36,380 INFO [train.py:1114] (2/4) Epoch 18, batch 1950, loss[loss=0.2109, simple_loss=0.288, pruned_loss=0.06689, over 4898.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2683, pruned_loss=0.04302, over 943819.55 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:30:38,803 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.28 vs. limit=15.0 +2024-07-29 03:30:41,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234305.33333333334, ans=0.1 +2024-07-29 03:30:50,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=234332.0, ans=0.05 +2024-07-29 03:30:56,362 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-07-29 03:30:58,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=234345.33333333334, ans=0.0 +2024-07-29 03:31:04,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=234358.66666666666, ans=0.125 +2024-07-29 03:31:07,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=234358.66666666666, ans=0.125 +2024-07-29 03:31:09,278 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-07-29 03:31:10,160 INFO [train.py:1114] (2/4) Epoch 18, batch 2000, loss[loss=0.1537, simple_loss=0.2374, pruned_loss=0.035, over 4824.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2692, pruned_loss=0.04319, over 941363.58 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:31:22,746 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.46 vs. limit=6.0 +2024-07-29 03:31:27,485 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.37 vs. 
limit=15.0 +2024-07-29 03:31:33,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234398.66666666666, ans=0.0 +2024-07-29 03:31:33,961 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.658e+01 6.506e+01 7.206e+01 1.041e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-29 03:31:41,510 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=234425.33333333334, ans=0.0 +2024-07-29 03:31:42,825 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:31:47,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=234438.66666666666, ans=0.125 +2024-07-29 03:31:48,079 INFO [train.py:1114] (2/4) Epoch 18, batch 2050, loss[loss=0.1664, simple_loss=0.2468, pruned_loss=0.04295, over 4614.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2682, pruned_loss=0.04303, over 938964.02 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:31:51,152 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.42 vs. limit=22.5 +2024-07-29 03:31:55,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=22.5 +2024-07-29 03:32:59,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=234492.0, ans=0.1 +2024-07-29 03:33:00,577 INFO [train.py:1114] (2/4) Epoch 18, batch 2100, loss[loss=0.179, simple_loss=0.2835, pruned_loss=0.0372, over 4758.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2669, pruned_loss=0.0424, over 940653.83 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:33:01,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=234505.33333333334, ans=0.2 +2024-07-29 03:33:05,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=234505.33333333334, ans=0.0 +2024-07-29 03:33:11,619 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0 +2024-07-29 03:33:15,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=234518.66666666666, ans=0.0 +2024-07-29 03:33:19,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234532.0, ans=0.125 +2024-07-29 03:33:22,959 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.524e+01 6.278e+01 7.367e+01 1.141e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 03:33:52,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.90 vs. 
limit=22.5 +2024-07-29 03:33:57,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=234558.66666666666, ans=0.125 +2024-07-29 03:34:00,010 INFO [train.py:1114] (2/4) Epoch 18, batch 2150, loss[loss=0.1586, simple_loss=0.2488, pruned_loss=0.03421, over 4899.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2671, pruned_loss=0.04245, over 943877.72 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:34:00,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234572.0, ans=0.125 +2024-07-29 03:34:01,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=234572.0, ans=0.2 +2024-07-29 03:34:02,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=234572.0, ans=0.125 +2024-07-29 03:34:04,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=234572.0, ans=0.05 +2024-07-29 03:34:10,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=234585.33333333334, ans=0.1 +2024-07-29 03:34:12,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=234585.33333333334, ans=0.0 +2024-07-29 03:34:15,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=234598.66666666666, ans=0.125 +2024-07-29 03:34:18,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=234598.66666666666, ans=0.025 +2024-07-29 03:34:36,821 INFO [train.py:1114] (2/4) Epoch 18, batch 2200, loss[loss=0.1537, simple_loss=0.2493, pruned_loss=0.02904, over 4802.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2662, pruned_loss=0.0422, over 943249.77 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:34:38,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=234638.66666666666, ans=0.035 +2024-07-29 03:34:57,728 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.703e+01 6.363e+01 7.397e+01 1.281e+02, threshold=1.273e+02, percent-clipped=1.0 +2024-07-29 03:35:00,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234678.66666666666, ans=0.125 +2024-07-29 03:35:02,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=234678.66666666666, ans=0.0 +2024-07-29 03:35:11,888 INFO [train.py:1114] (2/4) Epoch 18, batch 2250, loss[loss=0.1699, simple_loss=0.2588, pruned_loss=0.04052, over 4690.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.04242, over 942175.15 frames. 
], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:35:15,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234705.33333333334, ans=0.125 +2024-07-29 03:35:20,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=234718.66666666666, ans=0.125 +2024-07-29 03:35:26,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=234732.0, ans=0.025 +2024-07-29 03:35:36,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=234745.33333333334, ans=0.0 +2024-07-29 03:35:45,221 INFO [train.py:1114] (2/4) Epoch 18, batch 2300, loss[loss=0.1713, simple_loss=0.2532, pruned_loss=0.04473, over 4941.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2643, pruned_loss=0.04144, over 940183.20 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:35:46,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=234772.0, ans=0.125 +2024-07-29 03:35:53,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=234785.33333333334, ans=0.0 +2024-07-29 03:35:55,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=234785.33333333334, ans=0.125 +2024-07-29 03:36:01,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=234798.66666666666, ans=0.125 +2024-07-29 03:36:06,806 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.677e+01 6.195e+01 6.878e+01 1.027e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 03:36:12,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=234812.0, ans=0.125 +2024-07-29 03:36:12,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=234812.0, ans=0.05 +2024-07-29 03:36:16,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=234825.33333333334, ans=0.0 +2024-07-29 03:36:17,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234825.33333333334, ans=0.1 +2024-07-29 03:36:17,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=234825.33333333334, ans=0.0 +2024-07-29 03:36:18,631 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.69 vs. limit=15.0 +2024-07-29 03:36:20,961 INFO [train.py:1114] (2/4) Epoch 18, batch 2350, loss[loss=0.183, simple_loss=0.2789, pruned_loss=0.04355, over 4632.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2647, pruned_loss=0.04164, over 941985.28 frames. 
], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:36:33,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=234852.0, ans=0.125 +2024-07-29 03:36:40,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=234865.33333333334, ans=0.025 +2024-07-29 03:36:41,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=234878.66666666666, ans=0.0 +2024-07-29 03:36:55,203 INFO [train.py:1114] (2/4) Epoch 18, batch 2400, loss[loss=0.1734, simple_loss=0.2482, pruned_loss=0.0493, over 4633.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2658, pruned_loss=0.04212, over 941399.61 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:36:58,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234905.33333333334, ans=0.1 +2024-07-29 03:37:00,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=234905.33333333334, ans=0.125 +2024-07-29 03:37:15,797 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.568e+01 6.148e+01 7.129e+01 1.066e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 03:37:18,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=234945.33333333334, ans=0.025 +2024-07-29 03:37:33,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=234958.66666666666, ans=0.125 +2024-07-29 03:37:44,174 INFO [train.py:1114] (2/4) Epoch 18, batch 2450, loss[loss=0.1816, simple_loss=0.284, pruned_loss=0.03959, over 4697.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2662, pruned_loss=0.0426, over 937329.02 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:37:48,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=234972.0, ans=0.0 +2024-07-29 03:37:55,797 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.56 vs. limit=15.0 +2024-07-29 03:38:06,504 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.92 vs. limit=10.0 +2024-07-29 03:38:08,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234998.66666666666, ans=0.1 +2024-07-29 03:38:13,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=235012.0, ans=0.125 +2024-07-29 03:38:42,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235012.0, ans=0.125 +2024-07-29 03:38:45,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=235012.0, ans=0.07 +2024-07-29 03:38:53,644 INFO [train.py:1114] (2/4) Epoch 18, batch 2500, loss[loss=0.1885, simple_loss=0.2798, pruned_loss=0.04862, over 4817.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2659, pruned_loss=0.0425, over 939473.30 frames. 
], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:39:56,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=12.0 +2024-07-29 03:40:08,863 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.526e+01 6.445e+01 7.148e+01 1.003e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 03:40:09,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=235078.66666666666, ans=0.0 +2024-07-29 03:40:13,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=235078.66666666666, ans=0.025 +2024-07-29 03:40:16,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-29 03:40:22,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=235092.0, ans=0.1 +2024-07-29 03:40:25,114 INFO [train.py:1114] (2/4) Epoch 18, batch 2550, loss[loss=0.1637, simple_loss=0.2445, pruned_loss=0.0414, over 4796.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2651, pruned_loss=0.04208, over 938931.79 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:40:27,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235105.33333333334, ans=0.1 +2024-07-29 03:41:01,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235145.33333333334, ans=0.125 +2024-07-29 03:41:10,220 INFO [train.py:1114] (2/4) Epoch 18, batch 2600, loss[loss=0.1469, simple_loss=0.2371, pruned_loss=0.02833, over 4891.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.265, pruned_loss=0.04197, over 937939.26 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:41:15,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235172.0, ans=0.125 +2024-07-29 03:41:17,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=235172.0, ans=0.125 +2024-07-29 03:41:18,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=235172.0, ans=0.035 +2024-07-29 03:41:21,398 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:41:26,760 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:41:28,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=235185.33333333334, ans=0.035 +2024-07-29 03:41:36,741 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.697e+01 6.154e+01 6.937e+01 9.396e+01, threshold=1.231e+02, percent-clipped=0.0 +2024-07-29 03:41:46,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=235212.0, ans=0.0 +2024-07-29 03:41:53,029 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.83 vs. 
limit=10.0 +2024-07-29 03:41:58,088 INFO [train.py:1114] (2/4) Epoch 18, batch 2650, loss[loss=0.1856, simple_loss=0.2778, pruned_loss=0.04675, over 4592.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04231, over 939838.50 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:42:04,802 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235238.66666666666, ans=0.125 +2024-07-29 03:42:36,043 INFO [train.py:1114] (2/4) Epoch 18, batch 2700, loss[loss=0.1923, simple_loss=0.2853, pruned_loss=0.04961, over 4737.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2655, pruned_loss=0.04209, over 940100.97 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:42:44,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=235305.33333333334, ans=10.0 +2024-07-29 03:43:04,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=235332.0, ans=0.025 +2024-07-29 03:43:06,503 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.534e+01 6.342e+01 7.179e+01 1.053e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 03:43:30,335 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.54 vs. limit=15.0 +2024-07-29 03:43:33,211 INFO [train.py:1114] (2/4) Epoch 18, batch 2750, loss[loss=0.1501, simple_loss=0.2387, pruned_loss=0.03075, over 4705.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2648, pruned_loss=0.04177, over 939759.00 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:43:40,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=235385.33333333334, ans=0.125 +2024-07-29 03:43:47,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235385.33333333334, ans=0.1 +2024-07-29 03:43:51,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=235398.66666666666, ans=0.125 +2024-07-29 03:43:53,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=235398.66666666666, ans=0.125 +2024-07-29 03:43:56,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=235398.66666666666, ans=0.125 +2024-07-29 03:44:01,877 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235412.0, ans=0.1 +2024-07-29 03:44:05,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235425.33333333334, ans=0.0 +2024-07-29 03:44:05,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=235425.33333333334, ans=0.0 +2024-07-29 03:44:13,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=235425.33333333334, ans=0.0 +2024-07-29 03:44:13,129 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=235425.33333333334, ans=0.0 
+2024-07-29 03:44:15,744 INFO [train.py:1114] (2/4) Epoch 18, batch 2800, loss[loss=0.2822, simple_loss=0.3328, pruned_loss=0.1158, over 3182.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2655, pruned_loss=0.04218, over 937512.17 frames. ], batch size: 36, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:44:19,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=10.0 +2024-07-29 03:44:36,880 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.675e+01 6.325e+01 7.095e+01 1.073e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 03:46:33,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=235492.0, ans=0.125 +2024-07-29 03:46:33,908 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:46:34,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.73 vs. limit=22.5 +2024-07-29 03:46:37,223 INFO [train.py:1114] (2/4) Epoch 18, batch 2850, loss[loss=0.1832, simple_loss=0.2725, pruned_loss=0.04692, over 4965.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.04195, over 935853.41 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:46:51,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=235532.0, ans=0.0 +2024-07-29 03:46:53,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=235532.0, ans=0.125 +2024-07-29 03:46:56,517 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235545.33333333334, ans=0.125 +2024-07-29 03:46:58,595 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:47:05,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=235558.66666666666, ans=0.125 +2024-07-29 03:47:10,076 INFO [train.py:1114] (2/4) Epoch 18, batch 2900, loss[loss=0.1597, simple_loss=0.2447, pruned_loss=0.03732, over 4827.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2669, pruned_loss=0.0423, over 939612.94 frames. 
], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:47:14,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=235572.0, ans=0.125 +2024-07-29 03:47:18,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=235585.33333333334, ans=0.125 +2024-07-29 03:47:29,767 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.690e+01 6.267e+01 7.332e+01 1.125e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 03:47:37,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=235625.33333333334, ans=0.2 +2024-07-29 03:47:42,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=235625.33333333334, ans=0.2 +2024-07-29 03:47:45,830 INFO [train.py:1114] (2/4) Epoch 18, batch 2950, loss[loss=0.1468, simple_loss=0.2483, pruned_loss=0.02268, over 4701.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2655, pruned_loss=0.04174, over 938560.44 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:47:56,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235652.0, ans=0.125 +2024-07-29 03:47:59,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=235652.0, ans=0.125 +2024-07-29 03:48:02,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235665.33333333334, ans=0.1 +2024-07-29 03:48:14,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=235692.0, ans=0.125 +2024-07-29 03:48:21,414 INFO [train.py:1114] (2/4) Epoch 18, batch 3000, loss[loss=0.1625, simple_loss=0.2694, pruned_loss=0.02782, over 4757.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2651, pruned_loss=0.04186, over 938178.81 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:48:21,414 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 03:48:44,158 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1624, simple_loss=0.2643, pruned_loss=0.03024, over 944034.00 frames. +2024-07-29 03:48:44,159 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 03:48:50,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=235718.66666666666, ans=0.125 +2024-07-29 03:48:53,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=235718.66666666666, ans=0.07 +2024-07-29 03:49:03,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=235732.0, ans=0.025 +2024-07-29 03:49:04,558 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.607e+01 6.067e+01 7.332e+01 1.132e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 03:49:07,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=235745.33333333334, ans=0.125 +2024-07-29 03:49:18,702 INFO [train.py:1114] (2/4) Epoch 18, batch 3050, loss[loss=0.1659, simple_loss=0.2493, pruned_loss=0.04124, over 4632.00 frames. 
], tot_loss[loss=0.1748, simple_loss=0.2656, pruned_loss=0.042, over 937154.35 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:49:29,898 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0
+2024-07-29 03:49:30,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=235785.33333333334, ans=0.0
+2024-07-29 03:49:33,719 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235798.66666666666, ans=0.1
+2024-07-29 03:49:39,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.45 vs. limit=22.5
+2024-07-29 03:49:53,633 INFO [train.py:1114] (2/4) Epoch 18, batch 3100, loss[loss=0.1679, simple_loss=0.2526, pruned_loss=0.04164, over 4621.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2655, pruned_loss=0.04261, over 937922.83 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:50:14,444 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.311e+01 5.371e+01 5.941e+01 6.939e+01 1.181e+02, threshold=1.188e+02, percent-clipped=0.0
+2024-07-29 03:50:17,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235878.66666666666, ans=0.125
+2024-07-29 03:50:28,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=235905.33333333334, ans=0.0
+2024-07-29 03:50:28,774 INFO [train.py:1114] (2/4) Epoch 18, batch 3150, loss[loss=0.168, simple_loss=0.2514, pruned_loss=0.04231, over 4638.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2652, pruned_loss=0.04209, over 938176.79 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0
+2024-07-29 03:50:58,408 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
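The `scaling.py:214` entries track `ScheduledFloat` hyperparameters: per-module constants such as balancer probabilities, skip rates and dropout whose value is a function of `batch_count` rather than fixed, which is why the same name logs different `ans=` values at different counts. A sketch of a piecewise-linear schedule of that kind; the breakpoints in the example are made up for illustration and do not come from this recipe:

```python
def scheduled_float(batch_count, points):
    """Piecewise-linear value over batch_count. `points` is a sorted list of
    (batch_count, value) breakpoints; the value is held flat outside them."""
    if batch_count <= points[0][0]:
        return points[0][1]
    for (x0, y0), (x1, y1) in zip(points, points[1:]):
        if batch_count <= x1:
            return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
    return points[-1][1]

# Hypothetical schedule: start at 0.3, decay to 0.125 by batch 20000,
# then stay flat -- consistent with the constant ans=0.125 readings here.
print(scheduled_float(235545.33, [(0.0, 0.3), (20000.0, 0.125)]))  # -> 0.125
```

+2024-07-29 03:51:02,003 INFO [train.py:1114] (2/4) Epoch 18, batch 3200, loss[loss=0.1529, simple_loss=0.255, pruned_loss=0.02537, over 4819.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2638, pruned_loss=0.04133, over 939505.56 frames. 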
], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:51:04,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=235972.0, ans=0.0 +2024-07-29 03:51:06,919 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=235972.0, ans=0.0 +2024-07-29 03:51:19,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235998.66666666666, ans=0.1 +2024-07-29 03:51:23,936 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.694e+01 6.317e+01 7.020e+01 1.050e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 03:51:26,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=236012.0, ans=0.0 +2024-07-29 03:51:27,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=236012.0, ans=0.125 +2024-07-29 03:51:30,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236012.0, ans=0.1 +2024-07-29 03:51:38,127 INFO [train.py:1114] (2/4) Epoch 18, batch 3250, loss[loss=0.1788, simple_loss=0.2716, pruned_loss=0.04297, over 4927.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2646, pruned_loss=0.04152, over 940618.55 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:51:38,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236038.66666666666, ans=0.1 +2024-07-29 03:51:38,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236038.66666666666, ans=0.1 +2024-07-29 03:51:49,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=236052.0, ans=0.0 +2024-07-29 03:52:10,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236092.0, ans=0.1 +2024-07-29 03:52:11,809 INFO [train.py:1114] (2/4) Epoch 18, batch 3300, loss[loss=0.1941, simple_loss=0.2828, pruned_loss=0.05271, over 4672.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2631, pruned_loss=0.0412, over 940639.47 frames. ], batch size: 19, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:52:16,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=236105.33333333334, ans=0.125 +2024-07-29 03:52:24,993 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.96 vs. 
limit=15.0 +2024-07-29 03:52:26,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236132.0, ans=0.125 +2024-07-29 03:52:31,049 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.049e+01 5.536e+01 6.135e+01 6.929e+01 1.182e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 03:52:32,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=236145.33333333334, ans=0.0 +2024-07-29 03:52:38,417 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.56 vs. limit=15.0 +2024-07-29 03:52:42,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=236158.66666666666, ans=0.0 +2024-07-29 03:52:45,454 INFO [train.py:1114] (2/4) Epoch 18, batch 3350, loss[loss=0.1955, simple_loss=0.283, pruned_loss=0.05402, over 4649.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2637, pruned_loss=0.04166, over 938716.77 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:05,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.92 vs. limit=15.0 +2024-07-29 03:53:06,415 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=236198.66666666666, ans=0.0 +2024-07-29 03:53:12,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=236212.0, ans=0.2 +2024-07-29 03:53:15,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=236225.33333333334, ans=0.0 +2024-07-29 03:53:23,139 INFO [train.py:1114] (2/4) Epoch 18, batch 3400, loss[loss=0.1558, simple_loss=0.236, pruned_loss=0.03784, over 4797.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2638, pruned_loss=0.04192, over 937502.65 frames. ], batch size: 11, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:27,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236238.66666666666, ans=0.1 +2024-07-29 03:53:34,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=236252.0, ans=0.025 +2024-07-29 03:53:41,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236265.33333333334, ans=0.1 +2024-07-29 03:53:43,250 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.614e+01 5.661e+01 6.178e+01 6.933e+01 1.009e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 03:53:50,481 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.90 vs. limit=6.0 +2024-07-29 03:53:55,650 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236292.0, ans=0.125 +2024-07-29 03:53:57,530 INFO [train.py:1114] (2/4) Epoch 18, batch 3450, loss[loss=0.2237, simple_loss=0.3032, pruned_loss=0.07212, over 4716.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2647, pruned_loss=0.04197, over 937987.83 frames. 
], batch size: 19, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:54:02,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=236305.33333333334, ans=0.2 +2024-07-29 03:54:11,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236332.0, ans=0.1 +2024-07-29 03:54:17,984 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-07-29 03:54:20,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=236345.33333333334, ans=0.05 +2024-07-29 03:54:22,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=236345.33333333334, ans=0.125 +2024-07-29 03:54:30,873 INFO [train.py:1114] (2/4) Epoch 18, batch 3500, loss[loss=0.1394, simple_loss=0.2281, pruned_loss=0.02536, over 4935.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2635, pruned_loss=0.04142, over 938934.20 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:54:31,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236372.0, ans=0.1 +2024-07-29 03:54:31,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236372.0, ans=0.125 +2024-07-29 03:54:31,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=236372.0, ans=0.2 +2024-07-29 03:54:33,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-29 03:54:37,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=236385.33333333334, ans=0.0 +2024-07-29 03:54:43,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=236385.33333333334, ans=0.125 +2024-07-29 03:54:50,376 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.401e+01 5.925e+01 6.709e+01 9.541e+01, threshold=1.185e+02, percent-clipped=0.0 +2024-07-29 03:54:55,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=236412.0, ans=15.0 +2024-07-29 03:55:02,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=236425.33333333334, ans=0.0 +2024-07-29 03:55:04,387 INFO [train.py:1114] (2/4) Epoch 18, batch 3550, loss[loss=0.1789, simple_loss=0.2669, pruned_loss=0.04547, over 4661.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.262, pruned_loss=0.04028, over 939099.81 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:55:14,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236452.0, ans=0.125 +2024-07-29 03:55:18,587 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. 
limit=15.0 +2024-07-29 03:55:24,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=236465.33333333334, ans=0.2 +2024-07-29 03:55:25,431 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-29 03:55:25,807 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=236478.66666666666, ans=0.125 +2024-07-29 03:55:27,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=236478.66666666666, ans=0.2 +2024-07-29 03:55:28,074 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0 +2024-07-29 03:55:38,310 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:55:39,458 INFO [train.py:1114] (2/4) Epoch 18, batch 3600, loss[loss=0.1491, simple_loss=0.241, pruned_loss=0.02856, over 4962.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04048, over 940973.66 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:55:40,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236505.33333333334, ans=0.125 +2024-07-29 03:55:52,082 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-07-29 03:55:56,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=236532.0, ans=0.2 +2024-07-29 03:55:59,088 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.793e+01 6.774e+01 8.193e+01 1.238e+02, threshold=1.355e+02, percent-clipped=1.0 +2024-07-29 03:56:00,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=236545.33333333334, ans=0.0 +2024-07-29 03:56:05,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=236545.33333333334, ans=0.125 +2024-07-29 03:56:08,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=236558.66666666666, ans=0.025 +2024-07-29 03:56:13,418 INFO [train.py:1114] (2/4) Epoch 18, batch 3650, loss[loss=0.1568, simple_loss=0.2499, pruned_loss=0.03187, over 4901.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2618, pruned_loss=0.04018, over 941543.82 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:56:13,464 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=236572.0, ans=0.0 +2024-07-29 03:56:13,674 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.21 vs. limit=22.5 +2024-07-29 03:56:15,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.62 vs. 
limit=12.0 +2024-07-29 03:56:16,524 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-07-29 03:56:21,478 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.11 vs. limit=22.5 +2024-07-29 03:56:29,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=236598.66666666666, ans=0.0 +2024-07-29 03:56:35,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=236612.0, ans=0.2 +2024-07-29 03:56:46,964 INFO [train.py:1114] (2/4) Epoch 18, batch 3700, loss[loss=0.1592, simple_loss=0.2546, pruned_loss=0.03188, over 4938.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2622, pruned_loss=0.04024, over 942287.42 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:56:47,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=236638.66666666666, ans=0.125 +2024-07-29 03:56:52,349 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=15.0 +2024-07-29 03:57:07,763 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.372e+01 6.083e+01 6.875e+01 9.330e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 03:57:21,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=236692.0, ans=0.0 +2024-07-29 03:57:22,972 INFO [train.py:1114] (2/4) Epoch 18, batch 3750, loss[loss=0.1394, simple_loss=0.2309, pruned_loss=0.02393, over 4821.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04081, over 943868.31 frames. ], batch size: 11, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:57:23,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=236705.33333333334, ans=0.05 +2024-07-29 03:57:32,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=236705.33333333334, ans=0.025 +2024-07-29 03:57:36,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.53 vs. limit=15.0 +2024-07-29 03:57:45,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=236732.0, ans=0.0 +2024-07-29 03:57:50,083 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0 +2024-07-29 03:57:51,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=236745.33333333334, ans=0.0 +2024-07-29 03:57:55,800 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.73 vs. 
limit=6.0 +2024-07-29 03:57:57,012 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=236745.33333333334, ans=0.0 +2024-07-29 03:58:01,037 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:58:07,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=236758.66666666666, ans=0.0 +2024-07-29 03:58:09,060 INFO [train.py:1114] (2/4) Epoch 18, batch 3800, loss[loss=0.1607, simple_loss=0.2628, pruned_loss=0.02932, over 4813.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2637, pruned_loss=0.04089, over 942461.61 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:58:16,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=236785.33333333334, ans=0.2 +2024-07-29 03:58:24,078 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.24 vs. limit=15.0 +2024-07-29 03:58:28,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.460e+01 5.890e+01 6.474e+01 8.788e+01, threshold=1.178e+02, percent-clipped=0.0 +2024-07-29 03:58:44,391 INFO [train.py:1114] (2/4) Epoch 18, batch 3850, loss[loss=0.1906, simple_loss=0.2918, pruned_loss=0.0447, over 4659.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2649, pruned_loss=0.04158, over 942745.60 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:59:18,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=236905.33333333334, ans=0.2 +2024-07-29 03:59:18,968 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=236905.33333333334, ans=0.125 +2024-07-29 03:59:19,421 INFO [train.py:1114] (2/4) Epoch 18, batch 3900, loss[loss=0.1984, simple_loss=0.2994, pruned_loss=0.04871, over 4812.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2648, pruned_loss=0.04157, over 942737.63 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:59:19,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=236905.33333333334, ans=0.025 +2024-07-29 03:59:22,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=236905.33333333334, ans=0.125 +2024-07-29 03:59:25,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=236905.33333333334, ans=0.2 +2024-07-29 03:59:28,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236918.66666666666, ans=0.125 +2024-07-29 03:59:42,384 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.580e+01 6.101e+01 6.865e+01 9.868e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 03:59:51,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=236958.66666666666, ans=0.0 +2024-07-29 03:59:56,458 INFO [train.py:1114] (2/4) Epoch 18, batch 3950, loss[loss=0.1977, simple_loss=0.287, pruned_loss=0.05418, over 4832.00 frames. ], tot_loss[loss=0.174, simple_loss=0.265, pruned_loss=0.04149, over 944660.76 frames. 
], batch size: 16, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:00:18,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237012.0, ans=0.125 +2024-07-29 04:00:29,773 INFO [train.py:1114] (2/4) Epoch 18, batch 4000, loss[loss=0.1688, simple_loss=0.2533, pruned_loss=0.04212, over 4786.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2648, pruned_loss=0.04152, over 941104.20 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:00:39,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237052.0, ans=0.125 +2024-07-29 04:00:46,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=237065.33333333334, ans=0.07 +2024-07-29 04:00:49,607 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.687e+01 6.252e+01 7.100e+01 1.258e+02, threshold=1.250e+02, percent-clipped=1.0 +2024-07-29 04:00:53,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=237078.66666666666, ans=0.125 +2024-07-29 04:00:59,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=237092.0, ans=0.125 +2024-07-29 04:01:02,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237105.33333333334, ans=0.1 +2024-07-29 04:01:03,421 INFO [train.py:1114] (2/4) Epoch 18, batch 4050, loss[loss=0.2338, simple_loss=0.3128, pruned_loss=0.07744, over 3479.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2652, pruned_loss=0.0421, over 939912.87 frames. ], batch size: 35, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:01:06,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=237105.33333333334, ans=0.04949747468305833 +2024-07-29 04:01:12,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=237105.33333333334, ans=0.0 +2024-07-29 04:01:15,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.62 vs. 
limit=12.0 +2024-07-29 04:01:20,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=237132.0, ans=0.0 +2024-07-29 04:01:20,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=237132.0, ans=0.0 +2024-07-29 04:01:27,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=237145.33333333334, ans=0.125 +2024-07-29 04:01:32,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=237145.33333333334, ans=0.0 +2024-07-29 04:01:36,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237158.66666666666, ans=0.1 +2024-07-29 04:01:39,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237158.66666666666, ans=0.125 +2024-07-29 04:01:41,061 INFO [train.py:1114] (2/4) Epoch 18, batch 4100, loss[loss=0.215, simple_loss=0.3077, pruned_loss=0.06118, over 4907.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2649, pruned_loss=0.04198, over 939701.48 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:01:59,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237198.66666666666, ans=0.1 +2024-07-29 04:02:01,264 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.605e+01 6.193e+01 7.147e+01 1.131e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 04:02:09,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237225.33333333334, ans=0.125 +2024-07-29 04:02:09,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=237225.33333333334, ans=0.125 +2024-07-29 04:02:14,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=15.0 +2024-07-29 04:02:32,056 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=237225.33333333334, ans=0.2 +2024-07-29 04:02:51,889 INFO [train.py:1114] (2/4) Epoch 18, batch 4150, loss[loss=0.1943, simple_loss=0.2884, pruned_loss=0.05008, over 4827.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2643, pruned_loss=0.04165, over 939282.08 frames. 
], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:03:08,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=237238.66666666666, ans=0.0 +2024-07-29 04:03:08,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=237238.66666666666, ans=0.125 +2024-07-29 04:03:48,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=237252.0, ans=0.125 +2024-07-29 04:04:06,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=237265.33333333334, ans=0.05 +2024-07-29 04:04:10,914 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.60 vs. limit=6.0 +2024-07-29 04:04:17,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=237278.66666666666, ans=0.2 +2024-07-29 04:04:17,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=237278.66666666666, ans=0.07 +2024-07-29 04:04:20,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=237278.66666666666, ans=0.0 +2024-07-29 04:04:24,091 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=237292.0, ans=0.2 +2024-07-29 04:04:32,872 INFO [train.py:1114] (2/4) Epoch 18, batch 4200, loss[loss=0.1972, simple_loss=0.2893, pruned_loss=0.05258, over 4894.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2639, pruned_loss=0.0412, over 940389.05 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:04:37,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=237305.33333333334, ans=0.025 +2024-07-29 04:04:46,721 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=237332.0, ans=0.2 +2024-07-29 04:04:48,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=237332.0, ans=0.125 +2024-07-29 04:04:50,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=237332.0, ans=0.0 +2024-07-29 04:04:57,705 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:06:02,030 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+01 5.423e+01 5.970e+01 6.521e+01 1.016e+02, threshold=1.194e+02, percent-clipped=0.0 +2024-07-29 04:06:18,689 INFO [train.py:1114] (2/4) Epoch 18, batch 4250, loss[loss=0.1519, simple_loss=0.2449, pruned_loss=0.0294, over 4641.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2646, pruned_loss=0.04123, over 940883.59 frames. 
], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:06:18,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=237372.0, ans=0.5 +2024-07-29 04:06:22,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237372.0, ans=0.1 +2024-07-29 04:06:33,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237398.66666666666, ans=0.125 +2024-07-29 04:06:40,515 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0 +2024-07-29 04:06:42,684 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.83 vs. limit=22.5 +2024-07-29 04:06:42,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=237412.0, ans=0.025 +2024-07-29 04:06:52,288 INFO [train.py:1114] (2/4) Epoch 18, batch 4300, loss[loss=0.2037, simple_loss=0.2985, pruned_loss=0.0545, over 4758.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2647, pruned_loss=0.04132, over 940347.59 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:07:14,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=237452.0, ans=0.0 +2024-07-29 04:07:24,060 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.877e+01 6.356e+01 7.291e+01 9.513e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 04:07:33,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=237492.0, ans=0.0 +2024-07-29 04:07:37,524 INFO [train.py:1114] (2/4) Epoch 18, batch 4350, loss[loss=0.1746, simple_loss=0.2619, pruned_loss=0.04363, over 4753.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2651, pruned_loss=0.04158, over 941076.37 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:07:39,110 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:07:48,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=237518.66666666666, ans=0.125 +2024-07-29 04:07:50,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237518.66666666666, ans=0.125 +2024-07-29 04:07:53,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.23 vs. 
limit=15.0 +2024-07-29 04:07:54,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=237532.0, ans=0.0 +2024-07-29 04:07:56,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=237532.0, ans=0.2 +2024-07-29 04:08:00,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237545.33333333334, ans=0.125 +2024-07-29 04:08:03,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237545.33333333334, ans=0.1 +2024-07-29 04:08:06,376 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.02 vs. limit=15.0 +2024-07-29 04:08:09,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.79 vs. limit=12.0 +2024-07-29 04:08:12,603 INFO [train.py:1114] (2/4) Epoch 18, batch 4400, loss[loss=0.1553, simple_loss=0.2563, pruned_loss=0.02715, over 4818.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2644, pruned_loss=0.04083, over 940354.42 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:08:13,790 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.17 vs. limit=15.0 +2024-07-29 04:08:29,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=237598.66666666666, ans=0.2 +2024-07-29 04:08:33,225 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+01 5.655e+01 6.397e+01 7.492e+01 1.030e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 04:08:34,953 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.04 vs. limit=22.5 +2024-07-29 04:08:41,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=237625.33333333334, ans=0.125 +2024-07-29 04:08:44,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=237625.33333333334, ans=0.125 +2024-07-29 04:08:45,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237625.33333333334, ans=0.1 +2024-07-29 04:08:46,779 INFO [train.py:1114] (2/4) Epoch 18, batch 4450, loss[loss=0.1471, simple_loss=0.2296, pruned_loss=0.03227, over 4944.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2651, pruned_loss=0.04166, over 938385.41 frames. 
], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:08:46,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=237638.66666666666, ans=0.0 +2024-07-29 04:08:52,291 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:08:58,090 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=237652.0, ans=0.0 +2024-07-29 04:09:00,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=237665.33333333334, ans=0.0 +2024-07-29 04:09:01,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=237665.33333333334, ans=0.125 +2024-07-29 04:09:08,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237678.66666666666, ans=0.1 +2024-07-29 04:09:21,652 INFO [train.py:1114] (2/4) Epoch 18, batch 4500, loss[loss=0.2037, simple_loss=0.2988, pruned_loss=0.05434, over 4745.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2666, pruned_loss=0.0425, over 938066.32 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:09:24,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=237705.33333333334, ans=0.0 +2024-07-29 04:09:24,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=237705.33333333334, ans=0.125 +2024-07-29 04:09:29,758 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=237718.66666666666, ans=0.2 +2024-07-29 04:09:34,697 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.52 vs. limit=10.0 +2024-07-29 04:09:44,779 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=237732.0, ans=0.125 +2024-07-29 04:09:48,695 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=237732.0, ans=0.2 +2024-07-29 04:09:50,472 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.651e+01 6.463e+01 7.658e+01 1.183e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 04:10:11,257 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237758.66666666666, ans=0.125 +2024-07-29 04:10:12,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=237758.66666666666, ans=0.2 +2024-07-29 04:10:13,158 INFO [train.py:1114] (2/4) Epoch 18, batch 4550, loss[loss=0.178, simple_loss=0.2548, pruned_loss=0.05062, over 4895.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2658, pruned_loss=0.0419, over 940051.10 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:14,902 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. 
limit=15.0 +2024-07-29 04:10:19,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=237785.33333333334, ans=0.2 +2024-07-29 04:10:42,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=237825.33333333334, ans=0.125 +2024-07-29 04:10:47,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=237838.66666666666, ans=0.0 +2024-07-29 04:10:47,604 INFO [train.py:1114] (2/4) Epoch 18, batch 4600, loss[loss=0.1721, simple_loss=0.2656, pruned_loss=0.03931, over 4511.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2653, pruned_loss=0.04162, over 938188.09 frames. ], batch size: 21, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:49,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=237838.66666666666, ans=0.025 +2024-07-29 04:10:52,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=237838.66666666666, ans=0.0 +2024-07-29 04:10:57,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=237852.0, ans=0.125 +2024-07-29 04:10:58,069 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-07-29 04:11:09,105 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.588e+01 6.056e+01 7.096e+01 1.037e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 04:11:17,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=237892.0, ans=0.0 +2024-07-29 04:11:22,405 INFO [train.py:1114] (2/4) Epoch 18, batch 4650, loss[loss=0.1767, simple_loss=0.2748, pruned_loss=0.03927, over 4833.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2665, pruned_loss=0.04198, over 940131.88 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:11:22,708 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.95 vs. limit=15.0 +2024-07-29 04:11:26,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=237905.33333333334, ans=0.125 +2024-07-29 04:11:27,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.34 vs. limit=15.0 +2024-07-29 04:11:29,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=237905.33333333334, ans=0.125 +2024-07-29 04:11:30,248 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.88 vs. 
limit=22.5 +2024-07-29 04:11:30,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys.whitening_limit, batch_count=237905.33333333334, ans=6.0 +2024-07-29 04:11:52,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237945.33333333334, ans=0.1 +2024-07-29 04:11:59,403 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.11 vs. limit=15.0 +2024-07-29 04:12:00,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.42 vs. limit=15.0 +2024-07-29 04:12:03,862 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=237972.0, ans=0.2 +2024-07-29 04:12:04,313 INFO [train.py:1114] (2/4) Epoch 18, batch 4700, loss[loss=0.1581, simple_loss=0.2462, pruned_loss=0.03499, over 4701.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2657, pruned_loss=0.04179, over 937345.17 frames. ], batch size: 11, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:12:14,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=237985.33333333334, ans=0.0 +2024-07-29 04:12:18,604 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:12:20,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=237998.66666666666, ans=0.0 +2024-07-29 04:12:24,427 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.692e+01 6.166e+01 6.744e+01 9.680e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 04:12:25,099 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238012.0, ans=0.125 +2024-07-29 04:12:27,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. limit=6.0 +2024-07-29 04:12:38,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238025.33333333334, ans=0.1 +2024-07-29 04:12:41,525 INFO [train.py:1114] (2/4) Epoch 18, batch 4750, loss[loss=0.2004, simple_loss=0.2865, pruned_loss=0.05711, over 4579.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2664, pruned_loss=0.04245, over 935667.97 frames. 
], batch size: 21, lr: 4.14e-03, grad_scale: 16.0 +2024-07-29 04:12:45,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=238038.66666666666, ans=0.09899494936611666 +2024-07-29 04:12:55,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=238065.33333333334, ans=0.5 +2024-07-29 04:13:03,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=238065.33333333334, ans=0.2 +2024-07-29 04:13:28,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=238092.0, ans=0.125 +2024-07-29 04:13:36,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-29 04:13:44,026 INFO [train.py:1114] (2/4) Epoch 18, batch 4800, loss[loss=0.1806, simple_loss=0.2742, pruned_loss=0.04349, over 4691.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2663, pruned_loss=0.04275, over 932854.93 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:13:46,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=238105.33333333334, ans=0.125 +2024-07-29 04:13:48,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=238105.33333333334, ans=0.2 +2024-07-29 04:13:48,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=238105.33333333334, ans=0.125 +2024-07-29 04:13:49,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=238105.33333333334, ans=0.0 +2024-07-29 04:13:58,178 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238132.0, ans=0.1 +2024-07-29 04:14:07,349 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.662e+01 6.486e+01 7.810e+01 1.129e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-29 04:14:07,979 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.63 vs. limit=15.0 +2024-07-29 04:14:26,466 INFO [train.py:1114] (2/4) Epoch 18, batch 4850, loss[loss=0.1683, simple_loss=0.2603, pruned_loss=0.03817, over 4751.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2663, pruned_loss=0.04296, over 932602.73 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:14:35,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=238185.33333333334, ans=0.125 +2024-07-29 04:14:42,851 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=238185.33333333334, ans=0.0 +2024-07-29 04:14:48,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.20 vs. 
limit=22.5 +2024-07-29 04:15:20,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238212.0, ans=0.125 +2024-07-29 04:15:21,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=238225.33333333334, ans=0.2 +2024-07-29 04:15:31,239 INFO [train.py:1114] (2/4) Epoch 18, batch 4900, loss[loss=0.1787, simple_loss=0.266, pruned_loss=0.0457, over 4762.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04235, over 934087.16 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:15:41,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238252.0, ans=0.125 +2024-07-29 04:15:44,993 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-07-29 04:15:52,807 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.541e+01 6.118e+01 7.300e+01 1.058e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 04:15:57,046 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.97 vs. limit=15.0 +2024-07-29 04:16:05,559 INFO [train.py:1114] (2/4) Epoch 18, batch 4950, loss[loss=0.2008, simple_loss=0.2863, pruned_loss=0.0576, over 3521.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2666, pruned_loss=0.04302, over 931570.07 frames. ], batch size: 35, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:05,692 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=238305.33333333334, ans=0.2 +2024-07-29 04:16:05,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238305.33333333334, ans=0.1 +2024-07-29 04:16:07,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=6.0 +2024-07-29 04:16:31,409 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238345.33333333334, ans=0.125 +2024-07-29 04:16:37,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=238358.66666666666, ans=0.125 +2024-07-29 04:16:40,863 INFO [train.py:1114] (2/4) Epoch 18, batch 5000, loss[loss=0.2009, simple_loss=0.2837, pruned_loss=0.05905, over 4665.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2654, pruned_loss=0.04241, over 935471.25 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:48,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=238385.33333333334, ans=0.09899494936611666 +2024-07-29 04:16:52,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. 
limit=15.0 +2024-07-29 04:16:55,756 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238398.66666666666, ans=0.1 +2024-07-29 04:16:56,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=238398.66666666666, ans=0.125 +2024-07-29 04:17:01,715 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.523e+01 5.940e+01 6.612e+01 9.274e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 04:17:07,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=238425.33333333334, ans=0.2 +2024-07-29 04:17:10,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=238425.33333333334, ans=0.0 +2024-07-29 04:17:13,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_na.min_abs, batch_count=238425.33333333334, ans=0.02 +2024-07-29 04:17:14,256 INFO [train.py:1114] (2/4) Epoch 18, batch 5050, loss[loss=0.1512, simple_loss=0.2458, pruned_loss=0.02828, over 4865.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.264, pruned_loss=0.04148, over 938057.27 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:17:19,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.83 vs. limit=15.0 +2024-07-29 04:17:22,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238452.0, ans=0.1 +2024-07-29 04:18:29,714 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=238452.0, ans=0.0 +2024-07-29 04:18:34,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=238465.33333333334, ans=0.125 +2024-07-29 04:18:41,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=238465.33333333334, ans=0.0 +2024-07-29 04:18:45,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=238478.66666666666, ans=0.125 +2024-07-29 04:18:58,346 INFO [train.py:1114] (2/4) Epoch 18, batch 5100, loss[loss=0.1572, simple_loss=0.2483, pruned_loss=0.03305, over 4778.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2639, pruned_loss=0.04163, over 935205.22 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:21,953 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.577e+01 6.307e+01 7.309e+01 1.155e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:19:29,604 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.21 vs. 
limit=15.0 +2024-07-29 04:19:30,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238558.66666666666, ans=0.1 +2024-07-29 04:19:31,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=238558.66666666666, ans=0.125 +2024-07-29 04:19:32,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=238558.66666666666, ans=0.125 +2024-07-29 04:19:32,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238558.66666666666, ans=0.125 +2024-07-29 04:19:34,604 INFO [train.py:1114] (2/4) Epoch 18, batch 5150, loss[loss=0.1853, simple_loss=0.287, pruned_loss=0.04174, over 4837.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04207, over 936041.62 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:41,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=238585.33333333334, ans=0.2 +2024-07-29 04:19:41,593 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=238585.33333333334, ans=0.2 +2024-07-29 04:19:46,475 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=238585.33333333334, ans=0.125 +2024-07-29 04:19:51,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=238598.66666666666, ans=0.0 +2024-07-29 04:19:51,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238598.66666666666, ans=0.125 +2024-07-29 04:19:58,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=238612.0, ans=10.0 +2024-07-29 04:20:05,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=238625.33333333334, ans=0.125 +2024-07-29 04:20:05,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238625.33333333334, ans=0.1 +2024-07-29 04:20:08,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238638.66666666666, ans=0.125 +2024-07-29 04:20:08,825 INFO [train.py:1114] (2/4) Epoch 18, batch 5200, loss[loss=0.1667, simple_loss=0.2587, pruned_loss=0.03736, over 4665.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.264, pruned_loss=0.04185, over 936248.75 frames. ], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:09,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. 
limit=15.0 +2024-07-29 04:20:12,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=238638.66666666666, ans=0.125 +2024-07-29 04:20:12,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=238638.66666666666, ans=0.1 +2024-07-29 04:20:15,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238652.0, ans=0.1 +2024-07-29 04:20:17,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=238652.0, ans=0.125 +2024-07-29 04:20:20,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238652.0, ans=0.125 +2024-07-29 04:20:26,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0 +2024-07-29 04:20:28,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=238665.33333333334, ans=0.95 +2024-07-29 04:20:28,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=238665.33333333334, ans=0.0 +2024-07-29 04:20:30,405 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.651e+01 6.355e+01 7.516e+01 2.460e+02, threshold=1.271e+02, percent-clipped=1.0 +2024-07-29 04:20:30,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=238678.66666666666, ans=0.125 +2024-07-29 04:20:44,992 INFO [train.py:1114] (2/4) Epoch 18, batch 5250, loss[loss=0.1818, simple_loss=0.2716, pruned_loss=0.04599, over 4898.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2634, pruned_loss=0.04139, over 935660.17 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:45,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=238705.33333333334, ans=0.125 +2024-07-29 04:20:57,266 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:21:02,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=238732.0, ans=0.125 +2024-07-29 04:21:02,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=238732.0, ans=0.125 +2024-07-29 04:21:10,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=238745.33333333334, ans=0.125 +2024-07-29 04:21:12,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=238758.66666666666, ans=0.0 +2024-07-29 04:21:13,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.43 vs. 
limit=15.0 +2024-07-29 04:21:17,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=238758.66666666666, ans=0.0 +2024-07-29 04:21:18,580 INFO [train.py:1114] (2/4) Epoch 18, batch 5300, loss[loss=0.1982, simple_loss=0.294, pruned_loss=0.05122, over 4632.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2638, pruned_loss=0.04156, over 933975.39 frames. ], batch size: 16, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:21:18,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=238772.0, ans=0.0 +2024-07-29 04:21:25,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238772.0, ans=0.1 +2024-07-29 04:21:26,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=238785.33333333334, ans=0.2 +2024-07-29 04:21:28,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=238785.33333333334, ans=0.2 +2024-07-29 04:21:40,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=238812.0, ans=0.125 +2024-07-29 04:21:41,632 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.630e+01 6.188e+01 7.457e+01 1.076e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 04:22:03,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. limit=10.0 +2024-07-29 04:22:05,295 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.93 vs. limit=22.5 +2024-07-29 04:22:08,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=238825.33333333334, ans=0.025 +2024-07-29 04:22:09,539 INFO [train.py:1114] (2/4) Epoch 18, batch 5350, loss[loss=0.1644, simple_loss=0.2388, pruned_loss=0.04503, over 4562.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2643, pruned_loss=0.04171, over 936519.08 frames. ], batch size: 10, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:22:30,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=238852.0, ans=0.025 +2024-07-29 04:22:45,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=238878.66666666666, ans=0.125 +2024-07-29 04:23:04,846 INFO [train.py:1114] (2/4) Epoch 18, batch 5400, loss[loss=0.191, simple_loss=0.2792, pruned_loss=0.05145, over 4179.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2653, pruned_loss=0.04204, over 930850.53 frames. 
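The `scaling.py:214` entries above each report a `ScheduledFloat`: a hyperparameter (dropout probability, skip rate, balancer probability, scale floor) that is not fixed but evaluated as a piecewise-linear function of the batch count. A minimal sketch of that idea, with hypothetical class and knot values (not icefall's actual implementation):

```python
from bisect import bisect_right

class PiecewiseLinearSchedule:
    """A hyperparameter given as a piecewise-linear function of batch count."""

    def __init__(self, *points):
        # points: (batch_count, value) knots, sorted by batch_count
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect_right(self.xs, batch_count) - 1
        t = (batch_count - self.xs[i]) / (self.xs[i + 1] - self.xs[i])
        return self.ys[i] + t * (self.ys[i + 1] - self.ys[i])

# e.g. a dropout rate annealed from 0.3 to 0.1 over the first 20k batches:
dropout_p = PiecewiseLinearSchedule((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(238425.33))  # 0.1 -- consistent with the "ans=0.1" entries above
```

By batch 238k every schedule here has long since reached its final knot, which is why the logged `ans=` values repeat unchanged from batch to batch.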
], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:23:09,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=238905.33333333334, ans=0.125 +2024-07-29 04:23:19,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=238932.0, ans=0.125 +2024-07-29 04:23:19,497 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=238932.0, ans=0.125 +2024-07-29 04:23:24,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238945.33333333334, ans=0.1 +2024-07-29 04:23:25,937 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.845e+01 6.439e+01 7.513e+01 9.975e+01, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 04:23:33,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=238958.66666666666, ans=0.0 +2024-07-29 04:23:38,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=238972.0, ans=0.2 +2024-07-29 04:23:38,582 INFO [train.py:1114] (2/4) Epoch 18, batch 5450, loss[loss=0.1743, simple_loss=0.2627, pruned_loss=0.04292, over 4694.00 frames. ], tot_loss[loss=0.174, simple_loss=0.265, pruned_loss=0.04147, over 933543.13 frames. ], batch size: 11, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:23:41,859 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.44 vs. limit=15.0 +2024-07-29 04:23:47,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=238985.33333333334, ans=0.125 +2024-07-29 04:23:50,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238985.33333333334, ans=0.125 +2024-07-29 04:23:55,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.15 vs. limit=6.0 +2024-07-29 04:23:56,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238998.66666666666, ans=0.1 +2024-07-29 04:24:03,432 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.81 vs. limit=22.5 +2024-07-29 04:24:13,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-29 04:24:14,284 INFO [train.py:1114] (2/4) Epoch 18, batch 5500, loss[loss=0.1901, simple_loss=0.2767, pruned_loss=0.05171, over 4293.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2654, pruned_loss=0.04208, over 931747.22 frames. 
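The `optim.py:487` WARNING lines report the 0/25/50/75/100% quantiles of recent gradient norms plus the clipping threshold in effect; note that in the WARNING just above, `threshold=1.312e+02` is exactly `2.0 ×` the `6.562e+01` median, matching `Clipping_scale=2.0`. A sketch of quartile-based clipping consistent with that relationship (the class name, window size, and history mechanism are assumptions, not optim.py's actual code):

```python
from collections import deque
import torch

class QuartileClipper:
    """Clip gradients against clipping_scale x median of recent grad norms."""

    def __init__(self, window: int = 128, clipping_scale: float = 2.0):
        self.history = deque(maxlen=window)   # recent total grad norms
        self.clipping_scale = clipping_scale

    def clip_(self, params) -> bool:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads]))
        self.history.append(norm.item())
        q = torch.quantile(torch.tensor(list(self.history)),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()   # 2.0 x median
        if norm.item() > threshold:
            for g in grads:
                g.mul_(threshold / norm.item())
            return True   # this batch counts toward "percent-clipped"
        return False
```

`percent-clipped=0.0` in most of these WARNINGs shows the run is healthy: gradient norms stay well under twice their own median, so clipping rarely fires.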
], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:24:18,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239038.66666666666, ans=0.125 +2024-07-29 04:24:21,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=239052.0, ans=0.125 +2024-07-29 04:24:23,922 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.64 vs. limit=15.0 +2024-07-29 04:24:39,149 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.809e+01 5.775e+01 6.562e+01 7.641e+01 1.081e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 04:24:49,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239078.66666666666, ans=0.0 +2024-07-29 04:25:02,891 INFO [train.py:1114] (2/4) Epoch 18, batch 5550, loss[loss=0.1676, simple_loss=0.2646, pruned_loss=0.03525, over 4713.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2654, pruned_loss=0.04212, over 933904.38 frames. ], batch size: 12, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:25:03,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=239105.33333333334, ans=0.2 +2024-07-29 04:25:04,384 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239105.33333333334, ans=0.125 +2024-07-29 04:25:14,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=239118.66666666666, ans=0.2 +2024-07-29 04:25:15,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=239118.66666666666, ans=0.2 +2024-07-29 04:25:15,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=239132.0, ans=0.015 +2024-07-29 04:25:15,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239132.0, ans=0.1 +2024-07-29 04:25:36,070 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.61 vs. limit=22.5 +2024-07-29 04:25:38,199 INFO [train.py:1114] (2/4) Epoch 18, batch 5600, loss[loss=0.1637, simple_loss=0.2617, pruned_loss=0.03285, over 4742.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2666, pruned_loss=0.04283, over 934930.11 frames. ], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:25:40,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=239172.0, ans=0.1 +2024-07-29 04:25:43,456 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.39 vs. limit=12.0 +2024-07-29 04:25:48,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. 
limit=15.0 +2024-07-29 04:25:59,990 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.791e+01 6.471e+01 7.649e+01 1.137e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-29 04:26:01,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=239212.0, ans=0.0 +2024-07-29 04:26:12,726 INFO [train.py:1114] (2/4) Epoch 18, batch 5650, loss[loss=0.2014, simple_loss=0.2829, pruned_loss=0.05994, over 4554.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04262, over 937263.46 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:26:24,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.00 vs. limit=15.0 +2024-07-29 04:26:31,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=239265.33333333334, ans=0.125 +2024-07-29 04:26:39,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239292.0, ans=0.125 +2024-07-29 04:26:47,128 INFO [train.py:1114] (2/4) Epoch 18, batch 5700, loss[loss=0.1727, simple_loss=0.2612, pruned_loss=0.04213, over 4695.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04234, over 938302.17 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:26:52,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239305.33333333334, ans=0.125 +2024-07-29 04:26:54,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239318.66666666666, ans=0.1 +2024-07-29 04:27:01,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.15 vs. limit=22.5 +2024-07-29 04:27:01,747 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:27:06,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=239332.0, ans=0.125 +2024-07-29 04:27:09,683 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.703e+01 6.369e+01 7.336e+01 1.206e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-29 04:27:18,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239358.66666666666, ans=0.1 +2024-07-29 04:27:22,265 INFO [train.py:1114] (2/4) Epoch 18, batch 5750, loss[loss=0.1886, simple_loss=0.2939, pruned_loss=0.04172, over 4753.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2668, pruned_loss=0.04265, over 938597.77 frames. 
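The `scaling.py:1024` "Whitening" lines log a diagnostic of how far the channel covariance of a module's activations is from being proportional to the identity (fully "white"); the `metric=X vs. limit=Y` form is a threshold check, and presumably a corrective gradient penalty applies when the metric exceeds the limit. A plausible reconstruction of such a metric, under the assumption that it is the ratio of the mean squared eigenvalue to the squared mean eigenvalue of the per-group covariance (which equals 1.0 for white features):

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """x: (frames, channels). Returns >= 1.0; equals 1.0 when the covariance
    of each channel group is proportional to the identity."""
    frames, channels = x.shape
    g = channels // num_groups
    x = x.reshape(frames, num_groups, g).transpose(0, 1)   # (groups, frames, g)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / frames       # (groups, g, g)
    tr = cov.diagonal(dim1=1, dim2=2).sum(dim=1)            # sum of eigenvalues
    tr_sq = (cov * cov.transpose(1, 2)).sum(dim=(1, 2))     # sum of eigenvalues^2
    return (tr_sq * g / (tr * tr)).mean()

x = torch.randn(1000, 192)
print(whitening_metric(x))   # close to 1.0 for nearly-white random features
```

Under this reading, values like `metric=8.83 vs. limit=15.0` mean the features are moderately correlated but within tolerance, while occasional spikes (`metric=28.61 vs. limit=22.5`) are the cases where the constraint actually engages.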
], batch size: 19, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:27:24,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=239372.0, ans=0.125 +2024-07-29 04:27:26,999 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239372.0, ans=0.125 +2024-07-29 04:27:28,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=239385.33333333334, ans=0.0 +2024-07-29 04:27:34,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239385.33333333334, ans=0.125 +2024-07-29 04:27:45,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=239412.0, ans=0.125 +2024-07-29 04:27:45,189 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=239412.0, ans=0.125 +2024-07-29 04:27:49,655 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.75 vs. limit=5.0 +2024-07-29 04:27:57,370 INFO [train.py:1114] (2/4) Epoch 18, batch 5800, loss[loss=0.183, simple_loss=0.2644, pruned_loss=0.0508, over 4713.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2668, pruned_loss=0.04254, over 937249.78 frames. ], batch size: 19, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:28:06,140 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=239452.0, ans=0.0 +2024-07-29 04:28:07,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=239452.0, ans=0.0 +2024-07-29 04:28:10,313 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-07-29 04:28:18,265 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.596e+01 6.128e+01 6.728e+01 1.003e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-29 04:28:27,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0 +2024-07-29 04:28:29,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=239492.0, ans=0.2 +2024-07-29 04:28:30,923 INFO [train.py:1114] (2/4) Epoch 18, batch 5850, loss[loss=0.1747, simple_loss=0.2724, pruned_loss=0.03849, over 4608.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2657, pruned_loss=0.04169, over 937943.90 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:28:33,990 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.98 vs. limit=15.0 +2024-07-29 04:28:37,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=239518.66666666666, ans=0.2 +2024-07-29 04:28:42,542 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=239518.66666666666, ans=0.125 +2024-07-29 04:29:04,796 INFO [train.py:1114] (2/4) Epoch 18, batch 5900, loss[loss=0.1683, simple_loss=0.2535, pruned_loss=0.04149, over 4685.00 frames. 
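The `lr:` field in the batch summaries drifts smoothly downward (4.14e-03 at batch 5050, 4.11e-03 by the 6900s), a decay that depends on both batch count and epoch. Zipformer recipes use an Eden-style scheduler of roughly the following shape; the base LR and constants below are illustrative defaults, not values confirmed by this log:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-style LR: smooth power-law decay in both batch count and epoch."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Deep into training both factors change very slowly, which is why the logged
# lr drifts only from 4.14e-03 to 4.11e-03 across these ~2500 batches.
print(eden_lr(0.045, 240000, 18))
```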
], tot_loss[loss=0.1756, simple_loss=0.2668, pruned_loss=0.04224, over 938010.67 frames. ], batch size: 15, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:29:09,729 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.84 vs. limit=15.0 +2024-07-29 04:29:11,100 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=22.5 +2024-07-29 04:29:13,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=12.0 +2024-07-29 04:29:18,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=239598.66666666666, ans=0.5 +2024-07-29 04:29:18,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=239598.66666666666, ans=0.125 +2024-07-29 04:29:20,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=239598.66666666666, ans=0.025 +2024-07-29 04:29:23,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239598.66666666666, ans=0.1 +2024-07-29 04:29:27,145 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.760e+01 6.303e+01 7.095e+01 1.028e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:29:30,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.21 vs. limit=15.0 +2024-07-29 04:29:32,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.92 vs. limit=6.0 +2024-07-29 04:29:35,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=239625.33333333334, ans=0.2 +2024-07-29 04:29:53,218 INFO [train.py:1114] (2/4) Epoch 18, batch 5950, loss[loss=0.19, simple_loss=0.2787, pruned_loss=0.05064, over 4685.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2662, pruned_loss=0.04219, over 940205.73 frames. ], batch size: 15, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:30:11,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=239652.0, ans=0.125 +2024-07-29 04:30:42,163 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=239665.33333333334, ans=0.125 +2024-07-29 04:30:47,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239678.66666666666, ans=0.125 +2024-07-29 04:30:50,622 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:30:56,321 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=239692.0, ans=0.0 +2024-07-29 04:30:59,221 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.83 vs. 
limit=15.0 +2024-07-29 04:31:00,378 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=239692.0, ans=0.0 +2024-07-29 04:31:01,587 INFO [train.py:1114] (2/4) Epoch 18, batch 6000, loss[loss=0.159, simple_loss=0.2614, pruned_loss=0.02829, over 4236.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2658, pruned_loss=0.04224, over 937312.09 frames. ], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:31:01,587 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 04:31:06,005 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.4071, 4.2390, 3.7134, 3.8767], device='cuda:2') +2024-07-29 04:31:13,506 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1615, simple_loss=0.2636, pruned_loss=0.0297, over 944034.00 frames. +2024-07-29 04:31:13,507 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 04:31:19,927 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0 +2024-07-29 04:31:22,256 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-07-29 04:31:23,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=239718.66666666666, ans=0.125 +2024-07-29 04:31:26,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=239718.66666666666, ans=0.0 +2024-07-29 04:31:27,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=239718.66666666666, ans=0.125 +2024-07-29 04:31:31,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=6.0 +2024-07-29 04:31:34,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=239732.0, ans=0.125 +2024-07-29 04:31:36,675 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 5.758e+01 6.298e+01 7.334e+01 1.056e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 04:31:37,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=239745.33333333334, ans=0.5 +2024-07-29 04:31:47,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=239758.66666666666, ans=0.125 +2024-07-29 04:31:49,512 INFO [train.py:1114] (2/4) Epoch 18, batch 6050, loss[loss=0.1633, simple_loss=0.2441, pruned_loss=0.04122, over 4772.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2642, pruned_loss=0.04176, over 938391.88 frames. ], batch size: 12, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:31:54,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=239772.0, ans=0.025 +2024-07-29 04:31:59,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.80 vs. 
limit=15.0 +2024-07-29 04:31:59,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=239785.33333333334, ans=0.125 +2024-07-29 04:31:59,929 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-07-29 04:32:00,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=239785.33333333334, ans=0.125 +2024-07-29 04:32:08,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=239798.66666666666, ans=0.125 +2024-07-29 04:32:09,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=239812.0, ans=10.0 +2024-07-29 04:32:19,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=239825.33333333334, ans=0.125 +2024-07-29 04:32:40,034 INFO [train.py:1114] (2/4) Epoch 18, batch 6100, loss[loss=0.178, simple_loss=0.2747, pruned_loss=0.04065, over 4674.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2645, pruned_loss=0.04188, over 937187.18 frames. ], batch size: 15, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:32:41,073 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-29 04:32:56,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=239852.0, ans=0.125 +2024-07-29 04:32:59,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=239852.0, ans=0.125 +2024-07-29 04:33:10,859 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.504e+01 6.224e+01 7.220e+01 1.027e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 04:33:11,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=239878.66666666666, ans=0.125 +2024-07-29 04:33:13,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239878.66666666666, ans=0.0 +2024-07-29 04:33:20,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=239892.0, ans=0.125 +2024-07-29 04:33:25,613 INFO [train.py:1114] (2/4) Epoch 18, batch 6150, loss[loss=0.2091, simple_loss=0.2889, pruned_loss=0.06462, over 3565.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2655, pruned_loss=0.04257, over 935995.28 frames. ], batch size: 35, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:33:29,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239905.33333333334, ans=0.125 +2024-07-29 04:33:39,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=239932.0, ans=0.1 +2024-07-29 04:33:43,518 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. 
limit=15.0 +2024-07-29 04:33:43,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=239932.0, ans=0.2 +2024-07-29 04:33:48,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239945.33333333334, ans=0.0 +2024-07-29 04:33:49,441 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=239945.33333333334, ans=0.2 +2024-07-29 04:33:59,210 INFO [train.py:1114] (2/4) Epoch 18, batch 6200, loss[loss=0.1854, simple_loss=0.2867, pruned_loss=0.04208, over 4734.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2652, pruned_loss=0.04234, over 935794.06 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:34:07,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=239985.33333333334, ans=0.0 +2024-07-29 04:34:09,049 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=239985.33333333334, ans=0.0 +2024-07-29 04:34:13,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=239985.33333333334, ans=0.125 +2024-07-29 04:34:20,015 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=239998.66666666666, ans=0.09899494936611666 +2024-07-29 04:34:44,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.597e+01 6.193e+01 7.328e+01 9.537e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 04:34:47,557 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.74 vs. limit=22.5 +2024-07-29 04:34:55,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=240025.33333333334, ans=0.0 +2024-07-29 04:34:58,269 INFO [train.py:1114] (2/4) Epoch 18, batch 6250, loss[loss=0.2103, simple_loss=0.3063, pruned_loss=0.05717, over 4811.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2652, pruned_loss=0.04228, over 932667.42 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:35:03,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=240038.66666666666, ans=0.07 +2024-07-29 04:35:21,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=240078.66666666666, ans=0.0 +2024-07-29 04:35:23,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0 +2024-07-29 04:35:27,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=240092.0, ans=0.0 +2024-07-29 04:35:32,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=22.5 +2024-07-29 04:35:33,222 INFO [train.py:1114] (2/4) Epoch 18, batch 6300, loss[loss=0.1325, simple_loss=0.2257, pruned_loss=0.01969, over 4531.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2652, pruned_loss=0.04216, over 930232.68 frames. 
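The "Computing validation loss" block at batch 6000 above (train.py:1137/1146) is a periodic held-out pass: the model is evaluated without gradients over the full validation set and a frame-weighted average loss is reported ("validation: loss=0.1615 ... over 944034.00 frames"), alongside diagnostics such as attention-weight entropies and peak memory. A self-contained sketch of that pattern; `loss_fn(model, batch) -> (per_frame_loss, num_frames)` is an assumed interface, not train.py's actual one:

```python
import torch

@torch.no_grad()
def compute_validation_loss(model, loss_fn, valid_loader) -> float:
    """One eval pass over the validation set, frame-weighted like the log."""
    was_training = model.training
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in valid_loader:
        per_frame_loss, frames = loss_fn(model, batch)
        tot_loss += float(per_frame_loss) * frames
        tot_frames += frames
    if was_training:
        model.train()   # resume training mode before the next batch
    return tot_loss / tot_frames
```

The validation loss here (0.1615) sitting below the running training loss (~0.175) is expected, since the validation figure excludes the pruned-loss warm-up noise of recent parameter updates.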
], batch size: 10, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:35:57,143 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.06 vs. limit=6.0 +2024-07-29 04:35:57,716 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.65 vs. limit=10.0 +2024-07-29 04:35:58,648 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.639e+01 6.264e+01 7.118e+01 1.029e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 04:35:58,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240145.33333333334, ans=0.0 +2024-07-29 04:36:03,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240145.33333333334, ans=0.125 +2024-07-29 04:36:11,348 INFO [train.py:1114] (2/4) Epoch 18, batch 6350, loss[loss=0.1686, simple_loss=0.274, pruned_loss=0.03156, over 4456.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2636, pruned_loss=0.04133, over 934054.74 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:36:11,857 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.19 vs. limit=15.0 +2024-07-29 04:36:18,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=240172.0, ans=0.0 +2024-07-29 04:36:44,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240198.66666666666, ans=0.0 +2024-07-29 04:36:45,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240198.66666666666, ans=0.1 +2024-07-29 04:36:50,771 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=240212.0, ans=0.0 +2024-07-29 04:37:00,115 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:37:05,082 INFO [train.py:1114] (2/4) Epoch 18, batch 6400, loss[loss=0.1504, simple_loss=0.2501, pruned_loss=0.02534, over 4641.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.264, pruned_loss=0.0416, over 935388.30 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:37:51,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240265.33333333334, ans=0.125 +2024-07-29 04:37:55,451 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.669e+01 6.278e+01 7.394e+01 9.691e+01, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 04:37:56,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.06 vs. limit=15.0 +2024-07-29 04:37:57,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=240278.66666666666, ans=0.125 +2024-07-29 04:38:02,329 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. 
limit=15.0 +2024-07-29 04:38:07,971 INFO [train.py:1114] (2/4) Epoch 18, batch 6450, loss[loss=0.1881, simple_loss=0.2756, pruned_loss=0.05029, over 4525.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2648, pruned_loss=0.04165, over 938842.94 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:38:08,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=240305.33333333334, ans=0.025 +2024-07-29 04:38:11,634 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.22 vs. limit=22.5 +2024-07-29 04:38:17,177 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=12.0 +2024-07-29 04:38:19,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240318.66666666666, ans=0.1 +2024-07-29 04:38:53,630 INFO [train.py:1114] (2/4) Epoch 18, batch 6500, loss[loss=0.2892, simple_loss=0.3576, pruned_loss=0.1104, over 3452.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2652, pruned_loss=0.04195, over 940193.93 frames. ], batch size: 36, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:38:59,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=240385.33333333334, ans=0.2 +2024-07-29 04:39:01,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=240385.33333333334, ans=0.025 +2024-07-29 04:39:10,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=240398.66666666666, ans=0.07 +2024-07-29 04:39:13,962 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.507e+01 6.062e+01 6.906e+01 9.828e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 04:39:27,399 INFO [train.py:1114] (2/4) Epoch 18, batch 6550, loss[loss=0.1478, simple_loss=0.2276, pruned_loss=0.034, over 4821.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2642, pruned_loss=0.04142, over 943105.97 frames. ], batch size: 11, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:39:34,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=240438.66666666666, ans=0.0 +2024-07-29 04:39:35,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240438.66666666666, ans=0.0 +2024-07-29 04:39:37,071 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.68 vs. limit=22.5 +2024-07-29 04:39:40,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240452.0, ans=0.125 +2024-07-29 04:39:45,233 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.96 vs. 
limit=15.0 +2024-07-29 04:39:54,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=240478.66666666666, ans=0.0 +2024-07-29 04:39:56,879 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=240478.66666666666, ans=0.025 +2024-07-29 04:40:04,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=240492.0, ans=0.2 +2024-07-29 04:40:04,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=240492.0, ans=0.0 +2024-07-29 04:40:05,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=240505.33333333334, ans=0.125 +2024-07-29 04:40:06,038 INFO [train.py:1114] (2/4) Epoch 18, batch 6600, loss[loss=0.1772, simple_loss=0.2771, pruned_loss=0.03864, over 4943.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.04148, over 944885.17 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:40:34,501 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.621e+01 6.292e+01 7.270e+01 1.272e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-29 04:40:36,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=240545.33333333334, ans=0.125 +2024-07-29 04:40:36,962 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.27 vs. limit=22.5 +2024-07-29 04:40:44,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=240558.66666666666, ans=0.125 +2024-07-29 04:40:47,169 INFO [train.py:1114] (2/4) Epoch 18, batch 6650, loss[loss=0.2007, simple_loss=0.2903, pruned_loss=0.05552, over 4645.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2653, pruned_loss=0.04168, over 943529.49 frames. ], batch size: 17, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:40:53,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=240572.0, ans=0.125 +2024-07-29 04:40:56,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240585.33333333334, ans=0.0 +2024-07-29 04:40:58,205 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.80 vs. limit=6.0 +2024-07-29 04:40:59,743 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=240585.33333333334, ans=0.0 +2024-07-29 04:41:21,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240612.0, ans=0.125 +2024-07-29 04:41:26,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240625.33333333334, ans=0.125 +2024-07-29 04:41:26,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240625.33333333334, ans=0.1 +2024-07-29 04:41:30,161 INFO [train.py:1114] (2/4) Epoch 18, batch 6700, loss[loss=0.1905, simple_loss=0.2881, pruned_loss=0.0465, over 4807.00 frames. 
], tot_loss[loss=0.1744, simple_loss=0.2653, pruned_loss=0.04179, over 942325.01 frames. ], batch size: 19, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:41:30,474 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0 +2024-07-29 04:41:32,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=240638.66666666666, ans=0.0 +2024-07-29 04:41:32,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240638.66666666666, ans=0.125 +2024-07-29 04:41:34,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=240638.66666666666, ans=0.125 +2024-07-29 04:41:37,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=240652.0, ans=0.125 +2024-07-29 04:41:40,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240652.0, ans=0.125 +2024-07-29 04:41:46,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=240665.33333333334, ans=0.125 +2024-07-29 04:41:51,434 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.784e+01 6.386e+01 7.165e+01 1.123e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 04:41:55,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.00 vs. limit=15.0 +2024-07-29 04:41:55,663 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=240678.66666666666, ans=0.125 +2024-07-29 04:41:57,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240692.0, ans=0.125 +2024-07-29 04:41:57,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240692.0, ans=0.125 +2024-07-29 04:42:00,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240692.0, ans=0.0 +2024-07-29 04:42:04,364 INFO [train.py:1114] (2/4) Epoch 18, batch 6750, loss[loss=0.1749, simple_loss=0.2635, pruned_loss=0.04316, over 4311.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2643, pruned_loss=0.04163, over 940867.34 frames. ], batch size: 25, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:42:10,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=240718.66666666666, ans=0.0 +2024-07-29 04:42:10,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.11 vs. 
limit=15.0 +2024-07-29 04:42:13,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=240718.66666666666, ans=0.0 +2024-07-29 04:42:35,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=240758.66666666666, ans=0.125 +2024-07-29 04:42:37,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240758.66666666666, ans=0.125 +2024-07-29 04:42:38,968 INFO [train.py:1114] (2/4) Epoch 18, batch 6800, loss[loss=0.1558, simple_loss=0.2501, pruned_loss=0.03072, over 4639.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2649, pruned_loss=0.04206, over 938982.71 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:42:39,639 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=240772.0, ans=0.0 +2024-07-29 04:42:45,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240785.33333333334, ans=0.1 +2024-07-29 04:42:47,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=240785.33333333334, ans=0.2 +2024-07-29 04:42:48,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=240785.33333333334, ans=0.0 +2024-07-29 04:42:57,312 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:42:59,657 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.569e+01 6.060e+01 6.382e+01 1.017e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 04:43:09,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=240825.33333333334, ans=0.2 +2024-07-29 04:43:14,322 INFO [train.py:1114] (2/4) Epoch 18, batch 6850, loss[loss=0.1805, simple_loss=0.2898, pruned_loss=0.03564, over 4688.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2642, pruned_loss=0.04164, over 940727.60 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:43:25,430 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.73 vs. limit=15.0 +2024-07-29 04:43:26,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=240852.0, ans=0.2 +2024-07-29 04:43:27,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240865.33333333334, ans=0.1 +2024-07-29 04:43:29,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=240865.33333333334, ans=0.0 +2024-07-29 04:43:48,590 INFO [train.py:1114] (2/4) Epoch 18, batch 6900, loss[loss=0.1719, simple_loss=0.2604, pruned_loss=0.04166, over 4964.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2637, pruned_loss=0.04137, over 942949.44 frames. 
], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:43:54,523 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240905.33333333334, ans=0.0 +2024-07-29 04:43:54,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=240905.33333333334, ans=22.5 +2024-07-29 04:44:00,774 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=240918.66666666666, ans=0.5 +2024-07-29 04:44:03,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=240932.0, ans=0.0 +2024-07-29 04:44:05,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=240932.0, ans=0.125 +2024-07-29 04:44:09,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=240945.33333333334, ans=0.5 +2024-07-29 04:44:11,088 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.810e+01 6.480e+01 7.498e+01 1.027e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-29 04:44:23,805 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=240972.0, ans=0.025 +2024-07-29 04:44:24,922 INFO [train.py:1114] (2/4) Epoch 18, batch 6950, loss[loss=0.1538, simple_loss=0.2412, pruned_loss=0.03321, over 4546.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.264, pruned_loss=0.04171, over 940128.49 frames. ], batch size: 10, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:44:28,949 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=240972.0, ans=0.035 +2024-07-29 04:44:29,053 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=240972.0, ans=0.025 +2024-07-29 04:44:33,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=240985.33333333334, ans=0.125 +2024-07-29 04:44:46,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241012.0, ans=0.1 +2024-07-29 04:44:52,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=241025.33333333334, ans=0.04949747468305833 +2024-07-29 04:44:59,696 INFO [train.py:1114] (2/4) Epoch 18, batch 7000, loss[loss=0.1939, simple_loss=0.2877, pruned_loss=0.0501, over 4644.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2642, pruned_loss=0.04184, over 938430.52 frames. 
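In the batch summaries, `loss[...]` is the current batch while `tot_loss[... over N frames]` is a running frame-weighted aggregate; the fractional frame counts (e.g. "over 938430.52 frames") suggest older batches are decayed geometrically rather than simply summed. A sketch of that kind of aggregator, with an assumed decay constant:

```python
class RunningLoss:
    """Frame-weighted running average with geometric decay of old batches."""

    def __init__(self, decay: float = 0.98):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        # decay shrinks the contribution of older batches, so the average
        # tracks recent history and the frame total stays near a fixed point
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames
```

This explains why `tot_loss` moves slowly (0.173-0.176 across this whole stretch) even as individual batch losses range from 0.13 to 0.29.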
], batch size: 17, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:45:02,379 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=241038.66666666666, ans=0.0 +2024-07-29 04:45:20,301 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.764e+01 6.515e+01 7.633e+01 1.207e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-29 04:45:25,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=241092.0, ans=0.0 +2024-07-29 04:45:30,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=241092.0, ans=0.125 +2024-07-29 04:45:33,134 INFO [train.py:1114] (2/4) Epoch 18, batch 7050, loss[loss=0.1734, simple_loss=0.2698, pruned_loss=0.03852, over 4713.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2635, pruned_loss=0.04137, over 941834.36 frames. ], batch size: 19, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:45:35,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.51 vs. limit=10.0 +2024-07-29 04:45:35,972 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=241105.33333333334, ans=0.125 +2024-07-29 04:45:36,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=241105.33333333334, ans=0.07 +2024-07-29 04:45:51,475 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.25 vs. limit=6.0 +2024-07-29 04:45:53,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=241145.33333333334, ans=0.2 +2024-07-29 04:46:02,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=241158.66666666666, ans=0.2 +2024-07-29 04:46:03,902 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:46:06,981 INFO [train.py:1114] (2/4) Epoch 18, batch 7100, loss[loss=0.1819, simple_loss=0.2805, pruned_loss=0.04161, over 4806.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2643, pruned_loss=0.0418, over 936052.83 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:46:28,375 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.554e+01 6.044e+01 6.901e+01 9.600e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-29 04:46:32,307 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=241212.0, ans=0.2 +2024-07-29 04:46:32,717 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.21 vs. limit=15.0 +2024-07-29 04:46:33,660 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=241225.33333333334, ans=0.125 +2024-07-29 04:46:38,528 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.79 vs. 
limit=15.0 +2024-07-29 04:46:40,705 INFO [train.py:1114] (2/4) Epoch 18, batch 7150, loss[loss=0.1892, simple_loss=0.2823, pruned_loss=0.048, over 4453.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2628, pruned_loss=0.04162, over 936854.91 frames. ], batch size: 21, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:46:42,134 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241238.66666666666, ans=0.125 +2024-07-29 04:46:43,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.89 vs. limit=6.0 +2024-07-29 04:46:48,761 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=15.0 +2024-07-29 04:46:51,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=241252.0, ans=0.2 +2024-07-29 04:47:00,689 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0 +2024-07-29 04:47:24,560 INFO [train.py:1114] (2/4) Epoch 18, batch 7200, loss[loss=0.1682, simple_loss=0.2622, pruned_loss=0.0371, over 4794.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2647, pruned_loss=0.04213, over 936903.56 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:47:41,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241332.0, ans=0.125 +2024-07-29 04:47:45,845 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 5.707e+01 6.356e+01 7.350e+01 1.020e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 04:47:51,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241358.66666666666, ans=0.125 +2024-07-29 04:47:57,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=241372.0, ans=0.04949747468305833 +2024-07-29 04:47:57,743 INFO [train.py:1114] (2/4) Epoch 18, batch 7250, loss[loss=0.1786, simple_loss=0.2654, pruned_loss=0.04592, over 4850.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2652, pruned_loss=0.0422, over 939063.42 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:48:00,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241372.0, ans=0.125 +2024-07-29 04:48:32,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0 +2024-07-29 04:48:32,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.89 vs. limit=22.5 +2024-07-29 04:48:32,909 INFO [train.py:1114] (2/4) Epoch 18, batch 7300, loss[loss=0.1797, simple_loss=0.2707, pruned_loss=0.04432, over 4845.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2656, pruned_loss=0.04248, over 939440.80 frames. 
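Entries like `bypass.skip_rate`, `bypass_mid.scale_min`, and `out_combiner.scale_min ... ans=0.2` throughout this log track learnable bypass connections: a block's output is a per-channel interpolation between its input and its transformed output, and the scheduled `scale_min` floors how far the block can be bypassed. A sketch of the module shape implied by those parameter names (assumptions, not Zipformer's exact code):

```python
import torch
from torch import nn

class Bypass(nn.Module):
    """out = x + scale * (y - x), with scale clamped to [scale_min, 1.0]."""

    def __init__(self, channels: int, scale_min: float = 0.2):
        super().__init__()
        self.scale = nn.Parameter(torch.full((channels,), 0.5))
        self.scale_min = scale_min   # scheduled externally per batch count

    def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
        # x: block input, y: block output; channels on the last dimension
        s = self.scale.clamp(min=self.scale_min, max=1.0)
        return x + s * (y - x)
```

Raising the floor on a schedule (the logged `ans=0.2` is the late-training value) keeps every block at least partially active once training has stabilized, after starting nearly bypassed.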
], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:48:39,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=241452.0, ans=22.5 +2024-07-29 04:48:41,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=241452.0, ans=0.125 +2024-07-29 04:48:42,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241452.0, ans=0.1 +2024-07-29 04:48:46,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241465.33333333334, ans=0.1 +2024-07-29 04:48:53,885 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.415e+01 6.086e+01 6.711e+01 9.900e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 04:49:08,031 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=241492.0, ans=0.2 +2024-07-29 04:49:09,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241492.0, ans=0.125 +2024-07-29 04:49:14,878 INFO [train.py:1114] (2/4) Epoch 18, batch 7350, loss[loss=0.1514, simple_loss=0.2534, pruned_loss=0.02472, over 4634.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2657, pruned_loss=0.04236, over 938894.41 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:49:17,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241505.33333333334, ans=0.125 +2024-07-29 04:49:46,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241545.33333333334, ans=0.125 +2024-07-29 04:49:46,853 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241558.66666666666, ans=0.1 +2024-07-29 04:49:55,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=241558.66666666666, ans=0.0 +2024-07-29 04:49:58,974 INFO [train.py:1114] (2/4) Epoch 18, batch 7400, loss[loss=0.1789, simple_loss=0.2747, pruned_loss=0.04151, over 4695.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2656, pruned_loss=0.04214, over 940077.43 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:49:59,303 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. 
limit=15.0 +2024-07-29 04:50:01,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=241572.0, ans=0.09899494936611666 +2024-07-29 04:50:13,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=241598.66666666666, ans=0.125 +2024-07-29 04:50:22,050 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.627e+01 6.295e+01 7.057e+01 1.550e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-29 04:50:23,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=241612.0, ans=0.0 +2024-07-29 04:50:33,794 INFO [train.py:1114] (2/4) Epoch 18, batch 7450, loss[loss=0.1571, simple_loss=0.2444, pruned_loss=0.03493, over 4624.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2639, pruned_loss=0.0413, over 938181.75 frames. ], batch size: 11, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:50:41,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=241652.0, ans=0.2 +2024-07-29 04:50:44,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241652.0, ans=0.1 +2024-07-29 04:50:58,035 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.70 vs. limit=15.0 +2024-07-29 04:51:01,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=241692.0, ans=0.0 +2024-07-29 04:51:05,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=241692.0, ans=0.2 +2024-07-29 04:51:06,929 INFO [train.py:1114] (2/4) Epoch 18, batch 7500, loss[loss=0.1916, simple_loss=0.2854, pruned_loss=0.04891, over 3494.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2652, pruned_loss=0.04163, over 936414.71 frames. ], batch size: 36, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:51:21,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=241732.0, ans=0.125 +2024-07-29 04:51:28,211 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.735e+01 6.333e+01 6.793e+01 1.076e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 04:51:29,241 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-07-29 04:51:36,232 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=241758.66666666666, ans=0.0 +2024-07-29 04:51:38,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=241758.66666666666, ans=0.125 +2024-07-29 04:51:40,184 INFO [train.py:1114] (2/4) Epoch 18, batch 7550, loss[loss=0.1884, simple_loss=0.277, pruned_loss=0.04988, over 4621.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2664, pruned_loss=0.04219, over 936298.30 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:51:54,582 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.66 vs. 
limit=15.0 +2024-07-29 04:51:56,402 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0 +2024-07-29 04:52:00,846 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=241785.33333333334, ans=0.0 +2024-07-29 04:52:09,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=241812.0, ans=0.0 +2024-07-29 04:52:10,297 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:52:10,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=241812.0, ans=0.125 +2024-07-29 04:52:21,678 INFO [train.py:1114] (2/4) Epoch 18, batch 7600, loss[loss=0.1855, simple_loss=0.2885, pruned_loss=0.0412, over 4808.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2655, pruned_loss=0.04194, over 937957.12 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:52:29,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=241852.0, ans=0.2 +2024-07-29 04:52:43,233 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.592e+01 6.086e+01 6.807e+01 8.936e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 04:52:55,135 INFO [train.py:1114] (2/4) Epoch 18, batch 7650, loss[loss=0.1541, simple_loss=0.24, pruned_loss=0.03413, over 4944.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2642, pruned_loss=0.04126, over 936916.61 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:52:56,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241905.33333333334, ans=0.125 +2024-07-29 04:52:57,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=241905.33333333334, ans=0.0 +2024-07-29 04:53:04,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=241918.66666666666, ans=0.0 +2024-07-29 04:53:10,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241932.0, ans=0.1 +2024-07-29 04:53:20,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=241945.33333333334, ans=0.125 +2024-07-29 04:53:24,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241958.66666666666, ans=0.1 +2024-07-29 04:53:28,762 INFO [train.py:1114] (2/4) Epoch 18, batch 7700, loss[loss=0.1574, simple_loss=0.2553, pruned_loss=0.02978, over 4702.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.04195, over 933976.58 frames. 
], batch size: 13, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:53:36,191 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241985.33333333334, ans=0.1 +2024-07-29 04:53:51,568 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=242012.0, ans=0.125 +2024-07-29 04:53:52,081 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.527e+01 6.014e+01 6.715e+01 9.821e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-29 04:53:52,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.74 vs. limit=15.0 +2024-07-29 04:53:56,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=242012.0, ans=0.07 +2024-07-29 04:53:57,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=242025.33333333334, ans=0.0 +2024-07-29 04:54:01,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242025.33333333334, ans=0.125 +2024-07-29 04:54:03,544 INFO [train.py:1114] (2/4) Epoch 18, batch 7750, loss[loss=0.1732, simple_loss=0.2595, pruned_loss=0.04345, over 4943.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2664, pruned_loss=0.04262, over 935354.67 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:54:12,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=242052.0, ans=0.125 +2024-07-29 04:54:13,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=27.04 vs. limit=22.5 +2024-07-29 04:54:34,338 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.40 vs. limit=22.5 +2024-07-29 04:54:39,145 INFO [train.py:1114] (2/4) Epoch 18, batch 7800, loss[loss=0.1741, simple_loss=0.2692, pruned_loss=0.03955, over 4664.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2663, pruned_loss=0.04214, over 937079.75 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:54:39,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242105.33333333334, ans=0.1 +2024-07-29 04:54:45,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=242118.66666666666, ans=0.2 +2024-07-29 04:54:56,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=242132.0, ans=0.0 +2024-07-29 04:55:00,037 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.583e+01 6.063e+01 6.593e+01 8.807e+01, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 04:55:28,070 INFO [train.py:1114] (2/4) Epoch 18, batch 7850, loss[loss=0.146, simple_loss=0.2338, pruned_loss=0.02911, over 4496.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2672, pruned_loss=0.04272, over 935609.79 frames. 
], batch size: 10, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:09,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.97 vs. limit=6.0 +2024-07-29 04:56:18,459 INFO [train.py:1114] (2/4) Epoch 18, batch 7900, loss[loss=0.1596, simple_loss=0.2597, pruned_loss=0.02979, over 4872.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2689, pruned_loss=0.04309, over 933075.34 frames. ], batch size: 14, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:56:28,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=242252.0, ans=0.125 +2024-07-29 04:56:31,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=242265.33333333334, ans=0.2 +2024-07-29 04:56:39,136 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.700e+01 6.249e+01 7.197e+01 1.145e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 04:56:47,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=242292.0, ans=0.05 +2024-07-29 04:56:51,091 INFO [train.py:1114] (2/4) Epoch 18, batch 7950, loss[loss=0.2003, simple_loss=0.2838, pruned_loss=0.05834, over 3349.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2672, pruned_loss=0.04229, over 935451.56 frames. ], batch size: 35, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:57:01,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=242318.66666666666, ans=0.0 +2024-07-29 04:57:07,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=242332.0, ans=0.125 +2024-07-29 04:57:24,057 INFO [train.py:1114] (2/4) Epoch 18, batch 8000, loss[loss=0.1811, simple_loss=0.2602, pruned_loss=0.05103, over 4616.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2649, pruned_loss=0.04154, over 934605.38 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:57:26,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242372.0, ans=0.125 +2024-07-29 04:57:44,297 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.01 vs. limit=15.0 +2024-07-29 04:57:45,065 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.556e+01 6.379e+01 7.313e+01 1.044e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 04:57:52,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.17 vs. limit=15.0 +2024-07-29 04:57:55,302 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=242425.33333333334, ans=0.2 +2024-07-29 04:57:55,344 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:57:58,320 INFO [train.py:1114] (2/4) Epoch 18, batch 8050, loss[loss=0.1655, simple_loss=0.2658, pruned_loss=0.03265, over 4814.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2649, pruned_loss=0.04137, over 933917.20 frames. 
], batch size: 14, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:57:59,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242438.66666666666, ans=0.1 +2024-07-29 04:58:05,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=242452.0, ans=0.125 +2024-07-29 04:58:06,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=242452.0, ans=0.0 +2024-07-29 04:58:15,724 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0 +2024-07-29 04:58:19,547 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.59 vs. limit=15.0 +2024-07-29 04:58:31,336 INFO [train.py:1114] (2/4) Epoch 18, batch 8100, loss[loss=0.158, simple_loss=0.2561, pruned_loss=0.02996, over 4808.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2661, pruned_loss=0.04146, over 933911.66 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:58:34,477 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.54 vs. limit=15.0 +2024-07-29 04:58:37,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=242518.66666666666, ans=0.025 +2024-07-29 04:58:41,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=242518.66666666666, ans=0.125 +2024-07-29 04:58:52,551 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.503e+01 5.727e+01 6.609e+01 7.504e+01 1.146e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-29 04:58:58,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242558.66666666666, ans=0.1 +2024-07-29 04:59:04,429 INFO [train.py:1114] (2/4) Epoch 18, batch 8150, loss[loss=0.1641, simple_loss=0.265, pruned_loss=0.03167, over 4802.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2647, pruned_loss=0.04094, over 937214.85 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:59:09,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=242572.0, ans=0.125 +2024-07-29 04:59:15,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=242585.33333333334, ans=0.2 +2024-07-29 04:59:15,744 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.13 vs. 
limit=15.0 +2024-07-29 04:59:17,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=242598.66666666666, ans=0.125 +2024-07-29 04:59:23,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=242612.0, ans=0.125 +2024-07-29 04:59:27,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=242612.0, ans=0.0 +2024-07-29 04:59:40,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=242638.66666666666, ans=0.0 +2024-07-29 04:59:41,325 INFO [train.py:1114] (2/4) Epoch 18, batch 8200, loss[loss=0.2201, simple_loss=0.302, pruned_loss=0.06903, over 4799.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.265, pruned_loss=0.04094, over 938114.32 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 04:59:42,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=242638.66666666666, ans=0.2 +2024-07-29 04:59:43,524 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=242638.66666666666, ans=0.0 +2024-07-29 04:59:45,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=242638.66666666666, ans=0.2 +2024-07-29 05:00:20,495 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.660e+01 6.350e+01 7.311e+01 1.182e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 05:00:29,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242692.0, ans=0.125 +2024-07-29 05:00:33,415 INFO [train.py:1114] (2/4) Epoch 18, batch 8250, loss[loss=0.2217, simple_loss=0.3223, pruned_loss=0.06056, over 4891.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2661, pruned_loss=0.04141, over 938461.66 frames. ], batch size: 13, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:00:43,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=242718.66666666666, ans=0.04949747468305833 +2024-07-29 05:00:54,464 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.29 vs. limit=10.0 +2024-07-29 05:00:59,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=242758.66666666666, ans=0.04949747468305833 +2024-07-29 05:01:04,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=242758.66666666666, ans=0.2 +2024-07-29 05:01:06,375 INFO [train.py:1114] (2/4) Epoch 18, batch 8300, loss[loss=0.1784, simple_loss=0.2644, pruned_loss=0.04617, over 4896.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2673, pruned_loss=0.04214, over 938232.33 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:01:07,636 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=242772.0, ans=0.0 +2024-07-29 05:01:07,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242772.0, ans=0.1 +2024-07-29 05:01:24,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242798.66666666666, ans=0.1 +2024-07-29 05:01:24,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242812.0, ans=0.125 +2024-07-29 05:01:26,870 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.743e+01 6.319e+01 7.194e+01 1.218e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-29 05:01:29,345 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.23 vs. limit=6.0 +2024-07-29 05:01:36,742 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242825.33333333334, ans=0.125 +2024-07-29 05:01:36,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.37 vs. limit=12.0 +2024-07-29 05:01:38,530 INFO [train.py:1114] (2/4) Epoch 18, batch 8350, loss[loss=0.1997, simple_loss=0.2913, pruned_loss=0.05407, over 4789.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2667, pruned_loss=0.04227, over 941034.81 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:10,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242878.66666666666, ans=0.1 +2024-07-29 05:02:13,829 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0 +2024-07-29 05:02:18,751 INFO [train.py:1114] (2/4) Epoch 18, batch 8400, loss[loss=0.1537, simple_loss=0.249, pruned_loss=0.02924, over 4769.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2661, pruned_loss=0.04195, over 939894.52 frames. ], batch size: 12, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:19,659 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0 +2024-07-29 05:02:23,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=242905.33333333334, ans=0.5 +2024-07-29 05:02:34,584 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:02:39,520 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.882e+01 6.556e+01 7.323e+01 1.088e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 05:02:50,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=242972.0, ans=0.0 +2024-07-29 05:02:51,100 INFO [train.py:1114] (2/4) Epoch 18, batch 8450, loss[loss=0.1867, simple_loss=0.2728, pruned_loss=0.05032, over 4805.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2662, pruned_loss=0.04171, over 938941.76 frames. 
], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:02:57,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=242985.33333333334, ans=0.0 +2024-07-29 05:03:00,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=242985.33333333334, ans=0.125 +2024-07-29 05:03:00,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242985.33333333334, ans=0.125 +2024-07-29 05:03:03,451 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-29 05:03:04,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=242998.66666666666, ans=0.0 +2024-07-29 05:03:32,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=243012.0, ans=0.025 +2024-07-29 05:03:37,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243012.0, ans=0.125 +2024-07-29 05:03:44,956 INFO [train.py:1114] (2/4) Epoch 18, batch 8500, loss[loss=0.1605, simple_loss=0.2583, pruned_loss=0.03134, over 4602.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2653, pruned_loss=0.04154, over 938628.52 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:03:48,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=243038.66666666666, ans=0.125 +2024-07-29 05:03:53,282 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.97 vs. limit=15.0 +2024-07-29 05:03:59,019 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.20 vs. limit=15.0 +2024-07-29 05:04:02,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=243065.33333333334, ans=0.2 +2024-07-29 05:04:07,489 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=243078.66666666666, ans=0.125 +2024-07-29 05:04:07,965 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.501e+01 6.348e+01 7.091e+01 9.836e+01, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 05:04:12,066 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:04:19,720 INFO [train.py:1114] (2/4) Epoch 18, batch 8550, loss[loss=0.1615, simple_loss=0.2401, pruned_loss=0.04143, over 4788.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2642, pruned_loss=0.04093, over 939156.07 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:04:43,749 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:04:51,365 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. 
limit=6.0 +2024-07-29 05:04:52,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=243172.0, ans=0.125 +2024-07-29 05:04:52,779 INFO [train.py:1114] (2/4) Epoch 18, batch 8600, loss[loss=0.2126, simple_loss=0.2998, pruned_loss=0.0627, over 4804.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.265, pruned_loss=0.04128, over 938955.89 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:04:54,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243172.0, ans=0.125 +2024-07-29 05:05:01,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=243185.33333333334, ans=0.1 +2024-07-29 05:05:13,983 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 5.639e+01 6.288e+01 7.210e+01 1.078e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 05:05:19,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=243225.33333333334, ans=0.0 +2024-07-29 05:05:25,064 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.36 vs. limit=15.0 +2024-07-29 05:05:25,423 INFO [train.py:1114] (2/4) Epoch 18, batch 8650, loss[loss=0.1536, simple_loss=0.2456, pruned_loss=0.03081, over 4897.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2649, pruned_loss=0.0418, over 940270.98 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:05:30,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243238.66666666666, ans=0.0 +2024-07-29 05:05:30,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=243238.66666666666, ans=0.125 +2024-07-29 05:05:37,492 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0 +2024-07-29 05:05:37,817 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243265.33333333334, ans=0.1 +2024-07-29 05:05:43,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=243265.33333333334, ans=0.0 +2024-07-29 05:05:45,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=243278.66666666666, ans=0.0 +2024-07-29 05:05:57,720 INFO [train.py:1114] (2/4) Epoch 18, batch 8700, loss[loss=0.1926, simple_loss=0.2904, pruned_loss=0.04741, over 4759.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.04247, over 937836.94 frames. 
], batch size: 13, lr: 4.10e-03, grad_scale: 32.0 +2024-07-29 05:06:05,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=243318.66666666666, ans=10.0 +2024-07-29 05:06:06,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=243318.66666666666, ans=0.125 +2024-07-29 05:06:18,234 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.797e+01 6.169e+01 6.761e+01 9.579e+01, threshold=1.234e+02, percent-clipped=0.0 +2024-07-29 05:06:22,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=243358.66666666666, ans=0.0 +2024-07-29 05:06:29,977 INFO [train.py:1114] (2/4) Epoch 18, batch 8750, loss[loss=0.1911, simple_loss=0.2707, pruned_loss=0.05571, over 4668.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2658, pruned_loss=0.04261, over 935980.62 frames. ], batch size: 15, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:06:46,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=243398.66666666666, ans=0.125 +2024-07-29 05:06:49,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=243412.0, ans=0.0 +2024-07-29 05:06:56,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=243412.0, ans=0.2 +2024-07-29 05:06:57,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243412.0, ans=0.125 +2024-07-29 05:07:01,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=243425.33333333334, ans=0.0 +2024-07-29 05:07:02,359 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.07 vs. limit=15.0 +2024-07-29 05:07:05,110 INFO [train.py:1114] (2/4) Epoch 18, batch 8800, loss[loss=0.1913, simple_loss=0.2998, pruned_loss=0.04135, over 4927.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2666, pruned_loss=0.04269, over 937035.15 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:07:09,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243438.66666666666, ans=0.1 +2024-07-29 05:07:19,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.92 vs. limit=15.0 +2024-07-29 05:07:26,774 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.641e+01 6.387e+01 7.548e+01 9.629e+01, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 05:07:28,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=243478.66666666666, ans=0.125 +2024-07-29 05:07:35,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243492.0, ans=0.1 +2024-07-29 05:07:37,307 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.52 vs. 
limit=6.0 +2024-07-29 05:07:38,899 INFO [train.py:1114] (2/4) Epoch 18, batch 8850, loss[loss=0.1785, simple_loss=0.2676, pruned_loss=0.0447, over 4520.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2665, pruned_loss=0.04282, over 931762.19 frames. ], batch size: 21, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:07:40,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.42 vs. limit=12.0 +2024-07-29 05:07:43,177 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:07:44,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=243505.33333333334, ans=0.025 +2024-07-29 05:07:57,601 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0 +2024-07-29 05:07:59,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=243545.33333333334, ans=0.0 +2024-07-29 05:08:06,466 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.19 vs. limit=15.0 +2024-07-29 05:08:09,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0 +2024-07-29 05:08:12,256 INFO [train.py:1114] (2/4) Epoch 18, batch 8900, loss[loss=0.1671, simple_loss=0.2495, pruned_loss=0.04236, over 4943.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2669, pruned_loss=0.04278, over 930025.69 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:08:13,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=243572.0, ans=0.04949747468305833 +2024-07-29 05:08:32,781 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.619e+01 6.277e+01 7.423e+01 9.938e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:08:42,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=243625.33333333334, ans=0.125 +2024-07-29 05:08:44,478 INFO [train.py:1114] (2/4) Epoch 18, batch 8950, loss[loss=0.1784, simple_loss=0.2744, pruned_loss=0.04123, over 4512.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2657, pruned_loss=0.0421, over 930835.25 frames. ], batch size: 21, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:08:46,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=243638.66666666666, ans=0.2 +2024-07-29 05:08:47,180 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243638.66666666666, ans=0.125 +2024-07-29 05:08:49,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.05 vs. limit=15.0 +2024-07-29 05:08:54,118 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.33 vs. 
limit=15.0 +2024-07-29 05:08:54,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=243652.0, ans=0.025 +2024-07-29 05:09:18,123 INFO [train.py:1114] (2/4) Epoch 18, batch 9000, loss[loss=0.1481, simple_loss=0.236, pruned_loss=0.0301, over 4643.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2644, pruned_loss=0.04188, over 933885.83 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:09:18,124 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 05:09:33,028 INFO [train.py:1146] (2/4) Epoch 18, validation: loss=0.1616, simple_loss=0.2637, pruned_loss=0.02971, over 944034.00 frames. +2024-07-29 05:09:33,029 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 05:09:33,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=243705.33333333334, ans=0.125 +2024-07-29 05:09:43,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=243718.66666666666, ans=0.125 +2024-07-29 05:09:48,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=243732.0, ans=0.125 +2024-07-29 05:09:50,632 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:09:54,502 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.133e+01 5.845e+01 6.498e+01 7.420e+01 1.015e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 05:09:57,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243745.33333333334, ans=0.1 +2024-07-29 05:09:57,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243745.33333333334, ans=0.1 +2024-07-29 05:10:05,970 INFO [train.py:1114] (2/4) Epoch 18, batch 9050, loss[loss=0.1649, simple_loss=0.2593, pruned_loss=0.03523, over 4541.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2639, pruned_loss=0.04158, over 933994.62 frames. ], batch size: 10, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:08,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243772.0, ans=0.125 +2024-07-29 05:10:09,872 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:10:10,460 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=243772.0, ans=0.125 +2024-07-29 05:10:10,609 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.14 vs. limit=15.0 +2024-07-29 05:10:15,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=243785.33333333334, ans=0.125 +2024-07-29 05:10:20,232 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. 
limit=6.0 +2024-07-29 05:10:25,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=243812.0, ans=0.025 +2024-07-29 05:10:29,122 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.79 vs. limit=15.0 +2024-07-29 05:10:36,251 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=243825.33333333334, ans=0.0 +2024-07-29 05:10:37,838 INFO [train.py:1114] (2/4) Epoch 18, batch 9100, loss[loss=0.1596, simple_loss=0.256, pruned_loss=0.03159, over 4937.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2638, pruned_loss=0.04186, over 936607.91 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:38,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.63 vs. limit=15.0 +2024-07-29 05:10:39,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=243838.66666666666, ans=0.125 +2024-07-29 05:10:40,728 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.66 vs. limit=10.0 +2024-07-29 05:10:58,034 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.927e+01 6.725e+01 7.788e+01 1.053e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-29 05:11:01,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=243878.66666666666, ans=0.125 +2024-07-29 05:11:01,569 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-07-29 05:11:02,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243892.0, ans=0.125 +2024-07-29 05:11:06,507 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243892.0, ans=0.0 +2024-07-29 05:11:09,471 INFO [train.py:1114] (2/4) Epoch 18, batch 9150, loss[loss=0.1493, simple_loss=0.2488, pruned_loss=0.02493, over 4810.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2642, pruned_loss=0.0417, over 935711.33 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:09,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=243905.33333333334, ans=0.125 +2024-07-29 05:11:19,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243918.66666666666, ans=0.125 +2024-07-29 05:11:31,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.09 vs. 
limit=15.0 +2024-07-29 05:11:33,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=243945.33333333334, ans=0.125 +2024-07-29 05:11:40,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=243958.66666666666, ans=0.0 +2024-07-29 05:11:42,357 INFO [train.py:1114] (2/4) Epoch 18, batch 9200, loss[loss=0.171, simple_loss=0.2568, pruned_loss=0.04258, over 4852.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2645, pruned_loss=0.04201, over 937350.57 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:03,092 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.677e+01 6.144e+01 6.790e+01 1.037e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 05:12:06,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=244012.0, ans=0.025 +2024-07-29 05:12:07,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=244012.0, ans=0.5 +2024-07-29 05:12:15,012 INFO [train.py:1114] (2/4) Epoch 18, batch 9250, loss[loss=0.1667, simple_loss=0.2551, pruned_loss=0.03915, over 4639.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2645, pruned_loss=0.04202, over 938216.95 frames. ], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:16,157 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.55 vs. limit=12.0 +2024-07-29 05:12:26,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=244052.0, ans=0.125 +2024-07-29 05:12:26,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244052.0, ans=0.1 +2024-07-29 05:12:34,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=244078.66666666666, ans=10.0 +2024-07-29 05:12:35,841 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=244078.66666666666, ans=0.0 +2024-07-29 05:12:47,077 INFO [train.py:1114] (2/4) Epoch 18, batch 9300, loss[loss=0.1601, simple_loss=0.2438, pruned_loss=0.03822, over 4782.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04202, over 937538.81 frames. 
], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:48,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244105.33333333334, ans=0.125 +2024-07-29 05:12:53,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=244118.66666666666, ans=0.2 +2024-07-29 05:12:57,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244118.66666666666, ans=0.1 +2024-07-29 05:12:57,813 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244118.66666666666, ans=0.125 +2024-07-29 05:13:02,861 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:13:07,080 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.598e+01 6.030e+01 6.861e+01 1.072e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-29 05:13:07,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=244145.33333333334, ans=0.125 +2024-07-29 05:13:10,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=244145.33333333334, ans=0.2 +2024-07-29 05:13:11,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244145.33333333334, ans=0.125 +2024-07-29 05:13:16,989 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=244158.66666666666, ans=0.0 +2024-07-29 05:13:18,031 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=12.0 +2024-07-29 05:13:19,387 INFO [train.py:1114] (2/4) Epoch 18, batch 9350, loss[loss=0.1518, simple_loss=0.2367, pruned_loss=0.03346, over 4803.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2645, pruned_loss=0.04183, over 934502.12 frames. ], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:26,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=244185.33333333334, ans=0.025 +2024-07-29 05:13:29,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244185.33333333334, ans=0.1 +2024-07-29 05:13:46,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=244225.33333333334, ans=0.0 +2024-07-29 05:13:48,048 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=244225.33333333334, ans=0.1 +2024-07-29 05:13:50,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244225.33333333334, ans=0.1 +2024-07-29 05:13:52,894 INFO [train.py:1114] (2/4) Epoch 18, batch 9400, loss[loss=0.1781, simple_loss=0.269, pruned_loss=0.04364, over 4696.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04202, over 932289.47 frames. 
], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:54,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=244238.66666666666, ans=0.1 +2024-07-29 05:13:56,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=244238.66666666666, ans=0.125 +2024-07-29 05:13:57,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=244238.66666666666, ans=0.0 +2024-07-29 05:13:58,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=244238.66666666666, ans=0.125 +2024-07-29 05:14:04,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.33 vs. limit=15.0 +2024-07-29 05:14:12,979 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:14:14,133 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.480e+01 6.250e+01 7.248e+01 1.054e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 05:14:19,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244292.0, ans=0.1 +2024-07-29 05:14:19,848 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=244292.0, ans=0.125 +2024-07-29 05:14:25,267 INFO [train.py:1114] (2/4) Epoch 18, batch 9450, loss[loss=0.1691, simple_loss=0.2456, pruned_loss=0.04632, over 4798.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2649, pruned_loss=0.04184, over 931582.44 frames. ], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:14:31,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.60 vs. limit=15.0 +2024-07-29 05:14:39,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=244332.0, ans=0.0 +2024-07-29 05:14:45,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=244345.33333333334, ans=0.0 +2024-07-29 05:14:47,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=244345.33333333334, ans=0.07 +2024-07-29 05:14:56,542 INFO [train.py:1114] (2/4) Epoch 18, batch 9500, loss[loss=0.1471, simple_loss=0.2377, pruned_loss=0.0282, over 4698.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2644, pruned_loss=0.04123, over 933923.95 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:14:57,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=244372.0, ans=0.0 +2024-07-29 05:15:11,781 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.38 vs. 
limit=12.0 +2024-07-29 05:15:12,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244398.66666666666, ans=0.125 +2024-07-29 05:15:17,053 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.635e+01 6.260e+01 7.098e+01 9.795e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 05:15:23,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=244425.33333333334, ans=0.0 +2024-07-29 05:15:27,755 INFO [train.py:1114] (2/4) Epoch 18, batch 9550, loss[loss=0.1455, simple_loss=0.2277, pruned_loss=0.03163, over 4779.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2644, pruned_loss=0.04142, over 931467.32 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:15:29,613 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:15:30,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=244438.66666666666, ans=0.1 +2024-07-29 05:15:30,888 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=244438.66666666666, ans=0.0 +2024-07-29 05:15:32,391 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.27 vs. limit=15.0 +2024-07-29 05:15:33,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=244452.0, ans=0.2 +2024-07-29 05:15:35,496 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.39 vs. limit=22.5 +2024-07-29 05:15:40,228 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:15:49,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=244478.66666666666, ans=0.025 +2024-07-29 05:15:54,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=244492.0, ans=0.07 +2024-07-29 05:15:55,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244492.0, ans=0.125 +2024-07-29 05:16:00,362 INFO [train.py:1114] (2/4) Epoch 18, batch 9600, loss[loss=0.218, simple_loss=0.2947, pruned_loss=0.07068, over 3063.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.0415, over 930107.71 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:01,876 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.62 vs. limit=15.0 +2024-07-29 05:16:19,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=244545.33333333334, ans=0.2 +2024-07-29 05:16:21,036 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.711e+01 6.305e+01 6.902e+01 1.149e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 05:16:31,667 INFO [train.py:1114] (2/4) Epoch 18, batch 9650, loss[loss=0.1534, simple_loss=0.2565, pruned_loss=0.02509, over 4829.00 frames. 
], tot_loss[loss=0.1741, simple_loss=0.2647, pruned_loss=0.0418, over 926508.60 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:31,857 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=244572.0, ans=0.025 +2024-07-29 05:16:42,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244585.33333333334, ans=0.1 +2024-07-29 05:16:43,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=244585.33333333334, ans=0.125 +2024-07-29 05:16:46,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=244598.66666666666, ans=0.0 +2024-07-29 05:17:00,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=244625.33333333334, ans=0.125 +2024-07-29 05:17:00,266 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.59 vs. limit=22.5 +2024-07-29 05:17:01,001 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.85 vs. limit=12.0 +2024-07-29 05:17:02,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244625.33333333334, ans=0.125 +2024-07-29 05:17:03,189 INFO [train.py:1114] (2/4) Epoch 18, batch 9700, loss[loss=0.1982, simple_loss=0.2865, pruned_loss=0.05496, over 4258.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.042, over 924438.66 frames. ], batch size: 25, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:17:04,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=244638.66666666666, ans=0.025 +2024-07-29 05:17:11,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=244652.0, ans=0.125 +2024-07-29 05:17:14,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=244665.33333333334, ans=0.0 +2024-07-29 05:17:15,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244665.33333333334, ans=0.0 +2024-07-29 05:17:23,686 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.681e+01 6.275e+01 7.162e+01 1.082e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:17:28,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=244692.0, ans=0.125 +2024-07-29 05:17:34,253 INFO [train.py:1114] (2/4) Epoch 18, batch 9750, loss[loss=0.1986, simple_loss=0.2742, pruned_loss=0.06144, over 4701.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.265, pruned_loss=0.0421, over 925180.70 frames. 
], batch size: 15, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:17:52,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=244732.0, ans=0.125 +2024-07-29 05:17:54,833 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=244745.33333333334, ans=0.125 +2024-07-29 05:18:02,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=244758.66666666666, ans=0.0 +2024-07-29 05:18:03,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0 +2024-07-29 05:18:06,002 INFO [train.py:1114] (2/4) Epoch 18, batch 9800, loss[loss=0.1675, simple_loss=0.2542, pruned_loss=0.0404, over 4695.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2635, pruned_loss=0.0417, over 925166.14 frames. ], batch size: 12, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:18:11,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=244772.0, ans=0.0 +2024-07-29 05:18:12,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=244785.33333333334, ans=0.09899494936611666 +2024-07-29 05:18:26,705 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-07-29 05:18:26,928 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.797e+01 6.276e+01 7.162e+01 9.479e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:18:31,420 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244812.0, ans=0.1 +2024-07-29 05:18:32,641 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=244825.33333333334, ans=0.0 +2024-07-29 05:18:38,927 INFO [train.py:1114] (2/4) Epoch 18, batch 9850, loss[loss=0.1794, simple_loss=0.2835, pruned_loss=0.03769, over 4903.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2643, pruned_loss=0.04167, over 927310.57 frames. ], batch size: 15, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:18:57,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=244878.66666666666, ans=0.0 +2024-07-29 05:19:03,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=244892.0, ans=0.2 +2024-07-29 05:19:08,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244892.0, ans=0.125 +2024-07-29 05:19:09,089 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. limit=6.0 +2024-07-29 05:19:09,930 INFO [train.py:1114] (2/4) Epoch 18, batch 9900, loss[loss=0.1935, simple_loss=0.2939, pruned_loss=0.04652, over 4851.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2656, pruned_loss=0.0422, over 926849.99 frames. 
], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:19:25,602 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244932.0, ans=0.0 +2024-07-29 05:19:28,070 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=244932.0, ans=0.2 +2024-07-29 05:19:30,950 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.784e+01 6.438e+01 7.578e+01 1.058e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 05:19:32,382 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244945.33333333334, ans=0.125 +2024-07-29 05:19:32,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=244945.33333333334, ans=0.0 +2024-07-29 05:19:36,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=244958.66666666666, ans=0.0 +2024-07-29 05:19:41,558 INFO [train.py:1114] (2/4) Epoch 18, batch 9950, loss[loss=0.1452, simple_loss=0.2342, pruned_loss=0.02812, over 4791.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2657, pruned_loss=0.0425, over 929125.50 frames. ], batch size: 11, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:19:47,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244985.33333333334, ans=0.125 +2024-07-29 05:19:47,613 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=244985.33333333334, ans=0.2 +2024-07-29 05:20:01,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=245012.0, ans=0.2 +2024-07-29 05:20:03,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245012.0, ans=0.125 +2024-07-29 05:20:12,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=245038.66666666666, ans=0.025 +2024-07-29 05:20:12,652 INFO [train.py:1114] (2/4) Epoch 18, batch 10000, loss[loss=0.1771, simple_loss=0.278, pruned_loss=0.03809, over 4611.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.268, pruned_loss=0.04293, over 926531.77 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:20:14,780 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.01 vs. limit=15.0 +2024-07-29 05:20:22,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=245052.0, ans=0.0 +2024-07-29 05:20:30,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=245065.33333333334, ans=0.0 +2024-07-29 05:20:32,865 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.64 vs. 
limit=22.5 +2024-07-29 05:20:33,398 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.790e+01 5.781e+01 6.382e+01 8.189e+01 1.255e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 05:20:36,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=245078.66666666666, ans=0.125 +2024-07-29 05:20:37,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245092.0, ans=0.1 +2024-07-29 05:20:37,868 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245092.0, ans=0.125 +2024-07-29 05:20:40,487 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=12.0 +2024-07-29 05:20:45,061 INFO [train.py:1114] (2/4) Epoch 18, batch 10050, loss[loss=0.2147, simple_loss=0.3007, pruned_loss=0.06433, over 3383.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2705, pruned_loss=0.04398, over 915725.28 frames. ], batch size: 36, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:20:50,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=245105.33333333334, ans=0.5 +2024-07-29 05:20:58,352 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=245132.0, ans=0.0 +2024-07-29 05:20:59,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245132.0, ans=0.125 +2024-07-29 05:21:08,722 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245145.33333333334, ans=0.125 +2024-07-29 05:21:18,052 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.24 vs. limit=22.5 +2024-07-29 05:21:19,137 INFO [train.py:1114] (2/4) Epoch 18, batch 10100, loss[loss=0.1908, simple_loss=0.2798, pruned_loss=0.05088, over 3160.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.275, pruned_loss=0.04878, over 861298.63 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:21:29,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.30 vs. limit=15.0 +2024-07-29 05:21:38,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=245212.0, ans=0.125 +2024-07-29 05:21:41,089 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 6.796e+01 7.277e+01 7.758e+01 1.071e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-29 05:21:45,310 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0 +2024-07-29 05:21:52,415 INFO [train.py:1114] (2/4) Epoch 18, batch 10150, loss[loss=0.2859, simple_loss=0.3436, pruned_loss=0.1141, over 3345.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2786, pruned_loss=0.05241, over 820049.71 frames. 
], batch size: 36, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:22:14,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245278.66666666666, ans=0.125 +2024-07-29 05:22:21,072 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=245292.0, ans=0.09899494936611666 +2024-07-29 05:22:24,129 INFO [train.py:1114] (2/4) Epoch 18, batch 10200, loss[loss=0.2312, simple_loss=0.3131, pruned_loss=0.07466, over 3432.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.282, pruned_loss=0.0556, over 787107.03 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:22:26,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=245305.33333333334, ans=0.125 +2024-07-29 05:22:27,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=245305.33333333334, ans=0.125 +2024-07-29 05:24:06,519 INFO [train.py:1114] (2/4) Epoch 19, batch 0, loss[loss=0.1505, simple_loss=0.2385, pruned_loss=0.03122, over 4859.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2385, pruned_loss=0.03122, over 4859.00 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:24:06,520 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 05:24:11,210 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.4028, 4.0596, 3.7849, 4.1278], device='cuda:2') +2024-07-29 05:24:18,360 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1627, simple_loss=0.2658, pruned_loss=0.02977, over 944034.00 frames. +2024-07-29 05:24:18,361 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 05:24:20,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245336.0, ans=0.1 +2024-07-29 05:24:24,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.16 vs. limit=15.0 +2024-07-29 05:24:25,124 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.550e+01 7.036e+01 7.426e+01 9.937e+01, threshold=1.407e+02, percent-clipped=0.0 +2024-07-29 05:24:25,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=245349.33333333334, ans=0.2 +2024-07-29 05:24:29,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245349.33333333334, ans=0.125 +2024-07-29 05:24:44,026 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=245376.0, ans=0.125 +2024-07-29 05:24:44,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=245376.0, ans=0.95 +2024-07-29 05:24:54,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245389.33333333334, ans=0.1 +2024-07-29 05:24:55,432 INFO [train.py:1114] (2/4) Epoch 19, batch 50, loss[loss=0.1445, simple_loss=0.2264, pruned_loss=0.03132, over 4604.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.04251, over 206569.41 frames. 
], batch size: 11, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:24:58,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245402.66666666666, ans=0.125 +2024-07-29 05:24:58,892 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:25:05,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=245416.0, ans=0.0 +2024-07-29 05:25:13,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245429.33333333334, ans=0.1 +2024-07-29 05:25:15,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=245442.66666666666, ans=0.025 +2024-07-29 05:25:26,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245456.0, ans=0.0 +2024-07-29 05:25:29,123 INFO [train.py:1114] (2/4) Epoch 19, batch 100, loss[loss=0.1381, simple_loss=0.2265, pruned_loss=0.02489, over 4630.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2677, pruned_loss=0.04175, over 365142.95 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:25:34,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=245469.33333333334, ans=0.025 +2024-07-29 05:25:35,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=245482.66666666666, ans=0.0 +2024-07-29 05:25:35,846 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.533e+01 6.230e+01 7.043e+01 1.593e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-29 05:25:37,461 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.59 vs. limit=10.0 +2024-07-29 05:25:40,840 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.76 vs. limit=12.0 +2024-07-29 05:25:42,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=245496.0, ans=0.125 +2024-07-29 05:25:50,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=245509.33333333334, ans=0.2 +2024-07-29 05:26:02,308 INFO [train.py:1114] (2/4) Epoch 19, batch 150, loss[loss=0.1345, simple_loss=0.223, pruned_loss=0.02302, over 4601.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2649, pruned_loss=0.041, over 493801.87 frames. 
], batch size: 11, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:26:06,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=245536.0, ans=0.125 +2024-07-29 05:26:21,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=245576.0, ans=0.0 +2024-07-29 05:26:25,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=245576.0, ans=0.0 +2024-07-29 05:26:31,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245589.33333333334, ans=0.125 +2024-07-29 05:26:35,621 INFO [train.py:1114] (2/4) Epoch 19, batch 200, loss[loss=0.2028, simple_loss=0.2948, pruned_loss=0.05536, over 4493.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2639, pruned_loss=0.04099, over 593246.38 frames. ], batch size: 21, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:26:42,102 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.609e+01 6.216e+01 6.903e+01 1.039e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 05:26:49,607 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245629.33333333334, ans=0.1 +2024-07-29 05:26:57,946 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.94 vs. limit=22.5 +2024-07-29 05:27:05,290 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=245656.0, ans=0.2 +2024-07-29 05:27:10,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=245669.33333333334, ans=0.125 +2024-07-29 05:27:10,909 INFO [train.py:1114] (2/4) Epoch 19, batch 250, loss[loss=0.1969, simple_loss=0.2802, pruned_loss=0.05677, over 4624.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2657, pruned_loss=0.04134, over 670125.57 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:27:15,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=245669.33333333334, ans=0.0 +2024-07-29 05:27:19,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.81 vs. limit=15.0 +2024-07-29 05:27:29,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=245696.0, ans=0.0 +2024-07-29 05:27:31,319 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:27:35,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=245709.33333333334, ans=0.125 +2024-07-29 05:27:35,886 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=245709.33333333334, ans=0.0 +2024-07-29 05:27:43,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=245736.0, ans=10.0 +2024-07-29 05:27:44,384 INFO [train.py:1114] (2/4) Epoch 19, batch 300, loss[loss=0.1771, simple_loss=0.2763, pruned_loss=0.03901, over 4805.00 frames. 
], tot_loss[loss=0.174, simple_loss=0.2656, pruned_loss=0.04119, over 729748.98 frames. ], batch size: 15, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:27:45,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=245736.0, ans=0.125 +2024-07-29 05:27:51,015 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.537e+01 6.057e+01 6.917e+01 1.022e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 05:27:52,630 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:27:55,200 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=245749.33333333334, ans=0.0 +2024-07-29 05:28:06,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=245776.0, ans=0.125 +2024-07-29 05:28:07,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=245776.0, ans=0.0 +2024-07-29 05:28:16,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=245789.33333333334, ans=0.07 +2024-07-29 05:28:17,796 INFO [train.py:1114] (2/4) Epoch 19, batch 350, loss[loss=0.1686, simple_loss=0.257, pruned_loss=0.04016, over 4932.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2658, pruned_loss=0.04131, over 776024.87 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:28:22,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.16 vs. limit=22.5 +2024-07-29 05:28:30,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245829.33333333334, ans=0.125 +2024-07-29 05:28:35,199 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=12.0 +2024-07-29 05:28:40,094 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245842.66666666666, ans=0.125 +2024-07-29 05:28:47,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=245856.0, ans=0.0 +2024-07-29 05:28:50,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=245856.0, ans=0.0 +2024-07-29 05:28:51,578 INFO [train.py:1114] (2/4) Epoch 19, batch 400, loss[loss=0.1551, simple_loss=0.2513, pruned_loss=0.02948, over 4693.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04039, over 813547.10 frames. 
], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:28:58,548 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.341e+01 5.802e+01 6.594e+01 8.688e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-29 05:29:06,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=245896.0, ans=0.125 +2024-07-29 05:29:06,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=245896.0, ans=0.125 +2024-07-29 05:29:15,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245909.33333333334, ans=0.1 +2024-07-29 05:29:21,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=245922.66666666666, ans=0.0 +2024-07-29 05:29:23,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=245922.66666666666, ans=0.125 +2024-07-29 05:29:27,356 INFO [train.py:1114] (2/4) Epoch 19, batch 450, loss[loss=0.1587, simple_loss=0.257, pruned_loss=0.03017, over 4634.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2645, pruned_loss=0.0405, over 839031.57 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:29:31,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=245936.0, ans=0.0 +2024-07-29 05:29:32,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=245936.0, ans=0.0 +2024-07-29 05:29:32,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=245936.0, ans=0.0 +2024-07-29 05:29:33,033 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=245936.0, ans=0.0 +2024-07-29 05:29:37,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245949.33333333334, ans=0.1 +2024-07-29 05:29:38,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=245949.33333333334, ans=0.2 +2024-07-29 05:29:41,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=245962.66666666666, ans=0.125 +2024-07-29 05:29:43,107 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245962.66666666666, ans=0.0 +2024-07-29 05:29:43,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=245962.66666666666, ans=0.125 +2024-07-29 05:29:47,900 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.74 vs. 
limit=15.0 +2024-07-29 05:29:52,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=245976.0, ans=0.2 +2024-07-29 05:29:57,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=245989.33333333334, ans=0.0 +2024-07-29 05:29:58,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. limit=15.0 +2024-07-29 05:30:00,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245989.33333333334, ans=0.125 +2024-07-29 05:30:02,723 INFO [train.py:1114] (2/4) Epoch 19, batch 500, loss[loss=0.1621, simple_loss=0.2543, pruned_loss=0.03491, over 4689.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04078, over 861295.74 frames. ], batch size: 15, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:30:11,401 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.402e+01 5.590e+01 6.119e+01 6.735e+01 9.052e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 05:30:25,089 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=246042.66666666666, ans=0.0 +2024-07-29 05:30:27,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=246042.66666666666, ans=0.0 +2024-07-29 05:30:32,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:33,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:33,573 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:38,075 INFO [train.py:1114] (2/4) Epoch 19, batch 550, loss[loss=0.1495, simple_loss=0.2499, pruned_loss=0.02451, over 4635.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.0407, over 877456.78 frames. ], batch size: 17, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:30:53,653 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:30:56,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=246096.0, ans=0.0 +2024-07-29 05:30:56,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=246096.0, ans=0.0 +2024-07-29 05:31:02,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246109.33333333334, ans=0.0 +2024-07-29 05:31:05,212 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.31 vs. 
limit=15.0 +2024-07-29 05:31:06,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=246122.66666666666, ans=0.125 +2024-07-29 05:31:08,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=246122.66666666666, ans=0.0 +2024-07-29 05:31:11,635 INFO [train.py:1114] (2/4) Epoch 19, batch 600, loss[loss=0.185, simple_loss=0.2789, pruned_loss=0.04552, over 4632.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04051, over 892099.36 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:31:18,194 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+01 5.519e+01 6.137e+01 7.010e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 05:31:21,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246149.33333333334, ans=0.125 +2024-07-29 05:31:34,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=246176.0, ans=0.125 +2024-07-29 05:31:35,383 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.25 vs. limit=15.0 +2024-07-29 05:31:39,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=246189.33333333334, ans=0.2 +2024-07-29 05:31:42,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=246189.33333333334, ans=0.125 +2024-07-29 05:31:44,598 INFO [train.py:1114] (2/4) Epoch 19, batch 650, loss[loss=0.179, simple_loss=0.2754, pruned_loss=0.04132, over 4760.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2623, pruned_loss=0.0402, over 904025.05 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:32:08,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=246242.66666666666, ans=0.07 +2024-07-29 05:32:08,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=246242.66666666666, ans=0.125 +2024-07-29 05:32:17,385 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=246256.0, ans=0.125 +2024-07-29 05:32:18,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246269.33333333334, ans=0.125 +2024-07-29 05:32:18,688 INFO [train.py:1114] (2/4) Epoch 19, batch 700, loss[loss=0.1434, simple_loss=0.2396, pruned_loss=0.02362, over 4646.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2625, pruned_loss=0.04049, over 911772.75 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:32:22,539 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.34 vs. 
limit=12.0 +2024-07-29 05:32:25,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.672e+01 6.319e+01 7.208e+01 1.301e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 05:32:50,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=246322.66666666666, ans=0.125 +2024-07-29 05:32:54,401 INFO [train.py:1114] (2/4) Epoch 19, batch 750, loss[loss=0.1601, simple_loss=0.2649, pruned_loss=0.02765, over 4694.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04011, over 918104.84 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:33:01,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=246349.33333333334, ans=0.0 +2024-07-29 05:33:01,334 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-07-29 05:33:06,813 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-07-29 05:33:12,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=246362.66666666666, ans=0.125 +2024-07-29 05:33:14,292 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0 +2024-07-29 05:33:28,105 INFO [train.py:1114] (2/4) Epoch 19, batch 800, loss[loss=0.1442, simple_loss=0.2353, pruned_loss=0.02653, over 4861.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2626, pruned_loss=0.04031, over 922653.39 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:33:30,226 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246402.66666666666, ans=0.1 +2024-07-29 05:33:34,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=246416.0, ans=0.0 +2024-07-29 05:33:34,599 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.665e+01 6.243e+01 7.363e+01 1.175e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 05:33:50,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246442.66666666666, ans=0.0 +2024-07-29 05:34:01,570 INFO [train.py:1114] (2/4) Epoch 19, batch 850, loss[loss=0.1806, simple_loss=0.2813, pruned_loss=0.03994, over 4653.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2621, pruned_loss=0.04019, over 926967.38 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:34:02,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. 
limit=15.0 +2024-07-29 05:34:16,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=246496.0, ans=0.0 +2024-07-29 05:34:17,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=246496.0, ans=0.025 +2024-07-29 05:34:24,296 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246509.33333333334, ans=0.125 +2024-07-29 05:34:27,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=246509.33333333334, ans=0.125 +2024-07-29 05:34:27,079 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=246509.33333333334, ans=0.2 +2024-07-29 05:34:27,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246522.66666666666, ans=0.125 +2024-07-29 05:34:27,800 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=246522.66666666666, ans=0.025 +2024-07-29 05:34:34,806 INFO [train.py:1114] (2/4) Epoch 19, batch 900, loss[loss=0.1378, simple_loss=0.2295, pruned_loss=0.02307, over 4849.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2621, pruned_loss=0.04046, over 928539.36 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:34:41,426 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.690e+01 6.264e+01 7.142e+01 9.700e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 05:34:48,860 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=246549.33333333334, ans=0.125 +2024-07-29 05:34:55,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=246562.66666666666, ans=0.09899494936611666 +2024-07-29 05:35:10,548 INFO [train.py:1114] (2/4) Epoch 19, batch 950, loss[loss=0.1609, simple_loss=0.2465, pruned_loss=0.03763, over 4783.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2632, pruned_loss=0.04086, over 930427.90 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:35:14,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=246602.66666666666, ans=0.125 +2024-07-29 05:35:17,537 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.91 vs. limit=12.0 +2024-07-29 05:35:28,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246629.33333333334, ans=0.125 +2024-07-29 05:35:37,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=246642.66666666666, ans=0.0 +2024-07-29 05:35:48,367 INFO [train.py:1114] (2/4) Epoch 19, batch 1000, loss[loss=0.1452, simple_loss=0.2405, pruned_loss=0.02493, over 4961.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.04089, over 930287.28 frames. 
], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:35:54,967 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.789e+01 6.385e+01 7.432e+01 1.004e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 05:35:55,776 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246682.66666666666, ans=0.0 +2024-07-29 05:35:55,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=246682.66666666666, ans=0.025 +2024-07-29 05:35:59,324 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.33 vs. limit=10.0 +2024-07-29 05:36:06,551 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0 +2024-07-29 05:36:10,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=246709.33333333334, ans=0.5 +2024-07-29 05:36:17,810 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:36:21,798 INFO [train.py:1114] (2/4) Epoch 19, batch 1050, loss[loss=0.1828, simple_loss=0.2716, pruned_loss=0.04706, over 4874.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2629, pruned_loss=0.04072, over 932144.30 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:36:23,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=246736.0, ans=0.0 +2024-07-29 05:36:25,008 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.30 vs. limit=15.0 +2024-07-29 05:36:25,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246736.0, ans=0.1 +2024-07-29 05:36:40,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=246762.66666666666, ans=0.125 +2024-07-29 05:36:44,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246776.0, ans=0.1 +2024-07-29 05:36:55,280 INFO [train.py:1114] (2/4) Epoch 19, batch 1100, loss[loss=0.1735, simple_loss=0.2655, pruned_loss=0.04073, over 4909.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2635, pruned_loss=0.04107, over 934580.94 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:37:01,943 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.545e+01 5.987e+01 6.620e+01 9.087e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-29 05:37:06,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.48 vs. limit=22.5 +2024-07-29 05:37:11,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246829.33333333334, ans=0.1 +2024-07-29 05:37:16,737 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.37 vs. 
limit=15.0 +2024-07-29 05:37:22,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=246856.0, ans=0.2 +2024-07-29 05:37:28,456 INFO [train.py:1114] (2/4) Epoch 19, batch 1150, loss[loss=0.1751, simple_loss=0.2774, pruned_loss=0.03641, over 4895.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.264, pruned_loss=0.04107, over 934045.49 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:37:46,691 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=12.0 +2024-07-29 05:37:51,573 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.42 vs. limit=10.0 +2024-07-29 05:37:56,141 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=246922.66666666666, ans=0.125 +2024-07-29 05:38:07,866 INFO [train.py:1114] (2/4) Epoch 19, batch 1200, loss[loss=0.16, simple_loss=0.2535, pruned_loss=0.03329, over 4872.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2646, pruned_loss=0.04108, over 932754.63 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:38:14,576 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.835e+01 6.415e+01 7.072e+01 9.087e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 05:38:29,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=246976.0, ans=0.125 +2024-07-29 05:38:29,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=246976.0, ans=0.0 +2024-07-29 05:38:30,706 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.71 vs. limit=12.0 +2024-07-29 05:38:31,716 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246976.0, ans=0.0 +2024-07-29 05:38:40,824 INFO [train.py:1114] (2/4) Epoch 19, batch 1250, loss[loss=0.1833, simple_loss=0.2754, pruned_loss=0.0456, over 4785.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2644, pruned_loss=0.04058, over 936830.18 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:38:49,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.70 vs. limit=15.0 +2024-07-29 05:38:55,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=247029.33333333334, ans=0.125 +2024-07-29 05:38:55,586 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:38:56,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=247029.33333333334, ans=0.125 +2024-07-29 05:39:12,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=247056.0, ans=0.0 +2024-07-29 05:39:14,089 INFO [train.py:1114] (2/4) Epoch 19, batch 1300, loss[loss=0.2028, simple_loss=0.2941, pruned_loss=0.05576, over 4683.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.264, pruned_loss=0.04087, over 938068.78 frames. 
], batch size: 19, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:39:21,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247069.33333333334, ans=0.1 +2024-07-29 05:39:21,827 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.39 vs. limit=15.0 +2024-07-29 05:39:22,335 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=247069.33333333334, ans=0.125 +2024-07-29 05:39:25,986 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.578e+01 5.975e+01 6.963e+01 1.137e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-29 05:39:32,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=247082.66666666666, ans=0.0 +2024-07-29 05:39:37,041 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.84 vs. limit=22.5 +2024-07-29 05:39:38,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=247096.0, ans=0.0 +2024-07-29 05:39:39,091 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=15.0 +2024-07-29 05:39:44,425 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.33 vs. limit=15.0 +2024-07-29 05:39:45,875 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-29 05:39:51,533 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247122.66666666666, ans=0.0 +2024-07-29 05:39:51,601 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:39:56,774 INFO [train.py:1114] (2/4) Epoch 19, batch 1350, loss[loss=0.1443, simple_loss=0.2317, pruned_loss=0.02845, over 4759.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2634, pruned_loss=0.04038, over 940191.64 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:39:57,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.62 vs. limit=6.0 +2024-07-29 05:39:59,049 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.00 vs. limit=15.0 +2024-07-29 05:40:00,320 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.27 vs. limit=15.0 +2024-07-29 05:40:08,293 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.78 vs. limit=15.0 +2024-07-29 05:40:18,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=247176.0, ans=0.04949747468305833 +2024-07-29 05:40:32,037 INFO [train.py:1114] (2/4) Epoch 19, batch 1400, loss[loss=0.1532, simple_loss=0.2358, pruned_loss=0.0353, over 4728.00 frames. 
], tot_loss[loss=0.1719, simple_loss=0.2632, pruned_loss=0.04027, over 941905.06 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:40:34,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=247202.66666666666, ans=0.0
+2024-07-29 05:40:37,016 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=247202.66666666666, ans=0.0
+2024-07-29 05:40:38,801 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.545e+01 5.620e+01 6.318e+01 7.023e+01 1.312e+02, threshold=1.264e+02, percent-clipped=1.0
+2024-07-29 05:40:40,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=247216.0, ans=0.2
+2024-07-29 05:40:41,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=247216.0, ans=0.2
+2024-07-29 05:40:56,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=247242.66666666666, ans=0.025
+2024-07-29 05:40:59,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247242.66666666666, ans=0.125
+2024-07-29 05:40:59,508 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.28 vs. limit=15.0
+2024-07-29 05:41:02,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.21 vs. limit=22.5
+2024-07-29 05:41:02,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=247256.0, ans=0.0
+2024-07-29 05:41:03,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=247256.0, ans=0.125
+2024-07-29 05:41:07,799 INFO [train.py:1114] (2/4) Epoch 19, batch 1450, loss[loss=0.1685, simple_loss=0.2652, pruned_loss=0.0359, over 4683.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2634, pruned_loss=0.04031, over 942314.72 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:41:09,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=247269.33333333334, ans=0.0
+2024-07-29 05:41:29,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=247309.33333333334, ans=0.0
+2024-07-29 05:41:32,438 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247309.33333333334, ans=0.0
+2024-07-29 05:41:42,913 INFO [train.py:1114] (2/4) Epoch 19, batch 1500, loss[loss=0.1663, simple_loss=0.274, pruned_loss=0.02928, over 4804.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2637, pruned_loss=0.04045, over 942014.03 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:41:49,698 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 5.560e+01 6.078e+01 6.890e+01 1.039e+02, threshold=1.216e+02, percent-clipped=0.0
+2024-07-29 05:41:53,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247349.33333333334, ans=0.125
+2024-07-29 05:42:12,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247389.33333333334, ans=0.125
+2024-07-29 05:42:15,166 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.31 vs. limit=22.5
+2024-07-29 05:42:16,786 INFO [train.py:1114] (2/4) Epoch 19, batch 1550, loss[loss=0.1709, simple_loss=0.2766, pruned_loss=0.03257, over 4894.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2638, pruned_loss=0.04059, over 938242.59 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:42:18,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=247402.66666666666, ans=0.0
+2024-07-29 05:42:20,171 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=247402.66666666666, ans=0.025
+2024-07-29 05:42:24,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247416.0, ans=0.0
+2024-07-29 05:42:31,550 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.04 vs. limit=12.0
+2024-07-29 05:42:32,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=247429.33333333334, ans=0.0
+2024-07-29 05:42:33,466 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247429.33333333334, ans=0.1
+2024-07-29 05:42:50,246 INFO [train.py:1114] (2/4) Epoch 19, batch 1600, loss[loss=0.1823, simple_loss=0.2703, pruned_loss=0.04714, over 4872.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04048, over 936687.99 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0
+2024-07-29 05:42:53,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247469.33333333334, ans=0.125
+2024-07-29 05:42:54,230 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247469.33333333334, ans=0.125
+2024-07-29 05:42:57,705 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247482.66666666666, ans=0.125
+2024-07-29 05:42:58,147 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.562e+01 6.323e+01 7.561e+01 1.065e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 05:42:58,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=247482.66666666666, ans=0.125
+2024-07-29 05:43:11,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=247509.33333333334, ans=0.2
+2024-07-29 05:43:24,345 INFO [train.py:1114] (2/4) Epoch 19, batch 1650, loss[loss=0.1647, simple_loss=0.2668, pruned_loss=0.03129, over 4668.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2627, pruned_loss=0.0401, over 936821.55 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:43:25,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247536.0, ans=0.125
+2024-07-29 05:43:42,739 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247562.66666666666, ans=0.1
+2024-07-29 05:43:47,980 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=247576.0, ans=0.0
+2024-07-29 05:43:50,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=247576.0, ans=0.0
+2024-07-29 05:43:51,396 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=247576.0, ans=0.125
+2024-07-29 05:43:53,693 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.36 vs. limit=5.0
+2024-07-29 05:43:59,823 INFO [train.py:1114] (2/4) Epoch 19, batch 1700, loss[loss=0.1462, simple_loss=0.2307, pruned_loss=0.03084, over 4704.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03963, over 938594.54 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:44:04,848 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.84 vs. limit=22.5
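The ScheduledFloat entries above come from scaling.py: hyperparameters such as skip rates, dropout probabilities, and balancer limits are annealed as a function of batch_count and the current value (ans=...) is logged. A minimal sketch of a piecewise-linear schedule in that spirit follows; the class name, breakpoints, and values are illustrative assumptions, not the recipe's actual configuration.

```python
# Minimal piecewise-linear schedule over batch_count, in the spirit of the
# ScheduledFloat lines logged above. Breakpoints below are illustrative only.
class PiecewiseLinearSchedule:
    def __init__(self, *points):
        # points: (batch_count, value) pairs
        self.points = sorted(points)

    def __call__(self, batch_count: float) -> float:
        pts = self.points
        if batch_count <= pts[0][0]:
            return pts[0][1]
        if batch_count >= pts[-1][0]:
            return pts[-1][1]
        for (x0, y0), (x1, y1) in zip(pts, pts[1:]):
            if x0 <= batch_count <= x1:
                # linear interpolation between the two surrounding breakpoints
                t = (batch_count - x0) / (x1 - x0)
                return y0 + t * (y1 - y0)

# A skip rate decaying from 0.5 to 0.0 over the first 4000 batches would
# report ans=0.0 this deep into training, as the lines above do.
conv_skip_rate = PiecewiseLinearSchedule((0.0, 0.5), (4000.0, 0.0))
print(conv_skip_rate(247202.66666666666))  # -> 0.0
```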
+2024-07-29 05:44:08,238 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.612e+01 6.497e+01 7.246e+01 1.413e+02, threshold=1.299e+02, percent-clipped=1.0
+2024-07-29 05:44:11,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=247616.0, ans=0.0
+2024-07-29 05:44:18,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247629.33333333334, ans=0.125
+2024-07-29 05:44:22,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247642.66666666666, ans=0.1
+2024-07-29 05:44:22,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=247642.66666666666, ans=0.025
+2024-07-29 05:44:24,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=247642.66666666666, ans=0.0
+2024-07-29 05:44:33,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.42 vs. limit=15.0
+2024-07-29 05:44:34,125 INFO [train.py:1114] (2/4) Epoch 19, batch 1750, loss[loss=0.1646, simple_loss=0.2469, pruned_loss=0.04111, over 4801.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.263, pruned_loss=0.04005, over 939998.03 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:44:46,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=247682.66666666666, ans=0.125
+2024-07-29 05:45:10,874 INFO [train.py:1114] (2/4) Epoch 19, batch 1800, loss[loss=0.199, simple_loss=0.2933, pruned_loss=0.05234, over 4641.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.264, pruned_loss=0.04075, over 940609.71 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:45:18,175 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.396e+01 5.835e+01 6.491e+01 8.060e+01 1.072e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-29 05:45:29,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0
+2024-07-29 05:45:56,341 INFO [train.py:1114] (2/4) Epoch 19, batch 1850, loss[loss=0.185, simple_loss=0.282, pruned_loss=0.04397, over 4812.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2639, pruned_loss=0.0404, over 940727.50 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:45:59,845 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=247802.66666666666, ans=0.125
+2024-07-29 05:46:16,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=247829.33333333334, ans=0.0
+2024-07-29 05:46:21,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=247842.66666666666, ans=0.125
+2024-07-29 05:46:34,831 INFO [train.py:1114] (2/4) Epoch 19, batch 1900, loss[loss=0.1731, simple_loss=0.2734, pruned_loss=0.03645, over 4665.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2644, pruned_loss=0.0406, over 941895.72 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:46:37,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=247869.33333333334, ans=12.0
+2024-07-29 05:46:43,051 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.675e+01 6.450e+01 7.490e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-29 05:46:49,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247896.0, ans=0.125
+2024-07-29 05:46:49,626 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.72 vs. limit=15.0
+2024-07-29 05:46:53,242 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=247896.0, ans=0.125
+2024-07-29 05:47:05,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=247922.66666666666, ans=22.5
+2024-07-29 05:47:11,506 INFO [train.py:1114] (2/4) Epoch 19, batch 1950, loss[loss=0.1945, simple_loss=0.2871, pruned_loss=0.05093, over 4900.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2652, pruned_loss=0.0408, over 943803.53 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:47:13,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=247936.0, ans=0.2
+2024-07-29 05:47:20,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247949.33333333334, ans=0.125
+2024-07-29 05:47:49,625 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=247989.33333333334, ans=0.025
+2024-07-29 05:47:57,262 INFO [train.py:1114] (2/4) Epoch 19, batch 2000, loss[loss=0.1703, simple_loss=0.2513, pruned_loss=0.04467, over 4809.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2653, pruned_loss=0.04093, over 941060.25 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:47:58,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248002.66666666666, ans=0.125
+2024-07-29 05:48:03,113 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=15.0
+2024-07-29 05:48:04,229 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=248016.0, ans=0.125
+2024-07-29 05:48:04,709 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.516e+01 5.566e+01 6.044e+01 6.728e+01 1.044e+02, threshold=1.209e+02, percent-clipped=0.0
+2024-07-29 05:48:04,851 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:48:08,924 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:48:17,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=248042.66666666666, ans=0.125
+2024-07-29 05:48:18,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=248042.66666666666, ans=0.0
+2024-07-29 05:48:25,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=248056.0, ans=0.09899494936611666
+2024-07-29 05:48:25,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248056.0, ans=0.125
+2024-07-29 05:48:26,034 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:48:31,045 INFO [train.py:1114] (2/4) Epoch 19, batch 2050, loss[loss=0.1594, simple_loss=0.2439, pruned_loss=0.0375, over 4618.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2646, pruned_loss=0.04079, over 938858.00 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:48:54,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0
+2024-07-29 05:48:54,554 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=248109.33333333334, ans=0.125
+2024-07-29 05:49:07,023 INFO [train.py:1114] (2/4) Epoch 19, batch 2100, loss[loss=0.1653, simple_loss=0.2608, pruned_loss=0.03494, over 4758.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2634, pruned_loss=0.04027, over 941091.99 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0
+2024-07-29 05:49:14,318 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.765e+01 5.813e+01 6.323e+01 7.221e+01 1.090e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-29 05:49:15,176 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248149.33333333334, ans=0.1
+2024-07-29 05:49:15,389 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.08 vs. limit=15.0
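The optim.py warnings above report quartiles of recent gradient norms together with the current clipping threshold and the percentage of batches whose norm was clipped. The sketch below shows the shape of such a diagnostic; the threshold rule (median times a fixed factor) and the window of norms are assumptions for illustration, not the optimizer's exact formula.

```python
import torch

# Toy version of the grad-norm quartile diagnostic in the WARNING lines above.
def grad_norm_report(norms: torch.Tensor, factor: float = 2.0) -> None:
    # quartiles (min, Q1, median, Q3, max) of the recent grad-norm window
    qs = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])).tolist()
    threshold = factor * qs[2]  # assumed rule: median * factor
    clipped = (norms > threshold).float().mean().item() * 100.0
    print("grad-norm quartiles " + " ".join(f"{v:.3e}" for v in qs)
          + f", threshold={threshold:.3e}, percent-clipped={clipped:.1f}")

# Toy window loosely echoing the magnitudes logged above.
grad_norm_report(torch.tensor([45.45, 56.20, 63.18, 70.23, 131.2]))
```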
+2024-07-29 05:49:16,474 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=248149.33333333334, ans=0.0
+2024-07-29 05:49:17,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=248149.33333333334, ans=0.0
+2024-07-29 05:49:21,154 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=248162.66666666666, ans=0.0
+2024-07-29 05:49:21,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.69 vs. limit=15.0
+2024-07-29 05:49:23,142 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=248162.66666666666, ans=0.2
+2024-07-29 05:49:29,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=248176.0, ans=0.125
+2024-07-29 05:49:29,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=248176.0, ans=0.0
+2024-07-29 05:49:33,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=248189.33333333334, ans=0.125
+2024-07-29 05:49:37,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=248189.33333333334, ans=0.0
+2024-07-29 05:49:39,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248202.66666666666, ans=0.125
+2024-07-29 05:49:40,180 INFO [train.py:1114] (2/4) Epoch 19, batch 2150, loss[loss=0.172, simple_loss=0.2619, pruned_loss=0.04108, over 4903.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04029, over 944243.43 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:49:47,503 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:49:51,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0
+2024-07-29 05:50:08,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248242.66666666666, ans=0.125
+2024-07-29 05:50:19,691 INFO [train.py:1114] (2/4) Epoch 19, batch 2200, loss[loss=0.175, simple_loss=0.2666, pruned_loss=0.04167, over 4800.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2624, pruned_loss=0.03979, over 943449.38 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:50:19,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=248269.33333333334, ans=0.2
+2024-07-29 05:50:27,108 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.574e+01 6.118e+01 6.873e+01 9.817e+01, threshold=1.224e+02, percent-clipped=0.0
+2024-07-29 05:50:33,326 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=248296.0, ans=0.125
+2024-07-29 05:51:15,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=248309.33333333334, ans=0.125
+2024-07-29 05:51:26,252 INFO [train.py:1114] (2/4) Epoch 19, batch 2250, loss[loss=0.2128, simple_loss=0.3048, pruned_loss=0.06036, over 4694.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2621, pruned_loss=0.04013, over 941951.89 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:51:27,195 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=248336.0, ans=0.07
+2024-07-29 05:51:28,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248336.0, ans=0.1
+2024-07-29 05:51:45,480 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248362.66666666666, ans=0.1
+2024-07-29 05:51:49,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=248376.0, ans=0.125
+2024-07-29 05:51:50,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=248376.0, ans=0.025
+2024-07-29 05:52:00,493 INFO [train.py:1114] (2/4) Epoch 19, batch 2300, loss[loss=0.1736, simple_loss=0.2691, pruned_loss=0.03903, over 4943.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2619, pruned_loss=0.04032, over 939250.06 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:52:09,138 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.821e+01 6.321e+01 7.286e+01 1.025e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-29 05:52:11,387 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=248416.0, ans=0.0
+2024-07-29 05:52:35,423 INFO [train.py:1114] (2/4) Epoch 19, batch 2350, loss[loss=0.1851, simple_loss=0.2835, pruned_loss=0.04337, over 4644.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2617, pruned_loss=0.03976, over 941579.53 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:52:36,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=248469.33333333334, ans=0.0
+2024-07-29 05:52:40,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=248469.33333333334, ans=0.125
+2024-07-29 05:52:42,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=248469.33333333334, ans=0.125
+2024-07-29 05:52:42,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=248469.33333333334, ans=0.0
+2024-07-29 05:52:52,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=248496.0, ans=0.125
+2024-07-29 05:53:04,204 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=248509.33333333334, ans=0.125
+2024-07-29 05:53:10,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=248522.66666666666, ans=0.125
+2024-07-29 05:53:12,741 INFO [train.py:1114] (2/4) Epoch 19, batch 2400, loss[loss=0.1759, simple_loss=0.264, pruned_loss=0.04387, over 4643.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2623, pruned_loss=0.04012, over 941202.75 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:53:14,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248536.0, ans=0.1
+2024-07-29 05:53:15,129 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=26.43 vs. limit=22.5
+2024-07-29 05:53:21,904 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.538e+01 5.999e+01 6.676e+01 9.357e+01, threshold=1.200e+02, percent-clipped=0.0
+2024-07-29 05:53:32,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248562.66666666666, ans=0.1
+2024-07-29 05:53:40,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=248589.33333333334, ans=0.0
+2024-07-29 05:53:48,248 INFO [train.py:1114] (2/4) Epoch 19, batch 2450, loss[loss=0.1895, simple_loss=0.2956, pruned_loss=0.0417, over 4692.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2638, pruned_loss=0.04078, over 937330.82 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:54:03,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=248629.33333333334, ans=0.0
+2024-07-29 05:54:15,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=248656.0, ans=0.125
+2024-07-29 05:54:21,389 INFO [train.py:1114] (2/4) Epoch 19, batch 2500, loss[loss=0.1722, simple_loss=0.2724, pruned_loss=0.03596, over 4811.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.04063, over 939558.98 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:54:22,599 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=15.0
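The Whitening lines compare a whiteness statistic of a module's activations against a per-module limit (one entry just above, metric=26.43 vs. limit=22.5, exceeds it). A simple proxy for such a statistic is sketched below; it is an assumption in the spirit of scaling.py, not its exact formula. The proxy equals 1.0 when the feature covariance is a multiple of the identity and grows as the eigenvalues spread out.

```python
import torch

# Whiteness proxy: num_channels * trace(C @ C) / trace(C)**2 for the
# (zero-mean) feature covariance C. An illustrative stand-in for the
# logged "metric"; the recipe's real computation may differ.
def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels)
    x = x - x.mean(dim=0, keepdim=True)
    c = (x.T @ x) / x.shape[0]
    n = c.shape[0]
    return (n * torch.trace(c @ c) / torch.trace(c) ** 2).item()

x = torch.randn(1000, 512)
print(whitening_metric(x))  # modestly above 1.0 (finite-sample noise)
print(whitening_metric(x * torch.linspace(0.1, 3.0, 512)))  # clearly larger
```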
+2024-07-29 05:54:28,637 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.769e+01 6.395e+01 7.394e+01 1.044e+02, threshold=1.279e+02, percent-clipped=0.0
+2024-07-29 05:54:28,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=248682.66666666666, ans=0.125
+2024-07-29 05:54:32,981 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.00 vs. limit=6.0
+2024-07-29 05:54:43,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=248709.33333333334, ans=0.125
+2024-07-29 05:54:54,729 INFO [train.py:1114] (2/4) Epoch 19, batch 2550, loss[loss=0.1499, simple_loss=0.2423, pruned_loss=0.02873, over 4809.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04051, over 939093.13 frames. ], batch size: 11, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:55:03,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=248749.33333333334, ans=0.0
+2024-07-29 05:55:11,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=248762.66666666666, ans=0.2
+2024-07-29 05:55:13,856 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0
+2024-07-29 05:55:18,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=248776.0, ans=0.0
+2024-07-29 05:55:19,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.08 vs. limit=15.0
+2024-07-29 05:55:24,950 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0
+2024-07-29 05:55:28,618 INFO [train.py:1114] (2/4) Epoch 19, batch 2600, loss[loss=0.1732, simple_loss=0.2672, pruned_loss=0.03967, over 4891.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.04087, over 937667.13 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:55:35,908 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.520e+01 6.096e+01 6.841e+01 9.069e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-29 05:55:43,401 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 05:55:56,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0
+2024-07-29 05:55:58,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=248856.0, ans=0.125
+2024-07-29 05:55:58,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=248856.0, ans=0.125
+2024-07-29 05:55:58,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=248856.0, ans=0.0
+2024-07-29 05:56:03,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=248869.33333333334, ans=0.125
+2024-07-29 05:56:03,694 INFO [train.py:1114] (2/4) Epoch 19, batch 2650, loss[loss=0.1837, simple_loss=0.2806, pruned_loss=0.04339, over 4665.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2644, pruned_loss=0.04099, over 939973.38 frames. ], batch size: 16, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:56:07,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248869.33333333334, ans=0.1
+2024-07-29 05:56:26,702 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=248909.33333333334, ans=0.125
+2024-07-29 05:56:42,263 INFO [train.py:1114] (2/4) Epoch 19, batch 2700, loss[loss=0.1737, simple_loss=0.267, pruned_loss=0.04015, over 4743.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2652, pruned_loss=0.04146, over 939491.44 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:57:09,490 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.838e+01 6.361e+01 7.244e+01 1.025e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-29 05:57:19,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=248962.66666666666, ans=0.95
+2024-07-29 05:57:35,854 INFO [train.py:1114] (2/4) Epoch 19, batch 2750, loss[loss=0.1893, simple_loss=0.2783, pruned_loss=0.05018, over 4697.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2639, pruned_loss=0.04123, over 939411.10 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 05:57:36,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=249002.66666666666, ans=0.05
+2024-07-29 05:57:43,836 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249016.0, ans=0.0
+2024-07-29 05:57:47,385 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0
+2024-07-29 05:57:57,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=249042.66666666666, ans=0.125
+2024-07-29 05:58:01,411 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=249042.66666666666, ans=0.125
+2024-07-29 05:58:01,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249042.66666666666, ans=0.1
+2024-07-29 05:58:01,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=249042.66666666666, ans=0.2
+2024-07-29 05:58:06,168 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=249056.0, ans=0.2
+2024-07-29 05:58:09,575 INFO [train.py:1114] (2/4) Epoch 19, batch 2800, loss[loss=0.2207, simple_loss=0.2857, pruned_loss=0.07788, over 3508.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04103, over 937333.34 frames. ], batch size: 35, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:58:15,326 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.80 vs. limit=22.5
+2024-07-29 05:58:17,634 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.861e+01 6.601e+01 8.054e+01 1.135e+02, threshold=1.320e+02, percent-clipped=0.0
+2024-07-29 05:58:21,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=249082.66666666666, ans=0.125
+2024-07-29 05:58:33,337 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=249109.33333333334, ans=0.2
+2024-07-29 05:58:34,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0
+2024-07-29 05:58:44,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=249122.66666666666, ans=0.125
+2024-07-29 05:58:47,376 INFO [train.py:1114] (2/4) Epoch 19, batch 2850, loss[loss=0.1811, simple_loss=0.2602, pruned_loss=0.05101, over 4957.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2645, pruned_loss=0.04127, over 935419.84 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:58:51,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=249136.0, ans=0.5
+2024-07-29 05:59:14,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=249176.0, ans=0.0
+2024-07-29 05:59:22,026 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.63 vs. limit=15.0
+2024-07-29 05:59:22,115 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.74 vs. limit=10.0
+2024-07-29 05:59:22,306 INFO [train.py:1114] (2/4) Epoch 19, batch 2900, loss[loss=0.1872, simple_loss=0.2715, pruned_loss=0.05147, over 4833.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2649, pruned_loss=0.04073, over 939247.43 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0
+2024-07-29 05:59:25,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=249202.66666666666, ans=0.09899494936611666
+2024-07-29 05:59:30,345 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.762e+01 6.380e+01 7.309e+01 1.230e+02, threshold=1.276e+02, percent-clipped=0.0
+2024-07-29 05:59:38,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=249229.33333333334, ans=0.0
+2024-07-29 05:59:53,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=249256.0, ans=0.1
+2024-07-29 05:59:55,833 INFO [train.py:1114] (2/4) Epoch 19, batch 2950, loss[loss=0.1451, simple_loss=0.2379, pruned_loss=0.02614, over 4717.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04061, over 938613.09 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 05:59:58,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=249269.33333333334, ans=0.125
+2024-07-29 06:00:00,915 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.12 vs. limit=15.0
+2024-07-29 06:00:02,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=249282.66666666666, ans=0.2
+2024-07-29 06:00:11,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=249296.0, ans=0.125
+2024-07-29 06:00:15,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=249296.0, ans=0.2
+2024-07-29 06:00:22,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=249322.66666666666, ans=0.125
+2024-07-29 06:00:25,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.11 vs. limit=12.0
+2024-07-29 06:00:29,670 INFO [train.py:1114] (2/4) Epoch 19, batch 3000, loss[loss=0.1631, simple_loss=0.2592, pruned_loss=0.03347, over 4754.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2631, pruned_loss=0.04058, over 938204.51 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 06:00:29,670 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-29 06:00:35,099 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5847, 3.8718, 3.8331, 4.3713], device='cuda:2')
+2024-07-29 06:00:41,088 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.161, simple_loss=0.2631, pruned_loss=0.02943, over 944034.00 frames.
+2024-07-29 06:00:41,088 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
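The last few lines show the periodic validation pass: train.py announces "Computing validation loss", runs the whole validation set, and then reports the aggregate loss together with the peak CUDA memory. A condensed sketch of that pattern follows; the names model, criterion, and valid_loader and the batch keys are assumed stand-ins, not the actual train.py API. torch.cuda.max_memory_allocated() is the standard PyTorch call behind the memory line.

```python
import torch

# Sketch of a validation pass like the one logged above (assumed interfaces).
@torch.no_grad()
def compute_validation_loss(model, criterion, valid_loader, device) -> None:
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    for batch in valid_loader:
        feats = batch["features"].to(device)
        targets = batch["targets"].to(device)
        num_frames = batch["num_frames"]
        tot_loss += criterion(model(feats), targets).item() * num_frames
        tot_frames += num_frames
    model.train()
    print(f"validation: loss={tot_loss / tot_frames:.4g}, "
          f"over {tot_frames:.2f} frames.")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // 2**20}MB")
```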
+2024-07-29 06:00:50,061 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.637e+01 6.118e+01 7.161e+01 1.064e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-29 06:00:53,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=249349.33333333334, ans=0.125
+2024-07-29 06:00:53,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=249349.33333333334, ans=0.2
+2024-07-29 06:00:55,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=249362.66666666666, ans=0.125
+2024-07-29 06:01:06,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=249376.0, ans=0.95
+2024-07-29 06:01:15,363 INFO [train.py:1114] (2/4) Epoch 19, batch 3050, loss[loss=0.1781, simple_loss=0.265, pruned_loss=0.04564, over 4638.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2643, pruned_loss=0.04082, over 937137.72 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0
+2024-07-29 06:01:19,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=249402.66666666666, ans=0.07
+2024-07-29 06:01:23,954 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249416.0, ans=0.125
+2024-07-29 06:01:34,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=249429.33333333334, ans=0.0
+2024-07-29 06:01:41,227 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=249442.66666666666, ans=0.2
+2024-07-29 06:01:41,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249442.66666666666, ans=0.125
+2024-07-29 06:01:47,652 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.77 vs. limit=15.0
+2024-07-29 06:01:51,099 INFO [train.py:1114] (2/4) Epoch 19, batch 3100, loss[loss=0.2115, simple_loss=0.2821, pruned_loss=0.0704, over 4649.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2636, pruned_loss=0.04101, over 937727.27 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 16.0
+2024-07-29 06:01:55,129 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 06:01:59,699 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.499e+01 6.213e+01 7.046e+01 1.053e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-29 06:02:06,268 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.65 vs. limit=15.0
+2024-07-29 06:02:10,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.25 vs. limit=12.0
+2024-07-29 06:02:13,051 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.13 vs. limit=15.0
+2024-07-29 06:02:20,075 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=249522.66666666666, ans=0.2
+2024-07-29 06:02:24,712 INFO [train.py:1114] (2/4) Epoch 19, batch 3150, loss[loss=0.2003, simple_loss=0.2967, pruned_loss=0.05192, over 4837.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2643, pruned_loss=0.04086, over 938545.91 frames. ], batch size: 18, lr: 3.93e-03, grad_scale: 16.0
+2024-07-29 06:02:29,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=249536.0, ans=0.125
+2024-07-29 06:02:29,557 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=249536.0, ans=0.2
+2024-07-29 06:02:32,900 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=249549.33333333334, ans=0.0
+2024-07-29 06:02:35,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249549.33333333334, ans=0.125
+2024-07-29 06:02:38,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=249562.66666666666, ans=0.09899494936611666
+2024-07-29 06:02:41,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249562.66666666666, ans=0.1
+2024-07-29 06:03:01,861 INFO [train.py:1114] (2/4) Epoch 19, batch 3200, loss[loss=0.1913, simple_loss=0.2715, pruned_loss=0.05552, over 4829.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2644, pruned_loss=0.04046, over 939872.67 frames. ], batch size: 13, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:03:04,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=249602.66666666666, ans=0.125
+2024-07-29 06:03:06,522 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=249602.66666666666, ans=0.0
+2024-07-29 06:03:08,126 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0
+2024-07-29 06:03:10,263 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.924e+01 6.807e+01 8.203e+01 1.254e+02, threshold=1.361e+02, percent-clipped=1.0
+2024-07-29 06:03:19,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249616.0, ans=0.125
+2024-07-29 06:03:37,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=249642.66666666666, ans=0.2
+2024-07-29 06:03:42,174 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.19 vs. limit=22.5
+2024-07-29 06:03:42,708 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249656.0, ans=0.125
+2024-07-29 06:03:45,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=249656.0, ans=0.5
+2024-07-29 06:03:48,471 INFO [train.py:1114] (2/4) Epoch 19, batch 3250, loss[loss=0.1964, simple_loss=0.2876, pruned_loss=0.05259, over 4934.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2645, pruned_loss=0.04036, over 941162.76 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:03:54,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=249669.33333333334, ans=0.125
+2024-07-29 06:04:05,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.13 vs. limit=15.0
+2024-07-29 06:04:07,108 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=249696.0, ans=0.125
+2024-07-29 06:06:10,233 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=249709.33333333334, ans=0.125
+2024-07-29 06:07:11,331 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.04 vs. limit=15.0
+2024-07-29 06:07:13,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=249709.33333333334, ans=0.125
+2024-07-29 06:07:14,120 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0
+2024-07-29 06:07:20,273 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.06 vs. limit=15.0
+2024-07-29 06:07:20,699 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=249736.0, ans=0.125
+2024-07-29 06:07:21,176 INFO [train.py:1114] (2/4) Epoch 19, batch 3300, loss[loss=0.1812, simple_loss=0.2621, pruned_loss=0.05015, over 4695.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.0406, over 941095.36 frames. ], batch size: 19, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:07:24,558 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.61 vs. limit=15.0
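The per-batch summaries in this log are easy to scrape back into a learning curve. Below is a small parser for lines like "Epoch 19, batch 3250, loss[...], tot_loss[loss=0.1726, ...]"; the file name in the usage comment is a placeholder for whichever log-train file the repo ships.

```python
import re

# Extract (epoch, batch, tot_loss) triples from train.py summary lines.
PATTERN = re.compile(r"Epoch (\d+), batch (\d+),.*?tot_loss\[loss=([\d.]+)")

def parse_tot_loss(log_path: str):
    points = []
    with open(log_path) as f:
        for line in f:
            m = PATTERN.search(line)
            if m:
                epoch, batch, loss = m.groups()
                points.append((int(epoch), int(batch), float(loss)))
    return points

# parse_tot_loss("log-train.txt")
# -> [(19, 3150, 0.173), (19, 3200, 0.1726), (19, 3250, 0.1726), ...]
```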
+2024-07-29 06:07:38,748 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 5.786e+01 6.492e+01 7.177e+01 1.036e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-29 06:07:40,347 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=249749.33333333334, ans=0.0
+2024-07-29 06:07:51,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=249776.0, ans=0.09899494936611666
+2024-07-29 06:07:51,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=249776.0, ans=0.125
+2024-07-29 06:08:07,271 INFO [train.py:1114] (2/4) Epoch 19, batch 3350, loss[loss=0.15, simple_loss=0.2449, pruned_loss=0.02754, over 4597.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2643, pruned_loss=0.04106, over 939413.67 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:08:10,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249802.66666666666, ans=0.1
+2024-07-29 06:08:14,211 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=249816.0, ans=0.2
+2024-07-29 06:08:26,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=249829.33333333334, ans=0.0
+2024-07-29 06:08:31,153 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=249842.66666666666, ans=0.125
+2024-07-29 06:08:33,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=249842.66666666666, ans=0.025
+2024-07-29 06:08:35,974 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0
+2024-07-29 06:08:40,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=249856.0, ans=15.0
+2024-07-29 06:08:41,267 INFO [train.py:1114] (2/4) Epoch 19, batch 3400, loss[loss=0.154, simple_loss=0.2443, pruned_loss=0.03187, over 4806.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.265, pruned_loss=0.0416, over 937893.17 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:08:49,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.488e+01 5.998e+01 6.910e+01 1.087e+02, threshold=1.200e+02, percent-clipped=0.0
+2024-07-29 06:08:54,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=249896.0, ans=0.0
+2024-07-29 06:09:15,460 INFO [train.py:1114] (2/4) Epoch 19, batch 3450, loss[loss=0.1825, simple_loss=0.27, pruned_loss=0.0475, over 4713.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.266, pruned_loss=0.04187, over 938035.02 frames. ], batch size: 19, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:09:19,546 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=249936.0, ans=0.2
+2024-07-29 06:09:21,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=249949.33333333334, ans=0.125
+2024-07-29 06:09:33,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=249962.66666666666, ans=0.0
+2024-07-29 06:09:36,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=249976.0, ans=0.0
+2024-07-29 06:09:39,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249976.0, ans=0.125
+2024-07-29 06:09:39,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249976.0, ans=0.0
+2024-07-29 06:09:48,816 INFO [train.py:1114] (2/4) Epoch 19, batch 3500, loss[loss=0.1794, simple_loss=0.2659, pruned_loss=0.04648, over 4943.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2646, pruned_loss=0.04153, over 939157.24 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:09:54,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250002.66666666666, ans=0.125
+2024-07-29 06:09:56,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten.whitening_limit, batch_count=250016.0, ans=15.0
+2024-07-29 06:09:57,883 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.660e+01 6.096e+01 6.757e+01 8.865e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-29 06:09:59,651 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=15.0
+2024-07-29 06:10:02,308 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.69 vs. limit=22.5
+2024-07-29 06:10:05,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.53 vs. limit=15.0
+2024-07-29 06:10:24,718 INFO [train.py:1114] (2/4) Epoch 19, batch 3550, loss[loss=0.1652, simple_loss=0.2651, pruned_loss=0.03261, over 4674.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.264, pruned_loss=0.04085, over 939203.94 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:10:28,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=250069.33333333334, ans=0.125
+2024-07-29 06:10:32,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=250082.66666666666, ans=0.125
+2024-07-29 06:11:06,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=250122.66666666666, ans=0.2
+2024-07-29 06:11:17,172 INFO [train.py:1114] (2/4) Epoch 19, batch 3600, loss[loss=0.1879, simple_loss=0.2708, pruned_loss=0.05254, over 4957.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2642, pruned_loss=0.04063, over 940805.90 frames. ], batch size: 13, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:11:22,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=250136.0, ans=0.2
+2024-07-29 06:11:22,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=250136.0, ans=0.2
+2024-07-29 06:11:26,763 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250149.33333333334, ans=0.1
+2024-07-29 06:11:28,764 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.577e+01 6.277e+01 7.321e+01 1.396e+02, threshold=1.255e+02, percent-clipped=3.0
+2024-07-29 06:11:35,973 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. limit=6.0
+2024-07-29 06:11:43,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=250176.0, ans=0.2
+2024-07-29 06:11:46,923 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=250189.33333333334, ans=0.0
+2024-07-29 06:11:51,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=250189.33333333334, ans=0.125
+2024-07-29 06:11:53,669 INFO [train.py:1114] (2/4) Epoch 19, batch 3650, loss[loss=0.1785, simple_loss=0.2833, pruned_loss=0.0368, over 4895.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2638, pruned_loss=0.0403, over 941459.80 frames. ], batch size: 15, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:11:55,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=250202.66666666666, ans=0.04949747468305833
+2024-07-29 06:12:04,715 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.35 vs. limit=10.0
+2024-07-29 06:12:08,024 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=250229.33333333334, ans=0.125
+2024-07-29 06:12:25,429 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.47 vs. limit=15.0
+2024-07-29 06:12:27,247 INFO [train.py:1114] (2/4) Epoch 19, batch 3700, loss[loss=0.1746, simple_loss=0.2744, pruned_loss=0.03737, over 4934.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2635, pruned_loss=0.04017, over 942255.43 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
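Note that tot_loss in the train.py lines above is not a plain epoch average: the fractional frame counts (e.g. "over 942255.43 frames") suggest a decaying, frames-weighted running mean over recent batches. A sketch of that bookkeeping follows; the class name and decay constant are assumptions for illustration.

```python
# Frames-weighted running average in the spirit of the tot_loss reports above.
class RunningLoss:
    def __init__(self, decay: float = 0.999):
        self.decay = decay
        self.loss_sum = 0.0   # decayed sum of (per-frame loss * frames)
        self.frames = 0.0     # decayed sum of frames (hence fractional totals)

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```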
+2024-07-29 06:12:29,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250269.33333333334, ans=0.1
+2024-07-29 06:12:35,697 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.449e+01 6.027e+01 6.709e+01 1.105e+02, threshold=1.205e+02, percent-clipped=0.0
+2024-07-29 06:12:35,893 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=250282.66666666666, ans=0.025
+2024-07-29 06:12:43,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=250296.0, ans=0.125
+2024-07-29 06:12:45,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=250296.0, ans=10.0
+2024-07-29 06:12:47,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=250309.33333333334, ans=0.0
+2024-07-29 06:13:03,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=250322.66666666666, ans=0.0
+2024-07-29 06:13:04,997 INFO [train.py:1114] (2/4) Epoch 19, batch 3750, loss[loss=0.138, simple_loss=0.2258, pruned_loss=0.02505, over 4813.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2627, pruned_loss=0.03984, over 943925.37 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:13:10,956 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=250336.0, ans=0.0
+2024-07-29 06:13:31,658 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0
+2024-07-29 06:13:41,720 INFO [train.py:1114] (2/4) Epoch 19, batch 3800, loss[loss=0.1927, simple_loss=0.2866, pruned_loss=0.0494, over 4816.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2628, pruned_loss=0.04014, over 941994.17 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:13:42,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250402.66666666666, ans=0.125
+2024-07-29 06:13:45,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=250402.66666666666, ans=0.125
+2024-07-29 06:13:48,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=250416.0, ans=0.0
+2024-07-29 06:13:50,558 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.347e+01 5.643e+01 6.466e+01 7.181e+01 9.486e+01, threshold=1.293e+02, percent-clipped=0.0
+2024-07-29 06:13:52,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=250416.0, ans=0.2
+2024-07-29 06:13:56,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=250429.33333333334, ans=0.2
+2024-07-29 06:13:59,985 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250429.33333333334, ans=0.1
+2024-07-29 06:14:03,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=250442.66666666666, ans=0.125
+2024-07-29 06:14:06,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=250442.66666666666, ans=0.0
+2024-07-29 06:14:15,726 INFO [train.py:1114] (2/4) Epoch 19, batch 3850, loss[loss=0.1898, simple_loss=0.2784, pruned_loss=0.0506, over 4637.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2619, pruned_loss=0.03962, over 942378.29 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:14:35,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=250482.66666666666, ans=0.0
+2024-07-29 06:14:43,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250496.0, ans=0.125
+2024-07-29 06:14:43,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.34 vs. limit=15.0
+2024-07-29 06:14:43,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=250496.0, ans=0.0
+2024-07-29 06:14:44,486 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=250496.0, ans=0.125
+2024-07-29 06:14:45,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=250509.33333333334, ans=0.125
+2024-07-29 06:14:46,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250509.33333333334, ans=0.125
+2024-07-29 06:14:58,125 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=250522.66666666666, ans=0.0
+2024-07-29 06:15:01,944 INFO [train.py:1114] (2/4) Epoch 19, batch 3900, loss[loss=0.2079, simple_loss=0.3004, pruned_loss=0.0577, over 4805.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2631, pruned_loss=0.04008, over 942431.44 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:15:10,488 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.443e+01 5.935e+01 6.800e+01 9.417e+01, threshold=1.187e+02, percent-clipped=0.0
+2024-07-29 06:15:12,748 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=250549.33333333334, ans=0.05
+2024-07-29 06:15:17,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250562.66666666666, ans=0.1
+2024-07-29 06:15:37,616 INFO [train.py:1114] (2/4) Epoch 19, batch 3950, loss[loss=0.1681, simple_loss=0.2651, pruned_loss=0.03557, over 4844.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2643, pruned_loss=0.04029, over 944499.21 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:15:47,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=250616.0, ans=0.0
+2024-07-29 06:15:51,724 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 06:15:52,040 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=12.0
+2024-07-29 06:15:53,955 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.86 vs. limit=22.5
+2024-07-29 06:15:57,314 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=250642.66666666666, ans=0.125
+2024-07-29 06:16:13,330 INFO [train.py:1114] (2/4) Epoch 19, batch 4000, loss[loss=0.1401, simple_loss=0.2275, pruned_loss=0.02639, over 4772.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2646, pruned_loss=0.04089, over 940770.03 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0
+2024-07-29 06:16:24,303 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.630e+01 6.259e+01 7.111e+01 1.064e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 06:16:26,549 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250682.66666666666, ans=0.125
+2024-07-29 06:16:26,764 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.60 vs. limit=15.0
+2024-07-29 06:16:32,453 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=250696.0, ans=0.0
+2024-07-29 06:16:38,354 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250709.33333333334, ans=0.0
+2024-07-29 06:16:42,208 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0
+2024-07-29 06:16:46,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250722.66666666666, ans=0.1
+2024-07-29 06:16:49,536 INFO [train.py:1114] (2/4) Epoch 19, batch 4050, loss[loss=0.199, simple_loss=0.2811, pruned_loss=0.05848, over 3362.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2637, pruned_loss=0.04086, over 939529.39 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:16:52,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=250736.0, ans=10.0
+2024-07-29 06:16:55,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=250749.33333333334, ans=0.125
+2024-07-29 06:17:08,048 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.54 vs. limit=15.0
+2024-07-29 06:17:10,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250776.0, ans=0.125
+2024-07-29 06:17:19,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.07 vs. limit=22.5
+2024-07-29 06:17:19,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250789.33333333334, ans=0.125
+2024-07-29 06:17:21,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=250789.33333333334, ans=0.125
+2024-07-29 06:17:21,810 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250789.33333333334, ans=0.125
+2024-07-29 06:17:23,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=250802.66666666666, ans=0.125
+2024-07-29 06:17:23,763 INFO [train.py:1114] (2/4) Epoch 19, batch 4100, loss[loss=0.1524, simple_loss=0.2465, pruned_loss=0.02915, over 4886.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2638, pruned_loss=0.041, over 938192.09 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0
+2024-07-29 06:17:32,509 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.808e+01 6.562e+01 7.760e+01 1.349e+02, threshold=1.312e+02, percent-clipped=1.0
+2024-07-29 06:17:38,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=250829.33333333334, ans=0.125
+2024-07-29 06:17:44,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=250829.33333333334, ans=0.2
+2024-07-29 06:17:50,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250842.66666666666, ans=0.1
+2024-07-29 06:18:00,126 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250856.0, ans=0.125
+2024-07-29 06:18:04,510 INFO [train.py:1114] (2/4) Epoch 19, batch 4150, loss[loss=0.1744, simple_loss=0.2743, pruned_loss=0.03722, over 4825.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2639, pruned_loss=0.04104, over 938031.34 frames. 
], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:18:08,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=250869.33333333334, ans=0.2 +2024-07-29 06:18:22,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-07-29 06:19:55,814 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.75 vs. limit=15.0 +2024-07-29 06:19:59,650 INFO [train.py:1114] (2/4) Epoch 19, batch 4200, loss[loss=0.1763, simple_loss=0.274, pruned_loss=0.03929, over 4909.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.0409, over 939429.83 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:20:16,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.39 vs. limit=15.0 +2024-07-29 06:20:17,258 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.500e+01 5.908e+01 6.556e+01 1.150e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-29 06:20:30,112 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.02 vs. limit=15.0 +2024-07-29 06:20:34,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=250976.0, ans=0.07 +2024-07-29 06:20:35,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=250976.0, ans=0.125 +2024-07-29 06:20:35,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=250976.0, ans=0.125 +2024-07-29 06:20:36,413 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250989.33333333334, ans=0.125 +2024-07-29 06:20:46,986 INFO [train.py:1114] (2/4) Epoch 19, batch 4250, loss[loss=0.1758, simple_loss=0.2649, pruned_loss=0.0433, over 4645.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2639, pruned_loss=0.04062, over 940691.12 frames. 
], batch size: 12, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:20:54,032 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251002.66666666666, ans=0.125 +2024-07-29 06:21:05,294 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251016.0, ans=0.125 +2024-07-29 06:21:20,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=251029.33333333334, ans=0.0 +2024-07-29 06:22:25,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251042.66666666666, ans=0.1 +2024-07-29 06:22:26,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=251042.66666666666, ans=0.125 +2024-07-29 06:22:36,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251056.0, ans=0.125 +2024-07-29 06:22:57,471 INFO [train.py:1114] (2/4) Epoch 19, batch 4300, loss[loss=0.185, simple_loss=0.2826, pruned_loss=0.04372, over 4750.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2641, pruned_loss=0.04061, over 940125.55 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:22:59,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=251069.33333333334, ans=0.2 +2024-07-29 06:23:47,230 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.705e+01 6.376e+01 7.099e+01 1.039e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-29 06:24:04,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=251082.66666666666, ans=0.0 +2024-07-29 06:24:42,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=251096.0, ans=0.125 +2024-07-29 06:25:40,007 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=251109.33333333334, ans=0.0 +2024-07-29 06:26:43,787 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=251122.66666666666, ans=0.09899494936611666 +2024-07-29 06:26:47,853 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.07 vs. limit=6.0 +2024-07-29 06:26:51,240 INFO [train.py:1114] (2/4) Epoch 19, batch 4350, loss[loss=0.1649, simple_loss=0.2454, pruned_loss=0.04219, over 4761.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2641, pruned_loss=0.04048, over 940966.50 frames. 
], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:28:23,194 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=251162.66666666666, ans=0.125 +2024-07-29 06:28:27,796 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=251162.66666666666, ans=0.125 +2024-07-29 06:28:29,889 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=251176.0, ans=0.2 +2024-07-29 06:28:41,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.49 vs. limit=10.0 +2024-07-29 06:28:43,540 INFO [train.py:1114] (2/4) Epoch 19, batch 4400, loss[loss=0.1619, simple_loss=0.2603, pruned_loss=0.03172, over 4811.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2635, pruned_loss=0.03994, over 940712.78 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:28:52,357 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.682e+01 6.192e+01 7.414e+01 9.950e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 06:28:54,917 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.94 vs. limit=22.5 +2024-07-29 06:28:54,952 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.27 vs. limit=15.0 +2024-07-29 06:28:57,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251229.33333333334, ans=0.1 +2024-07-29 06:28:58,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=251229.33333333334, ans=22.5 +2024-07-29 06:28:59,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=251229.33333333334, ans=0.0 +2024-07-29 06:29:03,240 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=12.0 +2024-07-29 06:29:37,349 INFO [train.py:1114] (2/4) Epoch 19, batch 4450, loss[loss=0.1707, simple_loss=0.2665, pruned_loss=0.03744, over 4944.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2639, pruned_loss=0.04032, over 938766.41 frames. ], batch size: 12, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:29:41,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251269.33333333334, ans=0.1 +2024-07-29 06:29:43,578 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=15.0 +2024-07-29 06:30:04,512 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251309.33333333334, ans=0.1 +2024-07-29 06:30:10,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251322.66666666666, ans=0.1 +2024-07-29 06:30:13,627 INFO [train.py:1114] (2/4) Epoch 19, batch 4500, loss[loss=0.1619, simple_loss=0.2577, pruned_loss=0.03303, over 4738.00 frames. 
], tot_loss[loss=0.1722, simple_loss=0.2643, pruned_loss=0.04009, over 938001.84 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:30:14,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=251336.0, ans=0.1 +2024-07-29 06:30:14,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=251336.0, ans=0.025 +2024-07-29 06:31:20,186 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.536e+01 6.082e+01 6.951e+01 9.632e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-29 06:31:22,373 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=251349.33333333334, ans=0.125 +2024-07-29 06:31:35,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=251362.66666666666, ans=0.1 +2024-07-29 06:32:15,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=251389.33333333334, ans=0.125 +2024-07-29 06:32:15,662 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=251389.33333333334, ans=0.125 +2024-07-29 06:32:20,715 INFO [train.py:1114] (2/4) Epoch 19, batch 4550, loss[loss=0.1598, simple_loss=0.2451, pruned_loss=0.03726, over 4903.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2641, pruned_loss=0.04, over 940196.44 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:32:32,009 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.96 vs. limit=15.0 +2024-07-29 06:32:36,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251416.0, ans=0.125 +2024-07-29 06:32:37,469 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.82 vs. limit=6.0 +2024-07-29 06:33:00,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=251469.33333333334, ans=0.2 +2024-07-29 06:33:01,174 INFO [train.py:1114] (2/4) Epoch 19, batch 4600, loss[loss=0.1778, simple_loss=0.2806, pruned_loss=0.03754, over 4429.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2634, pruned_loss=0.04006, over 938833.22 frames. 
], batch size: 21, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:33:05,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=251469.33333333334, ans=0.2 +2024-07-29 06:33:12,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=251482.66666666666, ans=0.125 +2024-07-29 06:33:12,683 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.756e+01 6.471e+01 7.460e+01 1.091e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-29 06:33:19,260 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=251496.0, ans=0.0 +2024-07-29 06:33:58,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=251522.66666666666, ans=0.125 +2024-07-29 06:33:59,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.53 vs. limit=12.0 +2024-07-29 06:34:08,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=251522.66666666666, ans=0.125 +2024-07-29 06:34:16,628 INFO [train.py:1114] (2/4) Epoch 19, batch 4650, loss[loss=0.1803, simple_loss=0.2659, pruned_loss=0.04738, over 4837.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2634, pruned_loss=0.03993, over 940186.85 frames. ], batch size: 16, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:34:19,630 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=251536.0, ans=0.0 +2024-07-29 06:34:29,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=251536.0, ans=0.125 +2024-07-29 06:36:00,595 INFO [train.py:1114] (2/4) Epoch 19, batch 4700, loss[loss=0.1658, simple_loss=0.2563, pruned_loss=0.03759, over 4707.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2631, pruned_loss=0.04011, over 937719.93 frames. ], batch size: 11, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:36:28,166 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.853e+01 6.382e+01 7.357e+01 1.166e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 06:36:32,137 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=251629.33333333334, ans=0.05 +2024-07-29 06:36:32,306 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251629.33333333334, ans=0.125 +2024-07-29 06:36:34,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251629.33333333334, ans=0.0 +2024-07-29 06:36:36,556 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.90 vs. 
limit=15.0 +2024-07-29 06:37:31,417 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=251642.66666666666, ans=0.0 +2024-07-29 06:38:02,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251656.0, ans=0.1 +2024-07-29 06:38:02,662 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=15.0 +2024-07-29 06:38:09,499 INFO [train.py:1114] (2/4) Epoch 19, batch 4750, loss[loss=0.1957, simple_loss=0.287, pruned_loss=0.05219, over 4521.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04053, over 935895.85 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:39:20,579 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=251696.0, ans=0.125 +2024-07-29 06:40:04,350 INFO [train.py:1114] (2/4) Epoch 19, batch 4800, loss[loss=0.1624, simple_loss=0.2526, pruned_loss=0.03607, over 4697.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2628, pruned_loss=0.04064, over 932895.21 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:40:26,838 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.14 vs. limit=22.5 +2024-07-29 06:40:33,282 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.823e+01 6.588e+01 7.932e+01 1.236e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 06:40:33,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=251749.33333333334, ans=0.125 +2024-07-29 06:41:12,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=251762.66666666666, ans=0.125 +2024-07-29 06:41:52,681 INFO [train.py:1114] (2/4) Epoch 19, batch 4850, loss[loss=0.1715, simple_loss=0.2721, pruned_loss=0.03546, over 4742.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.263, pruned_loss=0.04075, over 932545.45 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:42:00,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251802.66666666666, ans=0.0 +2024-07-29 06:42:00,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.97 vs. limit=15.0 +2024-07-29 06:42:01,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=251802.66666666666, ans=0.125 +2024-07-29 06:42:13,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=251816.0, ans=0.0 +2024-07-29 06:42:15,341 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=251816.0, ans=0.0 +2024-07-29 06:42:17,304 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=251816.0, ans=0.125 +2024-07-29 06:43:11,735 INFO [train.py:1114] (2/4) Epoch 19, batch 4900, loss[loss=0.1609, simple_loss=0.2558, pruned_loss=0.03302, over 4752.00 frames. 
], tot_loss[loss=0.1724, simple_loss=0.2632, pruned_loss=0.04082, over 934341.33 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:43:55,402 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.611e+01 6.100e+01 6.685e+01 9.009e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 06:43:55,671 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:44:08,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=251896.0, ans=0.2 +2024-07-29 06:44:11,043 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=251896.0, ans=0.125 +2024-07-29 06:44:11,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=251896.0, ans=0.125 +2024-07-29 06:44:39,926 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=251909.33333333334, ans=0.025 +2024-07-29 06:44:43,564 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.86 vs. limit=22.5 +2024-07-29 06:44:46,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=251922.66666666666, ans=0.125 +2024-07-29 06:44:51,787 INFO [train.py:1114] (2/4) Epoch 19, batch 4950, loss[loss=0.2048, simple_loss=0.2752, pruned_loss=0.06718, over 3402.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2643, pruned_loss=0.0415, over 931464.16 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 64.0 +2024-07-29 06:44:54,758 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=12.0 +2024-07-29 06:45:30,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251962.66666666666, ans=0.1 +2024-07-29 06:45:47,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=251989.33333333334, ans=0.125 +2024-07-29 06:45:47,821 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=251989.33333333334, ans=0.09899494936611666 +2024-07-29 06:45:51,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=251989.33333333334, ans=0.125 +2024-07-29 06:45:53,123 INFO [train.py:1114] (2/4) Epoch 19, batch 5000, loss[loss=0.1584, simple_loss=0.2444, pruned_loss=0.03616, over 4671.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2635, pruned_loss=0.04131, over 935553.07 frames. 
], batch size: 14, lr: 3.91e-03, grad_scale: 64.0 +2024-07-29 06:46:17,619 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.743e+01 6.406e+01 6.805e+01 1.014e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-29 06:46:32,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252029.33333333334, ans=0.1 +2024-07-29 06:46:39,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252042.66666666666, ans=0.125 +2024-07-29 06:46:40,356 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=252042.66666666666, ans=0.0 +2024-07-29 06:46:43,588 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=252042.66666666666, ans=0.0 +2024-07-29 06:46:48,436 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=252056.0, ans=0.2 +2024-07-29 06:46:51,756 INFO [train.py:1114] (2/4) Epoch 19, batch 5050, loss[loss=0.1596, simple_loss=0.2484, pruned_loss=0.03539, over 4846.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2631, pruned_loss=0.0412, over 937982.94 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 64.0 +2024-07-29 06:46:53,909 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252069.33333333334, ans=0.125 +2024-07-29 06:46:54,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=252069.33333333334, ans=0.1 +2024-07-29 06:47:16,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=252096.0, ans=0.0 +2024-07-29 06:47:26,392 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.31 vs. limit=8.0 +2024-07-29 06:47:33,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-29 06:47:40,388 INFO [train.py:1114] (2/4) Epoch 19, batch 5100, loss[loss=0.148, simple_loss=0.2321, pruned_loss=0.03193, over 4782.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2639, pruned_loss=0.04125, over 935746.83 frames. 
], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:47:44,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=252136.0, ans=0.0 +2024-07-29 06:47:50,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=252136.0, ans=0.0 +2024-07-29 06:47:51,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252149.33333333334, ans=0.1 +2024-07-29 06:47:53,315 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=252149.33333333334, ans=0.125 +2024-07-29 06:47:54,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=252149.33333333334, ans=0.04949747468305833 +2024-07-29 06:47:55,096 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.744e+01 6.473e+01 7.169e+01 1.065e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 06:48:22,401 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=252162.66666666666, ans=0.0 +2024-07-29 06:49:24,762 INFO [train.py:1114] (2/4) Epoch 19, batch 5150, loss[loss=0.1769, simple_loss=0.2713, pruned_loss=0.04129, over 4831.00 frames. ], tot_loss[loss=0.174, simple_loss=0.265, pruned_loss=0.04151, over 936330.86 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:49:29,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=252202.66666666666, ans=0.125 +2024-07-29 06:49:34,756 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.41 vs. limit=10.0 +2024-07-29 06:49:37,691 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=252216.0, ans=0.1 +2024-07-29 06:49:37,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=252216.0, ans=0.95 +2024-07-29 06:49:40,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252216.0, ans=0.125 +2024-07-29 06:49:50,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=252242.66666666666, ans=0.2 +2024-07-29 06:50:13,873 INFO [train.py:1114] (2/4) Epoch 19, batch 5200, loss[loss=0.1711, simple_loss=0.257, pruned_loss=0.04266, over 4662.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2647, pruned_loss=0.04131, over 936856.38 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:24,636 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.788e+01 6.281e+01 7.022e+01 9.096e+01, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 06:50:34,418 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.69 vs. limit=22.5 +2024-07-29 06:50:45,068 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-07-29 06:50:49,278 INFO [train.py:1114] (2/4) Epoch 19, batch 5250, loss[loss=0.1596, simple_loss=0.2557, pruned_loss=0.03176, over 4897.00 frames. 
], tot_loss[loss=0.1733, simple_loss=0.264, pruned_loss=0.04128, over 936392.75 frames. ], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:50,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=252336.0, ans=0.125 +2024-07-29 06:51:00,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=252349.33333333334, ans=0.05 +2024-07-29 06:51:09,828 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=252362.66666666666, ans=0.2 +2024-07-29 06:51:14,635 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252376.0, ans=0.125 +2024-07-29 06:51:14,717 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=252376.0, ans=0.0 +2024-07-29 06:51:15,955 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=252376.0, ans=0.125 +2024-07-29 06:51:24,395 INFO [train.py:1114] (2/4) Epoch 19, batch 5300, loss[loss=0.1827, simple_loss=0.2795, pruned_loss=0.04294, over 4598.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2634, pruned_loss=0.0413, over 934388.62 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:51:33,496 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.685e+01 6.229e+01 6.963e+01 9.686e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 06:51:36,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=252416.0, ans=0.125 +2024-07-29 06:51:42,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=252429.33333333334, ans=0.125 +2024-07-29 06:51:53,267 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=252456.0, ans=0.125 +2024-07-29 06:51:56,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=252456.0, ans=6.0 +2024-07-29 06:51:57,736 INFO [train.py:1114] (2/4) Epoch 19, batch 5350, loss[loss=0.1612, simple_loss=0.2438, pruned_loss=0.03928, over 4537.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2642, pruned_loss=0.04117, over 936287.64 frames. ], batch size: 10, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:12,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=252496.0, ans=0.0 +2024-07-29 06:52:24,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252522.66666666666, ans=0.125 +2024-07-29 06:52:26,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=252522.66666666666, ans=0.0 +2024-07-29 06:52:32,373 INFO [train.py:1114] (2/4) Epoch 19, batch 5400, loss[loss=0.1823, simple_loss=0.2713, pruned_loss=0.04667, over 4273.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.265, pruned_loss=0.04166, over 931342.81 frames. 
], batch size: 25, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:41,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=22.5 +2024-07-29 06:52:42,196 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.716e+01 6.217e+01 6.684e+01 8.948e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 06:52:49,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=252562.66666666666, ans=0.0 +2024-07-29 06:52:53,360 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.44 vs. limit=12.0 +2024-07-29 06:52:56,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=252576.0, ans=0.125 +2024-07-29 06:53:08,933 INFO [train.py:1114] (2/4) Epoch 19, batch 5450, loss[loss=0.1738, simple_loss=0.2525, pruned_loss=0.04753, over 4702.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2638, pruned_loss=0.04101, over 933898.24 frames. ], batch size: 11, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:12,872 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=252602.66666666666, ans=0.2 +2024-07-29 06:53:16,329 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:53:21,991 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.41 vs. limit=12.0 +2024-07-29 06:53:23,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=252616.0, ans=0.125 +2024-07-29 06:53:28,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=252629.33333333334, ans=22.5 +2024-07-29 06:53:39,235 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=252656.0, ans=0.0 +2024-07-29 06:53:39,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=252656.0, ans=0.125 +2024-07-29 06:53:45,871 INFO [train.py:1114] (2/4) Epoch 19, batch 5500, loss[loss=0.1808, simple_loss=0.2744, pruned_loss=0.0436, over 4172.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.0406, over 931424.63 frames. ], batch size: 25, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:46,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252669.33333333334, ans=0.125 +2024-07-29 06:53:55,334 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.621e+01 6.177e+01 7.042e+01 9.819e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-29 06:54:00,533 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.97 vs. limit=15.0 +2024-07-29 06:54:01,888 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.91 vs. 
limit=10.0 +2024-07-29 06:54:03,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=252696.0, ans=0.125 +2024-07-29 06:54:23,883 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=252736.0, ans=0.0 +2024-07-29 06:54:24,294 INFO [train.py:1114] (2/4) Epoch 19, batch 5550, loss[loss=0.1328, simple_loss=0.2193, pruned_loss=0.02318, over 4704.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2618, pruned_loss=0.0403, over 933448.66 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:54:30,904 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.07 vs. limit=10.0 +2024-07-29 06:54:34,892 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:54:46,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=252776.0, ans=0.125 +2024-07-29 06:54:46,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=252776.0, ans=0.2 +2024-07-29 06:54:49,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=252776.0, ans=0.0 +2024-07-29 06:54:53,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=252789.33333333334, ans=0.2 +2024-07-29 06:54:53,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=252789.33333333334, ans=15.0 +2024-07-29 06:54:55,875 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=252789.33333333334, ans=0.2 +2024-07-29 06:55:00,412 INFO [train.py:1114] (2/4) Epoch 19, batch 5600, loss[loss=0.1713, simple_loss=0.2554, pruned_loss=0.04362, over 4747.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04006, over 934427.43 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:04,671 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=252802.66666666666, ans=0.2 +2024-07-29 06:55:10,090 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 6.000e+01 7.138e+01 7.919e+01 1.152e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-29 06:55:12,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.08 vs. limit=15.0 +2024-07-29 06:55:15,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.15 vs. limit=22.5 +2024-07-29 06:55:18,065 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.91 vs. 
limit=22.5 +2024-07-29 06:55:20,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=252829.33333333334, ans=0.5 +2024-07-29 06:55:24,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252842.66666666666, ans=0.125 +2024-07-29 06:55:31,440 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=252856.0, ans=0.0 +2024-07-29 06:55:37,015 INFO [train.py:1114] (2/4) Epoch 19, batch 5650, loss[loss=0.1682, simple_loss=0.2591, pruned_loss=0.03859, over 4418.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2616, pruned_loss=0.04006, over 936916.38 frames. ], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:41,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=252869.33333333334, ans=0.125 +2024-07-29 06:55:43,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252882.66666666666, ans=0.1 +2024-07-29 06:55:48,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=252882.66666666666, ans=0.2 +2024-07-29 06:56:02,088 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=252909.33333333334, ans=0.2 +2024-07-29 06:56:14,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=252922.66666666666, ans=0.0 +2024-07-29 06:56:15,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=252936.0, ans=0.125 +2024-07-29 06:56:15,668 INFO [train.py:1114] (2/4) Epoch 19, batch 5700, loss[loss=0.1859, simple_loss=0.2929, pruned_loss=0.03946, over 4698.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2624, pruned_loss=0.04022, over 937725.79 frames. ], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:56:16,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=252936.0, ans=0.125 +2024-07-29 06:56:19,279 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:56:21,197 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=252936.0, ans=0.0 +2024-07-29 06:56:25,034 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.631e+01 6.115e+01 6.862e+01 9.521e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-29 06:56:29,258 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252962.66666666666, ans=0.125 +2024-07-29 06:56:36,824 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.64 vs. limit=15.0 +2024-07-29 06:56:43,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.70 vs. 
limit=15.0 +2024-07-29 06:56:54,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.08 vs. limit=15.0 +2024-07-29 06:56:56,497 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.62 vs. limit=22.5 +2024-07-29 06:56:57,572 INFO [train.py:1114] (2/4) Epoch 19, batch 5750, loss[loss=0.2038, simple_loss=0.2905, pruned_loss=0.05857, over 4726.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2633, pruned_loss=0.04054, over 937704.15 frames. ], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:12,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=253016.0, ans=0.0 +2024-07-29 06:57:27,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=253056.0, ans=0.0 +2024-07-29 06:57:33,971 INFO [train.py:1114] (2/4) Epoch 19, batch 5800, loss[loss=0.19, simple_loss=0.2729, pruned_loss=0.05356, over 4747.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2642, pruned_loss=0.04125, over 937085.75 frames. ], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:38,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=253069.33333333334, ans=0.0 +2024-07-29 06:57:39,403 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253069.33333333334, ans=0.125 +2024-07-29 06:57:42,854 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=253082.66666666666, ans=0.025 +2024-07-29 06:57:43,291 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+01 5.591e+01 6.504e+01 7.272e+01 1.266e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-29 06:57:44,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=253082.66666666666, ans=0.0 +2024-07-29 06:57:51,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=253096.0, ans=0.0 +2024-07-29 06:58:01,177 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:58:01,182 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=253122.66666666666, ans=10.0 +2024-07-29 06:58:01,462 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.45 vs. limit=6.0 +2024-07-29 06:58:03,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253122.66666666666, ans=0.1 +2024-07-29 06:58:04,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=253122.66666666666, ans=0.025 +2024-07-29 06:58:08,033 INFO [train.py:1114] (2/4) Epoch 19, batch 5850, loss[loss=0.1846, simple_loss=0.2685, pruned_loss=0.05032, over 4480.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2641, pruned_loss=0.04148, over 937873.18 frames. 
], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:16,102 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253149.33333333334, ans=0.1 +2024-07-29 06:58:26,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253162.66666666666, ans=0.125 +2024-07-29 06:58:45,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253202.66666666666, ans=0.125 +2024-07-29 06:58:46,386 INFO [train.py:1114] (2/4) Epoch 19, batch 5900, loss[loss=0.181, simple_loss=0.2752, pruned_loss=0.0434, over 4680.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2635, pruned_loss=0.04124, over 938102.12 frames. ], batch size: 15, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:55,600 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.657e+01 6.141e+01 7.066e+01 1.029e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 06:58:58,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=253216.0, ans=0.0 +2024-07-29 06:59:04,601 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=253229.33333333334, ans=0.125 +2024-07-29 06:59:06,680 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253242.66666666666, ans=0.1 +2024-07-29 06:59:19,505 INFO [train.py:1114] (2/4) Epoch 19, batch 5950, loss[loss=0.1998, simple_loss=0.2861, pruned_loss=0.05673, over 4681.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2628, pruned_loss=0.04078, over 939811.91 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:19,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=253269.33333333334, ans=0.125 +2024-07-29 06:59:43,826 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253296.0, ans=0.1 +2024-07-29 06:59:52,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=253322.66666666666, ans=0.125 +2024-07-29 06:59:54,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=253322.66666666666, ans=0.0 +2024-07-29 06:59:58,959 INFO [train.py:1114] (2/4) Epoch 19, batch 6000, loss[loss=0.1769, simple_loss=0.2781, pruned_loss=0.0378, over 4134.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2622, pruned_loss=0.04046, over 936848.48 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:58,959 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 07:00:08,055 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.7991, 2.0942, 3.6501, 2.1767], device='cuda:2') +2024-07-29 07:00:15,063 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1606, simple_loss=0.2627, pruned_loss=0.02924, over 944034.00 frames. 
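The `zipformer.py:1858` entry above logs `attn_weights_entropy`, a per-head entropy of the self-attention weights recorded as a validation-time diagnostic (low entropy means a head concentrates its attention on few positions). Below is a minimal sketch of how such a per-head entropy could be computed; the function name, tensor shapes, and averaging choice are illustrative assumptions, not the actual icefall `zipformer.py` implementation.

    # Sketch only: assumed shapes/name, not icefall's real zipformer.py code.
    import torch

    def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
        """attn_weights: (num_heads, query_len, key_len); each row sums to 1."""
        eps = 1.0e-20  # guard against log(0) for exactly-zero weights
        # Shannon entropy over key positions, per (head, query) pair.
        ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
        # Average over query positions -> one entropy value per head,
        # matching the per-head tensor printed in the log above.
        return ent.mean(dim=-1)

    # Usage on random softmax weights (4 heads, 10 queries, 10 keys):
    w = torch.softmax(torch.randn(4, 10, 10), dim=-1)
    print(attn_weights_entropy(w))  # e.g. tensor([2.1, 1.9, 2.0, 2.2])
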
+2024-07-29 07:00:15,063 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 07:00:22,217 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=253349.33333333334, ans=0.07 +2024-07-29 07:00:24,597 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.715e+01 6.299e+01 6.877e+01 1.010e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:00:26,096 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=253349.33333333334, ans=0.025 +2024-07-29 07:00:31,589 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=253362.66666666666, ans=0.125 +2024-07-29 07:00:47,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=253389.33333333334, ans=0.09899494936611666 +2024-07-29 07:00:47,789 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:00:50,873 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-07-29 07:00:57,729 INFO [train.py:1114] (2/4) Epoch 19, batch 6050, loss[loss=0.1802, simple_loss=0.2662, pruned_loss=0.04715, over 4777.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2616, pruned_loss=0.0402, over 938147.33 frames. ], batch size: 12, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:00:59,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=253402.66666666666, ans=0.025 +2024-07-29 07:01:13,553 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.66 vs. limit=15.0 +2024-07-29 07:01:13,839 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:01:15,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=253429.33333333334, ans=0.04949747468305833 +2024-07-29 07:01:32,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253456.0, ans=0.125 +2024-07-29 07:01:35,551 INFO [train.py:1114] (2/4) Epoch 19, batch 6100, loss[loss=0.1903, simple_loss=0.2869, pruned_loss=0.04685, over 4682.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2615, pruned_loss=0.04014, over 937573.28 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:42,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=253469.33333333334, ans=0.0 +2024-07-29 07:01:46,498 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.746e+01 6.337e+01 7.599e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 07:01:46,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253482.66666666666, ans=0.125 +2024-07-29 07:01:57,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.81 vs. 
limit=15.0 +2024-07-29 07:01:59,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=253509.33333333334, ans=0.125 +2024-07-29 07:02:02,925 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=253509.33333333334, ans=0.125 +2024-07-29 07:02:10,980 INFO [train.py:1114] (2/4) Epoch 19, batch 6150, loss[loss=0.1959, simple_loss=0.2803, pruned_loss=0.05572, over 3402.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2619, pruned_loss=0.04043, over 936599.34 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:02:13,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=253536.0, ans=0.125 +2024-07-29 07:02:46,250 INFO [train.py:1114] (2/4) Epoch 19, batch 6200, loss[loss=0.1807, simple_loss=0.2688, pruned_loss=0.04627, over 4744.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2621, pruned_loss=0.04011, over 936451.45 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:03:00,850 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.872e+01 6.274e+01 7.114e+01 1.110e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 07:03:04,431 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=253629.33333333334, ans=0.0 +2024-07-29 07:03:10,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.21 vs. limit=15.0 +2024-07-29 07:03:15,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253642.66666666666, ans=0.125 +2024-07-29 07:03:16,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=253642.66666666666, ans=0.2 +2024-07-29 07:03:19,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=253656.0, ans=0.125 +2024-07-29 07:03:25,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=253656.0, ans=0.0 +2024-07-29 07:03:26,825 INFO [train.py:1114] (2/4) Epoch 19, batch 6250, loss[loss=0.1671, simple_loss=0.267, pruned_loss=0.03358, over 4812.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2618, pruned_loss=0.03979, over 932993.95 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:03:35,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253682.66666666666, ans=0.125 +2024-07-29 07:03:39,993 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=253696.0, ans=0.125 +2024-07-29 07:03:42,160 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.93 vs. limit=6.0 +2024-07-29 07:03:50,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.61 vs. 
limit=22.5 +2024-07-29 07:03:54,582 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=253722.66666666666, ans=0.125 +2024-07-29 07:03:54,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=253722.66666666666, ans=0.125 +2024-07-29 07:04:00,512 INFO [train.py:1114] (2/4) Epoch 19, batch 6300, loss[loss=0.1534, simple_loss=0.2325, pruned_loss=0.03717, over 4542.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2618, pruned_loss=0.04006, over 929932.42 frames. ], batch size: 10, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:00,643 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=253736.0, ans=0.2 +2024-07-29 07:04:07,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=253749.33333333334, ans=0.125 +2024-07-29 07:04:09,769 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.656e+01 6.439e+01 7.394e+01 1.114e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 07:04:10,703 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=253749.33333333334, ans=0.125 +2024-07-29 07:04:23,666 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.91 vs. limit=15.0 +2024-07-29 07:04:25,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253776.0, ans=0.125 +2024-07-29 07:04:46,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=253789.33333333334, ans=0.04949747468305833 +2024-07-29 07:04:47,720 INFO [train.py:1114] (2/4) Epoch 19, batch 6350, loss[loss=0.2332, simple_loss=0.3273, pruned_loss=0.06955, over 4531.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03992, over 933880.01 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:55,109 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.48 vs. limit=12.0 +2024-07-29 07:04:56,159 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=253816.0, ans=0.125 +2024-07-29 07:04:57,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=253816.0, ans=0.0 +2024-07-29 07:05:06,382 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.76 vs. limit=10.0 +2024-07-29 07:05:08,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253842.66666666666, ans=0.1 +2024-07-29 07:05:10,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253842.66666666666, ans=0.125 +2024-07-29 07:05:11,645 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.26 vs. 
limit=15.0 +2024-07-29 07:05:12,004 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=253842.66666666666, ans=0.0 +2024-07-29 07:05:18,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=253856.0, ans=0.125 +2024-07-29 07:05:21,130 INFO [train.py:1114] (2/4) Epoch 19, batch 6400, loss[loss=0.1811, simple_loss=0.2724, pruned_loss=0.04484, over 4640.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2627, pruned_loss=0.04048, over 934947.93 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:05:21,248 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253869.33333333334, ans=0.1 +2024-07-29 07:05:26,920 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.04 vs. limit=22.5 +2024-07-29 07:05:27,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253882.66666666666, ans=0.1 +2024-07-29 07:05:30,226 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.936e+01 6.680e+01 7.365e+01 1.184e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-29 07:05:31,006 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253882.66666666666, ans=0.125 +2024-07-29 07:05:34,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=253896.0, ans=0.0 +2024-07-29 07:05:36,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=253896.0, ans=0.2 +2024-07-29 07:05:39,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=253896.0, ans=0.125 +2024-07-29 07:05:51,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=253922.66666666666, ans=0.125 +2024-07-29 07:06:01,013 INFO [train.py:1114] (2/4) Epoch 19, batch 6450, loss[loss=0.1697, simple_loss=0.2638, pruned_loss=0.03778, over 4523.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04056, over 938538.78 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:05,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=253936.0, ans=0.125 +2024-07-29 07:06:05,874 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253936.0, ans=0.1 +2024-07-29 07:06:06,406 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=253936.0, ans=0.125 +2024-07-29 07:06:45,365 INFO [train.py:1114] (2/4) Epoch 19, batch 6500, loss[loss=0.2028, simple_loss=0.2827, pruned_loss=0.06138, over 3048.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2628, pruned_loss=0.04023, over 939588.97 frames. 
], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:49,697 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254002.66666666666, ans=0.125 +2024-07-29 07:06:50,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254002.66666666666, ans=0.1 +2024-07-29 07:06:52,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=254016.0, ans=0.0 +2024-07-29 07:06:54,899 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.939e+01 5.824e+01 6.462e+01 7.830e+01 1.082e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-29 07:07:03,216 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0 +2024-07-29 07:07:20,293 INFO [train.py:1114] (2/4) Epoch 19, batch 6550, loss[loss=0.1522, simple_loss=0.2316, pruned_loss=0.03638, over 4803.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2614, pruned_loss=0.03966, over 942567.62 frames. ], batch size: 11, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:07:27,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=254069.33333333334, ans=0.125 +2024-07-29 07:07:42,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=254109.33333333334, ans=0.125 +2024-07-29 07:07:49,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=254122.66666666666, ans=0.02 +2024-07-29 07:07:49,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254122.66666666666, ans=0.1 +2024-07-29 07:07:52,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=254122.66666666666, ans=0.125 +2024-07-29 07:07:55,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=254122.66666666666, ans=0.125 +2024-07-29 07:07:56,772 INFO [train.py:1114] (2/4) Epoch 19, batch 6600, loss[loss=0.173, simple_loss=0.2792, pruned_loss=0.03338, over 4932.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2613, pruned_loss=0.03938, over 944747.24 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:06,408 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.577e+01 6.191e+01 6.872e+01 1.333e+02, threshold=1.238e+02, percent-clipped=1.0 +2024-07-29 07:08:10,185 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.04 vs. 
limit=12.0 +2024-07-29 07:08:13,264 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=254162.66666666666, ans=0.0 +2024-07-29 07:08:17,318 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=254176.0, ans=0.0 +2024-07-29 07:08:22,034 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=254176.0, ans=0.2 +2024-07-29 07:08:22,179 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.62 vs. limit=6.0 +2024-07-29 07:08:30,613 INFO [train.py:1114] (2/4) Epoch 19, batch 6650, loss[loss=0.1695, simple_loss=0.2734, pruned_loss=0.03279, over 4614.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2617, pruned_loss=0.03959, over 943374.34 frames. ], batch size: 17, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:31,481 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254202.66666666666, ans=0.1 +2024-07-29 07:08:34,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254202.66666666666, ans=0.125 +2024-07-29 07:08:47,054 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=254229.33333333334, ans=0.0 +2024-07-29 07:08:51,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.56 vs. limit=15.0 +2024-07-29 07:09:04,206 INFO [train.py:1114] (2/4) Epoch 19, batch 6700, loss[loss=0.1915, simple_loss=0.3, pruned_loss=0.04153, over 4666.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2621, pruned_loss=0.03983, over 941920.39 frames. ], batch size: 19, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:07,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=254269.33333333334, ans=0.2 +2024-07-29 07:09:13,677 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.681e+01 5.588e+01 6.301e+01 6.767e+01 8.851e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:09:21,681 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.07 vs. limit=22.5 +2024-07-29 07:09:22,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254296.0, ans=0.125 +2024-07-29 07:09:23,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254296.0, ans=0.125 +2024-07-29 07:09:32,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=254322.66666666666, ans=0.125 +2024-07-29 07:09:35,155 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=15.0 +2024-07-29 07:09:38,296 INFO [train.py:1114] (2/4) Epoch 19, batch 6750, loss[loss=0.1846, simple_loss=0.2721, pruned_loss=0.04853, over 4292.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2621, pruned_loss=0.04001, over 940378.96 frames. 
], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:42,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.07 vs. limit=22.5 +2024-07-29 07:09:42,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=254336.0, ans=0.125 +2024-07-29 07:09:43,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=254336.0, ans=0.125 +2024-07-29 07:09:44,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=254349.33333333334, ans=0.0 +2024-07-29 07:09:51,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=254362.66666666666, ans=0.025 +2024-07-29 07:10:01,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=254376.0, ans=0.025 +2024-07-29 07:10:05,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=254389.33333333334, ans=0.125 +2024-07-29 07:10:13,861 INFO [train.py:1114] (2/4) Epoch 19, batch 6800, loss[loss=0.1436, simple_loss=0.2313, pruned_loss=0.02797, over 4628.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2631, pruned_loss=0.04006, over 938836.12 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:17,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254402.66666666666, ans=0.0 +2024-07-29 07:10:18,429 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=254402.66666666666, ans=0.125 +2024-07-29 07:10:19,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=254402.66666666666, ans=0.125 +2024-07-29 07:10:22,884 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.699e+01 6.328e+01 7.077e+01 1.070e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:10:23,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=254416.0, ans=10.0 +2024-07-29 07:10:25,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254416.0, ans=0.125 +2024-07-29 07:10:32,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254429.33333333334, ans=0.125 +2024-07-29 07:10:40,471 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.93 vs. limit=15.0 +2024-07-29 07:10:44,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=254456.0, ans=0.0 +2024-07-29 07:10:46,437 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.68 vs. limit=22.5 +2024-07-29 07:10:46,763 INFO [train.py:1114] (2/4) Epoch 19, batch 6850, loss[loss=0.2005, simple_loss=0.3013, pruned_loss=0.04985, over 4691.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2624, pruned_loss=0.03978, over 940755.62 frames. 
], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:48,221 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=254469.33333333334, ans=0.125 +2024-07-29 07:11:01,463 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=254496.0, ans=0.0 +2024-07-29 07:11:03,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=254496.0, ans=0.0 +2024-07-29 07:11:07,370 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:11:08,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=254509.33333333334, ans=0.5 +2024-07-29 07:11:18,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254522.66666666666, ans=0.125 +2024-07-29 07:11:20,049 INFO [train.py:1114] (2/4) Epoch 19, batch 6900, loss[loss=0.1522, simple_loss=0.2428, pruned_loss=0.03082, over 4964.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.263, pruned_loss=0.04004, over 943274.30 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:11:22,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=254536.0, ans=0.0 +2024-07-29 07:11:23,535 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254536.0, ans=0.0 +2024-07-29 07:11:29,550 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.808e+01 6.453e+01 7.424e+01 1.237e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 07:11:35,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=254562.66666666666, ans=0.125 +2024-07-29 07:11:38,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=254562.66666666666, ans=0.09899494936611666 +2024-07-29 07:11:53,802 INFO [train.py:1114] (2/4) Epoch 19, batch 6950, loss[loss=0.165, simple_loss=0.2376, pruned_loss=0.04623, over 4542.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2627, pruned_loss=0.04042, over 940763.89 frames. 
], batch size: 10, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:11:57,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=254602.66666666666, ans=0.0 +2024-07-29 07:11:57,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=254602.66666666666, ans=0.2 +2024-07-29 07:12:05,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=254616.0, ans=0.025 +2024-07-29 07:12:07,394 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=254629.33333333334, ans=0.125 +2024-07-29 07:12:08,677 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=254629.33333333334, ans=0.0 +2024-07-29 07:12:12,002 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:12:29,176 INFO [train.py:1114] (2/4) Epoch 19, batch 7000, loss[loss=0.2107, simple_loss=0.304, pruned_loss=0.05867, over 4631.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2619, pruned_loss=0.04037, over 939198.62 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:12:31,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0 +2024-07-29 07:12:38,423 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.806e+01 6.455e+01 7.186e+01 1.060e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 07:12:45,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254696.0, ans=0.1 +2024-07-29 07:13:00,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254722.66666666666, ans=0.1 +2024-07-29 07:13:01,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=254736.0, ans=0.025 +2024-07-29 07:13:02,122 INFO [train.py:1114] (2/4) Epoch 19, batch 7050, loss[loss=0.1952, simple_loss=0.2848, pruned_loss=0.05285, over 4699.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2625, pruned_loss=0.04055, over 942449.43 frames. ], batch size: 19, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:13:06,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=254736.0, ans=0.125 +2024-07-29 07:13:13,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254749.33333333334, ans=0.1 +2024-07-29 07:13:29,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=254776.0, ans=0.0 +2024-07-29 07:13:38,703 INFO [train.py:1114] (2/4) Epoch 19, batch 7100, loss[loss=0.1711, simple_loss=0.2698, pruned_loss=0.03619, over 4796.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2638, pruned_loss=0.04119, over 937140.18 frames. 
], batch size: 15, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:13:49,300 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.809e+01 6.351e+01 7.232e+01 1.086e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 07:14:05,433 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254842.66666666666, ans=0.125 +2024-07-29 07:14:08,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=254856.0, ans=0.125 +2024-07-29 07:14:13,169 INFO [train.py:1114] (2/4) Epoch 19, batch 7150, loss[loss=0.1814, simple_loss=0.2799, pruned_loss=0.04144, over 4491.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2621, pruned_loss=0.04036, over 938289.36 frames. ], batch size: 21, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:14:15,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.21 vs. limit=12.0 +2024-07-29 07:14:15,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=254869.33333333334, ans=0.125 +2024-07-29 07:14:24,819 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=254882.66666666666, ans=0.125 +2024-07-29 07:14:28,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=254896.0, ans=0.0 +2024-07-29 07:14:32,059 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=254909.33333333334, ans=0.125 +2024-07-29 07:14:32,732 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=254909.33333333334, ans=0.125 +2024-07-29 07:14:40,074 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=254922.66666666666, ans=0.1 +2024-07-29 07:14:40,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254922.66666666666, ans=0.1 +2024-07-29 07:14:42,672 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=254922.66666666666, ans=0.125 +2024-07-29 07:14:46,072 INFO [train.py:1114] (2/4) Epoch 19, batch 7200, loss[loss=0.1977, simple_loss=0.2901, pruned_loss=0.05263, over 4803.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2629, pruned_loss=0.04074, over 938429.04 frames. ], batch size: 15, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:14:52,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=254949.33333333334, ans=0.125 +2024-07-29 07:14:55,078 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.607e+01 6.088e+01 6.745e+01 8.858e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 07:15:12,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254989.33333333334, ans=0.1 +2024-07-29 07:15:18,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=255002.66666666666, ans=0.0 +2024-07-29 07:15:18,472 INFO [train.py:1114] (2/4) Epoch 19, batch 7250, loss[loss=0.1515, simple_loss=0.2416, pruned_loss=0.03071, over 4852.00 frames. 
], tot_loss[loss=0.1719, simple_loss=0.2626, pruned_loss=0.04058, over 939804.21 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:15:25,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=255016.0, ans=0.125 +2024-07-29 07:15:27,986 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.94 vs. limit=15.0 +2024-07-29 07:15:45,355 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.49 vs. limit=22.5 +2024-07-29 07:15:47,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=255056.0, ans=0.04949747468305833 +2024-07-29 07:15:50,914 INFO [train.py:1114] (2/4) Epoch 19, batch 7300, loss[loss=0.1563, simple_loss=0.2392, pruned_loss=0.03676, over 4839.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.04006, over 939999.50 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:15:51,642 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255069.33333333334, ans=0.0 +2024-07-29 07:16:00,098 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.606e+01 6.073e+01 6.714e+01 9.388e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-29 07:16:01,962 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=255082.66666666666, ans=15.0 +2024-07-29 07:16:08,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=255096.0, ans=0.0 +2024-07-29 07:16:15,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255109.33333333334, ans=0.1 +2024-07-29 07:16:23,624 INFO [train.py:1114] (2/4) Epoch 19, batch 7350, loss[loss=0.1592, simple_loss=0.2512, pruned_loss=0.03358, over 4645.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2621, pruned_loss=0.03946, over 939215.98 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:16:29,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=255136.0, ans=0.95 +2024-07-29 07:16:33,332 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.01 vs. limit=22.5 +2024-07-29 07:17:00,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255176.0, ans=0.125 +2024-07-29 07:17:01,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.66 vs. limit=15.0 +2024-07-29 07:17:02,000 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=255176.0, ans=0.125 +2024-07-29 07:17:10,011 INFO [train.py:1114] (2/4) Epoch 19, batch 7400, loss[loss=0.1649, simple_loss=0.2615, pruned_loss=0.03413, over 4691.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2616, pruned_loss=0.03943, over 940289.81 frames. 
], batch size: 13, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:17:15,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=255202.66666666666, ans=0.025 +2024-07-29 07:17:19,331 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 5.806e+01 6.617e+01 8.276e+01 1.312e+02, threshold=1.323e+02, percent-clipped=3.0 +2024-07-29 07:17:22,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=255216.0, ans=0.125 +2024-07-29 07:17:25,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=255229.33333333334, ans=0.0 +2024-07-29 07:17:28,060 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=255229.33333333334, ans=0.2 +2024-07-29 07:17:42,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=255269.33333333334, ans=0.2 +2024-07-29 07:17:42,984 INFO [train.py:1114] (2/4) Epoch 19, batch 7450, loss[loss=0.1539, simple_loss=0.2312, pruned_loss=0.03836, over 4612.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2601, pruned_loss=0.03916, over 937666.44 frames. ], batch size: 11, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:17:45,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=255269.33333333334, ans=0.0 +2024-07-29 07:17:51,654 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=255282.66666666666, ans=0.125 +2024-07-29 07:17:52,963 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=255282.66666666666, ans=0.125 +2024-07-29 07:17:58,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=255296.0, ans=0.025 +2024-07-29 07:18:07,514 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=255309.33333333334, ans=0.0 +2024-07-29 07:18:09,402 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=255322.66666666666, ans=0.125 +2024-07-29 07:18:13,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=255322.66666666666, ans=0.125 +2024-07-29 07:18:15,941 INFO [train.py:1114] (2/4) Epoch 19, batch 7500, loss[loss=0.2019, simple_loss=0.2932, pruned_loss=0.05537, over 3441.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2601, pruned_loss=0.03959, over 936173.00 frames. ], batch size: 35, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:18:24,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=255349.33333333334, ans=0.2 +2024-07-29 07:18:25,197 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.501e+01 5.980e+01 6.814e+01 1.020e+02, threshold=1.196e+02, percent-clipped=0.0 +2024-07-29 07:18:48,955 INFO [train.py:1114] (2/4) Epoch 19, batch 7550, loss[loss=0.1776, simple_loss=0.2796, pruned_loss=0.03785, over 4621.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2625, pruned_loss=0.04047, over 936427.71 frames. 
], batch size: 17, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:18:49,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.15 vs. limit=12.0 +2024-07-29 07:18:50,929 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255402.66666666666, ans=0.1 +2024-07-29 07:19:02,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=255416.0, ans=0.0 +2024-07-29 07:19:03,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.52 vs. limit=22.5 +2024-07-29 07:19:11,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255442.66666666666, ans=0.125 +2024-07-29 07:19:17,534 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=255456.0, ans=0.125 +2024-07-29 07:19:34,136 INFO [train.py:1114] (2/4) Epoch 19, batch 7600, loss[loss=0.201, simple_loss=0.2815, pruned_loss=0.06025, over 4801.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03991, over 938194.39 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:21:14,856 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.357e+01 5.885e+01 6.503e+01 9.082e+01, threshold=1.177e+02, percent-clipped=0.0 +2024-07-29 07:21:27,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255509.33333333334, ans=0.0 +2024-07-29 07:21:29,459 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=255509.33333333334, ans=0.125 +2024-07-29 07:21:29,650 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.13 vs. limit=10.0 +2024-07-29 07:21:32,703 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:21:36,801 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=255522.66666666666, ans=0.0 +2024-07-29 07:21:38,457 INFO [train.py:1114] (2/4) Epoch 19, batch 7650, loss[loss=0.1341, simple_loss=0.2302, pruned_loss=0.01903, over 4939.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2616, pruned_loss=0.03963, over 937454.04 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:21:43,950 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255536.0, ans=0.125 +2024-07-29 07:21:46,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-07-29 07:21:53,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=255562.66666666666, ans=0.125 +2024-07-29 07:21:53,483 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.44 vs. 
limit=12.0 +2024-07-29 07:21:54,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=255562.66666666666, ans=0.0 +2024-07-29 07:21:54,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=255562.66666666666, ans=0.125 +2024-07-29 07:21:57,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=255576.0, ans=0.025 +2024-07-29 07:22:11,509 INFO [train.py:1114] (2/4) Epoch 19, batch 7700, loss[loss=0.1916, simple_loss=0.2816, pruned_loss=0.05087, over 4701.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2623, pruned_loss=0.0399, over 934449.73 frames. ], batch size: 13, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:22:12,353 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=255602.66666666666, ans=0.2 +2024-07-29 07:22:21,009 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.495e+01 5.903e+01 6.797e+01 9.764e+01, threshold=1.181e+02, percent-clipped=0.0 +2024-07-29 07:22:24,525 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=255616.0, ans=0.0 +2024-07-29 07:22:27,421 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=255629.33333333334, ans=0.125 +2024-07-29 07:22:28,236 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-07-29 07:22:29,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=255629.33333333334, ans=0.125 +2024-07-29 07:22:33,933 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255642.66666666666, ans=0.1 +2024-07-29 07:22:46,048 INFO [train.py:1114] (2/4) Epoch 19, batch 7750, loss[loss=0.1699, simple_loss=0.2585, pruned_loss=0.04069, over 4925.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2636, pruned_loss=0.03991, over 936208.11 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:22:59,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=255682.66666666666, ans=0.0 +2024-07-29 07:23:08,828 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.86 vs. limit=15.0 +2024-07-29 07:23:11,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=255709.33333333334, ans=0.0 +2024-07-29 07:23:13,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=255709.33333333334, ans=0.07 +2024-07-29 07:23:18,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255722.66666666666, ans=0.125 +2024-07-29 07:23:41,576 INFO [train.py:1114] (2/4) Epoch 19, batch 7800, loss[loss=0.1725, simple_loss=0.2736, pruned_loss=0.03569, over 4661.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2644, pruned_loss=0.04024, over 937705.89 frames. 
], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:23:49,374 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-29 07:23:49,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255736.0, ans=0.125 +2024-07-29 07:23:49,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=255736.0, ans=0.0 +2024-07-29 07:23:50,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=255736.0, ans=0.125 +2024-07-29 07:23:57,877 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 5.806e+01 6.397e+01 7.223e+01 9.492e+01, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:23:59,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255749.33333333334, ans=0.1 +2024-07-29 07:24:00,036 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=255749.33333333334, ans=0.0 +2024-07-29 07:24:16,055 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=255789.33333333334, ans=15.0 +2024-07-29 07:24:16,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=255789.33333333334, ans=0.2 +2024-07-29 07:24:17,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=255789.33333333334, ans=0.0 +2024-07-29 07:24:20,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.64 vs. limit=10.0 +2024-07-29 07:24:21,848 INFO [train.py:1114] (2/4) Epoch 19, batch 7850, loss[loss=0.1507, simple_loss=0.2361, pruned_loss=0.03261, over 4499.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2641, pruned_loss=0.04017, over 936725.08 frames. ], batch size: 10, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:24:26,793 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=255802.66666666666, ans=0.0 +2024-07-29 07:24:34,534 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-07-29 07:24:39,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=255816.0, ans=0.5 +2024-07-29 07:24:43,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255829.33333333334, ans=0.125 +2024-07-29 07:24:58,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255856.0, ans=0.125 +2024-07-29 07:24:59,390 INFO [train.py:1114] (2/4) Epoch 19, batch 7900, loss[loss=0.1885, simple_loss=0.2835, pruned_loss=0.04674, over 4874.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2651, pruned_loss=0.04035, over 933557.10 frames. 
], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:25:19,920 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+01 5.757e+01 6.184e+01 6.980e+01 1.069e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:25:22,980 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.04 vs. limit=15.0 +2024-07-29 07:25:25,812 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.02 vs. limit=15.0 +2024-07-29 07:25:26,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=255896.0, ans=0.125 +2024-07-29 07:25:26,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=255896.0, ans=0.125 +2024-07-29 07:26:55,415 INFO [train.py:1114] (2/4) Epoch 19, batch 7950, loss[loss=0.2089, simple_loss=0.2968, pruned_loss=0.06047, over 3371.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2635, pruned_loss=0.03954, over 935566.09 frames. ], batch size: 35, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:26:56,172 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=255936.0, ans=0.125 +2024-07-29 07:27:00,042 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=255936.0, ans=0.125 +2024-07-29 07:27:21,824 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255949.33333333334, ans=0.1 +2024-07-29 07:27:28,741 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=255962.66666666666, ans=0.125 +2024-07-29 07:27:40,889 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.21 vs. limit=22.5 +2024-07-29 07:27:44,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=255989.33333333334, ans=0.125 +2024-07-29 07:27:50,622 INFO [train.py:1114] (2/4) Epoch 19, batch 8000, loss[loss=0.1536, simple_loss=0.2373, pruned_loss=0.03489, over 4613.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2623, pruned_loss=0.03961, over 934312.64 frames. 
], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:27:52,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=256002.66666666666, ans=0.125 +2024-07-29 07:27:52,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=256002.66666666666, ans=0.2 +2024-07-29 07:27:55,928 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256002.66666666666, ans=0.1 +2024-07-29 07:27:58,493 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=256016.0, ans=0.0 +2024-07-29 07:28:01,417 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.673e+01 6.447e+01 7.571e+01 1.092e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 07:28:02,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=256016.0, ans=0.2 +2024-07-29 07:28:06,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=256029.33333333334, ans=0.125 +2024-07-29 07:28:14,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=256042.66666666666, ans=0.07 +2024-07-29 07:28:17,829 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=256056.0, ans=0.125 +2024-07-29 07:28:19,082 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=256056.0, ans=0.125 +2024-07-29 07:28:24,184 INFO [train.py:1114] (2/4) Epoch 19, batch 8050, loss[loss=0.1677, simple_loss=0.2666, pruned_loss=0.03441, over 4808.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2632, pruned_loss=0.03968, over 934250.40 frames. ], batch size: 14, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:28:26,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256069.33333333334, ans=0.1 +2024-07-29 07:28:26,947 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=256069.33333333334, ans=0.025 +2024-07-29 07:28:36,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=256082.66666666666, ans=0.2 +2024-07-29 07:28:37,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256096.0, ans=0.1 +2024-07-29 07:28:38,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256096.0, ans=0.125 +2024-07-29 07:28:49,435 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-29 07:28:55,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=256122.66666666666, ans=0.0 +2024-07-29 07:28:56,869 INFO [train.py:1114] (2/4) Epoch 19, batch 8100, loss[loss=0.1832, simple_loss=0.2755, pruned_loss=0.04544, over 4795.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2637, pruned_loss=0.04012, over 934295.26 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:29:01,059 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.05 vs. limit=22.5 +2024-07-29 07:29:01,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256136.0, ans=0.1 +2024-07-29 07:29:06,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.781e+01 6.315e+01 7.245e+01 1.091e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 07:29:08,114 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.79 vs. limit=10.0 +2024-07-29 07:29:17,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=12.0 +2024-07-29 07:29:29,462 INFO [train.py:1114] (2/4) Epoch 19, batch 8150, loss[loss=0.1734, simple_loss=0.2712, pruned_loss=0.03781, over 4810.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.263, pruned_loss=0.04002, over 937608.04 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:29:32,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=256202.66666666666, ans=0.125 +2024-07-29 07:29:40,381 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0 +2024-07-29 07:29:45,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256229.33333333334, ans=0.1 +2024-07-29 07:29:46,622 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:29:46,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=256229.33333333334, ans=0.2 +2024-07-29 07:29:52,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256242.66666666666, ans=0.125 +2024-07-29 07:29:55,138 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=256242.66666666666, ans=0.0 +2024-07-29 07:29:55,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256242.66666666666, ans=0.125 +2024-07-29 07:30:00,054 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.76 vs. limit=15.0 +2024-07-29 07:30:03,065 INFO [train.py:1114] (2/4) Epoch 19, batch 8200, loss[loss=0.1606, simple_loss=0.2521, pruned_loss=0.03453, over 4797.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2629, pruned_loss=0.03973, over 938481.55 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:30:06,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.14 vs. 
limit=12.0 +2024-07-29 07:30:10,151 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=256282.66666666666, ans=0.0 +2024-07-29 07:30:12,538 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.702e+01 6.206e+01 7.193e+01 9.525e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 07:30:23,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=256309.33333333334, ans=0.125 +2024-07-29 07:30:32,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=256322.66666666666, ans=0.2 +2024-07-29 07:30:34,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256336.0, ans=0.125 +2024-07-29 07:30:35,088 INFO [train.py:1114] (2/4) Epoch 19, batch 8250, loss[loss=0.1736, simple_loss=0.2583, pruned_loss=0.04449, over 4898.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2628, pruned_loss=0.04002, over 938728.57 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:30:52,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256362.66666666666, ans=0.0 +2024-07-29 07:31:07,566 INFO [train.py:1114] (2/4) Epoch 19, batch 8300, loss[loss=0.1826, simple_loss=0.2697, pruned_loss=0.04781, over 4891.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2641, pruned_loss=0.04054, over 938386.30 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:31:16,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=256416.0, ans=0.0 +2024-07-29 07:31:17,051 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.581e+01 6.136e+01 6.669e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 07:31:25,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256429.33333333334, ans=0.1 +2024-07-29 07:31:34,499 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256456.0, ans=0.0 +2024-07-29 07:31:39,839 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=256456.0, ans=0.125 +2024-07-29 07:31:41,645 INFO [train.py:1114] (2/4) Epoch 19, batch 8350, loss[loss=0.1697, simple_loss=0.2618, pruned_loss=0.03883, over 4790.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2634, pruned_loss=0.04022, over 941275.34 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:31:52,180 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:31:52,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=256482.66666666666, ans=0.2 +2024-07-29 07:32:00,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256496.0, ans=0.125 +2024-07-29 07:32:14,740 INFO [train.py:1114] (2/4) Epoch 19, batch 8400, loss[loss=0.1777, simple_loss=0.26, pruned_loss=0.04767, over 4784.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2645, pruned_loss=0.0412, over 940326.03 frames. 
], batch size: 12, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:32:22,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=256549.33333333334, ans=0.125 +2024-07-29 07:32:24,409 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.693e+01 5.636e+01 6.331e+01 6.924e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:32:34,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=256562.66666666666, ans=0.5 +2024-07-29 07:32:43,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=256589.33333333334, ans=0.125 +2024-07-29 07:32:49,193 INFO [train.py:1114] (2/4) Epoch 19, batch 8450, loss[loss=0.1854, simple_loss=0.2786, pruned_loss=0.04615, over 4808.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2659, pruned_loss=0.04136, over 939105.17 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:33:03,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=256616.0, ans=0.2 +2024-07-29 07:33:08,114 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=256629.33333333334, ans=0.1 +2024-07-29 07:33:23,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=256656.0, ans=0.025 +2024-07-29 07:33:28,014 INFO [train.py:1114] (2/4) Epoch 19, batch 8500, loss[loss=0.1575, simple_loss=0.2352, pruned_loss=0.03993, over 4606.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2635, pruned_loss=0.04048, over 939107.84 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:33:31,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=256669.33333333334, ans=0.0 +2024-07-29 07:33:35,777 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=256682.66666666666, ans=0.125 +2024-07-29 07:33:37,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=256682.66666666666, ans=0.0 +2024-07-29 07:33:37,647 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.699e+01 6.220e+01 6.936e+01 1.043e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 07:33:38,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256682.66666666666, ans=0.125 +2024-07-29 07:33:45,179 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=256696.0, ans=0.0 +2024-07-29 07:33:46,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256696.0, ans=0.1 +2024-07-29 07:34:03,202 INFO [train.py:1114] (2/4) Epoch 19, batch 8550, loss[loss=0.1549, simple_loss=0.242, pruned_loss=0.03392, over 4813.00 frames. ], tot_loss[loss=0.172, simple_loss=0.263, pruned_loss=0.04049, over 940155.79 frames. 
], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:34:09,202 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256749.33333333334, ans=0.125 +2024-07-29 07:34:28,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=256762.66666666666, ans=0.025 +2024-07-29 07:34:29,270 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=256762.66666666666, ans=0.125 +2024-07-29 07:34:33,978 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=256776.0, ans=0.125 +2024-07-29 07:34:52,555 INFO [train.py:1114] (2/4) Epoch 19, batch 8600, loss[loss=0.1701, simple_loss=0.2673, pruned_loss=0.03645, over 4808.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.04057, over 939736.04 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:34:52,746 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=256802.66666666666, ans=0.125 +2024-07-29 07:35:04,896 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.627e+01 6.563e+01 7.545e+01 1.202e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-29 07:35:12,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-29 07:35:14,050 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=12.0 +2024-07-29 07:35:16,779 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.27 vs. limit=15.0 +2024-07-29 07:35:17,508 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256842.66666666666, ans=0.125 +2024-07-29 07:35:18,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=256842.66666666666, ans=0.125 +2024-07-29 07:35:26,780 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=256856.0, ans=0.125 +2024-07-29 07:36:04,096 INFO [train.py:1114] (2/4) Epoch 19, batch 8650, loss[loss=0.1799, simple_loss=0.2773, pruned_loss=0.04126, over 4906.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2636, pruned_loss=0.04087, over 941217.59 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:36:09,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=256869.33333333334, ans=0.07 +2024-07-29 07:36:16,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256882.66666666666, ans=0.0 +2024-07-29 07:36:36,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=256922.66666666666, ans=0.0 +2024-07-29 07:36:43,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=256922.66666666666, ans=0.2 +2024-07-29 07:36:44,215 INFO [train.py:1114] (2/4) Epoch 19, batch 8700, loss[loss=0.183, simple_loss=0.269, pruned_loss=0.04852, over 4762.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2645, pruned_loss=0.04113, over 938722.86 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:36:45,314 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.43 vs. limit=15.0 +2024-07-29 07:36:45,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=256936.0, ans=0.0 +2024-07-29 07:36:53,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256949.33333333334, ans=0.0 +2024-07-29 07:36:53,801 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.735e+01 6.299e+01 7.253e+01 1.043e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:37:00,664 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.72 vs. limit=10.0 +2024-07-29 07:37:01,330 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.21 vs. limit=15.0 +2024-07-29 07:37:01,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256962.66666666666, ans=0.1 +2024-07-29 07:37:01,943 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.91 vs. limit=22.5 +2024-07-29 07:37:16,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=256976.0, ans=0.2 +2024-07-29 07:37:18,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=256989.33333333334, ans=0.1 +2024-07-29 07:37:23,598 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.44 vs. limit=15.0 +2024-07-29 07:37:27,838 INFO [train.py:1114] (2/4) Epoch 19, batch 8750, loss[loss=0.1709, simple_loss=0.2688, pruned_loss=0.03652, over 4682.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04085, over 936889.10 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:37:46,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=257002.66666666666, ans=0.125 +2024-07-29 07:37:48,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257016.0, ans=0.1 +2024-07-29 07:37:49,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=257016.0, ans=0.2 +2024-07-29 07:37:52,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff3.min_abs, batch_count=257016.0, ans=0.2 +2024-07-29 07:38:02,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=257042.66666666666, ans=0.125 +2024-07-29 07:38:06,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257042.66666666666, ans=0.125 +2024-07-29 07:38:08,020 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=257056.0, ans=0.125 +2024-07-29 07:38:15,035 INFO [train.py:1114] (2/4) Epoch 19, batch 8800, loss[loss=0.1734, simple_loss=0.2707, pruned_loss=0.03803, over 4936.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2644, pruned_loss=0.04077, over 937993.78 frames. ], batch size: 14, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:38:21,570 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257069.33333333334, ans=0.1 +2024-07-29 07:38:24,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.10 vs. limit=6.0 +2024-07-29 07:38:38,090 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.89 vs. limit=5.0 +2024-07-29 07:38:38,830 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.487e+01 5.702e+01 6.437e+01 7.118e+01 1.132e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 07:38:44,044 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257082.66666666666, ans=0.0 +2024-07-29 07:38:56,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=257109.33333333334, ans=0.125 +2024-07-29 07:39:08,182 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-29 07:39:17,575 INFO [train.py:1114] (2/4) Epoch 19, batch 8850, loss[loss=0.1834, simple_loss=0.2824, pruned_loss=0.04226, over 4519.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2637, pruned_loss=0.0408, over 932981.33 frames. ], batch size: 21, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:39:21,849 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.38 vs. 
limit=22.5 +2024-07-29 07:39:30,724 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=257149.33333333334, ans=0.125 +2024-07-29 07:39:43,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=257176.0, ans=0.125 +2024-07-29 07:39:48,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257189.33333333334, ans=0.125 +2024-07-29 07:39:52,640 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=257189.33333333334, ans=0.125 +2024-07-29 07:39:54,420 INFO [train.py:1114] (2/4) Epoch 19, batch 8900, loss[loss=0.1352, simple_loss=0.2184, pruned_loss=0.02606, over 4932.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04031, over 930680.30 frames. ], batch size: 12, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:40:01,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=257202.66666666666, ans=0.125 +2024-07-29 07:40:02,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=257202.66666666666, ans=0.125 +2024-07-29 07:40:19,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=257216.0, ans=0.0 +2024-07-29 07:40:20,223 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.736e+01 6.296e+01 7.033e+01 9.064e+01, threshold=1.259e+02, percent-clipped=0.0 +2024-07-29 07:42:50,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257216.0, ans=0.125 +2024-07-29 07:43:04,131 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.55 vs. limit=15.0 +2024-07-29 07:43:04,878 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.09 vs. limit=15.0 +2024-07-29 07:43:14,244 INFO [train.py:1114] (2/4) Epoch 19, batch 8950, loss[loss=0.1696, simple_loss=0.2631, pruned_loss=0.03804, over 4479.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2626, pruned_loss=0.04008, over 931325.21 frames. ], batch size: 21, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:43:21,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=257282.66666666666, ans=0.125 +2024-07-29 07:43:23,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=257282.66666666666, ans=0.125 +2024-07-29 07:43:23,851 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=12.0 +2024-07-29 07:43:24,424 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.83 vs. limit=15.0 +2024-07-29 07:43:32,099 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.33 vs. 
limit=15.0 +2024-07-29 07:43:35,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=257309.33333333334, ans=0.025 +2024-07-29 07:43:38,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257309.33333333334, ans=0.0 +2024-07-29 07:43:47,545 INFO [train.py:1114] (2/4) Epoch 19, batch 9000, loss[loss=0.1833, simple_loss=0.2762, pruned_loss=0.04521, over 4642.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03993, over 934096.25 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:43:47,546 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 07:43:52,944 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5750, 3.8331, 3.8130, 4.3599], device='cuda:2') +2024-07-29 07:43:57,582 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.3682, 3.3506, 2.1871, 3.5234, 3.1124, 3.2228, 3.7997, 3.6799], + device='cuda:2') +2024-07-29 07:43:59,127 INFO [train.py:1146] (2/4) Epoch 19, validation: loss=0.1612, simple_loss=0.2635, pruned_loss=0.02943, over 944034.00 frames. +2024-07-29 07:43:59,127 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 07:44:01,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=257336.0, ans=0.025 +2024-07-29 07:44:03,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=257336.0, ans=0.125 +2024-07-29 07:44:04,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=257336.0, ans=0.125 +2024-07-29 07:44:07,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0 +2024-07-29 07:44:08,783 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.623e+01 6.391e+01 7.404e+01 1.117e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 07:44:10,359 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=257349.33333333334, ans=0.1 +2024-07-29 07:44:13,752 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=257362.66666666666, ans=0.125 +2024-07-29 07:44:27,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=257389.33333333334, ans=0.125 +2024-07-29 07:44:31,557 INFO [train.py:1114] (2/4) Epoch 19, batch 9050, loss[loss=0.1641, simple_loss=0.2434, pruned_loss=0.04238, over 4504.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2617, pruned_loss=0.04006, over 934919.95 frames. 
], batch size: 10, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:44:32,265 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257402.66666666666, ans=0.1 +2024-07-29 07:44:33,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=257402.66666666666, ans=0.0 +2024-07-29 07:44:36,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257402.66666666666, ans=0.1 +2024-07-29 07:44:37,494 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257416.0, ans=0.1 +2024-07-29 07:44:37,647 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.14 vs. limit=15.0 +2024-07-29 07:44:44,347 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=15.0 +2024-07-29 07:44:48,861 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.18 vs. limit=15.0 +2024-07-29 07:44:49,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257429.33333333334, ans=0.125 +2024-07-29 07:44:58,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=257442.66666666666, ans=0.0 +2024-07-29 07:45:04,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=257456.0, ans=0.025 +2024-07-29 07:45:10,210 INFO [train.py:1114] (2/4) Epoch 19, batch 9100, loss[loss=0.1662, simple_loss=0.2675, pruned_loss=0.03241, over 4934.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.04005, over 937263.79 frames. ], batch size: 14, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:45:11,287 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.28 vs. limit=15.0 +2024-07-29 07:45:18,969 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=257482.66666666666, ans=0.125 +2024-07-29 07:45:19,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=257482.66666666666, ans=0.125 +2024-07-29 07:45:26,410 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.674e+01 6.326e+01 7.504e+01 9.644e+01, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 07:45:28,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=257482.66666666666, ans=0.0 +2024-07-29 07:45:41,788 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=257509.33333333334, ans=0.0 +2024-07-29 07:45:57,442 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=257536.0, ans=0.125 +2024-07-29 07:45:57,962 INFO [train.py:1114] (2/4) Epoch 19, batch 9150, loss[loss=0.1916, simple_loss=0.2817, pruned_loss=0.05072, over 4813.00 frames. 
], tot_loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.04051, over 935506.59 frames. ], batch size: 14, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:46:03,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257536.0, ans=0.125 +2024-07-29 07:46:22,841 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-29 07:46:34,021 INFO [train.py:1114] (2/4) Epoch 19, batch 9200, loss[loss=0.1706, simple_loss=0.2538, pruned_loss=0.04368, over 4854.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2638, pruned_loss=0.04091, over 937367.33 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:46:36,124 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257602.66666666666, ans=0.0 +2024-07-29 07:46:37,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=257602.66666666666, ans=0.0 +2024-07-29 07:46:42,428 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.33 vs. limit=22.5 +2024-07-29 07:46:42,503 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.85 vs. limit=22.5 +2024-07-29 07:46:43,421 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.777e+01 6.391e+01 7.233e+01 9.749e+01, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 07:46:43,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=257616.0, ans=0.1 +2024-07-29 07:46:45,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0 +2024-07-29 07:46:48,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=257629.33333333334, ans=22.5 +2024-07-29 07:46:50,991 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=257629.33333333334, ans=0.125 +2024-07-29 07:46:52,790 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=257642.66666666666, ans=0.0 +2024-07-29 07:46:53,448 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=257642.66666666666, ans=0.09899494936611666 +2024-07-29 07:47:05,829 INFO [train.py:1114] (2/4) Epoch 19, batch 9250, loss[loss=0.2034, simple_loss=0.2955, pruned_loss=0.05566, over 4627.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2634, pruned_loss=0.04065, over 938134.48 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:47:07,220 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=257669.33333333334, ans=0.0 +2024-07-29 07:47:08,766 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. 
limit=15.0 +2024-07-29 07:47:16,068 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=257682.66666666666, ans=0.0 +2024-07-29 07:47:19,765 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.57 vs. limit=6.0 +2024-07-29 07:47:24,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=257696.0, ans=0.07 +2024-07-29 07:47:28,942 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=257709.33333333334, ans=0.125 +2024-07-29 07:47:38,237 INFO [train.py:1114] (2/4) Epoch 19, batch 9300, loss[loss=0.1474, simple_loss=0.2439, pruned_loss=0.02547, over 4766.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2643, pruned_loss=0.04153, over 937467.12 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:47:38,982 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=257736.0, ans=0.125 +2024-07-29 07:47:47,630 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 6.148e+01 7.388e+01 1.007e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 07:47:56,626 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=257762.66666666666, ans=0.05 +2024-07-29 07:47:58,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257776.0, ans=0.0 +2024-07-29 07:47:59,657 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:48:12,960 INFO [train.py:1114] (2/4) Epoch 19, batch 9350, loss[loss=0.137, simple_loss=0.2251, pruned_loss=0.02447, over 4812.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2638, pruned_loss=0.04111, over 934476.81 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:48:18,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257802.66666666666, ans=0.0 +2024-07-29 07:48:18,778 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=257816.0, ans=0.0 +2024-07-29 07:48:19,963 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:48:22,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=257816.0, ans=0.0 +2024-07-29 07:48:26,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=257829.33333333334, ans=0.125 +2024-07-29 07:48:30,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=257829.33333333334, ans=0.2 +2024-07-29 07:48:41,572 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=257856.0, ans=0.125 +2024-07-29 07:48:44,716 INFO [train.py:1114] (2/4) Epoch 19, batch 9400, loss[loss=0.1735, simple_loss=0.2715, pruned_loss=0.03776, over 4687.00 frames. 
], tot_loss[loss=0.1731, simple_loss=0.2636, pruned_loss=0.04134, over 932581.35 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:48:48,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=257869.33333333334, ans=0.125 +2024-07-29 07:48:54,088 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.728e+01 6.199e+01 7.519e+01 1.174e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 07:48:59,333 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=257896.0, ans=0.0 +2024-07-29 07:49:01,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=257896.0, ans=0.04949747468305833 +2024-07-29 07:49:04,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257909.33333333334, ans=0.125 +2024-07-29 07:49:09,864 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=257922.66666666666, ans=0.125 +2024-07-29 07:49:10,789 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.78 vs. limit=15.0 +2024-07-29 07:49:16,041 INFO [train.py:1114] (2/4) Epoch 19, batch 9450, loss[loss=0.1374, simple_loss=0.2213, pruned_loss=0.02672, over 4813.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2638, pruned_loss=0.04123, over 931772.75 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:49:41,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=257962.66666666666, ans=0.025 +2024-07-29 07:49:46,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=257976.0, ans=0.125 +2024-07-29 07:49:55,440 INFO [train.py:1114] (2/4) Epoch 19, batch 9500, loss[loss=0.1564, simple_loss=0.2539, pruned_loss=0.02946, over 4703.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2632, pruned_loss=0.04066, over 933804.92 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:50:02,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258016.0, ans=0.1 +2024-07-29 07:50:04,906 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.577e+01 5.446e+01 5.959e+01 6.735e+01 9.596e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-29 07:50:07,594 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=258029.33333333334, ans=0.0 +2024-07-29 07:50:08,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258029.33333333334, ans=0.1 +2024-07-29 07:50:28,084 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-07-29 07:50:29,000 INFO [train.py:1114] (2/4) Epoch 19, batch 9550, loss[loss=0.1514, simple_loss=0.2459, pruned_loss=0.02843, over 4772.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2635, pruned_loss=0.04044, over 931688.73 frames. 
], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:01,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258109.33333333334, ans=0.1 +2024-07-29 07:51:02,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=258122.66666666666, ans=0.125 +2024-07-29 07:51:12,514 INFO [train.py:1114] (2/4) Epoch 19, batch 9600, loss[loss=0.2242, simple_loss=0.3074, pruned_loss=0.0705, over 3628.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2639, pruned_loss=0.04021, over 930853.48 frames. ], batch size: 35, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:13,480 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.08 vs. limit=6.0 +2024-07-29 07:51:13,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=258136.0, ans=0.0 +2024-07-29 07:51:21,992 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.937e+01 6.386e+01 7.744e+01 1.025e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 07:51:22,177 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=258149.33333333334, ans=0.2 +2024-07-29 07:51:29,537 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258162.66666666666, ans=0.1 +2024-07-29 07:51:33,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=258176.0, ans=0.125 +2024-07-29 07:51:38,769 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.54 vs. limit=22.5 +2024-07-29 07:51:41,856 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=258189.33333333334, ans=0.2 +2024-07-29 07:51:44,393 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.85 vs. limit=12.0 +2024-07-29 07:51:46,148 INFO [train.py:1114] (2/4) Epoch 19, batch 9650, loss[loss=0.1694, simple_loss=0.2684, pruned_loss=0.03524, over 4835.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2643, pruned_loss=0.04061, over 927101.46 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:46,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258202.66666666666, ans=0.1 +2024-07-29 07:51:53,653 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=258216.0, ans=0.125 +2024-07-29 07:51:54,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=258216.0, ans=0.125 +2024-07-29 07:51:56,825 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=258216.0, ans=0.125 +2024-07-29 07:51:57,802 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. 
limit=15.0 +2024-07-29 07:52:07,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=258242.66666666666, ans=0.04949747468305833 +2024-07-29 07:52:09,987 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. limit=6.0 +2024-07-29 07:52:13,479 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=258256.0, ans=0.125 +2024-07-29 07:52:17,722 INFO [train.py:1114] (2/4) Epoch 19, batch 9700, loss[loss=0.1645, simple_loss=0.2511, pruned_loss=0.03894, over 4148.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2644, pruned_loss=0.04066, over 924976.09 frames. ], batch size: 25, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:52:26,895 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.780e+01 6.621e+01 7.551e+01 1.114e+02, threshold=1.324e+02, percent-clipped=0.0 +2024-07-29 07:52:36,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=258309.33333333334, ans=0.0 +2024-07-29 07:52:48,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258322.66666666666, ans=0.1 +2024-07-29 07:52:52,000 INFO [train.py:1114] (2/4) Epoch 19, batch 9750, loss[loss=0.2218, simple_loss=0.3117, pruned_loss=0.06592, over 4681.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.264, pruned_loss=0.04036, over 925477.08 frames. ], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:52:56,884 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258336.0, ans=0.125 +2024-07-29 07:53:08,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258362.66666666666, ans=0.1 +2024-07-29 07:53:08,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=258362.66666666666, ans=0.025 +2024-07-29 07:53:16,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=258376.0, ans=0.125 +2024-07-29 07:53:57,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=258376.0, ans=0.5 +2024-07-29 07:53:58,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.06 vs. limit=15.0 +2024-07-29 07:54:03,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=258389.33333333334, ans=0.125 +2024-07-29 07:54:17,488 INFO [train.py:1114] (2/4) Epoch 19, batch 9800, loss[loss=0.1388, simple_loss=0.2329, pruned_loss=0.02239, over 4709.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2628, pruned_loss=0.04027, over 925545.40 frames. 
], batch size: 12, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:54:27,266 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.598e+01 6.395e+01 7.278e+01 1.117e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:54:27,414 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258416.0, ans=0.125 +2024-07-29 07:54:28,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.whiten.whitening_limit, batch_count=258416.0, ans=12.0 +2024-07-29 07:54:30,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=258429.33333333334, ans=0.0 +2024-07-29 07:54:34,104 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.31 vs. limit=15.0 +2024-07-29 07:54:39,795 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-07-29 07:54:41,310 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=258442.66666666666, ans=0.1 +2024-07-29 07:54:44,452 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=258456.0, ans=0.95 +2024-07-29 07:54:49,167 INFO [train.py:1114] (2/4) Epoch 19, batch 9850, loss[loss=0.1697, simple_loss=0.2604, pruned_loss=0.03951, over 4891.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2629, pruned_loss=0.04032, over 927472.65 frames. ], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:55:15,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258482.66666666666, ans=0.125 +2024-07-29 07:55:16,072 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:55:16,727 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=258482.66666666666, ans=0.125 +2024-07-29 07:55:24,642 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.60 vs. limit=10.0 +2024-07-29 07:55:33,136 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=258522.66666666666, ans=0.2 +2024-07-29 07:55:40,080 INFO [train.py:1114] (2/4) Epoch 19, batch 9900, loss[loss=0.204, simple_loss=0.2872, pruned_loss=0.06038, over 4814.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.264, pruned_loss=0.04124, over 927021.93 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:55:40,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258536.0, ans=0.125 +2024-07-29 07:55:49,458 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.747e+01 6.549e+01 7.522e+01 9.931e+01, threshold=1.310e+02, percent-clipped=0.0 +2024-07-29 07:55:52,517 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.27 vs. 
limit=6.0 +2024-07-29 07:55:55,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=258562.66666666666, ans=0.125 +2024-07-29 07:55:59,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=258576.0, ans=0.2 +2024-07-29 07:56:02,391 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:56:05,588 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.47 vs. limit=15.0 +2024-07-29 07:56:08,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258589.33333333334, ans=0.125 +2024-07-29 07:56:10,860 INFO [train.py:1114] (2/4) Epoch 19, batch 9950, loss[loss=0.1516, simple_loss=0.2385, pruned_loss=0.03233, over 4802.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2644, pruned_loss=0.04142, over 929476.94 frames. ], batch size: 11, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:25,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=258629.33333333334, ans=0.05 +2024-07-29 07:56:31,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=258642.66666666666, ans=0.2 +2024-07-29 07:56:33,595 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258642.66666666666, ans=0.0 +2024-07-29 07:56:42,849 INFO [train.py:1114] (2/4) Epoch 19, batch 10000, loss[loss=0.1997, simple_loss=0.2995, pruned_loss=0.04996, over 4631.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2671, pruned_loss=0.04231, over 926851.04 frames. ], batch size: 16, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:44,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=258669.33333333334, ans=0.1 +2024-07-29 07:56:44,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-07-29 07:56:51,994 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.927e+01 5.763e+01 6.186e+01 6.988e+01 1.066e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:56:53,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.58 vs. limit=15.0 +2024-07-29 07:56:55,109 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=258696.0, ans=0.125 +2024-07-29 07:56:56,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=258696.0, ans=0.125 +2024-07-29 07:57:01,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=258709.33333333334, ans=0.2 +2024-07-29 07:57:12,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=258722.66666666666, ans=0.125 +2024-07-29 07:57:14,767 INFO [train.py:1114] (2/4) Epoch 19, batch 10050, loss[loss=0.2127, simple_loss=0.3016, pruned_loss=0.0619, over 3451.00 frames. 
], tot_loss[loss=0.1797, simple_loss=0.2708, pruned_loss=0.04431, over 915131.75 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:18,186 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=258736.0, ans=0.125 +2024-07-29 07:57:19,021 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=258736.0, ans=0.0 +2024-07-29 07:57:19,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258736.0, ans=0.125 +2024-07-29 07:57:27,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=258749.33333333334, ans=0.0 +2024-07-29 07:57:31,169 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.69 vs. limit=8.0 +2024-07-29 07:57:32,427 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=8.97 vs. limit=12.0 +2024-07-29 07:57:38,357 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=258776.0, ans=0.125 +2024-07-29 07:57:46,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=258789.33333333334, ans=0.125 +2024-07-29 07:57:46,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=10.98 vs. limit=15.0 +2024-07-29 07:57:48,578 INFO [train.py:1114] (2/4) Epoch 19, batch 10100, loss[loss=0.2008, simple_loss=0.2871, pruned_loss=0.05728, over 3277.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2747, pruned_loss=0.04817, over 863154.40 frames. ], batch size: 36, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:50,831 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258802.66666666666, ans=0.0 +2024-07-29 07:57:56,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258816.0, ans=0.1 +2024-07-29 07:57:58,540 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.634e+01 7.311e+01 7.897e+01 1.171e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-29 07:58:11,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258842.66666666666, ans=0.1 +2024-07-29 07:58:13,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258842.66666666666, ans=0.125 +2024-07-29 07:58:21,426 INFO [train.py:1114] (2/4) Epoch 19, batch 10150, loss[loss=0.2159, simple_loss=0.2945, pruned_loss=0.06861, over 3462.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2786, pruned_loss=0.05222, over 821817.80 frames. 
], batch size: 35, lr: 3.86e-03, grad_scale: 32.0
+2024-07-29 07:58:36,938 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=258882.66666666666, ans=0.125
+2024-07-29 07:58:38,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=258882.66666666666, ans=0.0
+2024-07-29 07:58:52,329 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=258896.0, ans=0.0
+2024-07-29 07:59:04,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=258922.66666666666, ans=0.125
+2024-07-29 07:59:07,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=258922.66666666666, ans=0.125
+2024-07-29 07:59:07,755 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.04 vs. limit=15.0
+2024-07-29 07:59:09,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=258936.0, ans=0.05
+2024-07-29 07:59:09,558 INFO [train.py:1114] (2/4) Epoch 19, batch 10200, loss[loss=0.1814, simple_loss=0.2611, pruned_loss=0.05084, over 3392.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2805, pruned_loss=0.05465, over 789005.83 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 32.0
+2024-07-29 07:59:09,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=258936.0, ans=0.125
+2024-07-29 07:59:17,992 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=258949.33333333334, ans=0.0
+2024-07-29 07:59:19,707 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.024e+01 7.484e+01 8.101e+01 1.029e+02, threshold=1.497e+02, percent-clipped=0.0
+2024-07-29 07:59:23,400 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=258962.66666666666, ans=0.0
+2024-07-29 08:02:27,573 INFO [train.py:1114] (2/4) Epoch 20, batch 0, loss[loss=0.1367, simple_loss=0.225, pruned_loss=0.02424, over 4851.00 frames. ], tot_loss[loss=0.1367, simple_loss=0.225, pruned_loss=0.02424, over 4851.00 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:02:27,573 INFO [train.py:1137] (2/4) Computing validation loss
+2024-07-29 08:02:31,680 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0337, 3.9381, 3.5324, 3.6583], device='cuda:2')
+2024-07-29 08:02:35,369 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.7306, 5.6631, 5.0702, 5.2785], device='cuda:2')
+2024-07-29 08:02:40,769 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.161, simple_loss=0.2644, pruned_loss=0.02883, over 944034.00 frames.
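The validation pass above ("Computing validation loss") also dumps `attn_weights_entropy` from zipformer.py, which appears to report one entropy value per attention head as a health check: a collapsed head scores near zero, while the 3.5 to 5.7 values logged here correspond to attention spread over a few hundred frames. A minimal sketch of that kind of diagnostic, assuming post-softmax weights of shape `(num_heads, query_len, key_len)`; the helper name and shapes are illustrative, not the actual zipformer.py code:

```python
import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    # attn: (num_heads, query_len, key_len), each row summing to 1 (post-softmax).
    # Shannon entropy over the key axis, averaged over query positions,
    # giving one scalar per head.
    entropy = -(attn * (attn + eps).log()).sum(dim=-1)
    return entropy.mean(dim=-1)

# A collapsed head (entropy near 0) vs. a maximally diffuse one (entropy = ln 4).
sharp = torch.eye(4).unsqueeze(0)       # each query attends to a single key
diffuse = torch.full((1, 4, 4), 0.25)   # uniform over all four keys
print(attn_weights_entropy(torch.cat([sharp, diffuse])))  # ~tensor([0.0000, 1.3863])
```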
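Most of the `scaling.py` entries in this log are `ScheduledFloat` records: hyperparameters such as dropout probabilities, skip rates, and balancer bounds whose current value (`ans`) is recomputed from `batch_count`. The sketch below shows one plausible shape for such a schedule, piecewise-linear between breakpoints and clamped at the ends; the class layout and the example breakpoints are assumptions for illustration, not the actual scaling.py implementation:

```python
import bisect

class ScheduledFloat:
    """Piecewise-linear function of batch_count, clamped at both ends (sketch only)."""

    def __init__(self, *points: tuple[float, float]):
        # points are (batch_count, value) breakpoints, e.g. (0, 0.3), (20000, 0.1).
        self.points = sorted(points)

    def value(self, batch_count: float) -> float:
        xs = [x for x, _ in self.points]
        if batch_count <= xs[0]:
            return self.points[0][1]
        if batch_count >= xs[-1]:
            return self.points[-1][1]
        i = bisect.bisect_right(xs, batch_count)
        (x0, y0), (x1, y1) = self.points[i - 1], self.points[i]
        return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)

# With these (assumed) breakpoints the schedule has long since flattened out,
# matching the constant ans=0.1 logged for the dropout_p entries near
# batch_count ~259000.
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(258882.67))  # 0.1
```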
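The `optim.py` WARNING lines give five quantiles (min, lower quartile, median, upper quartile, max) of recent gradient norms, a clipping threshold, and the share of batches clipped. In every such line in this section the threshold is exactly `Clipping_scale` (2.0) times the logged median (for the entry above, 2.0 × 7.484e+01 ≈ 1.497e+02), so the bookkeeping presumably looks something like the sketch below; the windowing and the threshold rule are inferred from the logged values, not taken from optim.py:

```python
import torch

def grad_norm_stats(norms: list[float], clipping_scale: float = 2.0):
    """Quantiles of a window of per-batch grad norms plus a clip threshold (sketch)."""
    t = torch.tensor(norms)
    # min, lower quartile, median, upper quartile, max
    q = torch.quantile(t, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()   # assumed rule: 2x the median
    percent_clipped = 100.0 * (t > threshold).float().mean().item()
    return q.tolist(), threshold, percent_clipped

# Feeding back the five quantiles logged above reproduces threshold ~1.497e+02
# and percent-clipped=0.0 (no norm in the window exceeds the threshold).
q, thr, pct = grad_norm_stats([60.22, 70.24, 74.84, 81.01, 102.9])
print(q, thr, pct)
```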
+2024-07-29 08:02:40,769 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB
+2024-07-29 08:03:02,386 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=259006.66666666666, ans=0.07
+2024-07-29 08:03:05,785 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259006.66666666666, ans=0.125
+2024-07-29 08:03:08,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259020.0, ans=0.125
+2024-07-29 08:03:10,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=259020.0, ans=0.05
+2024-07-29 08:03:14,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259020.0, ans=0.125
+2024-07-29 08:03:17,471 INFO [train.py:1114] (2/4) Epoch 20, batch 50, loss[loss=0.1546, simple_loss=0.2409, pruned_loss=0.03418, over 4633.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2664, pruned_loss=0.04113, over 206195.04 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:03:19,095 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.66 vs. limit=15.0
+2024-07-29 08:03:21,130 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259033.33333333334, ans=0.1
+2024-07-29 08:03:23,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=259046.66666666666, ans=0.125
+2024-07-29 08:03:43,769 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=259073.33333333334, ans=0.0
+2024-07-29 08:03:46,125 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.05 vs. limit=15.0
+2024-07-29 08:03:46,178 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.78 vs. limit=6.0
+2024-07-29 08:03:47,273 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.582e+01 6.158e+01 6.826e+01 9.280e+01, threshold=1.232e+02, percent-clipped=0.0
+2024-07-29 08:03:51,423 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=259100.0, ans=0.125
+2024-07-29 08:03:51,950 INFO [train.py:1114] (2/4) Epoch 20, batch 100, loss[loss=0.1887, simple_loss=0.2741, pruned_loss=0.05166, over 4631.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.265, pruned_loss=0.0407, over 365379.89 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:03:57,574 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.78 vs.
limit=15.0 +2024-07-29 08:04:00,610 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259113.33333333334, ans=0.1 +2024-07-29 08:04:12,772 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=259126.66666666666, ans=0.0 +2024-07-29 08:04:18,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=259140.0, ans=0.0 +2024-07-29 08:04:18,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=259140.0, ans=0.125 +2024-07-29 08:04:19,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=259140.0, ans=0.125 +2024-07-29 08:04:27,183 INFO [train.py:1114] (2/4) Epoch 20, batch 150, loss[loss=0.1532, simple_loss=0.2421, pruned_loss=0.0321, over 4614.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2625, pruned_loss=0.04013, over 494399.21 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:04:28,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=259166.66666666666, ans=0.125 +2024-07-29 08:04:40,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=259193.33333333334, ans=0.0 +2024-07-29 08:04:57,103 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.593e+01 6.135e+01 6.886e+01 1.305e+02, threshold=1.227e+02, percent-clipped=1.0 +2024-07-29 08:04:57,382 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:05:01,697 INFO [train.py:1114] (2/4) Epoch 20, batch 200, loss[loss=0.1843, simple_loss=0.2831, pruned_loss=0.04281, over 4425.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2624, pruned_loss=0.04017, over 593683.10 frames. ], batch size: 21, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:13,022 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.95 vs. limit=15.0 +2024-07-29 08:05:29,686 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259273.33333333334, ans=0.125 +2024-07-29 08:05:31,078 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=259273.33333333334, ans=0.125 +2024-07-29 08:05:31,085 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=259273.33333333334, ans=10.0 +2024-07-29 08:05:32,281 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.43 vs. limit=22.5 +2024-07-29 08:05:38,405 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=259273.33333333334, ans=0.125 +2024-07-29 08:05:40,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=259273.33333333334, ans=0.125 +2024-07-29 08:05:50,886 INFO [train.py:1114] (2/4) Epoch 20, batch 250, loss[loss=0.1764, simple_loss=0.2651, pruned_loss=0.04386, over 4619.00 frames. 
], tot_loss[loss=0.1715, simple_loss=0.2629, pruned_loss=0.03999, over 670669.77 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:59,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259313.33333333334, ans=0.1 +2024-07-29 08:06:05,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=259326.66666666666, ans=0.025 +2024-07-29 08:06:52,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:06:55,288 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259326.66666666666, ans=0.0 +2024-07-29 08:07:09,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=259326.66666666666, ans=0.0 +2024-07-29 08:07:13,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259340.0, ans=0.1 +2024-07-29 08:07:18,139 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.76 vs. limit=12.0 +2024-07-29 08:07:30,207 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.734e+01 6.099e+01 7.044e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 08:07:35,942 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.42 vs. limit=15.0 +2024-07-29 08:07:36,795 INFO [train.py:1114] (2/4) Epoch 20, batch 300, loss[loss=0.2029, simple_loss=0.2918, pruned_loss=0.05701, over 4801.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2625, pruned_loss=0.03963, over 730536.59 frames. ], batch size: 15, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:07:39,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-07-29 08:07:47,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259380.0, ans=0.1 +2024-07-29 08:07:48,778 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0 +2024-07-29 08:08:14,326 INFO [train.py:1114] (2/4) Epoch 20, batch 350, loss[loss=0.16, simple_loss=0.2525, pruned_loss=0.03377, over 4937.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2631, pruned_loss=0.03994, over 776641.86 frames. 
], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:08:37,555 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259473.33333333334, ans=0.125 +2024-07-29 08:08:45,775 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=259486.66666666666, ans=0.2 +2024-07-29 08:08:47,024 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.322e+01 5.507e+01 5.880e+01 6.811e+01 8.968e+01, threshold=1.176e+02, percent-clipped=0.0 +2024-07-29 08:08:51,050 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259500.0, ans=0.125 +2024-07-29 08:08:51,658 INFO [train.py:1114] (2/4) Epoch 20, batch 400, loss[loss=0.1958, simple_loss=0.2923, pruned_loss=0.04963, over 4701.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2626, pruned_loss=0.03982, over 814424.68 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:08:52,030 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=15.0 +2024-07-29 08:09:00,317 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259513.33333333334, ans=0.125 +2024-07-29 08:09:02,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=259513.33333333334, ans=0.0 +2024-07-29 08:09:11,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=259526.66666666666, ans=0.05 +2024-07-29 08:09:30,917 INFO [train.py:1114] (2/4) Epoch 20, batch 450, loss[loss=0.1866, simple_loss=0.2784, pruned_loss=0.04741, over 4636.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2642, pruned_loss=0.04053, over 839826.54 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:09:42,449 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=259580.0, ans=0.0 +2024-07-29 08:09:47,710 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.03 vs. limit=12.0 +2024-07-29 08:10:04,439 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=259620.0, ans=0.125 +2024-07-29 08:10:05,752 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.641e+01 6.168e+01 6.736e+01 1.200e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-29 08:10:10,566 INFO [train.py:1114] (2/4) Epoch 20, batch 500, loss[loss=0.1714, simple_loss=0.2634, pruned_loss=0.03964, over 4689.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2631, pruned_loss=0.03987, over 861707.87 frames. ], batch size: 15, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:10:36,028 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:10:51,630 INFO [train.py:1114] (2/4) Epoch 20, batch 550, loss[loss=0.2118, simple_loss=0.2997, pruned_loss=0.06199, over 4643.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2624, pruned_loss=0.0396, over 877778.30 frames. 
], batch size: 17, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:15:11,587 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=259740.0, ans=0.0 +2024-07-29 08:15:13,290 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-07-29 08:15:15,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=259740.0, ans=0.0 +2024-07-29 08:15:21,356 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.418e+01 6.036e+01 6.579e+01 9.144e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 08:15:29,582 INFO [train.py:1114] (2/4) Epoch 20, batch 600, loss[loss=0.1742, simple_loss=0.2554, pruned_loss=0.04654, over 4642.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2618, pruned_loss=0.03913, over 892067.44 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:15:31,755 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=259766.66666666666, ans=10.0 +2024-07-29 08:15:35,356 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.08 vs. limit=15.0 +2024-07-29 08:15:47,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=259793.33333333334, ans=0.125 +2024-07-29 08:15:56,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=259806.66666666666, ans=0.035 +2024-07-29 08:16:01,804 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=259820.0, ans=0.125 +2024-07-29 08:16:02,391 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=259820.0, ans=0.025 +2024-07-29 08:16:08,387 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.45 vs. limit=6.0 +2024-07-29 08:16:10,529 INFO [train.py:1114] (2/4) Epoch 20, batch 650, loss[loss=0.19, simple_loss=0.2841, pruned_loss=0.0479, over 4757.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2612, pruned_loss=0.0391, over 903770.87 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:16:13,559 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0 +2024-07-29 08:18:22,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=259873.33333333334, ans=0.125 +2024-07-29 08:18:27,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0 +2024-07-29 08:18:30,222 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.565e+01 6.152e+01 6.795e+01 9.682e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 08:18:35,073 INFO [train.py:1114] (2/4) Epoch 20, batch 700, loss[loss=0.1308, simple_loss=0.2201, pruned_loss=0.02075, over 4637.00 frames. 
], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03973, over 911415.15 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:18:53,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259913.33333333334, ans=0.1 +2024-07-29 08:18:59,256 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=259926.66666666666, ans=0.1 +2024-07-29 08:19:04,521 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=259940.0, ans=0.1 +2024-07-29 08:19:08,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=259940.0, ans=0.025 +2024-07-29 08:19:10,434 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.46 vs. limit=15.0 +2024-07-29 08:19:19,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.44 vs. limit=10.0 +2024-07-29 08:19:19,933 INFO [train.py:1114] (2/4) Epoch 20, batch 750, loss[loss=0.1801, simple_loss=0.2789, pruned_loss=0.04063, over 4690.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2621, pruned_loss=0.03992, over 918223.73 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:19:23,477 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=259966.66666666666, ans=0.0 +2024-07-29 08:19:36,472 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=22.5 +2024-07-29 08:19:48,628 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=260020.0, ans=0.2 +2024-07-29 08:19:51,132 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.579e+01 6.090e+01 6.934e+01 1.125e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 08:19:51,692 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.76 vs. limit=22.5 +2024-07-29 08:19:55,190 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=260033.33333333334, ans=0.025 +2024-07-29 08:19:55,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=260033.33333333334, ans=0.0 +2024-07-29 08:19:55,324 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=260033.33333333334, ans=0.0 +2024-07-29 08:19:55,774 INFO [train.py:1114] (2/4) Epoch 20, batch 800, loss[loss=0.1666, simple_loss=0.2489, pruned_loss=0.04218, over 4852.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.263, pruned_loss=0.04071, over 923182.02 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:20:07,453 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.59 vs. 
limit=15.0 +2024-07-29 08:20:11,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=260060.0, ans=0.0 +2024-07-29 08:20:29,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=260073.33333333334, ans=0.0 +2024-07-29 08:20:33,694 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=260073.33333333334, ans=0.125 +2024-07-29 08:20:37,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=260086.66666666666, ans=0.125 +2024-07-29 08:20:43,556 INFO [train.py:1114] (2/4) Epoch 20, batch 850, loss[loss=0.1853, simple_loss=0.2845, pruned_loss=0.04301, over 4680.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2636, pruned_loss=0.04079, over 927458.44 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:20:43,767 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=260100.0, ans=0.125 +2024-07-29 08:20:46,943 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260100.0, ans=0.1 +2024-07-29 08:20:50,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=260113.33333333334, ans=0.125 +2024-07-29 08:20:51,659 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=260113.33333333334, ans=0.125 +2024-07-29 08:20:57,692 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:20:58,669 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.64 vs. limit=10.0 +2024-07-29 08:21:01,005 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=260126.66666666666, ans=0.125 +2024-07-29 08:21:12,438 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.604e+01 6.314e+01 7.197e+01 9.359e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 08:21:16,678 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260166.66666666666, ans=0.1 +2024-07-29 08:21:17,232 INFO [train.py:1114] (2/4) Epoch 20, batch 900, loss[loss=0.1891, simple_loss=0.2695, pruned_loss=0.0543, over 4849.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2646, pruned_loss=0.04114, over 928174.46 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:21:22,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=260166.66666666666, ans=0.2 +2024-07-29 08:21:39,509 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260206.66666666666, ans=0.125 +2024-07-29 08:21:52,563 INFO [train.py:1114] (2/4) Epoch 20, batch 950, loss[loss=0.1544, simple_loss=0.2501, pruned_loss=0.02934, over 4767.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2645, pruned_loss=0.0406, over 929834.93 frames. 
], batch size: 12, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:21:57,529 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.24 vs. limit=15.0 +2024-07-29 08:21:57,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=260233.33333333334, ans=0.2 +2024-07-29 08:23:15,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=260246.66666666666, ans=0.125 +2024-07-29 08:23:15,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260246.66666666666, ans=0.0 +2024-07-29 08:23:53,901 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=260273.33333333334, ans=0.2 +2024-07-29 08:23:57,478 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260273.33333333334, ans=0.0 +2024-07-29 08:23:58,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=260273.33333333334, ans=0.2 +2024-07-29 08:24:02,479 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.740e+01 6.532e+01 7.410e+01 9.580e+01, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 08:24:07,422 INFO [train.py:1114] (2/4) Epoch 20, batch 1000, loss[loss=0.1448, simple_loss=0.2417, pruned_loss=0.0239, over 4975.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2646, pruned_loss=0.04067, over 929775.77 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:24:10,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=260300.0, ans=0.035 +2024-07-29 08:24:11,665 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=260300.0, ans=0.125 +2024-07-29 08:24:40,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=260353.33333333334, ans=0.2 +2024-07-29 08:24:41,506 INFO [train.py:1114] (2/4) Epoch 20, batch 1050, loss[loss=0.1659, simple_loss=0.2593, pruned_loss=0.03627, over 4872.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.264, pruned_loss=0.04052, over 932081.60 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:24:45,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.86 vs. 
limit=6.0 +2024-07-29 08:24:49,648 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=260380.0, ans=0.0 +2024-07-29 08:24:57,580 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=260393.33333333334, ans=0.125 +2024-07-29 08:24:58,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=260393.33333333334, ans=0.125 +2024-07-29 08:24:59,701 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=260393.33333333334, ans=0.04949747468305833 +2024-07-29 08:25:05,592 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=22.5 +2024-07-29 08:25:11,124 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.41 vs. limit=22.5 +2024-07-29 08:25:12,298 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.624e+01 6.219e+01 7.008e+01 1.029e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 08:25:17,055 INFO [train.py:1114] (2/4) Epoch 20, batch 1100, loss[loss=0.1424, simple_loss=0.2313, pruned_loss=0.0267, over 4894.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2634, pruned_loss=0.04028, over 934720.97 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:25:21,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=260433.33333333334, ans=0.05 +2024-07-29 08:25:38,728 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.28 vs. limit=15.0 +2024-07-29 08:25:52,062 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=15.0 +2024-07-29 08:25:52,985 INFO [train.py:1114] (2/4) Epoch 20, batch 1150, loss[loss=0.1631, simple_loss=0.2515, pruned_loss=0.03734, over 4903.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2631, pruned_loss=0.03978, over 934641.91 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:26:07,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260526.66666666666, ans=0.125 +2024-07-29 08:26:14,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.23 vs. 
limit=22.5 +2024-07-29 08:26:15,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260540.0, ans=0.1 +2024-07-29 08:26:19,116 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260540.0, ans=0.125 +2024-07-29 08:26:22,169 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.531e+01 5.714e+01 6.232e+01 6.999e+01 1.113e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 08:26:24,571 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=260553.33333333334, ans=0.0 +2024-07-29 08:26:26,992 INFO [train.py:1114] (2/4) Epoch 20, batch 1200, loss[loss=0.1565, simple_loss=0.2539, pruned_loss=0.02951, over 4866.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2641, pruned_loss=0.04022, over 933747.92 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:26:27,262 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=260566.66666666666, ans=0.0 +2024-07-29 08:26:30,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260566.66666666666, ans=0.125 +2024-07-29 08:26:32,759 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-29 08:26:33,187 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260580.0, ans=0.1 +2024-07-29 08:28:37,624 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=260593.33333333334, ans=0.125 +2024-07-29 08:28:46,451 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260606.66666666666, ans=0.1 +2024-07-29 08:28:48,482 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.98 vs. limit=22.5 +2024-07-29 08:28:59,363 INFO [train.py:1114] (2/4) Epoch 20, batch 1250, loss[loss=0.1674, simple_loss=0.2627, pruned_loss=0.03606, over 4802.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2638, pruned_loss=0.04001, over 937685.41 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:29:09,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260646.66666666666, ans=0.1 +2024-07-29 08:29:15,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260660.0, ans=0.125 +2024-07-29 08:29:28,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=260686.66666666666, ans=0.1 +2024-07-29 08:29:29,555 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.565e+01 6.190e+01 6.882e+01 9.944e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 08:29:42,144 INFO [train.py:1114] (2/4) Epoch 20, batch 1300, loss[loss=0.1958, simple_loss=0.2803, pruned_loss=0.05566, over 4660.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2622, pruned_loss=0.03916, over 939044.78 frames. 
], batch size: 19, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:29:44,144 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=260700.0, ans=0.09899494936611666 +2024-07-29 08:29:44,183 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260700.0, ans=0.1 +2024-07-29 08:29:54,834 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=260726.66666666666, ans=0.0 +2024-07-29 08:29:56,223 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=260726.66666666666, ans=0.1 +2024-07-29 08:29:59,011 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=260726.66666666666, ans=0.2 +2024-07-29 08:29:59,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=260726.66666666666, ans=0.125 +2024-07-29 08:30:15,512 INFO [train.py:1114] (2/4) Epoch 20, batch 1350, loss[loss=0.1813, simple_loss=0.2644, pruned_loss=0.04913, over 4752.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2622, pruned_loss=0.03916, over 941324.22 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:30:40,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=260806.66666666666, ans=0.0 +2024-07-29 08:30:42,254 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260820.0, ans=0.125 +2024-07-29 08:30:44,744 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.728e+01 5.649e+01 6.305e+01 7.298e+01 1.047e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 08:30:49,553 INFO [train.py:1114] (2/4) Epoch 20, batch 1400, loss[loss=0.1887, simple_loss=0.2622, pruned_loss=0.0576, over 4704.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03945, over 942963.61 frames. 
], batch size: 11, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:30:51,667 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=260833.33333333334, ans=0.2 +2024-07-29 08:30:52,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=260833.33333333334, ans=0.0 +2024-07-29 08:30:52,336 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260833.33333333334, ans=0.1 +2024-07-29 08:31:05,071 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=260860.0, ans=0.125 +2024-07-29 08:31:05,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260860.0, ans=0.125 +2024-07-29 08:31:06,766 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=260860.0, ans=22.5 +2024-07-29 08:31:09,015 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:31:14,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=260873.33333333334, ans=0.025 +2024-07-29 08:31:25,048 INFO [train.py:1114] (2/4) Epoch 20, batch 1450, loss[loss=0.1775, simple_loss=0.2667, pruned_loss=0.0442, over 4687.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2625, pruned_loss=0.03953, over 942886.95 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:31:40,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=260926.66666666666, ans=0.125 +2024-07-29 08:31:43,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=260926.66666666666, ans=0.04949747468305833 +2024-07-29 08:31:43,920 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260926.66666666666, ans=0.125 +2024-07-29 08:31:53,200 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.80 vs. limit=15.0 +2024-07-29 08:31:53,429 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.592e+01 6.212e+01 7.267e+01 9.238e+01, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 08:31:55,682 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=260953.33333333334, ans=0.2 +2024-07-29 08:31:57,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=260966.66666666666, ans=0.2 +2024-07-29 08:31:57,937 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.04 vs. limit=12.0 +2024-07-29 08:31:58,329 INFO [train.py:1114] (2/4) Epoch 20, batch 1500, loss[loss=0.1839, simple_loss=0.2863, pruned_loss=0.04074, over 4806.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2614, pruned_loss=0.03915, over 942986.35 frames. 
], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:32:08,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=260980.0, ans=0.0 +2024-07-29 08:32:09,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=260980.0, ans=0.0 +2024-07-29 08:32:22,128 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.31 vs. limit=15.0 +2024-07-29 08:32:23,371 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.22 vs. limit=22.5 +2024-07-29 08:32:27,001 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=261020.0, ans=0.125 +2024-07-29 08:32:33,975 INFO [train.py:1114] (2/4) Epoch 20, batch 1550, loss[loss=0.1671, simple_loss=0.2652, pruned_loss=0.03443, over 4911.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2608, pruned_loss=0.03892, over 939353.46 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:32:35,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=261033.33333333334, ans=0.0 +2024-07-29 08:32:41,504 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=261046.66666666666, ans=0.0 +2024-07-29 08:32:53,022 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=261060.0, ans=0.05 +2024-07-29 08:32:57,303 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=261073.33333333334, ans=0.125 +2024-07-29 08:32:58,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=261073.33333333334, ans=0.025 +2024-07-29 08:33:04,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=261086.66666666666, ans=0.125 +2024-07-29 08:33:04,821 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.534e+01 6.134e+01 7.096e+01 1.070e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 08:33:09,548 INFO [train.py:1114] (2/4) Epoch 20, batch 1600, loss[loss=0.1529, simple_loss=0.2349, pruned_loss=0.03545, over 4875.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2608, pruned_loss=0.03933, over 937895.00 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:33:14,488 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=261100.0, ans=0.125 +2024-07-29 08:33:15,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=261113.33333333334, ans=0.0 +2024-07-29 08:33:16,772 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.38 vs. 
limit=22.5 +2024-07-29 08:33:22,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=261126.66666666666, ans=0.125 +2024-07-29 08:33:22,457 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=261126.66666666666, ans=0.2 +2024-07-29 08:33:28,454 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=261126.66666666666, ans=0.125 +2024-07-29 08:33:37,905 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=261153.33333333334, ans=0.125 +2024-07-29 08:33:42,135 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.41 vs. limit=15.0 +2024-07-29 08:33:44,394 INFO [train.py:1114] (2/4) Epoch 20, batch 1650, loss[loss=0.1674, simple_loss=0.2652, pruned_loss=0.0348, over 4662.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2603, pruned_loss=0.03943, over 938118.26 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:33:50,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=261166.66666666666, ans=0.125 +2024-07-29 08:33:54,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.61 vs. limit=12.0 +2024-07-29 08:33:54,792 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=261180.0, ans=0.125 +2024-07-29 08:33:55,191 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.89 vs. limit=15.0 +2024-07-29 08:33:58,935 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261193.33333333334, ans=0.1 +2024-07-29 08:34:19,823 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.82 vs. limit=22.5 +2024-07-29 08:34:22,553 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.635e+01 6.098e+01 6.570e+01 1.046e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 08:34:27,355 INFO [train.py:1114] (2/4) Epoch 20, batch 1700, loss[loss=0.151, simple_loss=0.2274, pruned_loss=0.0373, over 4716.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2603, pruned_loss=0.03908, over 939711.94 frames. 
], batch size: 11, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:34:34,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=261246.66666666666, ans=0.0 +2024-07-29 08:34:37,447 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=261246.66666666666, ans=0.2 +2024-07-29 08:34:38,127 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261246.66666666666, ans=0.125 +2024-07-29 08:34:39,299 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261246.66666666666, ans=0.125 +2024-07-29 08:34:41,323 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=261260.0, ans=0.05 +2024-07-29 08:34:44,397 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=20.05 vs. limit=22.5 +2024-07-29 08:34:51,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=261273.33333333334, ans=0.2 +2024-07-29 08:34:56,608 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261273.33333333334, ans=0.0 +2024-07-29 08:35:00,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261286.66666666666, ans=0.125 +2024-07-29 08:35:02,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=261286.66666666666, ans=0.125 +2024-07-29 08:35:05,859 INFO [train.py:1114] (2/4) Epoch 20, batch 1750, loss[loss=0.1441, simple_loss=0.2278, pruned_loss=0.03019, over 4807.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2597, pruned_loss=0.03869, over 940594.72 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:35:09,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=261300.0, ans=0.125 +2024-07-29 08:35:14,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261300.0, ans=0.1 +2024-07-29 08:35:16,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261313.33333333334, ans=0.1 +2024-07-29 08:35:19,539 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=261313.33333333334, ans=0.0 +2024-07-29 08:35:21,228 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.78 vs. limit=15.0 +2024-07-29 08:35:22,489 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.92 vs. limit=6.0 +2024-07-29 08:35:35,970 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.54 vs. 
limit=22.5 +2024-07-29 08:35:49,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.94 vs. limit=15.0 +2024-07-29 08:36:11,243 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.694e+01 6.446e+01 7.395e+01 1.026e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 08:36:14,596 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=12.0 +2024-07-29 08:36:16,134 INFO [train.py:1114] (2/4) Epoch 20, batch 1800, loss[loss=0.175, simple_loss=0.2682, pruned_loss=0.04094, over 4637.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2605, pruned_loss=0.03907, over 940981.34 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0 +2024-07-29 08:36:21,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261366.66666666666, ans=0.125 +2024-07-29 08:37:19,280 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.25 vs. limit=15.0 +2024-07-29 08:37:19,954 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.98 vs. limit=10.0 +2024-07-29 08:37:22,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=261406.66666666666, ans=0.05 +2024-07-29 08:37:47,135 INFO [train.py:1114] (2/4) Epoch 20, batch 1850, loss[loss=0.1764, simple_loss=0.2767, pruned_loss=0.038, over 4810.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2604, pruned_loss=0.03899, over 940885.72 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:37:59,942 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:38:07,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=261446.66666666666, ans=0.025 +2024-07-29 08:38:23,017 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=261473.33333333334, ans=0.0 +2024-07-29 08:38:30,213 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.603e+01 6.221e+01 6.965e+01 1.039e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 08:38:33,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.58 vs. limit=15.0 +2024-07-29 08:38:34,834 INFO [train.py:1114] (2/4) Epoch 20, batch 1900, loss[loss=0.181, simple_loss=0.2832, pruned_loss=0.03942, over 4655.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2616, pruned_loss=0.03932, over 942040.05 frames. 
], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:38:36,205 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=261500.0, ans=0.015 +2024-07-29 08:38:36,276 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261500.0, ans=0.125 +2024-07-29 08:38:39,587 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:38:45,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=261513.33333333334, ans=0.2 +2024-07-29 08:38:49,855 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. limit=15.0 +2024-07-29 08:38:52,293 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=261526.66666666666, ans=0.0 +2024-07-29 08:39:02,832 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=261553.33333333334, ans=0.1 +2024-07-29 08:39:04,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=261553.33333333334, ans=0.125 +2024-07-29 08:39:11,526 INFO [train.py:1114] (2/4) Epoch 20, batch 1950, loss[loss=0.1423, simple_loss=0.2273, pruned_loss=0.02866, over 4890.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2625, pruned_loss=0.03961, over 943951.10 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 64.0 +2024-07-29 08:39:19,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=261580.0, ans=0.125 +2024-07-29 08:39:21,447 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.04 vs. limit=15.0 +2024-07-29 08:39:30,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=261593.33333333334, ans=0.2 +2024-07-29 08:39:31,103 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. limit=6.0 +2024-07-29 08:39:40,819 INFO [scaling.py:1024] (2/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.97 vs. limit=8.0 +2024-07-29 08:39:40,869 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.670e+01 6.297e+01 7.133e+01 1.211e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-29 08:39:45,751 INFO [train.py:1114] (2/4) Epoch 20, batch 2000, loss[loss=0.1543, simple_loss=0.2463, pruned_loss=0.03112, over 4800.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.263, pruned_loss=0.03972, over 941108.97 frames. 
], batch size: 11, lr: 3.74e-03, grad_scale: 64.0 +2024-07-29 08:39:47,236 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=261633.33333333334, ans=0.0 +2024-07-29 08:39:56,331 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=261646.66666666666, ans=0.1 +2024-07-29 08:40:00,119 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=261646.66666666666, ans=0.0 +2024-07-29 08:40:02,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=261646.66666666666, ans=0.125 +2024-07-29 08:40:04,193 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=261660.0, ans=0.125 +2024-07-29 08:40:14,495 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=261673.33333333334, ans=0.125 +2024-07-29 08:40:29,400 INFO [train.py:1114] (2/4) Epoch 20, batch 2050, loss[loss=0.151, simple_loss=0.2317, pruned_loss=0.03514, over 4612.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03943, over 939039.43 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0 +2024-07-29 08:40:30,439 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.82 vs. limit=15.0 +2024-07-29 08:40:35,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=261713.33333333334, ans=0.125 +2024-07-29 08:40:41,207 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=261713.33333333334, ans=0.125 +2024-07-29 08:41:14,704 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.787e+01 6.324e+01 7.549e+01 1.272e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-29 08:41:18,650 INFO [train.py:1114] (2/4) Epoch 20, batch 2100, loss[loss=0.1636, simple_loss=0.2627, pruned_loss=0.03228, over 4756.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2613, pruned_loss=0.03939, over 941256.89 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:41:23,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=261766.66666666666, ans=0.125 +2024-07-29 08:41:25,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=261780.0, ans=0.125 +2024-07-29 08:41:32,513 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=261793.33333333334, ans=0.125 +2024-07-29 08:41:47,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=261820.0, ans=0.125 +2024-07-29 08:41:51,701 INFO [train.py:1114] (2/4) Epoch 20, batch 2150, loss[loss=0.1567, simple_loss=0.2493, pruned_loss=0.03199, over 4889.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2612, pruned_loss=0.03961, over 944432.28 frames. 
], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:42:25,444 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=261860.0, ans=0.0 +2024-07-29 08:42:29,173 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.50 vs. limit=15.0 +2024-07-29 08:42:30,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=261873.33333333334, ans=0.2 +2024-07-29 08:42:43,743 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-07-29 08:42:43,998 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.512e+01 6.073e+01 7.043e+01 1.112e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-07-29 08:42:44,379 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.90 vs. limit=22.5 +2024-07-29 08:42:53,722 INFO [train.py:1114] (2/4) Epoch 20, batch 2200, loss[loss=0.1754, simple_loss=0.2693, pruned_loss=0.04076, over 4806.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.261, pruned_loss=0.03934, over 943325.50 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:42:54,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=261900.0, ans=0.95 +2024-07-29 08:42:58,196 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=261900.0, ans=0.125 +2024-07-29 08:43:17,263 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=261926.66666666666, ans=0.125 +2024-07-29 08:43:18,575 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261926.66666666666, ans=0.125 +2024-07-29 08:43:31,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=261940.0, ans=0.125 +2024-07-29 08:43:33,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=261940.0, ans=0.0 +2024-07-29 08:43:35,441 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.36 vs. limit=15.0 +2024-07-29 08:43:42,630 INFO [train.py:1114] (2/4) Epoch 20, batch 2250, loss[loss=0.1748, simple_loss=0.2776, pruned_loss=0.03593, over 4691.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2616, pruned_loss=0.03923, over 941774.56 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:44:51,834 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.514e+01 6.259e+01 6.946e+01 1.195e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 08:45:13,581 INFO [train.py:1114] (2/4) Epoch 20, batch 2300, loss[loss=0.1587, simple_loss=0.237, pruned_loss=0.04017, over 4935.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2608, pruned_loss=0.03909, over 939335.79 frames. 
], batch size: 12, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:45:37,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262033.33333333334, ans=0.125 +2024-07-29 08:45:55,818 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=262046.66666666666, ans=0.125 +2024-07-29 08:45:59,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=262046.66666666666, ans=0.2 +2024-07-29 08:46:02,325 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262046.66666666666, ans=0.1 +2024-07-29 08:47:01,613 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-07-29 08:47:18,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262073.33333333334, ans=0.125 +2024-07-29 08:47:19,024 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-07-29 08:47:28,982 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.60 vs. limit=15.0 +2024-07-29 08:47:34,611 INFO [train.py:1114] (2/4) Epoch 20, batch 2350, loss[loss=0.1801, simple_loss=0.275, pruned_loss=0.04254, over 4637.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2613, pruned_loss=0.03915, over 941165.86 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:48:00,709 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=262100.0, ans=0.125 +2024-07-29 08:48:11,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=262113.33333333334, ans=0.125 +2024-07-29 08:48:12,944 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=15.0 +2024-07-29 08:48:30,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=262126.66666666666, ans=0.0 +2024-07-29 08:48:47,941 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=15.0 +2024-07-29 08:48:50,832 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.808e+01 6.182e+01 6.944e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 08:48:54,824 INFO [train.py:1114] (2/4) Epoch 20, batch 2400, loss[loss=0.1495, simple_loss=0.2417, pruned_loss=0.02867, over 4637.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2616, pruned_loss=0.03936, over 941018.56 frames. 
], batch size: 12, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:48:56,375 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=262166.6666666667, ans=0.025 +2024-07-29 08:49:02,669 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262180.0, ans=0.125 +2024-07-29 08:49:13,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=262193.3333333333, ans=0.125 +2024-07-29 08:49:25,563 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=262220.0, ans=0.025 +2024-07-29 08:49:26,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=262220.0, ans=0.125 +2024-07-29 08:49:28,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=262220.0, ans=0.125 +2024-07-29 08:49:32,086 INFO [train.py:1114] (2/4) Epoch 20, batch 2450, loss[loss=0.1583, simple_loss=0.2615, pruned_loss=0.02757, over 4695.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2616, pruned_loss=0.03929, over 937078.16 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:49:46,240 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=262246.6666666667, ans=0.0 +2024-07-29 08:49:46,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=262246.6666666667, ans=0.125 +2024-07-29 08:49:48,867 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262246.6666666667, ans=0.125 +2024-07-29 08:49:50,284 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=262246.6666666667, ans=0.125 +2024-07-29 08:50:04,010 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.21 vs. limit=15.0 +2024-07-29 08:50:33,799 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.751e+01 6.244e+01 7.182e+01 1.173e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 08:50:35,998 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262286.6666666667, ans=0.1 +2024-07-29 08:50:38,478 INFO [train.py:1114] (2/4) Epoch 20, batch 2500, loss[loss=0.1692, simple_loss=0.2645, pruned_loss=0.03691, over 4804.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2611, pruned_loss=0.03894, over 939141.86 frames. 
], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:50:40,808 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=262300.0, ans=0.0 +2024-07-29 08:50:58,319 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=262313.3333333333, ans=0.2 +2024-07-29 08:51:04,715 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=262326.6666666667, ans=0.2 +2024-07-29 08:51:43,420 INFO [train.py:1114] (2/4) Epoch 20, batch 2550, loss[loss=0.1668, simple_loss=0.2528, pruned_loss=0.04042, over 4822.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2618, pruned_loss=0.03881, over 938861.72 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:51:43,591 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=262366.6666666667, ans=0.0 +2024-07-29 08:51:44,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=262366.6666666667, ans=0.0 +2024-07-29 08:51:44,894 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=262366.6666666667, ans=0.125 +2024-07-29 08:51:47,603 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262366.6666666667, ans=0.125 +2024-07-29 08:51:48,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=262366.6666666667, ans=0.07 +2024-07-29 08:52:02,713 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262393.3333333333, ans=0.125 +2024-07-29 08:52:03,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.60 vs. limit=22.5 +2024-07-29 08:52:04,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=262393.3333333333, ans=0.125 +2024-07-29 08:52:07,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=262393.3333333333, ans=0.2 +2024-07-29 08:52:09,886 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.85 vs. limit=15.0 +2024-07-29 08:52:11,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262406.6666666667, ans=0.125 +2024-07-29 08:52:12,515 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262406.6666666667, ans=0.1 +2024-07-29 08:52:23,029 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.541e+01 6.134e+01 6.874e+01 1.013e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 08:52:27,212 INFO [train.py:1114] (2/4) Epoch 20, batch 2600, loss[loss=0.1707, simple_loss=0.2664, pruned_loss=0.0375, over 4899.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2628, pruned_loss=0.03907, over 937362.38 frames. 
], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:52:30,571 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.30 vs. limit=15.0 +2024-07-29 08:52:43,422 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=262446.6666666667, ans=0.0 +2024-07-29 08:52:57,740 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=262473.3333333333, ans=0.0 +2024-07-29 08:53:05,792 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:53:27,602 INFO [train.py:1114] (2/4) Epoch 20, batch 2650, loss[loss=0.2064, simple_loss=0.2952, pruned_loss=0.05879, over 4632.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2625, pruned_loss=0.03917, over 939538.33 frames. ], batch size: 16, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:53:37,512 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.95 vs. limit=15.0 +2024-07-29 08:53:40,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=262513.3333333333, ans=15.0 +2024-07-29 08:53:40,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262513.3333333333, ans=0.125 +2024-07-29 08:53:43,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=262526.6666666667, ans=0.025 +2024-07-29 08:53:53,366 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262526.6666666667, ans=0.125 +2024-07-29 08:53:53,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=262526.6666666667, ans=0.025 +2024-07-29 08:54:09,626 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.564e+01 6.225e+01 7.006e+01 1.126e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 08:54:14,579 INFO [train.py:1114] (2/4) Epoch 20, batch 2700, loss[loss=0.1599, simple_loss=0.2469, pruned_loss=0.0365, over 4740.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2629, pruned_loss=0.03932, over 939460.08 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:54:31,069 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262593.3333333333, ans=0.0 +2024-07-29 08:54:34,639 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.24 vs. 
limit=22.5 +2024-07-29 08:54:41,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=262606.6666666667, ans=0.025 +2024-07-29 08:54:52,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=262620.0, ans=0.125 +2024-07-29 08:54:52,710 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262620.0, ans=0.1 +2024-07-29 08:54:53,369 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262633.3333333333, ans=0.0 +2024-07-29 08:54:53,804 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.95 vs. limit=15.0 +2024-07-29 08:54:54,046 INFO [train.py:1114] (2/4) Epoch 20, batch 2750, loss[loss=0.1707, simple_loss=0.2536, pruned_loss=0.04385, over 4711.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2626, pruned_loss=0.03962, over 939478.85 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:54:59,649 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=262633.3333333333, ans=0.2 +2024-07-29 08:55:00,445 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262646.6666666667, ans=0.0 +2024-07-29 08:55:12,551 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=262660.0, ans=0.04949747468305833 +2024-07-29 08:55:35,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=262686.6666666667, ans=0.09899494936611666 +2024-07-29 08:55:40,169 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.878e+01 6.772e+01 7.962e+01 1.092e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 08:55:40,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262686.6666666667, ans=0.1 +2024-07-29 08:55:48,450 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=262700.0, ans=0.025 +2024-07-29 08:55:48,995 INFO [train.py:1114] (2/4) Epoch 20, batch 2800, loss[loss=0.2161, simple_loss=0.3055, pruned_loss=0.06335, over 3473.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2631, pruned_loss=0.03995, over 937055.80 frames. 
], batch size: 35, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:55:51,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262700.0, ans=0.1 +2024-07-29 08:56:01,269 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=262713.3333333333, ans=0.025 +2024-07-29 08:56:02,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=262726.6666666667, ans=0.1 +2024-07-29 08:56:03,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=262726.6666666667, ans=0.0 +2024-07-29 08:56:08,949 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.37 vs. limit=15.0 +2024-07-29 08:56:11,604 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=262740.0, ans=0.035 +2024-07-29 08:56:18,516 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=262753.3333333333, ans=0.125 +2024-07-29 08:56:26,513 INFO [train.py:1114] (2/4) Epoch 20, batch 2850, loss[loss=0.1707, simple_loss=0.2524, pruned_loss=0.04455, over 4973.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2634, pruned_loss=0.03994, over 935184.03 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0 +2024-07-29 08:56:33,838 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=262766.6666666667, ans=0.125 +2024-07-29 08:56:39,388 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=15.0 +2024-07-29 08:56:52,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262806.6666666667, ans=0.125 +2024-07-29 08:56:55,238 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.97 vs. limit=6.0 +2024-07-29 08:57:01,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=262820.0, ans=0.0 +2024-07-29 08:57:02,169 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.799e+01 6.410e+01 7.214e+01 1.051e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-29 08:57:06,644 INFO [train.py:1114] (2/4) Epoch 20, batch 2900, loss[loss=0.182, simple_loss=0.284, pruned_loss=0.04, over 4823.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2649, pruned_loss=0.04025, over 939257.70 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 08:57:09,292 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=262833.3333333333, ans=0.125 +2024-07-29 08:57:09,565 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.82 vs. 
limit=6.0 +2024-07-29 08:57:11,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=262833.3333333333, ans=0.0 +2024-07-29 08:57:12,274 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262833.3333333333, ans=0.1 +2024-07-29 08:57:14,809 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=262846.6666666667, ans=0.0 +2024-07-29 08:57:14,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262846.6666666667, ans=0.1 +2024-07-29 08:57:15,168 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.85 vs. limit=22.5 +2024-07-29 08:57:29,726 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=262873.3333333333, ans=0.0 +2024-07-29 08:57:36,620 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262886.6666666667, ans=0.1 +2024-07-29 08:57:40,671 INFO [train.py:1114] (2/4) Epoch 20, batch 2950, loss[loss=0.1176, simple_loss=0.2119, pruned_loss=0.0117, over 4711.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2629, pruned_loss=0.04014, over 938635.70 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 08:57:53,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262913.3333333333, ans=0.1 +2024-07-29 08:57:56,446 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.13 vs. limit=15.0 +2024-07-29 08:58:12,843 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.161e+01 5.582e+01 5.984e+01 6.557e+01 9.213e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-29 08:58:18,650 INFO [train.py:1114] (2/4) Epoch 20, batch 3000, loss[loss=0.1571, simple_loss=0.2456, pruned_loss=0.03428, over 4755.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2621, pruned_loss=0.03953, over 938173.59 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 08:58:18,651 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 08:58:33,776 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.1833, 2.8223, 3.9282, 3.3398, 3.9378, 3.8161, 3.2021, 2.7178], + device='cuda:2') +2024-07-29 08:58:44,396 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1605, simple_loss=0.2625, pruned_loss=0.02922, over 944034.00 frames. +2024-07-29 08:58:44,397 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 08:58:47,206 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.80 vs. 
limit=15.0 +2024-07-29 08:58:51,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=262966.6666666667, ans=0.0 +2024-07-29 08:58:55,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=262980.0, ans=0.04949747468305833 +2024-07-29 08:59:10,600 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:59:14,738 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263006.6666666667, ans=0.1 +2024-07-29 08:59:19,439 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:59:23,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=263020.0, ans=0.125 +2024-07-29 08:59:40,235 INFO [train.py:1114] (2/4) Epoch 20, batch 3050, loss[loss=0.1763, simple_loss=0.261, pruned_loss=0.04578, over 4630.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2626, pruned_loss=0.03954, over 937572.64 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 08:59:46,188 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=263033.3333333333, ans=0.2 +2024-07-29 08:59:50,976 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=263046.6666666667, ans=0.125 +2024-07-29 08:59:51,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. limit=6.0 +2024-07-29 08:59:55,438 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:59:58,092 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:59:58,729 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=263060.0, ans=0.2 +2024-07-29 09:00:00,154 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.90 vs. 
limit=15.0 +2024-07-29 09:00:02,040 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=263060.0, ans=0.95 +2024-07-29 09:00:03,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=263060.0, ans=22.5 +2024-07-29 09:00:04,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=263073.3333333333, ans=0.2 +2024-07-29 09:00:05,606 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263073.3333333333, ans=0.1 +2024-07-29 09:00:07,035 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263073.3333333333, ans=0.1 +2024-07-29 09:00:19,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263086.6666666667, ans=0.1 +2024-07-29 09:00:19,900 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.658e+01 6.248e+01 7.167e+01 1.022e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 09:00:24,265 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-07-29 09:00:33,576 INFO [train.py:1114] (2/4) Epoch 20, batch 3100, loss[loss=0.1968, simple_loss=0.2888, pruned_loss=0.05241, over 4647.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03981, over 938307.76 frames. ], batch size: 16, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:00:34,023 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.62 vs. limit=15.0 +2024-07-29 09:00:37,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=263100.0, ans=0.125 +2024-07-29 09:00:42,633 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.52 vs. limit=15.0 +2024-07-29 09:00:55,996 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=263126.6666666667, ans=0.125 +2024-07-29 09:01:00,028 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=263140.0, ans=0.5 +2024-07-29 09:01:04,918 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=263140.0, ans=0.125 +2024-07-29 09:01:13,566 INFO [train.py:1114] (2/4) Epoch 20, batch 3150, loss[loss=0.1919, simple_loss=0.2824, pruned_loss=0.05074, over 4627.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03979, over 938612.79 frames. 
], batch size: 17, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:01:16,416 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263166.6666666667, ans=0.1 +2024-07-29 09:01:32,681 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=263193.3333333333, ans=0.125 +2024-07-29 09:01:33,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263193.3333333333, ans=0.1 +2024-07-29 09:01:45,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=263220.0, ans=0.125 +2024-07-29 09:01:46,061 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.746e+01 6.588e+01 7.668e+01 1.344e+02, threshold=1.318e+02, percent-clipped=1.0 +2024-07-29 09:01:49,685 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263233.3333333333, ans=0.1 +2024-07-29 09:01:50,148 INFO [train.py:1114] (2/4) Epoch 20, batch 3200, loss[loss=0.1715, simple_loss=0.2646, pruned_loss=0.03923, over 4830.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2609, pruned_loss=0.03938, over 940013.23 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:01:59,976 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.72 vs. limit=15.0 +2024-07-29 09:02:11,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=15.0 +2024-07-29 09:02:17,285 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0 +2024-07-29 09:02:27,345 INFO [train.py:1114] (2/4) Epoch 20, batch 3250, loss[loss=0.1602, simple_loss=0.2598, pruned_loss=0.03035, over 4931.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2613, pruned_loss=0.03959, over 941140.08 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:02:30,006 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:02:31,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=263300.0, ans=0.04949747468305833 +2024-07-29 09:02:45,730 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263326.6666666667, ans=0.1 +2024-07-29 09:02:55,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=263340.0, ans=0.125 +2024-07-29 09:02:57,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=263353.3333333333, ans=0.0 +2024-07-29 09:02:59,779 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.543e+01 6.289e+01 7.306e+01 9.331e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 09:03:03,838 INFO [train.py:1114] (2/4) Epoch 20, batch 3300, loss[loss=0.1824, simple_loss=0.2748, pruned_loss=0.04493, over 4706.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2602, pruned_loss=0.03962, over 941233.43 frames. 
], batch size: 19, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:03:04,711 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=263366.6666666667, ans=0.2 +2024-07-29 09:03:05,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=263366.6666666667, ans=0.2 +2024-07-29 09:03:10,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=263380.0, ans=0.0 +2024-07-29 09:03:13,162 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-07-29 09:03:17,913 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.19 vs. limit=22.5 +2024-07-29 09:03:18,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=263393.3333333333, ans=0.125 +2024-07-29 09:03:22,117 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=263393.3333333333, ans=0.125 +2024-07-29 09:03:30,830 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263420.0, ans=0.0 +2024-07-29 09:03:36,791 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=263433.3333333333, ans=0.025 +2024-07-29 09:03:37,251 INFO [train.py:1114] (2/4) Epoch 20, batch 3350, loss[loss=0.1772, simple_loss=0.2667, pruned_loss=0.04381, over 4644.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.261, pruned_loss=0.03982, over 939563.87 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:03:56,098 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=263460.0, ans=0.125 +2024-07-29 09:03:58,725 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=263473.3333333333, ans=0.125 +2024-07-29 09:03:59,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.94 vs. limit=10.0 +2024-07-29 09:04:02,946 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=263473.3333333333, ans=0.025 +2024-07-29 09:04:07,375 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.699e+01 6.337e+01 7.173e+01 1.148e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 09:04:08,278 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=263486.6666666667, ans=0.0 +2024-07-29 09:04:08,334 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=263486.6666666667, ans=0.0 +2024-07-29 09:04:11,633 INFO [train.py:1114] (2/4) Epoch 20, batch 3400, loss[loss=0.1438, simple_loss=0.2299, pruned_loss=0.02885, over 4800.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2605, pruned_loss=0.03958, over 938370.42 frames. 
], batch size: 11, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:04:18,732 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-29 09:04:33,684 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=263526.6666666667, ans=0.0 +2024-07-29 09:04:44,245 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.44 vs. limit=10.0 +2024-07-29 09:04:44,469 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=263553.3333333333, ans=0.0 +2024-07-29 09:04:49,188 INFO [train.py:1114] (2/4) Epoch 20, batch 3450, loss[loss=0.1661, simple_loss=0.2575, pruned_loss=0.03738, over 4664.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2616, pruned_loss=0.04, over 938354.10 frames. ], batch size: 19, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:04:59,965 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0 +2024-07-29 09:05:14,623 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=263606.6666666667, ans=0.025 +2024-07-29 09:05:18,526 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.782e+01 6.590e+01 7.406e+01 1.017e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 09:05:19,623 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=12.0 +2024-07-29 09:05:20,730 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:05:22,631 INFO [train.py:1114] (2/4) Epoch 20, batch 3500, loss[loss=0.1754, simple_loss=0.2597, pruned_loss=0.04558, over 4940.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2621, pruned_loss=0.03993, over 938407.02 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:05:35,815 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=263660.0, ans=0.025 +2024-07-29 09:05:43,192 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263673.3333333333, ans=0.1 +2024-07-29 09:05:43,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=263673.3333333333, ans=0.125 +2024-07-29 09:05:45,225 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=263673.3333333333, ans=0.0 +2024-07-29 09:05:53,241 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263686.6666666667, ans=0.1 +2024-07-29 09:05:55,092 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=263686.6666666667, ans=0.125 +2024-07-29 09:05:56,338 INFO [train.py:1114] (2/4) Epoch 20, batch 3550, loss[loss=0.1615, simple_loss=0.2645, pruned_loss=0.02924, over 4660.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2615, pruned_loss=0.03995, over 938787.09 frames. 
], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:05:59,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=263700.0, ans=0.125 +2024-07-29 09:06:15,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=263726.6666666667, ans=0.125 +2024-07-29 09:06:18,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263740.0, ans=0.1 +2024-07-29 09:06:31,398 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=263740.0, ans=0.125 +2024-07-29 09:06:33,363 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:06:35,180 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.573e+01 6.229e+01 6.741e+01 1.100e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 09:06:35,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.18 vs. limit=22.5 +2024-07-29 09:06:37,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=263753.3333333333, ans=0.2 +2024-07-29 09:06:38,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=263753.3333333333, ans=0.0 +2024-07-29 09:06:40,732 INFO [train.py:1114] (2/4) Epoch 20, batch 3600, loss[loss=0.1737, simple_loss=0.2687, pruned_loss=0.03928, over 4972.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2612, pruned_loss=0.03959, over 940469.18 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:06:47,686 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.66 vs. limit=22.5 +2024-07-29 09:06:55,404 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=263793.3333333333, ans=0.2 +2024-07-29 09:07:08,332 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=263820.0, ans=0.125 +2024-07-29 09:07:09,033 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:07:14,877 INFO [train.py:1114] (2/4) Epoch 20, batch 3650, loss[loss=0.1667, simple_loss=0.2568, pruned_loss=0.03832, over 4906.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.26, pruned_loss=0.03914, over 940831.69 frames. ], batch size: 15, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:07:30,835 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.12 vs. 
limit=22.5 +2024-07-29 09:07:31,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=263860.0, ans=0.0 +2024-07-29 09:07:31,887 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263860.0, ans=0.1 +2024-07-29 09:07:35,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263873.3333333333, ans=0.1 +2024-07-29 09:07:44,530 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.430e+01 6.137e+01 7.012e+01 1.010e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 09:07:48,572 INFO [train.py:1114] (2/4) Epoch 20, batch 3700, loss[loss=0.1751, simple_loss=0.2782, pruned_loss=0.03604, over 4929.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2601, pruned_loss=0.03871, over 941855.31 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:07:49,964 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=263900.0, ans=0.125 +2024-07-29 09:08:04,556 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263926.6666666667, ans=0.1 +2024-07-29 09:08:06,540 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=263926.6666666667, ans=0.0 +2024-07-29 09:08:21,546 INFO [train.py:1114] (2/4) Epoch 20, batch 3750, loss[loss=0.1487, simple_loss=0.2254, pruned_loss=0.03605, over 4797.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2598, pruned_loss=0.03844, over 943294.88 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:08:37,863 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=263993.3333333333, ans=0.125 +2024-07-29 09:08:40,094 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-29 09:08:44,008 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264006.6666666667, ans=0.0 +2024-07-29 09:08:57,861 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.705e+01 6.410e+01 7.000e+01 1.025e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-29 09:09:02,214 INFO [train.py:1114] (2/4) Epoch 20, batch 3800, loss[loss=0.1803, simple_loss=0.2741, pruned_loss=0.04323, over 4806.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2599, pruned_loss=0.03885, over 941447.28 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:09:08,649 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.37 vs. limit=15.0 +2024-07-29 09:09:47,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264086.6666666667, ans=0.125 +2024-07-29 09:09:54,163 INFO [train.py:1114] (2/4) Epoch 20, batch 3850, loss[loss=0.2214, simple_loss=0.3179, pruned_loss=0.06247, over 4647.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2598, pruned_loss=0.03862, over 942080.16 frames. 
], batch size: 16, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:10:02,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.88 vs. limit=15.0 +2024-07-29 09:10:10,745 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.63 vs. limit=15.0 +2024-07-29 09:10:18,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=264140.0, ans=0.0 +2024-07-29 09:10:24,564 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.626e+01 6.107e+01 6.849e+01 9.588e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-29 09:10:25,638 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.61 vs. limit=15.0 +2024-07-29 09:10:31,760 INFO [train.py:1114] (2/4) Epoch 20, batch 3900, loss[loss=0.1737, simple_loss=0.2691, pruned_loss=0.03911, over 4809.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2603, pruned_loss=0.03877, over 942709.54 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 16.0 +2024-07-29 09:10:36,395 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=264166.6666666667, ans=0.2 +2024-07-29 09:10:43,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264180.0, ans=0.125 +2024-07-29 09:10:46,253 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264193.3333333333, ans=0.125 +2024-07-29 09:10:51,940 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-07-29 09:10:55,820 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.18 vs. limit=15.0 +2024-07-29 09:10:57,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=264220.0, ans=0.125 +2024-07-29 09:11:02,930 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=264220.0, ans=0.07 +2024-07-29 09:11:26,904 INFO [train.py:1114] (2/4) Epoch 20, batch 3950, loss[loss=0.1802, simple_loss=0.2634, pruned_loss=0.04851, over 4826.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2609, pruned_loss=0.03917, over 944767.93 frames. ], batch size: 16, lr: 3.72e-03, grad_scale: 16.0 +2024-07-29 09:11:54,312 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=264273.3333333333, ans=0.125 +2024-07-29 09:11:59,941 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.612e+01 6.214e+01 7.012e+01 1.031e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 09:12:03,454 INFO [train.py:1114] (2/4) Epoch 20, batch 4000, loss[loss=0.1491, simple_loss=0.2463, pruned_loss=0.02596, over 4770.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2613, pruned_loss=0.04006, over 941655.45 frames. 
], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:12:47,553 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=264313.3333333333, ans=0.125 +2024-07-29 09:13:36,393 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=264353.3333333333, ans=0.125 +2024-07-29 09:13:42,199 INFO [train.py:1114] (2/4) Epoch 20, batch 4050, loss[loss=0.2239, simple_loss=0.3042, pruned_loss=0.07183, over 3162.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2613, pruned_loss=0.0401, over 939654.08 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:13:43,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=264366.6666666667, ans=0.125 +2024-07-29 09:13:44,368 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264366.6666666667, ans=0.0 +2024-07-29 09:13:53,349 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=264380.0, ans=0.125 +2024-07-29 09:14:18,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=264420.0, ans=0.125 +2024-07-29 09:14:18,732 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.925e+01 5.618e+01 6.150e+01 7.099e+01 1.073e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 09:14:21,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=264420.0, ans=0.125 +2024-07-29 09:14:22,372 INFO [train.py:1114] (2/4) Epoch 20, batch 4100, loss[loss=0.1794, simple_loss=0.281, pruned_loss=0.03891, over 4895.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2625, pruned_loss=0.0406, over 938628.92 frames. ], batch size: 15, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:14:28,268 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=264433.3333333333, ans=0.1 +2024-07-29 09:14:37,133 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=264460.0, ans=0.0 +2024-07-29 09:14:52,175 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=264486.6666666667, ans=0.125 +2024-07-29 09:14:59,514 INFO [train.py:1114] (2/4) Epoch 20, batch 4150, loss[loss=0.191, simple_loss=0.2894, pruned_loss=0.04631, over 4823.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2624, pruned_loss=0.04027, over 938418.37 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:15:01,932 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264500.0, ans=0.125 +2024-07-29 09:15:03,346 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.23 vs. 
limit=15.0 +2024-07-29 09:15:05,237 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264500.0, ans=0.125 +2024-07-29 09:15:08,621 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=264513.3333333333, ans=0.07 +2024-07-29 09:15:08,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=264513.3333333333, ans=0.0 +2024-07-29 09:15:13,987 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=264526.6666666667, ans=0.0 +2024-07-29 09:15:16,002 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=264526.6666666667, ans=0.025 +2024-07-29 09:15:18,858 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=264526.6666666667, ans=0.2 +2024-07-29 09:15:25,189 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.14 vs. limit=15.0 +2024-07-29 09:15:35,199 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.706e+01 6.359e+01 7.433e+01 1.126e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 09:15:38,657 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264553.3333333333, ans=0.0 +2024-07-29 09:15:39,930 INFO [train.py:1114] (2/4) Epoch 20, batch 4200, loss[loss=0.1613, simple_loss=0.2572, pruned_loss=0.03271, over 4898.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2616, pruned_loss=0.03941, over 939591.39 frames. ], batch size: 15, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:15:55,837 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264566.6666666667, ans=0.125 +2024-07-29 09:15:58,277 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.71 vs. limit=22.5 +2024-07-29 09:16:12,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264580.0, ans=0.125 +2024-07-29 09:16:14,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264593.3333333333, ans=0.125 +2024-07-29 09:16:19,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=264593.3333333333, ans=0.125 +2024-07-29 09:16:39,845 INFO [train.py:1114] (2/4) Epoch 20, batch 4250, loss[loss=0.1715, simple_loss=0.2561, pruned_loss=0.04348, over 4644.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2616, pruned_loss=0.03942, over 940435.23 frames. 
], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:16:42,545 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264633.3333333333, ans=0.0 +2024-07-29 09:16:45,882 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=264646.6666666667, ans=0.0 +2024-07-29 09:16:46,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264646.6666666667, ans=0.125 +2024-07-29 09:16:47,152 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=264646.6666666667, ans=0.2 +2024-07-29 09:16:59,325 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.78 vs. limit=6.0 +2024-07-29 09:17:02,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=264673.3333333333, ans=0.2 +2024-07-29 09:17:09,908 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.620e+01 6.275e+01 6.899e+01 1.013e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 09:17:13,147 INFO [train.py:1114] (2/4) Epoch 20, batch 4300, loss[loss=0.1568, simple_loss=0.2505, pruned_loss=0.03157, over 4760.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2621, pruned_loss=0.03957, over 939996.02 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:20,616 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=264700.0, ans=0.2 +2024-07-29 09:17:33,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=12.0 +2024-07-29 09:17:46,132 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=264753.3333333333, ans=0.025 +2024-07-29 09:17:53,013 INFO [train.py:1114] (2/4) Epoch 20, batch 4350, loss[loss=0.2126, simple_loss=0.2939, pruned_loss=0.06569, over 4763.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2621, pruned_loss=0.03941, over 940824.01 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:54,135 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=264766.6666666667, ans=0.125 +2024-07-29 09:17:57,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=264766.6666666667, ans=0.125 +2024-07-29 09:18:19,216 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264806.6666666667, ans=0.125 +2024-07-29 09:18:23,100 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=264806.6666666667, ans=0.0 +2024-07-29 09:18:23,174 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=264806.6666666667, ans=0.0 +2024-07-29 09:18:23,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.45 vs. 
limit=22.5 +2024-07-29 09:18:27,677 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.816e+01 5.702e+01 6.164e+01 6.960e+01 9.569e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 09:18:31,126 INFO [train.py:1114] (2/4) Epoch 20, batch 4400, loss[loss=0.1806, simple_loss=0.2762, pruned_loss=0.04252, over 4813.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03946, over 940696.69 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:18:40,030 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=264833.3333333333, ans=0.125 +2024-07-29 09:19:02,170 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=264873.3333333333, ans=0.125 +2024-07-29 09:19:05,615 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=264886.6666666667, ans=0.2 +2024-07-29 09:19:12,272 INFO [train.py:1114] (2/4) Epoch 20, batch 4450, loss[loss=0.1534, simple_loss=0.25, pruned_loss=0.02843, over 4942.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2625, pruned_loss=0.03999, over 938496.17 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:19:21,961 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.43 vs. limit=22.5 +2024-07-29 09:19:35,155 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=264940.0, ans=0.125 +2024-07-29 09:19:47,874 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. limit=10.0 +2024-07-29 09:19:53,319 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.589e+01 6.388e+01 7.277e+01 9.167e+01, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 09:19:54,143 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=264953.3333333333, ans=0.025 +2024-07-29 09:19:55,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.58 vs. limit=15.0 +2024-07-29 09:19:57,641 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.75 vs. limit=6.0 +2024-07-29 09:19:57,878 INFO [train.py:1114] (2/4) Epoch 20, batch 4500, loss[loss=0.1698, simple_loss=0.271, pruned_loss=0.03434, over 4739.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.263, pruned_loss=0.04017, over 938064.99 frames. 
], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:19:59,298 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=264966.6666666667, ans=0.2 +2024-07-29 09:20:00,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=264966.6666666667, ans=0.125 +2024-07-29 09:20:04,105 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=264980.0, ans=0.0 +2024-07-29 09:20:06,351 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=264980.0, ans=0.2 +2024-07-29 09:20:09,297 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264980.0, ans=0.125 +2024-07-29 09:20:17,646 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=264993.3333333333, ans=0.125 +2024-07-29 09:20:19,611 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=264993.3333333333, ans=0.0 +2024-07-29 09:20:21,759 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=265006.6666666667, ans=0.2 +2024-07-29 09:20:35,735 INFO [train.py:1114] (2/4) Epoch 20, batch 4550, loss[loss=0.169, simple_loss=0.2565, pruned_loss=0.04076, over 4898.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04023, over 939726.59 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:20:40,295 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265033.3333333333, ans=0.1 +2024-07-29 09:20:48,471 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=265060.0, ans=0.125 +2024-07-29 09:20:49,083 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=265060.0, ans=0.125 +2024-07-29 09:20:50,086 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.84 vs. limit=10.0 +2024-07-29 09:21:09,150 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=265073.3333333333, ans=0.125 +2024-07-29 09:21:16,615 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.639e+01 6.516e+01 7.459e+01 1.043e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-29 09:21:19,989 INFO [train.py:1114] (2/4) Epoch 20, batch 4600, loss[loss=0.184, simple_loss=0.2832, pruned_loss=0.04233, over 4583.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.262, pruned_loss=0.03991, over 938058.11 frames. 
], batch size: 21, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:21:22,115 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=265100.0, ans=0.0 +2024-07-29 09:21:34,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265126.6666666667, ans=0.1 +2024-07-29 09:21:40,147 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265140.0, ans=0.1 +2024-07-29 09:21:53,407 INFO [train.py:1114] (2/4) Epoch 20, batch 4650, loss[loss=0.2029, simple_loss=0.2929, pruned_loss=0.05647, over 4826.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.263, pruned_loss=0.0403, over 939808.63 frames. ], batch size: 16, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:21:53,618 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=265166.6666666667, ans=0.125 +2024-07-29 09:21:57,823 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=265166.6666666667, ans=0.07 +2024-07-29 09:22:04,137 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.63 vs. limit=15.0 +2024-07-29 09:22:10,419 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265193.3333333333, ans=0.1 +2024-07-29 09:22:10,524 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:22:15,997 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=265206.6666666667, ans=0.05 +2024-07-29 09:22:16,656 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=265206.6666666667, ans=0.0 +2024-07-29 09:22:25,176 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.549e+01 5.991e+01 6.748e+01 1.053e+02, threshold=1.198e+02, percent-clipped=0.0 +2024-07-29 09:22:28,505 INFO [train.py:1114] (2/4) Epoch 20, batch 4700, loss[loss=0.1253, simple_loss=0.2083, pruned_loss=0.02112, over 4702.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2621, pruned_loss=0.03985, over 936960.19 frames. ], batch size: 11, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:22:39,139 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265233.3333333333, ans=0.1 +2024-07-29 09:22:40,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=265233.3333333333, ans=0.0 +2024-07-29 09:22:58,101 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=265273.3333333333, ans=0.2 +2024-07-29 09:23:04,467 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=265273.3333333333, ans=0.125 +2024-07-29 09:23:06,279 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.86 vs. 
limit=6.0 +2024-07-29 09:23:06,733 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=265286.6666666667, ans=0.2 +2024-07-29 09:23:10,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=265286.6666666667, ans=0.2 +2024-07-29 09:23:12,667 INFO [train.py:1114] (2/4) Epoch 20, batch 4750, loss[loss=0.1842, simple_loss=0.2794, pruned_loss=0.04446, over 4531.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2626, pruned_loss=0.03999, over 935061.93 frames. ], batch size: 21, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:23:18,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=265300.0, ans=0.0 +2024-07-29 09:23:22,672 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.76 vs. limit=10.0 +2024-07-29 09:23:23,693 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=265313.3333333333, ans=0.125 +2024-07-29 09:23:36,544 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265326.6666666667, ans=0.125 +2024-07-29 09:23:44,425 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=265340.0, ans=0.125 +2024-07-29 09:23:46,067 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.06 vs. limit=15.0 +2024-07-29 09:23:59,500 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.596e+01 6.192e+01 7.037e+01 1.008e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 09:24:06,736 INFO [train.py:1114] (2/4) Epoch 20, batch 4800, loss[loss=0.17, simple_loss=0.2619, pruned_loss=0.03904, over 4693.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2612, pruned_loss=0.03953, over 932536.29 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:24:06,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=265366.6666666667, ans=0.125 +2024-07-29 09:24:25,093 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265380.0, ans=0.125 +2024-07-29 09:24:25,812 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=265380.0, ans=0.125 +2024-07-29 09:24:27,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=265380.0, ans=0.0 +2024-07-29 09:24:28,390 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=265380.0, ans=0.0 +2024-07-29 09:24:32,172 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.35 vs. limit=15.0 +2024-07-29 09:24:33,294 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:24:59,664 INFO [train.py:1114] (2/4) Epoch 20, batch 4850, loss[loss=0.175, simple_loss=0.267, pruned_loss=0.0415, over 4729.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2619, pruned_loss=0.03964, over 932211.70 frames. 
], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:25:20,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=265460.0, ans=0.125 +2024-07-29 09:25:22,010 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265460.0, ans=0.1 +2024-07-29 09:25:24,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265460.0, ans=0.1 +2024-07-29 09:25:43,422 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.554e+01 6.055e+01 6.631e+01 1.173e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 09:25:46,890 INFO [train.py:1114] (2/4) Epoch 20, batch 4900, loss[loss=0.1672, simple_loss=0.2566, pruned_loss=0.03885, over 4758.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2617, pruned_loss=0.03971, over 934165.20 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:25:54,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=265513.3333333333, ans=0.2 +2024-07-29 09:25:54,847 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=265513.3333333333, ans=0.025 +2024-07-29 09:26:18,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=265526.6666666667, ans=0.125 +2024-07-29 09:26:18,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=265526.6666666667, ans=0.2 +2024-07-29 09:26:19,609 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=265526.6666666667, ans=0.0 +2024-07-29 09:26:26,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.32 vs. limit=22.5 +2024-07-29 09:26:26,635 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.46 vs. limit=22.5 +2024-07-29 09:26:29,527 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=265553.3333333333, ans=0.0 +2024-07-29 09:26:32,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=265553.3333333333, ans=0.125 +2024-07-29 09:26:34,956 INFO [train.py:1114] (2/4) Epoch 20, batch 4950, loss[loss=0.2098, simple_loss=0.2944, pruned_loss=0.0626, over 3328.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.04049, over 931379.40 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:27:10,735 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265620.0, ans=0.1 +2024-07-29 09:27:11,249 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.554e+01 6.246e+01 6.923e+01 9.859e+01, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 09:27:14,702 INFO [train.py:1114] (2/4) Epoch 20, batch 5000, loss[loss=0.1834, simple_loss=0.2877, pruned_loss=0.03957, over 4659.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2625, pruned_loss=0.04012, over 935003.88 frames. 
], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:27:29,514 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.88 vs. limit=10.0 +2024-07-29 09:27:31,067 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265660.0, ans=0.1 +2024-07-29 09:27:34,519 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265660.0, ans=0.1 +2024-07-29 09:27:34,648 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.37 vs. limit=22.5 +2024-07-29 09:27:46,936 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=265686.6666666667, ans=0.2 +2024-07-29 09:27:50,276 INFO [train.py:1114] (2/4) Epoch 20, batch 5050, loss[loss=0.1667, simple_loss=0.2537, pruned_loss=0.0398, over 4859.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2611, pruned_loss=0.03937, over 937901.59 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:28:01,940 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265700.0, ans=0.125 +2024-07-29 09:28:17,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=265726.6666666667, ans=0.0 +2024-07-29 09:28:24,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=265740.0, ans=0.125 +2024-07-29 09:28:34,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.782e+01 6.489e+01 7.303e+01 1.011e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 09:28:36,013 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=265753.3333333333, ans=0.07 +2024-07-29 09:28:38,655 INFO [train.py:1114] (2/4) Epoch 20, batch 5100, loss[loss=0.1497, simple_loss=0.2379, pruned_loss=0.03075, over 4770.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2621, pruned_loss=0.03976, over 935700.50 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:28:42,971 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=265766.6666666667, ans=0.125 +2024-07-29 09:28:46,371 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=265780.0, ans=0.0 +2024-07-29 09:28:50,983 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265780.0, ans=0.125 +2024-07-29 09:28:51,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=265793.3333333333, ans=0.2 +2024-07-29 09:29:04,722 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0 +2024-07-29 09:29:12,339 INFO [train.py:1114] (2/4) Epoch 20, batch 5150, loss[loss=0.1857, simple_loss=0.2768, pruned_loss=0.04733, over 4818.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2628, pruned_loss=0.03983, over 936554.16 frames. 
], batch size: 16, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:29:13,849 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265833.3333333333, ans=0.0 +2024-07-29 09:29:16,009 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=265833.3333333333, ans=0.0 +2024-07-29 09:29:28,698 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=265860.0, ans=0.0 +2024-07-29 09:29:31,683 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=265860.0, ans=0.0 +2024-07-29 09:29:44,548 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 5.725e+01 6.279e+01 7.318e+01 1.119e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 09:29:44,819 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:30:06,180 INFO [train.py:1114] (2/4) Epoch 20, batch 5200, loss[loss=0.1588, simple_loss=0.2509, pruned_loss=0.03339, over 4660.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2623, pruned_loss=0.03928, over 936546.38 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:30:09,051 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=265900.0, ans=0.125 +2024-07-29 09:30:09,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=265900.0, ans=0.125 +2024-07-29 09:30:33,287 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265926.6666666667, ans=0.1 +2024-07-29 09:30:49,518 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=265940.0, ans=0.0 +2024-07-29 09:30:50,459 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.23 vs. limit=15.0 +2024-07-29 09:31:01,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265966.6666666667, ans=0.125 +2024-07-29 09:31:02,526 INFO [train.py:1114] (2/4) Epoch 20, batch 5250, loss[loss=0.1654, simple_loss=0.2635, pruned_loss=0.03364, over 4900.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2617, pruned_loss=0.03903, over 936142.17 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:31:07,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=265966.6666666667, ans=0.025 +2024-07-29 09:31:07,465 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. 
limit=15.0 +2024-07-29 09:31:15,476 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=265993.3333333333, ans=0.0 +2024-07-29 09:31:32,635 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.588e+01 6.109e+01 7.391e+01 1.107e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 09:31:37,723 INFO [train.py:1114] (2/4) Epoch 20, batch 5300, loss[loss=0.1701, simple_loss=0.263, pruned_loss=0.03853, over 4637.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.03951, over 934054.77 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:31:44,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=266046.6666666667, ans=0.0 +2024-07-29 09:31:51,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=266060.0, ans=0.125 +2024-07-29 09:31:58,870 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266060.0, ans=0.1 +2024-07-29 09:32:02,089 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:32:10,340 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=266086.6666666667, ans=0.125 +2024-07-29 09:32:13,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=266086.6666666667, ans=0.0 +2024-07-29 09:32:15,632 INFO [train.py:1114] (2/4) Epoch 20, batch 5350, loss[loss=0.1734, simple_loss=0.2576, pruned_loss=0.04464, over 4492.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03974, over 936020.93 frames. ], batch size: 10, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:32:16,660 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.77 vs. 
limit=22.5 +2024-07-29 09:32:17,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=266100.0, ans=0.09899494936611666 +2024-07-29 09:32:48,904 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=266126.6666666667, ans=0.0 +2024-07-29 09:32:50,728 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=266126.6666666667, ans=0.125 +2024-07-29 09:32:54,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266126.6666666667, ans=0.1 +2024-07-29 09:32:59,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=266140.0, ans=0.2 +2024-07-29 09:33:02,491 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=266153.3333333333, ans=0.0 +2024-07-29 09:33:03,256 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:33:04,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=266153.3333333333, ans=0.125 +2024-07-29 09:33:05,169 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=266153.3333333333, ans=0.125 +2024-07-29 09:33:06,393 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.787e+01 6.416e+01 7.278e+01 1.158e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 09:33:09,779 INFO [train.py:1114] (2/4) Epoch 20, batch 5400, loss[loss=0.1816, simple_loss=0.2772, pruned_loss=0.04303, over 4317.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04021, over 930476.04 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:33:10,043 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=22.5 +2024-07-29 09:33:15,142 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.49 vs. 
limit=12.0 +2024-07-29 09:33:22,027 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=266180.0, ans=0.125 +2024-07-29 09:33:28,410 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=266193.3333333333, ans=0.125 +2024-07-29 09:33:28,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266193.3333333333, ans=0.1 +2024-07-29 09:33:29,668 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266193.3333333333, ans=0.125 +2024-07-29 09:33:29,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=266193.3333333333, ans=0.0 +2024-07-29 09:33:31,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=266193.3333333333, ans=0.0 +2024-07-29 09:33:34,282 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=266206.6666666667, ans=10.0 +2024-07-29 09:33:37,614 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=266206.6666666667, ans=0.125 +2024-07-29 09:33:46,293 INFO [train.py:1114] (2/4) Epoch 20, batch 5450, loss[loss=0.157, simple_loss=0.2414, pruned_loss=0.03631, over 4704.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2625, pruned_loss=0.03985, over 933249.79 frames. ], batch size: 11, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:34:23,416 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.85 vs. limit=22.5 +2024-07-29 09:34:42,063 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.44 vs. limit=15.0 +2024-07-29 09:34:50,647 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.730e+01 6.160e+01 6.781e+01 9.375e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 09:34:54,545 INFO [train.py:1114] (2/4) Epoch 20, batch 5500, loss[loss=0.1433, simple_loss=0.2357, pruned_loss=0.02539, over 4416.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2618, pruned_loss=0.03997, over 930655.96 frames. ], batch size: 26, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:35:00,687 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=266313.3333333333, ans=0.0 +2024-07-29 09:35:13,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.49 vs. limit=15.0 +2024-07-29 09:35:14,380 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=266326.6666666667, ans=0.1 +2024-07-29 09:35:21,908 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266340.0, ans=0.125 +2024-07-29 09:35:34,377 INFO [train.py:1114] (2/4) Epoch 20, batch 5550, loss[loss=0.157, simple_loss=0.2519, pruned_loss=0.03102, over 4719.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2608, pruned_loss=0.03987, over 932973.10 frames. 
], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:35:41,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=266380.0, ans=0.0 +2024-07-29 09:35:44,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=266380.0, ans=0.125 +2024-07-29 09:35:48,145 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266393.3333333333, ans=0.125 +2024-07-29 09:36:00,921 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266406.6666666667, ans=0.1 +2024-07-29 09:36:07,408 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.404e+01 7.729e+01 1.135e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-29 09:36:09,554 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.41 vs. limit=15.0 +2024-07-29 09:36:10,966 INFO [train.py:1114] (2/4) Epoch 20, batch 5600, loss[loss=0.1919, simple_loss=0.292, pruned_loss=0.04593, over 4740.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.262, pruned_loss=0.04038, over 934013.70 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:36:11,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=266433.3333333333, ans=0.0 +2024-07-29 09:36:15,243 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266433.3333333333, ans=0.1 +2024-07-29 09:36:17,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=266433.3333333333, ans=0.125 +2024-07-29 09:36:18,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=266433.3333333333, ans=0.0 +2024-07-29 09:36:39,073 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=266473.3333333333, ans=0.025 +2024-07-29 09:36:40,665 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=12.0 +2024-07-29 09:36:45,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=266486.6666666667, ans=0.125 +2024-07-29 09:36:52,284 INFO [train.py:1114] (2/4) Epoch 20, batch 5650, loss[loss=0.1861, simple_loss=0.2856, pruned_loss=0.04327, over 4466.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2606, pruned_loss=0.03965, over 936573.41 frames. 
], batch size: 21, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:36:57,208 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=266500.0, ans=0.025 +2024-07-29 09:37:08,585 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=266526.6666666667, ans=0.125 +2024-07-29 09:37:09,277 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=266526.6666666667, ans=0.125 +2024-07-29 09:37:14,561 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=266540.0, ans=0.05 +2024-07-29 09:37:22,261 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.835e+01 6.614e+01 7.684e+01 1.140e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-29 09:37:25,572 INFO [train.py:1114] (2/4) Epoch 20, batch 5700, loss[loss=0.1525, simple_loss=0.2451, pruned_loss=0.02991, over 4689.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2626, pruned_loss=0.0407, over 938082.99 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:37:38,201 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=266566.6666666667, ans=0.0 +2024-07-29 09:38:26,956 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=12.0 +2024-07-29 09:38:31,809 INFO [train.py:1114] (2/4) Epoch 20, batch 5750, loss[loss=0.2044, simple_loss=0.2929, pruned_loss=0.05798, over 4763.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.04066, over 938341.31 frames. ], batch size: 19, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:38:58,852 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=266660.0, ans=0.0 +2024-07-29 09:39:02,025 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=266660.0, ans=0.125 +2024-07-29 09:39:04,632 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.62 vs. limit=22.5 +2024-07-29 09:39:06,214 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266673.3333333333, ans=0.1 +2024-07-29 09:39:17,486 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 5.816e+01 6.291e+01 7.219e+01 1.004e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 09:39:19,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=266686.6666666667, ans=0.125 +2024-07-29 09:39:20,771 INFO [train.py:1114] (2/4) Epoch 20, batch 5800, loss[loss=0.1949, simple_loss=0.2825, pruned_loss=0.05363, over 4727.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2637, pruned_loss=0.04067, over 937188.92 frames. 
], batch size: 19, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:39:21,543 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=266700.0, ans=0.0 +2024-07-29 09:39:25,651 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=266700.0, ans=0.0 +2024-07-29 09:39:34,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266726.6666666667, ans=0.125 +2024-07-29 09:39:35,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=266726.6666666667, ans=0.2 +2024-07-29 09:39:39,430 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=266726.6666666667, ans=0.025 +2024-07-29 09:39:39,721 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=15.0 +2024-07-29 09:39:41,381 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=266740.0, ans=0.05 +2024-07-29 09:39:41,434 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=266740.0, ans=0.125 +2024-07-29 09:39:44,877 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.19 vs. limit=10.0 +2024-07-29 09:39:58,128 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=266753.3333333333, ans=0.07 +2024-07-29 09:40:00,082 INFO [train.py:1114] (2/4) Epoch 20, batch 5850, loss[loss=0.1625, simple_loss=0.2554, pruned_loss=0.03479, over 4410.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04097, over 937638.36 frames. ], batch size: 21, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:40:03,309 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.50 vs. limit=15.0 +2024-07-29 09:40:14,917 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=266793.3333333333, ans=0.125 +2024-07-29 09:40:26,123 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=266806.6666666667, ans=0.125 +2024-07-29 09:40:31,389 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.785e+01 6.450e+01 7.129e+01 1.228e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 09:40:34,129 INFO [train.py:1114] (2/4) Epoch 20, batch 5900, loss[loss=0.1638, simple_loss=0.2654, pruned_loss=0.03112, over 4687.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2635, pruned_loss=0.04065, over 938530.19 frames. 
], batch size: 15, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:40:35,569 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=266833.3333333333, ans=0.0 +2024-07-29 09:40:44,822 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=266833.3333333333, ans=0.125 +2024-07-29 09:40:44,885 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=266833.3333333333, ans=0.125 +2024-07-29 09:40:58,185 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=266846.6666666667, ans=0.125 +2024-07-29 09:41:06,146 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=266873.3333333333, ans=0.025 +2024-07-29 09:41:21,154 INFO [train.py:1114] (2/4) Epoch 20, batch 5950, loss[loss=0.1886, simple_loss=0.2747, pruned_loss=0.05121, over 4685.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2633, pruned_loss=0.04012, over 940267.79 frames. ], batch size: 15, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:41:32,541 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=266913.3333333333, ans=0.0 +2024-07-29 09:41:48,783 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=266940.0, ans=0.125 +2024-07-29 09:41:51,383 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=266940.0, ans=0.125 +2024-07-29 09:41:58,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266953.3333333333, ans=0.125 +2024-07-29 09:42:00,474 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.654e+01 6.112e+01 6.775e+01 1.038e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 09:42:26,530 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.49 vs. limit=15.0 +2024-07-29 09:42:27,690 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=266966.6666666667, ans=0.2 +2024-07-29 09:42:28,149 INFO [train.py:1114] (2/4) Epoch 20, batch 6000, loss[loss=0.1775, simple_loss=0.2714, pruned_loss=0.04182, over 4153.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2624, pruned_loss=0.03982, over 937258.68 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:42:28,150 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 09:42:34,213 INFO [zipformer.py:1858] (2/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9521, 3.4601, 3.2599, 3.7309], device='cuda:2') +2024-07-29 09:42:44,431 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1606, simple_loss=0.2622, pruned_loss=0.02953, over 944034.00 frames. 
+2024-07-29 09:42:44,432 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 09:43:01,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=266993.3333333333, ans=0.125 +2024-07-29 09:43:02,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=266993.3333333333, ans=0.09899494936611666 +2024-07-29 09:43:04,797 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=267006.6666666667, ans=0.1 +2024-07-29 09:43:18,766 INFO [train.py:1114] (2/4) Epoch 20, batch 6050, loss[loss=0.1673, simple_loss=0.2635, pruned_loss=0.03554, over 4777.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2619, pruned_loss=0.03959, over 938625.40 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0 +2024-07-29 09:43:22,045 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.43 vs. limit=22.5 +2024-07-29 09:43:36,979 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=267046.6666666667, ans=0.0 +2024-07-29 09:43:52,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=267073.3333333333, ans=0.025 +2024-07-29 09:43:53,224 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=267073.3333333333, ans=0.5 +2024-07-29 09:43:57,701 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.73 vs. limit=22.5 +2024-07-29 09:44:05,097 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.588e+01 6.267e+01 7.088e+01 1.023e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 09:44:12,291 INFO [train.py:1114] (2/4) Epoch 20, batch 6100, loss[loss=0.1798, simple_loss=0.2856, pruned_loss=0.03698, over 4684.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03968, over 938503.21 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:44:21,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=267113.3333333333, ans=0.125 +2024-07-29 09:44:23,066 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.84 vs. limit=22.5 +2024-07-29 09:44:32,328 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=267126.6666666667, ans=0.0 +2024-07-29 09:44:50,616 INFO [train.py:1114] (2/4) Epoch 20, batch 6150, loss[loss=0.1778, simple_loss=0.2715, pruned_loss=0.04207, over 3440.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03946, over 937527.78 frames. 
], batch size: 36, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:44:51,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=267166.6666666667, ans=0.2 +2024-07-29 09:45:00,764 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=267180.0, ans=0.2 +2024-07-29 09:45:02,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=267180.0, ans=0.025 +2024-07-29 09:45:07,536 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=267193.3333333333, ans=0.0 +2024-07-29 09:45:12,995 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=267206.6666666667, ans=0.025 +2024-07-29 09:45:13,226 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.15 vs. limit=15.0 +2024-07-29 09:45:17,966 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.47 vs. limit=6.0 +2024-07-29 09:45:26,400 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.902e+01 6.533e+01 7.507e+01 1.268e+02, threshold=1.307e+02, percent-clipped=1.0 +2024-07-29 09:45:29,245 INFO [train.py:1114] (2/4) Epoch 20, batch 6200, loss[loss=0.1513, simple_loss=0.2454, pruned_loss=0.02856, over 4744.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2625, pruned_loss=0.03979, over 936960.15 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:45:34,158 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=267233.3333333333, ans=0.0 +2024-07-29 09:45:36,866 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267246.6666666667, ans=0.1 +2024-07-29 09:45:40,599 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=267246.6666666667, ans=0.0 +2024-07-29 09:45:50,094 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:45:52,914 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=267273.3333333333, ans=0.125 +2024-07-29 09:46:03,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=267286.6666666667, ans=0.125 +2024-07-29 09:46:05,165 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=267300.0, ans=0.0 +2024-07-29 09:46:05,340 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:46:05,741 INFO [train.py:1114] (2/4) Epoch 20, batch 6250, loss[loss=0.1643, simple_loss=0.2638, pruned_loss=0.03237, over 4811.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2619, pruned_loss=0.03984, over 933468.05 frames. 
], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:46:10,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=267300.0, ans=0.2 +2024-07-29 09:46:17,500 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=267313.3333333333, ans=0.125 +2024-07-29 09:46:18,896 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=267326.6666666667, ans=0.125 +2024-07-29 09:46:45,646 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.746e+01 6.370e+01 7.341e+01 9.825e+01, threshold=1.274e+02, percent-clipped=0.0 +2024-07-29 09:46:55,218 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=267353.3333333333, ans=0.025 +2024-07-29 09:46:56,417 INFO [train.py:1114] (2/4) Epoch 20, batch 6300, loss[loss=0.1272, simple_loss=0.2132, pruned_loss=0.02063, over 4483.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2623, pruned_loss=0.04024, over 930159.37 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:47:01,619 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267366.6666666667, ans=0.1 +2024-07-29 09:47:16,855 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=267406.6666666667, ans=0.05 +2024-07-29 09:47:32,260 INFO [train.py:1114] (2/4) Epoch 20, batch 6350, loss[loss=0.1663, simple_loss=0.2729, pruned_loss=0.02991, over 4517.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2618, pruned_loss=0.03957, over 934179.80 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:47:34,550 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=267433.3333333333, ans=0.2 +2024-07-29 09:47:35,215 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=267433.3333333333, ans=0.125 +2024-07-29 09:47:49,018 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=267460.0, ans=0.125 +2024-07-29 09:48:06,776 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.677e+01 6.317e+01 7.481e+01 1.107e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 09:48:06,986 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267486.6666666667, ans=0.1 +2024-07-29 09:48:09,448 INFO [train.py:1114] (2/4) Epoch 20, batch 6400, loss[loss=0.1684, simple_loss=0.2603, pruned_loss=0.03824, over 4631.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.0395, over 935454.69 frames. 
], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:48:10,861 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=267500.0, ans=0.125 +2024-07-29 09:48:12,322 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=267500.0, ans=0.0 +2024-07-29 09:48:12,363 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=267500.0, ans=0.125 +2024-07-29 09:48:12,457 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.25 vs. limit=15.0 +2024-07-29 09:48:27,923 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.69 vs. limit=10.0 +2024-07-29 09:48:30,228 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=267540.0, ans=0.1 +2024-07-29 09:48:30,988 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=267540.0, ans=0.0 +2024-07-29 09:48:33,688 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=267540.0, ans=0.2 +2024-07-29 09:48:41,673 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=267553.3333333333, ans=0.07 +2024-07-29 09:48:42,723 INFO [train.py:1114] (2/4) Epoch 20, batch 6450, loss[loss=0.1752, simple_loss=0.2695, pruned_loss=0.04044, over 4531.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.0394, over 938952.27 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:48:44,178 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:48:49,465 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=267580.0, ans=0.0 +2024-07-29 09:49:06,747 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=267593.3333333333, ans=0.04949747468305833 +2024-07-29 09:49:10,039 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:49:46,233 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+01 5.798e+01 6.360e+01 7.229e+01 1.035e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 09:49:48,986 INFO [train.py:1114] (2/4) Epoch 20, batch 6500, loss[loss=0.176, simple_loss=0.2602, pruned_loss=0.0459, over 3284.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2609, pruned_loss=0.03931, over 940128.61 frames. 
], batch size: 35, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:49:57,531 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=267633.3333333333, ans=0.0 +2024-07-29 09:50:03,712 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=267646.6666666667, ans=0.2 +2024-07-29 09:50:11,538 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=267660.0, ans=0.125 +2024-07-29 09:50:13,198 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0 +2024-07-29 09:50:30,559 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=267686.6666666667, ans=0.025 +2024-07-29 09:50:31,668 INFO [train.py:1114] (2/4) Epoch 20, batch 6550, loss[loss=0.1486, simple_loss=0.2325, pruned_loss=0.03237, over 4803.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2609, pruned_loss=0.03909, over 943032.33 frames. ], batch size: 11, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:50:31,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267700.0, ans=0.1 +2024-07-29 09:50:33,910 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=267700.0, ans=0.125 +2024-07-29 09:50:35,720 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=267700.0, ans=0.125 +2024-07-29 09:50:42,131 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=267713.3333333333, ans=0.025 +2024-07-29 09:51:18,327 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=267740.0, ans=0.0 +2024-07-29 09:51:18,450 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.78 vs. limit=6.0 +2024-07-29 09:51:23,456 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=267753.3333333333, ans=0.125 +2024-07-29 09:51:26,544 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.663e+01 6.386e+01 7.207e+01 1.403e+02, threshold=1.277e+02, percent-clipped=3.0 +2024-07-29 09:51:27,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=267753.3333333333, ans=0.0 +2024-07-29 09:51:29,170 INFO [train.py:1114] (2/4) Epoch 20, batch 6600, loss[loss=0.156, simple_loss=0.2516, pruned_loss=0.03017, over 4931.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.261, pruned_loss=0.03894, over 945021.95 frames. 
], batch size: 14, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:51:29,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=267766.6666666667, ans=0.125 +2024-07-29 09:51:32,212 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=267766.6666666667, ans=0.0 +2024-07-29 09:51:33,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.88 vs. limit=15.0 +2024-07-29 09:52:53,368 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.41 vs. limit=10.0 +2024-07-29 09:52:53,912 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=267806.6666666667, ans=0.025 +2024-07-29 09:53:11,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=267820.0, ans=0.125 +2024-07-29 09:53:12,455 INFO [train.py:1114] (2/4) Epoch 20, batch 6650, loss[loss=0.1678, simple_loss=0.2651, pruned_loss=0.03532, over 4649.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2609, pruned_loss=0.03916, over 943874.98 frames. ], batch size: 17, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:53:31,935 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.45 vs. limit=22.5 +2024-07-29 09:53:39,754 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=267873.3333333333, ans=0.025 +2024-07-29 09:53:39,865 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:53:41,795 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=267886.6666666667, ans=0.0 +2024-07-29 09:53:44,862 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.709e+01 6.420e+01 7.242e+01 1.116e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-29 09:53:47,512 INFO [train.py:1114] (2/4) Epoch 20, batch 6700, loss[loss=0.1461, simple_loss=0.2396, pruned_loss=0.02631, over 4705.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2619, pruned_loss=0.03981, over 942681.64 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:53:57,734 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=15.0 +2024-07-29 09:54:02,345 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=267913.3333333333, ans=0.125 +2024-07-29 09:55:02,462 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267953.3333333333, ans=0.125 +2024-07-29 09:55:03,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=267953.3333333333, ans=0.125 +2024-07-29 09:55:06,376 INFO [train.py:1114] (2/4) Epoch 20, batch 6750, loss[loss=0.2147, simple_loss=0.3009, pruned_loss=0.06431, over 4291.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2613, pruned_loss=0.0399, over 940731.33 frames. 
], batch size: 26, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:55:40,707 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=267993.3333333333, ans=0.1 +2024-07-29 09:55:41,483 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=267993.3333333333, ans=0.0 +2024-07-29 09:55:53,696 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.999e+01 6.595e+01 7.628e+01 1.756e+02, threshold=1.319e+02, percent-clipped=1.0 +2024-07-29 09:55:53,915 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=268020.0, ans=0.0 +2024-07-29 09:55:56,367 INFO [train.py:1114] (2/4) Epoch 20, batch 6800, loss[loss=0.1655, simple_loss=0.2573, pruned_loss=0.03686, over 4631.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2623, pruned_loss=0.03996, over 939517.77 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:56:00,677 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.94 vs. limit=22.5 +2024-07-29 09:56:04,361 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=268046.6666666667, ans=0.0 +2024-07-29 09:56:17,238 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=268046.6666666667, ans=0.0 +2024-07-29 09:56:29,206 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=268073.3333333333, ans=0.04949747468305833 +2024-07-29 09:56:39,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.94 vs. limit=15.0 +2024-07-29 09:56:40,833 INFO [train.py:1114] (2/4) Epoch 20, batch 6850, loss[loss=0.1446, simple_loss=0.2426, pruned_loss=0.02326, over 4702.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03974, over 941498.31 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:56:45,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268100.0, ans=0.125 +2024-07-29 09:57:29,003 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=268140.0, ans=0.125 +2024-07-29 09:57:30,231 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:57:36,086 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.831e+01 6.589e+01 8.147e+01 1.219e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 09:57:38,801 INFO [train.py:1114] (2/4) Epoch 20, batch 6900, loss[loss=0.1544, simple_loss=0.2393, pruned_loss=0.03478, over 4973.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2616, pruned_loss=0.03959, over 943528.56 frames. 
], batch size: 13, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:57:49,219 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=268166.6666666667, ans=0.125 +2024-07-29 09:57:49,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=268166.6666666667, ans=0.0 +2024-07-29 09:58:09,247 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=268206.6666666667, ans=0.125 +2024-07-29 09:58:13,576 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=268206.6666666667, ans=0.0 +2024-07-29 09:58:19,605 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:58:21,470 INFO [train.py:1114] (2/4) Epoch 20, batch 6950, loss[loss=0.1563, simple_loss=0.2451, pruned_loss=0.03374, over 4586.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.03948, over 940580.74 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 09:59:13,731 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=268260.0, ans=0.05 +2024-07-29 09:59:14,392 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=268260.0, ans=0.125 +2024-07-29 09:59:17,486 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.39 vs. limit=15.0 +2024-07-29 09:59:17,581 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.64 vs. limit=10.0 +2024-07-29 09:59:17,994 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=268273.3333333333, ans=0.0 +2024-07-29 09:59:19,711 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:59:22,346 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=268273.3333333333, ans=0.2 +2024-07-29 09:59:22,519 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.78 vs. limit=15.0 +2024-07-29 09:59:43,560 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.18 vs. limit=22.5 +2024-07-29 09:59:44,603 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.602e+01 6.145e+01 6.791e+01 9.985e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 10:00:04,162 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=268286.6666666667, ans=0.0 +2024-07-29 10:00:05,969 INFO [train.py:1114] (2/4) Epoch 20, batch 7000, loss[loss=0.1932, simple_loss=0.2905, pruned_loss=0.04796, over 4644.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2612, pruned_loss=0.03939, over 938427.06 frames. 
], batch size: 17, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:00:07,039 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.18 vs. limit=15.0 +2024-07-29 10:00:39,218 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.95 vs. limit=15.0 +2024-07-29 10:00:49,316 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=268340.0, ans=0.125 +2024-07-29 10:00:56,374 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=268353.3333333333, ans=0.015 +2024-07-29 10:01:12,976 INFO [train.py:1114] (2/4) Epoch 20, batch 7050, loss[loss=0.1722, simple_loss=0.2723, pruned_loss=0.03603, over 4741.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2617, pruned_loss=0.03924, over 941730.61 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:01:15,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=268366.6666666667, ans=0.025 +2024-07-29 10:01:17,958 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=268366.6666666667, ans=0.1 +2024-07-29 10:01:29,511 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=268393.3333333333, ans=0.0 +2024-07-29 10:01:30,895 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=268393.3333333333, ans=0.125 +2024-07-29 10:01:35,037 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=268406.6666666667, ans=0.125 +2024-07-29 10:01:45,957 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.713e+01 6.192e+01 7.192e+01 1.067e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 10:01:49,631 INFO [train.py:1114] (2/4) Epoch 20, batch 7100, loss[loss=0.1722, simple_loss=0.274, pruned_loss=0.03517, over 4800.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2622, pruned_loss=0.03956, over 936703.39 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:01:52,355 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=268433.3333333333, ans=0.0 +2024-07-29 10:01:58,305 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.37 vs. limit=22.5 +2024-07-29 10:02:04,592 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=268446.6666666667, ans=0.025 +2024-07-29 10:02:05,246 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=268446.6666666667, ans=0.07 +2024-07-29 10:02:08,818 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-07-29 10:02:31,663 INFO [train.py:1114] (2/4) Epoch 20, batch 7150, loss[loss=0.1691, simple_loss=0.2608, pruned_loss=0.03871, over 4458.00 frames. 
], tot_loss[loss=0.17, simple_loss=0.261, pruned_loss=0.03943, over 937369.71 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0 +2024-07-29 10:02:43,784 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=268500.0, ans=0.125 +2024-07-29 10:02:46,892 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=268500.0, ans=0.04949747468305833 +2024-07-29 10:02:50,675 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=268513.3333333333, ans=0.125 +2024-07-29 10:02:53,350 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=268513.3333333333, ans=0.0 +2024-07-29 10:03:03,807 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.61 vs. limit=15.0 +2024-07-29 10:03:04,944 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=268540.0, ans=0.0 +2024-07-29 10:03:08,424 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=268540.0, ans=0.2 +2024-07-29 10:03:33,079 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.350e+01 5.664e+01 6.258e+01 7.035e+01 1.192e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 10:03:59,859 INFO [train.py:1114] (2/4) Epoch 20, batch 7200, loss[loss=0.1584, simple_loss=0.2518, pruned_loss=0.03251, over 4797.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2616, pruned_loss=0.03959, over 937813.41 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:04:20,362 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=268566.6666666667, ans=0.0 +2024-07-29 10:05:05,560 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=268566.6666666667, ans=0.125 +2024-07-29 10:05:15,827 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=268566.6666666667, ans=0.125 +2024-07-29 10:05:53,047 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.17 vs. limit=15.0 +2024-07-29 10:07:50,916 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=268593.3333333333, ans=0.2 +2024-07-29 10:07:54,213 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.88 vs. limit=22.5 +2024-07-29 10:09:48,455 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=268620.0, ans=0.125 +2024-07-29 10:09:49,563 INFO [train.py:1114] (2/4) Epoch 20, batch 7250, loss[loss=0.1454, simple_loss=0.2394, pruned_loss=0.02565, over 4851.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2601, pruned_loss=0.03866, over 939383.02 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:10:13,938 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. 
limit=15.0 +2024-07-29 10:10:16,414 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.67 vs. limit=22.5 +2024-07-29 10:10:23,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=268646.6666666667, ans=0.125 +2024-07-29 10:11:35,253 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+01 5.763e+01 6.435e+01 7.253e+01 9.940e+01, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 10:11:35,987 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:12:08,051 INFO [train.py:1114] (2/4) Epoch 20, batch 7300, loss[loss=0.1548, simple_loss=0.2435, pruned_loss=0.03301, over 4863.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2601, pruned_loss=0.03865, over 939134.63 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:12:25,873 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=268700.0, ans=0.125 +2024-07-29 10:14:10,637 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=268700.0, ans=0.125 +2024-07-29 10:17:48,706 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=268726.6666666667, ans=0.125 +2024-07-29 10:19:38,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=268766.6666666667, ans=0.0 +2024-07-29 10:19:39,279 INFO [train.py:1114] (2/4) Epoch 20, batch 7350, loss[loss=0.1439, simple_loss=0.2283, pruned_loss=0.02973, over 4639.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2605, pruned_loss=0.039, over 938926.51 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:19:40,164 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=268766.6666666667, ans=0.0 +2024-07-29 10:19:42,343 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.18 vs. 
limit=15.0 +2024-07-29 10:19:43,865 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=268766.6666666667, ans=0.125 +2024-07-29 10:19:48,939 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=268780.0, ans=0.125 +2024-07-29 10:19:49,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=268780.0, ans=0.05 +2024-07-29 10:19:52,056 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:21:04,111 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=268806.6666666667, ans=0.2 +2024-07-29 10:21:05,244 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268806.6666666667, ans=0.1 +2024-07-29 10:22:02,087 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=268820.0, ans=0.1 +2024-07-29 10:22:45,956 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.764e+01 6.635e+01 7.838e+01 1.063e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-29 10:22:55,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=268820.0, ans=0.0 +2024-07-29 10:22:59,770 INFO [train.py:1114] (2/4) Epoch 20, batch 7400, loss[loss=0.1735, simple_loss=0.2729, pruned_loss=0.03705, over 4698.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2609, pruned_loss=0.03933, over 940291.53 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:23:34,655 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=268833.3333333333, ans=0.1 +2024-07-29 10:23:37,634 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=268833.3333333333, ans=0.125 +2024-07-29 10:23:42,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=268846.6666666667, ans=0.125 +2024-07-29 10:23:47,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=268860.0, ans=0.0 +2024-07-29 10:23:55,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=268873.3333333333, ans=0.125 +2024-07-29 10:23:58,578 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=268873.3333333333, ans=0.2 +2024-07-29 10:24:05,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=268900.0, ans=0.125 +2024-07-29 10:24:05,869 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=268900.0, ans=0.07 +2024-07-29 10:24:06,271 INFO [train.py:1114] (2/4) Epoch 20, batch 7450, loss[loss=0.1767, simple_loss=0.2582, pruned_loss=0.04758, over 4606.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2596, pruned_loss=0.03925, over 937797.06 frames. 
], batch size: 11, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:24:08,454 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.86 vs. limit=6.0 +2024-07-29 10:25:30,951 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=268926.6666666667, ans=0.0 +2024-07-29 10:25:33,484 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=268926.6666666667, ans=0.1 +2024-07-29 10:25:37,408 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=268940.0, ans=0.0 +2024-07-29 10:25:38,945 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268940.0, ans=0.1 +2024-07-29 10:25:44,552 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=268953.3333333333, ans=0.125 +2024-07-29 10:25:46,121 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.622e+01 6.334e+01 7.188e+01 1.210e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 10:25:51,753 INFO [train.py:1114] (2/4) Epoch 20, batch 7500, loss[loss=0.2198, simple_loss=0.3156, pruned_loss=0.06197, over 3366.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2604, pruned_loss=0.03934, over 935663.49 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:25:52,598 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=268966.6666666667, ans=0.1 +2024-07-29 10:25:57,300 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=268966.6666666667, ans=0.05 +2024-07-29 10:26:02,398 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.86 vs. limit=15.0 +2024-07-29 10:26:02,674 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=268980.0, ans=0.025 +2024-07-29 10:26:06,736 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268993.3333333333, ans=0.1 +2024-07-29 10:26:24,837 INFO [train.py:1114] (2/4) Epoch 20, batch 7550, loss[loss=0.1816, simple_loss=0.2763, pruned_loss=0.04346, over 4594.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2628, pruned_loss=0.04038, over 935362.87 frames. ], batch size: 17, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:26:31,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=269046.6666666667, ans=0.125 +2024-07-29 10:26:48,315 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.96 vs. limit=15.0 +2024-07-29 10:27:06,368 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.698e+01 6.238e+01 6.861e+01 9.805e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-29 10:27:14,584 INFO [train.py:1114] (2/4) Epoch 20, batch 7600, loss[loss=0.1787, simple_loss=0.2774, pruned_loss=0.04004, over 4807.00 frames. 
], tot_loss[loss=0.1712, simple_loss=0.2622, pruned_loss=0.04015, over 937333.90 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:27:19,953 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=269100.0, ans=0.07 +2024-07-29 10:27:23,095 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=269113.3333333333, ans=0.125 +2024-07-29 10:27:31,632 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=269126.6666666667, ans=0.025 +2024-07-29 10:27:56,965 INFO [train.py:1114] (2/4) Epoch 20, batch 7650, loss[loss=0.1713, simple_loss=0.2433, pruned_loss=0.04963, over 4942.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2626, pruned_loss=0.04013, over 936255.10 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:28:01,266 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=269166.6666666667, ans=0.1 +2024-07-29 10:28:04,428 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269180.0, ans=0.125 +2024-07-29 10:28:56,739 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. limit=15.0 +2024-07-29 10:29:09,506 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.550e+01 6.272e+01 7.437e+01 1.310e+02, threshold=1.254e+02, percent-clipped=1.0 +2024-07-29 10:29:13,704 INFO [train.py:1114] (2/4) Epoch 20, batch 7700, loss[loss=0.1835, simple_loss=0.2725, pruned_loss=0.0473, over 4693.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.263, pruned_loss=0.04008, over 934615.11 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:29:18,880 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=269233.3333333333, ans=0.2 +2024-07-29 10:29:18,934 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=269233.3333333333, ans=0.1 +2024-07-29 10:29:24,798 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=269246.6666666667, ans=0.1 +2024-07-29 10:29:26,058 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=269260.0, ans=0.0 +2024-07-29 10:29:33,820 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=269273.3333333333, ans=0.125 +2024-07-29 10:29:33,881 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=269273.3333333333, ans=0.125 +2024-07-29 10:29:34,627 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.49 vs. limit=15.0 +2024-07-29 10:29:41,419 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.48 vs. 
limit=22.5 +2024-07-29 10:29:47,496 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=269286.6666666667, ans=0.125 +2024-07-29 10:29:49,380 INFO [train.py:1114] (2/4) Epoch 20, batch 7750, loss[loss=0.1551, simple_loss=0.2448, pruned_loss=0.03273, over 4943.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2632, pruned_loss=0.04019, over 935605.17 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:29:51,558 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=269300.0, ans=0.2 +2024-07-29 10:29:54,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.79 vs. limit=15.0 +2024-07-29 10:30:28,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.687e+01 6.141e+01 6.601e+01 8.666e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 10:30:31,967 INFO [train.py:1114] (2/4) Epoch 20, batch 7800, loss[loss=0.1874, simple_loss=0.2765, pruned_loss=0.04911, over 4663.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2633, pruned_loss=0.03996, over 937355.60 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:30:32,782 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269366.6666666667, ans=0.1 +2024-07-29 10:30:34,799 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=269366.6666666667, ans=0.125 +2024-07-29 10:30:45,526 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=269366.6666666667, ans=0.125 +2024-07-29 10:30:55,638 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=269380.0, ans=0.2 +2024-07-29 10:31:21,219 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.39 vs. limit=15.0 +2024-07-29 10:31:37,967 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=269406.6666666667, ans=0.5 +2024-07-29 10:31:38,019 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=269406.6666666667, ans=0.025 +2024-07-29 10:31:42,811 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0 +2024-07-29 10:31:46,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=269420.0, ans=0.125 +2024-07-29 10:31:47,986 INFO [train.py:1114] (2/4) Epoch 20, batch 7850, loss[loss=0.1352, simple_loss=0.228, pruned_loss=0.02122, over 4576.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2628, pruned_loss=0.04014, over 936260.66 frames. 
], batch size: 10, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:31:48,149 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=269433.3333333333, ans=0.025 +2024-07-29 10:31:50,057 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=269433.3333333333, ans=0.125 +2024-07-29 10:31:56,816 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=269446.6666666667, ans=0.2 +2024-07-29 10:31:59,548 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269446.6666666667, ans=0.1 +2024-07-29 10:32:00,687 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.55 vs. limit=15.0 +2024-07-29 10:32:01,840 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=269460.0, ans=0.125 +2024-07-29 10:32:02,075 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.59 vs. limit=22.5 +2024-07-29 10:32:10,661 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=269473.3333333333, ans=0.2 +2024-07-29 10:32:11,364 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=269473.3333333333, ans=0.125 +2024-07-29 10:32:12,271 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-07-29 10:32:13,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=269473.3333333333, ans=0.0 +2024-07-29 10:32:18,448 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 5.765e+01 6.588e+01 7.311e+01 1.076e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 10:32:21,193 INFO [train.py:1114] (2/4) Epoch 20, batch 7900, loss[loss=0.1554, simple_loss=0.2455, pruned_loss=0.0326, over 4875.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2645, pruned_loss=0.04063, over 932987.71 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:32:36,779 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:33:09,093 INFO [train.py:1114] (2/4) Epoch 20, batch 7950, loss[loss=0.2124, simple_loss=0.2889, pruned_loss=0.068, over 3394.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2636, pruned_loss=0.04003, over 935004.99 frames. 
], batch size: 35, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:33:12,468 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=269566.6666666667, ans=10.0 +2024-07-29 10:33:13,309 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=269566.6666666667, ans=0.0 +2024-07-29 10:33:26,966 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=269593.3333333333, ans=0.0 +2024-07-29 10:33:30,443 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=269593.3333333333, ans=22.5 +2024-07-29 10:33:30,898 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=269593.3333333333, ans=0.5 +2024-07-29 10:33:36,104 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=269606.6666666667, ans=0.125 +2024-07-29 10:33:40,528 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=269620.0, ans=0.0 +2024-07-29 10:33:44,270 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.756e+01 6.342e+01 7.191e+01 1.019e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 10:33:44,480 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:33:45,842 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=269620.0, ans=0.125 +2024-07-29 10:33:46,624 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.47 vs. limit=15.0 +2024-07-29 10:33:46,953 INFO [train.py:1114] (2/4) Epoch 20, batch 8000, loss[loss=0.1638, simple_loss=0.2574, pruned_loss=0.03511, over 4607.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03944, over 934157.84 frames. ], batch size: 11, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:34:18,689 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=269660.0, ans=0.0 +2024-07-29 10:34:19,165 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.76 vs. limit=15.0 +2024-07-29 10:34:29,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-07-29 10:34:38,121 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=269686.6666666667, ans=0.125 +2024-07-29 10:34:48,978 INFO [train.py:1114] (2/4) Epoch 20, batch 8050, loss[loss=0.1653, simple_loss=0.2732, pruned_loss=0.02869, over 4808.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2625, pruned_loss=0.0395, over 933756.74 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:34:54,300 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.33 vs. 
limit=15.0 +2024-07-29 10:35:02,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=269713.3333333333, ans=0.125 +2024-07-29 10:35:10,389 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=269740.0, ans=0.125 +2024-07-29 10:35:15,016 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-07-29 10:35:15,562 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=269740.0, ans=0.125 +2024-07-29 10:35:18,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=269753.3333333333, ans=0.0 +2024-07-29 10:35:20,666 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=269753.3333333333, ans=0.0 +2024-07-29 10:35:21,090 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.839e+01 5.695e+01 6.260e+01 6.907e+01 1.067e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 10:35:35,908 INFO [train.py:1114] (2/4) Epoch 20, batch 8100, loss[loss=0.1802, simple_loss=0.2747, pruned_loss=0.04287, over 4811.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.263, pruned_loss=0.03973, over 933416.59 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:35:43,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=269780.0, ans=0.125 +2024-07-29 10:35:48,085 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0 +2024-07-29 10:36:05,907 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=269820.0, ans=0.2 +2024-07-29 10:36:08,452 INFO [train.py:1114] (2/4) Epoch 20, batch 8150, loss[loss=0.1686, simple_loss=0.2811, pruned_loss=0.02804, over 4807.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.262, pruned_loss=0.03906, over 936966.35 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:36:37,275 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=269860.0, ans=0.0 +2024-07-29 10:36:52,373 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.965e+01 5.540e+01 6.167e+01 6.859e+01 1.030e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 10:36:53,370 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=269886.6666666667, ans=0.0 +2024-07-29 10:36:55,164 INFO [train.py:1114] (2/4) Epoch 20, batch 8200, loss[loss=0.1835, simple_loss=0.2737, pruned_loss=0.0467, over 4813.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.262, pruned_loss=0.03858, over 938078.58 frames. 
], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:36:57,897 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=269900.0, ans=0.2 +2024-07-29 10:36:58,567 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=269900.0, ans=0.2 +2024-07-29 10:37:02,520 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269913.3333333333, ans=0.1 +2024-07-29 10:37:04,583 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=269913.3333333333, ans=0.1 +2024-07-29 10:37:06,895 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.92 vs. limit=10.0 +2024-07-29 10:37:07,794 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:37:10,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=269926.6666666667, ans=0.125 +2024-07-29 10:37:12,534 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:38:03,180 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.99 vs. limit=15.0 +2024-07-29 10:38:11,146 INFO [train.py:1114] (2/4) Epoch 20, batch 8250, loss[loss=0.1662, simple_loss=0.2651, pruned_loss=0.03362, over 4892.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2607, pruned_loss=0.0382, over 938051.52 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:38:28,936 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-07-29 10:38:31,622 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.33 vs. limit=12.0 +2024-07-29 10:38:41,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=270006.6666666667, ans=0.1 +2024-07-29 10:38:44,289 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270006.6666666667, ans=0.1 +2024-07-29 10:38:45,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=270020.0, ans=0.125 +2024-07-29 10:38:49,955 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.571e+01 5.582e+01 6.021e+01 6.658e+01 1.061e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 10:38:52,536 INFO [train.py:1114] (2/4) Epoch 20, batch 8300, loss[loss=0.1772, simple_loss=0.2704, pruned_loss=0.04199, over 4908.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2621, pruned_loss=0.03865, over 938440.14 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:39:01,777 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.29 vs. 
limit=15.0 +2024-07-29 10:39:07,788 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. limit=15.0 +2024-07-29 10:39:12,696 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270060.0, ans=0.1 +2024-07-29 10:39:15,843 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=270073.3333333333, ans=0.0 +2024-07-29 10:39:28,019 INFO [train.py:1114] (2/4) Epoch 20, batch 8350, loss[loss=0.1912, simple_loss=0.2856, pruned_loss=0.04839, over 4795.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.262, pruned_loss=0.03877, over 941212.65 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:39:30,084 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=270100.0, ans=0.125 +2024-07-29 10:39:30,704 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270100.0, ans=0.125 +2024-07-29 10:39:34,103 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=270113.3333333333, ans=0.125 +2024-07-29 10:39:37,806 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=270113.3333333333, ans=0.125 +2024-07-29 10:39:39,321 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-07-29 10:39:53,076 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=270140.0, ans=0.07 +2024-07-29 10:39:56,762 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=270153.3333333333, ans=0.0 +2024-07-29 10:39:57,946 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.732e+01 6.400e+01 7.266e+01 9.706e+01, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 10:40:00,646 INFO [train.py:1114] (2/4) Epoch 20, batch 8400, loss[loss=0.1349, simple_loss=0.2201, pruned_loss=0.02489, over 4776.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2618, pruned_loss=0.03888, over 939989.30 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:40:00,859 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:40:26,052 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=270193.3333333333, ans=0.125 +2024-07-29 10:40:33,125 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.35 vs. limit=22.5 +2024-07-29 10:40:41,432 INFO [train.py:1114] (2/4) Epoch 20, batch 8450, loss[loss=0.1941, simple_loss=0.2875, pruned_loss=0.05034, over 4802.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2635, pruned_loss=0.03941, over 939137.64 frames. 
], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:40:43,547 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270233.3333333333, ans=0.125 +2024-07-29 10:40:53,372 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=270246.6666666667, ans=0.025 +2024-07-29 10:41:10,262 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.98 vs. limit=10.0 +2024-07-29 10:41:12,719 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.700e+01 6.559e+01 7.490e+01 1.068e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 10:41:22,380 INFO [train.py:1114] (2/4) Epoch 20, batch 8500, loss[loss=0.16, simple_loss=0.2444, pruned_loss=0.03781, over 4610.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2622, pruned_loss=0.03902, over 939084.01 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:41:23,737 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=270300.0, ans=0.125 +2024-07-29 10:41:31,348 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=270313.3333333333, ans=0.125 +2024-07-29 10:41:36,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=270326.6666666667, ans=0.0 +2024-07-29 10:41:40,835 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=270326.6666666667, ans=0.0 +2024-07-29 10:41:42,210 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=270326.6666666667, ans=0.2 +2024-07-29 10:41:56,545 INFO [train.py:1114] (2/4) Epoch 20, batch 8550, loss[loss=0.175, simple_loss=0.2521, pruned_loss=0.04893, over 4800.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2621, pruned_loss=0.03931, over 939736.26 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:42:01,358 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=270366.6666666667, ans=0.2 +2024-07-29 10:42:15,584 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=270393.3333333333, ans=0.0 +2024-07-29 10:42:28,700 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=270420.0, ans=8.0 +2024-07-29 10:42:28,841 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.753e+01 6.246e+01 7.230e+01 1.151e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 10:42:30,774 INFO [train.py:1114] (2/4) Epoch 20, batch 8600, loss[loss=0.1725, simple_loss=0.2661, pruned_loss=0.03945, over 4796.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2616, pruned_loss=0.03903, over 939376.98 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:42:49,714 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.02 vs. limit=15.0 +2024-07-29 10:43:08,642 INFO [train.py:1114] (2/4) Epoch 20, batch 8650, loss[loss=0.1715, simple_loss=0.2715, pruned_loss=0.03573, over 4889.00 frames. 
], tot_loss[loss=0.1698, simple_loss=0.2617, pruned_loss=0.03892, over 940477.76 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:43:11,736 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0 +2024-07-29 10:43:12,231 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270500.0, ans=0.1 +2024-07-29 10:43:22,113 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=270526.6666666667, ans=0.025 +2024-07-29 10:43:26,255 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.90 vs. limit=15.0 +2024-07-29 10:43:30,252 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=270540.0, ans=0.0 +2024-07-29 10:43:31,596 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=270540.0, ans=0.0 +2024-07-29 10:43:38,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=270553.3333333333, ans=0.0 +2024-07-29 10:43:41,996 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.889e+01 6.581e+01 7.578e+01 1.021e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-29 10:43:43,930 INFO [train.py:1114] (2/4) Epoch 20, batch 8700, loss[loss=0.1677, simple_loss=0.2656, pruned_loss=0.03492, over 4760.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2621, pruned_loss=0.03937, over 937964.14 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:43:46,773 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=270566.6666666667, ans=0.0 +2024-07-29 10:43:52,259 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-29 10:44:08,948 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270606.6666666667, ans=0.1 +2024-07-29 10:44:08,960 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=270606.6666666667, ans=0.2 +2024-07-29 10:44:16,437 INFO [train.py:1114] (2/4) Epoch 20, batch 8750, loss[loss=0.1843, simple_loss=0.2765, pruned_loss=0.04599, over 4671.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2619, pruned_loss=0.03916, over 936503.20 frames. 
], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:44:28,209 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=270646.6666666667, ans=0.05 +2024-07-29 10:44:33,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=270646.6666666667, ans=0.5 +2024-07-29 10:44:34,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270660.0, ans=0.1 +2024-07-29 10:44:41,803 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=270673.3333333333, ans=0.1 +2024-07-29 10:44:52,041 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=270686.6666666667, ans=0.0 +2024-07-29 10:44:55,915 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 5.802e+01 6.247e+01 7.031e+01 1.068e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 10:44:57,839 INFO [train.py:1114] (2/4) Epoch 20, batch 8800, loss[loss=0.1683, simple_loss=0.2562, pruned_loss=0.04017, over 4932.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2625, pruned_loss=0.03984, over 937281.29 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:45:26,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=270740.0, ans=0.125 +2024-07-29 10:45:28,077 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.95 vs. limit=22.5 +2024-07-29 10:45:31,574 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=270753.3333333333, ans=0.125 +2024-07-29 10:45:35,523 INFO [train.py:1114] (2/4) Epoch 20, batch 8850, loss[loss=0.2016, simple_loss=0.2908, pruned_loss=0.05617, over 4434.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2626, pruned_loss=0.04004, over 931546.75 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:45:52,249 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=270766.6666666667, ans=0.125 +2024-07-29 10:45:56,130 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.87 vs. limit=15.0 +2024-07-29 10:45:58,435 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=270780.0, ans=0.0 +2024-07-29 10:46:01,602 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:46:05,072 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.98 vs. 
limit=10.0 +2024-07-29 10:46:21,952 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=270806.6666666667, ans=10.0 +2024-07-29 10:46:26,965 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=270820.0, ans=0.125 +2024-07-29 10:46:30,253 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.751e+01 6.544e+01 7.566e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-29 10:46:32,233 INFO [train.py:1114] (2/4) Epoch 20, batch 8900, loss[loss=0.1614, simple_loss=0.2413, pruned_loss=0.04077, over 4945.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2621, pruned_loss=0.03973, over 929139.76 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:46:32,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=270833.3333333333, ans=0.2 +2024-07-29 10:46:35,506 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=270833.3333333333, ans=0.025 +2024-07-29 10:46:50,492 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=270860.0, ans=0.0 +2024-07-29 10:46:54,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=270873.3333333333, ans=0.07 +2024-07-29 10:47:00,586 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=270886.6666666667, ans=0.0 +2024-07-29 10:47:03,696 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.73 vs. limit=15.0 +2024-07-29 10:47:06,575 INFO [train.py:1114] (2/4) Epoch 20, batch 8950, loss[loss=0.18, simple_loss=0.2756, pruned_loss=0.04217, over 4447.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2626, pruned_loss=0.0397, over 929697.08 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:47:12,602 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-29 10:47:13,062 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=270900.0, ans=0.125 +2024-07-29 10:47:24,122 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=270926.6666666667, ans=0.125 +2024-07-29 10:47:47,029 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=270940.0, ans=0.125 +2024-07-29 10:49:17,222 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=270953.3333333333, ans=0.0 +2024-07-29 10:49:33,589 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.554e+01 6.323e+01 6.834e+01 1.028e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 10:49:37,831 INFO [train.py:1114] (2/4) Epoch 20, batch 9000, loss[loss=0.1636, simple_loss=0.246, pruned_loss=0.04057, over 4632.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2614, pruned_loss=0.03955, over 932744.85 frames. 
], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:49:37,831 INFO [train.py:1137] (2/4) Computing validation loss +2024-07-29 10:51:38,765 INFO [train.py:1146] (2/4) Epoch 20, validation: loss=0.1604, simple_loss=0.262, pruned_loss=0.02938, over 944034.00 frames. +2024-07-29 10:51:38,769 INFO [train.py:1147] (2/4) Maximum memory allocated so far is 4179MB +2024-07-29 10:51:48,351 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.33 vs. limit=15.0 +2024-07-29 10:52:07,629 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=270980.0, ans=0.2 +2024-07-29 10:52:08,984 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=270980.0, ans=0.125 +2024-07-29 10:52:20,941 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=270993.3333333333, ans=0.125 +2024-07-29 10:52:41,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=270993.3333333333, ans=0.0 +2024-07-29 10:53:25,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271020.0, ans=0.1 +2024-07-29 10:53:27,080 INFO [train.py:1114] (2/4) Epoch 20, batch 9050, loss[loss=0.1558, simple_loss=0.2367, pruned_loss=0.03741, over 4554.00 frames. ], tot_loss[loss=0.17, simple_loss=0.261, pruned_loss=0.03948, over 933336.12 frames. ], batch size: 10, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:53:27,922 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=271033.3333333333, ans=0.125 +2024-07-29 10:53:28,751 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=271033.3333333333, ans=0.0 +2024-07-29 10:53:37,046 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=271033.3333333333, ans=0.0 +2024-07-29 10:53:37,148 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=271033.3333333333, ans=0.5 +2024-07-29 10:53:39,781 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=271046.6666666667, ans=0.125 +2024-07-29 10:53:40,308 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=271046.6666666667, ans=0.125 +2024-07-29 10:53:52,280 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=271046.6666666667, ans=0.125 +2024-07-29 10:54:15,532 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=271073.3333333333, ans=0.09899494936611666 +2024-07-29 10:54:22,474 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.730e+01 6.143e+01 7.007e+01 1.074e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 10:54:28,585 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.86 vs. 
limit=15.0 +2024-07-29 10:54:30,291 INFO [train.py:1114] (2/4) Epoch 20, batch 9100, loss[loss=0.1778, simple_loss=0.2746, pruned_loss=0.04052, over 4934.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2604, pruned_loss=0.03913, over 936161.32 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:54:47,501 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=271113.3333333333, ans=0.125 +2024-07-29 10:54:50,899 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=271113.3333333333, ans=0.125 +2024-07-29 10:54:57,612 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=271126.6666666667, ans=0.2 +2024-07-29 10:54:57,652 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=271126.6666666667, ans=0.1 +2024-07-29 10:55:13,878 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271153.3333333333, ans=0.1 +2024-07-29 10:55:14,907 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.96 vs. limit=22.5 +2024-07-29 10:55:19,599 INFO [train.py:1114] (2/4) Epoch 20, batch 9150, loss[loss=0.1687, simple_loss=0.2696, pruned_loss=0.03392, over 4812.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2612, pruned_loss=0.03917, over 935011.77 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:55:19,753 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=271166.6666666667, ans=0.05 +2024-07-29 10:55:22,239 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=271166.6666666667, ans=0.125 +2024-07-29 10:55:31,931 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=271180.0, ans=0.125 +2024-07-29 10:56:35,320 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=271206.6666666667, ans=0.125 +2024-07-29 10:56:52,955 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.764e+01 6.206e+01 7.056e+01 9.843e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 10:56:53,887 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.50 vs. limit=15.0 +2024-07-29 10:56:55,340 INFO [train.py:1114] (2/4) Epoch 20, batch 9200, loss[loss=0.1587, simple_loss=0.2452, pruned_loss=0.03605, over 4857.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.261, pruned_loss=0.03915, over 936921.00 frames. 
], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:57:14,045 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271260.0, ans=0.1 +2024-07-29 10:57:25,407 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=271286.6666666667, ans=0.0 +2024-07-29 10:57:29,685 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=12.0 +2024-07-29 10:57:31,173 INFO [train.py:1114] (2/4) Epoch 20, batch 9250, loss[loss=0.1929, simple_loss=0.2855, pruned_loss=0.0501, over 4629.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2606, pruned_loss=0.03892, over 937563.08 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:57:34,011 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.69 vs. limit=22.5 +2024-07-29 10:57:42,583 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.73 vs. limit=15.0 +2024-07-29 10:57:54,110 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271326.6666666667, ans=0.1 +2024-07-29 10:57:54,912 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.50 vs. limit=12.0 +2024-07-29 10:57:57,286 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=271340.0, ans=0.125 +2024-07-29 10:58:04,490 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=271353.3333333333, ans=0.0 +2024-07-29 10:58:11,146 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.724e+01 6.498e+01 7.478e+01 1.094e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 10:58:14,102 INFO [train.py:1114] (2/4) Epoch 20, batch 9300, loss[loss=0.1504, simple_loss=0.2453, pruned_loss=0.02777, over 4778.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2604, pruned_loss=0.03895, over 937865.08 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:58:24,485 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=271366.6666666667, ans=0.025 +2024-07-29 10:58:46,565 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=271380.0, ans=0.0 +2024-07-29 10:58:48,890 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=271380.0, ans=0.125 +2024-07-29 10:58:54,627 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=271393.3333333333, ans=0.125 +2024-07-29 10:59:09,407 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-07-29 11:01:37,034 INFO [train.py:1114] (2/4) Epoch 20, batch 9350, loss[loss=0.1585, simple_loss=0.2377, pruned_loss=0.03964, over 4795.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2609, pruned_loss=0.03892, over 934750.33 frames. 
], batch size: 11, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 11:02:04,530 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=271433.3333333333, ans=0.0 +2024-07-29 11:02:13,285 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271446.6666666667, ans=0.1 +2024-07-29 11:02:58,446 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=271473.3333333333, ans=0.125 +2024-07-29 11:03:06,656 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.48 vs. limit=15.0 +2024-07-29 11:03:22,496 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.751e+01 6.401e+01 7.888e+01 1.207e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 11:03:24,397 INFO [train.py:1114] (2/4) Epoch 20, batch 9400, loss[loss=0.1641, simple_loss=0.2658, pruned_loss=0.03116, over 4694.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2611, pruned_loss=0.03927, over 932605.05 frames. ], batch size: 13, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:03:25,761 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=271500.0, ans=0.125 +2024-07-29 11:03:29,184 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=271500.0, ans=0.0 +2024-07-29 11:03:39,794 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=271526.6666666667, ans=0.0 +2024-07-29 11:03:40,472 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=271526.6666666667, ans=0.125 +2024-07-29 11:03:44,581 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=271540.0, ans=0.125 +2024-07-29 11:03:48,903 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=271540.0, ans=0.125 +2024-07-29 11:03:59,255 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=271553.3333333333, ans=0.2 +2024-07-29 11:04:02,418 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=271566.6666666667, ans=0.0 +2024-07-29 11:04:02,881 INFO [train.py:1114] (2/4) Epoch 20, batch 9450, loss[loss=0.1381, simple_loss=0.218, pruned_loss=0.02914, over 4801.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2606, pruned_loss=0.03899, over 931514.70 frames. 
], batch size: 11, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:04:06,937 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271566.6666666667, ans=0.1 +2024-07-29 11:04:11,844 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271580.0, ans=0.125 +2024-07-29 11:04:13,198 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=271580.0, ans=0.2 +2024-07-29 11:04:33,861 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.763e+01 6.160e+01 6.815e+01 1.077e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 11:04:35,850 INFO [train.py:1114] (2/4) Epoch 20, batch 9500, loss[loss=0.1368, simple_loss=0.2283, pruned_loss=0.02261, over 4707.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2616, pruned_loss=0.03912, over 933623.08 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:04:50,081 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=271660.0, ans=10.0 +2024-07-29 11:04:55,458 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=271673.3333333333, ans=0.125 +2024-07-29 11:04:58,365 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=271673.3333333333, ans=0.2 +2024-07-29 11:05:07,548 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:05:08,684 INFO [train.py:1114] (2/4) Epoch 20, batch 9550, loss[loss=0.165, simple_loss=0.2472, pruned_loss=0.04145, over 4772.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03949, over 931291.77 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:05:09,343 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=271700.0, ans=0.125 +2024-07-29 11:05:39,597 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=271726.6666666667, ans=0.0 +2024-07-29 11:05:41,181 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=271726.6666666667, ans=0.2 +2024-07-29 11:05:42,969 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-29 11:06:00,808 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.730e+01 6.530e+01 7.456e+01 1.001e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 11:06:02,898 INFO [train.py:1114] (2/4) Epoch 20, batch 9600, loss[loss=0.2299, simple_loss=0.3124, pruned_loss=0.07372, over 3515.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2617, pruned_loss=0.0391, over 930950.87 frames. 
], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:13,502 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=271780.0, ans=0.0 +2024-07-29 11:06:16,633 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=271793.3333333333, ans=0.0 +2024-07-29 11:06:39,303 INFO [train.py:1114] (2/4) Epoch 20, batch 9650, loss[loss=0.1779, simple_loss=0.2766, pruned_loss=0.03961, over 4844.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.262, pruned_loss=0.03945, over 927112.77 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:39,342 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=271833.3333333333, ans=0.125 +2024-07-29 11:06:42,529 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=271833.3333333333, ans=0.125 +2024-07-29 11:06:43,876 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=271833.3333333333, ans=0.125 +2024-07-29 11:06:44,699 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.90 vs. limit=15.0 +2024-07-29 11:06:45,063 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=271846.6666666667, ans=0.025 +2024-07-29 11:06:45,723 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=271846.6666666667, ans=0.125 +2024-07-29 11:06:50,086 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=271846.6666666667, ans=0.125 +2024-07-29 11:07:05,664 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=271860.0, ans=0.125 +2024-07-29 11:07:15,426 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=271873.3333333333, ans=0.125 +2024-07-29 11:07:17,891 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=271886.6666666667, ans=0.0 +2024-07-29 11:07:21,938 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.811e+01 6.589e+01 7.580e+01 1.190e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 11:07:22,959 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=7.73 vs. limit=15.0 +2024-07-29 11:07:23,846 INFO [train.py:1114] (2/4) Epoch 20, batch 9700, loss[loss=0.1907, simple_loss=0.2838, pruned_loss=0.04881, over 4297.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2625, pruned_loss=0.03976, over 925093.08 frames. ], batch size: 25, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:07:35,004 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.96 vs. 
limit=10.0 +2024-07-29 11:07:48,622 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=271926.6666666667, ans=0.125 +2024-07-29 11:07:52,305 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=271940.0, ans=0.125 +2024-07-29 11:07:56,617 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=271940.0, ans=0.2 +2024-07-29 11:08:09,760 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=271953.3333333333, ans=0.125 +2024-07-29 11:08:12,201 INFO [train.py:1114] (2/4) Epoch 20, batch 9750, loss[loss=0.1991, simple_loss=0.2863, pruned_loss=0.05596, over 4668.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2617, pruned_loss=0.0395, over 925740.32 frames. ], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:08:14,279 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=271966.6666666667, ans=0.1 +2024-07-29 11:08:18,970 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=271980.0, ans=0.125 +2024-07-29 11:09:06,958 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.621e+01 6.289e+01 7.582e+01 9.528e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 11:09:10,468 INFO [train.py:1114] (2/4) Epoch 20, batch 9800, loss[loss=0.1718, simple_loss=0.2658, pruned_loss=0.03888, over 4705.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2613, pruned_loss=0.03961, over 924948.43 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:09:13,166 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=272033.3333333333, ans=0.125 +2024-07-29 11:09:14,313 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272033.3333333333, ans=0.1 +2024-07-29 11:09:28,211 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.76 vs. limit=15.0 +2024-07-29 11:10:01,871 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=272073.3333333333, ans=0.5 +2024-07-29 11:10:08,061 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=272086.6666666667, ans=0.125 +2024-07-29 11:10:12,339 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=272086.6666666667, ans=0.1 +2024-07-29 11:10:13,521 INFO [train.py:1114] (2/4) Epoch 20, batch 9850, loss[loss=0.1582, simple_loss=0.2534, pruned_loss=0.03147, over 4899.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2617, pruned_loss=0.03997, over 927091.30 frames. 
], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:14,924 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=272100.0, ans=0.125 +2024-07-29 11:10:24,281 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=272113.3333333333, ans=0.0 +2024-07-29 11:10:33,974 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272140.0, ans=0.1 +2024-07-29 11:10:43,038 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=272153.3333333333, ans=0.125 +2024-07-29 11:10:43,065 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=272153.3333333333, ans=0.0 +2024-07-29 11:10:43,567 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.014e+01 6.927e+01 8.025e+01 1.186e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-29 11:10:44,902 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=272166.6666666667, ans=0.125 +2024-07-29 11:10:45,487 INFO [train.py:1114] (2/4) Epoch 20, batch 9900, loss[loss=0.1462, simple_loss=0.2277, pruned_loss=0.0323, over 4845.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.263, pruned_loss=0.04083, over 926388.48 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:11:09,127 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.92 vs. limit=22.5 +2024-07-29 11:11:10,080 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=272220.0, ans=0.125 +2024-07-29 11:11:10,250 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=272220.0, ans=0.07 +2024-07-29 11:11:16,893 INFO [train.py:1114] (2/4) Epoch 20, batch 9950, loss[loss=0.1622, simple_loss=0.2472, pruned_loss=0.03859, over 4806.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2639, pruned_loss=0.04089, over 928775.19 frames. ], batch size: 11, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:11:53,344 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=272273.3333333333, ans=0.125 +2024-07-29 11:12:02,014 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=272286.6666666667, ans=0.0 +2024-07-29 11:12:02,410 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.836e+01 6.511e+01 7.365e+01 1.166e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 11:12:04,274 INFO [train.py:1114] (2/4) Epoch 20, batch 10000, loss[loss=0.1542, simple_loss=0.2634, pruned_loss=0.02255, over 4635.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.266, pruned_loss=0.04123, over 925900.05 frames. 
], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:12:12,376 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=272300.0, ans=0.125 +2024-07-29 11:12:44,750 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=272353.3333333333, ans=0.125 +2024-07-29 11:12:48,749 INFO [train.py:1114] (2/4) Epoch 20, batch 10050, loss[loss=0.1864, simple_loss=0.2672, pruned_loss=0.05283, over 3426.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.27, pruned_loss=0.04343, over 913667.20 frames. ], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:13:20,973 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=272393.3333333333, ans=0.1 +2024-07-29 11:14:42,825 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.13 vs. limit=22.5 +2024-07-29 11:14:45,433 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.969e+01 6.772e+01 7.755e+01 1.002e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 11:14:47,459 INFO [train.py:1114] (2/4) Epoch 20, batch 10100, loss[loss=0.2109, simple_loss=0.2845, pruned_loss=0.06869, over 3271.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.274, pruned_loss=0.04741, over 859725.93 frames. ], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:14:58,744 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=272446.6666666667, ans=0.05 +2024-07-29 11:15:07,792 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:15:15,377 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=272486.6666666667, ans=0.0 +2024-07-29 11:15:18,845 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-07-29 11:15:19,709 INFO [train.py:1114] (2/4) Epoch 20, batch 10150, loss[loss=0.2103, simple_loss=0.2941, pruned_loss=0.06328, over 3282.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2762, pruned_loss=0.05005, over 820000.41 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:15:29,974 INFO [scaling.py:1120] (2/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:15:31,568 INFO [scaling.py:1024] (2/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.52 vs. 
limit=15.0
+2024-07-29 11:15:38,118 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=272526.6666666667, ans=0.09899494936611666
+2024-07-29 11:15:45,566 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=272540.0, ans=0.125
+2024-07-29 11:15:47,959 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=272540.0, ans=0.025
+2024-07-29 11:15:53,611 WARNING [optim.py:487] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.562e+01 6.747e+01 7.203e+01 7.565e+01 9.241e+01, threshold=1.441e+02, percent-clipped=0.0
+2024-07-29 11:15:57,600 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272566.6666666667, ans=0.1
+2024-07-29 11:15:58,185 INFO [train.py:1114] (2/4) Epoch 20, batch 10200, loss[loss=0.1889, simple_loss=0.2697, pruned_loss=0.05403, over 3369.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2788, pruned_loss=0.05262, over 788859.99 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0
+2024-07-29 11:15:58,301 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=272566.6666666667, ans=0.125
+2024-07-29 11:19:20,734 INFO [scaling.py:214] (2/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=272593.3333333333, ans=0.025
+2024-07-29 11:19:22,635 INFO [train.py:1387] (2/4) Done!
diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-3 b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-3
new file mode 100644
index 0000000000000000000000000000000000000000..d48771ddc3a43fbfbfc2dc50b70b3f90f6542f35
--- /dev/null
+++ b/zipformer/pretrained/non_ctc/non_causal/exp/log/log-train-2024-07-27-09-10-10-3
@@ -0,0 +1,28855 @@
+2024-07-27 09:10:10,872 INFO [train.py:1182] (3/4) Training started
+2024-07-27 09:10:10,873 INFO [train.py:1192] (3/4) Device: cuda:3
+2024-07-27 09:10:10,875 INFO [train.py:1210] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'ignore_id': -1, 'label_smoothing': 0.1, 'warm_step': 2000, 'env_info': {'k2-version': '1.24.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'ff1d435a8d3c4eaa15828a84a7240678a70539a7', 'k2-git-date': 'Fri Feb 23 01:48:38 2024', 'lhotse-version': '1.25.0.dev+git.012532f.clean', 'torch-version': '2.2.1', 'torch-cuda-available': True, 'torch-cuda-version': '12.1', 'python-version': '3.10', 'icefall-git-branch': None, 'icefall-git-sha1': None, 'icefall-git-date': None, 'icefall-path': '/workspace/icefall', 'k2-path': '/opt/conda/lib/python3.10/site-packages/k2/__init__.py', 'lhotse-path': '/opt/conda/lib/python3.10/site-packages/lhotse/__init__.py', 'hostname': 'cdr2658.int.cedar.computecanada.ca', 'IP address': '172.16.146.95'}, 'world_size': 4, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('zipformer/libri/exp'), 'bpe_model': '/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/lang_bpe_500/bpe.model', 'base_lr': 0.045, 'lr_batches': 7500, 'lr_epochs': 3.5, 'ref_duration': 600, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'ctc_loss_scale': 0.2, 'attention_decoder_loss_scale': 0.8, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 4000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,2,3,4,3,2', 'downsampling_factor': '1,2,4,8,4,2', 'feedforward_dim': '512,768,1024,1536,1024,768', 'num_heads': '4,4,4,8,4,4', 'encoder_dim': '192,256,384,512,384,256', 'query_head_dim': '32', 'value_head_dim': '12', 'pos_head_dim': '4', 'pos_dim': 48, 'encoder_unmasked_dim': '192,192,256,256,256,192', 'cnn_module_kernel': '31,31,15,15,15,31', 'decoder_dim': 512, 'joiner_dim': 512, 'attention_decoder_dim': 512, 'attention_decoder_num_layers': 6, 'attention_decoder_attention_dim': 512, 'attention_decoder_num_heads': 8, 'attention_decoder_feedforward_dim': 2048, 'causal': False, 'chunk_size': '16,32,64,-1', 'left_context_frames': '64,128,256,-1', 'use_transducer': True, 'use_ctc': False, 'use_attention_decoder': False, 'full_libri': True, 'mini_libri': False, 'manifest_dir': PosixPath('/home/liqihan/scratch/git/icefall/egs/librispeech/ASR/data/fbank'), 'max_duration': 200.0, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': False, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'sos_id': 1, 'eos_id': 1, 'vocab_size': 500}
+2024-07-27 09:10:10,876 INFO [train.py:1212] (3/4) About to create model
+2024-07-27 09:10:23,765 INFO [train.py:1216] (3/4) Number of model parameters: 65549011
+2024-07-27 09:10:24,724 INFO [train.py:1231] (3/4) Using DDP
+2024-07-27 09:11:00,502 INFO [asr_datamodule.py:893] (3/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts
+2024-07-27 09:11:00,828 INFO [asr_datamodule.py:696] (3/4) Disable MUSAN
+2024-07-27 09:11:00,829 INFO [asr_datamodule.py:714] (3/4) Enable SpecAugment
+2024-07-27 09:11:00,829 INFO [asr_datamodule.py:715] (3/4) Time warp factor: 80
+2024-07-27 09:11:00,829 INFO [asr_datamodule.py:725] (3/4) Num frame mask: 10
+2024-07-27 09:11:00,829 INFO [asr_datamodule.py:738] (3/4) About to create train dataset
+2024-07-27 09:11:00,829 INFO [asr_datamodule.py:765] (3/4) Using DynamicBucketingSampler.
+2024-07-27 09:11:02,439 INFO [asr_datamodule.py:782] (3/4) About to create train dataloader
+2024-07-27 09:11:02,446 INFO [asr_datamodule.py:910] (3/4) About to get dev-clean cuts
+2024-07-27 09:11:02,592 INFO [asr_datamodule.py:917] (3/4) About to get dev-other cuts
+2024-07-27 09:11:03,488 INFO [asr_datamodule.py:813] (3/4) About to create dev dataset
+2024-07-27 09:11:03,810 INFO [asr_datamodule.py:830] (3/4) About to create dev dataloader
+2024-07-27 09:11:03,810 INFO [train.py:1435] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM.
+2024-07-27 09:17:48,867 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=192, metric=39.69 vs.
limit=7.5 +2024-07-27 09:17:49,700 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:17:50,299 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:17:54,282 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:17:55,230 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:18:08,380 INFO [scaling.py:1024] (3/4) Whitening: name=None, num_groups=1, num_channels=288, metric=62.48 vs. limit=5.0 +2024-07-27 09:18:08,567 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:18:09,384 INFO [train.py:1463] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:18:51,919 INFO [train.py:1114] (3/4) Epoch 1, batch 0, loss[loss=7.681, simple_loss=6.999, pruned_loss=6.801, over 4852.00 frames. ], tot_loss[loss=7.681, simple_loss=6.999, pruned_loss=6.801, over 4852.00 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 2.0 +2024-07-27 09:18:51,920 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 09:19:27,483 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=7.631, simple_loss=6.945, pruned_loss=6.846, over 944034.00 frames. +2024-07-27 09:19:27,484 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 3345MB +2024-07-27 09:19:29,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=11.44 vs. limit=5.0 +2024-07-27 09:19:31,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=27.54 vs. limit=7.5 +2024-07-27 09:19:33,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=44.76 vs. limit=7.5 +2024-07-27 09:19:40,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=0.0, ans=0.9 +2024-07-27 09:19:47,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=13.333333333333334, ans=0.499375 +2024-07-27 09:19:48,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.15 vs. limit=7.505 +2024-07-27 09:19:52,383 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.784e+02 9.392e+02 1.009e+03 1.270e+03 1.305e+03, threshold=4.037e+03, percent-clipped=0.0 +2024-07-27 09:20:00,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=463.56 vs. limit=5.006666666666667 +2024-07-27 09:20:05,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=379.31 vs. limit=7.505 +2024-07-27 09:20:09,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 2.100e+02 8.784e+02 1.111e+03 1.403e+03, threshold=3.513e+03, percent-clipped=0.0 +2024-07-27 09:20:10,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26.666666666666668, ans=0.2997333333333333 +2024-07-27 09:20:20,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=422.91 vs. 
limit=7.51 +2024-07-27 09:20:26,322 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=68.94 vs. limit=4.010666666666666 +2024-07-27 09:20:31,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=487.36 vs. limit=7.53 +2024-07-27 09:20:36,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=248.62 vs. limit=7.515 +2024-07-27 09:20:36,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=31.63 vs. limit=7.53 +2024-07-27 09:20:41,321 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 8.923e+01 1.821e+02 2.209e+02 8.784e+02 1.403e+03, threshold=8.837e+02, percent-clipped=0.0 +2024-07-27 09:20:49,330 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=321.03 vs. limit=7.52 +2024-07-27 09:20:50,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=120.22 vs. limit=7.52 +2024-07-27 09:20:54,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=53.333333333333336, ans=0.4975 +2024-07-27 09:21:33,268 INFO [train.py:1114] (3/4) Epoch 1, batch 50, loss[loss=1.153, simple_loss=1.023, pruned_loss=1.162, over 4621.00 frames. ], tot_loss[loss=2.995, simple_loss=2.751, pruned_loss=2.38, over 207023.39 frames. ], batch size: 11, lr: 2.48e-02, grad_scale: 1.0 +2024-07-27 09:21:35,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=257.50 vs. limit=7.525 +2024-07-27 09:21:35,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=145.51 vs. limit=7.525 +2024-07-27 09:21:37,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=305.14 vs. limit=7.525 +2024-07-27 09:21:37,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=66.66666666666667, ans=0.1975 +2024-07-27 09:21:43,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=373.83 vs. limit=7.53 +2024-07-27 09:21:56,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=230.73 vs. limit=7.53 +2024-07-27 09:21:59,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80.0, ans=0.49625 +2024-07-27 09:22:03,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=174.80 vs. limit=7.535 +2024-07-27 09:22:06,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=252.09 vs. 
limit=7.535 +2024-07-27 09:22:07,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=431.41 vs. limit=7.535 +2024-07-27 09:22:08,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=226.05 vs. limit=7.535 +2024-07-27 09:22:16,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=93.33333333333333, ans=5.058333333333334 +2024-07-27 09:22:17,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=106.66666666666667, ans=0.244 +2024-07-27 09:22:35,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=344.04 vs. limit=7.54 +2024-07-27 09:22:37,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=120.0, ans=0.8958 +2024-07-27 09:22:54,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=62.78 vs. limit=7.545 +2024-07-27 09:23:04,785 INFO [train.py:1114] (3/4) Epoch 1, batch 100, loss[loss=1.117, simple_loss=0.969, pruned_loss=1.186, over 4639.00 frames. ], tot_loss[loss=2.046, simple_loss=1.851, pruned_loss=1.786, over 366060.54 frames. ], batch size: 12, lr: 2.70e-02, grad_scale: 2.0 +2024-07-27 09:23:05,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=55.95 vs. limit=7.6 +2024-07-27 09:23:06,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.579e+01 2.513e+01 6.174e+01 1.938e+02 1.403e+03, threshold=1.235e+02, percent-clipped=0.0 +2024-07-27 09:23:16,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=133.33333333333334, ans=5.083333333333333 +2024-07-27 09:23:21,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=146.66666666666666, ans=0.5 +2024-07-27 09:23:26,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=328.46 vs. limit=7.555 +2024-07-27 09:23:32,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=85.45 vs. limit=7.61 +2024-07-27 09:23:32,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=369.23 vs. limit=7.555 +2024-07-27 09:23:45,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=320.58 vs. limit=5.08 +2024-07-27 09:24:01,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173.33333333333334, ans=0.2982666666666667 +2024-07-27 09:24:01,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=37.60 vs. 
limit=7.565 +2024-07-27 09:24:04,652 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:24:08,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.16 vs. limit=3.026 +2024-07-27 09:24:12,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=51.14 vs. limit=7.57 +2024-07-27 09:24:13,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=144.12 vs. limit=7.57 +2024-07-27 09:24:14,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=32.18 vs. limit=4.074666666666666 +2024-07-27 09:24:17,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=38.01 vs. limit=7.64 +2024-07-27 09:24:19,854 INFO [train.py:1114] (3/4) Epoch 1, batch 150, loss[loss=1.069, simple_loss=0.913, pruned_loss=1.136, over 4617.00 frames. ], tot_loss[loss=1.659, simple_loss=1.479, pruned_loss=1.54, over 494611.33 frames. ], batch size: 11, lr: 2.93e-02, grad_scale: 2.0 +2024-07-27 09:24:19,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=200.0, ans=0.490625 +2024-07-27 09:24:28,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=44.68 vs. limit=7.575 +2024-07-27 09:24:36,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=57.30 vs. limit=7.66 +2024-07-27 09:24:39,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=214.45 vs. limit=7.58 +2024-07-27 09:24:39,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=62.39 vs. limit=7.66 +2024-07-27 09:24:42,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=295.77 vs. limit=7.58 +2024-07-27 09:24:48,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=55.89 vs. limit=7.67 +2024-07-27 09:24:48,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=62.89 vs. limit=5.113333333333333 +2024-07-27 09:24:50,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=226.66666666666666, ans=0.2034 +2024-07-27 09:24:53,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=109.87 vs. limit=7.585 +2024-07-27 09:24:55,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.05 vs. 
limit=3.036 +2024-07-27 09:25:03,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=31.89 vs. limit=5.06 +2024-07-27 09:25:08,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.60 vs. limit=3.038 +2024-07-27 09:25:10,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=39.22 vs. limit=5.126666666666667 +2024-07-27 09:25:10,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=97.20 vs. limit=7.595 +2024-07-27 09:25:11,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=20.84 vs. limit=7.595 +2024-07-27 09:25:14,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=26.47 vs. limit=7.595 +2024-07-27 09:25:17,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=19.33 vs. limit=7.595 +2024-07-27 09:25:22,980 INFO [train.py:1114] (3/4) Epoch 1, batch 200, loss[loss=1.047, simple_loss=0.8915, pruned_loss=1.051, over 4492.00 frames. ], tot_loss[loss=1.441, simple_loss=1.271, pruned_loss=1.376, over 593971.47 frames. ], batch size: 21, lr: 3.15e-02, grad_scale: 4.0 +2024-07-27 09:25:23,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266.6666666666667, ans=0.29733333333333334 +2024-07-27 09:25:24,356 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 1.807e+01 2.398e+01 2.890e+01 3.614e+01 1.455e+02, threshold=5.780e+01, percent-clipped=1.0 +2024-07-27 09:25:30,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=266.6666666666667, ans=0.4875 +2024-07-27 09:25:52,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=280.0, ans=0.29719999999999996 +2024-07-27 09:25:52,794 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=25.01 vs. limit=5.14 +2024-07-27 09:25:52,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=29.47 vs. limit=5.14 +2024-07-27 09:25:53,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=42.61 vs. limit=5.14 +2024-07-27 09:25:53,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=280.0, ans=0.1895 +2024-07-27 09:25:53,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=199.04 vs. limit=7.605 +2024-07-27 09:25:57,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=16.76 vs. 
limit=5.073333333333333 +2024-07-27 09:25:59,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=74.03 vs. limit=7.61 +2024-07-27 09:26:04,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=7.61 +2024-07-27 09:26:04,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=17.70 vs. limit=7.61 +2024-07-27 09:26:05,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=243.53 vs. limit=7.61 +2024-07-27 09:26:05,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.12 vs. limit=7.72 +2024-07-27 09:26:07,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=16.97 vs. limit=7.61 +2024-07-27 09:26:23,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=16.92 vs. limit=7.615 +2024-07-27 09:26:30,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=291.78 vs. limit=7.62 +2024-07-27 09:26:35,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=320.0, ans=0.232 +2024-07-27 09:26:44,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=7.62 +2024-07-27 09:26:46,528 INFO [train.py:1114] (3/4) Epoch 1, batch 250, loss[loss=1.009, simple_loss=0.8491, pruned_loss=0.9996, over 4633.00 frames. ], tot_loss[loss=1.303, simple_loss=1.137, pruned_loss=1.259, over 670872.32 frames. ], batch size: 16, lr: 3.38e-02, grad_scale: 4.0 +2024-07-27 09:26:49,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.60 vs. limit=5.083333333333333 +2024-07-27 09:26:55,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=333.3333333333333, ans=0.1875 +2024-07-27 09:26:57,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=333.3333333333333, ans=0.23125 +2024-07-27 09:26:57,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=333.3333333333333, ans=0.1875 +2024-07-27 09:26:58,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=26.40 vs. limit=7.625 +2024-07-27 09:26:58,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=11.36 vs. limit=5.086666666666667 +2024-07-27 09:26:59,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=23.72 vs. 
limit=7.63 +2024-07-27 09:27:00,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=203.72 vs. limit=5.173333333333334 +2024-07-27 09:27:06,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=346.6666666666667, ans=0.48375 +2024-07-27 09:27:08,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=360.0, ans=0.2964 +2024-07-27 09:27:11,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=360.0, ans=0.1865 +2024-07-27 09:27:13,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=12.59 vs. limit=4.144 +2024-07-27 09:27:13,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=360.0, ans=0.8874000000000001 +2024-07-27 09:27:14,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=7.635 +2024-07-27 09:27:21,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=72.42 vs. limit=7.64 +2024-07-27 09:27:24,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=373.3333333333333, ans=0.2962666666666667 +2024-07-27 09:27:24,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=99.23 vs. limit=7.64 +2024-07-27 09:27:31,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=386.6666666666667, ans=0.0913 +2024-07-27 09:27:33,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=386.6666666666667, ans=0.8864666666666667 +2024-07-27 09:27:34,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=102.96 vs. limit=7.645 +2024-07-27 09:27:37,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=386.6666666666667, ans=0.22825 +2024-07-27 09:27:38,728 INFO [train.py:1114] (3/4) Epoch 1, batch 300, loss[loss=1.034, simple_loss=0.8643, pruned_loss=0.9948, over 4796.00 frames. ], tot_loss[loss=1.215, simple_loss=1.051, pruned_loss=1.178, over 730193.39 frames. ], batch size: 15, lr: 3.60e-02, grad_scale: 8.0 +2024-07-27 09:27:40,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.723e+01 3.145e+01 3.570e+01 4.574e+01 1.008e+02, threshold=7.140e+01, percent-clipped=16.0 +2024-07-27 09:27:45,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=400.0, ans=0.48125 +2024-07-27 09:27:51,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.86 vs. 
limit=5.1 +2024-07-27 09:27:54,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=37.31 vs. limit=7.655 +2024-07-27 09:27:59,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=413.3333333333333, ans=0.480625 +2024-07-27 09:28:01,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=413.3333333333333, ans=0.480625 +2024-07-27 09:28:02,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.05 vs. limit=5.1066666666666665 +2024-07-27 09:28:05,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.30 vs. limit=5.213333333333333 +2024-07-27 09:28:05,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.16 vs. limit=7.82 +2024-07-27 09:28:06,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.71 vs. limit=4.1706666666666665 +2024-07-27 09:28:09,606 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=374.36 vs. limit=7.66 +2024-07-27 09:28:12,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=6.82 vs. limit=7.665 +2024-07-27 09:28:14,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=111.70 vs. limit=7.665 +2024-07-27 09:28:16,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=22.84 vs. limit=7.665 +2024-07-27 09:28:16,505 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=8.16 vs. limit=4.176 +2024-07-27 09:28:19,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=27.74 vs. limit=7.67 +2024-07-27 09:28:20,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=429.26 vs. limit=5.226666666666667 +2024-07-27 09:28:23,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=55.03 vs. limit=7.67 +2024-07-27 09:28:23,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.48 vs. limit=5.113333333333333 +2024-07-27 09:28:30,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=466.6666666666667, ans=0.478125 +2024-07-27 09:28:31,001 INFO [train.py:1114] (3/4) Epoch 1, batch 350, loss[loss=0.9781, simple_loss=0.8052, pruned_loss=0.9441, over 4950.00 frames. ], tot_loss[loss=1.158, simple_loss=0.992, pruned_loss=1.12, over 776347.92 frames. 
], batch size: 12, lr: 3.83e-02, grad_scale: 8.0 +2024-07-27 09:28:31,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=15.30 vs. limit=7.675 +2024-07-27 09:28:31,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=466.6666666666667, ans=0.478125 +2024-07-27 09:28:33,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=179.90 vs. limit=7.675 +2024-07-27 09:28:41,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=480.0, ans=0.8832 +2024-07-27 09:28:48,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=123.65 vs. limit=5.24 +2024-07-27 09:28:56,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=7.87 +2024-07-27 09:28:58,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=493.3333333333333, ans=0.476875 +2024-07-27 09:29:05,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=506.6666666666667, ans=0.09683333333333334 +2024-07-27 09:29:06,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=7.01 vs. limit=5.126666666666667 +2024-07-27 09:29:10,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.77 vs. limit=5.126666666666667 +2024-07-27 09:29:11,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=24.88 vs. limit=7.69 +2024-07-27 09:29:23,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=520.0, ans=0.2948 +2024-07-27 09:29:25,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.79 vs. limit=7.89 +2024-07-27 09:29:26,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.41 vs. limit=7.9 +2024-07-27 09:29:27,554 INFO [train.py:1114] (3/4) Epoch 1, batch 400, loss[loss=1.026, simple_loss=0.8393, pruned_loss=0.964, over 4695.00 frames. ], tot_loss[loss=1.111, simple_loss=0.9437, pruned_loss=1.07, over 814063.03 frames. ], batch size: 13, lr: 4.05e-02, grad_scale: 16.0 +2024-07-27 09:29:29,107 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 2.881e+01 3.675e+01 4.330e+01 5.451e+01 8.565e+01, threshold=8.660e+01, percent-clipped=3.0 +2024-07-27 09:29:30,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=13.69 vs. 
limit=7.7 +2024-07-27 09:29:30,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=533.3333333333334, ans=0.29466666666666663 +2024-07-27 09:29:34,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=533.3333333333334, ans=0.29466666666666663 +2024-07-27 09:29:36,777 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=68.94 vs. limit=7.705 +2024-07-27 09:29:40,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=21.21 vs. limit=7.705 +2024-07-27 09:29:41,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=546.6666666666666, ans=0.09658333333333334 +2024-07-27 09:29:41,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=546.6666666666666, ans=7.705 +2024-07-27 09:29:43,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=7.705 +2024-07-27 09:29:46,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.22 vs. limit=7.71 +2024-07-27 09:29:49,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=560.0, ans=0.0874 +2024-07-27 09:29:57,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=160.56 vs. limit=7.71 +2024-07-27 09:30:10,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=573.3333333333334, ans=0.473125 +2024-07-27 09:30:39,741 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=1.746e+01 +2024-07-27 09:30:43,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=586.6666666666666, ans=7.72 +2024-07-27 09:30:43,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.38 vs. limit=5.1466666666666665 +2024-07-27 09:31:00,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=25.84 vs. limit=7.72 +2024-07-27 09:31:09,336 INFO [train.py:1114] (3/4) Epoch 1, batch 450, loss[loss=1.101, simple_loss=0.8995, pruned_loss=0.9987, over 4640.00 frames. ], tot_loss[loss=1.08, simple_loss=0.9097, pruned_loss=1.029, over 839520.44 frames. 
], batch size: 13, lr: 4.28e-02, grad_scale: 16.0 +2024-07-27 09:31:10,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=600.0, ans=0.20900000000000002 +2024-07-27 09:31:12,296 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=9.983e-01 +2024-07-27 09:31:16,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=600.0, ans=0.5 +2024-07-27 09:31:20,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=107.37 vs. limit=7.73 +2024-07-27 09:31:28,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=28.06 vs. limit=7.96 +2024-07-27 09:36:53,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.15 vs. limit=5.156666666666666 +2024-07-27 09:36:55,343 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.78 vs. limit=5.3133333333333335 +2024-07-27 09:36:55,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.62 vs. limit=5.3133333333333335 +2024-07-27 09:37:01,075 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.11 vs. limit=3.096 +2024-07-27 09:37:05,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=640.0, ans=0.47 +2024-07-27 09:37:07,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=41.14 vs. limit=7.74 +2024-07-27 09:37:08,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=4.256 +2024-07-27 09:37:09,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=640.0, ans=0.2436 +2024-07-27 09:37:09,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=640.0, ans=0.8776 +2024-07-27 09:37:11,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=36.42 vs. limit=7.745 +2024-07-27 09:37:13,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.35 vs. limit=7.99 +2024-07-27 09:37:16,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=653.3333333333334, ans=7.745 +2024-07-27 09:37:20,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=653.3333333333334, ans=0.469375 +2024-07-27 09:37:21,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=22.83 vs. 
limit=7.745 +2024-07-27 09:37:22,229 INFO [train.py:1114] (3/4) Epoch 1, batch 500, loss[loss=0.9251, simple_loss=0.7568, pruned_loss=0.8085, over 4673.00 frames. ], tot_loss[loss=1.052, simple_loss=0.8798, pruned_loss=0.9892, over 861927.71 frames. ], batch size: 15, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:37:22,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=666.6666666666666, ans=0.46875 +2024-07-27 09:37:25,285 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+01 3.795e+01 4.382e+01 5.151e+01 8.333e+01, threshold=8.764e+01, percent-clipped=0.0 +2024-07-27 09:37:27,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=28.02 vs. limit=7.75 +2024-07-27 09:37:38,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=31.95 vs. limit=7.75 +2024-07-27 09:37:39,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=666.6666666666666, ans=0.46875 +2024-07-27 09:37:40,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=11.53 vs. limit=7.75 +2024-07-27 09:37:53,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=680.0, ans=0.2932 +2024-07-27 09:37:54,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=18.61 vs. limit=7.755 +2024-07-27 09:38:13,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.37 vs. limit=5.346666666666667 +2024-07-27 09:38:14,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=693.3333333333334, ans=0.7569333333333333 +2024-07-27 09:38:17,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=62.40 vs. limit=5.346666666666667 +2024-07-27 09:38:17,640 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=53.88 vs. limit=7.76 +2024-07-27 09:38:34,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=706.6666666666666, ans=0.21025 +2024-07-27 09:38:38,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=26.36 vs. limit=7.77 +2024-07-27 09:38:42,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.96 vs. limit=8.04 +2024-07-27 09:38:46,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=25.95 vs. 
limit=7.77 +2024-07-27 09:38:51,519 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=8.659e+00 +2024-07-27 09:38:55,245 INFO [train.py:1114] (3/4) Epoch 1, batch 550, loss[loss=1.012, simple_loss=0.8238, pruned_loss=0.8657, over 4613.00 frames. ], tot_loss[loss=1.03, simple_loss=0.8563, pruned_loss=0.9522, over 877777.81 frames. ], batch size: 17, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:38:59,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.51 vs. limit=4.293333333333333 +2024-07-27 09:39:05,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=746.6666666666666, ans=0.5 +2024-07-27 09:39:07,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=746.6666666666666, ans=0.46499999999999997 +2024-07-27 09:39:13,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=746.6666666666666, ans=0.17200000000000001 +2024-07-27 09:39:20,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=760.0, ans=8.07 +2024-07-27 09:39:31,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=773.3333333333334, ans=0.0826 +2024-07-27 09:39:38,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=786.6666666666666, ans=0.463125 +2024-07-27 09:39:40,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=18.10 vs. limit=7.795 +2024-07-27 09:39:41,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=800.0, ans=0.082 +2024-07-27 09:39:43,029 INFO [train.py:1114] (3/4) Epoch 1, batch 600, loss[loss=1.021, simple_loss=0.8277, pruned_loss=0.8572, over 4650.00 frames. ], tot_loss[loss=1.015, simple_loss=0.8388, pruned_loss=0.9228, over 892047.25 frames. ], batch size: 16, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:39:43,870 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.748e+01 6.137e+01 8.087e+01 1.069e+02 3.258e+02, threshold=1.617e+02, percent-clipped=41.0 +2024-07-27 09:39:46,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=800.0, ans=0.4625 +2024-07-27 09:39:51,606 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=23.81 vs. limit=7.805 +2024-07-27 09:39:59,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=28.61 vs. limit=8.11 +2024-07-27 09:40:06,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.96 vs. limit=5.206666666666667 +2024-07-27 09:40:15,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=9.81 vs. 
limit=8.13 +2024-07-27 09:40:17,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=10.24 vs. limit=7.815 +2024-07-27 09:40:18,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=840.0, ans=8.13 +2024-07-27 09:40:20,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=853.3333333333334, ans=0.168 +2024-07-27 09:40:24,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=853.3333333333334, ans=0.08080000000000001 +2024-07-27 09:40:30,103 INFO [train.py:1114] (3/4) Epoch 1, batch 650, loss[loss=0.9968, simple_loss=0.7984, pruned_loss=0.8357, over 4756.00 frames. ], tot_loss[loss=1, simple_loss=0.8217, pruned_loss=0.8948, over 903692.12 frames. ], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:40:30,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=25.97 vs. limit=7.825 +2024-07-27 09:40:32,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=866.6666666666666, ans=5.541666666666667 +2024-07-27 09:40:32,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.64 vs. limit=7.825 +2024-07-27 09:40:35,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=38.29 vs. limit=7.825 +2024-07-27 09:40:37,046 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=4.346666666666667 +2024-07-27 09:40:37,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.48 vs. limit=8.15 +2024-07-27 09:40:39,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.42 vs. limit=3.132 +2024-07-27 09:40:44,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.36 vs. limit=4.352 +2024-07-27 09:40:47,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=893.3333333333334, ans=0.458125 +2024-07-27 09:40:55,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=42.79 vs. limit=7.835 +2024-07-27 09:40:56,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=16.11 vs. limit=7.835 +2024-07-27 09:40:59,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=893.3333333333334, ans=0.458125 +2024-07-27 09:41:02,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=15.08 vs. 
limit=7.84 +2024-07-27 09:41:04,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=906.6666666666666, ans=0.4575 +2024-07-27 09:41:32,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=920.0, ans=0.1655 +2024-07-27 09:41:39,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=933.3333333333334, ans=0.1975 +2024-07-27 09:41:40,642 INFO [train.py:1114] (3/4) Epoch 1, batch 700, loss[loss=0.8657, simple_loss=0.7016, pruned_loss=0.691, over 4638.00 frames. ], tot_loss[loss=0.9923, simple_loss=0.8111, pruned_loss=0.8716, over 911705.70 frames. ], batch size: 12, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:41:41,494 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.672e+01 9.322e+01 1.196e+02 1.686e+02 3.909e+02, threshold=2.392e+02, percent-clipped=30.0 +2024-07-27 09:41:45,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.42 vs. limit=4.373333333333333 +2024-07-27 09:41:46,581 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=1.708e-02 +2024-07-27 09:41:51,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=16.92 vs. limit=7.85 +2024-07-27 09:42:00,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=946.6666666666666, ans=0.455625 +2024-07-27 09:42:03,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.04 vs. limit=8.21 +2024-07-27 09:42:16,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=16.91 vs. limit=7.855 +2024-07-27 09:42:17,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=8.09 vs. limit=8.22 +2024-07-27 09:42:20,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=22.22 vs. limit=7.86 +2024-07-27 09:42:27,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=973.3333333333334, ans=0.1635 +2024-07-27 09:42:27,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=973.3333333333334, ans=0.2902666666666667 +2024-07-27 09:42:34,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.30 vs. limit=5.243333333333333 +2024-07-27 09:42:35,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=986.6666666666666, ans=0.45375 +2024-07-27 09:42:37,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.68 vs. 
limit=5.493333333333333 +2024-07-27 09:42:40,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=12.30 vs. limit=5.246666666666667 +2024-07-27 09:42:42,791 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=34.88 vs. limit=8.24 +2024-07-27 09:42:44,013 INFO [train.py:1114] (3/4) Epoch 1, batch 750, loss[loss=0.9858, simple_loss=0.7963, pruned_loss=0.7742, over 4694.00 frames. ], tot_loss[loss=0.9814, simple_loss=0.7998, pruned_loss=0.8445, over 918472.04 frames. ], batch size: 13, lr: 4.49e-02, grad_scale: 16.0 +2024-07-27 09:42:48,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=7.875 +2024-07-27 09:42:52,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=1013.3333333333334, ans=0.193 +2024-07-27 09:42:53,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=1013.3333333333334, ans=0.8645333333333334 +2024-07-27 09:42:54,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=29.94 vs. limit=8.26 +2024-07-27 09:42:54,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=1013.3333333333334, ans=8.26 +2024-07-27 09:42:54,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=9.98 vs. limit=7.88 +2024-07-27 09:42:58,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=24.91 vs. limit=8.26 +2024-07-27 09:43:00,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-07-27 09:43:01,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1026.6666666666667, ans=0.28973333333333334 +2024-07-27 09:43:03,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-07-27 09:43:06,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=1026.6666666666667, ans=0.451875 +2024-07-27 09:43:11,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=1040.0, ans=0.37 +2024-07-27 09:43:21,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=4.416 +2024-07-27 09:43:27,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=31.63 vs. limit=8.29 +2024-07-27 09:43:34,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.15 vs. 
limit=7.9 +2024-07-27 09:43:35,567 INFO [train.py:1114] (3/4) Epoch 1, batch 800, loss[loss=0.8598, simple_loss=0.7046, pruned_loss=0.6435, over 4850.00 frames. ], tot_loss[loss=0.9698, simple_loss=0.7892, pruned_loss=0.8161, over 924014.87 frames. ], batch size: 12, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:43:35,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1066.6666666666667, ans=0.45 +2024-07-27 09:43:36,072 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.95 vs. limit=5.266666666666667 +2024-07-27 09:43:36,419 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 7.305e+01 9.106e+01 1.068e+02 1.961e+02, threshold=1.821e+02, percent-clipped=0.0 +2024-07-27 09:43:40,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.06 vs. limit=7.9 +2024-07-27 09:43:41,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=1066.6666666666667, ans=7.9 +2024-07-27 09:43:41,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.33 vs. limit=8.3 +2024-07-27 09:43:42,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=22.39 vs. limit=7.9 +2024-07-27 09:43:43,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=11.40 vs. limit=5.27 +2024-07-27 09:43:51,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.25 vs. limit=8.31 +2024-07-27 09:44:37,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=2.73 vs. limit=4.437333333333333 +2024-07-27 09:44:37,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=29.48 vs. limit=8.32 +2024-07-27 09:44:39,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=1093.3333333333333, ans=0.8617333333333334 +2024-07-27 09:44:42,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=1106.6666666666667, ans=0.18775 +2024-07-27 09:44:44,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=22.24 vs. limit=7.915 +2024-07-27 09:44:46,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=1106.6666666666667, ans=0.0751 +2024-07-27 09:44:48,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.53 vs. limit=8.33 +2024-07-27 09:44:50,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=22.59 vs. 
limit=7.92 +2024-07-27 09:44:51,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=1120.0, ans=0.2888 +2024-07-27 09:44:54,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.69 vs. limit=7.92 +2024-07-27 09:44:55,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=17.78 vs. limit=7.92 +2024-07-27 09:45:05,386 INFO [train.py:1114] (3/4) Epoch 1, batch 850, loss[loss=1.025, simple_loss=0.8348, pruned_loss=0.7613, over 4662.00 frames. ], tot_loss[loss=0.9519, simple_loss=0.7756, pruned_loss=0.7814, over 928073.84 frames. ], batch size: 14, lr: 4.49e-02, grad_scale: 32.0 +2024-07-27 09:46:03,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=1133.3333333333333, ans=0.217 +2024-07-27 09:46:07,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=1146.6666666666667, ans=0.8598666666666667 +2024-07-27 09:46:07,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.09 vs. limit=5.573333333333333 +2024-07-27 09:46:23,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.14 vs. limit=7.935 +2024-07-27 09:46:50,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=19.79 vs. limit=8.38 +2024-07-27 09:46:51,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.71 vs. limit=5.293333333333333 +2024-07-27 09:46:53,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.65 vs. limit=8.39 +2024-07-27 09:46:57,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.42 vs. limit=8.39 +2024-07-27 09:46:57,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=1186.6666666666667, ans=0.444375 +2024-07-27 09:46:58,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.52 vs. limit=5.593333333333334 +2024-07-27 09:47:01,749 INFO [train.py:1114] (3/4) Epoch 1, batch 900, loss[loss=0.7039, simple_loss=0.5837, pruned_loss=0.4979, over 4862.00 frames. ], tot_loss[loss=0.9296, simple_loss=0.7594, pruned_loss=0.7438, over 928976.28 frames. 
], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:05,079 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.472e+01 6.615e+01 8.339e+01 1.626e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-27 09:47:07,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=1200.0, ans=0.44375 +2024-07-27 09:47:08,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.76 vs. limit=7.95 +2024-07-27 09:47:08,976 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.54 vs. limit=5.3 +2024-07-27 09:47:09,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=1200.0, ans=0.762 +2024-07-27 09:47:14,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=1213.3333333333333, ans=0.1545 +2024-07-27 09:47:23,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=1226.6666666666667, ans=0.8570666666666666 +2024-07-27 09:47:24,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.08 vs. limit=8.42 +2024-07-27 09:47:32,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=1240.0, ans=0.8566 +2024-07-27 09:47:35,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=1240.0, ans=0.8566 +2024-07-27 09:47:39,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.89 vs. limit=8.44 +2024-07-27 09:47:47,824 INFO [train.py:1114] (3/4) Epoch 1, batch 950, loss[loss=0.7539, simple_loss=0.6373, pruned_loss=0.5079, over 4789.00 frames. ], tot_loss[loss=0.9049, simple_loss=0.7422, pruned_loss=0.705, over 930496.21 frames. ], batch size: 12, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:47:53,505 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=7.975 +2024-07-27 09:48:08,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1293.3333333333333, ans=0.439375 +2024-07-27 09:48:10,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1293.3333333333333, ans=0.28706666666666664 +2024-07-27 09:48:16,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=1306.6666666666667, ans=0.0706 +2024-07-27 09:48:17,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=7.99 +2024-07-27 09:48:18,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=1306.6666666666667, ans=0.151 +2024-07-27 09:48:20,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.32 vs. 
limit=8.48 +2024-07-27 09:48:20,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.94 vs. limit=7.99 +2024-07-27 09:48:32,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=1320.0, ans=5.825 +2024-07-27 09:48:34,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.07 vs. limit=8.49 +2024-07-27 09:48:38,207 INFO [train.py:1114] (3/4) Epoch 1, batch 1000, loss[loss=0.8111, simple_loss=0.682, pruned_loss=0.5446, over 4968.00 frames. ], tot_loss[loss=0.8807, simple_loss=0.7257, pruned_loss=0.6683, over 929558.41 frames. ], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:48:38,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=1333.3333333333333, ans=0.07 +2024-07-27 09:48:39,232 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.604e+01 7.221e+01 8.711e+01 1.557e+02, threshold=1.444e+02, percent-clipped=4.0 +2024-07-27 09:48:42,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1333.3333333333333, ans=0.4375 +2024-07-27 09:48:44,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.81 vs. limit=8.0 +2024-07-27 09:48:45,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. limit=8.0 +2024-07-27 09:48:46,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.35 vs. limit=5.333333333333333 +2024-07-27 09:48:47,217 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.31 vs. limit=8.51 +2024-07-27 09:48:48,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=1346.6666666666667, ans=0.5 +2024-07-27 09:49:02,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.39 vs. limit=8.52 +2024-07-27 09:49:09,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=8.99 vs. limit=8.53 +2024-07-27 09:49:10,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.91 vs. limit=5.343333333333334 +2024-07-27 09:49:11,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. limit=8.015 +2024-07-27 09:49:11,616 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.19 vs. 
limit=8.53 +2024-07-27 09:49:20,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=1400.0, ans=0.14750000000000002 +2024-07-27 09:49:21,065 INFO [train.py:1114] (3/4) Epoch 1, batch 1050, loss[loss=0.7194, simple_loss=0.6246, pruned_loss=0.452, over 4869.00 frames. ], tot_loss[loss=0.8489, simple_loss=0.7037, pruned_loss=0.6271, over 931984.23 frames. ], batch size: 14, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:49:27,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=1400.0, ans=0.434375 +2024-07-27 09:49:29,936 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=7.33 vs. limit=4.565333333333333 +2024-07-27 09:49:32,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=1413.3333333333333, ans=0.14700000000000002 +2024-07-27 09:49:33,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.61 vs. limit=8.56 +2024-07-27 09:49:51,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=1426.6666666666667, ans=0.433125 +2024-07-27 09:49:56,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=1426.6666666666667, ans=0.433125 +2024-07-27 09:49:56,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=1426.6666666666667, ans=0.433125 +2024-07-27 09:50:00,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1440.0, ans=0.28559999999999997 +2024-07-27 09:50:03,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=1440.0, ans=0.0676 +2024-07-27 09:50:06,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.50 vs. limit=8.04 +2024-07-27 09:50:12,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.88 vs. limit=5.72 +2024-07-27 09:50:23,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1453.3333333333333, ans=0.431875 +2024-07-27 09:50:24,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=1453.3333333333333, ans=0.431875 +2024-07-27 09:50:25,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=1453.3333333333333, ans=0.23546666666666666 +2024-07-27 09:50:25,657 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.38 vs. limit=8.045 +2024-07-27 09:50:30,344 INFO [train.py:1114] (3/4) Epoch 1, batch 1100, loss[loss=0.7444, simple_loss=0.63, pruned_loss=0.4834, over 4892.00 frames. ], tot_loss[loss=0.8205, simple_loss=0.6845, pruned_loss=0.5904, over 934270.79 frames. 
], batch size: 13, lr: 4.48e-02, grad_scale: 32.0 +2024-07-27 09:50:31,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.580e+01 6.586e+01 7.875e+01 9.417e+01 1.858e+02, threshold=1.575e+02, percent-clipped=4.0 +2024-07-27 09:50:35,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=1466.6666666666667, ans=0.31666666666666665 +2024-07-27 09:50:36,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.30 vs. limit=8.6 +2024-07-27 09:50:38,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.83 vs. limit=8.61 +2024-07-27 09:50:40,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=1480.0, ans=0.14450000000000002 +2024-07-27 09:50:40,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=7.27 vs. limit=4.592 +2024-07-27 09:50:41,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.02 vs. limit=8.61 +2024-07-27 09:50:47,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=4.592 +2024-07-27 09:50:56,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.29 vs. limit=8.06 +2024-07-27 09:50:57,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1506.6666666666667, ans=0.429375 +2024-07-27 09:51:03,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.12 vs. limit=8.629999999999999 +2024-07-27 09:51:10,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=3.228 +2024-07-27 09:51:14,859 INFO [train.py:1114] (3/4) Epoch 1, batch 1150, loss[loss=0.6427, simple_loss=0.5654, pruned_loss=0.3884, over 4896.00 frames. ], tot_loss[loss=0.7954, simple_loss=0.6677, pruned_loss=0.5581, over 933983.95 frames. ], batch size: 13, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:51:16,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=1533.3333333333333, ans=0.8463333333333334 +2024-07-27 09:51:38,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.44 vs. limit=5.773333333333333 +2024-07-27 09:51:42,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=3.23 vs. limit=3.232 +2024-07-27 09:51:44,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.71 vs. limit=5.39 +2024-07-27 09:51:48,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.57 vs. 
limit=8.085 +2024-07-27 09:51:51,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.52 vs. limit=5.786666666666667 +2024-07-27 09:51:53,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=1573.3333333333333, ans=0.30333333333333334 +2024-07-27 09:51:56,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=1573.3333333333333, ans=0.42625 +2024-07-27 09:51:57,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=1573.3333333333333, ans=0.42625 +2024-07-27 09:51:58,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=1573.3333333333333, ans=0.42625 +2024-07-27 09:52:01,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.43 vs. limit=8.095 +2024-07-27 09:52:09,187 INFO [train.py:1114] (3/4) Epoch 1, batch 1200, loss[loss=0.7159, simple_loss=0.6197, pruned_loss=0.4407, over 4873.00 frames. ], tot_loss[loss=0.7746, simple_loss=0.6543, pruned_loss=0.5304, over 933585.56 frames. ], batch size: 14, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:10,014 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.977e+01 8.267e+01 1.004e+02 1.485e+02, threshold=1.653e+02, percent-clipped=0.0 +2024-07-27 09:52:22,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.34 vs. limit=8.105 +2024-07-27 09:52:30,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1626.6666666666667, ans=0.42375 +2024-07-27 09:52:31,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=1626.6666666666667, ans=0.42375 +2024-07-27 09:52:33,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=1626.6666666666667, ans=0.18495333333333336 +2024-07-27 09:52:39,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.87 vs. limit=5.82 +2024-07-27 09:52:47,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=1653.3333333333333, ans=0.4225 +2024-07-27 09:52:49,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=1653.3333333333333, ans=0.138 +2024-07-27 09:52:55,869 INFO [train.py:1114] (3/4) Epoch 1, batch 1250, loss[loss=0.69, simple_loss=0.5971, pruned_loss=0.4218, over 4811.00 frames. ], tot_loss[loss=0.7524, simple_loss=0.6403, pruned_loss=0.5028, over 937513.52 frames. ], batch size: 15, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:52:59,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.23 vs. 
limit=8.125 +2024-07-27 09:53:00,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=1666.6666666666667, ans=0.1375 +2024-07-27 09:53:01,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.28 vs. limit=5.833333333333333 +2024-07-27 09:53:04,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.19 vs. limit=5.42 +2024-07-27 09:53:04,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=1680.0, ans=0.42125 +2024-07-27 09:53:09,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.36 vs. limit=5.84 +2024-07-27 09:53:09,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=1680.0, ans=0.42125 +2024-07-27 09:53:32,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=1706.6666666666667, ans=0.136 +2024-07-27 09:53:34,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.80 vs. limit=8.145 +2024-07-27 09:53:56,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=1720.0, ans=0.15325 +2024-07-27 09:53:59,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.31 vs. limit=8.15 +2024-07-27 09:53:59,479 INFO [train.py:1114] (3/4) Epoch 1, batch 1300, loss[loss=0.644, simple_loss=0.5611, pruned_loss=0.3873, over 4652.00 frames. ], tot_loss[loss=0.7277, simple_loss=0.6237, pruned_loss=0.4754, over 938771.90 frames. 
], batch size: 19, lr: 4.47e-02, grad_scale: 32.0 +2024-07-27 09:54:00,179 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.513e+01 6.459e+01 7.334e+01 8.641e+01 1.550e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 09:54:01,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=1733.3333333333333, ans=0.41875 +2024-07-27 09:54:04,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=1733.3333333333333, ans=0.135 +2024-07-27 09:54:05,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=1733.3333333333333, ans=0.2826666666666667 +2024-07-27 09:54:06,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=1733.3333333333333, ans=0.8393333333333334 +2024-07-27 09:54:15,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=1746.6666666666667, ans=0.41812499999999997 +2024-07-27 09:54:17,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=1746.6666666666667, ans=0.41812499999999997 +2024-07-27 09:54:22,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=1760.0, ans=0.4175 +2024-07-27 09:54:30,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=30.93 vs. limit=8.82 +2024-07-27 09:54:43,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.48 vs. limit=8.83 +2024-07-27 09:55:16,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=1786.6666666666667, ans=0.41625 +2024-07-27 09:55:18,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=1786.6666666666667, ans=0.28213333333333335 +2024-07-27 09:55:29,128 INFO [train.py:1114] (3/4) Epoch 1, batch 1350, loss[loss=0.5829, simple_loss=0.5318, pruned_loss=0.3259, over 4751.00 frames. ], tot_loss[loss=0.7059, simple_loss=0.6095, pruned_loss=0.4511, over 940643.41 frames. ], batch size: 13, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:55:34,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.45 vs. limit=5.45 +2024-07-27 09:55:52,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. limit=8.18 +2024-07-27 09:55:52,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.23 vs. limit=8.86 +2024-07-27 09:55:53,310 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=8.86 +2024-07-27 09:55:56,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.94 vs. 
limit=8.870000000000001 +2024-07-27 09:55:58,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.57 vs. limit=5.913333333333333 +2024-07-27 09:56:00,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.31 vs. limit=5.913333333333333 +2024-07-27 09:56:00,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.03 vs. limit=5.913333333333333 +2024-07-27 09:56:11,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=1840.0, ans=0.08850000000000001 +2024-07-27 09:56:16,956 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.62 vs. limit=8.879999999999999 +2024-07-27 09:56:17,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.02 vs. limit=5.46 +2024-07-27 09:56:17,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=1840.0, ans=0.8356 +2024-07-27 09:56:20,440 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.30 vs. limit=5.92 +2024-07-27 09:56:24,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=1853.3333333333333, ans=0.28146666666666664 +2024-07-27 09:56:24,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=1853.3333333333333, ans=0.1305 +2024-07-27 09:56:26,763 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.58 vs. limit=5.463333333333333 +2024-07-27 09:56:29,348 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.12 vs. limit=8.89 +2024-07-27 09:56:31,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.84 vs. limit=8.9 +2024-07-27 09:56:32,590 INFO [train.py:1114] (3/4) Epoch 1, batch 1400, loss[loss=0.5573, simple_loss=0.5055, pruned_loss=0.3133, over 4709.00 frames. ], tot_loss[loss=0.6843, simple_loss=0.5954, pruned_loss=0.4282, over 942800.25 frames. ], batch size: 11, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:56:32,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=1866.6666666666667, ans=0.13 +2024-07-27 09:56:33,350 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 7.358e+01 8.189e+01 9.683e+01 1.850e+02, threshold=1.638e+02, percent-clipped=1.0 +2024-07-27 09:56:34,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=1866.6666666666667, ans=0.8346666666666667 +2024-07-27 09:56:50,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.91 vs. 
limit=8.91 +2024-07-27 09:56:54,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=1880.0, ans=0.0577 +2024-07-27 09:57:04,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=27.17 vs. limit=8.92 +2024-07-27 09:57:05,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.70 vs. limit=8.92 +2024-07-27 09:57:10,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.65 vs. limit=5.953333333333333 +2024-07-27 09:57:10,784 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:57:11,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=1906.6666666666667, ans=0.23093333333333332 +2024-07-27 09:57:16,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=1920.0, ans=0.41000000000000003 +2024-07-27 09:57:24,391 INFO [train.py:1114] (3/4) Epoch 1, batch 1450, loss[loss=0.6023, simple_loss=0.5495, pruned_loss=0.3349, over 4689.00 frames. ], tot_loss[loss=0.6657, simple_loss=0.5835, pruned_loss=0.4086, over 942659.38 frames. ], batch size: 15, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:57:24,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=1933.3333333333333, ans=0.409375 +2024-07-27 09:57:25,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.86 vs. limit=5.483333333333333 +2024-07-27 09:57:28,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.97 vs. limit=8.95 +2024-07-27 09:57:36,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.31 vs. limit=8.96 +2024-07-27 09:57:42,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=1960.0, ans=0.408125 +2024-07-27 09:57:57,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=1973.3333333333333, ans=0.4075 +2024-07-27 09:57:59,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=1973.3333333333333, ans=0.4075 +2024-07-27 09:58:01,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.72 vs. limit=5.986666666666666 +2024-07-27 09:58:12,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=1973.3333333333333, ans=0.126 +2024-07-27 09:58:18,480 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 09:58:21,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.24 vs. 
limit=3.298 +2024-07-27 09:58:22,748 INFO [train.py:1114] (3/4) Epoch 1, batch 1500, loss[loss=0.5918, simple_loss=0.5297, pruned_loss=0.3365, over 4805.00 frames. ], tot_loss[loss=0.6507, simple_loss=0.5742, pruned_loss=0.3925, over 942024.57 frames. ], batch size: 14, lr: 4.46e-02, grad_scale: 32.0 +2024-07-27 09:58:23,589 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.537e+01 6.985e+01 7.625e+01 8.885e+01 1.224e+02, threshold=1.525e+02, percent-clipped=0.0 +2024-07-27 09:58:25,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=2000.0, ans=0.125 +2024-07-27 09:58:43,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.69 vs. limit=4.8053333333333335 +2024-07-27 09:58:45,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.65 vs. limit=5.503333333333333 +2024-07-27 09:58:59,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=2040.0, ans=0.0541 +2024-07-27 09:59:06,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.18 vs. limit=9.040000000000001 +2024-07-27 09:59:10,615 INFO [train.py:1114] (3/4) Epoch 1, batch 1550, loss[loss=0.5869, simple_loss=0.5385, pruned_loss=0.3223, over 4896.00 frames. ], tot_loss[loss=0.6366, simple_loss=0.565, pruned_loss=0.378, over 938293.42 frames. ], batch size: 15, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 09:59:10,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2066.6666666666665, ans=0.2793333333333333 +2024-07-27 09:59:13,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.65 vs. limit=8.275 +2024-07-27 09:59:15,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=2066.6666666666665, ans=0.053500000000000006 +2024-07-27 09:59:50,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2106.6666666666665, ans=0.2789333333333333 +2024-07-27 09:59:51,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.86 vs. limit=5.526666666666666 +2024-07-27 09:59:51,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=2106.6666666666665, ans=0.2789333333333333 +2024-07-27 10:00:00,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=2120.0, ans=0.0523 +2024-07-27 10:00:00,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.42 vs. limit=5.53 +2024-07-27 10:00:04,788 INFO [train.py:1114] (3/4) Epoch 1, batch 1600, loss[loss=0.6346, simple_loss=0.5703, pruned_loss=0.3565, over 4879.00 frames. ], tot_loss[loss=0.6234, simple_loss=0.5566, pruned_loss=0.3648, over 937014.85 frames. 
], batch size: 14, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 10:00:05,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.018e+01 7.259e+01 8.235e+01 9.551e+01 1.793e+02, threshold=1.647e+02, percent-clipped=2.0 +2024-07-27 10:00:33,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.94 vs. limit=9.11 +2024-07-27 10:00:39,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.64 vs. limit=5.54 +2024-07-27 10:00:43,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.73 vs. limit=8.31 +2024-07-27 10:00:44,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.79 vs. limit=6.08 +2024-07-27 10:00:53,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=2173.3333333333335, ans=0.8239333333333334 +2024-07-27 10:01:04,976 INFO [train.py:1114] (3/4) Epoch 1, batch 1650, loss[loss=0.619, simple_loss=0.5792, pruned_loss=0.3308, over 4673.00 frames. ], tot_loss[loss=0.6121, simple_loss=0.5498, pruned_loss=0.3533, over 937135.41 frames. ], batch size: 14, lr: 4.45e-02, grad_scale: 32.0 +2024-07-27 10:01:05,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=2200.0, ans=0.035 +2024-07-27 10:01:07,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=2200.0, ans=0.22499999999999998 +2024-07-27 10:01:13,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=2213.3333333333335, ans=9.16 +2024-07-27 10:01:16,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=2213.3333333333335, ans=0.39625 +2024-07-27 10:01:25,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=2226.6666666666665, ans=0.1165 +2024-07-27 10:01:33,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=2240.0, ans=0.395 +2024-07-27 10:01:39,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.63 vs. limit=9.18 +2024-07-27 10:01:48,781 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:01:50,405 INFO [train.py:1114] (3/4) Epoch 1, batch 1700, loss[loss=0.4749, simple_loss=0.4427, pruned_loss=0.2547, over 4706.00 frames. ], tot_loss[loss=0.5991, simple_loss=0.5422, pruned_loss=0.3409, over 938916.62 frames. 
], batch size: 11, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:01:51,143 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.631e+01 6.759e+01 7.966e+01 9.777e+01 1.760e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 10:01:54,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=2266.6666666666665, ans=0.04291666666666667 +2024-07-27 10:01:57,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=2266.6666666666665, ans=0.21666666666666667 +2024-07-27 10:02:08,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2293.3333333333335, ans=0.3925 +2024-07-27 10:02:08,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=2293.3333333333335, ans=6.433333333333334 +2024-07-27 10:02:20,073 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.96 vs. limit=8.365 +2024-07-27 10:02:33,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.21 vs. limit=5.583333333333333 +2024-07-27 10:02:33,854 INFO [train.py:1114] (3/4) Epoch 1, batch 1750, loss[loss=0.4868, simple_loss=0.4543, pruned_loss=0.2605, over 4812.00 frames. ], tot_loss[loss=0.5859, simple_loss=0.5338, pruned_loss=0.3293, over 940056.29 frames. ], batch size: 11, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:02:37,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=2333.3333333333335, ans=0.390625 +2024-07-27 10:02:39,872 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.09 vs. limit=6.166666666666667 +2024-07-27 10:02:48,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.03 vs. limit=8.375 +2024-07-27 10:02:52,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=2346.6666666666665, ans=6.466666666666667 +2024-07-27 10:02:53,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.26 vs. limit=3.352 +2024-07-27 10:02:53,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=2346.6666666666665, ans=0.112 +2024-07-27 10:02:58,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=2360.0, ans=0.2264 +2024-07-27 10:02:59,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.69 vs. 
limit=9.27 +2024-07-27 10:03:03,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=2360.0, ans=0.8174 +2024-07-27 10:03:20,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=8.395 +2024-07-27 10:03:25,546 INFO [train.py:1114] (3/4) Epoch 1, batch 1800, loss[loss=0.5803, simple_loss=0.536, pruned_loss=0.3137, over 4638.00 frames. ], tot_loss[loss=0.5772, simple_loss=0.5287, pruned_loss=0.3211, over 940925.63 frames. ], batch size: 13, lr: 4.44e-02, grad_scale: 32.0 +2024-07-27 10:03:26,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.000e+01 7.252e+01 8.218e+01 9.576e+01 1.850e+02, threshold=1.644e+02, percent-clipped=1.0 +2024-07-27 10:03:39,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=2413.3333333333335, ans=0.38687499999999997 +2024-07-27 10:03:40,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=2413.3333333333335, ans=0.23620000000000002 +2024-07-27 10:03:59,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.22 vs. limit=9.33 +2024-07-27 10:04:02,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.12 vs. limit=4.976 +2024-07-27 10:04:05,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=2453.3333333333335, ans=0.385 +2024-07-27 10:04:08,177 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:04:13,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=2466.6666666666665, ans=0.22533333333333333 +2024-07-27 10:04:13,826 INFO [train.py:1114] (3/4) Epoch 1, batch 1850, loss[loss=0.5952, simple_loss=0.548, pruned_loss=0.3224, over 4816.00 frames. ], tot_loss[loss=0.569, simple_loss=0.5243, pruned_loss=0.3133, over 940588.16 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:04:15,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=2466.6666666666665, ans=0.044500000000000005 +2024-07-27 10:04:29,898 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.75 vs. limit=6.246666666666667 +2024-07-27 10:04:31,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=2493.3333333333335, ans=0.043899999999999995 +2024-07-27 10:04:36,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.57 vs. 
limit=6.246666666666667 +2024-07-27 10:04:41,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=2506.6666666666665, ans=0.3825 +2024-07-27 10:04:43,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=2506.6666666666665, ans=0.106 +2024-07-27 10:04:54,830 INFO [train.py:1114] (3/4) Epoch 1, batch 1900, loss[loss=0.5596, simple_loss=0.5362, pruned_loss=0.2913, over 4656.00 frames. ], tot_loss[loss=0.5598, simple_loss=0.5195, pruned_loss=0.305, over 942241.53 frames. ], batch size: 14, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:04:55,623 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.340e+01 7.620e+01 8.335e+01 9.482e+01 1.510e+02, threshold=1.667e+02, percent-clipped=0.0 +2024-07-27 10:04:56,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=2533.3333333333335, ans=0.105 +2024-07-27 10:04:58,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.06 vs. limit=9.4 +2024-07-27 10:05:02,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=2546.6666666666665, ans=0.8108666666666667 +2024-07-27 10:05:07,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2546.6666666666665, ans=0.27453333333333335 +2024-07-27 10:05:20,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.87 vs. limit=9.41 +2024-07-27 10:05:39,936 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.98 vs. limit=5.6466666666666665 +2024-07-27 10:05:39,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.40 vs. limit=5.6466666666666665 +2024-07-27 10:05:46,629 INFO [train.py:1114] (3/4) Epoch 1, batch 1950, loss[loss=0.4532, simple_loss=0.442, pruned_loss=0.2319, over 4893.00 frames. ], tot_loss[loss=0.5534, simple_loss=0.517, pruned_loss=0.2987, over 944091.20 frames. ], batch size: 13, lr: 4.43e-02, grad_scale: 32.0 +2024-07-27 10:05:55,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.14 vs. limit=8.48 +2024-07-27 10:06:11,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.39 vs. 
limit=9.47 +2024-07-27 10:06:16,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=2626.6666666666665, ans=0.376875 +2024-07-27 10:06:17,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=2626.6666666666665, ans=0.2394 +2024-07-27 10:06:24,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=2640.0, ans=0.10099999999999999 +2024-07-27 10:06:29,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.28 vs. limit=8.495 +2024-07-27 10:06:33,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=2653.3333333333335, ans=0.375625 +2024-07-27 10:06:35,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=15.65 vs. limit=8.5 +2024-07-27 10:06:42,513 INFO [train.py:1114] (3/4) Epoch 1, batch 2000, loss[loss=0.4367, simple_loss=0.4287, pruned_loss=0.2223, over 4798.00 frames. ], tot_loss[loss=0.5465, simple_loss=0.5133, pruned_loss=0.2928, over 941786.74 frames. ], batch size: 11, lr: 4.42e-02, grad_scale: 32.0 +2024-07-27 10:06:43,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.554e+01 8.059e+01 9.021e+01 3.573e+02, threshold=1.612e+02, percent-clipped=2.0 +2024-07-27 10:06:51,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=2666.6666666666665, ans=0.375 +2024-07-27 10:07:03,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.17 vs. limit=6.346666666666667 +2024-07-27 10:07:07,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=2693.3333333333335, ans=0.2404 +2024-07-27 10:07:07,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=2693.3333333333335, ans=0.2730666666666667 +2024-07-27 10:07:07,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.48 vs. limit=9.52 +2024-07-27 10:07:09,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=2706.6666666666665, ans=0.04154166666666667 +2024-07-27 10:07:14,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=2706.6666666666665, ans=0.37312500000000004 +2024-07-27 10:07:15,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=2706.6666666666665, ans=0.37312500000000004 +2024-07-27 10:07:17,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2706.6666666666665, ans=0.2729333333333333 +2024-07-27 10:07:19,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.91 vs. 
limit=5.68 +2024-07-27 10:07:19,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. limit=3.408 +2024-07-27 10:07:26,887 INFO [train.py:1114] (3/4) Epoch 1, batch 2050, loss[loss=0.4531, simple_loss=0.4411, pruned_loss=0.2326, over 4602.00 frames. ], tot_loss[loss=0.5381, simple_loss=0.5081, pruned_loss=0.2864, over 939643.26 frames. ], batch size: 11, lr: 4.42e-02, grad_scale: 64.0 +2024-07-27 10:07:29,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2733.3333333333335, ans=0.27266666666666667 +2024-07-27 10:07:34,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=3.412 +2024-07-27 10:07:37,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.21 vs. limit=6.373333333333333 +2024-07-27 10:07:53,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=2773.3333333333335, ans=0.22226666666666667 +2024-07-27 10:07:56,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.12 vs. limit=8.54 +2024-07-27 10:07:58,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=2786.6666666666665, ans=0.27213333333333334 +2024-07-27 10:08:02,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=2786.6666666666665, ans=0.369375 +2024-07-27 10:08:08,457 INFO [train.py:1114] (3/4) Epoch 1, batch 2100, loss[loss=0.4965, simple_loss=0.4896, pruned_loss=0.2516, over 4760.00 frames. ], tot_loss[loss=0.5307, simple_loss=0.5043, pruned_loss=0.2804, over 941566.25 frames. ], batch size: 13, lr: 4.42e-02, grad_scale: 64.0 +2024-07-27 10:08:09,834 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.063e+01 7.785e+01 9.607e+01 1.091e+02 1.489e+02, threshold=1.921e+02, percent-clipped=0.0 +2024-07-27 10:08:11,785 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.50 vs. limit=8.55 +2024-07-27 10:08:19,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.35 vs. limit=8.555 +2024-07-27 10:08:22,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.44 vs. limit=3.422 +2024-07-27 10:08:23,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.55 vs. limit=9.61 +2024-07-27 10:08:34,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.92 vs. 
limit=9.620000000000001 +2024-07-27 10:08:35,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=2840.0, ans=0.09025 +2024-07-27 10:08:39,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.32 vs. limit=6.42 +2024-07-27 10:08:39,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=2840.0, ans=9.629999999999999 +2024-07-27 10:08:46,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.34 vs. limit=5.136 +2024-07-27 10:08:59,115 INFO [train.py:1114] (3/4) Epoch 1, batch 2150, loss[loss=0.496, simple_loss=0.4928, pruned_loss=0.2496, over 4903.00 frames. ], tot_loss[loss=0.523, simple_loss=0.5003, pruned_loss=0.2743, over 944487.12 frames. ], batch size: 13, lr: 4.41e-02, grad_scale: 64.0 +2024-07-27 10:09:03,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.93 vs. limit=8.575 +2024-07-27 10:09:11,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. limit=3.432 +2024-07-27 10:09:17,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=2880.0, ans=0.365 +2024-07-27 10:09:19,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.32 vs. limit=9.66 +2024-07-27 10:09:22,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.81 vs. limit=9.67 +2024-07-27 10:09:45,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=2906.6666666666665, ans=0.2709333333333333 +2024-07-27 10:09:50,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.01 vs. limit=9.68 +2024-07-27 10:10:05,504 INFO [train.py:1114] (3/4) Epoch 1, batch 2200, loss[loss=0.507, simple_loss=0.5155, pruned_loss=0.2492, over 4821.00 frames. ], tot_loss[loss=0.5158, simple_loss=0.4959, pruned_loss=0.269, over 943476.94 frames. ], batch size: 14, lr: 4.41e-02, grad_scale: 64.0 +2024-07-27 10:10:06,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=9.7 +2024-07-27 10:10:06,255 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.332e+01 7.672e+01 8.381e+01 9.351e+01 1.723e+02, threshold=1.676e+02, percent-clipped=0.0 +2024-07-27 10:10:15,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.47 vs. limit=6.466666666666667 +2024-07-27 10:10:29,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.97 vs. limit=5.74 +2024-07-27 10:10:37,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.66 vs. 
limit=6.486666666666666 +2024-07-27 10:10:41,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=2986.6666666666665, ans=0.088 +2024-07-27 10:10:46,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.71 vs. limit=6.493333333333333 +2024-07-27 10:10:47,989 INFO [train.py:1114] (3/4) Epoch 1, batch 2250, loss[loss=0.4266, simple_loss=0.4394, pruned_loss=0.2069, over 4694.00 frames. ], tot_loss[loss=0.5125, simple_loss=0.4942, pruned_loss=0.2662, over 941903.90 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:10:49,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=3000.0, ans=0.359375 +2024-07-27 10:10:50,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=3000.0, ans=0.795 +2024-07-27 10:10:56,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3013.3333333333335, ans=0.35875 +2024-07-27 10:10:57,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=3013.3333333333335, ans=0.08699999999999998 +2024-07-27 10:11:04,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.44 vs. limit=9.76 +2024-07-27 10:11:10,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.36 vs. limit=9.77 +2024-07-27 10:11:13,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=3026.6666666666665, ans=5.210666666666667 +2024-07-27 10:11:23,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=3026.6666666666665, ans=0.1216666666666667 +2024-07-27 10:11:48,652 INFO [train.py:1114] (3/4) Epoch 1, batch 2300, loss[loss=0.4755, simple_loss=0.4643, pruned_loss=0.2433, over 4943.00 frames. ], tot_loss[loss=0.5055, simple_loss=0.49, pruned_loss=0.2611, over 939187.91 frames. ], batch size: 12, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:11:49,438 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.418e+01 7.845e+01 8.717e+01 9.817e+01 1.762e+02, threshold=1.743e+02, percent-clipped=1.0 +2024-07-27 10:11:49,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.30 vs. limit=8.65 +2024-07-27 10:11:50,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=3066.6666666666665, ans=0.21933333333333332 +2024-07-27 10:11:53,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=3066.6666666666665, ans=9.8 +2024-07-27 10:11:56,323 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.34 vs. limit=9.81 +2024-07-27 10:11:58,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. 
limit=8.655 +2024-07-27 10:12:07,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.56 vs. limit=9.82 +2024-07-27 10:12:10,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=3093.3333333333335, ans=0.355 +2024-07-27 10:12:19,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=3106.6666666666665, ans=0.354375 +2024-07-27 10:12:25,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=3120.0, ans=0.35375 +2024-07-27 10:12:29,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=3120.0, ans=0.35375 +2024-07-27 10:12:32,064 INFO [train.py:1114] (3/4) Epoch 1, batch 2350, loss[loss=0.5542, simple_loss=0.5237, pruned_loss=0.2924, over 4638.00 frames. ], tot_loss[loss=0.4995, simple_loss=0.487, pruned_loss=0.2566, over 941142.39 frames. ], batch size: 13, lr: 4.40e-02, grad_scale: 64.0 +2024-07-27 10:12:38,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.30 vs. limit=9.85 +2024-07-27 10:12:41,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.34 vs. limit=9.86 +2024-07-27 10:12:44,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=3146.6666666666665, ans=0.35250000000000004 +2024-07-27 10:12:49,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=3146.6666666666665, ans=6.573333333333333 +2024-07-27 10:13:03,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=8.69 +2024-07-27 10:13:06,273 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:13:16,261 INFO [train.py:1114] (3/4) Epoch 1, batch 2400, loss[loss=0.4047, simple_loss=0.4224, pruned_loss=0.1935, over 4643.00 frames. ], tot_loss[loss=0.4973, simple_loss=0.4862, pruned_loss=0.2546, over 941007.73 frames. 
], batch size: 12, lr: 4.39e-02, grad_scale: 64.0 +2024-07-27 10:13:16,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=3200.0, ans=0.35 +2024-07-27 10:13:16,980 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.405e+01 7.978e+01 8.770e+01 1.032e+02 1.902e+02, threshold=1.754e+02, percent-clipped=2.0 +2024-07-27 10:13:27,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3213.3333333333335, ans=0.26786666666666664 +2024-07-27 10:13:34,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=3226.6666666666665, ans=0.34875 +2024-07-27 10:13:36,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=3226.6666666666665, ans=0.34875 +2024-07-27 10:13:37,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=5.290666666666667 +2024-07-27 10:13:40,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=3240.0, ans=0.2676 +2024-07-27 10:13:43,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=3240.0, ans=0.7866 +2024-07-27 10:13:44,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3240.0, ans=0.2676 +2024-07-27 10:13:46,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=3240.0, ans=0.07975 +2024-07-27 10:13:56,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=8.725 +2024-07-27 10:13:56,392 INFO [train.py:1114] (3/4) Epoch 1, batch 2450, loss[loss=0.4984, simple_loss=0.4984, pruned_loss=0.2492, over 4698.00 frames. ], tot_loss[loss=0.4939, simple_loss=0.4849, pruned_loss=0.2518, over 936744.79 frames. ], batch size: 13, lr: 4.39e-02, grad_scale: 64.0 +2024-07-27 10:13:57,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.28 vs. limit=9.95 +2024-07-27 10:14:07,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=9.96 +2024-07-27 10:14:07,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.07 vs. limit=6.64 +2024-07-27 10:14:08,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=3280.0, ans=9.96 +2024-07-27 10:14:09,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=3280.0, ans=0.077 +2024-07-27 10:14:19,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.66 vs. 
limit=5.82 +2024-07-27 10:14:28,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.91 vs. limit=6.6466666666666665 +2024-07-27 10:14:44,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.22 vs. limit=6.653333333333333 +2024-07-27 10:14:51,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=3320.0, ans=0.07925 +2024-07-27 10:14:52,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=3320.0, ans=0.0755 +2024-07-27 10:14:53,958 INFO [train.py:1114] (3/4) Epoch 1, batch 2500, loss[loss=0.5298, simple_loss=0.5208, pruned_loss=0.2694, over 4809.00 frames. ], tot_loss[loss=0.4894, simple_loss=0.4826, pruned_loss=0.2483, over 938876.22 frames. ], batch size: 14, lr: 4.38e-02, grad_scale: 64.0 +2024-07-27 10:14:54,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3333.3333333333335, ans=0.26666666666666666 +2024-07-27 10:14:54,651 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.439e+01 7.441e+01 8.140e+01 9.225e+01 1.396e+02, threshold=1.628e+02, percent-clipped=0.0 +2024-07-27 10:14:58,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=3333.3333333333335, ans=0.34375 +2024-07-27 10:14:58,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.04 vs. limit=5.833333333333333 +2024-07-27 10:15:01,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=3346.6666666666665, ans=0.343125 +2024-07-27 10:15:09,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.00 vs. limit=8.754999999999999 +2024-07-27 10:15:14,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=11.72 vs. limit=10.01 +2024-07-27 10:15:35,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=3360.0, ans=0.3425 +2024-07-27 10:15:48,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.88 vs. limit=10.03 +2024-07-27 10:15:53,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.41 vs. limit=10.04 +2024-07-27 10:16:03,508 INFO [train.py:1114] (3/4) Epoch 1, batch 2550, loss[loss=0.4377, simple_loss=0.4439, pruned_loss=0.2158, over 4793.00 frames. ], tot_loss[loss=0.4834, simple_loss=0.4789, pruned_loss=0.2441, over 938563.13 frames. ], batch size: 11, lr: 4.38e-02, grad_scale: 64.0 +2024-07-27 10:16:09,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.94 vs. 
limit=5.85 +2024-07-27 10:16:11,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.29 vs. limit=6.706666666666667 +2024-07-27 10:16:12,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=3413.3333333333335, ans=0.33999999999999997 +2024-07-27 10:16:20,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=3413.3333333333335, ans=0.33999999999999997 +2024-07-27 10:16:22,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.91 vs. limit=10.06 +2024-07-27 10:16:30,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.65 vs. limit=6.713333333333333 +2024-07-27 10:16:36,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=10.80 vs. limit=10.08 +2024-07-27 10:16:40,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.01 vs. limit=5.863333333333333 +2024-07-27 10:16:42,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.99 vs. limit=5.863333333333333 +2024-07-27 10:16:42,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=3453.3333333333335, ans=0.338125 +2024-07-27 10:16:48,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=3466.6666666666665, ans=0.3375 +2024-07-27 10:16:49,871 INFO [train.py:1114] (3/4) Epoch 1, batch 2600, loss[loss=0.5124, simple_loss=0.5017, pruned_loss=0.2615, over 4905.00 frames. ], tot_loss[loss=0.4812, simple_loss=0.4777, pruned_loss=0.2425, over 937525.19 frames. ], batch size: 13, lr: 4.37e-02, grad_scale: 64.0 +2024-07-27 10:16:50,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.396e+01 7.798e+01 8.275e+01 9.472e+01 1.752e+02, threshold=1.655e+02, percent-clipped=1.0 +2024-07-27 10:17:03,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=3480.0, ans=0.021699999999999997 +2024-07-27 10:17:07,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.90 vs. limit=6.746666666666667 +2024-07-27 10:17:07,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.38 vs. limit=5.873333333333333 +2024-07-27 10:17:12,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3493.3333333333335, ans=0.33625 +2024-07-27 10:17:14,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.85 vs. limit=10.120000000000001 +2024-07-27 10:17:24,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.93 vs. 
limit=8.815 +2024-07-27 10:17:38,260 INFO [train.py:1114] (3/4) Epoch 1, batch 2650, loss[loss=0.5613, simple_loss=0.5425, pruned_loss=0.29, over 4632.00 frames. ], tot_loss[loss=0.4775, simple_loss=0.4757, pruned_loss=0.2398, over 939489.06 frames. ], batch size: 16, lr: 4.37e-02, grad_scale: 64.0 +2024-07-27 10:17:40,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3533.3333333333335, ans=0.05833333333333329 +2024-07-27 10:17:42,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=3533.3333333333335, ans=0.334375 +2024-07-27 10:17:44,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=3533.3333333333335, ans=0.253 +2024-07-27 10:17:46,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=3546.6666666666665, ans=0.26453333333333334 +2024-07-27 10:17:50,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=3546.6666666666665, ans=6.773333333333333 +2024-07-27 10:17:51,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=10.16 +2024-07-27 10:17:52,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=3546.6666666666665, ans=0.33375 +2024-07-27 10:17:52,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=3546.6666666666665, ans=0.03891666666666667 +2024-07-27 10:17:54,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.06 vs. limit=5.886666666666667 +2024-07-27 10:17:56,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.38 vs. limit=6.78 +2024-07-27 10:18:06,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=3573.3333333333335, ans=0.3325 +2024-07-27 10:18:40,845 INFO [train.py:1114] (3/4) Epoch 1, batch 2700, loss[loss=0.4849, simple_loss=0.4902, pruned_loss=0.2398, over 4741.00 frames. ], tot_loss[loss=0.4744, simple_loss=0.4741, pruned_loss=0.2375, over 939413.52 frames. ], batch size: 14, lr: 4.36e-02, grad_scale: 64.0 +2024-07-27 10:18:41,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.609e+01 7.664e+01 8.465e+01 9.239e+01 1.807e+02, threshold=1.693e+02, percent-clipped=1.0 +2024-07-27 10:18:42,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=3600.0, ans=0.774 +2024-07-27 10:18:44,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.73 vs. limit=6.8 +2024-07-27 10:18:45,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.98 vs. 
limit=10.2 +2024-07-27 10:19:00,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3613.3333333333335, ans=0.26386666666666664 +2024-07-27 10:19:08,660 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.19 vs. limit=5.906666666666666 +2024-07-27 10:19:13,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=8.865 +2024-07-27 10:19:14,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=3640.0, ans=0.06349999999999997 +2024-07-27 10:19:32,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.68 vs. limit=6.826666666666667 +2024-07-27 10:19:35,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=3653.3333333333335, ans=0.32875 +2024-07-27 10:19:37,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=3666.6666666666665, ans=0.041666666666666685 +2024-07-27 10:19:40,870 INFO [train.py:1114] (3/4) Epoch 1, batch 2750, loss[loss=0.3665, simple_loss=0.3948, pruned_loss=0.1691, over 4702.00 frames. ], tot_loss[loss=0.4688, simple_loss=0.4704, pruned_loss=0.2337, over 939372.31 frames. ], batch size: 12, lr: 4.36e-02, grad_scale: 32.0 +2024-07-27 10:19:55,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.01 vs. limit=10.26 +2024-07-27 10:20:00,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=3680.0, ans=0.2552 +2024-07-27 10:20:06,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.07 vs. limit=5.923333333333334 +2024-07-27 10:20:17,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=3720.0, ans=0.7698 +2024-07-27 10:20:27,063 INFO [train.py:1114] (3/4) Epoch 1, batch 2800, loss[loss=0.5455, simple_loss=0.5148, pruned_loss=0.2881, over 3434.00 frames. ], tot_loss[loss=0.4657, simple_loss=0.4685, pruned_loss=0.2315, over 937716.42 frames. ], batch size: 35, lr: 4.36e-02, grad_scale: 32.0 +2024-07-27 10:20:28,616 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.137e+01 7.490e+01 8.370e+01 9.871e+01 2.286e+02, threshold=1.674e+02, percent-clipped=1.0 +2024-07-27 10:20:29,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.25 vs. 
limit=10.3 +2024-07-27 10:20:29,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=3733.3333333333335, ans=0.05999999999999997 +2024-07-27 10:20:29,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=3733.3333333333335, ans=0.7693333333333333 +2024-07-27 10:20:36,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=8.905 +2024-07-27 10:20:45,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=3760.0, ans=0.32375 +2024-07-27 10:20:48,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.77 vs. limit=10.32 +2024-07-27 10:20:51,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.38 vs. limit=10.33 +2024-07-27 10:21:09,262 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:21:11,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=3800.0, ans=0.767 +2024-07-27 10:21:12,418 INFO [train.py:1114] (3/4) Epoch 1, batch 2850, loss[loss=0.4025, simple_loss=0.4118, pruned_loss=0.1966, over 4960.00 frames. ], tot_loss[loss=0.464, simple_loss=0.4679, pruned_loss=0.2301, over 936092.28 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0 +2024-07-27 10:21:14,424 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.25 vs. limit=6.9 +2024-07-27 10:21:18,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=3800.0, ans=0.321875 +2024-07-27 10:21:20,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=3813.3333333333335, ans=0.056999999999999995 +2024-07-27 10:21:22,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.14 vs. limit=10.36 +2024-07-27 10:21:30,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=3826.6666666666665, ans=0.056499999999999995 +2024-07-27 10:21:31,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=3826.6666666666665, ans=0.2574 +2024-07-27 10:21:38,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.08 vs. limit=10.379999999999999 +2024-07-27 10:21:41,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=3840.0, ans=0.020000000000000018 +2024-07-27 10:21:46,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.73 vs. limit=10.39 +2024-07-27 10:21:55,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.96 vs. 
limit=10.39 +2024-07-27 10:21:55,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=3853.3333333333335, ans=0.7651333333333333 +2024-07-27 10:21:57,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=3866.6666666666665, ans=0.019466666666666667 +2024-07-27 10:21:58,258 INFO [train.py:1114] (3/4) Epoch 1, batch 2900, loss[loss=0.4228, simple_loss=0.4461, pruned_loss=0.1998, over 4828.00 frames. ], tot_loss[loss=0.4604, simple_loss=0.4672, pruned_loss=0.2269, over 939906.85 frames. ], batch size: 13, lr: 4.35e-02, grad_scale: 32.0 +2024-07-27 10:21:58,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=3866.6666666666665, ans=0.258 +2024-07-27 10:21:58,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=3866.6666666666665, ans=0.013000000000000012 +2024-07-27 10:22:06,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.56 vs. limit=6.933333333333334 +2024-07-27 10:22:07,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.485e+01 7.711e+01 8.512e+01 9.288e+01 5.214e+02, threshold=1.702e+02, percent-clipped=1.0 +2024-07-27 10:22:08,494 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.06 vs. limit=10.4 +2024-07-27 10:22:10,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.42 vs. limit=10.4 +2024-07-27 10:22:11,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=3866.6666666666665, ans=0.013000000000000012 +2024-07-27 10:22:14,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=3880.0, ans=0.7888 +2024-07-27 10:22:17,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3880.0, ans=0.2612 +2024-07-27 10:22:24,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.11 vs. limit=5.973333333333334 +2024-07-27 10:22:43,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=3920.0, ans=0.31625000000000003 +2024-07-27 10:22:45,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.32 vs. limit=8.97 +2024-07-27 10:22:47,288 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.34 vs. limit=10.45 +2024-07-27 10:22:47,619 INFO [train.py:1114] (3/4) Epoch 1, batch 2950, loss[loss=0.4143, simple_loss=0.4354, pruned_loss=0.1966, over 4697.00 frames. ], tot_loss[loss=0.4561, simple_loss=0.464, pruned_loss=0.2242, over 938887.51 frames. ], batch size: 12, lr: 4.34e-02, grad_scale: 32.0 +2024-07-27 10:22:52,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=10.77 vs. 
limit=10.45 +2024-07-27 10:22:57,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=3933.3333333333335, ans=0.008333333333333304 +2024-07-27 10:23:00,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=3933.3333333333335, ans=0.315625 +2024-07-27 10:23:17,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=3960.0, ans=0.26039999999999996 +2024-07-27 10:23:18,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.11 vs. limit=10.47 +2024-07-27 10:23:21,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.24 vs. limit=5.993333333333333 +2024-07-27 10:23:21,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=3973.3333333333335, ans=0.31375 +2024-07-27 10:23:30,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=3986.6666666666665, ans=0.7604666666666667 +2024-07-27 10:23:38,736 INFO [train.py:1114] (3/4) Epoch 1, batch 3000, loss[loss=0.3963, simple_loss=0.4244, pruned_loss=0.1841, over 4765.00 frames. ], tot_loss[loss=0.4539, simple_loss=0.4627, pruned_loss=0.2226, over 938424.78 frames. ], batch size: 13, lr: 4.34e-02, grad_scale: 32.0 +2024-07-27 10:23:38,879 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 10:23:46,711 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([3.7388, 3.4914, 3.9340, 3.9079], device='cuda:3') +2024-07-27 10:23:48,145 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.3498, 4.7682, 4.5647, 4.9091], device='cuda:3') +2024-07-27 10:23:52,403 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=0.3584, simple_loss=0.4212, pruned_loss=0.1478, over 944034.00 frames. +2024-07-27 10:23:52,914 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 10:23:54,438 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.536e+01 7.537e+01 8.350e+01 9.496e+01 1.510e+02, threshold=1.670e+02, percent-clipped=0.0 +2024-07-27 10:23:58,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=4000.0, ans=0.3125 +2024-07-27 10:24:10,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.83 vs. limit=10.5 +2024-07-27 10:24:12,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=4013.3333333333335, ans=0.311875 +2024-07-27 10:24:14,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.90 vs. limit=9.004999999999999 +2024-07-27 10:24:20,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.47 vs. 
limit=5.605333333333333 +2024-07-27 10:24:28,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=4026.6666666666665, ans=7.516666666666667 +2024-07-27 10:24:29,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4026.6666666666665, ans=0.04988888888888889 +2024-07-27 10:24:41,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.55 vs. limit=7.026666666666667 +2024-07-27 10:24:49,758 INFO [train.py:1114] (3/4) Epoch 1, batch 3050, loss[loss=0.4469, simple_loss=0.4592, pruned_loss=0.2173, over 4639.00 frames. ], tot_loss[loss=0.4541, simple_loss=0.4629, pruned_loss=0.2226, over 937324.88 frames. ], batch size: 12, lr: 4.33e-02, grad_scale: 32.0 +2024-07-27 10:24:49,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=4066.6666666666665, ans=0.309375 +2024-07-27 10:24:53,291 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=2.747e-02 +2024-07-27 10:24:55,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=4066.6666666666665, ans=0.009985507246376812 +2024-07-27 10:24:57,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.23 vs. limit=6.02 +2024-07-27 10:25:03,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4080.0, ans=0.04966666666666667 +2024-07-27 10:25:03,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=4080.0, ans=0.04966666666666667 +2024-07-27 10:25:09,386 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.22 vs. limit=10.57 +2024-07-27 10:25:14,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.42 vs. limit=10.57 +2024-07-27 10:25:21,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=4106.666666666667, ans=0.049555555555555554 +2024-07-27 10:25:25,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=6.22 vs. limit=6.026666666666667 +2024-07-27 10:25:36,738 INFO [train.py:1114] (3/4) Epoch 1, batch 3100, loss[loss=0.4974, simple_loss=0.4947, pruned_loss=0.25, over 4622.00 frames. ], tot_loss[loss=0.4508, simple_loss=0.4609, pruned_loss=0.2203, over 938138.34 frames. 
], batch size: 16, lr: 4.33e-02, grad_scale: 32.0 +2024-07-27 10:25:36,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=4133.333333333333, ans=0.7553333333333334 +2024-07-27 10:25:38,257 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.617e+01 7.727e+01 8.300e+01 9.366e+01 1.573e+02, threshold=1.660e+02, percent-clipped=0.0 +2024-07-27 10:25:51,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=4146.666666666667, ans=0.009968115942028986 +2024-07-27 10:25:53,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.61 vs. limit=7.073333333333334 +2024-07-27 10:26:05,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=10.620000000000001 +2024-07-27 10:26:15,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=3.628 +2024-07-27 10:26:25,186 INFO [train.py:1114] (3/4) Epoch 1, batch 3150, loss[loss=0.4348, simple_loss=0.4432, pruned_loss=0.2132, over 4625.00 frames. ], tot_loss[loss=0.4485, simple_loss=0.4596, pruned_loss=0.2187, over 938282.57 frames. ], batch size: 17, lr: 4.32e-02, grad_scale: 32.0 +2024-07-27 10:26:32,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=4213.333333333333, ans=0.3025 +2024-07-27 10:26:36,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=4213.333333333333, ans=0.04911111111111111 +2024-07-27 10:26:41,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=5.15 vs. limit=9.085 +2024-07-27 10:26:54,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=10.68 +2024-07-27 10:27:05,658 INFO [train.py:1114] (3/4) Epoch 1, batch 3200, loss[loss=0.3554, simple_loss=0.387, pruned_loss=0.1619, over 4828.00 frames. ], tot_loss[loss=0.4454, simple_loss=0.4578, pruned_loss=0.2165, over 939573.58 frames. ], batch size: 13, lr: 4.32e-02, grad_scale: 32.0 +2024-07-27 10:27:12,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.15 vs. limit=10.7 +2024-07-27 10:27:12,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.236e+01 7.498e+01 8.243e+01 8.897e+01 1.348e+02, threshold=1.649e+02, percent-clipped=0.0 +2024-07-27 10:27:13,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=4266.666666666667, ans=0.3 +2024-07-27 10:27:33,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=4280.0, ans=0.299375 +2024-07-27 10:27:33,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.82 vs. 
limit=10.71 +2024-07-27 10:27:38,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.66 vs. limit=9.105 +2024-07-27 10:27:38,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=4280.0, ans=0.009939130434782608 +2024-07-27 10:27:42,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=4280.0, ans=0.04883333333333333 +2024-07-27 10:27:52,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4293.333333333333, ans=0.29874999999999996 +2024-07-27 10:27:57,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=4306.666666666667, ans=0.298125 +2024-07-27 10:28:15,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=4320.0, ans=0.2568 +2024-07-27 10:28:17,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=4320.0, ans=0.04866666666666667 +2024-07-27 10:28:19,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=4320.0, ans=0.009930434782608695 +2024-07-27 10:28:29,685 INFO [train.py:1114] (3/4) Epoch 1, batch 3250, loss[loss=0.4547, simple_loss=0.4766, pruned_loss=0.2164, over 4928.00 frames. ], tot_loss[loss=0.4432, simple_loss=0.4562, pruned_loss=0.2152, over 940474.03 frames. ], batch size: 14, lr: 4.31e-02, grad_scale: 32.0 +2024-07-27 10:28:31,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=4333.333333333333, ans=0.7933333333333333 +2024-07-27 10:28:47,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=4360.0, ans=0.295625 +2024-07-27 10:28:48,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.98 vs. limit=9.135 +2024-07-27 10:28:49,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=4360.0, ans=0.295625 +2024-07-27 10:28:58,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.01 vs. limit=6.093333333333334 +2024-07-27 10:29:00,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=4373.333333333333, ans=0.025 +2024-07-27 10:29:12,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.05 vs. limit=7.2 +2024-07-27 10:29:12,740 INFO [train.py:1114] (3/4) Epoch 1, batch 3300, loss[loss=0.469, simple_loss=0.4784, pruned_loss=0.2298, over 4735.00 frames. ], tot_loss[loss=0.4393, simple_loss=0.4533, pruned_loss=0.2127, over 941027.13 frames. ], batch size: 19, lr: 4.31e-02, grad_scale: 32.0 +2024-07-27 10:29:14,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.33 vs. 
limit=10.8 +2024-07-27 10:29:14,418 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.348e+01 7.414e+01 8.133e+01 9.480e+01 1.579e+02, threshold=1.627e+02, percent-clipped=0.0 +2024-07-27 10:29:14,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=9.15 +2024-07-27 10:29:16,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.55 vs. limit=10.8 +2024-07-27 10:29:17,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=4400.0, ans=0.04833333333333334 +2024-07-27 10:30:06,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.78 vs. limit=7.213333333333333 +2024-07-27 10:30:23,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=4440.0, ans=0.2556 +2024-07-27 10:30:27,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=4440.0, ans=3.666 +2024-07-27 10:30:36,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=5.781333333333333 +2024-07-27 10:30:37,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=4453.333333333333, ans=0.7441333333333333 +2024-07-27 10:30:47,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.48 vs. limit=9.17 +2024-07-27 10:30:53,658 INFO [train.py:1114] (3/4) Epoch 1, batch 3350, loss[loss=0.4842, simple_loss=0.4948, pruned_loss=0.2369, over 4588.00 frames. ], tot_loss[loss=0.4405, simple_loss=0.4543, pruned_loss=0.2133, over 938750.34 frames. ], batch size: 17, lr: 4.30e-02, grad_scale: 32.0 +2024-07-27 10:31:07,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=4466.666666666667, ans=0.036041666666666666 +2024-07-27 10:31:13,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=4466.666666666667, ans=0.04805555555555556 +2024-07-27 10:31:20,085 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:31:21,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=4480.0, ans=0.048 +2024-07-27 10:31:32,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=4506.666666666667, ans=0.28875 +2024-07-27 10:31:46,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=4520.0, ans=0.7418 +2024-07-27 10:31:46,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=4520.0, ans=0.7418 +2024-07-27 10:31:55,319 INFO [train.py:1114] (3/4) Epoch 1, batch 3400, loss[loss=0.3754, simple_loss=0.3821, pruned_loss=0.1844, over 4799.00 frames. 
], tot_loss[loss=0.4392, simple_loss=0.4534, pruned_loss=0.2125, over 937587.77 frames. ], batch size: 11, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:31:56,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.77 vs. limit=10.9 +2024-07-27 10:31:56,824 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.312e+01 7.521e+01 8.329e+01 9.335e+01 1.968e+02, threshold=1.666e+02, percent-clipped=1.0 +2024-07-27 10:32:15,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=4546.666666666667, ans=0.009881159420289855 +2024-07-27 10:32:18,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.58 vs. limit=7.273333333333333 +2024-07-27 10:32:38,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=4573.333333333333, ans=0.00987536231884058 +2024-07-27 10:32:41,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=4573.333333333333, ans=0.285625 +2024-07-27 10:32:54,901 INFO [train.py:1114] (3/4) Epoch 1, batch 3450, loss[loss=0.4992, simple_loss=0.5047, pruned_loss=0.2469, over 4701.00 frames. ], tot_loss[loss=0.4391, simple_loss=0.4535, pruned_loss=0.2124, over 937927.56 frames. ], batch size: 19, lr: 4.29e-02, grad_scale: 32.0 +2024-07-27 10:32:55,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=4600.0, ans=7.3 +2024-07-27 10:32:55,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4600.0, ans=0.254 +2024-07-27 10:33:08,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=4613.333333333333, ans=0.28375 +2024-07-27 10:33:16,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.23 vs. limit=6.153333333333333 +2024-07-27 10:33:18,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=4626.666666666667, ans=0.04738888888888889 +2024-07-27 10:33:21,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.30 vs. limit=6.156666666666666 +2024-07-27 10:33:26,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.03 vs. limit=10.97 +2024-07-27 10:33:53,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=4653.333333333333, ans=0.20346666666666668 +2024-07-27 10:33:53,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=4653.333333333333, ans=0.1 +2024-07-27 10:33:58,731 INFO [train.py:1114] (3/4) Epoch 1, batch 3500, loss[loss=0.409, simple_loss=0.4269, pruned_loss=0.1955, over 4940.00 frames. ], tot_loss[loss=0.4343, simple_loss=0.4505, pruned_loss=0.2091, over 938690.62 frames. 
], batch size: 12, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:34:00,615 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.699e+01 7.535e+01 8.121e+01 9.134e+01 1.279e+02, threshold=1.624e+02, percent-clipped=0.0 +2024-07-27 10:34:04,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.31 vs. limit=7.333333333333334 +2024-07-27 10:35:00,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.07 vs. limit=11.03 +2024-07-27 10:35:03,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.08 vs. limit=9.27 +2024-07-27 10:35:07,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=4720.0, ans=0.047 +2024-07-27 10:35:11,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=4720.0, ans=0.7348 +2024-07-27 10:35:18,203 INFO [train.py:1114] (3/4) Epoch 1, batch 3550, loss[loss=0.444, simple_loss=0.4587, pruned_loss=0.2147, over 4665.00 frames. ], tot_loss[loss=0.4331, simple_loss=0.4499, pruned_loss=0.2081, over 939440.57 frames. ], batch size: 14, lr: 4.28e-02, grad_scale: 32.0 +2024-07-27 10:35:18,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.99 vs. limit=7.366666666666666 +2024-07-27 10:35:19,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=11.63 vs. limit=11.05 +2024-07-27 10:35:19,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=4733.333333333333, ans=0.04694444444444445 +2024-07-27 10:35:23,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.47 vs. 
limit=7.366666666666666 +2024-07-27 10:35:37,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=4746.666666666667, ans=0.0 +2024-07-27 10:35:44,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=4760.0, ans=0.276875 +2024-07-27 10:35:47,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=4760.0, ans=0.009834782608695653 +2024-07-27 10:35:48,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=4773.333333333333, ans=0.25226666666666664 +2024-07-27 10:35:51,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=4773.333333333333, ans=0.7329333333333334 +2024-07-27 10:35:59,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=4786.666666666667, ans=0.7324666666666667 +2024-07-27 10:36:07,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=4786.666666666667, ans=0.7324666666666667 +2024-07-27 10:36:12,110 INFO [train.py:1114] (3/4) Epoch 1, batch 3600, loss[loss=0.4245, simple_loss=0.4351, pruned_loss=0.2069, over 4966.00 frames. ], tot_loss[loss=0.432, simple_loss=0.449, pruned_loss=0.2074, over 941071.63 frames. ], batch size: 13, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:36:13,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=4800.0, ans=0.00982608695652174 +2024-07-27 10:36:13,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.357e+01 7.358e+01 8.127e+01 9.443e+01 1.425e+02, threshold=1.625e+02, percent-clipped=0.0 +2024-07-27 10:36:17,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.77 vs. limit=11.1 +2024-07-27 10:36:20,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.05 vs. 
limit=9.305 +2024-07-27 10:36:25,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=4813.333333333333, ans=0.27437500000000004 +2024-07-27 10:36:26,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=4813.333333333333, ans=0.27437500000000004 +2024-07-27 10:36:29,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=4826.666666666667, ans=0.07 +2024-07-27 10:36:40,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=4840.0, ans=0.04949747468305833 +2024-07-27 10:36:43,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=4840.0, ans=0.06975 +2024-07-27 10:36:43,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=4840.0, ans=0.2516 +2024-07-27 10:36:47,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.36 vs. limit=7.426666666666666 +2024-07-27 10:36:47,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=3.7279999999999998 +2024-07-27 10:37:08,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=4853.333333333333, ans=0.04644444444444445 +2024-07-27 10:37:22,218 INFO [train.py:1114] (3/4) Epoch 1, batch 3650, loss[loss=0.4709, simple_loss=0.4921, pruned_loss=0.2248, over 4899.00 frames. ], tot_loss[loss=0.4293, simple_loss=0.4473, pruned_loss=0.2057, over 941368.16 frames. ], batch size: 15, lr: 4.27e-02, grad_scale: 32.0 +2024-07-27 10:37:25,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.81 vs. limit=9.325 +2024-07-27 10:37:31,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.35 vs. limit=6.216666666666667 +2024-07-27 10:37:37,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=4880.0, ans=0.27125 +2024-07-27 10:37:44,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=4893.333333333333, ans=0.0 +2024-07-27 10:38:00,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=4920.0, ans=0.0098 +2024-07-27 10:38:00,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=4920.0, ans=0.06925 +2024-07-27 10:38:08,644 INFO [train.py:1114] (3/4) Epoch 1, batch 3700, loss[loss=0.4537, simple_loss=0.4649, pruned_loss=0.2212, over 4934.00 frames. ], tot_loss[loss=0.4264, simple_loss=0.4456, pruned_loss=0.2036, over 942277.59 frames. 
], batch size: 14, lr: 4.26e-02, grad_scale: 32.0 +2024-07-27 10:38:09,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=4933.333333333333, ans=0.009797101449275362 +2024-07-27 10:38:10,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.394e+01 7.604e+01 8.315e+01 9.088e+01 1.291e+02, threshold=1.663e+02, percent-clipped=0.0 +2024-07-27 10:38:10,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=4933.333333333333, ans=0.26875 +2024-07-27 10:38:18,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.84 vs. limit=3.742 +2024-07-27 10:38:22,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=4946.666666666667, ans=0.2742 +2024-07-27 10:38:23,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=4960.0, ans=0.069 +2024-07-27 10:38:33,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.80 vs. limit=9.365 +2024-07-27 10:38:44,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=4986.666666666667, ans=0.26625 +2024-07-27 10:38:46,999 INFO [train.py:1114] (3/4) Epoch 1, batch 3750, loss[loss=0.4492, simple_loss=0.4439, pruned_loss=0.2273, over 4806.00 frames. ], tot_loss[loss=0.4252, simple_loss=0.4447, pruned_loss=0.2029, over 944026.18 frames. ], batch size: 11, lr: 4.26e-02, grad_scale: 32.0 +2024-07-27 10:38:47,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5000.0, ans=0.265625 +2024-07-27 10:38:53,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=5000.0, ans=0.09899494936611666 +2024-07-27 10:38:58,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.44 vs. limit=11.26 +2024-07-27 10:39:03,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.59 vs. limit=7.506666666666666 +2024-07-27 10:39:03,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.43 vs. limit=11.26 +2024-07-27 10:39:08,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5026.666666666667, ans=0.264375 +2024-07-27 10:39:11,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=5026.666666666667, ans=0.264375 +2024-07-27 10:39:12,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=11.27 +2024-07-27 10:39:13,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.06 vs. 
limit=7.513333333333334 +2024-07-27 10:39:16,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=11.28 +2024-07-27 10:39:29,992 INFO [train.py:1114] (3/4) Epoch 1, batch 3800, loss[loss=0.4614, simple_loss=0.474, pruned_loss=0.2244, over 4805.00 frames. ], tot_loss[loss=0.4256, simple_loss=0.4447, pruned_loss=0.2033, over 942150.33 frames. ], batch size: 14, lr: 4.25e-02, grad_scale: 32.0 +2024-07-27 10:39:30,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=5066.666666666667, ans=0.7226666666666667 +2024-07-27 10:39:31,469 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.397e+01 7.848e+01 8.926e+01 1.062e+02 1.659e+02, threshold=1.785e+02, percent-clipped=0.0 +2024-07-27 10:39:33,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.48 vs. limit=11.3 +2024-07-27 10:39:40,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5080.0, ans=0.26187499999999997 +2024-07-27 10:39:50,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5093.333333333333, ans=0.24906666666666666 +2024-07-27 10:40:00,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.48 vs. limit=11.33 +2024-07-27 10:40:07,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=5120.0, ans=0.7208 +2024-07-27 10:40:09,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5133.333333333333, ans=0.24866666666666665 +2024-07-27 10:40:09,711 INFO [train.py:1114] (3/4) Epoch 1, batch 3850, loss[loss=0.4187, simple_loss=0.4624, pruned_loss=0.1875, over 4641.00 frames. ], tot_loss[loss=0.421, simple_loss=0.4423, pruned_loss=0.1998, over 942742.99 frames. ], batch size: 16, lr: 4.24e-02, grad_scale: 32.0 +2024-07-27 10:40:24,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=5146.666666666667, ans=0.04949747468305833 +2024-07-27 10:40:33,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=5146.666666666667, ans=0.06783333333333333 +2024-07-27 10:40:38,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5160.0, ans=0.258125 +2024-07-27 10:40:42,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.06 vs. 
limit=3.774 +2024-07-27 10:40:45,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=5160.0, ans=0.258125 +2024-07-27 10:40:47,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=5173.333333333333, ans=0.009744927536231884 +2024-07-27 10:40:56,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=5186.666666666667, ans=0.25687499999999996 +2024-07-27 10:41:02,117 INFO [train.py:1114] (3/4) Epoch 1, batch 3900, loss[loss=0.4759, simple_loss=0.4942, pruned_loss=0.2288, over 4817.00 frames. ], tot_loss[loss=0.4198, simple_loss=0.4421, pruned_loss=0.1987, over 942990.75 frames. ], batch size: 14, lr: 4.24e-02, grad_scale: 32.0 +2024-07-27 10:41:05,347 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 7.258e+01 7.897e+01 8.876e+01 1.354e+02, threshold=1.579e+02, percent-clipped=0.0 +2024-07-27 10:41:15,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=5213.333333333333, ans=0.04494444444444445 +2024-07-27 10:41:26,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.38 vs. limit=9.46 +2024-07-27 10:41:37,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=5253.333333333333, ans=0.25375000000000003 +2024-07-27 10:41:38,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.94 vs. limit=9.47 +2024-07-27 10:41:46,756 INFO [train.py:1114] (3/4) Epoch 1, batch 3950, loss[loss=0.4274, simple_loss=0.4497, pruned_loss=0.2026, over 4827.00 frames. ], tot_loss[loss=0.4183, simple_loss=0.4416, pruned_loss=0.1975, over 944808.18 frames. ], batch size: 16, lr: 4.23e-02, grad_scale: 32.0 +2024-07-27 10:41:47,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=5266.666666666667, ans=0.253125 +2024-07-27 10:41:47,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=5266.666666666667, ans=0.253125 +2024-07-27 10:41:50,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=5266.666666666667, ans=0.06708333333333333 +2024-07-27 10:42:06,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.39 vs. 
limit=11.46 +2024-07-27 10:42:10,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=5293.333333333333, ans=0.25187499999999996 +2024-07-27 10:42:10,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=5293.333333333333, ans=0.25187499999999996 +2024-07-27 10:42:37,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=5306.666666666667, ans=0.25125 +2024-07-27 10:42:39,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=5320.0, ans=8.325 +2024-07-27 10:42:45,173 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:42:45,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=5320.0, ans=0.00971304347826087 +2024-07-27 10:42:47,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=5320.0, ans=0.044500000000000005 +2024-07-27 10:43:00,282 INFO [train.py:1114] (3/4) Epoch 1, batch 4000, loss[loss=0.3392, simple_loss=0.379, pruned_loss=0.1497, over 4770.00 frames. ], tot_loss[loss=0.4206, simple_loss=0.443, pruned_loss=0.1991, over 941135.97 frames. ], batch size: 12, lr: 4.23e-02, grad_scale: 32.0 +2024-07-27 10:43:01,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.01 vs. limit=11.5 +2024-07-27 10:43:01,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.271e+01 7.652e+01 8.472e+01 9.315e+01 2.163e+02, threshold=1.694e+02, percent-clipped=2.0 +2024-07-27 10:43:27,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=5360.0, ans=0.009704347826086956 +2024-07-27 10:43:34,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=5373.333333333333, ans=0.7119333333333333 +2024-07-27 10:43:46,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=5386.666666666667, ans=0.009698550724637682 +2024-07-27 10:43:48,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.18 vs. limit=3.808 +2024-07-27 10:43:53,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.27 vs. limit=9.52 +2024-07-27 10:43:53,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.32 vs. limit=7.693333333333333 +2024-07-27 10:43:55,180 INFO [train.py:1114] (3/4) Epoch 1, batch 4050, loss[loss=0.4987, simple_loss=0.4823, pruned_loss=0.2575, over 3312.00 frames. ], tot_loss[loss=0.4176, simple_loss=0.4408, pruned_loss=0.1972, over 939839.63 frames. ], batch size: 35, lr: 4.22e-02, grad_scale: 32.0 +2024-07-27 10:43:56,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.31 vs. 
limit=11.55 +2024-07-27 10:44:23,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=5440.0, ans=0.24559999999999998 +2024-07-27 10:44:23,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=5440.0, ans=0.044000000000000004 +2024-07-27 10:44:37,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=5453.333333333333, ans=0.7091333333333334 +2024-07-27 10:44:38,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=5453.333333333333, ans=0.7091333333333334 +2024-07-27 10:44:40,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=5453.333333333333, ans=0.244375 +2024-07-27 10:44:42,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=5453.333333333333, ans=0.03295833333333334 +2024-07-27 10:44:44,999 INFO [train.py:1114] (3/4) Epoch 1, batch 4100, loss[loss=0.4947, simple_loss=0.5108, pruned_loss=0.2393, over 4906.00 frames. ], tot_loss[loss=0.4193, simple_loss=0.4419, pruned_loss=0.1983, over 939100.99 frames. ], batch size: 15, lr: 4.22e-02, grad_scale: 32.0 +2024-07-27 10:44:46,473 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.345e+01 7.438e+01 7.964e+01 9.010e+01 1.753e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 10:44:47,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=5466.666666666667, ans=0.0 +2024-07-27 10:44:52,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=5480.0, ans=0.24519999999999997 +2024-07-27 10:44:57,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.18 vs. limit=6.192 +2024-07-27 10:45:35,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=5520.0, ans=0.025 +2024-07-27 10:45:41,331 INFO [train.py:1114] (3/4) Epoch 1, batch 4150, loss[loss=0.3924, simple_loss=0.4135, pruned_loss=0.1857, over 4835.00 frames. ], tot_loss[loss=0.4161, simple_loss=0.4399, pruned_loss=0.1962, over 938648.50 frames. ], batch size: 13, lr: 4.21e-02, grad_scale: 32.0 +2024-07-27 10:45:42,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=5533.333333333333, ans=0.24466666666666667 +2024-07-27 10:45:47,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=22.13 vs. 
limit=9.575 +2024-07-27 10:45:48,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=5546.666666666667, ans=0.00966376811594203 +2024-07-27 10:45:53,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=5546.666666666667, ans=0.00966376811594203 +2024-07-27 10:46:25,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=5573.333333333333, ans=0.009657971014492754 +2024-07-27 10:46:25,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=5573.333333333333, ans=0.23875000000000002 +2024-07-27 10:46:29,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=5573.333333333333, ans=0.24426666666666666 +2024-07-27 10:46:34,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=5586.666666666667, ans=0.23812499999999998 +2024-07-27 10:46:35,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.43 vs. limit=11.690000000000001 +2024-07-27 10:46:38,733 INFO [train.py:1114] (3/4) Epoch 1, batch 4200, loss[loss=0.4418, simple_loss=0.4699, pruned_loss=0.2069, over 4895.00 frames. ], tot_loss[loss=0.4166, simple_loss=0.4402, pruned_loss=0.1965, over 939705.40 frames. ], batch size: 15, lr: 4.20e-02, grad_scale: 32.0 +2024-07-27 10:46:40,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.767e+01 7.218e+01 8.164e+01 9.157e+01 1.293e+02, threshold=1.633e+02, percent-clipped=0.0 +2024-07-27 10:47:01,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=5613.333333333333, ans=0.24386666666666668 +2024-07-27 10:47:02,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=5613.333333333333, ans=9.605 +2024-07-27 10:47:04,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=5626.666666666667, ans=0.7030666666666667 +2024-07-27 10:47:06,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.71 vs. 
limit=11.719999999999999 +2024-07-27 10:47:07,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=5626.666666666667, ans=0.23625000000000002 +2024-07-27 10:47:11,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=5626.666666666667, ans=0.043222222222222224 +2024-07-27 10:47:21,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=5653.333333333333, ans=0.009640579710144927 +2024-07-27 10:47:27,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=5653.333333333333, ans=0.235 +2024-07-27 10:47:31,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=5653.333333333333, ans=0.043111111111111114 +2024-07-27 10:47:33,665 INFO [train.py:1114] (3/4) Epoch 1, batch 4250, loss[loss=0.3908, simple_loss=0.4288, pruned_loss=0.1764, over 4640.00 frames. ], tot_loss[loss=0.4167, simple_loss=0.4407, pruned_loss=0.1963, over 940898.38 frames. ], batch size: 12, lr: 4.20e-02, grad_scale: 32.0 +2024-07-27 10:47:33,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=5666.666666666667, ans=0.234375 +2024-07-27 10:47:36,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.18 vs. limit=6.416666666666667 +2024-07-27 10:47:59,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=5680.0, ans=0.23375 +2024-07-27 10:48:01,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.06 vs. limit=11.77 +2024-07-27 10:48:05,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.96 vs. limit=9.635 +2024-07-27 10:48:10,911 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 10:48:16,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.85 vs. limit=9.64 +2024-07-27 10:48:16,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.21 vs. limit=11.780000000000001 +2024-07-27 10:48:23,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5720.0, ans=0.231875 +2024-07-27 10:48:23,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=11.92 vs. limit=11.79 +2024-07-27 10:48:25,305 INFO [train.py:1114] (3/4) Epoch 1, batch 4300, loss[loss=0.4679, simple_loss=0.4723, pruned_loss=0.2318, over 4750.00 frames. ], tot_loss[loss=0.4158, simple_loss=0.4399, pruned_loss=0.1958, over 939948.98 frames. 
], batch size: 13, lr: 4.19e-02, grad_scale: 32.0 +2024-07-27 10:48:26,778 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.009e+01 7.278e+01 8.201e+01 9.440e+01 2.695e+02, threshold=1.640e+02, percent-clipped=2.0 +2024-07-27 10:48:30,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.81 vs. limit=9.65 +2024-07-27 10:48:32,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.26 vs. limit=6.4366666666666665 +2024-07-27 10:48:32,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.86 vs. limit=7.873333333333333 +2024-07-27 10:48:53,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.85 vs. limit=9.665 +2024-07-27 10:49:00,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=5786.666666666667, ans=0.03191666666666666 +2024-07-27 10:49:02,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.30 vs. limit=11.84 +2024-07-27 10:49:15,558 INFO [train.py:1114] (3/4) Epoch 1, batch 4350, loss[loss=0.4007, simple_loss=0.4307, pruned_loss=0.1854, over 4764.00 frames. ], tot_loss[loss=0.416, simple_loss=0.4406, pruned_loss=0.1957, over 940850.87 frames. ], batch size: 13, lr: 4.19e-02, grad_scale: 32.0 +2024-07-27 10:49:16,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.66 vs. limit=6.45 +2024-07-27 10:49:24,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=5800.0, ans=0.22812500000000002 +2024-07-27 10:49:28,335 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=8.358e-02 +2024-07-27 10:49:33,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=5813.333333333333, ans=0.6965333333333333 +2024-07-27 10:49:43,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.90 vs. limit=9.685 +2024-07-27 10:50:07,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=5853.333333333333, ans=0.009597101449275362 +2024-07-27 10:50:12,262 INFO [train.py:1114] (3/4) Epoch 1, batch 4400, loss[loss=0.3824, simple_loss=0.4176, pruned_loss=0.1736, over 4813.00 frames. ], tot_loss[loss=0.417, simple_loss=0.4413, pruned_loss=0.1964, over 940584.70 frames. 
], batch size: 14, lr: 4.18e-02, grad_scale: 32.0 +2024-07-27 10:50:12,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=5866.666666666667, ans=0.22499999999999998 +2024-07-27 10:50:13,754 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.875e+01 7.282e+01 8.065e+01 8.793e+01 1.417e+02, threshold=1.613e+02, percent-clipped=0.0 +2024-07-27 10:50:16,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=5866.666666666667, ans=0.0 +2024-07-27 10:50:25,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.58 vs. limit=7.9399999999999995 +2024-07-27 10:50:38,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=5906.666666666667, ans=0.009585507246376811 +2024-07-27 10:50:39,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=37.97 vs. limit=9.715 +2024-07-27 10:50:39,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.63 vs. limit=6.476666666666667 +2024-07-27 10:50:56,603 INFO [train.py:1114] (3/4) Epoch 1, batch 4450, loss[loss=0.4255, simple_loss=0.4373, pruned_loss=0.2068, over 4941.00 frames. ], tot_loss[loss=0.4176, simple_loss=0.4412, pruned_loss=0.197, over 938728.04 frames. ], batch size: 12, lr: 4.17e-02, grad_scale: 32.0 +2024-07-27 10:50:58,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=11.95 +2024-07-27 10:51:06,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=5946.666666666667, ans=0.009576811594202898 +2024-07-27 10:51:08,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=5946.666666666667, ans=8.716666666666667 +2024-07-27 10:51:12,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=5946.666666666667, ans=0.22125 +2024-07-27 10:51:24,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=11.969999999999999 +2024-07-27 10:51:43,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=5973.333333333333, ans=0.6909333333333334 +2024-07-27 10:51:47,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=5973.333333333333, ans=0.21999999999999997 +2024-07-27 10:52:01,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.66 vs. limit=6.496666666666667 +2024-07-27 10:52:02,905 INFO [train.py:1114] (3/4) Epoch 1, batch 4500, loss[loss=0.4359, simple_loss=0.459, pruned_loss=0.2064, over 4744.00 frames. ], tot_loss[loss=0.4154, simple_loss=0.4404, pruned_loss=0.1952, over 938386.55 frames. 
], batch size: 14, lr: 4.17e-02, grad_scale: 32.0
+2024-07-27 10:52:04,384 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.515e+01 7.518e+01 8.133e+01 8.921e+01 1.342e+02, threshold=1.627e+02, percent-clipped=0.0
+2024-07-27 10:52:05,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.75 vs. limit=8.0
+2024-07-27 10:52:27,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=6040.0, ans=0.009556521739130436
+2024-07-27 10:52:30,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=6040.0, ans=0.21687499999999998
+2024-07-27 10:52:43,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=6053.333333333333, ans=0.07
+2024-07-27 10:52:47,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=6066.666666666667, ans=0.215625
+2024-07-27 10:52:47,954 INFO [train.py:1114] (3/4) Epoch 1, batch 4550, loss[loss=0.3968, simple_loss=0.4206, pruned_loss=0.1865, over 4902.00 frames. ], tot_loss[loss=0.4142, simple_loss=0.4397, pruned_loss=0.1943, over 940114.16 frames. ], batch size: 13, lr: 4.16e-02, grad_scale: 32.0
+2024-07-27 10:52:53,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6066.666666666667, ans=0.215625
+2024-07-27 10:54:45,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6080.0, ans=0.2392
+2024-07-27 10:55:19,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=6093.333333333333, ans=0.21437499999999998
+2024-07-27 10:55:26,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.39 vs. limit=6.526666666666667
+2024-07-27 10:55:39,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=6133.333333333333, ans=0.21250000000000002
+2024-07-27 10:55:40,603 INFO [train.py:1114] (3/4) Epoch 1, batch 4600, loss[loss=0.4577, simple_loss=0.4597, pruned_loss=0.2278, over 4475.00 frames. ], tot_loss[loss=0.4106, simple_loss=0.4369, pruned_loss=0.1922, over 938270.19 frames. ], batch size: 21, lr: 4.15e-02, grad_scale: 32.0
+2024-07-27 10:55:47,212 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.044e+01 7.331e+01 8.005e+01 8.983e+01 1.431e+02, threshold=1.601e+02, percent-clipped=0.0
+2024-07-27 10:55:49,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=6133.333333333333, ans=0.21250000000000002
+2024-07-27 10:55:54,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=6146.666666666667, ans=0.21187499999999998
+2024-07-27 10:56:00,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=6146.666666666667, ans=0.21187499999999998
+2024-07-27 10:56:00,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6146.666666666667, ans=0.21187499999999998
+2024-07-27 10:56:02,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.47 vs. limit=12.11
+2024-07-27 10:56:15,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.23 vs. limit=8.086666666666666
+2024-07-27 10:56:17,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6173.333333333333, ans=0.23826666666666665
+2024-07-27 10:56:19,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=6173.333333333333, ans=0.210625
+2024-07-27 10:56:21,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=12.14
+2024-07-27 10:56:27,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=6186.666666666667, ans=0.21000000000000002
+2024-07-27 10:56:29,199 INFO [train.py:1114] (3/4) Epoch 1, batch 4650, loss[loss=0.4082, simple_loss=0.4397, pruned_loss=0.1883, over 4820.00 frames. ], tot_loss[loss=0.412, simple_loss=0.4383, pruned_loss=0.1928, over 939926.98 frames. ], batch size: 16, lr: 4.15e-02, grad_scale: 32.0
+2024-07-27 10:56:45,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.59 vs. limit=9.835
+2024-07-27 10:57:05,896 INFO [train.py:1114] (3/4) Epoch 1, batch 4700, loss[loss=0.3344, simple_loss=0.3637, pruned_loss=0.1525, over 4694.00 frames. ], tot_loss[loss=0.4136, simple_loss=0.4394, pruned_loss=0.1939, over 937270.96 frames. ], batch size: 11, lr: 4.14e-02, grad_scale: 32.0
+2024-07-27 10:57:06,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.69 vs. limit=12.2
+2024-07-27 10:57:07,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.557e+01 7.394e+01 8.015e+01 9.109e+01 1.664e+02, threshold=1.603e+02, percent-clipped=1.0
+2024-07-27 10:57:23,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=6293.333333333333, ans=0.04044444444444445
+2024-07-27 10:57:31,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6306.666666666667, ans=0.23693333333333333
+2024-07-27 10:57:39,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.40 vs. limit=12.24
+2024-07-27 10:57:40,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=6320.0, ans=0.20375
+2024-07-27 10:57:44,068 INFO [train.py:1114] (3/4) Epoch 1, batch 4750, loss[loss=0.459, simple_loss=0.466, pruned_loss=0.2261, over 4476.00 frames. ], tot_loss[loss=0.4124, simple_loss=0.4386, pruned_loss=0.1931, over 935229.86 frames. ], batch size: 21, lr: 4.14e-02, grad_scale: 64.0
+2024-07-27 10:57:48,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=6333.333333333333, ans=0.203125
+2024-07-27 10:57:48,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=6333.333333333333, ans=0.009492753623188407
+2024-07-27 10:57:51,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=3.95
+2024-07-27 10:57:51,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. limit=9.875
+2024-07-27 10:58:00,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=6346.666666666667, ans=0.025
+2024-07-27 10:58:17,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=6373.333333333333, ans=0.6769333333333334
+2024-07-27 10:58:17,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.44 vs. limit=12.280000000000001
+2024-07-27 10:58:18,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=6386.666666666667, ans=0.200625
+2024-07-27 10:58:25,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=6400.0, ans=0.2
+2024-07-27 10:58:26,656 INFO [train.py:1114] (3/4) Epoch 1, batch 4800, loss[loss=0.4928, simple_loss=0.4972, pruned_loss=0.2442, over 4695.00 frames. ], tot_loss[loss=0.4131, simple_loss=0.439, pruned_loss=0.1935, over 932522.33 frames. ], batch size: 13, lr: 4.13e-02, grad_scale: 64.0
+2024-07-27 10:58:28,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.395e+01 7.298e+01 7.833e+01 8.734e+01 1.995e+02, threshold=1.567e+02, percent-clipped=2.0
+2024-07-27 10:58:34,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=12.76 vs. limit=12.309999999999999
+2024-07-27 10:58:39,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=6413.333333333333, ans=0.029958333333333333
+2024-07-27 10:58:57,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.78 vs. limit=9.915
+2024-07-27 10:58:59,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=6453.333333333333, ans=0.025
+2024-07-27 10:59:05,507 INFO [train.py:1114] (3/4) Epoch 1, batch 4850, loss[loss=0.3971, simple_loss=0.4258, pruned_loss=0.1842, over 4744.00 frames. ], tot_loss[loss=0.4121, simple_loss=0.438, pruned_loss=0.1931, over 932020.10 frames. ], batch size: 14, lr: 4.12e-02, grad_scale: 64.0
+2024-07-27 10:59:31,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.25 vs. limit=12.379999999999999
+2024-07-27 10:59:37,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=12.39
+2024-07-27 10:59:37,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=6520.0, ans=0.059250000000000004
+2024-07-27 10:59:38,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6520.0, ans=0.0
+2024-07-27 10:59:43,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=6520.0, ans=0.6718000000000001
+2024-07-27 10:59:44,804 INFO [train.py:1114] (3/4) Epoch 1, batch 4900, loss[loss=0.4732, simple_loss=0.4994, pruned_loss=0.2235, over 4751.00 frames. ], tot_loss[loss=0.4089, simple_loss=0.4355, pruned_loss=0.1911, over 934035.21 frames. ], batch size: 13, lr: 4.12e-02, grad_scale: 64.0
+2024-07-27 10:59:46,286 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.298e+01 7.338e+01 8.038e+01 8.614e+01 1.106e+02, threshold=1.608e+02, percent-clipped=0.0
+2024-07-27 10:59:49,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=6533.333333333333, ans=0.6713333333333333
+2024-07-27 10:59:50,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=4.82 vs. limit=5.306666666666667
+2024-07-27 10:59:51,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=6533.333333333333, ans=0.6713333333333333
+2024-07-27 10:59:54,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6546.666666666667, ans=0.23453333333333332
+2024-07-27 10:59:57,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.03 vs. limit=3.982
+2024-07-27 10:59:58,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.24 vs. limit=3.982
+2024-07-27 10:59:59,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=6546.666666666667, ans=0.2982
+2024-07-27 11:00:01,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.07 vs. limit=9.955
+2024-07-27 11:00:05,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6560.0, ans=0.2344
+2024-07-27 11:00:05,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=6560.0, ans=0.1925
+2024-07-27 11:00:07,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.53 vs. limit=8.28
+2024-07-27 11:00:19,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=6586.666666666667, ans=0.19124999999999998
+2024-07-27 11:00:23,793 INFO [train.py:1114] (3/4) Epoch 1, batch 4950, loss[loss=0.4575, simple_loss=0.464, pruned_loss=0.2255, over 3571.00 frames. ], tot_loss[loss=0.4102, simple_loss=0.4367, pruned_loss=0.1919, over 931313.33 frames. ], batch size: 35, lr: 4.11e-02, grad_scale: 64.0
+2024-07-27 11:00:51,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=6640.0, ans=0.03900000000000001
+2024-07-27 11:01:00,652 INFO [train.py:1114] (3/4) Epoch 1, batch 5000, loss[loss=0.3964, simple_loss=0.447, pruned_loss=0.1729, over 4655.00 frames. ], tot_loss[loss=0.4081, simple_loss=0.4357, pruned_loss=0.1903, over 935152.72 frames. ], batch size: 14, lr: 4.10e-02, grad_scale: 64.0
+2024-07-27 11:01:01,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.19 vs. limit=6.666666666666667
+2024-07-27 11:01:01,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.66 vs. limit=12.5
+2024-07-27 11:01:02,002 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.106e+01 7.393e+01 8.012e+01 9.177e+01 1.350e+02, threshold=1.602e+02, percent-clipped=0.0
+2024-07-27 11:01:06,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=6666.666666666667, ans=0.6666666666666667
+2024-07-27 11:01:14,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=6693.333333333333, ans=0.18625000000000003
+2024-07-27 11:01:20,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.51 vs. limit=12.52
+2024-07-27 11:01:23,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.41 vs. limit=8.353333333333333
+2024-07-27 11:01:28,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=6706.666666666667, ans=0.18562499999999998
+2024-07-27 11:01:33,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=6720.0, ans=0.025
+2024-07-27 11:01:35,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=6720.0, ans=12.54
+2024-07-27 11:01:35,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.32 vs. limit=12.54
+2024-07-27 11:01:36,546 INFO [train.py:1114] (3/4) Epoch 1, batch 5050, loss[loss=0.3657, simple_loss=0.4174, pruned_loss=0.157, over 4849.00 frames. ], tot_loss[loss=0.4062, simple_loss=0.4344, pruned_loss=0.189, over 937677.87 frames. ], batch size: 12, lr: 4.10e-02, grad_scale: 64.0
+2024-07-27 11:01:36,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.66 vs. limit=12.55
+2024-07-27 11:01:43,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=6746.666666666667, ans=0.03855555555555556
+2024-07-27 11:01:44,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=6746.666666666667, ans=0.18375000000000002
+2024-07-27 11:01:45,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=6746.666666666667, ans=0.18375000000000002
+2024-07-27 11:01:45,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=6746.666666666667, ans=0.6638666666666666
+2024-07-27 11:01:46,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6746.666666666667, ans=0.0
+2024-07-27 11:01:50,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.78 vs. limit=10.03
+2024-07-27 11:01:56,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.10 vs. limit=10.035
+2024-07-27 11:01:56,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=6760.0, ans=0.2324
+2024-07-27 11:02:01,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=12.12 vs. limit=12.58
+2024-07-27 11:02:03,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.06 vs. limit=4.016
+2024-07-27 11:02:14,729 INFO [train.py:1114] (3/4) Epoch 1, batch 5100, loss[loss=0.349, simple_loss=0.386, pruned_loss=0.156, over 4777.00 frames. ], tot_loss[loss=0.4079, simple_loss=0.4357, pruned_loss=0.19, over 935203.71 frames. ], batch size: 12, lr: 4.09e-02, grad_scale: 64.0
+2024-07-27 11:02:15,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=6800.0, ans=0.0
+2024-07-27 11:02:16,195 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.973e+01 7.191e+01 7.778e+01 8.421e+01 1.083e+02, threshold=1.556e+02, percent-clipped=0.0
+2024-07-27 11:02:16,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.21 vs. limit=4.02
+2024-07-27 11:02:24,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=6813.333333333333, ans=0.0
+2024-07-27 11:02:25,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=6813.333333333333, ans=8.406666666666666
+2024-07-27 11:02:40,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=6840.0, ans=0.2316
+2024-07-27 11:02:47,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=6853.333333333333, ans=0.009379710144927536
+2024-07-27 11:02:51,283 INFO [train.py:1114] (3/4) Epoch 1, batch 5150, loss[loss=0.4092, simple_loss=0.4526, pruned_loss=0.1829, over 4821.00 frames. ], tot_loss[loss=0.4084, simple_loss=0.4363, pruned_loss=0.1902, over 936440.09 frames. ], batch size: 16, lr: 4.09e-02, grad_scale: 64.0
+2024-07-27 11:02:54,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.34 vs. limit=12.65
+2024-07-27 11:03:01,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.19 vs. limit=8.44
+2024-07-27 11:03:03,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=6880.0, ans=0.038000000000000006
+2024-07-27 11:03:09,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=6893.333333333333, ans=0.0
+2024-07-27 11:03:15,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=6906.666666666667, ans=0.025
+2024-07-27 11:03:22,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=6906.666666666667, ans=0.07
+2024-07-27 11:03:30,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.02 vs. limit=10.095
+2024-07-27 11:03:32,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.70 vs. limit=10.095
+2024-07-27 11:03:33,711 INFO [train.py:1114] (3/4) Epoch 1, batch 5200, loss[loss=0.4974, simple_loss=0.5026, pruned_loss=0.2462, over 4660.00 frames. ], tot_loss[loss=0.4057, simple_loss=0.4344, pruned_loss=0.1885, over 936378.50 frames. ], batch size: 14, lr: 4.08e-02, grad_scale: 64.0
+2024-07-27 11:03:35,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.869e+01 7.238e+01 8.043e+01 8.705e+01 1.237e+02, threshold=1.609e+02, percent-clipped=0.0
+2024-07-27 11:03:47,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=6960.0, ans=0.009356521739130435
+2024-07-27 11:03:52,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=6960.0, ans=0.03766666666666667
+2024-07-27 11:03:55,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=6973.333333333333, ans=0.09899494936611666
+2024-07-27 11:04:00,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=6973.333333333333, ans=0.17312499999999997
+2024-07-27 11:04:01,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.34 vs. limit=10.115
+2024-07-27 11:04:05,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=6973.333333333333, ans=0.17312499999999997
+2024-07-27 11:04:13,846 INFO [train.py:1114] (3/4) Epoch 1, batch 5250, loss[loss=0.3358, simple_loss=0.3949, pruned_loss=0.1384, over 4894.00 frames. ], tot_loss[loss=0.4022, simple_loss=0.4319, pruned_loss=0.1863, over 935996.09 frames. ], batch size: 13, lr: 4.07e-02, grad_scale: 64.0
+2024-07-27 11:04:15,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=7000.0, ans=0.171875
+2024-07-27 11:04:23,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7013.333333333333, ans=0.22986666666666666
+2024-07-27 11:04:26,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.66 vs. limit=10.129999999999999
+2024-07-27 11:04:26,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.01 vs. limit=4.052
+2024-07-27 11:04:29,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=10.135
+2024-07-27 11:04:39,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.81 vs. limit=12.77
+2024-07-27 11:04:41,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=10.135
+2024-07-27 11:04:41,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.98 vs. limit=12.77
+2024-07-27 11:04:46,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=7040.0, ans=0.037333333333333336
+2024-07-27 11:04:50,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=7053.333333333333, ans=0.169375
+2024-07-27 11:04:58,092 INFO [train.py:1114] (3/4) Epoch 1, batch 5300, loss[loss=0.4697, simple_loss=0.4812, pruned_loss=0.2291, over 4640.00 frames. ], tot_loss[loss=0.3997, simple_loss=0.4299, pruned_loss=0.1847, over 934351.79 frames. ], batch size: 16, lr: 4.07e-02, grad_scale: 64.0
+2024-07-27 11:05:04,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.771e+01 7.230e+01 7.839e+01 8.733e+01 1.218e+02, threshold=1.568e+02, percent-clipped=0.0
+2024-07-27 11:05:08,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=12.8
+2024-07-27 11:05:10,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=7080.0, ans=12.809999999999999
+2024-07-27 11:05:14,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.94 vs. limit=4.062
+2024-07-27 11:05:20,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=7093.333333333333, ans=0.6517333333333334
+2024-07-27 11:05:23,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7093.333333333333, ans=0.22906666666666667
+2024-07-27 11:05:25,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=7106.666666666667, ans=0.166875
+2024-07-27 11:05:25,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=7106.666666666667, ans=0.025
+2024-07-27 11:05:34,724 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.42 vs. limit=8.56
+2024-07-27 11:05:35,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=7120.0, ans=0.037000000000000005
+2024-07-27 11:05:41,458 INFO [train.py:1114] (3/4) Epoch 1, batch 5350, loss[loss=0.3827, simple_loss=0.3999, pruned_loss=0.1827, over 4515.00 frames. ], tot_loss[loss=0.4013, simple_loss=0.4314, pruned_loss=0.1856, over 936528.09 frames. ], batch size: 10, lr: 4.06e-02, grad_scale: 64.0
+2024-07-27 11:05:45,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.68 vs. limit=12.85
+2024-07-27 11:05:51,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.99 vs. limit=10.18
+2024-07-27 11:05:53,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=7146.666666666667, ans=0.03688888888888889
+2024-07-27 11:06:00,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=10.185
+2024-07-27 11:06:04,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.38 vs. limit=6.793333333333333
+2024-07-27 11:06:05,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=7173.333333333333, ans=9.483333333333334
+2024-07-27 11:06:09,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.06 vs. limit=10.19
+2024-07-27 11:06:09,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=10.19
+2024-07-27 11:06:10,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7173.333333333333, ans=0.0
+2024-07-27 11:06:20,223 INFO [train.py:1114] (3/4) Epoch 1, batch 5400, loss[loss=0.481, simple_loss=0.4942, pruned_loss=0.2339, over 4443.00 frames. ], tot_loss[loss=0.4042, simple_loss=0.4331, pruned_loss=0.1876, over 930957.37 frames. ], batch size: 26, lr: 4.05e-02, grad_scale: 64.0
+2024-07-27 11:06:20,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=7200.0, ans=0.308
+2024-07-27 11:06:21,688 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.213e+01 7.171e+01 7.909e+01 8.696e+01 2.349e+02, threshold=1.582e+02, percent-clipped=3.0
+2024-07-27 11:06:39,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.97 vs. limit=8.613333333333333
+2024-07-27 11:06:44,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=7240.0, ans=0.2276
+2024-07-27 11:06:52,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=7253.333333333333, ans=0.05466666666666667
+2024-07-27 11:06:55,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.42 vs. limit=8.633333333333333
+2024-07-27 11:06:55,835 INFO [train.py:1114] (3/4) Epoch 1, batch 5450, loss[loss=0.3369, simple_loss=0.382, pruned_loss=0.1459, over 4711.00 frames. ], tot_loss[loss=0.3993, simple_loss=0.4298, pruned_loss=0.1845, over 933927.52 frames. ], batch size: 11, lr: 4.05e-02, grad_scale: 64.0
+2024-07-27 11:07:04,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=7280.0, ans=0.15875
+2024-07-27 11:07:04,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=7280.0, ans=0.15875
+2024-07-27 11:07:05,010 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.89 vs. limit=10.23
+2024-07-27 11:07:18,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=7293.333333333333, ans=0.15812500000000002
+2024-07-27 11:07:21,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=7293.333333333333, ans=0.025
+2024-07-27 11:07:27,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.88 vs. limit=12.98
+2024-07-27 11:07:31,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=7320.0, ans=0.156875
+2024-07-27 11:07:37,167 INFO [train.py:1114] (3/4) Epoch 1, batch 5500, loss[loss=0.4583, simple_loss=0.4612, pruned_loss=0.2277, over 4265.00 frames. ], tot_loss[loss=0.3989, simple_loss=0.4288, pruned_loss=0.1845, over 931345.77 frames. ], batch size: 25, lr: 4.04e-02, grad_scale: 64.0
+2024-07-27 11:07:38,666 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.895e+01 7.344e+01 7.791e+01 8.854e+01 1.594e+02, threshold=1.558e+02, percent-clipped=1.0
+2024-07-27 11:07:39,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.79 vs. limit=10.25
+2024-07-27 11:07:40,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.07 vs. limit=8.666666666666666
+2024-07-27 11:07:41,330 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=7.85 vs. limit=6.833333333333333
+2024-07-27 11:07:44,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=10.254999999999999
+2024-07-27 11:07:45,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=13.01
+2024-07-27 11:07:46,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=10.254999999999999
+2024-07-27 11:07:47,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=7346.666666666667, ans=0.036055555555555556
+2024-07-27 11:07:53,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.87 vs. limit=10.26
+2024-07-27 11:07:53,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=10.26
+2024-07-27 11:07:58,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.35 vs. limit=13.02
+2024-07-27 11:08:09,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.23 vs. limit=6.954666666666666
+2024-07-27 11:08:13,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7400.0, ans=0.153125
+2024-07-27 11:08:14,264 INFO [train.py:1114] (3/4) Epoch 1, batch 5550, loss[loss=0.3596, simple_loss=0.4075, pruned_loss=0.1559, over 4698.00 frames. ], tot_loss[loss=0.3975, simple_loss=0.4281, pruned_loss=0.1835, over 933380.91 frames. ], batch size: 12, lr: 4.03e-02, grad_scale: 64.0
+2024-07-27 11:08:17,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=7400.0, ans=0.153125
+2024-07-27 11:08:30,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.00 vs. limit=10.285
+2024-07-27 11:08:33,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=7426.666666666667, ans=0.6400666666666667
+2024-07-27 11:08:50,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=7466.666666666667, ans=0.15000000000000002
+2024-07-27 11:08:50,833 INFO [train.py:1114] (3/4) Epoch 1, batch 5600, loss[loss=0.4542, simple_loss=0.4788, pruned_loss=0.2148, over 4739.00 frames. ], tot_loss[loss=0.3991, simple_loss=0.4301, pruned_loss=0.1841, over 934574.11 frames. ], batch size: 14, lr: 4.03e-02, grad_scale: 64.0
+2024-07-27 11:08:52,370 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.970e+01 7.181e+01 7.813e+01 8.583e+01 1.892e+02, threshold=1.563e+02, percent-clipped=1.0
+2024-07-27 11:08:56,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=7466.666666666667, ans=0.15000000000000002
+2024-07-27 11:08:59,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.95 vs. limit=6.87
+2024-07-27 11:09:19,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.99 vs. limit=13.129999999999999
+2024-07-27 11:09:26,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.70 vs. limit=8.76
+2024-07-27 11:09:29,072 INFO [train.py:1114] (3/4) Epoch 1, batch 5650, loss[loss=0.4323, simple_loss=0.4587, pruned_loss=0.2029, over 4512.00 frames. ], tot_loss[loss=0.3959, simple_loss=0.4277, pruned_loss=0.1821, over 936815.75 frames. ], batch size: 21, lr: 4.02e-02, grad_scale: 64.0
+2024-07-27 11:09:29,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=7533.333333333333, ans=0.22466666666666668
+2024-07-27 11:09:31,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=7533.333333333333, ans=0.14687499999999998
+2024-07-27 11:09:34,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=7533.333333333333, ans=0.03527777777777778
+2024-07-27 11:09:36,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=7546.666666666667, ans=0.035222222222222224
+2024-07-27 11:09:39,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.02 vs. limit=10.33
+2024-07-27 11:09:41,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=7546.666666666667, ans=0.14625
+2024-07-27 11:09:42,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=7546.666666666667, ans=0.14625
+2024-07-27 11:09:50,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.95 vs. limit=8.78
+2024-07-27 11:09:51,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=7573.333333333333, ans=0.6349333333333333
+2024-07-27 11:10:01,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.29 vs. limit=13.190000000000001
+2024-07-27 11:10:05,075 INFO [train.py:1114] (3/4) Epoch 1, batch 5700, loss[loss=0.3935, simple_loss=0.4286, pruned_loss=0.1792, over 4687.00 frames. ], tot_loss[loss=0.3963, simple_loss=0.4281, pruned_loss=0.1823, over 937682.18 frames. ], batch size: 13, lr: 4.02e-02, grad_scale: 64.0
+2024-07-27 11:10:06,393 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.074e+01 7.227e+01 8.129e+01 9.173e+01 1.333e+02, threshold=1.626e+02, percent-clipped=0.0
+2024-07-27 11:10:12,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7613.333333333333, ans=0.0
+2024-07-27 11:10:17,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=7613.333333333333, ans=0.143125
+2024-07-27 11:10:21,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.23 vs. limit=13.219999999999999
+2024-07-27 11:10:35,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=7653.333333333333, ans=0.03477777777777778
+2024-07-27 11:10:36,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.64 vs. limit=8.826666666666666
+2024-07-27 11:10:40,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.16 vs. limit=13.24
+2024-07-27 11:10:41,774 INFO [train.py:1114] (3/4) Epoch 1, batch 5750, loss[loss=0.4148, simple_loss=0.4453, pruned_loss=0.1922, over 4660.00 frames. ], tot_loss[loss=0.3971, simple_loss=0.4294, pruned_loss=0.1825, over 938021.27 frames. ], batch size: 19, lr: 4.01e-02, grad_scale: 64.0
+2024-07-27 11:10:46,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=7666.666666666667, ans=0.140625
+2024-07-27 11:10:46,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=10.375
+2024-07-27 11:11:00,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.88 vs. limit=5.0
+2024-07-27 11:11:09,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=7706.666666666667, ans=0.6302666666666668
+2024-07-27 11:11:12,310 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.54 vs. limit=8.86
+2024-07-27 11:11:25,455 INFO [train.py:1114] (3/4) Epoch 1, batch 5800, loss[loss=0.4894, simple_loss=0.4834, pruned_loss=0.2477, over 4721.00 frames. ], tot_loss[loss=0.3968, simple_loss=0.429, pruned_loss=0.1823, over 936614.68 frames. ], batch size: 19, lr: 4.00e-02, grad_scale: 64.0
+2024-07-27 11:11:25,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=7733.333333333333, ans=7.093333333333334
+2024-07-27 11:11:26,803 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.680e+01 7.353e+01 8.081e+01 9.227e+01 1.347e+02, threshold=1.616e+02, percent-clipped=0.0
+2024-07-27 11:11:39,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.10 vs. limit=10.405
+2024-07-27 11:11:39,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.64 vs. limit=6.9366666666666665
+2024-07-27 11:11:42,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.26 vs. limit=10.41
+2024-07-27 11:11:45,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=7760.0, ans=0.2224
+2024-07-27 11:11:52,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.24 vs. limit=13.33
+2024-07-27 11:12:00,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=7786.666666666667, ans=0.03422222222222222
+2024-07-27 11:12:00,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.40 vs. limit=10.42
+2024-07-27 11:12:04,119 INFO [train.py:1114] (3/4) Epoch 1, batch 5850, loss[loss=0.3948, simple_loss=0.4333, pruned_loss=0.1782, over 4432.00 frames. ], tot_loss[loss=0.398, simple_loss=0.4298, pruned_loss=0.1832, over 937292.23 frames. ], batch size: 21, lr: 4.00e-02, grad_scale: 64.0
+2024-07-27 11:12:04,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=7800.0, ans=0.13437500000000002
+2024-07-27 11:12:10,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=10.43
+2024-07-27 11:12:16,187 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:12:18,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.78 vs. limit=13.370000000000001
+2024-07-27 11:12:19,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=7826.666666666667, ans=0.133125
+2024-07-27 11:12:26,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=7840.0, ans=0.1325
+2024-07-27 11:12:30,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=7840.0, ans=0.034
+2024-07-27 11:12:32,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=10.445
+2024-07-27 11:12:35,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=10.445
+2024-07-27 11:12:36,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=7853.333333333333, ans=0.13187500000000002
+2024-07-27 11:12:39,508 INFO [train.py:1114] (3/4) Epoch 1, batch 5900, loss[loss=0.4328, simple_loss=0.4735, pruned_loss=0.196, over 4674.00 frames. ], tot_loss[loss=0.3976, simple_loss=0.4298, pruned_loss=0.1827, over 937773.49 frames. ], batch size: 15, lr: 3.99e-02, grad_scale: 64.0
+2024-07-27 11:12:40,947 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.265e+01 7.754e+01 8.488e+01 1.052e+02, threshold=1.551e+02, percent-clipped=0.0
+2024-07-27 11:12:44,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=7866.666666666667, ans=0.13124999999999998
+2024-07-27 11:12:55,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.13 vs. limit=13.42
+2024-07-27 11:12:55,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.57 vs. limit=13.42
+2024-07-27 11:12:56,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=7893.333333333333, ans=0.025
+2024-07-27 11:13:00,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.35 vs. limit=10.465
+2024-07-27 11:13:05,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=7906.666666666667, ans=0.0
+2024-07-27 11:13:08,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=7920.0, ans=0.0
+2024-07-27 11:13:14,899 INFO [train.py:1114] (3/4) Epoch 1, batch 5950, loss[loss=0.4523, simple_loss=0.4897, pruned_loss=0.2075, over 4696.00 frames. ], tot_loss[loss=0.3969, simple_loss=0.4295, pruned_loss=0.1822, over 939785.81 frames. ], batch size: 15, lr: 3.98e-02, grad_scale: 64.0
+2024-07-27 11:13:18,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=7933.333333333333, ans=0.04949747468305833
+2024-07-27 11:13:23,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=7946.666666666667, ans=0.1275
+2024-07-27 11:13:33,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.30 vs. limit=13.469999999999999
+2024-07-27 11:13:45,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=7986.666666666667, ans=0.125625
+2024-07-27 11:13:50,892 INFO [train.py:1114] (3/4) Epoch 1, batch 6000, loss[loss=0.4628, simple_loss=0.4886, pruned_loss=0.2185, over 4279.00 frames. ], tot_loss[loss=0.396, simple_loss=0.4285, pruned_loss=0.1818, over 937043.13 frames. ], batch size: 25, lr: 3.98e-02, grad_scale: 64.0
+2024-07-27 11:13:50,892 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-27 11:14:08,833 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.9669, 5.9009, 5.6771, 5.7522], device='cuda:3')
+2024-07-27 11:14:16,111 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=0.3082, simple_loss=0.3886, pruned_loss=0.1139, over 944034.00 frames.
+2024-07-27 11:14:16,112 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-27 11:14:16,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.75 vs. limit=7.2
+2024-07-27 11:14:17,455 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.690e+01 7.303e+01 7.945e+01 8.512e+01 1.515e+02, threshold=1.589e+02, percent-clipped=0.0
+2024-07-27 11:14:17,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=8000.0, ans=0.07
+2024-07-27 11:14:18,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=8000.0, ans=0.03333333333333334
+2024-07-27 11:14:21,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.11 vs. limit=13.5
+2024-07-27 11:14:24,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=8013.333333333333, ans=0.6195333333333334
+2024-07-27 11:14:32,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.22 vs. limit=7.006666666666667
+2024-07-27 11:14:33,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=8026.666666666667, ans=0.125
+2024-07-27 11:14:34,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=8026.666666666667, ans=0.125
+2024-07-27 11:14:42,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=8040.0, ans=0.009121739130434783
+2024-07-27 11:14:52,564 INFO [train.py:1114] (3/4) Epoch 1, batch 6050, loss[loss=0.362, simple_loss=0.4035, pruned_loss=0.1603, over 4779.00 frames. ], tot_loss[loss=0.3954, simple_loss=0.4277, pruned_loss=0.1815, over 938189.13 frames. ], batch size: 12, lr: 3.97e-02, grad_scale: 64.0
+2024-07-27 11:15:02,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.07 vs. limit=13.559999999999999
+2024-07-27 11:15:06,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=8080.0, ans=0.025
+2024-07-27 11:15:10,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=8093.333333333333, ans=0.21906666666666666
+2024-07-27 11:15:29,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.99 vs. limit=9.066666666666666
+2024-07-27 11:15:30,047 INFO [train.py:1114] (3/4) Epoch 1, batch 6100, loss[loss=0.4215, simple_loss=0.4489, pruned_loss=0.1971, over 4689.00 frames. ], tot_loss[loss=0.3937, simple_loss=0.4263, pruned_loss=0.1805, over 937754.65 frames. ], batch size: 15, lr: 3.96e-02, grad_scale: 64.0
+2024-07-27 11:15:31,489 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.445e+01 6.771e+01 7.517e+01 8.445e+01 1.300e+02, threshold=1.503e+02, percent-clipped=0.0
+2024-07-27 11:15:32,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=8133.333333333333, ans=0.125
+2024-07-27 11:15:40,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=8146.666666666667, ans=0.025
+2024-07-27 11:15:48,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.53 vs. limit=13.620000000000001
+2024-07-27 11:15:56,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.85 vs. limit=10.565
+2024-07-27 11:16:05,335 INFO [train.py:1114] (3/4) Epoch 1, batch 6150, loss[loss=0.5127, simple_loss=0.5067, pruned_loss=0.2593, over 3320.00 frames. ], tot_loss[loss=0.394, simple_loss=0.4265, pruned_loss=0.1808, over 936578.36 frames. ], batch size: 36, lr: 3.96e-02, grad_scale: 64.0
+2024-07-27 11:16:07,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.32 vs. limit=9.1
+2024-07-27 11:16:12,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=8213.333333333334, ans=0.009084057971014492
+2024-07-27 11:16:12,864 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:16:22,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.58 vs. limit=5.645333333333333
+2024-07-27 11:16:24,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=8226.666666666666, ans=0.8322666666666666
+2024-07-27 11:16:33,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.23 vs. limit=10.59
+2024-07-27 11:16:41,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=8253.333333333334, ans=0.125
+2024-07-27 11:16:43,764 INFO [train.py:1114] (3/4) Epoch 1, batch 6200, loss[loss=0.3636, simple_loss=0.4124, pruned_loss=0.1574, over 4740.00 frames. ], tot_loss[loss=0.3945, simple_loss=0.4268, pruned_loss=0.1811, over 936373.06 frames. ], batch size: 14, lr: 3.95e-02, grad_scale: 64.0
+2024-07-27 11:16:45,337 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.005e+01 7.091e+01 7.789e+01 8.708e+01 1.298e+02, threshold=1.558e+02, percent-clipped=0.0
+2024-07-27 11:16:57,640 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.35 vs. limit=13.7
+2024-07-27 11:17:08,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=8280.0, ans=0.125
+2024-07-27 11:17:11,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.92 vs. limit=13.72
+2024-07-27 11:17:21,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.82 vs. limit=13.73
+2024-07-27 11:17:24,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.53 vs. limit=13.73
+2024-07-27 11:17:26,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=8320.0, ans=10.0
+2024-07-27 11:17:27,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=8320.0, ans=0.6088
+2024-07-27 11:17:28,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=8320.0, ans=0.009060869565217391
+2024-07-27 11:17:29,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=8320.0, ans=0.032
+2024-07-27 11:17:29,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=8320.0, ans=0.032
+2024-07-27 11:17:33,943 INFO [train.py:1114] (3/4) Epoch 1, batch 6250, loss[loss=0.3973, simple_loss=0.4259, pruned_loss=0.1844, over 4819.00 frames. ], tot_loss[loss=0.3942, simple_loss=0.4266, pruned_loss=0.1808, over 933032.23 frames. ], batch size: 14, lr: 3.94e-02, grad_scale: 64.0
+2024-07-27 11:17:39,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.76 vs. limit=10.625
+2024-07-27 11:17:42,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.65 vs. limit=13.759999999999998
+2024-07-27 11:18:09,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=13.8
+2024-07-27 11:18:09,418 INFO [train.py:1114] (3/4) Epoch 1, batch 6300, loss[loss=0.3616, simple_loss=0.401, pruned_loss=0.1611, over 4523.00 frames. ], tot_loss[loss=0.3931, simple_loss=0.426, pruned_loss=0.1801, over 930003.20 frames. ], batch size: 10, lr: 3.94e-02, grad_scale: 64.0
+2024-07-27 11:18:10,748 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.284e+01 7.148e+01 7.847e+01 8.773e+01 1.332e+02, threshold=1.569e+02, percent-clipped=0.0
+2024-07-27 11:18:10,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=8400.0, ans=0.09899494936611666
+2024-07-27 11:18:42,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.15 vs. limit=13.84
+2024-07-27 11:18:43,769 INFO [train.py:1114] (3/4) Epoch 1, batch 6350, loss[loss=0.3897, simple_loss=0.4243, pruned_loss=0.1775, over 4536.00 frames. ], tot_loss[loss=0.3911, simple_loss=0.4252, pruned_loss=0.1786, over 933907.89 frames. ], batch size: 21, lr: 3.93e-02, grad_scale: 64.0
+2024-07-27 11:18:56,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=8480.0, ans=0.025
+2024-07-27 11:18:57,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=8493.333333333334, ans=10.685
+2024-07-27 11:19:00,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8493.333333333334, ans=0.21506666666666666
+2024-07-27 11:19:00,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=8493.333333333334, ans=0.125
+2024-07-27 11:19:02,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=8493.333333333334, ans=0.03127777777777778
+2024-07-27 11:19:11,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=8520.0, ans=0.009017391304347826
+2024-07-27 11:19:12,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8520.0, ans=0.2148
+2024-07-27 11:19:19,054 INFO [train.py:1114] (3/4) Epoch 1, batch 6400, loss[loss=0.4175, simple_loss=0.4493, pruned_loss=0.1929, over 4641.00 frames. ], tot_loss[loss=0.3931, simple_loss=0.4267, pruned_loss=0.1797, over 935088.50 frames. ], batch size: 13, lr: 3.92e-02, grad_scale: 64.0
+2024-07-27 11:19:20,483 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.135e+01 7.649e+01 8.994e+01 1.161e+02, threshold=1.530e+02, percent-clipped=0.0
+2024-07-27 11:19:28,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.80 vs. limit=10.705
+2024-07-27 11:19:36,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=13.67 vs. limit=13.91
+2024-07-27 11:19:36,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=8546.666666666666, ans=0.6008666666666667
+2024-07-27 11:19:40,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=8560.0, ans=0.009008695652173913
+2024-07-27 11:19:40,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=8560.0, ans=10.71
+2024-07-27 11:19:46,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=8560.0, ans=0.125
+2024-07-27 11:20:02,093 INFO [train.py:1114] (3/4) Epoch 1, batch 6450, loss[loss=0.4059, simple_loss=0.4355, pruned_loss=0.1881, over 4498.00 frames. ], tot_loss[loss=0.3917, simple_loss=0.4259, pruned_loss=0.1788, over 938567.83 frames. ], batch size: 21, lr: 3.92e-02, grad_scale: 64.0
+2024-07-27 11:20:14,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.38 vs. limit=13.96
+2024-07-27 11:20:24,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.98 vs. limit=10.74
+2024-07-27 11:20:25,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=14.05 vs. limit=13.98
+2024-07-27 11:20:26,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=8640.0, ans=0.008991304347826088
+2024-07-27 11:20:29,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.13 vs. limit=13.98
+2024-07-27 11:20:38,006 INFO [train.py:1114] (3/4) Epoch 1, batch 6500, loss[loss=0.5014, simple_loss=0.4832, pruned_loss=0.2598, over 3355.00 frames. ], tot_loss[loss=0.39, simple_loss=0.4248, pruned_loss=0.1776, over 939680.56 frames. ], batch size: 35, lr: 3.91e-02, grad_scale: 64.0
+2024-07-27 11:20:39,465 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.881e+01 7.078e+01 7.610e+01 8.619e+01 1.357e+02, threshold=1.522e+02, percent-clipped=0.0
+2024-07-27 11:20:40,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=8666.666666666666, ans=0.125
+2024-07-27 11:20:49,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=8680.0, ans=0.2132
+2024-07-27 11:20:53,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=8693.333333333334, ans=10.0
+2024-07-27 11:21:01,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.81 vs. limit=14.03
+2024-07-27 11:21:05,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.38 vs. limit=10.765
+2024-07-27 11:21:14,509 INFO [train.py:1114] (3/4) Epoch 1, batch 6550, loss[loss=0.3345, simple_loss=0.3748, pruned_loss=0.1471, over 4806.00 frames. ], tot_loss[loss=0.3871, simple_loss=0.4233, pruned_loss=0.1754, over 942635.93 frames. ], batch size: 11, lr: 3.91e-02, grad_scale: 64.0
+2024-07-27 11:21:16,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.93 vs. limit=10.775
+2024-07-27 11:21:19,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.83 vs. limit=14.05
+2024-07-27 11:21:25,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.39 vs. limit=7.1866666666666665
+2024-07-27 11:21:32,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=8760.0, ans=0.008965217391304348
+2024-07-27 11:21:33,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=8760.0, ans=0.030166666666666668
+2024-07-27 11:21:42,212 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 11:21:45,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=8786.666666666666, ans=0.0
+2024-07-27 11:21:49,213 INFO [train.py:1114] (3/4) Epoch 1, batch 6600, loss[loss=0.417, simple_loss=0.4419, pruned_loss=0.196, over 4928.00 frames. ], tot_loss[loss=0.3859, simple_loss=0.422, pruned_loss=0.1749, over 944582.44 frames. ], batch size: 14, lr: 3.90e-02, grad_scale: 64.0
+2024-07-27 11:21:50,619 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.093e+01 7.005e+01 7.535e+01 8.213e+01 1.214e+02, threshold=1.507e+02, percent-clipped=0.0
+2024-07-27 11:21:50,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.37 vs. limit=4.32
+2024-07-27 11:21:58,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.96 vs. limit=5.762666666666667
+2024-07-27 11:22:20,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=10.82
+2024-07-27 11:22:22,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=8853.333333333334, ans=0.029777777777777778
+2024-07-27 11:22:25,934 INFO [train.py:1114] (3/4) Epoch 1, batch 6650, loss[loss=0.4097, simple_loss=0.445, pruned_loss=0.1872, over 4642.00 frames. ], tot_loss[loss=0.3851, simple_loss=0.4209, pruned_loss=0.1747, over 942719.52 frames. ], batch size: 17, lr: 3.89e-02, grad_scale: 64.0
+2024-07-27 11:22:27,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=8866.666666666666, ans=0.5896666666666668
+2024-07-27 11:22:29,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.20 vs. limit=10.825
+2024-07-27 11:22:33,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=8880.0, ans=0.125
+2024-07-27 11:22:40,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=8893.333333333334, ans=0.07
+2024-07-27 11:23:05,443 INFO [train.py:1114] (3/4) Epoch 1, batch 6700, loss[loss=0.4067, simple_loss=0.4328, pruned_loss=0.1903, over 4720.00 frames. ], tot_loss[loss=0.385, simple_loss=0.4209, pruned_loss=0.1745, over 941687.47 frames. ], batch size: 19, lr: 3.89e-02, grad_scale: 64.0
+2024-07-27 11:23:06,708 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.382e+01 7.413e+01 7.948e+01 9.118e+01 1.138e+02, threshold=1.590e+02, percent-clipped=0.0
+2024-07-27 11:23:20,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.77 vs. limit=10.86
+2024-07-27 11:23:22,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=10.86
+2024-07-27 11:23:23,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=8960.0, ans=0.2104
+2024-07-27 11:23:25,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.80 vs. limit=14.219999999999999
+2024-07-27 11:23:28,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=8973.333333333334, ans=0.125
+2024-07-27 11:23:33,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=8986.666666666666, ans=0.125
+2024-07-27 11:23:41,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=8986.666666666666, ans=0.05
+2024-07-27 11:23:41,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.43 vs. limit=7.246666666666666
+2024-07-27 11:23:44,422 INFO [train.py:1114] (3/4) Epoch 1, batch 6750, loss[loss=0.4581, simple_loss=0.4668, pruned_loss=0.2247, over 4250.00 frames. 
], tot_loss[loss=0.3853, simple_loss=0.4216, pruned_loss=0.1744, over 940058.36 frames. ], batch size: 25, lr: 3.88e-02, grad_scale: 128.0 +2024-07-27 11:23:44,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=9000.0, ans=0.21000000000000002 +2024-07-27 11:23:53,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=9013.333333333334, ans=0.02911111111111111 +2024-07-27 11:23:56,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=9013.333333333334, ans=0.02911111111111111 +2024-07-27 11:23:58,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9026.666666666666, ans=0.0 +2024-07-27 11:24:06,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=10.89 +2024-07-27 11:24:08,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=9040.0, ans=0.008904347826086957 +2024-07-27 11:24:11,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=9040.0, ans=0.125 +2024-07-27 11:24:15,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.06 vs. limit=7.621333333333334 +2024-07-27 11:24:19,787 INFO [train.py:1114] (3/4) Epoch 1, batch 6800, loss[loss=0.4313, simple_loss=0.4739, pruned_loss=0.1944, over 4632.00 frames. ], tot_loss[loss=0.3864, simple_loss=0.4228, pruned_loss=0.175, over 938108.63 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 128.0 +2024-07-27 11:24:19,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=9066.666666666666, ans=0.0 +2024-07-27 11:24:21,070 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.897e+01 7.261e+01 7.946e+01 8.901e+01 1.743e+02, threshold=1.589e+02, percent-clipped=1.0 +2024-07-27 11:24:22,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.97 vs. limit=14.3 +2024-07-27 11:24:32,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=9080.0, ans=0.025 +2024-07-27 11:24:43,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.05 vs. limit=14.33 +2024-07-27 11:24:46,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.44 vs. limit=14.34 +2024-07-27 11:24:51,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=14.58 vs. limit=14.34 +2024-07-27 11:24:53,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=9133.333333333334, ans=0.0 +2024-07-27 11:24:53,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.97 vs. 
limit=10.925 +2024-07-27 11:24:53,538 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=4.37 +2024-07-27 11:24:53,715 INFO [train.py:1114] (3/4) Epoch 1, batch 6850, loss[loss=0.3932, simple_loss=0.4278, pruned_loss=0.1792, over 4697.00 frames. ], tot_loss[loss=0.3859, simple_loss=0.4219, pruned_loss=0.1749, over 939865.83 frames. ], batch size: 13, lr: 3.87e-02, grad_scale: 64.0 +2024-07-27 11:24:59,487 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:25:14,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.37 vs. limit=14.379999999999999 +2024-07-27 11:25:24,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.90 vs. limit=10.945 +2024-07-27 11:25:25,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=9186.666666666666, ans=0.125 +2024-07-27 11:25:27,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=9200.0, ans=0.008869565217391304 +2024-07-27 11:25:28,389 INFO [train.py:1114] (3/4) Epoch 1, batch 6900, loss[loss=0.3241, simple_loss=0.3633, pruned_loss=0.1424, over 4965.00 frames. ], tot_loss[loss=0.3832, simple_loss=0.4199, pruned_loss=0.1733, over 941928.39 frames. ], batch size: 13, lr: 3.86e-02, grad_scale: 64.0 +2024-07-27 11:25:30,372 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.982e+01 7.530e+01 8.620e+01 1.386e+02, threshold=1.506e+02, percent-clipped=0.0 +2024-07-27 11:25:31,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9200.0, ans=0.125 +2024-07-27 11:25:38,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.78 vs. limit=10.955 +2024-07-27 11:25:47,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9226.666666666666, ans=0.20773333333333333 +2024-07-27 11:25:57,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.06 vs. limit=9.626666666666667 +2024-07-27 11:25:59,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.41 vs. 
limit=14.440000000000001 +2024-07-27 11:26:00,795 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:26:00,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=9253.333333333334, ans=0.025 +2024-07-27 11:26:00,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=9253.333333333334, ans=0.125 +2024-07-27 11:26:01,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=9253.333333333334, ans=0.20746666666666663 +2024-07-27 11:26:02,698 INFO [train.py:1114] (3/4) Epoch 1, batch 6950, loss[loss=0.3262, simple_loss=0.3603, pruned_loss=0.146, over 4530.00 frames. ], tot_loss[loss=0.3834, simple_loss=0.4196, pruned_loss=0.1737, over 939700.81 frames. ], batch size: 10, lr: 3.85e-02, grad_scale: 64.0 +2024-07-27 11:26:03,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=9266.666666666666, ans=0.125 +2024-07-27 11:26:08,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=28.86 vs. limit=10.975 +2024-07-27 11:26:15,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.98 vs. limit=10.98 +2024-07-27 11:26:16,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9293.333333333334, ans=0.125 +2024-07-27 11:26:17,402 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.638e+00 +2024-07-27 11:26:23,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=9306.666666666666, ans=0.035 +2024-07-27 11:26:29,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=9320.0, ans=0.025 +2024-07-27 11:26:37,595 INFO [train.py:1114] (3/4) Epoch 1, batch 7000, loss[loss=0.4247, simple_loss=0.4489, pruned_loss=0.2002, over 4575.00 frames. ], tot_loss[loss=0.3827, simple_loss=0.4189, pruned_loss=0.1733, over 938104.86 frames. ], batch size: 17, lr: 3.85e-02, grad_scale: 64.0 +2024-07-27 11:26:38,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=9333.333333333334, ans=0.0 +2024-07-27 11:26:38,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.34 vs. 
limit=14.5 +2024-07-27 11:26:39,626 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.819e+01 7.301e+01 8.158e+01 9.084e+01 2.160e+02, threshold=1.632e+02, percent-clipped=1.0 +2024-07-27 11:26:41,850 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:26:51,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9346.666666666666, ans=0.125 +2024-07-27 11:27:03,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9360.0, ans=0.2064 +2024-07-27 11:27:12,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=9373.333333333334, ans=0.02761111111111111 +2024-07-27 11:27:28,113 INFO [train.py:1114] (3/4) Epoch 1, batch 7050, loss[loss=0.3928, simple_loss=0.4413, pruned_loss=0.1722, over 4701.00 frames. ], tot_loss[loss=0.382, simple_loss=0.4186, pruned_loss=0.1726, over 941596.54 frames. ], batch size: 19, lr: 3.84e-02, grad_scale: 64.0 +2024-07-27 11:27:29,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=11.025 +2024-07-27 11:27:30,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=9400.0, ans=0.025 +2024-07-27 11:27:31,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=9400.0, ans=10.0 +2024-07-27 11:27:44,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=9426.666666666666, ans=0.125 +2024-07-27 11:27:52,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=9426.666666666666, ans=0.027388888888888893 +2024-07-27 11:28:02,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=9440.0, ans=0.125 +2024-07-27 11:28:04,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=9440.0, ans=0.027333333333333334 +2024-07-27 11:28:10,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.66 vs. limit=7.781333333333333 +2024-07-27 11:28:13,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.17 vs. limit=14.59 +2024-07-27 11:28:14,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=9466.666666666666, ans=0.125 +2024-07-27 11:28:15,351 INFO [train.py:1114] (3/4) Epoch 1, batch 7100, loss[loss=0.3807, simple_loss=0.4116, pruned_loss=0.1749, over 4794.00 frames. ], tot_loss[loss=0.3816, simple_loss=0.4186, pruned_loss=0.1723, over 936464.79 frames. 
], batch size: 15, lr: 3.83e-02, grad_scale: 64.0 +2024-07-27 11:28:15,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=9466.666666666666, ans=0.5686666666666667 +2024-07-27 11:28:15,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=9466.666666666666, ans=0.125 +2024-07-27 11:28:17,408 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.989e+01 7.688e+01 8.481e+01 1.289e+02, threshold=1.538e+02, percent-clipped=0.0 +2024-07-27 11:28:23,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=9480.0, ans=0.125 +2024-07-27 11:28:25,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=9480.0, ans=0.125 +2024-07-27 11:28:26,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=9480.0, ans=0.125 +2024-07-27 11:28:30,081 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:28:34,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=9493.333333333334, ans=0.025 +2024-07-27 11:28:38,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=9506.666666666666, ans=0.125 +2024-07-27 11:28:44,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9520.0, ans=0.20479999999999998 +2024-07-27 11:28:46,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=9520.0, ans=0.20479999999999998 +2024-07-27 11:28:46,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=9520.0, ans=0.125 +2024-07-27 11:28:50,200 INFO [train.py:1114] (3/4) Epoch 1, batch 7150, loss[loss=0.4636, simple_loss=0.4646, pruned_loss=0.2313, over 4467.00 frames. ], tot_loss[loss=0.3794, simple_loss=0.4165, pruned_loss=0.1711, over 937606.08 frames. ], batch size: 21, lr: 3.83e-02, grad_scale: 64.0 +2024-07-27 11:29:01,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=9546.666666666666, ans=0.125 +2024-07-27 11:29:07,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.94 vs. limit=14.67 +2024-07-27 11:29:14,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.78 vs. limit=7.3933333333333335 +2024-07-27 11:29:25,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9600.0, ans=0.125 +2024-07-27 11:29:26,390 INFO [train.py:1114] (3/4) Epoch 1, batch 7200, loss[loss=0.387, simple_loss=0.4155, pruned_loss=0.1793, over 4801.00 frames. ], tot_loss[loss=0.3808, simple_loss=0.4179, pruned_loss=0.1719, over 938042.16 frames. 
], batch size: 15, lr: 3.82e-02, grad_scale: 64.0 +2024-07-27 11:29:28,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.86 vs. limit=14.7 +2024-07-27 11:29:28,268 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.182e+01 6.919e+01 7.589e+01 8.160e+01 1.329e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 11:29:34,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.75 vs. limit=14.71 +2024-07-27 11:29:36,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=9613.333333333334, ans=0.5635333333333334 +2024-07-27 11:29:39,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=9626.666666666666, ans=0.20373333333333332 +2024-07-27 11:29:40,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=15.51 vs. limit=14.719999999999999 +2024-07-27 11:30:00,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=9640.0, ans=0.05 +2024-07-27 11:31:01,719 INFO [train.py:1114] (3/4) Epoch 1, batch 7250, loss[loss=0.3106, simple_loss=0.3636, pruned_loss=0.1287, over 4853.00 frames. ], tot_loss[loss=0.381, simple_loss=0.418, pruned_loss=0.172, over 939602.35 frames. ], batch size: 12, lr: 3.82e-02, grad_scale: 64.0 +2024-07-27 11:31:04,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.91 vs. limit=14.75 +2024-07-27 11:31:05,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=9666.666666666666, ans=0.125 +2024-07-27 11:31:18,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.72 vs. limit=4.454 +2024-07-27 11:31:23,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=9706.666666666666, ans=0.125 +2024-07-27 11:31:29,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=9720.0, ans=0.025 +2024-07-27 11:31:29,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.60 vs. limit=11.145 +2024-07-27 11:31:36,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=15.68 vs. limit=14.79 +2024-07-27 11:31:36,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_ff3.min_abs, batch_count=9733.333333333334, ans=0.2 +2024-07-27 11:31:37,467 INFO [train.py:1114] (3/4) Epoch 1, batch 7300, loss[loss=0.3664, simple_loss=0.4136, pruned_loss=0.1596, over 4860.00 frames. ], tot_loss[loss=0.3802, simple_loss=0.4178, pruned_loss=0.1712, over 940002.60 frames. 
], batch size: 12, lr: 3.81e-02, grad_scale: 64.0 +2024-07-27 11:31:39,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.899e+01 6.987e+01 7.392e+01 8.309e+01 1.190e+02, threshold=1.478e+02, percent-clipped=0.0 +2024-07-27 11:31:50,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=9746.666666666666, ans=0.5588666666666667 +2024-07-27 11:32:17,896 INFO [train.py:1114] (3/4) Epoch 1, batch 7350, loss[loss=0.4037, simple_loss=0.4175, pruned_loss=0.1949, over 4638.00 frames. ], tot_loss[loss=0.3805, simple_loss=0.4181, pruned_loss=0.1714, over 939403.69 frames. ], batch size: 12, lr: 3.80e-02, grad_scale: 64.0 +2024-07-27 11:32:21,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=9800.0, ans=0.125 +2024-07-27 11:32:32,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.73 vs. limit=4.474 +2024-07-27 11:32:42,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=11.19 +2024-07-27 11:32:53,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=9853.333333333334, ans=0.125 +2024-07-27 11:32:55,345 INFO [train.py:1114] (3/4) Epoch 1, batch 7400, loss[loss=0.4469, simple_loss=0.4828, pruned_loss=0.2055, over 4697.00 frames. ], tot_loss[loss=0.3804, simple_loss=0.4188, pruned_loss=0.171, over 940748.95 frames. ], batch size: 13, lr: 3.80e-02, grad_scale: 64.0 +2024-07-27 11:32:57,410 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.927e+01 7.410e+01 8.183e+01 1.194e+02, threshold=1.482e+02, percent-clipped=0.0 +2024-07-27 11:32:59,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.40 vs. limit=11.2 +2024-07-27 11:33:03,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=9880.0, ans=0.1512 +2024-07-27 11:33:03,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.66 vs. limit=9.940000000000001 +2024-07-27 11:33:14,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=9893.333333333334, ans=0.125 +2024-07-27 11:33:37,724 INFO [train.py:1114] (3/4) Epoch 1, batch 7450, loss[loss=0.3773, simple_loss=0.4011, pruned_loss=0.1768, over 4613.00 frames. ], tot_loss[loss=0.3785, simple_loss=0.4167, pruned_loss=0.1701, over 937939.63 frames. 
], batch size: 11, lr: 3.79e-02, grad_scale: 64.0 +2024-07-27 11:33:38,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=9933.333333333334, ans=0.05 +2024-07-27 11:33:45,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=9946.666666666666, ans=0.008707246376811594 +2024-07-27 11:34:06,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=9986.666666666666, ans=0.125 +2024-07-27 11:34:39,721 INFO [train.py:1114] (3/4) Epoch 1, batch 7500, loss[loss=0.4805, simple_loss=0.4775, pruned_loss=0.2417, over 3560.00 frames. ], tot_loss[loss=0.3779, simple_loss=0.4164, pruned_loss=0.1698, over 936541.41 frames. ], batch size: 36, lr: 3.78e-02, grad_scale: 64.0 +2024-07-27 11:34:41,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.028e+01 6.970e+01 7.592e+01 8.473e+01 1.449e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 11:34:47,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=10013.333333333334, ans=0.125 +2024-07-27 11:34:52,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10026.666666666666, ans=0.0 +2024-07-27 11:34:54,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=10026.666666666666, ans=0.125 +2024-07-27 11:35:13,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10026.666666666666, ans=0.19973333333333332 +2024-07-27 11:35:19,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=10040.0, ans=0.025 +2024-07-27 11:35:29,212 INFO [train.py:1114] (3/4) Epoch 1, batch 7550, loss[loss=0.3908, simple_loss=0.4375, pruned_loss=0.172, over 4614.00 frames. ], tot_loss[loss=0.3804, simple_loss=0.4189, pruned_loss=0.171, over 936608.42 frames. ], batch size: 17, lr: 3.78e-02, grad_scale: 64.0 +2024-07-27 11:35:33,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=10066.666666666666, ans=0.5476666666666667 +2024-07-27 11:35:35,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=10080.0, ans=0.125 +2024-07-27 11:35:38,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=10080.0, ans=0.125 +2024-07-27 11:35:40,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=10080.0, ans=0.125 +2024-07-27 11:35:53,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=10106.666666666666, ans=0.125 +2024-07-27 11:35:58,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.45 vs. 
limit=15.08 +2024-07-27 11:36:00,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=10106.666666666666, ans=0.035 +2024-07-27 11:36:10,595 INFO [train.py:1114] (3/4) Epoch 1, batch 7600, loss[loss=0.3568, simple_loss=0.4042, pruned_loss=0.1547, over 4811.00 frames. ], tot_loss[loss=0.3793, simple_loss=0.4183, pruned_loss=0.1702, over 938230.08 frames. ], batch size: 14, lr: 3.77e-02, grad_scale: 64.0 +2024-07-27 11:36:12,582 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.409e+01 6.929e+01 7.591e+01 8.810e+01 1.172e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 11:36:15,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=10133.333333333334, ans=0.008666666666666666 +2024-07-27 11:36:32,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=10146.666666666666, ans=0.125 +2024-07-27 11:36:46,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=8.064 +2024-07-27 11:36:51,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.39 vs. limit=4.526 +2024-07-27 11:36:54,233 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:37:00,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=10186.666666666666, ans=0.008655072463768116 +2024-07-27 11:37:01,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=10186.666666666666, ans=0.125 +2024-07-27 11:37:02,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.75 vs. limit=15.14 +2024-07-27 11:37:02,938 INFO [train.py:1114] (3/4) Epoch 1, batch 7650, loss[loss=0.3339, simple_loss=0.3889, pruned_loss=0.1395, over 4938.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.4167, pruned_loss=0.1695, over 937643.15 frames. 
], batch size: 12, lr: 3.77e-02, grad_scale: 64.0 +2024-07-27 11:37:03,832 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:37:10,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=10213.333333333334, ans=0.125 +2024-07-27 11:37:13,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=10213.333333333334, ans=0.125 +2024-07-27 11:37:14,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=10213.333333333334, ans=0.024111111111111107 +2024-07-27 11:37:18,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=10226.666666666666, ans=0.5420666666666667 +2024-07-27 11:37:22,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=10226.666666666666, ans=0.19773333333333334 +2024-07-27 11:37:35,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.68 vs. limit=15.190000000000001 +2024-07-27 11:37:37,474 INFO [train.py:1114] (3/4) Epoch 1, batch 7700, loss[loss=0.3561, simple_loss=0.4071, pruned_loss=0.1525, over 4693.00 frames. ], tot_loss[loss=0.3781, simple_loss=0.4167, pruned_loss=0.1697, over 935495.88 frames. ], batch size: 13, lr: 3.76e-02, grad_scale: 64.0 +2024-07-27 11:37:39,433 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.134e+01 7.002e+01 7.732e+01 8.804e+01 1.160e+02, threshold=1.546e+02, percent-clipped=0.0 +2024-07-27 11:37:39,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=10266.666666666666, ans=0.1 +2024-07-27 11:37:44,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.02 vs. limit=11.355 +2024-07-27 11:37:47,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=10280.0, ans=0.5402 +2024-07-27 11:38:09,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=10320.0, ans=0.125 +2024-07-27 11:38:10,886 INFO [train.py:1114] (3/4) Epoch 1, batch 7750, loss[loss=0.4008, simple_loss=0.4423, pruned_loss=0.1796, over 4924.00 frames. ], tot_loss[loss=0.3799, simple_loss=0.4186, pruned_loss=0.1706, over 936480.16 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0 +2024-07-27 11:38:11,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=10333.333333333334, ans=0.023611111111111107 +2024-07-27 11:38:20,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=10346.666666666666, ans=0.05 +2024-07-27 11:38:20,669 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.07 vs. limit=11.379999999999999 +2024-07-27 11:38:23,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.34 vs. 
limit=4.552 +2024-07-27 11:38:26,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.10 vs. limit=15.27 +2024-07-27 11:38:27,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.66 vs. limit=15.27 +2024-07-27 11:38:29,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=10360.0, ans=0.8536 +2024-07-27 11:38:30,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=11.385 +2024-07-27 11:38:40,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=10386.666666666666, ans=0.035 +2024-07-27 11:38:45,452 INFO [train.py:1114] (3/4) Epoch 1, batch 7800, loss[loss=0.3614, simple_loss=0.4055, pruned_loss=0.1587, over 4673.00 frames. ], tot_loss[loss=0.3786, simple_loss=0.4182, pruned_loss=0.1696, over 937886.09 frames. ], batch size: 14, lr: 3.75e-02, grad_scale: 64.0 +2024-07-27 11:38:47,309 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.952e+01 6.890e+01 7.293e+01 8.300e+01 1.085e+02, threshold=1.459e+02, percent-clipped=0.0 +2024-07-27 11:38:47,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=10400.0, ans=0.023333333333333334 +2024-07-27 11:38:55,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=10413.333333333334, ans=0.8541333333333333 +2024-07-27 11:39:00,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=10426.666666666666, ans=0.023222222222222227 +2024-07-27 11:39:01,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=11.41 +2024-07-27 11:39:02,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=10426.666666666666, ans=0.07 +2024-07-27 11:39:03,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=10426.666666666666, ans=11.41 +2024-07-27 11:39:12,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.01 vs. limit=15.33 +2024-07-27 11:39:13,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=10453.333333333334, ans=0.09899494936611666 +2024-07-27 11:39:15,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=15.34 +2024-07-27 11:39:16,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.29 vs. limit=15.34 +2024-07-27 11:39:20,620 INFO [train.py:1114] (3/4) Epoch 1, batch 7850, loss[loss=0.2397, simple_loss=0.3138, pruned_loss=0.08274, over 4501.00 frames. ], tot_loss[loss=0.3793, simple_loss=0.4181, pruned_loss=0.1702, over 936920.10 frames. 
], batch size: 10, lr: 3.74e-02, grad_scale: 64.0 +2024-07-27 11:39:23,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=10466.666666666666, ans=0.125 +2024-07-27 11:39:25,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10466.666666666666, ans=0.19533333333333333 +2024-07-27 11:39:37,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=10493.333333333334, ans=0.125 +2024-07-27 11:39:39,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.32 vs. limit=15.370000000000001 +2024-07-27 11:39:40,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10506.666666666666, ans=0.19493333333333335 +2024-07-27 11:39:42,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=10506.666666666666, ans=0.19493333333333335 +2024-07-27 11:39:44,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.20 vs. limit=7.626666666666667 +2024-07-27 11:39:45,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=10506.666666666666, ans=0.19493333333333335 +2024-07-27 11:39:48,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=10520.0, ans=0.125 +2024-07-27 11:39:49,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10520.0, ans=0.19479999999999997 +2024-07-27 11:39:54,396 INFO [train.py:1114] (3/4) Epoch 1, batch 7900, loss[loss=0.3972, simple_loss=0.4446, pruned_loss=0.1749, over 4870.00 frames. ], tot_loss[loss=0.3805, simple_loss=0.4196, pruned_loss=0.1707, over 934124.80 frames. ], batch size: 14, lr: 3.73e-02, grad_scale: 64.0 +2024-07-27 11:39:56,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.883e+01 7.101e+01 7.733e+01 8.610e+01 1.628e+02, threshold=1.547e+02, percent-clipped=1.0 +2024-07-27 11:40:02,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=10546.666666666666, ans=0.025 +2024-07-27 11:40:06,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=10546.666666666666, ans=0.022722222222222227 +2024-07-27 11:40:27,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=10600.0, ans=0.125 +2024-07-27 11:40:28,112 INFO [train.py:1114] (3/4) Epoch 1, batch 7950, loss[loss=0.4704, simple_loss=0.4688, pruned_loss=0.236, over 3714.00 frames. ], tot_loss[loss=0.3793, simple_loss=0.4183, pruned_loss=0.1702, over 936393.27 frames. 
], batch size: 35, lr: 3.73e-02, grad_scale: 64.0 +2024-07-27 11:40:37,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=10613.333333333334, ans=0.125 +2024-07-27 11:40:40,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.65 vs. limit=8.245333333333335 +2024-07-27 11:40:47,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=10626.666666666666, ans=0.5280666666666667 +2024-07-27 11:40:57,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=10653.333333333334, ans=0.125 +2024-07-27 11:40:59,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.06 vs. limit=11.495000000000001 +2024-07-27 11:41:03,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=10666.666666666666, ans=0.07 +2024-07-27 11:41:44,680 INFO [train.py:1114] (3/4) Epoch 1, batch 8000, loss[loss=0.3141, simple_loss=0.3639, pruned_loss=0.1321, over 4617.00 frames. ], tot_loss[loss=0.376, simple_loss=0.415, pruned_loss=0.1685, over 935415.37 frames. ], batch size: 11, lr: 3.72e-02, grad_scale: 64.0 +2024-07-27 11:41:46,747 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.949e+01 6.868e+01 7.730e+01 8.687e+01 2.055e+02, threshold=1.546e+02, percent-clipped=1.0 +2024-07-27 11:41:48,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=10666.666666666666, ans=0.022222222222222227 +2024-07-27 11:41:53,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=5.04 vs. limit=11.504999999999999 +2024-07-27 11:42:08,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=10706.666666666666, ans=0.125 +2024-07-27 11:42:14,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10720.0, ans=0.19279999999999997 +2024-07-27 11:42:18,029 INFO [train.py:1114] (3/4) Epoch 1, batch 8050, loss[loss=0.3709, simple_loss=0.414, pruned_loss=0.1639, over 4814.00 frames. ], tot_loss[loss=0.3752, simple_loss=0.4147, pruned_loss=0.1679, over 935134.95 frames. ], batch size: 14, lr: 3.72e-02, grad_scale: 64.0 +2024-07-27 11:42:18,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=11.525 +2024-07-27 11:42:33,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=10760.0, ans=0.008530434782608697 +2024-07-27 11:42:33,657 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.96 vs. 
limit=10.379999999999999 +2024-07-27 11:42:35,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=10760.0, ans=0.021833333333333337 +2024-07-27 11:42:38,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=10760.0, ans=0.125 +2024-07-27 11:42:41,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=10773.333333333334, ans=0.0 +2024-07-27 11:42:42,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.61 vs. limit=15.58 +2024-07-27 11:42:46,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=10786.666666666666, ans=0.09899494936611666 +2024-07-27 11:42:51,551 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.700e+00 +2024-07-27 11:42:52,709 INFO [train.py:1114] (3/4) Epoch 1, batch 8100, loss[loss=0.4054, simple_loss=0.4505, pruned_loss=0.1802, over 4797.00 frames. ], tot_loss[loss=0.3778, simple_loss=0.4171, pruned_loss=0.1692, over 934411.26 frames. ], batch size: 15, lr: 3.71e-02, grad_scale: 64.0 +2024-07-27 11:42:54,752 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.815e+01 7.005e+01 7.921e+01 8.722e+01 1.648e+02, threshold=1.584e+02, percent-clipped=1.0 +2024-07-27 11:43:12,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.63 vs. limit=15.63 +2024-07-27 11:43:25,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=10866.666666666666, ans=0.125 +2024-07-27 11:43:26,150 INFO [train.py:1114] (3/4) Epoch 1, batch 8150, loss[loss=0.3748, simple_loss=0.4312, pruned_loss=0.1592, over 4782.00 frames. ], tot_loss[loss=0.3746, simple_loss=0.4147, pruned_loss=0.1673, over 938086.63 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0 +2024-07-27 11:43:33,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=10880.0, ans=0.0 +2024-07-27 11:43:42,191 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:43:49,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.94 vs. limit=15.68 +2024-07-27 11:43:52,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=10920.0, ans=0.125 +2024-07-27 11:43:55,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.46 vs. 
limit=11.594999999999999 +2024-07-27 11:43:55,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=10920.0, ans=0.09899494936611666 +2024-07-27 11:44:05,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=10920.0, ans=0.125 +2024-07-27 11:44:05,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=10933.333333333334, ans=0.02111111111111111 +2024-07-27 11:44:06,716 INFO [train.py:1114] (3/4) Epoch 1, batch 8200, loss[loss=0.3884, simple_loss=0.4165, pruned_loss=0.1802, over 4795.00 frames. ], tot_loss[loss=0.3746, simple_loss=0.4151, pruned_loss=0.167, over 939186.68 frames. ], batch size: 15, lr: 3.70e-02, grad_scale: 64.0 +2024-07-27 11:44:08,823 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.129e+01 6.954e+01 7.394e+01 8.427e+01 2.023e+02, threshold=1.479e+02, percent-clipped=1.0 +2024-07-27 11:44:12,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=10933.333333333334, ans=0.19066666666666665 +2024-07-27 11:44:25,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=10960.0, ans=0.025 +2024-07-27 11:44:29,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.17 vs. limit=10.486666666666668 +2024-07-27 11:44:40,416 INFO [train.py:1114] (3/4) Epoch 1, batch 8250, loss[loss=0.3068, simple_loss=0.3625, pruned_loss=0.1255, over 4897.00 frames. ], tot_loss[loss=0.3743, simple_loss=0.4145, pruned_loss=0.167, over 939582.80 frames. ], batch size: 13, lr: 3.69e-02, grad_scale: 64.0 +2024-07-27 11:44:53,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.02 vs. limit=10.513333333333332 +2024-07-27 11:44:55,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=11026.666666666666, ans=0.125 +2024-07-27 11:44:55,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=11026.666666666666, ans=0.125 +2024-07-27 11:44:56,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=4.654 +2024-07-27 11:45:06,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.07 vs. limit=11.64 +2024-07-27 11:45:07,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=11053.333333333334, ans=0.008466666666666667 +2024-07-27 11:46:40,207 INFO [train.py:1114] (3/4) Epoch 1, batch 8300, loss[loss=0.3552, simple_loss=0.4056, pruned_loss=0.1524, over 4896.00 frames. ], tot_loss[loss=0.3764, simple_loss=0.416, pruned_loss=0.1684, over 938945.22 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0 +2024-07-27 11:46:41,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.49 vs. 
limit=11.65 +2024-07-27 11:46:42,161 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 6.941e+01 7.717e+01 8.510e+01 1.243e+02, threshold=1.543e+02, percent-clipped=0.0 +2024-07-27 11:47:03,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=11093.333333333334, ans=0.125 +2024-07-27 11:47:07,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=11093.333333333334, ans=15.82 +2024-07-27 11:47:20,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11106.666666666666, ans=0.18893333333333334 +2024-07-27 11:47:28,611 INFO [train.py:1114] (3/4) Epoch 1, batch 8350, loss[loss=0.401, simple_loss=0.4236, pruned_loss=0.1892, over 4805.00 frames. ], tot_loss[loss=0.3752, simple_loss=0.4151, pruned_loss=0.1677, over 941623.90 frames. ], batch size: 15, lr: 3.68e-02, grad_scale: 64.0 +2024-07-27 11:47:28,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.81 vs. limit=15.85 +2024-07-27 11:47:33,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.41 vs. limit=15.85 +2024-07-27 11:47:38,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11146.666666666666, ans=0.18853333333333333 +2024-07-27 11:47:49,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=11146.666666666666, ans=0.18853333333333333 +2024-07-27 11:48:01,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=11.690000000000001 +2024-07-27 11:48:05,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.98 vs. limit=10.593333333333334 +2024-07-27 11:48:11,884 INFO [train.py:1114] (3/4) Epoch 1, batch 8400, loss[loss=0.3178, simple_loss=0.3602, pruned_loss=0.1377, over 4777.00 frames. ], tot_loss[loss=0.375, simple_loss=0.4151, pruned_loss=0.1674, over 939814.61 frames. ], batch size: 12, lr: 3.67e-02, grad_scale: 64.0 +2024-07-27 11:48:13,817 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.876e+01 7.080e+01 7.641e+01 8.587e+01 1.412e+02, threshold=1.528e+02, percent-clipped=0.0 +2024-07-27 11:48:14,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=16.81 vs. 
limit=15.9 +2024-07-27 11:48:17,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=11213.333333333334, ans=0.125 +2024-07-27 11:48:20,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11213.333333333334, ans=0.125 +2024-07-27 11:48:22,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=11213.333333333334, ans=10.0 +2024-07-27 11:48:28,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=11226.666666666666, ans=0.025 +2024-07-27 11:48:34,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.26 vs. limit=11.715 +2024-07-27 11:48:40,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11253.333333333334, ans=0.125 +2024-07-27 11:48:43,849 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:48:45,019 INFO [train.py:1114] (3/4) Epoch 1, batch 8450, loss[loss=0.4629, simple_loss=0.4857, pruned_loss=0.2201, over 4787.00 frames. ], tot_loss[loss=0.3748, simple_loss=0.4158, pruned_loss=0.1669, over 938864.90 frames. ], batch size: 15, lr: 3.67e-02, grad_scale: 64.0 +2024-07-27 11:48:45,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=11266.666666666666, ans=0.025 +2024-07-27 11:48:47,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.97 vs. limit=10.633333333333333 +2024-07-27 11:48:49,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.23 vs. limit=11.725 +2024-07-27 11:48:51,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=11280.0, ans=0.18719999999999998 +2024-07-27 11:48:58,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.25 vs. limit=15.96 +2024-07-27 11:49:06,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=11.74 +2024-07-27 11:49:10,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.98 vs. limit=15.98 +2024-07-27 11:49:14,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=11320.0, ans=0.019500000000000003 +2024-07-27 11:49:16,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=11320.0, ans=0.5038 +2024-07-27 11:49:20,000 INFO [train.py:1114] (3/4) Epoch 1, batch 8500, loss[loss=0.3054, simple_loss=0.3598, pruned_loss=0.1255, over 4620.00 frames. ], tot_loss[loss=0.3722, simple_loss=0.4139, pruned_loss=0.1652, over 938581.11 frames. 
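
The `ScheduledFloat` entries above record hyperparameters that are functions of `batch_count` rather than constants: dropout probabilities, skip rates, and balancer probabilities are annealed as training progresses. The logged values are consistent with piecewise-linear ramps, e.g. `feed_forward1.out_proj.dropout_p` at `batch_count=11106.67` is `0.18893 = 0.3 - 1e-5 * 11106.67`. A minimal sketch of such a schedule (the class name comes from the log; the breakpoints below are assumptions chosen to match the logged numbers, not confirmed recipe values):

```python
import bisect

class ScheduledFloat:
    """Piecewise-linear schedule of a float hyperparameter over batch_count.

    A minimal sketch of the mechanism behind the
    `ScheduledFloat: name=..., batch_count=..., ans=...` log lines;
    the breakpoints used below are assumptions, not confirmed recipe values.
    """

    def __init__(self, *points):
        # points: (batch_count, value) pairs in increasing batch_count order.
        self.xs = [float(x) for x, _ in points]
        self.ys = [float(y) for _, y in points]
        self.batch_count = 0.0

    def __float__(self):
        x = self.batch_count
        if x <= self.xs[0]:
            return self.ys[0]
        if x >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, x) - 1
        x0, x1 = self.xs[i], self.xs[i + 1]
        y0, y1 = self.ys[i], self.ys[i + 1]
        return y0 + (y1 - y0) * (x - x0) / (x1 - x0)

# Consistent with the dropout_p values in the log: 0.3 at batch 0,
# falling by 1e-5 per batch, e.g. 0.3 - 11106.67 * 1e-5 = 0.18893.
dropout_p = ScheduledFloat((0.0, 0.3), (20000.0, 0.1))
dropout_p.batch_count = 11106.666666666666
print(float(dropout_p))  # ~0.18893, matching the logged `ans`
```
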
], batch size: 11, lr: 3.66e-02, grad_scale: 64.0 +2024-07-27 11:49:21,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=11333.333333333334, ans=0.125 +2024-07-27 11:49:21,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=11333.333333333334, ans=0.5033333333333334 +2024-07-27 11:49:21,904 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.800e+01 6.867e+01 7.338e+01 8.262e+01 1.317e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 11:49:39,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=11360.0, ans=0.5024000000000001 +2024-07-27 11:49:40,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.94 vs. limit=7.843333333333334 +2024-07-27 11:49:47,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=11386.666666666666, ans=0.125 +2024-07-27 11:49:50,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=11386.666666666666, ans=0.008394202898550725 +2024-07-27 11:49:54,526 INFO [train.py:1114] (3/4) Epoch 1, batch 8550, loss[loss=0.3162, simple_loss=0.3731, pruned_loss=0.1297, over 4798.00 frames. ], tot_loss[loss=0.3721, simple_loss=0.4137, pruned_loss=0.1653, over 939399.79 frames. ], batch size: 11, lr: 3.65e-02, grad_scale: 64.0 +2024-07-27 11:49:56,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11400.0, ans=0.186 +2024-07-27 11:50:03,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.08 vs. limit=11.780000000000001 +2024-07-27 11:50:13,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=11426.666666666666, ans=0.5000666666666667 +2024-07-27 11:50:16,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11440.0, ans=0.125 +2024-07-27 11:50:18,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=8.576 +2024-07-27 11:50:18,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=11440.0, ans=0.019000000000000003 +2024-07-27 11:50:20,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.24 vs. limit=11.79 +2024-07-27 11:50:26,820 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.32 vs. limit=11.795 +2024-07-27 11:50:28,073 INFO [train.py:1114] (3/4) Epoch 1, batch 8600, loss[loss=0.3436, simple_loss=0.3912, pruned_loss=0.148, over 4819.00 frames. ], tot_loss[loss=0.3709, simple_loss=0.4123, pruned_loss=0.1648, over 938780.39 frames. 
], batch size: 15, lr: 3.65e-02, grad_scale: 64.0 +2024-07-27 11:50:31,111 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.520e+01 6.717e+01 7.221e+01 8.025e+01 1.285e+02, threshold=1.444e+02, percent-clipped=0.0 +2024-07-27 11:50:32,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=11466.666666666666, ans=0.07 +2024-07-27 11:50:51,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=11506.666666666666, ans=0.008368115942028985 +2024-07-27 11:51:00,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=11520.0, ans=0.025 +2024-07-27 11:51:01,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=11520.0, ans=0.01866666666666667 +2024-07-27 11:51:02,923 INFO [train.py:1114] (3/4) Epoch 1, batch 8650, loss[loss=0.3851, simple_loss=0.4357, pruned_loss=0.1673, over 4910.00 frames. ], tot_loss[loss=0.371, simple_loss=0.4124, pruned_loss=0.1648, over 939793.12 frames. ], batch size: 15, lr: 3.64e-02, grad_scale: 64.0 +2024-07-27 11:51:04,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=11533.333333333334, ans=0.49633333333333335 +2024-07-27 11:51:14,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=11546.666666666666, ans=0.125 +2024-07-27 11:51:16,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=11546.666666666666, ans=0.008359420289855073 +2024-07-27 11:51:24,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=11560.0, ans=0.49540000000000006 +2024-07-27 11:51:35,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.76 vs. limit=11.84 +2024-07-27 11:51:44,972 INFO [train.py:1114] (3/4) Epoch 1, batch 8700, loss[loss=0.3274, simple_loss=0.3837, pruned_loss=0.1356, over 4759.00 frames. ], tot_loss[loss=0.3718, simple_loss=0.4131, pruned_loss=0.1652, over 937427.11 frames. ], batch size: 13, lr: 3.64e-02, grad_scale: 64.0 +2024-07-27 11:51:46,839 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.502e+01 6.768e+01 7.395e+01 8.572e+01 1.594e+02, threshold=1.479e+02, percent-clipped=2.0 +2024-07-27 11:51:54,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=11613.333333333334, ans=0.125 +2024-07-27 11:52:08,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=11640.0, ans=0.125 +2024-07-27 11:52:20,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=11666.666666666666, ans=0.09899494936611666 +2024-07-27 11:52:21,399 INFO [train.py:1114] (3/4) Epoch 1, batch 8750, loss[loss=0.4226, simple_loss=0.4588, pruned_loss=0.1932, over 4682.00 frames. ], tot_loss[loss=0.371, simple_loss=0.4125, pruned_loss=0.1648, over 935509.30 frames. 
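
The `WARNING [optim.py:487]` lines summarize gradient norms over a recent window: five order statistics (min, 25%, median, 75%, max), the active clipping threshold, and the percentage of recently clipped batches. Throughout this log the threshold equals `Clipping_scale` times the median, e.g. `2.0 x 7.221e+01 = 1.444e+02` in the warning above, so clipping adapts to the observed norm distribution instead of using a fixed constant. A minimal sketch of that behaviour, assuming a simple sliding window of norms (the window size and logging cadence here are guesses; the real optimizer folds this into its step):

```python
from collections import deque
import torch

class AdaptiveGradClipper:
    """Clip gradients against clipping_scale * median of recent grad norms.

    A sketch consistent with the logged warnings
    `Clipping_scale=2.0, grad-norm quartiles ... threshold=...`;
    window size and logging cadence are assumptions.
    """

    def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)

    def __call__(self, params) -> torch.Tensor:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.detach().norm() for g in grads]))
        self.norms.append(norm.item())
        median = sorted(self.norms)[len(self.norms) // 2]
        threshold = self.clipping_scale * median
        if norm > threshold:  # counted toward "percent-clipped" in the log
            for g in grads:
                g.detach().mul_(threshold / norm)
        return norm

    def order_stats(self):
        # The five numbers in the warnings: min / 25% / median / 75% / max.
        t = sorted(self.norms)
        n = len(t)
        return [t[0], t[n // 4], t[n // 2], t[(3 * n) // 4], t[-1]]
```
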
], batch size: 15, lr: 3.63e-02, grad_scale: 64.0 +2024-07-27 11:52:24,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=11666.666666666666, ans=0.125 +2024-07-27 11:52:27,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=11680.0, ans=0.3752 +2024-07-27 11:52:32,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=11680.0, ans=0.125 +2024-07-27 11:52:32,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=11680.0, ans=0.025 +2024-07-27 11:52:33,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=11680.0, ans=0.4912000000000001 +2024-07-27 11:52:44,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=11706.666666666666, ans=0.00832463768115942 +2024-07-27 11:52:47,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.81 vs. limit=7.926666666666666 +2024-07-27 11:52:48,295 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.05 vs. limit=16.29 +2024-07-27 11:52:55,475 INFO [train.py:1114] (3/4) Epoch 1, batch 8800, loss[loss=0.4004, simple_loss=0.4525, pruned_loss=0.1742, over 4933.00 frames. ], tot_loss[loss=0.3704, simple_loss=0.4125, pruned_loss=0.1642, over 936571.21 frames. ], batch size: 14, lr: 3.62e-02, grad_scale: 64.0 +2024-07-27 11:52:57,638 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.900e+01 7.237e+01 7.954e+01 8.853e+01 1.433e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 11:53:12,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=11760.0, ans=0.00831304347826087 +2024-07-27 11:53:19,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=11773.333333333334, ans=0.125 +2024-07-27 11:53:24,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=11786.666666666666, ans=0.0 +2024-07-27 11:53:29,052 INFO [train.py:1114] (3/4) Epoch 1, batch 8850, loss[loss=0.3879, simple_loss=0.428, pruned_loss=0.1739, over 4496.00 frames. ], tot_loss[loss=0.3699, simple_loss=0.4114, pruned_loss=0.1642, over 931246.59 frames. ], batch size: 21, lr: 3.62e-02, grad_scale: 128.0 +2024-07-27 11:53:31,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=11800.0, ans=0.0175 +2024-07-27 11:53:43,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11826.666666666666, ans=0.125 +2024-07-27 11:53:44,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.63 vs. 
limit=11.934999999999999 +2024-07-27 11:53:50,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=11840.0, ans=0.008295652173913044 +2024-07-27 11:53:53,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=11840.0, ans=0.48560000000000003 +2024-07-27 11:53:56,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=11853.333333333334, ans=0.125 +2024-07-27 11:54:00,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=2.69 vs. limit=11.945 +2024-07-27 11:54:02,399 INFO [train.py:1114] (3/4) Epoch 1, batch 8900, loss[loss=0.3453, simple_loss=0.3878, pruned_loss=0.1514, over 4949.00 frames. ], tot_loss[loss=0.3692, simple_loss=0.4109, pruned_loss=0.1638, over 929426.15 frames. ], batch size: 12, lr: 3.61e-02, grad_scale: 128.0 +2024-07-27 11:54:04,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.705e+01 6.769e+01 7.408e+01 8.026e+01 1.011e+02, threshold=1.482e+02, percent-clipped=0.0 +2024-07-27 11:54:05,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=11866.666666666666, ans=0.125 +2024-07-27 11:54:07,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=11866.666666666666, ans=0.125 +2024-07-27 11:54:07,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=11866.666666666666, ans=0.13133333333333333 +2024-07-27 11:54:13,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=11880.0, ans=0.125 +2024-07-27 11:54:28,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=11893.333333333334, ans=0.125 +2024-07-27 11:54:31,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=11.96 +2024-07-27 11:54:33,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=11893.333333333334, ans=0.025 +2024-07-27 11:54:35,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=11906.666666666666, ans=0.01705555555555556 +2024-07-27 11:54:36,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.55 vs. limit=11.965 +2024-07-27 11:54:37,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=11906.666666666666, ans=0.01705555555555556 +2024-07-27 11:54:43,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=11920.0, ans=0.1808 +2024-07-27 11:54:48,652 INFO [train.py:1114] (3/4) Epoch 1, batch 8950, loss[loss=0.3576, simple_loss=0.4217, pruned_loss=0.1467, over 4580.00 frames. ], tot_loss[loss=0.368, simple_loss=0.4102, pruned_loss=0.1629, over 930613.97 frames. 
], batch size: 21, lr: 3.61e-02, grad_scale: 128.0 +2024-07-27 11:55:17,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=11973.333333333334, ans=0.4809333333333333 +2024-07-27 11:55:31,945 INFO [train.py:1114] (3/4) Epoch 1, batch 9000, loss[loss=0.3086, simple_loss=0.3608, pruned_loss=0.1282, over 4636.00 frames. ], tot_loss[loss=0.3655, simple_loss=0.4079, pruned_loss=0.1616, over 933274.74 frames. ], batch size: 12, lr: 3.60e-02, grad_scale: 64.0 +2024-07-27 11:55:31,946 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 11:55:37,720 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.7820, 1.1519, 1.5836, 1.5963, 1.3290, 1.6671, 1.5718, 1.1302], + device='cuda:3') +2024-07-27 11:55:45,438 INFO [train.py:1146] (3/4) Epoch 1, validation: loss=0.2917, simple_loss=0.3779, pruned_loss=0.1028, over 944034.00 frames. +2024-07-27 11:55:45,438 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 11:55:45,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=12000.0, ans=0.0 +2024-07-27 11:55:45,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.80 vs. limit=12.0 +2024-07-27 11:55:48,956 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.414e+01 6.571e+01 7.230e+01 7.907e+01 1.156e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 11:55:58,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=12013.333333333334, ans=0.125 +2024-07-27 11:56:02,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.74 vs. limit=8.006666666666666 +2024-07-27 11:56:07,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=12026.666666666666, ans=0.01655555555555556 +2024-07-27 11:56:09,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=12040.0, ans=0.8704 +2024-07-27 11:56:17,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=12053.333333333334, ans=0.0 +2024-07-27 11:56:21,461 INFO [train.py:1114] (3/4) Epoch 1, batch 9050, loss[loss=0.3058, simple_loss=0.3443, pruned_loss=0.1337, over 4483.00 frames. ], tot_loss[loss=0.3642, simple_loss=0.4068, pruned_loss=0.1608, over 934000.70 frames. ], batch size: 10, lr: 3.59e-02, grad_scale: 64.0 +2024-07-27 11:56:26,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.09 vs. 
limit=11.033333333333333 +2024-07-27 11:56:29,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12080.0, ans=0.17919999999999997 +2024-07-27 11:56:35,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12093.333333333334, ans=0.17906666666666665 +2024-07-27 11:56:35,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=12093.333333333334, ans=0.125 +2024-07-27 11:56:45,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.62 vs. limit=11.053333333333333 +2024-07-27 11:56:47,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=12120.0, ans=0.4758 +2024-07-27 11:56:47,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12120.0, ans=0.1788 +2024-07-27 11:56:53,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=12120.0, ans=12.045 +2024-07-27 11:56:54,150 INFO [train.py:1114] (3/4) Epoch 1, batch 9100, loss[loss=0.3633, simple_loss=0.411, pruned_loss=0.1578, over 4940.00 frames. ], tot_loss[loss=0.3624, simple_loss=0.4058, pruned_loss=0.1596, over 937352.07 frames. ], batch size: 14, lr: 3.59e-02, grad_scale: 64.0 +2024-07-27 11:57:01,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.077e+01 6.999e+01 7.542e+01 8.527e+01 1.258e+02, threshold=1.508e+02, percent-clipped=0.0 +2024-07-27 11:57:04,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=12146.666666666666, ans=0.125 +2024-07-27 11:57:05,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12146.666666666666, ans=0.17853333333333332 +2024-07-27 11:57:07,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.37 vs. limit=12.055 +2024-07-27 11:57:09,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=12146.666666666666, ans=0.09899494936611666 +2024-07-27 11:57:11,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.17 vs. limit=16.619999999999997 +2024-07-27 11:57:22,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.94 vs. limit=16.630000000000003 +2024-07-27 11:57:31,650 INFO [train.py:1114] (3/4) Epoch 1, batch 9150, loss[loss=0.4325, simple_loss=0.4801, pruned_loss=0.1925, over 4809.00 frames. ], tot_loss[loss=0.3633, simple_loss=0.4069, pruned_loss=0.1599, over 936620.39 frames. 
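
The `Computing validation loss` block above (the `train.py:1137` / `train.py:1146` / `train.py:1147` lines) shows the recipe pausing training at fixed batch intervals to run a full pass over the held-out set, reporting the same loss decomposition (`loss`, `simple_loss`, `pruned_loss`) aggregated over all validation frames, plus the peak GPU memory. A minimal sketch of that pattern; `compute_loss` is a hypothetical helper returning a summed loss tensor and the number of frames it covers:

```python
import logging
import torch

def validate(model, valid_loader, compute_loss, device):
    """Mid-training validation pass, as in the 'Computing validation loss'
    block above. `compute_loss` is a hypothetical stand-in returning
    (loss_sum, num_frames) for one batch.
    """
    logging.info("Computing validation loss")
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss_sum, num_frames = compute_loss(model, batch)
            tot_loss += loss_sum.item()
            tot_frames += num_frames
    model.train()
    logging.info(f"validation: loss={tot_loss / tot_frames:.4f}, "
                 f"over {tot_frames:.2f} frames.")
    if device.type == "cuda":
        mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        logging.info(f"Maximum memory allocated so far is {mb}MB")
```
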
], batch size: 14, lr: 3.58e-02, grad_scale: 64.0 +2024-07-27 11:57:35,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=12200.0, ans=0.015833333333333338 +2024-07-27 11:57:35,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=12200.0, ans=0.125 +2024-07-27 11:57:39,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12213.333333333334, ans=0.125 +2024-07-27 11:57:55,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=12240.0, ans=0.01566666666666667 +2024-07-27 11:57:56,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=12240.0, ans=0.01566666666666667 +2024-07-27 11:57:56,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=12240.0, ans=0.008208695652173914 +2024-07-27 11:58:04,616 INFO [train.py:1114] (3/4) Epoch 1, batch 9200, loss[loss=0.3435, simple_loss=0.3953, pruned_loss=0.1459, over 4852.00 frames. ], tot_loss[loss=0.3625, simple_loss=0.4061, pruned_loss=0.1595, over 938511.05 frames. ], batch size: 12, lr: 3.58e-02, grad_scale: 64.0 +2024-07-27 11:58:04,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12266.666666666666, ans=0.125 +2024-07-27 11:58:07,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.834e+01 6.731e+01 7.265e+01 8.123e+01 1.608e+02, threshold=1.453e+02, percent-clipped=1.0 +2024-07-27 11:58:14,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=12280.0, ans=0.0082 +2024-07-27 11:58:15,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=12280.0, ans=0.125 +2024-07-27 11:58:22,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=12293.333333333334, ans=0.015444444444444441 +2024-07-27 11:58:27,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=4.63 vs. limit=12.115 +2024-07-27 11:58:28,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=12306.666666666666, ans=0.125 +2024-07-27 11:58:31,943 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 11:58:32,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=12306.666666666666, ans=0.125 +2024-07-27 11:58:35,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.57 vs. limit=16.740000000000002 +2024-07-27 11:58:40,675 INFO [train.py:1114] (3/4) Epoch 1, batch 9250, loss[loss=0.3418, simple_loss=0.4037, pruned_loss=0.1399, over 4641.00 frames. ], tot_loss[loss=0.3624, simple_loss=0.4064, pruned_loss=0.1592, over 939096.76 frames. 
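
The `Whitening:` entries report, for named activations such as `self_attn2.whiten` or `feed_forward1.out_whiten`, a measured whiteness metric against a scheduled limit; a penalty gradient is applied when the metric exceeds the limit, discouraging feature covariances that collapse onto a few directions. One plausible way to compute such a metric is the ratio below, which equals 1.0 when the per-group covariance is a multiple of the identity and grows with anisotropy (a sketch; the recipe's exact normalization may differ):

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Scale-invariant whiteness measure of features x with shape (N, C).

    Equals 1.0 when the per-group covariance is proportional to the
    identity and grows as variance concentrates in fewer directions;
    a sketch matching the `Whitening: ... metric=X vs. limit=Y` lines,
    though the recipe's exact normalization may differ.
    """
    n, c = x.shape
    cpg = c // num_groups                                # channels per group
    xg = x.reshape(n, num_groups, cpg).permute(1, 0, 2)  # (G, N, cpg)
    cov = xg.transpose(1, 2) @ xg / n                    # (G, cpg, cpg)
    mean_diag = cov.diagonal(dim1=1, dim2=2).mean()
    mean_sq = (cov ** 2).sum() / (num_groups * cpg)
    return mean_sq / (mean_diag ** 2 + 1e-20)

x = torch.randn(10000, 256)
print(whitening_metric(x))          # ~1.0: already white
y = x[:, :1].repeat(1, 256)         # perfectly correlated channels
print(whitening_metric(y))          # ~256: far from white, would be penalized
```
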
], batch size: 13, lr: 3.57e-02, grad_scale: 64.0 +2024-07-27 11:58:47,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=12346.666666666666, ans=0.008185507246376811 +2024-07-27 11:58:50,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.54 vs. limit=11.173333333333332 +2024-07-27 11:58:53,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=12360.0, ans=0.125 +2024-07-27 11:58:53,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=12.135 +2024-07-27 11:59:06,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=12373.333333333334, ans=0.07 +2024-07-27 11:59:14,660 INFO [train.py:1114] (3/4) Epoch 1, batch 9300, loss[loss=0.3712, simple_loss=0.4058, pruned_loss=0.1683, over 4774.00 frames. ], tot_loss[loss=0.3618, simple_loss=0.4056, pruned_loss=0.159, over 938955.62 frames. ], batch size: 12, lr: 3.57e-02, grad_scale: 64.0 +2024-07-27 11:59:17,481 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.284e+01 6.919e+01 7.820e+01 8.678e+01 1.247e+02, threshold=1.564e+02, percent-clipped=0.0 +2024-07-27 11:59:22,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=12413.333333333334, ans=0.125 +2024-07-27 11:59:24,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=12413.333333333334, ans=0.125 +2024-07-27 11:59:32,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12426.666666666666, ans=0.17573333333333335 +2024-07-27 11:59:33,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=12426.666666666666, ans=0.125 +2024-07-27 11:59:34,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.33 vs. limit=16.83 +2024-07-27 11:59:38,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=5.99 vs. limit=6.4879999999999995 +2024-07-27 11:59:40,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=12440.0, ans=0.125 +2024-07-27 11:59:48,174 INFO [train.py:1114] (3/4) Epoch 1, batch 9350, loss[loss=0.3555, simple_loss=0.3946, pruned_loss=0.1582, over 4795.00 frames. ], tot_loss[loss=0.364, simple_loss=0.4073, pruned_loss=0.1603, over 935486.83 frames. ], batch size: 11, lr: 3.56e-02, grad_scale: 64.0 +2024-07-27 11:59:50,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=12466.666666666666, ans=0.025 +2024-07-27 11:59:53,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12466.666666666666, ans=0.17533333333333334 +2024-07-27 11:59:57,436 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.56 vs. 
limit=12.18 +2024-07-27 12:00:01,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=12493.333333333334, ans=0.125 +2024-07-27 12:00:06,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.14 vs. limit=16.869999999999997 +2024-07-27 12:00:14,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=12520.0, ans=0.125 +2024-07-27 12:00:14,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=12520.0, ans=0.38780000000000003 +2024-07-27 12:00:21,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=12533.333333333334, ans=0.008144927536231885 +2024-07-27 12:00:22,083 INFO [train.py:1114] (3/4) Epoch 1, batch 9400, loss[loss=0.4285, simple_loss=0.4635, pruned_loss=0.1967, over 4697.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.4083, pruned_loss=0.1611, over 933398.43 frames. ], batch size: 13, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:00:24,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.848e+01 6.575e+01 7.346e+01 8.658e+01 2.018e+02, threshold=1.469e+02, percent-clipped=2.0 +2024-07-27 12:00:26,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=12533.333333333334, ans=0.125 +2024-07-27 12:00:26,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.21 vs. limit=16.9 +2024-07-27 12:00:28,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.60 vs. limit=12.205 +2024-07-27 12:00:31,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=12546.666666666666, ans=0.014388888888888896 +2024-07-27 12:00:40,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12560.0, ans=0.1744 +2024-07-27 12:00:45,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.23 vs. limit=16.93 +2024-07-27 12:00:46,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=12573.333333333334, ans=0.014277777777777771 +2024-07-27 12:00:47,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.39 vs. limit=16.93 +2024-07-27 12:00:56,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.24 vs. limit=9.034666666666666 +2024-07-27 12:00:57,888 INFO [train.py:1114] (3/4) Epoch 1, batch 9450, loss[loss=0.3034, simple_loss=0.3496, pruned_loss=0.1286, over 4811.00 frames. ], tot_loss[loss=0.3651, simple_loss=0.4086, pruned_loss=0.1608, over 932839.85 frames. 
], batch size: 11, lr: 3.55e-02, grad_scale: 64.0 +2024-07-27 12:01:09,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=12613.333333333334, ans=0.45853333333333335 +2024-07-27 12:01:15,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=12626.666666666666, ans=0.125 +2024-07-27 12:01:16,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.88 vs. limit=4.894 +2024-07-27 12:01:33,365 INFO [train.py:1114] (3/4) Epoch 1, batch 9500, loss[loss=0.3598, simple_loss=0.3946, pruned_loss=0.1625, over 4702.00 frames. ], tot_loss[loss=0.3674, simple_loss=0.4104, pruned_loss=0.1622, over 934728.28 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:01:35,890 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.605e+01 6.746e+01 7.341e+01 8.122e+01 1.206e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 12:01:44,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=12680.0, ans=0.00811304347826087 +2024-07-27 12:02:02,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=12693.333333333334, ans=0.008110144927536232 +2024-07-27 12:02:31,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=23.38 vs. limit=12.27 +2024-07-27 12:02:40,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=12720.0, ans=0.125 +2024-07-27 12:02:42,666 INFO [train.py:1114] (3/4) Epoch 1, batch 9550, loss[loss=0.3102, simple_loss=0.3637, pruned_loss=0.1283, over 4779.00 frames. ], tot_loss[loss=0.3636, simple_loss=0.4075, pruned_loss=0.1599, over 932174.13 frames. ], batch size: 12, lr: 3.54e-02, grad_scale: 64.0 +2024-07-27 12:02:47,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=12733.333333333334, ans=0.125 +2024-07-27 12:02:49,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=12733.333333333334, ans=0.17266666666666666 +2024-07-27 12:03:03,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12760.0, ans=0.1724 +2024-07-27 12:03:07,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12760.0, ans=0.1724 +2024-07-27 12:03:09,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=12760.0, ans=0.125 +2024-07-27 12:03:19,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=12786.666666666666, ans=0.013388888888888895 +2024-07-27 12:03:24,184 INFO [train.py:1114] (3/4) Epoch 1, batch 9600, loss[loss=0.469, simple_loss=0.4742, pruned_loss=0.232, over 3191.00 frames. ], tot_loss[loss=0.3627, simple_loss=0.4072, pruned_loss=0.1591, over 930977.32 frames. 
], batch size: 35, lr: 3.53e-02, grad_scale: 64.0 +2024-07-27 12:03:30,716 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.771e+01 7.099e+01 8.382e+01 1.458e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 12:03:31,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.06 vs. limit=12.3 +2024-07-27 12:03:35,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.46 vs. limit=12.305 +2024-07-27 12:03:37,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=12813.333333333334, ans=0.025 +2024-07-27 12:03:45,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=12826.666666666666, ans=0.125 +2024-07-27 12:03:47,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.93 vs. limit=12.309999999999999 +2024-07-27 12:03:52,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=12840.0, ans=0.008078260869565217 +2024-07-27 12:03:55,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=12853.333333333334, ans=0.125 +2024-07-27 12:04:00,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=12853.333333333334, ans=0.013111111111111108 +2024-07-27 12:04:01,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=12853.333333333334, ans=0.17146666666666666 +2024-07-27 12:04:01,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=12853.333333333334, ans=0.125 +2024-07-27 12:04:11,708 INFO [train.py:1114] (3/4) Epoch 1, batch 9650, loss[loss=0.4125, simple_loss=0.4508, pruned_loss=0.1871, over 4837.00 frames. ], tot_loss[loss=0.3642, simple_loss=0.4076, pruned_loss=0.1603, over 926593.07 frames. ], batch size: 16, lr: 3.53e-02, grad_scale: 64.0 +2024-07-27 12:04:14,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.70 vs. limit=11.433333333333334 +2024-07-27 12:04:32,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=12893.333333333334, ans=0.008066666666666666 +2024-07-27 12:04:46,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.67 vs. limit=17.189999999999998 +2024-07-27 12:04:49,098 INFO [train.py:1114] (3/4) Epoch 1, batch 9700, loss[loss=0.3846, simple_loss=0.4028, pruned_loss=0.1833, over 4358.00 frames. ], tot_loss[loss=0.3641, simple_loss=0.4077, pruned_loss=0.1603, over 924196.66 frames. 
], batch size: 26, lr: 3.52e-02, grad_scale: 64.0 +2024-07-27 12:04:51,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=12933.333333333334, ans=0.125 +2024-07-27 12:04:52,735 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.794e+01 6.661e+01 7.332e+01 8.273e+01 1.352e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 12:04:57,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=12946.666666666666, ans=0.17053333333333334 +2024-07-27 12:05:19,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=12973.333333333334, ans=0.125 +2024-07-27 12:05:36,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=12986.666666666666, ans=0.125 +2024-07-27 12:05:52,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=12986.666666666666, ans=0.008046376811594202 +2024-07-27 12:05:52,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=12986.666666666666, ans=12.370000000000001 +2024-07-27 12:05:53,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=12986.666666666666, ans=0.025 +2024-07-27 12:05:57,817 INFO [train.py:1114] (3/4) Epoch 1, batch 9750, loss[loss=0.3964, simple_loss=0.4307, pruned_loss=0.1811, over 4693.00 frames. ], tot_loss[loss=0.3634, simple_loss=0.4071, pruned_loss=0.1598, over 925240.84 frames. ], batch size: 15, lr: 3.51e-02, grad_scale: 64.0 +2024-07-27 12:06:00,100 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=12.375 +2024-07-27 12:06:00,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.04 vs. limit=4.95 +2024-07-27 12:06:02,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=13000.0, ans=0.445 +2024-07-27 12:06:03,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=13000.0, ans=0.125 +2024-07-27 12:06:19,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=13013.333333333334, ans=0.3952 +2024-07-27 12:06:33,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=13026.666666666666, ans=0.00803768115942029 +2024-07-27 12:06:45,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=13053.333333333334, ans=0.008031884057971015 +2024-07-27 12:06:48,519 INFO [train.py:1114] (3/4) Epoch 1, batch 9800, loss[loss=0.3687, simple_loss=0.4055, pruned_loss=0.1659, over 4711.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.4063, pruned_loss=0.16, over 924634.36 frames. 
], batch size: 12, lr: 3.51e-02, grad_scale: 64.0 +2024-07-27 12:06:50,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=13066.666666666666, ans=0.8806666666666666 +2024-07-27 12:06:51,240 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.855e+01 7.493e+01 8.291e+01 1.245e+02, threshold=1.499e+02, percent-clipped=0.0 +2024-07-27 12:06:59,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13066.666666666666, ans=0.125 +2024-07-27 12:07:15,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.44 vs. limit=12.415 +2024-07-27 12:07:19,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=4.28 vs. limit=12.415 +2024-07-27 12:07:24,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=13120.0, ans=0.012000000000000004 +2024-07-27 12:07:26,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13120.0, ans=0.125 +2024-07-27 12:07:31,389 INFO [train.py:1114] (3/4) Epoch 1, batch 9850, loss[loss=0.4542, simple_loss=0.4811, pruned_loss=0.2136, over 4904.00 frames. ], tot_loss[loss=0.3639, simple_loss=0.4075, pruned_loss=0.1602, over 926813.84 frames. ], batch size: 15, lr: 3.50e-02, grad_scale: 64.0 +2024-07-27 12:07:32,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=13133.333333333334, ans=0.04949747468305833 +2024-07-27 12:07:32,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=12.425 +2024-07-27 12:07:33,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=13133.333333333334, ans=0.011944444444444438 +2024-07-27 12:07:36,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.68 vs. limit=11.566666666666666 +2024-07-27 12:07:43,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.53 vs. limit=12.43 +2024-07-27 12:07:45,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.72 vs. 
limit=17.369999999999997 +2024-07-27 12:07:48,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=13160.0, ans=0.3974 +2024-07-27 12:07:49,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten.whitening_limit, batch_count=13160.0, ans=17.369999999999997 +2024-07-27 12:08:03,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13186.666666666666, ans=0.16813333333333333 +2024-07-27 12:08:06,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=13186.666666666666, ans=0.025 +2024-07-27 12:08:13,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.18 vs. limit=12.445 +2024-07-27 12:08:15,397 INFO [train.py:1114] (3/4) Epoch 1, batch 9900, loss[loss=0.3618, simple_loss=0.4068, pruned_loss=0.1584, over 4845.00 frames. ], tot_loss[loss=0.364, simple_loss=0.4072, pruned_loss=0.1604, over 926341.83 frames. ], batch size: 16, lr: 3.50e-02, grad_scale: 64.0 +2024-07-27 12:08:15,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=13200.0, ans=0.125 +2024-07-27 12:08:17,903 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.801e+01 7.469e+01 8.450e+01 1.233e+02, threshold=1.494e+02, percent-clipped=0.0 +2024-07-27 12:08:34,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.46 vs. limit=17.42 +2024-07-27 12:08:42,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=13240.0, ans=0.125 +2024-07-27 12:08:52,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=13253.333333333334, ans=0.125 +2024-07-27 12:08:54,180 INFO [train.py:1114] (3/4) Epoch 1, batch 9950, loss[loss=0.2469, simple_loss=0.3149, pruned_loss=0.08948, over 4528.00 frames. ], tot_loss[loss=0.3641, simple_loss=0.4069, pruned_loss=0.1607, over 928767.99 frames. ], batch size: 10, lr: 3.49e-02, grad_scale: 64.0 +2024-07-27 12:09:05,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=13280.0, ans=0.07 +2024-07-27 12:09:06,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.91 vs. limit=12.48 +2024-07-27 12:09:23,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=13320.0, ans=0.125 +2024-07-27 12:09:24,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13320.0, ans=0.16679999999999998 +2024-07-27 12:09:25,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=13320.0, ans=0.16679999999999998 +2024-07-27 12:09:27,213 INFO [train.py:1114] (3/4) Epoch 1, batch 10000, loss[loss=0.3418, simple_loss=0.3959, pruned_loss=0.1438, over 4632.00 frames. ], tot_loss[loss=0.3672, simple_loss=0.4106, pruned_loss=0.1619, over 926727.69 frames. 
], batch size: 16, lr: 3.49e-02, grad_scale: 64.0 +2024-07-27 12:09:29,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.713e+01 6.862e+01 7.247e+01 8.214e+01 1.240e+02, threshold=1.449e+02, percent-clipped=0.0 +2024-07-27 12:09:33,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=13346.666666666666, ans=0.125 +2024-07-27 12:09:45,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13360.0, ans=0.1664 +2024-07-27 12:09:48,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.96 vs. limit=8.343333333333334 +2024-07-27 12:10:01,315 INFO [train.py:1114] (3/4) Epoch 1, batch 10050, loss[loss=0.4978, simple_loss=0.4884, pruned_loss=0.2536, over 3413.00 frames. ], tot_loss[loss=0.3722, simple_loss=0.4144, pruned_loss=0.165, over 915013.81 frames. ], batch size: 36, lr: 3.48e-02, grad_scale: 64.0 +2024-07-27 12:10:01,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=13400.0, ans=0.125 +2024-07-27 12:10:01,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=12.46 vs. limit=9.36 +2024-07-27 12:10:05,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=13400.0, ans=0.09899494936611666 +2024-07-27 12:10:21,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=13413.333333333334, ans=0.125 +2024-07-27 12:10:30,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=13426.666666666666, ans=0.07 +2024-07-27 12:10:30,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13426.666666666666, ans=0.125 +2024-07-27 12:10:42,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=13453.333333333334, ans=0.125 +2024-07-27 12:10:42,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=13453.333333333334, ans=0.125 +2024-07-27 12:10:46,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=13466.666666666666, ans=0.010555555555555561 +2024-07-27 12:10:46,729 INFO [train.py:1114] (3/4) Epoch 1, batch 10100, loss[loss=0.4728, simple_loss=0.4674, pruned_loss=0.2391, over 3660.00 frames. ], tot_loss[loss=0.3874, simple_loss=0.4229, pruned_loss=0.1759, over 860484.72 frames. 
], batch size: 36, lr: 3.47e-02, grad_scale: 64.0 +2024-07-27 12:10:49,394 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.041e+01 6.990e+01 7.547e+01 8.268e+01 1.617e+02, threshold=1.509e+02, percent-clipped=1.0 +2024-07-27 12:10:55,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=13480.0, ans=0.125 +2024-07-27 12:10:58,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=13480.0, ans=0.010500000000000002 +2024-07-27 12:11:00,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.27 vs. limit=12.555 +2024-07-27 12:11:05,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.28 vs. limit=12.559999999999999 +2024-07-27 12:11:09,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.72 vs. limit=12.559999999999999 +2024-07-27 12:11:23,841 INFO [train.py:1114] (3/4) Epoch 1, batch 10150, loss[loss=0.4788, simple_loss=0.4838, pruned_loss=0.2369, over 3607.00 frames. ], tot_loss[loss=0.3956, simple_loss=0.4273, pruned_loss=0.1819, over 820876.23 frames. ], batch size: 35, lr: 3.47e-02, grad_scale: 64.0 +2024-07-27 12:11:26,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=13533.333333333334, ans=0.07 +2024-07-27 12:11:36,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=13546.666666666666, ans=0.8854666666666666 +2024-07-27 12:11:37,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13546.666666666666, ans=0.16453333333333334 +2024-07-27 12:11:40,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=13546.666666666666, ans=0.4258666666666667 +2024-07-27 12:11:42,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=13560.0, ans=0.1644 +2024-07-27 12:11:52,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=8.14 vs. limit=8.393333333333334 +2024-07-27 12:11:58,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=13586.666666666666, ans=0.2 +2024-07-27 12:12:02,429 INFO [train.py:1114] (3/4) Epoch 1, batch 10200, loss[loss=0.517, simple_loss=0.5027, pruned_loss=0.2657, over 3411.00 frames. ], tot_loss[loss=0.4052, simple_loss=0.4326, pruned_loss=0.1889, over 787792.06 frames. ], batch size: 35, lr: 3.46e-02, grad_scale: 64.0 +2024-07-27 12:12:04,946 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.695e+01 6.612e+01 7.159e+01 7.876e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 12:12:54,724 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.77 vs. 
limit=17.722 +2024-07-27 12:13:02,329 INFO [train.py:1114] (3/4) Epoch 2, batch 0, loss[loss=0.3695, simple_loss=0.4295, pruned_loss=0.1547, over 4849.00 frames. ], tot_loss[loss=0.3695, simple_loss=0.4295, pruned_loss=0.1547, over 4849.00 frames. ], batch size: 12, lr: 3.39e-02, grad_scale: 64.0 +2024-07-27 12:13:02,329 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 12:13:07,310 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.2264, 1.9137, 2.5424, 2.7712, 2.9014, 1.7111, 2.6827, 1.4450], + device='cuda:3') +2024-07-27 12:13:12,273 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.0461, 5.7985, 5.8615, 5.9067], device='cuda:3') +2024-07-27 12:13:13,917 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.3005, simple_loss=0.3865, pruned_loss=0.1073, over 944034.00 frames. +2024-07-27 12:13:13,918 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 12:13:22,324 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.55 vs. limit=17.732 +2024-07-27 12:13:24,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=13642.666666666666, ans=0.8864266666666666 +2024-07-27 12:13:32,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=13656.0, ans=0.125 +2024-07-27 12:13:49,241 INFO [train.py:1114] (3/4) Epoch 2, batch 50, loss[loss=0.2741, simple_loss=0.3348, pruned_loss=0.1067, over 4616.00 frames. ], tot_loss[loss=0.3742, simple_loss=0.4173, pruned_loss=0.1656, over 206320.42 frames. ], batch size: 11, lr: 3.39e-02, grad_scale: 64.0 +2024-07-27 12:13:52,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=13696.0, ans=0.16304 +2024-07-27 12:13:54,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.43 vs. limit=5.054399999999999 +2024-07-27 12:14:14,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=13736.0, ans=0.125 +2024-07-27 12:14:16,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.870e+01 6.791e+01 7.517e+01 8.543e+01 1.783e+02, threshold=1.503e+02, percent-clipped=1.0 +2024-07-27 12:14:23,919 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:14:24,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.79 vs. limit=17.811999999999998 +2024-07-27 12:14:29,091 INFO [train.py:1114] (3/4) Epoch 2, batch 100, loss[loss=0.3735, simple_loss=0.415, pruned_loss=0.166, over 4635.00 frames. ], tot_loss[loss=0.3727, simple_loss=0.4169, pruned_loss=0.1642, over 365270.30 frames. 
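
The `zipformer.py:1858` diagnostics printed during validation (`attn_weights_entropy = tensor([...])`) give, presumably one value per attention head, the average entropy of the attention distribution for selected layers: values near zero indicate heads locked onto single positions, large values indicate near-uniform attention, so the printout serves as a quick collapse check. A sketch of such a diagnostic, assuming weights of shape `(num_heads, tgt_len, src_len)` that already sum to one over the source axis:

```python
import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """Mean entropy of each head's attention distribution.

    attn_weights: (num_heads, tgt_len, src_len), rows summing to one.
    Returns one value per head, like the logged
    `attn_weights_entropy = tensor([...])` diagnostics.
    """
    p = attn_weights.clamp(min=1e-20)
    entropy = -(p * p.log()).sum(dim=-1)   # (num_heads, tgt_len)
    return entropy.mean(dim=-1)            # average over query positions

weights = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(weights))       # eight per-head entropies
```
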
], batch size: 12, lr: 3.38e-02, grad_scale: 64.0 +2024-07-27 12:14:34,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=13762.666666666666, ans=0.009322222222222225 +2024-07-27 12:14:35,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13776.0, ans=0.125 +2024-07-27 12:14:37,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.54 vs. limit=17.832 +2024-07-27 12:14:40,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13776.0, ans=0.125 +2024-07-27 12:14:40,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=13776.0, ans=0.16224 +2024-07-27 12:14:50,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=13802.666666666666, ans=0.125 +2024-07-27 12:14:54,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=12.676 +2024-07-27 12:14:55,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=13802.666666666666, ans=0.009155555555555556 +2024-07-27 12:15:02,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.24 vs. limit=11.908000000000001 +2024-07-27 12:15:04,096 INFO [train.py:1114] (3/4) Epoch 2, batch 150, loss[loss=0.3085, simple_loss=0.3626, pruned_loss=0.1272, over 4601.00 frames. ], tot_loss[loss=0.3628, simple_loss=0.4091, pruned_loss=0.1582, over 493956.55 frames. ], batch size: 11, lr: 3.38e-02, grad_scale: 64.0 +2024-07-27 12:15:05,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=13829.333333333334, ans=0.125 +2024-07-27 12:15:08,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=13829.333333333334, ans=0.007863188405797101 +2024-07-27 12:15:10,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=13842.666666666666, ans=0.125 +2024-07-27 12:15:10,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=12.690999999999999 +2024-07-27 12:15:18,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=9.5424 +2024-07-27 12:15:26,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.728e+01 6.633e+01 7.371e+01 8.455e+01 1.546e+02, threshold=1.474e+02, percent-clipped=1.0 +2024-07-27 12:15:34,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.98 vs. limit=5.0824 +2024-07-27 12:15:37,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.29 vs. 
limit=11.941333333333333 +2024-07-27 12:15:38,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=13896.0, ans=0.16104 +2024-07-27 12:15:38,909 INFO [train.py:1114] (3/4) Epoch 2, batch 200, loss[loss=0.3332, simple_loss=0.3863, pruned_loss=0.14, over 4543.00 frames. ], tot_loss[loss=0.3585, simple_loss=0.4054, pruned_loss=0.1557, over 593496.86 frames. ], batch size: 21, lr: 3.37e-02, grad_scale: 64.0 +2024-07-27 12:15:48,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=13909.333333333334, ans=0.125 +2024-07-27 12:15:52,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.97 vs. limit=17.942 +2024-07-27 12:16:13,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.15 vs. limit=8.487333333333334 +2024-07-27 12:16:14,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=13949.333333333334, ans=0.04949747468305833 +2024-07-27 12:16:15,234 INFO [train.py:1114] (3/4) Epoch 2, batch 250, loss[loss=0.3876, simple_loss=0.4429, pruned_loss=0.1662, over 4649.00 frames. ], tot_loss[loss=0.3562, simple_loss=0.4037, pruned_loss=0.1543, over 670190.26 frames. ], batch size: 16, lr: 3.37e-02, grad_scale: 64.0 +2024-07-27 12:16:34,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=13989.333333333334, ans=0.41037333333333337 +2024-07-27 12:16:35,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=13989.333333333334, ans=0.125 +2024-07-27 12:16:37,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=13989.333333333334, ans=0.125 +2024-07-27 12:16:41,807 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.968e+01 6.499e+01 7.152e+01 7.948e+01 1.053e+02, threshold=1.430e+02, percent-clipped=0.0 +2024-07-27 12:16:48,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.21 vs. limit=9.601066666666666 +2024-07-27 12:16:56,553 INFO [train.py:1114] (3/4) Epoch 2, batch 300, loss[loss=0.3385, simple_loss=0.395, pruned_loss=0.141, over 4799.00 frames. ], tot_loss[loss=0.3532, simple_loss=0.4009, pruned_loss=0.1528, over 729993.45 frames. ], batch size: 15, lr: 3.36e-02, grad_scale: 64.0 +2024-07-27 12:16:57,364 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:17:01,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=14029.333333333334, ans=0.125 +2024-07-27 12:17:02,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=14042.666666666666, ans=0.125 +2024-07-27 12:17:05,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.02 vs. 
limit=12.021333333333333 +2024-07-27 12:17:07,028 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.23 vs. limit=12.766 +2024-07-27 12:17:10,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=14056.0, ans=0.0 +2024-07-27 12:17:25,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.74 vs. limit=9.633066666666666 +2024-07-27 12:17:25,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.64 vs. limit=12.780999999999999 +2024-07-27 12:17:31,949 INFO [train.py:1114] (3/4) Epoch 2, batch 350, loss[loss=0.365, simple_loss=0.3917, pruned_loss=0.1691, over 4933.00 frames. ], tot_loss[loss=0.353, simple_loss=0.401, pruned_loss=0.1526, over 776171.90 frames. ], batch size: 12, lr: 3.36e-02, grad_scale: 64.0 +2024-07-27 12:17:36,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=14096.0, ans=0.125 +2024-07-27 12:17:36,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=14096.0, ans=0.125 +2024-07-27 12:17:41,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.86 vs. limit=12.791 +2024-07-27 12:17:43,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.19 vs. limit=12.054666666666666 +2024-07-27 12:17:51,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=14122.666666666666, ans=0.125 +2024-07-27 12:17:57,626 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.955e+01 6.715e+01 7.349e+01 8.005e+01 1.409e+02, threshold=1.470e+02, percent-clipped=0.0 +2024-07-27 12:18:10,146 INFO [train.py:1114] (3/4) Epoch 2, batch 400, loss[loss=0.3839, simple_loss=0.4266, pruned_loss=0.1706, over 4691.00 frames. ], tot_loss[loss=0.353, simple_loss=0.4009, pruned_loss=0.1525, over 813798.52 frames. ], batch size: 13, lr: 3.35e-02, grad_scale: 64.0 +2024-07-27 12:18:13,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=14162.666666666666, ans=0.09899494936611666 +2024-07-27 12:18:15,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=14162.666666666666, ans=0.125 +2024-07-27 12:18:26,518 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:18:32,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.90 vs. limit=18.152 +2024-07-27 12:18:45,070 INFO [train.py:1114] (3/4) Epoch 2, batch 450, loss[loss=0.3604, simple_loss=0.4222, pruned_loss=0.1493, over 4637.00 frames. ], tot_loss[loss=0.3528, simple_loss=0.4008, pruned_loss=0.1524, over 839469.57 frames. 
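(The `ScheduledFloat` entries scattered through the log record hyperparameters — dropout probabilities, skip rates, balancer bounds — that are interpolated piecewise-linearly in `batch_count`. The logged `ans` values for the `*.dropout_p` names, for instance, fit a straight line from 0.3 at batch 0 to 0.1 at batch 20000. A minimal re-implementation for reference; the real class is `ScheduledFloat` in icefall's `scaling.py`, and the schedule points below are inferred from the logged values, not copied from the recipe:)

```python
import bisect

class PiecewiseSchedule:
    """Piecewise-linear value of batch_count, clamped at the end points."""
    def __init__(self, *points):                 # points: (batch_count, value)
        self.xs = [float(p[0]) for p in points]
        self.ys = [float(p[1]) for p in points]

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count) - 1
        t = (batch_count - self.xs[i]) / (self.xs[i + 1] - self.xs[i])
        return self.ys[i] + t * (self.ys[i + 1] - self.ys[i])

dropout_p = PiecewiseSchedule((0, 0.3), (20000, 0.1))
print(dropout_p(14402.666666666666))   # 0.155973... matches the logged `ans`
```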
], batch size: 13, lr: 3.35e-02, grad_scale: 64.0 +2024-07-27 12:18:51,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=14242.666666666666, ans=0.007322222222222223 +2024-07-27 12:18:57,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=14242.666666666666, ans=0.007322222222222223 +2024-07-27 12:19:02,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=14256.0, ans=0.007266666666666664 +2024-07-27 12:19:06,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.65 vs. limit=18.201999999999998 +2024-07-27 12:19:07,276 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.533e+01 6.535e+01 7.099e+01 8.060e+01 1.224e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 12:19:13,302 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=9.713066666666666 +2024-07-27 12:19:19,767 INFO [train.py:1114] (3/4) Epoch 2, batch 500, loss[loss=0.367, simple_loss=0.4261, pruned_loss=0.154, over 4666.00 frames. ], tot_loss[loss=0.3515, simple_loss=0.3995, pruned_loss=0.1518, over 861698.87 frames. ], batch size: 15, lr: 3.34e-02, grad_scale: 64.0 +2024-07-27 12:19:22,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=14296.0, ans=0.125 +2024-07-27 12:19:29,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=14309.333333333334, ans=0.41464 +2024-07-27 12:19:41,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=14322.666666666666, ans=0.035 +2024-07-27 12:19:51,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=14336.0, ans=0.006933333333333333 +2024-07-27 12:20:00,827 INFO [train.py:1114] (3/4) Epoch 2, batch 550, loss[loss=0.416, simple_loss=0.4571, pruned_loss=0.1874, over 4594.00 frames. ], tot_loss[loss=0.3514, simple_loss=0.3997, pruned_loss=0.1515, over 877687.87 frames. 
], batch size: 17, lr: 3.34e-02, grad_scale: 64.0 +2024-07-27 12:20:01,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14362.666666666666, ans=0.15637333333333334 +2024-07-27 12:20:05,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=14362.666666666666, ans=0.006822222222222223 +2024-07-27 12:20:06,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=14362.666666666666, ans=0.3973066666666668 +2024-07-27 12:20:13,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=14376.0, ans=0.125 +2024-07-27 12:20:15,870 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:20:23,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=14389.333333333334, ans=0.41584 +2024-07-27 12:20:27,924 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.749e+01 6.667e+01 7.400e+01 8.349e+01 1.588e+02, threshold=1.480e+02, percent-clipped=3.0 +2024-07-27 12:20:28,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=14402.666666666666, ans=0.15597333333333335 +2024-07-27 12:20:33,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=14416.0, ans=0.41624000000000005 +2024-07-27 12:20:34,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=14416.0, ans=0.3954400000000001 +2024-07-27 12:20:34,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14416.0, ans=0.15584 +2024-07-27 12:20:36,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=14416.0, ans=0.025 +2024-07-27 12:20:40,336 INFO [train.py:1114] (3/4) Epoch 2, batch 600, loss[loss=0.4155, simple_loss=0.4387, pruned_loss=0.1962, over 4630.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3988, pruned_loss=0.1509, over 892083.76 frames. ], batch size: 16, lr: 3.33e-02, grad_scale: 64.0 +2024-07-27 12:20:43,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=14429.333333333334, ans=0.3949733333333334 +2024-07-27 12:20:48,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=14442.666666666666, ans=0.04949747468305833 +2024-07-27 12:21:02,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=14469.333333333334, ans=0.07 +2024-07-27 12:21:04,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=14469.333333333334, ans=0.8946933333333333 +2024-07-27 12:21:14,597 INFO [train.py:1114] (3/4) Epoch 2, batch 650, loss[loss=0.3619, simple_loss=0.3992, pruned_loss=0.1623, over 4765.00 frames. ], tot_loss[loss=0.3509, simple_loss=0.3991, pruned_loss=0.1514, over 903837.19 frames. 
], batch size: 13, lr: 3.33e-02, grad_scale: 64.0 +2024-07-27 12:21:19,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=14496.0, ans=0.007718260869565218 +2024-07-27 12:21:23,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=14509.333333333334, ans=0.3921733333333334 +2024-07-27 12:21:34,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=12.946 +2024-07-27 12:21:36,639 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.647e+01 7.177e+01 7.899e+01 1.481e+02, threshold=1.435e+02, percent-clipped=1.0 +2024-07-27 12:21:38,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=14536.0, ans=0.125 +2024-07-27 12:21:40,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=14536.0, ans=0.39124000000000003 +2024-07-27 12:21:43,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=14549.333333333334, ans=0.007706666666666667 +2024-07-27 12:21:47,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=14549.333333333334, ans=0.125 +2024-07-27 12:21:49,039 INFO [train.py:1114] (3/4) Epoch 2, batch 700, loss[loss=0.3265, simple_loss=0.3695, pruned_loss=0.1417, over 4640.00 frames. ], tot_loss[loss=0.3496, simple_loss=0.398, pruned_loss=0.1506, over 911559.79 frames. ], batch size: 12, lr: 3.32e-02, grad_scale: 64.0 +2024-07-27 12:21:50,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=14562.666666666666, ans=0.007703768115942029 +2024-07-27 12:21:55,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=12.961 +2024-07-27 12:22:03,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.03 vs. limit=18.432000000000002 +2024-07-27 12:22:04,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.45 vs. limit=8.647333333333334 +2024-07-27 12:22:06,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=14589.333333333334, ans=0.0 +2024-07-27 12:22:07,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.90 vs. limit=9.835733333333334 +2024-07-27 12:22:17,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.71 vs. limit=12.301333333333332 +2024-07-27 12:22:25,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=14616.0, ans=0.125 +2024-07-27 12:22:29,301 INFO [train.py:1114] (3/4) Epoch 2, batch 750, loss[loss=0.3393, simple_loss=0.3786, pruned_loss=0.15, over 4704.00 frames. ], tot_loss[loss=0.3486, simple_loss=0.3972, pruned_loss=0.1499, over 918460.59 frames. 
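(The `Whitening:` lines are emitted when a `Whiten` module's covariance statistic exceeds its scheduled limit; the module then applies a penalty pushing the layer's features back toward an isotropic covariance. Roughly, the logged `metric` is the mean squared eigenvalue of the feature covariance divided by the squared mean eigenvalue, which equals 1.0 for perfectly white features and grows as the covariance becomes ill-conditioned. A hedged single-group sketch — illustrative only; the real computation lives in icefall's `scaling.py` and additionally handles channel groups, dtypes, and the backward-pass penalty:)

```python
import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (num_frames, num_channels); a single whitening group for simplicity
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    dim = cov.shape[0]
    # tr(C @ C) * dim / tr(C)^2  ==  mean(eig^2) / mean(eig)^2  >=  1.0
    return (cov * cov).sum() * dim / cov.diag().sum() ** 2

x = torch.randn(50_000, 64)    # near-white input
print(whitening_metric(x))     # ~1.0; the log lines fire when metric > limit
```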
], batch size: 13, lr: 3.31e-02, grad_scale: 64.0 +2024-07-27 12:22:31,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14629.333333333334, ans=0.125 +2024-07-27 12:22:53,343 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.645e+01 6.839e+01 7.355e+01 8.149e+01 1.440e+02, threshold=1.471e+02, percent-clipped=1.0 +2024-07-27 12:22:53,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=14669.333333333334, ans=0.125 +2024-07-27 12:22:55,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.46 vs. limit=13.001000000000001 +2024-07-27 12:22:57,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=14669.333333333334, ans=18.502000000000002 +2024-07-27 12:22:58,392 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:23:04,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=14682.666666666666, ans=0.025 +2024-07-27 12:23:05,826 INFO [train.py:1114] (3/4) Epoch 2, batch 800, loss[loss=0.3197, simple_loss=0.3732, pruned_loss=0.1331, over 4856.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3966, pruned_loss=0.1494, over 923624.91 frames. ], batch size: 12, lr: 3.31e-02, grad_scale: 128.0 +2024-07-27 12:23:09,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.08 vs. limit=13.011 +2024-07-27 12:23:12,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14709.333333333334, ans=0.15290666666666666 +2024-07-27 12:23:18,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=14722.666666666666, ans=0.0076689855072463765 +2024-07-27 12:23:40,057 INFO [train.py:1114] (3/4) Epoch 2, batch 850, loss[loss=0.3712, simple_loss=0.4166, pruned_loss=0.1629, over 4649.00 frames. ], tot_loss[loss=0.3488, simple_loss=0.3974, pruned_loss=0.1501, over 927743.46 frames. 
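(The recurring `optim.py:487` WARNINGs are diagnostics from the optimizer's gradient clipping: the five numbers are the min/25%/50%/75%/max of recent whole-model gradient norms, and with `Clipping_scale=2.0` the printed threshold is twice the median in these entries — e.g. 2.0 × 7.139e+01 ≈ 1.428e+02 just above — while `percent-clipped` is the share of recent batches whose norm exceeded it. A minimal sketch of that report; names are illustrative and the real logic sits in icefall's `ScaledAdam`:)

```python
import numpy as np

def clipping_report(recent_grad_norms, clipping_scale: float = 2.0):
    norms = np.asarray(recent_grad_norms, dtype=np.float64)
    quartiles = np.quantile(norms, [0.0, 0.25, 0.5, 0.75, 1.0])
    threshold = clipping_scale * quartiles[2]          # 2x the median norm
    percent_clipped = 100.0 * float((norms > threshold).mean())
    return quartiles, threshold, percent_clipped
    # gradients above `threshold` would be rescaled down before the step
```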
], batch size: 14, lr: 3.30e-02, grad_scale: 64.0 +2024-07-27 12:23:40,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=14762.666666666666, ans=0.3833066666666668 +2024-07-27 12:23:40,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=14762.666666666666, ans=0.007660289855072464 +2024-07-27 12:23:42,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=14762.666666666666, ans=0.005155555555555559 +2024-07-27 12:23:44,347 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=1.616e-01 +2024-07-27 12:23:51,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=14776.0, ans=0.125 +2024-07-27 12:24:02,971 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.532e+01 6.555e+01 7.139e+01 7.731e+01 1.156e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 12:24:03,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=14802.666666666666, ans=0.125 +2024-07-27 12:24:08,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.60 vs. limit=13.056000000000001 +2024-07-27 12:24:08,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=14816.0, ans=0.3814400000000001 +2024-07-27 12:24:11,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=71.07 vs. limit=13.056000000000001 +2024-07-27 12:24:15,203 INFO [train.py:1114] (3/4) Epoch 2, batch 900, loss[loss=0.3136, simple_loss=0.3732, pruned_loss=0.127, over 4857.00 frames. ], tot_loss[loss=0.3488, simple_loss=0.3976, pruned_loss=0.15, over 928551.94 frames. ], batch size: 12, lr: 3.30e-02, grad_scale: 64.0 +2024-07-27 12:24:19,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=14829.333333333334, ans=0.125 +2024-07-27 12:24:20,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=14829.333333333334, ans=0.15170666666666668 +2024-07-27 12:24:38,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten.whitening_limit, batch_count=14869.333333333334, ans=13.076 +2024-07-27 12:24:42,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=14882.666666666666, ans=0.125 +2024-07-27 12:24:50,049 INFO [train.py:1114] (3/4) Epoch 2, batch 950, loss[loss=0.3103, simple_loss=0.3537, pruned_loss=0.1335, over 4775.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.3963, pruned_loss=0.1492, over 930626.96 frames. ], batch size: 12, lr: 3.29e-02, grad_scale: 64.0 +2024-07-27 12:24:59,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=5.74 vs. 
limit=13.091000000000001 +2024-07-27 12:25:07,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=14922.666666666666, ans=0.007625507246376811 +2024-07-27 12:25:13,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.467e+01 6.513e+01 7.102e+01 8.226e+01 2.101e+02, threshold=1.420e+02, percent-clipped=1.0 +2024-07-27 12:25:19,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=14949.333333333334, ans=0.004377777777777779 +2024-07-27 12:25:25,580 INFO [train.py:1114] (3/4) Epoch 2, batch 1000, loss[loss=0.2981, simple_loss=0.3485, pruned_loss=0.1238, over 4971.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3982, pruned_loss=0.1507, over 930109.53 frames. ], batch size: 13, lr: 3.29e-02, grad_scale: 64.0 +2024-07-27 12:25:27,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.37 vs. limit=13.111 +2024-07-27 12:25:28,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=14962.666666666666, ans=0.125 +2024-07-27 12:25:43,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=14989.333333333334, ans=0.37537333333333334 +2024-07-27 12:25:45,778 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:25:47,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=15002.666666666666, ans=0.125 +2024-07-27 12:25:48,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.58 vs. limit=12.501333333333333 +2024-07-27 12:25:56,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=15016.0, ans=0.125 +2024-07-27 12:25:59,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=15029.333333333334, ans=0.3739733333333334 +2024-07-27 12:26:00,220 INFO [train.py:1114] (3/4) Epoch 2, batch 1050, loss[loss=0.3184, simple_loss=0.3795, pruned_loss=0.1287, over 4868.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3958, pruned_loss=0.149, over 932117.38 frames. ], batch size: 14, lr: 3.28e-02, grad_scale: 64.0 +2024-07-27 12:26:06,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.85 vs. 
limit=13.141 +2024-07-27 12:26:11,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=15042.666666666666, ans=0.125 +2024-07-27 12:26:14,693 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:26:17,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=15056.0, ans=10.0 +2024-07-27 12:26:18,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=15056.0, ans=0.007596521739130435 +2024-07-27 12:26:22,963 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.480e+01 6.937e+01 7.724e+01 1.151e+02, threshold=1.387e+02, percent-clipped=0.0 +2024-07-27 12:26:27,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_ff2.min_abs, batch_count=15082.666666666666, ans=0.1 +2024-07-27 12:26:27,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=15082.666666666666, ans=0.125 +2024-07-27 12:26:35,188 INFO [train.py:1114] (3/4) Epoch 2, batch 1100, loss[loss=0.3286, simple_loss=0.3759, pruned_loss=0.1407, over 4896.00 frames. ], tot_loss[loss=0.345, simple_loss=0.3941, pruned_loss=0.148, over 934277.85 frames. ], batch size: 13, lr: 3.28e-02, grad_scale: 64.0 +2024-07-27 12:26:36,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.99 vs. limit=12.548 +2024-07-27 12:26:42,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15109.333333333334, ans=0.14890666666666666 +2024-07-27 12:26:54,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.19 vs. limit=18.842 +2024-07-27 12:27:04,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15149.333333333334, ans=0.14850666666666668 +2024-07-27 12:27:08,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.whiten.whitening_limit, batch_count=15149.333333333334, ans=10.059733333333334 +2024-07-27 12:27:09,694 INFO [train.py:1114] (3/4) Epoch 2, batch 1150, loss[loss=0.3196, simple_loss=0.3741, pruned_loss=0.1325, over 4899.00 frames. ], tot_loss[loss=0.3452, simple_loss=0.3943, pruned_loss=0.148, over 933828.80 frames. ], batch size: 13, lr: 3.27e-02, grad_scale: 64.0 +2024-07-27 12:27:14,695 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=1.413e-02 +2024-07-27 12:27:15,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=15162.666666666666, ans=0.007573333333333333 +2024-07-27 12:27:20,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=15176.0, ans=0.125 +2024-07-27 12:27:28,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.28 vs. 
limit=13.196 +2024-07-27 12:27:35,088 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.945e+01 6.616e+01 7.321e+01 8.237e+01 1.316e+02, threshold=1.464e+02, percent-clipped=0.0 +2024-07-27 12:27:48,587 INFO [train.py:1114] (3/4) Epoch 2, batch 1200, loss[loss=0.3767, simple_loss=0.4238, pruned_loss=0.1648, over 4877.00 frames. ], tot_loss[loss=0.3459, simple_loss=0.3947, pruned_loss=0.1486, over 932904.34 frames. ], batch size: 14, lr: 3.27e-02, grad_scale: 64.0 +2024-07-27 12:28:07,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=15256.0, ans=0.125 +2024-07-27 12:28:13,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=15269.333333333334, ans=0.3655733333333334 +2024-07-27 12:28:25,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=15282.666666666666, ans=0.125 +2024-07-27 12:28:26,770 INFO [train.py:1114] (3/4) Epoch 2, batch 1250, loss[loss=0.3785, simple_loss=0.3909, pruned_loss=0.1831, over 4801.00 frames. ], tot_loss[loss=0.3458, simple_loss=0.3955, pruned_loss=0.148, over 937029.28 frames. ], batch size: 15, lr: 3.26e-02, grad_scale: 64.0 +2024-07-27 12:28:38,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=15309.333333333334, ans=0.025 +2024-07-27 12:28:41,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=15322.666666666666, ans=0.025 +2024-07-27 12:28:45,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=15322.666666666666, ans=0.125 +2024-07-27 12:28:49,355 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.573e+01 7.173e+01 8.198e+01 1.375e+02, threshold=1.435e+02, percent-clipped=0.0 +2024-07-27 12:28:50,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=15336.0, ans=0.125 +2024-07-27 12:28:55,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=15349.333333333334, ans=0.04949747468305833 +2024-07-27 12:29:01,003 INFO [train.py:1114] (3/4) Epoch 2, batch 1300, loss[loss=0.3814, simple_loss=0.4265, pruned_loss=0.1681, over 4705.00 frames. ], tot_loss[loss=0.3434, simple_loss=0.393, pruned_loss=0.1469, over 938700.17 frames. ], batch size: 19, lr: 3.26e-02, grad_scale: 64.0 +2024-07-27 12:29:05,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=15362.666666666666, ans=0.025 +2024-07-27 12:29:18,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=13.271 +2024-07-27 12:29:19,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=15389.333333333334, ans=0.0 +2024-07-27 12:29:20,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15389.333333333334, ans=0.125 +2024-07-27 12:29:35,585 INFO [train.py:1114] (3/4) Epoch 2, batch 1350, loss[loss=0.3226, simple_loss=0.3884, pruned_loss=0.1284, over 4766.00 frames. 
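(The many `balancer*` entries — `.prob`, `min_positive`, `max_positive`, `min_abs`, `max_abs` — configure Balancer modules, which keep simple per-channel activation statistics inside scheduled bounds by adding small corrective gradients; `prob` is the probability that a given batch applies the correction at all. A hedged sketch of just the statistics involved, illustrative only:)

```python
import torch

def balancer_stats(x: torch.Tensor):
    # x: (num_frames, num_channels). These are the per-channel quantities a
    # Balancer nudges toward [min_positive, max_positive] and
    # [min_abs, max_abs] via small gradient corrections.
    positive_frac = (x > 0).float().mean(dim=0)
    mean_abs = x.abs().mean(dim=0)
    return positive_frac, mean_abs
```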
], tot_loss[loss=0.3431, simple_loss=0.3928, pruned_loss=0.1467, over 940659.94 frames. ], batch size: 13, lr: 3.25e-02, grad_scale: 64.0 +2024-07-27 12:29:46,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=15442.666666666666, ans=0.125 +2024-07-27 12:29:57,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=15469.333333333334, ans=0.002211111111111108 +2024-07-27 12:29:58,812 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.395e+01 7.183e+01 7.821e+01 1.561e+02, threshold=1.437e+02, percent-clipped=1.0 +2024-07-27 12:30:00,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=15469.333333333334, ans=0.007506666666666667 +2024-07-27 12:30:10,899 INFO [train.py:1114] (3/4) Epoch 2, batch 1400, loss[loss=0.3512, simple_loss=0.3845, pruned_loss=0.1589, over 4698.00 frames. ], tot_loss[loss=0.3429, simple_loss=0.3928, pruned_loss=0.1465, over 942503.40 frames. ], batch size: 11, lr: 3.25e-02, grad_scale: 64.0 +2024-07-27 12:30:17,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=15509.333333333334, ans=0.125 +2024-07-27 12:30:25,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=15522.666666666666, ans=0.001988888888888894 +2024-07-27 12:30:31,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=15536.0, ans=0.025 +2024-07-27 12:30:33,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.51 vs. limit=13.326 +2024-07-27 12:30:37,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=15536.0, ans=0.125 +2024-07-27 12:30:42,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.60 vs. limit=10.219733333333334 +2024-07-27 12:30:46,196 INFO [train.py:1114] (3/4) Epoch 2, batch 1450, loss[loss=0.3505, simple_loss=0.4111, pruned_loss=0.145, over 4675.00 frames. ], tot_loss[loss=0.3448, simple_loss=0.3946, pruned_loss=0.1475, over 942270.47 frames. ], batch size: 15, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:30:49,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=15562.666666666666, ans=0.125 +2024-07-27 12:30:50,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=15562.666666666666, ans=0.001822222222222225 +2024-07-27 12:30:55,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=15576.0, ans=0.125 +2024-07-27 12:30:55,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=15576.0, ans=0.0074834782608695655 +2024-07-27 12:30:55,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.51 vs. 
limit=19.182000000000002 +2024-07-27 12:31:08,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=15602.666666666666, ans=0.001655555555555556 +2024-07-27 12:31:08,926 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.669e+01 6.624e+01 7.283e+01 7.925e+01 1.878e+02, threshold=1.457e+02, percent-clipped=2.0 +2024-07-27 12:31:12,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=15602.666666666666, ans=19.201999999999998 +2024-07-27 12:31:18,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.73 vs. limit=19.212 +2024-07-27 12:31:20,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.12 vs. limit=13.356 +2024-07-27 12:31:21,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=15629.333333333334, ans=0.14370666666666668 +2024-07-27 12:31:23,973 INFO [train.py:1114] (3/4) Epoch 2, batch 1500, loss[loss=0.2797, simple_loss=0.3553, pruned_loss=0.1021, over 4808.00 frames. ], tot_loss[loss=0.3444, simple_loss=0.3945, pruned_loss=0.1472, over 942201.76 frames. ], batch size: 14, lr: 3.24e-02, grad_scale: 64.0 +2024-07-27 12:31:24,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=15629.333333333334, ans=0.125 +2024-07-27 12:31:30,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.34 vs. 
limit=10.251733333333334 +2024-07-27 12:31:30,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=15642.666666666666, ans=0.9064266666666666 +2024-07-27 12:31:30,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=15642.666666666666, ans=0.125 +2024-07-27 12:31:30,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=15642.666666666666, ans=0.0014888888888888938 +2024-07-27 12:31:30,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=15642.666666666666, ans=0.125 +2024-07-27 12:31:31,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=15642.666666666666, ans=0.35250666666666675 +2024-07-27 12:31:32,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=15642.666666666666, ans=0.35250666666666675 +2024-07-27 12:31:43,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=15656.0, ans=0.125 +2024-07-27 12:31:46,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=15669.333333333334, ans=0.125 +2024-07-27 12:31:46,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=15669.333333333334, ans=0.125 +2024-07-27 12:31:58,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.72 vs. limit=13.386 +2024-07-27 12:31:58,848 INFO [train.py:1114] (3/4) Epoch 2, batch 1550, loss[loss=0.3969, simple_loss=0.4329, pruned_loss=0.1805, over 4891.00 frames. ], tot_loss[loss=0.3453, simple_loss=0.3951, pruned_loss=0.1477, over 938013.27 frames. ], batch size: 15, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:04,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15696.0, ans=0.14304 +2024-07-27 12:32:06,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=15709.333333333334, ans=0.0012111111111111142 +2024-07-27 12:32:08,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=13.391 +2024-07-27 12:32:22,802 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.789e+01 6.571e+01 7.346e+01 8.400e+01 2.303e+02, threshold=1.469e+02, percent-clipped=1.0 +2024-07-27 12:32:23,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=15736.0, ans=0.125 +2024-07-27 12:32:29,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.75 vs. limit=13.405999999999999 +2024-07-27 12:32:34,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.87 vs. 
limit=13.411 +2024-07-27 12:32:34,711 INFO [train.py:1114] (3/4) Epoch 2, batch 1600, loss[loss=0.3122, simple_loss=0.38, pruned_loss=0.1221, over 4870.00 frames. ], tot_loss[loss=0.3425, simple_loss=0.3929, pruned_loss=0.1461, over 936967.45 frames. ], batch size: 14, lr: 3.23e-02, grad_scale: 64.0 +2024-07-27 12:32:37,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15762.666666666666, ans=0.14237333333333335 +2024-07-27 12:32:41,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.51 vs. limit=13.416 +2024-07-27 12:32:45,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.56 vs. limit=13.416 +2024-07-27 12:32:49,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=15789.333333333334, ans=0.025 +2024-07-27 12:32:55,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15789.333333333334, ans=0.14210666666666666 +2024-07-27 12:33:06,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=15816.0, ans=0.125 +2024-07-27 12:33:07,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=15816.0, ans=0.007431304347826087 +2024-07-27 12:33:13,396 INFO [train.py:1114] (3/4) Epoch 2, batch 1650, loss[loss=0.3675, simple_loss=0.4148, pruned_loss=0.1601, over 4672.00 frames. ], tot_loss[loss=0.3433, simple_loss=0.393, pruned_loss=0.1468, over 936547.71 frames. ], batch size: 14, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:33:15,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=15829.333333333334, ans=0.0007111111111111137 +2024-07-27 12:33:26,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=15842.666666666666, ans=0.14157333333333336 +2024-07-27 12:33:36,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15856.0, ans=0.14144 +2024-07-27 12:33:42,229 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.520e+01 7.164e+01 7.874e+01 1.221e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 12:33:44,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=15869.333333333334, ans=0.025 +2024-07-27 12:33:58,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.09 vs. limit=19.422 +2024-07-27 12:33:59,015 INFO [train.py:1114] (3/4) Epoch 2, batch 1700, loss[loss=0.3445, simple_loss=0.3885, pruned_loss=0.1502, over 4691.00 frames. ], tot_loss[loss=0.3433, simple_loss=0.3932, pruned_loss=0.1467, over 938655.77 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:02,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.61 vs. 
limit=13.461 +2024-07-27 12:34:03,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=15896.0, ans=0.125 +2024-07-27 12:34:04,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=15896.0, ans=0.07 +2024-07-27 12:34:14,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=15922.666666666666, ans=0.14077333333333333 +2024-07-27 12:34:16,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=15922.666666666666, ans=0.00032222222222222374 +2024-07-27 12:34:29,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.88 vs. limit=19.462 +2024-07-27 12:34:32,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.12 vs. limit=13.481 +2024-07-27 12:34:33,914 INFO [train.py:1114] (3/4) Epoch 2, batch 1750, loss[loss=0.3362, simple_loss=0.3552, pruned_loss=0.1586, over 4812.00 frames. ], tot_loss[loss=0.3433, simple_loss=0.393, pruned_loss=0.1468, over 940098.30 frames. ], batch size: 11, lr: 3.22e-02, grad_scale: 64.0 +2024-07-27 12:34:37,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.47 vs. limit=13.486 +2024-07-27 12:34:38,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=15962.666666666666, ans=0.0073994202898550725 +2024-07-27 12:34:39,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=15962.666666666666, ans=0.14037333333333335 +2024-07-27 12:34:51,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=15989.333333333334, ans=0.125 +2024-07-27 12:34:59,089 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.489e+01 6.792e+01 7.364e+01 8.042e+01 2.018e+02, threshold=1.473e+02, percent-clipped=1.0 +2024-07-27 12:35:02,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16002.666666666666, ans=0.125 +2024-07-27 12:35:04,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.31 vs. limit=13.506 +2024-07-27 12:35:04,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=16016.0, ans=0.125 +2024-07-27 12:35:10,710 INFO [train.py:1114] (3/4) Epoch 2, batch 1800, loss[loss=0.3593, simple_loss=0.3999, pruned_loss=0.1594, over 4638.00 frames. ], tot_loss[loss=0.3432, simple_loss=0.3927, pruned_loss=0.1469, over 940996.58 frames. 
], batch size: 13, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:22,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=16042.666666666666, ans=0.125 +2024-07-27 12:35:22,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16042.666666666666, ans=0.13957333333333335 +2024-07-27 12:35:39,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=16082.666666666666, ans=0.125 +2024-07-27 12:35:45,376 INFO [train.py:1114] (3/4) Epoch 2, batch 1850, loss[loss=0.3167, simple_loss=0.3808, pruned_loss=0.1264, over 4804.00 frames. ], tot_loss[loss=0.3435, simple_loss=0.3931, pruned_loss=0.147, over 940505.20 frames. ], batch size: 14, lr: 3.21e-02, grad_scale: 64.0 +2024-07-27 12:35:52,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=16109.333333333334, ans=0.125 +2024-07-27 12:35:52,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=16109.333333333334, ans=0.125 +2024-07-27 12:35:55,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=16109.333333333334, ans=0.125 +2024-07-27 12:36:17,282 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.413e+01 7.038e+01 7.663e+01 1.052e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 12:36:17,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16136.0, ans=0.13863999999999999 +2024-07-27 12:36:28,686 INFO [train.py:1114] (3/4) Epoch 2, batch 1900, loss[loss=0.3249, simple_loss=0.3864, pruned_loss=0.1317, over 4666.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3923, pruned_loss=0.1457, over 942050.24 frames. ], batch size: 14, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:36:34,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=16162.666666666666, ans=0.125 +2024-07-27 12:36:41,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=16176.0, ans=0.125 +2024-07-27 12:37:05,420 INFO [train.py:1114] (3/4) Epoch 2, batch 1950, loss[loss=0.3048, simple_loss=0.3527, pruned_loss=0.1285, over 4899.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3928, pruned_loss=0.1455, over 943884.10 frames. 
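(`grad_scale` in the per-batch loss lines is the AMP dynamic loss scale: it sits at 64.0 for most of this stretch, doubles to 128.0 at batch 800, and is back at 64.0 by batch 850 — i.e. growth after a run of overflow-free steps followed by backoff on the next overflow. A sketch of the standard update rule; the interval and factors follow the usual `GradScaler` defaults in spirit, not this run's exact settings:)

```python
class DynamicLossScale:
    """Grow the scale after N clean steps; halve it when grads overflow."""
    def __init__(self, scale=64.0, growth=2.0, backoff=0.5, interval=2000):
        self.scale, self.growth = scale, growth
        self.backoff, self.interval = backoff, interval
        self._clean_steps = 0

    def update(self, found_inf: bool) -> None:
        if found_inf:
            self.scale *= self.backoff     # overflow: back off, restart count
            self._clean_steps = 0
        else:
            self._clean_steps += 1
            if self._clean_steps >= self.interval:
                self.scale *= self.growth  # e.g. the 64.0 -> 128.0 step here
                self._clean_steps = 0
```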
], batch size: 13, lr: 3.20e-02, grad_scale: 64.0 +2024-07-27 12:37:22,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=16256.0, ans=0.0 +2024-07-27 12:37:24,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=16256.0, ans=0.125 +2024-07-27 12:37:28,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.625e+01 6.630e+01 7.143e+01 8.194e+01 1.176e+02, threshold=1.429e+02, percent-clipped=0.0 +2024-07-27 12:37:43,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=16282.666666666666, ans=0.08717333333333332 +2024-07-27 12:38:00,620 INFO [train.py:1114] (3/4) Epoch 2, batch 2000, loss[loss=0.3072, simple_loss=0.3583, pruned_loss=0.1281, over 4805.00 frames. ], tot_loss[loss=0.3438, simple_loss=0.3939, pruned_loss=0.1468, over 941223.92 frames. ], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:05,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=16296.0, ans=19.722 +2024-07-27 12:38:06,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16309.333333333334, ans=0.13690666666666668 +2024-07-27 12:38:22,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.40 vs. limit=19.752000000000002 +2024-07-27 12:38:23,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.66 vs. limit=13.626000000000001 +2024-07-27 12:38:35,085 INFO [train.py:1114] (3/4) Epoch 2, batch 2050, loss[loss=0.3288, simple_loss=0.3654, pruned_loss=0.1461, over 4618.00 frames. ], tot_loss[loss=0.3412, simple_loss=0.3916, pruned_loss=0.1454, over 939411.54 frames. ], batch size: 11, lr: 3.19e-02, grad_scale: 64.0 +2024-07-27 12:38:49,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.24 vs. limit=19.791999999999998 +2024-07-27 12:38:53,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=16389.333333333332, ans=0.9138933333333333 +2024-07-27 12:38:56,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.36 vs. limit=13.651 +2024-07-27 12:38:58,885 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.444e+01 7.138e+01 8.017e+01 1.723e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 12:39:14,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=16416.0, ans=0.125 +2024-07-27 12:39:19,463 INFO [train.py:1114] (3/4) Epoch 2, batch 2100, loss[loss=0.351, simple_loss=0.4038, pruned_loss=0.1491, over 4767.00 frames. ], tot_loss[loss=0.3381, simple_loss=0.3897, pruned_loss=0.1433, over 941100.65 frames. 
], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:39:27,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=16429.333333333332, ans=0.125 +2024-07-27 12:39:41,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=16456.0, ans=0.3240400000000001 +2024-07-27 12:39:45,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.11 vs. limit=13.671 +2024-07-27 12:39:51,365 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.58 vs. limit=19.851999999999997 +2024-07-27 12:39:58,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.13 vs. limit=13.241333333333335 +2024-07-27 12:39:58,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=16482.666666666668, ans=0.32310666666666665 +2024-07-27 12:40:06,950 INFO [train.py:1114] (3/4) Epoch 2, batch 2150, loss[loss=0.3144, simple_loss=0.3779, pruned_loss=0.1254, over 4896.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.3879, pruned_loss=0.1413, over 944055.66 frames. ], batch size: 13, lr: 3.18e-02, grad_scale: 64.0 +2024-07-27 12:40:07,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=16496.0, ans=0.025 +2024-07-27 12:40:07,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=16496.0, ans=0.125 +2024-07-27 12:40:15,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=16496.0, ans=0.125 +2024-07-27 12:40:16,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=16509.333333333332, ans=0.0 +2024-07-27 12:40:18,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.98 vs. limit=13.690999999999999 +2024-07-27 12:40:25,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=16522.666666666668, ans=0.125 +2024-07-27 12:40:31,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=16536.0, ans=0.125 +2024-07-27 12:40:33,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=16536.0, ans=0.3212400000000001 +2024-07-27 12:40:33,589 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.279e+01 6.440e+01 7.313e+01 8.077e+01 1.347e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 12:40:44,971 INFO [train.py:1114] (3/4) Epoch 2, batch 2200, loss[loss=0.3832, simple_loss=0.4432, pruned_loss=0.1617, over 4808.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.389, pruned_loss=0.1426, over 943567.94 frames. 
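(The `lr` field decays smoothly across these entries — 3.47e-02 near the end of epoch 1 down to 3.17e-02 by epoch 2, batch 2200 — rather than in discrete scheduler steps, a shape consistent with a batch-and-epoch-dependent rule such as icefall's Eden schedule. A hedged sketch of that shape; `base_lr`, `lr_batches`, and `lr_epochs` are placeholders, not this run's settings:)

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Smooth polynomial decay in both batch count and epoch; no step changes.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```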
], batch size: 14, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:40:48,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=16562.666666666668, ans=0.125 +2024-07-27 12:40:49,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=19.922 +2024-07-27 12:40:49,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.03 vs. limit=19.922 +2024-07-27 12:40:54,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=16576.0, ans=0.125 +2024-07-27 12:41:08,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16589.333333333332, ans=0.13410666666666668 +2024-07-27 12:41:08,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=16589.333333333332, ans=0.0 +2024-07-27 12:41:12,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.30 vs. limit=13.726 +2024-07-27 12:41:22,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=16616.0, ans=0.125 +2024-07-27 12:41:26,216 INFO [train.py:1114] (3/4) Epoch 2, batch 2250, loss[loss=0.3187, simple_loss=0.3813, pruned_loss=0.1281, over 4701.00 frames. ], tot_loss[loss=0.336, simple_loss=0.3879, pruned_loss=0.1421, over 941802.03 frames. ], batch size: 13, lr: 3.17e-02, grad_scale: 64.0 +2024-07-27 12:41:41,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=16656.0, ans=0.125 +2024-07-27 12:41:46,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=16669.333333333332, ans=0.3165733333333335 +2024-07-27 12:41:48,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.64 vs. limit=20.002 +2024-07-27 12:41:48,633 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.411e+01 7.130e+01 8.285e+01 1.332e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 12:41:49,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=16669.333333333332, ans=0.025 +2024-07-27 12:42:00,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=16696.0, ans=0.0 +2024-07-27 12:42:00,435 INFO [train.py:1114] (3/4) Epoch 2, batch 2300, loss[loss=0.3083, simple_loss=0.3498, pruned_loss=0.1334, over 4941.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3875, pruned_loss=0.1421, over 939017.07 frames. 
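The recurring `optim.py:487` WARNING lines summarize the recent distribution of gradient norms: the five numbers are quantiles (min, 25%, median, 75%, max) over a window of per-batch norms, followed by the clipping threshold and the percentage of recent batches whose norm exceeded it. A sketch of how such a summary could be produced, under the assumption that the threshold is the window median scaled by `clipping_scale`; the real ScaledAdam clipping logic may differ in detail:

```python
import torch

def grad_norm_summary(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    """recent_norms: 1-D tensor of gradient norms from the last N batches.
    Returns (quantiles, threshold, percent_clipped); a sketch only."""
    q = torch.quantile(recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()          # assumed: scale x median
    pct = 100.0 * (recent_norms > threshold).float().mean().item()
    return q.tolist(), threshold, pct

norms = 60.0 + 20.0 * torch.rand(128)                # fake window of norms
quartiles, threshold, pct = grad_norm_summary(norms)
print(quartiles, threshold, pct)  # mirrors "grad-norm quartiles ... threshold ... percent-clipped"
```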
], batch size: 12, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:42:29,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16736.0, ans=0.13264 +2024-07-27 12:42:36,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=16749.333333333332, ans=0.125 +2024-07-27 12:42:45,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=16749.333333333332, ans=0.31377333333333346 +2024-07-27 12:42:46,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=16749.333333333332, ans=0.09899494936611666 +2024-07-27 12:42:47,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=16762.666666666668, ans=0.125 +2024-07-27 12:42:47,927 INFO [train.py:1114] (3/4) Epoch 2, batch 2350, loss[loss=0.302, simple_loss=0.3723, pruned_loss=0.1158, over 4642.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.3873, pruned_loss=0.1415, over 941414.59 frames. ], batch size: 13, lr: 3.16e-02, grad_scale: 64.0 +2024-07-27 12:43:06,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=16789.333333333332, ans=0.3123733333333335 +2024-07-27 12:43:13,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.842e+01 6.484e+01 7.035e+01 7.953e+01 1.463e+02, threshold=1.407e+02, percent-clipped=1.0 +2024-07-27 12:43:24,675 INFO [train.py:1114] (3/4) Epoch 2, batch 2400, loss[loss=0.3224, simple_loss=0.3838, pruned_loss=0.1305, over 4636.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.3874, pruned_loss=0.1415, over 941040.82 frames. ], batch size: 12, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:43:36,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=16842.666666666668, ans=0.125 +2024-07-27 12:44:06,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=16869.333333333332, ans=0.9186933333333334 +2024-07-27 12:44:14,567 INFO [train.py:1114] (3/4) Epoch 2, batch 2450, loss[loss=0.3073, simple_loss=0.3797, pruned_loss=0.1174, over 4698.00 frames. ], tot_loss[loss=0.3362, simple_loss=0.3883, pruned_loss=0.142, over 937259.50 frames. 
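In the `train.py:1114` records, `loss[... over N frames]` is the current batch while `tot_loss[... over M frames]` is a frame-weighted running average over recent batches, which is why M hovers near ~940k frames even as batch sizes vary. A sketch of one way to maintain such a tracker, assuming exponential decay of the accumulated sums; the decay constant is illustrative:

```python
class RunningLoss:
    """Frame-weighted, exponentially decayed loss average; a sketch."""
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames     # the reported tot_loss

tracker = RunningLoss()
for loss, frames in [(0.35, 4800.0), (0.33, 4700.0), (0.34, 4900.0)]:
    tot = tracker.update(loss, frames)
print(f"tot_loss={tot:.4f} over {tracker.frames:.2f} frames")
```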
], batch size: 13, lr: 3.15e-02, grad_scale: 64.0 +2024-07-27 12:44:28,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=16909.333333333332, ans=0.13090666666666667 +2024-07-27 12:44:39,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=16936.0, ans=0.13064 +2024-07-27 12:44:43,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=16936.0, ans=0.125 +2024-07-27 12:44:43,826 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.416e+01 7.061e+01 7.801e+01 1.253e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 12:44:50,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=16949.333333333332, ans=0.125 +2024-07-27 12:44:56,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=16949.333333333332, ans=0.125 +2024-07-27 12:45:00,022 INFO [train.py:1114] (3/4) Epoch 2, batch 2500, loss[loss=0.3342, simple_loss=0.3917, pruned_loss=0.1384, over 4812.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.388, pruned_loss=0.1418, over 939407.77 frames. ], batch size: 14, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:11,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.38 vs. limit=13.488 +2024-07-27 12:45:13,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=16989.333333333332, ans=0.0 +2024-07-27 12:45:23,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=17002.666666666668, ans=0.125 +2024-07-27 12:45:26,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17002.666666666668, ans=0.12997333333333333 +2024-07-27 12:45:27,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=17002.666666666668, ans=0.125 +2024-07-27 12:45:31,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.99 vs. limit=13.876000000000001 +2024-07-27 12:45:36,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=17016.0, ans=0.025 +2024-07-27 12:45:38,911 INFO [train.py:1114] (3/4) Epoch 2, batch 2550, loss[loss=0.326, simple_loss=0.3784, pruned_loss=0.1368, over 4793.00 frames. ], tot_loss[loss=0.3363, simple_loss=0.3889, pruned_loss=0.1419, over 938640.71 frames. ], batch size: 11, lr: 3.14e-02, grad_scale: 64.0 +2024-07-27 12:45:42,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17029.333333333332, ans=0.125 +2024-07-27 12:45:47,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. 
limit=5.5564 +2024-07-27 12:46:03,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=17056.0, ans=0.125 +2024-07-27 12:46:04,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=17056.0, ans=0.125 +2024-07-27 12:46:06,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.53 vs. limit=13.896 +2024-07-27 12:46:07,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17056.0, ans=0.12944 +2024-07-27 12:46:07,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.37 vs. limit=13.896 +2024-07-27 12:46:13,110 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.481e+01 6.949e+01 7.902e+01 1.029e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 12:46:14,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=17069.333333333332, ans=0.12930666666666668 +2024-07-27 12:46:14,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=17069.333333333332, ans=0.125 +2024-07-27 12:46:26,458 INFO [train.py:1114] (3/4) Epoch 2, batch 2600, loss[loss=0.307, simple_loss=0.3667, pruned_loss=0.1236, over 4895.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3899, pruned_loss=0.1426, over 937468.33 frames. ], batch size: 13, lr: 3.14e-02, grad_scale: 32.0 +2024-07-27 12:46:28,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=17096.0, ans=0.30164000000000013 +2024-07-27 12:46:34,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=17109.333333333332, ans=0.0071501449275362325 +2024-07-27 12:46:38,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=17109.333333333332, ans=0.0 +2024-07-27 12:46:47,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17122.666666666668, ans=0.12877333333333332 +2024-07-27 12:46:49,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17122.666666666668, ans=0.12877333333333332 +2024-07-27 12:46:52,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.18 vs. limit=13.561333333333334 +2024-07-27 12:47:23,231 INFO [train.py:1114] (3/4) Epoch 2, batch 2650, loss[loss=0.334, simple_loss=0.4047, pruned_loss=0.1317, over 4645.00 frames. ], tot_loss[loss=0.337, simple_loss=0.3896, pruned_loss=0.1422, over 939567.25 frames. 
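Each `Whitening` line compares a measured statistic against a scheduled `whitening_limit`: the metric equals 1.0 when the channel covariance is proportional to the identity and grows as channels become correlated or unevenly scaled, and the module penalizes activations only once the limit is exceeded (hence the frequent "metric=X vs. limit=Y" with X below Y). A rough sketch of one such metric, assuming it is computed as d·tr(C²)/tr(C)² over the channel covariance C; this is an illustrative formulation, not necessarily the exact `scaling.py` one:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels) activations.
    Returns d * tr(C^2) / tr(C)^2 for covariance C; equals 1.0 when C = c*I."""
    x = x - x.mean(dim=0, keepdim=True)
    c = (x.T @ x) / x.shape[0]                  # channel covariance, (d, d)
    d = c.shape[0]
    return (d * (c @ c).trace() / c.trace() ** 2).item()

feats = torch.randn(1000, 256) @ torch.randn(256, 256)  # correlated channels
metric = whitening_metric(feats)
limit = 19.75                                   # a scheduled limit, as in the log
print(f"metric={metric:.2f} vs. limit={limit}")  # penalize only if metric > limit
```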
], batch size: 16, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:47:23,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=17162.666666666668, ans=0.125 +2024-07-27 12:47:29,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=17176.0, ans=0.125 +2024-07-27 12:47:41,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.21 vs. limit=13.945999999999998 +2024-07-27 12:47:46,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=17189.333333333332, ans=0.125 +2024-07-27 12:47:51,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.937e+01 6.612e+01 7.199e+01 8.016e+01 1.169e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 12:47:52,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17202.666666666668, ans=0.125 +2024-07-27 12:48:02,475 INFO [train.py:1114] (3/4) Epoch 2, batch 2700, loss[loss=0.3343, simple_loss=0.3885, pruned_loss=0.14, over 4745.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3894, pruned_loss=0.1417, over 939613.94 frames. ], batch size: 14, lr: 3.13e-02, grad_scale: 32.0 +2024-07-27 12:48:07,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=17229.333333333332, ans=0.125 +2024-07-27 12:48:14,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=17242.666666666668, ans=0.0 +2024-07-27 12:48:15,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=13.971 +2024-07-27 12:48:22,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.36 vs. limit=20.451999999999998 +2024-07-27 12:48:23,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.67 vs. limit=20.451999999999998 +2024-07-27 12:48:32,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17282.666666666668, ans=0.125 +2024-07-27 12:48:36,982 INFO [train.py:1114] (3/4) Epoch 2, batch 2750, loss[loss=0.2816, simple_loss=0.3424, pruned_loss=0.1104, over 4706.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3882, pruned_loss=0.1408, over 939569.25 frames. 
], batch size: 12, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:48:40,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=17296.0, ans=0.04949747468305833 +2024-07-27 12:48:43,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=17296.0, ans=0.125 +2024-07-27 12:48:44,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=17296.0, ans=0.12704 +2024-07-27 12:48:52,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=17322.666666666668, ans=0.125 +2024-07-27 12:48:53,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=17322.666666666668, ans=0.125 +2024-07-27 12:49:02,554 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.446e+01 6.464e+01 7.074e+01 8.489e+01 1.052e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-27 12:49:12,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.73 vs. limit=14.006 +2024-07-27 12:49:12,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=17349.333333333332, ans=0.025 +2024-07-27 12:49:13,957 INFO [train.py:1114] (3/4) Epoch 2, batch 2800, loss[loss=0.4632, simple_loss=0.4635, pruned_loss=0.2315, over 3567.00 frames. ], tot_loss[loss=0.3356, simple_loss=0.3887, pruned_loss=0.1413, over 937357.14 frames. ], batch size: 35, lr: 3.12e-02, grad_scale: 32.0 +2024-07-27 12:49:18,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=17362.666666666668, ans=0.125 +2024-07-27 12:49:20,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=17376.0, ans=0.0 +2024-07-27 12:49:48,561 INFO [train.py:1114] (3/4) Epoch 2, batch 2850, loss[loss=0.2931, simple_loss=0.346, pruned_loss=0.1201, over 4963.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3902, pruned_loss=0.1427, over 935385.20 frames. ], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:49:48,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=17429.333333333332, ans=0.0 +2024-07-27 12:49:59,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=17442.666666666668, ans=0.025 +2024-07-27 12:50:02,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.20 vs. limit=20.592 +2024-07-27 12:50:06,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=17456.0, ans=0.2890400000000001 +2024-07-27 12:50:09,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.29 vs. 
limit=5.6204 +2024-07-27 12:50:11,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=17469.333333333332, ans=0.007071884057971015 +2024-07-27 12:50:11,763 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.590e+01 7.080e+01 8.267e+01 4.948e+02, threshold=1.416e+02, percent-clipped=1.0 +2024-07-27 12:50:21,121 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.53 vs. limit=10.993066666666667 +2024-07-27 12:50:32,062 INFO [train.py:1114] (3/4) Epoch 2, batch 2900, loss[loss=0.3024, simple_loss=0.3632, pruned_loss=0.1208, over 4829.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3902, pruned_loss=0.1413, over 939344.75 frames. ], batch size: 13, lr: 3.11e-02, grad_scale: 32.0 +2024-07-27 12:50:41,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=17509.333333333332, ans=0.125 +2024-07-27 12:50:44,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=17509.333333333332, ans=0.125 +2024-07-27 12:50:44,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=17509.333333333332, ans=0.0 +2024-07-27 12:50:57,209 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:50:59,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=17536.0, ans=0.28624000000000005 +2024-07-27 12:51:03,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=17549.333333333332, ans=0.28577333333333343 +2024-07-27 12:51:10,549 INFO [train.py:1114] (3/4) Epoch 2, batch 2950, loss[loss=0.325, simple_loss=0.3679, pruned_loss=0.1411, over 4703.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.3877, pruned_loss=0.1408, over 938695.18 frames. ], batch size: 12, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:13,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=17562.666666666668, ans=0.2853066666666667 +2024-07-27 12:51:15,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=17562.666666666668, ans=0.125 +2024-07-27 12:51:33,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=17589.333333333332, ans=0.125 +2024-07-27 12:51:38,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.77 vs. limit=20.701999999999998 +2024-07-27 12:51:38,401 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.523e+01 7.161e+01 8.021e+01 1.155e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 12:51:43,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.08 vs. limit=14.106 +2024-07-27 12:51:49,463 INFO [train.py:1114] (3/4) Epoch 2, batch 3000, loss[loss=0.3413, simple_loss=0.3973, pruned_loss=0.1427, over 4755.00 frames. ], tot_loss[loss=0.333, simple_loss=0.3863, pruned_loss=0.1398, over 938229.55 frames. 
], batch size: 13, lr: 3.10e-02, grad_scale: 32.0 +2024-07-27 12:51:49,464 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 12:51:57,955 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.3615, 3.5898, 3.7805, 2.7690], device='cuda:3') +2024-07-27 12:52:02,770 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.2667, simple_loss=0.3583, pruned_loss=0.0876, over 944034.00 frames. +2024-07-27 12:52:13,329 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 12:52:21,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=17642.666666666668, ans=0.0 +2024-07-27 12:52:39,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=17669.333333333332, ans=0.04949747468305833 +2024-07-27 12:52:52,631 INFO [train.py:1114] (3/4) Epoch 2, batch 3050, loss[loss=0.3326, simple_loss=0.4026, pruned_loss=0.1313, over 4640.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3877, pruned_loss=0.1411, over 937146.77 frames. ], batch size: 12, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:53:02,430 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:53:07,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=17709.333333333332, ans=0.09899494936611666 +2024-07-27 12:53:18,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.638e+01 6.442e+01 7.179e+01 7.661e+01 1.033e+02, threshold=1.436e+02, percent-clipped=0.0 +2024-07-27 12:53:25,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=17749.333333333332, ans=0.125 +2024-07-27 12:53:29,166 INFO [train.py:1114] (3/4) Epoch 2, batch 3100, loss[loss=0.3285, simple_loss=0.3986, pruned_loss=0.1292, over 4649.00 frames. ], tot_loss[loss=0.334, simple_loss=0.3872, pruned_loss=0.1404, over 938306.69 frames. ], batch size: 16, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:53:39,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=17776.0, ans=0.125 +2024-07-27 12:53:43,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=17789.333333333332, ans=0.0 +2024-07-27 12:53:51,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=14.176 +2024-07-27 12:53:52,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=17802.666666666668, ans=0.125 +2024-07-27 12:54:01,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=17816.0, ans=0.12184 +2024-07-27 12:54:03,851 INFO [train.py:1114] (3/4) Epoch 2, batch 3150, loss[loss=0.3655, simple_loss=0.4219, pruned_loss=0.1546, over 4610.00 frames. ], tot_loss[loss=0.3315, simple_loss=0.3855, pruned_loss=0.1387, over 937898.47 frames. 
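The validation block above (`train.py:1137`/`1146`/`1147`) interleaves a full pass over the validation set with the training stream and also reports the peak GPU memory seen so far. A minimal sketch of that pattern, assuming a standard PyTorch eval loop; `model`, `valid_loader`, and the batch interface are placeholders, not this recipe's actual API:

```python
import torch

def compute_validation_loss(model, valid_loader, device="cuda:3"):
    """Run one pass over the validation set; a sketch of the logged pattern."""
    model.eval()
    total_loss, total_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)        # placeholder interface
            total_loss += loss.item() * num_frames
            total_frames += num_frames
    model.train()
    max_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={total_loss / total_frames:.4f}, "
          f"Maximum memory allocated so far is {max_mb}MB")
```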
], batch size: 17, lr: 3.09e-02, grad_scale: 32.0 +2024-07-27 12:54:08,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=17829.333333333332, ans=0.2759733333333335 +2024-07-27 12:54:12,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=17842.666666666668, ans=0.00699072463768116 +2024-07-27 12:54:14,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=17842.666666666668, ans=0.0 +2024-07-27 12:54:27,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.385e+01 6.845e+01 7.954e+01 1.765e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-27 12:54:36,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=17882.666666666668, ans=0.0069820289855072465 +2024-07-27 12:54:38,097 INFO [train.py:1114] (3/4) Epoch 2, batch 3200, loss[loss=0.3459, simple_loss=0.3941, pruned_loss=0.1488, over 4821.00 frames. ], tot_loss[loss=0.3295, simple_loss=0.3844, pruned_loss=0.1373, over 939415.57 frames. ], batch size: 13, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:54:42,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.88 vs. limit=14.211 +2024-07-27 12:54:50,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=17909.333333333332, ans=0.2731733333333335 +2024-07-27 12:54:51,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=17909.333333333332, ans=0.09899494936611666 +2024-07-27 12:54:53,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.01 vs. limit=20.932000000000002 +2024-07-27 12:54:57,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=17922.666666666668, ans=0.125 +2024-07-27 12:55:01,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=17922.666666666668, ans=0.125 +2024-07-27 12:55:09,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=17949.333333333332, ans=0.0 +2024-07-27 12:55:09,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=17949.333333333332, ans=0.2717733333333334 +2024-07-27 12:55:16,930 INFO [train.py:1114] (3/4) Epoch 2, batch 3250, loss[loss=0.3223, simple_loss=0.3833, pruned_loss=0.1306, over 4931.00 frames. ], tot_loss[loss=0.3308, simple_loss=0.3857, pruned_loss=0.138, over 940287.20 frames. 
], batch size: 14, lr: 3.08e-02, grad_scale: 32.0 +2024-07-27 12:55:37,084 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:55:46,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=17976.0, ans=0.125 +2024-07-27 12:55:49,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=17989.333333333332, ans=0.0 +2024-07-27 12:55:51,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=17989.333333333332, ans=0.27037333333333347 +2024-07-27 12:56:00,162 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.105e+01 6.706e+01 7.327e+01 8.227e+01 1.129e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 12:56:00,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=18002.666666666668, ans=0.025 +2024-07-27 12:56:00,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=18002.666666666668, ans=0.006955942028985507 +2024-07-27 12:56:03,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=18002.666666666668, ans=0.0 +2024-07-27 12:56:05,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=18016.0, ans=0.0 +2024-07-27 12:56:05,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=18016.0, ans=10.0 +2024-07-27 12:56:07,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.55 vs. limit=21.012 +2024-07-27 12:56:11,172 INFO [train.py:1114] (3/4) Epoch 2, batch 3300, loss[loss=0.3145, simple_loss=0.363, pruned_loss=0.1331, over 4735.00 frames. ], tot_loss[loss=0.3308, simple_loss=0.3853, pruned_loss=0.1382, over 940472.90 frames. ], batch size: 19, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:56:14,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=18029.333333333332, ans=0.05 +2024-07-27 12:56:16,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=18029.333333333332, ans=0.0 +2024-07-27 12:56:16,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.04 vs. limit=21.022 +2024-07-27 12:56:27,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=18056.0, ans=0.125 +2024-07-27 12:56:41,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=18082.666666666668, ans=0.125 +2024-07-27 12:56:42,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=13.07 vs. 
limit=14.041333333333334 +2024-07-27 12:56:46,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18096.0, ans=0.11904 +2024-07-27 12:56:47,388 INFO [train.py:1114] (3/4) Epoch 2, batch 3350, loss[loss=0.3252, simple_loss=0.3888, pruned_loss=0.1308, over 4637.00 frames. ], tot_loss[loss=0.3322, simple_loss=0.3859, pruned_loss=0.1392, over 938064.84 frames. ], batch size: 17, lr: 3.07e-02, grad_scale: 32.0 +2024-07-27 12:56:53,706 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:56:55,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=18109.333333333332, ans=0.0 +2024-07-27 12:57:09,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.21 vs. limit=14.301 +2024-07-27 12:57:10,885 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.417e+01 6.714e+01 7.318e+01 8.136e+01 2.148e+02, threshold=1.464e+02, percent-clipped=2.0 +2024-07-27 12:57:13,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.25 vs. limit=21.102 +2024-07-27 12:57:22,055 INFO [train.py:1114] (3/4) Epoch 2, batch 3400, loss[loss=0.2612, simple_loss=0.3154, pruned_loss=0.1035, over 4801.00 frames. ], tot_loss[loss=0.3331, simple_loss=0.3865, pruned_loss=0.1399, over 937040.15 frames. ], batch size: 11, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:57:35,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=18189.333333333332, ans=0.00691536231884058 +2024-07-27 12:57:44,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=18202.666666666668, ans=0.0 +2024-07-27 12:58:06,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.86 vs. limit=14.331 +2024-07-27 12:58:07,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=18229.333333333332, ans=0.2619733333333335 +2024-07-27 12:58:07,981 INFO [train.py:1114] (3/4) Epoch 2, batch 3450, loss[loss=0.3576, simple_loss=0.409, pruned_loss=0.1531, over 4708.00 frames. ], tot_loss[loss=0.3327, simple_loss=0.3868, pruned_loss=0.1393, over 937056.79 frames. ], batch size: 19, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:58:19,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=18242.666666666668, ans=0.125 +2024-07-27 12:58:35,294 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.586e+01 6.989e+01 7.796e+01 1.302e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 12:58:40,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=18282.666666666668, ans=0.125 +2024-07-27 12:58:46,673 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 12:58:48,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.55 vs. 
limit=14.356 +2024-07-27 12:58:48,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=18282.666666666668, ans=0.2601066666666667 +2024-07-27 12:58:50,960 INFO [train.py:1114] (3/4) Epoch 2, batch 3500, loss[loss=0.314, simple_loss=0.3732, pruned_loss=0.1274, over 4934.00 frames. ], tot_loss[loss=0.3319, simple_loss=0.3862, pruned_loss=0.1388, over 937738.67 frames. ], batch size: 12, lr: 3.06e-02, grad_scale: 32.0 +2024-07-27 12:59:04,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18309.333333333332, ans=0.11690666666666666 +2024-07-27 12:59:05,487 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.127e-02 +2024-07-27 12:59:08,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=18322.666666666668, ans=0.125 +2024-07-27 12:59:23,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.14 vs. limit=21.262 +2024-07-27 12:59:29,549 INFO [train.py:1114] (3/4) Epoch 2, batch 3550, loss[loss=0.3634, simple_loss=0.4124, pruned_loss=0.1572, over 4675.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.3867, pruned_loss=0.1395, over 938256.42 frames. ], batch size: 14, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 12:59:35,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.60 vs. limit=14.391 +2024-07-27 12:59:39,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=18376.0, ans=0.0 +2024-07-27 12:59:40,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.09 vs. limit=21.282 +2024-07-27 12:59:40,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=18376.0, ans=0.125 +2024-07-27 12:59:43,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=18389.333333333332, ans=14.396 +2024-07-27 12:59:47,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=18389.333333333332, ans=0.25637333333333345 +2024-07-27 12:59:49,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=18389.333333333332, ans=0.125 +2024-07-27 12:59:51,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=18402.666666666668, ans=0.0 +2024-07-27 12:59:53,859 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.354e+01 6.416e+01 6.884e+01 7.445e+01 1.050e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 13:00:01,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.75 vs. 
limit=14.405999999999999 +2024-07-27 13:00:03,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=18416.0, ans=0.0 +2024-07-27 13:00:04,807 INFO [train.py:1114] (3/4) Epoch 2, batch 3600, loss[loss=0.2648, simple_loss=0.3319, pruned_loss=0.09886, over 4957.00 frames. ], tot_loss[loss=0.3312, simple_loss=0.3855, pruned_loss=0.1385, over 939899.43 frames. ], batch size: 13, lr: 3.05e-02, grad_scale: 32.0 +2024-07-27 13:00:14,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=18442.666666666668, ans=0.25450666666666666 +2024-07-27 13:00:16,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.88 vs. limit=21.332 +2024-07-27 13:00:19,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=18456.0, ans=0.0068573913043478265 +2024-07-27 13:00:19,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=18456.0, ans=0.125 +2024-07-27 13:00:20,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=18456.0, ans=0.125 +2024-07-27 13:00:27,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18469.333333333332, ans=0.11530666666666667 +2024-07-27 13:00:46,941 INFO [train.py:1114] (3/4) Epoch 2, batch 3650, loss[loss=0.2665, simple_loss=0.3502, pruned_loss=0.09135, over 4889.00 frames. ], tot_loss[loss=0.3288, simple_loss=0.3833, pruned_loss=0.1371, over 940876.49 frames. ], batch size: 15, lr: 3.04e-02, grad_scale: 32.0 +2024-07-27 13:00:50,689 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.38 vs. limit=21.372 +2024-07-27 13:00:53,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.58 vs. limit=21.372 +2024-07-27 13:01:01,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=18509.333333333332, ans=0.025 +2024-07-27 13:01:20,524 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.247e+01 6.612e+01 7.129e+01 7.786e+01 1.024e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 13:01:23,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=18536.0, ans=0.125 +2024-07-27 13:01:25,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=18549.333333333332, ans=0.2507733333333335 +2024-07-27 13:01:27,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.69 vs. limit=21.412 +2024-07-27 13:01:30,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.44 vs. limit=9.637333333333332 +2024-07-27 13:01:32,251 INFO [train.py:1114] (3/4) Epoch 2, batch 3700, loss[loss=0.3893, simple_loss=0.4421, pruned_loss=0.1682, over 4928.00 frames. 
], tot_loss[loss=0.3285, simple_loss=0.3832, pruned_loss=0.1369, over 942133.05 frames. ], batch size: 14, lr: 3.04e-02, grad_scale: 32.0 +2024-07-27 13:01:36,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=18562.666666666668, ans=0.125 +2024-07-27 13:01:40,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=18576.0, ans=0.07 +2024-07-27 13:01:42,088 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:01:52,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=18589.333333333332, ans=0.00682840579710145 +2024-07-27 13:02:25,805 INFO [train.py:1114] (3/4) Epoch 2, batch 3750, loss[loss=0.2909, simple_loss=0.3459, pruned_loss=0.118, over 4812.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3817, pruned_loss=0.1358, over 943446.33 frames. ], batch size: 11, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:02:57,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.04 vs. limit=21.482 +2024-07-27 13:02:58,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.77 vs. limit=21.482 +2024-07-27 13:03:33,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=18669.333333333332, ans=0.0 +2024-07-27 13:03:37,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=18669.333333333332, ans=0.0 +2024-07-27 13:03:38,405 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.486e+01 7.051e+01 7.963e+01 1.237e+02, threshold=1.410e+02, percent-clipped=0.0 +2024-07-27 13:03:56,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.97 vs. limit=14.506 +2024-07-27 13:04:17,942 INFO [train.py:1114] (3/4) Epoch 2, batch 3800, loss[loss=0.2987, simple_loss=0.3653, pruned_loss=0.116, over 4811.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.3804, pruned_loss=0.1355, over 941826.17 frames. ], batch size: 14, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:04:23,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=18696.0, ans=0.125 +2024-07-27 13:04:26,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=18709.333333333332, ans=0.125 +2024-07-27 13:05:35,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18736.0, ans=0.11263999999999999 +2024-07-27 13:07:02,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=18749.333333333332, ans=0.125 +2024-07-27 13:07:10,271 INFO [train.py:1114] (3/4) Epoch 2, batch 3850, loss[loss=0.3142, simple_loss=0.3869, pruned_loss=0.1207, over 4603.00 frames. ], tot_loss[loss=0.3225, simple_loss=0.3782, pruned_loss=0.1334, over 942142.51 frames. 
], batch size: 16, lr: 3.03e-02, grad_scale: 32.0 +2024-07-27 13:07:54,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=14.541 +2024-07-27 13:08:10,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=18776.0, ans=0.11224 +2024-07-27 13:08:12,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=18789.333333333332, ans=0.125 +2024-07-27 13:08:16,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=18789.333333333332, ans=0.006784927536231885 +2024-07-27 13:08:21,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=18802.666666666668, ans=0.24190666666666671 +2024-07-27 13:08:23,995 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.367e+01 6.538e+01 7.102e+01 7.754e+01 1.153e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 13:08:30,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=18802.666666666668, ans=0.125 +2024-07-27 13:08:36,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=18816.0, ans=0.2414400000000001 +2024-07-27 13:08:38,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18816.0, ans=0.11184 +2024-07-27 13:08:41,624 INFO [train.py:1114] (3/4) Epoch 2, batch 3900, loss[loss=0.3542, simple_loss=0.4046, pruned_loss=0.1519, over 4813.00 frames. ], tot_loss[loss=0.3242, simple_loss=0.3798, pruned_loss=0.1343, over 942500.80 frames. ], batch size: 14, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:08:47,436 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=11.96 vs. limit=9.707333333333333 +2024-07-27 13:09:28,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=18856.0, ans=0.125 +2024-07-27 13:09:34,331 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=14.576 +2024-07-27 13:09:45,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.20 vs. limit=14.581 +2024-07-27 13:09:52,408 INFO [train.py:1114] (3/4) Epoch 2, batch 3950, loss[loss=0.3372, simple_loss=0.3938, pruned_loss=0.1403, over 4830.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3799, pruned_loss=0.1343, over 944372.85 frames. ], batch size: 16, lr: 3.02e-02, grad_scale: 32.0 +2024-07-27 13:09:55,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18896.0, ans=0.11104 +2024-07-27 13:10:28,275 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.314e+01 6.596e+01 7.241e+01 7.988e+01 1.615e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 13:11:05,249 INFO [train.py:1114] (3/4) Epoch 2, batch 4000, loss[loss=0.3627, simple_loss=0.4149, pruned_loss=0.1553, over 4778.00 frames. 
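The learning rate in these records decays slowly (3.20e-02 at the top of this stretch down toward 3.01e-02 here) across a few thousand batches, consistent with a scheduler that decays in both the batch index and the epoch index. A sketch of an Eden-style schedule of that shape; the base LR and the `lr_batches`/`lr_epochs` constants below are assumptions, not values read from this run:

```python
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    """Eden-style schedule (assumed constants): decays smoothly in both
    the batch index and the epoch index."""
    b = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    e = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * b * e

print(f"lr: {eden_lr(0.045, 16256, 2):.2e}")  # same order as the logged 3.2e-02
```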
], tot_loss[loss=0.3256, simple_loss=0.3806, pruned_loss=0.1353, over 941125.33 frames. ], batch size: 12, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:11:11,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=18962.666666666668, ans=0.48444000000000004 +2024-07-27 13:11:13,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=18976.0, ans=0.125 +2024-07-27 13:11:15,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=18976.0, ans=0.125 +2024-07-27 13:11:15,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=18976.0, ans=0.11024 +2024-07-27 13:11:28,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=19002.666666666668, ans=0.0 +2024-07-27 13:11:37,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=19016.0, ans=0.09899494936611666 +2024-07-27 13:12:03,972 INFO [train.py:1114] (3/4) Epoch 2, batch 4050, loss[loss=0.4323, simple_loss=0.4486, pruned_loss=0.208, over 3493.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3806, pruned_loss=0.1352, over 939807.67 frames. ], batch size: 35, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:12,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=19042.666666666668, ans=0.0067298550724637675 +2024-07-27 13:12:27,060 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.071e+01 6.599e+01 7.309e+01 8.116e+01 1.221e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 13:12:29,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=19069.333333333332, ans=10.0 +2024-07-27 13:12:30,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19069.333333333332, ans=0.10930666666666666 +2024-07-27 13:12:31,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.54 vs. limit=14.655999999999999 +2024-07-27 13:12:39,185 INFO [train.py:1114] (3/4) Epoch 2, batch 4100, loss[loss=0.3779, simple_loss=0.4407, pruned_loss=0.1576, over 4916.00 frames. ], tot_loss[loss=0.329, simple_loss=0.383, pruned_loss=0.1376, over 938682.50 frames. ], batch size: 15, lr: 3.01e-02, grad_scale: 32.0 +2024-07-27 13:12:47,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=6.23 vs. limit=14.666 +2024-07-27 13:12:51,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=11.643733333333333 +2024-07-27 13:12:52,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=14.666 +2024-07-27 13:12:52,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=13.31 vs. 
limit=14.554666666666666
+2024-07-27 13:13:02,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=19136.0, ans=0.0
+2024-07-27 13:13:09,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=19149.333333333332, ans=0.9414933333333333
+2024-07-27 13:13:09,288 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.91 vs. limit=14.681000000000001
+2024-07-27 13:13:14,992 INFO [train.py:1114] (3/4) Epoch 2, batch 4150, loss[loss=0.3321, simple_loss=0.3809, pruned_loss=0.1416, over 4828.00 frames. ], tot_loss[loss=0.3275, simple_loss=0.382, pruned_loss=0.1365, over 938649.13 frames. ], batch size: 13, lr: 3.00e-02, grad_scale: 32.0
+2024-07-27 13:13:15,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19162.666666666668, ans=0.125
+2024-07-27 13:13:22,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.94 vs. limit=14.588
+2024-07-27 13:13:27,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=19176.0, ans=0.0
+2024-07-27 13:13:30,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=19189.333333333332, ans=0.2
+2024-07-27 13:13:44,556 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.337e+01 6.945e+01 7.844e+01 2.237e+02, threshold=1.389e+02, percent-clipped=1.0
+2024-07-27 13:13:47,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=19202.666666666668, ans=0.0
+2024-07-27 13:13:56,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=19216.0, ans=0.125
+2024-07-27 13:13:58,215 INFO [train.py:1114] (3/4) Epoch 2, batch 4200, loss[loss=0.3653, simple_loss=0.4245, pruned_loss=0.1531, over 4902.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3814, pruned_loss=0.1356, over 940254.87 frames. ], batch size: 15, lr: 3.00e-02, grad_scale: 32.0
+2024-07-27 13:14:01,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=19229.333333333332, ans=0.0
+2024-07-27 13:14:23,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=19269.333333333332, ans=0.22557333333333351
+2024-07-27 13:14:30,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=19282.666666666668, ans=0.22510666666666668
+2024-07-27 13:14:32,753 INFO [train.py:1114] (3/4) Epoch 2, batch 4250, loss[loss=0.3173, simple_loss=0.3755, pruned_loss=0.1295, over 4631.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3813, pruned_loss=0.1349, over 941381.43 frames. ], batch size: 12, lr: 2.99e-02, grad_scale: 32.0
+2024-07-27 13:14:32,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=19296.0, ans=0.006674782608695652
+2024-07-27 13:14:44,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.96 vs. limit=14.741
+2024-07-27 13:14:55,983 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.180e+01 6.301e+01 6.853e+01 7.797e+01 1.151e+02, threshold=1.371e+02, percent-clipped=0.0
+2024-07-27 13:15:06,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19362.666666666668, ans=0.0
+2024-07-27 13:15:06,763 INFO [train.py:1114] (3/4) Epoch 2, batch 4300, loss[loss=0.3463, simple_loss=0.3841, pruned_loss=0.1543, over 4765.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.381, pruned_loss=0.1347, over 940426.83 frames. ], batch size: 13, lr: 2.99e-02, grad_scale: 32.0
+2024-07-27 13:15:13,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=19362.666666666668, ans=0.125
+2024-07-27 13:15:13,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=19362.666666666668, ans=0.125
+2024-07-27 13:15:17,467 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:15:19,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19376.0, ans=0.125
+2024-07-27 13:15:21,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.96 vs. limit=14.766
+2024-07-27 13:15:34,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=19402.666666666668, ans=0.2209066666666667
+2024-07-27 13:15:37,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=19402.666666666668, ans=0.0
+2024-07-27 13:15:37,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=19402.666666666668, ans=0.0
+2024-07-27 13:15:38,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=19416.0, ans=0.125
+2024-07-27 13:15:39,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=19416.0, ans=0.10583999999999999
+2024-07-27 13:15:39,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.88 vs. limit=11.7664
+2024-07-27 13:15:42,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.92 vs. limit=14.780999999999999
+2024-07-27 13:15:45,767 INFO [train.py:1114] (3/4) Epoch 2, batch 4350, loss[loss=0.3309, simple_loss=0.3826, pruned_loss=0.1396, over 4755.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3816, pruned_loss=0.1347, over 941352.46 frames. ], batch size: 13, lr: 2.98e-02, grad_scale: 32.0
+2024-07-27 13:15:53,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.90 vs. limit=9.860666666666667
+2024-07-27 13:16:06,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=19469.333333333332, ans=0.025
+2024-07-27 13:16:09,152 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.368e+01 6.866e+01 7.654e+01 1.225e+02, threshold=1.373e+02, percent-clipped=0.0
+2024-07-27 13:16:12,330 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=14.801
+2024-07-27 13:16:14,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=19482.666666666668, ans=0.0
+2024-07-27 13:16:17,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=19482.666666666668, ans=0.125
+2024-07-27 13:16:21,962 INFO [train.py:1114] (3/4) Epoch 2, batch 4400, loss[loss=0.3616, simple_loss=0.4149, pruned_loss=0.1541, over 4806.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3826, pruned_loss=0.1355, over 940967.80 frames. ], batch size: 14, lr: 2.98e-02, grad_scale: 32.0
+2024-07-27 13:16:22,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=19496.0, ans=0.125
+2024-07-27 13:16:38,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19522.666666666668, ans=0.10477333333333333
+2024-07-27 13:16:47,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=19536.0, ans=0.94536
+2024-07-27 13:16:50,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=19549.333333333332, ans=0.0
+2024-07-27 13:16:56,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=19549.333333333332, ans=0.125
+2024-07-27 13:16:57,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=19562.666666666668, ans=0.21530666666666665
+2024-07-27 13:16:58,313 INFO [train.py:1114] (3/4) Epoch 2, batch 4450, loss[loss=0.3633, simple_loss=0.3997, pruned_loss=0.1635, over 4929.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3833, pruned_loss=0.1363, over 938946.06 frames. ], batch size: 12, lr: 2.98e-02, grad_scale: 32.0
+2024-07-27 13:17:06,755 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.60 vs. limit=14.788
+2024-07-27 13:17:07,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.07 vs. limit=22.182000000000002
+2024-07-27 13:17:13,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=19589.333333333332, ans=0.125
+2024-07-27 13:17:15,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=19589.333333333332, ans=0.125
+2024-07-27 13:17:16,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.89 vs. limit=11.835733333333334
+2024-07-27 13:17:16,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=19589.333333333332, ans=0.21437333333333342
+2024-07-27 13:17:20,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. limit=14.846
+2024-07-27 13:17:31,925 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.227e+01 6.384e+01 6.851e+01 7.779e+01 1.148e+02, threshold=1.370e+02, percent-clipped=0.0
+2024-07-27 13:17:42,967 INFO [train.py:1114] (3/4) Epoch 2, batch 4500, loss[loss=0.3192, simple_loss=0.3833, pruned_loss=0.1276, over 4739.00 frames. ], tot_loss[loss=0.3271, simple_loss=0.3829, pruned_loss=0.1356, over 938120.81 frames. ], batch size: 14, lr: 2.97e-02, grad_scale: 32.0
+2024-07-27 13:17:50,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.05 vs. limit=22.232
+2024-07-27 13:17:51,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=19642.666666666668, ans=0.125
+2024-07-27 13:17:54,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19642.666666666668, ans=0.10357333333333332
+2024-07-27 13:17:56,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19656.0, ans=0.10344
+2024-07-27 13:18:08,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=19669.333333333332, ans=0.125
+2024-07-27 13:18:13,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=19682.666666666668, ans=0.10317333333333331
+2024-07-27 13:18:17,181 INFO [train.py:1114] (3/4) Epoch 2, batch 4550, loss[loss=0.324, simple_loss=0.384, pruned_loss=0.132, over 4890.00 frames. ], tot_loss[loss=0.3265, simple_loss=0.3824, pruned_loss=0.1353, over 939986.24 frames. ], batch size: 13, lr: 2.97e-02, grad_scale: 32.0
+2024-07-27 13:18:25,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.55 vs. limit=14.886
+2024-07-27 13:18:27,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=19709.333333333332, ans=0.125
+2024-07-27 13:18:30,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.15 vs. limit=14.891
+2024-07-27 13:18:35,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=19722.666666666668, ans=0.0
+2024-07-27 13:18:41,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.17 vs. limit=14.901
+2024-07-27 13:18:43,565 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 6.563e+01 7.303e+01 8.334e+01 1.051e+02, threshold=1.461e+02, percent-clipped=0.0
+2024-07-27 13:18:54,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=19749.333333333332, ans=0.0
+2024-07-27 13:18:57,900 INFO [train.py:1114] (3/4) Epoch 2, batch 4600, loss[loss=0.3043, simple_loss=0.3759, pruned_loss=0.1163, over 4482.00 frames. ], tot_loss[loss=0.3245, simple_loss=0.3807, pruned_loss=0.1342, over 938142.85 frames. ], batch size: 21, lr: 2.96e-02, grad_scale: 64.0
+2024-07-27 13:19:01,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=19762.666666666668, ans=0.20830666666666664
+2024-07-27 13:19:03,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=19762.666666666668, ans=0.9476266666666666
+2024-07-27 13:19:06,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.48 vs. limit=22.332
+2024-07-27 13:19:21,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=19802.666666666668, ans=0.025
+2024-07-27 13:19:22,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=19802.666666666668, ans=0.125
+2024-07-27 13:19:26,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=19816.0, ans=0.0
+2024-07-27 13:19:29,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=19816.0, ans=0.125
+2024-07-27 13:19:31,759 INFO [train.py:1114] (3/4) Epoch 2, batch 4650, loss[loss=0.339, simple_loss=0.4077, pruned_loss=0.1351, over 4848.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3818, pruned_loss=0.134, over 939583.97 frames. ], batch size: 16, lr: 2.96e-02, grad_scale: 64.0
+2024-07-27 13:19:35,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=19829.333333333332, ans=0.006558840579710146
+2024-07-27 13:19:36,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=15.16 vs. limit=14.936
+2024-07-27 13:19:38,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=19842.666666666668, ans=0.0
+2024-07-27 13:19:39,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=19842.666666666668, ans=0.10157333333333332
+2024-07-27 13:19:44,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=19856.0, ans=0.00655304347826087
+2024-07-27 13:19:50,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=19856.0, ans=0.00655304347826087
+2024-07-27 13:19:54,897 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.553e+01 6.585e+01 7.200e+01 8.002e+01 1.335e+02, threshold=1.440e+02, percent-clipped=0.0
+2024-07-27 13:20:05,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=19896.0, ans=0.125
+2024-07-27 13:20:06,199 INFO [train.py:1114] (3/4) Epoch 2, batch 4700, loss[loss=0.3075, simple_loss=0.3612, pruned_loss=0.1269, over 4702.00 frames. ], tot_loss[loss=0.3251, simple_loss=0.3816, pruned_loss=0.1344, over 937198.87 frames. ], batch size: 11, lr: 2.96e-02, grad_scale: 64.0
+2024-07-27 13:20:10,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=19896.0, ans=0.125
+2024-07-27 13:20:16,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=19909.333333333332, ans=0.125
+2024-07-27 13:20:16,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=19909.333333333332, ans=0.20317333333333343
+2024-07-27 13:20:55,541 INFO [train.py:1114] (3/4) Epoch 2, batch 4750, loss[loss=0.3961, simple_loss=0.4394, pruned_loss=0.1764, over 4470.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3824, pruned_loss=0.1349, over 935037.86 frames. ], batch size: 21, lr: 2.95e-02, grad_scale: 64.0
+2024-07-27 13:20:59,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.96 vs. limit=14.981333333333334
+2024-07-27 13:21:03,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=19976.0, ans=0.20084000000000013
+2024-07-27 13:21:05,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=19976.0, ans=0.0
+2024-07-27 13:21:19,838 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.354e+01 6.910e+01 7.839e+01 1.849e+02, threshold=1.382e+02, percent-clipped=1.0
+2024-07-27 13:21:30,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=20029.333333333332, ans=0.2
+2024-07-27 13:21:31,071 INFO [train.py:1114] (3/4) Epoch 2, batch 4800, loss[loss=0.3597, simple_loss=0.4036, pruned_loss=0.1579, over 4686.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3832, pruned_loss=0.1361, over 932594.96 frames. ], batch size: 13, lr: 2.95e-02, grad_scale: 64.0
+2024-07-27 13:21:33,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.30 vs. limit=15.0
+2024-07-27 13:21:34,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=20029.333333333332, ans=0.2
+2024-07-27 13:21:37,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=20042.666666666668, ans=0.125
+2024-07-27 13:21:39,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.20 vs. limit=10.0
+2024-07-27 13:21:39,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20042.666666666668, ans=0.1
+2024-07-27 13:21:41,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.93 vs. limit=10.0
+2024-07-27 13:21:45,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=20056.0, ans=0.006509565217391305
+2024-07-27 13:21:47,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20056.0, ans=0.1
+2024-07-27 13:21:50,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=20056.0, ans=0.125
+2024-07-27 13:21:55,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20069.333333333332, ans=0.1
+2024-07-27 13:22:01,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.67 vs. limit=10.0
+2024-07-27 13:22:05,290 INFO [train.py:1114] (3/4) Epoch 2, batch 4850, loss[loss=0.3775, simple_loss=0.4337, pruned_loss=0.1606, over 4754.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3835, pruned_loss=0.1362, over 932079.26 frames. ], batch size: 14, lr: 2.95e-02, grad_scale: 64.0
+2024-07-27 13:22:09,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=20096.0, ans=0.125
+2024-07-27 13:22:25,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20122.666666666668, ans=0.0
+2024-07-27 13:22:25,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=20122.666666666668, ans=0.125
+2024-07-27 13:22:33,552 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.340e+01 6.424e+01 6.890e+01 7.552e+01 1.246e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 13:22:38,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=20149.333333333332, ans=0.125
+2024-07-27 13:22:40,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=20149.333333333332, ans=0.125
+2024-07-27 13:22:41,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.40 vs. limit=6.0
+2024-07-27 13:22:43,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=17.69 vs. limit=15.0
+2024-07-27 13:22:46,563 INFO [train.py:1114] (3/4) Epoch 2, batch 4900, loss[loss=0.3174, simple_loss=0.3732, pruned_loss=0.1308, over 4750.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.381, pruned_loss=0.1344, over 934005.43 frames. ], batch size: 13, lr: 2.94e-02, grad_scale: 64.0
+2024-07-27 13:22:46,868 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0
+2024-07-27 13:23:02,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20189.333333333332, ans=0.1
+2024-07-27 13:23:08,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=20202.666666666668, ans=0.125
+2024-07-27 13:23:17,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.29 vs. limit=6.0
+2024-07-27 13:23:25,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20216.0, ans=0.125
+2024-07-27 13:23:27,086 INFO [train.py:1114] (3/4) Epoch 2, batch 4950, loss[loss=0.4362, simple_loss=0.4462, pruned_loss=0.2131, over 3275.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3831, pruned_loss=0.1362, over 931169.72 frames. ], batch size: 35, lr: 2.94e-02, grad_scale: 64.0
+2024-07-27 13:23:35,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=20242.666666666668, ans=0.125
+2024-07-27 13:23:39,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=12.19 vs. limit=15.0
+2024-07-27 13:23:40,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.13 vs. limit=12.0
+2024-07-27 13:23:41,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=20256.0, ans=0.125
+2024-07-27 13:23:41,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.94 vs. limit=12.0
+2024-07-27 13:23:51,053 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.541e+01 7.146e+01 7.949e+01 1.013e+02, threshold=1.429e+02, percent-clipped=0.0
+2024-07-27 13:23:54,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=20282.666666666668, ans=0.2
+2024-07-27 13:24:01,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.59 vs. limit=12.0
+2024-07-27 13:24:01,985 INFO [train.py:1114] (3/4) Epoch 2, batch 5000, loss[loss=0.271, simple_loss=0.3446, pruned_loss=0.09872, over 4657.00 frames. ], tot_loss[loss=0.3236, simple_loss=0.38, pruned_loss=0.1336, over 935051.11 frames. ], batch size: 14, lr: 2.93e-02, grad_scale: 64.0
+2024-07-27 13:24:11,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=20309.333333333332, ans=0.125
+2024-07-27 13:24:13,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=20309.333333333332, ans=0.2
+2024-07-27 13:24:23,444 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=1.127e-02
+2024-07-27 13:24:36,477 INFO [train.py:1114] (3/4) Epoch 2, batch 5050, loss[loss=0.2204, simple_loss=0.2917, pruned_loss=0.07455, over 4849.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3781, pruned_loss=0.1322, over 937440.22 frames. ], batch size: 12, lr: 2.93e-02, grad_scale: 64.0
+2024-07-27 13:24:44,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=20376.0, ans=0.125
+2024-07-27 13:24:55,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20389.333333333332, ans=0.1
+2024-07-27 13:24:58,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=20402.666666666668, ans=0.125
+2024-07-27 13:25:00,638 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.416e+01 7.117e+01 7.818e+01 1.344e+02, threshold=1.423e+02, percent-clipped=0.0
+2024-07-27 13:25:11,751 INFO [train.py:1114] (3/4) Epoch 2, batch 5100, loss[loss=0.3283, simple_loss=0.3721, pruned_loss=0.1423, over 4778.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3782, pruned_loss=0.1327, over 935375.56 frames. ], batch size: 12, lr: 2.93e-02, grad_scale: 64.0
+2024-07-27 13:25:11,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=20429.333333333332, ans=0.1
+2024-07-27 13:25:16,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=20429.333333333332, ans=0.125
+2024-07-27 13:25:23,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=20442.666666666668, ans=0.2
+2024-07-27 13:25:26,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=20456.0, ans=0.2
+2024-07-27 13:25:27,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=20456.0, ans=0.0
+2024-07-27 13:25:28,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=20456.0, ans=0.125
+2024-07-27 13:25:29,097 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.51 vs. limit=15.0
+2024-07-27 13:25:35,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.75 vs. limit=22.5
+2024-07-27 13:25:46,240 INFO [train.py:1114] (3/4) Epoch 2, batch 5150, loss[loss=0.3176, simple_loss=0.3918, pruned_loss=0.1217, over 4832.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3794, pruned_loss=0.1337, over 936271.18 frames. ], batch size: 16, lr: 2.92e-02, grad_scale: 64.0
+2024-07-27 13:25:53,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20509.333333333332, ans=0.1
+2024-07-27 13:25:55,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20509.333333333332, ans=0.1
+2024-07-27 13:26:09,644 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.469e+01 6.536e+01 7.424e+01 8.253e+01 1.032e+02, threshold=1.485e+02, percent-clipped=0.0
+2024-07-27 13:26:19,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=20549.333333333332, ans=0.2
+2024-07-27 13:26:19,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=20549.333333333332, ans=0.0
+2024-07-27 13:26:20,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=20562.666666666668, ans=0.006399420289855072
+2024-07-27 13:26:21,112 INFO [train.py:1114] (3/4) Epoch 2, batch 5200, loss[loss=0.302, simple_loss=0.3746, pruned_loss=0.1147, over 4663.00 frames. ], tot_loss[loss=0.3228, simple_loss=0.3794, pruned_loss=0.1331, over 936132.30 frames. ], batch size: 14, lr: 2.92e-02, grad_scale: 64.0
+2024-07-27 13:26:25,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.86 vs. limit=22.5
+2024-07-27 13:26:26,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=20562.666666666668, ans=0.09899494936611666
+2024-07-27 13:26:30,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=20576.0, ans=10.0
+2024-07-27 13:26:31,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.90 vs. limit=15.0
+2024-07-27 13:26:33,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=20576.0, ans=0.2
+2024-07-27 13:26:34,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=20589.333333333332, ans=0.0
+2024-07-27 13:26:37,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.61 vs. limit=15.0
+2024-07-27 13:26:42,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=20602.666666666668, ans=0.2
+2024-07-27 13:26:50,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=38.48 vs. limit=15.0
+2024-07-27 13:26:55,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=20616.0, ans=0.125
+2024-07-27 13:26:55,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=20616.0, ans=0.025
+2024-07-27 13:26:58,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.57 vs. limit=22.5
+2024-07-27 13:27:00,330 INFO [train.py:1114] (3/4) Epoch 2, batch 5250, loss[loss=0.3816, simple_loss=0.4258, pruned_loss=0.1687, over 4894.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.378, pruned_loss=0.1321, over 936001.19 frames. ], batch size: 13, lr: 2.91e-02, grad_scale: 64.0
+2024-07-27 13:27:03,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=20629.333333333332, ans=0.0
+2024-07-27 13:27:10,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=20642.666666666668, ans=0.125
+2024-07-27 13:27:11,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=20642.666666666668, ans=0.2
+2024-07-27 13:27:24,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=20669.333333333332, ans=0.125
+2024-07-27 13:27:27,949 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 6.506e+01 7.005e+01 7.765e+01 1.418e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 13:27:33,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0
+2024-07-27 13:27:40,045 INFO [train.py:1114] (3/4) Epoch 2, batch 5300, loss[loss=0.2905, simple_loss=0.3619, pruned_loss=0.1095, over 4644.00 frames. ], tot_loss[loss=0.323, simple_loss=0.379, pruned_loss=0.1335, over 934509.47 frames. ], batch size: 16, lr: 2.91e-02, grad_scale: 64.0
+2024-07-27 13:27:40,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=20696.0, ans=0.0
+2024-07-27 13:27:57,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20722.666666666668, ans=0.1
+2024-07-27 13:28:02,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=20736.0, ans=0.2
+2024-07-27 13:28:02,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.50 vs. limit=15.0
+2024-07-27 13:28:10,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=20749.333333333332, ans=0.125
+2024-07-27 13:28:11,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0
+2024-07-27 13:28:15,713 INFO [train.py:1114] (3/4) Epoch 2, batch 5350, loss[loss=0.313, simple_loss=0.376, pruned_loss=0.125, over 4565.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3788, pruned_loss=0.1329, over 936531.19 frames. ], batch size: 10, lr: 2.91e-02, grad_scale: 64.0
+2024-07-27 13:28:33,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=20789.333333333332, ans=0.2
+2024-07-27 13:28:41,454 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.375e+01 6.982e+01 7.841e+01 1.512e+02, threshold=1.396e+02, percent-clipped=1.0
+2024-07-27 13:28:54,560 INFO [train.py:1114] (3/4) Epoch 2, batch 5400, loss[loss=0.4017, simple_loss=0.4366, pruned_loss=0.1834, over 4218.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3809, pruned_loss=0.135, over 930381.33 frames. ], batch size: 25, lr: 2.90e-02, grad_scale: 64.0
+2024-07-27 13:28:56,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.01 vs. limit=15.0
+2024-07-27 13:28:59,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.74 vs. limit=22.5
+2024-07-27 13:29:12,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=20856.0, ans=0.125
+2024-07-27 13:29:13,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=20856.0, ans=0.125
+2024-07-27 13:29:14,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=20856.0, ans=0.125
+2024-07-27 13:29:18,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=20869.333333333332, ans=0.125
+2024-07-27 13:29:26,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=20882.666666666668, ans=0.006329855072463768
+2024-07-27 13:29:31,763 INFO [train.py:1114] (3/4) Epoch 2, batch 5450, loss[loss=0.2641, simple_loss=0.3189, pruned_loss=0.1046, over 4710.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3793, pruned_loss=0.1337, over 933027.10 frames. ], batch size: 11, lr: 2.90e-02, grad_scale: 64.0
+2024-07-27 13:29:35,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=20896.0, ans=0.1
+2024-07-27 13:29:42,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=20909.333333333332, ans=0.0
+2024-07-27 13:29:54,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=20936.0, ans=0.125
+2024-07-27 13:29:55,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=20936.0, ans=0.0
+2024-07-27 13:29:55,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.61 vs. limit=22.5
+2024-07-27 13:29:55,871 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.377e+01 6.925e+01 7.766e+01 1.521e+02, threshold=1.385e+02, percent-clipped=1.0
+2024-07-27 13:29:57,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=20936.0, ans=0.125
+2024-07-27 13:29:59,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=20949.333333333332, ans=0.2
+2024-07-27 13:30:06,861 INFO [train.py:1114] (3/4) Epoch 2, batch 5500, loss[loss=0.4065, simple_loss=0.4451, pruned_loss=0.1839, over 4286.00 frames. ], tot_loss[loss=0.3235, simple_loss=0.379, pruned_loss=0.134, over 930923.80 frames. ], batch size: 25, lr: 2.90e-02, grad_scale: 64.0
+2024-07-27 13:30:11,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=20962.666666666668, ans=0.125
+2024-07-27 13:30:26,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=20989.333333333332, ans=0.125
+2024-07-27 13:30:28,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21002.666666666668, ans=0.125
+2024-07-27 13:30:29,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=21002.666666666668, ans=0.0
+2024-07-27 13:30:41,370 INFO [train.py:1114] (3/4) Epoch 2, batch 5550, loss[loss=0.307, simple_loss=0.3518, pruned_loss=0.1311, over 4711.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3778, pruned_loss=0.1329, over 933136.22 frames. ], batch size: 12, lr: 2.89e-02, grad_scale: 64.0
+2024-07-27 13:30:47,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=21042.666666666668, ans=0.125
+2024-07-27 13:30:54,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=21056.0, ans=0.125
+2024-07-27 13:30:58,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=21056.0, ans=0.125
+2024-07-27 13:31:01,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=21069.333333333332, ans=0.04949747468305833
+2024-07-27 13:31:02,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=21069.333333333332, ans=0.0
+2024-07-27 13:31:04,810 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.723e+01 6.613e+01 7.499e+01 8.477e+01 2.130e+02, threshold=1.500e+02, percent-clipped=3.0
+2024-07-27 13:31:10,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21082.666666666668, ans=0.1
+2024-07-27 13:31:14,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21082.666666666668, ans=0.125
+2024-07-27 13:31:15,951 INFO [train.py:1114] (3/4) Epoch 2, batch 5600, loss[loss=0.3542, simple_loss=0.4026, pruned_loss=0.1529, over 4743.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.378, pruned_loss=0.1333, over 934152.13 frames. ], batch size: 14, lr: 2.89e-02, grad_scale: 64.0
+2024-07-27 13:31:24,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-27 13:31:29,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=21122.666666666668, ans=0.025
+2024-07-27 13:31:32,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.94 vs. limit=6.0
+2024-07-27 13:31:35,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=21122.666666666668, ans=0.125
+2024-07-27 13:31:36,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.34 vs. limit=15.0
+2024-07-27 13:31:39,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=21136.0, ans=0.2
+2024-07-27 13:31:42,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21136.0, ans=0.1
+2024-07-27 13:31:50,639 INFO [train.py:1114] (3/4) Epoch 2, batch 5650, loss[loss=0.3578, simple_loss=0.4048, pruned_loss=0.1554, over 4431.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3765, pruned_loss=0.1316, over 936518.45 frames. ], batch size: 21, lr: 2.88e-02, grad_scale: 64.0
+2024-07-27 13:32:10,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=21189.333333333332, ans=0.0
+2024-07-27 13:32:10,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=12.0
+2024-07-27 13:32:14,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.661e+01 6.325e+01 6.816e+01 7.626e+01 1.168e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-27 13:32:14,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=21202.666666666668, ans=0.125
+2024-07-27 13:32:17,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.65 vs. limit=15.0
+2024-07-27 13:32:18,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=21216.0, ans=0.125
+2024-07-27 13:32:19,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=21216.0, ans=10.0
+2024-07-27 13:32:25,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.07 vs. limit=15.0
+2024-07-27 13:32:25,397 INFO [train.py:1114] (3/4) Epoch 2, batch 5700, loss[loss=0.3072, simple_loss=0.3797, pruned_loss=0.1174, over 4698.00 frames. ], tot_loss[loss=0.3185, simple_loss=0.376, pruned_loss=0.1305, over 937678.15 frames. ], batch size: 13, lr: 2.88e-02, grad_scale: 64.0
+2024-07-27 13:32:31,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21229.333333333332, ans=0.125
+2024-07-27 13:32:38,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=21242.666666666668, ans=0.006251594202898551
+2024-07-27 13:32:42,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.00 vs. limit=6.0
+2024-07-27 13:32:51,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.71 vs. limit=15.0
+2024-07-27 13:32:53,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21269.333333333332, ans=0.1
+2024-07-27 13:32:56,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=21269.333333333332, ans=0.1
+2024-07-27 13:32:57,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=21269.333333333332, ans=0.1
+2024-07-27 13:33:02,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=21282.666666666668, ans=0.025
+2024-07-27 13:33:05,788 INFO [train.py:1114] (3/4) Epoch 2, batch 5750, loss[loss=0.4063, simple_loss=0.4615, pruned_loss=0.1755, over 4667.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3783, pruned_loss=0.1321, over 937600.99 frames. ], batch size: 19, lr: 2.88e-02, grad_scale: 64.0
+2024-07-27 13:33:06,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=21296.0, ans=0.2
+2024-07-27 13:33:11,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=21296.0, ans=0.0
+2024-07-27 13:33:15,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=21309.333333333332, ans=0.2
+2024-07-27 13:33:31,925 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:33:33,902 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:33:34,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.471e+01 6.485e+01 7.135e+01 7.978e+01 1.224e+02, threshold=1.427e+02, percent-clipped=0.0
+2024-07-27 13:33:43,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=21349.333333333332, ans=0.125
+2024-07-27 13:33:44,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=12.0
+2024-07-27 13:33:45,479 INFO [train.py:1114] (3/4) Epoch 2, batch 5800, loss[loss=0.334, simple_loss=0.3891, pruned_loss=0.1395, over 4726.00 frames. ], tot_loss[loss=0.322, simple_loss=0.3786, pruned_loss=0.1327, over 936757.60 frames. ], batch size: 19, lr: 2.87e-02, grad_scale: 64.0
+2024-07-27 13:33:50,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=21362.666666666668, ans=0.125
+2024-07-27 13:33:51,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.24 vs. limit=10.0
+2024-07-27 13:34:06,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=21389.333333333332, ans=0.125
+2024-07-27 13:34:15,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=21416.0, ans=0.125
+2024-07-27 13:34:24,442 INFO [train.py:1114] (3/4) Epoch 2, batch 5850, loss[loss=0.3839, simple_loss=0.4395, pruned_loss=0.1641, over 4504.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.3784, pruned_loss=0.1323, over 937150.78 frames. ], batch size: 21, lr: 2.87e-02, grad_scale: 64.0
+2024-07-27 13:34:39,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=15.0
+2024-07-27 13:34:49,984 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 6.249e+01 6.870e+01 7.832e+01 1.003e+02, threshold=1.374e+02, percent-clipped=0.0
+2024-07-27 13:34:52,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=21469.333333333332, ans=0.125
+2024-07-27 13:34:54,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=21482.666666666668, ans=0.125
+2024-07-27 13:34:59,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=21482.666666666668, ans=0.006199420289855073
+2024-07-27 13:35:01,014 INFO [train.py:1114] (3/4) Epoch 2, batch 5900, loss[loss=0.3542, simple_loss=0.4013, pruned_loss=0.1535, over 4674.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3785, pruned_loss=0.1328, over 937125.94 frames. ], batch size: 15, lr: 2.87e-02, grad_scale: 64.0
+2024-07-27 13:35:07,435 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:35:21,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.09 vs. limit=10.0
+2024-07-27 13:35:28,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21549.333333333332, ans=0.1
+2024-07-27 13:35:32,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=21549.333333333332, ans=0.125
+2024-07-27 13:35:35,451 INFO [train.py:1114] (3/4) Epoch 2, batch 5950, loss[loss=0.3203, simple_loss=0.3819, pruned_loss=0.1294, over 4687.00 frames. ], tot_loss[loss=0.3207, simple_loss=0.3776, pruned_loss=0.1319, over 939397.44 frames. ], batch size: 15, lr: 2.86e-02, grad_scale: 64.0
+2024-07-27 13:35:52,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=21589.333333333332, ans=0.025
+2024-07-27 13:35:59,409 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.345e+01 7.134e+01 8.050e+01 1.843e+02, threshold=1.427e+02, percent-clipped=1.0
+2024-07-27 13:36:03,005 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.48 vs. limit=22.5
+2024-07-27 13:36:12,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=21616.0, ans=0.1
+2024-07-27 13:36:14,159 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0
+2024-07-27 13:36:20,606 INFO [train.py:1114] (3/4) Epoch 2, batch 6000, loss[loss=0.3374, simple_loss=0.3909, pruned_loss=0.1419, over 4301.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3763, pruned_loss=0.1311, over 937006.53 frames. ], batch size: 26, lr: 2.86e-02, grad_scale: 64.0
+2024-07-27 13:36:20,972 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-27 13:36:36,194 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.2564, simple_loss=0.3503, pruned_loss=0.08121, over 944034.00 frames.
+2024-07-27 13:36:36,195 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-27 13:36:36,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=21629.333333333332, ans=0.125
+2024-07-27 13:36:45,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=21642.666666666668, ans=0.125
+2024-07-27 13:36:47,810 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 13:36:51,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=21656.0, ans=0.2
+2024-07-27 13:36:58,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=21669.333333333332, ans=0.0
+2024-07-27 13:37:02,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.73 vs. limit=22.5
+2024-07-27 13:37:11,084 INFO [train.py:1114] (3/4) Epoch 2, batch 6050, loss[loss=0.3013, simple_loss=0.3516, pruned_loss=0.1255, over 4771.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3744, pruned_loss=0.1296, over 938231.62 frames. ], batch size: 12, lr: 2.85e-02, grad_scale: 64.0
+2024-07-27 13:37:12,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0
+2024-07-27 13:37:13,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=21696.0, ans=0.00615304347826087
+2024-07-27 13:37:17,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=21709.333333333332, ans=15.0
+2024-07-27 13:37:17,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.09 vs. limit=15.0
+2024-07-27 13:37:18,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=21709.333333333332, ans=0.125
+2024-07-27 13:37:31,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=21736.0, ans=0.125
+2024-07-27 13:37:31,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=21736.0, ans=0.025
+2024-07-27 13:37:34,504 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.397e+01 6.133e+01 6.810e+01 7.852e+01 1.499e+02, threshold=1.362e+02, percent-clipped=2.0
+2024-07-27 13:37:45,484 INFO [train.py:1114] (3/4) Epoch 2, batch 6100, loss[loss=0.3372, simple_loss=0.3999, pruned_loss=0.1373, over 4680.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.3741, pruned_loss=0.1296, over 937706.03 frames. ], batch size: 15, lr: 2.85e-02, grad_scale: 64.0
+2024-07-27 13:37:50,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.69 vs. limit=15.0
+2024-07-27 13:37:55,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.99 vs. limit=6.0
+2024-07-27 13:38:01,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=21789.333333333332, ans=0.125
+2024-07-27 13:38:02,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=21789.333333333332, ans=0.2
+2024-07-27 13:38:02,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.01 vs. limit=22.5
+2024-07-27 13:38:11,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=21802.666666666668, ans=0.125
+2024-07-27 13:38:12,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=21816.0, ans=0.05
+2024-07-27 13:38:20,142 INFO [train.py:1114] (3/4) Epoch 2, batch 6150, loss[loss=0.4769, simple_loss=0.4774, pruned_loss=0.2382, over 3138.00 frames. ], tot_loss[loss=0.3169, simple_loss=0.374, pruned_loss=0.1299, over 936291.20 frames. ], batch size: 35, lr: 2.85e-02, grad_scale: 64.0
+2024-07-27 13:38:22,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.82 vs. limit=22.5
+2024-07-27 13:38:34,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.75 vs. limit=15.0
+2024-07-27 13:38:40,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.28 vs. limit=22.5
+2024-07-27 13:38:44,317 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.693e+01 6.434e+01 7.098e+01 7.748e+01 1.262e+02, threshold=1.420e+02, percent-clipped=0.0
+2024-07-27 13:38:47,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=21882.666666666668, ans=0.09899494936611666
+2024-07-27 13:38:55,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=21882.666666666668, ans=0.1
+2024-07-27 13:38:56,902 INFO [train.py:1114] (3/4) Epoch 2, batch 6200, loss[loss=0.3176, simple_loss=0.3785, pruned_loss=0.1283, over 4749.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3749, pruned_loss=0.1303, over 936006.34 frames. ], batch size: 14, lr: 2.84e-02, grad_scale: 64.0
+2024-07-27 13:39:01,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=21896.0, ans=0.006109565217391305
+2024-07-27 13:39:05,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=21909.333333333332, ans=0.125
+2024-07-27 13:39:06,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=21909.333333333332, ans=0.125
+2024-07-27 13:39:10,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.00 vs. limit=22.5
+2024-07-27 13:39:13,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=21922.666666666668, ans=0.2
+2024-07-27 13:39:17,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=21922.666666666668, ans=0.125
+2024-07-27 13:39:20,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.90 vs. limit=10.0
+2024-07-27 13:39:23,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=21936.0, ans=0.0
+2024-07-27 13:39:29,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=21949.333333333332, ans=0.125
+2024-07-27 13:39:33,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=21949.333333333332, ans=0.006097971014492755
+2024-07-27 13:39:38,442 INFO [train.py:1114] (3/4) Epoch 2, batch 6250, loss[loss=0.3556, simple_loss=0.4056, pruned_loss=0.1528, over 4816.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3745, pruned_loss=0.1305, over 932584.71 frames. ], batch size: 14, lr: 2.84e-02, grad_scale: 64.0
+2024-07-27 13:39:45,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=21976.0, ans=0.125
+2024-07-27 13:39:50,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=21976.0, ans=0.125
+2024-07-27 13:39:56,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=21989.333333333332, ans=0.0
+2024-07-27 13:40:04,078 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.093e+01 6.216e+01 6.990e+01 7.888e+01 1.132e+02, threshold=1.398e+02, percent-clipped=0.0
+2024-07-27 13:40:05,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.20 vs. limit=22.5
+2024-07-27 13:40:10,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=22016.0, ans=0.0
+2024-07-27 13:40:13,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=22016.0, ans=0.006083478260869565
+2024-07-27 13:40:15,174 INFO [train.py:1114] (3/4) Epoch 2, batch 6300, loss[loss=0.281, simple_loss=0.3404, pruned_loss=0.1108, over 4601.00 frames. ], tot_loss[loss=0.3187, simple_loss=0.3753, pruned_loss=0.1311, over 929708.98 frames. ], batch size: 10, lr: 2.84e-02, grad_scale: 64.0
+2024-07-27 13:40:21,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22029.333333333332, ans=0.1
+2024-07-27 13:40:23,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=22042.666666666668, ans=0.2
+2024-07-27 13:40:41,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0
+2024-07-27 13:40:46,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0
+2024-07-27 13:40:59,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22082.666666666668, ans=0.0
+2024-07-27 13:41:02,157 INFO [train.py:1114] (3/4) Epoch 2, batch 6350, loss[loss=0.3183, simple_loss=0.3745, pruned_loss=0.1311, over 4504.00 frames. ], tot_loss[loss=0.3177, simple_loss=0.3748, pruned_loss=0.1303, over 933753.78 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0
+2024-07-27 13:41:04,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.77 vs. limit=6.0
+2024-07-27 13:41:06,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22096.0, ans=0.1
+2024-07-27 13:41:12,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.76 vs. limit=6.0
+2024-07-27 13:41:25,746 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.585e+01 6.300e+01 6.631e+01 7.435e+01 1.313e+02, threshold=1.326e+02, percent-clipped=0.0
+2024-07-27 13:41:26,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=22136.0, ans=0.125
+2024-07-27 13:41:36,417 INFO [train.py:1114] (3/4) Epoch 2, batch 6400, loss[loss=0.3767, simple_loss=0.4264, pruned_loss=0.1635, over 4643.00 frames. ], tot_loss[loss=0.3183, simple_loss=0.3754, pruned_loss=0.1306, over 934847.27 frames. ], batch size: 13, lr: 2.83e-02, grad_scale: 64.0
+2024-07-27 13:41:40,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=22162.666666666668, ans=0.00605159420289855
+2024-07-27 13:41:42,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.31 vs. limit=22.5
+2024-07-27 13:41:50,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=22189.333333333332, ans=0.125
+2024-07-27 13:42:09,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.25 vs. limit=22.5
+2024-07-27 13:42:10,974 INFO [train.py:1114] (3/4) Epoch 2, batch 6450, loss[loss=0.3582, simple_loss=0.3978, pruned_loss=0.1593, over 4501.00 frames. ], tot_loss[loss=0.318, simple_loss=0.3757, pruned_loss=0.1302, over 938553.59 frames. ], batch size: 21, lr: 2.83e-02, grad_scale: 64.0
+2024-07-27 13:42:14,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=22229.333333333332, ans=0.125
+2024-07-27 13:42:28,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.45 vs. limit=15.0
+2024-07-27 13:42:30,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=22256.0, ans=10.0
+2024-07-27 13:42:34,025 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.359e+01 6.221e+01 6.785e+01 7.657e+01 1.359e+02, threshold=1.357e+02, percent-clipped=1.0
+2024-07-27 13:42:44,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.97 vs. limit=15.0
+2024-07-27 13:42:45,071 INFO [train.py:1114] (3/4) Epoch 2, batch 6500, loss[loss=0.4218, simple_loss=0.4365, pruned_loss=0.2035, over 3610.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.3753, pruned_loss=0.1301, over 940041.77 frames. ], batch size: 37, lr: 2.82e-02, grad_scale: 64.0
+2024-07-27 13:42:56,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=22309.333333333332, ans=0.025
+2024-07-27 13:43:04,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=22336.0, ans=0.125
+2024-07-27 13:43:10,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=22336.0, ans=0.0
+2024-07-27 13:43:12,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=22349.333333333332, ans=0.125
+2024-07-27 13:43:19,931 INFO [train.py:1114] (3/4) Epoch 2, batch 6550, loss[loss=0.2692, simple_loss=0.3258, pruned_loss=0.1063, over 4808.00 frames. ], tot_loss[loss=0.314, simple_loss=0.3728, pruned_loss=0.1276, over 943027.03 frames. ], batch size: 11, lr: 2.82e-02, grad_scale: 64.0
+2024-07-27 13:43:22,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=22362.666666666668, ans=0.2
+2024-07-27 13:43:43,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.181e+01 6.196e+01 6.780e+01 7.401e+01 1.122e+02, threshold=1.356e+02, percent-clipped=0.0
+2024-07-27 13:43:44,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22402.666666666668, ans=0.1
+2024-07-27 13:43:49,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22416.0, ans=0.1
+2024-07-27 13:43:51,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.30 vs. limit=22.5
+2024-07-27 13:43:56,091 INFO [train.py:1114] (3/4) Epoch 2, batch 6600, loss[loss=0.3358, simple_loss=0.3931, pruned_loss=0.1393, over 4930.00 frames. ], tot_loss[loss=0.3151, simple_loss=0.3739, pruned_loss=0.1281, over 944859.41 frames. ], batch size: 14, lr: 2.82e-02, grad_scale: 128.0
+2024-07-27 13:44:15,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=22456.0, ans=0.125
+2024-07-27 13:44:29,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=22482.666666666668, ans=0.125
+2024-07-27 13:44:31,236 INFO [train.py:1114] (3/4) Epoch 2, batch 6650, loss[loss=0.3135, simple_loss=0.3773, pruned_loss=0.1249, over 4605.00 frames. ], tot_loss[loss=0.3139, simple_loss=0.3729, pruned_loss=0.1274, over 943871.42 frames. ], batch size: 17, lr: 2.81e-02, grad_scale: 128.0
+2024-07-27 13:44:31,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22496.0, ans=0.1
+2024-07-27 13:44:37,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.57 vs. 
limit=15.0 +2024-07-27 13:44:38,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=22509.333333333332, ans=0.09899494936611666 +2024-07-27 13:44:42,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=22509.333333333332, ans=0.125 +2024-07-27 13:44:48,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.24 vs. limit=22.5 +2024-07-27 13:44:58,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.864e+01 6.602e+01 7.128e+01 7.971e+01 1.702e+02, threshold=1.426e+02, percent-clipped=1.0 +2024-07-27 13:44:58,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=22536.0, ans=0.125 +2024-07-27 13:45:05,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=22536.0, ans=0.125 +2024-07-27 13:45:17,346 INFO [train.py:1114] (3/4) Epoch 2, batch 6700, loss[loss=0.3919, simple_loss=0.4353, pruned_loss=0.1742, over 4665.00 frames. ], tot_loss[loss=0.3127, simple_loss=0.3722, pruned_loss=0.1266, over 942885.77 frames. ], batch size: 19, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:45:21,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=22562.666666666668, ans=0.95 +2024-07-27 13:45:29,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.41 vs. limit=15.0 +2024-07-27 13:45:29,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=22576.0, ans=0.125 +2024-07-27 13:45:32,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=22589.333333333332, ans=0.125 +2024-07-27 13:45:50,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=22616.0, ans=0.2 +2024-07-27 13:45:52,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=22616.0, ans=0.125 +2024-07-27 13:45:53,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22629.333333333332, ans=0.1 +2024-07-27 13:45:53,897 INFO [train.py:1114] (3/4) Epoch 2, batch 6750, loss[loss=0.3547, simple_loss=0.4029, pruned_loss=0.1533, over 4287.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3723, pruned_loss=0.127, over 940653.44 frames. 
], batch size: 26, lr: 2.81e-02, grad_scale: 128.0 +2024-07-27 13:45:54,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=22629.333333333332, ans=0.125 +2024-07-27 13:46:17,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=22669.333333333332, ans=0.2 +2024-07-27 13:46:17,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=22669.333333333332, ans=0.125 +2024-07-27 13:46:18,631 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:46:19,139 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 6.419e+01 6.907e+01 8.025e+01 1.154e+02, threshold=1.381e+02, percent-clipped=0.0 +2024-07-27 13:46:26,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=22682.666666666668, ans=0.005938550724637681 +2024-07-27 13:46:31,952 INFO [train.py:1114] (3/4) Epoch 2, batch 6800, loss[loss=0.3164, simple_loss=0.3783, pruned_loss=0.1273, over 4629.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3721, pruned_loss=0.1265, over 939284.52 frames. ], batch size: 13, lr: 2.80e-02, grad_scale: 128.0 +2024-07-27 13:46:38,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-27 13:46:42,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=22709.333333333332, ans=0.125 +2024-07-27 13:46:46,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=22722.666666666668, ans=0.125 +2024-07-27 13:46:49,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=22722.666666666668, ans=0.0 +2024-07-27 13:46:51,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=22736.0, ans=0.125 +2024-07-27 13:46:54,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=22736.0, ans=0.1 +2024-07-27 13:46:56,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.51 vs. limit=15.0 +2024-07-27 13:46:57,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=22736.0, ans=0.125 +2024-07-27 13:46:58,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=22749.333333333332, ans=0.2 +2024-07-27 13:46:58,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=22749.333333333332, ans=0.125 +2024-07-27 13:47:06,016 INFO [train.py:1114] (3/4) Epoch 2, batch 6850, loss[loss=0.2825, simple_loss=0.3641, pruned_loss=0.1004, over 4685.00 frames. ], tot_loss[loss=0.3128, simple_loss=0.3724, pruned_loss=0.1266, over 940963.55 frames. 
], batch size: 13, lr: 2.80e-02, grad_scale: 128.0 +2024-07-27 13:47:17,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.14 vs. limit=15.0 +2024-07-27 13:47:18,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=22776.0, ans=0.5 +2024-07-27 13:47:32,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=22802.666666666668, ans=0.07 +2024-07-27 13:47:33,802 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 6.353e+01 6.914e+01 7.942e+01 1.137e+02, threshold=1.383e+02, percent-clipped=0.0 +2024-07-27 13:47:40,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=22816.0, ans=0.125 +2024-07-27 13:47:40,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=22816.0, ans=0.1 +2024-07-27 13:47:44,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=22816.0, ans=0.005909565217391305 +2024-07-27 13:47:46,054 INFO [train.py:1114] (3/4) Epoch 2, batch 6900, loss[loss=0.297, simple_loss=0.3591, pruned_loss=0.1174, over 4968.00 frames. ], tot_loss[loss=0.3123, simple_loss=0.372, pruned_loss=0.1263, over 942911.92 frames. ], batch size: 13, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:47:46,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=22829.333333333332, ans=0.125 +2024-07-27 13:47:53,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=22842.666666666668, ans=0.2 +2024-07-27 13:48:00,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=22856.0, ans=0.2 +2024-07-27 13:48:06,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=22856.0, ans=0.025 +2024-07-27 13:48:09,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.88 vs. limit=15.0 +2024-07-27 13:48:20,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.31 vs. limit=22.5 +2024-07-27 13:48:21,665 INFO [train.py:1114] (3/4) Epoch 2, batch 6950, loss[loss=0.2972, simple_loss=0.3404, pruned_loss=0.127, over 4510.00 frames. ], tot_loss[loss=0.3129, simple_loss=0.3718, pruned_loss=0.127, over 939972.10 frames. ], batch size: 10, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:48:28,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=22896.0, ans=0.125 +2024-07-27 13:48:35,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.94 vs. 
limit=15.0 +2024-07-27 13:48:41,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=22909.333333333332, ans=0.025 +2024-07-27 13:48:50,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.18 vs. limit=10.0 +2024-07-27 13:48:52,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=22936.0, ans=0.1 +2024-07-27 13:48:53,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=22936.0, ans=0.5 +2024-07-27 13:48:54,772 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.446e+01 7.112e+01 7.644e+01 1.059e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 13:49:02,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=22949.333333333332, ans=0.025 +2024-07-27 13:49:05,667 INFO [train.py:1114] (3/4) Epoch 2, batch 7000, loss[loss=0.3497, simple_loss=0.4149, pruned_loss=0.1422, over 4633.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3709, pruned_loss=0.1267, over 938370.66 frames. ], batch size: 17, lr: 2.79e-02, grad_scale: 128.0 +2024-07-27 13:49:13,390 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=2.768e-02 +2024-07-27 13:49:16,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=22976.0, ans=0.125 +2024-07-27 13:49:21,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=22989.333333333332, ans=0.005871884057971015 +2024-07-27 13:49:32,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=23002.666666666668, ans=0.125 +2024-07-27 13:49:42,742 INFO [train.py:1114] (3/4) Epoch 2, batch 7050, loss[loss=0.3387, simple_loss=0.4028, pruned_loss=0.1373, over 4747.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3717, pruned_loss=0.1263, over 941903.66 frames. ], batch size: 19, lr: 2.78e-02, grad_scale: 128.0 +2024-07-27 13:50:02,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=23042.666666666668, ans=0.09899494936611666 +2024-07-27 13:50:17,257 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.353e+01 6.903e+01 7.811e+01 8.989e+01 1.248e+02, threshold=1.562e+02, percent-clipped=0.0 +2024-07-27 13:50:20,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=23082.666666666668, ans=0.2 +2024-07-27 13:50:22,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.20 vs. limit=6.0 +2024-07-27 13:50:27,355 INFO [train.py:1114] (3/4) Epoch 2, batch 7100, loss[loss=0.2916, simple_loss=0.3679, pruned_loss=0.1076, over 4800.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3733, pruned_loss=0.1279, over 936583.35 frames. 
], batch size: 15, lr: 2.78e-02, grad_scale: 64.0 +2024-07-27 13:50:34,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=23109.333333333332, ans=0.125 +2024-07-27 13:50:38,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=23109.333333333332, ans=0.2 +2024-07-27 13:50:39,365 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=7.11 vs. limit=6.0 +2024-07-27 13:50:56,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.95 vs. limit=10.0 +2024-07-27 13:50:59,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.96 vs. limit=12.0 +2024-07-27 13:51:00,515 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=4.005e-02 +2024-07-27 13:51:07,816 INFO [train.py:1114] (3/4) Epoch 2, batch 7150, loss[loss=0.3729, simple_loss=0.4241, pruned_loss=0.1609, over 4526.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3721, pruned_loss=0.1275, over 937379.80 frames. ], batch size: 21, lr: 2.78e-02, grad_scale: 64.0 +2024-07-27 13:51:08,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.64 vs. limit=22.5 +2024-07-27 13:51:29,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=23189.333333333332, ans=0.125 +2024-07-27 13:51:30,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=23189.333333333332, ans=0.00582840579710145 +2024-07-27 13:51:41,523 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.182e+01 6.431e+01 7.159e+01 7.939e+01 1.328e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 13:51:53,964 INFO [train.py:1114] (3/4) Epoch 2, batch 7200, loss[loss=0.3711, simple_loss=0.4319, pruned_loss=0.1551, over 4807.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3753, pruned_loss=0.1296, over 937832.38 frames. ], batch size: 15, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:51:59,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=23229.333333333332, ans=0.0 +2024-07-27 13:52:07,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.93 vs. limit=22.5 +2024-07-27 13:52:09,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23256.0, ans=0.125 +2024-07-27 13:52:10,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=23256.0, ans=0.04949747468305833 +2024-07-27 13:52:28,625 INFO [train.py:1114] (3/4) Epoch 2, batch 7250, loss[loss=0.2993, simple_loss=0.3486, pruned_loss=0.125, over 4861.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.3726, pruned_loss=0.128, over 940229.99 frames. 
], batch size: 12, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:52:49,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.47 vs. limit=15.0 +2024-07-27 13:52:49,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.62 vs. limit=15.0 +2024-07-27 13:52:54,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=23336.0, ans=0.125 +2024-07-27 13:52:54,557 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.237e+01 6.919e+01 7.525e+01 1.117e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 13:53:06,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=23349.333333333332, ans=0.005793623188405797 +2024-07-27 13:53:14,184 INFO [train.py:1114] (3/4) Epoch 2, batch 7300, loss[loss=0.2947, simple_loss=0.3504, pruned_loss=0.1194, over 4857.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3717, pruned_loss=0.1276, over 940417.58 frames. ], batch size: 12, lr: 2.77e-02, grad_scale: 64.0 +2024-07-27 13:53:24,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=23376.0, ans=0.2 +2024-07-27 13:53:26,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=23389.333333333332, ans=0.025 +2024-07-27 13:53:42,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23416.0, ans=0.1 +2024-07-27 13:53:49,001 INFO [train.py:1114] (3/4) Epoch 2, batch 7350, loss[loss=0.2772, simple_loss=0.3451, pruned_loss=0.1046, over 4643.00 frames. ], tot_loss[loss=0.3141, simple_loss=0.3726, pruned_loss=0.1278, over 939584.19 frames. ], batch size: 12, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:53:54,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=23429.333333333332, ans=0.125 +2024-07-27 13:54:02,237 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:54:05,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.54 vs. 
limit=15.0 +2024-07-27 13:54:07,056 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:54:12,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=23469.333333333332, ans=0.125 +2024-07-27 13:54:12,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23469.333333333332, ans=0.1 +2024-07-27 13:54:14,583 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.547e+01 7.387e+01 8.600e+01 1.543e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 13:54:19,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=23469.333333333332, ans=0.125 +2024-07-27 13:54:26,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=5.61 vs. limit=15.0 +2024-07-27 13:54:44,639 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:54:45,264 INFO [train.py:1114] (3/4) Epoch 2, batch 7400, loss[loss=0.3133, simple_loss=0.3832, pruned_loss=0.1217, over 4690.00 frames. ], tot_loss[loss=0.3129, simple_loss=0.3714, pruned_loss=0.1272, over 940621.80 frames. ], batch size: 13, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:55:00,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=6.55 vs. limit=6.0 +2024-07-27 13:55:11,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=23536.0, ans=0.0 +2024-07-27 13:55:19,704 INFO [train.py:1114] (3/4) Epoch 2, batch 7450, loss[loss=0.3249, simple_loss=0.375, pruned_loss=0.1374, over 4607.00 frames. ], tot_loss[loss=0.3105, simple_loss=0.3689, pruned_loss=0.1261, over 937838.96 frames. ], batch size: 11, lr: 2.76e-02, grad_scale: 64.0 +2024-07-27 13:55:24,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=23562.666666666668, ans=0.2 +2024-07-27 13:55:28,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=23576.0, ans=0.125 +2024-07-27 13:55:39,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=23602.666666666668, ans=0.125 +2024-07-27 13:55:39,724 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.79 vs. limit=22.5 +2024-07-27 13:55:45,338 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:55:45,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.20 vs. 
limit=15.0 +2024-07-27 13:55:47,788 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.335e+01 6.373e+01 7.113e+01 7.806e+01 1.283e+02, threshold=1.423e+02, percent-clipped=0.0 +2024-07-27 13:55:50,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=23616.0, ans=0.1 +2024-07-27 13:55:58,078 INFO [train.py:1114] (3/4) Epoch 2, batch 7500, loss[loss=0.4564, simple_loss=0.4705, pruned_loss=0.2212, over 2954.00 frames. ], tot_loss[loss=0.3151, simple_loss=0.3725, pruned_loss=0.1288, over 935659.69 frames. ], batch size: 35, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:56:01,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.44 vs. limit=15.0 +2024-07-27 13:56:14,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.50 vs. limit=6.0 +2024-07-27 13:56:24,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=23642.666666666668, ans=0.005729855072463768 +2024-07-27 13:56:26,946 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.08 vs. limit=15.0 +2024-07-27 13:56:28,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.86 vs. limit=22.5 +2024-07-27 13:56:28,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=23656.0, ans=0.025 +2024-07-27 13:56:29,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=23656.0, ans=0.1 +2024-07-27 13:56:31,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.57 vs. limit=22.5 +2024-07-27 13:56:46,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=23696.0, ans=0.1 +2024-07-27 13:56:47,502 INFO [train.py:1114] (3/4) Epoch 2, batch 7550, loss[loss=0.3818, simple_loss=0.4193, pruned_loss=0.1722, over 4603.00 frames. ], tot_loss[loss=0.3165, simple_loss=0.3741, pruned_loss=0.1294, over 935962.43 frames. ], batch size: 17, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:56:54,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=23696.0, ans=0.125 +2024-07-27 13:56:57,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=23709.333333333332, ans=0.125 +2024-07-27 13:57:04,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=23722.666666666668, ans=0.1 +2024-07-27 13:57:13,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.34 vs. 
limit=6.0 +2024-07-27 13:57:14,887 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 13:57:15,319 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.494e+01 6.851e+01 7.705e+01 1.471e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-27 13:57:15,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.88 vs. limit=8.0 +2024-07-27 13:57:19,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=23749.333333333332, ans=0.2 +2024-07-27 13:57:21,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=23749.333333333332, ans=0.125 +2024-07-27 13:57:25,097 INFO [train.py:1114] (3/4) Epoch 2, batch 7600, loss[loss=0.3047, simple_loss=0.3777, pruned_loss=0.1159, over 4804.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3733, pruned_loss=0.1283, over 937801.92 frames. ], batch size: 14, lr: 2.75e-02, grad_scale: 64.0 +2024-07-27 13:57:25,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=23762.666666666668, ans=0.0 +2024-07-27 13:57:37,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=23776.0, ans=0.005700869565217392 +2024-07-27 13:57:44,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=23802.666666666668, ans=0.125 +2024-07-27 13:57:47,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=23802.666666666668, ans=0.0 +2024-07-27 13:57:58,709 INFO [train.py:1114] (3/4) Epoch 2, batch 7650, loss[loss=0.2575, simple_loss=0.3194, pruned_loss=0.09775, over 4928.00 frames. ], tot_loss[loss=0.3142, simple_loss=0.3726, pruned_loss=0.1279, over 936933.09 frames. ], batch size: 12, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 13:59:40,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=23856.0, ans=0.125 +2024-07-27 13:59:50,982 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.430e+01 6.487e+01 6.980e+01 8.234e+01 1.140e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 13:59:52,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=15.0 +2024-07-27 13:59:56,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=23882.666666666668, ans=0.2 +2024-07-27 13:59:57,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=23882.666666666668, ans=0.0 +2024-07-27 13:59:58,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0 +2024-07-27 14:00:01,138 INFO [train.py:1114] (3/4) Epoch 2, batch 7700, loss[loss=0.3575, simple_loss=0.4164, pruned_loss=0.1493, over 4686.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3729, pruned_loss=0.1282, over 934072.39 frames. 
], batch size: 13, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:01,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=23896.0, ans=0.0 +2024-07-27 14:00:07,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=23909.333333333332, ans=0.07 +2024-07-27 14:00:17,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=23922.666666666668, ans=0.0056689855072463765 +2024-07-27 14:00:18,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=23922.666666666668, ans=0.1 +2024-07-27 14:00:23,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=23936.0, ans=0.125 +2024-07-27 14:00:24,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=23936.0, ans=0.125 +2024-07-27 14:00:30,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=23949.333333333332, ans=0.2 +2024-07-27 14:00:34,013 INFO [train.py:1114] (3/4) Epoch 2, batch 7750, loss[loss=0.2632, simple_loss=0.3465, pruned_loss=0.09001, over 4929.00 frames. ], tot_loss[loss=0.3157, simple_loss=0.3745, pruned_loss=0.1284, over 935701.83 frames. ], batch size: 14, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:00:45,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=23976.0, ans=0.2 +2024-07-27 14:00:47,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=23989.333333333332, ans=0.1 +2024-07-27 14:00:51,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=23989.333333333332, ans=0.025 +2024-07-27 14:00:55,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=23989.333333333332, ans=0.025 +2024-07-27 14:01:00,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=24002.666666666668, ans=0.125 +2024-07-27 14:01:00,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.06 vs. limit=15.0 +2024-07-27 14:01:01,471 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.212e+01 6.407e+01 7.069e+01 7.682e+01 1.137e+02, threshold=1.414e+02, percent-clipped=0.0 +2024-07-27 14:01:11,459 INFO [train.py:1114] (3/4) Epoch 2, batch 7800, loss[loss=0.3021, simple_loss=0.3705, pruned_loss=0.1168, over 4661.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3753, pruned_loss=0.1286, over 937121.94 frames. ], batch size: 14, lr: 2.74e-02, grad_scale: 64.0 +2024-07-27 14:01:14,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=24029.333333333332, ans=0.0 +2024-07-27 14:01:24,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24056.0, ans=0.0 +2024-07-27 14:01:44,898 INFO [train.py:1114] (3/4) Epoch 2, batch 7850, loss[loss=0.2485, simple_loss=0.311, pruned_loss=0.09298, over 4507.00 frames. 
], tot_loss[loss=0.317, simple_loss=0.3758, pruned_loss=0.1291, over 936005.06 frames. ], batch size: 10, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:01:50,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24096.0, ans=0.125 +2024-07-27 14:01:53,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=24109.333333333332, ans=0.125 +2024-07-27 14:01:56,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-07-27 14:02:05,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=24122.666666666668, ans=0.125 +2024-07-27 14:02:10,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.460e+01 6.475e+01 7.021e+01 7.812e+01 1.156e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 14:02:10,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0 +2024-07-27 14:02:20,254 INFO [train.py:1114] (3/4) Epoch 2, batch 7900, loss[loss=0.326, simple_loss=0.3937, pruned_loss=0.1291, over 4869.00 frames. ], tot_loss[loss=0.3184, simple_loss=0.377, pruned_loss=0.1299, over 933163.27 frames. ], batch size: 14, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:02:22,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.68 vs. limit=10.0 +2024-07-27 14:02:24,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=24162.666666666668, ans=0.1 +2024-07-27 14:02:28,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=24176.0, ans=0.125 +2024-07-27 14:02:32,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.37 vs. limit=15.0 +2024-07-27 14:02:33,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=24189.333333333332, ans=0.005611014492753624 +2024-07-27 14:02:38,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=24189.333333333332, ans=0.125 +2024-07-27 14:02:43,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.41 vs. limit=12.0 +2024-07-27 14:02:51,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=24216.0, ans=0.95 +2024-07-27 14:02:51,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=24216.0, ans=0.125 +2024-07-27 14:02:53,688 INFO [train.py:1114] (3/4) Epoch 2, batch 7950, loss[loss=0.3518, simple_loss=0.3944, pruned_loss=0.1546, over 3527.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3739, pruned_loss=0.1277, over 935445.66 frames. 
], batch size: 35, lr: 2.73e-02, grad_scale: 64.0 +2024-07-27 14:03:00,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=24242.666666666668, ans=0.125 +2024-07-27 14:03:02,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=24242.666666666668, ans=0.0 +2024-07-27 14:03:11,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24256.0, ans=0.1 +2024-07-27 14:03:13,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=24269.333333333332, ans=0.125 +2024-07-27 14:03:16,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=24269.333333333332, ans=0.2 +2024-07-27 14:03:17,185 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.361e+01 6.433e+01 7.086e+01 8.045e+01 1.490e+02, threshold=1.417e+02, percent-clipped=1.0 +2024-07-27 14:03:18,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=17.44 vs. limit=15.0 +2024-07-27 14:03:22,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24282.666666666668, ans=0.1 +2024-07-27 14:03:23,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=24282.666666666668, ans=0.125 +2024-07-27 14:03:23,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=24282.666666666668, ans=0.125 +2024-07-27 14:03:26,872 INFO [train.py:1114] (3/4) Epoch 2, batch 8000, loss[loss=0.2795, simple_loss=0.3232, pruned_loss=0.1179, over 4606.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3724, pruned_loss=0.1274, over 934826.41 frames. ], batch size: 11, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:03:43,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24322.666666666668, ans=0.0 +2024-07-27 14:03:44,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=24322.666666666668, ans=0.5 +2024-07-27 14:04:00,346 INFO [train.py:1114] (3/4) Epoch 2, batch 8050, loss[loss=0.2784, simple_loss=0.3561, pruned_loss=0.1003, over 4814.00 frames. ], tot_loss[loss=0.3123, simple_loss=0.3717, pruned_loss=0.1264, over 934725.27 frames. ], batch size: 14, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:04:11,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.80 vs. 
limit=12.0 +2024-07-27 14:04:13,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=24389.333333333332, ans=0.05 +2024-07-27 14:04:21,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=24402.666666666668, ans=0.125 +2024-07-27 14:04:23,842 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.126e+01 6.809e+01 7.483e+01 1.319e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-27 14:04:24,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=24402.666666666668, ans=0.09899494936611666 +2024-07-27 14:04:26,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=24416.0, ans=0.125 +2024-07-27 14:04:33,620 INFO [train.py:1114] (3/4) Epoch 2, batch 8100, loss[loss=0.3293, simple_loss=0.3834, pruned_loss=0.1376, over 4803.00 frames. ], tot_loss[loss=0.3134, simple_loss=0.3729, pruned_loss=0.1269, over 934286.29 frames. ], batch size: 15, lr: 2.72e-02, grad_scale: 64.0 +2024-07-27 14:04:53,640 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-27 14:05:04,669 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.98 vs. limit=15.0 +2024-07-27 14:05:07,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=24442.666666666668, ans=0.125 +2024-07-27 14:05:14,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24456.0, ans=0.1 +2024-07-27 14:05:29,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=24469.333333333332, ans=0.125 +2024-07-27 14:05:44,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=24469.333333333332, ans=0.125 +2024-07-27 14:05:46,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24469.333333333332, ans=0.0 +2024-07-27 14:05:57,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.46 vs. limit=12.0 +2024-07-27 14:06:02,877 INFO [train.py:1114] (3/4) Epoch 2, batch 8150, loss[loss=0.2868, simple_loss=0.3659, pruned_loss=0.1038, over 4790.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3718, pruned_loss=0.1263, over 937862.00 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:06:05,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=24496.0, ans=0.05 +2024-07-27 14:06:11,943 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:06:22,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.10 vs. 
limit=15.0 +2024-07-27 14:06:50,921 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.617e+01 6.763e+01 7.386e+01 8.088e+01 1.261e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 14:06:51,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24536.0, ans=0.0 +2024-07-27 14:06:55,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=24549.333333333332, ans=0.125 +2024-07-27 14:06:56,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=24549.333333333332, ans=0.125 +2024-07-27 14:06:58,304 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.62 vs. limit=10.0 +2024-07-27 14:07:01,482 INFO [train.py:1114] (3/4) Epoch 2, batch 8200, loss[loss=0.3174, simple_loss=0.3804, pruned_loss=0.1273, over 4808.00 frames. ], tot_loss[loss=0.3138, simple_loss=0.3733, pruned_loss=0.1271, over 938722.49 frames. ], batch size: 15, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:07:06,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=24562.666666666668, ans=0.125 +2024-07-27 14:07:08,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=24576.0, ans=22.5 +2024-07-27 14:07:11,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24576.0, ans=0.1 +2024-07-27 14:07:27,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=24602.666666666668, ans=0.005521159420289855 +2024-07-27 14:07:35,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=24616.0, ans=0.125 +2024-07-27 14:07:38,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=24616.0, ans=0.005518260869565218 +2024-07-27 14:07:39,094 INFO [train.py:1114] (3/4) Epoch 2, batch 8250, loss[loss=0.241, simple_loss=0.3135, pruned_loss=0.08422, over 4895.00 frames. ], tot_loss[loss=0.3123, simple_loss=0.3718, pruned_loss=0.1264, over 939302.63 frames. ], batch size: 13, lr: 2.71e-02, grad_scale: 64.0 +2024-07-27 14:07:39,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=24629.333333333332, ans=0.125 +2024-07-27 14:07:53,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.68 vs. limit=6.0 +2024-07-27 14:07:55,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=24642.666666666668, ans=0.125 +2024-07-27 14:07:59,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=24656.0, ans=0.0 +2024-07-27 14:08:03,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.23 vs. 
limit=10.0 +2024-07-27 14:08:06,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24669.333333333332, ans=0.0 +2024-07-27 14:08:07,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.272e+01 7.057e+01 7.926e+01 1.070e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 14:08:16,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=24682.666666666668, ans=0.005503768115942029 +2024-07-27 14:08:32,476 INFO [train.py:1114] (3/4) Epoch 2, batch 8300, loss[loss=0.3602, simple_loss=0.4093, pruned_loss=0.1556, over 4896.00 frames. ], tot_loss[loss=0.3116, simple_loss=0.3713, pruned_loss=0.1259, over 939036.69 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:08:38,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.53 vs. limit=15.0 +2024-07-27 14:08:42,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=24709.333333333332, ans=0.0 +2024-07-27 14:08:42,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.36 vs. limit=15.0 +2024-07-27 14:08:45,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24709.333333333332, ans=0.1 +2024-07-27 14:08:45,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=24709.333333333332, ans=0.0 +2024-07-27 14:08:47,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=24722.666666666668, ans=0.1 +2024-07-27 14:08:50,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=24722.666666666668, ans=0.125 +2024-07-27 14:09:06,240 INFO [train.py:1114] (3/4) Epoch 2, batch 8350, loss[loss=0.3662, simple_loss=0.4187, pruned_loss=0.1568, over 4816.00 frames. ], tot_loss[loss=0.3103, simple_loss=0.3708, pruned_loss=0.125, over 941796.58 frames. ], batch size: 15, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:09:16,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=24776.0, ans=0.0054834782608695655 +2024-07-27 14:09:18,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=25.07 vs. limit=15.0 +2024-07-27 14:09:29,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=24789.333333333332, ans=0.0 +2024-07-27 14:09:33,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=24802.666666666668, ans=0.2 +2024-07-27 14:09:34,716 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.707e+01 6.372e+01 7.103e+01 7.786e+01 1.162e+02, threshold=1.421e+02, percent-clipped=0.0 +2024-07-27 14:09:44,919 INFO [train.py:1114] (3/4) Epoch 2, batch 8400, loss[loss=0.2898, simple_loss=0.345, pruned_loss=0.1173, over 4770.00 frames. ], tot_loss[loss=0.3115, simple_loss=0.3715, pruned_loss=0.1257, over 940134.40 frames. 
], batch size: 12, lr: 2.70e-02, grad_scale: 64.0 +2024-07-27 14:09:49,322 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.65 vs. limit=12.0 +2024-07-27 14:09:54,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=24842.666666666668, ans=0.125 +2024-07-27 14:09:55,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=24842.666666666668, ans=0.1 +2024-07-27 14:10:00,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=24856.0, ans=0.2 +2024-07-27 14:10:17,805 INFO [train.py:1114] (3/4) Epoch 2, batch 8450, loss[loss=0.3326, simple_loss=0.3948, pruned_loss=0.1352, over 4784.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3726, pruned_loss=0.1261, over 939130.96 frames. ], batch size: 15, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:10:20,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.59 vs. limit=15.0 +2024-07-27 14:10:21,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=24896.0, ans=0.0 +2024-07-27 14:10:46,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=24922.666666666668, ans=0.125 +2024-07-27 14:10:51,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.225e+01 6.363e+01 6.808e+01 7.563e+01 1.440e+02, threshold=1.362e+02, percent-clipped=1.0 +2024-07-27 14:11:00,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=24962.666666666668, ans=0.0 +2024-07-27 14:11:01,358 INFO [train.py:1114] (3/4) Epoch 2, batch 8500, loss[loss=0.2845, simple_loss=0.3319, pruned_loss=0.1185, over 4614.00 frames. ], tot_loss[loss=0.3128, simple_loss=0.3725, pruned_loss=0.1265, over 938859.65 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:11:10,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=24976.0, ans=0.05 +2024-07-27 14:11:13,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=12.0 +2024-07-27 14:11:22,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25002.666666666668, ans=0.1 +2024-07-27 14:11:34,220 INFO [train.py:1114] (3/4) Epoch 2, batch 8550, loss[loss=0.2952, simple_loss=0.3451, pruned_loss=0.1226, over 4813.00 frames. ], tot_loss[loss=0.312, simple_loss=0.3716, pruned_loss=0.1262, over 939725.74 frames. ], batch size: 11, lr: 2.69e-02, grad_scale: 64.0 +2024-07-27 14:11:42,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=25042.666666666668, ans=0.025 +2024-07-27 14:11:46,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.26 vs. 
limit=10.0 +2024-07-27 14:11:59,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=25069.333333333332, ans=0.0 +2024-07-27 14:12:01,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.174e+01 6.782e+01 7.598e+01 1.715e+02, threshold=1.356e+02, percent-clipped=1.0 +2024-07-27 14:12:09,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=25082.666666666668, ans=0.2 +2024-07-27 14:12:10,655 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.322e-03 +2024-07-27 14:12:11,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=25082.666666666668, ans=0.005416811594202898 +2024-07-27 14:12:11,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=25082.666666666668, ans=0.0 +2024-07-27 14:12:12,546 INFO [train.py:1114] (3/4) Epoch 2, batch 8600, loss[loss=0.3832, simple_loss=0.4378, pruned_loss=0.1643, over 4809.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3708, pruned_loss=0.126, over 939193.16 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:12:16,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.81 vs. limit=15.0 +2024-07-27 14:12:25,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=25122.666666666668, ans=0.125 +2024-07-27 14:12:30,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25122.666666666668, ans=0.1 +2024-07-27 14:12:42,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=25149.333333333332, ans=0.125 +2024-07-27 14:12:50,763 INFO [train.py:1114] (3/4) Epoch 2, batch 8650, loss[loss=0.3328, simple_loss=0.3855, pruned_loss=0.14, over 4905.00 frames. ], tot_loss[loss=0.3107, simple_loss=0.3703, pruned_loss=0.1255, over 940483.76 frames. ], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:12:51,555 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:12:53,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.82 vs. limit=22.5 +2024-07-27 14:12:55,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. 
limit=6.0 +2024-07-27 14:13:10,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=25202.666666666668, ans=0.2 +2024-07-27 14:13:13,968 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.076e+01 6.621e+01 7.393e+01 8.155e+01 1.216e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 14:13:14,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=25202.666666666668, ans=0.0 +2024-07-27 14:13:21,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.76 vs. limit=10.0 +2024-07-27 14:13:23,800 INFO [train.py:1114] (3/4) Epoch 2, batch 8700, loss[loss=0.3444, simple_loss=0.4038, pruned_loss=0.1425, over 4758.00 frames. ], tot_loss[loss=0.3118, simple_loss=0.3712, pruned_loss=0.1262, over 937797.96 frames. ], batch size: 13, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:13:23,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=25229.333333333332, ans=0.125 +2024-07-27 14:13:25,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=25229.333333333332, ans=0.025 +2024-07-27 14:13:25,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.61 vs. limit=22.5 +2024-07-27 14:13:28,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=25229.333333333332, ans=0.125 +2024-07-27 14:13:30,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.54 vs. limit=22.5 +2024-07-27 14:13:31,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=25242.666666666668, ans=0.125 +2024-07-27 14:13:34,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-27 14:13:42,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25256.0, ans=0.1 +2024-07-27 14:14:00,737 INFO [train.py:1114] (3/4) Epoch 2, batch 8750, loss[loss=0.3276, simple_loss=0.3791, pruned_loss=0.138, over 4674.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3714, pruned_loss=0.1265, over 936680.24 frames. 
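Each `tot_loss[... over N frames ]` field in the batch summaries above is a frame-weighted running average: per-batch losses are accumulated weighted by the number of acoustic frames in the batch, so long utterances count for more. A minimal sketch of that bookkeeping, assuming a simple exponential decay of older batches (the class and the decay value are illustrative, not icefall's actual tracker); the example numbers are taken from the batch 8750 entry above.

```python
# Illustrative sketch: frame-weighted running loss statistics of the kind
# reported as "tot_loss[... over N frames]". Decay value is an assumption.
from collections import defaultdict


class RunningLossStats:
    """Accumulates per-frame loss sums so averages are weighted by frames."""

    def __init__(self, decay: float = 0.99) -> None:
        self.decay = decay               # older batches count progressively less
        self.sums = defaultdict(float)   # e.g. "loss", "simple_loss", ...
        self.frames = 0.0

    def update(self, batch_losses: dict, num_frames: float) -> None:
        for k in self.sums:              # decay previous frame-weighted totals
            self.sums[k] *= self.decay
        self.frames = self.frames * self.decay + num_frames
        for k, v in batch_losses.items():
            self.sums[k] += v * num_frames

    def averages(self) -> dict:
        return {k: s / max(self.frames, 1.0) for k, s in self.sums.items()}


stats = RunningLossStats()
stats.update({"loss": 0.3276, "simple_loss": 0.3791, "pruned_loss": 0.138}, 4674.0)
print(stats.averages(), f"over {stats.frames:.2f} frames")
```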
], batch size: 15, lr: 2.68e-02, grad_scale: 64.0 +2024-07-27 14:14:13,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=25296.0, ans=0.125 +2024-07-27 14:14:16,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=25296.0, ans=0.005370434782608696 +2024-07-27 14:14:18,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=25296.0, ans=0.125 +2024-07-27 14:14:19,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=25296.0, ans=0.05 +2024-07-27 14:14:31,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=25309.333333333332, ans=0.025 +2024-07-27 14:14:35,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=12.0 +2024-07-27 14:14:43,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.590e+01 6.384e+01 6.883e+01 7.910e+01 1.074e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 14:14:53,578 INFO [train.py:1114] (3/4) Epoch 2, batch 8800, loss[loss=0.2687, simple_loss=0.3516, pruned_loss=0.09295, over 4932.00 frames. ], tot_loss[loss=0.3125, simple_loss=0.3716, pruned_loss=0.1267, over 937614.54 frames. ], batch size: 14, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:15:11,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=25389.333333333332, ans=0.125 +2024-07-27 14:15:12,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=25389.333333333332, ans=0.07 +2024-07-27 14:15:13,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.10 vs. limit=15.0 +2024-07-27 14:15:15,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=25402.666666666668, ans=0.1 +2024-07-27 14:15:19,086 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:15:27,656 INFO [train.py:1114] (3/4) Epoch 2, batch 8850, loss[loss=0.3303, simple_loss=0.3902, pruned_loss=0.1351, over 4520.00 frames. ], tot_loss[loss=0.3112, simple_loss=0.3705, pruned_loss=0.1259, over 932229.21 frames. 
], batch size: 21, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:15:30,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=25429.333333333332, ans=0.2 +2024-07-27 14:15:32,425 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:15:46,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=25456.0, ans=0.1 +2024-07-27 14:15:55,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=25456.0, ans=0.2 +2024-07-27 14:16:00,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=25469.333333333332, ans=0.125 +2024-07-27 14:16:01,199 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.518e+01 6.996e+01 7.988e+01 1.039e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 14:16:05,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=25482.666666666668, ans=0.005329855072463768 +2024-07-27 14:16:11,056 INFO [train.py:1114] (3/4) Epoch 2, batch 8900, loss[loss=0.2333, simple_loss=0.2865, pruned_loss=0.09002, over 4931.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3711, pruned_loss=0.127, over 930114.30 frames. ], batch size: 12, lr: 2.67e-02, grad_scale: 64.0 +2024-07-27 14:16:16,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=25509.333333333332, ans=0.125 +2024-07-27 14:16:16,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=25509.333333333332, ans=0.0 +2024-07-27 14:16:19,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=25509.333333333332, ans=0.125 +2024-07-27 14:16:34,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=25536.0, ans=0.0 +2024-07-27 14:16:38,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-07-27 14:16:41,073 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.49 vs. limit=15.0 +2024-07-27 14:16:44,025 INFO [train.py:1114] (3/4) Epoch 2, batch 8950, loss[loss=0.3046, simple_loss=0.3622, pruned_loss=0.1235, over 4464.00 frames. ], tot_loss[loss=0.3113, simple_loss=0.3702, pruned_loss=0.1262, over 931134.42 frames. ], batch size: 21, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:16:52,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=25576.0, ans=0.025 +2024-07-27 14:16:54,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=25576.0, ans=0.07 +2024-07-27 14:16:54,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.38 vs. 
limit=15.0 +2024-07-27 14:16:59,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=25589.333333333332, ans=0.125 +2024-07-27 14:16:59,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.06 vs. limit=15.0 +2024-07-27 14:17:01,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=25589.333333333332, ans=0.2 +2024-07-27 14:17:02,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.50 vs. limit=10.0 +2024-07-27 14:17:04,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=25602.666666666668, ans=0.125 +2024-07-27 14:17:06,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=25602.666666666668, ans=0.0 +2024-07-27 14:17:06,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=25602.666666666668, ans=0.125 +2024-07-27 14:17:08,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.427e+01 6.847e+01 7.354e+01 1.255e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-27 14:17:11,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=25616.0, ans=0.025 +2024-07-27 14:17:18,095 INFO [train.py:1114] (3/4) Epoch 2, batch 9000, loss[loss=0.2809, simple_loss=0.3543, pruned_loss=0.1037, over 4640.00 frames. ], tot_loss[loss=0.3081, simple_loss=0.3678, pruned_loss=0.1242, over 934092.71 frames. ], batch size: 12, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:17:18,095 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 14:17:37,004 INFO [train.py:1146] (3/4) Epoch 2, validation: loss=0.2471, simple_loss=0.3424, pruned_loss=0.07587, over 944034.00 frames. +2024-07-27 14:17:37,005 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 14:17:38,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=25629.333333333332, ans=0.07 +2024-07-27 14:17:55,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=25656.0, ans=0.125 +2024-07-27 14:18:00,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=25669.333333333332, ans=0.125 +2024-07-27 14:18:18,630 INFO [train.py:1114] (3/4) Epoch 2, batch 9050, loss[loss=0.2968, simple_loss=0.3628, pruned_loss=0.1154, over 4588.00 frames. ], tot_loss[loss=0.3073, simple_loss=0.3671, pruned_loss=0.1238, over 934754.70 frames. ], batch size: 10, lr: 2.66e-02, grad_scale: 64.0 +2024-07-27 14:18:24,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=25696.0, ans=15.0 +2024-07-27 14:18:38,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.79 vs. limit=15.0 +2024-07-27 14:18:38,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.02 vs. 
limit=15.0 +2024-07-27 14:18:43,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=25722.666666666668, ans=0.125 +2024-07-27 14:18:48,201 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.394e+01 6.425e+01 6.926e+01 7.624e+01 1.076e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 14:18:51,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=25749.333333333332, ans=0.005271884057971014 +2024-07-27 14:18:52,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.70 vs. limit=15.0 +2024-07-27 14:18:58,026 INFO [train.py:1114] (3/4) Epoch 2, batch 9100, loss[loss=0.2991, simple_loss=0.3812, pruned_loss=0.1085, over 4930.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.367, pruned_loss=0.1232, over 937028.33 frames. ], batch size: 14, lr: 2.65e-02, grad_scale: 128.0 +2024-07-27 14:18:59,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=25762.666666666668, ans=0.04949747468305833 +2024-07-27 14:19:02,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=25762.666666666668, ans=0.035 +2024-07-27 14:19:02,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.10 vs. limit=15.0 +2024-07-27 14:19:21,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=25802.666666666668, ans=0.125 +2024-07-27 14:19:22,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.85 vs. limit=15.0 +2024-07-27 14:19:30,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=25816.0, ans=0.005257391304347827 +2024-07-27 14:19:32,200 INFO [train.py:1114] (3/4) Epoch 2, batch 9150, loss[loss=0.2919, simple_loss=0.3542, pruned_loss=0.1147, over 4802.00 frames. ], tot_loss[loss=0.3089, simple_loss=0.3695, pruned_loss=0.1242, over 935758.05 frames. ], batch size: 14, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:19:45,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=25842.666666666668, ans=0.125 +2024-07-27 14:19:56,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=25856.0, ans=15.0 +2024-07-27 14:20:02,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.426e+01 6.682e+01 7.261e+01 8.100e+01 1.344e+02, threshold=1.452e+02, percent-clipped=0.0 +2024-07-27 14:20:11,596 INFO [train.py:1114] (3/4) Epoch 2, batch 9200, loss[loss=0.2625, simple_loss=0.3275, pruned_loss=0.0988, over 4852.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3673, pruned_loss=0.1228, over 937772.38 frames. 
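The `grad_scale:` field in the `Epoch 2, batch ...` summaries is the Native AMP loss-scale factor: it doubles from 64.0 to 128.0 at batch 9100 above and is back at 64.0 by batch 9150, consistent with a growth step followed by a backoff on overflow. A sketch of the mechanism using PyTorch's `torch.cuda.amp.GradScaler`; the model, optimizer, and data below are placeholders, and the growth interval is an assumption.

```python
# Sketch of AMP dynamic loss scaling, which is what the "grad_scale: 64.0 /
# 128.0" fields track: the scale doubles after growth_interval successful
# steps and is halved when a step produces inf/nan gradients.
import torch

model = torch.nn.Linear(80, 512).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=2.7e-2)
scaler = torch.cuda.amp.GradScaler(init_scale=64.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=2000)

for step in range(10):
    x = torch.randn(16, 80, device="cuda")
    with torch.cuda.amp.autocast():      # Native AMP forward pass
        loss = model(x).pow(2).mean()
    optimizer.zero_grad()
    scaler.scale(loss).backward()        # scale loss to protect fp16 grads
    scaler.step(optimizer)               # unscales; skips step on inf/nan
    scaler.update()                      # grow or back off the scale
    print(f"step {step}: grad_scale: {scaler.get_scale()}")
```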
], batch size: 12, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:20:12,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25896.0, ans=0.1 +2024-07-27 14:20:24,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=25922.666666666668, ans=0.2 +2024-07-27 14:20:24,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-27 14:20:25,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=25922.666666666668, ans=0.125 +2024-07-27 14:20:32,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=25936.0, ans=0.0 +2024-07-27 14:20:44,396 INFO [train.py:1114] (3/4) Epoch 2, batch 9250, loss[loss=0.2833, simple_loss=0.3511, pruned_loss=0.1077, over 4635.00 frames. ], tot_loss[loss=0.3065, simple_loss=0.3673, pruned_loss=0.1228, over 938423.06 frames. ], batch size: 13, lr: 2.65e-02, grad_scale: 64.0 +2024-07-27 14:20:44,581 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:20:50,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=25976.0, ans=0.1 +2024-07-27 14:20:59,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.89 vs. limit=15.0 +2024-07-27 14:20:59,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=25989.333333333332, ans=0.0 +2024-07-27 14:21:07,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=26002.666666666668, ans=0.2 +2024-07-27 14:21:07,929 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.437e+01 6.326e+01 6.900e+01 7.743e+01 1.339e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-27 14:21:08,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=26002.666666666668, ans=0.0 +2024-07-27 14:21:11,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.87 vs. limit=15.0 +2024-07-27 14:21:12,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=26016.0, ans=0.125 +2024-07-27 14:21:16,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=26016.0, ans=0.125 +2024-07-27 14:21:17,301 INFO [train.py:1114] (3/4) Epoch 2, batch 9300, loss[loss=0.2523, simple_loss=0.3218, pruned_loss=0.09141, over 4785.00 frames. ], tot_loss[loss=0.306, simple_loss=0.3667, pruned_loss=0.1227, over 938258.11 frames. ], batch size: 12, lr: 2.64e-02, grad_scale: 64.0 +2024-07-27 14:21:27,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.95 vs. 
limit=10.0 +2024-07-27 14:21:28,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.42 vs. limit=15.0 +2024-07-27 14:21:30,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=26056.0, ans=0.125 +2024-07-27 14:21:34,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=26056.0, ans=0.125 +2024-07-27 14:21:37,373 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:21:38,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=26069.333333333332, ans=0.125 +2024-07-27 14:21:39,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=26069.333333333332, ans=0.125 +2024-07-27 14:21:47,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=26082.666666666668, ans=0.125 +2024-07-27 14:21:49,569 INFO [train.py:1114] (3/4) Epoch 2, batch 9350, loss[loss=0.287, simple_loss=0.3523, pruned_loss=0.1108, over 4806.00 frames. ], tot_loss[loss=0.3072, simple_loss=0.368, pruned_loss=0.1232, over 935197.17 frames. ], batch size: 11, lr: 2.64e-02, grad_scale: 64.0 +2024-07-27 14:21:52,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=26096.0, ans=0.0 +2024-07-27 14:21:59,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26109.333333333332, ans=0.1 +2024-07-27 14:22:04,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=26109.333333333332, ans=0.2 +2024-07-27 14:22:05,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=26122.666666666668, ans=0.0 +2024-07-27 14:22:10,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=26122.666666666668, ans=0.09899494936611666 +2024-07-27 14:22:13,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=26136.0, ans=0.025 +2024-07-27 14:22:16,013 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.512e+01 7.104e+01 8.363e+01 3.070e+02, threshold=1.421e+02, percent-clipped=1.0 +2024-07-27 14:22:17,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=26136.0, ans=0.025 +2024-07-27 14:22:26,801 INFO [train.py:1114] (3/4) Epoch 2, batch 9400, loss[loss=0.3426, simple_loss=0.3903, pruned_loss=0.1475, over 4690.00 frames. ], tot_loss[loss=0.3094, simple_loss=0.369, pruned_loss=0.1249, over 933061.30 frames. ], batch size: 13, lr: 2.64e-02, grad_scale: 64.0 +2024-07-27 14:22:36,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.83 vs. 
limit=10.0 +2024-07-27 14:22:40,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26189.333333333332, ans=0.1 +2024-07-27 14:22:51,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.80 vs. limit=22.5 +2024-07-27 14:22:58,567 INFO [train.py:1114] (3/4) Epoch 2, batch 9450, loss[loss=0.2851, simple_loss=0.3245, pruned_loss=0.1228, over 4814.00 frames. ], tot_loss[loss=0.308, simple_loss=0.3679, pruned_loss=0.124, over 932505.89 frames. ], batch size: 11, lr: 2.63e-02, grad_scale: 64.0 +2024-07-27 14:23:14,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.05 vs. limit=15.0 +2024-07-27 14:23:20,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=26269.333333333332, ans=0.125 +2024-07-27 14:23:23,991 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.999e+01 6.119e+01 6.627e+01 7.680e+01 1.096e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-27 14:23:32,700 INFO [train.py:1114] (3/4) Epoch 2, batch 9500, loss[loss=0.2617, simple_loss=0.3281, pruned_loss=0.09764, over 4700.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3677, pruned_loss=0.1233, over 934672.28 frames. ], batch size: 12, lr: 2.63e-02, grad_scale: 64.0 +2024-07-27 14:23:32,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=26296.0, ans=0.00515304347826087 +2024-07-27 14:23:44,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=26309.333333333332, ans=0.1 +2024-07-27 14:23:46,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=26322.666666666668, ans=0.05 +2024-07-27 14:23:58,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=26349.333333333332, ans=0.125 +2024-07-27 14:24:05,364 INFO [train.py:1114] (3/4) Epoch 2, batch 9550, loss[loss=0.269, simple_loss=0.3347, pruned_loss=0.1017, over 4773.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.3674, pruned_loss=0.123, over 932373.60 frames. ], batch size: 12, lr: 2.63e-02, grad_scale: 64.0 +2024-07-27 14:24:06,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=26362.666666666668, ans=0.015 +2024-07-27 14:24:17,608 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:24:18,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=26362.666666666668, ans=0.2 +2024-07-27 14:24:19,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=26362.666666666668, ans=0.09899494936611666 +2024-07-27 14:24:27,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=26389.333333333332, ans=0.1 +2024-07-27 14:24:36,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.30 vs. 
limit=15.0 +2024-07-27 14:24:37,641 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.234e+01 6.396e+01 7.111e+01 8.222e+01 1.095e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 14:24:39,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=26402.666666666668, ans=10.0 +2024-07-27 14:24:47,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=26416.0, ans=0.125 +2024-07-27 14:24:50,601 INFO [train.py:1114] (3/4) Epoch 2, batch 9600, loss[loss=0.3962, simple_loss=0.4315, pruned_loss=0.1805, over 3287.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3665, pruned_loss=0.1226, over 930856.13 frames. ], batch size: 35, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:24:56,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=26429.333333333332, ans=0.005124057971014493 +2024-07-27 14:25:05,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.78 vs. limit=22.5 +2024-07-27 14:25:10,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=26442.666666666668, ans=0.2 +2024-07-27 14:25:16,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=26456.0, ans=0.125 +2024-07-27 14:25:30,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=26469.333333333332, ans=0.0 +2024-07-27 14:25:34,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=26469.333333333332, ans=0.125 +2024-07-27 14:25:40,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=26482.666666666668, ans=0.125 +2024-07-27 14:25:41,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.68 vs. limit=22.5 +2024-07-27 14:25:48,570 INFO [train.py:1114] (3/4) Epoch 2, batch 9650, loss[loss=0.2983, simple_loss=0.3846, pruned_loss=0.106, over 4832.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3676, pruned_loss=0.1233, over 926345.33 frames. ], batch size: 16, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:25:58,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=26509.333333333332, ans=0.125 +2024-07-27 14:26:03,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.36 vs. 
limit=15.0 +2024-07-27 14:26:07,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=26522.666666666668, ans=10.0 +2024-07-27 14:26:14,867 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.044e+01 6.379e+01 6.976e+01 8.027e+01 1.621e+02, threshold=1.395e+02, percent-clipped=2.0 +2024-07-27 14:26:16,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=26549.333333333332, ans=0.2 +2024-07-27 14:26:19,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=26549.333333333332, ans=0.2 +2024-07-27 14:26:25,381 INFO [train.py:1114] (3/4) Epoch 2, batch 9700, loss[loss=0.3302, simple_loss=0.3818, pruned_loss=0.1393, over 4200.00 frames. ], tot_loss[loss=0.308, simple_loss=0.3684, pruned_loss=0.1238, over 924096.72 frames. ], batch size: 25, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:26:26,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=26562.666666666668, ans=0.125 +2024-07-27 14:26:37,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=26576.0, ans=0.0 +2024-07-27 14:26:45,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=26602.666666666668, ans=0.125 +2024-07-27 14:26:50,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=26602.666666666668, ans=0.005086376811594203 +2024-07-27 14:27:00,094 INFO [train.py:1114] (3/4) Epoch 2, batch 9750, loss[loss=0.3261, simple_loss=0.3837, pruned_loss=0.1342, over 4674.00 frames. ], tot_loss[loss=0.3071, simple_loss=0.3679, pruned_loss=0.1231, over 924652.42 frames. ], batch size: 15, lr: 2.62e-02, grad_scale: 64.0 +2024-07-27 14:27:07,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=26642.666666666668, ans=0.2 +2024-07-27 14:27:08,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26642.666666666668, ans=0.1 +2024-07-27 14:27:32,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=26656.0, ans=0.1 +2024-07-27 14:27:35,447 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:27:37,702 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.232e+01 6.802e+01 7.534e+01 1.606e+02, threshold=1.360e+02, percent-clipped=1.0 +2024-07-27 14:27:42,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=26682.666666666668, ans=0.04949747468305833 +2024-07-27 14:27:46,368 INFO [train.py:1114] (3/4) Epoch 2, batch 9800, loss[loss=0.2648, simple_loss=0.3258, pruned_loss=0.1019, over 4704.00 frames. ], tot_loss[loss=0.3066, simple_loss=0.367, pruned_loss=0.1231, over 924292.16 frames. 
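The ubiquitous `ScheduledFloat: name=..., batch_count=..., ans=...` lines print hyperparameters (dropout probabilities, skip rates, balancer probabilities) whose values follow a schedule over `batch_count` rather than staying fixed. A minimal sketch of one plausible form, piecewise-linear interpolation between breakpoints, follows; the breakpoints in the example are invented for illustration and are not the schedules used here.

```python
# Minimal sketch of a scheduled hyperparameter in the spirit of
# "ScheduledFloat(name=..., batch_count=..., ans=...)": linear interpolation
# between (batch, value) breakpoints, clamped at both ends.
import bisect


def scheduled_float(batch_count: float,
                    points: list[tuple[float, float]]) -> float:
    xs = [x for x, _ in points]
    if batch_count <= xs[0]:
        return points[0][1]
    if batch_count >= xs[-1]:
        return points[-1][1]
    i = bisect.bisect_right(xs, batch_count)
    (x0, y0), (x1, y1) = points[i - 1], points[i]
    return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)


# e.g. a dropout that anneals from 0.3 to 0.1 over the first 20k batches:
drop = scheduled_float(batch_count=25002.7, points=[(0.0, 0.3), (20000.0, 0.1)])
print(f"dropout_p={drop}")  # clamped to 0.1 past the last breakpoint
```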
], batch size: 12, lr: 2.61e-02, grad_scale: 64.0 +2024-07-27 14:28:05,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=26736.0, ans=0.0 +2024-07-27 14:28:12,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.35 vs. limit=22.5 +2024-07-27 14:28:15,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=26749.333333333332, ans=0.2 +2024-07-27 14:28:18,175 INFO [train.py:1114] (3/4) Epoch 2, batch 9850, loss[loss=0.3298, simple_loss=0.3891, pruned_loss=0.1353, over 4913.00 frames. ], tot_loss[loss=0.3062, simple_loss=0.3669, pruned_loss=0.1227, over 926475.29 frames. ], batch size: 15, lr: 2.61e-02, grad_scale: 64.0 +2024-07-27 14:28:25,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.59 vs. limit=15.0 +2024-07-27 14:28:28,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=26762.666666666668, ans=0.125 +2024-07-27 14:28:29,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=26762.666666666668, ans=0.125 +2024-07-27 14:28:30,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=26776.0, ans=0.125 +2024-07-27 14:28:31,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=26776.0, ans=0.5 +2024-07-27 14:28:38,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=26789.333333333332, ans=0.0 +2024-07-27 14:28:42,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.81 vs. limit=15.0 +2024-07-27 14:28:48,006 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.332e+01 6.564e+01 7.229e+01 8.186e+01 1.183e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 14:28:50,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=26816.0, ans=0.125 +2024-07-27 14:28:50,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=26816.0, ans=0.0 +2024-07-27 14:28:57,016 INFO [train.py:1114] (3/4) Epoch 2, batch 9900, loss[loss=0.3115, simple_loss=0.3835, pruned_loss=0.1198, over 4853.00 frames. ], tot_loss[loss=0.308, simple_loss=0.368, pruned_loss=0.124, over 925991.04 frames. ], batch size: 16, lr: 2.61e-02, grad_scale: 64.0 +2024-07-27 14:29:07,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=26842.666666666668, ans=0.125 +2024-07-27 14:29:09,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=26856.0, ans=0.125 +2024-07-27 14:29:28,779 INFO [train.py:1114] (3/4) Epoch 2, batch 9950, loss[loss=0.2602, simple_loss=0.3255, pruned_loss=0.09744, over 4488.00 frames. ], tot_loss[loss=0.3069, simple_loss=0.3671, pruned_loss=0.1233, over 928192.00 frames. 
], batch size: 10, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:29:33,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=26896.0, ans=0.95 +2024-07-27 14:29:38,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=26909.333333333332, ans=0.125 +2024-07-27 14:29:40,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=26909.333333333332, ans=0.125 +2024-07-27 14:29:56,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=26909.333333333332, ans=0.125 +2024-07-27 14:30:16,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=26922.666666666668, ans=0.005016811594202898 +2024-07-27 14:30:18,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=26922.666666666668, ans=0.125 +2024-07-27 14:30:23,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=26936.0, ans=0.0 +2024-07-27 14:30:27,307 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.388e+01 7.080e+01 7.845e+01 1.130e+02, threshold=1.416e+02, percent-clipped=0.0 +2024-07-27 14:30:31,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.82 vs. limit=15.0 +2024-07-27 14:30:35,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.25 vs. limit=15.0 +2024-07-27 14:30:35,768 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:30:37,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=26949.333333333332, ans=0.035 +2024-07-27 14:30:41,811 INFO [train.py:1114] (3/4) Epoch 2, batch 10000, loss[loss=0.3003, simple_loss=0.3803, pruned_loss=0.1101, over 4626.00 frames. ], tot_loss[loss=0.3119, simple_loss=0.3719, pruned_loss=0.1259, over 925985.67 frames. 
], batch size: 16, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:30:45,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=26962.666666666668, ans=0.125 +2024-07-27 14:30:55,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=26976.0, ans=15.0 +2024-07-27 14:31:03,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=26989.333333333332, ans=0.1 +2024-07-27 14:31:04,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=27002.666666666668, ans=0.0 +2024-07-27 14:31:04,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=27002.666666666668, ans=0.125 +2024-07-27 14:31:05,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=27002.666666666668, ans=0.004999420289855072 +2024-07-27 14:31:07,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=27002.666666666668, ans=0.025 +2024-07-27 14:31:09,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=27002.666666666668, ans=0.05 +2024-07-27 14:31:12,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=27016.0, ans=0.125 +2024-07-27 14:31:18,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=27016.0, ans=0.0 +2024-07-27 14:31:24,566 INFO [train.py:1114] (3/4) Epoch 2, batch 10050, loss[loss=0.4109, simple_loss=0.4252, pruned_loss=0.1983, over 3271.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3775, pruned_loss=0.1303, over 913979.02 frames. ], batch size: 37, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:31:34,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=15.0 +2024-07-27 14:31:34,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=27042.666666666668, ans=0.0 +2024-07-27 14:31:50,392 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.657e+01 6.782e+01 7.547e+01 8.673e+01 1.246e+02, threshold=1.509e+02, percent-clipped=0.0 +2024-07-27 14:31:51,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27069.333333333332, ans=0.1 +2024-07-27 14:31:54,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27082.666666666668, ans=0.125 +2024-07-27 14:31:59,882 INFO [train.py:1114] (3/4) Epoch 2, batch 10100, loss[loss=0.3758, simple_loss=0.4154, pruned_loss=0.1682, over 3070.00 frames. ], tot_loss[loss=0.3322, simple_loss=0.3854, pruned_loss=0.1395, over 861302.07 frames. 
], batch size: 35, lr: 2.60e-02, grad_scale: 64.0 +2024-07-27 14:32:14,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=27122.666666666668, ans=0.125 +2024-07-27 14:32:19,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=27136.0, ans=0.125 +2024-07-27 14:32:22,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=27136.0, ans=0.004970434782608696 +2024-07-27 14:32:36,624 INFO [train.py:1114] (3/4) Epoch 2, batch 10150, loss[loss=0.4231, simple_loss=0.4408, pruned_loss=0.2027, over 2998.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3901, pruned_loss=0.1452, over 819263.02 frames. ], batch size: 35, lr: 2.59e-02, grad_scale: 64.0 +2024-07-27 14:32:44,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=27162.666666666668, ans=0.125 +2024-07-27 14:32:46,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=27176.0, ans=0.125 +2024-07-27 14:32:46,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=27176.0, ans=0.025 +2024-07-27 14:32:46,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.80 vs. limit=10.0 +2024-07-27 14:32:48,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=27176.0, ans=0.07 +2024-07-27 14:32:51,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=27176.0, ans=0.025 +2024-07-27 14:32:57,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=27189.333333333332, ans=0.004958840579710145 +2024-07-27 14:33:01,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=27189.333333333332, ans=0.125 +2024-07-27 14:33:04,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27202.666666666668, ans=0.1 +2024-07-27 14:33:07,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.821e+01 6.551e+01 7.013e+01 7.617e+01 1.384e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-27 14:33:09,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-27 14:33:10,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=27216.0, ans=0.1 +2024-07-27 14:33:16,420 INFO [train.py:1114] (3/4) Epoch 2, batch 10200, loss[loss=0.3568, simple_loss=0.3977, pruned_loss=0.158, over 3502.00 frames. ], tot_loss[loss=0.3482, simple_loss=0.3943, pruned_loss=0.151, over 788334.59 frames. 
], batch size: 38, lr: 2.59e-02, grad_scale: 64.0 +2024-07-27 14:33:21,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=27229.333333333332, ans=0.0 +2024-07-27 14:33:27,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=27242.666666666668, ans=0.125 +2024-07-27 14:34:07,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=27260.0, ans=0.07 +2024-07-27 14:34:23,832 INFO [train.py:1114] (3/4) Epoch 3, batch 0, loss[loss=0.3036, simple_loss=0.3594, pruned_loss=0.1239, over 4856.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.3594, pruned_loss=0.1239, over 4856.00 frames. ], batch size: 12, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:34:23,832 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 14:34:35,397 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2558, simple_loss=0.3526, pruned_loss=0.07947, over 944034.00 frames. +2024-07-27 14:34:35,658 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 14:34:37,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=27260.0, ans=0.004943478260869566 +2024-07-27 14:34:55,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=27273.333333333332, ans=0.2 +2024-07-27 14:34:55,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.04 vs. limit=22.5 +2024-07-27 14:34:58,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.77 vs. limit=22.5 +2024-07-27 14:35:03,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.50 vs. limit=5.0 +2024-07-27 14:35:10,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27300.0, ans=0.1 +2024-07-27 14:35:13,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=27300.0, ans=0.025 +2024-07-27 14:35:21,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27313.333333333332, ans=0.1 +2024-07-27 14:35:23,881 INFO [train.py:1114] (3/4) Epoch 3, batch 50, loss[loss=0.2683, simple_loss=0.3269, pruned_loss=0.1049, over 4610.00 frames. ], tot_loss[loss=0.3175, simple_loss=0.378, pruned_loss=0.1285, over 206050.61 frames. 
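The `Computing validation loss`, `Epoch 3, validation: loss=0.2558 ... over 944034.00 frames`, and `Maximum memory allocated so far is 4142MB` lines above come from a periodic held-out pass at epoch boundaries. A sketch of such a pass follows; `compute_loss` and `valid_loader` are placeholders, and `torch.cuda.max_memory_allocated` is a real PyTorch API that yields the kind of figure reported in the memory line.

```python
# Sketch of a periodic validation pass: frame-weighted eval loss under
# torch.no_grad(), plus the peak-memory report seen in the logs.
import torch


def validate(model, valid_loader, compute_loss, device) -> dict:
    model.eval()
    tot = {"loss": 0.0, "frames": 0.0}
    with torch.no_grad():                # no autograd state during eval
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch, device)
            tot["loss"] += loss.item() * num_frames
            tot["frames"] += num_frames
    model.train()                        # restore training mode
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={tot['loss'] / tot['frames']:.4f}, "
          f"over {tot['frames']:.2f} frames")
    print(f"Maximum memory allocated so far is {mb}MB")
    return tot
```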
], batch size: 11, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:35:25,348 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:35:28,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=27326.666666666668, ans=0.1 +2024-07-27 14:35:33,789 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.475e+01 6.508e+01 7.033e+01 7.791e+01 1.183e+02, threshold=1.407e+02, percent-clipped=0.0 +2024-07-27 14:35:33,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=27340.0, ans=0.125 +2024-07-27 14:35:37,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=27340.0, ans=0.004926086956521739 +2024-07-27 14:35:53,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=27366.666666666668, ans=0.125 +2024-07-27 14:36:02,364 INFO [train.py:1114] (3/4) Epoch 3, batch 100, loss[loss=0.2729, simple_loss=0.3324, pruned_loss=0.1067, over 4638.00 frames. ], tot_loss[loss=0.3087, simple_loss=0.3708, pruned_loss=0.1233, over 365050.75 frames. ], batch size: 12, lr: 2.46e-02, grad_scale: 64.0 +2024-07-27 14:36:09,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=27406.666666666668, ans=0.0 +2024-07-27 14:36:15,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=27420.0, ans=0.0 +2024-07-27 14:36:17,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=27420.0, ans=0.125 +2024-07-27 14:36:37,592 INFO [train.py:1114] (3/4) Epoch 3, batch 150, loss[loss=0.246, simple_loss=0.3177, pruned_loss=0.08715, over 4613.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.3643, pruned_loss=0.119, over 493778.34 frames. ], batch size: 11, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:36:39,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.71 vs. limit=15.0 +2024-07-27 14:36:47,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.350e+01 6.333e+01 7.071e+01 8.102e+01 1.073e+02, threshold=1.414e+02, percent-clipped=0.0 +2024-07-27 14:37:11,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.95 vs. limit=15.0 +2024-07-27 14:37:14,977 INFO [train.py:1114] (3/4) Epoch 3, batch 200, loss[loss=0.3261, simple_loss=0.3853, pruned_loss=0.1335, over 4469.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3615, pruned_loss=0.117, over 593141.98 frames. ], batch size: 21, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:37:33,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=27553.333333333332, ans=0.2 +2024-07-27 14:37:34,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27566.666666666668, ans=0.1 +2024-07-27 14:37:35,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.58 vs. 
limit=22.5 +2024-07-27 14:37:39,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27566.666666666668, ans=0.125 +2024-07-27 14:37:43,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=7.06 vs. limit=6.0 +2024-07-27 14:37:48,657 INFO [train.py:1114] (3/4) Epoch 3, batch 250, loss[loss=0.3268, simple_loss=0.395, pruned_loss=0.1293, over 4616.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3625, pruned_loss=0.1173, over 669897.07 frames. ], batch size: 16, lr: 2.45e-02, grad_scale: 64.0 +2024-07-27 14:37:54,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=27593.333333333332, ans=0.2 +2024-07-27 14:37:57,411 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.405e+01 7.025e+01 7.906e+01 1.155e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-27 14:38:01,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=27620.0, ans=0.125 +2024-07-27 14:38:02,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=27620.0, ans=0.125 +2024-07-27 14:38:15,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=27633.333333333332, ans=0.125 +2024-07-27 14:38:24,173 INFO [train.py:1114] (3/4) Epoch 3, batch 300, loss[loss=0.2849, simple_loss=0.3528, pruned_loss=0.1085, over 4809.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3607, pruned_loss=0.1174, over 730299.38 frames. ], batch size: 15, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:38:42,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=27686.666666666668, ans=0.2 +2024-07-27 14:38:47,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=27700.0, ans=0.125 +2024-07-27 14:38:55,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=27700.0, ans=0.0 +2024-07-27 14:39:11,995 INFO [train.py:1114] (3/4) Epoch 3, batch 350, loss[loss=0.2405, simple_loss=0.3119, pruned_loss=0.08456, over 4928.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3618, pruned_loss=0.1176, over 776312.02 frames. ], batch size: 12, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:39:18,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.20 vs. limit=15.0 +2024-07-27 14:39:18,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.54 vs. limit=15.0 +2024-07-27 14:39:21,243 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.509e+01 6.194e+01 6.978e+01 7.817e+01 1.142e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 14:39:22,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.31 vs. limit=5.0 +2024-07-27 14:39:28,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.70 vs. 
limit=22.5 +2024-07-27 14:39:31,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.72 vs. limit=22.5 +2024-07-27 14:39:37,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=27766.666666666668, ans=0.125 +2024-07-27 14:39:39,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=27780.0, ans=0.125 +2024-07-27 14:39:45,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=27780.0, ans=0.0 +2024-07-27 14:39:46,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.80 vs. limit=15.0 +2024-07-27 14:39:53,818 INFO [train.py:1114] (3/4) Epoch 3, batch 400, loss[loss=0.3078, simple_loss=0.3631, pruned_loss=0.1263, over 4704.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3604, pruned_loss=0.1165, over 813602.72 frames. ], batch size: 13, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:39:55,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.64 vs. limit=15.0 +2024-07-27 14:40:06,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=27806.666666666668, ans=0.125 +2024-07-27 14:40:06,989 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 14:40:13,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=27820.0, ans=0.1 +2024-07-27 14:40:18,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=27833.333333333332, ans=0.125 +2024-07-27 14:40:26,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=27846.666666666668, ans=0.0 +2024-07-27 14:40:29,611 INFO [train.py:1114] (3/4) Epoch 3, batch 450, loss[loss=0.3637, simple_loss=0.4208, pruned_loss=0.1533, over 4638.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3612, pruned_loss=0.1174, over 838521.68 frames. ], batch size: 13, lr: 2.44e-02, grad_scale: 64.0 +2024-07-27 14:40:39,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=27873.333333333332, ans=0.125 +2024-07-27 14:40:40,713 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.175e+01 6.156e+01 6.961e+01 7.854e+01 1.209e+02, threshold=1.392e+02, percent-clipped=0.0 +2024-07-27 14:40:45,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=27873.333333333332, ans=0.125 +2024-07-27 14:40:51,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=27886.666666666668, ans=0.125 +2024-07-27 14:41:09,147 INFO [train.py:1114] (3/4) Epoch 3, batch 500, loss[loss=0.3027, simple_loss=0.3704, pruned_loss=0.1175, over 4673.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3613, pruned_loss=0.1178, over 861015.77 frames. 
], batch size: 15, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:41:26,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=27953.333333333332, ans=0.0 +2024-07-27 14:41:29,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.36 vs. limit=22.5 +2024-07-27 14:41:48,918 INFO [train.py:1114] (3/4) Epoch 3, batch 550, loss[loss=0.3292, simple_loss=0.3805, pruned_loss=0.1389, over 4634.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3618, pruned_loss=0.1181, over 877382.17 frames. ], batch size: 17, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:41:53,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.49 vs. limit=10.0 +2024-07-27 14:41:59,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.181e+01 6.683e+01 7.809e+01 1.184e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-27 14:42:00,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=28006.666666666668, ans=0.2 +2024-07-27 14:42:08,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=28006.666666666668, ans=0.125 +2024-07-27 14:42:15,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=28020.0, ans=0.125 +2024-07-27 14:42:29,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=28033.333333333332, ans=0.125 +2024-07-27 14:42:34,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.72 vs. limit=22.5 +2024-07-27 14:42:46,419 INFO [train.py:1114] (3/4) Epoch 3, batch 600, loss[loss=0.2902, simple_loss=0.3572, pruned_loss=0.1116, over 4617.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3623, pruned_loss=0.118, over 892064.52 frames. ], batch size: 16, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:42:49,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=28060.0, ans=0.125 +2024-07-27 14:43:00,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=28073.333333333332, ans=0.125 +2024-07-27 14:43:19,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.82 vs. limit=15.0 +2024-07-27 14:43:30,816 INFO [train.py:1114] (3/4) Epoch 3, batch 650, loss[loss=0.3426, simple_loss=0.401, pruned_loss=0.1421, over 4756.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3615, pruned_loss=0.118, over 903694.01 frames. 
], batch size: 13, lr: 2.43e-02, grad_scale: 64.0 +2024-07-27 14:43:32,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=28126.666666666668, ans=0.004755072463768116 +2024-07-27 14:43:32,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=28126.666666666668, ans=0.0 +2024-07-27 14:43:39,813 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.211e+01 6.879e+01 7.737e+01 1.031e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 14:43:40,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=28140.0, ans=0.0 +2024-07-27 14:43:45,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=28153.333333333332, ans=0.025 +2024-07-27 14:43:48,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=28153.333333333332, ans=0.125 +2024-07-27 14:44:08,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.71 vs. limit=22.5 +2024-07-27 14:44:23,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28193.333333333332, ans=0.1 +2024-07-27 14:44:23,208 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.43 vs. limit=22.5 +2024-07-27 14:44:23,645 INFO [train.py:1114] (3/4) Epoch 3, batch 700, loss[loss=0.2991, simple_loss=0.3605, pruned_loss=0.1188, over 4632.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3628, pruned_loss=0.1181, over 911636.66 frames. ], batch size: 12, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:44:27,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.45 vs. limit=15.0 +2024-07-27 14:44:47,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=28206.666666666668, ans=0.0 +2024-07-27 14:44:56,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=28220.0, ans=0.2 +2024-07-27 14:45:10,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-07-27 14:45:18,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28233.333333333332, ans=0.125 +2024-07-27 14:45:24,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=28246.666666666668, ans=0.2 +2024-07-27 14:45:39,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28246.666666666668, ans=0.1 +2024-07-27 14:45:41,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=28246.666666666668, ans=0.125 +2024-07-27 14:45:42,576 INFO [train.py:1114] (3/4) Epoch 3, batch 750, loss[loss=0.3191, simple_loss=0.3669, pruned_loss=0.1357, over 4689.00 frames. 
], tot_loss[loss=0.2987, simple_loss=0.3617, pruned_loss=0.1178, over 918190.32 frames. ], batch size: 13, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:46:01,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=28260.0, ans=0.004726086956521739 +2024-07-27 14:46:08,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.231e+01 6.433e+01 7.255e+01 8.187e+01 1.605e+02, threshold=1.451e+02, percent-clipped=1.0 +2024-07-27 14:46:23,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=28300.0, ans=0.2 +2024-07-27 14:46:40,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=28313.333333333332, ans=0.025 +2024-07-27 14:46:49,667 INFO [train.py:1114] (3/4) Epoch 3, batch 800, loss[loss=0.2425, simple_loss=0.3017, pruned_loss=0.09162, over 4860.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3619, pruned_loss=0.1178, over 923629.58 frames. ], batch size: 12, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:47:07,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=28326.666666666668, ans=0.04949747468305833 +2024-07-27 14:47:08,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. limit=10.0 +2024-07-27 14:47:13,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=28340.0, ans=0.125 +2024-07-27 14:48:01,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.49 vs. limit=10.0 +2024-07-27 14:48:09,325 INFO [train.py:1114] (3/4) Epoch 3, batch 850, loss[loss=0.3153, simple_loss=0.3818, pruned_loss=0.1244, over 4657.00 frames. ], tot_loss[loss=0.2995, simple_loss=0.3626, pruned_loss=0.1181, over 928052.08 frames. ], batch size: 14, lr: 2.42e-02, grad_scale: 64.0 +2024-07-27 14:48:21,521 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.186e+01 6.419e+01 6.895e+01 7.641e+01 1.957e+02, threshold=1.379e+02, percent-clipped=1.0 +2024-07-27 14:48:34,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=28420.0, ans=0.015 +2024-07-27 14:48:35,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.14 vs. limit=6.0 +2024-07-27 14:48:46,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=28446.666666666668, ans=0.125 +2024-07-27 14:48:48,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=28446.666666666668, ans=0.125 +2024-07-27 14:48:49,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.87 vs. limit=22.5 +2024-07-27 14:48:52,061 INFO [train.py:1114] (3/4) Epoch 3, batch 900, loss[loss=0.2276, simple_loss=0.3006, pruned_loss=0.07733, over 4840.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3616, pruned_loss=0.1178, over 928643.36 frames. 
], batch size: 12, lr: 2.41e-02, grad_scale: 64.0 +2024-07-27 14:48:56,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.10 vs. limit=12.0 +2024-07-27 14:49:33,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28486.666666666668, ans=0.1 +2024-07-27 14:50:08,972 INFO [train.py:1114] (3/4) Epoch 3, batch 950, loss[loss=0.2632, simple_loss=0.3193, pruned_loss=0.1036, over 4773.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3619, pruned_loss=0.118, over 930459.02 frames. ], batch size: 12, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:50:09,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28526.666666666668, ans=0.1 +2024-07-27 14:50:23,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.24 vs. limit=12.0 +2024-07-27 14:50:23,821 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.236e+01 6.152e+01 6.859e+01 7.763e+01 1.125e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 14:50:24,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. limit=6.0 +2024-07-27 14:50:34,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.45 vs. limit=15.0 +2024-07-27 14:50:38,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.48 vs. limit=15.0 +2024-07-27 14:50:40,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=28566.666666666668, ans=0.125 +2024-07-27 14:50:43,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=28580.0, ans=0.125 +2024-07-27 14:51:03,511 INFO [train.py:1114] (3/4) Epoch 3, batch 1000, loss[loss=0.2561, simple_loss=0.331, pruned_loss=0.09058, over 4957.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3623, pruned_loss=0.1186, over 930635.16 frames. ], batch size: 13, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:51:09,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.05 vs. limit=15.0 +2024-07-27 14:51:18,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.33 vs. limit=15.0 +2024-07-27 14:51:34,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=28633.333333333332, ans=0.1 +2024-07-27 14:51:38,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=28633.333333333332, ans=0.125 +2024-07-27 14:51:41,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.81 vs. 
limit=12.0 +2024-07-27 14:51:50,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28646.666666666668, ans=0.1 +2024-07-27 14:51:54,878 INFO [train.py:1114] (3/4) Epoch 3, batch 1050, loss[loss=0.2845, simple_loss=0.3534, pruned_loss=0.1078, over 4879.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3608, pruned_loss=0.1176, over 933292.94 frames. ], batch size: 14, lr: 2.41e-02, grad_scale: 128.0 +2024-07-27 14:52:06,687 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.090e+01 6.458e+01 7.095e+01 7.722e+01 9.914e+01, threshold=1.419e+02, percent-clipped=0.0 +2024-07-27 14:52:06,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=28673.333333333332, ans=0.1 +2024-07-27 14:52:07,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28673.333333333332, ans=0.1 +2024-07-27 14:52:16,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=28686.666666666668, ans=0.125 +2024-07-27 14:52:17,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.62 vs. limit=22.5 +2024-07-27 14:52:27,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=28686.666666666668, ans=0.125 +2024-07-27 14:52:41,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=28713.333333333332, ans=0.125 +2024-07-27 14:52:44,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.97 vs. limit=22.5 +2024-07-27 14:52:54,290 INFO [train.py:1114] (3/4) Epoch 3, batch 1100, loss[loss=0.2812, simple_loss=0.349, pruned_loss=0.1067, over 4904.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3595, pruned_loss=0.116, over 935437.47 frames. ], batch size: 13, lr: 2.40e-02, grad_scale: 128.0 +2024-07-27 14:53:27,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=28753.333333333332, ans=0.125 +2024-07-27 14:53:48,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=28753.333333333332, ans=0.125 +2024-07-27 14:53:59,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=6.01 vs. limit=12.0 +2024-07-27 14:54:04,141 INFO [train.py:1114] (3/4) Epoch 3, batch 1150, loss[loss=0.2241, simple_loss=0.2997, pruned_loss=0.07421, over 4892.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3593, pruned_loss=0.1159, over 935052.11 frames. 
], batch size: 13, lr: 2.40e-02, grad_scale: 128.0 +2024-07-27 14:54:13,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=28806.666666666668, ans=0.0 +2024-07-27 14:54:14,006 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.388e+01 6.303e+01 6.956e+01 7.734e+01 1.852e+02, threshold=1.391e+02, percent-clipped=1.0 +2024-07-27 14:54:21,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=28820.0, ans=0.2 +2024-07-27 14:54:22,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=28820.0, ans=0.125 +2024-07-27 14:54:25,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=28833.333333333332, ans=0.125 +2024-07-27 14:54:30,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=28833.333333333332, ans=0.125 +2024-07-27 14:54:38,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=28846.666666666668, ans=0.1 +2024-07-27 14:54:40,677 INFO [train.py:1114] (3/4) Epoch 3, batch 1200, loss[loss=0.3337, simple_loss=0.3952, pruned_loss=0.1361, over 4869.00 frames. ], tot_loss[loss=0.2986, simple_loss=0.3616, pruned_loss=0.1177, over 933829.77 frames. ], batch size: 14, lr: 2.40e-02, grad_scale: 64.0 +2024-07-27 14:54:43,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=28860.0, ans=0.125 +2024-07-27 14:54:51,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=28873.333333333332, ans=0.2 +2024-07-27 14:55:24,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=28913.333333333332, ans=0.0 +2024-07-27 14:55:31,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=28913.333333333332, ans=0.2 +2024-07-27 14:55:32,479 INFO [train.py:1114] (3/4) Epoch 3, batch 1250, loss[loss=0.2934, simple_loss=0.3708, pruned_loss=0.108, over 4797.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3622, pruned_loss=0.1173, over 937662.49 frames. ], batch size: 15, lr: 2.40e-02, grad_scale: 64.0 +2024-07-27 14:55:41,891 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 6.356e+01 6.895e+01 7.489e+01 1.286e+02, threshold=1.379e+02, percent-clipped=0.0 +2024-07-27 14:55:52,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=28953.333333333332, ans=0.0 +2024-07-27 14:56:07,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-07-27 14:56:07,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.40 vs. 
limit=6.0 +2024-07-27 14:56:08,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=28980.0, ans=0.0 +2024-07-27 14:56:10,876 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.30 vs. limit=12.0 +2024-07-27 14:56:15,697 INFO [train.py:1114] (3/4) Epoch 3, batch 1300, loss[loss=0.3346, simple_loss=0.3868, pruned_loss=0.1412, over 4715.00 frames. ], tot_loss[loss=0.2983, simple_loss=0.3619, pruned_loss=0.1174, over 939141.96 frames. ], batch size: 19, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:56:22,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=29006.666666666668, ans=0.2 +2024-07-27 14:56:52,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=29020.0, ans=0.125 +2024-07-27 14:57:03,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.15 vs. limit=22.5 +2024-07-27 14:57:04,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.87 vs. limit=22.5 +2024-07-27 14:57:16,158 INFO [train.py:1114] (3/4) Epoch 3, batch 1350, loss[loss=0.2693, simple_loss=0.3473, pruned_loss=0.09567, over 4770.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3604, pruned_loss=0.1161, over 940881.56 frames. ], batch size: 13, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:57:19,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=29060.0, ans=0.125 +2024-07-27 14:57:21,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.35 vs. limit=15.0 +2024-07-27 14:57:31,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29073.333333333332, ans=0.1 +2024-07-27 14:57:32,073 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.171e+01 6.881e+01 8.115e+01 1.166e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 14:57:32,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=29073.333333333332, ans=0.125 +2024-07-27 14:57:46,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=29086.666666666668, ans=0.2 +2024-07-27 14:57:46,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.83 vs. limit=22.5 +2024-07-27 14:57:48,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.96 vs. limit=15.0 +2024-07-27 14:58:06,122 INFO [train.py:1114] (3/4) Epoch 3, batch 1400, loss[loss=0.2667, simple_loss=0.3285, pruned_loss=0.1024, over 4704.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3591, pruned_loss=0.1155, over 942811.14 frames. 
], batch size: 11, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:58:37,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=29166.666666666668, ans=0.125 +2024-07-27 14:58:37,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=29166.666666666668, ans=10.0 +2024-07-27 14:58:43,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=29166.666666666668, ans=0.0 +2024-07-27 14:59:01,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=29193.333333333332, ans=0.125 +2024-07-27 14:59:02,312 INFO [train.py:1114] (3/4) Epoch 3, batch 1450, loss[loss=0.3576, simple_loss=0.4205, pruned_loss=0.1473, over 4685.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3604, pruned_loss=0.1158, over 942385.38 frames. ], batch size: 15, lr: 2.39e-02, grad_scale: 64.0 +2024-07-27 14:59:18,812 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.399e+01 7.002e+01 7.900e+01 1.035e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 14:59:26,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.58 vs. limit=5.0 +2024-07-27 14:59:33,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=29233.333333333332, ans=0.2 +2024-07-27 14:59:41,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.34 vs. limit=22.5 +2024-07-27 14:59:53,881 INFO [train.py:1114] (3/4) Epoch 3, batch 1500, loss[loss=0.2859, simple_loss=0.3637, pruned_loss=0.104, over 4820.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.361, pruned_loss=0.1162, over 942318.20 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 64.0 +2024-07-27 15:00:08,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.28 vs. limit=22.5 +2024-07-27 15:00:10,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=29286.666666666668, ans=0.125 +2024-07-27 15:00:15,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.35 vs. limit=22.5 +2024-07-27 15:00:24,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=29313.333333333332, ans=0.125 +2024-07-27 15:00:29,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=29326.666666666668, ans=0.2 +2024-07-27 15:00:29,947 INFO [train.py:1114] (3/4) Epoch 3, batch 1550, loss[loss=0.302, simple_loss=0.3573, pruned_loss=0.1233, over 4890.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3613, pruned_loss=0.117, over 938600.88 frames. 
], batch size: 15, lr: 2.38e-02, grad_scale: 64.0 +2024-07-27 15:00:41,580 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.380e+01 6.188e+01 6.996e+01 8.008e+01 1.128e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 15:00:41,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=29340.0, ans=0.0 +2024-07-27 15:00:48,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=29353.333333333332, ans=0.00448840579710145 +2024-07-27 15:01:51,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=29380.0, ans=0.125 +2024-07-27 15:01:53,200 INFO [train.py:1114] (3/4) Epoch 3, batch 1600, loss[loss=0.2838, simple_loss=0.3544, pruned_loss=0.1066, over 4877.00 frames. ], tot_loss[loss=0.2969, simple_loss=0.3601, pruned_loss=0.1168, over 937249.80 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 32.0 +2024-07-27 15:01:55,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=29393.333333333332, ans=0.125 +2024-07-27 15:02:10,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=29420.0, ans=0.125 +2024-07-27 15:02:25,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=15.0 +2024-07-27 15:02:25,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=29446.666666666668, ans=0.125 +2024-07-27 15:02:28,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=29446.666666666668, ans=0.2 +2024-07-27 15:02:30,085 INFO [train.py:1114] (3/4) Epoch 3, batch 1650, loss[loss=0.3184, simple_loss=0.3913, pruned_loss=0.1228, over 4671.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3596, pruned_loss=0.1166, over 937219.91 frames. ], batch size: 14, lr: 2.38e-02, grad_scale: 32.0 +2024-07-27 15:02:33,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.27 vs. limit=22.5 +2024-07-27 15:02:34,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=29460.0, ans=0.025 +2024-07-27 15:02:35,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=29460.0, ans=0.04949747468305833 +2024-07-27 15:02:40,272 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.917e+01 6.286e+01 6.723e+01 7.368e+01 1.143e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-27 15:03:04,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=29486.666666666668, ans=0.0 +2024-07-27 15:03:09,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.43 vs. 
limit=6.0 +2024-07-27 15:03:11,987 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:03:14,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=29500.0, ans=0.125 +2024-07-27 15:03:20,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=29513.333333333332, ans=0.04949747468305833 +2024-07-27 15:03:27,849 INFO [train.py:1114] (3/4) Epoch 3, batch 1700, loss[loss=0.2641, simple_loss=0.3212, pruned_loss=0.1035, over 4707.00 frames. ], tot_loss[loss=0.2959, simple_loss=0.3594, pruned_loss=0.1162, over 938970.62 frames. ], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:03:39,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=29540.0, ans=0.125 +2024-07-27 15:03:41,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=29540.0, ans=0.125 +2024-07-27 15:03:55,249 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:04:01,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=29580.0, ans=0.125 +2024-07-27 15:04:14,461 INFO [train.py:1114] (3/4) Epoch 3, batch 1750, loss[loss=0.2357, simple_loss=0.2973, pruned_loss=0.08711, over 4814.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3584, pruned_loss=0.115, over 939929.96 frames. ], batch size: 11, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:04:17,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=29593.333333333332, ans=0.125 +2024-07-27 15:04:30,954 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.973e+01 6.164e+01 6.649e+01 7.575e+01 1.168e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 15:04:31,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.55 vs. limit=22.5 +2024-07-27 15:04:44,750 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.82 vs. limit=15.0 +2024-07-27 15:05:04,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=29633.333333333332, ans=0.125 +2024-07-27 15:05:11,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=29646.666666666668, ans=0.0 +2024-07-27 15:05:13,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=29646.666666666668, ans=0.125 +2024-07-27 15:05:13,736 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.66 vs. limit=15.0 +2024-07-27 15:05:15,314 INFO [train.py:1114] (3/4) Epoch 3, batch 1800, loss[loss=0.2985, simple_loss=0.3607, pruned_loss=0.1181, over 4642.00 frames. ], tot_loss[loss=0.2947, simple_loss=0.3586, pruned_loss=0.1154, over 940749.46 frames. 
], batch size: 13, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:05:24,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=29673.333333333332, ans=0.2 +2024-07-27 15:05:34,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=15.0 +2024-07-27 15:05:40,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=29700.0, ans=0.125 +2024-07-27 15:05:49,941 INFO [train.py:1114] (3/4) Epoch 3, batch 1850, loss[loss=0.3178, simple_loss=0.3823, pruned_loss=0.1266, over 4801.00 frames. ], tot_loss[loss=0.293, simple_loss=0.3574, pruned_loss=0.1143, over 940716.38 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:06:13,606 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 6.411e+01 6.989e+01 8.311e+01 1.252e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 15:06:13,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29740.0, ans=0.1 +2024-07-27 15:06:17,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=29753.333333333332, ans=0.0 +2024-07-27 15:07:00,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=29780.0, ans=0.125 +2024-07-27 15:07:01,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29780.0, ans=0.1 +2024-07-27 15:07:02,456 INFO [train.py:1114] (3/4) Epoch 3, batch 1900, loss[loss=0.291, simple_loss=0.3762, pruned_loss=0.1029, over 4649.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.3574, pruned_loss=0.1142, over 941922.71 frames. ], batch size: 14, lr: 2.37e-02, grad_scale: 32.0 +2024-07-27 15:07:03,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=29793.333333333332, ans=0.125 +2024-07-27 15:07:13,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=29806.666666666668, ans=0.1 +2024-07-27 15:07:14,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.96 vs. limit=12.0 +2024-07-27 15:07:22,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=29806.666666666668, ans=0.125 +2024-07-27 15:07:24,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.32 vs. limit=15.0 +2024-07-27 15:07:36,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=29833.333333333332, ans=0.025 +2024-07-27 15:07:38,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.15 vs. 
limit=10.0 +2024-07-27 15:08:05,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=29846.666666666668, ans=0.0 +2024-07-27 15:08:06,602 INFO [train.py:1114] (3/4) Epoch 3, batch 1950, loss[loss=0.3289, simple_loss=0.3817, pruned_loss=0.1381, over 4898.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3597, pruned_loss=0.1153, over 943864.87 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:08:07,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=29860.0, ans=0.125 +2024-07-27 15:08:12,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.40 vs. limit=15.0 +2024-07-27 15:08:15,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=29873.333333333332, ans=0.1 +2024-07-27 15:08:18,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 15:08:18,972 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.436e+01 6.844e+01 7.392e+01 3.834e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-27 15:08:27,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=29873.333333333332, ans=0.125 +2024-07-27 15:08:39,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=29900.0, ans=0.5 +2024-07-27 15:08:51,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=29913.333333333332, ans=0.125 +2024-07-27 15:08:52,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.73 vs. limit=15.0 +2024-07-27 15:08:54,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=29913.333333333332, ans=0.004366666666666667 +2024-07-27 15:08:56,179 INFO [train.py:1114] (3/4) Epoch 3, batch 2000, loss[loss=0.2497, simple_loss=0.2966, pruned_loss=0.1015, over 4804.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3596, pruned_loss=0.1152, over 941009.33 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:08:56,386 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:09:02,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.40 vs. 
limit=15.0 +2024-07-27 15:09:22,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=29966.666666666668, ans=0.2 +2024-07-27 15:09:23,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=29966.666666666668, ans=0.025 +2024-07-27 15:09:28,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=29980.0, ans=0.025 +2024-07-27 15:09:41,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=29980.0, ans=0.125 +2024-07-27 15:09:42,733 INFO [train.py:1114] (3/4) Epoch 3, batch 2050, loss[loss=0.2717, simple_loss=0.3283, pruned_loss=0.1075, over 4613.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3578, pruned_loss=0.1143, over 938907.62 frames. ], batch size: 11, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:09:52,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.226e+01 6.283e+01 7.092e+01 8.463e+01 1.553e+02, threshold=1.418e+02, percent-clipped=1.0 +2024-07-27 15:09:58,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=30020.0, ans=22.5 +2024-07-27 15:10:16,479 INFO [train.py:1114] (3/4) Epoch 3, batch 2100, loss[loss=0.2982, simple_loss=0.3654, pruned_loss=0.1155, over 4769.00 frames. ], tot_loss[loss=0.2924, simple_loss=0.3568, pruned_loss=0.114, over 940940.05 frames. ], batch size: 13, lr: 2.36e-02, grad_scale: 32.0 +2024-07-27 15:10:17,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=30060.0, ans=0.0 +2024-07-27 15:10:18,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.02 vs. limit=22.5 +2024-07-27 15:10:19,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=30060.0, ans=10.0 +2024-07-27 15:10:25,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.99 vs. limit=15.0 +2024-07-27 15:10:27,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=30073.333333333332, ans=0.125 +2024-07-27 15:10:46,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=30113.333333333332, ans=0.125 +2024-07-27 15:10:50,458 INFO [train.py:1114] (3/4) Epoch 3, batch 2150, loss[loss=0.228, simple_loss=0.3187, pruned_loss=0.06868, over 4897.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.354, pruned_loss=0.1117, over 944018.75 frames. 
], batch size: 13, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:10:58,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=30140.0, ans=0.2 +2024-07-27 15:10:59,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=30140.0, ans=0.125 +2024-07-27 15:11:01,325 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.479e+01 6.161e+01 6.566e+01 7.305e+01 9.854e+01, threshold=1.313e+02, percent-clipped=0.0 +2024-07-27 15:11:03,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=30140.0, ans=0.0 +2024-07-27 15:11:05,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30153.333333333332, ans=0.1 +2024-07-27 15:11:14,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.06 vs. limit=15.0 +2024-07-27 15:11:26,655 INFO [train.py:1114] (3/4) Epoch 3, batch 2200, loss[loss=0.327, simple_loss=0.4021, pruned_loss=0.126, over 4813.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3552, pruned_loss=0.1123, over 943209.88 frames. ], batch size: 14, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:11:29,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=2.77 vs. limit=15.0 +2024-07-27 15:11:30,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=30193.333333333332, ans=0.004305797101449275 +2024-07-27 15:11:31,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=30193.333333333332, ans=0.125 +2024-07-27 15:11:35,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.88 vs. limit=10.0 +2024-07-27 15:11:37,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.24 vs. limit=10.0 +2024-07-27 15:11:44,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=30220.0, ans=0.2 +2024-07-27 15:11:52,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=30233.333333333332, ans=0.025 +2024-07-27 15:11:56,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30246.666666666668, ans=0.125 +2024-07-27 15:12:05,475 INFO [train.py:1114] (3/4) Epoch 3, batch 2250, loss[loss=0.2975, simple_loss=0.375, pruned_loss=0.11, over 4689.00 frames. ], tot_loss[loss=0.2916, simple_loss=0.3567, pruned_loss=0.1132, over 941716.86 frames. 
], batch size: 13, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:12:13,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=30273.333333333332, ans=0.0 +2024-07-27 15:12:16,108 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.240e+01 6.175e+01 6.906e+01 7.852e+01 1.345e+02, threshold=1.381e+02, percent-clipped=1.0 +2024-07-27 15:12:20,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0 +2024-07-27 15:12:29,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=30286.666666666668, ans=0.125 +2024-07-27 15:12:32,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=30286.666666666668, ans=0.0 +2024-07-27 15:12:34,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=30300.0, ans=0.125 +2024-07-27 15:12:50,204 INFO [train.py:1114] (3/4) Epoch 3, batch 2300, loss[loss=0.2605, simple_loss=0.3187, pruned_loss=0.1011, over 4943.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3559, pruned_loss=0.1129, over 939176.46 frames. ], batch size: 12, lr: 2.35e-02, grad_scale: 32.0 +2024-07-27 15:12:53,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0 +2024-07-27 15:12:54,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=30326.666666666668, ans=0.5 +2024-07-27 15:13:01,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=30340.0, ans=0.1 +2024-07-27 15:13:01,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.58 vs. limit=12.0 +2024-07-27 15:13:05,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.16 vs. limit=6.0 +2024-07-27 15:13:21,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.72 vs. limit=15.0 +2024-07-27 15:13:23,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.73 vs. limit=15.0 +2024-07-27 15:13:35,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=30393.333333333332, ans=0.125 +2024-07-27 15:13:35,756 INFO [train.py:1114] (3/4) Epoch 3, batch 2350, loss[loss=0.3818, simple_loss=0.4275, pruned_loss=0.1681, over 4635.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3553, pruned_loss=0.1118, over 941141.54 frames. 
], batch size: 13, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:13:37,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=30393.333333333332, ans=0.09899494936611666 +2024-07-27 15:13:51,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=30406.666666666668, ans=0.2 +2024-07-27 15:13:54,299 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.436e+01 6.422e+01 7.140e+01 8.022e+01 1.675e+02, threshold=1.428e+02, percent-clipped=1.0 +2024-07-27 15:14:13,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=30446.666666666668, ans=0.035 +2024-07-27 15:14:18,266 INFO [train.py:1114] (3/4) Epoch 3, batch 2400, loss[loss=0.2453, simple_loss=0.3151, pruned_loss=0.08779, over 4643.00 frames. ], tot_loss[loss=0.291, simple_loss=0.3569, pruned_loss=0.1126, over 941019.34 frames. ], batch size: 12, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:14:30,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=30460.0, ans=0.1 +2024-07-27 15:14:30,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=30460.0, ans=0.125 +2024-07-27 15:14:32,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=30460.0, ans=0.0 +2024-07-27 15:14:46,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.40 vs. limit=12.0 +2024-07-27 15:14:59,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=30513.333333333332, ans=0.0 +2024-07-27 15:15:04,220 INFO [train.py:1114] (3/4) Epoch 3, batch 2450, loss[loss=0.2782, simple_loss=0.3445, pruned_loss=0.1059, over 4688.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3593, pruned_loss=0.1146, over 937180.95 frames. ], batch size: 13, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:15:22,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.262e+01 6.488e+01 7.535e+01 9.077e+01 1.631e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 15:15:46,321 INFO [train.py:1114] (3/4) Epoch 3, batch 2500, loss[loss=0.3087, simple_loss=0.3846, pruned_loss=0.1164, over 4817.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3586, pruned_loss=0.1142, over 939341.27 frames. ], batch size: 14, lr: 2.34e-02, grad_scale: 32.0 +2024-07-27 15:15:47,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=6.81 vs. limit=15.0 +2024-07-27 15:16:02,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=30620.0, ans=0.125 +2024-07-27 15:16:13,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=30633.333333333332, ans=0.0 +2024-07-27 15:16:23,929 INFO [train.py:1114] (3/4) Epoch 3, batch 2550, loss[loss=0.268, simple_loss=0.3186, pruned_loss=0.1087, over 4804.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3582, pruned_loss=0.1141, over 939012.33 frames. 
], batch size: 11, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:16:26,936 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.11 vs. limit=22.5 +2024-07-27 15:16:54,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=30660.0, ans=0.1 +2024-07-27 15:16:59,985 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.098e+01 6.217e+01 6.996e+01 7.708e+01 1.283e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-27 15:17:18,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30700.0, ans=0.1 +2024-07-27 15:17:28,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-07-27 15:17:31,980 INFO [train.py:1114] (3/4) Epoch 3, batch 2600, loss[loss=0.2632, simple_loss=0.3478, pruned_loss=0.08931, over 4893.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3588, pruned_loss=0.1145, over 937798.04 frames. ], batch size: 13, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:18:08,637 INFO [train.py:1114] (3/4) Epoch 3, batch 2650, loss[loss=0.3592, simple_loss=0.4063, pruned_loss=0.1561, over 4615.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.3592, pruned_loss=0.1149, over 939906.76 frames. ], batch size: 16, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:18:25,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=30806.666666666668, ans=0.0 +2024-07-27 15:18:28,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=30806.666666666668, ans=0.125 +2024-07-27 15:18:28,833 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.212e+01 6.736e+01 7.183e+01 9.052e+01, threshold=1.347e+02, percent-clipped=0.0 +2024-07-27 15:18:33,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=30820.0, ans=0.125 +2024-07-27 15:18:44,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=30833.333333333332, ans=0.025 +2024-07-27 15:18:53,034 INFO [train.py:1114] (3/4) Epoch 3, batch 2700, loss[loss=0.3004, simple_loss=0.3667, pruned_loss=0.1171, over 4734.00 frames. ], tot_loss[loss=0.2946, simple_loss=0.3592, pruned_loss=0.115, over 939850.81 frames. ], batch size: 14, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:18:55,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.02 vs. limit=15.0 +2024-07-27 15:19:03,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=30873.333333333332, ans=0.0 +2024-07-27 15:19:34,127 INFO [train.py:1114] (3/4) Epoch 3, batch 2750, loss[loss=0.2586, simple_loss=0.3285, pruned_loss=0.09434, over 4700.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3577, pruned_loss=0.1146, over 940035.62 frames. 
], batch size: 12, lr: 2.33e-02, grad_scale: 32.0 +2024-07-27 15:19:40,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=30940.0, ans=0.0 +2024-07-27 15:19:41,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=30940.0, ans=0.1 +2024-07-27 15:19:44,301 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 6.371e+01 6.868e+01 7.779e+01 1.190e+02, threshold=1.374e+02, percent-clipped=0.0 +2024-07-27 15:20:01,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=30980.0, ans=0.0 +2024-07-27 15:20:07,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=30980.0, ans=0.0 +2024-07-27 15:20:08,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=30993.333333333332, ans=0.125 +2024-07-27 15:20:08,675 INFO [train.py:1114] (3/4) Epoch 3, batch 2800, loss[loss=0.4062, simple_loss=0.4452, pruned_loss=0.1835, over 3657.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.3573, pruned_loss=0.1144, over 937943.95 frames. ], batch size: 36, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:20:18,697 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.74 vs. limit=22.5 +2024-07-27 15:20:20,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.15 vs. limit=22.5 +2024-07-27 15:20:37,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=31046.666666666668, ans=10.0 +2024-07-27 15:20:40,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=31046.666666666668, ans=0.2 +2024-07-27 15:20:47,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=31046.666666666668, ans=0.125 +2024-07-27 15:20:49,902 INFO [train.py:1114] (3/4) Epoch 3, batch 2850, loss[loss=0.2658, simple_loss=0.3363, pruned_loss=0.09765, over 4958.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3579, pruned_loss=0.1145, over 936313.69 frames. 
], batch size: 13, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:20:52,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=31060.0, ans=0.015 +2024-07-27 15:21:00,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31073.333333333332, ans=0.1 +2024-07-27 15:21:01,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.366e+01 6.414e+01 6.981e+01 8.121e+01 1.632e+02, threshold=1.396e+02, percent-clipped=1.0 +2024-07-27 15:21:04,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=31086.666666666668, ans=0.004111594202898551 +2024-07-27 15:21:15,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=31086.666666666668, ans=0.0 +2024-07-27 15:21:30,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=31113.333333333332, ans=0.004105797101449276 +2024-07-27 15:21:31,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=31113.333333333332, ans=0.0 +2024-07-27 15:21:42,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=15.17 vs. limit=15.0 +2024-07-27 15:21:43,054 INFO [train.py:1114] (3/4) Epoch 3, batch 2900, loss[loss=0.265, simple_loss=0.3391, pruned_loss=0.09547, over 4833.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3586, pruned_loss=0.1142, over 939901.35 frames. ], batch size: 13, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:21:51,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=31126.666666666668, ans=0.2 +2024-07-27 15:21:55,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=31126.666666666668, ans=0.004102898550724637 +2024-07-27 15:21:59,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=31140.0, ans=0.07 +2024-07-27 15:22:07,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=31153.333333333332, ans=0.0 +2024-07-27 15:22:08,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=31153.333333333332, ans=0.125 +2024-07-27 15:22:17,075 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. limit=15.0 +2024-07-27 15:22:20,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=31180.0, ans=0.025 +2024-07-27 15:22:29,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=31180.0, ans=0.1 +2024-07-27 15:22:35,885 INFO [train.py:1114] (3/4) Epoch 3, batch 2950, loss[loss=0.2777, simple_loss=0.3326, pruned_loss=0.1114, over 4713.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.3577, pruned_loss=0.114, over 938831.20 frames. 
], batch size: 12, lr: 2.32e-02, grad_scale: 32.0 +2024-07-27 15:22:39,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.51 vs. limit=15.0 +2024-07-27 15:22:50,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.93 vs. limit=10.0 +2024-07-27 15:22:52,193 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.239e+01 6.722e+01 7.619e+01 1.818e+02, threshold=1.344e+02, percent-clipped=1.0 +2024-07-27 15:23:01,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=31220.0, ans=0.0040826086956521745 +2024-07-27 15:23:03,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31220.0, ans=0.1 +2024-07-27 15:23:05,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0 +2024-07-27 15:23:09,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=31233.333333333332, ans=0.0 +2024-07-27 15:23:14,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=31246.666666666668, ans=0.125 +2024-07-27 15:23:18,257 INFO [train.py:1114] (3/4) Epoch 3, batch 3000, loss[loss=0.2959, simple_loss=0.3639, pruned_loss=0.114, over 4756.00 frames. ], tot_loss[loss=0.291, simple_loss=0.3563, pruned_loss=0.1129, over 938462.98 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:23:18,258 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 15:23:26,682 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.7073, 2.2068, 2.3675, 1.7735, 2.2994, 1.9595, 1.9217, 2.0052], + device='cuda:3') +2024-07-27 15:23:27,591 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.9581, 3.6560, 3.5314, 3.2930], device='cuda:3') +2024-07-27 15:23:33,137 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2358, simple_loss=0.3336, pruned_loss=0.06904, over 944034.00 frames. +2024-07-27 15:23:33,137 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 15:23:51,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.23 vs. limit=22.5 +2024-07-27 15:24:06,655 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:24:09,324 INFO [train.py:1114] (3/4) Epoch 3, batch 3050, loss[loss=0.2305, simple_loss=0.3003, pruned_loss=0.0804, over 4642.00 frames. ], tot_loss[loss=0.2925, simple_loss=0.3576, pruned_loss=0.1136, over 936999.91 frames. 
], batch size: 12, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:24:19,670 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 6.122e+01 6.753e+01 7.490e+01 1.166e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-27 15:24:24,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=31353.333333333332, ans=0.004053623188405798 +2024-07-27 15:24:35,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=31366.666666666668, ans=0.004050724637681159 +2024-07-27 15:24:43,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=31380.0, ans=0.2 +2024-07-27 15:24:45,609 INFO [train.py:1114] (3/4) Epoch 3, batch 3100, loss[loss=0.3016, simple_loss=0.3693, pruned_loss=0.1169, over 4615.00 frames. ], tot_loss[loss=0.2916, simple_loss=0.3574, pruned_loss=0.1129, over 937454.88 frames. ], batch size: 16, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:24:54,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.39 vs. limit=15.0 +2024-07-27 15:24:58,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.05 vs. limit=15.0 +2024-07-27 15:25:09,041 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:25:14,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=31446.666666666668, ans=0.004033333333333333 +2024-07-27 15:25:16,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=31446.666666666668, ans=0.125 +2024-07-27 15:25:17,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=31446.666666666668, ans=0.004033333333333333 +2024-07-27 15:25:19,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.39 vs. limit=15.0 +2024-07-27 15:25:21,432 INFO [train.py:1114] (3/4) Epoch 3, batch 3150, loss[loss=0.3132, simple_loss=0.3868, pruned_loss=0.1199, over 4636.00 frames. ], tot_loss[loss=0.2918, simple_loss=0.3573, pruned_loss=0.1131, over 937597.25 frames. ], batch size: 17, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:25:31,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.33 vs. limit=6.0 +2024-07-27 15:25:31,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.198e+01 6.919e+01 7.574e+01 1.132e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-27 15:25:35,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=31486.666666666668, ans=0.025 +2024-07-27 15:25:36,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.44 vs. 
limit=15.0 +2024-07-27 15:25:38,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=31486.666666666668, ans=0.125 +2024-07-27 15:25:49,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=31500.0, ans=0.004021739130434783 +2024-07-27 15:25:49,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=31513.333333333332, ans=0.04949747468305833 +2024-07-27 15:25:51,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=31513.333333333332, ans=0.2 +2024-07-27 15:25:51,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31513.333333333332, ans=0.1 +2024-07-27 15:25:52,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=31513.333333333332, ans=0.1 +2024-07-27 15:25:55,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=31513.333333333332, ans=0.125 +2024-07-27 15:25:56,853 INFO [train.py:1114] (3/4) Epoch 3, batch 3200, loss[loss=0.3151, simple_loss=0.362, pruned_loss=0.1341, over 4825.00 frames. ], tot_loss[loss=0.2918, simple_loss=0.3572, pruned_loss=0.1132, over 939089.02 frames. ], batch size: 13, lr: 2.31e-02, grad_scale: 32.0 +2024-07-27 15:25:57,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=31526.666666666668, ans=0.0040159420289855065 +2024-07-27 15:26:23,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=31553.333333333332, ans=0.125 +2024-07-27 15:26:39,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=31580.0, ans=0.09899494936611666 +2024-07-27 15:26:45,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=31580.0, ans=0.0 +2024-07-27 15:26:49,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=31593.333333333332, ans=0.025 +2024-07-27 15:26:49,775 INFO [train.py:1114] (3/4) Epoch 3, batch 3250, loss[loss=0.2819, simple_loss=0.3584, pruned_loss=0.1027, over 4931.00 frames. ], tot_loss[loss=0.2916, simple_loss=0.3576, pruned_loss=0.1129, over 940211.68 frames. 
], batch size: 14, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:26:54,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=31593.333333333332, ans=0.004001449275362319 +2024-07-27 15:27:01,542 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.545e+01 6.278e+01 6.797e+01 7.554e+01 1.103e+02, threshold=1.359e+02, percent-clipped=0.0 +2024-07-27 15:27:07,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=31620.0, ans=0.025 +2024-07-27 15:27:10,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=31620.0, ans=0.95 +2024-07-27 15:27:19,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=31633.333333333332, ans=0.003992753623188406 +2024-07-27 15:27:30,559 INFO [train.py:1114] (3/4) Epoch 3, batch 3300, loss[loss=0.325, simple_loss=0.3781, pruned_loss=0.136, over 4676.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3559, pruned_loss=0.1128, over 940465.62 frames. ], batch size: 19, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:27:42,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=31660.0, ans=0.025 +2024-07-27 15:27:45,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=31673.333333333332, ans=0.5 +2024-07-27 15:27:51,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=31686.666666666668, ans=0.2 +2024-07-27 15:27:52,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=31686.666666666668, ans=0.2 +2024-07-27 15:27:53,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=31686.666666666668, ans=0.2 +2024-07-27 15:27:54,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=31686.666666666668, ans=0.125 +2024-07-27 15:28:00,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=31700.0, ans=0.1 +2024-07-27 15:28:04,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=31700.0, ans=0.04949747468305833 +2024-07-27 15:28:10,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.30 vs. limit=15.0 +2024-07-27 15:28:13,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=31726.666666666668, ans=0.125 +2024-07-27 15:28:13,926 INFO [train.py:1114] (3/4) Epoch 3, batch 3350, loss[loss=0.3449, simple_loss=0.4165, pruned_loss=0.1366, over 4635.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3585, pruned_loss=0.1144, over 938364.17 frames. 
], batch size: 17, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:28:24,376 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.313e+01 6.716e+01 7.505e+01 1.231e+02, threshold=1.343e+02, percent-clipped=0.0 +2024-07-27 15:28:28,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.38 vs. limit=5.0 +2024-07-27 15:28:39,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=31766.666666666668, ans=0.0 +2024-07-27 15:28:39,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=31766.666666666668, ans=0.003963768115942029 +2024-07-27 15:28:50,134 INFO [train.py:1114] (3/4) Epoch 3, batch 3400, loss[loss=0.2237, simple_loss=0.3035, pruned_loss=0.07195, over 4800.00 frames. ], tot_loss[loss=0.2919, simple_loss=0.3567, pruned_loss=0.1136, over 937205.50 frames. ], batch size: 11, lr: 2.30e-02, grad_scale: 32.0 +2024-07-27 15:28:50,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=31793.333333333332, ans=0.125 +2024-07-27 15:29:02,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.14 vs. limit=15.0 +2024-07-27 15:29:08,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=31806.666666666668, ans=0.003955072463768116 +2024-07-27 15:29:14,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=31820.0, ans=0.125 +2024-07-27 15:29:25,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=31833.333333333332, ans=0.125 +2024-07-27 15:29:31,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=31846.666666666668, ans=0.125 +2024-07-27 15:29:35,521 INFO [train.py:1114] (3/4) Epoch 3, batch 3450, loss[loss=0.3374, simple_loss=0.3875, pruned_loss=0.1437, over 4686.00 frames. ], tot_loss[loss=0.2921, simple_loss=0.3572, pruned_loss=0.1135, over 937726.83 frames. ], batch size: 19, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:29:40,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=31860.0, ans=0.0 +2024-07-27 15:29:55,349 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 6.313e+01 6.956e+01 7.933e+01 1.220e+02, threshold=1.391e+02, percent-clipped=0.0 +2024-07-27 15:30:03,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.30 vs. 
limit=6.0 +2024-07-27 15:30:04,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=31886.666666666668, ans=0.2 +2024-07-27 15:30:10,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=31900.0, ans=0.003934782608695652 +2024-07-27 15:30:14,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=31913.333333333332, ans=0.125 +2024-07-27 15:30:20,201 INFO [train.py:1114] (3/4) Epoch 3, batch 3500, loss[loss=0.2599, simple_loss=0.3184, pruned_loss=0.1008, over 4944.00 frames. ], tot_loss[loss=0.2899, simple_loss=0.3553, pruned_loss=0.1122, over 938146.20 frames. ], batch size: 12, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:30:26,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=31940.0, ans=0.125 +2024-07-27 15:30:34,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=31953.333333333332, ans=0.0 +2024-07-27 15:30:39,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.77 vs. limit=10.0 +2024-07-27 15:30:40,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=31966.666666666668, ans=0.1 +2024-07-27 15:30:53,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=31980.0, ans=0.2 +2024-07-27 15:30:55,138 INFO [train.py:1114] (3/4) Epoch 3, batch 3550, loss[loss=0.3228, simple_loss=0.4028, pruned_loss=0.1214, over 4677.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3552, pruned_loss=0.1122, over 938561.07 frames. ], batch size: 14, lr: 2.29e-02, grad_scale: 32.0 +2024-07-27 15:31:05,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.25 vs. limit=15.0 +2024-07-27 15:31:10,781 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 6.203e+01 6.849e+01 7.664e+01 1.472e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-27 15:31:12,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32006.666666666668, ans=0.1 +2024-07-27 15:31:21,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32033.333333333332, ans=0.1 +2024-07-27 15:31:21,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=32033.333333333332, ans=0.003905797101449276 +2024-07-27 15:31:29,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=32046.666666666668, ans=0.2 +2024-07-27 15:31:31,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=32046.666666666668, ans=0.125 +2024-07-27 15:31:34,514 INFO [train.py:1114] (3/4) Epoch 3, batch 3600, loss[loss=0.3143, simple_loss=0.3764, pruned_loss=0.1261, over 4958.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3545, pruned_loss=0.1117, over 940240.13 frames. 
], batch size: 13, lr: 2.29e-02, grad_scale: 64.0 +2024-07-27 15:32:00,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=32100.0, ans=0.95 +2024-07-27 15:32:04,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=32113.333333333332, ans=0.0038884057971014492 +2024-07-27 15:32:08,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=32113.333333333332, ans=0.025 +2024-07-27 15:32:10,746 INFO [train.py:1114] (3/4) Epoch 3, batch 3650, loss[loss=0.3223, simple_loss=0.3827, pruned_loss=0.131, over 4896.00 frames. ], tot_loss[loss=0.289, simple_loss=0.3542, pruned_loss=0.1119, over 940709.50 frames. ], batch size: 15, lr: 2.29e-02, grad_scale: 64.0 +2024-07-27 15:32:12,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=32126.666666666668, ans=0.125 +2024-07-27 15:32:20,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32140.0, ans=0.125 +2024-07-27 15:32:21,263 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.293e+01 6.817e+01 8.019e+01 9.949e+01 1.573e+02, threshold=1.604e+02, percent-clipped=3.0 +2024-07-27 15:32:39,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.61 vs. limit=12.0 +2024-07-27 15:32:43,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=32180.0, ans=0.5 +2024-07-27 15:32:45,072 INFO [train.py:1114] (3/4) Epoch 3, batch 3700, loss[loss=0.3929, simple_loss=0.4452, pruned_loss=0.1703, over 4937.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3546, pruned_loss=0.1115, over 941883.14 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:32:49,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=32193.333333333332, ans=0.125 +2024-07-27 15:32:58,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32220.0, ans=0.1 +2024-07-27 15:33:02,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=32220.0, ans=0.0 +2024-07-27 15:33:14,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.55 vs. limit=22.5 +2024-07-27 15:33:19,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=32246.666666666668, ans=0.125 +2024-07-27 15:33:20,704 INFO [train.py:1114] (3/4) Epoch 3, batch 3750, loss[loss=0.2973, simple_loss=0.3439, pruned_loss=0.1253, over 4813.00 frames. ], tot_loss[loss=0.2903, simple_loss=0.3557, pruned_loss=0.1125, over 943675.25 frames. 
], batch size: 11, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:33:23,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=32260.0, ans=0.1 +2024-07-27 15:33:31,094 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.187e+01 6.972e+01 7.768e+01 2.543e+02, threshold=1.394e+02, percent-clipped=1.0 +2024-07-27 15:33:36,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=32273.333333333332, ans=0.0 +2024-07-27 15:33:49,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32300.0, ans=0.125 +2024-07-27 15:34:01,596 INFO [train.py:1114] (3/4) Epoch 3, batch 3800, loss[loss=0.3144, simple_loss=0.3866, pruned_loss=0.1211, over 4816.00 frames. ], tot_loss[loss=0.2892, simple_loss=0.3545, pruned_loss=0.1119, over 941992.15 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:34:04,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=32326.666666666668, ans=0.04949747468305833 +2024-07-27 15:34:12,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=32340.0, ans=0.003839130434782608 +2024-07-27 15:34:21,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=32366.666666666668, ans=0.125 +2024-07-27 15:34:34,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=32380.0, ans=0.125 +2024-07-27 15:34:36,456 INFO [train.py:1114] (3/4) Epoch 3, batch 3850, loss[loss=0.317, simple_loss=0.3789, pruned_loss=0.1276, over 4628.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3545, pruned_loss=0.1114, over 942761.25 frames. ], batch size: 16, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:34:45,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.36 vs. limit=10.0 +2024-07-27 15:34:46,752 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.157e+01 6.304e+01 7.059e+01 8.148e+01 1.168e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 15:35:02,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=32433.333333333332, ans=0.1 +2024-07-27 15:35:12,181 INFO [train.py:1114] (3/4) Epoch 3, batch 3900, loss[loss=0.2987, simple_loss=0.3671, pruned_loss=0.1152, over 4806.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3547, pruned_loss=0.1114, over 943021.97 frames. ], batch size: 14, lr: 2.28e-02, grad_scale: 64.0 +2024-07-27 15:35:26,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=32486.666666666668, ans=0.125 +2024-07-27 15:35:26,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.54 vs. limit=15.0 +2024-07-27 15:35:41,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=15.67 vs. 
limit=15.0 +2024-07-27 15:35:46,894 INFO [train.py:1114] (3/4) Epoch 3, batch 3950, loss[loss=0.3594, simple_loss=0.4066, pruned_loss=0.1561, over 4823.00 frames. ], tot_loss[loss=0.29, simple_loss=0.3561, pruned_loss=0.112, over 945068.46 frames. ], batch size: 16, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:35:47,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=32526.666666666668, ans=0.125 +2024-07-27 15:35:49,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=32526.666666666668, ans=0.025 +2024-07-27 15:35:51,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-07-27 15:35:51,972 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.152e-03 +2024-07-27 15:35:53,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=32526.666666666668, ans=0.2 +2024-07-27 15:35:56,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=32540.0, ans=0.125 +2024-07-27 15:35:58,547 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 6.366e+01 6.864e+01 8.017e+01 1.947e+02, threshold=1.373e+02, percent-clipped=1.0 +2024-07-27 15:35:59,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.16 vs. limit=15.0 +2024-07-27 15:36:04,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=32553.333333333332, ans=0.125 +2024-07-27 15:36:19,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32580.0, ans=0.1 +2024-07-27 15:36:20,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=32580.0, ans=10.0 +2024-07-27 15:36:22,687 INFO [train.py:1114] (3/4) Epoch 3, batch 4000, loss[loss=0.2599, simple_loss=0.3218, pruned_loss=0.099, over 4769.00 frames. ], tot_loss[loss=0.2894, simple_loss=0.3548, pruned_loss=0.1119, over 941327.59 frames. 
], batch size: 12, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:36:25,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=32593.333333333332, ans=0.125 +2024-07-27 15:36:25,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=32593.333333333332, ans=0.125 +2024-07-27 15:36:27,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=32593.333333333332, ans=0.125 +2024-07-27 15:36:34,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=32606.666666666668, ans=0.125 +2024-07-27 15:36:38,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=32620.0, ans=0.003778260869565218 +2024-07-27 15:36:38,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=32620.0, ans=0.125 +2024-07-27 15:36:42,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.61 vs. limit=15.0 +2024-07-27 15:36:52,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=32646.666666666668, ans=0.09899494936611666 +2024-07-27 15:36:56,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-07-27 15:36:59,105 INFO [train.py:1114] (3/4) Epoch 3, batch 4050, loss[loss=0.3385, simple_loss=0.3805, pruned_loss=0.1483, over 3522.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3544, pruned_loss=0.1113, over 939762.76 frames. ], batch size: 35, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:37:01,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=32660.0, ans=0.2 +2024-07-27 15:37:11,294 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.463e+01 6.459e+01 6.983e+01 7.697e+01 1.084e+02, threshold=1.397e+02, percent-clipped=0.0 +2024-07-27 15:37:21,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=32686.666666666668, ans=0.003763768115942029 +2024-07-27 15:37:24,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-27 15:37:25,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=32700.0, ans=0.05 +2024-07-27 15:37:29,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=32700.0, ans=0.0 +2024-07-27 15:37:31,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=17.43 vs. 
limit=15.0 +2024-07-27 15:37:36,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=32713.333333333332, ans=0.0 +2024-07-27 15:37:37,936 INFO [train.py:1114] (3/4) Epoch 3, batch 4100, loss[loss=0.287, simple_loss=0.3496, pruned_loss=0.1122, over 4902.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3542, pruned_loss=0.1114, over 938810.72 frames. ], batch size: 15, lr: 2.27e-02, grad_scale: 64.0 +2024-07-27 15:37:39,937 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:37:40,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=32726.666666666668, ans=0.125 +2024-07-27 15:37:52,616 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:38:02,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=32766.666666666668, ans=0.125 +2024-07-27 15:38:04,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.70 vs. limit=22.5 +2024-07-27 15:38:07,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=32766.666666666668, ans=0.125 +2024-07-27 15:38:15,382 INFO [train.py:1114] (3/4) Epoch 3, batch 4150, loss[loss=0.2501, simple_loss=0.3258, pruned_loss=0.08724, over 4818.00 frames. ], tot_loss[loss=0.2879, simple_loss=0.3538, pruned_loss=0.111, over 938117.71 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:38:23,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=32806.666666666664, ans=0.2 +2024-07-27 15:38:24,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=32806.666666666664, ans=0.00373768115942029 +2024-07-27 15:38:25,904 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.960e+01 6.227e+01 6.781e+01 8.028e+01 1.229e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 15:38:31,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=15.0 +2024-07-27 15:38:41,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=32833.333333333336, ans=0.2 +2024-07-27 15:38:43,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.17 vs. limit=15.0 +2024-07-27 15:38:48,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=32846.666666666664, ans=0.2 +2024-07-27 15:38:54,828 INFO [train.py:1114] (3/4) Epoch 3, batch 4200, loss[loss=0.3371, simple_loss=0.3877, pruned_loss=0.1432, over 4899.00 frames. ], tot_loss[loss=0.287, simple_loss=0.3531, pruned_loss=0.1105, over 939434.43 frames. 
], batch size: 15, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:38:56,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=32860.0, ans=0.1 +2024-07-27 15:38:56,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=32860.0, ans=0.0037260869565217385 +2024-07-27 15:38:59,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=32860.0, ans=0.125 +2024-07-27 15:39:06,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=32873.333333333336, ans=0.125 +2024-07-27 15:39:18,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=32900.0, ans=0.025 +2024-07-27 15:39:21,772 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.40 vs. limit=15.0 +2024-07-27 15:39:22,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=32900.0, ans=0.0 +2024-07-27 15:39:31,212 INFO [train.py:1114] (3/4) Epoch 3, batch 4250, loss[loss=0.2576, simple_loss=0.326, pruned_loss=0.09459, over 4649.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3531, pruned_loss=0.1099, over 940687.10 frames. ], batch size: 12, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:39:33,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=32926.666666666664, ans=0.0 +2024-07-27 15:39:41,108 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.439e+01 6.186e+01 6.763e+01 7.704e+01 1.140e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-27 15:40:03,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=32980.0, ans=0.2 +2024-07-27 15:40:06,769 INFO [train.py:1114] (3/4) Epoch 3, batch 4300, loss[loss=0.2725, simple_loss=0.3585, pruned_loss=0.09326, over 4759.00 frames. ], tot_loss[loss=0.2885, simple_loss=0.3548, pruned_loss=0.1111, over 940329.51 frames. ], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:40:40,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=33046.666666666664, ans=0.0 +2024-07-27 15:40:42,787 INFO [train.py:1114] (3/4) Epoch 3, batch 4350, loss[loss=0.301, simple_loss=0.377, pruned_loss=0.1125, over 4765.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3555, pruned_loss=0.1109, over 941430.08 frames. 
], batch size: 13, lr: 2.26e-02, grad_scale: 64.0 +2024-07-27 15:40:50,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33073.333333333336, ans=0.1 +2024-07-27 15:40:54,687 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.277e+01 6.236e+01 6.804e+01 7.780e+01 1.356e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-27 15:40:59,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=33086.666666666664, ans=0.125 +2024-07-27 15:41:01,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=33086.666666666664, ans=0.125 +2024-07-27 15:41:14,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=33113.333333333336, ans=0.125 +2024-07-27 15:41:17,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=33113.333333333336, ans=0.0 +2024-07-27 15:41:18,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.42 vs. limit=15.0 +2024-07-27 15:41:19,012 INFO [train.py:1114] (3/4) Epoch 3, batch 4400, loss[loss=0.2646, simple_loss=0.3456, pruned_loss=0.09179, over 4806.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3559, pruned_loss=0.1107, over 941121.55 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:41:38,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=33153.333333333336, ans=0.125 +2024-07-27 15:41:42,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.57 vs. limit=10.0 +2024-07-27 15:41:47,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=33180.0, ans=0.0 +2024-07-27 15:41:52,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=33180.0, ans=0.2 +2024-07-27 15:41:58,385 INFO [train.py:1114] (3/4) Epoch 3, batch 4450, loss[loss=0.2603, simple_loss=0.3226, pruned_loss=0.09898, over 4943.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3568, pruned_loss=0.1114, over 939138.09 frames. ], batch size: 12, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:42:00,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.16 vs. 
limit=22.5 +2024-07-27 15:42:08,444 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 6.763e+01 7.448e+01 8.954e+01 1.362e+02, threshold=1.490e+02, percent-clipped=1.0 +2024-07-27 15:42:12,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=33220.0, ans=0.0 +2024-07-27 15:42:14,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33220.0, ans=0.0 +2024-07-27 15:42:24,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=33233.333333333336, ans=0.125 +2024-07-27 15:42:28,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.90 vs. limit=15.0 +2024-07-27 15:42:31,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=33246.666666666664, ans=0.0036420289855072473 +2024-07-27 15:42:44,699 INFO [train.py:1114] (3/4) Epoch 3, batch 4500, loss[loss=0.308, simple_loss=0.3695, pruned_loss=0.1232, over 4737.00 frames. ], tot_loss[loss=0.2882, simple_loss=0.3559, pruned_loss=0.1102, over 938067.03 frames. ], batch size: 14, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:42:53,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33273.333333333336, ans=0.0 +2024-07-27 15:43:00,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.27 vs. limit=10.0 +2024-07-27 15:43:12,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=33313.333333333336, ans=0.1 +2024-07-27 15:43:18,850 INFO [train.py:1114] (3/4) Epoch 3, batch 4550, loss[loss=0.2738, simple_loss=0.3379, pruned_loss=0.1049, over 4896.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3552, pruned_loss=0.1096, over 940063.50 frames. ], batch size: 13, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:43:25,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=33326.666666666664, ans=0.125 +2024-07-27 15:43:30,999 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.122e+01 6.619e+01 7.429e+01 8.895e+01 1.429e+02, threshold=1.486e+02, percent-clipped=0.0 +2024-07-27 15:43:31,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=33340.0, ans=0.0 +2024-07-27 15:43:34,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=33353.333333333336, ans=0.2 +2024-07-27 15:43:50,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=33380.0, ans=0.025 +2024-07-27 15:43:55,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=33380.0, ans=0.125 +2024-07-27 15:43:57,984 INFO [train.py:1114] (3/4) Epoch 3, batch 4600, loss[loss=0.2623, simple_loss=0.3338, pruned_loss=0.09536, over 4515.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3535, pruned_loss=0.1091, over 938529.30 frames. 
], batch size: 21, lr: 2.25e-02, grad_scale: 64.0 +2024-07-27 15:43:58,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=33393.333333333336, ans=0.0 +2024-07-27 15:44:05,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=33406.666666666664, ans=0.125 +2024-07-27 15:44:21,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33433.333333333336, ans=0.125 +2024-07-27 15:44:24,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.18 vs. limit=22.5 +2024-07-27 15:44:30,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=33446.666666666664, ans=0.05 +2024-07-27 15:44:32,060 INFO [train.py:1114] (3/4) Epoch 3, batch 4650, loss[loss=0.3241, simple_loss=0.3877, pruned_loss=0.1303, over 4843.00 frames. ], tot_loss[loss=0.2869, simple_loss=0.3548, pruned_loss=0.1095, over 939941.68 frames. ], batch size: 16, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:44:42,684 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.996e+01 6.580e+01 7.328e+01 8.938e+01 2.315e+02, threshold=1.466e+02, percent-clipped=1.0 +2024-07-27 15:44:44,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=33473.333333333336, ans=0.035 +2024-07-27 15:44:48,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33486.666666666664, ans=0.1 +2024-07-27 15:44:50,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=33486.666666666664, ans=0.0 +2024-07-27 15:45:00,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.13 vs. limit=15.0 +2024-07-27 15:45:07,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33513.333333333336, ans=0.125 +2024-07-27 15:45:08,282 INFO [train.py:1114] (3/4) Epoch 3, batch 4700, loss[loss=0.2225, simple_loss=0.2918, pruned_loss=0.07666, over 4707.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.353, pruned_loss=0.1089, over 937811.61 frames. ], batch size: 11, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:10,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=33526.666666666664, ans=0.0 +2024-07-27 15:45:15,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=33540.0, ans=0.0035782608695652165 +2024-07-27 15:45:24,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.08 vs. 
limit=15.0 +2024-07-27 15:45:26,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=33553.333333333336, ans=0.125 +2024-07-27 15:45:27,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=33553.333333333336, ans=0.2 +2024-07-27 15:45:30,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=33566.666666666664, ans=0.003572463768115943 +2024-07-27 15:45:44,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=33580.0, ans=0.025 +2024-07-27 15:45:46,783 INFO [train.py:1114] (3/4) Epoch 3, batch 4750, loss[loss=0.2702, simple_loss=0.3433, pruned_loss=0.09858, over 4498.00 frames. ], tot_loss[loss=0.2873, simple_loss=0.3544, pruned_loss=0.1101, over 936224.84 frames. ], batch size: 21, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:45:52,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=33593.333333333336, ans=0.125 +2024-07-27 15:45:54,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=33606.666666666664, ans=0.125 +2024-07-27 15:45:57,481 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.591e+01 6.473e+01 7.371e+01 8.571e+01 1.233e+02, threshold=1.474e+02, percent-clipped=0.0 +2024-07-27 15:46:05,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=33620.0, ans=0.125 +2024-07-27 15:46:12,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.84 vs. limit=15.0 +2024-07-27 15:46:19,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=33646.666666666664, ans=0.025 +2024-07-27 15:46:20,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=33660.0, ans=0.2 +2024-07-27 15:46:20,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=33660.0, ans=0.003552173913043478 +2024-07-27 15:46:21,156 INFO [train.py:1114] (3/4) Epoch 3, batch 4800, loss[loss=0.3012, simple_loss=0.3697, pruned_loss=0.1164, over 4698.00 frames. ], tot_loss[loss=0.2874, simple_loss=0.3543, pruned_loss=0.1103, over 933231.19 frames. 
], batch size: 13, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:46:27,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33673.333333333336, ans=0.125 +2024-07-27 15:46:31,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33673.333333333336, ans=0.1 +2024-07-27 15:46:31,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=33673.333333333336, ans=0.5 +2024-07-27 15:46:33,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=33673.333333333336, ans=0.00354927536231884 +2024-07-27 15:46:39,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.50 vs. limit=15.0 +2024-07-27 15:46:40,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.54 vs. limit=15.0 +2024-07-27 15:46:47,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=33700.0, ans=10.0 +2024-07-27 15:46:56,841 INFO [train.py:1114] (3/4) Epoch 3, batch 4850, loss[loss=0.2856, simple_loss=0.3436, pruned_loss=0.1138, over 4746.00 frames. ], tot_loss[loss=0.2898, simple_loss=0.3561, pruned_loss=0.1118, over 932422.49 frames. ], batch size: 14, lr: 2.24e-02, grad_scale: 64.0 +2024-07-27 15:46:57,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.07 vs. limit=6.0 +2024-07-27 15:47:07,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.462e+01 7.308e+01 8.577e+01 1.443e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-27 15:47:19,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=33766.666666666664, ans=0.0 +2024-07-27 15:47:21,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33766.666666666664, ans=0.1 +2024-07-27 15:47:24,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=33780.0, ans=0.025 +2024-07-27 15:47:27,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=33780.0, ans=0.0 +2024-07-27 15:47:30,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.14 vs. limit=22.5 +2024-07-27 15:47:31,286 INFO [train.py:1114] (3/4) Epoch 3, batch 4900, loss[loss=0.3444, simple_loss=0.3914, pruned_loss=0.1487, over 4769.00 frames. ], tot_loss[loss=0.2899, simple_loss=0.3559, pruned_loss=0.112, over 934041.54 frames. 
+2024-07-27 15:47:39,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33806.666666666664, ans=0.1 +2024-07-27 15:47:46,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=33820.0, ans=0.0035173913043478264 +2024-07-27 15:47:53,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=33833.333333333336, ans=0.125 +2024-07-27 15:48:03,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=33846.666666666664, ans=0.2 +2024-07-27 15:48:04,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.94 vs. limit=22.5 +2024-07-27 15:48:04,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=33846.666666666664, ans=0.125 +2024-07-27 15:48:06,513 INFO [train.py:1114] (3/4) Epoch 3, batch 4950, loss[loss=0.412, simple_loss=0.4461, pruned_loss=0.1889, over 3308.00 frames. ], tot_loss[loss=0.292, simple_loss=0.3578, pruned_loss=0.1131, over 930897.26 frames. ], batch size: 35, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:06,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33860.0, ans=0.0 +2024-07-27 15:48:12,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0 +2024-07-27 15:48:16,793 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.224e+01 6.458e+01 7.350e+01 8.583e+01 1.982e+02, threshold=1.470e+02, percent-clipped=1.0 +2024-07-27 15:48:16,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=33873.333333333336, ans=0.125 +2024-07-27 15:48:18,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=33873.333333333336, ans=0.125 +2024-07-27 15:48:24,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.16 vs. limit=15.0 +2024-07-27 15:48:28,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=33900.0, ans=0.125 +2024-07-27 15:48:30,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=33900.0, ans=0.125 +2024-07-27 15:48:34,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=33913.333333333336, ans=0.125 +2024-07-27 15:48:35,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=33913.333333333336, ans=0.0 +2024-07-27 15:48:41,078 INFO [train.py:1114] (3/4) Epoch 3, batch 5000, loss[loss=0.3165, simple_loss=0.4032, pruned_loss=0.1149, over 4655.00 frames. ], tot_loss[loss=0.2897, simple_loss=0.3561, pruned_loss=0.1117, over 934880.18 frames.
], batch size: 14, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:48:46,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=33926.666666666664, ans=0.125 +2024-07-27 15:48:48,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=33940.0, ans=0.2 +2024-07-27 15:48:50,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.92 vs. limit=10.0 +2024-07-27 15:49:01,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=33953.333333333336, ans=0.1 +2024-07-27 15:49:02,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=33966.666666666664, ans=0.0 +2024-07-27 15:49:13,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=33980.0, ans=0.125 +2024-07-27 15:49:19,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=33980.0, ans=0.1 +2024-07-27 15:49:21,818 INFO [train.py:1114] (3/4) Epoch 3, batch 5050, loss[loss=0.2229, simple_loss=0.314, pruned_loss=0.06592, over 4839.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3551, pruned_loss=0.1112, over 937346.21 frames. ], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:49:24,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=33993.333333333336, ans=0.2 +2024-07-27 15:49:38,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=34006.666666666664, ans=0.0 +2024-07-27 15:49:40,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.504e+01 6.490e+01 6.878e+01 7.828e+01 1.247e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-27 15:49:46,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=34020.0, ans=0.125 +2024-07-27 15:49:53,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=34033.333333333336, ans=0.025 +2024-07-27 15:50:00,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=34046.666666666664, ans=0.0 +2024-07-27 15:50:05,321 INFO [train.py:1114] (3/4) Epoch 3, batch 5100, loss[loss=0.269, simple_loss=0.3416, pruned_loss=0.09816, over 4773.00 frames. ], tot_loss[loss=0.2891, simple_loss=0.3554, pruned_loss=0.1114, over 934800.02 frames. 
], batch size: 12, lr: 2.23e-02, grad_scale: 64.0 +2024-07-27 15:50:09,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=34060.0, ans=0.125 +2024-07-27 15:50:12,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34060.0, ans=0.125 +2024-07-27 15:50:16,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=34073.333333333336, ans=0.0 +2024-07-27 15:50:29,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=34086.666666666664, ans=0.125 +2024-07-27 15:50:29,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=15.0 +2024-07-27 15:50:37,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=34113.333333333336, ans=0.0 +2024-07-27 15:50:37,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=34113.333333333336, ans=0.125 +2024-07-27 15:50:37,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=34113.333333333336, ans=0.125 +2024-07-27 15:50:37,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=34113.333333333336, ans=0.0034536231884057965 +2024-07-27 15:50:37,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=12.0 +2024-07-27 15:50:45,818 INFO [train.py:1114] (3/4) Epoch 3, batch 5150, loss[loss=0.295, simple_loss=0.3649, pruned_loss=0.1126, over 4830.00 frames. ], tot_loss[loss=0.2895, simple_loss=0.3557, pruned_loss=0.1116, over 935401.15 frames. ], batch size: 16, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:50:49,439 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.233e+01 +2024-07-27 15:50:56,020 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.638e+01 7.768e+01 8.989e+01 1.373e+02, threshold=1.554e+02, percent-clipped=0.0 +2024-07-27 15:51:00,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=34153.333333333336, ans=0.0 +2024-07-27 15:51:03,494 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.03 vs. limit=6.0 +2024-07-27 15:51:06,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.70 vs. limit=15.0 +2024-07-27 15:51:08,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34166.666666666664, ans=0.1 +2024-07-27 15:51:12,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=34180.0, ans=0.0 +2024-07-27 15:51:22,077 INFO [train.py:1114] (3/4) Epoch 3, batch 5200, loss[loss=0.3081, simple_loss=0.3781, pruned_loss=0.1191, over 4682.00 frames. 
], tot_loss[loss=0.2878, simple_loss=0.3543, pruned_loss=0.1106, over 936118.69 frames. ], batch size: 14, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:51:30,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=34206.666666666664, ans=0.025 +2024-07-27 15:51:40,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=34220.0, ans=0.035 +2024-07-27 15:51:43,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.56 vs. limit=10.0 +2024-07-27 15:51:47,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=34233.333333333336, ans=0.0 +2024-07-27 15:51:48,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=34233.333333333336, ans=0.025 +2024-07-27 15:51:48,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=34233.333333333336, ans=0.125 +2024-07-27 15:51:51,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=34233.333333333336, ans=0.025 +2024-07-27 15:51:52,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=34233.333333333336, ans=0.125 +2024-07-27 15:51:54,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=34233.333333333336, ans=0.07 +2024-07-27 15:52:04,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34246.666666666664, ans=0.1 +2024-07-27 15:52:05,533 INFO [train.py:1114] (3/4) Epoch 3, batch 5250, loss[loss=0.2692, simple_loss=0.3349, pruned_loss=0.1017, over 4896.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3527, pruned_loss=0.1095, over 935686.67 frames. 
], batch size: 13, lr: 2.22e-02, grad_scale: 64.0 +2024-07-27 15:52:08,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=34260.0, ans=0.5 +2024-07-27 15:52:14,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=34273.333333333336, ans=0.05 +2024-07-27 15:52:17,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34273.333333333336, ans=0.0 +2024-07-27 15:52:18,630 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.312e+01 6.590e+01 7.442e+01 8.415e+01 1.347e+02, threshold=1.488e+02, percent-clipped=0.0 +2024-07-27 15:52:32,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=34300.0, ans=0.125 +2024-07-27 15:52:34,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=34300.0, ans=0.05 +2024-07-27 15:52:34,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=34300.0, ans=0.125 +2024-07-27 15:52:35,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.03 vs. limit=22.5 +2024-07-27 15:52:40,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=34313.333333333336, ans=0.125 +2024-07-27 15:52:45,538 INFO [train.py:1114] (3/4) Epoch 3, batch 5300, loss[loss=0.2992, simple_loss=0.3711, pruned_loss=0.1136, over 4622.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3525, pruned_loss=0.1098, over 934392.21 frames. ], batch size: 16, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:52:54,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=34340.0, ans=0.2 +2024-07-27 15:52:55,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=34340.0, ans=0.125 +2024-07-27 15:52:55,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.37 vs. limit=15.0 +2024-07-27 15:52:59,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=34353.333333333336, ans=0.2 +2024-07-27 15:52:59,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=34353.333333333336, ans=0.0 +2024-07-27 15:53:01,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=34353.333333333336, ans=0.09899494936611666 +2024-07-27 15:53:03,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=34353.333333333336, ans=0.1 +2024-07-27 15:53:14,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=34380.0, ans=0.025 +2024-07-27 15:53:21,081 INFO [train.py:1114] (3/4) Epoch 3, batch 5350, loss[loss=0.2112, simple_loss=0.2863, pruned_loss=0.06807, over 4568.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.353, pruned_loss=0.1094, over 936312.75 frames. 
], batch size: 10, lr: 2.22e-02, grad_scale: 32.0 +2024-07-27 15:53:25,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.06 vs. limit=15.0 +2024-07-27 15:53:32,206 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.392e+01 6.457e+01 7.092e+01 8.534e+01 1.457e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 15:53:43,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=34433.333333333336, ans=0.0 +2024-07-27 15:53:48,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.16 vs. limit=15.0 +2024-07-27 15:53:56,295 INFO [train.py:1114] (3/4) Epoch 3, batch 5400, loss[loss=0.3135, simple_loss=0.3732, pruned_loss=0.1269, over 4252.00 frames. ], tot_loss[loss=0.288, simple_loss=0.3543, pruned_loss=0.1108, over 929950.83 frames. ], batch size: 25, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:53:57,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=34460.0, ans=0.125 +2024-07-27 15:53:57,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=34460.0, ans=0.125 +2024-07-27 15:53:59,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.31 vs. limit=15.0 +2024-07-27 15:54:02,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.64 vs. limit=15.0 +2024-07-27 15:54:11,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.82 vs. limit=22.5 +2024-07-27 15:54:26,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=34513.333333333336, ans=0.125 +2024-07-27 15:54:30,037 INFO [train.py:1114] (3/4) Epoch 3, batch 5450, loss[loss=0.2483, simple_loss=0.3028, pruned_loss=0.09693, over 4689.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3536, pruned_loss=0.1104, over 932603.21 frames. ], batch size: 11, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:54:39,856 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. 
limit=15.0 +2024-07-27 15:54:40,908 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.485e+01 6.790e+01 7.649e+01 9.479e+01 1.674e+02, threshold=1.530e+02, percent-clipped=4.0 +2024-07-27 15:54:44,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=34553.333333333336, ans=0.0 +2024-07-27 15:54:53,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=34566.666666666664, ans=0.05 +2024-07-27 15:54:56,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=34566.666666666664, ans=0.125 +2024-07-27 15:55:03,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=34580.0, ans=0.2 +2024-07-27 15:55:08,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=34593.333333333336, ans=0.0 +2024-07-27 15:55:09,460 INFO [train.py:1114] (3/4) Epoch 3, batch 5500, loss[loss=0.3232, simple_loss=0.3725, pruned_loss=0.1369, over 4388.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3536, pruned_loss=0.1107, over 930358.43 frames. ], batch size: 25, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:55:19,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=34606.666666666664, ans=0.125 +2024-07-27 15:55:24,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.17 vs. limit=15.0 +2024-07-27 15:55:25,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=34620.0, ans=0.125 +2024-07-27 15:55:25,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-07-27 15:55:25,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.18 vs. limit=22.5 +2024-07-27 15:55:26,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=34620.0, ans=0.0 +2024-07-27 15:55:29,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=19.08 vs. limit=15.0 +2024-07-27 15:55:35,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=34633.333333333336, ans=0.125 +2024-07-27 15:55:45,405 INFO [train.py:1114] (3/4) Epoch 3, batch 5550, loss[loss=0.2786, simple_loss=0.3318, pruned_loss=0.1127, over 4699.00 frames. ], tot_loss[loss=0.2876, simple_loss=0.3536, pruned_loss=0.1108, over 932892.41 frames. 
], batch size: 12, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:55:54,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=34660.0, ans=0.125 +2024-07-27 15:56:05,368 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.078e+01 6.918e+01 7.816e+01 8.981e+01 2.239e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 15:56:06,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.93 vs. limit=15.0 +2024-07-27 15:56:09,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=34686.666666666664, ans=0.025 +2024-07-27 15:56:15,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.80 vs. limit=15.0 +2024-07-27 15:56:16,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34700.0, ans=0.1 +2024-07-27 15:56:19,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=34700.0, ans=0.1 +2024-07-27 15:56:27,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.36 vs. limit=15.0 +2024-07-27 15:56:28,881 INFO [train.py:1114] (3/4) Epoch 3, batch 5600, loss[loss=0.2759, simple_loss=0.349, pruned_loss=0.1014, over 4735.00 frames. ], tot_loss[loss=0.2887, simple_loss=0.3548, pruned_loss=0.1113, over 934186.79 frames. ], batch size: 14, lr: 2.21e-02, grad_scale: 32.0 +2024-07-27 15:56:30,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=34726.666666666664, ans=0.2 +2024-07-27 15:56:31,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=34726.666666666664, ans=0.125 +2024-07-27 15:56:52,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=34766.666666666664, ans=0.0 +2024-07-27 15:56:59,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=34780.0, ans=0.05 +2024-07-27 15:57:04,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=34793.333333333336, ans=0.0 +2024-07-27 15:57:04,496 INFO [train.py:1114] (3/4) Epoch 3, batch 5650, loss[loss=0.3426, simple_loss=0.4061, pruned_loss=0.1396, over 4496.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3533, pruned_loss=0.1101, over 936550.13 frames. 
], batch size: 21, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:57:06,150 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 15:57:19,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.551e+01 6.421e+01 6.946e+01 8.141e+01 1.354e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-27 15:57:27,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=34820.0, ans=0.0 +2024-07-27 15:57:31,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.46 vs. limit=15.0 +2024-07-27 15:57:37,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=34846.666666666664, ans=0.125 +2024-07-27 15:57:42,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=12.0 +2024-07-27 15:57:43,149 INFO [train.py:1114] (3/4) Epoch 3, batch 5700, loss[loss=0.2575, simple_loss=0.3382, pruned_loss=0.08834, over 4697.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.354, pruned_loss=0.1102, over 937458.52 frames. ], batch size: 13, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:57:45,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=34860.0, ans=0.2 +2024-07-27 15:57:53,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=34873.333333333336, ans=0.0032884057971014494 +2024-07-27 15:58:05,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.30 vs. limit=22.5 +2024-07-27 15:58:05,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.73 vs. limit=15.0 +2024-07-27 15:58:14,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=34913.333333333336, ans=0.125 +2024-07-27 15:58:16,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=34913.333333333336, ans=0.125 +2024-07-27 15:58:19,543 INFO [train.py:1114] (3/4) Epoch 3, batch 5750, loss[loss=0.3593, simple_loss=0.4111, pruned_loss=0.1537, over 4692.00 frames. ], tot_loss[loss=0.2906, simple_loss=0.3567, pruned_loss=0.1123, over 937728.68 frames. ], batch size: 19, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:58:30,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.375e+01 6.773e+01 7.385e+01 8.434e+01 1.352e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 15:58:34,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=34953.333333333336, ans=0.125 +2024-07-27 15:58:36,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-07-27 15:58:39,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=34953.333333333336, ans=0.125 +2024-07-27 15:58:50,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.45 vs. 
limit=22.5 +2024-07-27 15:58:56,482 INFO [train.py:1114] (3/4) Epoch 3, batch 5800, loss[loss=0.313, simple_loss=0.375, pruned_loss=0.1255, over 4696.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3556, pruned_loss=0.1111, over 936764.70 frames. ], batch size: 19, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:58:59,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=34993.333333333336, ans=0.125 +2024-07-27 15:59:06,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35006.666666666664, ans=0.125 +2024-07-27 15:59:06,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35006.666666666664, ans=0.1 +2024-07-27 15:59:07,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=35006.666666666664, ans=0.2 +2024-07-27 15:59:12,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=35020.0, ans=0.125 +2024-07-27 15:59:14,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=35020.0, ans=0.0 +2024-07-27 15:59:25,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.58 vs. limit=15.0 +2024-07-27 15:59:30,546 INFO [train.py:1114] (3/4) Epoch 3, batch 5850, loss[loss=0.2878, simple_loss=0.3539, pruned_loss=0.1108, over 4538.00 frames. ], tot_loss[loss=0.2867, simple_loss=0.3534, pruned_loss=0.11, over 937413.30 frames. ], batch size: 21, lr: 2.20e-02, grad_scale: 32.0 +2024-07-27 15:59:37,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=35060.0, ans=0.0 +2024-07-27 15:59:39,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35060.0, ans=0.125 +2024-07-27 15:59:45,119 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.773e+01 7.644e+01 9.466e+01 1.883e+02, threshold=1.529e+02, percent-clipped=1.0 +2024-07-27 15:59:47,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=35086.666666666664, ans=0.125 +2024-07-27 15:59:56,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=35100.0, ans=0.125 +2024-07-27 16:00:10,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.77 vs. limit=15.0 +2024-07-27 16:00:12,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=35126.666666666664, ans=0.125 +2024-07-27 16:00:12,753 INFO [train.py:1114] (3/4) Epoch 3, batch 5900, loss[loss=0.2835, simple_loss=0.3496, pruned_loss=0.1087, over 4687.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3535, pruned_loss=0.1096, over 937560.11 frames. 
], batch size: 15, lr: 2.19e-02, grad_scale: 16.0 +2024-07-27 16:00:25,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=35140.0, ans=0.2 +2024-07-27 16:00:30,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35153.333333333336, ans=0.1 +2024-07-27 16:00:52,883 INFO [train.py:1114] (3/4) Epoch 3, batch 5950, loss[loss=0.3286, simple_loss=0.3877, pruned_loss=0.1348, over 4680.00 frames. ], tot_loss[loss=0.2868, simple_loss=0.3537, pruned_loss=0.11, over 940069.98 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 16.0 +2024-07-27 16:01:06,207 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.498e+01 6.928e+01 7.896e+01 9.145e+01 1.429e+02, threshold=1.579e+02, percent-clipped=0.0 +2024-07-27 16:01:06,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=35206.666666666664, ans=0.125 +2024-07-27 16:01:11,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35220.0, ans=0.1 +2024-07-27 16:01:15,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=35233.333333333336, ans=0.0032101449275362317 +2024-07-27 16:01:24,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=35246.666666666664, ans=0.0 +2024-07-27 16:01:29,012 INFO [train.py:1114] (3/4) Epoch 3, batch 6000, loss[loss=0.3028, simple_loss=0.3721, pruned_loss=0.1168, over 4293.00 frames. ], tot_loss[loss=0.2872, simple_loss=0.3539, pruned_loss=0.1103, over 937322.81 frames. ], batch size: 25, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:01:29,012 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 16:01:34,448 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.7690, 4.8580, 4.8122, 5.5306], device='cuda:3') +2024-07-27 16:01:38,732 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.8177, 2.9770, 4.5474, 4.8632], device='cuda:3') +2024-07-27 16:01:39,368 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9281, 5.0707, 5.0390, 5.7107], device='cuda:3') +2024-07-27 16:01:40,727 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2286, simple_loss=0.328, pruned_loss=0.06459, over 944034.00 frames. +2024-07-27 16:01:40,728 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 16:01:52,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35273.333333333336, ans=0.1 +2024-07-27 16:02:01,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.23 vs. limit=15.0 +2024-07-27 16:02:17,520 INFO [train.py:1114] (3/4) Epoch 3, batch 6050, loss[loss=0.2797, simple_loss=0.3363, pruned_loss=0.1116, over 4782.00 frames. ], tot_loss[loss=0.2878, simple_loss=0.3542, pruned_loss=0.1107, over 938878.86 frames. ], batch size: 12, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:02:19,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=15.0
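The validation pass above also logs attn_weights_entropy from zipformer.py:1858: the entropy of each attention head's softmaxed weight distribution, a quick diagnostic for heads that have collapsed onto a single key (entropy near zero) versus heads that stayed nearly uniform (entropy near the log of the key count). A self-contained sketch of that statistic; the tensor layout here is an assumption, not the exact zipformer shape:

```python
# Sketch of a per-head attention-entropy diagnostic. Assumption: attn has
# shape (num_heads, query_len, key_len) with rows already softmax-normalized.
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    eps = 1.0e-20  # guard against log(0) for exactly-zero weights
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # entropy per query position
    return ent.mean(dim=-1)  # average over queries -> one value per head

weights = torch.softmax(torch.randn(4, 10, 10), dim=-1)
print(attn_weights_entropy(weights))  # tensor of 4 per-head entropies
```

Logging this once per validation cycle, as the entries above do, makes degenerate attention heads visible without dumping the full weight tensors.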
+2024-07-27 16:02:23,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35340.0, ans=0.125 +2024-07-27 16:02:25,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=35340.0, ans=0.125 +2024-07-27 16:02:25,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=35340.0, ans=0.2 +2024-07-27 16:02:29,017 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.303e+01 6.741e+01 7.557e+01 8.762e+01 1.550e+02, threshold=1.511e+02, percent-clipped=0.0 +2024-07-27 16:02:29,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=35340.0, ans=0.0 +2024-07-27 16:02:29,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.06 vs. limit=15.0 +2024-07-27 16:02:29,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=35340.0, ans=10.0 +2024-07-27 16:02:52,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=35380.0, ans=0.0 +2024-07-27 16:02:54,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=35380.0, ans=0.0 +2024-07-27 16:02:57,046 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.40 vs. limit=15.0 +2024-07-27 16:02:57,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=35380.0, ans=0.0 +2024-07-27 16:02:59,369 INFO [train.py:1114] (3/4) Epoch 3, batch 6100, loss[loss=0.2511, simple_loss=0.3358, pruned_loss=0.0832, over 4679.00 frames. ], tot_loss[loss=0.286, simple_loss=0.3526, pruned_loss=0.1097, over 938376.24 frames. ], batch size: 15, lr: 2.19e-02, grad_scale: 32.0 +2024-07-27 16:03:00,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35393.333333333336, ans=0.125 +2024-07-27 16:03:03,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=35393.333333333336, ans=0.0 +2024-07-27 16:03:07,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=35406.666666666664, ans=0.2 +2024-07-27 16:03:09,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=35406.666666666664, ans=0.015 +2024-07-27 16:03:17,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=35420.0, ans=0.2 +2024-07-27 16:03:29,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=35446.666666666664, ans=0.5 +2024-07-27 16:03:32,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35446.666666666664, ans=0.1 +2024-07-27 16:03:33,286 INFO [train.py:1114] (3/4) Epoch 3, batch 6150, loss[loss=0.3602, simple_loss=0.401, pruned_loss=0.1596, over 3374.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3523, pruned_loss=0.109, over 937227.44 frames. ], batch size: 35, lr: 2.18e-02, grad_scale: 32.0
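The scaling.py:1024 Whitening entries report a per-module whiteness metric against a limit (e.g. metric=9.06 vs. limit=15.0); when the metric exceeds the limit, the module nudges activations back toward a whiter, less correlated covariance. One plausible statistic of this kind is the ratio of the mean squared covariance eigenvalue to the squared mean eigenvalue, which is 1.0 for perfectly white features and grows with anisotropy. A hedged sketch follows; the exact formula in icefall's scaling.py may differ:

```python
# Sketch of a whiteness statistic for (frames, channels) features, computed
# per channel group as in the "num_groups=..., num_channels=..." log fields.
# Assumption: metric = mean(eig^2) / mean(eig)^2 of the feature covariance.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    frames, channels = x.shape
    x = x.reshape(frames, num_groups, channels // num_groups)
    metrics = []
    for g in range(num_groups):
        feats = x[:, g, :] - x[:, g, :].mean(dim=0)
        cov = feats.T @ feats / frames
        eigs = torch.linalg.eigvalsh(cov)  # covariance eigenvalues
        metrics.append((eigs**2).mean() / eigs.mean() ** 2)  # 1.0 when white
    return float(torch.stack(metrics).mean())

x = torch.randn(1000, 256)  # near-white synthetic features
print(f"metric={whitening_metric(x, num_groups=1):.2f} vs. limit=15.0")
```

The "metric vs. limit" lines are thus soft-constraint diagnostics: occasional excursions above the limit (like the 16.80 vs. 15.0 seen earlier in this log) trigger a corrective gradient rather than a hard failure.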
+2024-07-27 16:03:35,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=35460.0, ans=0.125 +2024-07-27 16:03:43,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=35473.333333333336, ans=0.0 +2024-07-27 16:03:44,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=35473.333333333336, ans=0.0 +2024-07-27 16:03:46,860 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.689e+01 7.561e+01 9.895e+01 1.847e+02, threshold=1.512e+02, percent-clipped=5.0 +2024-07-27 16:03:59,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.69 vs. limit=15.0 +2024-07-27 16:04:05,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35513.333333333336, ans=0.125 +2024-07-27 16:04:06,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=35513.333333333336, ans=0.125 +2024-07-27 16:04:09,277 INFO [train.py:1114] (3/4) Epoch 3, batch 6200, loss[loss=0.2856, simple_loss=0.3628, pruned_loss=0.1042, over 4744.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3514, pruned_loss=0.1086, over 936838.27 frames. ], batch size: 14, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:04:15,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=35540.0, ans=0.025 +2024-07-27 16:04:27,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=35553.333333333336, ans=0.125 +2024-07-27 16:04:34,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=35566.666666666664, ans=0.125 +2024-07-27 16:04:36,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=35580.0, ans=0.5 +2024-07-27 16:04:38,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=35580.0, ans=0.003134782608695652 +2024-07-27 16:04:39,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=35580.0, ans=0.0 +2024-07-27 16:04:43,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.20 vs. limit=22.5 +2024-07-27 16:04:43,468 INFO [train.py:1114] (3/4) Epoch 3, batch 6250, loss[loss=0.2671, simple_loss=0.3516, pruned_loss=0.0913, over 4825.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3517, pruned_loss=0.1091, over 933617.16 frames. ], batch size: 14, lr: 2.18e-02, grad_scale: 32.0
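Each train.py:1114 line pairs the current batch's loss (over that batch's own frames) with tot_loss, a running average weighted by frame counts over recent batches, which is why the frame totals hover around 930-940k rather than growing without bound. A sketch of that bookkeeping, assuming an exponentially decayed frame-weighted sum (icefall's metrics tracking is of this flavor; the decay constant below is illustrative):

```python
# Sketch of a frame-weighted running loss: both the loss sum and the frame
# count decay geometrically, so tot_loss reflects a window of recent batches.
class RunningLoss:
    def __init__(self, decay: float = 0.99):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss_sum: float, batch_frames: float):
        self.loss_sum = self.decay * self.loss_sum + batch_loss_sum
        self.frames = self.decay * self.frames + batch_frames

    @property
    def tot_loss(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tracker = RunningLoss()
tracker.update(batch_loss_sum=0.27 * 4498.0, batch_frames=4498.0)
print(f"tot_loss[loss={tracker.tot_loss:.4g}, over {tracker.frames:.2f} frames.]")
```

Weighting by frames rather than by batches keeps short utterances from dominating the average, consistent with the per-frame loss convention used throughout this log.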
+2024-07-27 16:04:43,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=35593.333333333336, ans=0.0 +2024-07-27 16:04:54,894 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.517e+01 6.327e+01 7.433e+01 8.878e+01 1.317e+02, threshold=1.487e+02, percent-clipped=0.0 +2024-07-27 16:04:58,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=35606.666666666664, ans=0.125 +2024-07-27 16:04:59,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.56 vs. limit=15.0 +2024-07-27 16:05:04,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.13 vs. limit=15.0 +2024-07-27 16:05:05,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=35633.333333333336, ans=0.0 +2024-07-27 16:05:19,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.88 vs. limit=6.0 +2024-07-27 16:05:21,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=35646.666666666664, ans=0.125 +2024-07-27 16:05:23,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.69 vs. limit=22.5 +2024-07-27 16:05:27,311 INFO [train.py:1114] (3/4) Epoch 3, batch 6300, loss[loss=0.2332, simple_loss=0.303, pruned_loss=0.08168, over 4537.00 frames. ], tot_loss[loss=0.2859, simple_loss=0.3526, pruned_loss=0.1096, over 929827.31 frames. ], batch size: 10, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:05:30,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.93 vs. limit=15.0 +2024-07-27 16:05:34,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=35673.333333333336, ans=0.125 +2024-07-27 16:05:35,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=35673.333333333336, ans=0.1 +2024-07-27 16:05:38,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=35673.333333333336, ans=0.125 +2024-07-27 16:05:39,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-27 16:05:39,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.89 vs.
limit=15.0 +2024-07-27 16:05:44,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=35686.666666666664, ans=0.0 +2024-07-27 16:05:46,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=35700.0, ans=0.125 +2024-07-27 16:05:46,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35700.0, ans=0.1 +2024-07-27 16:05:48,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=35700.0, ans=0.0 +2024-07-27 16:05:50,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0 +2024-07-27 16:05:52,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.09 vs. limit=15.0 +2024-07-27 16:05:54,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=35713.333333333336, ans=0.125 +2024-07-27 16:05:54,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=35713.333333333336, ans=0.2 +2024-07-27 16:05:54,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=35713.333333333336, ans=0.0 +2024-07-27 16:05:58,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=35713.333333333336, ans=0.1 +2024-07-27 16:06:00,825 INFO [train.py:1114] (3/4) Epoch 3, batch 6350, loss[loss=0.2736, simple_loss=0.3507, pruned_loss=0.0982, over 4475.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3508, pruned_loss=0.1079, over 933849.38 frames. ], batch size: 21, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:06:09,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=35740.0, ans=0.125 +2024-07-27 16:06:12,644 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.099e+01 7.124e+01 7.949e+01 9.215e+01 1.375e+02, threshold=1.590e+02, percent-clipped=0.0 +2024-07-27 16:06:20,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=35753.333333333336, ans=0.2 +2024-07-27 16:06:27,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=35780.0, ans=0.125 +2024-07-27 16:06:36,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=35793.333333333336, ans=0.125 +2024-07-27 16:06:36,559 INFO [train.py:1114] (3/4) Epoch 3, batch 6400, loss[loss=0.3541, simple_loss=0.407, pruned_loss=0.1506, over 4636.00 frames. ], tot_loss[loss=0.2846, simple_loss=0.3515, pruned_loss=0.1088, over 935219.84 frames. ], batch size: 13, lr: 2.18e-02, grad_scale: 32.0 +2024-07-27 16:06:47,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.58 vs. 
limit=22.5 +2024-07-27 16:06:48,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.28 vs. limit=15.0 +2024-07-27 16:07:07,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=35833.333333333336, ans=0.0 +2024-07-27 16:07:15,635 INFO [train.py:1114] (3/4) Epoch 3, batch 6450, loss[loss=0.3209, simple_loss=0.3893, pruned_loss=0.1262, over 4523.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3519, pruned_loss=0.1084, over 938850.67 frames. ], batch size: 21, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:07:23,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.26 vs. limit=15.0 +2024-07-27 16:07:25,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-07-27 16:07:32,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 7.043e+01 8.051e+01 9.807e+01 1.613e+02, threshold=1.610e+02, percent-clipped=2.0 +2024-07-27 16:07:38,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=35886.666666666664, ans=0.003068115942028986 +2024-07-27 16:07:40,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.72 vs. limit=12.0 +2024-07-27 16:07:41,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=35900.0, ans=0.1 +2024-07-27 16:07:44,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=35900.0, ans=0.125 +2024-07-27 16:07:55,530 INFO [train.py:1114] (3/4) Epoch 3, batch 6500, loss[loss=0.4142, simple_loss=0.4258, pruned_loss=0.2013, over 3398.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3487, pruned_loss=0.1065, over 940336.28 frames. ], batch size: 36, lr: 2.17e-02, grad_scale: 32.0 +2024-07-27 16:07:56,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=35926.666666666664, ans=10.0 +2024-07-27 16:08:01,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=35926.666666666664, ans=0.035 +2024-07-27 16:08:03,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=35940.0, ans=0.0030565217391304344 +2024-07-27 16:08:05,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.85 vs. limit=22.5 +2024-07-27 16:08:08,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=35953.333333333336, ans=0.125 +2024-07-27 16:08:10,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=35953.333333333336, ans=0.125 +2024-07-27 16:08:12,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.31 vs. 
limit=6.0
+2024-07-27 16:08:23,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=35980.0, ans=0.2
+2024-07-27 16:08:25,820 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.54 vs. limit=15.0
+2024-07-27 16:08:26,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=35980.0, ans=0.125
+2024-07-27 16:08:26,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.43 vs. limit=22.5
+2024-07-27 16:08:29,005 INFO [train.py:1114] (3/4) Epoch 3, batch 6550, loss[loss=0.257, simple_loss=0.3165, pruned_loss=0.09874, over 4790.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3486, pruned_loss=0.1063, over 943233.98 frames. ], batch size: 11, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:08:37,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=35993.333333333336, ans=0.125
+2024-07-27 16:08:42,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=36006.666666666664, ans=0.0030420289855072466
+2024-07-27 16:08:44,616 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.03 vs. limit=15.0
+2024-07-27 16:08:44,817 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.305e+01 6.734e+01 7.453e+01 8.745e+01 1.645e+02, threshold=1.491e+02, percent-clipped=1.0
+2024-07-27 16:08:46,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36020.0, ans=0.125
+2024-07-27 16:08:48,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.32 vs. limit=15.0
+2024-07-27 16:08:56,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36033.333333333336, ans=0.125
+2024-07-27 16:09:05,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=36046.666666666664, ans=0.09899494936611666
+2024-07-27 16:09:07,348 INFO [train.py:1114] (3/4) Epoch 3, batch 6600, loss[loss=0.2875, simple_loss=0.3546, pruned_loss=0.1102, over 4930.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3489, pruned_loss=0.1064, over 945000.84 frames. ], batch size: 14, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:09:07,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=36060.0, ans=0.125
+2024-07-27 16:09:21,084 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:09:23,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=36086.666666666664, ans=0.125
+2024-07-27 16:09:32,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=36100.0, ans=0.09899494936611666
+2024-07-27 16:09:33,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=36100.0, ans=0.0
+2024-07-27 16:09:43,505 INFO [train.py:1114] (3/4) Epoch 3, batch 6650, loss[loss=0.2928, simple_loss=0.3645, pruned_loss=0.1106, over 4642.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3485, pruned_loss=0.1065, over 943820.78 frames. ], batch size: 17, lr: 2.17e-02, grad_scale: 32.0
+2024-07-27 16:09:49,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=36126.666666666664, ans=0.0
+2024-07-27 16:09:50,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=36140.0, ans=0.125
+2024-07-27 16:09:52,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=36140.0, ans=0.1
+2024-07-27 16:09:54,994 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.831e+01 8.168e+01 1.025e+02 1.593e+02, threshold=1.634e+02, percent-clipped=2.0
+2024-07-27 16:10:05,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=36153.333333333336, ans=0.003010144927536231
+2024-07-27 16:10:07,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=36153.333333333336, ans=0.125
+2024-07-27 16:10:19,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=36180.0, ans=0.0
+2024-07-27 16:10:24,378 INFO [train.py:1114] (3/4) Epoch 3, batch 6700, loss[loss=0.3109, simple_loss=0.3804, pruned_loss=0.1207, over 4674.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3493, pruned_loss=0.1067, over 942309.86 frames. ], batch size: 19, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:10:30,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36206.666666666664, ans=0.125
+2024-07-27 16:10:48,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.47 vs. limit=15.0
+2024-07-27 16:10:59,153 INFO [train.py:1114] (3/4) Epoch 3, batch 6750, loss[loss=0.3482, simple_loss=0.4066, pruned_loss=0.1449, over 4236.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3491, pruned_loss=0.1067, over 940373.04 frames. ], batch size: 25, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:11:03,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=36260.0, ans=0.1
+2024-07-27 16:11:10,904 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.207e+01 6.852e+01 7.798e+01 8.780e+01 1.253e+02, threshold=1.560e+02, percent-clipped=0.0
+2024-07-27 16:11:14,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36286.666666666664, ans=0.125
+2024-07-27 16:11:16,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.82 vs. limit=12.0
+2024-07-27 16:11:21,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=36300.0, ans=0.0029782608695652175
+2024-07-27 16:11:25,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=36313.333333333336, ans=0.125
+2024-07-27 16:11:27,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.27 vs. limit=15.0
+2024-07-27 16:11:29,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=36313.333333333336, ans=0.125
+2024-07-27 16:11:33,071 INFO [train.py:1114] (3/4) Epoch 3, batch 6800, loss[loss=0.2816, simple_loss=0.3546, pruned_loss=0.1043, over 4634.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3507, pruned_loss=0.1079, over 939028.30 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:11:37,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=36326.666666666664, ans=0.125
+2024-07-27 16:11:40,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.33 vs. limit=15.0
+2024-07-27 16:11:46,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=36340.0, ans=0.125
+2024-07-27 16:12:07,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.55 vs. limit=10.0
+2024-07-27 16:12:08,431 INFO [train.py:1114] (3/4) Epoch 3, batch 6850, loss[loss=0.2964, simple_loss=0.371, pruned_loss=0.1109, over 4693.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.351, pruned_loss=0.108, over 940556.20 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:12:09,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=26.60 vs. limit=22.5
+2024-07-27 16:12:09,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=36393.333333333336, ans=0.125
+2024-07-27 16:12:16,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.99 vs. limit=22.5
+2024-07-27 16:12:17,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=36406.666666666664, ans=0.0029550724637681164
+2024-07-27 16:12:19,975 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.863e+01 7.550e+01 8.711e+01 1.509e+02, threshold=1.510e+02, percent-clipped=0.0
+2024-07-27 16:12:35,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=36433.333333333336, ans=0.125
+2024-07-27 16:12:37,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.56 vs. limit=10.0
+2024-07-27 16:12:38,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.07 vs. limit=22.5
+2024-07-27 16:12:44,070 INFO [train.py:1114] (3/4) Epoch 3, batch 6900, loss[loss=0.3061, simple_loss=0.3723, pruned_loss=0.1199, over 4964.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3521, pruned_loss=0.1087, over 942824.29 frames. ], batch size: 13, lr: 2.16e-02, grad_scale: 32.0
+2024-07-27 16:12:45,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=36460.0, ans=0.1
+2024-07-27 16:12:54,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=36473.333333333336, ans=0.1
+2024-07-27 16:12:58,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=36486.666666666664, ans=0.0
+2024-07-27 16:13:07,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.30 vs. limit=6.0
+2024-07-27 16:13:14,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=36513.333333333336, ans=0.0
+2024-07-27 16:13:18,509 INFO [train.py:1114] (3/4) Epoch 3, batch 6950, loss[loss=0.2248, simple_loss=0.3077, pruned_loss=0.07095, over 4523.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3515, pruned_loss=0.1082, over 940203.56 frames. ], batch size: 10, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:13:23,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=36526.666666666664, ans=0.025
+2024-07-27 16:13:32,728 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.730e+01 8.056e+01 9.531e+01 1.380e+02, threshold=1.611e+02, percent-clipped=0.0
+2024-07-27 16:13:34,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=36553.333333333336, ans=0.0
+2024-07-27 16:13:34,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=36553.333333333336, ans=0.002923188405797101
+2024-07-27 16:13:54,237 INFO [train.py:1114] (3/4) Epoch 3, batch 7000, loss[loss=0.2745, simple_loss=0.3422, pruned_loss=0.1034, over 4595.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3496, pruned_loss=0.1068, over 938375.48 frames. ], batch size: 17, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:14:02,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.44 vs. limit=22.5
+2024-07-27 16:14:14,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=36633.333333333336, ans=0.0
+2024-07-27 16:14:14,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.72 vs. limit=10.0
+2024-07-27 16:14:14,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36633.333333333336, ans=0.125
+2024-07-27 16:14:21,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=36646.666666666664, ans=0.0
+2024-07-27 16:14:22,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=36646.666666666664, ans=0.07
+2024-07-27 16:14:26,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=36646.666666666664, ans=22.5
+2024-07-27 16:14:28,491 INFO [train.py:1114] (3/4) Epoch 3, batch 7050, loss[loss=0.2829, simple_loss=0.35, pruned_loss=0.1079, over 4712.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.349, pruned_loss=0.106, over 941612.45 frames. ], batch size: 19, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:14:33,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=36660.0, ans=0.2
+2024-07-27 16:14:49,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=36673.333333333336, ans=0.09899494936611666
+2024-07-27 16:14:50,687 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.466e+01 6.926e+01 7.603e+01 8.954e+01 1.226e+02, threshold=1.521e+02, percent-clipped=0.0
+2024-07-27 16:14:53,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=15.0
+2024-07-27 16:14:55,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=36686.666666666664, ans=0.1
+2024-07-27 16:15:02,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=36700.0, ans=0.125
+2024-07-27 16:15:07,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=36713.333333333336, ans=0.125
+2024-07-27 16:15:08,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=36713.333333333336, ans=0.125
+2024-07-27 16:15:13,274 INFO [train.py:1114] (3/4) Epoch 3, batch 7100, loss[loss=0.2595, simple_loss=0.3519, pruned_loss=0.08358, over 4788.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3509, pruned_loss=0.1077, over 936387.72 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:15:14,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0
+2024-07-27 16:15:27,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-27 16:15:29,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=36753.333333333336, ans=0.125
+2024-07-27 16:15:30,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.33 vs. limit=15.0
+2024-07-27 16:15:47,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=36780.0, ans=0.2
+2024-07-27 16:15:51,808 INFO [train.py:1114] (3/4) Epoch 3, batch 7150, loss[loss=0.3415, simple_loss=0.3846, pruned_loss=0.1492, over 4538.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3472, pruned_loss=0.1055, over 937486.43 frames. ], batch size: 21, lr: 2.15e-02, grad_scale: 16.0
+2024-07-27 16:15:53,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=36793.333333333336, ans=0.0
+2024-07-27 16:15:58,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.80 vs. limit=15.0
+2024-07-27 16:16:07,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.541e+01 6.708e+01 7.597e+01 9.458e+01 1.380e+02, threshold=1.519e+02, percent-clipped=0.0
+2024-07-27 16:16:08,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=36820.0, ans=0.125
+2024-07-27 16:16:21,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=36833.333333333336, ans=0.125
+2024-07-27 16:16:23,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=36846.666666666664, ans=0.125
+2024-07-27 16:16:24,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36846.666666666664, ans=0.125
+2024-07-27 16:16:29,422 INFO [train.py:1114] (3/4) Epoch 3, batch 7200, loss[loss=0.2976, simple_loss=0.3646, pruned_loss=0.1153, over 4804.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3476, pruned_loss=0.1057, over 937855.44 frames. ], batch size: 15, lr: 2.15e-02, grad_scale: 32.0
+2024-07-27 16:16:30,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.52 vs. limit=15.0
+2024-07-27 16:16:34,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=36860.0, ans=0.0
+2024-07-27 16:16:34,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=36860.0, ans=0.125
+2024-07-27 16:16:35,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=36873.333333333336, ans=0.125
+2024-07-27 16:16:52,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.56 vs. limit=15.0
+2024-07-27 16:17:10,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=36913.333333333336, ans=0.125
+2024-07-27 16:17:11,410 INFO [train.py:1114] (3/4) Epoch 3, batch 7250, loss[loss=0.2561, simple_loss=0.3296, pruned_loss=0.09129, over 4850.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3468, pruned_loss=0.1053, over 939337.04 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:12,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=36926.666666666664, ans=0.125
+2024-07-27 16:17:21,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=36940.0, ans=0.125
+2024-07-27 16:17:23,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.548e+01 7.607e+01 9.272e+01 1.593e+02, threshold=1.521e+02, percent-clipped=2.0
+2024-07-27 16:17:29,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=36953.333333333336, ans=0.0028362318840579707
+2024-07-27 16:17:34,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.92 vs. limit=22.5
+2024-07-27 16:17:42,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.20 vs. limit=15.0
+2024-07-27 16:17:44,568 INFO [train.py:1114] (3/4) Epoch 3, batch 7300, loss[loss=0.2475, simple_loss=0.3204, pruned_loss=0.0873, over 4842.00 frames. ], tot_loss[loss=0.2798, simple_loss=0.3483, pruned_loss=0.1056, over 939773.48 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:17:50,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=36993.333333333336, ans=0.09899494936611666
+2024-07-27 16:18:13,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=37046.666666666664, ans=0.2
+2024-07-27 16:18:17,359 INFO [train.py:1114] (3/4) Epoch 3, batch 7350, loss[loss=0.2499, simple_loss=0.3257, pruned_loss=0.08705, over 4636.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.348, pruned_loss=0.1054, over 939515.05 frames. ], batch size: 12, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:18:19,514 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:18:23,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=37073.333333333336, ans=0.0
+2024-07-27 16:18:26,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.58 vs. limit=22.5
+2024-07-27 16:18:29,217 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.542e+01 6.884e+01 7.906e+01 1.038e+02 1.585e+02, threshold=1.581e+02, percent-clipped=4.0
+2024-07-27 16:18:29,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=37073.333333333336, ans=0.0
+2024-07-27 16:18:32,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=37086.666666666664, ans=0.125
+2024-07-27 16:18:34,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=37086.666666666664, ans=0.2
+2024-07-27 16:18:40,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.17 vs. limit=15.0
+2024-07-27 16:18:47,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.57 vs. limit=10.0
+2024-07-27 16:18:49,933 INFO [train.py:1114] (3/4) Epoch 3, batch 7400, loss[loss=0.3371, simple_loss=0.4107, pruned_loss=0.1317, over 4690.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3481, pruned_loss=0.1059, over 940513.92 frames. ], batch size: 13, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:18:49,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=37126.666666666664, ans=0.0
+2024-07-27 16:19:07,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.58 vs. limit=15.0
+2024-07-27 16:19:15,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.20 vs. limit=12.0
+2024-07-27 16:19:22,925 INFO [train.py:1114] (3/4) Epoch 3, batch 7450, loss[loss=0.2887, simple_loss=0.3467, pruned_loss=0.1154, over 4617.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3474, pruned_loss=0.1054, over 938116.61 frames. ], batch size: 11, lr: 2.14e-02, grad_scale: 32.0
+2024-07-27 16:19:27,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37193.333333333336, ans=0.125
+2024-07-27 16:19:28,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=37206.666666666664, ans=0.0
+2024-07-27 16:19:31,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=37206.666666666664, ans=0.2
+2024-07-27 16:19:32,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=37206.666666666664, ans=0.125
+2024-07-27 16:19:34,463 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.772e+01 7.758e+01 9.808e+01 2.086e+02, threshold=1.552e+02, percent-clipped=2.0
+2024-07-27 16:19:55,861 INFO [train.py:1114] (3/4) Epoch 3, batch 7500, loss[loss=0.3649, simple_loss=0.398, pruned_loss=0.1659, over 3574.00 frames. ], tot_loss[loss=0.282, simple_loss=0.35, pruned_loss=0.107, over 936343.19 frames. ], batch size: 35, lr: 2.13e-02, grad_scale: 16.0
+2024-07-27 16:20:25,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=37313.333333333336, ans=0.025
+2024-07-27 16:20:27,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=37313.333333333336, ans=0.125
+2024-07-27 16:20:29,139 INFO [train.py:1114] (3/4) Epoch 3, batch 7550, loss[loss=0.2974, simple_loss=0.3658, pruned_loss=0.1145, over 4669.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.352, pruned_loss=0.108, over 936490.85 frames. ], batch size: 17, lr: 2.13e-02, grad_scale: 16.0
+2024-07-27 16:20:31,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=37326.666666666664, ans=0.09899494936611666
+2024-07-27 16:21:30,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=37326.666666666664, ans=0.0
+2024-07-27 16:21:33,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=37340.0, ans=0.125
+2024-07-27 16:21:36,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=37340.0, ans=0.0027521739130434777
+2024-07-27 16:21:38,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=37340.0, ans=0.125
+2024-07-27 16:21:39,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.630e+01 6.808e+01 7.761e+01 9.046e+01 1.679e+02, threshold=1.552e+02, percent-clipped=1.0
+2024-07-27 16:21:40,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.66 vs. limit=15.0
+2024-07-27 16:21:45,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=12.0
+2024-07-27 16:21:55,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37380.0, ans=0.1
+2024-07-27 16:21:59,524 INFO [train.py:1114] (3/4) Epoch 3, batch 7600, loss[loss=0.3424, simple_loss=0.401, pruned_loss=0.1419, over 4810.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3511, pruned_loss=0.1075, over 938118.01 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:22:00,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=16.76 vs. limit=15.0
+2024-07-27 16:22:01,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37393.333333333336, ans=0.1
+2024-07-27 16:22:09,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=37406.666666666664, ans=0.025
+2024-07-27 16:22:14,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37420.0, ans=0.125
+2024-07-27 16:22:15,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=37420.0, ans=0.125
+2024-07-27 16:22:15,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=37420.0, ans=0.0
+2024-07-27 16:22:30,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37433.333333333336, ans=0.1
+2024-07-27 16:22:33,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=37446.666666666664, ans=0.125
+2024-07-27 16:22:34,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.38 vs. limit=15.0
+2024-07-27 16:22:40,210 INFO [train.py:1114] (3/4) Epoch 3, batch 7650, loss[loss=0.2556, simple_loss=0.3196, pruned_loss=0.09578, over 4947.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3506, pruned_loss=0.1074, over 937122.09 frames. ], batch size: 12, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:22:47,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=37460.0, ans=0.5
+2024-07-27 16:22:49,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.48 vs. limit=6.0
+2024-07-27 16:22:59,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.77 vs. limit=6.0
+2024-07-27 16:23:00,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=37473.333333333336, ans=0.1
+2024-07-27 16:23:02,039 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.597e+01 7.183e+01 8.812e+01 1.036e+02 1.540e+02, threshold=1.762e+02, percent-clipped=0.0
+2024-07-27 16:23:33,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=37513.333333333336, ans=0.0
+2024-07-27 16:23:33,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.71 vs. limit=15.0
+2024-07-27 16:23:39,142 INFO [train.py:1114] (3/4) Epoch 3, batch 7700, loss[loss=0.2789, simple_loss=0.3542, pruned_loss=0.1018, over 4696.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3504, pruned_loss=0.1071, over 934276.21 frames. ], batch size: 13, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:24:05,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=37553.333333333336, ans=0.2
+2024-07-27 16:24:09,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=37566.666666666664, ans=0.125
+2024-07-27 16:24:13,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=37566.666666666664, ans=0.0
+2024-07-27 16:24:18,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=37580.0, ans=0.125
+2024-07-27 16:24:22,436 INFO [train.py:1114] (3/4) Epoch 3, batch 7750, loss[loss=0.3202, simple_loss=0.3898, pruned_loss=0.1253, over 4927.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3519, pruned_loss=0.1074, over 935897.20 frames. ], batch size: 14, lr: 2.13e-02, grad_scale: 32.0
+2024-07-27 16:24:46,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=37606.666666666664, ans=0.0
+2024-07-27 16:24:49,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=37606.666666666664, ans=0.125
+2024-07-27 16:24:52,946 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.584e+01 7.302e+01 8.614e+01 1.487e+02, threshold=1.460e+02, percent-clipped=0.0
+2024-07-27 16:25:08,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=15.0
+2024-07-27 16:25:13,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.14 vs. limit=15.0
+2024-07-27 16:25:18,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=37646.666666666664, ans=0.125
+2024-07-27 16:25:24,451 INFO [train.py:1114] (3/4) Epoch 3, batch 7800, loss[loss=0.274, simple_loss=0.3414, pruned_loss=0.1033, over 4672.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3508, pruned_loss=0.1064, over 937403.64 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:25:26,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.14 vs. limit=10.0
+2024-07-27 16:25:46,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=37686.666666666664, ans=0.025
+2024-07-27 16:25:46,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.59 vs. limit=15.0
+2024-07-27 16:25:58,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=37713.333333333336, ans=0.125
+2024-07-27 16:26:03,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=37726.666666666664, ans=0.2
+2024-07-27 16:26:06,467 INFO [train.py:1114] (3/4) Epoch 3, batch 7850, loss[loss=0.3156, simple_loss=0.3628, pruned_loss=0.1342, over 4538.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3505, pruned_loss=0.1066, over 936107.31 frames. ], batch size: 10, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:26:07,570 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.33 vs. limit=22.5
+2024-07-27 16:26:08,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.22 vs. limit=6.0
+2024-07-27 16:26:19,923 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.341e+01 6.634e+01 7.796e+01 9.040e+01 1.354e+02, threshold=1.559e+02, percent-clipped=0.0
+2024-07-27 16:26:38,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=37766.666666666664, ans=0.002659420289855073
+2024-07-27 16:26:44,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=37780.0, ans=0.125
+2024-07-27 16:26:51,028 INFO [train.py:1114] (3/4) Epoch 3, batch 7900, loss[loss=0.3099, simple_loss=0.3806, pruned_loss=0.1196, over 4873.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3537, pruned_loss=0.1085, over 932947.13 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:27:26,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. limit=6.0
+2024-07-27 16:27:30,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=37833.333333333336, ans=0.125
+2024-07-27 16:27:40,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=17.21 vs. limit=15.0
+2024-07-27 16:27:47,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=37846.666666666664, ans=0.2
+2024-07-27 16:27:56,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.93 vs. limit=15.0
+2024-07-27 16:27:58,712 INFO [train.py:1114] (3/4) Epoch 3, batch 7950, loss[loss=0.4056, simple_loss=0.4322, pruned_loss=0.1895, over 3335.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3526, pruned_loss=0.1077, over 935107.57 frames. ], batch size: 35, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:28:04,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.43 vs. limit=22.5
+2024-07-27 16:28:14,334 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.890e+01 6.860e+01 7.518e+01 9.206e+01 1.306e+02, threshold=1.504e+02, percent-clipped=0.0
+2024-07-27 16:28:51,962 INFO [train.py:1114] (3/4) Epoch 3, batch 8000, loss[loss=0.2584, simple_loss=0.3107, pruned_loss=0.103, over 4624.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3517, pruned_loss=0.1076, over 934783.76 frames. ], batch size: 11, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:28:52,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=37926.666666666664, ans=0.1
+2024-07-27 16:28:58,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.67 vs. limit=22.5
+2024-07-27 16:29:32,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=37940.0, ans=0.05
+2024-07-27 16:29:46,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=37953.333333333336, ans=0.07
+2024-07-27 16:29:49,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=37966.666666666664, ans=0.2
+2024-07-27 16:30:04,907 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.35 vs. limit=10.0
+2024-07-27 16:30:06,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=12.0
+2024-07-27 16:30:15,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=37980.0, ans=0.125
+2024-07-27 16:30:39,454 INFO [train.py:1114] (3/4) Epoch 3, batch 8050, loss[loss=0.3114, simple_loss=0.376, pruned_loss=0.1234, over 4805.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3518, pruned_loss=0.1073, over 934306.75 frames. ], batch size: 14, lr: 2.12e-02, grad_scale: 32.0
+2024-07-27 16:30:46,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=37993.333333333336, ans=0.125
+2024-07-27 16:30:51,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=38006.666666666664, ans=0.0
+2024-07-27 16:30:55,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38006.666666666664, ans=0.1
+2024-07-27 16:31:03,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.744e+01 7.005e+01 8.059e+01 9.966e+01 1.848e+02, threshold=1.612e+02, percent-clipped=3.0
+2024-07-27 16:31:40,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=38020.0, ans=0.04949747468305833
+2024-07-27 16:32:24,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. limit=15.0
+2024-07-27 16:32:37,832 INFO [train.py:1114] (3/4) Epoch 3, batch 8100, loss[loss=0.3147, simple_loss=0.3818, pruned_loss=0.1238, over 4802.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3516, pruned_loss=0.1071, over 933560.09 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:32:38,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38060.0, ans=0.125
+2024-07-27 16:32:54,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=38060.0, ans=0.07
+2024-07-27 16:33:03,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.71 vs. limit=15.0
+2024-07-27 16:33:03,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.97 vs. limit=15.0
+2024-07-27 16:33:09,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=38073.333333333336, ans=0.125
+2024-07-27 16:33:10,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=38073.333333333336, ans=0.002592753623188406
+2024-07-27 16:33:10,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.67 vs. limit=15.0
+2024-07-27 16:33:11,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.11 vs. limit=15.0
+2024-07-27 16:33:13,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=38073.333333333336, ans=0.2
+2024-07-27 16:33:29,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=38100.0, ans=0.125
+2024-07-27 16:33:32,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.45 vs. limit=15.0
+2024-07-27 16:33:36,422 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.92 vs. limit=10.0
+2024-07-27 16:33:47,672 INFO [train.py:1114] (3/4) Epoch 3, batch 8150, loss[loss=0.2466, simple_loss=0.3433, pruned_loss=0.07495, over 4789.00 frames. ], tot_loss[loss=0.2833, simple_loss=0.3516, pruned_loss=0.1075, over 937154.63 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:33:48,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=38126.666666666664, ans=0.125
+2024-07-27 16:34:11,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.08 vs. limit=12.0
+2024-07-27 16:34:17,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.48 vs. limit=15.0
+2024-07-27 16:34:19,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=38140.0, ans=0.5
+2024-07-27 16:34:19,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.808e+01 6.778e+01 7.869e+01 9.669e+01 1.901e+02, threshold=1.574e+02, percent-clipped=1.0
+2024-07-27 16:34:25,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38153.333333333336, ans=0.1
+2024-07-27 16:34:38,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=38166.666666666664, ans=0.5
+2024-07-27 16:34:39,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=38166.666666666664, ans=0.002572463768115943
+2024-07-27 16:34:49,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=38180.0, ans=0.125
+2024-07-27 16:34:53,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.72 vs. limit=5.0
+2024-07-27 16:34:54,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=38180.0, ans=0.125
+2024-07-27 16:34:56,477 INFO [train.py:1114] (3/4) Epoch 3, batch 8200, loss[loss=0.3728, simple_loss=0.417, pruned_loss=0.1643, over 4793.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.351, pruned_loss=0.107, over 938361.48 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 32.0
+2024-07-27 16:35:25,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=38233.333333333336, ans=0.125
+2024-07-27 16:35:26,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=38233.333333333336, ans=0.2
+2024-07-27 16:35:27,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38233.333333333336, ans=0.125
+2024-07-27 16:35:34,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=38233.333333333336, ans=0.2
+2024-07-27 16:35:45,647 INFO [train.py:1114] (3/4) Epoch 3, batch 8250, loss[loss=0.2972, simple_loss=0.3545, pruned_loss=0.1199, over 4897.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3517, pruned_loss=0.1071, over 938473.38 frames. ], batch size: 13, lr: 2.11e-02, grad_scale: 16.0
+2024-07-27 16:35:50,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38260.0, ans=0.0
+2024-07-27 16:35:58,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=38273.333333333336, ans=0.0
+2024-07-27 16:36:04,215 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.777e+01 7.463e+01 9.374e+01 1.482e+02, threshold=1.493e+02, percent-clipped=0.0
+2024-07-27 16:36:04,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.27 vs. limit=22.5
+2024-07-27 16:36:14,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=38300.0, ans=0.125
+2024-07-27 16:36:24,688 INFO [train.py:1114] (3/4) Epoch 3, batch 8300, loss[loss=0.275, simple_loss=0.37, pruned_loss=0.08995, over 4905.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.351, pruned_loss=0.1059, over 938179.63 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 16.0
+2024-07-27 16:36:27,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=38326.666666666664, ans=0.0025376811594202903
+2024-07-27 16:36:27,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=38326.666666666664, ans=0.2
+2024-07-27 16:36:30,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0
+2024-07-27 16:36:43,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38353.333333333336, ans=0.1
+2024-07-27 16:36:46,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=38366.666666666664, ans=0.125
+2024-07-27 16:36:49,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.85 vs. limit=10.0
+2024-07-27 16:36:50,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=38380.0, ans=0.0
+2024-07-27 16:36:53,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=38380.0, ans=0.125
+2024-07-27 16:36:56,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=38380.0, ans=0.125
+2024-07-27 16:37:01,102 INFO [train.py:1114] (3/4) Epoch 3, batch 8350, loss[loss=0.2911, simple_loss=0.3657, pruned_loss=0.1082, over 4794.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3508, pruned_loss=0.106, over 941378.77 frames. ], batch size: 15, lr: 2.11e-02, grad_scale: 16.0
+2024-07-27 16:37:01,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=38393.333333333336, ans=0.125
+2024-07-27 16:37:01,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=38393.333333333336, ans=0.125
+2024-07-27 16:38:23,648 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.458e+01 6.838e+01 7.813e+01 8.986e+01 1.214e+02, threshold=1.563e+02, percent-clipped=0.0
+2024-07-27 16:38:47,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=38433.333333333336, ans=0.2
+2024-07-27 16:39:00,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0
+2024-07-27 16:39:01,315 INFO [train.py:1114] (3/4) Epoch 3, batch 8400, loss[loss=0.2353, simple_loss=0.3088, pruned_loss=0.08093, over 4785.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3502, pruned_loss=0.1058, over 940259.07 frames. ], batch size: 12, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:39:11,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=38473.333333333336, ans=0.95
+2024-07-27 16:39:15,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=38473.333333333336, ans=0.125
+2024-07-27 16:39:18,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=38486.666666666664, ans=0.0
+2024-07-27 16:39:18,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.22 vs. limit=15.0
+2024-07-27 16:39:33,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=38513.333333333336, ans=0.1
+2024-07-27 16:39:35,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=38513.333333333336, ans=0.0
+2024-07-27 16:39:46,495 INFO [train.py:1114] (3/4) Epoch 3, batch 8450, loss[loss=0.2627, simple_loss=0.3412, pruned_loss=0.0921, over 4812.00 frames. ], tot_loss[loss=0.281, simple_loss=0.3503, pruned_loss=0.1059, over 939132.02 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:40:00,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.33 vs. limit=15.0
+2024-07-27 16:40:01,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=15.0
+2024-07-27 16:40:03,726 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.374e+01 6.960e+01 7.996e+01 9.204e+01 1.346e+02, threshold=1.599e+02, percent-clipped=0.0
+2024-07-27 16:40:07,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.16 vs. limit=22.5
+2024-07-27 16:40:08,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38553.333333333336, ans=0.1
+2024-07-27 16:40:10,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=38566.666666666664, ans=0.0
+2024-07-27 16:40:10,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=38566.666666666664, ans=0.125
+2024-07-27 16:40:10,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=38566.666666666664, ans=0.1
+2024-07-27 16:40:21,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=38580.0, ans=0.125
+2024-07-27 16:40:34,577 INFO [train.py:1114] (3/4) Epoch 3, batch 8500, loss[loss=0.2223, simple_loss=0.2939, pruned_loss=0.0754, over 4610.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3497, pruned_loss=0.1056, over 938797.26 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:40:36,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=12.0
+2024-07-27 16:40:50,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.84 vs. limit=5.0
+2024-07-27 16:40:51,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38620.0, ans=0.1
+2024-07-27 16:40:54,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=38620.0, ans=0.05
+2024-07-27 16:41:22,945 INFO [train.py:1114] (3/4) Epoch 3, batch 8550, loss[loss=0.2253, simple_loss=0.293, pruned_loss=0.07879, over 4805.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3493, pruned_loss=0.1058, over 939543.33 frames. ], batch size: 11, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:41:25,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=38660.0, ans=0.07
+2024-07-27 16:41:59,820 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.027e+01 6.868e+01 7.768e+01 9.567e+01 1.448e+02, threshold=1.554e+02, percent-clipped=0.0
+2024-07-27 16:42:04,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.94 vs. limit=12.0
+2024-07-27 16:42:10,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=38700.0, ans=0.2
+2024-07-27 16:42:40,833 INFO [train.py:1114] (3/4) Epoch 3, batch 8600, loss[loss=0.25, simple_loss=0.3381, pruned_loss=0.08097, over 4808.00 frames. ], tot_loss[loss=0.2801, simple_loss=0.3487, pruned_loss=0.1057, over 939492.48 frames. ], batch size: 15, lr: 2.10e-02, grad_scale: 32.0
+2024-07-27 16:42:41,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.53 vs. limit=15.0
+2024-07-27 16:42:46,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=38726.666666666664, ans=0.125
+2024-07-27 16:42:50,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=38740.0, ans=0.125
+2024-07-27 16:43:01,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=38766.666666666664, ans=0.0024420289855072476
+2024-07-27 16:43:01,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=38766.666666666664, ans=0.125
+2024-07-27 16:43:13,336 INFO [train.py:1114] (3/4) Epoch 3, batch 8650, loss[loss=0.2607, simple_loss=0.3456, pruned_loss=0.08791, over 4917.00 frames. ], tot_loss[loss=0.2788, simple_loss=0.3478, pruned_loss=0.1049, over 940726.75 frames. ], batch size: 15, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:44:00,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.909e+01 7.732e+01 9.254e+01 1.585e+02, threshold=1.546e+02, percent-clipped=1.0
+2024-07-27 16:44:27,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=38860.0, ans=0.125
+2024-07-27 16:44:27,632 INFO [train.py:1114] (3/4) Epoch 3, batch 8700, loss[loss=0.3484, simple_loss=0.3948, pruned_loss=0.151, over 4763.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3491, pruned_loss=0.1058, over 937343.55 frames. ], batch size: 13, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:44:29,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=38860.0, ans=0.1
+2024-07-27 16:44:31,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.05 vs. limit=6.0
+2024-07-27 16:44:42,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=38886.666666666664, ans=0.125
+2024-07-27 16:44:42,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=38886.666666666664, ans=0.05
+2024-07-27 16:45:01,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=38886.666666666664, ans=0.125
+2024-07-27 16:45:15,414 INFO [train.py:1114] (3/4) Epoch 3, batch 8750, loss[loss=0.2989, simple_loss=0.3563, pruned_loss=0.1208, over 4667.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3475, pruned_loss=0.105, over 936100.80 frames. ], batch size: 15, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:45:22,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=38940.0, ans=0.0
+2024-07-27 16:45:28,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=38940.0, ans=0.002404347826086956
+2024-07-27 16:45:35,869 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.390e+01 6.721e+01 7.628e+01 9.057e+01 1.548e+02, threshold=1.526e+02, percent-clipped=1.0
+2024-07-27 16:45:39,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=38953.333333333336, ans=0.09899494936611666
+2024-07-27 16:45:39,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=38953.333333333336, ans=0.002401449275362319
+2024-07-27 16:45:55,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.89 vs. limit=15.0
+2024-07-27 16:45:57,591 INFO [train.py:1114] (3/4) Epoch 3, batch 8800, loss[loss=0.2777, simple_loss=0.3535, pruned_loss=0.101, over 4934.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3475, pruned_loss=0.1053, over 937189.69 frames. ], batch size: 14, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:46:08,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=38993.333333333336, ans=0.2
+2024-07-27 16:46:36,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=39006.666666666664, ans=0.0023898550724637682
+2024-07-27 16:46:41,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=39006.666666666664, ans=0.015
+2024-07-27 16:46:44,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5
+2024-07-27 16:46:44,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=39020.0, ans=0.002386956521739131
+2024-07-27 16:46:45,072 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.52 vs. limit=15.0
+2024-07-27 16:46:46,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.98 vs. limit=15.0
+2024-07-27 16:47:18,772 INFO [train.py:1114] (3/4) Epoch 3, batch 8850, loss[loss=0.2855, simple_loss=0.3487, pruned_loss=0.1111, over 4383.00 frames. ], tot_loss[loss=0.2792, simple_loss=0.3474, pruned_loss=0.1055, over 931446.78 frames. ], batch size: 21, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:47:18,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=39060.0, ans=0.125
+2024-07-27 16:47:24,095 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=5.234e-03
+2024-07-27 16:47:27,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39073.333333333336, ans=0.1
+2024-07-27 16:47:30,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=39073.333333333336, ans=0.2
+2024-07-27 16:47:32,060 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.718e+01 6.978e+01 8.333e+01 9.846e+01 2.201e+02, threshold=1.667e+02, percent-clipped=2.0
+2024-07-27 16:48:02,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=39086.666666666664, ans=0.2
+2024-07-27 16:48:08,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=39100.0, ans=0.0
+2024-07-27 16:48:09,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39100.0, ans=0.0
+2024-07-27 16:48:16,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.12 vs. limit=15.0
+2024-07-27 16:48:17,151 INFO [train.py:1114] (3/4) Epoch 3, batch 8900, loss[loss=0.2538, simple_loss=0.3185, pruned_loss=0.09459, over 4944.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3485, pruned_loss=0.1061, over 929716.76 frames. ], batch size: 12, lr: 2.09e-02, grad_scale: 32.0
+2024-07-27 16:48:17,545 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.57 vs. limit=15.0
+2024-07-27 16:48:24,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39140.0, ans=0.1
+2024-07-27 16:48:34,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=39153.333333333336, ans=0.0
+2024-07-27 16:48:39,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39166.666666666664, ans=0.1
+2024-07-27 16:48:55,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.70 vs. limit=22.5
+2024-07-27 16:48:56,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.45 vs. limit=15.0
+2024-07-27 16:49:00,625 INFO [train.py:1114] (3/4) Epoch 3, batch 8950, loss[loss=0.2432, simple_loss=0.3314, pruned_loss=0.07747, over 4504.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3491, pruned_loss=0.1058, over 930594.89 frames. ], batch size: 21, lr: 2.08e-02, grad_scale: 32.0
+2024-07-27 16:49:00,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39193.333333333336, ans=0.1
+2024-07-27 16:49:06,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=39193.333333333336, ans=0.125
+2024-07-27 16:49:15,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39206.666666666664, ans=0.0
+2024-07-27 16:49:23,808 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.972e+01 6.809e+01 7.609e+01 8.972e+01 1.358e+02, threshold=1.522e+02, percent-clipped=0.0
+2024-07-27 16:49:29,590 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:49:41,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=39233.333333333336, ans=0.125
+2024-07-27 16:49:51,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.64 vs. limit=22.5
+2024-07-27 16:49:51,640 INFO [train.py:1114] (3/4) Epoch 3, batch 9000, loss[loss=0.248, simple_loss=0.3128, pruned_loss=0.09155, over 4653.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3486, pruned_loss=0.1054, over 933655.98 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0
+2024-07-27 16:49:51,640 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-27 16:50:05,982 INFO [train.py:1146] (3/4) Epoch 3, validation: loss=0.2254, simple_loss=0.3252, pruned_loss=0.06281, over 944034.00 frames.
+2024-07-27 16:50:05,982 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-27 16:50:06,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.49 vs. limit=22.5
+2024-07-27 16:50:08,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=39260.0, ans=15.0
+2024-07-27 16:50:18,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39273.333333333336, ans=0.125
+2024-07-27 16:50:19,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=39273.333333333336, ans=0.125
+2024-07-27 16:50:38,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=39300.0, ans=0.125
+2024-07-27 16:50:39,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=39300.0, ans=0.0
+2024-07-27 16:50:46,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=39313.333333333336, ans=0.125
+2024-07-27 16:50:49,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=39313.333333333336, ans=0.0
+2024-07-27 16:50:52,321 INFO [train.py:1114] (3/4) Epoch 3, batch 9050, loss[loss=0.2244, simple_loss=0.2952, pruned_loss=0.07683, over 4516.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3464, pruned_loss=0.1042, over 934539.36 frames. ], batch size: 10, lr: 2.08e-02, grad_scale: 32.0
+2024-07-27 16:51:04,888 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.807e+01 7.856e+01 8.861e+01 3.440e+02, threshold=1.571e+02, percent-clipped=1.0
+2024-07-27 16:51:09,548 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:51:11,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=39366.666666666664, ans=0.125
+2024-07-27 16:51:12,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=39366.666666666664, ans=0.05
+2024-07-27 16:51:45,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=39380.0, ans=0.125
+2024-07-27 16:51:46,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0
+2024-07-27 16:52:03,151 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 16:52:04,334 INFO [train.py:1114] (3/4) Epoch 3, batch 9100, loss[loss=0.3098, simple_loss=0.387, pruned_loss=0.1163, over 4931.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3475, pruned_loss=0.1044, over 937489.33 frames. ], batch size: 14, lr: 2.08e-02, grad_scale: 16.0
+2024-07-27 16:52:14,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=39393.333333333336, ans=0.2
+2024-07-27 16:52:14,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=39393.333333333336, ans=0.125
+2024-07-27 16:52:37,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.60 vs. limit=22.5
+2024-07-27 16:52:40,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=12.0
+2024-07-27 16:52:46,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=39446.666666666664, ans=0.1
+2024-07-27 16:53:04,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=39446.666666666664, ans=0.125
+2024-07-27 16:53:06,169 INFO [train.py:1114] (3/4) Epoch 3, batch 9150, loss[loss=0.2927, simple_loss=0.3543, pruned_loss=0.1155, over 4804.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3497, pruned_loss=0.1056, over 936104.79 frames. ], batch size: 14, lr: 2.08e-02, grad_scale: 16.0
+2024-07-27 16:53:30,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39473.333333333336, ans=0.0
+2024-07-27 16:53:32,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs.
limit=15.0 +2024-07-27 16:53:33,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=39473.333333333336, ans=0.0 +2024-07-27 16:53:38,852 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.526e+01 6.919e+01 8.427e+01 9.572e+01 1.552e+02, threshold=1.685e+02, percent-clipped=0.0 +2024-07-27 16:53:39,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=39486.666666666664, ans=0.125 +2024-07-27 16:54:03,875 INFO [train.py:1114] (3/4) Epoch 3, batch 9200, loss[loss=0.3206, simple_loss=0.3698, pruned_loss=0.1357, over 4845.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3483, pruned_loss=0.1052, over 937888.93 frames. ], batch size: 12, lr: 2.08e-02, grad_scale: 32.0 +2024-07-27 16:54:06,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=39526.666666666664, ans=0.0 +2024-07-27 16:54:17,442 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 16:54:25,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.07 vs. limit=15.0 +2024-07-27 16:54:31,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=39580.0, ans=0.125 +2024-07-27 16:54:36,156 INFO [train.py:1114] (3/4) Epoch 3, batch 9250, loss[loss=0.2928, simple_loss=0.3701, pruned_loss=0.1078, over 4631.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3465, pruned_loss=0.1037, over 938536.18 frames. ], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:54:38,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=39593.333333333336, ans=0.025 +2024-07-27 16:54:39,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39593.333333333336, ans=0.1 +2024-07-27 16:54:41,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=39593.333333333336, ans=0.0 +2024-07-27 16:54:44,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=39606.666666666664, ans=0.0 +2024-07-27 16:54:49,606 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.05 vs. limit=6.0 +2024-07-27 16:54:49,930 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.796e+01 6.391e+01 6.941e+01 8.054e+01 1.289e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 16:54:55,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=39620.0, ans=0.125 +2024-07-27 16:54:55,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39620.0, ans=0.125 +2024-07-27 16:55:10,139 INFO [train.py:1114] (3/4) Epoch 3, batch 9300, loss[loss=0.278, simple_loss=0.3479, pruned_loss=0.104, over 4778.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3463, pruned_loss=0.1037, over 938263.94 frames. 
], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:55:10,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=39660.0, ans=0.0 +2024-07-27 16:55:16,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39673.333333333336, ans=0.125 +2024-07-27 16:55:18,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=39673.333333333336, ans=0.0 +2024-07-27 16:55:22,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.13 vs. limit=15.0 +2024-07-27 16:56:03,122 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-07-27 16:56:05,258 INFO [train.py:1114] (3/4) Epoch 3, batch 9350, loss[loss=0.2543, simple_loss=0.314, pruned_loss=0.09726, over 4818.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3477, pruned_loss=0.1042, over 935400.26 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:10,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=39726.666666666664, ans=0.2 +2024-07-27 16:56:12,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.78 vs. limit=15.0 +2024-07-27 16:56:14,063 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.80 vs. limit=12.0 +2024-07-27 16:56:20,302 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.575e+01 6.744e+01 7.337e+01 8.957e+01 1.228e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 16:56:23,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=39753.333333333336, ans=0.0022275362318840575 +2024-07-27 16:56:23,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=39753.333333333336, ans=0.0022275362318840575 +2024-07-27 16:56:27,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=39766.666666666664, ans=0.125 +2024-07-27 16:56:32,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-27 16:56:37,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=39780.0, ans=0.1 +2024-07-27 16:56:39,563 INFO [train.py:1114] (3/4) Epoch 3, batch 9400, loss[loss=0.2481, simple_loss=0.3253, pruned_loss=0.08547, over 4693.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3483, pruned_loss=0.1048, over 933236.75 frames. 
], batch size: 13, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:56:41,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39793.333333333336, ans=0.125 +2024-07-27 16:56:46,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39806.666666666664, ans=0.1 +2024-07-27 16:56:46,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.15 vs. limit=22.5 +2024-07-27 16:56:54,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.36 vs. limit=6.0 +2024-07-27 16:56:55,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.62 vs. limit=15.0 +2024-07-27 16:57:04,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.50 vs. limit=15.0 +2024-07-27 16:57:11,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=39860.0, ans=0.125 +2024-07-27 16:57:11,998 INFO [train.py:1114] (3/4) Epoch 3, batch 9450, loss[loss=0.2526, simple_loss=0.3189, pruned_loss=0.09319, over 4807.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3487, pruned_loss=0.1051, over 932812.92 frames. ], batch size: 11, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:57:22,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=39873.333333333336, ans=0.125 +2024-07-27 16:57:25,848 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.577e+01 6.734e+01 7.503e+01 8.983e+01 1.272e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 16:57:32,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39886.666666666664, ans=0.1 +2024-07-27 16:57:38,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.84 vs. limit=10.0 +2024-07-27 16:57:39,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=39900.0, ans=0.2 +2024-07-27 16:57:46,046 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.92 vs. limit=15.0 +2024-07-27 16:57:46,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.47 vs. limit=5.0 +2024-07-27 16:58:03,384 INFO [train.py:1114] (3/4) Epoch 3, batch 9500, loss[loss=0.2978, simple_loss=0.357, pruned_loss=0.1193, over 4721.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3482, pruned_loss=0.104, over 934800.11 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:58:19,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=39926.666666666664, ans=0.1 +2024-07-27 16:58:23,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. 
limit=10.0 +2024-07-27 16:58:23,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.37 vs. limit=12.0 +2024-07-27 16:59:09,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. limit=6.0 +2024-07-27 16:59:12,966 INFO [train.py:1114] (3/4) Epoch 3, batch 9550, loss[loss=0.2567, simple_loss=0.3233, pruned_loss=0.09502, over 4773.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3494, pruned_loss=0.1052, over 931703.77 frames. ], batch size: 12, lr: 2.07e-02, grad_scale: 32.0 +2024-07-27 16:59:14,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=39993.333333333336, ans=0.125 +2024-07-27 16:59:24,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40006.666666666664, ans=0.1 +2024-07-27 16:59:26,039 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.187e+01 6.649e+01 7.565e+01 8.321e+01 1.560e+02, threshold=1.513e+02, percent-clipped=2.0 +2024-07-27 16:59:32,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=40033.333333333336, ans=0.125 +2024-07-27 16:59:35,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40033.333333333336, ans=0.125 +2024-07-27 16:59:41,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=40046.666666666664, ans=0.125 +2024-07-27 16:59:44,504 INFO [train.py:1114] (3/4) Epoch 3, batch 9600, loss[loss=0.3701, simple_loss=0.4023, pruned_loss=0.1689, over 3552.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.349, pruned_loss=0.1048, over 930989.64 frames. ], batch size: 35, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 16:59:51,657 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=12.0 +2024-07-27 17:00:24,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=40113.333333333336, ans=0.125 +2024-07-27 17:00:26,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40113.333333333336, ans=0.125 +2024-07-27 17:00:26,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=40113.333333333336, ans=0.002149275362318839 +2024-07-27 17:00:30,247 INFO [train.py:1114] (3/4) Epoch 3, batch 9650, loss[loss=0.3, simple_loss=0.3727, pruned_loss=0.1137, over 4837.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3488, pruned_loss=0.1052, over 926812.99 frames. 
], batch size: 16, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:00:31,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40126.666666666664, ans=0.1 +2024-07-27 17:00:34,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40126.666666666664, ans=0.125 +2024-07-27 17:00:35,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=40126.666666666664, ans=0.0 +2024-07-27 17:00:44,535 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.272e+01 6.641e+01 7.549e+01 8.923e+01 1.361e+02, threshold=1.510e+02, percent-clipped=0.0 +2024-07-27 17:00:45,499 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:00:48,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=40153.333333333336, ans=0.125 +2024-07-27 17:00:55,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=40166.666666666664, ans=0.125 +2024-07-27 17:00:58,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40180.0, ans=0.1 +2024-07-27 17:01:04,421 INFO [train.py:1114] (3/4) Epoch 3, batch 9700, loss[loss=0.3584, simple_loss=0.4175, pruned_loss=0.1496, over 4137.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3497, pruned_loss=0.1059, over 924803.41 frames. ], batch size: 25, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:05,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=40193.333333333336, ans=0.025 +2024-07-27 17:01:32,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=9.51 vs. limit=15.0 +2024-07-27 17:01:34,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=40246.666666666664, ans=0.0 +2024-07-27 17:01:39,654 INFO [train.py:1114] (3/4) Epoch 3, batch 9750, loss[loss=0.2902, simple_loss=0.352, pruned_loss=0.1142, over 4682.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3506, pruned_loss=0.1069, over 925358.79 frames. ], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:01:43,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.69 vs. 
limit=6.0 +2024-07-27 17:01:44,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=40260.0, ans=0.125 +2024-07-27 17:01:53,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=40286.666666666664, ans=0.125 +2024-07-27 17:01:53,660 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.325e+01 6.567e+01 7.224e+01 8.540e+01 1.142e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 17:01:53,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=40286.666666666664, ans=0.125 +2024-07-27 17:01:54,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=40286.666666666664, ans=0.1 +2024-07-27 17:02:01,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40300.0, ans=0.1 +2024-07-27 17:02:02,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0 +2024-07-27 17:02:12,030 INFO [train.py:1114] (3/4) Epoch 3, batch 9800, loss[loss=0.2409, simple_loss=0.3199, pruned_loss=0.08088, over 4700.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3488, pruned_loss=0.1061, over 924739.01 frames. ], batch size: 12, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:13,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40326.666666666664, ans=0.125 +2024-07-27 17:02:40,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.30 vs. limit=22.5 +2024-07-27 17:02:43,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.05 vs. limit=15.0 +2024-07-27 17:02:45,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=40380.0, ans=0.125 +2024-07-27 17:02:45,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=18.03 vs. limit=15.0 +2024-07-27 17:02:50,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=40380.0, ans=0.5 +2024-07-27 17:02:51,238 INFO [train.py:1114] (3/4) Epoch 3, batch 9850, loss[loss=0.2804, simple_loss=0.3626, pruned_loss=0.09908, over 4911.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3486, pruned_loss=0.1058, over 927255.44 frames. 
], batch size: 15, lr: 2.06e-02, grad_scale: 32.0 +2024-07-27 17:02:56,813 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:03:09,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=40406.666666666664, ans=0.2 +2024-07-27 17:03:10,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=40406.666666666664, ans=0.0 +2024-07-27 17:03:11,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=40406.666666666664, ans=0.0 +2024-07-27 17:03:15,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.644e+01 7.357e+01 1.003e+02 1.564e+02, threshold=1.471e+02, percent-clipped=2.0 +2024-07-27 17:03:15,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=40420.0, ans=0.125 +2024-07-27 17:03:20,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.78 vs. limit=15.0 +2024-07-27 17:03:29,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=7.20 vs. limit=15.0 +2024-07-27 17:03:33,889 INFO [train.py:1114] (3/4) Epoch 3, batch 9900, loss[loss=0.316, simple_loss=0.3864, pruned_loss=0.1228, over 4842.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3496, pruned_loss=0.1064, over 926094.43 frames. ], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:03:37,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.84 vs. limit=22.5 +2024-07-27 17:03:47,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=40473.333333333336, ans=0.002071014492753624 +2024-07-27 17:03:56,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=40473.333333333336, ans=0.125 +2024-07-27 17:04:13,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.63 vs. limit=15.0 +2024-07-27 17:04:18,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=40513.333333333336, ans=0.125 +2024-07-27 17:04:22,286 INFO [train.py:1114] (3/4) Epoch 3, batch 9950, loss[loss=0.2642, simple_loss=0.3464, pruned_loss=0.09097, over 4519.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3497, pruned_loss=0.1065, over 928579.64 frames. 
], batch size: 10, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:04:33,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=40540.0, ans=0.125 +2024-07-27 17:04:42,172 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.432e+01 7.065e+01 7.952e+01 9.840e+01 1.527e+02, threshold=1.590e+02, percent-clipped=1.0 +2024-07-27 17:04:42,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=40553.333333333336, ans=0.125 +2024-07-27 17:04:43,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.81 vs. limit=12.0 +2024-07-27 17:04:44,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=40553.333333333336, ans=0.002053623188405797 +2024-07-27 17:04:45,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.84 vs. limit=22.5 +2024-07-27 17:04:45,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=40553.333333333336, ans=0.002053623188405797 +2024-07-27 17:04:56,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.14 vs. limit=15.0 +2024-07-27 17:05:00,004 INFO [train.py:1114] (3/4) Epoch 3, batch 10000, loss[loss=0.2821, simple_loss=0.3526, pruned_loss=0.1058, over 4638.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3522, pruned_loss=0.1071, over 926350.07 frames. ], batch size: 16, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:05,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=40593.333333333336, ans=0.025 +2024-07-27 17:05:19,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=40633.333333333336, ans=0.0020362318840579704 +2024-07-27 17:05:32,232 INFO [train.py:1114] (3/4) Epoch 3, batch 10050, loss[loss=0.3577, simple_loss=0.4039, pruned_loss=0.1558, over 3405.00 frames. ], tot_loss[loss=0.287, simple_loss=0.3557, pruned_loss=0.1091, over 914531.96 frames. ], batch size: 35, lr: 2.05e-02, grad_scale: 32.0 +2024-07-27 17:05:33,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=40660.0, ans=0.2 +2024-07-27 17:05:36,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=13.35 vs. limit=15.0 +2024-07-27 17:05:46,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=40686.666666666664, ans=10.0 +2024-07-27 17:05:47,410 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.930e+01 7.073e+01 7.900e+01 8.546e+01 1.194e+02, threshold=1.580e+02, percent-clipped=0.0 +2024-07-27 17:05:49,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.68 vs. 
limit=15.0 +2024-07-27 17:05:59,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=40700.0, ans=0.1 +2024-07-27 17:06:02,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=40713.333333333336, ans=0.125 +2024-07-27 17:06:04,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.84 vs. limit=15.0 +2024-07-27 17:06:07,147 INFO [train.py:1114] (3/4) Epoch 3, batch 10100, loss[loss=0.3484, simple_loss=0.3949, pruned_loss=0.151, over 3148.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3631, pruned_loss=0.1179, over 860418.63 frames. ], batch size: 36, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:13,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=40740.0, ans=0.125 +2024-07-27 17:06:14,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=40740.0, ans=0.0020130434782608692 +2024-07-27 17:06:17,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=40740.0, ans=0.2 +2024-07-27 17:06:18,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=40740.0, ans=0.2 +2024-07-27 17:06:18,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=40740.0, ans=0.0020130434782608692 +2024-07-27 17:06:25,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=40753.333333333336, ans=0.05 +2024-07-27 17:06:26,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=40753.333333333336, ans=0.125 +2024-07-27 17:06:29,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=40766.666666666664, ans=0.125 +2024-07-27 17:06:29,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0 +2024-07-27 17:06:41,349 INFO [train.py:1114] (3/4) Epoch 3, batch 10150, loss[loss=0.2977, simple_loss=0.3474, pruned_loss=0.124, over 3533.00 frames. ], tot_loss[loss=0.3078, simple_loss=0.3677, pruned_loss=0.124, over 818396.56 frames. ], batch size: 35, lr: 2.05e-02, grad_scale: 16.0 +2024-07-27 17:06:42,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=15.0 +2024-07-27 17:06:42,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. 
limit=6.0 +2024-07-27 17:07:02,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40806.666666666664, ans=0.1 +2024-07-27 17:07:05,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=40806.666666666664, ans=0.2 +2024-07-27 17:07:09,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.948e+01 6.999e+01 7.537e+01 8.281e+01 1.738e+02, threshold=1.507e+02, percent-clipped=1.0 +2024-07-27 17:07:09,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=40820.0, ans=0.125 +2024-07-27 17:07:10,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=40820.0, ans=0.2 +2024-07-27 17:07:15,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=40833.333333333336, ans=0.07 +2024-07-27 17:07:24,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=40833.333333333336, ans=0.125 +2024-07-27 17:07:32,450 INFO [train.py:1114] (3/4) Epoch 3, batch 10200, loss[loss=0.3742, simple_loss=0.3904, pruned_loss=0.179, over 3337.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3722, pruned_loss=0.1299, over 787879.16 frames. ], batch size: 35, lr: 2.04e-02, grad_scale: 16.0 +2024-07-27 17:07:35,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.11 vs. limit=15.0 +2024-07-27 17:07:39,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.91 vs. limit=15.0 +2024-07-27 17:08:50,764 INFO [train.py:1114] (3/4) Epoch 4, batch 0, loss[loss=0.2385, simple_loss=0.3154, pruned_loss=0.08083, over 4851.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3154, pruned_loss=0.08083, over 4851.00 frames. ], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:08:50,764 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 17:09:02,630 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2303, simple_loss=0.3319, pruned_loss=0.06433, over 944034.00 frames. +2024-07-27 17:09:12,332 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 17:09:26,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.82 vs. 
limit=15.0 +2024-07-27 17:09:30,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=40917.333333333336, ans=0.125 +2024-07-27 17:09:33,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40930.666666666664, ans=0.1 +2024-07-27 17:09:38,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=40930.666666666664, ans=0.0019715942028985505 +2024-07-27 17:09:39,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=40930.666666666664, ans=0.025 +2024-07-27 17:09:55,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=40944.0, ans=0.125 +2024-07-27 17:09:56,710 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.703e+01 7.240e+01 7.919e+01 1.564e+02, threshold=1.448e+02, percent-clipped=1.0 +2024-07-27 17:09:57,433 INFO [train.py:1114] (3/4) Epoch 4, batch 50, loss[loss=0.2924, simple_loss=0.3517, pruned_loss=0.1166, over 4606.00 frames. ], tot_loss[loss=0.2877, simple_loss=0.3565, pruned_loss=0.1095, over 206519.76 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:09:59,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.85 vs. limit=15.0 +2024-07-27 17:10:05,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=40970.666666666664, ans=0.1 +2024-07-27 17:10:09,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.71 vs. limit=22.5 +2024-07-27 17:10:11,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=40984.0, ans=0.125 +2024-07-27 17:10:11,724 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:10:24,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.40 vs. limit=22.5 +2024-07-27 17:10:25,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41010.666666666664, ans=0.1 +2024-07-27 17:10:31,383 INFO [train.py:1114] (3/4) Epoch 4, batch 100, loss[loss=0.3051, simple_loss=0.3728, pruned_loss=0.1187, over 4642.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.3524, pruned_loss=0.1055, over 365440.83 frames. 
], batch size: 12, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:10:31,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41024.0, ans=0.125 +2024-07-27 17:10:32,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=41024.0, ans=0.0 +2024-07-27 17:10:34,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=41024.0, ans=0.125 +2024-07-27 17:10:35,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=41024.0, ans=0.0 +2024-07-27 17:10:37,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=41037.333333333336, ans=0.0 +2024-07-27 17:11:01,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.90 vs. limit=6.0 +2024-07-27 17:11:04,705 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.318e+01 6.667e+01 8.145e+01 9.581e+01 1.407e+02, threshold=1.629e+02, percent-clipped=0.0 +2024-07-27 17:15:47,225 INFO [train.py:1114] (3/4) Epoch 4, batch 150, loss[loss=0.224, simple_loss=0.295, pruned_loss=0.07646, over 4623.00 frames. ], tot_loss[loss=0.2767, simple_loss=0.3478, pruned_loss=0.1028, over 494018.02 frames. ], batch size: 11, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:05,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41090.666666666664, ans=0.1 +2024-07-27 17:16:05,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=41104.0, ans=0.125 +2024-07-27 17:16:06,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=41104.0, ans=0.015 +2024-07-27 17:16:11,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.19 vs. limit=22.5 +2024-07-27 17:16:15,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=41117.333333333336, ans=0.2 +2024-07-27 17:16:18,418 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=8.213e+00 +2024-07-27 17:16:29,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=41144.0, ans=0.125 +2024-07-27 17:16:35,952 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:16:36,409 INFO [train.py:1114] (3/4) Epoch 4, batch 200, loss[loss=0.3185, simple_loss=0.3808, pruned_loss=0.1281, over 4456.00 frames. ], tot_loss[loss=0.2733, simple_loss=0.3439, pruned_loss=0.1013, over 593173.18 frames. ], batch size: 21, lr: 1.91e-02, grad_scale: 32.0 +2024-07-27 17:16:38,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=41157.333333333336, ans=0.125 +2024-07-27 17:16:47,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.40 vs. 
limit=15.0 +2024-07-27 17:16:51,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=41184.0, ans=0.0 +2024-07-27 17:16:53,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=41184.0, ans=0.2 +2024-07-27 17:16:58,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=41197.333333333336, ans=0.1 +2024-07-27 17:17:09,101 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.496e+01 6.403e+01 7.504e+01 8.893e+01 1.315e+02, threshold=1.501e+02, percent-clipped=0.0 +2024-07-27 17:17:09,905 INFO [train.py:1114] (3/4) Epoch 4, batch 250, loss[loss=0.3359, simple_loss=0.3951, pruned_loss=0.1383, over 4617.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.3449, pruned_loss=0.1026, over 669902.51 frames. ], batch size: 16, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:17:16,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.54 vs. limit=10.0 +2024-07-27 17:17:18,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=41237.333333333336, ans=0.0 +2024-07-27 17:17:28,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=41250.666666666664, ans=0.125 +2024-07-27 17:17:41,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=41277.333333333336, ans=0.07 +2024-07-27 17:17:43,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41290.666666666664, ans=0.1 +2024-07-27 17:17:43,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.76 vs. limit=15.0 +2024-07-27 17:17:43,511 INFO [train.py:1114] (3/4) Epoch 4, batch 300, loss[loss=0.2809, simple_loss=0.3617, pruned_loss=0.1, over 4802.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3453, pruned_loss=0.103, over 730072.07 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:07,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=41330.666666666664, ans=0.0018846376811594203 +2024-07-27 17:18:14,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.49 vs. limit=6.0 +2024-07-27 17:18:18,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.327e+01 6.726e+01 7.955e+01 9.020e+01 1.256e+02, threshold=1.591e+02, percent-clipped=0.0 +2024-07-27 17:18:19,052 INFO [train.py:1114] (3/4) Epoch 4, batch 350, loss[loss=0.2199, simple_loss=0.2932, pruned_loss=0.0733, over 4947.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3455, pruned_loss=0.1025, over 775995.58 frames. 
], batch size: 12, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:22,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=41357.333333333336, ans=0.2 +2024-07-27 17:18:42,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41397.333333333336, ans=0.1 +2024-07-27 17:18:51,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.69 vs. limit=22.5 +2024-07-27 17:18:52,197 INFO [train.py:1114] (3/4) Epoch 4, batch 400, loss[loss=0.2696, simple_loss=0.3429, pruned_loss=0.09814, over 4694.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3432, pruned_loss=0.1007, over 813571.91 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:18:52,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.58 vs. limit=10.0 +2024-07-27 17:19:04,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=41437.333333333336, ans=0.025 +2024-07-27 17:19:08,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=41450.666666666664, ans=0.125 +2024-07-27 17:19:16,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.14 vs. limit=22.5 +2024-07-27 17:19:20,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=41477.333333333336, ans=0.0 +2024-07-27 17:19:24,666 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.564e+01 7.397e+01 8.870e+01 1.499e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 17:19:25,330 INFO [train.py:1114] (3/4) Epoch 4, batch 450, loss[loss=0.2729, simple_loss=0.363, pruned_loss=0.09142, over 4635.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3436, pruned_loss=0.1012, over 838834.97 frames. ], batch size: 13, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:19:39,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0 +2024-07-27 17:19:50,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=41530.666666666664, ans=0.07 +2024-07-27 17:19:58,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=41544.0, ans=0.125 +2024-07-27 17:20:09,642 INFO [train.py:1114] (3/4) Epoch 4, batch 500, loss[loss=0.2722, simple_loss=0.3615, pruned_loss=0.09143, over 4671.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3425, pruned_loss=0.1005, over 861554.67 frames. ], batch size: 15, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:11,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=41557.333333333336, ans=0.125 +2024-07-27 17:20:22,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.57 vs. 
limit=15.0 +2024-07-27 17:20:38,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=41610.666666666664, ans=0.125 +2024-07-27 17:20:45,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.267e+01 7.385e+01 9.027e+01 1.460e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 17:20:46,084 INFO [train.py:1114] (3/4) Epoch 4, batch 550, loss[loss=0.2962, simple_loss=0.3735, pruned_loss=0.1094, over 4610.00 frames. ], tot_loss[loss=0.2722, simple_loss=0.3434, pruned_loss=0.1004, over 877604.52 frames. ], batch size: 17, lr: 1.90e-02, grad_scale: 32.0 +2024-07-27 17:20:49,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=41624.0, ans=0.025 +2024-07-27 17:20:55,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.82 vs. limit=22.5 +2024-07-27 17:20:55,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.70 vs. limit=22.5 +2024-07-27 17:20:56,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=41637.333333333336, ans=0.1 +2024-07-27 17:21:03,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=41650.666666666664, ans=0.0018150724637681168 +2024-07-27 17:21:03,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.13 vs. limit=15.0 +2024-07-27 17:21:12,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=41677.333333333336, ans=0.0018092753623188407 +2024-07-27 17:21:21,548 INFO [train.py:1114] (3/4) Epoch 4, batch 600, loss[loss=0.3007, simple_loss=0.3626, pruned_loss=0.1194, over 4636.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3432, pruned_loss=0.1004, over 892399.24 frames. ], batch size: 16, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:21:26,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.65 vs. limit=15.0 +2024-07-27 17:21:29,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.35 vs. limit=22.5 +2024-07-27 17:21:30,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=41704.0, ans=0.125 +2024-07-27 17:21:33,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=41704.0, ans=0.125 +2024-07-27 17:21:35,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.88 vs. 
limit=15.0 +2024-07-27 17:21:42,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=41730.666666666664, ans=0.00179768115942029 +2024-07-27 17:21:44,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=41730.666666666664, ans=0.125 +2024-07-27 17:21:51,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=41744.0, ans=0.125 +2024-07-27 17:21:52,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.65 vs. limit=15.0 +2024-07-27 17:21:52,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=41744.0, ans=0.0 +2024-07-27 17:21:52,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.16 vs. limit=22.5 +2024-07-27 17:21:53,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.519e+01 6.252e+01 7.090e+01 7.980e+01 1.452e+02, threshold=1.418e+02, percent-clipped=0.0 +2024-07-27 17:21:54,399 INFO [train.py:1114] (3/4) Epoch 4, batch 650, loss[loss=0.2847, simple_loss=0.3406, pruned_loss=0.1144, over 4761.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3421, pruned_loss=0.09997, over 904230.64 frames. ], batch size: 13, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:22:05,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.83 vs. limit=15.0 +2024-07-27 17:22:08,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=41784.0, ans=0.025 +2024-07-27 17:22:08,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=41784.0, ans=0.0 +2024-07-27 17:22:09,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.47 vs. limit=22.5 +2024-07-27 17:22:19,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=41797.333333333336, ans=0.125 +2024-07-27 17:22:27,770 INFO [train.py:1114] (3/4) Epoch 4, batch 700, loss[loss=0.2293, simple_loss=0.2958, pruned_loss=0.08146, over 4640.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3416, pruned_loss=0.09942, over 911780.30 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:22:27,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=41824.0, ans=0.0 +2024-07-27 17:22:28,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.04 vs. limit=22.5 +2024-07-27 17:22:29,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.06 vs. 
limit=15.0 +2024-07-27 17:22:31,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=41824.0, ans=0.125 +2024-07-27 17:22:43,263 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:22:43,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=41850.666666666664, ans=0.1 +2024-07-27 17:22:51,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-07-27 17:22:55,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=41877.333333333336, ans=0.125 +2024-07-27 17:23:02,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.371e+01 6.772e+01 7.672e+01 9.334e+01 1.432e+02, threshold=1.534e+02, percent-clipped=1.0 +2024-07-27 17:23:02,190 INFO [train.py:1114] (3/4) Epoch 4, batch 750, loss[loss=0.2641, simple_loss=0.3384, pruned_loss=0.09493, over 4695.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3415, pruned_loss=0.09997, over 918295.83 frames. ], batch size: 13, lr: 1.89e-02, grad_scale: 16.0 +2024-07-27 17:23:03,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=41890.666666666664, ans=15.0 +2024-07-27 17:23:03,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.35 vs. limit=12.0 +2024-07-27 17:23:06,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=41890.666666666664, ans=0.0017628985507246384 +2024-07-27 17:23:08,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=41904.0, ans=0.125 +2024-07-27 17:23:13,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.74 vs. limit=10.0 +2024-07-27 17:23:13,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.26 vs. limit=6.0 +2024-07-27 17:23:18,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0 +2024-07-27 17:23:19,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=41917.333333333336, ans=0.125 +2024-07-27 17:23:29,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0 +2024-07-27 17:23:33,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=41944.0, ans=0.125 +2024-07-27 17:23:37,970 INFO [train.py:1114] (3/4) Epoch 4, batch 800, loss[loss=0.2062, simple_loss=0.2835, pruned_loss=0.06442, over 4855.00 frames. ], tot_loss[loss=0.2704, simple_loss=0.3414, pruned_loss=0.09974, over 923489.89 frames. 
], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:24:04,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=41997.333333333336, ans=0.0017397101449275355 +2024-07-27 17:24:07,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=41997.333333333336, ans=0.125 +2024-07-27 17:24:10,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=42010.666666666664, ans=0.125 +2024-07-27 17:24:14,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=6.64 vs. limit=15.0 +2024-07-27 17:24:18,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.573e+01 6.422e+01 7.236e+01 8.133e+01 1.458e+02, threshold=1.447e+02, percent-clipped=0.0 +2024-07-27 17:24:25,679 INFO [train.py:1114] (3/4) Epoch 4, batch 850, loss[loss=0.316, simple_loss=0.3806, pruned_loss=0.1257, over 4661.00 frames. ], tot_loss[loss=0.27, simple_loss=0.341, pruned_loss=0.09946, over 927448.04 frames. ], batch size: 14, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:24:44,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42064.0, ans=0.125 +2024-07-27 17:24:47,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42064.0, ans=0.1 +2024-07-27 17:24:50,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.25 vs. limit=15.0 +2024-07-27 17:24:51,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=42077.333333333336, ans=15.0 +2024-07-27 17:24:54,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42077.333333333336, ans=0.125 +2024-07-27 17:24:58,780 INFO [train.py:1114] (3/4) Epoch 4, batch 900, loss[loss=0.2504, simple_loss=0.32, pruned_loss=0.09036, over 4855.00 frames. ], tot_loss[loss=0.272, simple_loss=0.3426, pruned_loss=0.1007, over 928114.74 frames. ], batch size: 12, lr: 1.89e-02, grad_scale: 32.0 +2024-07-27 17:25:02,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=42090.666666666664, ans=0.07 +2024-07-27 17:25:13,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=42117.333333333336, ans=0.125 +2024-07-27 17:25:13,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0 +2024-07-27 17:25:14,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. 
limit=15.0 +2024-07-27 17:25:28,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42144.0, ans=0.1 +2024-07-27 17:25:34,687 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.294e+01 6.285e+01 6.831e+01 7.468e+01 1.764e+02, threshold=1.366e+02, percent-clipped=2.0 +2024-07-27 17:25:36,695 INFO [train.py:1114] (3/4) Epoch 4, batch 950, loss[loss=0.2422, simple_loss=0.311, pruned_loss=0.08669, over 4785.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3425, pruned_loss=0.1004, over 930253.18 frames. ], batch size: 12, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:26:10,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=42210.666666666664, ans=0.0 +2024-07-27 17:26:12,279 INFO [train.py:1114] (3/4) Epoch 4, batch 1000, loss[loss=0.2489, simple_loss=0.3299, pruned_loss=0.08398, over 4961.00 frames. ], tot_loss[loss=0.273, simple_loss=0.3437, pruned_loss=0.1011, over 929843.44 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:26:52,348 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.274e+01 6.992e+01 7.907e+01 1.150e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 17:26:52,382 INFO [train.py:1114] (3/4) Epoch 4, batch 1050, loss[loss=0.2315, simple_loss=0.3093, pruned_loss=0.07689, over 4877.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3416, pruned_loss=0.09972, over 931950.64 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:11,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42304.0, ans=0.0 +2024-07-27 17:27:24,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=42330.666666666664, ans=0.0 +2024-07-27 17:27:24,494 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.91 vs. limit=15.0 +2024-07-27 17:27:25,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=42330.666666666664, ans=0.015 +2024-07-27 17:27:31,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=42330.666666666664, ans=0.0 +2024-07-27 17:27:40,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=42344.0, ans=0.04949747468305833 +2024-07-27 17:27:40,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=42344.0, ans=0.0 +2024-07-27 17:27:44,197 INFO [train.py:1114] (3/4) Epoch 4, batch 1100, loss[loss=0.2588, simple_loss=0.3369, pruned_loss=0.09036, over 4887.00 frames. ], tot_loss[loss=0.27, simple_loss=0.3409, pruned_loss=0.09953, over 934371.48 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:27:46,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=42357.333333333336, ans=0.025 +2024-07-27 17:27:47,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. 
limit=15.0 +2024-07-27 17:27:50,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=42370.666666666664, ans=0.125 +2024-07-27 17:27:57,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=42370.666666666664, ans=0.2 +2024-07-27 17:28:01,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=42384.0, ans=0.2 +2024-07-27 17:28:04,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=42384.0, ans=0.05 +2024-07-27 17:28:16,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=42410.666666666664, ans=0.0 +2024-07-27 17:28:18,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=42410.666666666664, ans=0.2 +2024-07-27 17:28:22,185 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.208e+01 6.982e+01 7.743e+01 1.395e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-27 17:28:22,219 INFO [train.py:1114] (3/4) Epoch 4, batch 1150, loss[loss=0.2202, simple_loss=0.2984, pruned_loss=0.07098, over 4908.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3408, pruned_loss=0.09978, over 934376.02 frames. ], batch size: 13, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:28:23,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.51 vs. limit=12.0 +2024-07-27 17:29:05,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42464.0, ans=0.125 +2024-07-27 17:29:05,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=42464.0, ans=0.1 +2024-07-27 17:29:09,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=42464.0, ans=0.125 +2024-07-27 17:29:14,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.64 vs. limit=15.0 +2024-07-27 17:29:24,968 INFO [train.py:1114] (3/4) Epoch 4, batch 1200, loss[loss=0.2618, simple_loss=0.3432, pruned_loss=0.09024, over 4874.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.3416, pruned_loss=0.1004, over 933720.02 frames. ], batch size: 14, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:30:06,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=42544.0, ans=0.0016208695652173907 +2024-07-27 17:30:10,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=42544.0, ans=0.1 +2024-07-27 17:30:12,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.401e+01 6.877e+01 7.526e+01 8.642e+01 1.436e+02, threshold=1.505e+02, percent-clipped=1.0 +2024-07-27 17:30:12,161 INFO [train.py:1114] (3/4) Epoch 4, batch 1250, loss[loss=0.2979, simple_loss=0.3622, pruned_loss=0.1168, over 4807.00 frames. ], tot_loss[loss=0.2701, simple_loss=0.3414, pruned_loss=0.09939, over 937442.64 frames. 
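The recurring optim.py warnings ("grad-norm quartiles ... threshold ... percent-clipped") summarize the distribution of recent gradient norms and the clipping threshold derived from it. Below is a minimal sketch of that bookkeeping, assuming a median-based threshold; the helper is illustrative and not the actual optimizer code:

import torch

def clip_and_summarize(params, norm_history, clipping_scale=2.0, window=128):
    # Global gradient norm for this step.
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.detach().norm() for g in grads]))
    norm_history.append(total_norm.item())
    recent = torch.tensor(norm_history[-window:])
    # Min, 25%, median, 75%, max of the recent gradient norms.
    quartiles = torch.quantile(recent, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()  # scale times the median
    clipped = total_norm.item() > threshold
    if clipped:
        for g in grads:
            g.detach().mul_(threshold / total_norm.item())
    return quartiles, threshold, clipped

Over one logging interval, "percent-clipped" would then be the fraction of steps for which clipped came back True.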
], batch size: 15, lr: 1.88e-02, grad_scale: 32.0 +2024-07-27 17:30:13,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=42557.333333333336, ans=0.125 +2024-07-27 17:30:15,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=42557.333333333336, ans=0.125 +2024-07-27 17:30:35,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.89 vs. limit=22.5 +2024-07-27 17:30:41,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.89 vs. limit=10.0 +2024-07-27 17:30:42,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.77 vs. limit=15.0 +2024-07-27 17:30:50,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=42610.666666666664, ans=0.0 +2024-07-27 17:30:54,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=42610.666666666664, ans=0.125 +2024-07-27 17:30:57,769 INFO [train.py:1114] (3/4) Epoch 4, batch 1300, loss[loss=0.2984, simple_loss=0.3687, pruned_loss=0.1141, over 4653.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.341, pruned_loss=0.09909, over 938732.27 frames. ], batch size: 19, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:30:57,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=42624.0, ans=0.025 +2024-07-27 17:30:57,946 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:31:01,013 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:31:03,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=42624.0, ans=0.125 +2024-07-27 17:31:03,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=42624.0, ans=0.125 +2024-07-27 17:31:07,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=42624.0, ans=0.0 +2024-07-27 17:31:31,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=42650.666666666664, ans=0.125 +2024-07-27 17:31:31,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-07-27 17:31:31,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=42650.666666666664, ans=0.125 +2024-07-27 17:31:53,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.07 vs. 
limit=22.5 +2024-07-27 17:31:59,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=42690.666666666664, ans=0.125 +2024-07-27 17:31:59,664 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.286e+01 6.475e+01 6.974e+01 8.075e+01 1.412e+02, threshold=1.395e+02, percent-clipped=0.0 +2024-07-27 17:31:59,698 INFO [train.py:1114] (3/4) Epoch 4, batch 1350, loss[loss=0.2391, simple_loss=0.3121, pruned_loss=0.083, over 4749.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3403, pruned_loss=0.09813, over 940874.74 frames. ], batch size: 13, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:32:02,492 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:32:32,794 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=10.09 vs. limit=15.0 +2024-07-27 17:32:34,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=42717.333333333336, ans=0.125 +2024-07-27 17:32:35,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=42730.666666666664, ans=0.0015802898550724646 +2024-07-27 17:32:39,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=42730.666666666664, ans=0.125 +2024-07-27 17:32:42,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.44 vs. limit=15.0 +2024-07-27 17:32:49,786 INFO [train.py:1114] (3/4) Epoch 4, batch 1400, loss[loss=0.2848, simple_loss=0.3321, pruned_loss=0.1188, over 4703.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3404, pruned_loss=0.09898, over 942517.39 frames. ], batch size: 11, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:33:59,444 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.546e+01 6.502e+01 7.039e+01 8.275e+01 1.312e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-27 17:33:59,478 INFO [train.py:1114] (3/4) Epoch 4, batch 1450, loss[loss=0.2583, simple_loss=0.3453, pruned_loss=0.08562, over 4676.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3411, pruned_loss=0.09911, over 942645.88 frames. ], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:34:27,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=15.0 +2024-07-27 17:34:46,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=42877.333333333336, ans=0.125 +2024-07-27 17:34:54,669 INFO [train.py:1114] (3/4) Epoch 4, batch 1500, loss[loss=0.2869, simple_loss=0.355, pruned_loss=0.1093, over 4807.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.342, pruned_loss=0.09951, over 942197.58 frames. ], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:35:05,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.97 vs. limit=15.0 +2024-07-27 17:35:10,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.18 vs. 
limit=22.5 +2024-07-27 17:35:20,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=42917.333333333336, ans=0.125 +2024-07-27 17:35:21,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.27 vs. limit=22.5 +2024-07-27 17:35:46,841 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.518e+01 6.513e+01 7.459e+01 8.473e+01 1.359e+02, threshold=1.492e+02, percent-clipped=0.0 +2024-07-27 17:35:46,875 INFO [train.py:1114] (3/4) Epoch 4, batch 1550, loss[loss=0.2703, simple_loss=0.3458, pruned_loss=0.09736, over 4918.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3413, pruned_loss=0.0995, over 939041.92 frames. ], batch size: 15, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:35:50,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=42957.333333333336, ans=0.0 +2024-07-27 17:35:51,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.64 vs. limit=22.5 +2024-07-27 17:35:53,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=42970.666666666664, ans=0.0 +2024-07-27 17:35:55,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=42970.666666666664, ans=0.025 +2024-07-27 17:36:18,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=43010.666666666664, ans=0.2 +2024-07-27 17:36:18,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=43010.666666666664, ans=0.2 +2024-07-27 17:36:25,515 INFO [train.py:1114] (3/4) Epoch 4, batch 1600, loss[loss=0.2769, simple_loss=0.3508, pruned_loss=0.1015, over 4873.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3408, pruned_loss=0.09918, over 937336.60 frames. ], batch size: 14, lr: 1.87e-02, grad_scale: 32.0 +2024-07-27 17:36:39,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=43037.333333333336, ans=0.125 +2024-07-27 17:36:50,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43050.666666666664, ans=0.125 +2024-07-27 17:36:50,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=43050.666666666664, ans=0.125 +2024-07-27 17:36:50,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=43064.0, ans=0.0 +2024-07-27 17:36:54,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43064.0, ans=0.125 +2024-07-27 17:36:54,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.74 vs. 
limit=10.0 +2024-07-27 17:37:01,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=43077.333333333336, ans=0.125 +2024-07-27 17:37:02,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=43077.333333333336, ans=0.025 +2024-07-27 17:37:04,628 INFO [train.py:1114] (3/4) Epoch 4, batch 1650, loss[loss=0.2491, simple_loss=0.3326, pruned_loss=0.08281, over 4668.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3407, pruned_loss=0.09928, over 937529.19 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:05,272 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.309e+01 6.450e+01 7.502e+01 9.535e+01 1.419e+02, threshold=1.500e+02, percent-clipped=0.0 +2024-07-27 17:37:06,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=43090.666666666664, ans=0.04949747468305833 +2024-07-27 17:37:10,409 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.72 vs. limit=15.0 +2024-07-27 17:37:23,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=43130.666666666664, ans=0.125 +2024-07-27 17:37:26,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.04 vs. limit=15.0 +2024-07-27 17:37:28,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.20 vs. limit=15.0 +2024-07-27 17:37:30,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=43144.0, ans=0.125 +2024-07-27 17:37:37,805 INFO [train.py:1114] (3/4) Epoch 4, batch 1700, loss[loss=0.2053, simple_loss=0.2891, pruned_loss=0.06074, over 4700.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3402, pruned_loss=0.09809, over 939171.09 frames. ], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:37:40,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43157.333333333336, ans=0.125 +2024-07-27 17:37:44,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=43157.333333333336, ans=0.5 +2024-07-27 17:37:51,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.21 vs. limit=12.0 +2024-07-27 17:37:52,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=43184.0, ans=0.2 +2024-07-27 17:38:08,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43210.666666666664, ans=0.125 +2024-07-27 17:38:10,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=43210.666666666664, ans=0.07 +2024-07-27 17:38:12,050 INFO [train.py:1114] (3/4) Epoch 4, batch 1750, loss[loss=0.2297, simple_loss=0.3015, pruned_loss=0.07891, over 4810.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3403, pruned_loss=0.09796, over 940232.42 frames. 
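The Whitening lines compare a per-module statistic against a limit (e.g. "metric=7.74 vs. limit=10.0"); the metric is large when the channel covariance of the module's activations is far from isotropic. One plausible way to compute such a statistic — an illustrative proxy, not necessarily the exact scaling.py formula:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    # x: (num_frames, num_channels); channels are split into groups.
    n, c = x.shape
    xg = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    xg = xg - xg.mean(dim=1, keepdim=True)
    cov = xg.transpose(1, 2) @ xg / n                  # per-group covariance
    d = cov.shape[-1]
    trace = cov.diagonal(dim1=-2, dim2=-1).sum(-1)     # sum of eigenvalues
    trace_sq = (cov * cov).sum(dim=(-2, -1))           # trace(C @ C), C symmetric
    # Equals 1.0 when the covariance is a multiple of the identity
    # ("white"), and grows as the eigenvalue spread increases.
    return (d * trace_sq / trace.pow(2)).mean()

x = torch.randn(10000, 64)
print(whitening_metric(x))  # close to 1.0 for white noise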
], batch size: 11, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:12,714 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.524e+01 6.769e+01 7.815e+01 9.643e+01 1.575e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 17:38:24,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43237.333333333336, ans=0.125 +2024-07-27 17:38:26,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=43237.333333333336, ans=0.05 +2024-07-27 17:38:27,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=43237.333333333336, ans=0.125 +2024-07-27 17:38:34,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43250.666666666664, ans=0.125 +2024-07-27 17:38:35,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=43264.0, ans=0.04949747468305833 +2024-07-27 17:38:36,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.90 vs. limit=15.0 +2024-07-27 17:38:37,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=43264.0, ans=0.1 +2024-07-27 17:38:38,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=43264.0, ans=0.125 +2024-07-27 17:38:49,159 INFO [train.py:1114] (3/4) Epoch 4, batch 1800, loss[loss=0.288, simple_loss=0.361, pruned_loss=0.1075, over 4638.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3404, pruned_loss=0.09821, over 940814.91 frames. ], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:38:51,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=43290.666666666664, ans=0.0014585507246376826 +2024-07-27 17:38:56,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.78 vs. limit=15.0 +2024-07-27 17:38:57,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-27 17:39:15,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43330.666666666664, ans=0.1 +2024-07-27 17:39:19,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43344.0, ans=0.125 +2024-07-27 17:39:23,349 INFO [train.py:1114] (3/4) Epoch 4, batch 1850, loss[loss=0.2888, simple_loss=0.3667, pruned_loss=0.1054, over 4818.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3392, pruned_loss=0.09744, over 940514.83 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:39:23,919 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.740e+01 7.721e+01 9.480e+01 1.911e+02, threshold=1.544e+02, percent-clipped=3.0 +2024-07-27 17:39:28,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.10 vs. 
limit=15.0 +2024-07-27 17:39:46,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=43397.333333333336, ans=0.125 +2024-07-27 17:39:48,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43397.333333333336, ans=0.125 +2024-07-27 17:39:55,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.18 vs. limit=15.0 +2024-07-27 17:39:56,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.62 vs. limit=15.0 +2024-07-27 17:39:58,475 INFO [train.py:1114] (3/4) Epoch 4, batch 1900, loss[loss=0.2763, simple_loss=0.3605, pruned_loss=0.09601, over 4671.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3398, pruned_loss=0.09761, over 941633.47 frames. ], batch size: 14, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:04,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=43424.0, ans=0.125 +2024-07-27 17:40:19,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.78 vs. limit=22.5 +2024-07-27 17:40:22,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=43464.0, ans=0.125 +2024-07-27 17:40:23,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=43464.0, ans=0.125 +2024-07-27 17:40:24,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=43464.0, ans=0.2 +2024-07-27 17:40:29,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=22.5 +2024-07-27 17:40:33,685 INFO [train.py:1114] (3/4) Epoch 4, batch 1950, loss[loss=0.276, simple_loss=0.3452, pruned_loss=0.1034, over 4892.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3404, pruned_loss=0.09771, over 943891.31 frames. ], batch size: 13, lr: 1.86e-02, grad_scale: 16.0 +2024-07-27 17:40:34,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 6.498e+01 7.387e+01 8.650e+01 1.667e+02, threshold=1.477e+02, percent-clipped=1.0 +2024-07-27 17:40:41,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.94 vs. limit=15.0 +2024-07-27 17:40:46,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=15.0 +2024-07-27 17:40:55,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=43517.333333333336, ans=0.2 +2024-07-27 17:40:59,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.57 vs. 
limit=22.5 +2024-07-27 17:41:00,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43530.666666666664, ans=0.1 +2024-07-27 17:41:02,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=43530.666666666664, ans=0.125 +2024-07-27 17:41:03,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.51 vs. limit=15.0 +2024-07-27 17:41:05,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=7.95 vs. limit=15.0 +2024-07-27 17:41:11,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=43544.0, ans=0.125 +2024-07-27 17:41:17,126 INFO [train.py:1114] (3/4) Epoch 4, batch 2000, loss[loss=0.2013, simple_loss=0.2724, pruned_loss=0.06506, over 4811.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.341, pruned_loss=0.09803, over 940976.87 frames. ], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:24,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=43570.666666666664, ans=0.125 +2024-07-27 17:41:42,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=43597.333333333336, ans=0.125 +2024-07-27 17:41:52,686 INFO [train.py:1114] (3/4) Epoch 4, batch 2050, loss[loss=0.2275, simple_loss=0.2908, pruned_loss=0.08207, over 4614.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3398, pruned_loss=0.09759, over 939185.32 frames. ], batch size: 11, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:41:53,319 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.522e+01 6.397e+01 6.971e+01 8.145e+01 1.317e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-27 17:42:14,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=43664.0, ans=0.125 +2024-07-27 17:42:19,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=43677.333333333336, ans=0.035 +2024-07-27 17:42:21,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=43677.333333333336, ans=0.0 +2024-07-27 17:42:25,801 INFO [train.py:1114] (3/4) Epoch 4, batch 2100, loss[loss=0.2295, simple_loss=0.3113, pruned_loss=0.0739, over 4758.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3393, pruned_loss=0.0973, over 940847.90 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:42:29,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.33 vs. limit=22.5 +2024-07-27 17:42:31,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.83 vs. 
limit=22.5 +2024-07-27 17:42:38,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=43717.333333333336, ans=0.125 +2024-07-27 17:42:45,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=43730.666666666664, ans=0.5 +2024-07-27 17:42:45,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=43730.666666666664, ans=0.125 +2024-07-27 17:42:58,957 INFO [train.py:1114] (3/4) Epoch 4, batch 2150, loss[loss=0.2399, simple_loss=0.3021, pruned_loss=0.08883, over 4892.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.338, pruned_loss=0.09688, over 944121.09 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:42:59,570 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.474e+01 6.533e+01 7.336e+01 8.956e+01 1.647e+02, threshold=1.467e+02, percent-clipped=5.0 +2024-07-27 17:43:01,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=43757.333333333336, ans=0.125 +2024-07-27 17:43:03,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43757.333333333336, ans=0.125 +2024-07-27 17:43:07,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=43770.666666666664, ans=0.125 +2024-07-27 17:43:11,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=43770.666666666664, ans=0.2 +2024-07-27 17:43:16,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=43784.0, ans=0.0 +2024-07-27 17:43:21,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=43797.333333333336, ans=0.0 +2024-07-27 17:43:26,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=43810.666666666664, ans=0.125 +2024-07-27 17:43:29,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=43810.666666666664, ans=0.1 +2024-07-27 17:43:32,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.20 vs. limit=15.0 +2024-07-27 17:43:32,598 INFO [train.py:1114] (3/4) Epoch 4, batch 2200, loss[loss=0.3315, simple_loss=0.3982, pruned_loss=0.1323, over 4803.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3385, pruned_loss=0.09659, over 943214.70 frames. ], batch size: 14, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:43:58,640 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:44:12,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=43877.333333333336, ans=0.0 +2024-07-27 17:44:15,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-07-27 17:44:16,763 INFO [train.py:1114] (3/4) Epoch 4, batch 2250, loss[loss=0.2782, simple_loss=0.3429, pruned_loss=0.1068, over 4699.00 frames. 
], tot_loss[loss=0.2668, simple_loss=0.3393, pruned_loss=0.09714, over 942027.65 frames. ], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:44:17,407 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.403e+01 7.459e+01 9.142e+01 2.382e+02, threshold=1.492e+02, percent-clipped=1.0 +2024-07-27 17:44:38,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=43917.333333333336, ans=0.125 +2024-07-27 17:44:45,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=43930.666666666664, ans=0.125 +2024-07-27 17:44:49,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=43930.666666666664, ans=0.0 +2024-07-27 17:44:59,212 INFO [train.py:1114] (3/4) Epoch 4, batch 2300, loss[loss=0.2123, simple_loss=0.2738, pruned_loss=0.07538, over 4944.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3381, pruned_loss=0.09686, over 939598.35 frames. ], batch size: 12, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:45:01,770 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.72 vs. limit=22.5 +2024-07-27 17:45:17,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=43970.666666666664, ans=0.125 +2024-07-27 17:45:25,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.61 vs. limit=6.0 +2024-07-27 17:45:25,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=43984.0, ans=0.0 +2024-07-27 17:45:29,768 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.88 vs. limit=8.0 +2024-07-27 17:45:34,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=44010.666666666664, ans=0.125 +2024-07-27 17:45:34,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=44010.666666666664, ans=0.0 +2024-07-27 17:45:35,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=44010.666666666664, ans=0.04949747468305833 +2024-07-27 17:45:41,392 INFO [train.py:1114] (3/4) Epoch 4, batch 2350, loss[loss=0.2529, simple_loss=0.3292, pruned_loss=0.08825, over 4635.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3378, pruned_loss=0.09671, over 941383.06 frames. 
], batch size: 13, lr: 1.85e-02, grad_scale: 32.0 +2024-07-27 17:45:41,990 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 6.786e+01 8.508e+01 1.044e+02 1.776e+02, threshold=1.702e+02, percent-clipped=2.0 +2024-07-27 17:45:43,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=44024.0, ans=0.125 +2024-07-27 17:45:43,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=44024.0, ans=0.2 +2024-07-27 17:45:54,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=44037.333333333336, ans=0.125 +2024-07-27 17:46:01,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=44050.666666666664, ans=0.125 +2024-07-27 17:46:11,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.93 vs. limit=22.5 +2024-07-27 17:46:14,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.22 vs. limit=15.0 +2024-07-27 17:46:15,445 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.14 vs. limit=15.0 +2024-07-27 17:46:26,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=44077.333333333336, ans=0.0012875362318840577 +2024-07-27 17:46:28,014 INFO [train.py:1114] (3/4) Epoch 4, batch 2400, loss[loss=0.3032, simple_loss=0.3681, pruned_loss=0.1191, over 4637.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3384, pruned_loss=0.0967, over 941053.68 frames. ], batch size: 12, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:46:45,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=44117.333333333336, ans=0.0012788405797101443 +2024-07-27 17:46:45,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=44117.333333333336, ans=0.0 +2024-07-27 17:46:46,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.46 vs. limit=22.5 +2024-07-27 17:46:49,862 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:46:53,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.61 vs. limit=15.0 +2024-07-27 17:46:54,090 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.47 vs. limit=10.0 +2024-07-27 17:47:00,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=44144.0, ans=0.125 +2024-07-27 17:47:01,651 INFO [train.py:1114] (3/4) Epoch 4, batch 2450, loss[loss=0.2516, simple_loss=0.3378, pruned_loss=0.08272, over 4694.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.34, pruned_loss=0.09778, over 936729.82 frames. 
], batch size: 13, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:47:02,264 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 6.348e+01 7.314e+01 8.641e+01 1.426e+02, threshold=1.463e+02, percent-clipped=0.0 +2024-07-27 17:47:04,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=44157.333333333336, ans=0.025 +2024-07-27 17:47:05,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=44157.333333333336, ans=0.0 +2024-07-27 17:47:07,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=44157.333333333336, ans=0.2 +2024-07-27 17:47:21,680 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.66 vs. limit=22.5 +2024-07-27 17:47:35,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=44210.666666666664, ans=0.125 +2024-07-27 17:47:39,896 INFO [train.py:1114] (3/4) Epoch 4, batch 2500, loss[loss=0.2833, simple_loss=0.3626, pruned_loss=0.102, over 4819.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3396, pruned_loss=0.09741, over 938813.11 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:47:42,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=44224.0, ans=0.2 +2024-07-27 17:47:44,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=44224.0, ans=15.0 +2024-07-27 17:47:47,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=44237.333333333336, ans=0.125 +2024-07-27 17:47:53,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.39 vs. limit=15.0 +2024-07-27 17:47:55,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.90 vs. limit=8.0 +2024-07-27 17:48:14,561 INFO [train.py:1114] (3/4) Epoch 4, batch 2550, loss[loss=0.2182, simple_loss=0.2946, pruned_loss=0.07083, over 4807.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3393, pruned_loss=0.09748, over 938604.77 frames. 
], batch size: 11, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:48:15,139 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.441e+01 6.325e+01 6.836e+01 7.764e+01 1.443e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 17:48:18,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=44290.666666666664, ans=0.125 +2024-07-27 17:48:30,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=44317.333333333336, ans=0.125 +2024-07-27 17:48:31,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44317.333333333336, ans=0.0 +2024-07-27 17:48:48,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=44344.0, ans=0.125 +2024-07-27 17:48:49,563 INFO [train.py:1114] (3/4) Epoch 4, batch 2600, loss[loss=0.2344, simple_loss=0.3083, pruned_loss=0.08027, over 4904.00 frames. ], tot_loss[loss=0.2685, simple_loss=0.34, pruned_loss=0.09854, over 937572.82 frames. ], batch size: 13, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:48:53,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=44357.333333333336, ans=0.95 +2024-07-27 17:48:55,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=44357.333333333336, ans=0.2 +2024-07-27 17:49:00,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.83 vs. limit=15.0 +2024-07-27 17:49:07,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=44384.0, ans=0.0 +2024-07-27 17:49:08,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=44384.0, ans=0.0 +2024-07-27 17:49:12,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=44397.333333333336, ans=0.2 +2024-07-27 17:49:16,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=44397.333333333336, ans=0.125 +2024-07-27 17:49:24,930 INFO [train.py:1114] (3/4) Epoch 4, batch 2650, loss[loss=0.2483, simple_loss=0.3274, pruned_loss=0.08458, over 4658.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3403, pruned_loss=0.09873, over 939642.52 frames. 
], batch size: 16, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:49:25,613 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+01 6.678e+01 7.695e+01 9.100e+01 1.480e+02, threshold=1.539e+02, percent-clipped=3.0 +2024-07-27 17:49:32,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=44437.333333333336, ans=0.125 +2024-07-27 17:49:33,830 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 17:49:37,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=44437.333333333336, ans=0.125 +2024-07-27 17:49:47,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-27 17:50:00,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=44477.333333333336, ans=0.125 +2024-07-27 17:50:06,086 INFO [train.py:1114] (3/4) Epoch 4, batch 2700, loss[loss=0.2571, simple_loss=0.3391, pruned_loss=0.08755, over 4741.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3395, pruned_loss=0.09804, over 939717.90 frames. ], batch size: 14, lr: 1.84e-02, grad_scale: 32.0 +2024-07-27 17:50:16,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=44504.0, ans=0.0 +2024-07-27 17:50:18,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=44517.333333333336, ans=0.2 +2024-07-27 17:50:21,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=44517.333333333336, ans=0.2 +2024-07-27 17:50:24,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44517.333333333336, ans=0.1 +2024-07-27 17:50:41,739 INFO [train.py:1114] (3/4) Epoch 4, batch 2750, loss[loss=0.2677, simple_loss=0.336, pruned_loss=0.09963, over 4709.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3382, pruned_loss=0.09707, over 939443.49 frames. ], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:50:42,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.612e+01 7.573e+01 9.586e+01 1.480e+02, threshold=1.515e+02, percent-clipped=0.0 +2024-07-27 17:50:46,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.30 vs. limit=15.0 +2024-07-27 17:51:02,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=44597.333333333336, ans=0.125 +2024-07-27 17:51:07,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.13 vs. limit=15.0 +2024-07-27 17:51:19,256 INFO [train.py:1114] (3/4) Epoch 4, batch 2800, loss[loss=0.3859, simple_loss=0.4292, pruned_loss=0.1714, over 3664.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3387, pruned_loss=0.09731, over 937379.09 frames. 
], batch size: 35, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:51:48,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=44664.0, ans=0.125 +2024-07-27 17:51:49,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.85 vs. limit=8.0 +2024-07-27 17:51:50,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.49 vs. limit=22.5 +2024-07-27 17:51:53,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=44677.333333333336, ans=0.125 +2024-07-27 17:51:57,069 INFO [train.py:1114] (3/4) Epoch 4, batch 2850, loss[loss=0.2667, simple_loss=0.3221, pruned_loss=0.1056, over 4968.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3398, pruned_loss=0.09869, over 936161.38 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:51:57,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.345e+01 6.785e+01 7.509e+01 8.652e+01 1.296e+02, threshold=1.502e+02, percent-clipped=0.0 +2024-07-27 17:52:03,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.68 vs. limit=22.5 +2024-07-27 17:52:22,800 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=3.406e+00 +2024-07-27 17:52:24,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.05 vs. limit=12.0 +2024-07-27 17:52:27,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=44744.0, ans=0.125 +2024-07-27 17:52:32,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=44744.0, ans=0.0 +2024-07-27 17:52:33,421 INFO [train.py:1114] (3/4) Epoch 4, batch 2900, loss[loss=0.2624, simple_loss=0.3418, pruned_loss=0.0915, over 4817.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3405, pruned_loss=0.09752, over 939966.01 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:52:33,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=44757.333333333336, ans=0.2 +2024-07-27 17:52:46,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=44770.666666666664, ans=0.125 +2024-07-27 17:52:46,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=44770.666666666664, ans=0.5 +2024-07-27 17:52:48,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=44784.0, ans=0.07 +2024-07-27 17:53:07,381 INFO [train.py:1114] (3/4) Epoch 4, batch 2950, loss[loss=0.2362, simple_loss=0.3028, pruned_loss=0.08478, over 4714.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3386, pruned_loss=0.09706, over 938630.51 frames. 
], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:07,996 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 6.448e+01 7.326e+01 8.943e+01 1.391e+02, threshold=1.465e+02, percent-clipped=0.0 +2024-07-27 17:53:11,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=44824.0, ans=0.125 +2024-07-27 17:53:17,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 17:53:22,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=44850.666666666664, ans=0.125 +2024-07-27 17:53:28,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=44864.0, ans=0.125 +2024-07-27 17:53:37,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=44877.333333333336, ans=0.0 +2024-07-27 17:53:38,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44877.333333333336, ans=0.1 +2024-07-27 17:53:41,136 INFO [train.py:1114] (3/4) Epoch 4, batch 3000, loss[loss=0.2471, simple_loss=0.3366, pruned_loss=0.07878, over 4770.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3374, pruned_loss=0.09649, over 938179.67 frames. ], batch size: 13, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:53:41,136 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 17:53:47,897 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.7738, 2.3492, 4.1057, 3.5553, 3.7815, 3.7728, 3.4684, 2.5296], + device='cuda:3') +2024-07-27 17:53:49,452 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.8410, 2.2943, 4.0615, 3.5447, 3.7731, 3.7321, 3.4776, 2.3855], + device='cuda:3') +2024-07-27 17:53:52,965 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2168, simple_loss=0.3177, pruned_loss=0.05793, over 944034.00 frames. +2024-07-27 17:53:52,965 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 17:53:56,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=44890.666666666664, ans=0.1 +2024-07-27 17:54:04,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=44904.0, ans=0.125 +2024-07-27 17:54:15,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=44917.333333333336, ans=0.0011049275362318839 +2024-07-27 17:54:15,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=44917.333333333336, ans=0.1 +2024-07-27 17:54:32,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=44944.0, ans=0.125 +2024-07-27 17:54:34,897 INFO [train.py:1114] (3/4) Epoch 4, batch 3050, loss[loss=0.2523, simple_loss=0.3138, pruned_loss=0.09538, over 4637.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3375, pruned_loss=0.09639, over 936982.49 frames. 
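The validation block above also prints attn_weights_entropy tensors from zipformer.py, one entropy value per attention head: diffuse heads score high, sharply peaked heads score low. A self-contained sketch of that diagnostic, with tensor shapes assumed rather than taken from the zipformer source:

import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    # attn: (num_heads, query_len, key_len); each row is a distribution.
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # entropy per query position
    return ent.mean(dim=-1)                         # averaged per head

weights = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(weights))  # one value per head, at most log(50) ~ 3.91

The "Maximum memory allocated" figure in the same block presumably comes from PyTorch's CUDA statistics, i.e. something like torch.cuda.max_memory_allocated() / (1024 ** 2).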
], batch size: 12, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:54:42,779 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.188e+01 6.571e+01 7.374e+01 8.801e+01 1.359e+02, threshold=1.475e+02, percent-clipped=0.0 +2024-07-27 17:54:43,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=44957.333333333336, ans=0.125 +2024-07-27 17:54:46,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=44957.333333333336, ans=0.125 +2024-07-27 17:54:52,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=44970.666666666664, ans=0.125 +2024-07-27 17:57:40,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=45010.666666666664, ans=0.125 +2024-07-27 17:57:42,495 INFO [train.py:1114] (3/4) Epoch 4, batch 3100, loss[loss=0.3591, simple_loss=0.4077, pruned_loss=0.1553, over 4664.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3391, pruned_loss=0.09747, over 937894.76 frames. ], batch size: 16, lr: 1.83e-02, grad_scale: 32.0 +2024-07-27 17:57:46,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.32 vs. limit=22.5 +2024-07-27 17:57:48,322 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.06 vs. limit=15.0 +2024-07-27 17:58:00,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=45050.666666666664, ans=0.0 +2024-07-27 17:58:03,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.37 vs. limit=15.0 +2024-07-27 17:58:08,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45050.666666666664, ans=0.1 +2024-07-27 17:58:16,755 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-27 17:58:19,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=45077.333333333336, ans=0.0 +2024-07-27 17:58:47,041 INFO [train.py:1114] (3/4) Epoch 4, batch 3150, loss[loss=0.3197, simple_loss=0.3935, pruned_loss=0.123, over 4597.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3389, pruned_loss=0.09746, over 938108.25 frames. ], batch size: 17, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 17:58:47,641 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.206e+01 6.605e+01 7.303e+01 8.284e+01 1.349e+02, threshold=1.461e+02, percent-clipped=0.0 +2024-07-27 17:58:58,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.35 vs. 
limit=22.5 +2024-07-27 17:59:00,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=45104.0, ans=0.125 +2024-07-27 17:59:15,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=45117.333333333336, ans=0.95 +2024-07-27 17:59:16,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=45117.333333333336, ans=0.04949747468305833 +2024-07-27 17:59:19,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=45130.666666666664, ans=0.125 +2024-07-27 17:59:29,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.36 vs. limit=15.0 +2024-07-27 17:59:31,928 INFO [train.py:1114] (3/4) Epoch 4, batch 3200, loss[loss=0.269, simple_loss=0.348, pruned_loss=0.09499, over 4828.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3372, pruned_loss=0.09663, over 939699.20 frames. ], batch size: 13, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 17:59:34,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=45157.333333333336, ans=0.125 +2024-07-27 17:59:35,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=45157.333333333336, ans=0.0 +2024-07-27 18:00:46,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=45197.333333333336, ans=0.2 +2024-07-27 18:01:01,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=45210.666666666664, ans=0.125 +2024-07-27 18:01:05,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=45210.666666666664, ans=0.2 +2024-07-27 18:01:12,033 INFO [train.py:1114] (3/4) Epoch 4, batch 3250, loss[loss=0.273, simple_loss=0.3524, pruned_loss=0.09677, over 4928.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3383, pruned_loss=0.09707, over 940474.63 frames. ], batch size: 14, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:01:12,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.665e+01 7.646e+01 9.547e+01 1.516e+02, threshold=1.529e+02, percent-clipped=1.0 +2024-07-27 18:01:14,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=45224.0, ans=0.5 +2024-07-27 18:01:47,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=45277.333333333336, ans=0.125 +2024-07-27 18:01:49,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=45277.333333333336, ans=0.125 +2024-07-27 18:01:50,281 INFO [train.py:1114] (3/4) Epoch 4, batch 3300, loss[loss=0.307, simple_loss=0.3789, pruned_loss=0.1175, over 4803.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3372, pruned_loss=0.09741, over 940841.54 frames. 
], batch size: 19, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:01:54,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45290.666666666664, ans=0.125 +2024-07-27 18:01:55,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45290.666666666664, ans=0.1 +2024-07-27 18:02:03,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=45317.333333333336, ans=0.0 +2024-07-27 18:02:24,059 INFO [train.py:1114] (3/4) Epoch 4, batch 3350, loss[loss=0.2866, simple_loss=0.3642, pruned_loss=0.1045, over 4605.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3395, pruned_loss=0.09824, over 938746.38 frames. ], batch size: 17, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:02:24,712 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.140e+01 6.495e+01 7.490e+01 8.565e+01 1.368e+02, threshold=1.498e+02, percent-clipped=0.0 +2024-07-27 18:02:26,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=45357.333333333336, ans=0.125 +2024-07-27 18:02:30,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=45370.666666666664, ans=0.125 +2024-07-27 18:02:31,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.68 vs. limit=15.0 +2024-07-27 18:02:33,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.26 vs. limit=22.5 +2024-07-27 18:02:40,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.76 vs. limit=15.0 +2024-07-27 18:02:41,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.39 vs. limit=15.0 +2024-07-27 18:02:54,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.76 vs. limit=22.5 +2024-07-27 18:02:57,870 INFO [train.py:1114] (3/4) Epoch 4, batch 3400, loss[loss=0.2517, simple_loss=0.3125, pruned_loss=0.09544, over 4802.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3391, pruned_loss=0.09784, over 937431.57 frames. ], batch size: 11, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:03:02,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=45424.0, ans=0.5 +2024-07-27 18:03:21,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=15.0 +2024-07-27 18:03:21,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.16 vs. 
limit=22.5 +2024-07-27 18:03:26,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=45464.0, ans=0.125 +2024-07-27 18:03:37,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=45477.333333333336, ans=0.0 +2024-07-27 18:03:38,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45477.333333333336, ans=0.1 +2024-07-27 18:03:39,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=45477.333333333336, ans=0.125 +2024-07-27 18:03:42,185 INFO [train.py:1114] (3/4) Epoch 4, batch 3450, loss[loss=0.2724, simple_loss=0.3399, pruned_loss=0.1024, over 4714.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3397, pruned_loss=0.09724, over 937421.49 frames. ], batch size: 19, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:03:42,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.80 vs. limit=22.5 +2024-07-27 18:03:42,786 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.545e+01 7.401e+01 8.660e+01 1.564e+02, threshold=1.480e+02, percent-clipped=3.0 +2024-07-27 18:03:44,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.57 vs. limit=15.0 +2024-07-27 18:03:46,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=45490.666666666664, ans=0.025 +2024-07-27 18:03:48,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=45504.0, ans=0.125 +2024-07-27 18:04:00,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=45517.333333333336, ans=0.125 +2024-07-27 18:04:01,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=45517.333333333336, ans=0.125 +2024-07-27 18:04:16,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.88 vs. limit=22.5 +2024-07-27 18:04:20,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-07-27 18:04:21,763 INFO [train.py:1114] (3/4) Epoch 4, batch 3500, loss[loss=0.2594, simple_loss=0.3208, pruned_loss=0.09904, over 4933.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.337, pruned_loss=0.09593, over 938332.70 frames. 
], batch size: 12, lr: 1.82e-02, grad_scale: 32.0 +2024-07-27 18:04:23,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=45557.333333333336, ans=0.07 +2024-07-27 18:04:28,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=45570.666666666664, ans=10.0 +2024-07-27 18:04:34,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=45570.666666666664, ans=0.1 +2024-07-27 18:04:37,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=45584.0, ans=0.125 +2024-07-27 18:04:46,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45597.333333333336, ans=0.1 +2024-07-27 18:04:50,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=45610.666666666664, ans=0.125 +2024-07-27 18:04:53,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=45610.666666666664, ans=0.125 +2024-07-27 18:04:55,589 INFO [train.py:1114] (3/4) Epoch 4, batch 3550, loss[loss=0.2524, simple_loss=0.3375, pruned_loss=0.0837, over 4663.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3355, pruned_loss=0.09505, over 938902.70 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:04:56,223 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.373e+01 7.017e+01 7.924e+01 1.305e+02, threshold=1.403e+02, percent-clipped=0.0 +2024-07-27 18:05:04,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=45637.333333333336, ans=0.0 +2024-07-27 18:05:06,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45637.333333333336, ans=0.1 +2024-07-27 18:05:07,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=45637.333333333336, ans=0.125 +2024-07-27 18:05:12,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=45650.666666666664, ans=0.125 +2024-07-27 18:05:16,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=45650.666666666664, ans=0.2 +2024-07-27 18:05:22,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=45664.0, ans=0.0 +2024-07-27 18:05:35,442 INFO [train.py:1114] (3/4) Epoch 4, batch 3600, loss[loss=0.2493, simple_loss=0.329, pruned_loss=0.0848, over 4969.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3357, pruned_loss=0.09496, over 940793.12 frames. ], batch size: 13, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:05:38,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. limit=22.5 +2024-07-27 18:05:43,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.09 vs. 
limit=6.0 +2024-07-27 18:05:49,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=45717.333333333336, ans=0.125 +2024-07-27 18:05:58,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=45730.666666666664, ans=0.125 +2024-07-27 18:06:11,489 INFO [train.py:1114] (3/4) Epoch 4, batch 3650, loss[loss=0.288, simple_loss=0.3732, pruned_loss=0.1014, over 4898.00 frames. ], tot_loss[loss=0.2622, simple_loss=0.3351, pruned_loss=0.09462, over 941232.30 frames. ], batch size: 15, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:06:12,145 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.552e+01 6.653e+01 7.624e+01 9.000e+01 1.438e+02, threshold=1.525e+02, percent-clipped=1.0 +2024-07-27 18:06:12,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=45757.333333333336, ans=0.125 +2024-07-27 18:06:15,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=45757.333333333336, ans=0.1 +2024-07-27 18:06:17,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=45757.333333333336, ans=0.125 +2024-07-27 18:06:23,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=45770.666666666664, ans=0.2 +2024-07-27 18:06:26,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=45784.0, ans=0.125 +2024-07-27 18:06:35,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.18 vs. limit=22.5 +2024-07-27 18:06:44,754 INFO [train.py:1114] (3/4) Epoch 4, batch 3700, loss[loss=0.2598, simple_loss=0.3323, pruned_loss=0.09361, over 4920.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3354, pruned_loss=0.09403, over 941864.99 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:06:45,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=45824.0, ans=0.0 +2024-07-27 18:06:48,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=45824.0, ans=0.035 +2024-07-27 18:06:52,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=45837.333333333336, ans=0.0009049275362318834 +2024-07-27 18:07:01,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=45850.666666666664, ans=0.125 +2024-07-27 18:07:21,640 INFO [train.py:1114] (3/4) Epoch 4, batch 3750, loss[loss=0.2721, simple_loss=0.3338, pruned_loss=0.1052, over 4816.00 frames. ], tot_loss[loss=0.2624, simple_loss=0.3358, pruned_loss=0.09452, over 943416.15 frames. 
], batch size: 11, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:07:22,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.296e+01 6.507e+01 7.242e+01 8.300e+01 1.182e+02, threshold=1.448e+02, percent-clipped=0.0 +2024-07-27 18:07:25,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=45890.666666666664, ans=0.0008933333333333345 +2024-07-27 18:07:25,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=45890.666666666664, ans=0.125 +2024-07-27 18:07:31,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=45904.0, ans=0.125 +2024-07-27 18:07:39,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=45917.333333333336, ans=0.0 +2024-07-27 18:07:41,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=45930.666666666664, ans=0.1 +2024-07-27 18:07:43,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.07 vs. limit=12.0 +2024-07-27 18:07:44,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.46 vs. limit=12.0 +2024-07-27 18:07:47,101 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.04 vs. limit=15.0 +2024-07-27 18:07:52,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=45944.0, ans=0.125 +2024-07-27 18:07:54,669 INFO [train.py:1114] (3/4) Epoch 4, batch 3800, loss[loss=0.2507, simple_loss=0.3379, pruned_loss=0.08175, over 4809.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3359, pruned_loss=0.0949, over 941085.36 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 64.0 +2024-07-27 18:08:02,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=45970.666666666664, ans=0.2 +2024-07-27 18:08:19,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=45997.333333333336, ans=0.125 +2024-07-27 18:08:25,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.72 vs. limit=10.0 +2024-07-27 18:08:26,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46010.666666666664, ans=0.1 +2024-07-27 18:08:28,685 INFO [train.py:1114] (3/4) Epoch 4, batch 3850, loss[loss=0.2564, simple_loss=0.3432, pruned_loss=0.08483, over 4645.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3353, pruned_loss=0.0939, over 941899.12 frames. 
], batch size: 16, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:08:29,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=46024.0, ans=0.125 +2024-07-27 18:08:30,036 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.355e+01 6.600e+01 7.617e+01 8.935e+01 1.540e+02, threshold=1.523e+02, percent-clipped=1.0 +2024-07-27 18:08:36,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.67 vs. limit=6.0 +2024-07-27 18:08:59,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46077.333333333336, ans=0.1 +2024-07-27 18:09:02,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=46077.333333333336, ans=0.0008527536231884049 +2024-07-27 18:09:03,329 INFO [train.py:1114] (3/4) Epoch 4, batch 3900, loss[loss=0.237, simple_loss=0.3371, pruned_loss=0.06845, over 4821.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3357, pruned_loss=0.09409, over 942194.17 frames. ], batch size: 14, lr: 1.81e-02, grad_scale: 32.0 +2024-07-27 18:09:11,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=46104.0, ans=0.0008469565217391305 +2024-07-27 18:09:12,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0 +2024-07-27 18:09:13,268 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:09:13,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=46104.0, ans=0.0 +2024-07-27 18:09:13,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=46104.0, ans=0.07 +2024-07-27 18:09:16,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=46117.333333333336, ans=0.0 +2024-07-27 18:09:25,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=46130.666666666664, ans=0.125 +2024-07-27 18:09:26,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=46130.666666666664, ans=0.125 +2024-07-27 18:09:26,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=46130.666666666664, ans=0.125 +2024-07-27 18:09:33,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=46144.0, ans=0.2 +2024-07-27 18:09:41,733 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.98 vs. limit=22.5 +2024-07-27 18:09:42,262 INFO [train.py:1114] (3/4) Epoch 4, batch 3950, loss[loss=0.3237, simple_loss=0.3799, pruned_loss=0.1337, over 4829.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3356, pruned_loss=0.09406, over 944181.81 frames. 
], batch size: 16, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:09:44,107 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.311e+01 6.796e+01 7.722e+01 1.006e+02 1.504e+02, threshold=1.544e+02, percent-clipped=0.0 +2024-07-27 18:10:00,010 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.06 vs. limit=22.5 +2024-07-27 18:10:16,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=46197.333333333336, ans=0.125 +2024-07-27 18:10:21,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=46197.333333333336, ans=0.125 +2024-07-27 18:10:22,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=46197.333333333336, ans=0.125 +2024-07-27 18:10:23,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=46210.666666666664, ans=0.0 +2024-07-27 18:10:25,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=46210.666666666664, ans=0.5 +2024-07-27 18:10:30,482 INFO [train.py:1114] (3/4) Epoch 4, batch 4000, loss[loss=0.2207, simple_loss=0.3088, pruned_loss=0.06629, over 4773.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3365, pruned_loss=0.09512, over 941054.89 frames. ], batch size: 12, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:10:33,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=46224.0, ans=0.125 +2024-07-27 18:10:38,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0 +2024-07-27 18:10:39,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=46237.333333333336, ans=0.0 +2024-07-27 18:10:41,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=46237.333333333336, ans=0.125 +2024-07-27 18:10:54,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=46264.0, ans=0.125 +2024-07-27 18:10:55,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46264.0, ans=0.1 +2024-07-27 18:11:01,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=46277.333333333336, ans=0.025 +2024-07-27 18:11:02,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-07-27 18:11:06,049 INFO [train.py:1114] (3/4) Epoch 4, batch 4050, loss[loss=0.3229, simple_loss=0.377, pruned_loss=0.1344, over 3490.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3355, pruned_loss=0.09481, over 939198.59 frames. 
], batch size: 36, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:11:07,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.344e+01 6.516e+01 7.339e+01 8.508e+01 1.190e+02, threshold=1.468e+02, percent-clipped=0.0 +2024-07-27 18:11:08,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=46290.666666666664, ans=0.125 +2024-07-27 18:11:26,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=46317.333333333336, ans=0.0 +2024-07-27 18:11:30,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=46317.333333333336, ans=0.125 +2024-07-27 18:11:41,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.92 vs. limit=12.0 +2024-07-27 18:11:47,337 INFO [train.py:1114] (3/4) Epoch 4, batch 4100, loss[loss=0.3214, simple_loss=0.3841, pruned_loss=0.1294, over 4896.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3347, pruned_loss=0.0942, over 938391.56 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:11:48,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46357.333333333336, ans=0.1 +2024-07-27 18:12:01,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=46384.0, ans=0.125 +2024-07-27 18:12:01,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=46384.0, ans=0.1 +2024-07-27 18:12:04,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46384.0, ans=0.125 +2024-07-27 18:12:05,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0 +2024-07-27 18:12:11,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=16.38 vs. limit=15.0 +2024-07-27 18:12:19,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=46410.666666666664, ans=0.0007802898550724642 +2024-07-27 18:12:19,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46410.666666666664, ans=0.125 +2024-07-27 18:12:21,677 INFO [train.py:1114] (3/4) Epoch 4, batch 4150, loss[loss=0.2509, simple_loss=0.3261, pruned_loss=0.08788, over 4828.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3349, pruned_loss=0.09414, over 937733.83 frames. 
], batch size: 13, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:12:22,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46424.0, ans=0.125 +2024-07-27 18:12:22,994 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.177e+01 6.950e+01 8.086e+01 1.014e+02 1.411e+02, threshold=1.617e+02, percent-clipped=0.0 +2024-07-27 18:12:50,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=46464.0, ans=0.125 +2024-07-27 18:12:56,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=46477.333333333336, ans=15.0 +2024-07-27 18:13:01,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=46477.333333333336, ans=0.0007657971014492747 +2024-07-27 18:13:03,902 INFO [train.py:1114] (3/4) Epoch 4, batch 4200, loss[loss=0.2625, simple_loss=0.3471, pruned_loss=0.08894, over 4903.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3352, pruned_loss=0.09448, over 939579.89 frames. ], batch size: 15, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:13:04,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46490.666666666664, ans=0.1 +2024-07-27 18:13:05,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=46490.666666666664, ans=0.0007628985507246392 +2024-07-27 18:13:06,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.22 vs. limit=6.0 +2024-07-27 18:13:16,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.23 vs. limit=22.5 +2024-07-27 18:13:19,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=46517.333333333336, ans=0.125 +2024-07-27 18:13:22,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=46517.333333333336, ans=0.125 +2024-07-27 18:13:24,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46517.333333333336, ans=0.125 +2024-07-27 18:13:25,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=46530.666666666664, ans=0.1 +2024-07-27 18:13:34,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=46544.0, ans=0.125 +2024-07-27 18:13:35,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=46544.0, ans=0.125 +2024-07-27 18:13:39,601 INFO [train.py:1114] (3/4) Epoch 4, batch 4250, loss[loss=0.2239, simple_loss=0.2992, pruned_loss=0.07434, over 4628.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3366, pruned_loss=0.09515, over 940954.05 frames. 
], batch size: 12, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:13:40,929 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.597e+01 7.169e+01 7.931e+01 1.247e+02, threshold=1.434e+02, percent-clipped=0.0 +2024-07-27 18:13:56,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=46570.666666666664, ans=0.0 +2024-07-27 18:14:05,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=46584.0, ans=0.0 +2024-07-27 18:14:15,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.08 vs. limit=15.0 +2024-07-27 18:14:17,704 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:14:24,134 INFO [train.py:1114] (3/4) Epoch 4, batch 4300, loss[loss=0.254, simple_loss=0.3226, pruned_loss=0.09272, over 4756.00 frames. ], tot_loss[loss=0.2633, simple_loss=0.3363, pruned_loss=0.09515, over 940488.32 frames. ], batch size: 13, lr: 1.80e-02, grad_scale: 32.0 +2024-07-27 18:14:28,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.05 vs. limit=6.0 +2024-07-27 18:14:31,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=46637.333333333336, ans=0.0007310144927536229 +2024-07-27 18:14:39,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=46650.666666666664, ans=0.05 +2024-07-27 18:14:53,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=46677.333333333336, ans=0.0007223188405797096 +2024-07-27 18:14:53,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=46677.333333333336, ans=0.1 +2024-07-27 18:14:56,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=46690.666666666664, ans=0.0007194202898550741 +2024-07-27 18:14:57,359 INFO [train.py:1114] (3/4) Epoch 4, batch 4350, loss[loss=0.2637, simple_loss=0.345, pruned_loss=0.09122, over 4748.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3372, pruned_loss=0.09517, over 941283.36 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:14:58,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.449e+01 6.647e+01 7.749e+01 8.957e+01 1.514e+02, threshold=1.550e+02, percent-clipped=2.0 +2024-07-27 18:15:01,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=46690.666666666664, ans=0.125 +2024-07-27 18:15:13,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=46717.333333333336, ans=0.125 +2024-07-27 18:15:24,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.79 vs. 
limit=15.0 +2024-07-27 18:15:29,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=46744.0, ans=0.0007078260869565218 +2024-07-27 18:15:29,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.85 vs. limit=22.5 +2024-07-27 18:15:30,907 INFO [train.py:1114] (3/4) Epoch 4, batch 4400, loss[loss=0.2637, simple_loss=0.3428, pruned_loss=0.09228, over 4802.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3371, pruned_loss=0.0953, over 940812.48 frames. ], batch size: 14, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:15:31,105 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=8.324e+00 +2024-07-27 18:15:38,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.14 vs. limit=22.5 +2024-07-27 18:15:40,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=46770.666666666664, ans=0.0 +2024-07-27 18:15:42,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=46770.666666666664, ans=0.125 +2024-07-27 18:15:56,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.58 vs. limit=22.5 +2024-07-27 18:16:04,522 INFO [train.py:1114] (3/4) Epoch 4, batch 4450, loss[loss=0.2147, simple_loss=0.2846, pruned_loss=0.07241, over 4939.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3375, pruned_loss=0.09567, over 938987.69 frames. ], batch size: 12, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:16:05,841 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.574e+01 7.932e+01 1.004e+02 1.651e+02, threshold=1.586e+02, percent-clipped=3.0 +2024-07-27 18:16:11,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=46837.333333333336, ans=0.2 +2024-07-27 18:16:32,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.59 vs. limit=15.0 +2024-07-27 18:16:33,254 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:16:34,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=46877.333333333336, ans=0.125 +2024-07-27 18:16:37,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=46877.333333333336, ans=0.125 +2024-07-27 18:16:38,379 INFO [train.py:1114] (3/4) Epoch 4, batch 4500, loss[loss=0.2634, simple_loss=0.3324, pruned_loss=0.09721, over 4743.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3379, pruned_loss=0.0954, over 938195.41 frames. 
], batch size: 14, lr: 1.79e-02, grad_scale: 32.0 +2024-07-27 18:16:48,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=46904.0, ans=0.00067304347826087 +2024-07-27 18:16:58,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=46917.333333333336, ans=0.125 +2024-07-27 18:16:58,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=46917.333333333336, ans=0.025 +2024-07-27 18:17:15,576 INFO [train.py:1114] (3/4) Epoch 4, batch 4550, loss[loss=0.256, simple_loss=0.3358, pruned_loss=0.08809, over 4906.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3377, pruned_loss=0.09549, over 940106.91 frames. ], batch size: 13, lr: 1.79e-02, grad_scale: 16.0 +2024-07-27 18:17:17,510 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.369e+01 6.640e+01 7.268e+01 8.274e+01 1.292e+02, threshold=1.454e+02, percent-clipped=0.0 +2024-07-27 18:17:18,941 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:17:19,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=46957.333333333336, ans=0.0006614492753623177 +2024-07-27 18:17:22,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.28 vs. limit=10.0 +2024-07-27 18:17:32,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=46984.0, ans=0.0006556521739130433 +2024-07-27 18:17:47,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=47010.666666666664, ans=0.0 +2024-07-27 18:17:49,273 INFO [train.py:1114] (3/4) Epoch 4, batch 4600, loss[loss=0.2959, simple_loss=0.3775, pruned_loss=0.1072, over 4609.00 frames. ], tot_loss[loss=0.2642, simple_loss=0.3375, pruned_loss=0.09544, over 938171.31 frames. ], batch size: 21, lr: 1.79e-02, grad_scale: 16.0 +2024-07-27 18:17:50,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=47024.0, ans=0.05 +2024-07-27 18:17:54,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47024.0, ans=0.1 +2024-07-27 18:17:57,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=47037.333333333336, ans=0.125 +2024-07-27 18:18:03,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.88 vs. limit=15.0 +2024-07-27 18:18:11,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=47064.0, ans=0.0 +2024-07-27 18:18:22,635 INFO [train.py:1114] (3/4) Epoch 4, batch 4650, loss[loss=0.2242, simple_loss=0.3088, pruned_loss=0.06977, over 4833.00 frames. ], tot_loss[loss=0.2644, simple_loss=0.3378, pruned_loss=0.09551, over 939824.01 frames. 
], batch size: 16, lr: 1.79e-02, grad_scale: 8.0 +2024-07-27 18:18:25,329 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.210e+01 6.570e+01 7.431e+01 9.301e+01 1.835e+02, threshold=1.486e+02, percent-clipped=1.0 +2024-07-27 18:18:39,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=47104.0, ans=15.0 +2024-07-27 18:18:40,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=47104.0, ans=0.2 +2024-07-27 18:18:53,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=47117.333333333336, ans=0.1 +2024-07-27 18:19:04,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=47144.0, ans=0.125 +2024-07-27 18:19:05,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=47144.0, ans=0.0006208695652173916 +2024-07-27 18:19:10,950 INFO [train.py:1114] (3/4) Epoch 4, batch 4700, loss[loss=0.2288, simple_loss=0.3067, pruned_loss=0.07548, over 4707.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3359, pruned_loss=0.09454, over 937248.66 frames. ], batch size: 11, lr: 1.79e-02, grad_scale: 8.0 +2024-07-27 18:19:11,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47157.333333333336, ans=0.125 +2024-07-27 18:19:14,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.52 vs. limit=6.0 +2024-07-27 18:19:16,100 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-27 18:19:24,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=47184.0, ans=0.125 +2024-07-27 18:19:31,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=47197.333333333336, ans=0.2 +2024-07-27 18:19:35,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.87 vs. limit=6.0 +2024-07-27 18:19:35,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=47197.333333333336, ans=0.5 +2024-07-27 18:19:42,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.93 vs. limit=15.0 +2024-07-27 18:19:43,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47210.666666666664, ans=0.1 +2024-07-27 18:19:44,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=47224.0, ans=0.125 +2024-07-27 18:19:44,640 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.72 vs. limit=15.0 +2024-07-27 18:19:45,066 INFO [train.py:1114] (3/4) Epoch 4, batch 4750, loss[loss=0.2872, simple_loss=0.3626, pruned_loss=0.1059, over 4537.00 frames. 
], tot_loss[loss=0.2624, simple_loss=0.3355, pruned_loss=0.09467, over 935625.03 frames. ], batch size: 21, lr: 1.78e-02, grad_scale: 8.0 +2024-07-27 18:19:45,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=47224.0, ans=0.125 +2024-07-27 18:19:47,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.439e+01 7.166e+01 9.768e+01 1.474e+02, threshold=1.433e+02, percent-clipped=0.0 +2024-07-27 18:19:52,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=47237.333333333336, ans=0.0006005797101449259 +2024-07-27 18:19:53,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=47237.333333333336, ans=0.125 +2024-07-27 18:19:55,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=47237.333333333336, ans=0.125 +2024-07-27 18:20:01,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=47250.666666666664, ans=0.0005976811594202904 +2024-07-27 18:20:02,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 18:20:10,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=47264.0, ans=0.09899494936611666 +2024-07-27 18:20:14,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.76 vs. limit=15.0 +2024-07-27 18:20:19,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=47290.666666666664, ans=0.000588985507246377 +2024-07-27 18:20:19,554 INFO [train.py:1114] (3/4) Epoch 4, batch 4800, loss[loss=0.2606, simple_loss=0.3365, pruned_loss=0.09233, over 4694.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3352, pruned_loss=0.09471, over 933598.25 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:20:21,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.81 vs. limit=22.5 +2024-07-27 18:20:28,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=47304.0, ans=0.025 +2024-07-27 18:20:35,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=47317.333333333336, ans=0.04949747468305833 +2024-07-27 18:20:36,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=47317.333333333336, ans=0.2 +2024-07-27 18:20:45,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=47330.666666666664, ans=0.0 +2024-07-27 18:20:46,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47330.666666666664, ans=0.125 +2024-07-27 18:20:50,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.38 vs. 
limit=22.5 +2024-07-27 18:20:55,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=47344.0, ans=0.0 +2024-07-27 18:21:03,392 INFO [train.py:1114] (3/4) Epoch 4, batch 4850, loss[loss=0.2846, simple_loss=0.363, pruned_loss=0.1031, over 4749.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3357, pruned_loss=0.09483, over 933071.46 frames. ], batch size: 14, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:21:06,074 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.442e+01 7.162e+01 7.877e+01 1.649e+02, threshold=1.432e+02, percent-clipped=2.0 +2024-07-27 18:21:06,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=47357.333333333336, ans=0.05 +2024-07-27 18:21:17,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47384.0, ans=0.125 +2024-07-27 18:21:25,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47397.333333333336, ans=0.1 +2024-07-27 18:21:31,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=47410.666666666664, ans=0.2 +2024-07-27 18:21:32,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.62 vs. limit=15.0 +2024-07-27 18:21:37,087 INFO [train.py:1114] (3/4) Epoch 4, batch 4900, loss[loss=0.2616, simple_loss=0.3321, pruned_loss=0.09558, over 4758.00 frames. ], tot_loss[loss=0.2617, simple_loss=0.3352, pruned_loss=0.09413, over 934785.79 frames. ], batch size: 13, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:21:44,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=47437.333333333336, ans=0.125 +2024-07-27 18:21:51,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=47450.666666666664, ans=0.125 +2024-07-27 18:22:03,079 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.15 vs. limit=10.0 +2024-07-27 18:22:06,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.44 vs. limit=15.0 +2024-07-27 18:22:11,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=47477.333333333336, ans=0.0 +2024-07-27 18:22:14,197 INFO [train.py:1114] (3/4) Epoch 4, batch 4950, loss[loss=0.3228, simple_loss=0.3919, pruned_loss=0.1268, over 3471.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3368, pruned_loss=0.09535, over 931420.87 frames. 
], batch size: 35, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:22:16,768 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 6.647e+01 7.619e+01 9.936e+01 1.671e+02, threshold=1.524e+02, percent-clipped=3.0 +2024-07-27 18:22:26,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=47517.333333333336, ans=0.0 +2024-07-27 18:22:31,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=47517.333333333336, ans=0.2 +2024-07-27 18:22:34,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=47530.666666666664, ans=0.125 +2024-07-27 18:22:44,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=47544.0, ans=0.125 +2024-07-27 18:22:44,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=47544.0, ans=0.125 +2024-07-27 18:22:52,599 INFO [train.py:1114] (3/4) Epoch 4, batch 5000, loss[loss=0.2876, simple_loss=0.3628, pruned_loss=0.1062, over 4669.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3359, pruned_loss=0.095, over 935233.58 frames. ], batch size: 14, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:22:58,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=47557.333333333336, ans=0.125 +2024-07-27 18:23:02,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47570.666666666664, ans=0.1 +2024-07-27 18:23:09,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0 +2024-07-27 18:23:19,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=47610.666666666664, ans=0.125 +2024-07-27 18:23:26,403 INFO [train.py:1114] (3/4) Epoch 4, batch 5050, loss[loss=0.2387, simple_loss=0.3156, pruned_loss=0.08085, over 4852.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3356, pruned_loss=0.09428, over 937656.13 frames. 
], batch size: 12, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:23:29,105 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.671e+01 7.390e+01 9.030e+01 1.584e+02, threshold=1.478e+02, percent-clipped=1.0 +2024-07-27 18:23:37,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=47637.333333333336, ans=0.0 +2024-07-27 18:23:41,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=47650.666666666664, ans=0.125 +2024-07-27 18:23:42,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=47650.666666666664, ans=0.5 +2024-07-27 18:23:56,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=47677.333333333336, ans=0.125 +2024-07-27 18:23:57,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=47677.333333333336, ans=0.1 +2024-07-27 18:24:01,843 INFO [train.py:1114] (3/4) Epoch 4, batch 5100, loss[loss=0.2762, simple_loss=0.355, pruned_loss=0.09876, over 4773.00 frames. ], tot_loss[loss=0.263, simple_loss=0.3363, pruned_loss=0.09487, over 934849.51 frames. ], batch size: 12, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:24:04,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=47690.666666666664, ans=0.1 +2024-07-27 18:24:08,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.56 vs. limit=10.0 +2024-07-27 18:24:37,616 INFO [train.py:1114] (3/4) Epoch 4, batch 5150, loss[loss=0.274, simple_loss=0.3441, pruned_loss=0.102, over 4828.00 frames. ], tot_loss[loss=0.2655, simple_loss=0.3379, pruned_loss=0.09651, over 936039.08 frames. ], batch size: 16, lr: 1.78e-02, grad_scale: 16.0 +2024-07-27 18:24:40,254 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.251e+01 6.747e+01 7.591e+01 8.914e+01 1.388e+02, threshold=1.518e+02, percent-clipped=0.0 +2024-07-27 18:24:47,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=47770.666666666664, ans=0.125 +2024-07-27 18:24:50,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=47770.666666666664, ans=0.0004846376811594201 +2024-07-27 18:24:59,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.80 vs. limit=22.5 +2024-07-27 18:25:08,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=47810.666666666664, ans=0.05 +2024-07-27 18:25:09,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=47810.666666666664, ans=0.2 +2024-07-27 18:25:09,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.85 vs. limit=22.5 +2024-07-27 18:25:13,268 INFO [train.py:1114] (3/4) Epoch 4, batch 5200, loss[loss=0.2966, simple_loss=0.3684, pruned_loss=0.1124, over 4669.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3377, pruned_loss=0.09526, over 935861.73 frames. 
], batch size: 14, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:25:24,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=47837.333333333336, ans=0.2 +2024-07-27 18:25:31,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.59 vs. limit=22.5 +2024-07-27 18:25:47,789 INFO [train.py:1114] (3/4) Epoch 4, batch 5250, loss[loss=0.3024, simple_loss=0.3861, pruned_loss=0.1094, over 4888.00 frames. ], tot_loss[loss=0.262, simple_loss=0.3357, pruned_loss=0.09411, over 935935.33 frames. ], batch size: 13, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:25:48,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47890.666666666664, ans=0.1 +2024-07-27 18:25:50,443 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.322e+01 6.549e+01 7.419e+01 9.087e+01 1.892e+02, threshold=1.484e+02, percent-clipped=1.0 +2024-07-27 18:26:15,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=47944.0, ans=0.125 +2024-07-27 18:26:15,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=47944.0, ans=0.00044695652173913115 +2024-07-27 18:26:19,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=47944.0, ans=0.0 +2024-07-27 18:26:21,664 INFO [train.py:1114] (3/4) Epoch 4, batch 5300, loss[loss=0.2576, simple_loss=0.3363, pruned_loss=0.08943, over 4600.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3341, pruned_loss=0.09304, over 934875.97 frames. ], batch size: 16, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:26:23,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=47957.333333333336, ans=0.0 +2024-07-27 18:26:25,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=47957.333333333336, ans=0.1 +2024-07-27 18:26:27,117 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.59 vs. limit=10.0 +2024-07-27 18:26:29,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=47970.666666666664, ans=0.125 +2024-07-27 18:26:53,270 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:26:53,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=48010.666666666664, ans=0.125 +2024-07-27 18:26:57,393 INFO [train.py:1114] (3/4) Epoch 4, batch 5350, loss[loss=0.1994, simple_loss=0.2799, pruned_loss=0.05942, over 4521.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.335, pruned_loss=0.09304, over 936740.64 frames. 
], batch size: 10, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:27:00,010 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.313e+01 6.419e+01 7.171e+01 7.752e+01 1.208e+02, threshold=1.434e+02, percent-clipped=0.0 +2024-07-27 18:27:02,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=48024.0, ans=0.0 +2024-07-27 18:27:07,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=48037.333333333336, ans=0.125 +2024-07-27 18:27:10,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=48050.666666666664, ans=0.125 +2024-07-27 18:27:21,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48064.0, ans=0.1 +2024-07-27 18:27:24,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=48077.333333333336, ans=0.025 +2024-07-27 18:27:24,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.99 vs. limit=15.0 +2024-07-27 18:27:26,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=48077.333333333336, ans=0.1 +2024-07-27 18:27:27,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=48077.333333333336, ans=0.1 +2024-07-27 18:27:29,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=48077.333333333336, ans=0.0 +2024-07-27 18:27:30,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=48077.333333333336, ans=0.125 +2024-07-27 18:27:30,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=48090.666666666664, ans=0.125 +2024-07-27 18:27:31,382 INFO [train.py:1114] (3/4) Epoch 4, batch 5400, loss[loss=0.3188, simple_loss=0.3673, pruned_loss=0.1351, over 4201.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.337, pruned_loss=0.09519, over 930906.33 frames. ], batch size: 25, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:27:44,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.88 vs. limit=15.0 +2024-07-27 18:27:52,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.47 vs. limit=22.5 +2024-07-27 18:28:03,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=48144.0, ans=0.125 +2024-07-27 18:28:05,837 INFO [train.py:1114] (3/4) Epoch 4, batch 5450, loss[loss=0.2182, simple_loss=0.2977, pruned_loss=0.06939, over 4709.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.336, pruned_loss=0.09469, over 933793.11 frames. 
], batch size: 11, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:28:14,666 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.319e+01 6.320e+01 7.105e+01 8.639e+01 1.249e+02, threshold=1.421e+02, percent-clipped=0.0 +2024-07-27 18:28:22,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=48170.666666666664, ans=0.125 +2024-07-27 18:28:28,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=48184.0, ans=0.05 +2024-07-27 18:28:33,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=48184.0, ans=0.2 +2024-07-27 18:28:45,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=48210.666666666664, ans=0.125 +2024-07-27 18:28:50,525 INFO [train.py:1114] (3/4) Epoch 4, batch 5500, loss[loss=0.2512, simple_loss=0.3257, pruned_loss=0.08833, over 4221.00 frames. ], tot_loss[loss=0.2635, simple_loss=0.3363, pruned_loss=0.09539, over 930735.76 frames. ], batch size: 25, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:28:58,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=48237.333333333336, ans=0.2 +2024-07-27 18:29:02,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=48237.333333333336, ans=0.125 +2024-07-27 18:29:23,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48290.666666666664, ans=0.1 +2024-07-27 18:29:24,391 INFO [train.py:1114] (3/4) Epoch 4, batch 5550, loss[loss=0.2115, simple_loss=0.2896, pruned_loss=0.06668, over 4707.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3355, pruned_loss=0.09511, over 933007.90 frames. ], batch size: 12, lr: 1.77e-02, grad_scale: 32.0 +2024-07-27 18:29:27,165 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.976e+01 8.822e+01 1.148e+02 2.032e+02, threshold=1.764e+02, percent-clipped=8.0 +2024-07-27 18:29:32,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=48304.0, ans=0.07 +2024-07-27 18:29:40,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=48317.333333333336, ans=0.0 +2024-07-27 18:29:45,650 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:30:02,099 INFO [train.py:1114] (3/4) Epoch 4, batch 5600, loss[loss=0.2534, simple_loss=0.3406, pruned_loss=0.0831, over 4742.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3364, pruned_loss=0.09538, over 934155.91 frames. ], batch size: 14, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:30:04,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=48357.333333333336, ans=0.2 +2024-07-27 18:30:08,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.91 vs. 
limit=22.5 +2024-07-27 18:30:14,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=48370.666666666664, ans=0.125 +2024-07-27 18:30:15,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=48384.0, ans=0.125 +2024-07-27 18:30:25,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=48397.333333333336, ans=0.5 +2024-07-27 18:30:27,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=15.08 vs. limit=15.0 +2024-07-27 18:30:38,210 INFO [train.py:1114] (3/4) Epoch 4, batch 5650, loss[loss=0.2869, simple_loss=0.3662, pruned_loss=0.1038, over 4467.00 frames. ], tot_loss[loss=0.2627, simple_loss=0.3354, pruned_loss=0.095, over 936800.23 frames. ], batch size: 21, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:30:41,017 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.980e+01 6.257e+01 6.942e+01 8.186e+01 1.408e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 18:30:43,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=48424.0, ans=0.0 +2024-07-27 18:30:47,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48437.333333333336, ans=0.1 +2024-07-27 18:30:50,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=48437.333333333336, ans=0.1 +2024-07-27 18:30:51,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-07-27 18:31:00,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=48464.0, ans=0.00033391304347826084 +2024-07-27 18:31:09,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=48477.333333333336, ans=0.1 +2024-07-27 18:31:10,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.13 vs. limit=12.0 +2024-07-27 18:31:11,758 INFO [train.py:1114] (3/4) Epoch 4, batch 5700, loss[loss=0.2354, simple_loss=0.3272, pruned_loss=0.07179, over 4702.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3351, pruned_loss=0.09481, over 938313.77 frames. 
], batch size: 13, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:31:16,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=48490.666666666664, ans=0.125 +2024-07-27 18:31:18,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=48504.0, ans=0.125 +2024-07-27 18:31:21,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48504.0, ans=0.0 +2024-07-27 18:31:24,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=48504.0, ans=0.125 +2024-07-27 18:31:35,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=48530.666666666664, ans=0.00031942028985507305 +2024-07-27 18:31:37,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=48530.666666666664, ans=0.025 +2024-07-27 18:31:39,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=48544.0, ans=0.125 +2024-07-27 18:31:45,703 INFO [train.py:1114] (3/4) Epoch 4, batch 5750, loss[loss=0.2396, simple_loss=0.3169, pruned_loss=0.08111, over 4750.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3351, pruned_loss=0.09458, over 938492.09 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:31:51,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.193e+01 6.612e+01 7.726e+01 1.001e+02 1.887e+02, threshold=1.545e+02, percent-clipped=6.0 +2024-07-27 18:31:55,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=48570.666666666664, ans=0.0 +2024-07-27 18:32:05,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=48584.0, ans=0.04949747468305833 +2024-07-27 18:32:22,608 INFO [train.py:1114] (3/4) Epoch 4, batch 5800, loss[loss=0.2895, simple_loss=0.3623, pruned_loss=0.1084, over 4749.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3362, pruned_loss=0.09533, over 937251.00 frames. ], batch size: 19, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:32:22,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=48624.0, ans=0.5 +2024-07-27 18:32:31,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48637.333333333336, ans=0.0 +2024-07-27 18:32:32,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=48637.333333333336, ans=0.025 +2024-07-27 18:32:36,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.82 vs. limit=15.0 +2024-07-27 18:32:40,597 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.44 vs. 
limit=15.0 +2024-07-27 18:32:45,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.whiten.whitening_limit, batch_count=48664.0, ans=15.0 +2024-07-27 18:32:56,503 INFO [train.py:1114] (3/4) Epoch 4, batch 5850, loss[loss=0.2244, simple_loss=0.3122, pruned_loss=0.06831, over 4430.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.3361, pruned_loss=0.09577, over 937561.63 frames. ], batch size: 21, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:32:59,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.208e+01 6.444e+01 7.225e+01 8.494e+01 1.330e+02, threshold=1.445e+02, percent-clipped=0.0 +2024-07-27 18:33:05,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.86 vs. limit=22.5 +2024-07-27 18:33:06,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=48704.0, ans=0.125 +2024-07-27 18:33:15,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=48717.333333333336, ans=0.025 +2024-07-27 18:33:19,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=48717.333333333336, ans=0.00027884057971014516 +2024-07-27 18:33:25,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=48730.666666666664, ans=0.02 +2024-07-27 18:33:34,385 INFO [train.py:1114] (3/4) Epoch 4, batch 5900, loss[loss=0.2493, simple_loss=0.3345, pruned_loss=0.08201, over 4698.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3354, pruned_loss=0.0951, over 937901.70 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:33:43,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=48770.666666666664, ans=0.1 +2024-07-27 18:33:55,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=48784.0, ans=0.125 +2024-07-27 18:34:06,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=48810.666666666664, ans=0.0 +2024-07-27 18:34:09,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=48810.666666666664, ans=0.125 +2024-07-27 18:34:13,800 INFO [train.py:1114] (3/4) Epoch 4, batch 5950, loss[loss=0.2763, simple_loss=0.3546, pruned_loss=0.099, over 4684.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.334, pruned_loss=0.09378, over 939851.66 frames. ], batch size: 15, lr: 1.76e-02, grad_scale: 16.0 +2024-07-27 18:34:15,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=48824.0, ans=0.125 +2024-07-27 18:34:16,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. 
limit=15.0 +2024-07-27 18:34:17,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.376e+01 6.577e+01 7.476e+01 8.958e+01 1.675e+02, threshold=1.495e+02, percent-clipped=2.0 +2024-07-27 18:34:18,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=48824.0, ans=0.07 +2024-07-27 18:34:23,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=48837.333333333336, ans=0.2 +2024-07-27 18:34:26,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=48837.333333333336, ans=0.0 +2024-07-27 18:34:31,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=48850.666666666664, ans=0.125 +2024-07-27 18:34:34,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.61 vs. limit=22.5 +2024-07-27 18:34:36,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=48864.0, ans=0.125 +2024-07-27 18:34:38,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-07-27 18:34:40,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=48877.333333333336, ans=0.125 +2024-07-27 18:34:43,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=48877.333333333336, ans=0.125 +2024-07-27 18:34:46,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.97 vs. limit=10.0 +2024-07-27 18:34:47,628 INFO [train.py:1114] (3/4) Epoch 4, batch 6000, loss[loss=0.2998, simple_loss=0.3595, pruned_loss=0.12, over 4083.00 frames. ], tot_loss[loss=0.26, simple_loss=0.333, pruned_loss=0.09346, over 936752.31 frames. ], batch size: 25, lr: 1.76e-02, grad_scale: 32.0 +2024-07-27 18:34:47,628 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 18:34:54,090 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.8844, 2.5049, 3.3779, 3.8551, 3.7752, 3.2825, 3.4838, 2.7894], + device='cuda:3') +2024-07-27 18:35:03,513 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2107, simple_loss=0.3128, pruned_loss=0.05435, over 944034.00 frames. +2024-07-27 18:35:03,514 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 18:35:09,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=26.43 vs. 
limit=22.5 +2024-07-27 18:35:24,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=48930.666666666664, ans=0.125 +2024-07-27 18:35:26,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=48930.666666666664, ans=0.0 +2024-07-27 18:35:31,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48944.0, ans=0.1 +2024-07-27 18:35:34,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=48944.0, ans=0.125 +2024-07-27 18:35:36,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=48944.0, ans=0.1 +2024-07-27 18:35:37,388 INFO [train.py:1114] (3/4) Epoch 4, batch 6050, loss[loss=0.2544, simple_loss=0.3326, pruned_loss=0.08806, over 4781.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3324, pruned_loss=0.09354, over 938215.88 frames. ], batch size: 12, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:35:42,518 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.402e+01 6.393e+01 7.329e+01 8.400e+01 1.158e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 18:35:43,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=48957.333333333336, ans=0.125 +2024-07-27 18:35:57,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=48984.0, ans=0.125 +2024-07-27 18:36:08,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=49010.666666666664, ans=0.0 +2024-07-27 18:36:12,768 INFO [train.py:1114] (3/4) Epoch 4, batch 6100, loss[loss=0.3076, simple_loss=0.3794, pruned_loss=0.1179, over 4695.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3322, pruned_loss=0.09299, over 937718.61 frames. ], batch size: 15, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:36:12,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=49024.0, ans=0.125 +2024-07-27 18:36:13,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=49024.0, ans=0.125 +2024-07-27 18:36:17,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=49024.0, ans=0.125 +2024-07-27 18:36:25,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=49037.333333333336, ans=0.2 +2024-07-27 18:36:26,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.33 vs. limit=15.0 +2024-07-27 18:36:29,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.73 vs. 
limit=22.5 +2024-07-27 18:36:34,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=49064.0, ans=0.125 +2024-07-27 18:36:40,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=49077.333333333336, ans=0.0 +2024-07-27 18:36:44,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=49077.333333333336, ans=15.0 +2024-07-27 18:36:46,624 INFO [train.py:1114] (3/4) Epoch 4, batch 6150, loss[loss=0.3294, simple_loss=0.3795, pruned_loss=0.1396, over 3442.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3317, pruned_loss=0.09259, over 936848.61 frames. ], batch size: 36, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:36:50,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.101e+01 6.312e+01 7.204e+01 8.554e+01 1.450e+02, threshold=1.441e+02, percent-clipped=0.0 +2024-07-27 18:36:53,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=49104.0, ans=0.2 +2024-07-27 18:36:53,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49104.0, ans=0.125 +2024-07-27 18:37:05,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=49117.333333333336, ans=0.125 +2024-07-27 18:37:09,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.35 vs. limit=15.0 +2024-07-27 18:37:10,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=49130.666666666664, ans=0.2 +2024-07-27 18:37:20,687 INFO [train.py:1114] (3/4) Epoch 4, batch 6200, loss[loss=0.278, simple_loss=0.3653, pruned_loss=0.09533, over 4736.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3322, pruned_loss=0.09266, over 936553.68 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:37:21,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=49157.333333333336, ans=10.0 +2024-07-27 18:37:21,994 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=10.48 vs. limit=15.0 +2024-07-27 18:38:05,392 INFO [train.py:1114] (3/4) Epoch 4, batch 6250, loss[loss=0.2137, simple_loss=0.304, pruned_loss=0.06169, over 4811.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3323, pruned_loss=0.09263, over 932816.31 frames. ], batch size: 14, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:38:08,829 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.216e+01 6.578e+01 7.418e+01 8.909e+01 1.704e+02, threshold=1.484e+02, percent-clipped=3.0 +2024-07-27 18:38:15,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49237.333333333336, ans=0.1 +2024-07-27 18:38:25,369 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.48 vs. 
limit=15.0 +2024-07-27 18:38:33,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=49264.0, ans=0.125 +2024-07-27 18:38:42,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=49277.333333333336, ans=0.125 +2024-07-27 18:38:43,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49277.333333333336, ans=0.1 +2024-07-27 18:39:01,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.88 vs. limit=10.0 +2024-07-27 18:39:03,627 INFO [train.py:1114] (3/4) Epoch 4, batch 6300, loss[loss=0.1955, simple_loss=0.2777, pruned_loss=0.05661, over 4552.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3337, pruned_loss=0.09369, over 929514.69 frames. ], batch size: 10, lr: 1.75e-02, grad_scale: 16.0 +2024-07-27 18:39:11,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=21.33 vs. limit=15.0 +2024-07-27 18:39:12,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 18:39:19,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=49317.333333333336, ans=0.1 +2024-07-27 18:39:20,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49317.333333333336, ans=0.125 +2024-07-27 18:39:31,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.13 vs. limit=6.0 +2024-07-27 18:39:43,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=49344.0, ans=0.00014260869565217368 +2024-07-27 18:39:46,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=49344.0, ans=0.125 +2024-07-27 18:39:49,716 INFO [train.py:1114] (3/4) Epoch 4, batch 6350, loss[loss=0.2605, simple_loss=0.332, pruned_loss=0.09455, over 4565.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.334, pruned_loss=0.0935, over 933557.49 frames. ], batch size: 21, lr: 1.75e-02, grad_scale: 16.0 +2024-07-27 18:39:56,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49357.333333333336, ans=0.125 +2024-07-27 18:40:02,484 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.450e+01 6.143e+01 6.766e+01 7.753e+01 2.111e+02, threshold=1.353e+02, percent-clipped=1.0 +2024-07-27 18:40:17,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.80 vs. limit=15.0 +2024-07-27 18:40:20,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49384.0, ans=0.1 +2024-07-27 18:40:22,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.76 vs. 
limit=22.5 +2024-07-27 18:40:33,696 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=4.064e+00 +2024-07-27 18:40:35,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=49397.333333333336, ans=0.025 +2024-07-27 18:40:48,897 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=15.65 vs. limit=15.0 +2024-07-27 18:41:04,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=49410.666666666664, ans=0.125 +2024-07-27 18:41:05,490 INFO [train.py:1114] (3/4) Epoch 4, batch 6400, loss[loss=0.2746, simple_loss=0.3518, pruned_loss=0.09873, over 4639.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3335, pruned_loss=0.09366, over 934929.10 frames. ], batch size: 13, lr: 1.75e-02, grad_scale: 32.0 +2024-07-27 18:41:15,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49437.333333333336, ans=0.125 +2024-07-27 18:41:39,069 INFO [train.py:1114] (3/4) Epoch 4, batch 6450, loss[loss=0.2833, simple_loss=0.3475, pruned_loss=0.1096, over 4649.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3338, pruned_loss=0.09337, over 938658.96 frames. ], batch size: 22, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:41:42,983 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.447e+01 6.416e+01 7.153e+01 7.876e+01 1.277e+02, threshold=1.431e+02, percent-clipped=0.0 +2024-07-27 18:41:44,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=49490.666666666664, ans=0.0 +2024-07-27 18:41:51,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=49504.0, ans=0.125 +2024-07-27 18:41:51,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.78 vs. limit=15.0 +2024-07-27 18:41:52,506 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:41:58,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=49517.333333333336, ans=0.04949747468305833 +2024-07-27 18:42:03,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49517.333333333336, ans=0.125 +2024-07-27 18:42:09,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=49530.666666666664, ans=0.125 +2024-07-27 18:42:19,510 INFO [train.py:1114] (3/4) Epoch 4, batch 6500, loss[loss=0.3607, simple_loss=0.4039, pruned_loss=0.1588, over 3454.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3337, pruned_loss=0.09316, over 940088.60 frames. 
], batch size: 35, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:42:32,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49570.666666666664, ans=0.1 +2024-07-27 18:42:49,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49597.333333333336, ans=0.1 +2024-07-27 18:42:54,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=49597.333333333336, ans=0.1 +2024-07-27 18:43:10,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49610.666666666664, ans=0.125 +2024-07-27 18:43:14,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=49610.666666666664, ans=0.5 +2024-07-27 18:43:18,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.70 vs. limit=15.0 +2024-07-27 18:43:18,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=49610.666666666664, ans=0.0 +2024-07-27 18:43:19,877 INFO [train.py:1114] (3/4) Epoch 4, batch 6550, loss[loss=0.2465, simple_loss=0.32, pruned_loss=0.08648, over 4808.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3327, pruned_loss=0.09216, over 942862.55 frames. ], batch size: 11, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:43:23,932 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.133e+01 6.247e+01 6.814e+01 7.966e+01 1.482e+02, threshold=1.363e+02, percent-clipped=1.0 +2024-07-27 18:43:37,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=49650.666666666664, ans=0.0 +2024-07-27 18:43:42,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=49664.0, ans=0.125 +2024-07-27 18:43:54,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49664.0, ans=0.125 +2024-07-27 18:44:02,855 INFO [train.py:1114] (3/4) Epoch 4, batch 6600, loss[loss=0.2841, simple_loss=0.3652, pruned_loss=0.1014, over 4936.00 frames. ], tot_loss[loss=0.259, simple_loss=0.333, pruned_loss=0.09247, over 944755.97 frames. ], batch size: 14, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:44:05,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=49690.666666666664, ans=0.2 +2024-07-27 18:44:08,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49690.666666666664, ans=0.125 +2024-07-27 18:44:17,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.30 vs. limit=15.0 +2024-07-27 18:44:35,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=49730.666666666664, ans=0.0 +2024-07-27 18:44:43,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.30 vs. 
limit=15.0 +2024-07-27 18:44:51,845 INFO [train.py:1114] (3/4) Epoch 4, batch 6650, loss[loss=0.2776, simple_loss=0.3585, pruned_loss=0.09836, over 4626.00 frames. ], tot_loss[loss=0.2587, simple_loss=0.3323, pruned_loss=0.09258, over 943265.03 frames. ], batch size: 17, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:45:01,621 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.604e+01 6.574e+01 7.387e+01 9.385e+01 1.471e+02, threshold=1.477e+02, percent-clipped=2.0 +2024-07-27 18:45:02,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.max_abs, batch_count=49757.333333333336, ans=10.0 +2024-07-27 18:45:02,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=49757.333333333336, ans=0.125 +2024-07-27 18:45:30,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=49810.666666666664, ans=0.2 +2024-07-27 18:45:36,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=49824.0, ans=0.025 +2024-07-27 18:45:37,466 INFO [train.py:1114] (3/4) Epoch 4, batch 6700, loss[loss=0.2595, simple_loss=0.3352, pruned_loss=0.09188, over 4751.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3334, pruned_loss=0.09292, over 942165.95 frames. ], batch size: 19, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:45:57,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-07-27 18:45:59,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=49864.0, ans=0.0 +2024-07-27 18:46:02,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.80 vs. limit=15.0 +2024-07-27 18:46:20,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49877.333333333336, ans=0.125 +2024-07-27 18:46:21,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=49877.333333333336, ans=0.125 +2024-07-27 18:46:26,241 INFO [train.py:1114] (3/4) Epoch 4, batch 6750, loss[loss=0.2922, simple_loss=0.3639, pruned_loss=0.1102, over 4156.00 frames. ], tot_loss[loss=0.2606, simple_loss=0.3344, pruned_loss=0.09339, over 940340.19 frames. 
], batch size: 25, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:46:27,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=49890.666666666664, ans=0.04949747468305833 +2024-07-27 18:46:29,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=49890.666666666664, ans=0.125 +2024-07-27 18:46:30,209 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.238e+01 6.545e+01 7.445e+01 9.250e+01 1.508e+02, threshold=1.489e+02, percent-clipped=1.0 +2024-07-27 18:46:31,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=49890.666666666664, ans=2.3768115942030682e-05 +2024-07-27 18:46:31,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=49890.666666666664, ans=0.2 +2024-07-27 18:46:32,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=49904.0, ans=0.1 +2024-07-27 18:46:35,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=49904.0, ans=0.125 +2024-07-27 18:46:49,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=49930.666666666664, ans=0.125 +2024-07-27 18:46:51,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=49930.666666666664, ans=0.125 +2024-07-27 18:46:56,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=49944.0, ans=0.125 +2024-07-27 18:47:01,351 INFO [train.py:1114] (3/4) Epoch 4, batch 6800, loss[loss=0.2704, simple_loss=0.3482, pruned_loss=0.0963, over 4641.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3345, pruned_loss=0.09326, over 938791.73 frames. ], batch size: 13, lr: 1.74e-02, grad_scale: 32.0 +2024-07-27 18:47:01,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=49957.333333333336, ans=0.125 +2024-07-27 18:47:20,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=12.0 +2024-07-27 18:47:37,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.32 vs. limit=15.0 +2024-07-27 18:47:58,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=49997.333333333336, ans=0.125 +2024-07-27 18:48:37,374 INFO [train.py:1114] (3/4) Epoch 4, batch 6850, loss[loss=0.2312, simple_loss=0.3129, pruned_loss=0.07475, over 4696.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3349, pruned_loss=0.09394, over 940490.22 frames. ], batch size: 13, lr: 1.74e-02, grad_scale: 16.0 +2024-07-27 18:48:42,390 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.490e+01 7.044e+01 8.185e+01 1.640e+02, threshold=1.409e+02, percent-clipped=3.0 +2024-07-27 18:48:44,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.14 vs. 
limit=6.0 +2024-07-27 18:49:06,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.23 vs. limit=22.5 +2024-07-27 18:49:10,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.whiten.whitening_limit, batch_count=50050.666666666664, ans=12.0 +2024-07-27 18:49:10,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.50 vs. limit=15.0 +2024-07-27 18:49:11,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=50050.666666666664, ans=0.125 +2024-07-27 18:49:12,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.37 vs. limit=15.0 +2024-07-27 18:49:18,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.31 vs. limit=22.5 +2024-07-27 18:49:20,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.40 vs. limit=15.0 +2024-07-27 18:49:43,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=50077.333333333336, ans=0.125 +2024-07-27 18:49:46,659 INFO [train.py:1114] (3/4) Epoch 4, batch 6900, loss[loss=0.2461, simple_loss=0.3046, pruned_loss=0.09376, over 4974.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3352, pruned_loss=0.09399, over 942941.65 frames. ], batch size: 13, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:49:47,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=50090.666666666664, ans=0.125 +2024-07-27 18:50:04,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=50117.333333333336, ans=0.125 +2024-07-27 18:50:39,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=50117.333333333336, ans=0.0 +2024-07-27 18:50:41,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=50130.666666666664, ans=0.125 +2024-07-27 18:50:41,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.97 vs. limit=15.0 +2024-07-27 18:50:54,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=50144.0, ans=0.04949747468305833 +2024-07-27 18:50:56,895 INFO [train.py:1114] (3/4) Epoch 4, batch 6950, loss[loss=0.2438, simple_loss=0.3125, pruned_loss=0.08748, over 4557.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3346, pruned_loss=0.09369, over 940662.36 frames. 
], batch size: 10, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:50:59,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=50157.333333333336, ans=0.0 +2024-07-27 18:51:01,452 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.625e+01 7.241e+01 8.326e+01 1.274e+02, threshold=1.448e+02, percent-clipped=0.0 +2024-07-27 18:51:02,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.00 vs. limit=15.0 +2024-07-27 18:51:07,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=50170.666666666664, ans=0.125 +2024-07-27 18:51:09,279 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:51:09,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.82 vs. limit=6.0 +2024-07-27 18:51:18,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=50197.333333333336, ans=0.125 +2024-07-27 18:51:18,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=50197.333333333336, ans=0.025 +2024-07-27 18:51:22,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.41 vs. limit=15.0 +2024-07-27 18:51:31,305 INFO [train.py:1114] (3/4) Epoch 4, batch 7000, loss[loss=0.3328, simple_loss=0.3848, pruned_loss=0.1404, over 4611.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3347, pruned_loss=0.09399, over 938982.51 frames. ], batch size: 17, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:51:42,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-27 18:51:42,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=50237.333333333336, ans=0.2 +2024-07-27 18:51:58,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=50277.333333333336, ans=0.125 +2024-07-27 18:52:05,900 INFO [train.py:1114] (3/4) Epoch 4, batch 7050, loss[loss=0.2838, simple_loss=0.3489, pruned_loss=0.1093, over 4694.00 frames. ], tot_loss[loss=0.2612, simple_loss=0.3345, pruned_loss=0.09391, over 942019.92 frames. ], batch size: 19, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:52:06,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50290.666666666664, ans=0.1 +2024-07-27 18:52:10,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=50290.666666666664, ans=0.0 +2024-07-27 18:52:10,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.88 vs. 
limit=15.0 +2024-07-27 18:52:10,848 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.329e+01 6.665e+01 7.548e+01 9.503e+01 1.584e+02, threshold=1.510e+02, percent-clipped=1.0 +2024-07-27 18:52:15,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.22 vs. limit=12.0 +2024-07-27 18:52:17,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=50304.0, ans=0.025 +2024-07-27 18:52:20,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=50317.333333333336, ans=0.0 +2024-07-27 18:52:23,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.90 vs. limit=15.0 +2024-07-27 18:52:31,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=50330.666666666664, ans=0.035 +2024-07-27 18:52:41,457 INFO [train.py:1114] (3/4) Epoch 4, batch 7100, loss[loss=0.251, simple_loss=0.3358, pruned_loss=0.08311, over 4799.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3348, pruned_loss=0.09406, over 937368.18 frames. ], batch size: 15, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:52:43,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=50357.333333333336, ans=0.025 +2024-07-27 18:52:44,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=50357.333333333336, ans=0.2 +2024-07-27 18:52:57,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=50384.0, ans=0.0 +2024-07-27 18:53:11,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50410.666666666664, ans=0.1 +2024-07-27 18:53:14,538 INFO [train.py:1114] (3/4) Epoch 4, batch 7150, loss[loss=0.3217, simple_loss=0.387, pruned_loss=0.1282, over 4488.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3338, pruned_loss=0.09377, over 938149.81 frames. ], batch size: 21, lr: 1.73e-02, grad_scale: 16.0 +2024-07-27 18:53:18,907 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 6.686e+01 7.675e+01 9.181e+01 1.338e+02, threshold=1.535e+02, percent-clipped=0.0 +2024-07-27 18:53:21,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=50437.333333333336, ans=0.2 +2024-07-27 18:53:41,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50477.333333333336, ans=0.1 +2024-07-27 18:53:47,311 INFO [train.py:1114] (3/4) Epoch 4, batch 7200, loss[loss=0.2897, simple_loss=0.3668, pruned_loss=0.1064, over 4801.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.334, pruned_loss=0.09376, over 938073.62 frames. 
], batch size: 15, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:53:50,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=50490.666666666664, ans=0.125 +2024-07-27 18:54:00,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=50517.333333333336, ans=0.125 +2024-07-27 18:54:09,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0 +2024-07-27 18:54:10,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=50530.666666666664, ans=0.035 +2024-07-27 18:54:20,060 INFO [train.py:1114] (3/4) Epoch 4, batch 7250, loss[loss=0.2121, simple_loss=0.291, pruned_loss=0.06655, over 4861.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.333, pruned_loss=0.0937, over 939534.71 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:54:21,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=50557.333333333336, ans=0.04949747468305833 +2024-07-27 18:54:22,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=50557.333333333336, ans=0.125 +2024-07-27 18:54:24,522 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.173e+01 6.374e+01 7.128e+01 8.077e+01 1.230e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 18:54:29,780 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:54:37,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=50584.0, ans=0.025 +2024-07-27 18:54:45,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.87 vs. limit=10.0 +2024-07-27 18:54:46,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=50610.666666666664, ans=0.125 +2024-07-27 18:54:52,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=50624.0, ans=0.07 +2024-07-27 18:54:52,631 INFO [train.py:1114] (3/4) Epoch 4, batch 7300, loss[loss=0.2275, simple_loss=0.3085, pruned_loss=0.07323, over 4845.00 frames. ], tot_loss[loss=0.2597, simple_loss=0.3328, pruned_loss=0.09332, over 939867.02 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:54:56,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=50624.0, ans=0.0 +2024-07-27 18:54:58,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=50624.0, ans=0.125 +2024-07-27 18:55:03,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=50637.333333333336, ans=0.0 +2024-07-27 18:55:08,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=50650.666666666664, ans=0.125 +2024-07-27 18:55:22,794 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.60 vs. 
limit=22.5 +2024-07-27 18:55:24,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=50677.333333333336, ans=0.0 +2024-07-27 18:55:25,720 INFO [train.py:1114] (3/4) Epoch 4, batch 7350, loss[loss=0.2441, simple_loss=0.3085, pruned_loss=0.0899, over 4638.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3335, pruned_loss=0.09353, over 939211.00 frames. ], batch size: 12, lr: 1.73e-02, grad_scale: 32.0 +2024-07-27 18:55:30,229 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.562e+01 7.152e+01 9.266e+01 1.352e+02, threshold=1.430e+02, percent-clipped=0.0 +2024-07-27 18:55:31,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=50704.0, ans=0.125 +2024-07-27 18:55:45,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=50730.666666666664, ans=0.1 +2024-07-27 18:55:49,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=50730.666666666664, ans=0.025 +2024-07-27 18:55:58,347 INFO [train.py:1114] (3/4) Epoch 4, batch 7400, loss[loss=0.3094, simple_loss=0.3656, pruned_loss=0.1266, over 4688.00 frames. ], tot_loss[loss=0.261, simple_loss=0.334, pruned_loss=0.09397, over 940333.77 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:56:02,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=50757.333333333336, ans=0.125 +2024-07-27 18:56:10,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=50770.666666666664, ans=0.125 +2024-07-27 18:56:12,314 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:56:14,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=50784.0, ans=0.0 +2024-07-27 18:56:21,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=50797.333333333336, ans=0.125 +2024-07-27 18:56:28,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-27 18:56:32,547 INFO [train.py:1114] (3/4) Epoch 4, batch 7450, loss[loss=0.2641, simple_loss=0.3228, pruned_loss=0.1027, over 4608.00 frames. ], tot_loss[loss=0.2591, simple_loss=0.3318, pruned_loss=0.09318, over 938198.19 frames. ], batch size: 11, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:56:37,130 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.488e+01 6.489e+01 7.278e+01 8.154e+01 1.203e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 18:56:48,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.74 vs. 
limit=10.0 +2024-07-27 18:56:49,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=50850.666666666664, ans=0.125 +2024-07-27 18:56:57,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=50864.0, ans=0.0 +2024-07-27 18:57:01,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=50877.333333333336, ans=0.1 +2024-07-27 18:57:04,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=50877.333333333336, ans=0.1 +2024-07-27 18:57:06,489 INFO [train.py:1114] (3/4) Epoch 4, batch 7500, loss[loss=0.3141, simple_loss=0.3612, pruned_loss=0.1335, over 3372.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.3319, pruned_loss=0.09265, over 936476.69 frames. ], batch size: 35, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:57:07,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=50890.666666666664, ans=0.0 +2024-07-27 18:57:13,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=50904.0, ans=0.125 +2024-07-27 18:57:21,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.03 vs. limit=22.5 +2024-07-27 18:57:28,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=50930.666666666664, ans=0.0 +2024-07-27 18:57:28,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.93 vs. limit=22.5 +2024-07-27 18:57:39,288 INFO [train.py:1114] (3/4) Epoch 4, batch 7550, loss[loss=0.2538, simple_loss=0.3305, pruned_loss=0.0886, over 4570.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3332, pruned_loss=0.09287, over 936563.17 frames. ], batch size: 17, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:57:42,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50957.333333333336, ans=0.1 +2024-07-27 18:57:45,383 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 18:57:46,432 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.472e+01 6.522e+01 7.335e+01 8.635e+01 1.380e+02, threshold=1.467e+02, percent-clipped=0.0 +2024-07-27 18:57:47,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=50970.666666666664, ans=0.0 +2024-07-27 18:57:52,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=50970.666666666664, ans=0.125 +2024-07-27 18:58:11,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.99 vs. 
limit=22.5 +2024-07-27 18:58:15,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=50997.333333333336, ans=0.125 +2024-07-27 18:58:16,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=50997.333333333336, ans=0.1 +2024-07-27 18:58:20,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=51010.666666666664, ans=0.0 +2024-07-27 18:58:23,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-27 18:58:24,742 INFO [train.py:1114] (3/4) Epoch 4, batch 7600, loss[loss=0.2556, simple_loss=0.3302, pruned_loss=0.09046, over 4800.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3312, pruned_loss=0.09159, over 938246.56 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:58:30,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.57 vs. limit=15.0 +2024-07-27 18:58:39,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=51037.333333333336, ans=0.0 +2024-07-27 18:58:46,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=51037.333333333336, ans=0.125 +2024-07-27 18:58:49,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=51050.666666666664, ans=0.125 +2024-07-27 18:59:03,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=51077.333333333336, ans=0.125 +2024-07-27 18:59:06,956 INFO [train.py:1114] (3/4) Epoch 4, batch 7650, loss[loss=0.2666, simple_loss=0.321, pruned_loss=0.1061, over 4940.00 frames. ], tot_loss[loss=0.2593, simple_loss=0.3328, pruned_loss=0.09287, over 937278.69 frames. ], batch size: 12, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:59:13,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.235e+01 6.494e+01 7.893e+01 8.811e+01 1.540e+02, threshold=1.579e+02, percent-clipped=3.0 +2024-07-27 18:59:22,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=51104.0, ans=0.0 +2024-07-27 18:59:22,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=51104.0, ans=0.0 +2024-07-27 18:59:31,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.92 vs. limit=10.0 +2024-07-27 18:59:35,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-27 18:59:35,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.12 vs. limit=15.0 +2024-07-27 18:59:39,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.79 vs. 
limit=10.0 +2024-07-27 18:59:44,100 INFO [train.py:1114] (3/4) Epoch 4, batch 7700, loss[loss=0.27, simple_loss=0.3515, pruned_loss=0.09421, over 4690.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3325, pruned_loss=0.09265, over 934403.41 frames. ], batch size: 13, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 18:59:55,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.90 vs. limit=22.5 +2024-07-27 18:59:58,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.64 vs. limit=15.0 +2024-07-27 18:59:58,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.16 vs. limit=15.0 +2024-07-27 18:59:59,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51184.0, ans=0.1 +2024-07-27 19:00:00,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=51184.0, ans=0.0 +2024-07-27 19:00:15,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.17 vs. limit=15.0 +2024-07-27 19:00:19,723 INFO [train.py:1114] (3/4) Epoch 4, batch 7750, loss[loss=0.2944, simple_loss=0.3644, pruned_loss=0.1122, over 4920.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3338, pruned_loss=0.09323, over 935810.06 frames. ], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 19:00:20,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=51224.0, ans=0.0 +2024-07-27 19:00:24,700 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.378e+01 6.531e+01 7.452e+01 8.452e+01 1.344e+02, threshold=1.490e+02, percent-clipped=0.0 +2024-07-27 19:00:26,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=51237.333333333336, ans=0.0 +2024-07-27 19:01:02,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=51264.0, ans=0.125 +2024-07-27 19:01:03,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=51264.0, ans=0.125 +2024-07-27 19:01:06,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=51264.0, ans=0.0 +2024-07-27 19:01:12,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0 +2024-07-27 19:01:19,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=51277.333333333336, ans=0.0 +2024-07-27 19:01:21,759 INFO [train.py:1114] (3/4) Epoch 4, batch 7800, loss[loss=0.2815, simple_loss=0.3633, pruned_loss=0.09983, over 4669.00 frames. ], tot_loss[loss=0.2603, simple_loss=0.3342, pruned_loss=0.09324, over 937174.60 frames. 
], batch size: 14, lr: 1.72e-02, grad_scale: 32.0 +2024-07-27 19:01:21,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=51290.666666666664, ans=0.125 +2024-07-27 19:01:25,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=51290.666666666664, ans=0.0 +2024-07-27 19:01:34,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.78 vs. limit=15.0 +2024-07-27 19:01:40,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=51330.666666666664, ans=0.125 +2024-07-27 19:01:43,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=51330.666666666664, ans=0.125 +2024-07-27 19:01:45,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=51330.666666666664, ans=0.0 +2024-07-27 19:01:57,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=51344.0, ans=0.125 +2024-07-27 19:01:59,105 INFO [train.py:1114] (3/4) Epoch 4, batch 7850, loss[loss=0.217, simple_loss=0.2904, pruned_loss=0.07184, over 4520.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3347, pruned_loss=0.09392, over 936386.88 frames. ], batch size: 10, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:02:04,739 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.164e+01 6.243e+01 7.019e+01 7.976e+01 1.332e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 19:02:07,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=51357.333333333336, ans=0.125 +2024-07-27 19:02:16,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=12.0 +2024-07-27 19:02:27,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=51384.0, ans=0.125 +2024-07-27 19:02:37,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=51410.666666666664, ans=0.125 +2024-07-27 19:02:41,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.80 vs. limit=15.0 +2024-07-27 19:02:43,540 INFO [train.py:1114] (3/4) Epoch 4, batch 7900, loss[loss=0.2795, simple_loss=0.3535, pruned_loss=0.1028, over 4877.00 frames. ], tot_loss[loss=0.2604, simple_loss=0.3345, pruned_loss=0.09308, over 933301.50 frames. 
], batch size: 14, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:02:48,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=51424.0, ans=0.0 +2024-07-27 19:03:19,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=51477.333333333336, ans=0.025 +2024-07-27 19:03:20,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=51477.333333333336, ans=0.125 +2024-07-27 19:03:25,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.52 vs. limit=22.5 +2024-07-27 19:03:25,613 INFO [train.py:1114] (3/4) Epoch 4, batch 7950, loss[loss=0.3176, simple_loss=0.3661, pruned_loss=0.1346, over 3554.00 frames. ], tot_loss[loss=0.2598, simple_loss=0.3339, pruned_loss=0.09291, over 935412.96 frames. ], batch size: 35, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:03:28,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=51490.666666666664, ans=0.125 +2024-07-27 19:03:30,100 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.275e+01 6.617e+01 8.169e+01 1.040e+02 2.019e+02, threshold=1.634e+02, percent-clipped=10.0 +2024-07-27 19:03:46,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 19:03:47,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51530.666666666664, ans=0.1 +2024-07-27 19:03:54,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=51544.0, ans=0.0 +2024-07-27 19:04:01,617 INFO [train.py:1114] (3/4) Epoch 4, batch 8000, loss[loss=0.275, simple_loss=0.3313, pruned_loss=0.1093, over 4601.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3324, pruned_loss=0.09258, over 934333.89 frames. ], batch size: 11, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:04:11,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=51570.666666666664, ans=0.0 +2024-07-27 19:04:12,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=51570.666666666664, ans=0.125 +2024-07-27 19:04:22,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.09 vs. limit=15.0 +2024-07-27 19:04:39,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=51610.666666666664, ans=0.125 +2024-07-27 19:04:43,562 INFO [train.py:1114] (3/4) Epoch 4, batch 8050, loss[loss=0.2879, simple_loss=0.3547, pruned_loss=0.1105, over 4817.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3331, pruned_loss=0.09282, over 934225.62 frames. 
], batch size: 14, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:04:46,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=51624.0, ans=0.2 +2024-07-27 19:04:48,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.358e+01 7.394e+01 8.578e+01 1.528e+02, threshold=1.479e+02, percent-clipped=0.0 +2024-07-27 19:04:48,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=51624.0, ans=0.0 +2024-07-27 19:05:07,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=51664.0, ans=0.1 +2024-07-27 19:05:10,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=51677.333333333336, ans=0.1 +2024-07-27 19:05:11,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.91 vs. limit=22.5 +2024-07-27 19:05:20,004 INFO [train.py:1114] (3/4) Epoch 4, batch 8100, loss[loss=0.2931, simple_loss=0.367, pruned_loss=0.1096, over 4800.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3345, pruned_loss=0.09323, over 934072.50 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:05:23,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=51690.666666666664, ans=0.125 +2024-07-27 19:05:23,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=51690.666666666664, ans=0.125 +2024-07-27 19:05:24,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=51690.666666666664, ans=0.0 +2024-07-27 19:05:30,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=51704.0, ans=0.125 +2024-07-27 19:05:33,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.38 vs. limit=15.0 +2024-07-27 19:05:57,495 INFO [train.py:1114] (3/4) Epoch 4, batch 8150, loss[loss=0.2213, simple_loss=0.3092, pruned_loss=0.06675, over 4819.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3321, pruned_loss=0.09144, over 937369.46 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:06:01,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.81 vs. limit=15.0 +2024-07-27 19:06:02,175 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.306e+01 6.341e+01 7.110e+01 7.968e+01 1.215e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 19:06:04,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=51770.666666666664, ans=0.0 +2024-07-27 19:06:04,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=51770.666666666664, ans=0.125 +2024-07-27 19:06:07,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.69 vs. 
limit=6.0 +2024-07-27 19:06:08,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.27 vs. limit=22.5 +2024-07-27 19:06:09,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=51770.666666666664, ans=0.09899494936611666 +2024-07-27 19:06:09,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=51770.666666666664, ans=0.0 +2024-07-27 19:06:17,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.95 vs. limit=22.5 +2024-07-27 19:06:24,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51810.666666666664, ans=0.125 +2024-07-27 19:06:26,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=51810.666666666664, ans=0.0 +2024-07-27 19:06:28,136 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.49 vs. limit=6.0 +2024-07-27 19:06:28,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.36 vs. limit=22.5 +2024-07-27 19:06:32,399 INFO [train.py:1114] (3/4) Epoch 4, batch 8200, loss[loss=0.2757, simple_loss=0.3387, pruned_loss=0.1063, over 4810.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3328, pruned_loss=0.09207, over 938511.88 frames. ], batch size: 15, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:06:33,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=51824.0, ans=0.0 +2024-07-27 19:06:40,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=51837.333333333336, ans=0.0 +2024-07-27 19:06:42,800 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:06:51,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=51864.0, ans=0.125 +2024-07-27 19:06:52,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=51864.0, ans=0.0 +2024-07-27 19:06:53,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=51864.0, ans=0.0 +2024-07-27 19:06:57,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=51864.0, ans=0.025 +2024-07-27 19:06:58,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=51877.333333333336, ans=0.1 +2024-07-27 19:06:59,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=51877.333333333336, ans=0.125 +2024-07-27 19:07:04,512 INFO [train.py:1114] (3/4) Epoch 4, batch 8250, loss[loss=0.2706, simple_loss=0.3507, pruned_loss=0.09524, over 4890.00 frames. ], tot_loss[loss=0.2586, simple_loss=0.333, pruned_loss=0.09213, over 938471.27 frames. 
], batch size: 13, lr: 1.71e-02, grad_scale: 32.0 +2024-07-27 19:07:08,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.62 vs. limit=10.0 +2024-07-27 19:07:09,005 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.190e+01 7.037e+01 8.392e+01 1.258e+02, threshold=1.407e+02, percent-clipped=0.0 +2024-07-27 19:07:15,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=51904.0, ans=0.125 +2024-07-27 19:07:17,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=51917.333333333336, ans=0.0 +2024-07-27 19:07:35,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=51944.0, ans=0.125 +2024-07-27 19:07:37,260 INFO [train.py:1114] (3/4) Epoch 4, batch 8300, loss[loss=0.2984, simple_loss=0.3854, pruned_loss=0.1057, over 4900.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.3353, pruned_loss=0.09343, over 938317.96 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:07:42,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=51957.333333333336, ans=0.125 +2024-07-27 19:07:46,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=51970.666666666664, ans=0.125 +2024-07-27 19:07:47,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=51970.666666666664, ans=0.0 +2024-07-27 19:08:11,099 INFO [train.py:1114] (3/4) Epoch 4, batch 8350, loss[loss=0.2662, simple_loss=0.347, pruned_loss=0.09273, over 4788.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3336, pruned_loss=0.09258, over 941317.94 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:08:14,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=52024.0, ans=0.125 +2024-07-27 19:08:15,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 6.472e+01 7.036e+01 8.315e+01 1.538e+02, threshold=1.407e+02, percent-clipped=2.0 +2024-07-27 19:08:35,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=52064.0, ans=0.025 +2024-07-27 19:08:37,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=52077.333333333336, ans=0.0 +2024-07-27 19:08:38,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=52077.333333333336, ans=0.0 +2024-07-27 19:08:43,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=25.13 vs. limit=22.5 +2024-07-27 19:08:44,800 INFO [train.py:1114] (3/4) Epoch 4, batch 8400, loss[loss=0.2518, simple_loss=0.3268, pruned_loss=0.0884, over 4772.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3338, pruned_loss=0.09252, over 939823.69 frames. 
], batch size: 12, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:08:47,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52090.666666666664, ans=0.1 +2024-07-27 19:08:56,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52104.0, ans=0.125 +2024-07-27 19:08:57,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=52104.0, ans=0.0 +2024-07-27 19:09:19,576 INFO [train.py:1114] (3/4) Epoch 4, batch 8450, loss[loss=0.3272, simple_loss=0.3946, pruned_loss=0.1299, over 4805.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3351, pruned_loss=0.09313, over 938698.79 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:09:22,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=52157.333333333336, ans=0.125 +2024-07-27 19:09:24,026 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.442e+01 6.588e+01 7.394e+01 8.228e+01 1.463e+02, threshold=1.479e+02, percent-clipped=1.0 +2024-07-27 19:09:27,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=52170.666666666664, ans=0.125 +2024-07-27 19:09:42,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=12.0 +2024-07-27 19:09:51,715 INFO [train.py:1114] (3/4) Epoch 4, batch 8500, loss[loss=0.1947, simple_loss=0.2706, pruned_loss=0.05944, over 4619.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3335, pruned_loss=0.09226, over 938488.27 frames. ], batch size: 11, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:09:54,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=52224.0, ans=0.125 +2024-07-27 19:09:59,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52237.333333333336, ans=0.1 +2024-07-27 19:10:23,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52264.0, ans=0.1 +2024-07-27 19:10:25,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=52277.333333333336, ans=0.2 +2024-07-27 19:10:25,295 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.87 vs. limit=15.0 +2024-07-27 19:10:35,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52290.666666666664, ans=0.1 +2024-07-27 19:10:35,615 INFO [train.py:1114] (3/4) Epoch 4, batch 8550, loss[loss=0.1855, simple_loss=0.2691, pruned_loss=0.05097, over 4799.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3327, pruned_loss=0.09168, over 939389.81 frames. 
], batch size: 11, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:10:40,185 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.270e+01 6.908e+01 7.613e+01 1.129e+02, threshold=1.382e+02, percent-clipped=0.0 +2024-07-27 19:10:46,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52304.0, ans=0.1 +2024-07-27 19:10:49,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.45 vs. limit=10.0 +2024-07-27 19:10:50,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.79 vs. limit=22.5 +2024-07-27 19:10:56,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=52330.666666666664, ans=0.125 +2024-07-27 19:11:00,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=52330.666666666664, ans=0.0 +2024-07-27 19:11:02,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=52344.0, ans=0.2 +2024-07-27 19:11:07,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=52344.0, ans=0.2 +2024-07-27 19:11:09,054 INFO [train.py:1114] (3/4) Epoch 4, batch 8600, loss[loss=0.2482, simple_loss=0.3229, pruned_loss=0.0868, over 4795.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3311, pruned_loss=0.09101, over 939062.89 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:11:10,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=52357.333333333336, ans=0.2 +2024-07-27 19:11:36,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.89 vs. limit=15.0 +2024-07-27 19:11:41,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.86 vs. limit=15.0 +2024-07-27 19:11:43,015 INFO [train.py:1114] (3/4) Epoch 4, batch 8650, loss[loss=0.2762, simple_loss=0.3385, pruned_loss=0.107, over 4895.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3305, pruned_loss=0.09049, over 940246.99 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:11:54,069 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.219e+01 6.477e+01 7.280e+01 8.362e+01 1.223e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 19:12:01,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=52437.333333333336, ans=0.0 +2024-07-27 19:12:03,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52450.666666666664, ans=0.125 +2024-07-27 19:12:05,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=52450.666666666664, ans=0.0 +2024-07-27 19:12:08,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.16 vs. 
limit=22.5 +2024-07-27 19:12:10,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=52464.0, ans=0.2 +2024-07-27 19:12:11,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=52464.0, ans=0.125 +2024-07-27 19:12:12,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=52464.0, ans=0.0 +2024-07-27 19:12:19,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=52477.333333333336, ans=0.125 +2024-07-27 19:12:21,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=52477.333333333336, ans=0.0 +2024-07-27 19:12:24,876 INFO [train.py:1114] (3/4) Epoch 4, batch 8700, loss[loss=0.27, simple_loss=0.3471, pruned_loss=0.0965, over 4763.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3317, pruned_loss=0.09175, over 937910.52 frames. ], batch size: 13, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:12:29,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=52490.666666666664, ans=0.125 +2024-07-27 19:12:39,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=52517.333333333336, ans=0.125 +2024-07-27 19:12:44,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=52530.666666666664, ans=0.125 +2024-07-27 19:12:48,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=52530.666666666664, ans=0.1 +2024-07-27 19:12:55,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=52544.0, ans=0.0 +2024-07-27 19:13:01,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=52544.0, ans=0.0 +2024-07-27 19:13:03,650 INFO [train.py:1114] (3/4) Epoch 4, batch 8750, loss[loss=0.2512, simple_loss=0.3519, pruned_loss=0.07522, over 4686.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3311, pruned_loss=0.09132, over 936588.92 frames. ], batch size: 15, lr: 1.70e-02, grad_scale: 32.0 +2024-07-27 19:16:51,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.136e+01 6.508e+01 7.367e+01 8.337e+01 1.242e+02, threshold=1.473e+02, percent-clipped=0.0 +2024-07-27 19:17:00,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52570.666666666664, ans=0.1 +2024-07-27 19:17:13,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=52570.666666666664, ans=0.125 +2024-07-27 19:17:14,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=52570.666666666664, ans=0.125 +2024-07-27 19:17:26,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=52597.333333333336, ans=0.125 +2024-07-27 19:17:27,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.37 vs. 
limit=15.0 +2024-07-27 19:17:28,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=52597.333333333336, ans=0.09899494936611666 +2024-07-27 19:17:38,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.42 vs. limit=15.0 +2024-07-27 19:17:38,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=52597.333333333336, ans=0.125 +2024-07-27 19:17:40,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=52597.333333333336, ans=0.2 +2024-07-27 19:17:48,787 INFO [train.py:1114] (3/4) Epoch 4, batch 8800, loss[loss=0.2777, simple_loss=0.3549, pruned_loss=0.1002, over 4932.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3312, pruned_loss=0.091, over 936930.99 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 32.0 +2024-07-27 19:17:52,841 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:18:31,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=52677.333333333336, ans=0.125 +2024-07-27 19:18:34,296 INFO [train.py:1114] (3/4) Epoch 4, batch 8850, loss[loss=0.2682, simple_loss=0.3461, pruned_loss=0.09514, over 4440.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3319, pruned_loss=0.09182, over 930943.87 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:18:38,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.151e+01 6.204e+01 6.999e+01 8.264e+01 1.249e+02, threshold=1.400e+02, percent-clipped=0.0 +2024-07-27 19:18:40,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=52690.666666666664, ans=0.125 +2024-07-27 19:18:43,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=52704.0, ans=0.125 +2024-07-27 19:18:54,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=52704.0, ans=0.125 +2024-07-27 19:19:03,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52730.666666666664, ans=0.1 +2024-07-27 19:19:09,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.97 vs. limit=12.0 +2024-07-27 19:19:15,447 INFO [train.py:1114] (3/4) Epoch 4, batch 8900, loss[loss=0.2371, simple_loss=0.3099, pruned_loss=0.08213, over 4948.00 frames. ], tot_loss[loss=0.2601, simple_loss=0.3337, pruned_loss=0.09323, over 929243.35 frames. 
], batch size: 12, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:19:17,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52757.333333333336, ans=0.1 +2024-07-27 19:19:20,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=52757.333333333336, ans=0.125 +2024-07-27 19:19:26,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=52770.666666666664, ans=0.0 +2024-07-27 19:19:31,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52784.0, ans=0.1 +2024-07-27 19:19:34,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.17 vs. limit=15.0 +2024-07-27 19:19:36,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=52797.333333333336, ans=0.025 +2024-07-27 19:19:38,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.79 vs. limit=22.5 +2024-07-27 19:19:41,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=52797.333333333336, ans=0.125 +2024-07-27 19:19:41,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=52797.333333333336, ans=0.025 +2024-07-27 19:19:44,329 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:19:49,279 INFO [train.py:1114] (3/4) Epoch 4, batch 8950, loss[loss=0.2603, simple_loss=0.3352, pruned_loss=0.09272, over 4439.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3321, pruned_loss=0.09214, over 930394.26 frames. ], batch size: 21, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:19:53,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=52824.0, ans=0.1 +2024-07-27 19:19:59,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.556e+01 6.546e+01 7.266e+01 8.543e+01 1.301e+02, threshold=1.453e+02, percent-clipped=0.0 +2024-07-27 19:20:01,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=52837.333333333336, ans=15.0 +2024-07-27 19:20:03,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=8.98 vs. limit=10.0 +2024-07-27 19:20:06,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52837.333333333336, ans=0.1 +2024-07-27 19:20:18,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.31 vs. 
limit=15.0 +2024-07-27 19:20:21,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=52864.0, ans=0.0 +2024-07-27 19:20:28,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=52877.333333333336, ans=0.1 +2024-07-27 19:20:29,462 INFO [train.py:1114] (3/4) Epoch 4, batch 9000, loss[loss=0.2542, simple_loss=0.3256, pruned_loss=0.0914, over 4649.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.3301, pruned_loss=0.09115, over 933527.93 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:20:29,462 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 19:20:48,918 INFO [train.py:1146] (3/4) Epoch 4, validation: loss=0.2088, simple_loss=0.3114, pruned_loss=0.05305, over 944034.00 frames. +2024-07-27 19:20:48,919 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 19:20:51,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=52890.666666666664, ans=0.0 +2024-07-27 19:20:58,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=52904.0, ans=0.0 +2024-07-27 19:21:06,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=52917.333333333336, ans=0.1 +2024-07-27 19:21:16,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=52930.666666666664, ans=0.2 +2024-07-27 19:21:18,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=52930.666666666664, ans=0.125 +2024-07-27 19:21:19,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=52930.666666666664, ans=0.125 +2024-07-27 19:21:27,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=52944.0, ans=0.125 +2024-07-27 19:21:31,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=52944.0, ans=0.2 +2024-07-27 19:21:41,737 INFO [train.py:1114] (3/4) Epoch 4, batch 9050, loss[loss=0.1981, simple_loss=0.2801, pruned_loss=0.05804, over 4518.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3298, pruned_loss=0.09077, over 933931.05 frames. ], batch size: 10, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:21:41,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=52957.333333333336, ans=0.0 +2024-07-27 19:21:46,105 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.336e+01 6.460e+01 7.493e+01 8.562e+01 1.240e+02, threshold=1.499e+02, percent-clipped=0.0 +2024-07-27 19:22:14,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.16 vs. limit=12.0 +2024-07-27 19:22:16,423 INFO [train.py:1114] (3/4) Epoch 4, batch 9100, loss[loss=0.2583, simple_loss=0.3407, pruned_loss=0.08789, over 4921.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3301, pruned_loss=0.09039, over 936433.19 frames. 
], batch size: 14, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:22:21,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.55 vs. limit=22.5 +2024-07-27 19:22:23,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=53037.333333333336, ans=0.2 +2024-07-27 19:22:29,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53050.666666666664, ans=0.1 +2024-07-27 19:22:48,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=53050.666666666664, ans=0.125 +2024-07-27 19:22:56,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=53064.0, ans=0.0 +2024-07-27 19:23:04,834 INFO [train.py:1114] (3/4) Epoch 4, batch 9150, loss[loss=0.2812, simple_loss=0.3483, pruned_loss=0.107, over 4808.00 frames. ], tot_loss[loss=0.2582, simple_loss=0.3324, pruned_loss=0.092, over 935336.47 frames. ], batch size: 14, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:23:11,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.280e+01 7.131e+01 8.307e+01 1.469e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 19:23:14,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=53104.0, ans=0.09899494936611666 +2024-07-27 19:23:19,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=53117.333333333336, ans=0.125 +2024-07-27 19:23:21,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.69 vs. limit=22.5 +2024-07-27 19:23:27,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=53130.666666666664, ans=10.0 +2024-07-27 19:23:31,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=53144.0, ans=0.125 +2024-07-27 19:23:35,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=53144.0, ans=0.125 +2024-07-27 19:23:38,854 INFO [train.py:1114] (3/4) Epoch 4, batch 9200, loss[loss=0.2196, simple_loss=0.3047, pruned_loss=0.0672, over 4858.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3314, pruned_loss=0.09145, over 937086.95 frames. ], batch size: 12, lr: 1.69e-02, grad_scale: 64.0 +2024-07-27 19:23:40,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=53157.333333333336, ans=0.0 +2024-07-27 19:23:40,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53157.333333333336, ans=0.1 +2024-07-27 19:23:43,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=53157.333333333336, ans=10.0 +2024-07-27 19:23:48,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.08 vs. 
limit=22.5 +2024-07-27 19:23:55,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53184.0, ans=0.125 +2024-07-27 19:24:02,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53197.333333333336, ans=0.1 +2024-07-27 19:24:02,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53197.333333333336, ans=0.0 +2024-07-27 19:24:10,829 INFO [train.py:1114] (3/4) Epoch 4, batch 9250, loss[loss=0.3005, simple_loss=0.3723, pruned_loss=0.1144, over 4629.00 frames. ], tot_loss[loss=0.257, simple_loss=0.331, pruned_loss=0.09155, over 938033.92 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:24:14,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.64 vs. limit=15.0 +2024-07-27 19:24:15,279 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.609e+01 7.603e+01 9.259e+01 1.699e+02, threshold=1.521e+02, percent-clipped=1.0 +2024-07-27 19:24:40,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=53264.0, ans=0.125 +2024-07-27 19:24:42,983 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:24:52,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=53277.333333333336, ans=0.125 +2024-07-27 19:24:53,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=53290.666666666664, ans=0.125 +2024-07-27 19:24:53,698 INFO [train.py:1114] (3/4) Epoch 4, batch 9300, loss[loss=0.2101, simple_loss=0.2928, pruned_loss=0.06373, over 4771.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3313, pruned_loss=0.09129, over 938162.59 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:24:56,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=53290.666666666664, ans=0.2 +2024-07-27 19:25:03,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.60 vs. limit=10.0 +2024-07-27 19:25:05,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.83 vs. 
limit=15.0 +2024-07-27 19:25:07,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53317.333333333336, ans=0.1 +2024-07-27 19:25:09,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=53317.333333333336, ans=0.125 +2024-07-27 19:25:12,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=53330.666666666664, ans=0.125 +2024-07-27 19:25:21,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53330.666666666664, ans=0.125 +2024-07-27 19:25:38,983 INFO [train.py:1114] (3/4) Epoch 4, batch 9350, loss[loss=0.2351, simple_loss=0.3038, pruned_loss=0.08318, over 4812.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3315, pruned_loss=0.0913, over 934902.05 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:25:39,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=53357.333333333336, ans=0.1 +2024-07-27 19:25:40,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.76 vs. limit=12.0 +2024-07-27 19:25:40,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=53357.333333333336, ans=0.125 +2024-07-27 19:25:43,192 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.072e+01 6.213e+01 6.915e+01 8.745e+01 1.555e+02, threshold=1.383e+02, percent-clipped=1.0 +2024-07-27 19:25:43,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=53357.333333333336, ans=0.0 +2024-07-27 19:25:48,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.64 vs. limit=6.0 +2024-07-27 19:25:57,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=53384.0, ans=0.125 +2024-07-27 19:25:59,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=53384.0, ans=0.2 +2024-07-27 19:26:10,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.40 vs. limit=15.0 +2024-07-27 19:26:14,178 INFO [train.py:1114] (3/4) Epoch 4, batch 9400, loss[loss=0.2543, simple_loss=0.3254, pruned_loss=0.09161, over 4688.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.3308, pruned_loss=0.09138, over 933199.80 frames. ], batch size: 13, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:26:19,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.99 vs. 
limit=22.5 +2024-07-27 19:26:20,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=53437.333333333336, ans=0.0 +2024-07-27 19:26:25,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=53437.333333333336, ans=0.0 +2024-07-27 19:26:27,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.52 vs. limit=6.0 +2024-07-27 19:26:41,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=53477.333333333336, ans=0.0 +2024-07-27 19:26:47,552 INFO [train.py:1114] (3/4) Epoch 4, batch 9450, loss[loss=0.2374, simple_loss=0.3048, pruned_loss=0.08504, over 4789.00 frames. ], tot_loss[loss=0.2568, simple_loss=0.331, pruned_loss=0.09132, over 932271.12 frames. ], batch size: 11, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:26:53,647 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.978e+01 6.050e+01 6.667e+01 7.624e+01 1.196e+02, threshold=1.333e+02, percent-clipped=0.0 +2024-07-27 19:27:01,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.81 vs. limit=22.5 +2024-07-27 19:27:06,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=53517.333333333336, ans=0.125 +2024-07-27 19:27:20,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=53544.0, ans=0.125 +2024-07-27 19:27:20,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=53544.0, ans=0.04949747468305833 +2024-07-27 19:27:21,643 INFO [train.py:1114] (3/4) Epoch 4, batch 9500, loss[loss=0.18, simple_loss=0.2549, pruned_loss=0.05262, over 4695.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3294, pruned_loss=0.09036, over 934535.42 frames. ], batch size: 12, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:27:27,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=53570.666666666664, ans=0.02 +2024-07-27 19:27:31,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=53570.666666666664, ans=0.125 +2024-07-27 19:27:42,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.50 vs. limit=6.0 +2024-07-27 19:27:50,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.37 vs. limit=15.0 +2024-07-27 19:27:54,669 INFO [train.py:1114] (3/4) Epoch 4, batch 9550, loss[loss=0.2047, simple_loss=0.2861, pruned_loss=0.06162, over 4775.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.329, pruned_loss=0.09015, over 932068.58 frames. 
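The recurring `WARNING [optim.py:487] ... grad-norm quartiles ... threshold=..., percent-clipped=...` lines summarize the distribution of recent per-batch gradient norms: five values (min, 25%, median, 75%, max), a threshold that in every instance above equals `Clipping_scale` times the median (e.g. 2.0 × 6.667e+01 = 1.333e+02), and the share of batches whose norm exceeded it. A small sketch that reproduces the format of these lines — an illustration of the reported statistics, not the optimizer's actual code path:

```python
# Reconstruct the "grad-norm quartiles ... threshold=..." warning
# format from a batch of recent gradient norms. The threshold rule
# (clipping_scale * median) is inferred from the printed numbers.
import torch

def summarize_grad_norms(norms: torch.Tensor, clipping_scale: float = 2.0) -> None:
    qs = [torch.quantile(norms, q).item() for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
    threshold = clipping_scale * qs[2]  # 2.0 x median, matching the logs
    pct = 100.0 * (norms > threshold).float().mean().item()
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          + " ".join(f"{q:.3e}" for q in qs)
          + f", threshold={threshold:.3e}, percent-clipped={pct}")

# the five values from the warning above, fed back in
summarize_grad_norms(torch.tensor([49.78, 60.50, 66.67, 76.24, 119.6]))
```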
], batch size: 12, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:27:57,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=53624.0, ans=0.0 +2024-07-27 19:27:58,966 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 6.641e+01 7.346e+01 8.353e+01 1.240e+02, threshold=1.469e+02, percent-clipped=0.0 +2024-07-27 19:28:02,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=53637.333333333336, ans=0.125 +2024-07-27 19:28:17,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=53650.666666666664, ans=0.125 +2024-07-27 19:28:33,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=53677.333333333336, ans=0.0 +2024-07-27 19:28:39,656 INFO [train.py:1114] (3/4) Epoch 4, batch 9600, loss[loss=0.3338, simple_loss=0.3769, pruned_loss=0.1454, over 3398.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3305, pruned_loss=0.09085, over 930921.31 frames. ], batch size: 35, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:28:48,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=53704.0, ans=0.125 +2024-07-27 19:29:07,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.02 vs. limit=22.5 +2024-07-27 19:29:11,823 INFO [train.py:1114] (3/4) Epoch 4, batch 9650, loss[loss=0.2505, simple_loss=0.3205, pruned_loss=0.09025, over 4837.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3308, pruned_loss=0.09149, over 926388.23 frames. ], batch size: 16, lr: 1.68e-02, grad_scale: 64.0 +2024-07-27 19:29:17,124 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:29:19,465 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.095e+01 6.349e+01 7.028e+01 7.935e+01 1.425e+02, threshold=1.406e+02, percent-clipped=0.0 +2024-07-27 19:29:20,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=53757.333333333336, ans=0.125 +2024-07-27 19:29:21,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=12.30 vs. limit=15.0 +2024-07-27 19:29:51,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.13 vs. limit=10.0 +2024-07-27 19:29:52,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=53810.666666666664, ans=0.125 +2024-07-27 19:29:57,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=53824.0, ans=0.125 +2024-07-27 19:29:57,619 INFO [train.py:1114] (3/4) Epoch 4, batch 9700, loss[loss=0.3186, simple_loss=0.3725, pruned_loss=0.1323, over 4156.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3319, pruned_loss=0.09249, over 923838.37 frames. 
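Each train.py record pairs a per-batch `loss[... over ~5000 frames]` with a running `tot_loss[... over ~9e5 frames]`; the second behaves like a frame-weighted average over many recent batches, which is why it moves slowly while the per-batch figure jumps around. A hedged sketch of one way to maintain such a statistic — the decay constant is an assumption for illustration, not the script's actual bookkeeping:

```python
# One possible running, frame-weighted loss average of the kind the
# "tot_loss[...]" field reports. The decay factor is assumed.
class RunningLoss:
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        # old statistics fade out; the new batch is weighted by its frames
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)
```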
], batch size: 25, lr: 1.68e-02, grad_scale: 32.0 +2024-07-27 19:30:00,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=53824.0, ans=0.1 +2024-07-27 19:30:08,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=53837.333333333336, ans=0.035 +2024-07-27 19:30:12,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=53837.333333333336, ans=0.0 +2024-07-27 19:30:15,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.36 vs. limit=10.0 +2024-07-27 19:30:17,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=53850.666666666664, ans=0.2 +2024-07-27 19:30:17,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=53850.666666666664, ans=0.5 +2024-07-27 19:30:31,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=53877.333333333336, ans=0.0 +2024-07-27 19:30:31,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.08 vs. limit=15.0 +2024-07-27 19:30:31,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=53877.333333333336, ans=0.1 +2024-07-27 19:30:33,407 INFO [train.py:1114] (3/4) Epoch 4, batch 9750, loss[loss=0.2732, simple_loss=0.3454, pruned_loss=0.1005, over 4677.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.332, pruned_loss=0.09226, over 924588.28 frames. ], batch size: 15, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:30:38,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=53890.666666666664, ans=0.0 +2024-07-27 19:30:41,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=53890.666666666664, ans=0.125 +2024-07-27 19:30:42,342 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 6.435e+01 7.103e+01 8.018e+01 1.499e+02, threshold=1.421e+02, percent-clipped=1.0 +2024-07-27 19:30:44,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.29 vs. limit=15.0 +2024-07-27 19:30:52,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.34 vs. limit=15.0 +2024-07-27 19:30:54,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=53917.333333333336, ans=0.125 +2024-07-27 19:30:55,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=53930.666666666664, ans=0.125 +2024-07-27 19:31:09,154 INFO [train.py:1114] (3/4) Epoch 4, batch 9800, loss[loss=0.2526, simple_loss=0.3336, pruned_loss=0.08577, over 4702.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3314, pruned_loss=0.09175, over 924430.34 frames. 
], batch size: 12, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:31:09,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.30 vs. limit=15.0 +2024-07-27 19:31:12,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=53957.333333333336, ans=0.04949747468305833 +2024-07-27 19:31:25,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=53984.0, ans=0.125 +2024-07-27 19:31:27,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=53984.0, ans=0.1 +2024-07-27 19:31:29,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.26 vs. limit=10.0 +2024-07-27 19:31:40,671 INFO [train.py:1114] (3/4) Epoch 4, batch 9850, loss[loss=0.2569, simple_loss=0.3352, pruned_loss=0.08929, over 4899.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3315, pruned_loss=0.09161, over 927269.78 frames. ], batch size: 15, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:31:46,037 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.505e+01 6.777e+01 7.966e+01 9.401e+01 1.769e+02, threshold=1.593e+02, percent-clipped=1.0 +2024-07-27 19:31:53,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=54037.333333333336, ans=0.125 +2024-07-27 19:32:17,531 INFO [train.py:1114] (3/4) Epoch 4, batch 9900, loss[loss=0.2565, simple_loss=0.331, pruned_loss=0.09099, over 4860.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3334, pruned_loss=0.0925, over 927144.38 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 16.0 +2024-07-27 19:32:18,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=54090.666666666664, ans=0.125 +2024-07-27 19:32:23,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.33 vs. limit=10.0 +2024-07-27 19:32:29,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=54104.0, ans=0.125 +2024-07-27 19:32:29,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=54104.0, ans=0.0 +2024-07-27 19:32:31,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=54104.0, ans=0.0 +2024-07-27 19:32:36,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=54117.333333333336, ans=0.0 +2024-07-27 19:32:43,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.31 vs. 
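The `grad_scale` field drops from 64.0 to 32.0 around batch 9700 and to 16.0 by batch 9900 before recovering to 32.0 at batch 10000 — the signature of dynamic loss scaling under mixed precision: the scale is halved whenever an inf/nan gradient is detected and grown back after a stretch of clean steps. A sketch with assumed constants:

```python
# Minimal dynamic loss-scaling logic of the kind implied by the
# changing "grad_scale: 64.0 / 32.0 / 16.0" fields. The growth
# interval and factors are illustrative assumptions.
class GradScaler:
    def __init__(self, init_scale: float = 64.0, growth_interval: int = 2000):
        self.scale = init_scale
        self.growth_interval = growth_interval
        self._clean_steps = 0

    def update(self, found_overflow: bool) -> None:
        if found_overflow:
            self.scale *= 0.5      # back off quickly on inf/nan grads
            self._clean_steps = 0
        else:
            self._clean_steps += 1
            if self._clean_steps % self.growth_interval == 0:
                self.scale *= 2.0  # grow cautiously once stable

scaler = GradScaler()
for overflow in [False] * 10 + [True]:
    scaler.update(overflow)
print(scaler.scale)  # 32.0 after one overflow
```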
limit=10.0 +2024-07-27 19:32:47,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54130.666666666664, ans=0.125 +2024-07-27 19:33:03,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=54144.0, ans=0.025 +2024-07-27 19:33:14,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=54157.333333333336, ans=0.125 +2024-07-27 19:33:15,477 INFO [train.py:1114] (3/4) Epoch 4, batch 9950, loss[loss=0.2471, simple_loss=0.3037, pruned_loss=0.09522, over 4543.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.333, pruned_loss=0.09303, over 929065.29 frames. ], batch size: 10, lr: 1.67e-02, grad_scale: 16.0 +2024-07-27 19:33:20,988 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.075e+01 6.568e+01 7.447e+01 8.780e+01 1.338e+02, threshold=1.489e+02, percent-clipped=0.0 +2024-07-27 19:33:27,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=54170.666666666664, ans=0.0 +2024-07-27 19:33:33,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.50 vs. limit=22.5 +2024-07-27 19:33:46,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=54210.666666666664, ans=0.0 +2024-07-27 19:33:51,644 INFO [train.py:1114] (3/4) Epoch 4, batch 10000, loss[loss=0.2597, simple_loss=0.3553, pruned_loss=0.08201, over 4642.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3353, pruned_loss=0.09366, over 926782.43 frames. ], batch size: 16, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:33:59,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=54237.333333333336, ans=0.125 +2024-07-27 19:34:07,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=54237.333333333336, ans=0.2 +2024-07-27 19:34:09,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.68 vs. limit=22.5 +2024-07-27 19:34:35,779 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:34:36,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-07-27 19:34:39,363 INFO [train.py:1114] (3/4) Epoch 4, batch 10050, loss[loss=0.323, simple_loss=0.3823, pruned_loss=0.1318, over 3315.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3413, pruned_loss=0.09801, over 914740.22 frames. 
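The `Whitening: name=..., metric=... vs. limit=...` lines fire when a module's activations drift away from an approximately white, isotropic covariance; the metric is compared against a scheduled limit, and exceeding it triggers a corrective signal. A rough sketch of one standard whiteness statistic — the exact formula in scaling.py may differ:

```python
# A whiteness metric that is ~1.0 when the channel covariance is close
# to a multiple of the identity and grows when a few directions
# dominate. Illustrative; not necessarily scaling.py's exact formula.
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    # mean squared eigenvalue over squared mean eigenvalue
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

print(whitening_metric(torch.randn(1000, 512)))  # close to 1.0
```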
], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:34:40,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=54290.666666666664, ans=0.0 +2024-07-27 19:34:40,850 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:34:44,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=54290.666666666664, ans=0.125 +2024-07-27 19:34:45,412 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 6.968e+01 7.682e+01 9.310e+01 1.537e+02, threshold=1.536e+02, percent-clipped=1.0 +2024-07-27 19:35:05,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=54330.666666666664, ans=0.0 +2024-07-27 19:35:07,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=54344.0, ans=0.0 +2024-07-27 19:35:14,125 INFO [train.py:1114] (3/4) Epoch 4, batch 10100, loss[loss=0.3357, simple_loss=0.3775, pruned_loss=0.1469, over 3038.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3487, pruned_loss=0.1065, over 860213.10 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:35:18,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=54357.333333333336, ans=10.0 +2024-07-27 19:35:19,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=54357.333333333336, ans=0.07 +2024-07-27 19:35:24,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=54370.666666666664, ans=0.125 +2024-07-27 19:35:39,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=54397.333333333336, ans=0.1 +2024-07-27 19:35:47,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=54410.666666666664, ans=0.2 +2024-07-27 19:35:49,234 INFO [train.py:1114] (3/4) Epoch 4, batch 10150, loss[loss=0.3198, simple_loss=0.3689, pruned_loss=0.1354, over 3289.00 frames. ], tot_loss[loss=0.2888, simple_loss=0.3539, pruned_loss=0.1118, over 820486.58 frames. ], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:35:52,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=54424.0, ans=0.125 +2024-07-27 19:35:53,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=54424.0, ans=0.125 +2024-07-27 19:36:02,455 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.057e+01 7.252e+01 7.644e+01 8.757e+01 1.198e+02, threshold=1.529e+02, percent-clipped=0.0 +2024-07-27 19:36:10,820 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:36:28,236 INFO [train.py:1114] (3/4) Epoch 4, batch 10200, loss[loss=0.3044, simple_loss=0.3461, pruned_loss=0.1314, over 3478.00 frames. ], tot_loss[loss=0.295, simple_loss=0.3576, pruned_loss=0.1162, over 791476.73 frames. 
], batch size: 35, lr: 1.67e-02, grad_scale: 32.0 +2024-07-27 19:36:29,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.74 vs. limit=15.0 +2024-07-27 19:36:36,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=54504.0, ans=0.125 +2024-07-27 19:36:41,238 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:37:32,108 INFO [train.py:1114] (3/4) Epoch 5, batch 0, loss[loss=0.2124, simple_loss=0.2916, pruned_loss=0.06658, over 4856.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2916, pruned_loss=0.06658, over 4856.00 frames. ], batch size: 12, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:37:32,109 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 19:37:43,746 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.2167, simple_loss=0.3194, pruned_loss=0.05704, over 944034.00 frames. +2024-07-27 19:37:43,747 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 19:37:53,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=54534.666666666664, ans=0.0 +2024-07-27 19:38:08,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.642e+01 6.667e+01 7.198e+01 8.159e+01 1.101e+02, threshold=1.440e+02, percent-clipped=0.0 +2024-07-27 19:43:24,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=54588.0, ans=0.2 +2024-07-27 19:43:25,065 INFO [train.py:1114] (3/4) Epoch 5, batch 50, loss[loss=0.1848, simple_loss=0.2687, pruned_loss=0.05043, over 4620.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.341, pruned_loss=0.09822, over 205557.83 frames. ], batch size: 11, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:44:14,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=54628.0, ans=0.025 +2024-07-27 19:44:16,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=54628.0, ans=0.2 +2024-07-27 19:44:17,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.07 vs. limit=15.0 +2024-07-27 19:44:17,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=54628.0, ans=0.125 +2024-07-27 19:44:17,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=16.17 vs. limit=15.0 +2024-07-27 19:44:30,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=54641.333333333336, ans=0.1 +2024-07-27 19:44:39,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=4.04 vs. limit=12.0 +2024-07-27 19:44:43,617 INFO [train.py:1114] (3/4) Epoch 5, batch 100, loss[loss=0.2637, simple_loss=0.332, pruned_loss=0.09765, over 4628.00 frames. ], tot_loss[loss=0.2621, simple_loss=0.3366, pruned_loss=0.09382, over 365440.71 frames. 
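At the epoch boundary above, train.py pauses to run a full pass over the held-out set ("Computing validation loss", then "Epoch 5, validation: loss=0.2167 ... over 944034.00 frames") and reports peak GPU memory. A sketch of that loop with illustrative names — the `model(batch)` interface here is an assumption, not the script's real API:

```python
# Hedged sketch of the validation pass and memory report. The model
# interface (returning a scalar loss and a frame count) is assumed.
import torch

@torch.no_grad()
def compute_validation_loss(model, valid_loader) -> None:
    model.eval()
    tot, frames = 0.0, 0.0
    for batch in valid_loader:
        loss, num_frames = model(batch)  # assumed interface
        tot += loss.item() * num_frames
        frames += num_frames
    model.train()
    print(f"validation: loss={tot / frames:.4f}, over {frames:.2f} frames")
    if torch.cuda.is_available():
        mb = torch.cuda.max_memory_allocated() // 2**20
        print(f"Maximum memory allocated so far is {mb}MB")
```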
], batch size: 12, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:45:13,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.47 vs. limit=15.0 +2024-07-27 19:45:20,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=54668.0, ans=0.0 +2024-07-27 19:45:29,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=54668.0, ans=10.0 +2024-07-27 19:45:30,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.43 vs. limit=22.5 +2024-07-27 19:45:45,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=54681.333333333336, ans=0.0 +2024-07-27 19:45:52,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 6.028e+01 6.816e+01 7.937e+01 1.219e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-27 19:46:09,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.28 vs. limit=6.0 +2024-07-27 19:46:13,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.27 vs. limit=10.0 +2024-07-27 19:46:20,043 INFO [train.py:1114] (3/4) Epoch 5, batch 150, loss[loss=0.2166, simple_loss=0.2903, pruned_loss=0.07142, over 4619.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3315, pruned_loss=0.09211, over 493777.62 frames. ], batch size: 11, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:46:23,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=54721.333333333336, ans=0.1 +2024-07-27 19:46:31,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=54721.333333333336, ans=0.125 +2024-07-27 19:46:49,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=54734.666666666664, ans=0.2 +2024-07-27 19:47:07,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=54748.0, ans=0.025 +2024-07-27 19:47:20,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54761.333333333336, ans=0.125 +2024-07-27 19:47:33,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=54774.666666666664, ans=0.0 +2024-07-27 19:47:50,700 INFO [train.py:1114] (3/4) Epoch 5, batch 200, loss[loss=0.2279, simple_loss=0.3155, pruned_loss=0.07013, over 4471.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3297, pruned_loss=0.09092, over 593408.17 frames. 
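The learning rate printed per record falls from 1.67e-02 in late epoch 4 to 1.55e-02 at the start of epoch 5 even though batch_count keeps rising, which is consistent with an Eden-style schedule that decays in both the step index and the epoch index. A sketch with assumed hyperparameters (`base_lr`, `lr_batches`, `lr_epochs`):

```python
# Eden-style learning-rate schedule: two independent inverse-power
# decay factors, one in the batch index and one in the epoch index.
# All hyperparameter values here are assumptions for illustration.
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(eden_lr(0.045, batch=54500, epoch=4))  # late epoch 4
print(eden_lr(0.045, batch=54534, epoch=5))  # start of epoch 5: smaller
```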
], batch size: 21, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:47:52,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=54788.0, ans=0.025 +2024-07-27 19:48:14,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=54801.333333333336, ans=0.125 +2024-07-27 19:48:41,787 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.041e+01 6.282e+01 6.864e+01 7.827e+01 1.211e+02, threshold=1.373e+02, percent-clipped=0.0 +2024-07-27 19:49:02,706 INFO [train.py:1114] (3/4) Epoch 5, batch 250, loss[loss=0.2655, simple_loss=0.35, pruned_loss=0.09053, over 4626.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3293, pruned_loss=0.09094, over 670199.84 frames. ], batch size: 16, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:49:17,569 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:49:22,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.40 vs. limit=15.0 +2024-07-27 19:49:23,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=54894.666666666664, ans=0.0 +2024-07-27 19:49:24,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=54894.666666666664, ans=0.125 +2024-07-27 19:49:27,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.96 vs. limit=15.0 +2024-07-27 19:49:39,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=54908.0, ans=0.2 +2024-07-27 19:49:50,295 INFO [train.py:1114] (3/4) Epoch 5, batch 300, loss[loss=0.2625, simple_loss=0.3452, pruned_loss=0.08992, over 4790.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3293, pruned_loss=0.09056, over 729926.27 frames. ], batch size: 15, lr: 1.55e-02, grad_scale: 32.0 +2024-07-27 19:49:54,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=54921.333333333336, ans=0.125 +2024-07-27 19:50:03,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.79 vs. limit=15.0 +2024-07-27 19:50:16,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.053e+01 6.343e+01 7.108e+01 8.248e+01 1.263e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-27 19:50:17,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=54961.333333333336, ans=0.125 +2024-07-27 19:50:26,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=54974.666666666664, ans=0.125 +2024-07-27 19:50:27,361 INFO [train.py:1114] (3/4) Epoch 5, batch 350, loss[loss=0.2058, simple_loss=0.2794, pruned_loss=0.06605, over 4946.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3303, pruned_loss=0.09056, over 776069.28 frames. 
], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:50:30,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=54988.0, ans=0.0 +2024-07-27 19:50:32,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=54988.0, ans=0.0 +2024-07-27 19:50:38,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0 +2024-07-27 19:50:43,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55014.666666666664, ans=0.0 +2024-07-27 19:51:12,115 INFO [train.py:1114] (3/4) Epoch 5, batch 400, loss[loss=0.2785, simple_loss=0.358, pruned_loss=0.09945, over 4693.00 frames. ], tot_loss[loss=0.2543, simple_loss=0.3293, pruned_loss=0.08962, over 813525.90 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:51:17,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=55054.666666666664, ans=0.125 +2024-07-27 19:51:29,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.57 vs. limit=15.0 +2024-07-27 19:51:32,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=55081.333333333336, ans=0.125 +2024-07-27 19:51:39,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=55094.666666666664, ans=0.125 +2024-07-27 19:51:40,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=55094.666666666664, ans=0.125 +2024-07-27 19:51:43,474 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.118e+01 6.055e+01 6.518e+01 7.484e+01 1.056e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-27 19:51:44,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=55094.666666666664, ans=0.125 +2024-07-27 19:51:51,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=55108.0, ans=0.125 +2024-07-27 19:52:02,005 INFO [train.py:1114] (3/4) Epoch 5, batch 450, loss[loss=0.2838, simple_loss=0.3531, pruned_loss=0.1072, over 4632.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3285, pruned_loss=0.08912, over 838458.07 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:52:13,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=55134.666666666664, ans=0.125 +2024-07-27 19:52:16,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55134.666666666664, ans=0.125 +2024-07-27 19:52:30,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=55174.666666666664, ans=0.125 +2024-07-27 19:52:39,051 INFO [train.py:1114] (3/4) Epoch 5, batch 500, loss[loss=0.2916, simple_loss=0.3633, pruned_loss=0.11, over 4698.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3262, pruned_loss=0.08773, over 861113.48 frames. 
], batch size: 15, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:52:40,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55188.0, ans=0.1 +2024-07-27 19:52:51,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=55201.333333333336, ans=0.035 +2024-07-27 19:52:56,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=55214.666666666664, ans=0.1 +2024-07-27 19:53:04,119 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.118e+01 6.781e+01 7.848e+01 1.133e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 19:53:04,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=55228.0, ans=0.2 +2024-07-27 19:53:19,189 INFO [train.py:1114] (3/4) Epoch 5, batch 550, loss[loss=0.2714, simple_loss=0.3462, pruned_loss=0.09831, over 4637.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3279, pruned_loss=0.08808, over 877534.27 frames. ], batch size: 17, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:53:30,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=55268.0, ans=0.0 +2024-07-27 19:53:53,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=55308.0, ans=0.125 +2024-07-27 19:53:55,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=55308.0, ans=0.0 +2024-07-27 19:53:59,353 INFO [train.py:1114] (3/4) Epoch 5, batch 600, loss[loss=0.3294, simple_loss=0.3885, pruned_loss=0.1352, over 4647.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3279, pruned_loss=0.08796, over 892199.85 frames. ], batch size: 16, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:54:03,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55321.333333333336, ans=0.125 +2024-07-27 19:54:23,111 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.126e+01 6.489e+01 7.020e+01 8.216e+01 1.209e+02, threshold=1.404e+02, percent-clipped=0.0 +2024-07-27 19:54:32,214 INFO [train.py:1114] (3/4) Epoch 5, batch 650, loss[loss=0.2379, simple_loss=0.3378, pruned_loss=0.06894, over 4759.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3276, pruned_loss=0.08796, over 903896.17 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:54:33,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=55388.0, ans=0.0 +2024-07-27 19:54:34,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55388.0, ans=0.125 +2024-07-27 19:54:40,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=55401.333333333336, ans=0.0 +2024-07-27 19:54:47,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=55414.666666666664, ans=0.05 +2024-07-27 19:54:49,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.96 vs. 
limit=15.0 +2024-07-27 19:55:01,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=55428.0, ans=0.125 +2024-07-27 19:55:02,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.26 vs. limit=22.5 +2024-07-27 19:55:05,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.03 vs. limit=15.0 +2024-07-27 19:55:10,107 INFO [train.py:1114] (3/4) Epoch 5, batch 700, loss[loss=0.2306, simple_loss=0.3125, pruned_loss=0.07437, over 4634.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3267, pruned_loss=0.08738, over 911603.89 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:55:18,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=55454.666666666664, ans=0.0 +2024-07-27 19:55:26,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55481.333333333336, ans=0.125 +2024-07-27 19:55:27,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=55481.333333333336, ans=0.125 +2024-07-27 19:55:31,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55481.333333333336, ans=0.125 +2024-07-27 19:55:32,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.14 vs. limit=15.0 +2024-07-27 19:55:37,845 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.381e+01 6.482e+01 7.754e+01 9.297e+01 1.843e+02, threshold=1.551e+02, percent-clipped=6.0 +2024-07-27 19:55:39,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=55494.666666666664, ans=0.0 +2024-07-27 19:55:47,637 INFO [train.py:1114] (3/4) Epoch 5, batch 750, loss[loss=0.2253, simple_loss=0.3089, pruned_loss=0.07083, over 4687.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3264, pruned_loss=0.08721, over 918250.71 frames. ], batch size: 13, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:55:50,096 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.04 vs. limit=15.0 +2024-07-27 19:55:50,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=55521.333333333336, ans=0.125 +2024-07-27 19:55:50,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.31 vs. 
limit=15.0 +2024-07-27 19:55:56,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=55534.666666666664, ans=0.0 +2024-07-27 19:56:05,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=55548.0, ans=0.125 +2024-07-27 19:56:23,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=55561.333333333336, ans=0.125 +2024-07-27 19:56:32,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=55574.666666666664, ans=0.125 +2024-07-27 19:56:35,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.33 vs. limit=15.0 +2024-07-27 19:56:37,355 INFO [train.py:1114] (3/4) Epoch 5, batch 800, loss[loss=0.2273, simple_loss=0.3112, pruned_loss=0.07173, over 4848.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3276, pruned_loss=0.088, over 923238.68 frames. ], batch size: 12, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:56:43,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-27 19:56:47,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.69 vs. limit=15.0 +2024-07-27 19:56:48,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=55601.333333333336, ans=0.1 +2024-07-27 19:56:56,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=55614.666666666664, ans=0.125 +2024-07-27 19:56:58,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=55614.666666666664, ans=0.0 +2024-07-27 19:57:05,531 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.583e+01 6.253e+01 7.054e+01 8.487e+01 1.181e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 19:57:11,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=55641.333333333336, ans=0.025 +2024-07-27 19:57:15,366 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 19:57:17,157 INFO [train.py:1114] (3/4) Epoch 5, batch 850, loss[loss=0.3317, simple_loss=0.3908, pruned_loss=0.1363, over 4655.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.327, pruned_loss=0.08809, over 927390.49 frames. 
], batch size: 14, lr: 1.54e-02, grad_scale: 32.0 +2024-07-27 19:57:44,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=55694.666666666664, ans=0.0 +2024-07-27 19:57:47,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=55708.0, ans=0.04949747468305833 +2024-07-27 19:57:53,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=55708.0, ans=0.125 +2024-07-27 19:57:54,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=55708.0, ans=0.1 +2024-07-27 19:57:56,345 INFO [train.py:1114] (3/4) Epoch 5, batch 900, loss[loss=0.2347, simple_loss=0.3057, pruned_loss=0.08182, over 4846.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3277, pruned_loss=0.08857, over 928542.99 frames. ], batch size: 12, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:57:57,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=55721.333333333336, ans=0.2 +2024-07-27 19:58:00,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=55721.333333333336, ans=0.2 +2024-07-27 19:58:32,496 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.179e+01 6.369e+01 7.320e+01 8.500e+01 1.312e+02, threshold=1.464e+02, percent-clipped=0.0 +2024-07-27 19:58:53,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=55788.0, ans=0.125 +2024-07-27 19:58:53,869 INFO [train.py:1114] (3/4) Epoch 5, batch 950, loss[loss=0.2075, simple_loss=0.2872, pruned_loss=0.06393, over 4780.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3273, pruned_loss=0.08812, over 930018.42 frames. ], batch size: 12, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:58:57,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=55788.0, ans=0.125 +2024-07-27 19:58:58,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=55788.0, ans=0.0 +2024-07-27 19:59:19,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=55828.0, ans=0.0 +2024-07-27 19:59:21,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=55828.0, ans=0.0 +2024-07-27 19:59:26,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=55841.333333333336, ans=0.125 +2024-07-27 19:59:32,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=55841.333333333336, ans=0.0 +2024-07-27 19:59:32,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.19 vs. limit=6.0 +2024-07-27 19:59:34,239 INFO [train.py:1114] (3/4) Epoch 5, batch 1000, loss[loss=0.2104, simple_loss=0.2912, pruned_loss=0.06481, over 4968.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3279, pruned_loss=0.0885, over 929617.09 frames. 
], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 19:59:40,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=55854.666666666664, ans=0.125 +2024-07-27 20:00:03,951 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.112e+01 6.099e+01 6.760e+01 7.878e+01 1.806e+02, threshold=1.352e+02, percent-clipped=1.0 +2024-07-27 20:00:12,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=55908.0, ans=0.0 +2024-07-27 20:00:13,325 INFO [train.py:1114] (3/4) Epoch 5, batch 1050, loss[loss=0.2711, simple_loss=0.3495, pruned_loss=0.09633, over 4876.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3255, pruned_loss=0.08667, over 932070.64 frames. ], batch size: 14, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:15,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=55921.333333333336, ans=0.125 +2024-07-27 20:00:17,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=55921.333333333336, ans=0.025 +2024-07-27 20:00:25,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=55934.666666666664, ans=0.2 +2024-07-27 20:00:35,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=55961.333333333336, ans=10.0 +2024-07-27 20:00:41,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=55974.666666666664, ans=0.125 +2024-07-27 20:00:43,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=55974.666666666664, ans=0.0 +2024-07-27 20:00:47,866 INFO [train.py:1114] (3/4) Epoch 5, batch 1100, loss[loss=0.2203, simple_loss=0.3055, pruned_loss=0.06752, over 4899.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3253, pruned_loss=0.08682, over 934393.52 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0 +2024-07-27 20:00:54,940 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:00:56,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=56001.333333333336, ans=0.2 +2024-07-27 20:01:02,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.15 vs. 
limit=22.5
+2024-07-27 20:01:03,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=56014.666666666664, ans=10.0
+2024-07-27 20:01:08,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=56028.0, ans=0.125
+2024-07-27 20:01:09,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=56028.0, ans=0.125
+2024-07-27 20:01:11,709 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.396e+01 6.284e+01 6.917e+01 8.137e+01 1.279e+02, threshold=1.383e+02, percent-clipped=0.0
+2024-07-27 20:01:12,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.07 vs. limit=12.0
+2024-07-27 20:01:20,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=56041.333333333336, ans=0.0
+2024-07-27 20:01:22,329 INFO [train.py:1114] (3/4) Epoch 5, batch 1150, loss[loss=0.2084, simple_loss=0.2946, pruned_loss=0.06111, over 4888.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3246, pruned_loss=0.08628, over 934608.65 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:01:24,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=56054.666666666664, ans=0.0
+2024-07-27 20:01:43,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=56094.666666666664, ans=0.125
+2024-07-27 20:01:50,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.19 vs. limit=22.5
+2024-07-27 20:01:55,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=56108.0, ans=0.025
+2024-07-27 20:01:57,031 INFO [train.py:1114] (3/4) Epoch 5, batch 1200, loss[loss=0.2726, simple_loss=0.3496, pruned_loss=0.09777, over 4873.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3275, pruned_loss=0.08817, over 933734.48 frames. ], batch size: 14, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:02:04,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=56121.333333333336, ans=0.125
+2024-07-27 20:02:19,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=56134.666666666664, ans=0.125
+2024-07-27 20:02:23,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=56134.666666666664, ans=0.2
+2024-07-27 20:02:27,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=56148.0, ans=0.125
+2024-07-27 20:02:31,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=56148.0, ans=0.125
+2024-07-27 20:02:39,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.15 vs. limit=10.0
+2024-07-27 20:02:40,437 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.476e+01 6.643e+01 8.181e+01 1.020e+02 1.586e+02, threshold=1.636e+02, percent-clipped=2.0
+2024-07-27 20:02:42,218 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.99 vs. limit=15.0
+2024-07-27 20:02:53,268 INFO [train.py:1114] (3/4) Epoch 5, batch 1250, loss[loss=0.23, simple_loss=0.309, pruned_loss=0.07551, over 4797.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3278, pruned_loss=0.08809, over 937534.02 frames. ], batch size: 15, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:02:53,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=56188.0, ans=0.125
+2024-07-27 20:03:19,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=56228.0, ans=0.09899494936611666
+2024-07-27 20:03:24,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=15.0
+2024-07-27 20:03:28,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56241.333333333336, ans=0.125
+2024-07-27 20:03:29,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.25 vs. limit=15.0
+2024-07-27 20:03:30,282 INFO [train.py:1114] (3/4) Epoch 5, batch 1300, loss[loss=0.2638, simple_loss=0.3234, pruned_loss=0.1021, over 4760.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3267, pruned_loss=0.08752, over 939110.10 frames. ], batch size: 19, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:03:33,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=56254.666666666664, ans=0.2
+2024-07-27 20:03:39,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=56268.0, ans=0.125
+2024-07-27 20:03:49,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=56281.333333333336, ans=0.05
+2024-07-27 20:03:51,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=56281.333333333336, ans=0.125
+2024-07-27 20:04:00,102 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.264e+01 7.458e+01 8.643e+01 1.456e+02, threshold=1.492e+02, percent-clipped=0.0
+2024-07-27 20:04:04,334 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:04:09,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=56321.333333333336, ans=0.125
+2024-07-27 20:04:10,190 INFO [train.py:1114] (3/4) Epoch 5, batch 1350, loss[loss=0.2119, simple_loss=0.2959, pruned_loss=0.06399, over 4760.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3243, pruned_loss=0.08599, over 941150.10 frames. ], batch size: 13, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:04:11,629 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:04:13,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=56321.333333333336, ans=0.2
+2024-07-27 20:04:20,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=56334.666666666664, ans=0.125
+2024-07-27 20:04:20,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=56334.666666666664, ans=0.0
+2024-07-27 20:04:22,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.18 vs. limit=22.5
+2024-07-27 20:04:22,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=56334.666666666664, ans=0.2
+2024-07-27 20:04:28,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=56348.0, ans=0.125
+2024-07-27 20:04:42,075 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.58 vs. limit=15.0
+2024-07-27 20:04:47,668 INFO [train.py:1114] (3/4) Epoch 5, batch 1400, loss[loss=0.2081, simple_loss=0.2796, pruned_loss=0.06832, over 4701.00 frames. ], tot_loss[loss=0.2478, simple_loss=0.3236, pruned_loss=0.086, over 942975.98 frames. ], batch size: 11, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:04:58,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=56401.333333333336, ans=0.05
+2024-07-27 20:05:47,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=56428.0, ans=0.05
+2024-07-27 20:05:48,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.394e+01 7.108e+01 8.417e+01 1.153e+02, threshold=1.422e+02, percent-clipped=0.0
+2024-07-27 20:05:57,870 INFO [train.py:1114] (3/4) Epoch 5, batch 1450, loss[loss=0.303, simple_loss=0.3772, pruned_loss=0.1144, over 4681.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3239, pruned_loss=0.08589, over 942827.86 frames. ], batch size: 15, lr: 1.53e-02, grad_scale: 32.0
+2024-07-27 20:06:08,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=56468.0, ans=0.125
+2024-07-27 20:06:22,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=56494.666666666664, ans=0.025
+2024-07-27 20:06:24,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=56494.666666666664, ans=0.025
+2024-07-27 20:06:26,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=56508.0, ans=0.125
+2024-07-27 20:06:27,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=56508.0, ans=0.0
+2024-07-27 20:06:32,437 INFO [train.py:1114] (3/4) Epoch 5, batch 1500, loss[loss=0.2671, simple_loss=0.3521, pruned_loss=0.09104, over 4807.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3263, pruned_loss=0.0872, over 942612.73 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0
+2024-07-27 20:06:35,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=56521.333333333336, ans=0.2
+2024-07-27 20:06:41,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=56534.666666666664, ans=0.07
+2024-07-27 20:06:42,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=56534.666666666664, ans=10.0
+2024-07-27 20:06:51,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=56548.0, ans=0.0
+2024-07-27 20:06:59,211 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.351e+01 6.555e+01 7.313e+01 8.345e+01 1.115e+02, threshold=1.463e+02, percent-clipped=0.0
+2024-07-27 20:06:59,445 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:07:04,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=56574.666666666664, ans=0.025
+2024-07-27 20:07:08,542 INFO [train.py:1114] (3/4) Epoch 5, batch 1550, loss[loss=0.2269, simple_loss=0.3179, pruned_loss=0.06789, over 4913.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3256, pruned_loss=0.08641, over 938862.31 frames. ], batch size: 15, lr: 1.52e-02, grad_scale: 32.0
+2024-07-27 20:07:10,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=56588.0, ans=0.0
+2024-07-27 20:07:11,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=56588.0, ans=0.0
+2024-07-27 20:07:18,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.11 vs. limit=15.0
+2024-07-27 20:07:28,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=56628.0, ans=0.2
+2024-07-27 20:07:30,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=56628.0, ans=0.0
+2024-07-27 20:07:42,162 INFO [train.py:1114] (3/4) Epoch 5, batch 1600, loss[loss=0.2442, simple_loss=0.3319, pruned_loss=0.07822, over 4870.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3263, pruned_loss=0.08706, over 937762.57 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 32.0
+2024-07-27 20:07:44,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0
+2024-07-27 20:07:48,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=56668.0, ans=0.2
+2024-07-27 20:07:55,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.77 vs. limit=5.0
+2024-07-27 20:07:56,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=56681.333333333336, ans=0.09899494936611666
+2024-07-27 20:08:06,105 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.974e+01 6.291e+01 7.006e+01 7.974e+01 1.110e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 20:08:08,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=56708.0, ans=0.0
+2024-07-27 20:08:12,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=56708.0, ans=0.125
+2024-07-27 20:08:12,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.87 vs. limit=15.0
+2024-07-27 20:08:15,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=56721.333333333336, ans=0.125
+2024-07-27 20:08:15,476 INFO [train.py:1114] (3/4) Epoch 5, batch 1650, loss[loss=0.2657, simple_loss=0.3399, pruned_loss=0.0957, over 4655.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3271, pruned_loss=0.08813, over 937277.93 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:08:28,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0
+2024-07-27 20:08:31,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=56748.0, ans=0.125
+2024-07-27 20:08:34,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=56748.0, ans=0.09899494936611666
+2024-07-27 20:08:35,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=56761.333333333336, ans=0.2
+2024-07-27 20:08:42,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=56774.666666666664, ans=0.0
+2024-07-27 20:08:46,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.06 vs. limit=15.0
+2024-07-27 20:08:50,059 INFO [train.py:1114] (3/4) Epoch 5, batch 1700, loss[loss=0.2063, simple_loss=0.2856, pruned_loss=0.06351, over 4716.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.327, pruned_loss=0.08743, over 939148.83 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:08:59,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.29 vs. limit=15.0
+2024-07-27 20:08:59,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=56801.333333333336, ans=0.125
+2024-07-27 20:08:59,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.78 vs. limit=22.5
+2024-07-27 20:09:05,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=56814.666666666664, ans=0.125
+2024-07-27 20:09:05,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=56814.666666666664, ans=0.125
+2024-07-27 20:09:06,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=56814.666666666664, ans=0.125
+2024-07-27 20:09:12,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.12 vs. limit=22.5
+2024-07-27 20:09:16,486 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.365e+01 6.475e+01 7.223e+01 8.445e+01 1.275e+02, threshold=1.445e+02, percent-clipped=0.0
+2024-07-27 20:09:22,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.77 vs. limit=15.0
+2024-07-27 20:09:24,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=56841.333333333336, ans=0.05
+2024-07-27 20:09:25,628 INFO [train.py:1114] (3/4) Epoch 5, batch 1750, loss[loss=0.2157, simple_loss=0.2976, pruned_loss=0.0669, over 4795.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3264, pruned_loss=0.08686, over 940460.53 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:09:47,073 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=14.23 vs. limit=15.0
+2024-07-27 20:09:56,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.99 vs. limit=15.0
+2024-07-27 20:10:00,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.39 vs. limit=10.0
+2024-07-27 20:10:02,665 INFO [train.py:1114] (3/4) Epoch 5, batch 1800, loss[loss=0.2528, simple_loss=0.3211, pruned_loss=0.0923, over 4642.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3266, pruned_loss=0.08695, over 940896.61 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:10:12,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.27 vs. limit=15.0
+2024-07-27 20:10:26,593 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.038e+01 6.233e+01 6.949e+01 8.152e+01 1.410e+02, threshold=1.390e+02, percent-clipped=0.0
+2024-07-27 20:10:28,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=56974.666666666664, ans=0.0
+2024-07-27 20:10:29,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=56974.666666666664, ans=0.125
+2024-07-27 20:10:30,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=56974.666666666664, ans=0.1
+2024-07-27 20:10:35,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=56974.666666666664, ans=0.125
+2024-07-27 20:10:37,643 INFO [train.py:1114] (3/4) Epoch 5, batch 1850, loss[loss=0.2851, simple_loss=0.3584, pruned_loss=0.1059, over 4807.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3269, pruned_loss=0.08724, over 940583.84 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:10:44,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=57001.333333333336, ans=0.125
+2024-07-27 20:10:44,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.75 vs. limit=15.0
+2024-07-27 20:10:49,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57001.333333333336, ans=0.125
+2024-07-27 20:11:03,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57028.0, ans=0.0
+2024-07-27 20:11:11,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=57041.333333333336, ans=0.125
+2024-07-27 20:11:12,477 INFO [train.py:1114] (3/4) Epoch 5, batch 1900, loss[loss=0.1962, simple_loss=0.2796, pruned_loss=0.05637, over 4668.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3262, pruned_loss=0.0869, over 942272.20 frames. ], batch size: 14, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:11:13,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=57054.666666666664, ans=0.0
+2024-07-27 20:11:14,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57054.666666666664, ans=0.125
+2024-07-27 20:11:23,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.43 vs. limit=22.5
+2024-07-27 20:11:24,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.68 vs. limit=15.0
+2024-07-27 20:11:27,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.68 vs. limit=10.0
+2024-07-27 20:11:35,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57094.666666666664, ans=0.1
+2024-07-27 20:11:36,826 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.851e+01 6.078e+01 6.608e+01 7.914e+01 1.166e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-27 20:11:37,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57094.666666666664, ans=0.0
+2024-07-27 20:11:46,532 INFO [train.py:1114] (3/4) Epoch 5, batch 1950, loss[loss=0.2407, simple_loss=0.3054, pruned_loss=0.088, over 4899.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3276, pruned_loss=0.08718, over 944170.12 frames. ], batch size: 13, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:11:48,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57121.333333333336, ans=0.125
+2024-07-27 20:11:49,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=57121.333333333336, ans=0.2
+2024-07-27 20:11:49,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=57121.333333333336, ans=0.0
+2024-07-27 20:11:54,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=57134.666666666664, ans=0.2
+2024-07-27 20:11:56,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=57134.666666666664, ans=0.2
+2024-07-27 20:11:58,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57134.666666666664, ans=0.1
+2024-07-27 20:12:04,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57148.0, ans=0.125
+2024-07-27 20:12:06,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57148.0, ans=0.125
+2024-07-27 20:12:09,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=57161.333333333336, ans=0.125
+2024-07-27 20:12:22,152 INFO [train.py:1114] (3/4) Epoch 5, batch 2000, loss[loss=0.1946, simple_loss=0.2752, pruned_loss=0.05696, over 4803.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3276, pruned_loss=0.08698, over 940641.43 frames. ], batch size: 11, lr: 1.52e-02, grad_scale: 64.0
+2024-07-27 20:12:27,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.13 vs. limit=15.0
+2024-07-27 20:12:29,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=57201.333333333336, ans=0.125
+2024-07-27 20:12:36,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=57214.666666666664, ans=0.2
+2024-07-27 20:12:36,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57214.666666666664, ans=0.125
+2024-07-27 20:12:46,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.030e+01 6.356e+01 7.460e+01 8.642e+01 1.315e+02, threshold=1.492e+02, percent-clipped=0.0
+2024-07-27 20:12:49,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.02 vs. limit=15.0
+2024-07-27 20:12:55,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=57254.666666666664, ans=0.125
+2024-07-27 20:12:56,125 INFO [train.py:1114] (3/4) Epoch 5, batch 2050, loss[loss=0.1769, simple_loss=0.2595, pruned_loss=0.04717, over 4618.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3259, pruned_loss=0.08613, over 938947.52 frames. ], batch size: 11, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:13:07,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=57268.0, ans=0.0
+2024-07-27 20:13:21,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.70 vs. limit=6.0
+2024-07-27 20:13:22,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=57308.0, ans=0.0
+2024-07-27 20:13:26,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=57308.0, ans=0.035
+2024-07-27 20:13:28,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=57308.0, ans=0.0
+2024-07-27 20:13:29,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=15.0
+2024-07-27 20:13:29,964 INFO [train.py:1114] (3/4) Epoch 5, batch 2100, loss[loss=0.2626, simple_loss=0.3475, pruned_loss=0.08882, over 4766.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3247, pruned_loss=0.08535, over 940703.48 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:13:30,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57321.333333333336, ans=0.0
+2024-07-27 20:13:36,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=57334.666666666664, ans=0.0
+2024-07-27 20:13:41,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=57334.666666666664, ans=0.0
+2024-07-27 20:13:43,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.20 vs. limit=22.5
+2024-07-27 20:13:49,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57361.333333333336, ans=0.1
+2024-07-27 20:13:53,936 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.957e+01 6.234e+01 6.918e+01 8.302e+01 1.274e+02, threshold=1.384e+02, percent-clipped=0.0
+2024-07-27 20:13:54,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=57361.333333333336, ans=0.125
+2024-07-27 20:13:55,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.37 vs. limit=10.0
+2024-07-27 20:14:02,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57388.0, ans=0.1
+2024-07-27 20:14:03,261 INFO [train.py:1114] (3/4) Epoch 5, batch 2150, loss[loss=0.1968, simple_loss=0.2681, pruned_loss=0.06275, over 4887.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3233, pruned_loss=0.08472, over 943803.58 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:14:06,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=57388.0, ans=0.0
+2024-07-27 20:14:13,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=57401.333333333336, ans=0.0
+2024-07-27 20:14:23,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=57414.666666666664, ans=0.0
+2024-07-27 20:14:29,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.95 vs. limit=15.0
+2024-07-27 20:14:38,766 INFO [train.py:1114] (3/4) Epoch 5, batch 2200, loss[loss=0.2607, simple_loss=0.3541, pruned_loss=0.08362, over 4808.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3236, pruned_loss=0.08501, over 943170.23 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:14:43,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=57454.666666666664, ans=0.2
+2024-07-27 20:14:51,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57481.333333333336, ans=0.1
+2024-07-27 20:14:51,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=57481.333333333336, ans=0.09899494936611666
+2024-07-27 20:15:03,286 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 6.386e+01 7.473e+01 9.024e+01 1.169e+02, threshold=1.495e+02, percent-clipped=0.0
+2024-07-27 20:15:09,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57508.0, ans=0.1
+2024-07-27 20:15:09,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.76 vs. limit=15.0
+2024-07-27 20:15:12,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=57508.0, ans=0.125
+2024-07-27 20:15:13,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57508.0, ans=0.1
+2024-07-27 20:15:13,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=57508.0, ans=0.5
+2024-07-27 20:15:13,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=57508.0, ans=0.0
+2024-07-27 20:15:14,974 INFO [train.py:1114] (3/4) Epoch 5, batch 2250, loss[loss=0.2586, simple_loss=0.3324, pruned_loss=0.09234, over 4701.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3236, pruned_loss=0.08465, over 942156.70 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:15:16,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.62 vs. limit=8.0
+2024-07-27 20:15:18,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.56 vs. limit=15.0
+2024-07-27 20:15:23,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=57534.666666666664, ans=0.125
+2024-07-27 20:15:50,312 INFO [train.py:1114] (3/4) Epoch 5, batch 2300, loss[loss=0.2581, simple_loss=0.3234, pruned_loss=0.09642, over 4930.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3231, pruned_loss=0.08504, over 939803.01 frames. ], batch size: 12, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:15:58,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=57601.333333333336, ans=0.2
+2024-07-27 20:16:03,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=57614.666666666664, ans=0.2
+2024-07-27 20:16:12,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=57628.0, ans=0.125
+2024-07-27 20:16:15,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=57628.0, ans=0.125
+2024-07-27 20:16:16,284 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.130e+01 6.014e+01 6.647e+01 7.772e+01 1.123e+02, threshold=1.329e+02, percent-clipped=0.0
+2024-07-27 20:16:16,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57628.0, ans=0.125
+2024-07-27 20:16:24,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=57641.333333333336, ans=0.2
+2024-07-27 20:16:29,314 INFO [train.py:1114] (3/4) Epoch 5, batch 2350, loss[loss=0.2816, simple_loss=0.3479, pruned_loss=0.1077, over 4634.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3238, pruned_loss=0.08556, over 941941.25 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:16:36,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=57668.0, ans=0.0
+2024-07-27 20:16:37,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=57668.0, ans=0.0
+2024-07-27 20:16:40,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=57668.0, ans=0.125
+2024-07-27 20:16:48,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=57681.333333333336, ans=0.125
+2024-07-27 20:16:48,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=57681.333333333336, ans=0.1
+2024-07-27 20:16:52,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=57694.666666666664, ans=0.0
+2024-07-27 20:16:53,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.65 vs. limit=10.0
+2024-07-27 20:16:53,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=14.78 vs. limit=15.0
+2024-07-27 20:17:03,187 INFO [train.py:1114] (3/4) Epoch 5, batch 2400, loss[loss=0.2253, simple_loss=0.3128, pruned_loss=0.06894, over 4634.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3246, pruned_loss=0.08633, over 941539.06 frames. ], batch size: 12, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:17:12,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=57734.666666666664, ans=0.125
+2024-07-27 20:17:27,274 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.323e+01 6.252e+01 6.682e+01 7.735e+01 1.071e+02, threshold=1.336e+02, percent-clipped=0.0
+2024-07-27 20:17:30,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=57774.666666666664, ans=0.125
+2024-07-27 20:17:36,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=57788.0, ans=0.1
+2024-07-27 20:17:36,684 INFO [train.py:1114] (3/4) Epoch 5, batch 2450, loss[loss=0.2707, simple_loss=0.3563, pruned_loss=0.09254, over 4694.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3271, pruned_loss=0.08763, over 937326.53 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:17:37,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0
+2024-07-27 20:17:40,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=57788.0, ans=0.0
+2024-07-27 20:17:50,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=57801.333333333336, ans=0.2
+2024-07-27 20:17:58,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.05 vs. limit=6.0
+2024-07-27 20:18:06,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=57828.0, ans=0.1
+2024-07-27 20:18:17,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.66 vs. limit=12.0
+2024-07-27 20:18:20,675 INFO [train.py:1114] (3/4) Epoch 5, batch 2500, loss[loss=0.269, simple_loss=0.3472, pruned_loss=0.09539, over 4807.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.327, pruned_loss=0.08741, over 939354.99 frames. ], batch size: 14, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:18:26,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=57854.666666666664, ans=0.2
+2024-07-27 20:18:35,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=57881.333333333336, ans=0.0
+2024-07-27 20:18:41,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=57881.333333333336, ans=0.125
+2024-07-27 20:18:51,013 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 6.442e+01 7.418e+01 9.024e+01 1.336e+02, threshold=1.484e+02, percent-clipped=0.0
+2024-07-27 20:18:51,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=57894.666666666664, ans=0.125
+2024-07-27 20:18:52,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.74 vs. limit=15.0
+2024-07-27 20:18:59,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=57908.0, ans=0.0
+2024-07-27 20:19:01,027 INFO [train.py:1114] (3/4) Epoch 5, batch 2550, loss[loss=0.2095, simple_loss=0.2859, pruned_loss=0.06653, over 4807.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3268, pruned_loss=0.08736, over 938917.25 frames. ], batch size: 11, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:19:03,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=57921.333333333336, ans=10.0
+2024-07-27 20:19:07,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.19 vs. limit=6.0
+2024-07-27 20:19:20,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=57961.333333333336, ans=0.0
+2024-07-27 20:19:20,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.47 vs. limit=10.0
+2024-07-27 20:19:34,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=57988.0, ans=0.025
+2024-07-27 20:19:34,664 INFO [train.py:1114] (3/4) Epoch 5, batch 2600, loss[loss=0.2194, simple_loss=0.2885, pruned_loss=0.07518, over 4891.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3276, pruned_loss=0.08765, over 938125.25 frames. ], batch size: 13, lr: 1.51e-02, grad_scale: 64.0
+2024-07-27 20:19:36,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57988.0, ans=0.125
+2024-07-27 20:19:36,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=57988.0, ans=0.125
+2024-07-27 20:19:38,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=57988.0, ans=0.125
+2024-07-27 20:19:43,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=58001.333333333336, ans=0.025
+2024-07-27 20:19:43,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=58001.333333333336, ans=0.125
+2024-07-27 20:19:58,985 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 6.417e+01 7.272e+01 8.306e+01 1.432e+02, threshold=1.454e+02, percent-clipped=0.0
+2024-07-27 20:20:11,712 INFO [train.py:1114] (3/4) Epoch 5, batch 2650, loss[loss=0.2451, simple_loss=0.3136, pruned_loss=0.08829, over 4651.00 frames. ], tot_loss[loss=0.2516, simple_loss=0.3277, pruned_loss=0.08768, over 940108.80 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:20:27,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58068.0, ans=0.1
+2024-07-27 20:20:29,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=58068.0, ans=0.05
+2024-07-27 20:20:41,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=58094.666666666664, ans=0.025
+2024-07-27 20:20:53,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=15.0
+2024-07-27 20:20:58,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=58108.0, ans=0.125
+2024-07-27 20:20:59,480 INFO [train.py:1114] (3/4) Epoch 5, batch 2700, loss[loss=0.2719, simple_loss=0.345, pruned_loss=0.09943, over 4745.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3262, pruned_loss=0.0868, over 939837.12 frames. ], batch size: 14, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:21:02,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=58121.333333333336, ans=0.125
+2024-07-27 20:21:15,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.65 vs. limit=15.0
+2024-07-27 20:21:16,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=58148.0, ans=0.0
+2024-07-27 20:21:27,045 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.186e+01 6.835e+01 7.719e+01 1.191e+02, threshold=1.367e+02, percent-clipped=0.0
+2024-07-27 20:21:38,088 INFO [train.py:1114] (3/4) Epoch 5, batch 2750, loss[loss=0.2015, simple_loss=0.2821, pruned_loss=0.06042, over 4707.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3245, pruned_loss=0.0861, over 939713.15 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:21:45,703 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:21:47,868 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.76 vs. limit=22.5
+2024-07-27 20:21:53,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.91 vs. limit=22.5
+2024-07-27 20:21:54,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=58214.666666666664, ans=0.025
+2024-07-27 20:22:00,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=58228.0, ans=0.025
+2024-07-27 20:22:04,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=58228.0, ans=0.125
+2024-07-27 20:22:07,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=58228.0, ans=0.0
+2024-07-27 20:22:09,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.42 vs. limit=15.0
+2024-07-27 20:22:19,866 INFO [train.py:1114] (3/4) Epoch 5, batch 2800, loss[loss=0.4015, simple_loss=0.4273, pruned_loss=0.1879, over 3433.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3251, pruned_loss=0.08696, over 937649.99 frames. ], batch size: 35, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:22:20,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=58254.666666666664, ans=0.125
+2024-07-27 20:22:28,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58268.0, ans=0.1
+2024-07-27 20:22:34,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=58281.333333333336, ans=0.0
+2024-07-27 20:22:39,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=58294.666666666664, ans=0.0
+2024-07-27 20:22:44,006 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 6.174e+01 6.624e+01 7.261e+01 1.719e+02, threshold=1.325e+02, percent-clipped=1.0
+2024-07-27 20:22:46,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=58308.0, ans=0.2
+2024-07-27 20:22:49,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=58308.0, ans=0.0
+2024-07-27 20:22:53,234 INFO [train.py:1114] (3/4) Epoch 5, batch 2850, loss[loss=0.2263, simple_loss=0.301, pruned_loss=0.07583, over 4963.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3258, pruned_loss=0.08734, over 936231.44 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:22:59,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.10 vs. limit=6.0
+2024-07-27 20:23:05,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=58348.0, ans=0.125
+2024-07-27 20:23:14,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=58361.333333333336, ans=0.0
+2024-07-27 20:23:26,076 INFO [train.py:1114] (3/4) Epoch 5, batch 2900, loss[loss=0.2147, simple_loss=0.2968, pruned_loss=0.06625, over 4837.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3263, pruned_loss=0.08705, over 939974.90 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:23:44,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=58414.666666666664, ans=0.05
+2024-07-27 20:23:46,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.93 vs. limit=15.0
+2024-07-27 20:23:51,047 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.205e+01 6.873e+01 7.885e+01 1.448e+02, threshold=1.375e+02, percent-clipped=1.0
+2024-07-27 20:23:51,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.94 vs. limit=10.0
+2024-07-27 20:23:51,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=58428.0, ans=0.0
+2024-07-27 20:23:52,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.91 vs. limit=15.0
+2024-07-27 20:23:56,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.56 vs. limit=22.5
+2024-07-27 20:23:57,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.29 vs. limit=10.0
+2024-07-27 20:24:01,532 INFO [train.py:1114] (3/4) Epoch 5, batch 2950, loss[loss=0.2929, simple_loss=0.3396, pruned_loss=0.1231, over 4704.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.3251, pruned_loss=0.08665, over 939389.18 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 64.0
+2024-07-27 20:24:20,232 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:24:20,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.48 vs. limit=10.0
+2024-07-27 20:24:27,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58494.666666666664, ans=0.1
+2024-07-27 20:24:39,799 INFO [train.py:1114] (3/4) Epoch 5, batch 3000, loss[loss=0.2573, simple_loss=0.3313, pruned_loss=0.09166, over 4761.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.325, pruned_loss=0.08626, over 938608.00 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:24:39,800 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-27 20:25:01,111 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.7432, 4.5410, 4.1217, 4.1947], device='cuda:3')
+2024-07-27 20:25:07,123 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.2018, simple_loss=0.3051, pruned_loss=0.04931, over 944034.00 frames.
+2024-07-27 20:25:07,217 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-27 20:25:11,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58521.333333333336, ans=0.125
+2024-07-27 20:25:13,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=58534.666666666664, ans=0.125
+2024-07-27 20:25:19,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=58534.666666666664, ans=0.0
+2024-07-27 20:25:26,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=58548.0, ans=0.1
+2024-07-27 20:25:33,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=58561.333333333336, ans=0.0
+2024-07-27 20:25:51,054 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 6.112e+01 6.899e+01 7.724e+01 1.072e+02, threshold=1.380e+02, percent-clipped=0.0
+2024-07-27 20:25:55,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58574.666666666664, ans=0.1
+2024-07-27 20:26:01,530 INFO [train.py:1114] (3/4) Epoch 5, batch 3050, loss[loss=0.2407, simple_loss=0.3156, pruned_loss=0.08285, over 4651.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3261, pruned_loss=0.0866, over 937274.20 frames. ], batch size: 12, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:26:04,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=11.69 vs. limit=15.0
+2024-07-27 20:26:08,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=5.87 vs. limit=15.0
+2024-07-27 20:26:31,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=58628.0, ans=10.0
+2024-07-27 20:26:32,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=58628.0, ans=0.1
+2024-07-27 20:26:33,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=58641.333333333336, ans=0.125
+2024-07-27 20:26:34,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=58641.333333333336, ans=0.2
+2024-07-27 20:26:40,772 INFO [train.py:1114] (3/4) Epoch 5, batch 3100, loss[loss=0.2765, simple_loss=0.3441, pruned_loss=0.1044, over 4656.00 frames. ], tot_loss[loss=0.2505, simple_loss=0.3265, pruned_loss=0.08729, over 938206.52 frames. ], batch size: 16, lr: 1.50e-02, grad_scale: 16.0
+2024-07-27 20:29:03,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 6.229e+01 6.955e+01 7.996e+01 1.498e+02, threshold=1.391e+02, percent-clipped=1.0
+2024-07-27 20:29:27,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.93 vs. limit=15.0
+2024-07-27 20:29:32,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0
+2024-07-27 20:29:36,747 INFO [train.py:1114] (3/4) Epoch 5, batch 3150, loss[loss=0.2581, simple_loss=0.3491, pruned_loss=0.08353, over 4556.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.326, pruned_loss=0.08672, over 938175.12 frames. ], batch size: 17, lr: 1.50e-02, grad_scale: 16.0
+2024-07-27 20:29:38,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=58721.333333333336, ans=0.0
+2024-07-27 20:29:42,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=58734.666666666664, ans=0.125
+2024-07-27 20:30:18,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=58774.666666666664, ans=0.125
+2024-07-27 20:30:20,979 INFO [train.py:1114] (3/4) Epoch 5, batch 3200, loss[loss=0.2359, simple_loss=0.3117, pruned_loss=0.08005, over 4832.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3244, pruned_loss=0.08604, over 939428.27 frames. ], batch size: 13, lr: 1.50e-02, grad_scale: 32.0
+2024-07-27 20:30:23,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=58788.0, ans=0.125
+2024-07-27 20:30:23,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=58788.0, ans=0.04949747468305833
+2024-07-27 20:30:36,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=58801.333333333336, ans=0.0
+2024-07-27 20:31:09,394 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 6.414e+01 7.232e+01 8.731e+01 1.300e+02, threshold=1.446e+02, percent-clipped=0.0
+2024-07-27 20:31:17,449 INFO [train.py:1114] (3/4) Epoch 5, batch 3250, loss[loss=0.2401, simple_loss=0.3292, pruned_loss=0.0755, over 4927.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3243, pruned_loss=0.0856, over 940287.22 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:31:18,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=58854.666666666664, ans=0.0
+2024-07-27 20:31:18,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0
+2024-07-27 20:31:23,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=58854.666666666664, ans=0.125
+2024-07-27 20:31:51,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=58894.666666666664, ans=0.125
+2024-07-27 20:31:53,619 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:32:06,917 INFO [train.py:1114] (3/4) Epoch 5, batch 3300, loss[loss=0.2722, simple_loss=0.342, pruned_loss=0.1012, over 4704.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3219, pruned_loss=0.08474, over 940380.60 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:32:14,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=58921.333333333336, ans=0.1
+2024-07-27 20:32:50,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=58961.333333333336, ans=0.0
+2024-07-27 20:32:54,496 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 6.806e+01 7.832e+01 9.289e+01 1.732e+02, threshold=1.566e+02, percent-clipped=1.0
+2024-07-27 20:33:05,630 INFO [train.py:1114] (3/4) Epoch 5, batch 3350, loss[loss=0.2868, simple_loss=0.3426, pruned_loss=0.1155, over 4586.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3235, pruned_loss=0.08653, over 938131.57 frames. ], batch size: 17, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:33:07,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=58988.0, ans=0.0
+2024-07-27 20:33:12,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59001.333333333336, ans=0.1
+2024-07-27 20:33:24,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=59014.666666666664, ans=0.0
+2024-07-27 20:33:24,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=59014.666666666664, ans=0.2
+2024-07-27 20:33:25,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59028.0, ans=0.1
+2024-07-27 20:33:33,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=59041.333333333336, ans=0.0
+2024-07-27 20:33:37,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=59041.333333333336, ans=0.2
+2024-07-27 20:33:39,085 INFO [train.py:1114] (3/4) Epoch 5, batch 3400, loss[loss=0.2123, simple_loss=0.2934, pruned_loss=0.06564, over 4804.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.324, pruned_loss=0.0864, over 937026.25 frames. ], batch size: 11, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:33:51,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=59068.0, ans=0.0
+2024-07-27 20:33:54,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59081.333333333336, ans=0.125
+2024-07-27 20:33:56,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=59081.333333333336, ans=0.125
+2024-07-27 20:34:04,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.71 vs. limit=15.0
+2024-07-27 20:34:04,613 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.358e+01 7.066e+01 8.502e+01 1.252e+02, threshold=1.413e+02, percent-clipped=0.0
+2024-07-27 20:34:06,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0
+2024-07-27 20:34:08,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.82 vs. limit=10.0
+2024-07-27 20:34:09,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.57 vs. limit=15.0
+2024-07-27 20:34:11,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=59108.0, ans=0.125
+2024-07-27 20:34:12,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.81 vs. limit=15.0
+2024-07-27 20:34:12,601 INFO [train.py:1114] (3/4) Epoch 5, batch 3450, loss[loss=0.3076, simple_loss=0.366, pruned_loss=0.1247, over 4697.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3248, pruned_loss=0.08621, over 937509.61 frames. ], batch size: 19, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:34:18,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.71 vs. limit=15.0
+2024-07-27 20:34:19,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=59134.666666666664, ans=0.125
+2024-07-27 20:34:20,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=59134.666666666664, ans=0.2
+2024-07-27 20:34:22,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.70 vs. limit=15.0
+2024-07-27 20:34:23,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59134.666666666664, ans=0.125
+2024-07-27 20:34:26,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59148.0, ans=0.125
+2024-07-27 20:34:33,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.16 vs. limit=15.0
+2024-07-27 20:34:35,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=59161.333333333336, ans=0.2
+2024-07-27 20:34:35,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.40 vs. limit=22.5
+2024-07-27 20:34:40,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=59174.666666666664, ans=0.025
+2024-07-27 20:34:46,092 INFO [train.py:1114] (3/4) Epoch 5, batch 3500, loss[loss=0.2043, simple_loss=0.2758, pruned_loss=0.06636, over 4941.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3226, pruned_loss=0.08542, over 938401.53 frames. ], batch size: 12, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:34:54,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=59201.333333333336, ans=0.2
+2024-07-27 20:35:03,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=59214.666666666664, ans=0.2
+2024-07-27 20:35:08,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=59228.0, ans=0.0
+2024-07-27 20:35:13,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.758e+01 6.287e+01 6.647e+01 7.437e+01 1.544e+02, threshold=1.329e+02, percent-clipped=1.0
+2024-07-27 20:35:16,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.52 vs. limit=10.0
+2024-07-27 20:35:16,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=5.44 vs. limit=10.0
+2024-07-27 20:35:21,567 INFO [train.py:1114] (3/4) Epoch 5, batch 3550, loss[loss=0.2829, simple_loss=0.3628, pruned_loss=0.1015, over 4658.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3223, pruned_loss=0.08487, over 938664.71 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:35:33,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59268.0, ans=0.1
+2024-07-27 20:35:48,837 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 20:35:49,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.79 vs. limit=15.0
+2024-07-27 20:35:51,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=59308.0, ans=0.125
+2024-07-27 20:35:53,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.60 vs. limit=22.5
+2024-07-27 20:35:54,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=59308.0, ans=0.2
+2024-07-27 20:35:55,987 INFO [train.py:1114] (3/4) Epoch 5, batch 3600, loss[loss=0.2109, simple_loss=0.2912, pruned_loss=0.06531, over 4976.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3225, pruned_loss=0.08465, over 940384.72 frames. ], batch size: 13, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:36:00,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=59321.333333333336, ans=0.125
+2024-07-27 20:36:01,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=59321.333333333336, ans=0.125
+2024-07-27 20:36:02,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=59321.333333333336, ans=15.0
+2024-07-27 20:36:17,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=59348.0, ans=0.0
+2024-07-27 20:36:26,683 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.226e+01 6.920e+01 7.848e+01 1.341e+02, threshold=1.384e+02, percent-clipped=1.0
+2024-07-27 20:36:28,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59374.666666666664, ans=0.125
+2024-07-27 20:36:35,260 INFO [train.py:1114] (3/4) Epoch 5, batch 3650, loss[loss=0.2913, simple_loss=0.3731, pruned_loss=0.1048, over 4894.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3225, pruned_loss=0.08454, over 940722.05 frames. ], batch size: 15, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:36:50,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.22 vs. limit=15.0
+2024-07-27 20:37:01,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.35 vs. limit=22.5
+2024-07-27 20:37:02,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59441.333333333336, ans=0.125
+2024-07-27 20:37:10,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.96 vs. limit=22.5
+2024-07-27 20:37:11,926 INFO [train.py:1114] (3/4) Epoch 5, batch 3700, loss[loss=0.2279, simple_loss=0.3146, pruned_loss=0.07056, over 4928.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.3209, pruned_loss=0.08361, over 941743.19 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0
+2024-07-27 20:37:17,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=59454.666666666664, ans=0.0
+2024-07-27 20:37:22,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=59468.0, ans=0.125
+2024-07-27 20:37:28,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=59481.333333333336, ans=0.125
+2024-07-27 20:37:37,468 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.201e+01 6.383e+01 7.266e+01 8.369e+01 1.200e+02, threshold=1.453e+02, percent-clipped=0.0
+2024-07-27 20:37:45,347 INFO [train.py:1114] (3/4) Epoch 5, batch 3750, loss[loss=0.2624, simple_loss=0.3111, pruned_loss=0.1068, over 4818.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.321, pruned_loss=0.08422, over 943631.50 frames. 
], batch size: 11, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:37:46,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=59521.333333333336, ans=0.0 +2024-07-27 20:37:52,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0 +2024-07-27 20:37:54,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=59534.666666666664, ans=0.125 +2024-07-27 20:37:55,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=17.35 vs. limit=22.5 +2024-07-27 20:37:59,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.68 vs. limit=15.0 +2024-07-27 20:38:00,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=59548.0, ans=0.2 +2024-07-27 20:38:02,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.38 vs. limit=15.0 +2024-07-27 20:38:13,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=59574.666666666664, ans=0.2 +2024-07-27 20:38:15,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=59574.666666666664, ans=0.035 +2024-07-27 20:38:18,247 INFO [train.py:1114] (3/4) Epoch 5, batch 3800, loss[loss=0.2618, simple_loss=0.3292, pruned_loss=0.09722, over 4804.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3212, pruned_loss=0.08445, over 942037.27 frames. ], batch size: 14, lr: 1.49e-02, grad_scale: 32.0 +2024-07-27 20:38:19,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.12 vs. limit=15.0 +2024-07-27 20:38:23,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59588.0, ans=0.125 +2024-07-27 20:38:28,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=59601.333333333336, ans=0.125 +2024-07-27 20:38:36,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=59614.666666666664, ans=0.05 +2024-07-27 20:38:44,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 6.382e+01 7.291e+01 8.683e+01 1.605e+02, threshold=1.458e+02, percent-clipped=1.0 +2024-07-27 20:38:45,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=59641.333333333336, ans=0.1 +2024-07-27 20:38:45,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=59641.333333333336, ans=0.125 +2024-07-27 20:38:45,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.53 vs. 
limit=15.0 +2024-07-27 20:38:49,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.04 vs. limit=15.0 +2024-07-27 20:38:52,567 INFO [train.py:1114] (3/4) Epoch 5, batch 3850, loss[loss=0.2317, simple_loss=0.3065, pruned_loss=0.07847, over 4678.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3207, pruned_loss=0.08327, over 942638.82 frames. ], batch size: 16, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:38:54,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=59654.666666666664, ans=0.0 +2024-07-27 20:39:03,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.21 vs. limit=22.5 +2024-07-27 20:39:06,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=59681.333333333336, ans=0.125 +2024-07-27 20:39:08,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=59681.333333333336, ans=0.0 +2024-07-27 20:39:11,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=59694.666666666664, ans=0.07 +2024-07-27 20:39:15,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=59694.666666666664, ans=0.125 +2024-07-27 20:39:17,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=59694.666666666664, ans=0.0 +2024-07-27 20:39:24,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=59708.0, ans=0.125 +2024-07-27 20:39:26,346 INFO [train.py:1114] (3/4) Epoch 5, batch 3900, loss[loss=0.2694, simple_loss=0.3405, pruned_loss=0.09916, over 4809.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.322, pruned_loss=0.08362, over 943006.51 frames. ], batch size: 14, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:39:35,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=59734.666666666664, ans=0.125 +2024-07-27 20:39:46,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=59761.333333333336, ans=0.0 +2024-07-27 20:39:47,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=59761.333333333336, ans=0.05 +2024-07-27 20:39:49,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=59761.333333333336, ans=0.125 +2024-07-27 20:39:51,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.368e+01 7.161e+01 8.539e+01 1.176e+02, threshold=1.432e+02, percent-clipped=0.0 +2024-07-27 20:39:54,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=59774.666666666664, ans=0.05 +2024-07-27 20:39:59,279 INFO [train.py:1114] (3/4) Epoch 5, batch 3950, loss[loss=0.2692, simple_loss=0.3376, pruned_loss=0.1005, over 4828.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.3207, pruned_loss=0.08307, over 944813.56 frames. 
], batch size: 16, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:40:04,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59788.0, ans=0.1 +2024-07-27 20:40:07,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=59801.333333333336, ans=0.125 +2024-07-27 20:40:10,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=59801.333333333336, ans=0.07 +2024-07-27 20:40:13,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=59814.666666666664, ans=0.07 +2024-07-27 20:40:13,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=59814.666666666664, ans=0.125 +2024-07-27 20:40:13,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59814.666666666664, ans=0.125 +2024-07-27 20:40:16,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=59814.666666666664, ans=0.125 +2024-07-27 20:40:23,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=59828.0, ans=0.2 +2024-07-27 20:40:23,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=59828.0, ans=0.0 +2024-07-27 20:40:33,152 INFO [train.py:1114] (3/4) Epoch 5, batch 4000, loss[loss=0.2107, simple_loss=0.2815, pruned_loss=0.06991, over 4780.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3214, pruned_loss=0.08383, over 941419.11 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:40:35,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.06 vs. limit=22.5 +2024-07-27 20:40:38,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59854.666666666664, ans=0.125 +2024-07-27 20:40:50,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=59881.333333333336, ans=0.0 +2024-07-27 20:40:52,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=59881.333333333336, ans=10.0 +2024-07-27 20:40:57,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=59894.666666666664, ans=0.2 +2024-07-27 20:40:59,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=59894.666666666664, ans=0.125 +2024-07-27 20:41:01,639 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.270e+01 7.255e+01 8.485e+01 1.075e+02, threshold=1.451e+02, percent-clipped=0.0 +2024-07-27 20:41:09,821 INFO [train.py:1114] (3/4) Epoch 5, batch 4050, loss[loss=0.3005, simple_loss=0.352, pruned_loss=0.1245, over 3409.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3202, pruned_loss=0.08315, over 939666.75 frames. 
], batch size: 36, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:41:10,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.68 vs. limit=12.0 +2024-07-27 20:41:34,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=59961.333333333336, ans=0.1 +2024-07-27 20:41:34,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.74 vs. limit=15.0 +2024-07-27 20:41:43,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=59974.666666666664, ans=0.125 +2024-07-27 20:41:45,358 INFO [train.py:1114] (3/4) Epoch 5, batch 4100, loss[loss=0.2501, simple_loss=0.3324, pruned_loss=0.08388, over 4895.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3217, pruned_loss=0.08424, over 938543.83 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:41:46,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=59988.0, ans=0.125 +2024-07-27 20:41:47,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=59988.0, ans=0.125 +2024-07-27 20:42:01,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=60014.666666666664, ans=0.2 +2024-07-27 20:42:11,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=60028.0, ans=0.0 +2024-07-27 20:42:12,618 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.593e+01 8.156e+01 1.046e+02 1.897e+02, threshold=1.631e+02, percent-clipped=3.0 +2024-07-27 20:42:15,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=60041.333333333336, ans=0.0 +2024-07-27 20:42:17,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=60041.333333333336, ans=0.09899494936611666 +2024-07-27 20:42:18,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60041.333333333336, ans=0.125 +2024-07-27 20:42:20,630 INFO [train.py:1114] (3/4) Epoch 5, batch 4150, loss[loss=0.2447, simple_loss=0.3095, pruned_loss=0.08999, over 4829.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3216, pruned_loss=0.08445, over 938353.07 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:42:22,100 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:42:22,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.92 vs. 
limit=22.5 +2024-07-27 20:42:23,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60054.666666666664, ans=0.1 +2024-07-27 20:42:24,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=60054.666666666664, ans=0.125 +2024-07-27 20:42:26,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=60068.0, ans=0.125 +2024-07-27 20:42:31,578 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-07-27 20:42:42,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=60094.666666666664, ans=0.125 +2024-07-27 20:42:43,365 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.392e+00 +2024-07-27 20:42:45,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60094.666666666664, ans=0.125 +2024-07-27 20:42:47,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=60094.666666666664, ans=0.025 +2024-07-27 20:42:55,753 INFO [train.py:1114] (3/4) Epoch 5, batch 4200, loss[loss=0.2911, simple_loss=0.3654, pruned_loss=0.1084, over 4905.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3221, pruned_loss=0.08434, over 939754.96 frames. ], batch size: 15, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:42:59,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=60121.333333333336, ans=0.125 +2024-07-27 20:43:09,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=60148.0, ans=0.125 +2024-07-27 20:43:17,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60161.333333333336, ans=0.1 +2024-07-27 20:43:20,329 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.148e+01 7.735e+01 9.943e+01 1.461e+02, threshold=1.547e+02, percent-clipped=0.0 +2024-07-27 20:43:26,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=60174.666666666664, ans=15.0 +2024-07-27 20:43:28,470 INFO [train.py:1114] (3/4) Epoch 5, batch 4250, loss[loss=0.2211, simple_loss=0.2936, pruned_loss=0.07427, over 4642.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3222, pruned_loss=0.08401, over 940835.90 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:43:36,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=60201.333333333336, ans=0.0 +2024-07-27 20:43:43,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.44 vs. 
limit=15.0 +2024-07-27 20:44:00,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=60254.666666666664, ans=0.07 +2024-07-27 20:44:01,223 INFO [train.py:1114] (3/4) Epoch 5, batch 4300, loss[loss=0.2385, simple_loss=0.3163, pruned_loss=0.08029, over 4760.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3224, pruned_loss=0.08428, over 940480.31 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:44:10,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=60268.0, ans=0.125 +2024-07-27 20:44:18,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60281.333333333336, ans=0.1 +2024-07-27 20:44:19,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=60281.333333333336, ans=0.125 +2024-07-27 20:44:22,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=60294.666666666664, ans=0.1 +2024-07-27 20:44:26,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.022e+01 6.170e+01 6.762e+01 7.364e+01 1.372e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-27 20:44:30,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=60308.0, ans=0.125 +2024-07-27 20:44:34,439 INFO [train.py:1114] (3/4) Epoch 5, batch 4350, loss[loss=0.2191, simple_loss=0.2962, pruned_loss=0.07095, over 4760.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3227, pruned_loss=0.08454, over 941259.78 frames. ], batch size: 13, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:44:41,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.96 vs. limit=15.0 +2024-07-27 20:44:50,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=60348.0, ans=0.125 +2024-07-27 20:44:53,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60348.0, ans=0.125 +2024-07-27 20:44:53,736 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.16 vs. limit=10.0 +2024-07-27 20:44:54,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=60361.333333333336, ans=0.125 +2024-07-27 20:44:59,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.35 vs. limit=22.5 +2024-07-27 20:45:00,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=60374.666666666664, ans=0.2 +2024-07-27 20:45:05,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=60374.666666666664, ans=0.025 +2024-07-27 20:45:08,020 INFO [train.py:1114] (3/4) Epoch 5, batch 4400, loss[loss=0.242, simple_loss=0.3152, pruned_loss=0.0844, over 4801.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3224, pruned_loss=0.0844, over 940765.56 frames. 
], batch size: 14, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:45:08,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=60388.0, ans=0.0 +2024-07-27 20:45:13,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=60388.0, ans=0.125 +2024-07-27 20:45:20,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=60401.333333333336, ans=0.125 +2024-07-27 20:45:33,393 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.176e+01 6.353e+01 6.947e+01 8.100e+01 1.220e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-27 20:45:37,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=60441.333333333336, ans=0.0 +2024-07-27 20:45:41,668 INFO [train.py:1114] (3/4) Epoch 5, batch 4450, loss[loss=0.2324, simple_loss=0.3036, pruned_loss=0.0806, over 4947.00 frames. ], tot_loss[loss=0.2472, simple_loss=0.3235, pruned_loss=0.08542, over 938515.22 frames. ], batch size: 12, lr: 1.48e-02, grad_scale: 32.0 +2024-07-27 20:45:41,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60454.666666666664, ans=0.125 +2024-07-27 20:45:51,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=60468.0, ans=0.05 +2024-07-27 20:45:55,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=60481.333333333336, ans=0.125 +2024-07-27 20:46:01,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=60494.666666666664, ans=0.1 +2024-07-27 20:46:07,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=60494.666666666664, ans=0.125 +2024-07-27 20:46:10,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=60508.0, ans=0.0 +2024-07-27 20:46:13,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=60508.0, ans=0.1 +2024-07-27 20:46:16,936 INFO [train.py:1114] (3/4) Epoch 5, batch 4500, loss[loss=0.2491, simple_loss=0.336, pruned_loss=0.08115, over 4725.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3235, pruned_loss=0.08526, over 937563.96 frames. 
], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:46:17,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=60521.333333333336, ans=0.0 +2024-07-27 20:46:25,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60534.666666666664, ans=0.1 +2024-07-27 20:46:36,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60561.333333333336, ans=0.1 +2024-07-27 20:46:41,931 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.144e+01 7.215e+01 8.358e+01 1.180e+02, threshold=1.443e+02, percent-clipped=0.0 +2024-07-27 20:46:44,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=60574.666666666664, ans=0.1 +2024-07-27 20:46:49,960 INFO [train.py:1114] (3/4) Epoch 5, batch 4550, loss[loss=0.2371, simple_loss=0.31, pruned_loss=0.08207, over 4901.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3225, pruned_loss=0.0847, over 939673.16 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:46:54,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=60588.0, ans=0.0 +2024-07-27 20:46:56,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=60601.333333333336, ans=0.95 +2024-07-27 20:46:59,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=60601.333333333336, ans=0.2 +2024-07-27 20:47:14,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.98 vs. limit=22.5 +2024-07-27 20:47:20,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=60641.333333333336, ans=0.2 +2024-07-27 20:47:25,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.55 vs. limit=22.5 +2024-07-27 20:47:25,722 INFO [train.py:1114] (3/4) Epoch 5, batch 4600, loss[loss=0.2965, simple_loss=0.3705, pruned_loss=0.1113, over 4424.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3223, pruned_loss=0.08407, over 938174.99 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:47:41,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=12.35 vs. limit=15.0 +2024-07-27 20:47:41,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=60681.333333333336, ans=0.125 +2024-07-27 20:47:46,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.81 vs. 
limit=15.0 +2024-07-27 20:47:48,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=60694.666666666664, ans=0.125 +2024-07-27 20:47:50,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=60694.666666666664, ans=0.0 +2024-07-27 20:47:51,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=60694.666666666664, ans=0.125 +2024-07-27 20:47:52,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=60694.666666666664, ans=0.0 +2024-07-27 20:47:52,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0 +2024-07-27 20:47:53,335 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 6.426e+01 7.546e+01 8.603e+01 1.273e+02, threshold=1.509e+02, percent-clipped=0.0 +2024-07-27 20:48:00,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=60708.0, ans=0.0 +2024-07-27 20:48:03,053 INFO [train.py:1114] (3/4) Epoch 5, batch 4650, loss[loss=0.2508, simple_loss=0.3153, pruned_loss=0.09311, over 4834.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3215, pruned_loss=0.08376, over 939823.41 frames. ], batch size: 16, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:48:13,999 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:48:22,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.23 vs. limit=15.0 +2024-07-27 20:48:25,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=60761.333333333336, ans=0.07 +2024-07-27 20:48:28,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=60761.333333333336, ans=0.2 +2024-07-27 20:48:28,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=60761.333333333336, ans=0.0 +2024-07-27 20:48:28,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.37 vs. limit=15.0 +2024-07-27 20:48:36,511 INFO [train.py:1114] (3/4) Epoch 5, batch 4700, loss[loss=0.1825, simple_loss=0.2576, pruned_loss=0.05366, over 4699.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3209, pruned_loss=0.0834, over 937612.06 frames. 
], batch size: 11, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:48:42,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=60801.333333333336, ans=0.0 +2024-07-27 20:48:43,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=60801.333333333336, ans=0.125 +2024-07-27 20:48:53,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=60814.666666666664, ans=0.125 +2024-07-27 20:49:01,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=60828.0, ans=0.2 +2024-07-27 20:49:02,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.640e+01 6.344e+01 7.380e+01 9.406e+01 1.591e+02, threshold=1.476e+02, percent-clipped=1.0 +2024-07-27 20:49:07,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=60841.333333333336, ans=0.2 +2024-07-27 20:49:10,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=60854.666666666664, ans=0.1 +2024-07-27 20:49:10,642 INFO [train.py:1114] (3/4) Epoch 5, batch 4750, loss[loss=0.2861, simple_loss=0.3693, pruned_loss=0.1015, over 4599.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3222, pruned_loss=0.08439, over 935983.73 frames. ], batch size: 21, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:49:11,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=60854.666666666664, ans=0.0 +2024-07-27 20:49:27,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=60881.333333333336, ans=0.125 +2024-07-27 20:49:45,210 INFO [train.py:1114] (3/4) Epoch 5, batch 4800, loss[loss=0.1898, simple_loss=0.2592, pruned_loss=0.0602, over 4690.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3215, pruned_loss=0.0848, over 933152.67 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:49:49,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.10 vs. limit=6.0 +2024-07-27 20:49:59,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.83 vs. limit=15.0 +2024-07-27 20:50:10,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.19 vs. limit=12.0 +2024-07-27 20:50:10,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.566e+01 6.152e+01 6.705e+01 7.633e+01 9.767e+01, threshold=1.341e+02, percent-clipped=0.0 +2024-07-27 20:50:18,674 INFO [train.py:1114] (3/4) Epoch 5, batch 4850, loss[loss=0.2526, simple_loss=0.3293, pruned_loss=0.0879, over 4734.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3219, pruned_loss=0.08492, over 932917.94 frames. 
], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:50:26,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=61001.333333333336, ans=0.0 +2024-07-27 20:50:29,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=12.0 +2024-07-27 20:50:47,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.80 vs. limit=15.0 +2024-07-27 20:50:49,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61041.333333333336, ans=0.1 +2024-07-27 20:50:51,572 INFO [train.py:1114] (3/4) Epoch 5, batch 4900, loss[loss=0.2066, simple_loss=0.2908, pruned_loss=0.06117, over 4756.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3209, pruned_loss=0.08404, over 934677.67 frames. ], batch size: 13, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:51:02,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61068.0, ans=0.125 +2024-07-27 20:51:12,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.66 vs. limit=22.5 +2024-07-27 20:51:13,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=61081.333333333336, ans=0.07 +2024-07-27 20:51:13,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=61081.333333333336, ans=15.0 +2024-07-27 20:51:20,165 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.134e+01 6.112e+01 6.910e+01 8.321e+01 1.535e+02, threshold=1.382e+02, percent-clipped=5.0 +2024-07-27 20:51:28,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=61108.0, ans=0.2 +2024-07-27 20:51:35,802 INFO [train.py:1114] (3/4) Epoch 5, batch 4950, loss[loss=0.377, simple_loss=0.4023, pruned_loss=0.1759, over 3456.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3222, pruned_loss=0.08493, over 931466.74 frames. ], batch size: 35, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:51:43,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=61121.333333333336, ans=0.1 +2024-07-27 20:51:44,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=61121.333333333336, ans=0.0 +2024-07-27 20:51:58,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=61148.0, ans=0.025 +2024-07-27 20:52:13,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=61174.666666666664, ans=0.025 +2024-07-27 20:52:16,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=61174.666666666664, ans=10.0 +2024-07-27 20:52:18,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.73 vs. 
limit=22.5 +2024-07-27 20:52:19,437 INFO [train.py:1114] (3/4) Epoch 5, batch 5000, loss[loss=0.234, simple_loss=0.3271, pruned_loss=0.07048, over 4676.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3212, pruned_loss=0.08437, over 935354.66 frames. ], batch size: 14, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:52:22,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=61188.0, ans=0.025 +2024-07-27 20:52:28,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=61201.333333333336, ans=0.0 +2024-07-27 20:52:34,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=61214.666666666664, ans=0.05 +2024-07-27 20:52:38,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=61214.666666666664, ans=0.025 +2024-07-27 20:52:40,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=61228.0, ans=0.0 +2024-07-27 20:52:41,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61228.0, ans=0.0 +2024-07-27 20:52:41,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.59 vs. limit=10.0 +2024-07-27 20:52:41,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=61228.0, ans=0.0 +2024-07-27 20:52:48,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.055e+01 6.404e+01 7.517e+01 8.761e+01 1.608e+02, threshold=1.503e+02, percent-clipped=2.0 +2024-07-27 20:53:06,827 INFO [train.py:1114] (3/4) Epoch 5, batch 5050, loss[loss=0.2104, simple_loss=0.2845, pruned_loss=0.06817, over 4855.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3191, pruned_loss=0.08328, over 937864.43 frames. ], batch size: 12, lr: 1.47e-02, grad_scale: 32.0 +2024-07-27 20:53:12,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=61254.666666666664, ans=0.125 +2024-07-27 20:53:19,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61268.0, ans=0.1 +2024-07-27 20:53:21,342 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.00 vs. limit=22.5 +2024-07-27 20:53:52,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=61321.333333333336, ans=0.125 +2024-07-27 20:53:53,182 INFO [train.py:1114] (3/4) Epoch 5, batch 5100, loss[loss=0.2482, simple_loss=0.3246, pruned_loss=0.08587, over 4771.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3212, pruned_loss=0.08431, over 935358.21 frames. 
], batch size: 12, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:53:53,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=61321.333333333336, ans=0.0 +2024-07-27 20:53:55,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=61321.333333333336, ans=0.0 +2024-07-27 20:53:55,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-07-27 20:53:56,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.94 vs. limit=15.0 +2024-07-27 20:53:58,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=61321.333333333336, ans=0.125 +2024-07-27 20:54:09,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61348.0, ans=0.1 +2024-07-27 20:54:18,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=61361.333333333336, ans=0.0 +2024-07-27 20:54:20,585 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.842e+01 6.137e+01 6.842e+01 8.040e+01 3.164e+02, threshold=1.368e+02, percent-clipped=1.0 +2024-07-27 20:54:23,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=61374.666666666664, ans=0.125 +2024-07-27 20:54:27,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=61374.666666666664, ans=0.2 +2024-07-27 20:54:29,751 INFO [train.py:1114] (3/4) Epoch 5, batch 5150, loss[loss=0.2601, simple_loss=0.3377, pruned_loss=0.09123, over 4854.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3223, pruned_loss=0.08438, over 936209.33 frames. 
], batch size: 16, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:54:29,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=61388.0, ans=0.09899494936611666 +2024-07-27 20:54:31,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=61388.0, ans=0.125 +2024-07-27 20:54:43,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff3.min_abs, batch_count=61401.333333333336, ans=0.2 +2024-07-27 20:54:48,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61401.333333333336, ans=0.1 +2024-07-27 20:54:51,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=61414.666666666664, ans=0.025 +2024-07-27 20:54:52,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=61414.666666666664, ans=0.2 +2024-07-27 20:54:53,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=61414.666666666664, ans=0.04949747468305833 +2024-07-27 20:54:57,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=61428.0, ans=0.1 +2024-07-27 20:55:10,111 INFO [train.py:1114] (3/4) Epoch 5, batch 5200, loss[loss=0.3095, simple_loss=0.3856, pruned_loss=0.1167, over 4674.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3234, pruned_loss=0.08524, over 936627.82 frames. ], batch size: 14, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:55:26,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=61468.0, ans=0.04949747468305833 +2024-07-27 20:55:28,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=61468.0, ans=0.0 +2024-07-27 20:55:29,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=61481.333333333336, ans=0.125 +2024-07-27 20:55:37,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0 +2024-07-27 20:55:38,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.00 vs. limit=15.0 +2024-07-27 20:55:42,001 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.018e+01 6.430e+01 7.385e+01 8.844e+01 1.293e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 20:55:42,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.27 vs. limit=15.0 +2024-07-27 20:55:44,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=61508.0, ans=0.125 +2024-07-27 20:55:46,135 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 20:55:50,192 INFO [train.py:1114] (3/4) Epoch 5, batch 5250, loss[loss=0.2141, simple_loss=0.292, pruned_loss=0.06816, over 4891.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.3221, pruned_loss=0.08431, over 936002.75 frames. 
], batch size: 13, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:55:56,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=61534.666666666664, ans=0.0 +2024-07-27 20:55:59,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=61534.666666666664, ans=0.125 +2024-07-27 20:56:01,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.26 vs. limit=15.0 +2024-07-27 20:56:04,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=61548.0, ans=0.1 +2024-07-27 20:56:20,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=61574.666666666664, ans=0.07 +2024-07-27 20:56:24,401 INFO [train.py:1114] (3/4) Epoch 5, batch 5300, loss[loss=0.2731, simple_loss=0.3494, pruned_loss=0.0984, over 4640.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3211, pruned_loss=0.08428, over 934176.49 frames. ], batch size: 16, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:56:28,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-27 20:56:33,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=61601.333333333336, ans=0.035 +2024-07-27 20:56:40,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=61614.666666666664, ans=0.1 +2024-07-27 20:56:48,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=61628.0, ans=0.05 +2024-07-27 20:56:50,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=61628.0, ans=0.0 +2024-07-27 20:56:53,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.75 vs. limit=12.0 +2024-07-27 20:56:54,053 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.200e+01 6.732e+01 7.536e+01 1.097e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-27 20:56:58,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=61641.333333333336, ans=0.025 +2024-07-27 20:57:02,173 INFO [train.py:1114] (3/4) Epoch 5, batch 5350, loss[loss=0.2028, simple_loss=0.2784, pruned_loss=0.06364, over 4530.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3234, pruned_loss=0.08484, over 936168.28 frames. ], batch size: 10, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:57:09,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=61668.0, ans=0.95 +2024-07-27 20:57:10,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=61668.0, ans=0.0 +2024-07-27 20:57:12,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.97 vs. 
limit=15.0 +2024-07-27 20:57:21,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=61681.333333333336, ans=0.125 +2024-07-27 20:57:22,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.38 vs. limit=22.5 +2024-07-27 20:57:27,829 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.45 vs. limit=15.0 +2024-07-27 20:57:41,135 INFO [train.py:1114] (3/4) Epoch 5, batch 5400, loss[loss=0.2809, simple_loss=0.3575, pruned_loss=0.1022, over 4287.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.324, pruned_loss=0.08553, over 930922.30 frames. ], batch size: 25, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:57:47,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=61721.333333333336, ans=0.125 +2024-07-27 20:57:50,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=61734.666666666664, ans=0.0 +2024-07-27 20:57:58,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=61748.0, ans=0.0 +2024-07-27 20:58:08,953 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.299e+01 6.991e+01 7.974e+01 1.272e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-27 20:58:11,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=61774.666666666664, ans=0.07 +2024-07-27 20:58:22,864 INFO [train.py:1114] (3/4) Epoch 5, batch 5450, loss[loss=0.211, simple_loss=0.2829, pruned_loss=0.06952, over 4710.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3224, pruned_loss=0.08456, over 933992.65 frames. ], batch size: 11, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:58:45,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.56 vs. limit=15.0 +2024-07-27 20:58:58,517 INFO [train.py:1114] (3/4) Epoch 5, batch 5500, loss[loss=0.2806, simple_loss=0.3434, pruned_loss=0.1089, over 4297.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3226, pruned_loss=0.08514, over 931950.38 frames. ], batch size: 26, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:59:00,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=61854.666666666664, ans=0.09899494936611666 +2024-07-27 20:59:05,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=61868.0, ans=0.2 +2024-07-27 20:59:24,622 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.189e+01 6.179e+01 6.952e+01 7.770e+01 1.227e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 20:59:28,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=61908.0, ans=0.125 +2024-07-27 20:59:32,511 INFO [train.py:1114] (3/4) Epoch 5, batch 5550, loss[loss=0.2517, simple_loss=0.3289, pruned_loss=0.08726, over 4720.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3221, pruned_loss=0.08495, over 933922.71 frames. 
], batch size: 12, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 20:59:45,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=61934.666666666664, ans=0.125 +2024-07-27 20:59:57,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=14.98 vs. limit=15.0 +2024-07-27 21:00:00,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=61961.333333333336, ans=0.125 +2024-07-27 21:00:00,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-07-27 21:00:04,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.02 vs. limit=6.0 +2024-07-27 21:00:09,915 INFO [train.py:1114] (3/4) Epoch 5, batch 5600, loss[loss=0.2625, simple_loss=0.3445, pruned_loss=0.09019, over 4735.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.323, pruned_loss=0.08504, over 935103.98 frames. ], batch size: 14, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 21:00:13,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=61988.0, ans=0.0 +2024-07-27 21:00:14,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.26 vs. limit=10.0 +2024-07-27 21:00:25,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.02 vs. limit=6.0 +2024-07-27 21:00:29,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=62014.666666666664, ans=0.125 +2024-07-27 21:00:36,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62028.0, ans=0.125 +2024-07-27 21:00:47,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62028.0, ans=0.125 +2024-07-27 21:00:53,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.144e+01 6.040e+01 6.647e+01 7.605e+01 1.041e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-27 21:00:59,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=62041.333333333336, ans=0.5 +2024-07-27 21:01:00,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.27 vs. limit=15.0 +2024-07-27 21:01:01,030 INFO [train.py:1114] (3/4) Epoch 5, batch 5650, loss[loss=0.2587, simple_loss=0.321, pruned_loss=0.0982, over 4560.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.3216, pruned_loss=0.08437, over 937585.23 frames. ], batch size: 21, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 21:01:07,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.16 vs. limit=22.5 +2024-07-27 21:01:07,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.19 vs. 
limit=15.0 +2024-07-27 21:01:07,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.85 vs. limit=15.0 +2024-07-27 21:01:07,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=62068.0, ans=0.125 +2024-07-27 21:01:10,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=62068.0, ans=0.0 +2024-07-27 21:01:19,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62068.0, ans=0.1 +2024-07-27 21:01:25,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=62081.333333333336, ans=0.025 +2024-07-27 21:01:26,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=62081.333333333336, ans=0.035 +2024-07-27 21:01:42,140 INFO [train.py:1114] (3/4) Epoch 5, batch 5700, loss[loss=0.2532, simple_loss=0.3253, pruned_loss=0.09054, over 4700.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3224, pruned_loss=0.08443, over 938211.43 frames. ], batch size: 13, lr: 1.46e-02, grad_scale: 64.0 +2024-07-27 21:01:42,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=62121.333333333336, ans=0.0 +2024-07-27 21:01:51,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.56 vs. limit=10.0 +2024-07-27 21:02:03,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62161.333333333336, ans=0.125 +2024-07-27 21:02:07,557 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.270e+01 6.365e+01 6.985e+01 7.849e+01 1.267e+02, threshold=1.397e+02, percent-clipped=0.0 +2024-07-27 21:02:20,506 INFO [train.py:1114] (3/4) Epoch 5, batch 5750, loss[loss=0.2868, simple_loss=0.3588, pruned_loss=0.1074, over 4748.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.3225, pruned_loss=0.08418, over 938246.69 frames. ], batch size: 19, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:02:22,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=62188.0, ans=0.04949747468305833 +2024-07-27 21:02:27,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=62188.0, ans=0.125 +2024-07-27 21:02:34,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.89 vs. limit=15.0 +2024-07-27 21:02:46,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=62214.666666666664, ans=0.1 +2024-07-27 21:02:56,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=62228.0, ans=0.0 +2024-07-27 21:03:09,030 INFO [train.py:1114] (3/4) Epoch 5, batch 5800, loss[loss=0.2765, simple_loss=0.3409, pruned_loss=0.106, over 4725.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3221, pruned_loss=0.08375, over 937308.78 frames. 
], batch size: 19, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:03:25,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=15.0 +2024-07-27 21:03:34,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62281.333333333336, ans=0.125 +2024-07-27 21:03:36,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.21 vs. limit=15.0 +2024-07-27 21:03:46,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.301e+01 6.477e+01 7.083e+01 8.928e+01 1.486e+02, threshold=1.417e+02, percent-clipped=3.0 +2024-07-27 21:03:47,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62308.0, ans=0.1 +2024-07-27 21:03:51,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62308.0, ans=0.0 +2024-07-27 21:03:57,630 INFO [train.py:1114] (3/4) Epoch 5, batch 5850, loss[loss=0.2602, simple_loss=0.3387, pruned_loss=0.09083, over 4472.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3224, pruned_loss=0.08351, over 938202.17 frames. ], batch size: 21, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:04:15,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=62334.666666666664, ans=0.125 +2024-07-27 21:04:21,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=62348.0, ans=0.125 +2024-07-27 21:04:36,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=62348.0, ans=0.125 +2024-07-27 21:04:39,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=62361.333333333336, ans=0.025 +2024-07-27 21:04:43,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62361.333333333336, ans=0.125 +2024-07-27 21:04:47,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.08 vs. limit=6.0 +2024-07-27 21:04:49,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=62374.666666666664, ans=0.125 +2024-07-27 21:05:03,539 INFO [train.py:1114] (3/4) Epoch 5, batch 5900, loss[loss=0.2495, simple_loss=0.3443, pruned_loss=0.07737, over 4692.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3227, pruned_loss=0.08371, over 938639.97 frames. 
], batch size: 15, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:05:04,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=62388.0, ans=0.125 +2024-07-27 21:05:07,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62388.0, ans=0.125 +2024-07-27 21:05:27,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=62428.0, ans=0.025 +2024-07-27 21:05:29,068 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.135e+01 6.365e+01 7.384e+01 8.628e+01 1.400e+02, threshold=1.477e+02, percent-clipped=0.0 +2024-07-27 21:05:37,068 INFO [train.py:1114] (3/4) Epoch 5, batch 5950, loss[loss=0.2758, simple_loss=0.3585, pruned_loss=0.09652, over 4676.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.324, pruned_loss=0.0845, over 940335.94 frames. ], batch size: 15, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:05:42,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=62454.666666666664, ans=0.2 +2024-07-27 21:05:51,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.14 vs. limit=15.0 +2024-07-27 21:05:55,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=62481.333333333336, ans=0.1 +2024-07-27 21:05:58,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.50 vs. limit=15.0 +2024-07-27 21:06:08,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=62508.0, ans=0.0 +2024-07-27 21:06:12,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=62521.333333333336, ans=0.125 +2024-07-27 21:06:13,056 INFO [train.py:1114] (3/4) Epoch 5, batch 6000, loss[loss=0.3096, simple_loss=0.3711, pruned_loss=0.1241, over 4122.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3241, pruned_loss=0.08475, over 937302.90 frames. ], batch size: 25, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:06:13,057 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 21:06:38,804 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7644, 2.4549, 3.5964, 2.1741], device='cuda:3') +2024-07-27 21:07:25,585 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.1984, simple_loss=0.3025, pruned_loss=0.04714, over 944034.00 frames. 
+2024-07-27 21:07:25,587 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 21:07:37,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=62534.666666666664, ans=0.125 +2024-07-27 21:07:40,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=62534.666666666664, ans=0.125 +2024-07-27 21:07:53,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=62561.333333333336, ans=0.035 +2024-07-27 21:07:55,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 6.382e+01 7.254e+01 8.565e+01 1.652e+02, threshold=1.451e+02, percent-clipped=1.0 +2024-07-27 21:07:56,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.28 vs. limit=15.0 +2024-07-27 21:08:01,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=62574.666666666664, ans=0.125 +2024-07-27 21:08:03,174 INFO [train.py:1114] (3/4) Epoch 5, batch 6050, loss[loss=0.2374, simple_loss=0.3001, pruned_loss=0.08737, over 4781.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3233, pruned_loss=0.08448, over 938413.00 frames. ], batch size: 12, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:08:18,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=62614.666666666664, ans=0.0 +2024-07-27 21:08:30,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=62628.0, ans=0.125 +2024-07-27 21:08:38,503 INFO [train.py:1114] (3/4) Epoch 5, batch 6100, loss[loss=0.2283, simple_loss=0.3185, pruned_loss=0.06905, over 4682.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3223, pruned_loss=0.08369, over 938086.82 frames. ], batch size: 15, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:08:51,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.98 vs. limit=22.5 +2024-07-27 21:08:59,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=62694.666666666664, ans=0.125 +2024-07-27 21:09:01,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=62694.666666666664, ans=0.2 +2024-07-27 21:09:01,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.32 vs. limit=12.0 +2024-07-27 21:09:04,970 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 6.274e+01 6.891e+01 8.796e+01 1.456e+02, threshold=1.378e+02, percent-clipped=1.0 +2024-07-27 21:09:11,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=62708.0, ans=0.125 +2024-07-27 21:09:13,063 INFO [train.py:1114] (3/4) Epoch 5, batch 6150, loss[loss=0.3775, simple_loss=0.421, pruned_loss=0.167, over 3273.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3219, pruned_loss=0.08376, over 936155.05 frames. 
], batch size: 35, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:09:18,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=62721.333333333336, ans=0.0 +2024-07-27 21:09:36,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=62734.666666666664, ans=0.2 +2024-07-27 21:09:37,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=62734.666666666664, ans=0.0 +2024-07-27 21:09:39,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=62748.0, ans=0.0 +2024-07-27 21:09:42,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=62748.0, ans=0.0 +2024-07-27 21:09:49,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=62761.333333333336, ans=0.125 +2024-07-27 21:09:56,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.38 vs. limit=22.5 +2024-07-27 21:09:57,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=62774.666666666664, ans=0.125 +2024-07-27 21:09:57,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=62774.666666666664, ans=0.0 +2024-07-27 21:09:58,598 INFO [train.py:1114] (3/4) Epoch 5, batch 6200, loss[loss=0.233, simple_loss=0.3271, pruned_loss=0.06942, over 4741.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3231, pruned_loss=0.08453, over 936010.01 frames. ], batch size: 14, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:10:01,603 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:10:06,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=62801.333333333336, ans=0.125 +2024-07-27 21:10:09,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=16.53 vs. limit=15.0 +2024-07-27 21:10:10,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=62801.333333333336, ans=0.125 +2024-07-27 21:10:12,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.41 vs. limit=15.0 +2024-07-27 21:10:15,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=62814.666666666664, ans=0.0 +2024-07-27 21:10:25,267 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.230e+01 6.498e+01 7.693e+01 9.750e+01 1.653e+02, threshold=1.539e+02, percent-clipped=3.0 +2024-07-27 21:10:32,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=62841.333333333336, ans=0.0 +2024-07-27 21:10:33,740 INFO [train.py:1114] (3/4) Epoch 5, batch 6250, loss[loss=0.242, simple_loss=0.3258, pruned_loss=0.07906, over 4807.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3231, pruned_loss=0.08459, over 933097.93 frames. 
], batch size: 14, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:10:39,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=62854.666666666664, ans=0.125 +2024-07-27 21:10:56,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=62894.666666666664, ans=0.125 +2024-07-27 21:11:01,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=62894.666666666664, ans=0.125 +2024-07-27 21:11:08,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=62908.0, ans=0.125 +2024-07-27 21:11:12,028 INFO [train.py:1114] (3/4) Epoch 5, batch 6300, loss[loss=0.2833, simple_loss=0.3371, pruned_loss=0.1148, over 4553.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3223, pruned_loss=0.08462, over 929849.68 frames. ], batch size: 10, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:11:13,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.74 vs. limit=15.0 +2024-07-27 21:11:14,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=62921.333333333336, ans=0.0 +2024-07-27 21:11:23,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=62934.666666666664, ans=0.2 +2024-07-27 21:11:23,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=62934.666666666664, ans=0.125 +2024-07-27 21:11:25,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-27 21:11:30,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.77 vs. limit=15.0 +2024-07-27 21:11:37,091 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.292e+01 6.261e+01 7.099e+01 7.903e+01 1.165e+02, threshold=1.420e+02, percent-clipped=0.0 +2024-07-27 21:11:43,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=62974.666666666664, ans=0.1 +2024-07-27 21:11:44,970 INFO [train.py:1114] (3/4) Epoch 5, batch 6350, loss[loss=0.2599, simple_loss=0.3362, pruned_loss=0.09175, over 4550.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3217, pruned_loss=0.08421, over 933941.14 frames. ], batch size: 21, lr: 1.45e-02, grad_scale: 64.0 +2024-07-27 21:11:47,760 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:12:07,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=63028.0, ans=0.2 +2024-07-27 21:12:11,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.50 vs. 
limit=15.0 +2024-07-27 21:12:13,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=63041.333333333336, ans=0.1 +2024-07-27 21:12:17,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63041.333333333336, ans=0.125 +2024-07-27 21:12:20,305 INFO [train.py:1114] (3/4) Epoch 5, batch 6400, loss[loss=0.2929, simple_loss=0.3693, pruned_loss=0.1083, over 4633.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3216, pruned_loss=0.08472, over 935512.19 frames. ], batch size: 13, lr: 1.45e-02, grad_scale: 32.0 +2024-07-27 21:12:24,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=63054.666666666664, ans=0.0 +2024-07-27 21:12:25,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63054.666666666664, ans=0.125 +2024-07-27 21:12:47,233 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.147e+01 6.719e+01 7.795e+01 8.869e+01 1.661e+02, threshold=1.559e+02, percent-clipped=1.0 +2024-07-27 21:12:50,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.02 vs. limit=22.5 +2024-07-27 21:12:53,959 INFO [train.py:1114] (3/4) Epoch 5, batch 6450, loss[loss=0.2457, simple_loss=0.3367, pruned_loss=0.07734, over 4518.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3216, pruned_loss=0.08382, over 939269.72 frames. ], batch size: 21, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:13:00,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0 +2024-07-27 21:13:06,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=63148.0, ans=0.125 +2024-07-27 21:13:06,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=63148.0, ans=0.125 +2024-07-27 21:13:07,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=63148.0, ans=0.2 +2024-07-27 21:13:16,342 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:13:27,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=63174.666666666664, ans=0.125 +2024-07-27 21:13:30,345 INFO [train.py:1114] (3/4) Epoch 5, batch 6500, loss[loss=0.3475, simple_loss=0.3877, pruned_loss=0.1536, over 3352.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3212, pruned_loss=0.08376, over 940275.22 frames. 
], batch size: 35, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:13:35,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63188.0, ans=0.125 +2024-07-27 21:13:45,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=63214.666666666664, ans=0.0 +2024-07-27 21:13:56,999 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.095e+01 6.739e+01 7.533e+01 1.080e+02, threshold=1.348e+02, percent-clipped=0.0 +2024-07-27 21:14:03,806 INFO [train.py:1114] (3/4) Epoch 5, batch 6550, loss[loss=0.1899, simple_loss=0.2644, pruned_loss=0.05766, over 4794.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.3205, pruned_loss=0.08293, over 943105.62 frames. ], batch size: 11, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:14:22,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.50 vs. limit=22.5 +2024-07-27 21:14:23,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=63294.666666666664, ans=0.0 +2024-07-27 21:14:27,168 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.70 vs. limit=12.0 +2024-07-27 21:14:36,525 INFO [train.py:1114] (3/4) Epoch 5, batch 6600, loss[loss=0.2541, simple_loss=0.3389, pruned_loss=0.0846, over 4934.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3197, pruned_loss=0.08229, over 945005.50 frames. ], batch size: 14, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:14:42,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=63321.333333333336, ans=0.0 +2024-07-27 21:14:50,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=63348.0, ans=0.1 +2024-07-27 21:15:00,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=63361.333333333336, ans=0.0 +2024-07-27 21:15:03,302 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.320e+01 6.369e+01 7.181e+01 8.583e+01 1.412e+02, threshold=1.436e+02, percent-clipped=2.0 +2024-07-27 21:15:04,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.56 vs. limit=15.0 +2024-07-27 21:15:09,891 INFO [train.py:1114] (3/4) Epoch 5, batch 6650, loss[loss=0.2514, simple_loss=0.3314, pruned_loss=0.08572, over 4658.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3187, pruned_loss=0.08198, over 943775.73 frames. ], batch size: 17, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:15:22,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=63401.333333333336, ans=0.2 +2024-07-27 21:15:25,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. limit=6.0 +2024-07-27 21:15:26,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.01 vs. limit=15.0 +2024-07-27 21:15:32,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.92 vs. 
limit=12.0 +2024-07-27 21:15:43,857 INFO [train.py:1114] (3/4) Epoch 5, batch 6700, loss[loss=0.2591, simple_loss=0.3251, pruned_loss=0.09661, over 4699.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.319, pruned_loss=0.08193, over 942548.23 frames. ], batch size: 19, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:15:45,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=63454.666666666664, ans=0.125 +2024-07-27 21:15:46,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=63454.666666666664, ans=0.0 +2024-07-27 21:15:56,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.05 vs. limit=22.5 +2024-07-27 21:16:06,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=63494.666666666664, ans=0.025 +2024-07-27 21:16:07,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=63494.666666666664, ans=0.0 +2024-07-27 21:16:11,138 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.250e+01 6.326e+01 7.074e+01 8.168e+01 1.305e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-27 21:16:19,125 INFO [train.py:1114] (3/4) Epoch 5, batch 6750, loss[loss=0.2575, simple_loss=0.3222, pruned_loss=0.09637, over 4221.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3191, pruned_loss=0.08193, over 940683.75 frames. ], batch size: 25, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:16:25,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=63534.666666666664, ans=0.125 +2024-07-27 21:16:25,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=63534.666666666664, ans=0.125 +2024-07-27 21:16:32,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=63548.0, ans=0.2 +2024-07-27 21:16:41,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.11 vs. limit=15.0 +2024-07-27 21:16:47,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.23 vs. limit=22.5 +2024-07-27 21:16:54,730 INFO [train.py:1114] (3/4) Epoch 5, batch 6800, loss[loss=0.2755, simple_loss=0.3431, pruned_loss=0.1039, over 4631.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3201, pruned_loss=0.08228, over 939260.64 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:16:56,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=63588.0, ans=0.0 +2024-07-27 21:16:58,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=63588.0, ans=0.125 +2024-07-27 21:17:04,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=63601.333333333336, ans=0.2 +2024-07-27 21:17:14,464 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.30 vs. 
limit=15.0 +2024-07-27 21:17:21,273 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.069e+01 6.010e+01 6.782e+01 8.396e+01 1.269e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 21:17:24,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=63641.333333333336, ans=0.125 +2024-07-27 21:17:27,122 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.96 vs. limit=6.0 +2024-07-27 21:17:28,003 INFO [train.py:1114] (3/4) Epoch 5, batch 6850, loss[loss=0.2453, simple_loss=0.336, pruned_loss=0.07727, over 4686.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3202, pruned_loss=0.08222, over 940714.53 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:17:30,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.63 vs. limit=22.5 +2024-07-27 21:17:40,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=63668.0, ans=0.125 +2024-07-27 21:17:49,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=63694.666666666664, ans=0.0 +2024-07-27 21:18:02,829 INFO [train.py:1114] (3/4) Epoch 5, batch 6900, loss[loss=0.2213, simple_loss=0.3056, pruned_loss=0.0685, over 4963.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3205, pruned_loss=0.08255, over 942666.59 frames. ], batch size: 13, lr: 1.44e-02, grad_scale: 32.0 +2024-07-27 21:18:04,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=63721.333333333336, ans=0.05 +2024-07-27 21:18:22,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=63761.333333333336, ans=0.2 +2024-07-27 21:18:30,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.148e+01 6.563e+01 7.062e+01 8.255e+01 1.155e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 21:18:36,458 INFO [train.py:1114] (3/4) Epoch 5, batch 6950, loss[loss=0.203, simple_loss=0.2737, pruned_loss=0.06616, over 4479.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3191, pruned_loss=0.08192, over 940413.14 frames. ], batch size: 10, lr: 1.44e-02, grad_scale: 16.0 +2024-07-27 21:18:49,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.01 vs. limit=15.0 +2024-07-27 21:18:57,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.58 vs. limit=22.5 +2024-07-27 21:19:13,713 INFO [train.py:1114] (3/4) Epoch 5, batch 7000, loss[loss=0.3099, simple_loss=0.372, pruned_loss=0.1239, over 4623.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3198, pruned_loss=0.08256, over 938961.15 frames. 
], batch size: 17, lr: 1.44e-02, grad_scale: 16.0 +2024-07-27 21:19:23,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=63868.0, ans=0.125 +2024-07-27 21:19:30,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=63881.333333333336, ans=0.125 +2024-07-27 21:19:32,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=63881.333333333336, ans=0.2 +2024-07-27 21:19:35,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=63894.666666666664, ans=0.125 +2024-07-27 21:19:40,752 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.209e+01 6.125e+01 6.883e+01 8.000e+01 1.166e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-27 21:19:46,898 INFO [train.py:1114] (3/4) Epoch 5, batch 7050, loss[loss=0.2785, simple_loss=0.3445, pruned_loss=0.1063, over 4673.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3206, pruned_loss=0.0825, over 942078.62 frames. ], batch size: 19, lr: 1.44e-02, grad_scale: 16.0 +2024-07-27 21:19:48,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-27 21:19:56,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=63934.666666666664, ans=0.0 +2024-07-27 21:20:01,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=15.0 +2024-07-27 21:20:14,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=63974.666666666664, ans=0.0 +2024-07-27 21:20:20,295 INFO [train.py:1114] (3/4) Epoch 5, batch 7100, loss[loss=0.2402, simple_loss=0.3291, pruned_loss=0.07564, over 4810.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3218, pruned_loss=0.08384, over 936750.65 frames. 
], batch size: 15, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:20:20,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=63988.0, ans=0.125 +2024-07-27 21:20:20,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=63988.0, ans=0.125 +2024-07-27 21:20:24,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=63988.0, ans=0.125 +2024-07-27 21:20:35,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=64001.333333333336, ans=0.0 +2024-07-27 21:20:43,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64014.666666666664, ans=0.0 +2024-07-27 21:20:45,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=64014.666666666664, ans=0.125 +2024-07-27 21:20:47,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=64028.0, ans=0.0 +2024-07-27 21:20:49,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-07-27 21:20:53,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.086e+01 6.110e+01 6.595e+01 7.846e+01 1.344e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-27 21:20:58,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=64054.666666666664, ans=0.0 +2024-07-27 21:20:59,482 INFO [train.py:1114] (3/4) Epoch 5, batch 7150, loss[loss=0.2605, simple_loss=0.3274, pruned_loss=0.09681, over 4490.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.3182, pruned_loss=0.0822, over 937802.60 frames. ], batch size: 21, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:21:02,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.19 vs. limit=22.5 +2024-07-27 21:21:03,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.05 vs. limit=22.5 +2024-07-27 21:21:07,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=64068.0, ans=0.025 +2024-07-27 21:21:11,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=64068.0, ans=0.125 +2024-07-27 21:21:14,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=64081.333333333336, ans=0.0 +2024-07-27 21:21:18,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=64094.666666666664, ans=0.125 +2024-07-27 21:21:18,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.69 vs. 
limit=15.0 +2024-07-27 21:21:29,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=64108.0, ans=0.125 +2024-07-27 21:21:32,468 INFO [train.py:1114] (3/4) Epoch 5, batch 7200, loss[loss=0.2431, simple_loss=0.3393, pruned_loss=0.07348, over 4807.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3194, pruned_loss=0.08247, over 938032.37 frames. ], batch size: 15, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:21:42,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=64134.666666666664, ans=0.125 +2024-07-27 21:21:59,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.508e+01 6.414e+01 7.135e+01 8.390e+01 1.273e+02, threshold=1.427e+02, percent-clipped=0.0 +2024-07-27 21:22:03,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64174.666666666664, ans=0.1 +2024-07-27 21:22:04,942 INFO [train.py:1114] (3/4) Epoch 5, batch 7250, loss[loss=0.1715, simple_loss=0.2552, pruned_loss=0.04394, over 4849.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3194, pruned_loss=0.0823, over 939902.28 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:22:05,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0 +2024-07-27 21:22:12,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=64201.333333333336, ans=0.0 +2024-07-27 21:22:16,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.04 vs. limit=15.0 +2024-07-27 21:22:17,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64214.666666666664, ans=0.125 +2024-07-27 21:22:26,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.99 vs. limit=22.5 +2024-07-27 21:22:43,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64241.333333333336, ans=0.125 +2024-07-27 21:22:44,623 INFO [train.py:1114] (3/4) Epoch 5, batch 7300, loss[loss=0.1939, simple_loss=0.2799, pruned_loss=0.05399, over 4854.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.3175, pruned_loss=0.08093, over 940370.61 frames. 
], batch size: 12, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:22:45,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=64254.666666666664, ans=0.125 +2024-07-27 21:22:52,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=64268.0, ans=0.2 +2024-07-27 21:22:56,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=64268.0, ans=0.0 +2024-07-27 21:23:05,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=64294.666666666664, ans=0.125 +2024-07-27 21:23:06,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=64294.666666666664, ans=0.0 +2024-07-27 21:23:07,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=64294.666666666664, ans=0.5 +2024-07-27 21:23:11,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=64308.0, ans=0.95 +2024-07-27 21:23:14,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.221e+01 6.188e+01 7.170e+01 8.494e+01 1.437e+02, threshold=1.434e+02, percent-clipped=1.0 +2024-07-27 21:23:20,884 INFO [train.py:1114] (3/4) Epoch 5, batch 7350, loss[loss=0.2266, simple_loss=0.2971, pruned_loss=0.07806, over 4644.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.318, pruned_loss=0.08144, over 939345.80 frames. ], batch size: 12, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:23:22,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=64321.333333333336, ans=0.125 +2024-07-27 21:23:28,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.03 vs. limit=15.0 +2024-07-27 21:23:33,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=64348.0, ans=0.025 +2024-07-27 21:23:48,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=64374.666666666664, ans=0.025 +2024-07-27 21:23:51,172 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:23:56,169 INFO [train.py:1114] (3/4) Epoch 5, batch 7400, loss[loss=0.2578, simple_loss=0.3445, pruned_loss=0.08549, over 4696.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.319, pruned_loss=0.08196, over 940697.72 frames. 
], batch size: 13, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:24:06,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=64401.333333333336, ans=0.2 +2024-07-27 21:24:12,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=64414.666666666664, ans=0.125 +2024-07-27 21:24:12,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=64414.666666666664, ans=0.125 +2024-07-27 21:24:23,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 6.697e+01 7.956e+01 9.233e+01 1.549e+02, threshold=1.591e+02, percent-clipped=1.0 +2024-07-27 21:24:29,313 INFO [train.py:1114] (3/4) Epoch 5, batch 7450, loss[loss=0.28, simple_loss=0.3442, pruned_loss=0.1079, over 4617.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3178, pruned_loss=0.08177, over 937814.26 frames. ], batch size: 11, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:24:34,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64454.666666666664, ans=0.0 +2024-07-27 21:24:42,231 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:25:03,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64521.333333333336, ans=0.1 +2024-07-27 21:25:03,919 INFO [train.py:1114] (3/4) Epoch 5, batch 7500, loss[loss=0.3312, simple_loss=0.3875, pruned_loss=0.1374, over 3396.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3197, pruned_loss=0.08262, over 936092.54 frames. ], batch size: 35, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:25:13,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.39 vs. limit=15.0 +2024-07-27 21:25:19,412 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:25:28,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=64561.333333333336, ans=0.125 +2024-07-27 21:25:33,069 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 6.039e+01 6.623e+01 7.552e+01 1.223e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-27 21:25:42,035 INFO [train.py:1114] (3/4) Epoch 5, batch 7550, loss[loss=0.231, simple_loss=0.3046, pruned_loss=0.07868, over 4634.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.3221, pruned_loss=0.08373, over 935665.99 frames. ], batch size: 17, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:25:44,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.58 vs. 
limit=22.5 +2024-07-27 21:25:44,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=64588.0, ans=0.0 +2024-07-27 21:25:50,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=64601.333333333336, ans=0.125 +2024-07-27 21:25:53,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=64601.333333333336, ans=0.0 +2024-07-27 21:25:57,147 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:25:59,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=64614.666666666664, ans=0.1 +2024-07-27 21:26:03,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.59 vs. limit=10.0 +2024-07-27 21:26:08,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.78 vs. limit=22.5 +2024-07-27 21:26:10,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=64628.0, ans=0.2 +2024-07-27 21:26:17,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=64641.333333333336, ans=0.2 +2024-07-27 21:26:27,401 INFO [train.py:1114] (3/4) Epoch 5, batch 7600, loss[loss=0.3003, simple_loss=0.3679, pruned_loss=0.1163, over 4809.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.3211, pruned_loss=0.08293, over 937685.74 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 32.0 +2024-07-27 21:26:28,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=64654.666666666664, ans=0.0 +2024-07-27 21:26:39,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.50 vs. limit=15.0 +2024-07-27 21:26:42,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64681.333333333336, ans=0.125 +2024-07-27 21:26:44,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=64681.333333333336, ans=0.125 +2024-07-27 21:26:52,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.74 vs. limit=22.5 +2024-07-27 21:26:52,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=64681.333333333336, ans=0.1 +2024-07-27 21:27:02,470 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.190e+01 6.111e+01 6.673e+01 8.200e+01 1.239e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-27 21:27:07,741 INFO [train.py:1114] (3/4) Epoch 5, batch 7650, loss[loss=0.2214, simple_loss=0.2826, pruned_loss=0.08011, over 4943.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.3198, pruned_loss=0.08257, over 937068.56 frames. 
], batch size: 12, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:27:09,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=64721.333333333336, ans=0.2 +2024-07-27 21:27:13,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=64734.666666666664, ans=0.2 +2024-07-27 21:27:25,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=64748.0, ans=0.1 +2024-07-27 21:27:31,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=64761.333333333336, ans=0.0 +2024-07-27 21:27:43,710 INFO [train.py:1114] (3/4) Epoch 5, batch 7700, loss[loss=0.2004, simple_loss=0.2989, pruned_loss=0.05097, over 4693.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.3211, pruned_loss=0.08325, over 934639.71 frames. ], batch size: 13, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:27:45,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=64788.0, ans=0.125 +2024-07-27 21:27:53,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=64788.0, ans=0.0 +2024-07-27 21:27:57,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=64801.333333333336, ans=0.125 +2024-07-27 21:28:20,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 6.292e+01 7.097e+01 8.458e+01 1.099e+02, threshold=1.419e+02, percent-clipped=0.0 +2024-07-27 21:28:25,209 INFO [train.py:1114] (3/4) Epoch 5, batch 7750, loss[loss=0.2188, simple_loss=0.3168, pruned_loss=0.06038, over 4935.00 frames. ], tot_loss[loss=0.2452, simple_loss=0.323, pruned_loss=0.08373, over 935523.65 frames. ], batch size: 14, lr: 1.43e-02, grad_scale: 16.0 +2024-07-27 21:28:25,543 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.18 vs. limit=15.0 +2024-07-27 21:28:28,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=64854.666666666664, ans=0.2 +2024-07-27 21:28:36,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=64868.0, ans=0.2 +2024-07-27 21:28:43,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-07-27 21:28:45,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=64881.333333333336, ans=0.2 +2024-07-27 21:29:02,995 INFO [train.py:1114] (3/4) Epoch 5, batch 7800, loss[loss=0.2066, simple_loss=0.3048, pruned_loss=0.05421, over 4669.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3223, pruned_loss=0.08314, over 937468.85 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:29:03,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.95 vs. 
limit=22.5 +2024-07-27 21:29:05,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=64921.333333333336, ans=0.125 +2024-07-27 21:29:05,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=64921.333333333336, ans=0.0 +2024-07-27 21:29:06,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=64921.333333333336, ans=0.125 +2024-07-27 21:29:10,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=64934.666666666664, ans=0.0 +2024-07-27 21:29:16,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=64948.0, ans=0.125 +2024-07-27 21:29:19,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=64948.0, ans=0.125 +2024-07-27 21:29:30,978 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.317e+01 6.312e+01 7.129e+01 8.364e+01 1.154e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 21:29:36,491 INFO [train.py:1114] (3/4) Epoch 5, batch 7850, loss[loss=0.2689, simple_loss=0.3183, pruned_loss=0.1098, over 4529.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.3224, pruned_loss=0.08336, over 936390.39 frames. ], batch size: 10, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:29:37,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=64988.0, ans=0.0 +2024-07-27 21:29:38,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=64988.0, ans=0.125 +2024-07-27 21:29:53,914 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:29:55,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=65014.666666666664, ans=0.125 +2024-07-27 21:30:05,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=65014.666666666664, ans=0.125 +2024-07-27 21:30:12,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=65028.0, ans=0.5 +2024-07-27 21:30:18,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65041.333333333336, ans=0.125 +2024-07-27 21:30:22,628 INFO [train.py:1114] (3/4) Epoch 5, batch 7900, loss[loss=0.2203, simple_loss=0.2974, pruned_loss=0.07161, over 4871.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.324, pruned_loss=0.08454, over 933405.16 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:30:23,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=65054.666666666664, ans=0.125 +2024-07-27 21:30:25,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.90 vs. limit=15.0 +2024-07-27 21:30:36,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.82 vs. 
limit=15.0 +2024-07-27 21:30:37,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.48 vs. limit=15.0 +2024-07-27 21:30:46,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.81 vs. limit=15.0 +2024-07-27 21:30:48,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=65094.666666666664, ans=0.025 +2024-07-27 21:30:51,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.29 vs. limit=22.5 +2024-07-27 21:30:52,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=65094.666666666664, ans=0.05 +2024-07-27 21:30:55,206 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.330e+01 6.363e+01 7.228e+01 8.012e+01 1.089e+02, threshold=1.446e+02, percent-clipped=0.0 +2024-07-27 21:30:58,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=65108.0, ans=0.125 +2024-07-27 21:31:03,076 INFO [train.py:1114] (3/4) Epoch 5, batch 7950, loss[loss=0.2998, simple_loss=0.359, pruned_loss=0.1203, over 3593.00 frames. ], tot_loss[loss=0.245, simple_loss=0.3227, pruned_loss=0.08366, over 935959.84 frames. ], batch size: 35, lr: 1.42e-02, grad_scale: 16.0 +2024-07-27 21:31:05,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65121.333333333336, ans=0.125 +2024-07-27 21:31:17,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=65148.0, ans=0.2 +2024-07-27 21:31:20,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=65148.0, ans=0.025 +2024-07-27 21:31:30,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=65161.333333333336, ans=0.125 +2024-07-27 21:31:30,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65161.333333333336, ans=0.125 +2024-07-27 21:31:36,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65174.666666666664, ans=0.125 +2024-07-27 21:31:43,112 INFO [train.py:1114] (3/4) Epoch 5, batch 8000, loss[loss=0.2479, simple_loss=0.3044, pruned_loss=0.0957, over 4597.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3217, pruned_loss=0.08316, over 934919.46 frames. 
], batch size: 11, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:31:43,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=65188.0, ans=0.125 +2024-07-27 21:31:45,667 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:31:56,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=65201.333333333336, ans=0.125 +2024-07-27 21:32:04,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=65214.666666666664, ans=0.2 +2024-07-27 21:32:05,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=65214.666666666664, ans=0.125 +2024-07-27 21:32:07,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=65214.666666666664, ans=0.125 +2024-07-27 21:32:10,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=65228.0, ans=0.0 +2024-07-27 21:32:17,001 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 6.447e+01 7.209e+01 8.816e+01 1.330e+02, threshold=1.442e+02, percent-clipped=0.0 +2024-07-27 21:32:23,333 INFO [train.py:1114] (3/4) Epoch 5, batch 8050, loss[loss=0.2453, simple_loss=0.3214, pruned_loss=0.08461, over 4800.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3223, pruned_loss=0.0836, over 934076.35 frames. ], batch size: 14, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:32:26,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=65254.666666666664, ans=0.125 +2024-07-27 21:32:36,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=65281.333333333336, ans=0.05 +2024-07-27 21:32:47,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=65294.666666666664, ans=0.125 +2024-07-27 21:32:49,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=65294.666666666664, ans=0.0 +2024-07-27 21:32:54,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=65308.0, ans=0.0 +2024-07-27 21:32:57,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=65308.0, ans=0.125 +2024-07-27 21:32:58,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=65308.0, ans=0.2 +2024-07-27 21:33:02,305 INFO [train.py:1114] (3/4) Epoch 5, batch 8100, loss[loss=0.2937, simple_loss=0.365, pruned_loss=0.1112, over 4800.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3222, pruned_loss=0.08326, over 933284.27 frames. 
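
The optim.py:487 WARNING lines summarize the recent distribution of gradient norms as five quantiles (reading as min, 25%, median, 75%, max) together with the clipping threshold and the fraction of batches clipped. In the entries above the threshold is 2.0 times the logged median, matching Clipping_scale=2.0 (e.g. 1.419e+02 = 2.0 x 7.097e+01). A hedged sketch of median-based clipping in that spirit, not the recipe's actual optimizer code:

    from collections import deque

    import torch

    class MedianGradClipper:
        """Clip the global grad norm at clipping_scale * median of the
        recently observed norms, and track how often clipping fires."""

        def __init__(self, params, clipping_scale=2.0, history=1024):
            self.params = list(params)
            self.clipping_scale = clipping_scale
            self.norms = deque(maxlen=history)
            self.num_clipped = 0
            self.num_steps = 0

        def clip_(self):
            grads = [p.grad.detach().flatten()
                     for p in self.params if p.grad is not None]
            norm = torch.cat(grads).norm().item()
            self.norms.append(norm)
            median = sorted(self.norms)[len(self.norms) // 2]
            threshold = self.clipping_scale * median
            self.num_steps += 1
            if norm > threshold:
                self.num_clipped += 1
                for p in self.params:
                    if p.grad is not None:
                        p.grad.mul_(threshold / norm)
            # percent-clipped, as in the log = num_clipped / num_steps
            return norm, threshold
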
], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:33:04,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=65321.333333333336, ans=0.125 +2024-07-27 21:33:06,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=65321.333333333336, ans=0.0 +2024-07-27 21:33:30,063 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.16 vs. limit=15.0 +2024-07-27 21:33:30,389 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 6.276e+01 6.776e+01 7.896e+01 1.142e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-27 21:33:33,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=65374.666666666664, ans=0.125 +2024-07-27 21:33:35,664 INFO [train.py:1114] (3/4) Epoch 5, batch 8150, loss[loss=0.2455, simple_loss=0.3267, pruned_loss=0.08216, over 4802.00 frames. ], tot_loss[loss=0.2435, simple_loss=0.321, pruned_loss=0.08295, over 936935.01 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:33:39,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65388.0, ans=0.1 +2024-07-27 21:33:58,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=65428.0, ans=0.07 +2024-07-27 21:33:59,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=65428.0, ans=0.025 +2024-07-27 21:34:11,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.78 vs. limit=15.0 +2024-07-27 21:34:11,730 INFO [train.py:1114] (3/4) Epoch 5, batch 8200, loss[loss=0.2707, simple_loss=0.3535, pruned_loss=0.09396, over 4810.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3223, pruned_loss=0.0831, over 938171.14 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:34:11,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=65454.666666666664, ans=0.2 +2024-07-27 21:34:12,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.66 vs. 
limit=22.5 +2024-07-27 21:34:14,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=65454.666666666664, ans=0.0 +2024-07-27 21:34:35,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=65468.0, ans=0.125 +2024-07-27 21:34:40,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=65481.333333333336, ans=0.125 +2024-07-27 21:34:43,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=65481.333333333336, ans=0.2 +2024-07-27 21:34:48,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=65481.333333333336, ans=0.0 +2024-07-27 21:35:01,183 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.077e+01 6.058e+01 6.820e+01 7.758e+01 1.671e+02, threshold=1.364e+02, percent-clipped=1.0 +2024-07-27 21:35:03,998 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:35:06,472 INFO [train.py:1114] (3/4) Epoch 5, batch 8250, loss[loss=0.2577, simple_loss=0.3358, pruned_loss=0.08974, over 4903.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.3225, pruned_loss=0.08348, over 938422.71 frames. ], batch size: 13, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:35:08,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=65521.333333333336, ans=0.0 +2024-07-27 21:35:27,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=65548.0, ans=0.0 +2024-07-27 21:35:33,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=65548.0, ans=0.0 +2024-07-27 21:35:51,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=65561.33333333333, ans=0.125 +2024-07-27 21:35:54,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.37 vs. limit=15.0 +2024-07-27 21:35:55,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 21:35:57,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.32 vs. limit=15.0 +2024-07-27 21:36:00,905 INFO [train.py:1114] (3/4) Epoch 5, batch 8300, loss[loss=0.2442, simple_loss=0.3273, pruned_loss=0.0805, over 4913.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.3233, pruned_loss=0.08391, over 938075.86 frames. 
], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:36:18,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=65614.66666666667, ans=0.0 +2024-07-27 21:36:25,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=65628.0, ans=0.0 +2024-07-27 21:36:25,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=65628.0, ans=10.0 +2024-07-27 21:36:26,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=65628.0, ans=0.0 +2024-07-27 21:36:29,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.95 vs. limit=22.5 +2024-07-27 21:36:32,860 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.068e+01 6.223e+01 6.833e+01 7.614e+01 1.184e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-27 21:36:39,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=65641.33333333333, ans=0.125 +2024-07-27 21:36:44,506 INFO [train.py:1114] (3/4) Epoch 5, batch 8350, loss[loss=0.2156, simple_loss=0.3011, pruned_loss=0.06502, over 4794.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.3221, pruned_loss=0.08335, over 941146.45 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:36:45,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=65654.66666666667, ans=0.0 +2024-07-27 21:36:46,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=65654.66666666667, ans=0.0 +2024-07-27 21:36:48,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=65654.66666666667, ans=0.0 +2024-07-27 21:36:48,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=65654.66666666667, ans=0.125 +2024-07-27 21:36:49,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=65654.66666666667, ans=0.2 +2024-07-27 21:36:56,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=65668.0, ans=0.125 +2024-07-27 21:37:03,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.68 vs. limit=12.0 +2024-07-27 21:37:24,210 INFO [train.py:1114] (3/4) Epoch 5, batch 8400, loss[loss=0.2468, simple_loss=0.3124, pruned_loss=0.09059, over 4775.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.3224, pruned_loss=0.08367, over 939651.59 frames. ], batch size: 12, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:37:26,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.71 vs. 
limit=15.0 +2024-07-27 21:37:28,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=65721.33333333333, ans=0.0 +2024-07-27 21:37:29,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.61 vs. limit=6.0 +2024-07-27 21:37:30,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=65734.66666666667, ans=0.1 +2024-07-27 21:37:35,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=65734.66666666667, ans=0.0 +2024-07-27 21:37:35,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=65734.66666666667, ans=0.125 +2024-07-27 21:37:39,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.18 vs. limit=15.0 +2024-07-27 21:37:45,069 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:37:49,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.78 vs. limit=22.5 +2024-07-27 21:37:57,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=65761.33333333333, ans=0.1 +2024-07-27 21:38:02,268 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.300e+01 6.390e+01 7.817e+01 9.094e+01 1.508e+02, threshold=1.563e+02, percent-clipped=1.0 +2024-07-27 21:38:07,398 INFO [train.py:1114] (3/4) Epoch 5, batch 8450, loss[loss=0.243, simple_loss=0.3111, pruned_loss=0.08739, over 4799.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.3226, pruned_loss=0.08329, over 938696.47 frames. ], batch size: 15, lr: 1.42e-02, grad_scale: 32.0 +2024-07-27 21:38:15,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=65801.33333333333, ans=0.125 +2024-07-27 21:38:33,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.35 vs. limit=15.0 +2024-07-27 21:38:43,065 INFO [train.py:1114] (3/4) Epoch 5, batch 8500, loss[loss=0.2801, simple_loss=0.3357, pruned_loss=0.1123, over 4621.00 frames. ], tot_loss[loss=0.2431, simple_loss=0.3208, pruned_loss=0.08271, over 938424.22 frames. ], batch size: 11, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:38:52,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=65868.0, ans=0.125 +2024-07-27 21:39:14,110 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 6.095e+01 6.601e+01 7.527e+01 1.077e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-27 21:39:15,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.89 vs. limit=12.0 +2024-07-27 21:39:19,307 INFO [train.py:1114] (3/4) Epoch 5, batch 8550, loss[loss=0.1836, simple_loss=0.2591, pruned_loss=0.05403, over 4802.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.3197, pruned_loss=0.08225, over 939180.76 frames. 
], batch size: 11, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:39:19,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=65921.33333333333, ans=0.125 +2024-07-27 21:39:42,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=65961.33333333333, ans=0.035 +2024-07-27 21:39:53,160 INFO [train.py:1114] (3/4) Epoch 5, batch 8600, loss[loss=0.2203, simple_loss=0.3088, pruned_loss=0.06585, over 4802.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3195, pruned_loss=0.0821, over 938945.59 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:39:53,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=65988.0, ans=0.0 +2024-07-27 21:40:01,908 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.38 vs. limit=6.0 +2024-07-27 21:40:11,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=66014.66666666667, ans=0.0 +2024-07-27 21:40:14,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=66028.0, ans=0.0 +2024-07-27 21:40:18,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66041.33333333333, ans=0.1 +2024-07-27 21:40:19,962 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.202e+01 6.597e+01 7.756e+01 9.469e+01 1.243e+02, threshold=1.551e+02, percent-clipped=0.0 +2024-07-27 21:40:20,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=10.26 vs. limit=10.0 +2024-07-27 21:40:21,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=66041.33333333333, ans=0.125 +2024-07-27 21:40:22,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=66041.33333333333, ans=0.0 +2024-07-27 21:40:25,123 INFO [train.py:1114] (3/4) Epoch 5, batch 8650, loss[loss=0.2543, simple_loss=0.3367, pruned_loss=0.08594, over 4896.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3194, pruned_loss=0.08273, over 940289.14 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:40:29,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66054.66666666667, ans=0.1 +2024-07-27 21:40:40,100 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0 +2024-07-27 21:40:41,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.69 vs. 
limit=22.5 +2024-07-27 21:40:44,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=66094.66666666667, ans=0.04949747468305833 +2024-07-27 21:40:44,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=66094.66666666667, ans=0.125 +2024-07-27 21:40:51,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=66108.0, ans=0.125 +2024-07-27 21:40:58,370 INFO [train.py:1114] (3/4) Epoch 5, batch 8700, loss[loss=0.1935, simple_loss=0.2843, pruned_loss=0.05134, over 4750.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.3207, pruned_loss=0.08331, over 937877.59 frames. ], batch size: 13, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:40:59,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=66121.33333333333, ans=0.0 +2024-07-27 21:41:11,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=66134.66666666667, ans=0.125 +2024-07-27 21:41:22,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.55 vs. limit=15.0 +2024-07-27 21:41:23,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=66161.33333333333, ans=0.0 +2024-07-27 21:41:24,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=66161.33333333333, ans=0.0 +2024-07-27 21:41:27,646 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 6.098e+01 6.655e+01 7.583e+01 1.149e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 21:41:32,130 INFO [train.py:1114] (3/4) Epoch 5, batch 8750, loss[loss=0.2592, simple_loss=0.336, pruned_loss=0.09117, over 4671.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3195, pruned_loss=0.08249, over 936010.50 frames. ], batch size: 15, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:41:33,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.75 vs. limit=15.0 +2024-07-27 21:41:38,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66188.0, ans=0.1 +2024-07-27 21:41:43,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66201.33333333333, ans=0.125 +2024-07-27 21:41:48,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.07 vs. 
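
The scaling.py:1024 "Whitening" lines fire when a module's whitening metric exceeds its configured limit (6.0 for attention keys in these logs, 10.0/12.0/15.0/22.5 elsewhere, e.g. metric=22.69 vs. limit=22.5 just above); the metric measures how far the feature covariance is from a multiple of the identity. One plausible proxy for such a metric, offered as an illustrative sketch rather than the recipe's exact formula, is d * trace(C^2) / trace(C)^2 for the d x d channel covariance C: it equals 1.0 for perfectly "white" features and approaches d when the variance collapses onto a single direction.

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
        """x: (num_frames, num_channels); channels split into num_groups.
        Returns ~1.0 for decorrelated, equal-variance channels and grows
        toward channels-per-group as the covariance degenerates."""
        n, c = x.shape
        assert c % num_groups == 0
        d = c // num_groups
        x = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, frames, d)
        x = x - x.mean(dim=1, keepdim=True)
        cov = x.transpose(1, 2) @ x / n                   # (groups, d, d)
        tr = cov.diagonal(dim1=-2, dim2=-1).sum(-1)       # trace(C)
        tr2 = (cov * cov).sum(dim=(-2, -1))               # trace(C @ C), C symmetric
        return (d * tr2 / tr.clamp(min=1e-20) ** 2).mean().item()

    x = torch.randn(10000, 384)
    print(whitening_metric(x))  # close to 1.0 up to sampling noise
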
limit=15.0 +2024-07-27 21:41:50,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=66214.66666666667, ans=0.125 +2024-07-27 21:42:03,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=66228.0, ans=0.0 +2024-07-27 21:42:03,627 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:42:08,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=66241.33333333333, ans=0.125 +2024-07-27 21:42:21,958 INFO [train.py:1114] (3/4) Epoch 5, batch 8800, loss[loss=0.2427, simple_loss=0.3273, pruned_loss=0.07911, over 4939.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.321, pruned_loss=0.08331, over 936832.65 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:42:29,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=66254.66666666667, ans=0.015 +2024-07-27 21:42:30,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=66254.66666666667, ans=0.2 +2024-07-27 21:42:30,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.63 vs. limit=15.0 +2024-07-27 21:42:32,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=13.73 vs. limit=15.0 +2024-07-27 21:42:42,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=66281.33333333333, ans=0.125 +2024-07-27 21:42:54,234 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.166e+01 6.254e+01 7.129e+01 8.198e+01 1.307e+02, threshold=1.426e+02, percent-clipped=0.0 +2024-07-27 21:42:58,857 INFO [train.py:1114] (3/4) Epoch 5, batch 8850, loss[loss=0.2495, simple_loss=0.3267, pruned_loss=0.08615, over 4559.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3214, pruned_loss=0.08354, over 931437.74 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:43:00,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=66321.33333333333, ans=0.125 +2024-07-27 21:43:13,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=66348.0, ans=0.0 +2024-07-27 21:43:24,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=66361.33333333333, ans=0.0 +2024-07-27 21:43:31,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=66388.0, ans=0.125 +2024-07-27 21:43:33,889 INFO [train.py:1114] (3/4) Epoch 5, batch 8900, loss[loss=0.2257, simple_loss=0.2992, pruned_loss=0.07616, over 4931.00 frames. ], tot_loss[loss=0.245, simple_loss=0.322, pruned_loss=0.084, over 929304.50 frames. 
], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:43:36,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66388.0, ans=0.1 +2024-07-27 21:43:36,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.91 vs. limit=15.0 +2024-07-27 21:43:38,481 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:43:39,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=66388.0, ans=0.0 +2024-07-27 21:43:50,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=66414.66666666667, ans=0.1 +2024-07-27 21:44:03,544 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 6.558e+01 7.585e+01 9.378e+01 1.606e+02, threshold=1.517e+02, percent-clipped=2.0 +2024-07-27 21:44:08,116 INFO [train.py:1114] (3/4) Epoch 5, batch 8950, loss[loss=0.264, simple_loss=0.3405, pruned_loss=0.09373, over 4511.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.3222, pruned_loss=0.084, over 929928.01 frames. ], batch size: 21, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:44:11,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=66454.66666666667, ans=0.0 +2024-07-27 21:44:12,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=66454.66666666667, ans=0.125 +2024-07-27 21:44:26,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.42 vs. limit=6.0 +2024-07-27 21:44:43,392 INFO [train.py:1114] (3/4) Epoch 5, batch 9000, loss[loss=0.2355, simple_loss=0.3061, pruned_loss=0.08248, over 4635.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.321, pruned_loss=0.08362, over 933073.77 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:44:43,393 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 21:44:53,783 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4294, 2.2018, 4.5509, 3.1489], device='cuda:3') +2024-07-27 21:44:54,790 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([6.6518, 6.2766, 6.4349, 6.4526], device='cuda:3') +2024-07-27 21:44:55,829 INFO [train.py:1146] (3/4) Epoch 5, validation: loss=0.197, simple_loss=0.3006, pruned_loss=0.04666, over 944034.00 frames. +2024-07-27 21:44:55,830 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 21:45:05,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=66534.66666666667, ans=0.125 +2024-07-27 21:45:07,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=66534.66666666667, ans=10.0 +2024-07-27 21:45:12,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.72 vs. 
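
At batch 9000 above, the trainer pauses to compute validation loss over the held-out set (944034.00 frames here) and, via zipformer.py:1858, prints the entropy of selected self-attention weight matrices as a per-head diagnostic: low entropy means a head concentrates on a few positions, high entropy means diffuse attention. (The "Maximum memory allocated" line presumably reports torch.cuda.max_memory_allocated().) A small sketch of that entropy statistic; the function name and shapes are assumptions, not the actual zipformer code:

    import torch

    def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
        """attn: (num_heads, query_len, key_len), each row a distribution.
        Returns the mean entropy per head, one value per head, like the
        4-element tensors printed in the log above."""
        eps = 1.0e-20
        ent = -(attn * (attn + eps).log()).sum(dim=-1)  # (num_heads, query_len)
        return ent.mean(dim=-1)                          # (num_heads,)

    attn = torch.softmax(torch.randn(4, 100, 100), dim=-1)
    print(attn_weights_entropy(attn))
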
limit=6.0 +2024-07-27 21:45:36,643 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.268e+01 6.311e+01 7.147e+01 8.276e+01 1.860e+02, threshold=1.429e+02, percent-clipped=1.0 +2024-07-27 21:45:41,884 INFO [train.py:1114] (3/4) Epoch 5, batch 9050, loss[loss=0.243, simple_loss=0.3032, pruned_loss=0.09146, over 4536.00 frames. ], tot_loss[loss=0.244, simple_loss=0.3204, pruned_loss=0.08379, over 933864.11 frames. ], batch size: 10, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:45:48,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=66588.0, ans=0.125 +2024-07-27 21:46:01,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=66614.66666666667, ans=0.025 +2024-07-27 21:46:16,352 INFO [train.py:1114] (3/4) Epoch 5, batch 9100, loss[loss=0.2415, simple_loss=0.3216, pruned_loss=0.08073, over 4932.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3194, pruned_loss=0.0826, over 936448.80 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:46:28,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.17 vs. limit=22.5 +2024-07-27 21:46:43,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.19 vs. limit=15.0 +2024-07-27 21:46:44,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.191e+01 6.256e+01 6.952e+01 8.323e+01 1.113e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-27 21:46:44,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=66708.0, ans=0.025 +2024-07-27 21:46:48,049 INFO [train.py:1114] (3/4) Epoch 5, batch 9150, loss[loss=0.232, simple_loss=0.311, pruned_loss=0.07656, over 4812.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.3206, pruned_loss=0.08358, over 935621.25 frames. ], batch size: 14, lr: 1.41e-02, grad_scale: 16.0 +2024-07-27 21:46:51,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.84 vs. limit=15.0 +2024-07-27 21:46:53,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=66721.33333333333, ans=0.04949747468305833 +2024-07-27 21:46:53,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.14 vs. limit=15.0 +2024-07-27 21:46:58,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.23 vs. limit=22.5 +2024-07-27 21:47:03,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.30 vs. limit=15.0 +2024-07-27 21:47:05,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0 +2024-07-27 21:47:10,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.11 vs. 
limit=10.0 +2024-07-27 21:47:11,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.03 vs. limit=15.0 +2024-07-27 21:47:19,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=66774.66666666667, ans=0.0 +2024-07-27 21:47:19,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.62 vs. limit=15.0 +2024-07-27 21:47:22,560 INFO [train.py:1114] (3/4) Epoch 5, batch 9200, loss[loss=0.1819, simple_loss=0.2685, pruned_loss=0.04765, over 4861.00 frames. ], tot_loss[loss=0.2415, simple_loss=0.3186, pruned_loss=0.08223, over 937532.41 frames. ], batch size: 12, lr: 1.41e-02, grad_scale: 32.0 +2024-07-27 21:47:26,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=66788.0, ans=0.125 +2024-07-27 21:47:27,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=66788.0, ans=0.0 +2024-07-27 21:47:32,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=66801.33333333333, ans=0.125 +2024-07-27 21:47:34,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=66801.33333333333, ans=0.0 +2024-07-27 21:47:47,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=66841.33333333333, ans=0.125 +2024-07-27 21:47:50,766 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 6.022e+01 6.976e+01 8.483e+01 1.676e+02, threshold=1.395e+02, percent-clipped=4.0 +2024-07-27 21:47:51,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=66841.33333333333, ans=0.125 +2024-07-27 21:47:54,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=66854.66666666667, ans=0.125 +2024-07-27 21:47:54,598 INFO [train.py:1114] (3/4) Epoch 5, batch 9250, loss[loss=0.2378, simple_loss=0.3211, pruned_loss=0.07729, over 4638.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3184, pruned_loss=0.08135, over 938181.56 frames. ], batch size: 13, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:47:58,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=66854.66666666667, ans=0.125 +2024-07-27 21:48:06,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=66881.33333333333, ans=0.0 +2024-07-27 21:48:07,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.97 vs. limit=15.0 +2024-07-27 21:48:12,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.27 vs. 
limit=10.0 +2024-07-27 21:48:14,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=66894.66666666667, ans=0.125 +2024-07-27 21:48:14,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=66894.66666666667, ans=0.2 +2024-07-27 21:48:15,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=66894.66666666667, ans=0.0 +2024-07-27 21:48:23,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. limit=6.0 +2024-07-27 21:48:26,550 INFO [train.py:1114] (3/4) Epoch 5, batch 9300, loss[loss=0.2662, simple_loss=0.334, pruned_loss=0.09916, over 4775.00 frames. ], tot_loss[loss=0.241, simple_loss=0.3188, pruned_loss=0.08158, over 938194.96 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:48:29,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=66921.33333333333, ans=0.125 +2024-07-27 21:48:32,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0 +2024-07-27 21:48:35,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=66934.66666666667, ans=0.025 +2024-07-27 21:48:44,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=66948.0, ans=0.1 +2024-07-27 21:48:53,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=66961.33333333333, ans=0.125 +2024-07-27 21:48:56,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=66974.66666666667, ans=0.125 +2024-07-27 21:48:58,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=12.0 +2024-07-27 21:48:59,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.113e+01 6.261e+01 7.073e+01 8.470e+01 1.590e+02, threshold=1.415e+02, percent-clipped=1.0 +2024-07-27 21:49:02,797 INFO [train.py:1114] (3/4) Epoch 5, batch 9350, loss[loss=0.2215, simple_loss=0.2883, pruned_loss=0.07734, over 4795.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.3197, pruned_loss=0.08249, over 935263.13 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:49:10,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=67001.33333333333, ans=0.0 +2024-07-27 21:49:29,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=67041.33333333333, ans=0.0 +2024-07-27 21:49:31,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67041.33333333333, ans=0.1 +2024-07-27 21:49:34,570 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.91 vs. 
limit=15.0 +2024-07-27 21:49:35,454 INFO [train.py:1114] (3/4) Epoch 5, batch 9400, loss[loss=0.2068, simple_loss=0.2907, pruned_loss=0.06146, over 4693.00 frames. ], tot_loss[loss=0.242, simple_loss=0.3194, pruned_loss=0.08232, over 932844.17 frames. ], batch size: 13, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:49:35,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=67054.66666666667, ans=0.025 +2024-07-27 21:49:37,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=67054.66666666667, ans=0.125 +2024-07-27 21:49:41,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=67068.0, ans=0.0 +2024-07-27 21:49:48,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=67081.33333333333, ans=0.125 +2024-07-27 21:49:52,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=67081.33333333333, ans=0.125 +2024-07-27 21:49:59,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=67094.66666666667, ans=0.0 +2024-07-27 21:50:03,179 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.980e+01 6.533e+01 7.095e+01 1.005e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-27 21:50:04,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=67108.0, ans=0.125 +2024-07-27 21:50:07,151 INFO [train.py:1114] (3/4) Epoch 5, batch 9450, loss[loss=0.179, simple_loss=0.2625, pruned_loss=0.0477, over 4807.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3188, pruned_loss=0.0813, over 932247.26 frames. ], batch size: 11, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:50:15,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67134.66666666667, ans=0.1 +2024-07-27 21:50:16,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=12.0 +2024-07-27 21:50:19,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=67148.0, ans=0.0 +2024-07-27 21:50:19,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-07-27 21:50:20,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=67148.0, ans=0.0 +2024-07-27 21:50:21,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=67148.0, ans=0.025 +2024-07-27 21:50:34,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.73 vs. limit=15.0 +2024-07-27 21:50:37,817 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:50:39,298 INFO [train.py:1114] (3/4) Epoch 5, batch 9500, loss[loss=0.2222, simple_loss=0.2975, pruned_loss=0.07339, over 4707.00 frames. 
], tot_loss[loss=0.2411, simple_loss=0.3192, pruned_loss=0.0815, over 934362.73 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:50:52,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=67214.66666666667, ans=0.0 +2024-07-27 21:50:58,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=67214.66666666667, ans=0.1 +2024-07-27 21:50:58,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=67228.0, ans=0.125 +2024-07-27 21:51:01,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67228.0, ans=0.125 +2024-07-27 21:51:05,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=67241.33333333333, ans=0.1 +2024-07-27 21:51:08,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.271e+01 6.540e+01 7.330e+01 8.472e+01 1.165e+02, threshold=1.466e+02, percent-clipped=0.0 +2024-07-27 21:51:11,965 INFO [train.py:1114] (3/4) Epoch 5, batch 9550, loss[loss=0.2297, simple_loss=0.3071, pruned_loss=0.07619, over 4779.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3193, pruned_loss=0.08211, over 931488.27 frames. ], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:51:25,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=67254.66666666667, ans=0.0 +2024-07-27 21:51:26,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=67254.66666666667, ans=0.05 +2024-07-27 21:51:26,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=67254.66666666667, ans=0.0 +2024-07-27 21:51:37,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.57 vs. limit=15.0 +2024-07-27 21:51:39,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=67281.33333333333, ans=0.1 +2024-07-27 21:51:52,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=67308.0, ans=0.125 +2024-07-27 21:51:58,328 INFO [train.py:1114] (3/4) Epoch 5, batch 9600, loss[loss=0.3164, simple_loss=0.3751, pruned_loss=0.1289, over 3640.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.3196, pruned_loss=0.08205, over 930645.61 frames. ], batch size: 36, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:05,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. 
limit=6.0 +2024-07-27 21:52:09,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=67321.33333333333, ans=0.025 +2024-07-27 21:52:18,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=67348.0, ans=0.0 +2024-07-27 21:52:19,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=67348.0, ans=0.125 +2024-07-27 21:52:25,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=67361.33333333333, ans=0.125 +2024-07-27 21:52:32,044 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.609e+01 6.845e+01 7.921e+01 9.332e+01 1.441e+02, threshold=1.584e+02, percent-clipped=0.0 +2024-07-27 21:52:34,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=67374.66666666667, ans=0.0 +2024-07-27 21:52:35,911 INFO [train.py:1114] (3/4) Epoch 5, batch 9650, loss[loss=0.2788, simple_loss=0.3579, pruned_loss=0.09981, over 4824.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3201, pruned_loss=0.08278, over 926798.58 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:52:46,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67401.33333333333, ans=0.1 +2024-07-27 21:52:53,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-07-27 21:53:09,957 INFO [train.py:1114] (3/4) Epoch 5, batch 9700, loss[loss=0.2804, simple_loss=0.3355, pruned_loss=0.1127, over 4315.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.3196, pruned_loss=0.08257, over 924150.60 frames. ], batch size: 25, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:10,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=67454.66666666667, ans=0.0 +2024-07-27 21:53:16,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.94 vs. limit=15.0 +2024-07-27 21:53:17,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67468.0, ans=0.1 +2024-07-27 21:53:19,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.55 vs. 
limit=15.0 +2024-07-27 21:53:20,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=67468.0, ans=0.125 +2024-07-27 21:53:20,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=67468.0, ans=0.0 +2024-07-27 21:53:22,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=67481.33333333333, ans=0.2 +2024-07-27 21:53:25,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=67481.33333333333, ans=0.125 +2024-07-27 21:53:28,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=67494.66666666667, ans=0.1 +2024-07-27 21:53:30,122 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.35 vs. limit=22.5 +2024-07-27 21:53:30,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.39 vs. limit=15.0 +2024-07-27 21:53:32,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=67494.66666666667, ans=0.125 +2024-07-27 21:53:33,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. limit=6.0 +2024-07-27 21:53:37,139 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.756e+01 6.215e+01 7.059e+01 7.921e+01 1.151e+02, threshold=1.412e+02, percent-clipped=0.0 +2024-07-27 21:53:41,360 INFO [train.py:1114] (3/4) Epoch 5, batch 9750, loss[loss=0.2323, simple_loss=0.3036, pruned_loss=0.08048, over 4668.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.3186, pruned_loss=0.08179, over 925004.35 frames. ], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:53:43,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=67521.33333333333, ans=0.025 +2024-07-27 21:53:46,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=67521.33333333333, ans=0.0 +2024-07-27 21:53:59,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=67548.0, ans=0.125 +2024-07-27 21:53:59,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=67548.0, ans=0.125 +2024-07-27 21:54:10,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67574.66666666667, ans=0.125 +2024-07-27 21:54:13,367 INFO [train.py:1114] (3/4) Epoch 5, batch 9800, loss[loss=0.2478, simple_loss=0.329, pruned_loss=0.08329, over 4713.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3177, pruned_loss=0.08137, over 924972.71 frames. 
], batch size: 12, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:20,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=67601.33333333333, ans=0.0 +2024-07-27 21:54:40,551 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 6.437e+01 7.516e+01 8.874e+01 1.109e+02, threshold=1.503e+02, percent-clipped=0.0 +2024-07-27 21:54:44,118 INFO [train.py:1114] (3/4) Epoch 5, batch 9850, loss[loss=0.3013, simple_loss=0.3846, pruned_loss=0.109, over 4904.00 frames. ], tot_loss[loss=0.24, simple_loss=0.318, pruned_loss=0.081, over 927276.87 frames. ], batch size: 15, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:54:44,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=67654.66666666667, ans=0.0 +2024-07-27 21:54:57,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.34 vs. limit=10.0 +2024-07-27 21:55:06,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=67694.66666666667, ans=0.125 +2024-07-27 21:55:09,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=67708.0, ans=0.125 +2024-07-27 21:55:15,333 INFO [train.py:1114] (3/4) Epoch 5, batch 9900, loss[loss=0.296, simple_loss=0.3723, pruned_loss=0.1099, over 4848.00 frames. ], tot_loss[loss=0.2416, simple_loss=0.319, pruned_loss=0.08208, over 927044.39 frames. ], batch size: 16, lr: 1.40e-02, grad_scale: 32.0 +2024-07-27 21:55:16,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=67721.33333333333, ans=0.1 +2024-07-27 21:55:19,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=67721.33333333333, ans=0.2 +2024-07-27 21:55:30,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=67748.0, ans=0.125 +2024-07-27 21:55:34,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=67748.0, ans=15.0 +2024-07-27 21:55:44,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.290e+01 6.621e+01 7.499e+01 8.431e+01 1.516e+02, threshold=1.500e+02, percent-clipped=1.0 +2024-07-27 21:55:47,767 INFO [train.py:1114] (3/4) Epoch 5, batch 9950, loss[loss=0.2147, simple_loss=0.2888, pruned_loss=0.07027, over 4530.00 frames. ], tot_loss[loss=0.243, simple_loss=0.3202, pruned_loss=0.08292, over 929566.03 frames. 
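
The grad_scale value logged with each training summary is the dynamic loss scale used for mixed-precision training: it is periodically doubled while gradients stay finite (16.0 to 32.0 at batch 8000 earlier in this log) and halved when a non-finite gradient is detected (back to 16.0 by batch 9950 just below). A hedged sketch of a training step with PyTorch's GradScaler, which behaves this way; model, optimizer, and batch are placeholders, not objects from this recipe:

    import torch

    scaler = torch.cuda.amp.GradScaler(
        init_scale=16.0, growth_factor=2.0, backoff_factor=0.5)

    def train_step(model, optimizer, batch):
        optimizer.zero_grad(set_to_none=True)
        with torch.autocast(device_type="cuda", dtype=torch.float16):
            loss = model(batch)                # placeholder forward pass
        scaler.scale(loss).backward()          # backward on the scaled loss
        scaler.step(optimizer)                 # skipped if grads are non-finite
        scaler.update()                        # grows or backs off the scale
        return loss.detach(), scaler.get_scale()
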
], batch size: 10, lr: 1.39e-02, grad_scale: 16.0 +2024-07-27 21:55:51,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=67788.0, ans=0.0 +2024-07-27 21:56:03,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=67814.66666666667, ans=0.125 +2024-07-27 21:56:08,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=67828.0, ans=0.125 +2024-07-27 21:56:17,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=67841.33333333333, ans=0.025 +2024-07-27 21:56:21,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.95 vs. limit=15.0 +2024-07-27 21:56:22,652 INFO [train.py:1114] (3/4) Epoch 5, batch 10000, loss[loss=0.3136, simple_loss=0.3763, pruned_loss=0.1254, over 4670.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3236, pruned_loss=0.08452, over 927230.59 frames. ], batch size: 16, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:56:24,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.96 vs. limit=15.0 +2024-07-27 21:56:28,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=67854.66666666667, ans=0.0 +2024-07-27 21:56:34,447 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 21:56:46,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=67894.66666666667, ans=0.1 +2024-07-27 21:56:47,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67894.66666666667, ans=0.125 +2024-07-27 21:56:48,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=67894.66666666667, ans=0.125 +2024-07-27 21:56:51,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=67908.0, ans=0.125 +2024-07-27 21:56:51,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.13 vs. limit=22.5 +2024-07-27 21:56:52,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=67908.0, ans=0.025 +2024-07-27 21:56:53,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.246e+01 6.323e+01 6.858e+01 7.699e+01 1.357e+02, threshold=1.372e+02, percent-clipped=0.0 +2024-07-27 21:56:57,390 INFO [train.py:1114] (3/4) Epoch 5, batch 10050, loss[loss=0.2784, simple_loss=0.3362, pruned_loss=0.1102, over 3584.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3276, pruned_loss=0.08725, over 915225.00 frames. ], batch size: 36, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:57:09,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.71 vs. 
limit=15.0 +2024-07-27 21:57:14,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=67948.0, ans=0.125 +2024-07-27 21:57:16,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=67948.0, ans=0.2 +2024-07-27 21:57:26,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=67974.66666666667, ans=0.125 +2024-07-27 21:57:31,359 INFO [train.py:1114] (3/4) Epoch 5, batch 10100, loss[loss=0.2766, simple_loss=0.3573, pruned_loss=0.09795, over 3457.00 frames. ], tot_loss[loss=0.2619, simple_loss=0.3344, pruned_loss=0.09475, over 862171.69 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:57:42,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=68001.33333333333, ans=0.2 +2024-07-27 21:57:45,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=10.83 vs. limit=15.0 +2024-07-27 21:57:50,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.24 vs. limit=15.0 +2024-07-27 21:58:01,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.071e+01 6.809e+01 7.405e+01 8.060e+01 1.302e+02, threshold=1.481e+02, percent-clipped=0.0 +2024-07-27 21:58:02,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=68041.33333333333, ans=0.125 +2024-07-27 21:58:02,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=8.17 vs. limit=12.0 +2024-07-27 21:58:03,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=68041.33333333333, ans=0.2 +2024-07-27 21:58:04,301 INFO [train.py:1114] (3/4) Epoch 5, batch 10150, loss[loss=0.298, simple_loss=0.3605, pruned_loss=0.1177, over 3485.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3394, pruned_loss=0.1002, over 820396.09 frames. ], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:05,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=68054.66666666667, ans=0.125 +2024-07-27 21:58:09,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.21 vs. limit=22.5 +2024-07-27 21:58:27,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=68094.66666666667, ans=0.0 +2024-07-27 21:58:31,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=68108.0, ans=0.125 +2024-07-27 21:58:32,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=68108.0, ans=0.5 +2024-07-27 21:58:35,559 INFO [train.py:1114] (3/4) Epoch 5, batch 10200, loss[loss=0.2881, simple_loss=0.354, pruned_loss=0.1111, over 3460.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3437, pruned_loss=0.1047, over 788769.29 frames. 
], batch size: 35, lr: 1.39e-02, grad_scale: 32.0 +2024-07-27 21:58:35,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=68121.33333333333, ans=0.125 +2024-07-27 21:58:35,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68121.33333333333, ans=0.1 +2024-07-27 21:58:36,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=68121.33333333333, ans=0.0 +2024-07-27 21:58:37,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=68121.33333333333, ans=0.125 +2024-07-27 21:58:45,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68134.66666666667, ans=0.125 +2024-07-27 21:59:31,814 INFO [train.py:1114] (3/4) Epoch 6, batch 0, loss[loss=0.194, simple_loss=0.2822, pruned_loss=0.05294, over 4850.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2822, pruned_loss=0.05294, over 4850.00 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 21:59:31,814 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 21:59:42,221 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.3867, 5.2511, 4.5963, 4.0479], device='cuda:3') +2024-07-27 21:59:43,347 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.203, simple_loss=0.3084, pruned_loss=0.04884, over 944034.00 frames. +2024-07-27 21:59:43,348 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 21:59:45,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.12 vs. limit=22.5 +2024-07-27 21:59:50,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.49 vs. limit=22.5 +2024-07-27 21:59:51,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68164.0, ans=0.0 +2024-07-27 21:59:58,962 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.074e+01 6.594e+01 7.055e+01 7.805e+01 1.292e+02, threshold=1.411e+02, percent-clipped=0.0 +2024-07-27 22:00:03,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=68190.66666666667, ans=0.0 +2024-07-27 22:00:10,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=68190.66666666667, ans=0.125 +2024-07-27 22:00:12,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68204.0, ans=0.0 +2024-07-27 22:00:18,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=68217.33333333333, ans=0.125 +2024-07-27 22:00:18,877 INFO [train.py:1114] (3/4) Epoch 6, batch 50, loss[loss=0.2177, simple_loss=0.304, pruned_loss=0.06567, over 4615.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3284, pruned_loss=0.08379, over 206769.80 frames. 
], batch size: 11, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:00:22,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=68217.33333333333, ans=0.0 +2024-07-27 22:00:30,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=68230.66666666667, ans=0.125 +2024-07-27 22:00:35,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=68244.0, ans=0.0 +2024-07-27 22:00:46,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=68270.66666666667, ans=0.125 +2024-07-27 22:00:50,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=68270.66666666667, ans=0.025 +2024-07-27 22:00:52,635 INFO [train.py:1114] (3/4) Epoch 6, batch 100, loss[loss=0.2417, simple_loss=0.3106, pruned_loss=0.08636, over 4637.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.3229, pruned_loss=0.08114, over 365895.79 frames. ], batch size: 12, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:01:00,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=68297.33333333333, ans=0.025 +2024-07-27 22:01:09,851 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.163e+01 6.212e+01 6.939e+01 8.250e+01 1.265e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-27 22:01:16,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=68324.0, ans=0.5 +2024-07-27 22:01:24,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=68337.33333333333, ans=0.125 +2024-07-27 22:01:27,685 INFO [train.py:1114] (3/4) Epoch 6, batch 150, loss[loss=0.1836, simple_loss=0.2734, pruned_loss=0.0469, over 4620.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.3203, pruned_loss=0.08003, over 494520.53 frames. ], batch size: 11, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:01:27,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68350.66666666667, ans=0.1 +2024-07-27 22:01:27,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=68350.66666666667, ans=0.125 +2024-07-27 22:01:46,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=68390.66666666667, ans=0.125 +2024-07-27 22:01:53,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=68404.0, ans=0.2 +2024-07-27 22:01:58,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=68404.0, ans=0.1 +2024-07-27 22:02:04,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.33 vs. limit=15.0 +2024-07-27 22:02:05,153 INFO [train.py:1114] (3/4) Epoch 6, batch 200, loss[loss=0.273, simple_loss=0.3477, pruned_loss=0.09911, over 4495.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3177, pruned_loss=0.07981, over 593808.90 frames. 
], batch size: 21, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:02:06,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=68417.33333333333, ans=0.025 +2024-07-27 22:02:12,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=68430.66666666667, ans=0.0 +2024-07-27 22:02:17,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=68430.66666666667, ans=0.025 +2024-07-27 22:02:20,255 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.000e+01 6.270e+01 7.736e+01 9.618e+01 1.930e+02, threshold=1.547e+02, percent-clipped=5.0 +2024-07-27 22:02:22,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68444.0, ans=0.125 +2024-07-27 22:02:30,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68457.33333333333, ans=0.125 +2024-07-27 22:02:31,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68470.66666666667, ans=0.125 +2024-07-27 22:02:32,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.22 vs. limit=15.0 +2024-07-27 22:02:32,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68470.66666666667, ans=0.125 +2024-07-27 22:02:38,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=68484.0, ans=0.1 +2024-07-27 22:02:38,664 INFO [train.py:1114] (3/4) Epoch 6, batch 250, loss[loss=0.2247, simple_loss=0.3166, pruned_loss=0.06636, over 4635.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.3189, pruned_loss=0.08099, over 670867.47 frames. ], batch size: 16, lr: 1.30e-02, grad_scale: 32.0 +2024-07-27 22:02:41,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=68484.0, ans=0.1 +2024-07-27 22:02:54,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=68510.66666666667, ans=0.025 +2024-07-27 22:02:55,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=68510.66666666667, ans=0.05 +2024-07-27 22:03:02,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=68524.0, ans=0.02 +2024-07-27 22:03:11,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=68537.33333333333, ans=0.2 +2024-07-27 22:03:12,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.94 vs. limit=15.0 +2024-07-27 22:03:14,324 INFO [train.py:1114] (3/4) Epoch 6, batch 300, loss[loss=0.2533, simple_loss=0.3297, pruned_loss=0.08851, over 4787.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.318, pruned_loss=0.08056, over 729808.30 frames. 
], batch size: 15, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:03:17,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68550.66666666667, ans=0.125 +2024-07-27 22:03:20,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=68564.0, ans=0.125 +2024-07-27 22:03:29,803 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.873e+01 6.141e+01 6.927e+01 8.037e+01 1.226e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 22:03:33,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68577.33333333333, ans=0.125 +2024-07-27 22:03:35,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=68590.66666666667, ans=0.125 +2024-07-27 22:03:39,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=68590.66666666667, ans=0.2 +2024-07-27 22:03:42,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=68604.0, ans=0.125 +2024-07-27 22:03:49,908 INFO [train.py:1114] (3/4) Epoch 6, batch 350, loss[loss=0.1676, simple_loss=0.2566, pruned_loss=0.03928, over 4937.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3175, pruned_loss=0.07953, over 776368.78 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:03:53,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.23 vs. limit=22.5 +2024-07-27 22:03:56,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=68630.66666666667, ans=0.0 +2024-07-27 22:04:05,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=68644.0, ans=0.125 +2024-07-27 22:04:05,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.76 vs. limit=22.5 +2024-07-27 22:04:11,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=68657.33333333333, ans=0.0 +2024-07-27 22:04:20,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=68670.66666666667, ans=0.125 +2024-07-27 22:04:23,113 INFO [train.py:1114] (3/4) Epoch 6, batch 400, loss[loss=0.2104, simple_loss=0.2966, pruned_loss=0.0621, over 4699.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3161, pruned_loss=0.07854, over 813655.86 frames. 
], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:04:30,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=68684.0, ans=0.125 +2024-07-27 22:04:39,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=68697.33333333333, ans=0.1 +2024-07-27 22:04:41,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=68710.66666666667, ans=0.125 +2024-07-27 22:04:42,395 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 6.252e+01 7.226e+01 8.425e+01 1.439e+02, threshold=1.445e+02, percent-clipped=1.0 +2024-07-27 22:04:55,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.33 vs. limit=15.0 +2024-07-27 22:05:00,650 INFO [train.py:1114] (3/4) Epoch 6, batch 450, loss[loss=0.2721, simple_loss=0.3454, pruned_loss=0.09936, over 4636.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3161, pruned_loss=0.07906, over 838910.75 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:05:02,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=68750.66666666667, ans=0.1 +2024-07-27 22:05:14,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=68777.33333333333, ans=0.125 +2024-07-27 22:05:33,919 INFO [train.py:1114] (3/4) Epoch 6, batch 500, loss[loss=0.254, simple_loss=0.3343, pruned_loss=0.08689, over 4687.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3152, pruned_loss=0.07858, over 861057.44 frames. ], batch size: 15, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:05:51,015 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 6.191e+01 6.809e+01 7.735e+01 1.328e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-27 22:05:53,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=68844.0, ans=0.0 +2024-07-27 22:06:09,501 INFO [train.py:1114] (3/4) Epoch 6, batch 550, loss[loss=0.2771, simple_loss=0.3414, pruned_loss=0.1064, over 4609.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3167, pruned_loss=0.07978, over 877976.40 frames. ], batch size: 17, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:06:21,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-27 22:06:24,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=68910.66666666667, ans=0.0 +2024-07-27 22:06:28,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.68 vs. 
limit=15.0 +2024-07-27 22:06:40,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=68937.33333333333, ans=0.125 +2024-07-27 22:06:42,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=68937.33333333333, ans=0.2 +2024-07-27 22:06:43,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=68950.66666666667, ans=0.0 +2024-07-27 22:06:43,639 INFO [train.py:1114] (3/4) Epoch 6, batch 600, loss[loss=0.2388, simple_loss=0.3118, pruned_loss=0.0829, over 4636.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3168, pruned_loss=0.07952, over 892553.64 frames. ], batch size: 16, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:06:46,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=68950.66666666667, ans=0.125 +2024-07-27 22:07:00,810 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.226e+01 6.771e+01 7.767e+01 1.130e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-27 22:07:01,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=68977.33333333333, ans=0.125 +2024-07-27 22:07:05,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=68990.66666666667, ans=0.125 +2024-07-27 22:07:13,497 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:07:18,954 INFO [train.py:1114] (3/4) Epoch 6, batch 650, loss[loss=0.2038, simple_loss=0.2857, pruned_loss=0.06096, over 4762.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3139, pruned_loss=0.07798, over 904477.66 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:07:33,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.81 vs. limit=15.0 +2024-07-27 22:07:41,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=69057.33333333333, ans=0.125 +2024-07-27 22:07:52,606 INFO [train.py:1114] (3/4) Epoch 6, batch 700, loss[loss=0.2168, simple_loss=0.2884, pruned_loss=0.07256, over 4638.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3138, pruned_loss=0.07744, over 912434.99 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:08:07,875 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.900e+01 6.634e+01 8.042e+01 1.194e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-27 22:08:08,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.25 vs. 
limit=22.5 +2024-07-27 22:08:10,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69110.66666666667, ans=0.1 +2024-07-27 22:08:14,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=69124.0, ans=0.125 +2024-07-27 22:08:15,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=69124.0, ans=0.0 +2024-07-27 22:08:27,950 INFO [train.py:1114] (3/4) Epoch 6, batch 750, loss[loss=0.2548, simple_loss=0.3449, pruned_loss=0.08238, over 4691.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3136, pruned_loss=0.07707, over 918417.94 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:08:33,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=69164.0, ans=0.0 +2024-07-27 22:08:35,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69164.0, ans=0.1 +2024-07-27 22:08:46,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=69177.33333333333, ans=0.125 +2024-07-27 22:08:51,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.52 vs. limit=15.0 +2024-07-27 22:08:53,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=69204.0, ans=0.0 +2024-07-27 22:09:00,971 INFO [train.py:1114] (3/4) Epoch 6, batch 800, loss[loss=0.2138, simple_loss=0.2881, pruned_loss=0.06979, over 4849.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.314, pruned_loss=0.0774, over 923603.51 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:09:03,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=69217.33333333333, ans=0.125 +2024-07-27 22:09:07,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=69230.66666666667, ans=0.2 +2024-07-27 22:09:12,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.12 vs. limit=15.0 +2024-07-27 22:09:18,019 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.094e+01 6.022e+01 6.607e+01 7.761e+01 1.209e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-27 22:09:22,177 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:09:26,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=69257.33333333333, ans=0.025 +2024-07-27 22:09:37,856 INFO [train.py:1114] (3/4) Epoch 6, batch 850, loss[loss=0.3267, simple_loss=0.3949, pruned_loss=0.1292, over 4680.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3144, pruned_loss=0.07759, over 927952.60 frames. 
], batch size: 14, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:09:51,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=69310.66666666667, ans=0.125 +2024-07-27 22:09:51,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=69310.66666666667, ans=0.125 +2024-07-27 22:09:53,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0 +2024-07-27 22:09:55,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=69310.66666666667, ans=0.0 +2024-07-27 22:09:57,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=69324.0, ans=0.125 +2024-07-27 22:09:58,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=69324.0, ans=0.125 +2024-07-27 22:10:15,125 INFO [train.py:1114] (3/4) Epoch 6, batch 900, loss[loss=0.219, simple_loss=0.2854, pruned_loss=0.07632, over 4857.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3151, pruned_loss=0.07829, over 928067.40 frames. ], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:10:15,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69350.66666666667, ans=0.1 +2024-07-27 22:10:19,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=69350.66666666667, ans=0.125 +2024-07-27 22:10:30,381 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.354e+01 7.008e+01 8.406e+01 1.301e+02, threshold=1.402e+02, percent-clipped=0.0 +2024-07-27 22:10:31,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=69377.33333333333, ans=0.125 +2024-07-27 22:10:35,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=69390.66666666667, ans=0.015 +2024-07-27 22:10:40,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=69390.66666666667, ans=0.125 +2024-07-27 22:10:46,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=69404.0, ans=0.0 +2024-07-27 22:10:48,672 INFO [train.py:1114] (3/4) Epoch 6, batch 950, loss[loss=0.2098, simple_loss=0.2975, pruned_loss=0.0611, over 4773.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3136, pruned_loss=0.0773, over 929596.79 frames. 
], batch size: 12, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:10:50,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=69417.33333333333, ans=0.0 +2024-07-27 22:10:52,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=69417.33333333333, ans=0.125 +2024-07-27 22:11:00,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=69430.66666666667, ans=0.05 +2024-07-27 22:11:00,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=69430.66666666667, ans=0.125 +2024-07-27 22:11:01,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=69444.0, ans=0.0 +2024-07-27 22:11:06,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=69444.0, ans=0.125 +2024-07-27 22:11:13,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=69457.33333333333, ans=0.125 +2024-07-27 22:11:23,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=69484.0, ans=0.1 +2024-07-27 22:11:23,637 INFO [train.py:1114] (3/4) Epoch 6, batch 1000, loss[loss=0.1988, simple_loss=0.289, pruned_loss=0.05431, over 4972.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3151, pruned_loss=0.07816, over 929655.64 frames. ], batch size: 13, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:11:30,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=69497.33333333333, ans=0.07 +2024-07-27 22:11:35,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=69497.33333333333, ans=0.0 +2024-07-27 22:11:39,439 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.085e+01 6.209e+01 6.779e+01 8.145e+01 1.211e+02, threshold=1.356e+02, percent-clipped=0.0 +2024-07-27 22:11:40,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=69510.66666666667, ans=0.04949747468305833 +2024-07-27 22:11:42,268 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 22:11:45,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=69524.0, ans=0.0 +2024-07-27 22:11:46,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=69524.0, ans=0.025 +2024-07-27 22:11:57,385 INFO [train.py:1114] (3/4) Epoch 6, batch 1050, loss[loss=0.2173, simple_loss=0.3057, pruned_loss=0.06444, over 4876.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3144, pruned_loss=0.07759, over 932127.73 frames. 
], batch size: 14, lr: 1.29e-02, grad_scale: 32.0 +2024-07-27 22:12:02,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=69550.66666666667, ans=0.125 +2024-07-27 22:12:14,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=69577.33333333333, ans=0.125 +2024-07-27 22:12:21,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=69590.66666666667, ans=0.0 +2024-07-27 22:12:30,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=69604.0, ans=0.125 +2024-07-27 22:12:32,788 INFO [train.py:1114] (3/4) Epoch 6, batch 1100, loss[loss=0.1776, simple_loss=0.267, pruned_loss=0.04414, over 4886.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3139, pruned_loss=0.07757, over 934529.27 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:12:34,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=69617.33333333333, ans=0.125 +2024-07-27 22:12:37,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=69617.33333333333, ans=0.125 +2024-07-27 22:12:48,097 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.350e+01 6.961e+01 9.139e+01, threshold=1.270e+02, percent-clipped=0.0 +2024-07-27 22:12:52,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=69657.33333333333, ans=0.125 +2024-07-27 22:13:02,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=69670.66666666667, ans=0.0 +2024-07-27 22:13:05,923 INFO [train.py:1114] (3/4) Epoch 6, batch 1150, loss[loss=0.2244, simple_loss=0.2935, pruned_loss=0.07762, over 4895.00 frames. ], tot_loss[loss=0.235, simple_loss=0.314, pruned_loss=0.07798, over 934217.48 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:13:08,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.58 vs. limit=15.0 +2024-07-27 22:13:19,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=69710.66666666667, ans=0.0 +2024-07-27 22:13:28,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.10 vs. limit=15.0 +2024-07-27 22:13:43,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=69737.33333333333, ans=0.125 +2024-07-27 22:13:48,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=69737.33333333333, ans=0.0 +2024-07-27 22:13:50,300 INFO [train.py:1114] (3/4) Epoch 6, batch 1200, loss[loss=0.23, simple_loss=0.3148, pruned_loss=0.07259, over 4876.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.3161, pruned_loss=0.07916, over 933417.56 frames. 
], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:13:58,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.68 vs. limit=15.0 +2024-07-27 22:14:02,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=69764.0, ans=0.0 +2024-07-27 22:14:07,404 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.960e+01 6.565e+01 7.380e+01 1.067e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-27 22:14:24,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=69817.33333333333, ans=0.0 +2024-07-27 22:14:25,336 INFO [train.py:1114] (3/4) Epoch 6, batch 1250, loss[loss=0.246, simple_loss=0.3153, pruned_loss=0.08835, over 4795.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3156, pruned_loss=0.07887, over 937500.46 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:14:33,249 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.25 vs. limit=10.0 +2024-07-27 22:14:35,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=69830.66666666667, ans=0.0 +2024-07-27 22:14:50,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=69857.33333333333, ans=0.025 +2024-07-27 22:14:52,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=69870.66666666667, ans=10.0 +2024-07-27 22:14:55,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=69870.66666666667, ans=0.125 +2024-07-27 22:14:58,323 INFO [train.py:1114] (3/4) Epoch 6, batch 1300, loss[loss=0.275, simple_loss=0.3499, pruned_loss=0.09999, over 4638.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3156, pruned_loss=0.07913, over 938895.77 frames. 
], batch size: 19, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:14:59,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=69884.0, ans=0.125 +2024-07-27 22:15:03,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=69884.0, ans=0.0 +2024-07-27 22:15:05,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=69884.0, ans=0.0 +2024-07-27 22:15:07,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=69897.33333333333, ans=0.95 +2024-07-27 22:15:15,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.914e+01 6.589e+01 7.357e+01 1.015e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-27 22:15:17,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=69910.66666666667, ans=0.125 +2024-07-27 22:15:18,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=69910.66666666667, ans=0.1 +2024-07-27 22:15:19,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=69924.0, ans=0.025 +2024-07-27 22:15:22,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=69924.0, ans=0.125 +2024-07-27 22:15:25,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=69924.0, ans=0.1 +2024-07-27 22:15:28,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=69937.33333333333, ans=0.125 +2024-07-27 22:15:32,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=69937.33333333333, ans=0.125 +2024-07-27 22:15:33,794 INFO [train.py:1114] (3/4) Epoch 6, batch 1350, loss[loss=0.225, simple_loss=0.3171, pruned_loss=0.06648, over 4767.00 frames. ], tot_loss[loss=0.236, simple_loss=0.315, pruned_loss=0.07849, over 940937.06 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:15:45,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.89 vs. limit=15.0 +2024-07-27 22:15:52,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=69977.33333333333, ans=0.125 +2024-07-27 22:15:53,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.71 vs. limit=15.0 +2024-07-27 22:16:00,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=70004.0, ans=0.0 +2024-07-27 22:16:05,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=70004.0, ans=0.0 +2024-07-27 22:16:07,352 INFO [train.py:1114] (3/4) Epoch 6, batch 1400, loss[loss=0.1801, simple_loss=0.2638, pruned_loss=0.04819, over 4697.00 frames. ], tot_loss[loss=0.235, simple_loss=0.3146, pruned_loss=0.07769, over 943035.96 frames. 
], batch size: 11, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:16:13,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=70017.33333333333, ans=0.09899494936611666 +2024-07-27 22:16:20,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=70044.0, ans=0.2 +2024-07-27 22:16:22,914 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 6.116e+01 6.900e+01 7.787e+01 1.307e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-27 22:17:15,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=70057.33333333333, ans=0.2 +2024-07-27 22:17:21,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=70070.66666666667, ans=0.0 +2024-07-27 22:17:29,171 INFO [train.py:1114] (3/4) Epoch 6, batch 1450, loss[loss=0.2389, simple_loss=0.3168, pruned_loss=0.08045, over 4670.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3145, pruned_loss=0.07756, over 942815.28 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:17:29,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=70084.0, ans=0.025 +2024-07-27 22:17:32,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=70084.0, ans=0.1 +2024-07-27 22:17:41,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=70097.33333333333, ans=0.0 +2024-07-27 22:17:45,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70110.66666666667, ans=0.125 +2024-07-27 22:17:50,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.06 vs. limit=15.0 +2024-07-27 22:17:52,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70124.0, ans=0.1 +2024-07-27 22:18:02,530 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.58 vs. limit=12.0 +2024-07-27 22:18:04,307 INFO [train.py:1114] (3/4) Epoch 6, batch 1500, loss[loss=0.218, simple_loss=0.3101, pruned_loss=0.06289, over 4820.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3149, pruned_loss=0.07795, over 942737.04 frames. 
], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:18:04,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=70150.66666666667, ans=0.0 +2024-07-27 22:18:08,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=70150.66666666667, ans=0.0 +2024-07-27 22:18:20,389 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.883e+01 6.851e+01 7.584e+01 1.194e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-27 22:18:25,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=70190.66666666667, ans=0.125 +2024-07-27 22:18:33,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=70204.0, ans=0.1 +2024-07-27 22:18:40,403 INFO [train.py:1114] (3/4) Epoch 6, batch 1550, loss[loss=0.2585, simple_loss=0.3507, pruned_loss=0.08314, over 4900.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3161, pruned_loss=0.07874, over 939454.46 frames. ], batch size: 15, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:18:44,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.60 vs. limit=22.5 +2024-07-27 22:18:45,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70217.33333333333, ans=0.125 +2024-07-27 22:18:46,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.01 vs. limit=15.0 +2024-07-27 22:18:53,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=12.77 vs. limit=15.0 +2024-07-27 22:19:09,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=70270.66666666667, ans=0.125 +2024-07-27 22:19:13,501 INFO [train.py:1114] (3/4) Epoch 6, batch 1600, loss[loss=0.244, simple_loss=0.3257, pruned_loss=0.08112, over 4870.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3164, pruned_loss=0.0792, over 938334.07 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:19:17,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=70284.0, ans=0.1 +2024-07-27 22:19:18,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=70284.0, ans=0.0 +2024-07-27 22:19:31,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.952e+01 6.615e+01 7.870e+01 9.186e+01 1.944e+02, threshold=1.574e+02, percent-clipped=2.0 +2024-07-27 22:19:33,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=70310.66666666667, ans=0.125 +2024-07-27 22:19:39,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=70324.0, ans=0.125 +2024-07-27 22:19:49,373 INFO [train.py:1114] (3/4) Epoch 6, batch 1650, loss[loss=0.2475, simple_loss=0.3402, pruned_loss=0.07745, over 4660.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3164, pruned_loss=0.0791, over 938664.22 frames. 
], batch size: 14, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:20:05,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=70377.33333333333, ans=0.1 +2024-07-27 22:20:11,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70390.66666666667, ans=0.0 +2024-07-27 22:20:16,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=70390.66666666667, ans=0.0 +2024-07-27 22:20:20,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=70404.0, ans=0.025 +2024-07-27 22:20:22,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.24 vs. limit=22.5 +2024-07-27 22:20:22,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=70404.0, ans=0.125 +2024-07-27 22:20:23,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0 +2024-07-27 22:20:24,475 INFO [train.py:1114] (3/4) Epoch 6, batch 1700, loss[loss=0.195, simple_loss=0.2785, pruned_loss=0.05573, over 4695.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3156, pruned_loss=0.07802, over 940037.56 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 32.0 +2024-07-27 22:20:24,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=70417.33333333333, ans=0.125 +2024-07-27 22:20:26,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=70417.33333333333, ans=0.2 +2024-07-27 22:20:28,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=70417.33333333333, ans=0.0 +2024-07-27 22:20:28,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70417.33333333333, ans=0.1 +2024-07-27 22:20:30,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=70430.66666666667, ans=0.025 +2024-07-27 22:20:36,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=70430.66666666667, ans=0.125 +2024-07-27 22:20:39,546 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 6.237e+01 7.615e+01 9.161e+01 1.409e+02, threshold=1.523e+02, percent-clipped=0.0 +2024-07-27 22:20:41,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=70444.0, ans=0.0 +2024-07-27 22:20:58,125 INFO [train.py:1114] (3/4) Epoch 6, batch 1750, loss[loss=0.2201, simple_loss=0.2869, pruned_loss=0.0767, over 4819.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.315, pruned_loss=0.0781, over 940773.90 frames. ], batch size: 11, lr: 1.28e-02, grad_scale: 64.0 +2024-07-27 22:21:17,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.37 vs. 
limit=12.0
+2024-07-27 22:21:38,396 INFO [train.py:1114] (3/4) Epoch 6, batch 1800, loss[loss=0.2267, simple_loss=0.3176, pruned_loss=0.06789, over 4637.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3159, pruned_loss=0.0787, over 941351.99 frames. ], batch size: 13, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:21:51,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=70564.0, ans=0.0
+2024-07-27 22:21:52,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70577.33333333333, ans=0.125
+2024-07-27 22:21:54,692 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 6.220e+01 7.110e+01 8.756e+01 1.676e+02, threshold=1.422e+02, percent-clipped=1.0
+2024-07-27 22:21:58,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=70590.66666666667, ans=0.0
+2024-07-27 22:22:05,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=70604.0, ans=15.0
+2024-07-27 22:22:09,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=70604.0, ans=10.0
+2024-07-27 22:22:12,105 INFO [train.py:1114] (3/4) Epoch 6, batch 1850, loss[loss=0.3057, simple_loss=0.3724, pruned_loss=0.1195, over 4809.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3151, pruned_loss=0.07869, over 941070.43 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:22:13,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=70617.33333333333, ans=0.0
+2024-07-27 22:22:17,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=70617.33333333333, ans=0.125
+2024-07-27 22:22:18,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70630.66666666667, ans=0.1
+2024-07-27 22:22:19,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=70630.66666666667, ans=0.125
+2024-07-27 22:22:27,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=70644.0, ans=0.125
+2024-07-27 22:22:32,777 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.94 vs. limit=12.0
+2024-07-27 22:22:40,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.31 vs. limit=15.0
+2024-07-27 22:22:45,974 INFO [train.py:1114] (3/4) Epoch 6, batch 1900, loss[loss=0.2664, simple_loss=0.3485, pruned_loss=0.09216, over 4661.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.316, pruned_loss=0.07919, over 942165.92 frames. ], batch size: 14, lr: 1.28e-02, grad_scale: 32.0
+2024-07-27 22:22:52,090 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:22:58,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=70710.66666666667, ans=0.125
+2024-07-27 22:23:01,911 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.167e+01 7.357e+01 8.960e+01 1.368e+02, threshold=1.471e+02, percent-clipped=0.0
+2024-07-27 22:23:03,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=70710.66666666667, ans=0.125
+2024-07-27 22:23:04,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=70710.66666666667, ans=0.2
+2024-07-27 22:23:23,066 INFO [train.py:1114] (3/4) Epoch 6, batch 1950, loss[loss=0.2823, simple_loss=0.3548, pruned_loss=0.1049, over 4900.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3165, pruned_loss=0.07902, over 944018.60 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:23:29,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.93 vs. limit=10.0
+2024-07-27 22:23:33,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=70764.0, ans=0.125
+2024-07-27 22:23:41,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=70777.33333333333, ans=0.0
+2024-07-27 22:23:42,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.88 vs. limit=15.0
+2024-07-27 22:23:49,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=70804.0, ans=0.1
+2024-07-27 22:23:53,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70804.0, ans=0.125
+2024-07-27 22:23:56,811 INFO [train.py:1114] (3/4) Epoch 6, batch 2000, loss[loss=0.1994, simple_loss=0.2711, pruned_loss=0.06386, over 4803.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3173, pruned_loss=0.0793, over 941823.62 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:24:03,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=70830.66666666667, ans=0.02
+2024-07-27 22:24:15,302 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.102e+01 6.803e+01 8.517e+01 1.833e+02, threshold=1.361e+02, percent-clipped=3.0
+2024-07-27 22:24:19,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=70857.33333333333, ans=0.2
+2024-07-27 22:24:30,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=70870.66666666667, ans=0.125
+2024-07-27 22:24:32,966 INFO [train.py:1114] (3/4) Epoch 6, batch 2050, loss[loss=0.2343, simple_loss=0.2986, pruned_loss=0.085, over 4600.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3148, pruned_loss=0.07837, over 939796.50 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:24:37,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=70884.0, ans=0.0
+2024-07-27 22:24:42,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=70897.33333333333, ans=0.125
+2024-07-27 22:24:43,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=70897.33333333333, ans=0.125
+2024-07-27 22:24:49,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=70910.66666666667, ans=0.125
+2024-07-27 22:24:53,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=70924.0, ans=0.125
+2024-07-27 22:24:53,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=70924.0, ans=0.0
+2024-07-27 22:25:02,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=70937.33333333333, ans=0.125
+2024-07-27 22:25:02,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.06 vs. limit=15.0
+2024-07-27 22:25:05,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=70937.33333333333, ans=0.1
+2024-07-27 22:25:07,024 INFO [train.py:1114] (3/4) Epoch 6, batch 2100, loss[loss=0.1965, simple_loss=0.2876, pruned_loss=0.0527, over 4755.00 frames. ], tot_loss[loss=0.236, simple_loss=0.315, pruned_loss=0.07852, over 941439.82 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:25:11,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=70950.66666666667, ans=0.125
+2024-07-27 22:25:14,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=70964.0, ans=0.125
+2024-07-27 22:25:23,160 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 6.024e+01 6.945e+01 8.681e+01 1.626e+02, threshold=1.389e+02, percent-clipped=3.0
+2024-07-27 22:25:29,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=70990.66666666667, ans=0.2
+2024-07-27 22:25:39,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=71004.0, ans=0.125
+2024-07-27 22:25:39,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=71017.33333333333, ans=0.0
+2024-07-27 22:25:40,256 INFO [train.py:1114] (3/4) Epoch 6, batch 2150, loss[loss=0.2147, simple_loss=0.3081, pruned_loss=0.06072, over 4889.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.313, pruned_loss=0.07706, over 944903.64 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:25:46,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=71017.33333333333, ans=0.125
+2024-07-27 22:25:52,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=71030.66666666667, ans=0.2
+2024-07-27 22:25:56,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=71044.0, ans=0.05
+2024-07-27 22:25:56,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=71044.0, ans=0.2
+2024-07-27 22:25:56,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=71044.0, ans=0.2
+2024-07-27 22:26:12,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=71070.66666666667, ans=0.125
+2024-07-27 22:26:14,878 INFO [train.py:1114] (3/4) Epoch 6, batch 2200, loss[loss=0.2074, simple_loss=0.3001, pruned_loss=0.05732, over 4807.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3133, pruned_loss=0.07685, over 943875.92 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:26:15,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=71084.0, ans=0.07
+2024-07-27 22:26:16,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=71084.0, ans=0.125
+2024-07-27 22:26:30,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0
+2024-07-27 22:26:30,845 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.032e+01 5.977e+01 6.533e+01 7.474e+01 1.096e+02, threshold=1.307e+02, percent-clipped=0.0
+2024-07-27 22:26:31,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71110.66666666667, ans=0.1
+2024-07-27 22:26:38,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71124.0, ans=0.1
+2024-07-27 22:26:38,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71124.0, ans=0.0
+2024-07-27 22:26:47,959 INFO [train.py:1114] (3/4) Epoch 6, batch 2250, loss[loss=0.2271, simple_loss=0.3126, pruned_loss=0.07078, over 4695.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3139, pruned_loss=0.07779, over 941828.82 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:27:20,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.34 vs. limit=22.5
+2024-07-27 22:27:24,051 INFO [train.py:1114] (3/4) Epoch 6, batch 2300, loss[loss=0.2404, simple_loss=0.3114, pruned_loss=0.08469, over 4932.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3121, pruned_loss=0.07694, over 939411.87 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:27:24,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71217.33333333333, ans=0.125
+2024-07-27 22:27:27,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=71217.33333333333, ans=0.0
+2024-07-27 22:27:28,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=71217.33333333333, ans=0.0
+2024-07-27 22:27:39,867 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.011e+01 6.045e+01 6.668e+01 7.674e+01 1.080e+02, threshold=1.334e+02, percent-clipped=0.0
+2024-07-27 22:27:40,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=71244.0, ans=0.0
+2024-07-27 22:27:57,313 INFO [train.py:1114] (3/4) Epoch 6, batch 2350, loss[loss=0.214, simple_loss=0.2984, pruned_loss=0.06481, over 4637.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3122, pruned_loss=0.07709, over 941325.55 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:28:04,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=71297.33333333333, ans=0.125
+2024-07-27 22:28:06,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=71297.33333333333, ans=0.0
+2024-07-27 22:28:16,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=71324.0, ans=0.125
+2024-07-27 22:28:28,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=71337.33333333333, ans=0.025
+2024-07-27 22:28:29,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=71337.33333333333, ans=0.125
+2024-07-27 22:28:29,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71337.33333333333, ans=0.1
+2024-07-27 22:28:30,519 INFO [train.py:1114] (3/4) Epoch 6, batch 2400, loss[loss=0.2123, simple_loss=0.2745, pruned_loss=0.07509, over 4639.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3127, pruned_loss=0.07767, over 940992.44 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:28:32,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.06 vs. limit=6.0
+2024-07-27 22:28:37,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71364.0, ans=0.1
+2024-07-27 22:28:38,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=71364.0, ans=0.05
+2024-07-27 22:28:48,417 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.400e+01 6.375e+01 7.289e+01 8.298e+01 1.037e+02, threshold=1.458e+02, percent-clipped=0.0
+2024-07-27 22:28:48,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71377.33333333333, ans=0.125
+2024-07-27 22:29:00,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=71404.0, ans=0.125
+2024-07-27 22:29:03,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=71404.0, ans=0.125
+2024-07-27 22:29:05,458 INFO [train.py:1114] (3/4) Epoch 6, batch 2450, loss[loss=0.2203, simple_loss=0.3122, pruned_loss=0.06426, over 4696.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3142, pruned_loss=0.07868, over 936936.83 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:29:09,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=71417.33333333333, ans=0.1
+2024-07-27 22:29:13,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=71430.66666666667, ans=0.125
+2024-07-27 22:29:13,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=71430.66666666667, ans=0.0
+2024-07-27 22:29:23,197 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0
+2024-07-27 22:29:23,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.57 vs. limit=15.0
+2024-07-27 22:29:35,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=71470.66666666667, ans=0.1
+2024-07-27 22:29:37,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=71470.66666666667, ans=0.05
+2024-07-27 22:29:40,989 INFO [train.py:1114] (3/4) Epoch 6, batch 2500, loss[loss=0.2544, simple_loss=0.3359, pruned_loss=0.08648, over 4814.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3152, pruned_loss=0.07907, over 939039.00 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:29:46,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=71484.0, ans=0.0
+2024-07-27 22:29:49,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-27 22:29:50,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71497.33333333333, ans=0.125
+2024-07-27 22:29:56,875 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 6.200e+01 6.677e+01 7.747e+01 1.498e+02, threshold=1.335e+02, percent-clipped=1.0
+2024-07-27 22:30:05,765 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.229e-01
+2024-07-27 22:30:07,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=71537.33333333333, ans=0.125
+2024-07-27 22:30:14,582 INFO [train.py:1114] (3/4) Epoch 6, batch 2550, loss[loss=0.1966, simple_loss=0.2751, pruned_loss=0.05908, over 4784.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3143, pruned_loss=0.07862, over 938594.18 frames. ], batch size: 11, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:30:22,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=71564.0, ans=0.1
+2024-07-27 22:30:27,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=71577.33333333333, ans=0.035
+2024-07-27 22:30:28,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=71577.33333333333, ans=0.0
+2024-07-27 22:30:47,949 INFO [train.py:1114] (3/4) Epoch 6, batch 2600, loss[loss=0.2202, simple_loss=0.2968, pruned_loss=0.07186, over 4892.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3151, pruned_loss=0.07878, over 937511.29 frames. ], batch size: 13, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:30:52,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.56 vs. limit=15.0
+2024-07-27 22:30:52,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.18 vs. limit=15.0
+2024-07-27 22:30:55,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=71630.66666666667, ans=0.0
+2024-07-27 22:31:01,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=71644.0, ans=0.0
+2024-07-27 22:31:05,412 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 6.119e+01 7.086e+01 8.200e+01 1.372e+02, threshold=1.417e+02, percent-clipped=1.0
+2024-07-27 22:31:08,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71657.33333333333, ans=0.125
+2024-07-27 22:31:11,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.88 vs. limit=10.0
+2024-07-27 22:31:13,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.30 vs. limit=15.0
+2024-07-27 22:31:22,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=71684.0, ans=0.0
+2024-07-27 22:31:22,649 INFO [train.py:1114] (3/4) Epoch 6, batch 2650, loss[loss=0.2623, simple_loss=0.3521, pruned_loss=0.08619, over 4642.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3153, pruned_loss=0.07916, over 939484.07 frames. ], batch size: 16, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:31:31,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=71697.33333333333, ans=0.125
+2024-07-27 22:31:48,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=71724.0, ans=0.125
+2024-07-27 22:31:54,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=71737.33333333333, ans=0.0
+2024-07-27 22:31:56,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0
+2024-07-27 22:31:56,599 INFO [train.py:1114] (3/4) Epoch 6, batch 2700, loss[loss=0.2792, simple_loss=0.3565, pruned_loss=0.101, over 4740.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3158, pruned_loss=0.07911, over 939476.31 frames. ], batch size: 14, lr: 1.27e-02, grad_scale: 32.0
+2024-07-27 22:32:01,384 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:32:12,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.944e+01 6.253e+01 7.142e+01 8.501e+01 1.377e+02, threshold=1.428e+02, percent-clipped=0.0
+2024-07-27 22:32:17,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=71790.66666666667, ans=0.125
+2024-07-27 22:32:23,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.20 vs. limit=15.0
+2024-07-27 22:32:27,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.56 vs. limit=15.0
+2024-07-27 22:32:31,805 INFO [train.py:1114] (3/4) Epoch 6, batch 2750, loss[loss=0.1884, simple_loss=0.2771, pruned_loss=0.04985, over 4701.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.315, pruned_loss=0.07871, over 939580.80 frames. ], batch size: 12, lr: 1.27e-02, grad_scale: 16.0
+2024-07-27 22:32:49,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=71844.0, ans=0.125
+2024-07-27 22:33:04,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=71884.0, ans=0.125
+2024-07-27 22:33:05,088 INFO [train.py:1114] (3/4) Epoch 6, batch 2800, loss[loss=0.2956, simple_loss=0.3579, pruned_loss=0.1167, over 3300.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3152, pruned_loss=0.07865, over 937382.18 frames. ], batch size: 35, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:33:05,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=71884.0, ans=0.0
+2024-07-27 22:33:17,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.75 vs. limit=5.0
+2024-07-27 22:33:19,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.whiten.whitening_limit, batch_count=71910.66666666667, ans=12.0
+2024-07-27 22:33:19,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=71910.66666666667, ans=0.2
+2024-07-27 22:33:21,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.67 vs. limit=22.5
+2024-07-27 22:33:22,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0
+2024-07-27 22:33:22,312 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.117e+01 6.346e+01 7.274e+01 8.194e+01 1.245e+02, threshold=1.455e+02, percent-clipped=0.0
+2024-07-27 22:33:24,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.46 vs. limit=22.5
+2024-07-27 22:33:25,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=71924.0, ans=0.0
+2024-07-27 22:33:25,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=71924.0, ans=0.125
+2024-07-27 22:33:28,512 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:33:36,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=71937.33333333333, ans=0.125
+2024-07-27 22:33:38,923 INFO [train.py:1114] (3/4) Epoch 6, batch 2850, loss[loss=0.2084, simple_loss=0.2825, pruned_loss=0.06718, over 4966.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3169, pruned_loss=0.08006, over 935867.95 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:33:44,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=71950.66666666667, ans=0.0
+2024-07-27 22:33:58,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=71990.66666666667, ans=0.0
+2024-07-27 22:34:04,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=71990.66666666667, ans=0.2
+2024-07-27 22:34:07,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.01 vs. limit=6.0
+2024-07-27 22:34:11,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.07 vs. limit=15.0
+2024-07-27 22:34:12,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=72004.0, ans=0.125
+2024-07-27 22:34:14,081 INFO [train.py:1114] (3/4) Epoch 6, batch 2900, loss[loss=0.2176, simple_loss=0.3023, pruned_loss=0.06649, over 4823.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3166, pruned_loss=0.07897, over 939794.76 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:34:16,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=72017.33333333333, ans=0.05
+2024-07-27 22:34:30,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=72044.0, ans=0.0
+2024-07-27 22:34:31,186 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.900e+01 6.392e+01 7.089e+01 1.311e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-27 22:34:40,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=72070.66666666667, ans=0.125
+2024-07-27 22:34:41,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=72070.66666666667, ans=0.0
+2024-07-27 22:34:41,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=72070.66666666667, ans=0.125
+2024-07-27 22:34:44,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.63 vs. limit=15.0
+2024-07-27 22:34:45,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.04 vs. limit=15.0
+2024-07-27 22:34:47,739 INFO [train.py:1114] (3/4) Epoch 6, batch 2950, loss[loss=0.2056, simple_loss=0.2894, pruned_loss=0.0609, over 4705.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3154, pruned_loss=0.07846, over 938915.27 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:35:23,024 INFO [train.py:1114] (3/4) Epoch 6, batch 3000, loss[loss=0.2003, simple_loss=0.2855, pruned_loss=0.05752, over 4769.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3137, pruned_loss=0.07682, over 938533.33 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:35:23,025 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-27 22:35:30,578 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.5169, 2.5948, 4.7591, 3.4444], device='cuda:3')
+2024-07-27 22:35:35,849 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.194, simple_loss=0.2973, pruned_loss=0.04533, over 944034.00 frames.
+2024-07-27 22:35:35,850 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-27 22:35:38,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.23 vs. limit=22.5
+2024-07-27 22:35:40,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=72150.66666666667, ans=0.5
+2024-07-27 22:35:47,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72164.0, ans=0.1
+2024-07-27 22:35:53,038 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.896e+01 6.493e+01 7.360e+01 1.026e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-27 22:36:02,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=72190.66666666667, ans=0.0
+2024-07-27 22:36:04,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=72204.0, ans=0.125
+2024-07-27 22:36:13,119 INFO [train.py:1114] (3/4) Epoch 6, batch 3050, loss[loss=0.2144, simple_loss=0.3017, pruned_loss=0.06359, over 4647.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.3164, pruned_loss=0.07845, over 937134.38 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:36:13,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72217.33333333333, ans=0.1
+2024-07-27 22:36:16,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72217.33333333333, ans=0.125
+2024-07-27 22:36:22,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=72230.66666666667, ans=0.125
+2024-07-27 22:36:41,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=72270.66666666667, ans=0.035
+2024-07-27 22:36:46,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72284.0, ans=0.125
+2024-07-27 22:36:46,895 INFO [train.py:1114] (3/4) Epoch 6, batch 3100, loss[loss=0.2572, simple_loss=0.3324, pruned_loss=0.09097, over 4647.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3155, pruned_loss=0.07803, over 937949.91 frames. ], batch size: 16, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:36:50,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72284.0, ans=0.1
+2024-07-27 22:36:54,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=72297.33333333333, ans=0.2
+2024-07-27 22:36:55,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=72297.33333333333, ans=0.0
+2024-07-27 22:36:57,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72297.33333333333, ans=0.125
+2024-07-27 22:37:00,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.90 vs. limit=12.0
+2024-07-27 22:37:03,365 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.197e+01 6.089e+01 6.786e+01 8.344e+01 1.227e+02, threshold=1.357e+02, percent-clipped=0.0
+2024-07-27 22:37:06,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=72324.0, ans=10.0
+2024-07-27 22:37:19,944 INFO [train.py:1114] (3/4) Epoch 6, batch 3150, loss[loss=0.2147, simple_loss=0.294, pruned_loss=0.06765, over 4610.00 frames. ], tot_loss[loss=0.234, simple_loss=0.314, pruned_loss=0.07697, over 938037.05 frames. ], batch size: 17, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:37:20,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=72350.66666666667, ans=0.125
+2024-07-27 22:37:26,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=72364.0, ans=0.125
+2024-07-27 22:37:30,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72364.0, ans=0.125
+2024-07-27 22:37:32,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72377.33333333333, ans=0.1
+2024-07-27 22:37:40,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=72377.33333333333, ans=0.0
+2024-07-27 22:37:55,007 INFO [train.py:1114] (3/4) Epoch 6, batch 3200, loss[loss=0.2508, simple_loss=0.3306, pruned_loss=0.08549, over 4835.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3135, pruned_loss=0.07652, over 939612.94 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:37:57,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=72417.33333333333, ans=0.125
+2024-07-27 22:37:59,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.79 vs. limit=8.0
+2024-07-27 22:38:04,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72430.66666666667, ans=0.125
+2024-07-27 22:38:11,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.899e+01 6.448e+01 7.393e+01 1.095e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-27 22:38:12,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=72444.0, ans=0.0
+2024-07-27 22:38:14,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.17 vs. limit=22.5
+2024-07-27 22:38:17,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=72457.33333333333, ans=0.125
+2024-07-27 22:38:28,285 INFO [train.py:1114] (3/4) Epoch 6, batch 3250, loss[loss=0.2356, simple_loss=0.3228, pruned_loss=0.07417, over 4927.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.314, pruned_loss=0.07644, over 940248.67 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:38:28,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=72484.0, ans=0.125
+2024-07-27 22:38:37,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72497.33333333333, ans=0.125
+2024-07-27 22:38:46,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=72510.66666666667, ans=0.125
+2024-07-27 22:38:49,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=72524.0, ans=0.125
+2024-07-27 22:39:00,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.65 vs. limit=22.5
+2024-07-27 22:39:01,871 INFO [train.py:1114] (3/4) Epoch 6, batch 3300, loss[loss=0.241, simple_loss=0.3128, pruned_loss=0.08466, over 4687.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3123, pruned_loss=0.07604, over 940758.81 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:39:11,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=72564.0, ans=0.2
+2024-07-27 22:39:15,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=72564.0, ans=0.0
+2024-07-27 22:39:20,235 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.037e+01 6.381e+01 7.466e+01 1.307e+02, threshold=1.276e+02, percent-clipped=1.0
+2024-07-27 22:39:23,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=72590.66666666667, ans=0.0
+2024-07-27 22:39:36,879 INFO [train.py:1114] (3/4) Epoch 6, batch 3350, loss[loss=0.2881, simple_loss=0.3612, pruned_loss=0.1075, over 4615.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3134, pruned_loss=0.07683, over 938565.35 frames. ], batch size: 17, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:39:52,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=72644.0, ans=0.95
+2024-07-27 22:39:54,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=72644.0, ans=0.125
+2024-07-27 22:40:00,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=72657.33333333333, ans=0.0
+2024-07-27 22:40:02,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=72670.66666666667, ans=0.125
+2024-07-27 22:40:07,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=72670.66666666667, ans=0.04949747468305833
+2024-07-27 22:40:11,977 INFO [train.py:1114] (3/4) Epoch 6, batch 3400, loss[loss=0.197, simple_loss=0.2663, pruned_loss=0.06382, over 4802.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3129, pruned_loss=0.07736, over 937408.23 frames. ], batch size: 11, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:40:14,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=72684.0, ans=0.125
+2024-07-27 22:40:15,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.03 vs. limit=15.0
+2024-07-27 22:40:16,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=72684.0, ans=0.0
+2024-07-27 22:40:18,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=72697.33333333333, ans=0.07
+2024-07-27 22:40:22,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=72697.33333333333, ans=0.125
+2024-07-27 22:40:26,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=72710.66666666667, ans=0.09899494936611666
+2024-07-27 22:40:28,633 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.919e+01 6.608e+01 7.688e+01 1.157e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-27 22:40:32,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=72724.0, ans=0.125
+2024-07-27 22:40:40,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=72737.33333333333, ans=0.0
+2024-07-27 22:40:45,207 INFO [train.py:1114] (3/4) Epoch 6, batch 3450, loss[loss=0.2358, simple_loss=0.3178, pruned_loss=0.0769, over 4727.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.313, pruned_loss=0.07667, over 937675.24 frames. ], batch size: 19, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:40:45,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.70 vs. limit=15.0
+2024-07-27 22:40:49,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72750.66666666667, ans=0.1
+2024-07-27 22:40:54,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.40 vs. limit=15.0
+2024-07-27 22:40:56,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72764.0, ans=0.1
+2024-07-27 22:41:05,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.48 vs. limit=15.0
+2024-07-27 22:41:14,984 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.556e-02
+2024-07-27 22:41:16,422 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.84 vs. limit=12.0
+2024-07-27 22:41:18,863 INFO [train.py:1114] (3/4) Epoch 6, batch 3500, loss[loss=0.1958, simple_loss=0.2892, pruned_loss=0.05118, over 4930.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3126, pruned_loss=0.07657, over 938452.22 frames. ], batch size: 12, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:41:24,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=72830.66666666667, ans=0.125
+2024-07-27 22:41:36,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=72844.0, ans=0.125
+2024-07-27 22:41:37,345 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.111e+01 6.112e+01 6.695e+01 8.214e+01 1.239e+02, threshold=1.339e+02, percent-clipped=0.0
+2024-07-27 22:41:37,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=72844.0, ans=0.125
+2024-07-27 22:41:44,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=72857.33333333333, ans=0.125
+2024-07-27 22:41:46,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=72870.66666666667, ans=0.125
+2024-07-27 22:41:49,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.97 vs. limit=22.5
+2024-07-27 22:41:53,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=72884.0, ans=0.0
+2024-07-27 22:41:58,734 INFO [train.py:1114] (3/4) Epoch 6, batch 3550, loss[loss=0.2479, simple_loss=0.3334, pruned_loss=0.08123, over 4663.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3117, pruned_loss=0.07639, over 939118.11 frames. ], batch size: 14, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:42:02,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=72884.0, ans=0.125
+2024-07-27 22:42:05,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=72897.33333333333, ans=0.1
+2024-07-27 22:42:11,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=72910.66666666667, ans=0.0
+2024-07-27 22:42:15,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.95 vs. limit=22.5
+2024-07-27 22:42:19,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0
+2024-07-27 22:42:24,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=72937.33333333333, ans=0.1
+2024-07-27 22:42:26,169 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:42:31,736 INFO [train.py:1114] (3/4) Epoch 6, batch 3600, loss[loss=0.219, simple_loss=0.2976, pruned_loss=0.07017, over 4968.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3117, pruned_loss=0.07648, over 940432.16 frames. ], batch size: 13, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:42:48,879 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.822e+01 6.148e+01 6.891e+01 7.768e+01 1.144e+02, threshold=1.378e+02, percent-clipped=0.0
+2024-07-27 22:42:53,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.60 vs. limit=15.0
+2024-07-27 22:43:06,988 INFO [train.py:1114] (3/4) Epoch 6, batch 3650, loss[loss=0.2669, simple_loss=0.3396, pruned_loss=0.09709, over 4899.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3101, pruned_loss=0.07566, over 940791.08 frames. ], batch size: 15, lr: 1.26e-02, grad_scale: 32.0
+2024-07-27 22:43:07,767 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:43:21,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=12.0
+2024-07-27 22:43:24,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=73044.0, ans=0.125
+2024-07-27 22:43:40,165 INFO [train.py:1114] (3/4) Epoch 6, batch 3700, loss[loss=0.2215, simple_loss=0.3111, pruned_loss=0.06597, over 4936.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3096, pruned_loss=0.07493, over 941802.13 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:43:47,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73097.33333333333, ans=0.1
+2024-07-27 22:43:55,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.57 vs. limit=10.0
+2024-07-27 22:43:56,980 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.456e+01 6.084e+01 6.656e+01 7.917e+01 1.226e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 22:44:03,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=73124.0, ans=0.0
+2024-07-27 22:44:06,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.63 vs. limit=22.5
+2024-07-27 22:44:08,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=73137.33333333333, ans=0.125
+2024-07-27 22:44:11,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.56 vs. limit=22.5
+2024-07-27 22:44:12,686 INFO [train.py:1114] (3/4) Epoch 6, batch 3750, loss[loss=0.1844, simple_loss=0.2628, pruned_loss=0.05298, over 4791.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3091, pruned_loss=0.07459, over 943421.54 frames. ], batch size: 11, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:44:13,868 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.50 vs. limit=22.5
+2024-07-27 22:44:14,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73150.66666666667, ans=0.125
+2024-07-27 22:44:34,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=12.0
+2024-07-27 22:44:35,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=14.73 vs. limit=15.0
+2024-07-27 22:44:39,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=73204.0, ans=0.035
+2024-07-27 22:44:42,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=73204.0, ans=0.1
+2024-07-27 22:44:44,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.91 vs. limit=10.0
+2024-07-27 22:44:45,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.40 vs. limit=22.5
+2024-07-27 22:44:47,691 INFO [train.py:1114] (3/4) Epoch 6, batch 3800, loss[loss=0.2447, simple_loss=0.3337, pruned_loss=0.07788, over 4820.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3098, pruned_loss=0.07585, over 942184.52 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:44:54,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73230.66666666667, ans=0.125
+2024-07-27 22:44:57,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=73230.66666666667, ans=0.0
+2024-07-27 22:44:58,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=73230.66666666667, ans=0.0
+2024-07-27 22:44:59,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=73230.66666666667, ans=0.1
+2024-07-27 22:45:00,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=73244.0, ans=0.125
+2024-07-27 22:45:00,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=73244.0, ans=0.5
+2024-07-27 22:45:02,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=73244.0, ans=0.2
+2024-07-27 22:45:04,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=73244.0, ans=0.125
+2024-07-27 22:45:04,710 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 6.031e+01 6.654e+01 7.619e+01 1.236e+02, threshold=1.331e+02, percent-clipped=0.0
+2024-07-27 22:45:10,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=73257.33333333333, ans=0.025
+2024-07-27 22:45:14,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.05 vs. limit=10.0
+2024-07-27 22:45:20,975 INFO [train.py:1114] (3/4) Epoch 6, batch 3850, loss[loss=0.2007, simple_loss=0.2968, pruned_loss=0.05228, over 4600.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.3105, pruned_loss=0.07618, over 942497.06 frames. ], batch size: 16, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:45:46,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73324.0, ans=0.1
+2024-07-27 22:45:47,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.36 vs. limit=15.0
+2024-07-27 22:45:48,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.72 vs. limit=22.5
+2024-07-27 22:45:52,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=73337.33333333333, ans=0.09899494936611666
+2024-07-27 22:45:52,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.50 vs. limit=15.0
+2024-07-27 22:45:56,553 INFO [train.py:1114] (3/4) Epoch 6, batch 3900, loss[loss=0.2235, simple_loss=0.3201, pruned_loss=0.06339, over 4817.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3101, pruned_loss=0.07539, over 942632.73 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:46:03,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=73364.0, ans=0.025
+2024-07-27 22:46:13,362 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.979e+01 6.121e+01 6.587e+01 7.635e+01 1.146e+02, threshold=1.317e+02, percent-clipped=0.0
+2024-07-27 22:46:26,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=73404.0, ans=0.04949747468305833
+2024-07-27 22:46:26,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0
+2024-07-27 22:46:29,896 INFO [train.py:1114] (3/4) Epoch 6, batch 3950, loss[loss=0.2698, simple_loss=0.3482, pruned_loss=0.09569, over 4842.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3109, pruned_loss=0.07571, over 944510.27 frames. ], batch size: 16, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:46:37,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=73430.66666666667, ans=0.2
+2024-07-27 22:46:41,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=73430.66666666667, ans=0.2
+2024-07-27 22:46:41,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=73430.66666666667, ans=0.0
+2024-07-27 22:46:45,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=73444.0, ans=0.1
+2024-07-27 22:46:47,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=73444.0, ans=0.0
+2024-07-27 22:46:47,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=73444.0, ans=0.125
+2024-07-27 22:46:51,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.40 vs. limit=12.0
+2024-07-27 22:46:57,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0
+2024-07-27 22:46:58,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.20 vs. limit=22.5
+2024-07-27 22:47:02,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=73470.66666666667, ans=0.125
+2024-07-27 22:47:05,387 INFO [train.py:1114] (3/4) Epoch 6, batch 4000, loss[loss=0.1953, simple_loss=0.2877, pruned_loss=0.05142, over 4771.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3115, pruned_loss=0.07628, over 941303.22 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:47:22,994 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 6.236e+01 6.803e+01 7.982e+01 1.360e+02, threshold=1.361e+02, percent-clipped=1.0
+2024-07-27 22:47:24,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.10 vs. limit=15.0
+2024-07-27 22:47:25,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=73524.0, ans=0.0
+2024-07-27 22:47:35,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-27 22:47:37,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.01 vs. limit=6.0
+2024-07-27 22:47:39,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.51 vs. limit=15.0
+2024-07-27 22:47:39,834 INFO [train.py:1114] (3/4) Epoch 6, batch 4050, loss[loss=0.3264, simple_loss=0.3744, pruned_loss=0.1392, over 3428.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3114, pruned_loss=0.07606, over 939596.54 frames. ], batch size: 36, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:47:43,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=73550.66666666667, ans=0.125
+2024-07-27 22:47:49,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.23 vs. limit=22.5
+2024-07-27 22:47:51,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=73564.0, ans=0.025
+2024-07-27 22:47:55,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=73577.33333333333, ans=0.125
+2024-07-27 22:47:59,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=73590.66666666667, ans=0.2
+2024-07-27 22:48:07,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.27 vs. limit=15.0
+2024-07-27 22:48:09,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.66 vs. limit=6.0
+2024-07-27 22:48:14,295 INFO [train.py:1114] (3/4) Epoch 6, batch 4100, loss[loss=0.2587, simple_loss=0.3391, pruned_loss=0.08912, over 4895.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.3121, pruned_loss=0.07634, over 939210.68 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:48:18,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.55 vs. limit=10.0
+2024-07-27 22:48:23,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=73630.66666666667, ans=0.125
+2024-07-27 22:48:27,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=73644.0, ans=10.0
+2024-07-27 22:48:27,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.87 vs. limit=22.5
+2024-07-27 22:48:31,603 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 6.159e+01 6.782e+01 8.525e+01 1.477e+02, threshold=1.356e+02, percent-clipped=2.0
+2024-07-27 22:48:38,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.54 vs. limit=15.0
+2024-07-27 22:48:49,531 INFO [train.py:1114] (3/4) Epoch 6, batch 4150, loss[loss=0.2426, simple_loss=0.316, pruned_loss=0.08457, over 4843.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3118, pruned_loss=0.07608, over 938468.35 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:48:52,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=15.0
+2024-07-27 22:49:11,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=73724.0, ans=0.0
+2024-07-27 22:49:12,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=73724.0, ans=0.04949747468305833
+2024-07-27 22:49:17,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=73737.33333333333, ans=0.2
+2024-07-27 22:49:23,811 INFO [train.py:1114] (3/4) Epoch 6, batch 4200, loss[loss=0.2143, simple_loss=0.2965, pruned_loss=0.06604, over 4914.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3116, pruned_loss=0.07606, over 939556.38 frames. ], batch size: 15, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:49:23,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=73750.66666666667, ans=0.025
+2024-07-27 22:49:27,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=73750.66666666667, ans=0.1
+2024-07-27 22:49:40,726 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.760e+01 6.554e+01 7.096e+01 1.149e+02, threshold=1.311e+02, percent-clipped=0.0
+2024-07-27 22:49:42,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=73790.66666666667, ans=0.0
+2024-07-27 22:49:47,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=73790.66666666667, ans=0.0
+2024-07-27 22:49:57,282 INFO [train.py:1114] (3/4) Epoch 6, batch 4250, loss[loss=0.2133, simple_loss=0.2939, pruned_loss=0.06633, over 4643.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.3116, pruned_loss=0.07604, over 940372.76 frames. ], batch size: 12, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:50:04,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=73830.66666666667, ans=0.125
+2024-07-27 22:50:06,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=73830.66666666667, ans=0.125
+2024-07-27 22:50:16,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=73844.0, ans=0.125
+2024-07-27 22:50:29,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=73870.66666666667, ans=0.0
+2024-07-27 22:50:32,503 INFO [train.py:1114] (3/4) Epoch 6, batch 4300, loss[loss=0.2345, simple_loss=0.3137, pruned_loss=0.07767, over 4762.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3128, pruned_loss=0.07648, over 940041.76 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:50:39,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=73897.33333333333, ans=0.125
+2024-07-27 22:50:49,608 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 6.051e+01 7.094e+01 8.613e+01 1.493e+02, threshold=1.419e+02, percent-clipped=5.0
+2024-07-27 22:50:55,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.48 vs. limit=10.0
+2024-07-27 22:51:05,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.87 vs. limit=22.5
+2024-07-27 22:51:08,577 INFO [train.py:1114] (3/4) Epoch 6, batch 4350, loss[loss=0.2459, simple_loss=0.3187, pruned_loss=0.08652, over 4759.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3134, pruned_loss=0.07657, over 941083.53 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:51:17,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.29 vs. limit=22.5
+2024-07-27 22:51:27,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=73977.33333333333, ans=0.025
+2024-07-27 22:51:30,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.63 vs. limit=15.0
+2024-07-27 22:51:32,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=73990.66666666667, ans=0.025
+2024-07-27 22:51:43,388 INFO [train.py:1114] (3/4) Epoch 6, batch 4400, loss[loss=0.2561, simple_loss=0.3418, pruned_loss=0.08527, over 4818.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3136, pruned_loss=0.07659, over 940846.47 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0
+2024-07-27 22:51:45,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.32 vs. limit=10.0
+2024-07-27 22:51:46,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0
+2024-07-27 22:51:52,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74030.66666666667, ans=0.1
+2024-07-27 22:51:53,728 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 22:51:54,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=74030.66666666667, ans=0.125
+2024-07-27 22:52:00,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.058e+01 5.949e+01 6.629e+01 7.705e+01 1.284e+02, threshold=1.326e+02, percent-clipped=0.0
+2024-07-27 22:52:00,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=74044.0, ans=0.125
+2024-07-27 22:52:02,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=74057.33333333333, ans=0.125
+2024-07-27 22:52:10,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=74070.66666666667, ans=0.0
+2024-07-27 22:52:17,027 INFO [train.py:1114] (3/4) Epoch 6, batch 4450, loss[loss=0.2578, simple_loss=0.314, pruned_loss=0.1008, over 4950.00 frames. ], tot_loss[loss=0.234, simple_loss=0.314, pruned_loss=0.07696, over 939789.02 frames. 
], batch size: 12, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:52:18,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=74084.0, ans=0.125 +2024-07-27 22:52:21,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=74084.0, ans=0.2 +2024-07-27 22:52:23,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=20.84 vs. limit=15.0 +2024-07-27 22:52:31,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=74110.66666666667, ans=0.2 +2024-07-27 22:52:44,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=74137.33333333333, ans=0.2 +2024-07-27 22:52:50,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.70 vs. limit=12.0 +2024-07-27 22:52:51,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=74150.66666666667, ans=0.0 +2024-07-27 22:52:51,883 INFO [train.py:1114] (3/4) Epoch 6, batch 4500, loss[loss=0.1957, simple_loss=0.291, pruned_loss=0.05026, over 4742.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3133, pruned_loss=0.07604, over 939231.71 frames. ], batch size: 14, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:52:54,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=74150.66666666667, ans=0.2 +2024-07-27 22:52:55,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=74150.66666666667, ans=0.125 +2024-07-27 22:52:57,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74164.0, ans=0.1 +2024-07-27 22:53:08,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.811e+01 6.353e+01 6.989e+01 9.336e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-27 22:53:10,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.73 vs. limit=15.0 +2024-07-27 22:53:16,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=74190.66666666667, ans=0.0 +2024-07-27 22:53:24,769 INFO [train.py:1114] (3/4) Epoch 6, batch 4550, loss[loss=0.2324, simple_loss=0.3117, pruned_loss=0.07655, over 4901.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3136, pruned_loss=0.07621, over 940819.89 frames. ], batch size: 13, lr: 1.25e-02, grad_scale: 32.0 +2024-07-27 22:53:31,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=74230.66666666667, ans=0.0 +2024-07-27 22:53:43,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=74244.0, ans=0.125 +2024-07-27 22:53:45,867 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.32 vs. limit=12.0 +2024-07-27 22:53:47,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=6.70 vs. 
limit=15.0 +2024-07-27 22:53:48,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=74257.33333333333, ans=0.0 +2024-07-27 22:53:52,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=74270.66666666667, ans=0.125 +2024-07-27 22:53:52,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=74270.66666666667, ans=10.0 +2024-07-27 22:53:58,221 INFO [train.py:1114] (3/4) Epoch 6, batch 4600, loss[loss=0.2238, simple_loss=0.3039, pruned_loss=0.0719, over 4569.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.312, pruned_loss=0.07533, over 939155.95 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:54:07,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.94 vs. limit=15.0 +2024-07-27 22:54:18,335 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 6.152e+01 6.770e+01 8.392e+01 1.380e+02, threshold=1.354e+02, percent-clipped=1.0 +2024-07-27 22:54:21,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74324.0, ans=0.1 +2024-07-27 22:54:28,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=74337.33333333333, ans=0.125 +2024-07-27 22:54:31,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=74337.33333333333, ans=0.125 +2024-07-27 22:54:32,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=74337.33333333333, ans=0.0 +2024-07-27 22:54:34,078 INFO [train.py:1114] (3/4) Epoch 6, batch 4650, loss[loss=0.2191, simple_loss=0.3108, pruned_loss=0.0637, over 4859.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3126, pruned_loss=0.07565, over 940798.32 frames. 
], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:54:34,235 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.023e-01 +2024-07-27 22:54:34,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=74350.66666666667, ans=0.125 +2024-07-27 22:54:34,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=74350.66666666667, ans=0.0 +2024-07-27 22:54:39,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=74350.66666666667, ans=0.0 +2024-07-27 22:54:40,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=74364.0, ans=0.0 +2024-07-27 22:54:40,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74364.0, ans=0.125 +2024-07-27 22:54:41,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=74364.0, ans=0.0 +2024-07-27 22:55:04,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=74377.33333333333, ans=0.2 +2024-07-27 22:55:37,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74404.0, ans=0.1 +2024-07-27 22:55:38,799 INFO [train.py:1114] (3/4) Epoch 6, batch 4700, loss[loss=0.1889, simple_loss=0.2807, pruned_loss=0.04859, over 4719.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3122, pruned_loss=0.07574, over 937860.22 frames. ], batch size: 11, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:55:41,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=74417.33333333333, ans=0.1 +2024-07-27 22:55:42,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=74417.33333333333, ans=0.125 +2024-07-27 22:55:51,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.69 vs. limit=15.0 +2024-07-27 22:55:54,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=74444.0, ans=0.125 +2024-07-27 22:55:57,184 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.070e+01 6.903e+01 7.937e+01 1.102e+02, threshold=1.381e+02, percent-clipped=0.0 +2024-07-27 22:57:01,147 INFO [train.py:1114] (3/4) Epoch 6, batch 4750, loss[loss=0.2356, simple_loss=0.3103, pruned_loss=0.08043, over 4491.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3124, pruned_loss=0.07643, over 935958.44 frames. ], batch size: 21, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:57:04,152 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.34 vs. 
limit=15.0 +2024-07-27 22:57:06,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=74484.0, ans=0.1 +2024-07-27 22:57:08,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=74497.33333333333, ans=0.2 +2024-07-27 22:57:09,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.79 vs. limit=15.0 +2024-07-27 22:57:14,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=74510.66666666667, ans=0.2 +2024-07-27 22:57:16,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=74510.66666666667, ans=15.0 +2024-07-27 22:57:22,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=18.49 vs. limit=15.0 +2024-07-27 22:57:23,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=74510.66666666667, ans=0.0 +2024-07-27 22:57:54,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74537.33333333333, ans=0.1 +2024-07-27 22:58:00,100 INFO [train.py:1114] (3/4) Epoch 6, batch 4800, loss[loss=0.2363, simple_loss=0.3164, pruned_loss=0.07809, over 4693.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3123, pruned_loss=0.07695, over 933259.53 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:58:05,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=74550.66666666667, ans=0.0 +2024-07-27 22:58:08,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=74564.0, ans=0.0 +2024-07-27 22:58:11,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=74564.0, ans=0.0 +2024-07-27 22:58:27,446 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.139e+01 5.953e+01 6.774e+01 8.357e+01 1.268e+02, threshold=1.355e+02, percent-clipped=0.0 +2024-07-27 22:58:29,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=74590.66666666667, ans=0.2 +2024-07-27 22:58:32,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=74590.66666666667, ans=0.125 +2024-07-27 22:58:41,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=15.0 +2024-07-27 22:58:43,664 INFO [train.py:1114] (3/4) Epoch 6, batch 4850, loss[loss=0.217, simple_loss=0.3067, pruned_loss=0.06365, over 4738.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3126, pruned_loss=0.07726, over 932636.92 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:58:44,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=74617.33333333333, ans=0.125 +2024-07-27 22:58:45,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.90 vs. 
limit=12.0 +2024-07-27 22:58:49,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=74630.66666666667, ans=0.125 +2024-07-27 22:58:53,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=74630.66666666667, ans=0.125 +2024-07-27 22:58:55,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=74630.66666666667, ans=0.125 +2024-07-27 22:58:56,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=74644.0, ans=0.1 +2024-07-27 22:58:59,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-27 22:59:29,830 INFO [train.py:1114] (3/4) Epoch 6, batch 4900, loss[loss=0.2085, simple_loss=0.2937, pruned_loss=0.06163, over 4772.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3119, pruned_loss=0.0764, over 934540.36 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 22:59:44,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=74710.66666666667, ans=0.2 +2024-07-27 22:59:48,779 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.911e+01 6.095e+01 6.974e+01 8.315e+01 1.441e+02, threshold=1.395e+02, percent-clipped=3.0 +2024-07-27 23:00:07,419 INFO [train.py:1114] (3/4) Epoch 6, batch 4950, loss[loss=0.3199, simple_loss=0.3704, pruned_loss=0.1347, over 3033.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3151, pruned_loss=0.07844, over 931151.10 frames. ], batch size: 35, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:00:14,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=74764.0, ans=0.2 +2024-07-27 23:00:31,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=74777.33333333333, ans=0.05 +2024-07-27 23:00:47,380 INFO [train.py:1114] (3/4) Epoch 6, batch 5000, loss[loss=0.261, simple_loss=0.3472, pruned_loss=0.08739, over 4657.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3155, pruned_loss=0.07822, over 935172.71 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:00:49,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=74817.33333333333, ans=15.0 +2024-07-27 23:01:00,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.48 vs. limit=12.0 +2024-07-27 23:01:00,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-27 23:01:05,415 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 6.345e+01 7.582e+01 9.212e+01 1.315e+02, threshold=1.516e+02, percent-clipped=0.0 +2024-07-27 23:01:11,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=74857.33333333333, ans=0.1 +2024-07-27 23:01:24,716 INFO [train.py:1114] (3/4) Epoch 6, batch 5050, loss[loss=0.1775, simple_loss=0.2525, pruned_loss=0.05127, over 4858.00 frames. 
], tot_loss[loss=0.234, simple_loss=0.3141, pruned_loss=0.07691, over 937786.18 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:01:53,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.19 vs. limit=15.0 +2024-07-27 23:01:56,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=74937.33333333333, ans=0.2 +2024-07-27 23:02:00,476 INFO [train.py:1114] (3/4) Epoch 6, batch 5100, loss[loss=0.2434, simple_loss=0.3121, pruned_loss=0.08734, over 4776.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.3142, pruned_loss=0.0772, over 936040.40 frames. ], batch size: 12, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:02:01,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=74950.66666666667, ans=0.0 +2024-07-27 23:02:10,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.71 vs. limit=22.5 +2024-07-27 23:02:17,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-27 23:02:23,235 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.910e+01 5.981e+01 6.894e+01 7.665e+01 1.178e+02, threshold=1.379e+02, percent-clipped=0.0 +2024-07-27 23:02:32,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=75004.0, ans=0.125 +2024-07-27 23:02:32,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=75004.0, ans=0.0 +2024-07-27 23:02:39,311 INFO [train.py:1114] (3/4) Epoch 6, batch 5150, loss[loss=0.2691, simple_loss=0.3412, pruned_loss=0.09846, over 4827.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3149, pruned_loss=0.07738, over 937036.19 frames. ], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:02:41,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.07 vs. limit=22.5 +2024-07-27 23:02:48,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=75030.66666666667, ans=0.025 +2024-07-27 23:02:54,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=75044.0, ans=0.07 +2024-07-27 23:03:10,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.14 vs. limit=12.0 +2024-07-27 23:03:12,933 INFO [train.py:1114] (3/4) Epoch 6, batch 5200, loss[loss=0.2494, simple_loss=0.3523, pruned_loss=0.0733, over 4666.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3139, pruned_loss=0.07697, over 936902.59 frames. ], batch size: 14, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:03:22,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=75097.33333333333, ans=15.0 +2024-07-27 23:03:24,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. 
limit=15.0 +2024-07-27 23:03:25,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0 +2024-07-27 23:03:30,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.93 vs. limit=15.0 +2024-07-27 23:03:32,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=75110.66666666667, ans=0.0 +2024-07-27 23:03:32,616 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.053e+01 6.671e+01 7.847e+01 1.456e+02, threshold=1.334e+02, percent-clipped=1.0 +2024-07-27 23:03:36,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=75124.0, ans=0.0 +2024-07-27 23:03:39,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=75124.0, ans=0.125 +2024-07-27 23:03:42,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.02 vs. limit=22.5 +2024-07-27 23:03:43,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75137.33333333333, ans=0.1 +2024-07-27 23:03:48,753 INFO [train.py:1114] (3/4) Epoch 6, batch 5250, loss[loss=0.2391, simple_loss=0.311, pruned_loss=0.08358, over 4901.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3125, pruned_loss=0.0765, over 936557.56 frames. ], batch size: 13, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:03:53,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.96 vs. limit=6.0 +2024-07-27 23:03:55,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=75164.0, ans=0.1 +2024-07-27 23:04:03,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=75177.33333333333, ans=0.1 +2024-07-27 23:04:06,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=75177.33333333333, ans=0.2 +2024-07-27 23:04:07,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=75177.33333333333, ans=0.125 +2024-07-27 23:04:09,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=75190.66666666667, ans=0.125 +2024-07-27 23:04:24,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=75217.33333333333, ans=0.125 +2024-07-27 23:04:24,658 INFO [train.py:1114] (3/4) Epoch 6, batch 5300, loss[loss=0.257, simple_loss=0.3571, pruned_loss=0.07845, over 4608.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3113, pruned_loss=0.07601, over 934776.87 frames. 
], batch size: 16, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:04:31,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=75230.66666666667, ans=0.125 +2024-07-27 23:04:32,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=75230.66666666667, ans=0.125 +2024-07-27 23:04:41,939 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.955e+01 6.651e+01 7.573e+01 1.282e+02, threshold=1.330e+02, percent-clipped=0.0 +2024-07-27 23:04:47,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=75257.33333333333, ans=0.0 +2024-07-27 23:04:54,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=75270.66666666667, ans=0.125 +2024-07-27 23:04:57,838 INFO [train.py:1114] (3/4) Epoch 6, batch 5350, loss[loss=0.2069, simple_loss=0.2757, pruned_loss=0.06904, over 4542.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3116, pruned_loss=0.07589, over 936622.99 frames. ], batch size: 10, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:05:04,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=75297.33333333333, ans=0.125 +2024-07-27 23:05:05,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=75297.33333333333, ans=0.2 +2024-07-27 23:05:06,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-07-27 23:05:12,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=75310.66666666667, ans=0.125 +2024-07-27 23:05:33,162 INFO [train.py:1114] (3/4) Epoch 6, batch 5400, loss[loss=0.3029, simple_loss=0.3714, pruned_loss=0.1173, over 4233.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3133, pruned_loss=0.07719, over 930511.03 frames. 
], batch size: 25, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:05:42,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75364.0, ans=0.1 +2024-07-27 23:05:44,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=75364.0, ans=0.125 +2024-07-27 23:05:45,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=75377.33333333333, ans=10.0 +2024-07-27 23:05:47,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=75377.33333333333, ans=0.125 +2024-07-27 23:05:50,264 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.962e+01 6.573e+01 7.607e+01 1.590e+02, threshold=1.315e+02, percent-clipped=1.0 +2024-07-27 23:05:50,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=75377.33333333333, ans=15.0 +2024-07-27 23:05:51,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=75377.33333333333, ans=0.2 +2024-07-27 23:05:52,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=75390.66666666667, ans=0.125 +2024-07-27 23:05:54,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=75390.66666666667, ans=0.95 +2024-07-27 23:05:56,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=75390.66666666667, ans=0.125 +2024-07-27 23:06:00,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.42 vs. limit=15.0 +2024-07-27 23:06:01,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-07-27 23:06:04,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75404.0, ans=0.1 +2024-07-27 23:06:07,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.55 vs. limit=10.0 +2024-07-27 23:06:07,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=75417.33333333333, ans=0.2 +2024-07-27 23:06:07,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=75417.33333333333, ans=0.125 +2024-07-27 23:06:08,281 INFO [train.py:1114] (3/4) Epoch 6, batch 5450, loss[loss=0.2505, simple_loss=0.3131, pruned_loss=0.09393, over 4685.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3128, pruned_loss=0.07683, over 933022.04 frames. 
], batch size: 11, lr: 1.24e-02, grad_scale: 32.0 +2024-07-27 23:06:12,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=75417.33333333333, ans=0.125 +2024-07-27 23:06:12,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=75417.33333333333, ans=0.125 +2024-07-27 23:06:16,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75430.66666666667, ans=0.1 +2024-07-27 23:06:18,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=75430.66666666667, ans=0.025 +2024-07-27 23:06:28,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75457.33333333333, ans=0.1 +2024-07-27 23:06:30,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.70 vs. limit=15.0 +2024-07-27 23:06:34,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.67 vs. limit=22.5 +2024-07-27 23:06:36,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=75470.66666666667, ans=0.125 +2024-07-27 23:06:40,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.45 vs. limit=22.5 +2024-07-27 23:06:42,303 INFO [train.py:1114] (3/4) Epoch 6, batch 5500, loss[loss=0.265, simple_loss=0.338, pruned_loss=0.09597, over 4239.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3129, pruned_loss=0.077, over 930517.84 frames. ], batch size: 25, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:06:43,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75484.0, ans=0.1 +2024-07-27 23:06:51,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=75497.33333333333, ans=0.125 +2024-07-27 23:06:53,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0 +2024-07-27 23:06:59,682 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 6.092e+01 6.682e+01 7.913e+01 1.212e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-27 23:07:02,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=75524.0, ans=0.0 +2024-07-27 23:07:10,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=75537.33333333333, ans=0.125 +2024-07-27 23:07:10,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.00 vs. limit=8.0 +2024-07-27 23:07:15,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=75537.33333333333, ans=0.125 +2024-07-27 23:07:21,735 INFO [train.py:1114] (3/4) Epoch 6, batch 5550, loss[loss=0.1828, simple_loss=0.2708, pruned_loss=0.04744, over 4705.00 frames. 
], tot_loss[loss=0.2332, simple_loss=0.3125, pruned_loss=0.07696, over 932560.68 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:07:22,877 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.78 vs. limit=22.5 +2024-07-27 23:07:27,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75564.0, ans=0.1 +2024-07-27 23:07:31,424 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.62 vs. limit=15.0 +2024-07-27 23:07:34,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.10 vs. limit=22.5 +2024-07-27 23:07:37,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=75577.33333333333, ans=0.0 +2024-07-27 23:07:38,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-07-27 23:07:39,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=75577.33333333333, ans=0.025 +2024-07-27 23:07:40,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. limit=6.0 +2024-07-27 23:07:41,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=75590.66666666667, ans=0.0 +2024-07-27 23:07:41,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75590.66666666667, ans=0.1 +2024-07-27 23:07:44,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0 +2024-07-27 23:07:54,737 INFO [train.py:1114] (3/4) Epoch 6, batch 5600, loss[loss=0.2267, simple_loss=0.3143, pruned_loss=0.06957, over 4736.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3125, pruned_loss=0.07687, over 934001.63 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 64.0 +2024-07-27 23:07:54,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_na.min_abs, batch_count=75617.33333333333, ans=0.02 +2024-07-27 23:07:56,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=75617.33333333333, ans=0.1 +2024-07-27 23:08:04,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=75617.33333333333, ans=0.2 +2024-07-27 23:08:08,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75630.66666666667, ans=0.1 +2024-07-27 23:08:09,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.25 vs. 
limit=15.0 +2024-07-27 23:08:13,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=75644.0, ans=0.0 +2024-07-27 23:08:16,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.86 vs. limit=6.0 +2024-07-27 23:08:16,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.881e+01 6.141e+01 6.729e+01 7.455e+01 1.025e+02, threshold=1.346e+02, percent-clipped=0.0 +2024-07-27 23:08:27,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=75670.66666666667, ans=0.125 +2024-07-27 23:08:32,405 INFO [train.py:1114] (3/4) Epoch 6, batch 5650, loss[loss=0.2561, simple_loss=0.3357, pruned_loss=0.08823, over 4425.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3128, pruned_loss=0.07715, over 936280.95 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 64.0 +2024-07-27 23:08:42,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=75697.33333333333, ans=0.125 +2024-07-27 23:08:50,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=75710.66666666667, ans=0.0 +2024-07-27 23:09:00,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=75724.0, ans=0.0 +2024-07-27 23:09:02,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=75737.33333333333, ans=0.0 +2024-07-27 23:09:08,126 INFO [train.py:1114] (3/4) Epoch 6, batch 5700, loss[loss=0.2466, simple_loss=0.3338, pruned_loss=0.07968, over 4686.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3137, pruned_loss=0.07732, over 937642.24 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:09:15,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=75764.0, ans=0.0 +2024-07-27 23:09:19,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=75764.0, ans=0.125 +2024-07-27 23:09:26,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=75777.33333333333, ans=0.025 +2024-07-27 23:09:26,481 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.116e+01 6.584e+01 7.686e+01 8.929e+01 1.310e+02, threshold=1.537e+02, percent-clipped=0.0 +2024-07-27 23:09:34,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.39 vs. limit=15.0 +2024-07-27 23:09:41,527 INFO [train.py:1114] (3/4) Epoch 6, batch 5750, loss[loss=0.2384, simple_loss=0.3246, pruned_loss=0.07606, over 4726.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3139, pruned_loss=0.07699, over 937730.50 frames. 
], batch size: 19, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:09:56,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=75844.0, ans=10.0 +2024-07-27 23:10:02,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75844.0, ans=0.1 +2024-07-27 23:10:04,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.53 vs. limit=15.0 +2024-07-27 23:10:04,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=75857.33333333333, ans=0.125 +2024-07-27 23:10:16,848 INFO [train.py:1114] (3/4) Epoch 6, batch 5800, loss[loss=0.2714, simple_loss=0.3539, pruned_loss=0.09443, over 4717.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.3147, pruned_loss=0.07788, over 937134.12 frames. ], batch size: 19, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:10:16,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=75884.0, ans=0.125 +2024-07-27 23:10:28,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=75897.33333333333, ans=0.1 +2024-07-27 23:10:34,693 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.114e+01 6.081e+01 6.996e+01 7.790e+01 1.543e+02, threshold=1.399e+02, percent-clipped=1.0 +2024-07-27 23:10:35,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=75910.66666666667, ans=0.1 +2024-07-27 23:10:41,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=75924.0, ans=0.0 +2024-07-27 23:10:45,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.71 vs. limit=10.0 +2024-07-27 23:10:50,812 INFO [train.py:1114] (3/4) Epoch 6, batch 5850, loss[loss=0.2558, simple_loss=0.3299, pruned_loss=0.09089, over 4471.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3139, pruned_loss=0.07743, over 937342.65 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:10:56,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=75950.66666666667, ans=0.0 +2024-07-27 23:11:00,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=75964.0, ans=0.125 +2024-07-27 23:11:02,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=75964.0, ans=0.0 +2024-07-27 23:11:05,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=75964.0, ans=0.09899494936611666 +2024-07-27 23:11:08,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=75977.33333333333, ans=0.0 +2024-07-27 23:11:30,105 INFO [train.py:1114] (3/4) Epoch 6, batch 5900, loss[loss=0.2625, simple_loss=0.3514, pruned_loss=0.08681, over 4702.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3134, pruned_loss=0.07687, over 937538.18 frames. 
], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:11:31,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=76017.33333333333, ans=0.025 +2024-07-27 23:11:34,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=76017.33333333333, ans=0.125 +2024-07-27 23:11:37,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.58 vs. limit=22.5 +2024-07-27 23:11:40,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=76030.66666666667, ans=0.125 +2024-07-27 23:11:40,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=76030.66666666667, ans=0.0 +2024-07-27 23:11:48,176 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 6.028e+01 6.783e+01 7.450e+01 1.132e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-27 23:12:03,427 INFO [train.py:1114] (3/4) Epoch 6, batch 5950, loss[loss=0.2807, simple_loss=0.3616, pruned_loss=0.09996, over 4698.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3132, pruned_loss=0.07664, over 939659.57 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:12:04,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=76084.0, ans=0.125 +2024-07-27 23:12:15,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=76097.33333333333, ans=0.0 +2024-07-27 23:12:19,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=15.0 +2024-07-27 23:12:20,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.55 vs. limit=22.5 +2024-07-27 23:12:21,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=15.0 +2024-07-27 23:12:21,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=76110.66666666667, ans=0.09899494936611666 +2024-07-27 23:12:27,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-27 23:12:33,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=76137.33333333333, ans=0.0 +2024-07-27 23:12:36,361 INFO [train.py:1114] (3/4) Epoch 6, batch 6000, loss[loss=0.2764, simple_loss=0.349, pruned_loss=0.1019, over 4503.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3125, pruned_loss=0.07692, over 937246.47 frames. 
], batch size: 26, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:12:36,361 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-27 23:12:46,244 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.2412, 3.5023, 3.6331, 3.5211], device='cuda:3') +2024-07-27 23:12:50,096 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.1905, simple_loss=0.2947, pruned_loss=0.04318, over 944034.00 frames. +2024-07-27 23:12:50,096 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-27 23:13:06,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=76177.33333333333, ans=0.125 +2024-07-27 23:13:07,972 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 6.230e+01 7.142e+01 8.647e+01 1.308e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-27 23:13:14,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=76190.66666666667, ans=0.2 +2024-07-27 23:13:24,114 INFO [train.py:1114] (3/4) Epoch 6, batch 6050, loss[loss=0.1918, simple_loss=0.2739, pruned_loss=0.05484, over 4781.00 frames. ], tot_loss[loss=0.232, simple_loss=0.3114, pruned_loss=0.07624, over 938701.05 frames. ], batch size: 12, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:13:28,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=76217.33333333333, ans=0.1 +2024-07-27 23:13:29,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=76217.33333333333, ans=0.0 +2024-07-27 23:13:30,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=76230.66666666667, ans=0.0 +2024-07-27 23:13:57,302 INFO [train.py:1114] (3/4) Epoch 6, batch 6100, loss[loss=0.2693, simple_loss=0.3552, pruned_loss=0.09168, over 4705.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3104, pruned_loss=0.07559, over 938313.14 frames. ], batch size: 15, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:13:58,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=76284.0, ans=0.0 +2024-07-27 23:14:11,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76297.33333333333, ans=0.0 +2024-07-27 23:14:17,100 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 6.126e+01 6.655e+01 7.850e+01 1.418e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-27 23:14:28,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=76337.33333333333, ans=0.125 +2024-07-27 23:14:32,319 INFO [train.py:1114] (3/4) Epoch 6, batch 6150, loss[loss=0.2739, simple_loss=0.3349, pruned_loss=0.1064, over 3211.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3102, pruned_loss=0.07506, over 936653.17 frames. 
], batch size: 35, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:14:38,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=76350.66666666667, ans=0.07 +2024-07-27 23:14:48,187 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:14:54,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=76390.66666666667, ans=0.125 +2024-07-27 23:14:58,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=76390.66666666667, ans=0.125 +2024-07-27 23:15:01,311 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:15:02,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=76404.0, ans=0.0 +2024-07-27 23:15:07,853 INFO [train.py:1114] (3/4) Epoch 6, batch 6200, loss[loss=0.2279, simple_loss=0.3097, pruned_loss=0.07306, over 4739.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3102, pruned_loss=0.07529, over 936238.05 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:15:16,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=76417.33333333333, ans=0.025 +2024-07-27 23:15:21,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=76430.66666666667, ans=0.125 +2024-07-27 23:15:22,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=76430.66666666667, ans=0.125 +2024-07-27 23:15:22,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=76430.66666666667, ans=0.125 +2024-07-27 23:15:25,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=76444.0, ans=0.2 +2024-07-27 23:15:30,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.920e+01 6.889e+01 8.181e+01 1.186e+02, threshold=1.378e+02, percent-clipped=0.0 +2024-07-27 23:15:42,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=76470.66666666667, ans=0.09899494936611666 +2024-07-27 23:15:45,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=76484.0, ans=0.125 +2024-07-27 23:15:45,610 INFO [train.py:1114] (3/4) Epoch 6, batch 6250, loss[loss=0.225, simple_loss=0.3148, pruned_loss=0.0676, over 4802.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3106, pruned_loss=0.07558, over 932846.98 frames. ], batch size: 14, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:15:47,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.77 vs. 
limit=15.0 +2024-07-27 23:15:49,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=76484.0, ans=0.0 +2024-07-27 23:15:55,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=76497.33333333333, ans=0.125 +2024-07-27 23:16:00,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=76510.66666666667, ans=0.0 +2024-07-27 23:16:01,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76510.66666666667, ans=0.0 +2024-07-27 23:16:21,062 INFO [train.py:1114] (3/4) Epoch 6, batch 6300, loss[loss=0.223, simple_loss=0.2955, pruned_loss=0.07521, over 4531.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.3107, pruned_loss=0.07587, over 929850.33 frames. ], batch size: 10, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:16:26,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=76550.66666666667, ans=0.0 +2024-07-27 23:16:26,841 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.18 vs. limit=15.0 +2024-07-27 23:16:29,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=76564.0, ans=0.5 +2024-07-27 23:16:36,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.83 vs. limit=15.0 +2024-07-27 23:16:38,838 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.917e+01 6.519e+01 7.440e+01 1.686e+02, threshold=1.304e+02, percent-clipped=1.0 +2024-07-27 23:16:43,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=76590.66666666667, ans=0.125 +2024-07-27 23:16:45,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76590.66666666667, ans=0.2 +2024-07-27 23:16:45,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=76590.66666666667, ans=0.125 +2024-07-27 23:16:51,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.13 vs. limit=15.0 +2024-07-27 23:16:53,926 INFO [train.py:1114] (3/4) Epoch 6, batch 6350, loss[loss=0.2741, simple_loss=0.3446, pruned_loss=0.1018, over 4500.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3097, pruned_loss=0.07479, over 933843.03 frames. ], batch size: 21, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:17:02,779 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:17:05,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=76630.66666666667, ans=0.125 +2024-07-27 23:17:17,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.89 vs. 
limit=15.0 +2024-07-27 23:17:21,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=76670.66666666667, ans=0.2 +2024-07-27 23:17:22,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=76670.66666666667, ans=0.0 +2024-07-27 23:17:22,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=76670.66666666667, ans=0.0 +2024-07-27 23:17:25,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=76670.66666666667, ans=0.015 +2024-07-27 23:17:27,176 INFO [train.py:1114] (3/4) Epoch 6, batch 6400, loss[loss=0.2509, simple_loss=0.3289, pruned_loss=0.08649, over 4637.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3091, pruned_loss=0.0749, over 934908.65 frames. ], batch size: 13, lr: 1.23e-02, grad_scale: 32.0 +2024-07-27 23:17:28,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.56 vs. limit=15.0 +2024-07-27 23:17:29,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=76684.0, ans=22.5 +2024-07-27 23:17:35,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=76697.33333333333, ans=0.1 +2024-07-27 23:17:39,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=76710.66666666667, ans=0.0 +2024-07-27 23:17:44,983 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 6.138e+01 6.927e+01 7.775e+01 1.168e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-27 23:17:45,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=76710.66666666667, ans=0.0 +2024-07-27 23:17:54,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=76737.33333333333, ans=22.5 +2024-07-27 23:18:00,342 INFO [train.py:1114] (3/4) Epoch 6, batch 6450, loss[loss=0.2587, simple_loss=0.3397, pruned_loss=0.08882, over 4502.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3101, pruned_loss=0.07538, over 938755.84 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:18:03,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.28 vs. limit=15.0 +2024-07-27 23:18:18,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=76764.0, ans=0.2 +2024-07-27 23:18:25,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0 +2024-07-27 23:18:37,165 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=8.567e-02 +2024-07-27 23:18:39,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=76817.33333333333, ans=0.09899494936611666 +2024-07-27 23:18:40,300 INFO [train.py:1114] (3/4) Epoch 6, batch 6500, loss[loss=0.3359, simple_loss=0.3855, pruned_loss=0.1431, over 3359.00 frames. 
], tot_loss[loss=0.2296, simple_loss=0.3093, pruned_loss=0.07493, over 940078.30 frames. ], batch size: 35, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:18:43,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=76817.33333333333, ans=0.125 +2024-07-27 23:18:43,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=76817.33333333333, ans=0.125 +2024-07-27 23:18:54,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=76844.0, ans=0.0 +2024-07-27 23:18:58,074 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.177e+01 7.054e+01 8.466e+01 1.519e+02, threshold=1.411e+02, percent-clipped=2.0 +2024-07-27 23:19:13,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.48 vs. limit=15.0 +2024-07-27 23:19:13,482 INFO [train.py:1114] (3/4) Epoch 6, batch 6550, loss[loss=0.1518, simple_loss=0.243, pruned_loss=0.03026, over 4804.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.3087, pruned_loss=0.07403, over 943047.11 frames. ], batch size: 11, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:19:16,976 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0 +2024-07-27 23:19:19,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=76897.33333333333, ans=0.125 +2024-07-27 23:19:21,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=76897.33333333333, ans=0.125 +2024-07-27 23:19:40,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-27 23:19:46,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=76937.33333333333, ans=0.1 +2024-07-27 23:19:47,591 INFO [train.py:1114] (3/4) Epoch 6, batch 6600, loss[loss=0.2568, simple_loss=0.3367, pruned_loss=0.08844, over 4933.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3089, pruned_loss=0.07435, over 944824.93 frames. ], batch size: 14, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:19:55,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=76964.0, ans=0.07 +2024-07-27 23:19:58,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.19 vs. 
limit=15.0 +2024-07-27 23:20:05,790 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 6.034e+01 7.063e+01 8.869e+01 1.315e+02, threshold=1.413e+02, percent-clipped=0.0 +2024-07-27 23:20:06,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=76977.33333333333, ans=0.125 +2024-07-27 23:20:08,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=76990.66666666667, ans=0.0 +2024-07-27 23:20:09,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=76990.66666666667, ans=0.2 +2024-07-27 23:20:12,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. limit=6.0 +2024-07-27 23:20:15,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.00 vs. limit=22.5 +2024-07-27 23:20:21,205 INFO [train.py:1114] (3/4) Epoch 6, batch 6650, loss[loss=0.2339, simple_loss=0.3226, pruned_loss=0.07258, over 4657.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3096, pruned_loss=0.07472, over 943592.97 frames. ], batch size: 17, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:20:24,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77017.33333333333, ans=0.1 +2024-07-27 23:20:33,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.97 vs. limit=10.0 +2024-07-27 23:20:36,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.49 vs. limit=10.0 +2024-07-27 23:20:36,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77044.0, ans=0.1 +2024-07-27 23:20:40,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=22.5 +2024-07-27 23:20:44,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=77057.33333333333, ans=0.125 +2024-07-27 23:20:47,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.53 vs. limit=15.0 +2024-07-27 23:20:54,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=77070.66666666667, ans=0.0 +2024-07-27 23:20:57,070 INFO [train.py:1114] (3/4) Epoch 6, batch 6700, loss[loss=0.2554, simple_loss=0.3345, pruned_loss=0.08815, over 4676.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3111, pruned_loss=0.0753, over 942349.23 frames. 
], batch size: 19, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:20:59,220 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:20:59,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=77084.0, ans=0.125 +2024-07-27 23:21:10,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77110.66666666667, ans=0.1 +2024-07-27 23:21:15,128 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.982e+01 6.173e+01 6.934e+01 8.423e+01 1.268e+02, threshold=1.387e+02, percent-clipped=0.0 +2024-07-27 23:21:41,299 INFO [train.py:1114] (3/4) Epoch 6, batch 6750, loss[loss=0.2898, simple_loss=0.3604, pruned_loss=0.1096, over 4441.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3106, pruned_loss=0.07495, over 940738.79 frames. ], batch size: 26, lr: 1.22e-02, grad_scale: 16.0 +2024-07-27 23:21:44,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.97 vs. limit=15.0 +2024-07-27 23:21:45,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77150.66666666667, ans=0.125 +2024-07-27 23:21:48,611 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:22:00,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77190.66666666667, ans=0.1 +2024-07-27 23:22:05,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=77190.66666666667, ans=0.125 +2024-07-27 23:22:08,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=77190.66666666667, ans=0.0 +2024-07-27 23:22:11,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.77 vs. limit=22.5 +2024-07-27 23:22:16,817 INFO [train.py:1114] (3/4) Epoch 6, batch 6800, loss[loss=0.2568, simple_loss=0.3345, pruned_loss=0.08958, over 4642.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.311, pruned_loss=0.07481, over 938797.37 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:23:14,541 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.127e+01 5.858e+01 6.351e+01 7.283e+01 1.199e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-27 23:23:29,484 INFO [train.py:1114] (3/4) Epoch 6, batch 6850, loss[loss=0.2265, simple_loss=0.3255, pruned_loss=0.06377, over 4684.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3103, pruned_loss=0.0742, over 940420.25 frames. 
], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:23:31,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77284.0, ans=0.1 +2024-07-27 23:24:00,906 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.141e-02 +2024-07-27 23:24:00,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77337.33333333333, ans=0.1 +2024-07-27 23:24:03,352 INFO [train.py:1114] (3/4) Epoch 6, batch 6900, loss[loss=0.1964, simple_loss=0.2795, pruned_loss=0.05658, over 4966.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3107, pruned_loss=0.07446, over 943120.03 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:24:03,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=77350.66666666667, ans=0.125 +2024-07-27 23:24:09,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=77364.0, ans=0.0 +2024-07-27 23:24:13,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.73 vs. limit=15.0 +2024-07-27 23:24:16,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=77377.33333333333, ans=0.125 +2024-07-27 23:24:19,063 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.40 vs. limit=10.0 +2024-07-27 23:24:21,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.966e+01 6.630e+01 7.138e+01 1.259e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 23:24:35,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77404.0, ans=0.1 +2024-07-27 23:24:38,562 INFO [train.py:1114] (3/4) Epoch 6, batch 6950, loss[loss=0.1835, simple_loss=0.2646, pruned_loss=0.05118, over 4530.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3109, pruned_loss=0.0747, over 940359.16 frames. ], batch size: 10, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:24:47,688 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:24:58,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=77457.33333333333, ans=0.125 +2024-07-27 23:24:58,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77457.33333333333, ans=0.125 +2024-07-27 23:25:09,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.83 vs. limit=22.5 +2024-07-27 23:25:12,292 INFO [train.py:1114] (3/4) Epoch 6, batch 7000, loss[loss=0.2288, simple_loss=0.3153, pruned_loss=0.07115, over 4619.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3106, pruned_loss=0.07494, over 938513.66 frames. 
], batch size: 17, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:25:16,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77484.0, ans=0.125 +2024-07-27 23:25:26,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=77510.66666666667, ans=0.125 +2024-07-27 23:25:30,327 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.154e+01 6.184e+01 7.015e+01 8.119e+01 1.355e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-27 23:25:32,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=77524.0, ans=0.2 +2024-07-27 23:25:33,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.93 vs. limit=15.0 +2024-07-27 23:25:38,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-07-27 23:25:41,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=77537.33333333333, ans=0.0 +2024-07-27 23:25:44,778 INFO [train.py:1114] (3/4) Epoch 6, batch 7050, loss[loss=0.2325, simple_loss=0.3193, pruned_loss=0.07286, over 4727.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3111, pruned_loss=0.07488, over 941740.79 frames. ], batch size: 19, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:25:46,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=77550.66666666667, ans=0.5 +2024-07-27 23:26:00,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.51 vs. limit=15.0 +2024-07-27 23:26:01,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=77577.33333333333, ans=0.125 +2024-07-27 23:26:09,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77590.66666666667, ans=0.1 +2024-07-27 23:26:15,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=77604.0, ans=0.125 +2024-07-27 23:26:15,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.16 vs. limit=22.5 +2024-07-27 23:26:18,232 INFO [train.py:1114] (3/4) Epoch 6, batch 7100, loss[loss=0.283, simple_loss=0.3582, pruned_loss=0.1039, over 4813.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3134, pruned_loss=0.07649, over 936639.10 frames. 
], batch size: 15, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:26:28,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=77630.66666666667, ans=0.125 +2024-07-27 23:26:29,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=77630.66666666667, ans=0.2 +2024-07-27 23:26:40,770 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.696e+01 6.046e+01 6.711e+01 7.848e+01 1.418e+02, threshold=1.342e+02, percent-clipped=1.0 +2024-07-27 23:26:40,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=77644.0, ans=0.0 +2024-07-27 23:26:41,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=77657.33333333333, ans=0.125 +2024-07-27 23:26:49,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.57 vs. limit=15.0 +2024-07-27 23:26:54,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=77684.0, ans=0.125 +2024-07-27 23:26:55,144 INFO [train.py:1114] (3/4) Epoch 6, batch 7150, loss[loss=0.2376, simple_loss=0.3256, pruned_loss=0.07482, over 4538.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.31, pruned_loss=0.0755, over 937793.01 frames. ], batch size: 21, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:26:55,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.27 vs. limit=15.0 +2024-07-27 23:26:56,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-07-27 23:27:03,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.08 vs. limit=22.5 +2024-07-27 23:27:04,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=77697.33333333333, ans=0.1 +2024-07-27 23:27:12,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=77710.66666666667, ans=0.1 +2024-07-27 23:27:12,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.21 vs. limit=12.0 +2024-07-27 23:27:13,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=77710.66666666667, ans=0.1 +2024-07-27 23:27:14,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=77710.66666666667, ans=0.125 +2024-07-27 23:27:25,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=77737.33333333333, ans=0.125 +2024-07-27 23:27:29,627 INFO [train.py:1114] (3/4) Epoch 6, batch 7200, loss[loss=0.3306, simple_loss=0.4001, pruned_loss=0.1305, over 4800.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3124, pruned_loss=0.07633, over 938107.64 frames. 
], batch size: 15, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:27:29,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=77750.66666666667, ans=0.0 +2024-07-27 23:27:34,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=77750.66666666667, ans=0.05 +2024-07-27 23:27:35,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=77750.66666666667, ans=0.125 +2024-07-27 23:27:39,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.02 vs. limit=6.0 +2024-07-27 23:27:43,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=77777.33333333333, ans=0.09899494936611666 +2024-07-27 23:27:47,987 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 6.035e+01 6.773e+01 8.115e+01 1.390e+02, threshold=1.355e+02, percent-clipped=1.0 +2024-07-27 23:27:49,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.04 vs. limit=15.0 +2024-07-27 23:27:52,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=77790.66666666667, ans=0.0 +2024-07-27 23:27:56,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.13 vs. limit=15.0 +2024-07-27 23:28:02,494 INFO [train.py:1114] (3/4) Epoch 6, batch 7250, loss[loss=0.1686, simple_loss=0.2563, pruned_loss=0.0404, over 4859.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3111, pruned_loss=0.07535, over 939985.74 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:28:06,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=77817.33333333333, ans=0.125 +2024-07-27 23:28:07,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.37 vs. limit=15.0 +2024-07-27 23:28:08,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=77830.66666666667, ans=0.125 +2024-07-27 23:28:09,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.74 vs. limit=15.0 +2024-07-27 23:28:17,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=77844.0, ans=0.07 +2024-07-27 23:28:24,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.43 vs. limit=22.5 +2024-07-27 23:28:33,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=77870.66666666667, ans=0.0 +2024-07-27 23:28:36,957 INFO [train.py:1114] (3/4) Epoch 6, batch 7300, loss[loss=0.1911, simple_loss=0.268, pruned_loss=0.05707, over 4843.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3102, pruned_loss=0.07499, over 940504.01 frames. 
], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:28:45,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=77897.33333333333, ans=0.0 +2024-07-27 23:28:48,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=77897.33333333333, ans=0.125 +2024-07-27 23:28:52,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=77910.66666666667, ans=0.125 +2024-07-27 23:28:55,455 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.159e+01 6.187e+01 6.781e+01 8.208e+01 1.800e+02, threshold=1.356e+02, percent-clipped=4.0 +2024-07-27 23:28:59,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=77924.0, ans=0.125 +2024-07-27 23:29:06,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=77937.33333333333, ans=0.2 +2024-07-27 23:29:08,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=77937.33333333333, ans=0.125 +2024-07-27 23:29:09,751 INFO [train.py:1114] (3/4) Epoch 6, batch 7350, loss[loss=0.1928, simple_loss=0.2741, pruned_loss=0.05577, over 4639.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.3094, pruned_loss=0.07452, over 939688.02 frames. ], batch size: 12, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:29:10,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.61 vs. limit=10.0 +2024-07-27 23:29:40,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=78004.0, ans=0.125 +2024-07-27 23:29:42,398 INFO [train.py:1114] (3/4) Epoch 6, batch 7400, loss[loss=0.2014, simple_loss=0.2842, pruned_loss=0.05932, over 4689.00 frames. ], tot_loss[loss=0.228, simple_loss=0.309, pruned_loss=0.07355, over 940749.92 frames. ], batch size: 13, lr: 1.22e-02, grad_scale: 32.0 +2024-07-27 23:29:49,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78030.66666666667, ans=0.1 +2024-07-27 23:29:53,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=78030.66666666667, ans=0.0 +2024-07-27 23:29:54,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=78030.66666666667, ans=0.0 +2024-07-27 23:29:57,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=78044.0, ans=0.0 +2024-07-27 23:29:58,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.82 vs. 
limit=15.0 +2024-07-27 23:30:00,719 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 6.318e+01 7.281e+01 8.792e+01 1.336e+02, threshold=1.456e+02, percent-clipped=0.0 +2024-07-27 23:30:12,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=78057.33333333333, ans=0.5 +2024-07-27 23:30:17,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.42 vs. limit=15.0 +2024-07-27 23:30:26,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=78070.66666666667, ans=0.0 +2024-07-27 23:30:27,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=78070.66666666667, ans=0.125 +2024-07-27 23:30:27,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=78070.66666666667, ans=0.2 +2024-07-27 23:30:31,755 INFO [train.py:1114] (3/4) Epoch 6, batch 7450, loss[loss=0.2078, simple_loss=0.2747, pruned_loss=0.07047, over 4606.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3081, pruned_loss=0.07373, over 938158.23 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:30:33,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=78084.0, ans=0.2 +2024-07-27 23:30:39,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-07-27 23:30:53,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=78124.0, ans=0.07 +2024-07-27 23:31:04,797 INFO [train.py:1114] (3/4) Epoch 6, batch 7500, loss[loss=0.3742, simple_loss=0.4081, pruned_loss=0.1702, over 3241.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3092, pruned_loss=0.07493, over 935971.75 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:31:12,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=78164.0, ans=0.125 +2024-07-27 23:31:17,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=78164.0, ans=0.125 +2024-07-27 23:31:19,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78177.33333333333, ans=0.1 +2024-07-27 23:31:24,015 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.339e+01 6.209e+01 6.853e+01 7.670e+01 1.087e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-27 23:31:30,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=78190.66666666667, ans=0.07 +2024-07-27 23:31:35,464 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.70 vs. limit=10.0 +2024-07-27 23:31:37,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=78204.0, ans=0.2 +2024-07-27 23:31:38,276 INFO [train.py:1114] (3/4) Epoch 6, batch 7550, loss[loss=0.2272, simple_loss=0.3114, pruned_loss=0.07147, over 4629.00 frames. 
], tot_loss[loss=0.2311, simple_loss=0.3114, pruned_loss=0.07538, over 935917.91 frames. ], batch size: 17, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:31:43,075 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:31:46,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=78230.66666666667, ans=0.025 +2024-07-27 23:32:11,965 INFO [train.py:1114] (3/4) Epoch 6, batch 7600, loss[loss=0.2401, simple_loss=0.3214, pruned_loss=0.0794, over 4806.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3113, pruned_loss=0.07528, over 937619.03 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:32:33,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.93 vs. limit=6.0 +2024-07-27 23:32:33,860 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.827e+01 6.092e+01 6.628e+01 7.251e+01 1.124e+02, threshold=1.326e+02, percent-clipped=0.0 +2024-07-27 23:32:34,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=78310.66666666667, ans=0.1 +2024-07-27 23:32:34,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=78324.0, ans=0.2 +2024-07-27 23:32:42,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=78337.33333333333, ans=0.125 +2024-07-27 23:32:49,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.94 vs. limit=22.5 +2024-07-27 23:32:50,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=78337.33333333333, ans=0.2 +2024-07-27 23:32:52,073 INFO [train.py:1114] (3/4) Epoch 6, batch 7650, loss[loss=0.1954, simple_loss=0.2737, pruned_loss=0.05859, over 4944.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3112, pruned_loss=0.07549, over 936388.50 frames. ], batch size: 12, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:33:01,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0 +2024-07-27 23:33:09,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=78364.0, ans=0.2 +2024-07-27 23:33:37,391 INFO [train.py:1114] (3/4) Epoch 6, batch 7700, loss[loss=0.2225, simple_loss=0.3021, pruned_loss=0.07148, over 4692.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3132, pruned_loss=0.07686, over 933447.18 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:33:50,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=78417.33333333333, ans=0.0 +2024-07-27 23:33:51,559 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:33:52,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.38 vs. 
limit=15.0 +2024-07-27 23:34:11,044 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 6.189e+01 6.836e+01 7.774e+01 1.390e+02, threshold=1.367e+02, percent-clipped=1.0 +2024-07-27 23:34:28,154 INFO [train.py:1114] (3/4) Epoch 6, batch 7750, loss[loss=0.2282, simple_loss=0.3095, pruned_loss=0.07346, over 4931.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3142, pruned_loss=0.07748, over 934869.78 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:34:32,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=78484.0, ans=0.1 +2024-07-27 23:34:42,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.24 vs. limit=15.0 +2024-07-27 23:34:43,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=78510.66666666667, ans=0.125 +2024-07-27 23:34:56,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-27 23:35:04,336 INFO [train.py:1114] (3/4) Epoch 6, batch 7800, loss[loss=0.267, simple_loss=0.3538, pruned_loss=0.09014, over 4656.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3138, pruned_loss=0.07672, over 936552.50 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:35:10,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=78564.0, ans=0.125 +2024-07-27 23:35:13,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=78564.0, ans=0.0 +2024-07-27 23:35:18,075 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-07-27 23:35:21,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=78577.33333333333, ans=0.5 +2024-07-27 23:35:21,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=78577.33333333333, ans=0.125 +2024-07-27 23:35:21,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.82 vs. limit=15.0 +2024-07-27 23:35:21,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=78577.33333333333, ans=0.125 +2024-07-27 23:35:22,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 6.050e+01 6.523e+01 7.521e+01 9.871e+01, threshold=1.305e+02, percent-clipped=0.0 +2024-07-27 23:35:27,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=78590.66666666667, ans=0.025 +2024-07-27 23:35:35,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=78604.0, ans=0.0 +2024-07-27 23:35:36,951 INFO [train.py:1114] (3/4) Epoch 6, batch 7850, loss[loss=0.1916, simple_loss=0.2698, pruned_loss=0.05663, over 4565.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3131, pruned_loss=0.07646, over 936291.37 frames. 
], batch size: 10, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:35:37,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.97 vs. limit=22.5 +2024-07-27 23:35:43,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78630.66666666667, ans=0.1 +2024-07-27 23:35:54,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.42 vs. limit=15.0 +2024-07-27 23:35:54,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=78644.0, ans=0.125 +2024-07-27 23:35:55,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=78644.0, ans=0.125 +2024-07-27 23:35:58,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.74 vs. limit=15.0 +2024-07-27 23:36:02,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=78657.33333333333, ans=0.2 +2024-07-27 23:36:11,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=78684.0, ans=0.0 +2024-07-27 23:36:11,568 INFO [train.py:1114] (3/4) Epoch 6, batch 7900, loss[loss=0.229, simple_loss=0.3191, pruned_loss=0.0694, over 4881.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.3137, pruned_loss=0.07644, over 933558.04 frames. ], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:36:23,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.36 vs. limit=22.5 +2024-07-27 23:36:27,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=78710.66666666667, ans=0.125 +2024-07-27 23:36:29,746 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.141e+01 6.160e+01 7.004e+01 8.333e+01 1.233e+02, threshold=1.401e+02, percent-clipped=0.0 +2024-07-27 23:36:29,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78710.66666666667, ans=0.125 +2024-07-27 23:36:41,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=78737.33333333333, ans=0.125 +2024-07-27 23:36:41,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=78737.33333333333, ans=0.1 +2024-07-27 23:36:44,059 INFO [train.py:1114] (3/4) Epoch 6, batch 7950, loss[loss=0.3213, simple_loss=0.367, pruned_loss=0.1379, over 3526.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3119, pruned_loss=0.0754, over 935749.40 frames. ], batch size: 35, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:36:44,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.79 vs. 
limit=15.0 +2024-07-27 23:36:48,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=78750.66666666667, ans=0.1 +2024-07-27 23:36:57,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=78777.33333333333, ans=0.125 +2024-07-27 23:37:02,261 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.55 vs. limit=10.0 +2024-07-27 23:37:02,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=78790.66666666667, ans=0.025 +2024-07-27 23:37:02,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-27 23:37:09,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=78804.0, ans=0.0 +2024-07-27 23:37:09,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=78804.0, ans=0.0 +2024-07-27 23:37:12,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=78804.0, ans=0.0 +2024-07-27 23:37:12,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0 +2024-07-27 23:37:13,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=78804.0, ans=0.125 +2024-07-27 23:37:15,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=78804.0, ans=0.125 +2024-07-27 23:37:16,328 INFO [train.py:1114] (3/4) Epoch 6, batch 8000, loss[loss=0.2213, simple_loss=0.3044, pruned_loss=0.06911, over 4623.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3108, pruned_loss=0.07489, over 934536.73 frames. ], batch size: 11, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:37:17,192 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-27 23:37:34,332 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.441e+01 5.938e+01 6.564e+01 7.603e+01 1.476e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-27 23:37:35,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=78857.33333333333, ans=0.0 +2024-07-27 23:37:37,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=78857.33333333333, ans=0.125 +2024-07-27 23:37:48,718 INFO [train.py:1114] (3/4) Epoch 6, batch 8050, loss[loss=0.2076, simple_loss=0.2988, pruned_loss=0.05815, over 4813.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3114, pruned_loss=0.07482, over 934314.34 frames. 
], batch size: 14, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:37:50,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=78884.0, ans=0.2 +2024-07-27 23:38:14,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=78924.0, ans=0.0 +2024-07-27 23:38:15,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=78937.33333333333, ans=0.125 +2024-07-27 23:38:16,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=78937.33333333333, ans=0.0 +2024-07-27 23:38:23,502 INFO [train.py:1114] (3/4) Epoch 6, batch 8100, loss[loss=0.2514, simple_loss=0.3368, pruned_loss=0.083, over 4803.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3119, pruned_loss=0.07494, over 933734.84 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:38:23,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=78950.66666666667, ans=0.0 +2024-07-27 23:38:37,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=13.33 vs. limit=15.0 +2024-07-27 23:38:41,456 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.051e+01 5.969e+01 6.429e+01 6.997e+01 9.390e+01, threshold=1.286e+02, percent-clipped=0.0 +2024-07-27 23:38:49,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=79004.0, ans=0.0 +2024-07-27 23:38:53,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79004.0, ans=0.125 +2024-07-27 23:38:55,583 INFO [train.py:1114] (3/4) Epoch 6, batch 8150, loss[loss=0.2468, simple_loss=0.3443, pruned_loss=0.07468, over 4809.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.3114, pruned_loss=0.07537, over 937309.99 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:39:10,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=79044.0, ans=0.0 +2024-07-27 23:39:14,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.55 vs. limit=10.0 +2024-07-27 23:39:18,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79057.33333333333, ans=0.1 +2024-07-27 23:39:28,470 INFO [train.py:1114] (3/4) Epoch 6, batch 8200, loss[loss=0.2663, simple_loss=0.3395, pruned_loss=0.09649, over 4798.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.3109, pruned_loss=0.07464, over 938277.20 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0 +2024-07-27 23:39:31,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.95 vs. 
limit=12.0
+2024-07-27 23:39:32,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=79084.0, ans=0.0
+2024-07-27 23:39:47,311 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.029e+01 5.934e+01 6.554e+01 7.415e+01 1.580e+02, threshold=1.311e+02, percent-clipped=1.0
+2024-07-27 23:39:57,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.84 vs. limit=15.0
+2024-07-27 23:40:01,171 INFO [train.py:1114] (3/4) Epoch 6, batch 8250, loss[loss=0.2006, simple_loss=0.2818, pruned_loss=0.05967, over 4898.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3108, pruned_loss=0.07474, over 939139.48 frames. ], batch size: 13, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:40:09,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=79164.0, ans=0.0
+2024-07-27 23:40:10,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=79164.0, ans=0.125
+2024-07-27 23:40:15,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=79177.33333333333, ans=0.125
+2024-07-27 23:40:17,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0
+2024-07-27 23:40:23,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=79190.66666666667, ans=0.0
+2024-07-27 23:40:33,941 INFO [train.py:1114] (3/4) Epoch 6, batch 8300, loss[loss=0.2259, simple_loss=0.2972, pruned_loss=0.07728, over 4900.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3113, pruned_loss=0.07512, over 938556.08 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:40:43,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=79230.66666666667, ans=0.0
+2024-07-27 23:40:44,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=79230.66666666667, ans=0.0
+2024-07-27 23:40:46,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.24 vs. limit=6.0
+2024-07-27 23:40:54,342 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.146e+01 5.976e+01 6.704e+01 7.897e+01 1.175e+02, threshold=1.341e+02, percent-clipped=0.0
+2024-07-27 23:40:58,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=79257.33333333333, ans=0.125
+2024-07-27 23:40:58,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.04 vs. limit=15.0
+2024-07-27 23:41:08,393 INFO [train.py:1114] (3/4) Epoch 6, batch 8350, loss[loss=0.2364, simple_loss=0.3271, pruned_loss=0.07286, over 4787.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3107, pruned_loss=0.0746, over 941280.41 frames. ], batch size: 15, lr: 1.21e-02, grad_scale: 32.0
+2024-07-27 23:41:10,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=79284.0, ans=0.125
+2024-07-27 23:41:13,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=79284.0, ans=0.125
+2024-07-27 23:41:13,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=79284.0, ans=0.0
+2024-07-27 23:41:15,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=79297.33333333333, ans=0.125
+2024-07-27 23:41:18,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=79297.33333333333, ans=0.07
+2024-07-27 23:41:27,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=79324.0, ans=0.0
+2024-07-27 23:41:40,605 INFO [train.py:1114] (3/4) Epoch 6, batch 8400, loss[loss=0.1821, simple_loss=0.265, pruned_loss=0.04959, over 4779.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.309, pruned_loss=0.07379, over 939866.96 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:41:45,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=79350.66666666667, ans=0.125
+2024-07-27 23:41:54,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=79377.33333333333, ans=0.125
+2024-07-27 23:41:54,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=79377.33333333333, ans=0.1
+2024-07-27 23:41:58,572 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 6.271e+01 7.007e+01 8.306e+01 1.253e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-27 23:42:11,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79417.33333333333, ans=0.1
+2024-07-27 23:42:12,520 INFO [train.py:1114] (3/4) Epoch 6, batch 8450, loss[loss=0.2659, simple_loss=0.3448, pruned_loss=0.09352, over 4806.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3095, pruned_loss=0.07369, over 938751.64 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:42:18,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=79430.66666666667, ans=0.0
+2024-07-27 23:42:29,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=79444.0, ans=0.0
+2024-07-27 23:42:31,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79457.33333333333, ans=0.1
+2024-07-27 23:42:43,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=79470.66666666667, ans=0.125
+2024-07-27 23:42:45,638 INFO [train.py:1114] (3/4) Epoch 6, batch 8500, loss[loss=0.196, simple_loss=0.2646, pruned_loss=0.06373, over 4610.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3083, pruned_loss=0.07316, over 938236.19 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:42:51,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=79497.33333333333, ans=0.0
+2024-07-27 23:42:56,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=79497.33333333333, ans=0.125
+2024-07-27 23:43:00,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79510.66666666667, ans=0.1
+2024-07-27 23:43:04,879 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.862e+01 6.704e+01 7.850e+01 1.312e+02, threshold=1.341e+02, percent-clipped=0.0
+2024-07-27 23:43:06,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=79524.0, ans=0.0
+2024-07-27 23:43:15,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=79537.33333333333, ans=0.125
+2024-07-27 23:43:19,074 INFO [train.py:1114] (3/4) Epoch 6, batch 8550, loss[loss=0.2203, simple_loss=0.2918, pruned_loss=0.07444, over 4788.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.3087, pruned_loss=0.07417, over 939144.48 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:43:21,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=79550.66666666667, ans=0.125
+2024-07-27 23:43:23,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=15.0
+2024-07-27 23:43:33,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=79577.33333333333, ans=0.0
+2024-07-27 23:43:34,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=79577.33333333333, ans=0.125
+2024-07-27 23:43:48,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=79604.0, ans=0.0
+2024-07-27 23:43:51,127 INFO [train.py:1114] (3/4) Epoch 6, batch 8600, loss[loss=0.2525, simple_loss=0.33, pruned_loss=0.08753, over 4805.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3092, pruned_loss=0.075, over 938888.99 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:44:09,903 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.199e+01 6.001e+01 6.460e+01 7.651e+01 1.281e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-27 23:44:17,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=79670.66666666667, ans=0.125
+2024-07-27 23:44:24,677 INFO [train.py:1114] (3/4) Epoch 6, batch 8650, loss[loss=0.2635, simple_loss=0.3386, pruned_loss=0.0942, over 4896.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3102, pruned_loss=0.07525, over 940095.35 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:44:29,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79684.0, ans=0.1
+2024-07-27 23:44:30,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=79697.33333333333, ans=0.0
+2024-07-27 23:44:45,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.48 vs. limit=15.0
+2024-07-27 23:44:48,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=79724.0, ans=0.125
+2024-07-27 23:44:54,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=79737.33333333333, ans=0.125
+2024-07-27 23:44:56,638 INFO [train.py:1114] (3/4) Epoch 6, batch 8700, loss[loss=0.2483, simple_loss=0.3272, pruned_loss=0.08468, over 4759.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.3115, pruned_loss=0.07613, over 937146.55 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:45:04,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=79764.0, ans=0.2
+2024-07-27 23:45:09,891 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:45:14,454 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.012e+01 6.110e+01 6.862e+01 8.564e+01 1.344e+02, threshold=1.372e+02, percent-clipped=1.0
+2024-07-27 23:45:17,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=79790.66666666667, ans=0.07
+2024-07-27 23:45:28,842 INFO [train.py:1114] (3/4) Epoch 6, batch 8750, loss[loss=0.2481, simple_loss=0.3364, pruned_loss=0.07992, over 4684.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3115, pruned_loss=0.07643, over 935827.42 frames. ], batch size: 15, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:45:33,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=79817.33333333333, ans=0.125
+2024-07-27 23:45:42,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=79844.0, ans=0.125
+2024-07-27 23:45:52,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=79857.33333333333, ans=0.125
+2024-07-27 23:45:52,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=79857.33333333333, ans=0.0
+2024-07-27 23:45:56,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=79870.66666666667, ans=0.0
+2024-07-27 23:46:01,303 INFO [train.py:1114] (3/4) Epoch 6, batch 8800, loss[loss=0.1949, simple_loss=0.2869, pruned_loss=0.05147, over 4935.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3124, pruned_loss=0.07658, over 937100.85 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:46:07,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=79897.33333333333, ans=0.1
+2024-07-27 23:46:09,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=79897.33333333333, ans=0.125
+2024-07-27 23:46:14,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.51 vs. limit=22.5
+2024-07-27 23:46:19,209 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.815e+01 6.538e+01 7.322e+01 9.632e+01, threshold=1.308e+02, percent-clipped=0.0
+2024-07-27 23:46:23,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79924.0, ans=0.1
+2024-07-27 23:46:28,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=79937.33333333333, ans=0.0
+2024-07-27 23:46:30,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=79937.33333333333, ans=0.1
+2024-07-27 23:46:33,567 INFO [train.py:1114] (3/4) Epoch 6, batch 8850, loss[loss=0.2665, simple_loss=0.3255, pruned_loss=0.1037, over 4520.00 frames. ], tot_loss[loss=0.2316, simple_loss=0.3111, pruned_loss=0.07602, over 931908.51 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:46:35,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=79950.66666666667, ans=0.0
+2024-07-27 23:46:39,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=79964.0, ans=0.125
+2024-07-27 23:46:39,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=79964.0, ans=0.0
+2024-07-27 23:46:55,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.45 vs. limit=10.0
+2024-07-27 23:47:04,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=79977.33333333333, ans=0.0
+2024-07-27 23:47:07,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=79977.33333333333, ans=0.125
+2024-07-27 23:47:16,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=79990.66666666667, ans=0.125
+2024-07-27 23:47:35,385 INFO [train.py:1114] (3/4) Epoch 6, batch 8900, loss[loss=0.1882, simple_loss=0.2634, pruned_loss=0.05651, over 4942.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3118, pruned_loss=0.0763, over 930368.62 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:47:42,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=80030.66666666667, ans=15.0
+2024-07-27 23:47:43,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=80030.66666666667, ans=0.09899494936611666
+2024-07-27 23:47:43,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80030.66666666667, ans=0.1
+2024-07-27 23:47:50,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.83 vs. limit=15.0
+2024-07-27 23:47:53,363 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 6.167e+01 6.816e+01 7.855e+01 1.273e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-27 23:47:58,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=80057.33333333333, ans=0.2
+2024-07-27 23:47:59,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80057.33333333333, ans=0.1
+2024-07-27 23:48:07,383 INFO [train.py:1114] (3/4) Epoch 6, batch 8950, loss[loss=0.2931, simple_loss=0.3612, pruned_loss=0.1125, over 4502.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3123, pruned_loss=0.07619, over 931140.89 frames. ], batch size: 21, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:48:07,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=80084.0, ans=0.125
+2024-07-27 23:48:17,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=80097.33333333333, ans=10.0
+2024-07-27 23:48:40,047 INFO [train.py:1114] (3/4) Epoch 6, batch 9000, loss[loss=0.2216, simple_loss=0.3107, pruned_loss=0.06622, over 4635.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3104, pruned_loss=0.07522, over 934083.98 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:48:40,047 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-27 23:48:45,518 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.8175, 2.8122, 3.4499, 2.4312], device='cuda:3')
+2024-07-27 23:48:50,202 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.5845, 3.7410, 3.9917, 3.7770], device='cuda:3')
+2024-07-27 23:48:52,408 INFO [train.py:1146] (3/4) Epoch 6, validation: loss=0.1898, simple_loss=0.2938, pruned_loss=0.0429, over 944034.00 frames.
+2024-07-27 23:48:52,409 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-27 23:49:06,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=80177.33333333333, ans=0.125
+2024-07-27 23:49:06,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=80177.33333333333, ans=0.125
+2024-07-27 23:49:08,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80177.33333333333, ans=0.125
+2024-07-27 23:49:10,600 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.150e+01 6.230e+01 7.342e+01 8.976e+01 1.203e+02, threshold=1.468e+02, percent-clipped=0.0
+2024-07-27 23:49:11,580 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-27 23:49:11,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.73 vs. limit=15.0
+2024-07-27 23:49:13,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80190.66666666667, ans=0.1
+2024-07-27 23:49:17,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=80204.0, ans=0.2
+2024-07-27 23:49:21,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=80204.0, ans=0.125
+2024-07-27 23:49:25,569 INFO [train.py:1114] (3/4) Epoch 6, batch 9050, loss[loss=0.1693, simple_loss=0.2515, pruned_loss=0.04353, over 4514.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3108, pruned_loss=0.07535, over 934511.04 frames. ], batch size: 10, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:49:29,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.57 vs. limit=10.0
+2024-07-27 23:49:29,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=80217.33333333333, ans=0.0
+2024-07-27 23:49:33,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=80230.66666666667, ans=0.05
+2024-07-27 23:49:36,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80230.66666666667, ans=0.1
+2024-07-27 23:49:44,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=80244.0, ans=0.125
+2024-07-27 23:49:52,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=15.33 vs. limit=15.0
+2024-07-27 23:50:03,561 INFO [train.py:1114] (3/4) Epoch 6, batch 9100, loss[loss=0.2501, simple_loss=0.34, pruned_loss=0.08004, over 4921.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.3097, pruned_loss=0.07497, over 937652.70 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:50:05,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=80284.0, ans=0.125
+2024-07-27 23:50:13,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=80297.33333333333, ans=0.1
+2024-07-27 23:50:20,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=80310.66666666667, ans=0.0
+2024-07-27 23:50:21,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=80310.66666666667, ans=15.0
+2024-07-27 23:50:21,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 6.154e+01 7.130e+01 8.632e+01 1.081e+02, threshold=1.426e+02, percent-clipped=0.0
+2024-07-27 23:50:21,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. limit=10.0
+2024-07-27 23:50:32,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=80337.33333333333, ans=0.0
+2024-07-27 23:50:35,539 INFO [train.py:1114] (3/4) Epoch 6, batch 9150, loss[loss=0.206, simple_loss=0.2965, pruned_loss=0.05774, over 4809.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3106, pruned_loss=0.07523, over 936401.18 frames. ], batch size: 14, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:50:37,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.94 vs. limit=15.0
+2024-07-27 23:50:44,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.06 vs. limit=15.0
+2024-07-27 23:50:55,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=80390.66666666667, ans=0.05
+2024-07-27 23:51:10,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=80390.66666666667, ans=0.125
+2024-07-27 23:51:14,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.45 vs. limit=15.0
+2024-07-27 23:51:18,718 INFO [train.py:1114] (3/4) Epoch 6, batch 9200, loss[loss=0.1869, simple_loss=0.2699, pruned_loss=0.05199, over 4837.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.3096, pruned_loss=0.07467, over 938095.81 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:51:27,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=80430.66666666667, ans=0.035
+2024-07-27 23:51:28,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=80430.66666666667, ans=0.125
+2024-07-27 23:51:38,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.82 vs. limit=15.0
+2024-07-27 23:51:40,367 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 6.086e+01 6.690e+01 8.259e+01 1.289e+02, threshold=1.338e+02, percent-clipped=0.0
+2024-07-27 23:51:43,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.70 vs. limit=15.0
+2024-07-27 23:51:49,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80470.66666666667, ans=0.125
+2024-07-27 23:51:52,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0
+2024-07-27 23:51:54,185 INFO [train.py:1114] (3/4) Epoch 6, batch 9250, loss[loss=0.2324, simple_loss=0.324, pruned_loss=0.07036, over 4643.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3099, pruned_loss=0.07476, over 938899.47 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 64.0
+2024-07-27 23:52:00,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=80497.33333333333, ans=0.2
+2024-07-27 23:52:07,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=80510.66666666667, ans=0.0
+2024-07-27 23:52:10,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=80510.66666666667, ans=0.125
+2024-07-27 23:52:25,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=80550.66666666667, ans=0.0
+2024-07-27 23:52:26,149 INFO [train.py:1114] (3/4) Epoch 6, batch 9300, loss[loss=0.2283, simple_loss=0.291, pruned_loss=0.08273, over 4778.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.3096, pruned_loss=0.07483, over 937917.98 frames. ], batch size: 12, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:52:31,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=80564.0, ans=0.0
+2024-07-27 23:52:37,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=80564.0, ans=0.05
+2024-07-27 23:52:40,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=80577.33333333333, ans=0.125
+2024-07-27 23:52:42,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=80577.33333333333, ans=0.125
+2024-07-27 23:52:43,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.063e+01 5.901e+01 6.419e+01 7.368e+01 1.271e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-27 23:52:45,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80590.66666666667, ans=0.125
+2024-07-27 23:52:57,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=80617.33333333333, ans=0.125
+2024-07-27 23:52:58,703 INFO [train.py:1114] (3/4) Epoch 6, batch 9350, loss[loss=0.1752, simple_loss=0.2563, pruned_loss=0.047, over 4818.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3115, pruned_loss=0.07576, over 934347.50 frames. ], batch size: 11, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:53:00,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80617.33333333333, ans=0.1
+2024-07-27 23:53:07,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=80630.66666666667, ans=0.125
+2024-07-27 23:53:07,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=80630.66666666667, ans=0.125
+2024-07-27 23:53:21,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=80657.33333333333, ans=0.125
+2024-07-27 23:53:25,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=80670.66666666667, ans=0.125
+2024-07-27 23:53:30,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.59 vs. limit=22.5
+2024-07-27 23:53:31,203 INFO [train.py:1114] (3/4) Epoch 6, batch 9400, loss[loss=0.221, simple_loss=0.303, pruned_loss=0.06949, over 4693.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3115, pruned_loss=0.07574, over 932519.17 frames. ], batch size: 13, lr: 1.20e-02, grad_scale: 32.0
+2024-07-27 23:53:31,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=80684.0, ans=0.09899494936611666
+2024-07-27 23:53:32,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.85 vs. limit=15.0
+2024-07-27 23:53:32,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=80684.0, ans=0.125
+2024-07-27 23:53:36,001 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. limit=6.0
+2024-07-27 23:53:41,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=80697.33333333333, ans=0.125
+2024-07-27 23:53:48,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=80710.66666666667, ans=0.5
+2024-07-27 23:53:49,494 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.055e+01 7.065e+01 8.211e+01 1.397e+02, threshold=1.413e+02, percent-clipped=1.0
+2024-07-27 23:53:53,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80724.0, ans=0.1
+2024-07-27 23:53:58,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. limit=15.0
+2024-07-27 23:53:59,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=80737.33333333333, ans=0.125
+2024-07-27 23:54:01,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=80737.33333333333, ans=0.1
+2024-07-27 23:54:02,797 INFO [train.py:1114] (3/4) Epoch 6, batch 9450, loss[loss=0.2177, simple_loss=0.2899, pruned_loss=0.07276, over 4801.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3117, pruned_loss=0.07542, over 932243.36 frames. ], batch size: 11, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:54:04,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=80750.66666666667, ans=0.2
+2024-07-27 23:54:11,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=80764.0, ans=0.125
+2024-07-27 23:54:13,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=80764.0, ans=0.0
+2024-07-27 23:54:20,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=80777.33333333333, ans=0.125
+2024-07-27 23:54:23,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=80790.66666666667, ans=0.2
+2024-07-27 23:54:25,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=80790.66666666667, ans=0.0
+2024-07-27 23:54:30,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=80804.0, ans=0.125
+2024-07-27 23:54:34,484 INFO [train.py:1114] (3/4) Epoch 6, batch 9500, loss[loss=0.2153, simple_loss=0.2948, pruned_loss=0.06794, over 4715.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.3111, pruned_loss=0.07515, over 934440.83 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:54:39,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80830.66666666667, ans=0.1
+2024-07-27 23:54:49,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=80844.0, ans=0.2
+2024-07-27 23:54:52,209 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 6.030e+01 6.974e+01 8.015e+01 1.181e+02, threshold=1.395e+02, percent-clipped=0.0
+2024-07-27 23:54:57,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0
+2024-07-27 23:54:59,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=80870.66666666667, ans=0.0
+2024-07-27 23:55:04,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.92 vs. limit=22.5
+2024-07-27 23:55:05,207 INFO [train.py:1114] (3/4) Epoch 6, batch 9550, loss[loss=0.2014, simple_loss=0.272, pruned_loss=0.06536, over 4776.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.3102, pruned_loss=0.07463, over 931816.94 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:55:09,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=80884.0, ans=0.2
+2024-07-27 23:55:17,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.79 vs. limit=15.0
+2024-07-27 23:55:17,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=80910.66666666667, ans=0.125
+2024-07-27 23:55:32,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80937.33333333333, ans=0.1
+2024-07-27 23:55:38,081 INFO [train.py:1114] (3/4) Epoch 6, batch 9600, loss[loss=0.3198, simple_loss=0.3623, pruned_loss=0.1386, over 3398.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3108, pruned_loss=0.07504, over 930976.84 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:55:42,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=80950.66666666667, ans=0.05
+2024-07-27 23:55:45,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=80964.0, ans=0.1
+2024-07-27 23:55:56,585 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.004e+01 6.228e+01 7.001e+01 7.870e+01 1.117e+02, threshold=1.400e+02, percent-clipped=0.0
+2024-07-27 23:56:01,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=80990.66666666667, ans=0.1
+2024-07-27 23:58:43,521 INFO [train.py:1114] (3/4) Epoch 6, batch 9650, loss[loss=0.2538, simple_loss=0.3391, pruned_loss=0.08425, over 4831.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.3108, pruned_loss=0.07556, over 926750.12 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:58:56,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81044.0, ans=0.1
+2024-07-27 23:58:57,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=81044.0, ans=0.125
+2024-07-27 23:59:05,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=81057.33333333333, ans=0.125
+2024-07-27 23:59:15,244 INFO [train.py:1114] (3/4) Epoch 6, batch 9700, loss[loss=0.3283, simple_loss=0.3867, pruned_loss=0.1349, over 4250.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.3112, pruned_loss=0.07594, over 924548.57 frames. ], batch size: 26, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:59:15,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=81084.0, ans=0.035
+2024-07-27 23:59:22,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=81097.33333333333, ans=0.125
+2024-07-27 23:59:32,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=81110.66666666667, ans=0.2
+2024-07-27 23:59:33,224 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 6.355e+01 7.161e+01 8.228e+01 1.300e+02, threshold=1.432e+02, percent-clipped=0.0
+2024-07-27 23:59:38,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81124.0, ans=0.1
+2024-07-27 23:59:46,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=81150.66666666667, ans=0.0
+2024-07-27 23:59:46,569 INFO [train.py:1114] (3/4) Epoch 6, batch 9750, loss[loss=0.2465, simple_loss=0.3327, pruned_loss=0.08013, over 4666.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3099, pruned_loss=0.07525, over 925492.67 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0
+2024-07-27 23:59:50,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0
+2024-07-28 00:00:13,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=81164.0, ans=0.125
+2024-07-28 00:00:16,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=81177.33333333333, ans=0.5
+2024-07-28 00:00:18,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=81177.33333333333, ans=0.1
+2024-07-28 00:00:22,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=81190.66666666667, ans=0.125
+2024-07-28 00:00:23,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=81190.66666666667, ans=0.125
+2024-07-28 00:00:29,048 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:00:33,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=81204.0, ans=0.125
+2024-07-28 00:00:35,017 INFO [train.py:1114] (3/4) Epoch 6, batch 9800, loss[loss=0.2048, simple_loss=0.2789, pruned_loss=0.06536, over 4706.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3086, pruned_loss=0.07439, over 925506.98 frames. ], batch size: 12, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:00:49,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81244.0, ans=0.1
+2024-07-28 00:00:52,768 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.096e+01 6.416e+01 7.275e+01 8.758e+01 1.346e+02, threshold=1.455e+02, percent-clipped=0.0
+2024-07-28 00:01:05,407 INFO [train.py:1114] (3/4) Epoch 6, batch 9850, loss[loss=0.2749, simple_loss=0.3558, pruned_loss=0.09699, over 4906.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.31, pruned_loss=0.07512, over 928118.04 frames. ], batch size: 15, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:01:05,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.14 vs. limit=15.0
+2024-07-28 00:01:06,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=81284.0, ans=0.125
+2024-07-28 00:01:12,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=81297.33333333333, ans=0.125
+2024-07-28 00:01:14,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.28 vs. limit=22.5
+2024-07-28 00:01:18,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=81310.66666666667, ans=0.125
+2024-07-28 00:01:30,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.whiten.whitening_limit, batch_count=81324.0, ans=12.0
+2024-07-28 00:01:36,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=81350.66666666667, ans=0.125
+2024-07-28 00:01:37,307 INFO [train.py:1114] (3/4) Epoch 6, batch 9900, loss[loss=0.3101, simple_loss=0.3686, pruned_loss=0.1258, over 4852.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3117, pruned_loss=0.07663, over 926939.70 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:01:38,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=81350.66666666667, ans=0.025
+2024-07-28 00:01:43,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=81364.0, ans=0.0
+2024-07-28 00:01:46,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=81364.0, ans=0.0
+2024-07-28 00:01:50,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=81377.33333333333, ans=0.0
+2024-07-28 00:01:54,973 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.847e+01 6.249e+01 6.784e+01 7.688e+01 1.136e+02, threshold=1.357e+02, percent-clipped=0.0
+2024-07-28 00:01:55,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0
+2024-07-28 00:02:02,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=81404.0, ans=0.125
+2024-07-28 00:02:05,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=81404.0, ans=0.125
+2024-07-28 00:02:05,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0
+2024-07-28 00:02:07,896 INFO [train.py:1114] (3/4) Epoch 6, batch 9950, loss[loss=0.231, simple_loss=0.2904, pruned_loss=0.08585, over 4478.00 frames. ], tot_loss[loss=0.2321, simple_loss=0.311, pruned_loss=0.07659, over 929250.45 frames. ], batch size: 10, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:02:09,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=81417.33333333333, ans=0.1
+2024-07-28 00:02:13,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.72 vs. limit=15.0
+2024-07-28 00:02:14,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=81430.66666666667, ans=0.025
+2024-07-28 00:02:26,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=81457.33333333333, ans=10.0
+2024-07-28 00:02:28,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.36 vs. limit=8.0
+2024-07-28 00:02:29,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=81457.33333333333, ans=0.125
+2024-07-28 00:02:30,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=81457.33333333333, ans=0.125
+2024-07-28 00:02:32,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=81470.66666666667, ans=0.125
+2024-07-28 00:02:35,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=81470.66666666667, ans=0.2
+2024-07-28 00:02:39,509 INFO [train.py:1114] (3/4) Epoch 6, batch 10000, loss[loss=0.2452, simple_loss=0.32, pruned_loss=0.08523, over 4628.00 frames. ], tot_loss[loss=0.2347, simple_loss=0.314, pruned_loss=0.07775, over 926793.23 frames. ], batch size: 16, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:02:42,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=81484.0, ans=0.125
+2024-07-28 00:02:44,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=81484.0, ans=0.125
+2024-07-28 00:02:50,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=81497.33333333333, ans=0.125
+2024-07-28 00:02:57,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.998e+01 6.471e+01 7.600e+01 1.218e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-28 00:02:59,725 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:03:01,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=81524.0, ans=0.125
+2024-07-28 00:03:01,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.16 vs. limit=22.5
+2024-07-28 00:03:07,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=81537.33333333333, ans=0.0
+2024-07-28 00:03:09,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.15 vs. limit=22.5
+2024-07-28 00:03:10,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=81550.66666666667, ans=0.1
+2024-07-28 00:03:11,405 INFO [train.py:1114] (3/4) Epoch 6, batch 10050, loss[loss=0.3224, simple_loss=0.3858, pruned_loss=0.1295, over 3138.00 frames. ], tot_loss[loss=0.24, simple_loss=0.3186, pruned_loss=0.08068, over 913321.00 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:03:12,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=47.25 vs. limit=15.0
+2024-07-28 00:03:14,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=81550.66666666667, ans=0.125
+2024-07-28 00:03:21,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.55 vs. limit=15.0
+2024-07-28 00:03:25,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.64 vs. limit=15.0
+2024-07-28 00:03:30,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=81577.33333333333, ans=15.0
+2024-07-28 00:03:35,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=81590.66666666667, ans=0.0
+2024-07-28 00:03:36,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=81590.66666666667, ans=0.2
+2024-07-28 00:03:37,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=12.0
+2024-07-28 00:03:44,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=81604.0, ans=0.125
+2024-07-28 00:03:45,251 INFO [train.py:1114] (3/4) Epoch 6, batch 10100, loss[loss=0.2688, simple_loss=0.3293, pruned_loss=0.1042, over 3311.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3251, pruned_loss=0.08783, over 860975.93 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:03:48,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.17 vs. limit=15.0
+2024-07-28 00:03:55,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=81630.66666666667, ans=0.125
+2024-07-28 00:04:04,051 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.601e+01 6.841e+01 7.276e+01 7.854e+01 1.337e+02, threshold=1.455e+02, percent-clipped=1.0
+2024-07-28 00:04:17,439 INFO [train.py:1114] (3/4) Epoch 6, batch 10150, loss[loss=0.2892, simple_loss=0.3451, pruned_loss=0.1166, over 3385.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3298, pruned_loss=0.09267, over 820774.59 frames. ], batch size: 36, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:04:30,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=81710.66666666667, ans=0.125
+2024-07-28 00:04:33,761 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:04:35,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=81724.0, ans=0.0
+2024-07-28 00:04:38,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=81724.0, ans=0.0
+2024-07-28 00:04:39,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=81724.0, ans=0.1
+2024-07-28 00:04:48,174 INFO [train.py:1114] (3/4) Epoch 6, batch 10200, loss[loss=0.2572, simple_loss=0.3332, pruned_loss=0.09058, over 3339.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3326, pruned_loss=0.09651, over 790009.13 frames. ], batch size: 35, lr: 1.19e-02, grad_scale: 32.0
+2024-07-28 00:04:53,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.41 vs. limit=15.0
+2024-07-28 00:04:57,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=81764.0, ans=0.09899494936611666
+2024-07-28 00:04:59,865 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:05:45,915 INFO [train.py:1114] (3/4) Epoch 7, batch 0, loss[loss=0.1992, simple_loss=0.2861, pruned_loss=0.05615, over 4848.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2861, pruned_loss=0.05615, over 4848.00 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:05:45,916 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 00:05:57,486 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.1928, simple_loss=0.2981, pruned_loss=0.04372, over 944034.00 frames.
+2024-07-28 00:05:57,486 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 00:05:59,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.05 vs. limit=10.0
+2024-07-28 00:06:04,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.706e+01 6.568e+01 7.074e+01 7.483e+01 1.038e+02, threshold=1.415e+02, percent-clipped=0.0
+2024-07-28 00:06:08,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=81793.33333333333, ans=0.2
+2024-07-28 00:06:21,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.89 vs. limit=15.0
+2024-07-28 00:06:22,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=81820.0, ans=0.125
+2024-07-28 00:06:28,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.84 vs. limit=15.0
+2024-07-28 00:06:33,936 INFO [train.py:1114] (3/4) Epoch 7, batch 50, loss[loss=0.1792, simple_loss=0.2532, pruned_loss=0.05257, over 4601.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3161, pruned_loss=0.0751, over 206659.16 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:06:54,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=81886.66666666667, ans=0.125
+2024-07-28 00:06:56,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=81886.66666666667, ans=0.1
+2024-07-28 00:07:07,527 INFO [train.py:1114] (3/4) Epoch 7, batch 100, loss[loss=0.2533, simple_loss=0.3146, pruned_loss=0.096, over 4638.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.3139, pruned_loss=0.07383, over 365093.98 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:07:12,079 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.914e+01 6.777e+01 7.920e+01 1.192e+02, threshold=1.355e+02, percent-clipped=0.0
+2024-07-28 00:07:24,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=81940.0, ans=0.0
+2024-07-28 00:07:28,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.24 vs. limit=15.0
+2024-07-28 00:07:35,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=81966.66666666667, ans=0.1
+2024-07-28 00:07:36,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=81966.66666666667, ans=0.125
+2024-07-28 00:07:40,137 INFO [train.py:1114] (3/4) Epoch 7, batch 150, loss[loss=0.2115, simple_loss=0.2952, pruned_loss=0.06391, over 4609.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.3123, pruned_loss=0.07423, over 493640.06 frames. ], batch size: 11, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:07:41,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=81980.0, ans=0.125
+2024-07-28 00:07:58,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=82006.66666666667, ans=0.0
+2024-07-28 00:08:01,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=82020.0, ans=0.0
+2024-07-28 00:08:11,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=82033.33333333333, ans=0.025
+2024-07-28 00:08:12,770 INFO [train.py:1114] (3/4) Epoch 7, batch 200, loss[loss=0.299, simple_loss=0.3658, pruned_loss=0.1161, over 4582.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3108, pruned_loss=0.0741, over 593434.48 frames. ], batch size: 21, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:08:13,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=82046.66666666667, ans=0.125
+2024-07-28 00:08:13,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=82046.66666666667, ans=0.125
+2024-07-28 00:08:17,412 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.956e+01 6.544e+01 7.409e+01 1.468e+02, threshold=1.309e+02, percent-clipped=1.0
+2024-07-28 00:08:18,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=82046.66666666667, ans=0.125
+2024-07-28 00:08:25,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=15.0
+2024-07-28 00:08:29,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=82073.33333333333, ans=0.2
+2024-07-28 00:08:35,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=82086.66666666667, ans=0.95
+2024-07-28 00:08:37,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=82086.66666666667, ans=0.2
+2024-07-28 00:08:46,289 INFO [train.py:1114] (3/4) Epoch 7, batch 250, loss[loss=0.2423, simple_loss=0.3311, pruned_loss=0.07678, over 4625.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.3111, pruned_loss=0.07442, over 670137.41 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:08:49,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=82113.33333333333, ans=0.125
+2024-07-28 00:09:06,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.07 vs. limit=6.0
+2024-07-28 00:09:09,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=82153.33333333333, ans=0.1
+2024-07-28 00:09:15,731 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:09:15,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=82166.66666666667, ans=0.125
+2024-07-28 00:09:19,499 INFO [train.py:1114] (3/4) Epoch 7, batch 300, loss[loss=0.1984, simple_loss=0.2964, pruned_loss=0.05017, over 4808.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.3099, pruned_loss=0.07375, over 729953.44 frames. ], batch size: 15, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:09:21,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82180.0, ans=0.125
+2024-07-28 00:09:22,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=82180.0, ans=0.0
+2024-07-28 00:09:24,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.988e+01 6.705e+01 7.891e+01 1.591e+02, threshold=1.341e+02, percent-clipped=1.0
+2024-07-28 00:09:30,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.49 vs. limit=6.0
+2024-07-28 00:09:30,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=82193.33333333333, ans=0.125
+2024-07-28 00:09:45,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=82220.0, ans=0.125
+2024-07-28 00:09:47,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=82233.33333333333, ans=0.1
+2024-07-28 00:09:53,921 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.49 vs. limit=15.0
+2024-07-28 00:09:54,202 INFO [train.py:1114] (3/4) Epoch 7, batch 350, loss[loss=0.2171, simple_loss=0.2841, pruned_loss=0.07502, over 4940.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.3088, pruned_loss=0.07331, over 775774.61 frames. ], batch size: 12, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:09:57,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=82246.66666666667, ans=0.125
+2024-07-28 00:10:26,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=82300.0, ans=0.125
+2024-07-28 00:10:27,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=82300.0, ans=0.125
+2024-07-28 00:10:29,116 INFO [train.py:1114] (3/4) Epoch 7, batch 400, loss[loss=0.2341, simple_loss=0.3179, pruned_loss=0.07519, over 4692.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3076, pruned_loss=0.07274, over 813208.89 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:10:31,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=82313.33333333333, ans=0.125
+2024-07-28 00:10:33,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.839e+01 6.182e+01 6.903e+01 9.738e+01, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 00:10:33,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=82313.33333333333, ans=0.0
+2024-07-28 00:10:36,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=82326.66666666667, ans=0.125
+2024-07-28 00:10:39,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82326.66666666667, ans=0.125
+2024-07-28 00:10:43,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=82340.0, ans=0.035
+2024-07-28 00:10:43,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=82340.0, ans=0.125
+2024-07-28 00:10:45,412 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.76 vs. limit=15.0
+2024-07-28 00:11:00,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=82366.66666666667, ans=6.0
+2024-07-28 00:11:03,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=82366.66666666667, ans=0.0
+2024-07-28 00:11:04,410 INFO [train.py:1114] (3/4) Epoch 7, batch 450, loss[loss=0.2679, simple_loss=0.358, pruned_loss=0.08889, over 4635.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.3082, pruned_loss=0.07347, over 838676.23 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:11:08,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=82380.0, ans=0.125
+2024-07-28 00:11:11,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=82393.33333333333, ans=0.125
+2024-07-28 00:11:12,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=82393.33333333333, ans=0.025
+2024-07-28 00:11:13,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=82393.33333333333, ans=0.125
+2024-07-28 00:11:13,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.94 vs. limit=22.5
+2024-07-28 00:11:16,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=82393.33333333333, ans=0.07
+2024-07-28 00:11:17,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=82406.66666666667, ans=0.0
+2024-07-28 00:11:27,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82420.0, ans=0.1
+2024-07-28 00:11:33,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=82433.33333333333, ans=0.2
+2024-07-28 00:11:38,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=82433.33333333333, ans=0.1
+2024-07-28 00:11:39,246 INFO [train.py:1114] (3/4) Epoch 7, batch 500, loss[loss=0.2833, simple_loss=0.365, pruned_loss=0.1008, over 4685.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.3067, pruned_loss=0.07194, over 861167.64 frames. ], batch size: 15, lr: 1.11e-02, grad_scale: 32.0
+2024-07-28 00:11:44,367 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.805e+01 6.520e+01 7.491e+01 1.046e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 00:11:44,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82446.66666666667, ans=0.1
+2024-07-28 00:11:46,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=82460.0, ans=0.0
+2024-07-28 00:11:50,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.68 vs. limit=10.0
+2024-07-28 00:12:02,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=82486.66666666667, ans=0.0
+2024-07-28 00:12:06,568 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 00:12:09,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=82500.0, ans=0.025
+2024-07-28 00:12:10,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=82500.0, ans=0.0
+2024-07-28 00:12:12,269 INFO [train.py:1114] (3/4) Epoch 7, batch 550, loss[loss=0.2854, simple_loss=0.3717, pruned_loss=0.09951, over 4621.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3064, pruned_loss=0.0715, over 877149.61 frames. ], batch size: 17, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:12:14,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=82513.33333333333, ans=0.1
+2024-07-28 00:12:19,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=82526.66666666667, ans=0.125
+2024-07-28 00:12:29,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82540.0, ans=0.1
+2024-07-28 00:12:40,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=82553.33333333333, ans=0.025
+2024-07-28 00:12:44,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=82566.66666666667, ans=0.95
+2024-07-28 00:12:45,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=82566.66666666667, ans=0.0
+2024-07-28 00:12:47,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=82566.66666666667, ans=0.0
+2024-07-28 00:12:52,464 INFO [train.py:1114] (3/4) Epoch 7, batch 600, loss[loss=0.2396, simple_loss=0.3374, pruned_loss=0.07093, over 4622.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3063, pruned_loss=0.07103, over 891752.15 frames. ], batch size: 16, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:13:01,194 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.825e+01 6.471e+01 7.822e+01 1.372e+02, threshold=1.294e+02, percent-clipped=1.0
+2024-07-28 00:13:14,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.62 vs. limit=6.0
+2024-07-28 00:13:20,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.99 vs. limit=10.0
+2024-07-28 00:13:28,958 INFO [train.py:1114] (3/4) Epoch 7, batch 650, loss[loss=0.2152, simple_loss=0.3051, pruned_loss=0.06269, over 4760.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3063, pruned_loss=0.07167, over 903331.51 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0
+2024-07-28 00:13:29,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=82646.66666666667, ans=0.125
+2024-07-28 00:13:31,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=82646.66666666667, ans=0.125
+2024-07-28 00:14:02,614 INFO [train.py:1114] (3/4) Epoch 7, batch 700, loss[loss=0.2058, simple_loss=0.2772, pruned_loss=0.06716, over 4647.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3071, pruned_loss=0.0718, over 911729.76 frames.
], batch size: 12, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:04,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=82713.33333333333, ans=0.125 +2024-07-28 00:14:07,881 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.955e+01 6.627e+01 7.908e+01 1.237e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 00:14:10,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=82726.66666666667, ans=0.125 +2024-07-28 00:14:21,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-07-28 00:14:24,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=82753.33333333333, ans=0.0 +2024-07-28 00:14:27,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.45 vs. limit=6.0 +2024-07-28 00:14:35,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=82766.66666666667, ans=0.125 +2024-07-28 00:14:36,835 INFO [train.py:1114] (3/4) Epoch 7, batch 750, loss[loss=0.2103, simple_loss=0.2956, pruned_loss=0.06244, over 4690.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3064, pruned_loss=0.07134, over 918039.87 frames. ], batch size: 13, lr: 1.11e-02, grad_scale: 16.0 +2024-07-28 00:14:38,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=82780.0, ans=0.025 +2024-07-28 00:14:39,587 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:14:55,095 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:14:56,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=82820.0, ans=0.0 +2024-07-28 00:14:56,763 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 00:14:59,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=82820.0, ans=0.0 +2024-07-28 00:15:05,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=82833.33333333333, ans=0.1 +2024-07-28 00:15:10,150 INFO [train.py:1114] (3/4) Epoch 7, batch 800, loss[loss=0.2353, simple_loss=0.2909, pruned_loss=0.08988, over 4860.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3062, pruned_loss=0.0714, over 923361.57 frames. 
], batch size: 12, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:17,228 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.902e+01 6.465e+01 7.413e+01 1.020e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 00:15:19,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=82860.0, ans=0.125 +2024-07-28 00:15:34,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=82886.66666666667, ans=0.125 +2024-07-28 00:15:34,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=82886.66666666667, ans=0.125 +2024-07-28 00:15:39,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=82900.0, ans=0.125 +2024-07-28 00:15:46,839 INFO [train.py:1114] (3/4) Epoch 7, batch 850, loss[loss=0.2326, simple_loss=0.3228, pruned_loss=0.07119, over 4663.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3061, pruned_loss=0.07137, over 927613.46 frames. ], batch size: 14, lr: 1.11e-02, grad_scale: 32.0 +2024-07-28 00:15:52,315 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.77 vs. limit=15.0 +2024-07-28 00:15:56,125 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:15:57,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.75 vs. limit=15.0 +2024-07-28 00:15:58,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=82926.66666666667, ans=0.125 +2024-07-28 00:16:01,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82940.0, ans=0.125 +2024-07-28 00:16:03,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.49 vs. limit=15.0 +2024-07-28 00:16:06,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=82953.33333333333, ans=0.0 +2024-07-28 00:16:08,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=82953.33333333333, ans=0.2 +2024-07-28 00:16:14,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=82966.66666666667, ans=0.125 +2024-07-28 00:16:22,226 INFO [train.py:1114] (3/4) Epoch 7, batch 900, loss[loss=0.1886, simple_loss=0.2708, pruned_loss=0.05318, over 4852.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3072, pruned_loss=0.07201, over 928361.79 frames. ], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:16:24,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.99 vs. 
limit=15.0 +2024-07-28 00:16:26,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=82980.0, ans=0.125 +2024-07-28 00:16:27,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.904e+01 6.297e+01 6.765e+01 1.145e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 00:16:30,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.66 vs. limit=10.0 +2024-07-28 00:16:40,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.27 vs. limit=22.5 +2024-07-28 00:16:46,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=114.37 vs. limit=15.0 +2024-07-28 00:16:48,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=83033.33333333333, ans=0.0 +2024-07-28 00:16:50,726 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:16:56,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=83033.33333333333, ans=0.125 +2024-07-28 00:16:57,782 INFO [train.py:1114] (3/4) Epoch 7, batch 950, loss[loss=0.1883, simple_loss=0.2686, pruned_loss=0.05399, over 4776.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3066, pruned_loss=0.07159, over 930338.64 frames. ], batch size: 12, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:03,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=83060.0, ans=0.125 +2024-07-28 00:17:04,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.97 vs. limit=22.5 +2024-07-28 00:17:12,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.41 vs. limit=12.0 +2024-07-28 00:17:14,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=83073.33333333333, ans=0.0 +2024-07-28 00:17:27,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=83100.0, ans=0.0 +2024-07-28 00:17:31,226 INFO [train.py:1114] (3/4) Epoch 7, batch 1000, loss[loss=0.2273, simple_loss=0.3069, pruned_loss=0.07382, over 4956.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3066, pruned_loss=0.07178, over 929627.18 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:17:34,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=83113.33333333333, ans=0.125 +2024-07-28 00:17:36,723 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.037e+01 6.185e+01 7.251e+01 8.642e+01 1.358e+02, threshold=1.450e+02, percent-clipped=3.0 +2024-07-28 00:17:39,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=83126.66666666667, ans=0.0 +2024-07-28 00:17:53,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=83153.33333333333, ans=0.0 +2024-07-28 00:17:57,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=83166.66666666667, ans=0.0 +2024-07-28 00:18:05,098 INFO [train.py:1114] (3/4) Epoch 7, batch 1050, loss[loss=0.2229, simple_loss=0.3081, pruned_loss=0.06883, over 4873.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.306, pruned_loss=0.07138, over 931669.74 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:29,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=83193.33333333333, ans=0.2 +2024-07-28 00:18:29,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=83193.33333333333, ans=0.125 +2024-07-28 00:18:44,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=83233.33333333333, ans=0.0 +2024-07-28 00:18:50,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:18:50,762 INFO [train.py:1114] (3/4) Epoch 7, batch 1100, loss[loss=0.191, simple_loss=0.2842, pruned_loss=0.04887, over 4897.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3062, pruned_loss=0.0716, over 934245.79 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:18:52,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:18:52,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83246.66666666667, ans=0.1 +2024-07-28 00:18:52,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=83246.66666666667, ans=0.125 +2024-07-28 00:18:54,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=83246.66666666667, ans=0.0 +2024-07-28 00:18:56,149 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.849e+01 5.958e+01 6.479e+01 7.755e+01 1.091e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:18:58,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=83260.0, ans=0.2 +2024-07-28 00:19:10,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=83273.33333333333, ans=0.125 +2024-07-28 00:19:11,296 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:19:28,120 INFO [train.py:1114] (3/4) Epoch 7, batch 1150, loss[loss=0.218, simple_loss=0.3032, pruned_loss=0.06643, over 4894.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3049, pruned_loss=0.07101, over 934308.03 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:19:28,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=83313.33333333333, ans=0.04949747468305833 +2024-07-28 00:19:32,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=83313.33333333333, ans=0.0 +2024-07-28 00:19:34,132 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:19:40,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=83340.0, ans=0.125 +2024-07-28 00:19:46,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83340.0, ans=0.125 +2024-07-28 00:19:48,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.80 vs. 
limit=15.0 +2024-07-28 00:19:48,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=83340.0, ans=0.125 +2024-07-28 00:19:52,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=83353.33333333333, ans=0.125 +2024-07-28 00:19:54,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=83353.33333333333, ans=0.0 +2024-07-28 00:20:02,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=83366.66666666667, ans=0.0 +2024-07-28 00:20:05,240 INFO [train.py:1114] (3/4) Epoch 7, batch 1200, loss[loss=0.2307, simple_loss=0.3183, pruned_loss=0.07158, over 4871.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3047, pruned_loss=0.07103, over 933572.57 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:20:05,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.21 vs. limit=15.0 +2024-07-28 00:20:08,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.13 vs. limit=22.5 +2024-07-28 00:20:10,449 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.660e+01 6.364e+01 7.390e+01 1.227e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 00:20:15,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=83393.33333333333, ans=0.125 +2024-07-28 00:20:24,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=83420.0, ans=0.2 +2024-07-28 00:20:25,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0 +2024-07-28 00:20:37,999 INFO [train.py:1114] (3/4) Epoch 7, batch 1250, loss[loss=0.2709, simple_loss=0.3485, pruned_loss=0.09666, over 4801.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3053, pruned_loss=0.07095, over 937411.07 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:00,297 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:21:18,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=83500.0, ans=0.0 +2024-07-28 00:21:21,942 INFO [train.py:1114] (3/4) Epoch 7, batch 1300, loss[loss=0.2611, simple_loss=0.3441, pruned_loss=0.08902, over 4692.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3046, pruned_loss=0.071, over 939190.95 frames. ], batch size: 19, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:26,945 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.788e+01 6.480e+01 7.663e+01 1.256e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 00:21:27,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=83526.66666666667, ans=0.125 +2024-07-28 00:21:33,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.68 vs. 
limit=12.0 +2024-07-28 00:21:56,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=83580.0, ans=0.025 +2024-07-28 00:21:56,800 INFO [train.py:1114] (3/4) Epoch 7, batch 1350, loss[loss=0.2155, simple_loss=0.2987, pruned_loss=0.06615, over 4754.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3041, pruned_loss=0.07047, over 941372.82 frames. ], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:21:57,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.26 vs. limit=15.0 +2024-07-28 00:22:00,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=83580.0, ans=0.0 +2024-07-28 00:22:13,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=83606.66666666667, ans=0.95 +2024-07-28 00:22:14,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=83606.66666666667, ans=0.125 +2024-07-28 00:22:17,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=83606.66666666667, ans=0.1 +2024-07-28 00:22:23,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.84 vs. limit=15.0 +2024-07-28 00:22:29,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=83633.33333333333, ans=0.5 +2024-07-28 00:22:31,662 INFO [train.py:1114] (3/4) Epoch 7, batch 1400, loss[loss=0.184, simple_loss=0.2628, pruned_loss=0.05256, over 4700.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.304, pruned_loss=0.07061, over 943058.15 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:22:33,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=83646.66666666667, ans=0.0 +2024-07-28 00:22:36,879 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.900e+01 5.949e+01 6.637e+01 7.853e+01 1.145e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 00:22:50,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=83673.33333333333, ans=0.1 +2024-07-28 00:22:59,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=83700.0, ans=0.125 +2024-07-28 00:23:03,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=83700.0, ans=0.025 +2024-07-28 00:23:04,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=83700.0, ans=0.125 +2024-07-28 00:23:06,299 INFO [train.py:1114] (3/4) Epoch 7, batch 1450, loss[loss=0.2339, simple_loss=0.3082, pruned_loss=0.07983, over 4698.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3053, pruned_loss=0.07146, over 942819.39 frames. 
], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:08,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=83713.33333333333, ans=0.025 +2024-07-28 00:23:22,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=83740.0, ans=0.0 +2024-07-28 00:23:31,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=83753.33333333333, ans=0.125 +2024-07-28 00:23:33,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=83766.66666666667, ans=0.1 +2024-07-28 00:23:39,634 INFO [train.py:1114] (3/4) Epoch 7, batch 1500, loss[loss=0.2483, simple_loss=0.3332, pruned_loss=0.08165, over 4812.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3055, pruned_loss=0.07155, over 942244.46 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:23:45,190 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.882e+01 6.521e+01 7.412e+01 1.092e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:23:55,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=83806.66666666667, ans=0.0 +2024-07-28 00:23:57,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=83806.66666666667, ans=0.125 +2024-07-28 00:24:02,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-07-28 00:24:02,757 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:24:05,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=83820.0, ans=0.125 +2024-07-28 00:24:06,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=83833.33333333333, ans=0.2 +2024-07-28 00:24:11,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=83833.33333333333, ans=0.2 +2024-07-28 00:24:13,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=83833.33333333333, ans=0.0 +2024-07-28 00:24:13,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.10 vs. limit=12.0 +2024-07-28 00:24:15,243 INFO [train.py:1114] (3/4) Epoch 7, batch 1550, loss[loss=0.2158, simple_loss=0.3113, pruned_loss=0.06016, over 4903.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3051, pruned_loss=0.07074, over 938406.97 frames. ], batch size: 15, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:24:23,241 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:24:31,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.02 vs. 
limit=15.0 +2024-07-28 00:24:52,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=83886.66666666667, ans=0.125 +2024-07-28 00:24:58,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.91 vs. limit=15.0 +2024-07-28 00:25:09,013 INFO [train.py:1114] (3/4) Epoch 7, batch 1600, loss[loss=0.2118, simple_loss=0.3117, pruned_loss=0.05591, over 4869.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3054, pruned_loss=0.07124, over 937549.82 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:12,499 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:25:14,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=83913.33333333333, ans=0.125 +2024-07-28 00:25:17,126 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=5.119e-03 +2024-07-28 00:25:17,691 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.865e+01 6.513e+01 7.777e+01 1.353e+02, threshold=1.303e+02, percent-clipped=1.0 +2024-07-28 00:25:38,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.42 vs. limit=6.0 +2024-07-28 00:25:45,978 INFO [train.py:1114] (3/4) Epoch 7, batch 1650, loss[loss=0.2354, simple_loss=0.316, pruned_loss=0.07743, over 4656.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3061, pruned_loss=0.0718, over 937660.77 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:25:47,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=83980.0, ans=0.07 +2024-07-28 00:26:07,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=84020.0, ans=0.07 +2024-07-28 00:26:11,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=84020.0, ans=0.0 +2024-07-28 00:26:20,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84033.33333333333, ans=0.1 +2024-07-28 00:26:21,407 INFO [train.py:1114] (3/4) Epoch 7, batch 1700, loss[loss=0.1794, simple_loss=0.2519, pruned_loss=0.05347, over 4700.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3054, pruned_loss=0.0713, over 939248.69 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:26:26,712 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.280e+01 6.250e+01 6.932e+01 8.047e+01 1.262e+02, threshold=1.386e+02, percent-clipped=0.0 +2024-07-28 00:26:36,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=84073.33333333333, ans=0.125 +2024-07-28 00:26:39,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=84073.33333333333, ans=0.125 +2024-07-28 00:26:40,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.82 vs. 
limit=22.5 +2024-07-28 00:26:41,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=84086.66666666667, ans=0.125 +2024-07-28 00:26:42,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=84086.66666666667, ans=0.0 +2024-07-28 00:26:44,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=84086.66666666667, ans=0.0 +2024-07-28 00:26:50,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=84100.0, ans=0.2 +2024-07-28 00:26:51,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=84100.0, ans=0.0 +2024-07-28 00:26:54,564 INFO [train.py:1114] (3/4) Epoch 7, batch 1750, loss[loss=0.2019, simple_loss=0.279, pruned_loss=0.06241, over 4809.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3041, pruned_loss=0.07062, over 940377.95 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:27:02,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.53 vs. limit=10.0 +2024-07-28 00:27:05,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=84126.66666666667, ans=0.0 +2024-07-28 00:27:28,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=84166.66666666667, ans=0.1 +2024-07-28 00:27:31,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=84166.66666666667, ans=0.5 +2024-07-28 00:27:32,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=84166.66666666667, ans=0.0 +2024-07-28 00:27:33,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=84166.66666666667, ans=0.0 +2024-07-28 00:27:35,784 INFO [train.py:1114] (3/4) Epoch 7, batch 1800, loss[loss=0.2283, simple_loss=0.3151, pruned_loss=0.07079, over 4630.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3036, pruned_loss=0.07055, over 940872.42 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:27:36,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=84180.0, ans=0.2 +2024-07-28 00:27:41,154 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.062e+01 5.927e+01 6.951e+01 8.175e+01 1.232e+02, threshold=1.390e+02, percent-clipped=0.0 +2024-07-28 00:27:41,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=84193.33333333333, ans=0.125 +2024-07-28 00:27:53,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=84206.66666666667, ans=0.04949747468305833 +2024-07-28 00:28:04,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=84220.0, ans=0.025 +2024-07-28 00:28:05,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=84220.0, ans=0.1 +2024-07-28 00:28:10,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.17 vs. limit=15.0 +2024-07-28 00:28:13,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84233.33333333333, ans=0.1 +2024-07-28 00:28:14,940 INFO [train.py:1114] (3/4) Epoch 7, batch 1850, loss[loss=0.1977, simple_loss=0.2931, pruned_loss=0.05114, over 4812.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3028, pruned_loss=0.07002, over 940919.63 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:27,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=5.94 vs. limit=15.0 +2024-07-28 00:28:27,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=84273.33333333333, ans=0.0 +2024-07-28 00:28:47,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=84300.0, ans=0.0 +2024-07-28 00:28:50,151 INFO [train.py:1114] (3/4) Epoch 7, batch 1900, loss[loss=0.2141, simple_loss=0.3035, pruned_loss=0.0624, over 4661.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3039, pruned_loss=0.07016, over 941915.68 frames. ], batch size: 14, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:28:55,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.987e+01 6.515e+01 7.725e+01 1.148e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 00:29:02,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.99 vs. limit=15.0 +2024-07-28 00:29:13,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=84353.33333333333, ans=0.125 +2024-07-28 00:29:16,422 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.15 vs. limit=22.5 +2024-07-28 00:29:22,775 INFO [train.py:1114] (3/4) Epoch 7, batch 1950, loss[loss=0.1898, simple_loss=0.2813, pruned_loss=0.04913, over 4907.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.306, pruned_loss=0.07135, over 943978.23 frames. 
], batch size: 13, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:29:25,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84380.0, ans=0.125 +2024-07-28 00:29:26,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=84380.0, ans=0.1 +2024-07-28 00:29:32,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=84393.33333333333, ans=0.0 +2024-07-28 00:29:44,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=84420.0, ans=0.2 +2024-07-28 00:29:46,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=84420.0, ans=0.0 +2024-07-28 00:29:49,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.90 vs. limit=15.0 +2024-07-28 00:29:52,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=84433.33333333333, ans=0.025 +2024-07-28 00:29:56,353 INFO [train.py:1114] (3/4) Epoch 7, batch 2000, loss[loss=0.1925, simple_loss=0.2638, pruned_loss=0.06057, over 4788.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3066, pruned_loss=0.07168, over 941144.95 frames. ], batch size: 11, lr: 1.10e-02, grad_scale: 32.0 +2024-07-28 00:30:01,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=84446.66666666667, ans=0.2 +2024-07-28 00:30:01,592 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 6.163e+01 6.683e+01 7.706e+01 1.195e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 00:30:09,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=84473.33333333333, ans=0.0 +2024-07-28 00:30:10,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.63 vs. limit=22.5 +2024-07-28 00:30:12,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=84473.33333333333, ans=0.0 +2024-07-28 00:30:27,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=84486.66666666667, ans=0.125 +2024-07-28 00:30:32,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=84500.0, ans=0.09899494936611666 +2024-07-28 00:30:36,704 INFO [train.py:1114] (3/4) Epoch 7, batch 2050, loss[loss=0.1827, simple_loss=0.2718, pruned_loss=0.0468, over 4604.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3054, pruned_loss=0.07083, over 939747.96 frames. 
], batch size: 11, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:30:39,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84513.33333333333, ans=0.125 +2024-07-28 00:30:44,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=84526.66666666667, ans=0.1 +2024-07-28 00:30:49,207 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:30:50,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.33 vs. limit=22.5 +2024-07-28 00:31:00,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=84553.33333333333, ans=0.125 +2024-07-28 00:31:01,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=84553.33333333333, ans=0.0 +2024-07-28 00:31:04,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=84566.66666666667, ans=0.025 +2024-07-28 00:31:09,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=84566.66666666667, ans=0.5 +2024-07-28 00:31:10,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=84580.0, ans=0.0 +2024-07-28 00:31:11,091 INFO [train.py:1114] (3/4) Epoch 7, batch 2100, loss[loss=0.2656, simple_loss=0.3455, pruned_loss=0.0928, over 4758.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3056, pruned_loss=0.07106, over 941726.56 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:11,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=84580.0, ans=0.125 +2024-07-28 00:31:16,339 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.891e+01 6.506e+01 7.465e+01 1.283e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 00:31:17,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=84593.33333333333, ans=0.07 +2024-07-28 00:31:17,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=84593.33333333333, ans=0.125 +2024-07-28 00:31:25,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=84606.66666666667, ans=0.2 +2024-07-28 00:31:35,100 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.52 vs. limit=10.0 +2024-07-28 00:31:36,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.16 vs. 
limit=22.5 +2024-07-28 00:31:39,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=84633.33333333333, ans=0.125 +2024-07-28 00:31:39,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=84633.33333333333, ans=0.0 +2024-07-28 00:31:40,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=84633.33333333333, ans=0.025 +2024-07-28 00:31:41,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=84633.33333333333, ans=0.125 +2024-07-28 00:31:42,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0 +2024-07-28 00:31:44,327 INFO [train.py:1114] (3/4) Epoch 7, batch 2150, loss[loss=0.2209, simple_loss=0.2885, pruned_loss=0.0766, over 4894.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3045, pruned_loss=0.07066, over 944642.97 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:31:45,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=84646.66666666667, ans=0.0 +2024-07-28 00:31:48,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. limit=6.0 +2024-07-28 00:32:10,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=84700.0, ans=0.125 +2024-07-28 00:32:17,327 INFO [train.py:1114] (3/4) Epoch 7, batch 2200, loss[loss=0.2249, simple_loss=0.3164, pruned_loss=0.06675, over 4810.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3055, pruned_loss=0.07124, over 944008.50 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:32:22,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=84713.33333333333, ans=0.125 +2024-07-28 00:32:22,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.58 vs. limit=6.0 +2024-07-28 00:32:22,573 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.863e+01 5.835e+01 6.281e+01 7.163e+01 1.109e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 00:32:23,464 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:32:34,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=84740.0, ans=0.125 +2024-07-28 00:32:35,849 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:32:43,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.60 vs. limit=15.0 +2024-07-28 00:32:48,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=84766.66666666667, ans=0.125 +2024-07-28 00:32:52,121 INFO [train.py:1114] (3/4) Epoch 7, batch 2250, loss[loss=0.2549, simple_loss=0.3298, pruned_loss=0.08997, over 4703.00 frames. 
], tot_loss[loss=0.2248, simple_loss=0.306, pruned_loss=0.07177, over 942718.53 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:33:18,121 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:33:23,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=84833.33333333333, ans=0.125 +2024-07-28 00:33:27,433 INFO [train.py:1114] (3/4) Epoch 7, batch 2300, loss[loss=0.2446, simple_loss=0.3223, pruned_loss=0.08346, over 4943.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3045, pruned_loss=0.071, over 939984.64 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:33:27,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=84846.66666666667, ans=0.125 +2024-07-28 00:33:32,859 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 5.907e+01 7.082e+01 8.177e+01 1.156e+02, threshold=1.416e+02, percent-clipped=0.0 +2024-07-28 00:33:33,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=84846.66666666667, ans=0.125 +2024-07-28 00:33:37,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=84860.0, ans=0.025 +2024-07-28 00:33:45,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=84873.33333333333, ans=0.0 +2024-07-28 00:33:45,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.35 vs. limit=15.0 +2024-07-28 00:33:45,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.57 vs. limit=15.0 +2024-07-28 00:33:54,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=84886.66666666667, ans=0.2 +2024-07-28 00:34:02,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=15.0 +2024-07-28 00:34:03,232 INFO [train.py:1114] (3/4) Epoch 7, batch 2350, loss[loss=0.2039, simple_loss=0.2934, pruned_loss=0.05724, over 4636.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3056, pruned_loss=0.07147, over 941637.98 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:03,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=84913.33333333333, ans=0.125 +2024-07-28 00:34:25,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=84953.33333333333, ans=0.125 +2024-07-28 00:34:27,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=84953.33333333333, ans=0.125 +2024-07-28 00:34:38,001 INFO [train.py:1114] (3/4) Epoch 7, batch 2400, loss[loss=0.1985, simple_loss=0.2793, pruned_loss=0.05884, over 4638.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.3052, pruned_loss=0.07122, over 941298.90 frames. 
], batch size: 12, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:34:43,148 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 6.102e+01 6.788e+01 7.615e+01 1.111e+02, threshold=1.358e+02, percent-clipped=0.0 +2024-07-28 00:34:47,152 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.73 vs. limit=15.0 +2024-07-28 00:34:48,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=84993.33333333333, ans=0.2 +2024-07-28 00:34:48,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=84993.33333333333, ans=0.125 +2024-07-28 00:34:55,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=85006.66666666667, ans=15.0 +2024-07-28 00:35:03,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. limit=6.0 +2024-07-28 00:35:11,393 INFO [train.py:1114] (3/4) Epoch 7, batch 2450, loss[loss=0.2338, simple_loss=0.33, pruned_loss=0.06878, over 4691.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3063, pruned_loss=0.07209, over 936576.52 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:14,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=85046.66666666667, ans=0.0 +2024-07-28 00:35:15,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=85046.66666666667, ans=0.0 +2024-07-28 00:35:15,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.90 vs. limit=15.0 +2024-07-28 00:35:16,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85046.66666666667, ans=0.1 +2024-07-28 00:35:25,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=85073.33333333333, ans=0.0 +2024-07-28 00:35:44,970 INFO [train.py:1114] (3/4) Epoch 7, batch 2500, loss[loss=0.249, simple_loss=0.3521, pruned_loss=0.07296, over 4817.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.307, pruned_loss=0.07225, over 938531.67 frames. ], batch size: 14, lr: 1.09e-02, grad_scale: 32.0 +2024-07-28 00:35:50,130 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.036e+01 6.265e+01 6.846e+01 8.137e+01 1.168e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 00:36:05,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=85153.33333333333, ans=0.025 +2024-07-28 00:36:07,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=85153.33333333333, ans=0.125 +2024-07-28 00:36:17,875 INFO [train.py:1114] (3/4) Epoch 7, batch 2550, loss[loss=0.1915, simple_loss=0.2747, pruned_loss=0.05412, over 4806.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3066, pruned_loss=0.07158, over 937971.71 frames. 
], batch size: 11, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:20,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85180.0, ans=0.125 +2024-07-28 00:36:28,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=85193.33333333333, ans=0.125 +2024-07-28 00:36:28,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=85193.33333333333, ans=0.125 +2024-07-28 00:36:31,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=85206.66666666667, ans=0.025 +2024-07-28 00:36:32,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=85206.66666666667, ans=0.125 +2024-07-28 00:36:45,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.26 vs. limit=22.5 +2024-07-28 00:36:49,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=85233.33333333333, ans=0.0 +2024-07-28 00:36:51,086 INFO [train.py:1114] (3/4) Epoch 7, batch 2600, loss[loss=0.2204, simple_loss=0.3032, pruned_loss=0.06877, over 4898.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3071, pruned_loss=0.07164, over 936799.32 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:36:56,532 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.315e+01 5.684e+01 6.063e+01 6.727e+01 1.050e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 00:37:08,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=85273.33333333333, ans=0.125 +2024-07-28 00:37:24,486 INFO [train.py:1114] (3/4) Epoch 7, batch 2650, loss[loss=0.2771, simple_loss=0.3556, pruned_loss=0.09931, over 4637.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3058, pruned_loss=0.07028, over 938868.67 frames. ], batch size: 16, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:37:27,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=85313.33333333333, ans=0.125 +2024-07-28 00:37:31,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=85326.66666666667, ans=0.2 +2024-07-28 00:37:31,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=85326.66666666667, ans=0.125 +2024-07-28 00:37:38,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=85326.66666666667, ans=0.0 +2024-07-28 00:37:51,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=85353.33333333333, ans=0.0 +2024-07-28 00:37:56,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85366.66666666667, ans=0.1 +2024-07-28 00:37:59,896 INFO [train.py:1114] (3/4) Epoch 7, batch 2700, loss[loss=0.2497, simple_loss=0.339, pruned_loss=0.08026, over 4738.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3064, pruned_loss=0.07087, over 938784.98 frames. 
], batch size: 14, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:00,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=85380.0, ans=0.2 +2024-07-28 00:38:02,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.72 vs. limit=10.0 +2024-07-28 00:38:05,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.110e+01 5.828e+01 6.522e+01 7.194e+01 9.710e+01, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 00:38:08,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=85393.33333333333, ans=0.125 +2024-07-28 00:38:12,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=85393.33333333333, ans=0.07 +2024-07-28 00:38:14,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=85393.33333333333, ans=0.125 +2024-07-28 00:38:38,344 INFO [train.py:1114] (3/4) Epoch 7, batch 2750, loss[loss=0.1721, simple_loss=0.252, pruned_loss=0.04616, over 4706.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3039, pruned_loss=0.07035, over 938922.12 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:38:49,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85460.0, ans=0.1 +2024-07-28 00:38:51,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85473.33333333333, ans=0.125 +2024-07-28 00:38:51,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=85473.33333333333, ans=0.125 +2024-07-28 00:38:51,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=85473.33333333333, ans=0.125 +2024-07-28 00:39:05,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=85486.66666666667, ans=0.0 +2024-07-28 00:39:07,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=85500.0, ans=0.0 +2024-07-28 00:39:13,498 INFO [train.py:1114] (3/4) Epoch 7, batch 2800, loss[loss=0.2854, simple_loss=0.3348, pruned_loss=0.118, over 3277.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3036, pruned_loss=0.06997, over 936733.29 frames. 
], batch size: 35, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:13,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=85513.33333333333, ans=0.0 +2024-07-28 00:39:15,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85513.33333333333, ans=0.1 +2024-07-28 00:39:18,793 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.969e+01 6.581e+01 7.409e+01 1.159e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 00:39:26,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=85540.0, ans=0.2 +2024-07-28 00:39:37,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=85553.33333333333, ans=0.125 +2024-07-28 00:39:40,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=85553.33333333333, ans=0.2 +2024-07-28 00:39:49,477 INFO [train.py:1114] (3/4) Epoch 7, batch 2850, loss[loss=0.2027, simple_loss=0.2913, pruned_loss=0.05707, over 4961.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3046, pruned_loss=0.07053, over 935152.15 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:39:50,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.40 vs. limit=22.5 +2024-07-28 00:40:12,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85620.0, ans=0.1 +2024-07-28 00:40:17,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=85633.33333333333, ans=0.125 +2024-07-28 00:40:23,173 INFO [train.py:1114] (3/4) Epoch 7, batch 2900, loss[loss=0.2174, simple_loss=0.2977, pruned_loss=0.06858, over 4834.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3047, pruned_loss=0.07027, over 939142.79 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:40:24,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=85646.66666666667, ans=0.125 +2024-07-28 00:40:24,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=85646.66666666667, ans=0.125 +2024-07-28 00:40:28,616 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 6.206e+01 7.013e+01 8.326e+01 1.461e+02, threshold=1.403e+02, percent-clipped=1.0 +2024-07-28 00:40:29,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=85660.0, ans=0.125 +2024-07-28 00:40:41,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.93 vs. 
limit=12.0 +2024-07-28 00:40:42,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=85673.33333333333, ans=0.0 +2024-07-28 00:40:43,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=85686.66666666667, ans=0.125 +2024-07-28 00:40:45,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=85686.66666666667, ans=0.0 +2024-07-28 00:40:45,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.12 vs. limit=15.0 +2024-07-28 00:40:56,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85700.0, ans=0.125 +2024-07-28 00:41:00,424 INFO [train.py:1114] (3/4) Epoch 7, batch 2950, loss[loss=0.2271, simple_loss=0.3088, pruned_loss=0.0727, over 4695.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3026, pruned_loss=0.06949, over 938572.10 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:41:27,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=85766.66666666667, ans=0.125 +2024-07-28 00:41:29,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.43 vs. limit=10.0 +2024-07-28 00:41:33,779 INFO [train.py:1114] (3/4) Epoch 7, batch 3000, loss[loss=0.2221, simple_loss=0.31, pruned_loss=0.06712, over 4760.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.3028, pruned_loss=0.06938, over 938362.37 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:41:33,779 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 00:41:41,512 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.4686, 2.4506, 4.0526, 2.2215], device='cuda:3') +2024-07-28 00:41:46,475 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2896, pruned_loss=0.04088, over 944034.00 frames. +2024-07-28 00:41:46,476 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 00:41:51,998 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 6.009e+01 6.936e+01 8.242e+01 1.252e+02, threshold=1.387e+02, percent-clipped=0.0 +2024-07-28 00:41:56,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85793.33333333333, ans=0.1 +2024-07-28 00:41:59,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.71 vs. limit=15.0 +2024-07-28 00:42:00,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=85806.66666666667, ans=0.025 +2024-07-28 00:42:02,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=85806.66666666667, ans=0.07 +2024-07-28 00:42:17,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=85833.33333333333, ans=0.125 +2024-07-28 00:42:20,757 INFO [train.py:1114] (3/4) Epoch 7, batch 3050, loss[loss=0.2201, simple_loss=0.31, pruned_loss=0.06511, over 4646.00 frames. 
], tot_loss[loss=0.2213, simple_loss=0.3038, pruned_loss=0.06942, over 937169.24 frames. ], batch size: 12, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:42:24,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0 +2024-07-28 00:42:27,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=85860.0, ans=0.1 +2024-07-28 00:42:35,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=85873.33333333333, ans=0.125 +2024-07-28 00:42:47,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=85900.0, ans=0.1 +2024-07-28 00:42:52,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=85900.0, ans=0.0 +2024-07-28 00:42:54,038 INFO [train.py:1114] (3/4) Epoch 7, batch 3100, loss[loss=0.2409, simple_loss=0.3459, pruned_loss=0.06794, over 4570.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3037, pruned_loss=0.06958, over 937630.16 frames. ], batch size: 16, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:42:55,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=85913.33333333333, ans=0.95 +2024-07-28 00:42:56,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=85913.33333333333, ans=0.1 +2024-07-28 00:42:59,248 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.759e+01 6.343e+01 7.086e+01 1.226e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 00:43:14,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.96 vs. limit=15.0 +2024-07-28 00:43:16,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=85953.33333333333, ans=0.95 +2024-07-28 00:43:23,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=85953.33333333333, ans=0.1 +2024-07-28 00:43:31,711 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:43:32,973 INFO [train.py:1114] (3/4) Epoch 7, batch 3150, loss[loss=0.2081, simple_loss=0.3035, pruned_loss=0.05637, over 4618.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3034, pruned_loss=0.06947, over 938161.18 frames. ], batch size: 17, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:43:34,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=85980.0, ans=0.125 +2024-07-28 00:43:34,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=85980.0, ans=0.035 +2024-07-28 00:43:41,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=85993.33333333333, ans=0.0 +2024-07-28 00:43:42,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. 
limit=6.0 +2024-07-28 00:43:54,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86020.0, ans=0.1 +2024-07-28 00:44:01,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=86033.33333333333, ans=0.02 +2024-07-28 00:44:08,148 INFO [train.py:1114] (3/4) Epoch 7, batch 3200, loss[loss=0.1672, simple_loss=0.257, pruned_loss=0.03868, over 4822.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3029, pruned_loss=0.0691, over 939663.12 frames. ], batch size: 13, lr: 1.09e-02, grad_scale: 64.0 +2024-07-28 00:44:08,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=86046.66666666667, ans=0.04949747468305833 +2024-07-28 00:44:13,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 6.085e+01 7.068e+01 8.225e+01 1.298e+02, threshold=1.414e+02, percent-clipped=1.0 +2024-07-28 00:44:21,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=86060.0, ans=0.09899494936611666 +2024-07-28 00:44:24,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=86073.33333333333, ans=0.2 +2024-07-28 00:44:26,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=86073.33333333333, ans=0.0 +2024-07-28 00:44:27,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=86073.33333333333, ans=0.2 +2024-07-28 00:44:35,336 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:44:42,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.66 vs. limit=15.0 +2024-07-28 00:44:50,711 INFO [train.py:1114] (3/4) Epoch 7, batch 3250, loss[loss=0.2328, simple_loss=0.3145, pruned_loss=0.07557, over 4930.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.304, pruned_loss=0.0697, over 940726.25 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:44:50,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=86113.33333333333, ans=10.0 +2024-07-28 00:44:52,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=86113.33333333333, ans=0.2 +2024-07-28 00:45:13,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=86153.33333333333, ans=0.1 +2024-07-28 00:45:24,553 INFO [train.py:1114] (3/4) Epoch 7, batch 3300, loss[loss=0.2363, simple_loss=0.3129, pruned_loss=0.07979, over 4757.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.302, pruned_loss=0.06863, over 941064.47 frames. 
], batch size: 19, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:45:30,687 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.736e+01 6.420e+01 6.992e+01 1.033e+02, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 00:45:34,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=86193.33333333333, ans=0.2 +2024-07-28 00:45:40,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=86206.66666666667, ans=0.125 +2024-07-28 00:45:43,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=86206.66666666667, ans=0.0 +2024-07-28 00:45:57,982 INFO [train.py:1114] (3/4) Epoch 7, batch 3350, loss[loss=0.268, simple_loss=0.3553, pruned_loss=0.09035, over 4600.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3039, pruned_loss=0.06978, over 938967.13 frames. ], batch size: 17, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:46:00,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=86246.66666666667, ans=0.125 +2024-07-28 00:46:08,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=86260.0, ans=0.0 +2024-07-28 00:46:15,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=86273.33333333333, ans=0.125 +2024-07-28 00:46:25,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=15.0 +2024-07-28 00:46:31,261 INFO [train.py:1114] (3/4) Epoch 7, batch 3400, loss[loss=0.1852, simple_loss=0.2615, pruned_loss=0.0544, over 4801.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3042, pruned_loss=0.07004, over 937773.66 frames. ], batch size: 11, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:46:37,178 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.995e+01 5.874e+01 6.654e+01 7.588e+01 1.124e+02, threshold=1.331e+02, percent-clipped=0.0 +2024-07-28 00:46:45,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86340.0, ans=0.0 +2024-07-28 00:46:55,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86353.33333333333, ans=0.1 +2024-07-28 00:47:04,937 INFO [train.py:1114] (3/4) Epoch 7, batch 3450, loss[loss=0.2224, simple_loss=0.3023, pruned_loss=0.07129, over 4644.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.304, pruned_loss=0.0698, over 938272.97 frames. 
], batch size: 19, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:47:09,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=86380.0, ans=0.0 +2024-07-28 00:47:18,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=86406.66666666667, ans=0.0 +2024-07-28 00:47:23,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=86406.66666666667, ans=0.125 +2024-07-28 00:47:23,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=86406.66666666667, ans=0.07 +2024-07-28 00:47:30,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=86420.0, ans=0.0 +2024-07-28 00:47:32,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=86433.33333333333, ans=0.5 +2024-07-28 00:47:33,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=12.0 +2024-07-28 00:47:33,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.36 vs. limit=15.0 +2024-07-28 00:47:38,425 INFO [train.py:1114] (3/4) Epoch 7, batch 3500, loss[loss=0.212, simple_loss=0.2906, pruned_loss=0.06672, over 4952.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3034, pruned_loss=0.06958, over 938482.24 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:47:44,467 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.841e+01 6.535e+01 7.195e+01 1.031e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 00:47:51,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.98 vs. limit=22.5 +2024-07-28 00:47:55,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=86473.33333333333, ans=10.0 +2024-07-28 00:47:57,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.43 vs. limit=10.0 +2024-07-28 00:48:05,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=86486.66666666667, ans=0.025 +2024-07-28 00:48:06,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=86486.66666666667, ans=0.125 +2024-07-28 00:48:08,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.43 vs. limit=15.0 +2024-07-28 00:48:10,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=86500.0, ans=0.125 +2024-07-28 00:48:13,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=86500.0, ans=0.125 +2024-07-28 00:48:16,911 INFO [train.py:1114] (3/4) Epoch 7, batch 3550, loss[loss=0.2286, simple_loss=0.3084, pruned_loss=0.07444, over 4662.00 frames. 
], tot_loss[loss=0.2207, simple_loss=0.3026, pruned_loss=0.06939, over 938872.19 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:48:19,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=86513.33333333333, ans=0.0 +2024-07-28 00:48:27,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86526.66666666667, ans=0.1 +2024-07-28 00:48:31,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.23 vs. limit=10.0 +2024-07-28 00:48:38,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=86553.33333333333, ans=0.025 +2024-07-28 00:48:49,699 INFO [train.py:1114] (3/4) Epoch 7, batch 3600, loss[loss=0.2317, simple_loss=0.3041, pruned_loss=0.07963, over 4964.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3022, pruned_loss=0.06895, over 940457.85 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:48:55,677 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.014e+01 6.005e+01 6.689e+01 7.700e+01 1.084e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-28 00:48:58,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.89 vs. limit=10.0 +2024-07-28 00:49:06,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=86606.66666666667, ans=0.2 +2024-07-28 00:49:14,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.87 vs. limit=22.5 +2024-07-28 00:49:25,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86633.33333333333, ans=0.1 +2024-07-28 00:49:26,862 INFO [train.py:1114] (3/4) Epoch 7, batch 3650, loss[loss=0.2455, simple_loss=0.3373, pruned_loss=0.07689, over 4903.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3033, pruned_loss=0.0698, over 941202.15 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:49:29,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=86646.66666666667, ans=0.125 +2024-07-28 00:49:30,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=86646.66666666667, ans=0.2 +2024-07-28 00:49:31,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.61 vs. limit=15.0 +2024-07-28 00:49:33,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=86660.0, ans=0.09899494936611666 +2024-07-28 00:49:40,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.09 vs. limit=22.5 +2024-07-28 00:49:59,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.95 vs. 
limit=10.0 +2024-07-28 00:50:02,203 INFO [train.py:1114] (3/4) Epoch 7, batch 3700, loss[loss=0.2379, simple_loss=0.3169, pruned_loss=0.0795, over 4925.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3034, pruned_loss=0.06922, over 941879.10 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:50:07,940 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.091e+01 5.984e+01 6.849e+01 8.141e+01 1.285e+02, threshold=1.370e+02, percent-clipped=0.0 +2024-07-28 00:50:31,719 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:50:36,835 INFO [train.py:1114] (3/4) Epoch 7, batch 3750, loss[loss=0.2601, simple_loss=0.3215, pruned_loss=0.09939, over 4799.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3032, pruned_loss=0.06929, over 943512.31 frames. ], batch size: 11, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:50:56,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86820.0, ans=0.1 +2024-07-28 00:51:05,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=86833.33333333333, ans=0.04949747468305833 +2024-07-28 00:51:10,297 INFO [train.py:1114] (3/4) Epoch 7, batch 3800, loss[loss=0.2244, simple_loss=0.3109, pruned_loss=0.06897, over 4816.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.3006, pruned_loss=0.06797, over 942248.60 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:51:16,204 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.845e+01 5.938e+01 6.490e+01 7.260e+01 1.083e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 00:51:16,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=86860.0, ans=0.2 +2024-07-28 00:51:28,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=86873.33333333333, ans=0.0 +2024-07-28 00:51:29,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=86886.66666666667, ans=0.09899494936611666 +2024-07-28 00:51:39,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=86900.0, ans=0.125 +2024-07-28 00:51:39,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=86900.0, ans=0.125 +2024-07-28 00:51:43,434 INFO [train.py:1114] (3/4) Epoch 7, batch 3850, loss[loss=0.202, simple_loss=0.2919, pruned_loss=0.05604, over 4660.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.3009, pruned_loss=0.06772, over 942738.44 frames. ], batch size: 16, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:51:53,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=86926.66666666667, ans=0.1 +2024-07-28 00:51:59,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.05 vs. 
limit=22.5 +2024-07-28 00:52:03,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=86953.33333333333, ans=0.0 +2024-07-28 00:52:04,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=86953.33333333333, ans=0.0 +2024-07-28 00:52:09,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=86966.66666666667, ans=0.1 +2024-07-28 00:52:13,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.05 vs. limit=15.0 +2024-07-28 00:52:15,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=86966.66666666667, ans=0.125 +2024-07-28 00:52:17,056 INFO [train.py:1114] (3/4) Epoch 7, batch 3900, loss[loss=0.2027, simple_loss=0.2971, pruned_loss=0.05421, over 4805.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.3026, pruned_loss=0.06856, over 942854.28 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:52:22,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.069e+01 5.781e+01 6.376e+01 7.079e+01 1.169e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 00:52:32,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=87006.66666666667, ans=0.5 +2024-07-28 00:52:48,062 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 00:52:49,966 INFO [train.py:1114] (3/4) Epoch 7, batch 3950, loss[loss=0.2337, simple_loss=0.3215, pruned_loss=0.073, over 4846.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.3023, pruned_loss=0.06828, over 944687.15 frames. ], batch size: 16, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:53:13,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=87086.66666666667, ans=0.0 +2024-07-28 00:53:13,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=87086.66666666667, ans=0.0 +2024-07-28 00:53:17,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=87100.0, ans=0.125 +2024-07-28 00:53:21,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=87100.0, ans=0.125 +2024-07-28 00:53:23,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=15.0 +2024-07-28 00:53:23,288 INFO [train.py:1114] (3/4) Epoch 7, batch 4000, loss[loss=0.2308, simple_loss=0.312, pruned_loss=0.07483, over 4775.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.303, pruned_loss=0.0693, over 941175.64 frames. 
], batch size: 12, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:53:29,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 5.981e+01 6.594e+01 7.315e+01 1.099e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 00:53:32,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87126.66666666667, ans=0.1 +2024-07-28 00:53:51,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=87166.66666666667, ans=0.2 +2024-07-28 00:53:54,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=87166.66666666667, ans=0.125 +2024-07-28 00:53:57,315 INFO [train.py:1114] (3/4) Epoch 7, batch 4050, loss[loss=0.2408, simple_loss=0.3338, pruned_loss=0.07391, over 3623.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.3022, pruned_loss=0.0685, over 939897.51 frames. ], batch size: 35, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:53:57,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=87180.0, ans=0.0 +2024-07-28 00:54:05,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=87193.33333333333, ans=0.05 +2024-07-28 00:54:16,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=87220.0, ans=0.1 +2024-07-28 00:54:32,975 INFO [train.py:1114] (3/4) Epoch 7, batch 4100, loss[loss=0.2218, simple_loss=0.3091, pruned_loss=0.06726, over 4903.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.302, pruned_loss=0.06892, over 939350.84 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:54:33,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=87246.66666666667, ans=0.125 +2024-07-28 00:54:39,027 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.802e+01 6.541e+01 7.841e+01 1.191e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 00:54:41,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.89 vs. limit=15.0 +2024-07-28 00:54:43,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=87260.0, ans=0.04949747468305833 +2024-07-28 00:54:45,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=87260.0, ans=0.125 +2024-07-28 00:54:56,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=87286.66666666667, ans=0.035 +2024-07-28 00:55:08,134 INFO [train.py:1114] (3/4) Epoch 7, batch 4150, loss[loss=0.218, simple_loss=0.2956, pruned_loss=0.0702, over 4828.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.3017, pruned_loss=0.06898, over 938978.98 frames. 
], batch size: 13, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:55:10,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=87313.33333333333, ans=0.0 +2024-07-28 00:55:15,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=87326.66666666667, ans=0.0 +2024-07-28 00:55:26,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=87340.0, ans=0.05 +2024-07-28 00:55:35,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=87353.33333333333, ans=0.0 +2024-07-28 00:55:39,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=87366.66666666667, ans=0.125 +2024-07-28 00:55:40,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=87366.66666666667, ans=0.125 +2024-07-28 00:55:40,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=87366.66666666667, ans=0.05 +2024-07-28 00:55:44,754 INFO [train.py:1114] (3/4) Epoch 7, batch 4200, loss[loss=0.2568, simple_loss=0.3452, pruned_loss=0.08422, over 4901.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3028, pruned_loss=0.06966, over 940463.98 frames. ], batch size: 15, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:55:50,405 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.692e+01 6.166e+01 6.641e+01 1.038e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 00:55:52,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=87393.33333333333, ans=0.125 +2024-07-28 00:55:53,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=87393.33333333333, ans=0.1 +2024-07-28 00:55:57,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=87406.66666666667, ans=0.125 +2024-07-28 00:56:01,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=87406.66666666667, ans=0.125 +2024-07-28 00:56:02,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=87406.66666666667, ans=0.125 +2024-07-28 00:56:02,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=87406.66666666667, ans=0.1 +2024-07-28 00:56:03,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=87420.0, ans=0.125 +2024-07-28 00:56:06,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.99 vs. limit=15.0 +2024-07-28 00:56:08,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.99 vs. 
limit=15.0 +2024-07-28 00:56:08,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87420.0, ans=0.0 +2024-07-28 00:56:15,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=87433.33333333333, ans=0.1 +2024-07-28 00:56:17,896 INFO [train.py:1114] (3/4) Epoch 7, batch 4250, loss[loss=0.2432, simple_loss=0.3084, pruned_loss=0.089, over 4644.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3034, pruned_loss=0.06973, over 941136.71 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:56:19,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.22 vs. limit=10.0 +2024-07-28 00:56:24,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.15 vs. limit=10.0 +2024-07-28 00:56:36,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=87473.33333333333, ans=0.125 +2024-07-28 00:56:38,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=87486.66666666667, ans=0.035 +2024-07-28 00:56:38,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=87486.66666666667, ans=0.125 +2024-07-28 00:56:51,125 INFO [train.py:1114] (3/4) Epoch 7, batch 4300, loss[loss=0.2212, simple_loss=0.3084, pruned_loss=0.06699, over 4754.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3035, pruned_loss=0.07004, over 940369.57 frames. ], batch size: 13, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:56:57,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 6.079e+01 6.780e+01 8.042e+01 1.237e+02, threshold=1.356e+02, percent-clipped=1.0 +2024-07-28 00:56:57,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=87526.66666666667, ans=0.1 +2024-07-28 00:57:08,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=87540.0, ans=0.025 +2024-07-28 00:57:15,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=87553.33333333333, ans=0.025 +2024-07-28 00:57:20,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=87566.66666666667, ans=0.125 +2024-07-28 00:57:24,601 INFO [train.py:1114] (3/4) Epoch 7, batch 4350, loss[loss=0.226, simple_loss=0.3218, pruned_loss=0.06511, over 4759.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3033, pruned_loss=0.06931, over 941127.57 frames. 
], batch size: 13, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:57:40,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=87606.66666666667, ans=0.1 +2024-07-28 00:57:48,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=87620.0, ans=0.2 +2024-07-28 00:57:53,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=87633.33333333333, ans=0.2 +2024-07-28 00:57:53,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=87633.33333333333, ans=0.125 +2024-07-28 00:57:56,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=87633.33333333333, ans=0.2 +2024-07-28 00:57:58,011 INFO [train.py:1114] (3/4) Epoch 7, batch 4400, loss[loss=0.2354, simple_loss=0.3211, pruned_loss=0.07487, over 4807.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3035, pruned_loss=0.06983, over 940559.79 frames. ], batch size: 14, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:57:59,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=87646.66666666667, ans=0.125 +2024-07-28 00:58:03,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=87646.66666666667, ans=0.0 +2024-07-28 00:58:04,029 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.991e+01 6.337e+01 7.130e+01 1.070e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 00:58:04,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=87660.0, ans=0.125 +2024-07-28 00:58:15,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.93 vs. limit=10.0 +2024-07-28 00:58:23,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=87686.66666666667, ans=0.125 +2024-07-28 00:58:31,606 INFO [train.py:1114] (3/4) Epoch 7, batch 4450, loss[loss=0.1936, simple_loss=0.2663, pruned_loss=0.06042, over 4930.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.3037, pruned_loss=0.07009, over 938456.88 frames. ], batch size: 12, lr: 1.08e-02, grad_scale: 32.0 +2024-07-28 00:58:33,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.44 vs. limit=22.5 +2024-07-28 00:58:39,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=87726.66666666667, ans=0.125 +2024-07-28 00:58:53,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=87753.33333333333, ans=12.0 +2024-07-28 00:58:57,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=87753.33333333333, ans=0.0 +2024-07-28 00:58:57,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.52 vs. 
limit=10.0 +2024-07-28 00:58:59,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=87766.66666666667, ans=0.0 +2024-07-28 00:59:00,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=87766.66666666667, ans=0.2 +2024-07-28 00:59:05,053 INFO [train.py:1114] (3/4) Epoch 7, batch 4500, loss[loss=0.214, simple_loss=0.2967, pruned_loss=0.06562, over 4735.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3041, pruned_loss=0.06971, over 938235.77 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 00:59:10,924 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.694e+01 6.393e+01 7.700e+01 1.282e+02, threshold=1.279e+02, percent-clipped=1.0 +2024-07-28 00:59:11,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=87793.33333333333, ans=0.025 +2024-07-28 00:59:33,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=87833.33333333333, ans=0.125 +2024-07-28 00:59:37,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=87833.33333333333, ans=0.125 +2024-07-28 00:59:38,264 INFO [train.py:1114] (3/4) Epoch 7, batch 4550, loss[loss=0.2246, simple_loss=0.3051, pruned_loss=0.072, over 4904.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3041, pruned_loss=0.06963, over 940347.23 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:00:06,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=87900.0, ans=0.125 +2024-07-28 01:00:07,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.32 vs. limit=10.0 +2024-07-28 01:00:13,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=87900.0, ans=0.0 +2024-07-28 01:00:14,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=87900.0, ans=0.0 +2024-07-28 01:00:15,713 INFO [train.py:1114] (3/4) Epoch 7, batch 4600, loss[loss=0.2103, simple_loss=0.2898, pruned_loss=0.06543, over 4559.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3039, pruned_loss=0.06968, over 938398.08 frames. 
], batch size: 21, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:00:21,776 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.990e+01 6.916e+01 8.662e+01 1.306e+02, threshold=1.383e+02, percent-clipped=1.0 +2024-07-28 01:00:22,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=87926.66666666667, ans=0.125 +2024-07-28 01:00:30,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=87940.0, ans=0.0 +2024-07-28 01:00:38,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=87953.33333333333, ans=0.0 +2024-07-28 01:00:39,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=87953.33333333333, ans=0.125 +2024-07-28 01:00:43,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=87966.66666666667, ans=0.125 +2024-07-28 01:00:47,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=87966.66666666667, ans=0.5 +2024-07-28 01:00:48,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=87966.66666666667, ans=0.2 +2024-07-28 01:00:49,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=87966.66666666667, ans=0.125 +2024-07-28 01:00:50,812 INFO [train.py:1114] (3/4) Epoch 7, batch 4650, loss[loss=0.2205, simple_loss=0.3037, pruned_loss=0.0687, over 4839.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3042, pruned_loss=0.06968, over 940219.15 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:00:53,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=6.37 vs. limit=12.0 +2024-07-28 01:00:55,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=87980.0, ans=0.125 +2024-07-28 01:01:03,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=87993.33333333333, ans=0.0 +2024-07-28 01:01:04,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=87993.33333333333, ans=0.0 +2024-07-28 01:01:06,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-07-28 01:01:09,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=88006.66666666667, ans=0.0 +2024-07-28 01:01:12,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=88020.0, ans=0.125 +2024-07-28 01:01:26,362 INFO [train.py:1114] (3/4) Epoch 7, batch 4700, loss[loss=0.2247, simple_loss=0.2984, pruned_loss=0.07555, over 4701.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3044, pruned_loss=0.0699, over 937471.70 frames. 
], batch size: 11, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:01:28,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=88046.66666666667, ans=0.0 +2024-07-28 01:01:32,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.909e+01 6.693e+01 7.629e+01 1.851e+02, threshold=1.339e+02, percent-clipped=2.0 +2024-07-28 01:01:37,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.02 vs. limit=10.0 +2024-07-28 01:01:37,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=88060.0, ans=0.0 +2024-07-28 01:01:43,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.58 vs. limit=10.0 +2024-07-28 01:01:44,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=88073.33333333333, ans=0.125 +2024-07-28 01:01:57,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=88100.0, ans=0.0 +2024-07-28 01:01:58,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.79 vs. limit=15.0 +2024-07-28 01:01:59,573 INFO [train.py:1114] (3/4) Epoch 7, batch 4750, loss[loss=0.2434, simple_loss=0.3156, pruned_loss=0.08555, over 4489.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3046, pruned_loss=0.07017, over 935277.13 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:02:11,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=88126.66666666667, ans=0.0 +2024-07-28 01:02:18,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88140.0, ans=0.125 +2024-07-28 01:02:20,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=88153.33333333333, ans=0.025 +2024-07-28 01:02:21,038 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5 +2024-07-28 01:02:32,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=88180.0, ans=0.025 +2024-07-28 01:02:33,427 INFO [train.py:1114] (3/4) Epoch 7, batch 4800, loss[loss=0.239, simple_loss=0.3157, pruned_loss=0.08114, over 4697.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3049, pruned_loss=0.07071, over 932499.17 frames. 
], batch size: 13, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:02:33,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=88180.0, ans=0.035 +2024-07-28 01:02:39,288 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.971e+01 6.574e+01 7.583e+01 1.047e+02, threshold=1.315e+02, percent-clipped=0.0 +2024-07-28 01:02:51,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=88206.66666666667, ans=0.0 +2024-07-28 01:03:00,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=88233.33333333333, ans=0.0 +2024-07-28 01:03:06,431 INFO [train.py:1114] (3/4) Epoch 7, batch 4850, loss[loss=0.2206, simple_loss=0.3, pruned_loss=0.0706, over 4736.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3045, pruned_loss=0.07032, over 931613.26 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:03:08,755 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.15 vs. limit=22.5 +2024-07-28 01:03:11,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=88246.66666666667, ans=0.1 +2024-07-28 01:03:18,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88260.0, ans=0.1 +2024-07-28 01:03:18,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=88260.0, ans=0.09899494936611666 +2024-07-28 01:03:39,970 INFO [train.py:1114] (3/4) Epoch 7, batch 4900, loss[loss=0.2052, simple_loss=0.2914, pruned_loss=0.05953, over 4751.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3045, pruned_loss=0.07078, over 933594.94 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:03:46,982 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.185e+01 5.896e+01 6.545e+01 7.673e+01 1.105e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-28 01:03:51,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=88326.66666666667, ans=0.0 +2024-07-28 01:03:55,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88340.0, ans=0.1 +2024-07-28 01:04:07,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=88366.66666666667, ans=0.125 +2024-07-28 01:04:13,957 INFO [train.py:1114] (3/4) Epoch 7, batch 4950, loss[loss=0.2884, simple_loss=0.3501, pruned_loss=0.1134, over 3285.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.3066, pruned_loss=0.07192, over 930462.43 frames. 
], batch size: 35, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:04:14,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=88380.0, ans=0.125 +2024-07-28 01:04:24,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=88393.33333333333, ans=0.125 +2024-07-28 01:04:34,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=88420.0, ans=0.025 +2024-07-28 01:04:35,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=88420.0, ans=0.125 +2024-07-28 01:04:44,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=88433.33333333333, ans=0.2 +2024-07-28 01:04:48,684 INFO [train.py:1114] (3/4) Epoch 7, batch 5000, loss[loss=0.2208, simple_loss=0.3191, pruned_loss=0.06127, over 4667.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3053, pruned_loss=0.07088, over 934345.99 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:04:55,130 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.044e+01 6.033e+01 7.025e+01 8.348e+01 1.303e+02, threshold=1.405e+02, percent-clipped=0.0 +2024-07-28 01:05:02,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.27 vs. limit=15.0 +2024-07-28 01:05:11,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=88486.66666666667, ans=0.0 +2024-07-28 01:05:16,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=88500.0, ans=0.0 +2024-07-28 01:05:17,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=88500.0, ans=0.125 +2024-07-28 01:05:21,741 INFO [train.py:1114] (3/4) Epoch 7, batch 5050, loss[loss=0.1959, simple_loss=0.2828, pruned_loss=0.05456, over 4846.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3037, pruned_loss=0.06979, over 936807.24 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:05:27,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=88513.33333333333, ans=0.02 +2024-07-28 01:05:30,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=88526.66666666667, ans=0.125 +2024-07-28 01:05:41,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=88540.0, ans=0.0 +2024-07-28 01:05:59,003 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.950e-02 +2024-07-28 01:05:59,504 INFO [train.py:1114] (3/4) Epoch 7, batch 5100, loss[loss=0.1856, simple_loss=0.2763, pruned_loss=0.04742, over 4774.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3046, pruned_loss=0.07059, over 934925.56 frames. 
], batch size: 12, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:06:00,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=88580.0, ans=0.0 +2024-07-28 01:06:02,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=88580.0, ans=0.2 +2024-07-28 01:06:06,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.884e+01 6.519e+01 7.454e+01 1.176e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 01:06:11,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=88593.33333333333, ans=0.0 +2024-07-28 01:06:30,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=88633.33333333333, ans=0.0 +2024-07-28 01:06:36,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=88633.33333333333, ans=0.125 +2024-07-28 01:06:37,930 INFO [train.py:1114] (3/4) Epoch 7, batch 5150, loss[loss=0.2319, simple_loss=0.3192, pruned_loss=0.07229, over 4842.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3044, pruned_loss=0.07078, over 936175.80 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 16.0 +2024-07-28 01:06:41,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.15 vs. limit=15.0 +2024-07-28 01:06:42,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=88646.66666666667, ans=0.025 +2024-07-28 01:06:47,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=88660.0, ans=0.025 +2024-07-28 01:06:50,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=88673.33333333333, ans=0.125 +2024-07-28 01:06:58,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=88686.66666666667, ans=0.125 +2024-07-28 01:06:58,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=88686.66666666667, ans=0.1 +2024-07-28 01:07:03,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.83 vs. limit=22.5 +2024-07-28 01:07:11,379 INFO [train.py:1114] (3/4) Epoch 7, batch 5200, loss[loss=0.227, simple_loss=0.3055, pruned_loss=0.07424, over 4659.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.304, pruned_loss=0.07013, over 936131.05 frames. ], batch size: 14, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:07:14,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.90 vs. 
limit=15.0 +2024-07-28 01:07:16,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=88713.33333333333, ans=0.0 +2024-07-28 01:07:18,341 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.071e+01 6.603e+01 7.061e+01 1.007e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 01:07:20,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.54 vs. limit=22.5 +2024-07-28 01:07:24,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.84 vs. limit=15.0 +2024-07-28 01:07:34,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=88753.33333333333, ans=0.1 +2024-07-28 01:07:44,902 INFO [train.py:1114] (3/4) Epoch 7, batch 5250, loss[loss=0.2287, simple_loss=0.3039, pruned_loss=0.07669, over 4896.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3029, pruned_loss=0.06945, over 935490.54 frames. ], batch size: 13, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:07:47,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.06 vs. limit=10.0 +2024-07-28 01:07:53,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=88793.33333333333, ans=0.125 +2024-07-28 01:08:02,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=88806.66666666667, ans=0.09899494936611666 +2024-07-28 01:08:15,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=88833.33333333333, ans=0.0 +2024-07-28 01:08:18,678 INFO [train.py:1114] (3/4) Epoch 7, batch 5300, loss[loss=0.2983, simple_loss=0.3631, pruned_loss=0.1167, over 4665.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3032, pruned_loss=0.0701, over 933892.94 frames. ], batch size: 16, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:08:21,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=88846.66666666667, ans=0.125 +2024-07-28 01:08:25,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.926e+01 6.505e+01 7.271e+01 1.034e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 01:08:30,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=88860.0, ans=0.1 +2024-07-28 01:08:33,552 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0 +2024-07-28 01:08:41,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=88886.66666666667, ans=0.025 +2024-07-28 01:08:41,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=88886.66666666667, ans=0.125 +2024-07-28 01:08:51,785 INFO [train.py:1114] (3/4) Epoch 7, batch 5350, loss[loss=0.2145, simple_loss=0.2906, pruned_loss=0.06921, over 4481.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3035, pruned_loss=0.06991, over 935924.02 frames. 
], batch size: 10, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:08:55,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=88913.33333333333, ans=0.5 +2024-07-28 01:09:08,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=88940.0, ans=0.125 +2024-07-28 01:09:08,942 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:09:20,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=88953.33333333333, ans=0.125 +2024-07-28 01:09:21,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=88953.33333333333, ans=0.025 +2024-07-28 01:09:22,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=88953.33333333333, ans=0.125 +2024-07-28 01:09:27,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.93 vs. limit=22.5 +2024-07-28 01:09:28,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=88966.66666666667, ans=0.025 +2024-07-28 01:09:30,874 INFO [train.py:1114] (3/4) Epoch 7, batch 5400, loss[loss=0.2349, simple_loss=0.324, pruned_loss=0.07286, over 4366.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.305, pruned_loss=0.07036, over 929843.01 frames. ], batch size: 26, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:09:34,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=88980.0, ans=0.125 +2024-07-28 01:09:37,773 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.891e+01 6.586e+01 7.274e+01 1.067e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 01:09:39,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=88993.33333333333, ans=0.125 +2024-07-28 01:09:43,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89006.66666666667, ans=0.125 +2024-07-28 01:09:46,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89006.66666666667, ans=0.1 +2024-07-28 01:09:48,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89006.66666666667, ans=0.125 +2024-07-28 01:09:54,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.61 vs. limit=22.5 +2024-07-28 01:09:59,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=89033.33333333333, ans=0.0 +2024-07-28 01:10:03,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=89033.33333333333, ans=0.04949747468305833 +2024-07-28 01:10:04,292 INFO [train.py:1114] (3/4) Epoch 7, batch 5450, loss[loss=0.2114, simple_loss=0.2902, pruned_loss=0.06633, over 4715.00 frames. 
], tot_loss[loss=0.2232, simple_loss=0.3053, pruned_loss=0.07053, over 932620.92 frames. ], batch size: 11, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:10:45,384 INFO [train.py:1114] (3/4) Epoch 7, batch 5500, loss[loss=0.2562, simple_loss=0.3221, pruned_loss=0.09511, over 4351.00 frames. ], tot_loss[loss=0.2234, simple_loss=0.3047, pruned_loss=0.07101, over 930512.69 frames. ], batch size: 26, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:10:47,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.07 vs. limit=22.5 +2024-07-28 01:11:09,584 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 6.102e+01 6.764e+01 7.655e+01 1.015e+02, threshold=1.353e+02, percent-clipped=0.0 +2024-07-28 01:11:20,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=89140.0, ans=0.0 +2024-07-28 01:11:37,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=12.0 +2024-07-28 01:12:22,823 INFO [train.py:1114] (3/4) Epoch 7, batch 5550, loss[loss=0.2034, simple_loss=0.2809, pruned_loss=0.06299, over 4701.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.304, pruned_loss=0.07082, over 932891.68 frames. ], batch size: 12, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:12:22,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89180.0, ans=0.1 +2024-07-28 01:12:26,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.22 vs. limit=15.0 +2024-07-28 01:12:38,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=89206.66666666667, ans=0.0 +2024-07-28 01:12:49,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=89220.0, ans=0.125 +2024-07-28 01:13:03,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=89233.33333333333, ans=0.025 +2024-07-28 01:13:06,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.27 vs. limit=12.0 +2024-07-28 01:13:07,183 INFO [train.py:1114] (3/4) Epoch 7, batch 5600, loss[loss=0.2314, simple_loss=0.3184, pruned_loss=0.07223, over 4741.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3035, pruned_loss=0.07024, over 934314.46 frames. 
], batch size: 14, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:13:07,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=89246.66666666667, ans=0.0 +2024-07-28 01:13:10,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=89246.66666666667, ans=0.2 +2024-07-28 01:13:14,833 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 6.099e+01 6.958e+01 8.233e+01 1.047e+02, threshold=1.392e+02, percent-clipped=0.0 +2024-07-28 01:13:25,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=89273.33333333333, ans=0.0 +2024-07-28 01:13:26,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=89273.33333333333, ans=22.5 +2024-07-28 01:13:31,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=89286.66666666667, ans=0.0 +2024-07-28 01:13:37,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=89300.0, ans=0.125 +2024-07-28 01:13:42,968 INFO [train.py:1114] (3/4) Epoch 7, batch 5650, loss[loss=0.2216, simple_loss=0.3083, pruned_loss=0.06748, over 4597.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.3025, pruned_loss=0.07003, over 937165.89 frames. ], batch size: 21, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:13:44,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.74 vs. limit=15.0 +2024-07-28 01:13:45,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.82 vs. limit=12.0 +2024-07-28 01:13:49,840 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:13:54,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-28 01:13:57,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=89340.0, ans=0.0 +2024-07-28 01:14:05,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=89353.33333333333, ans=0.125 +2024-07-28 01:14:05,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=89353.33333333333, ans=0.125 +2024-07-28 01:14:09,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89366.66666666667, ans=0.1 +2024-07-28 01:14:10,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=89366.66666666667, ans=0.125 +2024-07-28 01:14:16,567 INFO [train.py:1114] (3/4) Epoch 7, batch 5700, loss[loss=0.1899, simple_loss=0.2757, pruned_loss=0.05206, over 4689.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3024, pruned_loss=0.07001, over 938473.09 frames. 
], batch size: 13, lr: 1.07e-02, grad_scale: 32.0 +2024-07-28 01:14:23,478 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.798e+01 6.210e+01 7.158e+01 1.197e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 01:14:28,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=89393.33333333333, ans=0.5 +2024-07-28 01:14:35,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.98 vs. limit=22.5 +2024-07-28 01:14:38,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=89420.0, ans=0.0 +2024-07-28 01:14:50,062 INFO [train.py:1114] (3/4) Epoch 7, batch 5750, loss[loss=0.2267, simple_loss=0.3226, pruned_loss=0.06545, over 4701.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3043, pruned_loss=0.07052, over 938476.04 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:14:55,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.03 vs. limit=6.0 +2024-07-28 01:15:19,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=89500.0, ans=0.1 +2024-07-28 01:15:23,592 INFO [train.py:1114] (3/4) Epoch 7, batch 5800, loss[loss=0.2578, simple_loss=0.3235, pruned_loss=0.09608, over 4718.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3051, pruned_loss=0.07065, over 937582.91 frames. ], batch size: 19, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:15:31,063 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 6.066e+01 6.586e+01 7.704e+01 1.621e+02, threshold=1.317e+02, percent-clipped=1.0 +2024-07-28 01:15:32,217 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.20 vs. limit=15.0 +2024-07-28 01:15:52,110 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:15:57,446 INFO [train.py:1114] (3/4) Epoch 7, batch 5850, loss[loss=0.2885, simple_loss=0.3653, pruned_loss=0.1059, over 4583.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3053, pruned_loss=0.0704, over 938161.36 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:16:12,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=89606.66666666667, ans=0.125 +2024-07-28 01:16:15,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.17 vs. limit=22.5 +2024-07-28 01:16:17,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-28 01:16:20,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.86 vs. 
limit=10.0 +2024-07-28 01:16:21,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=89620.0, ans=0.125 +2024-07-28 01:16:24,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0 +2024-07-28 01:16:26,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=89633.33333333333, ans=0.125 +2024-07-28 01:16:32,045 INFO [train.py:1114] (3/4) Epoch 7, batch 5900, loss[loss=0.2413, simple_loss=0.3252, pruned_loss=0.07872, over 4697.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.305, pruned_loss=0.07056, over 938177.69 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:16:38,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-28 01:16:39,541 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 6.380e+01 7.370e+01 9.045e+01 1.525e+02, threshold=1.474e+02, percent-clipped=5.0 +2024-07-28 01:16:44,514 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:16:52,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=89686.66666666667, ans=0.125 +2024-07-28 01:17:07,214 INFO [train.py:1114] (3/4) Epoch 7, batch 5950, loss[loss=0.2269, simple_loss=0.3117, pruned_loss=0.07099, over 4676.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3048, pruned_loss=0.07042, over 940062.78 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 16.0 +2024-07-28 01:17:10,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89713.33333333333, ans=0.125 +2024-07-28 01:17:15,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.03 vs. limit=10.0 +2024-07-28 01:17:18,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=89726.66666666667, ans=0.125 +2024-07-28 01:17:31,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=89753.33333333333, ans=0.025 +2024-07-28 01:17:35,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.79 vs. limit=12.0 +2024-07-28 01:17:42,384 INFO [train.py:1114] (3/4) Epoch 7, batch 6000, loss[loss=0.2354, simple_loss=0.3213, pruned_loss=0.07473, over 4273.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3052, pruned_loss=0.07076, over 937356.69 frames. ], batch size: 25, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:17:42,385 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 01:17:49,390 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.2604, 3.1073, 2.4407, 2.4065], device='cuda:3') +2024-07-28 01:17:54,544 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.1857, simple_loss=0.2893, pruned_loss=0.04109, over 944034.00 frames. 
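[Editor's note] The recurring "ScheduledFloat: name=..., batch_count=..., ans=..." records in this log report hyperparameters (dropout probabilities, skip rates, balancer bounds) that are annealed as a function of the global batch count. As a rough illustration of the mechanism only — a minimal sketch, not the actual icefall/zipformer ScheduledFloat implementation; the class name, schedule points, and example values below are assumptions — such a schedule can be modelled as piecewise-linear interpolation over batch_count:

# Illustrative sketch: a float hyperparameter scheduled piecewise-linearly on batch count.
# Assumption: this mirrors the spirit of the "ScheduledFloat: name=..., batch_count=..., ans=..."
# records above; it is NOT the icefall implementation, and the schedule points are invented.
import bisect

class PiecewiseLinearFloat:
    def __init__(self, points):
        # points: list of (batch_count, value) pairs, sorted by batch_count
        self.xs = [p[0] for p in points]
        self.ys = [p[1] for p in points]

    def __call__(self, batch_count: float) -> float:
        # Clamp outside the schedule range, interpolate linearly inside it.
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count) - 1
        x0, x1 = self.xs[i], self.xs[i + 1]
        y0, y1 = self.ys[i], self.ys[i + 1]
        t = (batch_count - x0) / (x1 - x0)
        return y0 + t * (y1 - y0)

# Hypothetical example: a skip rate decaying from 0.1 to 0.035 over the first 20k batches.
skip_rate = PiecewiseLinearFloat([(0.0, 0.1), (20000.0, 0.035)])
print(skip_rate(88180.0))  # -> 0.035

With a schedule like [(0, 0.1), (20000, 0.035)], any batch_count past the last point returns the final value, which is consistent with the constant ans=0.035 reported for bypass.skip_rate at batch_count≈88180 in the records above.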
+2024-07-28 01:17:54,548 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 01:18:02,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.96 vs. limit=15.0 +2024-07-28 01:18:03,853 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.859e+01 6.415e+01 7.407e+01 1.156e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 01:18:10,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=89793.33333333333, ans=0.0 +2024-07-28 01:18:25,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=15.0 +2024-07-28 01:18:31,893 INFO [train.py:1114] (3/4) Epoch 7, batch 6050, loss[loss=0.2289, simple_loss=0.2938, pruned_loss=0.08203, over 4780.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3054, pruned_loss=0.07127, over 938409.85 frames. ], batch size: 12, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:18:32,783 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:18:34,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=89846.66666666667, ans=0.5 +2024-07-28 01:18:41,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=89860.0, ans=0.0 +2024-07-28 01:18:48,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=89873.33333333333, ans=0.1 +2024-07-28 01:19:04,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=89913.33333333333, ans=0.1 +2024-07-28 01:19:05,216 INFO [train.py:1114] (3/4) Epoch 7, batch 6100, loss[loss=0.213, simple_loss=0.3184, pruned_loss=0.05379, over 4674.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3049, pruned_loss=0.07059, over 937905.97 frames. ], batch size: 15, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:19:12,437 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.958e+01 6.611e+01 7.776e+01 1.081e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 01:19:13,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=89926.66666666667, ans=0.025 +2024-07-28 01:19:15,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=89926.66666666667, ans=0.125 +2024-07-28 01:19:17,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=89926.66666666667, ans=0.125 +2024-07-28 01:19:21,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=89940.0, ans=0.125 +2024-07-28 01:19:37,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=89966.66666666667, ans=0.0 +2024-07-28 01:19:38,547 INFO [train.py:1114] (3/4) Epoch 7, batch 6150, loss[loss=0.3021, simple_loss=0.3634, pruned_loss=0.1204, over 3364.00 frames. 
], tot_loss[loss=0.2233, simple_loss=0.3051, pruned_loss=0.07076, over 936908.50 frames. ], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:19:42,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.26 vs. limit=15.0 +2024-07-28 01:19:50,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=89993.33333333333, ans=0.0 +2024-07-28 01:19:56,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=90006.66666666667, ans=0.125 +2024-07-28 01:20:04,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=90020.0, ans=0.0 +2024-07-28 01:20:15,021 INFO [train.py:1114] (3/4) Epoch 7, batch 6200, loss[loss=0.2005, simple_loss=0.2872, pruned_loss=0.05688, over 4738.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3043, pruned_loss=0.07046, over 936333.16 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:20:22,606 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.692e+01 6.027e+01 6.497e+01 7.393e+01 1.206e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 01:20:22,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=90060.0, ans=0.2 +2024-07-28 01:20:30,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=90073.33333333333, ans=0.125 +2024-07-28 01:20:48,962 INFO [train.py:1114] (3/4) Epoch 7, batch 6250, loss[loss=0.2225, simple_loss=0.3096, pruned_loss=0.06773, over 4812.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3047, pruned_loss=0.0708, over 933150.65 frames. ], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:20:55,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90126.66666666667, ans=0.125 +2024-07-28 01:20:56,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=90126.66666666667, ans=0.2 +2024-07-28 01:21:11,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.65 vs. limit=22.5 +2024-07-28 01:21:13,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.28 vs. limit=22.5 +2024-07-28 01:21:13,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.44 vs. limit=22.5 +2024-07-28 01:21:16,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=90166.66666666667, ans=0.07 +2024-07-28 01:21:19,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90166.66666666667, ans=0.1 +2024-07-28 01:21:22,798 INFO [train.py:1114] (3/4) Epoch 7, batch 6300, loss[loss=0.1905, simple_loss=0.2773, pruned_loss=0.05188, over 4556.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3051, pruned_loss=0.07108, over 929837.07 frames. 
], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:21:28,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=90180.0, ans=0.04949747468305833 +2024-07-28 01:21:29,895 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.979e+01 7.188e+01 8.735e+01 1.314e+02, threshold=1.438e+02, percent-clipped=1.0 +2024-07-28 01:21:31,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=90193.33333333333, ans=0.125 +2024-07-28 01:21:32,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=90193.33333333333, ans=0.0 +2024-07-28 01:21:36,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90206.66666666667, ans=0.125 +2024-07-28 01:21:44,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=90220.0, ans=0.125 +2024-07-28 01:21:49,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=90233.33333333333, ans=0.125 +2024-07-28 01:21:54,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=90233.33333333333, ans=0.125 +2024-07-28 01:21:55,883 INFO [train.py:1114] (3/4) Epoch 7, batch 6350, loss[loss=0.2458, simple_loss=0.3257, pruned_loss=0.08296, over 4529.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3047, pruned_loss=0.0706, over 933869.60 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:22:02,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=90260.0, ans=0.2 +2024-07-28 01:22:04,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=16.19 vs. limit=15.0 +2024-07-28 01:22:13,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=24.83 vs. limit=22.5 +2024-07-28 01:22:14,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=90273.33333333333, ans=0.0 +2024-07-28 01:22:19,898 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.87 vs. limit=12.0 +2024-07-28 01:22:29,301 INFO [train.py:1114] (3/4) Epoch 7, batch 6400, loss[loss=0.2612, simple_loss=0.3364, pruned_loss=0.09304, over 4634.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3044, pruned_loss=0.07057, over 935621.34 frames. 
], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:22:33,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=90313.33333333333, ans=0.125 +2024-07-28 01:22:34,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90313.33333333333, ans=0.125 +2024-07-28 01:22:36,604 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.990e+01 6.724e+01 8.012e+01 1.042e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-28 01:22:37,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=90326.66666666667, ans=0.2 +2024-07-28 01:22:44,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=90340.0, ans=0.0 +2024-07-28 01:22:55,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.47 vs. limit=15.0 +2024-07-28 01:23:00,544 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.44 vs. limit=15.0 +2024-07-28 01:23:00,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.31 vs. limit=10.0 +2024-07-28 01:23:06,109 INFO [train.py:1114] (3/4) Epoch 7, batch 6450, loss[loss=0.2223, simple_loss=0.31, pruned_loss=0.06734, over 4577.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3046, pruned_loss=0.07049, over 939443.99 frames. ], batch size: 21, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:23:07,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90380.0, ans=0.1 +2024-07-28 01:23:25,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=90406.66666666667, ans=0.125 +2024-07-28 01:23:32,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90420.0, ans=0.1 +2024-07-28 01:23:33,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=90420.0, ans=0.125 +2024-07-28 01:23:34,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=90420.0, ans=0.125 +2024-07-28 01:23:35,165 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 01:23:40,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=15.0 +2024-07-28 01:23:45,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=90433.33333333333, ans=0.125 +2024-07-28 01:23:46,191 INFO [train.py:1114] (3/4) Epoch 7, batch 6500, loss[loss=0.269, simple_loss=0.3252, pruned_loss=0.1064, over 3245.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3033, pruned_loss=0.06912, over 940395.10 frames. 
], batch size: 35, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:23:57,752 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.820e+01 6.453e+01 7.206e+01 1.081e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 01:24:06,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.88 vs. limit=10.0 +2024-07-28 01:24:07,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90460.0, ans=0.1 +2024-07-28 01:24:10,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=90473.33333333333, ans=0.125 +2024-07-28 01:24:10,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=90473.33333333333, ans=0.125 +2024-07-28 01:24:12,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=13.63 vs. limit=15.0 +2024-07-28 01:24:14,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=90486.66666666667, ans=0.125 +2024-07-28 01:24:16,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=90486.66666666667, ans=0.125 +2024-07-28 01:24:30,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=90486.66666666667, ans=0.025 +2024-07-28 01:24:30,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=90486.66666666667, ans=0.0 +2024-07-28 01:24:30,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=90486.66666666667, ans=0.2 +2024-07-28 01:24:31,882 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:24:38,482 INFO [train.py:1114] (3/4) Epoch 7, batch 6550, loss[loss=0.2084, simple_loss=0.2931, pruned_loss=0.06184, over 4813.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3034, pruned_loss=0.06905, over 943526.05 frames. ], batch size: 11, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:24:53,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=90540.0, ans=0.125 +2024-07-28 01:25:00,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.23 vs. limit=10.0 +2024-07-28 01:25:13,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=90553.33333333333, ans=0.0 +2024-07-28 01:25:21,616 INFO [train.py:1114] (3/4) Epoch 7, batch 6600, loss[loss=0.1966, simple_loss=0.2893, pruned_loss=0.05192, over 4933.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3039, pruned_loss=0.06909, over 945379.76 frames. 
], batch size: 14, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:25:29,263 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 6.007e+01 7.132e+01 8.613e+01 1.294e+02, threshold=1.426e+02, percent-clipped=1.0 +2024-07-28 01:25:33,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.37 vs. limit=22.5 +2024-07-28 01:25:40,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=90606.66666666667, ans=0.025 +2024-07-28 01:25:55,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=90633.33333333333, ans=0.0 +2024-07-28 01:25:56,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=90633.33333333333, ans=0.125 +2024-07-28 01:25:57,364 INFO [train.py:1114] (3/4) Epoch 7, batch 6650, loss[loss=0.2159, simple_loss=0.2876, pruned_loss=0.07212, over 4655.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3029, pruned_loss=0.06919, over 944109.90 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:26:00,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=90646.66666666667, ans=0.125 +2024-07-28 01:26:06,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=90660.0, ans=0.125 +2024-07-28 01:26:14,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=90673.33333333333, ans=0.0 +2024-07-28 01:26:14,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=90673.33333333333, ans=0.125 +2024-07-28 01:26:16,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=90673.33333333333, ans=0.1 +2024-07-28 01:26:26,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=90700.0, ans=0.025 +2024-07-28 01:26:29,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=90700.0, ans=0.125 +2024-07-28 01:26:33,326 INFO [train.py:1114] (3/4) Epoch 7, batch 6700, loss[loss=0.2215, simple_loss=0.3179, pruned_loss=0.06258, over 4678.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3031, pruned_loss=0.06905, over 942689.08 frames. 
], batch size: 19, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:26:33,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=90713.33333333333, ans=0.125 +2024-07-28 01:26:40,646 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.203e+01 5.978e+01 6.873e+01 8.305e+01 1.151e+02, threshold=1.375e+02, percent-clipped=0.0 +2024-07-28 01:26:50,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90740.0, ans=0.1 +2024-07-28 01:27:01,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=90766.66666666667, ans=0.0 +2024-07-28 01:27:07,208 INFO [train.py:1114] (3/4) Epoch 7, batch 6750, loss[loss=0.2307, simple_loss=0.3126, pruned_loss=0.07445, over 4276.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.3024, pruned_loss=0.06861, over 940406.35 frames. ], batch size: 25, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:27:10,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.93 vs. limit=15.0 +2024-07-28 01:27:19,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.05 vs. limit=22.5 +2024-07-28 01:27:20,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=90806.66666666667, ans=0.2 +2024-07-28 01:27:26,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=90820.0, ans=0.1 +2024-07-28 01:27:29,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.69 vs. limit=22.5 +2024-07-28 01:27:32,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=90820.0, ans=0.2 +2024-07-28 01:27:37,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=90833.33333333333, ans=0.125 +2024-07-28 01:27:38,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=90833.33333333333, ans=0.025 +2024-07-28 01:27:40,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.67 vs. limit=22.5 +2024-07-28 01:27:40,991 INFO [train.py:1114] (3/4) Epoch 7, batch 6800, loss[loss=0.2095, simple_loss=0.2894, pruned_loss=0.06481, over 4636.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.303, pruned_loss=0.06869, over 938957.45 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:27:41,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=90846.66666666667, ans=0.125 +2024-07-28 01:27:48,115 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.741e+01 6.354e+01 7.079e+01 9.743e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 01:27:54,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.68 vs. 
limit=15.0 +2024-07-28 01:27:54,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=90873.33333333333, ans=0.125 +2024-07-28 01:27:55,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=90873.33333333333, ans=0.1 +2024-07-28 01:28:12,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=90900.0, ans=0.025 +2024-07-28 01:28:14,117 INFO [train.py:1114] (3/4) Epoch 7, batch 6850, loss[loss=0.1978, simple_loss=0.294, pruned_loss=0.05077, over 4694.00 frames. ], tot_loss[loss=0.22, simple_loss=0.3028, pruned_loss=0.06858, over 940527.60 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:28:29,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=90940.0, ans=0.125 +2024-07-28 01:28:36,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.12 vs. limit=15.0 +2024-07-28 01:28:40,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=90966.66666666667, ans=0.2 +2024-07-28 01:28:42,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=90966.66666666667, ans=0.0 +2024-07-28 01:28:45,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.92 vs. limit=10.0 +2024-07-28 01:28:48,216 INFO [train.py:1114] (3/4) Epoch 7, batch 6900, loss[loss=0.1851, simple_loss=0.2758, pruned_loss=0.04719, over 4957.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3022, pruned_loss=0.06841, over 942478.30 frames. ], batch size: 13, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:28:48,721 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.65 vs. limit=10.0 +2024-07-28 01:28:49,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=90980.0, ans=0.125 +2024-07-28 01:28:51,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.22 vs. 
limit=15.0 +2024-07-28 01:28:51,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=90980.0, ans=0.0 +2024-07-28 01:28:56,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=90993.33333333333, ans=0.2 +2024-07-28 01:28:57,320 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.878e+01 5.899e+01 6.510e+01 7.129e+01 1.062e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 01:29:09,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91006.66666666667, ans=0.1 +2024-07-28 01:29:11,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=91020.0, ans=0.0 +2024-07-28 01:29:17,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=91033.33333333333, ans=0.125 +2024-07-28 01:29:22,865 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:29:25,540 INFO [train.py:1114] (3/4) Epoch 7, batch 6950, loss[loss=0.2219, simple_loss=0.2993, pruned_loss=0.07226, over 4522.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.302, pruned_loss=0.06811, over 939835.23 frames. ], batch size: 10, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:29:26,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91046.66666666667, ans=0.125 +2024-07-28 01:29:30,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=91046.66666666667, ans=0.0 +2024-07-28 01:29:52,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=15.0 +2024-07-28 01:30:02,402 INFO [train.py:1114] (3/4) Epoch 7, batch 7000, loss[loss=0.2271, simple_loss=0.311, pruned_loss=0.07158, over 4619.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.3012, pruned_loss=0.06849, over 938581.13 frames. ], batch size: 17, lr: 1.06e-02, grad_scale: 32.0 +2024-07-28 01:30:02,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=91113.33333333333, ans=0.0 +2024-07-28 01:30:09,589 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.067e+01 5.877e+01 6.787e+01 8.210e+01 1.500e+02, threshold=1.357e+02, percent-clipped=1.0 +2024-07-28 01:30:35,445 INFO [train.py:1114] (3/4) Epoch 7, batch 7050, loss[loss=0.218, simple_loss=0.3074, pruned_loss=0.06433, over 4694.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.301, pruned_loss=0.06779, over 941804.09 frames. ], batch size: 19, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:30:35,659 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:30:39,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=91180.0, ans=0.0 +2024-07-28 01:30:48,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.27 vs. 
limit=22.5 +2024-07-28 01:30:51,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=91206.66666666667, ans=0.2 +2024-07-28 01:30:57,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=91220.0, ans=0.1 +2024-07-28 01:31:00,071 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:31:00,678 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:31:09,040 INFO [train.py:1114] (3/4) Epoch 7, batch 7100, loss[loss=0.237, simple_loss=0.3278, pruned_loss=0.07313, over 4799.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.3015, pruned_loss=0.06848, over 936401.37 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:31:16,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=91260.0, ans=0.125 +2024-07-28 01:31:16,783 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.674e+01 6.634e+01 7.600e+01 1.129e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 01:31:18,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91260.0, ans=0.1 +2024-07-28 01:31:22,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.09 vs. limit=22.5 +2024-07-28 01:31:27,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0 +2024-07-28 01:31:39,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=91300.0, ans=0.0 +2024-07-28 01:31:40,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.66 vs. limit=22.5 +2024-07-28 01:31:41,774 INFO [train.py:1114] (3/4) Epoch 7, batch 7150, loss[loss=0.2619, simple_loss=0.3442, pruned_loss=0.0898, over 4510.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2998, pruned_loss=0.06743, over 937377.56 frames. ], batch size: 21, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:31:43,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91313.33333333333, ans=0.1 +2024-07-28 01:31:59,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-28 01:32:14,586 INFO [train.py:1114] (3/4) Epoch 7, batch 7200, loss[loss=0.2517, simple_loss=0.3438, pruned_loss=0.07978, over 4791.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.3017, pruned_loss=0.06832, over 937733.09 frames. 
], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:32:22,359 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.818e+01 5.919e+01 6.755e+01 7.806e+01 1.038e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 01:32:23,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=91393.33333333333, ans=0.125 +2024-07-28 01:32:28,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=91406.66666666667, ans=0.04949747468305833 +2024-07-28 01:32:32,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=91406.66666666667, ans=0.2 +2024-07-28 01:32:32,894 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:32:35,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.35 vs. limit=22.5 +2024-07-28 01:32:45,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=91433.33333333333, ans=0.2 +2024-07-28 01:32:49,499 INFO [train.py:1114] (3/4) Epoch 7, batch 7250, loss[loss=0.1984, simple_loss=0.2809, pruned_loss=0.0579, over 4866.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3017, pruned_loss=0.06848, over 939382.67 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:32:56,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.20 vs. limit=12.0 +2024-07-28 01:33:12,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0 +2024-07-28 01:33:18,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.64 vs. limit=15.0 +2024-07-28 01:33:22,427 INFO [train.py:1114] (3/4) Epoch 7, batch 7300, loss[loss=0.2072, simple_loss=0.2977, pruned_loss=0.05832, over 4868.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.3014, pruned_loss=0.06811, over 939920.62 frames. 
], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:33:25,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91513.33333333333, ans=0.125 +2024-07-28 01:33:26,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91513.33333333333, ans=0.1 +2024-07-28 01:33:30,223 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.315e+01 6.274e+01 7.077e+01 8.324e+01 1.199e+02, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 01:33:30,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=91526.66666666667, ans=0.125 +2024-07-28 01:33:30,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=91526.66666666667, ans=0.125 +2024-07-28 01:33:37,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91540.0, ans=0.125 +2024-07-28 01:33:47,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=91566.66666666667, ans=0.5 +2024-07-28 01:33:54,592 INFO [train.py:1114] (3/4) Epoch 7, batch 7350, loss[loss=0.2157, simple_loss=0.315, pruned_loss=0.05818, over 4639.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.3015, pruned_loss=0.06792, over 939347.52 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:34:09,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=91606.66666666667, ans=0.125 +2024-07-28 01:34:10,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.82 vs. limit=6.0 +2024-07-28 01:34:14,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=91620.0, ans=0.0 +2024-07-28 01:34:15,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=91620.0, ans=0.0 +2024-07-28 01:34:18,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=12.0 +2024-07-28 01:34:19,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=91620.0, ans=0.1 +2024-07-28 01:34:27,594 INFO [train.py:1114] (3/4) Epoch 7, batch 7400, loss[loss=0.2477, simple_loss=0.3344, pruned_loss=0.0805, over 4693.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.301, pruned_loss=0.06768, over 940577.43 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:34:32,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. 
limit=15.0 +2024-07-28 01:34:35,812 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.824e+01 5.881e+01 6.822e+01 8.435e+01 1.377e+02, threshold=1.364e+02, percent-clipped=0.0 +2024-07-28 01:34:42,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=91673.33333333333, ans=0.1 +2024-07-28 01:34:44,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=91673.33333333333, ans=0.2 +2024-07-28 01:34:51,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=91686.66666666667, ans=0.015 +2024-07-28 01:35:01,068 INFO [train.py:1114] (3/4) Epoch 7, batch 7450, loss[loss=0.1766, simple_loss=0.2527, pruned_loss=0.0503, over 4623.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2992, pruned_loss=0.06705, over 937862.70 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:35:03,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=12.0 +2024-07-28 01:35:33,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=91780.0, ans=0.07 +2024-07-28 01:35:34,020 INFO [train.py:1114] (3/4) Epoch 7, batch 7500, loss[loss=0.3147, simple_loss=0.3694, pruned_loss=0.13, over 3534.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.3006, pruned_loss=0.06846, over 935881.87 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:35:34,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=91780.0, ans=0.025 +2024-07-28 01:35:35,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0 +2024-07-28 01:35:35,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.30 vs. limit=22.5 +2024-07-28 01:35:36,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=91780.0, ans=0.125 +2024-07-28 01:35:39,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.38 vs. limit=22.5 +2024-07-28 01:35:41,659 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.899e+01 6.430e+01 7.635e+01 1.398e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 01:35:52,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.78 vs. limit=15.0 +2024-07-28 01:35:55,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=91820.0, ans=0.05 +2024-07-28 01:35:55,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.94 vs. 
limit=15.0 +2024-07-28 01:35:59,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91820.0, ans=0.1 +2024-07-28 01:36:02,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=91833.33333333333, ans=0.125 +2024-07-28 01:36:03,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=91833.33333333333, ans=0.125 +2024-07-28 01:36:07,907 INFO [train.py:1114] (3/4) Epoch 7, batch 7550, loss[loss=0.2162, simple_loss=0.3051, pruned_loss=0.06358, over 4610.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3029, pruned_loss=0.06974, over 935902.21 frames. ], batch size: 17, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:36:14,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.88 vs. limit=22.5 +2024-07-28 01:36:19,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.40 vs. limit=15.0 +2024-07-28 01:36:21,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=91873.33333333333, ans=0.0 +2024-07-28 01:36:27,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.63 vs. limit=22.5 +2024-07-28 01:36:37,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=91900.0, ans=0.1 +2024-07-28 01:36:40,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=91913.33333333333, ans=0.0 +2024-07-28 01:36:40,627 INFO [train.py:1114] (3/4) Epoch 7, batch 7600, loss[loss=0.2373, simple_loss=0.3139, pruned_loss=0.08033, over 4806.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.302, pruned_loss=0.06922, over 937736.65 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:36:42,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=91913.33333333333, ans=0.025 +2024-07-28 01:36:46,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=91913.33333333333, ans=0.0 +2024-07-28 01:36:47,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=91913.33333333333, ans=0.125 +2024-07-28 01:36:50,218 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.825e+01 5.658e+01 6.042e+01 7.178e+01 9.793e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 01:36:50,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=91926.66666666667, ans=0.125 +2024-07-28 01:37:00,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. 
limit=10.0 +2024-07-28 01:37:02,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=91953.33333333333, ans=0.0 +2024-07-28 01:37:05,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=91953.33333333333, ans=10.0 +2024-07-28 01:37:05,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=91953.33333333333, ans=0.125 +2024-07-28 01:37:06,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.75 vs. limit=6.0 +2024-07-28 01:37:07,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=91953.33333333333, ans=0.0 +2024-07-28 01:37:08,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=91966.66666666667, ans=0.2 +2024-07-28 01:37:12,324 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.06 vs. limit=22.5 +2024-07-28 01:37:15,247 INFO [train.py:1114] (3/4) Epoch 7, batch 7650, loss[loss=0.1847, simple_loss=0.2696, pruned_loss=0.04987, over 4927.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3023, pruned_loss=0.06937, over 936644.86 frames. ], batch size: 12, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:37:15,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=91980.0, ans=0.125 +2024-07-28 01:37:15,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=91980.0, ans=0.1 +2024-07-28 01:37:49,600 INFO [train.py:1114] (3/4) Epoch 7, batch 7700, loss[loss=0.202, simple_loss=0.2976, pruned_loss=0.05323, over 4693.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3027, pruned_loss=0.06931, over 933958.64 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:37:52,257 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:37:56,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=92060.0, ans=0.125 +2024-07-28 01:37:57,207 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.942e+01 5.877e+01 6.503e+01 7.905e+01 1.085e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 01:38:14,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.88 vs. limit=15.0 +2024-07-28 01:38:21,610 INFO [train.py:1114] (3/4) Epoch 7, batch 7750, loss[loss=0.2287, simple_loss=0.3147, pruned_loss=0.07131, over 4930.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3043, pruned_loss=0.06998, over 935528.92 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:38:23,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=92113.33333333333, ans=0.05 +2024-07-28 01:38:38,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=92140.0, ans=0.0 +2024-07-28 01:38:38,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=92140.0, ans=0.025 +2024-07-28 01:38:59,297 INFO [train.py:1114] (3/4) Epoch 7, batch 7800, loss[loss=0.1964, simple_loss=0.3004, pruned_loss=0.04617, over 4657.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3041, pruned_loss=0.06965, over 937305.98 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:39:06,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=92193.33333333333, ans=0.125 +2024-07-28 01:39:06,959 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.790e+01 6.287e+01 7.177e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 01:39:08,005 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0 +2024-07-28 01:39:22,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.84 vs. limit=15.0 +2024-07-28 01:39:25,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-07-28 01:39:26,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.85 vs. limit=15.0 +2024-07-28 01:39:27,043 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=15.0 +2024-07-28 01:39:27,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-07-28 01:39:42,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.53 vs. limit=6.0 +2024-07-28 01:39:53,514 INFO [train.py:1114] (3/4) Epoch 7, batch 7850, loss[loss=0.1527, simple_loss=0.238, pruned_loss=0.03373, over 4522.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3041, pruned_loss=0.07008, over 936140.87 frames. ], batch size: 10, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:40:01,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.97 vs. 
limit=6.0 +2024-07-28 01:40:03,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92260.0, ans=0.1 +2024-07-28 01:40:06,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=92273.33333333333, ans=0.09899494936611666 +2024-07-28 01:40:16,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=92286.66666666667, ans=0.5 +2024-07-28 01:40:17,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=92286.66666666667, ans=10.0 +2024-07-28 01:40:31,972 INFO [train.py:1114] (3/4) Epoch 7, batch 7900, loss[loss=0.2075, simple_loss=0.3013, pruned_loss=0.05687, over 4876.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.305, pruned_loss=0.06968, over 933501.67 frames. ], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:40:32,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=92313.33333333333, ans=0.0 +2024-07-28 01:40:35,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.09 vs. limit=15.0 +2024-07-28 01:40:39,610 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 6.059e+01 6.486e+01 7.471e+01 1.043e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-28 01:40:40,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=92326.66666666667, ans=0.0 +2024-07-28 01:40:43,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=92326.66666666667, ans=0.125 +2024-07-28 01:40:53,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=92340.0, ans=0.125 +2024-07-28 01:40:59,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=92353.33333333333, ans=0.125 +2024-07-28 01:41:09,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=92366.66666666667, ans=0.125 +2024-07-28 01:41:11,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=92366.66666666667, ans=0.125 +2024-07-28 01:41:14,313 INFO [train.py:1114] (3/4) Epoch 7, batch 7950, loss[loss=0.2457, simple_loss=0.3082, pruned_loss=0.0916, over 3316.00 frames. ], tot_loss[loss=0.221, simple_loss=0.3042, pruned_loss=0.06894, over 935430.37 frames. ], batch size: 35, lr: 1.05e-02, grad_scale: 16.0 +2024-07-28 01:41:20,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=92393.33333333333, ans=0.125 +2024-07-28 01:41:21,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=92393.33333333333, ans=0.0 +2024-07-28 01:41:37,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.19 vs. 
limit=15.0 +2024-07-28 01:41:41,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=92433.33333333333, ans=0.2 +2024-07-28 01:41:44,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_abs, batch_count=92433.33333333333, ans=0.5 +2024-07-28 01:41:47,115 INFO [train.py:1114] (3/4) Epoch 7, batch 8000, loss[loss=0.2002, simple_loss=0.2832, pruned_loss=0.05862, over 4606.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.3032, pruned_loss=0.06867, over 934185.41 frames. ], batch size: 11, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:41:50,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=92446.66666666667, ans=0.125 +2024-07-28 01:41:51,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=92446.66666666667, ans=0.125 +2024-07-28 01:41:55,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.050e+01 6.079e+01 6.641e+01 7.975e+01 1.086e+02, threshold=1.328e+02, percent-clipped=0.0 +2024-07-28 01:41:57,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=92460.0, ans=0.125 +2024-07-28 01:41:58,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92460.0, ans=0.1 +2024-07-28 01:42:01,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=92473.33333333333, ans=0.0 +2024-07-28 01:42:04,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92473.33333333333, ans=0.125 +2024-07-28 01:42:04,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92473.33333333333, ans=0.1 +2024-07-28 01:42:04,777 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.08 vs. limit=6.0 +2024-07-28 01:42:10,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92486.66666666667, ans=0.1 +2024-07-28 01:42:11,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-07-28 01:42:11,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=92486.66666666667, ans=0.025 +2024-07-28 01:42:15,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=92500.0, ans=0.125 +2024-07-28 01:42:19,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92500.0, ans=0.125 +2024-07-28 01:42:20,851 INFO [train.py:1114] (3/4) Epoch 7, batch 8050, loss[loss=0.2321, simple_loss=0.325, pruned_loss=0.06961, over 4804.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3035, pruned_loss=0.06893, over 934221.04 frames. 
], batch size: 14, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:42:23,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=92513.33333333333, ans=0.0 +2024-07-28 01:42:23,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=92513.33333333333, ans=0.2 +2024-07-28 01:42:24,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92513.33333333333, ans=0.1 +2024-07-28 01:42:49,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=92566.66666666667, ans=0.125 +2024-07-28 01:42:51,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-07-28 01:42:51,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=92566.66666666667, ans=0.125 +2024-07-28 01:42:53,529 INFO [train.py:1114] (3/4) Epoch 7, batch 8100, loss[loss=0.2595, simple_loss=0.3307, pruned_loss=0.0942, over 4797.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.305, pruned_loss=0.06986, over 933711.07 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:42:54,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=92580.0, ans=0.0 +2024-07-28 01:43:01,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=92593.33333333333, ans=0.0 +2024-07-28 01:43:01,844 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 5.903e+01 6.479e+01 7.411e+01 1.026e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 01:43:07,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=92606.66666666667, ans=0.125 +2024-07-28 01:43:07,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.01 vs. limit=10.0 +2024-07-28 01:43:15,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=15.0 +2024-07-28 01:43:27,002 INFO [train.py:1114] (3/4) Epoch 7, batch 8150, loss[loss=0.2437, simple_loss=0.3321, pruned_loss=0.07764, over 4808.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.303, pruned_loss=0.06909, over 937160.75 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:43:27,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=92646.66666666667, ans=0.0 +2024-07-28 01:43:35,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.82 vs. 
limit=10.0 +2024-07-28 01:43:38,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92660.0, ans=0.1 +2024-07-28 01:43:41,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92673.33333333333, ans=0.1 +2024-07-28 01:43:52,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=92700.0, ans=0.125 +2024-07-28 01:44:00,493 INFO [train.py:1114] (3/4) Epoch 7, batch 8200, loss[loss=0.2397, simple_loss=0.3243, pruned_loss=0.07759, over 4792.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.3034, pruned_loss=0.06887, over 938351.21 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:44:04,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=92713.33333333333, ans=0.125 +2024-07-28 01:44:08,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92726.66666666667, ans=0.1 +2024-07-28 01:44:08,974 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.964e+01 6.053e+01 7.008e+01 8.416e+01 1.296e+02, threshold=1.402e+02, percent-clipped=1.0 +2024-07-28 01:44:13,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92726.66666666667, ans=0.1 +2024-07-28 01:44:22,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=92753.33333333333, ans=0.0 +2024-07-28 01:44:23,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0 +2024-07-28 01:44:24,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.48 vs. limit=6.0 +2024-07-28 01:44:34,200 INFO [train.py:1114] (3/4) Epoch 7, batch 8250, loss[loss=0.1786, simple_loss=0.2618, pruned_loss=0.04768, over 4892.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.3032, pruned_loss=0.06881, over 938432.83 frames. ], batch size: 13, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:44:39,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=92780.0, ans=0.2 +2024-07-28 01:44:42,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=92793.33333333333, ans=0.125 +2024-07-28 01:44:46,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.37 vs. limit=10.0 +2024-07-28 01:44:46,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=92806.66666666667, ans=0.0 +2024-07-28 01:44:48,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.16 vs. 
limit=15.0 +2024-07-28 01:44:52,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92806.66666666667, ans=0.1 +2024-07-28 01:45:05,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=92833.33333333333, ans=0.0 +2024-07-28 01:45:06,706 INFO [train.py:1114] (3/4) Epoch 7, batch 8300, loss[loss=0.1979, simple_loss=0.2858, pruned_loss=0.05495, over 4908.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.304, pruned_loss=0.06918, over 938141.17 frames. ], batch size: 15, lr: 1.05e-02, grad_scale: 32.0 +2024-07-28 01:45:11,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=92846.66666666667, ans=0.1 +2024-07-28 01:45:12,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92860.0, ans=0.1 +2024-07-28 01:45:15,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 6.122e+01 6.815e+01 8.383e+01 1.214e+02, threshold=1.363e+02, percent-clipped=0.0 +2024-07-28 01:45:17,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=92860.0, ans=0.1 +2024-07-28 01:45:34,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=92900.0, ans=0.125 +2024-07-28 01:45:36,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92900.0, ans=0.1 +2024-07-28 01:45:38,734 INFO [train.py:1114] (3/4) Epoch 7, batch 8350, loss[loss=0.2437, simple_loss=0.3243, pruned_loss=0.08153, over 4807.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3036, pruned_loss=0.06932, over 941199.63 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:45:40,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=92913.33333333333, ans=0.1 +2024-07-28 01:45:40,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=92913.33333333333, ans=0.125 +2024-07-28 01:45:51,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-07-28 01:45:53,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=92940.0, ans=0.2 +2024-07-28 01:45:54,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=92940.0, ans=0.0 +2024-07-28 01:45:57,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0 +2024-07-28 01:46:02,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=92953.33333333333, ans=6.0 +2024-07-28 01:46:11,400 INFO [train.py:1114] (3/4) Epoch 7, batch 8400, loss[loss=0.2088, simple_loss=0.2773, pruned_loss=0.07014, over 4776.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3041, pruned_loss=0.06962, over 940014.80 frames. 
], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:46:12,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.08 vs. limit=15.0 +2024-07-28 01:46:16,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=92980.0, ans=0.125 +2024-07-28 01:46:20,640 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.750e+01 5.850e+01 6.401e+01 7.146e+01 1.045e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 01:46:26,727 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:46:31,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=93020.0, ans=15.0 +2024-07-28 01:46:33,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=93020.0, ans=0.125 +2024-07-28 01:46:41,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.88 vs. limit=15.0 +2024-07-28 01:46:43,903 INFO [train.py:1114] (3/4) Epoch 7, batch 8450, loss[loss=0.2179, simple_loss=0.3041, pruned_loss=0.06584, over 4803.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3048, pruned_loss=0.06954, over 939046.82 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:46:51,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.06 vs. limit=15.0 +2024-07-28 01:47:05,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=93073.33333333333, ans=22.5 +2024-07-28 01:47:09,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93086.66666666667, ans=0.1 +2024-07-28 01:47:09,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=93086.66666666667, ans=0.125 +2024-07-28 01:47:16,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=93100.0, ans=0.125 +2024-07-28 01:47:19,788 INFO [train.py:1114] (3/4) Epoch 7, batch 8500, loss[loss=0.1852, simple_loss=0.2678, pruned_loss=0.0513, over 4620.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.3043, pruned_loss=0.0692, over 938983.05 frames. 
], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:47:21,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=93113.33333333333, ans=0.0 +2024-07-28 01:47:29,189 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.808e+01 5.964e+01 6.473e+01 7.597e+01 1.017e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-28 01:47:31,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=93126.66666666667, ans=0.125 +2024-07-28 01:47:31,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=93126.66666666667, ans=0.2 +2024-07-28 01:47:46,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93166.66666666667, ans=0.0 +2024-07-28 01:47:53,746 INFO [train.py:1114] (3/4) Epoch 7, batch 8550, loss[loss=0.1952, simple_loss=0.2813, pruned_loss=0.05448, over 4801.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.304, pruned_loss=0.06925, over 939939.53 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:47:57,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=93180.0, ans=0.0 +2024-07-28 01:48:00,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.32 vs. limit=22.5 +2024-07-28 01:48:04,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=93193.33333333333, ans=0.125 +2024-07-28 01:48:04,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=93193.33333333333, ans=0.025 +2024-07-28 01:48:06,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.86 vs. limit=6.0 +2024-07-28 01:48:09,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.85 vs. limit=12.0 +2024-07-28 01:48:11,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=93206.66666666667, ans=0.0 +2024-07-28 01:48:24,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.38 vs. limit=15.0 +2024-07-28 01:48:25,461 INFO [train.py:1114] (3/4) Epoch 7, batch 8600, loss[loss=0.2447, simple_loss=0.3232, pruned_loss=0.0831, over 4809.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3039, pruned_loss=0.06929, over 939807.23 frames. 
], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:48:30,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=93246.66666666667, ans=0.125 +2024-07-28 01:48:35,626 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.806e+01 6.089e+01 7.126e+01 9.182e+01 1.339e+02, threshold=1.425e+02, percent-clipped=2.0 +2024-07-28 01:48:43,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93273.33333333333, ans=0.125 +2024-07-28 01:48:44,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.91 vs. limit=15.0 +2024-07-28 01:48:56,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=93286.66666666667, ans=0.2 +2024-07-28 01:48:59,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=93300.0, ans=0.0 +2024-07-28 01:48:59,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=93300.0, ans=12.0 +2024-07-28 01:49:04,764 INFO [train.py:1114] (3/4) Epoch 7, batch 8650, loss[loss=0.2143, simple_loss=0.3039, pruned_loss=0.06234, over 4903.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.3029, pruned_loss=0.06822, over 940634.98 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:49:15,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93313.33333333333, ans=0.1 +2024-07-28 01:49:25,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=93340.0, ans=0.125 +2024-07-28 01:49:29,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=93340.0, ans=0.0 +2024-07-28 01:49:40,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=93366.66666666667, ans=0.125 +2024-07-28 01:49:46,863 INFO [train.py:1114] (3/4) Epoch 7, batch 8700, loss[loss=0.2071, simple_loss=0.3032, pruned_loss=0.05555, over 4747.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.3034, pruned_loss=0.069, over 938301.20 frames. 
], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:49:49,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=93380.0, ans=0.07 +2024-07-28 01:49:57,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=93393.33333333333, ans=0.2 +2024-07-28 01:49:58,262 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.695e+01 6.363e+01 6.862e+01 1.009e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 01:49:58,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=93393.33333333333, ans=0.125 +2024-07-28 01:50:04,278 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.554e-03 +2024-07-28 01:50:04,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=93406.66666666667, ans=0.2 +2024-07-28 01:50:05,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=93406.66666666667, ans=0.0 +2024-07-28 01:50:07,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=93406.66666666667, ans=0.125 +2024-07-28 01:50:21,414 INFO [train.py:1114] (3/4) Epoch 7, batch 8750, loss[loss=0.2274, simple_loss=0.3148, pruned_loss=0.06995, over 4682.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.304, pruned_loss=0.06976, over 936750.65 frames. ], batch size: 15, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:50:24,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=93446.66666666667, ans=0.125 +2024-07-28 01:50:42,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=93486.66666666667, ans=0.125 +2024-07-28 01:50:45,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93486.66666666667, ans=0.1 +2024-07-28 01:50:47,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.39 vs. limit=22.5 +2024-07-28 01:50:49,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.71 vs. limit=22.5 +2024-07-28 01:50:54,988 INFO [train.py:1114] (3/4) Epoch 7, batch 8800, loss[loss=0.2167, simple_loss=0.3021, pruned_loss=0.06567, over 4931.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.3048, pruned_loss=0.06945, over 937492.50 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:51:05,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.962e+01 6.661e+01 7.820e+01 1.016e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-28 01:51:13,091 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.91 vs. 
limit=12.0 +2024-07-28 01:51:16,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=93553.33333333333, ans=0.125 +2024-07-28 01:51:27,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=93566.66666666667, ans=0.0 +2024-07-28 01:51:28,562 INFO [train.py:1114] (3/4) Epoch 7, batch 8850, loss[loss=0.2466, simple_loss=0.3279, pruned_loss=0.08264, over 4453.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3039, pruned_loss=0.06966, over 932007.25 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:51:33,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=93580.0, ans=0.125 +2024-07-28 01:51:39,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.76 vs. limit=12.0 +2024-07-28 01:51:40,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93593.33333333333, ans=0.1 +2024-07-28 01:51:48,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=93620.0, ans=0.0 +2024-07-28 01:51:49,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93620.0, ans=0.1 +2024-07-28 01:51:56,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93633.33333333333, ans=0.1 +2024-07-28 01:51:59,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=93633.33333333333, ans=0.0 +2024-07-28 01:52:00,218 INFO [train.py:1114] (3/4) Epoch 7, batch 8900, loss[loss=0.1863, simple_loss=0.2762, pruned_loss=0.04817, over 4945.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3043, pruned_loss=0.06999, over 930237.76 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:52:02,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=93646.66666666667, ans=0.125 +2024-07-28 01:52:02,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=93646.66666666667, ans=0.125 +2024-07-28 01:52:07,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=93660.0, ans=0.1 +2024-07-28 01:52:08,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.33 vs. 
limit=15.0 +2024-07-28 01:52:09,252 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 6.236e+01 6.887e+01 8.483e+01 1.202e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 01:52:10,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=93660.0, ans=0.025 +2024-07-28 01:52:13,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=93673.33333333333, ans=0.1 +2024-07-28 01:52:18,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.50 vs. limit=6.0 +2024-07-28 01:52:32,126 INFO [train.py:1114] (3/4) Epoch 7, batch 8950, loss[loss=0.2387, simple_loss=0.3103, pruned_loss=0.08358, over 4475.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.3046, pruned_loss=0.07036, over 931095.94 frames. ], batch size: 21, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:52:44,490 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.30 vs. limit=22.5 +2024-07-28 01:52:48,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.40 vs. limit=15.0 +2024-07-28 01:52:49,313 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 01:52:59,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=93766.66666666667, ans=10.0 +2024-07-28 01:53:03,462 INFO [train.py:1114] (3/4) Epoch 7, batch 9000, loss[loss=0.2075, simple_loss=0.3005, pruned_loss=0.05727, over 4642.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.3031, pruned_loss=0.06967, over 933840.13 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:53:03,462 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 01:53:10,278 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.5611, 4.3333, 3.8327, 3.9862], device='cuda:3') +2024-07-28 01:53:11,425 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9813, 5.5596, 4.9723, 5.8358], device='cuda:3') +2024-07-28 01:53:15,550 INFO [train.py:1146] (3/4) Epoch 7, validation: loss=0.1831, simple_loss=0.2876, pruned_loss=0.03931, over 944034.00 frames. 
+2024-07-28 01:53:15,550 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 01:53:24,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=93793.33333333333, ans=0.125 +2024-07-28 01:53:25,271 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.089e+01 5.776e+01 6.458e+01 7.441e+01 1.035e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 01:53:39,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=93806.66666666667, ans=0.125 +2024-07-28 01:53:41,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=93806.66666666667, ans=0.0 +2024-07-28 01:53:42,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=93806.66666666667, ans=0.0 +2024-07-28 01:53:47,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.32 vs. limit=10.0 +2024-07-28 01:53:56,184 INFO [train.py:1114] (3/4) Epoch 7, batch 9050, loss[loss=0.2008, simple_loss=0.2803, pruned_loss=0.06061, over 4524.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.3018, pruned_loss=0.06893, over 934611.74 frames. ], batch size: 10, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:03,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=93860.0, ans=0.0 +2024-07-28 01:54:27,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=93913.33333333333, ans=0.0 +2024-07-28 01:54:28,088 INFO [train.py:1114] (3/4) Epoch 7, batch 9100, loss[loss=0.1963, simple_loss=0.2907, pruned_loss=0.05088, over 4932.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3014, pruned_loss=0.06866, over 937080.36 frames. ], batch size: 14, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:36,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=93926.66666666667, ans=10.0 +2024-07-28 01:54:37,212 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.668e+01 5.684e+01 6.462e+01 7.112e+01 1.033e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 01:54:43,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=93940.0, ans=0.125 +2024-07-28 01:54:51,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.39 vs. limit=15.0 +2024-07-28 01:54:53,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=93966.66666666667, ans=0.0 +2024-07-28 01:54:55,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=93966.66666666667, ans=0.0 +2024-07-28 01:54:57,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-28 01:54:59,363 INFO [train.py:1114] (3/4) Epoch 7, batch 9150, loss[loss=0.2036, simple_loss=0.3054, pruned_loss=0.05086, over 4806.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.3023, pruned_loss=0.06889, over 936096.57 frames. 
], batch size: 14, lr: 1.04e-02, grad_scale: 16.0 +2024-07-28 01:54:59,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93980.0, ans=0.1 +2024-07-28 01:55:02,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=93980.0, ans=0.1 +2024-07-28 01:55:09,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.97 vs. limit=15.0 +2024-07-28 01:55:12,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=94006.66666666667, ans=0.0 +2024-07-28 01:55:23,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=94020.0, ans=0.2 +2024-07-28 01:55:26,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=94020.0, ans=15.0 +2024-07-28 01:55:29,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.43 vs. limit=15.0 +2024-07-28 01:55:33,117 INFO [train.py:1114] (3/4) Epoch 7, batch 9200, loss[loss=0.2043, simple_loss=0.2814, pruned_loss=0.06364, over 4848.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.3018, pruned_loss=0.06843, over 938255.96 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:55:42,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.866e+01 6.542e+01 7.562e+01 1.078e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 01:55:54,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=94086.66666666667, ans=0.0 +2024-07-28 01:55:56,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=94086.66666666667, ans=0.04949747468305833 +2024-07-28 01:56:04,048 INFO [train.py:1114] (3/4) Epoch 7, batch 9250, loss[loss=0.2172, simple_loss=0.3093, pruned_loss=0.06255, over 4639.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.3023, pruned_loss=0.06832, over 939072.47 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:56:09,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=94126.66666666667, ans=0.025 +2024-07-28 01:56:13,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94126.66666666667, ans=0.1 +2024-07-28 01:56:16,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.03 vs. limit=10.0 +2024-07-28 01:56:35,475 INFO [train.py:1114] (3/4) Epoch 7, batch 9300, loss[loss=0.2303, simple_loss=0.3268, pruned_loss=0.0669, over 4770.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.3024, pruned_loss=0.06865, over 938923.25 frames. 
], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:56:36,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94180.0, ans=0.125 +2024-07-28 01:56:38,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.24 vs. limit=15.0 +2024-07-28 01:56:44,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.679e+01 6.402e+01 7.728e+01 1.178e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 01:56:52,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=94206.66666666667, ans=0.125 +2024-07-28 01:56:53,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=94206.66666666667, ans=0.0 +2024-07-28 01:56:55,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.65 vs. limit=6.0 +2024-07-28 01:57:07,393 INFO [train.py:1114] (3/4) Epoch 7, batch 9350, loss[loss=0.1762, simple_loss=0.2483, pruned_loss=0.05203, over 4796.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3026, pruned_loss=0.06918, over 935408.78 frames. ], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:57:16,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=94260.0, ans=0.025 +2024-07-28 01:57:25,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=94273.33333333333, ans=0.2 +2024-07-28 01:57:28,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=94286.66666666667, ans=0.125 +2024-07-28 01:57:31,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=94286.66666666667, ans=0.125 +2024-07-28 01:57:31,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=94300.0, ans=0.125 +2024-07-28 01:57:38,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94313.33333333333, ans=0.1 +2024-07-28 01:57:38,747 INFO [train.py:1114] (3/4) Epoch 7, batch 9400, loss[loss=0.2496, simple_loss=0.3195, pruned_loss=0.08987, over 4688.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.3032, pruned_loss=0.06945, over 933673.12 frames. ], batch size: 13, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:57:48,109 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.963e+01 5.906e+01 6.522e+01 7.564e+01 1.110e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 01:57:50,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=94340.0, ans=0.025 +2024-07-28 01:58:07,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=94366.66666666667, ans=15.0 +2024-07-28 01:58:09,839 INFO [train.py:1114] (3/4) Epoch 7, batch 9450, loss[loss=0.193, simple_loss=0.265, pruned_loss=0.06052, over 4802.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.3031, pruned_loss=0.06932, over 932789.30 frames. 
], batch size: 11, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:58:11,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.88 vs. limit=22.5 +2024-07-28 01:58:25,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=94406.66666666667, ans=0.125 +2024-07-28 01:58:25,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94406.66666666667, ans=0.125 +2024-07-28 01:58:34,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=94420.0, ans=0.0 +2024-07-28 01:58:34,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=94433.33333333333, ans=0.125 +2024-07-28 01:58:47,080 INFO [train.py:1114] (3/4) Epoch 7, batch 9500, loss[loss=0.1838, simple_loss=0.2592, pruned_loss=0.05417, over 4700.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.303, pruned_loss=0.06923, over 934729.37 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:58:56,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.893e+01 5.944e+01 6.483e+01 7.199e+01 9.045e+01, threshold=1.297e+02, percent-clipped=0.0 +2024-07-28 01:59:04,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.36 vs. limit=15.0 +2024-07-28 01:59:18,779 INFO [train.py:1114] (3/4) Epoch 7, batch 9550, loss[loss=0.196, simple_loss=0.2848, pruned_loss=0.05358, over 4777.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3025, pruned_loss=0.06927, over 931946.57 frames. ], batch size: 12, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:59:26,031 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.216e-01 +2024-07-28 01:59:36,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.66 vs. 
limit=22.5 +2024-07-28 01:59:37,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=94553.33333333333, ans=0.125 +2024-07-28 01:59:39,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=94553.33333333333, ans=0.1 +2024-07-28 01:59:39,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=94553.33333333333, ans=0.2 +2024-07-28 01:59:44,184 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.230e-01 +2024-07-28 01:59:45,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=94566.66666666667, ans=0.0 +2024-07-28 01:59:46,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=94566.66666666667, ans=0.05 +2024-07-28 01:59:48,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=94566.66666666667, ans=0.0 +2024-07-28 01:59:49,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=94580.0, ans=0.125 +2024-07-28 01:59:49,821 INFO [train.py:1114] (3/4) Epoch 7, batch 9600, loss[loss=0.3054, simple_loss=0.3647, pruned_loss=0.1231, over 3650.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.3026, pruned_loss=0.06919, over 931265.34 frames. ], batch size: 35, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 01:59:51,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.95 vs. limit=10.0 +2024-07-28 01:59:53,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=94580.0, ans=0.0 +2024-07-28 01:59:55,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-07-28 01:59:59,435 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.136e+01 6.787e+01 7.890e+01 1.161e+02, threshold=1.357e+02, percent-clipped=0.0 +2024-07-28 02:00:02,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-07-28 02:00:04,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=94606.66666666667, ans=0.125 +2024-07-28 02:00:17,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=94633.33333333333, ans=0.2 +2024-07-28 02:00:19,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=94633.33333333333, ans=0.125 +2024-07-28 02:00:21,638 INFO [train.py:1114] (3/4) Epoch 7, batch 9650, loss[loss=0.2087, simple_loss=0.2882, pruned_loss=0.06458, over 4833.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.3037, pruned_loss=0.06983, over 927450.31 frames. 
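A consistent pattern in the `optim.py:487` warnings above: the five numbers after `grad-norm quartiles` are the min/25%/50%/75%/max of recently observed gradient norms, and `threshold` is always `Clipping_scale` (2.0) times the logged median, e.g. 2.0 x 6.402e+01 = 1.280e+02 in the first warning of this stretch. A hedged sketch of that scheme (buffer size and bookkeeping are assumptions, not this recipe's exact optim.py code):

```python
# Hedged sketch of median-based gradient clipping consistent with the
# "grad-norm quartiles ... threshold=..." warnings: the threshold appears to
# be Clipping_scale times the median of recent gradient norms.
from collections import deque
import torch

class MedianGradClipper:
    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent global grad norms
        self.clipped = 0
        self.seen = 0

    def __call__(self, parameters):
        params = [p for p in parameters if p.grad is not None]
        norm = torch.norm(torch.stack([p.grad.norm() for p in params])).item()
        self.norms.append(norm)
        hist = sorted(self.norms)
        quartiles = [hist[int(q * (len(hist) - 1))]
                     for q in (0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.scale * quartiles[2]  # 2.0 x median, as in the log
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        return quartiles, threshold, 100.0 * self.clipped / self.seen
```

Under this reading, `percent-clipped` is the running share of batches whose norm exceeded the threshold; it sits at 0.0 through most of this section and ticks up only when an outlier batch lands (it reaches 4.0 at 02:11:08 below, where the max norm spikes to 1.725e+02).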
], batch size: 16, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 02:00:34,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=94673.33333333333, ans=0.125 +2024-07-28 02:00:39,933 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:00:44,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=94686.66666666667, ans=0.125 +2024-07-28 02:00:45,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=94686.66666666667, ans=0.0 +2024-07-28 02:00:46,907 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.52 vs. limit=15.0 +2024-07-28 02:00:52,826 INFO [train.py:1114] (3/4) Epoch 7, batch 9700, loss[loss=0.2406, simple_loss=0.3308, pruned_loss=0.07523, over 4284.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3048, pruned_loss=0.0701, over 925274.53 frames. ], batch size: 25, lr: 1.04e-02, grad_scale: 32.0 +2024-07-28 02:00:53,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=94713.33333333333, ans=0.2 +2024-07-28 02:00:54,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=94713.33333333333, ans=0.0 +2024-07-28 02:00:58,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 02:00:59,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94726.66666666667, ans=0.1 +2024-07-28 02:01:01,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=94726.66666666667, ans=0.125 +2024-07-28 02:01:02,009 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.092e+01 6.194e+01 6.881e+01 8.155e+01 1.257e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 02:01:15,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=94753.33333333333, ans=0.125 +2024-07-28 02:01:17,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=94766.66666666667, ans=0.125 +2024-07-28 02:01:18,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=94766.66666666667, ans=0.2 +2024-07-28 02:01:24,029 INFO [train.py:1114] (3/4) Epoch 7, batch 9750, loss[loss=0.2486, simple_loss=0.3385, pruned_loss=0.07935, over 4674.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3047, pruned_loss=0.06998, over 925856.08 frames. 
], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:01:25,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=94780.0, ans=0.125 +2024-07-28 02:01:31,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=94793.33333333333, ans=0.0 +2024-07-28 02:01:31,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=94793.33333333333, ans=0.2 +2024-07-28 02:01:36,087 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:01:42,272 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.49 vs. limit=22.5 +2024-07-28 02:01:44,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94820.0, ans=0.1 +2024-07-28 02:01:55,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=94833.33333333333, ans=0.0 +2024-07-28 02:01:56,352 INFO [train.py:1114] (3/4) Epoch 7, batch 9800, loss[loss=0.1769, simple_loss=0.2605, pruned_loss=0.04666, over 4715.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.3034, pruned_loss=0.06977, over 925524.95 frames. ], batch size: 12, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:05,938 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.759e+01 6.678e+01 8.256e+01 1.240e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:02:27,278 INFO [train.py:1114] (3/4) Epoch 7, batch 9850, loss[loss=0.1966, simple_loss=0.2808, pruned_loss=0.05625, over 4896.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3035, pruned_loss=0.06984, over 927518.47 frames. ], batch size: 15, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:02:27,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=94913.33333333333, ans=0.1 +2024-07-28 02:02:34,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=94926.66666666667, ans=0.0 +2024-07-28 02:02:38,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=94926.66666666667, ans=0.125 +2024-07-28 02:02:46,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=94953.33333333333, ans=0.07 +2024-07-28 02:02:50,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=94953.33333333333, ans=0.025 +2024-07-28 02:02:54,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=94966.66666666667, ans=10.0 +2024-07-28 02:02:57,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=94966.66666666667, ans=0.0 +2024-07-28 02:02:58,783 INFO [train.py:1114] (3/4) Epoch 7, batch 9900, loss[loss=0.2523, simple_loss=0.3379, pruned_loss=0.08335, over 4830.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3048, pruned_loss=0.07018, over 926784.44 frames. 
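The `Whitening` lines each compare a conditioning metric of some module's output covariance against a limit; as long as `metric` stays below `limit` (as in most entries here, e.g. 15.88 vs. 22.5) the module passes activations through unchanged, and only on excess does it push a corrective gradient. The exact metric is defined by the `Whiten` module in scaling.py; the eigenvalue-ratio stand-in below is an assumption used purely to illustrate what such a "distance from white" score measures:

```python
# Illustrative stand-in for the "Whitening: ... metric=X vs. limit=Y" lines:
# score how far a module's output covariance is from white (identity-like),
# computed per channel group. This eigenvalue-ratio metric is an assumption
# for illustration only, not the Whiten module's actual formula.
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    # x: (num_frames, num_channels), channels split evenly into groups
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n       # (groups, d, d)
    eigs = torch.linalg.eigvalsh(cov)                   # ascending per group
    # largest eigenvalue over the mean eigenvalue, averaged across groups:
    # modestly above 1 for white noise, large for strongly correlated channels
    return (eigs[:, -1] / eigs.mean(dim=1).clamp(min=1e-20)).mean().item()

print(whitening_metric(torch.randn(2000, 256), num_groups=1))
```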
], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:08,104 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.950e+01 6.593e+01 7.492e+01 1.029e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:03:09,447 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:03:09,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=94993.33333333333, ans=0.2 +2024-07-28 02:03:13,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=95006.66666666667, ans=0.125 +2024-07-28 02:03:21,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=95020.0, ans=0.1 +2024-07-28 02:03:29,237 INFO [train.py:1114] (3/4) Epoch 7, batch 9950, loss[loss=0.2096, simple_loss=0.2753, pruned_loss=0.07197, over 4502.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3056, pruned_loss=0.07078, over 929064.34 frames. ], batch size: 10, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:03:35,544 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.66 vs. limit=15.0 +2024-07-28 02:03:36,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95060.0, ans=0.125 +2024-07-28 02:03:37,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=95060.0, ans=0.0 +2024-07-28 02:03:39,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95060.0, ans=0.125 +2024-07-28 02:03:39,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0 +2024-07-28 02:03:44,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95073.33333333333, ans=0.125 +2024-07-28 02:03:47,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.52 vs. limit=22.5 +2024-07-28 02:03:49,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=95086.66666666667, ans=0.125 +2024-07-28 02:03:57,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=95100.0, ans=0.125 +2024-07-28 02:04:00,298 INFO [train.py:1114] (3/4) Epoch 7, batch 10000, loss[loss=0.228, simple_loss=0.3141, pruned_loss=0.07093, over 4596.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.308, pruned_loss=0.07154, over 926471.16 frames. 
], batch size: 16, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:02,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=95113.33333333333, ans=0.125 +2024-07-28 02:04:07,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=95126.66666666667, ans=0.125 +2024-07-28 02:04:09,350 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.061e+01 5.825e+01 6.191e+01 6.916e+01 9.527e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:04:13,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=95140.0, ans=0.0 +2024-07-28 02:04:16,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=95140.0, ans=0.125 +2024-07-28 02:04:27,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.33 vs. limit=6.0 +2024-07-28 02:04:30,156 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:04:32,328 INFO [train.py:1114] (3/4) Epoch 7, batch 10050, loss[loss=0.2686, simple_loss=0.3324, pruned_loss=0.1024, over 3399.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.3113, pruned_loss=0.0739, over 914001.35 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:04:35,946 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:04:41,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=95193.33333333333, ans=0.0 +2024-07-28 02:04:44,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=95193.33333333333, ans=0.125 +2024-07-28 02:04:49,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=95206.66666666667, ans=0.025 +2024-07-28 02:04:52,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. limit=6.0 +2024-07-28 02:04:52,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=95206.66666666667, ans=15.0 +2024-07-28 02:04:53,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=95206.66666666667, ans=0.125 +2024-07-28 02:05:04,585 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.48 vs. limit=8.0 +2024-07-28 02:05:06,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=95233.33333333333, ans=0.125 +2024-07-28 02:05:08,118 INFO [train.py:1114] (3/4) Epoch 7, batch 10100, loss[loss=0.281, simple_loss=0.3452, pruned_loss=0.1084, over 3296.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3173, pruned_loss=0.0808, over 862351.50 frames. 
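Two different figures are reported per `train.py:1114` line: `loss[...]` for the current batch alone and `tot_loss[...]`, a frame-weighted running average, with `over N frames` giving the decayed frame count behind it. That is why tot_loss climbs from 0.2205 to 0.2522 as epoch 7 drains: the leftover batches hold long utterances (batch size 35, only ~3300-3650 frames each), which are both harder and too small to keep the running frame count up, so it shrinks from ~935k to ~788k. The epoch-8 header above also shows the once-per-epoch full validation pass (loss=0.1876 over 944034 frames) and the peak CUDA memory line (4142MB). A minimal sketch of the running average, assuming an exponential decay (the constant is a guess; this icefall-style recipe tracks it with a MetricsTracker in train.py):

```python
# Sketch of the frame-weighted running average behind "tot_loss[... over N
# frames]": exponentially decayed sums of loss*frames and of frames.
class RunningLoss:
    def __init__(self, decay: float = 0.999):  # decay constant is a guess
        self.decay = decay
        self.loss_sum = 0.0   # decayed sum of batch_loss * batch_frames
        self.frames = 0.0     # decayed frame count -> the "over N frames"

    def update(self, batch_loss: float, batch_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames

tracker = RunningLoss()
for loss, frames in [(0.22, 4800.0), (0.25, 3650.0), (0.21, 4700.0)]:
    tot = tracker.update(loss, frames)
print(round(tot, 4), round(tracker.frames, 1))
```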
], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:15,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=95260.0, ans=0.0 +2024-07-28 02:05:17,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.232e+01 6.813e+01 7.421e+01 7.882e+01 1.006e+02, threshold=1.484e+02, percent-clipped=0.0 +2024-07-28 02:05:32,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=95286.66666666667, ans=0.125 +2024-07-28 02:05:37,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=95300.0, ans=0.0 +2024-07-28 02:05:40,552 INFO [train.py:1114] (3/4) Epoch 7, batch 10150, loss[loss=0.2552, simple_loss=0.3264, pruned_loss=0.092, over 3463.00 frames. ], tot_loss[loss=0.2455, simple_loss=0.3208, pruned_loss=0.08509, over 820028.20 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:05:44,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=95313.33333333333, ans=0.2 +2024-07-28 02:05:47,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95326.66666666667, ans=0.125 +2024-07-28 02:05:57,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=95340.0, ans=0.07 +2024-07-28 02:06:14,354 INFO [train.py:1114] (3/4) Epoch 7, batch 10200, loss[loss=0.2727, simple_loss=0.3364, pruned_loss=0.1045, over 3254.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3252, pruned_loss=0.0896, over 788370.30 frames. ], batch size: 35, lr: 1.03e-02, grad_scale: 32.0 +2024-07-28 02:06:20,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95393.33333333333, ans=0.125 +2024-07-28 02:06:21,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=95393.33333333333, ans=0.125 +2024-07-28 02:06:24,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.931e+01 6.733e+01 7.121e+01 8.042e+01 1.219e+02, threshold=1.424e+02, percent-clipped=0.0 +2024-07-28 02:07:11,997 INFO [train.py:1114] (3/4) Epoch 8, batch 0, loss[loss=0.1984, simple_loss=0.2912, pruned_loss=0.0528, over 4854.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2912, pruned_loss=0.0528, over 4854.00 frames. ], batch size: 12, lr: 9.72e-03, grad_scale: 32.0 +2024-07-28 02:07:11,997 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 02:07:22,255 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([2.6680, 3.1836, 2.3488, 2.2627], device='cuda:3') +2024-07-28 02:07:23,602 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.1876, simple_loss=0.2932, pruned_loss=0.04099, over 944034.00 frames. +2024-07-28 02:07:23,602 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 02:07:35,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=95424.0, ans=0.125 +2024-07-28 02:08:06,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.94 vs. 
limit=10.0 +2024-07-28 02:08:07,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.55 vs. limit=22.5 +2024-07-28 02:08:09,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=95464.0, ans=0.125 +2024-07-28 02:08:12,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=95464.0, ans=0.05 +2024-07-28 02:08:13,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=95464.0, ans=0.0 +2024-07-28 02:08:23,643 INFO [train.py:1114] (3/4) Epoch 8, batch 50, loss[loss=0.1806, simple_loss=0.2604, pruned_loss=0.05039, over 4615.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3072, pruned_loss=0.07088, over 206387.17 frames. ], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:08:25,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.63 vs. limit=22.5 +2024-07-28 02:08:26,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95477.33333333333, ans=0.125 +2024-07-28 02:08:27,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.78 vs. limit=10.0 +2024-07-28 02:08:47,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.92 vs. limit=15.0 +2024-07-28 02:08:54,398 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.904e+01 6.447e+01 7.403e+01 1.012e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 02:08:59,013 INFO [train.py:1114] (3/4) Epoch 8, batch 100, loss[loss=0.2181, simple_loss=0.2952, pruned_loss=0.0705, over 4641.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.307, pruned_loss=0.07026, over 365426.39 frames. ], batch size: 12, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:01,429 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.36 vs. limit=22.5 +2024-07-28 02:09:01,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=95544.0, ans=0.125 +2024-07-28 02:09:10,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.85 vs. limit=12.0 +2024-07-28 02:09:22,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=95584.0, ans=0.025 +2024-07-28 02:09:24,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=95584.0, ans=0.125 +2024-07-28 02:09:24,477 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.99 vs. 
limit=15.0 +2024-07-28 02:09:27,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=95597.33333333333, ans=0.125 +2024-07-28 02:09:31,874 INFO [train.py:1114] (3/4) Epoch 8, batch 150, loss[loss=0.1762, simple_loss=0.2613, pruned_loss=0.04552, over 4618.00 frames. ], tot_loss[loss=0.219, simple_loss=0.3023, pruned_loss=0.06788, over 493904.09 frames. ], batch size: 11, lr: 9.71e-03, grad_scale: 32.0 +2024-07-28 02:09:38,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95624.0, ans=0.1 +2024-07-28 02:09:43,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=95624.0, ans=0.125 +2024-07-28 02:09:45,752 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:09:55,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95650.66666666667, ans=0.125 +2024-07-28 02:10:00,296 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.617e+01 5.653e+01 6.192e+01 6.799e+01 9.993e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 02:10:05,089 INFO [train.py:1114] (3/4) Epoch 8, batch 200, loss[loss=0.2508, simple_loss=0.3452, pruned_loss=0.07823, over 4419.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.3018, pruned_loss=0.06753, over 593389.06 frames. ], batch size: 21, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:11,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=95690.66666666667, ans=0.2 +2024-07-28 02:10:13,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=95690.66666666667, ans=0.125 +2024-07-28 02:10:17,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=95704.0, ans=0.125 +2024-07-28 02:10:22,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=95704.0, ans=0.0 +2024-07-28 02:10:38,233 INFO [train.py:1114] (3/4) Epoch 8, batch 250, loss[loss=0.2015, simple_loss=0.299, pruned_loss=0.05194, over 4642.00 frames. ], tot_loss[loss=0.217, simple_loss=0.3006, pruned_loss=0.06666, over 669893.66 frames. ], batch size: 16, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:10:41,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.55 vs. 
limit=22.5 +2024-07-28 02:10:47,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=95757.33333333333, ans=0.1 +2024-07-28 02:10:57,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=95770.66666666667, ans=0.2 +2024-07-28 02:11:02,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=95784.0, ans=0.0 +2024-07-28 02:11:03,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=95784.0, ans=0.0 +2024-07-28 02:11:06,464 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.20 vs. limit=6.0 +2024-07-28 02:11:08,826 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.464e+01 5.845e+01 7.036e+01 8.606e+01 1.725e+02, threshold=1.407e+02, percent-clipped=4.0 +2024-07-28 02:11:11,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=95797.33333333333, ans=0.05 +2024-07-28 02:11:13,558 INFO [train.py:1114] (3/4) Epoch 8, batch 300, loss[loss=0.1914, simple_loss=0.2925, pruned_loss=0.04512, over 4798.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2999, pruned_loss=0.06654, over 729656.01 frames. ], batch size: 15, lr: 9.70e-03, grad_scale: 32.0 +2024-07-28 02:11:21,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=95824.0, ans=0.2 +2024-07-28 02:11:24,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=95824.0, ans=0.125 +2024-07-28 02:11:28,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95837.33333333333, ans=0.0 +2024-07-28 02:11:29,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=95837.33333333333, ans=0.125 +2024-07-28 02:11:46,906 INFO [train.py:1114] (3/4) Epoch 8, batch 350, loss[loss=0.2082, simple_loss=0.2841, pruned_loss=0.06618, over 4942.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2997, pruned_loss=0.06604, over 775907.26 frames. ], batch size: 12, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:11:57,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=95890.66666666667, ans=10.0 +2024-07-28 02:12:01,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=95904.0, ans=0.0 +2024-07-28 02:12:04,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.32 vs. 
limit=15.0 +2024-07-28 02:12:15,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=95930.66666666667, ans=0.125 +2024-07-28 02:12:15,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=95930.66666666667, ans=0.0 +2024-07-28 02:12:18,890 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.049e+01 5.562e+01 6.059e+01 7.082e+01 1.101e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 02:12:21,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=95930.66666666667, ans=0.1 +2024-07-28 02:12:23,504 INFO [train.py:1114] (3/4) Epoch 8, batch 400, loss[loss=0.2205, simple_loss=0.2951, pruned_loss=0.07294, over 4693.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2982, pruned_loss=0.06526, over 813729.42 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:12:25,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.19 vs. limit=15.0 +2024-07-28 02:12:33,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=95957.33333333333, ans=0.125 +2024-07-28 02:12:35,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=95957.33333333333, ans=0.125 +2024-07-28 02:12:36,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=95970.66666666667, ans=0.0 +2024-07-28 02:12:41,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=95970.66666666667, ans=0.0 +2024-07-28 02:13:06,694 INFO [train.py:1114] (3/4) Epoch 8, batch 450, loss[loss=0.199, simple_loss=0.2935, pruned_loss=0.0523, over 4637.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2976, pruned_loss=0.06491, over 839123.11 frames. ], batch size: 13, lr: 9.69e-03, grad_scale: 32.0 +2024-07-28 02:13:08,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=96010.66666666667, ans=0.125 +2024-07-28 02:13:10,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=96010.66666666667, ans=0.125 +2024-07-28 02:13:17,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96024.0, ans=0.1 +2024-07-28 02:13:24,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=96037.33333333333, ans=15.0 +2024-07-28 02:13:26,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. 
limit=15.0 +2024-07-28 02:13:27,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=96050.66666666667, ans=0.0 +2024-07-28 02:13:33,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=96064.0, ans=0.125 +2024-07-28 02:13:38,588 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.894e+01 6.679e+01 8.075e+01 1.208e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:13:43,463 INFO [train.py:1114] (3/4) Epoch 8, batch 500, loss[loss=0.2497, simple_loss=0.3162, pruned_loss=0.09158, over 4692.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2981, pruned_loss=0.06529, over 861617.26 frames. ], batch size: 15, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:13:43,555 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:13:48,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96077.33333333333, ans=0.125 +2024-07-28 02:13:49,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0 +2024-07-28 02:13:57,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.43 vs. limit=12.0 +2024-07-28 02:14:02,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96104.0, ans=0.125 +2024-07-28 02:14:06,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=96117.33333333333, ans=0.0 +2024-07-28 02:14:14,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=12.0 +2024-07-28 02:14:16,579 INFO [train.py:1114] (3/4) Epoch 8, batch 550, loss[loss=0.2514, simple_loss=0.3316, pruned_loss=0.08564, over 4656.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2981, pruned_loss=0.06546, over 877301.93 frames. ], batch size: 17, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:16,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=96144.0, ans=0.2 +2024-07-28 02:14:32,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=96170.66666666667, ans=0.2 +2024-07-28 02:14:38,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=96184.0, ans=0.125 +2024-07-28 02:14:47,414 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.729e+01 6.322e+01 7.437e+01 1.078e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 02:14:52,226 INFO [train.py:1114] (3/4) Epoch 8, batch 600, loss[loss=0.2398, simple_loss=0.3101, pruned_loss=0.08475, over 4649.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.298, pruned_loss=0.06547, over 891692.14 frames. ], batch size: 16, lr: 9.68e-03, grad_scale: 32.0 +2024-07-28 02:14:52,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.08 vs. 
limit=22.5 +2024-07-28 02:15:05,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=96237.33333333333, ans=0.125 +2024-07-28 02:15:18,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=96264.0, ans=0.2 +2024-07-28 02:15:25,413 INFO [train.py:1114] (3/4) Epoch 8, batch 650, loss[loss=0.2144, simple_loss=0.2964, pruned_loss=0.06623, over 4748.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2975, pruned_loss=0.06506, over 903578.39 frames. ], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:15:30,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=96277.33333333333, ans=0.125 +2024-07-28 02:15:34,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=96290.66666666667, ans=0.0 +2024-07-28 02:15:35,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96290.66666666667, ans=0.125 +2024-07-28 02:15:36,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.93 vs. limit=22.5 +2024-07-28 02:15:38,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=96304.0, ans=0.125 +2024-07-28 02:15:53,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=96330.66666666667, ans=0.125 +2024-07-28 02:15:53,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.945e+01 6.095e+01 6.758e+01 8.122e+01 1.148e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 02:15:56,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=96330.66666666667, ans=0.125 +2024-07-28 02:15:56,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=96330.66666666667, ans=10.0 +2024-07-28 02:15:58,371 INFO [train.py:1114] (3/4) Epoch 8, batch 700, loss[loss=0.1841, simple_loss=0.2786, pruned_loss=0.0448, over 4644.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2977, pruned_loss=0.06476, over 911744.77 frames. ], batch size: 12, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:13,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-28 02:16:27,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=96397.33333333333, ans=0.0 +2024-07-28 02:16:31,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=96397.33333333333, ans=0.2 +2024-07-28 02:16:33,897 INFO [train.py:1114] (3/4) Epoch 8, batch 750, loss[loss=0.1852, simple_loss=0.2806, pruned_loss=0.04492, over 4691.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2977, pruned_loss=0.06494, over 918316.62 frames. ], batch size: 13, lr: 9.67e-03, grad_scale: 32.0 +2024-07-28 02:16:52,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.97 vs. 
limit=15.0 +2024-07-28 02:17:02,888 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.806e+01 6.357e+01 7.174e+01 1.221e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 02:17:07,421 INFO [train.py:1114] (3/4) Epoch 8, batch 800, loss[loss=0.2219, simple_loss=0.2933, pruned_loss=0.07525, over 4841.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.298, pruned_loss=0.06562, over 923233.68 frames. ], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:17:18,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96490.66666666667, ans=0.1 +2024-07-28 02:17:25,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=96504.0, ans=0.125 +2024-07-28 02:17:56,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=96530.66666666667, ans=0.0 +2024-07-28 02:17:58,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=96530.66666666667, ans=0.125 +2024-07-28 02:17:59,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=96530.66666666667, ans=0.125 +2024-07-28 02:18:00,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=96530.66666666667, ans=0.125 +2024-07-28 02:18:02,120 INFO [train.py:1114] (3/4) Epoch 8, batch 850, loss[loss=0.2248, simple_loss=0.3194, pruned_loss=0.06514, over 4666.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2971, pruned_loss=0.06504, over 927481.38 frames. ], batch size: 14, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:07,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.66 vs. limit=22.5 +2024-07-28 02:18:14,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=96557.33333333333, ans=0.1 +2024-07-28 02:18:15,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=96570.66666666667, ans=0.1 +2024-07-28 02:18:15,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=96570.66666666667, ans=0.5 +2024-07-28 02:18:23,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.59 vs. limit=10.0 +2024-07-28 02:18:32,787 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.816e+01 6.612e+01 7.766e+01 1.010e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:18:37,395 INFO [train.py:1114] (3/4) Epoch 8, batch 900, loss[loss=0.2008, simple_loss=0.2786, pruned_loss=0.06153, over 4839.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2987, pruned_loss=0.0661, over 927848.41 frames. 
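The learning rate in these lines decays smoothly within an epoch (1.04e-02 easing to 1.03e-02 across late epoch 7) and steps down at the epoch boundary (9.72e-03 at epoch 8, batch 0, down to 9.66e-03 by batch 900). That two-factor shape matches an Eden-style schedule, a product of a batch term and an epoch term; the sketch below uses the formula from icefall's optim.py, but `base_lr`, `lr_batches`, and `lr_epochs` are illustrative guesses, not this run's actual flags:

```python
# Eden-style learning-rate schedule (formula as in icefall's optim.py);
# the constants below are illustrative guesses, not this run's settings.
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 6.0) -> float:
    return (
        base_lr
        * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    )

# With these guessed constants the result is the same order of magnitude as
# the logged 9.7e-03 at epoch 8, but not an exact reproduction of this run.
print(eden_lr(0.05, batch=95400, epoch=8))  # ~8.9e-03
```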
], batch size: 12, lr: 9.66e-03, grad_scale: 32.0 +2024-07-28 02:18:44,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=96624.0, ans=10.0 +2024-07-28 02:18:50,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=96637.33333333333, ans=0.0 +2024-07-28 02:19:02,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=96650.66666666667, ans=0.125 +2024-07-28 02:19:07,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96664.0, ans=0.1 +2024-07-28 02:19:11,050 INFO [train.py:1114] (3/4) Epoch 8, batch 950, loss[loss=0.1823, simple_loss=0.2541, pruned_loss=0.05527, over 4782.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2986, pruned_loss=0.06612, over 930034.84 frames. ], batch size: 12, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:11,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=96677.33333333333, ans=0.0 +2024-07-28 02:19:11,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=96677.33333333333, ans=0.2 +2024-07-28 02:19:22,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=96690.66666666667, ans=0.125 +2024-07-28 02:19:23,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=96690.66666666667, ans=0.0 +2024-07-28 02:19:29,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=96704.0, ans=0.0 +2024-07-28 02:19:38,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.68 vs. limit=15.0 +2024-07-28 02:19:40,112 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.894e+01 6.010e+01 6.768e+01 8.162e+01 1.047e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-28 02:19:43,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96730.66666666667, ans=0.1 +2024-07-28 02:19:44,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=96744.0, ans=0.0 +2024-07-28 02:19:44,745 INFO [train.py:1114] (3/4) Epoch 8, batch 1000, loss[loss=0.2063, simple_loss=0.2852, pruned_loss=0.0637, over 4972.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2989, pruned_loss=0.06663, over 929418.61 frames. 
], batch size: 13, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:19:50,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_ff2.min_abs, batch_count=96744.0, ans=0.1 +2024-07-28 02:19:58,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96770.66666666667, ans=0.1 +2024-07-28 02:20:11,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=96784.0, ans=0.0 +2024-07-28 02:20:18,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=96797.33333333333, ans=0.0 +2024-07-28 02:20:19,453 INFO [train.py:1114] (3/4) Epoch 8, batch 1050, loss[loss=0.2025, simple_loss=0.2969, pruned_loss=0.054, over 4877.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2978, pruned_loss=0.06587, over 931616.51 frames. ], batch size: 14, lr: 9.65e-03, grad_scale: 32.0 +2024-07-28 02:20:33,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=96810.66666666667, ans=0.1 +2024-07-28 02:20:35,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=96824.0, ans=0.125 +2024-07-28 02:20:47,138 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:20:55,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=96850.66666666667, ans=0.0 +2024-07-28 02:21:00,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.947e+01 5.815e+01 6.423e+01 7.080e+01 9.595e+01, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:21:28,313 INFO [train.py:1114] (3/4) Epoch 8, batch 1100, loss[loss=0.2149, simple_loss=0.2892, pruned_loss=0.07029, over 4894.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2979, pruned_loss=0.06596, over 934249.19 frames. ], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:25:34,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=96890.66666666667, ans=0.125 +2024-07-28 02:25:36,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=96904.0, ans=0.125 +2024-07-28 02:25:37,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.28 vs. limit=15.0 +2024-07-28 02:25:38,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=96904.0, ans=0.04949747468305833 +2024-07-28 02:25:39,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.05 vs. 
limit=15.0 +2024-07-28 02:25:41,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=96904.0, ans=0.0 +2024-07-28 02:25:50,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=96930.66666666667, ans=0.05 +2024-07-28 02:25:56,435 INFO [train.py:1114] (3/4) Epoch 8, batch 1150, loss[loss=0.1918, simple_loss=0.2742, pruned_loss=0.05474, over 4902.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2986, pruned_loss=0.06652, over 934327.39 frames. ], batch size: 13, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:26:28,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.21 vs. limit=15.0 +2024-07-28 02:26:30,182 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.022e+01 6.608e+01 7.492e+01 1.273e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-28 02:26:39,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=96997.33333333333, ans=0.2 +2024-07-28 02:26:41,675 INFO [train.py:1114] (3/4) Epoch 8, batch 1200, loss[loss=0.2492, simple_loss=0.3314, pruned_loss=0.0835, over 4873.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2996, pruned_loss=0.06701, over 932843.05 frames. ], batch size: 14, lr: 9.64e-03, grad_scale: 32.0 +2024-07-28 02:27:00,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=97037.33333333333, ans=0.0 +2024-07-28 02:27:16,980 INFO [train.py:1114] (3/4) Epoch 8, batch 1250, loss[loss=0.2319, simple_loss=0.3208, pruned_loss=0.07153, over 4807.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.3004, pruned_loss=0.06671, over 936680.60 frames. ], batch size: 15, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:27:27,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=97090.66666666667, ans=0.125 +2024-07-28 02:27:34,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.65 vs. limit=15.0 +2024-07-28 02:27:35,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97104.0, ans=0.1 +2024-07-28 02:27:36,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=97104.0, ans=0.125 +2024-07-28 02:27:47,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.611e+01 6.251e+01 6.902e+01 9.769e+01, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 02:27:52,332 INFO [train.py:1114] (3/4) Epoch 8, batch 1300, loss[loss=0.2047, simple_loss=0.2934, pruned_loss=0.05805, over 4678.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2986, pruned_loss=0.06568, over 938460.95 frames. ], batch size: 19, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:27:59,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. 
limit=6.0 +2024-07-28 02:28:00,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=97157.33333333333, ans=0.0 +2024-07-28 02:28:04,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.98 vs. limit=15.0 +2024-07-28 02:28:20,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.38 vs. limit=12.0 +2024-07-28 02:28:20,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.92 vs. limit=22.5 +2024-07-28 02:28:21,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97184.0, ans=0.125 +2024-07-28 02:28:22,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97184.0, ans=0.125 +2024-07-28 02:28:31,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=97197.33333333333, ans=0.0 +2024-07-28 02:28:33,050 INFO [train.py:1114] (3/4) Epoch 8, batch 1350, loss[loss=0.2109, simple_loss=0.3095, pruned_loss=0.05619, over 4759.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2997, pruned_loss=0.06624, over 940459.78 frames. ], batch size: 13, lr: 9.63e-03, grad_scale: 32.0 +2024-07-28 02:28:43,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=97224.0, ans=0.125 +2024-07-28 02:28:43,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=97224.0, ans=0.125 +2024-07-28 02:28:43,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97224.0, ans=0.125 +2024-07-28 02:28:47,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=97224.0, ans=0.035 +2024-07-28 02:28:49,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=97237.33333333333, ans=0.0 +2024-07-28 02:28:51,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97237.33333333333, ans=0.125 +2024-07-28 02:29:04,451 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.671e+01 8.189e+01 1.142e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:29:08,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=97277.33333333333, ans=10.0 +2024-07-28 02:29:09,258 INFO [train.py:1114] (3/4) Epoch 8, batch 1400, loss[loss=0.1671, simple_loss=0.2521, pruned_loss=0.04111, over 4708.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2992, pruned_loss=0.06626, over 942345.23 frames. 
], batch size: 11, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:17,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=97290.66666666667, ans=0.0 +2024-07-28 02:29:17,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=97290.66666666667, ans=0.125 +2024-07-28 02:29:20,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=97290.66666666667, ans=0.0 +2024-07-28 02:29:25,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=97304.0, ans=0.2 +2024-07-28 02:29:27,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=97304.0, ans=0.0 +2024-07-28 02:29:29,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.59 vs. limit=15.0 +2024-07-28 02:29:30,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.78 vs. limit=6.0 +2024-07-28 02:29:32,036 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.383e-01 +2024-07-28 02:29:32,384 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.67 vs. limit=22.5 +2024-07-28 02:29:35,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=97317.33333333333, ans=0.5 +2024-07-28 02:29:43,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.83 vs. limit=22.5 +2024-07-28 02:29:43,466 INFO [train.py:1114] (3/4) Epoch 8, batch 1450, loss[loss=0.2829, simple_loss=0.357, pruned_loss=0.1043, over 4693.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2997, pruned_loss=0.06632, over 942490.71 frames. ], batch size: 15, lr: 9.62e-03, grad_scale: 32.0 +2024-07-28 02:29:47,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97344.0, ans=0.1 +2024-07-28 02:29:56,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97370.66666666667, ans=0.125 +2024-07-28 02:29:57,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=97370.66666666667, ans=0.2 +2024-07-28 02:30:00,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.30 vs. limit=10.0 +2024-07-28 02:30:08,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.29 vs. 
limit=15.0 +2024-07-28 02:30:08,671 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:30:09,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=97397.33333333333, ans=0.125 +2024-07-28 02:30:12,513 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.678e+01 6.336e+01 6.902e+01 9.292e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 02:30:16,530 INFO [train.py:1114] (3/4) Epoch 8, batch 1500, loss[loss=0.2521, simple_loss=0.3346, pruned_loss=0.08476, over 4815.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2998, pruned_loss=0.06644, over 942256.55 frames. ], batch size: 14, lr: 9.62e-03, grad_scale: 16.0 +2024-07-28 02:30:18,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.21 vs. limit=15.0 +2024-07-28 02:30:21,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=97410.66666666667, ans=0.1 +2024-07-28 02:30:42,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=97450.66666666667, ans=0.125 +2024-07-28 02:30:48,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=97464.0, ans=0.2 +2024-07-28 02:30:51,715 INFO [train.py:1114] (3/4) Epoch 8, batch 1550, loss[loss=0.2642, simple_loss=0.3488, pruned_loss=0.08981, over 4903.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2996, pruned_loss=0.06652, over 938170.85 frames. ], batch size: 15, lr: 9.61e-03, grad_scale: 16.0 +2024-07-28 02:31:15,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97517.33333333333, ans=0.125 +2024-07-28 02:31:18,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=97530.66666666667, ans=0.0 +2024-07-28 02:31:19,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=97530.66666666667, ans=0.125 +2024-07-28 02:31:21,195 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.890e+01 6.503e+01 7.700e+01 2.674e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-28 02:31:21,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97530.66666666667, ans=0.125 +2024-07-28 02:31:25,114 INFO [train.py:1114] (3/4) Epoch 8, batch 1600, loss[loss=0.2139, simple_loss=0.3068, pruned_loss=0.06048, over 4874.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2998, pruned_loss=0.06709, over 936634.88 frames. 
], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:31:36,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=97557.33333333333, ans=0.125 +2024-07-28 02:31:49,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=97584.0, ans=0.125 +2024-07-28 02:31:54,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=97597.33333333333, ans=0.0 +2024-07-28 02:31:54,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=97597.33333333333, ans=0.0 +2024-07-28 02:31:59,464 INFO [train.py:1114] (3/4) Epoch 8, batch 1650, loss[loss=0.2788, simple_loss=0.3529, pruned_loss=0.1023, over 4659.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2987, pruned_loss=0.06669, over 936673.86 frames. ], batch size: 14, lr: 9.61e-03, grad_scale: 32.0 +2024-07-28 02:32:11,098 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:32:15,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=97637.33333333333, ans=0.04949747468305833 +2024-07-28 02:32:18,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=97650.66666666667, ans=0.125 +2024-07-28 02:32:21,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=97650.66666666667, ans=0.05 +2024-07-28 02:32:29,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=97664.0, ans=0.125 +2024-07-28 02:32:30,781 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.779e+01 6.597e+01 7.631e+01 1.276e+02, threshold=1.319e+02, percent-clipped=0.0 +2024-07-28 02:32:34,705 INFO [train.py:1114] (3/4) Epoch 8, batch 1700, loss[loss=0.2139, simple_loss=0.2928, pruned_loss=0.06749, over 4713.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2978, pruned_loss=0.06536, over 938449.08 frames. ], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:32:54,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0 +2024-07-28 02:33:00,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97717.33333333333, ans=0.125 +2024-07-28 02:33:07,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=97730.66666666667, ans=0.125 +2024-07-28 02:33:07,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.95 vs. limit=15.0 +2024-07-28 02:33:09,494 INFO [train.py:1114] (3/4) Epoch 8, batch 1750, loss[loss=0.1685, simple_loss=0.254, pruned_loss=0.04151, over 4801.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2972, pruned_loss=0.06488, over 939400.74 frames. 
], batch size: 11, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:14,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=97744.0, ans=0.125 +2024-07-28 02:33:24,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=97770.66666666667, ans=0.0 +2024-07-28 02:33:34,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.56 vs. limit=15.0 +2024-07-28 02:33:37,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=97784.0, ans=0.5 +2024-07-28 02:33:46,446 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.043e+01 5.863e+01 6.439e+01 7.161e+01 1.257e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 02:33:52,177 INFO [train.py:1114] (3/4) Epoch 8, batch 1800, loss[loss=0.1941, simple_loss=0.2831, pruned_loss=0.05254, over 4627.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2973, pruned_loss=0.06515, over 940183.91 frames. ], batch size: 13, lr: 9.60e-03, grad_scale: 32.0 +2024-07-28 02:33:59,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=97824.0, ans=0.125 +2024-07-28 02:34:01,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97824.0, ans=0.1 +2024-07-28 02:34:16,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97837.33333333333, ans=0.0 +2024-07-28 02:34:33,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=97864.0, ans=0.125 +2024-07-28 02:34:34,878 INFO [train.py:1114] (3/4) Epoch 8, batch 1850, loss[loss=0.1966, simple_loss=0.2961, pruned_loss=0.04851, over 4813.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2974, pruned_loss=0.06494, over 940221.69 frames. 
], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:34:35,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=97877.33333333333, ans=0.0 +2024-07-28 02:34:35,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=97877.33333333333, ans=0.2 +2024-07-28 02:34:40,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=97877.33333333333, ans=0.1 +2024-07-28 02:34:52,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=97904.0, ans=0.0 +2024-07-28 02:34:52,893 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:35:02,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=97930.66666666667, ans=0.05 +2024-07-28 02:35:04,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.869e+01 6.668e+01 7.730e+01 1.207e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 02:35:05,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=97930.66666666667, ans=0.125 +2024-07-28 02:35:08,080 INFO [train.py:1114] (3/4) Epoch 8, batch 1900, loss[loss=0.2293, simple_loss=0.3285, pruned_loss=0.06501, over 4654.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2979, pruned_loss=0.06504, over 941628.39 frames. ], batch size: 14, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:41,418 INFO [train.py:1114] (3/4) Epoch 8, batch 1950, loss[loss=0.1958, simple_loss=0.2797, pruned_loss=0.05593, over 4898.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.299, pruned_loss=0.06511, over 943701.31 frames. ], batch size: 13, lr: 9.59e-03, grad_scale: 32.0 +2024-07-28 02:35:42,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=98010.66666666667, ans=0.125 +2024-07-28 02:35:45,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=98010.66666666667, ans=0.125 +2024-07-28 02:35:46,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=98010.66666666667, ans=0.125 +2024-07-28 02:36:09,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=98064.0, ans=0.0 +2024-07-28 02:36:12,627 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.950e+01 5.706e+01 6.313e+01 6.898e+01 1.010e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 02:36:16,767 INFO [train.py:1114] (3/4) Epoch 8, batch 2000, loss[loss=0.227, simple_loss=0.2992, pruned_loss=0.07744, over 4814.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2996, pruned_loss=0.06562, over 940970.26 frames. ], batch size: 11, lr: 9.58e-03, grad_scale: 32.0 +2024-07-28 02:36:22,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. 
limit=15.0 +2024-07-28 02:36:30,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=98104.0, ans=0.0 +2024-07-28 02:36:41,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=24.09 vs. limit=15.0 +2024-07-28 02:36:48,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=98130.66666666667, ans=0.025 +2024-07-28 02:36:50,178 INFO [train.py:1114] (3/4) Epoch 8, batch 2050, loss[loss=0.2007, simple_loss=0.2709, pruned_loss=0.06521, over 4609.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2985, pruned_loss=0.06586, over 939017.47 frames. ], batch size: 11, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:36:53,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98144.0, ans=0.1 +2024-07-28 02:36:53,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=98144.0, ans=0.125 +2024-07-28 02:37:05,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=98170.66666666667, ans=0.2 +2024-07-28 02:37:08,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.94 vs. limit=15.0 +2024-07-28 02:37:11,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=98184.0, ans=0.1 +2024-07-28 02:37:20,053 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 6.128e+01 6.881e+01 8.380e+01 1.718e+02, threshold=1.376e+02, percent-clipped=3.0 +2024-07-28 02:37:23,357 INFO [train.py:1114] (3/4) Epoch 8, batch 2100, loss[loss=0.2284, simple_loss=0.3115, pruned_loss=0.07271, over 4763.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2974, pruned_loss=0.06486, over 940989.78 frames. ], batch size: 13, lr: 9.58e-03, grad_scale: 16.0 +2024-07-28 02:37:33,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98224.0, ans=0.1 +2024-07-28 02:37:34,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=98224.0, ans=0.125 +2024-07-28 02:37:36,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.08 vs. limit=22.5 +2024-07-28 02:37:51,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=98264.0, ans=0.0 +2024-07-28 02:37:56,377 INFO [train.py:1114] (3/4) Epoch 8, batch 2150, loss[loss=0.1704, simple_loss=0.2619, pruned_loss=0.0394, over 4900.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.296, pruned_loss=0.06395, over 944127.23 frames. 
], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:38:03,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=98290.66666666667, ans=0.125 +2024-07-28 02:38:28,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.621e+01 6.298e+01 7.456e+01 1.063e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 02:38:28,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=98330.66666666667, ans=0.0 +2024-07-28 02:38:28,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.25 vs. limit=12.0 +2024-07-28 02:38:31,421 INFO [train.py:1114] (3/4) Epoch 8, batch 2200, loss[loss=0.257, simple_loss=0.3284, pruned_loss=0.09281, over 4808.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2966, pruned_loss=0.06433, over 943491.20 frames. ], batch size: 14, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:38:35,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98344.0, ans=0.125 +2024-07-28 02:38:40,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=98357.33333333333, ans=0.125 +2024-07-28 02:38:51,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=98384.0, ans=0.0 +2024-07-28 02:38:55,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=98384.0, ans=0.125 +2024-07-28 02:38:55,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=19.21 vs. limit=15.0 +2024-07-28 02:39:06,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=98410.66666666667, ans=0.125 +2024-07-28 02:39:06,690 INFO [train.py:1114] (3/4) Epoch 8, batch 2250, loss[loss=0.2435, simple_loss=0.3332, pruned_loss=0.0769, over 4688.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2972, pruned_loss=0.06479, over 941795.39 frames. ], batch size: 13, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:28,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=98450.66666666667, ans=0.2 +2024-07-28 02:39:35,839 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.847e+01 6.592e+01 7.483e+01 1.040e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 02:39:39,153 INFO [train.py:1114] (3/4) Epoch 8, batch 2300, loss[loss=0.1976, simple_loss=0.2812, pruned_loss=0.05695, over 4955.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2955, pruned_loss=0.06406, over 939926.92 frames. ], batch size: 12, lr: 9.57e-03, grad_scale: 16.0 +2024-07-28 02:39:41,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.94 vs. 
limit=12.0 +2024-07-28 02:39:42,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=98477.33333333333, ans=0.125 +2024-07-28 02:39:44,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. limit=6.0 +2024-07-28 02:39:44,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=98477.33333333333, ans=10.0 +2024-07-28 02:39:44,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=98477.33333333333, ans=0.125 +2024-07-28 02:41:01,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=98490.66666666667, ans=0.0 +2024-07-28 02:41:15,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=98517.33333333333, ans=0.125 +2024-07-28 02:41:15,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=98530.66666666667, ans=0.2 +2024-07-28 02:41:22,932 INFO [train.py:1114] (3/4) Epoch 8, batch 2350, loss[loss=0.2028, simple_loss=0.2915, pruned_loss=0.0571, over 4638.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2962, pruned_loss=0.06433, over 941981.11 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 16.0 +2024-07-28 02:41:28,670 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.02 vs. limit=10.0 +2024-07-28 02:41:48,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=98584.0, ans=0.125 +2024-07-28 02:41:53,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.834e+01 6.332e+01 7.540e+01 1.064e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 02:41:56,607 INFO [train.py:1114] (3/4) Epoch 8, batch 2400, loss[loss=0.1998, simple_loss=0.2868, pruned_loss=0.05637, over 4637.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2968, pruned_loss=0.06457, over 941921.47 frames. ], batch size: 12, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:22,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=98637.33333333333, ans=0.1 +2024-07-28 02:42:37,994 INFO [train.py:1114] (3/4) Epoch 8, batch 2450, loss[loss=0.2318, simple_loss=0.3183, pruned_loss=0.07268, over 4692.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2978, pruned_loss=0.06507, over 937437.14 frames. ], batch size: 13, lr: 9.56e-03, grad_scale: 32.0 +2024-07-28 02:42:40,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=98677.33333333333, ans=0.125 +2024-07-28 02:42:44,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=12.0 +2024-07-28 02:42:44,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.34 vs. 
limit=15.0 +2024-07-28 02:42:47,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=98690.66666666667, ans=0.035 +2024-07-28 02:42:55,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.38 vs. limit=22.5 +2024-07-28 02:43:01,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.77 vs. limit=6.0 +2024-07-28 02:43:05,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=98704.0, ans=10.0 +2024-07-28 02:43:16,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.41 vs. limit=15.0 +2024-07-28 02:43:25,561 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.031e+01 6.058e+01 6.704e+01 7.921e+01 1.237e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 02:43:28,907 INFO [train.py:1114] (3/4) Epoch 8, batch 2500, loss[loss=0.1934, simple_loss=0.2848, pruned_loss=0.05101, over 4801.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2982, pruned_loss=0.06534, over 938960.84 frames. ], batch size: 14, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:43:30,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=98744.0, ans=0.0 +2024-07-28 02:43:30,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=98744.0, ans=0.125 +2024-07-28 02:43:40,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=98757.33333333333, ans=0.1 +2024-07-28 02:44:16,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=98797.33333333333, ans=0.0 +2024-07-28 02:44:19,061 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.95 vs. limit=10.0 +2024-07-28 02:44:21,342 INFO [train.py:1114] (3/4) Epoch 8, batch 2550, loss[loss=0.1718, simple_loss=0.249, pruned_loss=0.04731, over 4805.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2977, pruned_loss=0.06508, over 938876.19 frames. ], batch size: 11, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:44:29,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=98824.0, ans=0.2 +2024-07-28 02:44:31,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=98824.0, ans=0.0 +2024-07-28 02:44:58,728 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.841e+01 6.423e+01 7.700e+01 1.142e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:45:02,065 INFO [train.py:1114] (3/4) Epoch 8, batch 2600, loss[loss=0.2175, simple_loss=0.2952, pruned_loss=0.06988, over 4896.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2986, pruned_loss=0.06544, over 937268.64 frames. 
], batch size: 13, lr: 9.55e-03, grad_scale: 32.0 +2024-07-28 02:45:09,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.90 vs. limit=10.0 +2024-07-28 02:45:09,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=98890.66666666667, ans=0.0 +2024-07-28 02:45:11,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=98890.66666666667, ans=0.125 +2024-07-28 02:45:37,243 INFO [train.py:1114] (3/4) Epoch 8, batch 2650, loss[loss=0.2517, simple_loss=0.3237, pruned_loss=0.08982, over 4653.00 frames. ], tot_loss[loss=0.215, simple_loss=0.299, pruned_loss=0.06548, over 939491.19 frames. ], batch size: 16, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:45:37,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.12 vs. limit=15.0 +2024-07-28 02:45:39,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.17 vs. limit=12.0 +2024-07-28 02:45:51,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=98970.66666666667, ans=0.125 +2024-07-28 02:45:56,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=98984.0, ans=0.2 +2024-07-28 02:46:01,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=98984.0, ans=0.0 +2024-07-28 02:46:06,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=98984.0, ans=0.0 +2024-07-28 02:46:07,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=98984.0, ans=0.2 +2024-07-28 02:46:12,207 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.757e+01 6.469e+01 7.162e+01 1.151e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 02:46:14,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=98997.33333333333, ans=0.0 +2024-07-28 02:46:18,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99010.66666666667, ans=0.1 +2024-07-28 02:46:18,787 INFO [train.py:1114] (3/4) Epoch 8, batch 2700, loss[loss=0.228, simple_loss=0.3121, pruned_loss=0.07196, over 4738.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.3003, pruned_loss=0.06597, over 939507.50 frames. 
], batch size: 14, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:29,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99024.0, ans=0.1 +2024-07-28 02:46:29,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=99024.0, ans=0.0 +2024-07-28 02:46:33,711 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:46:35,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=99037.33333333333, ans=10.0 +2024-07-28 02:46:42,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=99050.66666666667, ans=0.1 +2024-07-28 02:46:42,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=99050.66666666667, ans=0.0 +2024-07-28 02:46:46,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=99050.66666666667, ans=0.0 +2024-07-28 02:46:50,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=99064.0, ans=0.125 +2024-07-28 02:46:51,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.56 vs. limit=15.0 +2024-07-28 02:46:54,448 INFO [train.py:1114] (3/4) Epoch 8, batch 2750, loss[loss=0.1745, simple_loss=0.2542, pruned_loss=0.0474, over 4696.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2995, pruned_loss=0.06616, over 939966.28 frames. ], batch size: 12, lr: 9.54e-03, grad_scale: 32.0 +2024-07-28 02:46:55,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.11 vs. limit=15.0 +2024-07-28 02:46:59,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=22.5 +2024-07-28 02:47:08,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=99090.66666666667, ans=0.125 +2024-07-28 02:47:09,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.90 vs. limit=22.5 +2024-07-28 02:47:12,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=99104.0, ans=0.0 +2024-07-28 02:47:31,845 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 6.000e+01 6.844e+01 8.152e+01 1.229e+02, threshold=1.369e+02, percent-clipped=0.0 +2024-07-28 02:47:34,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=99144.0, ans=0.2 +2024-07-28 02:47:40,074 INFO [train.py:1114] (3/4) Epoch 8, batch 2800, loss[loss=0.315, simple_loss=0.366, pruned_loss=0.132, over 3563.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2985, pruned_loss=0.06608, over 937692.95 frames. 
], batch size: 36, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:47:41,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99144.0, ans=0.125 +2024-07-28 02:47:42,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=99144.0, ans=0.125 +2024-07-28 02:47:48,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=99157.33333333333, ans=0.025 +2024-07-28 02:47:55,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=99170.66666666667, ans=0.2 +2024-07-28 02:47:57,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=99170.66666666667, ans=0.125 +2024-07-28 02:48:02,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=99170.66666666667, ans=0.2 +2024-07-28 02:48:21,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.69 vs. limit=6.0 +2024-07-28 02:48:24,681 INFO [train.py:1114] (3/4) Epoch 8, batch 2850, loss[loss=0.1995, simple_loss=0.2818, pruned_loss=0.05858, over 4967.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2995, pruned_loss=0.06673, over 936089.50 frames. ], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:25,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=99210.66666666667, ans=0.125 +2024-07-28 02:48:27,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99210.66666666667, ans=0.125 +2024-07-28 02:48:30,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.46 vs. limit=15.0 +2024-07-28 02:48:32,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=99224.0, ans=0.125 +2024-07-28 02:48:34,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=99224.0, ans=0.125 +2024-07-28 02:48:39,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=99237.33333333333, ans=0.125 +2024-07-28 02:48:42,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=99237.33333333333, ans=0.1 +2024-07-28 02:48:47,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=99250.66666666667, ans=0.125 +2024-07-28 02:48:54,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.619e+01 6.304e+01 7.225e+01 1.077e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 02:48:57,344 INFO [train.py:1114] (3/4) Epoch 8, batch 2900, loss[loss=0.1926, simple_loss=0.2851, pruned_loss=0.05008, over 4833.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.3001, pruned_loss=0.06587, over 939855.32 frames. 
], batch size: 13, lr: 9.53e-03, grad_scale: 32.0 +2024-07-28 02:48:58,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99277.33333333333, ans=0.125 +2024-07-28 02:49:23,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.87 vs. limit=12.0 +2024-07-28 02:49:27,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=99330.66666666667, ans=0.125 +2024-07-28 02:49:33,156 INFO [train.py:1114] (3/4) Epoch 8, batch 2950, loss[loss=0.1619, simple_loss=0.2568, pruned_loss=0.03351, over 4708.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2983, pruned_loss=0.06504, over 938933.97 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:49:36,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=99344.0, ans=0.125 +2024-07-28 02:49:43,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=99357.33333333333, ans=0.0 +2024-07-28 02:49:46,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99370.66666666667, ans=0.1 +2024-07-28 02:49:51,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.98 vs. limit=15.0 +2024-07-28 02:49:57,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.18 vs. limit=15.0 +2024-07-28 02:49:58,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=99384.0, ans=0.0 +2024-07-28 02:49:59,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.08 vs. limit=10.0 +2024-07-28 02:50:02,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.04 vs. limit=15.0 +2024-07-28 02:50:04,307 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.988e+01 6.681e+01 8.290e+01 1.259e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 02:50:05,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=99397.33333333333, ans=0.025 +2024-07-28 02:50:05,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=99397.33333333333, ans=0.125 +2024-07-28 02:50:07,619 INFO [train.py:1114] (3/4) Epoch 8, batch 3000, loss[loss=0.2327, simple_loss=0.3218, pruned_loss=0.07177, over 4758.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2983, pruned_loss=0.06522, over 938793.05 frames. 
], batch size: 13, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:50:07,620 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 02:50:19,015 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.5517, 5.0383, 5.2389, 5.0633], device='cuda:3') +2024-07-28 02:50:54,535 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.1802, simple_loss=0.2848, pruned_loss=0.03781, over 944034.00 frames. +2024-07-28 02:50:54,538 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 02:50:56,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=99410.66666666667, ans=0.0 +2024-07-28 02:51:00,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=99424.0, ans=0.0 +2024-07-28 02:51:02,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=99424.0, ans=0.025 +2024-07-28 02:51:05,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=99424.0, ans=0.2 +2024-07-28 02:51:10,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=99437.33333333333, ans=0.2 +2024-07-28 02:51:12,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=99437.33333333333, ans=0.0 +2024-07-28 02:51:15,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=99450.66666666667, ans=0.1 +2024-07-28 02:51:29,234 INFO [train.py:1114] (3/4) Epoch 8, batch 3050, loss[loss=0.1981, simple_loss=0.2879, pruned_loss=0.05412, over 4635.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2985, pruned_loss=0.06513, over 937233.54 frames. ], batch size: 12, lr: 9.52e-03, grad_scale: 32.0 +2024-07-28 02:51:33,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.74 vs. limit=15.0 +2024-07-28 02:51:37,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=99490.66666666667, ans=0.0 +2024-07-28 02:52:07,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=99504.0, ans=0.05 +2024-07-28 02:52:22,248 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.732e+01 6.156e+01 7.183e+01 1.083e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 02:52:25,462 INFO [train.py:1114] (3/4) Epoch 8, batch 3100, loss[loss=0.2652, simple_loss=0.3459, pruned_loss=0.09223, over 4629.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2984, pruned_loss=0.06493, over 937773.49 frames. ], batch size: 16, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:52:35,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=19.73 vs. 
limit=22.5 +2024-07-28 02:52:40,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=99570.66666666667, ans=0.0 +2024-07-28 02:52:40,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=99570.66666666667, ans=0.125 +2024-07-28 02:52:41,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.45 vs. limit=10.0 +2024-07-28 02:52:42,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=99570.66666666667, ans=10.0 +2024-07-28 02:52:45,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.69 vs. limit=15.0 +2024-07-28 02:53:00,726 INFO [train.py:1114] (3/4) Epoch 8, batch 3150, loss[loss=0.246, simple_loss=0.3327, pruned_loss=0.0796, over 4635.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2982, pruned_loss=0.06506, over 938196.36 frames. ], batch size: 17, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:33,337 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.886e+01 5.840e+01 6.506e+01 7.424e+01 1.196e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 02:53:33,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=99664.0, ans=0.0 +2024-07-28 02:53:34,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=99664.0, ans=0.025 +2024-07-28 02:53:36,729 INFO [train.py:1114] (3/4) Epoch 8, batch 3200, loss[loss=0.2016, simple_loss=0.2849, pruned_loss=0.05912, over 4827.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2976, pruned_loss=0.06494, over 939787.66 frames. ], batch size: 13, lr: 9.51e-03, grad_scale: 32.0 +2024-07-28 02:53:43,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=99690.66666666667, ans=0.125 +2024-07-28 02:53:55,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=99704.0, ans=0.125 +2024-07-28 02:53:59,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.49 vs. limit=10.0 +2024-07-28 02:54:07,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=99730.66666666667, ans=0.0 +2024-07-28 02:54:11,574 INFO [train.py:1114] (3/4) Epoch 8, batch 3250, loss[loss=0.2072, simple_loss=0.2832, pruned_loss=0.06557, over 4931.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2975, pruned_loss=0.06462, over 940916.89 frames. 
], batch size: 14, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:54:14,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=99744.0, ans=0.125 +2024-07-28 02:54:15,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=99744.0, ans=0.125 +2024-07-28 02:54:16,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=99744.0, ans=0.125 +2024-07-28 02:54:35,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=99784.0, ans=0.125 +2024-07-28 02:54:35,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=99784.0, ans=22.5 +2024-07-28 02:54:38,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=99797.33333333333, ans=0.0 +2024-07-28 02:54:39,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=99797.33333333333, ans=0.125 +2024-07-28 02:54:41,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.887e+01 6.598e+01 7.799e+01 2.167e+02, threshold=1.320e+02, percent-clipped=1.0 +2024-07-28 02:54:44,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=99810.66666666667, ans=0.125 +2024-07-28 02:54:45,027 INFO [train.py:1114] (3/4) Epoch 8, batch 3300, loss[loss=0.2626, simple_loss=0.3385, pruned_loss=0.09336, over 4727.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2967, pruned_loss=0.06481, over 941593.85 frames. ], batch size: 19, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:07,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=99850.66666666667, ans=0.125 +2024-07-28 02:55:18,687 INFO [train.py:1114] (3/4) Epoch 8, batch 3350, loss[loss=0.2317, simple_loss=0.3067, pruned_loss=0.07838, over 4634.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2978, pruned_loss=0.06519, over 939390.23 frames. ], batch size: 17, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:55:19,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=99877.33333333333, ans=0.125 +2024-07-28 02:55:24,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-07-28 02:55:39,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=99917.33333333333, ans=0.125 +2024-07-28 02:55:46,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=99930.66666666667, ans=0.125 +2024-07-28 02:55:48,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=99930.66666666667, ans=0.125 +2024-07-28 02:55:50,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.817e+01 6.427e+01 7.197e+01 1.127e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 02:55:53,715 INFO [train.py:1114] (3/4) Epoch 8, batch 3400, loss[loss=0.1662, simple_loss=0.2577, pruned_loss=0.03735, over 4803.00 frames. 
], tot_loss[loss=0.2138, simple_loss=0.2971, pruned_loss=0.06527, over 938344.07 frames. ], batch size: 11, lr: 9.50e-03, grad_scale: 32.0 +2024-07-28 02:56:05,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=99957.33333333333, ans=0.125 +2024-07-28 02:56:08,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=99970.66666666667, ans=0.0 +2024-07-28 02:56:10,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=99970.66666666667, ans=0.0 +2024-07-28 02:56:11,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=99970.66666666667, ans=0.0 +2024-07-28 02:56:13,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=99970.66666666667, ans=0.5 +2024-07-28 02:56:17,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=99984.0, ans=0.2 +2024-07-28 02:56:19,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=99984.0, ans=0.5 +2024-07-28 02:56:26,227 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:56:27,979 INFO [train.py:1114] (3/4) Epoch 8, batch 3450, loss[loss=0.244, simple_loss=0.3221, pruned_loss=0.08295, over 4759.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2968, pruned_loss=0.06512, over 938277.41 frames. ], batch size: 19, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:56:34,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.41 vs. limit=10.0 +2024-07-28 02:56:42,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=100037.33333333333, ans=0.125 +2024-07-28 02:56:55,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=100064.0, ans=0.025 +2024-07-28 02:56:57,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=100064.0, ans=0.0 +2024-07-28 02:56:58,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.830e+01 6.643e+01 7.875e+01 1.454e+02, threshold=1.329e+02, percent-clipped=3.0 +2024-07-28 02:57:01,713 INFO [train.py:1114] (3/4) Epoch 8, batch 3500, loss[loss=0.2047, simple_loss=0.2872, pruned_loss=0.06109, over 4950.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2954, pruned_loss=0.06475, over 938403.91 frames. ], batch size: 12, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:03,879 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 02:57:10,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100090.66666666667, ans=0.125 +2024-07-28 02:57:37,826 INFO [train.py:1114] (3/4) Epoch 8, batch 3550, loss[loss=0.2238, simple_loss=0.3122, pruned_loss=0.06765, over 4666.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2959, pruned_loss=0.06522, over 938888.41 frames. 
], batch size: 14, lr: 9.49e-03, grad_scale: 32.0 +2024-07-28 02:57:47,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=100157.33333333333, ans=0.125 +2024-07-28 02:58:03,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=100184.0, ans=0.2 +2024-07-28 02:58:07,551 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.623e+01 5.767e+01 6.398e+01 7.244e+01 1.008e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 02:58:24,514 INFO [train.py:1114] (3/4) Epoch 8, batch 3600, loss[loss=0.1995, simple_loss=0.2836, pruned_loss=0.05769, over 4964.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2959, pruned_loss=0.06481, over 940756.17 frames. ], batch size: 13, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:58:53,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=100224.0, ans=0.0 +2024-07-28 02:59:11,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-28 02:59:14,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. limit=15.0 +2024-07-28 02:59:18,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=100264.0, ans=0.2 +2024-07-28 02:59:19,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=100264.0, ans=0.125 +2024-07-28 02:59:25,323 INFO [train.py:1114] (3/4) Epoch 8, batch 3650, loss[loss=0.2185, simple_loss=0.3084, pruned_loss=0.06432, over 4887.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2944, pruned_loss=0.06367, over 941142.58 frames. ], batch size: 15, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 02:59:33,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=100290.66666666667, ans=0.0 +2024-07-28 02:59:39,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=100304.0, ans=0.0 +2024-07-28 02:59:49,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0 +2024-07-28 02:59:51,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-07-28 02:59:55,976 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.900e+01 6.500e+01 7.963e+01 1.457e+02, threshold=1.300e+02, percent-clipped=1.0 +2024-07-28 02:59:59,600 INFO [train.py:1114] (3/4) Epoch 8, batch 3700, loss[loss=0.2218, simple_loss=0.3173, pruned_loss=0.06317, over 4928.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.295, pruned_loss=0.06373, over 942020.43 frames. 
], batch size: 14, lr: 9.48e-03, grad_scale: 32.0 +2024-07-28 03:00:09,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=100357.33333333333, ans=0.125 +2024-07-28 03:00:09,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=100357.33333333333, ans=0.0 +2024-07-28 03:00:10,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.92 vs. limit=22.5 +2024-07-28 03:00:10,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=100357.33333333333, ans=0.125 +2024-07-28 03:00:10,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=100357.33333333333, ans=0.0 +2024-07-28 03:00:18,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100370.66666666667, ans=0.125 +2024-07-28 03:00:22,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=100384.0, ans=0.125 +2024-07-28 03:00:24,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100384.0, ans=0.1 +2024-07-28 03:00:33,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.23 vs. limit=10.0 +2024-07-28 03:00:34,749 INFO [train.py:1114] (3/4) Epoch 8, batch 3750, loss[loss=0.2353, simple_loss=0.2914, pruned_loss=0.0896, over 4792.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2953, pruned_loss=0.06358, over 943567.91 frames. ], batch size: 11, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:00:42,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=100424.0, ans=0.09899494936611666 +2024-07-28 03:00:51,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=100437.33333333333, ans=0.0 +2024-07-28 03:00:54,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=15.0 +2024-07-28 03:01:04,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=100464.0, ans=0.125 +2024-07-28 03:01:05,679 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.636e+01 6.396e+01 7.360e+01 1.035e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 03:01:08,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-07-28 03:01:08,365 INFO [train.py:1114] (3/4) Epoch 8, batch 3800, loss[loss=0.2221, simple_loss=0.3065, pruned_loss=0.06889, over 4814.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2958, pruned_loss=0.06378, over 941726.96 frames. 
], batch size: 14, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:01:10,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=100477.33333333333, ans=0.0 +2024-07-28 03:01:37,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=100504.0, ans=0.0 +2024-07-28 03:01:37,844 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:01:55,208 INFO [train.py:1114] (3/4) Epoch 8, batch 3850, loss[loss=0.2395, simple_loss=0.3318, pruned_loss=0.07363, over 4635.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.296, pruned_loss=0.06416, over 942452.99 frames. ], batch size: 16, lr: 9.47e-03, grad_scale: 16.0 +2024-07-28 03:01:58,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.28 vs. limit=15.0 +2024-07-28 03:02:04,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=14.27 vs. limit=15.0 +2024-07-28 03:02:04,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=100557.33333333333, ans=0.0 +2024-07-28 03:02:07,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.04 vs. limit=22.5 +2024-07-28 03:02:20,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=100584.0, ans=15.0 +2024-07-28 03:02:29,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.806e+01 6.374e+01 7.296e+01 1.382e+02, threshold=1.275e+02, percent-clipped=1.0 +2024-07-28 03:02:36,099 INFO [train.py:1114] (3/4) Epoch 8, batch 3900, loss[loss=0.2059, simple_loss=0.2976, pruned_loss=0.05707, over 4817.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2962, pruned_loss=0.0643, over 942500.55 frames. ], batch size: 14, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:02:55,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=100637.33333333333, ans=0.125 +2024-07-28 03:03:08,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0 +2024-07-28 03:03:13,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.80 vs. limit=6.0 +2024-07-28 03:03:14,047 INFO [train.py:1114] (3/4) Epoch 8, batch 3950, loss[loss=0.2367, simple_loss=0.3089, pruned_loss=0.08221, over 4832.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2958, pruned_loss=0.06389, over 944542.15 frames. 
], batch size: 16, lr: 9.46e-03, grad_scale: 16.0 +2024-07-28 03:03:16,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=100677.33333333333, ans=0.125 +2024-07-28 03:03:22,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100690.66666666667, ans=0.1 +2024-07-28 03:03:23,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=100690.66666666667, ans=0.125 +2024-07-28 03:03:24,147 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:03:26,976 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.41 vs. limit=22.5 +2024-07-28 03:03:29,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=100704.0, ans=0.2 +2024-07-28 03:03:31,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=100704.0, ans=0.125 +2024-07-28 03:03:44,900 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.725e+01 6.427e+01 7.427e+01 2.052e+02, threshold=1.285e+02, percent-clipped=1.0 +2024-07-28 03:04:03,407 INFO [train.py:1114] (3/4) Epoch 8, batch 4000, loss[loss=0.1827, simple_loss=0.2663, pruned_loss=0.04955, over 4780.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2955, pruned_loss=0.06377, over 940959.96 frames. ], batch size: 12, lr: 9.46e-03, grad_scale: 32.0 +2024-07-28 03:04:09,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=100757.33333333333, ans=0.0 +2024-07-28 03:04:13,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=100757.33333333333, ans=0.0 +2024-07-28 03:04:36,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=100797.33333333333, ans=0.5 +2024-07-28 03:04:38,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=100797.33333333333, ans=0.1 +2024-07-28 03:04:40,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=100797.33333333333, ans=0.125 +2024-07-28 03:04:43,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=100810.66666666667, ans=0.2 +2024-07-28 03:04:44,250 INFO [train.py:1114] (3/4) Epoch 8, batch 4050, loss[loss=0.3038, simple_loss=0.3517, pruned_loss=0.128, over 3510.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2953, pruned_loss=0.06438, over 939954.75 frames. 
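The periodic `optim.py` warnings summarize recent gradient norms as five order statistics (min, 25%, median, 75%, max) plus a clipping threshold, and in every instance in this excerpt the threshold is exactly `Clipping_scale` (2.0) times the logged median: 2.0 * 6.396e+01 = 1.279e+02 and 2.0 * 6.427e+01 = 1.285e+02. The clipping level therefore tracks the running median rather than being a fixed constant. A sketch of that bookkeeping, assuming a sliding window of recent norms (the window size is a guess, and the real `percent-clipped` counter may be computed differently):

```python
from collections import deque
import numpy as np

class MedianGradClipper:
    """Clip at scale * running-median of recent grad norms (sketch)."""
    def __init__(self, scale: float = 2.0, window: int = 200):
        self.scale = scale
        self.norms = deque(maxlen=window)

    def observe(self, grad_norm: float) -> float:
        self.norms.append(grad_norm)
        q = np.quantile(list(self.norms), [0.0, 0.25, 0.5, 0.75, 1.0])
        threshold = self.scale * q[2]
        # Approximation: share of the current window above the threshold.
        clipped = 100.0 * float(np.mean(np.array(self.norms) > threshold))
        print(f"grad-norm quartiles {q[0]:.3e} {q[1]:.3e} {q[2]:.3e} "
              f"{q[3]:.3e} {q[4]:.3e}, threshold={threshold:.3e}, "
              f"percent-clipped={clipped:.1f}")
        return threshold
```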
], batch size: 35, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:04:55,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=100824.0, ans=0.0 +2024-07-28 03:04:57,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=100837.33333333333, ans=0.07 +2024-07-28 03:05:00,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=100837.33333333333, ans=0.0 +2024-07-28 03:05:17,166 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.984e+01 6.561e+01 7.849e+01 1.305e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-28 03:05:19,920 INFO [train.py:1114] (3/4) Epoch 8, batch 4100, loss[loss=0.2731, simple_loss=0.3562, pruned_loss=0.095, over 4892.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2971, pruned_loss=0.06521, over 938766.79 frames. ], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:05:23,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-28 03:05:29,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=100890.66666666667, ans=0.0 +2024-07-28 03:05:46,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=100917.33333333333, ans=0.125 +2024-07-28 03:06:46,140 INFO [train.py:1114] (3/4) Epoch 8, batch 4150, loss[loss=0.24, simple_loss=0.3323, pruned_loss=0.07384, over 4818.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2966, pruned_loss=0.06489, over 938391.18 frames. ], batch size: 13, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:06:46,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=100944.0, ans=0.0 +2024-07-28 03:06:48,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.04 vs. limit=15.0 +2024-07-28 03:06:55,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-28 03:07:02,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=100970.66666666667, ans=0.2 +2024-07-28 03:07:02,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=100970.66666666667, ans=0.125 +2024-07-28 03:07:24,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=100984.0, ans=0.0 +2024-07-28 03:07:26,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.79 vs. 
limit=6.0 +2024-07-28 03:07:29,975 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 6.007e+01 6.703e+01 7.835e+01 1.474e+02, threshold=1.341e+02, percent-clipped=1.0 +2024-07-28 03:07:30,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=100997.33333333333, ans=0.0 +2024-07-28 03:07:52,646 INFO [train.py:1114] (3/4) Epoch 8, batch 4200, loss[loss=0.2492, simple_loss=0.3276, pruned_loss=0.08536, over 4912.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2968, pruned_loss=0.0652, over 940342.65 frames. ], batch size: 15, lr: 9.45e-03, grad_scale: 32.0 +2024-07-28 03:07:59,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.28 vs. limit=22.5 +2024-07-28 03:08:03,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=101024.0, ans=0.125 +2024-07-28 03:08:38,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=101037.33333333333, ans=0.125 +2024-07-28 03:08:58,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=101050.66666666667, ans=0.09899494936611666 +2024-07-28 03:08:58,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=101050.66666666667, ans=0.2 +2024-07-28 03:09:14,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=101064.0, ans=0.025 +2024-07-28 03:09:17,408 INFO [train.py:1114] (3/4) Epoch 8, batch 4250, loss[loss=0.2019, simple_loss=0.2756, pruned_loss=0.06416, over 4637.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2967, pruned_loss=0.06511, over 941117.43 frames. ], batch size: 12, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:17,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.26 vs. limit=10.0 +2024-07-28 03:09:19,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=101077.33333333333, ans=0.0 +2024-07-28 03:09:44,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=101117.33333333333, ans=0.05 +2024-07-28 03:09:49,641 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.910e+01 6.569e+01 7.778e+01 1.465e+02, threshold=1.314e+02, percent-clipped=1.0 +2024-07-28 03:09:52,210 INFO [train.py:1114] (3/4) Epoch 8, batch 4300, loss[loss=0.2157, simple_loss=0.3005, pruned_loss=0.06541, over 4756.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2966, pruned_loss=0.06507, over 940244.74 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:09:54,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=101144.0, ans=0.125 +2024-07-28 03:10:09,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101170.66666666667, ans=0.1 +2024-07-28 03:10:27,456 INFO [train.py:1114] (3/4) Epoch 8, batch 4350, loss[loss=0.2206, simple_loss=0.3189, pruned_loss=0.06117, over 4756.00 frames. 
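The `Whitening` lines compare a per-module statistic against a fixed `limit` (6.0 for attention keys, 10.0 and 15.0 for feed-forward and convolution outputs, 22.5 for self-attention). The log does not say how the metric is defined, but a natural candidate is a ratio that equals 1.0 when the within-group covariance of the activations is isotropic ("white") and grows as the eigenvalue spectrum spreads out, with the module presumably intervening only above the limit. A sketch under that assumption:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """mean(eig^2) / mean(eig)^2 of the within-group covariance: 1.0 for
    perfectly white features, larger otherwise. An illustrative definition,
    not necessarily the recipe's exact one."""
    n, c = x.shape
    g = c // num_groups
    metrics = []
    for i in range(num_groups):
        xg = x[:, i * g:(i + 1) * g]
        xg = xg - xg.mean(dim=0, keepdim=True)
        cov = (xg.T @ xg) / n
        # trace(cov @ cov) * g / trace(cov)^2 == mean(eig^2) / mean(eig)^2
        metrics.append((cov @ cov).trace() * g / cov.trace() ** 2)
    return float(torch.stack(metrics).mean())

print(whitening_metric(torch.randn(10000, 384)))  # ~1.04: nearly white
```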
], tot_loss[loss=0.2139, simple_loss=0.2973, pruned_loss=0.06525, over 941193.72 frames. ], batch size: 13, lr: 9.44e-03, grad_scale: 32.0 +2024-07-28 03:10:30,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.51 vs. limit=15.0 +2024-07-28 03:10:32,463 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:10:40,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=101237.33333333333, ans=0.2 +2024-07-28 03:10:43,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=101237.33333333333, ans=0.04949747468305833 +2024-07-28 03:10:58,163 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+01 5.773e+01 6.336e+01 7.369e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 03:11:01,089 INFO [train.py:1114] (3/4) Epoch 8, batch 4400, loss[loss=0.2085, simple_loss=0.3148, pruned_loss=0.05112, over 4811.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2967, pruned_loss=0.06452, over 940998.96 frames. ], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:20,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.32 vs. limit=22.5 +2024-07-28 03:11:35,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=101330.66666666667, ans=0.5 +2024-07-28 03:11:35,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=101330.66666666667, ans=0.125 +2024-07-28 03:11:36,964 INFO [train.py:1114] (3/4) Epoch 8, batch 4450, loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05152, over 4934.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2965, pruned_loss=0.06407, over 938915.10 frames. ], batch size: 12, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:11:38,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101344.0, ans=0.125 +2024-07-28 03:11:42,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=101344.0, ans=15.0 +2024-07-28 03:11:50,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=101370.66666666667, ans=0.125 +2024-07-28 03:11:51,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=101370.66666666667, ans=0.025 +2024-07-28 03:12:06,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.72 vs. 
limit=15.0 +2024-07-28 03:12:07,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=101397.33333333333, ans=0.2 +2024-07-28 03:12:07,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=101397.33333333333, ans=0.0 +2024-07-28 03:12:09,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.991e+01 6.173e+01 7.006e+01 8.907e+01 1.361e+02, threshold=1.401e+02, percent-clipped=3.0 +2024-07-28 03:12:12,342 INFO [train.py:1114] (3/4) Epoch 8, batch 4500, loss[loss=0.233, simple_loss=0.3121, pruned_loss=0.07697, over 4732.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2981, pruned_loss=0.06459, over 938028.35 frames. ], batch size: 14, lr: 9.43e-03, grad_scale: 32.0 +2024-07-28 03:12:14,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=101410.66666666667, ans=0.0 +2024-07-28 03:12:22,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=101424.0, ans=0.1 +2024-07-28 03:12:24,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.46 vs. limit=22.5 +2024-07-28 03:12:36,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0 +2024-07-28 03:12:40,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.84 vs. limit=15.0 +2024-07-28 03:12:41,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=101464.0, ans=0.2 +2024-07-28 03:12:42,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.94 vs. limit=22.5 +2024-07-28 03:12:46,594 INFO [train.py:1114] (3/4) Epoch 8, batch 4550, loss[loss=0.1822, simple_loss=0.2675, pruned_loss=0.0484, over 4895.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2973, pruned_loss=0.06466, over 940023.54 frames. ], batch size: 13, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:12:47,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=101477.33333333333, ans=0.125 +2024-07-28 03:13:09,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.06 vs. 
limit=6.0 +2024-07-28 03:13:12,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=101517.33333333333, ans=0.125 +2024-07-28 03:13:15,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=101530.66666666667, ans=0.0 +2024-07-28 03:13:15,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=101530.66666666667, ans=0.125 +2024-07-28 03:13:19,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.796e+01 6.389e+01 7.358e+01 1.083e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 03:13:22,137 INFO [train.py:1114] (3/4) Epoch 8, batch 4600, loss[loss=0.2433, simple_loss=0.3265, pruned_loss=0.08004, over 4500.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2972, pruned_loss=0.06467, over 938248.33 frames. ], batch size: 21, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:23,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=101544.0, ans=0.2 +2024-07-28 03:13:25,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.14 vs. limit=10.0 +2024-07-28 03:13:28,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=101557.33333333333, ans=0.0 +2024-07-28 03:13:30,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=101557.33333333333, ans=0.0 +2024-07-28 03:13:37,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=101570.66666666667, ans=0.025 +2024-07-28 03:13:48,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=101597.33333333333, ans=0.1 +2024-07-28 03:13:55,116 INFO [train.py:1114] (3/4) Epoch 8, batch 4650, loss[loss=0.2065, simple_loss=0.3015, pruned_loss=0.05577, over 4826.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2975, pruned_loss=0.06453, over 939675.01 frames. ], batch size: 16, lr: 9.42e-03, grad_scale: 32.0 +2024-07-28 03:13:55,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.26 vs. limit=22.5 +2024-07-28 03:14:19,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=101650.66666666667, ans=0.0 +2024-07-28 03:14:24,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=101664.0, ans=0.0 +2024-07-28 03:14:27,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.774e+01 6.444e+01 7.624e+01 1.056e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:14:29,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=101677.33333333333, ans=0.0 +2024-07-28 03:14:30,291 INFO [train.py:1114] (3/4) Epoch 8, batch 4700, loss[loss=0.1932, simple_loss=0.2701, pruned_loss=0.05819, over 4709.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2978, pruned_loss=0.06501, over 937012.92 frames. 
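Each `train.py` line pairs a per-batch loss (`loss[... over 4792.00 frames.]`) with a running aggregate (`tot_loss[... over 943567.91 frames.]`). The "over N frames" phrasing suggests frame-weighted averaging: losses are accumulated weighted by the number of acoustic frames in each batch and reported as weighted sum divided by total frames. A minimal sketch; the decay factor that keeps the window recent is an assumption:

```python
class FrameWeightedLoss:
    """Running loss average weighted by frame counts (sketch)."""
    def __init__(self, decay: float = 0.99):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tot = FrameWeightedLoss()
tot.update(0.2353, 4792.0)
print(f"tot_loss[loss={tot.value:.4f}, over {tot.frames:.2f} frames.]")
```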
], batch size: 11, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:14:37,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=101690.66666666667, ans=0.125 +2024-07-28 03:14:43,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=101704.0, ans=0.0 +2024-07-28 03:15:04,165 INFO [train.py:1114] (3/4) Epoch 8, batch 4750, loss[loss=0.2102, simple_loss=0.2851, pruned_loss=0.06764, over 4503.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.297, pruned_loss=0.06502, over 934999.76 frames. ], batch size: 21, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:12,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=101757.33333333333, ans=0.0 +2024-07-28 03:15:15,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=101757.33333333333, ans=0.125 +2024-07-28 03:15:17,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.83 vs. limit=22.5 +2024-07-28 03:15:19,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=101770.66666666667, ans=0.025 +2024-07-28 03:15:30,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=101784.0, ans=0.125 +2024-07-28 03:15:37,282 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.718e+01 6.515e+01 7.341e+01 9.928e+01, threshold=1.303e+02, percent-clipped=0.0 +2024-07-28 03:15:40,387 INFO [train.py:1114] (3/4) Epoch 8, batch 4800, loss[loss=0.2263, simple_loss=0.3031, pruned_loss=0.07473, over 4695.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2961, pruned_loss=0.06469, over 932141.52 frames. ], batch size: 13, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:15:53,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=101824.0, ans=0.125 +2024-07-28 03:15:58,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.49 vs. limit=22.5 +2024-07-28 03:16:08,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.99 vs. limit=15.0 +2024-07-28 03:16:11,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=101864.0, ans=0.125 +2024-07-28 03:16:13,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=101864.0, ans=0.0 +2024-07-28 03:16:17,654 INFO [train.py:1114] (3/4) Epoch 8, batch 4850, loss[loss=0.246, simple_loss=0.3293, pruned_loss=0.08132, over 4750.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2974, pruned_loss=0.06562, over 931323.51 frames. 
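The many `balancer` entries (`*.balancer1.prob`, `min_positive`, `max_abs`, `min_abs`) parameterize modules that keep per-channel activation statistics inside prescribed ranges. One way to implement such a constraint without changing the forward pass is an identity op whose backward pass adds a small corrective gradient for out-of-range channels; the sketch below takes that reading (the constraint set and correction size are simplifications, not the recipe's code):

```python
import torch

class BalancerFunction(torch.autograd.Function):
    """Identity forward; backward nudges channels (last dim) whose stats
    violate min_positive / max_abs bounds. Simplified sketch."""
    @staticmethod
    def forward(ctx, x, min_positive, max_abs, grad_eps):
        ctx.save_for_backward(x)
        ctx.min_positive, ctx.max_abs, ctx.grad_eps = min_positive, max_abs, grad_eps
        return x

    @staticmethod
    def backward(ctx, grad_out):
        (x,) = ctx.saved_tensors
        dims = tuple(range(x.dim() - 1))            # reduce all but channel dim
        frac_pos = (x > 0).float().mean(dim=dims)
        mean_abs = x.abs().mean(dim=dims)
        push_up = (frac_pos < ctx.min_positive).float()  # too rarely positive
        push_down = (mean_abs > ctx.max_abs).float()     # too large in magnitude
        correction = ctx.grad_eps * (push_down * torch.sign(x) - push_up)
        return grad_out + correction, None, None, None

x = torch.randn(16, 100, 384, requires_grad=True)
y = BalancerFunction.apply(x, 0.05, 10.0, 1e-4)
y.sum().backward()  # x.grad now carries the corrective term
```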
], batch size: 14, lr: 9.41e-03, grad_scale: 32.0 +2024-07-28 03:16:36,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=101904.0, ans=0.0 +2024-07-28 03:16:46,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=101930.66666666667, ans=0.125 +2024-07-28 03:16:51,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.836e+01 5.718e+01 6.267e+01 6.950e+01 1.595e+02, threshold=1.253e+02, percent-clipped=1.0 +2024-07-28 03:17:01,615 INFO [train.py:1114] (3/4) Epoch 8, batch 4900, loss[loss=0.1919, simple_loss=0.2984, pruned_loss=0.04272, over 4755.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2975, pruned_loss=0.06512, over 933459.61 frames. ], batch size: 13, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:08,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=101957.33333333333, ans=10.0 +2024-07-28 03:17:20,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=101970.66666666667, ans=0.04949747468305833 +2024-07-28 03:17:23,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=101984.0, ans=0.09899494936611666 +2024-07-28 03:17:29,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=101997.33333333333, ans=0.125 +2024-07-28 03:17:33,116 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:17:35,650 INFO [train.py:1114] (3/4) Epoch 8, batch 4950, loss[loss=0.2509, simple_loss=0.3329, pruned_loss=0.08443, over 3316.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2992, pruned_loss=0.06623, over 931192.25 frames. ], batch size: 35, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:17:44,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=102024.0, ans=10.0 +2024-07-28 03:17:46,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=102024.0, ans=0.025 +2024-07-28 03:17:53,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.47 vs. limit=10.0 +2024-07-28 03:18:08,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.891e+01 5.855e+01 6.357e+01 7.218e+01 9.647e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 03:18:10,645 INFO [train.py:1114] (3/4) Epoch 8, batch 5000, loss[loss=0.2058, simple_loss=0.3042, pruned_loss=0.05368, over 4667.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2977, pruned_loss=0.06517, over 935009.26 frames. 
], batch size: 14, lr: 9.40e-03, grad_scale: 32.0 +2024-07-28 03:18:10,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102077.33333333333, ans=0.1 +2024-07-28 03:18:12,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102077.33333333333, ans=0.0 +2024-07-28 03:18:28,362 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:18:33,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102117.33333333333, ans=0.1 +2024-07-28 03:18:39,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=102130.66666666667, ans=0.125 +2024-07-28 03:18:40,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=102130.66666666667, ans=0.0 +2024-07-28 03:18:42,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=102130.66666666667, ans=0.125 +2024-07-28 03:18:43,781 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:18:45,829 INFO [train.py:1114] (3/4) Epoch 8, batch 5050, loss[loss=0.2011, simple_loss=0.2789, pruned_loss=0.06164, over 4833.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2973, pruned_loss=0.06518, over 937709.56 frames. ], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:18:47,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102144.0, ans=0.1 +2024-07-28 03:18:50,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102144.0, ans=0.1 +2024-07-28 03:18:54,585 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.88 vs. limit=15.0 +2024-07-28 03:18:55,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=102157.33333333333, ans=0.0 +2024-07-28 03:18:57,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=102157.33333333333, ans=0.0 +2024-07-28 03:19:02,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102170.66666666667, ans=0.1 +2024-07-28 03:19:09,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=102184.0, ans=0.0 +2024-07-28 03:19:16,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=102197.33333333333, ans=0.2 +2024-07-28 03:19:17,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.666e+01 5.803e+01 6.269e+01 7.279e+01 1.149e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 03:19:19,903 INFO [train.py:1114] (3/4) Epoch 8, batch 5100, loss[loss=0.2052, simple_loss=0.2889, pruned_loss=0.06076, over 4774.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2971, pruned_loss=0.0655, over 935007.43 frames. 
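`grad_scale` in the `train.py` lines is the dynamic loss-scaling factor of mixed-precision training; it moves between 16.0 and 32.0 in this excerpt, the signature of a scaler that halves on gradient overflow and doubles back after a run of clean steps. The standard PyTorch pattern, assuming a CUDA device and an illustrative model:

```python
import torch

model = torch.nn.Linear(80, 512).cuda()
opt = torch.optim.Adam(model.parameters(), lr=9.40e-3)
scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

def train_step(feats, targets):
    opt.zero_grad()
    with torch.cuda.amp.autocast():
        loss = torch.nn.functional.mse_loss(model(feats), targets)
    scaler.scale(loss).backward()
    scaler.step(opt)      # silently skips the step on inf/nan gradients
    scaler.update()       # halves the scale on overflow, grows it otherwise
    return scaler.get_scale()   # the value logged here as grad_scale
```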
], batch size: 12, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:19,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=102210.66666666667, ans=0.125 +2024-07-28 03:19:51,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=102264.0, ans=0.1 +2024-07-28 03:19:54,794 INFO [train.py:1114] (3/4) Epoch 8, batch 5150, loss[loss=0.2472, simple_loss=0.3323, pruned_loss=0.08104, over 4846.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2982, pruned_loss=0.06553, over 935615.02 frames. ], batch size: 16, lr: 9.39e-03, grad_scale: 16.0 +2024-07-28 03:19:56,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=102277.33333333333, ans=0.025 +2024-07-28 03:19:57,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.06 vs. limit=15.0 +2024-07-28 03:20:12,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=102304.0, ans=0.0 +2024-07-28 03:20:17,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.07 vs. limit=15.0 +2024-07-28 03:20:19,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=102317.33333333333, ans=0.0 +2024-07-28 03:20:26,078 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.815e+01 6.319e+01 7.025e+01 9.950e+01, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 03:20:30,186 INFO [train.py:1114] (3/4) Epoch 8, batch 5200, loss[loss=0.234, simple_loss=0.3433, pruned_loss=0.06231, over 4664.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2969, pruned_loss=0.0645, over 935592.38 frames. ], batch size: 14, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:20:30,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=102344.0, ans=0.125 +2024-07-28 03:20:30,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102344.0, ans=0.0 +2024-07-28 03:20:30,405 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:20:52,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer_ff2.min_abs, batch_count=102384.0, ans=0.1 +2024-07-28 03:20:53,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102384.0, ans=0.1 +2024-07-28 03:21:04,855 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:21:05,406 INFO [train.py:1114] (3/4) Epoch 8, batch 5250, loss[loss=0.2113, simple_loss=0.3032, pruned_loss=0.0597, over 4891.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2972, pruned_loss=0.0647, over 935536.37 frames. 
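The `WithLoss: name=...self_attn_weights, loss-sum=0.000e+00` lines attach an auxiliary loss to attention weights and report its accumulated value, which is zero throughout this excerpt. What the penalty measures is not visible in the log; the sketch below only shows the wrapper shape (identity on the data path, side-channel accumulation for logging), with an invented example penalty:

```python
import torch
from typing import Optional

class WithLoss(torch.nn.Module):
    """Identity on the data path; accumulates an auxiliary penalty for
    logging. The penalty below (attention mass on padding) is invented."""
    def __init__(self, name: str):
        super().__init__()
        self.name = name
        self.loss_sum = 0.0

    def forward(self, attn_weights: torch.Tensor,
                pad_mask: Optional[torch.Tensor] = None) -> torch.Tensor:
        if pad_mask is not None:
            self.loss_sum += float((attn_weights * pad_mask).sum())
        print(f"WithLoss: name={self.name}, loss-sum={self.loss_sum:.3e}")
        return attn_weights
```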
], batch size: 13, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:05,480 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:21:13,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.09 vs. limit=15.0 +2024-07-28 03:21:15,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=102424.0, ans=0.125 +2024-07-28 03:21:16,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=102424.0, ans=0.04949747468305833 +2024-07-28 03:21:18,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.96 vs. limit=22.5 +2024-07-28 03:21:22,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=102437.33333333333, ans=0.2 +2024-07-28 03:21:24,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.32 vs. limit=15.0 +2024-07-28 03:21:26,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.12 vs. limit=15.0 +2024-07-28 03:21:35,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=102464.0, ans=0.0 +2024-07-28 03:21:36,699 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.809e+01 6.446e+01 7.224e+01 1.154e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:21:38,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=102477.33333333333, ans=0.125 +2024-07-28 03:21:38,685 INFO [train.py:1114] (3/4) Epoch 8, batch 5300, loss[loss=0.2485, simple_loss=0.3165, pruned_loss=0.09029, over 4663.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2966, pruned_loss=0.06461, over 934413.98 frames. ], batch size: 16, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:21:38,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=102477.33333333333, ans=0.0 +2024-07-28 03:21:42,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.69 vs. limit=22.5 +2024-07-28 03:21:44,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=102477.33333333333, ans=0.125 +2024-07-28 03:21:45,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102490.66666666667, ans=0.1 +2024-07-28 03:21:50,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=102490.66666666667, ans=0.125 +2024-07-28 03:21:50,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.96 vs. limit=22.5 +2024-07-28 03:21:52,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.08 vs. 
limit=15.0 +2024-07-28 03:21:58,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=102517.33333333333, ans=0.0 +2024-07-28 03:22:02,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102517.33333333333, ans=0.1 +2024-07-28 03:22:04,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=102530.66666666667, ans=0.1 +2024-07-28 03:22:06,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=102530.66666666667, ans=0.09899494936611666 +2024-07-28 03:22:08,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.11 vs. limit=22.5 +2024-07-28 03:22:11,880 INFO [train.py:1114] (3/4) Epoch 8, batch 5350, loss[loss=0.1814, simple_loss=0.2525, pruned_loss=0.05515, over 4517.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2974, pruned_loss=0.06523, over 936148.51 frames. ], batch size: 10, lr: 9.38e-03, grad_scale: 32.0 +2024-07-28 03:22:16,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=102544.0, ans=0.125 +2024-07-28 03:22:19,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=102557.33333333333, ans=0.04949747468305833 +2024-07-28 03:22:22,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=102557.33333333333, ans=0.5 +2024-07-28 03:22:22,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=102557.33333333333, ans=0.0 +2024-07-28 03:22:22,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=102557.33333333333, ans=0.125 +2024-07-28 03:22:26,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=102570.66666666667, ans=0.2 +2024-07-28 03:22:30,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=102570.66666666667, ans=0.07 +2024-07-28 03:22:35,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.60 vs. limit=15.0 +2024-07-28 03:22:43,564 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 6.073e+01 6.739e+01 7.548e+01 1.442e+02, threshold=1.348e+02, percent-clipped=1.0 +2024-07-28 03:22:44,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102597.33333333333, ans=0.125 +2024-07-28 03:22:45,667 INFO [train.py:1114] (3/4) Epoch 8, batch 5400, loss[loss=0.2129, simple_loss=0.2932, pruned_loss=0.06635, over 4265.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.298, pruned_loss=0.06547, over 930160.19 frames. 
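Entries like `*.bypass.skip_rate` (0.07, and its sqrt(2)-scaled variants), `attention_skip_rate` / `conv_skip_rate` (mostly 0.0 by this point in training), and `bypass.scale_min` (0.2) read together suggest residual sub-modules that are stochastically skipped during training with a scheduled probability, plus a learned bypass scale clamped from below. A sketch of that interpretation, not the recipe's exact code:

```python
import torch

class Bypass(torch.nn.Module):
    """y = x + scale * (sublayer(x) - x); the sublayer is skipped entirely
    with probability skip_rate at training time (interpretive sketch)."""
    def __init__(self, sublayer: torch.nn.Module, dim: int,
                 skip_rate: float = 0.07, scale_min: float = 0.2):
        super().__init__()
        self.sublayer = sublayer
        self.scale = torch.nn.Parameter(torch.ones(dim))
        self.skip_rate = skip_rate
        self.scale_min = scale_min

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        if self.training and float(torch.rand(())) < self.skip_rate:
            return x  # bypass the sublayer for this batch
        scale = self.scale.clamp(min=self.scale_min)
        return x + scale * (self.sublayer(x) - x)
```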
], batch size: 25, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:23:03,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=102637.33333333333, ans=0.1 +2024-07-28 03:23:07,782 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.36 vs. limit=6.0 +2024-07-28 03:23:10,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=102650.66666666667, ans=0.0 +2024-07-28 03:23:20,680 INFO [train.py:1114] (3/4) Epoch 8, batch 5450, loss[loss=0.1891, simple_loss=0.2611, pruned_loss=0.05856, over 4714.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2969, pruned_loss=0.06456, over 933146.43 frames. ], batch size: 11, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:23:29,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=102690.66666666667, ans=0.2 +2024-07-28 03:23:54,557 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 6.203e+01 6.756e+01 7.672e+01 1.108e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 03:23:56,656 INFO [train.py:1114] (3/4) Epoch 8, batch 5500, loss[loss=0.2296, simple_loss=0.3099, pruned_loss=0.07465, over 4139.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.297, pruned_loss=0.06482, over 930340.20 frames. ], batch size: 25, lr: 9.37e-03, grad_scale: 32.0 +2024-07-28 03:24:13,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=15.0 +2024-07-28 03:24:15,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=102770.66666666667, ans=0.0 +2024-07-28 03:24:27,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=102797.33333333333, ans=0.0 +2024-07-28 03:24:29,632 INFO [train.py:1114] (3/4) Epoch 8, batch 5550, loss[loss=0.2093, simple_loss=0.2901, pruned_loss=0.06429, over 4692.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2961, pruned_loss=0.06422, over 932417.43 frames. ], batch size: 12, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:24:32,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=15.0 +2024-07-28 03:24:35,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=102810.66666666667, ans=0.0 +2024-07-28 03:24:42,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-07-28 03:24:48,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.76 vs. limit=15.0 +2024-07-28 03:24:49,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. 
limit=15.0 +2024-07-28 03:24:51,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=102850.66666666667, ans=0.0 +2024-07-28 03:25:00,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=102864.0, ans=0.125 +2024-07-28 03:25:01,400 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.969e+01 5.947e+01 6.604e+01 7.771e+01 1.160e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 03:25:03,446 INFO [train.py:1114] (3/4) Epoch 8, batch 5600, loss[loss=0.1986, simple_loss=0.2875, pruned_loss=0.0548, over 4745.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2962, pruned_loss=0.06406, over 933972.23 frames. ], batch size: 14, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:25:25,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=102917.33333333333, ans=0.09899494936611666 +2024-07-28 03:25:25,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=102917.33333333333, ans=0.125 +2024-07-28 03:25:33,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=102930.66666666667, ans=0.0 +2024-07-28 03:25:35,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0 +2024-07-28 03:25:38,385 INFO [train.py:1114] (3/4) Epoch 8, batch 5650, loss[loss=0.2107, simple_loss=0.2983, pruned_loss=0.06157, over 4476.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2955, pruned_loss=0.064, over 936282.12 frames. ], batch size: 21, lr: 9.36e-03, grad_scale: 32.0 +2024-07-28 03:25:56,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=102970.66666666667, ans=0.2 +2024-07-28 03:26:00,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.84 vs. limit=22.5 +2024-07-28 03:26:09,442 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.666e+01 6.096e+01 6.693e+01 9.432e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 03:26:11,511 INFO [train.py:1114] (3/4) Epoch 8, batch 5700, loss[loss=0.2355, simple_loss=0.3252, pruned_loss=0.07285, over 4697.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.296, pruned_loss=0.06456, over 937651.85 frames. 
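The learning rate drifts smoothly downward across this excerpt (9.48e-03 at batch 3750 to 9.35e-03 by batch 5750) and changes within an epoch, so it is scheduled on batch count rather than stepped per epoch. A power-law decay in both batch and epoch reproduces a drift of this shape; the functional form and all constants below are assumptions for illustration, not the recipe's scheduler:

```python
def power_law_lr(base_lr: float, batch: int, epoch: int,
                 lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Smooth decay in both batch and epoch (illustrative constants)."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Decays gently at this depth into training, as the 'lr:' fields do:
print(power_law_lr(0.042, batch=100400, epoch=8))
print(power_law_lr(0.042, batch=103100, epoch=8))
```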
], batch size: 13, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:26:11,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=103010.66666666667, ans=0.0 +2024-07-28 03:26:15,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=103010.66666666667, ans=0.125 +2024-07-28 03:26:21,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103024.0, ans=0.1 +2024-07-28 03:26:26,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=103024.0, ans=0.125 +2024-07-28 03:26:28,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=103037.33333333333, ans=10.0 +2024-07-28 03:26:34,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=103050.66666666667, ans=0.0 +2024-07-28 03:26:46,893 INFO [train.py:1114] (3/4) Epoch 8, batch 5750, loss[loss=0.2333, simple_loss=0.3127, pruned_loss=0.07694, over 4741.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2961, pruned_loss=0.06467, over 937831.42 frames. ], batch size: 19, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:26:49,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=103077.33333333333, ans=0.2 +2024-07-28 03:26:55,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=103090.66666666667, ans=0.07 +2024-07-28 03:27:04,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=103104.0, ans=0.0 +2024-07-28 03:27:18,489 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.742e+01 5.884e+01 6.600e+01 7.288e+01 1.127e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-28 03:27:20,763 INFO [train.py:1114] (3/4) Epoch 8, batch 5800, loss[loss=0.2369, simple_loss=0.3302, pruned_loss=0.07179, over 4757.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2969, pruned_loss=0.06522, over 937384.26 frames. ], batch size: 19, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:27:28,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=15.0 +2024-07-28 03:27:38,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=103170.66666666667, ans=0.2 +2024-07-28 03:27:49,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103184.0, ans=0.125 +2024-07-28 03:27:49,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=103184.0, ans=0.2 +2024-07-28 03:27:52,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=103197.33333333333, ans=0.125 +2024-07-28 03:27:59,746 INFO [train.py:1114] (3/4) Epoch 8, batch 5850, loss[loss=0.2907, simple_loss=0.3603, pruned_loss=0.1106, over 4444.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2973, pruned_loss=0.06519, over 937689.66 frames. 
], batch size: 21, lr: 9.35e-03, grad_scale: 32.0 +2024-07-28 03:28:00,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=103210.66666666667, ans=0.025 +2024-07-28 03:28:10,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.61 vs. limit=6.0 +2024-07-28 03:28:21,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=103250.66666666667, ans=0.09899494936611666 +2024-07-28 03:28:30,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=103264.0, ans=0.0 +2024-07-28 03:28:30,669 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.776e+01 6.352e+01 7.126e+01 1.312e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 03:28:38,757 INFO [train.py:1114] (3/4) Epoch 8, batch 5900, loss[loss=0.2472, simple_loss=0.323, pruned_loss=0.08569, over 4678.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.298, pruned_loss=0.06527, over 937776.08 frames. ], batch size: 15, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:28:56,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=103304.0, ans=0.0 +2024-07-28 03:29:14,046 INFO [train.py:1114] (3/4) Epoch 8, batch 5950, loss[loss=0.2046, simple_loss=0.2893, pruned_loss=0.05993, over 4669.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2973, pruned_loss=0.06503, over 939969.62 frames. ], batch size: 15, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:29:17,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=103344.0, ans=0.2 +2024-07-28 03:29:45,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.782e+01 5.778e+01 6.625e+01 7.689e+01 1.053e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 03:29:47,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-07-28 03:29:47,507 INFO [train.py:1114] (3/4) Epoch 8, batch 6000, loss[loss=0.2346, simple_loss=0.3233, pruned_loss=0.07296, over 4203.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2969, pruned_loss=0.06501, over 937286.35 frames. ], batch size: 25, lr: 9.34e-03, grad_scale: 32.0 +2024-07-28 03:29:47,507 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 03:30:03,576 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([6.2949, 5.4085, 5.4380, 6.0679], device='cuda:3') +2024-07-28 03:30:06,860 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.1796, simple_loss=0.2837, pruned_loss=0.03775, over 944034.00 frames. +2024-07-28 03:30:06,861 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 03:30:15,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0 +2024-07-28 03:30:19,084 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:30:21,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.42 vs. 
limit=15.0 +2024-07-28 03:30:42,206 INFO [train.py:1114] (3/4) Epoch 8, batch 6050, loss[loss=0.2197, simple_loss=0.2935, pruned_loss=0.07295, over 4781.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.297, pruned_loss=0.06518, over 938419.16 frames. ], batch size: 12, lr: 9.33e-03, grad_scale: 32.0 +2024-07-28 03:30:44,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=103477.33333333333, ans=0.0 +2024-07-28 03:30:56,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.91 vs. limit=15.0 +2024-07-28 03:31:01,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103504.0, ans=0.125 +2024-07-28 03:31:12,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.19 vs. limit=12.0 +2024-07-28 03:31:14,074 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.635e+01 6.111e+01 6.957e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 03:31:16,063 INFO [train.py:1114] (3/4) Epoch 8, batch 6100, loss[loss=0.2223, simple_loss=0.3075, pruned_loss=0.06853, over 4684.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2965, pruned_loss=0.06477, over 938163.13 frames. ], batch size: 15, lr: 9.33e-03, grad_scale: 32.0 +2024-07-28 03:31:16,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=103544.0, ans=0.125 +2024-07-28 03:31:19,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=103544.0, ans=0.125 +2024-07-28 03:31:22,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=103557.33333333333, ans=0.125 +2024-07-28 03:31:23,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=103557.33333333333, ans=0.1 +2024-07-28 03:31:24,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=103557.33333333333, ans=0.2 +2024-07-28 03:31:26,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=103557.33333333333, ans=0.125 +2024-07-28 03:31:28,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103557.33333333333, ans=0.125 +2024-07-28 03:31:29,875 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:31:39,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=103584.0, ans=0.0 +2024-07-28 03:31:40,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=103584.0, ans=0.125 +2024-07-28 03:31:54,405 INFO [train.py:1114] (3/4) Epoch 8, batch 6150, loss[loss=0.2635, simple_loss=0.3385, pruned_loss=0.0942, over 3298.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2968, pruned_loss=0.0648, over 936826.25 frames. 
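At batch 6000 (just above) the loop pauses for validation: it logs "Computing validation loss", an attention-weights entropy tensor, the validation result (loss=0.1796, noticeably below the ~0.21 training aggregate, presumably because dropout and the skip rates are inactive at eval time), and the peak GPU allocation ("Maximum memory allocated so far is 4142MB"). The memory figure is PyTorch's standard peak-allocation counter; a minimal sketch of such a validation pass, with an assumed loader and model interface:

```python
import torch

@torch.no_grad()
def run_validation(model, valid_loader, device="cuda:3"):
    model.eval()
    loss_sum, frames = 0.0, 0.0
    for feats, targets, n_frames in valid_loader:  # assumed batch format
        loss = model(feats.to(device), targets.to(device))
        loss_sum += float(loss) * n_frames
        frames += n_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={loss_sum / frames:.4f}, over {frames:.2f} frames.")
    print(f"Maximum memory allocated so far is {peak_mb}MB")
```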
], batch size: 35, lr: 9.33e-03, grad_scale: 32.0 +2024-07-28 03:32:11,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103637.33333333333, ans=0.125 +2024-07-28 03:32:20,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn2.whiten.whitening_limit, batch_count=103650.66666666667, ans=22.5 +2024-07-28 03:32:25,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.86 vs. limit=6.0 +2024-07-28 03:32:26,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=103664.0, ans=0.025 +2024-07-28 03:32:29,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103664.0, ans=0.125 +2024-07-28 03:32:30,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.158e+01 5.994e+01 6.634e+01 7.988e+01 1.219e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 03:32:32,388 INFO [train.py:1114] (3/4) Epoch 8, batch 6200, loss[loss=0.1804, simple_loss=0.2827, pruned_loss=0.03906, over 4739.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2968, pruned_loss=0.06447, over 936357.32 frames. ], batch size: 14, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:32:42,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.42 vs. limit=22.5 +2024-07-28 03:32:43,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=103690.66666666667, ans=0.0 +2024-07-28 03:32:49,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=103704.0, ans=0.125 +2024-07-28 03:32:53,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103717.33333333333, ans=0.1 +2024-07-28 03:32:55,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103717.33333333333, ans=0.1 +2024-07-28 03:33:00,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=103730.66666666667, ans=0.125 +2024-07-28 03:33:06,924 INFO [train.py:1114] (3/4) Epoch 8, batch 6250, loss[loss=0.2364, simple_loss=0.3194, pruned_loss=0.07673, over 4811.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2975, pruned_loss=0.06519, over 932898.28 frames. 
], batch size: 14, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:33:11,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=103744.0, ans=0.025 +2024-07-28 03:33:16,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103757.33333333333, ans=0.125 +2024-07-28 03:33:29,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=103770.66666666667, ans=0.125 +2024-07-28 03:33:30,028 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:33:37,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=103784.0, ans=0.125 +2024-07-28 03:33:59,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=103797.33333333333, ans=0.125 +2024-07-28 03:34:02,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=103797.33333333333, ans=0.1 +2024-07-28 03:34:05,278 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.622e+01 6.181e+01 7.164e+01 1.267e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 03:34:07,332 INFO [train.py:1114] (3/4) Epoch 8, batch 6300, loss[loss=0.1966, simple_loss=0.2707, pruned_loss=0.06129, over 4500.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.298, pruned_loss=0.06566, over 929994.83 frames. ], batch size: 10, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:34:09,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=103810.66666666667, ans=0.0 +2024-07-28 03:34:10,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=103810.66666666667, ans=0.0 +2024-07-28 03:34:29,208 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=12.0 +2024-07-28 03:34:32,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=103850.66666666667, ans=0.0 +2024-07-28 03:34:36,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.93 vs. limit=10.0 +2024-07-28 03:34:53,142 INFO [train.py:1114] (3/4) Epoch 8, batch 6350, loss[loss=0.2558, simple_loss=0.3211, pruned_loss=0.09521, over 4474.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2962, pruned_loss=0.06452, over 933954.91 frames. 
], batch size: 21, lr: 9.32e-03, grad_scale: 32.0 +2024-07-28 03:34:53,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=103877.33333333333, ans=0.125 +2024-07-28 03:34:53,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=103877.33333333333, ans=0.125 +2024-07-28 03:35:05,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=103890.66666666667, ans=0.125 +2024-07-28 03:35:12,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=103904.0, ans=0.125 +2024-07-28 03:35:19,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0 +2024-07-28 03:35:25,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=103930.66666666667, ans=0.2 +2024-07-28 03:35:37,029 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.697e+01 6.431e+01 7.734e+01 1.122e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 03:35:38,996 INFO [train.py:1114] (3/4) Epoch 8, batch 6400, loss[loss=0.2097, simple_loss=0.2916, pruned_loss=0.06389, over 4630.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2958, pruned_loss=0.06463, over 935157.48 frames. ], batch size: 13, lr: 9.31e-03, grad_scale: 32.0 +2024-07-28 03:35:40,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.00 vs. limit=15.0 +2024-07-28 03:35:40,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=103944.0, ans=0.0 +2024-07-28 03:35:43,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=103944.0, ans=10.0 +2024-07-28 03:35:48,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=103957.33333333333, ans=0.125 +2024-07-28 03:35:53,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=103970.66666666667, ans=0.1 +2024-07-28 03:35:55,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.08 vs. limit=15.0 +2024-07-28 03:35:57,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=103970.66666666667, ans=0.0 +2024-07-28 03:36:04,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=103984.0, ans=0.125 +2024-07-28 03:36:09,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=103984.0, ans=0.125 +2024-07-28 03:36:13,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.29 vs. limit=10.0 +2024-07-28 03:36:20,337 INFO [train.py:1114] (3/4) Epoch 8, batch 6450, loss[loss=0.2269, simple_loss=0.3005, pruned_loss=0.07662, over 4514.00 frames. 
], tot_loss[loss=0.2131, simple_loss=0.2965, pruned_loss=0.06484, over 938902.59 frames. ], batch size: 21, lr: 9.31e-03, grad_scale: 32.0 +2024-07-28 03:36:20,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.22 vs. limit=15.0 +2024-07-28 03:36:33,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.00 vs. limit=10.0 +2024-07-28 03:36:40,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=104050.66666666667, ans=0.5 +2024-07-28 03:36:49,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=104064.0, ans=0.1 +2024-07-28 03:36:52,543 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.901e+01 6.090e+01 6.968e+01 8.127e+01 1.259e+02, threshold=1.394e+02, percent-clipped=0.0 +2024-07-28 03:36:54,646 INFO [train.py:1114] (3/4) Epoch 8, batch 6500, loss[loss=0.2511, simple_loss=0.3121, pruned_loss=0.09501, over 3466.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2953, pruned_loss=0.06418, over 940192.70 frames. ], batch size: 35, lr: 9.31e-03, grad_scale: 32.0 +2024-07-28 03:36:54,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=104077.33333333333, ans=0.125 +2024-07-28 03:36:54,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=104077.33333333333, ans=0.125 +2024-07-28 03:37:18,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-07-28 03:37:19,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=104117.33333333333, ans=0.0 +2024-07-28 03:37:30,146 INFO [train.py:1114] (3/4) Epoch 8, batch 6550, loss[loss=0.1787, simple_loss=0.2559, pruned_loss=0.05078, over 4803.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2942, pruned_loss=0.0636, over 943079.15 frames. ], batch size: 11, lr: 9.30e-03, grad_scale: 32.0 +2024-07-28 03:37:30,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104144.0, ans=0.125 +2024-07-28 03:37:34,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=104144.0, ans=0.0 +2024-07-28 03:37:39,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=104157.33333333333, ans=0.125 +2024-07-28 03:37:43,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104170.66666666667, ans=0.1 +2024-07-28 03:38:02,247 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.695e+01 6.284e+01 7.396e+01 1.281e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 03:38:04,221 INFO [train.py:1114] (3/4) Epoch 8, batch 6600, loss[loss=0.1867, simple_loss=0.2806, pruned_loss=0.0464, over 4929.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.295, pruned_loss=0.06419, over 944942.84 frames. 
], batch size: 14, lr: 9.30e-03, grad_scale: 32.0 +2024-07-28 03:38:08,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=104210.66666666667, ans=0.5 +2024-07-28 03:38:09,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=104210.66666666667, ans=0.2 +2024-07-28 03:38:15,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=104224.0, ans=0.125 +2024-07-28 03:38:29,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=104237.33333333333, ans=0.0 +2024-07-28 03:38:55,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.70 vs. limit=15.0 +2024-07-28 03:38:58,594 INFO [train.py:1114] (3/4) Epoch 8, batch 6650, loss[loss=0.1793, simple_loss=0.261, pruned_loss=0.0488, over 4651.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2943, pruned_loss=0.06394, over 943807.02 frames. ], batch size: 17, lr: 9.30e-03, grad_scale: 32.0 +2024-07-28 03:39:05,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=104290.66666666667, ans=0.125 +2024-07-28 03:39:06,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=104290.66666666667, ans=0.0 +2024-07-28 03:39:13,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=104304.0, ans=0.125 +2024-07-28 03:39:20,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-28 03:39:21,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=104304.0, ans=0.125 +2024-07-28 03:39:22,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=104317.33333333333, ans=0.0 +2024-07-28 03:39:32,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.00 vs. limit=15.0 +2024-07-28 03:39:34,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=104330.66666666667, ans=0.125 +2024-07-28 03:39:34,786 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.880e+01 5.792e+01 6.278e+01 6.949e+01 1.059e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 03:39:44,721 INFO [train.py:1114] (3/4) Epoch 8, batch 6700, loss[loss=0.2535, simple_loss=0.3341, pruned_loss=0.08644, over 4649.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.295, pruned_loss=0.06425, over 942501.86 frames. ], batch size: 19, lr: 9.29e-03, grad_scale: 32.0 +2024-07-28 03:40:00,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.52 vs. 
limit=22.5 +2024-07-28 03:40:13,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=104384.0, ans=0.0 +2024-07-28 03:40:13,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=104384.0, ans=0.2 +2024-07-28 03:40:15,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=104384.0, ans=0.125 +2024-07-28 03:40:17,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.62 vs. limit=15.0 +2024-07-28 03:40:20,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=104397.33333333333, ans=0.125 +2024-07-28 03:40:24,515 INFO [train.py:1114] (3/4) Epoch 8, batch 6750, loss[loss=0.2423, simple_loss=0.3341, pruned_loss=0.07521, over 4143.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.296, pruned_loss=0.06428, over 940435.92 frames. ], batch size: 25, lr: 9.29e-03, grad_scale: 32.0 +2024-07-28 03:40:48,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.10 vs. limit=15.0 +2024-07-28 03:40:51,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=104437.33333333333, ans=0.125 +2024-07-28 03:40:56,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=104450.66666666667, ans=0.0 +2024-07-28 03:41:04,954 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.597e+01 6.085e+01 6.894e+01 1.207e+02, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 03:41:09,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=104477.33333333333, ans=0.125 +2024-07-28 03:41:14,449 INFO [train.py:1114] (3/4) Epoch 8, batch 6800, loss[loss=0.2327, simple_loss=0.3202, pruned_loss=0.07264, over 4641.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2972, pruned_loss=0.06479, over 938972.46 frames. ], batch size: 13, lr: 9.29e-03, grad_scale: 32.0 +2024-07-28 03:41:25,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104490.66666666667, ans=0.1 +2024-07-28 03:41:25,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=104490.66666666667, ans=0.125 +2024-07-28 03:41:31,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104504.0, ans=0.1 +2024-07-28 03:41:37,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=104517.33333333333, ans=0.2 +2024-07-28 03:41:49,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=104544.0, ans=0.0 +2024-07-28 03:41:50,248 INFO [train.py:1114] (3/4) Epoch 8, batch 6850, loss[loss=0.2209, simple_loss=0.3134, pruned_loss=0.06417, over 4693.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2959, pruned_loss=0.06413, over 940867.14 frames. 
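On the `scaling.py:1024` `Whitening` lines: each compares a per-module `metric` against a `limit`, and a penalty is presumably applied only when the metric exceeds the limit (which is why most entries simply log `metric ... vs. limit` with no further action). One plausible definition of such a metric, equal to 1.0 for perfectly "white" (isotropic) activations and growing with anisotropy, is sketched below; this is an assumed form, not necessarily the project's exact formula:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    # x: (num_frames, num_channels) activations from one module.
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]        # channel covariance
    eigs = torch.linalg.eigvalsh(cov)   # eigenvalues, ascending
    d = eigs.numel()
    # Equals 1.0 iff all eigenvalues are equal (covariance ~ identity);
    # larger values mean the channel distribution is far from white.
    return (d * (eigs ** 2).sum() / eigs.sum() ** 2).item()
```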
], batch size: 13, lr: 9.29e-03, grad_scale: 32.0 +2024-07-28 03:41:57,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104557.33333333333, ans=0.1 +2024-07-28 03:41:57,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.47 vs. limit=12.0 +2024-07-28 03:41:58,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.80 vs. limit=15.0 +2024-07-28 03:42:01,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.88 vs. limit=12.0 +2024-07-28 03:42:12,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104584.0, ans=0.1 +2024-07-28 03:42:16,166 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.06 vs. limit=22.5 +2024-07-28 03:42:21,578 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 6.042e+01 6.902e+01 8.247e+01 1.133e+02, threshold=1.380e+02, percent-clipped=0.0 +2024-07-28 03:42:22,906 INFO [train.py:1114] (3/4) Epoch 8, batch 6900, loss[loss=0.2403, simple_loss=0.3129, pruned_loss=0.08386, over 4966.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2968, pruned_loss=0.06485, over 943179.32 frames. ], batch size: 13, lr: 9.28e-03, grad_scale: 16.0 +2024-07-28 03:42:25,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=104610.66666666667, ans=0.2 +2024-07-28 03:42:27,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=104610.66666666667, ans=0.1 +2024-07-28 03:42:32,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=104624.0, ans=0.125 +2024-07-28 03:42:36,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.46 vs. limit=15.0 +2024-07-28 03:42:38,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=104637.33333333333, ans=0.07 +2024-07-28 03:42:45,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=104650.66666666667, ans=0.125 +2024-07-28 03:42:55,985 INFO [train.py:1114] (3/4) Epoch 8, batch 6950, loss[loss=0.1694, simple_loss=0.2581, pruned_loss=0.04036, over 4524.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2965, pruned_loss=0.06437, over 941141.83 frames. 
], batch size: 10, lr: 9.28e-03, grad_scale: 16.0 +2024-07-28 03:42:58,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=104677.33333333333, ans=0.2 +2024-07-28 03:43:02,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=104690.66666666667, ans=0.0 +2024-07-28 03:43:04,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=104690.66666666667, ans=0.125 +2024-07-28 03:43:07,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=104690.66666666667, ans=0.125 +2024-07-28 03:43:22,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. limit=15.0 +2024-07-28 03:43:28,343 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.937e+01 5.764e+01 6.206e+01 6.980e+01 1.236e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 03:43:29,717 INFO [train.py:1114] (3/4) Epoch 8, batch 7000, loss[loss=0.2491, simple_loss=0.3404, pruned_loss=0.07891, over 4860.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2958, pruned_loss=0.06373, over 939384.15 frames. ], batch size: 18, lr: 9.28e-03, grad_scale: 16.0 +2024-07-28 03:43:31,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=104744.0, ans=0.125 +2024-07-28 03:43:36,010 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.69 vs. limit=15.0 +2024-07-28 03:43:37,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 03:43:45,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=104770.66666666667, ans=0.125 +2024-07-28 03:43:46,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=104770.66666666667, ans=0.0 +2024-07-28 03:43:52,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0 +2024-07-28 03:44:09,224 INFO [train.py:1114] (3/4) Epoch 8, batch 7050, loss[loss=0.1898, simple_loss=0.2856, pruned_loss=0.04703, over 4701.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2954, pruned_loss=0.06332, over 942281.28 frames. ], batch size: 19, lr: 9.27e-03, grad_scale: 16.0 +2024-07-28 03:44:11,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=104810.66666666667, ans=0.2 +2024-07-28 03:44:13,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=104810.66666666667, ans=0.025 +2024-07-28 03:44:36,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.06 vs. 
limit=6.0 +2024-07-28 03:44:41,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.673e+01 6.225e+01 7.440e+01 1.294e+02, threshold=1.245e+02, percent-clipped=1.0 +2024-07-28 03:44:42,667 INFO [train.py:1114] (3/4) Epoch 8, batch 7100, loss[loss=0.2312, simple_loss=0.3122, pruned_loss=0.07514, over 4802.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2968, pruned_loss=0.0646, over 937001.23 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 16.0 +2024-07-28 03:44:53,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=104890.66666666667, ans=0.125 +2024-07-28 03:44:57,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=104904.0, ans=0.2 +2024-07-28 03:44:58,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.96 vs. limit=22.5 +2024-07-28 03:44:58,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=104904.0, ans=0.1 +2024-07-28 03:45:00,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=104904.0, ans=0.125 +2024-07-28 03:45:07,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=104917.33333333333, ans=0.5 +2024-07-28 03:45:10,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.90 vs. limit=12.0 +2024-07-28 03:45:12,482 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:45:15,084 INFO [train.py:1114] (3/4) Epoch 8, batch 7150, loss[loss=0.2099, simple_loss=0.3005, pruned_loss=0.05971, over 4514.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2942, pruned_loss=0.06365, over 938073.74 frames. ], batch size: 21, lr: 9.27e-03, grad_scale: 16.0 +2024-07-28 03:45:17,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.28 vs. limit=15.0 +2024-07-28 03:45:24,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=104957.33333333333, ans=0.025 +2024-07-28 03:45:33,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=104970.66666666667, ans=0.025 +2024-07-28 03:45:43,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=104997.33333333333, ans=0.125 +2024-07-28 03:45:48,072 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.987e+01 7.110e+01 8.384e+01 1.191e+02, threshold=1.422e+02, percent-clipped=0.0 +2024-07-28 03:45:49,420 INFO [train.py:1114] (3/4) Epoch 8, batch 7200, loss[loss=0.2252, simple_loss=0.3141, pruned_loss=0.06811, over 4800.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2949, pruned_loss=0.06287, over 938383.73 frames. ], batch size: 15, lr: 9.27e-03, grad_scale: 32.0 +2024-07-28 03:45:53,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.12 vs. 
limit=15.0 +2024-07-28 03:45:55,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=105024.0, ans=0.0 +2024-07-28 03:45:58,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.33 vs. limit=15.0 +2024-07-28 03:46:07,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105037.33333333333, ans=0.1 +2024-07-28 03:46:21,907 INFO [train.py:1114] (3/4) Epoch 8, batch 7250, loss[loss=0.1703, simple_loss=0.243, pruned_loss=0.04878, over 4848.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2954, pruned_loss=0.06348, over 940153.73 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0 +2024-07-28 03:46:22,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=105077.33333333333, ans=0.0 +2024-07-28 03:46:30,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 03:46:40,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=105117.33333333333, ans=0.125 +2024-07-28 03:46:46,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=105117.33333333333, ans=0.125 +2024-07-28 03:46:46,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=105117.33333333333, ans=0.1 +2024-07-28 03:46:48,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=105130.66666666667, ans=0.09899494936611666 +2024-07-28 03:46:50,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=105130.66666666667, ans=0.0 +2024-07-28 03:46:53,134 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.994e+01 6.010e+01 6.491e+01 7.289e+01 9.989e+01, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 03:46:54,406 INFO [train.py:1114] (3/4) Epoch 8, batch 7300, loss[loss=0.1949, simple_loss=0.2839, pruned_loss=0.05296, over 4849.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2941, pruned_loss=0.06304, over 940203.43 frames. ], batch size: 12, lr: 9.26e-03, grad_scale: 32.0 +2024-07-28 03:46:58,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=105144.0, ans=0.125 +2024-07-28 03:47:10,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0 +2024-07-28 03:47:15,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=105184.0, ans=0.125 +2024-07-28 03:47:27,036 INFO [train.py:1114] (3/4) Epoch 8, batch 7350, loss[loss=0.2149, simple_loss=0.3015, pruned_loss=0.06418, over 4642.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2956, pruned_loss=0.06351, over 939344.67 frames. 
], batch size: 12, lr: 9.26e-03, grad_scale: 32.0 +2024-07-28 03:47:34,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=105224.0, ans=0.125 +2024-07-28 03:47:38,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=105224.0, ans=0.125 +2024-07-28 03:47:41,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=105237.33333333333, ans=0.125 +2024-07-28 03:47:43,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105237.33333333333, ans=0.0 +2024-07-28 03:47:51,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=105250.66666666667, ans=0.025 +2024-07-28 03:47:51,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=105250.66666666667, ans=0.0 +2024-07-28 03:47:51,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105250.66666666667, ans=0.1 +2024-07-28 03:47:55,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=105264.0, ans=0.025 +2024-07-28 03:47:58,204 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.496e+01 5.796e+01 6.371e+01 7.883e+01 1.311e+02, threshold=1.274e+02, percent-clipped=1.0 +2024-07-28 03:47:59,459 INFO [train.py:1114] (3/4) Epoch 8, batch 7400, loss[loss=0.2126, simple_loss=0.2958, pruned_loss=0.0647, over 4687.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2965, pruned_loss=0.06412, over 940289.21 frames. ], batch size: 13, lr: 9.25e-03, grad_scale: 32.0 +2024-07-28 03:48:00,586 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.50 vs. limit=15.0 +2024-07-28 03:48:09,493 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 03:48:13,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.61 vs. limit=15.0 +2024-07-28 03:48:16,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=105304.0, ans=0.035 +2024-07-28 03:48:21,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=105317.33333333333, ans=0.0 +2024-07-28 03:48:32,434 INFO [train.py:1114] (3/4) Epoch 8, batch 7450, loss[loss=0.1515, simple_loss=0.2269, pruned_loss=0.03803, over 4616.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2962, pruned_loss=0.06451, over 937561.75 frames. 
], batch size: 11, lr: 9.25e-03, grad_scale: 32.0 +2024-07-28 03:48:35,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105344.0, ans=0.1 +2024-07-28 03:48:36,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=105344.0, ans=22.5 +2024-07-28 03:48:42,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=105357.33333333333, ans=0.5 +2024-07-28 03:48:50,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=105370.66666666667, ans=0.035 +2024-07-28 03:48:58,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=105397.33333333333, ans=0.125 +2024-07-28 03:49:02,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=105397.33333333333, ans=0.125 +2024-07-28 03:49:03,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.24 vs. limit=15.0 +2024-07-28 03:49:03,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.959e+01 6.584e+01 7.550e+01 1.203e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 03:49:05,210 INFO [train.py:1114] (3/4) Epoch 8, batch 7500, loss[loss=0.3156, simple_loss=0.3794, pruned_loss=0.1259, over 3555.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2965, pruned_loss=0.06442, over 936354.15 frames. ], batch size: 35, lr: 9.25e-03, grad_scale: 32.0 +2024-07-28 03:49:12,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=105424.0, ans=0.0 +2024-07-28 03:49:13,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=105424.0, ans=0.09899494936611666 +2024-07-28 03:49:16,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=105424.0, ans=0.125 +2024-07-28 03:49:34,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105464.0, ans=0.1 +2024-07-28 03:49:35,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=105464.0, ans=0.1 +2024-07-28 03:49:39,662 INFO [train.py:1114] (3/4) Epoch 8, batch 7550, loss[loss=0.2207, simple_loss=0.301, pruned_loss=0.07021, over 4620.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2983, pruned_loss=0.06546, over 936078.16 frames. ], batch size: 17, lr: 9.25e-03, grad_scale: 32.0 +2024-07-28 03:49:45,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.26 vs. 
limit=10.0 +2024-07-28 03:49:48,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=105490.66666666667, ans=0.125 +2024-07-28 03:49:49,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=105490.66666666667, ans=0.0 +2024-07-28 03:49:50,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=105490.66666666667, ans=0.125 +2024-07-28 03:49:51,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=105490.66666666667, ans=0.0 +2024-07-28 03:49:59,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=105517.33333333333, ans=0.1 +2024-07-28 03:50:08,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=105530.66666666667, ans=0.125 +2024-07-28 03:50:10,695 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.921e+01 5.717e+01 6.338e+01 7.144e+01 8.798e+01, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 03:50:10,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=105530.66666666667, ans=0.0 +2024-07-28 03:50:12,728 INFO [train.py:1114] (3/4) Epoch 8, batch 7600, loss[loss=0.2112, simple_loss=0.3023, pruned_loss=0.06006, over 4817.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2977, pruned_loss=0.06482, over 938205.53 frames. ], batch size: 14, lr: 9.24e-03, grad_scale: 32.0 +2024-07-28 03:50:14,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=105544.0, ans=0.1 +2024-07-28 03:50:43,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=105597.33333333333, ans=0.0 +2024-07-28 03:50:45,952 INFO [train.py:1114] (3/4) Epoch 8, batch 7650, loss[loss=0.1947, simple_loss=0.2652, pruned_loss=0.06213, over 4951.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.297, pruned_loss=0.06489, over 937251.90 frames. ], batch size: 12, lr: 9.24e-03, grad_scale: 32.0 +2024-07-28 03:50:51,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=105610.66666666667, ans=0.125 +2024-07-28 03:51:00,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=105637.33333333333, ans=0.0 +2024-07-28 03:51:10,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.44 vs. limit=15.0 +2024-07-28 03:51:10,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=105650.66666666667, ans=0.0 +2024-07-28 03:51:17,765 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.951e+01 6.499e+01 7.761e+01 1.442e+02, threshold=1.300e+02, percent-clipped=1.0 +2024-07-28 03:51:19,097 INFO [train.py:1114] (3/4) Epoch 8, batch 7700, loss[loss=0.1882, simple_loss=0.2792, pruned_loss=0.04863, over 4686.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2965, pruned_loss=0.06515, over 934836.47 frames. 
], batch size: 13, lr: 9.24e-03, grad_scale: 32.0 +2024-07-28 03:51:31,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.74 vs. limit=15.0 +2024-07-28 03:51:33,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=105704.0, ans=0.2 +2024-07-28 03:51:51,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=105744.0, ans=0.125 +2024-07-28 03:51:51,757 INFO [train.py:1114] (3/4) Epoch 8, batch 7750, loss[loss=0.211, simple_loss=0.3105, pruned_loss=0.05579, over 4925.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2977, pruned_loss=0.065, over 935993.76 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0 +2024-07-28 03:51:54,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=105744.0, ans=0.125 +2024-07-28 03:52:07,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=105770.66666666667, ans=0.125 +2024-07-28 03:52:19,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0 +2024-07-28 03:52:21,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=105797.33333333333, ans=0.0 +2024-07-28 03:52:21,328 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.10 vs. limit=22.5 +2024-07-28 03:52:23,023 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.861e+01 6.500e+01 7.436e+01 9.708e+01, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 03:52:24,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.40 vs. limit=22.5 +2024-07-28 03:52:24,796 INFO [train.py:1114] (3/4) Epoch 8, batch 7800, loss[loss=0.2586, simple_loss=0.342, pruned_loss=0.08755, over 4669.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2978, pruned_loss=0.06484, over 937460.36 frames. ], batch size: 14, lr: 9.23e-03, grad_scale: 32.0 +2024-07-28 03:52:26,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=105810.66666666667, ans=0.0 +2024-07-28 03:52:32,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=105824.0, ans=0.125 +2024-07-28 03:52:35,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=105824.0, ans=0.125 +2024-07-28 03:52:36,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=105824.0, ans=0.025 +2024-07-28 03:52:52,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=105864.0, ans=0.125 +2024-07-28 03:52:54,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.79 vs. 
limit=15.0 +2024-07-28 03:52:57,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=105864.0, ans=0.2 +2024-07-28 03:52:59,045 INFO [train.py:1114] (3/4) Epoch 8, batch 7850, loss[loss=0.2267, simple_loss=0.2928, pruned_loss=0.08031, over 4528.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2975, pruned_loss=0.0652, over 937112.52 frames. ], batch size: 10, lr: 9.23e-03, grad_scale: 32.0 +2024-07-28 03:53:14,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=105904.0, ans=0.125 +2024-07-28 03:53:25,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=105930.66666666667, ans=0.0 +2024-07-28 03:53:30,109 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.073e+01 5.939e+01 6.689e+01 8.282e+01 1.225e+02, threshold=1.338e+02, percent-clipped=0.0 +2024-07-28 03:53:31,394 INFO [train.py:1114] (3/4) Epoch 8, batch 7900, loss[loss=0.1921, simple_loss=0.2818, pruned_loss=0.05124, over 4881.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.299, pruned_loss=0.06593, over 934481.67 frames. ], batch size: 14, lr: 9.22e-03, grad_scale: 32.0 +2024-07-28 03:53:31,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=105944.0, ans=0.04949747468305833 +2024-07-28 03:53:32,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=105944.0, ans=0.125 +2024-07-28 03:53:32,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=105944.0, ans=0.0 +2024-07-28 03:53:50,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=105970.66666666667, ans=0.0 +2024-07-28 03:53:50,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=105970.66666666667, ans=0.0 +2024-07-28 03:54:04,737 INFO [train.py:1114] (3/4) Epoch 8, batch 7950, loss[loss=0.3222, simple_loss=0.3684, pruned_loss=0.138, over 3096.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2976, pruned_loss=0.0651, over 936369.69 frames. ], batch size: 36, lr: 9.22e-03, grad_scale: 16.0 +2024-07-28 03:54:07,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.49 vs. limit=22.5 +2024-07-28 03:54:11,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106010.66666666667, ans=0.1 +2024-07-28 03:54:13,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.76 vs. 
limit=15.0 +2024-07-28 03:54:26,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106050.66666666667, ans=0.125 +2024-07-28 03:54:30,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=106050.66666666667, ans=0.125 +2024-07-28 03:54:31,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106050.66666666667, ans=0.1 +2024-07-28 03:54:32,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.83 vs. limit=15.0 +2024-07-28 03:54:38,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106064.0, ans=0.1 +2024-07-28 03:54:39,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.772e+01 6.445e+01 7.166e+01 9.685e+01, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 03:54:40,294 INFO [train.py:1114] (3/4) Epoch 8, batch 8000, loss[loss=0.1641, simple_loss=0.2466, pruned_loss=0.04082, over 4606.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2952, pruned_loss=0.06437, over 935890.15 frames. ], batch size: 11, lr: 9.22e-03, grad_scale: 32.0 +2024-07-28 03:55:04,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106117.33333333333, ans=0.1 +2024-07-28 03:55:05,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=106130.66666666667, ans=0.2 +2024-07-28 03:55:07,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=106130.66666666667, ans=0.125 +2024-07-28 03:55:12,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 03:55:14,573 INFO [train.py:1114] (3/4) Epoch 8, batch 8050, loss[loss=0.1891, simple_loss=0.2808, pruned_loss=0.04872, over 4798.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2956, pruned_loss=0.06462, over 935504.68 frames. 
], batch size: 14, lr: 9.22e-03, grad_scale: 32.0 +2024-07-28 03:55:18,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=106144.0, ans=0.125 +2024-07-28 03:55:18,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=106144.0, ans=0.125 +2024-07-28 03:55:19,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=106144.0, ans=0.95 +2024-07-28 03:55:20,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106144.0, ans=0.125 +2024-07-28 03:55:22,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=106157.33333333333, ans=0.0 +2024-07-28 03:55:25,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106157.33333333333, ans=0.125 +2024-07-28 03:55:34,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.43 vs. limit=15.0 +2024-07-28 03:55:38,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=106184.0, ans=0.2 +2024-07-28 03:55:43,727 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.03 vs. limit=22.5 +2024-07-28 03:55:47,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106197.33333333333, ans=0.1 +2024-07-28 03:55:47,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.934e+01 5.620e+01 6.153e+01 6.973e+01 1.002e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 03:55:48,431 INFO [train.py:1114] (3/4) Epoch 8, batch 8100, loss[loss=0.2031, simple_loss=0.2915, pruned_loss=0.05733, over 4815.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2954, pruned_loss=0.0644, over 935078.69 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0 +2024-07-28 03:55:49,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.73 vs. limit=12.0 +2024-07-28 03:55:53,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=106210.66666666667, ans=0.125 +2024-07-28 03:55:54,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=106224.0, ans=0.025 +2024-07-28 03:55:55,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.31 vs. 
limit=10.0
+2024-07-28 03:56:00,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=106237.33333333333, ans=22.5
+2024-07-28 03:56:09,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=106250.66666666667, ans=0.2
+2024-07-28 03:56:16,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=106264.0, ans=0.95
+2024-07-28 03:56:17,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=106264.0, ans=0.09899494936611666
+2024-07-28 03:56:18,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.98 vs. limit=15.0
+2024-07-28 03:56:20,980 INFO [train.py:1114] (3/4) Epoch 8, batch 8150, loss[loss=0.2382, simple_loss=0.324, pruned_loss=0.07622, over 4797.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2941, pruned_loss=0.06349, over 938235.52 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:56:22,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.12 vs. limit=15.0
+2024-07-28 03:56:23,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=106277.33333333333, ans=0.2
+2024-07-28 03:56:24,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106277.33333333333, ans=0.125
+2024-07-28 03:56:28,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=106290.66666666667, ans=0.125
+2024-07-28 03:56:31,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=106290.66666666667, ans=0.025
+2024-07-28 03:56:36,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106304.0, ans=0.1
+2024-07-28 03:56:51,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0
+2024-07-28 03:56:51,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=106330.66666666667, ans=0.125
+2024-07-28 03:56:52,815 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.056e+01 5.810e+01 6.420e+01 7.411e+01 1.127e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 03:56:53,444 INFO [train.py:1114] (3/4) Epoch 8, batch 8200, loss[loss=0.2506, simple_loss=0.3375, pruned_loss=0.08186, over 4811.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2958, pruned_loss=0.06397, over 939258.26 frames. ], batch size: 15, lr: 9.21e-03, grad_scale: 32.0
+2024-07-28 03:56:55,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=106344.0, ans=0.025
+2024-07-28 03:56:55,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.87 vs. limit=15.0
+2024-07-28 03:56:58,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=106344.0, ans=0.125
+2024-07-28 03:57:06,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=106370.66666666667, ans=0.0
+2024-07-28 03:57:14,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.34 vs. limit=22.5
+2024-07-28 03:57:19,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.98 vs. limit=15.0
+2024-07-28 03:57:26,361 INFO [train.py:1114] (3/4) Epoch 8, batch 8250, loss[loss=0.2175, simple_loss=0.3072, pruned_loss=0.06392, over 4901.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2967, pruned_loss=0.06458, over 939174.08 frames. ], batch size: 13, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:57:35,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.48 vs. limit=15.0
+2024-07-28 03:57:57,943 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.754e+01 5.787e+01 6.260e+01 6.993e+01 1.105e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 03:57:58,642 INFO [train.py:1114] (3/4) Epoch 8, batch 8300, loss[loss=0.2127, simple_loss=0.3033, pruned_loss=0.061, over 4906.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2974, pruned_loss=0.06471, over 938973.98 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:58:01,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106477.33333333333, ans=0.1
+2024-07-28 03:58:06,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106490.66666666667, ans=0.0
+2024-07-28 03:58:12,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=106504.0, ans=0.0
+2024-07-28 03:58:30,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0
+2024-07-28 03:58:32,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.56 vs. limit=15.0
+2024-07-28 03:58:32,087 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.42 vs. limit=15.0
+2024-07-28 03:58:36,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=106517.33333333333, ans=0.125
+2024-07-28 03:58:37,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106517.33333333333, ans=0.1
+2024-07-28 03:58:42,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=106530.66666666667, ans=0.125
+2024-07-28 03:58:42,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=106530.66666666667, ans=0.125
+2024-07-28 03:58:45,427 INFO [train.py:1114] (3/4) Epoch 8, batch 8350, loss[loss=0.2389, simple_loss=0.3311, pruned_loss=0.07332, over 4795.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2969, pruned_loss=0.06436, over 941840.14 frames. ], batch size: 15, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:58:46,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=106544.0, ans=0.125
+2024-07-28 03:58:52,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=106557.33333333333, ans=0.07
+2024-07-28 03:58:53,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106557.33333333333, ans=0.125
+2024-07-28 03:59:06,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=106584.0, ans=0.125
+2024-07-28 03:59:16,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=106597.33333333333, ans=0.125
+2024-07-28 03:59:17,897 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.040e+01 6.810e+01 8.092e+01 1.142e+02, threshold=1.362e+02, percent-clipped=0.0
+2024-07-28 03:59:18,627 INFO [train.py:1114] (3/4) Epoch 8, batch 8400, loss[loss=0.177, simple_loss=0.2574, pruned_loss=0.04833, over 4783.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2971, pruned_loss=0.06454, over 940382.14 frames. ], batch size: 12, lr: 9.20e-03, grad_scale: 32.0
+2024-07-28 03:59:26,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=106624.0, ans=0.04949747468305833
+2024-07-28 03:59:27,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=106624.0, ans=0.1
+2024-07-28 03:59:27,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=106624.0, ans=0.0
+2024-07-28 03:59:37,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=106650.66666666667, ans=0.125
+2024-07-28 03:59:40,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=106650.66666666667, ans=0.0
+2024-07-28 03:59:41,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=106650.66666666667, ans=0.0
+2024-07-28 03:59:52,392 INFO [train.py:1114] (3/4) Epoch 8, batch 8450, loss[loss=0.2255, simple_loss=0.3111, pruned_loss=0.07001, over 4792.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2977, pruned_loss=0.06435, over 939443.67 frames. ], batch size: 15, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 03:59:54,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.07 vs. limit=15.0
+2024-07-28 03:59:55,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106677.33333333333, ans=0.1
+2024-07-28 03:59:59,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.35 vs. limit=15.0
+2024-07-28 04:00:01,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=106690.66666666667, ans=0.0
+2024-07-28 04:00:02,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=106690.66666666667, ans=0.125
+2024-07-28 04:00:05,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=106704.0, ans=0.0
+2024-07-28 04:00:24,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=106730.66666666667, ans=0.0
+2024-07-28 04:00:25,072 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.985e+01 6.391e+01 7.364e+01 1.076e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-28 04:00:25,733 INFO [train.py:1114] (3/4) Epoch 8, batch 8500, loss[loss=0.1893, simple_loss=0.2799, pruned_loss=0.04937, over 4613.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2974, pruned_loss=0.06416, over 939265.97 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 04:00:39,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.16 vs. limit=22.5
+2024-07-28 04:00:54,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0
+2024-07-28 04:00:57,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=106797.33333333333, ans=0.2
+2024-07-28 04:00:58,738 INFO [train.py:1114] (3/4) Epoch 8, batch 8550, loss[loss=0.159, simple_loss=0.2412, pruned_loss=0.03844, over 4808.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2967, pruned_loss=0.06401, over 940371.00 frames. ], batch size: 11, lr: 9.19e-03, grad_scale: 32.0
+2024-07-28 04:01:02,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.97 vs. limit=6.0
+2024-07-28 04:01:04,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106810.66666666667, ans=0.1
+2024-07-28 04:01:08,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.40 vs. limit=22.5
+2024-07-28 04:01:08,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=106824.0, ans=0.125
+2024-07-28 04:01:10,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106824.0, ans=0.125
+2024-07-28 04:01:27,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=106864.0, ans=0.1
+2024-07-28 04:01:31,412 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.840e+01 6.821e+01 7.770e+01 1.284e+02, threshold=1.364e+02, percent-clipped=1.0
+2024-07-28 04:01:32,070 INFO [train.py:1114] (3/4) Epoch 8, batch 8600, loss[loss=0.2396, simple_loss=0.3188, pruned_loss=0.0802, over 4809.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2965, pruned_loss=0.06466, over 939482.32 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:01:35,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=106877.33333333333, ans=0.0
+2024-07-28 04:01:37,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=106877.33333333333, ans=0.125
+2024-07-28 04:01:40,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106890.66666666667, ans=0.1
+2024-07-28 04:01:44,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=106904.0, ans=0.2
+2024-07-28 04:01:47,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=106904.0, ans=0.125
+2024-07-28 04:01:49,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=106904.0, ans=0.125
+2024-07-28 04:02:03,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0
+2024-07-28 04:02:04,641 INFO [train.py:1114] (3/4) Epoch 8, batch 8650, loss[loss=0.2314, simple_loss=0.3146, pruned_loss=0.07413, over 4901.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2971, pruned_loss=0.06454, over 940640.18 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:02:04,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=106944.0, ans=0.125
+2024-07-28 04:02:08,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=106944.0, ans=0.1
+2024-07-28 04:02:11,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=106944.0, ans=0.125
+2024-07-28 04:02:11,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=106957.33333333333, ans=0.04949747468305833
+2024-07-28 04:02:30,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=106984.0, ans=0.025
+2024-07-28 04:02:30,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=106984.0, ans=0.025
+2024-07-28 04:02:35,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=106997.33333333333, ans=0.2
+2024-07-28 04:02:37,622 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 6.079e+01 7.020e+01 8.285e+01 1.215e+02, threshold=1.404e+02, percent-clipped=0.0
+2024-07-28 04:02:38,287 INFO [train.py:1114] (3/4) Epoch 8, batch 8700, loss[loss=0.2099, simple_loss=0.2978, pruned_loss=0.06097, over 4763.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2981, pruned_loss=0.06536, over 938307.47 frames. ], batch size: 13, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:02:56,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.12 vs. limit=6.0
+2024-07-28 04:03:04,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=107050.66666666667, ans=0.0
+2024-07-28 04:03:05,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107064.0, ans=0.1
+2024-07-28 04:03:10,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107064.0, ans=0.125
+2024-07-28 04:03:13,346 INFO [train.py:1114] (3/4) Epoch 8, batch 8750, loss[loss=0.2666, simple_loss=0.3537, pruned_loss=0.08979, over 4677.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2983, pruned_loss=0.06525, over 936546.07 frames. ], batch size: 15, lr: 9.18e-03, grad_scale: 32.0
+2024-07-28 04:03:16,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=107077.33333333333, ans=0.1
+2024-07-28 04:03:38,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107117.33333333333, ans=0.1
+2024-07-28 04:03:39,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=107117.33333333333, ans=0.0
+2024-07-28 04:03:40,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=107130.66666666667, ans=0.125
+2024-07-28 04:03:45,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=107130.66666666667, ans=0.2
+2024-07-28 04:03:46,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.010e+01 6.887e+01 8.098e+01 1.294e+02, threshold=1.377e+02, percent-clipped=0.0
+2024-07-28 04:03:47,335 INFO [train.py:1114] (3/4) Epoch 8, batch 8800, loss[loss=0.2359, simple_loss=0.3335, pruned_loss=0.06915, over 4926.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2982, pruned_loss=0.06517, over 937494.03 frames. ], batch size: 14, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:03:53,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.53 vs. limit=15.0
+2024-07-28 04:03:56,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=107157.33333333333, ans=0.0
+2024-07-28 04:04:10,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=107184.0, ans=0.125
+2024-07-28 04:04:20,451 INFO [train.py:1114] (3/4) Epoch 8, batch 8850, loss[loss=0.1904, simple_loss=0.2879, pruned_loss=0.04642, over 4600.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.298, pruned_loss=0.06571, over 932061.94 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:04:32,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=107224.0, ans=12.0
+2024-07-28 04:04:37,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=107237.33333333333, ans=0.125
+2024-07-28 04:04:49,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=107264.0, ans=6.0
+2024-07-28 04:04:50,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107264.0, ans=0.125
+2024-07-28 04:04:53,231 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.819e+01 6.564e+01 7.832e+01 1.170e+02, threshold=1.313e+02, percent-clipped=0.0
+2024-07-28 04:04:53,900 INFO [train.py:1114] (3/4) Epoch 8, batch 8900, loss[loss=0.1799, simple_loss=0.2629, pruned_loss=0.04846, over 4942.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2982, pruned_loss=0.06581, over 930429.32 frames. ], batch size: 12, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:04:54,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=107277.33333333333, ans=0.125
+2024-07-28 04:04:55,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=107277.33333333333, ans=0.125
+2024-07-28 04:04:58,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.55 vs. limit=10.0
+2024-07-28 04:05:14,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=107317.33333333333, ans=0.2
+2024-07-28 04:05:25,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=107344.0, ans=0.125
+2024-07-28 04:05:26,160 INFO [train.py:1114] (3/4) Epoch 8, batch 8950, loss[loss=0.2293, simple_loss=0.3081, pruned_loss=0.07524, over 4589.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2982, pruned_loss=0.06559, over 931848.53 frames. ], batch size: 21, lr: 9.17e-03, grad_scale: 32.0
+2024-07-28 04:05:32,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=107357.33333333333, ans=0.0
+2024-07-28 04:05:35,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107357.33333333333, ans=0.125
+2024-07-28 04:05:45,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=107370.66666666667, ans=0.125
+2024-07-28 04:05:58,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=107397.33333333333, ans=0.125
+2024-07-28 04:05:58,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107397.33333333333, ans=0.1
+2024-07-28 04:06:00,005 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.334e+01 5.823e+01 6.551e+01 7.649e+01 1.257e+02, threshold=1.310e+02, percent-clipped=0.0
+2024-07-28 04:06:00,721 INFO [train.py:1114] (3/4) Epoch 8, batch 9000, loss[loss=0.2036, simple_loss=0.2878, pruned_loss=0.05968, over 4642.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.296, pruned_loss=0.06508, over 934721.45 frames. ], batch size: 12, lr: 9.16e-03, grad_scale: 32.0
+2024-07-28 04:06:00,721 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 04:06:06,952 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.7223, 4.4659, 3.4074, 3.0687], device='cuda:3')
+2024-07-28 04:06:07,807 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.2890, 3.6167, 3.8011, 3.5725], device='cuda:3')
+2024-07-28 04:06:09,426 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9306, 5.0541, 4.9582, 5.6482], device='cuda:3')
+2024-07-28 04:06:12,614 INFO [train.py:1146] (3/4) Epoch 8, validation: loss=0.1781, simple_loss=0.2826, pruned_loss=0.03685, over 944034.00 frames.
+2024-07-28 04:06:12,615 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 04:06:32,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0
+2024-07-28 04:06:45,369 INFO [train.py:1114] (3/4) Epoch 8, batch 9050, loss[loss=0.1947, simple_loss=0.2833, pruned_loss=0.05305, over 4530.00 frames. ], tot_loss[loss=0.212, simple_loss=0.295, pruned_loss=0.0645, over 935289.93 frames. ], batch size: 10, lr: 9.16e-03, grad_scale: 32.0
+2024-07-28 04:06:51,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=107490.66666666667, ans=0.125
+2024-07-28 04:06:53,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=107490.66666666667, ans=0.0
+2024-07-28 04:06:56,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=107490.66666666667, ans=0.07
+2024-07-28 04:06:56,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=107490.66666666667, ans=0.0
+2024-07-28 04:07:00,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.97 vs. limit=15.0
+2024-07-28 04:07:00,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=107504.0, ans=0.125
+2024-07-28 04:07:01,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=107504.0, ans=0.025
+2024-07-28 04:07:15,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=107530.66666666667, ans=0.09899494936611666
+2024-07-28 04:07:16,878 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.793e+01 6.353e+01 7.194e+01 9.869e+01, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 04:07:17,492 INFO [train.py:1114] (3/4) Epoch 8, batch 9100, loss[loss=0.2187, simple_loss=0.3154, pruned_loss=0.06099, over 4927.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2948, pruned_loss=0.06411, over 937632.26 frames. ], batch size: 14, lr: 9.16e-03, grad_scale: 32.0
+2024-07-28 04:07:21,592 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.63 vs. limit=15.0
+2024-07-28 04:07:24,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=107557.33333333333, ans=0.1
+2024-07-28 04:07:48,972 INFO [train.py:1114] (3/4) Epoch 8, batch 9150, loss[loss=0.2229, simple_loss=0.3141, pruned_loss=0.06588, over 4812.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2945, pruned_loss=0.06364, over 936436.94 frames. ], batch size: 14, lr: 9.15e-03, grad_scale: 32.0
+2024-07-28 04:07:51,438 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:07:57,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=107624.0, ans=0.125
+2024-07-28 04:07:57,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107624.0, ans=0.1
+2024-07-28 04:08:00,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.79 vs. limit=12.0
+2024-07-28 04:08:04,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=107637.33333333333, ans=0.125
+2024-07-28 04:08:10,943 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:08:19,829 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.934e+01 6.513e+01 7.499e+01 1.086e+02, threshold=1.303e+02, percent-clipped=0.0
+2024-07-28 04:08:20,441 INFO [train.py:1114] (3/4) Epoch 8, batch 9200, loss[loss=0.2023, simple_loss=0.2788, pruned_loss=0.06287, over 4845.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2947, pruned_loss=0.06362, over 938399.76 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0
+2024-07-28 04:08:51,973 INFO [train.py:1114] (3/4) Epoch 8, batch 9250, loss[loss=0.196, simple_loss=0.2883, pruned_loss=0.05187, over 4636.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2967, pruned_loss=0.0647, over 938720.24 frames. ], batch size: 13, lr: 9.15e-03, grad_scale: 32.0
+2024-07-28 04:08:55,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=107744.0, ans=0.125
+2024-07-28 04:09:03,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=107757.33333333333, ans=0.1
+2024-07-28 04:09:07,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=107770.66666666667, ans=0.125
+2024-07-28 04:09:16,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=107797.33333333333, ans=0.0
+2024-07-28 04:09:21,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=107797.33333333333, ans=0.2
+2024-07-28 04:09:22,906 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.836e+01 6.420e+01 7.069e+01 1.211e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 04:09:23,527 INFO [train.py:1114] (3/4) Epoch 8, batch 9300, loss[loss=0.1742, simple_loss=0.2535, pruned_loss=0.04742, over 4783.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2972, pruned_loss=0.06523, over 938399.54 frames. ], batch size: 12, lr: 9.15e-03, grad_scale: 32.0
+2024-07-28 04:09:26,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=107810.66666666667, ans=0.025
+2024-07-28 04:09:36,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=107837.33333333333, ans=0.0
+2024-07-28 04:09:39,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=107837.33333333333, ans=0.125
+2024-07-28 04:09:43,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=107850.66666666667, ans=0.125
+2024-07-28 04:09:48,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.48 vs. limit=22.5
+2024-07-28 04:09:52,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=107864.0, ans=0.125
+2024-07-28 04:09:53,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=107864.0, ans=0.125
+2024-07-28 04:09:55,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=107877.33333333333, ans=0.0
+2024-07-28 04:09:55,753 INFO [train.py:1114] (3/4) Epoch 8, batch 9350, loss[loss=0.1858, simple_loss=0.264, pruned_loss=0.05378, over 4816.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2974, pruned_loss=0.06512, over 935304.09 frames. ], batch size: 11, lr: 9.14e-03, grad_scale: 32.0
+2024-07-28 04:09:58,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=107877.33333333333, ans=0.025
+2024-07-28 04:09:59,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. limit=15.0
+2024-07-28 04:10:08,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.08 vs. limit=15.0
+2024-07-28 04:10:19,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=107917.33333333333, ans=0.0
+2024-07-28 04:10:20,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=107930.66666666667, ans=0.125
+2024-07-28 04:10:27,326 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.454e+01 5.997e+01 6.849e+01 9.161e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 04:10:27,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.49 vs. limit=15.0
+2024-07-28 04:10:27,986 INFO [train.py:1114] (3/4) Epoch 8, batch 9400, loss[loss=0.2449, simple_loss=0.3235, pruned_loss=0.08316, over 4690.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2975, pruned_loss=0.06528, over 933234.51 frames. ], batch size: 13, lr: 9.14e-03, grad_scale: 32.0
+2024-07-28 04:10:54,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=107984.0, ans=0.125
+2024-07-28 04:10:58,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=107997.33333333333, ans=0.05
+2024-07-28 04:10:59,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=107997.33333333333, ans=0.125
+2024-07-28 04:11:03,419 INFO [train.py:1114] (3/4) Epoch 8, batch 9450, loss[loss=0.1726, simple_loss=0.255, pruned_loss=0.04508, over 4818.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2981, pruned_loss=0.06504, over 932846.01 frames. ], batch size: 11, lr: 9.14e-03, grad_scale: 32.0
+2024-07-28 04:11:05,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=108010.66666666667, ans=0.0
+2024-07-28 04:11:07,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=108010.66666666667, ans=0.0
+2024-07-28 04:11:14,777 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0
+2024-07-28 04:11:27,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=108050.66666666667, ans=0.025
+2024-07-28 04:11:31,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108064.0, ans=0.125
+2024-07-28 04:11:34,693 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 5.745e+01 6.311e+01 7.517e+01 1.007e+02, threshold=1.262e+02, percent-clipped=0.0
+2024-07-28 04:11:35,349 INFO [train.py:1114] (3/4) Epoch 8, batch 9500, loss[loss=0.2025, simple_loss=0.274, pruned_loss=0.06554, over 4702.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2973, pruned_loss=0.06424, over 934988.29 frames. ], batch size: 12, lr: 9.13e-03, grad_scale: 32.0
+2024-07-28 04:11:49,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108104.0, ans=0.1
+2024-07-28 04:11:57,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108117.33333333333, ans=0.125
+2024-07-28 04:11:58,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=108117.33333333333, ans=0.5
+2024-07-28 04:12:02,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=108130.66666666667, ans=0.0
+2024-07-28 04:12:06,613 INFO [train.py:1114] (3/4) Epoch 8, batch 9550, loss[loss=0.2202, simple_loss=0.293, pruned_loss=0.07374, over 4783.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2971, pruned_loss=0.06429, over 931695.89 frames. ], batch size: 12, lr: 9.13e-03, grad_scale: 32.0
+2024-07-28 04:12:06,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=108144.0, ans=0.0
+2024-07-28 04:12:10,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=108144.0, ans=0.125
+2024-07-28 04:12:17,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=108157.33333333333, ans=0.125
+2024-07-28 04:12:17,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=108157.33333333333, ans=0.07
+2024-07-28 04:12:19,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=108170.66666666667, ans=0.125
+2024-07-28 04:12:29,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.82 vs. limit=15.0
+2024-07-28 04:12:31,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=108197.33333333333, ans=0.125
+2024-07-28 04:12:37,463 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.060e+01 5.899e+01 6.832e+01 8.942e+01 1.153e+02, threshold=1.366e+02, percent-clipped=0.0
+2024-07-28 04:12:38,144 INFO [train.py:1114] (3/4) Epoch 8, batch 9600, loss[loss=0.2775, simple_loss=0.3376, pruned_loss=0.1087, over 3737.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2969, pruned_loss=0.06437, over 931021.51 frames. ], batch size: 35, lr: 9.13e-03, grad_scale: 32.0
+2024-07-28 04:12:39,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=108210.66666666667, ans=15.0
+2024-07-28 04:12:48,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=108224.0, ans=0.2
+2024-07-28 04:12:49,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=108224.0, ans=0.125
+2024-07-28 04:12:50,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0
+2024-07-28 04:13:07,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108264.0, ans=0.125
+2024-07-28 04:13:10,091 INFO [train.py:1114] (3/4) Epoch 8, batch 9650, loss[loss=0.2114, simple_loss=0.2985, pruned_loss=0.06215, over 4850.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2969, pruned_loss=0.06488, over 926835.69 frames. ], batch size: 16, lr: 9.13e-03, grad_scale: 32.0
+2024-07-28 04:13:14,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=108277.33333333333, ans=0.125
+2024-07-28 04:13:34,982 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.68 vs. limit=15.0
+2024-07-28 04:13:36,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108330.66666666667, ans=0.125
+2024-07-28 04:13:40,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.895e+01 5.890e+01 6.394e+01 7.383e+01 1.171e+02, threshold=1.279e+02, percent-clipped=0.0
+2024-07-28 04:13:40,927 INFO [train.py:1114] (3/4) Epoch 8, batch 9700, loss[loss=0.2335, simple_loss=0.3125, pruned_loss=0.07722, over 4185.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2969, pruned_loss=0.06448, over 924823.51 frames. ], batch size: 25, lr: 9.12e-03, grad_scale: 32.0
+2024-07-28 04:13:41,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=108344.0, ans=0.0
+2024-07-28 04:13:42,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.28 vs. limit=15.0
+2024-07-28 04:13:53,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.83 vs. limit=10.0
+2024-07-28 04:13:53,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=108370.66666666667, ans=0.025
+2024-07-28 04:14:03,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.28 vs. limit=15.0
+2024-07-28 04:14:05,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108397.33333333333, ans=0.1
+2024-07-28 04:14:11,906 INFO [train.py:1114] (3/4) Epoch 8, batch 9750, loss[loss=0.2209, simple_loss=0.3021, pruned_loss=0.06986, over 4683.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2959, pruned_loss=0.06402, over 925540.00 frames. ], batch size: 15, lr: 9.12e-03, grad_scale: 32.0
+2024-07-28 04:14:16,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108410.66666666667, ans=0.1
+2024-07-28 04:14:18,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=108424.0, ans=0.0
+2024-07-28 04:14:20,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108424.0, ans=0.1
+2024-07-28 04:14:32,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=108450.66666666667, ans=0.1
+2024-07-28 04:14:35,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108450.66666666667, ans=0.1
+2024-07-28 04:14:42,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 6.068e+01 7.067e+01 8.452e+01 1.289e+02, threshold=1.413e+02, percent-clipped=1.0
+2024-07-28 04:14:42,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=108477.33333333333, ans=0.0
+2024-07-28 04:14:42,980 INFO [train.py:1114] (3/4) Epoch 8, batch 9800, loss[loss=0.2298, simple_loss=0.3045, pruned_loss=0.07757, over 4709.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2957, pruned_loss=0.06447, over 925136.16 frames. ], batch size: 12, lr: 9.12e-03, grad_scale: 32.0
+2024-07-28 04:14:57,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=108504.0, ans=0.125
+2024-07-28 04:15:13,598 INFO [train.py:1114] (3/4) Epoch 8, batch 9850, loss[loss=0.2328, simple_loss=0.3243, pruned_loss=0.07065, over 4909.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2961, pruned_loss=0.06469, over 927259.52 frames. ], batch size: 15, lr: 9.11e-03, grad_scale: 32.0
+2024-07-28 04:15:17,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=108544.0, ans=10.0
+2024-07-28 04:15:22,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=108557.33333333333, ans=0.0
+2024-07-28 04:15:22,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108557.33333333333, ans=0.1
+2024-07-28 04:15:31,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108570.66666666667, ans=0.125
+2024-07-28 04:15:31,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108584.0, ans=0.1
+2024-07-28 04:15:42,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108597.33333333333, ans=0.1
+2024-07-28 04:15:44,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.942e+01 6.515e+01 7.439e+01 1.419e+02, threshold=1.303e+02, percent-clipped=1.0
+2024-07-28 04:15:45,044 INFO [train.py:1114] (3/4) Epoch 8, batch 9900, loss[loss=0.2702, simple_loss=0.3488, pruned_loss=0.0958, over 4855.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2978, pruned_loss=0.06529, over 926564.55 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 32.0
+2024-07-28 04:15:50,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=108610.66666666667, ans=10.0
+2024-07-28 04:15:57,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108637.33333333333, ans=0.1
+2024-07-28 04:15:57,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=108637.33333333333, ans=0.2
+2024-07-28 04:15:57,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn1.whiten.whitening_limit, batch_count=108637.33333333333, ans=22.5
+2024-07-28 04:16:15,790 INFO [train.py:1114] (3/4) Epoch 8, batch 9950, loss[loss=0.1732, simple_loss=0.2499, pruned_loss=0.04826, over 4522.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2985, pruned_loss=0.06596, over 928545.77 frames. ], batch size: 10, lr: 9.11e-03, grad_scale: 64.0
+2024-07-28 04:16:17,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.57 vs. limit=10.0
+2024-07-28 04:16:18,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=108677.33333333333, ans=0.125
+2024-07-28 04:16:21,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.37 vs. limit=15.0
+2024-07-28 04:16:21,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=108690.66666666667, ans=15.0
+2024-07-28 04:16:24,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108690.66666666667, ans=0.1
+2024-07-28 04:16:25,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=108690.66666666667, ans=10.0
+2024-07-28 04:16:36,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=108717.33333333333, ans=0.0
+2024-07-28 04:16:38,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0
+2024-07-28 04:16:40,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=108730.66666666667, ans=0.0
+2024-07-28 04:16:43,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.02 vs. limit=10.0
+2024-07-28 04:16:46,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=108730.66666666667, ans=0.125
+2024-07-28 04:16:46,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.881e+01 6.237e+01 7.241e+01 1.097e+02, threshold=1.247e+02, percent-clipped=0.0
+2024-07-28 04:16:46,979 INFO [train.py:1114] (3/4) Epoch 8, batch 10000, loss[loss=0.2724, simple_loss=0.3593, pruned_loss=0.09278, over 4632.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.3013, pruned_loss=0.06693, over 926528.51 frames. ], batch size: 16, lr: 9.11e-03, grad_scale: 64.0
+2024-07-28 04:16:52,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.45 vs. limit=15.0
+2024-07-28 04:17:04,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=108784.0, ans=0.125
+2024-07-28 04:17:05,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=108784.0, ans=0.0
+2024-07-28 04:17:07,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=108784.0, ans=0.125
+2024-07-28 04:17:20,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=108810.66666666667, ans=0.025
+2024-07-28 04:17:21,344 INFO [train.py:1114] (3/4) Epoch 8, batch 10050, loss[loss=0.2861, simple_loss=0.343, pruned_loss=0.1146, over 3671.00 frames. ], tot_loss[loss=0.222, simple_loss=0.3053, pruned_loss=0.0694, over 914511.44 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 64.0
+2024-07-28 04:17:35,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=108837.33333333333, ans=0.125
+2024-07-28 04:17:38,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.34 vs. limit=15.0
+2024-07-28 04:17:41,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=108850.66666666667, ans=0.0
+2024-07-28 04:17:46,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=108850.66666666667, ans=0.025
+2024-07-28 04:17:52,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.39 vs. limit=22.5
+2024-07-28 04:17:53,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108864.0, ans=0.1
+2024-07-28 04:17:55,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.156e+01 6.508e+01 7.175e+01 7.748e+01 1.103e+02, threshold=1.435e+02, percent-clipped=0.0
+2024-07-28 04:17:55,232 INFO [train.py:1114] (3/4) Epoch 8, batch 10100, loss[loss=0.2502, simple_loss=0.3181, pruned_loss=0.09113, over 3390.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.3112, pruned_loss=0.07574, over 861384.56 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0
+2024-07-28 04:17:55,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=108877.33333333333, ans=0.125
+2024-07-28 04:17:56,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=12.0
+2024-07-28 04:18:00,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=108877.33333333333, ans=0.1
+2024-07-28 04:18:02,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=108890.66666666667, ans=0.1
+2024-07-28 04:18:10,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=6.52 vs. limit=10.0
+2024-07-28 04:18:16,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=108917.33333333333, ans=0.125
+2024-07-28 04:18:27,874 INFO [train.py:1114] (3/4) Epoch 8, batch 10150, loss[loss=0.2521, simple_loss=0.3216, pruned_loss=0.09133, over 3039.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3145, pruned_loss=0.07954, over 819117.96 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0
+2024-07-28 04:18:29,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.15 vs. limit=10.0
+2024-07-28 04:18:58,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.15 vs. limit=22.5
+2024-07-28 04:18:59,458 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.833e+01 6.827e+01 7.269e+01 7.768e+01 1.197e+02, threshold=1.454e+02, percent-clipped=0.0
+2024-07-28 04:18:59,492 INFO [train.py:1114] (3/4) Epoch 8, batch 10200, loss[loss=0.2598, simple_loss=0.3181, pruned_loss=0.1008, over 3338.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.3181, pruned_loss=0.08379, over 787246.71 frames. ], batch size: 35, lr: 9.10e-03, grad_scale: 32.0
+2024-07-28 04:19:05,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109024.0, ans=0.0
+2024-07-28 04:19:08,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109024.0, ans=0.1
+2024-07-28 04:19:49,904 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 04:19:59,116 INFO [train.py:1114] (3/4) Epoch 9, batch 0, loss[loss=0.1939, simple_loss=0.2856, pruned_loss=0.05112, over 4847.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2856, pruned_loss=0.05112, over 4847.00 frames. ], batch size: 12, lr: 8.61e-03, grad_scale: 32.0
+2024-07-28 04:19:59,117 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 04:20:10,795 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.1818, simple_loss=0.2877, pruned_loss=0.03795, over 944034.00 frames.
+2024-07-28 04:20:10,796 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 04:20:12,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=109040.0, ans=0.1
+2024-07-28 04:20:27,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=23.68 vs. limit=22.5
+2024-07-28 04:20:28,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=109066.66666666667, ans=0.125
+2024-07-28 04:20:28,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109066.66666666667, ans=0.125
+2024-07-28 04:20:29,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=109066.66666666667, ans=0.125
+2024-07-28 04:20:36,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=109080.0, ans=0.2
+2024-07-28 04:20:41,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=109093.33333333333, ans=0.05
+2024-07-28 04:20:45,146 INFO [train.py:1114] (3/4) Epoch 9, batch 50, loss[loss=0.2163, simple_loss=0.3058, pruned_loss=0.06336, over 4609.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2997, pruned_loss=0.06544, over 206641.77 frames. ], batch size: 11, lr: 8.61e-03, grad_scale: 32.0
+2024-07-28 04:20:47,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109106.66666666667, ans=0.125
+2024-07-28 04:20:48,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=109106.66666666667, ans=0.2
+2024-07-28 04:20:50,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=109106.66666666667, ans=0.0
+2024-07-28 04:20:57,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.37 vs. limit=22.5
+2024-07-28 04:21:01,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=109133.33333333333, ans=0.2
+2024-07-28 04:21:01,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=109133.33333333333, ans=0.0
+2024-07-28 04:21:01,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.96 vs. limit=12.0
+2024-07-28 04:21:01,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.02 vs. limit=15.0
+2024-07-28 04:21:05,386 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.946e+01 5.804e+01 6.519e+01 7.318e+01 1.022e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 04:21:06,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=109146.66666666667, ans=0.0
+2024-07-28 04:21:11,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=109146.66666666667, ans=0.125
+2024-07-28 04:21:15,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=109160.0, ans=0.0
+2024-07-28 04:21:16,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109160.0, ans=0.0
+2024-07-28 04:21:16,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.41 vs. limit=6.0
+2024-07-28 04:21:17,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=12.0
+2024-07-28 04:21:22,074 INFO [train.py:1114] (3/4) Epoch 9, batch 100, loss[loss=0.1833, simple_loss=0.2696, pruned_loss=0.04845, over 4637.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.3014, pruned_loss=0.065, over 365988.51 frames. ], batch size: 12, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:21:32,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=109186.66666666667, ans=0.0
+2024-07-28 04:21:36,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109200.0, ans=0.125
+2024-07-28 04:21:41,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109200.0, ans=0.0
+2024-07-28 04:21:46,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.14 vs. limit=15.0
+2024-07-28 04:21:52,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109226.66666666667, ans=0.125
+2024-07-28 04:21:53,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=109226.66666666667, ans=0.015
+2024-07-28 04:21:59,534 INFO [train.py:1114] (3/4) Epoch 9, batch 150, loss[loss=0.1707, simple_loss=0.2586, pruned_loss=0.04135, over 4611.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2967, pruned_loss=0.06301, over 494143.53 frames. ], batch size: 11, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:21:59,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109240.0, ans=0.125
+2024-07-28 04:22:07,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0
+2024-07-28 04:22:10,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=109253.33333333333, ans=0.95
+2024-07-28 04:22:17,893 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.760e+01 6.227e+01 6.826e+01 1.008e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 04:22:32,675 INFO [train.py:1114] (3/4) Epoch 9, batch 200, loss[loss=0.2004, simple_loss=0.2835, pruned_loss=0.05864, over 4547.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2942, pruned_loss=0.06225, over 593612.36 frames. ], batch size: 21, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:22:33,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109306.66666666667, ans=0.0
+2024-07-28 04:22:37,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=109306.66666666667, ans=0.125
+2024-07-28 04:22:43,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109320.0, ans=0.125
+2024-07-28 04:22:48,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=109333.33333333333, ans=0.125
+2024-07-28 04:22:50,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.45 vs. limit=22.5
+2024-07-28 04:23:03,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=109360.0, ans=0.2
+2024-07-28 04:23:03,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.06 vs. limit=6.0
+2024-07-28 04:23:05,832 INFO [train.py:1114] (3/4) Epoch 9, batch 250, loss[loss=0.2068, simple_loss=0.2921, pruned_loss=0.06075, over 4636.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2937, pruned_loss=0.06224, over 670377.37 frames. ], batch size: 16, lr: 8.60e-03, grad_scale: 32.0
+2024-07-28 04:23:06,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109373.33333333333, ans=0.1
+2024-07-28 04:23:15,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=109386.66666666667, ans=0.1
+2024-07-28 04:23:26,176 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 6.084e+01 6.743e+01 8.358e+01 1.381e+02, threshold=1.349e+02, percent-clipped=2.0
+2024-07-28 04:23:26,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=109413.33333333333, ans=0.125
+2024-07-28 04:23:28,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109413.33333333333, ans=0.1
+2024-07-28 04:23:34,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=109426.66666666667, ans=0.0
+2024-07-28 04:23:35,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=109426.66666666667, ans=0.0
+2024-07-28 04:23:40,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=109440.0, ans=0.125
+2024-07-28 04:23:40,735 INFO [train.py:1114] (3/4) Epoch 9, batch 300, loss[loss=0.2488, simple_loss=0.3265, pruned_loss=0.08557, over 4809.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.294, pruned_loss=0.06245, over 730087.53 frames. ], batch size: 15, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:23:48,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109453.33333333333, ans=0.125
+2024-07-28 04:23:50,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=109453.33333333333, ans=0.0
+2024-07-28 04:23:52,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109453.33333333333, ans=0.125
+2024-07-28 04:23:57,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=109466.66666666667, ans=0.125
+2024-07-28 04:24:09,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=109493.33333333333, ans=0.125
+2024-07-28 04:24:14,376 INFO [train.py:1114] (3/4) Epoch 9, batch 350, loss[loss=0.168, simple_loss=0.2522, pruned_loss=0.04188, over 4938.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2938, pruned_loss=0.06188, over 775818.25 frames. ], batch size: 12, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:24:23,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=109520.0, ans=0.0
+2024-07-28 04:24:27,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=109533.33333333333, ans=0.125
+2024-07-28 04:24:32,535 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.584e+01 5.878e+01 6.356e+01 6.901e+01 1.235e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 04:24:34,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109546.66666666667, ans=0.125
+2024-07-28 04:24:46,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109573.33333333333, ans=0.1
+2024-07-28 04:24:47,190 INFO [train.py:1114] (3/4) Epoch 9, batch 400, loss[loss=0.2026, simple_loss=0.292, pruned_loss=0.05661, over 4692.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2932, pruned_loss=0.06215, over 813469.90 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:24:47,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109573.33333333333, ans=0.1
+2024-07-28 04:25:03,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.19 vs. limit=6.0
+2024-07-28 04:25:13,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109626.66666666667, ans=0.1
+2024-07-28 04:25:19,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=109626.66666666667, ans=0.025
+2024-07-28 04:25:21,009 INFO [train.py:1114] (3/4) Epoch 9, batch 450, loss[loss=0.2232, simple_loss=0.3072, pruned_loss=0.06958, over 4643.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2934, pruned_loss=0.06248, over 838597.10 frames. ], batch size: 13, lr: 8.59e-03, grad_scale: 32.0
+2024-07-28 04:25:22,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=109640.0, ans=0.125
+2024-07-28 04:25:32,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs.
limit=15.0 +2024-07-28 04:25:39,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.801e+01 6.257e+01 7.055e+01 9.311e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 04:25:40,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=109680.0, ans=0.2 +2024-07-28 04:25:45,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=109680.0, ans=0.125 +2024-07-28 04:25:47,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=109693.33333333333, ans=0.05 +2024-07-28 04:25:48,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=109693.33333333333, ans=0.1 +2024-07-28 04:25:53,416 INFO [train.py:1114] (3/4) Epoch 9, batch 500, loss[loss=0.2423, simple_loss=0.3239, pruned_loss=0.08038, over 4718.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2921, pruned_loss=0.06161, over 861285.91 frames. ], batch size: 15, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:26:17,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=109720.0, ans=0.0 +2024-07-28 04:26:27,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=109733.33333333333, ans=0.125 +2024-07-28 04:26:38,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.80 vs. limit=15.0 +2024-07-28 04:26:41,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=109746.66666666667, ans=0.2 +2024-07-28 04:26:43,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=109746.66666666667, ans=0.2 +2024-07-28 04:26:46,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=2.93 vs. limit=15.0 +2024-07-28 04:26:49,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=109760.0, ans=0.125 +2024-07-28 04:26:51,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109760.0, ans=0.125 +2024-07-28 04:26:54,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.58 vs. limit=15.0 +2024-07-28 04:26:55,068 INFO [train.py:1114] (3/4) Epoch 9, batch 550, loss[loss=0.2343, simple_loss=0.3181, pruned_loss=0.07525, over 4630.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2928, pruned_loss=0.06195, over 877683.00 frames. 
], batch size: 17, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:27:13,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=109800.0, ans=0.125 +2024-07-28 04:27:15,813 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.786e+01 5.880e+01 6.464e+01 7.237e+01 1.061e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-28 04:27:17,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=109813.33333333333, ans=0.0 +2024-07-28 04:27:27,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=109826.66666666667, ans=0.0 +2024-07-28 04:27:46,994 INFO [train.py:1114] (3/4) Epoch 9, batch 600, loss[loss=0.213, simple_loss=0.2925, pruned_loss=0.06673, over 4658.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2936, pruned_loss=0.06218, over 892071.03 frames. ], batch size: 16, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:27:50,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-28 04:28:01,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.76 vs. limit=15.0 +2024-07-28 04:28:04,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=109866.66666666667, ans=0.0 +2024-07-28 04:28:05,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0 +2024-07-28 04:28:06,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=109880.0, ans=0.0 +2024-07-28 04:28:10,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=109880.0, ans=0.125 +2024-07-28 04:28:10,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0 +2024-07-28 04:28:16,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=109893.33333333333, ans=0.1 +2024-07-28 04:28:20,225 INFO [train.py:1114] (3/4) Epoch 9, batch 650, loss[loss=0.223, simple_loss=0.3083, pruned_loss=0.06891, over 4767.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.293, pruned_loss=0.06222, over 903719.40 frames. ], batch size: 13, lr: 8.58e-03, grad_scale: 32.0 +2024-07-28 04:28:26,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.93 vs. 
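limit=15.0

The recurring `ScheduledFloat: name=..., batch_count=..., ans=...` lines in this log come from icefall's scaling.py: each named quantity (skip rates, balancer probabilities, bypass scale minimums, dropout rates) is scheduled as a piecewise-linear function of the global batch count rather than held constant, and `ans` is the value in effect at the logged batch_count. A minimal sketch of that mechanism, assuming a simplified interface (the real class is a torch module with considerably more machinery):

```python
import bisect
import logging

class ScheduledFloat:
    """Piecewise-linear schedule over batch_count; a sketch of the behaviour
    behind the 'ScheduledFloat: name=..., batch_count=..., ans=...' lines."""

    def __init__(self, *points, name="unnamed"):
        # points are (batch_count, value) knots, e.g. (0.0, 0.2), (4000.0, 0.0)
        pairs = sorted(points)
        self.xs = [x for x, _ in pairs]
        self.ys = [y for _, y in pairs]
        self.name = name

    def __call__(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            ans = self.ys[0]
        elif batch_count >= self.xs[-1]:
            ans = self.ys[-1]
        else:
            # interpolate linearly between the two surrounding knots
            i = bisect.bisect_right(self.xs, batch_count)
            x0, x1 = self.xs[i - 1], self.xs[i]
            y0, y1 = self.ys[i - 1], self.ys[i]
            ans = y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
        logging.info("ScheduledFloat: name=%s, batch_count=%s, ans=%s",
                     self.name, batch_count, ans)
        return ans
```

A schedule like `ScheduledFloat((0.0, 0.2), (4000.0, 0.0), name="conv_skip_rate")` would report `ans=0.0` from batch 4000 onward, consistent with the fully decayed `conv_skip_rate` readings in this stretch of the log; the knot values here are illustrative, not recovered from this run.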
+2024-07-28 04:28:32,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=109920.0, ans=0.0 +2024-07-28 04:28:38,936 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.739e+01 6.277e+01 6.982e+01 1.071e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 04:28:39,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=109946.66666666667, ans=0.125 +2024-07-28 04:28:41,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=109946.66666666667, ans=0.125 +2024-07-28 04:28:53,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.32 vs. limit=10.0 +2024-07-28 04:28:53,381 INFO [train.py:1114] (3/4) Epoch 9, batch 700, loss[loss=0.1661, simple_loss=0.259, pruned_loss=0.03655, over 4639.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2926, pruned_loss=0.06209, over 911305.57 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:28:57,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.34 vs. limit=15.0 +2024-07-28 04:28:58,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=109973.33333333333, ans=0.125 +2024-07-28 04:29:02,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=109986.66666666667, ans=0.2 +2024-07-28 04:29:04,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=109986.66666666667, ans=0.125 +2024-07-28 04:29:15,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=110013.33333333333, ans=0.0 +2024-07-28 04:29:21,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=110026.66666666667, ans=0.025 +2024-07-28 04:29:22,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=110026.66666666667, ans=0.2 +2024-07-28 04:29:24,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.01 vs. limit=10.0 +2024-07-28 04:29:27,309 INFO [train.py:1114] (3/4) Epoch 9, batch 750, loss[loss=0.2023, simple_loss=0.2897, pruned_loss=0.05745, over 4703.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2914, pruned_loss=0.06128, over 917948.91 frames. 
], batch size: 13, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:29:28,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=110040.0, ans=0.025 +2024-07-28 04:29:35,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=110053.33333333333, ans=0.125 +2024-07-28 04:29:38,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=110053.33333333333, ans=0.125 +2024-07-28 04:29:48,613 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.674e+01 6.132e+01 7.146e+01 1.139e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 04:29:50,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=110080.0, ans=0.2 +2024-07-28 04:29:53,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0 +2024-07-28 04:30:01,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=110093.33333333333, ans=0.125 +2024-07-28 04:30:03,386 INFO [train.py:1114] (3/4) Epoch 9, batch 800, loss[loss=0.25, simple_loss=0.3194, pruned_loss=0.09029, over 4840.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2923, pruned_loss=0.0616, over 922989.10 frames. ], batch size: 12, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:30:04,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=110106.66666666667, ans=0.125 +2024-07-28 04:30:37,054 INFO [train.py:1114] (3/4) Epoch 9, batch 850, loss[loss=0.2402, simple_loss=0.3342, pruned_loss=0.07314, over 4662.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2922, pruned_loss=0.06172, over 927309.09 frames. ], batch size: 14, lr: 8.57e-03, grad_scale: 32.0 +2024-07-28 04:30:39,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=110173.33333333333, ans=0.125 +2024-07-28 04:30:39,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110173.33333333333, ans=0.1 +2024-07-28 04:30:44,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110186.66666666667, ans=0.125 +2024-07-28 04:30:47,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=110186.66666666667, ans=0.125 +2024-07-28 04:30:50,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.78 vs. limit=15.0 +2024-07-28 04:30:52,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110200.0, ans=0.0 +2024-07-28 04:30:55,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.250e+01 5.670e+01 6.591e+01 7.214e+01 1.079e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 04:31:10,555 INFO [train.py:1114] (3/4) Epoch 9, batch 900, loss[loss=0.1622, simple_loss=0.2505, pruned_loss=0.03692, over 4856.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2923, pruned_loss=0.06186, over 928392.13 frames. 
], batch size: 12, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:31:20,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. limit=22.5 +2024-07-28 04:31:32,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=110280.0, ans=0.0 +2024-07-28 04:31:44,083 INFO [train.py:1114] (3/4) Epoch 9, batch 950, loss[loss=0.1718, simple_loss=0.2636, pruned_loss=0.03999, over 4781.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2926, pruned_loss=0.06182, over 930153.92 frames. ], batch size: 12, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:31:44,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=110306.66666666667, ans=0.0 +2024-07-28 04:31:44,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110306.66666666667, ans=0.0 +2024-07-28 04:31:45,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110306.66666666667, ans=0.1 +2024-07-28 04:32:02,679 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.804e+01 6.637e+01 7.593e+01 9.914e+01, threshold=1.327e+02, percent-clipped=0.0 +2024-07-28 04:32:04,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=110346.66666666667, ans=0.025 +2024-07-28 04:32:07,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=110346.66666666667, ans=0.125 +2024-07-28 04:32:10,990 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:32:17,284 INFO [train.py:1114] (3/4) Epoch 9, batch 1000, loss[loss=0.2042, simple_loss=0.2967, pruned_loss=0.05585, over 4973.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2941, pruned_loss=0.06263, over 929702.95 frames. ], batch size: 13, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:32:44,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=110413.33333333333, ans=0.2 +2024-07-28 04:32:52,775 INFO [train.py:1114] (3/4) Epoch 9, batch 1050, loss[loss=0.2043, simple_loss=0.2925, pruned_loss=0.05803, over 4874.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2931, pruned_loss=0.06199, over 932470.39 frames. ], batch size: 14, lr: 8.56e-03, grad_scale: 32.0 +2024-07-28 04:32:57,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=110440.0, ans=0.0 +2024-07-28 04:33:07,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.01 vs. 
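limit=15.0

The `train.py:1114` lines are the per-batch progress records: `loss[...]` carries metrics for the current batch, `tot_loss[...]` an average accumulated over recent batches (the `over N frames` figure shows how much audio it covers), followed by batch size, learning rate, and the AMP grad scale. A small parsing sketch, assuming the layout inferred from this log and one record per physical log line; `RECORD`, `metrics`, and `iter_records` are hypothetical helpers, not part of the recipe:

```python
import re

# Matches e.g. "Epoch 9, batch 1100, loss[loss=0.2456, ...], tot_loss[...],
# batch size: 13, lr: 8.55e-03, grad_scale: 32.0"
RECORD = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
    r"loss\[(?P<cur>[^\]]*)\], tot_loss\[(?P<tot>[^\]]*)\], "
    r"batch size: (?P<bsz>\d+), lr: (?P<lr>[0-9.e+-]+)"
)

def metrics(blob: str) -> dict:
    """Turn 'loss=0.2456, simple_loss=0.315, ...' into a dict of floats,
    skipping fragments without '=' such as 'over 4900.00 frames.'."""
    out = {}
    for part in blob.split(","):
        key, eq, val = part.partition("=")
        if eq:
            out[key.strip()] = float(val.split()[0])
    return out

def iter_records(path: str):
    """Yield (epoch, batch, lr, batch_size, cur_metrics, tot_metrics)."""
    with open(path, errors="replace") as f:
        for line in f:
            m = RECORD.search(line)
            if m:
                yield (int(m["epoch"]), int(m["batch"]), float(m["lr"]),
                       int(m["bsz"]), metrics(m["cur"]), metrics(m["tot"]))
```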
+2024-07-28 04:33:11,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110453.33333333333, ans=0.1 +2024-07-28 04:33:13,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110453.33333333333, ans=0.0 +2024-07-28 04:33:26,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=110466.66666666667, ans=0.07 +2024-07-28 04:33:37,203 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.983e+01 5.646e+01 6.301e+01 7.018e+01 9.967e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 04:33:37,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110466.66666666667, ans=0.125 +2024-07-28 04:33:42,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=110480.0, ans=0.0 +2024-07-28 04:33:50,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.94 vs. limit=15.0 +2024-07-28 04:33:53,692 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:33:55,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=110493.33333333333, ans=0.125 +2024-07-28 04:33:58,449 INFO [train.py:1114] (3/4) Epoch 9, batch 1100, loss[loss=0.2456, simple_loss=0.315, pruned_loss=0.08814, over 4900.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2928, pruned_loss=0.06235, over 934828.50 frames. ], batch size: 13, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:34:07,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=110520.0, ans=0.0 +2024-07-28 04:34:32,505 INFO [train.py:1114] (3/4) Epoch 9, batch 1150, loss[loss=0.2327, simple_loss=0.3057, pruned_loss=0.07984, over 4905.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2929, pruned_loss=0.06275, over 934086.13 frames. 
], batch size: 13, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:34:34,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=110573.33333333333, ans=0.125 +2024-07-28 04:34:36,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110573.33333333333, ans=0.1 +2024-07-28 04:34:37,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=110573.33333333333, ans=0.125 +2024-07-28 04:34:41,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110586.66666666667, ans=0.1 +2024-07-28 04:34:46,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=110600.0, ans=0.125 +2024-07-28 04:34:46,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=110600.0, ans=0.0 +2024-07-28 04:34:51,452 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.662e+01 6.289e+01 6.921e+01 1.035e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 04:34:54,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=110613.33333333333, ans=0.125 +2024-07-28 04:35:01,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.31 vs. limit=10.0 +2024-07-28 04:35:03,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.84 vs. limit=10.0 +2024-07-28 04:35:03,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.27 vs. limit=15.0 +2024-07-28 04:35:07,034 INFO [train.py:1114] (3/4) Epoch 9, batch 1200, loss[loss=0.1667, simple_loss=0.2635, pruned_loss=0.03493, over 4875.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2937, pruned_loss=0.06289, over 933220.14 frames. ], batch size: 14, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:35:08,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=110640.0, ans=0.0 +2024-07-28 04:35:12,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=110640.0, ans=0.125 +2024-07-28 04:35:15,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=110653.33333333333, ans=0.125 +2024-07-28 04:35:27,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=110666.66666666667, ans=0.1 +2024-07-28 04:35:29,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=110680.0, ans=0.125 +2024-07-28 04:35:37,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=110693.33333333333, ans=0.125 +2024-07-28 04:35:37,790 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.92 vs. 
limit=22.5 +2024-07-28 04:35:43,338 INFO [train.py:1114] (3/4) Epoch 9, batch 1250, loss[loss=0.2109, simple_loss=0.3044, pruned_loss=0.05868, over 4801.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2942, pruned_loss=0.0628, over 937383.62 frames. ], batch size: 15, lr: 8.55e-03, grad_scale: 32.0 +2024-07-28 04:35:45,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=110706.66666666667, ans=0.125 +2024-07-28 04:36:07,227 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.896e+01 5.807e+01 6.256e+01 7.154e+01 1.109e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 04:36:07,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=110733.33333333333, ans=0.2 +2024-07-28 04:36:20,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=110760.0, ans=0.0 +2024-07-28 04:36:21,789 INFO [train.py:1114] (3/4) Epoch 9, batch 1300, loss[loss=0.2491, simple_loss=0.3243, pruned_loss=0.08695, over 4726.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2939, pruned_loss=0.06279, over 938958.06 frames. ], batch size: 19, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:36:26,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.03 vs. limit=15.0 +2024-07-28 04:36:55,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=110813.33333333333, ans=0.0 +2024-07-28 04:37:22,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=110826.66666666667, ans=0.125 +2024-07-28 04:37:25,167 INFO [train.py:1114] (3/4) Epoch 9, batch 1350, loss[loss=0.1923, simple_loss=0.2728, pruned_loss=0.05593, over 4760.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2934, pruned_loss=0.06243, over 941241.23 frames. ], batch size: 13, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:37:31,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=110853.33333333333, ans=0.125 +2024-07-28 04:37:36,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=110853.33333333333, ans=0.0 +2024-07-28 04:37:43,872 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.724e+01 6.443e+01 7.516e+01 1.167e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 04:37:52,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=110880.0, ans=0.1 +2024-07-28 04:37:59,958 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.39 vs. limit=15.0 +2024-07-28 04:38:01,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=110893.33333333333, ans=0.125 +2024-07-28 04:38:02,854 INFO [train.py:1114] (3/4) Epoch 9, batch 1400, loss[loss=0.1705, simple_loss=0.2493, pruned_loss=0.0458, over 4706.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2937, pruned_loss=0.06247, over 942884.80 frames. 
], batch size: 11, lr: 8.54e-03, grad_scale: 32.0 +2024-07-28 04:38:04,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=110906.66666666667, ans=0.2 +2024-07-28 04:38:08,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=110906.66666666667, ans=0.0 +2024-07-28 04:38:19,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=110920.0, ans=0.0 +2024-07-28 04:38:31,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=110946.66666666667, ans=0.125 +2024-07-28 04:38:44,498 INFO [train.py:1114] (3/4) Epoch 9, batch 1450, loss[loss=0.2677, simple_loss=0.3444, pruned_loss=0.09543, over 4682.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2933, pruned_loss=0.06253, over 942946.64 frames. ], batch size: 15, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:38:48,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=110973.33333333333, ans=0.125 +2024-07-28 04:38:50,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=110986.66666666667, ans=0.125 +2024-07-28 04:39:03,121 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.128e+01 5.882e+01 6.432e+01 7.495e+01 9.959e+01, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 04:39:14,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=111026.66666666667, ans=0.125 +2024-07-28 04:39:19,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.07 vs. limit=15.0 +2024-07-28 04:39:19,214 INFO [train.py:1114] (3/4) Epoch 9, batch 1500, loss[loss=0.2067, simple_loss=0.297, pruned_loss=0.0582, over 4805.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2944, pruned_loss=0.06284, over 942678.43 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:39:20,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-28 04:39:20,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=111040.0, ans=0.125 +2024-07-28 04:39:23,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111040.0, ans=0.0 +2024-07-28 04:39:25,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=111053.33333333333, ans=0.2 +2024-07-28 04:39:50,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=111093.33333333333, ans=0.0 +2024-07-28 04:39:56,249 INFO [train.py:1114] (3/4) Epoch 9, batch 1550, loss[loss=0.2356, simple_loss=0.332, pruned_loss=0.06961, over 4896.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2934, pruned_loss=0.06218, over 938558.04 frames. 
], batch size: 15, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:40:06,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111120.0, ans=0.125 +2024-07-28 04:40:08,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=111120.0, ans=0.0 +2024-07-28 04:40:11,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=111133.33333333333, ans=0.025 +2024-07-28 04:40:14,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.781e+01 6.614e+01 7.335e+01 1.076e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-28 04:40:29,183 INFO [train.py:1114] (3/4) Epoch 9, batch 1600, loss[loss=0.1984, simple_loss=0.2946, pruned_loss=0.05107, over 4880.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2946, pruned_loss=0.06308, over 936972.18 frames. ], batch size: 14, lr: 8.53e-03, grad_scale: 32.0 +2024-07-28 04:40:30,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=111173.33333333333, ans=0.0 +2024-07-28 04:40:42,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.05 vs. limit=6.0 +2024-07-28 04:40:45,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=111200.0, ans=0.125 +2024-07-28 04:40:50,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=111213.33333333333, ans=0.125 +2024-07-28 04:40:52,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.67 vs. limit=15.0 +2024-07-28 04:40:58,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.12 vs. limit=15.0 +2024-07-28 04:41:03,420 INFO [train.py:1114] (3/4) Epoch 9, batch 1650, loss[loss=0.2212, simple_loss=0.3083, pruned_loss=0.06702, over 4671.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2948, pruned_loss=0.0631, over 936608.89 frames. ], batch size: 14, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:41:10,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111253.33333333333, ans=0.125 +2024-07-28 04:41:13,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. 
limit=15.0 +2024-07-28 04:41:18,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=111266.66666666667, ans=0.125 +2024-07-28 04:41:21,886 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.810e+01 6.591e+01 7.411e+01 1.241e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-28 04:41:28,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=111280.0, ans=0.09899494936611666 +2024-07-28 04:41:31,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111293.33333333333, ans=0.125 +2024-07-28 04:41:38,619 INFO [train.py:1114] (3/4) Epoch 9, batch 1700, loss[loss=0.2146, simple_loss=0.286, pruned_loss=0.07164, over 4686.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2934, pruned_loss=0.06201, over 938533.98 frames. ], batch size: 11, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:41:42,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=111306.66666666667, ans=0.0 +2024-07-28 04:41:47,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=111320.0, ans=0.125 +2024-07-28 04:41:48,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111320.0, ans=0.1 +2024-07-28 04:41:49,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=111320.0, ans=0.125 +2024-07-28 04:41:52,606 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.67 vs. limit=15.0 +2024-07-28 04:41:54,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.80 vs. limit=15.0 +2024-07-28 04:41:56,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0 +2024-07-28 04:41:56,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=111333.33333333333, ans=0.125 +2024-07-28 04:42:04,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.47 vs. limit=15.0 +2024-07-28 04:42:12,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=18.13 vs. limit=15.0 +2024-07-28 04:42:13,651 INFO [train.py:1114] (3/4) Epoch 9, batch 1750, loss[loss=0.1649, simple_loss=0.2416, pruned_loss=0.04414, over 4804.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2917, pruned_loss=0.06132, over 939809.12 frames. 
], batch size: 11, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:42:17,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=111373.33333333333, ans=0.0 +2024-07-28 04:42:23,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=111386.66666666667, ans=0.125 +2024-07-28 04:42:32,287 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.858e+01 5.540e+01 6.107e+01 6.918e+01 9.511e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 04:42:33,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=111413.33333333333, ans=0.95 +2024-07-28 04:42:37,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=111413.33333333333, ans=0.0 +2024-07-28 04:42:41,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=111426.66666666667, ans=0.07 +2024-07-28 04:42:46,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=111440.0, ans=0.125 +2024-07-28 04:42:47,085 INFO [train.py:1114] (3/4) Epoch 9, batch 1800, loss[loss=0.1813, simple_loss=0.2697, pruned_loss=0.0465, over 4642.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2915, pruned_loss=0.06127, over 940380.59 frames. ], batch size: 13, lr: 8.52e-03, grad_scale: 32.0 +2024-07-28 04:43:05,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=111466.66666666667, ans=0.0 +2024-07-28 04:43:15,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=111493.33333333333, ans=0.0 +2024-07-28 04:43:17,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=111493.33333333333, ans=0.125 +2024-07-28 04:43:20,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=111493.33333333333, ans=0.025 +2024-07-28 04:43:22,191 INFO [train.py:1114] (3/4) Epoch 9, batch 1850, loss[loss=0.2122, simple_loss=0.3021, pruned_loss=0.06111, over 4810.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2911, pruned_loss=0.061, over 940657.83 frames. ], batch size: 14, lr: 8.51e-03, grad_scale: 32.0 +2024-07-28 04:43:32,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=111520.0, ans=0.1 +2024-07-28 04:43:41,628 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.789e+01 6.622e+01 8.000e+01 1.293e+02, threshold=1.324e+02, percent-clipped=1.0 +2024-07-28 04:43:45,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111546.66666666667, ans=0.125 +2024-07-28 04:43:53,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=111560.0, ans=0.025 +2024-07-28 04:43:56,516 INFO [train.py:1114] (3/4) Epoch 9, batch 1900, loss[loss=0.203, simple_loss=0.2866, pruned_loss=0.05972, over 4663.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2908, pruned_loss=0.06067, over 942082.81 frames. 
], batch size: 14, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:43:59,339 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:44:01,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=111573.33333333333, ans=0.125 +2024-07-28 04:44:29,957 INFO [train.py:1114] (3/4) Epoch 9, batch 1950, loss[loss=0.1768, simple_loss=0.2646, pruned_loss=0.04452, over 4892.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2918, pruned_loss=0.06039, over 943805.24 frames. ], batch size: 13, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:44:46,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.21 vs. limit=15.0 +2024-07-28 04:44:48,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.24 vs. limit=15.0 +2024-07-28 04:44:50,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=111666.66666666667, ans=0.1 +2024-07-28 04:44:50,740 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.065e+01 5.906e+01 6.292e+01 6.984e+01 1.022e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 04:44:54,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=111680.0, ans=10.0 +2024-07-28 04:44:56,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=111680.0, ans=0.125 +2024-07-28 04:45:02,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=111693.33333333333, ans=0.125 +2024-07-28 04:45:05,297 INFO [train.py:1114] (3/4) Epoch 9, batch 2000, loss[loss=0.1794, simple_loss=0.26, pruned_loss=0.04937, over 4798.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2931, pruned_loss=0.06111, over 941096.07 frames. ], batch size: 11, lr: 8.51e-03, grad_scale: 64.0 +2024-07-28 04:45:21,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=111720.0, ans=0.025 +2024-07-28 04:45:21,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.26 vs. limit=22.5 +2024-07-28 04:45:22,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=111733.33333333333, ans=0.125 +2024-07-28 04:45:23,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=111733.33333333333, ans=0.0 +2024-07-28 04:45:25,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=111733.33333333333, ans=0.2 +2024-07-28 04:45:29,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=111746.66666666667, ans=0.2 +2024-07-28 04:45:35,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.09 vs. 
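limit=15.0

The `optim.py:487` warnings summarize the gradient-norm statistics behind adaptive clipping: the five numbers are the minimum, 25th, 50th, and 75th percentiles, and maximum of recently observed gradient norms, and throughout this log the printed threshold equals Clipping_scale (2.0) times the middle value, i.e. threshold = 2.0 × median (e.g. 2.0 × 6.292e+01 = 1.258e+02 in the warning just above); percent-clipped is the share of recent batches whose norm exceeded the threshold. A hypothetical reconstruction of that bookkeeping, with the window size and exact accounting being guesses (the real logic lives in icefall's optim.py):

```python
from collections import deque
import numpy as np

class GradNormClipper:
    """Track recent gradient norms, clip at clipping_scale * median, and
    report statistics in the format of the optim.py warnings above."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)   # sliding window of grad norms
        self.clipped = 0

    def scale_for(self, grad_norm: float) -> float:
        """Record grad_norm and return the factor to multiply gradients by."""
        self.norms.append(grad_norm)
        threshold = self.clipping_scale * float(np.median(self.norms))
        if grad_norm > threshold:
            self.clipped += 1
            return threshold / grad_norm   # shrink gradients to the threshold
        return 1.0

    def report(self) -> str:
        q = np.percentile(np.asarray(self.norms), [0, 25, 50, 75, 100])
        threshold = self.clipping_scale * q[2]
        pct = 100.0 * self.clipped / max(len(self.norms), 1)
        return ("Clipping_scale=%s, grad-norm quartiles " % self.clipping_scale
                + " ".join("%.3e" % v for v in q)
                + ", threshold=%.3e, percent-clipped=%.1f" % (threshold, pct))
```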
+2024-07-28 04:45:42,825 INFO [train.py:1114] (3/4) Epoch 9, batch 2050, loss[loss=0.1631, simple_loss=0.2496, pruned_loss=0.03827, over 4604.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2922, pruned_loss=0.06112, over 939777.96 frames. ], batch size: 11, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:45:53,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=111786.66666666667, ans=0.125 +2024-07-28 04:45:53,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=111786.66666666667, ans=0.125 +2024-07-28 04:45:58,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.86 vs. limit=15.0 +2024-07-28 04:46:01,608 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.743e+01 6.420e+01 7.803e+01 1.541e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-28 04:46:16,245 INFO [train.py:1114] (3/4) Epoch 9, batch 2100, loss[loss=0.2312, simple_loss=0.3225, pruned_loss=0.06993, over 4765.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2911, pruned_loss=0.06064, over 941521.64 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:46:40,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.44 vs. limit=15.0 +2024-07-28 04:46:50,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=111893.33333333333, ans=0.125 +2024-07-28 04:46:51,241 INFO [train.py:1114] (3/4) Epoch 9, batch 2150, loss[loss=0.2003, simple_loss=0.2912, pruned_loss=0.05472, over 4893.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2905, pruned_loss=0.0603, over 944503.09 frames. ], batch size: 13, lr: 8.50e-03, grad_scale: 64.0 +2024-07-28 04:46:51,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=111906.66666666667, ans=0.0 +2024-07-28 04:46:52,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=111906.66666666667, ans=0.125 +2024-07-28 04:46:53,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=111906.66666666667, ans=0.125 +2024-07-28 04:46:58,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=111920.0, ans=0.125 +2024-07-28 04:47:01,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.66 vs. 
limit=6.0 +2024-07-28 04:47:08,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=111933.33333333333, ans=0.125 +2024-07-28 04:47:12,510 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.843e+01 7.038e+01 8.009e+01 1.104e+02, threshold=1.408e+02, percent-clipped=0.0 +2024-07-28 04:47:12,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=111946.66666666667, ans=0.0 +2024-07-28 04:47:15,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.02 vs. limit=15.0 +2024-07-28 04:47:20,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.80 vs. limit=15.0 +2024-07-28 04:47:23,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=111960.0, ans=0.1 +2024-07-28 04:47:26,784 INFO [train.py:1114] (3/4) Epoch 9, batch 2200, loss[loss=0.2053, simple_loss=0.2976, pruned_loss=0.05645, over 4810.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2915, pruned_loss=0.06086, over 943854.42 frames. ], batch size: 14, lr: 8.50e-03, grad_scale: 32.0 +2024-07-28 04:47:33,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=111986.66666666667, ans=0.2 +2024-07-28 04:47:57,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.06 vs. limit=15.0 +2024-07-28 04:48:01,875 INFO [train.py:1114] (3/4) Epoch 9, batch 2250, loss[loss=0.2054, simple_loss=0.298, pruned_loss=0.05643, over 4688.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2913, pruned_loss=0.06125, over 942293.36 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:48:02,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112040.0, ans=0.125 +2024-07-28 04:48:15,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=112066.66666666667, ans=0.125 +2024-07-28 04:48:15,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=112066.66666666667, ans=0.5 +2024-07-28 04:48:19,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.76 vs. limit=15.0 +2024-07-28 04:48:20,917 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 6.028e+01 6.824e+01 8.191e+01 1.096e+02, threshold=1.365e+02, percent-clipped=0.0 +2024-07-28 04:48:34,842 INFO [train.py:1114] (3/4) Epoch 9, batch 2300, loss[loss=0.1946, simple_loss=0.2806, pruned_loss=0.05432, over 4934.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2908, pruned_loss=0.06126, over 939779.36 frames. 
], batch size: 12, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:48:52,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112133.33333333333, ans=0.125 +2024-07-28 04:49:30,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=112146.66666666667, ans=0.125 +2024-07-28 04:49:34,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.13 vs. limit=8.0 +2024-07-28 04:49:50,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=112160.0, ans=0.2 +2024-07-28 04:49:52,133 INFO [train.py:1114] (3/4) Epoch 9, batch 2350, loss[loss=0.2203, simple_loss=0.3036, pruned_loss=0.06853, over 4638.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2909, pruned_loss=0.06122, over 941898.81 frames. ], batch size: 13, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:49:52,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=112173.33333333333, ans=0.125 +2024-07-28 04:49:58,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=112186.66666666667, ans=0.0 +2024-07-28 04:49:59,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112186.66666666667, ans=0.125 +2024-07-28 04:50:07,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=112200.0, ans=0.2 +2024-07-28 04:50:11,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.897e+01 5.463e+01 6.108e+01 6.939e+01 1.035e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 04:50:12,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112213.33333333333, ans=0.1 +2024-07-28 04:50:12,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112213.33333333333, ans=0.0 +2024-07-28 04:50:18,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=112226.66666666667, ans=0.0 +2024-07-28 04:50:22,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112226.66666666667, ans=0.1 +2024-07-28 04:50:25,456 INFO [train.py:1114] (3/4) Epoch 9, batch 2400, loss[loss=0.2074, simple_loss=0.2899, pruned_loss=0.06249, over 4638.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2912, pruned_loss=0.06076, over 941655.33 frames. 
], batch size: 12, lr: 8.49e-03, grad_scale: 32.0 +2024-07-28 04:50:30,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=112240.0, ans=0.2 +2024-07-28 04:50:36,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=112240.0, ans=0.0 +2024-07-28 04:50:41,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=112253.33333333333, ans=0.0 +2024-07-28 04:50:42,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=112253.33333333333, ans=0.05 +2024-07-28 04:50:50,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=12.0 +2024-07-28 04:51:05,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=112306.66666666667, ans=0.125 +2024-07-28 04:51:05,888 INFO [train.py:1114] (3/4) Epoch 9, batch 2450, loss[loss=0.1941, simple_loss=0.2843, pruned_loss=0.0519, over 4686.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2931, pruned_loss=0.06163, over 936864.79 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:51:14,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=112320.0, ans=0.125 +2024-07-28 04:51:22,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112333.33333333333, ans=0.1 +2024-07-28 04:51:25,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.551e+01 5.862e+01 6.434e+01 7.688e+01 1.164e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 04:51:27,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=112346.66666666667, ans=0.0 +2024-07-28 04:51:42,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=112373.33333333333, ans=0.2 +2024-07-28 04:51:43,136 INFO [train.py:1114] (3/4) Epoch 9, batch 2500, loss[loss=0.2059, simple_loss=0.2911, pruned_loss=0.06035, over 4815.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2918, pruned_loss=0.0609, over 938910.29 frames. ], batch size: 14, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:51:43,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.82 vs. 
limit=15.0 +2024-07-28 04:51:56,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=112400.0, ans=0.125 +2024-07-28 04:52:01,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=112400.0, ans=0.2 +2024-07-28 04:52:01,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112400.0, ans=0.1 +2024-07-28 04:52:07,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=112413.33333333333, ans=0.125 +2024-07-28 04:52:09,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=13.05 vs. limit=22.5 +2024-07-28 04:52:16,733 INFO [train.py:1114] (3/4) Epoch 9, batch 2550, loss[loss=0.1897, simple_loss=0.2595, pruned_loss=0.05995, over 4799.00 frames. ], tot_loss[loss=0.207, simple_loss=0.292, pruned_loss=0.06101, over 938531.04 frames. ], batch size: 11, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:52:36,354 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.845e+01 6.430e+01 7.273e+01 1.102e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 04:52:36,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=112480.0, ans=0.0 +2024-07-28 04:52:40,584 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 04:52:44,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=112493.33333333333, ans=0.0 +2024-07-28 04:52:46,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=112493.33333333333, ans=0.0 +2024-07-28 04:52:50,640 INFO [train.py:1114] (3/4) Epoch 9, batch 2600, loss[loss=0.1784, simple_loss=0.267, pruned_loss=0.04492, over 4907.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2926, pruned_loss=0.06104, over 938317.47 frames. ], batch size: 13, lr: 8.48e-03, grad_scale: 32.0 +2024-07-28 04:52:53,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=112506.66666666667, ans=0.025 +2024-07-28 04:53:00,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=112520.0, ans=0.125 +2024-07-28 04:53:03,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=112533.33333333333, ans=0.125 +2024-07-28 04:53:06,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=112533.33333333333, ans=0.125 +2024-07-28 04:53:11,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=112546.66666666667, ans=0.125 +2024-07-28 04:53:18,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=112560.0, ans=0.0 +2024-07-28 04:53:25,570 INFO [train.py:1114] (3/4) Epoch 9, batch 2650, loss[loss=0.2171, simple_loss=0.3046, pruned_loss=0.06486, over 4631.00 frames. 
], tot_loss[loss=0.2073, simple_loss=0.2925, pruned_loss=0.06103, over 940089.40 frames. ], batch size: 16, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:53:26,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.61 vs. limit=6.0 +2024-07-28 04:53:36,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=112586.66666666667, ans=0.125 +2024-07-28 04:53:44,511 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.626e+01 6.228e+01 7.272e+01 1.238e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 04:53:50,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=112613.33333333333, ans=0.1 +2024-07-28 04:53:51,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112626.66666666667, ans=0.1 +2024-07-28 04:53:52,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=112626.66666666667, ans=0.025 +2024-07-28 04:53:57,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.18 vs. limit=6.0 +2024-07-28 04:53:58,570 INFO [train.py:1114] (3/4) Epoch 9, batch 2700, loss[loss=0.2072, simple_loss=0.3027, pruned_loss=0.05584, over 4732.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2925, pruned_loss=0.06133, over 940059.82 frames. ], batch size: 14, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:54:02,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=112640.0, ans=0.125 +2024-07-28 04:54:06,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=112653.33333333333, ans=0.125 +2024-07-28 04:54:15,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=112666.66666666667, ans=15.0 +2024-07-28 04:54:28,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=112693.33333333333, ans=0.125 +2024-07-28 04:54:32,616 INFO [train.py:1114] (3/4) Epoch 9, batch 2750, loss[loss=0.1801, simple_loss=0.2628, pruned_loss=0.04873, over 4695.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2918, pruned_loss=0.06132, over 939830.54 frames. 
], batch size: 12, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:54:42,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=112720.0, ans=0.125 +2024-07-28 04:54:47,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=112733.33333333333, ans=0.07 +2024-07-28 04:54:47,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=112733.33333333333, ans=0.125 +2024-07-28 04:54:51,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 6.173e+01 6.885e+01 7.984e+01 1.102e+02, threshold=1.377e+02, percent-clipped=0.0 +2024-07-28 04:54:57,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=112746.66666666667, ans=0.125 +2024-07-28 04:54:57,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=112746.66666666667, ans=0.0 +2024-07-28 04:55:01,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=112760.0, ans=0.1 +2024-07-28 04:55:05,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=112773.33333333333, ans=0.2 +2024-07-28 04:55:05,839 INFO [train.py:1114] (3/4) Epoch 9, batch 2800, loss[loss=0.3257, simple_loss=0.3856, pruned_loss=0.1328, over 2907.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2924, pruned_loss=0.06168, over 937151.28 frames. ], batch size: 35, lr: 8.47e-03, grad_scale: 32.0 +2024-07-28 04:55:08,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=112773.33333333333, ans=0.025 +2024-07-28 04:55:10,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.35 vs. limit=22.5 +2024-07-28 04:55:11,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.38 vs. limit=12.0 +2024-07-28 04:55:18,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=112786.66666666667, ans=0.2 +2024-07-28 04:55:24,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=112800.0, ans=0.125 +2024-07-28 04:55:30,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=112813.33333333333, ans=0.125 +2024-07-28 04:55:39,185 INFO [train.py:1114] (3/4) Epoch 9, batch 2850, loss[loss=0.2198, simple_loss=0.299, pruned_loss=0.07025, over 4956.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2927, pruned_loss=0.0618, over 935370.84 frames. 
], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:55:39,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=112840.0, ans=0.0 +2024-07-28 04:55:50,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=112853.33333333333, ans=0.125 +2024-07-28 04:55:51,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=112853.33333333333, ans=0.1 +2024-07-28 04:55:56,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=112866.66666666667, ans=0.2 +2024-07-28 04:55:58,249 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.781e+01 6.339e+01 7.378e+01 1.144e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 04:56:11,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=112893.33333333333, ans=0.125 +2024-07-28 04:56:12,161 INFO [train.py:1114] (3/4) Epoch 9, batch 2900, loss[loss=0.214, simple_loss=0.2945, pruned_loss=0.06673, over 4826.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2933, pruned_loss=0.06126, over 939215.26 frames. ], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:26,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=112920.0, ans=0.0 +2024-07-28 04:56:28,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=112933.33333333333, ans=0.125 +2024-07-28 04:56:30,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=112933.33333333333, ans=0.2 +2024-07-28 04:56:47,482 INFO [train.py:1114] (3/4) Epoch 9, batch 2950, loss[loss=0.1845, simple_loss=0.2661, pruned_loss=0.05141, over 4696.00 frames. ], tot_loss[loss=0.207, simple_loss=0.292, pruned_loss=0.06098, over 938473.57 frames. ], batch size: 12, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:56:48,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.85 vs. limit=15.0 +2024-07-28 04:57:04,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=113000.0, ans=0.1 +2024-07-28 04:57:10,754 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.914e+01 6.880e+01 7.946e+01 1.236e+02, threshold=1.376e+02, percent-clipped=0.0 +2024-07-28 04:57:13,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=113013.33333333333, ans=0.0 +2024-07-28 04:57:18,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113026.66666666667, ans=0.1 +2024-07-28 04:57:24,700 INFO [train.py:1114] (3/4) Epoch 9, batch 3000, loss[loss=0.2175, simple_loss=0.3038, pruned_loss=0.06557, over 4765.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2917, pruned_loss=0.06095, over 938083.85 frames. 
], batch size: 13, lr: 8.46e-03, grad_scale: 32.0 +2024-07-28 04:57:24,700 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 04:57:37,296 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.1766, simple_loss=0.2807, pruned_loss=0.03626, over 944034.00 frames. +2024-07-28 04:57:37,296 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 04:57:39,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=113040.0, ans=0.09899494936611666 +2024-07-28 04:57:44,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=113053.33333333333, ans=0.125 +2024-07-28 04:57:48,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=6.0 +2024-07-28 04:58:03,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=113093.33333333333, ans=0.125 +2024-07-28 04:58:06,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113093.33333333333, ans=0.1 +2024-07-28 04:58:11,105 INFO [train.py:1114] (3/4) Epoch 9, batch 3050, loss[loss=0.2048, simple_loss=0.2928, pruned_loss=0.0584, over 4640.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2926, pruned_loss=0.06085, over 937030.47 frames. ], batch size: 12, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:11,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=113106.66666666667, ans=0.125 +2024-07-28 04:58:11,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=113106.66666666667, ans=0.2 +2024-07-28 04:58:17,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=113120.0, ans=0.125 +2024-07-28 04:58:20,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=113120.0, ans=0.125 +2024-07-28 04:58:21,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=113120.0, ans=0.125 +2024-07-28 04:58:23,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=113120.0, ans=0.025 +2024-07-28 04:58:24,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=113133.33333333333, ans=0.07 +2024-07-28 04:58:32,584 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.528e+01 6.161e+01 6.934e+01 1.105e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 04:58:35,770 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.58 vs. 
limit=15.0 +2024-07-28 04:58:39,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=113160.0, ans=0.125 +2024-07-28 04:58:39,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=113160.0, ans=0.015 +2024-07-28 04:58:42,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113160.0, ans=0.125 +2024-07-28 04:58:47,569 INFO [train.py:1114] (3/4) Epoch 9, batch 3100, loss[loss=0.2096, simple_loss=0.2986, pruned_loss=0.06027, over 4615.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2932, pruned_loss=0.06163, over 937540.52 frames. ], batch size: 16, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:58:47,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113173.33333333333, ans=0.125 +2024-07-28 04:58:58,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=113186.66666666667, ans=0.0 +2024-07-28 04:58:59,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=113186.66666666667, ans=0.125 +2024-07-28 04:59:02,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.34 vs. limit=12.0 +2024-07-28 04:59:22,196 INFO [train.py:1114] (3/4) Epoch 9, batch 3150, loss[loss=0.1968, simple_loss=0.282, pruned_loss=0.05582, over 4641.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2938, pruned_loss=0.06182, over 937739.52 frames. ], batch size: 17, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:24,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113240.0, ans=0.1 +2024-07-28 04:59:41,418 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.794e+01 6.244e+01 6.965e+01 1.084e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 04:59:55,510 INFO [train.py:1114] (3/4) Epoch 9, batch 3200, loss[loss=0.2288, simple_loss=0.3102, pruned_loss=0.07373, over 4827.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2927, pruned_loss=0.06126, over 939323.22 frames. ], batch size: 13, lr: 8.45e-03, grad_scale: 32.0 +2024-07-28 04:59:56,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=113306.66666666667, ans=0.0 +2024-07-28 05:00:03,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=113320.0, ans=0.1 +2024-07-28 05:00:15,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.75 vs. limit=6.0 +2024-07-28 05:00:18,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=113346.66666666667, ans=0.0 +2024-07-28 05:00:30,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.39 vs. limit=12.0 +2024-07-28 05:00:32,261 INFO [train.py:1114] (3/4) Epoch 9, batch 3250, loss[loss=0.2056, simple_loss=0.3022, pruned_loss=0.05446, over 4937.00 frames. 
], tot_loss[loss=0.2083, simple_loss=0.2932, pruned_loss=0.06171, over 940118.23 frames. ], batch size: 14, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:00:51,416 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.809e+01 6.527e+01 7.156e+01 1.090e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 05:00:55,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-07-28 05:01:05,603 INFO [train.py:1114] (3/4) Epoch 9, batch 3300, loss[loss=0.2485, simple_loss=0.3281, pruned_loss=0.08445, over 4657.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2916, pruned_loss=0.06084, over 940263.70 frames. ], batch size: 19, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:07,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=113440.0, ans=0.125 +2024-07-28 05:01:10,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=113440.0, ans=0.125 +2024-07-28 05:01:13,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=113453.33333333333, ans=0.1 +2024-07-28 05:01:15,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=113453.33333333333, ans=0.0 +2024-07-28 05:01:20,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=113466.66666666667, ans=0.2 +2024-07-28 05:01:39,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=113493.33333333333, ans=10.0 +2024-07-28 05:01:40,564 INFO [train.py:1114] (3/4) Epoch 9, batch 3350, loss[loss=0.2735, simple_loss=0.3555, pruned_loss=0.09578, over 4612.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2924, pruned_loss=0.06123, over 938681.06 frames. 
], batch size: 17, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:01:44,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113506.66666666667, ans=0.125 +2024-07-28 05:01:48,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=113520.0, ans=0.2 +2024-07-28 05:01:49,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=113520.0, ans=0.05 +2024-07-28 05:01:55,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=113533.33333333333, ans=0.2 +2024-07-28 05:01:59,595 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:02:00,181 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.762e+01 6.208e+01 6.963e+01 1.151e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 05:02:08,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=113546.66666666667, ans=0.0 +2024-07-28 05:02:19,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=113560.0, ans=0.2 +2024-07-28 05:02:22,812 INFO [train.py:1114] (3/4) Epoch 9, batch 3400, loss[loss=0.1792, simple_loss=0.2628, pruned_loss=0.04775, over 4820.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2928, pruned_loss=0.06146, over 937342.19 frames. ], batch size: 11, lr: 8.44e-03, grad_scale: 32.0 +2024-07-28 05:02:41,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=113600.0, ans=0.0 +2024-07-28 05:02:41,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:41,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=113600.0, ans=0.125 +2024-07-28 05:02:47,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=113613.33333333333, ans=0.025 +2024-07-28 05:02:49,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=113626.66666666667, ans=0.125 +2024-07-28 05:02:51,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=113626.66666666667, ans=0.0 +2024-07-28 05:02:56,784 INFO [train.py:1114] (3/4) Epoch 9, batch 3450, loss[loss=0.2384, simple_loss=0.3142, pruned_loss=0.08126, over 4729.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2938, pruned_loss=0.06193, over 937379.96 frames. ], batch size: 19, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:01,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten.whitening_limit, batch_count=113640.0, ans=15.0 +2024-07-28 05:03:02,786 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:03:07,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.81 vs. 
limit=22.5 +2024-07-28 05:03:16,035 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 6.050e+01 6.762e+01 7.636e+01 1.132e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 05:03:17,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.97 vs. limit=12.0 +2024-07-28 05:03:19,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=113680.0, ans=0.1 +2024-07-28 05:03:26,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=113693.33333333333, ans=0.125 +2024-07-28 05:03:26,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=113693.33333333333, ans=0.07 +2024-07-28 05:03:29,919 INFO [train.py:1114] (3/4) Epoch 9, batch 3500, loss[loss=0.173, simple_loss=0.249, pruned_loss=0.04847, over 4927.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2913, pruned_loss=0.06093, over 937676.26 frames. ], batch size: 12, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:03:32,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.84 vs. limit=15.0 +2024-07-28 05:03:41,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=113720.0, ans=0.125 +2024-07-28 05:03:42,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=113720.0, ans=0.2 +2024-07-28 05:03:43,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=113720.0, ans=0.125 +2024-07-28 05:03:51,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=113746.66666666667, ans=0.125 +2024-07-28 05:03:56,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=113746.66666666667, ans=0.09899494936611666 +2024-07-28 05:04:00,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=113760.0, ans=0.125 +2024-07-28 05:04:06,623 INFO [train.py:1114] (3/4) Epoch 9, batch 3550, loss[loss=0.2089, simple_loss=0.3074, pruned_loss=0.05515, over 4661.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.291, pruned_loss=0.06092, over 938617.79 frames. ], batch size: 14, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:07,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=113773.33333333333, ans=0.0 +2024-07-28 05:04:25,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.07 vs. 
limit=10.0 +2024-07-28 05:04:26,149 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.609e+01 6.345e+01 7.145e+01 1.049e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 05:04:31,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=113813.33333333333, ans=0.2 +2024-07-28 05:04:33,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.16 vs. limit=15.0 +2024-07-28 05:04:40,132 INFO [train.py:1114] (3/4) Epoch 9, batch 3600, loss[loss=0.1998, simple_loss=0.293, pruned_loss=0.05329, over 4962.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2909, pruned_loss=0.06052, over 940181.30 frames. ], batch size: 13, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:04:40,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113840.0, ans=0.125 +2024-07-28 05:04:47,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=113853.33333333333, ans=0.2 +2024-07-28 05:05:13,674 INFO [train.py:1114] (3/4) Epoch 9, batch 3650, loss[loss=0.2141, simple_loss=0.3086, pruned_loss=0.05983, over 4916.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2899, pruned_loss=0.06012, over 940634.51 frames. ], batch size: 15, lr: 8.43e-03, grad_scale: 32.0 +2024-07-28 05:05:21,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=113920.0, ans=0.125 +2024-07-28 05:05:24,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.35 vs. limit=15.0 +2024-07-28 05:05:25,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=113920.0, ans=0.125 +2024-07-28 05:05:32,931 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.862e+01 6.678e+01 8.090e+01 1.321e+02, threshold=1.336e+02, percent-clipped=1.0 +2024-07-28 05:05:36,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=113946.66666666667, ans=0.125 +2024-07-28 05:05:47,152 INFO [train.py:1114] (3/4) Epoch 9, batch 3700, loss[loss=0.2224, simple_loss=0.3127, pruned_loss=0.06607, over 4927.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2901, pruned_loss=0.06031, over 941900.72 frames. ], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:06:24,418 INFO [train.py:1114] (3/4) Epoch 9, batch 3750, loss[loss=0.2145, simple_loss=0.2804, pruned_loss=0.07432, over 4791.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.06074, over 943261.55 frames. 
], batch size: 11, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:06:25,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=114040.0, ans=0.125 +2024-07-28 05:06:25,902 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:06:35,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=114053.33333333333, ans=0.125 +2024-07-28 05:06:44,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.905e+01 5.778e+01 6.645e+01 7.408e+01 1.039e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:06:44,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=114080.0, ans=0.0 +2024-07-28 05:06:50,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=114080.0, ans=0.0 +2024-07-28 05:06:57,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=114093.33333333333, ans=0.125 +2024-07-28 05:06:58,229 INFO [train.py:1114] (3/4) Epoch 9, batch 3800, loss[loss=0.2511, simple_loss=0.3341, pruned_loss=0.08404, over 4812.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2914, pruned_loss=0.06124, over 941615.94 frames. ], batch size: 14, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:02,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=114106.66666666667, ans=0.2 +2024-07-28 05:07:03,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. limit=6.0 +2024-07-28 05:07:06,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.79 vs. limit=6.0 +2024-07-28 05:07:29,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0 +2024-07-28 05:07:29,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.63 vs. limit=15.0 +2024-07-28 05:07:33,844 INFO [train.py:1114] (3/4) Epoch 9, batch 3850, loss[loss=0.2565, simple_loss=0.3465, pruned_loss=0.08327, over 4625.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2901, pruned_loss=0.06043, over 942190.44 frames. 
], batch size: 16, lr: 8.42e-03, grad_scale: 32.0 +2024-07-28 05:07:38,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=114173.33333333333, ans=0.5 +2024-07-28 05:07:41,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=114186.66666666667, ans=0.025 +2024-07-28 05:07:43,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=114186.66666666667, ans=0.0 +2024-07-28 05:07:46,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=114186.66666666667, ans=0.125 +2024-07-28 05:07:51,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114200.0, ans=0.125 +2024-07-28 05:07:57,333 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.654e+01 6.534e+01 7.463e+01 1.189e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 05:07:57,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=114213.33333333333, ans=0.125 +2024-07-28 05:07:57,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-28 05:08:00,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=114213.33333333333, ans=0.2 +2024-07-28 05:08:11,617 INFO [train.py:1114] (3/4) Epoch 9, batch 3900, loss[loss=0.2127, simple_loss=0.3087, pruned_loss=0.05836, over 4808.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2907, pruned_loss=0.06012, over 942511.60 frames. ], batch size: 14, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:20,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.21 vs. limit=15.0 +2024-07-28 05:08:29,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=114266.66666666667, ans=15.0 +2024-07-28 05:08:34,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=114280.0, ans=0.125 +2024-07-28 05:08:42,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=114293.33333333333, ans=0.2 +2024-07-28 05:08:44,763 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.05 vs. limit=6.0 +2024-07-28 05:08:45,016 INFO [train.py:1114] (3/4) Epoch 9, batch 3950, loss[loss=0.2624, simple_loss=0.3317, pruned_loss=0.09656, over 4848.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2911, pruned_loss=0.06055, over 944468.14 frames. 
], batch size: 16, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:08:52,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=114320.0, ans=0.125 +2024-07-28 05:08:55,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114320.0, ans=0.125 +2024-07-28 05:08:56,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114320.0, ans=0.1 +2024-07-28 05:09:03,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=114333.33333333333, ans=0.0 +2024-07-28 05:09:04,059 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.786e+01 6.190e+01 6.950e+01 9.125e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 05:09:17,989 INFO [train.py:1114] (3/4) Epoch 9, batch 4000, loss[loss=0.1712, simple_loss=0.253, pruned_loss=0.04471, over 4772.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2911, pruned_loss=0.06068, over 940721.45 frames. ], batch size: 12, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:09:20,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=114373.33333333333, ans=0.04949747468305833 +2024-07-28 05:09:22,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=114373.33333333333, ans=0.125 +2024-07-28 05:09:27,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=114386.66666666667, ans=0.125 +2024-07-28 05:09:31,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=114400.0, ans=0.125 +2024-07-28 05:09:32,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=114400.0, ans=0.035 +2024-07-28 05:09:39,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114413.33333333333, ans=0.1 +2024-07-28 05:09:53,449 INFO [train.py:1114] (3/4) Epoch 9, batch 4050, loss[loss=0.2686, simple_loss=0.3334, pruned_loss=0.1019, over 3568.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2906, pruned_loss=0.06045, over 939880.39 frames. ], batch size: 37, lr: 8.41e-03, grad_scale: 32.0 +2024-07-28 05:10:05,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.18 vs. limit=22.5 +2024-07-28 05:10:12,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.800e+01 6.025e+01 6.921e+01 7.969e+01 1.217e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 05:10:21,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.58 vs. limit=15.0 +2024-07-28 05:10:23,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=114493.33333333333, ans=0.125 +2024-07-28 05:10:25,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.17 vs. 
limit=15.0 +2024-07-28 05:10:26,735 INFO [train.py:1114] (3/4) Epoch 9, batch 4100, loss[loss=0.223, simple_loss=0.321, pruned_loss=0.06254, over 4900.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2919, pruned_loss=0.061, over 938687.25 frames. ], batch size: 15, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:10:29,751 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:10:42,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=114533.33333333333, ans=0.125 +2024-07-28 05:10:42,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=114533.33333333333, ans=0.07 +2024-07-28 05:10:42,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=114533.33333333333, ans=0.125 +2024-07-28 05:10:43,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=114533.33333333333, ans=0.1 +2024-07-28 05:10:47,832 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.41 vs. limit=10.0 +2024-07-28 05:10:59,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=114560.0, ans=0.5 +2024-07-28 05:10:59,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=114560.0, ans=0.0 +2024-07-28 05:11:01,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=114560.0, ans=0.2 +2024-07-28 05:11:02,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=114560.0, ans=0.09899494936611666 +2024-07-28 05:11:11,947 INFO [train.py:1114] (3/4) Epoch 9, batch 4150, loss[loss=0.2299, simple_loss=0.3173, pruned_loss=0.07126, over 4832.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2913, pruned_loss=0.06065, over 938417.91 frames. ], batch size: 13, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:11:14,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=114573.33333333333, ans=0.035 +2024-07-28 05:11:25,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=114600.0, ans=0.0 +2024-07-28 05:11:31,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.561e+01 6.118e+01 6.990e+01 1.145e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 05:11:38,930 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:11:42,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=114626.66666666667, ans=0.0 +2024-07-28 05:12:05,255 INFO [train.py:1114] (3/4) Epoch 9, batch 4200, loss[loss=0.1707, simple_loss=0.2607, pruned_loss=0.04034, over 4901.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2919, pruned_loss=0.06106, over 940164.69 frames. 
], batch size: 15, lr: 8.40e-03, grad_scale: 64.0 +2024-07-28 05:12:33,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=114653.33333333333, ans=0.0 +2024-07-28 05:12:35,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=114653.33333333333, ans=0.0 +2024-07-28 05:12:44,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.82 vs. limit=12.0 +2024-07-28 05:12:49,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=114680.0, ans=0.125 +2024-07-28 05:12:55,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=114693.33333333333, ans=0.125 +2024-07-28 05:12:57,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=114693.33333333333, ans=0.025 +2024-07-28 05:12:57,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=114693.33333333333, ans=0.2 +2024-07-28 05:12:58,964 INFO [train.py:1114] (3/4) Epoch 9, batch 4250, loss[loss=0.1677, simple_loss=0.2506, pruned_loss=0.04236, over 4640.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2924, pruned_loss=0.06115, over 941180.05 frames. ], batch size: 12, lr: 8.40e-03, grad_scale: 32.0 +2024-07-28 05:13:00,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.49 vs. limit=15.0 +2024-07-28 05:13:01,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114706.66666666667, ans=0.125 +2024-07-28 05:13:47,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=114733.33333333333, ans=0.125 +2024-07-28 05:13:49,189 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.532e+01 6.240e+01 7.121e+01 1.493e+02, threshold=1.248e+02, percent-clipped=1.0 +2024-07-28 05:13:52,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=114746.66666666667, ans=0.125 +2024-07-28 05:13:58,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=114760.0, ans=0.04949747468305833 +2024-07-28 05:14:04,261 INFO [train.py:1114] (3/4) Epoch 9, batch 4300, loss[loss=0.2305, simple_loss=0.33, pruned_loss=0.06553, over 4748.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2927, pruned_loss=0.0614, over 940472.05 frames. ], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:09,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=114773.33333333333, ans=0.1 +2024-07-28 05:14:39,642 INFO [train.py:1114] (3/4) Epoch 9, batch 4350, loss[loss=0.2084, simple_loss=0.2964, pruned_loss=0.06021, over 4760.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2921, pruned_loss=0.06057, over 941532.27 frames. 
], batch size: 13, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:14:47,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=114853.33333333333, ans=0.125 +2024-07-28 05:14:49,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=114853.33333333333, ans=0.125 +2024-07-28 05:14:51,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=114853.33333333333, ans=0.125 +2024-07-28 05:14:53,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=114853.33333333333, ans=0.125 +2024-07-28 05:15:01,417 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.655e+01 6.124e+01 6.925e+01 1.522e+02, threshold=1.225e+02, percent-clipped=1.0 +2024-07-28 05:15:09,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=114893.33333333333, ans=0.125 +2024-07-28 05:15:09,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=114893.33333333333, ans=0.125 +2024-07-28 05:15:14,982 INFO [train.py:1114] (3/4) Epoch 9, batch 4400, loss[loss=0.2086, simple_loss=0.2931, pruned_loss=0.06203, over 4810.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2924, pruned_loss=0.06101, over 941489.77 frames. ], batch size: 14, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:15:21,434 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.14 vs. limit=15.0 +2024-07-28 05:15:24,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.52 vs. limit=22.5 +2024-07-28 05:15:28,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0 +2024-07-28 05:15:33,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=15.0 +2024-07-28 05:15:45,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=114960.0, ans=0.125 +2024-07-28 05:15:49,342 INFO [train.py:1114] (3/4) Epoch 9, batch 4450, loss[loss=0.1712, simple_loss=0.2497, pruned_loss=0.04638, over 4941.00 frames. ], tot_loss[loss=0.208, simple_loss=0.293, pruned_loss=0.06146, over 939391.76 frames. 
], batch size: 12, lr: 8.39e-03, grad_scale: 32.0 +2024-07-28 05:16:03,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=115000.0, ans=0.2 +2024-07-28 05:16:09,214 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.739e+01 6.410e+01 7.552e+01 1.027e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:16:10,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=115013.33333333333, ans=0.0 +2024-07-28 05:16:16,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=115026.66666666667, ans=0.0 +2024-07-28 05:16:24,260 INFO [train.py:1114] (3/4) Epoch 9, batch 4500, loss[loss=0.1629, simple_loss=0.2501, pruned_loss=0.0378, over 4743.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2933, pruned_loss=0.06129, over 938355.31 frames. ], batch size: 14, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:27,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=115040.0, ans=0.0 +2024-07-28 05:16:35,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=115053.33333333333, ans=0.125 +2024-07-28 05:16:55,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=115093.33333333333, ans=0.125 +2024-07-28 05:16:58,146 INFO [train.py:1114] (3/4) Epoch 9, batch 4550, loss[loss=0.2111, simple_loss=0.2877, pruned_loss=0.06723, over 4889.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.293, pruned_loss=0.06115, over 939964.66 frames. ], batch size: 13, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:16:59,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.14 vs. limit=15.0 +2024-07-28 05:17:08,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=115120.0, ans=0.125 +2024-07-28 05:17:38,912 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.875e+01 5.713e+01 6.359e+01 7.183e+01 1.180e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 05:17:39,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115146.66666666667, ans=0.1 +2024-07-28 05:17:39,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=115146.66666666667, ans=0.125 +2024-07-28 05:17:44,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=12.0 +2024-07-28 05:17:50,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=115160.0, ans=0.04949747468305833 +2024-07-28 05:17:52,566 INFO [train.py:1114] (3/4) Epoch 9, batch 4600, loss[loss=0.2319, simple_loss=0.3238, pruned_loss=0.07002, over 4472.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2922, pruned_loss=0.06092, over 938246.16 frames. 
], batch size: 21, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:17:53,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=115173.33333333333, ans=0.0 +2024-07-28 05:18:09,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115200.0, ans=0.125 +2024-07-28 05:19:34,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=115226.66666666667, ans=0.125 +2024-07-28 05:19:46,530 INFO [train.py:1114] (3/4) Epoch 9, batch 4650, loss[loss=0.2541, simple_loss=0.325, pruned_loss=0.09164, over 4842.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2929, pruned_loss=0.06122, over 940020.02 frames. ], batch size: 16, lr: 8.38e-03, grad_scale: 32.0 +2024-07-28 05:19:59,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115253.33333333333, ans=0.125 +2024-07-28 05:20:04,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=115266.66666666667, ans=10.0 +2024-07-28 05:20:19,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=115266.66666666667, ans=0.1 +2024-07-28 05:20:20,851 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.631e+01 6.409e+01 7.272e+01 9.674e+01, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 05:20:27,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.84 vs. limit=15.0 +2024-07-28 05:20:27,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=115280.0, ans=0.5 +2024-07-28 05:20:33,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=115293.33333333333, ans=0.125 +2024-07-28 05:20:37,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115293.33333333333, ans=0.1 +2024-07-28 05:20:39,175 INFO [train.py:1114] (3/4) Epoch 9, batch 4700, loss[loss=0.1737, simple_loss=0.2615, pruned_loss=0.04302, over 4695.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2923, pruned_loss=0.06118, over 937390.67 frames. ], batch size: 11, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:20:42,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.45 vs. 
limit=15.0 +2024-07-28 05:20:48,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115320.0, ans=0.125 +2024-07-28 05:20:54,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=115333.33333333333, ans=0.035 +2024-07-28 05:20:54,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=115333.33333333333, ans=0.1 +2024-07-28 05:21:02,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=115346.66666666667, ans=0.0 +2024-07-28 05:21:04,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=115346.66666666667, ans=0.05 +2024-07-28 05:21:04,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=115346.66666666667, ans=0.0 +2024-07-28 05:21:12,377 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:21:12,803 INFO [train.py:1114] (3/4) Epoch 9, batch 4750, loss[loss=0.2159, simple_loss=0.3031, pruned_loss=0.06433, over 4510.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2923, pruned_loss=0.06117, over 935689.66 frames. ], batch size: 21, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:13,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=115373.33333333333, ans=0.0 +2024-07-28 05:21:33,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=115400.0, ans=0.07 +2024-07-28 05:21:34,803 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.742e+01 6.606e+01 7.346e+01 1.206e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 05:21:50,046 INFO [train.py:1114] (3/4) Epoch 9, batch 4800, loss[loss=0.1931, simple_loss=0.2844, pruned_loss=0.05095, over 4690.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2916, pruned_loss=0.06116, over 933033.88 frames. ], batch size: 13, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:21:51,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.90 vs. limit=15.0 +2024-07-28 05:21:53,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.14 vs. limit=15.0 +2024-07-28 05:22:12,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=115453.33333333333, ans=0.2 +2024-07-28 05:22:16,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115466.66666666667, ans=0.125 +2024-07-28 05:22:20,034 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:22:30,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=115480.0, ans=0.02 +2024-07-28 05:22:41,487 INFO [train.py:1114] (3/4) Epoch 9, batch 4850, loss[loss=0.1958, simple_loss=0.2836, pruned_loss=0.05403, over 4737.00 frames. 
], tot_loss[loss=0.2071, simple_loss=0.2917, pruned_loss=0.06125, over 932806.05 frames. ], batch size: 14, lr: 8.37e-03, grad_scale: 32.0 +2024-07-28 05:22:43,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=115506.66666666667, ans=0.125 +2024-07-28 05:22:44,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=115506.66666666667, ans=0.125 +2024-07-28 05:22:48,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=115506.66666666667, ans=0.0 +2024-07-28 05:23:04,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.04 vs. limit=22.5 +2024-07-28 05:23:04,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115533.33333333333, ans=0.1 +2024-07-28 05:23:06,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.52 vs. limit=15.0 +2024-07-28 05:23:07,386 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.778e+01 6.431e+01 7.298e+01 1.043e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 05:23:16,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=115560.0, ans=0.0 +2024-07-28 05:23:20,763 INFO [train.py:1114] (3/4) Epoch 9, batch 4900, loss[loss=0.1797, simple_loss=0.2664, pruned_loss=0.04653, over 4760.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2915, pruned_loss=0.06122, over 934416.17 frames. ], batch size: 13, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:23:29,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.71 vs. limit=15.0 +2024-07-28 05:23:36,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=115586.66666666667, ans=0.0 +2024-07-28 05:23:37,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.66 vs. limit=22.5 +2024-07-28 05:23:43,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=115600.0, ans=0.2 +2024-07-28 05:23:44,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=115600.0, ans=0.2 +2024-07-28 05:23:55,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.79 vs. limit=12.0 +2024-07-28 05:23:59,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=115626.66666666667, ans=0.05 +2024-07-28 05:24:00,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=115626.66666666667, ans=0.125 +2024-07-28 05:24:09,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.79 vs. 
limit=15.0 +2024-07-28 05:24:10,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=115626.66666666667, ans=0.015 +2024-07-28 05:24:12,918 INFO [train.py:1114] (3/4) Epoch 9, batch 4950, loss[loss=0.3052, simple_loss=0.3559, pruned_loss=0.1272, over 3510.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2934, pruned_loss=0.06268, over 931648.73 frames. ], batch size: 35, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:24:13,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0 +2024-07-28 05:24:23,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=115653.33333333333, ans=0.125 +2024-07-28 05:24:27,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.10 vs. limit=15.0 +2024-07-28 05:24:35,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.843e+01 5.657e+01 6.231e+01 6.947e+01 1.249e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 05:24:41,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=115680.0, ans=0.125 +2024-07-28 05:24:46,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=115680.0, ans=0.125 +2024-07-28 05:25:13,968 INFO [train.py:1114] (3/4) Epoch 9, batch 5000, loss[loss=0.1808, simple_loss=0.2702, pruned_loss=0.04572, over 4660.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2928, pruned_loss=0.06239, over 935223.47 frames. ], batch size: 14, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:25:54,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=115720.0, ans=0.07 +2024-07-28 05:25:58,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=115733.33333333333, ans=0.1 +2024-07-28 05:26:37,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=115760.0, ans=0.125 +2024-07-28 05:26:42,519 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:26:44,474 INFO [train.py:1114] (3/4) Epoch 9, batch 5050, loss[loss=0.1756, simple_loss=0.2536, pruned_loss=0.0488, over 4854.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2912, pruned_loss=0.06161, over 937464.04 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:27:01,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=115773.33333333333, ans=0.125 +2024-07-28 05:27:11,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115800.0, ans=0.1 +2024-07-28 05:27:14,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. 
limit=15.0 +2024-07-28 05:27:29,888 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.024e+01 5.915e+01 6.647e+01 7.788e+01 1.077e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 05:27:32,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=115813.33333333333, ans=0.125 +2024-07-28 05:27:35,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=115813.33333333333, ans=0.025 +2024-07-28 05:27:37,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=115813.33333333333, ans=0.2 +2024-07-28 05:27:40,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=115826.66666666667, ans=0.125 +2024-07-28 05:27:48,484 INFO [train.py:1114] (3/4) Epoch 9, batch 5100, loss[loss=0.1807, simple_loss=0.2656, pruned_loss=0.0479, over 4774.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2921, pruned_loss=0.06218, over 934829.11 frames. ], batch size: 12, lr: 8.36e-03, grad_scale: 32.0 +2024-07-28 05:28:13,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=115840.0, ans=0.1 +2024-07-28 05:28:26,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=115866.66666666667, ans=0.125 +2024-07-28 05:28:26,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.55 vs. limit=15.0 +2024-07-28 05:28:28,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=115866.66666666667, ans=0.0 +2024-07-28 05:28:44,213 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:28:44,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=12.0 +2024-07-28 05:28:47,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=115893.33333333333, ans=0.125 +2024-07-28 05:28:59,104 INFO [train.py:1114] (3/4) Epoch 9, batch 5150, loss[loss=0.2138, simple_loss=0.3012, pruned_loss=0.06318, over 4822.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2926, pruned_loss=0.06213, over 935786.17 frames. ], batch size: 16, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:29:13,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.06 vs. limit=15.0 +2024-07-28 05:29:36,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=115933.33333333333, ans=0.125 +2024-07-28 05:29:37,795 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.832e+01 5.668e+01 6.329e+01 7.486e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 05:29:51,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=115960.0, ans=0.0 +2024-07-28 05:29:56,322 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.06 vs. 
limit=12.0 +2024-07-28 05:30:01,871 INFO [train.py:1114] (3/4) Epoch 9, batch 5200, loss[loss=0.2046, simple_loss=0.2981, pruned_loss=0.05559, over 4663.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2917, pruned_loss=0.06147, over 935995.54 frames. ], batch size: 14, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:30:04,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=115973.33333333333, ans=0.07 +2024-07-28 05:30:08,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=115986.66666666667, ans=0.5 +2024-07-28 05:30:16,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.91 vs. limit=15.0 +2024-07-28 05:30:18,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=115986.66666666667, ans=0.025 +2024-07-28 05:30:20,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116000.0, ans=0.0 +2024-07-28 05:30:29,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=116013.33333333333, ans=0.2 +2024-07-28 05:30:42,782 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.17 vs. limit=15.0 +2024-07-28 05:30:43,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=116026.66666666667, ans=0.125 +2024-07-28 05:30:47,194 INFO [train.py:1114] (3/4) Epoch 9, batch 5250, loss[loss=0.1893, simple_loss=0.2774, pruned_loss=0.05061, over 4899.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2921, pruned_loss=0.06221, over 936153.86 frames. 
], batch size: 13, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:30:48,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=116040.0, ans=0.125 +2024-07-28 05:30:49,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=116040.0, ans=0.1 +2024-07-28 05:30:49,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=116040.0, ans=0.125 +2024-07-28 05:30:50,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=116040.0, ans=0.0 +2024-07-28 05:31:05,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=116053.33333333333, ans=0.125 +2024-07-28 05:31:07,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=116053.33333333333, ans=0.125 +2024-07-28 05:31:13,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer_na.min_abs, batch_count=116053.33333333333, ans=0.02 +2024-07-28 05:31:22,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=116066.66666666667, ans=0.0 +2024-07-28 05:31:31,297 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.193e+01 5.667e+01 6.856e+01 8.237e+01 1.145e+02, threshold=1.371e+02, percent-clipped=0.0 +2024-07-28 05:31:44,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=116093.33333333333, ans=0.0 +2024-07-28 05:31:59,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=116093.33333333333, ans=0.125 +2024-07-28 05:32:01,287 INFO [train.py:1114] (3/4) Epoch 9, batch 5300, loss[loss=0.2089, simple_loss=0.3007, pruned_loss=0.05851, over 4647.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2915, pruned_loss=0.0622, over 934706.62 frames. ], batch size: 16, lr: 8.35e-03, grad_scale: 32.0 +2024-07-28 05:32:17,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.81 vs. 
limit=15.0 +2024-07-28 05:32:19,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=116120.0, ans=0.1 +2024-07-28 05:32:23,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=116133.33333333333, ans=10.0 +2024-07-28 05:32:23,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=116133.33333333333, ans=0.04949747468305833 +2024-07-28 05:32:27,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=116146.66666666667, ans=0.0 +2024-07-28 05:32:33,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=116146.66666666667, ans=0.125 +2024-07-28 05:32:39,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=116160.0, ans=0.2 +2024-07-28 05:32:43,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=116160.0, ans=0.0 +2024-07-28 05:32:44,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116160.0, ans=0.1 +2024-07-28 05:32:46,257 INFO [train.py:1114] (3/4) Epoch 9, batch 5350, loss[loss=0.1828, simple_loss=0.2619, pruned_loss=0.0518, over 4499.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2917, pruned_loss=0.06191, over 936505.58 frames. ], batch size: 10, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:32:49,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116173.33333333333, ans=0.125 +2024-07-28 05:33:05,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=116200.0, ans=0.125 +2024-07-28 05:33:09,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=116200.0, ans=0.125 +2024-07-28 05:33:14,381 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.909e+01 6.357e+01 7.144e+01 1.044e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 05:33:18,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=116213.33333333333, ans=0.125 +2024-07-28 05:33:22,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=116213.33333333333, ans=0.025 +2024-07-28 05:33:36,867 INFO [train.py:1114] (3/4) Epoch 9, batch 5400, loss[loss=0.2204, simple_loss=0.3042, pruned_loss=0.06832, over 4325.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2925, pruned_loss=0.06194, over 930863.80 frames. 
], batch size: 25, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:33:43,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=116253.33333333333, ans=0.125 +2024-07-28 05:33:47,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=116253.33333333333, ans=0.025 +2024-07-28 05:33:57,820 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.21 vs. limit=15.0 +2024-07-28 05:34:02,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=116266.66666666667, ans=0.025 +2024-07-28 05:34:04,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.77 vs. limit=22.5 +2024-07-28 05:34:09,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=116280.0, ans=0.0 +2024-07-28 05:34:22,030 INFO [train.py:1114] (3/4) Epoch 9, batch 5450, loss[loss=0.2477, simple_loss=0.3088, pruned_loss=0.0933, over 4696.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2922, pruned_loss=0.06122, over 933505.45 frames. ], batch size: 11, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:34:24,563 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.81 vs. limit=12.0 +2024-07-28 05:34:30,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.18 vs. limit=15.0 +2024-07-28 05:34:35,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.16 vs. limit=15.0 +2024-07-28 05:34:52,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 05:34:53,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=116333.33333333333, ans=0.1 +2024-07-28 05:34:58,372 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.949e+01 6.805e+01 7.625e+01 9.971e+01, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 05:35:14,812 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:35:16,768 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:35:22,099 INFO [train.py:1114] (3/4) Epoch 9, batch 5500, loss[loss=0.2627, simple_loss=0.3425, pruned_loss=0.09145, over 4204.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2917, pruned_loss=0.06096, over 930517.94 frames. ], batch size: 25, lr: 8.34e-03, grad_scale: 32.0 +2024-07-28 05:35:39,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=116400.0, ans=0.125 +2024-07-28 05:35:56,269 INFO [train.py:1114] (3/4) Epoch 9, batch 5550, loss[loss=0.1732, simple_loss=0.2634, pruned_loss=0.04147, over 4704.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2917, pruned_loss=0.06139, over 932500.13 frames. 
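
The recurring `WARNING [optim.py:487]` entries summarize the optimizer's adaptive gradient clipping: the five numbers are min/25%/median/75%/max of recent gradient norms, and in every entry in this log the reported threshold equals `Clipping_scale` times the median (e.g. 2.0 × 6.606e+01 = 1.321e+02), with `percent-clipped` counting how often the norm exceeded it. A hedged sketch of that scheme follows; the class name and buffer size are illustrative, not the real `optim.py`.

```python
import statistics
from collections import deque

class MedianGradClipper:
    """Clip gradient norms against clipping_scale * median of recent norms.
    Sketch reconstructed from the log's WARNING lines."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)
        self.clipped = 0
        self.seen = 0

    def scale_for(self, grad_norm: float) -> float:
        """Return the factor (<= 1.0) to multiply gradients by."""
        self.norms.append(grad_norm)
        self.seen += 1
        threshold = self.clipping_scale * statistics.median(self.norms)
        if grad_norm > threshold:
            self.clipped += 1
            return threshold / grad_norm
        return 1.0

    def report(self) -> str:
        qs = statistics.quantiles(self.norms, n=4)    # 25/50/75th percentiles
        return (f"grad-norm quartiles {min(self.norms):.3e} {qs[0]:.3e} "
                f"{qs[1]:.3e} {qs[2]:.3e} {max(self.norms):.3e}, "
                f"threshold={self.clipping_scale * qs[1]:.3e}, "
                f"percent-clipped={100 * self.clipped / self.seen:.1f}")
```

Basing the threshold on a running median rather than a fixed constant keeps the clipping rate roughly stable as gradient magnitudes drift during training, which is why `percent-clipped` stays at 0.0 in almost all of the entries here. The log resumes below.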
], batch size: 12, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:35:57,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=116440.0, ans=0.0 +2024-07-28 05:36:09,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116453.33333333333, ans=0.125 +2024-07-28 05:36:10,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-07-28 05:36:16,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=116480.0, ans=0.0 +2024-07-28 05:36:16,590 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:36:17,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.683e+01 6.413e+01 7.380e+01 1.098e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 05:36:18,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=116480.0, ans=0.125 +2024-07-28 05:36:24,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0 +2024-07-28 05:36:31,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=116493.33333333333, ans=0.05 +2024-07-28 05:36:35,082 INFO [train.py:1114] (3/4) Epoch 9, batch 5600, loss[loss=0.1874, simple_loss=0.2685, pruned_loss=0.05317, over 4749.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2914, pruned_loss=0.0612, over 933800.34 frames. ], batch size: 14, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:36:49,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=116533.33333333333, ans=0.125 +2024-07-28 05:37:08,963 INFO [train.py:1114] (3/4) Epoch 9, batch 5650, loss[loss=0.2047, simple_loss=0.2819, pruned_loss=0.06377, over 4580.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2897, pruned_loss=0.06015, over 936469.32 frames. ], batch size: 21, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:37:09,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=116573.33333333333, ans=0.125 +2024-07-28 05:37:11,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.55 vs. 
limit=15.0 +2024-07-28 05:37:12,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=116573.33333333333, ans=0.05 +2024-07-28 05:37:17,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116586.66666666667, ans=0.0 +2024-07-28 05:37:27,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=116600.0, ans=0.0 +2024-07-28 05:37:28,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.968e+01 5.748e+01 6.232e+01 7.231e+01 1.019e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 05:37:43,669 INFO [train.py:1114] (3/4) Epoch 9, batch 5700, loss[loss=0.1972, simple_loss=0.2864, pruned_loss=0.05394, over 4695.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2903, pruned_loss=0.05979, over 937543.59 frames. ], batch size: 13, lr: 8.33e-03, grad_scale: 32.0 +2024-07-28 05:37:45,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=116640.0, ans=0.1 +2024-07-28 05:37:59,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=116666.66666666667, ans=0.125 +2024-07-28 05:38:10,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=116680.0, ans=0.125 +2024-07-28 05:38:13,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.27 vs. limit=6.0 +2024-07-28 05:38:15,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=15.0 +2024-07-28 05:38:18,380 INFO [train.py:1114] (3/4) Epoch 9, batch 5750, loss[loss=0.2466, simple_loss=0.3281, pruned_loss=0.08251, over 4699.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2903, pruned_loss=0.05984, over 937801.34 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:38:25,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=116706.66666666667, ans=0.1 +2024-07-28 05:38:34,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-07-28 05:38:37,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-28 05:38:43,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=116733.33333333333, ans=0.125 +2024-07-28 05:38:47,983 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.019e+01 5.728e+01 6.287e+01 7.264e+01 1.232e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 05:39:03,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=116746.66666666667, ans=0.125 +2024-07-28 05:39:15,876 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:39:17,867 INFO [train.py:1114] (3/4) Epoch 9, batch 5800, loss[loss=0.2392, simple_loss=0.3161, pruned_loss=0.08113, over 4755.00 frames. 
], tot_loss[loss=0.2068, simple_loss=0.2919, pruned_loss=0.06092, over 937098.44 frames. ], batch size: 19, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:39:18,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=116773.33333333333, ans=0.125 +2024-07-28 05:39:22,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=116773.33333333333, ans=0.125 +2024-07-28 05:39:38,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.60 vs. limit=15.0 +2024-07-28 05:39:40,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=116800.0, ans=0.0 +2024-07-28 05:39:43,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.79 vs. limit=22.5 +2024-07-28 05:39:46,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.34 vs. limit=15.0 +2024-07-28 05:40:02,722 INFO [train.py:1114] (3/4) Epoch 9, batch 5850, loss[loss=0.2381, simple_loss=0.317, pruned_loss=0.07965, over 4449.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2923, pruned_loss=0.06139, over 938133.71 frames. ], batch size: 21, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:40:08,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=116840.0, ans=0.125 +2024-07-28 05:40:25,339 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.305e+01 6.200e+01 6.975e+01 8.009e+01 1.394e+02, threshold=1.395e+02, percent-clipped=2.0 +2024-07-28 05:40:32,780 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:40:38,639 INFO [train.py:1114] (3/4) Epoch 9, batch 5900, loss[loss=0.1939, simple_loss=0.2966, pruned_loss=0.0456, over 4697.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2926, pruned_loss=0.06152, over 938209.03 frames. ], batch size: 15, lr: 8.32e-03, grad_scale: 32.0 +2024-07-28 05:40:45,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=116906.66666666667, ans=0.0 +2024-07-28 05:40:52,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.18 vs. limit=10.0 +2024-07-28 05:40:53,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=116920.0, ans=0.05 +2024-07-28 05:41:02,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.35 vs. limit=15.0 +2024-07-28 05:41:12,382 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 05:41:15,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.65 vs. limit=10.0 +2024-07-28 05:41:17,907 INFO [train.py:1114] (3/4) Epoch 9, batch 5950, loss[loss=0.2373, simple_loss=0.3279, pruned_loss=0.07331, over 4674.00 frames. 
], tot_loss[loss=0.2074, simple_loss=0.2925, pruned_loss=0.06116, over 940074.16 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:41:19,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.80 vs. limit=15.0 +2024-07-28 05:41:20,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=116973.33333333333, ans=0.1 +2024-07-28 05:41:37,800 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.843e+01 6.530e+01 7.569e+01 1.342e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 05:41:51,468 INFO [train.py:1114] (3/4) Epoch 9, batch 6000, loss[loss=0.2085, simple_loss=0.2934, pruned_loss=0.06178, over 4168.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2933, pruned_loss=0.06156, over 936710.84 frames. ], batch size: 25, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:41:51,469 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 05:41:57,768 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.4076, 2.9704, 2.8316, 2.7061], device='cuda:3') +2024-07-28 05:42:03,634 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6861, 4.4155, 3.3936, 3.2016], device='cuda:3') +2024-07-28 05:42:04,275 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([4.1364, 2.7754, 3.3694, 3.9766, 4.0751, 3.1796, 3.9366, 3.0694], + device='cuda:3') +2024-07-28 05:42:05,184 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.175, simple_loss=0.2796, pruned_loss=0.03521, over 944034.00 frames. +2024-07-28 05:42:05,184 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 05:42:08,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.31 vs. limit=12.0 +2024-07-28 05:42:16,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=117053.33333333333, ans=0.0 +2024-07-28 05:42:25,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=117080.0, ans=0.95 +2024-07-28 05:42:29,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=117080.0, ans=0.2 +2024-07-28 05:42:36,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=117093.33333333333, ans=12.0 +2024-07-28 05:42:39,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117106.66666666667, ans=0.125 +2024-07-28 05:42:39,951 INFO [train.py:1114] (3/4) Epoch 9, batch 6050, loss[loss=0.195, simple_loss=0.2655, pruned_loss=0.06228, over 4781.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2926, pruned_loss=0.06153, over 938148.07 frames. 
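
During the validation pass at batch 6000 above, `zipformer.py:1858` prints `attn_weights_entropy` per attention head: a diagnostic for how diffuse each head's attention is (values near zero mean near-one-hot attention; larger values mean attention spread over many positions). A minimal sketch of that statistic as the mean Shannon entropy of the attention distribution, in nats; the tensor shapes and reduction are assumptions, not the exact zipformer computation.

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Mean entropy (nats) of attention weights per head.

    attn: (num_heads, query_len, key_len), rows summing to 1.
    Returns a (num_heads,) tensor like the ones printed in the log.
    """
    eps = 1.0e-20                                   # avoid log(0)
    h = -(attn * (attn + eps).log()).sum(dim=-1)    # (num_heads, query_len)
    return h.mean(dim=-1)                           # average over queries

# 8 heads attending uniformly over 50 keys -> entropy = ln(50) ~ 3.91,
# the same order as the values logged above (roughly 2.7 to 4.4):
uniform = torch.full((8, 10, 50), 1 / 50)
print(attn_weights_entropy(uniform))
```

The log resumes below.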
], batch size: 12, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:42:40,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=117106.66666666667, ans=0.0 +2024-07-28 05:42:54,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117120.0, ans=0.1 +2024-07-28 05:42:58,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=117133.33333333333, ans=0.2 +2024-07-28 05:42:59,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=117133.33333333333, ans=0.2 +2024-07-28 05:43:05,319 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.753e+01 6.307e+01 7.312e+01 1.282e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 05:43:05,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117146.66666666667, ans=0.125 +2024-07-28 05:43:18,565 INFO [train.py:1114] (3/4) Epoch 9, batch 6100, loss[loss=0.2191, simple_loss=0.3091, pruned_loss=0.06455, over 4675.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2916, pruned_loss=0.06086, over 938077.35 frames. ], batch size: 15, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:43:22,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=117173.33333333333, ans=0.05 +2024-07-28 05:43:23,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=117173.33333333333, ans=0.1 +2024-07-28 05:43:27,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.45 vs. limit=12.0 +2024-07-28 05:43:46,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=117213.33333333333, ans=0.0 +2024-07-28 05:43:46,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117226.66666666667, ans=0.125 +2024-07-28 05:43:47,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=117226.66666666667, ans=0.0 +2024-07-28 05:43:53,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=117240.0, ans=0.0 +2024-07-28 05:43:53,990 INFO [train.py:1114] (3/4) Epoch 9, batch 6150, loss[loss=0.2919, simple_loss=0.3388, pruned_loss=0.1225, over 3392.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2918, pruned_loss=0.06054, over 937499.45 frames. ], batch size: 37, lr: 8.31e-03, grad_scale: 32.0 +2024-07-28 05:44:15,650 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.025e+01 5.523e+01 6.022e+01 6.962e+01 1.002e+02, threshold=1.204e+02, percent-clipped=1.0 +2024-07-28 05:44:32,086 INFO [train.py:1114] (3/4) Epoch 9, batch 6200, loss[loss=0.1854, simple_loss=0.2933, pruned_loss=0.03871, over 4741.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.291, pruned_loss=0.06015, over 937035.92 frames. 
], batch size: 14, lr: 8.30e-03, grad_scale: 32.0 +2024-07-28 05:44:36,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=117306.66666666667, ans=0.0 +2024-07-28 05:44:49,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=117320.0, ans=0.025 +2024-07-28 05:44:51,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=117333.33333333333, ans=0.125 +2024-07-28 05:45:04,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=117346.66666666667, ans=0.125 +2024-07-28 05:45:05,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117346.66666666667, ans=0.125 +2024-07-28 05:45:07,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=117346.66666666667, ans=0.025 +2024-07-28 05:45:20,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=117360.0, ans=0.0 +2024-07-28 05:45:22,590 INFO [train.py:1114] (3/4) Epoch 9, batch 6250, loss[loss=0.226, simple_loss=0.318, pruned_loss=0.06705, over 4807.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.06076, over 933601.63 frames. ], batch size: 14, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:45:26,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=117373.33333333333, ans=0.125 +2024-07-28 05:45:30,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=117373.33333333333, ans=0.1 +2024-07-28 05:45:32,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.50 vs. limit=22.5 +2024-07-28 05:45:39,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. limit=15.0 +2024-07-28 05:45:40,887 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-28 05:45:42,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=117386.66666666667, ans=0.125 +2024-07-28 05:45:43,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=117386.66666666667, ans=0.125 +2024-07-28 05:45:45,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=117400.0, ans=0.125 +2024-07-28 05:45:52,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.71 vs. 
limit=15.0 +2024-07-28 05:45:57,915 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.368e+01 5.807e+01 6.495e+01 7.426e+01 1.051e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 05:46:02,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=117413.33333333333, ans=0.09899494936611666 +2024-07-28 05:46:11,019 INFO [train.py:1114] (3/4) Epoch 9, batch 6300, loss[loss=0.1818, simple_loss=0.2617, pruned_loss=0.05091, over 4538.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.06048, over 929447.14 frames. ], batch size: 10, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:46:23,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.78 vs. limit=15.0 +2024-07-28 05:46:24,878 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.63 vs. limit=6.0 +2024-07-28 05:46:25,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=117466.66666666667, ans=0.0 +2024-07-28 05:46:27,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=117466.66666666667, ans=0.1 +2024-07-28 05:46:45,006 INFO [train.py:1114] (3/4) Epoch 9, batch 6350, loss[loss=0.2049, simple_loss=0.2918, pruned_loss=0.05903, over 4463.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2896, pruned_loss=0.06004, over 933493.12 frames. ], batch size: 21, lr: 8.30e-03, grad_scale: 64.0 +2024-07-28 05:46:50,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=117506.66666666667, ans=0.125 +2024-07-28 05:46:55,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.16 vs. limit=15.0 +2024-07-28 05:47:00,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=117533.33333333333, ans=0.0 +2024-07-28 05:47:05,465 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.641e+01 6.337e+01 7.331e+01 1.035e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 05:47:10,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=117546.66666666667, ans=0.0 +2024-07-28 05:47:18,814 INFO [train.py:1114] (3/4) Epoch 9, batch 6400, loss[loss=0.2255, simple_loss=0.3112, pruned_loss=0.06992, over 4634.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.289, pruned_loss=0.05987, over 935395.16 frames. 
], batch size: 13, lr: 8.29e-03, grad_scale: 64.0 +2024-07-28 05:47:24,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=117573.33333333333, ans=10.0 +2024-07-28 05:47:35,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten.whitening_limit, batch_count=117600.0, ans=22.5 +2024-07-28 05:47:46,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=117626.66666666667, ans=0.125 +2024-07-28 05:47:48,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.68 vs. limit=12.0 +2024-07-28 05:47:51,886 INFO [train.py:1114] (3/4) Epoch 9, batch 6450, loss[loss=0.2288, simple_loss=0.315, pruned_loss=0.07125, over 4376.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2903, pruned_loss=0.05983, over 938763.20 frames. ], batch size: 21, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:47:54,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=117640.0, ans=0.025 +2024-07-28 05:47:54,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.80 vs. limit=15.0 +2024-07-28 05:47:59,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.09 vs. limit=15.0 +2024-07-28 05:48:04,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.44 vs. limit=15.0 +2024-07-28 05:48:05,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=117666.66666666667, ans=0.125 +2024-07-28 05:48:06,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-28 05:48:09,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=15.0 +2024-07-28 05:48:12,203 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.777e+01 6.265e+01 7.458e+01 1.073e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 05:48:17,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.09 vs. limit=15.0 +2024-07-28 05:48:17,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=117693.33333333333, ans=0.0 +2024-07-28 05:48:24,568 INFO [train.py:1114] (3/4) Epoch 9, batch 6500, loss[loss=0.2944, simple_loss=0.349, pruned_loss=0.1199, over 3311.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2902, pruned_loss=0.05957, over 939983.23 frames. 
], batch size: 35, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:48:28,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=117706.66666666667, ans=0.125 +2024-07-28 05:48:41,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=117733.33333333333, ans=0.125 +2024-07-28 05:48:42,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=117733.33333333333, ans=0.125 +2024-07-28 05:48:44,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=117746.66666666667, ans=0.0 +2024-07-28 05:49:02,990 INFO [train.py:1114] (3/4) Epoch 9, batch 6550, loss[loss=0.1797, simple_loss=0.2586, pruned_loss=0.05037, over 4800.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2892, pruned_loss=0.05877, over 942881.60 frames. ], batch size: 11, lr: 8.29e-03, grad_scale: 32.0 +2024-07-28 05:49:17,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=17.05 vs. limit=22.5 +2024-07-28 05:49:21,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=117800.0, ans=0.07 +2024-07-28 05:49:21,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=117800.0, ans=0.07 +2024-07-28 05:49:23,500 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.652e+01 6.284e+01 7.270e+01 1.094e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 05:49:27,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=117813.33333333333, ans=0.0 +2024-07-28 05:49:30,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=117826.66666666667, ans=0.125 +2024-07-28 05:49:35,909 INFO [train.py:1114] (3/4) Epoch 9, batch 6600, loss[loss=0.1895, simple_loss=0.2789, pruned_loss=0.05002, over 4932.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2898, pruned_loss=0.05928, over 944853.22 frames. ], batch size: 14, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:49:52,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=117866.66666666667, ans=0.0 +2024-07-28 05:49:52,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=117866.66666666667, ans=0.125 +2024-07-28 05:50:08,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=117893.33333333333, ans=0.125 +2024-07-28 05:50:12,819 INFO [train.py:1114] (3/4) Epoch 9, batch 6650, loss[loss=0.2071, simple_loss=0.2935, pruned_loss=0.06041, over 4638.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2893, pruned_loss=0.05925, over 943598.82 frames. 
], batch size: 17, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:50:15,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=117906.66666666667, ans=0.0 +2024-07-28 05:50:17,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=117906.66666666667, ans=0.2 +2024-07-28 05:50:18,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.86 vs. limit=15.0 +2024-07-28 05:50:19,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=117920.0, ans=0.125 +2024-07-28 05:50:28,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=117933.33333333333, ans=0.125 +2024-07-28 05:50:28,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=117933.33333333333, ans=0.0 +2024-07-28 05:50:35,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.735e+01 6.176e+01 7.286e+01 9.615e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 05:50:38,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=117946.66666666667, ans=0.125 +2024-07-28 05:50:41,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=117960.0, ans=0.0 +2024-07-28 05:50:43,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.82 vs. limit=22.5 +2024-07-28 05:50:48,323 INFO [train.py:1114] (3/4) Epoch 9, batch 6700, loss[loss=0.2456, simple_loss=0.3391, pruned_loss=0.07607, over 4687.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2891, pruned_loss=0.05929, over 942247.55 frames. ], batch size: 19, lr: 8.28e-03, grad_scale: 32.0 +2024-07-28 05:50:48,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=117973.33333333333, ans=0.125 +2024-07-28 05:51:09,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=118013.33333333333, ans=0.0 +2024-07-28 05:51:22,052 INFO [train.py:1114] (3/4) Epoch 9, batch 6750, loss[loss=0.2371, simple_loss=0.3193, pruned_loss=0.07745, over 4300.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2889, pruned_loss=0.0594, over 940568.52 frames. 
], batch size: 25, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:51:29,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118053.33333333333, ans=0.125
+2024-07-28 05:51:30,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=118053.33333333333, ans=0.025
+2024-07-28 05:51:38,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=118066.66666666667, ans=0.125
+2024-07-28 05:51:42,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.833e+01 6.338e+01 7.124e+01 1.183e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 05:51:55,775 INFO [train.py:1114] (3/4) Epoch 9, batch 6800, loss[loss=0.2154, simple_loss=0.3078, pruned_loss=0.06153, over 4643.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2899, pruned_loss=0.05952, over 938711.26 frames. ], batch size: 13, lr: 8.28e-03, grad_scale: 32.0
+2024-07-28 05:51:57,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=118106.66666666667, ans=0.125
+2024-07-28 05:51:58,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.55 vs. limit=10.0
+2024-07-28 05:52:01,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=118106.66666666667, ans=0.125
+2024-07-28 05:52:01,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=118106.66666666667, ans=0.125
+2024-07-28 05:52:04,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118120.0, ans=0.125
+2024-07-28 05:52:13,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=118133.33333333333, ans=0.0
+2024-07-28 05:52:13,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=118133.33333333333, ans=0.2
+2024-07-28 05:52:16,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=118133.33333333333, ans=0.0
+2024-07-28 05:52:16,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=118133.33333333333, ans=0.0
+2024-07-28 05:52:17,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.27 vs. limit=10.0
+2024-07-28 05:52:19,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=118146.66666666667, ans=0.2
+2024-07-28 05:52:30,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.97 vs. limit=15.0
+2024-07-28 05:52:32,164 INFO [train.py:1114] (3/4) Epoch 9, batch 6850, loss[loss=0.2058, simple_loss=0.2938, pruned_loss=0.05894, over 4683.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2895, pruned_loss=0.05918, over 939958.45 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:52:35,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.68 vs. limit=10.0
+2024-07-28 05:52:36,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.30 vs. limit=22.5
+2024-07-28 05:52:43,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118186.66666666667, ans=0.125
+2024-07-28 05:52:51,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.52 vs. limit=15.0
+2024-07-28 05:52:52,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118213.33333333333, ans=0.125
+2024-07-28 05:52:53,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=118213.33333333333, ans=10.0
+2024-07-28 05:52:53,782 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.745e+01 6.443e+01 7.368e+01 1.069e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-28 05:52:54,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118213.33333333333, ans=0.125
+2024-07-28 05:52:57,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=118213.33333333333, ans=0.125
+2024-07-28 05:53:00,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=118226.66666666667, ans=0.125
+2024-07-28 05:53:10,622 INFO [train.py:1114] (3/4) Epoch 9, batch 6900, loss[loss=0.2122, simple_loss=0.3028, pruned_loss=0.06086, over 4951.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2894, pruned_loss=0.05943, over 942391.29 frames. ], batch size: 13, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:53:14,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=118240.0, ans=0.125
+2024-07-28 05:53:16,872 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.87 vs. limit=22.5
+2024-07-28 05:53:19,340 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.38 vs. limit=15.0
+2024-07-28 05:53:27,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118266.66666666667, ans=0.125
+2024-07-28 05:53:29,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=118266.66666666667, ans=0.0
+2024-07-28 05:53:29,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=118266.66666666667, ans=0.125
+2024-07-28 05:53:37,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=118280.0, ans=0.125
+2024-07-28 05:53:46,416 INFO [train.py:1114] (3/4) Epoch 9, batch 6950, loss[loss=0.1728, simple_loss=0.2496, pruned_loss=0.048, over 4545.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2881, pruned_loss=0.05874, over 939771.41 frames. ], batch size: 10, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:53:51,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=118306.66666666667, ans=6.0
+2024-07-28 05:53:51,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118306.66666666667, ans=0.125
+2024-07-28 05:53:54,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=118320.0, ans=0.05
+2024-07-28 05:53:54,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=118320.0, ans=0.125
+2024-07-28 05:53:56,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=118320.0, ans=0.0
+2024-07-28 05:54:01,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=118333.33333333333, ans=0.0
+2024-07-28 05:54:06,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.744e+01 6.460e+01 7.316e+01 1.273e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 05:54:10,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=118346.66666666667, ans=0.025
+2024-07-28 05:54:19,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=118373.33333333333, ans=0.025
+2024-07-28 05:54:19,635 INFO [train.py:1114] (3/4) Epoch 9, batch 7000, loss[loss=0.2319, simple_loss=0.3186, pruned_loss=0.07266, over 4616.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2872, pruned_loss=0.05856, over 938451.65 frames. ], batch size: 17, lr: 8.27e-03, grad_scale: 32.0
+2024-07-28 05:54:29,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.42 vs. limit=15.0
+2024-07-28 05:54:30,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118386.66666666667, ans=0.1
+2024-07-28 05:54:50,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=118426.66666666667, ans=0.125
+2024-07-28 05:54:53,264 INFO [train.py:1114] (3/4) Epoch 9, batch 7050, loss[loss=0.2174, simple_loss=0.3, pruned_loss=0.06737, over 4628.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2884, pruned_loss=0.05863, over 941903.27 frames. ], batch size: 19, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:55:04,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=118453.33333333333, ans=0.125
+2024-07-28 05:55:04,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=118453.33333333333, ans=0.125
+2024-07-28 05:55:06,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=118466.66666666667, ans=0.0
+2024-07-28 05:55:14,450 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.222e+01 6.949e+01 1.042e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-28 05:55:16,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=12.26 vs. limit=15.0
+2024-07-28 05:55:26,699 INFO [train.py:1114] (3/4) Epoch 9, batch 7100, loss[loss=0.227, simple_loss=0.3194, pruned_loss=0.06724, over 4811.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2906, pruned_loss=0.06019, over 936276.12 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:55:31,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=118506.66666666667, ans=0.1
+2024-07-28 05:55:31,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=118506.66666666667, ans=0.0
+2024-07-28 05:55:40,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=118533.33333333333, ans=0.125
+2024-07-28 05:55:50,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118546.66666666667, ans=0.125
+2024-07-28 05:55:51,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0
+2024-07-28 05:55:55,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=118560.0, ans=0.025
+2024-07-28 05:55:55,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118560.0, ans=0.1
+2024-07-28 05:55:57,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=118560.0, ans=0.05
+2024-07-28 05:55:59,507 INFO [train.py:1114] (3/4) Epoch 9, batch 7150, loss[loss=0.2085, simple_loss=0.296, pruned_loss=0.06053, over 4392.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2898, pruned_loss=0.05984, over 937107.10 frames. ], batch size: 21, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:56:04,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=118573.33333333333, ans=0.0
+2024-07-28 05:56:06,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=118586.66666666667, ans=0.125
+2024-07-28 05:56:10,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118586.66666666667, ans=0.0
+2024-07-28 05:56:14,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118600.0, ans=0.1
+2024-07-28 05:56:15,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=118600.0, ans=0.125
+2024-07-28 05:56:19,955 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.871e+01 5.611e+01 6.289e+01 7.655e+01 1.013e+02, threshold=1.258e+02, percent-clipped=0.0
+2024-07-28 05:56:21,485 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:56:22,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.17 vs. limit=22.5
+2024-07-28 05:56:26,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=118626.66666666667, ans=0.0
+2024-07-28 05:56:32,653 INFO [train.py:1114] (3/4) Epoch 9, batch 7200, loss[loss=0.2173, simple_loss=0.3015, pruned_loss=0.06651, over 4795.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2899, pruned_loss=0.05951, over 937896.59 frames. ], batch size: 15, lr: 8.26e-03, grad_scale: 32.0
+2024-07-28 05:56:35,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=118640.0, ans=0.125
+2024-07-28 05:56:37,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=118640.0, ans=0.0
+2024-07-28 05:56:38,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=118653.33333333333, ans=0.0
+2024-07-28 05:56:39,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=118653.33333333333, ans=0.125
+2024-07-28 05:56:42,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.31 vs. limit=10.0
+2024-07-28 05:56:45,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.51 vs. limit=6.0
+2024-07-28 05:56:52,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.64 vs. limit=12.0
+2024-07-28 05:57:00,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=118693.33333333333, ans=0.0
+2024-07-28 05:57:04,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=118693.33333333333, ans=0.1
+2024-07-28 05:57:06,750 INFO [train.py:1114] (3/4) Epoch 9, batch 7250, loss[loss=0.1547, simple_loss=0.2402, pruned_loss=0.03464, over 4854.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2891, pruned_loss=0.05909, over 939737.08 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:57:15,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0
+2024-07-28 05:57:16,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118720.0, ans=0.1
+2024-07-28 05:57:18,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=118720.0, ans=0.025
+2024-07-28 05:57:25,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=118746.66666666667, ans=0.125
+2024-07-28 05:57:26,912 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.398e+01 5.726e+01 6.433e+01 7.236e+01 9.812e+01, threshold=1.287e+02, percent-clipped=0.0
+2024-07-28 05:57:27,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.68 vs. limit=15.0
+2024-07-28 05:57:33,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=118760.0, ans=0.0
+2024-07-28 05:57:37,276 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 05:57:39,733 INFO [train.py:1114] (3/4) Epoch 9, batch 7300, loss[loss=0.1789, simple_loss=0.2665, pruned_loss=0.04567, over 4850.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2886, pruned_loss=0.05924, over 939884.10 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:57:51,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=118786.66666666667, ans=0.2
+2024-07-28 05:57:52,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=118786.66666666667, ans=0.0
+2024-07-28 05:57:56,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=118800.0, ans=0.125
+2024-07-28 05:58:14,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.46 vs. limit=15.0
+2024-07-28 05:58:16,991 INFO [train.py:1114] (3/4) Epoch 9, batch 7350, loss[loss=0.2292, simple_loss=0.297, pruned_loss=0.08076, over 4643.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2893, pruned_loss=0.05917, over 939045.07 frames. ], batch size: 12, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:58:19,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=118840.0, ans=0.0
+2024-07-28 05:58:24,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=118853.33333333333, ans=0.125
+2024-07-28 05:58:24,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=118853.33333333333, ans=0.0
+2024-07-28 05:58:26,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=118853.33333333333, ans=0.0
+2024-07-28 05:58:30,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=118866.66666666667, ans=0.0
+2024-07-28 05:58:36,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.91 vs. limit=12.0
+2024-07-28 05:58:37,726 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+01 5.606e+01 6.103e+01 6.789e+01 9.069e+01, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 05:58:43,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=118893.33333333333, ans=0.1
+2024-07-28 05:58:44,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=118893.33333333333, ans=0.2
+2024-07-28 05:58:50,088 INFO [train.py:1114] (3/4) Epoch 9, batch 7400, loss[loss=0.187, simple_loss=0.2835, pruned_loss=0.04525, over 4695.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.289, pruned_loss=0.05891, over 940345.39 frames. ], batch size: 13, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:58:50,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=118906.66666666667, ans=0.125
+2024-07-28 05:58:58,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=118920.0, ans=0.2
+2024-07-28 05:58:59,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=118920.0, ans=0.0
+2024-07-28 05:59:01,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.10 vs. limit=22.5
+2024-07-28 05:59:01,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.43 vs. limit=6.0
+2024-07-28 05:59:07,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=118933.33333333333, ans=0.0
+2024-07-28 05:59:14,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=118946.66666666667, ans=0.035
+2024-07-28 05:59:17,047 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.91 vs. limit=15.0
+2024-07-28 05:59:22,844 INFO [train.py:1114] (3/4) Epoch 9, batch 7450, loss[loss=0.1461, simple_loss=0.2313, pruned_loss=0.03044, over 4615.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2884, pruned_loss=0.05909, over 937685.82 frames. ], batch size: 11, lr: 8.25e-03, grad_scale: 32.0
+2024-07-28 05:59:27,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=118973.33333333333, ans=0.125
+2024-07-28 05:59:36,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=118986.66666666667, ans=0.125
+2024-07-28 05:59:42,386 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.12 vs. limit=15.0
+2024-07-28 05:59:46,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.870e+01 5.815e+01 6.542e+01 7.746e+01 1.541e+02, threshold=1.308e+02, percent-clipped=5.0
+2024-07-28 05:59:52,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=119026.66666666667, ans=0.125
+2024-07-28 05:59:59,610 INFO [train.py:1114] (3/4) Epoch 9, batch 7500, loss[loss=0.2748, simple_loss=0.3368, pruned_loss=0.1064, over 3397.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2885, pruned_loss=0.05935, over 936737.56 frames. ], batch size: 35, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:00:04,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=119040.0, ans=0.125
+2024-07-28 06:00:09,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=119053.33333333333, ans=0.125
+2024-07-28 06:00:19,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=119066.66666666667, ans=0.0
+2024-07-28 06:00:22,064 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:00:26,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.62 vs. limit=15.0
+2024-07-28 06:00:35,133 INFO [train.py:1114] (3/4) Epoch 9, batch 7550, loss[loss=0.2256, simple_loss=0.3015, pruned_loss=0.07488, over 4622.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.29, pruned_loss=0.05972, over 936470.67 frames. ], batch size: 17, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:00:35,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=119106.66666666667, ans=0.0
+2024-07-28 06:00:46,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=119120.0, ans=0.0
+2024-07-28 06:00:55,462 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.364e+01 5.884e+01 6.441e+01 7.385e+01 1.107e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-28 06:01:06,988 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:01:07,530 INFO [train.py:1114] (3/4) Epoch 9, batch 7600, loss[loss=0.1846, simple_loss=0.2754, pruned_loss=0.04692, over 4805.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2897, pruned_loss=0.06005, over 938489.63 frames. ], batch size: 14, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:01:10,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=119173.33333333333, ans=0.0
+2024-07-28 06:01:18,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=119186.66666666667, ans=0.125
+2024-07-28 06:01:27,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=119213.33333333333, ans=0.0
+2024-07-28 06:01:35,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.64 vs. limit=15.0
+2024-07-28 06:01:41,966 INFO [train.py:1114] (3/4) Epoch 9, batch 7650, loss[loss=0.185, simple_loss=0.2614, pruned_loss=0.05424, over 4932.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2902, pruned_loss=0.0605, over 937255.34 frames. ], batch size: 12, lr: 8.24e-03, grad_scale: 32.0
+2024-07-28 06:01:42,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=119240.0, ans=0.1
+2024-07-28 06:01:55,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=119266.66666666667, ans=0.125
+2024-07-28 06:01:58,244 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.25 vs. limit=22.5
+2024-07-28 06:02:02,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119266.66666666667, ans=0.1
+2024-07-28 06:02:04,510 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.140e+01 5.757e+01 6.386e+01 7.107e+01 1.097e+02, threshold=1.277e+02, percent-clipped=0.0
+2024-07-28 06:02:09,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=119280.0, ans=0.125
+2024-07-28 06:02:14,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=119293.33333333333, ans=0.0
+2024-07-28 06:02:18,632 INFO [train.py:1114] (3/4) Epoch 9, batch 7700, loss[loss=0.2162, simple_loss=0.3056, pruned_loss=0.0634, over 4686.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2908, pruned_loss=0.06044, over 934507.94 frames. ], batch size: 13, lr: 8.23e-03, grad_scale: 16.0
+2024-07-28 06:02:19,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.26 vs. limit=15.0
+2024-07-28 06:02:25,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=119320.0, ans=0.125
+2024-07-28 06:02:41,070 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.13 vs. limit=15.0
+2024-07-28 06:02:41,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=119333.33333333333, ans=0.035
+2024-07-28 06:03:18,523 INFO [train.py:1114] (3/4) Epoch 9, batch 7750, loss[loss=0.2034, simple_loss=0.2932, pruned_loss=0.05677, over 4934.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2912, pruned_loss=0.06059, over 935623.81 frames. ], batch size: 14, lr: 8.23e-03, grad_scale: 16.0
+2024-07-28 06:03:48,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=119400.0, ans=0.125
+2024-07-28 06:03:51,265 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.505e+01 6.110e+01 6.941e+01 1.112e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 06:03:59,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119426.66666666667, ans=0.125
+2024-07-28 06:04:00,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0
+2024-07-28 06:04:02,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.04 vs. limit=15.0
+2024-07-28 06:04:03,702 INFO [train.py:1114] (3/4) Epoch 9, batch 7800, loss[loss=0.2225, simple_loss=0.3153, pruned_loss=0.06487, over 4652.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2914, pruned_loss=0.06033, over 937503.43 frames. ], batch size: 14, lr: 8.23e-03, grad_scale: 8.0
+2024-07-28 06:04:32,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=119453.33333333333, ans=0.0
+2024-07-28 06:04:33,373 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.44 vs. limit=6.0
+2024-07-28 06:04:58,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.59 vs. limit=6.0
+2024-07-28 06:04:59,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=119466.66666666667, ans=0.125
+2024-07-28 06:05:03,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=119480.0, ans=0.025
+2024-07-28 06:05:26,846 INFO [train.py:1114] (3/4) Epoch 9, batch 7850, loss[loss=0.1961, simple_loss=0.2656, pruned_loss=0.06336, over 4488.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2913, pruned_loss=0.0605, over 935964.02 frames. ], batch size: 10, lr: 8.23e-03, grad_scale: 8.0
+2024-07-28 06:05:56,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=119520.0, ans=0.0
+2024-07-28 06:06:16,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=119533.33333333333, ans=0.125
+2024-07-28 06:06:33,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.602e+01 5.758e+01 6.163e+01 6.826e+01 1.029e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 06:06:38,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.47 vs. limit=10.0
+2024-07-28 06:06:39,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119546.66666666667, ans=0.1
+2024-07-28 06:07:13,094 INFO [train.py:1114] (3/4) Epoch 9, batch 7900, loss[loss=0.2233, simple_loss=0.3214, pruned_loss=0.06256, over 4881.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2924, pruned_loss=0.06089, over 933114.74 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 8.0
+2024-07-28 06:07:20,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=119586.66666666667, ans=0.1
+2024-07-28 06:08:05,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=119626.66666666667, ans=0.125
+2024-07-28 06:08:11,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=119626.66666666667, ans=0.125
+2024-07-28 06:08:16,887 INFO [train.py:1114] (3/4) Epoch 9, batch 7950, loss[loss=0.2482, simple_loss=0.3242, pruned_loss=0.08606, over 3212.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2921, pruned_loss=0.06054, over 934871.19 frames. ], batch size: 35, lr: 8.22e-03, grad_scale: 8.0
+2024-07-28 06:08:45,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=119666.66666666667, ans=0.1
+2024-07-28 06:08:54,861 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 5.779e+01 6.459e+01 7.298e+01 1.141e+02, threshold=1.292e+02, percent-clipped=0.0
+2024-07-28 06:09:23,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=119706.66666666667, ans=0.0
+2024-07-28 06:09:24,213 INFO [train.py:1114] (3/4) Epoch 9, batch 8000, loss[loss=0.1826, simple_loss=0.2578, pruned_loss=0.05371, over 4609.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2906, pruned_loss=0.06017, over 934093.02 frames. ], batch size: 11, lr: 8.22e-03, grad_scale: 16.0
+2024-07-28 06:09:26,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=119706.66666666667, ans=0.0
+2024-07-28 06:09:30,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=119706.66666666667, ans=0.0
+2024-07-28 06:09:43,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.37 vs. limit=22.5
+2024-07-28 06:10:17,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=119760.0, ans=0.125
+2024-07-28 06:10:28,782 INFO [train.py:1114] (3/4) Epoch 9, batch 8050, loss[loss=0.2152, simple_loss=0.3186, pruned_loss=0.05592, over 4801.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2916, pruned_loss=0.06027, over 934041.92 frames. ], batch size: 14, lr: 8.22e-03, grad_scale: 16.0
+2024-07-28 06:10:32,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=119773.33333333333, ans=0.04949747468305833
+2024-07-28 06:10:33,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=119773.33333333333, ans=0.025
+2024-07-28 06:10:49,917 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 6.138e+01 7.014e+01 8.220e+01 1.277e+02, threshold=1.403e+02, percent-clipped=0.0
+2024-07-28 06:10:58,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=119826.66666666667, ans=0.0
+2024-07-28 06:11:03,033 INFO [train.py:1114] (3/4) Epoch 9, batch 8100, loss[loss=0.2103, simple_loss=0.2937, pruned_loss=0.06346, over 4812.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2916, pruned_loss=0.06024, over 934104.35 frames. ], batch size: 15, lr: 8.22e-03, grad_scale: 16.0
+2024-07-28 06:11:20,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=15.0
+2024-07-28 06:11:33,503 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:11:34,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.40 vs. limit=22.5
+2024-07-28 06:11:36,514 INFO [train.py:1114] (3/4) Epoch 9, batch 8150, loss[loss=0.2012, simple_loss=0.2825, pruned_loss=0.05997, over 4794.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2901, pruned_loss=0.05945, over 937496.16 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0
+2024-07-28 06:11:39,207 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:11:43,835 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.27 vs. limit=15.0
+2024-07-28 06:11:45,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=119920.0, ans=0.125
+2024-07-28 06:11:57,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.730e+01 6.295e+01 7.311e+01 1.625e+02, threshold=1.259e+02, percent-clipped=1.0
+2024-07-28 06:12:08,487 INFO [train.py:1114] (3/4) Epoch 9, batch 8200, loss[loss=0.2152, simple_loss=0.3027, pruned_loss=0.06378, over 4807.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2898, pruned_loss=0.05932, over 938648.69 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0
+2024-07-28 06:12:09,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=119973.33333333333, ans=0.0
+2024-07-28 06:12:13,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=119973.33333333333, ans=0.2
+2024-07-28 06:12:13,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=119973.33333333333, ans=0.125
+2024-07-28 06:12:23,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120000.0, ans=0.1
+2024-07-28 06:12:27,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=120000.0, ans=0.02
+2024-07-28 06:12:28,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120000.0, ans=0.125
+2024-07-28 06:12:38,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=120013.33333333333, ans=0.125
+2024-07-28 06:12:40,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=120026.66666666667, ans=0.125
+2024-07-28 06:12:46,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=120026.66666666667, ans=0.0
+2024-07-28 06:12:47,771 INFO [train.py:1114] (3/4) Epoch 9, batch 8250, loss[loss=0.2014, simple_loss=0.2749, pruned_loss=0.06393, over 4888.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2902, pruned_loss=0.05986, over 938978.83 frames. ], batch size: 13, lr: 8.21e-03, grad_scale: 16.0
+2024-07-28 06:12:51,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.30 vs. limit=15.0
+2024-07-28 06:12:53,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer_ff3.min_abs, batch_count=120040.0, ans=0.2
+2024-07-28 06:13:09,946 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.55 vs. limit=15.0
+2024-07-28 06:13:12,021 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.840e+01 6.472e+01 7.401e+01 1.114e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-28 06:13:18,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=120093.33333333333, ans=0.125
+2024-07-28 06:13:31,396 INFO [train.py:1114] (3/4) Epoch 9, batch 8300, loss[loss=0.1874, simple_loss=0.2835, pruned_loss=0.04564, over 4904.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2916, pruned_loss=0.06026, over 939002.06 frames. ], batch size: 15, lr: 8.21e-03, grad_scale: 16.0
+2024-07-28 06:13:39,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=120120.0, ans=0.1
+2024-07-28 06:13:46,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120133.33333333333, ans=0.1
+2024-07-28 06:13:56,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=120146.66666666667, ans=0.125
+2024-07-28 06:14:06,569 INFO [train.py:1114] (3/4) Epoch 9, batch 8350, loss[loss=0.2055, simple_loss=0.2911, pruned_loss=0.05994, over 4808.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2909, pruned_loss=0.06033, over 941682.09 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 16.0
+2024-07-28 06:14:07,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.38 vs. limit=6.0
+2024-07-28 06:14:07,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=120173.33333333333, ans=0.125
+2024-07-28 06:14:11,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=120173.33333333333, ans=0.09899494936611666
+2024-07-28 06:14:16,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=120186.66666666667, ans=0.0
+2024-07-28 06:14:17,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=120186.66666666667, ans=0.125
+2024-07-28 06:14:30,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=120213.33333333333, ans=0.125
+2024-07-28 06:14:31,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120213.33333333333, ans=0.0
+2024-07-28 06:14:32,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.725e+01 6.523e+01 7.692e+01 9.570e+01, threshold=1.305e+02, percent-clipped=0.0
+2024-07-28 06:14:40,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=120226.66666666667, ans=0.125
+2024-07-28 06:14:42,955 INFO [train.py:1114] (3/4) Epoch 9, batch 8400, loss[loss=0.2306, simple_loss=0.3144, pruned_loss=0.07346, over 4776.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2917, pruned_loss=0.06095, over 940178.27 frames. ], batch size: 12, lr: 8.20e-03, grad_scale: 32.0
+2024-07-28 06:14:50,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=120253.33333333333, ans=0.0
+2024-07-28 06:14:50,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=120253.33333333333, ans=0.09899494936611666
+2024-07-28 06:14:53,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.23 vs. limit=22.5
+2024-07-28 06:14:56,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.29 vs. limit=15.0
+2024-07-28 06:14:58,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.32 vs. limit=15.0
+2024-07-28 06:15:01,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=120266.66666666667, ans=0.95
+2024-07-28 06:15:06,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.87 vs. limit=10.0
+2024-07-28 06:15:14,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=120306.66666666667, ans=0.025
+2024-07-28 06:15:15,457 INFO [train.py:1114] (3/4) Epoch 9, batch 8450, loss[loss=0.2011, simple_loss=0.2916, pruned_loss=0.0553, over 4798.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2921, pruned_loss=0.06094, over 938940.05 frames. ], batch size: 15, lr: 8.20e-03, grad_scale: 32.0
+2024-07-28 06:15:25,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=120320.0, ans=0.125
+2024-07-28 06:15:27,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.00 vs. limit=15.0
+2024-07-28 06:15:37,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=120346.66666666667, ans=0.125
+2024-07-28 06:15:38,204 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.798e+01 6.423e+01 7.347e+01 1.111e+02, threshold=1.285e+02, percent-clipped=0.0
+2024-07-28 06:15:44,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.29 vs. limit=15.0
+2024-07-28 06:15:49,314 INFO [train.py:1114] (3/4) Epoch 9, batch 8500, loss[loss=0.2033, simple_loss=0.2845, pruned_loss=0.06101, over 4629.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2915, pruned_loss=0.06083, over 938690.13 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 32.0
+2024-07-28 06:15:56,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.27 vs. limit=6.0
+2024-07-28 06:16:12,295 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.98 vs. limit=5.0
+2024-07-28 06:16:21,539 INFO [train.py:1114] (3/4) Epoch 9, batch 8550, loss[loss=0.166, simple_loss=0.2546, pruned_loss=0.03871, over 4802.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2913, pruned_loss=0.06086, over 939617.71 frames. ], batch size: 11, lr: 8.20e-03, grad_scale: 16.0
+2024-07-28 06:16:24,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=120440.0, ans=0.025
+2024-07-28 06:16:33,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=120466.66666666667, ans=0.125
+2024-07-28 06:16:38,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.57 vs. limit=10.0
+2024-07-28 06:16:43,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.723e+01 6.764e+01 8.281e+01 1.171e+02, threshold=1.353e+02, percent-clipped=0.0
+2024-07-28 06:16:48,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.58 vs. limit=22.5
+2024-07-28 06:16:52,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=120493.33333333333, ans=0.0
+2024-07-28 06:16:54,058 INFO [train.py:1114] (3/4) Epoch 9, batch 8600, loss[loss=0.2345, simple_loss=0.3286, pruned_loss=0.0702, over 4806.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2913, pruned_loss=0.06105, over 939082.29 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0
+2024-07-28 06:17:06,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=120533.33333333333, ans=0.2
+2024-07-28 06:17:24,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=120560.0, ans=0.0
+2024-07-28 06:17:26,334 INFO [train.py:1114] (3/4) Epoch 9, batch 8650, loss[loss=0.2136, simple_loss=0.3096, pruned_loss=0.05881, over 4893.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2907, pruned_loss=0.06111, over 940231.18 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0
+2024-07-28 06:17:30,320 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:17:44,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=120613.33333333333, ans=0.125
+2024-07-28 06:17:48,025 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.743e+01 6.194e+01 7.423e+01 1.120e+02, threshold=1.239e+02, percent-clipped=0.0
+2024-07-28 06:17:49,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=120613.33333333333, ans=0.0
+2024-07-28 06:17:57,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=120626.66666666667, ans=0.125
+2024-07-28 06:17:59,183 INFO [train.py:1114] (3/4) Epoch 9, batch 8700, loss[loss=0.1837, simple_loss=0.2791, pruned_loss=0.04413, over 4752.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2919, pruned_loss=0.06176, over 938123.72 frames. ], batch size: 13, lr: 8.19e-03, grad_scale: 16.0
+2024-07-28 06:18:00,546 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.02 vs. limit=22.5
+2024-07-28 06:18:07,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.25 vs. limit=12.0
+2024-07-28 06:18:15,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=120666.66666666667, ans=0.0
+2024-07-28 06:18:22,990 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.85 vs. limit=15.0
+2024-07-28 06:18:26,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.14 vs. limit=12.0
+2024-07-28 06:18:33,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=120693.33333333333, ans=0.5
+2024-07-28 06:18:33,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=120693.33333333333, ans=0.1
+2024-07-28 06:18:34,227 INFO [train.py:1114] (3/4) Epoch 9, batch 8750, loss[loss=0.2266, simple_loss=0.3101, pruned_loss=0.07157, over 4693.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2916, pruned_loss=0.0615, over 936517.64 frames. ], batch size: 15, lr: 8.19e-03, grad_scale: 16.0
+2024-07-28 06:18:37,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=120706.66666666667, ans=0.2
+2024-07-28 06:18:49,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0
+2024-07-28 06:18:50,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.90 vs. limit=22.5
+2024-07-28 06:18:56,340 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.820e+01 6.301e+01 7.114e+01 1.037e+02, threshold=1.260e+02, percent-clipped=0.0
+2024-07-28 06:18:56,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=120746.66666666667, ans=0.125
+2024-07-28 06:19:06,416 INFO [train.py:1114] (3/4) Epoch 9, batch 8800, loss[loss=0.1874, simple_loss=0.2837, pruned_loss=0.04553, over 4934.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2907, pruned_loss=0.06042, over 937472.49 frames. ], batch size: 14, lr: 8.18e-03, grad_scale: 32.0
+2024-07-28 06:19:12,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=120786.66666666667, ans=0.125
+2024-07-28 06:19:22,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0
+2024-07-28 06:19:28,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=120813.33333333333, ans=0.1
+2024-07-28 06:19:39,906 INFO [train.py:1114] (3/4) Epoch 9, batch 8850, loss[loss=0.2641, simple_loss=0.3255, pruned_loss=0.1013, over 4361.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2902, pruned_loss=0.06069, over 931843.91 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0
+2024-07-28 06:19:50,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=120853.33333333333, ans=0.125
+2024-07-28 06:20:00,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.87 vs. limit=22.5
+2024-07-28 06:20:02,409 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.678e+01 6.367e+01 7.332e+01 1.676e+02, threshold=1.273e+02, percent-clipped=2.0
+2024-07-28 06:20:02,499 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:20:07,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=120893.33333333333, ans=0.025
+2024-07-28 06:20:13,209 INFO [train.py:1114] (3/4) Epoch 9, batch 8900, loss[loss=0.1793, simple_loss=0.2692, pruned_loss=0.04466, over 4942.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2908, pruned_loss=0.06115, over 930039.71 frames. ], batch size: 12, lr: 8.18e-03, grad_scale: 32.0
+2024-07-28 06:20:28,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=120933.33333333333, ans=10.0
+2024-07-28 06:20:29,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=120933.33333333333, ans=0.0
+2024-07-28 06:20:31,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=120933.33333333333, ans=0.125
+2024-07-28 06:20:37,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.05 vs. limit=15.0
+2024-07-28 06:20:48,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=120960.0, ans=0.0
+2024-07-28 06:20:58,024 INFO [train.py:1114] (3/4) Epoch 9, batch 8950, loss[loss=0.195, simple_loss=0.2897, pruned_loss=0.05017, over 4474.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2912, pruned_loss=0.06128, over 930824.22 frames. ], batch size: 21, lr: 8.18e-03, grad_scale: 32.0
+2024-07-28 06:21:06,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=120986.66666666667, ans=0.0
+2024-07-28 06:21:07,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=120986.66666666667, ans=0.125
+2024-07-28 06:21:08,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=120986.66666666667, ans=0.125
+2024-07-28 06:21:19,652 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.816e+01 6.215e+01 7.468e+01 1.036e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 06:21:20,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=121013.33333333333, ans=0.015
+2024-07-28 06:21:29,710 INFO [train.py:1114] (3/4) Epoch 9, batch 9000, loss[loss=0.2168, simple_loss=0.2832, pruned_loss=0.07521, over 4636.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2905, pruned_loss=0.06087, over 933849.81 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0
+2024-07-28 06:21:29,710 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 06:21:37,340 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5946, 3.8423, 3.6213, 4.3125], device='cuda:3')
+2024-07-28 06:21:43,389 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.1396, 2.6454, 2.7920, 3.0289], device='cuda:3')
+2024-07-28 06:21:45,146 INFO [train.py:1146] (3/4) Epoch 9, validation: loss=0.1749, simple_loss=0.2792, pruned_loss=0.03531, over 944034.00 frames.
+2024-07-28 06:21:45,147 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 06:22:26,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=121093.33333333333, ans=0.125
+2024-07-28 06:22:35,217 INFO [train.py:1114] (3/4) Epoch 9, batch 9050, loss[loss=0.1692, simple_loss=0.2546, pruned_loss=0.04184, over 4529.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2889, pruned_loss=0.05984, over 934088.86 frames. ], batch size: 10, lr: 8.17e-03, grad_scale: 32.0
+2024-07-28 06:22:39,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=121106.66666666667, ans=0.125
+2024-07-28 06:23:15,946 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.675e+01 6.570e+01 7.797e+01 1.121e+02, threshold=1.314e+02, percent-clipped=0.0
+2024-07-28 06:23:32,981 INFO [train.py:1114] (3/4) Epoch 9, batch 9100, loss[loss=0.2089, simple_loss=0.2954, pruned_loss=0.06119, over 4945.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2884, pruned_loss=0.05918, over 936664.77 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0
+2024-07-28 06:23:47,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.79 vs. limit=22.5
+2024-07-28 06:23:53,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=121200.0, ans=0.125
+2024-07-28 06:24:05,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=121226.66666666667, ans=0.0
+2024-07-28 06:24:12,179 INFO [train.py:1114] (3/4) Epoch 9, batch 9150, loss[loss=0.2167, simple_loss=0.3101, pruned_loss=0.06161, over 4808.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2904, pruned_loss=0.06014, over 935681.42 frames. ], batch size: 14, lr: 8.17e-03, grad_scale: 32.0
+2024-07-28 06:24:32,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=121253.33333333333, ans=0.125
+2024-07-28 06:24:33,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=121253.33333333333, ans=0.0
+2024-07-28 06:24:36,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=121253.33333333333, ans=0.125
+2024-07-28 06:24:47,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=121266.66666666667, ans=0.1
+2024-07-28 06:24:49,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=121266.66666666667, ans=0.07
+2024-07-28 06:24:59,225 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.215e+01 7.054e+01 1.564e+02, threshold=1.243e+02, percent-clipped=1.0
+2024-07-28 06:25:07,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=121293.33333333333, ans=0.0
+2024-07-28 06:25:10,707 INFO [train.py:1114] (3/4) Epoch 9, batch 9200, loss[loss=0.1757, simple_loss=0.2687, pruned_loss=0.04139, over 4850.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2894, pruned_loss=0.0594, over 937506.88 frames. ], batch size: 12, lr: 8.17e-03, grad_scale: 32.0
+2024-07-28 06:25:29,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=121320.0, ans=0.05
+2024-07-28 06:25:43,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.31 vs. limit=10.0
+2024-07-28 06:25:47,371 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 06:25:48,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121360.0, ans=0.125
+2024-07-28 06:25:52,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=121360.0, ans=0.0
+2024-07-28 06:25:54,950 INFO [train.py:1114] (3/4) Epoch 9, batch 9250, loss[loss=0.1995, simple_loss=0.2751, pruned_loss=0.06193, over 4632.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2899, pruned_loss=0.05966, over 938284.84 frames. ], batch size: 13, lr: 8.16e-03, grad_scale: 32.0
+2024-07-28 06:25:55,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121373.33333333333, ans=0.125
+2024-07-28 06:25:58,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=121373.33333333333, ans=0.125
+2024-07-28 06:26:00,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=121386.66666666667, ans=0.125
+2024-07-28 06:26:03,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121386.66666666667, ans=0.1
+2024-07-28 06:26:08,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=121400.0, ans=0.2
+2024-07-28 06:26:16,172 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.765e+01 6.275e+01 7.273e+01 1.016e+02, threshold=1.255e+02, percent-clipped=0.0
+2024-07-28 06:26:19,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=121413.33333333333, ans=0.04949747468305833
+2024-07-28 06:26:26,995 INFO [train.py:1114] (3/4) Epoch 9, batch 9300, loss[loss=0.2202, simple_loss=0.3035, pruned_loss=0.06846, over 4774.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2898, pruned_loss=0.05974, over 938029.89 frames. ], batch size: 12, lr: 8.16e-03, grad_scale: 32.0
+2024-07-28 06:26:30,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=121440.0, ans=0.125
+2024-07-28 06:26:42,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=121453.33333333333, ans=0.95
+2024-07-28 06:26:52,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=121466.66666666667, ans=0.125
+2024-07-28 06:26:54,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.88 vs. limit=15.0
+2024-07-28 06:26:55,532 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.83 vs. limit=10.0
+2024-07-28 06:26:57,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=121466.66666666667, ans=0.1
+2024-07-28 06:27:00,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121480.0, ans=0.1
+2024-07-28 06:27:30,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=121493.33333333333, ans=0.05
+2024-07-28 06:27:30,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.89 vs. limit=22.5
+2024-07-28 06:27:31,950 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.07 vs. limit=10.0
+2024-07-28 06:27:34,636 INFO [train.py:1114] (3/4) Epoch 9, batch 9350, loss[loss=0.1711, simple_loss=0.2588, pruned_loss=0.04176, over 4827.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2909, pruned_loss=0.06011, over 934987.98 frames. ], batch size: 11, lr: 8.16e-03, grad_scale: 32.0
+2024-07-28 06:27:43,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=121506.66666666667, ans=0.125
+2024-07-28 06:27:56,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=121533.33333333333, ans=0.0
+2024-07-28 06:27:58,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121546.66666666667, ans=0.1
+2024-07-28 06:28:01,021 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.677e+01 6.203e+01 7.268e+01 1.059e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 06:28:01,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=121546.66666666667, ans=0.0
+2024-07-28 06:28:17,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=121560.0, ans=0.125
+2024-07-28 06:28:17,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=121560.0, ans=0.125
+2024-07-28 06:28:21,360 INFO [train.py:1114] (3/4) Epoch 9, batch 9400, loss[loss=0.2016, simple_loss=0.2964, pruned_loss=0.05343, over 4693.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2912, pruned_loss=0.06059, over 932856.40 frames. ], batch size: 13, lr: 8.16e-03, grad_scale: 32.0
+2024-07-28 06:28:45,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=121613.33333333333, ans=0.0
+2024-07-28 06:28:52,887 INFO [train.py:1114] (3/4) Epoch 9, batch 9450, loss[loss=0.1657, simple_loss=0.2601, pruned_loss=0.03571, over 4799.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2906, pruned_loss=0.06029, over 932570.10 frames. ], batch size: 11, lr: 8.15e-03, grad_scale: 32.0
+2024-07-28 06:29:05,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0
+2024-07-28 06:29:06,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.27 vs. limit=15.0
+2024-07-28 06:29:13,875 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.626e+01 6.016e+01 7.236e+01 1.280e+02, threshold=1.203e+02, percent-clipped=1.0
+2024-07-28 06:29:17,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=121693.33333333333, ans=0.125
+2024-07-28 06:29:24,031 INFO [train.py:1114] (3/4) Epoch 9, batch 9500, loss[loss=0.1883, simple_loss=0.2749, pruned_loss=0.05085, over 4703.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2889, pruned_loss=0.05905, over 934693.08 frames. 
], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:29:24,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=121706.66666666667, ans=0.0 +2024-07-28 06:29:27,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=121706.66666666667, ans=0.0 +2024-07-28 06:29:27,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=121706.66666666667, ans=0.0 +2024-07-28 06:29:34,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.20 vs. limit=15.0 +2024-07-28 06:29:41,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=121733.33333333333, ans=0.125 +2024-07-28 06:29:52,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=121760.0, ans=0.1 +2024-07-28 06:29:55,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=121760.0, ans=0.1 +2024-07-28 06:30:02,244 INFO [train.py:1114] (3/4) Epoch 9, batch 9550, loss[loss=0.2231, simple_loss=0.2973, pruned_loss=0.07445, over 4779.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2902, pruned_loss=0.06054, over 931839.07 frames. ], batch size: 12, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:10,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.04 vs. limit=15.0 +2024-07-28 06:30:15,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=121800.0, ans=0.125 +2024-07-28 06:30:18,120 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=7.759e-03 +2024-07-28 06:30:19,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121800.0, ans=0.1 +2024-07-28 06:30:20,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=121813.33333333333, ans=0.0 +2024-07-28 06:30:23,657 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.774e+01 6.473e+01 7.553e+01 1.235e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 06:30:29,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=121826.66666666667, ans=0.025 +2024-07-28 06:30:35,252 INFO [train.py:1114] (3/4) Epoch 9, batch 9600, loss[loss=0.2781, simple_loss=0.3331, pruned_loss=0.1116, over 3600.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2905, pruned_loss=0.06027, over 931105.26 frames. 
], batch size: 35, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:30:39,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=121840.0, ans=0.2 +2024-07-28 06:30:47,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=121853.33333333333, ans=0.0 +2024-07-28 06:30:52,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=121866.66666666667, ans=0.0 +2024-07-28 06:30:59,464 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:31:04,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.69 vs. limit=22.5 +2024-07-28 06:31:15,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121893.33333333333, ans=0.125 +2024-07-28 06:31:18,863 INFO [train.py:1114] (3/4) Epoch 9, batch 9650, loss[loss=0.2317, simple_loss=0.3256, pruned_loss=0.06887, over 4847.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2904, pruned_loss=0.06014, over 926722.62 frames. ], batch size: 16, lr: 8.15e-03, grad_scale: 32.0 +2024-07-28 06:31:20,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=121906.66666666667, ans=0.125 +2024-07-28 06:31:20,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=121906.66666666667, ans=0.0 +2024-07-28 06:31:30,946 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.86 vs. limit=15.0 +2024-07-28 06:31:36,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=121920.0, ans=0.0 +2024-07-28 06:31:44,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=121933.33333333333, ans=0.125 +2024-07-28 06:31:53,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.799e+01 6.411e+01 7.286e+01 1.019e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-28 06:31:54,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121946.66666666667, ans=0.1 +2024-07-28 06:31:58,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=121960.0, ans=0.1 +2024-07-28 06:32:11,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=121960.0, ans=0.0 +2024-07-28 06:32:15,511 INFO [train.py:1114] (3/4) Epoch 9, batch 9700, loss[loss=0.2777, simple_loss=0.349, pruned_loss=0.1032, over 4201.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2903, pruned_loss=0.06028, over 924725.08 frames. 
], batch size: 25, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:20,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=121973.33333333333, ans=0.1 +2024-07-28 06:32:23,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=121986.66666666667, ans=0.125 +2024-07-28 06:32:25,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=121986.66666666667, ans=0.0 +2024-07-28 06:32:44,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=122026.66666666667, ans=0.2 +2024-07-28 06:32:48,544 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.15 vs. limit=6.0 +2024-07-28 06:32:49,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=122026.66666666667, ans=0.0 +2024-07-28 06:32:49,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=122026.66666666667, ans=0.125 +2024-07-28 06:32:51,287 INFO [train.py:1114] (3/4) Epoch 9, batch 9750, loss[loss=0.2149, simple_loss=0.3023, pruned_loss=0.06375, over 4676.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2899, pruned_loss=0.06034, over 925496.73 frames. ], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:32:51,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122040.0, ans=0.0 +2024-07-28 06:32:55,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122040.0, ans=0.1 +2024-07-28 06:32:57,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122040.0, ans=0.125 +2024-07-28 06:33:13,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=122066.66666666667, ans=0.0 +2024-07-28 06:33:18,532 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.645e+01 5.595e+01 6.071e+01 7.420e+01 1.003e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 06:33:19,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=122080.0, ans=0.2 +2024-07-28 06:33:19,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122080.0, ans=0.125 +2024-07-28 06:33:24,216 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:33:28,408 INFO [train.py:1114] (3/4) Epoch 9, batch 9800, loss[loss=0.1936, simple_loss=0.2797, pruned_loss=0.0538, over 4704.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2889, pruned_loss=0.05974, over 924691.84 frames. 
], batch size: 12, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:33:34,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=122106.66666666667, ans=0.09899494936611666 +2024-07-28 06:33:46,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=122133.33333333333, ans=0.125 +2024-07-28 06:34:02,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=122160.0, ans=0.125 +2024-07-28 06:34:09,105 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:34:10,940 INFO [train.py:1114] (3/4) Epoch 9, batch 9850, loss[loss=0.2193, simple_loss=0.3088, pruned_loss=0.06488, over 4903.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2894, pruned_loss=0.05997, over 926850.99 frames. ], batch size: 15, lr: 8.14e-03, grad_scale: 32.0 +2024-07-28 06:34:18,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=122186.66666666667, ans=0.05 +2024-07-28 06:34:24,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.28 vs. limit=12.0 +2024-07-28 06:34:26,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.01 vs. limit=10.0 +2024-07-28 06:34:30,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122213.33333333333, ans=0.1 +2024-07-28 06:34:31,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122213.33333333333, ans=0.0 +2024-07-28 06:34:32,419 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.834e+01 6.555e+01 7.421e+01 1.036e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-28 06:34:34,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.54 vs. limit=15.0 +2024-07-28 06:34:42,488 INFO [train.py:1114] (3/4) Epoch 9, batch 9900, loss[loss=0.2352, simple_loss=0.3133, pruned_loss=0.07849, over 4845.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2911, pruned_loss=0.06095, over 926303.79 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:34:53,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=122253.33333333333, ans=0.2 +2024-07-28 06:35:08,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.92 vs. limit=15.0 +2024-07-28 06:35:11,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=122293.33333333333, ans=0.0 +2024-07-28 06:35:26,860 INFO [train.py:1114] (3/4) Epoch 9, batch 9950, loss[loss=0.1933, simple_loss=0.2646, pruned_loss=0.061, over 4476.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2915, pruned_loss=0.06154, over 928913.59 frames. 
], batch size: 10, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:35:29,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.99 vs. limit=15.0 +2024-07-28 06:35:29,872 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.75 vs. limit=15.0 +2024-07-28 06:35:31,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=122306.66666666667, ans=0.0 +2024-07-28 06:35:43,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=122333.33333333333, ans=0.125 +2024-07-28 06:35:46,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=122333.33333333333, ans=0.05 +2024-07-28 06:35:47,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=122346.66666666667, ans=0.0 +2024-07-28 06:35:47,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.57 vs. limit=15.0 +2024-07-28 06:35:49,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.887e+01 6.567e+01 7.886e+01 1.035e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 06:35:56,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=122360.0, ans=0.125 +2024-07-28 06:36:00,349 INFO [train.py:1114] (3/4) Epoch 9, batch 10000, loss[loss=0.2201, simple_loss=0.3151, pruned_loss=0.06259, over 4602.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2946, pruned_loss=0.06278, over 926158.49 frames. ], batch size: 16, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:12,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=122386.66666666667, ans=0.125 +2024-07-28 06:36:19,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=122400.0, ans=0.1 +2024-07-28 06:36:21,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=122400.0, ans=0.125 +2024-07-28 06:36:24,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.07 vs. limit=22.5 +2024-07-28 06:36:27,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=122413.33333333333, ans=0.0 +2024-07-28 06:36:42,689 INFO [train.py:1114] (3/4) Epoch 9, batch 10050, loss[loss=0.2621, simple_loss=0.3356, pruned_loss=0.09424, over 3296.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2977, pruned_loss=0.06497, over 914820.55 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:36:47,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.21 vs. 
limit=15.0 +2024-07-28 06:36:48,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=122440.0, ans=0.125 +2024-07-28 06:36:56,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=122466.66666666667, ans=0.2 +2024-07-28 06:37:06,684 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 6.481e+01 7.122e+01 8.299e+01 1.409e+02, threshold=1.424e+02, percent-clipped=1.0 +2024-07-28 06:37:10,160 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:37:10,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122493.33333333333, ans=0.1 +2024-07-28 06:37:12,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.90 vs. limit=10.0 +2024-07-28 06:37:14,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=122493.33333333333, ans=0.2 +2024-07-28 06:37:18,577 INFO [train.py:1114] (3/4) Epoch 9, batch 10100, loss[loss=0.2767, simple_loss=0.3385, pruned_loss=0.1074, over 3319.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3037, pruned_loss=0.07091, over 861699.59 frames. ], batch size: 35, lr: 8.13e-03, grad_scale: 32.0 +2024-07-28 06:37:31,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=11.61 vs. limit=12.0 +2024-07-28 06:37:42,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=122546.66666666667, ans=10.0 +2024-07-28 06:37:43,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.66 vs. limit=22.5 +2024-07-28 06:37:52,176 INFO [train.py:1114] (3/4) Epoch 9, batch 10150, loss[loss=0.2629, simple_loss=0.3292, pruned_loss=0.09826, over 3358.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.3086, pruned_loss=0.07595, over 820521.96 frames. ], batch size: 36, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:37:54,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=122573.33333333333, ans=0.125 +2024-07-28 06:38:02,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.95 vs. 
limit=15.0 +2024-07-28 06:38:07,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=122586.66666666667, ans=0.05 +2024-07-28 06:38:11,350 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 06:38:17,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=122613.33333333333, ans=0.0 +2024-07-28 06:38:18,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122613.33333333333, ans=0.1 +2024-07-28 06:38:19,557 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.853e+01 6.506e+01 6.978e+01 7.406e+01 9.051e+01, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 06:38:22,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122626.66666666667, ans=0.1 +2024-07-28 06:38:28,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.33 vs. limit=22.5 +2024-07-28 06:39:22,373 INFO [train.py:1114] (3/4) Epoch 9, batch 10200, loss[loss=0.2331, simple_loss=0.3003, pruned_loss=0.0829, over 3614.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3109, pruned_loss=0.07879, over 790033.96 frames. ], batch size: 35, lr: 8.12e-03, grad_scale: 32.0 +2024-07-28 06:39:31,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=122653.33333333333, ans=0.0 +2024-07-28 06:39:32,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=122653.33333333333, ans=0.125 +2024-07-28 06:41:02,955 INFO [train.py:1114] (3/4) Epoch 10, batch 0, loss[loss=0.1756, simple_loss=0.2603, pruned_loss=0.04545, over 4837.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2603, pruned_loss=0.04545, over 4837.00 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:41:02,956 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 06:41:14,704 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.1773, simple_loss=0.2829, pruned_loss=0.03584, over 944034.00 frames. +2024-07-28 06:41:14,705 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 06:41:21,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=122682.66666666667, ans=0.0 +2024-07-28 06:41:31,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=122696.0, ans=0.125 +2024-07-28 06:41:32,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=122696.0, ans=0.2 +2024-07-28 06:41:39,088 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.63 vs. limit=6.0 +2024-07-28 06:41:52,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=122722.66666666667, ans=0.125 +2024-07-28 06:41:58,943 INFO [train.py:1114] (3/4) Epoch 10, batch 50, loss[loss=0.1978, simple_loss=0.2679, pruned_loss=0.06383, over 4611.00 frames. 
], tot_loss[loss=0.2122, simple_loss=0.2979, pruned_loss=0.0633, over 207074.58 frames. ], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:42:03,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.57 vs. limit=22.5 +2024-07-28 06:42:04,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=122749.33333333333, ans=0.0 +2024-07-28 06:42:06,808 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.950e+01 6.646e+01 7.258e+01 1.106e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-28 06:42:10,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.73 vs. limit=15.0 +2024-07-28 06:42:10,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=122749.33333333333, ans=0.04949747468305833 +2024-07-28 06:42:20,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122749.33333333333, ans=0.1 +2024-07-28 06:42:21,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.29 vs. limit=22.5 +2024-07-28 06:42:27,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=122762.66666666667, ans=0.2 +2024-07-28 06:42:34,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=122776.0, ans=0.125 +2024-07-28 06:42:46,651 INFO [train.py:1114] (3/4) Epoch 10, batch 100, loss[loss=0.1502, simple_loss=0.2358, pruned_loss=0.03231, over 4645.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2947, pruned_loss=0.06171, over 366169.71 frames. ], batch size: 12, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:43:27,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=122842.66666666667, ans=0.1 +2024-07-28 06:43:34,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=122842.66666666667, ans=0.0 +2024-07-28 06:43:42,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=122869.33333333333, ans=0.1 +2024-07-28 06:43:42,691 INFO [train.py:1114] (3/4) Epoch 10, batch 150, loss[loss=0.1578, simple_loss=0.2451, pruned_loss=0.03526, over 4623.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2896, pruned_loss=0.05943, over 494607.89 frames. ], batch size: 11, lr: 7.72e-03, grad_scale: 32.0 +2024-07-28 06:43:49,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=122882.66666666667, ans=0.0 +2024-07-28 06:43:51,038 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.790e+01 6.360e+01 7.461e+01 1.069e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:43:54,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.09 vs. 
limit=15.0 +2024-07-28 06:44:09,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.98 vs. limit=22.5 +2024-07-28 06:44:13,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=122909.33333333333, ans=0.0 +2024-07-28 06:44:16,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.18 vs. limit=22.5 +2024-07-28 06:44:35,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=122922.66666666667, ans=0.2 +2024-07-28 06:44:37,853 INFO [train.py:1114] (3/4) Epoch 10, batch 200, loss[loss=0.2156, simple_loss=0.3039, pruned_loss=0.06366, over 4615.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.289, pruned_loss=0.05889, over 594304.06 frames. ], batch size: 22, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:44:51,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=122936.0, ans=0.1 +2024-07-28 06:44:57,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-07-28 06:44:57,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=122949.33333333333, ans=0.0 +2024-07-28 06:45:08,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=122962.66666666667, ans=0.025 +2024-07-28 06:45:28,936 INFO [train.py:1114] (3/4) Epoch 10, batch 250, loss[loss=0.2051, simple_loss=0.2986, pruned_loss=0.05577, over 4612.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2878, pruned_loss=0.05809, over 670855.31 frames. ], batch size: 16, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:45:32,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=123002.66666666667, ans=0.125 +2024-07-28 06:45:34,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=123002.66666666667, ans=0.125 +2024-07-28 06:45:35,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=123002.66666666667, ans=0.125 +2024-07-28 06:45:37,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.24 vs. limit=15.0 +2024-07-28 06:45:38,192 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.690e+01 6.559e+01 7.773e+01 1.314e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 06:45:42,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. 
limit=15.0 +2024-07-28 06:45:42,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123016.0, ans=0.0 +2024-07-28 06:45:45,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=123029.33333333333, ans=0.125 +2024-07-28 06:45:48,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=123029.33333333333, ans=0.125 +2024-07-28 06:45:57,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=123056.0, ans=0.015 +2024-07-28 06:46:08,656 INFO [train.py:1114] (3/4) Epoch 10, batch 300, loss[loss=0.1826, simple_loss=0.2805, pruned_loss=0.04233, over 4805.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2873, pruned_loss=0.05792, over 730205.41 frames. ], batch size: 15, lr: 7.71e-03, grad_scale: 32.0 +2024-07-28 06:46:17,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=123082.66666666667, ans=0.2 +2024-07-28 06:46:18,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=123082.66666666667, ans=0.125 +2024-07-28 06:46:22,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0 +2024-07-28 06:46:23,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123096.0, ans=0.1 +2024-07-28 06:46:30,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=123096.0, ans=0.125 +2024-07-28 06:46:45,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=123122.66666666667, ans=0.0 +2024-07-28 06:46:48,126 INFO [train.py:1114] (3/4) Epoch 10, batch 350, loss[loss=0.1835, simple_loss=0.2698, pruned_loss=0.04867, over 4945.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2877, pruned_loss=0.05761, over 776281.07 frames. ], batch size: 12, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:46:51,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=123136.0, ans=0.125 +2024-07-28 06:46:53,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=123136.0, ans=0.2 +2024-07-28 06:46:59,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=123149.33333333333, ans=0.125 +2024-07-28 06:47:00,720 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.536e+01 6.033e+01 6.929e+01 1.043e+02, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 06:47:48,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.41 vs. limit=22.5 +2024-07-28 06:50:15,407 INFO [train.py:1114] (3/4) Epoch 10, batch 400, loss[loss=0.1896, simple_loss=0.2831, pruned_loss=0.04808, over 4697.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2874, pruned_loss=0.05734, over 813632.97 frames. 
], batch size: 13, lr: 7.71e-03, grad_scale: 64.0 +2024-07-28 06:50:45,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123216.0, ans=0.1 +2024-07-28 06:50:49,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123216.0, ans=0.1 +2024-07-28 06:51:19,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=123242.66666666667, ans=0.125 +2024-07-28 06:52:07,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=123256.0, ans=0.125 +2024-07-28 06:52:21,001 INFO [train.py:1114] (3/4) Epoch 10, batch 450, loss[loss=0.1817, simple_loss=0.2796, pruned_loss=0.0419, over 4633.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2886, pruned_loss=0.05819, over 839122.29 frames. ], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:52:46,131 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.561e+01 6.292e+01 7.345e+01 1.157e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 06:53:30,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=123296.0, ans=0.09899494936611666 +2024-07-28 06:53:43,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=123296.0, ans=0.125 +2024-07-28 06:53:47,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-07-28 06:54:43,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.42 vs. limit=15.0 +2024-07-28 06:54:48,132 INFO [train.py:1114] (3/4) Epoch 10, batch 500, loss[loss=0.1925, simple_loss=0.2875, pruned_loss=0.04873, over 4682.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2873, pruned_loss=0.05763, over 861549.42 frames. 
], batch size: 15, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:54:58,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=123336.0, ans=0.0 +2024-07-28 06:55:05,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=123349.33333333333, ans=0.025 +2024-07-28 06:55:09,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=123349.33333333333, ans=0.2 +2024-07-28 06:55:29,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=123362.66666666667, ans=0.125 +2024-07-28 06:55:51,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123376.0, ans=0.1 +2024-07-28 06:55:55,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=123376.0, ans=0.2 +2024-07-28 06:56:06,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=123389.33333333333, ans=0.125 +2024-07-28 06:56:07,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=123389.33333333333, ans=0.5 +2024-07-28 06:56:13,170 INFO [train.py:1114] (3/4) Epoch 10, batch 550, loss[loss=0.1928, simple_loss=0.2867, pruned_loss=0.04943, over 4637.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2877, pruned_loss=0.05802, over 877899.33 frames. ], batch size: 17, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:56:14,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=123402.66666666667, ans=0.1 +2024-07-28 06:56:18,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=123402.66666666667, ans=0.0 +2024-07-28 06:56:21,996 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.657e+01 6.359e+01 7.249e+01 1.002e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 06:56:31,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=123429.33333333333, ans=0.125 +2024-07-28 06:56:56,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.87 vs. limit=15.0 +2024-07-28 06:57:03,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=123469.33333333333, ans=0.125 +2024-07-28 06:57:03,609 INFO [train.py:1114] (3/4) Epoch 10, batch 600, loss[loss=0.2009, simple_loss=0.2907, pruned_loss=0.05553, over 4613.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2882, pruned_loss=0.05812, over 892429.33 frames. ], batch size: 16, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:05,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.22 vs. 
limit=15.0 +2024-07-28 06:57:21,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=123496.0, ans=0.025 +2024-07-28 06:57:30,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=123509.33333333333, ans=0.125 +2024-07-28 06:57:34,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.19 vs. limit=15.0 +2024-07-28 06:57:38,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=123522.66666666667, ans=0.125 +2024-07-28 06:57:41,757 INFO [train.py:1114] (3/4) Epoch 10, batch 650, loss[loss=0.2149, simple_loss=0.3002, pruned_loss=0.06481, over 4749.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2871, pruned_loss=0.05752, over 904434.58 frames. ], batch size: 13, lr: 7.70e-03, grad_scale: 64.0 +2024-07-28 06:57:49,742 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.335e+01 5.819e+01 6.416e+01 7.118e+01 9.444e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 06:58:32,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=123589.33333333333, ans=0.0 +2024-07-28 06:58:33,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=123589.33333333333, ans=0.04949747468305833 +2024-07-28 06:58:37,441 INFO [train.py:1114] (3/4) Epoch 10, batch 700, loss[loss=0.216, simple_loss=0.2912, pruned_loss=0.07042, over 4635.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2883, pruned_loss=0.05826, over 911860.82 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 06:58:45,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=123602.66666666667, ans=0.2 +2024-07-28 06:58:52,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=123616.0, ans=0.125 +2024-07-28 06:59:00,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=123629.33333333333, ans=0.125 +2024-07-28 06:59:20,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.62 vs. limit=6.0 +2024-07-28 06:59:37,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=123642.66666666667, ans=0.0 +2024-07-28 06:59:59,347 INFO [train.py:1114] (3/4) Epoch 10, batch 750, loss[loss=0.202, simple_loss=0.2823, pruned_loss=0.06089, over 4688.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2875, pruned_loss=0.05792, over 918659.98 frames. ], batch size: 13, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:00:00,933 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:00:04,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=123669.33333333333, ans=0.2 +2024-07-28 07:00:05,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. 
limit=6.0 +2024-07-28 07:00:07,404 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.598e+01 6.088e+01 6.743e+01 1.006e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:00:07,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=123682.66666666667, ans=0.0 +2024-07-28 07:00:19,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=123696.0, ans=0.025 +2024-07-28 07:00:50,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=123709.33333333333, ans=0.125 +2024-07-28 07:00:51,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.37 vs. limit=15.0 +2024-07-28 07:01:01,969 INFO [train.py:1114] (3/4) Epoch 10, batch 800, loss[loss=0.1918, simple_loss=0.2678, pruned_loss=0.05786, over 4849.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2872, pruned_loss=0.05838, over 923309.37 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:09,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=123749.33333333333, ans=0.125 +2024-07-28 07:01:19,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=123762.66666666667, ans=0.04949747468305833 +2024-07-28 07:01:20,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=123762.66666666667, ans=0.125 +2024-07-28 07:01:26,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=123776.0, ans=0.125 +2024-07-28 07:01:28,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=123776.0, ans=0.0 +2024-07-28 07:01:38,557 INFO [train.py:1114] (3/4) Epoch 10, batch 850, loss[loss=0.2191, simple_loss=0.3051, pruned_loss=0.06654, over 4662.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2865, pruned_loss=0.05781, over 927498.84 frames. ], batch size: 14, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:01:39,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=123802.66666666667, ans=0.125 +2024-07-28 07:01:48,402 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.695e+01 6.333e+01 6.870e+01 1.740e+02, threshold=1.267e+02, percent-clipped=1.0 +2024-07-28 07:01:52,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=123816.0, ans=0.0 +2024-07-28 07:01:53,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=123829.33333333333, ans=0.125 +2024-07-28 07:01:55,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.26 vs. limit=15.0 +2024-07-28 07:02:01,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.11 vs. 
limit=10.0 +2024-07-28 07:02:22,329 INFO [train.py:1114] (3/4) Epoch 10, batch 900, loss[loss=0.1687, simple_loss=0.2707, pruned_loss=0.03331, over 4838.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.287, pruned_loss=0.05818, over 928339.41 frames. ], batch size: 12, lr: 7.69e-03, grad_scale: 64.0 +2024-07-28 07:02:27,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=123869.33333333333, ans=0.2 +2024-07-28 07:02:28,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.08 vs. limit=15.0 +2024-07-28 07:02:30,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=123882.66666666667, ans=0.02 +2024-07-28 07:02:41,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=123896.0, ans=0.125 +2024-07-28 07:02:45,915 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=1.799e-01 +2024-07-28 07:02:51,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=123922.66666666667, ans=0.1 +2024-07-28 07:02:56,036 INFO [train.py:1114] (3/4) Epoch 10, batch 950, loss[loss=0.2131, simple_loss=0.2961, pruned_loss=0.06502, over 4767.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2873, pruned_loss=0.05828, over 930589.47 frames. ], batch size: 12, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:02:59,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=123936.0, ans=0.125 +2024-07-28 07:03:04,118 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.892e+01 5.603e+01 6.108e+01 6.683e+01 9.503e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 07:03:26,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=123989.33333333333, ans=0.1 +2024-07-28 07:03:29,723 INFO [train.py:1114] (3/4) Epoch 10, batch 1000, loss[loss=0.1941, simple_loss=0.2856, pruned_loss=0.05134, over 4964.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2881, pruned_loss=0.05872, over 929948.92 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:03:32,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=124002.66666666667, ans=0.125 +2024-07-28 07:03:55,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124016.0, ans=0.125 +2024-07-28 07:03:58,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=124029.33333333333, ans=0.025 +2024-07-28 07:03:58,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=124029.33333333333, ans=0.2 +2024-07-28 07:04:00,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=124029.33333333333, ans=0.125 +2024-07-28 07:04:03,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.64 vs. 
limit=15.0 +2024-07-28 07:04:20,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=124056.0, ans=0.125 +2024-07-28 07:04:22,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=124056.0, ans=0.0 +2024-07-28 07:04:25,367 INFO [train.py:1114] (3/4) Epoch 10, batch 1050, loss[loss=0.1818, simple_loss=0.2719, pruned_loss=0.04583, over 4870.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2866, pruned_loss=0.058, over 932163.53 frames. ], batch size: 14, lr: 7.68e-03, grad_scale: 64.0 +2024-07-28 07:04:50,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.89 vs. limit=15.0 +2024-07-28 07:04:55,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.602e+01 6.129e+01 7.252e+01 1.285e+02, threshold=1.226e+02, percent-clipped=1.0 +2024-07-28 07:05:04,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.98 vs. limit=22.5 +2024-07-28 07:05:07,463 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:05:25,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=124122.66666666667, ans=0.07 +2024-07-28 07:05:33,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.14 vs. limit=10.0 +2024-07-28 07:05:37,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=124122.66666666667, ans=0.0 +2024-07-28 07:05:38,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.93 vs. limit=22.5 +2024-07-28 07:05:39,258 INFO [train.py:1114] (3/4) Epoch 10, batch 1100, loss[loss=0.1943, simple_loss=0.2773, pruned_loss=0.05563, over 4897.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2864, pruned_loss=0.05796, over 934669.27 frames. ], batch size: 13, lr: 7.68e-03, grad_scale: 32.0 +2024-07-28 07:05:39,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=12.0 +2024-07-28 07:05:44,872 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.05 vs. limit=15.0 +2024-07-28 07:05:45,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.05 vs. 
limit=15.0 +2024-07-28 07:05:48,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124149.33333333333, ans=0.1 +2024-07-28 07:06:00,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=124162.66666666667, ans=0.2 +2024-07-28 07:06:18,270 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:06:19,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=124189.33333333333, ans=0.0 +2024-07-28 07:06:23,651 INFO [train.py:1114] (3/4) Epoch 10, batch 1150, loss[loss=0.1861, simple_loss=0.2759, pruned_loss=0.04811, over 4902.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2857, pruned_loss=0.05735, over 934700.35 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:07:00,534 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.669e+01 6.088e+01 6.784e+01 1.007e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 07:07:00,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=124216.0, ans=0.0 +2024-07-28 07:07:48,875 INFO [train.py:1114] (3/4) Epoch 10, batch 1200, loss[loss=0.1808, simple_loss=0.2832, pruned_loss=0.03918, over 4873.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2876, pruned_loss=0.0581, over 933627.22 frames. ], batch size: 14, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:07:48,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124269.33333333333, ans=0.1 +2024-07-28 07:07:56,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.08 vs. limit=15.0 +2024-07-28 07:07:57,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.49 vs. limit=15.0 +2024-07-28 07:08:10,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124296.0, ans=0.125 +2024-07-28 07:08:27,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=124309.33333333333, ans=0.125 +2024-07-28 07:08:41,846 INFO [train.py:1114] (3/4) Epoch 10, batch 1250, loss[loss=0.2008, simple_loss=0.2894, pruned_loss=0.05613, over 4790.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2883, pruned_loss=0.05792, over 937613.74 frames. ], batch size: 15, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:08:50,579 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.912e+01 6.433e+01 7.478e+01 1.098e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-28 07:08:51,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124349.33333333333, ans=0.0 +2024-07-28 07:08:54,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.40 vs. 
limit=15.0 +2024-07-28 07:09:05,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=124376.0, ans=0.125 +2024-07-28 07:09:20,435 INFO [train.py:1114] (3/4) Epoch 10, batch 1300, loss[loss=0.2106, simple_loss=0.3023, pruned_loss=0.05942, over 4719.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2874, pruned_loss=0.05766, over 939274.36 frames. ], batch size: 19, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:09:22,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=124402.66666666667, ans=0.125 +2024-07-28 07:09:24,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=124402.66666666667, ans=0.125 +2024-07-28 07:09:28,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.10 vs. limit=15.0 +2024-07-28 07:09:36,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=124416.0, ans=0.125 +2024-07-28 07:09:51,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=124429.33333333333, ans=0.0 +2024-07-28 07:09:52,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5 +2024-07-28 07:09:54,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=124429.33333333333, ans=0.0 +2024-07-28 07:09:54,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124429.33333333333, ans=0.125 +2024-07-28 07:09:54,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=124429.33333333333, ans=0.0 +2024-07-28 07:10:02,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0 +2024-07-28 07:10:10,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=124456.0, ans=0.025 +2024-07-28 07:10:15,029 INFO [train.py:1114] (3/4) Epoch 10, batch 1350, loss[loss=0.1636, simple_loss=0.2589, pruned_loss=0.03416, over 4757.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2868, pruned_loss=0.05706, over 941331.23 frames. ], batch size: 13, lr: 7.67e-03, grad_scale: 32.0 +2024-07-28 07:10:23,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.509e+01 5.516e+01 6.216e+01 7.014e+01 1.025e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 07:10:23,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=124482.66666666667, ans=0.0 +2024-07-28 07:10:29,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=124496.0, ans=0.0 +2024-07-28 07:10:39,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.26 vs. 
limit=15.0 +2024-07-28 07:10:43,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=124522.66666666667, ans=0.09899494936611666 +2024-07-28 07:10:47,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=124522.66666666667, ans=0.0 +2024-07-28 07:10:48,573 INFO [train.py:1114] (3/4) Epoch 10, batch 1400, loss[loss=0.1706, simple_loss=0.2434, pruned_loss=0.04885, over 4705.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2871, pruned_loss=0.0573, over 943060.42 frames. ], batch size: 11, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:10:51,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=124536.0, ans=0.025 +2024-07-28 07:10:57,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=124549.33333333333, ans=0.125 +2024-07-28 07:11:02,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124562.66666666667, ans=0.1 +2024-07-28 07:11:13,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124576.0, ans=0.1 +2024-07-28 07:11:17,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=124589.33333333333, ans=0.0 +2024-07-28 07:11:20,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=124589.33333333333, ans=0.125 +2024-07-28 07:11:21,991 INFO [train.py:1114] (3/4) Epoch 10, batch 1450, loss[loss=0.21, simple_loss=0.2942, pruned_loss=0.06293, over 4691.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2872, pruned_loss=0.05722, over 942764.67 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:11:23,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0 +2024-07-28 07:11:24,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124602.66666666667, ans=0.125 +2024-07-28 07:11:29,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.35 vs. limit=5.0 +2024-07-28 07:11:30,548 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.685e+01 6.213e+01 7.325e+01 1.109e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 07:11:47,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=124642.66666666667, ans=0.125 +2024-07-28 07:11:47,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=124656.0, ans=0.125 +2024-07-28 07:11:50,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-07-28 07:11:59,750 INFO [train.py:1114] (3/4) Epoch 10, batch 1500, loss[loss=0.1652, simple_loss=0.283, pruned_loss=0.02373, over 4810.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2881, pruned_loss=0.05749, over 942342.77 frames. 
], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:03,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=124669.33333333333, ans=0.125 +2024-07-28 07:12:06,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=124682.66666666667, ans=0.09899494936611666 +2024-07-28 07:12:09,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124682.66666666667, ans=0.1 +2024-07-28 07:12:15,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=124696.0, ans=0.2 +2024-07-28 07:12:15,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=124696.0, ans=0.125 +2024-07-28 07:12:18,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124696.0, ans=0.1 +2024-07-28 07:12:20,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-07-28 07:12:27,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.39 vs. limit=6.0 +2024-07-28 07:12:29,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=124722.66666666667, ans=0.025 +2024-07-28 07:12:29,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124722.66666666667, ans=0.125 +2024-07-28 07:12:32,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.59 vs. limit=15.0 +2024-07-28 07:12:33,111 INFO [train.py:1114] (3/4) Epoch 10, batch 1550, loss[loss=0.2064, simple_loss=0.3013, pruned_loss=0.05574, over 4899.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2878, pruned_loss=0.0577, over 938729.69 frames. ], batch size: 15, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:12:33,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=124736.0, ans=0.125 +2024-07-28 07:12:41,761 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.520e+01 6.164e+01 6.899e+01 9.824e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 07:12:43,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=124749.33333333333, ans=0.0 +2024-07-28 07:12:49,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124762.66666666667, ans=0.125 +2024-07-28 07:12:49,455 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.28 vs. 
limit=15.0 +2024-07-28 07:13:04,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=124789.33333333333, ans=0.0 +2024-07-28 07:13:05,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.02 vs. limit=15.0 +2024-07-28 07:13:06,577 INFO [train.py:1114] (3/4) Epoch 10, batch 1600, loss[loss=0.1728, simple_loss=0.2625, pruned_loss=0.04158, over 4870.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2883, pruned_loss=0.05811, over 937247.05 frames. ], batch size: 14, lr: 7.66e-03, grad_scale: 32.0 +2024-07-28 07:13:09,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=124802.66666666667, ans=0.1 +2024-07-28 07:13:17,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=124816.0, ans=0.0 +2024-07-28 07:13:23,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=124829.33333333333, ans=0.125 +2024-07-28 07:13:31,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=124842.66666666667, ans=0.125 +2024-07-28 07:13:33,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=124856.0, ans=0.125 +2024-07-28 07:13:34,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=124856.0, ans=0.2 +2024-07-28 07:13:40,193 INFO [train.py:1114] (3/4) Epoch 10, batch 1650, loss[loss=0.2245, simple_loss=0.3112, pruned_loss=0.06884, over 4670.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2881, pruned_loss=0.05775, over 937086.78 frames. ], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:13:42,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=124869.33333333333, ans=0.125 +2024-07-28 07:13:48,849 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.790e+01 6.415e+01 7.555e+01 1.180e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 07:13:49,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-28 07:13:58,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.04 vs. 
limit=22.5 +2024-07-28 07:14:00,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=124909.33333333333, ans=0.0 +2024-07-28 07:14:10,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=124922.66666666667, ans=0.125 +2024-07-28 07:14:13,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=124922.66666666667, ans=0.1 +2024-07-28 07:14:14,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=124922.66666666667, ans=0.1 +2024-07-28 07:14:16,910 INFO [train.py:1114] (3/4) Epoch 10, batch 1700, loss[loss=0.1791, simple_loss=0.243, pruned_loss=0.05762, over 4717.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2871, pruned_loss=0.05747, over 939091.75 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:14:25,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.96 vs. limit=10.0 +2024-07-28 07:14:31,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=124949.33333333333, ans=0.125 +2024-07-28 07:14:31,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=124949.33333333333, ans=0.125 +2024-07-28 07:14:33,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0 +2024-07-28 07:14:35,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=124962.66666666667, ans=0.125 +2024-07-28 07:14:41,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=124976.0, ans=0.0 +2024-07-28 07:14:47,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=124989.33333333333, ans=0.1 +2024-07-28 07:14:53,745 INFO [train.py:1114] (3/4) Epoch 10, batch 1750, loss[loss=0.1767, simple_loss=0.2528, pruned_loss=0.05034, over 4809.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2875, pruned_loss=0.05794, over 940328.92 frames. ], batch size: 11, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:14:57,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.36 vs. limit=22.5 +2024-07-28 07:15:04,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.898e+01 5.615e+01 6.197e+01 6.752e+01 9.322e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 07:15:08,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=125029.33333333333, ans=0.125 +2024-07-28 07:15:08,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.16 vs. limit=22.5 +2024-07-28 07:15:14,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.39 vs. 
limit=15.0 +2024-07-28 07:15:14,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125042.66666666667, ans=0.125 +2024-07-28 07:15:19,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=125042.66666666667, ans=0.125 +2024-07-28 07:15:31,048 INFO [train.py:1114] (3/4) Epoch 10, batch 1800, loss[loss=0.1677, simple_loss=0.2648, pruned_loss=0.03528, over 4638.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2873, pruned_loss=0.05757, over 940923.93 frames. ], batch size: 13, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:16:00,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=125122.66666666667, ans=0.125 +2024-07-28 07:16:05,134 INFO [train.py:1114] (3/4) Epoch 10, batch 1850, loss[loss=0.1904, simple_loss=0.2903, pruned_loss=0.0453, over 4801.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2861, pruned_loss=0.05713, over 941217.55 frames. ], batch size: 14, lr: 7.65e-03, grad_scale: 32.0 +2024-07-28 07:16:05,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=125136.0, ans=0.125 +2024-07-28 07:16:14,684 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.312e+01 5.824e+01 6.671e+01 8.109e+01 1.121e+02, threshold=1.334e+02, percent-clipped=0.0 +2024-07-28 07:16:22,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.05 vs. limit=15.0 +2024-07-28 07:16:36,100 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:16:37,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=125176.0, ans=0.2 +2024-07-28 07:16:37,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=125176.0, ans=0.125 +2024-07-28 07:16:39,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=125189.33333333333, ans=0.0 +2024-07-28 07:16:45,549 INFO [train.py:1114] (3/4) Epoch 10, batch 1900, loss[loss=0.2148, simple_loss=0.3025, pruned_loss=0.06356, over 4663.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2874, pruned_loss=0.05782, over 942313.71 frames. ], batch size: 14, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:17:09,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=125229.33333333333, ans=0.125 +2024-07-28 07:17:17,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0 +2024-07-28 07:17:28,518 INFO [train.py:1114] (3/4) Epoch 10, batch 1950, loss[loss=0.1556, simple_loss=0.2392, pruned_loss=0.03597, over 4896.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2872, pruned_loss=0.05748, over 944306.73 frames. 
], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:17:30,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=125269.33333333333, ans=0.0 +2024-07-28 07:17:37,347 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.662e+01 6.185e+01 7.189e+01 1.102e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 07:17:42,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.51 vs. limit=22.5 +2024-07-28 07:17:46,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=125296.0, ans=0.07 +2024-07-28 07:17:54,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=125309.33333333333, ans=0.0 +2024-07-28 07:17:57,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=125322.66666666667, ans=0.0 +2024-07-28 07:18:01,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=125322.66666666667, ans=0.1 +2024-07-28 07:18:04,794 INFO [train.py:1114] (3/4) Epoch 10, batch 2000, loss[loss=0.1771, simple_loss=0.2541, pruned_loss=0.05004, over 4793.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2881, pruned_loss=0.05808, over 941860.12 frames. ], batch size: 11, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:18:10,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=125336.0, ans=0.07 +2024-07-28 07:18:12,591 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=16.08 vs. limit=15.0 +2024-07-28 07:18:16,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=8.00 vs. limit=15.0 +2024-07-28 07:18:24,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=125376.0, ans=0.05 +2024-07-28 07:18:30,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.82 vs. limit=6.0 +2024-07-28 07:18:38,321 INFO [train.py:1114] (3/4) Epoch 10, batch 2050, loss[loss=0.1787, simple_loss=0.2483, pruned_loss=0.05458, over 4627.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2876, pruned_loss=0.05856, over 939961.29 frames. 
], batch size: 11, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:18:47,042 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.499e+01 5.685e+01 6.326e+01 7.286e+01 1.205e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 07:18:53,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=125429.33333333333, ans=0.95 +2024-07-28 07:19:02,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=125442.66666666667, ans=0.0 +2024-07-28 07:19:03,362 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:19:04,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=125456.0, ans=0.0 +2024-07-28 07:19:10,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=125456.0, ans=0.1 +2024-07-28 07:19:13,196 INFO [train.py:1114] (3/4) Epoch 10, batch 2100, loss[loss=0.2104, simple_loss=0.3001, pruned_loss=0.06036, over 4765.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2868, pruned_loss=0.05776, over 941668.50 frames. ], batch size: 13, lr: 7.64e-03, grad_scale: 32.0 +2024-07-28 07:19:24,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=125482.66666666667, ans=0.0 +2024-07-28 07:19:24,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=125482.66666666667, ans=0.0 +2024-07-28 07:19:31,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125496.0, ans=0.125 +2024-07-28 07:19:33,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=125496.0, ans=0.0 +2024-07-28 07:19:36,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=12.50 vs. limit=15.0 +2024-07-28 07:19:37,524 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.43 vs. limit=15.0 +2024-07-28 07:19:46,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=125522.66666666667, ans=0.07 +2024-07-28 07:19:47,875 INFO [train.py:1114] (3/4) Epoch 10, batch 2150, loss[loss=0.1881, simple_loss=0.2852, pruned_loss=0.04546, over 4898.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2859, pruned_loss=0.0575, over 944560.23 frames. 
], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:19:56,663 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.831e+01 5.697e+01 6.227e+01 7.381e+01 1.023e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 07:19:57,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=125549.33333333333, ans=0.0 +2024-07-28 07:20:01,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=125549.33333333333, ans=0.0 +2024-07-28 07:20:10,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.82 vs. limit=22.5 +2024-07-28 07:20:16,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=125576.0, ans=0.09899494936611666 +2024-07-28 07:20:24,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=125589.33333333333, ans=0.125 +2024-07-28 07:20:26,045 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 07:20:29,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125589.33333333333, ans=0.125 +2024-07-28 07:20:30,845 INFO [train.py:1114] (3/4) Epoch 10, batch 2200, loss[loss=0.1909, simple_loss=0.2821, pruned_loss=0.04979, over 4819.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2858, pruned_loss=0.05719, over 943849.02 frames. ], batch size: 14, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:20:34,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=125602.66666666667, ans=0.125 +2024-07-28 07:20:37,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.04 vs. limit=15.0 +2024-07-28 07:20:37,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=125616.0, ans=0.5 +2024-07-28 07:21:26,430 INFO [train.py:1114] (3/4) Epoch 10, batch 2250, loss[loss=0.2137, simple_loss=0.3084, pruned_loss=0.05955, over 4704.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2859, pruned_loss=0.05746, over 942158.28 frames. 
], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:21:29,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=125669.33333333333, ans=0.035 +2024-07-28 07:21:29,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=125669.33333333333, ans=0.125 +2024-07-28 07:21:32,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=125682.66666666667, ans=0.0 +2024-07-28 07:21:35,182 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.513e+01 5.590e+01 6.237e+01 6.942e+01 1.306e+02, threshold=1.247e+02, percent-clipped=1.0 +2024-07-28 07:21:43,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=125682.66666666667, ans=10.0 +2024-07-28 07:21:44,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125682.66666666667, ans=0.1 +2024-07-28 07:21:45,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=125682.66666666667, ans=0.2 +2024-07-28 07:21:48,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=125696.0, ans=0.125 +2024-07-28 07:22:09,995 INFO [train.py:1114] (3/4) Epoch 10, batch 2300, loss[loss=0.185, simple_loss=0.2613, pruned_loss=0.0543, over 4943.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2846, pruned_loss=0.05676, over 939494.41 frames. ], batch size: 12, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:14,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.33 vs. limit=15.0 +2024-07-28 07:22:19,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=125749.33333333333, ans=0.07 +2024-07-28 07:22:28,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=125762.66666666667, ans=0.0 +2024-07-28 07:22:39,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.98 vs. limit=10.0 +2024-07-28 07:22:40,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.77 vs. limit=15.0 +2024-07-28 07:22:45,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=125789.33333333333, ans=0.07 +2024-07-28 07:22:46,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.49 vs. limit=15.0 +2024-07-28 07:22:47,879 INFO [train.py:1114] (3/4) Epoch 10, batch 2350, loss[loss=0.2031, simple_loss=0.2975, pruned_loss=0.05435, over 4632.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2851, pruned_loss=0.057, over 941646.40 frames. 
], batch size: 13, lr: 7.63e-03, grad_scale: 32.0 +2024-07-28 07:22:52,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=125802.66666666667, ans=0.1 +2024-07-28 07:22:56,458 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.493e+01 6.004e+01 6.754e+01 1.065e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 07:23:00,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=125829.33333333333, ans=0.125 +2024-07-28 07:23:04,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=125829.33333333333, ans=15.0 +2024-07-28 07:23:12,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=125842.66666666667, ans=0.0 +2024-07-28 07:23:20,988 INFO [train.py:1114] (3/4) Epoch 10, batch 2400, loss[loss=0.1966, simple_loss=0.2929, pruned_loss=0.05019, over 4642.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2862, pruned_loss=0.05745, over 941493.22 frames. ], batch size: 12, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:23:27,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=125882.66666666667, ans=0.125 +2024-07-28 07:23:34,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125896.0, ans=0.1 +2024-07-28 07:23:34,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=125896.0, ans=15.0 +2024-07-28 07:23:54,379 INFO [train.py:1114] (3/4) Epoch 10, batch 2450, loss[loss=0.2264, simple_loss=0.3133, pruned_loss=0.06979, over 4694.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2878, pruned_loss=0.05838, over 937058.83 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:23:59,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=125936.0, ans=0.125 +2024-07-28 07:24:02,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.69 vs. limit=12.0 +2024-07-28 07:24:02,998 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.788e+01 5.824e+01 6.375e+01 7.344e+01 1.011e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 07:24:13,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=125962.66666666667, ans=0.125 +2024-07-28 07:24:18,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=125976.0, ans=0.04949747468305833 +2024-07-28 07:24:20,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=125989.33333333333, ans=0.0 +2024-07-28 07:24:24,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=125989.33333333333, ans=0.1 +2024-07-28 07:24:27,191 INFO [train.py:1114] (3/4) Epoch 10, batch 2500, loss[loss=0.2424, simple_loss=0.3326, pruned_loss=0.07614, over 4806.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2878, pruned_loss=0.05836, over 938747.12 frames. 
], batch size: 14, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:24:40,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=126016.0, ans=0.125 +2024-07-28 07:24:43,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.93 vs. limit=22.5 +2024-07-28 07:24:51,506 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.62 vs. limit=12.0 +2024-07-28 07:24:53,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=126042.66666666667, ans=0.125 +2024-07-28 07:25:04,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=126056.0, ans=0.125 +2024-07-28 07:25:05,721 INFO [train.py:1114] (3/4) Epoch 10, batch 2550, loss[loss=0.1494, simple_loss=0.2252, pruned_loss=0.03681, over 4794.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2874, pruned_loss=0.05795, over 938019.42 frames. ], batch size: 11, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:06,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.50 vs. limit=15.0 +2024-07-28 07:25:14,258 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.571e+01 6.137e+01 7.112e+01 1.171e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 07:25:21,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=126096.0, ans=0.2 +2024-07-28 07:25:25,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.37 vs. limit=22.5 +2024-07-28 07:25:30,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=126109.33333333333, ans=0.2 +2024-07-28 07:25:38,945 INFO [train.py:1114] (3/4) Epoch 10, batch 2600, loss[loss=0.1997, simple_loss=0.2817, pruned_loss=0.05888, over 4893.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2878, pruned_loss=0.0581, over 936938.82 frames. ], batch size: 13, lr: 7.62e-03, grad_scale: 32.0 +2024-07-28 07:25:43,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=126136.0, ans=0.125 +2024-07-28 07:25:51,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=126162.66666666667, ans=0.125 +2024-07-28 07:25:59,217 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.48 vs. limit=15.0 +2024-07-28 07:26:04,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=126176.0, ans=0.125 +2024-07-28 07:26:04,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=126189.33333333333, ans=0.125 +2024-07-28 07:26:12,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.14 vs. 
limit=15.0 +2024-07-28 07:26:15,514 INFO [train.py:1114] (3/4) Epoch 10, batch 2650, loss[loss=0.186, simple_loss=0.2795, pruned_loss=0.04622, over 4639.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2878, pruned_loss=0.05775, over 939089.60 frames. ], batch size: 16, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:17,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=126202.66666666667, ans=0.2 +2024-07-28 07:26:25,845 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.752e+01 6.121e+01 6.935e+01 9.272e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 07:26:28,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.25 vs. limit=15.0 +2024-07-28 07:26:28,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=126216.0, ans=0.1 +2024-07-28 07:26:36,922 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.96 vs. limit=15.0 +2024-07-28 07:26:39,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=126242.66666666667, ans=10.0 +2024-07-28 07:26:41,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=126242.66666666667, ans=0.125 +2024-07-28 07:26:46,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=126256.0, ans=0.0 +2024-07-28 07:26:46,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=126256.0, ans=0.05 +2024-07-28 07:26:48,080 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.25 vs. limit=15.0 +2024-07-28 07:26:51,011 INFO [train.py:1114] (3/4) Epoch 10, batch 2700, loss[loss=0.1975, simple_loss=0.3116, pruned_loss=0.04168, over 4750.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2883, pruned_loss=0.05761, over 939128.70 frames. ], batch size: 14, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:26:56,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=126269.33333333333, ans=0.125 +2024-07-28 07:27:10,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-28 07:27:14,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=126309.33333333333, ans=0.1 +2024-07-28 07:27:15,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. 
limit=15.0 +2024-07-28 07:27:18,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=126309.33333333333, ans=0.125 +2024-07-28 07:27:20,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=126322.66666666667, ans=0.125 +2024-07-28 07:27:27,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=126322.66666666667, ans=0.125 +2024-07-28 07:27:28,537 INFO [train.py:1114] (3/4) Epoch 10, batch 2750, loss[loss=0.1778, simple_loss=0.2672, pruned_loss=0.04423, over 4710.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2874, pruned_loss=0.05717, over 939631.32 frames. ], batch size: 12, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:27:37,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.293e+01 5.804e+01 6.361e+01 7.427e+01 1.283e+02, threshold=1.272e+02, percent-clipped=1.0 +2024-07-28 07:27:39,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff2.min_abs, batch_count=126349.33333333333, ans=0.1 +2024-07-28 07:27:41,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=126362.66666666667, ans=0.025 +2024-07-28 07:27:50,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=126376.0, ans=0.1 +2024-07-28 07:28:02,091 INFO [train.py:1114] (3/4) Epoch 10, batch 2800, loss[loss=0.2479, simple_loss=0.3232, pruned_loss=0.08631, over 3453.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2876, pruned_loss=0.05764, over 937648.33 frames. ], batch size: 37, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:08,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=126416.0, ans=0.05 +2024-07-28 07:28:11,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.82 vs. 
limit=22.5 +2024-07-28 07:28:18,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126429.33333333333, ans=0.0 +2024-07-28 07:28:19,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=126429.33333333333, ans=0.025 +2024-07-28 07:28:22,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126442.66666666667, ans=0.0 +2024-07-28 07:28:23,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=126442.66666666667, ans=0.07 +2024-07-28 07:28:26,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=126442.66666666667, ans=0.0 +2024-07-28 07:28:27,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=126442.66666666667, ans=0.0 +2024-07-28 07:28:31,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=126456.0, ans=0.125 +2024-07-28 07:28:35,570 INFO [train.py:1114] (3/4) Epoch 10, batch 2850, loss[loss=0.2057, simple_loss=0.2932, pruned_loss=0.05913, over 4961.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2876, pruned_loss=0.05748, over 935982.99 frames. ], batch size: 13, lr: 7.61e-03, grad_scale: 32.0 +2024-07-28 07:28:36,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.77 vs. limit=15.0 +2024-07-28 07:28:41,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=126482.66666666667, ans=0.125 +2024-07-28 07:28:44,290 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.758e+01 6.530e+01 7.801e+01 1.215e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-28 07:28:58,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=126509.33333333333, ans=0.0 +2024-07-28 07:28:59,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.64 vs. limit=10.0 +2024-07-28 07:29:02,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=126522.66666666667, ans=0.125 +2024-07-28 07:29:08,560 INFO [train.py:1114] (3/4) Epoch 10, batch 2900, loss[loss=0.2077, simple_loss=0.2822, pruned_loss=0.06662, over 4830.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2883, pruned_loss=0.05754, over 939791.12 frames. 
], batch size: 13, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:09,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=126536.0, ans=0.1 +2024-07-28 07:29:12,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=126536.0, ans=0.0 +2024-07-28 07:29:20,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=126549.33333333333, ans=0.125 +2024-07-28 07:29:22,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=126562.66666666667, ans=0.0 +2024-07-28 07:29:23,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=126562.66666666667, ans=0.0 +2024-07-28 07:29:32,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=126576.0, ans=0.0 +2024-07-28 07:29:33,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.61 vs. limit=15.0 +2024-07-28 07:29:42,670 INFO [train.py:1114] (3/4) Epoch 10, batch 2950, loss[loss=0.212, simple_loss=0.3007, pruned_loss=0.06171, over 4718.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2877, pruned_loss=0.0578, over 938870.54 frames. ], batch size: 12, lr: 7.60e-03, grad_scale: 32.0 +2024-07-28 07:29:42,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=126602.66666666667, ans=0.125 +2024-07-28 07:29:44,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=126602.66666666667, ans=0.025 +2024-07-28 07:29:50,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=126616.0, ans=0.025 +2024-07-28 07:29:51,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.710e+01 6.450e+01 7.485e+01 1.036e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 07:29:52,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=126616.0, ans=0.125 +2024-07-28 07:30:01,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=126629.33333333333, ans=0.0 +2024-07-28 07:30:08,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=126642.66666666667, ans=0.0 +2024-07-28 07:30:08,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.72 vs. limit=15.0 +2024-07-28 07:30:10,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=126656.0, ans=0.125 +2024-07-28 07:30:24,807 INFO [train.py:1114] (3/4) Epoch 10, batch 3000, loss[loss=0.1789, simple_loss=0.2658, pruned_loss=0.046, over 4758.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2871, pruned_loss=0.05714, over 938476.59 frames. 
], batch size: 13, lr: 7.60e-03, grad_scale: 32.0
+2024-07-28 07:30:24,807 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 07:30:40,903 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.8032, 2.1904, 2.3340, 1.8187, 2.3044, 2.3967, 2.5060, 2.2557],
+ device='cuda:3')
+2024-07-28 07:30:42,396 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.173, simple_loss=0.277, pruned_loss=0.03444, over 944034.00 frames.
+2024-07-28 07:30:42,397 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 07:30:51,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=126682.66666666667, ans=0.125
+2024-07-28 07:31:04,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=126709.33333333333, ans=0.0
+2024-07-28 07:31:06,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=126709.33333333333, ans=0.125
+2024-07-28 07:31:17,819 INFO [train.py:1114] (3/4) Epoch 10, batch 3050, loss[loss=0.1686, simple_loss=0.2563, pruned_loss=0.04047, over 4645.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2885, pruned_loss=0.05796, over 937620.48 frames. ], batch size: 12, lr: 7.60e-03, grad_scale: 32.0
+2024-07-28 07:31:19,318 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:31:26,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.79 vs. limit=15.0
+2024-07-28 07:31:38,385 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.667e+01 6.279e+01 7.137e+01 1.004e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-28 07:32:04,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=126776.0, ans=0.125
+2024-07-28 07:32:04,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.81 vs. limit=22.5
+2024-07-28 07:32:04,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=126776.0, ans=0.0
+2024-07-28 07:32:17,191 INFO [train.py:1114] (3/4) Epoch 10, batch 3100, loss[loss=0.2011, simple_loss=0.2901, pruned_loss=0.05608, over 4639.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2874, pruned_loss=0.05755, over 938507.82 frames. ], batch size: 16, lr: 7.60e-03, grad_scale: 64.0
+2024-07-28 07:32:18,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.91 vs. limit=15.0
+2024-07-28 07:32:49,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=126856.0, ans=0.125
+2024-07-28 07:32:52,367 INFO [train.py:1114] (3/4) Epoch 10, batch 3150, loss[loss=0.2158, simple_loss=0.3032, pruned_loss=0.06419, over 4608.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2871, pruned_loss=0.05717, over 938210.97 frames. ], batch size: 17, lr: 7.59e-03, grad_scale: 64.0
+2024-07-28 07:32:53,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0
+2024-07-28 07:33:01,117 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.793e+01 5.563e+01 5.962e+01 7.006e+01 9.323e+01, threshold=1.192e+02, percent-clipped=0.0
+2024-07-28 07:33:09,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=126896.0, ans=0.035
+2024-07-28 07:33:15,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.91 vs. limit=10.0
+2024-07-28 07:33:19,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=126909.33333333333, ans=0.0
+2024-07-28 07:33:23,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=126922.66666666667, ans=0.0
+2024-07-28 07:33:27,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=126922.66666666667, ans=0.125
+2024-07-28 07:33:27,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=126922.66666666667, ans=0.025
+2024-07-28 07:33:29,756 INFO [train.py:1114] (3/4) Epoch 10, batch 3200, loss[loss=0.2177, simple_loss=0.3012, pruned_loss=0.06708, over 4828.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2869, pruned_loss=0.0571, over 939691.10 frames. ], batch size: 13, lr: 7.59e-03, grad_scale: 64.0
+2024-07-28 07:33:30,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.31 vs. limit=22.5
+2024-07-28 07:33:39,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.10 vs. limit=15.0
+2024-07-28 07:33:57,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=126989.33333333333, ans=0.0
+2024-07-28 07:34:02,610 INFO [train.py:1114] (3/4) Epoch 10, batch 3250, loss[loss=0.1814, simple_loss=0.2682, pruned_loss=0.04728, over 4932.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2876, pruned_loss=0.05704, over 940524.64 frames. ], batch size: 14, lr: 7.59e-03, grad_scale: 64.0
+2024-07-28 07:34:11,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.705e+01 5.496e+01 6.167e+01 6.993e+01 1.063e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 07:34:21,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=127029.33333333333, ans=0.125
+2024-07-28 07:34:23,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=14.89 vs. limit=22.5
+2024-07-28 07:34:31,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=127056.0, ans=0.0
+2024-07-28 07:34:36,026 INFO [train.py:1114] (3/4) Epoch 10, batch 3300, loss[loss=0.2363, simple_loss=0.3122, pruned_loss=0.08023, over 4703.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2876, pruned_loss=0.0576, over 940636.00 frames. ], batch size: 19, lr: 7.59e-03, grad_scale: 64.0
+2024-07-28 07:34:36,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=127069.33333333333, ans=0.025
+2024-07-28 07:34:41,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=12.0
+2024-07-28 07:34:42,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127082.66666666667, ans=0.1
+2024-07-28 07:34:45,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.45 vs. limit=15.0
+2024-07-28 07:34:49,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127096.0, ans=0.1
+2024-07-28 07:34:53,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=127096.0, ans=0.025
+2024-07-28 07:35:02,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=127122.66666666667, ans=0.125
+2024-07-28 07:35:08,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.19 vs. limit=22.5
+2024-07-28 07:35:09,152 INFO [train.py:1114] (3/4) Epoch 10, batch 3350, loss[loss=0.2174, simple_loss=0.312, pruned_loss=0.06135, over 4620.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2883, pruned_loss=0.05792, over 938285.62 frames. ], batch size: 17, lr: 7.59e-03, grad_scale: 64.0
+2024-07-28 07:35:17,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.856e+01 5.618e+01 6.272e+01 7.252e+01 1.069e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-28 07:35:23,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=127162.66666666667, ans=0.125
+2024-07-28 07:35:27,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=127162.66666666667, ans=0.05
+2024-07-28 07:35:27,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127162.66666666667, ans=0.1
+2024-07-28 07:35:28,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys.whitening_limit, batch_count=127176.0, ans=6.0
+2024-07-28 07:35:30,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=127176.0, ans=0.125
+2024-07-28 07:35:32,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=127176.0, ans=0.0
+2024-07-28 07:35:35,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=127189.33333333333, ans=0.05
+2024-07-28 07:35:36,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=127189.33333333333, ans=0.125
+2024-07-28 07:35:42,747 INFO [train.py:1114] (3/4) Epoch 10, batch 3400, loss[loss=0.1952, simple_loss=0.2783, pruned_loss=0.05607, over 4825.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2882, pruned_loss=0.05835, over 937164.21 frames. ], batch size: 11, lr: 7.58e-03, grad_scale: 32.0
+2024-07-28 07:35:46,823 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=2.548e-03
+2024-07-28 07:35:51,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.98 vs. limit=22.5
+2024-07-28 07:35:52,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=127216.0, ans=0.1
+2024-07-28 07:35:55,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=5.08 vs. limit=12.0
+2024-07-28 07:35:56,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.61 vs. limit=12.0
+2024-07-28 07:36:00,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127229.33333333333, ans=0.125
+2024-07-28 07:36:07,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.90 vs. limit=6.0
+2024-07-28 07:36:13,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127256.0, ans=0.1
+2024-07-28 07:36:16,679 INFO [train.py:1114] (3/4) Epoch 10, batch 3450, loss[loss=0.2282, simple_loss=0.306, pruned_loss=0.07519, over 4791.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2891, pruned_loss=0.05897, over 937549.53 frames. ], batch size: 19, lr: 7.58e-03, grad_scale: 32.0
+2024-07-28 07:36:16,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=127269.33333333333, ans=0.0
+2024-07-28 07:36:20,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=127269.33333333333, ans=0.0
+2024-07-28 07:36:25,812 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.819e+01 5.619e+01 6.055e+01 6.552e+01 2.053e+02, threshold=1.211e+02, percent-clipped=1.0
+2024-07-28 07:36:44,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=127322.66666666667, ans=0.125
+2024-07-28 07:36:53,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127322.66666666667, ans=0.125
+2024-07-28 07:36:56,170 INFO [train.py:1114] (3/4) Epoch 10, batch 3500, loss[loss=0.1718, simple_loss=0.256, pruned_loss=0.04379, over 4923.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2875, pruned_loss=0.05839, over 938220.53 frames. ], batch size: 12, lr: 7.58e-03, grad_scale: 32.0
+2024-07-28 07:37:09,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=127349.33333333333, ans=0.0
+2024-07-28 07:37:37,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=127389.33333333333, ans=0.1
+2024-07-28 07:37:39,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127389.33333333333, ans=0.1
+2024-07-28 07:37:47,037 INFO [train.py:1114] (3/4) Epoch 10, batch 3550, loss[loss=0.229, simple_loss=0.3238, pruned_loss=0.06708, over 4657.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2872, pruned_loss=0.05822, over 938712.01 frames. ], batch size: 14, lr: 7.58e-03, grad_scale: 32.0
+2024-07-28 07:37:47,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=127402.66666666667, ans=0.125
+2024-07-28 07:37:51,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.41 vs. limit=10.0
+2024-07-28 07:38:16,056 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.635e+01 6.291e+01 7.462e+01 1.218e+02, threshold=1.258e+02, percent-clipped=1.0
+2024-07-28 07:38:16,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff2.min_abs, batch_count=127416.0, ans=0.1
+2024-07-28 07:38:18,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=127416.0, ans=0.0
+2024-07-28 07:38:18,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=127416.0, ans=0.95
+2024-07-28 07:38:21,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.43 vs. limit=12.0
+2024-07-28 07:38:22,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0
+2024-07-28 07:38:23,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=127429.33333333333, ans=0.0
+2024-07-28 07:38:24,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.82 vs. limit=15.0
+2024-07-28 07:38:26,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=127442.66666666667, ans=0.0
+2024-07-28 07:38:34,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127456.0, ans=0.125
+2024-07-28 07:38:40,233 INFO [train.py:1114] (3/4) Epoch 10, batch 3600, loss[loss=0.1704, simple_loss=0.2616, pruned_loss=0.03959, over 4969.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2872, pruned_loss=0.0575, over 940004.48 frames. ], batch size: 13, lr: 7.58e-03, grad_scale: 32.0
+2024-07-28 07:38:43,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.88 vs. limit=15.0
+2024-07-28 07:38:47,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=127482.66666666667, ans=0.0
+2024-07-28 07:38:48,265 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:38:48,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127482.66666666667, ans=0.1
+2024-07-28 07:38:53,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.93 vs. limit=15.0
+2024-07-28 07:38:56,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=127496.0, ans=0.2
+2024-07-28 07:39:12,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=127509.33333333333, ans=0.125
+2024-07-28 07:39:12,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=127509.33333333333, ans=0.2
+2024-07-28 07:39:23,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127522.66666666667, ans=0.1
+2024-07-28 07:39:25,741 INFO [train.py:1114] (3/4) Epoch 10, batch 3650, loss[loss=0.2574, simple_loss=0.3432, pruned_loss=0.0858, over 4899.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2864, pruned_loss=0.05763, over 940595.51 frames. ], batch size: 15, lr: 7.57e-03, grad_scale: 32.0
+2024-07-28 07:39:29,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=127536.0, ans=0.125
+2024-07-28 07:39:41,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=127536.0, ans=0.2
+2024-07-28 07:39:41,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=127536.0, ans=0.2
+2024-07-28 07:39:51,677 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.725e+01 6.100e+01 7.132e+01 1.043e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 07:40:05,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127562.66666666667, ans=0.125
+2024-07-28 07:40:31,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.42 vs. limit=15.0
+2024-07-28 07:40:38,303 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.40 vs. limit=15.0
+2024-07-28 07:40:50,428 INFO [train.py:1114] (3/4) Epoch 10, batch 3700, loss[loss=0.2496, simple_loss=0.3315, pruned_loss=0.08385, over 4931.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2867, pruned_loss=0.05727, over 941561.41 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0
+2024-07-28 07:40:55,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=15.0
+2024-07-28 07:41:06,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=127616.0, ans=0.0
+2024-07-28 07:41:10,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=127629.33333333333, ans=0.1
+2024-07-28 07:41:10,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=127629.33333333333, ans=0.0
+2024-07-28 07:41:28,283 INFO [train.py:1114] (3/4) Epoch 10, batch 3750, loss[loss=0.1734, simple_loss=0.2517, pruned_loss=0.04751, over 4788.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2861, pruned_loss=0.05681, over 943128.79 frames. ], batch size: 11, lr: 7.57e-03, grad_scale: 32.0
+2024-07-28 07:41:51,639 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.342e+01 5.968e+01 6.692e+01 7.910e+01 1.742e+02, threshold=1.338e+02, percent-clipped=0.0
+2024-07-28 07:41:58,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=127696.0, ans=0.125
+2024-07-28 07:42:11,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=127722.66666666667, ans=0.2
+2024-07-28 07:42:14,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=127722.66666666667, ans=0.125
+2024-07-28 07:42:22,847 INFO [train.py:1114] (3/4) Epoch 10, batch 3800, loss[loss=0.2391, simple_loss=0.3295, pruned_loss=0.07433, over 4816.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2863, pruned_loss=0.05719, over 941596.75 frames. ], batch size: 14, lr: 7.57e-03, grad_scale: 32.0
+2024-07-28 07:42:29,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=127736.0, ans=0.125
+2024-07-28 07:42:42,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=127749.33333333333, ans=0.015
+2024-07-28 07:43:10,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=127789.33333333333, ans=0.025
+2024-07-28 07:43:11,571 INFO [train.py:1114] (3/4) Epoch 10, batch 3850, loss[loss=0.2406, simple_loss=0.3299, pruned_loss=0.07562, over 4644.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.285, pruned_loss=0.05634, over 942241.24 frames. ], batch size: 16, lr: 7.57e-03, grad_scale: 32.0
+2024-07-28 07:43:21,989 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.662e+01 6.521e+01 7.617e+01 1.192e+02, threshold=1.304e+02, percent-clipped=1.0
+2024-07-28 07:43:28,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=127829.33333333333, ans=0.0
+2024-07-28 07:43:39,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.71 vs. limit=22.5
+2024-07-28 07:43:48,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=127856.0, ans=0.04949747468305833
+2024-07-28 07:43:52,480 INFO [train.py:1114] (3/4) Epoch 10, batch 3900, loss[loss=0.2208, simple_loss=0.312, pruned_loss=0.0648, over 4817.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2863, pruned_loss=0.05672, over 942368.31 frames. ], batch size: 14, lr: 7.56e-03, grad_scale: 32.0
+2024-07-28 07:43:53,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=127869.33333333333, ans=0.2
+2024-07-28 07:43:55,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=127869.33333333333, ans=0.0
+2024-07-28 07:43:59,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=127882.66666666667, ans=0.125
+2024-07-28 07:43:59,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=127882.66666666667, ans=0.05
+2024-07-28 07:44:23,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=127909.33333333333, ans=0.0
+2024-07-28 07:44:23,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=127909.33333333333, ans=0.2
+2024-07-28 07:44:29,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=127922.66666666667, ans=0.125
+2024-07-28 07:44:29,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=127922.66666666667, ans=0.1
+2024-07-28 07:44:32,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=127922.66666666667, ans=0.0
+2024-07-28 07:44:33,689 INFO [train.py:1114] (3/4) Epoch 10, batch 3950, loss[loss=0.2241, simple_loss=0.3262, pruned_loss=0.06104, over 4839.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2861, pruned_loss=0.05654, over 944428.63 frames. ], batch size: 16, lr: 7.56e-03, grad_scale: 16.0
+2024-07-28 07:44:33,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=127936.0, ans=0.025
+2024-07-28 07:44:33,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127936.0, ans=0.125
+2024-07-28 07:44:49,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=127936.0, ans=0.2
+2024-07-28 07:44:52,741 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.46 vs. limit=15.0
+2024-07-28 07:44:57,177 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.715e+01 6.133e+01 6.852e+01 1.045e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-28 07:45:08,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=127976.0, ans=0.125
+2024-07-28 07:45:16,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.56 vs. limit=15.0
+2024-07-28 07:45:23,679 INFO [train.py:1114] (3/4) Epoch 10, batch 4000, loss[loss=0.1716, simple_loss=0.261, pruned_loss=0.04111, over 4768.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2874, pruned_loss=0.05757, over 940999.42 frames. ], batch size: 12, lr: 7.56e-03, grad_scale: 32.0
+2024-07-28 07:45:41,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.55 vs. limit=22.5
+2024-07-28 07:45:45,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=128042.66666666667, ans=0.2
+2024-07-28 07:45:51,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=128042.66666666667, ans=0.025
+2024-07-28 07:45:55,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=128056.0, ans=0.125
+2024-07-28 07:46:03,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128056.0, ans=0.1
+2024-07-28 07:46:07,090 INFO [train.py:1114] (3/4) Epoch 10, batch 4050, loss[loss=0.2659, simple_loss=0.3296, pruned_loss=0.1012, over 3020.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2879, pruned_loss=0.05809, over 939185.97 frames. ], batch size: 35, lr: 7.56e-03, grad_scale: 32.0
+2024-07-28 07:46:10,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=128069.33333333333, ans=0.0
+2024-07-28 07:46:17,338 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.919e+01 6.572e+01 7.473e+01 1.130e+02, threshold=1.314e+02, percent-clipped=0.0
+2024-07-28 07:46:26,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=128096.0, ans=0.125
+2024-07-28 07:46:26,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128109.33333333333, ans=0.125
+2024-07-28 07:46:38,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=128122.66666666667, ans=0.125
+2024-07-28 07:46:39,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=128122.66666666667, ans=0.2
+2024-07-28 07:46:40,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=128122.66666666667, ans=0.2
+2024-07-28 07:46:42,598 INFO [train.py:1114] (3/4) Epoch 10, batch 4100, loss[loss=0.2413, simple_loss=0.3249, pruned_loss=0.0788, over 4907.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2872, pruned_loss=0.05752, over 938530.08 frames. ], batch size: 15, lr: 7.56e-03, grad_scale: 32.0
+2024-07-28 07:46:47,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=128136.0, ans=0.5
+2024-07-28 07:46:52,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=128149.33333333333, ans=0.125
+2024-07-28 07:47:01,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=128162.66666666667, ans=0.125
+2024-07-28 07:47:15,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=128189.33333333333, ans=0.125
+2024-07-28 07:47:15,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=128189.33333333333, ans=0.0
+2024-07-28 07:47:18,149 INFO [train.py:1114] (3/4) Epoch 10, batch 4150, loss[loss=0.1762, simple_loss=0.2726, pruned_loss=0.03994, over 4816.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2866, pruned_loss=0.05716, over 938280.49 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0
+2024-07-28 07:47:22,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.18 vs. limit=15.0
+2024-07-28 07:47:25,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=128216.0, ans=0.0
+2024-07-28 07:47:28,111 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.249e+01 5.846e+01 6.728e+01 7.607e+01 1.158e+02, threshold=1.346e+02, percent-clipped=0.0
+2024-07-28 07:47:51,042 INFO [train.py:1114] (3/4) Epoch 10, batch 4200, loss[loss=0.208, simple_loss=0.3067, pruned_loss=0.05463, over 4916.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2867, pruned_loss=0.05733, over 939929.83 frames. ], batch size: 15, lr: 7.55e-03, grad_scale: 32.0
+2024-07-28 07:47:57,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.43 vs. limit=22.5
+2024-07-28 07:47:57,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.72 vs. limit=5.0
+2024-07-28 07:47:58,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=128282.66666666667, ans=0.04949747468305833
+2024-07-28 07:47:59,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=128282.66666666667, ans=0.0
+2024-07-28 07:48:01,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=128282.66666666667, ans=0.0
+2024-07-28 07:48:12,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.96 vs. limit=10.0
+2024-07-28 07:48:13,180 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:48:13,267 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:48:15,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=128309.33333333333, ans=0.0
+2024-07-28 07:48:17,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=128322.66666666667, ans=0.0
+2024-07-28 07:48:17,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=128322.66666666667, ans=0.2
+2024-07-28 07:48:23,683 INFO [train.py:1114] (3/4) Epoch 10, batch 4250, loss[loss=0.1875, simple_loss=0.2779, pruned_loss=0.04858, over 4646.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2875, pruned_loss=0.05775, over 941525.35 frames. ], batch size: 12, lr: 7.55e-03, grad_scale: 32.0
+2024-07-28 07:48:24,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=128336.0, ans=0.125
+2024-07-28 07:48:30,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=128349.33333333333, ans=0.2
+2024-07-28 07:48:33,347 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.567e+01 6.071e+01 6.705e+01 1.236e+02, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 07:48:34,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=128349.33333333333, ans=0.125
+2024-07-28 07:48:46,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=128376.0, ans=0.0
+2024-07-28 07:48:57,121 INFO [train.py:1114] (3/4) Epoch 10, batch 4300, loss[loss=0.2261, simple_loss=0.3045, pruned_loss=0.07382, over 4760.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2882, pruned_loss=0.05813, over 940562.29 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0
+2024-07-28 07:48:58,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.58 vs. limit=15.0
+2024-07-28 07:49:00,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=128402.66666666667, ans=0.05
+2024-07-28 07:49:01,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=128402.66666666667, ans=0.125
+2024-07-28 07:49:07,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=128416.0, ans=0.125
+2024-07-28 07:49:17,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=128442.66666666667, ans=0.1
+2024-07-28 07:49:18,783 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:49:20,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=128442.66666666667, ans=0.125
+2024-07-28 07:49:22,754 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=2.527e-03
+2024-07-28 07:49:24,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=128456.0, ans=0.0
+2024-07-28 07:49:30,483 INFO [train.py:1114] (3/4) Epoch 10, batch 4350, loss[loss=0.1815, simple_loss=0.2728, pruned_loss=0.04509, over 4765.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.288, pruned_loss=0.05769, over 940961.41 frames. ], batch size: 13, lr: 7.55e-03, grad_scale: 32.0
+2024-07-28 07:49:32,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=128469.33333333333, ans=0.025
+2024-07-28 07:49:40,760 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.507e+01 6.201e+01 7.013e+01 1.119e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 07:49:46,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=128496.0, ans=0.2
+2024-07-28 07:49:47,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=128496.0, ans=10.0
+2024-07-28 07:49:50,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=128509.33333333333, ans=0.5
+2024-07-28 07:50:01,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=128522.66666666667, ans=0.2
+2024-07-28 07:50:04,258 INFO [train.py:1114] (3/4) Epoch 10, batch 4400, loss[loss=0.1577, simple_loss=0.2524, pruned_loss=0.03151, over 4814.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2884, pruned_loss=0.05779, over 941018.99 frames. ], batch size: 14, lr: 7.55e-03, grad_scale: 32.0
+2024-07-28 07:50:13,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=128549.33333333333, ans=0.2
+2024-07-28 07:50:26,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.13 vs. limit=15.0
+2024-07-28 07:50:33,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=128589.33333333333, ans=0.025
+2024-07-28 07:50:37,963 INFO [train.py:1114] (3/4) Epoch 10, batch 4450, loss[loss=0.2049, simple_loss=0.2935, pruned_loss=0.05816, over 4939.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2882, pruned_loss=0.0582, over 939076.53 frames. ], batch size: 12, lr: 7.54e-03, grad_scale: 32.0
+2024-07-28 07:50:44,284 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.94 vs. limit=6.0
+2024-07-28 07:50:47,724 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.603e+01 6.224e+01 7.010e+01 9.776e+01, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 07:51:08,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=128656.0, ans=0.09899494936611666
+2024-07-28 07:51:10,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=12.0
+2024-07-28 07:51:10,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=128656.0, ans=0.0
+2024-07-28 07:51:12,775 INFO [train.py:1114] (3/4) Epoch 10, batch 4500, loss[loss=0.1846, simple_loss=0.2758, pruned_loss=0.04674, over 4739.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2884, pruned_loss=0.05814, over 938466.00 frames. ], batch size: 14, lr: 7.54e-03, grad_scale: 32.0
+2024-07-28 07:51:17,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=128669.33333333333, ans=0.125
+2024-07-28 07:51:26,373 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.56 vs. limit=22.5
+2024-07-28 07:51:29,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=128696.0, ans=0.125
+2024-07-28 07:51:29,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.09 vs. limit=15.0
+2024-07-28 07:51:47,649 INFO [train.py:1114] (3/4) Epoch 10, batch 4550, loss[loss=0.1705, simple_loss=0.2579, pruned_loss=0.04159, over 4900.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2884, pruned_loss=0.05789, over 940528.02 frames. ], batch size: 13, lr: 7.54e-03, grad_scale: 32.0
+2024-07-28 07:51:54,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=128749.33333333333, ans=0.2
+2024-07-28 07:51:57,765 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.839e+01 6.410e+01 7.232e+01 1.296e+02, threshold=1.282e+02, percent-clipped=2.0
+2024-07-28 07:52:07,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=128762.66666666667, ans=0.2
+2024-07-28 07:52:09,744 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:52:12,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=128776.0, ans=0.125
+2024-07-28 07:52:20,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=128789.33333333333, ans=0.125
+2024-07-28 07:52:24,748 INFO [train.py:1114] (3/4) Epoch 10, batch 4600, loss[loss=0.2313, simple_loss=0.3143, pruned_loss=0.07417, over 4489.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2869, pruned_loss=0.05775, over 938771.02 frames. ], batch size: 21, lr: 7.54e-03, grad_scale: 32.0
+2024-07-28 07:52:39,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=128829.33333333333, ans=0.125
+2024-07-28 07:52:40,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.10 vs. limit=15.0
+2024-07-28 07:52:54,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=128856.0, ans=0.125
+2024-07-28 07:52:57,428 INFO [train.py:1114] (3/4) Epoch 10, batch 4650, loss[loss=0.205, simple_loss=0.2932, pruned_loss=0.05845, over 4822.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2874, pruned_loss=0.05727, over 940530.55 frames. ], batch size: 16, lr: 7.54e-03, grad_scale: 32.0
+2024-07-28 07:53:05,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=128882.66666666667, ans=0.0
+2024-07-28 07:53:07,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.595e+01 6.179e+01 7.275e+01 1.134e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 07:53:30,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=128922.66666666667, ans=10.0
+2024-07-28 07:53:31,108 INFO [train.py:1114] (3/4) Epoch 10, batch 4700, loss[loss=0.1854, simple_loss=0.2552, pruned_loss=0.0578, over 4707.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2872, pruned_loss=0.0578, over 937705.45 frames. ], batch size: 11, lr: 7.53e-03, grad_scale: 32.0
+2024-07-28 07:53:34,694 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:53:36,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=128936.0, ans=0.1
+2024-07-28 07:53:41,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=128949.33333333333, ans=0.0
+2024-07-28 07:53:43,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=128949.33333333333, ans=0.125
+2024-07-28 07:53:49,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.72 vs. limit=15.0
+2024-07-28 07:53:52,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.07 vs. limit=22.5
+2024-07-28 07:54:05,547 INFO [train.py:1114] (3/4) Epoch 10, batch 4750, loss[loss=0.2091, simple_loss=0.2971, pruned_loss=0.06055, over 4451.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2882, pruned_loss=0.05835, over 935623.55 frames. ], batch size: 21, lr: 7.53e-03, grad_scale: 32.0
+2024-07-28 07:54:09,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=129002.66666666667, ans=0.0
+2024-07-28 07:54:14,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129016.0, ans=0.1
+2024-07-28 07:54:15,560 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.636e+01 6.177e+01 7.080e+01 9.506e+01, threshold=1.235e+02, percent-clipped=0.0
+2024-07-28 07:54:29,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=129042.66666666667, ans=0.0
+2024-07-28 07:54:36,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=129056.0, ans=0.0
+2024-07-28 07:54:39,984 INFO [train.py:1114] (3/4) Epoch 10, batch 4800, loss[loss=0.2034, simple_loss=0.2942, pruned_loss=0.05626, over 4695.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2874, pruned_loss=0.05804, over 932737.91 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0
+2024-07-28 07:54:41,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129069.33333333333, ans=0.125
+2024-07-28 07:54:53,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0
+2024-07-28 07:54:54,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129096.0, ans=0.1
+2024-07-28 07:55:01,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=129109.33333333333, ans=0.5
+2024-07-28 07:55:06,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=129122.66666666667, ans=10.0
+2024-07-28 07:55:07,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129122.66666666667, ans=0.1
+2024-07-28 07:55:09,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=129122.66666666667, ans=0.125
+2024-07-28 07:55:12,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=129136.0, ans=0.2
+2024-07-28 07:55:13,158 INFO [train.py:1114] (3/4) Epoch 10, batch 4850, loss[loss=0.1876, simple_loss=0.2921, pruned_loss=0.04149, over 4736.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2883, pruned_loss=0.05833, over 932104.30 frames. ], batch size: 14, lr: 7.53e-03, grad_scale: 32.0
+2024-07-28 07:55:15,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.49 vs. limit=15.0
+2024-07-28 07:55:23,229 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.570e+01 6.105e+01 6.787e+01 9.790e+01, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 07:55:32,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=129176.0, ans=0.0
+2024-07-28 07:55:34,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129176.0, ans=0.125
+2024-07-28 07:55:37,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=129176.0, ans=0.0
+2024-07-28 07:55:39,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=129189.33333333333, ans=0.125
+2024-07-28 07:55:43,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=129189.33333333333, ans=0.125
+2024-07-28 07:55:46,406 INFO [train.py:1114] (3/4) Epoch 10, batch 4900, loss[loss=0.2237, simple_loss=0.308, pruned_loss=0.06973, over 4769.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2877, pruned_loss=0.05797, over 934061.38 frames. ], batch size: 13, lr: 7.53e-03, grad_scale: 32.0
+2024-07-28 07:55:49,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=129202.66666666667, ans=0.2
+2024-07-28 07:55:50,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=129202.66666666667, ans=0.125
+2024-07-28 07:55:53,400 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 07:55:55,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129216.0, ans=0.125
+2024-07-28 07:55:56,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0
+2024-07-28 07:55:58,475 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.73 vs. limit=15.0
+2024-07-28 07:56:03,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=129229.33333333333, ans=0.0
+2024-07-28 07:56:11,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=129242.66666666667, ans=0.0
+2024-07-28 07:56:20,956 INFO [train.py:1114] (3/4) Epoch 10, batch 4950, loss[loss=0.2467, simple_loss=0.3156, pruned_loss=0.0889, over 3575.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2893, pruned_loss=0.05896, over 931357.45 frames. ], batch size: 35, lr: 7.52e-03, grad_scale: 32.0
+2024-07-28 07:56:29,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=129282.66666666667, ans=15.0
+2024-07-28 07:56:33,107 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.675e+01 6.169e+01 7.226e+01 1.073e+02, threshold=1.234e+02, percent-clipped=0.0
+2024-07-28 07:56:38,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=129296.0, ans=0.0
+2024-07-28 07:56:44,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=129309.33333333333, ans=0.125
+2024-07-28 07:56:44,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=129309.33333333333, ans=0.125
+2024-07-28 07:57:00,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=129336.0, ans=0.125
+2024-07-28 07:57:00,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=129336.0, ans=0.125
+2024-07-28 07:57:01,461 INFO [train.py:1114] (3/4) Epoch 10, batch 5000, loss[loss=0.2115, simple_loss=0.308, pruned_loss=0.05748, over 4670.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2885, pruned_loss=0.05818, over 935232.25 frames. ], batch size: 14, lr: 7.52e-03, grad_scale: 32.0
+2024-07-28 07:57:02,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=129336.0, ans=0.0
+2024-07-28 07:57:15,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=129362.66666666667, ans=0.2
+2024-07-28 07:57:19,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=129362.66666666667, ans=0.2
+2024-07-28 07:57:28,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=129376.0, ans=0.0
+2024-07-28 07:57:34,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=129389.33333333333, ans=0.5
+2024-07-28 07:57:35,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=129389.33333333333, ans=0.125
+2024-07-28 07:57:36,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=129389.33333333333, ans=0.2
+2024-07-28 07:57:36,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=129389.33333333333, ans=0.125
+2024-07-28 07:57:37,480 INFO [train.py:1114] (3/4) Epoch 10, batch 5050, loss[loss=0.1735, simple_loss=0.2599, pruned_loss=0.04352, over 4847.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2876, pruned_loss=0.05775, over 937483.45 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0
+2024-07-28 07:57:40,639 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.13 vs. limit=22.5
+2024-07-28 07:57:43,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129416.0, ans=0.1
+2024-07-28 07:57:47,629 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.667e+01 5.711e+01 6.360e+01 7.128e+01 1.073e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-28 07:57:51,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129416.0, ans=0.125
+2024-07-28 07:57:51,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=129416.0, ans=0.0
+2024-07-28 07:58:00,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=129442.66666666667, ans=0.125
+2024-07-28 07:58:03,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=129442.66666666667, ans=0.5
+2024-07-28 07:58:04,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=129442.66666666667, ans=0.125
+2024-07-28 07:58:05,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=129442.66666666667, ans=0.125
+2024-07-28 07:58:05,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=129442.66666666667, ans=0.2
+2024-07-28 07:58:07,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.91 vs. limit=12.0
+2024-07-28 07:58:13,500 INFO [train.py:1114] (3/4) Epoch 10, batch 5100, loss[loss=0.1808, simple_loss=0.2719, pruned_loss=0.04479, over 4780.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2881, pruned_loss=0.05786, over 935921.35 frames. ], batch size: 12, lr: 7.52e-03, grad_scale: 32.0
+2024-07-28 07:58:18,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=129469.33333333333, ans=0.0
+2024-07-28 07:58:24,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=129482.66666666667, ans=0.025
+2024-07-28 07:58:28,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=129496.0, ans=0.125
+2024-07-28 07:58:37,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=129509.33333333333, ans=0.125
+2024-07-28 07:58:37,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=129509.33333333333, ans=0.125
+2024-07-28 07:58:38,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.80 vs. limit=15.0
+2024-07-28 07:58:46,345 INFO [train.py:1114] (3/4) Epoch 10, batch 5150, loss[loss=0.2119, simple_loss=0.295, pruned_loss=0.06433, over 4838.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2887, pruned_loss=0.05781, over 936857.69 frames. ], batch size: 16, lr: 7.52e-03, grad_scale: 32.0
+2024-07-28 07:58:48,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=129536.0, ans=0.125
+2024-07-28 07:58:49,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.54 vs. limit=22.5
+2024-07-28 07:58:55,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=129549.33333333333, ans=0.025
+2024-07-28 07:58:56,279 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.874e+01 5.650e+01 6.455e+01 7.114e+01 1.167e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-28 07:59:08,022 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.86 vs. limit=22.5
+2024-07-28 07:59:20,215 INFO [train.py:1114] (3/4) Epoch 10, batch 5200, loss[loss=0.2647, simple_loss=0.3334, pruned_loss=0.09797, over 4655.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2882, pruned_loss=0.05739, over 936407.81 frames. ], batch size: 14, lr: 7.51e-03, grad_scale: 32.0
+2024-07-28 07:59:33,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.33 vs. limit=22.5
+2024-07-28 07:59:40,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=129642.66666666667, ans=0.0
+2024-07-28 07:59:40,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=129642.66666666667, ans=0.0
+2024-07-28 07:59:41,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=129642.66666666667, ans=0.0
+2024-07-28 07:59:43,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=129642.66666666667, ans=0.125
+2024-07-28 07:59:45,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=129642.66666666667, ans=0.125
+2024-07-28 07:59:53,602 INFO [train.py:1114] (3/4) Epoch 10, batch 5250, loss[loss=0.1667, simple_loss=0.2636, pruned_loss=0.03488, over 4891.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2875, pruned_loss=0.05684, over 936314.80 frames. ], batch size: 13, lr: 7.51e-03, grad_scale: 32.0
+2024-07-28 07:59:55,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=129669.33333333333, ans=0.2
+2024-07-28 07:59:59,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129669.33333333333, ans=0.125
+2024-07-28 08:00:03,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.916e+01 5.858e+01 6.971e+01 8.204e+01 1.196e+02, threshold=1.394e+02, percent-clipped=0.0
+2024-07-28 08:00:06,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129696.0, ans=0.125
+2024-07-28 08:00:18,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0
+2024-07-28 08:00:18,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.11 vs. limit=15.0
+2024-07-28 08:00:27,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129736.0, ans=0.1
+2024-07-28 08:00:27,511 INFO [train.py:1114] (3/4) Epoch 10, batch 5300, loss[loss=0.2505, simple_loss=0.3438, pruned_loss=0.07862, over 4636.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2878, pruned_loss=0.05708, over 934679.35 frames. ], batch size: 16, lr: 7.51e-03, grad_scale: 32.0
+2024-07-28 08:00:30,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=129736.0, ans=0.09899494936611666
+2024-07-28 08:00:32,956 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:00:43,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=129762.66666666667, ans=0.0
+2024-07-28 08:00:47,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=129776.0, ans=0.1
+2024-07-28 08:00:52,544 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:01:00,901 INFO [train.py:1114] (3/4) Epoch 10, batch 5350, loss[loss=0.1536, simple_loss=0.2291, pruned_loss=0.03906, over 4537.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2892, pruned_loss=0.0577, over 936462.59 frames. ], batch size: 10, lr: 7.51e-03, grad_scale: 32.0
+2024-07-28 08:01:05,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.73 vs. limit=12.0
+2024-07-28 08:01:11,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.483e+01 5.986e+01 6.738e+01 1.016e+02, threshold=1.197e+02, percent-clipped=0.0
+2024-07-28 08:01:12,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=129816.0, ans=0.125
+2024-07-28 08:01:17,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=129829.33333333333, ans=0.125
+2024-07-28 08:01:22,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.42 vs. limit=15.0
+2024-07-28 08:01:30,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=129856.0, ans=0.0
+2024-07-28 08:01:32,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.29 vs. limit=12.0
+2024-07-28 08:01:36,309 INFO [train.py:1114] (3/4) Epoch 10, batch 5400, loss[loss=0.2324, simple_loss=0.3199, pruned_loss=0.07244, over 4248.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2902, pruned_loss=0.05847, over 930198.08 frames. ], batch size: 25, lr: 7.51e-03, grad_scale: 32.0
+2024-07-28 08:01:38,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=129869.33333333333, ans=0.125
+2024-07-28 08:01:44,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=129882.66666666667, ans=0.0
+2024-07-28 08:01:45,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.14 vs. limit=15.0
+2024-07-28 08:01:47,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=129882.66666666667, ans=0.5
+2024-07-28 08:01:48,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=129882.66666666667, ans=0.125
+2024-07-28 08:01:52,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=129896.0, ans=0.0
+2024-07-28 08:01:54,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=3.83 vs. limit=15.0
+2024-07-28 08:01:54,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=129896.0, ans=0.0
+2024-07-28 08:01:58,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129909.33333333333, ans=0.1
+2024-07-28 08:02:01,095 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:02:12,885 INFO [train.py:1114] (3/4) Epoch 10, batch 5450, loss[loss=0.1735, simple_loss=0.2517, pruned_loss=0.04765, over 4703.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2889, pruned_loss=0.05794, over 933505.51 frames. ], batch size: 11, lr: 7.50e-03, grad_scale: 32.0
+2024-07-28 08:02:14,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=129936.0, ans=0.0
+2024-07-28 08:02:15,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.75 vs. limit=22.5
+2024-07-28 08:02:24,705 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.820e+01 5.695e+01 6.364e+01 7.750e+01 1.165e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-28 08:02:30,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=129962.66666666667, ans=0.0
+2024-07-28 08:02:31,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.68 vs. limit=15.0
+2024-07-28 08:02:35,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=129976.0, ans=0.125
+2024-07-28 08:02:40,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=129976.0, ans=0.1
+2024-07-28 08:02:48,476 INFO [train.py:1114] (3/4) Epoch 10, batch 5500, loss[loss=0.2176, simple_loss=0.2843, pruned_loss=0.07548, over 4200.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2894, pruned_loss=0.05862, over 930683.42 frames. ], batch size: 25, lr: 7.50e-03, grad_scale: 32.0
+2024-07-28 08:02:48,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=130002.66666666667, ans=0.125
+2024-07-28 08:02:54,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=130002.66666666667, ans=0.0
+2024-07-28 08:02:58,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=130016.0, ans=0.07
+2024-07-28 08:03:02,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=130016.0, ans=0.0
+2024-07-28 08:03:02,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.09 vs. limit=6.0
+2024-07-28 08:03:19,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=130056.0, ans=0.125
+2024-07-28 08:03:20,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130056.0, ans=0.1
+2024-07-28 08:03:24,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=130056.0, ans=0.125
+2024-07-28 08:03:26,090 INFO [train.py:1114] (3/4) Epoch 10, batch 5550, loss[loss=0.175, simple_loss=0.2535, pruned_loss=0.04827, over 4696.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2881, pruned_loss=0.05786, over 932713.57 frames. ], batch size: 12, lr: 7.50e-03, grad_scale: 32.0
+2024-07-28 08:03:27,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130069.33333333333, ans=0.125
+2024-07-28 08:03:35,927 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.857e+01 6.242e+01 7.417e+01 1.070e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 08:03:44,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=130096.0, ans=0.125
+2024-07-28 08:03:45,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.77 vs. limit=15.0
+2024-07-28 08:03:48,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0
+2024-07-28 08:03:55,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=130122.66666666667, ans=0.0
+2024-07-28 08:03:58,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=130122.66666666667, ans=0.125
+2024-07-28 08:03:59,472 INFO [train.py:1114] (3/4) Epoch 10, batch 5600, loss[loss=0.1711, simple_loss=0.2685, pruned_loss=0.03691, over 4730.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2877, pruned_loss=0.05761, over 934072.34 frames.
], batch size: 14, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:03:59,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=130136.0, ans=0.0 +2024-07-28 08:04:00,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=130136.0, ans=0.0 +2024-07-28 08:04:07,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=130149.33333333333, ans=0.2 +2024-07-28 08:04:10,680 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.14 vs. limit=12.0 +2024-07-28 08:04:11,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=130149.33333333333, ans=0.125 +2024-07-28 08:04:23,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=130162.66666666667, ans=0.07 +2024-07-28 08:04:25,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.91 vs. limit=15.0 +2024-07-28 08:04:26,568 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=6.582e-02 +2024-07-28 08:04:30,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=130176.0, ans=0.1 +2024-07-28 08:04:40,123 INFO [train.py:1114] (3/4) Epoch 10, batch 5650, loss[loss=0.2341, simple_loss=0.3211, pruned_loss=0.07361, over 4461.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2864, pruned_loss=0.0571, over 936738.24 frames. ], batch size: 21, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:04:50,450 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.620e+01 6.091e+01 7.074e+01 1.306e+02, threshold=1.218e+02, percent-clipped=1.0 +2024-07-28 08:04:52,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=130216.0, ans=0.125 +2024-07-28 08:04:58,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=130229.33333333333, ans=0.04949747468305833 +2024-07-28 08:05:03,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=130242.66666666667, ans=0.95 +2024-07-28 08:05:04,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=130242.66666666667, ans=0.04949747468305833 +2024-07-28 08:05:07,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130256.0, ans=0.1 +2024-07-28 08:05:09,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.94 vs. limit=22.5 +2024-07-28 08:05:13,683 INFO [train.py:1114] (3/4) Epoch 10, batch 5700, loss[loss=0.2307, simple_loss=0.3095, pruned_loss=0.07593, over 4699.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2871, pruned_loss=0.05717, over 938306.15 frames. 
], batch size: 13, lr: 7.50e-03, grad_scale: 32.0 +2024-07-28 08:05:14,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=130269.33333333333, ans=0.0 +2024-07-28 08:05:25,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.11 vs. limit=15.0 +2024-07-28 08:05:25,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=130282.66666666667, ans=0.125 +2024-07-28 08:05:27,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=130282.66666666667, ans=0.125 +2024-07-28 08:05:34,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.81 vs. limit=22.5 +2024-07-28 08:05:50,247 INFO [train.py:1114] (3/4) Epoch 10, batch 5750, loss[loss=0.1985, simple_loss=0.3038, pruned_loss=0.04661, over 4756.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2874, pruned_loss=0.05689, over 938240.30 frames. ], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:05:59,498 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:06:00,018 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.735e+01 6.185e+01 6.687e+01 9.991e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 08:06:01,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=130349.33333333333, ans=0.125 +2024-07-28 08:06:01,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130349.33333333333, ans=0.1 +2024-07-28 08:06:11,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.87 vs. limit=10.0 +2024-07-28 08:06:16,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=130389.33333333333, ans=10.0 +2024-07-28 08:06:19,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=12.56 vs. limit=10.0 +2024-07-28 08:06:20,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=130389.33333333333, ans=0.125 +2024-07-28 08:06:24,163 INFO [train.py:1114] (3/4) Epoch 10, batch 5800, loss[loss=0.2149, simple_loss=0.3063, pruned_loss=0.06176, over 4680.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2879, pruned_loss=0.05704, over 937638.11 frames. 
], batch size: 19, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:06:31,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=130416.0, ans=0.2 +2024-07-28 08:06:36,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=130429.33333333333, ans=0.07 +2024-07-28 08:06:46,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=130442.66666666667, ans=15.0 +2024-07-28 08:06:48,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=130442.66666666667, ans=0.015 +2024-07-28 08:06:50,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.43 vs. limit=15.0 +2024-07-28 08:06:55,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=130456.0, ans=0.125 +2024-07-28 08:06:59,059 INFO [train.py:1114] (3/4) Epoch 10, batch 5850, loss[loss=0.2105, simple_loss=0.3018, pruned_loss=0.05962, over 4524.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.288, pruned_loss=0.05727, over 937796.62 frames. ], batch size: 21, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:09,040 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.958e+01 5.762e+01 6.655e+01 7.927e+01 1.283e+02, threshold=1.331e+02, percent-clipped=2.0 +2024-07-28 08:07:11,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=15.0 +2024-07-28 08:07:34,280 INFO [train.py:1114] (3/4) Epoch 10, batch 5900, loss[loss=0.2379, simple_loss=0.3232, pruned_loss=0.07633, over 4690.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2887, pruned_loss=0.05782, over 938084.18 frames. ], batch size: 15, lr: 7.49e-03, grad_scale: 32.0 +2024-07-28 08:07:37,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=130536.0, ans=0.125 +2024-07-28 08:07:39,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=130536.0, ans=0.05 +2024-07-28 08:07:41,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=130549.33333333333, ans=0.125 +2024-07-28 08:07:49,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=130562.66666666667, ans=0.125 +2024-07-28 08:07:53,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=130562.66666666667, ans=0.1 +2024-07-28 08:08:04,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.min_positive, batch_count=130589.33333333333, ans=0.05 +2024-07-28 08:08:10,394 INFO [train.py:1114] (3/4) Epoch 10, batch 5950, loss[loss=0.2463, simple_loss=0.3367, pruned_loss=0.07798, over 4686.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2882, pruned_loss=0.05761, over 940359.21 frames. 
], batch size: 15, lr: 7.49e-03, grad_scale: 64.0 +2024-07-28 08:08:10,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=130602.66666666667, ans=0.125 +2024-07-28 08:08:14,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=130602.66666666667, ans=0.2 +2024-07-28 08:08:20,239 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.913e+01 5.602e+01 6.081e+01 6.794e+01 9.729e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-28 08:08:24,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=130629.33333333333, ans=0.125 +2024-07-28 08:08:30,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0 +2024-07-28 08:08:31,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=130642.66666666667, ans=0.125 +2024-07-28 08:08:37,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=130642.66666666667, ans=0.0 +2024-07-28 08:08:40,333 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:08:45,950 INFO [train.py:1114] (3/4) Epoch 10, batch 6000, loss[loss=0.2019, simple_loss=0.2824, pruned_loss=0.06065, over 4296.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2879, pruned_loss=0.05808, over 937278.64 frames. ], batch size: 25, lr: 7.48e-03, grad_scale: 64.0 +2024-07-28 08:08:45,951 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 08:08:58,338 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.1713, simple_loss=0.2758, pruned_loss=0.03335, over 944034.00 frames. +2024-07-28 08:08:58,366 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 08:09:03,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=130669.33333333333, ans=0.0 +2024-07-28 08:09:14,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=130696.0, ans=0.0 +2024-07-28 08:09:24,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=130722.66666666667, ans=0.125 +2024-07-28 08:09:32,066 INFO [train.py:1114] (3/4) Epoch 10, batch 6050, loss[loss=0.1798, simple_loss=0.2557, pruned_loss=0.0519, over 4779.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2872, pruned_loss=0.0577, over 938358.15 frames. ], batch size: 12, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:09:34,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=130736.0, ans=0.125 +2024-07-28 08:09:42,574 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.834e+01 5.819e+01 6.565e+01 7.638e+01 1.917e+02, threshold=1.313e+02, percent-clipped=1.0 +2024-07-28 08:10:03,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.35 vs. 
limit=15.0 +2024-07-28 08:10:04,893 INFO [train.py:1114] (3/4) Epoch 10, batch 6100, loss[loss=0.2029, simple_loss=0.2894, pruned_loss=0.05819, over 4681.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2877, pruned_loss=0.05818, over 937991.21 frames. ], batch size: 15, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:05,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0 +2024-07-28 08:10:07,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=130802.66666666667, ans=0.5 +2024-07-28 08:10:12,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=130816.0, ans=0.125 +2024-07-28 08:10:15,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=130816.0, ans=0.2 +2024-07-28 08:10:18,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.43 vs. limit=15.0 +2024-07-28 08:10:23,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=130829.33333333333, ans=0.0 +2024-07-28 08:10:24,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=130842.66666666667, ans=0.09899494936611666 +2024-07-28 08:10:24,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 08:10:36,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=130856.0, ans=0.125 +2024-07-28 08:10:38,549 INFO [train.py:1114] (3/4) Epoch 10, batch 6150, loss[loss=0.264, simple_loss=0.3354, pruned_loss=0.09627, over 3550.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2881, pruned_loss=0.05817, over 936876.34 frames. ], batch size: 37, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:10:49,586 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.804e+01 6.352e+01 7.086e+01 1.134e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 08:10:53,098 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:10:56,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=130896.0, ans=0.0 +2024-07-28 08:10:59,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.56 vs. 
limit=22.5 +2024-07-28 08:11:03,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=130909.33333333333, ans=0.125 +2024-07-28 08:11:04,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=130922.66666666667, ans=10.0 +2024-07-28 08:11:06,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=130922.66666666667, ans=0.125 +2024-07-28 08:11:12,358 INFO [train.py:1114] (3/4) Epoch 10, batch 6200, loss[loss=0.269, simple_loss=0.3429, pruned_loss=0.09754, over 4739.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2884, pruned_loss=0.05802, over 936005.61 frames. ], batch size: 14, lr: 7.48e-03, grad_scale: 32.0 +2024-07-28 08:11:19,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=130949.33333333333, ans=0.125 +2024-07-28 08:11:45,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=130989.33333333333, ans=10.0 +2024-07-28 08:11:46,550 INFO [train.py:1114] (3/4) Epoch 10, batch 6250, loss[loss=0.1992, simple_loss=0.2911, pruned_loss=0.05367, over 4815.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2873, pruned_loss=0.05807, over 932846.49 frames. ], batch size: 14, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:11:53,846 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-07-28 08:11:56,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.14 vs. limit=22.5 +2024-07-28 08:11:57,348 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.378e+01 5.979e+01 6.836e+01 8.576e+01 1.211e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 08:12:17,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.11 vs. limit=12.0 +2024-07-28 08:12:19,829 INFO [train.py:1114] (3/4) Epoch 10, batch 6300, loss[loss=0.1861, simple_loss=0.2764, pruned_loss=0.04791, over 4561.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2875, pruned_loss=0.05832, over 929768.48 frames. 
], batch size: 10, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:12:26,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131069.33333333333, ans=0.125 +2024-07-28 08:12:27,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131082.66666666666, ans=0.1 +2024-07-28 08:12:29,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=131082.66666666666, ans=10.0 +2024-07-28 08:12:30,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=131082.66666666666, ans=0.125 +2024-07-28 08:12:52,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131122.66666666666, ans=0.1 +2024-07-28 08:12:54,185 INFO [train.py:1114] (3/4) Epoch 10, batch 6350, loss[loss=0.1973, simple_loss=0.2797, pruned_loss=0.05746, over 4461.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2854, pruned_loss=0.05711, over 933690.85 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:13:07,204 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.570e+01 6.150e+01 7.348e+01 9.033e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 08:13:10,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=131162.66666666666, ans=0.125 +2024-07-28 08:13:16,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=131176.0, ans=0.2 +2024-07-28 08:13:24,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.66 vs. limit=10.0 +2024-07-28 08:13:28,739 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=10.0 +2024-07-28 08:13:29,579 INFO [train.py:1114] (3/4) Epoch 10, batch 6400, loss[loss=0.1778, simple_loss=0.2704, pruned_loss=0.04264, over 4640.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2859, pruned_loss=0.05716, over 935063.01 frames. ], batch size: 13, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:13:42,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.29 vs. limit=15.0 +2024-07-28 08:13:49,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.04 vs. limit=15.0 +2024-07-28 08:13:51,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=131242.66666666666, ans=0.0 +2024-07-28 08:14:01,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.37 vs. limit=15.0 +2024-07-28 08:14:03,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=131256.0, ans=0.1 +2024-07-28 08:14:06,252 INFO [train.py:1114] (3/4) Epoch 10, batch 6450, loss[loss=0.255, simple_loss=0.3316, pruned_loss=0.0892, over 4479.00 frames. 
], tot_loss[loss=0.2008, simple_loss=0.2871, pruned_loss=0.05724, over 938765.42 frames. ], batch size: 21, lr: 7.47e-03, grad_scale: 32.0 +2024-07-28 08:14:16,714 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.892e+01 6.683e+01 7.805e+01 1.062e+02, threshold=1.337e+02, percent-clipped=0.0 +2024-07-28 08:14:18,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=131296.0, ans=0.125 +2024-07-28 08:14:23,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=131296.0, ans=0.125 +2024-07-28 08:14:28,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.05 vs. limit=15.0 +2024-07-28 08:14:38,897 INFO [train.py:1114] (3/4) Epoch 10, batch 6500, loss[loss=0.2639, simple_loss=0.324, pruned_loss=0.1019, over 3048.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.286, pruned_loss=0.05632, over 939430.19 frames. ], batch size: 35, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:14:50,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=131349.33333333334, ans=0.0 +2024-07-28 08:14:53,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.92 vs. limit=10.0 +2024-07-28 08:14:58,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=131376.0, ans=0.2 +2024-07-28 08:14:59,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=131376.0, ans=0.2 +2024-07-28 08:15:01,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.00 vs. limit=15.0 +2024-07-28 08:15:11,955 INFO [train.py:1114] (3/4) Epoch 10, batch 6550, loss[loss=0.1851, simple_loss=0.2664, pruned_loss=0.05196, over 4809.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2857, pruned_loss=0.05582, over 942424.85 frames. ], batch size: 11, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:15:14,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=131402.66666666666, ans=0.125 +2024-07-28 08:15:19,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=131416.0, ans=0.2 +2024-07-28 08:15:32,282 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.455e+01 5.898e+01 6.813e+01 1.235e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 08:15:42,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=131429.33333333334, ans=0.0 +2024-07-28 08:15:42,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.12 vs. 
limit=12.0 +2024-07-28 08:15:51,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=131442.66666666666, ans=0.1 +2024-07-28 08:16:02,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=131456.0, ans=0.0 +2024-07-28 08:16:03,315 INFO [train.py:1114] (3/4) Epoch 10, batch 6600, loss[loss=0.1765, simple_loss=0.2781, pruned_loss=0.03743, over 4933.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2855, pruned_loss=0.05604, over 944448.71 frames. ], batch size: 14, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:09,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.11 vs. limit=15.0 +2024-07-28 08:16:24,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=131509.33333333334, ans=0.07 +2024-07-28 08:16:34,982 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.97 vs. limit=15.0 +2024-07-28 08:16:37,432 INFO [train.py:1114] (3/4) Epoch 10, batch 6650, loss[loss=0.2087, simple_loss=0.3014, pruned_loss=0.05803, over 4624.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2856, pruned_loss=0.05593, over 943356.35 frames. ], batch size: 17, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:16:38,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=131536.0, ans=0.125 +2024-07-28 08:16:44,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.22 vs. limit=15.0 +2024-07-28 08:16:48,317 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.943e+01 5.716e+01 6.391e+01 7.041e+01 1.048e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-28 08:16:49,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=131549.33333333334, ans=0.125 +2024-07-28 08:16:53,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=131562.66666666666, ans=0.125 +2024-07-28 08:17:01,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=131576.0, ans=0.125 +2024-07-28 08:17:11,433 INFO [train.py:1114] (3/4) Epoch 10, batch 6700, loss[loss=0.2365, simple_loss=0.3296, pruned_loss=0.07168, over 4731.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2861, pruned_loss=0.05628, over 942054.54 frames. ], batch size: 19, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:17:18,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.96 vs. limit=10.0 +2024-07-28 08:17:22,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.92 vs. limit=15.0 +2024-07-28 08:17:42,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.69 vs. 
limit=15.0 +2024-07-28 08:17:50,784 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:17:54,052 INFO [train.py:1114] (3/4) Epoch 10, batch 6750, loss[loss=0.2377, simple_loss=0.3216, pruned_loss=0.07688, over 4118.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.286, pruned_loss=0.05615, over 939941.32 frames. ], batch size: 25, lr: 7.46e-03, grad_scale: 32.0 +2024-07-28 08:18:03,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.75 vs. limit=15.0 +2024-07-28 08:18:04,617 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.726e+01 6.534e+01 7.091e+01 1.095e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 08:18:19,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=131709.33333333334, ans=0.125 +2024-07-28 08:18:21,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=131709.33333333334, ans=0.125 +2024-07-28 08:18:23,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=131722.66666666666, ans=0.0 +2024-07-28 08:18:29,032 INFO [train.py:1114] (3/4) Epoch 10, batch 6800, loss[loss=0.2148, simple_loss=0.2958, pruned_loss=0.06693, over 4640.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2867, pruned_loss=0.05669, over 938500.97 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:18:32,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=131736.0, ans=0.125 +2024-07-28 08:18:37,676 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:18:39,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=131749.33333333334, ans=0.0 +2024-07-28 08:18:48,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=131776.0, ans=0.1 +2024-07-28 08:18:50,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=131776.0, ans=0.5 +2024-07-28 08:18:56,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=131789.33333333334, ans=0.025 +2024-07-28 08:19:02,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=131789.33333333334, ans=0.125 +2024-07-28 08:19:03,573 INFO [train.py:1114] (3/4) Epoch 10, batch 6850, loss[loss=0.1956, simple_loss=0.2882, pruned_loss=0.05149, over 4702.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2863, pruned_loss=0.05667, over 940188.75 frames. 
], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:13,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=131816.0, ans=0.0 +2024-07-28 08:19:13,970 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.736e+01 6.428e+01 7.691e+01 1.005e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 08:19:32,382 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=131856.0, ans=10.0 +2024-07-28 08:19:36,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.21 vs. limit=15.0 +2024-07-28 08:19:38,306 INFO [train.py:1114] (3/4) Epoch 10, batch 6900, loss[loss=0.1812, simple_loss=0.2654, pruned_loss=0.04847, over 4963.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2873, pruned_loss=0.05715, over 942686.60 frames. ], batch size: 13, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:19:39,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=131869.33333333334, ans=0.2 +2024-07-28 08:20:07,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=131922.66666666666, ans=0.025 +2024-07-28 08:20:13,422 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.16 vs. limit=22.5 +2024-07-28 08:20:13,660 INFO [train.py:1114] (3/4) Epoch 10, batch 6950, loss[loss=0.1816, simple_loss=0.2635, pruned_loss=0.0498, over 4506.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2865, pruned_loss=0.05684, over 939973.88 frames. ], batch size: 10, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:20,222 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:20:20,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=131949.33333333334, ans=0.125 +2024-07-28 08:20:21,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=131949.33333333334, ans=0.125 +2024-07-28 08:20:23,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=131949.33333333334, ans=0.1 +2024-07-28 08:20:24,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.214e+01 5.671e+01 6.271e+01 7.214e+01 1.195e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 08:20:37,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.42 vs. limit=22.5 +2024-07-28 08:20:46,958 INFO [train.py:1114] (3/4) Epoch 10, batch 7000, loss[loss=0.2556, simple_loss=0.3306, pruned_loss=0.09028, over 4584.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2859, pruned_loss=0.05683, over 938308.91 frames. 
], batch size: 17, lr: 7.45e-03, grad_scale: 32.0 +2024-07-28 08:20:56,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=132016.0, ans=10.0 +2024-07-28 08:21:06,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. limit=15.0 +2024-07-28 08:21:06,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132042.66666666666, ans=0.1 +2024-07-28 08:21:07,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=132042.66666666666, ans=0.125 +2024-07-28 08:21:11,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=132042.66666666666, ans=0.125 +2024-07-28 08:21:13,602 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:21:19,393 INFO [train.py:1114] (3/4) Epoch 10, batch 7050, loss[loss=0.2373, simple_loss=0.3207, pruned_loss=0.07688, over 4737.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2862, pruned_loss=0.05652, over 941661.17 frames. ], batch size: 19, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:24,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=132069.33333333334, ans=0.2 +2024-07-28 08:21:30,143 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.787e+01 6.450e+01 7.707e+01 1.222e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 08:21:40,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.84 vs. limit=10.0 +2024-07-28 08:21:45,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=15.0 +2024-07-28 08:21:52,625 INFO [train.py:1114] (3/4) Epoch 10, batch 7100, loss[loss=0.2108, simple_loss=0.3022, pruned_loss=0.05971, over 4788.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2884, pruned_loss=0.05812, over 936771.63 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:21:59,252 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:22:03,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=132149.33333333334, ans=0.2 +2024-07-28 08:22:08,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=132162.66666666666, ans=0.0 +2024-07-28 08:22:12,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.54 vs. 
limit=15.0 +2024-07-28 08:22:15,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=132176.0, ans=0.0 +2024-07-28 08:22:19,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=132189.33333333334, ans=0.0 +2024-07-28 08:22:20,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-07-28 08:22:25,455 INFO [train.py:1114] (3/4) Epoch 10, batch 7150, loss[loss=0.2303, simple_loss=0.3057, pruned_loss=0.07745, over 4611.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.287, pruned_loss=0.05757, over 937485.27 frames. ], batch size: 21, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:22:27,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.44 vs. limit=10.0 +2024-07-28 08:22:35,934 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.833e+01 6.423e+01 7.127e+01 1.033e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 08:22:40,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.43 vs. limit=15.0 +2024-07-28 08:22:47,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=132242.66666666666, ans=0.125 +2024-07-28 08:22:53,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.25 vs. limit=15.0 +2024-07-28 08:22:58,299 INFO [train.py:1114] (3/4) Epoch 10, batch 7200, loss[loss=0.2241, simple_loss=0.3092, pruned_loss=0.06948, over 4807.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2871, pruned_loss=0.05716, over 937733.23 frames. ], batch size: 15, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:22:58,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=132269.33333333334, ans=0.2 +2024-07-28 08:22:59,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=132269.33333333334, ans=0.025 +2024-07-28 08:23:01,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.44 vs. limit=15.0 +2024-07-28 08:23:33,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=132309.33333333334, ans=0.0 +2024-07-28 08:23:35,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=132322.66666666666, ans=0.0 +2024-07-28 08:23:41,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132322.66666666666, ans=0.1 +2024-07-28 08:23:42,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.18 vs. limit=10.0 +2024-07-28 08:23:42,263 INFO [train.py:1114] (3/4) Epoch 10, batch 7250, loss[loss=0.1722, simple_loss=0.2516, pruned_loss=0.04636, over 4854.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2862, pruned_loss=0.05667, over 939409.47 frames. 
], batch size: 12, lr: 7.44e-03, grad_scale: 32.0 +2024-07-28 08:23:46,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=132336.0, ans=0.2 +2024-07-28 08:23:48,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=132349.33333333334, ans=0.125 +2024-07-28 08:23:48,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.72 vs. limit=22.5 +2024-07-28 08:23:52,710 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.603e+01 6.257e+01 7.383e+01 1.105e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 08:23:53,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=132349.33333333334, ans=10.0 +2024-07-28 08:23:56,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=132362.66666666666, ans=0.125 +2024-07-28 08:23:57,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.40 vs. limit=15.0 +2024-07-28 08:23:58,867 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:24:09,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=132389.33333333334, ans=0.125 +2024-07-28 08:24:13,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=132389.33333333334, ans=0.2 +2024-07-28 08:24:15,233 INFO [train.py:1114] (3/4) Epoch 10, batch 7300, loss[loss=0.2017, simple_loss=0.2798, pruned_loss=0.06176, over 4842.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2864, pruned_loss=0.05682, over 940042.72 frames. ], batch size: 12, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:24:24,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132416.0, ans=0.1 +2024-07-28 08:24:31,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=132429.33333333334, ans=0.025 +2024-07-28 08:24:35,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=132442.66666666666, ans=0.05 +2024-07-28 08:24:39,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-28 08:24:42,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=132456.0, ans=0.125 +2024-07-28 08:24:42,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.16 vs. limit=10.0 +2024-07-28 08:24:48,302 INFO [train.py:1114] (3/4) Epoch 10, batch 7350, loss[loss=0.1633, simple_loss=0.261, pruned_loss=0.03275, over 4643.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2871, pruned_loss=0.05727, over 939615.75 frames. 
], batch size: 12, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:24:49,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132469.33333333334, ans=0.1 +2024-07-28 08:24:58,620 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.561e+01 5.989e+01 6.823e+01 9.799e+01, threshold=1.198e+02, percent-clipped=0.0 +2024-07-28 08:24:59,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=132482.66666666666, ans=0.0 +2024-07-28 08:25:04,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132496.0, ans=0.1 +2024-07-28 08:25:13,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.77 vs. limit=10.0 +2024-07-28 08:25:13,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.58 vs. limit=15.0 +2024-07-28 08:25:14,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.70 vs. limit=8.0 +2024-07-28 08:25:20,772 INFO [train.py:1114] (3/4) Epoch 10, batch 7400, loss[loss=0.1917, simple_loss=0.2836, pruned_loss=0.04991, over 4702.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2875, pruned_loss=0.05748, over 941009.30 frames. ], batch size: 13, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:25:33,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=132562.66666666666, ans=0.125 +2024-07-28 08:25:39,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=132576.0, ans=0.125 +2024-07-28 08:25:43,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.36 vs. limit=22.5 +2024-07-28 08:25:53,284 INFO [train.py:1114] (3/4) Epoch 10, batch 7450, loss[loss=0.1581, simple_loss=0.2462, pruned_loss=0.03506, over 4602.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2861, pruned_loss=0.05699, over 937999.89 frames. ], batch size: 11, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:25:54,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=132602.66666666666, ans=0.125 +2024-07-28 08:25:59,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=132616.0, ans=0.025 +2024-07-28 08:26:05,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.617e+01 6.160e+01 7.093e+01 9.986e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 08:26:05,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132616.0, ans=0.125 +2024-07-28 08:26:09,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.33 vs. 
limit=15.0 +2024-07-28 08:26:15,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=132642.66666666666, ans=0.1 +2024-07-28 08:26:18,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=132642.66666666666, ans=0.125 +2024-07-28 08:26:24,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=132656.0, ans=0.0 +2024-07-28 08:26:27,361 INFO [train.py:1114] (3/4) Epoch 10, batch 7500, loss[loss=0.2507, simple_loss=0.3205, pruned_loss=0.09045, over 3656.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2855, pruned_loss=0.05664, over 936493.22 frames. ], batch size: 35, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:26:41,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=132696.0, ans=0.0 +2024-07-28 08:26:41,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=132696.0, ans=0.2 +2024-07-28 08:26:50,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=132709.33333333334, ans=0.2 +2024-07-28 08:26:54,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=132722.66666666666, ans=0.125 +2024-07-28 08:27:00,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=132736.0, ans=0.2 +2024-07-28 08:27:00,937 INFO [train.py:1114] (3/4) Epoch 10, batch 7550, loss[loss=0.2189, simple_loss=0.3109, pruned_loss=0.06347, over 4672.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2863, pruned_loss=0.05695, over 936172.24 frames. ], batch size: 17, lr: 7.43e-03, grad_scale: 32.0 +2024-07-28 08:27:11,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.843e+01 6.500e+01 7.580e+01 1.303e+02, threshold=1.300e+02, percent-clipped=2.0 +2024-07-28 08:27:28,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=15.45 vs. limit=15.0 +2024-07-28 08:27:29,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.19 vs. limit=10.0 +2024-07-28 08:27:36,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132789.33333333334, ans=0.1 +2024-07-28 08:27:45,627 INFO [train.py:1114] (3/4) Epoch 10, batch 7600, loss[loss=0.1821, simple_loss=0.2835, pruned_loss=0.04038, over 4818.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2847, pruned_loss=0.05602, over 937932.61 frames. 
+2024-07-28 08:27:54,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132816.0, ans=0.1
+2024-07-28 08:27:59,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=132829.33333333334, ans=0.125
+2024-07-28 08:28:10,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=132842.66666666666, ans=0.125
+2024-07-28 08:28:19,600 INFO [train.py:1114] (3/4) Epoch 10, batch 7650, loss[loss=0.1784, simple_loss=0.253, pruned_loss=0.05197, over 4928.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2849, pruned_loss=0.05605, over 937208.09 frames. ], batch size: 12, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:28:26,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=132882.66666666666, ans=0.0
+2024-07-28 08:28:30,219 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.708e+01 5.574e+01 6.113e+01 7.353e+01 1.031e+02, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 08:28:31,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=132882.66666666666, ans=0.09899494936611666
+2024-07-28 08:28:56,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=132922.66666666666, ans=0.0
+2024-07-28 08:28:58,471 INFO [train.py:1114] (3/4) Epoch 10, batch 7700, loss[loss=0.1747, simple_loss=0.2749, pruned_loss=0.03721, over 4686.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2854, pruned_loss=0.05657, over 934980.00 frames. ], batch size: 13, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:29:15,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=132949.33333333334, ans=0.2
+2024-07-28 08:29:18,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=132962.66666666666, ans=0.1
+2024-07-28 08:29:21,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=132962.66666666666, ans=0.0
+2024-07-28 08:29:27,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=132976.0, ans=0.2
+2024-07-28 08:29:28,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=132989.33333333334, ans=0.1
+2024-07-28 08:29:35,844 INFO [train.py:1114] (3/4) Epoch 10, batch 7750, loss[loss=0.1847, simple_loss=0.2838, pruned_loss=0.04284, over 4941.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2859, pruned_loss=0.05648, over 935772.86 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:29:46,087 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.644e+01 6.328e+01 7.366e+01 9.654e+01, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 08:29:50,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=133029.33333333334, ans=0.1
+2024-07-28 08:29:51,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.83 vs. limit=15.0
+2024-07-28 08:30:01,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=133056.0, ans=0.025
+2024-07-28 08:30:04,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=133056.0, ans=0.0
+2024-07-28 08:30:08,495 INFO [train.py:1114] (3/4) Epoch 10, batch 7800, loss[loss=0.1888, simple_loss=0.2864, pruned_loss=0.04555, over 4667.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2865, pruned_loss=0.05673, over 937514.16 frames. ], batch size: 14, lr: 7.42e-03, grad_scale: 32.0
+2024-07-28 08:30:17,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=133082.66666666666, ans=0.125
+2024-07-28 08:30:32,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.63 vs. limit=6.0
+2024-07-28 08:30:33,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.59 vs. limit=15.0
+2024-07-28 08:30:35,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133122.66666666666, ans=0.1
+2024-07-28 08:30:46,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=133122.66666666666, ans=0.0
+2024-07-28 08:30:48,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=133122.66666666666, ans=0.0
+2024-07-28 08:30:51,381 INFO [train.py:1114] (3/4) Epoch 10, batch 7850, loss[loss=0.1847, simple_loss=0.2724, pruned_loss=0.0485, over 4505.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2865, pruned_loss=0.05704, over 936221.14 frames. ], batch size: 10, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:31:14,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.13 vs. limit=15.0
+2024-07-28 08:31:16,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.726e+01 6.171e+01 6.913e+01 1.107e+02, threshold=1.234e+02, percent-clipped=0.0
+2024-07-28 08:31:22,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=133162.66666666666, ans=0.125
+2024-07-28 08:31:25,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=133176.0, ans=0.125
+2024-07-28 08:31:26,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=133176.0, ans=0.2
+2024-07-28 08:31:38,775 INFO [train.py:1114] (3/4) Epoch 10, batch 7900, loss[loss=0.1991, simple_loss=0.2876, pruned_loss=0.05527, over 4878.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2876, pruned_loss=0.05748, over 933500.67 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:31:39,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=133202.66666666666, ans=0.125
+2024-07-28 08:31:55,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=133216.0, ans=0.0
+2024-07-28 08:32:01,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133229.33333333334, ans=0.1
+2024-07-28 08:32:07,866 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.55 vs. limit=12.0
+2024-07-28 08:32:12,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=133229.33333333334, ans=0.125
+2024-07-28 08:32:18,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=133242.66666666666, ans=0.125
+2024-07-28 08:32:19,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133256.0, ans=0.125
+2024-07-28 08:32:22,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.56 vs. limit=15.0
+2024-07-28 08:32:26,513 INFO [train.py:1114] (3/4) Epoch 10, batch 7950, loss[loss=0.2374, simple_loss=0.3146, pruned_loss=0.08016, over 3379.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2874, pruned_loss=0.05743, over 935831.27 frames. ], batch size: 35, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:32:34,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=133282.66666666666, ans=0.125
+2024-07-28 08:32:34,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=133282.66666666666, ans=0.1
+2024-07-28 08:32:35,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133282.66666666666, ans=0.125
+2024-07-28 08:32:37,003 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.725e+01 6.303e+01 6.935e+01 1.035e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 08:32:42,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=133296.0, ans=0.0
+2024-07-28 08:32:46,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. limit=15.0
+2024-07-28 08:33:00,925 INFO [train.py:1114] (3/4) Epoch 10, batch 8000, loss[loss=0.176, simple_loss=0.2548, pruned_loss=0.04858, over 4611.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2859, pruned_loss=0.05709, over 934923.24 frames. ], batch size: 11, lr: 7.41e-03, grad_scale: 32.0
+2024-07-28 08:33:08,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=133349.33333333334, ans=0.025
+2024-07-28 08:33:08,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=133349.33333333334, ans=0.1
+2024-07-28 08:33:13,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.44 vs. limit=22.5
+2024-07-28 08:33:32,894 INFO [train.py:1114] (3/4) Epoch 10, batch 8050, loss[loss=0.1912, simple_loss=0.2873, pruned_loss=0.04758, over 4808.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2857, pruned_loss=0.05684, over 934687.96 frames. ], batch size: 14, lr: 7.41e-03, grad_scale: 64.0
+2024-07-28 08:33:43,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.682e+01 6.216e+01 7.101e+01 1.040e+02, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 08:33:52,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=133429.33333333334, ans=0.1
+2024-07-28 08:33:54,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=133442.66666666666, ans=0.0
+2024-07-28 08:33:56,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=133442.66666666666, ans=0.035
+2024-07-28 08:34:03,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=133456.0, ans=0.125
+2024-07-28 08:34:03,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.06 vs. limit=15.0
+2024-07-28 08:34:06,151 INFO [train.py:1114] (3/4) Epoch 10, batch 8100, loss[loss=0.1866, simple_loss=0.278, pruned_loss=0.04761, over 4792.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2866, pruned_loss=0.05659, over 934194.90 frames. ], batch size: 15, lr: 7.41e-03, grad_scale: 64.0
+2024-07-28 08:34:14,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.31 vs. limit=6.0
+2024-07-28 08:34:26,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=133509.33333333334, ans=0.125
+2024-07-28 08:34:37,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133522.66666666666, ans=0.125
+2024-07-28 08:34:38,562 INFO [train.py:1114] (3/4) Epoch 10, batch 8150, loss[loss=0.2015, simple_loss=0.2947, pruned_loss=0.05412, over 4804.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2857, pruned_loss=0.05619, over 937344.91 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:34:39,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=133536.0, ans=0.125
+2024-07-28 08:34:45,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=133549.33333333334, ans=0.125
+2024-07-28 08:34:48,785 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.775e+01 6.372e+01 7.050e+01 1.046e+02, threshold=1.274e+02, percent-clipped=0.0
+2024-07-28 08:34:59,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=133576.0, ans=0.125
+2024-07-28 08:35:09,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=133589.33333333334, ans=0.0
+2024-07-28 08:35:11,084 INFO [train.py:1114] (3/4) Epoch 10, batch 8200, loss[loss=0.2503, simple_loss=0.3223, pruned_loss=0.0892, over 4804.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2858, pruned_loss=0.05624, over 938302.40 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:35:15,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.56 vs. limit=10.0
+2024-07-28 08:35:16,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0
+2024-07-28 08:35:53,141 INFO [train.py:1114] (3/4) Epoch 10, batch 8250, loss[loss=0.2246, simple_loss=0.3141, pruned_loss=0.06757, over 4906.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2859, pruned_loss=0.05642, over 938574.30 frames. ], batch size: 13, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:35:54,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.82 vs. limit=10.0
+2024-07-28 08:35:54,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=133669.33333333334, ans=22.5
+2024-07-28 08:35:54,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.32 vs. limit=22.5
+2024-07-28 08:36:06,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=133682.66666666666, ans=0.125
+2024-07-28 08:36:10,114 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.694e+01 6.323e+01 7.329e+01 1.024e+02, threshold=1.265e+02, percent-clipped=0.0
+2024-07-28 08:36:11,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=133682.66666666666, ans=0.125
+2024-07-28 08:36:24,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=133709.33333333334, ans=0.5
+2024-07-28 08:36:25,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.66 vs. limit=22.5
+2024-07-28 08:36:32,393 INFO [train.py:1114] (3/4) Epoch 10, batch 8300, loss[loss=0.2197, simple_loss=0.3033, pruned_loss=0.06809, over 4895.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2869, pruned_loss=0.05684, over 938621.02 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:36:37,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=133736.0, ans=0.125
+2024-07-28 08:36:40,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.62 vs. limit=12.0
+2024-07-28 08:36:55,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=133776.0, ans=0.0
+2024-07-28 08:36:59,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=133789.33333333334, ans=0.125
+2024-07-28 08:37:04,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=133789.33333333334, ans=0.125
+2024-07-28 08:37:05,224 INFO [train.py:1114] (3/4) Epoch 10, batch 8350, loss[loss=0.2298, simple_loss=0.315, pruned_loss=0.0723, over 4809.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.286, pruned_loss=0.0563, over 941472.78 frames. ], batch size: 15, lr: 7.40e-03, grad_scale: 64.0
+2024-07-28 08:37:08,865 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0
+2024-07-28 08:37:13,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=133816.0, ans=0.09899494936611666
+2024-07-28 08:37:14,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0
+2024-07-28 08:37:15,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=133816.0, ans=0.0
+2024-07-28 08:37:17,104 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.721e+01 6.167e+01 6.839e+01 1.069e+02, threshold=1.233e+02, percent-clipped=0.0
+2024-07-28 08:37:18,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=133816.0, ans=0.125
+2024-07-28 08:37:23,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=133829.33333333334, ans=0.2
+2024-07-28 08:37:31,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=133842.66666666666, ans=0.125
+2024-07-28 08:37:34,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=133856.0, ans=0.125
+2024-07-28 08:37:39,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. limit=6.0
+2024-07-28 08:37:44,280 INFO [train.py:1114] (3/4) Epoch 10, batch 8400, loss[loss=0.1605, simple_loss=0.2544, pruned_loss=0.0333, over 4776.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2863, pruned_loss=0.05681, over 940315.47 frames. ], batch size: 12, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:37:51,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=133882.66666666666, ans=0.125
+2024-07-28 08:38:00,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=133896.0, ans=0.0
+2024-07-28 08:38:04,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=133909.33333333334, ans=0.0
+2024-07-28 08:38:17,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=133922.66666666666, ans=0.125
+2024-07-28 08:38:18,212 INFO [train.py:1114] (3/4) Epoch 10, batch 8450, loss[loss=0.2289, simple_loss=0.3138, pruned_loss=0.072, over 4804.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2875, pruned_loss=0.05729, over 938791.57 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:38:28,300 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.805e+01 6.479e+01 7.666e+01 1.044e+02, threshold=1.296e+02, percent-clipped=0.0
+2024-07-28 08:38:29,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.89 vs. limit=15.0
+2024-07-28 08:38:43,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=133976.0, ans=0.0
+2024-07-28 08:39:02,048 INFO [train.py:1114] (3/4) Epoch 10, batch 8500, loss[loss=0.1928, simple_loss=0.2715, pruned_loss=0.05708, over 4618.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2866, pruned_loss=0.05719, over 938695.85 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:39:12,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=134002.66666666666, ans=0.04949747468305833
+2024-07-28 08:39:13,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=134002.66666666666, ans=0.125
+2024-07-28 08:39:33,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=134029.33333333334, ans=0.125
+2024-07-28 08:40:24,356 INFO [train.py:1114] (3/4) Epoch 10, batch 8550, loss[loss=0.2177, simple_loss=0.293, pruned_loss=0.07119, over 4817.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2862, pruned_loss=0.05696, over 940082.53 frames. ], batch size: 11, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:40:27,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.27 vs. limit=22.5
+2024-07-28 08:40:44,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.946e+01 6.615e+01 7.789e+01 1.197e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-28 08:41:19,433 INFO [train.py:1114] (3/4) Epoch 10, batch 8600, loss[loss=0.2039, simple_loss=0.3037, pruned_loss=0.05202, over 4810.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2859, pruned_loss=0.05686, over 939406.32 frames. ], batch size: 15, lr: 7.39e-03, grad_scale: 64.0
+2024-07-28 08:41:20,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=134136.0, ans=0.2
+2024-07-28 08:41:31,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134149.33333333334, ans=0.125
+2024-07-28 08:41:31,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=134162.66666666666, ans=0.04949747468305833
+2024-07-28 08:41:59,314 INFO [train.py:1114] (3/4) Epoch 10, batch 8650, loss[loss=0.1978, simple_loss=0.291, pruned_loss=0.05226, over 4909.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.286, pruned_loss=0.05683, over 940514.07 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:42:03,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134202.66666666666, ans=0.1
+2024-07-28 08:42:09,608 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.967e+01 5.912e+01 6.591e+01 7.425e+01 1.041e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-28 08:42:15,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=134229.33333333334, ans=0.2
+2024-07-28 08:42:17,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134229.33333333334, ans=0.125
+2024-07-28 08:42:18,794 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:42:42,711 INFO [train.py:1114] (3/4) Epoch 10, batch 8700, loss[loss=0.1914, simple_loss=0.2696, pruned_loss=0.05656, over 4766.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2881, pruned_loss=0.05807, over 938328.08 frames. ], batch size: 13, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:42:43,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=134269.33333333334, ans=0.0
+2024-07-28 08:42:54,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=134282.66666666666, ans=0.1
+2024-07-28 08:42:56,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.93 vs. limit=15.0
+2024-07-28 08:43:06,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.46 vs. limit=15.0
+2024-07-28 08:43:07,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=134309.33333333334, ans=0.5
+2024-07-28 08:43:12,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=134322.66666666666, ans=0.125
+2024-07-28 08:43:14,602 INFO [train.py:1114] (3/4) Epoch 10, batch 8750, loss[loss=0.2158, simple_loss=0.3076, pruned_loss=0.06197, over 4693.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2876, pruned_loss=0.05728, over 936844.86 frames. ], batch size: 15, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:16,849 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.39 vs. limit=15.0
+2024-07-28 08:43:24,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134349.33333333334, ans=0.0
+2024-07-28 08:43:24,876 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.734e+01 6.452e+01 7.346e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-28 08:43:27,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0
+2024-07-28 08:43:29,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=134362.66666666666, ans=0.0
+2024-07-28 08:43:46,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=134389.33333333334, ans=0.125
+2024-07-28 08:43:47,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=134389.33333333334, ans=0.125
+2024-07-28 08:43:48,697 INFO [train.py:1114] (3/4) Epoch 10, batch 8800, loss[loss=0.2344, simple_loss=0.318, pruned_loss=0.07545, over 4930.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2873, pruned_loss=0.05716, over 937434.53 frames. ], batch size: 14, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:43:58,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=134416.0, ans=0.125
+2024-07-28 08:44:17,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=134456.0, ans=0.125
+2024-07-28 08:44:22,025 INFO [train.py:1114] (3/4) Epoch 10, batch 8850, loss[loss=0.2014, simple_loss=0.2925, pruned_loss=0.0551, over 4506.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2876, pruned_loss=0.05732, over 931864.41 frames. ], batch size: 21, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:44:22,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=134469.33333333334, ans=0.125
+2024-07-28 08:44:22,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=134469.33333333334, ans=0.125
+2024-07-28 08:44:22,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=134469.33333333334, ans=0.125
+2024-07-28 08:44:34,668 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.576e+01 6.226e+01 7.150e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 08:44:42,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=134496.0, ans=0.125
+2024-07-28 08:44:43,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=134509.33333333334, ans=0.125
+2024-07-28 08:44:54,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=134522.66666666666, ans=0.125
+2024-07-28 08:44:56,773 INFO [train.py:1114] (3/4) Epoch 10, batch 8900, loss[loss=0.1991, simple_loss=0.2811, pruned_loss=0.0586, over 4926.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2877, pruned_loss=0.05724, over 930060.02 frames. ], batch size: 12, lr: 7.38e-03, grad_scale: 64.0
+2024-07-28 08:45:00,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=134536.0, ans=0.0
+2024-07-28 08:45:09,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134549.33333333334, ans=0.1
+2024-07-28 08:45:26,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134562.66666666666, ans=0.1
+2024-07-28 08:45:29,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=134562.66666666666, ans=10.0
+2024-07-28 08:45:35,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=134576.0, ans=0.0
+2024-07-28 08:45:40,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.66 vs. limit=5.0
+2024-07-28 08:45:43,821 INFO [train.py:1114] (3/4) Epoch 10, batch 8950, loss[loss=0.2177, simple_loss=0.3117, pruned_loss=0.06184, over 4550.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2864, pruned_loss=0.05678, over 930657.84 frames. ], batch size: 21, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:45:46,090 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=12.0
+2024-07-28 08:45:49,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.80 vs. limit=22.5
+2024-07-28 08:45:53,855 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.674e+01 5.627e+01 6.283e+01 7.444e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 08:46:15,699 INFO [train.py:1114] (3/4) Epoch 10, batch 9000, loss[loss=0.1841, simple_loss=0.2705, pruned_loss=0.04885, over 4639.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2859, pruned_loss=0.05683, over 933965.96 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:46:15,699 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 08:46:28,323 INFO [train.py:1146] (3/4) Epoch 10, validation: loss=0.1719, simple_loss=0.2766, pruned_loss=0.0336, over 944034.00 frames.
+2024-07-28 08:46:28,324 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 08:46:48,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.79 vs. limit=15.0
+2024-07-28 08:47:00,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.01 vs. limit=22.5
+2024-07-28 08:47:00,433 INFO [train.py:1114] (3/4) Epoch 10, batch 9050, loss[loss=0.1441, simple_loss=0.2213, pruned_loss=0.03342, over 4516.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2838, pruned_loss=0.05556, over 934288.14 frames. ], batch size: 10, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:47:04,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=134736.0, ans=0.125
+2024-07-28 08:47:06,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=134749.33333333334, ans=0.025
+2024-07-28 08:47:08,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=134749.33333333334, ans=0.2
+2024-07-28 08:47:09,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=134749.33333333334, ans=0.125
+2024-07-28 08:47:10,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.777e+01 6.275e+01 7.546e+01 8.998e+01 1.332e+02, threshold=1.509e+02, percent-clipped=1.0
+2024-07-28 08:47:11,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.26 vs. limit=6.0
+2024-07-28 08:47:12,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=134749.33333333334, ans=0.125
+2024-07-28 08:47:17,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=134762.66666666666, ans=10.0
+2024-07-28 08:47:26,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=134789.33333333334, ans=0.0
+2024-07-28 08:47:32,974 INFO [train.py:1114] (3/4) Epoch 10, batch 9100, loss[loss=0.2002, simple_loss=0.2969, pruned_loss=0.05179, over 4926.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2837, pruned_loss=0.05543, over 936691.33 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:47:37,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.26 vs. limit=15.0
+2024-07-28 08:47:44,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=134816.0, ans=0.125
+2024-07-28 08:47:48,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=134829.33333333334, ans=0.0
+2024-07-28 08:47:50,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=134829.33333333334, ans=0.125
+2024-07-28 08:48:04,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=16.10 vs. limit=15.0
+2024-07-28 08:48:06,627 INFO [train.py:1114] (3/4) Epoch 10, batch 9150, loss[loss=0.1675, simple_loss=0.2568, pruned_loss=0.03912, over 4809.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2838, pruned_loss=0.05566, over 935718.44 frames. ], batch size: 14, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:48:13,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=134882.66666666666, ans=0.1
+2024-07-28 08:48:16,565 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.926e+01 6.660e+01 7.545e+01 1.146e+02, threshold=1.332e+02, percent-clipped=0.0
+2024-07-28 08:48:18,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=134882.66666666666, ans=0.2
+2024-07-28 08:48:24,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=134896.0, ans=0.0
+2024-07-28 08:48:28,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=134909.33333333334, ans=0.05
+2024-07-28 08:48:38,472 INFO [train.py:1114] (3/4) Epoch 10, batch 9200, loss[loss=0.1886, simple_loss=0.2694, pruned_loss=0.05391, over 4859.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2833, pruned_loss=0.05503, over 937628.58 frames. ], batch size: 12, lr: 7.37e-03, grad_scale: 64.0
+2024-07-28 08:48:47,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=134949.33333333334, ans=0.1
+2024-07-28 08:48:57,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=134976.0, ans=0.0
+2024-07-28 08:48:59,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.90 vs. limit=15.0
+2024-07-28 08:49:02,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=134976.0, ans=0.07
+2024-07-28 08:49:07,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=134989.33333333334, ans=0.09899494936611666
+2024-07-28 08:49:10,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=134989.33333333334, ans=0.125
+2024-07-28 08:49:11,332 INFO [train.py:1114] (3/4) Epoch 10, batch 9250, loss[loss=0.199, simple_loss=0.2908, pruned_loss=0.05359, over 4636.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2834, pruned_loss=0.05528, over 938631.19 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:49:11,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=135002.66666666666, ans=0.0
+2024-07-28 08:49:18,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=135016.0, ans=0.0
+2024-07-28 08:49:21,509 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.719e+01 6.236e+01 6.936e+01 9.849e+01, threshold=1.247e+02, percent-clipped=0.0
+2024-07-28 08:49:27,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=135029.33333333334, ans=0.2
+2024-07-28 08:49:31,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=7.08 vs. limit=15.0
+2024-07-28 08:49:37,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135042.66666666666, ans=0.125
+2024-07-28 08:49:41,391 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0
+2024-07-28 08:49:41,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135056.0, ans=0.1
+2024-07-28 08:49:46,100 INFO [train.py:1114] (3/4) Epoch 10, batch 9300, loss[loss=0.1925, simple_loss=0.2684, pruned_loss=0.05826, over 4765.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2837, pruned_loss=0.05535, over 938771.07 frames. ], batch size: 12, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:49:54,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=135082.66666666666, ans=0.2
+2024-07-28 08:49:57,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=135082.66666666666, ans=0.125
+2024-07-28 08:50:02,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.26 vs. limit=15.0
+2024-07-28 08:50:17,994 INFO [train.py:1114] (3/4) Epoch 10, batch 9350, loss[loss=0.1909, simple_loss=0.2649, pruned_loss=0.05842, over 4818.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2845, pruned_loss=0.05573, over 935973.13 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:50:19,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=135136.0, ans=0.125
+2024-07-28 08:50:34,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=135149.33333333334, ans=0.05
+2024-07-28 08:50:36,249 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.628e+01 6.269e+01 7.143e+01 1.097e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-28 08:50:39,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-28 08:50:44,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=135176.0, ans=0.025
+2024-07-28 08:50:58,073 INFO [train.py:1114] (3/4) Epoch 10, batch 9400, loss[loss=0.2172, simple_loss=0.3223, pruned_loss=0.05603, over 4691.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2845, pruned_loss=0.05615, over 933899.90 frames. ], batch size: 13, lr: 7.36e-03, grad_scale: 64.0
+2024-07-28 08:51:02,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=135202.66666666666, ans=0.2
+2024-07-28 08:51:07,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.19 vs. limit=22.5
+2024-07-28 08:51:22,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135242.66666666666, ans=0.125
+2024-07-28 08:51:23,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135256.0, ans=0.125
+2024-07-28 08:51:25,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=135256.0, ans=0.95
+2024-07-28 08:51:28,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=135256.0, ans=0.2
+2024-07-28 08:51:28,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=135269.33333333334, ans=0.125
+2024-07-28 08:51:29,450 INFO [train.py:1114] (3/4) Epoch 10, batch 9450, loss[loss=0.1831, simple_loss=0.2639, pruned_loss=0.0511, over 4797.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2852, pruned_loss=0.05605, over 933417.54 frames. ], batch size: 11, lr: 7.36e-03, grad_scale: 32.0
+2024-07-28 08:51:30,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.51 vs. limit=15.0
+2024-07-28 08:51:32,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=135269.33333333334, ans=0.0
+2024-07-28 08:51:37,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.82 vs. limit=15.0
+2024-07-28 08:51:39,983 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.423e+01 5.630e+01 6.223e+01 7.000e+01 1.011e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 08:51:40,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135282.66666666666, ans=0.125
+2024-07-28 08:51:44,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135296.0, ans=0.125
+2024-07-28 08:51:49,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=135309.33333333334, ans=0.125
+2024-07-28 08:51:55,572 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:52:01,559 INFO [train.py:1114] (3/4) Epoch 10, batch 9500, loss[loss=0.1609, simple_loss=0.249, pruned_loss=0.03644, over 4704.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2858, pruned_loss=0.05615, over 935260.20 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:52:02,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=135336.0, ans=0.2
+2024-07-28 08:52:22,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=135376.0, ans=0.125
+2024-07-28 08:52:34,230 INFO [train.py:1114] (3/4) Epoch 10, batch 9550, loss[loss=0.1801, simple_loss=0.2551, pruned_loss=0.05252, over 4771.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2861, pruned_loss=0.05624, over 932038.97 frames. ], batch size: 12, lr: 7.35e-03, grad_scale: 16.0
+2024-07-28 08:52:34,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135402.66666666666, ans=0.0
+2024-07-28 08:52:40,377 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.05 vs. limit=15.0
+2024-07-28 08:52:48,487 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.659e+01 6.121e+01 6.852e+01 1.035e+02, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 08:52:51,048 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:52:53,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135429.33333333334, ans=0.125
+2024-07-28 08:52:53,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=135429.33333333334, ans=0.125
+2024-07-28 08:52:57,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135442.66666666666, ans=0.125
+2024-07-28 08:53:02,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.93 vs. limit=6.0
+2024-07-28 08:53:06,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=135456.0, ans=0.125
+2024-07-28 08:53:09,498 INFO [train.py:1114] (3/4) Epoch 10, batch 9600, loss[loss=0.2727, simple_loss=0.332, pruned_loss=0.1067, over 3667.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.287, pruned_loss=0.05678, over 931418.37 frames. ], batch size: 35, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:53:11,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.08 vs. limit=12.0
+2024-07-28 08:53:16,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=135469.33333333334, ans=0.0
+2024-07-28 08:53:17,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=135469.33333333334, ans=0.125
+2024-07-28 08:53:19,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.50 vs. limit=15.0
+2024-07-28 08:53:26,185 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.48 vs. limit=15.0
+2024-07-28 08:53:28,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135496.0, ans=0.125
+2024-07-28 08:53:31,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=135509.33333333334, ans=0.2
+2024-07-28 08:53:31,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.62 vs. limit=10.0
+2024-07-28 08:53:32,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=135509.33333333334, ans=0.125
+2024-07-28 08:53:32,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=135509.33333333334, ans=0.0
+2024-07-28 08:53:38,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.10 vs. limit=15.0
+2024-07-28 08:53:43,549 INFO [train.py:1114] (3/4) Epoch 10, batch 9650, loss[loss=0.1908, simple_loss=0.2962, pruned_loss=0.04271, over 4852.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2874, pruned_loss=0.05746, over 927425.55 frames. ], batch size: 16, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:53:43,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=135536.0, ans=0.125
+2024-07-28 08:53:49,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=135549.33333333334, ans=0.125
+2024-07-28 08:53:54,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135549.33333333334, ans=0.1
+2024-07-28 08:53:54,685 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.653e+01 6.117e+01 7.383e+01 9.422e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 08:54:03,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=135576.0, ans=0.125
+2024-07-28 08:54:04,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=135576.0, ans=0.2
+2024-07-28 08:54:07,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.14 vs. limit=10.0
+2024-07-28 08:54:07,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=135589.33333333334, ans=0.125
+2024-07-28 08:54:14,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=135602.66666666666, ans=0.1
+2024-07-28 08:54:15,218 INFO [train.py:1114] (3/4) Epoch 10, batch 9700, loss[loss=0.2161, simple_loss=0.3027, pruned_loss=0.06475, over 4337.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2884, pruned_loss=0.05767, over 925961.10 frames. ], batch size: 25, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:54:22,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=135616.0, ans=0.07
+2024-07-28 08:54:25,545 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.99 vs. limit=22.5
+2024-07-28 08:54:25,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=135616.0, ans=0.125
+2024-07-28 08:54:54,875 INFO [train.py:1114] (3/4) Epoch 10, batch 9750, loss[loss=0.2468, simple_loss=0.3116, pruned_loss=0.091, over 4665.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2875, pruned_loss=0.05741, over 926251.55 frames. ], batch size: 15, lr: 7.35e-03, grad_scale: 32.0
+2024-07-28 08:55:09,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.808e+01 6.506e+01 7.716e+01 1.140e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 08:55:10,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=135696.0, ans=0.125
+2024-07-28 08:55:11,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135696.0, ans=0.1
+2024-07-28 08:55:25,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=135722.66666666666, ans=0.035
+2024-07-28 08:55:27,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=135722.66666666666, ans=0.0
+2024-07-28 08:55:28,968 INFO [train.py:1114] (3/4) Epoch 10, batch 9800, loss[loss=0.1683, simple_loss=0.26, pruned_loss=0.03833, over 4694.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2864, pruned_loss=0.05704, over 926102.14 frames. ], batch size: 12, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:55:46,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=135762.66666666666, ans=0.125
+2024-07-28 08:55:51,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=135776.0, ans=0.1
+2024-07-28 08:56:00,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=135789.33333333334, ans=0.125
+2024-07-28 08:56:01,762 INFO [train.py:1114] (3/4) Epoch 10, batch 9850, loss[loss=0.1868, simple_loss=0.281, pruned_loss=0.04633, over 4893.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2863, pruned_loss=0.05716, over 928696.50 frames. ], batch size: 15, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:56:14,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=135816.0, ans=0.125
+2024-07-28 08:56:14,543 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.916e+01 6.813e+01 8.007e+01 1.183e+02, threshold=1.363e+02, percent-clipped=0.0
+2024-07-28 08:56:22,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=135842.66666666666, ans=0.125
+2024-07-28 08:56:33,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135856.0, ans=0.125
+2024-07-28 08:56:34,476 INFO [train.py:1114] (3/4) Epoch 10, batch 9900, loss[loss=0.22, simple_loss=0.315, pruned_loss=0.06247, over 4825.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2879, pruned_loss=0.05828, over 927741.71 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:56:38,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=135869.33333333334, ans=0.125
+2024-07-28 08:56:41,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=135882.66666666666, ans=0.125
+2024-07-28 08:56:41,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.80 vs. limit=15.0
+2024-07-28 08:56:46,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=135896.0, ans=0.125
+2024-07-28 08:56:49,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.59 vs. limit=15.0
+2024-07-28 08:57:04,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=135922.66666666666, ans=0.125
+2024-07-28 08:57:05,751 INFO [train.py:1114] (3/4) Epoch 10, batch 9950, loss[loss=0.1523, simple_loss=0.2402, pruned_loss=0.03224, over 4543.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2871, pruned_loss=0.05806, over 929179.37 frames. ], batch size: 10, lr: 7.34e-03, grad_scale: 32.0
+2024-07-28 08:57:10,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=135936.0, ans=0.125
+2024-07-28 08:57:15,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135949.33333333334, ans=0.1
+2024-07-28 08:57:17,159 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 6.153e+01 7.007e+01 8.060e+01 1.036e+02, threshold=1.401e+02, percent-clipped=0.0
+2024-07-28 08:57:28,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.26 vs. limit=15.0
+2024-07-28 08:57:29,592 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 08:57:32,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=135989.33333333334, ans=0.025
+2024-07-28 08:57:34,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=135989.33333333334, ans=0.1
+2024-07-28 08:57:35,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=135989.33333333334, ans=0.125
+2024-07-28 08:57:37,847 INFO [train.py:1114] (3/4) Epoch 10, batch 10000, loss[loss=0.1965, simple_loss=0.2855, pruned_loss=0.05374, over 4624.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.29, pruned_loss=0.0586, over 927145.38 frames. ], batch size: 16, lr: 7.34e-03, grad_scale: 32.0
], batch size: 16, lr: 7.34e-03, grad_scale: 32.0 +2024-07-28 08:57:42,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=136002.66666666666, ans=0.125 +2024-07-28 08:57:49,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=136016.0, ans=0.125 +2024-07-28 08:58:05,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=136056.0, ans=0.125 +2024-07-28 08:58:09,777 INFO [train.py:1114] (3/4) Epoch 10, batch 10050, loss[loss=0.2227, simple_loss=0.2844, pruned_loss=0.08046, over 3601.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2926, pruned_loss=0.0603, over 914916.64 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:58:09,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=136069.33333333334, ans=0.07 +2024-07-28 08:58:14,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-07-28 08:58:22,174 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.772e+01 6.455e+01 7.428e+01 1.276e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 08:58:27,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.56 vs. limit=15.0 +2024-07-28 08:58:27,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=136096.0, ans=0.0 +2024-07-28 08:58:30,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136109.33333333334, ans=0.1 +2024-07-28 08:58:34,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=136109.33333333334, ans=0.5 +2024-07-28 08:58:39,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.49 vs. limit=15.0 +2024-07-28 08:58:43,271 INFO [train.py:1114] (3/4) Epoch 10, batch 10100, loss[loss=0.2601, simple_loss=0.3394, pruned_loss=0.09042, over 3336.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2982, pruned_loss=0.06622, over 861998.87 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:58:46,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.34 vs. limit=15.0 +2024-07-28 08:59:02,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=136176.0, ans=0.125 +2024-07-28 08:59:14,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136189.33333333334, ans=0.1 +2024-07-28 08:59:15,617 INFO [train.py:1114] (3/4) Epoch 10, batch 10150, loss[loss=0.2225, simple_loss=0.2871, pruned_loss=0.07898, over 3633.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.3023, pruned_loss=0.07043, over 821035.96 frames. 
], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:59:16,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136202.66666666666, ans=0.1 +2024-07-28 08:59:17,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.85 vs. limit=15.0 +2024-07-28 08:59:19,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=15.00 vs. limit=15.0 +2024-07-28 08:59:21,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=6.38 vs. limit=12.0 +2024-07-28 08:59:23,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten.whitening_limit, batch_count=136216.0, ans=15.0 +2024-07-28 08:59:27,022 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.570e+01 6.618e+01 7.074e+01 7.488e+01 9.490e+01, threshold=1.415e+02, percent-clipped=0.0 +2024-07-28 08:59:35,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=136242.66666666666, ans=0.125 +2024-07-28 08:59:37,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=136242.66666666666, ans=0.025 +2024-07-28 08:59:44,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=136256.0, ans=0.025 +2024-07-28 08:59:47,609 INFO [train.py:1114] (3/4) Epoch 10, batch 10200, loss[loss=0.266, simple_loss=0.3287, pruned_loss=0.1016, over 3279.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3046, pruned_loss=0.07358, over 788180.75 frames. ], batch size: 35, lr: 7.33e-03, grad_scale: 32.0 +2024-07-28 08:59:47,646 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 08:59:49,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136269.33333333334, ans=0.1 +2024-07-28 08:59:54,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=136282.66666666666, ans=0.0 +2024-07-28 09:00:46,199 INFO [train.py:1114] (3/4) Epoch 11, batch 0, loss[loss=0.1952, simple_loss=0.2838, pruned_loss=0.05334, over 4845.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2838, pruned_loss=0.05334, over 4845.00 frames. ], batch size: 12, lr: 7.00e-03, grad_scale: 32.0 +2024-07-28 09:00:46,200 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 09:00:54,821 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([3.7691, 4.1397, 4.3240, 4.3129], device='cuda:3') +2024-07-28 09:00:57,971 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1737, simple_loss=0.279, pruned_loss=0.03421, over 944034.00 frames. +2024-07-28 09:00:57,971 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 09:01:07,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.55 vs. 
limit=6.0 +2024-07-28 09:01:11,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=136325.33333333334, ans=0.125 +2024-07-28 09:01:12,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=136325.33333333334, ans=0.025 +2024-07-28 09:01:15,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136325.33333333334, ans=0.1 +2024-07-28 09:01:20,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=136338.66666666666, ans=0.1 +2024-07-28 09:01:21,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0 +2024-07-28 09:01:24,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=136338.66666666666, ans=0.0 +2024-07-28 09:01:25,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.92 vs. limit=22.5 +2024-07-28 09:01:29,220 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.650e+01 6.307e+01 6.804e+01 7.464e+01 1.172e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:01:32,020 INFO [train.py:1114] (3/4) Epoch 11, batch 50, loss[loss=0.1778, simple_loss=0.2606, pruned_loss=0.04754, over 4610.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2894, pruned_loss=0.05812, over 206625.66 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:01:32,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=136365.33333333334, ans=22.5 +2024-07-28 09:01:35,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=136365.33333333334, ans=15.0 +2024-07-28 09:01:37,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=136365.33333333334, ans=0.125 +2024-07-28 09:01:39,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.58 vs. limit=15.0 +2024-07-28 09:01:41,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136378.66666666666, ans=0.1 +2024-07-28 09:01:43,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=136378.66666666666, ans=0.1 +2024-07-28 09:01:48,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=136392.0, ans=0.125 +2024-07-28 09:01:56,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=136405.33333333334, ans=0.0 +2024-07-28 09:02:05,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=136432.0, ans=0.125 +2024-07-28 09:02:05,895 INFO [train.py:1114] (3/4) Epoch 11, batch 100, loss[loss=0.1874, simple_loss=0.2818, pruned_loss=0.0465, over 4646.00 frames. 
], tot_loss[loss=0.2017, simple_loss=0.2891, pruned_loss=0.05714, over 365826.04 frames. ], batch size: 12, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:02:14,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=136445.33333333334, ans=0.2 +2024-07-28 09:02:15,907 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=15.0 +2024-07-28 09:02:28,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=136472.0, ans=0.2 +2024-07-28 09:02:32,445 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.46 vs. limit=22.5 +2024-07-28 09:02:36,700 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.385e+01 5.958e+01 6.972e+01 1.024e+02, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:02:39,366 INFO [train.py:1114] (3/4) Epoch 11, batch 150, loss[loss=0.1606, simple_loss=0.2475, pruned_loss=0.03685, over 4621.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2848, pruned_loss=0.05468, over 494369.39 frames. ], batch size: 11, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:02:55,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=136525.33333333334, ans=0.0 +2024-07-28 09:02:56,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=136525.33333333334, ans=0.015 +2024-07-28 09:02:58,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=136538.66666666666, ans=0.2 +2024-07-28 09:03:14,117 INFO [train.py:1114] (3/4) Epoch 11, batch 200, loss[loss=0.1911, simple_loss=0.2789, pruned_loss=0.05163, over 4504.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2804, pruned_loss=0.05301, over 593950.89 frames. ], batch size: 21, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:03:16,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.37 vs. limit=12.0 +2024-07-28 09:03:17,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 09:03:19,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.91 vs. limit=15.0 +2024-07-28 09:03:24,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=136578.66666666666, ans=6.0 +2024-07-28 09:03:26,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.89 vs. 
limit=15.0 +2024-07-28 09:03:43,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=136618.66666666666, ans=0.0 +2024-07-28 09:03:45,021 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.746e+01 6.330e+01 7.204e+01 1.314e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 09:03:46,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=136618.66666666666, ans=0.125 +2024-07-28 09:03:47,791 INFO [train.py:1114] (3/4) Epoch 11, batch 250, loss[loss=0.2249, simple_loss=0.3162, pruned_loss=0.06684, over 4595.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2816, pruned_loss=0.05311, over 670497.95 frames. ], batch size: 16, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:03:53,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.86 vs. limit=10.0 +2024-07-28 09:03:54,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=136645.33333333334, ans=0.2 +2024-07-28 09:03:55,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=136645.33333333334, ans=0.0 +2024-07-28 09:03:56,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=136645.33333333334, ans=0.125 +2024-07-28 09:04:17,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=136685.33333333334, ans=0.025 +2024-07-28 09:04:17,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=136685.33333333334, ans=0.0 +2024-07-28 09:04:21,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=136685.33333333334, ans=0.0 +2024-07-28 09:04:25,277 INFO [train.py:1114] (3/4) Epoch 11, batch 300, loss[loss=0.2023, simple_loss=0.302, pruned_loss=0.0513, over 4796.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.281, pruned_loss=0.05276, over 730320.55 frames. ], batch size: 15, lr: 6.99e-03, grad_scale: 32.0 +2024-07-28 09:04:34,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=136712.0, ans=0.0 +2024-07-28 09:04:35,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.30 vs. limit=15.0 +2024-07-28 09:04:39,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=136725.33333333334, ans=0.1 +2024-07-28 09:04:51,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.23 vs. limit=15.0 +2024-07-28 09:04:56,685 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.546e+01 5.956e+01 6.746e+01 1.009e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 09:04:59,415 INFO [train.py:1114] (3/4) Epoch 11, batch 350, loss[loss=0.1987, simple_loss=0.2771, pruned_loss=0.06012, over 4935.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2825, pruned_loss=0.05393, over 777017.47 frames. 
], batch size: 12, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:05:11,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.71 vs. limit=10.0 +2024-07-28 09:05:19,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=136805.33333333334, ans=0.2 +2024-07-28 09:05:23,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=136805.33333333334, ans=0.0 +2024-07-28 09:05:25,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=136805.33333333334, ans=0.1 +2024-07-28 09:05:28,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=136818.66666666666, ans=0.125 +2024-07-28 09:05:33,211 INFO [train.py:1114] (3/4) Epoch 11, batch 400, loss[loss=0.2104, simple_loss=0.3007, pruned_loss=0.06008, over 4696.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2829, pruned_loss=0.0537, over 814415.62 frames. ], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:05:38,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=136832.0, ans=0.125 +2024-07-28 09:05:55,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=136858.66666666666, ans=0.0 +2024-07-28 09:05:57,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=136858.66666666666, ans=0.0 +2024-07-28 09:05:58,776 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.72 vs. limit=15.0 +2024-07-28 09:06:01,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=136872.0, ans=0.125 +2024-07-28 09:06:09,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.15 vs. limit=15.0 +2024-07-28 09:06:12,501 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.682e+01 6.253e+01 7.367e+01 1.050e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:06:15,187 INFO [train.py:1114] (3/4) Epoch 11, batch 450, loss[loss=0.1619, simple_loss=0.2542, pruned_loss=0.03478, over 4640.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.283, pruned_loss=0.05403, over 839741.76 frames. ], batch size: 13, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:06:15,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=136898.66666666666, ans=0.125 +2024-07-28 09:06:17,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.10 vs. 
limit=15.0 +2024-07-28 09:06:21,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=136912.0, ans=0.025 +2024-07-28 09:06:40,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=136938.66666666666, ans=0.125 +2024-07-28 09:06:46,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=136938.66666666666, ans=0.07 +2024-07-28 09:06:55,244 INFO [train.py:1114] (3/4) Epoch 11, batch 500, loss[loss=0.2393, simple_loss=0.3243, pruned_loss=0.07714, over 4690.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2829, pruned_loss=0.05381, over 862139.91 frames. ], batch size: 15, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:06:56,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=136965.33333333334, ans=0.125 +2024-07-28 09:07:00,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=136965.33333333334, ans=0.125 +2024-07-28 09:07:02,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=136978.66666666666, ans=0.125 +2024-07-28 09:07:22,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=137018.66666666666, ans=0.125 +2024-07-28 09:07:25,820 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.492e+01 6.007e+01 6.943e+01 8.543e+01, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 09:07:28,419 INFO [train.py:1114] (3/4) Epoch 11, batch 550, loss[loss=0.1979, simple_loss=0.2995, pruned_loss=0.04814, over 4604.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2836, pruned_loss=0.05379, over 877459.36 frames. ], batch size: 17, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:07:28,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=137032.0, ans=0.0 +2024-07-28 09:07:29,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137032.0, ans=0.2 +2024-07-28 09:07:29,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137032.0, ans=0.125 +2024-07-28 09:07:33,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=137032.0, ans=0.2 +2024-07-28 09:07:39,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137045.33333333334, ans=0.1 +2024-07-28 09:07:51,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.26 vs. limit=15.0 +2024-07-28 09:08:02,908 INFO [train.py:1114] (3/4) Epoch 11, batch 600, loss[loss=0.2248, simple_loss=0.3081, pruned_loss=0.07072, over 4637.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2839, pruned_loss=0.05404, over 891979.35 frames. 
], batch size: 16, lr: 6.98e-03, grad_scale: 32.0 +2024-07-28 09:08:03,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=137098.66666666666, ans=0.125 +2024-07-28 09:08:23,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=137138.66666666666, ans=0.125 +2024-07-28 09:08:33,004 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.574e+01 6.202e+01 6.752e+01 1.007e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 09:08:35,649 INFO [train.py:1114] (3/4) Epoch 11, batch 650, loss[loss=0.1681, simple_loss=0.2635, pruned_loss=0.03634, over 4755.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2831, pruned_loss=0.05376, over 903893.02 frames. ], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:08:52,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137192.0, ans=0.0 +2024-07-28 09:08:57,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=137205.33333333334, ans=0.025 +2024-07-28 09:09:09,628 INFO [train.py:1114] (3/4) Epoch 11, batch 700, loss[loss=0.171, simple_loss=0.2581, pruned_loss=0.04197, over 4633.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2832, pruned_loss=0.05399, over 911901.90 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:09,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=137232.0, ans=0.125 +2024-07-28 09:09:16,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=137245.33333333334, ans=0.2 +2024-07-28 09:09:24,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=137258.66666666666, ans=0.2 +2024-07-28 09:09:33,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=137272.0, ans=0.2 +2024-07-28 09:09:40,611 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.468e+01 5.602e+01 6.234e+01 6.972e+01 9.125e+01, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 09:09:45,970 INFO [train.py:1114] (3/4) Epoch 11, batch 750, loss[loss=0.1909, simple_loss=0.2789, pruned_loss=0.05145, over 4694.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2834, pruned_loss=0.05439, over 918265.84 frames. 
], batch size: 13, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:09:46,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=137298.66666666666, ans=0.025 +2024-07-28 09:09:53,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=137312.0, ans=0.125 +2024-07-28 09:09:57,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=137312.0, ans=0.1 +2024-07-28 09:09:59,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=137325.33333333334, ans=0.125 +2024-07-28 09:10:11,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=137338.66666666666, ans=0.2 +2024-07-28 09:10:17,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=137352.0, ans=0.0 +2024-07-28 09:10:19,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137352.0, ans=0.1 +2024-07-28 09:10:20,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=137365.33333333334, ans=0.2 +2024-07-28 09:10:21,531 INFO [train.py:1114] (3/4) Epoch 11, batch 800, loss[loss=0.1804, simple_loss=0.2635, pruned_loss=0.04869, over 4855.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2837, pruned_loss=0.05473, over 923452.50 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:10:25,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.06 vs. limit=12.0 +2024-07-28 09:10:33,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137378.66666666666, ans=0.125 +2024-07-28 09:10:33,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.87 vs. limit=22.5 +2024-07-28 09:10:57,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=137418.66666666666, ans=0.0 +2024-07-28 09:11:00,611 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+01 5.597e+01 6.070e+01 6.795e+01 9.040e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 09:11:03,294 INFO [train.py:1114] (3/4) Epoch 11, batch 850, loss[loss=0.233, simple_loss=0.3307, pruned_loss=0.06763, over 4664.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.284, pruned_loss=0.0547, over 927609.08 frames. 
], batch size: 14, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:06,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=137432.0, ans=0.0 +2024-07-28 09:11:08,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137432.0, ans=0.125 +2024-07-28 09:11:10,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=137445.33333333334, ans=0.125 +2024-07-28 09:11:11,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=137445.33333333334, ans=0.0 +2024-07-28 09:11:14,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=137445.33333333334, ans=0.04949747468305833 +2024-07-28 09:11:16,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=137458.66666666666, ans=0.0 +2024-07-28 09:11:24,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=137472.0, ans=0.0 +2024-07-28 09:11:32,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=137485.33333333334, ans=0.025 +2024-07-28 09:11:36,217 INFO [train.py:1114] (3/4) Epoch 11, batch 900, loss[loss=0.1663, simple_loss=0.2484, pruned_loss=0.04212, over 4844.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2835, pruned_loss=0.05461, over 928564.15 frames. ], batch size: 12, lr: 6.97e-03, grad_scale: 32.0 +2024-07-28 09:11:36,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.43 vs. limit=22.5 +2024-07-28 09:11:49,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=137525.33333333334, ans=0.1 +2024-07-28 09:11:51,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=137525.33333333334, ans=0.125 +2024-07-28 09:11:54,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=137525.33333333334, ans=0.0 +2024-07-28 09:11:54,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.67 vs. limit=12.0 +2024-07-28 09:11:54,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=137525.33333333334, ans=0.05 +2024-07-28 09:11:57,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=137538.66666666666, ans=0.125 +2024-07-28 09:12:09,246 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.848e+01 5.732e+01 6.294e+01 7.433e+01 1.155e+02, threshold=1.259e+02, percent-clipped=0.0 +2024-07-28 09:12:10,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.07 vs. limit=15.0 +2024-07-28 09:12:12,063 INFO [train.py:1114] (3/4) Epoch 11, batch 950, loss[loss=0.1703, simple_loss=0.2444, pruned_loss=0.04817, over 4775.00 frames. 
], tot_loss[loss=0.196, simple_loss=0.283, pruned_loss=0.05449, over 930164.70 frames. ], batch size: 12, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:12:14,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=137565.33333333334, ans=0.0 +2024-07-28 09:12:14,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=137565.33333333334, ans=0.5 +2024-07-28 09:12:38,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=137605.33333333334, ans=0.05 +2024-07-28 09:12:46,509 INFO [train.py:1114] (3/4) Epoch 11, batch 1000, loss[loss=0.184, simple_loss=0.282, pruned_loss=0.043, over 4960.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2839, pruned_loss=0.05512, over 929960.65 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:13:04,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=137658.66666666666, ans=0.125 +2024-07-28 09:13:05,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=137658.66666666666, ans=0.0 +2024-07-28 09:13:16,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=137685.33333333334, ans=10.0 +2024-07-28 09:13:18,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=137685.33333333334, ans=0.0 +2024-07-28 09:13:18,718 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.919e+01 5.562e+01 6.150e+01 7.152e+01 9.857e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:13:21,483 INFO [train.py:1114] (3/4) Epoch 11, batch 1050, loss[loss=0.2144, simple_loss=0.298, pruned_loss=0.06539, over 4879.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2825, pruned_loss=0.05459, over 932263.66 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:13:26,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=137698.66666666666, ans=0.125 +2024-07-28 09:13:28,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=137698.66666666666, ans=0.0 +2024-07-28 09:13:29,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=137698.66666666666, ans=0.125 +2024-07-28 09:13:30,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=137698.66666666666, ans=0.125 +2024-07-28 09:13:34,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=137712.0, ans=0.125 +2024-07-28 09:13:34,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=137712.0, ans=0.125 +2024-07-28 09:13:46,313 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.14 vs. 
limit=22.5 +2024-07-28 09:13:54,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=137752.0, ans=0.125 +2024-07-28 09:13:58,579 INFO [train.py:1114] (3/4) Epoch 11, batch 1100, loss[loss=0.2002, simple_loss=0.2772, pruned_loss=0.0616, over 4888.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2822, pruned_loss=0.05456, over 934593.42 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:14:02,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=137765.33333333334, ans=0.0 +2024-07-28 09:14:02,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=137765.33333333334, ans=0.1 +2024-07-28 09:14:03,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=137765.33333333334, ans=0.125 +2024-07-28 09:14:10,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=137778.66666666666, ans=0.05 +2024-07-28 09:14:12,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=137792.0, ans=0.125 +2024-07-28 09:14:16,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=137792.0, ans=0.2 +2024-07-28 09:14:17,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=137792.0, ans=0.0 +2024-07-28 09:14:17,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=137792.0, ans=0.125 +2024-07-28 09:14:40,318 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.240e+01 5.580e+01 6.119e+01 6.842e+01 1.423e+02, threshold=1.224e+02, percent-clipped=1.0 +2024-07-28 09:14:42,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.58 vs. limit=12.0 +2024-07-28 09:14:42,889 INFO [train.py:1114] (3/4) Epoch 11, batch 1150, loss[loss=0.1969, simple_loss=0.2773, pruned_loss=0.0582, over 4894.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2821, pruned_loss=0.05467, over 933869.36 frames. ], batch size: 13, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:14:49,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=137845.33333333334, ans=0.125 +2024-07-28 09:15:06,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=137872.0, ans=0.125 +2024-07-28 09:15:06,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.78 vs. limit=15.0 +2024-07-28 09:15:07,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=137872.0, ans=0.0 +2024-07-28 09:15:15,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=22.5 +2024-07-28 09:15:16,428 INFO [train.py:1114] (3/4) Epoch 11, batch 1200, loss[loss=0.2014, simple_loss=0.2966, pruned_loss=0.05311, over 4868.00 frames. 
], tot_loss[loss=0.1973, simple_loss=0.2837, pruned_loss=0.05547, over 933153.92 frames. ], batch size: 14, lr: 6.96e-03, grad_scale: 32.0 +2024-07-28 09:15:19,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=137898.66666666666, ans=0.125 +2024-07-28 09:15:22,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.81 vs. limit=6.0 +2024-07-28 09:15:31,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=137925.33333333334, ans=0.125 +2024-07-28 09:15:37,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=137925.33333333334, ans=0.025 +2024-07-28 09:15:37,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=15.0 +2024-07-28 09:15:44,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=137952.0, ans=0.025 +2024-07-28 09:15:45,217 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.56 vs. limit=15.0 +2024-07-28 09:15:50,320 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.564e+01 6.259e+01 7.036e+01 9.371e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 09:15:53,033 INFO [train.py:1114] (3/4) Epoch 11, batch 1250, loss[loss=0.2269, simple_loss=0.3021, pruned_loss=0.07586, over 4785.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2841, pruned_loss=0.05527, over 937110.71 frames. ], batch size: 15, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:15:58,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=137965.33333333334, ans=0.05 +2024-07-28 09:16:10,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=137992.0, ans=10.0 +2024-07-28 09:16:12,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=138005.33333333334, ans=0.125 +2024-07-28 09:16:16,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=138005.33333333334, ans=0.125 +2024-07-28 09:16:26,248 INFO [train.py:1114] (3/4) Epoch 11, batch 1300, loss[loss=0.2158, simple_loss=0.3089, pruned_loss=0.06134, over 4732.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2831, pruned_loss=0.05472, over 938654.32 frames. ], batch size: 19, lr: 6.95e-03, grad_scale: 32.0 +2024-07-28 09:16:31,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.99 vs. 
limit=12.0 +2024-07-28 09:16:38,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138045.33333333334, ans=0.1 +2024-07-28 09:16:55,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=138085.33333333334, ans=0.125 +2024-07-28 09:16:56,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=138085.33333333334, ans=0.2 +2024-07-28 09:16:57,015 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.412e+01 5.624e+01 6.382e+01 7.662e+01 1.173e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-28 09:16:59,900 INFO [train.py:1114] (3/4) Epoch 11, batch 1350, loss[loss=0.1807, simple_loss=0.2763, pruned_loss=0.04254, over 4757.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2824, pruned_loss=0.0543, over 940688.93 frames. ], batch size: 13, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:06,242 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0 +2024-07-28 09:17:11,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=138112.0, ans=0.125 +2024-07-28 09:17:12,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.12 vs. limit=15.0 +2024-07-28 09:17:12,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=138125.33333333334, ans=0.125 +2024-07-28 09:17:22,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=138138.66666666666, ans=0.125 +2024-07-28 09:17:22,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=138138.66666666666, ans=0.125 +2024-07-28 09:17:24,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=138138.66666666666, ans=0.0 +2024-07-28 09:17:33,277 INFO [train.py:1114] (3/4) Epoch 11, batch 1400, loss[loss=0.1918, simple_loss=0.2761, pruned_loss=0.05377, over 4691.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2811, pruned_loss=0.05365, over 942294.81 frames. ], batch size: 11, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:17:56,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.56 vs. limit=22.5 +2024-07-28 09:18:04,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=138218.66666666666, ans=0.125 +2024-07-28 09:18:06,173 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.725e+01 6.807e+01 7.781e+01 1.138e+02, threshold=1.361e+02, percent-clipped=0.0 +2024-07-28 09:18:07,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=138218.66666666666, ans=0.2 +2024-07-28 09:18:08,970 INFO [train.py:1114] (3/4) Epoch 11, batch 1450, loss[loss=0.2066, simple_loss=0.2973, pruned_loss=0.05796, over 4688.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2824, pruned_loss=0.05439, over 942303.11 frames. 
], batch size: 15, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:18:11,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=138232.0, ans=0.0 +2024-07-28 09:18:11,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1.whitening_limit, batch_count=138232.0, ans=10.0 +2024-07-28 09:18:31,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138258.66666666666, ans=0.125 +2024-07-28 09:18:41,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=138285.33333333334, ans=0.0 +2024-07-28 09:18:52,731 INFO [train.py:1114] (3/4) Epoch 11, batch 1500, loss[loss=0.2281, simple_loss=0.3251, pruned_loss=0.06558, over 4810.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2833, pruned_loss=0.05479, over 942242.18 frames. ], batch size: 14, lr: 6.95e-03, grad_scale: 64.0 +2024-07-28 09:19:01,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=138312.0, ans=0.0 +2024-07-28 09:19:02,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=138312.0, ans=0.2 +2024-07-28 09:19:06,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=138325.33333333334, ans=0.125 +2024-07-28 09:19:21,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=138338.66666666666, ans=0.125 +2024-07-28 09:19:31,301 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.052e+01 5.776e+01 6.231e+01 7.086e+01 9.841e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:19:33,311 INFO [train.py:1114] (3/4) Epoch 11, batch 1550, loss[loss=0.2246, simple_loss=0.3162, pruned_loss=0.06651, over 4911.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2838, pruned_loss=0.05521, over 938456.91 frames. ], batch size: 15, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:19:34,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=138365.33333333334, ans=0.0 +2024-07-28 09:19:40,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.79 vs. limit=22.5 +2024-07-28 09:19:41,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=138378.66666666666, ans=0.2 +2024-07-28 09:19:57,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=138405.33333333334, ans=0.125 +2024-07-28 09:20:01,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=138418.66666666666, ans=10.0 +2024-07-28 09:20:09,043 INFO [train.py:1114] (3/4) Epoch 11, batch 1600, loss[loss=0.1832, simple_loss=0.2924, pruned_loss=0.03695, over 4872.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.284, pruned_loss=0.05543, over 936647.35 frames. 
], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:13,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=138432.0, ans=0.1 +2024-07-28 09:20:21,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-28 09:20:38,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=138485.33333333334, ans=0.025 +2024-07-28 09:20:38,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=138485.33333333334, ans=0.05 +2024-07-28 09:20:46,013 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.538e+01 5.961e+01 6.813e+01 9.879e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 09:20:47,972 INFO [train.py:1114] (3/4) Epoch 11, batch 1650, loss[loss=0.2163, simple_loss=0.3023, pruned_loss=0.06516, over 4667.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2837, pruned_loss=0.05565, over 936836.89 frames. ], batch size: 14, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:20:56,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=138512.0, ans=0.0 +2024-07-28 09:21:00,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=138525.33333333334, ans=0.1 +2024-07-28 09:21:21,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=138552.0, ans=0.0 +2024-07-28 09:21:29,792 INFO [train.py:1114] (3/4) Epoch 11, batch 1700, loss[loss=0.1801, simple_loss=0.2629, pruned_loss=0.04869, over 4704.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2828, pruned_loss=0.05511, over 938568.14 frames. ], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:21:52,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=138592.0, ans=0.0 +2024-07-28 09:21:58,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=138605.33333333334, ans=0.125 +2024-07-28 09:21:59,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0 +2024-07-28 09:22:04,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.86 vs. limit=12.0 +2024-07-28 09:22:06,849 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.454e+01 5.772e+01 6.333e+01 7.541e+01 1.576e+02, threshold=1.267e+02, percent-clipped=2.0 +2024-07-28 09:22:08,867 INFO [train.py:1114] (3/4) Epoch 11, batch 1750, loss[loss=0.1658, simple_loss=0.2512, pruned_loss=0.04019, over 4794.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2817, pruned_loss=0.05415, over 939806.98 frames. ], batch size: 11, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:12,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=138632.0, ans=15.0 +2024-07-28 09:22:18,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.18 vs. 
limit=15.0 +2024-07-28 09:22:48,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=138685.33333333334, ans=0.125 +2024-07-28 09:22:50,658 INFO [train.py:1114] (3/4) Epoch 11, batch 1800, loss[loss=0.2285, simple_loss=0.3168, pruned_loss=0.07006, over 4634.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2823, pruned_loss=0.05456, over 940478.39 frames. ], batch size: 13, lr: 6.94e-03, grad_scale: 32.0 +2024-07-28 09:22:51,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.69 vs. limit=15.0 +2024-07-28 09:22:55,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=138698.66666666666, ans=0.1 +2024-07-28 09:22:59,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.07 vs. limit=15.0 +2024-07-28 09:23:03,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=138712.0, ans=0.125 +2024-07-28 09:23:04,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.98 vs. limit=12.0 +2024-07-28 09:23:05,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=138725.33333333334, ans=0.125 +2024-07-28 09:23:06,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.93 vs. limit=15.0 +2024-07-28 09:23:18,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=138738.66666666666, ans=0.1 +2024-07-28 09:23:24,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.405e+01 5.944e+01 6.989e+01 8.458e+01 1.208e+02, threshold=1.398e+02, percent-clipped=0.0 +2024-07-28 09:23:28,454 INFO [train.py:1114] (3/4) Epoch 11, batch 1850, loss[loss=0.1958, simple_loss=0.2925, pruned_loss=0.04961, over 4814.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2826, pruned_loss=0.05421, over 940589.12 frames. ], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:23:30,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.17 vs. limit=15.0 +2024-07-28 09:23:37,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=138778.66666666666, ans=0.125 +2024-07-28 09:23:58,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0 +2024-07-28 09:24:05,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=138818.66666666666, ans=0.2 +2024-07-28 09:24:07,713 INFO [train.py:1114] (3/4) Epoch 11, batch 1900, loss[loss=0.2054, simple_loss=0.304, pruned_loss=0.0534, over 4666.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2827, pruned_loss=0.05424, over 942081.22 frames. 
], batch size: 14, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:17,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.17 vs. limit=22.5 +2024-07-28 09:24:18,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=138845.33333333334, ans=0.125 +2024-07-28 09:24:18,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=138845.33333333334, ans=0.125 +2024-07-28 09:24:22,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=138858.66666666666, ans=0.09899494936611666 +2024-07-28 09:24:29,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=138872.0, ans=0.0 +2024-07-28 09:24:39,500 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.648e+01 6.210e+01 7.045e+01 1.018e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 09:24:41,647 INFO [train.py:1114] (3/4) Epoch 11, batch 1950, loss[loss=0.1723, simple_loss=0.2712, pruned_loss=0.03674, over 4895.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2844, pruned_loss=0.05462, over 943951.80 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:24:44,515 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:25:10,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=138952.0, ans=0.125 +2024-07-28 09:25:12,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=138952.0, ans=0.125 +2024-07-28 09:25:18,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.87 vs. limit=15.0 +2024-07-28 09:25:19,150 INFO [train.py:1114] (3/4) Epoch 11, batch 2000, loss[loss=0.1476, simple_loss=0.2412, pruned_loss=0.02696, over 4797.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2844, pruned_loss=0.05504, over 941280.63 frames. ], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:25:22,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.44 vs. limit=6.0 +2024-07-28 09:32:39,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=139005.33333333334, ans=0.0 +2024-07-28 09:32:42,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=139005.33333333334, ans=0.0 +2024-07-28 09:32:43,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.62 vs. limit=12.0 +2024-07-28 09:32:51,186 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.827e+01 6.350e+01 7.381e+01 1.146e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:32:53,239 INFO [train.py:1114] (3/4) Epoch 11, batch 2050, loss[loss=0.1786, simple_loss=0.2648, pruned_loss=0.04622, over 4605.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2835, pruned_loss=0.05454, over 939309.43 frames. 
], batch size: 11, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:32:55,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=139032.0, ans=0.125 +2024-07-28 09:32:56,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.86 vs. limit=10.0 +2024-07-28 09:33:02,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=139045.33333333334, ans=0.0 +2024-07-28 09:33:04,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.59 vs. limit=6.0 +2024-07-28 09:33:04,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=139045.33333333334, ans=0.2 +2024-07-28 09:33:14,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=139072.0, ans=0.0 +2024-07-28 09:33:15,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=139072.0, ans=0.0 +2024-07-28 09:33:24,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139085.33333333334, ans=0.1 +2024-07-28 09:33:28,462 INFO [train.py:1114] (3/4) Epoch 11, batch 2100, loss[loss=0.2217, simple_loss=0.3018, pruned_loss=0.07078, over 4761.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2832, pruned_loss=0.05426, over 940963.84 frames. ], batch size: 13, lr: 6.93e-03, grad_scale: 32.0 +2024-07-28 09:33:28,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.84 vs. limit=10.0 +2024-07-28 09:33:29,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=139098.66666666666, ans=0.125 +2024-07-28 09:33:35,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139112.0, ans=0.1 +2024-07-28 09:33:37,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=139112.0, ans=0.125 +2024-07-28 09:33:43,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=139125.33333333334, ans=0.125 +2024-07-28 09:33:46,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=139125.33333333334, ans=0.125 +2024-07-28 09:33:48,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=139125.33333333334, ans=0.0 +2024-07-28 09:33:50,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=139138.66666666666, ans=0.025 +2024-07-28 09:33:52,856 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.87 vs. 
limit=15.0 +2024-07-28 09:33:56,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=139138.66666666666, ans=0.125 +2024-07-28 09:34:05,602 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.652e+01 6.255e+01 7.375e+01 9.920e+01, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 09:34:06,906 INFO [train.py:1114] (3/4) Epoch 11, batch 2150, loss[loss=0.2121, simple_loss=0.3037, pruned_loss=0.06021, over 4894.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2827, pruned_loss=0.05466, over 944120.04 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:34:45,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=139205.33333333334, ans=0.025 +2024-07-28 09:34:54,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=139232.0, ans=0.0 +2024-07-28 09:34:55,031 INFO [train.py:1114] (3/4) Epoch 11, batch 2200, loss[loss=0.1824, simple_loss=0.2737, pruned_loss=0.04559, over 4814.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.282, pruned_loss=0.05436, over 942957.61 frames. ], batch size: 14, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:09,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=139258.66666666666, ans=0.04949747468305833 +2024-07-28 09:35:09,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=139258.66666666666, ans=0.2 +2024-07-28 09:35:13,689 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:35:27,040 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.559e+01 6.152e+01 7.200e+01 1.019e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 09:35:28,419 INFO [train.py:1114] (3/4) Epoch 11, batch 2250, loss[loss=0.1916, simple_loss=0.2927, pruned_loss=0.04531, over 4699.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.283, pruned_loss=0.05454, over 941593.89 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:35:33,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.25 vs. limit=6.0 +2024-07-28 09:35:48,301 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. limit=6.0 +2024-07-28 09:35:53,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=139338.66666666666, ans=0.125 +2024-07-28 09:35:57,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=139338.66666666666, ans=0.0 +2024-07-28 09:36:27,455 INFO [train.py:1114] (3/4) Epoch 11, batch 2300, loss[loss=0.1566, simple_loss=0.2433, pruned_loss=0.03498, over 4940.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2806, pruned_loss=0.0541, over 938991.59 frames. 
], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:36:46,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=139392.0, ans=0.0 +2024-07-28 09:36:59,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.544e+01 6.088e+01 7.000e+01 1.026e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 09:37:01,161 INFO [train.py:1114] (3/4) Epoch 11, batch 2350, loss[loss=0.2112, simple_loss=0.302, pruned_loss=0.0602, over 4636.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2807, pruned_loss=0.05389, over 940995.35 frames. ], batch size: 13, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:04,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.27 vs. limit=22.5 +2024-07-28 09:37:05,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=139432.0, ans=0.1 +2024-07-28 09:37:06,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=139432.0, ans=0.125 +2024-07-28 09:37:07,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=139445.33333333334, ans=0.0 +2024-07-28 09:37:15,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.31 vs. limit=15.0 +2024-07-28 09:37:36,624 INFO [train.py:1114] (3/4) Epoch 11, batch 2400, loss[loss=0.1733, simple_loss=0.2652, pruned_loss=0.04065, over 4641.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2814, pruned_loss=0.054, over 940796.53 frames. ], batch size: 12, lr: 6.92e-03, grad_scale: 16.0 +2024-07-28 09:37:37,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.01 vs. limit=15.0 +2024-07-28 09:37:55,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=139525.33333333334, ans=0.125 +2024-07-28 09:38:02,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=139538.66666666666, ans=0.125 +2024-07-28 09:38:03,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=139538.66666666666, ans=0.125 +2024-07-28 09:38:16,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.60 vs. limit=5.0 +2024-07-28 09:38:18,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=139552.0, ans=0.2 +2024-07-28 09:38:18,419 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.707e+01 6.350e+01 6.927e+01 1.167e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 09:38:19,092 INFO [train.py:1114] (3/4) Epoch 11, batch 2450, loss[loss=0.2015, simple_loss=0.2961, pruned_loss=0.05348, over 4692.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2825, pruned_loss=0.05464, over 936786.88 frames. 
], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:38:25,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=139578.66666666666, ans=0.09899494936611666 +2024-07-28 09:38:28,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139578.66666666666, ans=0.125 +2024-07-28 09:38:30,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=139578.66666666666, ans=0.1 +2024-07-28 09:38:33,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=139592.0, ans=0.0 +2024-07-28 09:38:53,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=139618.66666666666, ans=0.2 +2024-07-28 09:38:57,024 INFO [train.py:1114] (3/4) Epoch 11, batch 2500, loss[loss=0.1761, simple_loss=0.2617, pruned_loss=0.0452, over 4813.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.283, pruned_loss=0.05466, over 938707.50 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:05,939 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.41 vs. limit=15.0 +2024-07-28 09:39:10,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=139658.66666666666, ans=0.0 +2024-07-28 09:39:15,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=139658.66666666666, ans=0.2 +2024-07-28 09:39:22,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.62 vs. limit=22.5 +2024-07-28 09:39:31,727 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:39:32,200 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+01 5.568e+01 6.165e+01 6.885e+01 1.396e+02, threshold=1.233e+02, percent-clipped=2.0 +2024-07-28 09:39:32,993 INFO [train.py:1114] (3/4) Epoch 11, batch 2550, loss[loss=0.1724, simple_loss=0.2488, pruned_loss=0.04795, over 4827.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2822, pruned_loss=0.05434, over 938346.52 frames. ], batch size: 11, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:39:35,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.29 vs. 
limit=15.0 +2024-07-28 09:39:39,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=139712.0, ans=0.95 +2024-07-28 09:39:42,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=139712.0, ans=0.125 +2024-07-28 09:39:53,628 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:39:56,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=139738.66666666666, ans=0.2 +2024-07-28 09:40:08,248 INFO [train.py:1114] (3/4) Epoch 11, batch 2600, loss[loss=0.2133, simple_loss=0.292, pruned_loss=0.06727, over 4888.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2835, pruned_loss=0.05509, over 937325.96 frames. ], batch size: 13, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:08,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=139765.33333333334, ans=0.07 +2024-07-28 09:40:16,856 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.55 vs. limit=15.0 +2024-07-28 09:40:19,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139778.66666666666, ans=0.1 +2024-07-28 09:40:24,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=139792.0, ans=0.125 +2024-07-28 09:40:26,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.47 vs. limit=12.0 +2024-07-28 09:40:38,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=139818.66666666666, ans=0.125 +2024-07-28 09:40:44,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.632e+01 6.432e+01 7.757e+01 1.315e+02, threshold=1.286e+02, percent-clipped=1.0 +2024-07-28 09:40:44,698 INFO [train.py:1114] (3/4) Epoch 11, batch 2650, loss[loss=0.2025, simple_loss=0.2796, pruned_loss=0.0627, over 4618.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2835, pruned_loss=0.05498, over 939325.82 frames. ], batch size: 16, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:40:46,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=139832.0, ans=0.125 +2024-07-28 09:41:07,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.99 vs. limit=10.0 +2024-07-28 09:41:09,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=139858.66666666666, ans=0.125 +2024-07-28 09:41:11,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=139858.66666666666, ans=0.0 +2024-07-28 09:41:13,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=139872.0, ans=0.07 +2024-07-28 09:41:28,864 INFO [train.py:1114] (3/4) Epoch 11, batch 2700, loss[loss=0.2046, simple_loss=0.2897, pruned_loss=0.05973, over 4748.00 frames. 
], tot_loss[loss=0.1972, simple_loss=0.2841, pruned_loss=0.05512, over 939294.64 frames. ], batch size: 14, lr: 6.91e-03, grad_scale: 16.0 +2024-07-28 09:41:41,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=139898.66666666666, ans=0.125 +2024-07-28 09:41:47,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=139912.0, ans=0.0 +2024-07-28 09:41:52,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=139925.33333333334, ans=0.025 +2024-07-28 09:41:53,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=139925.33333333334, ans=0.125 +2024-07-28 09:42:10,947 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.187e+01 5.691e+01 6.358e+01 7.173e+01 9.845e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 09:42:11,616 INFO [train.py:1114] (3/4) Epoch 11, batch 2750, loss[loss=0.1687, simple_loss=0.2691, pruned_loss=0.03408, over 4697.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2822, pruned_loss=0.05417, over 939971.62 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 16.0 +2024-07-28 09:42:14,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=139965.33333333334, ans=0.125 +2024-07-28 09:44:59,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=139978.66666666666, ans=0.1 +2024-07-28 09:45:01,128 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:45:24,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=140005.33333333334, ans=0.0 +2024-07-28 09:45:25,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=140005.33333333334, ans=0.0 +2024-07-28 09:45:25,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140005.33333333334, ans=0.125 +2024-07-28 09:45:30,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.43 vs. limit=6.0 +2024-07-28 09:45:33,262 INFO [train.py:1114] (3/4) Epoch 11, batch 2800, loss[loss=0.2311, simple_loss=0.3011, pruned_loss=0.0805, over 3475.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2828, pruned_loss=0.05435, over 938014.68 frames. ], batch size: 35, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:45:36,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140032.0, ans=0.125 +2024-07-28 09:45:36,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.79 vs. 
limit=22.5 +2024-07-28 09:45:42,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=140045.33333333334, ans=0.0 +2024-07-28 09:45:46,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=140058.66666666666, ans=0.025 +2024-07-28 09:45:54,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140072.0, ans=0.125 +2024-07-28 09:45:54,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140072.0, ans=0.125 +2024-07-28 09:45:58,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140072.0, ans=0.125 +2024-07-28 09:46:07,730 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.521e+01 6.232e+01 7.025e+01 9.705e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 09:46:08,484 INFO [train.py:1114] (3/4) Epoch 11, batch 2850, loss[loss=0.1867, simple_loss=0.2816, pruned_loss=0.04593, over 4967.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2839, pruned_loss=0.05494, over 936826.20 frames. ], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:08,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.94 vs. limit=15.0 +2024-07-28 09:46:14,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.37 vs. limit=22.5 +2024-07-28 09:46:17,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140112.0, ans=0.1 +2024-07-28 09:46:27,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=140125.33333333334, ans=0.125 +2024-07-28 09:46:39,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=140152.0, ans=0.125 +2024-07-28 09:46:40,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=140152.0, ans=0.125 +2024-07-28 09:46:42,414 INFO [train.py:1114] (3/4) Epoch 11, batch 2900, loss[loss=0.1574, simple_loss=0.2515, pruned_loss=0.03167, over 4817.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2849, pruned_loss=0.05502, over 940512.01 frames. 
], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:46:43,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140165.33333333334, ans=0.125 +2024-07-28 09:47:01,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=140192.0, ans=0.125 +2024-07-28 09:47:01,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140192.0, ans=0.125 +2024-07-28 09:47:08,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140205.33333333334, ans=0.125 +2024-07-28 09:47:09,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=140218.66666666666, ans=0.125 +2024-07-28 09:47:12,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=140218.66666666666, ans=0.2 +2024-07-28 09:47:17,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.98 vs. limit=6.0 +2024-07-28 09:47:17,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=140218.66666666666, ans=0.025 +2024-07-28 09:47:18,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=140218.66666666666, ans=0.125 +2024-07-28 09:47:20,095 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:47:20,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=140218.66666666666, ans=0.125 +2024-07-28 09:47:21,184 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.615e+01 6.138e+01 7.226e+01 1.097e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 09:47:22,501 INFO [train.py:1114] (3/4) Epoch 11, batch 2950, loss[loss=0.1508, simple_loss=0.2351, pruned_loss=0.03323, over 4703.00 frames. ], tot_loss[loss=0.196, simple_loss=0.283, pruned_loss=0.05447, over 940035.02 frames. ], batch size: 12, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:47:32,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.25 vs. limit=15.0 +2024-07-28 09:47:40,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=140245.33333333334, ans=0.125 +2024-07-28 09:47:58,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140285.33333333334, ans=0.1 +2024-07-28 09:48:02,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=140298.66666666666, ans=0.125 +2024-07-28 09:48:05,769 INFO [train.py:1114] (3/4) Epoch 11, batch 3000, loss[loss=0.1949, simple_loss=0.2849, pruned_loss=0.05248, over 4762.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2832, pruned_loss=0.05451, over 939356.84 frames. 
], batch size: 13, lr: 6.90e-03, grad_scale: 32.0 +2024-07-28 09:48:05,769 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 09:48:11,075 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.2041, 2.6384, 2.8701, 3.0578], device='cuda:3') +2024-07-28 09:48:19,192 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1714, simple_loss=0.2749, pruned_loss=0.03396, over 944034.00 frames. +2024-07-28 09:48:19,193 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 09:48:23,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=140298.66666666666, ans=0.125 +2024-07-28 09:48:36,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.09 vs. limit=15.0 +2024-07-28 09:48:36,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=140325.33333333334, ans=0.0 +2024-07-28 09:48:46,908 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:48:52,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=140352.0, ans=0.1 +2024-07-28 09:48:52,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.96 vs. limit=12.0 +2024-07-28 09:48:53,328 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.535e+01 6.032e+01 6.917e+01 1.051e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 09:48:54,496 INFO [train.py:1114] (3/4) Epoch 11, batch 3050, loss[loss=0.1772, simple_loss=0.261, pruned_loss=0.0467, over 4644.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2846, pruned_loss=0.05499, over 937897.69 frames. ], batch size: 12, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:48:56,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=140365.33333333334, ans=0.125 +2024-07-28 09:49:05,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=140378.66666666666, ans=0.0 +2024-07-28 09:49:08,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=140392.0, ans=0.025 +2024-07-28 09:49:08,920 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.95 vs. limit=15.0 +2024-07-28 09:49:22,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=140418.66666666666, ans=0.125 +2024-07-28 09:49:32,125 INFO [train.py:1114] (3/4) Epoch 11, batch 3100, loss[loss=0.1976, simple_loss=0.3035, pruned_loss=0.04587, over 4633.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2838, pruned_loss=0.05462, over 938300.68 frames. 
], batch size: 16, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:49:33,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=140432.0, ans=15.0 +2024-07-28 09:49:42,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=140445.33333333334, ans=0.0 +2024-07-28 09:49:43,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-28 09:49:47,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=140458.66666666666, ans=0.0 +2024-07-28 09:49:55,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=140472.0, ans=0.1 +2024-07-28 09:49:55,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=140472.0, ans=0.125 +2024-07-28 09:50:02,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=140485.33333333334, ans=0.09899494936611666 +2024-07-28 09:50:06,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.03 vs. limit=22.5 +2024-07-28 09:50:07,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.405e+01 6.178e+01 7.390e+01 1.037e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 09:50:07,877 INFO [train.py:1114] (3/4) Epoch 11, batch 3150, loss[loss=0.2018, simple_loss=0.2868, pruned_loss=0.05841, over 4610.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2838, pruned_loss=0.05485, over 938321.07 frames. ], batch size: 17, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:18,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.89 vs. limit=15.0 +2024-07-28 09:50:21,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140525.33333333334, ans=0.1 +2024-07-28 09:50:26,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=140525.33333333334, ans=0.0 +2024-07-28 09:50:34,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.36 vs. limit=6.0 +2024-07-28 09:50:37,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140552.0, ans=0.1 +2024-07-28 09:50:38,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=140552.0, ans=0.025 +2024-07-28 09:50:43,669 INFO [train.py:1114] (3/4) Epoch 11, batch 3200, loss[loss=0.1942, simple_loss=0.2805, pruned_loss=0.05391, over 4829.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.282, pruned_loss=0.05406, over 939715.30 frames. 
], batch size: 13, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:50:46,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=140565.33333333334, ans=0.125 +2024-07-28 09:50:57,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=140578.66666666666, ans=0.125 +2024-07-28 09:50:59,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.82 vs. limit=15.0 +2024-07-28 09:51:48,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140618.66666666666, ans=0.125 +2024-07-28 09:52:00,434 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.714e+01 6.190e+01 6.678e+01 8.069e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 09:52:01,129 INFO [train.py:1114] (3/4) Epoch 11, batch 3250, loss[loss=0.2148, simple_loss=0.2947, pruned_loss=0.06746, over 4927.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2828, pruned_loss=0.05424, over 940907.38 frames. ], batch size: 14, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:52:54,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.95 vs. limit=15.0 +2024-07-28 09:52:54,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=140658.66666666666, ans=0.125 +2024-07-28 09:53:00,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.23 vs. limit=15.0 +2024-07-28 09:53:23,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-07-28 09:53:27,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.92 vs. limit=15.0 +2024-07-28 09:53:37,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140685.33333333334, ans=0.1 +2024-07-28 09:53:46,911 INFO [train.py:1114] (3/4) Epoch 11, batch 3300, loss[loss=0.2042, simple_loss=0.3015, pruned_loss=0.05342, over 4732.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2819, pruned_loss=0.05427, over 941486.25 frames. 
], batch size: 19, lr: 6.89e-03, grad_scale: 32.0 +2024-07-28 09:53:48,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=140698.66666666666, ans=0.125 +2024-07-28 09:53:51,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=140698.66666666666, ans=0.5 +2024-07-28 09:54:09,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=140712.0, ans=0.125 +2024-07-28 09:54:36,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=140738.66666666666, ans=0.125 +2024-07-28 09:54:52,383 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.701e+01 6.395e+01 7.330e+01 1.076e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 09:54:53,122 INFO [train.py:1114] (3/4) Epoch 11, batch 3350, loss[loss=0.2218, simple_loss=0.3002, pruned_loss=0.07171, over 4611.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2823, pruned_loss=0.05475, over 939426.32 frames. ], batch size: 17, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:54:54,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=140765.33333333334, ans=0.025 +2024-07-28 09:55:16,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.37 vs. limit=15.0 +2024-07-28 09:55:22,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=140818.66666666666, ans=0.125 +2024-07-28 09:55:25,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=140818.66666666666, ans=0.125 +2024-07-28 09:55:29,092 INFO [train.py:1114] (3/4) Epoch 11, batch 3400, loss[loss=0.173, simple_loss=0.2662, pruned_loss=0.03992, over 4804.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2825, pruned_loss=0.05473, over 937641.09 frames. ], batch size: 11, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:55:29,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.63 vs. limit=10.0 +2024-07-28 09:55:46,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=140858.66666666666, ans=0.0 +2024-07-28 09:55:50,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=140872.0, ans=0.0 +2024-07-28 09:55:51,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=140872.0, ans=0.1 +2024-07-28 09:55:51,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=140872.0, ans=0.125 +2024-07-28 09:56:00,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.88 vs. 
limit=22.5 +2024-07-28 09:56:04,025 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.505e+01 5.604e+01 6.128e+01 6.821e+01 1.006e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 09:56:04,726 INFO [train.py:1114] (3/4) Epoch 11, batch 3450, loss[loss=0.2038, simple_loss=0.3061, pruned_loss=0.05074, over 4721.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2835, pruned_loss=0.05471, over 938094.72 frames. ], batch size: 19, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:06,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=140898.66666666666, ans=0.1 +2024-07-28 09:56:09,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=140898.66666666666, ans=0.125 +2024-07-28 09:56:10,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=140912.0, ans=0.5 +2024-07-28 09:56:17,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=140925.33333333334, ans=0.125 +2024-07-28 09:56:19,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0 +2024-07-28 09:56:29,438 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 09:56:30,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=140938.66666666666, ans=0.0 +2024-07-28 09:56:34,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=140952.0, ans=0.0 +2024-07-28 09:56:35,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140952.0, ans=0.1 +2024-07-28 09:56:37,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=6.02 vs. limit=12.0 +2024-07-28 09:56:38,866 INFO [train.py:1114] (3/4) Epoch 11, batch 3500, loss[loss=0.1769, simple_loss=0.2666, pruned_loss=0.04357, over 4934.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.283, pruned_loss=0.05419, over 938946.45 frames. ], batch size: 12, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:56:51,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140978.66666666666, ans=0.1 +2024-07-28 09:56:54,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=140978.66666666666, ans=0.1 +2024-07-28 09:57:04,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=141005.33333333334, ans=0.04949747468305833 +2024-07-28 09:57:16,581 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+01 5.451e+01 6.238e+01 7.293e+01 9.971e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 09:57:17,309 INFO [train.py:1114] (3/4) Epoch 11, batch 3550, loss[loss=0.1959, simple_loss=0.2958, pruned_loss=0.04798, over 4663.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2827, pruned_loss=0.05365, over 939400.47 frames. 
], batch size: 14, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:57:19,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=141032.0, ans=0.125 +2024-07-28 09:57:19,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=141032.0, ans=0.2 +2024-07-28 09:57:22,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=141032.0, ans=0.0 +2024-07-28 09:57:33,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=141058.66666666666, ans=0.0 +2024-07-28 09:57:34,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=141058.66666666666, ans=0.125 +2024-07-28 09:57:34,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=141058.66666666666, ans=0.2 +2024-07-28 09:57:58,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=141098.66666666666, ans=0.125 +2024-07-28 09:57:59,194 INFO [train.py:1114] (3/4) Epoch 11, batch 3600, loss[loss=0.212, simple_loss=0.296, pruned_loss=0.06395, over 4959.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2829, pruned_loss=0.05395, over 941224.95 frames. ], batch size: 13, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:32,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=141152.0, ans=0.0 +2024-07-28 09:58:35,668 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.546e+01 5.527e+01 6.114e+01 7.370e+01 1.148e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 09:58:37,552 INFO [train.py:1114] (3/4) Epoch 11, batch 3650, loss[loss=0.198, simple_loss=0.2819, pruned_loss=0.05707, over 4896.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2817, pruned_loss=0.053, over 941738.91 frames. ], batch size: 15, lr: 6.88e-03, grad_scale: 32.0 +2024-07-28 09:58:47,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=141178.66666666666, ans=0.0 +2024-07-28 09:58:55,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=141192.0, ans=0.1 +2024-07-28 09:59:04,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141218.66666666666, ans=0.1 +2024-07-28 09:59:11,458 INFO [train.py:1114] (3/4) Epoch 11, batch 3700, loss[loss=0.2111, simple_loss=0.3075, pruned_loss=0.0574, over 4930.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2814, pruned_loss=0.05256, over 942497.60 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:17,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=141245.33333333334, ans=0.2 +2024-07-28 09:59:17,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.14 vs. limit=12.0 +2024-07-28 09:59:19,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.26 vs. 
limit=15.0 +2024-07-28 09:59:43,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.04 vs. limit=15.0 +2024-07-28 09:59:50,247 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.396e+01 5.987e+01 6.537e+01 9.206e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 09:59:50,930 INFO [train.py:1114] (3/4) Epoch 11, batch 3750, loss[loss=0.1901, simple_loss=0.2663, pruned_loss=0.05695, over 4791.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2815, pruned_loss=0.05297, over 944075.42 frames. ], batch size: 11, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 09:59:57,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=141298.66666666666, ans=0.125 +2024-07-28 10:00:00,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.22 vs. limit=15.0 +2024-07-28 10:00:21,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=141338.66666666666, ans=0.125 +2024-07-28 10:00:22,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=141352.0, ans=0.2 +2024-07-28 10:00:33,298 INFO [train.py:1114] (3/4) Epoch 11, batch 3800, loss[loss=0.2058, simple_loss=0.2989, pruned_loss=0.05641, over 4805.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2804, pruned_loss=0.05287, over 942486.65 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:00:34,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.30 vs. limit=12.0 +2024-07-28 10:00:54,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=141405.33333333334, ans=0.0 +2024-07-28 10:00:55,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=141405.33333333334, ans=0.0 +2024-07-28 10:01:02,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=141418.66666666666, ans=0.125 +2024-07-28 10:01:04,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.11 vs. limit=22.5 +2024-07-28 10:01:07,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.570e+01 6.150e+01 7.131e+01 1.072e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:01:08,503 INFO [train.py:1114] (3/4) Epoch 11, batch 3850, loss[loss=0.2087, simple_loss=0.3037, pruned_loss=0.05691, over 4631.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2803, pruned_loss=0.05267, over 943094.87 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:19,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=141445.33333333334, ans=0.2 +2024-07-28 10:01:22,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.22 vs. limit=15.0 +2024-07-28 10:01:38,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.26 vs. 
limit=15.0 +2024-07-28 10:01:42,031 INFO [train.py:1114] (3/4) Epoch 11, batch 3900, loss[loss=0.1792, simple_loss=0.2817, pruned_loss=0.03832, over 4814.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2817, pruned_loss=0.05309, over 943160.81 frames. ], batch size: 14, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:01:44,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=141498.66666666666, ans=0.0 +2024-07-28 10:01:55,965 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.05 vs. limit=15.0 +2024-07-28 10:01:58,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=141525.33333333334, ans=0.125 +2024-07-28 10:02:02,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=141525.33333333334, ans=0.1 +2024-07-28 10:02:09,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141538.66666666666, ans=0.125 +2024-07-28 10:02:11,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141552.0, ans=0.1 +2024-07-28 10:02:16,446 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.644e+01 6.231e+01 6.992e+01 1.031e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 10:02:17,223 INFO [train.py:1114] (3/4) Epoch 11, batch 3950, loss[loss=0.2015, simple_loss=0.2838, pruned_loss=0.0596, over 4840.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.282, pruned_loss=0.05377, over 945028.36 frames. ], batch size: 16, lr: 6.87e-03, grad_scale: 32.0 +2024-07-28 10:02:18,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 10:02:24,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=141578.66666666666, ans=0.125 +2024-07-28 10:02:36,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=141592.0, ans=0.0 +2024-07-28 10:02:41,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=141605.33333333334, ans=0.125 +2024-07-28 10:02:54,311 INFO [train.py:1114] (3/4) Epoch 11, batch 4000, loss[loss=0.1487, simple_loss=0.2483, pruned_loss=0.02454, over 4783.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2818, pruned_loss=0.05385, over 941773.79 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:03:12,679 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.74 vs. limit=6.0 +2024-07-28 10:03:20,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.15 vs. 
limit=15.0 +2024-07-28 10:03:22,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=141658.66666666666, ans=0.0 +2024-07-28 10:03:45,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=141685.33333333334, ans=0.2 +2024-07-28 10:03:53,267 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.441e+01 6.028e+01 6.961e+01 9.604e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 10:03:53,945 INFO [train.py:1114] (3/4) Epoch 11, batch 4050, loss[loss=0.2208, simple_loss=0.3098, pruned_loss=0.06593, over 3424.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2815, pruned_loss=0.05403, over 940799.80 frames. ], batch size: 35, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:04:03,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.87 vs. limit=22.5 +2024-07-28 10:04:08,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=141712.0, ans=0.125 +2024-07-28 10:04:12,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=141712.0, ans=0.2 +2024-07-28 10:04:14,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.93 vs. limit=6.0 +2024-07-28 10:04:18,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.70 vs. limit=22.5 +2024-07-28 10:04:23,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=141738.66666666666, ans=0.09899494936611666 +2024-07-28 10:04:34,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=141765.33333333334, ans=0.2 +2024-07-28 10:04:34,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141765.33333333334, ans=0.1 +2024-07-28 10:04:34,718 INFO [train.py:1114] (3/4) Epoch 11, batch 4100, loss[loss=0.2281, simple_loss=0.321, pruned_loss=0.06762, over 4890.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2821, pruned_loss=0.05469, over 940208.68 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:05:04,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=141792.0, ans=0.09899494936611666 +2024-07-28 10:05:18,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=141818.66666666666, ans=0.125 +2024-07-28 10:05:19,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.62 vs. limit=15.0 +2024-07-28 10:05:19,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.20 vs. 
limit=15.0 +2024-07-28 10:05:36,364 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.886e+01 6.549e+01 7.693e+01 1.193e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 10:05:36,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=141832.0, ans=0.0 +2024-07-28 10:05:37,304 INFO [train.py:1114] (3/4) Epoch 11, batch 4150, loss[loss=0.1802, simple_loss=0.268, pruned_loss=0.04621, over 4826.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2802, pruned_loss=0.0538, over 939810.72 frames. ], batch size: 13, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:06:02,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=141832.0, ans=0.125 +2024-07-28 10:06:26,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=141858.66666666666, ans=0.1 +2024-07-28 10:06:36,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=141858.66666666666, ans=0.125 +2024-07-28 10:06:58,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=141872.0, ans=0.125 +2024-07-28 10:06:59,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.07 vs. limit=15.0 +2024-07-28 10:07:01,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=141885.33333333334, ans=0.125 +2024-07-28 10:07:15,452 INFO [train.py:1114] (3/4) Epoch 11, batch 4200, loss[loss=0.2156, simple_loss=0.3124, pruned_loss=0.0594, over 4913.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2802, pruned_loss=0.05344, over 940820.67 frames. ], batch size: 15, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:07:39,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=141912.0, ans=0.125 +2024-07-28 10:08:39,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=141952.0, ans=0.1 +2024-07-28 10:08:41,004 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.645e+01 6.237e+01 6.874e+01 1.098e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 10:08:41,704 INFO [train.py:1114] (3/4) Epoch 11, batch 4250, loss[loss=0.1925, simple_loss=0.2848, pruned_loss=0.05011, over 4646.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2801, pruned_loss=0.05348, over 941573.39 frames. ], batch size: 12, lr: 6.86e-03, grad_scale: 32.0 +2024-07-28 10:08:41,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=141965.33333333334, ans=0.125 +2024-07-28 10:08:46,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.13 vs. limit=6.0 +2024-07-28 10:08:52,254 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.57 vs. 
limit=6.0 +2024-07-28 10:09:00,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=141992.0, ans=0.2 +2024-07-28 10:09:07,383 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0 +2024-07-28 10:09:07,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=141992.0, ans=0.025 +2024-07-28 10:09:25,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=142018.66666666666, ans=0.0 +2024-07-28 10:09:25,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=142018.66666666666, ans=0.125 +2024-07-28 10:09:31,303 INFO [train.py:1114] (3/4) Epoch 11, batch 4300, loss[loss=0.2116, simple_loss=0.2946, pruned_loss=0.06431, over 4758.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2814, pruned_loss=0.05386, over 940766.43 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:09:35,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=142032.0, ans=0.5 +2024-07-28 10:09:36,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=142032.0, ans=0.125 +2024-07-28 10:10:00,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=142058.66666666666, ans=0.125 +2024-07-28 10:10:15,198 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.44 vs. limit=22.5 +2024-07-28 10:10:27,597 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.309e+01 5.443e+01 5.948e+01 6.522e+01 9.090e+01, threshold=1.190e+02, percent-clipped=0.0 +2024-07-28 10:10:28,343 INFO [train.py:1114] (3/4) Epoch 11, batch 4350, loss[loss=0.1625, simple_loss=0.2485, pruned_loss=0.03824, over 4757.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2813, pruned_loss=0.05343, over 941155.28 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 32.0 +2024-07-28 10:10:29,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=142098.66666666666, ans=0.1 +2024-07-28 10:10:29,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=142098.66666666666, ans=0.025 +2024-07-28 10:10:31,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142098.66666666666, ans=0.125 +2024-07-28 10:10:40,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=142112.0, ans=0.0 +2024-07-28 10:11:05,770 INFO [train.py:1114] (3/4) Epoch 11, batch 4400, loss[loss=0.1863, simple_loss=0.2803, pruned_loss=0.04615, over 4812.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2812, pruned_loss=0.05321, over 941267.94 frames. 
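The recurring `optim.py` WARNING lines summarize the recent distribution of gradient norms: the five numbers read as min/25%/50%/75%/max, and the reported threshold is consistently `Clipping_scale` times the median (e.g. 2.0 × 6.028e+01 ≈ 1.206e+02 in the 10:03:53 warning above), with `percent-clipped` giving how often the threshold was actually exceeded. A sketch of how such a summary could be computed; this helper is illustrative, not the actual icefall optimizer code:

```python
import numpy as np

def grad_norm_summary(recent_norms, clipping_scale=2.0):
    """Summarise a window of recent gradient norms in the style of the
    WARNING lines above. Returns (quartiles, threshold, percent_clipped),
    where quartiles are [min, 25%, 50%, 75%, max] and the clipping
    threshold is clipping_scale * median."""
    norms = np.asarray(recent_norms, dtype=float)
    quartiles = np.quantile(norms, [0.0, 0.25, 0.5, 0.75, 1.0])
    threshold = clipping_scale * quartiles[2]
    # Fraction of the window that would have been clipped at this threshold.
    percent_clipped = 100.0 * np.mean(norms > threshold)
    return quartiles, threshold, percent_clipped
```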
], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:11:14,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=142178.66666666666, ans=0.025 +2024-07-28 10:11:17,662 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.67 vs. limit=10.0 +2024-07-28 10:11:38,551 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.755e+01 6.372e+01 7.291e+01 1.018e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 10:11:39,288 INFO [train.py:1114] (3/4) Epoch 11, batch 4450, loss[loss=0.1596, simple_loss=0.2446, pruned_loss=0.03727, over 4932.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2816, pruned_loss=0.05348, over 939397.53 frames. ], batch size: 12, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:11,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=15.79 vs. limit=15.0 +2024-07-28 10:12:20,691 INFO [train.py:1114] (3/4) Epoch 11, batch 4500, loss[loss=0.2028, simple_loss=0.2932, pruned_loss=0.05624, over 4745.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2836, pruned_loss=0.05425, over 937987.62 frames. ], batch size: 14, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:32,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=142312.0, ans=0.1 +2024-07-28 10:12:41,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=142325.33333333334, ans=0.0 +2024-07-28 10:12:44,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. limit=15.0 +2024-07-28 10:12:45,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=142338.66666666666, ans=0.125 +2024-07-28 10:12:46,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=142338.66666666666, ans=0.125 +2024-07-28 10:12:50,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=142338.66666666666, ans=0.125 +2024-07-28 10:12:55,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=142352.0, ans=0.125 +2024-07-28 10:12:57,412 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.457e+01 5.934e+01 6.532e+01 9.481e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 10:12:58,142 INFO [train.py:1114] (3/4) Epoch 11, batch 4550, loss[loss=0.1834, simple_loss=0.2681, pruned_loss=0.04931, over 4892.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2822, pruned_loss=0.05374, over 940127.41 frames. ], batch size: 13, lr: 6.85e-03, grad_scale: 64.0 +2024-07-28 10:12:58,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=142365.33333333334, ans=0.0 +2024-07-28 10:13:05,735 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.95 vs. 
limit=15.0 +2024-07-28 10:13:06,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=142378.66666666666, ans=0.0 +2024-07-28 10:13:19,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=142405.33333333334, ans=0.125 +2024-07-28 10:13:33,038 INFO [train.py:1114] (3/4) Epoch 11, batch 4600, loss[loss=0.1964, simple_loss=0.2955, pruned_loss=0.04861, over 4550.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2821, pruned_loss=0.0536, over 938042.50 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:13:36,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=142432.0, ans=0.0 +2024-07-28 10:13:43,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=142445.33333333334, ans=0.125 +2024-07-28 10:14:11,968 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.92 vs. limit=6.0 +2024-07-28 10:14:16,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=142485.33333333334, ans=0.125 +2024-07-28 10:14:16,899 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.751e+01 6.441e+01 7.092e+01 1.186e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 10:14:22,130 INFO [train.py:1114] (3/4) Epoch 11, batch 4650, loss[loss=0.23, simple_loss=0.3163, pruned_loss=0.07191, over 4807.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.284, pruned_loss=0.05473, over 940161.33 frames. ], batch size: 16, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:14:39,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=142525.33333333334, ans=0.125 +2024-07-28 10:14:47,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=142538.66666666666, ans=0.0 +2024-07-28 10:14:49,107 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:15:03,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=142552.0, ans=0.025 +2024-07-28 10:15:08,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=142552.0, ans=0.125 +2024-07-28 10:15:09,645 INFO [train.py:1114] (3/4) Epoch 11, batch 4700, loss[loss=0.1951, simple_loss=0.2693, pruned_loss=0.06045, over 4720.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2831, pruned_loss=0.05425, over 936915.30 frames. 
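The `Whitening` entries track how close a module's activations are to having a "white" (identity-proportional) covariance; the logged `metric` is compared against a limit that is itself scheduled (see the `whiten.whitening_limit` ScheduledFloat readings elsewhere in this log). A plausible form of such a metric, normalised so a perfectly white signal scores exactly 1.0; this is an assumption about the definition, not a quote of `scaling.py`:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """x: (num_frames, num_channels). Returns d * tr(C^2) / tr(C)^2 for
    the feature covariance C: exactly 1.0 when C is proportional to the
    identity, and growing as the spectrum becomes lopsided (compare the
    metric values from ~2.3 up to ~21.9 logged above)."""
    x = x - x.mean(dim=0)
    cov = x.t() @ x / x.shape[0]
    d = cov.shape[0]
    return (d * torch.trace(cov @ cov) / torch.trace(cov) ** 2).item()
```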
], batch size: 11, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:15:15,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=142578.66666666666, ans=0.0 +2024-07-28 10:15:16,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=142578.66666666666, ans=0.0 +2024-07-28 10:15:30,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=142592.0, ans=0.2 +2024-07-28 10:15:49,432 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.868e+01 6.350e+01 7.061e+01 1.022e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 10:15:49,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=142632.0, ans=0.125 +2024-07-28 10:15:49,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.88 vs. limit=22.5 +2024-07-28 10:15:50,093 INFO [train.py:1114] (3/4) Epoch 11, batch 4750, loss[loss=0.1859, simple_loss=0.2755, pruned_loss=0.04815, over 4510.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2841, pruned_loss=0.05501, over 934803.27 frames. ], batch size: 21, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:16:07,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=142658.66666666666, ans=0.125 +2024-07-28 10:16:10,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.45 vs. limit=15.0 +2024-07-28 10:16:15,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=142672.0, ans=0.125 +2024-07-28 10:16:28,970 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:16:42,916 INFO [train.py:1114] (3/4) Epoch 11, batch 4800, loss[loss=0.2144, simple_loss=0.3097, pruned_loss=0.05951, over 4696.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2843, pruned_loss=0.0558, over 931691.49 frames. ], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:16:44,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=142698.66666666666, ans=0.125 +2024-07-28 10:16:45,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=142698.66666666666, ans=15.0 +2024-07-28 10:16:45,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=142698.66666666666, ans=0.125 +2024-07-28 10:16:59,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=142712.0, ans=0.125 +2024-07-28 10:17:01,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=142712.0, ans=0.025 +2024-07-28 10:17:03,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=142712.0, ans=0.125 +2024-07-28 10:17:04,652 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.82 vs. 
limit=22.5 +2024-07-28 10:17:21,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=142752.0, ans=0.025 +2024-07-28 10:17:27,021 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.658e+01 6.076e+01 6.872e+01 9.188e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 10:17:35,868 INFO [train.py:1114] (3/4) Epoch 11, batch 4850, loss[loss=0.2235, simple_loss=0.3138, pruned_loss=0.06664, over 4745.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2849, pruned_loss=0.05573, over 931841.94 frames. ], batch size: 14, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:17:51,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.95 vs. limit=6.0 +2024-07-28 10:18:03,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=142805.33333333334, ans=0.07 +2024-07-28 10:18:03,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.82 vs. limit=15.0 +2024-07-28 10:18:05,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=142805.33333333334, ans=0.07 +2024-07-28 10:18:10,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=142818.66666666666, ans=0.125 +2024-07-28 10:18:11,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.23 vs. limit=15.0 +2024-07-28 10:18:15,434 INFO [train.py:1114] (3/4) Epoch 11, batch 4900, loss[loss=0.1896, simple_loss=0.2757, pruned_loss=0.05174, over 4755.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.284, pruned_loss=0.05545, over 933655.14 frames. ], batch size: 13, lr: 6.84e-03, grad_scale: 64.0 +2024-07-28 10:18:23,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=142832.0, ans=0.0 +2024-07-28 10:18:24,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=142832.0, ans=0.125 +2024-07-28 10:18:26,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.58 vs. limit=15.0 +2024-07-28 10:18:36,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=142858.66666666666, ans=0.125 +2024-07-28 10:18:38,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.76 vs. 
limit=15.0 +2024-07-28 10:18:39,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=142858.66666666666, ans=0.2 +2024-07-28 10:18:40,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=142872.0, ans=0.2 +2024-07-28 10:18:45,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=142872.0, ans=0.125 +2024-07-28 10:18:48,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=142885.33333333334, ans=0.0 +2024-07-28 10:18:54,121 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.557e+01 6.177e+01 6.945e+01 1.051e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 10:18:54,884 INFO [train.py:1114] (3/4) Epoch 11, batch 4950, loss[loss=0.2874, simple_loss=0.3411, pruned_loss=0.1168, over 3427.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2844, pruned_loss=0.05595, over 930790.83 frames. ], batch size: 35, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:19:03,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-07-28 10:19:04,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=142912.0, ans=0.07 +2024-07-28 10:19:05,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=142912.0, ans=0.09899494936611666 +2024-07-28 10:19:22,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=142952.0, ans=0.125 +2024-07-28 10:19:26,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.42 vs. limit=15.0 +2024-07-28 10:19:32,825 INFO [train.py:1114] (3/4) Epoch 11, batch 5000, loss[loss=0.2646, simple_loss=0.3578, pruned_loss=0.08575, over 4658.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2844, pruned_loss=0.05611, over 934886.54 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:19:55,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=142992.0, ans=0.025 +2024-07-28 10:19:56,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-07-28 10:20:00,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143005.33333333334, ans=0.1 +2024-07-28 10:20:17,708 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.560e+01 5.974e+01 6.425e+01 8.960e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 10:20:18,548 INFO [train.py:1114] (3/4) Epoch 11, batch 5050, loss[loss=0.188, simple_loss=0.2756, pruned_loss=0.05026, over 4850.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2828, pruned_loss=0.05486, over 937149.91 frames. 
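The learning rate in the `train.py` lines decays very slowly here (6.86e-03 at batch 4050 down to 6.83e-03 by batch 5050) even though the epoch is fixed at 11, which matches a schedule that is smooth in both batch and epoch, such as icefall's Eden scheduler. A sketch of that commonly published functional form; the default constants are placeholders, not the values used in this run:

```python
def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 7500.0, lr_epochs: float = 3.5) -> float:
    """Eden-style decay: a quartic-root falloff in both the batch index
    and the (possibly fractional) epoch. Treat the constants, and the
    exact form, as assumptions for illustration."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```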
], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:20:18,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=143032.0, ans=0.125 +2024-07-28 10:20:22,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=143032.0, ans=0.0 +2024-07-28 10:20:23,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=143032.0, ans=0.05 +2024-07-28 10:20:25,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=143045.33333333334, ans=0.125 +2024-07-28 10:20:27,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=143045.33333333334, ans=0.125 +2024-07-28 10:20:28,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.33 vs. limit=5.0 +2024-07-28 10:20:29,074 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.11 vs. limit=10.0 +2024-07-28 10:20:29,810 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.80 vs. limit=12.0 +2024-07-28 10:20:45,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=143085.33333333334, ans=10.0 +2024-07-28 10:20:47,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143085.33333333334, ans=0.125 +2024-07-28 10:20:49,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143085.33333333334, ans=0.125 +2024-07-28 10:20:49,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=143085.33333333334, ans=0.125 +2024-07-28 10:20:53,952 INFO [train.py:1114] (3/4) Epoch 11, batch 5100, loss[loss=0.1983, simple_loss=0.2864, pruned_loss=0.05513, over 4786.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.284, pruned_loss=0.0554, over 934661.96 frames. ], batch size: 12, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:21:07,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.09 vs. limit=15.0 +2024-07-28 10:21:08,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=143125.33333333334, ans=0.125 +2024-07-28 10:21:12,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=143125.33333333334, ans=0.2 +2024-07-28 10:21:13,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=143125.33333333334, ans=0.125 +2024-07-28 10:21:16,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.45 vs. limit=15.0 +2024-07-28 10:21:40,922 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.87 vs. 
limit=10.0 +2024-07-28 10:21:46,389 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.691e+01 6.335e+01 6.758e+01 9.887e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 10:21:47,045 INFO [train.py:1114] (3/4) Epoch 11, batch 5150, loss[loss=0.2315, simple_loss=0.3198, pruned_loss=0.07159, over 4829.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2834, pruned_loss=0.05507, over 935613.10 frames. ], batch size: 16, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:21:49,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.42 vs. limit=10.0 +2024-07-28 10:22:07,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.40 vs. limit=22.5 +2024-07-28 10:22:08,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143205.33333333334, ans=0.1 +2024-07-28 10:22:11,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=143205.33333333334, ans=0.2 +2024-07-28 10:22:20,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=143218.66666666666, ans=0.0 +2024-07-28 10:22:22,787 INFO [train.py:1114] (3/4) Epoch 11, batch 5200, loss[loss=0.1854, simple_loss=0.284, pruned_loss=0.04343, over 4661.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2837, pruned_loss=0.05521, over 935679.95 frames. ], batch size: 14, lr: 6.83e-03, grad_scale: 64.0 +2024-07-28 10:22:27,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.40 vs. limit=15.0 +2024-07-28 10:22:29,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=143245.33333333334, ans=0.0 +2024-07-28 10:22:36,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.22 vs. limit=22.5 +2024-07-28 10:22:40,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143258.66666666666, ans=0.125 +2024-07-28 10:22:56,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.779e+01 6.416e+01 7.170e+01 1.127e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 10:22:57,061 INFO [train.py:1114] (3/4) Epoch 11, batch 5250, loss[loss=0.1653, simple_loss=0.267, pruned_loss=0.03182, over 4901.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2827, pruned_loss=0.05485, over 935308.94 frames. ], batch size: 13, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:18,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143338.66666666666, ans=0.1 +2024-07-28 10:23:19,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=143338.66666666666, ans=0.025 +2024-07-28 10:23:30,542 INFO [train.py:1114] (3/4) Epoch 11, batch 5300, loss[loss=0.254, simple_loss=0.3389, pruned_loss=0.08461, over 4651.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2814, pruned_loss=0.05437, over 934246.46 frames. 
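Each `train.py` progress line reports two loss groups: `loss[...]` for the single batch just processed, weighted by its frame count, and `tot_loss[...]`, a running frame-weighted average whose effective window stays near ~9.4e5 frames throughout this stretch. A minimal sketch of such a decayed, frame-weighted aggregate; the decay constant is a guess chosen only to give a window of roughly that size:

```python
class RunningFrameLoss:
    """Frame-weighted running loss in the style of the tot_loss[...] fields."""

    def __init__(self, decay: float = 0.995):
        # With ~4.7e3 frames/batch, steady-state window ~ 4.7e3/(1-decay) ~ 9.4e5.
        self.decay = decay
        self.loss_sum = 0.0
        self.frame_sum = 0.0

    def update(self, batch_loss: float, num_frames: float) -> None:
        # Older batches fade geometrically; the new batch enters at full weight.
        self.loss_sum = self.decay * self.loss_sum + batch_loss * num_frames
        self.frame_sum = self.decay * self.frame_sum + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frame_sum, 1.0)
```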
], batch size: 16, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:23:40,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=143378.66666666666, ans=0.125 +2024-07-28 10:23:44,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=143378.66666666666, ans=0.125 +2024-07-28 10:23:47,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=143392.0, ans=0.025 +2024-07-28 10:24:06,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=143405.33333333334, ans=0.2 +2024-07-28 10:24:11,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=143418.66666666666, ans=10.0 +2024-07-28 10:24:13,405 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.558e+01 6.072e+01 7.139e+01 1.045e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 10:24:14,197 INFO [train.py:1114] (3/4) Epoch 11, batch 5350, loss[loss=0.1857, simple_loss=0.272, pruned_loss=0.04972, over 4516.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2822, pruned_loss=0.05426, over 936135.75 frames. ], batch size: 10, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:15,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=143432.0, ans=0.05 +2024-07-28 10:24:17,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=143432.0, ans=0.2 +2024-07-28 10:24:38,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=143472.0, ans=0.125 +2024-07-28 10:24:48,738 INFO [train.py:1114] (3/4) Epoch 11, batch 5400, loss[loss=0.2165, simple_loss=0.3001, pruned_loss=0.0665, over 4161.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2829, pruned_loss=0.05464, over 930418.87 frames. ], batch size: 25, lr: 6.82e-03, grad_scale: 64.0 +2024-07-28 10:24:49,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=143498.66666666666, ans=0.0 +2024-07-28 10:24:53,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=143498.66666666666, ans=10.0 +2024-07-28 10:24:56,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=143512.0, ans=0.125 +2024-07-28 10:24:59,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143512.0, ans=0.1 +2024-07-28 10:25:01,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=143525.33333333334, ans=0.5 +2024-07-28 10:25:17,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=143525.33333333334, ans=0.0 +2024-07-28 10:25:17,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.60 vs. 
limit=10.0 +2024-07-28 10:25:25,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=143538.66666666666, ans=0.125 +2024-07-28 10:25:26,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=143538.66666666666, ans=0.125 +2024-07-28 10:25:29,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=143538.66666666666, ans=0.025 +2024-07-28 10:25:33,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=143552.0, ans=0.2 +2024-07-28 10:25:34,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.38 vs. limit=15.0 +2024-07-28 10:25:36,121 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:25:38,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=143552.0, ans=0.125 +2024-07-28 10:25:38,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=143552.0, ans=0.125 +2024-07-28 10:25:40,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.583e+01 6.179e+01 6.977e+01 1.082e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 10:25:40,493 INFO [train.py:1114] (3/4) Epoch 11, batch 5450, loss[loss=0.1631, simple_loss=0.2423, pruned_loss=0.04189, over 4701.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2817, pruned_loss=0.05397, over 933164.28 frames. ], batch size: 11, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:25:41,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=143565.33333333334, ans=0.0 +2024-07-28 10:25:43,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=143565.33333333334, ans=0.2 +2024-07-28 10:25:45,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=143565.33333333334, ans=0.1 +2024-07-28 10:25:45,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143565.33333333334, ans=0.1 +2024-07-28 10:25:56,671 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.12 vs. 
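The trailing `grad_scale` value is the dynamic loss scale of native AMP training: it doubles after a stretch of overflow-free steps (32.0 → 64.0 around batch 4400 earlier in this log) and halves when an inf/nan gradient is detected (back to 32.0 at batch 5450 just above). A condensed sketch of that update rule; the interval and factors follow `torch.cuda.amp.GradScaler` defaults as I understand them:

```python
def update_grad_scale(scale: float, found_inf: bool, good_steps: int,
                      growth_interval: int = 2000, growth: float = 2.0,
                      backoff: float = 0.5) -> tuple[float, int]:
    """Dynamic loss-scale update in the style of torch.cuda.amp.GradScaler:
    halve on overflow, double after growth_interval clean steps.
    Returns (new_scale, new_good_step_count)."""
    if found_inf:
        return scale * backoff, 0          # e.g. 64.0 -> 32.0 near batch 5450
    good_steps += 1
    if good_steps >= growth_interval:
        return scale * growth, 0           # e.g. 32.0 -> 64.0 near batch 4400
    return scale, good_steps
```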
limit=12.0 +2024-07-28 10:26:03,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=143592.0, ans=22.5 +2024-07-28 10:26:04,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143605.33333333334, ans=0.125 +2024-07-28 10:26:06,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=143605.33333333334, ans=0.0 +2024-07-28 10:26:14,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=143618.66666666666, ans=0.05 +2024-07-28 10:26:19,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=143618.66666666666, ans=0.125 +2024-07-28 10:26:23,929 INFO [train.py:1114] (3/4) Epoch 11, batch 5500, loss[loss=0.1994, simple_loss=0.294, pruned_loss=0.05239, over 4163.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2816, pruned_loss=0.05436, over 929904.99 frames. ], batch size: 25, lr: 6.82e-03, grad_scale: 32.0 +2024-07-28 10:32:01,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=143658.66666666666, ans=0.2 +2024-07-28 10:32:16,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=143685.33333333334, ans=0.2 +2024-07-28 10:32:22,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.760e+01 6.498e+01 7.825e+01 1.226e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 10:32:22,340 INFO [train.py:1114] (3/4) Epoch 11, batch 5550, loss[loss=0.1414, simple_loss=0.231, pruned_loss=0.02588, over 4707.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2817, pruned_loss=0.05432, over 932026.55 frames. ], batch size: 12, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:32:28,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=143698.66666666666, ans=0.125 +2024-07-28 10:32:36,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0 +2024-07-28 10:32:38,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=143725.33333333334, ans=0.95 +2024-07-28 10:32:39,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=143725.33333333334, ans=0.1 +2024-07-28 10:32:42,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. limit=6.0 +2024-07-28 10:32:58,925 INFO [train.py:1114] (3/4) Epoch 11, batch 5600, loss[loss=0.1847, simple_loss=0.289, pruned_loss=0.04015, over 4740.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.283, pruned_loss=0.0548, over 933374.79 frames. 
], batch size: 14, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:32:59,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=143765.33333333334, ans=0.125 +2024-07-28 10:33:00,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=143765.33333333334, ans=0.0 +2024-07-28 10:33:06,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.11 vs. limit=15.0 +2024-07-28 10:33:11,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=143778.66666666666, ans=0.125 +2024-07-28 10:33:15,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=143792.0, ans=0.0 +2024-07-28 10:33:18,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=143792.0, ans=0.125 +2024-07-28 10:33:34,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143805.33333333334, ans=0.1 +2024-07-28 10:33:38,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=143818.66666666666, ans=0.1 +2024-07-28 10:33:39,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.52 vs. limit=15.0 +2024-07-28 10:33:40,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=143818.66666666666, ans=0.1 +2024-07-28 10:33:43,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.077e+01 6.890e+01 8.236e+01 1.387e+02, threshold=1.378e+02, percent-clipped=1.0 +2024-07-28 10:33:44,776 INFO [train.py:1114] (3/4) Epoch 11, batch 5650, loss[loss=0.2175, simple_loss=0.3019, pruned_loss=0.06659, over 4541.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2831, pruned_loss=0.05503, over 935910.68 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:33:56,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=143845.33333333334, ans=0.125 +2024-07-28 10:34:00,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=143858.66666666666, ans=0.05 +2024-07-28 10:34:03,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=143858.66666666666, ans=0.125 +2024-07-28 10:34:15,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=143885.33333333334, ans=0.1 +2024-07-28 10:34:16,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=143885.33333333334, ans=0.09899494936611666 +2024-07-28 10:34:21,289 INFO [train.py:1114] (3/4) Epoch 11, batch 5700, loss[loss=0.1949, simple_loss=0.2844, pruned_loss=0.05268, over 4697.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2833, pruned_loss=0.05529, over 937209.79 frames. 
], batch size: 13, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:34:25,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=143898.66666666666, ans=0.2 +2024-07-28 10:34:40,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=143925.33333333334, ans=0.125 +2024-07-28 10:34:56,152 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.558e+01 6.017e+01 6.629e+01 9.464e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 10:34:56,186 INFO [train.py:1114] (3/4) Epoch 11, batch 5750, loss[loss=0.21, simple_loss=0.3071, pruned_loss=0.05643, over 4754.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2836, pruned_loss=0.05546, over 937395.80 frames. ], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:35:06,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=143978.66666666666, ans=0.025 +2024-07-28 10:35:06,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.03 vs. limit=12.0 +2024-07-28 10:35:07,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=143978.66666666666, ans=0.125 +2024-07-28 10:35:07,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.18 vs. limit=10.0 +2024-07-28 10:35:08,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=143978.66666666666, ans=0.125 +2024-07-28 10:35:09,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.92 vs. limit=6.0 +2024-07-28 10:35:27,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=144005.33333333334, ans=0.125 +2024-07-28 10:35:27,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=144005.33333333334, ans=0.0 +2024-07-28 10:35:33,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=144018.66666666666, ans=0.125 +2024-07-28 10:35:37,648 INFO [train.py:1114] (3/4) Epoch 11, batch 5800, loss[loss=0.2241, simple_loss=0.3155, pruned_loss=0.06637, over 4713.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2837, pruned_loss=0.05526, over 936775.45 frames. ], batch size: 19, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:35:46,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=144045.33333333334, ans=0.0 +2024-07-28 10:35:53,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-07-28 10:36:07,471 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:36:10,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.07 vs. 
limit=15.0 +2024-07-28 10:36:11,530 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.841e+01 5.791e+01 6.490e+01 7.663e+01 1.100e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 10:36:11,564 INFO [train.py:1114] (3/4) Epoch 11, batch 5850, loss[loss=0.2201, simple_loss=0.3037, pruned_loss=0.0683, over 4580.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2826, pruned_loss=0.05445, over 937402.73 frames. ], batch size: 21, lr: 6.81e-03, grad_scale: 32.0 +2024-07-28 10:36:19,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=144112.0, ans=0.125 +2024-07-28 10:36:29,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0 +2024-07-28 10:36:30,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144125.33333333334, ans=0.125 +2024-07-28 10:36:34,011 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:36:36,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=144138.66666666666, ans=0.2 +2024-07-28 10:36:45,264 INFO [train.py:1114] (3/4) Epoch 11, batch 5900, loss[loss=0.1892, simple_loss=0.2802, pruned_loss=0.04913, over 4690.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2824, pruned_loss=0.05396, over 937710.57 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:36:46,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=144165.33333333334, ans=0.125 +2024-07-28 10:36:56,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.10 vs. limit=15.0 +2024-07-28 10:36:59,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.whiten.whitening_limit, batch_count=144178.66666666666, ans=12.0 +2024-07-28 10:36:59,462 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:37:02,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=144192.0, ans=0.125 +2024-07-28 10:37:21,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144192.0, ans=0.1 +2024-07-28 10:37:21,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=144192.0, ans=0.0 +2024-07-28 10:37:24,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.34 vs. 
limit=6.0 +2024-07-28 10:37:32,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=144205.33333333334, ans=0.0 +2024-07-28 10:37:36,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=144218.66666666666, ans=0.0 +2024-07-28 10:37:40,535 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.784e+01 6.286e+01 7.070e+01 1.230e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 10:37:46,653 INFO [train.py:1114] (3/4) Epoch 11, batch 5950, loss[loss=0.2209, simple_loss=0.3066, pruned_loss=0.06757, over 4681.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2819, pruned_loss=0.05368, over 939730.62 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:37:57,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_na.min_abs, batch_count=144245.33333333334, ans=0.02 +2024-07-28 10:38:05,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=144258.66666666666, ans=0.2 +2024-07-28 10:38:15,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=144272.0, ans=0.025 +2024-07-28 10:38:25,030 INFO [train.py:1114] (3/4) Epoch 11, batch 6000, loss[loss=0.1937, simple_loss=0.2893, pruned_loss=0.04901, over 4199.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2813, pruned_loss=0.05351, over 936827.54 frames. ], batch size: 25, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:38:26,395 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 10:39:05,466 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.9575, 5.1358, 4.9351, 5.6616], device='cuda:3') +2024-07-28 10:39:08,753 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1692, simple_loss=0.2732, pruned_loss=0.03262, over 944034.00 frames. +2024-07-28 10:39:08,754 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 10:39:11,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=144298.66666666666, ans=0.125 +2024-07-28 10:39:11,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=144298.66666666666, ans=0.2 +2024-07-28 10:39:21,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144312.0, ans=0.0 +2024-07-28 10:39:45,807 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.724e+01 6.626e+01 8.238e+01 1.220e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 10:39:45,841 INFO [train.py:1114] (3/4) Epoch 11, batch 6050, loss[loss=0.1703, simple_loss=0.2577, pruned_loss=0.04148, over 4767.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2805, pruned_loss=0.05297, over 938211.24 frames. 
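At batch 6000 the loop pauses to compute a full validation pass (loss=0.1692 here, appreciably below the running training loss), prints a sample of attention-weight entropies as a model-health diagnostic, and reports peak CUDA memory (4142MB). A compact sketch of those three diagnostics; `compute_loss` is an assumed helper and the attention tensor shape is illustrative:

```python
import torch

def attention_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Per-head entropy of attention rows (attn: heads x tgt x src, rows
    summing to 1), the kind of figure printed as attn_weights_entropy
    above; diffuse heads score high, peaky heads low."""
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)
    return ent.mean(dim=-1)                     # one value per head

@torch.no_grad()
def validation_loss(model, valid_loader) -> float:
    """Frame-weighted average loss over the whole validation set, as in
    the `validation: loss=0.1692 ... over 944034.00 frames` line."""
    model.eval()
    tot, frames = 0.0, 0.0
    for batch in valid_loader:
        loss, n = compute_loss(model, batch)    # assumed helper
        tot, frames = tot + float(loss) * n, frames + n
    model.train()
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // 2**20}MB")
    return tot / frames
```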
], batch size: 12, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:40:07,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=144392.0, ans=0.2 +2024-07-28 10:40:08,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=144392.0, ans=0.125 +2024-07-28 10:40:09,594 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.78 vs. limit=12.0 +2024-07-28 10:40:10,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=144392.0, ans=0.0 +2024-07-28 10:40:14,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.60 vs. limit=15.0 +2024-07-28 10:40:14,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144405.33333333334, ans=0.1 +2024-07-28 10:40:23,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144418.66666666666, ans=0.1 +2024-07-28 10:40:28,019 INFO [train.py:1114] (3/4) Epoch 11, batch 6100, loss[loss=0.2026, simple_loss=0.2962, pruned_loss=0.05451, over 4663.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2795, pruned_loss=0.05252, over 937521.54 frames. ], batch size: 15, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:40:28,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144432.0, ans=0.1 +2024-07-28 10:40:50,143 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.75 vs. limit=22.5 +2024-07-28 10:40:51,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=144472.0, ans=0.05 +2024-07-28 10:41:01,436 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.304e+01 5.350e+01 6.027e+01 7.047e+01 1.301e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 10:41:01,470 INFO [train.py:1114] (3/4) Epoch 11, batch 6150, loss[loss=0.2611, simple_loss=0.3219, pruned_loss=0.1001, over 3568.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2804, pruned_loss=0.05301, over 936483.63 frames. ], batch size: 35, lr: 6.80e-03, grad_scale: 32.0 +2024-07-28 10:41:05,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=144498.66666666666, ans=0.04949747468305833 +2024-07-28 10:41:19,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=8.66 vs. 
limit=15.0 +2024-07-28 10:41:23,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=144525.33333333334, ans=0.125 +2024-07-28 10:41:36,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=144552.0, ans=0.2 +2024-07-28 10:41:37,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=144552.0, ans=0.125 +2024-07-28 10:41:39,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.94 vs. limit=15.0 +2024-07-28 10:41:39,926 INFO [train.py:1114] (3/4) Epoch 11, batch 6200, loss[loss=0.1779, simple_loss=0.2716, pruned_loss=0.04211, over 4738.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2806, pruned_loss=0.05329, over 935942.85 frames. ], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:04,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=144605.33333333334, ans=0.025 +2024-07-28 10:42:05,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=8.84 vs. limit=12.0 +2024-07-28 10:42:06,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144605.33333333334, ans=0.1 +2024-07-28 10:42:09,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.88 vs. limit=22.5 +2024-07-28 10:42:11,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=144618.66666666666, ans=0.1 +2024-07-28 10:42:15,876 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.697e+01 6.150e+01 7.002e+01 1.067e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 10:42:15,910 INFO [train.py:1114] (3/4) Epoch 11, batch 6250, loss[loss=0.1842, simple_loss=0.2822, pruned_loss=0.04311, over 4811.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2814, pruned_loss=0.05384, over 932768.74 frames. ], batch size: 14, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:30,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=144658.66666666666, ans=0.125 +2024-07-28 10:42:38,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.02 vs. limit=22.5 +2024-07-28 10:42:45,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=144672.0, ans=0.125 +2024-07-28 10:42:58,534 INFO [train.py:1114] (3/4) Epoch 11, batch 6300, loss[loss=0.1592, simple_loss=0.2537, pruned_loss=0.03233, over 4506.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2809, pruned_loss=0.05348, over 929183.11 frames. 
], batch size: 10, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:42:58,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=144698.66666666666, ans=0.0 +2024-07-28 10:43:03,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=144698.66666666666, ans=0.1 +2024-07-28 10:43:11,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=144712.0, ans=0.125 +2024-07-28 10:43:17,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=144725.33333333334, ans=0.025 +2024-07-28 10:43:19,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=144725.33333333334, ans=0.125 +2024-07-28 10:43:28,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=144752.0, ans=0.0 +2024-07-28 10:43:36,749 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.336e+01 5.612e+01 6.120e+01 6.711e+01 9.743e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 10:43:36,783 INFO [train.py:1114] (3/4) Epoch 11, batch 6350, loss[loss=0.2145, simple_loss=0.296, pruned_loss=0.06651, over 4500.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2806, pruned_loss=0.05315, over 933416.79 frames. ], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:43:47,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=144778.66666666666, ans=0.1 +2024-07-28 10:43:51,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=144792.0, ans=0.1 +2024-07-28 10:44:04,404 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.34 vs. limit=6.0 +2024-07-28 10:44:06,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=144818.66666666666, ans=0.025 +2024-07-28 10:44:10,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=144818.66666666666, ans=0.0 +2024-07-28 10:44:11,946 INFO [train.py:1114] (3/4) Epoch 11, batch 6400, loss[loss=0.2074, simple_loss=0.2906, pruned_loss=0.06214, over 4636.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2803, pruned_loss=0.05319, over 934991.77 frames. ], batch size: 13, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:20,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.10 vs. limit=12.0 +2024-07-28 10:44:27,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=144858.66666666666, ans=0.025 +2024-07-28 10:44:45,102 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.883e+01 6.533e+01 7.974e+01 1.055e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 10:44:45,136 INFO [train.py:1114] (3/4) Epoch 11, batch 6450, loss[loss=0.218, simple_loss=0.3166, pruned_loss=0.05971, over 4506.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2809, pruned_loss=0.05276, over 938551.35 frames. 
], batch size: 21, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:44:51,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=144912.0, ans=0.0 +2024-07-28 10:44:57,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=144912.0, ans=0.0 +2024-07-28 10:45:03,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=144925.33333333334, ans=10.0 +2024-07-28 10:45:03,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=144925.33333333334, ans=0.125 +2024-07-28 10:45:08,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=144938.66666666666, ans=0.09899494936611666 +2024-07-28 10:45:18,202 INFO [train.py:1114] (3/4) Epoch 11, batch 6500, loss[loss=0.286, simple_loss=0.3558, pruned_loss=0.1081, over 3254.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2816, pruned_loss=0.05307, over 939694.07 frames. ], batch size: 35, lr: 6.79e-03, grad_scale: 32.0 +2024-07-28 10:45:28,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=144978.66666666666, ans=0.125 +2024-07-28 10:45:33,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.85 vs. limit=15.0 +2024-07-28 10:45:39,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145005.33333333334, ans=0.1 +2024-07-28 10:45:40,141 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.74 vs. limit=6.0 +2024-07-28 10:45:43,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=145005.33333333334, ans=0.0 +2024-07-28 10:45:45,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.12 vs. limit=15.0 +2024-07-28 10:45:46,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=145018.66666666666, ans=0.1 +2024-07-28 10:45:49,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=145018.66666666666, ans=0.2 +2024-07-28 10:45:51,639 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.465e+01 5.571e+01 6.263e+01 7.370e+01 1.165e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 10:45:51,673 INFO [train.py:1114] (3/4) Epoch 11, batch 6550, loss[loss=0.1599, simple_loss=0.2482, pruned_loss=0.0358, over 4804.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2806, pruned_loss=0.05257, over 942683.86 frames. ], batch size: 11, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:45:59,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. 
limit=15.0 +2024-07-28 10:46:00,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=145045.33333333334, ans=0.05 +2024-07-28 10:46:01,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=145045.33333333334, ans=0.0 +2024-07-28 10:46:02,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=145045.33333333334, ans=0.0 +2024-07-28 10:46:07,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=145058.66666666666, ans=0.125 +2024-07-28 10:46:21,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=145085.33333333334, ans=0.0 +2024-07-28 10:46:22,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.38 vs. limit=5.0 +2024-07-28 10:46:22,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=145085.33333333334, ans=0.025 +2024-07-28 10:46:25,932 INFO [train.py:1114] (3/4) Epoch 11, batch 6600, loss[loss=0.1649, simple_loss=0.2623, pruned_loss=0.03373, over 4935.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2804, pruned_loss=0.0526, over 944696.54 frames. ], batch size: 14, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:46:26,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.98 vs. limit=10.0 +2024-07-28 10:46:33,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-28 10:46:42,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145125.33333333334, ans=0.125 +2024-07-28 10:46:55,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145152.0, ans=0.1 +2024-07-28 10:46:59,235 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.651e+01 6.150e+01 6.851e+01 8.170e+01 1.263e+02, threshold=1.370e+02, percent-clipped=1.0 +2024-07-28 10:46:59,270 INFO [train.py:1114] (3/4) Epoch 11, batch 6650, loss[loss=0.2249, simple_loss=0.3114, pruned_loss=0.06921, over 4636.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2806, pruned_loss=0.05288, over 943541.51 frames. ], batch size: 17, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:46:59,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.42 vs. limit=10.0 +2024-07-28 10:47:00,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=145165.33333333334, ans=0.125 +2024-07-28 10:47:08,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. 
limit=15.0 +2024-07-28 10:47:18,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=145178.66666666666, ans=0.125 +2024-07-28 10:47:27,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.77 vs. limit=6.0 +2024-07-28 10:47:43,298 INFO [train.py:1114] (3/4) Epoch 11, batch 6700, loss[loss=0.189, simple_loss=0.2823, pruned_loss=0.04783, over 4693.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2809, pruned_loss=0.05314, over 942390.96 frames. ], batch size: 19, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:47:44,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=145232.0, ans=0.1 +2024-07-28 10:47:44,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=145232.0, ans=0.125 +2024-07-28 10:47:44,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=145232.0, ans=0.2 +2024-07-28 10:48:02,283 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:48:03,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.91 vs. limit=15.0 +2024-07-28 10:48:05,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=145272.0, ans=0.0 +2024-07-28 10:48:11,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145272.0, ans=0.125 +2024-07-28 10:48:14,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=145285.33333333334, ans=0.125 +2024-07-28 10:48:21,937 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.733e+01 5.801e+01 6.276e+01 7.380e+01 1.183e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 10:48:21,971 INFO [train.py:1114] (3/4) Epoch 11, batch 6750, loss[loss=0.2064, simple_loss=0.2941, pruned_loss=0.05932, over 4283.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2819, pruned_loss=0.05381, over 940483.81 frames. ], batch size: 26, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:48:22,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=145298.66666666666, ans=0.025 +2024-07-28 10:48:25,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.16 vs. limit=15.0 +2024-07-28 10:48:30,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=145312.0, ans=0.125 +2024-07-28 10:48:38,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=145325.33333333334, ans=0.125 +2024-07-28 10:48:45,184 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. 
limit=15.0 +2024-07-28 10:48:49,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145338.66666666666, ans=0.125 +2024-07-28 10:49:05,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=145365.33333333334, ans=0.125 +2024-07-28 10:49:06,044 INFO [train.py:1114] (3/4) Epoch 11, batch 6800, loss[loss=0.1671, simple_loss=0.2705, pruned_loss=0.03189, over 4639.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2819, pruned_loss=0.0538, over 938680.42 frames. ], batch size: 13, lr: 6.78e-03, grad_scale: 32.0 +2024-07-28 10:49:08,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=145365.33333333334, ans=0.0 +2024-07-28 10:49:10,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.04 vs. limit=22.5 +2024-07-28 10:49:20,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.64 vs. limit=15.0 +2024-07-28 10:49:28,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=145405.33333333334, ans=0.125 +2024-07-28 10:49:43,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.530e+01 6.115e+01 7.020e+01 1.132e+02, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 10:49:43,914 INFO [train.py:1114] (3/4) Epoch 11, batch 6850, loss[loss=0.171, simple_loss=0.2708, pruned_loss=0.03564, over 4705.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2814, pruned_loss=0.05349, over 940496.69 frames. ], batch size: 13, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:49:47,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=145432.0, ans=0.125 +2024-07-28 10:49:49,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=145445.33333333334, ans=0.125 +2024-07-28 10:50:22,661 INFO [train.py:1114] (3/4) Epoch 11, batch 6900, loss[loss=0.1944, simple_loss=0.2781, pruned_loss=0.05538, over 4964.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2813, pruned_loss=0.05327, over 942703.49 frames. 
], batch size: 13, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:50:26,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=145498.66666666666, ans=0.125 +2024-07-28 10:50:28,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=145498.66666666666, ans=0.125 +2024-07-28 10:50:31,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=145512.0, ans=0.125 +2024-07-28 10:50:43,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=145538.66666666666, ans=0.2 +2024-07-28 10:50:51,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=145552.0, ans=0.125 +2024-07-28 10:50:57,127 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.940e+01 5.560e+01 6.281e+01 7.160e+01 1.002e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 10:50:57,161 INFO [train.py:1114] (3/4) Epoch 11, batch 6950, loss[loss=0.1852, simple_loss=0.273, pruned_loss=0.04869, over 4563.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2806, pruned_loss=0.05298, over 940036.38 frames. ], batch size: 10, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:50:57,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.11 vs. limit=15.0 +2024-07-28 10:51:00,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=145565.33333333334, ans=0.0 +2024-07-28 10:51:03,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.84 vs. limit=15.0 +2024-07-28 10:51:10,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=145592.0, ans=0.2 +2024-07-28 10:51:20,880 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:51:22,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=145605.33333333334, ans=0.1 +2024-07-28 10:51:30,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=145618.66666666666, ans=0.025 +2024-07-28 10:51:31,226 INFO [train.py:1114] (3/4) Epoch 11, batch 7000, loss[loss=0.2248, simple_loss=0.3078, pruned_loss=0.07086, over 4599.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2801, pruned_loss=0.05288, over 938318.93 frames. 
], batch size: 17, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:51:31,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=145632.0, ans=0.125 +2024-07-28 10:51:35,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145632.0, ans=0.1 +2024-07-28 10:51:37,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=145645.33333333334, ans=0.125 +2024-07-28 10:51:46,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=145658.66666666666, ans=0.125 +2024-07-28 10:52:02,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=145685.33333333334, ans=0.2 +2024-07-28 10:52:03,910 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.703e+01 6.345e+01 7.288e+01 1.132e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 10:52:03,944 INFO [train.py:1114] (3/4) Epoch 11, batch 7050, loss[loss=0.2179, simple_loss=0.302, pruned_loss=0.06694, over 4721.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2813, pruned_loss=0.05283, over 941665.64 frames. ], batch size: 19, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:52:21,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=145725.33333333334, ans=0.125 +2024-07-28 10:52:29,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=145738.66666666666, ans=0.125 +2024-07-28 10:52:32,181 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 10:52:38,907 INFO [train.py:1114] (3/4) Epoch 11, batch 7100, loss[loss=0.1889, simple_loss=0.2815, pruned_loss=0.04817, over 4799.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2822, pruned_loss=0.05373, over 936517.29 frames. ], batch size: 15, lr: 6.77e-03, grad_scale: 32.0 +2024-07-28 10:52:49,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.35 vs. limit=15.0 +2024-07-28 10:52:57,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=145805.33333333334, ans=0.2 +2024-07-28 10:53:09,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=145818.66666666666, ans=0.125 +2024-07-28 10:53:10,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=145818.66666666666, ans=0.025 +2024-07-28 10:53:11,357 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.413e+01 6.227e+01 7.503e+01 1.030e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 10:53:11,391 INFO [train.py:1114] (3/4) Epoch 11, batch 7150, loss[loss=0.2204, simple_loss=0.2977, pruned_loss=0.07152, over 4567.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2806, pruned_loss=0.05338, over 937710.24 frames. 
], batch size: 21, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:53:13,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=145832.0, ans=0.1 +2024-07-28 10:53:16,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.02 vs. limit=22.5 +2024-07-28 10:53:23,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. limit=12.0 +2024-07-28 10:53:24,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=145858.66666666666, ans=0.0 +2024-07-28 10:53:27,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=145858.66666666666, ans=0.125 +2024-07-28 10:53:29,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.68 vs. limit=15.0 +2024-07-28 10:53:30,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=145858.66666666666, ans=0.125 +2024-07-28 10:53:32,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=145872.0, ans=0.125 +2024-07-28 10:53:33,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.61 vs. limit=15.0 +2024-07-28 10:53:41,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=145885.33333333334, ans=0.0 +2024-07-28 10:53:44,203 INFO [train.py:1114] (3/4) Epoch 11, batch 7200, loss[loss=0.21, simple_loss=0.3012, pruned_loss=0.05938, over 4786.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2809, pruned_loss=0.05371, over 937727.51 frames. ], batch size: 15, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:53:47,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.79 vs. limit=10.0 +2024-07-28 10:53:56,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=145912.0, ans=0.0 +2024-07-28 10:53:59,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=145925.33333333334, ans=0.2 +2024-07-28 10:54:01,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=145925.33333333334, ans=0.025 +2024-07-28 10:54:08,183 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.90 vs. 
limit=6.0 +2024-07-28 10:54:11,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=145952.0, ans=0.04949747468305833 +2024-07-28 10:54:22,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=145965.33333333334, ans=0.0 +2024-07-28 10:54:23,288 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.502e+01 5.961e+01 6.542e+01 9.167e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 10:54:23,322 INFO [train.py:1114] (3/4) Epoch 11, batch 7250, loss[loss=0.1899, simple_loss=0.2724, pruned_loss=0.05364, over 4849.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2807, pruned_loss=0.0536, over 939797.42 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:54:23,789 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.54 vs. limit=10.0 +2024-07-28 10:54:28,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=145965.33333333334, ans=0.125 +2024-07-28 10:54:31,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=145978.66666666666, ans=0.0 +2024-07-28 10:54:32,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.77 vs. limit=22.5 +2024-07-28 10:54:39,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=145992.0, ans=0.0 +2024-07-28 10:55:01,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=146005.33333333334, ans=0.2 +2024-07-28 10:55:03,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=146005.33333333334, ans=0.125 +2024-07-28 10:55:03,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=146005.33333333334, ans=0.125 +2024-07-28 10:55:14,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=10.73 vs. limit=15.0 +2024-07-28 10:55:14,510 INFO [train.py:1114] (3/4) Epoch 11, batch 7300, loss[loss=0.1614, simple_loss=0.2444, pruned_loss=0.03919, over 4864.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2802, pruned_loss=0.05323, over 939926.39 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:55:18,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146032.0, ans=0.1 +2024-07-28 10:55:20,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=146032.0, ans=0.125 +2024-07-28 10:55:20,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.39 vs. 
limit=15.0 +2024-07-28 10:55:32,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=146058.66666666666, ans=10.0 +2024-07-28 10:55:41,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=146072.0, ans=0.0 +2024-07-28 10:55:44,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=146085.33333333334, ans=0.0 +2024-07-28 10:55:49,208 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.444e+01 5.651e+01 6.063e+01 6.776e+01 1.053e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 10:55:52,472 INFO [train.py:1114] (3/4) Epoch 11, batch 7350, loss[loss=0.1663, simple_loss=0.263, pruned_loss=0.03481, over 4638.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2802, pruned_loss=0.05313, over 939146.57 frames. ], batch size: 12, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:56:10,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=146112.0, ans=0.025 +2024-07-28 10:56:36,374 INFO [train.py:1114] (3/4) Epoch 11, batch 7400, loss[loss=0.1828, simple_loss=0.2617, pruned_loss=0.052, over 4689.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2799, pruned_loss=0.05304, over 940305.32 frames. ], batch size: 13, lr: 6.76e-03, grad_scale: 32.0 +2024-07-28 10:56:43,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.01 vs. limit=15.0 +2024-07-28 10:56:56,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=146192.0, ans=0.125 +2024-07-28 10:57:02,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146205.33333333334, ans=0.1 +2024-07-28 10:57:11,528 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.675e+01 6.306e+01 7.270e+01 1.053e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 10:57:11,562 INFO [train.py:1114] (3/4) Epoch 11, batch 7450, loss[loss=0.1623, simple_loss=0.2387, pruned_loss=0.04293, over 4605.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2791, pruned_loss=0.0531, over 937550.75 frames. 
], batch size: 11, lr: 6.76e-03, grad_scale: 64.0 +2024-07-28 10:57:12,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=146232.0, ans=0.0 +2024-07-28 10:57:16,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=146232.0, ans=0.125 +2024-07-28 10:57:26,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146258.66666666666, ans=0.0 +2024-07-28 10:57:33,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=146272.0, ans=0.04949747468305833 +2024-07-28 10:57:41,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=146285.33333333334, ans=0.0 +2024-07-28 10:57:44,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146298.66666666666, ans=0.125 +2024-07-28 10:57:45,284 INFO [train.py:1114] (3/4) Epoch 11, batch 7500, loss[loss=0.2175, simple_loss=0.3007, pruned_loss=0.06714, over 3270.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2801, pruned_loss=0.05353, over 935541.30 frames. ], batch size: 35, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:57:57,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=146312.0, ans=0.125 +2024-07-28 10:57:59,416 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=21.38 vs. limit=22.5 +2024-07-28 10:58:10,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=146338.66666666666, ans=0.125 +2024-07-28 10:58:12,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.13 vs. limit=22.5 +2024-07-28 10:58:13,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=146338.66666666666, ans=0.0 +2024-07-28 10:58:36,011 INFO [train.py:1114] (3/4) Epoch 11, batch 7550, loss[loss=0.2283, simple_loss=0.3079, pruned_loss=0.07436, over 4585.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2817, pruned_loss=0.05399, over 935379.35 frames. ], batch size: 17, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:58:37,333 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.706e+01 6.227e+01 6.985e+01 1.230e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 10:58:49,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.05 vs. limit=15.0 +2024-07-28 10:58:54,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=146392.0, ans=0.0 +2024-07-28 10:59:03,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=146418.66666666666, ans=0.0 +2024-07-28 10:59:10,210 INFO [train.py:1114] (3/4) Epoch 11, batch 7600, loss[loss=0.1784, simple_loss=0.2743, pruned_loss=0.04122, over 4813.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2819, pruned_loss=0.05411, over 937374.74 frames. 
], batch size: 14, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:59:11,324 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.56 vs. limit=12.0 +2024-07-28 10:59:14,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146432.0, ans=0.1 +2024-07-28 10:59:25,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=146445.33333333334, ans=0.125 +2024-07-28 10:59:33,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146458.66666666666, ans=0.1 +2024-07-28 10:59:53,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-28 10:59:54,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=146485.33333333334, ans=0.04949747468305833 +2024-07-28 10:59:56,751 INFO [train.py:1114] (3/4) Epoch 11, batch 7650, loss[loss=0.1716, simple_loss=0.2654, pruned_loss=0.03888, over 4945.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2812, pruned_loss=0.05371, over 936413.97 frames. ], batch size: 12, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 10:59:57,329 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.695e+01 5.694e+01 6.162e+01 7.312e+01 1.050e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 10:59:59,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=146498.66666666666, ans=0.125 +2024-07-28 11:00:03,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=146512.0, ans=0.025 +2024-07-28 11:00:15,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=146525.33333333334, ans=0.125 +2024-07-28 11:00:19,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=146525.33333333334, ans=0.0 +2024-07-28 11:00:20,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=146525.33333333334, ans=0.2 +2024-07-28 11:00:28,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=146538.66666666666, ans=0.125 +2024-07-28 11:00:38,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.52 vs. limit=15.0 +2024-07-28 11:00:52,405 INFO [train.py:1114] (3/4) Epoch 11, batch 7700, loss[loss=0.2117, simple_loss=0.2975, pruned_loss=0.06296, over 4694.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2815, pruned_loss=0.05383, over 933487.53 frames. ], batch size: 13, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 11:01:01,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=22.00 vs. 
limit=22.5 +2024-07-28 11:01:04,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=146592.0, ans=0.1 +2024-07-28 11:01:07,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=146592.0, ans=0.125 +2024-07-28 11:01:07,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=146592.0, ans=0.125 +2024-07-28 11:01:10,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=146592.0, ans=0.125 +2024-07-28 11:01:15,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.70 vs. limit=15.0 +2024-07-28 11:01:17,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=146605.33333333334, ans=0.125 +2024-07-28 11:01:18,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=146618.66666666666, ans=0.09899494936611666 +2024-07-28 11:01:20,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=146618.66666666666, ans=0.125 +2024-07-28 11:01:22,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=146618.66666666666, ans=0.0 +2024-07-28 11:01:24,497 INFO [train.py:1114] (3/4) Epoch 11, batch 7750, loss[loss=0.2178, simple_loss=0.3034, pruned_loss=0.06612, over 4945.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2833, pruned_loss=0.0542, over 934877.18 frames. ], batch size: 14, lr: 6.75e-03, grad_scale: 32.0 +2024-07-28 11:01:25,062 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.502e+01 5.839e+01 6.536e+01 9.660e+01, threshold=1.168e+02, percent-clipped=0.0 +2024-07-28 11:01:27,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=146632.0, ans=0.125 +2024-07-28 11:01:40,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=146658.66666666666, ans=0.125 +2024-07-28 11:01:45,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.40 vs. limit=15.0 +2024-07-28 11:01:57,060 INFO [train.py:1114] (3/4) Epoch 11, batch 7800, loss[loss=0.1467, simple_loss=0.2464, pruned_loss=0.02352, over 4663.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2832, pruned_loss=0.05401, over 937006.21 frames. 
], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:02:05,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=146712.0, ans=0.0 +2024-07-28 11:02:08,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=146712.0, ans=0.0 +2024-07-28 11:02:09,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=146725.33333333334, ans=10.0 +2024-07-28 11:02:10,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=146725.33333333334, ans=0.2 +2024-07-28 11:02:14,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=146725.33333333334, ans=0.125 +2024-07-28 11:02:17,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=146738.66666666666, ans=0.125 +2024-07-28 11:02:20,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=146738.66666666666, ans=0.0 +2024-07-28 11:02:20,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=146738.66666666666, ans=0.2 +2024-07-28 11:02:20,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146738.66666666666, ans=0.1 +2024-07-28 11:02:29,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0 +2024-07-28 11:02:29,900 INFO [train.py:1114] (3/4) Epoch 11, batch 7850, loss[loss=0.2078, simple_loss=0.2737, pruned_loss=0.07094, over 4607.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2832, pruned_loss=0.05428, over 935512.69 frames. ], batch size: 10, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:02:30,521 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.015e+01 5.817e+01 6.561e+01 7.399e+01 1.277e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-28 11:02:33,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=146765.33333333334, ans=0.0 +2024-07-28 11:02:33,257 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:02:36,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=146778.66666666666, ans=0.125 +2024-07-28 11:02:41,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=146778.66666666666, ans=0.1 +2024-07-28 11:02:44,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.45 vs. limit=10.0 +2024-07-28 11:02:46,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. 
limit=12.0 +2024-07-28 11:02:47,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146792.0, ans=0.1 +2024-07-28 11:02:49,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.35 vs. limit=15.0 +2024-07-28 11:02:51,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=146805.33333333334, ans=0.125 +2024-07-28 11:02:55,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. limit=6.0 +2024-07-28 11:03:00,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=146818.66666666666, ans=0.125 +2024-07-28 11:03:02,644 INFO [train.py:1114] (3/4) Epoch 11, batch 7900, loss[loss=0.23, simple_loss=0.3193, pruned_loss=0.07036, over 4879.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2845, pruned_loss=0.05475, over 932353.11 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:03:22,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=146858.66666666666, ans=0.1 +2024-07-28 11:03:22,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=146858.66666666666, ans=0.0 +2024-07-28 11:03:22,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=146858.66666666666, ans=0.04949747468305833 +2024-07-28 11:03:29,228 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:03:31,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.50 vs. limit=15.0 +2024-07-28 11:03:33,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=146872.0, ans=0.125 +2024-07-28 11:03:42,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.24 vs. limit=22.5 +2024-07-28 11:03:43,575 INFO [train.py:1114] (3/4) Epoch 11, batch 7950, loss[loss=0.2554, simple_loss=0.3272, pruned_loss=0.09178, over 3651.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2843, pruned_loss=0.05444, over 935052.59 frames. ], batch size: 35, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:03:44,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.704e+01 6.229e+01 6.685e+01 9.610e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 11:03:55,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.55 vs. limit=15.0 +2024-07-28 11:03:58,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=146925.33333333334, ans=0.125 +2024-07-28 11:03:58,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.51 vs. 
limit=15.0 +2024-07-28 11:04:04,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=146938.66666666666, ans=0.125 +2024-07-28 11:04:13,957 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:04:18,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=146952.0, ans=0.125 +2024-07-28 11:04:24,560 INFO [train.py:1114] (3/4) Epoch 11, batch 8000, loss[loss=0.1634, simple_loss=0.2372, pruned_loss=0.04482, over 4620.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2822, pruned_loss=0.05399, over 933946.31 frames. ], batch size: 11, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:04:32,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=146978.66666666666, ans=0.125 +2024-07-28 11:04:33,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=146978.66666666666, ans=0.125 +2024-07-28 11:04:57,135 INFO [train.py:1114] (3/4) Epoch 11, batch 8050, loss[loss=0.2144, simple_loss=0.3079, pruned_loss=0.06052, over 4815.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2824, pruned_loss=0.05406, over 933712.25 frames. ], batch size: 14, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:04:57,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.054e+01 5.507e+01 6.263e+01 7.215e+01 1.111e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 11:05:06,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=147045.33333333334, ans=0.0 +2024-07-28 11:05:20,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=147072.0, ans=0.0 +2024-07-28 11:05:24,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=147085.33333333334, ans=0.0 +2024-07-28 11:05:28,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=147098.66666666666, ans=0.125 +2024-07-28 11:05:28,978 INFO [train.py:1114] (3/4) Epoch 11, batch 8100, loss[loss=0.2288, simple_loss=0.3093, pruned_loss=0.0742, over 4810.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2828, pruned_loss=0.05434, over 934033.82 frames. 
], batch size: 15, lr: 6.74e-03, grad_scale: 32.0 +2024-07-28 11:05:32,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=147098.66666666666, ans=0.125 +2024-07-28 11:05:42,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=147112.0, ans=0.125 +2024-07-28 11:05:44,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=147112.0, ans=0.025 +2024-07-28 11:05:54,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=147125.33333333334, ans=0.125 +2024-07-28 11:06:11,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=147138.66666666666, ans=0.2 +2024-07-28 11:06:14,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.35 vs. limit=22.5 +2024-07-28 11:06:16,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=147152.0, ans=0.0 +2024-07-28 11:06:19,674 INFO [train.py:1114] (3/4) Epoch 11, batch 8150, loss[loss=0.2198, simple_loss=0.3045, pruned_loss=0.06755, over 4818.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2821, pruned_loss=0.0544, over 937226.54 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:06:20,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.704e+01 5.553e+01 6.182e+01 6.972e+01 1.059e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 11:06:36,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147192.0, ans=0.0 +2024-07-28 11:06:45,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147205.33333333334, ans=0.0 +2024-07-28 11:06:54,226 INFO [train.py:1114] (3/4) Epoch 11, batch 8200, loss[loss=0.1833, simple_loss=0.2709, pruned_loss=0.04786, over 4810.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2811, pruned_loss=0.05351, over 938169.41 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:06:56,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=147232.0, ans=0.2 +2024-07-28 11:06:56,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=147232.0, ans=0.0 +2024-07-28 11:06:56,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.69 vs. 
limit=15.0 +2024-07-28 11:07:03,814 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:07:04,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=147245.33333333334, ans=0.0 +2024-07-28 11:07:06,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=147258.66666666666, ans=0.125 +2024-07-28 11:07:08,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=147258.66666666666, ans=0.0 +2024-07-28 11:07:10,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147258.66666666666, ans=0.1 +2024-07-28 11:07:21,689 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.76 vs. limit=15.0 +2024-07-28 11:07:26,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=147272.0, ans=0.125 +2024-07-28 11:07:33,403 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.94 vs. limit=12.0 +2024-07-28 11:07:34,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=147285.33333333334, ans=0.0 +2024-07-28 11:07:38,665 INFO [train.py:1114] (3/4) Epoch 11, batch 8250, loss[loss=0.1952, simple_loss=0.2734, pruned_loss=0.05856, over 4898.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2821, pruned_loss=0.05434, over 938347.06 frames. ], batch size: 13, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:07:38,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=147298.66666666666, ans=0.2 +2024-07-28 11:07:39,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.575e+01 5.968e+01 7.239e+01 1.462e+02, threshold=1.194e+02, percent-clipped=1.0 +2024-07-28 11:07:39,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=147298.66666666666, ans=0.0 +2024-07-28 11:07:46,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=147312.0, ans=0.0 +2024-07-28 11:08:16,283 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.66 vs. limit=10.0 +2024-07-28 11:08:17,921 INFO [train.py:1114] (3/4) Epoch 11, batch 8300, loss[loss=0.197, simple_loss=0.2737, pruned_loss=0.06009, over 4902.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2831, pruned_loss=0.05478, over 937997.75 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:08:20,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.84 vs. 
limit=15.0 +2024-07-28 11:08:21,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=147365.33333333334, ans=0.2 +2024-07-28 11:08:39,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=147405.33333333334, ans=0.2 +2024-07-28 11:08:41,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147405.33333333334, ans=0.1 +2024-07-28 11:08:43,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147405.33333333334, ans=0.125 +2024-07-28 11:08:51,019 INFO [train.py:1114] (3/4) Epoch 11, batch 8350, loss[loss=0.2026, simple_loss=0.2889, pruned_loss=0.0582, over 4809.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2819, pruned_loss=0.05385, over 941022.33 frames. ], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:08:51,649 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.686e+01 6.163e+01 6.949e+01 9.683e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 11:09:09,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=147458.66666666666, ans=0.0 +2024-07-28 11:09:11,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=147472.0, ans=0.2 +2024-07-28 11:09:17,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=147485.33333333334, ans=0.2 +2024-07-28 11:09:20,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=147485.33333333334, ans=0.125 +2024-07-28 11:09:24,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=147485.33333333334, ans=0.1 +2024-07-28 11:09:25,413 INFO [train.py:1114] (3/4) Epoch 11, batch 8400, loss[loss=0.177, simple_loss=0.2593, pruned_loss=0.04738, over 4769.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2823, pruned_loss=0.05421, over 940238.45 frames. ], batch size: 12, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:09:28,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=147498.66666666666, ans=0.0 +2024-07-28 11:09:37,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=147525.33333333334, ans=0.05 +2024-07-28 11:09:38,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147525.33333333334, ans=0.1 +2024-07-28 11:09:44,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147538.66666666666, ans=0.125 +2024-07-28 11:09:57,655 INFO [train.py:1114] (3/4) Epoch 11, batch 8450, loss[loss=0.2061, simple_loss=0.3037, pruned_loss=0.05422, over 4790.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.283, pruned_loss=0.05428, over 939137.19 frames. 
], batch size: 15, lr: 6.73e-03, grad_scale: 32.0 +2024-07-28 11:09:58,225 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.711e+01 6.250e+01 7.138e+01 1.059e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 11:10:04,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147578.66666666666, ans=0.1 +2024-07-28 11:10:07,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=147578.66666666666, ans=0.125 +2024-07-28 11:10:20,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=147605.33333333334, ans=0.125 +2024-07-28 11:10:21,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=147605.33333333334, ans=0.0 +2024-07-28 11:10:22,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=147618.66666666666, ans=0.125 +2024-07-28 11:10:24,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=147618.66666666666, ans=0.025 +2024-07-28 11:10:29,744 INFO [train.py:1114] (3/4) Epoch 11, batch 8500, loss[loss=0.2133, simple_loss=0.2833, pruned_loss=0.07168, over 4617.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2816, pruned_loss=0.05389, over 938634.15 frames. ], batch size: 11, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:10:35,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=147632.0, ans=0.1 +2024-07-28 11:10:44,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=147658.66666666666, ans=0.2 +2024-07-28 11:10:49,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=147672.0, ans=0.125 +2024-07-28 11:10:53,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=147672.0, ans=0.125 +2024-07-28 11:10:58,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147685.33333333334, ans=0.125 +2024-07-28 11:11:01,925 INFO [train.py:1114] (3/4) Epoch 11, batch 8550, loss[loss=0.1848, simple_loss=0.2553, pruned_loss=0.05714, over 4811.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2813, pruned_loss=0.05375, over 939484.30 frames. 
], batch size: 11, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:11:03,201 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.768e+01 6.482e+01 7.355e+01 1.079e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 11:11:06,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=147698.66666666666, ans=0.2 +2024-07-28 11:11:21,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=147738.66666666666, ans=0.0 +2024-07-28 11:11:22,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=147738.66666666666, ans=0.125 +2024-07-28 11:11:30,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.32 vs. limit=22.5 +2024-07-28 11:11:34,228 INFO [train.py:1114] (3/4) Epoch 11, batch 8600, loss[loss=0.1931, simple_loss=0.2978, pruned_loss=0.04419, over 4798.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2804, pruned_loss=0.05333, over 939056.15 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:11:37,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=147765.33333333334, ans=0.0 +2024-07-28 11:11:39,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=147765.33333333334, ans=0.125 +2024-07-28 11:11:40,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=147778.66666666666, ans=10.0 +2024-07-28 11:11:49,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=147792.0, ans=0.125 +2024-07-28 11:11:51,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.27 vs. limit=22.5 +2024-07-28 11:11:55,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=147805.33333333334, ans=0.125 +2024-07-28 11:11:59,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=147818.66666666666, ans=0.125 +2024-07-28 11:12:05,724 INFO [train.py:1114] (3/4) Epoch 11, batch 8650, loss[loss=0.198, simple_loss=0.2862, pruned_loss=0.05493, over 4897.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2803, pruned_loss=0.05334, over 939997.94 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:12:06,995 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.868e+01 5.716e+01 6.623e+01 8.030e+01 1.303e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-28 11:12:14,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147845.33333333334, ans=0.1 +2024-07-28 11:12:19,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.41 vs. 
limit=6.0 +2024-07-28 11:12:25,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=147858.66666666666, ans=0.1 +2024-07-28 11:12:29,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0 +2024-07-28 11:12:33,396 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:12:34,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=147885.33333333334, ans=0.125 +2024-07-28 11:12:34,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=147885.33333333334, ans=0.125 +2024-07-28 11:12:39,081 INFO [train.py:1114] (3/4) Epoch 11, batch 8700, loss[loss=0.2237, simple_loss=0.3136, pruned_loss=0.06688, over 4767.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2818, pruned_loss=0.05393, over 938299.66 frames. ], batch size: 13, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:12:39,762 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:12:43,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=147898.66666666666, ans=0.125 +2024-07-28 11:12:58,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=147938.66666666666, ans=0.125 +2024-07-28 11:13:11,401 INFO [train.py:1114] (3/4) Epoch 11, batch 8750, loss[loss=0.2101, simple_loss=0.2908, pruned_loss=0.06474, over 4674.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2821, pruned_loss=0.05419, over 936815.95 frames. ], batch size: 15, lr: 6.72e-03, grad_scale: 16.0 +2024-07-28 11:13:12,641 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.615e+01 5.668e+01 6.375e+01 7.547e+01 1.367e+02, threshold=1.275e+02, percent-clipped=1.0 +2024-07-28 11:13:19,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=147978.66666666666, ans=0.0 +2024-07-28 11:13:23,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.58 vs. limit=15.0 +2024-07-28 11:13:26,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=147992.0, ans=0.1 +2024-07-28 11:13:34,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=148005.33333333334, ans=0.125 +2024-07-28 11:13:51,466 INFO [train.py:1114] (3/4) Epoch 11, batch 8800, loss[loss=0.1965, simple_loss=0.2857, pruned_loss=0.05363, over 4937.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2827, pruned_loss=0.05385, over 937738.95 frames. 
], batch size: 14, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:13:51,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=148032.0, ans=0.02 +2024-07-28 11:13:57,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=148045.33333333334, ans=0.125 +2024-07-28 11:13:57,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.78 vs. limit=15.0 +2024-07-28 11:13:59,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=148045.33333333334, ans=0.125 +2024-07-28 11:14:32,886 INFO [train.py:1114] (3/4) Epoch 11, batch 8850, loss[loss=0.2211, simple_loss=0.3203, pruned_loss=0.06101, over 4564.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2823, pruned_loss=0.05421, over 932444.38 frames. ], batch size: 21, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:14:34,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=148098.66666666666, ans=15.0 +2024-07-28 11:14:34,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.683e+01 6.364e+01 7.220e+01 1.136e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 11:14:40,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=15.0 +2024-07-28 11:14:41,233 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:14:47,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=148125.33333333334, ans=0.2 +2024-07-28 11:14:50,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=148125.33333333334, ans=0.025 +2024-07-28 11:15:01,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=148152.0, ans=0.125 +2024-07-28 11:15:05,037 INFO [train.py:1114] (3/4) Epoch 11, batch 8900, loss[loss=0.1873, simple_loss=0.2673, pruned_loss=0.05366, over 4936.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2832, pruned_loss=0.05497, over 930074.79 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:15:14,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.14 vs. limit=15.0 +2024-07-28 11:15:35,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.52 vs. limit=22.5 +2024-07-28 11:15:37,387 INFO [train.py:1114] (3/4) Epoch 11, batch 8950, loss[loss=0.2231, simple_loss=0.3149, pruned_loss=0.06562, over 4521.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2825, pruned_loss=0.05466, over 930517.54 frames. ], batch size: 21, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:15:38,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.938e+01 5.642e+01 6.194e+01 7.205e+01 1.181e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 11:15:48,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.22 vs. 
limit=22.5 +2024-07-28 11:15:55,140 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.40 vs. limit=22.5 +2024-07-28 11:15:55,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=148258.66666666666, ans=0.0 +2024-07-28 11:16:01,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.12 vs. limit=22.5 +2024-07-28 11:16:02,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=148285.33333333334, ans=0.0 +2024-07-28 11:16:05,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148285.33333333334, ans=0.125 +2024-07-28 11:16:09,512 INFO [train.py:1114] (3/4) Epoch 11, batch 9000, loss[loss=0.1745, simple_loss=0.2686, pruned_loss=0.04022, over 4639.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2815, pruned_loss=0.05422, over 933328.35 frames. ], batch size: 12, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:16:09,512 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 11:16:21,142 INFO [train.py:1146] (3/4) Epoch 11, validation: loss=0.1703, simple_loss=0.274, pruned_loss=0.03325, over 944034.00 frames. +2024-07-28 11:16:21,143 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 11:16:27,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=148312.0, ans=0.125 +2024-07-28 11:16:27,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=148312.0, ans=0.0 +2024-07-28 11:16:31,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=148312.0, ans=0.025 +2024-07-28 11:16:31,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148312.0, ans=0.0 +2024-07-28 11:16:33,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=148325.33333333334, ans=0.0 +2024-07-28 11:16:45,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=20.38 vs. limit=22.5 +2024-07-28 11:16:50,527 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:16:53,499 INFO [train.py:1114] (3/4) Epoch 11, batch 9050, loss[loss=0.1609, simple_loss=0.2422, pruned_loss=0.03986, over 4489.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2807, pruned_loss=0.05365, over 933949.23 frames. ], batch size: 10, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:16:54,783 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.677e+01 6.450e+01 7.430e+01 1.132e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-28 11:16:56,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=148365.33333333334, ans=0.125 +2024-07-28 11:16:58,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.51 vs. 
limit=22.5 +2024-07-28 11:17:01,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=148378.66666666666, ans=0.0 +2024-07-28 11:17:25,680 INFO [train.py:1114] (3/4) Epoch 11, batch 9100, loss[loss=0.1886, simple_loss=0.2715, pruned_loss=0.05281, over 4938.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2805, pruned_loss=0.05329, over 937057.59 frames. ], batch size: 14, lr: 6.71e-03, grad_scale: 32.0 +2024-07-28 11:17:28,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148432.0, ans=0.1 +2024-07-28 11:17:36,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148445.33333333334, ans=0.0 +2024-07-28 11:17:37,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=148458.66666666666, ans=0.04949747468305833 +2024-07-28 11:17:37,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148458.66666666666, ans=0.125 +2024-07-28 11:17:40,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=148458.66666666666, ans=0.0 +2024-07-28 11:17:54,858 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:17:57,377 INFO [train.py:1114] (3/4) Epoch 11, batch 9150, loss[loss=0.2041, simple_loss=0.2916, pruned_loss=0.05831, over 4805.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2817, pruned_loss=0.05389, over 936019.10 frames. ], batch size: 14, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:17:58,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.452e+01 6.035e+01 6.657e+01 8.728e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 11:18:09,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=12.0 +2024-07-28 11:18:11,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=148525.33333333334, ans=0.07 +2024-07-28 11:18:21,692 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:18:22,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=148538.66666666666, ans=0.07 +2024-07-28 11:18:31,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=148565.33333333334, ans=0.1 +2024-07-28 11:18:32,056 INFO [train.py:1114] (3/4) Epoch 11, batch 9200, loss[loss=0.1969, simple_loss=0.27, pruned_loss=0.0619, over 4844.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2822, pruned_loss=0.05448, over 938007.00 frames. 
], batch size: 12, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:18:34,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=148565.33333333334, ans=0.0 +2024-07-28 11:18:38,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=148578.66666666666, ans=0.025 +2024-07-28 11:18:46,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148578.66666666666, ans=0.1 +2024-07-28 11:19:06,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=148618.66666666666, ans=0.0 +2024-07-28 11:19:08,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=148618.66666666666, ans=0.2 +2024-07-28 11:19:10,156 INFO [train.py:1114] (3/4) Epoch 11, batch 9250, loss[loss=0.1945, simple_loss=0.2858, pruned_loss=0.0516, over 4634.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2821, pruned_loss=0.05411, over 938642.11 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:19:11,110 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:19:11,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.586e+01 5.571e+01 5.944e+01 7.071e+01 9.935e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 11:19:16,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=148645.33333333334, ans=0.125 +2024-07-28 11:19:20,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.29 vs. limit=22.5 +2024-07-28 11:19:20,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=148645.33333333334, ans=0.0 +2024-07-28 11:19:20,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148645.33333333334, ans=0.1 +2024-07-28 11:19:41,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.64 vs. limit=22.5 +2024-07-28 11:19:42,418 INFO [train.py:1114] (3/4) Epoch 11, batch 9300, loss[loss=0.2056, simple_loss=0.2814, pruned_loss=0.06492, over 4770.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2817, pruned_loss=0.05421, over 938091.50 frames. ], batch size: 12, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:19:42,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=15.0 +2024-07-28 11:19:50,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.06 vs. limit=15.0 +2024-07-28 11:20:04,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148738.66666666666, ans=0.125 +2024-07-28 11:20:14,404 INFO [train.py:1114] (3/4) Epoch 11, batch 9350, loss[loss=0.2196, simple_loss=0.281, pruned_loss=0.07911, over 4793.00 frames. 
], tot_loss[loss=0.1975, simple_loss=0.2841, pruned_loss=0.05541, over 935488.80 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:20:14,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=148765.33333333334, ans=0.125 +2024-07-28 11:20:15,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.532e+01 6.030e+01 6.752e+01 9.117e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 11:20:31,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=148792.0, ans=0.125 +2024-07-28 11:20:35,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=148805.33333333334, ans=0.1 +2024-07-28 11:20:45,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.83 vs. limit=22.5 +2024-07-28 11:20:45,875 INFO [train.py:1114] (3/4) Epoch 11, batch 9400, loss[loss=0.1883, simple_loss=0.2703, pruned_loss=0.05311, over 4690.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2833, pruned_loss=0.0551, over 933355.86 frames. ], batch size: 13, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:20:56,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=148845.33333333334, ans=0.125 +2024-07-28 11:21:06,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=148872.0, ans=0.02 +2024-07-28 11:21:07,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.03 vs. limit=22.5 +2024-07-28 11:21:08,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=148872.0, ans=0.125 +2024-07-28 11:21:10,598 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:21:15,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=148885.33333333334, ans=0.2 +2024-07-28 11:21:18,900 INFO [train.py:1114] (3/4) Epoch 11, batch 9450, loss[loss=0.1837, simple_loss=0.2646, pruned_loss=0.05137, over 4804.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2822, pruned_loss=0.05443, over 932694.42 frames. ], batch size: 11, lr: 6.70e-03, grad_scale: 32.0 +2024-07-28 11:21:20,120 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.380e+01 5.492e+01 5.833e+01 6.605e+01 9.079e+01, threshold=1.167e+02, percent-clipped=0.0 +2024-07-28 11:21:32,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=148925.33333333334, ans=0.125 +2024-07-28 11:21:50,265 INFO [train.py:1114] (3/4) Epoch 11, batch 9500, loss[loss=0.177, simple_loss=0.2591, pruned_loss=0.0474, over 4700.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2824, pruned_loss=0.05407, over 934654.59 frames. 
], batch size: 12, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:21:53,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=148965.33333333334, ans=0.0 +2024-07-28 11:21:53,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=148965.33333333334, ans=6.0 +2024-07-28 11:21:54,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=148965.33333333334, ans=0.2 +2024-07-28 11:22:02,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=148992.0, ans=0.2 +2024-07-28 11:22:10,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=149005.33333333334, ans=0.0 +2024-07-28 11:22:22,539 INFO [train.py:1114] (3/4) Epoch 11, batch 9550, loss[loss=0.1697, simple_loss=0.2573, pruned_loss=0.04106, over 4774.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2843, pruned_loss=0.05508, over 932234.02 frames. ], batch size: 12, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:22:23,736 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.584e+01 6.073e+01 6.801e+01 9.660e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 11:22:34,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=149058.66666666666, ans=0.95 +2024-07-28 11:22:35,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=149058.66666666666, ans=0.125 +2024-07-28 11:22:35,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149058.66666666666, ans=0.1 +2024-07-28 11:22:44,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=149072.0, ans=0.125 +2024-07-28 11:22:50,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=149085.33333333334, ans=0.2 +2024-07-28 11:22:53,736 INFO [train.py:1114] (3/4) Epoch 11, batch 9600, loss[loss=0.2574, simple_loss=0.3207, pruned_loss=0.09707, over 3384.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2842, pruned_loss=0.05474, over 931259.44 frames. ], batch size: 35, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:23:09,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149125.33333333334, ans=0.125 +2024-07-28 11:23:12,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=149138.66666666666, ans=0.125 +2024-07-28 11:23:21,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=15.0 +2024-07-28 11:23:22,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.82 vs. 
limit=15.0 +2024-07-28 11:23:23,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=149152.0, ans=0.0 +2024-07-28 11:23:24,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=149165.33333333334, ans=0.0 +2024-07-28 11:23:25,445 INFO [train.py:1114] (3/4) Epoch 11, batch 9650, loss[loss=0.1675, simple_loss=0.2599, pruned_loss=0.0376, over 4857.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2849, pruned_loss=0.05545, over 926897.75 frames. ], batch size: 16, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:23:26,661 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.812e+01 6.472e+01 7.420e+01 1.092e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 11:23:32,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-28 11:23:36,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149178.66666666666, ans=0.0 +2024-07-28 11:23:38,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=149192.0, ans=0.125 +2024-07-28 11:23:39,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=149192.0, ans=15.0 +2024-07-28 11:23:41,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=149192.0, ans=0.0 +2024-07-28 11:23:44,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=149205.33333333334, ans=10.0 +2024-07-28 11:23:47,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.45 vs. limit=12.0 +2024-07-28 11:24:02,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=149218.66666666666, ans=0.125 +2024-07-28 11:24:03,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=149218.66666666666, ans=0.125 +2024-07-28 11:24:05,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=149218.66666666666, ans=0.04949747468305833 +2024-07-28 11:24:08,279 INFO [train.py:1114] (3/4) Epoch 11, batch 9700, loss[loss=0.224, simple_loss=0.3005, pruned_loss=0.07377, over 4414.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2848, pruned_loss=0.05571, over 924924.56 frames. ], batch size: 26, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:24:39,668 INFO [train.py:1114] (3/4) Epoch 11, batch 9750, loss[loss=0.2067, simple_loss=0.3075, pruned_loss=0.05293, over 4682.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2836, pruned_loss=0.05503, over 925448.71 frames. 
], batch size: 15, lr: 6.69e-03, grad_scale: 32.0 +2024-07-28 11:24:40,895 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.600e+01 6.430e+01 7.398e+01 1.191e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 11:24:47,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.36 vs. limit=15.0 +2024-07-28 11:24:58,870 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:25:13,043 INFO [train.py:1114] (3/4) Epoch 11, batch 9800, loss[loss=0.1541, simple_loss=0.2525, pruned_loss=0.02786, over 4705.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2831, pruned_loss=0.05511, over 925237.32 frames. ], batch size: 12, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:25:15,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=149365.33333333334, ans=0.02 +2024-07-28 11:25:16,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149365.33333333334, ans=0.125 +2024-07-28 11:25:25,337 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:25:31,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=149405.33333333334, ans=0.0 +2024-07-28 11:25:38,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=149418.66666666666, ans=0.125 +2024-07-28 11:25:39,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=149418.66666666666, ans=0.0 +2024-07-28 11:25:43,720 INFO [train.py:1114] (3/4) Epoch 11, batch 9850, loss[loss=0.2054, simple_loss=0.2854, pruned_loss=0.06274, over 4889.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2832, pruned_loss=0.05509, over 927710.38 frames. ], batch size: 15, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:25:44,877 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.760e+01 6.754e+01 7.559e+01 1.117e+02, threshold=1.351e+02, percent-clipped=0.0 +2024-07-28 11:25:45,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=149432.0, ans=0.0 +2024-07-28 11:25:45,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=149432.0, ans=0.025 +2024-07-28 11:25:48,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.17 vs. limit=15.0 +2024-07-28 11:25:58,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=149458.66666666666, ans=0.0 +2024-07-28 11:26:00,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.91 vs. 
limit=15.0 +2024-07-28 11:26:14,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=149498.66666666666, ans=0.125 +2024-07-28 11:26:14,540 INFO [train.py:1114] (3/4) Epoch 11, batch 9900, loss[loss=0.2451, simple_loss=0.3343, pruned_loss=0.07802, over 4849.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.284, pruned_loss=0.05545, over 926809.79 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:26:14,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 11:26:22,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=149512.0, ans=0.2 +2024-07-28 11:26:23,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=149512.0, ans=0.0 +2024-07-28 11:26:31,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=149525.33333333334, ans=0.125 +2024-07-28 11:26:33,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=149538.66666666666, ans=0.125 +2024-07-28 11:26:34,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=149538.66666666666, ans=0.125 +2024-07-28 11:26:40,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.59 vs. limit=15.0 +2024-07-28 11:26:42,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149552.0, ans=0.1 +2024-07-28 11:26:43,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=149552.0, ans=0.2 +2024-07-28 11:26:45,834 INFO [train.py:1114] (3/4) Epoch 11, batch 9950, loss[loss=0.172, simple_loss=0.2483, pruned_loss=0.04784, over 4513.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2845, pruned_loss=0.05611, over 929287.68 frames. 
], batch size: 10, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:26:47,369 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.729e+01 5.848e+01 6.460e+01 7.731e+01 1.083e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 11:26:53,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=149578.66666666666, ans=0.0 +2024-07-28 11:27:06,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=149605.33333333334, ans=0.125 +2024-07-28 11:27:11,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=149618.66666666666, ans=0.125 +2024-07-28 11:27:11,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=149618.66666666666, ans=0.95 +2024-07-28 11:27:13,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=149618.66666666666, ans=0.0 +2024-07-28 11:27:17,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=15.0 +2024-07-28 11:27:18,158 INFO [train.py:1114] (3/4) Epoch 11, batch 10000, loss[loss=0.2385, simple_loss=0.3146, pruned_loss=0.08119, over 4641.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2869, pruned_loss=0.0569, over 926605.79 frames. ], batch size: 16, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:27:20,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=149632.0, ans=0.125 +2024-07-28 11:27:22,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=149632.0, ans=0.02 +2024-07-28 11:27:23,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=149632.0, ans=0.0 +2024-07-28 11:27:26,908 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.34 vs. limit=15.0 +2024-07-28 11:27:30,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=149658.66666666666, ans=0.125 +2024-07-28 11:27:47,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=149685.33333333334, ans=0.125 +2024-07-28 11:27:50,528 INFO [train.py:1114] (3/4) Epoch 11, batch 10050, loss[loss=0.2351, simple_loss=0.3098, pruned_loss=0.08024, over 3671.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2905, pruned_loss=0.0588, over 915626.17 frames. 
], batch size: 36, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:27:51,875 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.828e+01 6.328e+01 6.971e+01 1.016e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 11:27:57,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=149712.0, ans=0.025 +2024-07-28 11:27:58,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=149712.0, ans=0.95 +2024-07-28 11:27:58,357 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.88 vs. limit=15.0 +2024-07-28 11:28:01,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149712.0, ans=0.1 +2024-07-28 11:28:05,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=149725.33333333334, ans=0.2 +2024-07-28 11:28:13,978 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:28:18,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.71 vs. limit=22.5 +2024-07-28 11:28:23,776 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 11:28:24,934 INFO [train.py:1114] (3/4) Epoch 11, batch 10100, loss[loss=0.246, simple_loss=0.319, pruned_loss=0.0865, over 3365.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.295, pruned_loss=0.06372, over 862731.06 frames. ], batch size: 35, lr: 6.68e-03, grad_scale: 32.0 +2024-07-28 11:28:27,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=149765.33333333334, ans=0.025 +2024-07-28 11:28:34,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.02 vs. limit=22.5 +2024-07-28 11:28:38,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=149792.0, ans=0.025 +2024-07-28 11:28:42,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=149792.0, ans=0.0 +2024-07-28 11:28:57,733 INFO [train.py:1114] (3/4) Epoch 11, batch 10150, loss[loss=0.2081, simple_loss=0.287, pruned_loss=0.0646, over 3664.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2983, pruned_loss=0.06742, over 820661.92 frames. ], batch size: 35, lr: 6.67e-03, grad_scale: 32.0 +2024-07-28 11:28:58,997 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.659e+01 6.801e+01 7.178e+01 7.670e+01 2.138e+02, threshold=1.436e+02, percent-clipped=2.0 +2024-07-28 11:29:14,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.81 vs. limit=10.0 +2024-07-28 11:29:28,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=149885.33333333334, ans=0.125 +2024-07-28 11:29:29,436 INFO [train.py:1114] (3/4) Epoch 11, batch 10200, loss[loss=0.2636, simple_loss=0.3303, pruned_loss=0.09841, over 3207.00 frames. 
], tot_loss[loss=0.2213, simple_loss=0.3012, pruned_loss=0.07071, over 789421.55 frames. ], batch size: 36, lr: 6.67e-03, grad_scale: 32.0 +2024-07-28 11:30:30,087 INFO [train.py:1114] (3/4) Epoch 12, batch 0, loss[loss=0.1792, simple_loss=0.2707, pruned_loss=0.04388, over 4855.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2707, pruned_loss=0.04388, over 4855.00 frames. ], batch size: 12, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:30:30,088 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 11:30:34,875 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8263, 5.1802, 5.0866, 5.6176], device='cuda:3') +2024-07-28 11:30:46,883 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.8289, 4.7020, 4.3342, 4.4852], device='cuda:3') +2024-07-28 11:30:49,827 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.171, simple_loss=0.2765, pruned_loss=0.03276, over 944034.00 frames. +2024-07-28 11:31:06,824 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 11:31:22,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=149954.66666666666, ans=0.04949747468305833 +2024-07-28 11:31:23,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=149954.66666666666, ans=0.1 +2024-07-28 11:31:27,527 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 6.296e+01 6.981e+01 7.560e+01 1.062e+02, threshold=1.396e+02, percent-clipped=0.0 +2024-07-28 11:31:32,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=149968.0, ans=0.025 +2024-07-28 11:31:40,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149994.66666666666, ans=0.1 +2024-07-28 11:31:40,986 INFO [train.py:1114] (3/4) Epoch 12, batch 50, loss[loss=0.1832, simple_loss=0.2574, pruned_loss=0.05444, over 4608.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2868, pruned_loss=0.05546, over 206844.89 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:31:44,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=149994.66666666666, ans=0.2 +2024-07-28 11:31:47,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=149994.66666666666, ans=0.1 +2024-07-28 11:31:49,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=150008.0, ans=0.125 +2024-07-28 11:32:02,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150021.33333333334, ans=0.125 +2024-07-28 11:32:14,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=150048.0, ans=0.125 +2024-07-28 11:32:17,178 INFO [train.py:1114] (3/4) Epoch 12, batch 100, loss[loss=0.1768, simple_loss=0.2678, pruned_loss=0.04291, over 4640.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2834, pruned_loss=0.0527, over 365351.18 frames. 
], batch size: 12, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:32:35,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=150088.0, ans=0.0 +2024-07-28 11:32:38,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.50 vs. limit=15.0 +2024-07-28 11:32:39,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.212e+01 5.482e+01 5.996e+01 6.450e+01 1.001e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 11:32:43,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150101.33333333334, ans=0.1 +2024-07-28 11:32:44,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=150101.33333333334, ans=0.2 +2024-07-28 11:32:51,994 INFO [train.py:1114] (3/4) Epoch 12, batch 150, loss[loss=0.1706, simple_loss=0.254, pruned_loss=0.04364, over 4611.00 frames. ], tot_loss[loss=0.191, simple_loss=0.28, pruned_loss=0.05097, over 493848.24 frames. ], batch size: 11, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:33:06,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=150154.66666666666, ans=0.95 +2024-07-28 11:33:10,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=150154.66666666666, ans=0.125 +2024-07-28 11:33:11,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.17 vs. limit=15.0 +2024-07-28 11:33:12,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=150154.66666666666, ans=0.0 +2024-07-28 11:33:14,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=150168.0, ans=0.2 +2024-07-28 11:33:22,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150181.33333333334, ans=0.1 +2024-07-28 11:33:29,117 INFO [train.py:1114] (3/4) Epoch 12, batch 200, loss[loss=0.1903, simple_loss=0.2877, pruned_loss=0.04647, over 4460.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2796, pruned_loss=0.05184, over 593411.05 frames. ], batch size: 21, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:33:32,866 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.46 vs. limit=15.0 +2024-07-28 11:33:35,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=150208.0, ans=0.125 +2024-07-28 11:33:36,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.40 vs. 
limit=22.5 +2024-07-28 11:33:48,810 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.686e+01 5.805e+01 6.723e+01 7.880e+01 1.326e+02, threshold=1.345e+02, percent-clipped=1.0 +2024-07-28 11:33:58,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=150248.0, ans=0.125 +2024-07-28 11:34:02,289 INFO [train.py:1114] (3/4) Epoch 12, batch 250, loss[loss=0.2094, simple_loss=0.2981, pruned_loss=0.0603, over 4652.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2804, pruned_loss=0.05282, over 670279.09 frames. ], batch size: 16, lr: 6.39e-03, grad_scale: 32.0 +2024-07-28 11:34:03,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.89 vs. limit=15.0 +2024-07-28 11:34:06,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.71 vs. limit=22.5 +2024-07-28 11:34:06,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.58 vs. limit=15.0 +2024-07-28 11:34:17,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=150288.0, ans=0.0 +2024-07-28 11:34:21,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=150288.0, ans=0.2 +2024-07-28 11:34:29,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.50 vs. limit=22.5 +2024-07-28 11:34:31,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150314.66666666666, ans=0.1 +2024-07-28 11:34:35,841 INFO [train.py:1114] (3/4) Epoch 12, batch 300, loss[loss=0.2099, simple_loss=0.3059, pruned_loss=0.05694, over 4807.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2799, pruned_loss=0.05251, over 730147.61 frames. ], batch size: 15, lr: 6.38e-03, grad_scale: 64.0 +2024-07-28 11:34:37,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=150328.0, ans=0.125 +2024-07-28 11:34:53,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=150354.66666666666, ans=0.125 +2024-07-28 11:34:56,210 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.570e+01 6.129e+01 6.973e+01 1.064e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 11:34:57,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=150368.0, ans=0.1 +2024-07-28 11:35:01,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=150368.0, ans=0.125 +2024-07-28 11:35:07,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.11 vs. limit=12.0 +2024-07-28 11:35:09,571 INFO [train.py:1114] (3/4) Epoch 12, batch 350, loss[loss=0.1801, simple_loss=0.2625, pruned_loss=0.04888, over 4944.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2812, pruned_loss=0.0532, over 776335.57 frames. 
], batch size: 12, lr: 6.38e-03, grad_scale: 64.0
+2024-07-28 11:35:28,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=150421.33333333334, ans=0.0
+2024-07-28 11:35:30,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=150434.66666666666, ans=0.125
+2024-07-28 11:35:37,182 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.93 vs. limit=15.0
+2024-07-28 11:35:42,765 INFO [train.py:1114] (3/4) Epoch 12, batch 400, loss[loss=0.1979, simple_loss=0.2838, pruned_loss=0.05601, over 4700.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2806, pruned_loss=0.05263, over 813705.60 frames. ], batch size: 13, lr: 6.38e-03, grad_scale: 64.0
+2024-07-28 11:36:00,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.46 vs. limit=15.0
+2024-07-28 11:36:04,834 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+01 5.661e+01 6.256e+01 7.189e+01 1.032e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 11:36:05,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=150501.33333333334, ans=0.125
+2024-07-28 11:36:09,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=150501.33333333334, ans=0.0
+2024-07-28 11:36:18,206 INFO [train.py:1114] (3/4) Epoch 12, batch 450, loss[loss=0.1646, simple_loss=0.271, pruned_loss=0.02906, over 4633.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2801, pruned_loss=0.05241, over 838971.57 frames. ], batch size: 13, lr: 6.38e-03, grad_scale: 64.0
+2024-07-28 11:36:24,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=150541.33333333334, ans=0.125
+2024-07-28 11:36:32,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.28 vs. limit=15.0
+2024-07-28 11:36:43,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=150568.0, ans=0.025
+2024-07-28 11:36:50,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=150581.33333333334, ans=0.125
+2024-07-28 11:36:50,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=150581.33333333334, ans=0.125
+2024-07-28 11:36:51,739 INFO [train.py:1114] (3/4) Epoch 12, batch 500, loss[loss=0.2206, simple_loss=0.3238, pruned_loss=0.05873, over 4687.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2799, pruned_loss=0.05234, over 861560.25 frames. ], batch size: 15, lr: 6.38e-03, grad_scale: 64.0
+2024-07-28 11:36:55,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=150594.66666666666, ans=0.0
+2024-07-28 11:36:58,529 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:37:14,897 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.527e+01 6.124e+01 7.195e+01 1.120e+02, threshold=1.225e+02, percent-clipped=0.0
+2024-07-28 11:37:17,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=150634.66666666666, ans=0.125
+2024-07-28 11:37:24,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150648.0, ans=0.125
+2024-07-28 11:37:29,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0
+2024-07-28 11:37:30,003 INFO [train.py:1114] (3/4) Epoch 12, batch 550, loss[loss=0.2077, simple_loss=0.3068, pruned_loss=0.05429, over 4890.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2795, pruned_loss=0.05195, over 877902.23 frames. ], batch size: 18, lr: 6.38e-03, grad_scale: 64.0
+2024-07-28 11:37:31,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.38 vs. limit=15.0
+2024-07-28 11:37:33,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=150661.33333333334, ans=22.5
+2024-07-28 11:37:37,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150674.66666666666, ans=0.0
+2024-07-28 11:37:44,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=150688.0, ans=0.07
+2024-07-28 11:37:50,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.15 vs. limit=12.0
+2024-07-28 11:37:59,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=150714.66666666666, ans=0.125
+2024-07-28 11:38:03,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=150728.0, ans=0.025
+2024-07-28 11:38:08,495 INFO [train.py:1114] (3/4) Epoch 12, batch 600, loss[loss=0.2064, simple_loss=0.2898, pruned_loss=0.0615, over 4631.00 frames. ], tot_loss[loss=0.192, simple_loss=0.28, pruned_loss=0.05201, over 892540.64 frames. ], batch size: 16, lr: 6.38e-03, grad_scale: 64.0
+2024-07-28 11:38:23,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=150754.66666666666, ans=0.07
+2024-07-28 11:38:30,306 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.714e+01 6.286e+01 7.173e+01 1.255e+02, threshold=1.257e+02, percent-clipped=1.0
+2024-07-28 11:38:42,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=150768.0, ans=0.2
+2024-07-28 11:38:51,318 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:38:52,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=150794.66666666666, ans=0.0
+2024-07-28 11:38:53,097 INFO [train.py:1114] (3/4) Epoch 12, batch 650, loss[loss=0.2013, simple_loss=0.2913, pruned_loss=0.05562, over 4762.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2799, pruned_loss=0.05241, over 903967.13 frames. ], batch size: 13, lr: 6.37e-03, grad_scale: 64.0
+2024-07-28 11:38:53,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=150794.66666666666, ans=0.1
+2024-07-28 11:39:01,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=150794.66666666666, ans=0.125
+2024-07-28 11:39:02,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=150794.66666666666, ans=0.1
+2024-07-28 11:39:04,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=150794.66666666666, ans=0.125
+2024-07-28 11:39:07,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=150808.0, ans=0.125
+2024-07-28 11:39:19,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=150821.33333333334, ans=0.2
+2024-07-28 11:39:20,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=150834.66666666666, ans=0.5
+2024-07-28 11:39:24,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=150834.66666666666, ans=0.125
+2024-07-28 11:39:31,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=150848.0, ans=0.0
+2024-07-28 11:39:32,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150848.0, ans=0.125
+2024-07-28 11:39:33,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=150848.0, ans=0.125
+2024-07-28 11:39:37,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=150848.0, ans=0.125
+2024-07-28 11:39:37,632 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.04 vs. limit=15.0
+2024-07-28 11:39:40,043 INFO [train.py:1114] (3/4) Epoch 12, batch 700, loss[loss=0.2123, simple_loss=0.2981, pruned_loss=0.06329, over 4650.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.28, pruned_loss=0.05214, over 912304.62 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 64.0
+2024-07-28 11:39:50,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=150874.66666666666, ans=0.125
+2024-07-28 11:40:00,048 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.230e+01 5.630e+01 6.208e+01 7.148e+01 1.083e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 11:40:01,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.80 vs. limit=15.0
+2024-07-28 11:40:13,769 INFO [train.py:1114] (3/4) Epoch 12, batch 750, loss[loss=0.2163, simple_loss=0.2982, pruned_loss=0.06718, over 4693.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2797, pruned_loss=0.05199, over 918726.40 frames. ], batch size: 13, lr: 6.37e-03, grad_scale: 64.0
+2024-07-28 11:40:16,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=150928.0, ans=15.0
+2024-07-28 11:40:19,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=150928.0, ans=0.2
+2024-07-28 11:40:23,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=150941.33333333334, ans=0.0
+2024-07-28 11:40:28,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=150954.66666666666, ans=0.125
+2024-07-28 11:40:31,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=150954.66666666666, ans=0.2
+2024-07-28 11:40:32,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=150954.66666666666, ans=0.125
+2024-07-28 11:40:47,002 INFO [train.py:1114] (3/4) Epoch 12, batch 800, loss[loss=0.1661, simple_loss=0.2414, pruned_loss=0.0454, over 4856.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2791, pruned_loss=0.05215, over 924029.83 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 64.0
+2024-07-28 11:40:48,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=150994.66666666666, ans=0.035
+2024-07-28 11:40:51,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=150994.66666666666, ans=0.125
+2024-07-28 11:40:57,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=151008.0, ans=0.015
+2024-07-28 11:40:58,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=151008.0, ans=0.0
+2024-07-28 11:40:59,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.46 vs. limit=15.0
+2024-07-28 11:41:06,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.726e+01 6.208e+01 6.822e+01 1.017e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 11:41:07,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.33 vs. limit=15.0
+2024-07-28 11:41:14,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151048.0, ans=0.1
+2024-07-28 11:41:20,184 INFO [train.py:1114] (3/4) Epoch 12, batch 850, loss[loss=0.1498, simple_loss=0.2463, pruned_loss=0.02669, over 4646.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2791, pruned_loss=0.05192, over 927903.26 frames. ], batch size: 14, lr: 6.37e-03, grad_scale: 64.0
+2024-07-28 11:41:20,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151061.33333333334, ans=0.1
+2024-07-28 11:41:31,296 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:41:37,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=151088.0, ans=0.04949747468305833
+2024-07-28 11:41:38,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151088.0, ans=0.125
+2024-07-28 11:41:51,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151114.66666666666, ans=0.1
+2024-07-28 11:41:55,113 INFO [train.py:1114] (3/4) Epoch 12, batch 900, loss[loss=0.1833, simple_loss=0.2562, pruned_loss=0.05517, over 4852.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2792, pruned_loss=0.05186, over 929120.45 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0
+2024-07-28 11:41:59,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=151128.0, ans=0.1
+2024-07-28 11:42:15,735 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.655e+01 6.355e+01 7.195e+01 9.950e+01, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 11:42:27,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=151181.33333333334, ans=0.0
+2024-07-28 11:42:28,523 INFO [train.py:1114] (3/4) Epoch 12, batch 950, loss[loss=0.221, simple_loss=0.3027, pruned_loss=0.06961, over 4777.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.28, pruned_loss=0.05226, over 930806.13 frames. ], batch size: 12, lr: 6.37e-03, grad_scale: 32.0
+2024-07-28 11:42:29,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=151194.66666666666, ans=0.125
+2024-07-28 11:42:30,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=151194.66666666666, ans=0.0
+2024-07-28 11:42:47,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=151221.33333333334, ans=0.2
+2024-07-28 11:42:48,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151234.66666666666, ans=0.1
+2024-07-28 11:42:55,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=151248.0, ans=0.0
+2024-07-28 11:43:02,361 INFO [train.py:1114] (3/4) Epoch 12, batch 1000, loss[loss=0.1659, simple_loss=0.2532, pruned_loss=0.0393, over 4959.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2811, pruned_loss=0.05264, over 930435.36 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:43:03,256 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:43:04,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=151261.33333333334, ans=0.0
+2024-07-28 11:43:08,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.61 vs. limit=15.0
+2024-07-28 11:43:10,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=151274.66666666666, ans=0.125
+2024-07-28 11:43:11,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=151274.66666666666, ans=0.125
+2024-07-28 11:43:25,150 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.535e+01 6.224e+01 7.277e+01 1.100e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-28 11:43:35,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=151314.66666666666, ans=0.0
+2024-07-28 11:43:37,739 INFO [train.py:1114] (3/4) Epoch 12, batch 1050, loss[loss=0.1843, simple_loss=0.2712, pruned_loss=0.04872, over 4881.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2801, pruned_loss=0.05231, over 932935.71 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:43:39,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151328.0, ans=0.125
+2024-07-28 11:43:47,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151341.33333333334, ans=0.0
+2024-07-28 11:43:52,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=151341.33333333334, ans=0.2
+2024-07-28 11:44:03,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=151368.0, ans=0.2
+2024-07-28 11:44:17,157 INFO [train.py:1114] (3/4) Epoch 12, batch 1100, loss[loss=0.1973, simple_loss=0.2746, pruned_loss=0.06004, over 4897.00 frames. ], tot_loss[loss=0.192, simple_loss=0.28, pruned_loss=0.05202, over 935003.02 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:44:18,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=151394.66666666666, ans=0.125
+2024-07-28 11:44:20,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=151394.66666666666, ans=0.2
+2024-07-28 11:44:24,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=151408.0, ans=0.0
+2024-07-28 11:44:31,607 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.15 vs. limit=15.0
+2024-07-28 11:44:33,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=151421.33333333334, ans=0.125
+2024-07-28 11:44:34,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=151421.33333333334, ans=0.025
+2024-07-28 11:44:38,702 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.539e+01 6.009e+01 6.753e+01 8.123e+01, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 11:44:41,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=151434.66666666666, ans=0.125
+2024-07-28 11:44:43,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=151434.66666666666, ans=0.125
+2024-07-28 11:44:45,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=151434.66666666666, ans=0.09899494936611666
+2024-07-28 11:44:50,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=151448.0, ans=0.09899494936611666
+2024-07-28 11:44:53,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=151461.33333333334, ans=0.025
+2024-07-28 11:44:53,941 INFO [train.py:1114] (3/4) Epoch 12, batch 1150, loss[loss=0.1743, simple_loss=0.2602, pruned_loss=0.04418, over 4895.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2792, pruned_loss=0.05187, over 934812.98 frames. ], batch size: 13, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:44:54,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=151461.33333333334, ans=0.125
+2024-07-28 11:44:55,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=151461.33333333334, ans=0.2
+2024-07-28 11:44:58,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=151461.33333333334, ans=0.125
+2024-07-28 11:45:03,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=151474.66666666666, ans=0.2
+2024-07-28 11:45:04,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=151474.66666666666, ans=0.125
+2024-07-28 11:45:05,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=151474.66666666666, ans=0.125
+2024-07-28 11:45:06,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=151474.66666666666, ans=0.0
+2024-07-28 11:45:07,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=151488.0, ans=0.125
+2024-07-28 11:45:28,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.83 vs. limit=15.0
+2024-07-28 11:45:28,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=151514.66666666666, ans=0.0
+2024-07-28 11:45:30,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=151514.66666666666, ans=0.125
+2024-07-28 11:45:36,566 INFO [train.py:1114] (3/4) Epoch 12, batch 1200, loss[loss=0.2455, simple_loss=0.3419, pruned_loss=0.07457, over 4870.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2799, pruned_loss=0.05194, over 933735.57 frames. ], batch size: 14, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:45:36,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151528.0, ans=0.1
+2024-07-28 11:45:53,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=151554.66666666666, ans=0.1
+2024-07-28 11:45:56,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=151554.66666666666, ans=0.025
+2024-07-28 11:45:57,707 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.539e+01 6.207e+01 7.047e+01 1.080e+02, threshold=1.241e+02, percent-clipped=0.0
+2024-07-28 11:46:09,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.02 vs. limit=15.0
+2024-07-28 11:46:10,287 INFO [train.py:1114] (3/4) Epoch 12, batch 1250, loss[loss=0.2236, simple_loss=0.3067, pruned_loss=0.0703, over 4789.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2798, pruned_loss=0.05134, over 937654.80 frames. ], batch size: 15, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:46:10,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=151594.66666666666, ans=0.1
+2024-07-28 11:46:19,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=151608.0, ans=0.125
+2024-07-28 11:46:30,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=151634.66666666666, ans=0.125
+2024-07-28 11:46:43,309 INFO [train.py:1114] (3/4) Epoch 12, batch 1300, loss[loss=0.2095, simple_loss=0.2935, pruned_loss=0.06269, over 4772.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2796, pruned_loss=0.05157, over 938839.02 frames. ], batch size: 19, lr: 6.36e-03, grad_scale: 32.0
+2024-07-28 11:46:49,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=151674.66666666666, ans=0.5
+2024-07-28 11:47:03,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.669e+01 6.218e+01 7.134e+01 9.799e+01, threshold=1.244e+02, percent-clipped=0.0
+2024-07-28 11:47:03,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=151701.33333333334, ans=0.125
+2024-07-28 11:47:05,815 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:47:08,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=151701.33333333334, ans=0.2
+2024-07-28 11:47:16,372 INFO [train.py:1114] (3/4) Epoch 12, batch 1350, loss[loss=0.2007, simple_loss=0.2992, pruned_loss=0.05115, over 4761.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2793, pruned_loss=0.05118, over 940745.58 frames. ], batch size: 13, lr: 6.35e-03, grad_scale: 32.0
+2024-07-28 11:47:25,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.28 vs. limit=10.0
+2024-07-28 11:47:27,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=151741.33333333334, ans=0.2
+2024-07-28 11:47:32,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=151741.33333333334, ans=0.025
+2024-07-28 11:47:35,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=151754.66666666666, ans=0.0
+2024-07-28 11:47:45,970 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:47:51,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.53 vs. limit=10.0
+2024-07-28 11:47:54,336 INFO [train.py:1114] (3/4) Epoch 12, batch 1400, loss[loss=0.1691, simple_loss=0.2363, pruned_loss=0.05091, over 4707.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2785, pruned_loss=0.05091, over 942463.69 frames. ], batch size: 11, lr: 6.35e-03, grad_scale: 32.0
+2024-07-28 11:47:55,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=151794.66666666666, ans=0.0
+2024-07-28 11:47:58,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=151794.66666666666, ans=0.125
+2024-07-28 11:48:16,693 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.713e+01 6.249e+01 7.424e+01 1.107e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-28 11:48:29,489 INFO [train.py:1114] (3/4) Epoch 12, batch 1450, loss[loss=0.1947, simple_loss=0.2983, pruned_loss=0.04555, over 4660.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2787, pruned_loss=0.05109, over 942808.88 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 32.0
+2024-07-28 11:48:41,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.45 vs. limit=15.0
+2024-07-28 11:48:47,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=151888.0, ans=0.2
+2024-07-28 11:48:50,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.04 vs. limit=15.0
+2024-07-28 11:49:04,471 INFO [train.py:1114] (3/4) Epoch 12, batch 1500, loss[loss=0.1815, simple_loss=0.2827, pruned_loss=0.04013, over 4818.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2803, pruned_loss=0.05145, over 942446.16 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0
+2024-07-28 11:49:12,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=151941.33333333334, ans=0.125
+2024-07-28 11:49:15,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=151941.33333333334, ans=0.125
+2024-07-28 11:49:18,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=151941.33333333334, ans=0.125
+2024-07-28 11:49:23,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=151954.66666666666, ans=0.0
+2024-07-28 11:49:25,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=151954.66666666666, ans=0.125
+2024-07-28 11:49:29,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.582e+01 5.945e+01 6.654e+01 9.521e+01, threshold=1.189e+02, percent-clipped=0.0
+2024-07-28 11:49:33,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.75 vs. limit=15.0
+2024-07-28 11:49:59,629 INFO [train.py:1114] (3/4) Epoch 12, batch 1550, loss[loss=0.2145, simple_loss=0.3013, pruned_loss=0.06387, over 4894.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2801, pruned_loss=0.05145, over 938304.70 frames. ], batch size: 15, lr: 6.35e-03, grad_scale: 16.0
+2024-07-28 11:50:13,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=152008.0, ans=0.125
+2024-07-28 11:50:14,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=152008.0, ans=0.0
+2024-07-28 11:50:14,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=152021.33333333334, ans=0.04949747468305833
+2024-07-28 11:50:17,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.28 vs. limit=15.0
+2024-07-28 11:50:18,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.71 vs. limit=15.0
+2024-07-28 11:50:24,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=152034.66666666666, ans=0.0
+2024-07-28 11:50:32,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=152048.0, ans=0.125
+2024-07-28 11:50:35,402 INFO [train.py:1114] (3/4) Epoch 12, batch 1600, loss[loss=0.1535, simple_loss=0.2499, pruned_loss=0.02856, over 4867.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2798, pruned_loss=0.0511, over 936910.77 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0
+2024-07-28 11:50:36,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.57 vs. limit=12.0
+2024-07-28 11:50:38,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=152061.33333333334, ans=0.125
+2024-07-28 11:50:41,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=14.98 vs. limit=15.0
+2024-07-28 11:50:48,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=152074.66666666666, ans=0.0
+2024-07-28 11:50:54,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=152088.0, ans=0.0
+2024-07-28 11:50:56,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=152088.0, ans=0.07
+2024-07-28 11:50:59,239 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.721e+01 6.309e+01 7.092e+01 1.066e+02, threshold=1.262e+02, percent-clipped=0.0
+2024-07-28 11:51:11,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=152114.66666666666, ans=0.125
+2024-07-28 11:51:12,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=152114.66666666666, ans=0.0
+2024-07-28 11:51:12,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=7.09 vs. limit=10.0
+2024-07-28 11:51:15,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=152128.0, ans=0.125
+2024-07-28 11:51:16,032 INFO [train.py:1114] (3/4) Epoch 12, batch 1650, loss[loss=0.2296, simple_loss=0.321, pruned_loss=0.06906, over 4662.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2804, pruned_loss=0.05153, over 936710.00 frames. ], batch size: 14, lr: 6.35e-03, grad_scale: 32.0
+2024-07-28 11:51:28,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=152141.33333333334, ans=0.125
+2024-07-28 11:51:31,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.38 vs. limit=22.5
+2024-07-28 11:51:48,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.29 vs. limit=12.0
+2024-07-28 11:51:48,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=152154.66666666666, ans=0.0
+2024-07-28 11:52:00,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff2.min_abs, batch_count=152181.33333333334, ans=0.1
+2024-07-28 11:52:05,265 INFO [train.py:1114] (3/4) Epoch 12, batch 1700, loss[loss=0.1807, simple_loss=0.2533, pruned_loss=0.05403, over 4702.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2801, pruned_loss=0.05191, over 938703.73 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:52:21,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.91 vs. limit=15.0
+2024-07-28 11:52:25,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=152234.66666666666, ans=0.0
+2024-07-28 11:52:26,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.325e+01 5.663e+01 6.309e+01 7.408e+01 1.033e+02, threshold=1.262e+02, percent-clipped=0.0
+2024-07-28 11:52:35,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=152248.0, ans=0.0
+2024-07-28 11:52:36,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=152248.0, ans=0.125
+2024-07-28 11:52:38,673 INFO [train.py:1114] (3/4) Epoch 12, batch 1750, loss[loss=0.153, simple_loss=0.217, pruned_loss=0.04444, over 4811.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2799, pruned_loss=0.0521, over 939798.32 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:52:45,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.25 vs. limit=15.0
+2024-07-28 11:52:45,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152274.66666666666, ans=0.1
+2024-07-28 11:52:46,262 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:53:01,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=152301.33333333334, ans=0.2
+2024-07-28 11:53:12,210 INFO [train.py:1114] (3/4) Epoch 12, batch 1800, loss[loss=0.175, simple_loss=0.27, pruned_loss=0.03999, over 4637.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2797, pruned_loss=0.05222, over 940769.02 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:53:17,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=152328.0, ans=0.2
+2024-07-28 11:53:22,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=152341.33333333334, ans=0.125
+2024-07-28 11:53:28,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=11.58 vs. limit=15.0
+2024-07-28 11:53:28,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=152354.66666666666, ans=0.0
+2024-07-28 11:53:29,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=152354.66666666666, ans=0.2
+2024-07-28 11:53:35,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+01 5.693e+01 6.293e+01 7.294e+01 9.358e+01, threshold=1.259e+02, percent-clipped=0.0
+2024-07-28 11:53:38,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=152368.0, ans=0.5
+2024-07-28 11:53:45,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=152381.33333333334, ans=0.0
+2024-07-28 11:53:49,304 INFO [train.py:1114] (3/4) Epoch 12, batch 1850, loss[loss=0.2171, simple_loss=0.3041, pruned_loss=0.06505, over 4817.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2796, pruned_loss=0.05229, over 940866.28 frames. ], batch size: 14, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:53:49,363 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:53:56,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=152408.0, ans=0.1
+2024-07-28 11:54:00,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=152408.0, ans=0.0
+2024-07-28 11:54:05,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=152421.33333333334, ans=0.125
+2024-07-28 11:54:15,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=152434.66666666666, ans=0.125
+2024-07-28 11:54:21,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.65 vs. limit=15.0
+2024-07-28 11:54:23,280 INFO [train.py:1114] (3/4) Epoch 12, batch 1900, loss[loss=0.1606, simple_loss=0.258, pruned_loss=0.03164, over 4658.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2805, pruned_loss=0.05246, over 941682.38 frames. ], batch size: 14, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:54:32,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.93 vs. limit=22.5
+2024-07-28 11:54:44,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.599e+01 6.321e+01 7.441e+01 1.076e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 11:54:50,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.66 vs. limit=5.0
+2024-07-28 11:54:51,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.11 vs. limit=15.0
+2024-07-28 11:54:52,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152514.66666666666, ans=0.1
+2024-07-28 11:54:52,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=152514.66666666666, ans=0.125
+2024-07-28 11:54:53,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=152514.66666666666, ans=0.125
+2024-07-28 11:54:55,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=152528.0, ans=0.2
+2024-07-28 11:54:56,301 INFO [train.py:1114] (3/4) Epoch 12, batch 1950, loss[loss=0.1624, simple_loss=0.2551, pruned_loss=0.03486, over 4892.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2807, pruned_loss=0.05279, over 943608.55 frames. ], batch size: 13, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:55:02,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.80 vs. limit=15.0
+2024-07-28 11:55:29,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=152581.33333333334, ans=0.125
+2024-07-28 11:55:31,683 INFO [train.py:1114] (3/4) Epoch 12, batch 2000, loss[loss=0.1655, simple_loss=0.2394, pruned_loss=0.04576, over 4801.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2814, pruned_loss=0.05308, over 940907.58 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:55:32,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=152594.66666666666, ans=0.0
+2024-07-28 11:55:48,040 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.67 vs. limit=10.0
+2024-07-28 11:55:52,943 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.820e+01 6.521e+01 7.809e+01 1.085e+02, threshold=1.304e+02, percent-clipped=0.0
+2024-07-28 11:55:59,345 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=12.0
+2024-07-28 11:56:07,050 INFO [train.py:1114] (3/4) Epoch 12, batch 2050, loss[loss=0.1681, simple_loss=0.2465, pruned_loss=0.04487, over 4618.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.28, pruned_loss=0.05242, over 939331.48 frames. ], batch size: 11, lr: 6.34e-03, grad_scale: 32.0
+2024-07-28 11:56:08,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=152661.33333333334, ans=0.0
+2024-07-28 11:56:17,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=152674.66666666666, ans=0.0
+2024-07-28 11:56:27,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.17 vs. limit=12.0
+2024-07-28 11:56:28,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=152701.33333333334, ans=0.125
+2024-07-28 11:56:35,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=152714.66666666666, ans=0.0
+2024-07-28 11:56:42,392 INFO [train.py:1114] (3/4) Epoch 12, batch 2100, loss[loss=0.1947, simple_loss=0.2801, pruned_loss=0.0546, over 4756.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2791, pruned_loss=0.05181, over 941202.85 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 11:56:42,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=152728.0, ans=0.125
+2024-07-28 11:56:43,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=152728.0, ans=0.2
+2024-07-28 11:56:54,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=10.23 vs. limit=15.0
+2024-07-28 11:57:02,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=152768.0, ans=0.0
+2024-07-28 11:57:03,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.175e+01 5.578e+01 6.172e+01 6.931e+01 1.014e+02, threshold=1.234e+02, percent-clipped=0.0
+2024-07-28 11:57:13,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=152781.33333333334, ans=0.05
+2024-07-28 11:57:14,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=152781.33333333334, ans=0.125
+2024-07-28 11:57:19,552 INFO [train.py:1114] (3/4) Epoch 12, batch 2150, loss[loss=0.1814, simple_loss=0.2703, pruned_loss=0.0462, over 4896.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2799, pruned_loss=0.05177, over 944378.89 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 11:57:21,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=152794.66666666666, ans=0.025
+2024-07-28 11:57:27,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=152808.0, ans=0.1
+2024-07-28 11:57:40,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.56 vs. limit=22.5
+2024-07-28 11:57:47,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=152848.0, ans=0.0
+2024-07-28 11:57:52,752 INFO [train.py:1114] (3/4) Epoch 12, batch 2200, loss[loss=0.1979, simple_loss=0.2926, pruned_loss=0.05164, over 4814.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2795, pruned_loss=0.05158, over 943406.29 frames. ], batch size: 14, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 11:57:53,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=152861.33333333334, ans=0.07
+2024-07-28 11:57:54,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=152861.33333333334, ans=0.125
+2024-07-28 11:57:57,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=152861.33333333334, ans=0.0
+2024-07-28 11:58:01,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=12.0
+2024-07-28 11:58:02,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=152874.66666666666, ans=0.125
+2024-07-28 11:58:03,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=152874.66666666666, ans=0.125
+2024-07-28 11:58:07,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=152888.0, ans=0.025
+2024-07-28 11:58:09,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=152888.0, ans=0.125
+2024-07-28 11:58:14,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+01 5.791e+01 6.232e+01 7.216e+01 1.117e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 11:58:20,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=152914.66666666666, ans=0.1
+2024-07-28 11:58:27,781 INFO [train.py:1114] (3/4) Epoch 12, batch 2250, loss[loss=0.2128, simple_loss=0.2927, pruned_loss=0.06647, over 4690.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2795, pruned_loss=0.05169, over 941673.46 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 11:58:36,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=152941.33333333334, ans=0.0
+2024-07-28 11:58:39,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.69 vs. limit=10.0
+2024-07-28 11:58:49,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.39 vs. limit=15.0
+2024-07-28 11:58:50,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=152968.0, ans=0.0
+2024-07-28 11:59:02,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=152981.33333333334, ans=0.125
+2024-07-28 11:59:03,212 INFO [train.py:1114] (3/4) Epoch 12, batch 2300, loss[loss=0.1742, simple_loss=0.2375, pruned_loss=0.05546, over 4928.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2779, pruned_loss=0.05117, over 939104.71 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 11:59:04,026 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 11:59:04,213 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.86 vs. limit=22.5
+2024-07-28 11:59:24,486 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.617e+01 6.257e+01 7.219e+01 1.104e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 11:59:30,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=153048.0, ans=0.0
+2024-07-28 11:59:36,826 INFO [train.py:1114] (3/4) Epoch 12, batch 2350, loss[loss=0.2354, simple_loss=0.3276, pruned_loss=0.07155, over 4633.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2774, pruned_loss=0.05092, over 941406.65 frames. ], batch size: 13, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 11:59:36,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=153061.33333333334, ans=0.125
+2024-07-28 11:59:46,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.74 vs. limit=15.0
+2024-07-28 11:59:48,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=153074.66666666666, ans=0.125
+2024-07-28 11:59:48,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.80 vs. limit=12.0
+2024-07-28 11:59:50,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=153088.0, ans=0.0
+2024-07-28 11:59:53,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=153088.0, ans=0.025
+2024-07-28 11:59:54,810 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.51 vs. limit=15.0
+2024-07-28 11:59:55,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=153088.0, ans=0.0
+2024-07-28 12:00:00,922 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.99 vs. limit=6.0
+2024-07-28 12:00:04,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=153114.66666666666, ans=0.125
+2024-07-28 12:00:10,576 INFO [train.py:1114] (3/4) Epoch 12, batch 2400, loss[loss=0.1622, simple_loss=0.2581, pruned_loss=0.03317, over 4637.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.277, pruned_loss=0.05011, over 941097.38 frames. ], batch size: 12, lr: 6.33e-03, grad_scale: 32.0
+2024-07-28 12:00:31,899 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.703e+01 6.200e+01 6.966e+01 9.820e+01, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 12:00:35,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=153168.0, ans=0.125
+2024-07-28 12:00:35,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.15 vs. limit=15.0
+2024-07-28 12:00:37,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.27 vs. limit=22.5
+2024-07-28 12:00:45,551 INFO [train.py:1114] (3/4) Epoch 12, batch 2450, loss[loss=0.1809, simple_loss=0.283, pruned_loss=0.0394, over 4686.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2782, pruned_loss=0.05084, over 936427.89 frames. ], batch size: 13, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:00:46,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=153194.66666666666, ans=0.125
+2024-07-28 12:00:49,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=153194.66666666666, ans=0.07
+2024-07-28 12:00:55,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=153208.0, ans=0.125
+2024-07-28 12:00:55,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=153208.0, ans=0.125
+2024-07-28 12:00:59,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=153221.33333333334, ans=0.0
+2024-07-28 12:01:11,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=153234.66666666666, ans=0.125
+2024-07-28 12:01:13,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=153248.0, ans=0.1
+2024-07-28 12:01:13,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=153248.0, ans=0.125
+2024-07-28 12:01:18,862 INFO [train.py:1114] (3/4) Epoch 12, batch 2500, loss[loss=0.209, simple_loss=0.2982, pruned_loss=0.05989, over 4814.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2794, pruned_loss=0.05124, over 938688.26 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:01:23,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=153261.33333333334, ans=0.5
+2024-07-28 12:01:25,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=153274.66666666666, ans=0.05
+2024-07-28 12:01:40,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.56 vs. limit=22.5
+2024-07-28 12:01:41,949 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.651e+01 6.166e+01 7.013e+01 1.450e+02, threshold=1.233e+02, percent-clipped=1.0
+2024-07-28 12:01:43,772 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.46 vs. limit=15.0
+2024-07-28 12:01:47,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=153314.66666666666, ans=15.0
+2024-07-28 12:01:54,191 INFO [train.py:1114] (3/4) Epoch 12, batch 2550, loss[loss=0.1817, simple_loss=0.2575, pruned_loss=0.0529, over 4788.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2793, pruned_loss=0.05138, over 938333.40 frames. ], batch size: 11, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:02:01,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0
+2024-07-28 12:02:11,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=153354.66666666666, ans=0.0
+2024-07-28 12:02:26,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=153381.33333333334, ans=0.0
+2024-07-28 12:02:28,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=153381.33333333334, ans=0.2
+2024-07-28 12:02:30,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=153381.33333333334, ans=0.125
+2024-07-28 12:02:32,199 INFO [train.py:1114] (3/4) Epoch 12, batch 2600, loss[loss=0.146, simple_loss=0.2373, pruned_loss=0.02729, over 4901.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2791, pruned_loss=0.05111, over 937385.54 frames. ], batch size: 13, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:02:47,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=153394.66666666666, ans=0.0
+2024-07-28 12:02:47,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff2.min_abs, batch_count=153394.66666666666, ans=0.1
+2024-07-28 12:03:00,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=153421.33333333334, ans=0.125
+2024-07-28 12:03:07,392 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.682e+01 6.373e+01 7.145e+01 1.030e+02, threshold=1.275e+02, percent-clipped=0.0
+2024-07-28 12:03:22,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=153448.0, ans=0.2
+2024-07-28 12:03:23,894 INFO [train.py:1114] (3/4) Epoch 12, batch 2650, loss[loss=0.1968, simple_loss=0.2861, pruned_loss=0.05379, over 4626.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2796, pruned_loss=0.05175, over 939524.93 frames. ], batch size: 16, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:03:35,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=153474.66666666666, ans=0.125
+2024-07-28 12:03:39,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=153488.0, ans=0.2
+2024-07-28 12:03:41,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.69 vs. limit=15.0
+2024-07-28 12:03:41,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=153488.0, ans=0.125
+2024-07-28 12:03:44,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=153501.33333333334, ans=0.0
+2024-07-28 12:03:46,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=153501.33333333334, ans=0.125
+2024-07-28 12:03:50,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=153514.66666666666, ans=0.025
+2024-07-28 12:03:51,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=153514.66666666666, ans=10.0
+2024-07-28 12:03:53,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=153514.66666666666, ans=10.0
+2024-07-28 12:03:57,520 INFO [train.py:1114] (3/4) Epoch 12, batch 2700, loss[loss=0.2394, simple_loss=0.3424, pruned_loss=0.06821, over 4742.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2803, pruned_loss=0.05169, over 939539.27 frames. ], batch size: 14, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:04:00,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.54 vs. limit=15.0
+2024-07-28 12:04:04,337 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 12:04:06,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=153541.33333333334, ans=0.0
+2024-07-28 12:04:06,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0
+2024-07-28 12:04:16,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=153554.66666666666, ans=0.125
+2024-07-28 12:04:22,343 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 5.642e+01 6.087e+01 6.756e+01 9.576e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-28 12:04:23,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=153568.0, ans=0.2
+2024-07-28 12:04:23,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=18.06 vs. limit=15.0
+2024-07-28 12:04:24,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.47 vs. limit=15.0
+2024-07-28 12:04:34,301 INFO [train.py:1114] (3/4) Epoch 12, batch 2750, loss[loss=0.1907, simple_loss=0.2754, pruned_loss=0.05299, over 4717.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2792, pruned_loss=0.05163, over 939894.84 frames. ], batch size: 12, lr: 6.32e-03, grad_scale: 32.0
+2024-07-28 12:04:44,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=153608.0, ans=0.2
+2024-07-28 12:04:48,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.59 vs. limit=15.0
+2024-07-28 12:04:51,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=153621.33333333334, ans=0.0
+2024-07-28 12:04:53,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=153621.33333333334, ans=0.125
+2024-07-28 12:04:57,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=153634.66666666666, ans=0.1
+2024-07-28 12:05:10,492 INFO [train.py:1114] (3/4) Epoch 12, batch 2800, loss[loss=0.2656, simple_loss=0.3297, pruned_loss=0.1008, over 3339.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2792, pruned_loss=0.05162, over 937379.46 frames. ], batch size: 35, lr: 6.31e-03, grad_scale: 32.0
+2024-07-28 12:05:13,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=153661.33333333334, ans=0.125
+2024-07-28 12:05:31,814 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.464e+01 6.125e+01 7.070e+01 1.105e+02, threshold=1.225e+02, percent-clipped=0.0
+2024-07-28 12:05:38,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=153714.66666666666, ans=0.025
+2024-07-28 12:05:41,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.10 vs. limit=10.0
+2024-07-28 12:05:41,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=153714.66666666666, ans=0.2
+2024-07-28 12:05:43,861 INFO [train.py:1114] (3/4) Epoch 12, batch 2850, loss[loss=0.2168, simple_loss=0.2952, pruned_loss=0.0692, over 4968.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2796, pruned_loss=0.05162, over 935890.79 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0
+2024-07-28 12:06:03,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=153768.0, ans=0.025
+2024-07-28 12:06:06,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=153768.0, ans=0.1
+2024-07-28 12:06:16,605 INFO [train.py:1114] (3/4) Epoch 12, batch 2900, loss[loss=0.1911, simple_loss=0.2868, pruned_loss=0.04772, over 4828.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2806, pruned_loss=0.05172, over 939626.83 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0
+2024-07-28 12:06:16,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=153794.66666666666, ans=0.125
+2024-07-28 12:06:21,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.10 vs. limit=10.0
+2024-07-28 12:06:30,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=153808.0, ans=0.2
+2024-07-28 12:06:30,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=153808.0, ans=0.2
+2024-07-28 12:06:39,898 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.688e+01 6.255e+01 7.399e+01 1.060e+02, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 12:06:40,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=153834.66666666666, ans=0.0
+2024-07-28 12:06:47,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=153848.0, ans=0.0
+2024-07-28 12:06:50,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.71 vs. limit=15.0
+2024-07-28 12:06:51,620 INFO [train.py:1114] (3/4) Epoch 12, batch 2950, loss[loss=0.1567, simple_loss=0.2494, pruned_loss=0.03202, over 4704.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2794, pruned_loss=0.05195, over 938711.97 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0
+2024-07-28 12:06:56,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.69 vs. limit=15.0
+2024-07-28 12:07:15,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=153901.33333333334, ans=0.0
+2024-07-28 12:07:25,340 INFO [train.py:1114] (3/4) Epoch 12, batch 3000, loss[loss=0.1522, simple_loss=0.2497, pruned_loss=0.02737, over 4759.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2791, pruned_loss=0.05178, over 938235.76 frames. ], batch size: 13, lr: 6.31e-03, grad_scale: 32.0
+2024-07-28 12:07:25,341 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 12:07:43,411 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.6490, 2.3917, 5.1722, 3.0186], device='cuda:3')
+2024-07-28 12:07:46,424 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.1682, simple_loss=0.272, pruned_loss=0.03224, over 944034.00 frames.
+2024-07-28 12:07:46,424 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 12:07:54,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=153941.33333333334, ans=0.0
+2024-07-28 12:08:00,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.23 vs.
limit=12.0 +2024-07-28 12:08:08,182 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.713e+01 5.599e+01 6.354e+01 7.168e+01 1.019e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 12:08:13,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=153981.33333333334, ans=0.125 +2024-07-28 12:08:13,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=153981.33333333334, ans=0.125 +2024-07-28 12:08:20,402 INFO [train.py:1114] (3/4) Epoch 12, batch 3050, loss[loss=0.1788, simple_loss=0.2634, pruned_loss=0.04712, over 4642.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2807, pruned_loss=0.05239, over 937115.04 frames. ], batch size: 12, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:45,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=154034.66666666666, ans=0.09899494936611666 +2024-07-28 12:08:53,627 INFO [train.py:1114] (3/4) Epoch 12, batch 3100, loss[loss=0.2299, simple_loss=0.3197, pruned_loss=0.07008, over 4905.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2799, pruned_loss=0.05207, over 938140.45 frames. ], batch size: 17, lr: 6.31e-03, grad_scale: 32.0 +2024-07-28 12:08:56,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154061.33333333334, ans=0.1 +2024-07-28 12:09:10,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=154088.0, ans=0.2 +2024-07-28 12:09:11,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=154088.0, ans=0.02 +2024-07-28 12:09:14,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.787e+01 5.719e+01 6.483e+01 7.749e+01 1.294e+02, threshold=1.297e+02, percent-clipped=1.0 +2024-07-28 12:09:19,930 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-07-28 12:09:26,876 INFO [train.py:1114] (3/4) Epoch 12, batch 3150, loss[loss=0.179, simple_loss=0.2733, pruned_loss=0.04233, over 4619.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2798, pruned_loss=0.05194, over 938490.48 frames. ], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:09:34,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=154141.33333333334, ans=0.125 +2024-07-28 12:09:35,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.77 vs. 
limit=10.0 +2024-07-28 12:09:39,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=154141.33333333334, ans=0.125 +2024-07-28 12:09:41,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=154154.66666666666, ans=0.1 +2024-07-28 12:09:42,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=154154.66666666666, ans=0.05 +2024-07-28 12:09:48,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=154168.0, ans=0.0 +2024-07-28 12:10:02,171 INFO [train.py:1114] (3/4) Epoch 12, batch 3200, loss[loss=0.1906, simple_loss=0.2748, pruned_loss=0.05314, over 4818.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.279, pruned_loss=0.05143, over 939763.53 frames. ], batch size: 13, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:06,587 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.05 vs. limit=15.0 +2024-07-28 12:10:08,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=154208.0, ans=0.025 +2024-07-28 12:10:23,358 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.738e+01 6.146e+01 6.845e+01 1.156e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 12:10:23,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=154234.66666666666, ans=0.0 +2024-07-28 12:10:26,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=154234.66666666666, ans=0.0 +2024-07-28 12:10:29,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=154248.0, ans=0.0 +2024-07-28 12:10:35,321 INFO [train.py:1114] (3/4) Epoch 12, batch 3250, loss[loss=0.194, simple_loss=0.2765, pruned_loss=0.05574, over 4939.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2794, pruned_loss=0.05136, over 940517.17 frames. ], batch size: 14, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:10:36,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.45 vs. limit=15.0 +2024-07-28 12:10:37,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=11.71 vs. limit=15.0 +2024-07-28 12:10:39,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=154261.33333333334, ans=0.2 +2024-07-28 12:10:47,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=154274.66666666666, ans=0.0 +2024-07-28 12:10:54,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=154301.33333333334, ans=0.2 +2024-07-28 12:10:57,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=154301.33333333334, ans=0.125 +2024-07-28 12:11:09,147 INFO [train.py:1114] (3/4) Epoch 12, batch 3300, loss[loss=0.1979, simple_loss=0.2843, pruned_loss=0.05568, over 4711.00 frames. 
], tot_loss[loss=0.1909, simple_loss=0.2785, pruned_loss=0.05164, over 940566.27 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:12,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=154328.0, ans=0.035 +2024-07-28 12:11:21,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=154341.33333333334, ans=0.0 +2024-07-28 12:11:31,596 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.932e+01 5.622e+01 6.140e+01 6.825e+01 9.627e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 12:11:34,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=154368.0, ans=0.0 +2024-07-28 12:11:36,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=154381.33333333334, ans=0.0 +2024-07-28 12:11:36,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=154381.33333333334, ans=0.025 +2024-07-28 12:11:45,530 INFO [train.py:1114] (3/4) Epoch 12, batch 3350, loss[loss=0.2031, simple_loss=0.2925, pruned_loss=0.05686, over 4600.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2791, pruned_loss=0.05218, over 938118.10 frames. ], batch size: 17, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:11:52,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=154408.0, ans=0.125 +2024-07-28 12:11:55,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154408.0, ans=0.125 +2024-07-28 12:12:00,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.62 vs. limit=22.5 +2024-07-28 12:12:06,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154421.33333333334, ans=0.125 +2024-07-28 12:12:10,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=154434.66666666666, ans=0.0 +2024-07-28 12:12:28,958 INFO [train.py:1114] (3/4) Epoch 12, batch 3400, loss[loss=0.1761, simple_loss=0.2534, pruned_loss=0.04943, over 4814.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2797, pruned_loss=0.0522, over 936892.79 frames. 
], batch size: 11, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:12:31,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154461.33333333334, ans=0.1 +2024-07-28 12:12:32,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=154461.33333333334, ans=0.95 +2024-07-28 12:12:33,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=154461.33333333334, ans=0.125 +2024-07-28 12:12:33,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=154461.33333333334, ans=15.0 +2024-07-28 12:12:44,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154488.0, ans=0.125 +2024-07-28 12:12:47,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=154488.0, ans=0.125 +2024-07-28 12:12:50,515 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.633e+01 6.152e+01 6.788e+01 1.015e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:13:01,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=154514.66666666666, ans=0.125 +2024-07-28 12:13:04,218 INFO [train.py:1114] (3/4) Epoch 12, batch 3450, loss[loss=0.2121, simple_loss=0.2999, pruned_loss=0.06211, over 4748.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2793, pruned_loss=0.05167, over 937611.42 frames. ], batch size: 19, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:12,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154541.33333333334, ans=0.125 +2024-07-28 12:13:13,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. limit=6.0 +2024-07-28 12:13:23,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154554.66666666666, ans=0.0 +2024-07-28 12:13:35,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=154581.33333333334, ans=0.1 +2024-07-28 12:13:39,607 INFO [train.py:1114] (3/4) Epoch 12, batch 3500, loss[loss=0.1672, simple_loss=0.248, pruned_loss=0.04316, over 4931.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2784, pruned_loss=0.05125, over 938375.66 frames. 
], batch size: 12, lr: 6.30e-03, grad_scale: 32.0 +2024-07-28 12:13:40,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=154594.66666666666, ans=0.125 +2024-07-28 12:14:01,206 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.093e+01 5.546e+01 6.148e+01 6.737e+01 9.893e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 12:14:06,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=154648.0, ans=0.125 +2024-07-28 12:14:06,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=154648.0, ans=0.0 +2024-07-28 12:14:12,943 INFO [train.py:1114] (3/4) Epoch 12, batch 3550, loss[loss=0.1816, simple_loss=0.2684, pruned_loss=0.04745, over 4673.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.278, pruned_loss=0.0509, over 939152.81 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 64.0 +2024-07-28 12:14:15,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.81 vs. limit=22.5 +2024-07-28 12:14:40,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.69 vs. limit=15.0 +2024-07-28 12:14:54,230 INFO [train.py:1114] (3/4) Epoch 12, batch 3600, loss[loss=0.1676, simple_loss=0.257, pruned_loss=0.03904, over 4967.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2783, pruned_loss=0.05091, over 940519.05 frames. ], batch size: 13, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:14:57,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=154728.0, ans=0.2 +2024-07-28 12:15:02,472 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:15:13,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=154754.66666666666, ans=15.0 +2024-07-28 12:15:16,128 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.873e+01 6.627e+01 7.814e+01 1.281e+02, threshold=1.325e+02, percent-clipped=1.0 +2024-07-28 12:15:17,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=154768.0, ans=0.125 +2024-07-28 12:15:22,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=154781.33333333334, ans=0.125 +2024-07-28 12:15:27,722 INFO [train.py:1114] (3/4) Epoch 12, batch 3650, loss[loss=0.1926, simple_loss=0.2771, pruned_loss=0.05408, over 4891.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2773, pruned_loss=0.05077, over 941019.77 frames. ], batch size: 15, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:15:30,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.77 vs. 
limit=15.0 +2024-07-28 12:15:32,431 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:15:33,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=154794.66666666666, ans=0.025 +2024-07-28 12:15:33,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.33 vs. limit=8.0 +2024-07-28 12:15:37,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=154808.0, ans=0.125 +2024-07-28 12:15:38,624 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.44 vs. limit=15.0 +2024-07-28 12:15:42,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=154821.33333333334, ans=10.0 +2024-07-28 12:15:46,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=154821.33333333334, ans=0.2 +2024-07-28 12:15:46,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=154821.33333333334, ans=0.2 +2024-07-28 12:15:50,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.36 vs. limit=15.0 +2024-07-28 12:15:50,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=154834.66666666666, ans=0.2 +2024-07-28 12:15:59,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=154848.0, ans=0.125 +2024-07-28 12:16:02,904 INFO [train.py:1114] (3/4) Epoch 12, batch 3700, loss[loss=0.1872, simple_loss=0.2834, pruned_loss=0.04547, over 4932.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2772, pruned_loss=0.05042, over 941697.71 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:08,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=154861.33333333334, ans=0.125 +2024-07-28 12:16:19,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-28 12:16:22,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=154901.33333333334, ans=0.125 +2024-07-28 12:16:24,633 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.998e+01 5.499e+01 5.998e+01 6.974e+01 1.210e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-28 12:16:24,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=154901.33333333334, ans=0.125 +2024-07-28 12:16:26,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.84 vs. limit=22.5 +2024-07-28 12:16:35,616 INFO [train.py:1114] (3/4) Epoch 12, batch 3750, loss[loss=0.2041, simple_loss=0.2842, pruned_loss=0.06201, over 4807.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2776, pruned_loss=0.0507, over 943616.49 frames. 
], batch size: 11, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:16:45,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.66 vs. limit=15.0 +2024-07-28 12:16:47,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff3.min_abs, batch_count=154941.33333333334, ans=0.2 +2024-07-28 12:16:50,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=154954.66666666666, ans=0.1 +2024-07-28 12:16:52,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=154954.66666666666, ans=0.09899494936611666 +2024-07-28 12:16:53,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=154954.66666666666, ans=0.125 +2024-07-28 12:16:54,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=154954.66666666666, ans=0.125 +2024-07-28 12:17:00,716 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:17:04,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.62 vs. limit=10.0 +2024-07-28 12:17:09,274 INFO [train.py:1114] (3/4) Epoch 12, batch 3800, loss[loss=0.2022, simple_loss=0.2973, pruned_loss=0.05357, over 4804.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2773, pruned_loss=0.05064, over 941672.62 frames. ], batch size: 14, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:28,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=155021.33333333334, ans=0.125 +2024-07-28 12:17:32,842 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.749e+01 6.230e+01 7.169e+01 2.120e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-28 12:17:47,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=155061.33333333334, ans=0.0 +2024-07-28 12:17:48,212 INFO [train.py:1114] (3/4) Epoch 12, batch 3850, loss[loss=0.2129, simple_loss=0.3101, pruned_loss=0.05791, over 4631.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2765, pruned_loss=0.05015, over 942190.23 frames. ], batch size: 16, lr: 6.29e-03, grad_scale: 32.0 +2024-07-28 12:17:56,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=155061.33333333334, ans=0.1 +2024-07-28 12:18:04,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=155074.66666666666, ans=0.0 +2024-07-28 12:18:12,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=155088.0, ans=0.125 +2024-07-28 12:18:14,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.04 vs. 
limit=15.0 +2024-07-28 12:18:15,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=155101.33333333334, ans=0.0 +2024-07-28 12:18:20,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=155101.33333333334, ans=0.0 +2024-07-28 12:18:29,374 INFO [train.py:1114] (3/4) Epoch 12, batch 3900, loss[loss=0.2018, simple_loss=0.2821, pruned_loss=0.06072, over 4814.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2781, pruned_loss=0.05102, over 942507.26 frames. ], batch size: 14, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:18:30,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=155128.0, ans=0.0 +2024-07-28 12:18:31,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.00 vs. limit=15.0 +2024-07-28 12:18:32,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155128.0, ans=0.125 +2024-07-28 12:18:49,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=155154.66666666666, ans=0.2 +2024-07-28 12:18:52,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=155168.0, ans=0.125 +2024-07-28 12:18:53,187 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.209e+01 5.496e+01 6.124e+01 6.680e+01 9.090e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 12:18:54,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=155168.0, ans=0.0 +2024-07-28 12:18:54,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=155168.0, ans=0.125 +2024-07-28 12:18:57,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=155181.33333333334, ans=0.0 +2024-07-28 12:19:05,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155181.33333333334, ans=0.1 +2024-07-28 12:19:07,239 INFO [train.py:1114] (3/4) Epoch 12, batch 3950, loss[loss=0.2112, simple_loss=0.2956, pruned_loss=0.0634, over 4837.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2779, pruned_loss=0.05084, over 944396.71 frames. ], batch size: 16, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:13,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=155208.0, ans=0.2 +2024-07-28 12:19:13,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=12.0 +2024-07-28 12:19:23,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=155221.33333333334, ans=0.0 +2024-07-28 12:19:24,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=155221.33333333334, ans=0.05 +2024-07-28 12:19:29,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.50 vs. 
limit=22.5 +2024-07-28 12:19:35,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=155248.0, ans=0.125 +2024-07-28 12:19:39,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=155248.0, ans=0.07 +2024-07-28 12:19:41,096 INFO [train.py:1114] (3/4) Epoch 12, batch 4000, loss[loss=0.1934, simple_loss=0.2729, pruned_loss=0.05693, over 4774.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2784, pruned_loss=0.05108, over 940749.03 frames. ], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:19:41,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=155261.33333333334, ans=0.07 +2024-07-28 12:19:47,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=155274.66666666666, ans=0.125 +2024-07-28 12:19:49,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155274.66666666666, ans=0.0 +2024-07-28 12:19:58,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=155288.0, ans=0.0 +2024-07-28 12:20:02,780 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.540e+01 6.444e+01 7.146e+01 1.519e+02, threshold=1.289e+02, percent-clipped=1.0 +2024-07-28 12:20:14,417 INFO [train.py:1114] (3/4) Epoch 12, batch 4050, loss[loss=0.2339, simple_loss=0.3056, pruned_loss=0.08111, over 3423.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2785, pruned_loss=0.0517, over 939375.26 frames. ], batch size: 35, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:24,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155341.33333333334, ans=0.125 +2024-07-28 12:20:29,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.00 vs. limit=15.0 +2024-07-28 12:20:41,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=155381.33333333334, ans=0.125 +2024-07-28 12:20:44,872 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.84 vs. limit=15.0 +2024-07-28 12:20:48,476 INFO [train.py:1114] (3/4) Epoch 12, batch 4100, loss[loss=0.2033, simple_loss=0.3057, pruned_loss=0.0505, over 4892.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2787, pruned_loss=0.05151, over 938223.68 frames. 
], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:20:51,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=155394.66666666666, ans=0.1 +2024-07-28 12:20:52,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=155394.66666666666, ans=0.2 +2024-07-28 12:21:12,173 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.731e+01 6.585e+01 8.286e+01 1.195e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 12:21:19,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=155448.0, ans=0.0 +2024-07-28 12:21:23,654 INFO [train.py:1114] (3/4) Epoch 12, batch 4150, loss[loss=0.1738, simple_loss=0.2577, pruned_loss=0.04494, over 4831.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2791, pruned_loss=0.05193, over 938079.51 frames. ], batch size: 13, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:23,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=155461.33333333334, ans=0.125 +2024-07-28 12:21:29,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.86 vs. limit=15.0 +2024-07-28 12:21:33,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=155474.66666666666, ans=0.125 +2024-07-28 12:21:45,689 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:21:56,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=155528.0, ans=0.0 +2024-07-28 12:21:57,096 INFO [train.py:1114] (3/4) Epoch 12, batch 4200, loss[loss=0.2155, simple_loss=0.2951, pruned_loss=0.06793, over 4914.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2786, pruned_loss=0.05184, over 939814.05 frames. ], batch size: 15, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:21:57,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=155528.0, ans=0.125 +2024-07-28 12:21:58,601 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:22:05,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=155541.33333333334, ans=0.125 +2024-07-28 12:22:17,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=155568.0, ans=0.125 +2024-07-28 12:22:18,891 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.397e+01 5.594e+01 6.177e+01 7.434e+01 1.256e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 12:22:19,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=155568.0, ans=0.2 +2024-07-28 12:22:30,565 INFO [train.py:1114] (3/4) Epoch 12, batch 4250, loss[loss=0.2177, simple_loss=0.3086, pruned_loss=0.06336, over 4638.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2796, pruned_loss=0.05202, over 941575.97 frames. 
], batch size: 12, lr: 6.28e-03, grad_scale: 32.0 +2024-07-28 12:22:31,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=155594.66666666666, ans=0.125 +2024-07-28 12:22:33,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=155594.66666666666, ans=0.125 +2024-07-28 12:22:36,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=155608.0, ans=0.125 +2024-07-28 12:22:37,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-28 12:22:52,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=155634.66666666666, ans=0.2 +2024-07-28 12:22:57,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=155648.0, ans=0.125 +2024-07-28 12:23:02,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=155648.0, ans=0.09899494936611666 +2024-07-28 12:23:06,444 INFO [train.py:1114] (3/4) Epoch 12, batch 4300, loss[loss=0.1585, simple_loss=0.2694, pruned_loss=0.02382, over 4753.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2793, pruned_loss=0.05131, over 941285.30 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:11,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=155661.33333333334, ans=0.5 +2024-07-28 12:23:12,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0 +2024-07-28 12:23:25,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=155701.33333333334, ans=0.125 +2024-07-28 12:23:26,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=155701.33333333334, ans=0.0 +2024-07-28 12:23:27,975 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.634e+01 6.193e+01 6.969e+01 9.578e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 12:23:28,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=155701.33333333334, ans=0.0 +2024-07-28 12:23:39,250 INFO [train.py:1114] (3/4) Epoch 12, batch 4350, loss[loss=0.1799, simple_loss=0.2762, pruned_loss=0.04178, over 4769.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2795, pruned_loss=0.05133, over 941680.12 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:23:46,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.92 vs. limit=10.0 +2024-07-28 12:23:54,598 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.93 vs. 
limit=15.0 +2024-07-28 12:23:56,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=155754.66666666666, ans=0.125 +2024-07-28 12:24:04,397 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.73 vs. limit=15.0 +2024-07-28 12:24:06,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=155781.33333333334, ans=0.035 +2024-07-28 12:24:20,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.46 vs. limit=10.0 +2024-07-28 12:24:21,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=155781.33333333334, ans=0.0 +2024-07-28 12:24:22,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=155781.33333333334, ans=0.125 +2024-07-28 12:24:28,595 INFO [train.py:1114] (3/4) Epoch 12, batch 4400, loss[loss=0.1792, simple_loss=0.2848, pruned_loss=0.03681, over 4803.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2791, pruned_loss=0.05104, over 941192.81 frames. ], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:24:31,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.34 vs. limit=6.0 +2024-07-28 12:24:52,799 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.007e+01 5.483e+01 6.215e+01 6.857e+01 1.527e+02, threshold=1.243e+02, percent-clipped=1.0 +2024-07-28 12:25:07,425 INFO [train.py:1114] (3/4) Epoch 12, batch 4450, loss[loss=0.1638, simple_loss=0.2422, pruned_loss=0.04267, over 4938.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2786, pruned_loss=0.05108, over 939010.87 frames. ], batch size: 12, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:25:20,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=155874.66666666666, ans=0.0 +2024-07-28 12:25:26,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=155888.0, ans=0.0 +2024-07-28 12:25:37,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=155901.33333333334, ans=0.125 +2024-07-28 12:25:46,640 INFO [train.py:1114] (3/4) Epoch 12, batch 4500, loss[loss=0.1813, simple_loss=0.2774, pruned_loss=0.04262, over 4744.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2789, pruned_loss=0.0511, over 938022.34 frames. ], batch size: 14, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:08,388 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.459e+01 6.375e+01 7.469e+01 1.021e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:26:09,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.06 vs. limit=15.0 +2024-07-28 12:26:09,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. 
limit=6.0 +2024-07-28 12:26:13,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=155981.33333333334, ans=0.125 +2024-07-28 12:26:14,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=155981.33333333334, ans=0.125 +2024-07-28 12:26:19,642 INFO [train.py:1114] (3/4) Epoch 12, batch 4550, loss[loss=0.1812, simple_loss=0.2798, pruned_loss=0.04135, over 4896.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2785, pruned_loss=0.05036, over 940068.73 frames. ], batch size: 13, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:21,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=155994.66666666666, ans=0.125 +2024-07-28 12:26:21,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=155994.66666666666, ans=0.125 +2024-07-28 12:26:25,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=155994.66666666666, ans=0.2 +2024-07-28 12:26:42,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156034.66666666666, ans=0.125 +2024-07-28 12:26:54,976 INFO [train.py:1114] (3/4) Epoch 12, batch 4600, loss[loss=0.1975, simple_loss=0.2928, pruned_loss=0.0511, over 4517.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2776, pruned_loss=0.05021, over 938225.59 frames. ], batch size: 21, lr: 6.27e-03, grad_scale: 32.0 +2024-07-28 12:26:57,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=156061.33333333334, ans=0.125 +2024-07-28 12:27:00,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=156061.33333333334, ans=0.125 +2024-07-28 12:27:07,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.95 vs. limit=15.0 +2024-07-28 12:27:12,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.30 vs. limit=15.0 +2024-07-28 12:27:15,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=156101.33333333334, ans=0.125 +2024-07-28 12:27:16,923 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.723e+01 6.384e+01 7.730e+01 1.121e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 12:27:20,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=156101.33333333334, ans=0.0 +2024-07-28 12:27:28,347 INFO [train.py:1114] (3/4) Epoch 12, batch 4650, loss[loss=0.2198, simple_loss=0.3071, pruned_loss=0.06626, over 4835.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2778, pruned_loss=0.05051, over 939828.43 frames. ], batch size: 16, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:27:33,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=156128.0, ans=0.125 +2024-07-28 12:27:34,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.91 vs. 
limit=10.0 +2024-07-28 12:27:50,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=156168.0, ans=0.125 +2024-07-28 12:27:52,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.03 vs. limit=15.0 +2024-07-28 12:28:01,579 INFO [train.py:1114] (3/4) Epoch 12, batch 4700, loss[loss=0.1934, simple_loss=0.2769, pruned_loss=0.05496, over 4693.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2776, pruned_loss=0.05028, over 936919.87 frames. ], batch size: 11, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:03,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156194.66666666666, ans=0.125 +2024-07-28 12:28:10,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=156208.0, ans=0.2 +2024-07-28 12:28:16,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.15 vs. limit=15.0 +2024-07-28 12:28:21,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=156234.66666666666, ans=10.0 +2024-07-28 12:28:23,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.553e+01 6.034e+01 6.597e+01 9.759e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 12:28:35,484 INFO [train.py:1114] (3/4) Epoch 12, batch 4750, loss[loss=0.2541, simple_loss=0.3246, pruned_loss=0.09179, over 4444.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2793, pruned_loss=0.05175, over 934852.46 frames. ], batch size: 21, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:28:35,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=156261.33333333334, ans=0.125 +2024-07-28 12:28:37,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=156261.33333333334, ans=0.125 +2024-07-28 12:28:41,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=156261.33333333334, ans=0.125 +2024-07-28 12:28:48,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=156274.66666666666, ans=0.07 +2024-07-28 12:28:52,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=156288.0, ans=0.1 +2024-07-28 12:29:00,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=156301.33333333334, ans=0.0 +2024-07-28 12:29:06,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=156314.66666666666, ans=0.125 +2024-07-28 12:29:10,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.78 vs. limit=15.0 +2024-07-28 12:29:11,596 INFO [train.py:1114] (3/4) Epoch 12, batch 4800, loss[loss=0.2257, simple_loss=0.3141, pruned_loss=0.06864, over 4698.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2789, pruned_loss=0.0521, over 932395.14 frames. 
], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:22,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156341.33333333334, ans=0.1 +2024-07-28 12:29:23,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=156341.33333333334, ans=0.2 +2024-07-28 12:29:24,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=12.0 +2024-07-28 12:29:29,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=156354.66666666666, ans=0.0 +2024-07-28 12:29:31,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=156354.66666666666, ans=0.0 +2024-07-28 12:29:32,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.98 vs. limit=15.0 +2024-07-28 12:29:35,339 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.583e+01 6.047e+01 7.018e+01 9.420e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 12:29:46,703 INFO [train.py:1114] (3/4) Epoch 12, batch 4850, loss[loss=0.2171, simple_loss=0.3077, pruned_loss=0.06329, over 4738.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2796, pruned_loss=0.05215, over 932078.37 frames. ], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:29:53,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=156408.0, ans=0.125 +2024-07-28 12:30:04,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-28 12:30:05,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-28 12:30:05,954 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:30:25,571 INFO [train.py:1114] (3/4) Epoch 12, batch 4900, loss[loss=0.1863, simple_loss=0.272, pruned_loss=0.05027, over 4764.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.28, pruned_loss=0.05241, over 934144.80 frames. 
], batch size: 13, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:30:29,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=156461.33333333334, ans=0.0 +2024-07-28 12:30:32,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=156474.66666666666, ans=0.04949747468305833 +2024-07-28 12:30:41,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=156488.0, ans=0.0 +2024-07-28 12:30:45,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=156488.0, ans=0.125 +2024-07-28 12:30:48,384 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.179e+01 5.700e+01 6.377e+01 7.192e+01 1.081e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 12:30:59,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=156528.0, ans=0.125 +2024-07-28 12:30:59,636 INFO [train.py:1114] (3/4) Epoch 12, batch 4950, loss[loss=0.2895, simple_loss=0.3496, pruned_loss=0.1147, over 3414.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2813, pruned_loss=0.05299, over 931691.21 frames. ], batch size: 35, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:08,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=156541.33333333334, ans=0.2 +2024-07-28 12:31:13,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=156554.66666666666, ans=0.125 +2024-07-28 12:31:19,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=156568.0, ans=0.125 +2024-07-28 12:31:21,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.78 vs. limit=15.0 +2024-07-28 12:31:32,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=156581.33333333334, ans=0.1 +2024-07-28 12:31:32,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=156594.66666666666, ans=0.0 +2024-07-28 12:31:33,224 INFO [train.py:1114] (3/4) Epoch 12, batch 5000, loss[loss=0.2167, simple_loss=0.3133, pruned_loss=0.06005, over 4656.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2805, pruned_loss=0.0525, over 935736.84 frames. 
], batch size: 14, lr: 6.26e-03, grad_scale: 32.0 +2024-07-28 12:31:56,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156621.33333333334, ans=0.1 +2024-07-28 12:31:57,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=156621.33333333334, ans=0.2 +2024-07-28 12:31:58,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=156634.66666666666, ans=0.2 +2024-07-28 12:32:00,477 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.699e+01 6.190e+01 6.580e+01 9.599e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 12:32:08,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=156648.0, ans=0.125 +2024-07-28 12:32:11,982 INFO [train.py:1114] (3/4) Epoch 12, batch 5050, loss[loss=0.1645, simple_loss=0.2505, pruned_loss=0.03929, over 4847.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2795, pruned_loss=0.05195, over 938062.92 frames. ], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:14,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=156661.33333333334, ans=0.0 +2024-07-28 12:32:29,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.67 vs. limit=22.5 +2024-07-28 12:32:29,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=156688.0, ans=0.2 +2024-07-28 12:32:34,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.29 vs. limit=15.0 +2024-07-28 12:32:48,324 INFO [train.py:1114] (3/4) Epoch 12, batch 5100, loss[loss=0.1763, simple_loss=0.2639, pruned_loss=0.04432, over 4777.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2801, pruned_loss=0.05273, over 935583.36 frames. ], batch size: 12, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:32:49,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.23 vs. limit=6.0 +2024-07-28 12:32:50,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=17.79 vs. limit=22.5 +2024-07-28 12:32:50,655 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.83 vs. limit=15.0 +2024-07-28 12:32:59,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=156741.33333333334, ans=0.1 +2024-07-28 12:33:09,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.98 vs. 
limit=15.0 +2024-07-28 12:33:11,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=156754.66666666666, ans=0.0 +2024-07-28 12:33:19,268 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.805e+01 5.671e+01 6.012e+01 6.981e+01 1.009e+02, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 12:33:22,898 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.07 vs. limit=15.0 +2024-07-28 12:33:32,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=156781.33333333334, ans=0.0 +2024-07-28 12:33:36,871 INFO [train.py:1114] (3/4) Epoch 12, batch 5150, loss[loss=0.2094, simple_loss=0.2904, pruned_loss=0.06423, over 4843.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2816, pruned_loss=0.0535, over 936553.05 frames. ], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:33:37,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=156794.66666666666, ans=0.2 +2024-07-28 12:33:51,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=156821.33333333334, ans=0.2 +2024-07-28 12:34:04,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=156834.66666666666, ans=0.125 +2024-07-28 12:34:12,709 INFO [train.py:1114] (3/4) Epoch 12, batch 5200, loss[loss=0.1781, simple_loss=0.273, pruned_loss=0.04162, over 4665.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2811, pruned_loss=0.05299, over 936685.62 frames. ], batch size: 14, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:34:13,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=156861.33333333334, ans=0.125 +2024-07-28 12:34:19,687 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:34:25,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=156874.66666666666, ans=0.025 +2024-07-28 12:34:26,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.11 vs. limit=6.0 +2024-07-28 12:34:28,101 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.08 vs. 
limit=6.0 +2024-07-28 12:34:30,607 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:34:39,524 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:34:39,944 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.716e+01 5.675e+01 6.398e+01 7.446e+01 1.094e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 12:34:40,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=156901.33333333334, ans=0.025 +2024-07-28 12:34:40,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=156901.33333333334, ans=0.125 +2024-07-28 12:34:51,272 INFO [train.py:1114] (3/4) Epoch 12, batch 5250, loss[loss=0.1457, simple_loss=0.2307, pruned_loss=0.03036, over 4898.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2799, pruned_loss=0.0524, over 935928.22 frames. ], batch size: 13, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:35:02,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=156941.33333333334, ans=0.1 +2024-07-28 12:35:07,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=156954.66666666666, ans=0.0 +2024-07-28 12:35:12,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=156968.0, ans=0.125 +2024-07-28 12:35:21,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.39 vs. limit=12.0 +2024-07-28 12:35:24,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=156994.66666666666, ans=0.125 +2024-07-28 12:35:24,843 INFO [train.py:1114] (3/4) Epoch 12, batch 5300, loss[loss=0.2251, simple_loss=0.3108, pruned_loss=0.0697, over 4651.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2798, pruned_loss=0.05276, over 934351.64 frames. ], batch size: 16, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:35:36,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=157008.0, ans=0.5 +2024-07-28 12:35:39,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.22 vs. limit=15.0 +2024-07-28 12:35:39,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=157021.33333333334, ans=0.125 +2024-07-28 12:35:43,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.71 vs. 
limit=15.0 +2024-07-28 12:35:46,889 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.552e+01 6.428e+01 7.649e+01 1.141e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 12:35:47,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157034.66666666666, ans=0.1 +2024-07-28 12:35:49,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157034.66666666666, ans=0.125 +2024-07-28 12:35:58,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=157048.0, ans=0.2 +2024-07-28 12:36:00,590 INFO [train.py:1114] (3/4) Epoch 12, batch 5350, loss[loss=0.1551, simple_loss=0.2343, pruned_loss=0.03795, over 4560.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2801, pruned_loss=0.0526, over 936338.05 frames. ], batch size: 10, lr: 6.25e-03, grad_scale: 32.0 +2024-07-28 12:36:08,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=157074.66666666666, ans=0.125 +2024-07-28 12:36:12,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=157074.66666666666, ans=0.04949747468305833 +2024-07-28 12:36:13,175 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.71 vs. limit=15.0 +2024-07-28 12:36:14,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=157074.66666666666, ans=0.0 +2024-07-28 12:36:15,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=157088.0, ans=0.125 +2024-07-28 12:36:19,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.90 vs. limit=15.0 +2024-07-28 12:36:23,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=157101.33333333334, ans=0.125 +2024-07-28 12:36:31,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=157114.66666666666, ans=0.125 +2024-07-28 12:36:34,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=157114.66666666666, ans=0.2 +2024-07-28 12:36:36,744 INFO [train.py:1114] (3/4) Epoch 12, batch 5400, loss[loss=0.1807, simple_loss=0.2767, pruned_loss=0.04234, over 4235.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2811, pruned_loss=0.05322, over 930283.77 frames. ], batch size: 25, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:36:38,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.69 vs. limit=22.5 +2024-07-28 12:36:58,604 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.622e+01 6.108e+01 6.944e+01 7.812e+01 1.147e+02, threshold=1.389e+02, percent-clipped=0.0 +2024-07-28 12:37:09,779 INFO [train.py:1114] (3/4) Epoch 12, batch 5450, loss[loss=0.1752, simple_loss=0.256, pruned_loss=0.04717, over 4701.00 frames. ], tot_loss[loss=0.192, simple_loss=0.279, pruned_loss=0.05247, over 933379.49 frames. 
], batch size: 11, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:37:18,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157208.0, ans=0.125 +2024-07-28 12:37:21,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.13 vs. limit=12.0 +2024-07-28 12:37:24,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=157221.33333333334, ans=0.1 +2024-07-28 12:37:26,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=157221.33333333334, ans=0.2 +2024-07-28 12:37:26,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=157221.33333333334, ans=0.04949747468305833 +2024-07-28 12:37:38,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.21 vs. limit=15.0 +2024-07-28 12:37:39,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157248.0, ans=0.1 +2024-07-28 12:37:43,610 INFO [train.py:1114] (3/4) Epoch 12, batch 5500, loss[loss=0.2115, simple_loss=0.2972, pruned_loss=0.06288, over 4207.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2793, pruned_loss=0.05262, over 931484.22 frames. ], batch size: 25, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:37:44,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=157261.33333333334, ans=0.025 +2024-07-28 12:37:52,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=157274.66666666666, ans=0.025 +2024-07-28 12:38:03,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.72 vs. limit=15.0 +2024-07-28 12:38:05,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.35 vs. limit=12.0 +2024-07-28 12:38:07,911 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.744e+01 6.392e+01 7.523e+01 1.431e+02, threshold=1.278e+02, percent-clipped=1.0 +2024-07-28 12:38:08,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=157301.33333333334, ans=0.1 +2024-07-28 12:38:12,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=157314.66666666666, ans=0.0 +2024-07-28 12:38:14,188 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:38:16,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=157314.66666666666, ans=0.125 +2024-07-28 12:38:17,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.15 vs. limit=22.5 +2024-07-28 12:38:19,222 INFO [train.py:1114] (3/4) Epoch 12, batch 5550, loss[loss=0.1652, simple_loss=0.247, pruned_loss=0.04167, over 4700.00 frames. 
], tot_loss[loss=0.1918, simple_loss=0.2787, pruned_loss=0.05247, over 933738.40 frames. ], batch size: 12, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:38:35,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=157354.66666666666, ans=0.0 +2024-07-28 12:38:36,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=157354.66666666666, ans=0.0 +2024-07-28 12:38:46,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=157381.33333333334, ans=0.0 +2024-07-28 12:38:46,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=157381.33333333334, ans=0.125 +2024-07-28 12:38:47,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=157381.33333333334, ans=0.0 +2024-07-28 12:38:51,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=157381.33333333334, ans=0.0 +2024-07-28 12:38:52,991 INFO [train.py:1114] (3/4) Epoch 12, batch 5600, loss[loss=0.2059, simple_loss=0.2993, pruned_loss=0.05629, over 4742.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2799, pruned_loss=0.05255, over 935473.84 frames. ], batch size: 14, lr: 6.24e-03, grad_scale: 64.0 +2024-07-28 12:38:55,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=157394.66666666666, ans=0.125 +2024-07-28 12:39:02,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.83 vs. limit=10.0 +2024-07-28 12:39:09,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0 +2024-07-28 12:39:13,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=157434.66666666666, ans=0.0 +2024-07-28 12:39:15,770 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.811e+01 5.521e+01 6.342e+01 7.244e+01 1.033e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 12:39:26,385 INFO [train.py:1114] (3/4) Epoch 12, batch 5650, loss[loss=0.2446, simple_loss=0.3228, pruned_loss=0.08324, over 4535.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.279, pruned_loss=0.05203, over 937759.19 frames. 
], batch size: 21, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:39:31,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=157461.33333333334, ans=0.0 +2024-07-28 12:39:36,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=157474.66666666666, ans=0.125 +2024-07-28 12:39:39,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=157488.0, ans=0.0 +2024-07-28 12:39:42,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=157488.0, ans=0.0 +2024-07-28 12:39:44,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=157488.0, ans=0.0 +2024-07-28 12:39:45,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=157488.0, ans=0.125 +2024-07-28 12:39:47,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.76 vs. limit=15.0 +2024-07-28 12:39:59,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=157514.66666666666, ans=0.025 +2024-07-28 12:40:01,553 INFO [train.py:1114] (3/4) Epoch 12, batch 5700, loss[loss=0.1795, simple_loss=0.2735, pruned_loss=0.04273, over 4704.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2793, pruned_loss=0.05221, over 938703.06 frames. ], batch size: 13, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:40:02,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.06 vs. limit=22.5 +2024-07-28 12:40:06,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0 +2024-07-28 12:40:11,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=157541.33333333334, ans=0.025 +2024-07-28 12:40:13,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=157541.33333333334, ans=0.125 +2024-07-28 12:40:26,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.907e+01 5.849e+01 6.761e+01 7.551e+01 1.061e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 12:40:28,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=157568.0, ans=0.0 +2024-07-28 12:40:36,679 INFO [train.py:1114] (3/4) Epoch 12, batch 5750, loss[loss=0.2545, simple_loss=0.3392, pruned_loss=0.08484, over 4706.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2796, pruned_loss=0.0523, over 938476.65 frames. 
], batch size: 19, lr: 6.24e-03, grad_scale: 32.0 +2024-07-28 12:40:41,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=157594.66666666666, ans=0.125 +2024-07-28 12:40:47,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=157608.0, ans=0.0 +2024-07-28 12:41:01,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.00 vs. limit=15.0 +2024-07-28 12:41:07,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=157634.66666666666, ans=0.125 +2024-07-28 12:41:07,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=157634.66666666666, ans=0.125 +2024-07-28 12:41:11,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=18.70 vs. limit=22.5 +2024-07-28 12:41:16,560 INFO [train.py:1114] (3/4) Epoch 12, batch 5800, loss[loss=0.2191, simple_loss=0.3036, pruned_loss=0.06724, over 4665.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2803, pruned_loss=0.05259, over 937408.32 frames. ], batch size: 19, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:41:17,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=157661.33333333334, ans=0.125 +2024-07-28 12:41:20,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=157661.33333333334, ans=15.0 +2024-07-28 12:41:36,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157688.0, ans=0.1 +2024-07-28 12:41:43,100 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.542e+01 6.072e+01 7.218e+01 1.008e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 12:41:59,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=157728.0, ans=0.025 +2024-07-28 12:41:59,705 INFO [train.py:1114] (3/4) Epoch 12, batch 5850, loss[loss=0.2288, simple_loss=0.32, pruned_loss=0.06876, over 4473.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2808, pruned_loss=0.05292, over 937984.59 frames. ], batch size: 21, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:42:16,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=157754.66666666666, ans=0.0 +2024-07-28 12:42:21,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=157768.0, ans=0.125 +2024-07-28 12:42:21,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=157768.0, ans=0.09899494936611666 +2024-07-28 12:42:24,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=157768.0, ans=0.0 +2024-07-28 12:42:27,429 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.18 vs. 
limit=12.0 +2024-07-28 12:42:33,081 INFO [train.py:1114] (3/4) Epoch 12, batch 5900, loss[loss=0.1993, simple_loss=0.2867, pruned_loss=0.05598, over 4685.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2804, pruned_loss=0.05299, over 938459.73 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:42:33,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=157794.66666666666, ans=0.125 +2024-07-28 12:42:43,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.40 vs. limit=15.0 +2024-07-28 12:42:48,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=157821.33333333334, ans=0.1 +2024-07-28 12:42:51,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=157821.33333333334, ans=0.125 +2024-07-28 12:42:52,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=157821.33333333334, ans=0.125 +2024-07-28 12:42:56,390 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.671e+01 6.327e+01 7.319e+01 1.125e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-28 12:43:04,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.83 vs. limit=22.5 +2024-07-28 12:43:05,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=157848.0, ans=0.125 +2024-07-28 12:43:07,207 INFO [train.py:1114] (3/4) Epoch 12, batch 5950, loss[loss=0.2291, simple_loss=0.312, pruned_loss=0.0731, over 4682.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.28, pruned_loss=0.05239, over 940579.02 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:43:11,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0 +2024-07-28 12:43:12,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=157861.33333333334, ans=0.2 +2024-07-28 12:43:22,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=157888.0, ans=0.125 +2024-07-28 12:43:42,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. limit=6.0 +2024-07-28 12:43:42,910 INFO [train.py:1114] (3/4) Epoch 12, batch 6000, loss[loss=0.2219, simple_loss=0.3054, pruned_loss=0.06922, over 4167.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2796, pruned_loss=0.05249, over 938024.88 frames. 
], batch size: 25, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:43:42,910 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 12:43:50,383 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.5284, 4.3452, 3.7534, 4.2271], device='cuda:3') +2024-07-28 12:43:50,766 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.4684, 4.1006, 4.8284, 3.6080], device='cuda:3') +2024-07-28 12:43:54,399 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.1672, simple_loss=0.2713, pruned_loss=0.03161, over 944034.00 frames. +2024-07-28 12:43:54,399 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 12:43:57,030 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=25.24 vs. limit=22.5 +2024-07-28 12:44:17,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.694e+01 6.318e+01 7.255e+01 1.160e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 12:44:28,519 INFO [train.py:1114] (3/4) Epoch 12, batch 6050, loss[loss=0.1915, simple_loss=0.2783, pruned_loss=0.05236, over 4775.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2793, pruned_loss=0.0526, over 939002.48 frames. ], batch size: 12, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:44:31,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=157994.66666666666, ans=0.2 +2024-07-28 12:44:35,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=158008.0, ans=0.0 +2024-07-28 12:44:42,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.58 vs. limit=15.0 +2024-07-28 12:45:01,897 INFO [train.py:1114] (3/4) Epoch 12, batch 6100, loss[loss=0.226, simple_loss=0.3181, pruned_loss=0.06696, over 4677.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2788, pruned_loss=0.05239, over 938667.97 frames. ], batch size: 15, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:45:26,345 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.720e+01 6.414e+01 7.144e+01 1.177e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-28 12:45:27,178 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:45:29,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=158114.66666666666, ans=0.025 +2024-07-28 12:45:35,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=158114.66666666666, ans=0.125 +2024-07-28 12:45:36,976 INFO [train.py:1114] (3/4) Epoch 12, batch 6150, loss[loss=0.2075, simple_loss=0.2997, pruned_loss=0.05762, over 3527.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2788, pruned_loss=0.05179, over 937278.87 frames. 
], batch size: 35, lr: 6.23e-03, grad_scale: 32.0 +2024-07-28 12:45:43,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158141.33333333334, ans=0.125 +2024-07-28 12:45:46,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=158141.33333333334, ans=0.0 +2024-07-28 12:45:59,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=158168.0, ans=0.09899494936611666 +2024-07-28 12:45:59,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=158168.0, ans=0.0 +2024-07-28 12:46:01,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=158168.0, ans=0.2 +2024-07-28 12:46:08,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=158181.33333333334, ans=0.125 +2024-07-28 12:46:08,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=158181.33333333334, ans=0.0 +2024-07-28 12:46:10,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=158194.66666666666, ans=0.125 +2024-07-28 12:46:11,262 INFO [train.py:1114] (3/4) Epoch 12, batch 6200, loss[loss=0.1834, simple_loss=0.27, pruned_loss=0.04838, over 4743.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2787, pruned_loss=0.05189, over 937008.06 frames. ], batch size: 14, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:46:13,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=158194.66666666666, ans=15.0 +2024-07-28 12:46:14,114 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:46:15,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=158194.66666666666, ans=0.1 +2024-07-28 12:46:17,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=158194.66666666666, ans=0.0 +2024-07-28 12:46:20,242 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:46:27,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=158221.33333333334, ans=0.0 +2024-07-28 12:46:34,428 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.111e+01 6.861e+01 1.032e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 12:46:37,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=158234.66666666666, ans=0.2 +2024-07-28 12:46:47,155 INFO [train.py:1114] (3/4) Epoch 12, batch 6250, loss[loss=0.205, simple_loss=0.3011, pruned_loss=0.05448, over 4800.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2792, pruned_loss=0.05219, over 933181.44 frames. 
], batch size: 14, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:46:47,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=158261.33333333334, ans=0.0 +2024-07-28 12:47:04,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.18 vs. limit=15.0 +2024-07-28 12:47:10,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-07-28 12:47:22,586 INFO [train.py:1114] (3/4) Epoch 12, batch 6300, loss[loss=0.1288, simple_loss=0.2196, pruned_loss=0.019, over 4532.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2799, pruned_loss=0.05265, over 929713.05 frames. ], batch size: 10, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:47:23,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=158328.0, ans=0.125 +2024-07-28 12:47:25,506 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0 +2024-07-28 12:47:37,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=158354.66666666666, ans=0.0 +2024-07-28 12:47:37,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=158354.66666666666, ans=0.0 +2024-07-28 12:47:44,770 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.769e+01 6.275e+01 7.297e+01 9.885e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 12:47:48,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=158381.33333333334, ans=0.0 +2024-07-28 12:47:51,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=158381.33333333334, ans=0.0 +2024-07-28 12:47:52,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=158381.33333333334, ans=0.0 +2024-07-28 12:47:55,500 INFO [train.py:1114] (3/4) Epoch 12, batch 6350, loss[loss=0.1727, simple_loss=0.2604, pruned_loss=0.0425, over 4488.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2797, pruned_loss=0.05214, over 933717.07 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:48:00,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=158394.66666666666, ans=0.125 +2024-07-28 12:48:03,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=158408.0, ans=0.0 +2024-07-28 12:48:08,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158421.33333333334, ans=0.1 +2024-07-28 12:48:12,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=158421.33333333334, ans=0.0 +2024-07-28 12:48:29,207 INFO [train.py:1114] (3/4) Epoch 12, batch 6400, loss[loss=0.1881, simple_loss=0.2846, pruned_loss=0.04575, over 4632.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2787, pruned_loss=0.05195, over 934874.69 frames. 
], batch size: 13, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:48:33,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=158461.33333333334, ans=0.125 +2024-07-28 12:48:35,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158474.66666666666, ans=0.1 +2024-07-28 12:48:42,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=158474.66666666666, ans=0.05 +2024-07-28 12:48:51,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=158501.33333333334, ans=0.025 +2024-07-28 12:48:53,532 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.709e+01 6.303e+01 7.389e+01 1.106e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 12:49:04,232 INFO [train.py:1114] (3/4) Epoch 12, batch 6450, loss[loss=0.2057, simple_loss=0.2855, pruned_loss=0.06295, over 4433.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.279, pruned_loss=0.05195, over 938338.77 frames. ], batch size: 21, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:49:05,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.79 vs. limit=15.0 +2024-07-28 12:49:20,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=158554.66666666666, ans=0.125 +2024-07-28 12:49:24,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=158568.0, ans=10.0 +2024-07-28 12:49:29,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.66 vs. limit=8.0 +2024-07-28 12:49:36,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=158594.66666666666, ans=0.125 +2024-07-28 12:49:37,228 INFO [train.py:1114] (3/4) Epoch 12, batch 6500, loss[loss=0.2526, simple_loss=0.3168, pruned_loss=0.09421, over 3610.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2776, pruned_loss=0.05137, over 940004.38 frames. 
], batch size: 35, lr: 6.22e-03, grad_scale: 32.0 +2024-07-28 12:49:41,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=158594.66666666666, ans=10.0 +2024-07-28 12:49:51,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=158621.33333333334, ans=0.2 +2024-07-28 12:49:54,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=158621.33333333334, ans=0.09899494936611666 +2024-07-28 12:49:59,307 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.679e+01 6.205e+01 7.346e+01 1.316e+02, threshold=1.241e+02, percent-clipped=1.0 +2024-07-28 12:50:03,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158648.0, ans=0.1 +2024-07-28 12:50:06,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=158648.0, ans=0.125 +2024-07-28 12:50:10,239 INFO [train.py:1114] (3/4) Epoch 12, batch 6550, loss[loss=0.1749, simple_loss=0.2498, pruned_loss=0.04998, over 4792.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2771, pruned_loss=0.05126, over 942998.77 frames. ], batch size: 11, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:50:12,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.72 vs. limit=15.0 +2024-07-28 12:50:17,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=158674.66666666666, ans=0.125 +2024-07-28 12:50:23,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=158688.0, ans=0.125 +2024-07-28 12:50:31,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158701.33333333334, ans=0.1 +2024-07-28 12:50:37,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=158714.66666666666, ans=0.0 +2024-07-28 12:50:42,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=158714.66666666666, ans=0.025 +2024-07-28 12:50:43,174 INFO [train.py:1114] (3/4) Epoch 12, batch 6600, loss[loss=0.2296, simple_loss=0.3101, pruned_loss=0.07455, over 4936.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2771, pruned_loss=0.0512, over 944996.14 frames. ], batch size: 14, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:50:56,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158741.33333333334, ans=0.1 +2024-07-28 12:51:02,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=158754.66666666666, ans=0.125 +2024-07-28 12:51:02,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.00 vs. 
limit=15.0 +2024-07-28 12:51:07,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.716e+01 6.452e+01 7.100e+01 1.307e+02, threshold=1.290e+02, percent-clipped=2.0 +2024-07-28 12:51:10,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=12.43 vs. limit=15.0 +2024-07-28 12:51:12,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=158781.33333333334, ans=0.1 +2024-07-28 12:51:23,535 INFO [train.py:1114] (3/4) Epoch 12, batch 6650, loss[loss=0.1768, simple_loss=0.2806, pruned_loss=0.03647, over 4583.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2764, pruned_loss=0.05082, over 943669.29 frames. ], batch size: 17, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:51:50,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=158821.33333333334, ans=0.0 +2024-07-28 12:52:04,750 INFO [train.py:1114] (3/4) Epoch 12, batch 6700, loss[loss=0.2043, simple_loss=0.2968, pruned_loss=0.05587, over 4697.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2772, pruned_loss=0.05114, over 942504.54 frames. ], batch size: 19, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:52:06,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=158861.33333333334, ans=0.125 +2024-07-28 12:52:31,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.714e+01 5.714e+01 6.445e+01 7.279e+01 1.274e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 12:52:33,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=158901.33333333334, ans=0.2 +2024-07-28 12:52:42,416 INFO [train.py:1114] (3/4) Epoch 12, batch 6750, loss[loss=0.1667, simple_loss=0.2571, pruned_loss=0.03817, over 4175.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.278, pruned_loss=0.05192, over 940726.07 frames. ], batch size: 25, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:52:56,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=158954.66666666666, ans=0.125 +2024-07-28 12:53:00,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=158954.66666666666, ans=0.0 +2024-07-28 12:53:00,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=158954.66666666666, ans=0.125 +2024-07-28 12:53:03,814 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.00 vs. limit=15.0 +2024-07-28 12:53:10,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=158981.33333333334, ans=0.125 +2024-07-28 12:53:11,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.20 vs. limit=15.0 +2024-07-28 12:53:16,210 INFO [train.py:1114] (3/4) Epoch 12, batch 6800, loss[loss=0.18, simple_loss=0.2834, pruned_loss=0.03835, over 4633.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2779, pruned_loss=0.05159, over 939722.78 frames. 
], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:53:20,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=158994.66666666666, ans=0.04949747468305833 +2024-07-28 12:53:22,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159008.0, ans=0.125 +2024-07-28 12:53:23,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159008.0, ans=0.125 +2024-07-28 12:53:24,489 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.18 vs. limit=15.0 +2024-07-28 12:53:24,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=159008.0, ans=0.125 +2024-07-28 12:53:25,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159008.0, ans=0.125 +2024-07-28 12:53:37,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=159034.66666666666, ans=0.125 +2024-07-28 12:53:38,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.632e+01 6.105e+01 6.995e+01 1.094e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 12:53:39,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=159034.66666666666, ans=0.125 +2024-07-28 12:53:39,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.51 vs. limit=12.0 +2024-07-28 12:53:49,462 INFO [train.py:1114] (3/4) Epoch 12, batch 6850, loss[loss=0.1775, simple_loss=0.2747, pruned_loss=0.04008, over 4693.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2772, pruned_loss=0.05098, over 941269.67 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:53:52,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159061.33333333334, ans=0.1 +2024-07-28 12:53:54,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159061.33333333334, ans=0.1 +2024-07-28 12:54:02,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.15 vs. limit=10.0 +2024-07-28 12:54:10,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159101.33333333334, ans=0.125 +2024-07-28 12:54:10,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159101.33333333334, ans=0.125 +2024-07-28 12:54:18,349 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=14.56 vs. 
limit=15.0 +2024-07-28 12:54:23,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=159128.0, ans=10.0 +2024-07-28 12:54:24,192 INFO [train.py:1114] (3/4) Epoch 12, batch 6900, loss[loss=0.1541, simple_loss=0.2405, pruned_loss=0.03384, over 4964.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2776, pruned_loss=0.05122, over 943158.71 frames. ], batch size: 13, lr: 6.21e-03, grad_scale: 32.0 +2024-07-28 12:54:25,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=159128.0, ans=0.2 +2024-07-28 12:54:27,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159128.0, ans=0.0 +2024-07-28 12:54:28,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.75 vs. limit=10.0 +2024-07-28 12:54:40,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.76 vs. limit=22.5 +2024-07-28 12:54:40,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=159154.66666666666, ans=0.0 +2024-07-28 12:54:46,740 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.739e+01 5.528e+01 6.156e+01 7.028e+01 9.720e+01, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 12:54:55,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=159181.33333333334, ans=0.0 +2024-07-28 12:54:57,576 INFO [train.py:1114] (3/4) Epoch 12, batch 6950, loss[loss=0.1586, simple_loss=0.2492, pruned_loss=0.03399, over 4531.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2782, pruned_loss=0.05148, over 940428.88 frames. ], batch size: 10, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:55:07,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=159208.0, ans=0.1 +2024-07-28 12:55:30,916 INFO [train.py:1114] (3/4) Epoch 12, batch 7000, loss[loss=0.1914, simple_loss=0.2916, pruned_loss=0.04558, over 4606.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2774, pruned_loss=0.05144, over 938671.61 frames. ], batch size: 17, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:55:32,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=159261.33333333334, ans=0.125 +2024-07-28 12:55:38,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.56 vs. 
limit=12.0 +2024-07-28 12:55:39,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=159274.66666666666, ans=0.2 +2024-07-28 12:55:39,559 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:55:40,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=159274.66666666666, ans=0.125 +2024-07-28 12:55:51,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=159301.33333333334, ans=0.015 +2024-07-28 12:55:52,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=159301.33333333334, ans=0.025 +2024-07-28 12:55:53,273 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.935e+01 5.641e+01 6.482e+01 7.445e+01 1.063e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 12:56:07,637 INFO [train.py:1114] (3/4) Epoch 12, batch 7050, loss[loss=0.1862, simple_loss=0.2825, pruned_loss=0.04497, over 4738.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2765, pruned_loss=0.05073, over 941863.58 frames. ], batch size: 19, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:56:12,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=159328.0, ans=0.125 +2024-07-28 12:56:22,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.98 vs. limit=15.0 +2024-07-28 12:56:24,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159354.66666666666, ans=0.125 +2024-07-28 12:56:38,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=159381.33333333334, ans=0.125 +2024-07-28 12:56:40,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=159381.33333333334, ans=0.0 +2024-07-28 12:56:41,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=159381.33333333334, ans=0.025 +2024-07-28 12:56:42,913 INFO [train.py:1114] (3/4) Epoch 12, batch 7100, loss[loss=0.2097, simple_loss=0.2909, pruned_loss=0.06426, over 4807.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2774, pruned_loss=0.05154, over 936364.95 frames. 
], batch size: 15, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:56:47,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=159394.66666666666, ans=0.0 +2024-07-28 12:56:56,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=159421.33333333334, ans=0.0 +2024-07-28 12:56:57,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=159421.33333333334, ans=0.125 +2024-07-28 12:57:03,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159434.66666666666, ans=0.125 +2024-07-28 12:57:06,527 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.638e+01 6.257e+01 7.591e+01 1.588e+02, threshold=1.251e+02, percent-clipped=2.0 +2024-07-28 12:57:11,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159448.0, ans=0.1 +2024-07-28 12:57:13,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=159448.0, ans=0.0 +2024-07-28 12:57:16,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=159461.33333333334, ans=0.025 +2024-07-28 12:57:16,937 INFO [train.py:1114] (3/4) Epoch 12, batch 7150, loss[loss=0.1923, simple_loss=0.269, pruned_loss=0.05782, over 4676.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2747, pruned_loss=0.0504, over 937220.10 frames. ], batch size: 22, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:57:17,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=159461.33333333334, ans=0.125 +2024-07-28 12:57:23,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=159474.66666666666, ans=0.1 +2024-07-28 12:57:23,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=159474.66666666666, ans=10.0 +2024-07-28 12:57:51,492 INFO [train.py:1114] (3/4) Epoch 12, batch 7200, loss[loss=0.2309, simple_loss=0.3166, pruned_loss=0.0726, over 4801.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2756, pruned_loss=0.05106, over 937356.22 frames. 
], batch size: 15, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:58:00,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=159541.33333333334, ans=0.125 +2024-07-28 12:58:13,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=159554.66666666666, ans=6.0 +2024-07-28 12:58:18,798 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.659e+01 5.813e+01 6.361e+01 7.395e+01 9.715e+01, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 12:58:20,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=159568.0, ans=0.125 +2024-07-28 12:58:23,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=159581.33333333334, ans=0.2 +2024-07-28 12:58:26,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=159581.33333333334, ans=0.2 +2024-07-28 12:58:29,559 INFO [train.py:1114] (3/4) Epoch 12, batch 7250, loss[loss=0.1809, simple_loss=0.2555, pruned_loss=0.05312, over 4834.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2757, pruned_loss=0.05096, over 939166.74 frames. ], batch size: 12, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:58:32,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=159594.66666666666, ans=0.2 +2024-07-28 12:58:44,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=159621.33333333334, ans=0.0 +2024-07-28 12:58:56,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159648.0, ans=0.125 +2024-07-28 12:59:00,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159648.0, ans=0.125 +2024-07-28 12:59:02,436 INFO [train.py:1114] (3/4) Epoch 12, batch 7300, loss[loss=0.1679, simple_loss=0.2621, pruned_loss=0.03682, over 4856.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2756, pruned_loss=0.0504, over 939728.38 frames. ], batch size: 12, lr: 6.20e-03, grad_scale: 32.0 +2024-07-28 12:59:08,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=159674.66666666666, ans=0.025 +2024-07-28 12:59:10,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=159674.66666666666, ans=0.125 +2024-07-28 12:59:18,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=159688.0, ans=0.125 +2024-07-28 12:59:27,309 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.393e+01 5.789e+01 6.409e+01 1.096e+02, threshold=1.158e+02, percent-clipped=0.0 +2024-07-28 12:59:29,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=159701.33333333334, ans=0.125 +2024-07-28 12:59:37,712 INFO [train.py:1114] (3/4) Epoch 12, batch 7350, loss[loss=0.1884, simple_loss=0.2818, pruned_loss=0.04749, over 4640.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2775, pruned_loss=0.051, over 939142.67 frames. 
], batch size: 12, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 12:59:37,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=159728.0, ans=0.125 +2024-07-28 12:59:40,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=159728.0, ans=0.0 +2024-07-28 12:59:53,420 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 12:59:55,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=159741.33333333334, ans=0.125 +2024-07-28 12:59:55,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=159741.33333333334, ans=0.125 +2024-07-28 12:59:56,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.32 vs. limit=15.0 +2024-07-28 12:59:56,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=159754.66666666666, ans=0.125 +2024-07-28 13:00:17,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=159781.33333333334, ans=0.125 +2024-07-28 13:00:23,180 INFO [train.py:1114] (3/4) Epoch 12, batch 7400, loss[loss=0.1774, simple_loss=0.2742, pruned_loss=0.04028, over 4696.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2773, pruned_loss=0.05055, over 940757.19 frames. ], batch size: 13, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:00:25,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=159794.66666666666, ans=0.125 +2024-07-28 13:00:48,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.45 vs. limit=15.0 +2024-07-28 13:00:51,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=159821.33333333334, ans=0.1 +2024-07-28 13:00:55,281 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.910e+01 6.477e+01 7.704e+01 1.281e+02, threshold=1.295e+02, percent-clipped=1.0 +2024-07-28 13:00:56,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=159834.66666666666, ans=0.0 +2024-07-28 13:00:56,649 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:01:03,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=159848.0, ans=0.125 +2024-07-28 13:01:05,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.32 vs. limit=15.0 +2024-07-28 13:01:05,633 INFO [train.py:1114] (3/4) Epoch 12, batch 7450, loss[loss=0.184, simple_loss=0.2688, pruned_loss=0.04956, over 4604.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2767, pruned_loss=0.05058, over 938405.85 frames. 
], batch size: 11, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:01:12,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=159874.66666666666, ans=0.125 +2024-07-28 13:01:19,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=159888.0, ans=0.95 +2024-07-28 13:01:20,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=159888.0, ans=0.0 +2024-07-28 13:01:27,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159901.33333333334, ans=0.125 +2024-07-28 13:01:32,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=159914.66666666666, ans=0.025 +2024-07-28 13:01:38,268 INFO [train.py:1114] (3/4) Epoch 12, batch 7500, loss[loss=0.2415, simple_loss=0.3226, pruned_loss=0.08019, over 3241.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2778, pruned_loss=0.05129, over 936670.90 frames. ], batch size: 35, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:02:09,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=159941.33333333334, ans=0.0 +2024-07-28 13:02:13,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=159954.66666666666, ans=0.125 +2024-07-28 13:02:16,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=159954.66666666666, ans=0.125 +2024-07-28 13:02:17,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.95 vs. limit=15.0 +2024-07-28 13:05:14,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=159968.0, ans=0.2 +2024-07-28 13:05:16,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=159968.0, ans=0.05 +2024-07-28 13:05:16,501 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.752e+01 6.223e+01 6.904e+01 1.181e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 13:05:21,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=159981.33333333334, ans=0.125 +2024-07-28 13:05:22,093 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.11 vs. limit=22.5 +2024-07-28 13:05:26,949 INFO [train.py:1114] (3/4) Epoch 12, batch 7550, loss[loss=0.1882, simple_loss=0.2798, pruned_loss=0.04827, over 4613.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2795, pruned_loss=0.05176, over 936251.51 frames. 
], batch size: 17, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:05:36,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=159994.66666666666, ans=0.125 +2024-07-28 13:05:38,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=160008.0, ans=0.125 +2024-07-28 13:05:51,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=160034.66666666666, ans=0.2 +2024-07-28 13:06:04,266 INFO [train.py:1114] (3/4) Epoch 12, batch 7600, loss[loss=0.1907, simple_loss=0.2892, pruned_loss=0.0461, over 4805.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2786, pruned_loss=0.0511, over 937738.88 frames. ], batch size: 14, lr: 6.19e-03, grad_scale: 32.0 +2024-07-28 13:06:04,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=160061.33333333334, ans=0.0 +2024-07-28 13:06:08,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160061.33333333334, ans=0.125 +2024-07-28 13:06:12,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=160074.66666666666, ans=0.125 +2024-07-28 13:06:13,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=160074.66666666666, ans=0.2 +2024-07-28 13:06:22,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.74 vs. limit=15.0 +2024-07-28 13:06:26,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.580e+01 6.028e+01 7.060e+01 1.012e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 13:06:36,732 INFO [train.py:1114] (3/4) Epoch 12, batch 7650, loss[loss=0.1526, simple_loss=0.2338, pruned_loss=0.03576, over 4949.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2781, pruned_loss=0.05112, over 937008.98 frames. ], batch size: 12, lr: 6.19e-03, grad_scale: 64.0 +2024-07-28 13:06:48,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=160141.33333333334, ans=0.125 +2024-07-28 13:06:51,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=160154.66666666666, ans=0.125 +2024-07-28 13:06:52,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=160154.66666666666, ans=0.025 +2024-07-28 13:07:07,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=160181.33333333334, ans=0.0 +2024-07-28 13:07:10,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=160194.66666666666, ans=0.025 +2024-07-28 13:07:10,993 INFO [train.py:1114] (3/4) Epoch 12, batch 7700, loss[loss=0.1916, simple_loss=0.2762, pruned_loss=0.0535, over 4689.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2785, pruned_loss=0.05125, over 935205.28 frames. 
], batch size: 13, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:07:14,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.87 vs. limit=15.0 +2024-07-28 13:07:18,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.35 vs. limit=15.0 +2024-07-28 13:07:23,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160221.33333333334, ans=0.1 +2024-07-28 13:07:32,959 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.585e+01 6.116e+01 6.946e+01 9.555e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 13:07:43,367 INFO [train.py:1114] (3/4) Epoch 12, batch 7750, loss[loss=0.1801, simple_loss=0.2705, pruned_loss=0.04483, over 4922.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.28, pruned_loss=0.05219, over 936202.14 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:07:48,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=160261.33333333334, ans=0.09899494936611666 +2024-07-28 13:07:56,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=160288.0, ans=0.025 +2024-07-28 13:07:56,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.56 vs. limit=6.0 +2024-07-28 13:08:03,985 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.94 vs. limit=22.5 +2024-07-28 13:08:05,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=160301.33333333334, ans=0.125 +2024-07-28 13:08:16,041 INFO [train.py:1114] (3/4) Epoch 12, batch 7800, loss[loss=0.19, simple_loss=0.2978, pruned_loss=0.04112, over 4669.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2802, pruned_loss=0.05168, over 937887.42 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 64.0 +2024-07-28 13:08:38,406 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.545e+01 6.012e+01 6.981e+01 9.442e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 13:08:42,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.42 vs. limit=15.0 +2024-07-28 13:08:47,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160381.33333333334, ans=0.1 +2024-07-28 13:08:48,346 INFO [train.py:1114] (3/4) Epoch 12, batch 7850, loss[loss=0.1437, simple_loss=0.2238, pruned_loss=0.03176, over 4568.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2802, pruned_loss=0.05185, over 936295.08 frames. 
], batch size: 10, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:00,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=160408.0, ans=0.0 +2024-07-28 13:09:01,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=160421.33333333334, ans=0.125 +2024-07-28 13:09:02,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=160421.33333333334, ans=0.02 +2024-07-28 13:09:20,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=160448.0, ans=0.125 +2024-07-28 13:09:21,155 INFO [train.py:1114] (3/4) Epoch 12, batch 7900, loss[loss=0.2159, simple_loss=0.2992, pruned_loss=0.06636, over 4878.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2817, pruned_loss=0.05279, over 934274.12 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:09:26,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.out_whiten.whitening_limit, batch_count=160461.33333333334, ans=8.0 +2024-07-28 13:09:30,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=160474.66666666666, ans=0.125 +2024-07-28 13:09:31,545 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.85 vs. limit=10.0 +2024-07-28 13:09:37,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=160488.0, ans=0.1 +2024-07-28 13:09:37,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=160488.0, ans=0.125 +2024-07-28 13:09:41,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.19 vs. limit=15.0 +2024-07-28 13:09:43,189 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.515e+01 6.026e+01 6.730e+01 9.606e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 13:09:46,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=160514.66666666666, ans=0.0 +2024-07-28 13:09:51,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160514.66666666666, ans=0.0 +2024-07-28 13:09:53,234 INFO [train.py:1114] (3/4) Epoch 12, batch 7950, loss[loss=0.2446, simple_loss=0.3198, pruned_loss=0.08473, over 3628.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2811, pruned_loss=0.05234, over 936123.30 frames. ], batch size: 35, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:10:01,737 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.42 vs. limit=22.5 +2024-07-28 13:10:17,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=160568.0, ans=0.025 +2024-07-28 13:10:20,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.25 vs. 
limit=15.0 +2024-07-28 13:10:25,953 INFO [train.py:1114] (3/4) Epoch 12, batch 8000, loss[loss=0.1627, simple_loss=0.243, pruned_loss=0.04125, over 4601.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2785, pruned_loss=0.0512, over 934885.72 frames. ], batch size: 11, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:10:34,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.72 vs. limit=15.0 +2024-07-28 13:10:35,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160608.0, ans=0.1 +2024-07-28 13:10:50,018 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.959e+01 6.918e+01 8.297e+01 1.204e+02, threshold=1.384e+02, percent-clipped=0.0 +2024-07-28 13:11:00,330 INFO [train.py:1114] (3/4) Epoch 12, batch 8050, loss[loss=0.1918, simple_loss=0.2981, pruned_loss=0.04273, over 4811.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2789, pruned_loss=0.05147, over 934449.37 frames. ], batch size: 14, lr: 6.18e-03, grad_scale: 32.0 +2024-07-28 13:11:01,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160661.33333333334, ans=0.1 +2024-07-28 13:11:05,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=160661.33333333334, ans=0.2 +2024-07-28 13:11:05,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=160661.33333333334, ans=0.2 +2024-07-28 13:11:07,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=160674.66666666666, ans=0.1 +2024-07-28 13:11:09,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=160674.66666666666, ans=0.025 +2024-07-28 13:11:09,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160674.66666666666, ans=0.125 +2024-07-28 13:11:31,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=160714.66666666666, ans=0.125 +2024-07-28 13:11:32,802 INFO [train.py:1114] (3/4) Epoch 12, batch 8100, loss[loss=0.2727, simple_loss=0.3569, pruned_loss=0.09427, over 4790.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2798, pruned_loss=0.05152, over 934247.12 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:11:34,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=160728.0, ans=0.125 +2024-07-28 13:11:38,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=22.5 +2024-07-28 13:11:45,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.21 vs. 
limit=15.0 +2024-07-28 13:11:45,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=160754.66666666666, ans=0.2 +2024-07-28 13:11:55,384 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.710e+01 6.581e+01 7.221e+01 1.063e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-28 13:12:06,451 INFO [train.py:1114] (3/4) Epoch 12, batch 8150, loss[loss=0.1806, simple_loss=0.283, pruned_loss=0.03908, over 4813.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2785, pruned_loss=0.05096, over 937783.69 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:06,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=160794.66666666666, ans=0.2 +2024-07-28 13:12:17,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=160808.0, ans=0.1 +2024-07-28 13:12:17,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=160808.0, ans=0.125 +2024-07-28 13:12:18,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=160808.0, ans=0.0 +2024-07-28 13:12:26,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=160834.66666666666, ans=0.2 +2024-07-28 13:12:26,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=160834.66666666666, ans=0.07 +2024-07-28 13:12:27,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=160834.66666666666, ans=0.0 +2024-07-28 13:12:39,521 INFO [train.py:1114] (3/4) Epoch 12, batch 8200, loss[loss=0.183, simple_loss=0.266, pruned_loss=0.04993, over 4793.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2799, pruned_loss=0.05144, over 939274.08 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:12:40,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=160861.33333333334, ans=0.0 +2024-07-28 13:12:41,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=160861.33333333334, ans=10.0 +2024-07-28 13:12:50,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=160874.66666666666, ans=0.125 +2024-07-28 13:12:51,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=160874.66666666666, ans=0.0 +2024-07-28 13:13:03,300 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.924e+01 5.545e+01 6.398e+01 7.080e+01 1.151e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:13:11,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=160914.66666666666, ans=0.125 +2024-07-28 13:13:12,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=160928.0, ans=0.0 +2024-07-28 13:13:12,842 INFO [train.py:1114] (3/4) Epoch 12, batch 8250, loss[loss=0.176, simple_loss=0.2677, pruned_loss=0.04218, over 4891.00 frames. 
], tot_loss[loss=0.1915, simple_loss=0.2797, pruned_loss=0.05159, over 939435.25 frames. ], batch size: 13, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:22,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=160941.33333333334, ans=0.05 +2024-07-28 13:13:29,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=160954.66666666666, ans=0.2 +2024-07-28 13:13:35,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=160968.0, ans=0.125 +2024-07-28 13:13:36,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=12.0 +2024-07-28 13:13:45,321 INFO [train.py:1114] (3/4) Epoch 12, batch 8300, loss[loss=0.185, simple_loss=0.2761, pruned_loss=0.0469, over 4886.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2808, pruned_loss=0.05242, over 939275.75 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:13:53,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=161008.0, ans=0.0 +2024-07-28 13:13:53,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161008.0, ans=0.125 +2024-07-28 13:13:53,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=161008.0, ans=0.05 +2024-07-28 13:13:57,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=161021.33333333334, ans=0.0 +2024-07-28 13:14:05,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=161034.66666666666, ans=0.125 +2024-07-28 13:14:07,775 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.767e+01 6.201e+01 7.053e+01 1.187e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 13:14:17,547 INFO [train.py:1114] (3/4) Epoch 12, batch 8350, loss[loss=0.2485, simple_loss=0.3238, pruned_loss=0.0866, over 4810.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2804, pruned_loss=0.05181, over 941819.85 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:21,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=161061.33333333334, ans=0.2 +2024-07-28 13:14:30,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=161088.0, ans=0.07 +2024-07-28 13:14:30,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=161088.0, ans=0.2 +2024-07-28 13:14:33,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.18 vs. 
limit=22.5 +2024-07-28 13:14:35,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161088.0, ans=0.1 +2024-07-28 13:14:36,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=161101.33333333334, ans=0.2 +2024-07-28 13:14:39,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=161101.33333333334, ans=0.0 +2024-07-28 13:14:46,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.60 vs. limit=5.0 +2024-07-28 13:14:48,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161114.66666666666, ans=0.1 +2024-07-28 13:14:50,100 INFO [train.py:1114] (3/4) Epoch 12, batch 8400, loss[loss=0.1931, simple_loss=0.2726, pruned_loss=0.05686, over 4774.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2806, pruned_loss=0.05209, over 940279.57 frames. ], batch size: 12, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:14:53,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=161128.0, ans=15.0 +2024-07-28 13:14:57,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161141.33333333334, ans=0.125 +2024-07-28 13:15:03,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=161154.66666666666, ans=0.2 +2024-07-28 13:15:05,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.47 vs. limit=15.0 +2024-07-28 13:15:09,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.74 vs. limit=22.5 +2024-07-28 13:15:13,422 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.899e+01 6.443e+01 7.292e+01 9.298e+01, threshold=1.289e+02, percent-clipped=0.0 +2024-07-28 13:15:18,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=161181.33333333334, ans=0.0 +2024-07-28 13:15:23,165 INFO [train.py:1114] (3/4) Epoch 12, batch 8450, loss[loss=0.1828, simple_loss=0.2707, pruned_loss=0.04743, over 4801.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2808, pruned_loss=0.05211, over 938942.52 frames. ], batch size: 15, lr: 6.17e-03, grad_scale: 32.0 +2024-07-28 13:15:27,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161194.66666666666, ans=0.125 +2024-07-28 13:15:36,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=161221.33333333334, ans=0.0 +2024-07-28 13:15:37,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=161221.33333333334, ans=0.125 +2024-07-28 13:15:40,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=161221.33333333334, ans=0.125 +2024-07-28 13:15:55,158 INFO [train.py:1114] (3/4) Epoch 12, batch 8500, loss[loss=0.1464, simple_loss=0.244, pruned_loss=0.02434, over 4618.00 frames. 
], tot_loss[loss=0.1916, simple_loss=0.2799, pruned_loss=0.05169, over 938662.19 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:09,986 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:16:17,529 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.769e+01 6.331e+01 7.345e+01 1.019e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 13:16:25,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=161314.66666666666, ans=0.125 +2024-07-28 13:16:25,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=161314.66666666666, ans=0.125 +2024-07-28 13:16:27,216 INFO [train.py:1114] (3/4) Epoch 12, batch 8550, loss[loss=0.1849, simple_loss=0.2524, pruned_loss=0.05864, over 4809.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2798, pruned_loss=0.05184, over 939430.03 frames. ], batch size: 11, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:16:29,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=161328.0, ans=0.125 +2024-07-28 13:16:31,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=161328.0, ans=0.0 +2024-07-28 13:16:34,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 13:17:00,037 INFO [train.py:1114] (3/4) Epoch 12, batch 8600, loss[loss=0.1982, simple_loss=0.2937, pruned_loss=0.05136, over 4806.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2792, pruned_loss=0.05148, over 939137.40 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:21,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.61 vs. limit=15.0 +2024-07-28 13:17:23,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.734e+01 6.405e+01 7.244e+01 9.929e+01, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 13:17:26,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=161448.0, ans=0.2 +2024-07-28 13:17:31,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=161448.0, ans=0.125 +2024-07-28 13:17:32,674 INFO [train.py:1114] (3/4) Epoch 12, batch 8650, loss[loss=0.1842, simple_loss=0.282, pruned_loss=0.04322, over 4890.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2788, pruned_loss=0.05149, over 940575.27 frames. ], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:17:32,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=161461.33333333334, ans=0.125 +2024-07-28 13:17:33,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.00 vs. limit=6.0 +2024-07-28 13:17:39,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.08 vs. 
limit=6.0 +2024-07-28 13:17:42,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=161474.66666666666, ans=0.0 +2024-07-28 13:17:43,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=161474.66666666666, ans=0.0 +2024-07-28 13:17:44,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=161474.66666666666, ans=0.025 +2024-07-28 13:17:48,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161488.0, ans=0.1 +2024-07-28 13:18:01,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.65 vs. limit=15.0 +2024-07-28 13:18:03,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=161501.33333333334, ans=0.09899494936611666 +2024-07-28 13:18:05,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=161514.66666666666, ans=0.125 +2024-07-28 13:18:06,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=161514.66666666666, ans=0.125 +2024-07-28 13:18:11,212 INFO [train.py:1114] (3/4) Epoch 12, batch 8700, loss[loss=0.1972, simple_loss=0.294, pruned_loss=0.05015, over 4762.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2802, pruned_loss=0.05224, over 938268.35 frames. ], batch size: 13, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:18:15,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=161528.0, ans=0.125 +2024-07-28 13:18:21,982 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.32 vs. limit=15.0 +2024-07-28 13:18:23,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=161554.66666666666, ans=0.125 +2024-07-28 13:18:33,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=161568.0, ans=0.025 +2024-07-28 13:18:34,019 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.645e+01 6.105e+01 7.078e+01 1.033e+02, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 13:18:36,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161581.33333333334, ans=0.125 +2024-07-28 13:18:41,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=161581.33333333334, ans=0.5 +2024-07-28 13:18:43,673 INFO [train.py:1114] (3/4) Epoch 12, batch 8750, loss[loss=0.2029, simple_loss=0.2999, pruned_loss=0.05297, over 4684.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2799, pruned_loss=0.05197, over 936694.91 frames. 
], batch size: 15, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:18:49,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161608.0, ans=0.1 +2024-07-28 13:18:55,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.75 vs. limit=6.0 +2024-07-28 13:18:58,026 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.97 vs. limit=15.0 +2024-07-28 13:19:03,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=161634.66666666666, ans=0.125 +2024-07-28 13:19:10,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161648.0, ans=0.125 +2024-07-28 13:19:16,560 INFO [train.py:1114] (3/4) Epoch 12, batch 8800, loss[loss=0.1827, simple_loss=0.276, pruned_loss=0.04469, over 4936.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2806, pruned_loss=0.0522, over 937722.43 frames. ], batch size: 14, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:20,124 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.63 vs. limit=22.5 +2024-07-28 13:19:21,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=161661.33333333334, ans=0.1 +2024-07-28 13:19:30,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=161688.0, ans=0.5 +2024-07-28 13:19:37,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=161701.33333333334, ans=0.125 +2024-07-28 13:19:40,339 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.675e+01 6.216e+01 7.145e+01 9.386e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:19:45,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=161714.66666666666, ans=0.125 +2024-07-28 13:19:46,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.01 vs. limit=15.0 +2024-07-28 13:19:50,059 INFO [train.py:1114] (3/4) Epoch 12, batch 8850, loss[loss=0.1796, simple_loss=0.2808, pruned_loss=0.0392, over 4452.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2799, pruned_loss=0.0525, over 931649.64 frames. 
], batch size: 21, lr: 6.16e-03, grad_scale: 32.0 +2024-07-28 13:19:50,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=161728.0, ans=0.0 +2024-07-28 13:19:53,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=161728.0, ans=0.2 +2024-07-28 13:19:56,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=161741.33333333334, ans=0.025 +2024-07-28 13:19:58,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=161741.33333333334, ans=0.125 +2024-07-28 13:20:02,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=161741.33333333334, ans=0.2 +2024-07-28 13:20:17,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=161781.33333333334, ans=0.125 +2024-07-28 13:20:19,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=161781.33333333334, ans=0.2 +2024-07-28 13:20:23,565 INFO [train.py:1114] (3/4) Epoch 12, batch 8900, loss[loss=0.1554, simple_loss=0.2403, pruned_loss=0.03527, over 4942.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2807, pruned_loss=0.05315, over 929775.50 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:20:28,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161794.66666666666, ans=0.1 +2024-07-28 13:20:30,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.93 vs. limit=22.5 +2024-07-28 13:20:37,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.28 vs. limit=22.5 +2024-07-28 13:20:44,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=161834.66666666666, ans=0.025 +2024-07-28 13:20:44,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.17 vs. limit=10.0 +2024-07-28 13:20:45,939 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 5.749e+01 6.497e+01 7.319e+01 1.057e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 13:20:46,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=161834.66666666666, ans=0.125 +2024-07-28 13:20:55,342 INFO [train.py:1114] (3/4) Epoch 12, batch 8950, loss[loss=0.1661, simple_loss=0.2625, pruned_loss=0.03484, over 4495.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2799, pruned_loss=0.05285, over 930649.82 frames. 
], batch size: 21, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:20:59,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161861.33333333334, ans=0.125 +2024-07-28 13:21:02,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=161874.66666666666, ans=0.125 +2024-07-28 13:21:26,982 INFO [train.py:1114] (3/4) Epoch 12, batch 9000, loss[loss=0.1661, simple_loss=0.2531, pruned_loss=0.03951, over 4638.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2777, pruned_loss=0.05154, over 933548.01 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:21:26,983 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 13:21:37,269 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([0.6222, 3.4064, 1.8225, 3.4372, 2.8635, 3.1235, 3.6464, 3.5238], + device='cuda:3') +2024-07-28 13:21:39,259 INFO [train.py:1146] (3/4) Epoch 12, validation: loss=0.1673, simple_loss=0.2713, pruned_loss=0.03166, over 944034.00 frames. +2024-07-28 13:21:39,259 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 13:21:39,700 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.58 vs. limit=22.5 +2024-07-28 13:21:40,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=161928.0, ans=0.125 +2024-07-28 13:21:47,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=161941.33333333334, ans=0.125 +2024-07-28 13:21:51,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=161941.33333333334, ans=0.125 +2024-07-28 13:21:55,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.74 vs. limit=12.0 +2024-07-28 13:22:00,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=161968.0, ans=0.0 +2024-07-28 13:22:02,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.972e+01 5.644e+01 6.027e+01 6.782e+01 9.850e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 13:22:06,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=161981.33333333334, ans=0.1 +2024-07-28 13:22:08,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=161981.33333333334, ans=0.125 +2024-07-28 13:22:11,781 INFO [train.py:1114] (3/4) Epoch 12, batch 9050, loss[loss=0.2003, simple_loss=0.2722, pruned_loss=0.06417, over 4513.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2763, pruned_loss=0.05126, over 934124.33 frames. ], batch size: 10, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:22:13,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.28 vs. 
limit=15.0 +2024-07-28 13:22:14,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=161994.66666666666, ans=0.0 +2024-07-28 13:22:18,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=162008.0, ans=0.125 +2024-07-28 13:22:23,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=162008.0, ans=0.125 +2024-07-28 13:22:27,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=162021.33333333334, ans=0.2 +2024-07-28 13:22:27,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=162021.33333333334, ans=0.0 +2024-07-28 13:22:43,279 INFO [train.py:1114] (3/4) Epoch 12, batch 9100, loss[loss=0.2123, simple_loss=0.3035, pruned_loss=0.06053, over 4932.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2769, pruned_loss=0.05107, over 936868.17 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:22:50,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.76 vs. limit=15.0 +2024-07-28 13:23:06,062 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.681e+01 6.344e+01 7.391e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 13:23:08,589 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:23:09,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162114.66666666666, ans=0.1 +2024-07-28 13:23:15,555 INFO [train.py:1114] (3/4) Epoch 12, batch 9150, loss[loss=0.1836, simple_loss=0.2754, pruned_loss=0.04586, over 4815.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2776, pruned_loss=0.05091, over 935404.25 frames. ], batch size: 14, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:23:21,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-28 13:23:21,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162141.33333333334, ans=0.125 +2024-07-28 13:23:35,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=162168.0, ans=10.0 +2024-07-28 13:23:39,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=162168.0, ans=0.125 +2024-07-28 13:23:47,216 INFO [train.py:1114] (3/4) Epoch 12, batch 9200, loss[loss=0.1986, simple_loss=0.2778, pruned_loss=0.05975, over 4840.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2772, pruned_loss=0.0507, over 937574.24 frames. ], batch size: 12, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:23:49,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.65 vs. limit=22.5 +2024-07-28 13:23:53,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.28 vs. 
limit=15.0 +2024-07-28 13:23:56,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=162208.0, ans=0.025 +2024-07-28 13:23:58,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=162208.0, ans=0.035 +2024-07-28 13:24:09,765 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.530e+01 6.301e+01 7.507e+01 1.119e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 13:24:19,319 INFO [train.py:1114] (3/4) Epoch 12, batch 9250, loss[loss=0.1668, simple_loss=0.2665, pruned_loss=0.03356, over 4637.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2765, pruned_loss=0.0505, over 938588.94 frames. ], batch size: 13, lr: 6.15e-03, grad_scale: 32.0 +2024-07-28 13:24:27,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. limit=15.0 +2024-07-28 13:24:45,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=162314.66666666666, ans=0.1 +2024-07-28 13:24:51,030 INFO [train.py:1114] (3/4) Epoch 12, batch 9300, loss[loss=0.1777, simple_loss=0.26, pruned_loss=0.04766, over 4788.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2758, pruned_loss=0.05019, over 938079.19 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:24:55,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=162328.0, ans=0.125 +2024-07-28 13:25:08,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.04 vs. limit=10.0 +2024-07-28 13:25:09,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.69 vs. limit=15.0 +2024-07-28 13:25:11,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162368.0, ans=0.1 +2024-07-28 13:25:13,004 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.254e+01 5.656e+01 6.395e+01 7.099e+01 1.199e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 13:25:16,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=162381.33333333334, ans=0.125 +2024-07-28 13:25:22,438 INFO [train.py:1114] (3/4) Epoch 12, batch 9350, loss[loss=0.181, simple_loss=0.2537, pruned_loss=0.05419, over 4794.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2772, pruned_loss=0.05091, over 934879.38 frames. 
], batch size: 11, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:25:23,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=162394.66666666666, ans=0.0 +2024-07-28 13:25:29,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=162408.0, ans=0.125 +2024-07-28 13:25:31,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=162408.0, ans=0.2 +2024-07-28 13:25:35,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=162421.33333333334, ans=0.125 +2024-07-28 13:25:38,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=162421.33333333334, ans=0.125 +2024-07-28 13:25:38,845 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.97 vs. limit=15.0 +2024-07-28 13:25:53,649 INFO [train.py:1114] (3/4) Epoch 12, batch 9400, loss[loss=0.1818, simple_loss=0.2727, pruned_loss=0.04542, over 4697.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2776, pruned_loss=0.05116, over 932690.54 frames. ], batch size: 13, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:26:03,979 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.65 vs. limit=15.0 +2024-07-28 13:26:05,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=162488.0, ans=0.125 +2024-07-28 13:26:15,656 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.567e+01 6.093e+01 7.292e+01 1.222e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 13:26:16,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=162501.33333333334, ans=0.125 +2024-07-28 13:26:19,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=162514.66666666666, ans=0.2 +2024-07-28 13:26:19,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-28 13:26:25,589 INFO [train.py:1114] (3/4) Epoch 12, batch 9450, loss[loss=0.1678, simple_loss=0.2564, pruned_loss=0.03957, over 4782.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2786, pruned_loss=0.0514, over 932453.42 frames. ], batch size: 11, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:26:37,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=162554.66666666666, ans=0.0 +2024-07-28 13:26:49,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.21 vs. limit=10.0 +2024-07-28 13:26:51,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=162581.33333333334, ans=0.0 +2024-07-28 13:26:56,292 INFO [train.py:1114] (3/4) Epoch 12, batch 9500, loss[loss=0.1862, simple_loss=0.2692, pruned_loss=0.05154, over 4701.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2781, pruned_loss=0.05096, over 934550.22 frames. 
], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:27:00,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.77 vs. limit=15.0 +2024-07-28 13:27:03,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=162608.0, ans=0.0 +2024-07-28 13:27:05,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=18.61 vs. limit=22.5 +2024-07-28 13:27:07,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=162621.33333333334, ans=0.125 +2024-07-28 13:27:15,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=162634.66666666666, ans=0.1 +2024-07-28 13:27:16,802 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.52 vs. limit=6.0 +2024-07-28 13:27:17,794 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.541e+01 6.151e+01 7.043e+01 9.368e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 13:27:19,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=162634.66666666666, ans=0.0 +2024-07-28 13:27:27,268 INFO [train.py:1114] (3/4) Epoch 12, batch 9550, loss[loss=0.1942, simple_loss=0.266, pruned_loss=0.06124, over 4770.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2791, pruned_loss=0.05154, over 931789.00 frames. ], batch size: 12, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:27:32,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=162661.33333333334, ans=0.125 +2024-07-28 13:27:35,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.17 vs. limit=6.0 +2024-07-28 13:27:48,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=162701.33333333334, ans=0.125 +2024-07-28 13:27:50,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=162701.33333333334, ans=0.2 +2024-07-28 13:27:59,583 INFO [train.py:1114] (3/4) Epoch 12, batch 9600, loss[loss=0.2964, simple_loss=0.3476, pruned_loss=0.1226, over 3228.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2785, pruned_loss=0.05099, over 930562.04 frames. 
], batch size: 35, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:28:00,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=162728.0, ans=0.125 +2024-07-28 13:28:10,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=162741.33333333334, ans=0.125 +2024-07-28 13:28:13,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=162754.66666666666, ans=0.125 +2024-07-28 13:28:19,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=162768.0, ans=0.125 +2024-07-28 13:28:22,486 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.832e+01 6.811e+01 8.204e+01 1.211e+02, threshold=1.362e+02, percent-clipped=0.0 +2024-07-28 13:28:26,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=162781.33333333334, ans=0.07 +2024-07-28 13:28:28,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=162781.33333333334, ans=0.09899494936611666 +2024-07-28 13:28:31,980 INFO [train.py:1114] (3/4) Epoch 12, batch 9650, loss[loss=0.209, simple_loss=0.3149, pruned_loss=0.05152, over 4840.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2798, pruned_loss=0.05136, over 926179.01 frames. ], batch size: 16, lr: 6.14e-03, grad_scale: 32.0 +2024-07-28 13:28:37,348 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.06 vs. limit=15.0 +2024-07-28 13:28:52,673 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.82 vs. limit=22.5 +2024-07-28 13:28:57,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=162848.0, ans=0.0 +2024-07-28 13:29:02,792 INFO [train.py:1114] (3/4) Epoch 12, batch 9700, loss[loss=0.2218, simple_loss=0.3005, pruned_loss=0.07159, over 4241.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.28, pruned_loss=0.05154, over 924062.53 frames. ], batch size: 25, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:29:07,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=162861.33333333334, ans=0.0 +2024-07-28 13:29:11,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=162874.66666666666, ans=0.0 +2024-07-28 13:29:25,017 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.692e+01 6.242e+01 7.537e+01 1.052e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 13:29:34,182 INFO [train.py:1114] (3/4) Epoch 12, batch 9750, loss[loss=0.1923, simple_loss=0.2863, pruned_loss=0.04917, over 4675.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2796, pruned_loss=0.05163, over 924633.76 frames. 
], batch size: 15, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:29:49,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=162954.66666666666, ans=0.0 +2024-07-28 13:29:52,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=162968.0, ans=0.0 +2024-07-28 13:29:55,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=162968.0, ans=0.125 +2024-07-28 13:29:57,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.45 vs. limit=15.0 +2024-07-28 13:30:05,286 INFO [train.py:1114] (3/4) Epoch 12, batch 9800, loss[loss=0.1719, simple_loss=0.2508, pruned_loss=0.04654, over 4705.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.278, pruned_loss=0.05136, over 924317.35 frames. ], batch size: 12, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:30:07,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=162994.66666666666, ans=0.0 +2024-07-28 13:30:15,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.05 vs. limit=15.0 +2024-07-28 13:30:20,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=163021.33333333334, ans=0.0 +2024-07-28 13:30:23,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=163034.66666666666, ans=0.125 +2024-07-28 13:30:26,611 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.828e+01 6.429e+01 7.275e+01 1.013e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 13:30:26,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163034.66666666666, ans=0.1 +2024-07-28 13:30:36,211 INFO [train.py:1114] (3/4) Epoch 12, batch 9850, loss[loss=0.2298, simple_loss=0.3146, pruned_loss=0.07248, over 4902.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2789, pruned_loss=0.05194, over 926950.83 frames. 
], batch size: 15, lr: 6.13e-03, grad_scale: 64.0 +2024-07-28 13:30:40,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=163061.33333333334, ans=0.2 +2024-07-28 13:30:41,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=163061.33333333334, ans=0.125 +2024-07-28 13:30:44,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=163074.66666666666, ans=0.125 +2024-07-28 13:30:49,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163088.0, ans=0.1 +2024-07-28 13:30:54,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=163101.33333333334, ans=0.025 +2024-07-28 13:30:59,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=163101.33333333334, ans=0.0 +2024-07-28 13:31:05,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=15.0 +2024-07-28 13:31:06,849 INFO [train.py:1114] (3/4) Epoch 12, batch 9900, loss[loss=0.1664, simple_loss=0.258, pruned_loss=0.03741, over 4852.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2797, pruned_loss=0.05237, over 926138.51 frames. ], batch size: 16, lr: 6.13e-03, grad_scale: 64.0 +2024-07-28 13:31:09,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163128.0, ans=0.125 +2024-07-28 13:31:11,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=163128.0, ans=0.2 +2024-07-28 13:31:13,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=163141.33333333334, ans=0.0 +2024-07-28 13:31:15,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=163141.33333333334, ans=0.125 +2024-07-28 13:31:29,352 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.746e+01 5.809e+01 6.400e+01 7.583e+01 1.176e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-28 13:31:35,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163181.33333333334, ans=0.1 +2024-07-28 13:31:38,447 INFO [train.py:1114] (3/4) Epoch 12, batch 9950, loss[loss=0.1598, simple_loss=0.2361, pruned_loss=0.04177, over 4514.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2785, pruned_loss=0.05205, over 928251.94 frames. 
], batch size: 10, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:31:39,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=163194.66666666666, ans=0.2 +2024-07-28 13:31:43,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=163194.66666666666, ans=0.125 +2024-07-28 13:31:47,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=163208.0, ans=0.1 +2024-07-28 13:31:49,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.06 vs. limit=22.5 +2024-07-28 13:31:53,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=163221.33333333334, ans=0.0 +2024-07-28 13:31:54,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163221.33333333334, ans=0.1 +2024-07-28 13:31:58,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=163234.66666666666, ans=0.125 +2024-07-28 13:32:09,638 INFO [train.py:1114] (3/4) Epoch 12, batch 10000, loss[loss=0.2197, simple_loss=0.3105, pruned_loss=0.06445, over 4598.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2825, pruned_loss=0.05376, over 925481.06 frames. ], batch size: 16, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:32:15,873 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:32:22,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=163288.0, ans=0.125 +2024-07-28 13:32:24,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.51 vs. limit=10.0 +2024-07-28 13:32:27,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163301.33333333334, ans=0.1 +2024-07-28 13:32:31,310 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.999e+01 5.883e+01 6.345e+01 7.076e+01 8.600e+01, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 13:32:31,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=163301.33333333334, ans=0.07 +2024-07-28 13:32:41,184 INFO [train.py:1114] (3/4) Epoch 12, batch 10050, loss[loss=0.2231, simple_loss=0.3006, pruned_loss=0.0728, over 3563.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2858, pruned_loss=0.05544, over 912815.38 frames. ], batch size: 35, lr: 6.13e-03, grad_scale: 32.0 +2024-07-28 13:32:42,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163328.0, ans=0.1 +2024-07-28 13:32:50,330 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.53 vs. 
limit=6.0 +2024-07-28 13:33:02,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=163368.0, ans=0.125 +2024-07-28 13:33:03,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=23.17 vs. limit=22.5 +2024-07-28 13:33:11,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163381.33333333334, ans=0.1 +2024-07-28 13:33:12,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.42 vs. limit=15.0 +2024-07-28 13:33:14,551 INFO [train.py:1114] (3/4) Epoch 12, batch 10100, loss[loss=0.2285, simple_loss=0.3046, pruned_loss=0.07622, over 3364.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2911, pruned_loss=0.06077, over 859950.77 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:33:16,324 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.02 vs. limit=5.0 +2024-07-28 13:33:20,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=163408.0, ans=0.07 +2024-07-28 13:33:22,127 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.52 vs. limit=22.5 +2024-07-28 13:33:26,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=163408.0, ans=0.0 +2024-07-28 13:33:31,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=163421.33333333334, ans=0.125 +2024-07-28 13:33:34,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=163434.66666666666, ans=0.0 +2024-07-28 13:33:37,793 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.859e+01 6.715e+01 7.355e+01 7.791e+01 1.006e+02, threshold=1.471e+02, percent-clipped=0.0 +2024-07-28 13:33:39,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.65 vs. limit=22.5 +2024-07-28 13:33:45,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=13.02 vs. limit=12.0 +2024-07-28 13:33:46,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163461.33333333334, ans=0.1 +2024-07-28 13:33:46,744 INFO [train.py:1114] (3/4) Epoch 12, batch 10150, loss[loss=0.2563, simple_loss=0.3287, pruned_loss=0.09192, over 3265.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2943, pruned_loss=0.06458, over 817813.62 frames. ], batch size: 36, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:33:52,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.27 vs. 
limit=15.0 +2024-07-28 13:33:57,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=163474.66666666666, ans=0.1 +2024-07-28 13:33:58,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=163474.66666666666, ans=0.0 +2024-07-28 13:34:14,548 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.81 vs. limit=15.0 +2024-07-28 13:34:18,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163528.0, ans=0.125 +2024-07-28 13:34:18,678 INFO [train.py:1114] (3/4) Epoch 12, batch 10200, loss[loss=0.2356, simple_loss=0.3062, pruned_loss=0.08253, over 3477.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2967, pruned_loss=0.06755, over 786573.10 frames. ], batch size: 35, lr: 6.12e-03, grad_scale: 32.0 +2024-07-28 13:34:21,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=163528.0, ans=0.125 +2024-07-28 13:34:26,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=163541.33333333334, ans=0.0 +2024-07-28 13:34:27,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=10.03 vs. limit=12.0 +2024-07-28 13:35:14,739 INFO [train.py:1114] (3/4) Epoch 13, batch 0, loss[loss=0.1565, simple_loss=0.2477, pruned_loss=0.03266, over 4851.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2477, pruned_loss=0.03266, over 4851.00 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:35:14,740 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 13:35:20,209 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.8109, 5.1485, 4.9807, 5.5616], device='cuda:3') +2024-07-28 13:35:26,207 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1689, simple_loss=0.2745, pruned_loss=0.03167, over 944034.00 frames. +2024-07-28 13:35:26,208 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 13:35:26,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:26,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:27,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=163557.33333333334, ans=0.125 +2024-07-28 13:35:27,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=9.85 vs. limit=10.0 +2024-07-28 13:35:34,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=13.25 vs. 
limit=15.0 +2024-07-28 13:35:35,872 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.887e+01 6.365e+01 6.777e+01 7.332e+01 9.562e+01, threshold=1.355e+02, percent-clipped=0.0 +2024-07-28 13:35:43,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=163584.0, ans=0.0 +2024-07-28 13:35:48,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=163597.33333333334, ans=0.025 +2024-07-28 13:35:49,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=163597.33333333334, ans=0.125 +2024-07-28 13:35:59,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=163610.66666666666, ans=0.025 +2024-07-28 13:36:00,567 INFO [train.py:1114] (3/4) Epoch 13, batch 50, loss[loss=0.1698, simple_loss=0.2562, pruned_loss=0.04167, over 4603.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2793, pruned_loss=0.05246, over 206893.56 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:36:11,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=163637.33333333334, ans=0.2 +2024-07-28 13:36:12,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=163637.33333333334, ans=0.0 +2024-07-28 13:36:16,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=163650.66666666666, ans=0.125 +2024-07-28 13:36:38,724 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.72 vs. limit=15.0 +2024-07-28 13:36:42,397 INFO [train.py:1114] (3/4) Epoch 13, batch 100, loss[loss=0.1747, simple_loss=0.2534, pruned_loss=0.04796, over 4640.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2812, pruned_loss=0.05146, over 366025.23 frames. ], batch size: 12, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:36:42,812 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.94 vs. limit=15.0 +2024-07-28 13:36:45,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=163690.66666666666, ans=0.0 +2024-07-28 13:36:45,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163690.66666666666, ans=0.125 +2024-07-28 13:36:51,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=163704.0, ans=0.0 +2024-07-28 13:36:51,942 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.407e+01 6.133e+01 6.720e+01 8.973e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 13:36:53,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=163704.0, ans=0.2 +2024-07-28 13:36:54,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.87 vs. 
limit=22.5 +2024-07-28 13:36:59,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.32 vs. limit=15.0 +2024-07-28 13:37:10,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163730.66666666666, ans=0.1 +2024-07-28 13:37:11,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=163730.66666666666, ans=0.125 +2024-07-28 13:37:20,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=163744.0, ans=0.2 +2024-07-28 13:37:23,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=163744.0, ans=0.0 +2024-07-28 13:37:28,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=163744.0, ans=0.125 +2024-07-28 13:37:32,110 INFO [train.py:1114] (3/4) Epoch 13, batch 150, loss[loss=0.1536, simple_loss=0.2414, pruned_loss=0.03293, over 4607.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2797, pruned_loss=0.05049, over 494236.65 frames. ], batch size: 11, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:37:32,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=163757.33333333334, ans=0.0 +2024-07-28 13:37:48,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=163784.0, ans=0.125 +2024-07-28 13:37:52,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=163797.33333333334, ans=0.0 +2024-07-28 13:38:05,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.46 vs. limit=15.0 +2024-07-28 13:38:08,236 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:38:08,723 INFO [train.py:1114] (3/4) Epoch 13, batch 200, loss[loss=0.1867, simple_loss=0.2726, pruned_loss=0.05041, over 4513.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2771, pruned_loss=0.04952, over 593580.98 frames. ], batch size: 21, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:38:12,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=163824.0, ans=0.0 +2024-07-28 13:38:13,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=163824.0, ans=0.1 +2024-07-28 13:38:18,039 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.615e+01 6.251e+01 7.683e+01 1.063e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-28 13:38:19,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.79 vs. 
limit=15.0 +2024-07-28 13:38:20,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=163837.33333333334, ans=0.0 +2024-07-28 13:38:22,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=163850.66666666666, ans=0.2 +2024-07-28 13:38:25,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-28 13:38:40,238 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:38:42,111 INFO [train.py:1114] (3/4) Epoch 13, batch 250, loss[loss=0.1919, simple_loss=0.28, pruned_loss=0.05191, over 4662.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2776, pruned_loss=0.04963, over 670316.24 frames. ], batch size: 16, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:38:46,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=163890.66666666666, ans=0.2 +2024-07-28 13:38:47,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=163890.66666666666, ans=0.125 +2024-07-28 13:39:01,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=163930.66666666666, ans=0.0 +2024-07-28 13:39:01,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=12.0 +2024-07-28 13:39:05,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=163930.66666666666, ans=0.0 +2024-07-28 13:39:07,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=163930.66666666666, ans=0.125 +2024-07-28 13:39:09,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=163944.0, ans=0.0 +2024-07-28 13:39:16,247 INFO [train.py:1114] (3/4) Epoch 13, batch 300, loss[loss=0.1776, simple_loss=0.2807, pruned_loss=0.03722, over 4795.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2772, pruned_loss=0.04909, over 729597.31 frames. ], batch size: 15, lr: 5.88e-03, grad_scale: 32.0 +2024-07-28 13:39:19,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=163957.33333333334, ans=0.125 +2024-07-28 13:39:25,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.595e+01 6.354e+01 7.540e+01 1.026e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 13:39:27,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=163970.66666666666, ans=0.2 +2024-07-28 13:39:28,868 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.52 vs. limit=15.0 +2024-07-28 13:39:47,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=164010.66666666666, ans=0.125 +2024-07-28 13:39:49,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.18 vs. 
limit=15.0 +2024-07-28 13:39:49,688 INFO [train.py:1114] (3/4) Epoch 13, batch 350, loss[loss=0.1431, simple_loss=0.2277, pruned_loss=0.02926, over 4941.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2779, pruned_loss=0.04901, over 776051.12 frames. ], batch size: 12, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:40:13,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=164064.0, ans=0.125 +2024-07-28 13:40:17,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=164077.33333333334, ans=0.2 +2024-07-28 13:40:24,303 INFO [train.py:1114] (3/4) Epoch 13, batch 400, loss[loss=0.2006, simple_loss=0.2957, pruned_loss=0.05274, over 4694.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2769, pruned_loss=0.04869, over 813518.97 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:40:26,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-07-28 13:40:28,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=164090.66666666666, ans=0.125 +2024-07-28 13:40:31,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=164104.0, ans=0.125 +2024-07-28 13:40:31,363 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.53 vs. limit=15.0 +2024-07-28 13:40:35,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=164104.0, ans=0.0 +2024-07-28 13:40:35,599 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.430e+01 5.754e+01 6.889e+01 9.909e+01, threshold=1.151e+02, percent-clipped=0.0 +2024-07-28 13:40:36,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=164104.0, ans=0.0 +2024-07-28 13:40:37,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164104.0, ans=0.125 +2024-07-28 13:40:47,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=164130.66666666666, ans=0.0 +2024-07-28 13:40:48,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=164130.66666666666, ans=0.125 +2024-07-28 13:40:51,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164130.66666666666, ans=0.125 +2024-07-28 13:40:59,516 INFO [train.py:1114] (3/4) Epoch 13, batch 450, loss[loss=0.187, simple_loss=0.282, pruned_loss=0.04598, over 4632.00 frames. ], tot_loss[loss=0.189, simple_loss=0.278, pruned_loss=0.05001, over 838907.71 frames. 
], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:41:10,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=164170.66666666666, ans=0.0 +2024-07-28 13:41:11,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164170.66666666666, ans=0.1 +2024-07-28 13:41:16,148 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:41:20,818 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:41:26,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=164210.66666666666, ans=0.2 +2024-07-28 13:41:32,428 INFO [train.py:1114] (3/4) Epoch 13, batch 500, loss[loss=0.1879, simple_loss=0.2895, pruned_loss=0.04319, over 4684.00 frames. ], tot_loss[loss=0.188, simple_loss=0.277, pruned_loss=0.04956, over 861100.63 frames. ], batch size: 15, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:41:37,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=164224.0, ans=0.125 +2024-07-28 13:41:41,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.521e+01 6.089e+01 6.841e+01 9.670e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 13:41:43,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=164237.33333333334, ans=0.0 +2024-07-28 13:41:50,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=164250.66666666666, ans=0.0 +2024-07-28 13:41:58,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=164277.33333333334, ans=0.0 +2024-07-28 13:42:06,176 INFO [train.py:1114] (3/4) Epoch 13, batch 550, loss[loss=0.221, simple_loss=0.3189, pruned_loss=0.06161, over 4632.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.277, pruned_loss=0.04971, over 877700.62 frames. ], batch size: 17, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:42:08,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=164290.66666666666, ans=0.025 +2024-07-28 13:42:20,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=164317.33333333334, ans=0.0 +2024-07-28 13:42:27,033 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.46 vs. limit=15.0 +2024-07-28 13:42:30,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=164330.66666666666, ans=0.025 +2024-07-28 13:42:30,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=164330.66666666666, ans=0.0 +2024-07-28 13:42:39,324 INFO [train.py:1114] (3/4) Epoch 13, batch 600, loss[loss=0.2016, simple_loss=0.2887, pruned_loss=0.05726, over 4640.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2772, pruned_loss=0.04978, over 892347.16 frames. 
], batch size: 16, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:42:41,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=164357.33333333334, ans=0.025 +2024-07-28 13:42:48,629 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.455e+01 5.528e+01 6.337e+01 7.273e+01 1.055e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 13:42:49,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=164370.66666666666, ans=0.04949747468305833 +2024-07-28 13:42:51,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.66 vs. limit=15.0 +2024-07-28 13:42:53,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164384.0, ans=0.1 +2024-07-28 13:42:55,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=5.53 vs. limit=15.0 +2024-07-28 13:43:00,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=164397.33333333334, ans=0.5 +2024-07-28 13:43:02,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=164397.33333333334, ans=0.1 +2024-07-28 13:43:19,287 INFO [train.py:1114] (3/4) Epoch 13, batch 650, loss[loss=0.1677, simple_loss=0.2615, pruned_loss=0.03693, over 4762.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2769, pruned_loss=0.04968, over 903930.23 frames. ], batch size: 13, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:43:19,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=164424.0, ans=0.025 +2024-07-28 13:43:23,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=164424.0, ans=0.125 +2024-07-28 13:43:35,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.51 vs. limit=15.0 +2024-07-28 13:43:39,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=164450.66666666666, ans=0.0 +2024-07-28 13:43:41,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-07-28 13:43:51,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164477.33333333334, ans=0.1 +2024-07-28 13:43:52,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=164477.33333333334, ans=0.0 +2024-07-28 13:43:55,023 INFO [train.py:1114] (3/4) Epoch 13, batch 700, loss[loss=0.1721, simple_loss=0.2586, pruned_loss=0.04285, over 4634.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.277, pruned_loss=0.04985, over 911940.77 frames. 
], batch size: 12, lr: 5.87e-03, grad_scale: 32.0 +2024-07-28 13:44:04,379 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.621e+01 6.058e+01 7.095e+01 1.199e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:44:12,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=164504.0, ans=0.1 +2024-07-28 13:44:15,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=164517.33333333334, ans=0.125 +2024-07-28 13:44:21,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=164517.33333333334, ans=0.125 +2024-07-28 13:44:31,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=164544.0, ans=0.5 +2024-07-28 13:44:38,232 INFO [train.py:1114] (3/4) Epoch 13, batch 750, loss[loss=0.1572, simple_loss=0.2503, pruned_loss=0.0321, over 4692.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2751, pruned_loss=0.04887, over 918438.67 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:44:48,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=164570.66666666666, ans=0.2 +2024-07-28 13:44:57,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=164584.0, ans=0.125 +2024-07-28 13:45:05,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=164597.33333333334, ans=0.125 +2024-07-28 13:45:05,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.01 vs. limit=12.0 +2024-07-28 13:45:08,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=164610.66666666666, ans=0.125 +2024-07-28 13:45:13,493 INFO [train.py:1114] (3/4) Epoch 13, batch 800, loss[loss=0.2008, simple_loss=0.2911, pruned_loss=0.05528, over 4861.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2754, pruned_loss=0.04907, over 923371.55 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:45:16,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.41 vs. limit=15.0 +2024-07-28 13:45:16,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=164624.0, ans=0.07 +2024-07-28 13:45:22,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.403e+01 5.509e+01 5.892e+01 6.560e+01 1.053e+02, threshold=1.178e+02, percent-clipped=0.0 +2024-07-28 13:45:42,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.65 vs. 
limit=22.5 +2024-07-28 13:45:53,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=164664.0, ans=0.125 +2024-07-28 13:45:54,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=164677.33333333334, ans=0.0 +2024-07-28 13:46:00,992 INFO [train.py:1114] (3/4) Epoch 13, batch 850, loss[loss=0.2191, simple_loss=0.3094, pruned_loss=0.06434, over 4660.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2754, pruned_loss=0.04939, over 927787.64 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:46:05,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164690.66666666666, ans=0.1 +2024-07-28 13:46:06,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=164690.66666666666, ans=0.0 +2024-07-28 13:46:09,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=164704.0, ans=0.2 +2024-07-28 13:46:28,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=164717.33333333334, ans=0.125 +2024-07-28 13:47:04,275 INFO [train.py:1114] (3/4) Epoch 13, batch 900, loss[loss=0.166, simple_loss=0.2502, pruned_loss=0.04093, over 4862.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.276, pruned_loss=0.04981, over 928272.01 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:47:13,467 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.792e+01 6.438e+01 7.268e+01 1.084e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-28 13:47:22,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=164784.0, ans=0.07 +2024-07-28 13:47:29,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=164797.33333333334, ans=0.125 +2024-07-28 13:47:36,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.35 vs. limit=6.0 +2024-07-28 13:47:38,001 INFO [train.py:1114] (3/4) Epoch 13, batch 950, loss[loss=0.1582, simple_loss=0.2425, pruned_loss=0.03699, over 4779.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2765, pruned_loss=0.04997, over 930142.69 frames. ], batch size: 12, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:47:43,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=164824.0, ans=0.125 +2024-07-28 13:47:52,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=164850.66666666666, ans=0.025 +2024-07-28 13:47:57,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=164864.0, ans=0.0 +2024-07-28 13:48:08,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=164877.33333333334, ans=0.125 +2024-07-28 13:48:11,275 INFO [train.py:1114] (3/4) Epoch 13, batch 1000, loss[loss=0.1769, simple_loss=0.2602, pruned_loss=0.04677, over 4951.00 frames. 
], tot_loss[loss=0.1895, simple_loss=0.2775, pruned_loss=0.05075, over 929142.64 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:48:12,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=164890.66666666666, ans=0.2 +2024-07-28 13:48:17,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=164904.0, ans=0.0 +2024-07-28 13:48:19,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=164904.0, ans=0.1 +2024-07-28 13:48:20,577 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.622e+01 6.136e+01 7.218e+01 8.877e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 13:48:23,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.94 vs. limit=22.5 +2024-07-28 13:48:40,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=164944.0, ans=0.125 +2024-07-28 13:48:44,472 INFO [train.py:1114] (3/4) Epoch 13, batch 1050, loss[loss=0.2073, simple_loss=0.2969, pruned_loss=0.05883, over 4873.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2774, pruned_loss=0.05085, over 931432.50 frames. ], batch size: 14, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:48:50,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=164957.33333333334, ans=0.125 +2024-07-28 13:49:03,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=164984.0, ans=0.0 +2024-07-28 13:49:09,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=164997.33333333334, ans=0.2 +2024-07-28 13:49:19,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=165010.66666666666, ans=0.0 +2024-07-28 13:49:23,110 INFO [train.py:1114] (3/4) Epoch 13, batch 1100, loss[loss=0.1884, simple_loss=0.2724, pruned_loss=0.05215, over 4902.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2776, pruned_loss=0.05085, over 933886.78 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:49:47,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.13 vs. limit=6.0 +2024-07-28 13:49:50,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+01 5.557e+01 6.150e+01 6.948e+01 9.915e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 13:50:15,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=165050.66666666666, ans=0.07 +2024-07-28 13:50:36,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=165077.33333333334, ans=0.2 +2024-07-28 13:50:41,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=165077.33333333334, ans=0.05 +2024-07-28 13:50:46,019 INFO [train.py:1114] (3/4) Epoch 13, batch 1150, loss[loss=0.2006, simple_loss=0.2992, pruned_loss=0.05098, over 4902.00 frames. 
], tot_loss[loss=0.1897, simple_loss=0.2776, pruned_loss=0.05087, over 933931.09 frames. ], batch size: 13, lr: 5.86e-03, grad_scale: 32.0 +2024-07-28 13:50:47,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=165090.66666666666, ans=0.125 +2024-07-28 13:50:48,301 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:50:52,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=165104.0, ans=0.125 +2024-07-28 13:51:06,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165130.66666666666, ans=0.125 +2024-07-28 13:51:06,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=165130.66666666666, ans=0.125 +2024-07-28 13:51:07,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=165130.66666666666, ans=0.0 +2024-07-28 13:51:20,621 INFO [train.py:1114] (3/4) Epoch 13, batch 1200, loss[loss=0.1816, simple_loss=0.2647, pruned_loss=0.04925, over 4873.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.278, pruned_loss=0.05092, over 932946.65 frames. ], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:51:22,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=165157.33333333334, ans=0.125 +2024-07-28 13:51:25,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165157.33333333334, ans=0.1 +2024-07-28 13:51:30,265 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.602e+01 6.215e+01 7.036e+01 9.353e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 13:51:37,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=165184.0, ans=0.07 +2024-07-28 13:51:43,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.77 vs. limit=15.0 +2024-07-28 13:51:47,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=165197.33333333334, ans=0.125 +2024-07-28 13:51:47,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.49 vs. limit=10.0 +2024-07-28 13:51:55,904 INFO [train.py:1114] (3/4) Epoch 13, batch 1250, loss[loss=0.195, simple_loss=0.2998, pruned_loss=0.04511, over 4800.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.278, pruned_loss=0.05051, over 936860.53 frames. 
], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:51:56,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=165224.0, ans=0.125 +2024-07-28 13:52:12,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=165250.66666666666, ans=0.125 +2024-07-28 13:52:19,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=165264.0, ans=0.05 +2024-07-28 13:52:20,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.26 vs. limit=15.0 +2024-07-28 13:52:25,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=165277.33333333334, ans=0.125 +2024-07-28 13:52:34,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=165277.33333333334, ans=0.125 +2024-07-28 13:52:37,830 INFO [train.py:1114] (3/4) Epoch 13, batch 1300, loss[loss=0.2129, simple_loss=0.3035, pruned_loss=0.06117, over 4664.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2767, pruned_loss=0.04985, over 938125.19 frames. ], batch size: 19, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:52:48,771 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.215e+01 5.537e+01 6.038e+01 6.682e+01 9.542e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 13:52:54,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=165304.0, ans=0.125 +2024-07-28 13:52:54,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165304.0, ans=0.125 +2024-07-28 13:52:57,101 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.24 vs. limit=22.5 +2024-07-28 13:53:02,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=165330.66666666666, ans=0.125 +2024-07-28 13:53:19,168 INFO [train.py:1114] (3/4) Epoch 13, batch 1350, loss[loss=0.1863, simple_loss=0.2684, pruned_loss=0.05206, over 4755.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2768, pruned_loss=0.04995, over 940459.81 frames. ], batch size: 13, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:53:20,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=165357.33333333334, ans=0.125 +2024-07-28 13:53:22,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.63 vs. limit=15.0 +2024-07-28 13:53:28,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=165370.66666666666, ans=0.1 +2024-07-28 13:53:34,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=165384.0, ans=0.025 +2024-07-28 13:53:35,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.36 vs. 
limit=15.0 +2024-07-28 13:53:36,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.84 vs. limit=6.0 +2024-07-28 13:53:39,342 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:53:54,750 INFO [train.py:1114] (3/4) Epoch 13, batch 1400, loss[loss=0.1653, simple_loss=0.2531, pruned_loss=0.03876, over 4701.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.276, pruned_loss=0.04957, over 942285.49 frames. ], batch size: 11, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:54:00,424 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.67 vs. limit=22.5 +2024-07-28 13:54:04,062 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.297e+01 5.870e+01 6.563e+01 8.092e+01 1.108e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 13:54:04,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.02 vs. limit=6.0 +2024-07-28 13:54:14,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165464.0, ans=0.1 +2024-07-28 13:54:18,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=165464.0, ans=0.95 +2024-07-28 13:54:27,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=165490.66666666666, ans=0.05 +2024-07-28 13:54:28,178 INFO [train.py:1114] (3/4) Epoch 13, batch 1450, loss[loss=0.188, simple_loss=0.2835, pruned_loss=0.04626, over 4686.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2775, pruned_loss=0.05042, over 942738.81 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:54:31,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=165490.66666666666, ans=0.07 +2024-07-28 13:54:36,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=165504.0, ans=0.025 +2024-07-28 13:54:51,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=165530.66666666666, ans=0.125 +2024-07-28 13:54:54,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=165544.0, ans=0.125 +2024-07-28 13:54:59,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165544.0, ans=0.1 +2024-07-28 13:55:00,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.52 vs. limit=12.0 +2024-07-28 13:55:01,063 INFO [train.py:1114] (3/4) Epoch 13, batch 1500, loss[loss=0.1775, simple_loss=0.2782, pruned_loss=0.03845, over 4805.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.277, pruned_loss=0.05002, over 942250.60 frames. 
], batch size: 14, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:06,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=165557.33333333334, ans=0.2 +2024-07-28 13:55:08,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.42 vs. limit=15.0 +2024-07-28 13:55:10,414 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.997e+01 5.672e+01 6.060e+01 6.827e+01 9.493e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-28 13:55:18,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.74 vs. limit=15.0 +2024-07-28 13:55:25,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.85 vs. limit=15.0 +2024-07-28 13:55:27,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=165597.33333333334, ans=0.1 +2024-07-28 13:55:27,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=165610.66666666666, ans=0.025 +2024-07-28 13:55:34,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=165624.0, ans=0.125 +2024-07-28 13:55:34,843 INFO [train.py:1114] (3/4) Epoch 13, batch 1550, loss[loss=0.1749, simple_loss=0.2722, pruned_loss=0.03885, over 4894.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2771, pruned_loss=0.05015, over 938623.17 frames. ], batch size: 15, lr: 5.85e-03, grad_scale: 32.0 +2024-07-28 13:55:37,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.95 vs. limit=15.0 +2024-07-28 13:55:40,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=165637.33333333334, ans=0.125 +2024-07-28 13:55:51,112 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-07-28 13:55:53,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=165650.66666666666, ans=0.2 +2024-07-28 13:56:01,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=165677.33333333334, ans=0.0 +2024-07-28 13:56:07,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=165690.66666666666, ans=0.1 +2024-07-28 13:56:08,017 INFO [train.py:1114] (3/4) Epoch 13, batch 1600, loss[loss=0.1959, simple_loss=0.289, pruned_loss=0.05141, over 4875.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.277, pruned_loss=0.05043, over 937774.61 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:56:16,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.92 vs. 
limit=6.0 +2024-07-28 13:56:19,119 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.661e+01 6.268e+01 7.174e+01 9.497e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:56:23,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=165717.33333333334, ans=0.125 +2024-07-28 13:56:28,678 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.43 vs. limit=15.0 +2024-07-28 13:56:28,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.94 vs. limit=10.0 +2024-07-28 13:56:29,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=165730.66666666666, ans=0.0 +2024-07-28 13:56:29,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=165730.66666666666, ans=0.0 +2024-07-28 13:56:38,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=165744.0, ans=0.0 +2024-07-28 13:56:42,659 INFO [train.py:1114] (3/4) Epoch 13, batch 1650, loss[loss=0.1957, simple_loss=0.2847, pruned_loss=0.05333, over 4666.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2767, pruned_loss=0.05037, over 937641.59 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 32.0 +2024-07-28 13:57:00,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=165784.0, ans=0.025 +2024-07-28 13:57:01,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=165784.0, ans=0.2 +2024-07-28 13:57:15,593 INFO [train.py:1114] (3/4) Epoch 13, batch 1700, loss[loss=0.1674, simple_loss=0.2449, pruned_loss=0.04492, over 4715.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2762, pruned_loss=0.04969, over 939218.58 frames. ], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:26,860 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.803e+01 6.268e+01 7.328e+01 1.138e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 13:57:34,680 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. limit=6.0 +2024-07-28 13:57:45,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=165877.33333333334, ans=0.125 +2024-07-28 13:57:49,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.24 vs. limit=15.0 +2024-07-28 13:57:51,031 INFO [train.py:1114] (3/4) Epoch 13, batch 1750, loss[loss=0.1593, simple_loss=0.2485, pruned_loss=0.03504, over 4812.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2758, pruned_loss=0.04924, over 940310.58 frames. 
], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:57:51,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=165890.66666666666, ans=0.125 +2024-07-28 13:57:51,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=165890.66666666666, ans=0.2 +2024-07-28 13:57:58,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.96 vs. limit=6.0 +2024-07-28 13:57:59,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=165904.0, ans=0.125 +2024-07-28 13:58:12,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=165930.66666666666, ans=0.0 +2024-07-28 13:58:13,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=165930.66666666666, ans=0.2 +2024-07-28 13:58:25,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.08 vs. limit=22.5 +2024-07-28 13:58:26,048 INFO [train.py:1114] (3/4) Epoch 13, batch 1800, loss[loss=0.1921, simple_loss=0.2985, pruned_loss=0.04286, over 4633.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2761, pruned_loss=0.0496, over 940956.09 frames. ], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:58:26,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=165957.33333333334, ans=0.04949747468305833 +2024-07-28 13:58:34,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=165970.66666666666, ans=0.1 +2024-07-28 13:58:35,516 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.623e+01 6.283e+01 7.470e+01 1.047e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 13:58:38,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=165970.66666666666, ans=0.125 +2024-07-28 13:58:43,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=165984.0, ans=0.2 +2024-07-28 13:58:46,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=165997.33333333334, ans=0.0 +2024-07-28 13:58:52,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=5.05 vs. limit=15.0 +2024-07-28 13:59:01,621 INFO [train.py:1114] (3/4) Epoch 13, batch 1850, loss[loss=0.1991, simple_loss=0.295, pruned_loss=0.05163, over 4806.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2756, pruned_loss=0.04948, over 940440.69 frames. 
], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:06,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=166024.0, ans=0.125 +2024-07-28 13:59:07,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=166024.0, ans=0.0 +2024-07-28 13:59:07,943 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 13:59:24,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.91 vs. limit=15.0 +2024-07-28 13:59:35,544 INFO [train.py:1114] (3/4) Epoch 13, batch 1900, loss[loss=0.1977, simple_loss=0.2882, pruned_loss=0.05358, over 4668.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2765, pruned_loss=0.04982, over 941969.54 frames. ], batch size: 14, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 13:59:38,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166090.66666666666, ans=0.1 +2024-07-28 13:59:44,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.588e+01 6.157e+01 7.144e+01 1.104e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 13:59:58,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166130.66666666666, ans=0.1 +2024-07-28 14:00:06,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.72 vs. limit=12.0 +2024-07-28 14:00:08,385 INFO [train.py:1114] (3/4) Epoch 13, batch 1950, loss[loss=0.1768, simple_loss=0.271, pruned_loss=0.04134, over 4903.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2775, pruned_loss=0.0502, over 944058.89 frames. ], batch size: 13, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:16,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=166170.66666666666, ans=0.025 +2024-07-28 14:00:24,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=9.35 vs. limit=10.0 +2024-07-28 14:00:25,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=166184.0, ans=0.0 +2024-07-28 14:00:25,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=166184.0, ans=0.125 +2024-07-28 14:00:41,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166210.66666666666, ans=0.0 +2024-07-28 14:00:42,311 INFO [train.py:1114] (3/4) Epoch 13, batch 2000, loss[loss=0.135, simple_loss=0.2061, pruned_loss=0.03197, over 4798.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2788, pruned_loss=0.05047, over 940987.09 frames. 
], batch size: 11, lr: 5.84e-03, grad_scale: 64.0 +2024-07-28 14:00:44,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=166224.0, ans=0.125 +2024-07-28 14:00:45,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=166224.0, ans=0.125 +2024-07-28 14:00:51,856 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.600e+01 5.856e+01 6.495e+01 7.461e+01 1.148e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-28 14:01:16,503 INFO [train.py:1114] (3/4) Epoch 13, batch 2050, loss[loss=0.1793, simple_loss=0.259, pruned_loss=0.04982, over 4608.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2772, pruned_loss=0.04995, over 939115.53 frames. ], batch size: 11, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:01:19,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=166290.66666666666, ans=0.1 +2024-07-28 14:01:26,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166304.0, ans=0.1 +2024-07-28 14:01:46,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=166344.0, ans=0.05 +2024-07-28 14:01:48,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=166344.0, ans=0.0 +2024-07-28 14:01:51,262 INFO [train.py:1114] (3/4) Epoch 13, batch 2100, loss[loss=0.2081, simple_loss=0.2964, pruned_loss=0.05992, over 4754.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2756, pruned_loss=0.04899, over 940892.50 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:00,482 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.442e+01 5.528e+01 6.162e+01 7.061e+01 9.278e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 14:02:10,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166397.33333333334, ans=0.125 +2024-07-28 14:02:17,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=166410.66666666666, ans=0.125 +2024-07-28 14:02:18,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=166410.66666666666, ans=0.0 +2024-07-28 14:02:24,077 INFO [train.py:1114] (3/4) Epoch 13, batch 2150, loss[loss=0.1725, simple_loss=0.2561, pruned_loss=0.04443, over 4901.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2751, pruned_loss=0.04909, over 944010.78 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:02:26,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.50 vs. 
limit=12.0 +2024-07-28 14:02:26,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166424.0, ans=0.125 +2024-07-28 14:02:26,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=166424.0, ans=0.025 +2024-07-28 14:02:31,996 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.28 vs. limit=12.0 +2024-07-28 14:02:46,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=166464.0, ans=0.0 +2024-07-28 14:02:57,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.22 vs. limit=15.0 +2024-07-28 14:02:59,557 INFO [train.py:1114] (3/4) Epoch 13, batch 2200, loss[loss=0.1983, simple_loss=0.2882, pruned_loss=0.0542, over 4815.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2753, pruned_loss=0.04937, over 943129.51 frames. ], batch size: 14, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:04,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=166490.66666666666, ans=0.0 +2024-07-28 14:03:06,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=166504.0, ans=0.0 +2024-07-28 14:03:08,838 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.709e+01 5.698e+01 6.654e+01 7.833e+01 2.383e+02, threshold=1.331e+02, percent-clipped=1.0 +2024-07-28 14:03:12,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.min_positive, batch_count=166517.33333333334, ans=0.025 +2024-07-28 14:03:17,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.82 vs. limit=12.0 +2024-07-28 14:03:22,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166530.66666666666, ans=0.125 +2024-07-28 14:03:32,645 INFO [train.py:1114] (3/4) Epoch 13, batch 2250, loss[loss=0.1893, simple_loss=0.2909, pruned_loss=0.04387, over 4691.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2744, pruned_loss=0.04907, over 941852.45 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 64.0 +2024-07-28 14:03:38,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=166570.66666666666, ans=0.125 +2024-07-28 14:03:55,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=166597.33333333334, ans=0.125 +2024-07-28 14:03:58,048 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-28 14:04:07,911 INFO [train.py:1114] (3/4) Epoch 13, batch 2300, loss[loss=0.1975, simple_loss=0.2739, pruned_loss=0.06054, over 4931.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2737, pruned_loss=0.04859, over 939853.70 frames. 
], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:13,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=166637.33333333334, ans=0.125 +2024-07-28 14:04:19,880 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.399e+01 5.798e+01 6.898e+01 9.306e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-28 14:04:21,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=166637.33333333334, ans=0.04949747468305833 +2024-07-28 14:04:23,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.65 vs. limit=15.0 +2024-07-28 14:04:33,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=166664.0, ans=0.125 +2024-07-28 14:04:35,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=166664.0, ans=0.125 +2024-07-28 14:04:44,037 INFO [train.py:1114] (3/4) Epoch 13, batch 2350, loss[loss=0.1724, simple_loss=0.2722, pruned_loss=0.03628, over 4639.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2739, pruned_loss=0.04822, over 941490.09 frames. ], batch size: 13, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:04:46,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten.whitening_limit, batch_count=166690.66666666666, ans=22.5 +2024-07-28 14:04:50,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=166704.0, ans=0.1 +2024-07-28 14:04:58,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=166717.33333333334, ans=0.2 +2024-07-28 14:05:00,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=166717.33333333334, ans=0.125 +2024-07-28 14:05:02,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.30 vs. limit=15.0 +2024-07-28 14:05:06,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=166730.66666666666, ans=0.125 +2024-07-28 14:05:06,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-28 14:05:07,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=166730.66666666666, ans=0.125 +2024-07-28 14:05:17,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.41 vs. limit=6.0 +2024-07-28 14:05:17,416 INFO [train.py:1114] (3/4) Epoch 13, batch 2400, loss[loss=0.1464, simple_loss=0.2373, pruned_loss=0.02772, over 4639.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2743, pruned_loss=0.04862, over 941108.90 frames. 
], batch size: 12, lr: 5.83e-03, grad_scale: 32.0 +2024-07-28 14:05:24,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=166770.66666666666, ans=0.125 +2024-07-28 14:05:27,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.15 vs. limit=15.0 +2024-07-28 14:05:27,489 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.555e+01 6.337e+01 7.554e+01 1.093e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 14:05:31,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=166784.0, ans=0.0 +2024-07-28 14:05:37,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=166797.33333333334, ans=0.125 +2024-07-28 14:05:40,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=166797.33333333334, ans=0.07 +2024-07-28 14:05:50,660 INFO [train.py:1114] (3/4) Epoch 13, batch 2450, loss[loss=0.1755, simple_loss=0.2693, pruned_loss=0.04088, over 4694.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2748, pruned_loss=0.04915, over 936471.28 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:05:54,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=166824.0, ans=0.125 +2024-07-28 14:06:04,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=166850.66666666666, ans=0.0 +2024-07-28 14:06:10,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166864.0, ans=0.1 +2024-07-28 14:06:10,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=166864.0, ans=0.125 +2024-07-28 14:06:23,971 INFO [train.py:1114] (3/4) Epoch 13, batch 2500, loss[loss=0.1934, simple_loss=0.2827, pruned_loss=0.05206, over 4823.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04932, over 938714.99 frames. 
], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:06:29,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=166890.66666666666, ans=0.2 +2024-07-28 14:06:31,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=166904.0, ans=0.2 +2024-07-28 14:06:33,895 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.764e+01 5.445e+01 5.909e+01 6.665e+01 1.016e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-28 14:06:37,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=166917.33333333334, ans=0.125 +2024-07-28 14:06:46,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=166930.66666666666, ans=0.025 +2024-07-28 14:06:53,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=166944.0, ans=0.05 +2024-07-28 14:06:54,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.64 vs. limit=15.0 +2024-07-28 14:06:57,759 INFO [train.py:1114] (3/4) Epoch 13, batch 2550, loss[loss=0.1757, simple_loss=0.2693, pruned_loss=0.04103, over 4811.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2751, pruned_loss=0.04906, over 938502.62 frames. ], batch size: 11, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:04,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=166957.33333333334, ans=0.125 +2024-07-28 14:07:12,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=166984.0, ans=0.1 +2024-07-28 14:07:14,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=166984.0, ans=0.125 +2024-07-28 14:07:16,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=166984.0, ans=0.0 +2024-07-28 14:07:27,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=167010.66666666666, ans=0.125 +2024-07-28 14:07:29,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=167010.66666666666, ans=0.2 +2024-07-28 14:07:32,594 INFO [train.py:1114] (3/4) Epoch 13, batch 2600, loss[loss=0.1781, simple_loss=0.2695, pruned_loss=0.04331, over 4898.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2761, pruned_loss=0.04939, over 938039.81 frames. 
], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:07:40,003 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:07:42,339 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.608e+01 6.313e+01 7.050e+01 1.090e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 14:07:43,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=167037.33333333334, ans=0.2 +2024-07-28 14:07:47,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=167050.66666666666, ans=0.125 +2024-07-28 14:07:50,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=167050.66666666666, ans=0.125 +2024-07-28 14:07:54,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.00 vs. limit=15.0 +2024-07-28 14:07:55,033 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:08:07,480 INFO [train.py:1114] (3/4) Epoch 13, batch 2650, loss[loss=0.1867, simple_loss=0.2808, pruned_loss=0.04629, over 4636.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.04931, over 939983.27 frames. ], batch size: 16, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:09,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_positive, batch_count=167090.66666666666, ans=0.05 +2024-07-28 14:08:30,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=167130.66666666666, ans=0.2 +2024-07-28 14:08:38,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.62 vs. limit=15.0 +2024-07-28 14:08:41,005 INFO [train.py:1114] (3/4) Epoch 13, batch 2700, loss[loss=0.1847, simple_loss=0.2787, pruned_loss=0.04536, over 4745.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2749, pruned_loss=0.04832, over 939958.93 frames. 
], batch size: 14, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:08:41,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=167157.33333333334, ans=0.2 +2024-07-28 14:08:43,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=167157.33333333334, ans=0.125 +2024-07-28 14:08:46,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.whiten.whitening_limit, batch_count=167157.33333333334, ans=12.0 +2024-07-28 14:08:48,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=167170.66666666666, ans=0.125 +2024-07-28 14:08:51,006 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.482e+01 5.925e+01 6.824e+01 1.004e+02, threshold=1.185e+02, percent-clipped=0.0 +2024-07-28 14:08:55,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=167184.0, ans=0.0 +2024-07-28 14:08:59,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167184.0, ans=0.1 +2024-07-28 14:08:59,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=167184.0, ans=0.2 +2024-07-28 14:09:00,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=167197.33333333334, ans=0.125 +2024-07-28 14:09:04,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167197.33333333334, ans=0.1 +2024-07-28 14:09:06,409 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=15.32 vs. limit=15.0 +2024-07-28 14:09:10,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167210.66666666666, ans=0.0 +2024-07-28 14:09:14,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167210.66666666666, ans=0.125 +2024-07-28 14:09:17,202 INFO [train.py:1114] (3/4) Epoch 13, batch 2750, loss[loss=0.204, simple_loss=0.2825, pruned_loss=0.0628, over 4705.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2747, pruned_loss=0.04906, over 939913.25 frames. ], batch size: 12, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:24,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.12 vs. limit=10.0 +2024-07-28 14:09:28,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.96 vs. limit=15.0 +2024-07-28 14:09:50,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167277.33333333334, ans=0.1 +2024-07-28 14:09:52,621 INFO [train.py:1114] (3/4) Epoch 13, batch 2800, loss[loss=0.2702, simple_loss=0.3264, pruned_loss=0.1071, over 3613.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2755, pruned_loss=0.04945, over 937955.99 frames. 
], batch size: 35, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:09:54,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=167290.66666666666, ans=0.125 +2024-07-28 14:09:54,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=167290.66666666666, ans=0.05 +2024-07-28 14:10:02,560 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.817e+01 5.664e+01 6.211e+01 7.205e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 14:10:04,707 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:10:04,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167304.0, ans=0.0 +2024-07-28 14:10:13,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.32 vs. limit=10.0 +2024-07-28 14:10:26,048 INFO [train.py:1114] (3/4) Epoch 13, batch 2850, loss[loss=0.1706, simple_loss=0.2626, pruned_loss=0.03933, over 4957.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2769, pruned_loss=0.0503, over 936631.58 frames. ], batch size: 13, lr: 5.82e-03, grad_scale: 32.0 +2024-07-28 14:10:36,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=167370.66666666666, ans=0.125 +2024-07-28 14:10:37,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=167370.66666666666, ans=0.0 +2024-07-28 14:10:37,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=167370.66666666666, ans=0.125 +2024-07-28 14:10:38,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=167384.0, ans=0.0 +2024-07-28 14:10:44,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=167384.0, ans=0.0 +2024-07-28 14:10:49,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=167397.33333333334, ans=0.125 +2024-07-28 14:10:49,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.20 vs. limit=15.0 +2024-07-28 14:10:59,333 INFO [train.py:1114] (3/4) Epoch 13, batch 2900, loss[loss=0.1863, simple_loss=0.2721, pruned_loss=0.05029, over 4829.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2775, pruned_loss=0.05001, over 940156.51 frames. 
], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:09,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167437.33333333334, ans=0.1 +2024-07-28 14:11:09,570 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.851e+01 6.550e+01 7.504e+01 1.142e+02, threshold=1.310e+02, percent-clipped=0.0 +2024-07-28 14:11:16,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167450.66666666666, ans=0.0 +2024-07-28 14:11:18,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=167450.66666666666, ans=0.0 +2024-07-28 14:11:18,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=167464.0, ans=0.125 +2024-07-28 14:11:24,839 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=12.0 +2024-07-28 14:11:29,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.28 vs. limit=15.0 +2024-07-28 14:11:31,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=167477.33333333334, ans=0.125 +2024-07-28 14:11:33,212 INFO [train.py:1114] (3/4) Epoch 13, batch 2950, loss[loss=0.1726, simple_loss=0.2629, pruned_loss=0.04116, over 4704.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2763, pruned_loss=0.04947, over 938787.87 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:11:40,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=167504.0, ans=0.125 +2024-07-28 14:11:46,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=167517.33333333334, ans=0.0 +2024-07-28 14:11:54,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=167530.66666666666, ans=0.2 +2024-07-28 14:11:58,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=167530.66666666666, ans=0.125 +2024-07-28 14:12:06,768 INFO [train.py:1114] (3/4) Epoch 13, batch 3000, loss[loss=0.1628, simple_loss=0.2542, pruned_loss=0.03569, over 4754.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2767, pruned_loss=0.04956, over 938044.69 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:06,768 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 14:12:11,222 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.5.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.2723, 2.3425, 4.0359, 2.0248], device='cuda:3') +2024-07-28 14:12:16,802 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.4457, 3.9294, 4.4957, 3.3930], device='cuda:3') +2024-07-28 14:12:18,646 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1663, simple_loss=0.2701, pruned_loss=0.0312, over 944034.00 frames. 
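
The recurring `optim.py:487` WARNING lines in this log summarize gradient clipping over each logging interval: five grad-norm quartiles (min/25%/50%/75%/max), the clipping threshold, and the fraction of batches clipped. The logged values are consistent with the threshold being `clipping_scale` times the median recent gradient norm (for example, 2.0 × 6.038e+01 ≈ 1.208e+02 in an earlier WARNING above). The sketch below is a minimal illustration of that bookkeeping under those assumptions, not the actual `optim.py` implementation; the class name `GradNormClipper`, the fixed history length, and the cumulative `percent-clipped` are hypothetical.

```python
import torch

class GradNormClipper:
    """Minimal sketch of median-based gradient clipping with quartile logging.

    Hypothetical stand-in for the statistics reported by the optim.py WARNING
    lines; the real optimizer's bookkeeping differs in detail.
    """

    def __init__(self, clipping_scale: float = 2.0, history: int = 1024):
        self.clipping_scale = clipping_scale
        self.history = history
        self.norms: list[float] = []  # recent total grad norms
        self.num_clipped = 0
        self.num_steps = 0

    def __call__(self, params) -> None:
        params = [p for p in params if p.grad is not None]
        # Total gradient norm across all parameters for this step.
        total_norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])).item()
        self.norms = (self.norms + [total_norm])[-self.history:]
        # Quartiles of the recent norms: min, 25%, median, 75%, max.
        q = torch.quantile(
            torch.tensor(self.norms),
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        # Threshold = clipping_scale * median, matching the logged values.
        threshold = self.clipping_scale * q[2].item()
        self.num_steps += 1
        if total_norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / total_norm)  # rescale in place
        print(f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
              + " ".join(f"{v:.3e}" for v in q.tolist())
              + f", threshold={threshold:.3e}, percent-clipped="
              f"{100.0 * self.num_clipped / self.num_steps:.1f}")
```

In the log, `percent-clipped` appears to be reported per logging interval, whereas this sketch reports a cumulative fraction; either way, a value of 0.0 alongside a steady AMP `grad_scale` of 32.0 indicates the run is numerically stable and clipping is rarely triggered.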
+2024-07-28 14:12:18,646 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 14:12:22,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=167557.33333333334, ans=0.125 +2024-07-28 14:12:27,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=167570.66666666666, ans=0.125 +2024-07-28 14:12:29,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.635e+01 6.154e+01 7.337e+01 1.248e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 14:12:39,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.max_abs, batch_count=167597.33333333334, ans=10.0 +2024-07-28 14:12:43,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.93 vs. limit=15.0 +2024-07-28 14:12:50,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=167610.66666666666, ans=0.125 +2024-07-28 14:12:51,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=167610.66666666666, ans=0.2 +2024-07-28 14:12:52,950 INFO [train.py:1114] (3/4) Epoch 13, batch 3050, loss[loss=0.1953, simple_loss=0.2874, pruned_loss=0.05161, over 4633.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.278, pruned_loss=0.04995, over 936714.51 frames. ], batch size: 12, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:12:53,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=167624.0, ans=0.025 +2024-07-28 14:12:55,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=167624.0, ans=0.125 +2024-07-28 14:12:56,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=167624.0, ans=0.125 +2024-07-28 14:12:59,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=167637.33333333334, ans=0.0 +2024-07-28 14:13:03,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.16 vs. limit=15.0 +2024-07-28 14:13:13,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167650.66666666666, ans=0.1 +2024-07-28 14:13:23,346 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:13:27,845 INFO [train.py:1114] (3/4) Epoch 13, batch 3100, loss[loss=0.2003, simple_loss=0.2894, pruned_loss=0.05556, over 4683.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2772, pruned_loss=0.04944, over 937404.73 frames. 
], batch size: 16, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:13:34,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=167704.0, ans=0.0 +2024-07-28 14:13:37,633 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.158e+01 5.544e+01 6.108e+01 7.072e+01 9.683e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 14:13:43,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=167717.33333333334, ans=0.0 +2024-07-28 14:13:43,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=167717.33333333334, ans=0.2 +2024-07-28 14:13:44,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=167717.33333333334, ans=0.0 +2024-07-28 14:13:47,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.05 vs. limit=15.0 +2024-07-28 14:13:52,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=167730.66666666666, ans=0.95 +2024-07-28 14:14:01,406 INFO [train.py:1114] (3/4) Epoch 13, batch 3150, loss[loss=0.1845, simple_loss=0.2836, pruned_loss=0.04269, over 4620.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2774, pruned_loss=0.04946, over 937527.51 frames. ], batch size: 17, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:04,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=167757.33333333334, ans=0.125 +2024-07-28 14:14:16,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=167784.0, ans=0.1 +2024-07-28 14:14:28,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.17 vs. limit=15.0 +2024-07-28 14:14:37,497 INFO [train.py:1114] (3/4) Epoch 13, batch 3200, loss[loss=0.1846, simple_loss=0.2633, pruned_loss=0.05293, over 4837.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2751, pruned_loss=0.04837, over 938922.48 frames. ], batch size: 13, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:14:38,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=167824.0, ans=0.0 +2024-07-28 14:14:47,195 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.951e+01 5.665e+01 6.377e+01 7.022e+01 9.065e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:15:00,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=167850.66666666666, ans=0.1 +2024-07-28 14:15:12,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.37 vs. limit=22.5 +2024-07-28 14:15:17,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=167890.66666666666, ans=0.0 +2024-07-28 14:15:18,650 INFO [train.py:1114] (3/4) Epoch 13, batch 3250, loss[loss=0.1772, simple_loss=0.2651, pruned_loss=0.0447, over 4938.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2748, pruned_loss=0.04803, over 940096.82 frames. 
], batch size: 14, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:21,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=167890.66666666666, ans=0.0 +2024-07-28 14:15:22,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.42 vs. limit=22.5 +2024-07-28 14:15:28,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=167904.0, ans=0.125 +2024-07-28 14:15:48,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=167944.0, ans=0.1 +2024-07-28 14:15:50,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167944.0, ans=0.125 +2024-07-28 14:15:50,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=167944.0, ans=0.125 +2024-07-28 14:15:52,609 INFO [train.py:1114] (3/4) Epoch 13, batch 3300, loss[loss=0.2198, simple_loss=0.3136, pruned_loss=0.063, over 4743.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2742, pruned_loss=0.04817, over 940328.60 frames. ], batch size: 19, lr: 5.81e-03, grad_scale: 32.0 +2024-07-28 14:15:53,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=167957.33333333334, ans=0.125 +2024-07-28 14:15:55,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=167957.33333333334, ans=0.05 +2024-07-28 14:16:02,797 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.327e+01 5.938e+01 6.571e+01 1.063e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 14:16:26,159 INFO [train.py:1114] (3/4) Epoch 13, batch 3350, loss[loss=0.1927, simple_loss=0.2802, pruned_loss=0.05262, over 4620.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2752, pruned_loss=0.04849, over 938475.61 frames. ], batch size: 17, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:16:26,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=168024.0, ans=0.125 +2024-07-28 14:17:00,157 INFO [train.py:1114] (3/4) Epoch 13, batch 3400, loss[loss=0.1709, simple_loss=0.2579, pruned_loss=0.04193, over 4804.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2757, pruned_loss=0.04949, over 937274.23 frames. ], batch size: 11, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:05,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=168090.66666666666, ans=0.05 +2024-07-28 14:17:05,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=168090.66666666666, ans=0.125 +2024-07-28 14:17:10,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.911e+01 6.407e+01 7.548e+01 1.179e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-28 14:17:15,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.75 vs. limit=12.0 +2024-07-28 14:17:24,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.55 vs. 
limit=22.5 +2024-07-28 14:17:29,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168144.0, ans=0.1 +2024-07-28 14:17:33,662 INFO [train.py:1114] (3/4) Epoch 13, batch 3450, loss[loss=0.1892, simple_loss=0.2904, pruned_loss=0.044, over 4661.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2759, pruned_loss=0.04933, over 937640.73 frames. ], batch size: 19, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:17:34,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=168157.33333333334, ans=0.125 +2024-07-28 14:17:54,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168184.0, ans=0.125 +2024-07-28 14:18:02,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=168210.66666666666, ans=0.125 +2024-07-28 14:18:08,627 INFO [train.py:1114] (3/4) Epoch 13, batch 3500, loss[loss=0.1872, simple_loss=0.2554, pruned_loss=0.05949, over 4934.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2755, pruned_loss=0.04915, over 938521.89 frames. ], batch size: 12, lr: 5.80e-03, grad_scale: 32.0 +2024-07-28 14:18:09,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=168224.0, ans=0.125 +2024-07-28 14:18:15,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168237.33333333334, ans=0.125 +2024-07-28 14:18:18,524 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.446e+01 5.616e+01 6.376e+01 7.329e+01 9.586e+01, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 14:18:21,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=168250.66666666666, ans=0.125 +2024-07-28 14:18:24,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=168250.66666666666, ans=0.0 +2024-07-28 14:18:24,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=168250.66666666666, ans=0.125 +2024-07-28 14:18:32,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=168264.0, ans=0.125 +2024-07-28 14:18:40,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=168277.33333333334, ans=0.125 +2024-07-28 14:18:41,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=168277.33333333334, ans=0.0 +2024-07-28 14:18:44,202 INFO [train.py:1114] (3/4) Epoch 13, batch 3550, loss[loss=0.1825, simple_loss=0.2803, pruned_loss=0.04232, over 4673.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2747, pruned_loss=0.04876, over 938925.88 frames. 
], batch size: 14, lr: 5.80e-03, grad_scale: 32.0
+2024-07-28 14:18:48,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=168290.66666666666, ans=0.125
+2024-07-28 14:18:50,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=168304.0, ans=0.2
+2024-07-28 14:19:05,841 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:19:08,847 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:19:13,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.58 vs. limit=15.0
+2024-07-28 14:19:17,384 INFO [train.py:1114] (3/4) Epoch 13, batch 3600, loss[loss=0.1714, simple_loss=0.2568, pruned_loss=0.04304, over 4963.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2753, pruned_loss=0.04937, over 940598.26 frames. ], batch size: 13, lr: 5.80e-03, grad_scale: 32.0
+2024-07-28 14:19:22,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=168357.33333333334, ans=0.0
+2024-07-28 14:19:27,242 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.903e+01 6.553e+01 7.584e+01 1.363e+02, threshold=1.311e+02, percent-clipped=1.0
+2024-07-28 14:19:38,769 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:19:50,594 INFO [train.py:1114] (3/4) Epoch 13, batch 3650, loss[loss=0.1806, simple_loss=0.2746, pruned_loss=0.04329, over 4902.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2744, pruned_loss=0.04917, over 940941.21 frames. ], batch size: 15, lr: 5.80e-03, grad_scale: 32.0
+2024-07-28 14:19:56,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_na.min_abs, batch_count=168424.0, ans=0.02
+2024-07-28 14:20:03,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168437.33333333334, ans=0.1
+2024-07-28 14:20:12,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=168464.0, ans=6.0
+2024-07-28 14:20:22,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=168477.33333333334, ans=0.125
+2024-07-28 14:20:27,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=168490.66666666666, ans=0.1
+2024-07-28 14:20:27,680 INFO [train.py:1114] (3/4) Epoch 13, batch 3700, loss[loss=0.2053, simple_loss=0.3019, pruned_loss=0.0543, over 4928.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2748, pruned_loss=0.04873, over 942005.52 frames. ], batch size: 14, lr: 5.80e-03, grad_scale: 32.0
+2024-07-28 14:20:31,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168490.66666666666, ans=0.1
+2024-07-28 14:20:37,609 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.274e+01 5.551e+01 6.034e+01 6.765e+01 1.404e+02, threshold=1.207e+02, percent-clipped=1.0
+2024-07-28 14:20:40,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168517.33333333334, ans=0.1
+2024-07-28 14:20:48,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0
+2024-07-28 14:20:53,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=168530.66666666666, ans=0.0
+2024-07-28 14:20:55,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=168544.0, ans=0.0
+2024-07-28 14:21:01,042 INFO [train.py:1114] (3/4) Epoch 13, batch 3750, loss[loss=0.1643, simple_loss=0.2514, pruned_loss=0.03862, over 4806.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2748, pruned_loss=0.04859, over 943486.37 frames. ], batch size: 11, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:21:08,749 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.66 vs. limit=22.5
+2024-07-28 14:21:10,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=168570.66666666666, ans=0.2
+2024-07-28 14:21:11,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.26 vs. limit=12.0
+2024-07-28 14:21:17,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=168584.0, ans=0.1
+2024-07-28 14:21:24,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=168597.33333333334, ans=0.125
+2024-07-28 14:21:32,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=168610.66666666666, ans=0.125
+2024-07-28 14:21:34,569 INFO [train.py:1114] (3/4) Epoch 13, batch 3800, loss[loss=0.2142, simple_loss=0.2984, pruned_loss=0.06501, over 4812.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2754, pruned_loss=0.04881, over 941279.97 frames. ], batch size: 14, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:21:38,066 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:21:41,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=168637.33333333334, ans=0.125
+2024-07-28 14:21:42,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=168637.33333333334, ans=0.125
+2024-07-28 14:21:44,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.768e+01 5.664e+01 6.425e+01 7.356e+01 1.029e+02, threshold=1.285e+02, percent-clipped=0.0
+2024-07-28 14:21:48,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=15.0
+2024-07-28 14:22:00,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168677.33333333334, ans=0.1
+2024-07-28 14:22:03,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=168677.33333333334, ans=0.1
+2024-07-28 14:22:05,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168677.33333333334, ans=0.0
+2024-07-28 14:22:06,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=168677.33333333334, ans=0.0
+2024-07-28 14:22:07,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=168677.33333333334, ans=0.125
+2024-07-28 14:22:07,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=168677.33333333334, ans=0.0
+2024-07-28 14:22:08,476 INFO [train.py:1114] (3/4) Epoch 13, batch 3850, loss[loss=0.2066, simple_loss=0.2832, pruned_loss=0.06504, over 4649.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2744, pruned_loss=0.04833, over 942186.80 frames. ], batch size: 16, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:22:12,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=168690.66666666666, ans=0.2
+2024-07-28 14:22:13,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=168690.66666666666, ans=0.125
+2024-07-28 14:22:14,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=168690.66666666666, ans=0.0
+2024-07-28 14:22:14,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=168690.66666666666, ans=0.025
+2024-07-28 14:22:16,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=168704.0, ans=0.07
+2024-07-28 14:22:25,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=168717.33333333334, ans=0.125
+2024-07-28 14:22:41,757 INFO [train.py:1114] (3/4) Epoch 13, batch 3900, loss[loss=0.1727, simple_loss=0.2681, pruned_loss=0.03861, over 4811.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2747, pruned_loss=0.04813, over 942457.02 frames. ], batch size: 14, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:22:43,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168757.33333333334, ans=0.1
+2024-07-28 14:22:51,515 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.611e+01 6.115e+01 6.716e+01 9.720e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 14:23:00,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168784.0, ans=0.1
+2024-07-28 14:23:17,168 INFO [train.py:1114] (3/4) Epoch 13, batch 3950, loss[loss=0.1932, simple_loss=0.2786, pruned_loss=0.05387, over 4830.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2742, pruned_loss=0.04795, over 944278.54 frames. ], batch size: 16, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:23:21,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=168824.0, ans=0.0
+2024-07-28 14:23:24,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=168837.33333333334, ans=0.09899494936611666
+2024-07-28 14:23:25,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=168837.33333333334, ans=0.125
+2024-07-28 14:23:26,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=168837.33333333334, ans=0.2
+2024-07-28 14:23:27,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=168837.33333333334, ans=0.0
+2024-07-28 14:23:33,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=168850.66666666666, ans=0.0
+2024-07-28 14:23:40,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=168864.0, ans=0.125
+2024-07-28 14:23:42,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.06 vs. limit=15.0
+2024-07-28 14:23:50,573 INFO [train.py:1114] (3/4) Epoch 13, batch 4000, loss[loss=0.1751, simple_loss=0.2543, pruned_loss=0.04789, over 4770.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2747, pruned_loss=0.0486, over 940548.27 frames. ], batch size: 12, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:23:58,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=168904.0, ans=0.0
+2024-07-28 14:24:00,469 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.269e+01 5.777e+01 6.304e+01 7.103e+01 1.026e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-28 14:24:05,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=168917.33333333334, ans=0.0
+2024-07-28 14:24:11,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=168930.66666666666, ans=0.1
+2024-07-28 14:24:15,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168930.66666666666, ans=0.125
+2024-07-28 14:24:20,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=168944.0, ans=0.0
+2024-07-28 14:24:23,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=168944.0, ans=0.125
+2024-07-28 14:24:23,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=168944.0, ans=0.125
+2024-07-28 14:24:25,722 INFO [train.py:1114] (3/4) Epoch 13, batch 4050, loss[loss=0.2653, simple_loss=0.3387, pruned_loss=0.09593, over 3557.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2745, pruned_loss=0.04918, over 939232.11 frames. ], batch size: 37, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:24:26,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=168957.33333333334, ans=0.0
+2024-07-28 14:24:35,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=168970.66666666666, ans=0.125
+2024-07-28 14:24:35,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.19 vs. limit=15.0
+2024-07-28 14:24:59,973 INFO [train.py:1114] (3/4) Epoch 13, batch 4100, loss[loss=0.2173, simple_loss=0.2955, pruned_loss=0.06959, over 4916.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2756, pruned_loss=0.0498, over 937958.93 frames. ], batch size: 15, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:25:08,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.23 vs. limit=15.0
+2024-07-28 14:25:09,985 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.994e+01 6.398e+01 7.649e+01 1.244e+02, threshold=1.280e+02, percent-clipped=0.0
+2024-07-28 14:25:31,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=169077.33333333334, ans=0.015
+2024-07-28 14:25:33,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=169077.33333333334, ans=0.0
+2024-07-28 14:25:35,752 INFO [train.py:1114] (3/4) Epoch 13, batch 4150, loss[loss=0.1621, simple_loss=0.2443, pruned_loss=0.03991, over 4827.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2744, pruned_loss=0.04945, over 937753.25 frames. ], batch size: 13, lr: 5.79e-03, grad_scale: 32.0
+2024-07-28 14:25:39,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169090.66666666666, ans=0.1
+2024-07-28 14:26:03,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.92 vs. limit=15.0
+2024-07-28 14:26:11,039 INFO [train.py:1114] (3/4) Epoch 13, batch 4200, loss[loss=0.2213, simple_loss=0.3058, pruned_loss=0.06846, over 4907.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2752, pruned_loss=0.04949, over 939126.46 frames. ], batch size: 15, lr: 5.78e-03, grad_scale: 32.0
+2024-07-28 14:26:17,236 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:26:17,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=169170.66666666666, ans=0.125
+2024-07-28 14:26:19,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=169170.66666666666, ans=0.09899494936611666
+2024-07-28 14:26:20,875 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.568e+01 6.158e+01 7.068e+01 9.655e+01, threshold=1.232e+02, percent-clipped=0.0
+2024-07-28 14:26:36,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169197.33333333334, ans=0.125
+2024-07-28 14:26:38,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=169210.66666666666, ans=0.0
+2024-07-28 14:26:44,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.16 vs. limit=15.0
+2024-07-28 14:26:44,499 INFO [train.py:1114] (3/4) Epoch 13, batch 4250, loss[loss=0.209, simple_loss=0.3027, pruned_loss=0.0576, over 4640.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2763, pruned_loss=0.04998, over 939965.50 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 32.0
+2024-07-28 14:26:49,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=169224.0, ans=0.125
+2024-07-28 14:26:58,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=169250.66666666666, ans=0.125
+2024-07-28 14:27:04,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=169264.0, ans=0.035
+2024-07-28 14:27:17,706 INFO [train.py:1114] (3/4) Epoch 13, batch 4300, loss[loss=0.233, simple_loss=0.3064, pruned_loss=0.07985, over 4760.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2761, pruned_loss=0.04995, over 939958.88 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:27:20,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0
+2024-07-28 14:27:21,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169290.66666666666, ans=0.1
+2024-07-28 14:27:27,631 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.556e+01 6.095e+01 6.767e+01 1.249e+02, threshold=1.219e+02, percent-clipped=1.0
+2024-07-28 14:27:37,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=169330.66666666666, ans=0.025
+2024-07-28 14:27:45,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=169344.0, ans=0.125
+2024-07-28 14:27:50,967 INFO [train.py:1114] (3/4) Epoch 13, batch 4350, loss[loss=0.1778, simple_loss=0.2645, pruned_loss=0.04559, over 4756.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2765, pruned_loss=0.04966, over 940844.84 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:28:00,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.68 vs. limit=22.5
+2024-07-28 14:28:04,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=169384.0, ans=0.0
+2024-07-28 14:28:07,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=169384.0, ans=0.025
+2024-07-28 14:28:23,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=169410.66666666666, ans=22.5
+2024-07-28 14:28:24,270 INFO [train.py:1114] (3/4) Epoch 13, batch 4400, loss[loss=0.1801, simple_loss=0.2802, pruned_loss=0.04002, over 4807.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.04958, over 940746.63 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:28:27,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=169424.0, ans=0.125
+2024-07-28 14:28:36,399 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.899e+01 5.545e+01 6.054e+01 6.710e+01 1.195e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 14:28:51,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=169464.0, ans=0.2
+2024-07-28 14:29:00,267 INFO [train.py:1114] (3/4) Epoch 13, batch 4450, loss[loss=0.1538, simple_loss=0.2444, pruned_loss=0.03155, over 4939.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2755, pruned_loss=0.04928, over 938799.65 frames. ], batch size: 12, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:29:03,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.17 vs. limit=15.0
+2024-07-28 14:29:08,057 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.68 vs. limit=22.5
+2024-07-28 14:29:09,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169504.0, ans=0.1
+2024-07-28 14:29:13,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=169517.33333333334, ans=0.1
+2024-07-28 14:29:13,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=169517.33333333334, ans=0.125
+2024-07-28 14:29:14,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=169517.33333333334, ans=0.07
+2024-07-28 14:29:15,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=169517.33333333334, ans=0.125
+2024-07-28 14:29:16,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=169517.33333333334, ans=0.025
+2024-07-28 14:29:20,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.27 vs. limit=22.5
+2024-07-28 14:29:27,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=169530.66666666666, ans=0.125
+2024-07-28 14:29:36,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=169544.0, ans=0.2
+2024-07-28 14:29:38,960 INFO [train.py:1114] (3/4) Epoch 13, batch 4500, loss[loss=0.2336, simple_loss=0.3293, pruned_loss=0.06894, over 4733.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2762, pruned_loss=0.04927, over 938396.49 frames. ], batch size: 14, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:29:39,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=169557.33333333334, ans=0.125
+2024-07-28 14:29:48,688 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.617e+01 6.099e+01 7.289e+01 9.992e+01, threshold=1.220e+02, percent-clipped=0.0
+2024-07-28 14:31:47,417 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:31:56,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=169610.66666666666, ans=0.125
+2024-07-28 14:31:59,576 INFO [train.py:1114] (3/4) Epoch 13, batch 4550, loss[loss=0.2027, simple_loss=0.2814, pruned_loss=0.06194, over 4892.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2762, pruned_loss=0.04893, over 940130.68 frames. ], batch size: 13, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:32:10,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=169637.33333333334, ans=0.125
+2024-07-28 14:32:18,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=169650.66666666666, ans=0.0
+2024-07-28 14:32:18,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=169650.66666666666, ans=0.125
+2024-07-28 14:32:34,424 INFO [train.py:1114] (3/4) Epoch 13, batch 4600, loss[loss=0.1843, simple_loss=0.2797, pruned_loss=0.04443, over 4557.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2756, pruned_loss=0.04904, over 938167.68 frames. ], batch size: 21, lr: 5.78e-03, grad_scale: 64.0
+2024-07-28 14:32:48,089 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.248e+01 5.789e+01 6.719e+01 7.977e+01 1.194e+02, threshold=1.344e+02, percent-clipped=0.0
+2024-07-28 14:32:51,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=169717.33333333334, ans=0.0
+2024-07-28 14:33:01,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169730.66666666666, ans=0.0
+2024-07-28 14:33:12,841 INFO [train.py:1114] (3/4) Epoch 13, batch 4650, loss[loss=0.1761, simple_loss=0.2667, pruned_loss=0.04275, over 4826.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2764, pruned_loss=0.04946, over 940013.13 frames. ], batch size: 16, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:33:15,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169757.33333333334, ans=0.125
+2024-07-28 14:33:15,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=12.0
+2024-07-28 14:33:18,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.49 vs. limit=10.0
+2024-07-28 14:33:21,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=169770.66666666666, ans=0.0
+2024-07-28 14:33:25,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.19 vs. limit=22.5
+2024-07-28 14:33:28,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=169784.0, ans=0.125
+2024-07-28 14:33:30,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.75 vs. limit=15.0
+2024-07-28 14:33:35,356 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:33:42,936 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:33:45,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=169810.66666666666, ans=0.125
+2024-07-28 14:33:46,618 INFO [train.py:1114] (3/4) Epoch 13, batch 4700, loss[loss=0.2007, simple_loss=0.2841, pruned_loss=0.05868, over 4708.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2756, pruned_loss=0.04901, over 937436.84 frames. ], batch size: 11, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:33:47,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.18 vs. limit=15.0
+2024-07-28 14:33:49,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=169824.0, ans=0.1
+2024-07-28 14:33:51,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=169824.0, ans=0.0
+2024-07-28 14:33:56,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.492e+01 5.422e+01 6.008e+01 7.035e+01 1.017e+02, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 14:34:01,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=169850.66666666666, ans=0.0
+2024-07-28 14:34:05,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=169850.66666666666, ans=0.125
+2024-07-28 14:34:14,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=169877.33333333334, ans=0.2
+2024-07-28 14:34:15,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=169877.33333333334, ans=0.125
+2024-07-28 14:34:20,165 INFO [train.py:1114] (3/4) Epoch 13, batch 4750, loss[loss=0.1898, simple_loss=0.2793, pruned_loss=0.05016, over 4522.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2763, pruned_loss=0.04955, over 935532.54 frames. ], batch size: 21, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:34:23,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=169890.66666666666, ans=0.125
+2024-07-28 14:34:43,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=169930.66666666666, ans=0.0
+2024-07-28 14:34:50,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=169944.0, ans=0.1
+2024-07-28 14:34:53,981 INFO [train.py:1114] (3/4) Epoch 13, batch 4800, loss[loss=0.208, simple_loss=0.2917, pruned_loss=0.06214, over 4690.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2761, pruned_loss=0.04967, over 933269.68 frames. ], batch size: 13, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:35:03,984 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.668e+01 6.259e+01 7.420e+01 1.160e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 14:35:04,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=169970.66666666666, ans=0.125
+2024-07-28 14:35:12,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=169984.0, ans=0.125
+2024-07-28 14:35:15,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=169997.33333333334, ans=0.0
+2024-07-28 14:35:23,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.93 vs. limit=15.0
+2024-07-28 14:35:24,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=170010.66666666666, ans=0.2
+2024-07-28 14:35:31,455 INFO [train.py:1114] (3/4) Epoch 13, batch 4850, loss[loss=0.2213, simple_loss=0.3199, pruned_loss=0.06131, over 4738.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2767, pruned_loss=0.05001, over 932464.39 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:35:32,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=170024.0, ans=0.0
+2024-07-28 14:35:37,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=170024.0, ans=0.125
+2024-07-28 14:35:48,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=170050.66666666666, ans=0.1
+2024-07-28 14:35:55,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=170050.66666666666, ans=0.125
+2024-07-28 14:36:14,102 INFO [train.py:1114] (3/4) Epoch 13, batch 4900, loss[loss=0.201, simple_loss=0.2808, pruned_loss=0.06056, over 4763.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2764, pruned_loss=0.04952, over 934164.13 frames. ], batch size: 13, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:36:26,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=170090.66666666666, ans=0.125
+2024-07-28 14:36:32,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=170090.66666666666, ans=0.2
+2024-07-28 14:36:37,275 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:36:37,691 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.628e+01 6.419e+01 7.139e+01 1.048e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-28 14:36:38,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=170104.0, ans=0.025
+2024-07-28 14:36:46,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=170117.33333333334, ans=0.0
+2024-07-28 14:36:51,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=170130.66666666666, ans=0.04949747468305833
+2024-07-28 14:36:59,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=170144.0, ans=0.125
+2024-07-28 14:37:03,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=170157.33333333334, ans=0.0
+2024-07-28 14:37:04,271 INFO [train.py:1114] (3/4) Epoch 13, batch 4950, loss[loss=0.2584, simple_loss=0.3294, pruned_loss=0.09371, over 3223.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2775, pruned_loss=0.05047, over 930973.33 frames. ], batch size: 36, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:37:05,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170157.33333333334, ans=0.1
+2024-07-28 14:37:10,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=170157.33333333334, ans=0.125
+2024-07-28 14:37:12,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170170.66666666666, ans=0.125
+2024-07-28 14:37:15,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=170170.66666666666, ans=0.07
+2024-07-28 14:37:15,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170170.66666666666, ans=0.125
+2024-07-28 14:37:27,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=170197.33333333334, ans=0.125
+2024-07-28 14:37:29,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=170197.33333333334, ans=0.125
+2024-07-28 14:37:35,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.47 vs. limit=10.0
+2024-07-28 14:37:36,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=170210.66666666666, ans=0.125
+2024-07-28 14:37:40,474 INFO [train.py:1114] (3/4) Epoch 13, batch 5000, loss[loss=0.1866, simple_loss=0.2756, pruned_loss=0.04875, over 4652.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.277, pruned_loss=0.05005, over 935171.10 frames. ], batch size: 14, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:37:52,211 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.707e+01 6.178e+01 6.994e+01 1.058e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-28 14:37:52,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170237.33333333334, ans=0.1
+2024-07-28 14:38:06,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=170264.0, ans=0.125
+2024-07-28 14:38:15,703 INFO [train.py:1114] (3/4) Epoch 13, batch 5050, loss[loss=0.1543, simple_loss=0.2285, pruned_loss=0.04005, over 4859.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04942, over 937733.24 frames. ], batch size: 12, lr: 5.77e-03, grad_scale: 64.0
+2024-07-28 14:38:19,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.06 vs. limit=15.0
+2024-07-28 14:38:21,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=170290.66666666666, ans=0.125
+2024-07-28 14:38:29,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=170317.33333333334, ans=0.125
+2024-07-28 14:38:48,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=170344.0, ans=0.025
+2024-07-28 14:38:51,494 INFO [train.py:1114] (3/4) Epoch 13, batch 5100, loss[loss=0.1814, simple_loss=0.2614, pruned_loss=0.05068, over 4771.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2765, pruned_loss=0.04998, over 935163.85 frames. ], batch size: 12, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:39:04,402 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.671e+01 6.468e+01 7.600e+01 1.076e+02, threshold=1.294e+02, percent-clipped=0.0
+2024-07-28 14:39:05,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=170370.66666666666, ans=0.125
+2024-07-28 14:39:09,150 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:39:10,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=170384.0, ans=0.125
+2024-07-28 14:39:15,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=170397.33333333334, ans=0.125
+2024-07-28 14:39:20,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=170397.33333333334, ans=0.2
+2024-07-28 14:39:27,933 INFO [train.py:1114] (3/4) Epoch 13, batch 5150, loss[loss=0.2178, simple_loss=0.3137, pruned_loss=0.06097, over 4870.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2771, pruned_loss=0.05016, over 936275.01 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:39:39,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=170437.33333333334, ans=0.0
+2024-07-28 14:39:39,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=170437.33333333334, ans=0.125
+2024-07-28 14:39:54,043 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.83 vs. limit=6.0
+2024-07-28 14:40:00,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0
+2024-07-28 14:40:01,715 INFO [train.py:1114] (3/4) Epoch 13, batch 5200, loss[loss=0.1861, simple_loss=0.2859, pruned_loss=0.04316, over 4663.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2766, pruned_loss=0.04945, over 936612.97 frames. ], batch size: 14, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:40:02,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=170490.66666666666, ans=0.0
+2024-07-28 14:40:05,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=170490.66666666666, ans=0.125
+2024-07-28 14:40:11,856 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.435e+01 5.593e+01 6.249e+01 7.313e+01 1.397e+02, threshold=1.250e+02, percent-clipped=1.0
+2024-07-28 14:40:12,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=170504.0, ans=0.125
+2024-07-28 14:40:15,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=170517.33333333334, ans=0.125
+2024-07-28 14:40:22,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170530.66666666666, ans=0.1
+2024-07-28 14:40:29,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=170544.0, ans=0.2
+2024-07-28 14:40:32,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=170544.0, ans=0.125
+2024-07-28 14:40:35,398 INFO [train.py:1114] (3/4) Epoch 13, batch 5250, loss[loss=0.1624, simple_loss=0.2502, pruned_loss=0.03731, over 4885.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2756, pruned_loss=0.04937, over 936537.04 frames. ], batch size: 13, lr: 5.76e-03, grad_scale: 64.0
+2024-07-28 14:40:38,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=170557.33333333334, ans=0.0
+2024-07-28 14:40:43,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=170570.66666666666, ans=0.2
+2024-07-28 14:40:57,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=170597.33333333334, ans=0.0
+2024-07-28 14:41:09,239 INFO [train.py:1114] (3/4) Epoch 13, batch 5300, loss[loss=0.2017, simple_loss=0.2947, pruned_loss=0.05439, over 4616.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2759, pruned_loss=0.05001, over 934820.09 frames. ], batch size: 16, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:41:16,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=170637.33333333334, ans=0.2
+2024-07-28 14:41:17,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=170637.33333333334, ans=0.025
+2024-07-28 14:41:17,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=170637.33333333334, ans=0.0
+2024-07-28 14:41:19,623 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.756e+01 6.384e+01 7.054e+01 9.587e+01, threshold=1.277e+02, percent-clipped=0.0
+2024-07-28 14:41:26,097 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.31 vs. limit=15.0
+2024-07-28 14:41:38,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.65 vs. limit=15.0
+2024-07-28 14:41:47,978 INFO [train.py:1114] (3/4) Epoch 13, batch 5350, loss[loss=0.1673, simple_loss=0.2551, pruned_loss=0.03976, over 4506.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2756, pruned_loss=0.04944, over 936708.31 frames. ], batch size: 10, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:41:49,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.06 vs. limit=15.0
+2024-07-28 14:41:52,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=170690.66666666666, ans=0.125
+2024-07-28 14:41:53,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=170690.66666666666, ans=0.125
+2024-07-28 14:41:59,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=170704.0, ans=0.125
+2024-07-28 14:42:15,117 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=14.68 vs. limit=15.0
+2024-07-28 14:42:17,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=170744.0, ans=0.025
+2024-07-28 14:42:21,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=170744.0, ans=0.025
+2024-07-28 14:42:33,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.62 vs. limit=6.0
+2024-07-28 14:42:34,765 INFO [train.py:1114] (3/4) Epoch 13, batch 5400, loss[loss=0.2131, simple_loss=0.3148, pruned_loss=0.05573, over 4219.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2772, pruned_loss=0.05015, over 930485.36 frames. ], batch size: 25, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:42:35,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=170757.33333333334, ans=0.0
+2024-07-28 14:42:36,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=170757.33333333334, ans=0.0
+2024-07-28 14:42:37,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=170757.33333333334, ans=0.09899494936611666
+2024-07-28 14:42:47,125 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.692e+01 6.413e+01 7.093e+01 1.081e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-28 14:42:51,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.96 vs. limit=12.0
+2024-07-28 14:42:58,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=170797.33333333334, ans=0.0
+2024-07-28 14:43:03,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=170810.66666666666, ans=0.04949747468305833
+2024-07-28 14:43:03,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=170810.66666666666, ans=0.2
+2024-07-28 14:43:09,614 INFO [train.py:1114] (3/4) Epoch 13, batch 5450, loss[loss=0.188, simple_loss=0.2653, pruned_loss=0.05541, over 4705.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2771, pruned_loss=0.05026, over 933210.91 frames. ], batch size: 11, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:43:13,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=170824.0, ans=0.1
+2024-07-28 14:43:26,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=170850.66666666666, ans=0.0
+2024-07-28 14:43:27,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=170850.66666666666, ans=0.1
+2024-07-28 14:43:32,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=170864.0, ans=0.125
+2024-07-28 14:43:46,362 INFO [train.py:1114] (3/4) Epoch 13, batch 5500, loss[loss=0.1738, simple_loss=0.2606, pruned_loss=0.04356, over 4219.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2768, pruned_loss=0.05052, over 930829.33 frames. ], batch size: 25, lr: 5.76e-03, grad_scale: 32.0
+2024-07-28 14:43:49,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.19 vs. limit=15.0
+2024-07-28 14:43:52,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.20 vs. limit=15.0
+2024-07-28 14:43:57,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.576e+01 6.394e+01 7.172e+01 9.673e+01, threshold=1.279e+02, percent-clipped=0.0
+2024-07-28 14:44:04,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=170917.33333333334, ans=0.2
+2024-07-28 14:44:10,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=170930.66666666666, ans=0.125
+2024-07-28 14:44:12,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.16 vs. limit=5.0
+2024-07-28 14:44:42,498 INFO [train.py:1114] (3/4) Epoch 13, batch 5550, loss[loss=0.1693, simple_loss=0.2617, pruned_loss=0.03846, over 4707.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2755, pruned_loss=0.05019, over 932815.03 frames. ], batch size: 12, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:44:48,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=170957.33333333334, ans=0.125
+2024-07-28 14:44:51,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.99 vs. limit=15.0
+2024-07-28 14:45:00,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=170970.66666666666, ans=0.125
+2024-07-28 14:45:34,090 INFO [train.py:1114] (3/4) Epoch 13, batch 5600, loss[loss=0.1928, simple_loss=0.2927, pruned_loss=0.04643, over 4740.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2765, pruned_loss=0.05017, over 934085.05 frames. ], batch size: 14, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:45:41,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171037.33333333334, ans=0.1
+2024-07-28 14:45:44,818 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.519e+01 5.953e+01 6.683e+01 8.989e+01, threshold=1.191e+02, percent-clipped=0.0
+2024-07-28 14:45:50,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=171050.66666666666, ans=0.0
+2024-07-28 14:45:51,734 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.27 vs. limit=22.5
+2024-07-28 14:46:07,382 INFO [train.py:1114] (3/4) Epoch 13, batch 5650, loss[loss=0.2289, simple_loss=0.3164, pruned_loss=0.0707, over 4563.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2757, pruned_loss=0.05, over 936753.23 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:46:15,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.27 vs. limit=15.0
+2024-07-28 14:46:22,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=171117.33333333334, ans=0.125
+2024-07-28 14:46:38,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=171144.0, ans=0.04949747468305833
+2024-07-28 14:46:42,392 INFO [train.py:1114] (3/4) Epoch 13, batch 5700, loss[loss=0.1849, simple_loss=0.2701, pruned_loss=0.04984, over 4692.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2758, pruned_loss=0.05015, over 937819.37 frames. ], batch size: 13, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:46:46,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=171157.33333333334, ans=0.2
+2024-07-28 14:46:58,590 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 5.340e+01 5.994e+01 6.863e+01 1.115e+02, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 14:47:02,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=171184.0, ans=0.0
+2024-07-28 14:47:05,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=5.24 vs. limit=12.0
+2024-07-28 14:47:12,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=171197.33333333334, ans=0.025
+2024-07-28 14:47:13,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=171197.33333333334, ans=0.0
+2024-07-28 14:47:23,377 INFO [train.py:1114] (3/4) Epoch 13, batch 5750, loss[loss=0.1797, simple_loss=0.2762, pruned_loss=0.04158, over 4730.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2761, pruned_loss=0.04982, over 938221.13 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:47:26,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=171224.0, ans=0.125
+2024-07-28 14:47:32,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=171237.33333333334, ans=0.0
+2024-07-28 14:47:43,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171264.0, ans=0.1
+2024-07-28 14:47:56,853 INFO [train.py:1114] (3/4) Epoch 13, batch 5800, loss[loss=0.1837, simple_loss=0.287, pruned_loss=0.04021, over 4736.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.277, pruned_loss=0.05034, over 937565.78 frames. ], batch size: 19, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:48:04,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.56 vs. limit=22.5
+2024-07-28 14:48:04,988 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.23 vs. limit=6.0
+2024-07-28 14:48:07,660 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.852e+01 6.546e+01 7.322e+01 1.389e+02, threshold=1.309e+02, percent-clipped=1.0
+2024-07-28 14:48:18,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=171317.33333333334, ans=0.2
+2024-07-28 14:48:19,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171330.66666666666, ans=0.125
+2024-07-28 14:48:23,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=171330.66666666666, ans=0.125
+2024-07-28 14:48:27,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=171344.0, ans=0.125
+2024-07-28 14:48:32,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=171344.0, ans=0.0
+2024-07-28 14:48:35,958 INFO [train.py:1114] (3/4) Epoch 13, batch 5850, loss[loss=0.2207, simple_loss=0.3032, pruned_loss=0.06908, over 4508.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2768, pruned_loss=0.0503, over 938494.36 frames. ], batch size: 21, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:48:42,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=171357.33333333334, ans=0.0
+2024-07-28 14:48:42,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=171370.66666666666, ans=0.0
+2024-07-28 14:48:52,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=171384.0, ans=0.025
+2024-07-28 14:48:54,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171384.0, ans=0.1
+2024-07-28 14:48:54,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=171384.0, ans=0.025
+2024-07-28 14:48:56,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171384.0, ans=0.1
+2024-07-28 14:49:00,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171397.33333333334, ans=0.125
+2024-07-28 14:49:05,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=171397.33333333334, ans=0.125
+2024-07-28 14:49:05,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=171410.66666666666, ans=0.1
+2024-07-28 14:49:12,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=171424.0, ans=0.0
+2024-07-28 14:49:13,158 INFO [train.py:1114] (3/4) Epoch 13, batch 5900, loss[loss=0.1886, simple_loss=0.2858, pruned_loss=0.04572, over 4671.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2768, pruned_loss=0.05009, over 939118.84 frames. ], batch size: 15, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:49:46,005 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.643e+01 6.441e+01 7.134e+01 1.016e+02, threshold=1.288e+02, percent-clipped=0.0
+2024-07-28 14:49:53,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.46 vs. limit=15.0
+2024-07-28 14:49:57,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=171464.0, ans=0.125
+2024-07-28 14:49:59,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171464.0, ans=0.125
+2024-07-28 14:50:02,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171477.33333333334, ans=0.1
+2024-07-28 14:50:03,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=171477.33333333334, ans=0.2
+2024-07-28 14:50:05,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=171477.33333333334, ans=0.125
+2024-07-28 14:50:09,015 INFO [train.py:1114] (3/4) Epoch 13, batch 5950, loss[loss=0.1781, simple_loss=0.2703, pruned_loss=0.04292, over 4672.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2762, pruned_loss=0.04952, over 941169.31 frames. ], batch size: 15, lr: 5.75e-03, grad_scale: 32.0
+2024-07-28 14:50:20,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=171504.0, ans=0.125
+2024-07-28 14:50:21,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171504.0, ans=0.125
+2024-07-28 14:50:24,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=171504.0, ans=0.125
+2024-07-28 14:50:24,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=171504.0, ans=0.0
+2024-07-28 14:50:30,031 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.25 vs. limit=6.0
+2024-07-28 14:50:43,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=171544.0, ans=0.0
+2024-07-28 14:50:45,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=171544.0, ans=0.07
+2024-07-28 14:50:47,141 INFO [train.py:1114] (3/4) Epoch 13, batch 6000, loss[loss=0.2094, simple_loss=0.2976, pruned_loss=0.06059, over 4051.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2765, pruned_loss=0.05013, over 938163.29 frames. ], batch size: 25, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:50:47,141 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 14:51:12,228 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1644, simple_loss=0.2689, pruned_loss=0.02993, over 944034.00 frames. 
+2024-07-28 14:51:12,229 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 14:51:18,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=171557.33333333334, ans=0.125
+2024-07-28 14:51:21,604 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:51:25,691 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.656e+01 6.363e+01 7.172e+01 1.139e+02, threshold=1.273e+02, percent-clipped=0.0
+2024-07-28 14:51:40,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=171597.33333333334, ans=0.125
+2024-07-28 14:51:42,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.81 vs. limit=6.0
+2024-07-28 14:51:43,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=171610.66666666666, ans=0.125
+2024-07-28 14:51:56,419 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 14:51:59,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=171624.0, ans=0.125
+2024-07-28 14:52:00,227 INFO [train.py:1114] (3/4) Epoch 13, batch 6050, loss[loss=0.1942, simple_loss=0.2683, pruned_loss=0.06011, over 4773.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2754, pruned_loss=0.04994, over 938855.90 frames. ], batch size: 12, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:52:05,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=171624.0, ans=0.0
+2024-07-28 14:52:12,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.65 vs. limit=10.0
+2024-07-28 14:52:14,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=171637.33333333334, ans=0.0
+2024-07-28 14:52:28,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=171664.0, ans=0.1
+2024-07-28 14:52:32,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=171677.33333333334, ans=0.0
+2024-07-28 14:52:36,819 INFO [train.py:1114] (3/4) Epoch 13, batch 6100, loss[loss=0.1899, simple_loss=0.2823, pruned_loss=0.04878, over 4671.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2755, pruned_loss=0.04967, over 938319.20 frames. ], batch size: 15, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:52:46,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=171704.0, ans=0.5
+2024-07-28 14:52:51,824 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.506e+01 6.070e+01 6.932e+01 1.254e+02, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 14:53:10,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0
+2024-07-28 14:53:14,215 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=171744.0, ans=0.1
+2024-07-28 14:53:20,069 INFO [train.py:1114] (3/4) Epoch 13, batch 6150, loss[loss=0.2206, simple_loss=0.2976, pruned_loss=0.07177, over 3456.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2764, pruned_loss=0.0496, over 936793.68 frames. ], batch size: 35, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:53:26,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=171770.66666666666, ans=0.125
+2024-07-28 14:53:28,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=171770.66666666666, ans=0.125
+2024-07-28 14:53:29,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=171770.66666666666, ans=0.2
+2024-07-28 14:53:50,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=171810.66666666666, ans=0.0
+2024-07-28 14:53:53,562 INFO [train.py:1114] (3/4) Epoch 13, batch 6200, loss[loss=0.2011, simple_loss=0.2799, pruned_loss=0.06112, over 4738.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2765, pruned_loss=0.04979, over 936698.46 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:54:07,618 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.672e+01 6.206e+01 7.275e+01 9.803e+01, threshold=1.241e+02, percent-clipped=1.0
+2024-07-28 14:54:16,107 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0
+2024-07-28 14:54:24,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=24.08 vs. limit=22.5
+2024-07-28 14:54:27,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=171877.33333333334, ans=0.95
+2024-07-28 14:54:34,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=171877.33333333334, ans=0.04949747468305833
+2024-07-28 14:54:35,960 INFO [train.py:1114] (3/4) Epoch 13, batch 6250, loss[loss=0.202, simple_loss=0.2981, pruned_loss=0.05295, over 4806.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2764, pruned_loss=0.04995, over 932631.43 frames. ], batch size: 14, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:54:40,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=171890.66666666666, ans=0.04949747468305833
+2024-07-28 14:54:46,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=171904.0, ans=0.125
+2024-07-28 14:54:46,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.32 vs. limit=22.5
+2024-07-28 14:54:57,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=171930.66666666666, ans=15.0
+2024-07-28 14:54:58,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.99 vs. limit=15.0
+2024-07-28 14:55:00,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171930.66666666666, ans=0.125
+2024-07-28 14:55:04,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=171944.0, ans=0.0
+2024-07-28 14:55:08,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=171944.0, ans=0.09899494936611666
+2024-07-28 14:55:10,115 INFO [train.py:1114] (3/4) Epoch 13, batch 6300, loss[loss=0.1751, simple_loss=0.2448, pruned_loss=0.05277, over 4564.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2768, pruned_loss=0.05033, over 929411.29 frames. 
], batch size: 10, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:55:23,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=171970.66666666666, ans=0.09899494936611666 +2024-07-28 14:55:26,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.859e+01 6.673e+01 7.738e+01 1.141e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-28 14:55:27,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=171970.66666666666, ans=0.125 +2024-07-28 14:55:28,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=171970.66666666666, ans=0.0 +2024-07-28 14:55:30,922 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0 +2024-07-28 14:55:35,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.29 vs. limit=15.0 +2024-07-28 14:55:39,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=171997.33333333334, ans=0.2 +2024-07-28 14:55:41,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.58 vs. limit=22.5 +2024-07-28 14:55:44,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172010.66666666666, ans=0.125 +2024-07-28 14:55:47,560 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.04 vs. limit=15.0 +2024-07-28 14:55:49,028 INFO [train.py:1114] (3/4) Epoch 13, batch 6350, loss[loss=0.191, simple_loss=0.2828, pruned_loss=0.0496, over 4487.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2753, pruned_loss=0.04923, over 933479.12 frames. ], batch size: 21, lr: 5.74e-03, grad_scale: 32.0 +2024-07-28 14:55:49,957 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 14:55:55,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=172037.33333333334, ans=0.125 +2024-07-28 14:55:55,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.09 vs. 
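The batch 6000 block above shows the periodic validation pass: train.py:1137 announces it, train.py:1146 reports the validation losses, and train.py:1147 reports the CUDA memory high-water mark (4142MB here). A minimal sketch of that pattern, assuming a plain PyTorch loop (maybe_validate and valid_interval are illustrative names, not taken from train.py):

import torch

def maybe_validate(model, valid_loader, batch_idx, valid_interval=6000):
    # Run validation on multiples of the interval, as in the log above,
    # then report the peak of GPU memory allocated so far.
    if batch_idx % valid_interval != 0:
        return
    model.eval()
    with torch.no_grad():
        for batch in valid_loader:
            pass  # accumulate the validation losses here
    model.train()
    peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    print(f"Maximum memory allocated so far is {peak_mb}MB")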
+2024-07-28 14:56:05,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=172050.66666666666, ans=0.125
+2024-07-28 14:56:06,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=172050.66666666666, ans=0.0
+2024-07-28 14:56:12,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172064.0, ans=0.125
+2024-07-28 14:56:12,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=172064.0, ans=10.0
+2024-07-28 14:56:15,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172064.0, ans=0.1
+2024-07-28 14:56:17,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=172077.33333333334, ans=0.125
+2024-07-28 14:56:17,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=172077.33333333334, ans=0.0
+2024-07-28 14:56:21,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=15.65 vs. limit=15.0
+2024-07-28 14:56:26,786 INFO [train.py:1114] (3/4) Epoch 13, batch 6400, loss[loss=0.2003, simple_loss=0.2942, pruned_loss=0.05323, over 4631.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2752, pruned_loss=0.04992, over 934818.42 frames. ], batch size: 13, lr: 5.74e-03, grad_scale: 32.0
+2024-07-28 14:56:37,084 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.564e+01 5.588e+01 6.261e+01 7.317e+01 1.038e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-28 14:56:39,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172117.33333333334, ans=0.125
+2024-07-28 14:56:40,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.64 vs. limit=10.0
+2024-07-28 14:56:42,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=172117.33333333334, ans=0.0
+2024-07-28 14:56:55,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=172144.0, ans=0.125
+2024-07-28 14:56:58,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=172144.0, ans=0.0
+2024-07-28 14:57:00,242 INFO [train.py:1114] (3/4) Epoch 13, batch 6450, loss[loss=0.2247, simple_loss=0.3254, pruned_loss=0.06203, over 4533.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2767, pruned_loss=0.0502, over 938575.59 frames. ], batch size: 21, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:57:06,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=172157.33333333334, ans=0.125
+2024-07-28 14:57:09,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=172170.66666666666, ans=0.125
+2024-07-28 14:57:10,132 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.42 vs. limit=15.0
+2024-07-28 14:57:13,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=172184.0, ans=0.0
+2024-07-28 14:57:16,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.78 vs. limit=15.0
+2024-07-28 14:57:18,098 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.45 vs. limit=12.0
+2024-07-28 14:57:24,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=172197.33333333334, ans=0.125
+2024-07-28 14:57:31,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172197.33333333334, ans=0.125
+2024-07-28 14:57:33,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=172210.66666666666, ans=0.0
+2024-07-28 14:57:39,253 INFO [train.py:1114] (3/4) Epoch 13, batch 6500, loss[loss=0.2308, simple_loss=0.3115, pruned_loss=0.07503, over 3533.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.276, pruned_loss=0.04976, over 939888.89 frames. ], batch size: 35, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:57:41,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0
+2024-07-28 14:57:43,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=172224.0, ans=0.0
+2024-07-28 14:57:49,793 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.677e+01 6.560e+01 8.086e+01 1.120e+02, threshold=1.312e+02, percent-clipped=0.0
+2024-07-28 14:57:55,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=172250.66666666666, ans=0.0
+2024-07-28 14:57:55,420 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.11 vs. limit=22.5
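The scaling.py:214 lines trace ScheduledFloat values: hyperparameters such as dropout_p, conv_skip_rate and const_attention_rate that are functions of batch_count rather than constants, so regularisation can be strong early in training and relaxed later. A sketch of one plausible form, a piecewise-linear schedule over batch count (the breakpoints below are illustrative, not read from scaling.py):

def scheduled_float(batch_count, points):
    # points: sorted list of (batch_count, value) breakpoints; values are
    # clamped outside the range and linearly interpolated inside it.
    x0, y0 = points[0]
    if batch_count <= x0:
        return y0
    for x1, y1 in points[1:]:
        if batch_count <= x1:
            t = (batch_count - x0) / (x1 - x0)
            return y0 + t * (y1 - y0)
        x0, y0 = x1, y1
    return y0

# e.g. a dropout that decays from 0.3 to 0.1 over the first 20k batches:
# scheduled_float(171384.0, [(0.0, 0.3), (20000.0, 0.1)]) -> 0.1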
+2024-07-28 14:57:57,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172250.66666666666, ans=0.125
+2024-07-28 14:57:57,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=172250.66666666666, ans=0.125
+2024-07-28 14:58:01,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=172264.0, ans=0.125
+2024-07-28 14:58:09,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=172277.33333333334, ans=0.2
+2024-07-28 14:58:14,168 INFO [train.py:1114] (3/4) Epoch 13, batch 6550, loss[loss=0.1452, simple_loss=0.2294, pruned_loss=0.03048, over 4808.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2759, pruned_loss=0.04902, over 942833.50 frames. ], batch size: 11, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:58:17,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=172290.66666666666, ans=0.125
+2024-07-28 14:58:43,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=172344.0, ans=10.0
+2024-07-28 14:58:47,966 INFO [train.py:1114] (3/4) Epoch 13, batch 6600, loss[loss=0.1789, simple_loss=0.2725, pruned_loss=0.04267, over 4926.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2762, pruned_loss=0.04918, over 945034.12 frames. ], batch size: 14, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:58:48,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=172357.33333333334, ans=0.025
+2024-07-28 14:58:50,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=172357.33333333334, ans=0.0
+2024-07-28 14:58:58,751 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.632e+01 5.699e+01 6.105e+01 6.926e+01 1.138e+02, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 14:59:00,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=172384.0, ans=0.125
+2024-07-28 14:59:02,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=172384.0, ans=0.0
+2024-07-28 14:59:05,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=172384.0, ans=0.125
+2024-07-28 14:59:17,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172410.66666666666, ans=0.125
+2024-07-28 14:59:22,868 INFO [train.py:1114] (3/4) Epoch 13, batch 6650, loss[loss=0.2361, simple_loss=0.3184, pruned_loss=0.07692, over 4566.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2754, pruned_loss=0.04936, over 943568.46 frames. ], batch size: 17, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 14:59:25,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=172424.0, ans=0.125
+2024-07-28 14:59:31,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172437.33333333334, ans=0.1
+2024-07-28 14:59:32,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.03 vs. limit=15.0
+2024-07-28 14:59:40,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.13 vs. limit=22.5
+2024-07-28 14:59:54,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.61 vs. limit=15.0
+2024-07-28 14:59:56,803 INFO [train.py:1114] (3/4) Epoch 13, batch 6700, loss[loss=0.2296, simple_loss=0.3293, pruned_loss=0.06495, over 4686.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2759, pruned_loss=0.04944, over 942384.02 frames. ], batch size: 19, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 15:00:06,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=172504.0, ans=0.125
+2024-07-28 15:00:07,463 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.630e+01 6.292e+01 7.000e+01 1.303e+02, threshold=1.258e+02, percent-clipped=1.0
+2024-07-28 15:00:13,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.01 vs. limit=22.5
+2024-07-28 15:00:18,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0
+2024-07-28 15:00:24,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.01 vs. limit=15.0
+2024-07-28 15:00:25,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=172544.0, ans=0.125
+2024-07-28 15:00:27,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.14 vs. limit=12.0
+2024-07-28 15:00:29,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=172544.0, ans=10.0
+2024-07-28 15:00:32,525 INFO [train.py:1114] (3/4) Epoch 13, batch 6750, loss[loss=0.2048, simple_loss=0.2998, pruned_loss=0.05487, over 4167.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2752, pruned_loss=0.04928, over 939925.68 frames. ], batch size: 25, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 15:00:36,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=172557.33333333334, ans=0.025
+2024-07-28 15:00:36,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=172557.33333333334, ans=0.0
+2024-07-28 15:00:38,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.00 vs. limit=15.0
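Throughout the train.py:1114 lines the three reported numbers satisfy loss ≈ 0.5 * simple_loss + pruned_loss (e.g. 0.5 * 0.2754 + 0.0494 ≈ 0.1871 at batch 6650), the usual combination of a trivial-joiner "simple" loss and the pruned transducer loss in pruned-RNN-T training; the 0.5 weight is inferred from these numbers, not read from train.py. A quick consistency check:

# Spot-check reported totals against loss = 0.5 * simple_loss + pruned_loss
# (the 0.5 weight is an inference from this log, not from train.py).
rows = [
    (0.1885, 0.2768, 0.05009),  # batch 5900
    (0.1876, 0.2762, 0.04952),  # batch 5950
    (0.1871, 0.2754, 0.04936),  # batch 6650
]
for loss, simple_loss, pruned_loss in rows:
    assert abs(loss - (0.5 * simple_loss + pruned_loss)) < 5e-4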
+2024-07-28 15:00:41,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.36 vs. limit=15.0
+2024-07-28 15:00:48,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=172584.0, ans=0.0
+2024-07-28 15:01:08,806 INFO [train.py:1114] (3/4) Epoch 13, batch 6800, loss[loss=0.1813, simple_loss=0.2625, pruned_loss=0.05003, over 4638.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2755, pruned_loss=0.04914, over 938673.53 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 15:01:14,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=172637.33333333334, ans=0.0
+2024-07-28 15:01:19,461 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.597e+01 6.324e+01 7.266e+01 1.591e+02, threshold=1.265e+02, percent-clipped=1.0
+2024-07-28 15:01:20,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172637.33333333334, ans=0.1
+2024-07-28 15:01:41,584 INFO [train.py:1114] (3/4) Epoch 13, batch 6850, loss[loss=0.2008, simple_loss=0.2896, pruned_loss=0.05602, over 4688.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2756, pruned_loss=0.04939, over 940384.74 frames. ], batch size: 13, lr: 5.73e-03, grad_scale: 32.0
+2024-07-28 15:01:43,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=172690.66666666666, ans=0.2
+2024-07-28 15:01:48,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=172690.66666666666, ans=0.125
+2024-07-28 15:01:49,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.67 vs. limit=10.0
+2024-07-28 15:02:02,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=172704.0, ans=0.125
+2024-07-28 15:02:03,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=172704.0, ans=0.125
+2024-07-28 15:02:04,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=172704.0, ans=0.125
+2024-07-28 15:02:06,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=172704.0, ans=0.1
+2024-07-28 15:02:11,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=172717.33333333334, ans=0.0
+2024-07-28 15:02:27,239 INFO [train.py:1114] (3/4) Epoch 13, batch 6900, loss[loss=0.1681, simple_loss=0.2591, pruned_loss=0.03862, over 4951.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2756, pruned_loss=0.04926, over 942873.84 frames. ], batch size: 13, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:02:28,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=172757.33333333334, ans=0.125
+2024-07-28 15:02:32,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=172757.33333333334, ans=0.2
+2024-07-28 15:02:33,548 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:02:38,169 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.866e+01 5.650e+01 5.997e+01 6.576e+01 8.900e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 15:02:56,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. limit=15.0
+2024-07-28 15:02:58,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=172810.66666666666, ans=0.0
+2024-07-28 15:03:01,336 INFO [train.py:1114] (3/4) Epoch 13, batch 6950, loss[loss=0.187, simple_loss=0.2655, pruned_loss=0.05421, over 4485.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.275, pruned_loss=0.04894, over 940106.33 frames. ], batch size: 10, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:03:13,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=172837.33333333334, ans=0.1
+2024-07-28 15:03:18,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=172850.66666666666, ans=0.2
+2024-07-28 15:03:25,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=172864.0, ans=0.125
+2024-07-28 15:03:27,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=172864.0, ans=0.125
+2024-07-28 15:03:29,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=172864.0, ans=0.125
+2024-07-28 15:03:30,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.25 vs. limit=15.0
+2024-07-28 15:03:38,285 INFO [train.py:1114] (3/4) Epoch 13, batch 7000, loss[loss=0.1967, simple_loss=0.2752, pruned_loss=0.05911, over 4611.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2754, pruned_loss=0.04955, over 938707.31 frames. ], batch size: 17, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:03:43,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.46 vs. limit=22.5
+2024-07-28 15:03:44,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=172904.0, ans=0.0
+2024-07-28 15:03:48,575 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.631e+01 6.423e+01 7.992e+01 1.097e+02, threshold=1.285e+02, percent-clipped=0.0
+2024-07-28 15:03:58,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=172930.66666666666, ans=0.025
+2024-07-28 15:04:02,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=172930.66666666666, ans=0.0
+2024-07-28 15:04:03,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=172930.66666666666, ans=0.025
+2024-07-28 15:04:09,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=172944.0, ans=0.0
+2024-07-28 15:04:10,936 INFO [train.py:1114] (3/4) Epoch 13, batch 7050, loss[loss=0.1997, simple_loss=0.3078, pruned_loss=0.04574, over 4713.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2753, pruned_loss=0.04944, over 942063.63 frames. ], batch size: 19, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:04:14,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=172957.33333333334, ans=0.1
+2024-07-28 15:04:22,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=172970.66666666666, ans=0.0
+2024-07-28 15:04:33,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=172997.33333333334, ans=0.125
+2024-07-28 15:04:35,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.68 vs. limit=15.0
+2024-07-28 15:04:38,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173010.66666666666, ans=0.125
+2024-07-28 15:04:43,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.18 vs. limit=6.0
+2024-07-28 15:04:44,108 INFO [train.py:1114] (3/4) Epoch 13, batch 7100, loss[loss=0.1957, simple_loss=0.2794, pruned_loss=0.05599, over 4799.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2761, pruned_loss=0.04967, over 936886.65 frames. ], batch size: 15, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:04:54,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.692e+01 6.139e+01 7.289e+01 1.294e+02, threshold=1.228e+02, percent-clipped=1.0
+2024-07-28 15:05:00,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=173050.66666666666, ans=0.125
+2024-07-28 15:05:03,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173064.0, ans=0.125
+2024-07-28 15:05:07,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173064.0, ans=0.125
+2024-07-28 15:05:16,966 INFO [train.py:1114] (3/4) Epoch 13, batch 7150, loss[loss=0.2176, simple_loss=0.3069, pruned_loss=0.06415, over 4530.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2746, pruned_loss=0.04904, over 937948.45 frames. ], batch size: 21, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:05:28,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173104.0, ans=0.1
+2024-07-28 15:05:32,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=173117.33333333334, ans=0.125
+2024-07-28 15:05:44,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=173144.0, ans=0.125
+2024-07-28 15:05:50,030 INFO [train.py:1114] (3/4) Epoch 13, batch 7200, loss[loss=0.2263, simple_loss=0.3094, pruned_loss=0.07155, over 4808.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2754, pruned_loss=0.04955, over 938349.78 frames. ], batch size: 15, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:05:53,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.89 vs. limit=15.0
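In every optim.py:487 warning, the threshold equals Clipping_scale times the median of the printed grad-norm quartiles (2.0 * 6.139e+01 ≈ 1.228e+02 just above), so the clipping level tracks the recent gradient-norm distribution rather than a fixed constant. A sketch under that assumption (the real optim.py may differ in history length, warmup, and per-parameter handling):

from collections import deque

import torch

class MedianClipper:
    # Clip the global grad norm at clipping_scale * median of recent norms.
    # Inferred from the "threshold = 2.0 * median quartile" pattern in the
    # warnings above; this is an assumed reimplementation, not optim.py.
    def __init__(self, clipping_scale=2.0, history=128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)

    def clip_(self, parameters):
        grads = [p.grad for p in parameters if p.grad is not None]
        norm = float(torch.norm(torch.stack([g.norm() for g in grads])))
        self.norms.append(norm)
        qs = torch.quantile(torch.tensor(list(self.norms)),
                            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * float(qs[2])
        if norm > threshold:          # scale all grads down to the threshold
            for g in grads:
                g.mul_(threshold / norm)
        return qs, threshold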
+2024-07-28 15:06:00,400 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.641e+01 6.340e+01 7.110e+01 1.006e+02, threshold=1.268e+02, percent-clipped=0.0
+2024-07-28 15:06:03,282 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173184.0, ans=0.125
+2024-07-28 15:06:07,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=173184.0, ans=0.125
+2024-07-28 15:06:10,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=173197.33333333334, ans=0.0
+2024-07-28 15:06:11,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=173197.33333333334, ans=0.0
+2024-07-28 15:06:11,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=173197.33333333334, ans=0.125
+2024-07-28 15:06:13,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=173197.33333333334, ans=0.125
+2024-07-28 15:06:14,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=173197.33333333334, ans=0.025
+2024-07-28 15:06:22,753 INFO [train.py:1114] (3/4) Epoch 13, batch 7250, loss[loss=0.1822, simple_loss=0.2681, pruned_loss=0.04817, over 4845.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2751, pruned_loss=0.04961, over 940177.92 frames. ], batch size: 12, lr: 5.72e-03, grad_scale: 32.0
+2024-07-28 15:06:28,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=173237.33333333334, ans=0.2
+2024-07-28 15:06:40,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=26.50 vs. limit=22.5
+2024-07-28 15:06:48,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=173277.33333333334, ans=0.0
+2024-07-28 15:06:54,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173290.66666666666, ans=0.125
+2024-07-28 15:06:55,534 INFO [train.py:1114] (3/4) Epoch 13, batch 7300, loss[loss=0.1694, simple_loss=0.2571, pruned_loss=0.04088, over 4862.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2749, pruned_loss=0.04945, over 940159.76 frames. ], batch size: 12, lr: 5.72e-03, grad_scale: 64.0
+2024-07-28 15:06:55,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173290.66666666666, ans=0.1
+2024-07-28 15:06:56,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=173290.66666666666, ans=0.09899494936611666
+2024-07-28 15:06:59,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0
+2024-07-28 15:07:00,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=173290.66666666666, ans=0.0
+2024-07-28 15:07:05,993 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.468e+01 5.985e+01 6.770e+01 9.344e+01, threshold=1.197e+02, percent-clipped=0.0
+2024-07-28 15:07:07,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=173304.0, ans=10.0
+2024-07-28 15:07:16,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=173330.66666666666, ans=0.125
+2024-07-28 15:07:18,168 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:07:23,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=173344.0, ans=0.125
+2024-07-28 15:07:28,291 INFO [train.py:1114] (3/4) Epoch 13, batch 7350, loss[loss=0.2045, simple_loss=0.2901, pruned_loss=0.05944, over 4640.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2755, pruned_loss=0.04959, over 939546.42 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:07:37,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=173370.66666666666, ans=0.1
+2024-07-28 15:07:38,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=173370.66666666666, ans=0.125
+2024-07-28 15:07:44,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=173384.0, ans=0.125
+2024-07-28 15:07:50,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=173397.33333333334, ans=0.125
+2024-07-28 15:07:54,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=173410.66666666666, ans=0.125
+2024-07-28 15:08:02,221 INFO [train.py:1114] (3/4) Epoch 13, batch 7400, loss[loss=0.1852, simple_loss=0.2811, pruned_loss=0.04469, over 4696.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2758, pruned_loss=0.04982, over 941073.89 frames. ], batch size: 13, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:08:12,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.374e+01 5.640e+01 6.317e+01 7.601e+01 1.154e+02, threshold=1.263e+02, percent-clipped=0.0
+2024-07-28 15:08:31,057 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:08:36,731 INFO [train.py:1114] (3/4) Epoch 13, batch 7450, loss[loss=0.1448, simple_loss=0.2211, pruned_loss=0.0343, over 4618.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2752, pruned_loss=0.05055, over 938335.19 frames. ], batch size: 11, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:08:41,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=173490.66666666666, ans=0.0
+2024-07-28 15:08:45,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=173504.0, ans=0.2
+2024-07-28 15:08:57,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=173530.66666666666, ans=0.125
+2024-07-28 15:08:58,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=173530.66666666666, ans=0.0
+2024-07-28 15:09:00,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=173530.66666666666, ans=0.125
+2024-07-28 15:09:00,649 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.08 vs. limit=15.0
+2024-07-28 15:09:09,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=173557.33333333334, ans=0.0
+2024-07-28 15:09:09,546 INFO [train.py:1114] (3/4) Epoch 13, batch 7500, loss[loss=0.2332, simple_loss=0.3195, pruned_loss=0.07349, over 3432.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2765, pruned_loss=0.05094, over 936730.19 frames. ], batch size: 35, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:09:14,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=173557.33333333334, ans=0.2
+2024-07-28 15:09:20,246 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.712e+01 6.192e+01 7.126e+01 1.284e+02, threshold=1.238e+02, percent-clipped=1.0
+2024-07-28 15:09:21,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=173570.66666666666, ans=0.125
+2024-07-28 15:09:23,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=173584.0, ans=0.015
+2024-07-28 15:09:33,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=173584.0, ans=10.0
+2024-07-28 15:09:49,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173610.66666666666, ans=0.1
+2024-07-28 15:09:53,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.86 vs. limit=22.5
+2024-07-28 15:09:56,861 INFO [train.py:1114] (3/4) Epoch 13, batch 7550, loss[loss=0.2165, simple_loss=0.3047, pruned_loss=0.06416, over 4642.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2777, pruned_loss=0.05127, over 936790.62 frames. ], batch size: 17, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:10:15,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.53 vs. limit=10.0
+2024-07-28 15:10:22,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.03 vs. limit=22.5
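The scaling.py:1024 lines compare a per-module whitening metric against a limit (the same limit that also appears above as a whitening_limit ScheduledFloat). One plausible metric, assumed here rather than copied from scaling.py, measures how far the feature covariance is from a multiple of the identity, giving 1.0 for fully whitened features and growing with eigenvalue spread:

import torch

def whitening_metric(x, num_groups=1):
    # Assumed form: d * trace(C @ C) / trace(C)**2 per channel group, where
    # C is the feature covariance; equals 1.0 when C is a multiple of the
    # identity (fully whitened) and grows as the eigenvalues spread out.
    x = x.reshape(-1, x.shape[-1])                    # (frames, channels)
    d = x.shape[1] // num_groups
    x = x.reshape(-1, num_groups, d).transpose(0, 1)  # (groups, frames, d)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / x.shape[1]          # (groups, d, d)
    trace_c = cov.diagonal(dim1=1, dim2=2).sum(dim=1)
    trace_c2 = (cov * cov).sum(dim=(1, 2))            # trace(C @ C), C symmetric
    return (d * trace_c2 / trace_c ** 2).mean()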
+2024-07-28 15:10:24,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=173637.33333333334, ans=0.05
+2024-07-28 15:10:39,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173677.33333333334, ans=0.1
+2024-07-28 15:10:41,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten.whitening_limit, batch_count=173677.33333333334, ans=22.5
+2024-07-28 15:10:42,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=173677.33333333334, ans=0.125
+2024-07-28 15:10:42,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173677.33333333334, ans=0.1
+2024-07-28 15:10:45,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=173690.66666666666, ans=0.1
+2024-07-28 15:10:57,561 INFO [train.py:1114] (3/4) Epoch 13, batch 7600, loss[loss=0.169, simple_loss=0.2666, pruned_loss=0.03574, over 4804.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2771, pruned_loss=0.05055, over 938654.15 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:11:08,017 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.498e+01 5.988e+01 6.691e+01 9.239e+01, threshold=1.198e+02, percent-clipped=0.0
+2024-07-28 15:11:17,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.79 vs. limit=15.0
+2024-07-28 15:11:37,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=173730.66666666666, ans=0.125
+2024-07-28 15:11:55,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=173730.66666666666, ans=0.125
+2024-07-28 15:12:00,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=173744.0, ans=0.2
+2024-07-28 15:12:06,085 INFO [train.py:1114] (3/4) Epoch 13, batch 7650, loss[loss=0.1599, simple_loss=0.2411, pruned_loss=0.03933, over 4945.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2764, pruned_loss=0.0503, over 937528.44 frames. ], batch size: 12, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:12:08,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173757.33333333334, ans=0.1
+2024-07-28 15:12:10,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=173757.33333333334, ans=0.125
+2024-07-28 15:12:24,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173784.0, ans=0.125
+2024-07-28 15:12:27,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=173784.0, ans=0.125
+2024-07-28 15:12:32,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=173797.33333333334, ans=0.04949747468305833
+2024-07-28 15:12:34,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=173810.66666666666, ans=0.05
+2024-07-28 15:12:41,862 INFO [train.py:1114] (3/4) Epoch 13, batch 7700, loss[loss=0.1876, simple_loss=0.2831, pruned_loss=0.04601, over 4695.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.276, pruned_loss=0.0495, over 935074.87 frames. ], batch size: 13, lr: 5.71e-03, grad_scale: 64.0
+2024-07-28 15:12:43,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=173824.0, ans=0.125
+2024-07-28 15:12:52,767 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.534e+01 6.118e+01 6.663e+01 8.734e+01, threshold=1.224e+02, percent-clipped=0.0
+2024-07-28 15:12:57,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=173850.66666666666, ans=0.125
+2024-07-28 15:13:06,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=173864.0, ans=0.0
+2024-07-28 15:13:09,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=173877.33333333334, ans=0.1
+2024-07-28 15:13:14,201 INFO [train.py:1114] (3/4) Epoch 13, batch 7750, loss[loss=0.1632, simple_loss=0.2675, pruned_loss=0.02941, over 4934.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2769, pruned_loss=0.04922, over 936177.38 frames. ], batch size: 14, lr: 5.71e-03, grad_scale: 32.0
+2024-07-28 15:13:18,293 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:13:31,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.64 vs. limit=10.0
+2024-07-28 15:13:34,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=173917.33333333334, ans=0.2
+2024-07-28 15:13:47,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.65 vs. limit=15.0
+2024-07-28 15:13:49,765 INFO [train.py:1114] (3/4) Epoch 13, batch 7800, loss[loss=0.2102, simple_loss=0.2853, pruned_loss=0.06757, over 4672.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2769, pruned_loss=0.0495, over 938165.13 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:14:01,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.796e+01 5.555e+01 6.069e+01 6.471e+01 9.594e+01, threshold=1.214e+02, percent-clipped=0.0
+2024-07-28 15:14:11,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.80 vs. limit=10.0
+2024-07-28 15:14:34,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.41 vs. limit=22.5
+2024-07-28 15:14:35,069 INFO [train.py:1114] (3/4) Epoch 13, batch 7850, loss[loss=0.1382, simple_loss=0.2132, pruned_loss=0.03165, over 4481.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.277, pruned_loss=0.04986, over 936301.30 frames. ], batch size: 10, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:14:37,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=174024.0, ans=0.125
+2024-07-28 15:14:38,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=174024.0, ans=0.125
+2024-07-28 15:14:49,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=9.62 vs. limit=12.0
+2024-07-28 15:14:50,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=174050.66666666666, ans=0.0
+2024-07-28 15:15:09,971 INFO [train.py:1114] (3/4) Epoch 13, batch 7900, loss[loss=0.2204, simple_loss=0.311, pruned_loss=0.06493, over 4870.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2778, pruned_loss=0.05056, over 932774.05 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:15:20,616 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.632e+01 6.110e+01 7.084e+01 9.814e+01, threshold=1.222e+02, percent-clipped=0.0
+2024-07-28 15:15:22,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=174117.33333333334, ans=0.0
+2024-07-28 15:15:22,759 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:15:29,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=174130.66666666666, ans=0.07
+2024-07-28 15:15:38,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=174144.0, ans=15.0
+2024-07-28 15:15:41,922 INFO [train.py:1114] (3/4) Epoch 13, batch 7950, loss[loss=0.238, simple_loss=0.3071, pruned_loss=0.08445, over 3572.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2779, pruned_loss=0.05077, over 935319.68 frames. ], batch size: 37, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:15:47,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=174157.33333333334, ans=0.125
+2024-07-28 15:16:11,711 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.39 vs. limit=15.0
+2024-07-28 15:16:14,592 INFO [train.py:1114] (3/4) Epoch 13, batch 8000, loss[loss=0.1751, simple_loss=0.2521, pruned_loss=0.04907, over 4623.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2757, pruned_loss=0.04982, over 934804.82 frames. ], batch size: 11, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:16:25,590 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.751e+01 6.184e+01 6.866e+01 1.059e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 15:16:26,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=174250.66666666666, ans=0.1
+2024-07-28 15:16:31,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=174250.66666666666, ans=0.125
+2024-07-28 15:16:33,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=174250.66666666666, ans=0.0
+2024-07-28 15:16:33,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=174250.66666666666, ans=0.0
+2024-07-28 15:16:34,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=174264.0, ans=0.125
+2024-07-28 15:16:40,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=174264.0, ans=0.0
+2024-07-28 15:16:41,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=174277.33333333334, ans=0.2
+2024-07-28 15:16:44,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=174277.33333333334, ans=0.125
+2024-07-28 15:16:47,954 INFO [train.py:1114] (3/4) Epoch 13, batch 8050, loss[loss=0.217, simple_loss=0.3044, pruned_loss=0.06481, over 4811.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2761, pruned_loss=0.04995, over 934085.15 frames. ], batch size: 14, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:16:52,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=174290.66666666666, ans=0.0
+2024-07-28 15:16:56,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=174304.0, ans=15.0
+2024-07-28 15:17:08,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=174330.66666666666, ans=0.125
+2024-07-28 15:17:17,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=174344.0, ans=0.125
+2024-07-28 15:17:21,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=174344.0, ans=0.125
+2024-07-28 15:17:22,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174344.0, ans=0.1
+2024-07-28 15:17:22,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=174344.0, ans=0.125
+2024-07-28 15:17:23,901 INFO [train.py:1114] (3/4) Epoch 13, batch 8100, loss[loss=0.2061, simple_loss=0.2988, pruned_loss=0.05673, over 4800.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2763, pruned_loss=0.04967, over 934182.65 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:17:24,027 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 15:17:25,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.20 vs. limit=22.5
+2024-07-28 15:17:27,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=174357.33333333334, ans=0.125
+2024-07-28 15:17:29,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174357.33333333334, ans=0.125
+2024-07-28 15:17:30,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.23 vs. limit=22.5
+2024-07-28 15:17:34,671 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.712e+01 6.251e+01 7.311e+01 9.756e+01, threshold=1.250e+02, percent-clipped=0.0
+2024-07-28 15:17:45,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.67 vs. limit=15.0
+2024-07-28 15:17:45,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=174397.33333333334, ans=0.125
+2024-07-28 15:18:04,940 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.85 vs. limit=22.5
+2024-07-28 15:18:25,031 INFO [train.py:1114] (3/4) Epoch 13, batch 8150, loss[loss=0.2193, simple_loss=0.3152, pruned_loss=0.06168, over 4813.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2763, pruned_loss=0.04966, over 937507.58 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:18:25,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174424.0, ans=0.1
+2024-07-28 15:18:49,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=174450.66666666666, ans=0.0
+2024-07-28 15:21:24,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=174464.0, ans=0.125
+2024-07-28 15:21:31,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.48 vs. limit=22.5
+2024-07-28 15:21:34,239 INFO [train.py:1114] (3/4) Epoch 13, batch 8200, loss[loss=0.1989, simple_loss=0.2894, pruned_loss=0.05418, over 4814.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2759, pruned_loss=0.04899, over 938429.92 frames. ], batch size: 15, lr: 5.70e-03, grad_scale: 32.0
+2024-07-28 15:21:44,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=174490.66666666666, ans=0.125
+2024-07-28 15:21:57,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=15.0
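grad_scale in the train.py:1114 lines moved from 32.0 to 64.0 around batch 7300 and back to 32.0 by batch 7750, which is consistent with standard dynamic loss scaling for mixed-precision training: double the scale after a stretch of overflow-free steps, halve it when an overflow is detected. A sketch of that policy (an assumption inferred from the log; the optimizer may manage the scale differently):

class DynamicLossScale:
    # Double the scale after `growth_interval` clean steps, halve on overflow.
    # The 32 -> 64 -> 32 movement of grad_scale in this log is consistent
    # with this policy, but the actual implementation is not shown here.
    def __init__(self, scale=32.0, growth_interval=2000):
        self.scale = scale
        self.growth_interval = growth_interval
        self.clean_steps = 0

    def update(self, found_inf):
        if found_inf:          # overflow: halve and restart the counter
            self.scale /= 2.0
            self.clean_steps = 0
        else:
            self.clean_steps += 1
            if self.clean_steps % self.growth_interval == 0:
                self.scale *= 2.0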
limit=15.0 +2024-07-28 15:21:57,442 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.419e+01 5.624e+01 6.115e+01 7.227e+01 1.322e+02, threshold=1.223e+02, percent-clipped=1.0 +2024-07-28 15:22:01,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=174517.33333333334, ans=0.015 +2024-07-28 15:22:04,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.35 vs. limit=15.0 +2024-07-28 15:22:27,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.71 vs. limit=10.0 +2024-07-28 15:23:06,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=174544.0, ans=0.1 +2024-07-28 15:23:06,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=174544.0, ans=0.125 +2024-07-28 15:23:08,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=174544.0, ans=0.125 +2024-07-28 15:23:09,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.33 vs. limit=22.5 +2024-07-28 15:23:15,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=174544.0, ans=0.035 +2024-07-28 15:23:17,766 INFO [train.py:1114] (3/4) Epoch 13, batch 8250, loss[loss=0.1702, simple_loss=0.2622, pruned_loss=0.03909, over 4883.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2758, pruned_loss=0.04913, over 938471.28 frames. ], batch size: 13, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:23:45,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.65 vs. limit=15.0 +2024-07-28 15:23:59,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=174610.66666666666, ans=0.09899494936611666 +2024-07-28 15:24:00,771 INFO [train.py:1114] (3/4) Epoch 13, batch 8300, loss[loss=0.1945, simple_loss=0.2925, pruned_loss=0.04826, over 4913.00 frames. ], tot_loss[loss=0.188, simple_loss=0.277, pruned_loss=0.04948, over 938184.70 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:24:11,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=174637.33333333334, ans=0.125 +2024-07-28 15:24:11,814 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.640e+01 5.984e+01 6.893e+01 9.803e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 15:24:12,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-07-28 15:24:26,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=174664.0, ans=0.125 +2024-07-28 15:24:38,262 INFO [train.py:1114] (3/4) Epoch 13, batch 8350, loss[loss=0.218, simple_loss=0.3075, pruned_loss=0.06429, over 4802.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2766, pruned_loss=0.04918, over 940898.99 frames. 
], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:24:41,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=174690.66666666666, ans=0.125 +2024-07-28 15:24:48,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=174704.0, ans=0.125 +2024-07-28 15:24:52,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.63 vs. limit=22.5 +2024-07-28 15:25:12,895 INFO [train.py:1114] (3/4) Epoch 13, batch 8400, loss[loss=0.1867, simple_loss=0.2737, pruned_loss=0.04991, over 4775.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2772, pruned_loss=0.04951, over 940050.18 frames. ], batch size: 12, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:25:13,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=174757.33333333334, ans=0.0 +2024-07-28 15:25:23,772 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.906e+01 5.833e+01 6.092e+01 7.413e+01 1.221e+02, threshold=1.218e+02, percent-clipped=1.0 +2024-07-28 15:25:31,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=174784.0, ans=0.125 +2024-07-28 15:25:31,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=2.95 vs. limit=12.0 +2024-07-28 15:25:42,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.99 vs. limit=15.0 +2024-07-28 15:25:51,141 INFO [train.py:1114] (3/4) Epoch 13, batch 8450, loss[loss=0.1927, simple_loss=0.2808, pruned_loss=0.05233, over 4809.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2771, pruned_loss=0.04942, over 938559.62 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:25:55,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.30 vs. limit=8.0 +2024-07-28 15:25:55,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=174824.0, ans=0.0 +2024-07-28 15:26:09,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=174864.0, ans=0.125 +2024-07-28 15:26:09,690 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.61 vs. limit=15.0 +2024-07-28 15:26:29,353 INFO [train.py:1114] (3/4) Epoch 13, batch 8500, loss[loss=0.1614, simple_loss=0.2508, pruned_loss=0.03603, over 4615.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2766, pruned_loss=0.04927, over 938748.14 frames. 
], batch size: 11, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:29:17,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=174890.66666666666, ans=0.0 +2024-07-28 15:29:20,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=174890.66666666666, ans=0.125 +2024-07-28 15:29:23,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=174904.0, ans=0.025 +2024-07-28 15:29:35,085 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.221e+01 5.689e+01 6.230e+01 7.373e+01 1.057e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 15:29:35,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=174904.0, ans=0.0 +2024-07-28 15:29:35,886 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:29:36,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=174917.33333333334, ans=0.125 +2024-07-28 15:29:58,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=174944.0, ans=0.1 +2024-07-28 15:29:59,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.97 vs. limit=22.5 +2024-07-28 15:30:03,278 INFO [train.py:1114] (3/4) Epoch 13, batch 8550, loss[loss=0.1431, simple_loss=0.219, pruned_loss=0.0336, over 4807.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.276, pruned_loss=0.04912, over 939641.03 frames. ], batch size: 11, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:30:15,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.82 vs. limit=15.0 +2024-07-28 15:30:16,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=174984.0, ans=0.125 +2024-07-28 15:30:29,933 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.23 vs. limit=15.0 +2024-07-28 15:30:31,943 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.82 vs. limit=15.0 +2024-07-28 15:30:32,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=175010.66666666666, ans=0.0 +2024-07-28 15:30:41,170 INFO [train.py:1114] (3/4) Epoch 13, batch 8600, loss[loss=0.2011, simple_loss=0.3014, pruned_loss=0.05041, over 4802.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2762, pruned_loss=0.04925, over 939426.96 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:30:42,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.01 vs. limit=22.5 +2024-07-28 15:30:47,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.50 vs. 
limit=6.0 +2024-07-28 15:30:54,283 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.590e+01 5.714e+01 6.617e+01 7.604e+01 1.022e+02, threshold=1.323e+02, percent-clipped=0.0 +2024-07-28 15:31:06,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=175064.0, ans=0.07 +2024-07-28 15:31:09,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=175077.33333333334, ans=0.125 +2024-07-28 15:31:16,116 INFO [train.py:1114] (3/4) Epoch 13, batch 8650, loss[loss=0.206, simple_loss=0.2873, pruned_loss=0.06239, over 4905.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2757, pruned_loss=0.04947, over 940466.90 frames. ], batch size: 15, lr: 5.69e-03, grad_scale: 32.0 +2024-07-28 15:35:01,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175104.0, ans=0.125 +2024-07-28 15:35:02,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.63 vs. limit=15.0 +2024-07-28 15:36:20,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=175104.0, ans=0.1 +2024-07-28 15:36:28,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175117.33333333334, ans=0.125 +2024-07-28 15:36:37,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.77 vs. limit=10.0 +2024-07-28 15:36:41,443 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.61 vs. limit=15.0 +2024-07-28 15:36:41,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=175144.0, ans=0.125 +2024-07-28 15:36:43,571 INFO [train.py:1114] (3/4) Epoch 13, batch 8700, loss[loss=0.1942, simple_loss=0.281, pruned_loss=0.05375, over 4771.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2763, pruned_loss=0.04996, over 937999.71 frames. ], batch size: 13, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:36:46,499 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.68 vs. limit=15.0 +2024-07-28 15:36:59,762 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.561e+01 6.137e+01 6.917e+01 9.151e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 15:37:01,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.04 vs. 
limit=15.0 +2024-07-28 15:37:11,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=175197.33333333334, ans=0.04949747468305833 +2024-07-28 15:37:13,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=175197.33333333334, ans=0.5 +2024-07-28 15:37:25,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=175210.66666666666, ans=0.0 +2024-07-28 15:37:34,702 INFO [train.py:1114] (3/4) Epoch 13, batch 8750, loss[loss=0.2116, simple_loss=0.2988, pruned_loss=0.06221, over 4692.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2758, pruned_loss=0.04982, over 936369.07 frames. ], batch size: 15, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:37:40,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=175224.0, ans=0.125 +2024-07-28 15:37:44,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.53 vs. limit=12.0 +2024-07-28 15:37:52,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=175250.66666666666, ans=0.0 +2024-07-28 15:38:02,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175277.33333333334, ans=0.125 +2024-07-28 15:38:07,019 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:38:07,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=175277.33333333334, ans=0.125 +2024-07-28 15:38:09,911 INFO [train.py:1114] (3/4) Epoch 13, batch 8800, loss[loss=0.1764, simple_loss=0.2759, pruned_loss=0.03848, over 4934.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2752, pruned_loss=0.04895, over 937013.76 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:38:18,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=16.74 vs. limit=15.0 +2024-07-28 15:38:21,215 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.482e+01 5.841e+01 6.340e+01 7.291e+01 9.820e+01, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 15:38:26,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.64 vs. limit=15.0 +2024-07-28 15:38:26,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=175317.33333333334, ans=0.125 +2024-07-28 15:38:29,262 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:38:32,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=175330.66666666666, ans=0.0 +2024-07-28 15:38:41,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=175344.0, ans=0.0 +2024-07-28 15:38:43,106 INFO [train.py:1114] (3/4) Epoch 13, batch 8850, loss[loss=0.2218, simple_loss=0.3139, pruned_loss=0.06488, over 4481.00 frames. 
], tot_loss[loss=0.1872, simple_loss=0.2758, pruned_loss=0.04924, over 931603.86 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:38:46,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.99 vs. limit=15.0 +2024-07-28 15:38:54,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=175370.66666666666, ans=0.05 +2024-07-28 15:38:59,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=175384.0, ans=15.0 +2024-07-28 15:39:06,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=175397.33333333334, ans=0.0 +2024-07-28 15:39:08,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=175410.66666666666, ans=0.0 +2024-07-28 15:39:09,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175410.66666666666, ans=0.1 +2024-07-28 15:39:14,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=13.00 vs. limit=15.0 +2024-07-28 15:39:15,588 INFO [train.py:1114] (3/4) Epoch 13, batch 8900, loss[loss=0.1543, simple_loss=0.242, pruned_loss=0.03334, over 4934.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2751, pruned_loss=0.04894, over 929608.66 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:39:27,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.79 vs. limit=15.0 +2024-07-28 15:39:30,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.70 vs. limit=15.0 +2024-07-28 15:39:32,293 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.752e+01 6.427e+01 7.462e+01 1.101e+02, threshold=1.285e+02, percent-clipped=0.0 +2024-07-28 15:39:33,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=175437.33333333334, ans=0.125 +2024-07-28 15:39:48,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=175477.33333333334, ans=0.125 +2024-07-28 15:39:53,970 INFO [train.py:1114] (3/4) Epoch 13, batch 8950, loss[loss=0.2091, simple_loss=0.2852, pruned_loss=0.06654, over 4497.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2748, pruned_loss=0.04911, over 930529.82 frames. ], batch size: 21, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:39:54,263 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.48 vs. 
limit=6.0 +2024-07-28 15:40:08,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=175504.0, ans=0.125 +2024-07-28 15:40:08,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=175517.33333333334, ans=0.025 +2024-07-28 15:40:09,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=175517.33333333334, ans=0.125 +2024-07-28 15:40:13,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=175517.33333333334, ans=0.125 +2024-07-28 15:40:13,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=175517.33333333334, ans=0.125 +2024-07-28 15:40:35,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=175544.0, ans=0.1 +2024-07-28 15:40:38,253 INFO [train.py:1114] (3/4) Epoch 13, batch 9000, loss[loss=0.1807, simple_loss=0.2732, pruned_loss=0.04408, over 4650.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2744, pruned_loss=0.049, over 933259.55 frames. ], batch size: 12, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:40:38,253 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 15:40:52,669 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.6679, 2.5072, 3.0052, 3.4365, 3.3548, 2.9093, 3.4726, 2.2848], + device='cuda:3') +2024-07-28 15:42:48,986 INFO [train.py:1146] (3/4) Epoch 13, validation: loss=0.1657, simple_loss=0.2696, pruned_loss=0.03096, over 944034.00 frames. +2024-07-28 15:42:48,986 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 15:45:00,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.83 vs. limit=10.0 +2024-07-28 15:45:01,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=175557.33333333334, ans=0.125 +2024-07-28 15:45:15,800 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.562e+01 6.322e+01 7.112e+01 1.143e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 15:45:16,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=175570.66666666666, ans=0.125 +2024-07-28 15:45:18,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=175584.0, ans=0.2 +2024-07-28 15:45:26,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=175597.33333333334, ans=0.5 +2024-07-28 15:45:29,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=175597.33333333334, ans=0.125 +2024-07-28 15:45:37,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.77 vs. limit=15.0 +2024-07-28 15:45:39,136 INFO [train.py:1114] (3/4) Epoch 13, batch 9050, loss[loss=0.1617, simple_loss=0.2571, pruned_loss=0.03311, over 4599.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2749, pruned_loss=0.04868, over 933582.18 frames. 
], batch size: 10, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:45:43,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=175624.0, ans=0.0 +2024-07-28 15:45:45,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=175624.0, ans=0.125 +2024-07-28 15:46:04,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=175664.0, ans=10.0 +2024-07-28 15:46:05,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=175677.33333333334, ans=0.125 +2024-07-28 15:46:07,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=175677.33333333334, ans=0.0 +2024-07-28 15:46:11,975 INFO [train.py:1114] (3/4) Epoch 13, batch 9100, loss[loss=0.1871, simple_loss=0.2732, pruned_loss=0.05051, over 4935.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.274, pruned_loss=0.04824, over 936294.50 frames. ], batch size: 14, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:46:13,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=175690.66666666666, ans=0.025 +2024-07-28 15:46:22,479 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.613e+01 6.012e+01 6.953e+01 8.806e+01, threshold=1.202e+02, percent-clipped=0.0 +2024-07-28 15:46:23,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=14.41 vs. limit=15.0 +2024-07-28 15:46:27,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=175717.33333333334, ans=0.0 +2024-07-28 15:46:34,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.93 vs. limit=15.0 +2024-07-28 15:46:59,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=175744.0, ans=0.125 +2024-07-28 15:47:18,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=175744.0, ans=0.125 +2024-07-28 15:47:19,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175757.33333333334, ans=0.1 +2024-07-28 15:47:19,813 INFO [train.py:1114] (3/4) Epoch 13, batch 9150, loss[loss=0.1875, simple_loss=0.2861, pruned_loss=0.04451, over 4818.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2751, pruned_loss=0.04886, over 935093.19 frames. 
], batch size: 14, lr: 5.68e-03, grad_scale: 32.0 +2024-07-28 15:47:19,954 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:47:20,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=175757.33333333334, ans=0.125 +2024-07-28 15:48:13,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=175770.66666666666, ans=0.125 +2024-07-28 15:48:21,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=175784.0, ans=0.1 +2024-07-28 15:48:36,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=175810.66666666666, ans=0.2 +2024-07-28 15:48:39,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.38 vs. limit=22.5 +2024-07-28 15:48:41,161 INFO [train.py:1114] (3/4) Epoch 13, batch 9200, loss[loss=0.1717, simple_loss=0.2542, pruned_loss=0.04463, over 4849.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.274, pruned_loss=0.04863, over 936894.43 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:48:44,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=175824.0, ans=0.125 +2024-07-28 15:48:51,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=175837.33333333334, ans=0.125 +2024-07-28 15:48:51,964 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.167e+01 6.927e+01 1.004e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 15:48:52,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=175837.33333333334, ans=0.125 +2024-07-28 15:48:56,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.80 vs. limit=15.0 +2024-07-28 15:49:12,536 INFO [train.py:1114] (3/4) Epoch 13, batch 9250, loss[loss=0.2207, simple_loss=0.3194, pruned_loss=0.06107, over 4636.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2745, pruned_loss=0.04887, over 937888.64 frames. ], batch size: 13, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:49:17,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=175890.66666666666, ans=0.125 +2024-07-28 15:51:56,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=175917.33333333334, ans=0.125 +2024-07-28 15:52:26,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.85 vs. 
limit=22.5 +2024-07-28 15:52:44,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=175930.66666666666, ans=0.0 +2024-07-28 15:52:47,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=175930.66666666666, ans=0.025 +2024-07-28 15:52:53,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=175944.0, ans=0.2 +2024-07-28 15:52:56,048 INFO [train.py:1114] (3/4) Epoch 13, batch 9300, loss[loss=0.1567, simple_loss=0.258, pruned_loss=0.02767, over 4779.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2738, pruned_loss=0.04853, over 937929.15 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:52:56,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=175957.33333333334, ans=0.125 +2024-07-28 15:52:58,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=175957.33333333334, ans=0.2 +2024-07-28 15:53:06,731 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.420e+01 5.839e+01 6.596e+01 1.003e+02, threshold=1.168e+02, percent-clipped=0.0 +2024-07-28 15:53:22,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=175997.33333333334, ans=0.0 +2024-07-28 15:53:36,027 INFO [train.py:1114] (3/4) Epoch 13, batch 9350, loss[loss=0.1759, simple_loss=0.2579, pruned_loss=0.04693, over 4821.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2742, pruned_loss=0.0484, over 935166.52 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:53:50,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=176050.66666666666, ans=0.2 +2024-07-28 15:53:53,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=176050.66666666666, ans=0.125 +2024-07-28 15:53:57,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=176064.0, ans=0.125 +2024-07-28 15:53:58,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=176064.0, ans=0.0 +2024-07-28 15:54:05,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=176077.33333333334, ans=0.125 +2024-07-28 15:54:08,559 INFO [train.py:1114] (3/4) Epoch 13, batch 9400, loss[loss=0.1987, simple_loss=0.2931, pruned_loss=0.05208, over 4697.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2757, pruned_loss=0.04887, over 933306.47 frames. 
], batch size: 13, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:54:19,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=176104.0, ans=0.125 +2024-07-28 15:54:19,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.585e+01 5.549e+01 6.208e+01 6.780e+01 1.030e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 15:54:25,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=176117.33333333334, ans=0.0 +2024-07-28 15:54:27,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=176130.66666666666, ans=0.125 +2024-07-28 15:54:29,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.32 vs. limit=15.0 +2024-07-28 15:54:40,625 INFO [train.py:1114] (3/4) Epoch 13, batch 9450, loss[loss=0.15, simple_loss=0.235, pruned_loss=0.03248, over 4820.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2756, pruned_loss=0.04913, over 932548.33 frames. ], batch size: 11, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:54:41,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=176157.33333333334, ans=0.2 +2024-07-28 15:54:46,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=176170.66666666666, ans=0.0 +2024-07-28 15:54:46,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=176170.66666666666, ans=0.125 +2024-07-28 15:54:53,276 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-28 15:54:53,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=176184.0, ans=0.125 +2024-07-28 15:55:00,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.52 vs. limit=10.0 +2024-07-28 15:55:02,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=176197.33333333334, ans=0.025 +2024-07-28 15:55:11,691 INFO [train.py:1114] (3/4) Epoch 13, batch 9500, loss[loss=0.1651, simple_loss=0.2483, pruned_loss=0.04096, over 4709.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2753, pruned_loss=0.04896, over 934832.86 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:55:15,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=176224.0, ans=0.09899494936611666 +2024-07-28 15:55:15,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=176224.0, ans=0.125 +2024-07-28 15:55:18,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.85 vs. 
limit=12.0 +2024-07-28 15:55:22,346 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.570e+01 5.491e+01 5.977e+01 6.811e+01 8.816e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 15:55:27,602 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.03 vs. limit=15.0 +2024-07-28 15:55:29,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=176264.0, ans=0.025 +2024-07-28 15:55:39,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=6.96 vs. limit=12.0 +2024-07-28 15:55:40,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=176277.33333333334, ans=0.125 +2024-07-28 15:55:43,369 INFO [train.py:1114] (3/4) Epoch 13, batch 9550, loss[loss=0.1798, simple_loss=0.2626, pruned_loss=0.0485, over 4766.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2742, pruned_loss=0.04872, over 932344.36 frames. ], batch size: 12, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:55:44,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176290.66666666666, ans=0.1 +2024-07-28 15:55:46,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=176290.66666666666, ans=0.0 +2024-07-28 15:55:48,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=176290.66666666666, ans=0.125 +2024-07-28 15:55:52,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=176304.0, ans=0.0 +2024-07-28 15:55:54,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=176304.0, ans=0.125 +2024-07-28 15:55:54,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=176304.0, ans=0.125 +2024-07-28 15:55:59,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-07-28 15:56:03,885 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.76 vs. limit=15.0 +2024-07-28 15:56:07,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=176330.66666666666, ans=0.025 +2024-07-28 15:56:13,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=176344.0, ans=0.125 +2024-07-28 15:56:15,429 INFO [train.py:1114] (3/4) Epoch 13, batch 9600, loss[loss=0.2685, simple_loss=0.3295, pruned_loss=0.1038, over 3572.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2748, pruned_loss=0.04865, over 931525.44 frames. 
], batch size: 36, lr: 5.67e-03, grad_scale: 32.0 +2024-07-28 15:56:26,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.903e+01 5.951e+01 6.565e+01 7.484e+01 1.008e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-28 15:56:28,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176384.0, ans=0.1 +2024-07-28 15:56:37,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0 +2024-07-28 15:56:42,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=176410.66666666666, ans=0.2 +2024-07-28 15:56:44,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=176410.66666666666, ans=10.0 +2024-07-28 15:56:47,958 INFO [train.py:1114] (3/4) Epoch 13, batch 9650, loss[loss=0.2035, simple_loss=0.3013, pruned_loss=0.05286, over 4852.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2761, pruned_loss=0.04949, over 927060.54 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 15:56:49,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=176424.0, ans=0.0 +2024-07-28 15:56:49,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176424.0, ans=0.1 +2024-07-28 15:56:52,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=176424.0, ans=0.0 +2024-07-28 15:56:52,719 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=18.11 vs. limit=22.5 +2024-07-28 15:56:58,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.10 vs. limit=6.0 +2024-07-28 15:57:01,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=176450.66666666666, ans=0.0 +2024-07-28 15:57:12,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.63 vs. limit=15.0 +2024-07-28 15:57:12,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.48 vs. limit=15.0 +2024-07-28 15:57:21,390 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:57:24,275 INFO [train.py:1114] (3/4) Epoch 13, batch 9700, loss[loss=0.2062, simple_loss=0.2982, pruned_loss=0.05711, over 4302.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2771, pruned_loss=0.05002, over 924440.08 frames. 
], batch size: 25, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 15:57:28,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=176490.66666666666, ans=0.0 +2024-07-28 15:57:31,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=176504.0, ans=0.125 +2024-07-28 15:57:33,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=176504.0, ans=0.125 +2024-07-28 15:57:34,737 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.912e+01 5.596e+01 6.037e+01 6.865e+01 8.980e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-28 15:57:44,383 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 15:57:59,700 INFO [train.py:1114] (3/4) Epoch 13, batch 9750, loss[loss=0.1843, simple_loss=0.2687, pruned_loss=0.04996, over 4689.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2766, pruned_loss=0.04998, over 924631.93 frames. ], batch size: 15, lr: 5.66e-03, grad_scale: 64.0 +2024-07-28 15:58:02,009 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.04 vs. limit=10.0 +2024-07-28 15:58:02,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=176557.33333333334, ans=0.0 +2024-07-28 15:58:08,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176570.66666666666, ans=0.1 +2024-07-28 15:58:13,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176570.66666666666, ans=0.1 +2024-07-28 16:00:15,683 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:00:27,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=176584.0, ans=0.1 +2024-07-28 16:01:24,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=176597.33333333334, ans=0.0 +2024-07-28 16:01:25,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=176597.33333333334, ans=0.125 +2024-07-28 16:01:27,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.36 vs. limit=22.5 +2024-07-28 16:01:34,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=176610.66666666666, ans=0.1 +2024-07-28 16:01:37,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=176624.0, ans=0.2 +2024-07-28 16:01:45,800 INFO [train.py:1114] (3/4) Epoch 13, batch 9800, loss[loss=0.1604, simple_loss=0.2435, pruned_loss=0.0387, over 4706.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2751, pruned_loss=0.04949, over 924597.35 frames. ], batch size: 12, lr: 5.66e-03, grad_scale: 64.0 +2024-07-28 16:01:57,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.35 vs. 
limit=22.5 +2024-07-28 16:02:02,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176637.33333333334, ans=0.125 +2024-07-28 16:02:08,118 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.529e+01 5.638e+01 6.459e+01 7.664e+01 1.106e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-28 16:02:30,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=176650.66666666666, ans=0.2 +2024-07-28 16:02:56,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=176664.0, ans=0.125 +2024-07-28 16:02:58,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=176664.0, ans=0.0 +2024-07-28 16:03:00,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=176677.33333333334, ans=0.0 +2024-07-28 16:03:19,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.50 vs. limit=15.0 +2024-07-28 16:03:21,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=176690.66666666666, ans=0.0 +2024-07-28 16:03:21,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=176690.66666666666, ans=0.125 +2024-07-28 16:03:21,596 INFO [train.py:1114] (3/4) Epoch 13, batch 9850, loss[loss=0.1878, simple_loss=0.2784, pruned_loss=0.04861, over 4904.00 frames. ], tot_loss[loss=0.188, simple_loss=0.276, pruned_loss=0.05, over 927045.20 frames. ], batch size: 15, lr: 5.66e-03, grad_scale: 64.0 +2024-07-28 16:03:21,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=176690.66666666666, ans=0.0 +2024-07-28 16:03:25,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176690.66666666666, ans=0.1 +2024-07-28 16:04:42,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=176717.33333333334, ans=0.0 +2024-07-28 16:05:30,431 INFO [train.py:1114] (3/4) Epoch 13, batch 9900, loss[loss=0.201, simple_loss=0.2845, pruned_loss=0.05877, over 4825.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.277, pruned_loss=0.05081, over 925953.69 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:05:57,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.92 vs. limit=15.0 +2024-07-28 16:06:10,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.12 vs. limit=22.5 +2024-07-28 16:06:17,570 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.68 vs. 
limit=6.0 +2024-07-28 16:06:49,267 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.859e+01 5.715e+01 6.519e+01 7.339e+01 1.147e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 16:06:58,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=176797.33333333334, ans=0.125 +2024-07-28 16:07:01,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=176797.33333333334, ans=0.0 +2024-07-28 16:07:11,626 INFO [train.py:1114] (3/4) Epoch 13, batch 9950, loss[loss=0.1663, simple_loss=0.2485, pruned_loss=0.04199, over 4541.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2772, pruned_loss=0.05151, over 928308.14 frames. ], batch size: 10, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:07:18,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=176824.0, ans=0.0 +2024-07-28 16:07:21,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=176824.0, ans=0.2 +2024-07-28 16:07:22,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=176824.0, ans=0.025 +2024-07-28 16:07:36,970 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:07:49,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=176890.66666666666, ans=0.125 +2024-07-28 16:07:50,043 INFO [train.py:1114] (3/4) Epoch 13, batch 10000, loss[loss=0.2084, simple_loss=0.299, pruned_loss=0.05892, over 4655.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2794, pruned_loss=0.05215, over 925857.92 frames. ], batch size: 16, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:07:53,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.27 vs. limit=6.0 +2024-07-28 16:08:01,260 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.743e+01 6.303e+01 7.198e+01 1.105e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:08:08,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=176930.66666666666, ans=0.125 +2024-07-28 16:08:11,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=176930.66666666666, ans=0.0 +2024-07-28 16:08:16,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=176944.0, ans=0.07 +2024-07-28 16:08:17,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=176944.0, ans=0.125 +2024-07-28 16:08:22,353 INFO [train.py:1114] (3/4) Epoch 13, batch 10050, loss[loss=0.2221, simple_loss=0.2861, pruned_loss=0.07902, over 3362.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.283, pruned_loss=0.05435, over 913929.86 frames. 
], batch size: 35, lr: 5.66e-03, grad_scale: 32.0 +2024-07-28 16:08:28,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=176970.66666666666, ans=0.125 +2024-07-28 16:08:38,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=176984.0, ans=0.0 +2024-07-28 16:08:47,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=176997.33333333334, ans=0.1 +2024-07-28 16:08:55,888 INFO [train.py:1114] (3/4) Epoch 13, batch 10100, loss[loss=0.218, simple_loss=0.2988, pruned_loss=0.06856, over 3291.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2881, pruned_loss=0.05948, over 862118.91 frames. ], batch size: 35, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:09:01,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=177024.0, ans=0.125 +2024-07-28 16:09:07,527 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.274e+01 6.562e+01 7.156e+01 7.782e+01 1.093e+02, threshold=1.431e+02, percent-clipped=0.0 +2024-07-28 16:09:10,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=177050.66666666666, ans=0.125 +2024-07-28 16:09:16,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=177064.0, ans=0.1 +2024-07-28 16:09:27,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177090.66666666666, ans=0.125 +2024-07-28 16:09:28,322 INFO [train.py:1114] (3/4) Epoch 13, batch 10150, loss[loss=0.2231, simple_loss=0.3068, pruned_loss=0.06972, over 3412.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2921, pruned_loss=0.06327, over 821739.81 frames. ], batch size: 36, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:09:34,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177104.0, ans=0.125 +2024-07-28 16:09:40,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177104.0, ans=0.1 +2024-07-28 16:09:55,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=177130.66666666666, ans=0.125 +2024-07-28 16:10:12,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177144.0, ans=0.125 +2024-07-28 16:10:15,117 INFO [train.py:1114] (3/4) Epoch 13, batch 10200, loss[loss=0.2227, simple_loss=0.3027, pruned_loss=0.07132, over 3278.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2945, pruned_loss=0.06586, over 790930.62 frames. ], batch size: 35, lr: 5.65e-03, grad_scale: 32.0 +2024-07-28 16:10:16,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177157.33333333334, ans=0.125 +2024-07-28 16:10:26,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.992e+01 6.629e+01 7.003e+01 7.390e+01 9.064e+01, threshold=1.401e+02, percent-clipped=0.0 +2024-07-28 16:11:16,419 INFO [train.py:1114] (3/4) Epoch 14, batch 0, loss[loss=0.1634, simple_loss=0.2596, pruned_loss=0.0336, over 4850.00 frames. 
], tot_loss[loss=0.1634, simple_loss=0.2596, pruned_loss=0.0336, over 4850.00 frames. ], batch size: 12, lr: 5.45e-03, grad_scale: 32.0 +2024-07-28 16:11:16,420 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 16:14:37,030 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.2913, 4.6924, 4.6142, 5.0730], device='cuda:3') +2024-07-28 16:14:55,768 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1673, simple_loss=0.2724, pruned_loss=0.03104, over 944034.00 frames. +2024-07-28 16:14:55,769 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 16:15:10,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=177200.0, ans=0.125 +2024-07-28 16:15:24,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=177226.66666666666, ans=0.0 +2024-07-28 16:15:35,831 INFO [train.py:1114] (3/4) Epoch 14, batch 50, loss[loss=0.1767, simple_loss=0.2606, pruned_loss=0.04638, over 4612.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2769, pruned_loss=0.04739, over 206022.57 frames. ], batch size: 11, lr: 5.45e-03, grad_scale: 32.0 +2024-07-28 16:15:37,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=177253.33333333334, ans=0.125 +2024-07-28 16:15:50,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=177266.66666666666, ans=0.2 +2024-07-28 16:15:50,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=177266.66666666666, ans=0.125 +2024-07-28 16:15:53,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=177280.0, ans=0.2 +2024-07-28 16:16:01,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=177280.0, ans=0.125 +2024-07-28 16:16:03,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.24 vs. limit=12.0 +2024-07-28 16:16:17,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.504e+01 5.430e+01 5.954e+01 6.690e+01 1.022e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 16:16:19,820 INFO [train.py:1114] (3/4) Epoch 14, batch 100, loss[loss=0.1759, simple_loss=0.2656, pruned_loss=0.04303, over 4640.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2784, pruned_loss=0.04882, over 364865.46 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:16:23,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.33 vs. 
limit=10.0 +2024-07-28 16:16:56,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=177346.66666666666, ans=0.1 +2024-07-28 16:17:02,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=177360.0, ans=0.0 +2024-07-28 16:17:06,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177373.33333333334, ans=0.125 +2024-07-28 16:17:09,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.68 vs. limit=15.0 +2024-07-28 16:17:11,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177373.33333333334, ans=0.125 +2024-07-28 16:17:13,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=177373.33333333334, ans=0.025 +2024-07-28 16:17:13,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=177373.33333333334, ans=0.2 +2024-07-28 16:17:14,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=177386.66666666666, ans=0.125 +2024-07-28 16:17:14,951 INFO [train.py:1114] (3/4) Epoch 14, batch 150, loss[loss=0.1503, simple_loss=0.2362, pruned_loss=0.0322, over 4608.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2766, pruned_loss=0.04819, over 493956.47 frames. ], batch size: 11, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:17:35,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=177413.33333333334, ans=0.125 +2024-07-28 16:17:44,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=177426.66666666666, ans=0.125 +2024-07-28 16:17:45,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.95 vs. limit=15.0 +2024-07-28 16:17:49,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=177440.0, ans=0.2 +2024-07-28 16:17:52,154 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.588e+01 5.424e+01 5.956e+01 7.040e+01 1.129e+02, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 16:17:57,485 INFO [train.py:1114] (3/4) Epoch 14, batch 200, loss[loss=0.1585, simple_loss=0.2467, pruned_loss=0.0352, over 4462.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2754, pruned_loss=0.04826, over 593660.93 frames. ], batch size: 21, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:17:59,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.29 vs. limit=15.0 +2024-07-28 16:18:01,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.41 vs. 
limit=15.0 +2024-07-28 16:18:01,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=177453.33333333334, ans=0.125 +2024-07-28 16:18:03,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=177466.66666666666, ans=0.2 +2024-07-28 16:18:22,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=177493.33333333334, ans=0.125 +2024-07-28 16:18:29,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=177506.66666666666, ans=0.035 +2024-07-28 16:18:37,119 INFO [train.py:1114] (3/4) Epoch 14, batch 250, loss[loss=0.2052, simple_loss=0.2882, pruned_loss=0.0611, over 4654.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2762, pruned_loss=0.04916, over 670816.34 frames. ], batch size: 16, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:18:38,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=177520.0, ans=0.0 +2024-07-28 16:18:38,148 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.21 vs. limit=15.0 +2024-07-28 16:18:53,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=177546.66666666666, ans=0.125 +2024-07-28 16:18:55,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=177546.66666666666, ans=0.125 +2024-07-28 16:19:02,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=177560.0, ans=0.0 +2024-07-28 16:19:05,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=177573.33333333334, ans=0.125 +2024-07-28 16:19:08,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.662e+01 6.232e+01 7.449e+01 1.133e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 16:19:09,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=177573.33333333334, ans=0.125 +2024-07-28 16:19:10,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=177586.66666666666, ans=0.025 +2024-07-28 16:19:11,379 INFO [train.py:1114] (3/4) Epoch 14, batch 300, loss[loss=0.1881, simple_loss=0.2766, pruned_loss=0.0498, over 4794.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.04945, over 730506.63 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:15,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.94 vs. limit=22.5 +2024-07-28 16:19:16,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.00 vs. limit=15.0 +2024-07-28 16:19:33,180 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.15 vs. 
limit=22.5 +2024-07-28 16:19:35,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=177626.66666666666, ans=0.125 +2024-07-28 16:19:37,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=177626.66666666666, ans=0.0 +2024-07-28 16:19:41,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=177640.0, ans=0.0 +2024-07-28 16:19:48,700 INFO [train.py:1114] (3/4) Epoch 14, batch 350, loss[loss=0.1683, simple_loss=0.2594, pruned_loss=0.03857, over 4943.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2761, pruned_loss=0.04918, over 776593.00 frames. ], batch size: 12, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:19:51,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.02 vs. limit=15.0 +2024-07-28 16:20:08,502 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.02 vs. limit=15.0 +2024-07-28 16:20:09,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177666.66666666666, ans=0.125 +2024-07-28 16:20:28,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=177680.0, ans=0.125 +2024-07-28 16:20:44,596 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.533e+01 5.994e+01 6.845e+01 1.570e+02, threshold=1.199e+02, percent-clipped=1.0 +2024-07-28 16:20:47,241 INFO [train.py:1114] (3/4) Epoch 14, batch 400, loss[loss=0.1766, simple_loss=0.2748, pruned_loss=0.03921, over 4701.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2753, pruned_loss=0.04825, over 814270.45 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:20:50,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=177720.0, ans=0.0 +2024-07-28 16:20:59,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-28 16:20:59,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=177746.66666666666, ans=0.2 +2024-07-28 16:21:10,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=177760.0, ans=0.0 +2024-07-28 16:21:21,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=177773.33333333334, ans=0.0 +2024-07-28 16:21:22,173 INFO [train.py:1114] (3/4) Epoch 14, batch 450, loss[loss=0.2122, simple_loss=0.3006, pruned_loss=0.06185, over 4636.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.275, pruned_loss=0.04804, over 839506.84 frames. ], batch size: 13, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:52,671 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.509e+01 6.147e+01 6.796e+01 9.434e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 16:21:55,367 INFO [train.py:1114] (3/4) Epoch 14, batch 500, loss[loss=0.225, simple_loss=0.3126, pruned_loss=0.06865, over 4683.00 frames. 
], tot_loss[loss=0.1847, simple_loss=0.2741, pruned_loss=0.0477, over 861896.58 frames. ], batch size: 15, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:21:56,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=24.39 vs. limit=22.5 +2024-07-28 16:22:11,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=177880.0, ans=0.0 +2024-07-28 16:22:19,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177893.33333333334, ans=0.125 +2024-07-28 16:22:24,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=177906.66666666666, ans=0.04949747468305833 +2024-07-28 16:22:28,998 INFO [train.py:1114] (3/4) Epoch 14, batch 550, loss[loss=0.1979, simple_loss=0.3056, pruned_loss=0.04505, over 4598.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2745, pruned_loss=0.04759, over 878001.54 frames. ], batch size: 17, lr: 5.44e-03, grad_scale: 32.0 +2024-07-28 16:22:31,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=177920.0, ans=0.125 +2024-07-28 16:22:32,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=177920.0, ans=0.0 +2024-07-28 16:22:37,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.02 vs. limit=15.0 +2024-07-28 16:22:47,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=177946.66666666666, ans=0.1 +2024-07-28 16:22:54,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.17 vs. limit=15.0 +2024-07-28 16:22:58,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=177973.33333333334, ans=0.125 +2024-07-28 16:22:58,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=177973.33333333334, ans=0.125 +2024-07-28 16:22:59,816 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.791e+01 6.101e+01 6.506e+01 8.521e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 16:23:02,688 INFO [train.py:1114] (3/4) Epoch 14, batch 600, loss[loss=0.1814, simple_loss=0.284, pruned_loss=0.03936, over 4631.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2746, pruned_loss=0.04784, over 892433.87 frames. 
], batch size: 16, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:23:07,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=177986.66666666666, ans=0.025 +2024-07-28 16:23:13,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=178000.0, ans=10.0 +2024-07-28 16:23:14,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=178000.0, ans=0.125 +2024-07-28 16:23:18,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=178013.33333333334, ans=0.125 +2024-07-28 16:23:19,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=178013.33333333334, ans=0.1 +2024-07-28 16:23:53,565 INFO [train.py:1114] (3/4) Epoch 14, batch 650, loss[loss=0.1752, simple_loss=0.2695, pruned_loss=0.04043, over 4760.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.274, pruned_loss=0.04781, over 904271.74 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:23:58,390 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:24:21,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178093.33333333334, ans=0.125 +2024-07-28 16:24:22,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=178093.33333333334, ans=0.125 +2024-07-28 16:24:22,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178093.33333333334, ans=0.1 +2024-07-28 16:24:24,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=178106.66666666666, ans=0.04949747468305833 +2024-07-28 16:24:27,862 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.659e+01 6.159e+01 7.127e+01 1.309e+02, threshold=1.232e+02, percent-clipped=1.0 +2024-07-28 16:24:28,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=178106.66666666666, ans=0.2 +2024-07-28 16:24:30,520 INFO [train.py:1114] (3/4) Epoch 14, batch 700, loss[loss=0.1511, simple_loss=0.2395, pruned_loss=0.03136, over 4646.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2745, pruned_loss=0.04811, over 912312.33 frames. 
], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:24:38,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=178133.33333333334, ans=0.125 +2024-07-28 16:24:42,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=178133.33333333334, ans=0.0 +2024-07-28 16:24:45,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=178146.66666666666, ans=0.125 +2024-07-28 16:24:52,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=178160.0, ans=0.0 +2024-07-28 16:26:34,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=178173.33333333334, ans=0.025 +2024-07-28 16:26:48,784 INFO [train.py:1114] (3/4) Epoch 14, batch 750, loss[loss=0.2024, simple_loss=0.2834, pruned_loss=0.06074, over 4686.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2742, pruned_loss=0.04841, over 918798.42 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:26:52,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=178186.66666666666, ans=0.0 +2024-07-28 16:26:55,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=178200.0, ans=0.0 +2024-07-28 16:27:01,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=178213.33333333334, ans=0.125 +2024-07-28 16:27:13,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.25 vs. limit=22.5 +2024-07-28 16:27:14,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=178226.66666666666, ans=0.125 +2024-07-28 16:27:14,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=178226.66666666666, ans=0.125 +2024-07-28 16:27:20,238 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.515e+01 6.007e+01 6.700e+01 1.144e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:27:21,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178240.0, ans=0.1 +2024-07-28 16:27:22,823 INFO [train.py:1114] (3/4) Epoch 14, batch 800, loss[loss=0.1647, simple_loss=0.2566, pruned_loss=0.0364, over 4855.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.275, pruned_loss=0.04893, over 923621.23 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:41,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=178280.0, ans=0.07 +2024-07-28 16:27:47,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.67 vs. limit=15.0 +2024-07-28 16:27:56,263 INFO [train.py:1114] (3/4) Epoch 14, batch 850, loss[loss=0.173, simple_loss=0.2779, pruned_loss=0.03404, over 4662.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2744, pruned_loss=0.04855, over 927631.71 frames. 
], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:27:56,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=178320.0, ans=0.125 +2024-07-28 16:27:59,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178320.0, ans=0.125 +2024-07-28 16:28:06,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=178333.33333333334, ans=0.125 +2024-07-28 16:28:09,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=178346.66666666666, ans=0.0 +2024-07-28 16:28:14,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178346.66666666666, ans=0.0 +2024-07-28 16:28:16,119 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=178360.0, ans=0.025 +2024-07-28 16:28:24,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=178373.33333333334, ans=0.2 +2024-07-28 16:28:27,737 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.522e+01 6.221e+01 7.119e+01 8.769e+01, threshold=1.244e+02, percent-clipped=0.0 +2024-07-28 16:28:27,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=178373.33333333334, ans=0.125 +2024-07-28 16:28:29,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=178373.33333333334, ans=0.125 +2024-07-28 16:28:30,528 INFO [train.py:1114] (3/4) Epoch 14, batch 900, loss[loss=0.1539, simple_loss=0.2307, pruned_loss=0.03855, over 4853.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2743, pruned_loss=0.04858, over 928478.54 frames. ], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:28:34,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=178386.66666666666, ans=0.025 +2024-07-28 16:28:43,843 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.38 vs. limit=10.0 +2024-07-28 16:28:44,386 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.32 vs. limit=15.0 +2024-07-28 16:28:58,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=178426.66666666666, ans=0.0 +2024-07-28 16:28:59,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=178440.0, ans=0.1 +2024-07-28 16:29:05,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-07-28 16:29:06,312 INFO [train.py:1114] (3/4) Epoch 14, batch 950, loss[loss=0.1867, simple_loss=0.2738, pruned_loss=0.04981, over 4780.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2749, pruned_loss=0.04851, over 930181.00 frames. 
], batch size: 12, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:17,215 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:29:21,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178480.0, ans=0.1 +2024-07-28 16:29:24,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=178480.0, ans=0.125 +2024-07-28 16:29:24,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=178480.0, ans=0.125 +2024-07-28 16:29:25,352 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=13.89 vs. limit=15.0 +2024-07-28 16:29:36,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=178506.66666666666, ans=0.025 +2024-07-28 16:29:37,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.573e+01 6.236e+01 6.880e+01 1.050e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 16:29:39,835 INFO [train.py:1114] (3/4) Epoch 14, batch 1000, loss[loss=0.1699, simple_loss=0.253, pruned_loss=0.04347, over 4962.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2746, pruned_loss=0.0485, over 930211.48 frames. ], batch size: 13, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:29:41,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.prob, batch_count=178520.0, ans=0.125 +2024-07-28 16:29:42,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=178520.0, ans=0.0 +2024-07-28 16:29:46,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=178533.33333333334, ans=0.2 +2024-07-28 16:29:47,113 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.97 vs. limit=15.0 +2024-07-28 16:29:56,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=178546.66666666666, ans=0.125 +2024-07-28 16:29:58,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=178546.66666666666, ans=0.2 +2024-07-28 16:30:04,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=178560.0, ans=0.2 +2024-07-28 16:30:16,066 INFO [train.py:1114] (3/4) Epoch 14, batch 1050, loss[loss=0.1665, simple_loss=0.2589, pruned_loss=0.03705, over 4873.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2741, pruned_loss=0.04838, over 932450.39 frames. 
], batch size: 14, lr: 5.43e-03, grad_scale: 32.0 +2024-07-28 16:30:28,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=178600.0, ans=0.2 +2024-07-28 16:30:32,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178613.33333333334, ans=0.125 +2024-07-28 16:30:33,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=178613.33333333334, ans=0.2 +2024-07-28 16:30:38,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=178626.66666666666, ans=0.2 +2024-07-28 16:30:48,749 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.778e+01 6.259e+01 7.627e+01 1.146e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-28 16:30:53,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=178653.33333333334, ans=0.2 +2024-07-28 16:30:53,944 INFO [train.py:1114] (3/4) Epoch 14, batch 1100, loss[loss=0.1537, simple_loss=0.2388, pruned_loss=0.03428, over 4899.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2741, pruned_loss=0.04859, over 934656.88 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:30:55,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.65 vs. limit=15.0 +2024-07-28 16:31:05,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=178666.66666666666, ans=0.125 +2024-07-28 16:31:12,615 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.91 vs. limit=15.0 +2024-07-28 16:31:14,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=178693.33333333334, ans=0.125 +2024-07-28 16:31:27,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178706.66666666666, ans=0.1 +2024-07-28 16:31:31,675 INFO [train.py:1114] (3/4) Epoch 14, batch 1150, loss[loss=0.1679, simple_loss=0.258, pruned_loss=0.0389, over 4903.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2745, pruned_loss=0.04872, over 934554.73 frames. ], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:31:33,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=178720.0, ans=0.2 +2024-07-28 16:31:35,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=178720.0, ans=0.0 +2024-07-28 16:31:38,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.15 vs. limit=15.0 +2024-07-28 16:31:39,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. 
limit=15.0 +2024-07-28 16:31:40,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=178733.33333333334, ans=0.1 +2024-07-28 16:31:42,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.61 vs. limit=15.0 +2024-07-28 16:31:42,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=178733.33333333334, ans=0.1 +2024-07-28 16:31:43,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=178733.33333333334, ans=0.2 +2024-07-28 16:31:47,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178746.66666666666, ans=0.1 +2024-07-28 16:31:47,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=178746.66666666666, ans=15.0 +2024-07-28 16:31:52,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.52 vs. limit=22.5 +2024-07-28 16:31:55,077 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=178760.0, ans=0.05 +2024-07-28 16:31:55,293 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.72 vs. limit=6.0 +2024-07-28 16:32:02,869 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.551e+01 6.026e+01 6.659e+01 1.121e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 16:32:05,595 INFO [train.py:1114] (3/4) Epoch 14, batch 1200, loss[loss=0.1883, simple_loss=0.275, pruned_loss=0.05081, over 4874.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.274, pruned_loss=0.04829, over 933994.88 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:05,954 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=32.58 vs. limit=22.5 +2024-07-28 16:32:20,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=178813.33333333334, ans=0.5 +2024-07-28 16:32:24,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=178826.66666666666, ans=0.125 +2024-07-28 16:32:25,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=178826.66666666666, ans=0.125 +2024-07-28 16:32:38,592 INFO [train.py:1114] (3/4) Epoch 14, batch 1250, loss[loss=0.2062, simple_loss=0.2905, pruned_loss=0.0609, over 4809.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2753, pruned_loss=0.04857, over 937810.75 frames. 
], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:32:42,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=178853.33333333334, ans=0.2 +2024-07-28 16:32:49,117 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:33:01,374 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.94 vs. limit=15.0 +2024-07-28 16:33:01,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=178893.33333333334, ans=0.025 +2024-07-28 16:33:03,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178893.33333333334, ans=0.1 +2024-07-28 16:33:05,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.23 vs. limit=10.0 +2024-07-28 16:33:06,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=178906.66666666666, ans=0.0 +2024-07-28 16:33:08,072 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.64 vs. limit=10.0 +2024-07-28 16:33:09,548 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.596e+01 6.109e+01 6.927e+01 8.665e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:33:11,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=178920.0, ans=0.07 +2024-07-28 16:33:12,232 INFO [train.py:1114] (3/4) Epoch 14, batch 1300, loss[loss=0.2088, simple_loss=0.2956, pruned_loss=0.06104, over 4744.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2741, pruned_loss=0.04795, over 938981.78 frames. ], batch size: 19, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:14,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=178920.0, ans=0.125 +2024-07-28 16:33:17,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178920.0, ans=0.125 +2024-07-28 16:33:17,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178920.0, ans=0.125 +2024-07-28 16:33:24,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178933.33333333334, ans=0.1 +2024-07-28 16:33:24,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=178946.66666666666, ans=0.1 +2024-07-28 16:33:30,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=178946.66666666666, ans=0.0 +2024-07-28 16:33:36,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=178960.0, ans=15.0 +2024-07-28 16:33:45,454 INFO [train.py:1114] (3/4) Epoch 14, batch 1350, loss[loss=0.188, simple_loss=0.2802, pruned_loss=0.04792, over 4758.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2738, pruned_loss=0.04781, over 940892.55 frames. 
], batch size: 13, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:33:46,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=178986.66666666666, ans=0.125 +2024-07-28 16:33:57,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179000.0, ans=0.125 +2024-07-28 16:34:03,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=179013.33333333334, ans=0.0 +2024-07-28 16:34:07,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.77 vs. limit=15.0 +2024-07-28 16:34:15,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-07-28 16:34:18,363 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.737e+01 5.767e+01 6.518e+01 7.803e+01 1.206e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 16:34:18,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=179040.0, ans=0.0 +2024-07-28 16:34:21,088 INFO [train.py:1114] (3/4) Epoch 14, batch 1400, loss[loss=0.1633, simple_loss=0.2478, pruned_loss=0.03938, over 4695.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2748, pruned_loss=0.04819, over 942761.34 frames. ], batch size: 11, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:37,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179080.0, ans=0.125 +2024-07-28 16:34:42,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=179093.33333333334, ans=0.125 +2024-07-28 16:34:47,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=179106.66666666666, ans=0.125 +2024-07-28 16:34:54,875 INFO [train.py:1114] (3/4) Epoch 14, batch 1450, loss[loss=0.2004, simple_loss=0.2913, pruned_loss=0.05469, over 4681.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2743, pruned_loss=0.04817, over 942615.19 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:34:56,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=179120.0, ans=0.125 +2024-07-28 16:35:02,640 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.89 vs. limit=22.5 +2024-07-28 16:35:07,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=179133.33333333334, ans=0.2 +2024-07-28 16:35:11,709 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:35:16,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=179160.0, ans=0.0 +2024-07-28 16:35:25,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. 
limit=6.0 +2024-07-28 16:35:25,328 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.281e+01 5.717e+01 6.158e+01 6.700e+01 8.649e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 16:35:28,157 INFO [train.py:1114] (3/4) Epoch 14, batch 1500, loss[loss=0.1881, simple_loss=0.2854, pruned_loss=0.04537, over 4808.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2756, pruned_loss=0.04853, over 942109.66 frames. ], batch size: 14, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:35:28,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=179186.66666666666, ans=0.125 +2024-07-28 16:35:30,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179186.66666666666, ans=0.125 +2024-07-28 16:35:31,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179186.66666666666, ans=0.125 +2024-07-28 16:35:31,373 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.43 vs. limit=15.0 +2024-07-28 16:35:31,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=179186.66666666666, ans=0.125 +2024-07-28 16:35:36,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=179200.0, ans=0.125 +2024-07-28 16:35:47,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179213.33333333334, ans=0.125 +2024-07-28 16:35:48,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=179226.66666666666, ans=0.1 +2024-07-28 16:35:52,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=179226.66666666666, ans=0.07 +2024-07-28 16:35:54,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=179226.66666666666, ans=0.0 +2024-07-28 16:36:02,056 INFO [train.py:1114] (3/4) Epoch 14, batch 1550, loss[loss=0.2183, simple_loss=0.316, pruned_loss=0.06029, over 4909.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.276, pruned_loss=0.04878, over 938538.34 frames. ], batch size: 15, lr: 5.42e-03, grad_scale: 32.0 +2024-07-28 16:36:04,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.37 vs. limit=15.0 +2024-07-28 16:36:12,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179266.66666666666, ans=0.1 +2024-07-28 16:36:16,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=179280.0, ans=0.125 +2024-07-28 16:36:23,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.58 vs. limit=6.0 +2024-07-28 16:36:25,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.62 vs. 
limit=15.0 +2024-07-28 16:36:26,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=179293.33333333334, ans=0.125 +2024-07-28 16:36:31,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=179306.66666666666, ans=0.125 +2024-07-28 16:36:34,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.776e+01 5.675e+01 6.307e+01 6.776e+01 1.138e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:36:39,296 INFO [train.py:1114] (3/4) Epoch 14, batch 1600, loss[loss=0.1881, simple_loss=0.2771, pruned_loss=0.04954, over 4869.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2752, pruned_loss=0.04844, over 937098.92 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 32.0 +2024-07-28 16:36:49,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.58 vs. limit=15.0 +2024-07-28 16:36:52,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=179333.33333333334, ans=0.125 +2024-07-28 16:36:55,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=179346.66666666666, ans=0.035 +2024-07-28 16:37:03,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.32 vs. limit=15.0 +2024-07-28 16:37:14,612 INFO [train.py:1114] (3/4) Epoch 14, batch 1650, loss[loss=0.1876, simple_loss=0.2866, pruned_loss=0.04433, over 4657.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2749, pruned_loss=0.04862, over 937817.65 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:37:15,826 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.01 vs. limit=10.0 +2024-07-28 16:37:22,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179400.0, ans=0.1 +2024-07-28 16:37:22,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=179400.0, ans=0.125 +2024-07-28 16:37:30,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=179413.33333333334, ans=0.0 +2024-07-28 16:37:32,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=179413.33333333334, ans=0.125 +2024-07-28 16:37:45,304 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.723e+01 6.070e+01 6.636e+01 1.142e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 16:37:47,999 INFO [train.py:1114] (3/4) Epoch 14, batch 1700, loss[loss=0.1497, simple_loss=0.2337, pruned_loss=0.03285, over 4716.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2739, pruned_loss=0.04778, over 939476.19 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:37:54,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. 
limit=15.0 +2024-07-28 16:37:54,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179466.66666666666, ans=0.125 +2024-07-28 16:37:59,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=179466.66666666666, ans=0.125 +2024-07-28 16:38:15,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=179506.66666666666, ans=0.125 +2024-07-28 16:38:16,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=179506.66666666666, ans=0.125 +2024-07-28 16:38:21,302 INFO [train.py:1114] (3/4) Epoch 14, batch 1750, loss[loss=0.1822, simple_loss=0.2524, pruned_loss=0.05595, over 4811.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2726, pruned_loss=0.04733, over 940236.71 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:38:23,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=179520.0, ans=0.0 +2024-07-28 16:38:42,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=179560.0, ans=0.0 +2024-07-28 16:38:44,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179560.0, ans=0.125 +2024-07-28 16:38:47,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=179560.0, ans=0.125 +2024-07-28 16:38:52,729 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.501e+01 6.182e+01 7.069e+01 1.179e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 16:38:55,450 INFO [train.py:1114] (3/4) Epoch 14, batch 1800, loss[loss=0.1938, simple_loss=0.2845, pruned_loss=0.05153, over 4641.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2732, pruned_loss=0.04744, over 940722.70 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:38:57,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. limit=6.0 +2024-07-28 16:39:01,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=179586.66666666666, ans=0.0 +2024-07-28 16:39:04,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.33 vs. limit=6.0 +2024-07-28 16:39:10,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=179613.33333333334, ans=0.04949747468305833 +2024-07-28 16:39:16,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=179626.66666666666, ans=0.125 +2024-07-28 16:39:24,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=179640.0, ans=0.0 +2024-07-28 16:39:24,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=179640.0, ans=0.0 +2024-07-28 16:39:29,078 INFO [train.py:1114] (3/4) Epoch 14, batch 1850, loss[loss=0.1747, simple_loss=0.2777, pruned_loss=0.03581, over 4809.00 frames. 
], tot_loss[loss=0.184, simple_loss=0.273, pruned_loss=0.04754, over 940787.77 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:39:29,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=179653.33333333334, ans=0.0 +2024-07-28 16:39:30,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=179653.33333333334, ans=0.125 +2024-07-28 16:39:38,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=179666.66666666666, ans=0.125 +2024-07-28 16:39:43,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=179680.0, ans=0.0 +2024-07-28 16:39:52,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=179693.33333333334, ans=0.2 +2024-07-28 16:39:52,380 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.17 vs. limit=15.0 +2024-07-28 16:39:56,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.22 vs. limit=15.0 +2024-07-28 16:40:01,781 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.632e+01 6.282e+01 7.683e+01 1.282e+02, threshold=1.256e+02, percent-clipped=1.0 +2024-07-28 16:42:23,049 INFO [train.py:1114] (3/4) Epoch 14, batch 1900, loss[loss=0.1658, simple_loss=0.2662, pruned_loss=0.03264, over 4663.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2733, pruned_loss=0.04718, over 941810.57 frames. ], batch size: 14, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:42:43,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.21 vs. limit=22.5 +2024-07-28 16:42:44,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=179733.33333333334, ans=0.125 +2024-07-28 16:42:55,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179733.33333333334, ans=0.1 +2024-07-28 16:42:58,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=179733.33333333334, ans=0.0 +2024-07-28 16:43:20,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=179773.33333333334, ans=0.2 +2024-07-28 16:43:22,171 INFO [train.py:1114] (3/4) Epoch 14, batch 1950, loss[loss=0.1634, simple_loss=0.2497, pruned_loss=0.03858, over 4885.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2747, pruned_loss=0.04794, over 943711.63 frames. ], batch size: 13, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:43:33,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=179800.0, ans=0.025 +2024-07-28 16:43:33,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.79 vs. 
limit=15.0 +2024-07-28 16:43:44,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=179826.66666666666, ans=0.07 +2024-07-28 16:43:48,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.32 vs. limit=10.0 +2024-07-28 16:43:51,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=179840.0, ans=0.125 +2024-07-28 16:43:52,884 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.659e+01 6.185e+01 6.876e+01 9.171e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 16:43:55,615 INFO [train.py:1114] (3/4) Epoch 14, batch 2000, loss[loss=0.1657, simple_loss=0.2472, pruned_loss=0.04204, over 4786.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2747, pruned_loss=0.04804, over 940588.14 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:44:05,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=179866.66666666666, ans=0.0 +2024-07-28 16:44:15,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=179893.33333333334, ans=0.125 +2024-07-28 16:44:32,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.34 vs. limit=15.0 +2024-07-28 16:44:36,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=179906.66666666666, ans=0.0 +2024-07-28 16:44:39,842 INFO [train.py:1114] (3/4) Epoch 14, batch 2050, loss[loss=0.1608, simple_loss=0.2386, pruned_loss=0.04155, over 4622.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2731, pruned_loss=0.0474, over 938258.36 frames. ], batch size: 11, lr: 5.41e-03, grad_scale: 64.0 +2024-07-28 16:44:41,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=179920.0, ans=0.1 +2024-07-28 16:44:48,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=179933.33333333334, ans=0.125 +2024-07-28 16:44:57,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=179946.66666666666, ans=0.1 +2024-07-28 16:45:09,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.70 vs. limit=15.0 +2024-07-28 16:45:14,162 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.698e+01 6.586e+01 7.906e+01 1.162e+02, threshold=1.317e+02, percent-clipped=0.0 +2024-07-28 16:45:16,945 INFO [train.py:1114] (3/4) Epoch 14, batch 2100, loss[loss=0.1455, simple_loss=0.2455, pruned_loss=0.02277, over 4760.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2724, pruned_loss=0.04702, over 940283.95 frames. 
], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:45:24,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=180000.0, ans=0.025 +2024-07-28 16:45:43,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=180040.0, ans=0.125 +2024-07-28 16:45:49,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=180040.0, ans=0.0 +2024-07-28 16:45:50,454 INFO [train.py:1114] (3/4) Epoch 14, batch 2150, loss[loss=0.1672, simple_loss=0.2548, pruned_loss=0.03975, over 4898.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2717, pruned_loss=0.04703, over 943666.14 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:45:54,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=180053.33333333334, ans=0.125 +2024-07-28 16:45:57,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=180066.66666666666, ans=0.125 +2024-07-28 16:46:15,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.92 vs. limit=15.0 +2024-07-28 16:46:18,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=180106.66666666666, ans=0.125 +2024-07-28 16:46:22,333 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.458e+01 6.111e+01 6.832e+01 1.017e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 16:46:36,560 INFO [train.py:1114] (3/4) Epoch 14, batch 2200, loss[loss=0.2064, simple_loss=0.2957, pruned_loss=0.05848, over 4810.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2718, pruned_loss=0.04679, over 943040.57 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:47:04,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=180160.0, ans=0.0 +2024-07-28 16:47:05,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.73 vs. limit=15.0 +2024-07-28 16:47:06,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.04 vs. limit=12.0 +2024-07-28 16:47:17,910 INFO [train.py:1114] (3/4) Epoch 14, batch 2250, loss[loss=0.1877, simple_loss=0.2897, pruned_loss=0.04286, over 4699.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2713, pruned_loss=0.04666, over 941369.79 frames. 
], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:47:22,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180186.66666666666, ans=0.1 +2024-07-28 16:47:31,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=180200.0, ans=0.2 +2024-07-28 16:47:39,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=180213.33333333334, ans=0.125 +2024-07-28 16:47:43,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=180226.66666666666, ans=0.0 +2024-07-28 16:48:08,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=180240.0, ans=0.0 +2024-07-28 16:48:09,491 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.321e+01 5.481e+01 6.034e+01 6.798e+01 1.360e+02, threshold=1.207e+02, percent-clipped=1.0 +2024-07-28 16:48:20,572 INFO [train.py:1114] (3/4) Epoch 14, batch 2300, loss[loss=0.1813, simple_loss=0.2703, pruned_loss=0.04614, over 4942.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2716, pruned_loss=0.04714, over 938925.19 frames. ], batch size: 12, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:49:28,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180266.66666666666, ans=0.125 +2024-07-28 16:49:35,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180280.0, ans=0.1 +2024-07-28 16:49:41,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=180293.33333333334, ans=0.125 +2024-07-28 16:49:49,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=180306.66666666666, ans=0.025 +2024-07-28 16:49:53,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=180306.66666666666, ans=0.0 +2024-07-28 16:49:54,918 INFO [train.py:1114] (3/4) Epoch 14, batch 2350, loss[loss=0.1837, simple_loss=0.2814, pruned_loss=0.04299, over 4628.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2708, pruned_loss=0.04647, over 940930.51 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:50:07,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180333.33333333334, ans=0.125 +2024-07-28 16:50:23,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=180373.33333333334, ans=0.125 +2024-07-28 16:50:26,411 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.883e+01 5.752e+01 6.311e+01 7.505e+01 9.885e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 16:50:29,053 INFO [train.py:1114] (3/4) Epoch 14, batch 2400, loss[loss=0.1542, simple_loss=0.2383, pruned_loss=0.03503, over 4645.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2712, pruned_loss=0.04661, over 940730.71 frames. 
], batch size: 12, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:03,420 INFO [train.py:1114] (3/4) Epoch 14, batch 2450, loss[loss=0.1636, simple_loss=0.2543, pruned_loss=0.03651, over 4698.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2722, pruned_loss=0.04692, over 936555.95 frames. ], batch size: 13, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:12,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=180466.66666666666, ans=0.1 +2024-07-28 16:51:18,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=180480.0, ans=0.1 +2024-07-28 16:51:26,530 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.70 vs. limit=15.0 +2024-07-28 16:51:27,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.69 vs. limit=15.0 +2024-07-28 16:51:34,058 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.381e+01 5.494e+01 6.004e+01 6.734e+01 1.227e+02, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 16:51:36,694 INFO [train.py:1114] (3/4) Epoch 14, batch 2500, loss[loss=0.1861, simple_loss=0.2812, pruned_loss=0.04545, over 4813.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2727, pruned_loss=0.04679, over 938782.70 frames. ], batch size: 14, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:51:54,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=180546.66666666666, ans=0.125 +2024-07-28 16:52:00,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=180560.0, ans=0.0 +2024-07-28 16:52:16,697 INFO [train.py:1114] (3/4) Epoch 14, batch 2550, loss[loss=0.1521, simple_loss=0.2351, pruned_loss=0.03459, over 4828.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2723, pruned_loss=0.04681, over 938346.08 frames. ], batch size: 11, lr: 5.40e-03, grad_scale: 64.0 +2024-07-28 16:52:17,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=180586.66666666666, ans=0.04949747468305833 +2024-07-28 16:52:19,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=12.0 +2024-07-28 16:52:24,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.84 vs. limit=15.0 +2024-07-28 16:52:27,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=180600.0, ans=0.2 +2024-07-28 16:52:27,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=180600.0, ans=0.0 +2024-07-28 16:52:28,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=180600.0, ans=0.1 +2024-07-28 16:52:31,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.91 vs. 
limit=15.0 +2024-07-28 16:52:34,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.60 vs. limit=10.0 +2024-07-28 16:52:39,056 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.56 vs. limit=15.0 +2024-07-28 16:52:46,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=180640.0, ans=0.125 +2024-07-28 16:52:48,646 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.545e+01 6.227e+01 6.776e+01 1.046e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 16:52:49,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=180640.0, ans=0.0 +2024-07-28 16:52:51,357 INFO [train.py:1114] (3/4) Epoch 14, batch 2600, loss[loss=0.1771, simple_loss=0.2568, pruned_loss=0.04872, over 4893.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2732, pruned_loss=0.04727, over 937392.57 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:52:59,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=180666.66666666666, ans=0.125 +2024-07-28 16:53:02,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180666.66666666666, ans=0.1 +2024-07-28 16:53:05,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180680.0, ans=0.125 +2024-07-28 16:53:10,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=180680.0, ans=0.025 +2024-07-28 16:53:14,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=180693.33333333334, ans=0.125 +2024-07-28 16:53:14,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=180693.33333333334, ans=0.125 +2024-07-28 16:53:21,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=180706.66666666666, ans=0.125 +2024-07-28 16:53:25,231 INFO [train.py:1114] (3/4) Epoch 14, batch 2650, loss[loss=0.1832, simple_loss=0.2825, pruned_loss=0.04199, over 4653.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.274, pruned_loss=0.04762, over 939549.70 frames. ], batch size: 16, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:53:27,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=180720.0, ans=0.0 +2024-07-28 16:53:33,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.66 vs. limit=12.0 +2024-07-28 16:53:41,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=180746.66666666666, ans=0.125 +2024-07-28 16:53:41,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.89 vs. 
limit=15.0 +2024-07-28 16:53:42,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=180746.66666666666, ans=0.125 +2024-07-28 16:53:48,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.71 vs. limit=15.0 +2024-07-28 16:53:52,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=180773.33333333334, ans=0.0 +2024-07-28 16:53:53,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=180773.33333333334, ans=0.125 +2024-07-28 16:53:56,284 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.582e+01 5.983e+01 6.716e+01 1.150e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 16:53:57,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=180773.33333333334, ans=0.0 +2024-07-28 16:53:58,983 INFO [train.py:1114] (3/4) Epoch 14, batch 2700, loss[loss=0.1887, simple_loss=0.2751, pruned_loss=0.05112, over 4749.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2743, pruned_loss=0.04797, over 939854.66 frames. ], batch size: 14, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:54:00,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.02 vs. limit=22.5 +2024-07-28 16:54:21,765 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.15 vs. limit=15.0 +2024-07-28 16:54:27,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=180840.0, ans=0.0 +2024-07-28 16:54:32,657 INFO [train.py:1114] (3/4) Epoch 14, batch 2750, loss[loss=0.2003, simple_loss=0.2871, pruned_loss=0.05674, over 4689.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2734, pruned_loss=0.04767, over 939689.64 frames. ], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:54:40,711 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 16:54:40,900 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.08 vs. limit=15.0 +2024-07-28 16:54:43,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.02 vs. limit=12.0 +2024-07-28 16:54:48,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=180880.0, ans=0.5 +2024-07-28 16:55:03,429 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.266e+01 5.739e+01 6.573e+01 7.646e+01 1.098e+02, threshold=1.315e+02, percent-clipped=0.0 +2024-07-28 16:55:04,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=180906.66666666666, ans=0.2 +2024-07-28 16:55:06,154 INFO [train.py:1114] (3/4) Epoch 14, batch 2800, loss[loss=0.2391, simple_loss=0.3132, pruned_loss=0.0825, over 3307.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2734, pruned_loss=0.04798, over 937151.98 frames. 
], batch size: 38, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:55:19,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=180933.33333333334, ans=0.0 +2024-07-28 16:55:24,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=180946.66666666666, ans=0.2 +2024-07-28 16:55:25,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=180946.66666666666, ans=0.1 +2024-07-28 16:55:34,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=22.49 vs. limit=22.5 +2024-07-28 16:55:39,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=180973.33333333334, ans=0.125 +2024-07-28 16:55:42,083 INFO [train.py:1114] (3/4) Epoch 14, batch 2850, loss[loss=0.224, simple_loss=0.3003, pruned_loss=0.07382, over 4955.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.274, pruned_loss=0.04847, over 935771.77 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:55:42,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=180986.66666666666, ans=0.125 +2024-07-28 16:55:43,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.80 vs. limit=15.0 +2024-07-28 16:55:46,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=180986.66666666666, ans=0.125 +2024-07-28 16:56:15,626 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.822e+01 6.351e+01 7.357e+01 1.031e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 16:56:15,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=181040.0, ans=0.0 +2024-07-28 16:56:15,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=181040.0, ans=0.0 +2024-07-28 16:56:18,128 INFO [train.py:1114] (3/4) Epoch 14, batch 2900, loss[loss=0.1605, simple_loss=0.2569, pruned_loss=0.032, over 4829.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2744, pruned_loss=0.04843, over 939903.59 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:56:21,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=181053.33333333334, ans=0.0 +2024-07-28 16:56:40,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=181080.0, ans=0.1 +2024-07-28 16:56:49,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181093.33333333334, ans=0.1 +2024-07-28 16:57:00,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0 +2024-07-28 16:57:04,977 INFO [train.py:1114] (3/4) Epoch 14, batch 2950, loss[loss=0.1752, simple_loss=0.2633, pruned_loss=0.04356, over 4696.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2727, pruned_loss=0.04777, over 938856.13 frames. 
], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:57:07,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=181120.0, ans=0.025 +2024-07-28 16:57:22,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.70 vs. limit=22.5 +2024-07-28 16:57:39,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181160.0, ans=0.125 +2024-07-28 16:57:42,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=181173.33333333334, ans=0.0 +2024-07-28 16:57:53,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=181173.33333333334, ans=0.125 +2024-07-28 16:57:55,636 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.574e+01 6.305e+01 7.129e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 16:57:58,337 INFO [train.py:1114] (3/4) Epoch 14, batch 3000, loss[loss=0.1679, simple_loss=0.2668, pruned_loss=0.03455, over 4752.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.272, pruned_loss=0.04731, over 938482.00 frames. ], batch size: 13, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:57:58,338 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 16:58:17,169 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1652, simple_loss=0.2685, pruned_loss=0.03098, over 944034.00 frames. +2024-07-28 16:58:17,169 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 16:58:27,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=181200.0, ans=0.125 +2024-07-28 16:58:40,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=181226.66666666666, ans=0.125 +2024-07-28 16:58:43,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=181226.66666666666, ans=0.125 +2024-07-28 16:58:47,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=181240.0, ans=0.0 +2024-07-28 16:59:00,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=13.08 vs. limit=15.0 +2024-07-28 16:59:04,087 INFO [train.py:1114] (3/4) Epoch 14, batch 3050, loss[loss=0.1575, simple_loss=0.2483, pruned_loss=0.03336, over 4643.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2728, pruned_loss=0.04739, over 936832.99 frames. 
], batch size: 12, lr: 5.39e-03, grad_scale: 64.0 +2024-07-28 16:59:11,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=181266.66666666666, ans=0.125 +2024-07-28 16:59:14,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=181266.66666666666, ans=0.0 +2024-07-28 16:59:35,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=181293.33333333334, ans=0.125 +2024-07-28 16:59:41,676 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.769e+01 5.765e+01 6.488e+01 7.325e+01 1.172e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-28 16:59:48,316 INFO [train.py:1114] (3/4) Epoch 14, batch 3100, loss[loss=0.2245, simple_loss=0.3146, pruned_loss=0.06716, over 4610.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2729, pruned_loss=0.04786, over 937651.01 frames. ], batch size: 16, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 16:59:52,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=181320.0, ans=0.125 +2024-07-28 17:00:08,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-07-28 17:00:49,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.61 vs. limit=22.5 +2024-07-28 17:00:59,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=181360.0, ans=0.125 +2024-07-28 17:01:00,607 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:01:10,555 INFO [train.py:1114] (3/4) Epoch 14, batch 3150, loss[loss=0.1895, simple_loss=0.2824, pruned_loss=0.04827, over 4657.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2733, pruned_loss=0.04787, over 938272.42 frames. ], batch size: 17, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:01:11,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=181386.66666666666, ans=0.0 +2024-07-28 17:01:14,155 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.59 vs. limit=22.5 +2024-07-28 17:01:25,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=181413.33333333334, ans=0.0 +2024-07-28 17:01:38,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=181426.66666666666, ans=0.05 +2024-07-28 17:01:39,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.51 vs. 
limit=12.0 +2024-07-28 17:01:43,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=181426.66666666666, ans=0.125 +2024-07-28 17:01:50,901 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.747e+01 6.201e+01 6.953e+01 1.061e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 17:01:53,632 INFO [train.py:1114] (3/4) Epoch 14, batch 3200, loss[loss=0.1642, simple_loss=0.2485, pruned_loss=0.03995, over 4824.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2732, pruned_loss=0.04773, over 940002.73 frames. ], batch size: 13, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:02:00,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=181466.66666666666, ans=0.125 +2024-07-28 17:02:02,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=181466.66666666666, ans=0.125 +2024-07-28 17:02:08,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=181480.0, ans=0.125 +2024-07-28 17:02:16,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=181493.33333333334, ans=0.125 +2024-07-28 17:02:17,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=181493.33333333334, ans=0.125 +2024-07-28 17:02:28,098 INFO [train.py:1114] (3/4) Epoch 14, batch 3250, loss[loss=0.2121, simple_loss=0.2905, pruned_loss=0.06683, over 4932.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2726, pruned_loss=0.04718, over 940788.48 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:02:34,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=181520.0, ans=0.0 +2024-07-28 17:02:43,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=181533.33333333334, ans=0.2 +2024-07-28 17:02:51,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=181546.66666666666, ans=0.125 +2024-07-28 17:02:58,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=181560.0, ans=0.125 +2024-07-28 17:03:17,417 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.388e+01 5.561e+01 6.069e+01 6.754e+01 1.054e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 17:04:11,101 INFO [train.py:1114] (3/4) Epoch 14, batch 3300, loss[loss=0.1922, simple_loss=0.2799, pruned_loss=0.0522, over 4729.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2721, pruned_loss=0.04724, over 940627.11 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:04:14,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=181586.66666666666, ans=0.07 +2024-07-28 17:04:17,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.34 vs. 
limit=15.0 +2024-07-28 17:04:28,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=181600.0, ans=0.0 +2024-07-28 17:04:35,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=181613.33333333334, ans=0.125 +2024-07-28 17:04:36,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.36 vs. limit=10.0 +2024-07-28 17:04:41,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.88 vs. limit=15.0 +2024-07-28 17:04:44,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=181626.66666666666, ans=0.125 +2024-07-28 17:04:54,333 INFO [train.py:1114] (3/4) Epoch 14, batch 3350, loss[loss=0.228, simple_loss=0.3157, pruned_loss=0.0702, over 4650.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.273, pruned_loss=0.04803, over 938927.44 frames. ], batch size: 17, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:04:59,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=181653.33333333334, ans=0.0 +2024-07-28 17:05:03,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=181666.66666666666, ans=0.1 +2024-07-28 17:05:04,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=181666.66666666666, ans=0.125 +2024-07-28 17:05:10,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.26 vs. limit=22.5 +2024-07-28 17:05:23,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=181706.66666666666, ans=0.1 +2024-07-28 17:05:25,877 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.702e+01 6.286e+01 7.207e+01 1.084e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 17:05:29,883 INFO [train.py:1114] (3/4) Epoch 14, batch 3400, loss[loss=0.1485, simple_loss=0.2261, pruned_loss=0.03549, over 4818.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2739, pruned_loss=0.0485, over 937900.41 frames. ], batch size: 11, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:05:31,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=181720.0, ans=0.125 +2024-07-28 17:05:32,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=181720.0, ans=0.2 +2024-07-28 17:05:59,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=181773.33333333334, ans=0.125 +2024-07-28 17:06:02,059 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. 
limit=6.0 +2024-07-28 17:06:06,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=181786.66666666666, ans=0.0 +2024-07-28 17:06:07,188 INFO [train.py:1114] (3/4) Epoch 14, batch 3450, loss[loss=0.1888, simple_loss=0.2925, pruned_loss=0.04256, over 4677.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2748, pruned_loss=0.04848, over 937907.37 frames. ], batch size: 19, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:06:10,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=181786.66666666666, ans=0.05 +2024-07-28 17:06:41,510 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.360e+01 5.614e+01 6.099e+01 6.810e+01 1.220e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:06:44,269 INFO [train.py:1114] (3/4) Epoch 14, batch 3500, loss[loss=0.1572, simple_loss=0.2439, pruned_loss=0.03529, over 4931.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.274, pruned_loss=0.04793, over 938493.21 frames. ], batch size: 12, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:06:58,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.08 vs. limit=15.0 +2024-07-28 17:07:03,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=181880.0, ans=0.125 +2024-07-28 17:07:04,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=181893.33333333334, ans=0.07 +2024-07-28 17:07:09,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=181893.33333333334, ans=0.2 +2024-07-28 17:07:10,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=181893.33333333334, ans=0.125 +2024-07-28 17:07:10,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=181906.66666666666, ans=0.125 +2024-07-28 17:07:17,705 INFO [train.py:1114] (3/4) Epoch 14, batch 3550, loss[loss=0.2054, simple_loss=0.2894, pruned_loss=0.06075, over 4667.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2743, pruned_loss=0.04821, over 938816.69 frames. ], batch size: 14, lr: 5.38e-03, grad_scale: 64.0 +2024-07-28 17:07:28,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=181933.33333333334, ans=0.125 +2024-07-28 17:07:31,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=181946.66666666666, ans=0.1 +2024-07-28 17:07:32,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=181946.66666666666, ans=0.0 +2024-07-28 17:07:33,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=181946.66666666666, ans=0.2 +2024-07-28 17:07:34,783 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:07:35,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.57 vs. 
limit=15.0 +2024-07-28 17:07:38,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.15 vs. limit=6.0 +2024-07-28 17:07:47,924 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.433e+01 6.095e+01 6.753e+01 1.044e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 17:07:50,082 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:07:50,516 INFO [train.py:1114] (3/4) Epoch 14, batch 3600, loss[loss=0.1912, simple_loss=0.2654, pruned_loss=0.05856, over 4957.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.048, over 940397.26 frames. ], batch size: 13, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:07:53,337 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:08:00,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=182000.0, ans=0.125 +2024-07-28 17:08:02,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=182000.0, ans=0.025 +2024-07-28 17:08:05,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182013.33333333334, ans=0.125 +2024-07-28 17:08:14,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182026.66666666666, ans=0.1 +2024-07-28 17:08:20,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=182040.0, ans=0.2 +2024-07-28 17:08:22,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=182040.0, ans=0.0 +2024-07-28 17:08:26,141 INFO [train.py:1114] (3/4) Epoch 14, batch 3650, loss[loss=0.2371, simple_loss=0.3233, pruned_loss=0.07541, over 4896.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2726, pruned_loss=0.04787, over 940660.38 frames. ], batch size: 15, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:08:57,484 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.626e+01 5.717e+01 6.299e+01 7.471e+01 1.089e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-28 17:08:59,532 INFO [train.py:1114] (3/4) Epoch 14, batch 3700, loss[loss=0.2097, simple_loss=0.3104, pruned_loss=0.05446, over 4934.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2727, pruned_loss=0.04771, over 941851.88 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:09:16,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=182120.0, ans=0.125 +2024-07-28 17:09:19,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=182133.33333333334, ans=0.0 +2024-07-28 17:09:26,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.33 vs. limit=10.0 +2024-07-28 17:09:28,034 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.35 vs. 
limit=6.0 +2024-07-28 17:09:39,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182160.0, ans=0.125 +2024-07-28 17:09:45,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=182173.33333333334, ans=0.2 +2024-07-28 17:09:47,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=182173.33333333334, ans=0.1 +2024-07-28 17:09:48,834 INFO [train.py:1114] (3/4) Epoch 14, batch 3750, loss[loss=0.148, simple_loss=0.2329, pruned_loss=0.03152, over 4818.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2713, pruned_loss=0.04726, over 943459.36 frames. ], batch size: 11, lr: 5.37e-03, grad_scale: 64.0 +2024-07-28 17:09:57,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182200.0, ans=0.125 +2024-07-28 17:09:59,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=182200.0, ans=10.0 +2024-07-28 17:10:20,836 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.520e+01 5.513e+01 6.095e+01 6.820e+01 9.830e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 17:10:21,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=182240.0, ans=0.025 +2024-07-28 17:10:22,244 INFO [train.py:1114] (3/4) Epoch 14, batch 3800, loss[loss=0.1844, simple_loss=0.2751, pruned_loss=0.04685, over 4817.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2713, pruned_loss=0.04718, over 941764.17 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:10:26,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=182253.33333333334, ans=0.125 +2024-07-28 17:10:42,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=182293.33333333334, ans=0.09899494936611666 +2024-07-28 17:10:55,059 INFO [train.py:1114] (3/4) Epoch 14, batch 3850, loss[loss=0.2014, simple_loss=0.2982, pruned_loss=0.05231, over 4649.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2713, pruned_loss=0.047, over 942220.12 frames. 
], batch size: 16, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:10:57,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182320.0, ans=0.1 +2024-07-28 17:10:59,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=182320.0, ans=0.025 +2024-07-28 17:11:01,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=182333.33333333334, ans=0.125 +2024-07-28 17:11:02,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=182333.33333333334, ans=0.125 +2024-07-28 17:11:09,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=182346.66666666666, ans=0.125 +2024-07-28 17:11:12,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=182346.66666666666, ans=0.125 +2024-07-28 17:11:15,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=182360.0, ans=0.0 +2024-07-28 17:11:22,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=182373.33333333334, ans=0.125 +2024-07-28 17:11:27,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=182373.33333333334, ans=0.125 +2024-07-28 17:11:27,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182373.33333333334, ans=0.125 +2024-07-28 17:11:28,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=182373.33333333334, ans=0.0 +2024-07-28 17:11:29,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.431e+01 5.612e+01 6.160e+01 6.955e+01 1.058e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-28 17:11:30,354 INFO [train.py:1114] (3/4) Epoch 14, batch 3900, loss[loss=0.2037, simple_loss=0.298, pruned_loss=0.05464, over 4818.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2712, pruned_loss=0.04708, over 942439.24 frames. ], batch size: 14, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:11:30,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182386.66666666666, ans=0.1 +2024-07-28 17:11:32,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.74 vs. limit=22.5 +2024-07-28 17:11:34,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.97 vs. 
limit=22.5 +2024-07-28 17:11:37,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=182400.0, ans=0.0 +2024-07-28 17:11:37,925 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182400.0, ans=0.1 +2024-07-28 17:11:55,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=182426.66666666666, ans=0.125 +2024-07-28 17:11:57,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.87 vs. limit=15.0 +2024-07-28 17:12:00,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=182440.0, ans=0.09899494936611666 +2024-07-28 17:12:06,182 INFO [train.py:1114] (3/4) Epoch 14, batch 3950, loss[loss=0.2391, simple_loss=0.3385, pruned_loss=0.06983, over 4842.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2725, pruned_loss=0.04784, over 944449.36 frames. ], batch size: 16, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:12:11,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182453.33333333334, ans=0.0 +2024-07-28 17:12:13,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=182453.33333333334, ans=0.05 +2024-07-28 17:12:17,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=182466.66666666666, ans=0.125 +2024-07-28 17:12:34,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=182493.33333333334, ans=0.0 +2024-07-28 17:12:44,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=182506.66666666666, ans=0.125 +2024-07-28 17:12:44,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.506e+01 6.184e+01 7.058e+01 1.004e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 17:12:54,532 INFO [train.py:1114] (3/4) Epoch 14, batch 4000, loss[loss=0.1804, simple_loss=0.2639, pruned_loss=0.04841, over 4766.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2728, pruned_loss=0.04831, over 940829.96 frames. ], batch size: 12, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:12:56,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=182520.0, ans=0.1 +2024-07-28 17:13:05,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.96 vs. 
limit=22.5 +2024-07-28 17:13:09,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=182533.33333333334, ans=0.0 +2024-07-28 17:13:10,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=182533.33333333334, ans=0.125 +2024-07-28 17:13:14,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=182546.66666666666, ans=0.125 +2024-07-28 17:13:23,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=182560.0, ans=0.05 +2024-07-28 17:14:57,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=182573.33333333334, ans=0.125 +2024-07-28 17:15:02,342 INFO [train.py:1114] (3/4) Epoch 14, batch 4050, loss[loss=0.227, simple_loss=0.2999, pruned_loss=0.07703, over 3307.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2738, pruned_loss=0.04856, over 939356.70 frames. ], batch size: 35, lr: 5.37e-03, grad_scale: 32.0 +2024-07-28 17:15:03,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=182586.66666666666, ans=0.125 +2024-07-28 17:15:05,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=182586.66666666666, ans=0.09899494936611666 +2024-07-28 17:15:44,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182600.0, ans=0.1 +2024-07-28 17:15:46,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=182613.33333333334, ans=0.0 +2024-07-28 17:15:49,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=182613.33333333334, ans=0.2 +2024-07-28 17:15:59,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=182626.66666666666, ans=0.0 +2024-07-28 17:16:07,186 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.600e+01 6.211e+01 7.334e+01 1.251e+02, threshold=1.242e+02, percent-clipped=2.0 +2024-07-28 17:16:08,553 INFO [train.py:1114] (3/4) Epoch 14, batch 4100, loss[loss=0.1696, simple_loss=0.2619, pruned_loss=0.03866, over 4903.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.273, pruned_loss=0.048, over 938459.70 frames. 
], batch size: 15, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:16:14,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=182653.33333333334, ans=0.07 +2024-07-28 17:16:22,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=182666.66666666666, ans=0.2 +2024-07-28 17:16:32,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=182680.0, ans=0.04949747468305833 +2024-07-28 17:16:34,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182693.33333333334, ans=0.1 +2024-07-28 17:16:35,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=182693.33333333334, ans=0.125 +2024-07-28 17:16:43,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=182706.66666666666, ans=0.125 +2024-07-28 17:16:45,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=182706.66666666666, ans=0.0 +2024-07-28 17:16:49,308 INFO [train.py:1114] (3/4) Epoch 14, batch 4150, loss[loss=0.1795, simple_loss=0.2679, pruned_loss=0.04555, over 4822.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2733, pruned_loss=0.04766, over 937931.52 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:16:57,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.19 vs. limit=22.5 +2024-07-28 17:16:59,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=182733.33333333334, ans=0.2 +2024-07-28 17:17:00,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=182733.33333333334, ans=0.125 +2024-07-28 17:17:54,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=182746.66666666666, ans=0.125 +2024-07-28 17:18:37,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.53 vs. limit=15.0 +2024-07-28 17:18:39,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=182773.33333333334, ans=0.1 +2024-07-28 17:18:47,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=182773.33333333334, ans=0.125 +2024-07-28 17:18:48,505 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.421e+01 5.631e+01 6.207e+01 7.543e+01 1.114e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 17:18:49,846 INFO [train.py:1114] (3/4) Epoch 14, batch 4200, loss[loss=0.2038, simple_loss=0.3005, pruned_loss=0.05349, over 4909.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.273, pruned_loss=0.04728, over 939651.93 frames. ], batch size: 15, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:18:51,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.55 vs. 
limit=15.0 +2024-07-28 17:18:53,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.26 vs. limit=15.0 +2024-07-28 17:19:25,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=15.0 +2024-07-28 17:19:25,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=182800.0, ans=0.0 +2024-07-28 17:19:29,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=182813.33333333334, ans=0.0 +2024-07-28 17:19:48,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=182840.0, ans=0.125 +2024-07-28 17:19:52,371 INFO [train.py:1114] (3/4) Epoch 14, batch 4250, loss[loss=0.179, simple_loss=0.2671, pruned_loss=0.0454, over 4643.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2733, pruned_loss=0.04728, over 940769.21 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:19:59,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=182853.33333333334, ans=0.0 +2024-07-28 17:20:03,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=182853.33333333334, ans=0.2 +2024-07-28 17:20:30,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.73 vs. limit=15.0 +2024-07-28 17:20:33,151 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.602e+01 6.327e+01 7.435e+01 1.299e+02, threshold=1.265e+02, percent-clipped=1.0 +2024-07-28 17:20:34,465 INFO [train.py:1114] (3/4) Epoch 14, batch 4300, loss[loss=0.1623, simple_loss=0.2542, pruned_loss=0.03514, over 4770.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.273, pruned_loss=0.04712, over 940230.94 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:20:39,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys.whitening_limit, batch_count=182920.0, ans=6.0 +2024-07-28 17:20:56,046 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:21:01,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=182960.0, ans=0.0 +2024-07-28 17:21:08,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.16 vs. limit=15.0 +2024-07-28 17:21:09,267 INFO [train.py:1114] (3/4) Epoch 14, batch 4350, loss[loss=0.1762, simple_loss=0.2692, pruned_loss=0.04159, over 4746.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2732, pruned_loss=0.0468, over 940914.84 frames. 
], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:19,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=183000.0, ans=0.1 +2024-07-28 17:21:20,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=183000.0, ans=0.125 +2024-07-28 17:21:25,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=183013.33333333334, ans=0.025 +2024-07-28 17:21:44,667 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.884e+01 5.657e+01 6.269e+01 7.008e+01 1.088e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:21:46,161 INFO [train.py:1114] (3/4) Epoch 14, batch 4400, loss[loss=0.1763, simple_loss=0.2755, pruned_loss=0.03854, over 4814.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.273, pruned_loss=0.04642, over 940646.86 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:21:48,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=183053.33333333334, ans=0.125 +2024-07-28 17:22:00,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183080.0, ans=0.125 +2024-07-28 17:22:02,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183080.0, ans=0.125 +2024-07-28 17:22:07,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=183093.33333333334, ans=0.0 +2024-07-28 17:22:10,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=183093.33333333334, ans=15.0 +2024-07-28 17:22:11,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.57 vs. limit=15.0 +2024-07-28 17:22:20,464 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.69 vs. limit=15.0 +2024-07-28 17:22:22,140 INFO [train.py:1114] (3/4) Epoch 14, batch 4450, loss[loss=0.1601, simple_loss=0.2473, pruned_loss=0.03646, over 4932.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2728, pruned_loss=0.04671, over 938818.85 frames. ], batch size: 12, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:22:35,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=183146.66666666666, ans=0.125 +2024-07-28 17:22:38,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=183146.66666666666, ans=0.0 +2024-07-28 17:22:40,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.25 vs. 
limit=15.0 +2024-07-28 17:22:42,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=183160.0, ans=0.0 +2024-07-28 17:26:48,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.470e+01 5.943e+01 6.622e+01 1.092e+02, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 17:26:49,860 INFO [train.py:1114] (3/4) Epoch 14, batch 4500, loss[loss=0.1901, simple_loss=0.2788, pruned_loss=0.05072, over 4742.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2735, pruned_loss=0.04676, over 938184.73 frames. ], batch size: 14, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:26:58,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=183186.66666666666, ans=0.025 +2024-07-28 17:27:03,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=183200.0, ans=0.125 +2024-07-28 17:27:04,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=183200.0, ans=0.125 +2024-07-28 17:27:07,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=183200.0, ans=0.0 +2024-07-28 17:27:13,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=183213.33333333334, ans=0.125 +2024-07-28 17:27:21,680 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:27:26,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=183240.0, ans=0.2 +2024-07-28 17:27:28,603 INFO [train.py:1114] (3/4) Epoch 14, batch 4550, loss[loss=0.1698, simple_loss=0.2613, pruned_loss=0.03912, over 4907.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2736, pruned_loss=0.04724, over 940223.90 frames. ], batch size: 13, lr: 5.36e-03, grad_scale: 32.0 +2024-07-28 17:27:31,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183253.33333333334, ans=0.125 +2024-07-28 17:27:39,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=183266.66666666666, ans=0.125 +2024-07-28 17:27:42,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=183280.0, ans=0.125 +2024-07-28 17:27:45,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=183280.0, ans=0.125 +2024-07-28 17:27:51,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=183293.33333333334, ans=0.125 +2024-07-28 17:28:01,173 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.634e+01 6.361e+01 7.770e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 17:28:02,547 INFO [train.py:1114] (3/4) Epoch 14, batch 4600, loss[loss=0.1818, simple_loss=0.2775, pruned_loss=0.04304, over 4527.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2733, pruned_loss=0.04741, over 938694.73 frames. 
], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:17,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183346.66666666666, ans=0.125 +2024-07-28 17:28:25,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=183360.0, ans=0.1 +2024-07-28 17:28:34,778 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.74 vs. limit=15.0 +2024-07-28 17:28:35,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=183386.66666666666, ans=0.05 +2024-07-28 17:28:35,615 INFO [train.py:1114] (3/4) Epoch 14, batch 4650, loss[loss=0.2293, simple_loss=0.3048, pruned_loss=0.07688, over 4830.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.275, pruned_loss=0.04844, over 940198.17 frames. ], batch size: 16, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:28:37,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=183386.66666666666, ans=0.125 +2024-07-28 17:28:39,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=183386.66666666666, ans=0.0 +2024-07-28 17:28:40,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=183386.66666666666, ans=0.125 +2024-07-28 17:28:42,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=183400.0, ans=0.1 +2024-07-28 17:28:44,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=183400.0, ans=0.125 +2024-07-28 17:28:50,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=183413.33333333334, ans=0.0 +2024-07-28 17:28:53,221 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:29:04,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=183440.0, ans=0.125 +2024-07-28 17:29:07,304 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:29:09,678 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.803e+01 6.288e+01 7.232e+01 1.102e+02, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 17:29:10,997 INFO [train.py:1114] (3/4) Epoch 14, batch 4700, loss[loss=0.1617, simple_loss=0.2416, pruned_loss=0.04094, over 4721.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2736, pruned_loss=0.04808, over 938083.31 frames. 
], batch size: 11, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:13,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=183453.33333333334, ans=0.2 +2024-07-28 17:29:35,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=183493.33333333334, ans=0.2 +2024-07-28 17:29:39,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=183506.66666666666, ans=0.125 +2024-07-28 17:29:45,322 INFO [train.py:1114] (3/4) Epoch 14, batch 4750, loss[loss=0.1885, simple_loss=0.2803, pruned_loss=0.04837, over 4530.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2739, pruned_loss=0.04852, over 936002.20 frames. ], batch size: 21, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:29:47,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.84 vs. limit=12.0 +2024-07-28 17:30:03,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=183546.66666666666, ans=0.2 +2024-07-28 17:30:08,296 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.49 vs. limit=6.0 +2024-07-28 17:30:14,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=183573.33333333334, ans=0.125 +2024-07-28 17:30:17,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.592e+01 6.256e+01 7.365e+01 1.010e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 17:30:19,280 INFO [train.py:1114] (3/4) Epoch 14, batch 4800, loss[loss=0.204, simple_loss=0.3056, pruned_loss=0.05116, over 4689.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2737, pruned_loss=0.04859, over 933327.06 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:30:25,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=183600.0, ans=0.2 +2024-07-28 17:30:35,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=183613.33333333334, ans=0.0 +2024-07-28 17:30:36,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=183613.33333333334, ans=0.2 +2024-07-28 17:30:40,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=183626.66666666666, ans=0.0 +2024-07-28 17:30:43,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.50 vs. limit=22.5 +2024-07-28 17:30:53,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=183640.0, ans=0.125 +2024-07-28 17:30:54,678 INFO [train.py:1114] (3/4) Epoch 14, batch 4850, loss[loss=0.1844, simple_loss=0.2754, pruned_loss=0.04672, over 4736.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2739, pruned_loss=0.04823, over 932571.80 frames. 
], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:11,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=183680.0, ans=0.125 +2024-07-28 17:31:15,367 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:31:16,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=183693.33333333334, ans=0.5 +2024-07-28 17:31:16,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=183693.33333333334, ans=0.125 +2024-07-28 17:31:17,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=183693.33333333334, ans=0.125 +2024-07-28 17:31:21,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.55 vs. limit=10.0 +2024-07-28 17:31:21,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=183693.33333333334, ans=0.1 +2024-07-28 17:31:30,323 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.639e+01 5.421e+01 5.850e+01 6.499e+01 1.354e+02, threshold=1.170e+02, percent-clipped=1.0 +2024-07-28 17:31:31,705 INFO [train.py:1114] (3/4) Epoch 14, batch 4900, loss[loss=0.1664, simple_loss=0.2544, pruned_loss=0.03923, over 4763.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2729, pruned_loss=0.04767, over 934222.55 frames. ], batch size: 13, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:31:32,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.24 vs. limit=22.5 +2024-07-28 17:31:54,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=183760.0, ans=0.125 +2024-07-28 17:32:06,233 INFO [train.py:1114] (3/4) Epoch 14, batch 4950, loss[loss=0.2264, simple_loss=0.2962, pruned_loss=0.07829, over 3687.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2739, pruned_loss=0.04836, over 931447.36 frames. ], batch size: 35, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:08,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=183786.66666666666, ans=0.0 +2024-07-28 17:32:10,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=183786.66666666666, ans=0.0 +2024-07-28 17:32:12,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=183786.66666666666, ans=0.2 +2024-07-28 17:32:21,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=183813.33333333334, ans=0.05 +2024-07-28 17:32:38,287 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.280e+01 5.530e+01 6.017e+01 6.862e+01 9.810e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 17:32:39,708 INFO [train.py:1114] (3/4) Epoch 14, batch 5000, loss[loss=0.1944, simple_loss=0.2859, pruned_loss=0.0515, over 4660.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2742, pruned_loss=0.04822, over 935407.96 frames. 
], batch size: 14, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:32:45,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.96 vs. limit=15.0 +2024-07-28 17:32:48,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=183866.66666666666, ans=0.5 +2024-07-28 17:32:51,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=183866.66666666666, ans=0.0 +2024-07-28 17:32:53,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=183880.0, ans=0.0 +2024-07-28 17:32:53,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=183880.0, ans=0.125 +2024-07-28 17:32:55,917 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.16 vs. limit=12.0 +2024-07-28 17:32:58,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183880.0, ans=0.1 +2024-07-28 17:33:12,765 INFO [train.py:1114] (3/4) Epoch 14, batch 5050, loss[loss=0.1691, simple_loss=0.246, pruned_loss=0.04612, over 4845.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.273, pruned_loss=0.04775, over 937842.99 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:13,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=183920.0, ans=0.2 +2024-07-28 17:33:16,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=183920.0, ans=0.025 +2024-07-28 17:33:42,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=183973.33333333334, ans=0.1 +2024-07-28 17:33:45,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.607e+01 6.225e+01 6.953e+01 1.020e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 17:33:46,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=183986.66666666666, ans=0.07 +2024-07-28 17:33:46,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=183986.66666666666, ans=0.125 +2024-07-28 17:33:47,327 INFO [train.py:1114] (3/4) Epoch 14, batch 5100, loss[loss=0.1753, simple_loss=0.2546, pruned_loss=0.04798, over 4765.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2727, pruned_loss=0.04748, over 935607.59 frames. ], batch size: 12, lr: 5.35e-03, grad_scale: 32.0 +2024-07-28 17:33:49,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=183986.66666666666, ans=0.125 +2024-07-28 17:33:50,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=183986.66666666666, ans=15.0 +2024-07-28 17:33:52,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.03 vs. 
limit=15.0 +2024-07-28 17:33:55,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=184000.0, ans=0.0 +2024-07-28 17:33:57,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=184000.0, ans=0.125 +2024-07-28 17:33:57,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=184000.0, ans=0.125 +2024-07-28 17:34:07,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=184026.66666666666, ans=0.07 +2024-07-28 17:34:12,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=184026.66666666666, ans=0.0 +2024-07-28 17:34:14,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=184040.0, ans=0.125 +2024-07-28 17:34:21,093 INFO [train.py:1114] (3/4) Epoch 14, batch 5150, loss[loss=0.1651, simple_loss=0.2629, pruned_loss=0.03368, over 4837.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2727, pruned_loss=0.04714, over 936435.74 frames. ], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:34:27,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=184053.33333333334, ans=0.0 +2024-07-28 17:34:34,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184066.66666666666, ans=0.1 +2024-07-28 17:34:51,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=184106.66666666666, ans=0.5 +2024-07-28 17:34:53,216 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.49 vs. limit=15.0 +2024-07-28 17:34:54,938 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.666e+01 6.187e+01 7.169e+01 1.415e+02, threshold=1.237e+02, percent-clipped=1.0 +2024-07-28 17:34:56,317 INFO [train.py:1114] (3/4) Epoch 14, batch 5200, loss[loss=0.2031, simple_loss=0.3009, pruned_loss=0.05262, over 4676.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2728, pruned_loss=0.04721, over 936397.93 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:35:01,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184120.0, ans=0.1 +2024-07-28 17:35:06,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=184133.33333333334, ans=0.125 +2024-07-28 17:35:08,597 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.02 vs. 
limit=15.0 +2024-07-28 17:35:13,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=184146.66666666666, ans=0.0 +2024-07-28 17:35:15,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=184146.66666666666, ans=0.125 +2024-07-28 17:35:19,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=184160.0, ans=0.2 +2024-07-28 17:35:24,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184173.33333333334, ans=0.1 +2024-07-28 17:35:25,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=184173.33333333334, ans=0.125 +2024-07-28 17:35:27,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=184173.33333333334, ans=0.125 +2024-07-28 17:35:42,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=184173.33333333334, ans=0.0 +2024-07-28 17:35:42,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.11 vs. limit=10.0 +2024-07-28 17:35:42,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.23 vs. limit=15.0 +2024-07-28 17:35:44,103 INFO [train.py:1114] (3/4) Epoch 14, batch 5250, loss[loss=0.1687, simple_loss=0.2524, pruned_loss=0.04245, over 4890.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2721, pruned_loss=0.04727, over 936354.51 frames. ], batch size: 13, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:36:05,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=184186.66666666666, ans=0.125 +2024-07-28 17:36:12,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.45 vs. limit=15.0 +2024-07-28 17:36:19,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.49 vs. limit=22.5 +2024-07-28 17:36:37,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0 +2024-07-28 17:37:40,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.69 vs. limit=22.5 +2024-07-28 17:37:44,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=184240.0, ans=0.025 +2024-07-28 17:37:48,425 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.478e+01 6.210e+01 7.367e+01 1.027e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 17:37:49,751 INFO [train.py:1114] (3/4) Epoch 14, batch 5300, loss[loss=0.2279, simple_loss=0.3123, pruned_loss=0.07179, over 4624.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2716, pruned_loss=0.04706, over 934623.52 frames. 
], batch size: 16, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:37:54,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=184253.33333333334, ans=0.125 +2024-07-28 17:38:01,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184266.66666666666, ans=0.125 +2024-07-28 17:38:09,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=184280.0, ans=0.0 +2024-07-28 17:38:12,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=184280.0, ans=0.035 +2024-07-28 17:38:12,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.54 vs. limit=22.5 +2024-07-28 17:38:13,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184293.33333333334, ans=0.1 +2024-07-28 17:38:15,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=184293.33333333334, ans=0.0 +2024-07-28 17:38:16,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184293.33333333334, ans=0.1 +2024-07-28 17:38:18,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184293.33333333334, ans=0.1 +2024-07-28 17:38:20,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=184306.66666666666, ans=0.0 +2024-07-28 17:38:26,463 INFO [train.py:1114] (3/4) Epoch 14, batch 5350, loss[loss=0.16, simple_loss=0.2435, pruned_loss=0.0382, over 4527.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2719, pruned_loss=0.04671, over 936587.37 frames. ], batch size: 10, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:38:29,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=184320.0, ans=0.0 +2024-07-28 17:38:30,259 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.56 vs. 
limit=22.5 +2024-07-28 17:38:34,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184333.33333333334, ans=0.1 +2024-07-28 17:38:51,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=184360.0, ans=0.1 +2024-07-28 17:38:51,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=184360.0, ans=0.0 +2024-07-28 17:38:52,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184360.0, ans=0.125 +2024-07-28 17:38:53,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=184360.0, ans=0.035 +2024-07-28 17:38:57,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=184373.33333333334, ans=0.125 +2024-07-28 17:39:01,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.641e+01 6.338e+01 7.374e+01 1.167e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 17:39:01,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=184386.66666666666, ans=0.0 +2024-07-28 17:39:02,099 INFO [train.py:1114] (3/4) Epoch 14, batch 5400, loss[loss=0.2047, simple_loss=0.2876, pruned_loss=0.06086, over 4111.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04771, over 930634.68 frames. ], batch size: 25, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:03,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=184386.66666666666, ans=0.0 +2024-07-28 17:39:13,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.51 vs. limit=15.0 +2024-07-28 17:39:13,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184400.0, ans=0.1 +2024-07-28 17:39:16,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=184413.33333333334, ans=0.2 +2024-07-28 17:39:18,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=184413.33333333334, ans=0.125 +2024-07-28 17:39:24,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=184426.66666666666, ans=0.125 +2024-07-28 17:39:25,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten.whitening_limit, batch_count=184426.66666666666, ans=15.0 +2024-07-28 17:39:35,485 INFO [train.py:1114] (3/4) Epoch 14, batch 5450, loss[loss=0.1725, simple_loss=0.2534, pruned_loss=0.04577, over 4715.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2732, pruned_loss=0.04762, over 933537.53 frames. 
], batch size: 11, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:39:37,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=184453.33333333334, ans=0.0 +2024-07-28 17:39:39,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=184453.33333333334, ans=0.1 +2024-07-28 17:39:53,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=184480.0, ans=0.125 +2024-07-28 17:40:00,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184493.33333333334, ans=0.1 +2024-07-28 17:40:00,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.22 vs. limit=15.0 +2024-07-28 17:40:00,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0 +2024-07-28 17:40:03,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=184506.66666666666, ans=0.0 +2024-07-28 17:40:04,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184506.66666666666, ans=0.1 +2024-07-28 17:40:08,545 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.352e+01 5.587e+01 6.313e+01 7.261e+01 1.072e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 17:40:09,234 INFO [train.py:1114] (3/4) Epoch 14, batch 5500, loss[loss=0.174, simple_loss=0.2724, pruned_loss=0.03781, over 4292.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2723, pruned_loss=0.04748, over 931066.97 frames. ], batch size: 25, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:33,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184560.0, ans=0.1 +2024-07-28 17:40:45,306 INFO [train.py:1114] (3/4) Epoch 14, batch 5550, loss[loss=0.1826, simple_loss=0.2755, pruned_loss=0.04488, over 4705.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2723, pruned_loss=0.04746, over 933012.24 frames. 
], batch size: 12, lr: 5.34e-03, grad_scale: 16.0 +2024-07-28 17:40:46,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=184586.66666666666, ans=0.0 +2024-07-28 17:40:50,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=184586.66666666666, ans=0.2 +2024-07-28 17:40:50,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184586.66666666666, ans=0.125 +2024-07-28 17:40:56,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=184600.0, ans=0.125 +2024-07-28 17:41:00,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=184613.33333333334, ans=0.1 +2024-07-28 17:41:12,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=184640.0, ans=0.0 +2024-07-28 17:41:14,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=184640.0, ans=0.2 +2024-07-28 17:41:19,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.688e+01 6.017e+01 6.994e+01 8.294e+01 1.224e+02, threshold=1.399e+02, percent-clipped=0.0 +2024-07-28 17:41:19,712 INFO [train.py:1114] (3/4) Epoch 14, batch 5600, loss[loss=0.1788, simple_loss=0.2682, pruned_loss=0.04471, over 4739.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.273, pruned_loss=0.04737, over 934498.89 frames. ], batch size: 14, lr: 5.34e-03, grad_scale: 32.0 +2024-07-28 17:41:22,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=184653.33333333334, ans=0.125 +2024-07-28 17:41:33,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.54 vs. limit=15.0 +2024-07-28 17:41:47,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.49 vs. limit=15.0 +2024-07-28 17:41:55,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.04 vs. limit=6.0 +2024-07-28 17:42:53,633 INFO [train.py:1114] (3/4) Epoch 14, batch 5650, loss[loss=0.1764, simple_loss=0.2759, pruned_loss=0.0384, over 4499.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2715, pruned_loss=0.04651, over 936968.04 frames. 
], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:43:06,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=184720.0, ans=0.125 +2024-07-28 17:43:11,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=184733.33333333334, ans=0.125 +2024-07-28 17:43:17,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=184746.66666666666, ans=0.1 +2024-07-28 17:43:20,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=184746.66666666666, ans=0.125 +2024-07-28 17:43:35,139 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.575e+01 6.312e+01 7.151e+01 9.820e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 17:43:35,894 INFO [train.py:1114] (3/4) Epoch 14, batch 5700, loss[loss=0.2151, simple_loss=0.3098, pruned_loss=0.06022, over 4694.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2714, pruned_loss=0.04637, over 938149.38 frames. ], batch size: 13, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:43:39,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=184786.66666666666, ans=0.0 +2024-07-28 17:43:48,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.78 vs. limit=6.0 +2024-07-28 17:44:00,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=184826.66666666666, ans=0.95 +2024-07-28 17:44:00,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=184826.66666666666, ans=0.05 +2024-07-28 17:44:04,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.77 vs. limit=15.0 +2024-07-28 17:44:09,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.93 vs. limit=10.0 +2024-07-28 17:44:12,489 INFO [train.py:1114] (3/4) Epoch 14, batch 5750, loss[loss=0.1627, simple_loss=0.2627, pruned_loss=0.03136, over 4727.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2727, pruned_loss=0.04674, over 938104.58 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:44:52,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer_na.min_abs, batch_count=184906.66666666666, ans=0.02 +2024-07-28 17:44:53,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=184906.66666666666, ans=0.025 +2024-07-28 17:44:56,333 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.58 vs. 
limit=15.0 +2024-07-28 17:44:56,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.552e+01 6.040e+01 6.826e+01 9.653e+01, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 17:44:57,318 INFO [train.py:1114] (3/4) Epoch 14, batch 5800, loss[loss=0.1889, simple_loss=0.2813, pruned_loss=0.04828, over 4701.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2724, pruned_loss=0.04701, over 937143.10 frames. ], batch size: 19, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:05,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.53 vs. limit=22.5 +2024-07-28 17:45:06,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=184933.33333333334, ans=0.1 +2024-07-28 17:45:13,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=184946.66666666666, ans=0.125 +2024-07-28 17:45:14,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=184946.66666666666, ans=0.0 +2024-07-28 17:45:18,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=184960.0, ans=0.025 +2024-07-28 17:45:24,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=184973.33333333334, ans=0.025 +2024-07-28 17:45:24,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=184973.33333333334, ans=0.0 +2024-07-28 17:45:32,354 INFO [train.py:1114] (3/4) Epoch 14, batch 5850, loss[loss=0.1841, simple_loss=0.2757, pruned_loss=0.04626, over 4461.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.273, pruned_loss=0.04745, over 937854.79 frames. ], batch size: 21, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:45:46,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=185013.33333333334, ans=0.025 +2024-07-28 17:45:50,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=185013.33333333334, ans=0.125 +2024-07-28 17:45:52,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185026.66666666666, ans=0.125 +2024-07-28 17:45:58,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185040.0, ans=0.1 +2024-07-28 17:45:59,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185040.0, ans=0.1 +2024-07-28 17:46:01,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=185040.0, ans=0.125 +2024-07-28 17:46:05,084 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.554e+01 5.675e+01 6.318e+01 7.157e+01 1.040e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 17:46:05,802 INFO [train.py:1114] (3/4) Epoch 14, batch 5900, loss[loss=0.1908, simple_loss=0.2842, pruned_loss=0.04869, over 4686.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2719, pruned_loss=0.04678, over 937910.22 frames. 
], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:07,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=185053.33333333334, ans=0.125 +2024-07-28 17:46:12,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.71 vs. limit=15.0 +2024-07-28 17:46:14,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185066.66666666666, ans=0.0 +2024-07-28 17:46:26,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=185093.33333333334, ans=0.025 +2024-07-28 17:46:30,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=185093.33333333334, ans=0.0 +2024-07-28 17:46:40,166 INFO [train.py:1114] (3/4) Epoch 14, batch 5950, loss[loss=0.1884, simple_loss=0.2804, pruned_loss=0.04823, over 4664.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2718, pruned_loss=0.04641, over 939891.85 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:46:42,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0 +2024-07-28 17:46:43,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=185120.0, ans=0.0 +2024-07-28 17:46:52,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=185133.33333333334, ans=0.2 +2024-07-28 17:46:54,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=185133.33333333334, ans=0.125 +2024-07-28 17:46:57,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=185146.66666666666, ans=10.0 +2024-07-28 17:46:57,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=185146.66666666666, ans=0.125 +2024-07-28 17:47:10,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=185173.33333333334, ans=0.125 +2024-07-28 17:47:18,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.153e+01 5.664e+01 6.270e+01 7.000e+01 1.010e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-28 17:47:19,014 INFO [train.py:1114] (3/4) Epoch 14, batch 6000, loss[loss=0.2067, simple_loss=0.2955, pruned_loss=0.05897, over 4157.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2712, pruned_loss=0.04636, over 937208.15 frames. ], batch size: 25, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:47:19,014 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 17:49:17,896 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1656, simple_loss=0.2686, pruned_loss=0.03133, over 944034.00 frames. 
+2024-07-28 17:49:17,897 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 17:49:17,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=185186.66666666666, ans=0.2 +2024-07-28 17:49:22,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=185186.66666666666, ans=0.5 +2024-07-28 17:49:27,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=185200.0, ans=0.125 +2024-07-28 17:49:30,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=185200.0, ans=0.0 +2024-07-28 17:49:43,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=185226.66666666666, ans=0.125 +2024-07-28 17:49:43,627 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.21 vs. limit=22.5 +2024-07-28 17:49:48,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=185240.0, ans=0.07 +2024-07-28 17:49:51,987 INFO [train.py:1114] (3/4) Epoch 14, batch 6050, loss[loss=0.1698, simple_loss=0.2578, pruned_loss=0.04095, over 4773.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2705, pruned_loss=0.04634, over 938521.63 frames. ], batch size: 12, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:49:59,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=185266.66666666666, ans=0.1 +2024-07-28 17:50:13,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=185293.33333333334, ans=0.125 +2024-07-28 17:50:26,095 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.501e+01 6.141e+01 7.204e+01 9.755e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-28 17:50:26,760 INFO [train.py:1114] (3/4) Epoch 14, batch 6100, loss[loss=0.2068, simple_loss=0.2965, pruned_loss=0.05857, over 4672.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2707, pruned_loss=0.04618, over 937838.34 frames. ], batch size: 15, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:50:41,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=185346.66666666666, ans=0.125 +2024-07-28 17:50:44,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=185346.66666666666, ans=0.2 +2024-07-28 17:51:00,626 INFO [train.py:1114] (3/4) Epoch 14, batch 6150, loss[loss=0.2297, simple_loss=0.3082, pruned_loss=0.07566, over 3254.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2718, pruned_loss=0.04643, over 936653.48 frames. 
], batch size: 35, lr: 5.33e-03, grad_scale: 32.0 +2024-07-28 17:51:02,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=185386.66666666666, ans=0.2 +2024-07-28 17:51:12,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=185400.0, ans=0.125 +2024-07-28 17:51:16,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=185413.33333333334, ans=0.0 +2024-07-28 17:51:33,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.416e+01 5.520e+01 6.468e+01 7.669e+01 1.156e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-28 17:51:33,903 INFO [train.py:1114] (3/4) Epoch 14, batch 6200, loss[loss=0.1881, simple_loss=0.2745, pruned_loss=0.05084, over 4743.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2713, pruned_loss=0.04596, over 936006.07 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:51:46,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=15.0 +2024-07-28 17:51:52,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=185480.0, ans=0.125 +2024-07-28 17:51:53,235 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.85 vs. limit=15.0 +2024-07-28 17:52:00,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=185493.33333333334, ans=0.025 +2024-07-28 17:52:02,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=185493.33333333334, ans=0.025 +2024-07-28 17:52:11,474 INFO [train.py:1114] (3/4) Epoch 14, batch 6250, loss[loss=0.2259, simple_loss=0.316, pruned_loss=0.06787, over 4807.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.272, pruned_loss=0.04684, over 933045.59 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:13,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=185520.0, ans=0.0 +2024-07-28 17:52:16,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.05 vs. limit=15.0 +2024-07-28 17:52:16,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.66 vs. 
limit=15.0 +2024-07-28 17:52:17,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185533.33333333334, ans=0.1 +2024-07-28 17:52:26,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=185546.66666666666, ans=0.0 +2024-07-28 17:52:29,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=185546.66666666666, ans=0.1 +2024-07-28 17:52:35,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=185560.0, ans=0.0 +2024-07-28 17:52:40,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=185573.33333333334, ans=0.0 +2024-07-28 17:52:44,095 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.81 vs. limit=15.0 +2024-07-28 17:52:46,202 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.580e+01 6.337e+01 7.212e+01 1.101e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 17:52:46,905 INFO [train.py:1114] (3/4) Epoch 14, batch 6300, loss[loss=0.1598, simple_loss=0.2315, pruned_loss=0.04402, over 4540.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2739, pruned_loss=0.04817, over 930340.85 frames. ], batch size: 10, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:52:47,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=185586.66666666666, ans=0.05 +2024-07-28 17:53:04,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=185613.33333333334, ans=0.125 +2024-07-28 17:53:05,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=185613.33333333334, ans=0.07 +2024-07-28 17:53:19,480 INFO [train.py:1114] (3/4) Epoch 14, batch 6350, loss[loss=0.1793, simple_loss=0.2707, pruned_loss=0.04395, over 4614.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2726, pruned_loss=0.04772, over 934855.89 frames. ], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:53:38,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185680.0, ans=0.0 +2024-07-28 17:53:40,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185693.33333333334, ans=0.0 +2024-07-28 17:53:42,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=185693.33333333334, ans=0.0 +2024-07-28 17:53:51,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.48 vs. limit=10.0 +2024-07-28 17:54:02,939 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.778e+01 6.430e+01 7.550e+01 1.026e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 17:54:03,634 INFO [train.py:1114] (3/4) Epoch 14, batch 6400, loss[loss=0.1953, simple_loss=0.2793, pruned_loss=0.05569, over 4641.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2723, pruned_loss=0.04781, over 935827.99 frames. 
], batch size: 13, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:54:08,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=185720.0, ans=0.0 +2024-07-28 17:54:19,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=185746.66666666666, ans=0.2 +2024-07-28 17:54:22,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=185746.66666666666, ans=0.0 +2024-07-28 17:54:24,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=185746.66666666666, ans=0.0 +2024-07-28 17:54:29,643 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.50 vs. limit=15.0 +2024-07-28 17:54:39,359 INFO [train.py:1114] (3/4) Epoch 14, batch 6450, loss[loss=0.197, simple_loss=0.2926, pruned_loss=0.05073, over 4576.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2728, pruned_loss=0.04762, over 939430.49 frames. ], batch size: 21, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:03,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.19 vs. limit=22.5 +2024-07-28 17:55:11,352 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.736e+01 6.499e+01 7.740e+01 1.076e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 17:55:12,081 INFO [train.py:1114] (3/4) Epoch 14, batch 6500, loss[loss=0.2251, simple_loss=0.3047, pruned_loss=0.07279, over 3547.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2724, pruned_loss=0.04696, over 940566.42 frames. ], batch size: 35, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:15,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=185853.33333333334, ans=0.125 +2024-07-28 17:55:17,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=185853.33333333334, ans=0.1 +2024-07-28 17:55:26,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.70 vs. limit=6.0 +2024-07-28 17:55:32,321 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:55:33,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=185893.33333333334, ans=0.2 +2024-07-28 17:55:36,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185893.33333333334, ans=0.125 +2024-07-28 17:55:36,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=185893.33333333334, ans=0.0 +2024-07-28 17:55:40,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.69 vs. limit=15.0 +2024-07-28 17:55:47,173 INFO [train.py:1114] (3/4) Epoch 14, batch 6550, loss[loss=0.149, simple_loss=0.2278, pruned_loss=0.0351, over 4807.00 frames. 
], tot_loss[loss=0.1816, simple_loss=0.2712, pruned_loss=0.04605, over 943388.70 frames. ], batch size: 11, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:55:49,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=185920.0, ans=0.125 +2024-07-28 17:55:54,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=185933.33333333334, ans=0.2 +2024-07-28 17:55:55,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=185933.33333333334, ans=0.0 +2024-07-28 17:55:55,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=185933.33333333334, ans=0.125 +2024-07-28 17:55:56,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=185933.33333333334, ans=0.125 +2024-07-28 17:55:57,362 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0 +2024-07-28 17:55:57,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.26 vs. limit=15.0 +2024-07-28 17:56:11,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=185960.0, ans=0.04949747468305833 +2024-07-28 17:56:12,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=185973.33333333334, ans=0.125 +2024-07-28 17:56:15,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0 +2024-07-28 17:56:19,459 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.641e+01 6.098e+01 6.852e+01 1.074e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 17:56:20,139 INFO [train.py:1114] (3/4) Epoch 14, batch 6600, loss[loss=0.1886, simple_loss=0.2831, pruned_loss=0.0471, over 4922.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.271, pruned_loss=0.04619, over 945244.14 frames. ], batch size: 14, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:21,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=185986.66666666666, ans=0.125 +2024-07-28 17:56:32,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=186000.0, ans=0.2 +2024-07-28 17:56:48,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.45 vs. limit=15.0 +2024-07-28 17:56:53,742 INFO [train.py:1114] (3/4) Epoch 14, batch 6650, loss[loss=0.1815, simple_loss=0.269, pruned_loss=0.047, over 4608.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2708, pruned_loss=0.04612, over 943601.85 frames. 
], batch size: 17, lr: 5.32e-03, grad_scale: 32.0 +2024-07-28 17:56:55,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=186053.33333333334, ans=0.125 +2024-07-28 17:57:04,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=186066.66666666666, ans=0.125 +2024-07-28 17:57:13,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=186093.33333333334, ans=0.125 +2024-07-28 17:57:30,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+01 5.766e+01 6.384e+01 7.192e+01 1.160e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 17:57:30,771 INFO [train.py:1114] (3/4) Epoch 14, batch 6700, loss[loss=0.1819, simple_loss=0.2777, pruned_loss=0.04303, over 4737.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.271, pruned_loss=0.04597, over 942250.59 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:57:38,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=186133.33333333334, ans=0.125 +2024-07-28 17:57:47,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=186146.66666666666, ans=0.0 +2024-07-28 17:57:57,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=186173.33333333334, ans=0.0 +2024-07-28 17:57:59,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186173.33333333334, ans=0.125 +2024-07-28 17:58:06,978 INFO [train.py:1114] (3/4) Epoch 14, batch 6750, loss[loss=0.2037, simple_loss=0.2971, pruned_loss=0.05522, over 4232.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2718, pruned_loss=0.04643, over 940142.85 frames. ], batch size: 25, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:15,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=186200.0, ans=0.125 +2024-07-28 17:58:16,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=186200.0, ans=0.125 +2024-07-28 17:58:24,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=186213.33333333334, ans=0.125 +2024-07-28 17:58:27,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=186226.66666666666, ans=0.07 +2024-07-28 17:58:28,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=186226.66666666666, ans=0.125 +2024-07-28 17:58:28,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=186226.66666666666, ans=0.0 +2024-07-28 17:58:30,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.45 vs. 
limit=15.0 +2024-07-28 17:58:37,404 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:58:40,452 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.736e+01 5.569e+01 6.146e+01 7.020e+01 9.338e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 17:58:41,118 INFO [train.py:1114] (3/4) Epoch 14, batch 6800, loss[loss=0.21, simple_loss=0.284, pruned_loss=0.06796, over 4635.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.272, pruned_loss=0.04683, over 938586.03 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:58:42,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=186253.33333333334, ans=0.0 +2024-07-28 17:58:42,966 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.64 vs. limit=12.0 +2024-07-28 17:58:43,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186253.33333333334, ans=0.1 +2024-07-28 17:58:45,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186253.33333333334, ans=0.1 +2024-07-28 17:58:45,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-07-28 17:58:49,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186266.66666666666, ans=0.1 +2024-07-28 17:58:51,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=186266.66666666666, ans=0.09899494936611666 +2024-07-28 17:58:53,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=186280.0, ans=0.125 +2024-07-28 17:58:57,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=186280.0, ans=0.0 +2024-07-28 17:59:03,373 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 17:59:14,772 INFO [train.py:1114] (3/4) Epoch 14, batch 6850, loss[loss=0.1705, simple_loss=0.2707, pruned_loss=0.03516, over 4695.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2716, pruned_loss=0.04698, over 940508.85 frames. 
], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 17:59:18,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=186320.0, ans=0.125 +2024-07-28 17:59:22,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=186333.33333333334, ans=0.125 +2024-07-28 17:59:24,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=186333.33333333334, ans=0.025 +2024-07-28 17:59:31,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=186346.66666666666, ans=15.0 +2024-07-28 17:59:34,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=186360.0, ans=0.125 +2024-07-28 17:59:44,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=186373.33333333334, ans=0.125 +2024-07-28 17:59:47,792 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.718e+01 6.199e+01 6.949e+01 1.067e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 17:59:48,472 INFO [train.py:1114] (3/4) Epoch 14, batch 6900, loss[loss=0.1796, simple_loss=0.2719, pruned_loss=0.04366, over 4959.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2716, pruned_loss=0.04676, over 943300.45 frames. ], batch size: 13, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:01,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=186400.0, ans=0.125 +2024-07-28 18:00:06,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=186413.33333333334, ans=0.0 +2024-07-28 18:00:08,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186426.66666666666, ans=0.1 +2024-07-28 18:00:14,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.64 vs. limit=22.5 +2024-07-28 18:00:22,488 INFO [train.py:1114] (3/4) Epoch 14, batch 6950, loss[loss=0.1544, simple_loss=0.2394, pruned_loss=0.03472, over 4472.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2713, pruned_loss=0.04679, over 940464.29 frames. ], batch size: 10, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:49,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=186506.66666666666, ans=0.125 +2024-07-28 18:00:52,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff3.min_abs, batch_count=186506.66666666666, ans=0.2 +2024-07-28 18:00:53,820 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.42 vs. limit=12.0 +2024-07-28 18:00:54,796 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.637e+01 6.195e+01 7.111e+01 9.946e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 18:00:55,478 INFO [train.py:1114] (3/4) Epoch 14, batch 7000, loss[loss=0.1774, simple_loss=0.2616, pruned_loss=0.04659, over 4613.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2709, pruned_loss=0.04669, over 938913.80 frames. 
], batch size: 17, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:00:56,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=186520.0, ans=0.0 +2024-07-28 18:00:59,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186520.0, ans=0.1 +2024-07-28 18:01:03,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=186533.33333333334, ans=0.125 +2024-07-28 18:01:04,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=186533.33333333334, ans=0.0 +2024-07-28 18:01:26,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=186573.33333333334, ans=0.07 +2024-07-28 18:01:28,538 INFO [train.py:1114] (3/4) Epoch 14, batch 7050, loss[loss=0.2325, simple_loss=0.315, pruned_loss=0.07495, over 4682.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2715, pruned_loss=0.04717, over 942095.43 frames. ], batch size: 19, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:01:28,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186586.66666666666, ans=0.1 +2024-07-28 18:01:31,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.11 vs. limit=10.0 +2024-07-28 18:01:42,368 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:01:46,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=186613.33333333334, ans=0.0 +2024-07-28 18:02:00,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=186640.0, ans=0.125 +2024-07-28 18:02:03,021 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.702e+01 6.224e+01 7.168e+01 1.076e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 18:02:03,697 INFO [train.py:1114] (3/4) Epoch 14, batch 7100, loss[loss=0.1519, simple_loss=0.2502, pruned_loss=0.02679, over 4792.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2727, pruned_loss=0.04768, over 935489.12 frames. ], batch size: 15, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:02:20,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.48 vs. limit=12.0 +2024-07-28 18:02:27,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186680.0, ans=0.1 +2024-07-28 18:02:30,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.09 vs. limit=15.0 +2024-07-28 18:02:33,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=186693.33333333334, ans=0.0 +2024-07-28 18:02:40,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.46 vs. 
limit=22.5 +2024-07-28 18:02:46,764 INFO [train.py:1114] (3/4) Epoch 14, batch 7150, loss[loss=0.2137, simple_loss=0.2912, pruned_loss=0.06808, over 4491.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2715, pruned_loss=0.04714, over 936647.65 frames. ], batch size: 21, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:02:48,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=186720.0, ans=0.1 +2024-07-28 18:02:52,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=186720.0, ans=10.0 +2024-07-28 18:02:53,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=186733.33333333334, ans=0.125 +2024-07-28 18:03:52,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=186746.66666666666, ans=0.1 +2024-07-28 18:03:58,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=186746.66666666666, ans=0.0 +2024-07-28 18:04:03,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=186760.0, ans=0.0 +2024-07-28 18:04:09,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=186760.0, ans=0.125 +2024-07-28 18:04:20,569 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.638e+01 6.134e+01 6.924e+01 9.250e+01, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 18:04:21,225 INFO [train.py:1114] (3/4) Epoch 14, batch 7200, loss[loss=0.2196, simple_loss=0.3099, pruned_loss=0.06459, over 4791.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2724, pruned_loss=0.04725, over 937552.90 frames. ], batch size: 15, lr: 5.31e-03, grad_scale: 32.0 +2024-07-28 18:04:33,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=186786.66666666666, ans=0.125 +2024-07-28 18:04:54,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0 +2024-07-28 18:04:55,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=186813.33333333334, ans=0.125 +2024-07-28 18:04:57,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=186813.33333333334, ans=0.125 +2024-07-28 18:04:59,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=186826.66666666666, ans=0.0 +2024-07-28 18:05:14,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=6.18 vs. limit=15.0 +2024-07-28 18:05:17,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=186840.0, ans=0.125 +2024-07-28 18:05:17,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=186840.0, ans=0.05 +2024-07-28 18:05:25,091 INFO [train.py:1114] (3/4) Epoch 14, batch 7250, loss[loss=0.1966, simple_loss=0.285, pruned_loss=0.05413, over 4844.00 frames. 
], tot_loss[loss=0.183, simple_loss=0.2718, pruned_loss=0.04715, over 939261.51 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 32.0 +2024-07-28 18:05:25,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=186853.33333333334, ans=0.0 +2024-07-28 18:05:46,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=186880.0, ans=0.09899494936611666 +2024-07-28 18:05:50,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=186880.0, ans=0.0 +2024-07-28 18:06:00,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=15.0 +2024-07-28 18:06:05,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=186893.33333333334, ans=0.09899494936611666 +2024-07-28 18:06:07,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.59 vs. limit=10.0 +2024-07-28 18:06:09,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=186906.66666666666, ans=0.1 +2024-07-28 18:06:13,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.490e+01 5.624e+01 6.211e+01 6.890e+01 1.048e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 18:06:13,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=186920.0, ans=0.025 +2024-07-28 18:06:14,140 INFO [train.py:1114] (3/4) Epoch 14, batch 7300, loss[loss=0.1607, simple_loss=0.2526, pruned_loss=0.03442, over 4851.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2714, pruned_loss=0.04705, over 939963.06 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 32.0 +2024-07-28 18:06:15,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=186920.0, ans=0.2 +2024-07-28 18:06:17,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=186920.0, ans=0.125 +2024-07-28 18:06:26,249 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:06:26,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=186946.66666666666, ans=0.04949747468305833 +2024-07-28 18:06:32,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=186946.66666666666, ans=0.125 +2024-07-28 18:06:36,092 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:06:46,848 INFO [train.py:1114] (3/4) Epoch 14, batch 7350, loss[loss=0.149, simple_loss=0.2461, pruned_loss=0.0259, over 4640.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2707, pruned_loss=0.04643, over 939155.97 frames. 
], batch size: 12, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:06:52,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=187000.0, ans=0.125 +2024-07-28 18:06:54,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.13 vs. limit=15.0 +2024-07-28 18:07:10,291 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0 +2024-07-28 18:07:10,645 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:07:20,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=187040.0, ans=0.04949747468305833 +2024-07-28 18:07:20,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=187040.0, ans=0.0 +2024-07-28 18:07:21,805 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.480e+01 5.943e+01 6.743e+01 9.456e+01, threshold=1.189e+02, percent-clipped=0.0 +2024-07-28 18:07:22,466 INFO [train.py:1114] (3/4) Epoch 14, batch 7400, loss[loss=0.2424, simple_loss=0.3296, pruned_loss=0.07765, over 4687.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2721, pruned_loss=0.04674, over 940351.99 frames. ], batch size: 13, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:07:22,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=187053.33333333334, ans=0.125 +2024-07-28 18:07:38,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=187080.0, ans=0.125 +2024-07-28 18:07:44,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=187093.33333333334, ans=22.5 +2024-07-28 18:07:48,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=187106.66666666666, ans=0.125 +2024-07-28 18:07:50,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=187106.66666666666, ans=0.035 +2024-07-28 18:07:52,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=187106.66666666666, ans=0.125 +2024-07-28 18:07:54,560 INFO [train.py:1114] (3/4) Epoch 14, batch 7450, loss[loss=0.1857, simple_loss=0.2662, pruned_loss=0.05259, over 4623.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2721, pruned_loss=0.04716, over 937785.83 frames. 
], batch size: 11, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:07:56,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=187120.0, ans=0.125 +2024-07-28 18:08:02,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.max_abs, batch_count=187133.33333333334, ans=10.0 +2024-07-28 18:08:11,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=187146.66666666666, ans=0.125 +2024-07-28 18:08:16,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=187160.0, ans=0.0 +2024-07-28 18:08:21,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.93 vs. limit=12.0 +2024-07-28 18:08:26,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=187173.33333333334, ans=0.125 +2024-07-28 18:08:27,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=187173.33333333334, ans=0.125 +2024-07-28 18:08:28,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187173.33333333334, ans=0.125 +2024-07-28 18:08:28,502 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.589e+01 6.200e+01 6.910e+01 1.220e+02, threshold=1.240e+02, percent-clipped=1.0 +2024-07-28 18:08:29,223 INFO [train.py:1114] (3/4) Epoch 14, batch 7500, loss[loss=0.2467, simple_loss=0.325, pruned_loss=0.08417, over 3542.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04741, over 935709.36 frames. ], batch size: 35, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:08:29,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=187186.66666666666, ans=0.125 +2024-07-28 18:08:32,304 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-28 18:08:33,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=187186.66666666666, ans=0.125 +2024-07-28 18:08:34,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=187186.66666666666, ans=0.125 +2024-07-28 18:09:22,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.95 vs. limit=15.0 +2024-07-28 18:09:23,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=187253.33333333334, ans=0.0 +2024-07-28 18:09:23,921 INFO [train.py:1114] (3/4) Epoch 14, batch 7550, loss[loss=0.2134, simple_loss=0.3093, pruned_loss=0.05878, over 4645.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2744, pruned_loss=0.0479, over 935692.98 frames. 
], batch size: 17, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:09:24,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=187253.33333333334, ans=0.035 +2024-07-28 18:09:24,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=187253.33333333334, ans=10.0 +2024-07-28 18:09:35,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=187266.66666666666, ans=0.125 +2024-07-28 18:09:36,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=187266.66666666666, ans=0.0 +2024-07-28 18:09:37,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=187280.0, ans=0.2 +2024-07-28 18:09:37,465 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:09:53,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=187306.66666666666, ans=0.0 +2024-07-28 18:09:56,066 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.167e+01 5.585e+01 6.021e+01 6.660e+01 1.005e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 18:09:56,709 INFO [train.py:1114] (3/4) Epoch 14, batch 7600, loss[loss=0.2018, simple_loss=0.2914, pruned_loss=0.05607, over 4814.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2744, pruned_loss=0.04764, over 937567.91 frames. ], batch size: 14, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:10:00,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=187320.0, ans=0.0 +2024-07-28 18:10:05,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=187333.33333333334, ans=15.0 +2024-07-28 18:10:17,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187360.0, ans=0.0 +2024-07-28 18:10:29,867 INFO [train.py:1114] (3/4) Epoch 14, batch 7650, loss[loss=0.1594, simple_loss=0.2485, pruned_loss=0.03508, over 4943.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2739, pruned_loss=0.04765, over 936156.56 frames. ], batch size: 12, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:10:34,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.82 vs. limit=15.0 +2024-07-28 18:11:02,119 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.854e+01 5.638e+01 6.341e+01 7.114e+01 1.063e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 18:11:02,778 INFO [train.py:1114] (3/4) Epoch 14, batch 7700, loss[loss=0.1723, simple_loss=0.2684, pruned_loss=0.03811, over 4697.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2743, pruned_loss=0.04769, over 933591.11 frames. 
], batch size: 13, lr: 5.30e-03, grad_scale: 64.0 +2024-07-28 18:11:04,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=187453.33333333334, ans=0.125 +2024-07-28 18:11:11,581 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:11:12,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.12 vs. limit=12.0 +2024-07-28 18:11:32,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5 +2024-07-28 18:11:48,690 INFO [train.py:1114] (3/4) Epoch 14, batch 7750, loss[loss=0.1959, simple_loss=0.2939, pruned_loss=0.04899, over 4925.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.275, pruned_loss=0.04732, over 935165.59 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:12:01,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=187546.66666666666, ans=12.0 +2024-07-28 18:12:09,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=187560.0, ans=0.125 +2024-07-28 18:12:10,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=187560.0, ans=0.5 +2024-07-28 18:12:17,608 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.24 vs. limit=15.0 +2024-07-28 18:12:22,669 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 5.694e+01 6.124e+01 6.801e+01 8.564e+01, threshold=1.225e+02, percent-clipped=0.0 +2024-07-28 18:12:24,028 INFO [train.py:1114] (3/4) Epoch 14, batch 7800, loss[loss=0.1694, simple_loss=0.2581, pruned_loss=0.04039, over 4663.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2749, pruned_loss=0.04717, over 936867.27 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:12:30,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=187600.0, ans=0.07 +2024-07-28 18:12:34,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.14 vs. limit=15.0 +2024-07-28 18:12:42,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=187613.33333333334, ans=0.125 +2024-07-28 18:13:18,102 INFO [train.py:1114] (3/4) Epoch 14, batch 7850, loss[loss=0.1579, simple_loss=0.2415, pruned_loss=0.0372, over 4526.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2746, pruned_loss=0.04753, over 935187.00 frames. 
], batch size: 10, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:13:18,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187653.33333333334, ans=0.1 +2024-07-28 18:13:32,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=187680.0, ans=0.125 +2024-07-28 18:13:36,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=187680.0, ans=0.125 +2024-07-28 18:13:44,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.98 vs. limit=15.0 +2024-07-28 18:13:45,177 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:13:49,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.67 vs. limit=6.0 +2024-07-28 18:13:51,289 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.677e+01 6.181e+01 6.848e+01 9.012e+01, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 18:13:51,946 INFO [train.py:1114] (3/4) Epoch 14, batch 7900, loss[loss=0.1992, simple_loss=0.283, pruned_loss=0.0577, over 4871.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2756, pruned_loss=0.04832, over 932886.19 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 64.0 +2024-07-28 18:13:52,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=187720.0, ans=0.1 +2024-07-28 18:13:53,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=187720.0, ans=0.125 +2024-07-28 18:14:00,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=187733.33333333334, ans=0.0 +2024-07-28 18:14:09,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.67 vs. limit=10.0 +2024-07-28 18:14:12,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187760.0, ans=0.125 +2024-07-28 18:14:13,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=187760.0, ans=0.0 +2024-07-28 18:14:20,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.20 vs. limit=15.0 +2024-07-28 18:14:25,547 INFO [train.py:1114] (3/4) Epoch 14, batch 7950, loss[loss=0.1789, simple_loss=0.2724, pruned_loss=0.04268, over 3633.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2749, pruned_loss=0.04787, over 935356.20 frames. 
], batch size: 35, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:14:36,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=187800.0, ans=0.2 +2024-07-28 18:14:47,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=187826.66666666666, ans=0.125 +2024-07-28 18:14:48,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=187826.66666666666, ans=0.2 +2024-07-28 18:14:50,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.51 vs. limit=6.0 +2024-07-28 18:14:56,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=187840.0, ans=0.1 +2024-07-28 18:15:01,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.795e+01 6.761e+01 7.899e+01 1.107e+02, threshold=1.352e+02, percent-clipped=0.0 +2024-07-28 18:15:01,041 INFO [train.py:1114] (3/4) Epoch 14, batch 8000, loss[loss=0.1641, simple_loss=0.2436, pruned_loss=0.04227, over 4611.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2729, pruned_loss=0.04727, over 934916.58 frames. ], batch size: 11, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:15:01,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=187853.33333333334, ans=0.2 +2024-07-28 18:15:22,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=187880.0, ans=0.125 +2024-07-28 18:15:24,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.34 vs. limit=15.0 +2024-07-28 18:15:26,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=187893.33333333334, ans=0.125 +2024-07-28 18:16:09,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=187920.0, ans=0.0 +2024-07-28 18:16:09,700 INFO [train.py:1114] (3/4) Epoch 14, batch 8050, loss[loss=0.1808, simple_loss=0.2847, pruned_loss=0.0385, over 4805.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2734, pruned_loss=0.04728, over 934320.79 frames. ], batch size: 14, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:16:24,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=187946.66666666666, ans=0.0 +2024-07-28 18:16:41,832 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.834e+01 6.714e+01 7.875e+01 1.229e+02, threshold=1.343e+02, percent-clipped=0.0 +2024-07-28 18:16:41,866 INFO [train.py:1114] (3/4) Epoch 14, batch 8100, loss[loss=0.1803, simple_loss=0.2735, pruned_loss=0.0436, over 4806.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2746, pruned_loss=0.04732, over 933512.40 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:16:50,807 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.47 vs. 
limit=15.0 +2024-07-28 18:16:59,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188013.33333333334, ans=0.1 +2024-07-28 18:16:59,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=188013.33333333334, ans=0.0 +2024-07-28 18:16:59,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.76 vs. limit=15.0 +2024-07-28 18:17:14,590 INFO [train.py:1114] (3/4) Epoch 14, batch 8150, loss[loss=0.1764, simple_loss=0.2714, pruned_loss=0.0407, over 4799.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2732, pruned_loss=0.04709, over 936979.01 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:17:18,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=188053.33333333334, ans=0.09899494936611666 +2024-07-28 18:17:30,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=188080.0, ans=0.07 +2024-07-28 18:17:30,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.28 vs. limit=15.0 +2024-07-28 18:17:38,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=188093.33333333334, ans=0.0 +2024-07-28 18:17:44,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=188106.66666666666, ans=0.2 +2024-07-28 18:17:45,887 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 18:17:48,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=188106.66666666666, ans=0.125 +2024-07-28 18:17:49,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=188120.0, ans=0.0 +2024-07-28 18:17:49,787 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.677e+01 5.661e+01 6.103e+01 6.886e+01 9.464e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-28 18:17:49,821 INFO [train.py:1114] (3/4) Epoch 14, batch 8200, loss[loss=0.1779, simple_loss=0.2782, pruned_loss=0.03885, over 4797.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2731, pruned_loss=0.04686, over 938424.21 frames. ], batch size: 15, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:17:52,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188120.0, ans=0.125 +2024-07-28 18:17:53,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.49 vs. 
limit=12.0 +2024-07-28 18:17:59,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=188133.33333333334, ans=0.125 +2024-07-28 18:18:03,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=188146.66666666666, ans=0.2 +2024-07-28 18:18:07,596 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=188146.66666666666, ans=10.0 +2024-07-28 18:18:17,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=188173.33333333334, ans=0.0 +2024-07-28 18:18:20,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188173.33333333334, ans=0.1 +2024-07-28 18:18:22,385 INFO [train.py:1114] (3/4) Epoch 14, batch 8250, loss[loss=0.1724, simple_loss=0.2497, pruned_loss=0.04756, over 4890.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2726, pruned_loss=0.04702, over 938653.87 frames. ], batch size: 13, lr: 5.29e-03, grad_scale: 32.0 +2024-07-28 18:18:23,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.20 vs. limit=10.0 +2024-07-28 18:18:31,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188200.0, ans=0.1 +2024-07-28 18:18:34,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188200.0, ans=0.125 +2024-07-28 18:18:36,454 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=28.30 vs. limit=22.5 +2024-07-28 18:18:40,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188213.33333333334, ans=0.1 +2024-07-28 18:18:48,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=188240.0, ans=0.125 +2024-07-28 18:18:52,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=188240.0, ans=0.0 +2024-07-28 18:18:54,602 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.568e+01 5.616e+01 6.253e+01 7.427e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 18:18:54,635 INFO [train.py:1114] (3/4) Epoch 14, batch 8300, loss[loss=0.1837, simple_loss=0.283, pruned_loss=0.04216, over 4911.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2728, pruned_loss=0.04714, over 938654.49 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:18:57,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=188253.33333333334, ans=0.0 +2024-07-28 18:19:04,190 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.75 vs. 
limit=10.0 +2024-07-28 18:19:05,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=188266.66666666666, ans=0.0 +2024-07-28 18:19:21,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=188306.66666666666, ans=0.125 +2024-07-28 18:19:25,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=188306.66666666666, ans=0.0 +2024-07-28 18:19:27,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188320.0, ans=0.1 +2024-07-28 18:19:28,348 INFO [train.py:1114] (3/4) Epoch 14, batch 8350, loss[loss=0.1769, simple_loss=0.2646, pruned_loss=0.04456, over 4814.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2727, pruned_loss=0.04701, over 941576.69 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:19:29,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188320.0, ans=0.125 +2024-07-28 18:19:39,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=188333.33333333334, ans=0.0 +2024-07-28 18:20:00,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 5.977e+01 6.680e+01 9.102e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 18:20:00,943 INFO [train.py:1114] (3/4) Epoch 14, batch 8400, loss[loss=0.1714, simple_loss=0.2606, pruned_loss=0.04111, over 4778.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2726, pruned_loss=0.04675, over 940476.82 frames. ], batch size: 12, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:20:03,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=188386.66666666666, ans=0.1 +2024-07-28 18:20:05,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=188386.66666666666, ans=0.0 +2024-07-28 18:20:07,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=188400.0, ans=0.02 +2024-07-28 18:20:08,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=188400.0, ans=0.02 +2024-07-28 18:20:20,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188426.66666666666, ans=0.125 +2024-07-28 18:20:25,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=188426.66666666666, ans=0.0 +2024-07-28 18:20:32,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=188440.0, ans=0.0 +2024-07-28 18:20:33,551 INFO [train.py:1114] (3/4) Epoch 14, batch 8450, loss[loss=0.1989, simple_loss=0.2915, pruned_loss=0.05317, over 4796.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2736, pruned_loss=0.04701, over 939326.31 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:20:40,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.66 vs. 
limit=15.0 +2024-07-28 18:20:45,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.33 vs. limit=15.0 +2024-07-28 18:20:51,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=188480.0, ans=0.5 +2024-07-28 18:21:03,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=188506.66666666666, ans=0.125 +2024-07-28 18:21:04,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=188506.66666666666, ans=0.125 +2024-07-28 18:21:05,571 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 6.107e+01 6.939e+01 8.222e+01 1.191e+02, threshold=1.388e+02, percent-clipped=0.0 +2024-07-28 18:21:05,605 INFO [train.py:1114] (3/4) Epoch 14, batch 8500, loss[loss=0.1821, simple_loss=0.2581, pruned_loss=0.053, over 4616.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.273, pruned_loss=0.04697, over 939053.48 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:21:07,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=188520.0, ans=0.125 +2024-07-28 18:21:24,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=188546.66666666666, ans=0.0 +2024-07-28 18:21:27,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188560.0, ans=0.125 +2024-07-28 18:21:38,569 INFO [train.py:1114] (3/4) Epoch 14, batch 8550, loss[loss=0.151, simple_loss=0.2366, pruned_loss=0.0327, over 4800.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.272, pruned_loss=0.04682, over 940262.01 frames. ], batch size: 11, lr: 5.28e-03, grad_scale: 32.0 +2024-07-28 18:21:50,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=188600.0, ans=0.125 +2024-07-28 18:21:53,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=188613.33333333334, ans=0.125 +2024-07-28 18:22:08,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=188613.33333333334, ans=0.0 +2024-07-28 18:22:16,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=188626.66666666666, ans=0.125 +2024-07-28 18:22:17,618 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.96 vs. limit=15.0 +2024-07-28 18:22:17,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=188626.66666666666, ans=0.07 +2024-07-28 18:22:26,142 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.708e+01 6.188e+01 7.242e+01 1.269e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-28 18:22:26,175 INFO [train.py:1114] (3/4) Epoch 14, batch 8600, loss[loss=0.2014, simple_loss=0.2891, pruned_loss=0.05683, over 4805.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2712, pruned_loss=0.04678, over 939962.39 frames. 
], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:22:28,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=188653.33333333334, ans=0.0
+2024-07-28 18:22:29,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=188653.33333333334, ans=0.1
+2024-07-28 18:22:33,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=188666.66666666666, ans=0.0
+2024-07-28 18:22:38,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=188680.0, ans=0.1
+2024-07-28 18:22:44,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=188680.0, ans=0.2
+2024-07-28 18:22:52,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=188680.0, ans=0.125
+2024-07-28 18:22:54,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=188693.33333333334, ans=0.125
+2024-07-28 18:22:58,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=188693.33333333334, ans=0.0
+2024-07-28 18:22:58,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=188693.33333333334, ans=0.04949747468305833
+2024-07-28 18:23:06,725 INFO [train.py:1114] (3/4) Epoch 14, batch 8650, loss[loss=0.1964, simple_loss=0.2734, pruned_loss=0.05971, over 4900.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2708, pruned_loss=0.04661, over 941419.17 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:23:06,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=188720.0, ans=0.125
+2024-07-28 18:23:15,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=188733.33333333334, ans=0.125
+2024-07-28 18:23:36,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=188773.33333333334, ans=0.04949747468305833
+2024-07-28 18:23:38,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.747e+01 6.354e+01 7.150e+01 1.051e+02, threshold=1.271e+02, percent-clipped=0.0
+2024-07-28 18:23:38,994 INFO [train.py:1114] (3/4) Epoch 14, batch 8700, loss[loss=0.1789, simple_loss=0.2828, pruned_loss=0.0375, over 4760.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2719, pruned_loss=0.04692, over 938555.15 frames. ], batch size: 13, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:23:41,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=188786.66666666666, ans=0.125
+2024-07-28 18:23:46,947 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.04 vs. limit=6.0
+2024-07-28 18:23:49,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=188800.0, ans=0.0
+2024-07-28 18:23:56,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=188813.33333333334, ans=0.2
+2024-07-28 18:24:00,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=188826.66666666666, ans=0.0
+2024-07-28 18:24:01,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=9.74 vs. limit=12.0
+2024-07-28 18:24:07,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=188840.0, ans=0.04949747468305833
+2024-07-28 18:24:08,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=188840.0, ans=0.125
+2024-07-28 18:24:13,573 INFO [train.py:1114] (3/4) Epoch 14, batch 8750, loss[loss=0.2032, simple_loss=0.3, pruned_loss=0.05325, over 4688.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2734, pruned_loss=0.04759, over 937346.28 frames. ], batch size: 15, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:24:27,502 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=22.26 vs. limit=15.0
+2024-07-28 18:24:30,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0
+2024-07-28 18:24:32,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=188893.33333333334, ans=0.125
+2024-07-28 18:24:32,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=188893.33333333334, ans=0.0
+2024-07-28 18:24:34,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.40 vs. limit=6.0
+2024-07-28 18:24:35,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=188893.33333333334, ans=0.125
+2024-07-28 18:24:42,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=188906.66666666666, ans=0.0
+2024-07-28 18:24:45,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=188920.0, ans=0.0
+2024-07-28 18:24:45,708 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.401e+01 5.577e+01 5.996e+01 6.718e+01 9.459e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-28 18:24:45,742 INFO [train.py:1114] (3/4) Epoch 14, batch 8800, loss[loss=0.1657, simple_loss=0.2669, pruned_loss=0.03231, over 4934.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2735, pruned_loss=0.04757, over 937979.22 frames. ], batch size: 14, lr: 5.28e-03, grad_scale: 32.0
+2024-07-28 18:24:47,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=188920.0, ans=0.125
+2024-07-28 18:24:54,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=5.46 vs. limit=6.0
+2024-07-28 18:25:08,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=188960.0, ans=0.0
+2024-07-28 18:25:18,766 INFO [train.py:1114] (3/4) Epoch 14, batch 8850, loss[loss=0.1935, simple_loss=0.2811, pruned_loss=0.05294, over 4499.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2731, pruned_loss=0.0476, over 932602.16 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:25:25,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=189000.0, ans=0.125
+2024-07-28 18:25:36,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=189013.33333333334, ans=0.125
+2024-07-28 18:25:45,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=189040.0, ans=0.2
+2024-07-28 18:25:51,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.385e+01 5.694e+01 6.232e+01 7.298e+01 9.650e+01, threshold=1.246e+02, percent-clipped=0.0
+2024-07-28 18:25:51,437 INFO [train.py:1114] (3/4) Epoch 14, batch 8900, loss[loss=0.1475, simple_loss=0.2294, pruned_loss=0.03277, over 4947.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2729, pruned_loss=0.0475, over 930598.83 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:25:53,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=189053.33333333334, ans=0.0
+2024-07-28 18:26:16,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=189093.33333333334, ans=0.125
+2024-07-28 18:26:24,558 INFO [train.py:1114] (3/4) Epoch 14, batch 8950, loss[loss=0.2002, simple_loss=0.2903, pruned_loss=0.05504, over 4487.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2729, pruned_loss=0.04747, over 931195.48 frames. ], batch size: 21, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:26:29,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=189120.0, ans=0.025
+2024-07-28 18:26:30,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.21 vs. limit=6.0
+2024-07-28 18:26:38,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.38 vs. limit=15.0
+2024-07-28 18:26:46,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.68 vs. limit=15.0
+2024-07-28 18:26:55,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=189173.33333333334, ans=0.07
+2024-07-28 18:26:57,059 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.595e+01 6.149e+01 7.157e+01 9.804e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-28 18:26:57,092 INFO [train.py:1114] (3/4) Epoch 14, batch 9000, loss[loss=0.1873, simple_loss=0.2726, pruned_loss=0.05104, over 4642.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2725, pruned_loss=0.04734, over 934152.52 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:26:57,092 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 18:27:03,892 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.4951, 2.5789, 3.2615, 3.3042], device='cuda:3')
+2024-07-28 18:27:04,010 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.0641, 3.8026, 2.7064, 2.5555], device='cuda:3')
+2024-07-28 18:27:09,000 INFO [train.py:1146] (3/4) Epoch 14, validation: loss=0.1644, simple_loss=0.2676, pruned_loss=0.03058, over 944034.00 frames.
+2024-07-28 18:27:09,000 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 18:27:11,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=189186.66666666666, ans=0.125
+2024-07-28 18:27:12,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=189186.66666666666, ans=0.025
+2024-07-28 18:27:16,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.10 vs. limit=15.0
+2024-07-28 18:27:26,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=189213.33333333334, ans=0.2
+2024-07-28 18:27:30,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=189226.66666666666, ans=0.125
+2024-07-28 18:27:30,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=189226.66666666666, ans=0.02
+2024-07-28 18:27:32,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=189226.66666666666, ans=0.95
+2024-07-28 18:27:38,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=189240.0, ans=0.025
+2024-07-28 18:27:41,986 INFO [train.py:1114] (3/4) Epoch 14, batch 9050, loss[loss=0.211, simple_loss=0.2877, pruned_loss=0.06712, over 4476.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2718, pruned_loss=0.0469, over 934552.97 frames. ], batch size: 10, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:27:50,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=189266.66666666666, ans=0.2
+2024-07-28 18:27:52,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=189266.66666666666, ans=0.125
+2024-07-28 18:27:57,775 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.55 vs. limit=15.0
+2024-07-28 18:28:14,827 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.696e+01 6.239e+01 6.974e+01 1.014e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 18:28:14,862 INFO [train.py:1114] (3/4) Epoch 14, batch 9100, loss[loss=0.2041, simple_loss=0.2926, pruned_loss=0.05783, over 4933.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2708, pruned_loss=0.04626, over 937011.21 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:28:17,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=189320.0, ans=0.0
+2024-07-28 18:28:34,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=189333.33333333334, ans=0.07
+2024-07-28 18:28:48,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=189373.33333333334, ans=0.125
+2024-07-28 18:28:53,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.38 vs. limit=10.0
+2024-07-28 18:28:54,498 INFO [train.py:1114] (3/4) Epoch 14, batch 9150, loss[loss=0.1684, simple_loss=0.2701, pruned_loss=0.03331, over 4807.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.272, pruned_loss=0.04659, over 935856.08 frames. ], batch size: 14, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:28:58,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189386.66666666666, ans=0.1
+2024-07-28 18:29:04,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.40 vs. limit=12.0
+2024-07-28 18:29:07,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=189413.33333333334, ans=0.125
+2024-07-28 18:29:21,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=189440.0, ans=0.0
+2024-07-28 18:29:21,202 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.08 vs. limit=15.0
+2024-07-28 18:29:23,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=189440.0, ans=0.1
+2024-07-28 18:29:25,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=189440.0, ans=22.5
+2024-07-28 18:29:26,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=189440.0, ans=0.125
+2024-07-28 18:29:27,776 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.765e+01 6.391e+01 7.042e+01 1.009e+02, threshold=1.278e+02, percent-clipped=0.0
+2024-07-28 18:29:27,810 INFO [train.py:1114] (3/4) Epoch 14, batch 9200, loss[loss=0.1957, simple_loss=0.2646, pruned_loss=0.06344, over 4842.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2721, pruned_loss=0.04689, over 937406.79 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:29:31,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.95 vs. limit=15.0
+2024-07-28 18:29:32,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.16 vs. limit=15.0
+2024-07-28 18:29:37,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=189466.66666666666, ans=0.125
+2024-07-28 18:29:38,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189466.66666666666, ans=0.125
+2024-07-28 18:29:46,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=189493.33333333334, ans=0.125
+2024-07-28 18:29:50,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=189493.33333333334, ans=0.125
+2024-07-28 18:29:51,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189493.33333333334, ans=0.1
+2024-07-28 18:30:02,097 INFO [train.py:1114] (3/4) Epoch 14, batch 9250, loss[loss=0.1803, simple_loss=0.2696, pruned_loss=0.04545, over 4640.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2723, pruned_loss=0.04698, over 938287.34 frames. ], batch size: 13, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:30:02,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=189520.0, ans=0.125
+2024-07-28 18:30:07,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.84 vs. limit=5.0
+2024-07-28 18:30:12,357 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.06 vs. limit=15.0
+2024-07-28 18:30:19,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=189546.66666666666, ans=0.2
+2024-07-28 18:30:21,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=189560.0, ans=0.125
+2024-07-28 18:30:21,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.29 vs. limit=15.0
+2024-07-28 18:30:30,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.43 vs. limit=15.0
+2024-07-28 18:30:38,952 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.722e+01 6.128e+01 6.836e+01 1.013e+02, threshold=1.226e+02, percent-clipped=0.0
+2024-07-28 18:30:38,985 INFO [train.py:1114] (3/4) Epoch 14, batch 9300, loss[loss=0.18, simple_loss=0.2629, pruned_loss=0.04857, over 4779.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2719, pruned_loss=0.04701, over 938106.72 frames. ], batch size: 12, lr: 5.27e-03, grad_scale: 32.0
+2024-07-28 18:30:40,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=189586.66666666666, ans=0.125
+2024-07-28 18:30:49,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.10 vs. limit=10.0
+2024-07-28 18:30:54,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=189613.33333333334, ans=0.5
+2024-07-28 18:30:56,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=189613.33333333334, ans=0.1
+2024-07-28 18:34:59,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189640.0, ans=0.1
+2024-07-28 18:35:05,510 INFO [train.py:1114] (3/4) Epoch 14, batch 9350, loss[loss=0.1851, simple_loss=0.2677, pruned_loss=0.05129, over 4784.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2719, pruned_loss=0.04694, over 934571.04 frames. ], batch size: 11, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:35:14,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.77 vs. limit=10.0
+2024-07-28 18:35:16,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=189666.66666666666, ans=0.0
+2024-07-28 18:35:20,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=189680.0, ans=0.1
+2024-07-28 18:35:25,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=189693.33333333334, ans=0.125
+2024-07-28 18:35:29,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=9.95 vs. limit=22.5
+2024-07-28 18:35:38,439 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.799e+01 5.700e+01 6.300e+01 7.033e+01 1.050e+02, threshold=1.260e+02, percent-clipped=0.0
+2024-07-28 18:35:38,473 INFO [train.py:1114] (3/4) Epoch 14, batch 9400, loss[loss=0.1784, simple_loss=0.278, pruned_loss=0.03937, over 4703.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2717, pruned_loss=0.04671, over 932445.50 frames. ], batch size: 13, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:35:41,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=189720.0, ans=0.125
+2024-07-28 18:35:50,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=189733.33333333334, ans=0.125
+2024-07-28 18:35:51,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=189746.66666666666, ans=0.125
+2024-07-28 18:35:54,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=189746.66666666666, ans=0.125
+2024-07-28 18:35:55,949 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.78 vs. limit=22.5
+2024-07-28 18:36:11,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=189773.33333333334, ans=0.125
+2024-07-28 18:36:12,487 INFO [train.py:1114] (3/4) Epoch 14, batch 9450, loss[loss=0.1505, simple_loss=0.2361, pruned_loss=0.03242, over 4808.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2713, pruned_loss=0.04625, over 932181.85 frames. ], batch size: 11, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:36:17,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=189786.66666666666, ans=0.025
+2024-07-28 18:36:31,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=189826.66666666666, ans=0.0
+2024-07-28 18:36:37,579 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:36:38,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=189840.0, ans=0.1
+2024-07-28 18:36:42,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=189840.0, ans=0.0
+2024-07-28 18:36:43,733 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.801e+01 5.560e+01 6.240e+01 6.918e+01 1.034e+02, threshold=1.248e+02, percent-clipped=0.0
+2024-07-28 18:36:43,767 INFO [train.py:1114] (3/4) Epoch 14, batch 9500, loss[loss=0.166, simple_loss=0.2489, pruned_loss=0.04156, over 4704.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2713, pruned_loss=0.04605, over 934294.32 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:36:58,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=189880.0, ans=0.0
+2024-07-28 18:37:03,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=189893.33333333334, ans=0.125
+2024-07-28 18:37:09,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=189906.66666666666, ans=0.0
+2024-07-28 18:37:14,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.31 vs. limit=15.0
+2024-07-28 18:37:15,311 INFO [train.py:1114] (3/4) Epoch 14, batch 9550, loss[loss=0.2065, simple_loss=0.2814, pruned_loss=0.06581, over 4772.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.272, pruned_loss=0.04685, over 931549.51 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:37:21,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=189933.33333333334, ans=0.0
+2024-07-28 18:37:23,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_na.min_abs, batch_count=189933.33333333334, ans=0.02
+2024-07-28 18:37:24,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=189933.33333333334, ans=0.125
+2024-07-28 18:37:37,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=189960.0, ans=0.2
+2024-07-28 18:37:39,277 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.83 vs. limit=15.0
+2024-07-28 18:37:42,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=189973.33333333334, ans=0.2
+2024-07-28 18:37:46,547 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.418e+01 5.773e+01 6.422e+01 7.521e+01 1.253e+02, threshold=1.284e+02, percent-clipped=1.0
+2024-07-28 18:37:46,582 INFO [train.py:1114] (3/4) Epoch 14, batch 9600, loss[loss=0.2299, simple_loss=0.3081, pruned_loss=0.0759, over 3266.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2734, pruned_loss=0.04714, over 930451.99 frames. ], batch size: 35, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:38:02,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=190013.33333333334, ans=0.0
+2024-07-28 18:38:06,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=190026.66666666666, ans=0.0
+2024-07-28 18:38:11,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=190040.0, ans=0.125
+2024-07-28 18:38:12,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=190040.0, ans=0.125
+2024-07-28 18:38:16,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=190040.0, ans=0.0
+2024-07-28 18:38:16,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=190040.0, ans=0.125
+2024-07-28 18:38:17,650 INFO [train.py:1114] (3/4) Epoch 14, batch 9650, loss[loss=0.178, simple_loss=0.2713, pruned_loss=0.0423, over 4857.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2745, pruned_loss=0.04784, over 926488.02 frames. ], batch size: 16, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:38:17,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=190053.33333333334, ans=0.09899494936611666
+2024-07-28 18:38:21,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=190053.33333333334, ans=0.0
+2024-07-28 18:38:33,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=190080.0, ans=0.2
+2024-07-28 18:38:35,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=190080.0, ans=0.2
+2024-07-28 18:38:37,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=190093.33333333334, ans=0.125
+2024-07-28 18:38:38,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=190093.33333333334, ans=0.0
+2024-07-28 18:38:49,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=190120.0, ans=0.125
+2024-07-28 18:38:49,747 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.576e+01 5.692e+01 6.329e+01 7.195e+01 1.065e+02, threshold=1.266e+02, percent-clipped=0.0
+2024-07-28 18:38:49,781 INFO [train.py:1114] (3/4) Epoch 14, batch 9700, loss[loss=0.1573, simple_loss=0.2553, pruned_loss=0.02965, over 4398.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2744, pruned_loss=0.04788, over 924728.11 frames. ], batch size: 26, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:38:51,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.04 vs. limit=15.0
+2024-07-28 18:39:02,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=190146.66666666666, ans=0.0
+2024-07-28 18:39:17,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0
+2024-07-28 18:39:17,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=190173.33333333334, ans=0.125
+2024-07-28 18:39:20,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0
+2024-07-28 18:39:20,729 INFO [train.py:1114] (3/4) Epoch 14, batch 9750, loss[loss=0.1946, simple_loss=0.2814, pruned_loss=0.05397, over 4682.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2747, pruned_loss=0.04793, over 925463.79 frames. ], batch size: 15, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:39:23,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=190186.66666666666, ans=0.1
+2024-07-28 18:39:25,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=190186.66666666666, ans=0.5
+2024-07-28 18:39:25,880 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:39:38,477 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.58 vs. limit=12.0
+2024-07-28 18:39:39,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.11 vs. limit=15.0
+2024-07-28 18:39:40,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0
+2024-07-28 18:39:43,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.69 vs. limit=15.0
+2024-07-28 18:39:43,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=190226.66666666666, ans=0.125
+2024-07-28 18:39:46,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=190240.0, ans=0.2
+2024-07-28 18:39:48,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=190240.0, ans=0.125
+2024-07-28 18:39:51,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.577e+01 6.285e+01 7.276e+01 9.873e+01, threshold=1.257e+02, percent-clipped=0.0
+2024-07-28 18:39:51,859 INFO [train.py:1114] (3/4) Epoch 14, batch 9800, loss[loss=0.1489, simple_loss=0.2497, pruned_loss=0.02409, over 4703.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2733, pruned_loss=0.04754, over 925023.57 frames. ], batch size: 12, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:39:52,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190253.33333333334, ans=0.1
+2024-07-28 18:39:54,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.81 vs. limit=6.0
+2024-07-28 18:40:01,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=190266.66666666666, ans=0.0
+2024-07-28 18:40:11,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190293.33333333334, ans=0.1
+2024-07-28 18:40:16,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=190293.33333333334, ans=0.0
+2024-07-28 18:40:18,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=190306.66666666666, ans=0.125
+2024-07-28 18:40:20,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.95 vs. limit=12.0
+2024-07-28 18:40:23,777 INFO [train.py:1114] (3/4) Epoch 14, batch 9850, loss[loss=0.1908, simple_loss=0.2834, pruned_loss=0.04909, over 4895.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2737, pruned_loss=0.04755, over 927336.56 frames. ], batch size: 15, lr: 5.26e-03, grad_scale: 32.0
+2024-07-28 18:40:28,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=190320.0, ans=0.125
+2024-07-28 18:40:41,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=190360.0, ans=0.0
+2024-07-28 18:40:48,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.13 vs. limit=15.0
+2024-07-28 18:40:50,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=190373.33333333334, ans=0.0
+2024-07-28 18:40:51,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=190373.33333333334, ans=0.0
+2024-07-28 18:40:54,571 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.802e+01 6.503e+01 7.443e+01 1.103e+02, threshold=1.301e+02, percent-clipped=0.0
+2024-07-28 18:40:54,605 INFO [train.py:1114] (3/4) Epoch 14, batch 9900, loss[loss=0.1902, simple_loss=0.2838, pruned_loss=0.04832, over 4823.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2744, pruned_loss=0.04787, over 926610.29 frames. ], batch size: 16, lr: 5.25e-03, grad_scale: 32.0
+2024-07-28 18:40:56,529 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:40:59,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=190386.66666666666, ans=0.125
+2024-07-28 18:41:01,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=190400.0, ans=0.0
+2024-07-28 18:41:09,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=190413.33333333334, ans=0.125
+2024-07-28 18:41:14,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=190426.66666666666, ans=0.1
+2024-07-28 18:41:25,151 INFO [train.py:1114] (3/4) Epoch 14, batch 9950, loss[loss=0.1389, simple_loss=0.2203, pruned_loss=0.02874, over 4475.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2735, pruned_loss=0.04798, over 928641.81 frames. ], batch size: 10, lr: 5.25e-03, grad_scale: 64.0
+2024-07-28 18:41:25,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=190453.33333333334, ans=0.125
+2024-07-28 18:41:28,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=190453.33333333334, ans=0.07
+2024-07-28 18:41:36,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=190466.66666666666, ans=0.0
+2024-07-28 18:41:37,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=190480.0, ans=0.125
+2024-07-28 18:41:40,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.88 vs. limit=15.0
+2024-07-28 18:41:51,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=190506.66666666666, ans=0.0
+2024-07-28 18:41:52,862 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.07 vs. limit=15.0
+2024-07-28 18:41:55,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.10 vs. limit=22.5
+2024-07-28 18:41:56,000 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.155e+01 6.024e+01 6.660e+01 7.558e+01 1.184e+02, threshold=1.332e+02, percent-clipped=0.0
+2024-07-28 18:41:56,034 INFO [train.py:1114] (3/4) Epoch 14, batch 10000, loss[loss=0.2171, simple_loss=0.2976, pruned_loss=0.06836, over 4631.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2762, pruned_loss=0.04868, over 926015.78 frames. ], batch size: 16, lr: 5.25e-03, grad_scale: 64.0
+2024-07-28 18:42:01,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=190520.0, ans=0.0
+2024-07-28 18:42:18,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=190560.0, ans=0.2
+2024-07-28 18:42:21,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.58 vs. limit=6.0
+2024-07-28 18:42:28,096 INFO [train.py:1114] (3/4) Epoch 14, batch 10050, loss[loss=0.2207, simple_loss=0.2952, pruned_loss=0.07314, over 3314.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2804, pruned_loss=0.05072, over 913938.61 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0
+2024-07-28 18:42:28,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.max_positive, batch_count=190586.66666666666, ans=0.95
+2024-07-28 18:42:40,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=190600.0, ans=0.125
+2024-07-28 18:42:40,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=190600.0, ans=0.125
+2024-07-28 18:42:47,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=190613.33333333334, ans=0.125
+2024-07-28 18:42:48,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=190626.66666666666, ans=0.125
+2024-07-28 18:42:51,689 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.66 vs. limit=22.5
+2024-07-28 18:42:54,751 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.78 vs. limit=12.0
+2024-07-28 18:42:56,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=190640.0, ans=0.125
+2024-07-28 18:43:02,249 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.109e+01 6.099e+01 7.025e+01 7.577e+01 1.043e+02, threshold=1.405e+02, percent-clipped=0.0
+2024-07-28 18:43:02,283 INFO [train.py:1114] (3/4) Epoch 14, batch 10100, loss[loss=0.205, simple_loss=0.2968, pruned_loss=0.05667, over 3316.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2856, pruned_loss=0.05581, over 863104.17 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0
+2024-07-28 18:43:04,467 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0
+2024-07-28 18:43:13,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=190666.66666666666, ans=0.025
+2024-07-28 18:43:13,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=190666.66666666666, ans=0.09899494936611666
+2024-07-28 18:43:15,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=190680.0, ans=0.0
+2024-07-28 18:43:16,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=190680.0, ans=0.125
+2024-07-28 18:43:24,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=190693.33333333334, ans=0.2
+2024-07-28 18:43:26,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=190693.33333333334, ans=0.0
+2024-07-28 18:43:34,823 INFO [train.py:1114] (3/4) Epoch 14, batch 10150, loss[loss=0.2217, simple_loss=0.2962, pruned_loss=0.07361, over 3456.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2894, pruned_loss=0.05963, over 823452.43 frames. ], batch size: 36, lr: 5.25e-03, grad_scale: 64.0
+2024-07-28 18:43:34,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=190720.0, ans=0.0
+2024-07-28 18:43:41,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=190733.33333333334, ans=0.125
+2024-07-28 18:43:42,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=190733.33333333334, ans=0.2
+2024-07-28 18:44:02,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190773.33333333334, ans=0.0
+2024-07-28 18:44:05,257 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=5.34 vs. limit=12.0
+2024-07-28 18:44:06,124 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.761e+01 6.519e+01 6.916e+01 7.399e+01 9.914e+01, threshold=1.383e+02, percent-clipped=0.0
+2024-07-28 18:44:06,158 INFO [train.py:1114] (3/4) Epoch 14, batch 10200, loss[loss=0.2383, simple_loss=0.3014, pruned_loss=0.08763, over 3208.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2921, pruned_loss=0.06305, over 790287.14 frames. ], batch size: 35, lr: 5.25e-03, grad_scale: 64.0
+2024-07-28 18:45:00,513 INFO [train.py:1114] (3/4) Epoch 15, batch 0, loss[loss=0.1325, simple_loss=0.2161, pruned_loss=0.0245, over 4855.00 frames. ], tot_loss[loss=0.1325, simple_loss=0.2161, pruned_loss=0.0245, over 4855.00 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0
+2024-07-28 18:45:00,514 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 18:45:12,053 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1655, simple_loss=0.2703, pruned_loss=0.03031, over 944034.00 frames.
+2024-07-28 18:45:12,054 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 18:45:12,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=190816.0, ans=0.2
+2024-07-28 18:45:13,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=190816.0, ans=0.025
+2024-07-28 18:45:17,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=190816.0, ans=0.125
+2024-07-28 18:45:25,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=190842.66666666666, ans=0.07
+2024-07-28 18:45:25,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.70 vs. limit=22.5
+2024-07-28 18:45:29,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=190842.66666666666, ans=0.125
+2024-07-28 18:45:44,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=190869.33333333334, ans=0.2
+2024-07-28 18:45:47,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=190882.66666666666, ans=0.09899494936611666
+2024-07-28 18:45:49,206 INFO [train.py:1114] (3/4) Epoch 15, batch 50, loss[loss=0.1619, simple_loss=0.2541, pruned_loss=0.03483, over 4625.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2747, pruned_loss=0.04698, over 207257.17 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0
+2024-07-28 18:45:53,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=190882.66666666666, ans=0.0
+2024-07-28 18:46:01,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=190909.33333333334, ans=0.025
+2024-07-28 18:46:07,817 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.277e+01 5.683e+01 6.465e+01 7.180e+01 1.067e+02, threshold=1.293e+02, percent-clipped=0.0
+2024-07-28 18:46:24,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=190922.66666666666, ans=0.125
+2024-07-28 18:46:33,570 INFO [train.py:1114] (3/4) Epoch 15, batch 100, loss[loss=0.1718, simple_loss=0.2556, pruned_loss=0.04397, over 4650.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2752, pruned_loss=0.04732, over 365789.30 frames. ], batch size: 12, lr: 5.07e-03, grad_scale: 64.0
+2024-07-28 18:46:35,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=190949.33333333334, ans=0.025
+2024-07-28 18:46:40,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.07 vs. limit=22.5
+2024-07-28 18:46:41,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=190962.66666666666, ans=0.2
+2024-07-28 18:46:59,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.08 vs. limit=22.5
+2024-07-28 18:47:07,598 INFO [train.py:1114] (3/4) Epoch 15, batch 150, loss[loss=0.1736, simple_loss=0.2614, pruned_loss=0.04296, over 4620.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2709, pruned_loss=0.04558, over 494254.57 frames. ], batch size: 11, lr: 5.07e-03, grad_scale: 64.0
+2024-07-28 18:47:09,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191016.0, ans=0.0
+2024-07-28 18:47:25,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=191042.66666666666, ans=0.2
+2024-07-28 18:47:28,156 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.483e+01 5.436e+01 5.988e+01 6.579e+01 9.241e+01, threshold=1.198e+02, percent-clipped=0.0
+2024-07-28 18:47:28,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=191056.0, ans=0.2
+2024-07-28 18:47:42,919 INFO [train.py:1114] (3/4) Epoch 15, batch 200, loss[loss=0.1835, simple_loss=0.2615, pruned_loss=0.05276, over 4524.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2702, pruned_loss=0.0457, over 594112.36 frames. ], batch size: 21, lr: 5.07e-03, grad_scale: 64.0
+2024-07-28 18:47:44,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=191082.66666666666, ans=0.0
+2024-07-28 18:47:51,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.49 vs. limit=10.0
+2024-07-28 18:47:56,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191096.0, ans=0.1
+2024-07-28 18:48:01,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.30 vs. limit=6.0
+2024-07-28 18:48:16,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_na.min_abs, batch_count=191136.0, ans=0.02
+2024-07-28 18:48:17,637 INFO [train.py:1114] (3/4) Epoch 15, batch 250, loss[loss=0.2046, simple_loss=0.2972, pruned_loss=0.05599, over 4632.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2715, pruned_loss=0.04653, over 670752.99 frames. ], batch size: 16, lr: 5.07e-03, grad_scale: 64.0
+2024-07-28 18:48:35,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.62 vs. limit=15.0
+2024-07-28 18:48:36,566 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.893e+01 6.659e+01 7.264e+01 1.310e+02, threshold=1.332e+02, percent-clipped=1.0
+2024-07-28 18:48:38,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.29 vs. limit=15.0
+2024-07-28 18:48:41,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191189.33333333334, ans=0.0
+2024-07-28 18:48:47,511 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.0
+2024-07-28 18:48:48,049 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 18:48:48,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=191202.66666666666, ans=0.125
+2024-07-28 18:48:51,302 INFO [train.py:1114] (3/4) Epoch 15, batch 300, loss[loss=0.2216, simple_loss=0.3143, pruned_loss=0.06446, over 4805.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.271, pruned_loss=0.04628, over 730290.31 frames. ], batch size: 15, lr: 5.06e-03, grad_scale: 64.0
+2024-07-28 18:48:53,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191216.0, ans=0.1
+2024-07-28 18:48:56,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=5.98 vs. limit=15.0
+2024-07-28 18:49:07,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191242.66666666666, ans=0.125
+2024-07-28 18:49:14,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=191256.0, ans=0.5
+2024-07-28 18:49:22,130 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0
+2024-07-28 18:49:23,740 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.57 vs. limit=10.0
+2024-07-28 18:49:26,579 INFO [train.py:1114] (3/4) Epoch 15, batch 350, loss[loss=0.1584, simple_loss=0.2357, pruned_loss=0.04056, over 4928.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.271, pruned_loss=0.04603, over 776286.03 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 64.0
+2024-07-28 18:49:44,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191296.0, ans=0.1
+2024-07-28 18:49:45,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=191309.33333333334, ans=0.125
+2024-07-28 18:49:47,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191309.33333333334, ans=0.1
+2024-07-28 18:49:56,401 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.550e+01 6.008e+01 7.215e+01 1.087e+02, threshold=1.202e+02, percent-clipped=0.0
+2024-07-28 18:50:06,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191336.0, ans=0.125
+2024-07-28 18:50:10,472 INFO [train.py:1114] (3/4) Epoch 15, batch 400, loss[loss=0.1949, simple_loss=0.2798, pruned_loss=0.05499, over 4689.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2709, pruned_loss=0.04579, over 813567.36 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:50:12,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=191349.33333333334, ans=0.05
+2024-07-28 18:50:16,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=191362.66666666666, ans=0.07
+2024-07-28 18:50:44,705 INFO [train.py:1114] (3/4) Epoch 15, batch 450, loss[loss=0.1934, simple_loss=0.2895, pruned_loss=0.04864, over 4638.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2725, pruned_loss=0.04664, over 838712.92 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:50:49,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.23 vs. limit=15.0
+2024-07-28 18:50:51,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=191429.33333333334, ans=15.0
+2024-07-28 18:50:55,248 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.37 vs. limit=22.5
+2024-07-28 18:51:03,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.595e+01 6.045e+01 6.958e+01 9.344e+01, threshold=1.209e+02, percent-clipped=0.0
+2024-07-28 18:51:13,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=191469.33333333334, ans=0.125
+2024-07-28 18:51:18,076 INFO [train.py:1114] (3/4) Epoch 15, batch 500, loss[loss=0.2099, simple_loss=0.3131, pruned_loss=0.0533, over 4669.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2709, pruned_loss=0.04565, over 861110.53 frames. ], batch size: 15, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:51:26,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=191496.0, ans=0.125
+2024-07-28 18:51:29,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191496.0, ans=0.1
+2024-07-28 18:51:32,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=191496.0, ans=0.125
+2024-07-28 18:51:33,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.61 vs. limit=15.0
+2024-07-28 18:51:34,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191509.33333333334, ans=0.1
+2024-07-28 18:51:35,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191509.33333333334, ans=0.125
+2024-07-28 18:51:43,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191522.66666666666, ans=0.0
+2024-07-28 18:51:45,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff3.min_abs, batch_count=191522.66666666666, ans=0.2
+2024-07-28 18:51:46,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.24 vs. limit=22.5
+2024-07-28 18:51:47,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-07-28 18:51:49,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=191536.0, ans=0.2
+2024-07-28 18:51:54,346 INFO [train.py:1114] (3/4) Epoch 15, batch 550, loss[loss=0.2063, simple_loss=0.3072, pruned_loss=0.05268, over 4622.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2707, pruned_loss=0.04548, over 877111.58 frames. ], batch size: 17, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:51:56,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=191549.33333333334, ans=0.125
+2024-07-28 18:51:56,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=191549.33333333334, ans=0.025
+2024-07-28 18:52:13,587 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.498e+01 5.942e+01 6.485e+01 9.965e+01, threshold=1.188e+02, percent-clipped=0.0
+2024-07-28 18:52:14,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=191589.33333333334, ans=0.0
+2024-07-28 18:52:18,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.22 vs. limit=6.0
+2024-07-28 18:52:27,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=191616.0, ans=0.125
+2024-07-28 18:52:27,550 INFO [train.py:1114] (3/4) Epoch 15, batch 600, loss[loss=0.1813, simple_loss=0.2773, pruned_loss=0.04267, over 4634.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.271, pruned_loss=0.04525, over 891898.16 frames. ], batch size: 16, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:52:30,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=191616.0, ans=0.025
+2024-07-28 18:52:37,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=191629.33333333334, ans=0.0
+2024-07-28 18:52:39,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=191629.33333333334, ans=0.125
+2024-07-28 18:52:40,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191642.66666666666, ans=0.1
+2024-07-28 18:52:41,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=191642.66666666666, ans=0.0
+2024-07-28 18:52:42,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=191642.66666666666, ans=0.0
+2024-07-28 18:52:50,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.40 vs. limit=15.0
+2024-07-28 18:52:51,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.67 vs. limit=15.0
+2024-07-28 18:53:03,041 INFO [train.py:1114] (3/4) Epoch 15, batch 650, loss[loss=0.1938, simple_loss=0.2783, pruned_loss=0.05465, over 4763.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2695, pruned_loss=0.0448, over 903662.01 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:53:17,234 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=191709.33333333334, ans=0.125
+2024-07-28 18:53:20,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=191709.33333333334, ans=0.125
+2024-07-28 18:53:21,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=191709.33333333334, ans=0.125
+2024-07-28 18:53:22,452 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.431e+01 6.039e+01 6.829e+01 9.137e+01, threshold=1.208e+02, percent-clipped=0.0
+2024-07-28 18:53:37,683 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.30 vs. limit=6.0
+2024-07-28 18:53:38,719 INFO [train.py:1114] (3/4) Epoch 15, batch 700, loss[loss=0.1844, simple_loss=0.2736, pruned_loss=0.04765, over 4646.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2699, pruned_loss=0.04504, over 911546.86 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:53:42,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=191749.33333333334, ans=0.125
+2024-07-28 18:53:45,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=191762.66666666666, ans=0.125
+2024-07-28 18:53:45,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=191762.66666666666, ans=0.125
+2024-07-28 18:53:48,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=191762.66666666666, ans=0.0
+2024-07-28 18:53:54,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191776.0, ans=0.1
+2024-07-28 18:53:55,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=191776.0, ans=0.5
+2024-07-28 18:53:56,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=191776.0, ans=0.2
+2024-07-28 18:53:58,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=191789.33333333334, ans=0.0
+2024-07-28 18:54:02,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=191789.33333333334, ans=0.125
+2024-07-28 18:54:05,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.34 vs. limit=15.0
+2024-07-28 18:54:10,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=191802.66666666666, ans=0.0
+2024-07-28 18:54:11,821 INFO [train.py:1114] (3/4) Epoch 15, batch 750, loss[loss=0.1853, simple_loss=0.2701, pruned_loss=0.05019, over 4693.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2698, pruned_loss=0.04482, over 917855.96 frames. ], batch size: 13, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:54:19,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=191829.33333333334, ans=0.1
+2024-07-28 18:54:22,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=191829.33333333334, ans=0.0
+2024-07-28 18:54:29,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=191842.66666666666, ans=0.1
+2024-07-28 18:54:31,054 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.222e+01 5.661e+01 6.305e+01 7.556e+01 1.211e+02, threshold=1.261e+02, percent-clipped=1.0
+2024-07-28 18:54:48,290 INFO [train.py:1114] (3/4) Epoch 15, batch 800, loss[loss=0.1632, simple_loss=0.2443, pruned_loss=0.04106, over 4836.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2692, pruned_loss=0.04485, over 922806.75 frames. ], batch size: 12, lr: 5.06e-03, grad_scale: 32.0
+2024-07-28 18:54:48,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=191882.66666666666, ans=0.0
+2024-07-28 18:54:51,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=191882.66666666666, ans=0.1
+2024-07-28 18:55:23,937 INFO [train.py:1114] (3/4) Epoch 15, batch 850, loss[loss=0.1653, simple_loss=0.2565, pruned_loss=0.03705, over 4656.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2692, pruned_loss=0.04502, over 927375.37 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0
+2024-07-28 18:55:39,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=191976.0, ans=0.0
+2024-07-28 18:55:41,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=191976.0, ans=0.0
+2024-07-28 18:55:43,398 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.257e+01 5.494e+01 6.079e+01 6.525e+01 1.058e+02, threshold=1.216e+02, percent-clipped=0.0
+2024-07-28 19:01:39,934 INFO [train.py:1114] (3/4) Epoch 15, batch 900, loss[loss=0.1661, simple_loss=0.2626, pruned_loss=0.03482, over 4860.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2709, pruned_loss=0.04606, over 928158.96 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0
+2024-07-28 19:01:40,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=192016.0, ans=0.0
+2024-07-28 19:02:08,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.44 vs. limit=15.0
+2024-07-28 19:02:10,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=192069.33333333334, ans=0.05
+2024-07-28 19:02:13,186 INFO [train.py:1114] (3/4) Epoch 15, batch 950, loss[loss=0.2028, simple_loss=0.2823, pruned_loss=0.06168, over 4775.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.271, pruned_loss=0.04606, over 930106.96 frames. ], batch size: 12, lr: 5.05e-03, grad_scale: 32.0
+2024-07-28 19:02:15,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=192082.66666666666, ans=0.125
+2024-07-28 19:02:20,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192096.0, ans=0.125
+2024-07-28 19:02:34,367 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.707e+01 5.740e+01 6.341e+01 7.414e+01 2.683e+02, threshold=1.268e+02, percent-clipped=1.0
+2024-07-28 19:02:41,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=192136.0, ans=0.025
+2024-07-28 19:02:46,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.70 vs. limit=12.0
+2024-07-28 19:02:48,489 INFO [train.py:1114] (3/4) Epoch 15, batch 1000, loss[loss=0.1908, simple_loss=0.2821, pruned_loss=0.04975, over 4960.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2716, pruned_loss=0.0463, over 930138.20 frames. 
], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:02:50,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192149.33333333334, ans=0.125 +2024-07-28 19:03:00,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=192162.66666666666, ans=0.5 +2024-07-28 19:03:07,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=192176.0, ans=0.125 +2024-07-28 19:03:08,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=192189.33333333334, ans=0.125 +2024-07-28 19:03:09,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=192189.33333333334, ans=0.125 +2024-07-28 19:03:22,060 INFO [train.py:1114] (3/4) Epoch 15, batch 1050, loss[loss=0.1862, simple_loss=0.2809, pruned_loss=0.04576, over 4870.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2706, pruned_loss=0.04596, over 932512.83 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:03:41,201 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.436e+01 6.028e+01 6.736e+01 8.653e+01, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:03:50,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192269.33333333334, ans=0.1 +2024-07-28 19:03:52,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=192269.33333333334, ans=10.0 +2024-07-28 19:03:55,267 INFO [train.py:1114] (3/4) Epoch 15, batch 1100, loss[loss=0.1508, simple_loss=0.2428, pruned_loss=0.02939, over 4907.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2707, pruned_loss=0.04606, over 934877.97 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:04:03,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=192296.0, ans=0.2 +2024-07-28 19:04:05,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=192296.0, ans=0.125 +2024-07-28 19:04:14,364 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=192309.33333333334, ans=0.0 +2024-07-28 19:04:30,214 INFO [train.py:1114] (3/4) Epoch 15, batch 1150, loss[loss=0.1587, simple_loss=0.2513, pruned_loss=0.03306, over 4908.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2711, pruned_loss=0.04626, over 934367.64 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:04:32,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=192349.33333333334, ans=0.0 +2024-07-28 19:04:34,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=192349.33333333334, ans=0.2 +2024-07-28 19:04:36,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=192362.66666666666, ans=0.0 +2024-07-28 19:04:37,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.78 vs. 
limit=5.0 +2024-07-28 19:04:39,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=192362.66666666666, ans=0.0 +2024-07-28 19:04:42,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=192362.66666666666, ans=10.0 +2024-07-28 19:04:51,446 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.625e+01 6.208e+01 7.192e+01 1.002e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 19:05:05,558 INFO [train.py:1114] (3/4) Epoch 15, batch 1200, loss[loss=0.1934, simple_loss=0.2854, pruned_loss=0.05072, over 4877.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2711, pruned_loss=0.04607, over 933542.03 frames. ], batch size: 14, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:12,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=192429.33333333334, ans=0.125 +2024-07-28 19:05:12,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.07 vs. limit=22.5 +2024-07-28 19:05:13,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=192429.33333333334, ans=0.09899494936611666 +2024-07-28 19:05:28,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=192456.0, ans=0.2 +2024-07-28 19:05:30,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.30 vs. limit=15.0 +2024-07-28 19:05:32,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=192469.33333333334, ans=10.0 +2024-07-28 19:05:38,623 INFO [train.py:1114] (3/4) Epoch 15, batch 1250, loss[loss=0.2303, simple_loss=0.312, pruned_loss=0.07431, over 4807.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2707, pruned_loss=0.04561, over 938000.81 frames. ], batch size: 15, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:05:42,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=192482.66666666666, ans=0.2 +2024-07-28 19:05:46,426 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=7.59 vs. limit=12.0 +2024-07-28 19:05:54,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192509.33333333334, ans=0.1 +2024-07-28 19:05:55,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=192509.33333333334, ans=0.125 +2024-07-28 19:05:57,774 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.598e+01 6.184e+01 7.240e+01 1.147e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-28 19:05:59,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=192522.66666666666, ans=0.1 +2024-07-28 19:06:02,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.82 vs. 
limit=15.0 +2024-07-28 19:06:14,164 INFO [train.py:1114] (3/4) Epoch 15, batch 1300, loss[loss=0.2421, simple_loss=0.3265, pruned_loss=0.07889, over 4703.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2705, pruned_loss=0.04549, over 939356.24 frames. ], batch size: 19, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:17,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=192549.33333333334, ans=0.05 +2024-07-28 19:06:17,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.74 vs. limit=10.0 +2024-07-28 19:06:21,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=192562.66666666666, ans=0.0 +2024-07-28 19:06:22,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=192562.66666666666, ans=0.1 +2024-07-28 19:06:27,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.46 vs. limit=12.0 +2024-07-28 19:06:42,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=192602.66666666666, ans=0.125 +2024-07-28 19:06:44,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=192602.66666666666, ans=0.0 +2024-07-28 19:06:47,775 INFO [train.py:1114] (3/4) Epoch 15, batch 1350, loss[loss=0.2002, simple_loss=0.2789, pruned_loss=0.06075, over 4763.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04567, over 941400.44 frames. ], batch size: 13, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:06:51,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=192616.0, ans=0.125 +2024-07-28 19:06:54,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=192629.33333333334, ans=0.04949747468305833 +2024-07-28 19:06:57,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=192629.33333333334, ans=0.125 +2024-07-28 19:07:02,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=192642.66666666666, ans=0.0 +2024-07-28 19:07:02,739 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:07:04,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-07-28 19:07:07,203 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.559e+01 6.171e+01 7.538e+01 1.379e+02, threshold=1.234e+02, percent-clipped=1.0 +2024-07-28 19:07:10,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=192656.0, ans=0.125 +2024-07-28 19:07:13,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.07 vs. 
limit=12.0 +2024-07-28 19:07:21,209 INFO [train.py:1114] (3/4) Epoch 15, batch 1400, loss[loss=0.1497, simple_loss=0.2279, pruned_loss=0.03569, over 4691.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2699, pruned_loss=0.04552, over 942992.73 frames. ], batch size: 11, lr: 5.05e-03, grad_scale: 32.0 +2024-07-28 19:07:31,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.39 vs. limit=15.0 +2024-07-28 19:07:32,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=192696.0, ans=0.0 +2024-07-28 19:07:32,321 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.42 vs. limit=15.0 +2024-07-28 19:07:39,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=192709.33333333334, ans=0.07 +2024-07-28 19:07:41,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=192722.66666666666, ans=0.0 +2024-07-28 19:07:47,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.whiten.whitening_limit, batch_count=192736.0, ans=12.0 +2024-07-28 19:07:49,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=192736.0, ans=0.125 +2024-07-28 19:07:56,458 INFO [train.py:1114] (3/4) Epoch 15, batch 1450, loss[loss=0.1945, simple_loss=0.2956, pruned_loss=0.04672, over 4687.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2706, pruned_loss=0.04539, over 942795.01 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:07:57,844 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=192749.33333333334, ans=0.0 +2024-07-28 19:08:07,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=192762.66666666666, ans=0.125 +2024-07-28 19:08:10,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=192776.0, ans=0.2 +2024-07-28 19:08:15,576 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.624e+01 5.633e+01 5.995e+01 6.598e+01 8.860e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 19:08:23,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192802.66666666666, ans=0.1 +2024-07-28 19:08:26,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.75 vs. limit=15.0 +2024-07-28 19:08:29,691 INFO [train.py:1114] (3/4) Epoch 15, batch 1500, loss[loss=0.1994, simple_loss=0.3024, pruned_loss=0.04823, over 4802.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2706, pruned_loss=0.04526, over 942522.24 frames. 
], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:08:33,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=192816.0, ans=0.0 +2024-07-28 19:08:46,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=192842.66666666666, ans=10.0 +2024-07-28 19:09:03,682 INFO [train.py:1114] (3/4) Epoch 15, batch 1550, loss[loss=0.1737, simple_loss=0.2684, pruned_loss=0.03955, over 4905.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04583, over 938730.54 frames. ], batch size: 15, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:05,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=192882.66666666666, ans=0.0 +2024-07-28 19:09:16,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=192909.33333333334, ans=0.125 +2024-07-28 19:09:23,057 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.711e+01 6.284e+01 7.073e+01 1.043e+02, threshold=1.257e+02, percent-clipped=0.0 +2024-07-28 19:09:27,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=192922.66666666666, ans=0.025 +2024-07-28 19:09:28,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=192922.66666666666, ans=0.0 +2024-07-28 19:09:28,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=192922.66666666666, ans=0.0 +2024-07-28 19:09:39,405 INFO [train.py:1114] (3/4) Epoch 15, batch 1600, loss[loss=0.1598, simple_loss=0.2585, pruned_loss=0.03055, over 4876.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2715, pruned_loss=0.0462, over 937137.01 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:09:40,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=192949.33333333334, ans=0.1 +2024-07-28 19:09:59,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=192989.33333333334, ans=0.125 +2024-07-28 19:10:14,699 INFO [train.py:1114] (3/4) Epoch 15, batch 1650, loss[loss=0.1889, simple_loss=0.2903, pruned_loss=0.04375, over 4657.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2708, pruned_loss=0.04622, over 937174.12 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:10:26,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=193029.33333333334, ans=0.04949747468305833 +2024-07-28 19:10:28,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=5.41 vs. 
limit=5.0 +2024-07-28 19:10:30,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=193042.66666666666, ans=0.0 +2024-07-28 19:10:30,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=193042.66666666666, ans=0.125 +2024-07-28 19:10:33,849 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.043e+01 5.642e+01 6.032e+01 7.016e+01 1.079e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 19:10:36,959 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.53 vs. limit=10.0 +2024-07-28 19:10:42,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=193069.33333333334, ans=0.125 +2024-07-28 19:10:47,878 INFO [train.py:1114] (3/4) Epoch 15, batch 1700, loss[loss=0.1732, simple_loss=0.2605, pruned_loss=0.04298, over 4706.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2706, pruned_loss=0.04582, over 938786.37 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:04,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=193109.33333333334, ans=0.025 +2024-07-28 19:11:08,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=193122.66666666666, ans=0.125 +2024-07-28 19:11:21,434 INFO [train.py:1114] (3/4) Epoch 15, batch 1750, loss[loss=0.1828, simple_loss=0.2551, pruned_loss=0.05531, over 4808.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2703, pruned_loss=0.04578, over 939828.48 frames. ], batch size: 11, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:24,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=193149.33333333334, ans=0.0 +2024-07-28 19:11:28,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=193162.66666666666, ans=0.0 +2024-07-28 19:11:28,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=193162.66666666666, ans=0.0 +2024-07-28 19:11:42,913 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.964e+01 5.710e+01 6.387e+01 7.487e+01 1.072e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-28 19:11:52,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=193202.66666666666, ans=15.0 +2024-07-28 19:11:52,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=193202.66666666666, ans=0.125 +2024-07-28 19:11:56,803 INFO [train.py:1114] (3/4) Epoch 15, batch 1800, loss[loss=0.1871, simple_loss=0.2773, pruned_loss=0.04845, over 4631.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2699, pruned_loss=0.04562, over 940310.07 frames. 
], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:11:58,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=193216.0, ans=0.0 +2024-07-28 19:11:58,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.61 vs. limit=22.5 +2024-07-28 19:12:03,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=193229.33333333334, ans=0.0 +2024-07-28 19:12:11,698 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.10 vs. limit=15.0 +2024-07-28 19:12:13,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193242.66666666666, ans=0.125 +2024-07-28 19:12:19,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193256.0, ans=0.1 +2024-07-28 19:12:30,409 INFO [train.py:1114] (3/4) Epoch 15, batch 1850, loss[loss=0.1818, simple_loss=0.2674, pruned_loss=0.04807, over 4823.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2694, pruned_loss=0.04549, over 940638.70 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:12:32,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=12.0 +2024-07-28 19:12:35,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193282.66666666666, ans=0.125 +2024-07-28 19:12:49,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193309.33333333334, ans=0.1 +2024-07-28 19:12:50,210 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.579e+01 6.374e+01 7.062e+01 1.422e+02, threshold=1.275e+02, percent-clipped=2.0 +2024-07-28 19:13:22,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=193336.0, ans=0.035 +2024-07-28 19:13:24,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=193336.0, ans=0.0 +2024-07-28 19:13:26,483 INFO [train.py:1114] (3/4) Epoch 15, batch 1900, loss[loss=0.1974, simple_loss=0.2826, pruned_loss=0.05606, over 4683.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2706, pruned_loss=0.04627, over 942108.21 frames. ], batch size: 14, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:13:45,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=193376.0, ans=0.2 +2024-07-28 19:13:48,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=193389.33333333334, ans=10.0 +2024-07-28 19:13:55,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=193402.66666666666, ans=0.125 +2024-07-28 19:14:01,209 INFO [train.py:1114] (3/4) Epoch 15, batch 1950, loss[loss=0.1424, simple_loss=0.237, pruned_loss=0.02389, over 4895.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2716, pruned_loss=0.04612, over 943924.81 frames. 
], batch size: 13, lr: 5.04e-03, grad_scale: 32.0 +2024-07-28 19:14:18,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=193442.66666666666, ans=0.0 +2024-07-28 19:14:20,683 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.328e+01 5.675e+01 6.115e+01 6.828e+01 9.814e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-28 19:14:20,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=193456.0, ans=0.125 +2024-07-28 19:14:29,117 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.20 vs. limit=15.0 +2024-07-28 19:14:34,641 INFO [train.py:1114] (3/4) Epoch 15, batch 2000, loss[loss=0.1549, simple_loss=0.2475, pruned_loss=0.03118, over 4797.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2719, pruned_loss=0.04616, over 941385.04 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:14:41,928 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.17 vs. limit=6.0 +2024-07-28 19:14:43,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=193496.0, ans=0.1 +2024-07-28 19:14:52,483 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:15:08,817 INFO [train.py:1114] (3/4) Epoch 15, batch 2050, loss[loss=0.1465, simple_loss=0.2291, pruned_loss=0.03193, over 4609.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2709, pruned_loss=0.04593, over 939646.96 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:11,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=193549.33333333334, ans=0.125 +2024-07-28 19:15:21,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=193562.66666666666, ans=0.125 +2024-07-28 19:15:26,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=193576.0, ans=0.125 +2024-07-28 19:15:31,316 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.698e+01 6.282e+01 7.137e+01 1.040e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 19:15:36,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=193589.33333333334, ans=0.0 +2024-07-28 19:15:40,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.31 vs. limit=22.5 +2024-07-28 19:15:44,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=193602.66666666666, ans=0.1 +2024-07-28 19:15:45,949 INFO [train.py:1114] (3/4) Epoch 15, batch 2100, loss[loss=0.1516, simple_loss=0.2437, pruned_loss=0.02972, over 4758.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2695, pruned_loss=0.04514, over 941138.58 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:15:46,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.22 vs. 
limit=15.0 +2024-07-28 19:15:56,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=193629.33333333334, ans=0.125 +2024-07-28 19:16:03,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=193642.66666666666, ans=0.0 +2024-07-28 19:16:05,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193642.66666666666, ans=0.0 +2024-07-28 19:16:07,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=193656.0, ans=0.125 +2024-07-28 19:16:09,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=193656.0, ans=0.0 +2024-07-28 19:16:18,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=193669.33333333334, ans=0.0 +2024-07-28 19:16:20,812 INFO [train.py:1114] (3/4) Epoch 15, batch 2150, loss[loss=0.1879, simple_loss=0.289, pruned_loss=0.04341, over 4884.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2694, pruned_loss=0.04534, over 944456.82 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:26,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=193682.66666666666, ans=0.0 +2024-07-28 19:16:30,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=193696.0, ans=0.2 +2024-07-28 19:16:32,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193696.0, ans=0.1 +2024-07-28 19:16:35,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193709.33333333334, ans=0.0 +2024-07-28 19:16:37,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=193709.33333333334, ans=0.125 +2024-07-28 19:16:39,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=193709.33333333334, ans=0.0 +2024-07-28 19:16:40,103 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.589e+01 6.207e+01 7.234e+01 9.865e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 19:16:40,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=193722.66666666666, ans=0.125 +2024-07-28 19:16:45,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=193722.66666666666, ans=0.125 +2024-07-28 19:16:54,162 INFO [train.py:1114] (3/4) Epoch 15, batch 2200, loss[loss=0.1916, simple_loss=0.2908, pruned_loss=0.04626, over 4800.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2696, pruned_loss=0.04548, over 943223.48 frames. 
], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:16:54,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193749.33333333334, ans=0.1 +2024-07-28 19:16:58,933 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:17:01,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=193762.66666666666, ans=0.0 +2024-07-28 19:17:04,365 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.36 vs. limit=15.0 +2024-07-28 19:17:04,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.52 vs. limit=15.0 +2024-07-28 19:17:06,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.08 vs. limit=22.5 +2024-07-28 19:17:07,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193776.0, ans=0.1 +2024-07-28 19:17:33,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=193802.66666666666, ans=0.125 +2024-07-28 19:17:34,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=193802.66666666666, ans=0.125 +2024-07-28 19:17:43,138 INFO [train.py:1114] (3/4) Epoch 15, batch 2250, loss[loss=0.1918, simple_loss=0.2857, pruned_loss=0.04897, over 4692.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.04584, over 941854.31 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:17:54,793 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:17:59,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=193842.66666666666, ans=0.025 +2024-07-28 19:18:02,476 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.434e+01 5.887e+01 6.714e+01 1.189e+02, threshold=1.177e+02, percent-clipped=0.0 +2024-07-28 19:18:10,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=193869.33333333334, ans=0.0 +2024-07-28 19:18:13,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=193869.33333333334, ans=0.125 +2024-07-28 19:18:16,464 INFO [train.py:1114] (3/4) Epoch 15, batch 2300, loss[loss=0.1723, simple_loss=0.2654, pruned_loss=0.0396, over 4931.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2689, pruned_loss=0.04552, over 939850.51 frames. 
], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:18:22,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=193896.0, ans=0.125 +2024-07-28 19:18:23,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=193896.0, ans=0.125 +2024-07-28 19:18:25,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.06 vs. limit=22.5 +2024-07-28 19:18:30,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=193909.33333333334, ans=0.0 +2024-07-28 19:18:31,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=12.0 +2024-07-28 19:18:33,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=193909.33333333334, ans=0.05 +2024-07-28 19:18:37,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=193922.66666666666, ans=0.0 +2024-07-28 19:18:49,536 INFO [train.py:1114] (3/4) Epoch 15, batch 2350, loss[loss=0.1834, simple_loss=0.2789, pruned_loss=0.04395, over 4636.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2685, pruned_loss=0.04527, over 941499.47 frames. ], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:04,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=193962.66666666666, ans=0.0 +2024-07-28 19:19:11,718 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=193976.0, ans=0.1 +2024-07-28 19:19:13,027 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.729e+01 6.304e+01 7.186e+01 9.939e+01, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 19:19:19,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=193989.33333333334, ans=0.125 +2024-07-28 19:19:27,913 INFO [train.py:1114] (3/4) Epoch 15, batch 2400, loss[loss=0.1727, simple_loss=0.2544, pruned_loss=0.0455, over 4643.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2695, pruned_loss=0.04554, over 941226.27 frames. ], batch size: 12, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:19:32,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=194016.0, ans=0.0 +2024-07-28 19:19:38,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=194029.33333333334, ans=0.125 +2024-07-28 19:19:55,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=194069.33333333334, ans=0.125 +2024-07-28 19:19:55,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.42 vs. limit=15.0 +2024-07-28 19:20:01,300 INFO [train.py:1114] (3/4) Epoch 15, batch 2450, loss[loss=0.1847, simple_loss=0.2748, pruned_loss=0.04726, over 4693.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2716, pruned_loss=0.04659, over 936958.97 frames. 
], batch size: 13, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:04,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=194082.66666666666, ans=0.2 +2024-07-28 19:20:21,700 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.395e+01 5.722e+01 6.164e+01 6.844e+01 9.609e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 19:20:26,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=194122.66666666666, ans=0.2 +2024-07-28 19:20:33,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=194136.0, ans=0.0 +2024-07-28 19:20:34,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194149.33333333334, ans=0.0 +2024-07-28 19:20:35,245 INFO [train.py:1114] (3/4) Epoch 15, batch 2500, loss[loss=0.1589, simple_loss=0.2525, pruned_loss=0.03263, over 4812.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.271, pruned_loss=0.04639, over 939092.51 frames. ], batch size: 14, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:20:41,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194162.66666666666, ans=0.1 +2024-07-28 19:20:47,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=194162.66666666666, ans=0.125 +2024-07-28 19:21:00,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=194202.66666666666, ans=0.0 +2024-07-28 19:21:03,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=194202.66666666666, ans=0.125 +2024-07-28 19:21:04,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=194202.66666666666, ans=0.0 +2024-07-28 19:21:05,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=194202.66666666666, ans=0.025 +2024-07-28 19:21:08,114 INFO [train.py:1114] (3/4) Epoch 15, batch 2550, loss[loss=0.1735, simple_loss=0.2618, pruned_loss=0.04261, over 4802.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.271, pruned_loss=0.04617, over 938770.35 frames. ], batch size: 11, lr: 5.03e-03, grad_scale: 32.0 +2024-07-28 19:21:08,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=194216.0, ans=0.125 +2024-07-28 19:21:09,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=194216.0, ans=0.125 +2024-07-28 19:21:15,122 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.98 vs. 
limit=22.5 +2024-07-28 19:21:18,917 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=194229.33333333334, ans=0.0 +2024-07-28 19:21:20,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=194229.33333333334, ans=0.2 +2024-07-28 19:21:24,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=194242.66666666666, ans=0.125 +2024-07-28 19:21:24,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=194242.66666666666, ans=0.025 +2024-07-28 19:21:28,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=194242.66666666666, ans=0.0 +2024-07-28 19:21:29,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194242.66666666666, ans=0.1 +2024-07-28 19:21:30,730 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.497e+01 6.084e+01 7.068e+01 9.259e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-28 19:21:30,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=194256.0, ans=0.2 +2024-07-28 19:21:40,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.51 vs. limit=15.0 +2024-07-28 19:21:45,671 INFO [train.py:1114] (3/4) Epoch 15, batch 2600, loss[loss=0.1844, simple_loss=0.2673, pruned_loss=0.0507, over 4901.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2717, pruned_loss=0.04669, over 937945.07 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:21:49,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.01 vs. limit=12.0 +2024-07-28 19:22:19,325 INFO [train.py:1114] (3/4) Epoch 15, batch 2650, loss[loss=0.2137, simple_loss=0.3092, pruned_loss=0.05907, over 4633.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.272, pruned_loss=0.04647, over 940210.19 frames. ], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:22:21,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=194349.33333333334, ans=0.125 +2024-07-28 19:22:21,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.80 vs. 
limit=15.0 +2024-07-28 19:22:28,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194362.66666666666, ans=0.0 +2024-07-28 19:22:28,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=194362.66666666666, ans=0.1 +2024-07-28 19:22:30,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=194362.66666666666, ans=0.95 +2024-07-28 19:22:40,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=194376.0, ans=0.125 +2024-07-28 19:22:43,426 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.437e+01 6.145e+01 6.904e+01 9.658e+01, threshold=1.229e+02, percent-clipped=0.0 +2024-07-28 19:22:50,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=194389.33333333334, ans=0.125 +2024-07-28 19:22:58,688 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:23:00,533 INFO [train.py:1114] (3/4) Epoch 15, batch 2700, loss[loss=0.1825, simple_loss=0.2686, pruned_loss=0.04818, over 4745.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2719, pruned_loss=0.04658, over 939861.32 frames. ], batch size: 14, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:12,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.50 vs. limit=10.0 +2024-07-28 19:23:22,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=194456.0, ans=0.0 +2024-07-28 19:23:22,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=194456.0, ans=0.125 +2024-07-28 19:23:27,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=194456.0, ans=0.0 +2024-07-28 19:23:35,796 INFO [train.py:1114] (3/4) Epoch 15, batch 2750, loss[loss=0.2037, simple_loss=0.2868, pruned_loss=0.0603, over 4712.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.271, pruned_loss=0.04674, over 940057.39 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:23:35,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=194482.66666666666, ans=0.0 +2024-07-28 19:23:41,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=194482.66666666666, ans=0.125 +2024-07-28 19:23:56,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.701e+01 6.363e+01 7.329e+01 1.129e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-28 19:24:05,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=194536.0, ans=0.0 +2024-07-28 19:24:10,232 INFO [train.py:1114] (3/4) Epoch 15, batch 2800, loss[loss=0.242, simple_loss=0.3156, pruned_loss=0.08416, over 3517.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2712, pruned_loss=0.04643, over 938067.16 frames. 
], batch size: 35, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:10,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194549.33333333334, ans=0.1 +2024-07-28 19:24:15,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.42 vs. limit=15.0 +2024-07-28 19:24:21,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=194562.66666666666, ans=0.125 +2024-07-28 19:24:22,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=194562.66666666666, ans=0.025 +2024-07-28 19:24:31,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=194589.33333333334, ans=0.0 +2024-07-28 19:24:43,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194602.66666666666, ans=0.1 +2024-07-28 19:24:44,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.65 vs. limit=22.5 +2024-07-28 19:24:46,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194602.66666666666, ans=0.1 +2024-07-28 19:24:47,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.84 vs. limit=15.0 +2024-07-28 19:24:48,363 INFO [train.py:1114] (3/4) Epoch 15, batch 2850, loss[loss=0.2022, simple_loss=0.2932, pruned_loss=0.05558, over 4965.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2721, pruned_loss=0.04681, over 935906.11 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:24:55,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=194616.0, ans=0.125 +2024-07-28 19:24:57,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.63 vs. limit=12.0 +2024-07-28 19:24:59,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=194629.33333333334, ans=0.1 +2024-07-28 19:25:09,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.207e+01 5.952e+01 6.499e+01 7.318e+01 1.007e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-28 19:25:23,265 INFO [train.py:1114] (3/4) Epoch 15, batch 2900, loss[loss=0.1799, simple_loss=0.2799, pruned_loss=0.03995, over 4831.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2731, pruned_loss=0.0466, over 939769.74 frames. 
], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:25:28,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=194682.66666666666, ans=0.0 +2024-07-28 19:25:39,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=194709.33333333334, ans=0.0 +2024-07-28 19:25:43,652 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:25:43,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=194722.66666666666, ans=0.07 +2024-07-28 19:25:57,087 INFO [train.py:1114] (3/4) Epoch 15, batch 2950, loss[loss=0.171, simple_loss=0.2593, pruned_loss=0.04139, over 4713.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2712, pruned_loss=0.0457, over 938943.48 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:26:17,152 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.233e+01 5.428e+01 5.966e+01 6.720e+01 8.904e+01, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:26:17,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=194789.33333333334, ans=0.2 +2024-07-28 19:26:18,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=194789.33333333334, ans=0.2 +2024-07-28 19:27:08,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=194802.66666666666, ans=10.0 +2024-07-28 19:27:08,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=21.04 vs. limit=22.5 +2024-07-28 19:27:15,594 INFO [train.py:1114] (3/4) Epoch 15, batch 3000, loss[loss=0.142, simple_loss=0.2274, pruned_loss=0.02831, over 4761.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2701, pruned_loss=0.04538, over 938321.04 frames. ], batch size: 13, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:27:15,594 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 19:27:22,047 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([4.0872, 3.5856, 4.4885, 3.9693, 4.5664, 4.3448, 3.6359, 3.7248], + device='cuda:3') +2024-07-28 19:27:27,985 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1635, simple_loss=0.2667, pruned_loss=0.03013, over 944034.00 frames. +2024-07-28 19:27:27,986 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 19:27:34,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=194829.33333333334, ans=0.0 +2024-07-28 19:27:39,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=194829.33333333334, ans=0.125 +2024-07-28 19:27:46,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.73 vs. limit=15.0 +2024-07-28 19:27:50,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.25 vs. 
limit=12.0 +2024-07-28 19:27:50,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=194856.0, ans=0.125 +2024-07-28 19:27:52,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=194856.0, ans=0.125 +2024-07-28 19:27:58,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=194869.33333333334, ans=0.125 +2024-07-28 19:28:04,531 INFO [train.py:1114] (3/4) Epoch 15, batch 3050, loss[loss=0.1543, simple_loss=0.2413, pruned_loss=0.03361, over 4638.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2711, pruned_loss=0.04599, over 937396.42 frames. ], batch size: 12, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:13,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-07-28 19:28:24,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=15.0 +2024-07-28 19:28:24,842 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.462e+01 5.642e+01 6.422e+01 7.764e+01 9.574e+01, threshold=1.284e+02, percent-clipped=0.0 +2024-07-28 19:28:32,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.02 vs. limit=10.0 +2024-07-28 19:28:36,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=194936.0, ans=0.0 +2024-07-28 19:28:41,478 INFO [train.py:1114] (3/4) Epoch 15, batch 3100, loss[loss=0.1852, simple_loss=0.2796, pruned_loss=0.04535, over 4637.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04602, over 938323.27 frames. ], batch size: 16, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:28:51,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=194962.66666666666, ans=0.125 +2024-07-28 19:28:54,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=12.20 vs. limit=22.5 +2024-07-28 19:29:17,393 INFO [train.py:1114] (3/4) Epoch 15, batch 3150, loss[loss=0.1827, simple_loss=0.2699, pruned_loss=0.04773, over 4651.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2707, pruned_loss=0.04593, over 938441.63 frames. ], batch size: 17, lr: 5.02e-03, grad_scale: 32.0 +2024-07-28 19:29:21,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.08 vs. 
limit=15.0 +2024-07-28 19:29:24,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195029.33333333334, ans=0.125 +2024-07-28 19:29:25,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=195029.33333333334, ans=0.125 +2024-07-28 19:29:30,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=195029.33333333334, ans=0.125 +2024-07-28 19:29:47,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.902e+01 5.711e+01 6.349e+01 7.434e+01 1.242e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-28 19:29:49,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=195056.0, ans=0.2 +2024-07-28 19:30:00,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=195069.33333333334, ans=0.125 +2024-07-28 19:30:01,493 INFO [train.py:1114] (3/4) Epoch 15, batch 3200, loss[loss=0.2035, simple_loss=0.2879, pruned_loss=0.05958, over 4831.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2709, pruned_loss=0.04651, over 939786.91 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:16,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195109.33333333334, ans=0.1 +2024-07-28 19:30:18,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=195109.33333333334, ans=0.0 +2024-07-28 19:30:27,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.58 vs. limit=15.0 +2024-07-28 19:30:30,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=195136.0, ans=0.2 +2024-07-28 19:30:31,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=195136.0, ans=0.09899494936611666 +2024-07-28 19:30:35,969 INFO [train.py:1114] (3/4) Epoch 15, batch 3250, loss[loss=0.18, simple_loss=0.2941, pruned_loss=0.03297, over 4929.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2714, pruned_loss=0.04604, over 940855.72 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:30:56,189 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.478e+01 5.941e+01 6.673e+01 9.852e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-28 19:31:09,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=195202.66666666666, ans=0.125 +2024-07-28 19:31:11,845 INFO [train.py:1114] (3/4) Epoch 15, batch 3300, loss[loss=0.1959, simple_loss=0.2934, pruned_loss=0.04919, over 4706.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2697, pruned_loss=0.04577, over 941333.66 frames. ], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:23,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=195229.33333333334, ans=0.0 +2024-07-28 19:31:44,971 INFO [train.py:1114] (3/4) Epoch 15, batch 3350, loss[loss=0.2245, simple_loss=0.3123, pruned_loss=0.06835, over 4636.00 frames. 
], tot_loss[loss=0.1815, simple_loss=0.2709, pruned_loss=0.04604, over 938893.34 frames. ], batch size: 17, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:31:46,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195282.66666666666, ans=0.1 +2024-07-28 19:31:47,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=195282.66666666666, ans=0.125 +2024-07-28 19:31:53,130 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:31:55,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=195296.0, ans=0.125 +2024-07-28 19:31:59,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=195309.33333333334, ans=0.0 +2024-07-28 19:32:14,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.94 vs. limit=15.0 +2024-07-28 19:32:15,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195309.33333333334, ans=0.125 +2024-07-28 19:32:19,700 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.781e+01 6.264e+01 6.966e+01 9.522e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 19:32:25,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.68 vs. limit=15.0 +2024-07-28 19:32:26,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=195336.0, ans=0.125 +2024-07-28 19:32:31,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-07-28 19:32:34,328 INFO [train.py:1114] (3/4) Epoch 15, batch 3400, loss[loss=0.1601, simple_loss=0.2488, pruned_loss=0.0357, over 4802.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04596, over 937939.18 frames. ], batch size: 11, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:32:36,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=195349.33333333334, ans=0.0 +2024-07-28 19:32:47,302 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:32:50,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=195376.0, ans=0.125 +2024-07-28 19:32:54,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=195389.33333333334, ans=0.025 +2024-07-28 19:32:57,759 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.48 vs. limit=15.0 +2024-07-28 19:33:11,736 INFO [train.py:1114] (3/4) Epoch 15, batch 3450, loss[loss=0.2007, simple_loss=0.2948, pruned_loss=0.05327, over 4679.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2708, pruned_loss=0.0459, over 937974.02 frames. 
], batch size: 19, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:12,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=195416.0, ans=0.2 +2024-07-28 19:33:13,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=195416.0, ans=0.0 +2024-07-28 19:33:14,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=195416.0, ans=0.0 +2024-07-28 19:33:33,578 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.667e+01 6.148e+01 6.825e+01 9.914e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-28 19:33:34,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.14 vs. limit=22.5 +2024-07-28 19:33:36,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195456.0, ans=0.125 +2024-07-28 19:33:46,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=195469.33333333334, ans=0.125 +2024-07-28 19:33:48,993 INFO [train.py:1114] (3/4) Epoch 15, batch 3500, loss[loss=0.1524, simple_loss=0.2379, pruned_loss=0.0335, over 4960.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2704, pruned_loss=0.04596, over 938447.31 frames. ], batch size: 12, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:33:53,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=195482.66666666666, ans=0.0 +2024-07-28 19:33:55,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=195496.0, ans=0.0 +2024-07-28 19:34:06,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=195509.33333333334, ans=0.125 +2024-07-28 19:34:18,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=195536.0, ans=0.0 +2024-07-28 19:34:23,070 INFO [train.py:1114] (3/4) Epoch 15, batch 3550, loss[loss=0.1961, simple_loss=0.281, pruned_loss=0.05562, over 4664.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2705, pruned_loss=0.0458, over 939291.68 frames. ], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:34:34,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=195562.66666666666, ans=0.125 +2024-07-28 19:34:40,697 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 19:34:40,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=195576.0, ans=0.0 +2024-07-28 19:34:42,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=195576.0, ans=0.125 +2024-07-28 19:34:43,328 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.567e+01 6.296e+01 7.208e+01 1.241e+02, threshold=1.259e+02, percent-clipped=1.0 +2024-07-28 19:34:56,851 INFO [train.py:1114] (3/4) Epoch 15, batch 3600, loss[loss=0.1697, simple_loss=0.2552, pruned_loss=0.0421, over 4959.00 frames. 
], tot_loss[loss=0.1812, simple_loss=0.2707, pruned_loss=0.04589, over 940866.33 frames. ], batch size: 13, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:02,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195616.0, ans=0.1 +2024-07-28 19:35:25,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=195656.0, ans=0.0 +2024-07-28 19:35:35,847 INFO [train.py:1114] (3/4) Epoch 15, batch 3650, loss[loss=0.1806, simple_loss=0.2599, pruned_loss=0.05066, over 4907.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2693, pruned_loss=0.04531, over 941287.86 frames. ], batch size: 15, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:35:39,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=195682.66666666666, ans=0.125 +2024-07-28 19:36:00,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=195709.33333333334, ans=0.09899494936611666 +2024-07-28 19:36:00,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.24 vs. limit=10.0 +2024-07-28 19:36:02,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=195709.33333333334, ans=0.125 +2024-07-28 19:36:07,033 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.599e+01 5.794e+01 6.353e+01 7.232e+01 1.193e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 19:36:10,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=195722.66666666666, ans=0.125 +2024-07-28 19:36:23,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=195736.0, ans=0.2 +2024-07-28 19:36:23,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=195736.0, ans=0.125 +2024-07-28 19:36:25,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=195736.0, ans=0.125 +2024-07-28 19:36:26,503 INFO [train.py:1114] (3/4) Epoch 15, batch 3700, loss[loss=0.2026, simple_loss=0.2905, pruned_loss=0.05739, over 4933.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2697, pruned_loss=0.04491, over 942289.73 frames. 
], batch size: 14, lr: 5.01e-03, grad_scale: 32.0 +2024-07-28 19:36:32,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=195762.66666666666, ans=0.0 +2024-07-28 19:36:42,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=195776.0, ans=0.125 +2024-07-28 19:36:44,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=195776.0, ans=0.125 +2024-07-28 19:36:46,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=195776.0, ans=0.125 +2024-07-28 19:36:49,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195789.33333333334, ans=0.1 +2024-07-28 19:36:56,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.68 vs. limit=22.5 +2024-07-28 19:37:09,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=195802.66666666666, ans=0.0 +2024-07-28 19:37:12,712 INFO [train.py:1114] (3/4) Epoch 15, batch 3750, loss[loss=0.1776, simple_loss=0.262, pruned_loss=0.04662, over 4814.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2689, pruned_loss=0.04459, over 943717.69 frames. ], batch size: 11, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:37:30,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=195816.0, ans=0.125 +2024-07-28 19:37:30,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195816.0, ans=0.1 +2024-07-28 19:37:47,140 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.420e+01 5.433e+01 6.069e+01 6.768e+01 1.859e+02, threshold=1.214e+02, percent-clipped=1.0 +2024-07-28 19:37:55,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.39 vs. limit=15.0 +2024-07-28 19:38:00,624 INFO [train.py:1114] (3/4) Epoch 15, batch 3800, loss[loss=0.1755, simple_loss=0.2803, pruned_loss=0.03532, over 4805.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2683, pruned_loss=0.04425, over 941936.84 frames. 
], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:04,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=195882.66666666666, ans=0.025 +2024-07-28 19:38:04,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195882.66666666666, ans=0.1 +2024-07-28 19:38:08,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=195896.0, ans=0.2 +2024-07-28 19:38:10,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=195896.0, ans=0.1 +2024-07-28 19:38:23,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=195922.66666666666, ans=0.125 +2024-07-28 19:38:31,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=195936.0, ans=10.0 +2024-07-28 19:38:35,872 INFO [train.py:1114] (3/4) Epoch 15, batch 3850, loss[loss=0.1907, simple_loss=0.2734, pruned_loss=0.054, over 4592.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2686, pruned_loss=0.0443, over 942404.42 frames. ], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:38:36,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.96 vs. limit=22.5 +2024-07-28 19:38:38,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=195949.33333333334, ans=0.125 +2024-07-28 19:38:46,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=195949.33333333334, ans=0.2 +2024-07-28 19:38:47,606 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.02 vs. limit=15.0 +2024-07-28 19:38:52,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.63 vs. limit=15.0 +2024-07-28 19:38:58,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=195976.0, ans=0.0 +2024-07-28 19:39:04,046 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.933e+01 5.600e+01 6.154e+01 6.941e+01 1.032e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-28 19:39:08,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=195989.33333333334, ans=0.125 +2024-07-28 19:39:11,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=195989.33333333334, ans=0.1 +2024-07-28 19:39:19,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=196016.0, ans=0.09899494936611666 +2024-07-28 19:39:55,317 INFO [train.py:1114] (3/4) Epoch 15, batch 3900, loss[loss=0.1875, simple_loss=0.2826, pruned_loss=0.04618, over 4812.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2689, pruned_loss=0.04433, over 942396.47 frames. 
], batch size: 14, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:39:55,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=196016.0, ans=0.0 +2024-07-28 19:39:56,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=196016.0, ans=0.125 +2024-07-28 19:41:59,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=196042.66666666666, ans=0.125 +2024-07-28 19:42:14,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=196056.0, ans=0.0 +2024-07-28 19:42:17,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=196056.0, ans=0.0 +2024-07-28 19:42:26,989 INFO [train.py:1114] (3/4) Epoch 15, batch 3950, loss[loss=0.1832, simple_loss=0.2738, pruned_loss=0.04634, over 4829.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2691, pruned_loss=0.04474, over 944692.31 frames. ], batch size: 16, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:42:27,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.97 vs. limit=15.0 +2024-07-28 19:42:31,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196082.66666666666, ans=0.125 +2024-07-28 19:42:51,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=196109.33333333334, ans=0.125 +2024-07-28 19:42:56,266 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.595e+01 6.291e+01 6.996e+01 9.236e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-28 19:42:58,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=196122.66666666666, ans=0.125 +2024-07-28 19:43:01,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196122.66666666666, ans=0.125 +2024-07-28 19:43:04,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=196122.66666666666, ans=0.0 +2024-07-28 19:44:13,282 INFO [train.py:1114] (3/4) Epoch 15, batch 4000, loss[loss=0.1708, simple_loss=0.2578, pruned_loss=0.04195, over 4771.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2696, pruned_loss=0.04519, over 941388.63 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:44:20,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=196149.33333333334, ans=0.125 +2024-07-28 19:44:24,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.10 vs. 
limit=12.0 +2024-07-28 19:44:28,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196176.0, ans=0.1 +2024-07-28 19:44:28,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=196176.0, ans=0.2 +2024-07-28 19:44:32,562 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.40 vs. limit=15.0 +2024-07-28 19:44:38,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=196189.33333333334, ans=0.0 +2024-07-28 19:44:39,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-07-28 19:44:40,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=196189.33333333334, ans=0.0 +2024-07-28 19:44:40,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=196189.33333333334, ans=0.125 +2024-07-28 19:44:48,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=196202.66666666666, ans=0.0 +2024-07-28 19:44:49,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=196216.0, ans=0.1 +2024-07-28 19:44:49,524 INFO [train.py:1114] (3/4) Epoch 15, batch 4050, loss[loss=0.1855, simple_loss=0.2742, pruned_loss=0.04836, over 3481.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2707, pruned_loss=0.04545, over 940022.97 frames. ], batch size: 35, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:45:02,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.54 vs. limit=15.0 +2024-07-28 19:45:06,657 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.81 vs. limit=15.0 +2024-07-28 19:47:43,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.74 vs. limit=15.0 +2024-07-28 19:47:43,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=196242.66666666666, ans=0.025 +2024-07-28 19:47:45,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.92 vs. limit=15.0 +2024-07-28 19:48:20,681 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.678e+01 6.345e+01 7.266e+01 1.118e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 19:48:20,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=196256.0, ans=0.125 +2024-07-28 19:48:26,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=196256.0, ans=0.2 +2024-07-28 19:48:39,022 INFO [train.py:1114] (3/4) Epoch 15, batch 4100, loss[loss=0.1855, simple_loss=0.2706, pruned_loss=0.05016, over 4890.00 frames. 
], tot_loss[loss=0.1811, simple_loss=0.2708, pruned_loss=0.0457, over 938702.06 frames. ], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:48:50,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=196296.0, ans=0.1 +2024-07-28 19:48:55,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196309.33333333334, ans=0.125 +2024-07-28 19:49:07,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=196322.66666666666, ans=0.0 +2024-07-28 19:49:13,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=196322.66666666666, ans=0.07 +2024-07-28 19:49:22,847 INFO [train.py:1114] (3/4) Epoch 15, batch 4150, loss[loss=0.1747, simple_loss=0.2608, pruned_loss=0.0443, over 4827.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.27, pruned_loss=0.04527, over 938356.82 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:49:26,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.94 vs. limit=15.0 +2024-07-28 19:49:26,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.51 vs. limit=15.0 +2024-07-28 19:49:31,041 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.55 vs. limit=22.5 +2024-07-28 19:49:51,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.66 vs. limit=10.0 +2024-07-28 19:49:52,756 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.168e+01 5.670e+01 6.330e+01 7.256e+01 1.542e+02, threshold=1.266e+02, percent-clipped=1.0 +2024-07-28 19:50:08,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.96 vs. limit=15.0 +2024-07-28 19:50:09,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=196402.66666666666, ans=0.125 +2024-07-28 19:50:10,365 INFO [train.py:1114] (3/4) Epoch 15, batch 4200, loss[loss=0.2001, simple_loss=0.2974, pruned_loss=0.05145, over 4918.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2703, pruned_loss=0.04575, over 939804.73 frames. 
], batch size: 15, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:50:14,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=196416.0, ans=0.125 +2024-07-28 19:50:18,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=196429.33333333334, ans=0.125 +2024-07-28 19:50:26,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=196429.33333333334, ans=0.05 +2024-07-28 19:50:39,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=196429.33333333334, ans=0.125 +2024-07-28 19:51:03,979 INFO [train.py:1114] (3/4) Epoch 15, batch 4250, loss[loss=0.1546, simple_loss=0.2443, pruned_loss=0.03247, over 4648.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2704, pruned_loss=0.04535, over 940933.91 frames. ], batch size: 12, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:17,015 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.81 vs. limit=15.0 +2024-07-28 19:51:23,970 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.654e+01 5.709e+01 6.318e+01 7.581e+01 1.158e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-28 19:51:26,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-07-28 19:51:36,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=196536.0, ans=0.0 +2024-07-28 19:51:37,890 INFO [train.py:1114] (3/4) Epoch 15, batch 4300, loss[loss=0.1942, simple_loss=0.2863, pruned_loss=0.05103, over 4760.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2702, pruned_loss=0.04565, over 940658.03 frames. ], batch size: 13, lr: 5.00e-03, grad_scale: 32.0 +2024-07-28 19:51:38,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=196549.33333333334, ans=0.05 +2024-07-28 19:51:43,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=196549.33333333334, ans=0.0 +2024-07-28 19:51:54,007 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.17 vs. limit=15.0 +2024-07-28 19:52:08,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.min_abs, batch_count=196602.66666666666, ans=0.5 +2024-07-28 19:52:13,578 INFO [train.py:1114] (3/4) Epoch 15, batch 4350, loss[loss=0.2016, simple_loss=0.2775, pruned_loss=0.06281, over 4757.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2704, pruned_loss=0.04556, over 941259.76 frames. 
], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:52:30,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=196642.66666666666, ans=0.04949747468305833 +2024-07-28 19:52:34,516 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.717e+01 6.359e+01 7.024e+01 1.032e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-28 19:52:48,049 INFO [train.py:1114] (3/4) Epoch 15, batch 4400, loss[loss=0.1503, simple_loss=0.2492, pruned_loss=0.0257, over 4813.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2704, pruned_loss=0.04534, over 940855.63 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:52:49,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.76 vs. limit=15.0 +2024-07-28 19:52:49,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=17.75 vs. limit=22.5 +2024-07-28 19:52:57,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=196696.0, ans=0.125 +2024-07-28 19:53:07,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=196709.33333333334, ans=0.125 +2024-07-28 19:53:07,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=196722.66666666666, ans=0.0 +2024-07-28 19:53:19,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=196736.0, ans=0.2 +2024-07-28 19:53:25,046 INFO [train.py:1114] (3/4) Epoch 15, batch 4450, loss[loss=0.1684, simple_loss=0.2594, pruned_loss=0.0387, over 4940.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2711, pruned_loss=0.04595, over 939011.38 frames. ], batch size: 12, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:55:39,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=196776.0, ans=0.2 +2024-07-28 19:55:40,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=196776.0, ans=0.09899494936611666 +2024-07-28 19:55:45,663 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.676e+01 6.225e+01 6.763e+01 9.651e+01, threshold=1.245e+02, percent-clipped=0.0 +2024-07-28 19:55:45,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=196789.33333333334, ans=0.125 +2024-07-28 19:55:46,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=196789.33333333334, ans=0.125 +2024-07-28 19:55:47,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=196789.33333333334, ans=0.2 +2024-07-28 19:55:50,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=196789.33333333334, ans=0.1 +2024-07-28 19:55:52,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.23 vs. 
limit=22.5 +2024-07-28 19:55:59,332 INFO [train.py:1114] (3/4) Epoch 15, batch 4500, loss[loss=0.1658, simple_loss=0.2598, pruned_loss=0.03586, over 4745.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2724, pruned_loss=0.04605, over 938427.23 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:56:08,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=196829.33333333334, ans=0.125 +2024-07-28 19:56:08,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=196829.33333333334, ans=0.125 +2024-07-28 19:56:16,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=196842.66666666666, ans=0.2 +2024-07-28 19:56:32,361 INFO [train.py:1114] (3/4) Epoch 15, batch 4550, loss[loss=0.1859, simple_loss=0.2758, pruned_loss=0.04797, over 4915.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2714, pruned_loss=0.04565, over 940140.88 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:56:47,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=196909.33333333334, ans=0.125 +2024-07-28 19:56:49,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=196909.33333333334, ans=0.125 +2024-07-28 19:56:54,579 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.611e+01 5.447e+01 5.965e+01 6.710e+01 1.037e+02, threshold=1.193e+02, percent-clipped=0.0 +2024-07-28 19:57:01,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=196936.0, ans=0.0 +2024-07-28 19:57:04,401 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.59 vs. limit=10.0 +2024-07-28 19:57:07,904 INFO [train.py:1114] (3/4) Epoch 15, batch 4600, loss[loss=0.1794, simple_loss=0.2727, pruned_loss=0.04308, over 4472.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2708, pruned_loss=0.04569, over 938279.29 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 64.0 +2024-07-28 19:57:16,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=196962.66666666666, ans=0.0 +2024-07-28 19:57:22,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=196976.0, ans=0.025 +2024-07-28 19:57:27,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=196989.33333333334, ans=0.125 +2024-07-28 19:57:40,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197002.66666666666, ans=0.1 +2024-07-28 19:57:41,454 INFO [train.py:1114] (3/4) Epoch 15, batch 4650, loss[loss=0.204, simple_loss=0.2976, pruned_loss=0.05515, over 4846.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.272, pruned_loss=0.04585, over 939689.65 frames. ], batch size: 16, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:57:56,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.63 vs. 
limit=15.0 +2024-07-28 19:58:02,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=197042.66666666666, ans=0.125 +2024-07-28 19:58:02,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197042.66666666666, ans=0.1 +2024-07-28 19:58:03,991 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.604e+01 6.309e+01 7.191e+01 9.740e+01, threshold=1.262e+02, percent-clipped=0.0 +2024-07-28 19:58:09,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=6.0 +2024-07-28 19:58:12,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=197069.33333333334, ans=0.05 +2024-07-28 19:58:23,486 INFO [train.py:1114] (3/4) Epoch 15, batch 4700, loss[loss=0.1559, simple_loss=0.2323, pruned_loss=0.0397, over 4712.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04585, over 936872.25 frames. ], batch size: 11, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:58:23,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=197082.66666666666, ans=15.0 +2024-07-28 19:58:25,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=197082.66666666666, ans=0.2 +2024-07-28 19:58:29,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=197096.0, ans=0.0 +2024-07-28 19:58:35,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=197096.0, ans=0.125 +2024-07-28 19:58:41,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=197109.33333333334, ans=0.0 +2024-07-28 19:58:53,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197136.0, ans=0.125 +2024-07-28 19:58:57,024 INFO [train.py:1114] (3/4) Epoch 15, batch 4750, loss[loss=0.1914, simple_loss=0.2877, pruned_loss=0.0476, over 4464.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2716, pruned_loss=0.04644, over 935360.45 frames. ], batch size: 21, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:59:00,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=197149.33333333334, ans=0.2 +2024-07-28 19:59:02,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.81 vs. 
limit=15.0 +2024-07-28 19:59:06,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=197162.66666666666, ans=0.0 +2024-07-28 19:59:17,881 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.663e+01 6.511e+01 7.507e+01 1.082e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-28 19:59:26,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=197202.66666666666, ans=0.125 +2024-07-28 19:59:30,788 INFO [train.py:1114] (3/4) Epoch 15, batch 4800, loss[loss=0.1668, simple_loss=0.2643, pruned_loss=0.03471, over 4693.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2712, pruned_loss=0.04655, over 932938.51 frames. ], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 19:59:35,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=197216.0, ans=0.025 +2024-07-28 19:59:35,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.32 vs. limit=22.5 +2024-07-28 19:59:37,469 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.65 vs. limit=6.0 +2024-07-28 19:59:55,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=197256.0, ans=0.025 +2024-07-28 19:59:57,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197269.33333333334, ans=0.1 +2024-07-28 20:00:00,488 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.22 vs. limit=12.0 +2024-07-28 20:00:04,001 INFO [train.py:1114] (3/4) Epoch 15, batch 4850, loss[loss=0.2078, simple_loss=0.2991, pruned_loss=0.05829, over 4736.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2709, pruned_loss=0.04646, over 932263.13 frames. ], batch size: 14, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 20:00:04,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=197282.66666666666, ans=0.125 +2024-07-28 20:00:15,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=197282.66666666666, ans=0.0 +2024-07-28 20:00:30,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=197322.66666666666, ans=0.07 +2024-07-28 20:00:31,447 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.598e+01 6.100e+01 6.871e+01 9.023e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 20:00:41,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197336.0, ans=0.1 +2024-07-28 20:00:48,006 INFO [train.py:1114] (3/4) Epoch 15, batch 4900, loss[loss=0.1938, simple_loss=0.2852, pruned_loss=0.05124, over 4765.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2703, pruned_loss=0.04585, over 934065.61 frames. 
], batch size: 13, lr: 4.99e-03, grad_scale: 32.0 +2024-07-28 20:00:58,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197362.66666666666, ans=0.125 +2024-07-28 20:01:03,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.29 vs. limit=15.0 +2024-07-28 20:01:20,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=197402.66666666666, ans=0.0 +2024-07-28 20:01:25,293 INFO [train.py:1114] (3/4) Epoch 15, batch 4950, loss[loss=0.2116, simple_loss=0.2927, pruned_loss=0.06524, over 3438.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2724, pruned_loss=0.04664, over 931507.18 frames. ], batch size: 35, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:01:36,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.59 vs. limit=22.5 +2024-07-28 20:01:45,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=197456.0, ans=0.1 +2024-07-28 20:01:45,881 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.531e+01 5.983e+01 6.546e+01 1.015e+02, threshold=1.197e+02, percent-clipped=0.0 +2024-07-28 20:01:48,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=197456.0, ans=0.125 +2024-07-28 20:01:52,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=197469.33333333334, ans=0.125 +2024-07-28 20:01:58,983 INFO [train.py:1114] (3/4) Epoch 15, batch 5000, loss[loss=0.1893, simple_loss=0.2943, pruned_loss=0.04214, over 4670.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2713, pruned_loss=0.04606, over 935443.70 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:02:05,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=197496.0, ans=0.125 +2024-07-28 20:02:06,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=197496.0, ans=0.0 +2024-07-28 20:02:06,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=197496.0, ans=0.125 +2024-07-28 20:02:15,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.52 vs. limit=15.0 +2024-07-28 20:02:18,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=197522.66666666666, ans=0.5 +2024-07-28 20:02:28,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=13.39 vs. limit=15.0 +2024-07-28 20:02:33,779 INFO [train.py:1114] (3/4) Epoch 15, batch 5050, loss[loss=0.1758, simple_loss=0.2616, pruned_loss=0.04502, over 4864.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.04586, over 937937.06 frames. 
], batch size: 12, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:02:36,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197549.33333333334, ans=0.125 +2024-07-28 20:02:44,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=197562.66666666666, ans=0.0 +2024-07-28 20:02:54,788 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.283e+01 5.690e+01 6.527e+01 7.473e+01 1.062e+02, threshold=1.305e+02, percent-clipped=0.0 +2024-07-28 20:02:56,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=197589.33333333334, ans=0.125 +2024-07-28 20:02:56,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=197589.33333333334, ans=10.0 +2024-07-28 20:02:57,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=197589.33333333334, ans=0.125 +2024-07-28 20:03:00,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=197589.33333333334, ans=0.0 +2024-07-28 20:03:01,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=197602.66666666666, ans=0.125 +2024-07-28 20:03:01,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=197602.66666666666, ans=0.125 +2024-07-28 20:03:03,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=197602.66666666666, ans=0.125 +2024-07-28 20:03:07,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=197602.66666666666, ans=0.1 +2024-07-28 20:03:08,386 INFO [train.py:1114] (3/4) Epoch 15, batch 5100, loss[loss=0.1534, simple_loss=0.2382, pruned_loss=0.03428, over 4778.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2714, pruned_loss=0.04634, over 935176.67 frames. ], batch size: 12, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:03:10,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=197616.0, ans=0.2 +2024-07-28 20:03:33,799 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.94 vs. limit=15.0 +2024-07-28 20:03:35,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=197669.33333333334, ans=0.1 +2024-07-28 20:03:41,947 INFO [train.py:1114] (3/4) Epoch 15, batch 5150, loss[loss=0.1956, simple_loss=0.2897, pruned_loss=0.0507, over 4863.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2715, pruned_loss=0.04621, over 936775.04 frames. 
], batch size: 16, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:03:44,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=197682.66666666666, ans=0.2 +2024-07-28 20:03:55,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=197696.0, ans=0.0 +2024-07-28 20:03:59,650 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:04:02,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=197709.33333333334, ans=0.125 +2024-07-28 20:04:04,747 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.460e+01 5.595e+01 6.018e+01 6.676e+01 9.613e+01, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 20:04:05,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=197722.66666666666, ans=0.0 +2024-07-28 20:04:15,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=197736.0, ans=0.0 +2024-07-28 20:04:16,834 INFO [train.py:1114] (3/4) Epoch 15, batch 5200, loss[loss=0.1833, simple_loss=0.2898, pruned_loss=0.03842, over 4673.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2706, pruned_loss=0.0454, over 936467.24 frames. ], batch size: 14, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:04:22,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=197749.33333333334, ans=0.1 +2024-07-28 20:04:27,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=197762.66666666666, ans=0.07 +2024-07-28 20:04:31,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=197776.0, ans=0.0 +2024-07-28 20:04:50,333 INFO [train.py:1114] (3/4) Epoch 15, batch 5250, loss[loss=0.1574, simple_loss=0.2393, pruned_loss=0.03777, over 4900.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2703, pruned_loss=0.04553, over 936040.21 frames. ], batch size: 13, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:04:51,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=197816.0, ans=0.125 +2024-07-28 20:04:58,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=197829.33333333334, ans=0.125 +2024-07-28 20:05:04,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=197842.66666666666, ans=0.2 +2024-07-28 20:05:12,231 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.761e+01 6.623e+01 7.609e+01 1.184e+02, threshold=1.325e+02, percent-clipped=0.0 +2024-07-28 20:05:16,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=197856.0, ans=0.0 +2024-07-28 20:05:24,594 INFO [train.py:1114] (3/4) Epoch 15, batch 5300, loss[loss=0.1896, simple_loss=0.2872, pruned_loss=0.04607, over 4630.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2708, pruned_loss=0.0459, over 933983.13 frames. 
], batch size: 16, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:05:25,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=197882.66666666666, ans=0.125 +2024-07-28 20:05:29,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197882.66666666666, ans=0.125 +2024-07-28 20:05:31,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=197896.0, ans=0.125 +2024-07-28 20:05:41,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=197896.0, ans=0.125 +2024-07-28 20:05:48,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.78 vs. limit=15.0 +2024-07-28 20:05:57,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.48 vs. limit=15.0 +2024-07-28 20:06:01,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=197936.0, ans=0.2 +2024-07-28 20:06:01,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=197936.0, ans=0.125 +2024-07-28 20:06:02,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=197936.0, ans=0.0 +2024-07-28 20:06:04,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=197936.0, ans=0.125 +2024-07-28 20:06:05,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=197936.0, ans=0.125 +2024-07-28 20:06:07,653 INFO [train.py:1114] (3/4) Epoch 15, batch 5350, loss[loss=0.1442, simple_loss=0.2232, pruned_loss=0.03265, over 4519.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2716, pruned_loss=0.04582, over 935892.29 frames. ], batch size: 10, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:06:16,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=197962.66666666666, ans=0.09899494936611666 +2024-07-28 20:06:20,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=197976.0, ans=0.125 +2024-07-28 20:06:33,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.458e+01 6.203e+01 7.042e+01 1.086e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-28 20:06:41,260 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.44 vs. limit=15.0 +2024-07-28 20:06:43,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=198002.66666666666, ans=0.125 +2024-07-28 20:06:45,671 INFO [train.py:1114] (3/4) Epoch 15, batch 5400, loss[loss=0.1981, simple_loss=0.2849, pruned_loss=0.05564, over 4205.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2729, pruned_loss=0.04661, over 929877.32 frames. 
], batch size: 25, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:06:54,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=198029.33333333334, ans=0.035 +2024-07-28 20:06:57,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.39 vs. limit=15.0 +2024-07-28 20:07:12,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.39 vs. limit=15.0 +2024-07-28 20:07:13,131 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.30 vs. limit=22.5 +2024-07-28 20:07:14,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=198069.33333333334, ans=0.2 +2024-07-28 20:07:15,481 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=198069.33333333334, ans=0.0 +2024-07-28 20:07:18,650 INFO [train.py:1114] (3/4) Epoch 15, batch 5450, loss[loss=0.1642, simple_loss=0.2398, pruned_loss=0.0443, over 4712.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2715, pruned_loss=0.04589, over 933278.54 frames. ], batch size: 11, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:07:22,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198082.66666666666, ans=0.125 +2024-07-28 20:07:40,428 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.624e+01 6.275e+01 7.403e+01 1.039e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-28 20:07:40,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198122.66666666666, ans=0.125 +2024-07-28 20:07:43,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198122.66666666666, ans=0.125 +2024-07-28 20:07:54,578 INFO [train.py:1114] (3/4) Epoch 15, batch 5500, loss[loss=0.1967, simple_loss=0.278, pruned_loss=0.05766, over 4391.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2702, pruned_loss=0.04554, over 931416.73 frames. ], batch size: 26, lr: 4.98e-03, grad_scale: 32.0 +2024-07-28 20:08:00,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.03 vs. limit=15.0 +2024-07-28 20:08:00,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=198149.33333333334, ans=0.0 +2024-07-28 20:08:26,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=198202.66666666666, ans=0.125 +2024-07-28 20:08:27,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=12.0 +2024-07-28 20:08:29,839 INFO [train.py:1114] (3/4) Epoch 15, batch 5550, loss[loss=0.1627, simple_loss=0.2647, pruned_loss=0.0303, over 4709.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2699, pruned_loss=0.04555, over 933386.48 frames. 
], batch size: 12, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:08:33,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=198216.0, ans=0.125 +2024-07-28 20:08:35,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198216.0, ans=0.125 +2024-07-28 20:08:43,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=198242.66666666666, ans=0.0 +2024-07-28 20:08:43,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=198242.66666666666, ans=0.125 +2024-07-28 20:08:50,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.71 vs. limit=15.0 +2024-07-28 20:08:51,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.553e+01 5.843e+01 6.570e+01 7.982e+01 1.258e+02, threshold=1.314e+02, percent-clipped=1.0 +2024-07-28 20:08:54,429 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.08 vs. limit=15.0 +2024-07-28 20:09:03,376 INFO [train.py:1114] (3/4) Epoch 15, batch 5600, loss[loss=0.1917, simple_loss=0.2846, pruned_loss=0.04938, over 4744.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.271, pruned_loss=0.04612, over 934179.83 frames. ], batch size: 14, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:09:31,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=198336.0, ans=0.0 +2024-07-28 20:09:34,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=198336.0, ans=0.05 +2024-07-28 20:09:35,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=198336.0, ans=0.125 +2024-07-28 20:09:38,923 INFO [train.py:1114] (3/4) Epoch 15, batch 5650, loss[loss=0.1868, simple_loss=0.2857, pruned_loss=0.04399, over 4478.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2703, pruned_loss=0.0458, over 936630.80 frames. ], batch size: 21, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:09:48,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=198362.66666666666, ans=0.125 +2024-07-28 20:09:59,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=198389.33333333334, ans=0.2 +2024-07-28 20:10:00,144 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.802e+01 6.478e+01 7.454e+01 1.007e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-28 20:10:03,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198389.33333333334, ans=0.125 +2024-07-28 20:10:10,234 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.59 vs. 
limit=15.0 +2024-07-28 20:10:12,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=198416.0, ans=0.0 +2024-07-28 20:10:12,530 INFO [train.py:1114] (3/4) Epoch 15, batch 5700, loss[loss=0.1821, simple_loss=0.2756, pruned_loss=0.04432, over 4690.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2702, pruned_loss=0.04576, over 937664.78 frames. ], batch size: 13, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:10:13,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=198416.0, ans=0.09899494936611666 +2024-07-28 20:10:15,870 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.22 vs. limit=6.0 +2024-07-28 20:10:17,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=198416.0, ans=0.2 +2024-07-28 20:10:19,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=198429.33333333334, ans=0.125 +2024-07-28 20:10:22,747 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=11.42 vs. limit=15.0 +2024-07-28 20:10:24,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=198429.33333333334, ans=0.2 +2024-07-28 20:10:32,135 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198442.66666666666, ans=0.1 +2024-07-28 20:10:37,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=198456.0, ans=0.125 +2024-07-28 20:10:46,723 INFO [train.py:1114] (3/4) Epoch 15, batch 5750, loss[loss=0.1746, simple_loss=0.2726, pruned_loss=0.03831, over 4670.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2717, pruned_loss=0.04627, over 937802.51 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:10:52,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=198482.66666666666, ans=0.025 +2024-07-28 20:10:53,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.87 vs. limit=6.0 +2024-07-28 20:10:54,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=198496.0, ans=0.125 +2024-07-28 20:10:54,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=198496.0, ans=0.07 +2024-07-28 20:10:56,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198496.0, ans=0.0 +2024-07-28 20:11:08,231 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.757e+01 5.533e+01 6.199e+01 7.119e+01 1.016e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 20:11:11,906 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.64 vs. 
limit=22.5 +2024-07-28 20:11:15,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=198536.0, ans=0.025 +2024-07-28 20:11:20,263 INFO [train.py:1114] (3/4) Epoch 15, batch 5800, loss[loss=0.2345, simple_loss=0.3187, pruned_loss=0.07511, over 4755.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.273, pruned_loss=0.04681, over 937451.07 frames. ], batch size: 19, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:11:26,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.63 vs. limit=15.0 +2024-07-28 20:11:31,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.33 vs. limit=12.0 +2024-07-28 20:11:37,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.34 vs. limit=22.5 +2024-07-28 20:11:40,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198589.33333333334, ans=0.1 +2024-07-28 20:11:53,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=198602.66666666666, ans=0.2 +2024-07-28 20:11:55,691 INFO [train.py:1114] (3/4) Epoch 15, batch 5850, loss[loss=0.1797, simple_loss=0.2688, pruned_loss=0.0453, over 4485.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2722, pruned_loss=0.04687, over 937815.23 frames. ], batch size: 21, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:11:57,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=198616.0, ans=0.1 +2024-07-28 20:11:57,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=198616.0, ans=0.125 +2024-07-28 20:12:15,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.07 vs. limit=15.0 +2024-07-28 20:12:19,164 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+01 5.725e+01 6.353e+01 6.909e+01 1.131e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-28 20:12:23,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=198656.0, ans=0.125 +2024-07-28 20:12:27,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=198669.33333333334, ans=0.125 +2024-07-28 20:12:31,564 INFO [train.py:1114] (3/4) Epoch 15, batch 5900, loss[loss=0.1801, simple_loss=0.2788, pruned_loss=0.04066, over 4682.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2724, pruned_loss=0.04686, over 938047.05 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:12:41,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198696.0, ans=0.1 +2024-07-28 20:12:43,421 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.18 vs. 
limit=22.5 +2024-07-28 20:12:47,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=198709.33333333334, ans=0.0 +2024-07-28 20:12:48,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=198709.33333333334, ans=0.0 +2024-07-28 20:12:48,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.80 vs. limit=15.0 +2024-07-28 20:12:51,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=198722.66666666666, ans=0.025 +2024-07-28 20:13:08,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=198736.0, ans=0.125 +2024-07-28 20:13:11,341 INFO [train.py:1114] (3/4) Epoch 15, batch 5950, loss[loss=0.2019, simple_loss=0.3096, pruned_loss=0.04706, over 4679.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2722, pruned_loss=0.04653, over 939889.66 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:13:23,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198762.66666666666, ans=0.1 +2024-07-28 20:13:34,731 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.687e+01 6.210e+01 6.868e+01 1.023e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-28 20:13:47,279 INFO [train.py:1114] (3/4) Epoch 15, batch 6000, loss[loss=0.1898, simple_loss=0.2893, pruned_loss=0.0451, over 4258.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.272, pruned_loss=0.04669, over 936864.21 frames. ], batch size: 25, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:13:50,111 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 20:14:09,824 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.1637, simple_loss=0.2666, pruned_loss=0.03037, over 944034.00 frames. +2024-07-28 20:14:09,825 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 20:14:16,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=11.01 vs. limit=15.0 +2024-07-28 20:14:17,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=198829.33333333334, ans=0.125 +2024-07-28 20:14:27,165 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.27 vs. limit=15.0 +2024-07-28 20:14:34,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=198856.0, ans=0.1 +2024-07-28 20:14:41,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=198869.33333333334, ans=0.125 +2024-07-28 20:14:43,680 INFO [train.py:1114] (3/4) Epoch 15, batch 6050, loss[loss=0.178, simple_loss=0.272, pruned_loss=0.04198, over 4773.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2716, pruned_loss=0.04655, over 938294.43 frames. 
], batch size: 12, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:14:43,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=198882.66666666666, ans=0.125 +2024-07-28 20:15:06,831 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.577e+01 6.176e+01 7.301e+01 1.116e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 20:15:15,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=198936.0, ans=0.125 +2024-07-28 20:15:15,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=198936.0, ans=0.0 +2024-07-28 20:15:15,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=198936.0, ans=0.125 +2024-07-28 20:15:18,932 INFO [train.py:1114] (3/4) Epoch 15, batch 6100, loss[loss=0.1911, simple_loss=0.2912, pruned_loss=0.04548, over 4694.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2712, pruned_loss=0.04643, over 937841.53 frames. ], batch size: 15, lr: 4.97e-03, grad_scale: 32.0 +2024-07-28 20:15:21,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.94 vs. limit=15.0 +2024-07-28 20:15:24,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.min_positive, batch_count=198949.33333333334, ans=0.025 +2024-07-28 20:15:31,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=198976.0, ans=0.1 +2024-07-28 20:15:35,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=198976.0, ans=0.0 +2024-07-28 20:15:38,612 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:15:52,713 INFO [train.py:1114] (3/4) Epoch 15, batch 6150, loss[loss=0.2113, simple_loss=0.2925, pruned_loss=0.06509, over 3293.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2713, pruned_loss=0.04652, over 936710.90 frames. ], batch size: 35, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:16:13,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=199056.0, ans=0.0 +2024-07-28 20:16:14,488 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.237e+01 5.430e+01 6.165e+01 7.118e+01 1.181e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 20:16:16,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=199056.0, ans=0.125 +2024-07-28 20:16:18,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=199056.0, ans=0.0 +2024-07-28 20:16:26,466 INFO [train.py:1114] (3/4) Epoch 15, batch 6200, loss[loss=0.1655, simple_loss=0.2683, pruned_loss=0.03138, over 4739.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2723, pruned_loss=0.04706, over 936235.91 frames. 
], batch size: 14, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:16:32,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199096.0, ans=0.1 +2024-07-28 20:16:46,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=199122.66666666666, ans=0.07 +2024-07-28 20:16:51,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=199122.66666666666, ans=0.125 +2024-07-28 20:17:00,526 INFO [train.py:1114] (3/4) Epoch 15, batch 6250, loss[loss=0.2071, simple_loss=0.3037, pruned_loss=0.05526, over 4801.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2714, pruned_loss=0.04718, over 932739.16 frames. ], batch size: 14, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:17:01,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=199149.33333333334, ans=0.125 +2024-07-28 20:17:02,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=199149.33333333334, ans=0.0 +2024-07-28 20:17:09,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=199162.66666666666, ans=0.2 +2024-07-28 20:17:25,819 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.583e+01 5.601e+01 6.121e+01 7.200e+01 1.148e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 20:17:32,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=199202.66666666666, ans=0.2 +2024-07-28 20:17:36,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=199202.66666666666, ans=0.125 +2024-07-28 20:17:37,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199202.66666666666, ans=0.125 +2024-07-28 20:17:38,305 INFO [train.py:1114] (3/4) Epoch 15, batch 6300, loss[loss=0.1209, simple_loss=0.2087, pruned_loss=0.01657, over 4492.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.272, pruned_loss=0.04734, over 929093.76 frames. 
], batch size: 10, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:17:46,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=199229.33333333334, ans=0.0 +2024-07-28 20:17:56,300 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:17:58,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=199256.0, ans=0.125 +2024-07-28 20:18:04,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=199269.33333333334, ans=0.0 +2024-07-28 20:18:06,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=199269.33333333334, ans=0.0 +2024-07-28 20:18:07,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=199269.33333333334, ans=0.125 +2024-07-28 20:18:10,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199269.33333333334, ans=0.1 +2024-07-28 20:18:11,386 INFO [train.py:1114] (3/4) Epoch 15, batch 6350, loss[loss=0.1878, simple_loss=0.2882, pruned_loss=0.04371, over 4442.00 frames. ], tot_loss[loss=0.182, simple_loss=0.271, pruned_loss=0.04649, over 933240.71 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:18:11,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=199282.66666666666, ans=0.0 +2024-07-28 20:18:11,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=199282.66666666666, ans=0.2 +2024-07-28 20:18:17,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=199296.0, ans=0.125 +2024-07-28 20:18:17,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=199296.0, ans=0.125 +2024-07-28 20:18:19,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-07-28 20:18:22,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=199296.0, ans=0.0 +2024-07-28 20:18:33,049 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.687e+01 6.281e+01 7.134e+01 1.278e+02, threshold=1.256e+02, percent-clipped=1.0 +2024-07-28 20:18:34,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.min_positive, batch_count=199322.66666666666, ans=0.05 +2024-07-28 20:18:36,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-28 20:18:37,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=199336.0, ans=0.125 +2024-07-28 20:18:41,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=199336.0, ans=0.1 +2024-07-28 20:18:44,872 INFO [train.py:1114] (3/4) Epoch 15, batch 6400, loss[loss=0.1964, simple_loss=0.287, pruned_loss=0.05291, over 4632.00 frames. 
], tot_loss[loss=0.1815, simple_loss=0.2706, pruned_loss=0.04617, over 934858.76 frames. ], batch size: 13, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:18:50,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=199349.33333333334, ans=0.1 +2024-07-28 20:18:56,402 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.60 vs. limit=15.0 +2024-07-28 20:18:56,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=199362.66666666666, ans=0.1 +2024-07-28 20:19:20,034 INFO [train.py:1114] (3/4) Epoch 15, batch 6450, loss[loss=0.1902, simple_loss=0.2769, pruned_loss=0.05175, over 4588.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2718, pruned_loss=0.04646, over 938571.94 frames. ], batch size: 21, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:19:20,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=15.0 +2024-07-28 20:19:20,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=199416.0, ans=0.125 +2024-07-28 20:19:22,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=199416.0, ans=0.07 +2024-07-28 20:19:26,151 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:19:41,416 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.703e+01 6.605e+01 7.565e+01 1.204e+02, threshold=1.321e+02, percent-clipped=0.0 +2024-07-28 20:19:42,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=199456.0, ans=0.125 +2024-07-28 20:19:47,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=199469.33333333334, ans=0.0 +2024-07-28 20:19:53,869 INFO [train.py:1114] (3/4) Epoch 15, batch 6500, loss[loss=0.2423, simple_loss=0.3068, pruned_loss=0.08891, over 3327.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2719, pruned_loss=0.04652, over 940133.53 frames. 
], batch size: 35, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:19:56,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=199482.66666666666, ans=0.125 +2024-07-28 20:20:00,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=199496.0, ans=0.125 +2024-07-28 20:20:13,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=199509.33333333334, ans=0.2 +2024-07-28 20:20:16,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199522.66666666666, ans=0.1 +2024-07-28 20:20:22,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=199536.0, ans=0.125 +2024-07-28 20:20:23,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=199536.0, ans=0.125 +2024-07-28 20:20:25,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_na.min_abs, batch_count=199536.0, ans=0.02 +2024-07-28 20:20:29,567 INFO [train.py:1114] (3/4) Epoch 15, batch 6550, loss[loss=0.1574, simple_loss=0.2388, pruned_loss=0.03803, over 4794.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2705, pruned_loss=0.04565, over 943012.45 frames. ], batch size: 11, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:20:33,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=199549.33333333334, ans=0.2 +2024-07-28 20:20:35,312 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.17 vs. limit=15.0 +2024-07-28 20:20:37,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=199562.66666666666, ans=0.04949747468305833 +2024-07-28 20:20:39,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199562.66666666666, ans=0.1 +2024-07-28 20:20:42,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=199576.0, ans=0.125 +2024-07-28 20:20:50,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=199589.33333333334, ans=0.125 +2024-07-28 20:20:51,052 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.602e+01 6.242e+01 7.548e+01 1.165e+02, threshold=1.248e+02, percent-clipped=0.0 +2024-07-28 20:20:53,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=199589.33333333334, ans=0.0 +2024-07-28 20:20:57,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199602.66666666666, ans=0.1 +2024-07-28 20:21:03,269 INFO [train.py:1114] (3/4) Epoch 15, batch 6600, loss[loss=0.1834, simple_loss=0.2767, pruned_loss=0.04507, over 4940.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2707, pruned_loss=0.0458, over 944752.53 frames. 
], batch size: 14, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:21:04,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-07-28 20:21:10,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=199629.33333333334, ans=0.0 +2024-07-28 20:21:13,310 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-07-28 20:21:34,718 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:21:36,207 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.65 vs. limit=15.0 +2024-07-28 20:21:37,071 INFO [train.py:1114] (3/4) Epoch 15, batch 6650, loss[loss=0.2059, simple_loss=0.2815, pruned_loss=0.06516, over 4643.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.27, pruned_loss=0.04564, over 943432.00 frames. ], batch size: 17, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:21:38,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=199682.66666666666, ans=0.125 +2024-07-28 20:21:39,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=199682.66666666666, ans=0.2 +2024-07-28 20:21:47,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=199696.0, ans=0.04949747468305833 +2024-07-28 20:21:49,006 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.96 vs. limit=10.0 +2024-07-28 20:21:56,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=199709.33333333334, ans=0.125 +2024-07-28 20:21:56,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=199722.66666666666, ans=0.025 +2024-07-28 20:21:58,838 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.766e+01 5.657e+01 6.507e+01 7.358e+01 9.845e+01, threshold=1.301e+02, percent-clipped=0.0 +2024-07-28 20:22:11,048 INFO [train.py:1114] (3/4) Epoch 15, batch 6700, loss[loss=0.1945, simple_loss=0.2868, pruned_loss=0.05112, over 4758.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2708, pruned_loss=0.04613, over 942447.54 frames. ], batch size: 19, lr: 4.96e-03, grad_scale: 32.0 +2024-07-28 20:22:18,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=199762.66666666666, ans=0.07 +2024-07-28 20:22:20,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_abs, batch_count=199762.66666666666, ans=0.5 +2024-07-28 20:22:21,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=199762.66666666666, ans=0.1 +2024-07-28 20:22:25,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.37 vs. 
limit=22.5 +2024-07-28 20:22:26,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=199776.0, ans=0.125 +2024-07-28 20:22:30,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=199776.0, ans=0.125 +2024-07-28 20:22:32,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=199789.33333333334, ans=0.125 +2024-07-28 20:22:40,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199802.66666666666, ans=0.125 +2024-07-28 20:22:41,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=199802.66666666666, ans=0.125 +2024-07-28 20:22:46,723 INFO [train.py:1114] (3/4) Epoch 15, batch 6750, loss[loss=0.1907, simple_loss=0.2801, pruned_loss=0.05059, over 4254.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2715, pruned_loss=0.04631, over 940232.93 frames. ], batch size: 25, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:22:50,169 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:22:54,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=199829.33333333334, ans=0.0 +2024-07-28 20:23:00,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.48 vs. limit=15.0 +2024-07-28 20:23:03,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_positive, batch_count=199842.66666666666, ans=0.05 +2024-07-28 20:23:09,781 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.776e+01 6.215e+01 6.945e+01 1.166e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-28 20:23:15,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=199869.33333333334, ans=0.0 +2024-07-28 20:23:21,872 INFO [train.py:1114] (3/4) Epoch 15, batch 6800, loss[loss=0.1598, simple_loss=0.2654, pruned_loss=0.02708, over 4636.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2719, pruned_loss=0.04603, over 938255.55 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:23:41,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0 +2024-07-28 20:23:48,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=199936.0, ans=0.2 +2024-07-28 20:23:48,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-07-28 20:23:55,351 INFO [train.py:1114] (3/4) Epoch 15, batch 6850, loss[loss=0.1582, simple_loss=0.2535, pruned_loss=0.03139, over 4697.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.272, pruned_loss=0.04611, over 940262.03 frames. 
], batch size: 13, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:23:59,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=199949.33333333334, ans=0.07 +2024-07-28 20:24:00,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=199949.33333333334, ans=0.07 +2024-07-28 20:24:01,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=199962.66666666666, ans=0.0 +2024-07-28 20:24:04,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. limit=6.0 +2024-07-28 20:24:04,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=199962.66666666666, ans=0.025 +2024-07-28 20:24:05,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=199962.66666666666, ans=0.125 +2024-07-28 20:24:11,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.68 vs. limit=10.0 +2024-07-28 20:24:12,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=199976.0, ans=0.025 +2024-07-28 20:24:16,045 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.13 vs. limit=10.0 +2024-07-28 20:24:16,992 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 5.729e+01 6.369e+01 7.119e+01 1.032e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-28 20:24:17,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=199989.33333333334, ans=0.125 +2024-07-28 20:24:29,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=200002.66666666666, ans=0.0 +2024-07-28 20:24:29,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=200002.66666666666, ans=0.125 +2024-07-28 20:24:31,053 INFO [train.py:1114] (3/4) Epoch 15, batch 6900, loss[loss=0.1702, simple_loss=0.2569, pruned_loss=0.04174, over 4974.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2714, pruned_loss=0.04566, over 942877.83 frames. ], batch size: 13, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:24:32,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.36 vs. limit=15.0 +2024-07-28 20:24:37,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.26 vs. 
limit=12.0 +2024-07-28 20:24:37,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=200029.33333333334, ans=0.5 +2024-07-28 20:24:47,077 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:25:03,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=200069.33333333334, ans=0.2 +2024-07-28 20:25:04,287 INFO [train.py:1114] (3/4) Epoch 15, batch 6950, loss[loss=0.1934, simple_loss=0.2806, pruned_loss=0.05308, over 4532.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2722, pruned_loss=0.04612, over 940561.51 frames. ], batch size: 10, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:25:05,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=200082.66666666666, ans=0.07 +2024-07-28 20:25:21,752 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=200109.33333333334, ans=15.0 +2024-07-28 20:25:25,324 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.615e+01 6.056e+01 6.911e+01 1.034e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 20:25:30,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200136.0, ans=0.1 +2024-07-28 20:25:37,536 INFO [train.py:1114] (3/4) Epoch 15, batch 7000, loss[loss=0.1786, simple_loss=0.2719, pruned_loss=0.04265, over 4586.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2703, pruned_loss=0.04538, over 938945.50 frames. ], batch size: 17, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:25:45,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=200162.66666666666, ans=0.2 +2024-07-28 20:25:53,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200176.0, ans=0.1 +2024-07-28 20:25:59,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=200189.33333333334, ans=0.025 +2024-07-28 20:26:00,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200189.33333333334, ans=0.1 +2024-07-28 20:26:12,402 INFO [train.py:1114] (3/4) Epoch 15, batch 7050, loss[loss=0.1825, simple_loss=0.282, pruned_loss=0.0415, over 4762.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2704, pruned_loss=0.04506, over 942361.58 frames. 
], batch size: 19, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:26:19,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200229.33333333334, ans=0.125 +2024-07-28 20:26:28,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=200242.66666666666, ans=0.05 +2024-07-28 20:26:32,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200242.66666666666, ans=0.1 +2024-07-28 20:26:35,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.524e+01 5.674e+01 6.340e+01 7.118e+01 1.081e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-28 20:26:40,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.25 vs. limit=15.0 +2024-07-28 20:26:47,602 INFO [train.py:1114] (3/4) Epoch 15, batch 7100, loss[loss=0.1983, simple_loss=0.2814, pruned_loss=0.05764, over 4798.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2709, pruned_loss=0.04566, over 936354.84 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:26:52,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=200282.66666666666, ans=0.125 +2024-07-28 20:26:54,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=200296.0, ans=0.05 +2024-07-28 20:26:55,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=200296.0, ans=0.0 +2024-07-28 20:27:07,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=200322.66666666666, ans=0.04949747468305833 +2024-07-28 20:27:08,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=200322.66666666666, ans=0.125 +2024-07-28 20:27:11,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=200322.66666666666, ans=0.025 +2024-07-28 20:27:11,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=200322.66666666666, ans=0.125 +2024-07-28 20:27:12,595 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:27:14,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.47 vs. limit=15.0 +2024-07-28 20:27:17,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=200336.0, ans=0.125 +2024-07-28 20:27:20,254 INFO [train.py:1114] (3/4) Epoch 15, batch 7150, loss[loss=0.2057, simple_loss=0.2964, pruned_loss=0.0575, over 4541.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2695, pruned_loss=0.04537, over 936952.63 frames. 
], batch size: 21, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:27:32,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200362.66666666666, ans=0.125 +2024-07-28 20:27:41,583 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.495e+01 6.100e+01 6.664e+01 1.254e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 20:27:53,622 INFO [train.py:1114] (3/4) Epoch 15, batch 7200, loss[loss=0.1895, simple_loss=0.2834, pruned_loss=0.04779, over 4796.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2694, pruned_loss=0.04484, over 937389.19 frames. ], batch size: 15, lr: 4.95e-03, grad_scale: 64.0 +2024-07-28 20:27:57,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200416.0, ans=0.125 +2024-07-28 20:28:00,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=200429.33333333334, ans=0.125 +2024-07-28 20:28:06,897 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:28:26,772 INFO [train.py:1114] (3/4) Epoch 15, batch 7250, loss[loss=0.1506, simple_loss=0.2462, pruned_loss=0.02751, over 4869.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2691, pruned_loss=0.04499, over 939187.77 frames. ], batch size: 12, lr: 4.95e-03, grad_scale: 64.0 +2024-07-28 20:28:30,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.55 vs. limit=22.5 +2024-07-28 20:28:34,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=200496.0, ans=0.1 +2024-07-28 20:28:37,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=200496.0, ans=0.2 +2024-07-28 20:28:39,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=200509.33333333334, ans=0.025 +2024-07-28 20:28:41,006 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:28:42,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=200509.33333333334, ans=0.025 +2024-07-28 20:28:43,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=200509.33333333334, ans=0.125 +2024-07-28 20:28:47,959 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.542e+01 5.960e+01 6.678e+01 9.539e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-28 20:28:59,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=200549.33333333334, ans=0.2 +2024-07-28 20:28:59,492 INFO [train.py:1114] (3/4) Epoch 15, batch 7300, loss[loss=0.1611, simple_loss=0.2409, pruned_loss=0.04066, over 4855.00 frames. ], tot_loss[loss=0.18, simple_loss=0.269, pruned_loss=0.04545, over 939657.09 frames. 
], batch size: 12, lr: 4.95e-03, grad_scale: 32.0 +2024-07-28 20:29:05,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=200549.33333333334, ans=0.125 +2024-07-28 20:29:18,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=200576.0, ans=0.125 +2024-07-28 20:29:19,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200589.33333333334, ans=0.1 +2024-07-28 20:29:26,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=200602.66666666666, ans=0.0 +2024-07-28 20:29:28,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.96 vs. limit=15.0 +2024-07-28 20:29:32,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=200616.0, ans=0.025 +2024-07-28 20:29:32,746 INFO [train.py:1114] (3/4) Epoch 15, batch 7350, loss[loss=0.173, simple_loss=0.266, pruned_loss=0.04001, over 4643.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2686, pruned_loss=0.04512, over 938991.54 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:29:34,982 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.54 vs. limit=15.0 +2024-07-28 20:29:46,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=200642.66666666666, ans=0.125 +2024-07-28 20:29:54,105 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.678e+01 6.177e+01 7.167e+01 1.153e+02, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 20:30:05,185 INFO [train.py:1114] (3/4) Epoch 15, batch 7400, loss[loss=0.1939, simple_loss=0.2798, pruned_loss=0.05402, over 4693.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2692, pruned_loss=0.04519, over 940258.64 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:30:11,303 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=200696.0, ans=0.125 +2024-07-28 20:30:11,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.39 vs. limit=10.0 +2024-07-28 20:30:13,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=200696.0, ans=0.125 +2024-07-28 20:30:16,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=200696.0, ans=0.125 +2024-07-28 20:30:20,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=200709.33333333334, ans=0.125 +2024-07-28 20:30:23,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=200709.33333333334, ans=0.125 +2024-07-28 20:30:24,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. 
limit=6.0 +2024-07-28 20:30:27,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=200722.66666666666, ans=0.1 +2024-07-28 20:30:36,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=200736.0, ans=0.1 +2024-07-28 20:30:37,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=200736.0, ans=0.5 +2024-07-28 20:30:38,352 INFO [train.py:1114] (3/4) Epoch 15, batch 7450, loss[loss=0.146, simple_loss=0.2267, pruned_loss=0.03266, over 4604.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2687, pruned_loss=0.04553, over 937742.21 frames. ], batch size: 11, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:30:43,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.30 vs. limit=10.0 +2024-07-28 20:30:44,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=200762.66666666666, ans=0.0 +2024-07-28 20:30:48,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=200762.66666666666, ans=0.04949747468305833 +2024-07-28 20:30:54,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=200776.0, ans=0.2 +2024-07-28 20:30:59,630 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=5.50 vs. limit=10.0 +2024-07-28 20:30:59,743 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.621e+01 5.506e+01 6.120e+01 7.059e+01 1.130e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-28 20:31:00,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=200789.33333333334, ans=0.0 +2024-07-28 20:31:03,742 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.58 vs. limit=22.5 +2024-07-28 20:31:11,143 INFO [train.py:1114] (3/4) Epoch 15, batch 7500, loss[loss=0.1867, simple_loss=0.2662, pruned_loss=0.05355, over 3193.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2693, pruned_loss=0.04557, over 935685.62 frames. ], batch size: 35, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:31:13,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=200816.0, ans=0.125 +2024-07-28 20:31:18,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=200829.33333333334, ans=0.125 +2024-07-28 20:31:19,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=200829.33333333334, ans=0.0 +2024-07-28 20:31:20,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.86 vs. limit=15.0 +2024-07-28 20:31:29,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.06 vs. 
limit=15.0 +2024-07-28 20:31:33,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=200856.0, ans=0.0 +2024-07-28 20:31:34,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=200856.0, ans=0.125 +2024-07-28 20:31:38,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=200856.0, ans=0.025 +2024-07-28 20:31:41,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=200869.33333333334, ans=0.125 +2024-07-28 20:31:45,668 INFO [train.py:1114] (3/4) Epoch 15, batch 7550, loss[loss=0.2422, simple_loss=0.3269, pruned_loss=0.07873, over 4600.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2706, pruned_loss=0.04595, over 935773.24 frames. ], batch size: 17, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:31:50,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=200882.66666666666, ans=0.035 +2024-07-28 20:31:52,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=200896.0, ans=0.125 +2024-07-28 20:32:08,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200922.66666666666, ans=0.125 +2024-07-28 20:32:08,710 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.439e+01 5.885e+01 6.380e+01 8.239e+01, threshold=1.177e+02, percent-clipped=0.0 +2024-07-28 20:32:13,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=200936.0, ans=0.0 +2024-07-28 20:32:19,798 INFO [train.py:1114] (3/4) Epoch 15, batch 7600, loss[loss=0.1878, simple_loss=0.2971, pruned_loss=0.03921, over 4814.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2699, pruned_loss=0.04521, over 937849.27 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:32:20,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=200949.33333333334, ans=0.125 +2024-07-28 20:32:57,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=200976.0, ans=0.125 +2024-07-28 20:32:58,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=11.00 vs. limit=15.0 +2024-07-28 20:33:04,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=200989.33333333334, ans=0.0 +2024-07-28 20:33:04,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff2.min_abs, batch_count=200989.33333333334, ans=0.1 +2024-07-28 20:33:12,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=201002.66666666666, ans=0.125 +2024-07-28 20:33:13,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=201016.0, ans=0.2 +2024-07-28 20:33:13,671 INFO [train.py:1114] (3/4) Epoch 15, batch 7650, loss[loss=0.1463, simple_loss=0.2236, pruned_loss=0.03446, over 4943.00 frames. 
], tot_loss[loss=0.1805, simple_loss=0.27, pruned_loss=0.04552, over 936856.59 frames. ], batch size: 12, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:33:14,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-07-28 20:33:37,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=201042.66666666666, ans=0.2 +2024-07-28 20:33:39,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=201042.66666666666, ans=0.0 +2024-07-28 20:33:50,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=201056.0, ans=0.125 +2024-07-28 20:33:52,758 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.492e+01 6.279e+01 7.005e+01 1.015e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-28 20:34:18,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=201069.33333333334, ans=0.0 +2024-07-28 20:34:21,683 INFO [train.py:1114] (3/4) Epoch 15, batch 7700, loss[loss=0.1974, simple_loss=0.2902, pruned_loss=0.05228, over 4700.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.271, pruned_loss=0.04599, over 934568.09 frames. ], batch size: 13, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:34:25,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=201082.66666666666, ans=0.0 +2024-07-28 20:34:45,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=201109.33333333334, ans=0.125 +2024-07-28 20:34:58,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=201136.0, ans=0.125 +2024-07-28 20:35:06,318 INFO [train.py:1114] (3/4) Epoch 15, batch 7750, loss[loss=0.213, simple_loss=0.296, pruned_loss=0.06504, over 4933.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2722, pruned_loss=0.04615, over 935853.55 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:35:14,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201149.33333333334, ans=0.1 +2024-07-28 20:35:37,371 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.525e+01 5.917e+01 6.791e+01 1.166e+02, threshold=1.183e+02, percent-clipped=0.0 +2024-07-28 20:35:39,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201189.33333333334, ans=0.1 +2024-07-28 20:35:56,541 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.68 vs. limit=22.5 +2024-07-28 20:36:07,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=201202.66666666666, ans=0.0 +2024-07-28 20:36:12,994 INFO [train.py:1114] (3/4) Epoch 15, batch 7800, loss[loss=0.1927, simple_loss=0.2872, pruned_loss=0.04907, over 4666.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2731, pruned_loss=0.04654, over 937189.06 frames. 
], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:36:35,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=201229.33333333334, ans=0.2 +2024-07-28 20:36:35,482 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.50 vs. limit=15.0 +2024-07-28 20:36:47,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=201242.66666666666, ans=0.2 +2024-07-28 20:36:57,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.57 vs. limit=15.0 +2024-07-28 20:37:03,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. limit=6.0 +2024-07-28 20:37:04,505 INFO [train.py:1114] (3/4) Epoch 15, batch 7850, loss[loss=0.1576, simple_loss=0.2402, pruned_loss=0.0375, over 4493.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2735, pruned_loss=0.04692, over 936242.97 frames. ], batch size: 10, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:37:04,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201282.66666666666, ans=0.0 +2024-07-28 20:37:07,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=201282.66666666666, ans=0.125 +2024-07-28 20:37:07,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=201282.66666666666, ans=0.125 +2024-07-28 20:37:08,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.61 vs. limit=10.0 +2024-07-28 20:37:20,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.02 vs. limit=15.0 +2024-07-28 20:37:34,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=201322.66666666666, ans=0.125 +2024-07-28 20:37:36,069 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.654e+01 6.198e+01 6.976e+01 9.701e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-28 20:37:42,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=201336.0, ans=0.125 +2024-07-28 20:37:47,270 INFO [train.py:1114] (3/4) Epoch 15, batch 7900, loss[loss=0.2012, simple_loss=0.2936, pruned_loss=0.05436, over 4867.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2741, pruned_loss=0.04707, over 933735.18 frames. ], batch size: 14, lr: 4.94e-03, grad_scale: 32.0 +2024-07-28 20:37:56,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.36 vs. limit=15.0 +2024-07-28 20:38:03,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=201376.0, ans=0.125 +2024-07-28 20:38:04,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.91 vs. 
limit=15.0 +2024-07-28 20:38:21,277 INFO [train.py:1114] (3/4) Epoch 15, batch 7950, loss[loss=0.2279, simple_loss=0.2958, pruned_loss=0.08, over 3381.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2739, pruned_loss=0.04713, over 935640.35 frames. ], batch size: 36, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:38:24,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201416.0, ans=0.1 +2024-07-28 20:38:26,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=201416.0, ans=0.125 +2024-07-28 20:38:40,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201456.0, ans=0.1 +2024-07-28 20:38:42,611 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.203e+01 5.519e+01 6.026e+01 6.724e+01 9.656e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 20:38:44,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=201456.0, ans=0.125 +2024-07-28 20:38:49,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=201469.33333333334, ans=0.07 +2024-07-28 20:38:53,250 INFO [train.py:1114] (3/4) Epoch 15, batch 8000, loss[loss=0.1681, simple_loss=0.2587, pruned_loss=0.03875, over 4616.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2722, pruned_loss=0.04656, over 935050.25 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:38:54,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=201482.66666666666, ans=0.0 +2024-07-28 20:38:58,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.75 vs. limit=15.0 +2024-07-28 20:39:05,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=201496.0, ans=0.1 +2024-07-28 20:39:07,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201509.33333333334, ans=0.1 +2024-07-28 20:39:25,596 INFO [train.py:1114] (3/4) Epoch 15, batch 8050, loss[loss=0.1929, simple_loss=0.2915, pruned_loss=0.04717, over 4810.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2719, pruned_loss=0.04606, over 935036.17 frames. ], batch size: 14, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:39:46,855 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.771e+01 6.002e+01 6.838e+01 8.210e+01 1.277e+02, threshold=1.368e+02, percent-clipped=1.0 +2024-07-28 20:39:50,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=201589.33333333334, ans=0.1 +2024-07-28 20:39:58,047 INFO [train.py:1114] (3/4) Epoch 15, batch 8100, loss[loss=0.2002, simple_loss=0.2821, pruned_loss=0.05914, over 4804.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2716, pruned_loss=0.04589, over 934331.80 frames. 
], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:39:58,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=201616.0, ans=0.025 +2024-07-28 20:39:58,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.90 vs. limit=12.0 +2024-07-28 20:39:59,977 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=201616.0, ans=0.0 +2024-07-28 20:40:00,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.29 vs. limit=22.5 +2024-07-28 20:40:03,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=201616.0, ans=0.05 +2024-07-28 20:40:03,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=201629.33333333334, ans=0.025 +2024-07-28 20:40:04,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=201629.33333333334, ans=0.2 +2024-07-28 20:40:11,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.31 vs. limit=15.0 +2024-07-28 20:40:17,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.68 vs. limit=15.0 +2024-07-28 20:40:17,547 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.75 vs. limit=5.0 +2024-07-28 20:40:19,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=201656.0, ans=0.125 +2024-07-28 20:40:30,021 INFO [train.py:1114] (3/4) Epoch 15, batch 8150, loss[loss=0.202, simple_loss=0.2901, pruned_loss=0.05694, over 4797.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2705, pruned_loss=0.04573, over 937637.00 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:40:40,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=201696.0, ans=0.2 +2024-07-28 20:40:44,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=201709.33333333334, ans=0.125 +2024-07-28 20:40:44,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=201709.33333333334, ans=0.0 +2024-07-28 20:40:50,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.80 vs. 
limit=22.5 +2024-07-28 20:40:51,248 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.751e+01 6.330e+01 7.260e+01 1.173e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 20:40:52,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=201722.66666666666, ans=0.2 +2024-07-28 20:40:58,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=201736.0, ans=0.125 +2024-07-28 20:41:02,471 INFO [train.py:1114] (3/4) Epoch 15, batch 8200, loss[loss=0.1585, simple_loss=0.2594, pruned_loss=0.02886, over 4795.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2701, pruned_loss=0.04503, over 938807.47 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:41:05,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=201749.33333333334, ans=0.0 +2024-07-28 20:41:08,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=201762.66666666666, ans=0.0 +2024-07-28 20:41:27,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.41 vs. limit=15.0 +2024-07-28 20:41:36,032 INFO [train.py:1114] (3/4) Epoch 15, batch 8250, loss[loss=0.1589, simple_loss=0.2571, pruned_loss=0.03039, over 4906.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2701, pruned_loss=0.04488, over 939116.96 frames. ], batch size: 13, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:41:38,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=201816.0, ans=0.1 +2024-07-28 20:41:54,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=201842.66666666666, ans=0.0 +2024-07-28 20:41:57,636 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.622e+01 6.090e+01 6.800e+01 1.043e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-28 20:42:08,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=201882.66666666666, ans=0.125 +2024-07-28 20:42:08,664 INFO [train.py:1114] (3/4) Epoch 15, batch 8300, loss[loss=0.208, simple_loss=0.2952, pruned_loss=0.06041, over 4918.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2714, pruned_loss=0.04524, over 938847.94 frames. 
], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:42:20,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_na.min_abs, batch_count=201896.0, ans=0.02 +2024-07-28 20:42:28,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=201922.66666666666, ans=0.0 +2024-07-28 20:42:32,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=201922.66666666666, ans=0.125 +2024-07-28 20:42:34,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=201936.0, ans=0.1 +2024-07-28 20:42:38,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=201936.0, ans=0.0 +2024-07-28 20:42:41,353 INFO [train.py:1114] (3/4) Epoch 15, batch 8350, loss[loss=0.1837, simple_loss=0.276, pruned_loss=0.04566, over 4814.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2715, pruned_loss=0.04559, over 941643.16 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:42:42,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=201949.33333333334, ans=0.125 +2024-07-28 20:42:44,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.96 vs. limit=15.0 +2024-07-28 20:42:50,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=201962.66666666666, ans=0.05 +2024-07-28 20:42:53,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=201962.66666666666, ans=0.125 +2024-07-28 20:42:54,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=201962.66666666666, ans=0.0 +2024-07-28 20:43:03,764 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.556e+01 6.243e+01 6.901e+01 1.019e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-28 20:43:07,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=201989.33333333334, ans=0.125 +2024-07-28 20:43:12,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202002.66666666666, ans=0.1 +2024-07-28 20:43:14,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202016.0, ans=0.125 +2024-07-28 20:43:15,642 INFO [train.py:1114] (3/4) Epoch 15, batch 8400, loss[loss=0.1809, simple_loss=0.2641, pruned_loss=0.04882, over 4773.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2719, pruned_loss=0.04577, over 940261.91 frames. 
], batch size: 12, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:43:16,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=202016.0, ans=10.0 +2024-07-28 20:43:25,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=202029.33333333334, ans=0.0 +2024-07-28 20:43:37,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.96 vs. limit=15.0 +2024-07-28 20:43:39,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=202056.0, ans=0.125 +2024-07-28 20:43:46,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.whiten.whitening_limit, batch_count=202069.33333333334, ans=12.0 +2024-07-28 20:43:51,498 INFO [train.py:1114] (3/4) Epoch 15, batch 8450, loss[loss=0.2586, simple_loss=0.33, pruned_loss=0.09355, over 4800.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2729, pruned_loss=0.04631, over 939160.89 frames. ], batch size: 15, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:43:52,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.64 vs. limit=22.5 +2024-07-28 20:44:18,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=202109.33333333334, ans=0.035 +2024-07-28 20:44:18,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=202109.33333333334, ans=0.0 +2024-07-28 20:44:18,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=202109.33333333334, ans=0.125 +2024-07-28 20:44:21,221 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.800e+01 6.456e+01 7.440e+01 1.040e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-28 20:44:22,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=202122.66666666666, ans=0.0 +2024-07-28 20:44:27,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=202136.0, ans=0.2 +2024-07-28 20:44:31,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.89 vs. limit=22.5 +2024-07-28 20:44:34,656 INFO [train.py:1114] (3/4) Epoch 15, batch 8500, loss[loss=0.1721, simple_loss=0.2437, pruned_loss=0.05023, over 4599.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2715, pruned_loss=0.04593, over 938779.53 frames. 
], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:44:35,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=202149.33333333334, ans=0.025 +2024-07-28 20:44:38,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=202149.33333333334, ans=0.025 +2024-07-28 20:44:49,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=202176.0, ans=0.125 +2024-07-28 20:44:49,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=202176.0, ans=0.125 +2024-07-28 20:44:57,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=202189.33333333334, ans=0.125 +2024-07-28 20:44:58,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=22.5 +2024-07-28 20:44:58,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=202189.33333333334, ans=0.125 +2024-07-28 20:44:59,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=202189.33333333334, ans=0.2 +2024-07-28 20:45:07,513 INFO [train.py:1114] (3/4) Epoch 15, batch 8550, loss[loss=0.197, simple_loss=0.2656, pruned_loss=0.06416, over 4814.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2713, pruned_loss=0.04624, over 939599.88 frames. ], batch size: 11, lr: 4.93e-03, grad_scale: 32.0 +2024-07-28 20:45:18,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.79 vs. limit=15.0 +2024-07-28 20:45:22,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=202242.66666666666, ans=0.2 +2024-07-28 20:45:28,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=4.45 vs. limit=15.0 +2024-07-28 20:45:30,957 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.429e+01 5.682e+01 6.336e+01 7.358e+01 1.234e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-28 20:45:41,902 INFO [train.py:1114] (3/4) Epoch 15, batch 8600, loss[loss=0.162, simple_loss=0.2748, pruned_loss=0.02466, over 4796.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04583, over 939265.35 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:45:42,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=202282.66666666666, ans=0.125 +2024-07-28 20:45:56,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=202309.33333333334, ans=0.2 +2024-07-28 20:46:00,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.52 vs. limit=22.5 +2024-07-28 20:46:04,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.48 vs. 
limit=10.0 +2024-07-28 20:46:06,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=202322.66666666666, ans=0.025 +2024-07-28 20:46:06,907 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:46:08,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=202336.0, ans=0.125 +2024-07-28 20:46:14,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=202349.33333333334, ans=0.125 +2024-07-28 20:46:15,002 INFO [train.py:1114] (3/4) Epoch 15, batch 8650, loss[loss=0.2129, simple_loss=0.315, pruned_loss=0.05536, over 4906.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2717, pruned_loss=0.04619, over 940527.75 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:46:17,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=202349.33333333334, ans=0.125 +2024-07-28 20:46:29,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=202376.0, ans=0.125 +2024-07-28 20:46:32,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=202376.0, ans=0.0 +2024-07-28 20:46:32,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.33 vs. limit=10.0 +2024-07-28 20:46:36,414 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.651e+01 6.077e+01 6.775e+01 1.563e+02, threshold=1.215e+02, percent-clipped=1.0 +2024-07-28 20:46:44,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=202402.66666666666, ans=0.125 +2024-07-28 20:46:45,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=202402.66666666666, ans=0.2 +2024-07-28 20:46:47,378 INFO [train.py:1114] (3/4) Epoch 15, batch 8700, loss[loss=0.1897, simple_loss=0.2751, pruned_loss=0.0521, over 4761.00 frames. ], tot_loss[loss=0.182, simple_loss=0.272, pruned_loss=0.04605, over 937950.79 frames. ], batch size: 13, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:46:52,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=5.18 vs. limit=15.0 +2024-07-28 20:46:57,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=202429.33333333334, ans=0.0 +2024-07-28 20:47:19,426 INFO [train.py:1114] (3/4) Epoch 15, batch 8750, loss[loss=0.186, simple_loss=0.2718, pruned_loss=0.05006, over 4679.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2714, pruned_loss=0.04604, over 936490.82 frames. ], batch size: 15, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:47:40,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.522e+01 5.559e+01 6.196e+01 6.974e+01 1.029e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 20:47:43,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.13 vs. 
limit=12.0 +2024-07-28 20:47:49,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=202536.0, ans=0.07 +2024-07-28 20:47:49,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=202536.0, ans=0.125 +2024-07-28 20:47:51,466 INFO [train.py:1114] (3/4) Epoch 15, batch 8800, loss[loss=0.1679, simple_loss=0.2608, pruned_loss=0.03753, over 4930.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2721, pruned_loss=0.0461, over 937275.22 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:47:54,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=202549.33333333334, ans=0.0 +2024-07-28 20:47:55,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=12.0 +2024-07-28 20:47:57,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=202562.66666666666, ans=0.125 +2024-07-28 20:48:13,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=202589.33333333334, ans=0.0 +2024-07-28 20:48:15,109 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=9.77 vs. limit=15.0 +2024-07-28 20:48:22,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=202602.66666666666, ans=0.1 +2024-07-28 20:48:23,830 INFO [train.py:1114] (3/4) Epoch 15, batch 8850, loss[loss=0.1751, simple_loss=0.2701, pruned_loss=0.04007, over 4513.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2723, pruned_loss=0.04673, over 932471.20 frames. ], batch size: 21, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:48:24,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202616.0, ans=0.1 +2024-07-28 20:48:31,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=202616.0, ans=0.125 +2024-07-28 20:48:39,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=202629.33333333334, ans=0.0 +2024-07-28 20:48:43,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=202642.66666666666, ans=0.125 +2024-07-28 20:48:48,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=202656.0, ans=0.125 +2024-07-28 20:48:50,490 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.661e+01 6.393e+01 7.198e+01 1.179e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-28 20:49:10,211 INFO [train.py:1114] (3/4) Epoch 15, batch 8900, loss[loss=0.1625, simple_loss=0.246, pruned_loss=0.03955, over 4932.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2726, pruned_loss=0.04699, over 930362.82 frames. 
], batch size: 12, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:49:16,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=202696.0, ans=0.125 +2024-07-28 20:49:20,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202696.0, ans=0.1 +2024-07-28 20:49:23,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=202709.33333333334, ans=0.025 +2024-07-28 20:49:27,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=202709.33333333334, ans=0.125 +2024-07-28 20:49:35,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=202736.0, ans=0.0 +2024-07-28 20:49:42,091 INFO [train.py:1114] (3/4) Epoch 15, batch 8950, loss[loss=0.2187, simple_loss=0.3162, pruned_loss=0.06062, over 4541.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2726, pruned_loss=0.04662, over 931135.69 frames. ], batch size: 21, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:49:44,112 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 20:49:46,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202749.33333333334, ans=0.1 +2024-07-28 20:49:53,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=202762.66666666666, ans=0.125 +2024-07-28 20:50:03,357 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.618e+01 5.676e+01 6.111e+01 7.140e+01 9.937e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 20:50:09,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202802.66666666666, ans=0.1 +2024-07-28 20:50:14,240 INFO [train.py:1114] (3/4) Epoch 15, batch 9000, loss[loss=0.1679, simple_loss=0.2585, pruned_loss=0.03868, over 4649.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2716, pruned_loss=0.04639, over 934317.88 frames. ], batch size: 12, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:50:14,241 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 20:50:29,447 INFO [train.py:1146] (3/4) Epoch 15, validation: loss=0.164, simple_loss=0.2673, pruned_loss=0.03039, over 944034.00 frames. +2024-07-28 20:50:29,448 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 20:50:32,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=202816.0, ans=0.125 +2024-07-28 20:50:43,995 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.04 vs. limit=10.0 +2024-07-28 20:50:44,669 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.60 vs. limit=15.0 +2024-07-28 20:50:47,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.23 vs. 
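limit=15.0

These entries follow the logging format of icefall/k2 Zipformer-style training (the `(3/4)` prefix appears to be rank 3 of the 4 training GPUs listed in the hyperparameters). The `ScheduledFloat` lines that dominate the stream come from `scaling.py`: quantities such as `dropout_p`, `skip_rate`, and balancer probabilities are not fixed hyperparameters but are interpolated against the global `batch_count`, and each line reports the value (`ans=`) currently in effect. A minimal sketch of that idea, with illustrative names rather than the actual icefall class:

```python
# Minimal sketch of a batch-count-keyed schedule in the spirit of the
# ScheduledFloat log lines. Names and API are illustrative assumptions,
# not the actual icefall scaling.py implementation.
import bisect

class PiecewiseLinear:
    def __init__(self, *points):
        # points: (batch_count, value) pairs, already sorted by batch_count
        self.xs = [x for x, _ in points]
        self.ys = [y for _, y in points]

    def __call__(self, batch_count: float) -> float:
        # clamp outside the breakpoints, interpolate linearly between them
        if batch_count <= self.xs[0]:
            return self.ys[0]
        if batch_count >= self.xs[-1]:
            return self.ys[-1]
        i = bisect.bisect_right(self.xs, batch_count)
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# A dropout that decays from 0.3 to 0.1 over the first 20k batches would
# log ans=0.1 from then on, like the dropout_p entries above:
dropout_p = PiecewiseLinear((0.0, 0.3), (20000.0, 0.1))
print(dropout_p(202696.0))  # -> 0.1
```

That the same `ans=` value recurs for a given name this late in training (batch_count around 200k) is consistent with schedules that have long since flattened to their final breakpoint.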
+2024-07-28 20:50:50,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=202856.0, ans=0.125
+2024-07-28 20:50:52,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=202856.0, ans=0.0
+2024-07-28 20:51:01,323 INFO [train.py:1114] (3/4) Epoch 15, batch 9050, loss[loss=0.1412, simple_loss=0.2249, pruned_loss=0.02877, over 4469.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2702, pruned_loss=0.04552, over 934798.19 frames. ], batch size: 10, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:51:03,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=202882.66666666666, ans=0.0
+2024-07-28 20:51:12,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=202896.0, ans=0.125
+2024-07-28 20:51:17,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=202909.33333333334, ans=0.1
+2024-07-28 20:51:21,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=202922.66666666666, ans=0.0
+2024-07-28 20:51:21,885 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.625e+01 5.604e+01 6.217e+01 7.321e+01 1.269e+02, threshold=1.243e+02, percent-clipped=1.0
+2024-07-28 20:51:30,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=202936.0, ans=0.2
+2024-07-28 20:51:32,959 INFO [train.py:1114] (3/4) Epoch 15, batch 9100, loss[loss=0.1844, simple_loss=0.2747, pruned_loss=0.04702, over 4932.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2688, pruned_loss=0.04504, over 937322.77 frames. ], batch size: 14, lr: 4.92e-03, grad_scale: 32.0
+2024-07-28 20:51:33,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=202949.33333333334, ans=0.125
+2024-07-28 20:51:44,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=202976.0, ans=0.1
+2024-07-28 20:51:57,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5
+2024-07-28 20:51:58,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.77 vs. limit=6.0
+2024-07-28 20:52:04,338 INFO [train.py:1114] (3/4) Epoch 15, batch 9150, loss[loss=0.2148, simple_loss=0.317, pruned_loss=0.05632, over 4798.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2705, pruned_loss=0.04558, over 935440.34 frames. 
], batch size: 14, lr: 4.92e-03, grad_scale: 32.0 +2024-07-28 20:52:12,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=203029.33333333334, ans=0.125 +2024-07-28 20:52:18,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=203042.66666666666, ans=0.0 +2024-07-28 20:52:25,461 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.523e+01 6.043e+01 6.925e+01 1.017e+02, threshold=1.209e+02, percent-clipped=0.0 +2024-07-28 20:52:35,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=203082.66666666666, ans=0.0 +2024-07-28 20:52:36,148 INFO [train.py:1114] (3/4) Epoch 15, batch 9200, loss[loss=0.1624, simple_loss=0.2444, pruned_loss=0.04023, over 4860.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2695, pruned_loss=0.04539, over 937225.18 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:52:38,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=203082.66666666666, ans=0.0 +2024-07-28 20:52:46,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.32 vs. limit=15.0 +2024-07-28 20:52:55,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=203122.66666666666, ans=0.125 +2024-07-28 20:53:08,407 INFO [train.py:1114] (3/4) Epoch 15, batch 9250, loss[loss=0.173, simple_loss=0.2634, pruned_loss=0.04125, over 4634.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2695, pruned_loss=0.04529, over 938017.39 frames. ], batch size: 13, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:53:11,230 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0 +2024-07-28 20:53:11,341 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.82 vs. limit=12.0 +2024-07-28 20:53:12,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203149.33333333334, ans=0.1 +2024-07-28 20:53:12,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=203149.33333333334, ans=0.0 +2024-07-28 20:53:17,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=203162.66666666666, ans=15.0 +2024-07-28 20:53:18,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=203162.66666666666, ans=0.1 +2024-07-28 20:53:25,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=203176.0, ans=0.04949747468305833 +2024-07-28 20:53:29,341 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+01 5.672e+01 6.344e+01 6.747e+01 1.004e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 20:53:31,530 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.88 vs. 
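limit=15.0

The recurring `WARNING [optim.py:487]` lines summarize the optimizer's view of recent gradient norms as a five-point quartile summary. In these logs the reported `threshold` tracks `Clipping_scale` times the median (for the entry above, 2.0 × 6.043e+01 ≈ 1.209e+02), and `percent-clipped` records how often that threshold was actually exceeded. A rough, assumption-labeled reconstruction of such a report, not the actual ScaledAdam clipping code:

```python
# Rough reconstruction of a quartile-style gradient-norm report matching
# the shape of the optim.py WARNING lines. Illustrative sketch only; the
# real optimizer differs in detail.
import torch

def grad_norm_report(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    # five-point summary: min, 25%, median, 75%, max of recent grad norms
    qs = torch.quantile(recent_norms,
                        torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * qs[2].item()   # scale times the median
    pct = 100.0 * (recent_norms > threshold).float().mean().item()
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          + " ".join(f"{q.item():.3e}" for q in qs)
          + f", threshold={threshold:.3e}, percent-clipped={pct}")

grad_norm_report(torch.tensor([43.3, 55.2, 60.4, 69.3, 101.7]))
```

The 0.0 to 1.0 percent-clipped values throughout this section suggest clipping almost never fires at this stage of training.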
+2024-07-28 20:53:35,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.45 vs. limit=10.0
+2024-07-28 20:53:36,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=203202.66666666666, ans=0.125
+2024-07-28 20:53:37,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=203202.66666666666, ans=0.125
+2024-07-28 20:53:37,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=203202.66666666666, ans=0.125
+2024-07-28 20:53:41,024 INFO [train.py:1114] (3/4) Epoch 15, batch 9300, loss[loss=0.1633, simple_loss=0.2375, pruned_loss=0.04455, over 4761.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2687, pruned_loss=0.04536, over 937778.68 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:53:55,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=203242.66666666666, ans=0.1
+2024-07-28 20:54:13,771 INFO [train.py:1114] (3/4) Epoch 15, batch 9350, loss[loss=0.1589, simple_loss=0.2381, pruned_loss=0.03987, over 4813.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2697, pruned_loss=0.04531, over 935611.88 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0
+2024-07-28 20:54:17,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.27 vs. limit=15.0
+2024-07-28 20:54:18,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=203282.66666666666, ans=0.125
+2024-07-28 20:54:23,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203296.0, ans=0.1
+2024-07-28 20:54:24,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.70 vs. limit=15.0
+2024-07-28 20:54:25,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=203296.0, ans=0.0
+2024-07-28 20:54:25,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.whiten.whitening_limit, batch_count=203296.0, ans=12.0
+2024-07-28 20:54:28,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=203309.33333333334, ans=0.125
+2024-07-28 20:54:28,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=203309.33333333334, ans=0.5
+2024-07-28 20:54:34,861 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.610e+01 5.452e+01 6.189e+01 7.531e+01 9.435e+01, threshold=1.238e+02, percent-clipped=0.0
+2024-07-28 20:54:45,593 INFO [train.py:1114] (3/4) Epoch 15, batch 9400, loss[loss=0.1858, simple_loss=0.271, pruned_loss=0.0503, over 4689.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.27, pruned_loss=0.04584, over 933700.67 frames. 
], batch size: 13, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:54:59,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.37 vs. limit=6.0 +2024-07-28 20:55:04,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=203389.33333333334, ans=0.2 +2024-07-28 20:55:14,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=203402.66666666666, ans=0.125 +2024-07-28 20:55:15,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=203402.66666666666, ans=0.125 +2024-07-28 20:55:17,114 INFO [train.py:1114] (3/4) Epoch 15, batch 9450, loss[loss=0.1798, simple_loss=0.2536, pruned_loss=0.05299, over 4800.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2697, pruned_loss=0.04526, over 933048.51 frames. ], batch size: 11, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:55:27,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=203429.33333333334, ans=0.0 +2024-07-28 20:55:37,633 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.245e+01 5.463e+01 5.974e+01 6.797e+01 9.307e+01, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 20:55:41,986 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.00 vs. limit=15.0 +2024-07-28 20:55:48,429 INFO [train.py:1114] (3/4) Epoch 15, batch 9500, loss[loss=0.152, simple_loss=0.2358, pruned_loss=0.03405, over 4713.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2704, pruned_loss=0.04531, over 935275.55 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:55:54,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=203496.0, ans=0.125 +2024-07-28 20:55:56,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.49 vs. limit=22.5 +2024-07-28 20:56:09,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=203522.66666666666, ans=0.025 +2024-07-28 20:56:19,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=203549.33333333334, ans=0.0 +2024-07-28 20:56:19,725 INFO [train.py:1114] (3/4) Epoch 15, batch 9550, loss[loss=0.1632, simple_loss=0.2442, pruned_loss=0.0411, over 4766.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2695, pruned_loss=0.045, over 932339.72 frames. 
], batch size: 12, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:56:19,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=203549.33333333334, ans=0.5 +2024-07-28 20:56:26,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=203562.66666666666, ans=0.04949747468305833 +2024-07-28 20:56:34,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=203576.0, ans=0.1 +2024-07-28 20:56:40,326 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.499e+01 6.112e+01 6.972e+01 9.508e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 20:56:41,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=203589.33333333334, ans=0.125 +2024-07-28 20:56:50,869 INFO [train.py:1114] (3/4) Epoch 15, batch 9600, loss[loss=0.2318, simple_loss=0.3085, pruned_loss=0.07758, over 3224.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2702, pruned_loss=0.04523, over 931098.45 frames. ], batch size: 35, lr: 4.91e-03, grad_scale: 64.0 +2024-07-28 20:56:56,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=203616.0, ans=10.0 +2024-07-28 20:56:56,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=5.77 vs. limit=12.0 +2024-07-28 20:57:05,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.48 vs. limit=10.0 +2024-07-28 20:57:20,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=203669.33333333334, ans=0.125 +2024-07-28 20:57:22,694 INFO [train.py:1114] (3/4) Epoch 15, batch 9650, loss[loss=0.1777, simple_loss=0.2674, pruned_loss=0.04398, over 4846.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2704, pruned_loss=0.04563, over 926479.30 frames. ], batch size: 16, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:57:24,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=203682.66666666666, ans=0.0 +2024-07-28 20:57:35,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203709.33333333334, ans=0.1 +2024-07-28 20:57:40,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=203709.33333333334, ans=0.0 +2024-07-28 20:57:44,441 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.709e+01 6.228e+01 7.235e+01 8.715e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 20:57:46,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=203722.66666666666, ans=0.0 +2024-07-28 20:57:52,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=203736.0, ans=0.2 +2024-07-28 20:57:55,048 INFO [train.py:1114] (3/4) Epoch 15, batch 9700, loss[loss=0.2305, simple_loss=0.3013, pruned_loss=0.07989, over 4386.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2704, pruned_loss=0.04566, over 924212.27 frames. 
], batch size: 26, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:58:05,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=203762.66666666666, ans=0.0 +2024-07-28 20:58:16,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.02 vs. limit=15.0 +2024-07-28 20:58:27,167 INFO [train.py:1114] (3/4) Epoch 15, batch 9750, loss[loss=0.1991, simple_loss=0.299, pruned_loss=0.04957, over 4682.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2707, pruned_loss=0.04577, over 925168.98 frames. ], batch size: 15, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:58:48,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.731e+01 6.608e+01 7.819e+01 1.278e+02, threshold=1.322e+02, percent-clipped=1.0 +2024-07-28 20:58:51,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.70 vs. limit=5.0 +2024-07-28 20:58:57,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=203869.33333333334, ans=0.125 +2024-07-28 20:59:03,966 INFO [train.py:1114] (3/4) Epoch 15, batch 9800, loss[loss=0.1441, simple_loss=0.2434, pruned_loss=0.02241, over 4709.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2689, pruned_loss=0.04508, over 924756.11 frames. ], batch size: 12, lr: 4.91e-03, grad_scale: 32.0 +2024-07-28 20:59:15,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=203909.33333333334, ans=0.125 +2024-07-28 20:59:15,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=203909.33333333334, ans=0.125 +2024-07-28 20:59:19,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203909.33333333334, ans=0.1 +2024-07-28 20:59:20,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-28 20:59:30,008 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.08 vs. limit=12.0 +2024-07-28 20:59:30,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=203936.0, ans=0.125 +2024-07-28 20:59:35,232 INFO [train.py:1114] (3/4) Epoch 15, batch 9850, loss[loss=0.1837, simple_loss=0.2803, pruned_loss=0.04359, over 4895.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2698, pruned_loss=0.04506, over 926969.85 frames. ], batch size: 15, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 20:59:43,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=203962.66666666666, ans=0.0 +2024-07-28 20:59:45,903 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=203962.66666666666, ans=0.125 +2024-07-28 20:59:46,871 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.19 vs. 
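limit=22.5

The Whitening records like the one just above come from a Zipformer self-regularization module: each named activation tracks how anisotropic its per-group feature covariance has become, and training only intervenes when the logged metric exceeds the logged limit. The exact scaling.py formula is not shown in this log; the sketch below is one plausible metric with the right behavior, equal to 1.0 for perfectly whitened features and growing with anisotropy:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (..., num_channels).  Compare each channel-group's covariance
    # against a scaled identity; the formula here is an assumption, not
    # the actual scaling.py implementation.
    x = x.reshape(-1, x.shape[-1])
    num_frames, num_channels = x.shape
    c = num_channels // num_groups
    x = x.reshape(num_frames, num_groups, c).transpose(0, 1)  # (groups, frames, c)
    cov = x.transpose(1, 2) @ x / num_frames                  # (groups, c, c)
    trace = cov.diagonal(dim1=1, dim2=2).sum(-1)              # (groups,)
    frob_sq = (cov ** 2).sum(dim=(1, 2))                      # (groups,)
    # c * ||C||_F^2 / tr(C)^2 == 1 iff C is a multiple of the identity
    # (Cauchy-Schwarz), so larger values mean less-white features.
    return (c * frob_sq / trace ** 2).mean()
```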
limit=22.5 +2024-07-28 20:59:53,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=203989.33333333334, ans=0.1 +2024-07-28 20:59:53,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=203989.33333333334, ans=0.125 +2024-07-28 20:59:54,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=203989.33333333334, ans=0.025 +2024-07-28 20:59:56,144 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.690e+01 6.538e+01 7.363e+01 1.082e+02, threshold=1.308e+02, percent-clipped=0.0 +2024-07-28 20:59:58,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=203989.33333333334, ans=0.125 +2024-07-28 20:59:58,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=203989.33333333334, ans=0.125 +2024-07-28 20:59:59,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=204002.66666666666, ans=0.05 +2024-07-28 21:00:03,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=204002.66666666666, ans=0.0 +2024-07-28 21:00:06,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=204016.0, ans=0.025 +2024-07-28 21:00:06,871 INFO [train.py:1114] (3/4) Epoch 15, batch 9900, loss[loss=0.2041, simple_loss=0.2883, pruned_loss=0.05991, over 4837.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2709, pruned_loss=0.04607, over 926193.07 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:00:10,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=204016.0, ans=0.0 +2024-07-28 21:00:10,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.93 vs. limit=12.0 +2024-07-28 21:00:12,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204029.33333333334, ans=0.1 +2024-07-28 21:00:20,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=204042.66666666666, ans=0.125 +2024-07-28 21:00:28,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=204056.0, ans=0.125 +2024-07-28 21:00:31,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=204069.33333333334, ans=0.1 +2024-07-28 21:00:35,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=204069.33333333334, ans=0.125 +2024-07-28 21:00:37,696 INFO [train.py:1114] (3/4) Epoch 15, batch 9950, loss[loss=0.1734, simple_loss=0.2594, pruned_loss=0.04374, over 4533.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2712, pruned_loss=0.04655, over 928950.67 frames. 
], batch size: 10, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:00:43,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.37 vs. limit=22.5 +2024-07-28 21:00:47,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=204096.0, ans=0.025 +2024-07-28 21:00:50,453 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.17 vs. limit=10.0 +2024-07-28 21:00:59,864 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.488e+01 6.089e+01 6.834e+01 7.968e+01 1.113e+02, threshold=1.367e+02, percent-clipped=0.0 +2024-07-28 21:01:05,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=204136.0, ans=0.2 +2024-07-28 21:01:09,730 INFO [train.py:1114] (3/4) Epoch 15, batch 10000, loss[loss=0.2038, simple_loss=0.2923, pruned_loss=0.05762, over 4608.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2744, pruned_loss=0.0475, over 926578.83 frames. ], batch size: 16, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:01:32,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=204189.33333333334, ans=0.0 +2024-07-28 21:01:41,140 INFO [train.py:1114] (3/4) Epoch 15, batch 10050, loss[loss=0.2039, simple_loss=0.2859, pruned_loss=0.06094, over 2986.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2775, pruned_loss=0.049, over 913987.60 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:01:48,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=204229.33333333334, ans=0.025 +2024-07-28 21:01:49,231 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.18 vs. limit=22.5 +2024-07-28 21:01:53,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=204229.33333333334, ans=0.125 +2024-07-28 21:02:04,624 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.770e+01 5.990e+01 6.680e+01 7.345e+01 9.959e+01, threshold=1.336e+02, percent-clipped=0.0 +2024-07-28 21:02:05,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=204256.0, ans=0.125 +2024-07-28 21:02:10,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=204269.33333333334, ans=0.025 +2024-07-28 21:02:15,465 INFO [train.py:1114] (3/4) Epoch 15, batch 10100, loss[loss=0.208, simple_loss=0.2948, pruned_loss=0.06063, over 3670.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2823, pruned_loss=0.05389, over 860847.96 frames. 
], batch size: 37, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:02:17,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=204282.66666666666, ans=0.125 +2024-07-28 21:02:19,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=204282.66666666666, ans=0.09899494936611666 +2024-07-28 21:02:22,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.24 vs. limit=15.0 +2024-07-28 21:02:34,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=204309.33333333334, ans=0.125 +2024-07-28 21:02:36,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=204322.66666666666, ans=0.025 +2024-07-28 21:02:38,394 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=204322.66666666666, ans=0.125 +2024-07-28 21:02:42,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=204336.0, ans=0.0 +2024-07-28 21:02:48,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=204349.33333333334, ans=0.0 +2024-07-28 21:02:48,828 INFO [train.py:1114] (3/4) Epoch 15, batch 10150, loss[loss=0.2143, simple_loss=0.2988, pruned_loss=0.06489, over 3410.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2859, pruned_loss=0.05747, over 819421.33 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:02:55,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=204362.66666666666, ans=0.2 +2024-07-28 21:02:58,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=204362.66666666666, ans=0.1 +2024-07-28 21:03:05,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204376.0, ans=0.125 +2024-07-28 21:03:09,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=204389.33333333334, ans=0.0 +2024-07-28 21:03:12,068 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.568e+01 6.640e+01 7.110e+01 7.457e+01 9.149e+01, threshold=1.422e+02, percent-clipped=0.0 +2024-07-28 21:03:22,476 INFO [train.py:1114] (3/4) Epoch 15, batch 10200, loss[loss=0.237, simple_loss=0.3212, pruned_loss=0.07636, over 3146.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2894, pruned_loss=0.06113, over 787114.66 frames. ], batch size: 35, lr: 4.90e-03, grad_scale: 32.0 +2024-07-28 21:03:24,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.67 vs. limit=15.0 +2024-07-28 21:04:37,185 INFO [train.py:1114] (3/4) Epoch 16, batch 0, loss[loss=0.1549, simple_loss=0.2439, pruned_loss=0.03293, over 4846.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2439, pruned_loss=0.03293, over 4846.00 frames. 
], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:04:37,185 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 21:04:45,109 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0946, 3.9800, 4.0068, 3.9234, 4.4669, 4.3925, 4.4683, 3.9629], + device='cuda:3') +2024-07-28 21:04:48,652 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1648, simple_loss=0.2693, pruned_loss=0.03017, over 944034.00 frames. +2024-07-28 21:04:48,652 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 21:04:53,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=204445.33333333334, ans=0.125 +2024-07-28 21:05:02,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=204472.0, ans=0.0 +2024-07-28 21:05:22,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204512.0, ans=0.125 +2024-07-28 21:05:23,361 INFO [train.py:1114] (3/4) Epoch 16, batch 50, loss[loss=0.1813, simple_loss=0.2608, pruned_loss=0.05087, over 4614.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2737, pruned_loss=0.04589, over 206232.04 frames. ], batch size: 11, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:05:29,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=204512.0, ans=0.2 +2024-07-28 21:05:36,791 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.417e+01 5.659e+01 6.518e+01 7.271e+01 1.139e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-28 21:05:45,795 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204538.66666666666, ans=0.125 +2024-07-28 21:05:54,603 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.93 vs. limit=15.0 +2024-07-28 21:05:56,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=204552.0, ans=0.025 +2024-07-28 21:06:04,208 INFO [train.py:1114] (3/4) Epoch 16, batch 100, loss[loss=0.1953, simple_loss=0.2842, pruned_loss=0.05314, over 4647.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2757, pruned_loss=0.04696, over 365525.73 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:06:15,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=204592.0, ans=0.0 +2024-07-28 21:06:16,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=204592.0, ans=0.0 +2024-07-28 21:06:22,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=204605.33333333334, ans=0.0 +2024-07-28 21:06:36,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.91 vs. 
limit=22.5 +2024-07-28 21:06:37,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=204632.0, ans=0.125 +2024-07-28 21:06:39,312 INFO [train.py:1114] (3/4) Epoch 16, batch 150, loss[loss=0.1657, simple_loss=0.2556, pruned_loss=0.03785, over 4611.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2724, pruned_loss=0.04495, over 494189.40 frames. ], batch size: 11, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:06:41,718 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.98 vs. limit=15.0 +2024-07-28 21:06:48,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=204658.66666666666, ans=0.125 +2024-07-28 21:06:50,236 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.360e+01 5.968e+01 6.673e+01 1.001e+02, threshold=1.194e+02, percent-clipped=0.0 +2024-07-28 21:06:51,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.19 vs. limit=12.0 +2024-07-28 21:06:53,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=204658.66666666666, ans=0.125 +2024-07-28 21:06:56,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204672.0, ans=0.1 +2024-07-28 21:06:59,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=204672.0, ans=0.125 +2024-07-28 21:07:18,886 INFO [train.py:1114] (3/4) Epoch 16, batch 200, loss[loss=0.1654, simple_loss=0.2636, pruned_loss=0.03362, over 4532.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2698, pruned_loss=0.04429, over 593759.76 frames. ], batch size: 21, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:07:22,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=12.0 +2024-07-28 21:07:23,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=204712.0, ans=0.125 +2024-07-28 21:07:29,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0 +2024-07-28 21:07:44,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204752.0, ans=0.1 +2024-07-28 21:07:52,284 INFO [train.py:1114] (3/4) Epoch 16, batch 250, loss[loss=0.1923, simple_loss=0.2909, pruned_loss=0.0468, over 4623.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2703, pruned_loss=0.04498, over 670399.45 frames. ], batch size: 16, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:07:58,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=204778.66666666666, ans=0.1 +2024-07-28 21:07:59,823 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.91 vs. 
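limit=15.0

The periodic optim.py WARNINGs in this log summarize the distribution of recent gradient norms. The logged numbers are consistent with the threshold being Clipping_scale times the median quartile (for instance 2.0 * 6.705e+01 = 1.341e+02 in the very next warning); how the norms are buffered and refreshed is not visible in the log, so the sketch below only reproduces that reporting relation:

```python
import torch

def grad_norm_report(recent_norms, clipping_scale=2.0):
    """Quartiles of recent grad norms plus a median-based clipping threshold.
    Only threshold = clipping_scale * median is taken from the log; the
    buffer of norms and its update rule are assumptions."""
    norms = torch.tensor(recent_norms)
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]  # 2.0 * median in these warnings
    percent_clipped = 100.0 * (norms > threshold).float().mean()
    return q.tolist(), threshold.item(), percent_clipped.item()
```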
+2024-07-28 21:08:05,814 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.773e+01 6.705e+01 7.902e+01 1.167e+02, threshold=1.341e+02, percent-clipped=0.0 +2024-07-28 21:08:05,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=204792.0, ans=0.125 +2024-07-28 21:08:23,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=204818.66666666666, ans=0.2 +2024-07-28 21:08:26,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=204832.0, ans=0.125 +2024-07-28 21:08:28,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=204832.0, ans=0.125 +2024-07-28 21:08:36,621 INFO [train.py:1114] (3/4) Epoch 16, batch 300, loss[loss=0.1948, simple_loss=0.2897, pruned_loss=0.04999, over 4802.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2691, pruned_loss=0.04458, over 730240.40 frames. ], batch size: 15, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:08:40,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=204845.33333333334, ans=0.0 +2024-07-28 21:08:59,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=204885.33333333334, ans=0.125 +2024-07-28 21:09:04,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.96 vs. limit=12.0 +2024-07-28 21:09:07,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=204898.66666666666, ans=0.2 +2024-07-28 21:09:09,773 INFO [train.py:1114] (3/4) Epoch 16, batch 350, loss[loss=0.1357, simple_loss=0.2247, pruned_loss=0.02329, over 4938.00 frames. ], tot_loss[loss=0.179, simple_loss=0.269, pruned_loss=0.04445, over 776282.02 frames. ], batch size: 12, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:17,696 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.458e+01 6.054e+01 6.509e+01 1.036e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-28 21:09:38,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.31 vs. limit=22.5 +2024-07-28 21:09:40,892 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:09:41,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=204965.33333333334, ans=0.125 +2024-07-28 21:09:41,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=204965.33333333334, ans=0.0 +2024-07-28 21:09:44,827 INFO [train.py:1114] (3/4) Epoch 16, batch 400, loss[loss=0.199, simple_loss=0.2987, pruned_loss=0.04963, over 4695.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2677, pruned_loss=0.04396, over 813664.87 frames. ], batch size: 13, lr: 4.74e-03, grad_scale: 32.0 +2024-07-28 21:09:58,123 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.26 vs. 
limit=15.0 +2024-07-28 21:13:43,533 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-28 21:13:44,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=205018.66666666666, ans=0.125 +2024-07-28 21:13:48,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=205032.0, ans=0.1 +2024-07-28 21:13:53,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=205032.0, ans=0.07 +2024-07-28 21:13:59,383 INFO [train.py:1114] (3/4) Epoch 16, batch 450, loss[loss=0.1813, simple_loss=0.2833, pruned_loss=0.03963, over 4635.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04396, over 839324.98 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:14:12,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.99 vs. limit=15.0 +2024-07-28 21:14:13,707 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.574e+01 5.588e+01 6.021e+01 6.553e+01 1.018e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-28 21:14:33,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.71 vs. limit=22.5 +2024-07-28 21:14:39,790 INFO [train.py:1114] (3/4) Epoch 16, batch 500, loss[loss=0.1901, simple_loss=0.2752, pruned_loss=0.05248, over 4690.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2674, pruned_loss=0.04382, over 861555.69 frames. ], batch size: 15, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:14:41,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=205112.0, ans=0.125 +2024-07-28 21:14:49,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=205125.33333333334, ans=0.125 +2024-07-28 21:14:54,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=205138.66666666666, ans=0.025 +2024-07-28 21:15:04,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=205152.0, ans=0.125 +2024-07-28 21:15:11,754 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.11 vs. limit=15.0 +2024-07-28 21:15:13,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=205165.33333333334, ans=0.125 +2024-07-28 21:15:27,933 INFO [train.py:1114] (3/4) Epoch 16, batch 550, loss[loss=0.1822, simple_loss=0.2749, pruned_loss=0.04475, over 4835.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2677, pruned_loss=0.04368, over 877815.85 frames. 
], batch size: 18, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:15:28,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205178.66666666666, ans=0.125 +2024-07-28 21:15:34,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=18.65 vs. limit=15.0 +2024-07-28 21:15:37,689 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.508e+01 5.496e+01 6.135e+01 6.977e+01 1.008e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-28 21:15:47,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=205205.33333333334, ans=0.125 +2024-07-28 21:15:47,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=205205.33333333334, ans=0.2 +2024-07-28 21:15:54,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=205218.66666666666, ans=0.125 +2024-07-28 21:16:01,016 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.44 vs. limit=15.0 +2024-07-28 21:16:04,238 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=19.51 vs. limit=15.0 +2024-07-28 21:16:05,156 INFO [train.py:1114] (3/4) Epoch 16, batch 600, loss[loss=0.1964, simple_loss=0.2956, pruned_loss=0.04859, over 4635.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2676, pruned_loss=0.04361, over 892075.90 frames. ], batch size: 16, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:16:15,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205258.66666666666, ans=0.125 +2024-07-28 21:16:29,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=205285.33333333334, ans=0.125 +2024-07-28 21:16:31,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=205298.66666666666, ans=0.125 +2024-07-28 21:16:33,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=205298.66666666666, ans=0.125 +2024-07-28 21:16:38,110 INFO [train.py:1114] (3/4) Epoch 16, batch 650, loss[loss=0.1824, simple_loss=0.2801, pruned_loss=0.04238, over 4765.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2677, pruned_loss=0.04413, over 903583.27 frames. 
], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:16:39,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=205312.0, ans=0.1 +2024-07-28 21:16:45,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205325.33333333334, ans=0.125 +2024-07-28 21:16:46,461 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.676e+01 5.356e+01 6.014e+01 6.947e+01 8.768e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 21:16:46,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=205325.33333333334, ans=0.125 +2024-07-28 21:16:52,176 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.65 vs. limit=10.0 +2024-07-28 21:16:56,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=205338.66666666666, ans=0.125 +2024-07-28 21:17:11,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=205365.33333333334, ans=0.125 +2024-07-28 21:17:11,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=205365.33333333334, ans=0.0 +2024-07-28 21:17:12,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=205378.66666666666, ans=0.1 +2024-07-28 21:17:12,598 INFO [train.py:1114] (3/4) Epoch 16, batch 700, loss[loss=0.151, simple_loss=0.2457, pruned_loss=0.0282, over 4635.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2685, pruned_loss=0.04469, over 911463.59 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:17:18,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=205392.0, ans=0.0 +2024-07-28 21:17:33,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205418.66666666666, ans=0.0 +2024-07-28 21:17:40,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=205432.0, ans=0.0 +2024-07-28 21:17:45,816 INFO [train.py:1114] (3/4) Epoch 16, batch 750, loss[loss=0.1751, simple_loss=0.2618, pruned_loss=0.04423, over 4690.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2679, pruned_loss=0.04462, over 917737.65 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:17:47,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=205445.33333333334, ans=0.0 +2024-07-28 21:17:53,634 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.216e+01 5.543e+01 6.025e+01 6.972e+01 9.778e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-28 21:18:04,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=205472.0, ans=0.125 +2024-07-28 21:18:08,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.78 vs. 
limit=22.5 +2024-07-28 21:18:10,933 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:18:18,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=205498.66666666666, ans=0.125 +2024-07-28 21:18:21,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.66 vs. limit=15.0 +2024-07-28 21:18:22,218 INFO [train.py:1114] (3/4) Epoch 16, batch 800, loss[loss=0.1877, simple_loss=0.2713, pruned_loss=0.05208, over 4855.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2686, pruned_loss=0.04499, over 923068.41 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:18:27,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205512.0, ans=0.0 +2024-07-28 21:18:30,289 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-28 21:18:36,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=205525.33333333334, ans=0.0 +2024-07-28 21:18:39,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.85 vs. limit=15.0 +2024-07-28 21:18:55,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=205565.33333333334, ans=0.025 +2024-07-28 21:19:00,049 INFO [train.py:1114] (3/4) Epoch 16, batch 850, loss[loss=0.2134, simple_loss=0.3064, pruned_loss=0.06014, over 4663.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2695, pruned_loss=0.04522, over 927407.38 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:19:08,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=205578.66666666666, ans=0.125 +2024-07-28 21:19:11,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.554e+01 6.346e+01 7.200e+01 1.191e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-28 21:19:13,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205592.0, ans=0.125 +2024-07-28 21:19:14,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.89 vs. limit=22.5 +2024-07-28 21:19:42,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=205618.66666666666, ans=0.125 +2024-07-28 21:19:51,900 INFO [train.py:1114] (3/4) Epoch 16, batch 900, loss[loss=0.1467, simple_loss=0.24, pruned_loss=0.02675, over 4858.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2708, pruned_loss=0.0456, over 928026.73 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:19:53,651 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.49 vs. 
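limit=6.0

Throughout these records the three numbers in each train.py loss entry are related by a fixed weighted sum, as in pruned-transducer training: 0.5 * simple_loss + 1.0 * pruned_loss reproduces the logged loss (for example 0.5 * 0.2708 + 0.0456 = 0.181 for the batch 900 tot_loss above). The scales below are inferred from the logged numbers, not read from the recipe:

```python
def combined_loss(simple_loss: float, pruned_loss: float,
                  simple_loss_scale: float = 0.5,
                  pruned_loss_scale: float = 1.0) -> float:
    # Weighted sum consistent with every loss/simple_loss/pruned_loss
    # triple in this log; the scale values are inferred, not documented.
    return simple_loss_scale * simple_loss + pruned_loss_scale * pruned_loss
```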
+2024-07-28 21:20:09,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=205658.66666666666, ans=0.0 +2024-07-28 21:20:22,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=205685.33333333334, ans=0.0 +2024-07-28 21:20:23,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=205685.33333333334, ans=0.0 +2024-07-28 21:20:34,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=205712.0, ans=0.09899494936611666 +2024-07-28 21:20:34,605 INFO [train.py:1114] (3/4) Epoch 16, batch 950, loss[loss=0.1857, simple_loss=0.283, pruned_loss=0.04418, over 4777.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.27, pruned_loss=0.04516, over 929637.83 frames. ], batch size: 12, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:20:38,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.26 vs. limit=22.5 +2024-07-28 21:20:40,116 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:20:42,593 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.753e+01 5.442e+01 5.900e+01 6.572e+01 1.088e+02, threshold=1.180e+02, percent-clipped=0.0 +2024-07-28 21:20:47,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.68 vs. limit=15.0 +2024-07-28 21:20:47,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=205738.66666666666, ans=0.0 +2024-07-28 21:20:52,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=205738.66666666666, ans=0.125 +2024-07-28 21:21:04,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=205765.33333333334, ans=0.125 +2024-07-28 21:21:07,914 INFO [train.py:1114] (3/4) Epoch 16, batch 1000, loss[loss=0.1567, simple_loss=0.258, pruned_loss=0.02773, over 4963.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2704, pruned_loss=0.04517, over 929229.35 frames. ], batch size: 13, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:21:12,989 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.57 vs. limit=15.0 +2024-07-28 21:21:14,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=205792.0, ans=0.04949747468305833 +2024-07-28 21:21:18,582 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.45 vs. limit=15.0 +2024-07-28 21:21:31,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=205805.33333333334, ans=0.2 +2024-07-28 21:21:55,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.42 vs. 
limit=22.5 +2024-07-28 21:21:56,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=205832.0, ans=0.05 +2024-07-28 21:21:57,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=205832.0, ans=0.125 +2024-07-28 21:22:05,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.82 vs. limit=15.0 +2024-07-28 21:22:08,536 INFO [train.py:1114] (3/4) Epoch 16, batch 1050, loss[loss=0.2075, simple_loss=0.3052, pruned_loss=0.05484, over 4877.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2699, pruned_loss=0.04516, over 931681.68 frames. ], batch size: 14, lr: 4.73e-03, grad_scale: 32.0 +2024-07-28 21:22:17,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.06 vs. limit=15.0 +2024-07-28 21:22:19,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=205858.66666666666, ans=0.0 +2024-07-28 21:22:20,840 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.413e+01 5.557e+01 6.013e+01 7.001e+01 9.107e+01, threshold=1.203e+02, percent-clipped=0.0 +2024-07-28 21:22:30,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=205872.0, ans=0.125 +2024-07-28 21:22:33,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=205872.0, ans=0.125 +2024-07-28 21:22:39,395 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.68 vs. limit=15.0 +2024-07-28 21:24:11,060 INFO [train.py:1114] (3/4) Epoch 16, batch 1100, loss[loss=0.1846, simple_loss=0.261, pruned_loss=0.05412, over 4892.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2689, pruned_loss=0.04466, over 934179.35 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:24:24,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=205938.66666666666, ans=0.125 +2024-07-28 21:24:32,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=205938.66666666666, ans=0.125 +2024-07-28 21:24:38,300 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.33 vs. limit=15.0 +2024-07-28 21:25:48,843 INFO [train.py:1114] (3/4) Epoch 16, batch 1150, loss[loss=0.2066, simple_loss=0.2907, pruned_loss=0.06126, over 4898.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2686, pruned_loss=0.04444, over 933983.00 frames. 
], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:29:11,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=205978.66666666666, ans=0.125 +2024-07-28 21:30:01,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=205992.0, ans=0.2 +2024-07-28 21:30:29,625 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.544e+01 5.518e+01 6.042e+01 7.033e+01 1.072e+02, threshold=1.208e+02, percent-clipped=0.0 +2024-07-28 21:30:29,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.46 vs. limit=15.0 +2024-07-28 21:30:30,146 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.44 vs. limit=10.0 +2024-07-28 21:30:33,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=205992.0, ans=0.2 +2024-07-28 21:31:10,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=206005.33333333334, ans=0.1 +2024-07-28 21:32:17,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=206005.33333333334, ans=0.2 +2024-07-28 21:32:17,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.59 vs. limit=15.0 +2024-07-28 21:34:07,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=206018.66666666666, ans=0.125 +2024-07-28 21:34:39,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=206018.66666666666, ans=0.09899494936611666 +2024-07-28 21:34:40,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=206018.66666666666, ans=0.125 +2024-07-28 21:34:40,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=206018.66666666666, ans=0.125 +2024-07-28 21:35:02,926 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.15 vs. limit=10.0 +2024-07-28 21:35:04,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=206032.0, ans=0.1 +2024-07-28 21:35:05,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=206032.0, ans=0.125 +2024-07-28 21:35:05,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=206032.0, ans=0.2 +2024-07-28 21:35:15,736 INFO [train.py:1114] (3/4) Epoch 16, batch 1200, loss[loss=0.1567, simple_loss=0.2658, pruned_loss=0.02382, over 4868.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2706, pruned_loss=0.04526, over 933144.58 frames. 
], batch size: 14, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:35:22,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=206058.66666666666, ans=0.0 +2024-07-28 21:35:39,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=206058.66666666666, ans=0.025 +2024-07-28 21:37:46,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=206098.66666666666, ans=0.025 +2024-07-28 21:37:46,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=206098.66666666666, ans=0.125 +2024-07-28 21:38:11,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=206098.66666666666, ans=0.125 +2024-07-28 21:38:19,717 INFO [train.py:1114] (3/4) Epoch 16, batch 1250, loss[loss=0.1881, simple_loss=0.281, pruned_loss=0.04764, over 4804.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2703, pruned_loss=0.0445, over 937241.67 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:38:46,735 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.568e+01 5.937e+01 6.680e+01 9.097e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-28 21:38:57,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=206125.33333333334, ans=0.125 +2024-07-28 21:38:59,042 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-07-28 21:39:10,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=206138.66666666666, ans=0.125 +2024-07-28 21:39:16,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.19 vs. limit=15.0 +2024-07-28 21:39:18,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=206152.0, ans=0.0 +2024-07-28 21:39:21,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=206152.0, ans=0.1 +2024-07-28 21:39:23,850 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.91 vs. limit=15.0 +2024-07-28 21:39:33,231 INFO [train.py:1114] (3/4) Epoch 16, batch 1300, loss[loss=0.1808, simple_loss=0.2779, pruned_loss=0.04184, over 4728.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2695, pruned_loss=0.04425, over 938773.87 frames. ], batch size: 19, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:39:34,327 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-28 21:39:36,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.82 vs. 
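limit=15.0

The ScheduledFloat entries that dominate this log are regularization hyperparameters (dropout rates, skip and attention rates, balancer probabilities) that follow a piecewise-linear schedule in batch_count, with "ans" being the value currently in effect. A small sketch of that interpolation; the breakpoints here are illustrative only, since the real schedules live in the recipe, not in the log:

```python
def scheduled_float(batch_count: float, schedule) -> float:
    """Piecewise-linear interpolation over (batch_count, value) breakpoints,
    constant before the first breakpoint and after the last one."""
    (x0, y0) = schedule[0]
    if batch_count <= x0:
        return y0
    for (x1, y1) in schedule[1:]:
        if batch_count <= x1:
            return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)
        (x0, y0) = (x1, y1)
    return y0

# e.g. a skip rate annealed from 0.5 to 0.0 over the first 20k batches
# has long since reached its final value at these batch counts:
print(scheduled_float(206178.67, [(0.0, 0.5), (20000.0, 0.0)]))  # -> 0.0
```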
+2024-07-28 21:39:38,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206178.66666666666, ans=0.125 +2024-07-28 21:40:02,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=206218.66666666666, ans=0.125 +2024-07-28 21:40:11,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206232.0, ans=0.1 +2024-07-28 21:40:15,097 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=11.16 vs. limit=15.0 +2024-07-28 21:40:17,468 INFO [train.py:1114] (3/4) Epoch 16, batch 1350, loss[loss=0.1584, simple_loss=0.254, pruned_loss=0.03138, over 4756.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2692, pruned_loss=0.0438, over 940929.31 frames. ], batch size: 13, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:40:26,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=13.67 vs. limit=22.5 +2024-07-28 21:40:26,348 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.922e+01 5.660e+01 6.386e+01 7.583e+01 1.369e+02, threshold=1.277e+02, percent-clipped=2.0 +2024-07-28 21:40:38,785 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=206258.66666666666, ans=0.2 +2024-07-28 21:40:53,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=206285.33333333334, ans=0.09899494936611666 +2024-07-28 21:40:53,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.67 vs. limit=10.0 +2024-07-28 21:41:16,164 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.87 vs. limit=15.0 +2024-07-28 21:41:32,873 INFO [train.py:1114] (3/4) Epoch 16, batch 1400, loss[loss=0.124, simple_loss=0.207, pruned_loss=0.02045, over 4697.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2686, pruned_loss=0.044, over 942583.28 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0 +2024-07-28 21:41:34,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206312.0, ans=0.1 +2024-07-28 21:41:37,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=206312.0, ans=0.05 +2024-07-28 21:41:38,395 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 21:41:44,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=206325.33333333334, ans=0.2 +2024-07-28 21:41:45,414 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.70 vs. 
limit=15.0
+2024-07-28 21:42:33,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=206352.0, ans=0.125
+2024-07-28 21:45:05,380 INFO [train.py:1114] (3/4) Epoch 16, batch 1450, loss[loss=0.1812, simple_loss=0.2764, pruned_loss=0.04296, over 4678.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2689, pruned_loss=0.04399, over 942500.35 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:45:23,233 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.652e+01 5.557e+01 6.212e+01 6.784e+01 1.021e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-28 21:45:37,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=206405.33333333334, ans=0.125
+2024-07-28 21:46:21,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=206405.33333333334, ans=0.0
+2024-07-28 21:46:30,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=206405.33333333334, ans=0.0
+2024-07-28 21:46:40,138 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.51 vs. limit=15.0
+2024-07-28 21:46:44,898 INFO [train.py:1114] (3/4) Epoch 16, batch 1500, loss[loss=0.169, simple_loss=0.2713, pruned_loss=0.0334, over 4813.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2697, pruned_loss=0.04443, over 942101.30 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:46:52,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=12.86 vs. limit=15.0
+2024-07-28 21:46:58,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=206458.66666666666, ans=10.0
+2024-07-28 21:47:25,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=206485.33333333334, ans=0.125
+2024-07-28 21:47:39,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=206498.66666666666, ans=0.025
+2024-07-28 21:47:44,839 INFO [train.py:1114] (3/4) Epoch 16, batch 1550, loss[loss=0.229, simple_loss=0.3061, pruned_loss=0.0759, over 4884.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2704, pruned_loss=0.04499, over 938535.71 frames. ], batch size: 15, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:48:06,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=206512.0, ans=0.125
+2024-07-28 21:48:20,505 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.523e+01 5.574e+01 6.317e+01 7.056e+01 9.850e+01, threshold=1.263e+02, percent-clipped=0.0
+2024-07-28 21:48:24,013 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0
+2024-07-28 21:48:39,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn1.whiten, num_groups=1, num_channels=192, metric=11.39 vs. limit=22.5
+2024-07-28 21:48:53,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=206552.0, ans=0.1
+2024-07-28 21:49:10,279 INFO [train.py:1114] (3/4) Epoch 16, batch 1600, loss[loss=0.1632, simple_loss=0.2601, pruned_loss=0.03319, over 4880.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2708, pruned_loss=0.04531, over 937612.14 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:49:44,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=206605.33333333334, ans=0.0
+2024-07-28 21:50:00,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=206618.66666666666, ans=0.1
+2024-07-28 21:50:02,112 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0
+2024-07-28 21:50:27,684 INFO [train.py:1114] (3/4) Epoch 16, batch 1650, loss[loss=0.1961, simple_loss=0.2904, pruned_loss=0.0509, over 4659.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2709, pruned_loss=0.0455, over 938081.09 frames. ], batch size: 14, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:50:30,783 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=206645.33333333334, ans=0.0
+2024-07-28 21:50:34,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=206658.66666666666, ans=0.125
+2024-07-28 21:50:35,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=206658.66666666666, ans=0.0
+2024-07-28 21:50:36,683 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.778e+01 5.640e+01 6.319e+01 7.228e+01 1.155e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 21:50:51,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=206685.33333333334, ans=0.125
+2024-07-28 21:51:08,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=206698.66666666666, ans=0.125
+2024-07-28 21:51:10,364 INFO [train.py:1114] (3/4) Epoch 16, batch 1700, loss[loss=0.1544, simple_loss=0.2369, pruned_loss=0.03596, over 4732.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2705, pruned_loss=0.04545, over 939822.93 frames. ], batch size: 11, lr: 4.72e-03, grad_scale: 32.0
+2024-07-28 21:51:51,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=206738.66666666666, ans=0.125
+2024-07-28 21:51:56,607 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 21:52:39,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=206752.0, ans=0.0
+2024-07-28 21:52:41,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=206765.33333333334, ans=0.125
+2024-07-28 21:52:59,252 INFO [train.py:1114] (3/4) Epoch 16, batch 1750, loss[loss=0.1475, simple_loss=0.223, pruned_loss=0.03604, over 4801.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2696, pruned_loss=0.04483, over 940755.92 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:53:03,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=206778.66666666666, ans=0.07
+2024-07-28 21:53:07,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=206792.0, ans=0.125
+2024-07-28 21:53:09,574 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.795e+01 6.698e+01 8.081e+01 1.290e+02, threshold=1.340e+02, percent-clipped=1.0
+2024-07-28 21:53:17,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=206805.33333333334, ans=0.0
+2024-07-28 21:53:44,571 INFO [train.py:1114] (3/4) Epoch 16, batch 1800, loss[loss=0.1554, simple_loss=0.2512, pruned_loss=0.02984, over 4639.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2695, pruned_loss=0.04455, over 941330.62 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:54:03,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=206872.0, ans=0.125
+2024-07-28 21:54:07,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206885.33333333334, ans=0.125
+2024-07-28 21:54:10,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=206885.33333333334, ans=0.125
+2024-07-28 21:54:24,982 INFO [train.py:1114] (3/4) Epoch 16, batch 1850, loss[loss=0.1895, simple_loss=0.2698, pruned_loss=0.05467, over 4806.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2699, pruned_loss=0.0448, over 941091.46 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:54:35,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=206925.33333333334, ans=0.1
+2024-07-28 21:54:37,013 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.631e+01 6.106e+01 7.258e+01 1.128e+02, threshold=1.221e+02, percent-clipped=0.0
+2024-07-28 21:54:45,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=206938.66666666666, ans=0.125
+2024-07-28 21:54:57,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=206952.0, ans=0.0
+2024-07-28 21:55:08,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=206965.33333333334, ans=0.125
+2024-07-28 21:55:17,727 INFO [train.py:1114] (3/4) Epoch 16, batch 1900, loss[loss=0.1768, simple_loss=0.2789, pruned_loss=0.03738, over 4669.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2695, pruned_loss=0.04433, over 942079.14 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:55:17,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=206978.66666666666, ans=0.02
+2024-07-28 21:55:27,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.58 vs. limit=10.0
+2024-07-28 21:55:37,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.04 vs. limit=15.0
+2024-07-28 21:55:40,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207005.33333333334, ans=0.0
+2024-07-28 21:55:40,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=207018.66666666666, ans=0.0
+2024-07-28 21:55:48,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=207018.66666666666, ans=0.2
+2024-07-28 21:56:08,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207032.0, ans=0.125
+2024-07-28 21:56:10,897 INFO [train.py:1114] (3/4) Epoch 16, batch 1950, loss[loss=0.1439, simple_loss=0.246, pruned_loss=0.02087, over 4900.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2714, pruned_loss=0.04507, over 944118.97 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:56:21,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=207045.33333333334, ans=10.0
+2024-07-28 21:56:30,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.457e+01 5.561e+01 6.255e+01 6.715e+01 9.914e+01, threshold=1.251e+02, percent-clipped=0.0
+2024-07-28 21:56:42,371 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=207072.0, ans=0.125
+2024-07-28 21:56:53,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=207085.33333333334, ans=0.0
+2024-07-28 21:56:54,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207085.33333333334, ans=0.125
+2024-07-28 21:56:55,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=207085.33333333334, ans=0.125
+2024-07-28 21:56:58,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=207098.66666666666, ans=0.125
+2024-07-28 21:56:59,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=207098.66666666666, ans=0.0
+2024-07-28 21:57:05,250 INFO [train.py:1114] (3/4) Epoch 16, batch 2000, loss[loss=0.191, simple_loss=0.272, pruned_loss=0.05498, over 4816.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.272, pruned_loss=0.0457, over 941574.15 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:57:05,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207112.0, ans=0.125
+2024-07-28 21:57:06,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=207112.0, ans=0.0
+2024-07-28 21:57:15,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207112.0, ans=0.125
+2024-07-28 21:57:27,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207138.66666666666, ans=0.125
+2024-07-28 21:57:28,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207138.66666666666, ans=0.125
+2024-07-28 21:57:41,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207138.66666666666, ans=0.1
+2024-07-28 21:58:14,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=207165.33333333334, ans=0.5
+2024-07-28 21:58:20,292 INFO [train.py:1114] (3/4) Epoch 16, batch 2050, loss[loss=0.1369, simple_loss=0.2208, pruned_loss=0.02648, over 4623.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2704, pruned_loss=0.04538, over 939245.87 frames. ], batch size: 11, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:58:23,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=207178.66666666666, ans=0.0
+2024-07-28 21:58:36,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207192.0, ans=0.1
+2024-07-28 21:58:36,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.615e+01 6.198e+01 7.046e+01 1.043e+02, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 21:58:54,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=207205.33333333334, ans=0.05
+2024-07-28 21:59:16,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.87 vs. limit=15.0
+2024-07-28 21:59:20,888 INFO [train.py:1114] (3/4) Epoch 16, batch 2100, loss[loss=0.1922, simple_loss=0.2779, pruned_loss=0.05321, over 4758.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.27, pruned_loss=0.04497, over 940941.93 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 21:59:42,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=207258.66666666666, ans=0.025
+2024-07-28 21:59:50,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=207272.0, ans=0.2
+2024-07-28 21:59:50,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0
+2024-07-28 22:00:06,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=207285.33333333334, ans=0.125
+2024-07-28 22:01:09,926 INFO [train.py:1114] (3/4) Epoch 16, batch 2150, loss[loss=0.1797, simple_loss=0.2696, pruned_loss=0.04496, over 4893.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2687, pruned_loss=0.04413, over 944043.38 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:01:10,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=207312.0, ans=0.125
+2024-07-28 22:01:14,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207312.0, ans=0.1
+2024-07-28 22:01:55,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=6.0
+2024-07-28 22:01:58,057 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.463e+01 6.183e+01 7.182e+01 9.894e+01, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 22:02:21,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207338.66666666666, ans=0.1
+2024-07-28 22:02:36,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=207365.33333333334, ans=0.125
+2024-07-28 22:02:47,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207365.33333333334, ans=0.125
+2024-07-28 22:02:47,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=207365.33333333334, ans=0.09899494936611666
+2024-07-28 22:02:53,140 INFO [train.py:1114] (3/4) Epoch 16, batch 2200, loss[loss=0.1747, simple_loss=0.2679, pruned_loss=0.04077, over 4816.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2682, pruned_loss=0.04375, over 943325.88 frames. ], batch size: 14, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:03:02,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=207392.0, ans=0.0
+2024-07-28 22:03:25,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=207418.66666666666, ans=0.125
+2024-07-28 22:03:47,380 INFO [train.py:1114] (3/4) Epoch 16, batch 2250, loss[loss=0.1532, simple_loss=0.244, pruned_loss=0.03126, over 4695.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2682, pruned_loss=0.04382, over 941560.14 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:03:58,322 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.399e+01 5.527e+01 6.028e+01 7.010e+01 1.004e+02, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 22:03:58,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=207458.66666666666, ans=0.025
+2024-07-28 22:04:27,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207485.33333333334, ans=0.1
+2024-07-28 22:04:27,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=207485.33333333334, ans=0.0
+2024-07-28 22:04:30,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=207498.66666666666, ans=0.035
+2024-07-28 22:04:53,005 INFO [train.py:1114] (3/4) Epoch 16, batch 2300, loss[loss=0.1532, simple_loss=0.2379, pruned_loss=0.03426, over 4937.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2676, pruned_loss=0.04378, over 940148.11 frames. ], batch size: 12, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:04:55,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=207512.0, ans=0.125
+2024-07-28 22:05:05,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=207512.0, ans=0.125
+2024-07-28 22:05:31,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207538.66666666666, ans=0.1
+2024-07-28 22:05:32,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207538.66666666666, ans=0.1
+2024-07-28 22:05:36,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=207538.66666666666, ans=0.125
+2024-07-28 22:05:47,043 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:05:52,806 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:05:56,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207578.66666666666, ans=0.1
+2024-07-28 22:05:57,166 INFO [train.py:1114] (3/4) Epoch 16, batch 2350, loss[loss=0.1815, simple_loss=0.2761, pruned_loss=0.04342, over 4637.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.267, pruned_loss=0.04356, over 941886.44 frames. ], batch size: 13, lr: 4.71e-03, grad_scale: 32.0
+2024-07-28 22:06:13,018 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.426e+01 5.459e+01 6.024e+01 6.952e+01 8.823e+01, threshold=1.205e+02, percent-clipped=0.0
+2024-07-28 22:06:16,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=207592.0, ans=0.04949747468305833
+2024-07-28 22:06:23,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=207605.33333333334, ans=0.125
+2024-07-28 22:06:25,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=207618.66666666666, ans=0.025
+2024-07-28 22:06:31,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=207618.66666666666, ans=0.1
+2024-07-28 22:06:43,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=207632.0, ans=0.1
+2024-07-28 22:06:44,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.07 vs. limit=15.0
+2024-07-28 22:06:45,792 INFO [train.py:1114] (3/4) Epoch 16, batch 2400, loss[loss=0.1787, simple_loss=0.2693, pruned_loss=0.0441, over 4640.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2675, pruned_loss=0.04394, over 941533.62 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:06:45,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=207645.33333333334, ans=0.125
+2024-07-28 22:07:02,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=207672.0, ans=0.125
+2024-07-28 22:07:17,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=207698.66666666666, ans=0.05
+2024-07-28 22:07:27,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=207712.0, ans=0.0
+2024-07-28 22:07:33,019 INFO [train.py:1114] (3/4) Epoch 16, batch 2450, loss[loss=0.1872, simple_loss=0.2834, pruned_loss=0.0455, over 4694.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.04448, over 937428.10 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:07:38,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=207712.0, ans=0.05
+2024-07-28 22:07:38,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=207712.0, ans=0.125
+2024-07-28 22:07:45,691 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.532e+01 5.574e+01 6.192e+01 6.939e+01 1.187e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-28 22:07:46,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.74 vs. limit=15.0
+2024-07-28 22:07:46,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=207725.33333333334, ans=0.2
+2024-07-28 22:07:49,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207725.33333333334, ans=0.125
+2024-07-28 22:08:18,704 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=207765.33333333334, ans=0.025
+2024-07-28 22:08:21,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=207765.33333333334, ans=0.125
+2024-07-28 22:08:23,505 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.21 vs. limit=22.5
+2024-07-28 22:08:24,491 INFO [train.py:1114] (3/4) Epoch 16, batch 2500, loss[loss=0.1759, simple_loss=0.2655, pruned_loss=0.04311, over 4815.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2693, pruned_loss=0.04465, over 939613.16 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:08:37,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=207778.66666666666, ans=0.125
+2024-07-28 22:08:49,665 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=207792.0, ans=0.0
+2024-07-28 22:09:05,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=207805.33333333334, ans=0.2
+2024-07-28 22:09:07,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=207805.33333333334, ans=0.125
+2024-07-28 22:09:10,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=207818.66666666666, ans=0.2
+2024-07-28 22:09:12,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=207818.66666666666, ans=0.0
+2024-07-28 22:09:23,880 INFO [train.py:1114] (3/4) Epoch 16, batch 2550, loss[loss=0.1472, simple_loss=0.2244, pruned_loss=0.03502, over 4804.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2692, pruned_loss=0.04442, over 938943.21 frames. ], batch size: 11, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:09:35,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=207845.33333333334, ans=0.025
+2024-07-28 22:09:38,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=207858.66666666666, ans=0.1
+2024-07-28 22:09:38,982 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.535e+01 6.272e+01 7.311e+01 1.144e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-28 22:10:41,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=207872.0, ans=0.125
+2024-07-28 22:12:57,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=207898.66666666666, ans=0.5
+2024-07-28 22:13:06,687 INFO [train.py:1114] (3/4) Epoch 16, batch 2600, loss[loss=0.161, simple_loss=0.2539, pruned_loss=0.0341, over 4898.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2693, pruned_loss=0.04462, over 937405.14 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:13:53,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.16 vs. limit=6.0
+2024-07-28 22:13:55,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=207925.33333333334, ans=0.125
+2024-07-28 22:14:14,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=207952.0, ans=0.0
+2024-07-28 22:15:02,481 INFO [train.py:1114] (3/4) Epoch 16, batch 2650, loss[loss=0.1896, simple_loss=0.2717, pruned_loss=0.05375, over 4614.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2696, pruned_loss=0.04471, over 939514.05 frames. ], batch size: 16, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:15:07,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=207978.66666666666, ans=0.0
+2024-07-28 22:15:41,445 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.682e+01 6.199e+01 7.227e+01 9.483e+01, threshold=1.240e+02, percent-clipped=0.0
+2024-07-28 22:22:29,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=208005.33333333334, ans=0.125
+2024-07-28 22:22:56,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=208018.66666666666, ans=0.0
+2024-07-28 22:27:28,520 INFO [train.py:1114] (3/4) Epoch 16, batch 2700, loss[loss=0.2047, simple_loss=0.3027, pruned_loss=0.05339, over 4723.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2693, pruned_loss=0.04491, over 939758.79 frames. ], batch size: 14, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:27:53,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=208072.0, ans=0.0
+2024-07-28 22:30:40,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=208098.66666666666, ans=0.0
+2024-07-28 22:30:45,957 INFO [train.py:1114] (3/4) Epoch 16, batch 2750, loss[loss=0.174, simple_loss=0.2537, pruned_loss=0.04717, over 4709.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2686, pruned_loss=0.04471, over 939684.67 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:30:46,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=208112.0, ans=0.125
+2024-07-28 22:31:02,417 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.384e+01 5.637e+01 6.771e+01 7.935e+01 1.190e+02, threshold=1.354e+02, percent-clipped=0.0
+2024-07-28 22:31:34,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208152.0, ans=0.125
+2024-07-28 22:31:58,121 INFO [train.py:1114] (3/4) Epoch 16, batch 2800, loss[loss=0.2394, simple_loss=0.3117, pruned_loss=0.08349, over 3320.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2692, pruned_loss=0.04496, over 937840.42 frames. ], batch size: 36, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:32:13,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=208192.0, ans=0.125
+2024-07-28 22:32:15,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=208205.33333333334, ans=0.0
+2024-07-28 22:32:20,120 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0
+2024-07-28 22:32:20,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208205.33333333334, ans=0.1
+2024-07-28 22:32:33,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208232.0, ans=0.125
+2024-07-28 22:32:38,585 INFO [train.py:1114] (3/4) Epoch 16, batch 2850, loss[loss=0.1801, simple_loss=0.2761, pruned_loss=0.04207, over 4961.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.04447, over 935632.71 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:32:38,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208245.33333333334, ans=0.125
+2024-07-28 22:32:47,409 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.805e+01 6.352e+01 7.417e+01 1.040e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-28 22:32:49,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208258.66666666666, ans=0.1
+2024-07-28 22:33:36,456 INFO [train.py:1114] (3/4) Epoch 16, batch 2900, loss[loss=0.1498, simple_loss=0.2501, pruned_loss=0.02477, over 4832.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2699, pruned_loss=0.0444, over 939788.39 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:33:45,844 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:34:02,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.58 vs. limit=12.0
+2024-07-28 22:34:24,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208365.33333333334, ans=0.125
+2024-07-28 22:34:36,448 INFO [train.py:1114] (3/4) Epoch 16, batch 2950, loss[loss=0.1881, simple_loss=0.265, pruned_loss=0.0556, over 4708.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2693, pruned_loss=0.04437, over 939445.79 frames. ], batch size: 12, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:34:46,788 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.287e+01 5.436e+01 5.951e+01 6.814e+01 8.870e+01, threshold=1.190e+02, percent-clipped=0.0
+2024-07-28 22:34:49,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=208392.0, ans=0.1
+2024-07-28 22:34:50,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.34 vs. limit=15.0
+2024-07-28 22:35:15,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=208432.0, ans=0.0
+2024-07-28 22:35:19,627 INFO [train.py:1114] (3/4) Epoch 16, batch 3000, loss[loss=0.1635, simple_loss=0.256, pruned_loss=0.0355, over 4755.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.04389, over 938722.86 frames. ], batch size: 13, lr: 4.70e-03, grad_scale: 32.0
+2024-07-28 22:35:19,627 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-28 22:37:08,208 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1628, simple_loss=0.2657, pruned_loss=0.02996, over 944034.00 frames.
+2024-07-28 22:37:08,209 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-28 22:38:00,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.53 vs. limit=15.0
+2024-07-28 22:38:04,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=208458.66666666666, ans=0.0
+2024-07-28 22:38:09,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=208472.0, ans=0.125
+2024-07-28 22:38:42,179 INFO [train.py:1114] (3/4) Epoch 16, batch 3050, loss[loss=0.178, simple_loss=0.2742, pruned_loss=0.04095, over 4639.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2692, pruned_loss=0.04392, over 937667.92 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:38:43,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=208512.0, ans=0.025
+2024-07-28 22:38:43,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=208512.0, ans=0.125
+2024-07-28 22:38:45,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.60 vs. limit=22.5
+2024-07-28 22:38:48,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=208525.33333333334, ans=0.0
+2024-07-28 22:38:51,725 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.977e+01 5.726e+01 6.358e+01 7.092e+01 1.092e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-28 22:38:54,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=208525.33333333334, ans=0.05
+2024-07-28 22:39:15,664 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=12.99 vs. limit=15.0
+2024-07-28 22:39:28,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=208565.33333333334, ans=0.125
+2024-07-28 22:39:40,202 INFO [train.py:1114] (3/4) Epoch 16, batch 3100, loss[loss=0.2299, simple_loss=0.3143, pruned_loss=0.07278, over 4593.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2692, pruned_loss=0.04437, over 938416.51 frames. ], batch size: 16, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:39:42,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=208578.66666666666, ans=0.2
+2024-07-28 22:41:02,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=208618.66666666666, ans=0.125
+2024-07-28 22:41:04,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=208632.0, ans=0.125
+2024-07-28 22:41:13,450 INFO [train.py:1114] (3/4) Epoch 16, batch 3150, loss[loss=0.1951, simple_loss=0.294, pruned_loss=0.04808, over 4656.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2692, pruned_loss=0.04457, over 938348.18 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:41:29,375 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.743e+01 5.555e+01 6.673e+01 7.571e+01 1.321e+02, threshold=1.335e+02, percent-clipped=1.0
+2024-07-28 22:41:29,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=208658.66666666666, ans=0.025
+2024-07-28 22:41:30,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=208658.66666666666, ans=0.0
+2024-07-28 22:42:06,211 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.59 vs. limit=15.0
+2024-07-28 22:42:08,992 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=208698.66666666666, ans=0.125
+2024-07-28 22:42:09,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=208698.66666666666, ans=0.2
+2024-07-28 22:42:14,164 INFO [train.py:1114] (3/4) Epoch 16, batch 3200, loss[loss=0.1751, simple_loss=0.2605, pruned_loss=0.04488, over 4824.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2678, pruned_loss=0.04383, over 939930.13 frames. ], batch size: 13, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:42:38,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=208738.66666666666, ans=0.125
+2024-07-28 22:42:38,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=208738.66666666666, ans=0.125
+2024-07-28 22:43:03,063 INFO [train.py:1114] (3/4) Epoch 16, batch 3250, loss[loss=0.21, simple_loss=0.2983, pruned_loss=0.06092, over 4933.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2684, pruned_loss=0.0439, over 940933.70 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:43:03,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=208778.66666666666, ans=0.2
+2024-07-28 22:43:05,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.48 vs. limit=22.5
+2024-07-28 22:43:13,632 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.572e+01 5.431e+01 6.056e+01 6.661e+01 1.204e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-28 22:43:22,024 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.91 vs. limit=15.0
+2024-07-28 22:44:03,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=208845.33333333334, ans=0.1
+2024-07-28 22:44:04,203 INFO [train.py:1114] (3/4) Epoch 16, batch 3300, loss[loss=0.2174, simple_loss=0.3019, pruned_loss=0.06648, over 4729.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2677, pruned_loss=0.04457, over 940904.48 frames. ], batch size: 19, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:44:07,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=208845.33333333334, ans=0.1
+2024-07-28 22:44:13,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=208845.33333333334, ans=0.125
+2024-07-28 22:44:19,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=208858.66666666666, ans=0.0
+2024-07-28 22:44:23,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.47 vs. limit=22.5
+2024-07-28 22:44:55,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=208898.66666666666, ans=0.125
+2024-07-28 22:45:00,565 INFO [train.py:1114] (3/4) Epoch 16, batch 3350, loss[loss=0.201, simple_loss=0.2845, pruned_loss=0.05868, over 4632.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.268, pruned_loss=0.04441, over 938864.94 frames. ], batch size: 17, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:45:01,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=208912.0, ans=0.04949747468305833
+2024-07-28 22:45:02,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=208912.0, ans=0.0
+2024-07-28 22:45:11,078 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.569e+01 6.115e+01 6.727e+01 9.175e+01, threshold=1.223e+02, percent-clipped=0.0
+2024-07-28 22:45:19,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=208938.66666666666, ans=0.125
+2024-07-28 22:45:57,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=208965.33333333334, ans=0.125
+2024-07-28 22:45:59,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=208978.66666666666, ans=0.2
+2024-07-28 22:46:00,446 INFO [train.py:1114] (3/4) Epoch 16, batch 3400, loss[loss=0.1429, simple_loss=0.2263, pruned_loss=0.02975, over 4808.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2683, pruned_loss=0.04466, over 937234.55 frames. ], batch size: 11, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:46:01,558 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=22.27 vs. limit=22.5
+2024-07-28 22:46:01,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=208978.66666666666, ans=0.0
+2024-07-28 22:46:01,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=208978.66666666666, ans=0.125
+2024-07-28 22:46:06,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=208978.66666666666, ans=0.0
+2024-07-28 22:46:14,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.34 vs. limit=12.0
+2024-07-28 22:46:37,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=209005.33333333334, ans=0.2
+2024-07-28 22:46:46,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=209005.33333333334, ans=0.0
+2024-07-28 22:46:56,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=209018.66666666666, ans=0.125
+2024-07-28 22:47:04,416 INFO [train.py:1114] (3/4) Epoch 16, batch 3450, loss[loss=0.1878, simple_loss=0.2901, pruned_loss=0.04275, over 4727.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2686, pruned_loss=0.04448, over 937280.87 frames. ], batch size: 19, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:47:12,847 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.483e+01 6.084e+01 6.778e+01 9.605e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-28 22:48:01,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=209072.0, ans=0.125
+2024-07-28 22:48:42,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=209085.33333333334, ans=0.125
+2024-07-28 22:48:55,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=209098.66666666666, ans=0.0
+2024-07-28 22:48:56,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=209098.66666666666, ans=0.5
+2024-07-28 22:48:59,083 INFO [train.py:1114] (3/4) Epoch 16, batch 3500, loss[loss=0.1744, simple_loss=0.2584, pruned_loss=0.04525, over 4932.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2682, pruned_loss=0.04439, over 938057.51 frames. ], batch size: 12, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:49:01,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=209112.0, ans=0.125
+2024-07-28 22:49:06,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.60 vs. limit=15.0
+2024-07-28 22:49:10,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=209125.33333333334, ans=0.125
+2024-07-28 22:49:10,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.66 vs. limit=22.5
+2024-07-28 22:49:16,635 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.97 vs. limit=15.0
+2024-07-28 22:49:16,808 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.88 vs. limit=22.5
+2024-07-28 22:49:20,359 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:49:24,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_na.min_abs, batch_count=209152.0, ans=0.02
+2024-07-28 22:49:27,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.71 vs. limit=15.0
+2024-07-28 22:49:32,617 INFO [train.py:1114] (3/4) Epoch 16, batch 3550, loss[loss=0.1939, simple_loss=0.294, pruned_loss=0.04696, over 4664.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2689, pruned_loss=0.04469, over 938776.14 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 64.0
+2024-07-28 22:49:38,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.56 vs. limit=15.0
+2024-07-28 22:49:41,601 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.365e+01 5.691e+01 6.213e+01 7.399e+01 9.936e+01, threshold=1.243e+02, percent-clipped=0.0
+2024-07-28 22:49:56,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=209218.66666666666, ans=0.0
+2024-07-28 22:50:02,561 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:50:03,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.69 vs. limit=22.5
+2024-07-28 22:50:06,329 INFO [train.py:1114] (3/4) Epoch 16, batch 3600, loss[loss=0.1544, simple_loss=0.2412, pruned_loss=0.03381, over 4962.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.04405, over 940776.82 frames. ], batch size: 13, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:50:41,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=209298.66666666666, ans=0.125
+2024-07-28 22:50:42,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=209298.66666666666, ans=0.0
+2024-07-28 22:50:42,924 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0
+2024-07-28 22:50:46,880 INFO [train.py:1114] (3/4) Epoch 16, batch 3650, loss[loss=0.1941, simple_loss=0.2833, pruned_loss=0.0525, over 4891.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.267, pruned_loss=0.04379, over 941124.05 frames. ], batch size: 15, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:50:54,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209325.33333333334, ans=0.125
+2024-07-28 22:50:57,253 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.574e+01 6.186e+01 7.126e+01 1.218e+02, threshold=1.237e+02, percent-clipped=0.0
+2024-07-28 22:51:04,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=209338.66666666666, ans=0.0
+2024-07-28 22:51:05,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.09 vs. limit=15.0
+2024-07-28 22:51:12,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209352.0, ans=0.125
+2024-07-28 22:51:15,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=209365.33333333334, ans=0.0
+2024-07-28 22:51:15,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=209365.33333333334, ans=0.2
+2024-07-28 22:51:23,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=209365.33333333334, ans=0.125
+2024-07-28 22:51:26,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=209378.66666666666, ans=0.125
+2024-07-28 22:51:27,275 INFO [train.py:1114] (3/4) Epoch 16, batch 3700, loss[loss=0.1814, simple_loss=0.27, pruned_loss=0.04633, over 4934.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2674, pruned_loss=0.04358, over 942095.01 frames. ], batch size: 14, lr: 4.69e-03, grad_scale: 32.0
+2024-07-28 22:51:31,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=209378.66666666666, ans=0.0
+2024-07-28 22:51:33,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=209392.0, ans=0.1
+2024-07-28 22:51:46,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=209405.33333333334, ans=0.04949747468305833
+2024-07-28 22:51:47,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.07 vs. limit=15.0
+2024-07-28 22:51:51,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=209418.66666666666, ans=0.125
+2024-07-28 22:52:02,369 INFO [train.py:1114] (3/4) Epoch 16, batch 3750, loss[loss=0.1669, simple_loss=0.2516, pruned_loss=0.04109, over 4813.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04388, over 943824.79 frames. ], batch size: 11, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:52:08,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=209445.33333333334, ans=0.0
+2024-07-28 22:52:16,090 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.68 vs. limit=10.0
+2024-07-28 22:52:17,619 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.513e+01 6.031e+01 6.754e+01 8.866e+01, threshold=1.206e+02, percent-clipped=0.0
+2024-07-28 22:52:19,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.42 vs. limit=15.0
+2024-07-28 22:52:31,840 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=209485.33333333334, ans=0.125
+2024-07-28 22:52:37,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.90 vs. limit=15.0
+2024-07-28 22:52:38,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.89 vs. limit=15.0
+2024-07-28 22:52:41,570 INFO [train.py:1114] (3/4) Epoch 16, batch 3800, loss[loss=0.1782, simple_loss=0.2782, pruned_loss=0.03907, over 4801.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2683, pruned_loss=0.04372, over 941833.57 frames. ], batch size: 14, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:52:53,104 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:52:59,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=209525.33333333334, ans=0.0
+2024-07-28 22:53:24,772 INFO [train.py:1114] (3/4) Epoch 16, batch 3850, loss[loss=0.1849, simple_loss=0.2758, pruned_loss=0.04698, over 4612.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2688, pruned_loss=0.04402, over 942280.33 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:53:33,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209592.0, ans=0.125
+2024-07-28 22:53:34,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209592.0, ans=0.0
+2024-07-28 22:53:35,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209592.0, ans=0.1
+2024-07-28 22:53:44,018 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.595e+01 5.438e+01 6.014e+01 6.827e+01 9.667e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-28 22:53:48,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=209605.33333333334, ans=0.125
+2024-07-28 22:53:58,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=209618.66666666666, ans=0.0
+2024-07-28 22:54:11,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209645.33333333334, ans=0.1
+2024-07-28 22:54:11,538 INFO [train.py:1114] (3/4) Epoch 16, batch 3900, loss[loss=0.1884, simple_loss=0.2704, pruned_loss=0.05321, over 4798.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2686, pruned_loss=0.04377, over 942350.50 frames. ], batch size: 14, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:54:14,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=209645.33333333334, ans=0.1
+2024-07-28 22:54:22,890 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.10 vs. limit=15.0
+2024-07-28 22:54:23,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.max_abs, batch_count=209658.66666666666, ans=10.0
+2024-07-28 22:54:37,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=209672.0, ans=22.5
+2024-07-28 22:55:00,449 INFO [train.py:1114] (3/4) Epoch 16, batch 3950, loss[loss=0.1938, simple_loss=0.2827, pruned_loss=0.05243, over 4840.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2689, pruned_loss=0.0443, over 944383.25 frames. ], batch size: 16, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:55:04,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209712.0, ans=0.0
+2024-07-28 22:55:09,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=209725.33333333334, ans=0.2
+2024-07-28 22:55:14,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.589e+01 5.934e+01 6.636e+01 9.172e+01, threshold=1.187e+02, percent-clipped=0.0
+2024-07-28 22:55:16,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=209725.33333333334, ans=0.0
+2024-07-28 22:55:23,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209738.66666666666, ans=0.125
+2024-07-28 22:55:49,231 INFO [train.py:1114] (3/4) Epoch 16, batch 4000, loss[loss=0.1692, simple_loss=0.2534, pruned_loss=0.04254, over 4772.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2682, pruned_loss=0.04439, over 940846.13 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:55:58,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=209792.0, ans=0.125
+2024-07-28 22:56:06,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.45 vs. limit=15.0
+2024-07-28 22:56:09,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=209805.33333333334, ans=0.125
+2024-07-28 22:56:14,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=209818.66666666666, ans=0.025
+2024-07-28 22:56:29,038 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 22:56:39,776 INFO [train.py:1114] (3/4) Epoch 16, batch 4050, loss[loss=0.2244, simple_loss=0.3046, pruned_loss=0.07209, over 3527.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.268, pruned_loss=0.04418, over 939591.63 frames. ], batch size: 35, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:56:50,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=209858.66666666666, ans=0.0
+2024-07-28 22:56:51,882 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.300e+01 5.462e+01 6.013e+01 7.148e+01 1.181e+02, threshold=1.203e+02, percent-clipped=0.0
+2024-07-28 22:56:58,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=209872.0, ans=0.2
+2024-07-28 22:57:00,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=209872.0, ans=0.1
+2024-07-28 22:57:14,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.78 vs. limit=15.0
+2024-07-28 22:57:25,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=209898.66666666666, ans=0.0
+2024-07-28 22:57:27,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=209898.66666666666, ans=0.2
+2024-07-28 22:57:32,727 INFO [train.py:1114] (3/4) Epoch 16, batch 4100, loss[loss=0.2327, simple_loss=0.3212, pruned_loss=0.07203, over 4893.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.269, pruned_loss=0.04481, over 938470.50 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:57:51,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.58 vs. limit=22.5
+2024-07-28 22:57:51,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209912.0, ans=0.1
+2024-07-28 22:58:10,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=209952.0, ans=0.2
+2024-07-28 22:58:12,285 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.02 vs. limit=10.0
+2024-07-28 22:58:15,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=209952.0, ans=0.125
+2024-07-28 22:58:22,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=209965.33333333334, ans=0.125
+2024-07-28 22:58:22,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=209965.33333333334, ans=0.125
+2024-07-28 22:58:22,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=209965.33333333334, ans=0.1
+2024-07-28 22:58:28,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=209965.33333333334, ans=0.0
+2024-07-28 22:58:29,582 INFO [train.py:1114] (3/4) Epoch 16, batch 4150, loss[loss=0.1983, simple_loss=0.2837, pruned_loss=0.05644, over 4821.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2686, pruned_loss=0.0451, over 937891.96 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:58:39,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=209992.0, ans=0.125
+2024-07-28 22:58:40,273 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.631e+01 5.817e+01 6.318e+01 7.435e+01 1.178e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-28 22:58:46,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210005.33333333334, ans=0.1
+2024-07-28 22:58:55,530 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=15.25 vs. limit=22.5
+2024-07-28 22:59:08,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.74 vs. limit=15.0
+2024-07-28 22:59:08,737 INFO [train.py:1114] (3/4) Epoch 16, batch 4200, loss[loss=0.2097, simple_loss=0.3022, pruned_loss=0.05859, over 4910.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2679, pruned_loss=0.04429, over 939480.89 frames. ], batch size: 15, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:59:12,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=210045.33333333334, ans=0.125
+2024-07-28 22:59:18,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=210058.66666666666, ans=0.125
+2024-07-28 22:59:21,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=210072.0, ans=0.0
+2024-07-28 22:59:28,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=210072.0, ans=0.0
+2024-07-28 22:59:30,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210072.0, ans=0.1
+2024-07-28 22:59:33,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=210085.33333333334, ans=0.125
+2024-07-28 22:59:36,736 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.32 vs. limit=6.0
+2024-07-28 22:59:44,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=210098.66666666666, ans=0.0
+2024-07-28 22:59:46,502 INFO [train.py:1114] (3/4) Epoch 16, batch 4250, loss[loss=0.1888, simple_loss=0.2787, pruned_loss=0.04949, over 4643.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2686, pruned_loss=0.04454, over 940813.88 frames. ], batch size: 12, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 22:59:46,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=210112.0, ans=0.025
+2024-07-28 22:59:53,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210125.33333333334, ans=0.1
+2024-07-28 22:59:55,619 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.557e+01 6.153e+01 6.698e+01 1.216e+02, threshold=1.231e+02, percent-clipped=0.0
+2024-07-28 23:00:01,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210138.66666666666, ans=0.1
+2024-07-28 23:00:10,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.25 vs. limit=12.0
+2024-07-28 23:00:15,233 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.39 vs. limit=15.0
+2024-07-28 23:00:17,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=210165.33333333334, ans=0.125
+2024-07-28 23:00:20,173 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-28 23:00:21,357 INFO [train.py:1114] (3/4) Epoch 16, batch 4300, loss[loss=0.1921, simple_loss=0.2842, pruned_loss=0.04997, over 4757.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2685, pruned_loss=0.0444, over 940179.00 frames. ], batch size: 13, lr: 4.68e-03, grad_scale: 32.0
+2024-07-28 23:00:23,746 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=10.19 vs. limit=15.0
+2024-07-28 23:00:35,909 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.99 vs. limit=10.0
+2024-07-28 23:00:38,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=210205.33333333334, ans=0.015
+2024-07-28 23:00:40,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210205.33333333334, ans=0.0
+2024-07-28 23:00:54,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=210232.0, ans=0.0
+2024-07-28 23:00:58,189 INFO [train.py:1114] (3/4) Epoch 16, batch 4350, loss[loss=0.2016, simple_loss=0.2946, pruned_loss=0.05428, over 4762.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2681, pruned_loss=0.04401, over 940963.70 frames.
], batch size: 13, lr: 4.68e-03, grad_scale: 32.0 +2024-07-28 23:01:18,167 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.387e+01 6.031e+01 6.844e+01 1.009e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-28 23:01:24,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=210272.0, ans=0.125 +2024-07-28 23:01:28,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=210285.33333333334, ans=0.025 +2024-07-28 23:01:36,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=210298.66666666666, ans=0.025 +2024-07-28 23:01:40,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=210298.66666666666, ans=0.05 +2024-07-28 23:01:42,106 INFO [train.py:1114] (3/4) Epoch 16, batch 4400, loss[loss=0.1894, simple_loss=0.2833, pruned_loss=0.04773, over 4811.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.04416, over 940830.08 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:01:42,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.33 vs. limit=15.0 +2024-07-28 23:01:46,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=210312.0, ans=0.07 +2024-07-28 23:01:57,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=210325.33333333334, ans=0.0 +2024-07-28 23:02:05,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=210338.66666666666, ans=0.0 +2024-07-28 23:02:06,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=210338.66666666666, ans=0.125 +2024-07-28 23:02:19,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=210365.33333333334, ans=0.05 +2024-07-28 23:02:21,303 INFO [train.py:1114] (3/4) Epoch 16, batch 4450, loss[loss=0.1399, simple_loss=0.2251, pruned_loss=0.02734, over 4948.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.04438, over 938641.39 frames. 
], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:02:22,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210378.66666666666, ans=0.1 +2024-07-28 23:02:24,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=210378.66666666666, ans=0.0 +2024-07-28 23:02:27,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=210392.0, ans=0.025 +2024-07-28 23:02:30,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=210392.0, ans=0.125 +2024-07-28 23:02:30,821 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.581e+01 5.996e+01 6.828e+01 9.558e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-28 23:02:33,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=210392.0, ans=0.125 +2024-07-28 23:02:48,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=210432.0, ans=0.025 +2024-07-28 23:02:52,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=210432.0, ans=0.125 +2024-07-28 23:02:55,308 INFO [train.py:1114] (3/4) Epoch 16, batch 4500, loss[loss=0.18, simple_loss=0.2736, pruned_loss=0.04318, over 4740.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2694, pruned_loss=0.04485, over 938354.06 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:00,088 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:03:01,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=210458.66666666666, ans=0.2 +2024-07-28 23:03:10,288 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.59 vs. limit=5.0 +2024-07-28 23:03:21,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=210498.66666666666, ans=0.125 +2024-07-28 23:03:28,424 INFO [train.py:1114] (3/4) Epoch 16, batch 4550, loss[loss=0.2019, simple_loss=0.2811, pruned_loss=0.06131, over 4902.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.269, pruned_loss=0.04514, over 940082.91 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:03:33,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=210512.0, ans=0.125 +2024-07-28 23:03:34,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210512.0, ans=0.1 +2024-07-28 23:03:37,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=210525.33333333334, ans=0.0 +2024-07-28 23:03:39,557 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=16.43 vs. 
limit=22.5 +2024-07-28 23:03:39,689 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.876e+01 5.739e+01 6.533e+01 7.196e+01 1.162e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 23:03:46,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210538.66666666666, ans=0.1 +2024-07-28 23:04:01,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=210565.33333333334, ans=0.125 +2024-07-28 23:04:06,027 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.89 vs. limit=6.0 +2024-07-28 23:04:06,251 INFO [train.py:1114] (3/4) Epoch 16, batch 4600, loss[loss=0.1948, simple_loss=0.2867, pruned_loss=0.05142, over 4476.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04472, over 938322.37 frames. ], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:27,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=210618.66666666666, ans=0.04949747468305833 +2024-07-28 23:04:27,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.11 vs. limit=12.0 +2024-07-28 23:04:39,385 INFO [train.py:1114] (3/4) Epoch 16, batch 4650, loss[loss=0.1975, simple_loss=0.2967, pruned_loss=0.04914, over 4825.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2692, pruned_loss=0.04465, over 940267.78 frames. ], batch size: 16, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:04:52,460 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.612e+01 5.652e+01 6.180e+01 7.051e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-28 23:04:53,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.40 vs. limit=10.0 +2024-07-28 23:05:02,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=210672.0, ans=0.0 +2024-07-28 23:05:10,723 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.97 vs. limit=15.0 +2024-07-28 23:05:13,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=210698.66666666666, ans=0.0 +2024-07-28 23:05:13,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=210698.66666666666, ans=0.1 +2024-07-28 23:05:19,329 INFO [train.py:1114] (3/4) Epoch 16, batch 4700, loss[loss=0.1812, simple_loss=0.268, pruned_loss=0.04723, over 4704.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2693, pruned_loss=0.04455, over 937976.69 frames. 
], batch size: 11, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:05:20,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=210712.0, ans=0.125 +2024-07-28 23:05:33,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=210738.66666666666, ans=0.125 +2024-07-28 23:05:34,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.93 vs. limit=15.0 +2024-07-28 23:05:36,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=210738.66666666666, ans=0.04949747468305833 +2024-07-28 23:05:48,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=210752.0, ans=0.125 +2024-07-28 23:05:56,317 INFO [train.py:1114] (3/4) Epoch 16, batch 4750, loss[loss=0.1955, simple_loss=0.277, pruned_loss=0.05704, over 4475.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2693, pruned_loss=0.04443, over 936064.63 frames. ], batch size: 21, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:06:02,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=210792.0, ans=0.0 +2024-07-28 23:06:04,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=210792.0, ans=0.125 +2024-07-28 23:06:06,439 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.562e+01 6.169e+01 6.958e+01 1.016e+02, threshold=1.234e+02, percent-clipped=0.0 +2024-07-28 23:06:08,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210792.0, ans=0.1 +2024-07-28 23:06:10,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=210805.33333333334, ans=0.0 +2024-07-28 23:06:10,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=210805.33333333334, ans=0.2 +2024-07-28 23:06:11,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=210805.33333333334, ans=0.1 +2024-07-28 23:06:30,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=210832.0, ans=0.2 +2024-07-28 23:06:30,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=210832.0, ans=0.0 +2024-07-28 23:06:34,619 INFO [train.py:1114] (3/4) Epoch 16, batch 4800, loss[loss=0.1736, simple_loss=0.2742, pruned_loss=0.03656, over 4689.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2692, pruned_loss=0.04454, over 933147.82 frames. 
], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:06:48,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=210872.0, ans=0.1 +2024-07-28 23:06:54,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=210885.33333333334, ans=0.0 +2024-07-28 23:06:57,336 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:07:07,830 INFO [train.py:1114] (3/4) Epoch 16, batch 4850, loss[loss=0.1766, simple_loss=0.2757, pruned_loss=0.03876, over 4739.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2699, pruned_loss=0.04486, over 932209.21 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:07:25,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=210912.0, ans=0.0 +2024-07-28 23:07:29,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=210925.33333333334, ans=0.125 +2024-07-28 23:07:30,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=210925.33333333334, ans=0.125 +2024-07-28 23:07:31,469 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.823e+01 5.391e+01 6.068e+01 6.775e+01 1.177e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:07:31,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=210925.33333333334, ans=0.125 +2024-07-28 23:07:37,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=210938.66666666666, ans=0.0 +2024-07-28 23:07:43,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=210952.0, ans=0.1 +2024-07-28 23:07:48,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=210952.0, ans=0.07 +2024-07-28 23:07:59,285 INFO [train.py:1114] (3/4) Epoch 16, batch 4900, loss[loss=0.1715, simple_loss=0.2707, pruned_loss=0.03613, over 4747.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2694, pruned_loss=0.04463, over 934085.62 frames. ], batch size: 13, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:06,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=210992.0, ans=0.0 +2024-07-28 23:08:12,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211005.33333333334, ans=0.1 +2024-07-28 23:08:13,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.37 vs. limit=12.0 +2024-07-28 23:08:13,864 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.79 vs. limit=15.0 +2024-07-28 23:08:16,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211005.33333333334, ans=0.1 +2024-07-28 23:08:33,346 INFO [train.py:1114] (3/4) Epoch 16, batch 4950, loss[loss=0.2417, simple_loss=0.3157, pruned_loss=0.08383, over 3548.00 frames. 
], tot_loss[loss=0.1808, simple_loss=0.2705, pruned_loss=0.04553, over 931434.79 frames. ], batch size: 36, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:08:38,911 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:08:41,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=211058.66666666666, ans=0.0 +2024-07-28 23:08:42,920 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.320e+01 5.430e+01 5.977e+01 6.818e+01 1.036e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-28 23:08:47,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=211072.0, ans=0.0 +2024-07-28 23:08:49,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211072.0, ans=0.125 +2024-07-28 23:08:51,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.74 vs. limit=15.0 +2024-07-28 23:09:08,840 INFO [train.py:1114] (3/4) Epoch 16, batch 5000, loss[loss=0.1771, simple_loss=0.2744, pruned_loss=0.03995, over 4664.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2708, pruned_loss=0.04591, over 935163.21 frames. ], batch size: 14, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:10,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=211112.0, ans=0.025 +2024-07-28 23:09:15,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=211125.33333333334, ans=0.2 +2024-07-28 23:09:26,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=211138.66666666666, ans=0.125 +2024-07-28 23:09:41,975 INFO [train.py:1114] (3/4) Epoch 16, batch 5050, loss[loss=0.1393, simple_loss=0.2209, pruned_loss=0.02887, over 4857.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2705, pruned_loss=0.04576, over 937457.13 frames. ], batch size: 12, lr: 4.67e-03, grad_scale: 32.0 +2024-07-28 23:09:46,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-07-28 23:09:53,129 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.620e+01 6.064e+01 6.522e+01 1.168e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-28 23:10:16,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=211232.0, ans=0.125 +2024-07-28 23:10:18,016 INFO [train.py:1114] (3/4) Epoch 16, batch 5100, loss[loss=0.1691, simple_loss=0.257, pruned_loss=0.04062, over 4778.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2711, pruned_loss=0.04622, over 934637.07 frames. 
], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:30,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=211272.0, ans=0.125 +2024-07-28 23:10:37,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=211285.33333333334, ans=0.125 +2024-07-28 23:10:39,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=211285.33333333334, ans=0.125 +2024-07-28 23:10:42,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=211285.33333333334, ans=0.125 +2024-07-28 23:10:43,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=211285.33333333334, ans=0.125 +2024-07-28 23:10:44,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211298.66666666666, ans=0.125 +2024-07-28 23:10:51,318 INFO [train.py:1114] (3/4) Epoch 16, batch 5150, loss[loss=0.1686, simple_loss=0.263, pruned_loss=0.03706, over 4822.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2712, pruned_loss=0.04615, over 935633.85 frames. ], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:10:54,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=211312.0, ans=0.125 +2024-07-28 23:10:54,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=211312.0, ans=0.125 +2024-07-28 23:11:00,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer_ff3.min_abs, batch_count=211325.33333333334, ans=0.2 +2024-07-28 23:11:00,629 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.854e+01 6.432e+01 7.346e+01 1.040e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-28 23:11:02,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=211325.33333333334, ans=0.125 +2024-07-28 23:11:06,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=211338.66666666666, ans=0.125 +2024-07-28 23:11:18,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=211352.0, ans=0.125 +2024-07-28 23:11:26,553 INFO [train.py:1114] (3/4) Epoch 16, batch 5200, loss[loss=0.1613, simple_loss=0.2532, pruned_loss=0.03468, over 4672.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2716, pruned_loss=0.0458, over 935705.30 frames. 
], batch size: 14, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:11:42,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=211405.33333333334, ans=0.0 +2024-07-28 23:11:45,431 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=211405.33333333334, ans=0.0 +2024-07-28 23:11:50,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=211418.66666666666, ans=0.125 +2024-07-28 23:11:55,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=211432.0, ans=0.125 +2024-07-28 23:12:01,691 INFO [train.py:1114] (3/4) Epoch 16, batch 5250, loss[loss=0.1516, simple_loss=0.2539, pruned_loss=0.02461, over 4898.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2705, pruned_loss=0.04531, over 935209.83 frames. ], batch size: 13, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:09,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211445.33333333334, ans=0.1 +2024-07-28 23:12:12,938 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.256e+01 5.544e+01 6.376e+01 7.640e+01 1.111e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-28 23:12:37,219 INFO [train.py:1114] (3/4) Epoch 16, batch 5300, loss[loss=0.1774, simple_loss=0.2753, pruned_loss=0.03969, over 4595.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2685, pruned_loss=0.04457, over 933899.19 frames. ], batch size: 16, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:12:39,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=211512.0, ans=0.0 +2024-07-28 23:12:40,319 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.67 vs. limit=10.0 +2024-07-28 23:12:43,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211525.33333333334, ans=0.1 +2024-07-28 23:12:44,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211525.33333333334, ans=0.1 +2024-07-28 23:12:58,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=211552.0, ans=0.125 +2024-07-28 23:13:06,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=211565.33333333334, ans=0.2 +2024-07-28 23:13:10,603 INFO [train.py:1114] (3/4) Epoch 16, batch 5350, loss[loss=0.1652, simple_loss=0.2537, pruned_loss=0.03837, over 4542.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2686, pruned_loss=0.04442, over 935870.25 frames. 
], batch size: 10, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:12,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=211578.66666666666, ans=0.1 +2024-07-28 23:13:15,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=211578.66666666666, ans=0.2 +2024-07-28 23:13:19,948 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 5.493e+01 6.071e+01 6.914e+01 1.248e+02, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:13:21,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=211592.0, ans=0.0 +2024-07-28 23:13:29,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=211605.33333333334, ans=0.025 +2024-07-28 23:13:29,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=211605.33333333334, ans=0.125 +2024-07-28 23:13:44,729 INFO [train.py:1114] (3/4) Epoch 16, batch 5400, loss[loss=0.1966, simple_loss=0.2909, pruned_loss=0.05116, over 4130.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2699, pruned_loss=0.04499, over 929827.80 frames. ], batch size: 25, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:13:44,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=211645.33333333334, ans=0.2 +2024-07-28 23:13:49,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=211645.33333333334, ans=0.125 +2024-07-28 23:13:51,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=211658.66666666666, ans=0.2 +2024-07-28 23:14:04,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211685.33333333334, ans=0.1 +2024-07-28 23:14:12,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=211698.66666666666, ans=0.0 +2024-07-28 23:14:16,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=211698.66666666666, ans=0.1 +2024-07-28 23:14:18,000 INFO [train.py:1114] (3/4) Epoch 16, batch 5450, loss[loss=0.1482, simple_loss=0.2282, pruned_loss=0.03413, over 4701.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2688, pruned_loss=0.04435, over 933084.23 frames. ], batch size: 11, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:14:22,860 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:14:27,868 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.730e+01 5.574e+01 6.234e+01 6.810e+01 1.084e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:14:33,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=211738.66666666666, ans=0.0 +2024-07-28 23:14:42,725 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.62 vs. 
limit=15.0 +2024-07-28 23:14:43,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=211752.0, ans=0.1 +2024-07-28 23:14:45,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=211752.0, ans=0.1 +2024-07-28 23:14:47,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=211765.33333333334, ans=0.125 +2024-07-28 23:14:53,032 INFO [train.py:1114] (3/4) Epoch 16, batch 5500, loss[loss=0.1792, simple_loss=0.2701, pruned_loss=0.04414, over 4393.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2676, pruned_loss=0.04417, over 931390.50 frames. ], batch size: 26, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:11,383 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=211805.33333333334, ans=0.0 +2024-07-28 23:15:14,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=211818.66666666666, ans=0.125 +2024-07-28 23:15:16,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=211818.66666666666, ans=0.09899494936611666 +2024-07-28 23:15:28,504 INFO [train.py:1114] (3/4) Epoch 16, batch 5550, loss[loss=0.175, simple_loss=0.2636, pruned_loss=0.0432, over 4700.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2674, pruned_loss=0.04412, over 933282.91 frames. ], batch size: 12, lr: 4.66e-03, grad_scale: 32.0 +2024-07-28 23:15:29,403 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:15:33,362 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:15:37,921 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.698e+01 6.304e+01 7.513e+01 1.256e+02, threshold=1.261e+02, percent-clipped=1.0 +2024-07-28 23:15:44,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=211872.0, ans=0.0 +2024-07-28 23:15:47,232 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.84 vs. limit=10.0 +2024-07-28 23:16:02,700 INFO [train.py:1114] (3/4) Epoch 16, batch 5600, loss[loss=0.1904, simple_loss=0.2884, pruned_loss=0.04618, over 4740.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2674, pruned_loss=0.044, over 934457.22 frames. ], batch size: 14, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:07,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.89 vs. 
limit=15.0 +2024-07-28 23:16:22,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=211925.33333333334, ans=0.04949747468305833 +2024-07-28 23:16:30,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=211952.0, ans=0.125 +2024-07-28 23:16:36,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=211952.0, ans=0.0 +2024-07-28 23:16:49,032 INFO [train.py:1114] (3/4) Epoch 16, batch 5650, loss[loss=0.1913, simple_loss=0.28, pruned_loss=0.05133, over 4469.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2669, pruned_loss=0.04428, over 936847.35 frames. ], batch size: 21, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:16:51,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=211978.66666666666, ans=0.125 +2024-07-28 23:16:58,580 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.506e+01 6.230e+01 6.941e+01 1.207e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-28 23:17:02,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=212005.33333333334, ans=0.025 +2024-07-28 23:17:13,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=212018.66666666666, ans=0.2 +2024-07-28 23:17:15,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212032.0, ans=0.1 +2024-07-28 23:17:22,864 INFO [train.py:1114] (3/4) Epoch 16, batch 5700, loss[loss=0.1804, simple_loss=0.2768, pruned_loss=0.04203, over 4699.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2677, pruned_loss=0.04431, over 937866.80 frames. ], batch size: 13, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:17:27,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=212045.33333333334, ans=0.0 +2024-07-28 23:17:39,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=212072.0, ans=0.2 +2024-07-28 23:17:46,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212085.33333333334, ans=0.125 +2024-07-28 23:17:51,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=212085.33333333334, ans=0.125 +2024-07-28 23:17:51,880 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:17:57,368 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.45 vs. limit=10.0 +2024-07-28 23:17:58,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212098.66666666666, ans=0.125 +2024-07-28 23:17:59,820 INFO [train.py:1114] (3/4) Epoch 16, batch 5750, loss[loss=0.204, simple_loss=0.2989, pruned_loss=0.05461, over 4722.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2686, pruned_loss=0.04477, over 938065.11 frames. 
], batch size: 19, lr: 4.66e-03, grad_scale: 64.0 +2024-07-28 23:18:02,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=212112.0, ans=0.025 +2024-07-28 23:18:06,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212125.33333333334, ans=0.1 +2024-07-28 23:18:09,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=212125.33333333334, ans=0.125 +2024-07-28 23:18:10,180 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.670e+01 6.129e+01 6.618e+01 9.069e+01, threshold=1.226e+02, percent-clipped=0.0 +2024-07-28 23:18:13,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=212125.33333333334, ans=0.125 +2024-07-28 23:18:14,972 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=212138.66666666666, ans=0.125 +2024-07-28 23:18:16,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=212138.66666666666, ans=0.0 +2024-07-28 23:18:22,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=212138.66666666666, ans=0.0 +2024-07-28 23:18:23,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=212138.66666666666, ans=0.09899494936611666 +2024-07-28 23:18:32,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212165.33333333334, ans=0.125 +2024-07-28 23:18:38,583 INFO [train.py:1114] (3/4) Epoch 16, batch 5800, loss[loss=0.2205, simple_loss=0.3072, pruned_loss=0.0669, over 4715.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2695, pruned_loss=0.04526, over 937416.14 frames. ], batch size: 19, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:18:46,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=212192.0, ans=0.125 +2024-07-28 23:18:48,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212192.0, ans=0.1 +2024-07-28 23:18:49,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=212192.0, ans=0.07 +2024-07-28 23:18:52,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=212205.33333333334, ans=0.0 +2024-07-28 23:18:56,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.75 vs. limit=15.0 +2024-07-28 23:19:12,013 INFO [train.py:1114] (3/4) Epoch 16, batch 5850, loss[loss=0.2063, simple_loss=0.2935, pruned_loss=0.05954, over 4529.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2679, pruned_loss=0.04433, over 938031.93 frames. 
], batch size: 21, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:19:20,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212258.66666666666, ans=0.125 +2024-07-28 23:19:21,303 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+01 5.632e+01 6.313e+01 6.909e+01 9.080e+01, threshold=1.263e+02, percent-clipped=0.0 +2024-07-28 23:19:25,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=212272.0, ans=0.125 +2024-07-28 23:19:34,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=212285.33333333334, ans=0.025 +2024-07-28 23:19:43,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=212298.66666666666, ans=0.125 +2024-07-28 23:19:46,152 INFO [train.py:1114] (3/4) Epoch 16, batch 5900, loss[loss=0.1987, simple_loss=0.304, pruned_loss=0.04666, over 4675.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04399, over 938524.29 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:00,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.50 vs. limit=15.0 +2024-07-28 23:20:03,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=212338.66666666666, ans=0.025 +2024-07-28 23:20:10,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212352.0, ans=0.125 +2024-07-28 23:20:15,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212365.33333333334, ans=0.1 +2024-07-28 23:20:19,844 INFO [train.py:1114] (3/4) Epoch 16, batch 5950, loss[loss=0.1687, simple_loss=0.2694, pruned_loss=0.03398, over 4681.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2677, pruned_loss=0.04376, over 940273.00 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:28,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=212392.0, ans=0.125 +2024-07-28 23:20:29,213 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.762e+01 5.558e+01 6.099e+01 6.527e+01 9.669e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-28 23:20:53,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212432.0, ans=0.125 +2024-07-28 23:20:55,234 INFO [train.py:1114] (3/4) Epoch 16, batch 6000, loss[loss=0.1699, simple_loss=0.2668, pruned_loss=0.03652, over 4228.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2672, pruned_loss=0.04365, over 937973.14 frames. ], batch size: 26, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:20:55,235 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-28 23:21:00,930 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.4510, 4.7020, 4.8104, 4.8648], device='cuda:3') +2024-07-28 23:21:07,047 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1625, simple_loss=0.2653, pruned_loss=0.02984, over 944034.00 frames. 
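The records above are a raw icefall-style Zipformer training log: `INFO [scaling.py:214] ScheduledFloat` lines report regularization hyperparameters (dropout probabilities, skip rates, balancer limits) evaluated at the current `batch_count`, `Whitening` lines compare a feature-covariance metric against its scheduled limit, and the `WARNING [optim.py:487]` lines give gradient-norm quartiles against the clipping threshold. As a rough illustration of the first of these, the sketch below shows a batch-count-driven scheduled value with piecewise-linear interpolation between breakpoints; the class name, the breakpoints, and the interpolation rule are illustrative assumptions, not the actual `scaling.py` implementation.

```python
from bisect import bisect_right

class ScheduledFloatSketch:
    """Illustrative stand-in for a hyperparameter scheduled on batch_count.

    Stores (batch_count, value) breakpoints, interpolates linearly between
    them, and clamps at both ends -- the kind of rule that yields log lines
    such as `...dropout_p, batch_count=212445.33, ans=0.1` once the schedule
    has reached its final value.
    """

    def __init__(self, *points):
        self.xs = [p[0] for p in points]  # breakpoint batch counts (ascending)
        self.ys = [p[1] for p in points]  # scheduled values at those breakpoints

    def value(self, batch_count: float) -> float:
        if batch_count <= self.xs[0]:
            return self.ys[0]          # clamp before the first breakpoint
        if batch_count >= self.xs[-1]:
            return self.ys[-1]         # clamp after the last breakpoint
        i = bisect_right(self.xs, batch_count)  # first breakpoint above
        x0, x1 = self.xs[i - 1], self.xs[i]
        y0, y1 = self.ys[i - 1], self.ys[i]
        return y0 + (batch_count - x0) / (x1 - x0) * (y1 - y0)

# Hypothetical schedule: anneal a dropout_p from 0.3 down to 0.1 over 20k batches.
dropout_p = ScheduledFloatSketch((0.0, 0.3), (20000.0, 0.1))
print(dropout_p.value(212445.33))  # 0.1 -- clamped tail, matching ans=0.1 in the log
```

The same batch-count-keyed pattern would explain why neighboring log lines with the same `batch_count` report different `ans` values for different named sub-modules: each module owns its own schedule and is queried at the shared counter.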
+2024-07-28 23:21:07,048 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-28 23:21:07,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=212445.33333333334, ans=0.09899494936611666 +2024-07-28 23:21:08,825 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.03 vs. limit=15.0 +2024-07-28 23:21:11,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=212445.33333333334, ans=0.125 +2024-07-28 23:21:13,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=212458.66666666666, ans=0.125 +2024-07-28 23:21:19,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=212458.66666666666, ans=0.125 +2024-07-28 23:21:27,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=212485.33333333334, ans=0.125 +2024-07-28 23:21:32,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=212485.33333333334, ans=0.0 +2024-07-28 23:21:41,039 INFO [train.py:1114] (3/4) Epoch 16, batch 6050, loss[loss=0.1884, simple_loss=0.2715, pruned_loss=0.05263, over 4781.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.267, pruned_loss=0.04354, over 938823.55 frames. ], batch size: 12, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:21:45,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer_na.min_abs, batch_count=212512.0, ans=0.02 +2024-07-28 23:21:56,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=212525.33333333334, ans=0.0 +2024-07-28 23:21:57,155 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.422e+01 5.490e+01 6.163e+01 6.956e+01 9.204e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-28 23:22:08,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212538.66666666666, ans=0.125 +2024-07-28 23:22:15,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=212552.0, ans=0.1 +2024-07-28 23:22:24,107 INFO [train.py:1114] (3/4) Epoch 16, batch 6100, loss[loss=0.1642, simple_loss=0.2612, pruned_loss=0.03357, over 4698.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2677, pruned_loss=0.04381, over 938346.52 frames. ], batch size: 15, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:22:25,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=212578.66666666666, ans=0.0 +2024-07-28 23:22:46,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=212592.0, ans=0.0 +2024-07-28 23:22:57,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=212605.33333333334, ans=0.125 +2024-07-28 23:23:02,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. 
limit=15.0 +2024-07-28 23:23:04,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.00 vs. limit=22.5 +2024-07-28 23:23:14,151 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.62 vs. limit=6.0 +2024-07-28 23:23:20,108 INFO [train.py:1114] (3/4) Epoch 16, batch 6150, loss[loss=0.2476, simple_loss=0.3311, pruned_loss=0.08201, over 3247.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.267, pruned_loss=0.04341, over 936714.93 frames. ], batch size: 36, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:29,817 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.196e+01 5.601e+01 6.236e+01 7.046e+01 1.205e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-28 23:23:34,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-07-28 23:23:45,428 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.03 vs. limit=15.0 +2024-07-28 23:23:53,680 INFO [train.py:1114] (3/4) Epoch 16, batch 6200, loss[loss=0.2025, simple_loss=0.2765, pruned_loss=0.06426, over 4747.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2676, pruned_loss=0.04374, over 936076.92 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 64.0 +2024-07-28 23:23:56,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=212712.0, ans=0.2 +2024-07-28 23:24:00,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=212725.33333333334, ans=0.0 +2024-07-28 23:24:02,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212725.33333333334, ans=0.125 +2024-07-28 23:24:03,256 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:24:06,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=212738.66666666666, ans=0.025 +2024-07-28 23:24:06,633 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.13 vs. limit=15.0 +2024-07-28 23:24:08,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212738.66666666666, ans=0.125 +2024-07-28 23:24:10,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=212738.66666666666, ans=0.1 +2024-07-28 23:24:11,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=212738.66666666666, ans=0.125 +2024-07-28 23:24:16,000 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.14 vs. limit=6.0 +2024-07-28 23:24:19,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.20 vs. 
limit=10.0 +2024-07-28 23:24:23,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=212752.0, ans=0.125 +2024-07-28 23:24:25,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=212765.33333333334, ans=0.0 +2024-07-28 23:24:30,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=212765.33333333334, ans=0.025 +2024-07-28 23:24:32,616 INFO [train.py:1114] (3/4) Epoch 16, batch 6250, loss[loss=0.1833, simple_loss=0.2646, pruned_loss=0.05103, over 4810.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2682, pruned_loss=0.04454, over 932554.31 frames. ], batch size: 14, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:24:53,131 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.346e+01 5.825e+01 6.547e+01 7.445e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-28 23:25:06,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=212805.33333333334, ans=0.0 +2024-07-28 23:25:07,913 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.40 vs. limit=22.5 +2024-07-28 23:25:09,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=212805.33333333334, ans=0.0 +2024-07-28 23:25:31,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=212832.0, ans=0.015 +2024-07-28 23:25:38,077 INFO [train.py:1114] (3/4) Epoch 16, batch 6300, loss[loss=0.1711, simple_loss=0.252, pruned_loss=0.04505, over 4534.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2693, pruned_loss=0.04523, over 929433.56 frames. ], batch size: 10, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:25:54,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=212858.66666666666, ans=0.0 +2024-07-28 23:25:56,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=212872.0, ans=0.125 +2024-07-28 23:26:02,208 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.88 vs. limit=10.0 +2024-07-28 23:26:02,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=212872.0, ans=0.125 +2024-07-28 23:26:06,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=212885.33333333334, ans=0.125 +2024-07-28 23:26:07,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212885.33333333334, ans=0.1 +2024-07-28 23:26:09,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=212885.33333333334, ans=0.0 +2024-07-28 23:26:20,515 INFO [train.py:1114] (3/4) Epoch 16, batch 6350, loss[loss=0.1804, simple_loss=0.2714, pruned_loss=0.04469, over 4531.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2682, pruned_loss=0.04445, over 933446.65 frames. 
], batch size: 21, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:26:20,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=212912.0, ans=0.1 +2024-07-28 23:26:45,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.666e+01 6.076e+01 6.815e+01 1.142e+02, threshold=1.215e+02, percent-clipped=0.0 +2024-07-28 23:26:47,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=212925.33333333334, ans=0.125 +2024-07-28 23:26:59,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=212952.0, ans=0.125 +2024-07-28 23:27:14,264 INFO [train.py:1114] (3/4) Epoch 16, batch 6400, loss[loss=0.1779, simple_loss=0.2647, pruned_loss=0.04551, over 4641.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2689, pruned_loss=0.04464, over 934972.32 frames. ], batch size: 13, lr: 4.65e-03, grad_scale: 32.0 +2024-07-28 23:27:25,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=212992.0, ans=0.125 +2024-07-28 23:27:31,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213005.33333333334, ans=0.0 +2024-07-28 23:27:36,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=213005.33333333334, ans=0.125 +2024-07-28 23:27:43,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.72 vs. limit=15.0 +2024-07-28 23:27:45,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=213032.0, ans=0.125 +2024-07-28 23:27:46,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=213032.0, ans=0.2 +2024-07-28 23:27:47,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=213032.0, ans=0.1 +2024-07-28 23:27:51,962 INFO [train.py:1114] (3/4) Epoch 16, batch 6450, loss[loss=0.1826, simple_loss=0.2776, pruned_loss=0.04377, over 4473.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04466, over 938523.40 frames. ], batch size: 21, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:28:05,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.90 vs. limit=15.0 +2024-07-28 23:28:08,734 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.830e+01 6.533e+01 7.899e+01 1.104e+02, threshold=1.307e+02, percent-clipped=0.0 +2024-07-28 23:28:14,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-07-28 23:28:19,337 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.34 vs. limit=15.0 +2024-07-28 23:28:24,139 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.94 vs. 
limit=10.0 +2024-07-28 23:28:34,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=213112.0, ans=0.0 +2024-07-28 23:28:35,509 INFO [train.py:1114] (3/4) Epoch 16, batch 6500, loss[loss=0.2148, simple_loss=0.2905, pruned_loss=0.06956, over 3287.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2687, pruned_loss=0.04438, over 939561.43 frames. ], batch size: 36, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:28:51,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=12.0 +2024-07-28 23:29:06,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=213152.0, ans=0.2 +2024-07-28 23:29:12,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213165.33333333334, ans=0.0 +2024-07-28 23:29:19,567 INFO [train.py:1114] (3/4) Epoch 16, batch 6550, loss[loss=0.1404, simple_loss=0.2269, pruned_loss=0.02692, over 4799.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.04413, over 942741.95 frames. ], batch size: 11, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:29:31,658 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.619e+01 5.761e+01 6.311e+01 7.241e+01 1.321e+02, threshold=1.262e+02, percent-clipped=1.0 +2024-07-28 23:29:38,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.60 vs. limit=15.0 +2024-07-28 23:29:57,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=213232.0, ans=0.07 +2024-07-28 23:29:59,645 INFO [train.py:1114] (3/4) Epoch 16, batch 6600, loss[loss=0.1859, simple_loss=0.2877, pruned_loss=0.04209, over 4938.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2693, pruned_loss=0.04408, over 944613.11 frames. ], batch size: 14, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:30:07,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213258.66666666666, ans=0.1 +2024-07-28 23:30:09,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213258.66666666666, ans=0.125 +2024-07-28 23:30:11,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=213258.66666666666, ans=0.07 +2024-07-28 23:30:29,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=213298.66666666666, ans=0.125 +2024-07-28 23:30:35,780 INFO [train.py:1114] (3/4) Epoch 16, batch 6650, loss[loss=0.1781, simple_loss=0.2694, pruned_loss=0.04336, over 4630.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2698, pruned_loss=0.04457, over 943333.55 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:30:37,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.18 vs. 
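limit=15.0

The recurring `Whitening: name=... metric=M vs. limit=L` entries in this log appear to come from an icefall-style whitening regularizer in `scaling.py`: each tagged module measures how far its activation covariance is from a multiple of the identity, and only applies a gradient penalty once the metric exceeds its limit. As a rough, self-contained sketch of what such a metric can be (an assumption for illustration, not the project's actual code), the dispersion mean(λ²)/mean(λ)² of the covariance eigenvalues λ is 1.0 for perfectly "white" features and grows as a few directions dominate:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> torch.Tensor:
    """Hypothetical sketch of a whitening metric like the one logged above.

    x: (num_frames, num_channels) activations; channels are split into
    num_groups groups. Returns mean(eig^2) / mean(eig)^2 of each group's
    covariance, averaged over groups, computed from traces so that no
    eigendecomposition is needed. Equals 1.0 iff the covariance is a
    multiple of the identity; larger values mean less "white" features.
    """
    num_frames, num_channels = x.shape
    c = num_channels // num_groups
    xg = x.reshape(num_frames, num_groups, c).transpose(0, 1)  # (groups, frames, c)
    cov = xg.transpose(1, 2) @ xg / num_frames                 # (groups, c, c)
    mean_eig = cov.diagonal(dim1=1, dim2=2).sum(-1) / c        # trace(cov) / c
    mean_eig_sq = (cov * cov).sum(dim=(1, 2)) / c              # trace(cov @ cov) / c
    return (mean_eig_sq / (mean_eig**2 + 1e-20)).mean()

# A module logging "metric=9.18 vs. limit=15.0" is still under its limit,
# so on that batch the whitening penalty would not activate.
```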
+2024-07-28 23:30:46,059 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.373e+01 5.740e+01 6.263e+01 6.841e+01 9.907e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 23:30:58,409 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:31:01,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=213352.0, ans=0.025 +2024-07-28 23:31:03,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=213352.0, ans=0.0 +2024-07-28 23:31:05,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=213365.33333333334, ans=0.125 +2024-07-28 23:31:11,673 INFO [train.py:1114] (3/4) Epoch 16, batch 6700, loss[loss=0.1712, simple_loss=0.2771, pruned_loss=0.0326, over 4725.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2697, pruned_loss=0.04424, over 942280.54 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:31:18,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=213392.0, ans=0.0 +2024-07-28 23:31:29,883 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.52 vs. limit=15.0 +2024-07-28 23:31:30,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=213405.33333333334, ans=0.125 +2024-07-28 23:31:39,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=213432.0, ans=10.0 +2024-07-28 23:31:42,544 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.39 vs. limit=15.0 +2024-07-28 23:31:44,988 INFO [train.py:1114] (3/4) Epoch 16, batch 6750, loss[loss=0.1886, simple_loss=0.2913, pruned_loss=0.04299, over 4174.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2698, pruned_loss=0.04432, over 940544.31 frames. ], batch size: 25, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:31:47,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.min_positive, batch_count=213445.33333333334, ans=0.025 +2024-07-28 23:31:48,688 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.92 vs. limit=15.0 +2024-07-28 23:31:59,718 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.135e+01 5.547e+01 6.307e+01 7.303e+01 1.020e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 23:33:07,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.48 vs. 
limit=15.0 +2024-07-28 23:33:12,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=213498.66666666666, ans=0.035 +2024-07-28 23:33:47,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=213498.66666666666, ans=10.0 +2024-07-28 23:33:52,028 INFO [train.py:1114] (3/4) Epoch 16, batch 6800, loss[loss=0.1812, simple_loss=0.2816, pruned_loss=0.04042, over 4639.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2695, pruned_loss=0.04398, over 939034.81 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:33:59,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=213525.33333333334, ans=10.0 +2024-07-28 23:34:01,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=213525.33333333334, ans=0.0 +2024-07-28 23:34:05,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213525.33333333334, ans=0.125 +2024-07-28 23:34:15,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=213538.66666666666, ans=0.125 +2024-07-28 23:34:20,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.09 vs. limit=15.0 +2024-07-28 23:34:28,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=213565.33333333334, ans=0.05 +2024-07-28 23:34:31,087 INFO [train.py:1114] (3/4) Epoch 16, batch 6850, loss[loss=0.1728, simple_loss=0.273, pruned_loss=0.03631, over 4686.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2684, pruned_loss=0.04361, over 940678.45 frames. ], batch size: 13, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:34:40,947 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.265e+01 5.896e+01 6.305e+01 7.215e+01 1.193e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 23:34:58,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=213632.0, ans=0.1 +2024-07-28 23:35:04,751 INFO [train.py:1114] (3/4) Epoch 16, batch 6900, loss[loss=0.1619, simple_loss=0.2511, pruned_loss=0.03636, over 4964.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2682, pruned_loss=0.04347, over 942921.94 frames. 
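], batch size: 13, lr: 4.64e-03, grad_scale: 32.0

The periodic `WARNING [optim.py:487] Clipping_scale=2.0, grad-norm quartiles ...` entries summarize the optimizer's adaptive gradient clipping: the five values are the (min, 25%, median, 75%, max) quantiles of recently observed gradient norms, and the logged threshold consistently equals `Clipping_scale` times the median (for instance 2.0 × 6.194e+01 ≈ 1.239e+02 in the 23:35:50 entry below), with `percent-clipped` reporting how often a batch actually exceeded it. A minimal sketch of that bookkeeping, assuming a simple sliding window of norms rather than the optimizer's actual internals:

```python
import collections
import torch

class QuartileClipper:
    """Hypothetical reconstruction of the 'grad-norm quartiles ... threshold'
    log line: track recent gradient norms and clip each new gradient to
    clipping_scale * median of the tracked window."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=window)
        self.num_clipped = 0
        self.num_seen = 0

    def clip_(self, params) -> float:
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads])).item()
        self.norms.append(norm)
        t = torch.tensor(list(self.norms))
        quartiles = [torch.quantile(t, q).item() for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]  # scale x median
        self.num_seen += 1
        if norm > threshold:
            self.num_clipped += 1
            for g in grads:
                g.mul_(threshold / norm)  # rescale so the total norm == threshold
        print(f"Clipping_scale={self.clipping_scale}, grad-norm quartiles "
              + " ".join(f"{q:.3e}" for q in quartiles)
              + f", threshold={threshold:.3e}, "
              + f"percent-clipped={100 * self.num_clipped / self.num_seen:.1f}")
        return norm
```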
+2024-07-28 23:35:04,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=213645.33333333334, ans=0.0 +2024-07-28 23:35:13,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=213658.66666666666, ans=0.125 +2024-07-28 23:35:14,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=213658.66666666666, ans=0.125 +2024-07-28 23:35:18,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=213658.66666666666, ans=0.95 +2024-07-28 23:35:22,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=213672.0, ans=0.0 +2024-07-28 23:35:23,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=213672.0, ans=0.125 +2024-07-28 23:35:24,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_ff2.min_abs, batch_count=213672.0, ans=0.1 +2024-07-28 23:35:29,193 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=213685.33333333334, ans=0.125 +2024-07-28 23:35:29,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=213685.33333333334, ans=0.05 +2024-07-28 23:35:36,077 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:35:39,961 INFO [train.py:1114] (3/4) Epoch 16, batch 6950, loss[loss=0.1918, simple_loss=0.2689, pruned_loss=0.05737, over 4508.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2682, pruned_loss=0.04368, over 939933.37 frames. ], batch size: 10, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:35:50,013 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.740e+01 6.194e+01 7.107e+01 9.358e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 23:35:52,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=213738.66666666666, ans=0.025 +2024-07-28 23:35:54,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=213738.66666666666, ans=0.0 +2024-07-28 23:36:00,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=213752.0, ans=0.05 +2024-07-28 23:36:09,817 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.15 vs. limit=15.0 +2024-07-28 23:36:12,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=213765.33333333334, ans=0.125 +2024-07-28 23:36:15,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.40 vs. limit=10.0 +2024-07-28 23:36:17,392 INFO [train.py:1114] (3/4) Epoch 16, batch 7000, loss[loss=0.1827, simple_loss=0.2814, pruned_loss=0.04202, over 4617.00 frames. 
], tot_loss[loss=0.1772, simple_loss=0.2676, pruned_loss=0.04339, over 938187.23 frames. ], batch size: 17, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:36:19,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=213778.66666666666, ans=0.2 +2024-07-28 23:36:30,887 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.58 vs. limit=15.0 +2024-07-28 23:36:32,728 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.46 vs. limit=10.0 +2024-07-28 23:36:41,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=213818.66666666666, ans=0.05 +2024-07-28 23:36:43,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.34 vs. limit=15.0 +2024-07-28 23:36:45,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=213832.0, ans=0.025 +2024-07-28 23:36:45,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.74 vs. limit=22.5 +2024-07-28 23:36:48,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=213832.0, ans=0.125 +2024-07-28 23:36:48,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=213832.0, ans=0.2 +2024-07-28 23:36:50,052 INFO [train.py:1114] (3/4) Epoch 16, batch 7050, loss[loss=0.2194, simple_loss=0.3161, pruned_loss=0.06129, over 4688.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2679, pruned_loss=0.04349, over 941622.35 frames. ], batch size: 19, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:36:52,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.95 vs. limit=15.0 +2024-07-28 23:36:53,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.95 vs. limit=10.0 +2024-07-28 23:37:00,888 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.658e+01 6.254e+01 7.324e+01 1.123e+02, threshold=1.251e+02, percent-clipped=0.0 +2024-07-28 23:37:04,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=213872.0, ans=0.0 +2024-07-28 23:37:08,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=213872.0, ans=0.125 +2024-07-28 23:37:24,464 INFO [train.py:1114] (3/4) Epoch 16, batch 7100, loss[loss=0.1848, simple_loss=0.2749, pruned_loss=0.04739, over 4804.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2675, pruned_loss=0.04367, over 937297.75 frames. 
], batch size: 15, lr: 4.64e-03, grad_scale: 32.0 +2024-07-28 23:37:25,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=213912.0, ans=0.125 +2024-07-28 23:37:47,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=213952.0, ans=0.125 +2024-07-28 23:37:49,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=213952.0, ans=0.125 +2024-07-28 23:37:55,174 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.06 vs. limit=15.0 +2024-07-28 23:37:56,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=213965.33333333334, ans=0.125 +2024-07-28 23:37:59,201 INFO [train.py:1114] (3/4) Epoch 16, batch 7150, loss[loss=0.1583, simple_loss=0.2528, pruned_loss=0.03193, over 4479.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.266, pruned_loss=0.04347, over 937772.09 frames. ], batch size: 21, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:38:05,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=213992.0, ans=0.0 +2024-07-28 23:38:08,738 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.615e+01 6.266e+01 7.149e+01 9.915e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-28 23:38:14,068 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:38:25,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=214032.0, ans=0.125 +2024-07-28 23:38:31,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.45 vs. limit=6.0 +2024-07-28 23:38:31,555 INFO [train.py:1114] (3/4) Epoch 16, batch 7200, loss[loss=0.2464, simple_loss=0.3239, pruned_loss=0.08449, over 4818.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2666, pruned_loss=0.04332, over 938400.77 frames. ], batch size: 15, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:38:38,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=214058.66666666666, ans=0.0 +2024-07-28 23:38:54,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214085.33333333334, ans=0.1 +2024-07-28 23:39:01,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=214098.66666666666, ans=0.2 +2024-07-28 23:39:05,750 INFO [train.py:1114] (3/4) Epoch 16, batch 7250, loss[loss=0.1727, simple_loss=0.2559, pruned_loss=0.04476, over 4848.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2662, pruned_loss=0.04333, over 940282.05 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:39:06,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=214112.0, ans=0.025 +2024-07-28 23:39:09,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.72 vs. 
limit=15.0 +2024-07-28 23:39:15,401 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.637e+01 5.553e+01 6.069e+01 6.578e+01 8.706e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-28 23:39:56,635 INFO [train.py:1114] (3/4) Epoch 16, batch 7300, loss[loss=0.1758, simple_loss=0.2538, pruned_loss=0.04885, over 4854.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2663, pruned_loss=0.0432, over 940146.89 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:40:24,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=214232.0, ans=0.0 +2024-07-28 23:40:25,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=214232.0, ans=0.1 +2024-07-28 23:40:28,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=214232.0, ans=0.0 +2024-07-28 23:40:29,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214232.0, ans=0.1 +2024-07-28 23:40:30,741 INFO [train.py:1114] (3/4) Epoch 16, batch 7350, loss[loss=0.1714, simple_loss=0.2644, pruned_loss=0.03913, over 4633.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2663, pruned_loss=0.0435, over 940032.49 frames. ], batch size: 12, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:40:31,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=214245.33333333334, ans=0.125 +2024-07-28 23:40:40,103 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.22 vs. limit=15.0 +2024-07-28 23:40:41,009 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.596e+01 5.572e+01 6.176e+01 6.846e+01 9.880e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-28 23:40:46,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214272.0, ans=0.125 +2024-07-28 23:40:59,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=214298.66666666666, ans=0.125 +2024-07-28 23:41:00,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=214298.66666666666, ans=0.2 +2024-07-28 23:41:05,357 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:41:10,934 INFO [train.py:1114] (3/4) Epoch 16, batch 7400, loss[loss=0.1827, simple_loss=0.2682, pruned_loss=0.04861, over 4693.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2673, pruned_loss=0.0436, over 941223.21 frames. 
], batch size: 13, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:41:21,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=214325.33333333334, ans=0.125 +2024-07-28 23:41:23,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=214325.33333333334, ans=0.1 +2024-07-28 23:41:28,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=214338.66666666666, ans=0.125 +2024-07-28 23:41:31,753 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=214338.66666666666, ans=0.0 +2024-07-28 23:41:35,412 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:41:40,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=214352.0, ans=0.125 +2024-07-28 23:41:44,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.27 vs. limit=15.0 +2024-07-28 23:41:49,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=214365.33333333334, ans=0.125 +2024-07-28 23:41:50,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214378.66666666666, ans=0.125 +2024-07-28 23:41:50,693 INFO [train.py:1114] (3/4) Epoch 16, batch 7450, loss[loss=0.1815, simple_loss=0.2741, pruned_loss=0.04443, over 4615.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2665, pruned_loss=0.04326, over 938620.08 frames. ], batch size: 11, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:41:55,138 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:41:56,142 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.44 vs. limit=15.0 +2024-07-28 23:42:02,403 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.501e+01 5.840e+01 6.491e+01 7.591e+01 1.266e+02, threshold=1.298e+02, percent-clipped=1.0 +2024-07-28 23:42:15,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=214418.66666666666, ans=0.0 +2024-07-28 23:42:21,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=214432.0, ans=0.025 +2024-07-28 23:42:23,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=214432.0, ans=0.0 +2024-07-28 23:42:24,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.81 vs. limit=10.0 +2024-07-28 23:42:25,723 INFO [train.py:1114] (3/4) Epoch 16, batch 7500, loss[loss=0.1993, simple_loss=0.2752, pruned_loss=0.06172, over 3295.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2682, pruned_loss=0.04409, over 936081.79 frames. 
], batch size: 35, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:50:24,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=214458.66666666666, ans=0.125 +2024-07-28 23:50:36,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=214472.0, ans=0.125 +2024-07-28 23:50:41,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.max_positive, batch_count=214485.33333333334, ans=0.95 +2024-07-28 23:50:53,144 INFO [train.py:1114] (3/4) Epoch 16, batch 7550, loss[loss=0.2025, simple_loss=0.2823, pruned_loss=0.06137, over 4600.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2685, pruned_loss=0.04427, over 936344.64 frames. ], batch size: 17, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:50:56,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.22 vs. limit=10.0 +2024-07-28 23:50:57,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=214512.0, ans=0.04949747468305833 +2024-07-28 23:51:02,811 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.493e+01 6.004e+01 6.763e+01 8.407e+01, threshold=1.201e+02, percent-clipped=0.0 +2024-07-28 23:51:07,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.78 vs. limit=22.5 +2024-07-28 23:51:12,382 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:51:13,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=214552.0, ans=0.125 +2024-07-28 23:51:25,723 INFO [train.py:1114] (3/4) Epoch 16, batch 7600, loss[loss=0.1846, simple_loss=0.2824, pruned_loss=0.04341, over 4816.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2689, pruned_loss=0.04422, over 938225.60 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:51:32,168 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:51:32,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=214592.0, ans=0.2 +2024-07-28 23:52:16,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=214632.0, ans=0.1 +2024-07-28 23:52:20,202 INFO [train.py:1114] (3/4) Epoch 16, batch 7650, loss[loss=0.1696, simple_loss=0.2482, pruned_loss=0.04548, over 4958.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2697, pruned_loss=0.04504, over 937494.36 frames. 
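], batch size: 12, lr: 4.63e-03, grad_scale: 32.0

The dense `ScheduledFloat: name=..., batch_count=..., ans=...` entries show that many regularizer hyperparameters here (dropout rates, skip rates, balancer probabilities) are not constants but functions of a float batch counter, which is why the same name can log different `ans` values as `batch_count` grows. A minimal sketch of such a schedule, assuming piecewise-linear interpolation between (batch_count, value) breakpoints (an illustration, not the recipe's exact class):

```python
import bisect

class ScheduledFloat:
    """Hypothetical sketch: a float hyperparameter defined by sorted
    (batch_count, value) breakpoints, linearly interpolated in between
    and clamped to the end values outside the range."""

    def __init__(self, *points: tuple):
        self.points = sorted(points)  # e.g. (0.0, 0.2), (20000.0, 0.0)

    def __call__(self, batch_count: float) -> float:
        xs = [x for x, _ in self.points]
        i = bisect.bisect_right(xs, batch_count)
        if i == 0:                    # before the first breakpoint
            return self.points[0][1]
        if i == len(self.points):     # after the last breakpoint
            return self.points[-1][1]
        (x0, y0), (x1, y1) = self.points[i - 1], self.points[i]
        return y0 + (y1 - y0) * (batch_count - x0) / (x1 - x0)

# A schedule that decayed to zero long before batch_count ~214000 would keep
# logging ans=0.0, matching entries such as
# "conv_skip_rate, batch_count=213672.0, ans=0.0" earlier in this log.
conv_skip_rate = ScheduledFloat((0.0, 0.2), (20000.0, 0.0))
print(conv_skip_rate(214000.0))  # -> 0.0
```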
+2024-07-28 23:52:30,298 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.652e+01 6.093e+01 6.907e+01 1.144e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-28 23:52:32,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=214658.66666666666, ans=0.125 +2024-07-28 23:52:32,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=214658.66666666666, ans=0.125 +2024-07-28 23:52:44,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=214685.33333333334, ans=0.125 +2024-07-28 23:52:48,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=214698.66666666666, ans=22.5 +2024-07-28 23:52:53,661 INFO [train.py:1114] (3/4) Epoch 16, batch 7700, loss[loss=0.1599, simple_loss=0.2554, pruned_loss=0.03221, over 4693.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2694, pruned_loss=0.04463, over 934557.38 frames. ], batch size: 13, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:52:58,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=214712.0, ans=0.04949747468305833 +2024-07-28 23:53:00,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=214725.33333333334, ans=0.125 +2024-07-28 23:53:08,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=214738.66666666666, ans=0.125 +2024-07-28 23:53:09,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=214738.66666666666, ans=0.2 +2024-07-28 23:53:11,406 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.57 vs. limit=15.0 +2024-07-28 23:53:16,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214752.0, ans=0.1 +2024-07-28 23:53:19,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0 +2024-07-28 23:53:24,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=214765.33333333334, ans=0.125 +2024-07-28 23:53:24,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=214765.33333333334, ans=0.0 +2024-07-28 23:53:25,966 INFO [train.py:1114] (3/4) Epoch 16, batch 7750, loss[loss=0.1786, simple_loss=0.2763, pruned_loss=0.04048, over 4935.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2696, pruned_loss=0.04431, over 935821.74 frames. 
], batch size: 14, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:53:27,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=214778.66666666666, ans=0.0 +2024-07-28 23:53:38,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=214778.66666666666, ans=0.0 +2024-07-28 23:53:45,186 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.76 vs. limit=15.0 +2024-07-28 23:53:46,521 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.451e+01 5.576e+01 5.953e+01 6.432e+01 8.446e+01, threshold=1.191e+02, percent-clipped=0.0 +2024-07-28 23:53:55,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=214805.33333333334, ans=0.125 +2024-07-28 23:53:55,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=214818.66666666666, ans=0.125 +2024-07-28 23:53:56,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=214818.66666666666, ans=0.125 +2024-07-28 23:53:58,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=214818.66666666666, ans=0.0 +2024-07-28 23:54:04,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=214832.0, ans=0.125 +2024-07-28 23:54:04,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=214832.0, ans=10.0 +2024-07-28 23:54:09,318 INFO [train.py:1114] (3/4) Epoch 16, batch 7800, loss[loss=0.181, simple_loss=0.2741, pruned_loss=0.0439, over 4671.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2702, pruned_loss=0.04438, over 937279.79 frames. ], batch size: 14, lr: 4.63e-03, grad_scale: 32.0 +2024-07-28 23:54:12,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=214845.33333333334, ans=0.0 +2024-07-28 23:54:24,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=214872.0, ans=0.015 +2024-07-28 23:54:27,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=214885.33333333334, ans=0.0 +2024-07-28 23:54:31,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=214885.33333333334, ans=0.1 +2024-07-28 23:54:42,807 INFO [train.py:1114] (3/4) Epoch 16, batch 7850, loss[loss=0.1498, simple_loss=0.2345, pruned_loss=0.03255, over 4547.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2694, pruned_loss=0.04422, over 936458.88 frames. 
], batch size: 10, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:54:52,701 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.575e+01 6.196e+01 7.184e+01 1.116e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-28 23:54:54,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=214925.33333333334, ans=0.0 +2024-07-28 23:55:09,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=214938.66666666666, ans=0.125 +2024-07-28 23:55:10,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=214938.66666666666, ans=0.0 +2024-07-28 23:55:16,703 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.62 vs. limit=12.0 +2024-07-28 23:55:39,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=214965.33333333334, ans=0.125 +2024-07-28 23:55:40,818 INFO [train.py:1114] (3/4) Epoch 16, batch 7900, loss[loss=0.1925, simple_loss=0.2789, pruned_loss=0.05301, over 4867.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2713, pruned_loss=0.04484, over 933021.26 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:55:42,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.08 vs. limit=15.0 +2024-07-28 23:55:49,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=214978.66666666666, ans=0.0 +2024-07-28 23:55:55,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.71 vs. limit=15.0 +2024-07-28 23:55:57,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=215005.33333333334, ans=0.125 +2024-07-28 23:56:02,697 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=215005.33333333334, ans=0.2 +2024-07-28 23:56:21,110 INFO [train.py:1114] (3/4) Epoch 16, batch 7950, loss[loss=0.2377, simple_loss=0.32, pruned_loss=0.0777, over 3171.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2716, pruned_loss=0.04495, over 934694.58 frames. ], batch size: 35, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:56:48,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=215058.66666666666, ans=0.125 +2024-07-28 23:56:50,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=215058.66666666666, ans=0.0 +2024-07-28 23:56:52,124 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.700e+01 5.562e+01 6.109e+01 6.836e+01 1.076e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-28 23:56:57,066 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.22 vs. 
limit=15.0 +2024-07-28 23:57:38,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=215112.0, ans=0.025 +2024-07-28 23:57:39,462 INFO [train.py:1114] (3/4) Epoch 16, batch 8000, loss[loss=0.1467, simple_loss=0.2369, pruned_loss=0.02818, over 4628.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2699, pruned_loss=0.04453, over 934507.65 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:57:40,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=215112.0, ans=0.0 +2024-07-28 23:57:54,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.83 vs. limit=15.0 +2024-07-28 23:57:57,229 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=215138.66666666666, ans=0.0 +2024-07-28 23:58:07,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=215165.33333333334, ans=0.125 +2024-07-28 23:58:13,493 INFO [train.py:1114] (3/4) Epoch 16, batch 8050, loss[loss=0.1716, simple_loss=0.2596, pruned_loss=0.04175, over 4811.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2702, pruned_loss=0.04471, over 934178.67 frames. ], batch size: 14, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:58:15,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=215178.66666666666, ans=0.1 +2024-07-28 23:58:16,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215178.66666666666, ans=0.0 +2024-07-28 23:58:20,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=215192.0, ans=0.1 +2024-07-28 23:58:24,548 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.751e+01 5.579e+01 6.307e+01 7.164e+01 1.118e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-28 23:58:31,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.35 vs. limit=15.0 +2024-07-28 23:58:43,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=215232.0, ans=0.025 +2024-07-28 23:58:45,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.90 vs. limit=15.0 +2024-07-28 23:58:46,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=215232.0, ans=0.125 +2024-07-28 23:58:47,250 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.58 vs. limit=6.0 +2024-07-28 23:58:48,139 INFO [train.py:1114] (3/4) Epoch 16, batch 8100, loss[loss=0.1822, simple_loss=0.2891, pruned_loss=0.03766, over 4803.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2701, pruned_loss=0.04439, over 933460.94 frames. 
], batch size: 15, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:58:48,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=215245.33333333334, ans=0.125 +2024-07-28 23:58:52,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=215245.33333333334, ans=0.2 +2024-07-28 23:58:55,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=215258.66666666666, ans=0.125 +2024-07-28 23:59:07,003 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=8.27 vs. limit=15.0 +2024-07-28 23:59:16,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215298.66666666666, ans=0.0 +2024-07-28 23:59:21,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=215298.66666666666, ans=0.2 +2024-07-28 23:59:22,315 INFO [train.py:1114] (3/4) Epoch 16, batch 8150, loss[loss=0.2066, simple_loss=0.29, pruned_loss=0.06157, over 4800.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2697, pruned_loss=0.04446, over 936933.55 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:59:32,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.614e+01 6.330e+01 7.419e+01 1.009e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-28 23:59:37,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=215338.66666666666, ans=0.025 +2024-07-28 23:59:38,578 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-28 23:59:45,286 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.89 vs. limit=15.0 +2024-07-28 23:59:54,897 INFO [train.py:1114] (3/4) Epoch 16, batch 8200, loss[loss=0.1667, simple_loss=0.2587, pruned_loss=0.03738, over 4806.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2688, pruned_loss=0.04379, over 937920.46 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 32.0 +2024-07-28 23:59:56,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=215378.66666666666, ans=0.0 +2024-07-29 00:00:10,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=215405.33333333334, ans=0.2 +2024-07-29 00:00:14,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=215418.66666666666, ans=0.125 +2024-07-29 00:00:15,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=215418.66666666666, ans=0.125 +2024-07-29 00:00:21,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=215432.0, ans=0.0 +2024-07-29 00:00:30,611 INFO [train.py:1114] (3/4) Epoch 16, batch 8250, loss[loss=0.1487, simple_loss=0.2493, pruned_loss=0.02409, over 4903.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2678, pruned_loss=0.04307, over 938433.47 frames. 
], batch size: 13, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:00:31,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.64 vs. limit=15.0 +2024-07-29 00:00:44,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=215445.33333333334, ans=15.0 +2024-07-29 00:00:48,543 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.803e+01 5.664e+01 6.137e+01 6.796e+01 1.110e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 00:00:51,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=215458.66666666666, ans=0.125 +2024-07-29 00:00:55,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=215472.0, ans=0.1 +2024-07-29 00:01:12,207 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=215498.66666666666, ans=0.07 +2024-07-29 00:01:13,400 INFO [train.py:1114] (3/4) Epoch 16, batch 8300, loss[loss=0.1879, simple_loss=0.2902, pruned_loss=0.04285, over 4884.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2681, pruned_loss=0.04308, over 938329.42 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:01:49,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.36 vs. limit=15.0 +2024-07-29 00:02:42,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=215538.66666666666, ans=0.0 +2024-07-29 00:02:58,645 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.86 vs. limit=15.0 +2024-07-29 00:03:03,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=215552.0, ans=0.125 +2024-07-29 00:03:22,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=215565.33333333334, ans=0.0 +2024-07-29 00:03:22,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=215565.33333333334, ans=0.025 +2024-07-29 00:03:26,031 INFO [train.py:1114] (3/4) Epoch 16, batch 8350, loss[loss=0.2, simple_loss=0.2885, pruned_loss=0.05573, over 4812.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2689, pruned_loss=0.04347, over 941164.55 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:03:30,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=215578.66666666666, ans=0.125 +2024-07-29 00:03:31,970 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.01 vs. 
limit=22.5 +2024-07-29 00:03:32,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=215592.0, ans=0.125 +2024-07-29 00:03:39,855 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.661e+01 5.677e+01 6.151e+01 6.738e+01 9.364e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 00:03:39,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=215592.0, ans=0.0 +2024-07-29 00:03:43,485 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.57 vs. limit=15.0 +2024-07-29 00:03:57,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215632.0, ans=0.0 +2024-07-29 00:04:04,285 INFO [train.py:1114] (3/4) Epoch 16, batch 8400, loss[loss=0.1774, simple_loss=0.2625, pruned_loss=0.04612, over 4769.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2684, pruned_loss=0.04334, over 939983.65 frames. ], batch size: 12, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:04:18,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=215672.0, ans=0.125 +2024-07-29 00:04:18,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=215672.0, ans=0.2 +2024-07-29 00:04:20,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=215672.0, ans=0.0 +2024-07-29 00:04:20,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=215672.0, ans=0.125 +2024-07-29 00:04:36,868 INFO [train.py:1114] (3/4) Epoch 16, batch 8450, loss[loss=0.1887, simple_loss=0.2768, pruned_loss=0.05032, over 4794.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2681, pruned_loss=0.04299, over 938918.00 frames. ], batch size: 15, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:04:42,245 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.54 vs. limit=15.0 +2024-07-29 00:04:46,409 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.807e+01 5.964e+01 6.863e+01 7.657e+01 1.232e+02, threshold=1.373e+02, percent-clipped=1.0 +2024-07-29 00:04:48,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=215725.33333333334, ans=0.125 +2024-07-29 00:04:49,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=215725.33333333334, ans=15.0 +2024-07-29 00:04:55,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215752.0, ans=0.125 +2024-07-29 00:04:56,118 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.09 vs. 
limit=22.5 +2024-07-29 00:05:02,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=215765.33333333334, ans=0.125 +2024-07-29 00:05:04,025 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.54 vs. limit=15.0 +2024-07-29 00:05:09,550 INFO [train.py:1114] (3/4) Epoch 16, batch 8500, loss[loss=0.165, simple_loss=0.251, pruned_loss=0.0395, over 4623.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2682, pruned_loss=0.04285, over 938788.39 frames. ], batch size: 11, lr: 4.62e-03, grad_scale: 64.0 +2024-07-29 00:05:14,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=215778.66666666666, ans=0.0 +2024-07-29 00:05:22,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=215805.33333333334, ans=0.025 +2024-07-29 00:05:31,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=215818.66666666666, ans=0.125 +2024-07-29 00:05:33,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=215818.66666666666, ans=0.0 +2024-07-29 00:05:41,908 INFO [train.py:1114] (3/4) Epoch 16, batch 8550, loss[loss=0.1607, simple_loss=0.2449, pruned_loss=0.0383, over 4797.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2678, pruned_loss=0.04337, over 939398.92 frames. ], batch size: 11, lr: 4.61e-03, grad_scale: 64.0 +2024-07-29 00:05:48,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=215845.33333333334, ans=0.125 +2024-07-29 00:05:49,571 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=215858.66666666666, ans=0.0 +2024-07-29 00:05:52,590 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.843e+01 6.495e+01 7.573e+01 1.241e+02, threshold=1.299e+02, percent-clipped=0.0 +2024-07-29 00:05:52,751 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=215858.66666666666, ans=0.125 +2024-07-29 00:05:53,542 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.05 vs. limit=15.0 +2024-07-29 00:05:54,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.79 vs. 
limit=22.5 +2024-07-29 00:05:55,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=215872.0, ans=0.1 +2024-07-29 00:05:55,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=215872.0, ans=0.2 +2024-07-29 00:06:01,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=215872.0, ans=0.04949747468305833 +2024-07-29 00:06:06,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=215885.33333333334, ans=0.0 +2024-07-29 00:06:16,112 INFO [train.py:1114] (3/4) Epoch 16, batch 8600, loss[loss=0.2087, simple_loss=0.2936, pruned_loss=0.06188, over 4808.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.04364, over 939238.71 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 64.0 +2024-07-29 00:06:27,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=215912.0, ans=0.125 +2024-07-29 00:06:27,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.78 vs. limit=15.0 +2024-07-29 00:06:28,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.33 vs. limit=15.0 +2024-07-29 00:06:39,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=215938.66666666666, ans=0.07 +2024-07-29 00:06:40,373 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:06:52,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=215965.33333333334, ans=0.0 +2024-07-29 00:06:55,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=215965.33333333334, ans=0.125 +2024-07-29 00:06:57,226 INFO [train.py:1114] (3/4) Epoch 16, batch 8650, loss[loss=0.2077, simple_loss=0.3035, pruned_loss=0.05599, over 4907.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.267, pruned_loss=0.04307, over 940374.79 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:07:02,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=215978.66666666666, ans=0.125 +2024-07-29 00:07:06,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=215992.0, ans=0.125 +2024-07-29 00:07:08,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.650e+01 6.263e+01 7.133e+01 1.178e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 00:07:09,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. 
limit=15.0 +2024-07-29 00:07:25,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=216032.0, ans=0.125 +2024-07-29 00:07:30,201 INFO [train.py:1114] (3/4) Epoch 16, batch 8700, loss[loss=0.1709, simple_loss=0.2669, pruned_loss=0.03744, over 4758.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.269, pruned_loss=0.0438, over 937405.14 frames. ], batch size: 13, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:07:38,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=216058.66666666666, ans=0.025 +2024-07-29 00:07:41,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=216058.66666666666, ans=0.05 +2024-07-29 00:07:45,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.46 vs. limit=15.0 +2024-07-29 00:07:52,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.22 vs. limit=6.0 +2024-07-29 00:07:55,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=216085.33333333334, ans=0.125 +2024-07-29 00:08:01,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=216098.66666666666, ans=0.09899494936611666 +2024-07-29 00:08:06,547 INFO [train.py:1114] (3/4) Epoch 16, batch 8750, loss[loss=0.1841, simple_loss=0.2737, pruned_loss=0.04728, over 4684.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.0437, over 936235.69 frames. ], batch size: 15, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:14,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=216125.33333333334, ans=0.125 +2024-07-29 00:08:16,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=216125.33333333334, ans=0.0 +2024-07-29 00:08:16,793 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.699e+01 5.631e+01 6.456e+01 7.086e+01 1.065e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 00:08:22,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=216138.66666666666, ans=0.125 +2024-07-29 00:08:24,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216138.66666666666, ans=0.125 +2024-07-29 00:08:27,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=216152.0, ans=0.125 +2024-07-29 00:08:32,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=216152.0, ans=0.125 +2024-07-29 00:08:38,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.30 vs. limit=15.0 +2024-07-29 00:08:41,394 INFO [train.py:1114] (3/4) Epoch 16, batch 8800, loss[loss=0.1466, simple_loss=0.2501, pruned_loss=0.02157, over 4943.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2678, pruned_loss=0.0433, over 937038.73 frames. 
], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:08:42,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=216178.66666666666, ans=0.09899494936611666 +2024-07-29 00:08:44,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216178.66666666666, ans=0.0 +2024-07-29 00:08:51,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.60 vs. limit=15.0 +2024-07-29 00:09:05,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=10.10 vs. limit=15.0 +2024-07-29 00:09:11,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=216232.0, ans=0.2 +2024-07-29 00:09:14,340 INFO [train.py:1114] (3/4) Epoch 16, batch 8850, loss[loss=0.1883, simple_loss=0.2824, pruned_loss=0.04709, over 4589.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2684, pruned_loss=0.04375, over 931691.45 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:14,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=216245.33333333334, ans=0.1 +2024-07-29 00:09:25,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=216258.66666666666, ans=0.125 +2024-07-29 00:09:25,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.646e+01 5.534e+01 6.492e+01 7.361e+01 1.003e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 00:09:31,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=216272.0, ans=0.0 +2024-07-29 00:09:34,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216285.33333333334, ans=0.125 +2024-07-29 00:09:35,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=216285.33333333334, ans=0.0 +2024-07-29 00:09:38,067 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.03 vs. limit=15.0 +2024-07-29 00:09:40,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=216285.33333333334, ans=0.0 +2024-07-29 00:09:40,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=216285.33333333334, ans=0.125 +2024-07-29 00:09:41,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216298.66666666666, ans=0.1 +2024-07-29 00:09:41,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.16 vs. limit=6.0 +2024-07-29 00:09:44,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.13 vs. limit=6.0 +2024-07-29 00:09:48,097 INFO [train.py:1114] (3/4) Epoch 16, batch 8900, loss[loss=0.1336, simple_loss=0.2175, pruned_loss=0.02486, over 4946.00 frames. 
], tot_loss[loss=0.1777, simple_loss=0.2685, pruned_loss=0.04344, over 929686.67 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:09:49,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216312.0, ans=0.1 +2024-07-29 00:09:50,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=216312.0, ans=0.125 +2024-07-29 00:09:53,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=216312.0, ans=0.0 +2024-07-29 00:09:56,753 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.53 vs. limit=15.0 +2024-07-29 00:09:59,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=216325.33333333334, ans=0.125 +2024-07-29 00:10:10,431 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.20 vs. limit=22.5 +2024-07-29 00:10:14,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216365.33333333334, ans=0.0 +2024-07-29 00:10:15,442 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:10:21,259 INFO [train.py:1114] (3/4) Epoch 16, batch 8950, loss[loss=0.2333, simple_loss=0.3315, pruned_loss=0.06752, over 4550.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2685, pruned_loss=0.04344, over 930446.72 frames. ], batch size: 21, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:27,325 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.39 vs. limit=15.0 +2024-07-29 00:10:29,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=28.21 vs. limit=15.0 +2024-07-29 00:10:31,360 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.323e+01 5.469e+01 6.023e+01 7.554e+01 1.113e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 00:10:35,934 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:10:36,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.86 vs. limit=15.0 +2024-07-29 00:10:44,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=216418.66666666666, ans=0.0 +2024-07-29 00:10:53,217 INFO [train.py:1114] (3/4) Epoch 16, batch 9000, loss[loss=0.1515, simple_loss=0.2391, pruned_loss=0.0319, over 4637.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2671, pruned_loss=0.04338, over 933698.11 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:10:53,218 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 00:11:10,917 INFO [train.py:1146] (3/4) Epoch 16, validation: loss=0.1631, simple_loss=0.2656, pruned_loss=0.03028, over 944034.00 frames. 
+2024-07-29 00:11:10,918 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 00:11:22,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=216458.66666666666, ans=0.05 +2024-07-29 00:11:22,375 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.49 vs. limit=22.5 +2024-07-29 00:11:33,581 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216485.33333333334, ans=0.1 +2024-07-29 00:11:35,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=12.0 +2024-07-29 00:11:43,596 INFO [train.py:1114] (3/4) Epoch 16, batch 9050, loss[loss=0.1516, simple_loss=0.2419, pruned_loss=0.03066, over 4529.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2671, pruned_loss=0.04315, over 933924.73 frames. ], batch size: 10, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:11:45,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=216512.0, ans=0.0 +2024-07-29 00:11:52,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.45 vs. limit=22.5 +2024-07-29 00:11:54,113 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.894e+01 6.647e+01 7.904e+01 1.086e+02, threshold=1.329e+02, percent-clipped=0.0 +2024-07-29 00:12:07,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=216552.0, ans=0.1 +2024-07-29 00:12:13,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=216565.33333333334, ans=0.125 +2024-07-29 00:12:13,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=216565.33333333334, ans=0.125 +2024-07-29 00:12:17,315 INFO [train.py:1114] (3/4) Epoch 16, batch 9100, loss[loss=0.1596, simple_loss=0.2538, pruned_loss=0.03267, over 4931.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2676, pruned_loss=0.04358, over 936645.40 frames. ], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:12:22,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=216578.66666666666, ans=0.0 +2024-07-29 00:12:29,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.58 vs. limit=12.0 +2024-07-29 00:12:41,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=216618.66666666666, ans=0.125 +2024-07-29 00:12:47,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=216632.0, ans=0.125 +2024-07-29 00:12:49,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=216632.0, ans=0.2 +2024-07-29 00:12:51,515 INFO [train.py:1114] (3/4) Epoch 16, batch 9150, loss[loss=0.2241, simple_loss=0.3087, pruned_loss=0.06968, over 4824.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2679, pruned_loss=0.04353, over 935657.69 frames. 
], batch size: 14, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:13:01,748 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+01 5.764e+01 6.440e+01 7.377e+01 1.090e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 00:13:05,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.44 vs. limit=15.0 +2024-07-29 00:13:10,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0 +2024-07-29 00:13:12,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=216685.33333333334, ans=0.125 +2024-07-29 00:13:13,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=216685.33333333334, ans=0.0 +2024-07-29 00:13:17,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216698.66666666666, ans=0.0 +2024-07-29 00:13:20,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=216698.66666666666, ans=0.2 +2024-07-29 00:13:23,167 INFO [train.py:1114] (3/4) Epoch 16, batch 9200, loss[loss=0.1678, simple_loss=0.2544, pruned_loss=0.04059, over 4846.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2681, pruned_loss=0.04383, over 937644.68 frames. ], batch size: 12, lr: 4.61e-03, grad_scale: 32.0 +2024-07-29 00:13:28,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216712.0, ans=0.1 +2024-07-29 00:13:33,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=216725.33333333334, ans=0.125 +2024-07-29 00:13:35,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=21.80 vs. limit=15.0 +2024-07-29 00:13:46,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=216738.66666666666, ans=0.0 +2024-07-29 00:13:46,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.37 vs. limit=15.0 +2024-07-29 00:13:50,180 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 00:13:59,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-29 00:14:01,551 INFO [train.py:1114] (3/4) Epoch 16, batch 9250, loss[loss=0.1835, simple_loss=0.281, pruned_loss=0.04302, over 4632.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2676, pruned_loss=0.0435, over 938418.34 frames. ], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:11,592 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.669e+01 5.549e+01 6.033e+01 6.747e+01 9.644e+01, threshold=1.207e+02, percent-clipped=0.0 +2024-07-29 00:14:32,955 INFO [train.py:1114] (3/4) Epoch 16, batch 9300, loss[loss=0.1835, simple_loss=0.2761, pruned_loss=0.04548, over 4784.00 frames. 
], tot_loss[loss=0.1773, simple_loss=0.2676, pruned_loss=0.04351, over 938158.00 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:14:35,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=216845.33333333334, ans=0.125 +2024-07-29 00:14:45,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=216872.0, ans=0.2 +2024-07-29 00:14:46,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=216872.0, ans=0.1 +2024-07-29 00:14:51,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=216885.33333333334, ans=0.2 +2024-07-29 00:14:54,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=216885.33333333334, ans=0.07 +2024-07-29 00:14:58,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=216898.66666666666, ans=0.125 +2024-07-29 00:15:04,407 INFO [train.py:1114] (3/4) Epoch 16, batch 9350, loss[loss=0.141, simple_loss=0.2252, pruned_loss=0.02842, over 4805.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04311, over 935296.56 frames. ], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:07,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=216912.0, ans=0.125 +2024-07-29 00:15:10,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=216925.33333333334, ans=0.0 +2024-07-29 00:15:10,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=216925.33333333334, ans=0.0 +2024-07-29 00:15:14,952 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.736e+01 6.318e+01 7.656e+01 1.489e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 00:15:18,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=216938.66666666666, ans=0.125 +2024-07-29 00:15:21,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.88 vs. limit=15.0 +2024-07-29 00:15:34,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=216965.33333333334, ans=0.0 +2024-07-29 00:15:35,904 INFO [train.py:1114] (3/4) Epoch 16, batch 9400, loss[loss=0.1703, simple_loss=0.2585, pruned_loss=0.04111, over 4707.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2672, pruned_loss=0.04317, over 933020.16 frames. 
], batch size: 13, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:15:35,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=216978.66666666666, ans=0.0 +2024-07-29 00:15:42,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=216992.0, ans=0.0 +2024-07-29 00:15:43,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=216992.0, ans=0.0 +2024-07-29 00:15:50,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=11.18 vs. limit=15.0 +2024-07-29 00:15:52,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=217005.33333333334, ans=0.125 +2024-07-29 00:15:55,646 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0 +2024-07-29 00:16:05,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=217032.0, ans=0.025 +2024-07-29 00:16:08,013 INFO [train.py:1114] (3/4) Epoch 16, batch 9450, loss[loss=0.1421, simple_loss=0.2255, pruned_loss=0.02929, over 4809.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2683, pruned_loss=0.04418, over 932280.95 frames. ], batch size: 11, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:16:09,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=217045.33333333334, ans=0.125 +2024-07-29 00:16:13,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=217045.33333333334, ans=10.0 +2024-07-29 00:16:14,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.30 vs. limit=15.0 +2024-07-29 00:16:17,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=217045.33333333334, ans=0.125 +2024-07-29 00:16:19,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=217058.66666666666, ans=0.07 +2024-07-29 00:16:22,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217058.66666666666, ans=0.125 +2024-07-29 00:16:25,645 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.291e+01 5.921e+01 6.735e+01 1.029e+02, threshold=1.184e+02, percent-clipped=0.0 +2024-07-29 00:16:36,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=217085.33333333334, ans=0.125 +2024-07-29 00:16:40,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=217098.66666666666, ans=0.0 +2024-07-29 00:16:42,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=217098.66666666666, ans=0.025 +2024-07-29 00:16:46,845 INFO [train.py:1114] (3/4) Epoch 16, batch 9500, loss[loss=0.1553, simple_loss=0.2543, pruned_loss=0.02815, over 4697.00 frames. 
], tot_loss[loss=0.1776, simple_loss=0.2676, pruned_loss=0.04383, over 934280.63 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:14,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=217165.33333333334, ans=0.125 +2024-07-29 00:17:18,860 INFO [train.py:1114] (3/4) Epoch 16, batch 9550, loss[loss=0.149, simple_loss=0.2336, pruned_loss=0.03223, over 4773.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2675, pruned_loss=0.04377, over 931625.44 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:17:28,560 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.794e+01 5.662e+01 6.269e+01 6.816e+01 8.303e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 00:17:29,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=217192.0, ans=10.0 +2024-07-29 00:17:35,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=217205.33333333334, ans=0.125 +2024-07-29 00:17:39,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=217218.66666666666, ans=0.125 +2024-07-29 00:17:48,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=217232.0, ans=0.015 +2024-07-29 00:17:50,237 INFO [train.py:1114] (3/4) Epoch 16, batch 9600, loss[loss=0.2809, simple_loss=0.3452, pruned_loss=0.1083, over 3534.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04397, over 930458.86 frames. ], batch size: 35, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:02,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=4.09 vs. limit=15.0 +2024-07-29 00:18:07,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=217272.0, ans=0.125 +2024-07-29 00:18:16,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=217298.66666666666, ans=0.09899494936611666 +2024-07-29 00:18:22,114 INFO [train.py:1114] (3/4) Epoch 16, batch 9650, loss[loss=0.2052, simple_loss=0.3004, pruned_loss=0.05502, over 4834.00 frames. ], tot_loss[loss=0.179, simple_loss=0.269, pruned_loss=0.04447, over 925906.33 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:32,294 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.687e+01 6.553e+01 7.550e+01 1.146e+02, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:18:43,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217352.0, ans=0.125 +2024-07-29 00:18:48,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=217365.33333333334, ans=0.0 +2024-07-29 00:18:53,904 INFO [train.py:1114] (3/4) Epoch 16, batch 9700, loss[loss=0.1856, simple_loss=0.2926, pruned_loss=0.0393, over 4304.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2691, pruned_loss=0.04442, over 924471.94 frames. 
], batch size: 25, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:18:55,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=217378.66666666666, ans=0.125 +2024-07-29 00:18:57,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=217378.66666666666, ans=0.2 +2024-07-29 00:18:59,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=217392.0, ans=0.125 +2024-07-29 00:19:06,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=217405.33333333334, ans=0.125 +2024-07-29 00:19:18,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=217418.66666666666, ans=0.0 +2024-07-29 00:19:18,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=217418.66666666666, ans=0.125 +2024-07-29 00:19:19,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=217432.0, ans=0.0 +2024-07-29 00:19:20,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=217432.0, ans=0.07 +2024-07-29 00:19:25,695 INFO [train.py:1114] (3/4) Epoch 16, batch 9750, loss[loss=0.2098, simple_loss=0.2943, pruned_loss=0.06265, over 4678.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2691, pruned_loss=0.0444, over 924708.83 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:19:36,075 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.471e+01 5.634e+01 6.378e+01 7.099e+01 1.078e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 00:19:38,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=217472.0, ans=0.2 +2024-07-29 00:19:41,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=217472.0, ans=0.0 +2024-07-29 00:19:46,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217485.33333333334, ans=0.125 +2024-07-29 00:19:54,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=217498.66666666666, ans=0.2 +2024-07-29 00:19:57,518 INFO [train.py:1114] (3/4) Epoch 16, batch 9800, loss[loss=0.1782, simple_loss=0.2709, pruned_loss=0.04276, over 4709.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.269, pruned_loss=0.04481, over 924446.21 frames. ], batch size: 12, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:00,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.65 vs. limit=12.0 +2024-07-29 00:20:15,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=217525.33333333334, ans=0.2 +2024-07-29 00:20:20,925 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.80 vs. 
limit=15.0 +2024-07-29 00:20:25,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217552.0, ans=0.1 +2024-07-29 00:20:34,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217565.33333333334, ans=0.125 +2024-07-29 00:20:36,550 INFO [train.py:1114] (3/4) Epoch 16, batch 9850, loss[loss=0.1777, simple_loss=0.2715, pruned_loss=0.04191, over 4903.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2695, pruned_loss=0.04486, over 926892.30 frames. ], batch size: 15, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:20:37,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=217578.66666666666, ans=0.0 +2024-07-29 00:20:39,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=217578.66666666666, ans=0.0 +2024-07-29 00:20:47,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217592.0, ans=0.1 +2024-07-29 00:20:50,314 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.242e+01 5.769e+01 6.344e+01 7.479e+01 1.066e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-29 00:21:00,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=217618.66666666666, ans=0.0 +2024-07-29 00:21:04,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=217618.66666666666, ans=0.2 +2024-07-29 00:21:11,732 INFO [train.py:1114] (3/4) Epoch 16, batch 9900, loss[loss=0.1776, simple_loss=0.2678, pruned_loss=0.04365, over 4834.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2698, pruned_loss=0.04492, over 926574.68 frames. ], batch size: 16, lr: 4.60e-03, grad_scale: 32.0 +2024-07-29 00:21:29,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.43 vs. limit=15.0 +2024-07-29 00:21:42,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217712.0, ans=0.125 +2024-07-29 00:21:42,965 INFO [train.py:1114] (3/4) Epoch 16, batch 9950, loss[loss=0.1972, simple_loss=0.2728, pruned_loss=0.06079, over 4507.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2704, pruned_loss=0.04528, over 928924.25 frames. ], batch size: 10, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:21:44,516 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.01 vs. limit=10.0 +2024-07-29 00:21:56,427 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.930e+01 6.462e+01 7.578e+01 1.307e+02, threshold=1.292e+02, percent-clipped=1.0 +2024-07-29 00:22:05,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=217752.0, ans=0.07 +2024-07-29 00:22:17,191 INFO [train.py:1114] (3/4) Epoch 16, batch 10000, loss[loss=0.2269, simple_loss=0.3004, pruned_loss=0.07668, over 4679.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2746, pruned_loss=0.04718, over 926727.80 frames. 
], batch size: 16, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:17,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217778.66666666666, ans=0.125 +2024-07-29 00:22:24,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=217792.0, ans=0.125 +2024-07-29 00:22:26,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=217792.0, ans=0.125 +2024-07-29 00:22:29,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=217805.33333333334, ans=0.125 +2024-07-29 00:22:32,134 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.68 vs. limit=15.0 +2024-07-29 00:22:36,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=217818.66666666666, ans=0.2 +2024-07-29 00:22:36,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=217818.66666666666, ans=0.0 +2024-07-29 00:22:46,706 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.36 vs. limit=15.0 +2024-07-29 00:22:49,160 INFO [train.py:1114] (3/4) Epoch 16, batch 10050, loss[loss=0.1955, simple_loss=0.2705, pruned_loss=0.06032, over 2964.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2783, pruned_loss=0.04937, over 915199.69 frames. ], batch size: 36, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:22:49,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=9.53 vs. limit=12.0 +2024-07-29 00:22:53,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.48 vs. limit=15.0 +2024-07-29 00:23:01,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.821e+01 5.872e+01 6.658e+01 7.418e+01 1.272e+02, threshold=1.332e+02, percent-clipped=0.0 +2024-07-29 00:23:02,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=217858.66666666666, ans=0.0 +2024-07-29 00:23:08,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=217872.0, ans=0.125 +2024-07-29 00:23:13,919 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=217885.33333333334, ans=0.1 +2024-07-29 00:23:14,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=9.32 vs. limit=15.0 +2024-07-29 00:23:16,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=217898.66666666666, ans=0.07 +2024-07-29 00:23:18,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=217898.66666666666, ans=0.125 +2024-07-29 00:23:23,187 INFO [train.py:1114] (3/4) Epoch 16, batch 10100, loss[loss=0.224, simple_loss=0.3068, pruned_loss=0.07061, over 3420.00 frames. 
], tot_loss[loss=0.1954, simple_loss=0.2829, pruned_loss=0.05399, over 863319.15 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:23,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=15.87 vs. limit=15.0 +2024-07-29 00:23:23,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=217912.0, ans=0.07 +2024-07-29 00:23:26,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=217912.0, ans=0.0 +2024-07-29 00:23:26,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.88 vs. limit=15.0 +2024-07-29 00:23:27,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=217912.0, ans=0.0 +2024-07-29 00:23:32,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=217925.33333333334, ans=0.125 +2024-07-29 00:23:32,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.62 vs. limit=15.0 +2024-07-29 00:23:48,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=217965.33333333334, ans=0.125 +2024-07-29 00:23:49,975 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=10.64 vs. limit=12.0 +2024-07-29 00:23:53,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217965.33333333334, ans=0.1 +2024-07-29 00:23:54,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=217965.33333333334, ans=0.1 +2024-07-29 00:23:55,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=217978.66666666666, ans=0.025 +2024-07-29 00:23:56,123 INFO [train.py:1114] (3/4) Epoch 16, batch 10150, loss[loss=0.1985, simple_loss=0.2736, pruned_loss=0.06167, over 3402.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2858, pruned_loss=0.05727, over 822718.68 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:23:57,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=217978.66666666666, ans=0.2 +2024-07-29 00:24:02,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.56 vs. 
limit=12.0 +2024-07-29 00:24:06,950 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.947e+01 6.782e+01 7.198e+01 7.904e+01 2.355e+02, threshold=1.440e+02, percent-clipped=1.0 +2024-07-29 00:24:07,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=217992.0, ans=0.125 +2024-07-29 00:24:10,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218005.33333333334, ans=0.1 +2024-07-29 00:24:13,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=218005.33333333334, ans=0.05 +2024-07-29 00:24:15,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=218018.66666666666, ans=0.09899494936611666 +2024-07-29 00:24:21,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218032.0, ans=0.1 +2024-07-29 00:24:21,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218032.0, ans=0.0 +2024-07-29 00:24:27,768 INFO [train.py:1114] (3/4) Epoch 16, batch 10200, loss[loss=0.2652, simple_loss=0.3196, pruned_loss=0.1054, over 3207.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2882, pruned_loss=0.06032, over 788476.97 frames. ], batch size: 35, lr: 4.59e-03, grad_scale: 32.0 +2024-07-29 00:24:32,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=218045.33333333334, ans=0.125 +2024-07-29 00:24:34,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=10.07 vs. limit=15.0 +2024-07-29 00:24:35,355 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.03 vs. limit=22.5 +2024-07-29 00:24:38,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218058.66666666666, ans=0.1 +2024-07-29 00:24:38,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=218058.66666666666, ans=0.0 +2024-07-29 00:25:24,911 INFO [train.py:1114] (3/4) Epoch 17, batch 0, loss[loss=0.1359, simple_loss=0.2227, pruned_loss=0.02455, over 4854.00 frames. ], tot_loss[loss=0.1359, simple_loss=0.2227, pruned_loss=0.02455, over 4854.00 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:25:24,912 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 00:25:30,263 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.9412, 3.5098, 3.4603, 3.7044], device='cuda:3') +2024-07-29 00:25:36,964 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.1632, simple_loss=0.2676, pruned_loss=0.0294, over 944034.00 frames. 
+2024-07-29 00:25:36,965 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 00:25:50,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=218101.33333333334, ans=0.125 +2024-07-29 00:25:50,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=218101.33333333334, ans=0.125 +2024-07-29 00:25:52,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-07-29 00:26:15,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=218114.66666666666, ans=0.0 +2024-07-29 00:26:23,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.877e+01 5.974e+01 6.557e+01 7.210e+01 8.434e+01, threshold=1.311e+02, percent-clipped=0.0 +2024-07-29 00:26:27,166 INFO [train.py:1114] (3/4) Epoch 17, batch 50, loss[loss=0.166, simple_loss=0.2524, pruned_loss=0.03974, over 4618.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2672, pruned_loss=0.04253, over 206445.19 frames. ], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:26:36,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=218154.66666666666, ans=0.0 +2024-07-29 00:26:49,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218181.33333333334, ans=0.1 +2024-07-29 00:26:54,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218194.66666666666, ans=0.1 +2024-07-29 00:27:00,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.56 vs. limit=15.0 +2024-07-29 00:27:00,756 INFO [train.py:1114] (3/4) Epoch 17, batch 100, loss[loss=0.1785, simple_loss=0.2613, pruned_loss=0.04783, over 4638.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2719, pruned_loss=0.04451, over 365323.84 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:05,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=218208.0, ans=0.09899494936611666 +2024-07-29 00:27:18,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.69 vs. limit=22.5 +2024-07-29 00:27:29,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=218261.33333333334, ans=0.0 +2024-07-29 00:27:30,361 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.772e+01 6.593e+01 7.419e+01 9.701e+01, threshold=1.319e+02, percent-clipped=0.0 +2024-07-29 00:27:33,593 INFO [train.py:1114] (3/4) Epoch 17, batch 150, loss[loss=0.1226, simple_loss=0.2098, pruned_loss=0.01766, over 4616.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2689, pruned_loss=0.04281, over 493941.41 frames. 
], batch size: 11, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:27:34,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=218274.66666666666, ans=0.125 +2024-07-29 00:27:53,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=218314.66666666666, ans=0.025 +2024-07-29 00:27:55,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=218314.66666666666, ans=0.0 +2024-07-29 00:27:57,317 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.66 vs. limit=15.0 +2024-07-29 00:27:58,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff3.min_abs, batch_count=218314.66666666666, ans=0.2 +2024-07-29 00:28:03,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=218328.0, ans=0.0 +2024-07-29 00:28:04,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=218328.0, ans=0.0 +2024-07-29 00:28:05,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218328.0, ans=0.1 +2024-07-29 00:28:06,813 INFO [train.py:1114] (3/4) Epoch 17, batch 200, loss[loss=0.1805, simple_loss=0.2803, pruned_loss=0.04036, over 4524.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2684, pruned_loss=0.04329, over 593620.73 frames. ], batch size: 21, lr: 4.45e-03, grad_scale: 32.0 +2024-07-29 00:28:09,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=218341.33333333334, ans=0.125 +2024-07-29 00:28:10,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.91 vs. 
limit=15.0
+2024-07-29 00:28:10,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=218341.33333333334, ans=0.2
+2024-07-29 00:28:12,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=218341.33333333334, ans=0.05
+2024-07-29 00:28:12,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=218354.66666666666, ans=0.2
+2024-07-29 00:28:16,015 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=218354.66666666666, ans=0.2
+2024-07-29 00:28:18,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218354.66666666666, ans=0.1
+2024-07-29 00:28:18,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=218354.66666666666, ans=0.125
+2024-07-29 00:28:24,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=218368.0, ans=0.125
+2024-07-29 00:28:34,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=218394.66666666666, ans=0.125
+2024-07-29 00:28:36,801 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.659e+01 6.456e+01 7.215e+01 1.150e+02, threshold=1.291e+02, percent-clipped=0.0
+2024-07-29 00:28:38,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=218394.66666666666, ans=0.125
+2024-07-29 00:28:40,260 INFO [train.py:1114] (3/4) Epoch 17, batch 250, loss[loss=0.2001, simple_loss=0.2939, pruned_loss=0.05315, over 4633.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2683, pruned_loss=0.04371, over 670335.01 frames. ], batch size: 16, lr: 4.45e-03, grad_scale: 32.0
+2024-07-29 00:28:47,244 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:28:49,858 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:29:01,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218448.0, ans=0.0
+2024-07-29 00:29:01,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=218448.0, ans=0.125
+2024-07-29 00:29:10,280 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=9.75 vs. limit=15.0
+2024-07-29 00:29:15,284 INFO [train.py:1114] (3/4) Epoch 17, batch 300, loss[loss=0.2103, simple_loss=0.3043, pruned_loss=0.05816, over 4808.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.268, pruned_loss=0.04388, over 730072.39 frames. ], batch size: 15, lr: 4.45e-03, grad_scale: 32.0
+2024-07-29 00:29:21,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=218474.66666666666, ans=0.0
+2024-07-29 00:29:25,309 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.87 vs. limit=15.0
+2024-07-29 00:29:47,007 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.418e+01 5.933e+01 6.484e+01 8.977e+01, threshold=1.187e+02, percent-clipped=0.0
+2024-07-29 00:29:50,307 INFO [train.py:1114] (3/4) Epoch 17, batch 350, loss[loss=0.1611, simple_loss=0.2417, pruned_loss=0.04027, over 4935.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04392, over 776200.67 frames. ], batch size: 12, lr: 4.45e-03, grad_scale: 32.0
+2024-07-29 00:29:50,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=218541.33333333334, ans=0.125
+2024-07-29 00:29:55,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=218541.33333333334, ans=0.125
+2024-07-29 00:30:00,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=218554.66666666666, ans=0.1
+2024-07-29 00:30:04,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.08 vs. limit=12.0
+2024-07-29 00:30:15,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=218581.33333333334, ans=0.0
+2024-07-29 00:30:15,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218581.33333333334, ans=0.1
+2024-07-29 00:30:19,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_positive, batch_count=218594.66666666666, ans=0.05
+2024-07-29 00:30:19,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=218594.66666666666, ans=0.125
+2024-07-29 00:30:23,660 INFO [train.py:1114] (3/4) Epoch 17, batch 400, loss[loss=0.1849, simple_loss=0.274, pruned_loss=0.04791, over 4689.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2683, pruned_loss=0.04388, over 813403.02 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0
+2024-07-29 00:30:24,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.83 vs. limit=22.5
+2024-07-29 00:30:24,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=218608.0, ans=0.2
+2024-07-29 00:30:25,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=218608.0, ans=0.0
+2024-07-29 00:30:27,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=218608.0, ans=0.125
+2024-07-29 00:30:28,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.93 vs. limit=15.0
+2024-07-29 00:30:37,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.56 vs. limit=15.0
+2024-07-29 00:30:37,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=218621.33333333334, ans=0.2
+2024-07-29 00:30:46,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=218648.0, ans=0.125
+2024-07-29 00:30:50,385 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.13 vs. limit=15.0
+2024-07-29 00:30:58,241 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.560e+01 5.995e+01 6.560e+01 9.746e+01, threshold=1.199e+02, percent-clipped=0.0
+2024-07-29 00:30:58,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.81 vs. limit=10.0
+2024-07-29 00:31:00,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.56 vs. limit=15.0
+2024-07-29 00:31:01,603 INFO [train.py:1114] (3/4) Epoch 17, batch 450, loss[loss=0.1896, simple_loss=0.2804, pruned_loss=0.04945, over 4636.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2683, pruned_loss=0.04367, over 839495.69 frames. ], batch size: 13, lr: 4.45e-03, grad_scale: 32.0
+2024-07-29 00:31:01,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=218674.66666666666, ans=0.1
+2024-07-29 00:31:02,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218674.66666666666, ans=0.1
+2024-07-29 00:31:09,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=218688.0, ans=0.0
+2024-07-29 00:31:11,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.04 vs. limit=15.0
+2024-07-29 00:31:29,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=7.80 vs. limit=15.0
+2024-07-29 00:31:30,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=218728.0, ans=0.125
+2024-07-29 00:31:34,903 INFO [train.py:1114] (3/4) Epoch 17, batch 500, loss[loss=0.1949, simple_loss=0.2797, pruned_loss=0.05504, over 4674.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2665, pruned_loss=0.04277, over 861971.57 frames. ], batch size: 15, lr: 4.45e-03, grad_scale: 32.0
+2024-07-29 00:31:37,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=218741.33333333334, ans=0.2
+2024-07-29 00:31:37,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=218741.33333333334, ans=0.125
+2024-07-29 00:31:40,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=218741.33333333334, ans=0.025
+2024-07-29 00:31:52,307 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=218768.0, ans=0.125
+2024-07-29 00:32:01,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=218781.33333333334, ans=0.125
+2024-07-29 00:32:04,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=218781.33333333334, ans=0.125
+2024-07-29 00:32:07,879 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=218794.66666666666, ans=0.09899494936611666
+2024-07-29 00:32:09,026 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.408e+01 6.097e+01 6.893e+01 9.871e+01, threshold=1.219e+02, percent-clipped=0.0
+2024-07-29 00:32:12,366 INFO [train.py:1114] (3/4) Epoch 17, batch 550, loss[loss=0.2108, simple_loss=0.3068, pruned_loss=0.0574, over 4633.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2662, pruned_loss=0.04255, over 877988.28 frames. ], batch size: 17, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:32:12,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218808.0, ans=0.1
+2024-07-29 00:32:18,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218821.33333333334, ans=0.1
+2024-07-29 00:32:28,176 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:32:33,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.70 vs. limit=10.0
+2024-07-29 00:32:48,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=218861.33333333334, ans=0.1
+2024-07-29 00:32:50,134 INFO [train.py:1114] (3/4) Epoch 17, batch 600, loss[loss=0.1933, simple_loss=0.2917, pruned_loss=0.04745, over 4615.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2666, pruned_loss=0.04257, over 892134.18 frames. ], batch size: 16, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:32:57,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=218874.66666666666, ans=0.0
+2024-07-29 00:33:03,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=218888.0, ans=0.025
+2024-07-29 00:33:27,196 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:33:27,705 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.574e+01 6.190e+01 7.231e+01 1.147e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 00:33:29,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=218928.0, ans=0.125
+2024-07-29 00:33:31,218 INFO [train.py:1114] (3/4) Epoch 17, batch 650, loss[loss=0.1864, simple_loss=0.2766, pruned_loss=0.04805, over 4761.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2666, pruned_loss=0.0428, over 903842.24 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:33:37,589 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.32 vs. limit=15.0
+2024-07-29 00:33:40,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=218954.66666666666, ans=0.125
+2024-07-29 00:33:42,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=218954.66666666666, ans=0.1
+2024-07-29 00:33:54,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=218981.33333333334, ans=0.125
+2024-07-29 00:34:01,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=218994.66666666666, ans=0.0
+2024-07-29 00:34:05,192 INFO [train.py:1114] (3/4) Epoch 17, batch 700, loss[loss=0.1685, simple_loss=0.2682, pruned_loss=0.03436, over 4639.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2676, pruned_loss=0.04295, over 911667.27 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:34:05,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=219008.0, ans=0.125
+2024-07-29 00:34:17,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.63 vs. limit=15.0
+2024-07-29 00:34:19,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=219034.66666666666, ans=0.0
+2024-07-29 00:34:21,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219034.66666666666, ans=0.125
+2024-07-29 00:34:30,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=219048.0, ans=0.0
+2024-07-29 00:34:34,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219048.0, ans=0.125
+2024-07-29 00:34:41,479 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.534e+01 5.477e+01 6.099e+01 6.897e+01 1.014e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 00:34:44,984 INFO [train.py:1114] (3/4) Epoch 17, batch 750, loss[loss=0.192, simple_loss=0.2836, pruned_loss=0.05019, over 4686.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2669, pruned_loss=0.04275, over 918236.97 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:34:45,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=219074.66666666666, ans=15.0
+2024-07-29 00:34:49,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=219074.66666666666, ans=0.125
+2024-07-29 00:34:52,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.38 vs. limit=15.0
+2024-07-29 00:35:09,358 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.45 vs. limit=10.0
+2024-07-29 00:35:13,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=219128.0, ans=0.0
+2024-07-29 00:35:14,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=219128.0, ans=10.0
+2024-07-29 00:35:15,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=219128.0, ans=0.125
+2024-07-29 00:35:17,741 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:35:21,500 INFO [train.py:1114] (3/4) Epoch 17, batch 800, loss[loss=0.1696, simple_loss=0.2522, pruned_loss=0.04357, over 4854.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.04286, over 923401.29 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:35:28,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219154.66666666666, ans=0.1
+2024-07-29 00:35:31,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=219154.66666666666, ans=0.025
+2024-07-29 00:35:32,918 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:35:34,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=219168.0, ans=0.125
+2024-07-29 00:35:39,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=219168.0, ans=0.125
+2024-07-29 00:35:40,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219168.0, ans=0.0
+2024-07-29 00:35:41,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=219181.33333333334, ans=0.035
+2024-07-29 00:35:52,215 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.327e+01 5.596e+01 6.013e+01 6.802e+01 9.397e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-29 00:35:53,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219194.66666666666, ans=0.1
+2024-07-29 00:35:55,665 INFO [train.py:1114] (3/4) Epoch 17, batch 850, loss[loss=0.1521, simple_loss=0.2488, pruned_loss=0.02772, over 4660.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2668, pruned_loss=0.04265, over 927429.84 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:35:56,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=219208.0, ans=0.0
+2024-07-29 00:36:00,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219208.0, ans=0.1
+2024-07-29 00:36:06,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=219221.33333333334, ans=0.125
+2024-07-29 00:36:15,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=219248.0, ans=0.025
+2024-07-29 00:36:19,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.92 vs. limit=6.0
+2024-07-29 00:36:21,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=219248.0, ans=0.125
+2024-07-29 00:36:22,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=219261.33333333334, ans=0.0
+2024-07-29 00:36:25,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=219261.33333333334, ans=0.125
+2024-07-29 00:36:31,446 INFO [train.py:1114] (3/4) Epoch 17, batch 900, loss[loss=0.1547, simple_loss=0.2483, pruned_loss=0.03059, over 4860.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.04288, over 929051.97 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:36:33,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=219274.66666666666, ans=0.125
+2024-07-29 00:36:39,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=219288.0, ans=0.0
+2024-07-29 00:36:40,762 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=219288.0, ans=0.0
+2024-07-29 00:36:46,099 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:36:50,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=219314.66666666666, ans=0.125
+2024-07-29 00:36:54,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=219314.66666666666, ans=0.2
+2024-07-29 00:36:57,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=219328.0, ans=0.025
+2024-07-29 00:36:58,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.65 vs. limit=15.0
+2024-07-29 00:36:59,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=219328.0, ans=0.0
+2024-07-29 00:37:01,136 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.414e+01 5.608e+01 6.144e+01 6.799e+01 1.059e+02, threshold=1.229e+02, percent-clipped=0.0
+2024-07-29 00:37:04,603 INFO [train.py:1114] (3/4) Epoch 17, batch 950, loss[loss=0.154, simple_loss=0.237, pruned_loss=0.03551, over 4776.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2665, pruned_loss=0.04266, over 930873.64 frames. ], batch size: 12, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:37:22,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219368.0, ans=0.125
+2024-07-29 00:37:24,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=219381.33333333334, ans=0.125
+2024-07-29 00:37:33,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=219394.66666666666, ans=0.125
+2024-07-29 00:37:38,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=219394.66666666666, ans=0.125
+2024-07-29 00:37:41,931 INFO [train.py:1114] (3/4) Epoch 17, batch 1000, loss[loss=0.1766, simple_loss=0.2714, pruned_loss=0.04091, over 4965.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2671, pruned_loss=0.04289, over 930243.97 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:37:42,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=219408.0, ans=0.035
+2024-07-29 00:37:53,784 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=219421.33333333334, ans=0.0
+2024-07-29 00:37:55,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=219434.66666666666, ans=0.0
+2024-07-29 00:38:09,389 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.72 vs. limit=22.5
+2024-07-29 00:38:13,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.01 vs. limit=15.0
+2024-07-29 00:38:13,341 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.561e+01 5.640e+01 5.981e+01 6.813e+01 9.582e+01, threshold=1.196e+02, percent-clipped=0.0
+2024-07-29 00:38:15,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.43 vs. limit=22.5
+2024-07-29 00:38:15,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=219461.33333333334, ans=0.125
+2024-07-29 00:38:16,861 INFO [train.py:1114] (3/4) Epoch 17, batch 1050, loss[loss=0.185, simple_loss=0.2839, pruned_loss=0.04304, over 4873.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04259, over 932709.36 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:38:24,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=219474.66666666666, ans=0.125
+2024-07-29 00:38:33,841 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=219488.0, ans=0.0
+2024-07-29 00:38:41,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=219501.33333333334, ans=0.0
+2024-07-29 00:38:48,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=219514.66666666666, ans=0.125
+2024-07-29 00:38:57,328 INFO [train.py:1114] (3/4) Epoch 17, batch 1100, loss[loss=0.1912, simple_loss=0.2752, pruned_loss=0.05363, over 4906.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2659, pruned_loss=0.0426, over 935020.00 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:38:57,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=219541.33333333334, ans=0.0
+2024-07-29 00:39:00,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=219541.33333333334, ans=0.125
+2024-07-29 00:39:04,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=219554.66666666666, ans=0.025
+2024-07-29 00:39:11,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=219568.0, ans=0.0
+2024-07-29 00:39:12,084 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=219568.0, ans=0.125
+2024-07-29 00:39:16,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219581.33333333334, ans=0.1
+2024-07-29 00:39:17,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=219581.33333333334, ans=0.125
+2024-07-29 00:39:22,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=219581.33333333334, ans=0.125
+2024-07-29 00:39:24,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=219594.66666666666, ans=0.0
+2024-07-29 00:39:27,541 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.574e+01 5.915e+01 6.747e+01 1.337e+02, threshold=1.183e+02, percent-clipped=1.0
+2024-07-29 00:39:30,887 INFO [train.py:1114] (3/4) Epoch 17, batch 1150, loss[loss=0.1301, simple_loss=0.2184, pruned_loss=0.02092, over 4902.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04232, over 935067.00 frames. ], batch size: 13, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:39:38,544 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:39:39,962 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:39:40,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=219621.33333333334, ans=0.125
+2024-07-29 00:39:50,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219648.0, ans=0.1
+2024-07-29 00:40:03,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.08 vs. limit=15.0
+2024-07-29 00:40:04,855 INFO [train.py:1114] (3/4) Epoch 17, batch 1200, loss[loss=0.1792, simple_loss=0.2688, pruned_loss=0.04482, over 4875.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2671, pruned_loss=0.04258, over 934010.74 frames. ], batch size: 14, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:40:07,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219674.66666666666, ans=0.1
+2024-07-29 00:40:33,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=219728.0, ans=0.025
+2024-07-29 00:40:37,451 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.543e+01 6.182e+01 6.957e+01 1.085e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-29 00:40:41,020 INFO [train.py:1114] (3/4) Epoch 17, batch 1250, loss[loss=0.1887, simple_loss=0.274, pruned_loss=0.0517, over 4792.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2681, pruned_loss=0.04246, over 937947.27 frames. ], batch size: 15, lr: 4.44e-03, grad_scale: 32.0
+2024-07-29 00:40:44,830 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.90 vs. limit=6.0
+2024-07-29 00:40:51,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=219754.66666666666, ans=0.0
+2024-07-29 00:40:57,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=219768.0, ans=0.0
+2024-07-29 00:40:58,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=219768.0, ans=0.0
+2024-07-29 00:41:17,215 INFO [train.py:1114] (3/4) Epoch 17, batch 1300, loss[loss=0.1912, simple_loss=0.28, pruned_loss=0.05115, over 4715.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2674, pruned_loss=0.04242, over 938910.51 frames. ], batch size: 19, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:41:17,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=219808.0, ans=0.0
+2024-07-29 00:41:19,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=219808.0, ans=0.0
+2024-07-29 00:41:21,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=219808.0, ans=0.1
+2024-07-29 00:41:24,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=219821.33333333334, ans=0.0
+2024-07-29 00:41:27,023 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.70 vs. limit=22.5
+2024-07-29 00:41:29,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=219821.33333333334, ans=0.125
+2024-07-29 00:41:37,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.00 vs. limit=15.0
+2024-07-29 00:41:44,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=219848.0, ans=0.125
+2024-07-29 00:41:52,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=219861.33333333334, ans=0.0
+2024-07-29 00:41:52,950 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.796e+01 6.477e+01 7.611e+01 1.197e+02, threshold=1.295e+02, percent-clipped=0.0
+2024-07-29 00:41:56,417 INFO [train.py:1114] (3/4) Epoch 17, batch 1350, loss[loss=0.1692, simple_loss=0.2526, pruned_loss=0.04295, over 4759.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2668, pruned_loss=0.0422, over 941191.62 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:41:59,760 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.67 vs. limit=22.5
+2024-07-29 00:42:05,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=219888.0, ans=0.1
+2024-07-29 00:42:15,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.57 vs. limit=15.0
+2024-07-29 00:42:33,536 INFO [train.py:1114] (3/4) Epoch 17, batch 1400, loss[loss=0.1303, simple_loss=0.2187, pruned_loss=0.02097, over 4699.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2664, pruned_loss=0.04245, over 942997.04 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:42:34,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=219941.33333333334, ans=0.125
+2024-07-29 00:42:37,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=219941.33333333334, ans=0.125
+2024-07-29 00:42:40,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.03 vs. limit=15.0
+2024-07-29 00:42:55,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=219981.33333333334, ans=0.1
+2024-07-29 00:43:01,561 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=13.83 vs. limit=22.5
+2024-07-29 00:43:06,595 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.372e+01 5.575e+01 5.917e+01 6.621e+01 1.311e+02, threshold=1.183e+02, percent-clipped=1.0
+2024-07-29 00:43:10,159 INFO [train.py:1114] (3/4) Epoch 17, batch 1450, loss[loss=0.1707, simple_loss=0.2659, pruned_loss=0.03771, over 4683.00 frames. ], tot_loss[loss=0.175, simple_loss=0.266, pruned_loss=0.04206, over 942879.22 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:43:16,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=220008.0, ans=0.125
+2024-07-29 00:43:37,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=220048.0, ans=0.0
+2024-07-29 00:43:38,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=220048.0, ans=22.5
+2024-07-29 00:43:40,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.52 vs. limit=15.0
+2024-07-29 00:43:48,451 INFO [train.py:1114] (3/4) Epoch 17, batch 1500, loss[loss=0.1492, simple_loss=0.2477, pruned_loss=0.02538, over 4812.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2663, pruned_loss=0.04225, over 942725.16 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:43:48,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=220074.66666666666, ans=0.0
+2024-07-29 00:44:15,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220128.0, ans=0.125
+2024-07-29 00:44:16,831 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.91 vs. limit=22.5
+2024-07-29 00:44:17,873 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=220128.0, ans=0.125
+2024-07-29 00:44:18,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.771e+01 6.251e+01 6.983e+01 1.071e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-29 00:44:22,218 INFO [train.py:1114] (3/4) Epoch 17, batch 1550, loss[loss=0.2031, simple_loss=0.2879, pruned_loss=0.05915, over 4902.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2666, pruned_loss=0.04303, over 939053.36 frames. ], batch size: 15, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:44:22,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=14.56 vs. limit=22.5
+2024-07-29 00:44:26,389 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220141.33333333334, ans=0.125
+2024-07-29 00:44:27,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=220141.33333333334, ans=0.125
+2024-07-29 00:44:33,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=220154.66666666666, ans=0.125
+2024-07-29 00:44:37,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=220168.0, ans=0.125
+2024-07-29 00:44:42,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=220181.33333333334, ans=0.025
+2024-07-29 00:44:42,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220181.33333333334, ans=0.1
+2024-07-29 00:44:44,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=220181.33333333334, ans=0.2
+2024-07-29 00:44:55,766 INFO [train.py:1114] (3/4) Epoch 17, batch 1600, loss[loss=0.1593, simple_loss=0.2651, pruned_loss=0.02678, over 4872.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.267, pruned_loss=0.04316, over 937674.68 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:44:56,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=220208.0, ans=0.0
+2024-07-29 00:44:57,478 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.86 vs. limit=22.5
+2024-07-29 00:44:59,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=220208.0, ans=0.09899494936611666
+2024-07-29 00:45:00,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=220208.0, ans=0.025
+2024-07-29 00:45:09,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.69 vs. limit=22.5
+2024-07-29 00:45:26,614 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.909e+01 5.495e+01 6.270e+01 6.960e+01 9.456e+01, threshold=1.254e+02, percent-clipped=0.0
+2024-07-29 00:45:28,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=220261.33333333334, ans=0.125
+2024-07-29 00:45:30,186 INFO [train.py:1114] (3/4) Epoch 17, batch 1650, loss[loss=0.1658, simple_loss=0.2562, pruned_loss=0.03774, over 4656.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2668, pruned_loss=0.04288, over 937405.50 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:45:37,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=220288.0, ans=0.2
+2024-07-29 00:45:45,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0
+2024-07-29 00:45:46,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220301.33333333334, ans=0.1
+2024-07-29 00:45:55,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.21 vs. limit=22.5
+2024-07-29 00:46:00,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=220328.0, ans=0.1
+2024-07-29 00:46:04,532 INFO [train.py:1114] (3/4) Epoch 17, batch 1700, loss[loss=0.1489, simple_loss=0.2289, pruned_loss=0.03445, over 4702.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.267, pruned_loss=0.04315, over 938979.82 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:46:05,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.87 vs. limit=22.5
+2024-07-29 00:46:08,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=220341.33333333334, ans=0.125
+2024-07-29 00:46:10,784 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.46 vs. limit=15.0
+2024-07-29 00:46:23,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220368.0, ans=0.125
+2024-07-29 00:46:28,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=220381.33333333334, ans=0.0
+2024-07-29 00:46:33,555 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=220394.66666666666, ans=0.125
+2024-07-29 00:46:36,628 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.642e+01 5.850e+01 6.496e+01 7.744e+01 1.150e+02, threshold=1.299e+02, percent-clipped=0.0
+2024-07-29 00:46:36,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=220394.66666666666, ans=0.125
+2024-07-29 00:46:37,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=220394.66666666666, ans=0.125
+2024-07-29 00:46:40,163 INFO [train.py:1114] (3/4) Epoch 17, batch 1750, loss[loss=0.1674, simple_loss=0.2557, pruned_loss=0.03952, over 4831.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2668, pruned_loss=0.04352, over 940113.68 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 32.0
+2024-07-29 00:46:40,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=220408.0, ans=0.0
+2024-07-29 00:46:42,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=220408.0, ans=0.125
+2024-07-29 00:46:52,427 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:46:56,631 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.82 vs. limit=10.0
+2024-07-29 00:46:57,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220434.66666666666, ans=0.125
+2024-07-29 00:47:04,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=220448.0, ans=0.125
+2024-07-29 00:47:08,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=220461.33333333334, ans=0.125
+2024-07-29 00:47:13,450 INFO [train.py:1114] (3/4) Epoch 17, batch 1800, loss[loss=0.1797, simple_loss=0.2704, pruned_loss=0.04453, over 4639.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2673, pruned_loss=0.04359, over 940553.72 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0
+2024-07-29 00:47:14,868 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:47:26,537 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.44 vs. limit=12.0
+2024-07-29 00:47:33,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten.whitening_limit, batch_count=220514.66666666666, ans=15.0
+2024-07-29 00:47:45,634 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.755e+01 5.725e+01 6.271e+01 7.257e+01 1.188e+02, threshold=1.254e+02, percent-clipped=0.0
+2024-07-29 00:47:49,042 INFO [train.py:1114] (3/4) Epoch 17, batch 1850, loss[loss=0.211, simple_loss=0.2921, pruned_loss=0.06496, over 4812.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2676, pruned_loss=0.04372, over 940226.21 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0
+2024-07-29 00:48:00,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=220554.66666666666, ans=0.0
+2024-07-29 00:48:01,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.87 vs. limit=22.5
+2024-07-29 00:48:16,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=220594.66666666666, ans=0.0
+2024-07-29 00:48:17,844 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:48:18,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=220594.66666666666, ans=0.125
+2024-07-29 00:48:23,129 INFO [train.py:1114] (3/4) Epoch 17, batch 1900, loss[loss=0.1676, simple_loss=0.2552, pruned_loss=0.04007, over 4661.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2677, pruned_loss=0.04389, over 941328.28 frames. ], batch size: 14, lr: 4.43e-03, grad_scale: 64.0
+2024-07-29 00:48:23,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=220608.0, ans=0.125
+2024-07-29 00:48:43,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. limit=6.0
+2024-07-29 00:48:44,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=220648.0, ans=0.125
+2024-07-29 00:48:48,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=220648.0, ans=0.125
+2024-07-29 00:48:55,014 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.475e+01 5.883e+01 6.427e+01 8.062e+01 1.126e+02, threshold=1.285e+02, percent-clipped=0.0
+2024-07-29 00:49:13,498 INFO [train.py:1114] (3/4) Epoch 17, batch 1950, loss[loss=0.1756, simple_loss=0.2743, pruned_loss=0.03847, over 4890.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2681, pruned_loss=0.04353, over 943379.40 frames. ], batch size: 13, lr: 4.43e-03, grad_scale: 64.0
+2024-07-29 00:49:15,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=220674.66666666666, ans=0.0
+2024-07-29 00:49:22,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=220688.0, ans=0.125
+2024-07-29 00:49:48,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=220688.0, ans=0.125
+2024-07-29 00:49:58,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.94 vs. limit=15.0
+2024-07-29 00:50:06,023 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:50:28,633 INFO [train.py:1114] (3/4) Epoch 17, batch 2000, loss[loss=0.1516, simple_loss=0.2431, pruned_loss=0.03004, over 4810.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.268, pruned_loss=0.04356, over 940913.10 frames. ], batch size: 11, lr: 4.43e-03, grad_scale: 64.0
+2024-07-29 00:50:57,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer_ff3.min_abs, batch_count=220741.33333333334, ans=0.2
+2024-07-29 00:51:00,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220741.33333333334, ans=0.1
+2024-07-29 00:51:04,407 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=12.0
+2024-07-29 00:51:11,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=220768.0, ans=0.0
+2024-07-29 00:51:15,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=220768.0, ans=0.025
+2024-07-29 00:51:22,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=220781.33333333334, ans=0.2
+2024-07-29 00:51:22,726 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.50 vs. limit=15.0
+2024-07-29 00:51:32,746 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.436e+01 5.997e+01 6.741e+01 1.066e+02, threshold=1.199e+02, percent-clipped=0.0
+2024-07-29 00:51:32,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=220794.66666666666, ans=0.0
+2024-07-29 00:51:36,127 INFO [train.py:1114] (3/4) Epoch 17, batch 2050, loss[loss=0.1381, simple_loss=0.2271, pruned_loss=0.02453, over 4610.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2668, pruned_loss=0.04328, over 938884.90 frames. ], batch size: 11, lr: 4.42e-03, grad_scale: 64.0
+2024-07-29 00:51:38,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=220808.0, ans=0.1
+2024-07-29 00:51:47,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=220821.33333333334, ans=0.1
+2024-07-29 00:52:00,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=220848.0, ans=0.2
+2024-07-29 00:52:02,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220861.33333333334, ans=0.0
+2024-07-29 00:52:08,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=220861.33333333334, ans=0.125
+2024-07-29 00:52:17,010 INFO [train.py:1114] (3/4) Epoch 17, batch 2100, loss[loss=0.1958, simple_loss=0.2806, pruned_loss=0.05552, over 4764.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2669, pruned_loss=0.04347, over 941234.64 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:52:19,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=220874.66666666666, ans=0.2
+2024-07-29 00:52:36,697 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:52:41,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=220914.66666666666, ans=0.125
+2024-07-29 00:52:43,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=220928.0, ans=0.0
+2024-07-29 00:52:45,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=220928.0, ans=0.0
+2024-07-29 00:52:48,038 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.684e+01 5.567e+01 6.209e+01 7.288e+01 1.074e+02, threshold=1.242e+02, percent-clipped=0.0
+2024-07-29 00:52:48,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=220928.0, ans=0.125
+2024-07-29 00:52:48,365 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.51 vs. limit=10.0
+2024-07-29 00:52:50,926 INFO [train.py:1114] (3/4) Epoch 17, batch 2150, loss[loss=0.1686, simple_loss=0.257, pruned_loss=0.04009, over 4888.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.266, pruned_loss=0.04291, over 944492.43 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:53:03,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=220954.66666666666, ans=0.025
+2024-07-29 00:53:24,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=220994.66666666666, ans=0.2
+2024-07-29 00:53:24,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=220994.66666666666, ans=0.07
+2024-07-29 00:53:26,710 INFO [train.py:1114] (3/4) Epoch 17, batch 2200, loss[loss=0.1989, simple_loss=0.2904, pruned_loss=0.05369, over 4813.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.266, pruned_loss=0.04266, over 943662.91 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:53:29,716 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.64 vs. limit=15.0
+2024-07-29 00:53:33,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221021.33333333334, ans=0.1
+2024-07-29 00:53:41,201 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.78 vs. limit=22.5
+2024-07-29 00:53:41,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=221034.66666666666, ans=0.125
+2024-07-29 00:53:57,772 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.391e+01 5.667e+01 6.562e+01 7.774e+01 1.023e+02, threshold=1.312e+02, percent-clipped=0.0
+2024-07-29 00:54:00,466 INFO [train.py:1114] (3/4) Epoch 17, batch 2250, loss[loss=0.1852, simple_loss=0.271, pruned_loss=0.0497, over 4707.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.266, pruned_loss=0.04258, over 941978.44 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:54:01,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=221074.66666666666, ans=0.125
+2024-07-29 00:54:09,291 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=221088.0, ans=0.125
+2024-07-29 00:54:10,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=221088.0, ans=0.5
+2024-07-29 00:54:12,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.26 vs. limit=22.5
+2024-07-29 00:54:22,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=221114.66666666666, ans=0.0
+2024-07-29 00:54:30,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=221128.0, ans=0.0
+2024-07-29 00:54:31,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=221128.0, ans=0.0
+2024-07-29 00:54:33,732 INFO [train.py:1114] (3/4) Epoch 17, batch 2300, loss[loss=0.1443, simple_loss=0.2281, pruned_loss=0.03029, over 4949.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2642, pruned_loss=0.04194, over 939572.07 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:54:42,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=221154.66666666666, ans=0.0
+2024-07-29 00:54:50,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=221168.0, ans=0.0
+2024-07-29 00:54:57,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=221181.33333333334, ans=0.0
+2024-07-29 00:55:06,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.80 vs. limit=15.0
+2024-07-29 00:55:06,975 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.506e+01 6.021e+01 6.838e+01 1.144e+02, threshold=1.204e+02, percent-clipped=0.0
+2024-07-29 00:55:09,638 INFO [train.py:1114] (3/4) Epoch 17, batch 2350, loss[loss=0.1624, simple_loss=0.2608, pruned_loss=0.03202, over 4639.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2651, pruned_loss=0.04245, over 941557.83 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:55:11,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=221208.0, ans=0.025
+2024-07-29 00:55:12,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=221208.0, ans=0.125
+2024-07-29 00:55:13,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=221208.0, ans=0.125
+2024-07-29 00:55:22,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=221221.33333333334, ans=0.125
+2024-07-29 00:55:23,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221234.66666666666, ans=0.1
+2024-07-29 00:55:43,237 INFO [train.py:1114] (3/4) Epoch 17, batch 2400, loss[loss=0.168, simple_loss=0.2675, pruned_loss=0.0343, over 4648.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2653, pruned_loss=0.04225, over 941497.69 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:55:44,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=12.02 vs. limit=15.0
+2024-07-29 00:55:58,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.66 vs. limit=15.0
+2024-07-29 00:56:04,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=221314.66666666666, ans=0.125
+2024-07-29 00:56:07,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.77 vs. limit=15.0
+2024-07-29 00:56:07,126 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.61 vs. limit=12.0
+2024-07-29 00:56:11,398 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.60 vs. limit=15.0
+2024-07-29 00:56:17,995 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.747e+01 5.694e+01 6.302e+01 6.928e+01 9.959e+01, threshold=1.260e+02, percent-clipped=0.0
+2024-07-29 00:56:20,726 INFO [train.py:1114] (3/4) Epoch 17, batch 2450, loss[loss=0.16, simple_loss=0.2661, pruned_loss=0.02693, over 4702.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2662, pruned_loss=0.04285, over 936985.04 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:56:22,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=221341.33333333334, ans=0.1
+2024-07-29 00:56:25,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=221341.33333333334, ans=0.025
+2024-07-29 00:56:40,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=221381.33333333334, ans=0.05
+2024-07-29 00:56:42,361 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:56:49,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=221394.66666666666, ans=0.125
+2024-07-29 00:56:54,119 INFO [train.py:1114] (3/4) Epoch 17, batch 2500, loss[loss=0.186, simple_loss=0.281, pruned_loss=0.04548, over 4810.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2662, pruned_loss=0.04255, over 939067.97 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:56:56,892 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:57:07,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=221434.66666666666, ans=0.125
+2024-07-29 00:57:13,430 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=221448.0, ans=0.125
+2024-07-29 00:57:13,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=221448.0, ans=0.125
+2024-07-29 00:57:23,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=221461.33333333334, ans=0.0
+2024-07-29 00:57:24,996 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.527e+01 5.455e+01 6.019e+01 6.790e+01 9.676e+01, threshold=1.204e+02, percent-clipped=0.0
+2024-07-29 00:57:25,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=15.0
+2024-07-29 00:57:27,773 INFO [train.py:1114] (3/4) Epoch 17, batch 2550, loss[loss=0.1496, simple_loss=0.2371, pruned_loss=0.03104, over 4804.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2662, pruned_loss=0.04245, over 938792.18 frames. ], batch size: 11, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:57:29,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=221474.66666666666, ans=0.2
+2024-07-29 00:57:35,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221488.0, ans=0.1
+2024-07-29 00:57:44,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=221488.0, ans=0.0
+2024-07-29 00:57:47,654 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.49 vs. limit=5.0
+2024-07-29 00:58:07,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=221514.66666666666, ans=0.035
+2024-07-29 00:58:11,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=221514.66666666666, ans=0.0
+2024-07-29 00:58:22,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=221514.66666666666, ans=0.125
+2024-07-29 00:58:29,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=221541.33333333334, ans=0.125
+2024-07-29 00:58:30,272 INFO [train.py:1114] (3/4) Epoch 17, batch 2600, loss[loss=0.1667, simple_loss=0.2616, pruned_loss=0.03595, over 4892.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2665, pruned_loss=0.04302, over 937391.88 frames. ], batch size: 13, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:58:37,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=221554.66666666666, ans=0.1
+2024-07-29 00:58:46,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=221554.66666666666, ans=0.04949747468305833
+2024-07-29 00:59:06,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=221594.66666666666, ans=0.125
+2024-07-29 00:59:07,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=221594.66666666666, ans=0.0
+2024-07-29 00:59:08,387 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.679e+01 5.744e+01 6.230e+01 7.123e+01 1.037e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-29 00:59:11,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=221594.66666666666, ans=0.025
+2024-07-29 00:59:17,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=221608.0, ans=0.025
+2024-07-29 00:59:21,106 INFO [train.py:1114] (3/4) Epoch 17, batch 2650, loss[loss=0.2178, simple_loss=0.3124, pruned_loss=0.06164, over 4645.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2673, pruned_loss=0.04329, over 939452.46 frames. ], batch size: 16, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 00:59:26,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=221608.0, ans=0.0
+2024-07-29 00:59:29,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=221621.33333333334, ans=0.1
+2024-07-29 00:59:30,001 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=221621.33333333334, ans=0.025
+2024-07-29 00:59:40,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.02 vs. limit=22.5
+2024-07-29 00:59:52,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=221661.33333333334, ans=0.0
+2024-07-29 00:59:53,613 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 00:59:54,776 INFO [train.py:1114] (3/4) Epoch 17, batch 2700, loss[loss=0.2019, simple_loss=0.293, pruned_loss=0.05546, over 4727.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2676, pruned_loss=0.04334, over 939516.96 frames. ], batch size: 14, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 01:00:07,866 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 01:00:13,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=221701.33333333334, ans=0.125
+2024-07-29 01:00:16,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=221714.66666666666, ans=0.025
+2024-07-29 01:00:28,666 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.00 vs. limit=22.5
+2024-07-29 01:00:29,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=221714.66666666666, ans=0.025
+2024-07-29 01:00:33,686 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=221728.0, ans=0.2
+2024-07-29 01:00:37,056 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.539e+01 6.361e+01 7.423e+01 1.026e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-29 01:00:41,155 INFO [train.py:1114] (3/4) Epoch 17, batch 2750, loss[loss=0.1642, simple_loss=0.2582, pruned_loss=0.03509, over 4717.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2661, pruned_loss=0.0428, over 939626.96 frames. ], batch size: 12, lr: 4.42e-03, grad_scale: 32.0
+2024-07-29 01:00:42,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=221741.33333333334, ans=0.125
+2024-07-29 01:00:48,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.19 vs.
limit=22.5 +2024-07-29 01:00:50,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=221754.66666666666, ans=0.125 +2024-07-29 01:00:55,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=221768.0, ans=0.2 +2024-07-29 01:01:07,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=221781.33333333334, ans=0.125 +2024-07-29 01:01:08,119 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0 +2024-07-29 01:01:14,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.25 vs. limit=12.0 +2024-07-29 01:01:22,010 INFO [train.py:1114] (3/4) Epoch 17, batch 2800, loss[loss=0.2211, simple_loss=0.2999, pruned_loss=0.07119, over 3418.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2661, pruned_loss=0.04252, over 937429.85 frames. ], batch size: 35, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:01:26,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=221808.0, ans=0.125 +2024-07-29 01:01:39,373 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:01:53,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.698e+01 6.511e+01 7.478e+01 1.084e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 01:01:56,360 INFO [train.py:1114] (3/4) Epoch 17, batch 2850, loss[loss=0.1575, simple_loss=0.243, pruned_loss=0.03604, over 4968.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2663, pruned_loss=0.04272, over 935205.49 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:02:00,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=221874.66666666666, ans=0.2 +2024-07-29 01:02:05,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=221874.66666666666, ans=0.125 +2024-07-29 01:02:23,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=221914.66666666666, ans=0.0 +2024-07-29 01:02:24,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=221914.66666666666, ans=0.1 +2024-07-29 01:02:35,088 INFO [train.py:1114] (3/4) Epoch 17, batch 2900, loss[loss=0.1921, simple_loss=0.2855, pruned_loss=0.04933, over 4823.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2679, pruned_loss=0.04295, over 939310.79 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:02:45,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=23.01 vs. 
limit=22.5 +2024-07-29 01:02:50,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=221968.0, ans=0.1 +2024-07-29 01:03:00,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=221981.33333333334, ans=0.0 +2024-07-29 01:03:07,959 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.542e+01 6.312e+01 7.539e+01 1.199e+02, threshold=1.262e+02, percent-clipped=0.0 +2024-07-29 01:03:10,783 INFO [train.py:1114] (3/4) Epoch 17, batch 2950, loss[loss=0.1515, simple_loss=0.2328, pruned_loss=0.03514, over 4712.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2669, pruned_loss=0.04277, over 938506.21 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:03:16,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222008.0, ans=0.125 +2024-07-29 01:03:27,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.66 vs. limit=15.0 +2024-07-29 01:03:44,746 INFO [train.py:1114] (3/4) Epoch 17, batch 3000, loss[loss=0.1863, simple_loss=0.2639, pruned_loss=0.05437, over 4750.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2666, pruned_loss=0.04296, over 938485.44 frames. ], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:03:44,747 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 01:03:49,866 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.8926, 3.1278, 3.5621, 3.6633], device='cuda:3') +2024-07-29 01:03:53,152 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.6185, 3.0696, 3.4064, 3.7621], device='cuda:3') +2024-07-29 01:04:05,746 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.1635, simple_loss=0.2655, pruned_loss=0.03068, over 944034.00 frames. +2024-07-29 01:04:05,747 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 01:04:14,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=222088.0, ans=0.025 +2024-07-29 01:04:37,609 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.724e+01 6.244e+01 7.233e+01 1.089e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:04:40,427 INFO [train.py:1114] (3/4) Epoch 17, batch 3050, loss[loss=0.1641, simple_loss=0.2584, pruned_loss=0.03492, over 4642.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2672, pruned_loss=0.04323, over 937119.09 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:04:43,483 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. 
limit=10.0 +2024-07-29 01:04:59,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222168.0, ans=0.125 +2024-07-29 01:05:00,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=222168.0, ans=0.125 +2024-07-29 01:05:06,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=222181.33333333334, ans=0.125 +2024-07-29 01:05:13,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=222194.66666666666, ans=0.2 +2024-07-29 01:05:14,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=222194.66666666666, ans=0.125 +2024-07-29 01:05:16,135 INFO [train.py:1114] (3/4) Epoch 17, batch 3100, loss[loss=0.177, simple_loss=0.2642, pruned_loss=0.04489, over 4611.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2665, pruned_loss=0.04304, over 937849.22 frames. ], batch size: 16, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:05:19,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=222208.0, ans=0.125 +2024-07-29 01:05:20,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=222208.0, ans=0.125 +2024-07-29 01:05:22,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222221.33333333334, ans=0.125 +2024-07-29 01:05:22,852 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.73 vs. limit=5.0 +2024-07-29 01:05:23,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=222221.33333333334, ans=0.2 +2024-07-29 01:05:38,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=222234.66666666666, ans=0.125 +2024-07-29 01:05:51,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.936e+01 5.691e+01 6.608e+01 7.636e+01 1.029e+02, threshold=1.322e+02, percent-clipped=0.0 +2024-07-29 01:05:54,538 INFO [train.py:1114] (3/4) Epoch 17, batch 3150, loss[loss=0.1689, simple_loss=0.2613, pruned_loss=0.03827, over 4618.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2664, pruned_loss=0.04264, over 938011.58 frames. ], batch size: 17, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:05:56,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=222274.66666666666, ans=0.025 +2024-07-29 01:06:02,196 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=46.30 vs. limit=22.5 +2024-07-29 01:06:21,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222314.66666666666, ans=0.1 +2024-07-29 01:06:29,715 INFO [train.py:1114] (3/4) Epoch 17, batch 3200, loss[loss=0.1704, simple_loss=0.2602, pruned_loss=0.04035, over 4826.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2661, pruned_loss=0.04256, over 939580.96 frames. 
], batch size: 13, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:06:35,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=222341.33333333334, ans=0.125 +2024-07-29 01:06:49,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=222368.0, ans=0.125 +2024-07-29 01:06:58,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=222394.66666666666, ans=0.2 +2024-07-29 01:07:02,015 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.614e+01 6.191e+01 6.817e+01 1.066e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 01:07:05,961 INFO [train.py:1114] (3/4) Epoch 17, batch 3250, loss[loss=0.186, simple_loss=0.2847, pruned_loss=0.04366, over 4931.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2667, pruned_loss=0.04271, over 940663.88 frames. ], batch size: 14, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:07:10,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=222408.0, ans=0.125 +2024-07-29 01:07:23,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=222434.66666666666, ans=0.2 +2024-07-29 01:07:26,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222448.0, ans=0.1 +2024-07-29 01:07:39,379 INFO [train.py:1114] (3/4) Epoch 17, batch 3300, loss[loss=0.1832, simple_loss=0.2816, pruned_loss=0.04236, over 4763.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2656, pruned_loss=0.04244, over 940573.73 frames. ], batch size: 19, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:07:53,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=222488.0, ans=0.1 +2024-07-29 01:07:55,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=222501.33333333334, ans=0.125 +2024-07-29 01:07:56,257 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-29 01:07:56,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222501.33333333334, ans=0.125 +2024-07-29 01:08:01,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=222514.66666666666, ans=0.05 +2024-07-29 01:08:12,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=222528.0, ans=0.125 +2024-07-29 01:08:13,578 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.660e+01 6.307e+01 7.257e+01 1.096e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 01:08:16,305 INFO [train.py:1114] (3/4) Epoch 17, batch 3350, loss[loss=0.1936, simple_loss=0.279, pruned_loss=0.0541, over 4657.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2657, pruned_loss=0.04276, over 938785.18 frames. 
], batch size: 17, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:08:19,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=222541.33333333334, ans=0.125 +2024-07-29 01:08:29,415 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.24 vs. limit=22.5 +2024-07-29 01:08:30,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.61 vs. limit=22.5 +2024-07-29 01:08:38,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222581.33333333334, ans=0.1 +2024-07-29 01:08:43,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=222594.66666666666, ans=0.125 +2024-07-29 01:08:45,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=222594.66666666666, ans=0.0 +2024-07-29 01:08:46,831 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:08:49,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=222594.66666666666, ans=0.125 +2024-07-29 01:08:51,960 INFO [train.py:1114] (3/4) Epoch 17, batch 3400, loss[loss=0.1581, simple_loss=0.242, pruned_loss=0.03707, over 4800.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2661, pruned_loss=0.04319, over 937488.46 frames. ], batch size: 11, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:09:03,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222621.33333333334, ans=0.125 +2024-07-29 01:09:06,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=222621.33333333334, ans=0.2 +2024-07-29 01:09:10,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.84 vs. limit=22.5 +2024-07-29 01:09:13,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=222634.66666666666, ans=0.0 +2024-07-29 01:09:27,087 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.040e+01 6.022e+01 6.843e+01 8.395e+01 1.350e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-29 01:09:29,762 INFO [train.py:1114] (3/4) Epoch 17, batch 3450, loss[loss=0.2145, simple_loss=0.3089, pruned_loss=0.06009, over 4738.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2663, pruned_loss=0.04305, over 937700.90 frames. 
], batch size: 19, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:09:31,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=222674.66666666666, ans=0.1 +2024-07-29 01:09:33,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=222674.66666666666, ans=0.125 +2024-07-29 01:09:38,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=222688.0, ans=0.1 +2024-07-29 01:09:38,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.76 vs. limit=15.0 +2024-07-29 01:09:43,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=222701.33333333334, ans=0.125 +2024-07-29 01:09:44,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.60 vs. limit=15.0 +2024-07-29 01:10:02,875 INFO [train.py:1114] (3/4) Epoch 17, batch 3500, loss[loss=0.1526, simple_loss=0.2365, pruned_loss=0.03432, over 4936.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2652, pruned_loss=0.04249, over 938225.47 frames. ], batch size: 12, lr: 4.41e-03, grad_scale: 32.0 +2024-07-29 01:10:05,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222741.33333333334, ans=0.125 +2024-07-29 01:10:15,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=222754.66666666666, ans=0.0 +2024-07-29 01:10:35,613 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.391e+01 6.097e+01 6.632e+01 8.722e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 01:10:38,349 INFO [train.py:1114] (3/4) Epoch 17, batch 3550, loss[loss=0.1909, simple_loss=0.2872, pruned_loss=0.04735, over 4672.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04216, over 938393.90 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:10:44,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=222808.0, ans=10.0 +2024-07-29 01:11:02,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=222848.0, ans=0.0 +2024-07-29 01:11:14,871 INFO [train.py:1114] (3/4) Epoch 17, batch 3600, loss[loss=0.1343, simple_loss=0.2272, pruned_loss=0.02073, over 4958.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.265, pruned_loss=0.0419, over 940303.73 frames. 
], batch size: 13, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:11:17,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=222874.66666666666, ans=0.1 +2024-07-29 01:11:24,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=222888.0, ans=0.125 +2024-07-29 01:11:31,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=222901.33333333334, ans=0.2 +2024-07-29 01:11:47,271 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.818e+01 6.519e+01 7.348e+01 1.094e+02, threshold=1.304e+02, percent-clipped=0.0 +2024-07-29 01:11:50,098 INFO [train.py:1114] (3/4) Epoch 17, batch 3650, loss[loss=0.1973, simple_loss=0.297, pruned_loss=0.04876, over 4897.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2643, pruned_loss=0.04152, over 940974.39 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:11:55,318 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.65 vs. limit=6.0 +2024-07-29 01:11:59,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=222954.66666666666, ans=0.025 +2024-07-29 01:12:02,702 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.99 vs. limit=15.0 +2024-07-29 01:12:03,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=222968.0, ans=0.125 +2024-07-29 01:12:04,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=222968.0, ans=0.2 +2024-07-29 01:12:08,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=222968.0, ans=0.0 +2024-07-29 01:12:21,301 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=222981.33333333334, ans=0.0 +2024-07-29 01:12:27,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=222994.66666666666, ans=0.125 +2024-07-29 01:12:34,075 INFO [train.py:1114] (3/4) Epoch 17, batch 3700, loss[loss=0.1783, simple_loss=0.2753, pruned_loss=0.04068, over 4931.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.04193, over 941762.23 frames. 
], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:12:36,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223008.0, ans=0.1 +2024-07-29 01:12:47,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=223021.33333333334, ans=0.125 +2024-07-29 01:12:48,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=223021.33333333334, ans=0.125 +2024-07-29 01:12:49,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=223021.33333333334, ans=0.125 +2024-07-29 01:12:51,006 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:12:51,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=223034.66666666666, ans=0.125 +2024-07-29 01:12:51,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=223034.66666666666, ans=0.125 +2024-07-29 01:13:04,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=223061.33333333334, ans=0.2 +2024-07-29 01:13:09,042 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.453e+01 5.690e+01 6.166e+01 6.901e+01 9.277e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 01:13:14,300 INFO [train.py:1114] (3/4) Epoch 17, batch 3750, loss[loss=0.1564, simple_loss=0.243, pruned_loss=0.03488, over 4797.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2648, pruned_loss=0.04202, over 942949.52 frames. ], batch size: 11, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:13:20,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.98 vs. limit=15.0 +2024-07-29 01:13:27,540 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.55 vs. limit=15.0 +2024-07-29 01:13:48,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=223128.0, ans=0.125 +2024-07-29 01:13:54,167 INFO [train.py:1114] (3/4) Epoch 17, batch 3800, loss[loss=0.2218, simple_loss=0.3103, pruned_loss=0.06668, over 4817.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2651, pruned_loss=0.04241, over 941236.23 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:13:56,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=223141.33333333334, ans=0.125 +2024-07-29 01:13:58,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=223141.33333333334, ans=0.125 +2024-07-29 01:14:09,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=223168.0, ans=0.04949747468305833 +2024-07-29 01:14:13,859 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=14.67 vs. 
limit=15.0 +2024-07-29 01:14:17,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=223181.33333333334, ans=0.0 +2024-07-29 01:14:28,211 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.538e+01 6.338e+01 7.177e+01 1.035e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 01:14:30,993 INFO [train.py:1114] (3/4) Epoch 17, batch 3850, loss[loss=0.147, simple_loss=0.2494, pruned_loss=0.02228, over 4650.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2643, pruned_loss=0.0419, over 941785.58 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:14:36,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=15.0 +2024-07-29 01:14:41,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.23 vs. limit=15.0 +2024-07-29 01:14:59,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=223261.33333333334, ans=0.025 +2024-07-29 01:15:05,370 INFO [train.py:1114] (3/4) Epoch 17, batch 3900, loss[loss=0.1954, simple_loss=0.3006, pruned_loss=0.04509, over 4811.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2653, pruned_loss=0.04236, over 942485.13 frames. ], batch size: 14, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:15:19,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=223301.33333333334, ans=0.0 +2024-07-29 01:15:24,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=223301.33333333334, ans=0.1 +2024-07-29 01:15:32,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff2.min_abs, batch_count=223314.66666666666, ans=0.1 +2024-07-29 01:15:32,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=223314.66666666666, ans=0.2 +2024-07-29 01:15:42,537 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.533e+01 5.996e+01 6.814e+01 1.002e+02, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 01:15:43,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=223328.0, ans=0.125 +2024-07-29 01:15:44,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=17.67 vs. limit=15.0 +2024-07-29 01:15:45,486 INFO [train.py:1114] (3/4) Epoch 17, batch 3950, loss[loss=0.1773, simple_loss=0.2732, pruned_loss=0.04069, over 4844.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2648, pruned_loss=0.04209, over 944435.77 frames. ], batch size: 16, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:15:49,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=223341.33333333334, ans=0.2 +2024-07-29 01:16:06,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.09 vs. 
limit=6.0 +2024-07-29 01:16:32,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=223368.0, ans=0.125 +2024-07-29 01:17:06,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=223394.66666666666, ans=0.2 +2024-07-29 01:17:23,528 INFO [train.py:1114] (3/4) Epoch 17, batch 4000, loss[loss=0.1715, simple_loss=0.2627, pruned_loss=0.04017, over 4771.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2647, pruned_loss=0.04202, over 941044.25 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:17:47,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=223421.33333333334, ans=0.0 +2024-07-29 01:18:04,016 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:18:08,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=223448.0, ans=0.125 +2024-07-29 01:18:21,853 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.44 vs. limit=22.5 +2024-07-29 01:18:25,018 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.438e+01 5.691e+01 6.092e+01 6.901e+01 9.634e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 01:18:33,082 INFO [train.py:1114] (3/4) Epoch 17, batch 4050, loss[loss=0.2156, simple_loss=0.2985, pruned_loss=0.06634, over 3351.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2648, pruned_loss=0.04216, over 939654.38 frames. ], batch size: 35, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:18:43,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223474.66666666666, ans=0.1 +2024-07-29 01:19:35,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=223514.66666666666, ans=0.125 +2024-07-29 01:19:35,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.26 vs. limit=15.0 +2024-07-29 01:25:14,257 INFO [train.py:1114] (3/4) Epoch 17, batch 4100, loss[loss=0.1827, simple_loss=0.2803, pruned_loss=0.04257, over 4891.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2653, pruned_loss=0.04255, over 938747.10 frames. 
], batch size: 15, lr: 4.40e-03, grad_scale: 64.0 +2024-07-29 01:26:03,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=223554.66666666666, ans=0.0 +2024-07-29 01:26:57,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=223581.33333333334, ans=0.125 +2024-07-29 01:27:02,818 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=223581.33333333334, ans=0.025 +2024-07-29 01:27:50,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=223581.33333333334, ans=0.025 +2024-07-29 01:28:06,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=223594.66666666666, ans=0.125 +2024-07-29 01:28:22,404 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.689e+01 6.101e+01 7.334e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 01:28:28,314 INFO [train.py:1114] (3/4) Epoch 17, batch 4150, loss[loss=0.155, simple_loss=0.2443, pruned_loss=0.03287, over 4820.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2652, pruned_loss=0.0424, over 938372.04 frames. ], batch size: 13, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:28:45,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=8.0 +2024-07-29 01:29:11,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=223621.33333333334, ans=0.95 +2024-07-29 01:29:40,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223634.66666666666, ans=0.1 +2024-07-29 01:29:47,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=223634.66666666666, ans=0.95 +2024-07-29 01:30:47,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=223661.33333333334, ans=0.0 +2024-07-29 01:30:48,969 INFO [train.py:1114] (3/4) Epoch 17, batch 4200, loss[loss=0.1841, simple_loss=0.2794, pruned_loss=0.04445, over 4903.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2653, pruned_loss=0.04244, over 939828.87 frames. ], batch size: 15, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:30:54,172 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=8.08 vs. limit=8.0 +2024-07-29 01:31:16,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=223688.0, ans=0.025 +2024-07-29 01:32:28,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2.whitening_limit, batch_count=223714.66666666666, ans=15.0 +2024-07-29 01:32:29,977 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.66 vs. 
limit=15.0 +2024-07-29 01:32:32,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=223714.66666666666, ans=0.5 +2024-07-29 01:33:25,471 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.649e+01 6.155e+01 7.132e+01 1.062e+02, threshold=1.231e+02, percent-clipped=0.0 +2024-07-29 01:33:25,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=223728.0, ans=0.125 +2024-07-29 01:33:32,170 INFO [train.py:1114] (3/4) Epoch 17, batch 4250, loss[loss=0.1527, simple_loss=0.25, pruned_loss=0.02769, over 4642.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2652, pruned_loss=0.04268, over 941027.41 frames. ], batch size: 12, lr: 4.40e-03, grad_scale: 32.0 +2024-07-29 01:33:48,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=223754.66666666666, ans=0.1 +2024-07-29 01:33:48,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=223754.66666666666, ans=0.5 +2024-07-29 01:34:29,795 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.33 vs. limit=15.0 +2024-07-29 01:34:36,353 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=223781.33333333334, ans=0.125 +2024-07-29 01:34:36,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=223781.33333333334, ans=0.0 +2024-07-29 01:34:40,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=223781.33333333334, ans=0.0 +2024-07-29 01:34:56,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=223781.33333333334, ans=0.025 +2024-07-29 01:35:05,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=223781.33333333334, ans=0.125 +2024-07-29 01:35:05,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=223781.33333333334, ans=0.125 +2024-07-29 01:35:15,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=223794.66666666666, ans=0.125 +2024-07-29 01:35:27,639 INFO [train.py:1114] (3/4) Epoch 17, batch 4300, loss[loss=0.2061, simple_loss=0.2932, pruned_loss=0.05946, over 4759.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.266, pruned_loss=0.04315, over 940553.05 frames. 
], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:36:25,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=223821.33333333334, ans=0.2 +2024-07-29 01:36:30,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=223834.66666666666, ans=0.0 +2024-07-29 01:36:30,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=223834.66666666666, ans=0.125 +2024-07-29 01:38:10,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=223848.0, ans=0.025 +2024-07-29 01:38:12,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=223848.0, ans=0.125 +2024-07-29 01:38:25,971 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.646e+01 6.421e+01 7.211e+01 1.436e+02, threshold=1.284e+02, percent-clipped=1.0 +2024-07-29 01:38:26,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=223861.33333333334, ans=0.1 +2024-07-29 01:38:30,618 INFO [train.py:1114] (3/4) Epoch 17, batch 4350, loss[loss=0.1598, simple_loss=0.2533, pruned_loss=0.03317, over 4765.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2656, pruned_loss=0.04298, over 941569.70 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:39:35,679 INFO [train.py:1114] (3/4) Epoch 17, batch 4400, loss[loss=0.2114, simple_loss=0.3002, pruned_loss=0.06132, over 4807.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2668, pruned_loss=0.04321, over 941575.33 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:39:53,302 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=223941.33333333334, ans=0.125 +2024-07-29 01:40:14,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=223954.66666666666, ans=0.2 +2024-07-29 01:40:26,118 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=223968.0, ans=0.125 +2024-07-29 01:40:29,942 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.64 vs. limit=10.0 +2024-07-29 01:40:30,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=223968.0, ans=0.0 +2024-07-29 01:40:31,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=223968.0, ans=0.125 +2024-07-29 01:40:40,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=223981.33333333334, ans=0.125 +2024-07-29 01:40:47,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=223994.66666666666, ans=0.125 +2024-07-29 01:40:49,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.24 vs. 
limit=15.0 +2024-07-29 01:40:54,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.541e+01 6.235e+01 6.902e+01 1.046e+02, threshold=1.247e+02, percent-clipped=0.0 +2024-07-29 01:40:59,858 INFO [train.py:1114] (3/4) Epoch 17, batch 4450, loss[loss=0.1759, simple_loss=0.2631, pruned_loss=0.04435, over 4945.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2679, pruned_loss=0.04377, over 939727.81 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:41:27,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=224021.33333333334, ans=0.1 +2024-07-29 01:41:28,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224021.33333333334, ans=0.1 +2024-07-29 01:42:14,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=224034.66666666666, ans=0.1 +2024-07-29 01:42:26,041 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:42:38,237 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:42:54,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224061.33333333334, ans=0.125 +2024-07-29 01:42:57,261 INFO [train.py:1114] (3/4) Epoch 17, batch 4500, loss[loss=0.1655, simple_loss=0.2476, pruned_loss=0.0417, over 4742.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2681, pruned_loss=0.04361, over 939069.72 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:43:01,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224074.66666666666, ans=0.1 +2024-07-29 01:43:17,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=224088.0, ans=0.0 +2024-07-29 01:43:26,037 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=224101.33333333334, ans=0.125 +2024-07-29 01:43:54,433 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.989e+01 5.696e+01 6.215e+01 7.468e+01 9.739e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 01:43:56,648 INFO [train.py:1114] (3/4) Epoch 17, batch 4550, loss[loss=0.171, simple_loss=0.2592, pruned_loss=0.04134, over 4894.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2677, pruned_loss=0.04315, over 940482.26 frames. ], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:44:07,200 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.22 vs. 
limit=15.0 +2024-07-29 01:44:07,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224154.66666666666, ans=0.0 +2024-07-29 01:44:19,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=224181.33333333334, ans=0.0 +2024-07-29 01:44:25,322 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:44:33,953 INFO [train.py:1114] (3/4) Epoch 17, batch 4600, loss[loss=0.197, simple_loss=0.2898, pruned_loss=0.05207, over 4610.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2679, pruned_loss=0.04334, over 938309.49 frames. ], batch size: 21, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:44:34,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten.whitening_limit, batch_count=224208.0, ans=15.0 +2024-07-29 01:44:39,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=224208.0, ans=0.125 +2024-07-29 01:44:44,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1.whitening_limit, batch_count=224221.33333333334, ans=10.0 +2024-07-29 01:44:48,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=224234.66666666666, ans=0.0 +2024-07-29 01:44:50,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=224234.66666666666, ans=0.125 +2024-07-29 01:44:55,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=224248.0, ans=0.1 +2024-07-29 01:45:17,104 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.605e+01 6.267e+01 6.922e+01 9.428e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 01:45:19,075 INFO [train.py:1114] (3/4) Epoch 17, batch 4650, loss[loss=0.1885, simple_loss=0.2829, pruned_loss=0.04705, over 4844.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2686, pruned_loss=0.0436, over 939943.35 frames. ], batch size: 16, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:45:26,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=224274.66666666666, ans=10.0 +2024-07-29 01:46:10,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224328.0, ans=0.1 +2024-07-29 01:46:12,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.11 vs. limit=12.0 +2024-07-29 01:46:15,067 INFO [train.py:1114] (3/4) Epoch 17, batch 4700, loss[loss=0.1464, simple_loss=0.2309, pruned_loss=0.03099, over 4689.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2674, pruned_loss=0.04347, over 937206.60 frames. 
], batch size: 11, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:46:19,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=224341.33333333334, ans=0.0 +2024-07-29 01:46:26,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=224354.66666666666, ans=0.1 +2024-07-29 01:46:29,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=224354.66666666666, ans=0.125 +2024-07-29 01:46:29,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=224354.66666666666, ans=0.09899494936611666 +2024-07-29 01:46:33,643 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=224368.0, ans=0.0 +2024-07-29 01:46:35,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224368.0, ans=0.125 +2024-07-29 01:46:47,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.50 vs. limit=10.0 +2024-07-29 01:46:49,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=224394.66666666666, ans=0.0 +2024-07-29 01:46:53,327 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.618e+01 6.268e+01 7.126e+01 1.011e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 01:46:58,155 INFO [train.py:1114] (3/4) Epoch 17, batch 4750, loss[loss=0.1867, simple_loss=0.2944, pruned_loss=0.03946, over 4427.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.268, pruned_loss=0.04357, over 934744.98 frames. ], batch size: 21, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:46:58,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=224408.0, ans=0.125 +2024-07-29 01:47:24,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=224421.33333333334, ans=0.2 +2024-07-29 01:47:30,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=224434.66666666666, ans=0.2 +2024-07-29 01:47:31,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=224434.66666666666, ans=0.2 +2024-07-29 01:47:47,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=224448.0, ans=0.0 +2024-07-29 01:47:52,132 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224448.0, ans=0.125 +2024-07-29 01:47:57,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=224461.33333333334, ans=0.025 +2024-07-29 01:47:58,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=224461.33333333334, ans=0.125 +2024-07-29 01:48:01,680 INFO [train.py:1114] (3/4) Epoch 17, batch 4800, loss[loss=0.2011, simple_loss=0.2951, pruned_loss=0.05352, over 4694.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2684, pruned_loss=0.04405, over 932550.61 frames. 
], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:48:10,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=224488.0, ans=0.2 +2024-07-29 01:48:11,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=224488.0, ans=0.125 +2024-07-29 01:48:40,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.91 vs. limit=22.5 +2024-07-29 01:48:48,685 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=224528.0, ans=0.125 +2024-07-29 01:48:49,936 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.702e+01 6.152e+01 7.356e+01 9.741e+01, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 01:48:53,605 INFO [train.py:1114] (3/4) Epoch 17, batch 4850, loss[loss=0.1858, simple_loss=0.2842, pruned_loss=0.04367, over 4734.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2679, pruned_loss=0.04381, over 931994.03 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:48:55,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=224541.33333333334, ans=0.1 +2024-07-29 01:49:04,514 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.28 vs. limit=22.5 +2024-07-29 01:49:16,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=224568.0, ans=0.2 +2024-07-29 01:49:17,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=224568.0, ans=0.125 +2024-07-29 01:49:21,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0 +2024-07-29 01:49:25,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=224594.66666666666, ans=0.125 +2024-07-29 01:49:42,566 INFO [train.py:1114] (3/4) Epoch 17, batch 4900, loss[loss=0.1971, simple_loss=0.2881, pruned_loss=0.05304, over 4756.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2675, pruned_loss=0.04375, over 933885.31 frames. 
], batch size: 13, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:49:45,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=224608.0, ans=0.125 +2024-07-29 01:49:46,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=224608.0, ans=0.0 +2024-07-29 01:49:50,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=224621.33333333334, ans=0.125 +2024-07-29 01:50:16,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=224661.33333333334, ans=0.125 +2024-07-29 01:50:48,850 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.565e+01 5.722e+01 6.197e+01 6.933e+01 1.189e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 01:51:00,387 INFO [train.py:1114] (3/4) Epoch 17, batch 4950, loss[loss=0.2334, simple_loss=0.3056, pruned_loss=0.08061, over 3733.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2684, pruned_loss=0.04422, over 931596.54 frames. ], batch size: 35, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:51:00,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.67 vs. limit=5.0 +2024-07-29 01:51:04,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=224674.66666666666, ans=0.125 +2024-07-29 01:51:10,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=224674.66666666666, ans=0.125 +2024-07-29 01:51:17,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.36 vs. limit=15.0 +2024-07-29 01:51:22,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=224701.33333333334, ans=0.0 +2024-07-29 01:51:31,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224714.66666666666, ans=0.1 +2024-07-29 01:51:31,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224714.66666666666, ans=0.1 +2024-07-29 01:51:42,477 INFO [train.py:1114] (3/4) Epoch 17, batch 5000, loss[loss=0.1885, simple_loss=0.2896, pruned_loss=0.04373, over 4649.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2686, pruned_loss=0.04434, over 935271.94 frames. ], batch size: 14, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:51:42,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=224741.33333333334, ans=0.0 +2024-07-29 01:51:43,269 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=224741.33333333334, ans=0.125 +2024-07-29 01:51:46,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=224741.33333333334, ans=0.1 +2024-07-29 01:52:03,644 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.95 vs. 
limit=15.0 +2024-07-29 01:52:04,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=224768.0, ans=0.04949747468305833 +2024-07-29 01:52:47,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.329e+01 5.582e+01 6.302e+01 7.015e+01 1.020e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 01:52:50,186 INFO [train.py:1114] (3/4) Epoch 17, batch 5050, loss[loss=0.177, simple_loss=0.2589, pruned_loss=0.04759, over 4848.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2679, pruned_loss=0.04396, over 937809.38 frames. ], batch size: 12, lr: 4.39e-03, grad_scale: 32.0 +2024-07-29 01:52:52,255 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.85 vs. limit=15.0 +2024-07-29 01:53:00,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=224821.33333333334, ans=0.09899494936611666 +2024-07-29 01:53:04,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=224834.66666666666, ans=0.125 +2024-07-29 01:53:17,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=224848.0, ans=0.0 +2024-07-29 01:53:30,857 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.20 vs. limit=15.0 +2024-07-29 01:53:32,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=224861.33333333334, ans=0.025 +2024-07-29 01:53:34,548 INFO [train.py:1114] (3/4) Epoch 17, batch 5100, loss[loss=0.1543, simple_loss=0.2457, pruned_loss=0.03144, over 4773.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2676, pruned_loss=0.04382, over 935275.01 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:53:46,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=224888.0, ans=0.2 +2024-07-29 01:53:55,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=224901.33333333334, ans=0.0 +2024-07-29 01:54:00,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=224914.66666666666, ans=0.07 +2024-07-29 01:54:11,314 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.613e+01 5.725e+01 6.244e+01 7.275e+01 1.073e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 01:54:13,315 INFO [train.py:1114] (3/4) Epoch 17, batch 5150, loss[loss=0.1967, simple_loss=0.2849, pruned_loss=0.05421, over 4846.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2685, pruned_loss=0.04388, over 936173.05 frames. ], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:20,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=224954.66666666666, ans=0.125 +2024-07-29 01:54:25,880 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.69 vs. 
limit=15.0 +2024-07-29 01:54:33,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=224981.33333333334, ans=0.125 +2024-07-29 01:54:35,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=224981.33333333334, ans=0.0 +2024-07-29 01:54:48,430 INFO [train.py:1114] (3/4) Epoch 17, batch 5200, loss[loss=0.2116, simple_loss=0.3104, pruned_loss=0.05636, over 4666.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2678, pruned_loss=0.04335, over 935835.60 frames. ], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:54:50,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225008.0, ans=0.1 +2024-07-29 01:54:55,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=225021.33333333334, ans=0.0 +2024-07-29 01:55:02,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=225034.66666666666, ans=0.2 +2024-07-29 01:55:16,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.54 vs. limit=22.5 +2024-07-29 01:55:17,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=225034.66666666666, ans=0.125 +2024-07-29 01:55:31,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.50 vs. limit=15.0 +2024-07-29 01:55:37,222 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.840e+01 6.748e+01 7.869e+01 1.303e+02, threshold=1.350e+02, percent-clipped=1.0 +2024-07-29 01:55:39,366 INFO [train.py:1114] (3/4) Epoch 17, batch 5250, loss[loss=0.1798, simple_loss=0.2775, pruned_loss=0.04101, over 4897.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2672, pruned_loss=0.04295, over 935505.30 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:56:10,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=225101.33333333334, ans=0.0 +2024-07-29 01:56:12,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=225114.66666666666, ans=0.0 +2024-07-29 01:56:17,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=225114.66666666666, ans=0.125 +2024-07-29 01:56:19,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=225128.0, ans=0.125 +2024-07-29 01:56:19,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=13.11 vs. limit=15.0 +2024-07-29 01:56:25,592 INFO [train.py:1114] (3/4) Epoch 17, batch 5300, loss[loss=0.2087, simple_loss=0.2958, pruned_loss=0.06081, over 4654.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.267, pruned_loss=0.04315, over 933779.35 frames. 
], batch size: 16, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:56:38,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225168.0, ans=0.125 +2024-07-29 01:56:52,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225194.66666666666, ans=0.1 +2024-07-29 01:56:56,605 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.76 vs. limit=22.5 +2024-07-29 01:56:57,571 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.754e+01 6.386e+01 7.426e+01 1.100e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 01:56:59,695 INFO [train.py:1114] (3/4) Epoch 17, batch 5350, loss[loss=0.1385, simple_loss=0.217, pruned_loss=0.02994, over 4520.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2674, pruned_loss=0.043, over 936028.32 frames. ], batch size: 10, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:56:59,858 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.max_abs, batch_count=225208.0, ans=10.0 +2024-07-29 01:56:59,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=225208.0, ans=0.125 +2024-07-29 01:57:09,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=225221.33333333334, ans=0.0 +2024-07-29 01:57:10,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225221.33333333334, ans=0.125 +2024-07-29 01:57:10,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=225221.33333333334, ans=0.125 +2024-07-29 01:57:13,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.42 vs. limit=15.0 +2024-07-29 01:57:26,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225261.33333333334, ans=0.125 +2024-07-29 01:57:34,863 INFO [train.py:1114] (3/4) Epoch 17, batch 5400, loss[loss=0.1647, simple_loss=0.255, pruned_loss=0.03718, over 4198.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2684, pruned_loss=0.04376, over 930052.28 frames. ], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:57:42,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.20 vs. limit=15.0 +2024-07-29 01:57:43,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=225288.0, ans=0.0 +2024-07-29 01:57:44,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.79 vs. limit=6.0 +2024-07-29 01:57:48,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=225288.0, ans=0.125 +2024-07-29 01:57:49,019 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.08 vs. 
limit=6.0 +2024-07-29 01:57:52,102 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.03 vs. limit=8.0 +2024-07-29 01:58:04,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225328.0, ans=0.125 +2024-07-29 01:58:06,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=225328.0, ans=0.025 +2024-07-29 01:58:09,521 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+01 5.738e+01 6.198e+01 6.838e+01 9.669e+01, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 01:58:11,786 INFO [train.py:1114] (3/4) Epoch 17, batch 5450, loss[loss=0.1753, simple_loss=0.2531, pruned_loss=0.04879, over 4710.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2674, pruned_loss=0.04297, over 933498.38 frames. ], batch size: 11, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:13,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=225341.33333333334, ans=0.0 +2024-07-29 01:58:13,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=225341.33333333334, ans=0.0 +2024-07-29 01:58:23,467 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 01:58:27,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=225368.0, ans=0.125 +2024-07-29 01:58:28,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=225368.0, ans=0.125 +2024-07-29 01:58:30,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=225368.0, ans=0.025 +2024-07-29 01:58:31,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=225381.33333333334, ans=0.1 +2024-07-29 01:58:33,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=225381.33333333334, ans=0.125 +2024-07-29 01:58:35,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=225381.33333333334, ans=0.0 +2024-07-29 01:58:41,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=225394.66666666666, ans=0.125 +2024-07-29 01:58:45,723 INFO [train.py:1114] (3/4) Epoch 17, batch 5500, loss[loss=0.1977, simple_loss=0.2916, pruned_loss=0.05187, over 4137.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2668, pruned_loss=0.04292, over 931030.27 frames. 
], batch size: 25, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:58:50,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=225408.0, ans=0.5 +2024-07-29 01:58:57,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=225421.33333333334, ans=0.125 +2024-07-29 01:59:06,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=225448.0, ans=0.04949747468305833 +2024-07-29 01:59:16,896 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.448e+01 5.683e+01 6.448e+01 7.775e+01 1.067e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 01:59:18,941 INFO [train.py:1114] (3/4) Epoch 17, batch 5550, loss[loss=0.1673, simple_loss=0.2587, pruned_loss=0.03801, over 4704.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2661, pruned_loss=0.04297, over 933053.74 frames. ], batch size: 12, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:21,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=225474.66666666666, ans=0.125 +2024-07-29 01:59:22,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=225474.66666666666, ans=0.2 +2024-07-29 01:59:23,899 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.00 vs. limit=6.0 +2024-07-29 01:59:24,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=225474.66666666666, ans=10.0 +2024-07-29 01:59:32,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=225501.33333333334, ans=0.125 +2024-07-29 01:59:55,061 INFO [train.py:1114] (3/4) Epoch 17, batch 5600, loss[loss=0.1826, simple_loss=0.2728, pruned_loss=0.0462, over 4738.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2665, pruned_loss=0.04337, over 934267.18 frames. ], batch size: 14, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 01:59:58,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=225541.33333333334, ans=0.125 +2024-07-29 02:00:13,508 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. limit=15.0 +2024-07-29 02:00:17,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=225581.33333333334, ans=0.2 +2024-07-29 02:00:27,271 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.591e+01 6.348e+01 7.500e+01 1.117e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 02:00:29,292 INFO [train.py:1114] (3/4) Epoch 17, batch 5650, loss[loss=0.1843, simple_loss=0.2754, pruned_loss=0.04659, over 4505.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2674, pruned_loss=0.04348, over 936523.68 frames. 
], batch size: 21, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:00:29,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=225608.0, ans=0.125 +2024-07-29 02:00:56,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=225621.33333333334, ans=0.125 +2024-07-29 02:00:59,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=225621.33333333334, ans=0.0 +2024-07-29 02:01:12,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=225648.0, ans=0.0 +2024-07-29 02:01:16,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=225661.33333333334, ans=0.2 +2024-07-29 02:01:18,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.86 vs. limit=6.0 +2024-07-29 02:01:24,142 INFO [train.py:1114] (3/4) Epoch 17, batch 5700, loss[loss=0.192, simple_loss=0.2781, pruned_loss=0.053, over 4698.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2685, pruned_loss=0.04392, over 937944.83 frames. ], batch size: 13, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:01:32,163 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=16.90 vs. limit=22.5 +2024-07-29 02:01:42,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225701.33333333334, ans=0.1 +2024-07-29 02:01:52,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=225728.0, ans=0.025 +2024-07-29 02:01:55,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=225728.0, ans=0.125 +2024-07-29 02:01:56,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=225728.0, ans=0.1 +2024-07-29 02:01:57,713 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.493e+01 6.225e+01 7.048e+01 1.096e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 02:02:02,543 INFO [train.py:1114] (3/4) Epoch 17, batch 5750, loss[loss=0.1769, simple_loss=0.2807, pruned_loss=0.03657, over 4758.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2697, pruned_loss=0.04449, over 937881.73 frames. ], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:17,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=225768.0, ans=0.2 +2024-07-29 02:02:26,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.31 vs. limit=15.0 +2024-07-29 02:02:34,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=225794.66666666666, ans=0.125 +2024-07-29 02:02:35,951 INFO [train.py:1114] (3/4) Epoch 17, batch 5800, loss[loss=0.1845, simple_loss=0.2854, pruned_loss=0.04178, over 4711.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.27, pruned_loss=0.04416, over 936986.21 frames. 
], batch size: 19, lr: 4.38e-03, grad_scale: 32.0 +2024-07-29 02:02:37,772 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:02:42,089 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:02:56,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten.whitening_limit, batch_count=225821.33333333334, ans=15.0 +2024-07-29 02:03:14,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=225861.33333333334, ans=0.0 +2024-07-29 02:03:14,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=225861.33333333334, ans=0.125 +2024-07-29 02:03:18,477 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.618e+01 6.216e+01 6.871e+01 1.068e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 02:03:21,244 INFO [train.py:1114] (3/4) Epoch 17, batch 5850, loss[loss=0.1871, simple_loss=0.2787, pruned_loss=0.04777, over 4495.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2688, pruned_loss=0.04389, over 937522.02 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:03:26,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=225874.66666666666, ans=0.035 +2024-07-29 02:03:40,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=225901.33333333334, ans=0.0 +2024-07-29 02:03:55,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=225941.33333333334, ans=0.125 +2024-07-29 02:03:56,487 INFO [train.py:1114] (3/4) Epoch 17, batch 5900, loss[loss=0.2397, simple_loss=0.3217, pruned_loss=0.07886, over 4680.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2672, pruned_loss=0.04327, over 937244.42 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:04:16,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=225981.33333333334, ans=0.1 +2024-07-29 02:04:18,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=225981.33333333334, ans=0.0 +2024-07-29 02:04:28,165 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 5.754e+01 6.416e+01 7.190e+01 1.147e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:04:30,245 INFO [train.py:1114] (3/4) Epoch 17, batch 5950, loss[loss=0.2069, simple_loss=0.2922, pruned_loss=0.06085, over 4663.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2668, pruned_loss=0.04311, over 939357.91 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:04:32,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=226008.0, ans=0.0 +2024-07-29 02:04:32,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.50 vs. limit=15.0 +2024-07-29 02:04:54,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. 
limit=15.0 +2024-07-29 02:04:58,584 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.24 vs. limit=15.0 +2024-07-29 02:04:59,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=226061.33333333334, ans=0.0 +2024-07-29 02:05:06,718 INFO [train.py:1114] (3/4) Epoch 17, batch 6000, loss[loss=0.2124, simple_loss=0.3028, pruned_loss=0.06098, over 4216.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2659, pruned_loss=0.04284, over 937004.87 frames. ], batch size: 25, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:05:06,718 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 02:05:30,505 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([1.8159, 2.0652, 2.2217, 1.9814, 2.3101, 2.5205, 2.4416, 2.2795], + device='cuda:3') +2024-07-29 02:05:35,589 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.9724, 3.6709, 4.0791, 4.3702], device='cuda:3') +2024-07-29 02:05:38,066 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.4302, 4.5586, 4.4712, 5.1407], device='cuda:3') +2024-07-29 02:05:43,823 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.1623, simple_loss=0.2646, pruned_loss=0.02995, over 944034.00 frames. +2024-07-29 02:05:43,825 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 02:05:50,779 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.44 vs. limit=15.0 +2024-07-29 02:05:52,764 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.38 vs. limit=15.0 +2024-07-29 02:05:54,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=226088.0, ans=0.125 +2024-07-29 02:05:55,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=226088.0, ans=0.09899494936611666 +2024-07-29 02:06:03,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=226101.33333333334, ans=0.09899494936611666 +2024-07-29 02:06:05,136 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.09 vs. limit=15.0 +2024-07-29 02:06:06,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=226114.66666666666, ans=0.125 +2024-07-29 02:06:08,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=226114.66666666666, ans=0.0 +2024-07-29 02:06:17,673 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.382e+01 5.832e+01 6.475e+01 7.861e+01 1.037e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 02:06:19,742 INFO [train.py:1114] (3/4) Epoch 17, batch 6050, loss[loss=0.1648, simple_loss=0.2559, pruned_loss=0.03692, over 4781.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2664, pruned_loss=0.04294, over 937828.14 frames. 
], batch size: 12, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:06:22,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=226141.33333333334, ans=0.0 +2024-07-29 02:06:24,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=226141.33333333334, ans=0.2 +2024-07-29 02:06:31,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=226154.66666666666, ans=0.2 +2024-07-29 02:06:35,991 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.45 vs. limit=12.0 +2024-07-29 02:06:37,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226168.0, ans=0.125 +2024-07-29 02:06:38,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.56 vs. limit=6.0 +2024-07-29 02:06:49,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226194.66666666666, ans=0.125 +2024-07-29 02:07:00,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=226208.0, ans=0.0 +2024-07-29 02:07:01,459 INFO [train.py:1114] (3/4) Epoch 17, batch 6100, loss[loss=0.1724, simple_loss=0.281, pruned_loss=0.03192, over 4690.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2652, pruned_loss=0.04227, over 937145.30 frames. ], batch size: 15, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:07:17,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=226221.33333333334, ans=0.125 +2024-07-29 02:07:17,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=226221.33333333334, ans=0.0 +2024-07-29 02:07:44,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=226248.0, ans=0.125 +2024-07-29 02:07:45,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.51 vs. limit=5.0 +2024-07-29 02:07:49,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=226261.33333333334, ans=0.025 +2024-07-29 02:07:53,314 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.857e+01 5.509e+01 6.001e+01 6.915e+01 1.050e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 02:07:53,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=226261.33333333334, ans=0.0 +2024-07-29 02:07:55,391 INFO [train.py:1114] (3/4) Epoch 17, batch 6150, loss[loss=0.2011, simple_loss=0.2868, pruned_loss=0.05773, over 3262.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2659, pruned_loss=0.04226, over 935728.49 frames. ], batch size: 35, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:07,761 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.68 vs. 
limit=15.0 +2024-07-29 02:08:15,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226288.0, ans=0.1 +2024-07-29 02:08:19,467 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226288.0, ans=0.1 +2024-07-29 02:08:30,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=226301.33333333334, ans=0.95 +2024-07-29 02:08:33,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=226314.66666666666, ans=0.0 +2024-07-29 02:08:40,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226328.0, ans=0.125 +2024-07-29 02:08:43,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226328.0, ans=0.125 +2024-07-29 02:08:45,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=226328.0, ans=0.0 +2024-07-29 02:08:56,970 INFO [train.py:1114] (3/4) Epoch 17, batch 6200, loss[loss=0.15, simple_loss=0.2527, pruned_loss=0.02358, over 4744.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.267, pruned_loss=0.0427, over 935289.56 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:08:58,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226341.33333333334, ans=0.1 +2024-07-29 02:09:47,094 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.06 vs. limit=12.0 +2024-07-29 02:09:47,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=226394.66666666666, ans=0.05 +2024-07-29 02:09:48,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226394.66666666666, ans=0.125 +2024-07-29 02:09:51,543 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.525e+01 5.563e+01 6.274e+01 7.227e+01 1.075e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 02:09:53,626 INFO [train.py:1114] (3/4) Epoch 17, batch 6250, loss[loss=0.1968, simple_loss=0.3001, pruned_loss=0.04671, over 4809.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2667, pruned_loss=0.04244, over 932425.20 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:09:53,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=226408.0, ans=0.0 +2024-07-29 02:09:54,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=226408.0, ans=0.125 +2024-07-29 02:10:07,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=226434.66666666666, ans=0.1 +2024-07-29 02:10:13,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.89 vs. 
limit=15.0 +2024-07-29 02:10:19,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=226448.0, ans=0.0 +2024-07-29 02:10:21,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=6.19 vs. limit=12.0 +2024-07-29 02:10:30,816 INFO [train.py:1114] (3/4) Epoch 17, batch 6300, loss[loss=0.1719, simple_loss=0.2451, pruned_loss=0.04937, over 4502.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2665, pruned_loss=0.04249, over 929016.94 frames. ], batch size: 10, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:10:44,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=226474.66666666666, ans=0.0 +2024-07-29 02:10:46,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226488.0, ans=0.125 +2024-07-29 02:10:55,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0 +2024-07-29 02:11:14,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=226528.0, ans=0.125 +2024-07-29 02:11:15,466 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.120e+01 5.607e+01 6.569e+01 7.954e+01 1.446e+02, threshold=1.314e+02, percent-clipped=2.0 +2024-07-29 02:11:15,694 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=226528.0, ans=0.125 +2024-07-29 02:11:15,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=226528.0, ans=0.0 +2024-07-29 02:11:16,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=226528.0, ans=0.125 +2024-07-29 02:11:31,874 INFO [train.py:1114] (3/4) Epoch 17, batch 6350, loss[loss=0.2016, simple_loss=0.2859, pruned_loss=0.05866, over 4492.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.04285, over 933707.81 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:12:14,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=226568.0, ans=0.0 +2024-07-29 02:12:30,408 INFO [train.py:1114] (3/4) Epoch 17, batch 6400, loss[loss=0.1606, simple_loss=0.2562, pruned_loss=0.03248, over 4638.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2663, pruned_loss=0.04293, over 934867.53 frames. 
], batch size: 13, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:12:42,502 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:12:52,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=226608.0, ans=0.0 +2024-07-29 02:12:59,797 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:13:08,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226634.66666666666, ans=0.0 +2024-07-29 02:13:09,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=226634.66666666666, ans=0.0 +2024-07-29 02:13:10,990 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:13:14,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=226648.0, ans=0.0 +2024-07-29 02:13:15,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.73 vs. limit=5.0 +2024-07-29 02:13:24,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=226661.33333333334, ans=0.0 +2024-07-29 02:13:28,828 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.008e+01 5.819e+01 6.340e+01 7.116e+01 1.046e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 02:13:33,296 INFO [train.py:1114] (3/4) Epoch 17, batch 6450, loss[loss=0.1787, simple_loss=0.2656, pruned_loss=0.04592, over 4458.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2661, pruned_loss=0.04222, over 938427.47 frames. ], batch size: 21, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:13:40,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=226688.0, ans=0.0 +2024-07-29 02:13:40,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.50 vs. limit=15.0 +2024-07-29 02:14:10,840 INFO [train.py:1114] (3/4) Epoch 17, batch 6500, loss[loss=0.1902, simple_loss=0.2615, pruned_loss=0.05944, over 3367.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2654, pruned_loss=0.04182, over 939793.78 frames. ], batch size: 35, lr: 4.37e-03, grad_scale: 64.0 +2024-07-29 02:14:11,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=226741.33333333334, ans=0.5 +2024-07-29 02:14:14,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=226741.33333333334, ans=0.125 +2024-07-29 02:14:15,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=226741.33333333334, ans=0.2 +2024-07-29 02:14:21,446 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.33 vs. 
limit=15.0 +2024-07-29 02:14:31,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=226781.33333333334, ans=0.0 +2024-07-29 02:14:34,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=226781.33333333334, ans=0.2 +2024-07-29 02:14:42,543 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+01 5.659e+01 6.416e+01 7.709e+01 1.114e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 02:14:43,952 INFO [train.py:1114] (3/4) Epoch 17, batch 6550, loss[loss=0.1898, simple_loss=0.269, pruned_loss=0.05529, over 4796.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2658, pruned_loss=0.04202, over 942702.75 frames. ], batch size: 11, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:14:46,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=226808.0, ans=0.1 +2024-07-29 02:14:47,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=226808.0, ans=0.0 +2024-07-29 02:14:49,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=226821.33333333334, ans=0.0 +2024-07-29 02:14:50,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=226821.33333333334, ans=0.1 +2024-07-29 02:14:52,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226821.33333333334, ans=0.0 +2024-07-29 02:14:59,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=226834.66666666666, ans=0.2 +2024-07-29 02:15:07,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=226848.0, ans=0.0 +2024-07-29 02:15:10,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=226848.0, ans=0.025 +2024-07-29 02:15:11,005 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.61 vs. limit=15.0 +2024-07-29 02:15:12,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=226861.33333333334, ans=0.125 +2024-07-29 02:15:18,655 INFO [train.py:1114] (3/4) Epoch 17, batch 6600, loss[loss=0.1679, simple_loss=0.267, pruned_loss=0.03436, over 4939.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2656, pruned_loss=0.04178, over 944652.60 frames. ], batch size: 14, lr: 4.37e-03, grad_scale: 32.0 +2024-07-29 02:15:18,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=226874.66666666666, ans=0.0 +2024-07-29 02:15:18,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=226874.66666666666, ans=0.025 +2024-07-29 02:15:37,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=12.0 +2024-07-29 02:15:40,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.25 vs. 
limit=22.5 +2024-07-29 02:15:41,549 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-29 02:15:46,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=226914.66666666666, ans=15.0 +2024-07-29 02:15:55,951 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.664e+01 6.465e+01 7.332e+01 1.238e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 02:15:57,324 INFO [train.py:1114] (3/4) Epoch 17, batch 6650, loss[loss=0.2011, simple_loss=0.2963, pruned_loss=0.05297, over 4641.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2654, pruned_loss=0.04148, over 943260.76 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:15:58,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=226941.33333333334, ans=0.125 +2024-07-29 02:16:03,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.69 vs. limit=12.0 +2024-07-29 02:16:04,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=226941.33333333334, ans=0.2 +2024-07-29 02:16:10,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=226954.66666666666, ans=0.125 +2024-07-29 02:16:10,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=226954.66666666666, ans=0.125 +2024-07-29 02:16:15,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.62 vs. limit=15.0 +2024-07-29 02:16:33,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=226994.66666666666, ans=0.125 +2024-07-29 02:16:35,771 INFO [train.py:1114] (3/4) Epoch 17, batch 6700, loss[loss=0.1568, simple_loss=0.2523, pruned_loss=0.03065, over 4744.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2652, pruned_loss=0.04161, over 941893.17 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:16:40,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.29 vs. limit=15.0 +2024-07-29 02:16:44,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=227021.33333333334, ans=0.95 +2024-07-29 02:16:48,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.60 vs. limit=12.0 +2024-07-29 02:16:50,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=227034.66666666666, ans=0.2 +2024-07-29 02:16:54,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=227034.66666666666, ans=0.0 +2024-07-29 02:17:00,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.23 vs. 
limit=15.0 +2024-07-29 02:17:08,093 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.902e+01 6.582e+01 7.550e+01 1.119e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-29 02:17:08,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=227074.66666666666, ans=0.2 +2024-07-29 02:17:09,557 INFO [train.py:1114] (3/4) Epoch 17, batch 6750, loss[loss=0.1862, simple_loss=0.2788, pruned_loss=0.04684, over 4232.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2653, pruned_loss=0.04177, over 939858.40 frames. ], batch size: 25, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:17:10,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=227074.66666666666, ans=0.025 +2024-07-29 02:17:13,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=227074.66666666666, ans=0.04949747468305833 +2024-07-29 02:17:16,583 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.22 vs. limit=15.0 +2024-07-29 02:17:18,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=227088.0, ans=0.05 +2024-07-29 02:17:32,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=227114.66666666666, ans=0.015 +2024-07-29 02:17:37,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=227114.66666666666, ans=0.125 +2024-07-29 02:17:44,460 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.34 vs. limit=22.5 +2024-07-29 02:17:46,135 INFO [train.py:1114] (3/4) Epoch 17, batch 6800, loss[loss=0.2035, simple_loss=0.2797, pruned_loss=0.06364, over 4626.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.04287, over 938569.79 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:17:48,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.97 vs. 
limit=15.0 +2024-07-29 02:17:50,332 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:17:54,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227141.33333333334, ans=0.1 +2024-07-29 02:17:58,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=227154.66666666666, ans=0.0 +2024-07-29 02:18:05,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227168.0, ans=0.125 +2024-07-29 02:18:07,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=227168.0, ans=0.125 +2024-07-29 02:18:10,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=227181.33333333334, ans=0.2 +2024-07-29 02:18:12,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227181.33333333334, ans=0.1 +2024-07-29 02:18:19,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=227194.66666666666, ans=0.0 +2024-07-29 02:18:23,158 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.796e+01 6.354e+01 7.528e+01 1.110e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 02:18:24,490 INFO [train.py:1114] (3/4) Epoch 17, batch 6850, loss[loss=0.1515, simple_loss=0.259, pruned_loss=0.02201, over 4704.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04277, over 940296.54 frames. ], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:18:28,073 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=227208.0, ans=0.125 +2024-07-29 02:18:30,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.81 vs. limit=15.0 +2024-07-29 02:18:37,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=227234.66666666666, ans=0.0 +2024-07-29 02:18:44,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.19 vs. limit=15.0 +2024-07-29 02:18:50,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=227248.0, ans=0.125 +2024-07-29 02:18:52,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=227261.33333333334, ans=0.2 +2024-07-29 02:18:56,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten.whitening_limit, batch_count=227261.33333333334, ans=15.0 +2024-07-29 02:18:58,429 INFO [train.py:1114] (3/4) Epoch 17, batch 6900, loss[loss=0.1589, simple_loss=0.2568, pruned_loss=0.03051, over 4962.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2671, pruned_loss=0.04259, over 942693.05 frames. 
], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:19:10,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227288.0, ans=0.125 +2024-07-29 02:19:22,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=227314.66666666666, ans=0.025 +2024-07-29 02:19:30,581 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.702e+01 5.673e+01 6.337e+01 7.070e+01 9.910e+01, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 02:19:31,921 INFO [train.py:1114] (3/4) Epoch 17, batch 6950, loss[loss=0.1394, simple_loss=0.2407, pruned_loss=0.01901, over 4520.00 frames. ], tot_loss[loss=0.176, simple_loss=0.267, pruned_loss=0.04255, over 939673.54 frames. ], batch size: 10, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:19:33,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.41 vs. limit=15.0 +2024-07-29 02:19:38,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=227341.33333333334, ans=0.0 +2024-07-29 02:19:39,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=227341.33333333334, ans=10.0 +2024-07-29 02:19:40,449 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.60 vs. limit=15.0 +2024-07-29 02:19:41,336 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.76 vs. limit=22.5 +2024-07-29 02:19:42,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_abs, batch_count=227354.66666666666, ans=0.5 +2024-07-29 02:19:49,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=227368.0, ans=0.125 +2024-07-29 02:19:55,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=227381.33333333334, ans=0.125 +2024-07-29 02:20:02,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=227394.66666666666, ans=0.125 +2024-07-29 02:20:03,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=227394.66666666666, ans=0.0 +2024-07-29 02:20:06,916 INFO [train.py:1114] (3/4) Epoch 17, batch 7000, loss[loss=0.1808, simple_loss=0.2753, pruned_loss=0.04308, over 4598.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2663, pruned_loss=0.0423, over 938533.24 frames. ], batch size: 17, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:20:21,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=227434.66666666666, ans=0.125 +2024-07-29 02:20:22,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=227434.66666666666, ans=0.0 +2024-07-29 02:20:23,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.45 vs. 
limit=15.0 +2024-07-29 02:20:25,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.94 vs. limit=15.0 +2024-07-29 02:20:29,428 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=227448.0, ans=0.125 +2024-07-29 02:20:38,599 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.627e+01 5.582e+01 6.064e+01 6.691e+01 1.096e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 02:20:39,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=227474.66666666666, ans=0.09899494936611666 +2024-07-29 02:20:39,945 INFO [train.py:1114] (3/4) Epoch 17, batch 7050, loss[loss=0.1935, simple_loss=0.2884, pruned_loss=0.04927, over 4634.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2664, pruned_loss=0.04239, over 941776.34 frames. ], batch size: 19, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:20:46,871 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:20:49,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.78 vs. limit=22.5 +2024-07-29 02:20:54,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=227488.0, ans=0.025 +2024-07-29 02:20:56,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=227501.33333333334, ans=0.025 +2024-07-29 02:21:08,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227514.66666666666, ans=0.1 +2024-07-29 02:21:08,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=227514.66666666666, ans=0.0 +2024-07-29 02:21:09,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.37 vs. limit=22.5 +2024-07-29 02:21:09,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=227514.66666666666, ans=0.125 +2024-07-29 02:21:10,521 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.28 vs. limit=6.0 +2024-07-29 02:21:13,464 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=227528.0, ans=0.125 +2024-07-29 02:21:17,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=227528.0, ans=0.2 +2024-07-29 02:21:20,635 INFO [train.py:1114] (3/4) Epoch 17, batch 7100, loss[loss=0.2418, simple_loss=0.3345, pruned_loss=0.07455, over 4804.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.267, pruned_loss=0.0432, over 936360.35 frames. 
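
The recurring `optim.py:487` warnings (`Clipping_scale=2.0, grad-norm quartiles ... threshold=...`) summarize the distribution of recent gradient norms: the five numbers read as min / 25% / 50% / 75% / max, and in each warning the threshold equals `Clipping_scale` times the median (for instance 2.0 x 6.064e+01 gives the 1.213e+02 just above). A sketch of producing such a summary, with the function name and window size being assumptions:

```python
import numpy as np

def grad_norm_summary(recent_norms, clipping_scale=2.0):
    """Quartiles of a window of gradient norms plus a median-based threshold."""
    qs = np.quantile(recent_norms, [0.0, 0.25, 0.5, 0.75, 1.0])
    threshold = clipping_scale * qs[2]            # 2.0 x median, as in the log
    pct_clipped = 100.0 * np.mean(np.asarray(recent_norms) > threshold)
    return qs, threshold, pct_clipped

norms = np.random.lognormal(mean=4.1, sigma=0.2, size=128)  # fake window
qs, thr, pct = grad_norm_summary(norms)
print(f"grad-norm quartiles {qs}, threshold={thr:.3e}, percent-clipped={pct}")
```
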
], batch size: 15, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:21:33,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227568.0, ans=0.1 +2024-07-29 02:21:52,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=227594.66666666666, ans=0.025 +2024-07-29 02:21:53,006 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.433e+01 5.574e+01 6.289e+01 7.294e+01 1.340e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-29 02:21:54,447 INFO [train.py:1114] (3/4) Epoch 17, batch 7150, loss[loss=0.2022, simple_loss=0.288, pruned_loss=0.05825, over 4624.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.265, pruned_loss=0.04271, over 937547.92 frames. ], batch size: 21, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:21:56,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=227608.0, ans=0.125 +2024-07-29 02:22:04,800 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.75 vs. limit=15.0 +2024-07-29 02:22:07,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=227634.66666666666, ans=0.2 +2024-07-29 02:22:09,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=227634.66666666666, ans=0.2 +2024-07-29 02:22:15,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=227648.0, ans=0.0 +2024-07-29 02:22:22,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=227661.33333333334, ans=0.125 +2024-07-29 02:22:27,909 INFO [train.py:1114] (3/4) Epoch 17, batch 7200, loss[loss=0.1631, simple_loss=0.2669, pruned_loss=0.02961, over 4809.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2651, pruned_loss=0.04218, over 937803.82 frames. ], batch size: 15, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:22:28,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=227674.66666666666, ans=0.125 +2024-07-29 02:22:37,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227688.0, ans=0.1 +2024-07-29 02:22:39,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.97 vs. 
limit=15.0 +2024-07-29 02:22:43,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=227701.33333333334, ans=0.1 +2024-07-29 02:22:44,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=227701.33333333334, ans=0.125 +2024-07-29 02:22:47,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=227714.66666666666, ans=0.025 +2024-07-29 02:22:47,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=227714.66666666666, ans=0.5 +2024-07-29 02:22:49,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=227714.66666666666, ans=10.0 +2024-07-29 02:22:52,866 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.57 vs. limit=15.0 +2024-07-29 02:22:59,858 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.749e+01 5.624e+01 6.163e+01 6.917e+01 1.062e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 02:23:01,135 INFO [train.py:1114] (3/4) Epoch 17, batch 7250, loss[loss=0.158, simple_loss=0.2451, pruned_loss=0.03545, over 4862.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2645, pruned_loss=0.04196, over 939240.71 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:23:14,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=227754.66666666666, ans=0.125 +2024-07-29 02:23:23,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=227781.33333333334, ans=0.0 +2024-07-29 02:23:24,830 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:23:33,017 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.38 vs. limit=10.0 +2024-07-29 02:23:35,258 INFO [train.py:1114] (3/4) Epoch 17, batch 7300, loss[loss=0.1415, simple_loss=0.2332, pruned_loss=0.02493, over 4860.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2641, pruned_loss=0.0415, over 940196.75 frames. 
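
The `Whitening:` lines compare a scalar statistic of a module's activation covariance (`metric=...`) against a limit, and some of those limits are themselves scheduled (the `whitening_limit` entries). A natural statistic of this kind equals 1.0 when the per-group covariance is isotropic and grows as the eigenvalue spectrum spreads; the formulation below is an assumption in that spirit, not necessarily the exact one used here.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    """1.0 iff each channel group's covariance is a multiple of the identity."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngi,ngj->gij", x, x) / num_frames  # (groups, c, c)
    diag_mean = cov.diagonal(dim1=1, dim2=2).mean(dim=1)   # (groups,)
    c = cov.shape[-1]
    metric = (cov ** 2).sum(dim=(1, 2)) / (c * diag_mean ** 2)
    return metric.mean().item()

white = torch.randn(8000, 256)
print(whitening_metric(white, num_groups=1))  # close to 1.0 for white noise
```
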
], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:23:35,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=227808.0, ans=10.0 +2024-07-29 02:23:42,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227821.33333333334, ans=0.1 +2024-07-29 02:23:43,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten.whitening_limit, batch_count=227821.33333333334, ans=15.0 +2024-07-29 02:23:45,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=227821.33333333334, ans=0.1 +2024-07-29 02:23:54,845 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:24:00,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=227848.0, ans=0.125 +2024-07-29 02:24:00,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=227848.0, ans=0.125 +2024-07-29 02:24:03,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=227861.33333333334, ans=0.0 +2024-07-29 02:24:07,034 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.752e+01 5.670e+01 6.102e+01 6.863e+01 9.457e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 02:24:08,392 INFO [train.py:1114] (3/4) Epoch 17, batch 7350, loss[loss=0.1609, simple_loss=0.2466, pruned_loss=0.03759, over 4638.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.265, pruned_loss=0.04198, over 939808.66 frames. ], batch size: 12, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:24:17,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=227888.0, ans=0.125 +2024-07-29 02:24:24,529 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:24:28,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=227901.33333333334, ans=0.1 +2024-07-29 02:24:37,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.66 vs. limit=15.0 +2024-07-29 02:24:40,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=227928.0, ans=0.0 +2024-07-29 02:24:46,994 INFO [train.py:1114] (3/4) Epoch 17, batch 7400, loss[loss=0.1706, simple_loss=0.2751, pruned_loss=0.033, over 4691.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2658, pruned_loss=0.0421, over 941055.46 frames. 
], batch size: 13, lr: 4.36e-03, grad_scale: 32.0 +2024-07-29 02:24:53,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=227941.33333333334, ans=0.0 +2024-07-29 02:25:07,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=227981.33333333334, ans=0.125 +2024-07-29 02:25:09,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=227981.33333333334, ans=0.2 +2024-07-29 02:25:09,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=227981.33333333334, ans=0.125 +2024-07-29 02:25:12,046 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=227981.33333333334, ans=0.025 +2024-07-29 02:25:18,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=227994.66666666666, ans=0.125 +2024-07-29 02:25:20,903 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.500e+01 5.692e+01 6.442e+01 7.535e+01 1.153e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 02:25:22,144 INFO [train.py:1114] (3/4) Epoch 17, batch 7450, loss[loss=0.14, simple_loss=0.2266, pruned_loss=0.02671, over 4608.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2648, pruned_loss=0.04198, over 937808.84 frames. ], batch size: 11, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:25:22,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=228008.0, ans=0.025 +2024-07-29 02:25:22,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=228008.0, ans=0.025 +2024-07-29 02:25:23,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=228008.0, ans=0.2 +2024-07-29 02:25:27,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=228008.0, ans=0.1 +2024-07-29 02:25:28,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=228021.33333333334, ans=0.0 +2024-07-29 02:25:36,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=228034.66666666666, ans=0.0 +2024-07-29 02:25:43,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn1.whiten.whitening_limit, batch_count=228048.0, ans=22.5 +2024-07-29 02:25:45,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.22 vs. limit=22.5 +2024-07-29 02:25:48,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.44 vs. limit=10.0 +2024-07-29 02:25:51,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=228061.33333333334, ans=0.2 +2024-07-29 02:25:55,075 INFO [train.py:1114] (3/4) Epoch 17, batch 7500, loss[loss=0.2117, simple_loss=0.2931, pruned_loss=0.0652, over 3575.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2651, pruned_loss=0.04205, over 936582.06 frames. 
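
The many `balancer*` parameters in these logs (`prob`, `min_positive`, `max_abs`, `min_abs`, and so on) name per-channel activation constraints: bounds on the fraction of positive values and on the mean absolute value, applied with a scheduled probability. The real module steers activations toward those ranges during training; the sketch below only reports which channels currently violate the logged bounds, a deliberately simplified, assumed reading.

```python
import torch

def balancer_violations(x: torch.Tensor, min_positive=0.025, max_positive=0.95,
                        min_abs=0.2, max_abs=10.0) -> torch.Tensor:
    """Indices of channels outside the positivity / magnitude bounds."""
    pos_frac = (x > 0).float().mean(dim=0)   # fraction of positive values
    mean_abs = x.abs().mean(dim=0)           # mean absolute activation
    bad = ((pos_frac < min_positive) | (pos_frac > max_positive) |
           (mean_abs < min_abs) | (mean_abs > max_abs))
    return bad.nonzero(as_tuple=False).flatten()

x = torch.randn(4096, 256)                   # (num_frames, num_channels)
print(balancer_violations(x))                # typically empty for N(0, 1) data
```
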
], batch size: 35, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:25:55,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=228074.66666666666, ans=0.125 +2024-07-29 02:26:08,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228101.33333333334, ans=0.125 +2024-07-29 02:26:16,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.whiten.whitening_limit, batch_count=228114.66666666666, ans=12.0 +2024-07-29 02:26:21,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=228128.0, ans=0.125 +2024-07-29 02:26:26,553 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.692e+01 6.151e+01 7.079e+01 1.117e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 02:26:27,912 INFO [train.py:1114] (3/4) Epoch 17, batch 7550, loss[loss=0.1746, simple_loss=0.272, pruned_loss=0.03861, over 4587.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2675, pruned_loss=0.04357, over 936134.13 frames. ], batch size: 17, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:26:35,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=228154.66666666666, ans=0.09899494936611666 +2024-07-29 02:26:41,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=228168.0, ans=0.1 +2024-07-29 02:26:48,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=228181.33333333334, ans=0.0 +2024-07-29 02:26:55,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=228194.66666666666, ans=0.05 +2024-07-29 02:26:59,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=228194.66666666666, ans=0.0 +2024-07-29 02:27:00,383 INFO [train.py:1114] (3/4) Epoch 17, batch 7600, loss[loss=0.2205, simple_loss=0.3155, pruned_loss=0.06277, over 4811.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04306, over 937783.65 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:27:07,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=228221.33333333334, ans=0.0 +2024-07-29 02:27:32,119 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.867e+01 5.577e+01 6.101e+01 6.985e+01 1.081e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 02:27:33,474 INFO [train.py:1114] (3/4) Epoch 17, batch 7650, loss[loss=0.1528, simple_loss=0.2417, pruned_loss=0.03199, over 4942.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2673, pruned_loss=0.04361, over 936848.45 frames. ], batch size: 12, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:27:38,287 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228274.66666666666, ans=0.125 +2024-07-29 02:27:42,596 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.81 vs. 
limit=15.0 +2024-07-29 02:27:42,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=228288.0, ans=0.1 +2024-07-29 02:27:52,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=228314.66666666666, ans=0.025 +2024-07-29 02:27:53,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=228314.66666666666, ans=0.125 +2024-07-29 02:28:06,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=228341.33333333334, ans=0.125 +2024-07-29 02:28:06,777 INFO [train.py:1114] (3/4) Epoch 17, batch 7700, loss[loss=0.1753, simple_loss=0.2706, pruned_loss=0.04001, over 4695.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2677, pruned_loss=0.04276, over 933930.81 frames. ], batch size: 13, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:17,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=228354.66666666666, ans=0.0 +2024-07-29 02:28:19,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=228368.0, ans=0.125 +2024-07-29 02:28:28,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=228381.33333333334, ans=0.09899494936611666 +2024-07-29 02:28:31,927 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.83 vs. limit=22.5 +2024-07-29 02:28:38,552 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.830e+01 5.778e+01 6.221e+01 6.817e+01 1.028e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 02:28:39,813 INFO [train.py:1114] (3/4) Epoch 17, batch 7750, loss[loss=0.1747, simple_loss=0.2707, pruned_loss=0.03934, over 4930.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2679, pruned_loss=0.04263, over 934914.03 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:28:45,432 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.39 vs. limit=15.0 +2024-07-29 02:28:59,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228434.66666666666, ans=0.125 +2024-07-29 02:29:01,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.77 vs. limit=15.0 +2024-07-29 02:29:13,586 INFO [train.py:1114] (3/4) Epoch 17, batch 7800, loss[loss=0.1941, simple_loss=0.3038, pruned_loss=0.04221, over 4661.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.268, pruned_loss=0.04267, over 936936.05 frames. 
], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:29:22,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=228488.0, ans=0.125 +2024-07-29 02:29:30,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=228501.33333333334, ans=0.035 +2024-07-29 02:29:33,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=228514.66666666666, ans=0.125 +2024-07-29 02:29:35,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=228514.66666666666, ans=0.125 +2024-07-29 02:29:36,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=228514.66666666666, ans=0.0 +2024-07-29 02:29:41,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228528.0, ans=0.1 +2024-07-29 02:29:43,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.81 vs. limit=15.0 +2024-07-29 02:29:45,763 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.634e+01 5.601e+01 6.061e+01 6.909e+01 9.922e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 02:29:47,132 INFO [train.py:1114] (3/4) Epoch 17, batch 7850, loss[loss=0.1598, simple_loss=0.2458, pruned_loss=0.03688, over 4584.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2683, pruned_loss=0.04307, over 936279.35 frames. ], batch size: 10, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:01,774 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.65 vs. limit=10.0 +2024-07-29 02:30:05,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=228568.0, ans=0.125 +2024-07-29 02:30:19,668 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.08 vs. limit=10.0 +2024-07-29 02:30:20,614 INFO [train.py:1114] (3/4) Epoch 17, batch 7900, loss[loss=0.1884, simple_loss=0.2819, pruned_loss=0.0474, over 4873.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.269, pruned_loss=0.04346, over 933138.84 frames. 
], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:21,294 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=228608.0, ans=0.125 +2024-07-29 02:30:28,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=228621.33333333334, ans=0.125 +2024-07-29 02:30:29,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228621.33333333334, ans=0.125 +2024-07-29 02:30:33,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=228634.66666666666, ans=0.0 +2024-07-29 02:30:35,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=228634.66666666666, ans=0.125 +2024-07-29 02:30:35,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=228634.66666666666, ans=0.0 +2024-07-29 02:30:35,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=228634.66666666666, ans=0.125 +2024-07-29 02:30:35,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=228634.66666666666, ans=0.0 +2024-07-29 02:30:41,243 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.95 vs. limit=12.0 +2024-07-29 02:30:42,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=228648.0, ans=0.2 +2024-07-29 02:30:51,950 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.771e+01 6.375e+01 7.176e+01 1.150e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-29 02:30:53,271 INFO [train.py:1114] (3/4) Epoch 17, batch 7950, loss[loss=0.2085, simple_loss=0.2986, pruned_loss=0.05919, over 3377.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2693, pruned_loss=0.04339, over 935282.30 frames. ], batch size: 35, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:30:56,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=228674.66666666666, ans=0.125 +2024-07-29 02:31:04,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=228688.0, ans=0.0 +2024-07-29 02:31:05,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=228688.0, ans=0.125 +2024-07-29 02:31:33,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=228728.0, ans=10.0 +2024-07-29 02:31:37,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.76 vs. limit=10.0 +2024-07-29 02:31:43,744 INFO [train.py:1114] (3/4) Epoch 17, batch 8000, loss[loss=0.1739, simple_loss=0.256, pruned_loss=0.04594, over 4612.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2675, pruned_loss=0.04285, over 934812.79 frames. 
], batch size: 11, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:31:45,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=228741.33333333334, ans=0.025 +2024-07-29 02:31:55,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=228754.66666666666, ans=0.0 +2024-07-29 02:32:07,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=228781.33333333334, ans=0.125 +2024-07-29 02:32:15,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=228794.66666666666, ans=0.0 +2024-07-29 02:32:17,773 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.673e+01 6.449e+01 7.589e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 02:32:17,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=228794.66666666666, ans=0.125 +2024-07-29 02:32:19,119 INFO [train.py:1114] (3/4) Epoch 17, batch 8050, loss[loss=0.1722, simple_loss=0.2757, pruned_loss=0.03435, over 4810.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.268, pruned_loss=0.04286, over 934678.10 frames. ], batch size: 14, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:32:21,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=228808.0, ans=0.025 +2024-07-29 02:32:32,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=228821.33333333334, ans=0.125 +2024-07-29 02:32:41,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=228848.0, ans=0.0 +2024-07-29 02:32:46,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=228848.0, ans=0.025 +2024-07-29 02:32:52,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=228861.33333333334, ans=0.0 +2024-07-29 02:32:54,994 INFO [train.py:1114] (3/4) Epoch 17, batch 8100, loss[loss=0.1908, simple_loss=0.2774, pruned_loss=0.05209, over 4817.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2679, pruned_loss=0.04289, over 934221.05 frames. 
], batch size: 15, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:32:59,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=228874.66666666666, ans=0.0 +2024-07-29 02:33:04,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=228888.0, ans=0.2 +2024-07-29 02:33:12,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=228901.33333333334, ans=10.0 +2024-07-29 02:33:20,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=228914.66666666666, ans=0.0 +2024-07-29 02:33:28,142 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.710e+01 5.750e+01 6.401e+01 7.734e+01 1.146e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 02:33:29,429 INFO [train.py:1114] (3/4) Epoch 17, batch 8150, loss[loss=0.1699, simple_loss=0.2652, pruned_loss=0.03733, over 4798.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.267, pruned_loss=0.04282, over 937519.75 frames. ], batch size: 15, lr: 4.35e-03, grad_scale: 32.0 +2024-07-29 02:33:32,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=228941.33333333334, ans=0.125 +2024-07-29 02:33:35,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=228954.66666666666, ans=0.125 +2024-07-29 02:33:38,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=228954.66666666666, ans=0.1 +2024-07-29 02:33:47,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=228968.0, ans=0.1 +2024-07-29 02:33:47,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=228968.0, ans=0.0 +2024-07-29 02:33:47,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.04 vs. limit=15.0 +2024-07-29 02:33:50,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=228981.33333333334, ans=0.125 +2024-07-29 02:33:59,044 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=20.73 vs. limit=22.5 +2024-07-29 02:33:59,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=228994.66666666666, ans=0.0 +2024-07-29 02:34:02,002 INFO [train.py:1114] (3/4) Epoch 17, batch 8200, loss[loss=0.2013, simple_loss=0.297, pruned_loss=0.0528, over 4791.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2674, pruned_loss=0.04278, over 938533.63 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:34:08,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=229021.33333333334, ans=0.0 +2024-07-29 02:34:10,466 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=15.72 vs. 
limit=22.5 +2024-07-29 02:34:10,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=229021.33333333334, ans=0.2 +2024-07-29 02:34:15,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=229034.66666666666, ans=0.125 +2024-07-29 02:34:27,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=229048.0, ans=0.125 +2024-07-29 02:34:30,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=229061.33333333334, ans=0.0 +2024-07-29 02:34:34,884 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.838e+01 5.522e+01 6.074e+01 7.199e+01 1.525e+02, threshold=1.215e+02, percent-clipped=1.0 +2024-07-29 02:34:36,182 INFO [train.py:1114] (3/4) Epoch 17, batch 8250, loss[loss=0.1589, simple_loss=0.2453, pruned_loss=0.03621, over 4898.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2683, pruned_loss=0.04289, over 938982.17 frames. ], batch size: 13, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:34:36,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=229074.66666666666, ans=0.0 +2024-07-29 02:34:45,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=229088.0, ans=0.0 +2024-07-29 02:34:46,910 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=229088.0, ans=0.0 +2024-07-29 02:34:50,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=229101.33333333334, ans=0.0 +2024-07-29 02:34:55,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=229101.33333333334, ans=0.0 +2024-07-29 02:35:02,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229114.66666666666, ans=0.1 +2024-07-29 02:35:10,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-07-29 02:35:11,245 INFO [train.py:1114] (3/4) Epoch 17, batch 8300, loss[loss=0.1999, simple_loss=0.2983, pruned_loss=0.05073, over 4907.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2693, pruned_loss=0.04336, over 939072.64 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:35:12,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=229141.33333333334, ans=0.1 +2024-07-29 02:35:28,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=229168.0, ans=0.0 +2024-07-29 02:35:34,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.12 vs. 
limit=15.0 +2024-07-29 02:35:38,455 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:35:41,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=229194.66666666666, ans=0.125 +2024-07-29 02:35:44,145 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.675e+01 6.316e+01 6.956e+01 1.152e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 02:35:45,459 INFO [train.py:1114] (3/4) Epoch 17, batch 8350, loss[loss=0.2106, simple_loss=0.3012, pruned_loss=0.06003, over 4793.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2689, pruned_loss=0.04306, over 941720.45 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:35:59,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=229234.66666666666, ans=0.125 +2024-07-29 02:36:04,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=229234.66666666666, ans=0.125 +2024-07-29 02:36:19,791 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=23.43 vs. limit=22.5 +2024-07-29 02:36:20,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=229261.33333333334, ans=0.0 +2024-07-29 02:36:20,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229261.33333333334, ans=0.1 +2024-07-29 02:36:21,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=229261.33333333334, ans=0.07 +2024-07-29 02:36:22,689 INFO [train.py:1114] (3/4) Epoch 17, batch 8400, loss[loss=0.1685, simple_loss=0.2659, pruned_loss=0.03551, over 4765.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2678, pruned_loss=0.04258, over 940645.95 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:36:26,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229274.66666666666, ans=0.125 +2024-07-29 02:36:34,150 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0 +2024-07-29 02:36:37,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=229301.33333333334, ans=0.125 +2024-07-29 02:36:40,002 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.31 vs. limit=10.0 +2024-07-29 02:36:40,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229301.33333333334, ans=0.1 +2024-07-29 02:36:57,643 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.781e+01 6.432e+01 7.454e+01 1.243e+02, threshold=1.286e+02, percent-clipped=0.0 +2024-07-29 02:36:58,929 INFO [train.py:1114] (3/4) Epoch 17, batch 8450, loss[loss=0.16, simple_loss=0.2594, pruned_loss=0.0303, over 4816.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2686, pruned_loss=0.04265, over 939380.86 frames. 
], batch size: 15, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:37:02,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=229341.33333333334, ans=0.0 +2024-07-29 02:37:15,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=229368.0, ans=0.0 +2024-07-29 02:37:27,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229394.66666666666, ans=0.125 +2024-07-29 02:37:29,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=229394.66666666666, ans=0.025 +2024-07-29 02:37:31,065 INFO [train.py:1114] (3/4) Epoch 17, batch 8500, loss[loss=0.15, simple_loss=0.2381, pruned_loss=0.03095, over 4629.00 frames. ], tot_loss[loss=0.176, simple_loss=0.267, pruned_loss=0.04247, over 939466.06 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:37:36,011 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=229408.0, ans=0.0 +2024-07-29 02:37:46,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=229434.66666666666, ans=0.125 +2024-07-29 02:37:52,268 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=229434.66666666666, ans=0.0 +2024-07-29 02:37:52,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=229434.66666666666, ans=0.125 +2024-07-29 02:37:53,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=229448.0, ans=0.125 +2024-07-29 02:38:04,986 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.408e+01 5.595e+01 6.449e+01 7.243e+01 1.266e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 02:38:06,373 INFO [train.py:1114] (3/4) Epoch 17, batch 8550, loss[loss=0.1539, simple_loss=0.2263, pruned_loss=0.04078, over 4795.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.0429, over 940289.05 frames. ], batch size: 11, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:38:06,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=229474.66666666666, ans=0.0 +2024-07-29 02:38:06,469 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:38:09,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=229474.66666666666, ans=0.125 +2024-07-29 02:38:19,078 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=229488.0, ans=0.125 +2024-07-29 02:38:19,298 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.52 vs. 
limit=15.0 +2024-07-29 02:38:26,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=229514.66666666666, ans=0.0 +2024-07-29 02:38:33,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=229528.0, ans=0.125 +2024-07-29 02:38:36,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=229528.0, ans=0.0 +2024-07-29 02:38:39,391 INFO [train.py:1114] (3/4) Epoch 17, batch 8600, loss[loss=0.186, simple_loss=0.2729, pruned_loss=0.04956, over 4821.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2663, pruned_loss=0.04275, over 940177.11 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:38:43,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=229541.33333333334, ans=0.125 +2024-07-29 02:38:52,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229568.0, ans=0.125 +2024-07-29 02:38:53,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.65 vs. limit=15.0 +2024-07-29 02:38:55,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=229568.0, ans=0.0 +2024-07-29 02:39:00,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=229581.33333333334, ans=0.125 +2024-07-29 02:39:13,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.480e+01 5.824e+01 6.675e+01 7.491e+01 1.199e+02, threshold=1.335e+02, percent-clipped=0.0 +2024-07-29 02:39:14,438 INFO [train.py:1114] (3/4) Epoch 17, batch 8650, loss[loss=0.1683, simple_loss=0.2607, pruned_loss=0.03799, over 4909.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04231, over 941166.05 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:39:39,851 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.75 vs. limit=22.5 +2024-07-29 02:39:46,643 INFO [train.py:1114] (3/4) Epoch 17, batch 8700, loss[loss=0.1588, simple_loss=0.2578, pruned_loss=0.02995, over 4759.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2662, pruned_loss=0.04218, over 938641.55 frames. 
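
`grad_scale` in the training lines is the mixed-precision loss scale, and it moves between powers of two across this stretch (32.0, up to 64.0 around batch 8550, back down to 16.0 by batch 9050). That pattern is characteristic of dynamic loss scaling: halve after a step with non-finite gradients, double after a long streak of clean steps. The toy below mirrors common scaler defaults; whether this run uses exactly these constants is an assumption.

```python
class ToyGradScaler:
    """Dynamic loss scale: back off on overflow, grow after sustained stability."""
    def __init__(self, scale=32.0, backoff=0.5, growth=2.0, interval=2000):
        self.scale, self.backoff = scale, backoff
        self.growth, self.interval = growth, interval
        self.clean_steps = 0

    def step(self, grads_finite: bool) -> float:
        if not grads_finite:
            self.scale *= self.backoff     # overflow: halve immediately
            self.clean_steps = 0
        else:
            self.clean_steps += 1
            if self.clean_steps % self.interval == 0:
                self.scale *= self.growth  # e.g. 32.0 -> 64.0 as in the log
        return self.scale
```
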
], batch size: 13, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:39:52,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=229688.0, ans=0.125 +2024-07-29 02:39:58,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=229701.33333333334, ans=0.0 +2024-07-29 02:40:00,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229701.33333333334, ans=0.1 +2024-07-29 02:40:05,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=229714.66666666666, ans=0.2 +2024-07-29 02:40:08,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229714.66666666666, ans=0.125 +2024-07-29 02:40:11,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229714.66666666666, ans=0.125 +2024-07-29 02:40:12,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=229728.0, ans=0.125 +2024-07-29 02:40:12,717 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.99 vs. limit=15.0 +2024-07-29 02:40:19,048 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.617e+01 6.057e+01 6.881e+01 1.135e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 02:40:20,340 INFO [train.py:1114] (3/4) Epoch 17, batch 8750, loss[loss=0.168, simple_loss=0.256, pruned_loss=0.03998, over 4672.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2669, pruned_loss=0.04283, over 936788.30 frames. ], batch size: 15, lr: 4.34e-03, grad_scale: 64.0 +2024-07-29 02:40:21,290 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.68 vs. limit=15.0 +2024-07-29 02:40:23,124 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=229741.33333333334, ans=0.025 +2024-07-29 02:40:25,539 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229741.33333333334, ans=0.125 +2024-07-29 02:40:35,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.43 vs. 
limit=15.0 +2024-07-29 02:40:45,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=229781.33333333334, ans=0.0 +2024-07-29 02:40:51,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229794.66666666666, ans=0.1 +2024-07-29 02:40:55,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=229794.66666666666, ans=0.0 +2024-07-29 02:40:55,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=229808.0, ans=0.025 +2024-07-29 02:40:55,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=229808.0, ans=0.1 +2024-07-29 02:40:56,105 INFO [train.py:1114] (3/4) Epoch 17, batch 8800, loss[loss=0.1815, simple_loss=0.2765, pruned_loss=0.04323, over 4932.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2674, pruned_loss=0.0431, over 937739.00 frames. ], batch size: 14, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:41:16,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=229848.0, ans=0.0 +2024-07-29 02:41:17,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=229848.0, ans=0.125 +2024-07-29 02:41:28,619 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.657e+01 6.109e+01 6.683e+01 1.097e+02, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 02:41:29,299 INFO [train.py:1114] (3/4) Epoch 17, batch 8850, loss[loss=0.1657, simple_loss=0.2573, pruned_loss=0.037, over 4610.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2672, pruned_loss=0.04292, over 932589.05 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:41:36,757 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.74 vs. limit=10.0 +2024-07-29 02:41:37,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=10.35 vs. limit=22.5 +2024-07-29 02:41:44,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=229901.33333333334, ans=0.125 +2024-07-29 02:41:50,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=229901.33333333334, ans=0.0 +2024-07-29 02:41:51,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=229901.33333333334, ans=0.5 +2024-07-29 02:41:55,237 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.51 vs. 
limit=12.0 +2024-07-29 02:41:56,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=229914.66666666666, ans=0.09899494936611666 +2024-07-29 02:41:58,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=229914.66666666666, ans=0.0 +2024-07-29 02:42:07,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=229941.33333333334, ans=0.1 +2024-07-29 02:42:08,017 INFO [train.py:1114] (3/4) Epoch 17, batch 8900, loss[loss=0.1504, simple_loss=0.2415, pruned_loss=0.02967, over 4936.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2676, pruned_loss=0.04311, over 930693.25 frames. ], batch size: 12, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:42:12,078 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.50 vs. limit=10.0 +2024-07-29 02:42:13,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=229941.33333333334, ans=0.125 +2024-07-29 02:42:18,687 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=229954.66666666666, ans=0.09899494936611666 +2024-07-29 02:42:23,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=229968.0, ans=0.125 +2024-07-29 02:42:24,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=229968.0, ans=0.0 +2024-07-29 02:42:28,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=229968.0, ans=0.125 +2024-07-29 02:42:41,806 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.55 vs. limit=22.5 +2024-07-29 02:42:43,285 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.920e+01 5.712e+01 6.272e+01 7.147e+01 1.085e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 02:42:43,949 INFO [train.py:1114] (3/4) Epoch 17, batch 8950, loss[loss=0.1822, simple_loss=0.2707, pruned_loss=0.04682, over 4554.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2686, pruned_loss=0.04366, over 931227.16 frames. ], batch size: 21, lr: 4.34e-03, grad_scale: 32.0 +2024-07-29 02:42:50,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=230021.33333333334, ans=0.0 +2024-07-29 02:43:06,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230034.66666666666, ans=0.1 +2024-07-29 02:43:07,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=230034.66666666666, ans=0.025 +2024-07-29 02:43:09,906 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=230034.66666666666, ans=0.125 +2024-07-29 02:43:14,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=230048.0, ans=0.1 +2024-07-29 02:43:26,314 INFO [train.py:1114] (3/4) Epoch 17, batch 9000, loss[loss=0.1826, simple_loss=0.2699, pruned_loss=0.0476, over 4643.00 frames. 
], tot_loss[loss=0.1778, simple_loss=0.2682, pruned_loss=0.04365, over 934391.60 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:43:26,315 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 02:43:31,094 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.3.self_attn_weights, attn_weights_entropy = tensor([3.0281, 1.9545, 2.5564, 2.6852, 2.5507, 2.4268, 2.7259, 2.0635], + device='cuda:3') +2024-07-29 02:43:32,981 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.6842, 3.7136, 2.6396, 3.9880, 3.3886, 3.6350, 4.2871, 3.9273], + device='cuda:3') +2024-07-29 02:43:36,540 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.6165, 4.1437, 4.7792, 3.6906], device='cuda:3') +2024-07-29 02:43:37,870 INFO [train.py:1146] (3/4) Epoch 17, validation: loss=0.1619, simple_loss=0.2644, pruned_loss=0.02967, over 944034.00 frames. +2024-07-29 02:43:37,871 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 02:43:41,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=230074.66666666666, ans=0.025 +2024-07-29 02:43:42,869 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=230074.66666666666, ans=0.2 +2024-07-29 02:43:42,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230074.66666666666, ans=0.1 +2024-07-29 02:43:45,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=230088.0, ans=0.2 +2024-07-29 02:43:52,882 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.68 vs. limit=22.5 +2024-07-29 02:44:05,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=230128.0, ans=0.125 +2024-07-29 02:44:07,211 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=230128.0, ans=0.125 +2024-07-29 02:44:12,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.566e+01 6.347e+01 7.363e+01 1.043e+02, threshold=1.269e+02, percent-clipped=0.0 +2024-07-29 02:44:12,230 INFO [train.py:1114] (3/4) Epoch 17, batch 9050, loss[loss=0.1458, simple_loss=0.2341, pruned_loss=0.02879, over 4552.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2664, pruned_loss=0.04248, over 934803.90 frames. 
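
During the validation pass above, `zipformer.py:1858` logs an `attn_weights_entropy` tensor for selected self-attention modules: the entropy of each head's attention distribution, a standard collapse diagnostic (near zero when a head fixates on a single position, near the log of the sequence length when it attends uniformly). A sketch of that computation, with the batch/time reduction being an assumption:

```python
import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    """Mean per-head entropy; attn is (batch, heads, tgt_len, src_len), rows sum to 1."""
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # (batch, heads, tgt_len)
    return ent.mean(dim=(0, 2))                     # one value per head

attn = torch.softmax(torch.randn(2, 4, 10, 10), dim=-1)
print(attn_weights_entropy(attn))  # below the uniform ceiling log(10) ~ 2.30
```
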
], batch size: 10, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:44:12,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=230141.33333333334, ans=0.0 +2024-07-29 02:44:14,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=230141.33333333334, ans=0.125 +2024-07-29 02:44:15,583 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:44:20,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230154.66666666666, ans=0.1 +2024-07-29 02:44:28,135 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.28 vs. limit=15.0 +2024-07-29 02:44:29,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=230168.0, ans=0.1 +2024-07-29 02:44:39,210 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.87 vs. limit=15.0 +2024-07-29 02:44:41,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=230194.66666666666, ans=0.125 +2024-07-29 02:44:41,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.66 vs. limit=15.0 +2024-07-29 02:44:42,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230194.66666666666, ans=0.0 +2024-07-29 02:44:43,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=230194.66666666666, ans=0.09899494936611666 +2024-07-29 02:44:46,648 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.04 vs. limit=15.0 +2024-07-29 02:44:47,572 INFO [train.py:1114] (3/4) Epoch 17, batch 9100, loss[loss=0.183, simple_loss=0.267, pruned_loss=0.04955, over 4938.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2657, pruned_loss=0.04186, over 937368.46 frames. ], batch size: 14, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:45:00,462 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:45:02,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=230234.66666666666, ans=0.125 +2024-07-29 02:45:19,825 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.594e+01 5.656e+01 6.287e+01 6.947e+01 9.623e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 02:45:19,860 INFO [train.py:1114] (3/4) Epoch 17, batch 9150, loss[loss=0.1888, simple_loss=0.2931, pruned_loss=0.04224, over 4809.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2665, pruned_loss=0.04216, over 936011.14 frames. 
], batch size: 14, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:45:23,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=230274.66666666666, ans=0.07 +2024-07-29 02:45:24,457 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.91 vs. limit=15.0 +2024-07-29 02:45:30,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=230288.0, ans=0.125 +2024-07-29 02:45:30,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=230288.0, ans=0.125 +2024-07-29 02:45:34,499 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=230301.33333333334, ans=0.0 +2024-07-29 02:45:47,149 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=10.29 vs. limit=15.0 +2024-07-29 02:45:52,562 INFO [train.py:1114] (3/4) Epoch 17, batch 9200, loss[loss=0.1681, simple_loss=0.2572, pruned_loss=0.03947, over 4860.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2657, pruned_loss=0.04226, over 937925.43 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:45:54,674 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=230341.33333333334, ans=0.125 +2024-07-29 02:45:59,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=230354.66666666666, ans=0.1 +2024-07-29 02:46:01,459 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.89 vs. limit=15.0 +2024-07-29 02:46:01,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=230354.66666666666, ans=0.125 +2024-07-29 02:46:18,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=230394.66666666666, ans=0.0 +2024-07-29 02:46:21,065 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.94 vs. limit=22.5 +2024-07-29 02:46:24,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.375e+01 5.574e+01 6.025e+01 6.747e+01 8.782e+01, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 02:46:24,584 INFO [train.py:1114] (3/4) Epoch 17, batch 9250, loss[loss=0.1701, simple_loss=0.2717, pruned_loss=0.0343, over 4632.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2657, pruned_loss=0.04227, over 938923.36 frames. 
], batch size: 13, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:46:28,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=230408.0, ans=0.125 +2024-07-29 02:46:29,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=230408.0, ans=0.0 +2024-07-29 02:46:30,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=230421.33333333334, ans=0.0 +2024-07-29 02:46:34,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=230421.33333333334, ans=0.125 +2024-07-29 02:46:40,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.57 vs. limit=15.0 +2024-07-29 02:46:40,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=230434.66666666666, ans=0.035 +2024-07-29 02:46:52,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=230461.33333333334, ans=0.125 +2024-07-29 02:46:56,561 INFO [train.py:1114] (3/4) Epoch 17, batch 9300, loss[loss=0.1407, simple_loss=0.2281, pruned_loss=0.02668, over 4769.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2658, pruned_loss=0.04229, over 938483.02 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:47:15,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=230514.66666666666, ans=0.0 +2024-07-29 02:47:17,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=230514.66666666666, ans=0.125 +2024-07-29 02:47:24,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=230528.0, ans=0.2 +2024-07-29 02:47:28,550 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.719e+01 6.284e+01 7.337e+01 9.845e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 02:47:28,585 INFO [train.py:1114] (3/4) Epoch 17, batch 9350, loss[loss=0.1884, simple_loss=0.2649, pruned_loss=0.05592, over 4797.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2663, pruned_loss=0.04233, over 934852.74 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:47:28,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230541.33333333334, ans=0.125 +2024-07-29 02:47:28,933 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.56 vs. limit=22.5 +2024-07-29 02:47:29,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=230541.33333333334, ans=0.0 +2024-07-29 02:47:31,439 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.05 vs. 
limit=15.0 +2024-07-29 02:47:34,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=230554.66666666666, ans=0.025 +2024-07-29 02:47:43,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=230568.0, ans=0.125 +2024-07-29 02:47:45,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=230568.0, ans=0.0 +2024-07-29 02:47:51,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=230581.33333333334, ans=0.0 +2024-07-29 02:48:00,627 INFO [train.py:1114] (3/4) Epoch 17, batch 9400, loss[loss=0.1778, simple_loss=0.2672, pruned_loss=0.0442, over 4692.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2668, pruned_loss=0.0426, over 932589.45 frames. ], batch size: 13, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:48:04,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=230608.0, ans=0.0 +2024-07-29 02:48:25,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=230648.0, ans=0.0 +2024-07-29 02:48:27,439 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=230661.33333333334, ans=0.0 +2024-07-29 02:48:34,669 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.369e+01 5.567e+01 6.049e+01 6.960e+01 9.210e+01, threshold=1.210e+02, percent-clipped=0.0 +2024-07-29 02:48:34,703 INFO [train.py:1114] (3/4) Epoch 17, batch 9450, loss[loss=0.2096, simple_loss=0.2837, pruned_loss=0.06776, over 4787.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2665, pruned_loss=0.04244, over 932252.42 frames. ], batch size: 11, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:48:34,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=230674.66666666666, ans=0.95 +2024-07-29 02:48:34,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=230674.66666666666, ans=0.125 +2024-07-29 02:48:38,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=230674.66666666666, ans=0.2 +2024-07-29 02:48:50,384 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:48:50,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230701.33333333334, ans=0.125 +2024-07-29 02:48:51,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=230701.33333333334, ans=0.125 +2024-07-29 02:48:55,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230714.66666666666, ans=0.1 +2024-07-29 02:49:06,175 INFO [train.py:1114] (3/4) Epoch 17, batch 9500, loss[loss=0.1413, simple_loss=0.2365, pruned_loss=0.02306, over 4704.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2664, pruned_loss=0.04222, over 934697.09 frames. 
], batch size: 12, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:49:06,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=230741.33333333334, ans=0.0 +2024-07-29 02:49:15,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=230754.66666666666, ans=0.125 +2024-07-29 02:49:19,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=230768.0, ans=0.09899494936611666 +2024-07-29 02:49:27,221 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230781.33333333334, ans=0.125 +2024-07-29 02:49:29,867 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.27 vs. limit=22.5 +2024-07-29 02:49:37,902 INFO [train.py:1114] (3/4) Epoch 17, batch 9550, loss[loss=0.1565, simple_loss=0.2471, pruned_loss=0.03292, over 4775.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2667, pruned_loss=0.04275, over 931750.70 frames. ], batch size: 12, lr: 4.33e-03, grad_scale: 16.0 +2024-07-29 02:49:39,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.760e+01 5.645e+01 6.246e+01 7.009e+01 1.042e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 02:49:40,089 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.83 vs. limit=15.0 +2024-07-29 02:49:52,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=230834.66666666666, ans=0.0 +2024-07-29 02:49:56,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=230848.0, ans=0.125 +2024-07-29 02:50:04,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=230861.33333333334, ans=0.125 +2024-07-29 02:50:09,770 INFO [train.py:1114] (3/4) Epoch 17, batch 9600, loss[loss=0.2158, simple_loss=0.2934, pruned_loss=0.06913, over 3359.00 frames. ], tot_loss[loss=0.176, simple_loss=0.267, pruned_loss=0.04251, over 930461.23 frames. ], batch size: 35, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:50:19,265 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=230888.0, ans=0.0 +2024-07-29 02:50:19,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=230888.0, ans=0.1 +2024-07-29 02:50:26,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=230901.33333333334, ans=0.125 +2024-07-29 02:50:44,361 INFO [train.py:1114] (3/4) Epoch 17, batch 9650, loss[loss=0.1846, simple_loss=0.2755, pruned_loss=0.04689, over 4825.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2677, pruned_loss=0.04327, over 926551.46 frames. 
], batch size: 16, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:50:44,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230941.33333333334, ans=0.125 +2024-07-29 02:50:44,985 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.601e+01 5.884e+01 6.433e+01 7.222e+01 1.107e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 02:50:45,048 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=230941.33333333334, ans=0.025 +2024-07-29 02:50:48,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.78 vs. limit=5.0 +2024-07-29 02:50:51,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=230954.66666666666, ans=0.125 +2024-07-29 02:50:51,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=230954.66666666666, ans=0.125 +2024-07-29 02:50:57,833 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.38 vs. limit=10.0 +2024-07-29 02:50:59,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=230968.0, ans=0.125 +2024-07-29 02:51:01,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.98 vs. limit=15.0 +2024-07-29 02:51:08,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=230994.66666666666, ans=0.025 +2024-07-29 02:51:15,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.26 vs. limit=15.0 +2024-07-29 02:51:16,186 INFO [train.py:1114] (3/4) Epoch 17, batch 9700, loss[loss=0.1844, simple_loss=0.2798, pruned_loss=0.04449, over 4444.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2679, pruned_loss=0.04322, over 924665.18 frames. ], batch size: 26, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:51:32,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=231034.66666666666, ans=0.125 +2024-07-29 02:51:34,796 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.83 vs. limit=15.0 +2024-07-29 02:51:37,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=231048.0, ans=0.125 +2024-07-29 02:51:42,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.84 vs. limit=15.0 +2024-07-29 02:51:45,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=6.58 vs. limit=15.0 +2024-07-29 02:51:47,651 INFO [train.py:1114] (3/4) Epoch 17, batch 9750, loss[loss=0.2099, simple_loss=0.2943, pruned_loss=0.06277, over 4698.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.0429, over 925263.45 frames. 
], batch size: 15, lr: 4.33e-03, grad_scale: 32.0 +2024-07-29 02:51:48,239 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.114e+01 5.556e+01 6.243e+01 6.911e+01 1.115e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 02:51:52,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=231074.66666666666, ans=0.125 +2024-07-29 02:51:52,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231074.66666666666, ans=0.1 +2024-07-29 02:51:55,965 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=231088.0, ans=0.125 +2024-07-29 02:52:01,734 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:52:05,674 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.02 vs. limit=15.0 +2024-07-29 02:52:07,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=231114.66666666666, ans=0.125 +2024-07-29 02:52:13,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231128.0, ans=0.1 +2024-07-29 02:52:18,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=231141.33333333334, ans=0.025 +2024-07-29 02:52:19,057 INFO [train.py:1114] (3/4) Epoch 17, batch 9800, loss[loss=0.1457, simple_loss=0.2329, pruned_loss=0.02926, over 4709.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2662, pruned_loss=0.04309, over 925363.30 frames. ], batch size: 12, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:52:29,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231154.66666666666, ans=0.0 +2024-07-29 02:52:34,014 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.98 vs. limit=6.0 +2024-07-29 02:52:38,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=231181.33333333334, ans=0.05 +2024-07-29 02:52:38,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=231181.33333333334, ans=0.125 +2024-07-29 02:52:49,576 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=231208.0, ans=0.2 +2024-07-29 02:52:50,079 INFO [train.py:1114] (3/4) Epoch 17, batch 9850, loss[loss=0.1849, simple_loss=0.2799, pruned_loss=0.04492, over 4894.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2671, pruned_loss=0.04299, over 927807.21 frames. 
], batch size: 15, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:52:50,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.758e+01 6.441e+01 7.212e+01 9.230e+01, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 02:52:57,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=231221.33333333334, ans=0.025 +2024-07-29 02:53:04,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=231234.66666666666, ans=0.2 +2024-07-29 02:53:07,258 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=231234.66666666666, ans=0.0 +2024-07-29 02:53:22,319 INFO [train.py:1114] (3/4) Epoch 17, batch 9900, loss[loss=0.1621, simple_loss=0.2567, pruned_loss=0.03379, over 4836.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2671, pruned_loss=0.04307, over 926840.13 frames. ], batch size: 16, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:53:26,247 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:53:30,739 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231288.0, ans=0.0 +2024-07-29 02:53:48,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.39 vs. limit=22.5 +2024-07-29 02:53:49,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=231328.0, ans=0.0 +2024-07-29 02:53:50,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231328.0, ans=0.125 +2024-07-29 02:53:53,562 INFO [train.py:1114] (3/4) Epoch 17, batch 9950, loss[loss=0.1542, simple_loss=0.235, pruned_loss=0.03671, over 4502.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2677, pruned_loss=0.04409, over 928864.04 frames. ], batch size: 10, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:53:54,161 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.774e+01 5.766e+01 6.356e+01 7.245e+01 1.147e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 02:53:56,756 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=231341.33333333334, ans=0.1 +2024-07-29 02:54:06,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=231368.0, ans=0.2 +2024-07-29 02:54:23,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=231394.66666666666, ans=0.125 +2024-07-29 02:54:24,961 INFO [train.py:1114] (3/4) Epoch 17, batch 10000, loss[loss=0.2012, simple_loss=0.2834, pruned_loss=0.05948, over 4667.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2701, pruned_loss=0.045, over 926248.74 frames. 
], batch size: 16, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:54:25,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=231408.0, ans=0.125 +2024-07-29 02:54:39,886 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:54:53,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=231461.33333333334, ans=0.2 +2024-07-29 02:54:56,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=231474.66666666666, ans=0.0 +2024-07-29 02:55:00,064 INFO [train.py:1114] (3/4) Epoch 17, batch 10050, loss[loss=0.2019, simple_loss=0.2812, pruned_loss=0.06125, over 3505.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2723, pruned_loss=0.04607, over 913565.44 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:55:00,785 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.986e+01 5.675e+01 6.187e+01 6.969e+01 9.766e+01, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 02:55:05,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=231474.66666666666, ans=0.0 +2024-07-29 02:55:05,294 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.54 vs. limit=10.0 +2024-07-29 02:55:07,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=231474.66666666666, ans=0.2 +2024-07-29 02:55:07,135 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:55:09,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=231488.0, ans=0.025 +2024-07-29 02:55:27,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231528.0, ans=0.1 +2024-07-29 02:55:31,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=231528.0, ans=0.2 +2024-07-29 02:55:35,509 INFO [train.py:1114] (3/4) Epoch 17, batch 10100, loss[loss=0.19, simple_loss=0.2781, pruned_loss=0.05096, over 3335.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2762, pruned_loss=0.04999, over 860038.54 frames. 
], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:55:56,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=231568.0, ans=0.125 +2024-07-29 02:56:04,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=231581.33333333334, ans=0.125 +2024-07-29 02:56:04,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=231581.33333333334, ans=0.0 +2024-07-29 02:56:04,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=231581.33333333334, ans=0.1 +2024-07-29 02:56:06,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=231594.66666666666, ans=0.05 +2024-07-29 02:56:08,495 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.01 vs. limit=15.0 +2024-07-29 02:56:13,984 INFO [train.py:1114] (3/4) Epoch 17, batch 10150, loss[loss=0.2398, simple_loss=0.3176, pruned_loss=0.08103, over 3202.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2805, pruned_loss=0.05366, over 820459.47 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:56:14,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=231608.0, ans=0.09899494936611666 +2024-07-29 02:56:14,591 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.873e+01 6.975e+01 7.380e+01 8.032e+01 1.303e+02, threshold=1.476e+02, percent-clipped=1.0 +2024-07-29 02:56:15,692 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0 +2024-07-29 02:56:15,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=231608.0, ans=0.125 +2024-07-29 02:56:23,645 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=231621.33333333334, ans=0.0 +2024-07-29 02:56:45,758 INFO [train.py:1114] (3/4) Epoch 17, batch 10200, loss[loss=0.214, simple_loss=0.2932, pruned_loss=0.06741, over 3356.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2834, pruned_loss=0.05648, over 788929.35 frames. ], batch size: 35, lr: 4.32e-03, grad_scale: 32.0 +2024-07-29 02:56:48,709 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.26 vs. limit=15.0 +2024-07-29 02:56:50,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=231674.66666666666, ans=0.5 +2024-07-29 02:58:12,626 INFO [train.py:1114] (3/4) Epoch 18, batch 0, loss[loss=0.1463, simple_loss=0.2394, pruned_loss=0.02661, over 4855.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2394, pruned_loss=0.02661, over 4855.00 frames. ], batch size: 12, lr: 4.20e-03, grad_scale: 32.0 +2024-07-29 02:58:12,627 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 02:58:24,197 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1629, simple_loss=0.2668, pruned_loss=0.02955, over 944034.00 frames. 
+2024-07-29 02:58:24,198 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 02:58:26,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231705.33333333334, ans=0.1 +2024-07-29 02:58:26,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=231705.33333333334, ans=0.125 +2024-07-29 02:58:27,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=231705.33333333334, ans=0.2 +2024-07-29 02:58:31,741 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 02:58:37,924 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=231732.0, ans=0.0 +2024-07-29 02:58:44,123 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.569e+01 6.224e+01 6.772e+01 7.416e+01 8.385e+01, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 02:58:49,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=231745.33333333334, ans=0.125 +2024-07-29 02:58:54,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=231758.66666666666, ans=0.0 +2024-07-29 02:58:57,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=231758.66666666666, ans=0.2 +2024-07-29 02:58:59,033 INFO [train.py:1114] (3/4) Epoch 18, batch 50, loss[loss=0.1523, simple_loss=0.2424, pruned_loss=0.03106, over 4620.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2645, pruned_loss=0.04096, over 206833.46 frames. ], batch size: 11, lr: 4.20e-03, grad_scale: 32.0 +2024-07-29 02:59:01,538 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.47 vs. limit=15.0 +2024-07-29 02:59:02,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.53 vs. limit=22.5 +2024-07-29 02:59:08,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=231785.33333333334, ans=0.2 +2024-07-29 02:59:11,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=231798.66666666666, ans=0.0 +2024-07-29 02:59:19,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=231812.0, ans=0.1 +2024-07-29 02:59:20,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231812.0, ans=0.125 +2024-07-29 02:59:34,338 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.59 vs. limit=15.0 +2024-07-29 02:59:34,496 INFO [train.py:1114] (3/4) Epoch 18, batch 100, loss[loss=0.1674, simple_loss=0.2419, pruned_loss=0.04645, over 4637.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2676, pruned_loss=0.04249, over 366139.65 frames. 
], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 02:59:35,238 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=231838.66666666666, ans=0.025 +2024-07-29 02:59:39,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231838.66666666666, ans=0.1 +2024-07-29 02:59:40,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=24.05 vs. limit=22.5 +2024-07-29 02:59:44,521 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=231852.0, ans=0.2 +2024-07-29 02:59:51,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=231865.33333333334, ans=0.125 +2024-07-29 02:59:54,427 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.485e+01 5.467e+01 5.995e+01 6.645e+01 8.215e+01, threshold=1.199e+02, percent-clipped=0.0 +2024-07-29 02:59:54,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=231865.33333333334, ans=0.0 +2024-07-29 03:00:08,835 INFO [train.py:1114] (3/4) Epoch 18, batch 150, loss[loss=0.1431, simple_loss=0.2374, pruned_loss=0.02437, over 4613.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2663, pruned_loss=0.04197, over 494437.76 frames. ], batch size: 11, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:00:08,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=231905.33333333334, ans=0.0 +2024-07-29 03:00:09,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=231905.33333333334, ans=0.125 +2024-07-29 03:00:10,517 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.61 vs. limit=15.0 +2024-07-29 03:00:18,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=231918.66666666666, ans=0.125 +2024-07-29 03:00:20,188 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=231918.66666666666, ans=0.125 +2024-07-29 03:00:26,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=231932.0, ans=0.125 +2024-07-29 03:00:27,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=231932.0, ans=0.1 +2024-07-29 03:00:28,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=231932.0, ans=0.025 +2024-07-29 03:00:28,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=231945.33333333334, ans=0.125 +2024-07-29 03:00:35,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=231958.66666666666, ans=0.0 +2024-07-29 03:00:42,582 INFO [train.py:1114] (3/4) Epoch 18, batch 200, loss[loss=0.1769, simple_loss=0.2606, pruned_loss=0.04657, over 4455.00 frames. 
], tot_loss[loss=0.1755, simple_loss=0.2659, pruned_loss=0.04252, over 593941.03 frames. ], batch size: 21, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:00:48,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.07 vs. limit=15.0 +2024-07-29 03:00:51,441 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.25 vs. limit=15.0 +2024-07-29 03:01:08,542 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.881e+01 6.844e+01 7.850e+01 1.252e+02, threshold=1.369e+02, percent-clipped=1.0 +2024-07-29 03:01:15,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=232012.0, ans=0.0 +2024-07-29 03:01:20,676 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.62 vs. limit=15.0 +2024-07-29 03:01:50,242 INFO [train.py:1114] (3/4) Epoch 18, batch 250, loss[loss=0.1963, simple_loss=0.2934, pruned_loss=0.04956, over 4658.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2667, pruned_loss=0.0428, over 670914.52 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:01:58,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=232052.0, ans=0.07 +2024-07-29 03:02:24,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=232052.0, ans=0.2 +2024-07-29 03:02:34,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=232078.66666666666, ans=0.0 +2024-07-29 03:02:39,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232078.66666666666, ans=0.125 +2024-07-29 03:02:50,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232092.0, ans=0.1 +2024-07-29 03:02:58,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=232105.33333333334, ans=0.0 +2024-07-29 03:02:58,739 INFO [train.py:1114] (3/4) Epoch 18, batch 300, loss[loss=0.1523, simple_loss=0.2467, pruned_loss=0.02895, over 4806.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2645, pruned_loss=0.04199, over 730449.41 frames. ], batch size: 15, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:02:59,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.67 vs. 
limit=22.5 +2024-07-29 03:03:09,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232118.66666666666, ans=0.1 +2024-07-29 03:03:09,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=232118.66666666666, ans=0.2 +2024-07-29 03:03:17,149 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:03:17,682 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.434e+01 5.467e+01 6.061e+01 6.995e+01 1.248e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 03:03:30,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=232158.66666666666, ans=22.5 +2024-07-29 03:03:32,390 INFO [train.py:1114] (3/4) Epoch 18, batch 350, loss[loss=0.1718, simple_loss=0.2485, pruned_loss=0.0476, over 4945.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2648, pruned_loss=0.04153, over 776628.63 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:03:42,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=232185.33333333334, ans=0.1 +2024-07-29 03:03:47,616 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.40 vs. limit=15.0 +2024-07-29 03:03:50,963 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.02 vs. limit=15.0 +2024-07-29 03:03:53,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=232212.0, ans=0.125 +2024-07-29 03:03:57,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=232212.0, ans=0.0 +2024-07-29 03:03:58,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=232225.33333333334, ans=0.125 +2024-07-29 03:03:58,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=232225.33333333334, ans=0.125 +2024-07-29 03:04:00,499 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:04:02,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=232225.33333333334, ans=0.2 +2024-07-29 03:04:05,786 INFO [train.py:1114] (3/4) Epoch 18, batch 400, loss[loss=0.1974, simple_loss=0.2829, pruned_loss=0.05592, over 4695.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2655, pruned_loss=0.04221, over 813991.69 frames. 
], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:04:13,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=232252.0, ans=0.125 +2024-07-29 03:04:26,840 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.483e+01 6.110e+01 6.835e+01 9.648e+01, threshold=1.222e+02, percent-clipped=0.0 +2024-07-29 03:04:31,241 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=8.22 vs. limit=15.0 +2024-07-29 03:04:34,293 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:04:41,611 INFO [train.py:1114] (3/4) Epoch 18, batch 450, loss[loss=0.1867, simple_loss=0.2821, pruned_loss=0.04564, over 4638.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2655, pruned_loss=0.04157, over 839127.94 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:04:41,993 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.36 vs. limit=10.0 +2024-07-29 03:04:42,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=232305.33333333334, ans=0.125 +2024-07-29 03:04:53,570 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:05:05,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232345.33333333334, ans=0.125 +2024-07-29 03:05:15,092 INFO [train.py:1114] (3/4) Epoch 18, batch 500, loss[loss=0.2091, simple_loss=0.2917, pruned_loss=0.06319, over 4673.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2645, pruned_loss=0.04142, over 861618.70 frames. 
], batch size: 15, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:05:16,668 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=232372.0, ans=0.125 +2024-07-29 03:05:20,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232372.0, ans=0.1 +2024-07-29 03:05:24,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=232385.33333333334, ans=0.0 +2024-07-29 03:05:28,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=232398.66666666666, ans=0.2 +2024-07-29 03:05:34,179 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.530e+01 5.559e+01 6.071e+01 6.831e+01 9.618e+01, threshold=1.214e+02, percent-clipped=0.0 +2024-07-29 03:05:44,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=232425.33333333334, ans=0.125 +2024-07-29 03:05:46,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=232425.33333333334, ans=0.0 +2024-07-29 03:05:48,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232438.66666666666, ans=0.1 +2024-07-29 03:05:48,919 INFO [train.py:1114] (3/4) Epoch 18, batch 550, loss[loss=0.1829, simple_loss=0.2802, pruned_loss=0.04278, over 4575.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2648, pruned_loss=0.04142, over 877399.63 frames. ], batch size: 17, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:05:55,865 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:06:16,861 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.57 vs. limit=15.0 +2024-07-29 03:06:26,516 INFO [train.py:1114] (3/4) Epoch 18, batch 600, loss[loss=0.1957, simple_loss=0.2775, pruned_loss=0.05692, over 4646.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2646, pruned_loss=0.04152, over 892094.88 frames. ], batch size: 16, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:06:26,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=232505.33333333334, ans=0.0 +2024-07-29 03:06:34,424 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:06:37,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=232518.66666666666, ans=0.125 +2024-07-29 03:06:38,568 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.13 vs. 
limit=10.0 +2024-07-29 03:06:43,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=232532.0, ans=0.125 +2024-07-29 03:06:44,913 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.582e+01 6.053e+01 7.206e+01 1.079e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 03:06:47,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.09 vs. limit=15.0 +2024-07-29 03:06:49,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=232545.33333333334, ans=0.2 +2024-07-29 03:06:51,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=232545.33333333334, ans=0.125 +2024-07-29 03:06:58,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=232558.66666666666, ans=0.0 +2024-07-29 03:07:03,966 INFO [train.py:1114] (3/4) Epoch 18, batch 650, loss[loss=0.177, simple_loss=0.2639, pruned_loss=0.04501, over 4753.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2642, pruned_loss=0.0415, over 903764.82 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:07:09,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=232572.0, ans=0.125 +2024-07-29 03:07:16,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=232585.33333333334, ans=0.125 +2024-07-29 03:07:25,154 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.57 vs. limit=6.0 +2024-07-29 03:07:25,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=232598.66666666666, ans=0.0 +2024-07-29 03:07:38,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=232625.33333333334, ans=0.0 +2024-07-29 03:07:47,035 INFO [train.py:1114] (3/4) Epoch 18, batch 700, loss[loss=0.158, simple_loss=0.2575, pruned_loss=0.02923, over 4637.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2654, pruned_loss=0.04145, over 911651.94 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:07:53,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=232652.0, ans=0.2 +2024-07-29 03:07:53,957 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.17 vs. limit=12.0 +2024-07-29 03:07:54,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=232652.0, ans=0.125 +2024-07-29 03:08:05,613 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.658e+01 5.680e+01 6.121e+01 6.839e+01 1.044e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 03:08:06,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=232678.66666666666, ans=0.0 +2024-07-29 03:08:20,419 INFO [train.py:1114] (3/4) Epoch 18, batch 750, loss[loss=0.1653, simple_loss=0.2649, pruned_loss=0.03287, over 4698.00 frames. 
], tot_loss[loss=0.1732, simple_loss=0.2644, pruned_loss=0.04103, over 918252.78 frames. ], batch size: 13, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:08:23,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=232705.33333333334, ans=0.125 +2024-07-29 03:08:25,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=232705.33333333334, ans=0.1 +2024-07-29 03:08:27,825 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=232718.66666666666, ans=0.1 +2024-07-29 03:08:30,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=232718.66666666666, ans=0.0 +2024-07-29 03:08:53,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=232732.0, ans=0.0 +2024-07-29 03:08:54,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.92 vs. limit=22.5 +2024-07-29 03:09:01,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=232745.33333333334, ans=0.125 +2024-07-29 03:09:02,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=232758.66666666666, ans=0.125 +2024-07-29 03:09:10,164 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=10.84 vs. limit=15.0 +2024-07-29 03:09:10,286 INFO [train.py:1114] (3/4) Epoch 18, batch 800, loss[loss=0.158, simple_loss=0.2363, pruned_loss=0.03984, over 4863.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2641, pruned_loss=0.04116, over 923517.85 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:09:10,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=232772.0, ans=0.125 +2024-07-29 03:09:22,204 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=232785.33333333334, ans=0.125 +2024-07-29 03:09:25,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=232798.66666666666, ans=0.0 +2024-07-29 03:09:25,964 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.90 vs. limit=22.5 +2024-07-29 03:09:28,890 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.641e+01 5.634e+01 6.203e+01 6.793e+01 1.019e+02, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 03:09:43,907 INFO [train.py:1114] (3/4) Epoch 18, batch 850, loss[loss=0.1723, simple_loss=0.2624, pruned_loss=0.04105, over 4665.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2644, pruned_loss=0.04143, over 927720.34 frames. ], batch size: 14, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:09:44,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=232838.66666666666, ans=0.0 +2024-07-29 03:09:50,361 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.85 vs. 
limit=6.0 +2024-07-29 03:09:58,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=4.04 vs. limit=12.0 +2024-07-29 03:10:07,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=232878.66666666666, ans=0.125 +2024-07-29 03:10:10,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=232892.0, ans=0.1 +2024-07-29 03:10:11,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=232892.0, ans=0.125 +2024-07-29 03:10:18,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=232892.0, ans=0.2 +2024-07-29 03:10:19,905 INFO [train.py:1114] (3/4) Epoch 18, batch 900, loss[loss=0.1406, simple_loss=0.2147, pruned_loss=0.03325, over 4842.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2644, pruned_loss=0.04148, over 928689.10 frames. ], batch size: 12, lr: 4.19e-03, grad_scale: 32.0 +2024-07-29 03:10:32,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=232918.66666666666, ans=0.0 +2024-07-29 03:10:43,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.76 vs. limit=15.0 +2024-07-29 03:10:44,076 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.656e+01 6.090e+01 7.210e+01 1.010e+02, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 03:10:45,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=232945.33333333334, ans=0.025 +2024-07-29 03:10:57,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=232958.66666666666, ans=0.125 +2024-07-29 03:11:02,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=232958.66666666666, ans=0.125 +2024-07-29 03:11:04,782 INFO [train.py:1114] (3/4) Epoch 18, batch 950, loss[loss=0.1708, simple_loss=0.2546, pruned_loss=0.04346, over 4769.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.264, pruned_loss=0.04106, over 930186.37 frames. ], batch size: 12, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:11:15,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.42 vs. limit=15.0 +2024-07-29 03:11:29,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=233012.0, ans=0.125 +2024-07-29 03:11:30,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=233012.0, ans=0.2 +2024-07-29 03:11:33,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=233025.33333333334, ans=0.2 +2024-07-29 03:11:38,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. 
limit=15.0 +2024-07-29 03:11:39,994 INFO [train.py:1114] (3/4) Epoch 18, batch 1000, loss[loss=0.1479, simple_loss=0.2286, pruned_loss=0.03361, over 4959.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2652, pruned_loss=0.04177, over 930001.05 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:11:40,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=233038.66666666666, ans=0.05 +2024-07-29 03:11:50,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=233052.0, ans=0.125 +2024-07-29 03:11:54,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=233065.33333333334, ans=0.125 +2024-07-29 03:11:58,672 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.759e+01 5.660e+01 6.268e+01 7.166e+01 1.041e+02, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 03:11:58,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=233065.33333333334, ans=0.125 +2024-07-29 03:12:02,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=233078.66666666666, ans=0.125 +2024-07-29 03:12:14,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=233092.0, ans=0.95 +2024-07-29 03:12:15,362 INFO [train.py:1114] (3/4) Epoch 18, batch 1050, loss[loss=0.1753, simple_loss=0.273, pruned_loss=0.03877, over 4876.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.264, pruned_loss=0.04129, over 932247.55 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:12:30,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=233118.66666666666, ans=0.2 +2024-07-29 03:12:57,205 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.67 vs. limit=22.5 +2024-07-29 03:13:24,182 INFO [train.py:1114] (3/4) Epoch 18, batch 1100, loss[loss=0.1608, simple_loss=0.2587, pruned_loss=0.0315, over 4903.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2638, pruned_loss=0.04109, over 934827.21 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:14:24,121 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.42 vs. limit=15.0 +2024-07-29 03:14:26,844 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.721e+01 5.370e+01 5.951e+01 6.699e+01 1.093e+02, threshold=1.190e+02, percent-clipped=0.0 +2024-07-29 03:14:45,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=233212.0, ans=0.0 +2024-07-29 03:14:45,805 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.13 vs. limit=15.0 +2024-07-29 03:15:11,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.72 vs. 
limit=15.0 +2024-07-29 03:15:12,271 INFO [train.py:1114] (3/4) Epoch 18, batch 1150, loss[loss=0.1928, simple_loss=0.2822, pruned_loss=0.0517, over 4910.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2656, pruned_loss=0.04186, over 934733.87 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:15:25,726 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=233252.0, ans=0.125 +2024-07-29 03:15:27,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=233252.0, ans=0.125 +2024-07-29 03:15:33,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=233265.33333333334, ans=0.0 +2024-07-29 03:15:54,166 INFO [train.py:1114] (3/4) Epoch 18, batch 1200, loss[loss=0.1728, simple_loss=0.2751, pruned_loss=0.03521, over 4869.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2667, pruned_loss=0.04229, over 934105.19 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:15:54,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=233305.33333333334, ans=0.0 +2024-07-29 03:15:55,574 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:16:02,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=233318.66666666666, ans=0.125 +2024-07-29 03:19:13,688 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.527e+01 5.938e+01 6.741e+01 1.045e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 03:19:15,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233345.33333333334, ans=0.1 +2024-07-29 03:19:15,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=233345.33333333334, ans=0.0 +2024-07-29 03:19:23,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233358.66666666666, ans=0.1 +2024-07-29 03:19:26,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233358.66666666666, ans=0.1 +2024-07-29 03:19:30,341 INFO [train.py:1114] (3/4) Epoch 18, batch 1250, loss[loss=0.182, simple_loss=0.2724, pruned_loss=0.04581, over 4802.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2665, pruned_loss=0.04179, over 938064.54 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:19:33,846 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=233372.0, ans=0.125 +2024-07-29 03:19:46,036 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.95 vs. limit=22.5 +2024-07-29 03:19:53,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.53 vs. 
limit=6.0 +2024-07-29 03:19:54,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=233412.0, ans=0.0 +2024-07-29 03:19:56,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=233425.33333333334, ans=0.025 +2024-07-29 03:20:03,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.whiten.whitening_limit, batch_count=233438.66666666666, ans=12.0 +2024-07-29 03:20:03,284 INFO [train.py:1114] (3/4) Epoch 18, batch 1300, loss[loss=0.1853, simple_loss=0.2698, pruned_loss=0.05043, over 4704.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2654, pruned_loss=0.04143, over 939409.68 frames. ], batch size: 19, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:20:21,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.415e+01 5.468e+01 6.194e+01 6.881e+01 8.786e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 03:20:24,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=17.60 vs. limit=22.5 +2024-07-29 03:20:31,651 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=233492.0, ans=0.0 +2024-07-29 03:20:35,251 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=233492.0, ans=0.125 +2024-07-29 03:20:38,028 INFO [train.py:1114] (3/4) Epoch 18, batch 1350, loss[loss=0.1586, simple_loss=0.2579, pruned_loss=0.02965, over 4754.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2653, pruned_loss=0.0412, over 941287.41 frames. ], batch size: 13, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:20:38,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233505.33333333334, ans=0.125 +2024-07-29 03:20:41,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=233505.33333333334, ans=0.2 +2024-07-29 03:21:37,804 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.89 vs. limit=15.0 +2024-07-29 03:22:01,371 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.61 vs. limit=22.5 +2024-07-29 03:22:31,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.89 vs. limit=15.0 +2024-07-29 03:22:33,506 INFO [train.py:1114] (3/4) Epoch 18, batch 1400, loss[loss=0.154, simple_loss=0.2367, pruned_loss=0.03564, over 4713.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2653, pruned_loss=0.04131, over 943150.49 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:22:37,360 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=9.52 vs. 
limit=15.0 +2024-07-29 03:22:43,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=233585.33333333334, ans=0.0 +2024-07-29 03:22:57,219 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.738e+01 5.887e+01 6.413e+01 7.105e+01 1.184e+02, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 03:22:58,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=233612.0, ans=0.2 +2024-07-29 03:23:26,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten.whitening_limit, batch_count=233625.33333333334, ans=15.0 +2024-07-29 03:23:33,208 INFO [train.py:1114] (3/4) Epoch 18, batch 1450, loss[loss=0.2046, simple_loss=0.2976, pruned_loss=0.05576, over 4688.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2668, pruned_loss=0.04182, over 943113.82 frames. ], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:23:39,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=233638.66666666666, ans=0.125 +2024-07-29 03:23:48,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=233665.33333333334, ans=0.2 +2024-07-29 03:23:58,858 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.98 vs. limit=22.5 +2024-07-29 03:24:01,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=233692.0, ans=0.125 +2024-07-29 03:24:09,766 INFO [train.py:1114] (3/4) Epoch 18, batch 1500, loss[loss=0.1967, simple_loss=0.2939, pruned_loss=0.04976, over 4816.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2676, pruned_loss=0.04218, over 942692.13 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:24:11,231 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=233705.33333333334, ans=0.1 +2024-07-29 03:24:44,217 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.781e+01 5.562e+01 6.096e+01 6.763e+01 1.145e+02, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 03:24:51,076 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.31 vs. limit=15.0 +2024-07-29 03:24:56,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=233758.66666666666, ans=0.125 +2024-07-29 03:25:14,229 INFO [train.py:1114] (3/4) Epoch 18, batch 1550, loss[loss=0.179, simple_loss=0.2722, pruned_loss=0.04293, over 4897.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2666, pruned_loss=0.04231, over 939040.66 frames. 
], batch size: 15, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:25:17,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=233772.0, ans=0.125 +2024-07-29 03:25:27,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=233785.33333333334, ans=0.0 +2024-07-29 03:25:28,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233785.33333333334, ans=0.1 +2024-07-29 03:25:29,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=233785.33333333334, ans=0.0 +2024-07-29 03:25:31,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=233785.33333333334, ans=0.0 +2024-07-29 03:25:32,863 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.36 vs. limit=12.0 +2024-07-29 03:25:36,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=233798.66666666666, ans=0.1 +2024-07-29 03:25:40,446 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=233812.0, ans=0.0 +2024-07-29 03:25:52,678 INFO [train.py:1114] (3/4) Epoch 18, batch 1600, loss[loss=0.1821, simple_loss=0.2732, pruned_loss=0.04553, over 4872.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2673, pruned_loss=0.04286, over 937849.67 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 64.0 +2024-07-29 03:25:54,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=233838.66666666666, ans=0.2 +2024-07-29 03:25:55,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=233838.66666666666, ans=0.125 +2024-07-29 03:25:58,404 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=233838.66666666666, ans=0.05 +2024-07-29 03:26:12,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=233865.33333333334, ans=0.0 +2024-07-29 03:26:12,703 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.717e+01 5.724e+01 6.283e+01 7.250e+01 9.354e+01, threshold=1.257e+02, percent-clipped=0.0 +2024-07-29 03:26:12,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=233878.66666666666, ans=0.2 +2024-07-29 03:26:15,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=11.12 vs. limit=15.0 +2024-07-29 03:26:28,468 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=233878.66666666666, ans=0.0 +2024-07-29 03:26:33,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.57 vs. 
limit=15.0 +2024-07-29 03:26:35,607 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=233892.0, ans=0.125 +2024-07-29 03:26:37,503 INFO [train.py:1114] (3/4) Epoch 18, batch 1650, loss[loss=0.1466, simple_loss=0.2489, pruned_loss=0.02214, over 4651.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2665, pruned_loss=0.04296, over 938000.51 frames. ], batch size: 14, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:26:42,769 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.57 vs. limit=6.0 +2024-07-29 03:26:56,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=14.92 vs. limit=22.5 +2024-07-29 03:27:10,158 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.04 vs. limit=6.0 +2024-07-29 03:27:42,593 INFO [train.py:1114] (3/4) Epoch 18, batch 1700, loss[loss=0.1634, simple_loss=0.2538, pruned_loss=0.03649, over 4704.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2664, pruned_loss=0.04285, over 939421.37 frames. ], batch size: 11, lr: 4.18e-03, grad_scale: 32.0 +2024-07-29 03:27:43,758 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=512, metric=2.91 vs. limit=15.0 +2024-07-29 03:27:47,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0 +2024-07-29 03:27:56,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=233998.66666666666, ans=0.0 +2024-07-29 03:27:58,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=233998.66666666666, ans=0.125 +2024-07-29 03:28:03,945 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.990e+01 5.769e+01 6.208e+01 7.214e+01 1.058e+02, threshold=1.242e+02, percent-clipped=0.0 +2024-07-29 03:28:15,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=234025.33333333334, ans=0.125 +2024-07-29 03:28:17,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=234038.66666666666, ans=0.04949747468305833 +2024-07-29 03:28:18,161 INFO [train.py:1114] (3/4) Epoch 18, batch 1750, loss[loss=0.1508, simple_loss=0.2352, pruned_loss=0.03321, over 4818.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2654, pruned_loss=0.04219, over 940151.69 frames. 
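], batch size: 11, lr: 4.18e-03, grad_scale: 32.0

Each batch summary also logs `grad_scale`, the dynamic loss-scaling factor used for mixed-precision training: it doubles from 32.0 to 64.0 at batch 1300 and is back to 32.0 by batch 1650 above, the usual grow-after-clean-steps, halve-on-overflow behaviour of a dynamic scaler such as PyTorch's `GradScaler`. A schematic of that update rule (illustrative class, not the training script's actual code):

```python
# Schematic of dynamic loss scaling as reflected in the logged grad_scale
# (32.0 -> 64.0 at batch 1300, back to 32.0 by batch 1650). Illustrative only;
# PyTorch's torch.cuda.amp.GradScaler implements the production version.
class DynamicLossScale:
    def __init__(self, init_scale: float = 32.0, growth_interval: int = 2000):
        self.scale = init_scale
        self.growth_interval = growth_interval
        self._good_steps = 0

    def update(self, found_inf: bool) -> None:
        if found_inf:
            self.scale /= 2.0      # overflow: skip this step, back off the scale
            self._good_steps = 0
        else:
            self._good_steps += 1
            if self._good_steps == self.growth_interval:
                self.scale *= 2.0  # a long run of clean steps: try a larger scale
                self._good_steps = 0
```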
+2024-07-29 03:28:29,706 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=234052.0, ans=0.0 +2024-07-29 03:28:31,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=234065.33333333334, ans=0.0 +2024-07-29 03:28:34,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=234065.33333333334, ans=0.07 +2024-07-29 03:28:44,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=234092.0, ans=0.125 +2024-07-29 03:28:51,356 INFO [train.py:1114] (3/4) Epoch 18, batch 1800, loss[loss=0.1868, simple_loss=0.2665, pruned_loss=0.05359, over 4629.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2647, pruned_loss=0.04243, over 940855.96 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:28:59,907 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.53 vs. limit=12.0 +2024-07-29 03:29:01,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234118.66666666666, ans=0.0 +2024-07-29 03:29:03,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.attention_skip_rate, batch_count=234118.66666666666, ans=0.0 +2024-07-29 03:29:04,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=234132.0, ans=0.125 +2024-07-29 03:29:09,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=234132.0, ans=0.125 +2024-07-29 03:29:10,856 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+01 5.659e+01 6.366e+01 7.110e+01 1.077e+02, threshold=1.273e+02, percent-clipped=0.0 +2024-07-29 03:29:13,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234145.33333333334, ans=0.1 +2024-07-29 03:29:13,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234145.33333333334, ans=0.1 +2024-07-29 03:29:14,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=234145.33333333334, ans=0.0 +2024-07-29 03:29:25,997 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=9.96 vs. limit=15.0 +2024-07-29 03:29:27,020 INFO [train.py:1114] (3/4) Epoch 18, batch 1850, loss[loss=0.1812, simple_loss=0.2685, pruned_loss=0.04698, over 4814.00 frames. ], tot_loss[loss=0.174, simple_loss=0.264, pruned_loss=0.04203, over 940610.90 frames. 
], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:29:34,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=234172.0, ans=0.2 +2024-07-29 03:29:41,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=234185.33333333334, ans=0.0 +2024-07-29 03:29:48,506 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=234198.66666666666, ans=0.125 +2024-07-29 03:29:57,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=234225.33333333334, ans=0.95 +2024-07-29 03:30:03,547 INFO [train.py:1114] (3/4) Epoch 18, batch 1900, loss[loss=0.2036, simple_loss=0.302, pruned_loss=0.05261, over 4671.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2655, pruned_loss=0.04244, over 941505.43 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:30:05,632 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=234238.66666666666, ans=0.125 +2024-07-29 03:30:06,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=234238.66666666666, ans=0.07 +2024-07-29 03:30:17,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234265.33333333334, ans=0.0 +2024-07-29 03:30:22,650 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.553e+01 6.272e+01 7.085e+01 9.977e+01, threshold=1.254e+02, percent-clipped=0.0 +2024-07-29 03:30:26,218 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.36 vs. limit=15.0 +2024-07-29 03:30:28,193 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.72 vs. limit=22.5 +2024-07-29 03:30:31,929 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=234292.0, ans=0.02 +2024-07-29 03:30:36,381 INFO [train.py:1114] (3/4) Epoch 18, batch 1950, loss[loss=0.1632, simple_loss=0.2534, pruned_loss=0.03644, over 4888.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.04284, over 943653.16 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:30:47,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=234318.66666666666, ans=0.0 +2024-07-29 03:30:48,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=234318.66666666666, ans=0.125 +2024-07-29 03:30:49,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=234332.0, ans=0.07 +2024-07-29 03:30:55,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234332.0, ans=0.1 +2024-07-29 03:30:58,980 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.47 vs. 
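limit=22.5

The `optim.py:487` WARNING lines give five quantiles (min, 25%, median, 75%, max) of recent gradient norms; the logged threshold consistently equals `Clipping_scale` (2.0) times the median, e.g. 2.0 * 6.272e+01 ≈ 1.254e+02 in the entry just above, and `percent-clipped` reports how often a batch's gradient exceeded it. A minimal sketch of median-tracking gradient clipping along these lines, with assumed names and window size (not the actual `optim.py`):

```python
# Minimal sketch of adaptive gradient clipping whose threshold tracks
# clipping_scale * median of recent gradient norms, matching the quartile
# WARNINGs above. Names and window size are assumptions, not the real optim.py.
import torch

def clip_by_recent_median(params, recent_norms, clipping_scale=2.0):
    grads = [p.grad for p in params if p.grad is not None]
    total_norm = torch.norm(torch.stack([g.norm() for g in grads]))
    recent_norms.append(total_norm.item())
    del recent_norms[:-128]  # keep a sliding window of recent batch norms
    q = torch.quantile(torch.tensor(recent_norms),
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # 2.0 * median
    clipped = total_norm.item() > threshold
    if clipped:
        for g in grads:
            g.mul_(threshold / total_norm.item())  # counted in percent-clipped
    return q.tolist(), threshold, clipped
```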
+2024-07-29 03:31:01,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=234345.33333333334, ans=0.0 +2024-07-29 03:31:10,163 INFO [train.py:1114] (3/4) Epoch 18, batch 2000, loss[loss=0.1692, simple_loss=0.254, pruned_loss=0.04219, over 4806.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.268, pruned_loss=0.04327, over 940618.34 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:31:18,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=234385.33333333334, ans=0.125 +2024-07-29 03:31:33,958 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.658e+01 6.506e+01 7.206e+01 1.041e+02, threshold=1.301e+02, percent-clipped=0.0 +2024-07-29 03:31:36,128 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:31:42,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=234425.33333333334, ans=0.125 +2024-07-29 03:31:44,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=234425.33333333334, ans=0.125 +2024-07-29 03:31:48,063 INFO [train.py:1114] (3/4) Epoch 18, batch 2050, loss[loss=0.1686, simple_loss=0.2595, pruned_loss=0.03881, over 4629.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2669, pruned_loss=0.04271, over 938659.49 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:31:57,504 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.39 vs. limit=15.0 +2024-07-29 03:32:43,647 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.05 vs. limit=8.0 +2024-07-29 03:32:56,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=234492.0, ans=0.125 +2024-07-29 03:32:57,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.70 vs. limit=15.0 +2024-07-29 03:33:00,549 INFO [train.py:1114] (3/4) Epoch 18, batch 2100, loss[loss=0.1745, simple_loss=0.276, pruned_loss=0.03646, over 4758.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2659, pruned_loss=0.04215, over 940431.84 frames. 
], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:33:01,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=234505.33333333334, ans=0.0 +2024-07-29 03:33:03,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234505.33333333334, ans=0.1 +2024-07-29 03:33:04,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234505.33333333334, ans=0.0 +2024-07-29 03:33:05,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=234505.33333333334, ans=0.025 +2024-07-29 03:33:11,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=234518.66666666666, ans=0.125 +2024-07-29 03:33:11,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=234518.66666666666, ans=0.125 +2024-07-29 03:33:14,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=234518.66666666666, ans=0.0 +2024-07-29 03:33:22,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.558e+01 5.524e+01 6.278e+01 7.367e+01 1.141e+02, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 03:33:51,006 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=234545.33333333334, ans=0.125 +2024-07-29 03:33:53,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=234558.66666666666, ans=0.125 +2024-07-29 03:33:58,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=234558.66666666666, ans=0.2 +2024-07-29 03:33:59,993 INFO [train.py:1114] (3/4) Epoch 18, batch 2150, loss[loss=0.1652, simple_loss=0.2518, pruned_loss=0.03935, over 4889.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2652, pruned_loss=0.04183, over 943670.53 frames. 
], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:34:00,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=234572.0, ans=0.1 +2024-07-29 03:34:06,266 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=234585.33333333334, ans=0.0 +2024-07-29 03:34:08,978 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=234585.33333333334, ans=0.1 +2024-07-29 03:34:11,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=234585.33333333334, ans=0.0 +2024-07-29 03:34:29,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234625.33333333334, ans=0.0 +2024-07-29 03:34:29,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=234625.33333333334, ans=0.125 +2024-07-29 03:34:33,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=234625.33333333334, ans=0.0 +2024-07-29 03:34:36,796 INFO [train.py:1114] (3/4) Epoch 18, batch 2200, loss[loss=0.1832, simple_loss=0.2869, pruned_loss=0.03971, over 4808.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2643, pruned_loss=0.04167, over 943101.05 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:34:41,754 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:34:57,725 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.703e+01 6.363e+01 7.397e+01 1.281e+02, threshold=1.273e+02, percent-clipped=1.0 +2024-07-29 03:35:01,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=234678.66666666666, ans=0.125 +2024-07-29 03:35:11,871 INFO [train.py:1114] (3/4) Epoch 18, batch 2250, loss[loss=0.1548, simple_loss=0.2522, pruned_loss=0.02874, over 4698.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2635, pruned_loss=0.04124, over 941586.12 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:35:24,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=234718.66666666666, ans=0.125 +2024-07-29 03:35:45,218 INFO [train.py:1114] (3/4) Epoch 18, batch 2300, loss[loss=0.1875, simple_loss=0.2535, pruned_loss=0.06081, over 4950.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2637, pruned_loss=0.0418, over 939572.13 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:35:50,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234772.0, ans=0.125 +2024-07-29 03:35:51,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=17.97 vs. 
limit=22.5 +2024-07-29 03:35:57,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=234785.33333333334, ans=0.2 +2024-07-29 03:36:06,803 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.542e+01 5.677e+01 6.195e+01 6.878e+01 1.027e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 03:36:09,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=234812.0, ans=0.125 +2024-07-29 03:36:09,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=234812.0, ans=0.125 +2024-07-29 03:36:15,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=234825.33333333334, ans=0.2 +2024-07-29 03:36:19,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.04 vs. limit=15.0 +2024-07-29 03:36:20,963 INFO [train.py:1114] (3/4) Epoch 18, batch 2350, loss[loss=0.2164, simple_loss=0.294, pruned_loss=0.06942, over 4635.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2638, pruned_loss=0.0418, over 941835.33 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:36:37,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=234865.33333333334, ans=0.0 +2024-07-29 03:36:38,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=234865.33333333334, ans=0.0 +2024-07-29 03:36:40,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.37 vs. limit=15.0 +2024-07-29 03:36:44,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.09 vs. limit=6.0 +2024-07-29 03:36:52,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=234892.0, ans=0.125 +2024-07-29 03:36:52,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=234892.0, ans=0.125 +2024-07-29 03:36:55,207 INFO [train.py:1114] (3/4) Epoch 18, batch 2400, loss[loss=0.1643, simple_loss=0.2625, pruned_loss=0.03301, over 4644.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2644, pruned_loss=0.04185, over 941693.11 frames. ], batch size: 12, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:37:08,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.38 vs. 
limit=15.0 +2024-07-29 03:37:15,794 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.568e+01 6.148e+01 7.129e+01 1.066e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 03:37:31,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=234958.66666666666, ans=0.125 +2024-07-29 03:37:40,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=234958.66666666666, ans=0.0 +2024-07-29 03:37:42,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.85 vs. limit=15.0 +2024-07-29 03:37:44,177 INFO [train.py:1114] (3/4) Epoch 18, batch 2450, loss[loss=0.1727, simple_loss=0.271, pruned_loss=0.03726, over 4692.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2657, pruned_loss=0.04249, over 937865.61 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:37:45,889 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.33 vs. limit=22.5 +2024-07-29 03:37:48,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.27 vs. limit=15.0 +2024-07-29 03:37:49,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=8.14 vs. limit=15.0 +2024-07-29 03:37:53,064 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.57 vs. limit=10.0 +2024-07-29 03:38:12,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=235012.0, ans=0.125 +2024-07-29 03:38:41,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235012.0, ans=0.1 +2024-07-29 03:38:41,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.61 vs. limit=15.0 +2024-07-29 03:38:42,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=235012.0, ans=0.07 +2024-07-29 03:38:45,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235012.0, ans=0.1 +2024-07-29 03:38:52,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235025.33333333334, ans=0.125 +2024-07-29 03:38:53,620 INFO [train.py:1114] (3/4) Epoch 18, batch 2500, loss[loss=0.1937, simple_loss=0.2998, pruned_loss=0.04386, over 4801.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.266, pruned_loss=0.04242, over 939918.26 frames. ], batch size: 14, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:39:59,998 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235052.0, ans=0.0 +2024-07-29 03:40:04,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.72 vs. 
limit=15.0 +2024-07-29 03:40:08,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.687e+01 5.526e+01 6.445e+01 7.148e+01 1.003e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 03:40:09,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=235078.66666666666, ans=0.125 +2024-07-29 03:40:20,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=235092.0, ans=15.0 +2024-07-29 03:40:21,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=235092.0, ans=0.125 +2024-07-29 03:40:25,116 INFO [train.py:1114] (3/4) Epoch 18, batch 2550, loss[loss=0.1453, simple_loss=0.2313, pruned_loss=0.02962, over 4810.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2653, pruned_loss=0.042, over 939184.76 frames. ], batch size: 11, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:40:25,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=235105.33333333334, ans=0.125 +2024-07-29 03:40:25,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=235105.33333333334, ans=0.125 +2024-07-29 03:40:27,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=235105.33333333334, ans=0.0 +2024-07-29 03:41:10,217 INFO [train.py:1114] (3/4) Epoch 18, batch 2600, loss[loss=0.1697, simple_loss=0.2529, pruned_loss=0.04322, over 4891.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2648, pruned_loss=0.04182, over 938012.86 frames. ], batch size: 13, lr: 4.17e-03, grad_scale: 32.0 +2024-07-29 03:41:27,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235185.33333333334, ans=0.1 +2024-07-29 03:41:30,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=235198.66666666666, ans=0.0 +2024-07-29 03:41:36,741 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.470e+01 5.697e+01 6.154e+01 6.937e+01 9.396e+01, threshold=1.231e+02, percent-clipped=0.0 +2024-07-29 03:41:48,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=235212.0, ans=0.125 +2024-07-29 03:41:53,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=235225.33333333334, ans=0.125 +2024-07-29 03:41:58,071 INFO [train.py:1114] (3/4) Epoch 18, batch 2650, loss[loss=0.1876, simple_loss=0.2721, pruned_loss=0.05152, over 4632.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2648, pruned_loss=0.04163, over 939981.72 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:42:04,830 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=235238.66666666666, ans=0.0 +2024-07-29 03:42:13,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=235265.33333333334, ans=0.125 +2024-07-29 03:42:31,827 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.56 vs. 
limit=15.0 +2024-07-29 03:42:36,015 INFO [train.py:1114] (3/4) Epoch 18, batch 2700, loss[loss=0.1648, simple_loss=0.2487, pruned_loss=0.04048, over 4740.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2651, pruned_loss=0.04173, over 939827.42 frames. ], batch size: 14, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:43:01,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=235332.0, ans=0.0 +2024-07-29 03:43:05,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=235332.0, ans=0.2 +2024-07-29 03:43:06,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=235332.0, ans=0.0 +2024-07-29 03:43:06,503 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.340e+01 5.534e+01 6.342e+01 7.179e+01 1.053e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 03:43:33,213 INFO [train.py:1114] (3/4) Epoch 18, batch 2750, loss[loss=0.172, simple_loss=0.2619, pruned_loss=0.04103, over 4714.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2642, pruned_loss=0.04143, over 939537.44 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:43:39,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235385.33333333334, ans=0.1 +2024-07-29 03:43:45,484 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.39 vs. limit=22.5 +2024-07-29 03:43:48,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.76 vs. limit=15.0 +2024-07-29 03:43:54,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=235398.66666666666, ans=0.125 +2024-07-29 03:43:55,399 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=235398.66666666666, ans=0.2 +2024-07-29 03:43:58,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=235398.66666666666, ans=0.125 +2024-07-29 03:44:00,834 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.71 vs. limit=15.0 +2024-07-29 03:44:03,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=235412.0, ans=0.125 +2024-07-29 03:44:07,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=235425.33333333334, ans=0.1 +2024-07-29 03:44:15,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=235438.66666666666, ans=0.125 +2024-07-29 03:44:15,747 INFO [train.py:1114] (3/4) Epoch 18, batch 2800, loss[loss=0.2448, simple_loss=0.3238, pruned_loss=0.08284, over 3451.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2643, pruned_loss=0.04158, over 937426.85 frames. ], batch size: 35, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:44:21,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.17 vs. 
limit=15.0 +2024-07-29 03:44:25,053 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0 +2024-07-29 03:44:25,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.41 vs. limit=15.0 +2024-07-29 03:44:29,626 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:44:32,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=235465.33333333334, ans=0.0 +2024-07-29 03:44:36,877 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.862e+01 5.675e+01 6.325e+01 7.095e+01 1.073e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 03:44:37,916 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.44 vs. limit=12.0 +2024-07-29 03:46:35,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235492.0, ans=0.125 +2024-07-29 03:46:37,207 INFO [train.py:1114] (3/4) Epoch 18, batch 2850, loss[loss=0.1489, simple_loss=0.2398, pruned_loss=0.02898, over 4964.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2646, pruned_loss=0.04153, over 935904.94 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:46:54,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=235532.0, ans=0.125 +2024-07-29 03:46:56,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=235545.33333333334, ans=0.125 +2024-07-29 03:46:59,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=235545.33333333334, ans=0.05 +2024-07-29 03:47:01,883 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:47:10,079 INFO [train.py:1114] (3/4) Epoch 18, batch 2900, loss[loss=0.1783, simple_loss=0.2675, pruned_loss=0.04456, over 4839.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2658, pruned_loss=0.04184, over 939704.61 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:47:21,740 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=235585.33333333334, ans=0.025 +2024-07-29 03:47:25,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=235598.66666666666, ans=0.025 +2024-07-29 03:47:29,764 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.582e+01 5.690e+01 6.267e+01 7.332e+01 1.125e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 03:47:40,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=235625.33333333334, ans=0.1 +2024-07-29 03:47:43,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=235625.33333333334, ans=0.125 +2024-07-29 03:47:45,813 INFO [train.py:1114] (3/4) Epoch 18, batch 2950, loss[loss=0.1546, simple_loss=0.2531, pruned_loss=0.02804, over 4716.00 frames. 
], tot_loss[loss=0.1739, simple_loss=0.2645, pruned_loss=0.04164, over 939000.54 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:47:47,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=235638.66666666666, ans=0.025 +2024-07-29 03:47:56,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.18 vs. limit=12.0 +2024-07-29 03:48:02,746 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=235665.33333333334, ans=0.125 +2024-07-29 03:48:17,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=235692.0, ans=0.0 +2024-07-29 03:48:21,412 INFO [train.py:1114] (3/4) Epoch 18, batch 3000, loss[loss=0.1637, simple_loss=0.2474, pruned_loss=0.03998, over 4761.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2645, pruned_loss=0.04168, over 938543.02 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:48:21,412 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 03:48:41,295 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([3.4204, 4.2337, 3.2019, 3.0969], device='cuda:3') +2024-07-29 03:48:41,715 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.1910, 4.8819, 4.2640, 4.9136], device='cuda:3') +2024-07-29 03:48:44,158 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1624, simple_loss=0.2643, pruned_loss=0.03024, over 944034.00 frames. +2024-07-29 03:48:44,158 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 03:48:59,494 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.44 vs. limit=15.0 +2024-07-29 03:49:03,052 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.56 vs. limit=15.0 +2024-07-29 03:49:04,555 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.349e+01 5.607e+01 6.067e+01 7.332e+01 1.132e+02, threshold=1.213e+02, percent-clipped=0.0 +2024-07-29 03:49:10,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=235745.33333333334, ans=0.125 +2024-07-29 03:49:18,685 INFO [train.py:1114] (3/4) Epoch 18, batch 3050, loss[loss=0.1792, simple_loss=0.2625, pruned_loss=0.0479, over 4646.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2656, pruned_loss=0.0422, over 937711.26 frames. ], batch size: 12, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:49:37,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=235798.66666666666, ans=0.2 +2024-07-29 03:49:43,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=235812.0, ans=0.125 +2024-07-29 03:49:47,677 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. 
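limit=15.0

Batch 3000 above interrupts training for the periodic validation pass (`train.py:1137`-`1147`): the loop computes a frame-weighted validation loss (0.1624 here, below the training `tot_loss`), dumps attention-weight entropies of selected `self_attn_weights` modules as a diagnostic, and reports peak GPU memory (4142MB). A rough sketch of such an interleaved validation step; the function, model, and loader interfaces are assumptions for illustration:

```python
# Rough sketch of the validation step interleaved at batch 3000 above
# (train.py:1137/1146/1147). The model/loader interfaces and function name
# are illustrative assumptions, not the actual training script.
import torch

def maybe_validate(model, valid_loader, batch_idx, device, valid_interval=3000):
    if batch_idx == 0 or batch_idx % valid_interval != 0:
        return None
    model.eval()
    total_loss, total_frames = 0.0, 0.0
    with torch.no_grad():
        for feats, targets, num_frames in valid_loader:
            loss = model(feats.to(device), targets.to(device))
            total_loss += loss.item() * num_frames  # frame-weighted sum
            total_frames += num_frames
    model.train()
    if torch.cuda.is_available():
        peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
        print(f"Maximum memory allocated so far is {peak_mb}MB")
    return total_loss / total_frames  # the logged validation loss
```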
+2024-07-29 03:49:53,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.75 vs. limit=22.5 +2024-07-29 03:49:53,606 INFO [train.py:1114] (3/4) Epoch 18, batch 3100, loss[loss=0.2126, simple_loss=0.2965, pruned_loss=0.0643, over 4656.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2651, pruned_loss=0.04187, over 938493.12 frames. ], batch size: 16, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:50:00,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.37 vs. limit=15.0 +2024-07-29 03:50:04,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=235852.0, ans=0.125 +2024-07-29 03:50:14,445 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.311e+01 5.371e+01 5.941e+01 6.939e+01 1.181e+02, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 03:50:17,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=235878.66666666666, ans=0.1 +2024-07-29 03:50:28,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=235905.33333333334, ans=0.2 +2024-07-29 03:50:28,777 INFO [train.py:1114] (3/4) Epoch 18, batch 3150, loss[loss=0.1667, simple_loss=0.2681, pruned_loss=0.03263, over 4640.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2647, pruned_loss=0.04133, over 937608.93 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:50:31,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=235905.33333333334, ans=0.035 +2024-07-29 03:50:40,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=235918.66666666666, ans=0.125 +2024-07-29 03:51:02,001 INFO [train.py:1114] (3/4) Epoch 18, batch 3200, loss[loss=0.1627, simple_loss=0.2569, pruned_loss=0.03425, over 4825.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2647, pruned_loss=0.04153, over 939378.74 frames. ], batch size: 13, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:51:09,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=235985.33333333334, ans=0.125 +2024-07-29 03:51:23,934 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.338e+01 5.694e+01 6.317e+01 7.020e+01 1.050e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 03:51:27,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=236012.0, ans=0.0 +2024-07-29 03:51:35,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.78 vs. limit=15.0 +2024-07-29 03:51:38,111 INFO [train.py:1114] (3/4) Epoch 18, batch 3250, loss[loss=0.1749, simple_loss=0.2737, pruned_loss=0.03802, over 4937.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2656, pruned_loss=0.0414, over 940168.05 frames. 
], batch size: 14, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:51:49,064 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:51:52,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=236065.33333333334, ans=0.125 +2024-07-29 03:52:08,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.76 vs. limit=22.5 +2024-07-29 03:52:11,782 INFO [train.py:1114] (3/4) Epoch 18, batch 3300, loss[loss=0.1891, simple_loss=0.2746, pruned_loss=0.05183, over 4739.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2636, pruned_loss=0.04087, over 940527.33 frames. ], batch size: 19, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:52:13,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236105.33333333334, ans=0.125 +2024-07-29 03:52:19,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=236118.66666666666, ans=0.2 +2024-07-29 03:52:21,659 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.30 vs. limit=12.0 +2024-07-29 03:52:24,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=236132.0, ans=0.09899494936611666 +2024-07-29 03:52:31,045 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.049e+01 5.536e+01 6.135e+01 6.929e+01 1.182e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 03:52:45,456 INFO [train.py:1114] (3/4) Epoch 18, batch 3350, loss[loss=0.1833, simple_loss=0.2758, pruned_loss=0.04535, over 4628.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2651, pruned_loss=0.04149, over 938458.09 frames. ], batch size: 17, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:52:45,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=236172.0, ans=0.125 +2024-07-29 03:52:55,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.48 vs. limit=22.5 +2024-07-29 03:53:01,010 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=236198.66666666666, ans=0.1 +2024-07-29 03:53:02,297 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=236198.66666666666, ans=0.125 +2024-07-29 03:53:18,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=236225.33333333334, ans=0.125 +2024-07-29 03:53:20,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236225.33333333334, ans=0.125 +2024-07-29 03:53:23,138 INFO [train.py:1114] (3/4) Epoch 18, batch 3400, loss[loss=0.1424, simple_loss=0.2187, pruned_loss=0.03308, over 4806.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2648, pruned_loss=0.04179, over 937737.94 frames. 
], batch size: 11, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:29,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=236252.0, ans=0.125 +2024-07-29 03:53:29,504 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:53:31,549 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:53:35,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=236252.0, ans=0.125 +2024-07-29 03:53:38,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=236265.33333333334, ans=0.2 +2024-07-29 03:53:40,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236265.33333333334, ans=0.125 +2024-07-29 03:53:43,250 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.614e+01 5.661e+01 6.178e+01 6.933e+01 1.009e+02, threshold=1.236e+02, percent-clipped=0.0 +2024-07-29 03:53:46,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=236278.66666666666, ans=0.0 +2024-07-29 03:53:57,532 INFO [train.py:1114] (3/4) Epoch 18, batch 3450, loss[loss=0.1912, simple_loss=0.2874, pruned_loss=0.04752, over 4688.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2649, pruned_loss=0.04188, over 937853.14 frames. ], batch size: 19, lr: 4.16e-03, grad_scale: 32.0 +2024-07-29 03:53:59,999 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0 +2024-07-29 03:54:00,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=236305.33333333334, ans=0.125 +2024-07-29 03:54:13,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=236332.0, ans=0.2 +2024-07-29 03:54:25,041 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236358.66666666666, ans=0.1 +2024-07-29 03:54:30,876 INFO [train.py:1114] (3/4) Epoch 18, batch 3500, loss[loss=0.156, simple_loss=0.2423, pruned_loss=0.03479, over 4934.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.264, pruned_loss=0.04154, over 938582.56 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:54:33,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=236372.0, ans=0.025 +2024-07-29 03:54:36,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=236372.0, ans=0.0 +2024-07-29 03:54:36,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=236372.0, ans=0.0 +2024-07-29 03:54:37,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=236385.33333333334, ans=0.2 +2024-07-29 03:54:50,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.13 vs. 
limit=15.0 +2024-07-29 03:54:50,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.411e+01 5.401e+01 5.925e+01 6.709e+01 9.541e+01, threshold=1.185e+02, percent-clipped=0.0 +2024-07-29 03:54:59,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=236425.33333333334, ans=0.125 +2024-07-29 03:55:01,216 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:55:02,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.48 vs. limit=15.0 +2024-07-29 03:55:04,387 INFO [train.py:1114] (3/4) Epoch 18, batch 3550, loss[loss=0.1726, simple_loss=0.273, pruned_loss=0.03606, over 4662.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2647, pruned_loss=0.04181, over 939074.05 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:55:13,136 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=236452.0, ans=0.125 +2024-07-29 03:55:16,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.05 vs. limit=6.0 +2024-07-29 03:55:27,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=236478.66666666666, ans=0.0 +2024-07-29 03:55:29,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236478.66666666666, ans=0.1 +2024-07-29 03:55:31,146 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=236478.66666666666, ans=0.0 +2024-07-29 03:55:39,456 INFO [train.py:1114] (3/4) Epoch 18, batch 3600, loss[loss=0.1786, simple_loss=0.2771, pruned_loss=0.04011, over 4960.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2657, pruned_loss=0.04177, over 940425.97 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 03:55:44,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=236505.33333333334, ans=0.2 +2024-07-29 03:55:56,620 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 03:55:58,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.66 vs. limit=15.0 +2024-07-29 03:55:59,088 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.793e+01 6.774e+01 8.193e+01 1.238e+02, threshold=1.355e+02, percent-clipped=1.0 +2024-07-29 03:56:01,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=236545.33333333334, ans=0.0 +2024-07-29 03:56:13,419 INFO [train.py:1114] (3/4) Epoch 18, batch 3650, loss[loss=0.2011, simple_loss=0.2965, pruned_loss=0.05285, over 4908.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2647, pruned_loss=0.04097, over 940864.38 frames. 
], batch size: 15, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:56:13,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_abs, batch_count=236572.0, ans=0.5 +2024-07-29 03:56:17,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=236572.0, ans=0.0 +2024-07-29 03:56:21,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=236585.33333333334, ans=0.125 +2024-07-29 03:56:25,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=236585.33333333334, ans=0.125 +2024-07-29 03:56:27,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.82 vs. limit=15.0 +2024-07-29 03:56:42,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=236625.33333333334, ans=0.125 +2024-07-29 03:56:43,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=236625.33333333334, ans=0.125 +2024-07-29 03:56:46,967 INFO [train.py:1114] (3/4) Epoch 18, batch 3700, loss[loss=0.182, simple_loss=0.2748, pruned_loss=0.04454, over 4933.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2644, pruned_loss=0.04065, over 941958.50 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:57:07,763 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.494e+01 5.372e+01 6.083e+01 6.875e+01 9.330e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 03:57:10,492 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.40 vs. limit=6.0 +2024-07-29 03:57:22,956 INFO [train.py:1114] (3/4) Epoch 18, batch 3750, loss[loss=0.1597, simple_loss=0.2395, pruned_loss=0.03989, over 4811.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2639, pruned_loss=0.04074, over 943646.78 frames. ], batch size: 11, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:57:33,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=236705.33333333334, ans=0.1 +2024-07-29 03:57:39,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=236718.66666666666, ans=0.125 +2024-07-29 03:57:42,071 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.41 vs. 
limit=12.0 +2024-07-29 03:57:46,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=236732.0, ans=0.1 +2024-07-29 03:57:54,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=236745.33333333334, ans=0.125 +2024-07-29 03:58:02,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=236758.66666666666, ans=0.125 +2024-07-29 03:58:03,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=236758.66666666666, ans=10.0 +2024-07-29 03:58:05,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=236758.66666666666, ans=0.125 +2024-07-29 03:58:09,063 INFO [train.py:1114] (3/4) Epoch 18, batch 3800, loss[loss=0.1694, simple_loss=0.2697, pruned_loss=0.03455, over 4819.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2635, pruned_loss=0.04063, over 941761.79 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:58:12,544 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=236772.0, ans=0.0 +2024-07-29 03:58:14,101 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.43 vs. limit=15.0 +2024-07-29 03:58:16,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.12 vs. limit=12.0 +2024-07-29 03:58:25,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=236798.66666666666, ans=0.0 +2024-07-29 03:58:28,166 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.460e+01 5.890e+01 6.474e+01 8.788e+01, threshold=1.178e+02, percent-clipped=0.0 +2024-07-29 03:58:44,393 INFO [train.py:1114] (3/4) Epoch 18, batch 3850, loss[loss=0.192, simple_loss=0.2863, pruned_loss=0.04887, over 4675.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2631, pruned_loss=0.04014, over 942195.05 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:58:45,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.16 vs. limit=15.0 +2024-07-29 03:58:58,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=236865.33333333334, ans=0.0 +2024-07-29 03:58:59,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=236865.33333333334, ans=0.1 +2024-07-29 03:59:19,425 INFO [train.py:1114] (3/4) Epoch 18, batch 3900, loss[loss=0.1639, simple_loss=0.2657, pruned_loss=0.03106, over 4809.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2641, pruned_loss=0.04046, over 942690.51 frames. ], batch size: 14, lr: 4.15e-03, grad_scale: 64.0 +2024-07-29 03:59:22,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.81 vs. 
limit=22.5 +2024-07-29 03:59:23,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=236905.33333333334, ans=0.125 +2024-07-29 03:59:24,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=236905.33333333334, ans=0.2 +2024-07-29 03:59:28,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=236918.66666666666, ans=0.025 +2024-07-29 03:59:42,385 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.722e+01 5.580e+01 6.101e+01 6.865e+01 9.868e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 03:59:42,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=236945.33333333334, ans=0.125 +2024-07-29 03:59:56,442 INFO [train.py:1114] (3/4) Epoch 18, batch 3950, loss[loss=0.2028, simple_loss=0.2901, pruned_loss=0.05771, over 4857.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2636, pruned_loss=0.04057, over 944608.05 frames. ], batch size: 16, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:00:00,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=236972.0, ans=0.2 +2024-07-29 04:00:19,873 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.92 vs. limit=15.0 +2024-07-29 04:00:23,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=237025.33333333334, ans=0.0 +2024-07-29 04:00:29,758 INFO [train.py:1114] (3/4) Epoch 18, batch 4000, loss[loss=0.1606, simple_loss=0.249, pruned_loss=0.03609, over 4784.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2645, pruned_loss=0.04164, over 941376.98 frames. ], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:00:49,603 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.687e+01 6.252e+01 7.100e+01 1.258e+02, threshold=1.250e+02, percent-clipped=1.0 +2024-07-29 04:00:51,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=237078.66666666666, ans=0.2 +2024-07-29 04:00:54,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=237078.66666666666, ans=0.2 +2024-07-29 04:01:03,423 INFO [train.py:1114] (3/4) Epoch 18, batch 4050, loss[loss=0.1847, simple_loss=0.2695, pruned_loss=0.04995, over 3617.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2637, pruned_loss=0.04172, over 939896.02 frames. ], batch size: 35, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:01:08,100 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.94 vs. limit=6.0 +2024-07-29 04:01:10,510 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.76 vs. 
limit=12.0 +2024-07-29 04:01:11,556 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=237105.33333333334, ans=0.1 +2024-07-29 04:01:24,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=237132.0, ans=0.0 +2024-07-29 04:01:24,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=237132.0, ans=0.125 +2024-07-29 04:01:26,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=237145.33333333334, ans=0.0 +2024-07-29 04:01:39,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=237158.66666666666, ans=0.125 +2024-07-29 04:01:41,044 INFO [train.py:1114] (3/4) Epoch 18, batch 4100, loss[loss=0.1746, simple_loss=0.2749, pruned_loss=0.03719, over 4911.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2648, pruned_loss=0.04232, over 939001.81 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:01:44,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.37 vs. limit=15.0 +2024-07-29 04:01:49,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=237185.33333333334, ans=0.125 +2024-07-29 04:02:01,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.620e+01 5.605e+01 6.193e+01 7.147e+01 1.131e+02, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 04:02:07,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=237225.33333333334, ans=0.125 +2024-07-29 04:02:25,854 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.02 vs. limit=22.5 +2024-07-29 04:02:32,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237225.33333333334, ans=0.1 +2024-07-29 04:02:51,890 INFO [train.py:1114] (3/4) Epoch 18, batch 4150, loss[loss=0.1586, simple_loss=0.2581, pruned_loss=0.02955, over 4825.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2646, pruned_loss=0.04204, over 938405.91 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:02:52,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=237238.66666666666, ans=0.0 +2024-07-29 04:02:53,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.96 vs. 
limit=15.0 +2024-07-29 04:03:42,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=237238.66666666666, ans=0.0 +2024-07-29 04:03:49,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=237252.0, ans=0.0 +2024-07-29 04:04:15,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=237278.66666666666, ans=0.125 +2024-07-29 04:04:23,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=237292.0, ans=0.95 +2024-07-29 04:04:24,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=237292.0, ans=0.025 +2024-07-29 04:04:32,874 INFO [train.py:1114] (3/4) Epoch 18, batch 4200, loss[loss=0.1929, simple_loss=0.2889, pruned_loss=0.04849, over 4903.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2651, pruned_loss=0.04224, over 940107.54 frames. ], batch size: 15, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:04:33,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=237305.33333333334, ans=0.2 +2024-07-29 04:04:37,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.23 vs. limit=22.5 +2024-07-29 04:04:45,497 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.08 vs. limit=10.0 +2024-07-29 04:04:50,292 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237332.0, ans=0.1 +2024-07-29 04:06:02,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.489e+01 5.423e+01 5.970e+01 6.521e+01 1.016e+02, threshold=1.194e+02, percent-clipped=0.0 +2024-07-29 04:06:18,674 INFO [train.py:1114] (3/4) Epoch 18, batch 4250, loss[loss=0.1555, simple_loss=0.2478, pruned_loss=0.03162, over 4636.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2651, pruned_loss=0.042, over 941299.29 frames. 
], batch size: 12, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:06:26,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=237385.33333333334, ans=0.0 +2024-07-29 04:06:29,956 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=237385.33333333334, ans=0.0 +2024-07-29 04:06:38,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer_ff3.min_abs, batch_count=237412.0, ans=0.2 +2024-07-29 04:06:39,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=237412.0, ans=0.125 +2024-07-29 04:06:47,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=237425.33333333334, ans=0.125 +2024-07-29 04:06:49,738 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:06:51,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=237438.66666666666, ans=0.125 +2024-07-29 04:06:52,272 INFO [train.py:1114] (3/4) Epoch 18, batch 4300, loss[loss=0.1828, simple_loss=0.2737, pruned_loss=0.04591, over 4762.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2648, pruned_loss=0.0419, over 940232.97 frames. ], batch size: 13, lr: 4.15e-03, grad_scale: 32.0 +2024-07-29 04:06:59,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=237452.0, ans=0.125 +2024-07-29 04:07:19,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.72 vs. limit=22.5 +2024-07-29 04:07:24,057 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+01 5.877e+01 6.356e+01 7.291e+01 9.513e+01, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 04:07:37,527 INFO [train.py:1114] (3/4) Epoch 18, batch 4350, loss[loss=0.2068, simple_loss=0.2977, pruned_loss=0.05798, over 4743.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2663, pruned_loss=0.04213, over 941166.00 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:07:42,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=237505.33333333334, ans=0.1 +2024-07-29 04:07:54,796 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:08:01,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=237545.33333333334, ans=0.2 +2024-07-29 04:08:06,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=237558.66666666666, ans=0.125 +2024-07-29 04:08:12,605 INFO [train.py:1114] (3/4) Epoch 18, batch 4400, loss[loss=0.1635, simple_loss=0.269, pruned_loss=0.02901, over 4808.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2659, pruned_loss=0.04194, over 941194.05 frames. 
], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:08:16,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=237572.0, ans=0.2 +2024-07-29 04:08:33,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.243e+01 5.655e+01 6.397e+01 7.492e+01 1.030e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 04:08:35,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=237612.0, ans=0.0 +2024-07-29 04:08:41,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=237625.33333333334, ans=0.5 +2024-07-29 04:08:43,597 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:08:45,189 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.08 vs. limit=15.0 +2024-07-29 04:08:46,776 INFO [train.py:1114] (3/4) Epoch 18, batch 4450, loss[loss=0.1682, simple_loss=0.2563, pruned_loss=0.04001, over 4930.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2652, pruned_loss=0.04166, over 939167.73 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:08:51,583 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:08:52,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=237638.66666666666, ans=0.0 +2024-07-29 04:08:52,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=237652.0, ans=0.025 +2024-07-29 04:08:55,653 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=17.89 vs. limit=15.0 +2024-07-29 04:09:04,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=237665.33333333334, ans=0.125 +2024-07-29 04:09:08,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=237678.66666666666, ans=0.1 +2024-07-29 04:09:10,539 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.10 vs. limit=22.5 +2024-07-29 04:09:15,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=237692.0, ans=0.125 +2024-07-29 04:09:21,654 INFO [train.py:1114] (3/4) Epoch 18, batch 4500, loss[loss=0.1669, simple_loss=0.2677, pruned_loss=0.03307, over 4735.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2662, pruned_loss=0.04203, over 938421.65 frames. 
], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:09:30,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=237718.66666666666, ans=0.0 +2024-07-29 04:09:43,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=237732.0, ans=0.125 +2024-07-29 04:09:50,468 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.387e+01 5.651e+01 6.463e+01 7.658e+01 1.183e+02, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 04:10:00,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=237745.33333333334, ans=0.05 +2024-07-29 04:10:07,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.39 vs. limit=15.0 +2024-07-29 04:10:13,159 INFO [train.py:1114] (3/4) Epoch 18, batch 4550, loss[loss=0.1602, simple_loss=0.2512, pruned_loss=0.03457, over 4892.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2669, pruned_loss=0.04245, over 940118.85 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:20,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=237785.33333333334, ans=0.125 +2024-07-29 04:10:21,487 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.80 vs. limit=15.0 +2024-07-29 04:10:24,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=237785.33333333334, ans=0.0 +2024-07-29 04:10:35,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=9.53 vs. limit=15.0 +2024-07-29 04:10:40,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=237825.33333333334, ans=0.125 +2024-07-29 04:10:46,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=237825.33333333334, ans=0.07 +2024-07-29 04:10:47,601 INFO [train.py:1114] (3/4) Epoch 18, batch 4600, loss[loss=0.2254, simple_loss=0.3136, pruned_loss=0.06861, over 4547.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2669, pruned_loss=0.04232, over 938460.87 frames. ], batch size: 21, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:10:51,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=237838.66666666666, ans=0.125 +2024-07-29 04:10:53,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=237838.66666666666, ans=0.0 +2024-07-29 04:10:56,465 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.41 vs. 
limit=22.5 +2024-07-29 04:11:09,106 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.515e+01 5.588e+01 6.056e+01 7.096e+01 1.037e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 04:11:09,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=237878.66666666666, ans=0.2 +2024-07-29 04:11:18,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=20.54 vs. limit=22.5 +2024-07-29 04:11:21,920 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=237905.33333333334, ans=0.125 +2024-07-29 04:11:22,406 INFO [train.py:1114] (3/4) Epoch 18, batch 4650, loss[loss=0.1721, simple_loss=0.2704, pruned_loss=0.03692, over 4859.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2676, pruned_loss=0.04261, over 940288.82 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:11:23,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.51 vs. limit=10.0 +2024-07-29 04:11:26,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=237905.33333333334, ans=0.07 +2024-07-29 04:11:41,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=237918.66666666666, ans=0.04949747468305833 +2024-07-29 04:11:45,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=237932.0, ans=0.0 +2024-07-29 04:11:46,710 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.14 vs. limit=10.0 +2024-07-29 04:11:50,682 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.19 vs. limit=15.0 +2024-07-29 04:12:04,315 INFO [train.py:1114] (3/4) Epoch 18, batch 4700, loss[loss=0.1613, simple_loss=0.2453, pruned_loss=0.03862, over 4704.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2662, pruned_loss=0.04237, over 937636.35 frames. ], batch size: 11, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:12:09,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=237972.0, ans=0.0 +2024-07-29 04:12:24,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.519e+01 5.692e+01 6.166e+01 6.744e+01 9.680e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 04:12:24,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238012.0, ans=0.1 +2024-07-29 04:12:27,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=238012.0, ans=0.0 +2024-07-29 04:12:41,498 INFO [train.py:1114] (3/4) Epoch 18, batch 4750, loss[loss=0.1774, simple_loss=0.2668, pruned_loss=0.04401, over 4495.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2663, pruned_loss=0.04262, over 935983.10 frames. 
], batch size: 21, lr: 4.14e-03, grad_scale: 16.0 +2024-07-29 04:12:43,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=238038.66666666666, ans=0.0 +2024-07-29 04:12:45,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=238038.66666666666, ans=0.025 +2024-07-29 04:12:49,129 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:12:51,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=238052.0, ans=0.0 +2024-07-29 04:12:57,553 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=238065.33333333334, ans=0.0 +2024-07-29 04:13:07,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.82 vs. limit=15.0 +2024-07-29 04:13:44,028 INFO [train.py:1114] (3/4) Epoch 18, batch 4800, loss[loss=0.1768, simple_loss=0.2866, pruned_loss=0.03353, over 4691.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2657, pruned_loss=0.0429, over 933226.38 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:13:55,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=238118.66666666666, ans=0.125 +2024-07-29 04:13:58,332 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.85 vs. limit=10.0 +2024-07-29 04:14:01,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238132.0, ans=0.125 +2024-07-29 04:14:07,345 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.662e+01 6.486e+01 7.810e+01 1.129e+02, threshold=1.297e+02, percent-clipped=0.0 +2024-07-29 04:14:11,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=238145.33333333334, ans=0.1 +2024-07-29 04:14:26,468 INFO [train.py:1114] (3/4) Epoch 18, batch 4850, loss[loss=0.1681, simple_loss=0.2684, pruned_loss=0.03389, over 4736.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2654, pruned_loss=0.04282, over 932969.57 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:14:29,248 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=238172.0, ans=0.0 +2024-07-29 04:14:40,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=238185.33333333334, ans=0.2 +2024-07-29 04:14:40,392 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.21 vs. limit=15.0 +2024-07-29 04:14:44,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=238198.66666666666, ans=0.125 +2024-07-29 04:15:30,372 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.77 vs. limit=22.5 +2024-07-29 04:15:31,240 INFO [train.py:1114] (3/4) Epoch 18, batch 4900, loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.04099, over 4763.00 frames. 
], tot_loss[loss=0.1748, simple_loss=0.265, pruned_loss=0.04236, over 934181.83 frames. ], batch size: 13, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:15:37,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=238238.66666666666, ans=0.125 +2024-07-29 04:15:52,804 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.332e+01 5.541e+01 6.118e+01 7.300e+01 1.058e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 04:15:56,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=238278.66666666666, ans=0.0 +2024-07-29 04:16:05,558 INFO [train.py:1114] (3/4) Epoch 18, batch 4950, loss[loss=0.1872, simple_loss=0.2749, pruned_loss=0.04976, over 3259.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2663, pruned_loss=0.04275, over 930906.84 frames. ], batch size: 35, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:15,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=238318.66666666666, ans=0.125 +2024-07-29 04:16:19,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=238332.0, ans=0.125 +2024-07-29 04:16:19,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=238332.0, ans=0.125 +2024-07-29 04:16:26,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=238345.33333333334, ans=0.025 +2024-07-29 04:16:31,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.47 vs. limit=10.0 +2024-07-29 04:16:31,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238358.66666666666, ans=0.125 +2024-07-29 04:16:32,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=238358.66666666666, ans=0.0 +2024-07-29 04:16:39,307 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=5.23 vs. limit=12.0 +2024-07-29 04:16:40,864 INFO [train.py:1114] (3/4) Epoch 18, batch 5000, loss[loss=0.1649, simple_loss=0.2584, pruned_loss=0.0357, over 4660.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2662, pruned_loss=0.04236, over 935010.10 frames. ], batch size: 14, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:16:40,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238372.0, ans=0.1 +2024-07-29 04:16:40,961 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:16:55,410 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.56 vs. 
limit=22.5 +2024-07-29 04:17:01,715 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.450e+01 5.523e+01 5.940e+01 6.612e+01 9.274e+01, threshold=1.188e+02, percent-clipped=0.0 +2024-07-29 04:17:01,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=238412.0, ans=0.125 +2024-07-29 04:17:03,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=238412.0, ans=0.125 +2024-07-29 04:17:05,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=238412.0, ans=0.0 +2024-07-29 04:17:14,229 INFO [train.py:1114] (3/4) Epoch 18, batch 5050, loss[loss=0.1617, simple_loss=0.2574, pruned_loss=0.033, over 4853.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04129, over 937568.12 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:18:28,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=238452.0, ans=0.0 +2024-07-29 04:18:28,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.34 vs. limit=15.0 +2024-07-29 04:18:31,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238465.33333333334, ans=0.125 +2024-07-29 04:18:51,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=238492.0, ans=0.0 +2024-07-29 04:18:55,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=238492.0, ans=0.125 +2024-07-29 04:18:57,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=238492.0, ans=0.025 +2024-07-29 04:18:58,348 INFO [train.py:1114] (3/4) Epoch 18, batch 5100, loss[loss=0.1584, simple_loss=0.2509, pruned_loss=0.03295, over 4775.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2651, pruned_loss=0.04152, over 934837.17 frames. ], batch size: 12, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:18:59,149 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=238505.33333333334, ans=0.2 +2024-07-29 04:18:59,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.82 vs. 
limit=6.0 +2024-07-29 04:18:59,994 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=238505.33333333334, ans=0.025 +2024-07-29 04:19:00,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238505.33333333334, ans=0.1 +2024-07-29 04:19:11,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=238518.66666666666, ans=0.025 +2024-07-29 04:19:21,953 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.577e+01 6.307e+01 7.309e+01 1.155e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:19:29,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=238558.66666666666, ans=0.0 +2024-07-29 04:19:34,578 INFO [train.py:1114] (3/4) Epoch 18, batch 5150, loss[loss=0.1967, simple_loss=0.283, pruned_loss=0.05519, over 4830.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2661, pruned_loss=0.04209, over 935980.34 frames. ], batch size: 16, lr: 4.14e-03, grad_scale: 32.0 +2024-07-29 04:19:39,566 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=238572.0, ans=0.0 +2024-07-29 04:19:47,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=238598.66666666666, ans=0.125 +2024-07-29 04:19:48,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=238598.66666666666, ans=0.125 +2024-07-29 04:19:58,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=238612.0, ans=0.0 +2024-07-29 04:19:58,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=238612.0, ans=0.0 +2024-07-29 04:20:01,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=238612.0, ans=0.125 +2024-07-29 04:20:02,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=238625.33333333334, ans=0.1 +2024-07-29 04:20:04,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=238625.33333333334, ans=0.2 +2024-07-29 04:20:06,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=238625.33333333334, ans=0.125 +2024-07-29 04:20:08,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=238638.66666666666, ans=0.0 +2024-07-29 04:20:08,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=238638.66666666666, ans=0.2 +2024-07-29 04:20:08,823 INFO [train.py:1114] (3/4) Epoch 18, batch 5200, loss[loss=0.1744, simple_loss=0.2789, pruned_loss=0.03494, over 4654.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2655, pruned_loss=0.04124, over 936235.37 frames. 
], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:08,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=238638.66666666666, ans=0.0 +2024-07-29 04:20:26,396 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=238665.33333333334, ans=0.025 +2024-07-29 04:20:30,406 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.633e+01 5.651e+01 6.355e+01 7.516e+01 2.460e+02, threshold=1.271e+02, percent-clipped=1.0 +2024-07-29 04:20:34,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238678.66666666666, ans=0.125 +2024-07-29 04:20:44,966 INFO [train.py:1114] (3/4) Epoch 18, batch 5250, loss[loss=0.1559, simple_loss=0.2454, pruned_loss=0.03325, over 4903.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2634, pruned_loss=0.04069, over 936077.41 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:20:46,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.max_abs, batch_count=238705.33333333334, ans=10.0 +2024-07-29 04:20:46,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=238705.33333333334, ans=0.125 +2024-07-29 04:20:47,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238705.33333333334, ans=0.1 +2024-07-29 04:20:47,811 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.67 vs. limit=15.0 +2024-07-29 04:20:48,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=238705.33333333334, ans=0.0 +2024-07-29 04:21:04,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=238732.0, ans=0.0 +2024-07-29 04:21:17,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=238758.66666666666, ans=0.025 +2024-07-29 04:21:18,562 INFO [train.py:1114] (3/4) Epoch 18, batch 5300, loss[loss=0.2171, simple_loss=0.3042, pruned_loss=0.06496, over 4657.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2634, pruned_loss=0.04109, over 934503.42 frames. ], batch size: 16, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:21:18,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.27 vs. 
limit=15.0 +2024-07-29 04:21:27,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=238785.33333333334, ans=0.125 +2024-07-29 04:21:30,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=238785.33333333334, ans=0.1 +2024-07-29 04:21:31,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=238785.33333333334, ans=0.0 +2024-07-29 04:21:37,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238798.66666666666, ans=0.125 +2024-07-29 04:21:41,632 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.581e+01 5.630e+01 6.188e+01 7.457e+01 1.076e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 04:21:43,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=238812.0, ans=0.2 +2024-07-29 04:21:43,172 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:21:47,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=238812.0, ans=0.125 +2024-07-29 04:22:09,542 INFO [train.py:1114] (3/4) Epoch 18, batch 5350, loss[loss=0.131, simple_loss=0.2063, pruned_loss=0.02788, over 4544.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2645, pruned_loss=0.04145, over 936798.60 frames. ], batch size: 10, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:23:04,847 INFO [train.py:1114] (3/4) Epoch 18, batch 5400, loss[loss=0.1982, simple_loss=0.2787, pruned_loss=0.05883, over 4242.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.265, pruned_loss=0.0419, over 930403.70 frames. ], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:23:13,961 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.12 vs. limit=22.5 +2024-07-29 04:23:15,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=238918.66666666666, ans=0.125 +2024-07-29 04:23:21,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=238932.0, ans=0.125 +2024-07-29 04:23:25,933 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.845e+01 6.439e+01 7.513e+01 9.975e+01, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 04:23:26,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=238945.33333333334, ans=0.1 +2024-07-29 04:23:26,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.min_positive, batch_count=238945.33333333334, ans=0.05 +2024-07-29 04:23:28,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=238945.33333333334, ans=0.0 +2024-07-29 04:23:29,781 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.25 vs. 
limit=15.0 +2024-07-29 04:23:37,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=238958.66666666666, ans=0.125 +2024-07-29 04:23:38,584 INFO [train.py:1114] (3/4) Epoch 18, batch 5450, loss[loss=0.146, simple_loss=0.2334, pruned_loss=0.02929, over 4716.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2635, pruned_loss=0.04122, over 933156.92 frames. ], batch size: 11, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:23:42,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=238972.0, ans=0.125 +2024-07-29 04:24:07,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=239025.33333333334, ans=0.2 +2024-07-29 04:24:13,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239038.66666666666, ans=0.0 +2024-07-29 04:24:14,268 INFO [train.py:1114] (3/4) Epoch 18, batch 5500, loss[loss=0.1911, simple_loss=0.2899, pruned_loss=0.04618, over 4343.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2632, pruned_loss=0.04121, over 931100.39 frames. ], batch size: 25, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:24:26,628 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0 +2024-07-29 04:24:39,145 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.809e+01 5.775e+01 6.562e+01 7.641e+01 1.081e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 04:24:39,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239078.66666666666, ans=0.1 +2024-07-29 04:24:42,023 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=239078.66666666666, ans=0.125 +2024-07-29 04:25:02,566 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.93 vs. limit=10.0 +2024-07-29 04:25:02,894 INFO [train.py:1114] (3/4) Epoch 18, batch 5550, loss[loss=0.1516, simple_loss=0.2471, pruned_loss=0.0281, over 4695.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2642, pruned_loss=0.04186, over 933633.71 frames. ], batch size: 12, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:25:07,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.39 vs. 
limit=15.0 +2024-07-29 04:25:11,842 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=239118.66666666666, ans=0.035 +2024-07-29 04:25:20,657 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239132.0, ans=0.1 +2024-07-29 04:25:22,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239132.0, ans=0.0 +2024-07-29 04:25:23,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=239145.33333333334, ans=0.125 +2024-07-29 04:25:26,458 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:25:35,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=239158.66666666666, ans=0.125 +2024-07-29 04:25:38,201 INFO [train.py:1114] (3/4) Epoch 18, batch 5600, loss[loss=0.1773, simple_loss=0.2739, pruned_loss=0.0404, over 4743.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2648, pruned_loss=0.042, over 934725.54 frames. ], batch size: 14, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:25:54,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=239198.66666666666, ans=0.125 +2024-07-29 04:25:58,190 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239198.66666666666, ans=0.125 +2024-07-29 04:25:59,986 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.772e+01 5.791e+01 6.471e+01 7.649e+01 1.137e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-29 04:26:08,723 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239225.33333333334, ans=0.1 +2024-07-29 04:26:12,729 INFO [train.py:1114] (3/4) Epoch 18, batch 5650, loss[loss=0.1976, simple_loss=0.3001, pruned_loss=0.04755, over 4538.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2635, pruned_loss=0.04136, over 937161.37 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:26:14,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=239238.66666666666, ans=0.0 +2024-07-29 04:26:18,292 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:26:18,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=239238.66666666666, ans=0.04949747468305833 +2024-07-29 04:26:24,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=239252.0, ans=0.0 +2024-07-29 04:26:28,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=239265.33333333334, ans=0.0 +2024-07-29 04:26:35,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0 +2024-07-29 04:26:40,160 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.69 vs. 
limit=15.0 +2024-07-29 04:26:47,111 INFO [train.py:1114] (3/4) Epoch 18, batch 5700, loss[loss=0.1899, simple_loss=0.2956, pruned_loss=0.04208, over 4692.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2641, pruned_loss=0.04142, over 938207.73 frames. ], batch size: 13, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:26:50,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239305.33333333334, ans=0.1 +2024-07-29 04:27:05,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=239332.0, ans=15.0 +2024-07-29 04:27:08,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.28 vs. limit=15.0 +2024-07-29 04:27:09,679 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.703e+01 6.369e+01 7.336e+01 1.206e+02, threshold=1.274e+02, percent-clipped=0.0 +2024-07-29 04:27:14,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=239345.33333333334, ans=0.025 +2024-07-29 04:27:14,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=239345.33333333334, ans=0.0 +2024-07-29 04:27:16,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=239358.66666666666, ans=0.2 +2024-07-29 04:27:22,268 INFO [train.py:1114] (3/4) Epoch 18, batch 5750, loss[loss=0.2082, simple_loss=0.3005, pruned_loss=0.05796, over 4781.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2644, pruned_loss=0.04172, over 938282.70 frames. ], batch size: 19, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:27:26,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.13 vs. limit=15.0 +2024-07-29 04:27:29,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=239385.33333333334, ans=0.125 +2024-07-29 04:27:35,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=239398.66666666666, ans=0.0 +2024-07-29 04:27:46,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=239412.0, ans=0.0 +2024-07-29 04:27:48,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=239412.0, ans=0.5 +2024-07-29 04:27:50,688 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=239425.33333333334, ans=0.0 +2024-07-29 04:27:57,371 INFO [train.py:1114] (3/4) Epoch 18, batch 5800, loss[loss=0.1757, simple_loss=0.259, pruned_loss=0.04621, over 4726.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2649, pruned_loss=0.04194, over 937562.40 frames. 
], batch size: 19, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:28:02,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=239438.66666666666, ans=0.125 +2024-07-29 04:28:18,261 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.507e+01 5.596e+01 6.128e+01 6.728e+01 1.003e+02, threshold=1.226e+02, percent-clipped=0.0 +2024-07-29 04:28:30,896 INFO [train.py:1114] (3/4) Epoch 18, batch 5850, loss[loss=0.1749, simple_loss=0.2789, pruned_loss=0.03545, over 4366.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2652, pruned_loss=0.04194, over 937913.64 frames. ], batch size: 21, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:28:35,040 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239505.33333333334, ans=0.125 +2024-07-29 04:28:35,865 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=239505.33333333334, ans=0.0 +2024-07-29 04:28:41,780 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=239518.66666666666, ans=0.125 +2024-07-29 04:28:47,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239532.0, ans=0.1 +2024-07-29 04:29:00,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=239558.66666666666, ans=0.2 +2024-07-29 04:29:04,793 INFO [train.py:1114] (3/4) Epoch 18, batch 5900, loss[loss=0.1765, simple_loss=0.278, pruned_loss=0.03746, over 4676.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2655, pruned_loss=0.04194, over 937673.34 frames. ], batch size: 15, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:29:15,403 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=239585.33333333334, ans=0.125 +2024-07-29 04:29:17,771 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.54 vs. limit=22.5 +2024-07-29 04:29:27,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.678e+01 5.760e+01 6.303e+01 7.095e+01 1.028e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 04:29:30,223 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.84 vs. limit=15.0 +2024-07-29 04:29:37,305 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=239625.33333333334, ans=0.0 +2024-07-29 04:29:51,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=239625.33333333334, ans=0.05 +2024-07-29 04:29:51,963 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239625.33333333334, ans=0.1 +2024-07-29 04:29:53,215 INFO [train.py:1114] (3/4) Epoch 18, batch 5950, loss[loss=0.1825, simple_loss=0.2665, pruned_loss=0.04923, over 4666.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2651, pruned_loss=0.04179, over 939758.71 frames. 
], batch size: 15, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:30:07,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=239638.66666666666, ans=0.125 +2024-07-29 04:30:08,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=6.25 vs. limit=6.0 +2024-07-29 04:30:10,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.60 vs. limit=12.0 +2024-07-29 04:30:18,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.34 vs. limit=22.5 +2024-07-29 04:30:51,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=239678.66666666666, ans=0.025 +2024-07-29 04:30:52,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=239678.66666666666, ans=0.125 +2024-07-29 04:30:54,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=239692.0, ans=0.0 +2024-07-29 04:30:58,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=239692.0, ans=0.125 +2024-07-29 04:30:59,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239692.0, ans=0.1 +2024-07-29 04:31:01,588 INFO [train.py:1114] (3/4) Epoch 18, batch 6000, loss[loss=0.1872, simple_loss=0.2754, pruned_loss=0.04951, over 4224.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2647, pruned_loss=0.04153, over 936876.83 frames. ], batch size: 26, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:31:01,589 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 04:31:09,063 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.3860, 4.8997, 5.1042, 5.0276], device='cuda:3') +2024-07-29 04:31:13,506 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1615, simple_loss=0.2636, pruned_loss=0.0297, over 944034.00 frames. +2024-07-29 04:31:13,507 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 04:31:15,932 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=19.12 vs. limit=22.5 +2024-07-29 04:31:16,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=239705.33333333334, ans=0.0 +2024-07-29 04:31:31,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=239732.0, ans=0.0 +2024-07-29 04:31:33,984 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:31:36,675 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.961e+01 5.758e+01 6.298e+01 7.334e+01 1.056e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 04:31:49,516 INFO [train.py:1114] (3/4) Epoch 18, batch 6050, loss[loss=0.1642, simple_loss=0.2562, pruned_loss=0.0361, over 4772.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2635, pruned_loss=0.04135, over 937942.20 frames. 
], batch size: 12, lr: 4.13e-03, grad_scale: 32.0 +2024-07-29 04:31:51,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff2_skip_rate, batch_count=239772.0, ans=0.0 +2024-07-29 04:31:53,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.05 vs. limit=6.0 +2024-07-29 04:32:15,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=239812.0, ans=0.125 +2024-07-29 04:32:16,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=239825.33333333334, ans=0.125 +2024-07-29 04:32:17,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=239825.33333333334, ans=0.0 +2024-07-29 04:32:17,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=239825.33333333334, ans=0.125 +2024-07-29 04:32:20,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=239825.33333333334, ans=0.0 +2024-07-29 04:32:40,018 INFO [train.py:1114] (3/4) Epoch 18, batch 6100, loss[loss=0.1825, simple_loss=0.2778, pruned_loss=0.0436, over 4690.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2637, pruned_loss=0.04173, over 937778.62 frames. ], batch size: 15, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:32:41,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=239838.66666666666, ans=0.125 +2024-07-29 04:32:53,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=239838.66666666666, ans=0.125 +2024-07-29 04:32:53,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=239838.66666666666, ans=0.1 +2024-07-29 04:32:54,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=239838.66666666666, ans=0.2 +2024-07-29 04:33:10,856 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.665e+01 5.504e+01 6.224e+01 7.220e+01 1.027e+02, threshold=1.245e+02, percent-clipped=0.0 +2024-07-29 04:33:14,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=239878.66666666666, ans=0.0 +2024-07-29 04:33:16,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239892.0, ans=0.1 +2024-07-29 04:33:25,615 INFO [train.py:1114] (3/4) Epoch 18, batch 6150, loss[loss=0.2368, simple_loss=0.3163, pruned_loss=0.07862, over 3392.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2635, pruned_loss=0.04149, over 936571.22 frames. 
], batch size: 36, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:33:38,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=239918.66666666666, ans=0.0 +2024-07-29 04:33:40,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=239932.0, ans=0.125 +2024-07-29 04:33:54,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239958.66666666666, ans=0.1 +2024-07-29 04:33:59,194 INFO [train.py:1114] (3/4) Epoch 18, batch 6200, loss[loss=0.181, simple_loss=0.2869, pruned_loss=0.03759, over 4740.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2636, pruned_loss=0.04151, over 936181.67 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:34:01,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=239972.0, ans=0.2 +2024-07-29 04:34:07,819 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=239985.33333333334, ans=0.1 +2024-07-29 04:34:08,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=239985.33333333334, ans=0.0 +2024-07-29 04:34:22,491 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.59 vs. limit=15.0 +2024-07-29 04:34:41,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=239998.66666666666, ans=0.09899494936611666 +2024-07-29 04:34:44,995 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.635e+01 5.597e+01 6.193e+01 7.328e+01 9.537e+01, threshold=1.239e+02, percent-clipped=0.0 +2024-07-29 04:34:50,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=240012.0, ans=0.125 +2024-07-29 04:34:54,310 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240025.33333333334, ans=0.125 +2024-07-29 04:34:57,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=240025.33333333334, ans=0.125 +2024-07-29 04:34:58,271 INFO [train.py:1114] (3/4) Epoch 18, batch 6250, loss[loss=0.1795, simple_loss=0.2918, pruned_loss=0.03358, over 4802.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2641, pruned_loss=0.04154, over 933570.29 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:34:58,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.75 vs. 
limit=6.0 +2024-07-29 04:35:10,097 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=240052.0, ans=0.0 +2024-07-29 04:35:12,729 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240065.33333333334, ans=0.1 +2024-07-29 04:35:19,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=240078.66666666666, ans=0.0 +2024-07-29 04:35:22,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240078.66666666666, ans=0.125 +2024-07-29 04:35:25,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=240092.0, ans=0.07 +2024-07-29 04:35:27,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=240092.0, ans=0.0 +2024-07-29 04:35:33,223 INFO [train.py:1114] (3/4) Epoch 18, batch 6300, loss[loss=0.1675, simple_loss=0.2493, pruned_loss=0.04284, over 4490.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2658, pruned_loss=0.04236, over 929887.32 frames. ], batch size: 10, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:35:44,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=240118.66666666666, ans=0.1 +2024-07-29 04:35:45,062 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.81 vs. limit=15.0 +2024-07-29 04:35:46,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=240132.0, ans=0.125 +2024-07-29 04:35:53,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240132.0, ans=0.125 +2024-07-29 04:35:58,644 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.639e+01 6.264e+01 7.118e+01 1.029e+02, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 04:36:01,060 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.06 vs. limit=22.5 +2024-07-29 04:36:01,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240145.33333333334, ans=0.125 +2024-07-29 04:36:11,346 INFO [train.py:1114] (3/4) Epoch 18, batch 6350, loss[loss=0.1811, simple_loss=0.2694, pruned_loss=0.04643, over 4499.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2645, pruned_loss=0.04143, over 933864.27 frames. 
], batch size: 21, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:36:19,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=240172.0, ans=0.125 +2024-07-29 04:36:24,886 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=240185.33333333334, ans=0.125 +2024-07-29 04:37:00,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=240225.33333333334, ans=0.0 +2024-07-29 04:37:00,829 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240225.33333333334, ans=0.125 +2024-07-29 04:37:05,083 INFO [train.py:1114] (3/4) Epoch 18, batch 6400, loss[loss=0.1724, simple_loss=0.2558, pruned_loss=0.04451, over 4642.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.04147, over 935388.47 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:37:08,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=240238.66666666666, ans=0.0 +2024-07-29 04:37:24,449 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:37:55,452 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.669e+01 6.278e+01 7.394e+01 9.691e+01, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 04:38:05,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=240292.0, ans=0.0 +2024-07-29 04:38:07,969 INFO [train.py:1114] (3/4) Epoch 18, batch 6450, loss[loss=0.1711, simple_loss=0.268, pruned_loss=0.03712, over 4572.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2651, pruned_loss=0.04155, over 939074.52 frames. ], batch size: 21, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:38:08,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240305.33333333334, ans=0.1 +2024-07-29 04:38:10,334 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=13.86 vs. limit=15.0 +2024-07-29 04:38:12,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=240305.33333333334, ans=0.1 +2024-07-29 04:38:33,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=240358.66666666666, ans=0.125 +2024-07-29 04:38:34,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=4.96 vs. limit=15.0 +2024-07-29 04:38:37,511 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240358.66666666666, ans=0.125 +2024-07-29 04:38:51,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=240358.66666666666, ans=0.0 +2024-07-29 04:38:53,613 INFO [train.py:1114] (3/4) Epoch 18, batch 6500, loss[loss=0.2096, simple_loss=0.2938, pruned_loss=0.06273, over 3244.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2648, pruned_loss=0.04095, over 940291.87 frames. 
], batch size: 35, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:38:54,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=240372.0, ans=0.125 +2024-07-29 04:39:13,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.507e+01 6.062e+01 6.906e+01 9.828e+01, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 04:39:22,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240425.33333333334, ans=0.1 +2024-07-29 04:39:27,372 INFO [train.py:1114] (3/4) Epoch 18, batch 6550, loss[loss=0.1488, simple_loss=0.2351, pruned_loss=0.03126, over 4800.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2648, pruned_loss=0.04123, over 943073.17 frames. ], batch size: 11, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:39:34,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=240438.66666666666, ans=0.125 +2024-07-29 04:39:41,347 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.40 vs. limit=15.0 +2024-07-29 04:39:46,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=240465.33333333334, ans=0.025 +2024-07-29 04:39:47,129 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=14.45 vs. limit=15.0 +2024-07-29 04:39:47,153 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.50 vs. limit=12.0 +2024-07-29 04:39:47,275 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.26 vs. limit=15.0 +2024-07-29 04:39:52,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=240478.66666666666, ans=0.125 +2024-07-29 04:40:06,035 INFO [train.py:1114] (3/4) Epoch 18, batch 6600, loss[loss=0.1776, simple_loss=0.2772, pruned_loss=0.03904, over 4927.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2646, pruned_loss=0.04143, over 944833.91 frames. ], batch size: 14, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:40:06,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=240505.33333333334, ans=0.2 +2024-07-29 04:40:12,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=240518.66666666666, ans=0.0 +2024-07-29 04:40:16,704 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=12.13 vs. 
limit=15.0 +2024-07-29 04:40:23,270 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=240518.66666666666, ans=0.125 +2024-07-29 04:40:31,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=240532.0, ans=0.025 +2024-07-29 04:40:34,501 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.498e+01 5.621e+01 6.292e+01 7.270e+01 1.272e+02, threshold=1.258e+02, percent-clipped=1.0 +2024-07-29 04:40:46,974 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.86 vs. limit=15.0 +2024-07-29 04:40:47,142 INFO [train.py:1114] (3/4) Epoch 18, batch 6650, loss[loss=0.2343, simple_loss=0.3229, pruned_loss=0.07284, over 4632.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2642, pruned_loss=0.04146, over 943439.54 frames. ], batch size: 17, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:40:55,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.61 vs. limit=22.5 +2024-07-29 04:41:08,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=240612.0, ans=0.125 +2024-07-29 04:41:12,550 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=240612.0, ans=0.2 +2024-07-29 04:41:24,684 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.48 vs. limit=10.0 +2024-07-29 04:41:30,162 INFO [train.py:1114] (3/4) Epoch 18, batch 6700, loss[loss=0.2181, simple_loss=0.3144, pruned_loss=0.06093, over 4701.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2652, pruned_loss=0.04209, over 942445.32 frames. ], batch size: 19, lr: 4.12e-03, grad_scale: 32.0 +2024-07-29 04:41:39,388 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240652.0, ans=0.125 +2024-07-29 04:41:44,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=240665.33333333334, ans=0.125 +2024-07-29 04:41:50,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=240678.66666666666, ans=0.0 +2024-07-29 04:41:50,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=240678.66666666666, ans=0.125 +2024-07-29 04:41:51,431 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.784e+01 6.386e+01 7.165e+01 1.123e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 04:41:52,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=240678.66666666666, ans=0.125 +2024-07-29 04:41:58,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=240692.0, ans=0.125 +2024-07-29 04:42:03,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. 
limit=15.0 +2024-07-29 04:42:03,821 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=240705.33333333334, ans=0.2 +2024-07-29 04:42:04,337 INFO [train.py:1114] (3/4) Epoch 18, batch 6750, loss[loss=0.1953, simple_loss=0.275, pruned_loss=0.05783, over 4229.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2651, pruned_loss=0.04201, over 940444.70 frames. ], batch size: 25, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:42:05,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=240705.33333333334, ans=0.125 +2024-07-29 04:42:13,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240718.66666666666, ans=0.1 +2024-07-29 04:42:15,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=240718.66666666666, ans=0.125 +2024-07-29 04:42:18,659 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240732.0, ans=0.1 +2024-07-29 04:42:22,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=240732.0, ans=0.0 +2024-07-29 04:42:22,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=240732.0, ans=0.125 +2024-07-29 04:42:33,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=240758.66666666666, ans=0.0 +2024-07-29 04:42:37,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=240758.66666666666, ans=0.0 +2024-07-29 04:42:38,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=240772.0, ans=0.2 +2024-07-29 04:42:38,951 INFO [train.py:1114] (3/4) Epoch 18, batch 6800, loss[loss=0.1515, simple_loss=0.2536, pruned_loss=0.02474, over 4636.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2653, pruned_loss=0.04224, over 938966.58 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:42:42,642 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=9.57 vs. limit=15.0 +2024-07-29 04:42:55,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=240798.66666666666, ans=0.0 +2024-07-29 04:42:57,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=240798.66666666666, ans=0.025 +2024-07-29 04:42:58,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=240812.0, ans=0.125 +2024-07-29 04:42:59,654 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.569e+01 6.060e+01 6.382e+01 1.017e+02, threshold=1.212e+02, percent-clipped=0.0 +2024-07-29 04:43:01,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240812.0, ans=0.1 +2024-07-29 04:43:02,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.57 vs. 
limit=15.0 +2024-07-29 04:43:07,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=240825.33333333334, ans=0.07 +2024-07-29 04:43:09,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=240825.33333333334, ans=0.125 +2024-07-29 04:43:14,295 INFO [train.py:1114] (3/4) Epoch 18, batch 6850, loss[loss=0.1509, simple_loss=0.2447, pruned_loss=0.02854, over 4692.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2648, pruned_loss=0.04162, over 940503.54 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:43:17,921 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:43:36,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.47 vs. limit=10.0 +2024-07-29 04:43:37,803 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:43:46,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240892.0, ans=0.1 +2024-07-29 04:43:47,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=240892.0, ans=0.125 +2024-07-29 04:43:48,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=240905.33333333334, ans=0.0 +2024-07-29 04:43:48,573 INFO [train.py:1114] (3/4) Epoch 18, batch 6900, loss[loss=0.1565, simple_loss=0.2576, pruned_loss=0.02769, over 4955.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2646, pruned_loss=0.04155, over 942550.01 frames. ], batch size: 13, lr: 4.12e-03, grad_scale: 64.0 +2024-07-29 04:43:51,528 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.21 vs. limit=22.5 +2024-07-29 04:43:55,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=240905.33333333334, ans=0.125 +2024-07-29 04:43:58,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=240918.66666666666, ans=0.125 +2024-07-29 04:43:59,050 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=256, metric=11.66 vs. 
limit=22.5 +2024-07-29 04:44:03,362 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=240932.0, ans=0.2 +2024-07-29 04:44:11,088 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.810e+01 6.480e+01 7.498e+01 1.027e+02, threshold=1.296e+02, percent-clipped=0.0 +2024-07-29 04:44:14,195 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=240945.33333333334, ans=0.025 +2024-07-29 04:44:14,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=240945.33333333334, ans=0.125 +2024-07-29 04:44:14,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=240945.33333333334, ans=0.2 +2024-07-29 04:44:21,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=240958.66666666666, ans=0.1 +2024-07-29 04:44:23,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=240972.0, ans=0.125 +2024-07-29 04:44:24,893 INFO [train.py:1114] (3/4) Epoch 18, batch 6950, loss[loss=0.1669, simple_loss=0.2453, pruned_loss=0.04425, over 4484.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.264, pruned_loss=0.04139, over 939721.26 frames. ], batch size: 10, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:44:31,035 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=240985.33333333334, ans=0.125 +2024-07-29 04:44:43,236 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:44:43,847 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=240998.66666666666, ans=0.125 +2024-07-29 04:44:50,611 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=241012.0, ans=0.125 +2024-07-29 04:44:51,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241012.0, ans=0.125 +2024-07-29 04:44:54,284 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.94 vs. limit=15.0 +2024-07-29 04:44:59,693 INFO [train.py:1114] (3/4) Epoch 18, batch 7000, loss[loss=0.1914, simple_loss=0.2725, pruned_loss=0.05519, over 4636.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2644, pruned_loss=0.04167, over 938351.81 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:45:03,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241038.66666666666, ans=0.125 +2024-07-29 04:45:12,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=241065.33333333334, ans=0.0 +2024-07-29 04:45:20,297 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.289e+01 5.764e+01 6.515e+01 7.633e+01 1.207e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-29 04:45:25,458 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=6.06 vs. 
limit=10.0 +2024-07-29 04:45:29,877 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=241092.0, ans=10.0 +2024-07-29 04:45:33,136 INFO [train.py:1114] (3/4) Epoch 18, batch 7050, loss[loss=0.1776, simple_loss=0.2677, pruned_loss=0.04369, over 4782.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2635, pruned_loss=0.04136, over 941667.21 frames. ], batch size: 19, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:45:39,520 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.75 vs. limit=15.0 +2024-07-29 04:45:43,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241118.66666666666, ans=0.125 +2024-07-29 04:45:44,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=241118.66666666666, ans=0.2 +2024-07-29 04:45:44,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=241118.66666666666, ans=0.025 +2024-07-29 04:45:49,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=241132.0, ans=0.0 +2024-07-29 04:45:55,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241145.33333333334, ans=0.1 +2024-07-29 04:45:55,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=241145.33333333334, ans=0.07 +2024-07-29 04:46:00,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=241158.66666666666, ans=0.0 +2024-07-29 04:46:03,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=241158.66666666666, ans=0.125 +2024-07-29 04:46:06,964 INFO [train.py:1114] (3/4) Epoch 18, batch 7100, loss[loss=0.2083, simple_loss=0.2956, pruned_loss=0.06054, over 4796.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2646, pruned_loss=0.04191, over 935929.30 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 64.0 +2024-07-29 04:46:14,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.92 vs. limit=15.0 +2024-07-29 04:46:25,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241198.66666666666, ans=0.1 +2024-07-29 04:46:28,376 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.554e+01 6.044e+01 6.901e+01 9.600e+01, threshold=1.209e+02, percent-clipped=0.0 +2024-07-29 04:46:29,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=241212.0, ans=0.05 +2024-07-29 04:46:33,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=241225.33333333334, ans=0.025 +2024-07-29 04:46:38,660 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.95 vs. 
limit=15.0 +2024-07-29 04:46:40,688 INFO [train.py:1114] (3/4) Epoch 18, batch 7150, loss[loss=0.1808, simple_loss=0.2712, pruned_loss=0.04516, over 4556.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2641, pruned_loss=0.04171, over 937315.45 frames. ], batch size: 21, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:46:44,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.32 vs. limit=12.0 +2024-07-29 04:46:52,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241252.0, ans=0.125 +2024-07-29 04:46:53,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=241265.33333333334, ans=0.125 +2024-07-29 04:46:58,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten.whitening_limit, batch_count=241265.33333333334, ans=22.5 +2024-07-29 04:47:03,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=241278.66666666666, ans=0.125 +2024-07-29 04:47:14,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=241278.66666666666, ans=0.125 +2024-07-29 04:47:14,534 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.98 vs. limit=10.0 +2024-07-29 04:47:20,202 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 04:47:22,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241292.0, ans=0.1 +2024-07-29 04:47:24,561 INFO [train.py:1114] (3/4) Epoch 18, batch 7200, loss[loss=0.1777, simple_loss=0.2671, pruned_loss=0.04422, over 4794.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2645, pruned_loss=0.04152, over 937550.62 frames. ], batch size: 15, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:47:45,846 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.087e+01 5.707e+01 6.356e+01 7.350e+01 1.020e+02, threshold=1.271e+02, percent-clipped=0.0 +2024-07-29 04:47:57,745 INFO [train.py:1114] (3/4) Epoch 18, batch 7250, loss[loss=0.1489, simple_loss=0.24, pruned_loss=0.02888, over 4863.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2645, pruned_loss=0.0417, over 939181.84 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:48:01,062 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=241372.0, ans=0.125 +2024-07-29 04:48:32,911 INFO [train.py:1114] (3/4) Epoch 18, batch 7300, loss[loss=0.1473, simple_loss=0.2344, pruned_loss=0.03014, over 4861.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2639, pruned_loss=0.04137, over 939480.88 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:48:35,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.17 vs. 
limit=15.0 +2024-07-29 04:48:50,680 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241465.33333333334, ans=0.125 +2024-07-29 04:48:53,885 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.829e+01 5.415e+01 6.086e+01 6.711e+01 9.900e+01, threshold=1.217e+02, percent-clipped=0.0 +2024-07-29 04:49:11,741 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=241492.0, ans=0.125 +2024-07-29 04:49:12,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=241492.0, ans=0.125 +2024-07-29 04:49:14,861 INFO [train.py:1114] (3/4) Epoch 18, batch 7350, loss[loss=0.1737, simple_loss=0.2642, pruned_loss=0.04161, over 4645.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2642, pruned_loss=0.04107, over 939066.53 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:49:18,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=241505.33333333334, ans=0.125 +2024-07-29 04:49:24,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=241518.66666666666, ans=0.0 +2024-07-29 04:49:28,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=241518.66666666666, ans=0.125 +2024-07-29 04:49:29,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.56 vs. limit=10.0 +2024-07-29 04:49:32,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=241532.0, ans=0.125 +2024-07-29 04:49:43,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=241545.33333333334, ans=0.2 +2024-07-29 04:49:48,545 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.37 vs. limit=15.0 +2024-07-29 04:49:54,735 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.39 vs. limit=15.0 +2024-07-29 04:49:54,801 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=24.35 vs. limit=22.5 +2024-07-29 04:49:58,975 INFO [train.py:1114] (3/4) Epoch 18, batch 7400, loss[loss=0.1675, simple_loss=0.2637, pruned_loss=0.03562, over 4692.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2643, pruned_loss=0.04065, over 940457.98 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0 +2024-07-29 04:49:59,284 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.64 vs. 
limit=15.0
+2024-07-29 04:50:03,788 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:50:14,160 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=241598.66666666666, ans=0.0
+2024-07-29 04:50:16,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=241598.66666666666, ans=0.1
+2024-07-29 04:50:16,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=241598.66666666666, ans=0.0
+2024-07-29 04:50:19,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=241598.66666666666, ans=0.2
+2024-07-29 04:50:22,050 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.538e+01 5.627e+01 6.295e+01 7.057e+01 1.550e+02, threshold=1.259e+02, percent-clipped=1.0
+2024-07-29 04:50:25,450 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=241612.0, ans=0.0
+2024-07-29 04:50:33,795 INFO [train.py:1114] (3/4) Epoch 18, batch 7450, loss[loss=0.1369, simple_loss=0.2259, pruned_loss=0.02399, over 4612.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2628, pruned_loss=0.04023, over 937830.63 frames. ], batch size: 11, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:50:35,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=241638.66666666666, ans=0.125
+2024-07-29 04:50:48,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.07 vs. limit=22.5
+2024-07-29 04:50:51,755 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=241665.33333333334, ans=0.125
+2024-07-29 04:51:03,730 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241692.0, ans=0.1
+2024-07-29 04:51:06,926 INFO [train.py:1114] (3/4) Epoch 18, batch 7500, loss[loss=0.2174, simple_loss=0.3087, pruned_loss=0.06305, over 3537.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2632, pruned_loss=0.04066, over 936750.75 frames. ], batch size: 35, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:51:07,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241705.33333333334, ans=0.1
+2024-07-29 04:51:15,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=241718.66666666666, ans=0.125
+2024-07-29 04:51:23,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=241732.0, ans=0.125
+2024-07-29 04:51:23,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=241732.0, ans=0.1
+2024-07-29 04:51:24,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=5.48 vs. limit=15.0
+2024-07-29 04:51:28,212 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.400e+01 5.735e+01 6.333e+01 6.793e+01 1.076e+02, threshold=1.267e+02, percent-clipped=0.0
+2024-07-29 04:51:31,089 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=241745.33333333334, ans=0.2
+2024-07-29 04:51:40,185 INFO [train.py:1114] (3/4) Epoch 18, batch 7550, loss[loss=0.2212, simple_loss=0.307, pruned_loss=0.06767, over 4616.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2645, pruned_loss=0.04117, over 936317.95 frames. ], batch size: 17, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:51:43,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=241772.0, ans=0.0
+2024-07-29 04:51:52,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys.whitening_limit, batch_count=241772.0, ans=6.0
+2024-07-29 04:52:04,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass_mid.scale_min, batch_count=241798.66666666666, ans=0.2
+2024-07-29 04:52:06,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=241798.66666666666, ans=0.1
+2024-07-29 04:52:09,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=241812.0, ans=0.125
+2024-07-29 04:52:21,661 INFO [train.py:1114] (3/4) Epoch 18, batch 7600, loss[loss=0.2128, simple_loss=0.3062, pruned_loss=0.05967, over 4805.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.264, pruned_loss=0.04071, over 937930.36 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:52:22,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=241838.66666666666, ans=0.025
+2024-07-29 04:52:22,424 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:52:25,381 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.46 vs. limit=15.0
+2024-07-29 04:52:30,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=241852.0, ans=0.0
+2024-07-29 04:52:31,635 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=241852.0, ans=0.04949747468305833
+2024-07-29 04:52:35,256 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.11 vs. limit=15.0
+2024-07-29 04:52:43,230 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.006e+01 5.592e+01 6.086e+01 6.807e+01 8.936e+01, threshold=1.217e+02, percent-clipped=0.0
+2024-07-29 04:52:46,598 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=241878.66666666666, ans=0.2
+2024-07-29 04:52:51,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=241892.0, ans=0.025
+2024-07-29 04:52:52,308 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.93 vs. limit=15.0
+2024-07-29 04:52:55,131 INFO [train.py:1114] (3/4) Epoch 18, batch 7650, loss[loss=0.1729, simple_loss=0.25, pruned_loss=0.04791, over 4942.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2639, pruned_loss=0.04078, over 937090.72 frames. ], batch size: 12, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:53:10,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=241932.0, ans=0.125
+2024-07-29 04:53:14,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=241932.0, ans=0.0
+2024-07-29 04:53:19,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=241945.33333333334, ans=0.125
+2024-07-29 04:53:27,660 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=241958.66666666666, ans=0.0
+2024-07-29 04:53:28,758 INFO [train.py:1114] (3/4) Epoch 18, batch 7700, loss[loss=0.167, simple_loss=0.2624, pruned_loss=0.03586, over 4688.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2639, pruned_loss=0.04104, over 933884.68 frames. ], batch size: 13, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:53:28,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=241972.0, ans=0.125
+2024-07-29 04:53:31,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.30 vs. limit=15.0
+2024-07-29 04:53:48,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=241998.66666666666, ans=0.0
+2024-07-29 04:53:52,078 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.452e+01 5.527e+01 6.014e+01 6.715e+01 9.821e+01, threshold=1.203e+02, percent-clipped=0.0
+2024-07-29 04:53:56,892 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.05 vs. limit=12.0
+2024-07-29 04:54:03,546 INFO [train.py:1114] (3/4) Epoch 18, batch 7750, loss[loss=0.1899, simple_loss=0.2905, pruned_loss=0.04462, over 4930.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2655, pruned_loss=0.0415, over 935464.55 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:54:04,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.70 vs. limit=15.0
+2024-07-29 04:54:06,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=242038.66666666666, ans=0.0
+2024-07-29 04:54:07,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=242038.66666666666, ans=0.125
+2024-07-29 04:54:14,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=242052.0, ans=0.2
+2024-07-29 04:54:19,332 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=242065.33333333334, ans=0.125
+2024-07-29 04:54:34,016 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242092.0, ans=0.1
+2024-07-29 04:54:37,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242092.0, ans=0.125
+2024-07-29 04:54:39,129 INFO [train.py:1114] (3/4) Epoch 18, batch 7800, loss[loss=0.1718, simple_loss=0.2729, pruned_loss=0.03531, over 4666.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2664, pruned_loss=0.04189, over 937215.61 frames. ], batch size: 14, lr: 4.11e-03, grad_scale: 32.0
+2024-07-29 04:54:48,317 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=242118.66666666666, ans=0.125
+2024-07-29 04:54:49,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=242118.66666666666, ans=0.0
+2024-07-29 04:54:55,171 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.52 vs. limit=22.5
+2024-07-29 04:55:00,039 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.773e+01 5.583e+01 6.063e+01 6.593e+01 8.807e+01, threshold=1.213e+02, percent-clipped=0.0
+2024-07-29 04:55:00,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=242145.33333333334, ans=0.125
+2024-07-29 04:55:02,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242145.33333333334, ans=0.1
+2024-07-29 04:55:14,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=242158.66666666666, ans=0.0
+2024-07-29 04:55:28,053 INFO [train.py:1114] (3/4) Epoch 18, batch 7850, loss[loss=0.1882, simple_loss=0.2561, pruned_loss=0.06015, over 4523.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2663, pruned_loss=0.04183, over 936519.74 frames. ], batch size: 10, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:55:43,453 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=242172.0, ans=0.5
+2024-07-29 04:55:50,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242172.0, ans=0.1
+2024-07-29 04:55:50,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=242172.0, ans=0.125
+2024-07-29 04:56:10,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=242212.0, ans=0.0
+2024-07-29 04:56:18,461 INFO [train.py:1114] (3/4) Epoch 18, batch 7900, loss[loss=0.1893, simple_loss=0.2857, pruned_loss=0.04645, over 4869.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2674, pruned_loss=0.04225, over 932963.24 frames. ], batch size: 14, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:56:28,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242252.0, ans=0.125
+2024-07-29 04:56:34,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.13 vs. limit=22.5
+2024-07-29 04:56:37,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=242278.66666666666, ans=0.2
+2024-07-29 04:56:39,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.700e+01 6.249e+01 7.197e+01 1.145e+02, threshold=1.250e+02, percent-clipped=0.0
+2024-07-29 04:56:44,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=242292.0, ans=0.125
+2024-07-29 04:56:50,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=242305.33333333334, ans=0.0
+2024-07-29 04:56:51,092 INFO [train.py:1114] (3/4) Epoch 18, batch 7950, loss[loss=0.19, simple_loss=0.2721, pruned_loss=0.05391, over 3326.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2667, pruned_loss=0.04168, over 935454.03 frames. ], batch size: 35, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:56:52,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=242305.33333333334, ans=0.0
+2024-07-29 04:56:55,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=242305.33333333334, ans=0.2
+2024-07-29 04:56:59,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=242318.66666666666, ans=0.125
+2024-07-29 04:57:17,766 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.81 vs. limit=15.0
+2024-07-29 04:57:24,042 INFO [train.py:1114] (3/4) Epoch 18, batch 8000, loss[loss=0.1741, simple_loss=0.2545, pruned_loss=0.04687, over 4619.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04134, over 935069.55 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:57:26,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242372.0, ans=0.125
+2024-07-29 04:57:32,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer_ff3.min_abs, batch_count=242385.33333333334, ans=0.2
+2024-07-29 04:57:43,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.30 vs. limit=15.0
+2024-07-29 04:57:45,065 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.556e+01 6.379e+01 7.313e+01 1.044e+02, threshold=1.276e+02, percent-clipped=0.0
+2024-07-29 04:57:51,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=242425.33333333334, ans=0.0
+2024-07-29 04:57:56,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=242425.33333333334, ans=0.2
+2024-07-29 04:57:58,321 INFO [train.py:1114] (3/4) Epoch 18, batch 8050, loss[loss=0.1465, simple_loss=0.2494, pruned_loss=0.02179, over 4810.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2649, pruned_loss=0.04152, over 934803.05 frames. ], batch size: 14, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:58:00,623 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.01 vs. limit=15.0
+2024-07-29 04:58:03,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.28 vs. limit=15.0
+2024-07-29 04:58:21,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=242478.66666666666, ans=0.07
+2024-07-29 04:58:21,353 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 04:58:25,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=242492.0, ans=0.125
+2024-07-29 04:58:30,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=242492.0, ans=0.125
+2024-07-29 04:58:31,320 INFO [train.py:1114] (3/4) Epoch 18, batch 8100, loss[loss=0.2144, simple_loss=0.3035, pruned_loss=0.06272, over 4797.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.266, pruned_loss=0.04163, over 934263.81 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:58:31,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242505.33333333334, ans=0.125
+2024-07-29 04:58:33,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=242505.33333333334, ans=0.025
+2024-07-29 04:58:45,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=242532.0, ans=0.125
+2024-07-29 04:58:48,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=242532.0, ans=0.0
+2024-07-29 04:58:52,548 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.503e+01 5.727e+01 6.609e+01 7.504e+01 1.146e+02, threshold=1.322e+02, percent-clipped=0.0
+2024-07-29 04:58:56,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.37 vs. limit=15.0
+2024-07-29 04:58:58,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=242558.66666666666, ans=0.2
+2024-07-29 04:59:04,413 INFO [train.py:1114] (3/4) Epoch 18, batch 8150, loss[loss=0.193, simple_loss=0.288, pruned_loss=0.04899, over 4795.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2653, pruned_loss=0.04138, over 937564.29 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:59:07,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=242572.0, ans=0.2
+2024-07-29 04:59:23,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242612.0, ans=0.125
+2024-07-29 04:59:41,327 INFO [train.py:1114] (3/4) Epoch 18, batch 8200, loss[loss=0.1814, simple_loss=0.2789, pruned_loss=0.04192, over 4798.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2657, pruned_loss=0.04107, over 938628.36 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 04:59:45,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=242638.66666666666, ans=0.125
+2024-07-29 05:00:01,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=242652.0, ans=0.125
+2024-07-29 05:00:04,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=242665.33333333334, ans=0.0
+2024-07-29 05:00:13,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242665.33333333334, ans=0.1
+2024-07-29 05:00:18,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=242678.66666666666, ans=0.0
+2024-07-29 05:00:20,497 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.660e+01 6.350e+01 7.311e+01 1.182e+02, threshold=1.270e+02, percent-clipped=0.0
+2024-07-29 05:00:24,814 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242678.66666666666, ans=0.1
+2024-07-29 05:00:27,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=242692.0, ans=0.0
+2024-07-29 05:00:28,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten.whitening_limit, batch_count=242692.0, ans=15.0
+2024-07-29 05:00:33,416 INFO [train.py:1114] (3/4) Epoch 18, batch 8250, loss[loss=0.1753, simple_loss=0.2785, pruned_loss=0.03603, over 4889.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2652, pruned_loss=0.04084, over 938713.30 frames. ], batch size: 13, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:00:38,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=242705.33333333334, ans=0.125
+2024-07-29 05:00:42,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242718.66666666666, ans=0.125
+2024-07-29 05:00:46,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=242732.0, ans=0.025
+2024-07-29 05:00:49,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=242732.0, ans=0.125
+2024-07-29 05:01:00,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=242758.66666666666, ans=0.125
+2024-07-29 05:01:03,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242758.66666666666, ans=0.1
+2024-07-29 05:01:06,377 INFO [train.py:1114] (3/4) Epoch 18, batch 8300, loss[loss=0.168, simple_loss=0.2668, pruned_loss=0.03456, over 4907.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2668, pruned_loss=0.04174, over 938343.74 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:01:14,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=242785.33333333334, ans=0.0
+2024-07-29 05:01:26,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=242812.0, ans=0.2
+2024-07-29 05:01:26,871 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.844e+01 5.743e+01 6.319e+01 7.194e+01 1.218e+02, threshold=1.264e+02, percent-clipped=0.0
+2024-07-29 05:01:34,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=242825.33333333334, ans=0.1
+2024-07-29 05:01:34,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.scale_min, batch_count=242825.33333333334, ans=0.2
+2024-07-29 05:01:36,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=242825.33333333334, ans=0.125
+2024-07-29 05:01:38,533 INFO [train.py:1114] (3/4) Epoch 18, batch 8350, loss[loss=0.1534, simple_loss=0.2439, pruned_loss=0.0315, over 4810.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2656, pruned_loss=0.04121, over 941293.29 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:01:53,173 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.68 vs. limit=15.0
+2024-07-29 05:01:56,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0
+2024-07-29 05:02:01,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=242865.33333333334, ans=0.0
+2024-07-29 05:02:10,896 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=242878.66666666666, ans=0.0
+2024-07-29 05:02:18,748 INFO [train.py:1114] (3/4) Epoch 18, batch 8400, loss[loss=0.1416, simple_loss=0.2364, pruned_loss=0.02338, over 4781.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2653, pruned_loss=0.04121, over 939890.41 frames. ], batch size: 12, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:02:37,768 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=242945.33333333334, ans=0.0
+2024-07-29 05:02:39,517 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.393e+01 5.882e+01 6.556e+01 7.323e+01 1.088e+02, threshold=1.311e+02, percent-clipped=0.0
+2024-07-29 05:02:41,086 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.86 vs. limit=6.0
+2024-07-29 05:02:49,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=242958.66666666666, ans=0.2
+2024-07-29 05:02:50,068 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.83 vs. limit=15.0
+2024-07-29 05:02:51,083 INFO [train.py:1114] (3/4) Epoch 18, batch 8450, loss[loss=0.1967, simple_loss=0.2899, pruned_loss=0.05171, over 4801.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.266, pruned_loss=0.04138, over 938884.66 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:02:57,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=242985.33333333334, ans=0.125
+2024-07-29 05:03:01,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.71 vs. limit=6.0
+2024-07-29 05:03:01,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=242985.33333333334, ans=0.1
+2024-07-29 05:03:30,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=242998.66666666666, ans=0.07
+2024-07-29 05:03:32,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=243012.0, ans=0.2
+2024-07-29 05:03:34,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243012.0, ans=0.125
+2024-07-29 05:03:39,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=243025.33333333334, ans=0.0
+2024-07-29 05:03:44,953 INFO [train.py:1114] (3/4) Epoch 18, batch 8500, loss[loss=0.1464, simple_loss=0.2314, pruned_loss=0.03069, over 4598.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2658, pruned_loss=0.04153, over 939068.48 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:03:46,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243038.66666666666, ans=0.0
+2024-07-29 05:03:49,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.min_positive, batch_count=243038.66666666666, ans=0.05
+2024-07-29 05:03:56,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=243052.0, ans=0.125
+2024-07-29 05:03:59,157 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.99 vs. limit=22.5
+2024-07-29 05:04:00,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243065.33333333334, ans=0.1
+2024-07-29 05:04:06,898 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=243078.66666666666, ans=0.0
+2024-07-29 05:04:07,965 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.366e+01 5.501e+01 6.348e+01 7.091e+01 9.836e+01, threshold=1.270e+02, percent-clipped=0.0
+2024-07-29 05:04:10,442 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.76 vs. limit=6.0
+2024-07-29 05:04:19,721 INFO [train.py:1114] (3/4) Epoch 18, batch 8550, loss[loss=0.1658, simple_loss=0.2433, pruned_loss=0.04414, over 4808.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.266, pruned_loss=0.04216, over 939870.18 frames. ], batch size: 11, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:04:28,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=243118.66666666666, ans=0.2
+2024-07-29 05:04:30,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=243118.66666666666, ans=0.125
+2024-07-29 05:04:31,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=243118.66666666666, ans=0.1
+2024-07-29 05:04:35,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243132.0, ans=0.125
+2024-07-29 05:04:47,481 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=10.30 vs. limit=15.0
+2024-07-29 05:04:47,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=243158.66666666666, ans=0.0
+2024-07-29 05:04:52,780 INFO [train.py:1114] (3/4) Epoch 18, batch 8600, loss[loss=0.19, simple_loss=0.2865, pruned_loss=0.04681, over 4809.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2653, pruned_loss=0.04144, over 939307.22 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:05:06,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer_na.min_abs, batch_count=243198.66666666666, ans=0.02
+2024-07-29 05:05:10,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.45 vs. limit=8.0
+2024-07-29 05:05:13,978 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.872e+01 5.639e+01 6.288e+01 7.210e+01 1.078e+02, threshold=1.258e+02, percent-clipped=0.0
+2024-07-29 05:05:21,206 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=243225.33333333334, ans=0.125
+2024-07-29 05:05:23,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243225.33333333334, ans=0.0
+2024-07-29 05:05:25,406 INFO [train.py:1114] (3/4) Epoch 18, batch 8650, loss[loss=0.182, simple_loss=0.2848, pruned_loss=0.03956, over 4916.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.265, pruned_loss=0.04112, over 940437.97 frames. ], batch size: 15, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:05:30,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=243238.66666666666, ans=0.125
+2024-07-29 05:05:48,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=243278.66666666666, ans=0.0
+2024-07-29 05:05:54,229 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.55 vs. limit=15.0
+2024-07-29 05:05:57,721 INFO [train.py:1114] (3/4) Epoch 18, batch 8700, loss[loss=0.1736, simple_loss=0.2709, pruned_loss=0.03814, over 4766.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2661, pruned_loss=0.04146, over 937694.46 frames. ], batch size: 13, lr: 4.10e-03, grad_scale: 32.0
+2024-07-29 05:06:06,352 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.62 vs. limit=12.0
+2024-07-29 05:06:14,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=243332.0, ans=0.2
+2024-07-29 05:06:15,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=243332.0, ans=0.0
+2024-07-29 05:06:17,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=9.66 vs. limit=15.0
+2024-07-29 05:06:18,234 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.815e+01 5.797e+01 6.169e+01 6.761e+01 9.579e+01, threshold=1.234e+02, percent-clipped=0.0
+2024-07-29 05:06:18,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.14 vs. limit=12.0
+2024-07-29 05:06:20,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243345.33333333334, ans=0.125
+2024-07-29 05:06:20,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=243345.33333333334, ans=0.125
+2024-07-29 05:06:21,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=243345.33333333334, ans=0.2
+2024-07-29 05:06:29,978 INFO [train.py:1114] (3/4) Epoch 18, batch 8750, loss[loss=0.1775, simple_loss=0.2765, pruned_loss=0.03923, over 4688.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.266, pruned_loss=0.04185, over 936564.41 frames. ], batch size: 15, lr: 4.09e-03, grad_scale: 32.0
+2024-07-29 05:06:34,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=243372.0, ans=0.2
+2024-07-29 05:06:35,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=243372.0, ans=0.0
+2024-07-29 05:06:43,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=243398.66666666666, ans=0.0
+2024-07-29 05:06:52,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=243412.0, ans=0.2
+2024-07-29 05:07:05,112 INFO [train.py:1114] (3/4) Epoch 18, batch 8800, loss[loss=0.1818, simple_loss=0.2777, pruned_loss=0.04296, over 4935.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2666, pruned_loss=0.04186, over 937709.21 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 32.0
+2024-07-29 05:07:08,262 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.17 vs. limit=15.0
+2024-07-29 05:07:08,356 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=12.85 vs. limit=15.0
+2024-07-29 05:07:14,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=243452.0, ans=0.0
+2024-07-29 05:07:18,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_positive, batch_count=243465.33333333334, ans=0.05
+2024-07-29 05:07:22,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=243465.33333333334, ans=0.125
+2024-07-29 05:07:24,969 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=243478.66666666666, ans=0.125
+2024-07-29 05:07:26,775 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.698e+01 5.641e+01 6.387e+01 7.548e+01 9.629e+01, threshold=1.277e+02, percent-clipped=0.0
+2024-07-29 05:07:28,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=243478.66666666666, ans=0.125
+2024-07-29 05:07:35,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=243492.0, ans=0.125
+2024-07-29 05:07:38,886 INFO [train.py:1114] (3/4) Epoch 18, batch 8850, loss[loss=0.2076, simple_loss=0.3032, pruned_loss=0.05597, over 4523.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2664, pruned_loss=0.04232, over 931635.78 frames. ], batch size: 21, lr: 4.09e-03, grad_scale: 32.0
+2024-07-29 05:07:41,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=15.47 vs. limit=15.0
+2024-07-29 05:07:41,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243505.33333333334, ans=0.1
+2024-07-29 05:07:45,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=243518.66666666666, ans=0.0
+2024-07-29 05:07:50,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=243518.66666666666, ans=0.0
+2024-07-29 05:07:52,935 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.76 vs. limit=15.0
+2024-07-29 05:07:59,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=243545.33333333334, ans=0.125
+2024-07-29 05:08:07,522 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=243558.66666666666, ans=0.0
+2024-07-29 05:08:12,257 INFO [train.py:1114] (3/4) Epoch 18, batch 8900, loss[loss=0.1607, simple_loss=0.2428, pruned_loss=0.03927, over 4939.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2662, pruned_loss=0.04235, over 930010.81 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0
+2024-07-29 05:08:14,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=243572.0, ans=0.025
+2024-07-29 05:08:29,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=243598.66666666666, ans=0.025
+2024-07-29 05:08:32,777 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.616e+01 5.619e+01 6.277e+01 7.423e+01 9.938e+01, threshold=1.255e+02, percent-clipped=0.0
+2024-07-29 05:08:35,552 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=243612.0, ans=0.125
+2024-07-29 05:08:38,771 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=243625.33333333334, ans=0.125
+2024-07-29 05:08:44,475 INFO [train.py:1114] (3/4) Epoch 18, batch 8950, loss[loss=0.1732, simple_loss=0.2717, pruned_loss=0.03737, over 4536.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2657, pruned_loss=0.04212, over 931218.85 frames. ], batch size: 21, lr: 4.09e-03, grad_scale: 32.0
+2024-07-29 05:08:44,905 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.77 vs. limit=22.5
+2024-07-29 05:08:45,435 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.50 vs. limit=15.0
+2024-07-29 05:08:53,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=243652.0, ans=0.2
+2024-07-29 05:09:02,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243665.33333333334, ans=0.1
+2024-07-29 05:09:07,930 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=243678.66666666666, ans=0.0
+2024-07-29 05:09:14,400 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=243692.0, ans=0.125
+2024-07-29 05:09:18,124 INFO [train.py:1114] (3/4) Epoch 18, batch 9000, loss[loss=0.1508, simple_loss=0.2402, pruned_loss=0.03072, over 4642.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2644, pruned_loss=0.04154, over 934325.36 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0
+2024-07-29 05:09:18,125 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-29 05:09:28,505 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([2.7337, 4.1796, 4.7738, 3.7927], device='cuda:3')
+2024-07-29 05:09:29,958 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.3.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([1.1376, 3.1059, 2.1762, 3.3032, 2.9386, 3.0278, 3.5212, 3.3681],
+ device='cuda:3')
+2024-07-29 05:09:33,029 INFO [train.py:1146] (3/4) Epoch 18, validation: loss=0.1616, simple_loss=0.2637, pruned_loss=0.02971, over 944034.00 frames.
+2024-07-29 05:09:33,030 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 05:09:39,980 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=243718.66666666666, ans=0.125 +2024-07-29 05:09:46,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=243732.0, ans=0.125 +2024-07-29 05:09:48,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=243732.0, ans=0.0 +2024-07-29 05:09:50,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=243732.0, ans=0.125 +2024-07-29 05:09:54,499 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.133e+01 5.845e+01 6.498e+01 7.420e+01 1.015e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 05:09:56,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.86 vs. limit=22.5 +2024-07-29 05:09:58,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=243745.33333333334, ans=0.0 +2024-07-29 05:10:01,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=243758.66666666666, ans=0.125 +2024-07-29 05:10:05,372 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243772.0, ans=0.125 +2024-07-29 05:10:05,953 INFO [train.py:1114] (3/4) Epoch 18, batch 9050, loss[loss=0.1369, simple_loss=0.2193, pruned_loss=0.02724, over 4573.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2632, pruned_loss=0.04117, over 934542.60 frames. ], batch size: 10, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:18,918 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=10.81 vs. limit=15.0 +2024-07-29 05:10:19,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243798.66666666666, ans=0.0 +2024-07-29 05:10:27,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=243812.0, ans=0.1 +2024-07-29 05:10:37,840 INFO [train.py:1114] (3/4) Epoch 18, batch 9100, loss[loss=0.1986, simple_loss=0.2989, pruned_loss=0.04913, over 4929.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2638, pruned_loss=0.04137, over 936844.71 frames. 
], batch size: 14, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:10:39,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=243838.66666666666, ans=0.2 +2024-07-29 05:10:56,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=243878.66666666666, ans=0.125 +2024-07-29 05:10:58,034 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.927e+01 6.725e+01 7.788e+01 1.053e+02, threshold=1.345e+02, percent-clipped=0.0 +2024-07-29 05:11:02,434 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=243892.0, ans=0.125 +2024-07-29 05:11:06,082 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.38 vs. limit=15.0 +2024-07-29 05:11:07,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=243892.0, ans=0.025 +2024-07-29 05:11:09,473 INFO [train.py:1114] (3/4) Epoch 18, batch 9150, loss[loss=0.1823, simple_loss=0.2736, pruned_loss=0.0455, over 4813.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2649, pruned_loss=0.04145, over 935290.81 frames. ], batch size: 14, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:18,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=243918.66666666666, ans=0.2 +2024-07-29 05:11:25,822 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.84 vs. limit=15.0 +2024-07-29 05:11:31,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=243945.33333333334, ans=0.1 +2024-07-29 05:11:33,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=243945.33333333334, ans=0.125 +2024-07-29 05:11:38,017 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=243958.66666666666, ans=0.125 +2024-07-29 05:11:42,339 INFO [train.py:1114] (3/4) Epoch 18, batch 9200, loss[loss=0.1624, simple_loss=0.2537, pruned_loss=0.03558, over 4864.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2649, pruned_loss=0.04152, over 937224.05 frames. 
], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:11:44,947 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=243972.0, ans=0.0 +2024-07-29 05:11:48,588 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:11:50,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=243985.33333333334, ans=0.0 +2024-07-29 05:11:55,235 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=243998.66666666666, ans=0.2 +2024-07-29 05:12:03,091 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.715e+01 5.677e+01 6.144e+01 6.790e+01 1.037e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 05:12:04,701 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=244012.0, ans=0.125 +2024-07-29 05:12:11,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=244025.33333333334, ans=0.0 +2024-07-29 05:12:14,994 INFO [train.py:1114] (3/4) Epoch 18, batch 9250, loss[loss=0.1837, simple_loss=0.2874, pruned_loss=0.04002, over 4641.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2647, pruned_loss=0.04158, over 938689.42 frames. ], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:26,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=244065.33333333334, ans=0.0 +2024-07-29 05:12:28,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=244065.33333333334, ans=0.125 +2024-07-29 05:12:35,593 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.41 vs. limit=12.0 +2024-07-29 05:12:36,497 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244078.66666666666, ans=0.125 +2024-07-29 05:12:47,060 INFO [train.py:1114] (3/4) Epoch 18, batch 9300, loss[loss=0.1344, simple_loss=0.2221, pruned_loss=0.02333, over 4769.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2646, pruned_loss=0.0416, over 938324.30 frames. 
], batch size: 12, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:12:47,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=244105.33333333334, ans=0.125 +2024-07-29 05:12:57,191 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_ff2.min_abs, batch_count=244118.66666666666, ans=0.1 +2024-07-29 05:12:57,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=244118.66666666666, ans=0.5 +2024-07-29 05:13:06,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244145.33333333334, ans=0.1 +2024-07-29 05:13:07,075 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.497e+01 5.598e+01 6.030e+01 6.861e+01 1.072e+02, threshold=1.206e+02, percent-clipped=0.0 +2024-07-29 05:13:07,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=244145.33333333334, ans=0.125 +2024-07-29 05:13:10,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=244145.33333333334, ans=0.125 +2024-07-29 05:13:11,833 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=244145.33333333334, ans=0.0 +2024-07-29 05:13:19,384 INFO [train.py:1114] (3/4) Epoch 18, batch 9350, loss[loss=0.1456, simple_loss=0.2276, pruned_loss=0.03181, over 4813.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2645, pruned_loss=0.04139, over 935447.64 frames. ], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:20,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=244172.0, ans=0.125 +2024-07-29 05:13:25,625 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244172.0, ans=0.0 +2024-07-29 05:13:27,533 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=244185.33333333334, ans=0.0 +2024-07-29 05:13:31,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=244185.33333333334, ans=0.0 +2024-07-29 05:13:32,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244185.33333333334, ans=0.1 +2024-07-29 05:13:40,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=244212.0, ans=0.0 +2024-07-29 05:13:41,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=244212.0, ans=0.125 +2024-07-29 05:13:41,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=244212.0, ans=0.0 +2024-07-29 05:13:43,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=244212.0, ans=10.0 +2024-07-29 05:13:44,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=244212.0, ans=0.04949747468305833 +2024-07-29 05:13:44,975 INFO [scaling.py:214] (3/4) ScheduledFloat: 
name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=244212.0, ans=0.125 +2024-07-29 05:13:49,895 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=244225.33333333334, ans=0.2 +2024-07-29 05:13:52,877 INFO [train.py:1114] (3/4) Epoch 18, batch 9400, loss[loss=0.1776, simple_loss=0.2679, pruned_loss=0.04361, over 4697.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2639, pruned_loss=0.04185, over 933304.06 frames. ], batch size: 13, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:13:53,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=244238.66666666666, ans=0.125 +2024-07-29 05:13:58,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=244238.66666666666, ans=0.125 +2024-07-29 05:14:00,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=244252.0, ans=0.0 +2024-07-29 05:14:01,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=244252.0, ans=0.025 +2024-07-29 05:14:03,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=244252.0, ans=0.025 +2024-07-29 05:14:14,129 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.767e+01 5.480e+01 6.250e+01 7.248e+01 1.054e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 05:14:15,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=244278.66666666666, ans=0.125 +2024-07-29 05:14:23,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244292.0, ans=0.1 +2024-07-29 05:14:24,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=244305.33333333334, ans=0.2 +2024-07-29 05:14:25,263 INFO [train.py:1114] (3/4) Epoch 18, batch 9450, loss[loss=0.148, simple_loss=0.2322, pruned_loss=0.03196, over 4805.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2644, pruned_loss=0.04136, over 932428.36 frames. ], batch size: 11, lr: 4.09e-03, grad_scale: 64.0 +2024-07-29 05:14:31,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244318.66666666666, ans=0.0 +2024-07-29 05:14:38,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=244332.0, ans=0.125 +2024-07-29 05:14:39,316 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=13.95 vs. limit=22.5 +2024-07-29 05:14:56,515 INFO [train.py:1114] (3/4) Epoch 18, batch 9500, loss[loss=0.1838, simple_loss=0.2714, pruned_loss=0.04812, over 4696.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2651, pruned_loss=0.04164, over 934767.99 frames. 
], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:14:56,563 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244372.0, ans=0.0 +2024-07-29 05:14:57,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=244372.0, ans=0.125 +2024-07-29 05:15:05,575 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=10.60 vs. limit=15.0 +2024-07-29 05:15:11,209 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.32 vs. limit=15.0 +2024-07-29 05:15:17,054 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.953e+01 5.635e+01 6.260e+01 7.098e+01 9.795e+01, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 05:15:27,757 INFO [train.py:1114] (3/4) Epoch 18, batch 9550, loss[loss=0.1637, simple_loss=0.2654, pruned_loss=0.03101, over 4769.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2659, pruned_loss=0.04197, over 932171.29 frames. ], batch size: 12, lr: 4.09e-03, grad_scale: 32.0 +2024-07-29 05:15:32,054 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:15:39,052 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244452.0, ans=0.0 +2024-07-29 05:15:50,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=244478.66666666666, ans=0.125 +2024-07-29 05:15:51,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.88 vs. limit=6.0 +2024-07-29 05:15:53,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=244492.0, ans=0.125 +2024-07-29 05:15:56,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244492.0, ans=0.0 +2024-07-29 05:16:00,335 INFO [train.py:1114] (3/4) Epoch 18, batch 9600, loss[loss=0.2508, simple_loss=0.3189, pruned_loss=0.0913, over 3136.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2659, pruned_loss=0.04205, over 930946.13 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:02,508 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=244505.33333333334, ans=0.0 +2024-07-29 05:16:03,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244505.33333333334, ans=0.125 +2024-07-29 05:16:04,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.23 vs. 
limit=15.0 +2024-07-29 05:16:05,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=244505.33333333334, ans=0.2 +2024-07-29 05:16:07,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244518.66666666666, ans=0.1 +2024-07-29 05:16:08,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=244518.66666666666, ans=0.0 +2024-07-29 05:16:14,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=244532.0, ans=0.0 +2024-07-29 05:16:21,032 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.566e+01 5.711e+01 6.305e+01 6.902e+01 1.149e+02, threshold=1.261e+02, percent-clipped=0.0 +2024-07-29 05:16:28,568 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=244558.66666666666, ans=0.1 +2024-07-29 05:16:31,668 INFO [train.py:1114] (3/4) Epoch 18, batch 9650, loss[loss=0.1887, simple_loss=0.2847, pruned_loss=0.04638, over 4831.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2664, pruned_loss=0.04225, over 926402.01 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:16:52,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244612.0, ans=0.1 +2024-07-29 05:17:03,160 INFO [train.py:1114] (3/4) Epoch 18, batch 9700, loss[loss=0.1788, simple_loss=0.2699, pruned_loss=0.04392, over 4341.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2666, pruned_loss=0.04243, over 924949.45 frames. ], batch size: 26, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:17:03,853 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=244638.66666666666, ans=0.125 +2024-07-29 05:17:05,677 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:17:05,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.min_positive, batch_count=244638.66666666666, ans=0.025 +2024-07-29 05:17:16,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=244665.33333333334, ans=0.125 +2024-07-29 05:17:18,530 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.39 vs. limit=15.0 +2024-07-29 05:17:23,685 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.541e+01 5.681e+01 6.275e+01 7.162e+01 1.082e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:17:29,114 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=11.75 vs. limit=15.0 +2024-07-29 05:17:34,254 INFO [train.py:1114] (3/4) Epoch 18, batch 9750, loss[loss=0.1652, simple_loss=0.2507, pruned_loss=0.0398, over 4690.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2666, pruned_loss=0.04258, over 925024.20 frames. 
], batch size: 15, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:17:34,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=244705.33333333334, ans=0.5 +2024-07-29 05:17:34,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=244705.33333333334, ans=0.04949747468305833 +2024-07-29 05:17:39,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244705.33333333334, ans=0.125 +2024-07-29 05:17:42,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.82 vs. limit=12.0 +2024-07-29 05:17:42,251 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.53 vs. limit=15.0 +2024-07-29 05:17:42,867 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.11 vs. limit=22.5 +2024-07-29 05:17:42,967 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.83 vs. limit=15.0 +2024-07-29 05:17:46,836 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=244732.0, ans=10.0 +2024-07-29 05:17:50,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=244732.0, ans=0.125 +2024-07-29 05:17:58,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=244745.33333333334, ans=0.0 +2024-07-29 05:18:06,041 INFO [train.py:1114] (3/4) Epoch 18, batch 9800, loss[loss=0.1607, simple_loss=0.2488, pruned_loss=0.03631, over 4701.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2655, pruned_loss=0.04188, over 924682.25 frames. ], batch size: 12, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:18:14,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.85 vs. limit=15.0 +2024-07-29 05:18:14,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=244785.33333333334, ans=0.125 +2024-07-29 05:18:16,794 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.88 vs. limit=15.0 +2024-07-29 05:18:25,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=244812.0, ans=0.025 +2024-07-29 05:18:26,929 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.929e+01 5.797e+01 6.276e+01 7.162e+01 9.479e+01, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 05:18:36,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=244825.33333333334, ans=0.1 +2024-07-29 05:18:38,928 INFO [train.py:1114] (3/4) Epoch 18, batch 9850, loss[loss=0.1973, simple_loss=0.2754, pruned_loss=0.05957, over 4908.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2659, pruned_loss=0.04232, over 927122.93 frames. 
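The `scaling.py:214` lines track regularisation hyper-parameters (skip rates, balancer probabilities, dropout) that are not fixed constants but are scheduled on `batch_count`. A minimal sketch of a piecewise-linear schedule in that spirit; the real `ScheduledFloat` lives in icefall's `scaling.py`, so this is an assumed reconstruction of the behaviour being logged, not the actual class:

```python
from bisect import bisect_right

def scheduled_float(schedule, batch_count):
    """Piecewise-linear schedule: `schedule` is a sorted list of
    (batch_count, value) breakpoints; values are interpolated between
    breakpoints and clamped at the ends."""
    xs = [x for x, _ in schedule]
    i = bisect_right(xs, batch_count)
    if i == 0:
        return schedule[0][1]
    if i == len(schedule):
        return schedule[-1][1]
    (x0, y0), (x1, y1) = schedule[i - 1], schedule[i]
    t = (batch_count - x0) / (x1 - x0)
    return y0 + t * (y1 - y0)

# e.g. a rate that decays from 0.3 to 0.1 over the first 20k batches
# is long since pinned at 0.1 by batch_count=244372:
print(scheduled_float([(0.0, 0.3), (20000.0, 0.1)], 244372.0))  # -> 0.1
```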
], batch size: 15, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:18:48,363 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=244852.0, ans=0.125 +2024-07-29 05:18:49,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.10 vs. limit=6.0 +2024-07-29 05:18:53,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=244865.33333333334, ans=0.125 +2024-07-29 05:18:56,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=244865.33333333334, ans=0.125 +2024-07-29 05:19:02,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=244878.66666666666, ans=0.125 +2024-07-29 05:19:08,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=244892.0, ans=0.125 +2024-07-29 05:19:09,935 INFO [train.py:1114] (3/4) Epoch 18, batch 9900, loss[loss=0.1789, simple_loss=0.2793, pruned_loss=0.0393, over 4836.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2663, pruned_loss=0.04217, over 926837.72 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:19:28,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=244932.0, ans=0.0 +2024-07-29 05:19:28,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=244932.0, ans=0.09899494936611666 +2024-07-29 05:19:30,946 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.784e+01 6.438e+01 7.578e+01 1.058e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 05:19:40,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=244958.66666666666, ans=0.125 +2024-07-29 05:19:41,554 INFO [train.py:1114] (3/4) Epoch 18, batch 9950, loss[loss=0.1632, simple_loss=0.25, pruned_loss=0.03826, over 4579.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2666, pruned_loss=0.04304, over 929024.41 frames. ], batch size: 10, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:19:41,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=244972.0, ans=0.125 +2024-07-29 05:19:46,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=244972.0, ans=0.125 +2024-07-29 05:19:46,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.59 vs. 
limit=6.0 +2024-07-29 05:20:02,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245012.0, ans=0.0 +2024-07-29 05:20:04,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=245012.0, ans=0.1 +2024-07-29 05:20:06,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245025.33333333334, ans=0.125 +2024-07-29 05:20:09,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=245025.33333333334, ans=0.125 +2024-07-29 05:20:12,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=245038.66666666666, ans=0.125 +2024-07-29 05:20:12,654 INFO [train.py:1114] (3/4) Epoch 18, batch 10000, loss[loss=0.162, simple_loss=0.2521, pruned_loss=0.03599, over 4622.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2689, pruned_loss=0.04391, over 926467.45 frames. ], batch size: 16, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:20:13,437 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:20:18,610 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.38 vs. limit=10.0 +2024-07-29 05:20:33,061 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.790e+01 5.781e+01 6.382e+01 8.189e+01 1.255e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 05:20:34,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten.whitening_limit, batch_count=245078.66666666666, ans=15.0 +2024-07-29 05:20:35,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.04 vs. limit=15.0 +2024-07-29 05:20:45,045 INFO [train.py:1114] (3/4) Epoch 18, batch 10050, loss[loss=0.2091, simple_loss=0.2774, pruned_loss=0.07041, over 3292.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2721, pruned_loss=0.04559, over 913802.28 frames. ], batch size: 36, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:21:10,075 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.55 vs. limit=15.0 +2024-07-29 05:21:14,524 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=245158.66666666666, ans=0.125 +2024-07-29 05:21:15,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=245158.66666666666, ans=0.125 +2024-07-29 05:21:19,140 INFO [train.py:1114] (3/4) Epoch 18, batch 10100, loss[loss=0.1729, simple_loss=0.2594, pruned_loss=0.04316, over 3173.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2758, pruned_loss=0.049, over 862339.85 frames. 
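Each `optim.py:487` warning summarises the recent distribution of gradient norms: five quartile boundaries (min / 25% / median / 75% / max), a clipping threshold, and the share of batches actually clipped. In every warning here the threshold equals `Clipping_scale` times the logged median (e.g. 2.0 x 6.260e+01 = 1.252e+02), so the bookkeeping plausibly looks like the sketch below; the threshold-from-median rule is inferred from the log, the rest is illustrative:

```python
import torch

def clipping_report(grad_norms, clipping_scale=2.0):
    """Summarise a window of recent per-batch gradient norms the way
    the optim.py:487 warnings do."""
    norms = torch.tensor(grad_norms)
    q = torch.quantile(norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()            # scale x median
    pct_clipped = 100.0 * (norms > threshold).float().mean().item()
    quartiles = " ".join(f"{v.item():.3e}" for v in q)
    print(f"Clipping_scale={clipping_scale}, grad-norm quartiles "
          f"{quartiles}, threshold={threshold:.3e}, "
          f"percent-clipped={pct_clipped}")

# reproduces the first warning in this excerpt:
clipping_report([49.53, 56.35, 62.60, 70.98, 97.95])
```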
], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:21:32,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=245198.66666666666, ans=0.0 +2024-07-29 05:21:32,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=9.69 vs. limit=15.0 +2024-07-29 05:21:36,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.30 vs. limit=10.0 +2024-07-29 05:21:41,090 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.138e+01 6.796e+01 7.277e+01 7.758e+01 1.071e+02, threshold=1.455e+02, percent-clipped=0.0 +2024-07-29 05:21:50,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245225.33333333334, ans=0.125 +2024-07-29 05:21:52,417 INFO [train.py:1114] (3/4) Epoch 18, batch 10150, loss[loss=0.2011, simple_loss=0.2949, pruned_loss=0.05368, over 3413.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2794, pruned_loss=0.05296, over 820264.97 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:21:55,559 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.71 vs. limit=15.0 +2024-07-29 05:21:58,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=245252.0, ans=0.125 +2024-07-29 05:22:01,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245252.0, ans=0.125 +2024-07-29 05:22:06,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=245265.33333333334, ans=0.125 +2024-07-29 05:22:08,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245265.33333333334, ans=0.125 +2024-07-29 05:22:09,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=245265.33333333334, ans=0.1 +2024-07-29 05:22:15,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=245278.66666666666, ans=0.0 +2024-07-29 05:22:24,101 INFO [train.py:1114] (3/4) Epoch 18, batch 10200, loss[loss=0.1888, simple_loss=0.2602, pruned_loss=0.05872, over 3410.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2827, pruned_loss=0.05596, over 788510.27 frames. ], batch size: 35, lr: 4.08e-03, grad_scale: 32.0 +2024-07-29 05:22:25,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=5.54 vs. 
limit=15.0 +2024-07-29 05:22:28,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245305.33333333334, ans=0.125 +2024-07-29 05:22:33,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=245318.66666666666, ans=0.125 +2024-07-29 05:22:36,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=245332.0, ans=0.0 +2024-07-29 05:22:40,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=14.28 vs. limit=15.0 +2024-07-29 05:24:06,503 INFO [train.py:1114] (3/4) Epoch 19, batch 0, loss[loss=0.1326, simple_loss=0.2268, pruned_loss=0.0192, over 4855.00 frames. ], tot_loss[loss=0.1326, simple_loss=0.2268, pruned_loss=0.0192, over 4855.00 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:24:06,503 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 05:24:11,378 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.0.layers.1.self_attn_weights, attn_weights_entropy = tensor([5.0709, 4.4493, 4.4102, 4.8761], device='cuda:3') +2024-07-29 05:24:18,360 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1627, simple_loss=0.2658, pruned_loss=0.02977, over 944034.00 frames. +2024-07-29 05:24:18,361 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 05:24:24,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=245349.33333333334, ans=0.125 +2024-07-29 05:24:25,126 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.889e+01 6.550e+01 7.036e+01 7.426e+01 9.937e+01, threshold=1.407e+02, percent-clipped=0.0 +2024-07-29 05:24:35,550 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.83 vs. limit=12.0 +2024-07-29 05:24:42,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.49 vs. limit=15.0 +2024-07-29 05:24:46,604 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.54 vs. limit=15.0 +2024-07-29 05:24:55,433 INFO [train.py:1114] (3/4) Epoch 19, batch 50, loss[loss=0.146, simple_loss=0.2289, pruned_loss=0.03158, over 4632.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2707, pruned_loss=0.04387, over 206486.31 frames. ], batch size: 11, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:24:58,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=245402.66666666666, ans=0.1 +2024-07-29 05:25:12,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=245429.33333333334, ans=0.125 +2024-07-29 05:25:14,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.min_positive, batch_count=245429.33333333334, ans=0.025 +2024-07-29 05:25:21,447 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.73 vs. 
limit=15.0 +2024-07-29 05:25:25,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245456.0, ans=0.125 +2024-07-29 05:25:29,106 INFO [train.py:1114] (3/4) Epoch 19, batch 100, loss[loss=0.1882, simple_loss=0.2685, pruned_loss=0.05398, over 4637.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.269, pruned_loss=0.04263, over 365573.38 frames. ], batch size: 12, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:25:33,304 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=245469.33333333334, ans=0.2 +2024-07-29 05:25:34,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=245469.33333333334, ans=0.0 +2024-07-29 05:25:35,846 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.533e+01 6.230e+01 7.043e+01 1.593e+02, threshold=1.246e+02, percent-clipped=1.0 +2024-07-29 05:25:36,619 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=245482.66666666666, ans=0.0 +2024-07-29 05:25:39,617 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.51 vs. limit=6.0 +2024-07-29 05:25:40,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=245482.66666666666, ans=0.07 +2024-07-29 05:25:43,569 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-07-29 05:25:44,842 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.46 vs. limit=10.0 +2024-07-29 05:25:47,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=245496.0, ans=0.025 +2024-07-29 05:25:51,612 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.53 vs. limit=15.0 +2024-07-29 05:25:52,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=245509.33333333334, ans=0.0 +2024-07-29 05:26:00,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=245522.66666666666, ans=0.125 +2024-07-29 05:26:02,309 INFO [train.py:1114] (3/4) Epoch 19, batch 150, loss[loss=0.1459, simple_loss=0.2293, pruned_loss=0.03121, over 4627.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2666, pruned_loss=0.04123, over 494151.61 frames. ], batch size: 11, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:26:13,713 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.27 vs. limit=15.0 +2024-07-29 05:26:17,931 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=15.74 vs. 
limit=22.5 +2024-07-29 05:26:20,196 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=245562.66666666666, ans=0.05 +2024-07-29 05:26:23,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=245576.0, ans=0.125 +2024-07-29 05:26:27,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=245576.0, ans=0.125 +2024-07-29 05:26:27,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=245576.0, ans=0.0 +2024-07-29 05:26:29,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=245589.33333333334, ans=0.125 +2024-07-29 05:26:35,604 INFO [train.py:1114] (3/4) Epoch 19, batch 200, loss[loss=0.1975, simple_loss=0.2874, pruned_loss=0.05378, over 4554.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2655, pruned_loss=0.04066, over 593582.32 frames. ], batch size: 21, lr: 3.97e-03, grad_scale: 32.0 +2024-07-29 05:26:37,026 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=245602.66666666666, ans=0.125 +2024-07-29 05:26:42,098 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.445e+01 5.609e+01 6.216e+01 6.903e+01 1.039e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 05:26:47,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=245616.0, ans=0.2 +2024-07-29 05:26:55,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=245642.66666666666, ans=0.0 +2024-07-29 05:27:03,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=245656.0, ans=0.125 +2024-07-29 05:27:06,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=245656.0, ans=0.125 +2024-07-29 05:27:08,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=245656.0, ans=0.125 +2024-07-29 05:27:10,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=245669.33333333334, ans=0.2 +2024-07-29 05:27:10,910 INFO [train.py:1114] (3/4) Epoch 19, batch 250, loss[loss=0.1683, simple_loss=0.2659, pruned_loss=0.03532, over 4631.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2643, pruned_loss=0.04036, over 670304.09 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:27:13,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=245669.33333333334, ans=0.0 +2024-07-29 05:27:18,701 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.10 vs. limit=15.0 +2024-07-29 05:27:22,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=8.00 vs. 
limit=10.0 +2024-07-29 05:27:22,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.00 vs. limit=22.5 +2024-07-29 05:27:29,581 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.82 vs. limit=22.5 +2024-07-29 05:27:44,367 INFO [train.py:1114] (3/4) Epoch 19, batch 300, loss[loss=0.1744, simple_loss=0.2746, pruned_loss=0.0371, over 4791.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2637, pruned_loss=0.04052, over 729931.92 frames. ], batch size: 15, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:27:51,012 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.537e+01 6.057e+01 6.917e+01 1.022e+02, threshold=1.211e+02, percent-clipped=0.0 +2024-07-29 05:27:53,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=245749.33333333334, ans=0.025 +2024-07-29 05:28:07,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=245776.0, ans=0.0 +2024-07-29 05:28:09,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.29 vs. limit=6.0 +2024-07-29 05:28:15,312 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=245789.33333333334, ans=0.0 +2024-07-29 05:28:17,797 INFO [train.py:1114] (3/4) Epoch 19, batch 350, loss[loss=0.1618, simple_loss=0.2487, pruned_loss=0.03745, over 4945.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2634, pruned_loss=0.04025, over 776107.72 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:28:28,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=245816.0, ans=0.125 +2024-07-29 05:28:35,137 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.11 vs. limit=12.0 +2024-07-29 05:28:39,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=245842.66666666666, ans=0.09899494936611666 +2024-07-29 05:28:43,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.84 vs. limit=6.0 +2024-07-29 05:28:51,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=245869.33333333334, ans=0.2 +2024-07-29 05:28:51,561 INFO [train.py:1114] (3/4) Epoch 19, batch 400, loss[loss=0.168, simple_loss=0.2637, pruned_loss=0.03615, over 4697.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03976, over 813522.39 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:28:55,498 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.51 vs. 
limit=15.0 +2024-07-29 05:28:58,548 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.341e+01 5.802e+01 6.594e+01 8.688e+01, threshold=1.160e+02, percent-clipped=0.0 +2024-07-29 05:29:06,094 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=245896.0, ans=0.0 +2024-07-29 05:29:16,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=245909.33333333334, ans=0.2 +2024-07-29 05:29:19,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=245909.33333333334, ans=0.07 +2024-07-29 05:29:21,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=245922.66666666666, ans=0.125 +2024-07-29 05:29:23,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff3_skip_rate, batch_count=245922.66666666666, ans=0.0 +2024-07-29 05:29:27,354 INFO [train.py:1114] (3/4) Epoch 19, batch 450, loss[loss=0.1909, simple_loss=0.2872, pruned_loss=0.04732, over 4637.00 frames. ], tot_loss[loss=0.172, simple_loss=0.263, pruned_loss=0.04052, over 839140.89 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:29:32,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=245936.0, ans=0.0 +2024-07-29 05:29:38,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn2.whiten.whitening_limit, batch_count=245949.33333333334, ans=22.5 +2024-07-29 05:29:38,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=245949.33333333334, ans=0.0 +2024-07-29 05:29:50,100 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:29:57,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=245989.33333333334, ans=0.2 +2024-07-29 05:30:00,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.min_abs, batch_count=245989.33333333334, ans=0.5 +2024-07-29 05:30:01,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.91 vs. limit=15.0 +2024-07-29 05:30:02,706 INFO [train.py:1114] (3/4) Epoch 19, batch 500, loss[loss=0.191, simple_loss=0.2786, pruned_loss=0.0517, over 4690.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2628, pruned_loss=0.04054, over 861528.72 frames. 
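The `scaling.py:1024` "Whitening" lines compare a statistic of a layer's activation covariance against a limit; the comparison is logged because a penalty only engages when the metric exceeds the limit, which is why nearly every line here reads `metric=... vs. limit=...` with the metric below. A plausible reconstruction of the metric, assumed in the spirit of icefall's `Whiten` module rather than copied from it:

```python
import torch

def whitening_metric(x, num_groups=1):
    """How far activations are from 'white' (isotropic covariance).
    For covariance eigenvalues l_i over c channels the metric is
        c * sum(l_i^2) / (sum(l_i))^2,
    which is 1.0 for a perfectly white signal and grows as the
    spectrum becomes more lopsided.
    x: (num_frames, num_channels), channels split into num_groups."""
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames
    eigs = torch.linalg.eigvalsh(cov)        # (num_groups, c_per_group)
    metric = (eigs.shape[-1] * (eigs ** 2).sum(dim=-1)
              / eigs.sum(dim=-1) ** 2).mean()
    return metric.item()

# white noise scores close to 1.0, far below limits like 15.0:
print(whitening_metric(torch.randn(4096, 256)))
```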
], batch size: 15, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:30:11,397 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.402e+01 5.590e+01 6.119e+01 6.735e+01 9.052e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 05:30:11,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246016.0, ans=0.1 +2024-07-29 05:30:17,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=246029.33333333334, ans=0.125 +2024-07-29 05:30:20,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=246029.33333333334, ans=0.125 +2024-07-29 05:30:25,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=246042.66666666666, ans=0.125 +2024-07-29 05:30:26,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=246042.66666666666, ans=0.125 +2024-07-29 05:30:27,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=246042.66666666666, ans=0.0 +2024-07-29 05:30:31,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=246056.0, ans=0.025 +2024-07-29 05:30:32,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:32,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246056.0, ans=0.1 +2024-07-29 05:30:34,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:34,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=246056.0, ans=0.125 +2024-07-29 05:30:38,076 INFO [train.py:1114] (3/4) Epoch 19, batch 550, loss[loss=0.208, simple_loss=0.3024, pruned_loss=0.05682, over 4559.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2636, pruned_loss=0.04038, over 877404.45 frames. ], batch size: 17, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:30:38,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246069.33333333334, ans=0.1 +2024-07-29 05:30:55,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=246096.0, ans=0.0 +2024-07-29 05:30:57,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=246109.33333333334, ans=0.0 +2024-07-29 05:31:05,791 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.80 vs. limit=6.0 +2024-07-29 05:31:09,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=246122.66666666666, ans=0.125 +2024-07-29 05:31:11,619 INFO [train.py:1114] (3/4) Epoch 19, batch 600, loss[loss=0.1939, simple_loss=0.2917, pruned_loss=0.04808, over 4637.00 frames. 
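The logged `lr` decays smoothly within an epoch (4.09e-03 down to 4.08e-03 late in epoch 18) and steps down again when epoch 19 begins (3.97e-03, then 3.96e-03), consistent with a schedule that decays in both batch count and epoch. A sketch assuming an Eden-style scheduler as used in icefall Zipformer recipes; the base rate and constants below are illustrative, not recovered from this run:

```python
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=6.0):
    """Eden-style learning rate: two smooth decay factors, one driven
    by batch count and one by epoch, multiplied into a base rate."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# with these illustrative constants, the same ballpark as the
# logged 4.09e-03 at batch_count ~244k in epoch 18:
print(f"{eden_lr(0.05, 244372, 18):.2e}")   # -> 4.02e-03
```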
], tot_loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.04098, over 892006.80 frames. ], batch size: 16, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:31:18,195 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.268e+01 5.519e+01 6.137e+01 7.010e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 05:31:22,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=246149.33333333334, ans=0.2 +2024-07-29 05:31:40,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246189.33333333334, ans=0.125 +2024-07-29 05:31:41,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=246189.33333333334, ans=0.0 +2024-07-29 05:31:44,108 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=246202.66666666666, ans=0.0 +2024-07-29 05:31:44,598 INFO [train.py:1114] (3/4) Epoch 19, batch 650, loss[loss=0.212, simple_loss=0.2953, pruned_loss=0.06434, over 4756.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2651, pruned_loss=0.04134, over 903764.40 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:31:55,768 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=10.22 vs. limit=15.0 +2024-07-29 05:32:10,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=246242.66666666666, ans=0.125 +2024-07-29 05:32:14,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=246256.0, ans=0.125 +2024-07-29 05:32:18,690 INFO [train.py:1114] (3/4) Epoch 19, batch 700, loss[loss=0.1469, simple_loss=0.2324, pruned_loss=0.03068, over 4657.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2654, pruned_loss=0.04094, over 911527.68 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:32:19,128 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.91 vs. 
limit=15.0 +2024-07-29 05:32:20,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer_ff2.min_abs, batch_count=246269.33333333334, ans=0.1 +2024-07-29 05:32:25,373 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.359e+01 5.672e+01 6.319e+01 7.208e+01 1.301e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 05:32:30,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=246282.66666666666, ans=0.125 +2024-07-29 05:32:36,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.min_positive, batch_count=246296.0, ans=0.05 +2024-07-29 05:32:45,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246309.33333333334, ans=0.1 +2024-07-29 05:32:47,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=246322.66666666666, ans=0.035 +2024-07-29 05:32:53,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=246336.0, ans=0.125 +2024-07-29 05:32:54,398 INFO [train.py:1114] (3/4) Epoch 19, batch 750, loss[loss=0.1663, simple_loss=0.2712, pruned_loss=0.03064, over 4692.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2651, pruned_loss=0.04103, over 918058.05 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:33:04,570 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=246349.33333333334, ans=0.125 +2024-07-29 05:33:16,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=246376.0, ans=0.125 +2024-07-29 05:33:28,102 INFO [train.py:1114] (3/4) Epoch 19, batch 800, loss[loss=0.155, simple_loss=0.2383, pruned_loss=0.03586, over 4842.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.265, pruned_loss=0.0411, over 923138.12 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:33:31,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.88 vs. limit=5.0 +2024-07-29 05:33:34,594 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.732e+01 5.665e+01 6.243e+01 7.363e+01 1.175e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 05:33:34,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=246416.0, ans=0.025 +2024-07-29 05:33:48,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=246442.66666666666, ans=0.0 +2024-07-29 05:33:54,419 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246456.0, ans=0.1 +2024-07-29 05:33:55,708 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:34:01,533 INFO [train.py:1114] (3/4) Epoch 19, batch 850, loss[loss=0.1714, simple_loss=0.2742, pruned_loss=0.03432, over 4668.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2643, pruned_loss=0.04086, over 927396.21 frames. 
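At the epoch 19 boundary above, the trainer pauses training to compute a validation loss over the full held-out set (944034 frames), logs an attention-entropy probe from `zipformer.py:1858`, and reports peak GPU memory ("Maximum memory allocated so far is 4142MB"). In outline it amounts to the loop below; the `(loss, num_frames)` model interface is illustrative, not taken from the training code:

```python
import torch

def compute_validation_loss(model, valid_loader, device):
    """Run the whole validation set without gradients, accumulate a
    frame-weighted loss, and report it with peak GPU memory."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)   # illustrative interface
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={tot_loss / tot_frames:.4f}, "
          f"over {tot_frames:.2f} frames; max memory so far {mem_mb}MB")
```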
], batch size: 14, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:34:02,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=246469.33333333334, ans=0.125 +2024-07-29 05:34:04,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=246469.33333333334, ans=0.125 +2024-07-29 05:34:07,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=246482.66666666666, ans=0.2 +2024-07-29 05:34:11,377 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=246482.66666666666, ans=0.0 +2024-07-29 05:34:14,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=246496.0, ans=0.0 +2024-07-29 05:34:20,882 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=246509.33333333334, ans=0.1 +2024-07-29 05:34:22,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=246509.33333333334, ans=0.2 +2024-07-29 05:34:22,856 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=246509.33333333334, ans=0.125 +2024-07-29 05:34:26,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=246509.33333333334, ans=0.125 +2024-07-29 05:34:29,984 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.63 vs. limit=15.0 +2024-07-29 05:34:32,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=246522.66666666666, ans=0.0 +2024-07-29 05:34:34,789 INFO [train.py:1114] (3/4) Epoch 19, batch 900, loss[loss=0.1657, simple_loss=0.2452, pruned_loss=0.04314, over 4861.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2641, pruned_loss=0.04122, over 928767.81 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:34:41,422 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+01 5.690e+01 6.264e+01 7.142e+01 9.700e+01, threshold=1.253e+02, percent-clipped=0.0 +2024-07-29 05:34:51,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.31 vs. 
limit=15.0 +2024-07-29 05:34:54,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=246562.66666666666, ans=0.1 +2024-07-29 05:34:57,787 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=246576.0, ans=0.125 +2024-07-29 05:34:59,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=246576.0, ans=0.025 +2024-07-29 05:35:01,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=246576.0, ans=0.09899494936611666 +2024-07-29 05:35:05,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=246589.33333333334, ans=0.125 +2024-07-29 05:35:10,549 INFO [train.py:1114] (3/4) Epoch 19, batch 950, loss[loss=0.1626, simple_loss=0.2538, pruned_loss=0.03574, over 4785.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2637, pruned_loss=0.04109, over 929753.61 frames. ], batch size: 12, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:35:22,021 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:35:31,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246629.33333333334, ans=0.1 +2024-07-29 05:35:33,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=246642.66666666666, ans=0.125 +2024-07-29 05:35:38,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=246642.66666666666, ans=0.0 +2024-07-29 05:35:39,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0 +2024-07-29 05:35:48,351 INFO [train.py:1114] (3/4) Epoch 19, batch 1000, loss[loss=0.1796, simple_loss=0.2646, pruned_loss=0.04733, over 4962.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2648, pruned_loss=0.0414, over 928968.26 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:35:54,964 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.789e+01 6.385e+01 7.432e+01 1.004e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 05:35:56,881 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module2.whiten, num_groups=1, num_channels=192, metric=4.33 vs. limit=15.0 +2024-07-29 05:35:58,032 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-29 05:36:03,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=246696.0, ans=0.0 +2024-07-29 05:36:21,799 INFO [train.py:1114] (3/4) Epoch 19, batch 1050, loss[loss=0.1674, simple_loss=0.2659, pruned_loss=0.03441, over 4879.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04074, over 931726.49 frames. ], batch size: 14, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:36:31,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.11 vs. 
limit=15.0 +2024-07-29 05:36:32,691 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys.whitening_limit, batch_count=246749.33333333334, ans=6.0 +2024-07-29 05:36:33,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=246749.33333333334, ans=0.1 +2024-07-29 05:36:50,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.67 vs. limit=22.5 +2024-07-29 05:36:55,277 INFO [train.py:1114] (3/4) Epoch 19, batch 1100, loss[loss=0.1611, simple_loss=0.2502, pruned_loss=0.03601, over 4895.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.04077, over 934267.90 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:37:01,943 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+01 5.545e+01 5.987e+01 6.620e+01 9.087e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-29 05:37:25,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=246856.0, ans=0.125 +2024-07-29 05:37:25,626 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.07 vs. limit=6.0 +2024-07-29 05:37:28,458 INFO [train.py:1114] (3/4) Epoch 19, batch 1150, loss[loss=0.164, simple_loss=0.2494, pruned_loss=0.03925, over 4902.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2641, pruned_loss=0.04094, over 934629.75 frames. ], batch size: 13, lr: 3.96e-03, grad_scale: 32.0 +2024-07-29 05:37:36,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=246882.66666666666, ans=0.0 +2024-07-29 05:37:39,192 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=246882.66666666666, ans=0.0 +2024-07-29 05:37:41,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=246896.0, ans=0.0 +2024-07-29 05:37:47,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=246896.0, ans=0.2 +2024-07-29 05:37:56,121 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=246922.66666666666, ans=0.125 +2024-07-29 05:38:07,850 INFO [train.py:1114] (3/4) Epoch 19, batch 1200, loss[loss=0.1623, simple_loss=0.2534, pruned_loss=0.03559, over 4872.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2649, pruned_loss=0.04103, over 933715.86 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:38:08,161 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=246936.0, ans=0.125 +2024-07-29 05:38:14,344 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.19 vs. 
limit=15.0 +2024-07-29 05:38:14,576 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.835e+01 6.415e+01 7.072e+01 9.087e+01, threshold=1.283e+02, percent-clipped=0.0 +2024-07-29 05:38:32,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=246976.0, ans=0.1 +2024-07-29 05:38:34,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.71 vs. limit=15.0 +2024-07-29 05:38:38,513 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.34 vs. limit=15.0 +2024-07-29 05:38:40,825 INFO [train.py:1114] (3/4) Epoch 19, batch 1250, loss[loss=0.1832, simple_loss=0.2768, pruned_loss=0.04477, over 4804.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2654, pruned_loss=0.0409, over 937921.44 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:38:42,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=247002.66666666666, ans=0.0 +2024-07-29 05:39:08,386 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=247056.0, ans=0.0 +2024-07-29 05:39:14,090 INFO [train.py:1114] (3/4) Epoch 19, batch 1300, loss[loss=0.2017, simple_loss=0.2835, pruned_loss=0.05998, over 4754.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2646, pruned_loss=0.0408, over 939127.75 frames. ], batch size: 19, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:39:14,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.42 vs. limit=15.0 +2024-07-29 05:39:15,239 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.41 vs. limit=10.0 +2024-07-29 05:39:25,987 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.840e+01 5.578e+01 5.975e+01 6.963e+01 1.137e+02, threshold=1.195e+02, percent-clipped=0.0 +2024-07-29 05:39:34,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=247082.66666666666, ans=0.035 +2024-07-29 05:39:37,004 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.30 vs. limit=10.0 +2024-07-29 05:39:38,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247096.0, ans=0.1 +2024-07-29 05:39:44,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=247109.33333333334, ans=0.0 +2024-07-29 05:39:44,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=247109.33333333334, ans=0.125 +2024-07-29 05:39:54,574 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.92 vs. 
limit=15.0 +2024-07-29 05:39:56,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=247136.0, ans=0.125 +2024-07-29 05:39:56,775 INFO [train.py:1114] (3/4) Epoch 19, batch 1350, loss[loss=0.1764, simple_loss=0.2624, pruned_loss=0.04517, over 4754.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2641, pruned_loss=0.04041, over 941380.94 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:40:01,489 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=247136.0, ans=0.125 +2024-07-29 05:40:12,049 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247162.66666666666, ans=0.125 +2024-07-29 05:40:16,709 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=247176.0, ans=0.035 +2024-07-29 05:40:32,021 INFO [train.py:1114] (3/4) Epoch 19, batch 1400, loss[loss=0.1645, simple_loss=0.2517, pruned_loss=0.03864, over 4720.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2633, pruned_loss=0.04023, over 942984.24 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:40:38,798 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.545e+01 5.620e+01 6.318e+01 7.023e+01 1.312e+02, threshold=1.264e+02, percent-clipped=1.0 +2024-07-29 05:40:47,732 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=247229.33333333334, ans=0.0 +2024-07-29 05:40:56,199 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=247242.66666666666, ans=0.125 +2024-07-29 05:40:56,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=247242.66666666666, ans=0.025 +2024-07-29 05:40:59,214 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.73 vs. limit=10.0 +2024-07-29 05:41:04,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247256.0, ans=0.125 +2024-07-29 05:41:07,800 INFO [train.py:1114] (3/4) Epoch 19, batch 1450, loss[loss=0.1809, simple_loss=0.2725, pruned_loss=0.04467, over 4692.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2637, pruned_loss=0.04027, over 942537.92 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:41:11,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247269.33333333334, ans=0.125 +2024-07-29 05:41:18,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=247282.66666666666, ans=0.0 +2024-07-29 05:41:22,821 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=10.45 vs. 
limit=22.5 +2024-07-29 05:41:27,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=247296.0, ans=0.125 +2024-07-29 05:41:36,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=247322.66666666666, ans=0.125 +2024-07-29 05:41:42,896 INFO [train.py:1114] (3/4) Epoch 19, batch 1500, loss[loss=0.164, simple_loss=0.2693, pruned_loss=0.02935, over 4818.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.264, pruned_loss=0.04028, over 942331.67 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:41:44,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.29 vs. limit=12.0 +2024-07-29 05:41:45,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=247336.0, ans=0.0 +2024-07-29 05:41:46,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=247336.0, ans=0.0 +2024-07-29 05:41:49,695 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.122e+01 5.560e+01 6.078e+01 6.890e+01 1.039e+02, threshold=1.216e+02, percent-clipped=0.0 +2024-07-29 05:41:54,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247349.33333333334, ans=0.125 +2024-07-29 05:41:59,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=247362.66666666666, ans=0.125 +2024-07-29 05:42:07,473 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247376.0, ans=0.125 +2024-07-29 05:42:14,826 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=247389.33333333334, ans=0.125 +2024-07-29 05:42:16,782 INFO [train.py:1114] (3/4) Epoch 19, batch 1550, loss[loss=0.1581, simple_loss=0.2559, pruned_loss=0.03018, over 4904.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2636, pruned_loss=0.04018, over 938594.17 frames. ], batch size: 15, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:42:25,731 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.57 vs. limit=22.5 +2024-07-29 05:42:26,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=247416.0, ans=0.0 +2024-07-29 05:42:26,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=247416.0, ans=0.035 +2024-07-29 05:42:29,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=247429.33333333334, ans=0.0 +2024-07-29 05:42:30,396 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.26 vs. 
limit=15.0 +2024-07-29 05:42:34,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=247429.33333333334, ans=0.1 +2024-07-29 05:42:38,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247442.66666666666, ans=0.125 +2024-07-29 05:42:41,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.34 vs. limit=15.0 +2024-07-29 05:42:42,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=247456.0, ans=0.125 +2024-07-29 05:42:47,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247456.0, ans=0.1 +2024-07-29 05:42:50,243 INFO [train.py:1114] (3/4) Epoch 19, batch 1600, loss[loss=0.1864, simple_loss=0.2781, pruned_loss=0.04735, over 4883.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2637, pruned_loss=0.04028, over 937566.68 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 64.0 +2024-07-29 05:42:58,143 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.792e+01 5.562e+01 6.323e+01 7.561e+01 1.065e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 05:43:03,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=247482.66666666666, ans=0.0 +2024-07-29 05:43:13,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=247509.33333333334, ans=0.0 +2024-07-29 05:43:16,869 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.29 vs. limit=15.0 +2024-07-29 05:43:24,348 INFO [train.py:1114] (3/4) Epoch 19, batch 1650, loss[loss=0.1854, simple_loss=0.2727, pruned_loss=0.04907, over 4681.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.04072, over 937476.69 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:43:25,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=247536.0, ans=0.125 +2024-07-29 05:43:26,788 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=247536.0, ans=0.125 +2024-07-29 05:43:40,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=247562.66666666666, ans=0.125 +2024-07-29 05:43:47,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247576.0, ans=0.1 +2024-07-29 05:43:59,825 INFO [train.py:1114] (3/4) Epoch 19, batch 1700, loss[loss=0.1457, simple_loss=0.2226, pruned_loss=0.03437, over 4695.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2629, pruned_loss=0.04062, over 939252.52 frames. 
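`grad_scale` in the summaries is the dynamic loss scale of native mixed-precision training: it doubles from 32.0 to 64.0 between batches 1200 and 1250 of epoch 19 after a stable stretch, and is back at 32.0 by batch 1650, presumably halved after an overflow. A generic sketch using PyTorch's own `GradScaler`; the model/optimizer wiring is illustrative, not the recipe's actual training loop:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)

def training_step(model, optimizer, batch):
    """One mixed-precision step; the value logged as grad_scale is
    what scaler.get_scale() returns."""
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(batch)           # illustrative forward pass
    scaler.scale(loss).backward()     # backward through the scaled loss
    scaler.step(optimizer)            # unscales; skips the step on inf/nan
    scaler.update()                   # doubles the scale after a stable
                                      # stretch, halves it on overflow
    return scaler.get_scale()
```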
], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:44:05,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=247602.66666666666, ans=0.04949747468305833 +2024-07-29 05:44:08,233 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.846e+01 5.612e+01 6.497e+01 7.246e+01 1.413e+02, threshold=1.299e+02, percent-clipped=1.0 +2024-07-29 05:44:09,418 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=9.39 vs. limit=15.0 +2024-07-29 05:44:18,049 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.91 vs. limit=15.0 +2024-07-29 05:44:32,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=247656.0, ans=0.125 +2024-07-29 05:44:34,127 INFO [train.py:1114] (3/4) Epoch 19, batch 1750, loss[loss=0.1689, simple_loss=0.2547, pruned_loss=0.04154, over 4804.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2628, pruned_loss=0.04057, over 940210.11 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:44:47,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=247696.0, ans=0.2 +2024-07-29 05:44:47,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=247696.0, ans=0.125 +2024-07-29 05:44:56,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=247709.33333333334, ans=0.2 +2024-07-29 05:45:00,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=247722.66666666666, ans=0.0 +2024-07-29 05:45:00,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.47 vs. limit=15.0 +2024-07-29 05:45:06,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=247736.0, ans=0.125 +2024-07-29 05:45:10,874 INFO [train.py:1114] (3/4) Epoch 19, batch 1800, loss[loss=0.1443, simple_loss=0.2492, pruned_loss=0.01969, over 4632.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2635, pruned_loss=0.04081, over 940918.11 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:45:12,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=247736.0, ans=0.0 +2024-07-29 05:45:18,176 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.396e+01 5.835e+01 6.491e+01 8.060e+01 1.072e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 05:45:30,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247749.33333333334, ans=0.125 +2024-07-29 05:45:34,039 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.16 vs. limit=15.0 +2024-07-29 05:45:46,815 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. 
limit=15.0 +2024-07-29 05:45:56,314 INFO [train.py:1114] (3/4) Epoch 19, batch 1850, loss[loss=0.167, simple_loss=0.2614, pruned_loss=0.03631, over 4817.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2631, pruned_loss=0.04062, over 940994.22 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:46:14,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=247829.33333333334, ans=0.025 +2024-07-29 05:46:34,832 INFO [train.py:1114] (3/4) Epoch 19, batch 1900, loss[loss=0.1484, simple_loss=0.2383, pruned_loss=0.02924, over 4660.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.264, pruned_loss=0.04071, over 942140.24 frames. ], batch size: 14, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:46:37,840 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0 +2024-07-29 05:46:38,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=247869.33333333334, ans=0.09899494936611666 +2024-07-29 05:46:43,052 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.675e+01 6.450e+01 7.490e+01 1.080e+02, threshold=1.290e+02, percent-clipped=0.0 +2024-07-29 05:46:50,715 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.50 vs. limit=15.0 +2024-07-29 05:46:59,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=247909.33333333334, ans=0.07 +2024-07-29 05:47:06,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=247922.66666666666, ans=0.125 +2024-07-29 05:47:11,509 INFO [train.py:1114] (3/4) Epoch 19, batch 1950, loss[loss=0.1673, simple_loss=0.2635, pruned_loss=0.03557, over 4901.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2656, pruned_loss=0.04114, over 943879.89 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:47:11,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=247936.0, ans=0.1 +2024-07-29 05:47:21,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=247949.33333333334, ans=0.04949747468305833 +2024-07-29 05:47:28,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=247962.66666666666, ans=0.0 +2024-07-29 05:47:57,264 INFO [train.py:1114] (3/4) Epoch 19, batch 2000, loss[loss=0.1461, simple_loss=0.2313, pruned_loss=0.03042, over 4805.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2652, pruned_loss=0.04067, over 941616.17 frames. 
], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:48:04,710 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.516e+01 5.566e+01 6.044e+01 6.728e+01 1.044e+02, threshold=1.209e+02, percent-clipped=0.0 +2024-07-29 05:48:22,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248042.66666666666, ans=0.1 +2024-07-29 05:48:27,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=248056.0, ans=0.125 +2024-07-29 05:48:27,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=248056.0, ans=0.0 +2024-07-29 05:48:31,048 INFO [train.py:1114] (3/4) Epoch 19, batch 2050, loss[loss=0.1555, simple_loss=0.2398, pruned_loss=0.03562, over 4612.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2642, pruned_loss=0.04042, over 939702.43 frames. ], batch size: 11, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:48:34,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=248069.33333333334, ans=0.125 +2024-07-29 05:48:42,660 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:48:44,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_ff2.min_abs, batch_count=248096.0, ans=0.1 +2024-07-29 05:48:47,545 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.49 vs. limit=12.0 +2024-07-29 05:48:55,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=248109.33333333334, ans=0.2 +2024-07-29 05:48:57,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=248109.33333333334, ans=0.1 +2024-07-29 05:49:07,024 INFO [train.py:1114] (3/4) Epoch 19, batch 2100, loss[loss=0.1603, simple_loss=0.2533, pruned_loss=0.03362, over 4762.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2633, pruned_loss=0.0402, over 941325.70 frames. ], batch size: 13, lr: 3.95e-03, grad_scale: 32.0 +2024-07-29 05:49:12,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=248136.0, ans=0.125 +2024-07-29 05:49:14,319 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.765e+01 5.813e+01 6.323e+01 7.221e+01 1.090e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 05:49:17,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=248149.33333333334, ans=0.0 +2024-07-29 05:49:20,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=248162.66666666666, ans=0.125 +2024-07-29 05:49:37,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=248189.33333333334, ans=0.0 +2024-07-29 05:49:40,154 INFO [train.py:1114] (3/4) Epoch 19, batch 2150, loss[loss=0.1678, simple_loss=0.2636, pruned_loss=0.03602, over 4894.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2622, pruned_loss=0.03974, over 944504.56 frames. 
], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:49:47,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=248202.66666666666, ans=0.0 +2024-07-29 05:49:49,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=248216.0, ans=0.0 +2024-07-29 05:49:50,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=248216.0, ans=0.1 +2024-07-29 05:49:51,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.47 vs. limit=15.0 +2024-07-29 05:49:56,667 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=9.10 vs. limit=15.0 +2024-07-29 05:50:00,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=248229.33333333334, ans=0.5 +2024-07-29 05:50:08,996 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=248242.66666666666, ans=0.125 +2024-07-29 05:50:14,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=248256.0, ans=0.07 +2024-07-29 05:50:19,693 INFO [train.py:1114] (3/4) Epoch 19, batch 2200, loss[loss=0.1711, simple_loss=0.267, pruned_loss=0.03759, over 4807.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2624, pruned_loss=0.04015, over 943238.82 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:50:27,104 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.703e+01 5.574e+01 6.118e+01 6.873e+01 9.817e+01, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 05:50:27,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=248282.66666666666, ans=0.0 +2024-07-29 05:50:31,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.10 vs. limit=15.0 +2024-07-29 05:50:37,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=248296.0, ans=0.0 +2024-07-29 05:50:40,886 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.87 vs. limit=12.0 +2024-07-29 05:50:41,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248309.33333333334, ans=0.1 +2024-07-29 05:50:45,518 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.76 vs. limit=12.0 +2024-07-29 05:51:23,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=248322.66666666666, ans=0.125 +2024-07-29 05:51:26,224 INFO [train.py:1114] (3/4) Epoch 19, batch 2250, loss[loss=0.1886, simple_loss=0.2888, pruned_loss=0.04424, over 4688.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2628, pruned_loss=0.04027, over 942100.03 frames. 
], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:51:26,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248336.0, ans=0.125 +2024-07-29 05:51:33,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=248349.33333333334, ans=0.125 +2024-07-29 05:51:47,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=248376.0, ans=0.035 +2024-07-29 05:52:00,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=248402.66666666666, ans=0.0 +2024-07-29 05:52:00,495 INFO [train.py:1114] (3/4) Epoch 19, batch 2300, loss[loss=0.1406, simple_loss=0.2307, pruned_loss=0.02526, over 4929.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.262, pruned_loss=0.04029, over 939549.79 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:52:09,134 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.821e+01 6.321e+01 7.286e+01 1.025e+02, threshold=1.264e+02, percent-clipped=0.0 +2024-07-29 05:52:10,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=248416.0, ans=0.025 +2024-07-29 05:52:25,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.76 vs. limit=10.0 +2024-07-29 05:52:28,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.90 vs. limit=6.0 +2024-07-29 05:52:30,712 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=6.06 vs. limit=15.0 +2024-07-29 05:52:35,424 INFO [train.py:1114] (3/4) Epoch 19, batch 2350, loss[loss=0.1808, simple_loss=0.2795, pruned_loss=0.04106, over 4644.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2618, pruned_loss=0.0397, over 941758.15 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:52:42,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=248469.33333333334, ans=0.125 +2024-07-29 05:52:56,647 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=248496.0, ans=0.0 +2024-07-29 05:53:02,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=248509.33333333334, ans=0.0 +2024-07-29 05:53:04,946 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=248509.33333333334, ans=0.0 +2024-07-29 05:53:05,004 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=248509.33333333334, ans=0.0 +2024-07-29 05:53:12,724 INFO [train.py:1114] (3/4) Epoch 19, batch 2400, loss[loss=0.202, simple_loss=0.2763, pruned_loss=0.06386, over 4649.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2623, pruned_loss=0.04017, over 941744.57 frames. 
], batch size: 12, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:53:12,878 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=248536.0, ans=0.125 +2024-07-29 05:53:21,905 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.538e+01 5.999e+01 6.676e+01 9.357e+01, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 05:53:32,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=248562.66666666666, ans=0.0 +2024-07-29 05:53:33,033 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248562.66666666666, ans=0.1 +2024-07-29 05:53:35,227 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.31 vs. limit=15.0 +2024-07-29 05:53:48,251 INFO [train.py:1114] (3/4) Epoch 19, batch 2450, loss[loss=0.1721, simple_loss=0.2635, pruned_loss=0.0403, over 4693.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2634, pruned_loss=0.04079, over 937893.69 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:53:55,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=21.18 vs. limit=22.5 +2024-07-29 05:53:59,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=248616.0, ans=0.0 +2024-07-29 05:54:01,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=248629.33333333334, ans=0.125 +2024-07-29 05:54:03,022 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=248629.33333333334, ans=0.125 +2024-07-29 05:54:14,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=248656.0, ans=0.025 +2024-07-29 05:54:14,281 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:54:21,386 INFO [train.py:1114] (3/4) Epoch 19, batch 2500, loss[loss=0.1778, simple_loss=0.277, pruned_loss=0.03929, over 4812.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04059, over 939790.11 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:54:23,288 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.77 vs. limit=10.0 +2024-07-29 05:54:28,634 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.354e+01 5.769e+01 6.395e+01 7.394e+01 1.044e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 05:54:39,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=248696.0, ans=0.125 +2024-07-29 05:54:46,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.74 vs. limit=22.5 +2024-07-29 05:54:49,169 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.41 vs. 
limit=15.0 +2024-07-29 05:54:54,732 INFO [train.py:1114] (3/4) Epoch 19, batch 2550, loss[loss=0.1642, simple_loss=0.2449, pruned_loss=0.04176, over 4791.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2622, pruned_loss=0.03972, over 939347.66 frames. ], batch size: 11, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:54:58,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248736.0, ans=0.1 +2024-07-29 05:55:07,653 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:55:08,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=248762.66666666666, ans=0.0 +2024-07-29 05:55:10,200 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=248762.66666666666, ans=0.0 +2024-07-29 05:55:24,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=248789.33333333334, ans=0.0 +2024-07-29 05:55:28,620 INFO [train.py:1114] (3/4) Epoch 19, batch 2600, loss[loss=0.1766, simple_loss=0.2615, pruned_loss=0.04585, over 4902.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2633, pruned_loss=0.04032, over 938083.94 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:55:28,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=248802.66666666666, ans=0.1 +2024-07-29 05:55:32,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=248802.66666666666, ans=0.0 +2024-07-29 05:55:32,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=248802.66666666666, ans=0.2 +2024-07-29 05:55:33,516 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=248802.66666666666, ans=0.0 +2024-07-29 05:55:35,909 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.520e+01 6.096e+01 6.841e+01 9.069e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 05:55:36,055 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=248816.0, ans=0.0 +2024-07-29 05:55:37,409 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:55:38,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=248816.0, ans=0.125 +2024-07-29 05:55:40,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=248816.0, ans=0.0 +2024-07-29 05:55:51,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=248842.66666666666, ans=0.0 +2024-07-29 05:55:53,962 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.59 vs. limit=12.0 +2024-07-29 05:56:03,691 INFO [train.py:1114] (3/4) Epoch 19, batch 2650, loss[loss=0.1605, simple_loss=0.2502, pruned_loss=0.03544, over 4654.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2636, pruned_loss=0.04043, over 940418.00 frames. 
], batch size: 16, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:56:10,115 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.61 vs. limit=15.0 +2024-07-29 05:56:18,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=248896.0, ans=0.2 +2024-07-29 05:56:19,763 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=248896.0, ans=0.0 +2024-07-29 05:56:20,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.24 vs. limit=15.0 +2024-07-29 05:56:20,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=248896.0, ans=0.0 +2024-07-29 05:56:21,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=248896.0, ans=0.1 +2024-07-29 05:56:24,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=248909.33333333334, ans=0.125 +2024-07-29 05:56:34,299 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.20 vs. limit=15.0 +2024-07-29 05:56:42,260 INFO [train.py:1114] (3/4) Epoch 19, batch 2700, loss[loss=0.1843, simple_loss=0.2894, pruned_loss=0.03962, over 4747.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2644, pruned_loss=0.04058, over 940124.03 frames. ], batch size: 14, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:57:08,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=248949.33333333334, ans=0.125 +2024-07-29 05:57:09,486 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.466e+01 5.838e+01 6.361e+01 7.244e+01 1.025e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 05:57:11,103 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=248949.33333333334, ans=0.0 +2024-07-29 05:57:17,246 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.45 vs. limit=10.0 +2024-07-29 05:57:19,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=248962.66666666666, ans=0.125 +2024-07-29 05:57:32,105 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.85 vs. limit=10.0 +2024-07-29 05:57:35,854 INFO [train.py:1114] (3/4) Epoch 19, batch 2750, loss[loss=0.1889, simple_loss=0.2732, pruned_loss=0.05235, over 4714.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04053, over 939855.52 frames. 
], batch size: 12, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 05:58:03,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=249056.0, ans=0.0 +2024-07-29 05:58:06,236 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249056.0, ans=0.1 +2024-07-29 05:58:09,572 INFO [train.py:1114] (3/4) Epoch 19, batch 2800, loss[loss=0.228, simple_loss=0.2935, pruned_loss=0.08126, over 3628.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2645, pruned_loss=0.04086, over 937644.61 frames. ], batch size: 35, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:58:17,635 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.437e+01 5.861e+01 6.601e+01 8.054e+01 1.135e+02, threshold=1.320e+02, percent-clipped=0.0 +2024-07-29 05:58:22,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.08 vs. limit=10.0 +2024-07-29 05:58:24,721 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=249096.0, ans=0.125 +2024-07-29 05:58:39,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249109.33333333334, ans=0.1 +2024-07-29 05:58:47,372 INFO [train.py:1114] (3/4) Epoch 19, batch 2850, loss[loss=0.1962, simple_loss=0.2789, pruned_loss=0.05675, over 4949.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2652, pruned_loss=0.04117, over 935636.17 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:58:48,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=249136.0, ans=0.0 +2024-07-29 05:58:52,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=249136.0, ans=0.125 +2024-07-29 05:58:53,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.47 vs. limit=10.0 +2024-07-29 05:58:55,743 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.69 vs. limit=6.0 +2024-07-29 05:58:56,573 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten.whitening_limit, batch_count=249149.33333333334, ans=15.0 +2024-07-29 05:59:16,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=249189.33333333334, ans=10.0 +2024-07-29 05:59:22,307 INFO [train.py:1114] (3/4) Epoch 19, batch 2900, loss[loss=0.1567, simple_loss=0.2464, pruned_loss=0.03346, over 4832.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2662, pruned_loss=0.04131, over 939511.18 frames. 
], batch size: 13, lr: 3.94e-03, grad_scale: 32.0 +2024-07-29 05:59:27,781 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 05:59:30,346 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.673e+01 5.762e+01 6.380e+01 7.309e+01 1.230e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 05:59:34,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=249216.0, ans=0.0 +2024-07-29 05:59:36,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=249229.33333333334, ans=0.125 +2024-07-29 05:59:37,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=249229.33333333334, ans=0.125 +2024-07-29 05:59:46,847 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.95 vs. limit=6.0 +2024-07-29 05:59:55,835 INFO [train.py:1114] (3/4) Epoch 19, batch 2950, loss[loss=0.1706, simple_loss=0.2675, pruned_loss=0.03683, over 4709.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2651, pruned_loss=0.04132, over 938636.96 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 06:00:12,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=249296.0, ans=0.0 +2024-07-29 06:00:16,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=249309.33333333334, ans=0.125 +2024-07-29 06:00:16,984 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249309.33333333334, ans=0.125 +2024-07-29 06:00:21,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249309.33333333334, ans=0.1 +2024-07-29 06:00:29,425 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=18.69 vs. limit=22.5 +2024-07-29 06:00:29,672 INFO [train.py:1114] (3/4) Epoch 19, batch 3000, loss[loss=0.1775, simple_loss=0.2644, pruned_loss=0.04534, over 4755.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2651, pruned_loss=0.04141, over 938012.66 frames. ], batch size: 13, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 06:00:29,673 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 06:00:39,730 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.3354, 5.1294, 4.5537, 4.7882], device='cuda:3') +2024-07-29 06:00:41,088 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.161, simple_loss=0.2631, pruned_loss=0.02943, over 944034.00 frames. +2024-07-29 06:00:41,088 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 06:00:44,370 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.87 vs. 
limit=10.0 +2024-07-29 06:00:44,804 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:00:45,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249336.0, ans=0.1 +2024-07-29 06:00:45,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=249336.0, ans=0.2 +2024-07-29 06:00:49,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=249349.33333333334, ans=0.025 +2024-07-29 06:00:50,058 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.473e+01 5.637e+01 6.118e+01 7.161e+01 1.064e+02, threshold=1.224e+02, percent-clipped=0.0 +2024-07-29 06:00:56,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=249362.66666666666, ans=0.0 +2024-07-29 06:01:01,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=249376.0, ans=0.125 +2024-07-29 06:01:07,409 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=249376.0, ans=0.2 +2024-07-29 06:01:14,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=249389.33333333334, ans=0.0 +2024-07-29 06:01:15,364 INFO [train.py:1114] (3/4) Epoch 19, batch 3050, loss[loss=0.1505, simple_loss=0.2369, pruned_loss=0.03206, over 4634.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2649, pruned_loss=0.04135, over 936805.05 frames. ], batch size: 12, lr: 3.94e-03, grad_scale: 16.0 +2024-07-29 06:01:16,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249402.66666666666, ans=0.1 +2024-07-29 06:01:16,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=249402.66666666666, ans=0.125 +2024-07-29 06:01:20,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=249402.66666666666, ans=0.125 +2024-07-29 06:01:21,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249402.66666666666, ans=0.1 +2024-07-29 06:01:21,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=249402.66666666666, ans=0.125 +2024-07-29 06:01:27,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=249416.0, ans=0.125 +2024-07-29 06:01:29,572 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.38 vs. 
limit=10.0 +2024-07-29 06:01:31,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=249429.33333333334, ans=0.04949747468305833 +2024-07-29 06:01:37,885 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=249442.66666666666, ans=0.0 +2024-07-29 06:01:41,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249442.66666666666, ans=0.125 +2024-07-29 06:01:51,072 INFO [train.py:1114] (3/4) Epoch 19, batch 3100, loss[loss=0.1815, simple_loss=0.2723, pruned_loss=0.04538, over 4648.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2643, pruned_loss=0.04107, over 937515.63 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 16.0 +2024-07-29 06:01:57,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=249482.66666666666, ans=0.0 +2024-07-29 06:01:59,699 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.499e+01 6.213e+01 7.046e+01 1.053e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 06:02:04,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=249496.0, ans=0.125 +2024-07-29 06:02:12,461 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-29 06:02:24,696 INFO [train.py:1114] (3/4) Epoch 19, batch 3150, loss[loss=0.1804, simple_loss=0.284, pruned_loss=0.03841, over 4649.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.264, pruned_loss=0.04073, over 937904.55 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 16.0 +2024-07-29 06:02:41,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.78 vs. limit=15.0 +2024-07-29 06:02:55,912 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=249589.33333333334, ans=0.0 +2024-07-29 06:03:01,863 INFO [train.py:1114] (3/4) Epoch 19, batch 3200, loss[loss=0.1552, simple_loss=0.2473, pruned_loss=0.03152, over 4827.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2632, pruned_loss=0.04072, over 939601.33 frames. ], batch size: 13, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:03:04,803 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.95 vs. limit=6.0 +2024-07-29 06:03:05,580 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.76 vs. 
limit=15.0 +2024-07-29 06:03:07,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=249602.66666666666, ans=0.2 +2024-07-29 06:03:07,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=249616.0, ans=0.125 +2024-07-29 06:03:10,259 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.924e+01 6.807e+01 8.203e+01 1.254e+02, threshold=1.361e+02, percent-clipped=1.0 +2024-07-29 06:03:37,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=249642.66666666666, ans=0.125 +2024-07-29 06:03:38,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass_mid.scale_min, batch_count=249642.66666666666, ans=0.2 +2024-07-29 06:03:39,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=249656.0, ans=0.125 +2024-07-29 06:03:48,468 INFO [train.py:1114] (3/4) Epoch 19, batch 3250, loss[loss=0.1659, simple_loss=0.2608, pruned_loss=0.03549, over 4931.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2643, pruned_loss=0.04077, over 940328.84 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:03:48,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=249669.33333333334, ans=0.125 +2024-07-29 06:03:49,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=249669.33333333334, ans=0.1 +2024-07-29 06:03:54,116 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=249669.33333333334, ans=0.0 +2024-07-29 06:03:59,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=249682.66666666666, ans=0.0 +2024-07-29 06:04:05,128 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=249682.66666666666, ans=0.125 +2024-07-29 06:04:09,143 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=249696.0, ans=0.125 +2024-07-29 06:06:10,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=249709.33333333334, ans=0.1 +2024-07-29 06:07:15,045 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=249722.66666666666, ans=0.125 +2024-07-29 06:07:17,411 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.38 vs. limit=6.0 +2024-07-29 06:07:21,178 INFO [train.py:1114] (3/4) Epoch 19, batch 3300, loss[loss=0.1975, simple_loss=0.2848, pruned_loss=0.05517, over 4673.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04078, over 940402.45 frames. 
], batch size: 19, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:07:38,744 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.082e+01 5.786e+01 6.492e+01 7.177e+01 1.036e+02, threshold=1.298e+02, percent-clipped=0.0 +2024-07-29 06:07:42,473 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=7.15 vs. limit=15.0 +2024-07-29 06:07:45,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=7.85 vs. limit=15.0 +2024-07-29 06:07:55,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.59 vs. limit=15.0 +2024-07-29 06:08:02,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.70 vs. limit=15.0 +2024-07-29 06:08:07,273 INFO [train.py:1114] (3/4) Epoch 19, batch 3350, loss[loss=0.2224, simple_loss=0.3093, pruned_loss=0.06774, over 4591.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2639, pruned_loss=0.04131, over 937898.86 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:08:10,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=249802.66666666666, ans=0.0 +2024-07-29 06:08:16,390 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.99 vs. limit=15.0 +2024-07-29 06:08:20,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=249829.33333333334, ans=0.025 +2024-07-29 06:08:22,675 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.21 vs. limit=15.0 +2024-07-29 06:08:25,226 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.93 vs. limit=15.0 +2024-07-29 06:08:39,939 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=249856.0, ans=0.2 +2024-07-29 06:08:40,054 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=249856.0, ans=0.125 +2024-07-29 06:08:41,239 INFO [train.py:1114] (3/4) Epoch 19, batch 3400, loss[loss=0.1498, simple_loss=0.2363, pruned_loss=0.03164, over 4808.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2632, pruned_loss=0.04088, over 936981.52 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:08:49,839 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.563e+01 5.488e+01 5.998e+01 6.910e+01 1.087e+02, threshold=1.200e+02, percent-clipped=0.0 +2024-07-29 06:08:52,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249882.66666666666, ans=0.1 +2024-07-29 06:08:59,105 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=249896.0, ans=0.125 +2024-07-29 06:09:05,686 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.97 vs. 
limit=15.0 +2024-07-29 06:09:06,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=249909.33333333334, ans=0.125 +2024-07-29 06:09:09,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=249922.66666666666, ans=0.1 +2024-07-29 06:09:15,443 INFO [train.py:1114] (3/4) Epoch 19, batch 3450, loss[loss=0.1878, simple_loss=0.2832, pruned_loss=0.04616, over 4732.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2633, pruned_loss=0.04099, over 937264.39 frames. ], batch size: 19, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:09:23,461 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=249949.33333333334, ans=0.125 +2024-07-29 06:09:44,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=249989.33333333334, ans=0.09899494936611666 +2024-07-29 06:09:48,788 INFO [train.py:1114] (3/4) Epoch 19, batch 3500, loss[loss=0.164, simple_loss=0.251, pruned_loss=0.0385, over 4932.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2629, pruned_loss=0.04055, over 938128.79 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:09:57,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=250016.0, ans=0.0 +2024-07-29 06:09:57,883 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.930e+01 5.660e+01 6.096e+01 6.757e+01 8.865e+01, threshold=1.219e+02, percent-clipped=0.0 +2024-07-29 06:10:00,030 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=250016.0, ans=0.0 +2024-07-29 06:10:02,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=250029.33333333334, ans=0.2 +2024-07-29 06:10:05,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=250029.33333333334, ans=0.125 +2024-07-29 06:10:06,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=250029.33333333334, ans=0.0 +2024-07-29 06:10:13,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250042.66666666666, ans=0.125 +2024-07-29 06:10:15,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=7.88 vs. limit=15.0 +2024-07-29 06:10:15,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=250042.66666666666, ans=0.125 +2024-07-29 06:10:24,715 INFO [train.py:1114] (3/4) Epoch 19, batch 3550, loss[loss=0.1584, simple_loss=0.2628, pruned_loss=0.027, over 4657.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2627, pruned_loss=0.04022, over 939063.91 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:10:28,326 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.08 vs. 
limit=22.5 +2024-07-29 06:11:09,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=250122.66666666666, ans=0.125 +2024-07-29 06:11:10,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250122.66666666666, ans=0.125 +2024-07-29 06:11:17,174 INFO [train.py:1114] (3/4) Epoch 19, batch 3600, loss[loss=0.1306, simple_loss=0.2215, pruned_loss=0.01984, over 4964.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2625, pruned_loss=0.03957, over 940820.63 frames. ], batch size: 13, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:11:20,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer1.prob, batch_count=250136.0, ans=0.125 +2024-07-29 06:11:28,761 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.784e+01 5.577e+01 6.277e+01 7.321e+01 1.396e+02, threshold=1.255e+02, percent-clipped=3.0 +2024-07-29 06:11:35,914 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.70 vs. limit=22.5 +2024-07-29 06:11:38,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=250162.66666666666, ans=0.125 +2024-07-29 06:11:41,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.28 vs. limit=15.0 +2024-07-29 06:11:52,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=250189.33333333334, ans=0.2 +2024-07-29 06:11:53,652 INFO [train.py:1114] (3/4) Epoch 19, batch 3650, loss[loss=0.1975, simple_loss=0.2915, pruned_loss=0.05173, over 4908.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03943, over 941305.50 frames. ], batch size: 15, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:11:55,791 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250202.66666666666, ans=0.125 +2024-07-29 06:11:59,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250202.66666666666, ans=0.1 +2024-07-29 06:12:04,111 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0 +2024-07-29 06:12:13,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250242.66666666666, ans=0.0 +2024-07-29 06:12:17,889 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=250242.66666666666, ans=0.125 +2024-07-29 06:12:21,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=250256.0, ans=0.0 +2024-07-29 06:12:27,241 INFO [train.py:1114] (3/4) Epoch 19, batch 3700, loss[loss=0.1683, simple_loss=0.2656, pruned_loss=0.03552, over 4926.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.262, pruned_loss=0.03938, over 942011.64 frames. 
], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:12:30,203 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.10 vs. limit=12.0 +2024-07-29 06:12:31,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=250269.33333333334, ans=0.2 +2024-07-29 06:12:35,698 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.472e+01 5.449e+01 6.027e+01 6.709e+01 1.105e+02, threshold=1.205e+02, percent-clipped=0.0 +2024-07-29 06:12:41,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=250296.0, ans=0.125 +2024-07-29 06:12:55,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=250309.33333333334, ans=0.125 +2024-07-29 06:13:04,998 INFO [train.py:1114] (3/4) Epoch 19, batch 3750, loss[loss=0.1551, simple_loss=0.2306, pruned_loss=0.03977, over 4802.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2628, pruned_loss=0.03978, over 943688.41 frames. ], batch size: 11, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:13:07,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250336.0, ans=0.1 +2024-07-29 06:13:13,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=250349.33333333334, ans=0.0 +2024-07-29 06:13:16,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=250349.33333333334, ans=0.125 +2024-07-29 06:13:18,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=250362.66666666666, ans=0.2 +2024-07-29 06:13:32,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=250376.0, ans=0.125 +2024-07-29 06:13:35,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=250389.33333333334, ans=0.2 +2024-07-29 06:13:41,746 INFO [train.py:1114] (3/4) Epoch 19, batch 3800, loss[loss=0.1763, simple_loss=0.2668, pruned_loss=0.04292, over 4803.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2629, pruned_loss=0.04036, over 942738.18 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:13:50,554 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.347e+01 5.643e+01 6.466e+01 7.181e+01 9.486e+01, threshold=1.293e+02, percent-clipped=0.0 +2024-07-29 06:14:07,945 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.78 vs. limit=15.0 +2024-07-29 06:14:11,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=250456.0, ans=0.125 +2024-07-29 06:14:15,723 INFO [train.py:1114] (3/4) Epoch 19, batch 3850, loss[loss=0.1829, simple_loss=0.2849, pruned_loss=0.04051, over 4623.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03976, over 943086.09 frames. 
], batch size: 16, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:14:21,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=250469.33333333334, ans=10.0 +2024-07-29 06:14:21,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.66 vs. limit=22.5 +2024-07-29 06:14:23,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=250469.33333333334, ans=0.0 +2024-07-29 06:14:23,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250469.33333333334, ans=0.1 +2024-07-29 06:14:28,707 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.96 vs. limit=6.0 +2024-07-29 06:14:29,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=250482.66666666666, ans=0.0 +2024-07-29 06:14:36,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=250482.66666666666, ans=0.125 +2024-07-29 06:14:40,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250496.0, ans=0.1 +2024-07-29 06:14:40,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=17.73 vs. limit=22.5 +2024-07-29 06:15:01,947 INFO [train.py:1114] (3/4) Epoch 19, batch 3900, loss[loss=0.1709, simple_loss=0.2671, pruned_loss=0.03738, over 4812.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2626, pruned_loss=0.03996, over 943299.11 frames. ], batch size: 14, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:15:05,411 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=250536.0, ans=0.125 +2024-07-29 06:15:10,405 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=8.87 vs. limit=15.0 +2024-07-29 06:15:10,489 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.812e+01 5.443e+01 5.935e+01 6.800e+01 9.417e+01, threshold=1.187e+02, percent-clipped=0.0 +2024-07-29 06:15:13,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=250549.33333333334, ans=0.125 +2024-07-29 06:15:15,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250562.66666666666, ans=0.125 +2024-07-29 06:15:20,191 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=25.38 vs. limit=22.5 +2024-07-29 06:15:24,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=250576.0, ans=0.1 +2024-07-29 06:15:27,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=250576.0, ans=0.0 +2024-07-29 06:15:37,618 INFO [train.py:1114] (3/4) Epoch 19, batch 3950, loss[loss=0.2207, simple_loss=0.3096, pruned_loss=0.06587, over 4861.00 frames. 
], tot_loss[loss=0.1707, simple_loss=0.2622, pruned_loss=0.03967, over 945041.03 frames. ], batch size: 16, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:15:38,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=250602.66666666666, ans=0.025 +2024-07-29 06:15:39,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=250602.66666666666, ans=0.1 +2024-07-29 06:15:41,775 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.min_positive, batch_count=250602.66666666666, ans=0.05 +2024-07-29 06:15:51,642 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250629.33333333334, ans=0.1 +2024-07-29 06:16:12,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=250669.33333333334, ans=0.125 +2024-07-29 06:16:13,304 INFO [train.py:1114] (3/4) Epoch 19, batch 4000, loss[loss=0.1328, simple_loss=0.22, pruned_loss=0.02284, over 4783.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.04004, over 941808.46 frames. ], batch size: 12, lr: 3.93e-03, grad_scale: 32.0 +2024-07-29 06:16:19,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=250669.33333333334, ans=0.125 +2024-07-29 06:16:24,305 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.630e+01 6.259e+01 7.111e+01 1.064e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 06:16:28,622 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=5.07 vs. limit=12.0 +2024-07-29 06:16:34,081 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.36 vs. limit=15.0 +2024-07-29 06:16:35,859 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=250709.33333333334, ans=0.125 +2024-07-29 06:16:39,139 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=250709.33333333334, ans=0.1 +2024-07-29 06:16:49,520 INFO [train.py:1114] (3/4) Epoch 19, batch 4050, loss[loss=0.2193, simple_loss=0.3011, pruned_loss=0.06872, over 3420.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2613, pruned_loss=0.03961, over 939598.63 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:16:53,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.22 vs. 
limit=15.0 +2024-07-29 06:16:54,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250736.0, ans=0.125 +2024-07-29 06:16:55,778 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=250749.33333333334, ans=0.2 +2024-07-29 06:17:07,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=250762.66666666666, ans=0.125 +2024-07-29 06:17:21,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=250789.33333333334, ans=0.2 +2024-07-29 06:17:23,765 INFO [train.py:1114] (3/4) Epoch 19, batch 4100, loss[loss=0.1865, simple_loss=0.2817, pruned_loss=0.04568, over 4896.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2623, pruned_loss=0.04, over 938705.60 frames. ], batch size: 15, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:17:23,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=250802.66666666666, ans=0.1 +2024-07-29 06:17:32,506 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.791e+01 5.808e+01 6.562e+01 7.760e+01 1.349e+02, threshold=1.312e+02, percent-clipped=1.0 +2024-07-29 06:17:32,902 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.66 vs. limit=15.0 +2024-07-29 06:17:51,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=250842.66666666666, ans=15.0 +2024-07-29 06:17:56,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=250842.66666666666, ans=0.1 +2024-07-29 06:17:57,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=250856.0, ans=0.125 +2024-07-29 06:18:00,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=250856.0, ans=0.125 +2024-07-29 06:18:04,512 INFO [train.py:1114] (3/4) Epoch 19, batch 4150, loss[loss=0.16, simple_loss=0.2673, pruned_loss=0.02631, over 4829.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2614, pruned_loss=0.03958, over 938360.77 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:18:08,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=250869.33333333334, ans=0.07 +2024-07-29 06:18:09,462 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=250869.33333333334, ans=0.2 +2024-07-29 06:18:15,722 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=250882.66666666666, ans=0.125 +2024-07-29 06:19:23,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=250909.33333333334, ans=0.125 +2024-07-29 06:19:59,623 INFO [train.py:1114] (3/4) Epoch 19, batch 4200, loss[loss=0.182, simple_loss=0.2709, pruned_loss=0.04658, over 4894.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2615, pruned_loss=0.0396, over 939720.62 frames. 
], batch size: 15, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:20:06,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=250936.0, ans=0.07 +2024-07-29 06:20:17,256 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.502e+01 5.500e+01 5.908e+01 6.556e+01 1.150e+02, threshold=1.182e+02, percent-clipped=0.0 +2024-07-29 06:20:19,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=250949.33333333334, ans=0.0 +2024-07-29 06:20:34,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=250976.0, ans=0.125 +2024-07-29 06:20:46,969 INFO [train.py:1114] (3/4) Epoch 19, batch 4250, loss[loss=0.1512, simple_loss=0.2409, pruned_loss=0.03076, over 4638.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2612, pruned_loss=0.03955, over 940592.27 frames. ], batch size: 12, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:20:50,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0 +2024-07-29 06:20:52,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=251002.66666666666, ans=0.0 +2024-07-29 06:21:22,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=251029.33333333334, ans=0.2 +2024-07-29 06:21:22,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251029.33333333334, ans=0.0 +2024-07-29 06:22:06,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=251042.66666666666, ans=0.0 +2024-07-29 06:22:17,987 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.31 vs. limit=15.0 +2024-07-29 06:22:38,267 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.96 vs. limit=10.0 +2024-07-29 06:22:52,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251056.0, ans=0.1 +2024-07-29 06:22:57,469 INFO [train.py:1114] (3/4) Epoch 19, batch 4300, loss[loss=0.1363, simple_loss=0.2413, pruned_loss=0.0157, over 4762.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2618, pruned_loss=0.03991, over 940492.05 frames. 
], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:23:00,871 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=251069.33333333334, ans=0.125 +2024-07-29 06:23:47,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.705e+01 6.376e+01 7.099e+01 1.039e+02, threshold=1.275e+02, percent-clipped=0.0 +2024-07-29 06:24:28,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=251082.66666666666, ans=0.025 +2024-07-29 06:25:00,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=251096.0, ans=0.125 +2024-07-29 06:25:03,140 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251096.0, ans=0.1 +2024-07-29 06:25:03,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251096.0, ans=0.1 +2024-07-29 06:26:19,163 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=251109.33333333334, ans=0.125 +2024-07-29 06:26:24,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=251122.66666666666, ans=0.04949747468305833 +2024-07-29 06:26:51,242 INFO [train.py:1114] (3/4) Epoch 19, batch 4350, loss[loss=0.1541, simple_loss=0.2489, pruned_loss=0.02961, over 4752.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04063, over 941215.63 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:27:20,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=251136.0, ans=0.2 +2024-07-29 06:27:22,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=251136.0, ans=0.2 +2024-07-29 06:28:38,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=251189.33333333334, ans=0.0 +2024-07-29 06:28:38,167 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.85 vs. limit=15.0 +2024-07-29 06:28:43,543 INFO [train.py:1114] (3/4) Epoch 19, batch 4400, loss[loss=0.1632, simple_loss=0.2622, pruned_loss=0.03208, over 4812.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2631, pruned_loss=0.04069, over 940585.24 frames. 
], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:28:51,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=251216.0, ans=0.125 +2024-07-29 06:28:52,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.562e+01 5.682e+01 6.192e+01 7.414e+01 9.950e+01, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 06:28:55,256 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=251216.0, ans=0.0 +2024-07-29 06:28:57,276 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251229.33333333334, ans=0.1 +2024-07-29 06:29:21,931 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=251256.0, ans=0.2 +2024-07-29 06:29:37,349 INFO [train.py:1114] (3/4) Epoch 19, batch 4450, loss[loss=0.1493, simple_loss=0.2415, pruned_loss=0.02861, over 4930.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2634, pruned_loss=0.04106, over 938426.58 frames. ], batch size: 12, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:29:51,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=251296.0, ans=0.0 +2024-07-29 06:30:13,624 INFO [train.py:1114] (3/4) Epoch 19, batch 4500, loss[loss=0.1562, simple_loss=0.2601, pruned_loss=0.02612, over 4743.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2638, pruned_loss=0.04069, over 937793.64 frames. ], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:30:14,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=251336.0, ans=0.0 +2024-07-29 06:30:41,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=251336.0, ans=0.09899494936611666 +2024-07-29 06:30:42,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=251336.0, ans=0.1 +2024-07-29 06:30:43,020 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.93 vs. limit=8.0 +2024-07-29 06:31:20,187 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+01 5.536e+01 6.082e+01 6.951e+01 9.632e+01, threshold=1.216e+02, percent-clipped=0.0 +2024-07-29 06:31:23,745 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.48 vs. limit=22.5 +2024-07-29 06:32:06,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff3_skip_rate, batch_count=251376.0, ans=0.0 +2024-07-29 06:32:06,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251376.0, ans=0.1 +2024-07-29 06:32:14,380 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=251389.33333333334, ans=0.125 +2024-07-29 06:32:16,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=251389.33333333334, ans=0.0 +2024-07-29 06:32:20,711 INFO [train.py:1114] (3/4) Epoch 19, batch 4550, loss[loss=0.1616, simple_loss=0.2439, pruned_loss=0.03965, over 4897.00 frames. 
], tot_loss[loss=0.1733, simple_loss=0.2645, pruned_loss=0.04099, over 939795.71 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:32:24,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=251402.66666666666, ans=0.0 +2024-07-29 06:32:35,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=251416.0, ans=0.125 +2024-07-29 06:32:50,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251442.66666666666, ans=0.1 +2024-07-29 06:32:51,661 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=251442.66666666666, ans=0.0 +2024-07-29 06:32:55,093 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=251456.0, ans=0.125 +2024-07-29 06:33:01,148 INFO [train.py:1114] (3/4) Epoch 19, batch 4600, loss[loss=0.1879, simple_loss=0.2758, pruned_loss=0.05003, over 4457.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2642, pruned_loss=0.04101, over 938032.62 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:33:12,133 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=251482.66666666666, ans=0.125 +2024-07-29 06:33:12,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=6.30 vs. limit=15.0 +2024-07-29 06:33:12,680 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.648e+01 5.756e+01 6.471e+01 7.460e+01 1.091e+02, threshold=1.294e+02, percent-clipped=0.0 +2024-07-29 06:33:36,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=251509.33333333334, ans=0.0 +2024-07-29 06:34:00,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=251522.66666666666, ans=0.125 +2024-07-29 06:34:02,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=251522.66666666666, ans=0.2 +2024-07-29 06:34:16,611 INFO [train.py:1114] (3/4) Epoch 19, batch 4650, loss[loss=0.2015, simple_loss=0.2935, pruned_loss=0.05473, over 4850.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.041, over 939706.65 frames. ], batch size: 16, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:34:19,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=251536.0, ans=0.025 +2024-07-29 06:34:29,874 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=7.59 vs. 
limit=15.0 +2024-07-29 06:34:35,827 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=251549.33333333334, ans=0.1 +2024-07-29 06:34:56,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=251549.33333333334, ans=0.0 +2024-07-29 06:35:57,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=251589.33333333334, ans=0.0 +2024-07-29 06:36:00,592 INFO [train.py:1114] (3/4) Epoch 19, batch 4700, loss[loss=0.1569, simple_loss=0.2415, pruned_loss=0.03615, over 4719.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2637, pruned_loss=0.04043, over 936756.20 frames. ], batch size: 11, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:36:01,170 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=251602.66666666666, ans=0.1 +2024-07-29 06:36:28,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.853e+01 6.382e+01 7.357e+01 1.166e+02, threshold=1.276e+02, percent-clipped=0.0 +2024-07-29 06:38:09,496 INFO [train.py:1114] (3/4) Epoch 19, batch 4750, loss[loss=0.1944, simple_loss=0.294, pruned_loss=0.04736, over 4523.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2644, pruned_loss=0.04134, over 934994.90 frames. ], batch size: 21, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:38:27,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer2.prob, batch_count=251669.33333333334, ans=0.125 +2024-07-29 06:38:30,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=251682.66666666666, ans=0.1 +2024-07-29 06:38:31,201 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=251682.66666666666, ans=0.125 +2024-07-29 06:38:49,487 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=251682.66666666666, ans=0.2 +2024-07-29 06:39:36,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=10.30 vs. limit=15.0 +2024-07-29 06:40:04,371 INFO [train.py:1114] (3/4) Epoch 19, batch 4800, loss[loss=0.1685, simple_loss=0.2597, pruned_loss=0.03863, over 4704.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2644, pruned_loss=0.04123, over 932987.38 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:40:33,251 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.567e+01 5.823e+01 6.588e+01 7.932e+01 1.236e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 06:41:11,895 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.55 vs. limit=15.0 +2024-07-29 06:41:17,268 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.64 vs. limit=6.0 +2024-07-29 06:41:52,682 INFO [train.py:1114] (3/4) Epoch 19, batch 4850, loss[loss=0.1809, simple_loss=0.2885, pruned_loss=0.0367, over 4731.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2643, pruned_loss=0.04116, over 932037.93 frames. 
], batch size: 14, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:41:55,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=251802.66666666666, ans=0.025 +2024-07-29 06:42:16,206 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.41 vs. limit=15.0 +2024-07-29 06:42:18,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=251816.0, ans=0.125 +2024-07-29 06:42:39,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=251842.66666666666, ans=0.0 +2024-07-29 06:43:11,764 INFO [train.py:1114] (3/4) Epoch 19, batch 4900, loss[loss=0.1638, simple_loss=0.264, pruned_loss=0.03174, over 4765.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2631, pruned_loss=0.0406, over 933797.22 frames. ], batch size: 13, lr: 3.92e-03, grad_scale: 32.0 +2024-07-29 06:43:37,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=251869.33333333334, ans=0.125 +2024-07-29 06:43:51,844 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=512, metric=19.31 vs. limit=22.5 +2024-07-29 06:43:51,969 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.31 vs. limit=6.0 +2024-07-29 06:43:53,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=251882.66666666666, ans=0.125 +2024-07-29 06:43:53,919 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.47 vs. limit=6.0 +2024-07-29 06:43:55,402 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.611e+01 6.100e+01 6.685e+01 9.009e+01, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 06:44:08,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=251896.0, ans=0.0 +2024-07-29 06:44:41,324 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=251909.33333333334, ans=0.0 +2024-07-29 06:44:51,788 INFO [train.py:1114] (3/4) Epoch 19, batch 4950, loss[loss=0.2218, simple_loss=0.3024, pruned_loss=0.07065, over 3481.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2647, pruned_loss=0.04131, over 930913.57 frames. ], batch size: 35, lr: 3.92e-03, grad_scale: 64.0 +2024-07-29 06:45:07,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=251949.33333333334, ans=10.0 +2024-07-29 06:45:17,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=251962.66666666666, ans=0.2 +2024-07-29 06:45:18,265 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.32 vs. 
limit=6.0 +2024-07-29 06:45:31,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=251976.0, ans=0.025 +2024-07-29 06:45:51,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=251989.33333333334, ans=0.0 +2024-07-29 06:45:53,096 INFO [train.py:1114] (3/4) Epoch 19, batch 5000, loss[loss=0.1761, simple_loss=0.279, pruned_loss=0.0366, over 4670.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2648, pruned_loss=0.0411, over 934954.85 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 64.0 +2024-07-29 06:45:54,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=252002.66666666666, ans=0.0 +2024-07-29 06:46:17,620 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.630e+01 5.743e+01 6.406e+01 6.805e+01 1.014e+02, threshold=1.281e+02, percent-clipped=0.0 +2024-07-29 06:46:22,284 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.99 vs. limit=22.5 +2024-07-29 06:46:31,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252029.33333333334, ans=0.1 +2024-07-29 06:46:34,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=252029.33333333334, ans=0.125 +2024-07-29 06:46:41,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=252042.66666666666, ans=0.125 +2024-07-29 06:46:42,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=252042.66666666666, ans=0.125 +2024-07-29 06:46:45,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=252056.0, ans=0.0 +2024-07-29 06:46:51,741 INFO [train.py:1114] (3/4) Epoch 19, batch 5050, loss[loss=0.1529, simple_loss=0.2387, pruned_loss=0.03349, over 4873.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2647, pruned_loss=0.04084, over 937742.68 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 64.0 +2024-07-29 06:46:53,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=252069.33333333334, ans=0.0 +2024-07-29 06:46:56,387 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=4.66 vs. limit=15.0 +2024-07-29 06:46:56,901 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.31 vs. limit=15.0 +2024-07-29 06:47:34,573 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.73 vs. 
limit=22.5 +2024-07-29 06:47:38,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=252122.66666666666, ans=0.125 +2024-07-29 06:47:39,224 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=252122.66666666666, ans=0.2 +2024-07-29 06:47:40,384 INFO [train.py:1114] (3/4) Epoch 19, batch 5100, loss[loss=0.1461, simple_loss=0.2206, pruned_loss=0.03583, over 4782.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2646, pruned_loss=0.04119, over 935431.28 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:47:49,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.04 vs. limit=15.0 +2024-07-29 06:47:52,764 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:47:55,092 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.629e+01 5.744e+01 6.473e+01 7.169e+01 1.065e+02, threshold=1.295e+02, percent-clipped=0.0 +2024-07-29 06:48:24,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. limit=6.0 +2024-07-29 06:49:24,764 INFO [train.py:1114] (3/4) Epoch 19, batch 5150, loss[loss=0.1748, simple_loss=0.2741, pruned_loss=0.03775, over 4832.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2648, pruned_loss=0.04101, over 936177.77 frames. ], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:49:29,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252202.66666666666, ans=0.125 +2024-07-29 06:49:31,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252202.66666666666, ans=0.1 +2024-07-29 06:49:32,425 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252202.66666666666, ans=0.1 +2024-07-29 06:49:34,280 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=252202.66666666666, ans=0.025 +2024-07-29 06:49:34,407 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252202.66666666666, ans=0.1 +2024-07-29 06:49:37,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=252216.0, ans=0.125 +2024-07-29 06:49:41,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=252216.0, ans=0.0 +2024-07-29 06:49:42,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252229.33333333334, ans=0.125 +2024-07-29 06:49:46,773 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.78 vs. 
limit=15.0 +2024-07-29 06:49:47,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=252229.33333333334, ans=0.2 +2024-07-29 06:49:52,535 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=15.52 vs. limit=22.5 +2024-07-29 06:50:08,828 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. limit=15.0 +2024-07-29 06:50:13,876 INFO [train.py:1114] (3/4) Epoch 19, batch 5200, loss[loss=0.1684, simple_loss=0.2742, pruned_loss=0.03124, over 4654.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2636, pruned_loss=0.04021, over 936299.10 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:19,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=252269.33333333334, ans=0.2 +2024-07-29 06:50:22,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=252282.66666666666, ans=0.09899494936611666 +2024-07-29 06:50:24,637 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.474e+01 5.788e+01 6.281e+01 7.022e+01 9.096e+01, threshold=1.256e+02, percent-clipped=0.0 +2024-07-29 06:50:28,095 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=252296.0, ans=0.125 +2024-07-29 06:50:28,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=252296.0, ans=0.2 +2024-07-29 06:50:32,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=252296.0, ans=0.125 +2024-07-29 06:50:35,720 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.62 vs. limit=15.0 +2024-07-29 06:50:49,262 INFO [train.py:1114] (3/4) Epoch 19, batch 5250, loss[loss=0.1451, simple_loss=0.239, pruned_loss=0.02559, over 4901.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04078, over 936167.85 frames. ], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:50:57,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=252349.33333333334, ans=0.0 +2024-07-29 06:51:08,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=252362.66666666666, ans=0.125 +2024-07-29 06:51:23,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=252389.33333333334, ans=0.0 +2024-07-29 06:51:24,377 INFO [train.py:1114] (3/4) Epoch 19, batch 5300, loss[loss=0.1744, simple_loss=0.2704, pruned_loss=0.03914, over 4610.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2633, pruned_loss=0.04069, over 934870.90 frames. 
], batch size: 16, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:51:30,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=252416.0, ans=0.125 +2024-07-29 06:51:32,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=252416.0, ans=0.0 +2024-07-29 06:51:33,496 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.682e+01 5.685e+01 6.229e+01 6.963e+01 9.686e+01, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 06:51:44,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.94 vs. limit=6.0 +2024-07-29 06:51:47,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252442.66666666666, ans=0.1 +2024-07-29 06:51:55,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=252456.0, ans=0.025 +2024-07-29 06:51:57,737 INFO [train.py:1114] (3/4) Epoch 19, batch 5350, loss[loss=0.1394, simple_loss=0.2216, pruned_loss=0.02866, over 4475.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2644, pruned_loss=0.0412, over 936767.76 frames. ], batch size: 10, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:04,860 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.23 vs. limit=15.0 +2024-07-29 06:52:07,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=252482.66666666666, ans=0.125 +2024-07-29 06:52:23,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=252509.33333333334, ans=0.125 +2024-07-29 06:52:23,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.63 vs. limit=15.0 +2024-07-29 06:52:26,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=252522.66666666666, ans=0.125 +2024-07-29 06:52:28,225 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=252522.66666666666, ans=0.125 +2024-07-29 06:52:32,371 INFO [train.py:1114] (3/4) Epoch 19, batch 5400, loss[loss=0.1762, simple_loss=0.27, pruned_loss=0.04124, over 4327.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2653, pruned_loss=0.04182, over 931515.69 frames. ], batch size: 26, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:52:38,849 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.81 vs. 
limit=6.0 +2024-07-29 06:52:42,197 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.436e+01 5.716e+01 6.217e+01 6.684e+01 8.948e+01, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 06:52:44,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=252549.33333333334, ans=0.2 +2024-07-29 06:52:57,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=252576.0, ans=0.2 +2024-07-29 06:52:59,720 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=252576.0, ans=0.125 +2024-07-29 06:53:01,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=252589.33333333334, ans=0.125 +2024-07-29 06:53:05,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252589.33333333334, ans=0.125 +2024-07-29 06:53:08,930 INFO [train.py:1114] (3/4) Epoch 19, batch 5450, loss[loss=0.1575, simple_loss=0.2332, pruned_loss=0.04085, over 4700.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2638, pruned_loss=0.04123, over 933944.96 frames. ], batch size: 11, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:31,525 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.64 vs. limit=10.0 +2024-07-29 06:53:36,164 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:53:45,873 INFO [train.py:1114] (3/4) Epoch 19, batch 5500, loss[loss=0.1781, simple_loss=0.2781, pruned_loss=0.03909, over 4181.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2641, pruned_loss=0.04087, over 931079.14 frames. ], batch size: 25, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:53:47,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=252669.33333333334, ans=0.0 +2024-07-29 06:53:55,330 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.797e+01 5.621e+01 6.177e+01 7.042e+01 9.819e+01, threshold=1.235e+02, percent-clipped=0.0 +2024-07-29 06:53:57,538 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=252682.66666666666, ans=0.1 +2024-07-29 06:54:09,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=252709.33333333334, ans=0.125 +2024-07-29 06:54:09,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=252709.33333333334, ans=0.125 +2024-07-29 06:54:11,813 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=252709.33333333334, ans=0.1 +2024-07-29 06:54:11,973 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.80 vs. limit=12.0 +2024-07-29 06:54:15,225 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.98 vs. 
limit=15.0 +2024-07-29 06:54:21,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=252722.66666666666, ans=0.125 +2024-07-29 06:54:24,295 INFO [train.py:1114] (3/4) Epoch 19, batch 5550, loss[loss=0.1273, simple_loss=0.2157, pruned_loss=0.01949, over 4702.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2624, pruned_loss=0.0403, over 933569.38 frames. ], batch size: 12, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:54:30,152 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.06 vs. limit=15.0 +2024-07-29 06:54:40,798 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer2.prob, batch_count=252762.66666666666, ans=0.125 +2024-07-29 06:54:57,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=252789.33333333334, ans=0.0 +2024-07-29 06:54:57,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=252789.33333333334, ans=0.1 +2024-07-29 06:55:00,396 INFO [train.py:1114] (3/4) Epoch 19, batch 5600, loss[loss=0.1827, simple_loss=0.2868, pruned_loss=0.03932, over 4745.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2637, pruned_loss=0.04072, over 934139.14 frames. ], batch size: 14, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:07,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=252816.0, ans=0.125 +2024-07-29 06:55:08,083 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=252816.0, ans=0.2 +2024-07-29 06:55:10,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 6.000e+01 7.138e+01 7.919e+01 1.152e+02, threshold=1.428e+02, percent-clipped=0.0 +2024-07-29 06:55:19,228 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=13.10 vs. limit=15.0 +2024-07-29 06:55:21,884 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=252842.66666666666, ans=0.2 +2024-07-29 06:55:24,705 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=252842.66666666666, ans=0.0 +2024-07-29 06:55:28,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=252856.0, ans=0.125 +2024-07-29 06:55:30,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=252856.0, ans=0.125 +2024-07-29 06:55:37,016 INFO [train.py:1114] (3/4) Epoch 19, batch 5650, loss[loss=0.1609, simple_loss=0.2575, pruned_loss=0.03212, over 4539.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2626, pruned_loss=0.04047, over 936845.68 frames. 
], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:55:49,063 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:55:56,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=252896.0, ans=0.95 +2024-07-29 06:56:02,111 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:56:15,651 INFO [train.py:1114] (3/4) Epoch 19, batch 5700, loss[loss=0.1711, simple_loss=0.2712, pruned_loss=0.03546, over 4688.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2627, pruned_loss=0.0406, over 937927.44 frames. ], batch size: 13, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:56:18,247 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.13 vs. limit=22.5 +2024-07-29 06:56:18,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=252936.0, ans=0.0 +2024-07-29 06:56:20,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=252936.0, ans=0.125 +2024-07-29 06:56:25,030 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.780e+01 5.631e+01 6.115e+01 6.862e+01 9.521e+01, threshold=1.223e+02, percent-clipped=0.0 +2024-07-29 06:56:25,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass_mid.scale_min, batch_count=252949.33333333334, ans=0.2 +2024-07-29 06:56:25,237 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=252949.33333333334, ans=0.0 +2024-07-29 06:56:35,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=252962.66666666666, ans=0.125 +2024-07-29 06:56:45,216 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=252976.0, ans=0.0 +2024-07-29 06:56:55,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=252989.33333333334, ans=0.125 +2024-07-29 06:56:56,391 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=252989.33333333334, ans=0.0 +2024-07-29 06:56:57,573 INFO [train.py:1114] (3/4) Epoch 19, batch 5750, loss[loss=0.1764, simple_loss=0.2742, pruned_loss=0.03932, over 4705.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.264, pruned_loss=0.0408, over 938182.76 frames. ], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:10,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=253016.0, ans=0.0 +2024-07-29 06:57:18,154 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=253029.33333333334, ans=0.125 +2024-07-29 06:57:30,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=253056.0, ans=0.035 +2024-07-29 06:57:33,966 INFO [train.py:1114] (3/4) Epoch 19, batch 5800, loss[loss=0.2005, simple_loss=0.2878, pruned_loss=0.05655, over 4681.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2645, pruned_loss=0.04075, over 937695.35 frames. 
], batch size: 19, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:57:43,290 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.734e+01 5.591e+01 6.504e+01 7.272e+01 1.266e+02, threshold=1.301e+02, percent-clipped=1.0 +2024-07-29 06:57:44,102 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 06:57:47,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=253096.0, ans=0.1 +2024-07-29 06:58:02,266 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.50 vs. limit=22.5 +2024-07-29 06:58:03,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=253122.66666666666, ans=0.125 +2024-07-29 06:58:08,035 INFO [train.py:1114] (3/4) Epoch 19, batch 5850, loss[loss=0.1809, simple_loss=0.2877, pruned_loss=0.03698, over 4610.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04055, over 938635.63 frames. ], batch size: 21, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:11,482 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253136.0, ans=0.0 +2024-07-29 06:58:19,011 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.17 vs. limit=15.0 +2024-07-29 06:58:23,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=253162.66666666666, ans=0.025 +2024-07-29 06:58:32,129 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.scale_min, batch_count=253176.0, ans=0.2 +2024-07-29 06:58:32,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253176.0, ans=0.125 +2024-07-29 06:58:39,429 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253189.33333333334, ans=0.1 +2024-07-29 06:58:40,658 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=253189.33333333334, ans=0.025 +2024-07-29 06:58:45,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=253189.33333333334, ans=0.125 +2024-07-29 06:58:46,369 INFO [train.py:1114] (3/4) Epoch 19, batch 5900, loss[loss=0.1513, simple_loss=0.2484, pruned_loss=0.02707, over 4699.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2639, pruned_loss=0.04058, over 938616.33 frames. 
], batch size: 15, lr: 3.91e-03, grad_scale: 32.0 +2024-07-29 06:58:52,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253216.0, ans=0.0 +2024-07-29 06:58:55,597 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+01 5.657e+01 6.141e+01 7.066e+01 1.029e+02, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 06:58:55,742 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass.skip_rate, batch_count=253216.0, ans=0.04949747468305833 +2024-07-29 06:59:07,151 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=253242.66666666666, ans=0.125 +2024-07-29 06:59:10,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=253242.66666666666, ans=0.2 +2024-07-29 06:59:11,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.20 vs. limit=15.0 +2024-07-29 06:59:17,059 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=253256.0, ans=0.125 +2024-07-29 06:59:19,489 INFO [train.py:1114] (3/4) Epoch 19, batch 5950, loss[loss=0.2253, simple_loss=0.3243, pruned_loss=0.06309, over 4680.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2642, pruned_loss=0.0409, over 940450.14 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:31,628 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=253282.66666666666, ans=0.025 +2024-07-29 06:59:51,327 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=253322.66666666666, ans=0.125 +2024-07-29 06:59:58,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253336.0, ans=0.125 +2024-07-29 06:59:58,956 INFO [train.py:1114] (3/4) Epoch 19, batch 6000, loss[loss=0.2177, simple_loss=0.3126, pruned_loss=0.06133, over 4193.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2639, pruned_loss=0.04125, over 937952.00 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 06:59:58,956 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 07:00:15,062 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1606, simple_loss=0.2627, pruned_loss=0.02924, over 944034.00 frames. +2024-07-29 07:00:15,063 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 07:00:24,593 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.643e+01 5.715e+01 6.299e+01 6.877e+01 1.010e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:00:41,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253376.0, ans=0.1 +2024-07-29 07:00:48,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=253389.33333333334, ans=0.0 +2024-07-29 07:00:57,730 INFO [train.py:1114] (3/4) Epoch 19, batch 6050, loss[loss=0.1469, simple_loss=0.2333, pruned_loss=0.03029, over 4772.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2633, pruned_loss=0.04056, over 938965.81 frames. 
], batch size: 12, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:05,913 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=253402.66666666666, ans=0.07 +2024-07-29 07:01:30,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=253456.0, ans=0.0 +2024-07-29 07:01:35,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=253469.33333333334, ans=0.07 +2024-07-29 07:01:35,548 INFO [train.py:1114] (3/4) Epoch 19, batch 6100, loss[loss=0.1589, simple_loss=0.2664, pruned_loss=0.02571, over 4681.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2629, pruned_loss=0.04047, over 938336.20 frames. ], batch size: 15, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:01:40,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=253469.33333333334, ans=0.0 +2024-07-29 07:01:46,499 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.578e+01 5.746e+01 6.337e+01 7.599e+01 1.096e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 07:01:50,184 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_abs, batch_count=253496.0, ans=0.5 +2024-07-29 07:01:56,921 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=253509.33333333334, ans=0.0 +2024-07-29 07:02:03,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253522.66666666666, ans=0.0 +2024-07-29 07:02:05,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=253522.66666666666, ans=0.125 +2024-07-29 07:02:07,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253522.66666666666, ans=0.1 +2024-07-29 07:02:07,400 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.55 vs. limit=15.0 +2024-07-29 07:02:10,982 INFO [train.py:1114] (3/4) Epoch 19, batch 6150, loss[loss=0.1795, simple_loss=0.2639, pruned_loss=0.04753, over 3557.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.03999, over 937156.29 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:02:12,691 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.67 vs. 
limit=15.0 +2024-07-29 07:02:13,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=253536.0, ans=0.125 +2024-07-29 07:02:17,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=253549.33333333334, ans=15.0 +2024-07-29 07:02:18,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=253549.33333333334, ans=0.0 +2024-07-29 07:02:32,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=253576.0, ans=0.0 +2024-07-29 07:02:36,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=253576.0, ans=0.1 +2024-07-29 07:02:36,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=253576.0, ans=0.125 +2024-07-29 07:02:46,251 INFO [train.py:1114] (3/4) Epoch 19, batch 6200, loss[loss=0.1727, simple_loss=0.286, pruned_loss=0.0297, over 4745.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2628, pruned_loss=0.04008, over 936627.20 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:02:46,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=253602.66666666666, ans=0.0 +2024-07-29 07:02:49,422 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.64 vs. limit=10.0 +2024-07-29 07:03:00,685 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.88 vs. limit=6.0 +2024-07-29 07:03:00,854 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.644e+01 5.872e+01 6.274e+01 7.114e+01 1.110e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 07:03:03,032 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=253616.0, ans=10.0 +2024-07-29 07:03:26,826 INFO [train.py:1114] (3/4) Epoch 19, batch 6250, loss[loss=0.1667, simple_loss=0.2631, pruned_loss=0.03514, over 4805.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2635, pruned_loss=0.04083, over 933205.07 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:03:42,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=253696.0, ans=0.07 +2024-07-29 07:03:50,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=253709.33333333334, ans=0.2 +2024-07-29 07:04:00,510 INFO [train.py:1114] (3/4) Epoch 19, batch 6300, loss[loss=0.14, simple_loss=0.2295, pruned_loss=0.02527, over 4515.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.263, pruned_loss=0.04075, over 930410.73 frames. 
], batch size: 10, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:09,766 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.560e+01 5.656e+01 6.439e+01 7.394e+01 1.114e+02, threshold=1.288e+02, percent-clipped=0.0 +2024-07-29 07:04:26,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=253776.0, ans=0.125 +2024-07-29 07:04:32,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=253789.33333333334, ans=0.2 +2024-07-29 07:04:47,722 INFO [train.py:1114] (3/4) Epoch 19, batch 6350, loss[loss=0.1467, simple_loss=0.2463, pruned_loss=0.0235, over 4557.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2622, pruned_loss=0.04049, over 934297.70 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:04:50,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=21.52 vs. limit=22.5 +2024-07-29 07:04:51,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=253802.66666666666, ans=0.0 +2024-07-29 07:04:52,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=253802.66666666666, ans=0.2 +2024-07-29 07:04:55,438 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=253816.0, ans=0.0 +2024-07-29 07:04:55,444 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=253816.0, ans=0.0 +2024-07-29 07:05:04,923 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=16.95 vs. limit=22.5 +2024-07-29 07:05:17,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=253856.0, ans=0.0 +2024-07-29 07:05:21,132 INFO [train.py:1114] (3/4) Epoch 19, batch 6400, loss[loss=0.1789, simple_loss=0.2683, pruned_loss=0.04474, over 4646.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2623, pruned_loss=0.04051, over 935656.90 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:05:25,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=253869.33333333334, ans=0.125 +2024-07-29 07:05:29,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=253882.66666666666, ans=0.125 +2024-07-29 07:05:30,226 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.936e+01 6.680e+01 7.365e+01 1.184e+02, threshold=1.336e+02, percent-clipped=0.0 +2024-07-29 07:05:33,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=253882.66666666666, ans=0.125 +2024-07-29 07:05:42,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer1.prob, batch_count=253909.33333333334, ans=0.125 +2024-07-29 07:05:43,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.76 vs. 
limit=15.0 +2024-07-29 07:05:43,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=253909.33333333334, ans=0.125 +2024-07-29 07:05:46,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=253909.33333333334, ans=15.0 +2024-07-29 07:05:54,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.71 vs. limit=15.0 +2024-07-29 07:05:55,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten1.whitening_limit, batch_count=253922.66666666666, ans=10.0 +2024-07-29 07:05:58,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=253922.66666666666, ans=0.1 +2024-07-29 07:06:01,009 INFO [train.py:1114] (3/4) Epoch 19, batch 6450, loss[loss=0.1625, simple_loss=0.2482, pruned_loss=0.03843, over 4411.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.0403, over 939033.70 frames. ], batch size: 21, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:09,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=253949.33333333334, ans=0.125 +2024-07-29 07:06:14,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=253962.66666666666, ans=0.0 +2024-07-29 07:06:34,900 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=253976.0, ans=0.0 +2024-07-29 07:06:40,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=253989.33333333334, ans=0.125 +2024-07-29 07:06:45,348 INFO [train.py:1114] (3/4) Epoch 19, batch 6500, loss[loss=0.2014, simple_loss=0.2834, pruned_loss=0.05964, over 3211.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2619, pruned_loss=0.0399, over 939862.33 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:06:46,194 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254002.66666666666, ans=0.125 +2024-07-29 07:06:49,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten.whitening_limit, batch_count=254002.66666666666, ans=15.0 +2024-07-29 07:06:54,895 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.939e+01 5.824e+01 6.462e+01 7.830e+01 1.082e+02, threshold=1.292e+02, percent-clipped=0.0 +2024-07-29 07:06:57,204 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:07:03,981 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=12.81 vs. limit=15.0 +2024-07-29 07:07:05,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff2_skip_rate, batch_count=254042.66666666666, ans=0.0 +2024-07-29 07:07:20,277 INFO [train.py:1114] (3/4) Epoch 19, batch 6550, loss[loss=0.1417, simple_loss=0.2306, pruned_loss=0.02637, over 4793.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.261, pruned_loss=0.03958, over 942786.90 frames. 
], batch size: 11, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:07:39,992 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.49 vs. limit=22.5 +2024-07-29 07:07:40,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254096.0, ans=0.1 +2024-07-29 07:07:44,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=254109.33333333334, ans=0.125 +2024-07-29 07:07:56,773 INFO [train.py:1114] (3/4) Epoch 19, batch 6600, loss[loss=0.1812, simple_loss=0.2678, pruned_loss=0.04729, over 4935.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2624, pruned_loss=0.04015, over 944899.19 frames. ], batch size: 14, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:06,404 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.740e+01 5.577e+01 6.191e+01 6.872e+01 1.333e+02, threshold=1.238e+02, percent-clipped=1.0 +2024-07-29 07:08:15,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=254162.66666666666, ans=0.0 +2024-07-29 07:08:18,204 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.69 vs. limit=15.0 +2024-07-29 07:08:22,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=254176.0, ans=0.0 +2024-07-29 07:08:22,849 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=254176.0, ans=0.0 +2024-07-29 07:08:23,348 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254189.33333333334, ans=0.1 +2024-07-29 07:08:27,458 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=254189.33333333334, ans=0.0 +2024-07-29 07:08:30,610 INFO [train.py:1114] (3/4) Epoch 19, batch 6650, loss[loss=0.1937, simple_loss=0.2852, pruned_loss=0.05114, over 4620.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2624, pruned_loss=0.04032, over 943109.06 frames. ], batch size: 17, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:08:34,177 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254202.66666666666, ans=0.1 +2024-07-29 07:08:47,761 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=254229.33333333334, ans=0.0 +2024-07-29 07:08:50,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.min_positive, batch_count=254242.66666666666, ans=0.025 +2024-07-29 07:08:55,076 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=254242.66666666666, ans=0.0 +2024-07-29 07:08:56,699 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.85 vs. limit=15.0 +2024-07-29 07:09:04,190 INFO [train.py:1114] (3/4) Epoch 19, batch 6700, loss[loss=0.1848, simple_loss=0.2721, pruned_loss=0.04879, over 4740.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2629, pruned_loss=0.04078, over 942200.63 frames. 
], batch size: 19, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:04,241 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254269.33333333334, ans=0.125 +2024-07-29 07:09:07,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=254269.33333333334, ans=0.2 +2024-07-29 07:09:07,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=254269.33333333334, ans=0.125 +2024-07-29 07:09:08,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=254269.33333333334, ans=0.0 +2024-07-29 07:09:13,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.47 vs. limit=6.0 +2024-07-29 07:09:13,674 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.681e+01 5.588e+01 6.301e+01 6.767e+01 8.851e+01, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:09:16,738 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254282.66666666666, ans=0.125 +2024-07-29 07:09:19,325 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=254296.0, ans=0.0 +2024-07-29 07:09:20,278 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.50 vs. limit=10.0 +2024-07-29 07:09:20,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.51 vs. limit=15.0 +2024-07-29 07:09:21,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.73 vs. limit=15.0 +2024-07-29 07:09:22,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=254296.0, ans=0.0 +2024-07-29 07:09:33,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=4.10 vs. limit=15.0 +2024-07-29 07:09:34,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=254322.66666666666, ans=0.125 +2024-07-29 07:09:34,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=254322.66666666666, ans=15.0 +2024-07-29 07:09:37,838 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=254336.0, ans=0.04949747468305833 +2024-07-29 07:09:38,293 INFO [train.py:1114] (3/4) Epoch 19, batch 6750, loss[loss=0.1884, simple_loss=0.2745, pruned_loss=0.05112, over 4244.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2638, pruned_loss=0.04103, over 940179.40 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:09:54,367 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.47 vs. 
limit=10.0 +2024-07-29 07:09:55,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=254362.66666666666, ans=0.0 +2024-07-29 07:09:57,629 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.58 vs. limit=15.0 +2024-07-29 07:10:02,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=254376.0, ans=0.1 +2024-07-29 07:10:02,904 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=254376.0, ans=0.07 +2024-07-29 07:10:04,335 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=254376.0, ans=0.125 +2024-07-29 07:10:10,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=254389.33333333334, ans=0.125 +2024-07-29 07:10:13,858 INFO [train.py:1114] (3/4) Epoch 19, batch 6800, loss[loss=0.2008, simple_loss=0.3029, pruned_loss=0.04939, over 4630.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2641, pruned_loss=0.04074, over 938338.38 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:17,325 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:10:20,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254416.0, ans=0.0 +2024-07-29 07:10:22,884 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.653e+01 5.699e+01 6.328e+01 7.077e+01 1.070e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:10:24,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_positive, batch_count=254416.0, ans=0.05 +2024-07-29 07:10:40,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.04 vs. limit=12.0 +2024-07-29 07:10:46,746 INFO [train.py:1114] (3/4) Epoch 19, batch 6850, loss[loss=0.2001, simple_loss=0.2898, pruned_loss=0.05518, over 4688.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2641, pruned_loss=0.041, over 940533.33 frames. 
], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:10:48,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=254469.33333333334, ans=0.125 +2024-07-29 07:10:48,945 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=254469.33333333334, ans=0.2 +2024-07-29 07:10:54,169 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=254482.66666666666, ans=0.125 +2024-07-29 07:10:56,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=254482.66666666666, ans=0.0 +2024-07-29 07:11:03,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=254496.0, ans=0.125 +2024-07-29 07:11:15,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=254522.66666666666, ans=0.2 +2024-07-29 07:11:20,050 INFO [train.py:1114] (3/4) Epoch 19, batch 6900, loss[loss=0.1597, simple_loss=0.2562, pruned_loss=0.03165, over 4952.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2638, pruned_loss=0.0406, over 942901.32 frames. ], batch size: 13, lr: 3.90e-03, grad_scale: 32.0 +2024-07-29 07:11:21,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=254536.0, ans=0.025 +2024-07-29 07:11:27,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.98 vs. limit=15.0 +2024-07-29 07:11:29,546 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.761e+01 5.808e+01 6.453e+01 7.424e+01 1.237e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 07:11:39,162 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.40 vs. limit=15.0 +2024-07-29 07:11:41,649 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=254576.0, ans=0.125 +2024-07-29 07:11:42,922 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=254576.0, ans=0.125 +2024-07-29 07:11:49,790 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254589.33333333334, ans=0.125 +2024-07-29 07:11:52,058 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.85 vs. limit=15.0 +2024-07-29 07:11:52,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=254589.33333333334, ans=0.125 +2024-07-29 07:11:53,785 INFO [train.py:1114] (3/4) Epoch 19, batch 6950, loss[loss=0.1282, simple_loss=0.2167, pruned_loss=0.01983, over 4559.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2642, pruned_loss=0.04067, over 940585.23 frames. 
], batch size: 10, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:12:05,951 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=254616.0, ans=0.125 +2024-07-29 07:12:09,567 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.27 vs. limit=15.0 +2024-07-29 07:12:12,629 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=254629.33333333334, ans=0.125 +2024-07-29 07:12:17,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=254642.66666666666, ans=10.0 +2024-07-29 07:12:26,435 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=254656.0, ans=0.125 +2024-07-29 07:12:26,477 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=254656.0, ans=0.0 +2024-07-29 07:12:29,160 INFO [train.py:1114] (3/4) Epoch 19, batch 7000, loss[loss=0.1681, simple_loss=0.2639, pruned_loss=0.03614, over 4681.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2632, pruned_loss=0.04022, over 939204.07 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:12:37,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=254682.66666666666, ans=0.5 +2024-07-29 07:12:38,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.459e+01 5.806e+01 6.455e+01 7.186e+01 1.060e+02, threshold=1.291e+02, percent-clipped=0.0 +2024-07-29 07:12:39,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=254682.66666666666, ans=0.125 +2024-07-29 07:12:41,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=254682.66666666666, ans=0.125 +2024-07-29 07:12:42,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=8.40 vs. limit=15.0 +2024-07-29 07:12:43,239 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254696.0, ans=0.1 +2024-07-29 07:12:47,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=254696.0, ans=0.125 +2024-07-29 07:12:53,281 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.40 vs. limit=12.0 +2024-07-29 07:13:02,140 INFO [train.py:1114] (3/4) Epoch 19, batch 7050, loss[loss=0.1748, simple_loss=0.2822, pruned_loss=0.03373, over 4706.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2627, pruned_loss=0.03981, over 942337.18 frames. 
], batch size: 19, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:13:12,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254749.33333333334, ans=0.1 +2024-07-29 07:13:17,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=254762.66666666666, ans=0.2 +2024-07-29 07:13:26,490 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=254776.0, ans=0.025 +2024-07-29 07:13:34,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=254789.33333333334, ans=0.125 +2024-07-29 07:13:34,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=254789.33333333334, ans=0.1 +2024-07-29 07:13:36,038 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:13:38,640 INFO [train.py:1114] (3/4) Epoch 19, batch 7100, loss[loss=0.2046, simple_loss=0.3037, pruned_loss=0.05273, over 4791.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2638, pruned_loss=0.04029, over 937111.17 frames. ], batch size: 15, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:13:41,298 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=254802.66666666666, ans=0.125 +2024-07-29 07:13:42,891 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=4.24 vs. limit=12.0 +2024-07-29 07:13:49,300 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.888e+01 5.809e+01 6.351e+01 7.232e+01 1.086e+02, threshold=1.270e+02, percent-clipped=0.0 +2024-07-29 07:14:00,648 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=254842.66666666666, ans=0.0 +2024-07-29 07:14:02,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=254842.66666666666, ans=0.125 +2024-07-29 07:14:03,960 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.whiten, num_groups=1, num_channels=192, metric=3.72 vs. limit=12.0 +2024-07-29 07:14:13,152 INFO [train.py:1114] (3/4) Epoch 19, batch 7150, loss[loss=0.1641, simple_loss=0.259, pruned_loss=0.03466, over 4536.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2626, pruned_loss=0.04003, over 938075.50 frames. ], batch size: 21, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:14:25,356 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=254896.0, ans=0.1 +2024-07-29 07:14:36,715 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=254909.33333333334, ans=0.0 +2024-07-29 07:14:39,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=254922.66666666666, ans=0.0 +2024-07-29 07:14:46,073 INFO [train.py:1114] (3/4) Epoch 19, batch 7200, loss[loss=0.2358, simple_loss=0.3157, pruned_loss=0.07796, over 4801.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2626, pruned_loss=0.04022, over 938604.13 frames. 
], batch size: 15, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:14:47,819 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=10.75 vs. limit=15.0 +2024-07-29 07:14:55,077 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+01 5.607e+01 6.088e+01 6.745e+01 8.858e+01, threshold=1.218e+02, percent-clipped=0.0 +2024-07-29 07:14:55,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=254949.33333333334, ans=0.0 +2024-07-29 07:15:10,881 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=254976.0, ans=0.0 +2024-07-29 07:15:11,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.94 vs. limit=15.0 +2024-07-29 07:15:18,456 INFO [train.py:1114] (3/4) Epoch 19, batch 7250, loss[loss=0.1399, simple_loss=0.2179, pruned_loss=0.03091, over 4851.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2619, pruned_loss=0.0401, over 940010.12 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:15:23,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=255002.66666666666, ans=0.125 +2024-07-29 07:15:24,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=255016.0, ans=0.125 +2024-07-29 07:15:27,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255016.0, ans=0.1 +2024-07-29 07:15:43,092 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=255042.66666666666, ans=0.125 +2024-07-29 07:15:50,898 INFO [train.py:1114] (3/4) Epoch 19, batch 7300, loss[loss=0.1752, simple_loss=0.2592, pruned_loss=0.0456, over 4855.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2616, pruned_loss=0.03973, over 940678.31 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:15:55,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=255069.33333333334, ans=0.0 +2024-07-29 07:15:55,670 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255069.33333333334, ans=0.1 +2024-07-29 07:16:00,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.606e+01 6.073e+01 6.714e+01 9.388e+01, threshold=1.215e+02, percent-clipped=0.0 +2024-07-29 07:16:17,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255122.66666666666, ans=0.1 +2024-07-29 07:16:19,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=255122.66666666666, ans=0.0 +2024-07-29 07:16:19,393 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.73 vs. limit=22.5 +2024-07-29 07:16:23,608 INFO [train.py:1114] (3/4) Epoch 19, batch 7350, loss[loss=0.166, simple_loss=0.2563, pruned_loss=0.03782, over 4642.00 frames. 
], tot_loss[loss=0.1712, simple_loss=0.2625, pruned_loss=0.03996, over 939683.27 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:16:33,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=255149.33333333334, ans=0.125 +2024-07-29 07:16:35,982 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=16.41 vs. limit=22.5 +2024-07-29 07:16:39,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=255162.66666666666, ans=0.2 +2024-07-29 07:16:56,413 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=255176.0, ans=0.125 +2024-07-29 07:16:57,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.min_positive, batch_count=255176.0, ans=0.05 +2024-07-29 07:16:59,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255176.0, ans=0.0 +2024-07-29 07:17:10,011 INFO [train.py:1114] (3/4) Epoch 19, batch 7400, loss[loss=0.1857, simple_loss=0.2763, pruned_loss=0.04758, over 4693.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2636, pruned_loss=0.04004, over 940531.87 frames. ], batch size: 13, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:17:12,130 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=255202.66666666666, ans=0.035 +2024-07-29 07:17:12,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=255202.66666666666, ans=0.025 +2024-07-29 07:17:16,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=255216.0, ans=0.125 +2024-07-29 07:17:16,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=255216.0, ans=0.2 +2024-07-29 07:17:18,553 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=8.74 vs. limit=15.0 +2024-07-29 07:17:19,331 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.361e+01 5.806e+01 6.617e+01 8.276e+01 1.312e+02, threshold=1.323e+02, percent-clipped=3.0 +2024-07-29 07:17:32,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=255242.66666666666, ans=0.1 +2024-07-29 07:17:39,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=255256.0, ans=0.125 +2024-07-29 07:17:42,968 INFO [train.py:1114] (3/4) Epoch 19, batch 7450, loss[loss=0.1558, simple_loss=0.2429, pruned_loss=0.0344, over 4614.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04005, over 938062.85 frames. ], batch size: 11, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:17:43,091 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=255269.33333333334, ans=0.125 +2024-07-29 07:17:44,641 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.35 vs. 
limit=22.5 +2024-07-29 07:17:54,897 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.ff2_skip_rate, batch_count=255282.66666666666, ans=0.0 +2024-07-29 07:17:57,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_ff3.min_abs, batch_count=255296.0, ans=0.2 +2024-07-29 07:18:12,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=255322.66666666666, ans=0.125 +2024-07-29 07:18:14,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=255322.66666666666, ans=0.125 +2024-07-29 07:18:15,925 INFO [train.py:1114] (3/4) Epoch 19, batch 7500, loss[loss=0.2287, simple_loss=0.3134, pruned_loss=0.07201, over 3505.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2624, pruned_loss=0.03967, over 936352.04 frames. ], batch size: 36, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:18:17,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=255336.0, ans=0.09899494936611666 +2024-07-29 07:18:20,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=255336.0, ans=0.0 +2024-07-29 07:18:20,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=255336.0, ans=0.125 +2024-07-29 07:18:21,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255349.33333333334, ans=0.125 +2024-07-29 07:18:25,193 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.501e+01 5.980e+01 6.814e+01 1.020e+02, threshold=1.196e+02, percent-clipped=0.0 +2024-07-29 07:18:28,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=255349.33333333334, ans=0.125 +2024-07-29 07:18:41,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=255389.33333333334, ans=0.05 +2024-07-29 07:18:43,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255389.33333333334, ans=0.0 +2024-07-29 07:18:48,939 INFO [train.py:1114] (3/4) Epoch 19, batch 7550, loss[loss=0.196, simple_loss=0.2847, pruned_loss=0.05364, over 4633.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2638, pruned_loss=0.04032, over 936368.86 frames. ], batch size: 17, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:18:52,311 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=255402.66666666666, ans=0.125 +2024-07-29 07:18:59,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.prob, batch_count=255416.0, ans=0.125 +2024-07-29 07:19:04,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=255429.33333333334, ans=0.0 +2024-07-29 07:19:14,445 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.44 vs. 
limit=15.0 +2024-07-29 07:19:15,570 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:19:19,077 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=22.81 vs. limit=22.5 +2024-07-29 07:19:32,999 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=255456.0, ans=0.125 +2024-07-29 07:19:34,138 INFO [train.py:1114] (3/4) Epoch 19, batch 7600, loss[loss=0.186, simple_loss=0.2718, pruned_loss=0.05015, over 4816.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2627, pruned_loss=0.04006, over 937935.03 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:21:11,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer2.prob, batch_count=255482.66666666666, ans=0.125 +2024-07-29 07:21:11,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=255482.66666666666, ans=0.125 +2024-07-29 07:21:14,853 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.660e+01 5.357e+01 5.885e+01 6.503e+01 9.082e+01, threshold=1.177e+02, percent-clipped=0.0 +2024-07-29 07:21:21,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255496.0, ans=0.125 +2024-07-29 07:21:26,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=255509.33333333334, ans=0.125 +2024-07-29 07:21:26,944 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.58 vs. limit=12.0 +2024-07-29 07:21:32,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255522.66666666666, ans=0.1 +2024-07-29 07:21:37,714 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.99 vs. limit=15.0 +2024-07-29 07:21:38,459 INFO [train.py:1114] (3/4) Epoch 19, batch 7650, loss[loss=0.1561, simple_loss=0.2465, pruned_loss=0.03289, over 4938.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2618, pruned_loss=0.03976, over 937349.02 frames. ], batch size: 12, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:21:44,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=255549.33333333334, ans=0.2 +2024-07-29 07:21:54,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255562.66666666666, ans=0.125 +2024-07-29 07:21:57,848 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.72 vs. limit=15.0 +2024-07-29 07:21:58,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=255576.0, ans=0.0 +2024-07-29 07:22:02,069 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.98 vs. 
limit=22.5 +2024-07-29 07:22:02,453 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:22:05,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=255589.33333333334, ans=0.125 +2024-07-29 07:22:07,253 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module2.whiten, num_groups=1, num_channels=192, metric=8.30 vs. limit=15.0 +2024-07-29 07:22:11,506 INFO [train.py:1114] (3/4) Epoch 19, batch 7700, loss[loss=0.1728, simple_loss=0.2589, pruned_loss=0.04339, over 4700.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2627, pruned_loss=0.03995, over 934615.93 frames. ], batch size: 13, lr: 3.89e-03, grad_scale: 64.0 +2024-07-29 07:22:21,009 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.579e+01 5.495e+01 5.903e+01 6.797e+01 9.764e+01, threshold=1.181e+02, percent-clipped=0.0 +2024-07-29 07:22:23,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255616.0, ans=0.1 +2024-07-29 07:22:24,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=255616.0, ans=0.125 +2024-07-29 07:22:24,663 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=255616.0, ans=0.0 +2024-07-29 07:22:27,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=255629.33333333334, ans=0.2 +2024-07-29 07:22:28,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=255629.33333333334, ans=0.125 +2024-07-29 07:22:40,565 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.47 vs. limit=22.5 +2024-07-29 07:22:41,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=255656.0, ans=0.125 +2024-07-29 07:22:46,046 INFO [train.py:1114] (3/4) Epoch 19, batch 7750, loss[loss=0.1527, simple_loss=0.25, pruned_loss=0.02768, over 4928.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2633, pruned_loss=0.04008, over 935583.92 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:22:47,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer2.prob, batch_count=255669.33333333334, ans=0.125 +2024-07-29 07:22:59,961 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.prob, batch_count=255682.66666666666, ans=0.125 +2024-07-29 07:23:41,559 INFO [train.py:1114] (3/4) Epoch 19, batch 7800, loss[loss=0.1717, simple_loss=0.2653, pruned_loss=0.039, over 4668.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2645, pruned_loss=0.04034, over 937063.44 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:23:57,874 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.016e+01 5.806e+01 6.397e+01 7.223e+01 9.492e+01, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:24:02,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.91 vs. 
limit=15.0 +2024-07-29 07:24:05,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=255762.66666666666, ans=0.0 +2024-07-29 07:24:06,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=255762.66666666666, ans=0.07 +2024-07-29 07:24:17,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=255789.33333333334, ans=0.07 +2024-07-29 07:24:17,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=255789.33333333334, ans=0.125 +2024-07-29 07:24:17,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=255789.33333333334, ans=0.0 +2024-07-29 07:24:20,615 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=255789.33333333334, ans=0.0 +2024-07-29 07:24:21,833 INFO [train.py:1114] (3/4) Epoch 19, batch 7850, loss[loss=0.1698, simple_loss=0.2508, pruned_loss=0.04434, over 4550.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2647, pruned_loss=0.04082, over 936409.79 frames. ], batch size: 10, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:24:32,272 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=255802.66666666666, ans=0.1 +2024-07-29 07:24:32,997 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=255816.0, ans=0.1 +2024-07-29 07:24:47,893 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=255842.66666666666, ans=0.0 +2024-07-29 07:24:47,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=255842.66666666666, ans=0.125 +2024-07-29 07:24:53,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.const_attention_rate, batch_count=255856.0, ans=0.025 +2024-07-29 07:24:59,391 INFO [train.py:1114] (3/4) Epoch 19, batch 7900, loss[loss=0.165, simple_loss=0.2649, pruned_loss=0.03257, over 4878.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2656, pruned_loss=0.04119, over 933510.88 frames. ], batch size: 14, lr: 3.89e-03, grad_scale: 32.0 +2024-07-29 07:25:15,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=255869.33333333334, ans=0.2 +2024-07-29 07:25:19,920 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.535e+01 5.757e+01 6.184e+01 6.980e+01 1.069e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:26:55,419 INFO [train.py:1114] (3/4) Epoch 19, batch 7950, loss[loss=0.2178, simple_loss=0.3048, pruned_loss=0.06544, over 3149.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2648, pruned_loss=0.04084, over 935338.39 frames. ], batch size: 37, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:27:33,366 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.22 vs. 
limit=12.0 +2024-07-29 07:27:41,222 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=255989.33333333334, ans=0.125 +2024-07-29 07:27:50,605 INFO [train.py:1114] (3/4) Epoch 19, batch 8000, loss[loss=0.1526, simple_loss=0.2409, pruned_loss=0.03212, over 4607.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2638, pruned_loss=0.04081, over 935083.15 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:28:01,417 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.608e+01 5.673e+01 6.447e+01 7.571e+01 1.092e+02, threshold=1.289e+02, percent-clipped=0.0 +2024-07-29 07:28:14,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=256042.66666666666, ans=0.125 +2024-07-29 07:28:19,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=256056.0, ans=0.125 +2024-07-29 07:28:21,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256056.0, ans=0.125 +2024-07-29 07:28:23,180 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn1.whiten.whitening_limit, batch_count=256056.0, ans=22.5 +2024-07-29 07:28:24,187 INFO [train.py:1114] (3/4) Epoch 19, batch 8050, loss[loss=0.1812, simple_loss=0.2906, pruned_loss=0.03593, over 4813.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2646, pruned_loss=0.04102, over 934550.72 frames. ], batch size: 14, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:28:27,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256069.33333333334, ans=0.125 +2024-07-29 07:28:28,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=256069.33333333334, ans=0.0 +2024-07-29 07:28:35,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=256082.66666666666, ans=0.2 +2024-07-29 07:28:38,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=256096.0, ans=0.0 +2024-07-29 07:28:41,350 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.46 vs. limit=15.0 +2024-07-29 07:28:43,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=256109.33333333334, ans=15.0 +2024-07-29 07:28:55,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=256122.66666666666, ans=0.07 +2024-07-29 07:28:56,866 INFO [train.py:1114] (3/4) Epoch 19, batch 8100, loss[loss=0.1949, simple_loss=0.2683, pruned_loss=0.06075, over 4804.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2653, pruned_loss=0.04144, over 934104.63 frames. 
], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:28:58,281 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=256136.0, ans=0.125 +2024-07-29 07:28:58,880 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=256136.0, ans=0.0 +2024-07-29 07:29:00,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=256136.0, ans=0.125 +2024-07-29 07:29:02,000 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=256136.0, ans=0.125 +2024-07-29 07:29:03,457 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=256149.33333333334, ans=0.125 +2024-07-29 07:29:06,405 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.662e+01 5.781e+01 6.315e+01 7.245e+01 1.091e+02, threshold=1.263e+02, percent-clipped=0.0 +2024-07-29 07:29:26,366 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer1.prob, batch_count=256189.33333333334, ans=0.125 +2024-07-29 07:29:29,465 INFO [train.py:1114] (3/4) Epoch 19, batch 8150, loss[loss=0.2171, simple_loss=0.3124, pruned_loss=0.06085, over 4812.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2647, pruned_loss=0.0416, over 937728.78 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:29:34,908 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=256202.66666666666, ans=0.125 +2024-07-29 07:29:37,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=256216.0, ans=0.0 +2024-07-29 07:29:54,106 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.82 vs. limit=10.0 +2024-07-29 07:29:57,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=256256.0, ans=0.025 +2024-07-29 07:29:58,395 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=256256.0, ans=0.0 +2024-07-29 07:30:03,067 INFO [train.py:1114] (3/4) Epoch 19, batch 8200, loss[loss=0.1739, simple_loss=0.262, pruned_loss=0.0429, over 4792.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2644, pruned_loss=0.04108, over 939160.31 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:30:08,500 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.91 vs. 
limit=15.0 +2024-07-29 07:30:11,459 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=256282.66666666666, ans=0.125 +2024-07-29 07:30:12,535 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.925e+01 5.702e+01 6.206e+01 7.193e+01 9.525e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 07:30:17,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=256296.0, ans=0.025 +2024-07-29 07:30:25,339 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=9.75 vs. limit=15.0 +2024-07-29 07:30:33,477 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.23 vs. limit=22.5 +2024-07-29 07:30:34,021 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=256322.66666666666, ans=0.125 +2024-07-29 07:30:34,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=256336.0, ans=0.0 +2024-07-29 07:30:35,090 INFO [train.py:1114] (3/4) Epoch 19, batch 8250, loss[loss=0.178, simple_loss=0.2721, pruned_loss=0.04196, over 4896.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.264, pruned_loss=0.04083, over 939217.59 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:30:36,604 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=256336.0, ans=0.0 +2024-07-29 07:30:47,166 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=256349.33333333334, ans=0.025 +2024-07-29 07:31:00,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256389.33333333334, ans=0.125 +2024-07-29 07:31:06,432 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=256389.33333333334, ans=0.0 +2024-07-29 07:31:07,550 INFO [train.py:1114] (3/4) Epoch 19, batch 8300, loss[loss=0.1657, simple_loss=0.2637, pruned_loss=0.03387, over 4901.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2648, pruned_loss=0.04078, over 938979.49 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:31:17,051 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.581e+01 6.136e+01 6.669e+01 1.025e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 07:31:21,717 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=256429.33333333334, ans=0.125 +2024-07-29 07:31:32,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=256456.0, ans=0.125 +2024-07-29 07:31:34,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=256456.0, ans=0.125 +2024-07-29 07:31:36,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256456.0, ans=0.125 +2024-07-29 07:31:41,617 INFO [train.py:1114] (3/4) Epoch 19, batch 8350, loss[loss=0.1779, simple_loss=0.269, pruned_loss=0.0434, over 4805.00 frames. 
], tot_loss[loss=0.173, simple_loss=0.2644, pruned_loss=0.04078, over 941963.50 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:31:42,385 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.prob, batch_count=256469.33333333334, ans=0.125 +2024-07-29 07:31:48,700 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=256482.66666666666, ans=0.015 +2024-07-29 07:31:50,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256482.66666666666, ans=0.125 +2024-07-29 07:31:51,756 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.75 vs. limit=15.0 +2024-07-29 07:31:54,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.86 vs. limit=10.0 +2024-07-29 07:32:05,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=256509.33333333334, ans=0.125 +2024-07-29 07:32:14,723 INFO [train.py:1114] (3/4) Epoch 19, batch 8400, loss[loss=0.1251, simple_loss=0.2117, pruned_loss=0.01927, over 4775.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2638, pruned_loss=0.04021, over 940412.48 frames. ], batch size: 12, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:32:21,418 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=256549.33333333334, ans=0.125 +2024-07-29 07:32:24,405 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.693e+01 5.636e+01 6.331e+01 6.924e+01 1.027e+02, threshold=1.266e+02, percent-clipped=0.0 +2024-07-29 07:32:30,199 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.01 vs. limit=15.0 +2024-07-29 07:32:37,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.25 vs. limit=15.0 +2024-07-29 07:32:49,194 INFO [train.py:1114] (3/4) Epoch 19, batch 8450, loss[loss=0.1865, simple_loss=0.2704, pruned_loss=0.05128, over 4803.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2641, pruned_loss=0.0404, over 939397.74 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:32:55,623 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=256616.0, ans=0.125 +2024-07-29 07:33:02,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=256616.0, ans=0.125 +2024-07-29 07:33:12,399 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:33:26,837 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=256656.0, ans=0.1 +2024-07-29 07:33:28,016 INFO [train.py:1114] (3/4) Epoch 19, batch 8500, loss[loss=0.1425, simple_loss=0.2307, pruned_loss=0.02715, over 4603.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04044, over 939293.77 frames. 
], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:33:37,643 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.521e+01 5.699e+01 6.220e+01 6.936e+01 1.043e+02, threshold=1.244e+02, percent-clipped=0.0 +2024-07-29 07:33:38,452 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=256682.66666666666, ans=0.5 +2024-07-29 07:33:41,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=256696.0, ans=0.04949747468305833 +2024-07-29 07:33:47,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=256696.0, ans=0.2 +2024-07-29 07:34:03,199 INFO [train.py:1114] (3/4) Epoch 19, batch 8550, loss[loss=0.1523, simple_loss=0.2394, pruned_loss=0.03257, over 4791.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2631, pruned_loss=0.04041, over 939829.08 frames. ], batch size: 11, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:34:06,523 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256736.0, ans=0.125 +2024-07-29 07:34:07,808 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=256736.0, ans=0.0 +2024-07-29 07:34:08,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256736.0, ans=0.0 +2024-07-29 07:34:11,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.max_abs, batch_count=256749.33333333334, ans=10.0 +2024-07-29 07:34:46,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=256789.33333333334, ans=0.125 +2024-07-29 07:34:52,557 INFO [train.py:1114] (3/4) Epoch 19, batch 8600, loss[loss=0.2007, simple_loss=0.2934, pruned_loss=0.05398, over 4814.00 frames. ], tot_loss[loss=0.172, simple_loss=0.263, pruned_loss=0.04054, over 939549.37 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:35:04,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=256816.0, ans=0.0 +2024-07-29 07:35:04,896 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.663e+01 5.627e+01 6.563e+01 7.545e+01 1.202e+02, threshold=1.313e+02, percent-clipped=0.0 +2024-07-29 07:35:18,060 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=256842.66666666666, ans=0.0 +2024-07-29 07:35:18,287 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.81 vs. limit=6.0 +2024-07-29 07:36:04,067 INFO [train.py:1114] (3/4) Epoch 19, batch 8650, loss[loss=0.2016, simple_loss=0.3037, pruned_loss=0.04974, over 4907.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2621, pruned_loss=0.0401, over 940633.00 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:36:06,838 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.26 vs. 
limit=15.0 +2024-07-29 07:36:18,246 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=256896.0, ans=0.0 +2024-07-29 07:36:35,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=256909.33333333334, ans=0.125 +2024-07-29 07:36:44,197 INFO [train.py:1114] (3/4) Epoch 19, batch 8700, loss[loss=0.172, simple_loss=0.2693, pruned_loss=0.03731, over 4758.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2637, pruned_loss=0.04077, over 938598.05 frames. ], batch size: 13, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:36:53,801 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.837e+01 5.735e+01 6.299e+01 7.253e+01 1.043e+02, threshold=1.260e+02, percent-clipped=0.0 +2024-07-29 07:36:57,662 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=256962.66666666666, ans=0.0 +2024-07-29 07:37:03,005 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=256976.0, ans=0.125 +2024-07-29 07:37:06,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=256976.0, ans=0.125 +2024-07-29 07:37:20,802 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=256989.33333333334, ans=0.125 +2024-07-29 07:37:27,841 INFO [train.py:1114] (3/4) Epoch 19, batch 8750, loss[loss=0.1936, simple_loss=0.2896, pruned_loss=0.04883, over 4684.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04038, over 936900.62 frames. ], batch size: 15, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:37:55,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=257029.33333333334, ans=0.2 +2024-07-29 07:38:04,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=257042.66666666666, ans=0.025 +2024-07-29 07:38:06,240 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.12 vs. limit=15.0 +2024-07-29 07:38:06,533 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:38:15,037 INFO [train.py:1114] (3/4) Epoch 19, batch 8800, loss[loss=0.155, simple_loss=0.2464, pruned_loss=0.03187, over 4929.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2646, pruned_loss=0.04043, over 937680.63 frames. 
], batch size: 14, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:38:24,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=257069.33333333334, ans=0.0 +2024-07-29 07:38:27,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=257082.66666666666, ans=0.07 +2024-07-29 07:38:38,826 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.487e+01 5.702e+01 6.437e+01 7.118e+01 1.132e+02, threshold=1.287e+02, percent-clipped=0.0 +2024-07-29 07:38:57,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=257109.33333333334, ans=0.025 +2024-07-29 07:39:17,547 INFO [train.py:1114] (3/4) Epoch 19, batch 8850, loss[loss=0.1961, simple_loss=0.296, pruned_loss=0.0481, over 4538.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2645, pruned_loss=0.04091, over 932726.53 frames. ], batch size: 21, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:39:25,113 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257149.33333333334, ans=0.125 +2024-07-29 07:39:31,289 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=257149.33333333334, ans=0.0 +2024-07-29 07:39:31,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257149.33333333334, ans=0.1 +2024-07-29 07:39:32,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.50 vs. limit=22.5 +2024-07-29 07:39:33,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257149.33333333334, ans=0.125 +2024-07-29 07:39:49,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=257189.33333333334, ans=0.125 +2024-07-29 07:39:54,403 INFO [train.py:1114] (3/4) Epoch 19, batch 8900, loss[loss=0.1384, simple_loss=0.2253, pruned_loss=0.02572, over 4919.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2646, pruned_loss=0.04118, over 930319.42 frames. 
], batch size: 12, lr: 3.88e-03, grad_scale: 32.0 +2024-07-29 07:40:02,351 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=257202.66666666666, ans=0.0 +2024-07-29 07:40:06,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=257216.0, ans=0.1 +2024-07-29 07:40:18,532 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=257216.0, ans=0.125 +2024-07-29 07:40:20,224 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.370e+01 5.736e+01 6.296e+01 7.033e+01 9.064e+01, threshold=1.259e+02, percent-clipped=0.0 +2024-07-29 07:42:53,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=257229.33333333334, ans=0.125 +2024-07-29 07:42:57,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=257229.33333333334, ans=0.2 +2024-07-29 07:43:02,530 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=257242.66666666666, ans=0.125 +2024-07-29 07:43:03,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257242.66666666666, ans=0.125 +2024-07-29 07:43:07,810 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=257256.0, ans=0.0 +2024-07-29 07:43:10,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.08 vs. limit=15.0 +2024-07-29 07:43:11,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=257256.0, ans=0.0 +2024-07-29 07:43:14,246 INFO [train.py:1114] (3/4) Epoch 19, batch 8950, loss[loss=0.183, simple_loss=0.2739, pruned_loss=0.04609, over 4530.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04039, over 931235.87 frames. ], batch size: 21, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:43:30,836 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=9.62 vs. limit=15.0 +2024-07-29 07:43:43,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257322.66666666666, ans=0.0 +2024-07-29 07:43:47,529 INFO [train.py:1114] (3/4) Epoch 19, batch 9000, loss[loss=0.1979, simple_loss=0.2863, pruned_loss=0.05479, over 4644.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2625, pruned_loss=0.03985, over 934097.79 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:43:47,529 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 07:43:59,126 INFO [train.py:1146] (3/4) Epoch 19, validation: loss=0.1612, simple_loss=0.2635, pruned_loss=0.02943, over 944034.00 frames. 
+2024-07-29 07:43:59,127 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 07:44:05,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=257349.33333333334, ans=0.0 +2024-07-29 07:44:07,354 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.06 vs. limit=6.0 +2024-07-29 07:44:08,779 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.731e+01 5.623e+01 6.391e+01 7.404e+01 1.117e+02, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 07:44:09,579 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=257349.33333333334, ans=0.125 +2024-07-29 07:44:10,556 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.74 vs. limit=15.0 +2024-07-29 07:44:17,161 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.63 vs. limit=15.0 +2024-07-29 07:44:22,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=257376.0, ans=0.125 +2024-07-29 07:44:23,378 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=257376.0, ans=0.2 +2024-07-29 07:44:31,560 INFO [train.py:1114] (3/4) Epoch 19, batch 9050, loss[loss=0.1566, simple_loss=0.2467, pruned_loss=0.0333, over 4481.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2621, pruned_loss=0.04017, over 933956.21 frames. ], batch size: 10, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:44:31,764 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=257402.66666666666, ans=0.125 +2024-07-29 07:44:47,299 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:44:48,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=257429.33333333334, ans=10.0 +2024-07-29 07:44:52,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=257442.66666666666, ans=0.0 +2024-07-29 07:44:55,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=257442.66666666666, ans=0.125 +2024-07-29 07:44:58,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=257442.66666666666, ans=0.125 +2024-07-29 07:44:59,393 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=257442.66666666666, ans=0.1 +2024-07-29 07:45:06,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=257456.0, ans=0.125 +2024-07-29 07:45:09,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257456.0, ans=0.125 +2024-07-29 07:45:10,208 INFO [train.py:1114] (3/4) Epoch 19, batch 9100, loss[loss=0.1483, simple_loss=0.241, pruned_loss=0.02785, over 4942.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.262, pruned_loss=0.03983, over 936575.13 frames. 
], batch size: 14, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:45:11,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=257469.33333333334, ans=0.07 +2024-07-29 07:45:19,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257482.66666666666, ans=0.0 +2024-07-29 07:45:26,410 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.518e+01 5.674e+01 6.326e+01 7.504e+01 9.644e+01, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 07:45:26,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=257482.66666666666, ans=0.2 +2024-07-29 07:45:32,850 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=257496.0, ans=0.04949747468305833 +2024-07-29 07:45:36,156 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=257496.0, ans=0.2 +2024-07-29 07:45:47,603 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward3.hidden_balancer.prob, batch_count=257522.66666666666, ans=0.125 +2024-07-29 07:45:53,170 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.98 vs. limit=10.0 +2024-07-29 07:45:54,915 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=257522.66666666666, ans=0.0 +2024-07-29 07:45:57,964 INFO [train.py:1114] (3/4) Epoch 19, batch 9150, loss[loss=0.155, simple_loss=0.2457, pruned_loss=0.03213, over 4814.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2632, pruned_loss=0.04026, over 935498.79 frames. ], batch size: 14, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:46:15,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=257562.66666666666, ans=0.0 +2024-07-29 07:46:21,441 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=257576.0, ans=0.125 +2024-07-29 07:46:26,979 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=257589.33333333334, ans=0.0 +2024-07-29 07:46:34,022 INFO [train.py:1114] (3/4) Epoch 19, batch 9200, loss[loss=0.1499, simple_loss=0.2316, pruned_loss=0.03406, over 4853.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2626, pruned_loss=0.04024, over 937684.02 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:46:38,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.min_positive, batch_count=257602.66666666666, ans=0.025 +2024-07-29 07:46:43,417 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.603e+01 5.777e+01 6.391e+01 7.233e+01 9.749e+01, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 07:46:43,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.pos_emb_skip_rate, batch_count=257616.0, ans=0.0 +2024-07-29 07:46:49,346 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.52 vs. 
limit=10.0 +2024-07-29 07:46:53,937 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=257642.66666666666, ans=0.125 +2024-07-29 07:47:00,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=257656.0, ans=0.125 +2024-07-29 07:47:05,831 INFO [train.py:1114] (3/4) Epoch 19, batch 9250, loss[loss=0.2058, simple_loss=0.2897, pruned_loss=0.06099, over 4634.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2632, pruned_loss=0.04049, over 938498.98 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:47:07,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=257669.33333333334, ans=0.025 +2024-07-29 07:47:07,946 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.40 vs. limit=15.0 +2024-07-29 07:47:08,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=257669.33333333334, ans=0.125 +2024-07-29 07:47:08,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257669.33333333334, ans=0.125 +2024-07-29 07:47:10,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=257669.33333333334, ans=0.09899494936611666 +2024-07-29 07:47:27,588 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=257709.33333333334, ans=0.0 +2024-07-29 07:47:37,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=257736.0, ans=0.0 +2024-07-29 07:47:38,218 INFO [train.py:1114] (3/4) Epoch 19, batch 9300, loss[loss=0.1465, simple_loss=0.2218, pruned_loss=0.03557, over 4785.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2629, pruned_loss=0.04038, over 937917.83 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:47:44,911 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.63 vs. 
limit=22.5 +2024-07-29 07:47:45,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=257749.33333333334, ans=0.0 +2024-07-29 07:47:45,964 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=257749.33333333334, ans=0.125 +2024-07-29 07:47:47,627 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.607e+01 5.571e+01 6.148e+01 7.388e+01 1.007e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 07:47:57,725 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=257762.66666666666, ans=0.0 +2024-07-29 07:47:57,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=257762.66666666666, ans=0.2 +2024-07-29 07:48:04,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=257776.0, ans=0.0 +2024-07-29 07:48:08,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=257789.33333333334, ans=0.125 +2024-07-29 07:48:12,933 INFO [train.py:1114] (3/4) Epoch 19, batch 9350, loss[loss=0.1496, simple_loss=0.2308, pruned_loss=0.03422, over 4812.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2635, pruned_loss=0.04052, over 934491.84 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:48:20,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward3.hidden_balancer.prob, batch_count=257816.0, ans=0.125 +2024-07-29 07:48:33,890 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer1.prob, batch_count=257842.66666666666, ans=0.125 +2024-07-29 07:48:35,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=257842.66666666666, ans=0.2 +2024-07-29 07:48:40,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=12.76 vs. limit=22.5 +2024-07-29 07:48:43,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=257856.0, ans=0.125 +2024-07-29 07:48:44,700 INFO [train.py:1114] (3/4) Epoch 19, batch 9400, loss[loss=0.1693, simple_loss=0.272, pruned_loss=0.03331, over 4685.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2633, pruned_loss=0.04044, over 932079.83 frames. ], batch size: 13, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:48:53,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=257882.66666666666, ans=0.125 +2024-07-29 07:48:54,089 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.828e+01 5.728e+01 6.199e+01 7.519e+01 1.174e+02, threshold=1.240e+02, percent-clipped=0.0 +2024-07-29 07:48:58,713 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=257896.0, ans=0.125 +2024-07-29 07:49:04,544 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.65 vs. 
limit=15.0 +2024-07-29 07:49:12,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=257922.66666666666, ans=0.0 +2024-07-29 07:49:16,043 INFO [train.py:1114] (3/4) Epoch 19, batch 9450, loss[loss=0.151, simple_loss=0.2367, pruned_loss=0.03263, over 4817.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2639, pruned_loss=0.04065, over 931252.21 frames. ], batch size: 11, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:49:18,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=257936.0, ans=0.0 +2024-07-29 07:49:19,369 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:49:21,323 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=257936.0, ans=0.125 +2024-07-29 07:49:26,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=257949.33333333334, ans=0.09899494936611666 +2024-07-29 07:49:46,496 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.47 vs. limit=15.0 +2024-07-29 07:49:55,423 INFO [train.py:1114] (3/4) Epoch 19, batch 9500, loss[loss=0.1487, simple_loss=0.2399, pruned_loss=0.02871, over 4707.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2634, pruned_loss=0.04054, over 933751.96 frames. ], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:49:58,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=258002.66666666666, ans=0.125 +2024-07-29 07:49:59,845 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=258002.66666666666, ans=0.125 +2024-07-29 07:50:02,975 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258016.0, ans=0.125 +2024-07-29 07:50:03,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258016.0, ans=0.1 +2024-07-29 07:50:04,906 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.577e+01 5.446e+01 5.959e+01 6.735e+01 9.596e+01, threshold=1.192e+02, percent-clipped=0.0 +2024-07-29 07:50:05,053 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=258016.0, ans=0.0 +2024-07-29 07:50:16,865 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 07:50:25,110 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=258056.0, ans=0.125 +2024-07-29 07:50:27,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=258056.0, ans=0.0 +2024-07-29 07:50:29,002 INFO [train.py:1114] (3/4) Epoch 19, batch 9550, loss[loss=0.1706, simple_loss=0.2525, pruned_loss=0.04435, over 4783.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.263, pruned_loss=0.04058, over 931198.53 frames. 
], batch size: 12, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:50:45,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258096.0, ans=0.1 +2024-07-29 07:51:08,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=258122.66666666666, ans=0.125 +2024-07-29 07:51:10,813 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=7.15 vs. limit=15.0 +2024-07-29 07:51:12,497 INFO [train.py:1114] (3/4) Epoch 19, batch 9600, loss[loss=0.2108, simple_loss=0.2725, pruned_loss=0.07456, over 3445.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04056, over 930470.63 frames. ], batch size: 35, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:16,531 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=258136.0, ans=0.0 +2024-07-29 07:51:21,989 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.779e+01 5.937e+01 6.386e+01 7.744e+01 1.025e+02, threshold=1.277e+02, percent-clipped=0.0 +2024-07-29 07:51:26,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=258162.66666666666, ans=0.0 +2024-07-29 07:51:26,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=258162.66666666666, ans=0.125 +2024-07-29 07:51:29,654 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=258162.66666666666, ans=0.1 +2024-07-29 07:51:30,179 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=258162.66666666666, ans=0.0 +2024-07-29 07:51:36,560 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=258176.0, ans=0.09899494936611666 +2024-07-29 07:51:46,121 INFO [train.py:1114] (3/4) Epoch 19, batch 9650, loss[loss=0.1714, simple_loss=0.2765, pruned_loss=0.03315, over 4822.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2646, pruned_loss=0.04126, over 926236.13 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:51:50,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=258202.66666666666, ans=0.07 +2024-07-29 07:51:58,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=258229.33333333334, ans=0.2 +2024-07-29 07:52:00,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.18 vs. limit=12.0 +2024-07-29 07:52:02,637 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=258229.33333333334, ans=0.125 +2024-07-29 07:52:04,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.07 vs. 
limit=10.0 +2024-07-29 07:52:05,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258242.66666666666, ans=0.125 +2024-07-29 07:52:07,807 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=258242.66666666666, ans=0.125 +2024-07-29 07:52:12,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258256.0, ans=0.125 +2024-07-29 07:52:17,705 INFO [train.py:1114] (3/4) Epoch 19, batch 9700, loss[loss=0.1601, simple_loss=0.2517, pruned_loss=0.0342, over 4239.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2654, pruned_loss=0.04132, over 924072.21 frames. ], batch size: 25, lr: 3.87e-03, grad_scale: 32.0 +2024-07-29 07:52:23,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=12.95 vs. limit=22.5 +2024-07-29 07:52:26,892 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.593e+01 5.780e+01 6.621e+01 7.551e+01 1.114e+02, threshold=1.324e+02, percent-clipped=0.0 +2024-07-29 07:52:29,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=258282.66666666666, ans=0.125 +2024-07-29 07:52:52,003 INFO [train.py:1114] (3/4) Epoch 19, batch 9750, loss[loss=0.1682, simple_loss=0.2603, pruned_loss=0.03802, over 4666.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.265, pruned_loss=0.04109, over 924634.70 frames. ], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:52:55,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=258336.0, ans=0.0 +2024-07-29 07:53:04,773 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=258362.66666666666, ans=0.025 +2024-07-29 07:53:10,736 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=258362.66666666666, ans=0.125 +2024-07-29 07:53:12,609 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=258376.0, ans=0.125 +2024-07-29 07:53:13,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=258376.0, ans=0.125 +2024-07-29 07:54:03,953 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=258389.33333333334, ans=0.2 +2024-07-29 07:54:17,465 INFO [train.py:1114] (3/4) Epoch 19, batch 9800, loss[loss=0.1333, simple_loss=0.2269, pruned_loss=0.01986, over 4711.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2642, pruned_loss=0.04062, over 924790.76 frames. 
], batch size: 12, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:54:21,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=258402.66666666666, ans=0.2 +2024-07-29 07:54:24,993 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=258416.0, ans=0.125 +2024-07-29 07:54:27,267 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.539e+01 5.598e+01 6.395e+01 7.278e+01 1.117e+02, threshold=1.279e+02, percent-clipped=0.0 +2024-07-29 07:54:44,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=258456.0, ans=0.0 +2024-07-29 07:54:45,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=258456.0, ans=0.0 +2024-07-29 07:54:49,139 INFO [train.py:1114] (3/4) Epoch 19, batch 9850, loss[loss=0.1933, simple_loss=0.283, pruned_loss=0.05177, over 4901.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2645, pruned_loss=0.04067, over 927298.38 frames. ], batch size: 15, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:55:14,220 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=258482.66666666666, ans=0.125 +2024-07-29 07:55:32,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten.whitening_limit, batch_count=258509.33333333334, ans=15.0 +2024-07-29 07:55:40,082 INFO [train.py:1114] (3/4) Epoch 19, batch 9900, loss[loss=0.1937, simple_loss=0.2849, pruned_loss=0.05122, over 4837.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.265, pruned_loss=0.04109, over 926542.13 frames. ], batch size: 16, lr: 3.87e-03, grad_scale: 64.0 +2024-07-29 07:55:42,727 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=258536.0, ans=0.125 +2024-07-29 07:55:43,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=258536.0, ans=0.125 +2024-07-29 07:55:44,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=258536.0, ans=0.125 +2024-07-29 07:55:49,456 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.672e+01 5.747e+01 6.549e+01 7.522e+01 9.931e+01, threshold=1.310e+02, percent-clipped=0.0 +2024-07-29 07:55:49,636 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=258549.33333333334, ans=0.1 +2024-07-29 07:55:54,063 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=258562.66666666666, ans=0.0 +2024-07-29 07:55:59,554 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.92 vs. limit=6.0 +2024-07-29 07:56:10,862 INFO [train.py:1114] (3/4) Epoch 19, batch 9950, loss[loss=0.1605, simple_loss=0.2411, pruned_loss=0.03997, over 4535.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2659, pruned_loss=0.0419, over 928709.72 frames. 
], batch size: 10, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:14,335 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.48 vs. limit=15.0 +2024-07-29 07:56:23,414 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=258629.33333333334, ans=0.0 +2024-07-29 07:56:42,849 INFO [train.py:1114] (3/4) Epoch 19, batch 10000, loss[loss=0.2094, simple_loss=0.2893, pruned_loss=0.06473, over 4638.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.268, pruned_loss=0.04245, over 925970.80 frames. ], batch size: 16, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:56:45,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=258669.33333333334, ans=0.2 +2024-07-29 07:56:46,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=258669.33333333334, ans=0.0 +2024-07-29 07:56:51,994 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.927e+01 5.763e+01 6.186e+01 6.988e+01 1.066e+02, threshold=1.237e+02, percent-clipped=0.0 +2024-07-29 07:56:57,480 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.60 vs. limit=15.0 +2024-07-29 07:57:10,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=258722.66666666666, ans=0.0 +2024-07-29 07:57:14,769 INFO [train.py:1114] (3/4) Epoch 19, batch 10050, loss[loss=0.2108, simple_loss=0.292, pruned_loss=0.06485, over 3535.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2716, pruned_loss=0.04447, over 913945.23 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:16,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=258736.0, ans=0.125 +2024-07-29 07:57:21,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=15.63 vs. limit=15.0 +2024-07-29 07:57:32,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258762.66666666666, ans=0.125 +2024-07-29 07:57:33,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=258762.66666666666, ans=0.0 +2024-07-29 07:57:33,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=258762.66666666666, ans=0.125 +2024-07-29 07:57:33,611 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.68 vs. limit=6.0 +2024-07-29 07:57:48,580 INFO [train.py:1114] (3/4) Epoch 19, batch 10100, loss[loss=0.2465, simple_loss=0.3136, pruned_loss=0.0897, over 3237.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2758, pruned_loss=0.0486, over 862062.61 frames. 
], batch size: 38, lr: 3.86e-03, grad_scale: 64.0 +2024-07-29 07:57:52,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=258802.66666666666, ans=0.0 +2024-07-29 07:57:55,705 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=13.64 vs. limit=15.0 +2024-07-29 07:57:58,541 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.153e+01 6.634e+01 7.311e+01 7.897e+01 1.171e+02, threshold=1.462e+02, percent-clipped=0.0 +2024-07-29 07:58:04,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258829.33333333334, ans=0.0 +2024-07-29 07:58:20,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=258869.33333333334, ans=0.125 +2024-07-29 07:58:21,409 INFO [train.py:1114] (3/4) Epoch 19, batch 10150, loss[loss=0.2208, simple_loss=0.2853, pruned_loss=0.07817, over 3323.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2781, pruned_loss=0.05167, over 822021.20 frames. ], batch size: 35, lr: 3.86e-03, grad_scale: 32.0 +2024-07-29 07:58:21,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=258869.33333333334, ans=0.125 +2024-07-29 07:58:24,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=258869.33333333334, ans=0.125 +2024-07-29 07:58:26,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=258869.33333333334, ans=0.0 +2024-07-29 07:58:40,242 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258896.0, ans=0.125 +2024-07-29 07:58:50,471 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=258896.0, ans=0.125 +2024-07-29 07:58:58,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=258909.33333333334, ans=0.0 +2024-07-29 07:59:01,329 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=258909.33333333334, ans=0.025 +2024-07-29 07:59:02,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=258922.66666666666, ans=0.0 +2024-07-29 07:59:07,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=258922.66666666666, ans=0.07 +2024-07-29 07:59:09,559 INFO [train.py:1114] (3/4) Epoch 19, batch 10200, loss[loss=0.1925, simple_loss=0.2823, pruned_loss=0.05135, over 3512.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2805, pruned_loss=0.05471, over 789211.05 frames. 
], batch size: 36, lr: 3.86e-03, grad_scale: 32.0 +2024-07-29 07:59:18,731 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=258949.33333333334, ans=0.125 +2024-07-29 07:59:19,708 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 6.022e+01 7.024e+01 7.484e+01 8.101e+01 1.029e+02, threshold=1.497e+02, percent-clipped=0.0 +2024-07-29 07:59:23,390 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=258962.66666666666, ans=0.125 +2024-07-29 08:02:27,570 INFO [train.py:1114] (3/4) Epoch 20, batch 0, loss[loss=0.1316, simple_loss=0.2203, pruned_loss=0.02144, over 4855.00 frames. ], tot_loss[loss=0.1316, simple_loss=0.2203, pruned_loss=0.02144, over 4855.00 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:02:27,570 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 08:02:31,873 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([4.0264, 3.9359, 3.5465, 3.6591], device='cuda:3') +2024-07-29 08:02:35,299 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.1.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([5.7220, 5.6519, 5.0280, 5.2565], device='cuda:3') +2024-07-29 08:02:40,769 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.161, simple_loss=0.2644, pruned_loss=0.02883, over 944034.00 frames. +2024-07-29 08:02:40,770 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 08:02:52,382 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=20.27 vs. limit=22.5 +2024-07-29 08:03:02,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=259006.66666666666, ans=0.0 +2024-07-29 08:03:10,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.min_positive, batch_count=259020.0, ans=0.025 +2024-07-29 08:03:13,451 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=259020.0, ans=0.0 +2024-07-29 08:03:17,473 INFO [train.py:1114] (3/4) Epoch 20, batch 50, loss[loss=0.1405, simple_loss=0.225, pruned_loss=0.02802, over 4613.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.263, pruned_loss=0.03967, over 206526.31 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:03:34,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.60 vs. limit=6.0 +2024-07-29 08:03:35,260 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259060.0, ans=0.1 +2024-07-29 08:03:47,270 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.549e+01 5.582e+01 6.158e+01 6.826e+01 9.280e+01, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 08:03:51,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259100.0, ans=0.0 +2024-07-29 08:03:51,953 INFO [train.py:1114] (3/4) Epoch 20, batch 100, loss[loss=0.1848, simple_loss=0.2712, pruned_loss=0.04916, over 4641.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2647, pruned_loss=0.03992, over 366057.71 frames. 
], batch size: 12, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:03:59,916 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=259113.33333333334, ans=0.2 +2024-07-29 08:04:03,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=259113.33333333334, ans=0.2 +2024-07-29 08:04:05,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=259113.33333333334, ans=0.0 +2024-07-29 08:04:11,350 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=259126.66666666666, ans=0.0 +2024-07-29 08:04:14,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=259140.0, ans=0.125 +2024-07-29 08:04:19,422 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=259140.0, ans=0.0 +2024-07-29 08:04:26,693 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259166.66666666666, ans=0.1 +2024-07-29 08:04:27,184 INFO [train.py:1114] (3/4) Epoch 20, batch 150, loss[loss=0.1694, simple_loss=0.256, pruned_loss=0.04139, over 4626.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2639, pruned_loss=0.03934, over 494701.87 frames. ], batch size: 11, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:04:37,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=259180.0, ans=0.0 +2024-07-29 08:04:38,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.45 vs. limit=15.0 +2024-07-29 08:04:38,891 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259180.0, ans=0.1 +2024-07-29 08:04:57,099 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.664e+01 5.593e+01 6.135e+01 6.886e+01 1.305e+02, threshold=1.227e+02, percent-clipped=1.0 +2024-07-29 08:05:01,699 INFO [train.py:1114] (3/4) Epoch 20, batch 200, loss[loss=0.2095, simple_loss=0.3123, pruned_loss=0.05335, over 4470.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2627, pruned_loss=0.03936, over 594351.22 frames. ], batch size: 21, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:07,424 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=259233.33333333334, ans=0.125 +2024-07-29 08:05:12,938 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.33 vs. 
limit=10.0 +2024-07-29 08:05:16,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259246.66666666666, ans=0.125 +2024-07-29 08:05:23,511 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:05:24,974 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=259260.0, ans=0.0 +2024-07-29 08:05:26,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259260.0, ans=0.1 +2024-07-29 08:05:37,789 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=259273.33333333334, ans=0.125 +2024-07-29 08:05:40,535 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=259273.33333333334, ans=0.125 +2024-07-29 08:05:50,871 INFO [train.py:1114] (3/4) Epoch 20, batch 250, loss[loss=0.1757, simple_loss=0.2684, pruned_loss=0.04153, over 4639.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2628, pruned_loss=0.03946, over 670858.03 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0 +2024-07-29 08:05:56,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.69 vs. limit=12.0 +2024-07-29 08:05:57,663 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=192, metric=5.96 vs. limit=15.0 +2024-07-29 08:06:01,928 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=259313.33333333334, ans=0.025 +2024-07-29 08:06:14,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:06:51,634 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.70 vs. limit=15.0 +2024-07-29 08:06:55,365 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer2.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:06:55,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module1.balancer2.prob, batch_count=259326.66666666666, ans=0.125 +2024-07-29 08:07:30,204 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.296e+01 5.734e+01 6.099e+01 7.044e+01 1.100e+02, threshold=1.220e+02, percent-clipped=0.0 +2024-07-29 08:07:36,798 INFO [train.py:1114] (3/4) Epoch 20, batch 300, loss[loss=0.1671, simple_loss=0.2594, pruned_loss=0.03747, over 4802.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2623, pruned_loss=0.03944, over 730295.08 frames. 
], batch size: 15, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:07:43,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=259380.0, ans=0.2
+2024-07-29 08:07:47,733 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:07:57,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten2.whitening_limit, batch_count=259406.66666666666, ans=15.0
+2024-07-29 08:08:06,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.23 vs. limit=6.0
+2024-07-29 08:08:14,334 INFO [train.py:1114] (3/4) Epoch 20, batch 350, loss[loss=0.161, simple_loss=0.2516, pruned_loss=0.03518, over 4939.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2639, pruned_loss=0.04008, over 775948.02 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:08:17,025 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=259433.33333333334, ans=0.1
+2024-07-29 08:08:29,361 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259460.0, ans=0.0
+2024-07-29 08:08:32,776 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=259460.0, ans=0.125
+2024-07-29 08:08:36,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=259460.0, ans=0.125
+2024-07-29 08:08:42,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=259473.33333333334, ans=0.125
+2024-07-29 08:08:47,021 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.322e+01 5.507e+01 5.880e+01 6.811e+01 8.968e+01, threshold=1.176e+02, percent-clipped=0.0
+2024-07-29 08:08:47,864 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:08:47,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer2.prob, batch_count=259486.66666666666, ans=0.125
+2024-07-29 08:08:51,656 INFO [train.py:1114] (3/4) Epoch 20, batch 400, loss[loss=0.1777, simple_loss=0.2765, pruned_loss=0.0394, over 4695.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2641, pruned_loss=0.04001, over 813489.87 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:08:55,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=259500.0, ans=0.0
+2024-07-29 08:08:59,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=259513.33333333334, ans=0.1
+2024-07-29 08:08:59,613 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass_mid.scale_min, batch_count=259513.33333333334, ans=0.2
+2024-07-29 08:09:01,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.80 vs. limit=15.0
+2024-07-29 08:09:05,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=259526.66666666666, ans=0.125
+2024-07-29 08:09:10,104 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.77 vs. limit=15.0
+2024-07-29 08:09:14,249 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259540.0, ans=0.0
+2024-07-29 08:09:16,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=9.09 vs. limit=15.0
+2024-07-29 08:09:20,530 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:09:27,505 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:09:30,920 INFO [train.py:1114] (3/4) Epoch 20, batch 450, loss[loss=0.1589, simple_loss=0.2525, pruned_loss=0.03258, over 4631.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04042, over 838888.62 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:09:33,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259566.66666666666, ans=0.125
+2024-07-29 08:09:35,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=259566.66666666666, ans=0.0
+2024-07-29 08:09:37,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=259580.0, ans=0.125
+2024-07-29 08:09:49,696 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=259593.33333333334, ans=0.2
+2024-07-29 08:10:05,753 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.282e+01 5.641e+01 6.168e+01 6.736e+01 1.200e+02, threshold=1.234e+02, percent-clipped=0.0
+2024-07-29 08:10:10,562 INFO [train.py:1114] (3/4) Epoch 20, batch 500, loss[loss=0.1752, simple_loss=0.2711, pruned_loss=0.03962, over 4672.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04029, over 861510.45 frames. ], batch size: 15, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:10:26,748 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=259660.0, ans=0.125
+2024-07-29 08:10:51,612 INFO [train.py:1114] (3/4) Epoch 20, batch 550, loss[loss=0.1712, simple_loss=0.2593, pruned_loss=0.0415, over 4618.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2623, pruned_loss=0.04005, over 877248.55 frames. ], batch size: 17, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:15:10,433 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=259740.0, ans=0.125
+2024-07-29 08:15:18,476 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.28 vs. limit=15.0
+2024-07-29 08:15:20,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.64 vs. limit=15.0
+2024-07-29 08:15:21,353 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.691e+01 5.418e+01 6.036e+01 6.579e+01 9.144e+01, threshold=1.207e+02, percent-clipped=0.0
+2024-07-29 08:15:27,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.max_abs, batch_count=259753.33333333334, ans=10.0
+2024-07-29 08:15:29,555 INFO [train.py:1114] (3/4) Epoch 20, batch 600, loss[loss=0.1716, simple_loss=0.2646, pruned_loss=0.03929, over 4644.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03977, over 892042.38 frames. ], batch size: 16, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:15:30,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=259766.66666666666, ans=0.125
+2024-07-29 08:15:59,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=259806.66666666666, ans=0.125
+2024-07-29 08:16:10,530 INFO [train.py:1114] (3/4) Epoch 20, batch 650, loss[loss=0.1477, simple_loss=0.241, pruned_loss=0.02718, over 4763.00 frames. ], tot_loss[loss=0.17, simple_loss=0.261, pruned_loss=0.0395, over 903696.72 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:17:39,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=259846.66666666666, ans=0.2
+2024-07-29 08:17:50,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=259860.0, ans=0.0
+2024-07-29 08:18:30,218 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.467e+01 5.565e+01 6.152e+01 6.795e+01 9.682e+01, threshold=1.230e+02, percent-clipped=0.0
+2024-07-29 08:18:35,046 INFO [train.py:1114] (3/4) Epoch 20, batch 700, loss[loss=0.1305, simple_loss=0.2262, pruned_loss=0.01744, over 4638.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.0395, over 911616.67 frames. ], batch size: 12, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:18:45,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=8.15 vs. limit=15.0
+2024-07-29 08:18:47,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=259900.0, ans=0.125
+2024-07-29 08:18:51,814 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:19:16,868 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=259953.33333333334, ans=0.0
+2024-07-29 08:19:19,935 INFO [train.py:1114] (3/4) Epoch 20, batch 750, loss[loss=0.1676, simple_loss=0.2589, pruned_loss=0.0381, over 4695.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2611, pruned_loss=0.0395, over 918404.35 frames. ], batch size: 13, lr: 3.76e-03, grad_scale: 32.0
+2024-07-29 08:19:20,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=259966.66666666666, ans=0.0
+2024-07-29 08:19:27,313 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=259980.0, ans=0.125
+2024-07-29 08:19:43,344 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260006.66666666666, ans=0.0
+2024-07-29 08:19:50,726 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:19:50,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=260020.0, ans=0.09899494936611666
+2024-07-29 08:19:51,132 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.556e+01 5.579e+01 6.090e+01 6.934e+01 1.125e+02, threshold=1.218e+02, percent-clipped=0.0
+2024-07-29 08:19:53,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=260020.0, ans=0.2
+2024-07-29 08:19:55,776 INFO [train.py:1114] (3/4) Epoch 20, batch 800, loss[loss=0.1726, simple_loss=0.2543, pruned_loss=0.0455, over 4847.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2605, pruned_loss=0.03917, over 923050.65 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:20:06,772 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.39 vs. limit=10.0
+2024-07-29 08:20:33,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=260073.33333333334, ans=0.09899494936611666
+2024-07-29 08:20:33,306 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=7.13 vs. limit=15.0
+2024-07-29 08:20:34,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0
+2024-07-29 08:20:37,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260086.66666666666, ans=0.125
+2024-07-29 08:20:42,991 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=260100.0, ans=0.2
+2024-07-29 08:20:43,558 INFO [train.py:1114] (3/4) Epoch 20, batch 850, loss[loss=0.1627, simple_loss=0.2643, pruned_loss=0.03054, over 4672.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2608, pruned_loss=0.03953, over 927650.06 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:20:47,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=260100.0, ans=0.0
+2024-07-29 08:20:53,809 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.whiten, num_groups=1, num_channels=512, metric=4.92 vs. limit=12.0
+2024-07-29 08:21:12,434 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.741e+01 5.604e+01 6.314e+01 7.197e+01 9.359e+01, threshold=1.263e+02, percent-clipped=0.0
+2024-07-29 08:21:17,235 INFO [train.py:1114] (3/4) Epoch 20, batch 900, loss[loss=0.153, simple_loss=0.2285, pruned_loss=0.03877, over 4848.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2605, pruned_loss=0.03952, over 928259.14 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:21:21,863 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=260166.66666666666, ans=0.125
+2024-07-29 08:21:27,493 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.65 vs. limit=15.0
+2024-07-29 08:21:52,560 INFO [train.py:1114] (3/4) Epoch 20, batch 950, loss[loss=0.1665, simple_loss=0.2523, pruned_loss=0.04032, over 4773.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2611, pruned_loss=0.03932, over 930195.41 frames. ], batch size: 12, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:21:53,346 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=260233.33333333334, ans=0.125
+2024-07-29 08:21:57,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=16.07 vs. limit=22.5
+2024-07-29 08:23:13,571 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.80 vs. limit=15.0
+2024-07-29 08:23:47,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.88 vs. limit=15.0
+2024-07-29 08:23:51,832 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=260260.0, ans=0.0
+2024-07-29 08:23:53,531 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=7.11 vs. limit=15.0
+2024-07-29 08:23:55,205 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=260273.33333333334, ans=0.2
+2024-07-29 08:24:02,477 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.701e+01 5.740e+01 6.532e+01 7.410e+01 9.580e+01, threshold=1.306e+02, percent-clipped=0.0
+2024-07-29 08:24:06,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=260286.66666666666, ans=0.125
+2024-07-29 08:24:07,420 INFO [train.py:1114] (3/4) Epoch 20, batch 1000, loss[loss=0.1505, simple_loss=0.2349, pruned_loss=0.03308, over 4970.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04022, over 929717.34 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:24:07,816 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.98 vs. limit=22.5
+2024-07-29 08:24:14,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.min_abs, batch_count=260313.33333333334, ans=0.5
+2024-07-29 08:24:18,529 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.47 vs. limit=15.0
+2024-07-29 08:24:34,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.scale_min, batch_count=260353.33333333334, ans=0.2
+2024-07-29 08:24:41,506 INFO [train.py:1114] (3/4) Epoch 20, batch 1050, loss[loss=0.2099, simple_loss=0.2965, pruned_loss=0.06166, over 4873.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2621, pruned_loss=0.04007, over 932203.90 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:24:46,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=260366.66666666666, ans=0.125
+2024-07-29 08:24:51,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=260380.0, ans=0.2
+2024-07-29 08:24:56,326 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260393.33333333334, ans=0.0
+2024-07-29 08:24:56,452 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.43 vs. limit=15.0
+2024-07-29 08:25:04,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=260406.66666666666, ans=0.2
+2024-07-29 08:25:12,292 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.430e+01 5.624e+01 6.219e+01 7.008e+01 1.029e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-29 08:25:13,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=260420.0, ans=0.125
+2024-07-29 08:25:17,058 INFO [train.py:1114] (3/4) Epoch 20, batch 1100, loss[loss=0.1801, simple_loss=0.2671, pruned_loss=0.0466, over 4896.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2628, pruned_loss=0.04011, over 934237.32 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:25:17,155 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260433.33333333334, ans=0.1
+2024-07-29 08:25:25,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=12.65 vs. limit=15.0
+2024-07-29 08:25:52,969 INFO [train.py:1114] (3/4) Epoch 20, batch 1150, loss[loss=0.1786, simple_loss=0.2623, pruned_loss=0.04751, over 4890.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2631, pruned_loss=0.04032, over 934327.21 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:25:56,018 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn1.whiten, num_groups=1, num_channels=512, metric=18.43 vs. limit=22.5
+2024-07-29 08:25:59,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260513.33333333334, ans=0.1
+2024-07-29 08:26:11,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=260526.66666666666, ans=0.2
+2024-07-29 08:26:18,375 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260540.0, ans=0.1
+2024-07-29 08:26:22,171 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.531e+01 5.714e+01 6.232e+01 6.999e+01 1.113e+02, threshold=1.246e+02, percent-clipped=0.0
+2024-07-29 08:26:25,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=260553.33333333334, ans=0.125
+2024-07-29 08:26:26,995 INFO [train.py:1114] (3/4) Epoch 20, batch 1200, loss[loss=0.1818, simple_loss=0.2736, pruned_loss=0.04496, over 4870.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2642, pruned_loss=0.04059, over 933445.47 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:26:41,983 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.49 vs. limit=22.5
+2024-07-29 08:27:24,444 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=8.24 vs. limit=15.0
+2024-07-29 08:27:26,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=260593.33333333334, ans=0.1
+2024-07-29 08:28:46,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=260606.66666666666, ans=0.125
+2024-07-29 08:28:54,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260620.0, ans=0.1
+2024-07-29 08:28:59,364 INFO [train.py:1114] (3/4) Epoch 20, batch 1250, loss[loss=0.2172, simple_loss=0.2958, pruned_loss=0.06929, over 4797.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2645, pruned_loss=0.04051, over 937349.16 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:29:14,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=4.52 vs. limit=15.0
+2024-07-29 08:29:16,072 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.76 vs. limit=15.0
+2024-07-29 08:29:21,491 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=260673.33333333334, ans=0.125
+2024-07-29 08:29:29,551 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.711e+01 5.565e+01 6.190e+01 6.882e+01 9.944e+01, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 08:29:29,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=260686.66666666666, ans=0.125
+2024-07-29 08:29:42,142 INFO [train.py:1114] (3/4) Epoch 20, batch 1300, loss[loss=0.2192, simple_loss=0.3103, pruned_loss=0.06403, over 4666.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2636, pruned_loss=0.04008, over 939014.52 frames. ], batch size: 19, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:29:49,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=260713.33333333334, ans=0.125
+2024-07-29 08:29:55,656 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=13.58 vs. limit=22.5
+2024-07-29 08:30:15,514 INFO [train.py:1114] (3/4) Epoch 20, batch 1350, loss[loss=0.161, simple_loss=0.2673, pruned_loss=0.02741, over 4758.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2626, pruned_loss=0.03966, over 941046.60 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:30:15,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=260766.66666666666, ans=0.0
+2024-07-29 08:30:31,340 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=260793.33333333334, ans=0.09899494936611666
+2024-07-29 08:30:32,020 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=260793.33333333334, ans=0.0
+2024-07-29 08:30:32,954 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer2.prob, batch_count=260793.33333333334, ans=0.125
+2024-07-29 08:30:37,177 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.19 vs. limit=15.0
+2024-07-29 08:30:37,714 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260806.66666666666, ans=0.1
+2024-07-29 08:30:42,918 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=260820.0, ans=0.2
+2024-07-29 08:30:43,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=260820.0, ans=0.125
+2024-07-29 08:30:44,740 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.728e+01 5.649e+01 6.305e+01 7.298e+01 1.047e+02, threshold=1.261e+02, percent-clipped=0.0
+2024-07-29 08:30:45,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=260820.0, ans=0.125
+2024-07-29 08:30:47,554 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=260820.0, ans=0.2
+2024-07-29 08:30:49,526 INFO [train.py:1114] (3/4) Epoch 20, batch 1400, loss[loss=0.162, simple_loss=0.2444, pruned_loss=0.03985, over 4689.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2622, pruned_loss=0.0393, over 942830.61 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:30:49,638 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=260833.33333333334, ans=0.0
+2024-07-29 08:30:53,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=6.16 vs. limit=12.0
+2024-07-29 08:30:56,219 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=260846.66666666666, ans=0.125
+2024-07-29 08:31:03,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=260860.0, ans=0.125
+2024-07-29 08:31:09,616 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer1.prob, batch_count=260873.33333333334, ans=0.125
+2024-07-29 08:31:21,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=260886.66666666666, ans=0.0
+2024-07-29 08:31:25,044 INFO [train.py:1114] (3/4) Epoch 20, batch 1450, loss[loss=0.1623, simple_loss=0.2621, pruned_loss=0.03125, over 4683.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2619, pruned_loss=0.03878, over 942702.42 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:31:33,174 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=260913.33333333334, ans=0.125
+2024-07-29 08:31:33,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=260913.33333333334, ans=0.0
+2024-07-29 08:31:33,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=260913.33333333334, ans=0.05
+2024-07-29 08:31:34,447 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=260913.33333333334, ans=0.0
+2024-07-29 08:31:41,178 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=260926.66666666666, ans=0.125
+2024-07-29 08:31:53,426 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.680e+01 5.592e+01 6.212e+01 7.267e+01 9.238e+01, threshold=1.242e+02, percent-clipped=0.0
+2024-07-29 08:31:57,087 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:31:57,749 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=260966.66666666666, ans=0.1
+2024-07-29 08:31:58,327 INFO [train.py:1114] (3/4) Epoch 20, batch 1500, loss[loss=0.1696, simple_loss=0.2698, pruned_loss=0.03465, over 4811.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2624, pruned_loss=0.03912, over 942073.23 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:31:59,479 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=21.53 vs. limit=22.5
+2024-07-29 08:32:14,968 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=260993.33333333334, ans=0.025
+2024-07-29 08:32:27,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.layerdrop_rate, batch_count=261020.0, ans=0.015
+2024-07-29 08:32:33,960 INFO [train.py:1114] (3/4) Epoch 20, batch 1550, loss[loss=0.1866, simple_loss=0.2635, pruned_loss=0.05486, over 4898.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.263, pruned_loss=0.03969, over 938534.03 frames. ], batch size: 15, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:32:40,167 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=261046.66666666666, ans=0.0
+2024-07-29 08:32:45,936 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=261046.66666666666, ans=0.025
+2024-07-29 08:33:04,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=261086.66666666666, ans=0.125
+2024-07-29 08:33:04,817 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.534e+01 6.134e+01 7.096e+01 1.070e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 08:33:09,551 INFO [train.py:1114] (3/4) Epoch 20, batch 1600, loss[loss=0.1592, simple_loss=0.2629, pruned_loss=0.02774, over 4868.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.263, pruned_loss=0.04041, over 937079.15 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:33:11,150 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:33:13,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff3_skip_rate, batch_count=261100.0, ans=0.0
+2024-07-29 08:33:14,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=261100.0, ans=0.0
+2024-07-29 08:33:15,471 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.07 vs. limit=6.0
+2024-07-29 08:33:16,469 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.balancer1.prob, batch_count=261113.33333333334, ans=0.125
+2024-07-29 08:33:22,665 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=9.01 vs. limit=15.0
+2024-07-29 08:33:36,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=261140.0, ans=0.125
+2024-07-29 08:33:37,734 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=261153.33333333334, ans=0.2
+2024-07-29 08:33:39,875 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.72 vs. limit=6.0
+2024-07-29 08:33:41,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261153.33333333334, ans=0.125
+2024-07-29 08:33:44,379 INFO [train.py:1114] (3/4) Epoch 20, batch 1650, loss[loss=0.1771, simple_loss=0.2659, pruned_loss=0.04417, over 4657.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.04003, over 937076.40 frames. ], batch size: 14, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:33:48,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=261166.66666666666, ans=0.125
+2024-07-29 08:34:22,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.514e+01 5.635e+01 6.098e+01 6.570e+01 1.046e+02, threshold=1.220e+02, percent-clipped=0.0
+2024-07-29 08:34:23,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.const_attention_rate, batch_count=261220.0, ans=0.025
+2024-07-29 08:34:24,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=261220.0, ans=0.125
+2024-07-29 08:34:25,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=261220.0, ans=0.0
+2024-07-29 08:34:25,483 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=261220.0, ans=0.125
+2024-07-29 08:34:27,328 INFO [train.py:1114] (3/4) Epoch 20, batch 1700, loss[loss=0.1371, simple_loss=0.2154, pruned_loss=0.02942, over 4706.00 frames. ], tot_loss[loss=0.171, simple_loss=0.262, pruned_loss=0.04002, over 938775.61 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:34:28,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261233.33333333334, ans=0.125
+2024-07-29 08:34:28,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=261233.33333333334, ans=0.025
+2024-07-29 08:34:34,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=261246.66666666666, ans=0.95
+2024-07-29 08:34:36,090 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=261246.66666666666, ans=0.0
+2024-07-29 08:34:36,698 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=261246.66666666666, ans=0.125
+2024-07-29 08:34:36,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=261246.66666666666, ans=0.125
+2024-07-29 08:34:38,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261246.66666666666, ans=0.1
+2024-07-29 08:34:41,519 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.31 vs. limit=15.0
+2024-07-29 08:34:52,117 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer1.prob, batch_count=261273.33333333334, ans=0.125
+2024-07-29 08:34:54,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=261273.33333333334, ans=0.0
+2024-07-29 08:35:01,944 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.attention_skip_rate, batch_count=261286.66666666666, ans=0.0
+2024-07-29 08:35:05,856 INFO [train.py:1114] (3/4) Epoch 20, batch 1750, loss[loss=0.1604, simple_loss=0.231, pruned_loss=0.04491, over 4822.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2609, pruned_loss=0.03958, over 940042.24 frames. ], batch size: 11, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:35:05,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=261300.0, ans=0.0
+2024-07-29 08:35:17,406 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=261313.33333333334, ans=0.125
+2024-07-29 08:35:33,144 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.07 vs. limit=10.0
+2024-07-29 08:36:11,239 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.491e+01 5.694e+01 6.446e+01 7.395e+01 1.026e+02, threshold=1.289e+02, percent-clipped=0.0
+2024-07-29 08:36:13,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=261353.33333333334, ans=0.2
+2024-07-29 08:36:15,534 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261366.66666666666, ans=0.1
+2024-07-29 08:36:16,135 INFO [train.py:1114] (3/4) Epoch 20, batch 1800, loss[loss=0.1616, simple_loss=0.2538, pruned_loss=0.0347, over 4635.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2611, pruned_loss=0.03976, over 940991.06 frames. ], batch size: 13, lr: 3.75e-03, grad_scale: 32.0
+2024-07-29 08:36:23,855 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.88 vs. limit=15.0
+2024-07-29 08:36:26,012 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.89 vs. limit=15.0
+2024-07-29 08:37:10,779 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer2.prob, batch_count=261393.33333333334, ans=0.125
+2024-07-29 08:37:23,574 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=261420.0, ans=0.0
+2024-07-29 08:37:36,912 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.79 vs. limit=10.0
+2024-07-29 08:37:37,486 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=8.60 vs. limit=15.0
+2024-07-29 08:37:47,134 INFO [train.py:1114] (3/4) Epoch 20, batch 1850, loss[loss=0.1775, simple_loss=0.2728, pruned_loss=0.04114, over 4807.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2603, pruned_loss=0.03948, over 940979.00 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:37:59,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=261433.33333333334, ans=0.125
+2024-07-29 08:38:05,384 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=261446.66666666666, ans=0.0
+2024-07-29 08:38:27,578 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=261486.66666666666, ans=0.125
+2024-07-29 08:38:30,214 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.557e+01 5.603e+01 6.221e+01 6.965e+01 1.039e+02, threshold=1.244e+02, percent-clipped=0.0
+2024-07-29 08:38:34,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_skip_rate, batch_count=261500.0, ans=0.0
+2024-07-29 08:38:34,804 INFO [train.py:1114] (3/4) Epoch 20, batch 1900, loss[loss=0.1589, simple_loss=0.247, pruned_loss=0.03537, over 4654.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2615, pruned_loss=0.03983, over 942201.77 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:38:37,513 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261500.0, ans=0.125
+2024-07-29 08:38:38,899 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=261500.0, ans=0.025
+2024-07-29 08:39:02,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=261540.0, ans=0.125
+2024-07-29 08:39:04,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=261553.33333333334, ans=0.125
+2024-07-29 08:39:11,509 INFO [train.py:1114] (3/4) Epoch 20, batch 1950, loss[loss=0.1772, simple_loss=0.2684, pruned_loss=0.04302, over 4889.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2623, pruned_loss=0.03977, over 943964.08 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 64.0
+2024-07-29 08:39:14,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=261566.66666666666, ans=0.2
+2024-07-29 08:39:28,219 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=512, metric=19.69 vs. limit=22.5
+2024-07-29 08:39:30,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261593.33333333334, ans=0.1
+2024-07-29 08:39:33,601 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=261606.66666666666, ans=0.0
+2024-07-29 08:39:37,793 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=22.17 vs. limit=22.5
+2024-07-29 08:39:39,735 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.min_abs, batch_count=261620.0, ans=0.5
+2024-07-29 08:39:40,865 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.726e+01 5.670e+01 6.297e+01 7.133e+01 1.211e+02, threshold=1.259e+02, percent-clipped=0.0
+2024-07-29 08:39:45,718 INFO [train.py:1114] (3/4) Epoch 20, batch 2000, loss[loss=0.141, simple_loss=0.2271, pruned_loss=0.02739, over 4802.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2625, pruned_loss=0.03981, over 940589.12 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0
+2024-07-29 08:39:54,512 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=256, metric=6.04 vs. limit=15.0
+2024-07-29 08:40:15,898 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:40:17,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=261673.33333333334, ans=0.125
+2024-07-29 08:40:19,244 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=261673.33333333334, ans=0.125
+2024-07-29 08:40:23,378 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=8.25 vs. limit=15.0
+2024-07-29 08:40:28,456 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.94 vs. limit=10.0
+2024-07-29 08:40:29,402 INFO [train.py:1114] (3/4) Epoch 20, batch 2050, loss[loss=0.1763, simple_loss=0.258, pruned_loss=0.04732, over 4623.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2612, pruned_loss=0.03941, over 938187.13 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 64.0
+2024-07-29 08:40:43,413 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.73 vs. limit=6.0
+2024-07-29 08:41:05,940 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261740.0, ans=0.1
+2024-07-29 08:41:08,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=261740.0, ans=0.0
+2024-07-29 08:41:14,701 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.484e+01 5.787e+01 6.324e+01 7.549e+01 1.272e+02, threshold=1.265e+02, percent-clipped=1.0
+2024-07-29 08:41:15,617 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=261753.33333333334, ans=0.125
+2024-07-29 08:41:18,634 INFO [train.py:1114] (3/4) Epoch 20, batch 2100, loss[loss=0.2026, simple_loss=0.2984, pruned_loss=0.05337, over 4761.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2608, pruned_loss=0.03901, over 940211.67 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:41:30,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=261780.0, ans=0.025
+2024-07-29 08:41:32,145 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.76 vs. limit=10.0
+2024-07-29 08:41:43,879 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.88 vs. limit=22.5
+2024-07-29 08:41:51,684 INFO [train.py:1114] (3/4) Epoch 20, batch 2150, loss[loss=0.1867, simple_loss=0.2842, pruned_loss=0.04465, over 4900.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2607, pruned_loss=0.03899, over 943545.56 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:42:25,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=261860.0, ans=0.125
+2024-07-29 08:42:28,071 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=261873.33333333334, ans=0.2
+2024-07-29 08:42:43,084 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0
+2024-07-29 08:42:43,999 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.512e+01 6.073e+01 7.043e+01 1.112e+02, threshold=1.215e+02, percent-clipped=0.0
+2024-07-29 08:42:45,436 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.attention_skip_rate, batch_count=261886.66666666666, ans=0.0
+2024-07-29 08:42:48,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=261886.66666666666, ans=0.07
+2024-07-29 08:42:53,724 INFO [train.py:1114] (3/4) Epoch 20, batch 2200, loss[loss=0.1686, simple_loss=0.266, pruned_loss=0.03566, over 4808.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2609, pruned_loss=0.03897, over 943052.88 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:43:00,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=261913.33333333334, ans=0.0
+2024-07-29 08:43:10,780 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.88 vs. limit=15.0
+2024-07-29 08:43:13,815 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=261913.33333333334, ans=0.0
+2024-07-29 08:43:39,367 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=261953.33333333334, ans=0.125
+2024-07-29 08:43:42,631 INFO [train.py:1114] (3/4) Epoch 20, batch 2250, loss[loss=0.1707, simple_loss=0.2801, pruned_loss=0.0307, over 4690.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2611, pruned_loss=0.03914, over 941452.97 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:44:14,213 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=261993.33333333334, ans=0.1
+2024-07-29 08:44:25,995 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=261993.33333333334, ans=0.125
+2024-07-29 08:44:26,014 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:44:32,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=262006.66666666666, ans=0.2
+2024-07-29 08:44:46,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=262020.0, ans=0.125
+2024-07-29 08:44:51,830 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.604e+01 5.514e+01 6.259e+01 6.946e+01 1.195e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 08:45:10,738 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.43 vs. limit=15.0
+2024-07-29 08:45:13,583 INFO [train.py:1114] (3/4) Epoch 20, batch 2300, loss[loss=0.1351, simple_loss=0.2277, pruned_loss=0.02124, over 4952.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2595, pruned_loss=0.03856, over 939212.25 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:45:55,264 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=262046.66666666666, ans=0.0
+2024-07-29 08:47:00,860 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=262060.0, ans=0.025
+2024-07-29 08:47:10,433 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.whiten, num_groups=1, num_channels=192, metric=4.29 vs. limit=12.0
+2024-07-29 08:47:32,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=262086.66666666666, ans=0.0
+2024-07-29 08:47:34,612 INFO [train.py:1114] (3/4) Epoch 20, batch 2350, loss[loss=0.1567, simple_loss=0.2535, pruned_loss=0.02997, over 4640.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2595, pruned_loss=0.03835, over 941478.74 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:48:11,156 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=9.37 vs. limit=15.0
+2024-07-29 08:48:11,195 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.50 vs. limit=10.0
+2024-07-29 08:48:28,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=262126.66666666666, ans=0.125
+2024-07-29 08:48:31,194 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=3.39 vs. limit=10.0
+2024-07-29 08:48:37,271 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer_ff3.min_abs, batch_count=262140.0, ans=0.2
+2024-07-29 08:48:49,107 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=262153.3333333333, ans=0.125
+2024-07-29 08:48:49,224 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=11.57 vs. limit=15.0
+2024-07-29 08:48:50,828 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.744e+01 5.808e+01 6.182e+01 6.944e+01 1.016e+02, threshold=1.236e+02, percent-clipped=0.0
+2024-07-29 08:48:51,633 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262153.3333333333, ans=0.125
+2024-07-29 08:48:54,795 INFO [train.py:1114] (3/4) Epoch 20, batch 2400, loss[loss=0.195, simple_loss=0.2758, pruned_loss=0.05705, over 4652.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2603, pruned_loss=0.03849, over 941251.29 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:48:57,419 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=8.12 vs. limit=15.0
+2024-07-29 08:48:59,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=262166.6666666667, ans=0.0
+2024-07-29 08:48:59,792 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262166.6666666667, ans=0.1
+2024-07-29 08:49:01,861 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262180.0, ans=0.125
+2024-07-29 08:49:06,108 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.24 vs. limit=15.0
+2024-07-29 08:49:06,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=262180.0, ans=0.025
+2024-07-29 08:49:09,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=262193.3333333333, ans=0.0
+2024-07-29 08:49:18,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer1.prob, batch_count=262206.6666666667, ans=0.125
+2024-07-29 08:49:20,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262206.6666666667, ans=0.125
+2024-07-29 08:49:32,083 INFO [train.py:1114] (3/4) Epoch 20, batch 2450, loss[loss=0.1722, simple_loss=0.2682, pruned_loss=0.03806, over 4703.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2615, pruned_loss=0.03908, over 936990.26 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:49:33,005 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=14.55 vs. limit=22.5
+2024-07-29 08:49:45,589 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262246.6666666667, ans=0.1
+2024-07-29 08:50:03,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=262260.0, ans=0.1
+2024-07-29 08:50:04,543 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer1.prob, batch_count=262260.0, ans=0.125
+2024-07-29 08:50:16,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=262273.3333333333, ans=0.125
+2024-07-29 08:50:30,681 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262286.6666666667, ans=0.125
+2024-07-29 08:50:33,800 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.547e+01 5.751e+01 6.244e+01 7.182e+01 1.173e+02, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 08:50:38,478 INFO [train.py:1114] (3/4) Epoch 20, batch 2500, loss[loss=0.167, simple_loss=0.2607, pruned_loss=0.03667, over 4803.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2616, pruned_loss=0.03925, over 938833.34 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:50:58,285 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=262313.3333333333, ans=0.125
+2024-07-29 08:50:58,354 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=262313.3333333333, ans=0.07
+2024-07-29 08:51:00,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=262326.6666666667, ans=0.125
+2024-07-29 08:51:00,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262326.6666666667, ans=0.125
+2024-07-29 08:51:01,037 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=6.88 vs. limit=8.0
+2024-07-29 08:51:05,852 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262326.6666666667, ans=0.1
+2024-07-29 08:51:06,528 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=262326.6666666667, ans=0.125
+2024-07-29 08:51:09,067 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=262326.6666666667, ans=0.0
+2024-07-29 08:51:27,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262353.3333333333, ans=0.125
+2024-07-29 08:51:32,586 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262353.3333333333, ans=0.1
+2024-07-29 08:51:32,595 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=262353.3333333333, ans=0.125
+2024-07-29 08:51:36,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=262353.3333333333, ans=0.2
+2024-07-29 08:51:43,162 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=262366.6666666667, ans=15.0
+2024-07-29 08:51:43,403 INFO [train.py:1114] (3/4) Epoch 20, batch 2550, loss[loss=0.1332, simple_loss=0.2256, pruned_loss=0.02042, over 4802.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2617, pruned_loss=0.03906, over 938349.31 frames. ], batch size: 11, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:51:43,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=262366.6666666667, ans=0.125
+2024-07-29 08:52:06,522 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.24 vs. limit=15.0
+2024-07-29 08:52:07,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=262393.3333333333, ans=0.125
+2024-07-29 08:52:09,542 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262406.6666666667, ans=0.1
+2024-07-29 08:52:12,658 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.47 vs. limit=15.0
+2024-07-29 08:52:23,025 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.587e+01 5.541e+01 6.134e+01 6.874e+01 1.013e+02, threshold=1.227e+02, percent-clipped=0.0
+2024-07-29 08:52:27,208 INFO [train.py:1114] (3/4) Epoch 20, batch 2600, loss[loss=0.1772, simple_loss=0.2666, pruned_loss=0.04385, over 4903.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2625, pruned_loss=0.03945, over 937033.16 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:52:31,451 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:52:52,966 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262460.0, ans=0.1
+2024-07-29 08:53:04,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=262473.3333333333, ans=0.125
+2024-07-29 08:53:05,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=192, metric=4.99 vs. limit=10.0
+2024-07-29 08:53:09,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.41 vs. limit=15.0
+2024-07-29 08:53:12,625 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.38 vs. limit=15.0
+2024-07-29 08:53:13,185 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.max_abs, batch_count=262486.6666666667, ans=10.0
+2024-07-29 08:53:24,803 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262486.6666666667, ans=0.1
+2024-07-29 08:53:24,809 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer_na.min_abs, batch_count=262486.6666666667, ans=0.02
+2024-07-29 08:53:27,585 INFO [train.py:1114] (3/4) Epoch 20, batch 2650, loss[loss=0.2204, simple_loss=0.3023, pruned_loss=0.06928, over 4631.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2626, pruned_loss=0.03964, over 939410.88 frames. ], batch size: 16, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:53:31,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=262500.0, ans=0.0
+2024-07-29 08:53:32,640 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=262500.0, ans=0.09899494936611666
+2024-07-29 08:53:43,438 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=11.20 vs. limit=15.0
+2024-07-29 08:53:52,904 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=512, metric=4.84 vs. limit=15.0
+2024-07-29 08:53:54,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=262540.0, ans=0.125
+2024-07-29 08:53:56,181 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=5.07 vs. limit=10.0
+2024-07-29 08:53:59,315 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=262540.0, ans=0.125
+2024-07-29 08:54:09,626 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.850e+01 5.564e+01 6.225e+01 7.006e+01 1.126e+02, threshold=1.245e+02, percent-clipped=0.0
+2024-07-29 08:54:09,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=262553.3333333333, ans=0.07
+2024-07-29 08:54:10,733 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.const_attention_rate, batch_count=262553.3333333333, ans=0.025
+2024-07-29 08:54:13,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=262553.3333333333, ans=0.2
+2024-07-29 08:54:14,098 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262566.6666666667, ans=0.125
+2024-07-29 08:54:14,579 INFO [train.py:1114] (3/4) Epoch 20, batch 2700, loss[loss=0.2443, simple_loss=0.3393, pruned_loss=0.07464, over 4736.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2635, pruned_loss=0.04001, over 939238.23 frames. ], batch size: 14, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:54:24,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=262580.0, ans=0.025
+2024-07-29 08:54:24,948 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262580.0, ans=0.1
+2024-07-29 08:54:25,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=262580.0, ans=0.2
+2024-07-29 08:54:25,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=262580.0, ans=0.1
+2024-07-29 08:54:25,646 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=262580.0, ans=0.125
+2024-07-29 08:54:26,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.prob, batch_count=262580.0, ans=0.125
+2024-07-29 08:54:30,338 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=262593.3333333333, ans=0.2
+2024-07-29 08:54:30,463 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=8.60 vs. limit=15.0
+2024-07-29 08:54:34,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=262593.3333333333, ans=0.0
+2024-07-29 08:54:52,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=262620.0, ans=0.125
+2024-07-29 08:54:54,031 INFO [train.py:1114] (3/4) Epoch 20, batch 2750, loss[loss=0.1715, simple_loss=0.2564, pruned_loss=0.04323, over 4711.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2626, pruned_loss=0.04005, over 939559.39 frames. ], batch size: 12, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:55:01,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=262646.6666666667, ans=0.07
+2024-07-29 08:55:04,275 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262646.6666666667, ans=0.1
+2024-07-29 08:55:06,655 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.whiten2.whitening_limit, batch_count=262646.6666666667, ans=15.0
+2024-07-29 08:55:09,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=262660.0, ans=0.2
+2024-07-29 08:55:40,168 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.537e+01 5.878e+01 6.772e+01 7.962e+01 1.092e+02, threshold=1.354e+02, percent-clipped=0.0
+2024-07-29 08:55:46,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=262686.6666666667, ans=0.0
+2024-07-29 08:55:47,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=262686.6666666667, ans=0.0
+2024-07-29 08:55:48,980 INFO [train.py:1114] (3/4) Epoch 20, batch 2800, loss[loss=0.2434, simple_loss=0.3227, pruned_loss=0.08206, over 3220.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2628, pruned_loss=0.04013, over 937453.21 frames. ], batch size: 35, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:55:50,178 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.65 vs. limit=10.0
+2024-07-29 08:55:58,221 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=7.76 vs. limit=15.0
+2024-07-29 08:56:02,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=262726.6666666667, ans=0.05
+2024-07-29 08:56:04,220 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=4.60 vs. limit=6.0
+2024-07-29 08:56:04,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=262726.6666666667, ans=0.125
+2024-07-29 08:56:08,157 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=262726.6666666667, ans=0.1
+2024-07-29 08:56:08,745 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=262740.0, ans=0.04949747468305833
+2024-07-29 08:56:23,252 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=262753.3333333333, ans=0.125
+2024-07-29 08:56:26,497 INFO [train.py:1114] (3/4) Epoch 20, batch 2850, loss[loss=0.1671, simple_loss=0.2497, pruned_loss=0.04222, over 4962.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2642, pruned_loss=0.04078, over 935463.05 frames. ], batch size: 13, lr: 3.74e-03, grad_scale: 32.0
+2024-07-29 08:56:26,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=262766.6666666667, ans=0.0
+2024-07-29 08:56:34,470 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=262780.0, ans=0.125
+2024-07-29 08:56:47,054 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.30 vs. limit=15.0
+2024-07-29 08:56:48,888 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=262793.3333333333, ans=0.125
+2024-07-29 08:56:51,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262793.3333333333, ans=0.1
+2024-07-29 08:56:53,777 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.26 vs. limit=15.0
+2024-07-29 08:56:56,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.out_combiner.scale_min, batch_count=262806.6666666667, ans=0.2
+2024-07-29 08:57:02,166 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.528e+01 5.799e+01 6.410e+01 7.214e+01 1.051e+02, threshold=1.282e+02, percent-clipped=0.0
+2024-07-29 08:57:03,606 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=262820.0, ans=0.1
+2024-07-29 08:57:05,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=262833.3333333333, ans=0.0
+2024-07-29 08:57:06,627 INFO [train.py:1114] (3/4) Epoch 20, batch 2900, loss[loss=0.1964, simple_loss=0.2796, pruned_loss=0.05661, over 4824.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2643, pruned_loss=0.04021, over 939508.16 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:57:28,230 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 08:57:28,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=262873.3333333333, ans=0.2
+2024-07-29 08:57:40,145 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=262900.0, ans=0.0
+2024-07-29 08:57:40,660 INFO [train.py:1114] (3/4) Epoch 20, batch 2950, loss[loss=0.1517, simple_loss=0.2393, pruned_loss=0.0321, over 4698.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2625, pruned_loss=0.03981, over 938317.87 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0
+2024-07-29 08:57:51,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=262913.3333333333, ans=0.035
+2024-07-29 08:57:53,959 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=262926.6666666667, ans=0.0
+2024-07-29 08:58:02,055 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.51 vs.
limit=15.0 +2024-07-29 08:58:09,793 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=262953.3333333333, ans=0.0 +2024-07-29 08:58:09,801 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer2.prob, batch_count=262953.3333333333, ans=0.125 +2024-07-29 08:58:12,843 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.161e+01 5.582e+01 5.984e+01 6.557e+01 9.213e+01, threshold=1.197e+02, percent-clipped=0.0 +2024-07-29 08:58:17,484 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer2.prob, batch_count=262953.3333333333, ans=0.125 +2024-07-29 08:58:18,636 INFO [train.py:1114] (3/4) Epoch 20, batch 3000, loss[loss=0.1499, simple_loss=0.2452, pruned_loss=0.02731, over 4757.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2621, pruned_loss=0.03969, over 937697.67 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 08:58:18,636 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 08:58:44,396 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1605, simple_loss=0.2625, pruned_loss=0.02922, over 944034.00 frames. +2024-07-29 08:58:44,397 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 08:58:47,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=262966.6666666667, ans=0.125 +2024-07-29 08:58:58,923 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=262980.0, ans=0.125 +2024-07-29 08:59:11,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=262993.3333333333, ans=0.1 +2024-07-29 08:59:15,909 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=263006.6666666667, ans=0.2 +2024-07-29 08:59:19,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer2.prob, batch_count=263006.6666666667, ans=0.125 +2024-07-29 08:59:20,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.max_positive, batch_count=263020.0, ans=0.95 +2024-07-29 08:59:23,387 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=263020.0, ans=0.04949747468305833 +2024-07-29 08:59:24,677 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=263020.0, ans=0.05 +2024-07-29 08:59:26,074 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=263020.0, ans=0.0 +2024-07-29 08:59:38,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=263020.0, ans=0.2 +2024-07-29 08:59:39,720 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 08:59:40,237 INFO [train.py:1114] (3/4) Epoch 20, batch 3050, loss[loss=0.1424, simple_loss=0.2382, pruned_loss=0.02331, over 4633.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2623, pruned_loss=0.0395, over 936600.52 frames. 
], batch size: 12, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 08:59:55,502 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=263046.6666666667, ans=0.125 +2024-07-29 08:59:55,540 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263046.6666666667, ans=0.1 +2024-07-29 08:59:55,730 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=4.86 vs. limit=12.0 +2024-07-29 08:59:58,056 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263060.0, ans=0.1 +2024-07-29 09:00:00,270 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=4.00 vs. limit=10.0 +2024-07-29 09:00:04,967 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=263073.3333333333, ans=0.2 +2024-07-29 09:00:19,901 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.456e+01 5.658e+01 6.248e+01 7.167e+01 1.022e+02, threshold=1.250e+02, percent-clipped=0.0 +2024-07-29 09:00:30,470 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=4.46 vs. limit=12.0 +2024-07-29 09:00:33,142 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer1.prob, batch_count=263100.0, ans=0.125 +2024-07-29 09:00:33,576 INFO [train.py:1114] (3/4) Epoch 20, batch 3100, loss[loss=0.1912, simple_loss=0.2823, pruned_loss=0.05003, over 4594.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2622, pruned_loss=0.0396, over 937446.52 frames. ], batch size: 16, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:00:46,126 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=263113.3333333333, ans=0.125 +2024-07-29 09:00:50,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=263126.6666666667, ans=0.125 +2024-07-29 09:01:04,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263140.0, ans=0.1 +2024-07-29 09:01:13,567 INFO [train.py:1114] (3/4) Epoch 20, batch 3150, loss[loss=0.192, simple_loss=0.2924, pruned_loss=0.0458, over 4612.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2618, pruned_loss=0.03917, over 937612.98 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:01:30,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=263193.3333333333, ans=0.025 +2024-07-29 09:01:46,061 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.636e+01 5.746e+01 6.588e+01 7.668e+01 1.344e+02, threshold=1.318e+02, percent-clipped=1.0 +2024-07-29 09:01:48,576 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.95 vs. limit=15.0 +2024-07-29 09:01:50,149 INFO [train.py:1114] (3/4) Epoch 20, batch 3200, loss[loss=0.1666, simple_loss=0.2597, pruned_loss=0.03672, over 4820.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2619, pruned_loss=0.03923, over 938982.59 frames. 
], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:02:01,072 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=263246.6666666667, ans=0.2 +2024-07-29 09:02:02,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=263246.6666666667, ans=0.2 +2024-07-29 09:02:17,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263286.6666666667, ans=0.1 +2024-07-29 09:02:22,316 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=263286.6666666667, ans=0.2 +2024-07-29 09:02:27,328 INFO [train.py:1114] (3/4) Epoch 20, batch 3250, loss[loss=0.1831, simple_loss=0.273, pruned_loss=0.04661, over 4932.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2618, pruned_loss=0.03902, over 940043.43 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:02:28,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=263300.0, ans=0.0 +2024-07-29 09:02:33,693 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.00 vs. limit=12.0 +2024-07-29 09:02:34,096 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263313.3333333333, ans=0.1 +2024-07-29 09:02:34,112 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.scale_min, batch_count=263313.3333333333, ans=0.2 +2024-07-29 09:02:49,080 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer1.prob, batch_count=263340.0, ans=0.125 +2024-07-29 09:02:54,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=263340.0, ans=0.2 +2024-07-29 09:02:59,774 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.410e+01 5.543e+01 6.289e+01 7.306e+01 9.331e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 09:03:00,359 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.10 vs. limit=8.0 +2024-07-29 09:03:02,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263353.3333333333, ans=0.1 +2024-07-29 09:03:03,839 INFO [train.py:1114] (3/4) Epoch 20, batch 3300, loss[loss=0.2165, simple_loss=0.2966, pruned_loss=0.06819, over 4726.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2612, pruned_loss=0.03916, over 940358.69 frames. ], batch size: 19, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:03:04,137 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=263366.6666666667, ans=0.125 +2024-07-29 09:03:15,564 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=263380.0, ans=0.0 +2024-07-29 09:03:23,694 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=512, metric=20.26 vs. 
limit=22.5 +2024-07-29 09:03:35,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=263420.0, ans=0.0 +2024-07-29 09:03:37,235 INFO [train.py:1114] (3/4) Epoch 20, batch 3350, loss[loss=0.2104, simple_loss=0.2996, pruned_loss=0.06065, over 4622.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2617, pruned_loss=0.03941, over 938571.73 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:03:39,866 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=263433.3333333333, ans=0.125 +2024-07-29 09:03:42,683 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module1.balancer1.prob, batch_count=263433.3333333333, ans=0.125 +2024-07-29 09:03:44,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.71 vs. limit=15.0 +2024-07-29 09:03:48,468 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.41 vs. limit=12.0 +2024-07-29 09:04:00,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263473.3333333333, ans=0.125 +2024-07-29 09:04:07,370 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.670e+01 5.699e+01 6.337e+01 7.173e+01 1.148e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 09:04:11,616 INFO [train.py:1114] (3/4) Epoch 20, batch 3400, loss[loss=0.16, simple_loss=0.2498, pruned_loss=0.03513, over 4810.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2615, pruned_loss=0.03943, over 937613.36 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:04:12,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_skip_rate, batch_count=263500.0, ans=0.0 +2024-07-29 09:04:18,117 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=18.82 vs. limit=22.5 +2024-07-29 09:04:32,247 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=263526.6666666667, ans=0.125 +2024-07-29 09:04:36,474 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:04:49,171 INFO [train.py:1114] (3/4) Epoch 20, batch 3450, loss[loss=0.207, simple_loss=0.2978, pruned_loss=0.05813, over 4713.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2614, pruned_loss=0.03918, over 937999.29 frames. 
], batch size: 19, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:04:53,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.balancer.min_positive, batch_count=263566.6666666667, ans=0.05 +2024-07-29 09:05:00,376 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.hidden_balancer.prob, batch_count=263580.0, ans=0.125 +2024-07-29 09:05:07,299 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=263593.3333333333, ans=0.025 +2024-07-29 09:05:15,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=263620.0, ans=0.2 +2024-07-29 09:05:18,523 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.353e+01 5.782e+01 6.590e+01 7.406e+01 1.017e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 09:05:22,632 INFO [train.py:1114] (3/4) Epoch 20, batch 3500, loss[loss=0.166, simple_loss=0.2529, pruned_loss=0.03953, over 4937.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2613, pruned_loss=0.03923, over 938640.53 frames. ], batch size: 12, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:05:31,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=263646.6666666667, ans=0.1 +2024-07-29 09:05:51,811 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=263686.6666666667, ans=0.125 +2024-07-29 09:05:53,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=263686.6666666667, ans=0.1 +2024-07-29 09:05:56,339 INFO [train.py:1114] (3/4) Epoch 20, batch 3550, loss[loss=0.1582, simple_loss=0.2658, pruned_loss=0.0253, over 4673.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2604, pruned_loss=0.03868, over 938806.42 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:06:01,309 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263700.0, ans=0.1 +2024-07-29 09:06:19,086 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=263740.0, ans=0.2 +2024-07-29 09:06:30,417 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.38 vs. limit=15.0 +2024-07-29 09:06:35,176 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.555e+01 5.573e+01 6.229e+01 6.741e+01 1.100e+02, threshold=1.246e+02, percent-clipped=0.0 +2024-07-29 09:06:40,734 INFO [train.py:1114] (3/4) Epoch 20, batch 3600, loss[loss=0.1425, simple_loss=0.2401, pruned_loss=0.02241, over 4969.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2602, pruned_loss=0.0386, over 940750.40 frames. ], batch size: 13, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:06:40,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=263766.6666666667, ans=0.1 +2024-07-29 09:06:58,394 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=384, metric=20.99 vs. 
limit=22.5 +2024-07-29 09:07:02,758 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=263806.6666666667, ans=0.125 +2024-07-29 09:07:06,831 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff2_skip_rate, batch_count=263820.0, ans=0.0 +2024-07-29 09:07:12,369 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:07:14,879 INFO [train.py:1114] (3/4) Epoch 20, batch 3650, loss[loss=0.1836, simple_loss=0.2787, pruned_loss=0.04424, over 4911.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.259, pruned_loss=0.038, over 941498.98 frames. ], batch size: 15, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:07:22,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=263846.6666666667, ans=0.0 +2024-07-29 09:07:22,627 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten.whitening_limit, batch_count=263846.6666666667, ans=15.0 +2024-07-29 09:07:26,341 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=263846.6666666667, ans=0.125 +2024-07-29 09:07:36,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=263873.3333333333, ans=0.0 +2024-07-29 09:07:44,531 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.409e+01 5.430e+01 6.137e+01 7.012e+01 1.010e+02, threshold=1.227e+02, percent-clipped=0.0 +2024-07-29 09:07:48,574 INFO [train.py:1114] (3/4) Epoch 20, batch 3700, loss[loss=0.1532, simple_loss=0.251, pruned_loss=0.02768, over 4942.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2586, pruned_loss=0.0378, over 942626.20 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:07:51,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=263900.0, ans=0.1 +2024-07-29 09:08:05,353 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.17 vs. limit=22.5 +2024-07-29 09:08:14,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=263953.3333333333, ans=0.2 +2024-07-29 09:08:14,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=263953.3333333333, ans=0.05 +2024-07-29 09:08:17,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=263953.3333333333, ans=0.125 +2024-07-29 09:08:21,544 INFO [train.py:1114] (3/4) Epoch 20, batch 3750, loss[loss=0.1342, simple_loss=0.2204, pruned_loss=0.02404, over 4794.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.26, pruned_loss=0.0384, over 943912.35 frames. ], batch size: 11, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:08:38,347 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.hidden_balancer.prob, batch_count=263993.3333333333, ans=0.125 +2024-07-29 09:08:43,472 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.83 vs. 
limit=15.0 +2024-07-29 09:08:57,862 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.310e+01 5.705e+01 6.410e+01 7.000e+01 1.025e+02, threshold=1.282e+02, percent-clipped=0.0 +2024-07-29 09:09:00,008 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer2.prob, batch_count=264020.0, ans=0.125 +2024-07-29 09:09:01,337 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=264033.3333333333, ans=0.025 +2024-07-29 09:09:02,215 INFO [train.py:1114] (3/4) Epoch 20, batch 3800, loss[loss=0.1428, simple_loss=0.2451, pruned_loss=0.02024, over 4812.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2605, pruned_loss=0.0386, over 942162.52 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:09:09,022 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:09:28,818 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=12.65 vs. limit=15.0 +2024-07-29 09:09:54,166 INFO [train.py:1114] (3/4) Epoch 20, batch 3850, loss[loss=0.1889, simple_loss=0.2768, pruned_loss=0.05043, over 4653.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2599, pruned_loss=0.03823, over 942628.76 frames. ], batch size: 16, lr: 3.73e-03, grad_scale: 32.0 +2024-07-29 09:10:00,983 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=264113.3333333333, ans=0.125 +2024-07-29 09:10:03,799 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass_mid.scale_min, batch_count=264113.3333333333, ans=0.2 +2024-07-29 09:10:08,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.53 vs. limit=15.0 +2024-07-29 09:10:12,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=264126.6666666667, ans=0.125 +2024-07-29 09:10:16,953 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.57 vs. limit=15.0 +2024-07-29 09:10:24,565 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.802e+01 5.626e+01 6.107e+01 6.849e+01 9.588e+01, threshold=1.221e+02, percent-clipped=0.0 +2024-07-29 09:10:31,761 INFO [train.py:1114] (3/4) Epoch 20, batch 3900, loss[loss=0.1578, simple_loss=0.2553, pruned_loss=0.03013, over 4808.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2607, pruned_loss=0.03841, over 942805.90 frames. ], batch size: 14, lr: 3.73e-03, grad_scale: 16.0 +2024-07-29 09:10:34,465 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=264166.6666666667, ans=0.07 +2024-07-29 09:11:03,952 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.98 vs. limit=6.0 +2024-07-29 09:11:26,907 INFO [train.py:1114] (3/4) Epoch 20, batch 3950, loss[loss=0.164, simple_loss=0.26, pruned_loss=0.034, over 4837.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2605, pruned_loss=0.03835, over 944737.70 frames. 
], batch size: 16, lr: 3.72e-03, grad_scale: 16.0 +2024-07-29 09:11:28,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.prob, batch_count=264233.3333333333, ans=0.125 +2024-07-29 09:11:32,592 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer2.prob, batch_count=264233.3333333333, ans=0.125 +2024-07-29 09:11:54,257 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=264273.3333333333, ans=0.2 +2024-07-29 09:11:55,681 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.whiten, num_groups=1, num_channels=384, metric=3.57 vs. limit=12.0 +2024-07-29 09:11:59,942 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.407e+01 5.612e+01 6.214e+01 7.012e+01 1.031e+02, threshold=1.243e+02, percent-clipped=0.0 +2024-07-29 09:12:03,451 INFO [train.py:1114] (3/4) Epoch 20, batch 4000, loss[loss=0.18, simple_loss=0.27, pruned_loss=0.04497, over 4783.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2615, pruned_loss=0.03915, over 941167.86 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:12:41,042 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=264300.0, ans=0.125 +2024-07-29 09:13:00,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=264340.0, ans=0.125 +2024-07-29 09:13:03,914 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264340.0, ans=0.1 +2024-07-29 09:13:42,174 INFO [train.py:1114] (3/4) Epoch 20, batch 4050, loss[loss=0.232, simple_loss=0.3153, pruned_loss=0.07435, over 3159.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2612, pruned_loss=0.03919, over 939277.68 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:13:51,343 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264380.0, ans=0.125 +2024-07-29 09:13:54,675 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=264380.0, ans=0.0 +2024-07-29 09:14:16,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264420.0, ans=0.125 +2024-07-29 09:14:18,733 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 3.925e+01 5.618e+01 6.150e+01 7.099e+01 1.073e+02, threshold=1.230e+02, percent-clipped=0.0 +2024-07-29 09:14:20,595 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn2.whiten, num_groups=1, num_channels=384, metric=15.97 vs. limit=22.5 +2024-07-29 09:14:20,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=264420.0, ans=0.125 +2024-07-29 09:14:22,374 INFO [train.py:1114] (3/4) Epoch 20, batch 4100, loss[loss=0.1914, simple_loss=0.2794, pruned_loss=0.05167, over 4904.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2617, pruned_loss=0.03958, over 938919.68 frames. 
], batch size: 15, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:14:24,043 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=264433.3333333333, ans=0.0 +2024-07-29 09:14:27,488 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=264433.3333333333, ans=0.125 +2024-07-29 09:14:34,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264446.6666666667, ans=0.125 +2024-07-29 09:14:37,806 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=264460.0, ans=0.125 +2024-07-29 09:14:38,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=264460.0, ans=0.125 +2024-07-29 09:14:42,798 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=6.66 vs. limit=15.0 +2024-07-29 09:14:44,127 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module1.balancer1.prob, batch_count=264460.0, ans=0.125 +2024-07-29 09:14:51,485 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.attention_skip_rate, batch_count=264486.6666666667, ans=0.0 +2024-07-29 09:14:55,215 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=11.25 vs. limit=22.5 +2024-07-29 09:14:58,599 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.24 vs. limit=22.5 +2024-07-29 09:14:59,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.47 vs. limit=8.0 +2024-07-29 09:14:59,486 INFO [train.py:1114] (3/4) Epoch 20, batch 4150, loss[loss=0.1443, simple_loss=0.24, pruned_loss=0.02425, over 4834.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2614, pruned_loss=0.03931, over 938827.93 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:15:03,198 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=264500.0, ans=0.025 +2024-07-29 09:15:16,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=264526.6666666667, ans=0.0 +2024-07-29 09:15:26,265 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:15:35,198 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.706e+01 6.359e+01 7.433e+01 1.126e+02, threshold=1.272e+02, percent-clipped=0.0 +2024-07-29 09:15:39,929 INFO [train.py:1114] (3/4) Epoch 20, batch 4200, loss[loss=0.2001, simple_loss=0.29, pruned_loss=0.05506, over 4904.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2623, pruned_loss=0.03976, over 940315.55 frames. 
], batch size: 15, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:15:58,582 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=264580.0, ans=0.0 +2024-07-29 09:16:10,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=264580.0, ans=0.0 +2024-07-29 09:16:12,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer1.prob, batch_count=264580.0, ans=0.125 +2024-07-29 09:16:12,075 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=264580.0, ans=0.125 +2024-07-29 09:16:12,639 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264580.0, ans=0.1 +2024-07-29 09:16:31,903 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=7.67 vs. limit=15.0 +2024-07-29 09:16:35,915 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=12.78 vs. limit=15.0 +2024-07-29 09:16:39,848 INFO [train.py:1114] (3/4) Epoch 20, batch 4250, loss[loss=0.1586, simple_loss=0.2402, pruned_loss=0.0385, over 4635.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2621, pruned_loss=0.0397, over 941484.86 frames. ], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:06,329 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=5.82 vs. limit=15.0 +2024-07-29 09:17:09,650 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.05 vs. limit=15.0 +2024-07-29 09:17:09,904 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.690e+01 5.620e+01 6.275e+01 6.899e+01 1.013e+02, threshold=1.255e+02, percent-clipped=0.0 +2024-07-29 09:17:13,148 INFO [train.py:1114] (3/4) Epoch 20, batch 4300, loss[loss=0.1715, simple_loss=0.2648, pruned_loss=0.0391, over 4765.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2624, pruned_loss=0.03973, over 940886.80 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:21,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264700.0, ans=0.1 +2024-07-29 09:17:24,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=264713.3333333333, ans=0.2 +2024-07-29 09:17:25,973 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=264713.3333333333, ans=0.2 +2024-07-29 09:17:30,509 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=264726.6666666667, ans=0.1 +2024-07-29 09:17:33,564 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=10.61 vs. 
limit=22.5 +2024-07-29 09:17:42,039 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=264740.0, ans=0.125 +2024-07-29 09:17:42,943 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.whiten.whitening_limit, batch_count=264740.0, ans=12.0 +2024-07-29 09:17:47,360 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=264753.3333333333, ans=0.1 +2024-07-29 09:17:51,898 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:17:53,010 INFO [train.py:1114] (3/4) Epoch 20, batch 4350, loss[loss=0.1782, simple_loss=0.2638, pruned_loss=0.04631, over 4766.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2635, pruned_loss=0.04007, over 941467.74 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:17:54,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=264766.6666666667, ans=0.1 +2024-07-29 09:17:58,369 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=264766.6666666667, ans=0.2 +2024-07-29 09:18:24,602 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer2.prob, batch_count=264820.0, ans=0.125 +2024-07-29 09:18:27,678 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.816e+01 5.702e+01 6.164e+01 6.960e+01 9.569e+01, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 09:18:27,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=264820.0, ans=0.125 +2024-07-29 09:18:31,108 INFO [train.py:1114] (3/4) Epoch 20, batch 4400, loss[loss=0.1945, simple_loss=0.2858, pruned_loss=0.05158, over 4804.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2635, pruned_loss=0.03972, over 941145.95 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:18:51,707 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=264860.0, ans=0.0 +2024-07-29 09:19:00,719 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=264873.3333333333, ans=0.125 +2024-07-29 09:19:02,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.skip_rate, batch_count=264873.3333333333, ans=0.09899494936611666 +2024-07-29 09:19:04,352 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264873.3333333333, ans=0.0 +2024-07-29 09:19:12,245 INFO [train.py:1114] (3/4) Epoch 20, batch 4450, loss[loss=0.1396, simple_loss=0.2277, pruned_loss=0.02578, over 4947.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2625, pruned_loss=0.03917, over 939031.63 frames. 
], batch size: 12, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:19:14,314 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=264900.0, ans=0.125 +2024-07-29 09:19:17,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=264900.0, ans=0.1 +2024-07-29 09:19:18,914 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:19:19,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=264900.0, ans=0.125 +2024-07-29 09:19:21,562 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=264913.3333333333, ans=0.125 +2024-07-29 09:19:21,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass_mid.scale_min, batch_count=264913.3333333333, ans=0.2 +2024-07-29 09:19:29,140 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:19:29,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=264926.6666666667, ans=0.125 +2024-07-29 09:19:33,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=264926.6666666667, ans=0.125 +2024-07-29 09:19:35,577 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.34 vs. limit=8.0 +2024-07-29 09:19:36,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=264940.0, ans=0.0 +2024-07-29 09:19:38,708 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=256, metric=3.92 vs. limit=15.0 +2024-07-29 09:19:53,315 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.879e+01 5.589e+01 6.388e+01 7.277e+01 9.167e+01, threshold=1.278e+02, percent-clipped=0.0 +2024-07-29 09:19:57,880 INFO [train.py:1114] (3/4) Epoch 20, batch 4500, loss[loss=0.1914, simple_loss=0.2763, pruned_loss=0.05321, over 4748.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2629, pruned_loss=0.03932, over 938489.01 frames. ], batch size: 14, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:20:05,509 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.38 vs. 
limit=15.0 +2024-07-29 09:20:06,284 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=264980.0, ans=0.2 +2024-07-29 09:20:11,938 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=264993.3333333333, ans=0.125 +2024-07-29 09:20:18,288 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.min_abs, batch_count=264993.3333333333, ans=0.5 +2024-07-29 09:20:18,392 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer1.prob, batch_count=264993.3333333333, ans=0.125 +2024-07-29 09:20:28,081 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module1.balancer1.prob, batch_count=265006.6666666667, ans=0.125 +2024-07-29 09:20:35,708 INFO [train.py:1114] (3/4) Epoch 20, batch 4550, loss[loss=0.1613, simple_loss=0.2502, pruned_loss=0.03619, over 4897.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2624, pruned_loss=0.0391, over 940208.07 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:20:51,019 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer1.prob, batch_count=265060.0, ans=0.125 +2024-07-29 09:20:51,082 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.const_attention_rate, batch_count=265060.0, ans=0.025 +2024-07-29 09:20:56,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265073.3333333333, ans=0.125 +2024-07-29 09:21:08,498 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=265073.3333333333, ans=0.125 +2024-07-29 09:21:08,735 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=10.43 vs. limit=15.0 +2024-07-29 09:21:16,615 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.804e+01 5.639e+01 6.516e+01 7.459e+01 1.043e+02, threshold=1.303e+02, percent-clipped=0.0 +2024-07-29 09:21:19,973 INFO [train.py:1114] (3/4) Epoch 20, batch 4600, loss[loss=0.1821, simple_loss=0.2818, pruned_loss=0.04122, over 4508.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2617, pruned_loss=0.03908, over 938323.17 frames. ], batch size: 21, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:21:22,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265100.0, ans=0.125 +2024-07-29 09:21:41,525 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer2.min_positive, batch_count=265140.0, ans=0.05 +2024-07-29 09:21:42,148 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=265140.0, ans=0.0 +2024-07-29 09:21:42,998 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=16.56 vs. limit=22.5 +2024-07-29 09:21:53,410 INFO [train.py:1114] (3/4) Epoch 20, batch 4650, loss[loss=0.2233, simple_loss=0.3139, pruned_loss=0.06633, over 4837.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2627, pruned_loss=0.03965, over 940160.33 frames. 
], batch size: 16, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:21:57,073 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 09:22:06,218 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.39 vs. limit=15.0 +2024-07-29 09:22:12,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass_mid.scale_min, batch_count=265193.3333333333, ans=0.2 +2024-07-29 09:22:13,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=265193.3333333333, ans=0.0 +2024-07-29 09:22:14,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=265206.6666666667, ans=0.0 +2024-07-29 09:22:20,783 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.32 vs. limit=15.0 +2024-07-29 09:22:22,737 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.out_combiner.scale_min, batch_count=265220.0, ans=0.2 +2024-07-29 09:22:25,176 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.723e+01 5.549e+01 5.991e+01 6.748e+01 1.053e+02, threshold=1.198e+02, percent-clipped=0.0 +2024-07-29 09:22:26,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=265220.0, ans=0.125 +2024-07-29 09:22:28,489 INFO [train.py:1114] (3/4) Epoch 20, batch 4700, loss[loss=0.1367, simple_loss=0.2229, pruned_loss=0.02525, over 4702.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.262, pruned_loss=0.03958, over 937685.96 frames. ], batch size: 11, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:22:36,712 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265233.3333333333, ans=0.125 +2024-07-29 09:22:44,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265246.6666666667, ans=0.125 +2024-07-29 09:23:01,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265273.3333333333, ans=0.125 +2024-07-29 09:23:12,106 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=265300.0, ans=0.0 +2024-07-29 09:23:12,638 INFO [train.py:1114] (3/4) Epoch 20, batch 4750, loss[loss=0.1506, simple_loss=0.2531, pruned_loss=0.02402, over 4559.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2624, pruned_loss=0.03961, over 935926.07 frames. 
], batch size: 21, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:23:23,144 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=265313.3333333333, ans=0.0 +2024-07-29 09:23:23,173 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff3_skip_rate, batch_count=265313.3333333333, ans=0.0 +2024-07-29 09:23:25,175 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265313.3333333333, ans=0.1 +2024-07-29 09:23:28,855 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=265313.3333333333, ans=0.125 +2024-07-29 09:23:51,123 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.dropout.p, batch_count=265340.0, ans=0.1 +2024-07-29 09:23:59,498 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.589e+01 5.596e+01 6.192e+01 7.037e+01 1.008e+02, threshold=1.238e+02, percent-clipped=0.0 +2024-07-29 09:24:06,740 INFO [train.py:1114] (3/4) Epoch 20, batch 4800, loss[loss=0.1784, simple_loss=0.2693, pruned_loss=0.04371, over 4694.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03978, over 933006.96 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0 +2024-07-29 09:24:33,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=265393.3333333333, ans=0.125 +2024-07-29 09:24:36,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=265393.3333333333, ans=0.0 +2024-07-29 09:24:38,133 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.51 vs. limit=15.0 +2024-07-29 09:24:50,454 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=265420.0, ans=0.0 +2024-07-29 09:24:53,009 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=265420.0, ans=0.125 +2024-07-29 09:24:59,666 INFO [train.py:1114] (3/4) Epoch 20, batch 4850, loss[loss=0.1792, simple_loss=0.2793, pruned_loss=0.03951, over 4743.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2624, pruned_loss=0.04005, over 932609.79 frames. 
], batch size: 14, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:25:13,253 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=265433.3333333333, ans=0.0
+2024-07-29 09:25:20,689 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=265460.0, ans=0.0
+2024-07-29 09:25:22,774 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=265460.0, ans=0.125
+2024-07-29 09:25:23,512 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=265460.0, ans=0.025
+2024-07-29 09:25:35,028 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.attention_skip_rate, batch_count=265486.6666666667, ans=0.0
+2024-07-29 09:25:36,942 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=265486.6666666667, ans=0.0
+2024-07-29 09:25:43,423 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.605e+01 5.554e+01 6.055e+01 6.631e+01 1.173e+02, threshold=1.211e+02, percent-clipped=0.0
+2024-07-29 09:25:44,261 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=265486.6666666667, ans=0.1
+2024-07-29 09:25:46,888 INFO [train.py:1114] (3/4) Epoch 20, batch 4900, loss[loss=0.1776, simple_loss=0.2587, pruned_loss=0.04827, over 4760.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2623, pruned_loss=0.03984, over 934146.62 frames. ], batch size: 13, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:25:58,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=265513.3333333333, ans=0.2
+2024-07-29 09:26:03,682 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=265526.6666666667, ans=0.0
+2024-07-29 09:26:34,954 INFO [train.py:1114] (3/4) Epoch 20, batch 4950, loss[loss=0.1907, simple_loss=0.279, pruned_loss=0.0512, over 3359.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2628, pruned_loss=0.04012, over 931325.23 frames. ], batch size: 35, lr: 3.72e-03, grad_scale: 32.0
+2024-07-29 09:26:41,872 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=265580.0, ans=0.125
+2024-07-29 09:26:45,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=265580.0, ans=0.125
+2024-07-29 09:26:54,822 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265606.6666666667, ans=0.0
+2024-07-29 09:27:01,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=265620.0, ans=0.0
+2024-07-29 09:27:04,054 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:27:11,246 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.725e+01 5.554e+01 6.246e+01 6.923e+01 9.859e+01, threshold=1.249e+02, percent-clipped=0.0
+2024-07-29 09:27:12,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=265620.0, ans=0.0
+2024-07-29 09:27:14,684 INFO [train.py:1114] (3/4) Epoch 20, batch 5000, loss[loss=0.1741, simple_loss=0.2604, pruned_loss=0.04388, over 4650.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2627, pruned_loss=0.03991, over 935094.78 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:27:16,941 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=6.24 vs. limit=15.0
+2024-07-29 09:27:26,277 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.scale_min, batch_count=265646.6666666667, ans=0.2
+2024-07-29 09:27:33,796 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265660.0, ans=0.1
+2024-07-29 09:27:34,355 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265660.0, ans=0.125
+2024-07-29 09:27:39,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=265673.3333333333, ans=0.1
+2024-07-29 09:27:41,820 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=265673.3333333333, ans=0.0
+2024-07-29 09:27:50,274 INFO [train.py:1114] (3/4) Epoch 20, batch 5050, loss[loss=0.1625, simple_loss=0.2426, pruned_loss=0.04117, over 4857.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03983, over 937776.23 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:27:52,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=265700.0, ans=0.125
+2024-07-29 09:27:54,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=265700.0, ans=0.0
+2024-07-29 09:28:01,218 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:28:10,867 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=265713.3333333333, ans=0.1
+2024-07-29 09:28:10,990 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=265713.3333333333, ans=0.0
+2024-07-29 09:28:24,408 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=8.75 vs. limit=15.0
+2024-07-29 09:28:27,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=265740.0, ans=0.125
+2024-07-29 09:28:34,374 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.463e+01 5.782e+01 6.489e+01 7.303e+01 1.011e+02, threshold=1.298e+02, percent-clipped=0.0
+2024-07-29 09:28:35,290 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=265753.3333333333, ans=0.125
+2024-07-29 09:28:36,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff3_skip_rate, batch_count=265753.3333333333, ans=0.0
+2024-07-29 09:28:38,658 INFO [train.py:1114] (3/4) Epoch 20, batch 5100, loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03109, over 4776.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2632, pruned_loss=0.04059, over 935090.26 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:28:39,767 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.67 vs. limit=6.0
+2024-07-29 09:29:09,816 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.scale_min, batch_count=265820.0, ans=0.2
+2024-07-29 09:29:11,197 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer2.prob, batch_count=265820.0, ans=0.125
+2024-07-29 09:29:12,312 INFO [train.py:1114] (3/4) Epoch 20, batch 5150, loss[loss=0.1727, simple_loss=0.2655, pruned_loss=0.03997, over 4852.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2629, pruned_loss=0.04054, over 936183.69 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:29:14,021 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=4.50 vs. limit=15.0
+2024-07-29 09:29:14,548 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass_mid.scale_min, batch_count=265833.3333333333, ans=0.2
+2024-07-29 09:29:16,560 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.1.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:29:16,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=265833.3333333333, ans=0.125
+2024-07-29 09:29:32,672 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=12.05 vs. limit=15.0
+2024-07-29 09:29:34,797 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn2.whiten, num_groups=1, num_channels=192, metric=12.15 vs. limit=22.5
+2024-07-29 09:29:36,732 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=1.93 vs. limit=6.0
+2024-07-29 09:29:42,066 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=265886.6666666667, ans=0.95
+2024-07-29 09:29:44,549 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.013e+01 5.725e+01 6.279e+01 7.318e+01 1.119e+02, threshold=1.256e+02, percent-clipped=0.0
+2024-07-29 09:29:46,958 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=265886.6666666667, ans=0.125
+2024-07-29 09:29:47,591 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.balancer2.prob, batch_count=265900.0, ans=0.125
+2024-07-29 09:30:06,161 INFO [train.py:1114] (3/4) Epoch 20, batch 5200, loss[loss=0.1771, simple_loss=0.2826, pruned_loss=0.03582, over 4667.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2624, pruned_loss=0.03999, over 936421.43 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:30:07,320 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=17.07 vs. limit=15.0
+2024-07-29 09:30:13,029 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=265913.3333333333, ans=0.0
+2024-07-29 09:30:28,417 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.skip_rate, batch_count=265913.3333333333, ans=0.07
+2024-07-29 09:30:28,501 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.04 vs. limit=15.0
+2024-07-29 09:30:33,523 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=10.31 vs. limit=10.0
+2024-07-29 09:31:00,351 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.64 vs. limit=12.0
+2024-07-29 09:31:02,528 INFO [train.py:1114] (3/4) Epoch 20, batch 5250, loss[loss=0.1777, simple_loss=0.2618, pruned_loss=0.04675, over 4898.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2616, pruned_loss=0.03956, over 935815.99 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:31:04,612 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=265966.6666666667, ans=0.125
+2024-07-29 09:31:10,014 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=265980.0, ans=0.125
+2024-07-29 09:31:26,835 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=266006.6666666667, ans=0.2
+2024-07-29 09:31:30,171 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266020.0, ans=0.1
+2024-07-29 09:31:31,549 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.skip_rate, batch_count=266020.0, ans=0.09899494936611666
+2024-07-29 09:31:32,636 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.533e+01 5.588e+01 6.109e+01 7.391e+01 1.107e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-29 09:31:32,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=266020.0, ans=0.0
+2024-07-29 09:31:37,707 INFO [train.py:1114] (3/4) Epoch 20, batch 5300, loss[loss=0.1763, simple_loss=0.2536, pruned_loss=0.04952, over 4617.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2613, pruned_loss=0.03959, over 934211.79 frames. ], batch size: 16, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:31:44,370 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module2.balancer1.prob, batch_count=266046.6666666667, ans=0.125
+2024-07-29 09:31:45,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=266046.6666666667, ans=0.0
+2024-07-29 09:31:48,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=266046.6666666667, ans=0.0
+2024-07-29 09:31:59,476 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.const_attention_rate, batch_count=266073.3333333333, ans=0.025
+2024-07-29 09:32:01,982 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=266073.3333333333, ans=0.125
+2024-07-29 09:32:09,479 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=266086.6666666667, ans=0.125
+2024-07-29 09:32:12,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff3_skip_rate, batch_count=266086.6666666667, ans=0.0
+2024-07-29 09:32:15,630 INFO [train.py:1114] (3/4) Epoch 20, batch 5350, loss[loss=0.147, simple_loss=0.2372, pruned_loss=0.0284, over 4499.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2634, pruned_loss=0.04005, over 936356.70 frames. ], batch size: 10, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:32:18,722 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.68 vs. limit=10.0
+2024-07-29 09:32:39,262 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266113.3333333333, ans=0.125
+2024-07-29 09:32:45,245 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=266113.3333333333, ans=0.125
+2024-07-29 09:32:49,644 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.max_abs, batch_count=266126.6666666667, ans=10.0
+2024-07-29 09:32:53,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=266126.6666666667, ans=0.125
+2024-07-29 09:33:06,389 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.787e+01 6.416e+01 7.278e+01 1.158e+02, threshold=1.283e+02, percent-clipped=0.0
+2024-07-29 09:33:09,780 INFO [train.py:1114] (3/4) Epoch 20, batch 5400, loss[loss=0.2074, simple_loss=0.2886, pruned_loss=0.06308, over 4367.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2641, pruned_loss=0.04067, over 930676.80 frames. ], batch size: 26, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:33:17,787 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=6.53 vs. limit=15.0
+2024-07-29 09:33:22,580 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.prob, batch_count=266193.3333333333, ans=0.125
+2024-07-29 09:33:26,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266193.3333333333, ans=0.125
+2024-07-29 09:33:26,345 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=266193.3333333333, ans=0.0
+2024-07-29 09:33:35,577 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff2_skip_rate, batch_count=266206.6666666667, ans=0.0
+2024-07-29 09:33:46,290 INFO [train.py:1114] (3/4) Epoch 20, batch 5450, loss[loss=0.1295, simple_loss=0.2149, pruned_loss=0.02207, over 4690.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2629, pruned_loss=0.04026, over 933277.62 frames. ], batch size: 11, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:34:23,536 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=4.72 vs. limit=15.0
+2024-07-29 09:34:28,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266246.6666666667, ans=0.1
+2024-07-29 09:34:32,486 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=266246.6666666667, ans=0.2
+2024-07-29 09:34:36,515 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer2.prob, batch_count=266260.0, ans=0.125
+2024-07-29 09:34:46,622 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266286.6666666667, ans=0.1
+2024-07-29 09:34:50,647 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.141e+01 5.730e+01 6.160e+01 6.781e+01 9.375e+01, threshold=1.232e+02, percent-clipped=0.0
+2024-07-29 09:34:51,376 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=11.64 vs. limit=15.0
+2024-07-29 09:34:52,500 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=266286.6666666667, ans=0.0
+2024-07-29 09:34:54,546 INFO [train.py:1114] (3/4) Epoch 20, batch 5500, loss[loss=0.192, simple_loss=0.2853, pruned_loss=0.04934, over 4184.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2621, pruned_loss=0.04007, over 930364.57 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:35:01,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=266313.3333333333, ans=0.0
+2024-07-29 09:35:06,932 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=266313.3333333333, ans=0.2
+2024-07-29 09:35:17,104 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.attention_skip_rate, batch_count=266326.6666666667, ans=0.0
+2024-07-29 09:35:20,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=266340.0, ans=0.125
+2024-07-29 09:35:29,125 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer2.prob, batch_count=266353.3333333333, ans=0.125
+2024-07-29 09:35:34,351 INFO [train.py:1114] (3/4) Epoch 20, batch 5550, loss[loss=0.1417, simple_loss=0.2399, pruned_loss=0.02171, over 4698.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2619, pruned_loss=0.04006, over 932718.04 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:35:36,188 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.27 vs. limit=15.0
+2024-07-29 09:35:49,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer1.min_positive, batch_count=266393.3333333333, ans=0.025
+2024-07-29 09:35:51,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten.whitening_limit, batch_count=266393.3333333333, ans=22.5
+2024-07-29 09:35:58,318 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=266406.6666666667, ans=0.125
+2024-07-29 09:36:04,970 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=266420.0, ans=0.025
+2024-07-29 09:36:07,404 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.955e+01 5.872e+01 6.404e+01 7.729e+01 1.135e+02, threshold=1.281e+02, percent-clipped=0.0
+2024-07-29 09:36:10,510 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=266433.3333333333, ans=0.0
+2024-07-29 09:36:10,949 INFO [train.py:1114] (3/4) Epoch 20, batch 5600, loss[loss=0.1796, simple_loss=0.2782, pruned_loss=0.04047, over 4727.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2621, pruned_loss=0.03984, over 933915.57 frames. ], batch size: 14, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:36:12,609 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=9.99 vs. limit=15.0
+2024-07-29 09:36:21,217 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=266446.6666666667, ans=0.125
+2024-07-29 09:36:24,759 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266446.6666666667, ans=0.1
+2024-07-29 09:36:28,318 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.5.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:36:28,875 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward2.hidden_balancer.prob, batch_count=266460.0, ans=0.125
+2024-07-29 09:36:37,134 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=266473.3333333333, ans=0.1
+2024-07-29 09:36:40,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff2_skip_rate, batch_count=266473.3333333333, ans=0.0
+2024-07-29 09:36:43,797 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=266486.6666666667, ans=0.1
+2024-07-29 09:36:47,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=266486.6666666667, ans=0.09899494936611666
+2024-07-29 09:36:52,283 INFO [train.py:1114] (3/4) Epoch 20, batch 5650, loss[loss=0.1774, simple_loss=0.2782, pruned_loss=0.03826, over 4601.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2614, pruned_loss=0.03962, over 936380.89 frames. ], batch size: 21, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:37:02,501 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=266513.3333333333, ans=0.125
+2024-07-29 09:37:17,765 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.min_positive, batch_count=266540.0, ans=0.05
+2024-07-29 09:37:22,266 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.795e+01 5.835e+01 6.614e+01 7.684e+01 1.140e+02, threshold=1.323e+02, percent-clipped=0.0
+2024-07-29 09:37:25,572 INFO [train.py:1114] (3/4) Epoch 20, batch 5700, loss[loss=0.1908, simple_loss=0.2873, pruned_loss=0.04714, over 4698.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2617, pruned_loss=0.03954, over 937747.13 frames. ], batch size: 13, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:37:38,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=266566.6666666667, ans=0.0
+2024-07-29 09:37:42,671 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=266580.0, ans=0.0
+2024-07-29 09:37:43,864 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=266580.0, ans=0.125
+2024-07-29 09:37:59,474 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.39 vs. limit=10.0
+2024-07-29 09:38:22,559 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266606.6666666667, ans=0.125
+2024-07-29 09:38:24,626 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=266620.0, ans=0.125
+2024-07-29 09:38:31,811 INFO [train.py:1114] (3/4) Epoch 20, batch 5750, loss[loss=0.1938, simple_loss=0.2985, pruned_loss=0.04455, over 4757.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2616, pruned_loss=0.03908, over 937912.11 frames. ], batch size: 19, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:38:55,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.min_positive, batch_count=266646.6666666667, ans=0.05
+2024-07-29 09:38:55,466 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=266646.6666666667, ans=0.125
+2024-07-29 09:39:06,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=266673.3333333333, ans=0.125
+2024-07-29 09:39:06,437 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=384, metric=3.51 vs. limit=15.0
+2024-07-29 09:39:10,460 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.max_positive, batch_count=266673.3333333333, ans=0.95
+2024-07-29 09:39:17,486 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.798e+01 5.816e+01 6.291e+01 7.219e+01 1.004e+02, threshold=1.258e+02, percent-clipped=0.0
+2024-07-29 09:39:20,772 INFO [train.py:1114] (3/4) Epoch 20, batch 5800, loss[loss=0.1785, simple_loss=0.2695, pruned_loss=0.04381, over 4675.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.262, pruned_loss=0.03932, over 936639.06 frames. ], batch size: 19, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:39:24,926 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.prob, batch_count=266700.0, ans=0.125
+2024-07-29 09:39:36,929 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=15.33 vs. limit=22.5
+2024-07-29 09:39:38,631 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=266726.6666666667, ans=0.125
+2024-07-29 09:39:47,159 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.attention_skip_rate, batch_count=266753.3333333333, ans=0.0
+2024-07-29 09:39:52,781 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=266753.3333333333, ans=0.2
+2024-07-29 09:39:55,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=266753.3333333333, ans=0.0
+2024-07-29 09:39:55,952 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=266753.3333333333, ans=0.1
+2024-07-29 09:40:00,085 INFO [train.py:1114] (3/4) Epoch 20, batch 5850, loss[loss=0.1852, simple_loss=0.2837, pruned_loss=0.04334, over 4572.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2623, pruned_loss=0.03987, over 937547.90 frames. ], batch size: 21, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:40:02,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer2.prob, batch_count=266766.6666666667, ans=0.125
+2024-07-29 09:40:18,181 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=266793.3333333333, ans=0.125
+2024-07-29 09:40:31,386 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.976e+01 5.785e+01 6.450e+01 7.129e+01 1.228e+02, threshold=1.290e+02, percent-clipped=0.0
+2024-07-29 09:40:32,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.skip_rate, batch_count=266820.0, ans=0.07
+2024-07-29 09:40:34,113 INFO [train.py:1114] (3/4) Epoch 20, batch 5900, loss[loss=0.1587, simple_loss=0.2506, pruned_loss=0.03341, over 4687.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2619, pruned_loss=0.03994, over 937746.01 frames. ], batch size: 15, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:40:36,902 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=266833.3333333333, ans=0.025
+2024-07-29 09:40:44,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module2.balancer2.min_abs, batch_count=266833.3333333333, ans=0.5
+2024-07-29 09:40:47,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff3_skip_rate, batch_count=266846.6666666667, ans=0.0
+2024-07-29 09:40:56,152 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=266846.6666666667, ans=0.0
+2024-07-29 09:40:58,150 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.bypass_mid.scale_min, batch_count=266846.6666666667, ans=0.2
+2024-07-29 09:41:11,988 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=266873.3333333333, ans=0.09899494936611666
+2024-07-29 09:41:21,156 INFO [train.py:1114] (3/4) Epoch 20, batch 5950, loss[loss=0.1885, simple_loss=0.2713, pruned_loss=0.05289, over 4691.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.262, pruned_loss=0.03991, over 939994.79 frames. ], batch size: 15, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:41:26,305 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=13.17 vs. limit=22.5
+2024-07-29 09:41:31,176 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer1.prob, batch_count=266913.3333333333, ans=0.125
+2024-07-29 09:41:32,583 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=266913.3333333333, ans=0.125
+2024-07-29 09:41:39,652 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff2_skip_rate, batch_count=266913.3333333333, ans=0.0
+2024-07-29 09:41:42,575 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=266926.6666666667, ans=0.125
+2024-07-29 09:41:56,035 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.66 vs. limit=10.0
+2024-07-29 09:42:00,474 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.647e+01 5.654e+01 6.112e+01 6.775e+01 1.038e+02, threshold=1.222e+02, percent-clipped=0.0
+2024-07-29 09:42:27,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=266966.6666666667, ans=0.125
+2024-07-29 09:42:28,146 INFO [train.py:1114] (3/4) Epoch 20, batch 6000, loss[loss=0.192, simple_loss=0.2885, pruned_loss=0.04771, over 4362.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.03944, over 937095.43 frames. ], batch size: 26, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:42:28,147 INFO [train.py:1137] (3/4) Computing validation loss
+2024-07-29 09:42:44,431 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1606, simple_loss=0.2622, pruned_loss=0.02953, over 944034.00 frames.
+2024-07-29 09:42:44,432 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB
+2024-07-29 09:42:57,047 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=266980.0, ans=0.125
+2024-07-29 09:43:04,673 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=267006.6666666667, ans=0.125
+2024-07-29 09:43:18,286 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=267033.3333333333, ans=0.2
+2024-07-29 09:43:18,762 INFO [train.py:1114] (3/4) Epoch 20, batch 6050, loss[loss=0.1805, simple_loss=0.2636, pruned_loss=0.04871, over 4773.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2614, pruned_loss=0.03955, over 938422.23 frames. ], batch size: 12, lr: 3.71e-03, grad_scale: 32.0
+2024-07-29 09:43:22,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.const_attention_rate, batch_count=267033.3333333333, ans=0.025
+2024-07-29 09:43:27,068 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=267033.3333333333, ans=0.2
+2024-07-29 09:43:29,448 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=10.70 vs. limit=15.0
+2024-07-29 09:43:40,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer2.prob, batch_count=267046.6666666667, ans=0.125
+2024-07-29 09:43:54,590 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=267073.3333333333, ans=0.125
+2024-07-29 09:43:55,230 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=267073.3333333333, ans=0.035
+2024-07-29 09:44:05,096 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.461e+01 5.588e+01 6.267e+01 7.088e+01 1.023e+02, threshold=1.253e+02, percent-clipped=0.0
+2024-07-29 09:44:05,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=267086.6666666667, ans=0.2
+2024-07-29 09:44:07,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=5.53 vs. limit=15.0
+2024-07-29 09:44:12,290 INFO [train.py:1114] (3/4) Epoch 20, batch 6100, loss[loss=0.2048, simple_loss=0.2897, pruned_loss=0.0599, over 4690.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2623, pruned_loss=0.03997, over 937473.32 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:44:18,252 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.77 vs. limit=22.5
+2024-07-29 09:44:18,666 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=267100.0, ans=0.1
+2024-07-29 09:44:20,279 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.75 vs. limit=6.0
+2024-07-29 09:44:29,824 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module2.balancer2.prob, batch_count=267126.6666666667, ans=0.125
+2024-07-29 09:44:50,610 INFO [train.py:1114] (3/4) Epoch 20, batch 6150, loss[loss=0.1844, simple_loss=0.2741, pruned_loss=0.04738, over 3436.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2626, pruned_loss=0.03987, over 936095.60 frames. ], batch size: 36, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:44:51,493 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.ff3_skip_rate, batch_count=267166.6666666667, ans=0.0
+2024-07-29 09:45:00,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_skip_rate, batch_count=267180.0, ans=0.0
+2024-07-29 09:45:00,887 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=267180.0, ans=0.2
+2024-07-29 09:45:03,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_skip_rate, batch_count=267180.0, ans=0.0
+2024-07-29 09:45:05,099 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.04 vs. limit=15.0
+2024-07-29 09:45:16,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=267206.6666666667, ans=0.125
+2024-07-29 09:45:26,396 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.706e+01 5.902e+01 6.533e+01 7.507e+01 1.268e+02, threshold=1.307e+02, percent-clipped=1.0
+2024-07-29 09:45:27,349 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=267220.0, ans=0.2
+2024-07-29 09:45:29,241 INFO [train.py:1114] (3/4) Epoch 20, batch 6200, loss[loss=0.1937, simple_loss=0.286, pruned_loss=0.05077, over 4730.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2618, pruned_loss=0.03954, over 935942.98 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:45:32,760 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=267233.3333333333, ans=0.1
+2024-07-29 09:45:53,896 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.conv_module2.whiten, num_groups=1, num_channels=256, metric=3.69 vs. limit=15.0
+2024-07-29 09:46:02,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=267286.6666666667, ans=0.0
+2024-07-29 09:46:05,743 INFO [train.py:1114] (3/4) Epoch 20, batch 6250, loss[loss=0.2173, simple_loss=0.3083, pruned_loss=0.06314, over 4812.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2626, pruned_loss=0.04034, over 932436.69 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:46:16,621 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.const_attention_rate, batch_count=267313.3333333333, ans=0.025
+2024-07-29 09:46:23,379 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.07 vs. limit=15.0
+2024-07-29 09:46:34,762 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.convnext.out_whiten, num_groups=1, num_channels=128, metric=4.59 vs. limit=5.0
+2024-07-29 09:46:39,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=267340.0, ans=0.0
+2024-07-29 09:46:39,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=267340.0, ans=0.125
+2024-07-29 09:46:40,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.bypass.skip_rate, batch_count=267340.0, ans=0.09899494936611666
+2024-07-29 09:46:45,642 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.404e+01 5.746e+01 6.370e+01 7.341e+01 9.825e+01, threshold=1.274e+02, percent-clipped=0.0
+2024-07-29 09:46:56,401 INFO [train.py:1114] (3/4) Epoch 20, batch 6300, loss[loss=0.1367, simple_loss=0.226, pruned_loss=0.02371, over 4552.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2632, pruned_loss=0.04072, over 929239.09 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:46:56,475 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.out_proj.dropout_p, batch_count=267366.6666666667, ans=0.1
+2024-07-29 09:46:59,978 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=256, metric=6.77 vs. limit=15.0
+2024-07-29 09:47:03,192 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=512, metric=18.78 vs. limit=22.5
+2024-07-29 09:47:19,503 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=267406.6666666667, ans=0.125
+2024-07-29 09:47:26,594 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=267420.0, ans=0.2
+2024-07-29 09:47:32,263 INFO [train.py:1114] (3/4) Epoch 20, batch 6350, loss[loss=0.1615, simple_loss=0.2653, pruned_loss=0.02888, over 4532.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2631, pruned_loss=0.04039, over 933355.32 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:47:34,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.prob, batch_count=267433.3333333333, ans=0.125
+2024-07-29 09:47:44,029 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.32 vs. limit=22.5
+2024-07-29 09:47:44,455 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass_mid.scale_min, batch_count=267446.6666666667, ans=0.2
+2024-07-29 09:47:49,222 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.77 vs. limit=22.5
+2024-07-29 09:47:51,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.79 vs. limit=12.0
+2024-07-29 09:48:06,772 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.592e+01 5.677e+01 6.317e+01 7.481e+01 1.107e+02, threshold=1.263e+02, percent-clipped=0.0
+2024-07-29 09:48:06,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.const_attention_rate, batch_count=267486.6666666667, ans=0.025
+2024-07-29 09:48:09,432 INFO [train.py:1114] (3/4) Epoch 20, batch 6400, loss[loss=0.187, simple_loss=0.2807, pruned_loss=0.04671, over 4632.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2632, pruned_loss=0.04031, over 935046.58 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:48:17,676 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module1.balancer2.prob, batch_count=267513.3333333333, ans=0.125
+2024-07-29 09:48:33,034 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer1.prob, batch_count=267540.0, ans=0.125
+2024-07-29 09:48:42,725 INFO [train.py:1114] (3/4) Epoch 20, batch 6450, loss[loss=0.1688, simple_loss=0.2528, pruned_loss=0.04239, over 4582.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2632, pruned_loss=0.04011, over 938758.50 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:48:42,911 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=267566.6666666667, ans=0.05
+2024-07-29 09:48:48,101 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=267566.6666666667, ans=0.07
+2024-07-29 09:48:50,007 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass_mid.scale_min, batch_count=267580.0, ans=0.2
+2024-07-29 09:49:06,065 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267593.3333333333, ans=0.125
+2024-07-29 09:49:13,331 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module1.balancer1.max_abs, batch_count=267606.6666666667, ans=10.0
+2024-07-29 09:49:17,955 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=7.01 vs. limit=15.0
+2024-07-29 09:49:18,333 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.ff2_skip_rate, batch_count=267606.6666666667, ans=0.0
+2024-07-29 09:49:24,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer1.prob, batch_count=267620.0, ans=0.125
+2024-07-29 09:49:43,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267620.0, ans=0.1
+2024-07-29 09:49:44,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.22 vs. limit=15.0
+2024-07-29 09:49:46,233 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.162e+01 5.798e+01 6.360e+01 7.229e+01 1.035e+02, threshold=1.272e+02, percent-clipped=0.0
+2024-07-29 09:49:48,984 INFO [train.py:1114] (3/4) Epoch 20, batch 6500, loss[loss=0.2055, simple_loss=0.2867, pruned_loss=0.06215, over 3508.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.262, pruned_loss=0.03933, over 940232.74 frames. ], batch size: 35, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:50:04,976 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=267646.6666666667, ans=0.125
+2024-07-29 09:50:14,907 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=267660.0, ans=0.125
+2024-07-29 09:50:19,328 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.convnext.out_balancer.prob, batch_count=267673.3333333333, ans=0.125
+2024-07-29 09:50:29,747 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=267686.6666666667, ans=0.1
+2024-07-29 09:50:31,653 INFO [train.py:1114] (3/4) Epoch 20, batch 6550, loss[loss=0.1446, simple_loss=0.225, pruned_loss=0.03209, over 4793.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.261, pruned_loss=0.03876, over 943028.23 frames. ], batch size: 11, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:50:33,843 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.skip_rate, batch_count=267700.0, ans=0.07
+2024-07-29 09:50:36,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.attention_skip_rate, batch_count=267700.0, ans=0.0
+2024-07-29 09:50:44,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module1.balancer2.prob, batch_count=267726.6666666667, ans=0.125
+2024-07-29 09:51:10,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=267726.6666666667, ans=0.2
+2024-07-29 09:51:18,986 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.skip_rate, batch_count=267740.0, ans=0.04949747468305833
+2024-07-29 09:51:23,608 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=267753.3333333333, ans=0.125
+2024-07-29 09:51:26,544 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.591e+01 5.663e+01 6.386e+01 7.207e+01 1.403e+02, threshold=1.277e+02, percent-clipped=3.0
+2024-07-29 09:51:29,172 INFO [train.py:1114] (3/4) Epoch 20, batch 6600, loss[loss=0.1792, simple_loss=0.2712, pruned_loss=0.04363, over 4931.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2609, pruned_loss=0.03876, over 944917.10 frames. ], batch size: 14, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:51:46,941 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=267793.3333333333, ans=0.2
+2024-07-29 09:51:52,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=267806.6666666667, ans=0.0
+2024-07-29 09:52:59,777 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=267820.0, ans=0.125
+2024-07-29 09:53:12,457 INFO [train.py:1114] (3/4) Epoch 20, batch 6650, loss[loss=0.1597, simple_loss=0.2496, pruned_loss=0.03496, over 4604.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2601, pruned_loss=0.0386, over 943422.33 frames. ], batch size: 17, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:53:16,801 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=4.93 vs. limit=12.0
+2024-07-29 09:53:23,630 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.ff2_skip_rate, batch_count=267846.6666666667, ans=0.0
+2024-07-29 09:53:35,857 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=267873.3333333333, ans=0.0
+2024-07-29 09:53:44,858 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.775e+01 5.709e+01 6.420e+01 7.242e+01 1.116e+02, threshold=1.284e+02, percent-clipped=0.0
+2024-07-29 09:53:47,498 INFO [train.py:1114] (3/4) Epoch 20, batch 6700, loss[loss=0.1802, simple_loss=0.2774, pruned_loss=0.04152, over 4757.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2603, pruned_loss=0.03874, over 942073.24 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:54:03,274 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn2.whiten, num_groups=1, num_channels=384, metric=19.48 vs. limit=22.5
+2024-07-29 09:54:03,679 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.skip_rate, batch_count=267913.3333333333, ans=0.07
+2024-07-29 09:54:04,368 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.ff2_skip_rate, batch_count=267913.3333333333, ans=0.0
+2024-07-29 09:54:05,364 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.76 vs. limit=12.0
+2024-07-29 09:54:35,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass.scale_min, batch_count=267940.0, ans=0.2
+2024-07-29 09:54:42,038 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.hidden_balancer.prob, batch_count=267953.3333333333, ans=0.125
+2024-07-29 09:55:03,122 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.skip_rate, batch_count=267953.3333333333, ans=0.09899494936611666
+2024-07-29 09:55:06,360 INFO [train.py:1114] (3/4) Epoch 20, batch 6750, loss[loss=0.1959, simple_loss=0.2807, pruned_loss=0.05548, over 4185.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2611, pruned_loss=0.0392, over 940096.55 frames. ], batch size: 25, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:55:06,421 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.attention_skip_rate, batch_count=267966.6666666667, ans=0.0
+2024-07-29 09:55:15,877 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:55:22,088 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer1.prob, batch_count=267980.0, ans=0.125
+2024-07-29 09:55:35,957 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=267993.3333333333, ans=0.125
+2024-07-29 09:55:52,203 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass_mid.scale_min, batch_count=268020.0, ans=0.2
+2024-07-29 09:55:53,696 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.727e+01 5.999e+01 6.595e+01 7.628e+01 1.756e+02, threshold=1.319e+02, percent-clipped=1.0
+2024-07-29 09:55:56,350 INFO [train.py:1114] (3/4) Epoch 20, batch 6800, loss[loss=0.1865, simple_loss=0.2777, pruned_loss=0.04769, over 4637.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2613, pruned_loss=0.03917, over 938557.47 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:55:56,565 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=268033.3333333333, ans=0.04949747468305833
+2024-07-29 09:56:05,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_skip_rate, batch_count=268046.6666666667, ans=0.0
+2024-07-29 09:56:18,061 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.bypass.scale_min, batch_count=268046.6666666667, ans=0.2
+2024-07-29 09:56:29,884 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:56:34,322 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=268086.6666666667, ans=0.125
+2024-07-29 09:56:40,834 INFO [train.py:1114] (3/4) Epoch 20, batch 6850, loss[loss=0.1612, simple_loss=0.2562, pruned_loss=0.03304, over 4697.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2608, pruned_loss=0.0389, over 940324.53 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:56:52,227 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.balancer_na.min_abs, batch_count=268113.3333333333, ans=0.02
+2024-07-29 09:57:05,744 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=11.85 vs. limit=15.0
+2024-07-29 09:57:06,314 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=512, metric=21.21 vs. limit=22.5
+2024-07-29 09:57:15,280 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.3.encoder.layers.3.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:57:36,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.628e+01 5.831e+01 6.589e+01 8.147e+01 1.219e+02, threshold=1.318e+02, percent-clipped=0.0
+2024-07-29 09:57:38,784 INFO [train.py:1114] (3/4) Epoch 20, batch 6900, loss[loss=0.1819, simple_loss=0.2724, pruned_loss=0.04572, over 4968.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2611, pruned_loss=0.03896, over 942510.36 frames. ], batch size: 13, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:57:48,187 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.81 vs. limit=15.0
+2024-07-29 09:57:49,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268166.6666666667, ans=0.1
+2024-07-29 09:57:53,408 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.attention_skip_rate, batch_count=268180.0, ans=0.0
+2024-07-29 09:58:06,474 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_skip_rate, batch_count=268193.3333333333, ans=0.0
+2024-07-29 09:58:08,480 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=268206.6666666667, ans=0.0
+2024-07-29 09:58:10,036 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=268206.6666666667, ans=0.025
+2024-07-29 09:58:19,536 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=268220.0, ans=0.0
+2024-07-29 09:58:21,472 INFO [train.py:1114] (3/4) Epoch 20, batch 6950, loss[loss=0.1502, simple_loss=0.2323, pruned_loss=0.03406, over 4519.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2612, pruned_loss=0.03916, over 939897.50 frames. ], batch size: 10, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 09:58:29,478 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=268246.6666666667, ans=0.0
+2024-07-29 09:59:16,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.whiten, num_groups=1, num_channels=384, metric=3.38 vs. limit=12.0
+2024-07-29 09:59:17,925 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 09:59:22,415 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=268273.3333333333, ans=0.125
+2024-07-29 09:59:23,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=9.38 vs. limit=15.0
+2024-07-29 09:59:44,202 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.attention_skip_rate, batch_count=268286.6666666667, ans=0.0
+2024-07-29 09:59:44,603 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+01 5.602e+01 6.145e+01 6.791e+01 9.985e+01, threshold=1.229e+02, percent-clipped=0.0
+2024-07-29 10:00:04,782 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=268286.6666666667, ans=0.05
+2024-07-29 10:00:05,970 INFO [train.py:1114] (3/4) Epoch 20, batch 7000, loss[loss=0.1894, simple_loss=0.2812, pruned_loss=0.04878, over 4589.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.261, pruned_loss=0.03909, over 938637.56 frames. ], batch size: 17, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:00:08,692 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268300.0, ans=0.125
+2024-07-29 10:00:12,457 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00
+2024-07-29 10:00:14,083 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.09 vs. limit=6.0
+2024-07-29 10:00:36,962 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.balancer.min_positive, batch_count=268313.3333333333, ans=0.05
+2024-07-29 10:00:52,624 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=268340.0, ans=0.125
+2024-07-29 10:00:57,212 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=19.58 vs. limit=15.0
+2024-07-29 10:01:03,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer1.prob, batch_count=268353.3333333333, ans=0.125
+2024-07-29 10:01:11,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.ff3_skip_rate, batch_count=268353.3333333333, ans=0.0
+2024-07-29 10:01:12,420 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268366.6666666667, ans=0.1
+2024-07-29 10:01:12,962 INFO [train.py:1114] (3/4) Epoch 20, batch 7050, loss[loss=0.185, simple_loss=0.2703, pruned_loss=0.0498, over 4725.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2612, pruned_loss=0.03905, over 942125.77 frames. ], batch size: 19, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:01:25,618 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=268380.0, ans=0.125
+2024-07-29 10:01:37,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.conv_module2.balancer1.prob, batch_count=268406.6666666667, ans=0.125
+2024-07-29 10:01:37,888 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=12.91 vs. limit=15.0
+2024-07-29 10:01:40,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=268420.0, ans=0.125
+2024-07-29 10:01:45,958 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.517e+01 5.713e+01 6.192e+01 7.192e+01 1.067e+02, threshold=1.238e+02, percent-clipped=0.0
+2024-07-29 10:01:49,615 INFO [train.py:1114] (3/4) Epoch 20, batch 7100, loss[loss=0.1577, simple_loss=0.2475, pruned_loss=0.034, over 4801.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2618, pruned_loss=0.03939, over 937115.85 frames. ], batch size: 15, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:01:49,951 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.87 vs. limit=6.0
+2024-07-29 10:01:53,269 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.56 vs. limit=12.0
+2024-07-29 10:02:08,401 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268460.0, ans=0.125
+2024-07-29 10:02:25,397 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_skip_rate, batch_count=268486.6666666667, ans=0.0
+2024-07-29 10:02:28,258 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=2.15 vs. limit=6.0
+2024-07-29 10:02:31,665 INFO [train.py:1114] (3/4) Epoch 20, batch 7150, loss[loss=0.1879, simple_loss=0.283, pruned_loss=0.04641, over 4596.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2598, pruned_loss=0.03872, over 938791.16 frames. ], batch size: 21, lr: 3.70e-03, grad_scale: 32.0
+2024-07-29 10:02:43,650 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=268500.0, ans=0.0
+2024-07-29 10:02:43,743 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=268500.0, ans=0.125
+2024-07-29 10:02:44,526 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.37 vs. limit=15.0
+2024-07-29 10:02:47,514 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=268500.0, ans=0.125
+2024-07-29 10:03:07,824 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.59 vs. limit=10.0
+2024-07-29 10:03:10,874 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=268553.3333333333, ans=0.1
+2024-07-29 10:03:33,075 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.350e+01 5.664e+01 6.258e+01 7.035e+01 1.192e+02, threshold=1.252e+02, percent-clipped=0.0
+2024-07-29 10:03:33,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff2_skip_rate, batch_count=268553.3333333333, ans=0.0
+2024-07-29 10:03:59,860 INFO [train.py:1114] (3/4) Epoch 20, batch 7200, loss[loss=0.2005, simple_loss=0.2907, pruned_loss=0.05512, over 4800.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2613, pruned_loss=0.03947, over 938541.29 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:04:06,971 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268566.6666666667, ans=0.1
+2024-07-29 10:08:22,695 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268606.6666666667, ans=0.1
+2024-07-29 10:09:35,405 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268620.0, ans=0.1
+2024-07-29 10:09:40,804 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.bypass.scale_min, batch_count=268620.0, ans=0.2
+2024-07-29 10:09:48,551 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=268620.0, ans=0.125
+2024-07-29 10:09:49,102 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.balancer1.prob, batch_count=268633.3333333333, ans=0.125
+2024-07-29 10:09:49,548 INFO [train.py:1114] (3/4) Epoch 20, batch 7250, loss[loss=0.148, simple_loss=0.2321, pruned_loss=0.03194, over 4848.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2618, pruned_loss=0.03987, over 939784.18 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:10:30,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.const_attention_rate, batch_count=268660.0, ans=0.025
+2024-07-29 10:10:32,218 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=268673.3333333333, ans=0.125
+2024-07-29 10:10:40,527 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=268673.3333333333, ans=0.125
+2024-07-29 10:11:35,253 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.440e+01 5.763e+01 6.435e+01 7.253e+01 9.940e+01, threshold=1.287e+02, percent-clipped=0.0
+2024-07-29 10:11:35,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.out_combiner.scale_min, batch_count=268686.6666666667, ans=0.2
+2024-07-29 10:11:45,462 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=512, metric=6.65 vs. limit=15.0
+2024-07-29 10:12:08,052 INFO [train.py:1114] (3/4) Epoch 20, batch 7300, loss[loss=0.1423, simple_loss=0.2306, pruned_loss=0.02703, over 4849.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2621, pruned_loss=0.04, over 940509.56 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:17:14,610 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_skip_rate, batch_count=268713.3333333333, ans=0.0
+2024-07-29 10:17:48,138 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=268726.6666666667, ans=0.2
+2024-07-29 10:17:52,537 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=268726.6666666667, ans=0.2
+2024-07-29 10:18:48,883 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.conv_module2.balancer1.prob, batch_count=268740.0, ans=0.125
+2024-07-29 10:18:52,634 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.ff3_skip_rate, batch_count=268753.3333333333, ans=0.0
+2024-07-29 10:19:39,281 INFO [train.py:1114] (3/4) Epoch 20, batch 7350, loss[loss=0.1516, simple_loss=0.2514, pruned_loss=0.02591, over 4636.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2615, pruned_loss=0.03973, over 939907.68 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0
+2024-07-29 10:19:46,274 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=268766.6666666667, ans=0.0
+2024-07-29 10:19:53,426 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.balancer_ff3.min_abs, batch_count=268780.0, ans=0.2
+2024-07-29 10:21:02,027 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268806.6666666667, ans=0.1
+2024-07-29 10:21:06,600 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff3_skip_rate, batch_count=268806.6666666667, ans=0.0
+2024-07-29 10:22:45,110 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.56 vs.
limit=15.0 +2024-07-29 10:22:45,957 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.689e+01 5.764e+01 6.635e+01 7.838e+01 1.063e+02, threshold=1.327e+02, percent-clipped=0.0 +2024-07-29 10:22:59,771 INFO [train.py:1114] (3/4) Epoch 20, batch 7400, loss[loss=0.1585, simple_loss=0.2479, pruned_loss=0.03461, over 4696.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2612, pruned_loss=0.03964, over 940935.56 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:23:44,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=268860.0, ans=0.0 +2024-07-29 10:23:53,379 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=268873.3333333333, ans=0.0 +2024-07-29 10:24:03,115 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=268886.6666666667, ans=0.1 +2024-07-29 10:24:06,255 INFO [train.py:1114] (3/4) Epoch 20, batch 7450, loss[loss=0.1598, simple_loss=0.2509, pruned_loss=0.03431, over 4629.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2608, pruned_loss=0.03959, over 938281.01 frames. ], batch size: 11, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:24:14,085 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer_na.min_abs, batch_count=268913.3333333333, ans=0.02 +2024-07-29 10:25:31,519 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.bypass_mid.scale_min, batch_count=268926.6666666667, ans=0.2 +2024-07-29 10:25:40,794 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=268953.3333333333, ans=0.125 +2024-07-29 10:25:41,503 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=256, metric=8.72 vs. limit=15.0 +2024-07-29 10:25:41,620 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.60 vs. limit=15.0 +2024-07-29 10:25:42,786 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=268953.3333333333, ans=0.1 +2024-07-29 10:25:46,121 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.697e+01 5.622e+01 6.334e+01 7.188e+01 1.210e+02, threshold=1.267e+02, percent-clipped=0.0 +2024-07-29 10:25:51,755 INFO [train.py:1114] (3/4) Epoch 20, batch 7500, loss[loss=0.2267, simple_loss=0.3069, pruned_loss=0.0732, over 3333.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2616, pruned_loss=0.03977, over 936471.71 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:26:24,839 INFO [train.py:1114] (3/4) Epoch 20, batch 7550, loss[loss=0.201, simple_loss=0.284, pruned_loss=0.05901, over 4583.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2624, pruned_loss=0.04029, over 936349.81 frames. ], batch size: 17, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:26:27,605 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269033.3333333333, ans=0.125 +2024-07-29 10:26:39,147 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.whiten, num_groups=1, num_channels=512, metric=5.38 vs. 
limit=12.0 +2024-07-29 10:27:02,772 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=269086.6666666667, ans=0.125 +2024-07-29 10:27:03,442 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269086.6666666667, ans=0.1 +2024-07-29 10:27:06,368 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.638e+01 5.698e+01 6.238e+01 6.861e+01 9.805e+01, threshold=1.248e+02, percent-clipped=0.0 +2024-07-29 10:27:14,599 INFO [train.py:1114] (3/4) Epoch 20, batch 7600, loss[loss=0.1736, simple_loss=0.2571, pruned_loss=0.04508, over 4801.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2615, pruned_loss=0.03985, over 938016.22 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:27:52,971 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.whiten, num_groups=1, num_channels=512, metric=5.55 vs. limit=12.0 +2024-07-29 10:27:53,321 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.const_attention_rate, batch_count=269153.3333333333, ans=0.025 +2024-07-29 10:27:56,437 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=269166.6666666667, ans=0.125 +2024-07-29 10:27:56,948 INFO [train.py:1114] (3/4) Epoch 20, batch 7650, loss[loss=0.1559, simple_loss=0.2392, pruned_loss=0.03632, over 4936.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2619, pruned_loss=0.04019, over 937148.00 frames. ], batch size: 12, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:27:57,769 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer1.prob, batch_count=269166.6666666667, ans=0.125 +2024-07-29 10:28:05,445 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.80 vs. limit=12.0 +2024-07-29 10:28:09,295 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269180.0, ans=0.125 +2024-07-29 10:28:12,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.2.prob, batch_count=269193.3333333333, ans=0.125 +2024-07-29 10:28:55,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff2_skip_rate, batch_count=269206.6666666667, ans=0.0 +2024-07-29 10:29:09,109 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=269220.0, ans=0.125 +2024-07-29 10:29:09,506 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.493e+01 5.550e+01 6.272e+01 7.437e+01 1.310e+02, threshold=1.254e+02, percent-clipped=1.0 +2024-07-29 10:29:13,689 INFO [train.py:1114] (3/4) Epoch 20, batch 7700, loss[loss=0.18, simple_loss=0.2747, pruned_loss=0.04262, over 4693.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.262, pruned_loss=0.04007, over 934614.17 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:29:15,427 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.27 vs. 
limit=6.0 +2024-07-29 10:29:15,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.nonlin_attention.balancer.max_positive, batch_count=269233.3333333333, ans=0.95 +2024-07-29 10:29:29,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=9.96 vs. limit=10.0 +2024-07-29 10:29:33,172 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=269273.3333333333, ans=0.0 +2024-07-29 10:29:48,585 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.self_attn1.whiten, num_groups=1, num_channels=192, metric=19.17 vs. limit=22.5 +2024-07-29 10:29:49,383 INFO [train.py:1114] (3/4) Epoch 20, batch 7750, loss[loss=0.1792, simple_loss=0.2832, pruned_loss=0.03763, over 4930.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2621, pruned_loss=0.03964, over 935861.79 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:29:51,572 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.balancer2.prob, batch_count=269300.0, ans=0.125 +2024-07-29 10:29:54,613 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.conv_module2.whiten, num_groups=1, num_channels=512, metric=3.71 vs. limit=15.0 +2024-07-29 10:30:08,641 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=269313.3333333333, ans=0.2 +2024-07-29 10:30:14,696 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.23 vs. limit=15.0 +2024-07-29 10:30:16,664 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=269326.6666666667, ans=0.1 +2024-07-29 10:30:17,330 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.skip_rate, batch_count=269326.6666666667, ans=0.035 +2024-07-29 10:30:24,463 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=269353.3333333333, ans=0.125 +2024-07-29 10:30:28,809 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.865e+01 5.687e+01 6.141e+01 6.601e+01 8.666e+01, threshold=1.228e+02, percent-clipped=0.0 +2024-07-29 10:30:31,962 INFO [train.py:1114] (3/4) Epoch 20, batch 7800, loss[loss=0.184, simple_loss=0.2768, pruned_loss=0.04559, over 4663.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2631, pruned_loss=0.03958, over 937491.04 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:31:22,443 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.balancer2.prob, batch_count=269380.0, ans=0.125 +2024-07-29 10:31:26,703 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=269393.3333333333, ans=0.125 +2024-07-29 10:31:46,477 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=16.57 vs. limit=22.5 +2024-07-29 10:31:48,022 INFO [train.py:1114] (3/4) Epoch 20, batch 7850, loss[loss=0.169, simple_loss=0.2603, pruned_loss=0.03886, over 4503.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2636, pruned_loss=0.04004, over 936289.35 frames. 
], batch size: 10, lr: 3.69e-03, grad_scale: 32.0 +2024-07-29 10:31:50,817 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module1.balancer1.prob, batch_count=269433.3333333333, ans=0.125 +2024-07-29 10:31:53,687 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=6.34 vs. limit=15.0 +2024-07-29 10:32:16,653 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=269486.6666666667, ans=0.2 +2024-07-29 10:32:18,451 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.034e+01 5.765e+01 6.588e+01 7.311e+01 1.076e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 10:32:21,185 INFO [train.py:1114] (3/4) Epoch 20, batch 7900, loss[loss=0.1849, simple_loss=0.2662, pruned_loss=0.0518, over 4866.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2642, pruned_loss=0.04023, over 933685.61 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:32:24,614 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.ff3_skip_rate, batch_count=269500.0, ans=0.0 +2024-07-29 10:32:25,153 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=269500.0, ans=0.125 +2024-07-29 10:33:05,232 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.ff3_skip_rate, batch_count=269553.3333333333, ans=0.0 +2024-07-29 10:33:07,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=6.34 vs. limit=10.0 +2024-07-29 10:33:09,094 INFO [train.py:1114] (3/4) Epoch 20, batch 7950, loss[loss=0.2155, simple_loss=0.302, pruned_loss=0.06452, over 3295.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2632, pruned_loss=0.03961, over 935637.83 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:33:23,561 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.min_positive, batch_count=269580.0, ans=0.025 +2024-07-29 10:33:26,279 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.const_attention_rate, batch_count=269580.0, ans=0.025 +2024-07-29 10:33:30,100 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=269593.3333333333, ans=0.125 +2024-07-29 10:33:39,933 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.attention_skip_rate, batch_count=269620.0, ans=0.0 +2024-07-29 10:33:44,266 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.355e+01 5.756e+01 6.342e+01 7.191e+01 1.019e+02, threshold=1.268e+02, percent-clipped=0.0 +2024-07-29 10:33:46,951 INFO [train.py:1114] (3/4) Epoch 20, batch 8000, loss[loss=0.1389, simple_loss=0.2269, pruned_loss=0.02547, over 4625.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.262, pruned_loss=0.0396, over 934706.94 frames. ], batch size: 11, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:34:12,416 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=269646.6666666667, ans=0.125 +2024-07-29 10:34:17,637 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.64 vs. 
limit=15.0 +2024-07-29 10:34:18,147 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=269660.0, ans=0.0 +2024-07-29 10:34:20,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.const_attention_rate, batch_count=269660.0, ans=0.025 +2024-07-29 10:34:37,423 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=269686.6666666667, ans=0.125 +2024-07-29 10:34:48,977 INFO [train.py:1114] (3/4) Epoch 20, batch 8050, loss[loss=0.1593, simple_loss=0.262, pruned_loss=0.02826, over 4813.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.262, pruned_loss=0.0393, over 934326.71 frames. ], batch size: 14, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:34:49,099 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=269700.0, ans=0.0 +2024-07-29 10:34:49,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn2.whiten, num_groups=1, num_channels=192, metric=9.59 vs. limit=22.5 +2024-07-29 10:35:01,934 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=7.08 vs. limit=15.0 +2024-07-29 10:35:09,050 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.conv_module2.balancer1.prob, batch_count=269726.6666666667, ans=0.125 +2024-07-29 10:35:19,551 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=10.91 vs. limit=15.0 +2024-07-29 10:35:21,086 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.839e+01 5.695e+01 6.260e+01 6.907e+01 1.067e+02, threshold=1.252e+02, percent-clipped=0.0 +2024-07-29 10:35:35,910 INFO [train.py:1114] (3/4) Epoch 20, batch 8100, loss[loss=0.194, simple_loss=0.2847, pruned_loss=0.05167, over 4799.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.263, pruned_loss=0.03979, over 934223.47 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:35:38,070 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_skip_rate, batch_count=269766.6666666667, ans=0.0 +2024-07-29 10:35:40,558 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.attention_skip_rate, batch_count=269766.6666666667, ans=0.0 +2024-07-29 10:35:51,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=269793.3333333333, ans=0.1 +2024-07-29 10:35:54,518 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=269793.3333333333, ans=0.0 +2024-07-29 10:35:59,058 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=269806.6666666667, ans=0.0 +2024-07-29 10:36:00,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=269806.6666666667, ans=0.0 +2024-07-29 10:36:05,839 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass_mid.scale_min, batch_count=269820.0, ans=0.2 +2024-07-29 10:36:08,436 INFO [train.py:1114] (3/4) Epoch 20, batch 8150, loss[loss=0.1779, simple_loss=0.2836, pruned_loss=0.03609, over 4805.00 frames. 
], tot_loss[loss=0.1712, simple_loss=0.2626, pruned_loss=0.03994, over 937371.93 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:36:08,695 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=5.90 vs. limit=15.0 +2024-07-29 10:36:34,057 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=269846.6666666667, ans=0.0 +2024-07-29 10:36:52,370 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.965e+01 5.540e+01 6.167e+01 6.859e+01 1.030e+02, threshold=1.233e+02, percent-clipped=0.0 +2024-07-29 10:36:55,166 INFO [train.py:1114] (3/4) Epoch 20, batch 8200, loss[loss=0.174, simple_loss=0.269, pruned_loss=0.03948, over 4796.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2637, pruned_loss=0.04008, over 938601.34 frames. ], batch size: 15, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:37:02,092 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=384, metric=6.03 vs. limit=15.0 +2024-07-29 10:37:06,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_module1.balancer2.prob, batch_count=269913.3333333333, ans=0.125 +2024-07-29 10:38:01,282 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=512, metric=13.10 vs. limit=15.0 +2024-07-29 10:38:11,128 INFO [train.py:1114] (3/4) Epoch 20, batch 8250, loss[loss=0.156, simple_loss=0.2528, pruned_loss=0.02959, over 4901.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.263, pruned_loss=0.04, over 938766.20 frames. ], batch size: 13, lr: 3.69e-03, grad_scale: 64.0 +2024-07-29 10:38:25,934 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.8.prob, batch_count=269980.0, ans=0.125 +2024-07-29 10:38:42,339 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=270006.6666666667, ans=0.0 +2024-07-29 10:38:42,955 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270006.6666666667, ans=0.125 +2024-07-29 10:38:46,412 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=270020.0, ans=0.125 +2024-07-29 10:38:49,951 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.571e+01 5.582e+01 6.021e+01 6.658e+01 1.061e+02, threshold=1.204e+02, percent-clipped=0.0 +2024-07-29 10:38:50,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.balancer1.prob, batch_count=270020.0, ans=0.125 +2024-07-29 10:38:52,537 INFO [train.py:1114] (3/4) Epoch 20, batch 8300, loss[loss=0.2072, simple_loss=0.3052, pruned_loss=0.05459, over 4908.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2643, pruned_loss=0.0405, over 938564.07 frames. 
], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:38:58,427 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=270046.6666666667, ans=0.125 +2024-07-29 10:39:02,381 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=270046.6666666667, ans=0.2 +2024-07-29 10:39:05,024 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=270060.0, ans=0.125 +2024-07-29 10:39:28,021 INFO [train.py:1114] (3/4) Epoch 20, batch 8350, loss[loss=0.1698, simple_loss=0.2651, pruned_loss=0.03724, over 4799.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.264, pruned_loss=0.04032, over 941389.22 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:39:33,601 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=512, metric=11.97 vs. limit=15.0 +2024-07-29 10:39:45,267 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.conv_module2.balancer1.prob, batch_count=270126.6666666667, ans=0.125 +2024-07-29 10:39:49,684 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=270140.0, ans=0.0 +2024-07-29 10:39:51,031 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.out_combiner.scale_min, batch_count=270140.0, ans=0.2 +2024-07-29 10:39:53,292 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.conv_module2.whiten, num_groups=1, num_channels=384, metric=2.35 vs. limit=15.0 +2024-07-29 10:39:57,942 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.685e+01 5.732e+01 6.400e+01 7.266e+01 9.706e+01, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 10:40:00,644 INFO [train.py:1114] (3/4) Epoch 20, batch 8400, loss[loss=0.1513, simple_loss=0.2436, pruned_loss=0.02944, over 4772.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2633, pruned_loss=0.04019, over 940168.29 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:40:00,744 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270166.6666666667, ans=0.1 +2024-07-29 10:40:13,440 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=270180.0, ans=0.2 +2024-07-29 10:40:15,273 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=270180.0, ans=0.125 +2024-07-29 10:40:25,250 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.balancer1.prob, batch_count=270193.3333333333, ans=0.125 +2024-07-29 10:40:29,667 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=270206.6666666667, ans=0.125 +2024-07-29 10:40:37,114 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=270220.0, ans=0.125 +2024-07-29 10:40:41,434 INFO [train.py:1114] (3/4) Epoch 20, batch 8450, loss[loss=0.2019, simple_loss=0.3015, pruned_loss=0.0512, over 4802.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.264, pruned_loss=0.04014, over 939321.52 frames. 
], batch size: 15, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:40:43,507 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=270233.3333333333, ans=0.125 +2024-07-29 10:40:43,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=6.39 vs. limit=15.0 +2024-07-29 10:40:46,515 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=6.50 vs. limit=15.0 +2024-07-29 10:41:04,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=270273.3333333333, ans=0.2 +2024-07-29 10:41:12,716 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.341e+01 5.700e+01 6.559e+01 7.490e+01 1.068e+02, threshold=1.312e+02, percent-clipped=0.0 +2024-07-29 10:41:20,972 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=256, metric=5.16 vs. limit=15.0 +2024-07-29 10:41:22,381 INFO [train.py:1114] (3/4) Epoch 20, batch 8500, loss[loss=0.1388, simple_loss=0.2247, pruned_loss=0.0264, over 4629.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.263, pruned_loss=0.03976, over 938962.08 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:41:25,264 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=4.07 vs. limit=12.0 +2024-07-29 10:41:27,296 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270300.0, ans=0.125 +2024-07-29 10:41:27,505 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_whiten.whitening_limit, batch_count=270300.0, ans=15.0 +2024-07-29 10:41:27,935 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270300.0, ans=0.1 +2024-07-29 10:41:28,179 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.01 vs. limit=15.0 +2024-07-29 10:41:29,182 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=270313.3333333333, ans=0.0 +2024-07-29 10:41:37,374 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward3.hidden_balancer.prob, batch_count=270326.6666666667, ans=0.125 +2024-07-29 10:41:40,728 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward1.out_proj.dropout_p, batch_count=270326.6666666667, ans=0.1 +2024-07-29 10:41:40,892 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=270326.6666666667, ans=0.1 +2024-07-29 10:41:56,529 INFO [train.py:1114] (3/4) Epoch 20, batch 8550, loss[loss=0.1257, simple_loss=0.2189, pruned_loss=0.01624, over 4800.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2622, pruned_loss=0.03922, over 939949.30 frames. 
], batch size: 11, lr: 3.68e-03, grad_scale: 64.0 +2024-07-29 10:42:01,320 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=270366.6666666667, ans=0.0 +2024-07-29 10:42:28,841 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.852e+01 5.753e+01 6.246e+01 7.230e+01 1.151e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 10:42:29,557 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.prob, batch_count=270420.0, ans=0.125 +2024-07-29 10:42:30,774 INFO [train.py:1114] (3/4) Epoch 20, batch 8600, loss[loss=0.1701, simple_loss=0.2576, pruned_loss=0.04131, over 4803.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2618, pruned_loss=0.03947, over 939306.89 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:42:31,837 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.04 vs. limit=10.0 +2024-07-29 10:42:40,828 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.const_attention_rate, batch_count=270433.3333333333, ans=0.025 +2024-07-29 10:42:42,079 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=270446.6666666667, ans=0.2 +2024-07-29 10:42:44,186 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.bypass.scale_min, batch_count=270446.6666666667, ans=0.2 +2024-07-29 10:42:57,398 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=270473.3333333333, ans=0.0 +2024-07-29 10:43:08,642 INFO [train.py:1114] (3/4) Epoch 20, batch 8650, loss[loss=0.202, simple_loss=0.2869, pruned_loss=0.05852, over 4909.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2631, pruned_loss=0.04024, over 940735.32 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:43:10,399 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=5.84 vs. limit=10.0 +2024-07-29 10:43:21,752 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=8.30 vs. limit=15.0 +2024-07-29 10:43:24,283 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.conv_module1.balancer1.prob, batch_count=270526.6666666667, ans=0.125 +2024-07-29 10:43:40,854 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_skip_rate, batch_count=270553.3333333333, ans=0.0 +2024-07-29 10:43:41,993 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.826e+01 5.889e+01 6.581e+01 7.578e+01 1.021e+02, threshold=1.316e+02, percent-clipped=0.0 +2024-07-29 10:43:43,908 INFO [train.py:1114] (3/4) Epoch 20, batch 8700, loss[loss=0.1784, simple_loss=0.2817, pruned_loss=0.03754, over 4761.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2638, pruned_loss=0.04045, over 938399.03 frames. 
], batch size: 13, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:43:49,336 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.bypass.skip_rate, batch_count=270566.6666666667, ans=0.07 +2024-07-29 10:43:57,064 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=270593.3333333333, ans=0.125 +2024-07-29 10:44:07,165 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.out_whiten.whitening_limit, batch_count=270606.6666666667, ans=15.0 +2024-07-29 10:44:12,271 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.feed_forward3.out_whiten, num_groups=1, num_channels=256, metric=6.71 vs. limit=15.0 +2024-07-29 10:44:16,439 INFO [train.py:1114] (3/4) Epoch 20, batch 8750, loss[loss=0.1911, simple_loss=0.2796, pruned_loss=0.05126, over 4680.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2638, pruned_loss=0.04035, over 936794.16 frames. ], batch size: 15, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:44:26,527 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=4.74 vs. limit=6.0 +2024-07-29 10:44:28,311 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.nonlin_attention.whiten1, num_groups=1, num_channels=288, metric=3.59 vs. limit=10.0 +2024-07-29 10:44:32,009 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.2.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 10:44:32,567 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=270646.6666666667, ans=0.0 +2024-07-29 10:44:36,445 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.bypass.scale_min, batch_count=270660.0, ans=0.2 +2024-07-29 10:44:37,051 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.out_combiner.scale_min, batch_count=270660.0, ans=0.2 +2024-07-29 10:44:46,597 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270673.3333333333, ans=0.125 +2024-07-29 10:44:49,210 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=270686.6666666667, ans=0.0 +2024-07-29 10:44:54,754 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=270686.6666666667, ans=0.0 +2024-07-29 10:44:55,916 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.478e+01 5.802e+01 6.247e+01 7.031e+01 1.068e+02, threshold=1.249e+02, percent-clipped=0.0 +2024-07-29 10:44:57,842 INFO [train.py:1114] (3/4) Epoch 20, batch 8800, loss[loss=0.1769, simple_loss=0.2724, pruned_loss=0.04071, over 4932.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2634, pruned_loss=0.03984, over 937715.55 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:45:22,901 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_skip_rate, batch_count=270740.0, ans=0.0 +2024-07-29 10:45:28,614 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.whiten, num_groups=1, num_channels=384, metric=5.22 vs. 
limit=12.0 +2024-07-29 10:45:31,699 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=270753.3333333333, ans=0.0 +2024-07-29 10:45:32,208 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=270753.3333333333, ans=0.0 +2024-07-29 10:45:34,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_module2.balancer2.prob, batch_count=270753.3333333333, ans=0.125 +2024-07-29 10:45:35,523 INFO [train.py:1114] (3/4) Epoch 20, batch 8850, loss[loss=0.1744, simple_loss=0.268, pruned_loss=0.04038, over 4473.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2635, pruned_loss=0.04024, over 932391.27 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:45:38,254 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270766.6666666667, ans=0.125 +2024-07-29 10:45:54,297 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=3.54 vs. limit=12.0 +2024-07-29 10:46:01,087 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=270793.3333333333, ans=0.125 +2024-07-29 10:46:01,678 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=270793.3333333333, ans=0.125 +2024-07-29 10:46:05,729 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.0.conv_module1.whiten, num_groups=1, num_channels=384, metric=3.65 vs. limit=15.0 +2024-07-29 10:46:28,851 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.feed_forward1.hidden_balancer.prob, batch_count=270820.0, ans=0.125 +2024-07-29 10:46:30,254 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.550e+01 5.751e+01 6.544e+01 7.566e+01 1.087e+02, threshold=1.309e+02, percent-clipped=0.0 +2024-07-29 10:46:32,206 INFO [train.py:1114] (3/4) Epoch 20, batch 8900, loss[loss=0.172, simple_loss=0.2508, pruned_loss=0.04663, over 4934.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2637, pruned_loss=0.04009, over 930058.28 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:46:34,555 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=256, metric=17.76 vs. limit=22.5 +2024-07-29 10:46:35,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.2.self_attn1.whiten, num_groups=1, num_channels=384, metric=14.27 vs. limit=22.5 +2024-07-29 10:46:37,661 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.36 vs. 
limit=6.0 +2024-07-29 10:46:43,987 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer1.prob, batch_count=270846.6666666667, ans=0.125 +2024-07-29 10:46:59,278 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.conv_skip_rate, batch_count=270886.6666666667, ans=0.0 +2024-07-29 10:47:00,545 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module2.balancer2.prob, batch_count=270886.6666666667, ans=0.125 +2024-07-29 10:47:06,559 INFO [train.py:1114] (3/4) Epoch 20, batch 8950, loss[loss=0.1699, simple_loss=0.2742, pruned_loss=0.03284, over 4494.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2632, pruned_loss=0.04003, over 931145.77 frames. ], batch size: 21, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:47:36,960 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass.scale_min, batch_count=270926.6666666667, ans=0.2 +2024-07-29 10:48:46,044 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.feed_forward2.hidden_balancer.prob, batch_count=270940.0, ans=0.125 +2024-07-29 10:49:18,496 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=270953.3333333333, ans=0.1 +2024-07-29 10:49:33,598 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.383e+01 5.554e+01 6.323e+01 6.834e+01 1.028e+02, threshold=1.265e+02, percent-clipped=0.0 +2024-07-29 10:49:37,841 INFO [train.py:1114] (3/4) Epoch 20, batch 9000, loss[loss=0.1766, simple_loss=0.2696, pruned_loss=0.04178, over 4641.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2623, pruned_loss=0.0398, over 933890.65 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:49:37,842 INFO [train.py:1137] (3/4) Computing validation loss +2024-07-29 10:51:12,529 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.4.encoder.layers.0.self_attn_weights, attn_weights_entropy = tensor([2.6067, 2.8018, 3.3276, 3.3001], device='cuda:3') +2024-07-29 10:51:33,795 INFO [zipformer.py:1858] (3/4) name=encoder.encoders.2.encoder.layers.2.self_attn_weights, attn_weights_entropy = tensor([3.4186, 4.5713, 5.1654, 4.3697], device='cuda:3') +2024-07-29 10:51:38,762 INFO [train.py:1146] (3/4) Epoch 20, validation: loss=0.1604, simple_loss=0.262, pruned_loss=0.02938, over 944034.00 frames. +2024-07-29 10:51:38,763 INFO [train.py:1147] (3/4) Maximum memory allocated so far is 4142MB +2024-07-29 10:52:07,620 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.skip_rate, batch_count=270980.0, ans=0.07 +2024-07-29 10:53:22,593 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.feed_forward1.hidden_balancer.prob, batch_count=271020.0, ans=0.125 +2024-07-29 10:53:27,065 INFO [train.py:1114] (3/4) Epoch 20, batch 9050, loss[loss=0.1444, simple_loss=0.2214, pruned_loss=0.03374, over 4497.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.262, pruned_loss=0.03971, over 934160.03 frames. ], batch size: 10, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:53:28,937 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=4.48 vs. 
limit=12.0 +2024-07-29 10:53:35,584 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=271033.3333333333, ans=0.0 +2024-07-29 10:53:35,587 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_skip_rate, batch_count=271033.3333333333, ans=0.0 +2024-07-29 10:53:41,116 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.30 vs. limit=15.0 +2024-07-29 10:53:46,507 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.2.feed_forward2.out_whiten, num_groups=1, num_channels=512, metric=9.82 vs. limit=15.0 +2024-07-29 10:54:10,599 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.ff2_skip_rate, batch_count=271073.3333333333, ans=0.0 +2024-07-29 10:54:12,766 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.balancer.prob, batch_count=271073.3333333333, ans=0.125 +2024-07-29 10:54:13,588 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.1.whiten, num_groups=1, num_channels=256, metric=5.76 vs. limit=12.0 +2024-07-29 10:54:16,894 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271073.3333333333, ans=0.125 +2024-07-29 10:54:18,187 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward3.hidden_balancer.prob, batch_count=271086.6666666667, ans=0.125 +2024-07-29 10:54:18,876 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.attention_skip_rate, batch_count=271086.6666666667, ans=0.0 +2024-07-29 10:54:20,111 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=271086.6666666667, ans=0.0 +2024-07-29 10:54:22,476 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.720e+01 5.730e+01 6.143e+01 7.007e+01 1.074e+02, threshold=1.229e+02, percent-clipped=0.0 +2024-07-29 10:54:30,294 INFO [train.py:1114] (3/4) Epoch 20, batch 9100, loss[loss=0.1757, simple_loss=0.2577, pruned_loss=0.04682, over 4932.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2615, pruned_loss=0.03942, over 936600.57 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:54:35,449 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271100.0, ans=0.1 +2024-07-29 10:54:59,189 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271126.6666666667, ans=0.1 +2024-07-29 10:55:09,590 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=256, metric=7.71 vs. limit=15.0 +2024-07-29 10:55:19,602 INFO [train.py:1114] (3/4) Epoch 20, batch 9150, loss[loss=0.1887, simple_loss=0.2869, pruned_loss=0.04527, over 4809.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.0398, over 935285.91 frames. ], batch size: 14, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:56:35,600 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.nonlin_attention.whiten1, num_groups=1, num_channels=384, metric=7.14 vs. 
limit=10.0 +2024-07-29 10:56:52,955 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.481e+01 5.764e+01 6.206e+01 7.056e+01 9.843e+01, threshold=1.241e+02, percent-clipped=0.0 +2024-07-29 10:56:55,341 INFO [train.py:1114] (3/4) Epoch 20, batch 9200, loss[loss=0.1733, simple_loss=0.2624, pruned_loss=0.04216, over 4843.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2618, pruned_loss=0.04002, over 937203.27 frames. ], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:56:57,263 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=271233.3333333333, ans=0.1 +2024-07-29 10:57:08,226 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.conv_module2.balancer2.min_abs, batch_count=271246.6666666667, ans=0.5 +2024-07-29 10:57:27,504 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271286.6666666667, ans=0.125 +2024-07-29 10:57:31,171 INFO [train.py:1114] (3/4) Epoch 20, batch 9250, loss[loss=0.1508, simple_loss=0.2407, pruned_loss=0.03049, over 4636.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2608, pruned_loss=0.03981, over 937965.10 frames. ], batch size: 13, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:57:36,388 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=192, metric=7.98 vs. limit=15.0 +2024-07-29 10:57:40,636 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward1.out_whiten, num_groups=1, num_channels=384, metric=8.09 vs. limit=15.0 +2024-07-29 10:58:02,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=271340.0, ans=0.0 +2024-07-29 10:58:10,948 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=8.41 vs. limit=15.0 +2024-07-29 10:58:11,146 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.506e+01 5.724e+01 6.498e+01 7.478e+01 1.094e+02, threshold=1.300e+02, percent-clipped=0.0 +2024-07-29 10:58:14,102 INFO [train.py:1114] (3/4) Epoch 20, batch 9300, loss[loss=0.1539, simple_loss=0.2446, pruned_loss=0.03159, over 4773.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2604, pruned_loss=0.03973, over 938172.41 frames. 
], batch size: 12, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 10:58:41,547 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.ff3_skip_rate, batch_count=271380.0, ans=0.0 +2024-07-29 10:58:46,546 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.const_attention_rate, batch_count=271380.0, ans=0.025 +2024-07-29 10:58:51,823 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.attention_skip_rate, batch_count=271393.3333333333, ans=0.0 +2024-07-29 10:58:55,069 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=271393.3333333333, ans=0.0 +2024-07-29 10:58:55,767 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.nonlin_attention.balancer.prob, batch_count=271393.3333333333, ans=0.125 +2024-07-29 10:58:57,757 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.feed_forward1.hidden_balancer.prob, batch_count=271393.3333333333, ans=0.125 +2024-07-29 10:59:06,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.5.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=3.12 vs. limit=12.0 +2024-07-29 10:59:07,255 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.balancer2.prob, batch_count=271406.6666666667, ans=0.125 +2024-07-29 10:59:09,711 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.ff2_skip_rate, batch_count=271406.6666666667, ans=0.0 +2024-07-29 10:59:10,748 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=14.44 vs. limit=15.0 +2024-07-29 11:01:37,036 INFO [train.py:1114] (3/4) Epoch 20, batch 9350, loss[loss=0.1553, simple_loss=0.2454, pruned_loss=0.03261, over 4813.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2599, pruned_loss=0.03968, over 934791.25 frames. ], batch size: 11, lr: 3.68e-03, grad_scale: 32.0 +2024-07-29 11:01:55,927 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=271433.3333333333, ans=0.125 +2024-07-29 11:02:04,373 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=271433.3333333333, ans=0.125 +2024-07-29 11:02:12,002 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.nonlin_attention.balancer.prob, batch_count=271433.3333333333, ans=0.125 +2024-07-29 11:02:32,724 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=271446.6666666667, ans=0.125 +2024-07-29 11:02:39,233 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.conv_skip_rate, batch_count=271446.6666666667, ans=0.0 +2024-07-29 11:02:52,212 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=271460.0, ans=0.0 +2024-07-29 11:02:57,243 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.balancer2.prob, batch_count=271473.3333333333, ans=0.125 +2024-07-29 11:02:58,051 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.conv_module2.whiten, num_groups=1, num_channels=512, metric=5.87 vs. 
limit=15.0 +2024-07-29 11:03:03,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.ff3_skip_rate, batch_count=271486.6666666667, ans=0.0 +2024-07-29 11:03:22,476 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.890e+01 5.751e+01 6.401e+01 7.888e+01 1.207e+02, threshold=1.280e+02, percent-clipped=0.0 +2024-07-29 11:03:24,395 INFO [train.py:1114] (3/4) Epoch 20, batch 9400, loss[loss=0.1803, simple_loss=0.2776, pruned_loss=0.04153, over 4702.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.261, pruned_loss=0.04012, over 933069.14 frames. ], batch size: 13, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:03:28,893 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.nonlin_attention.whiten1, num_groups=1, num_channels=144, metric=4.34 vs. limit=10.0 +2024-07-29 11:03:33,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.balancer2.prob, batch_count=271513.3333333333, ans=0.125 +2024-07-29 11:03:39,579 INFO [scaling.py:1024] (3/4) Whitening: name=encoder_embed.out_whiten, num_groups=1, num_channels=192, metric=7.75 vs. limit=8.0 +2024-07-29 11:04:01,716 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271553.3333333333, ans=0.1 +2024-07-29 11:04:02,884 INFO [train.py:1114] (3/4) Epoch 20, batch 9450, loss[loss=0.1386, simple_loss=0.2216, pruned_loss=0.0278, over 4801.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2611, pruned_loss=0.03961, over 931922.28 frames. ], batch size: 11, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:04:05,495 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.max_positive, batch_count=271566.6666666667, ans=0.95 +2024-07-29 11:04:07,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=271566.6666666667, ans=0.2 +2024-07-29 11:04:09,950 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271580.0, ans=0.1 +2024-07-29 11:04:11,334 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module2.balancer1.prob, batch_count=271580.0, ans=0.125 +2024-07-29 11:04:21,357 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=271593.3333333333, ans=0.125 +2024-07-29 11:04:29,300 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=271620.0, ans=0.0 +2024-07-29 11:04:31,451 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.2.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=10.87 vs. limit=15.0 +2024-07-29 11:04:33,853 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.597e+01 5.763e+01 6.160e+01 6.815e+01 1.077e+02, threshold=1.232e+02, percent-clipped=0.0 +2024-07-29 11:04:35,849 INFO [train.py:1114] (3/4) Epoch 20, batch 9500, loss[loss=0.1474, simple_loss=0.2487, pruned_loss=0.02299, over 4697.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2626, pruned_loss=0.03971, over 934086.98 frames. 
], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:04:36,585 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward3.hidden_balancer.prob, batch_count=271633.3333333333, ans=0.125 +2024-07-29 11:04:50,750 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=271660.0, ans=0.1 +2024-07-29 11:04:56,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=271673.3333333333, ans=0.125 +2024-07-29 11:05:02,168 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_skip_rate, batch_count=271686.6666666667, ans=0.0 +2024-07-29 11:05:02,800 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.conv_module1.balancer1.prob, batch_count=271686.6666666667, ans=0.125 +2024-07-29 11:05:08,686 INFO [train.py:1114] (3/4) Epoch 20, batch 9550, loss[loss=0.1757, simple_loss=0.2551, pruned_loss=0.04818, over 4780.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2619, pruned_loss=0.03974, over 931144.76 frames. ], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:05:14,834 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=271713.3333333333, ans=0.125 +2024-07-29 11:05:24,517 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=271713.3333333333, ans=0.125 +2024-07-29 11:05:42,141 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.attention_skip_rate, batch_count=271726.6666666667, ans=0.0 +2024-07-29 11:05:48,492 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.1.const_attention_rate, batch_count=271740.0, ans=0.025 +2024-07-29 11:05:48,541 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_module2.balancer2.prob, batch_count=271740.0, ans=0.125 +2024-07-29 11:05:49,708 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer2.min_positive, batch_count=271740.0, ans=0.05 +2024-07-29 11:05:49,870 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass.scale_min, batch_count=271740.0, ans=0.2 +2024-07-29 11:05:54,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer2.prob, batch_count=271753.3333333333, ans=0.125 +2024-07-29 11:05:54,981 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271753.3333333333, ans=0.1 +2024-07-29 11:05:59,120 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271753.3333333333, ans=0.125 +2024-07-29 11:06:00,809 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.785e+01 5.730e+01 6.530e+01 7.456e+01 1.001e+02, threshold=1.306e+02, percent-clipped=0.0 +2024-07-29 11:06:02,402 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.feed_forward1.out_proj.dropout_p, batch_count=271766.6666666667, ans=0.1 +2024-07-29 11:06:02,900 INFO [train.py:1114] (3/4) Epoch 20, batch 9600, loss[loss=0.2067, simple_loss=0.3042, pruned_loss=0.05458, over 3432.00 frames. 
], tot_loss[loss=0.1704, simple_loss=0.2618, pruned_loss=0.03948, over 930044.26 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:08,619 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.whiten, num_groups=1, num_channels=256, metric=4.45 vs. limit=12.0 +2024-07-29 11:06:12,638 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.conv_module1.whiten, num_groups=1, num_channels=192, metric=7.44 vs. limit=15.0 +2024-07-29 11:06:12,848 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.balancer2.prob, batch_count=271780.0, ans=0.125 +2024-07-29 11:06:15,319 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.1.self_attn_weights.pos_emb_skip_rate, batch_count=271780.0, ans=0.0 +2024-07-29 11:06:17,209 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=271793.3333333333, ans=0.1 +2024-07-29 11:06:39,305 INFO [train.py:1114] (3/4) Epoch 20, batch 9650, loss[loss=0.2054, simple_loss=0.2972, pruned_loss=0.05683, over 4828.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2616, pruned_loss=0.03958, over 926544.77 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:06:39,342 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.prob, batch_count=271833.3333333333, ans=0.125 +2024-07-29 11:06:42,669 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.conv_module2.balancer1.prob, batch_count=271833.3333333333, ans=0.125 +2024-07-29 11:06:49,223 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder_embed.conv.5.prob, batch_count=271846.6666666667, ans=0.125 +2024-07-29 11:07:01,872 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.0.self_attn1.whiten, num_groups=1, num_channels=512, metric=20.67 vs. limit=22.5 +2024-07-29 11:07:04,949 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer2.prob, batch_count=271860.0, ans=0.125 +2024-07-29 11:07:14,690 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.nonlin_attention.balancer.prob, batch_count=271873.3333333333, ans=0.125 +2024-07-29 11:07:21,935 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.580e+01 5.811e+01 6.589e+01 7.580e+01 1.190e+02, threshold=1.318e+02, percent-clipped=0.0 +2024-07-29 11:07:23,851 INFO [train.py:1114] (3/4) Epoch 20, batch 9700, loss[loss=0.1692, simple_loss=0.2633, pruned_loss=0.03753, over 4273.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.262, pruned_loss=0.03982, over 924981.75 frames. ], batch size: 25, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:07:32,308 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer2.prob, batch_count=271900.0, ans=0.125 +2024-07-29 11:07:55,306 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.2.bypass.scale_min, batch_count=271940.0, ans=0.2 +2024-07-29 11:08:08,450 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.self_attn_weights.whiten_keys, num_groups=4, num_channels=128, metric=3.37 vs. 
limit=6.0 +2024-07-29 11:08:11,656 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=271966.6666666667, ans=0.125 +2024-07-29 11:08:12,203 INFO [train.py:1114] (3/4) Epoch 20, batch 9750, loss[loss=0.1687, simple_loss=0.2661, pruned_loss=0.03564, over 4688.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2613, pruned_loss=0.03941, over 925404.20 frames. ], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:08:16,456 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.bypass.skip_rate, batch_count=271966.6666666667, ans=0.035 +2024-07-29 11:08:18,905 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.const_attention_rate, batch_count=271980.0, ans=0.025 +2024-07-29 11:08:22,884 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=384, metric=12.97 vs. limit=22.5 +2024-07-29 11:08:22,894 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.3.whiten, num_groups=1, num_channels=512, metric=9.51 vs. limit=12.0 +2024-07-29 11:08:28,472 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.self_attn_weights.pos_emb_skip_rate, batch_count=271993.3333333333, ans=0.0 +2024-07-29 11:08:39,812 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.balancer1.prob, batch_count=271993.3333333333, ans=0.125 +2024-07-29 11:08:42,131 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.feed_forward3.hidden_balancer.prob, batch_count=271993.3333333333, ans=0.125 +2024-07-29 11:08:53,792 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.conv_module1.whiten, num_groups=1, num_channels=192, metric=11.07 vs. limit=15.0 +2024-07-29 11:08:57,293 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.conv_module1.balancer1.prob, batch_count=272020.0, ans=0.125 +2024-07-29 11:09:06,960 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+01 5.621e+01 6.289e+01 7.582e+01 9.528e+01, threshold=1.258e+02, percent-clipped=0.0 +2024-07-29 11:09:09,240 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.nonlin_attention.balancer.prob, batch_count=272020.0, ans=0.125 +2024-07-29 11:09:10,469 INFO [train.py:1114] (3/4) Epoch 20, batch 9800, loss[loss=0.1537, simple_loss=0.2395, pruned_loss=0.03391, over 4694.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2611, pruned_loss=0.04003, over 925506.94 frames. 
], batch size: 12, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:09:15,494 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module2.balancer1.max_abs, batch_count=272033.3333333333, ans=10.0 +2024-07-29 11:09:18,862 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward2.hidden_balancer.prob, batch_count=272046.6666666667, ans=0.125 +2024-07-29 11:09:22,526 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.bypass_mid.scale_min, batch_count=272060.0, ans=0.2 +2024-07-29 11:09:23,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.conv_module1.balancer1.min_positive, batch_count=272060.0, ans=0.025 +2024-07-29 11:09:24,358 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward1.hidden_balancer.prob, batch_count=272060.0, ans=0.125 +2024-07-29 11:09:26,158 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.0.conv_module2.balancer1.min_positive, batch_count=272060.0, ans=0.025 +2024-07-29 11:10:13,504 INFO [train.py:1114] (3/4) Epoch 20, batch 9850, loss[loss=0.1892, simple_loss=0.2789, pruned_loss=0.04979, over 4898.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2619, pruned_loss=0.04035, over 927718.37 frames. ], batch size: 15, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:14,770 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.1.encoder.layers.0.bypass.skip_rate, batch_count=272100.0, ans=0.04949747468305833 +2024-07-29 11:10:25,931 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=192, metric=6.79 vs. limit=15.0 +2024-07-29 11:10:27,448 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.ff2_skip_rate, batch_count=272126.6666666667, ans=0.0 +2024-07-29 11:10:39,520 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.ff2_skip_rate, batch_count=272153.3333333333, ans=0.0 +2024-07-29 11:10:43,567 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.655e+01 6.014e+01 6.927e+01 8.025e+01 1.186e+02, threshold=1.385e+02, percent-clipped=0.0 +2024-07-29 11:10:45,489 INFO [train.py:1114] (3/4) Epoch 20, batch 9900, loss[loss=0.2031, simple_loss=0.3089, pruned_loss=0.04862, over 4845.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2627, pruned_loss=0.04096, over 927013.46 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:10:47,621 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.4.encoder.layers.0.nonlin_attention.whiten2, num_groups=1, num_channels=384, metric=5.67 vs. limit=15.0 +2024-07-29 11:10:48,710 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.feed_forward2.hidden_balancer.prob, batch_count=272166.6666666667, ans=0.125 +2024-07-29 11:11:00,468 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:11:04,259 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.3.feed_forward1.out_proj.dropout_p, batch_count=272206.6666666667, ans=0.1 +2024-07-29 11:11:16,894 INFO [train.py:1114] (3/4) Epoch 20, batch 9950, loss[loss=0.1538, simple_loss=0.2388, pruned_loss=0.03436, over 4535.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2631, pruned_loss=0.04165, over 929298.02 frames. 
], batch size: 10, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:11:22,529 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.1.bypass.scale_min, batch_count=272233.3333333333, ans=0.2 +2024-07-29 11:11:26,430 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.3.encoder.layers.1.self_attn_weights.whiten_keys, num_groups=8, num_channels=256, metric=5.60 vs. limit=6.0 +2024-07-29 11:11:26,805 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.balancer1.prob, batch_count=272246.6666666667, ans=0.125 +2024-07-29 11:11:54,018 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=272273.3333333333, ans=0.125 +2024-07-29 11:11:59,214 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.0.conv_module1.balancer2.prob, batch_count=272286.6666666667, ans=0.125 +2024-07-29 11:12:01,985 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module1.balancer1.prob, batch_count=272286.6666666667, ans=0.125 +2024-07-29 11:12:02,408 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.748e+01 5.836e+01 6.511e+01 7.365e+01 1.166e+02, threshold=1.302e+02, percent-clipped=0.0 +2024-07-29 11:12:04,276 INFO [train.py:1114] (3/4) Epoch 20, batch 10000, loss[loss=0.1817, simple_loss=0.2666, pruned_loss=0.04842, over 4637.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2659, pruned_loss=0.04231, over 926484.91 frames. ], batch size: 16, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:12:06,712 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.2.encoder.layers.0.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:12:18,489 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.4.encoder.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:12:23,410 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.feed_forward2.hidden_balancer.prob, batch_count=272326.6666666667, ans=0.125 +2024-07-29 11:12:29,273 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.1.encoder.layers.0.self_attn2.whiten, num_groups=1, num_channels=256, metric=14.35 vs. limit=22.5 +2024-07-29 11:12:29,788 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.feed_forward3.out_whiten, num_groups=1, num_channels=384, metric=9.65 vs. limit=15.0 +2024-07-29 11:12:48,183 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.bypass.scale_min, batch_count=272366.6666666667, ans=0.2 +2024-07-29 11:12:48,745 INFO [train.py:1114] (3/4) Epoch 20, batch 10050, loss[loss=0.2553, simple_loss=0.3239, pruned_loss=0.09333, over 3283.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2684, pruned_loss=0.04351, over 914856.28 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:13:13,359 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.1.conv_module2.balancer2.prob, batch_count=272380.0, ans=0.125 +2024-07-29 11:14:45,433 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 4.683e+01 5.969e+01 6.772e+01 7.755e+01 1.002e+02, threshold=1.354e+02, percent-clipped=0.0 +2024-07-29 11:14:47,461 INFO [train.py:1114] (3/4) Epoch 20, batch 10100, loss[loss=0.2242, simple_loss=0.2883, pruned_loss=0.08008, over 3396.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.272, pruned_loss=0.0474, over 862350.24 frames. 
], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:14:52,989 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.1.feed_forward2.hidden_balancer.prob, batch_count=272433.3333333333, ans=0.125 +2024-07-29 11:15:02,672 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.5.encoder.layers.0.bypass.skip_rate, batch_count=272460.0, ans=0.04949747468305833 +2024-07-29 11:15:03,228 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.2.encoder.layers.2.feed_forward1.out_proj.dropout_p, batch_count=272460.0, ans=0.1 +2024-07-29 11:15:16,702 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.bypass_mid.scale_min, batch_count=272486.6666666667, ans=0.2 +2024-07-29 11:15:19,714 INFO [train.py:1114] (3/4) Epoch 20, batch 10150, loss[loss=0.2172, simple_loss=0.281, pruned_loss=0.07668, over 3170.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2755, pruned_loss=0.05063, over 822153.85 frames. ], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:15:34,503 INFO [scaling.py:1120] (3/4) WithLoss: name=encoder.encoders.0.layers.1.self_attn_weights, loss-sum=0.000e+00 +2024-07-29 11:15:38,012 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.0.layers.1.balancer2.prob, batch_count=272526.6666666667, ans=0.125 +2024-07-29 11:15:40,013 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.ff3_skip_rate, batch_count=272540.0, ans=0.0 +2024-07-29 11:15:53,423 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.2.encoder.layers.1.conv_module1.whiten, num_groups=1, num_channels=384, metric=2.49 vs. limit=15.0 +2024-07-29 11:15:53,607 WARNING [optim.py:487] (3/4) Clipping_scale=2.0, grad-norm quartiles 5.562e+01 6.747e+01 7.203e+01 7.565e+01 9.241e+01, threshold=1.441e+02, percent-clipped=0.0 +2024-07-29 11:15:58,180 INFO [train.py:1114] (3/4) Epoch 20, batch 10200, loss[loss=0.2092, simple_loss=0.28, pruned_loss=0.0692, over 3569.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2776, pruned_loss=0.05276, over 788514.17 frames. ], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2024-07-29 11:16:58,569 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.0.conv_skip_rate, batch_count=272566.6666666667, ans=0.0 +2024-07-29 11:18:56,003 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.4.encoder.layers.0.feed_forward1.out_proj.dropout_p, batch_count=272580.0, ans=0.1 +2024-07-29 11:18:57,164 INFO [scaling.py:214] (3/4) ScheduledFloat: name=encoder.encoders.3.encoder.layers.2.self_attn_weights.pos_emb_skip_rate, batch_count=272580.0, ans=0.0 +2024-07-29 11:18:58,085 INFO [scaling.py:1024] (3/4) Whitening: name=encoder.encoders.0.layers.1.feed_forward2.out_whiten, num_groups=1, num_channels=192, metric=7.17 vs. limit=15.0 +2024-07-29 11:19:22,622 INFO [train.py:1387] (3/4) Done! 
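For reference, the training log above interleaves four record types emitted by the icefall zipformer recipe: ScheduledFloat values from `scaling.py`, whitening metrics checked against their limits, grad-norm quartile summaries from `optim.py`, and per-batch loss summaries from `train.py`. A minimal sketch of a parser that recovers the `tot_loss` curve from such a log is shown below; the file path is hypothetical, and the regex assumes only the `Epoch E, batch B ... tot_loss[loss=...]` layout visible in the records above.

```python
import re

# Matches train.py summary records of the form seen above, e.g.:
#   ... INFO [train.py:1114] (3/4) Epoch 20, batch 9750, loss[...],
#   tot_loss[loss=0.17, simple_loss=0.2613, ...]
SUMMARY = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+).*?"
    r"tot_loss\[loss=(?P<loss>[0-9.]+)"
)

def tot_loss_curve(path):
    """Yield (epoch, batch, tot_loss) tuples from an icefall train log."""
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            m = SUMMARY.search(line)
            if m:
                yield int(m["epoch"]), int(m["batch"]), float(m["loss"])

# Example usage (hypothetical log path):
# for epoch, batch, loss in tot_loss_curve("log-train-2024-07-29"):
#     print(epoch, batch, loss)
```

Plotting the resulting `(batch, loss)` pairs reproduces the trend visible in the log itself: `tot_loss` hovers near 0.17 through batch ~10000 and climbs once the small held-out-style batches (size 35) dominate at the end of the epoch.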
diff --git a/zipformer/pretrained/non_ctc/non_causal/exp/tensorboard/events.out.tfevents.1722096605.cdr2658.int.cedar.computecanada.ca.71.0 b/zipformer/pretrained/non_ctc/non_causal/exp/tensorboard/events.out.tfevents.1722096605.cdr2658.int.cedar.computecanada.ca.71.0 new file mode 100644 index 0000000000000000000000000000000000000000..79ea6ab57977682d5b48892292336c7a356b97c9 --- /dev/null +++ b/zipformer/pretrained/non_ctc/non_causal/exp/tensorboard/events.out.tfevents.1722096605.cdr2658.int.cedar.computecanada.ca.71.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c922f1a00b9f44606dd888e4dd1742451188d7e80bf21125d349fffb1a98ce93 +size 1976883
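The TensorBoard event file is committed as a Git LFS pointer rather than as raw bytes: the three lines of the new file are the entire pointer (spec version, SHA-256 object id, and size in bytes). As a sketch, a pointer like this can be inspected without LFS installed; the path below is hypothetical and only the three-line `key value` layout shown above is assumed.

```python
def read_lfs_pointer(path):
    """Parse a Git LFS pointer file into a dict of its key/value lines."""
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

# Example usage (hypothetical checkout path):
# p = read_lfs_pointer("exp/tensorboard/events.out.tfevents.1722096605...")
# p["oid"]  == "sha256:c922f1a00b9f44606dd888e4dd1742451188d7e80bf21125d349fffb1a98ce93"
# p["size"] == "1976883"
```

Fetching the actual 1.9 MB event file requires `git lfs pull` in the clone; the pointer alone is enough to verify integrity against the recorded SHA-256.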